spark-2012.0.deb/0000755000175000017500000000000011773666462012461 5ustar eugeneugenspark-2012.0.deb/sparksimp/0000755000175000017500000000000011753203757014462 5ustar eugeneugenspark-2012.0.deb/sparksimp/spark.ads0000644000175000017500000000177611753202340016271 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= package SPARK is pragma Pure (SPARK); end SPARK; spark-2012.0.deb/sparksimp/work_manager.ads0000644000175000017500000000720111753202340017612 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Streams.Stream_IO; with GNAT.OS_Lib; with GNAT.Table; package Work_Manager is type AnalysisType is (Simplify, Zombiescope, -- Jobs below here do not produce a log file when "-l" is specified. Victor, Riposte); subtype Logless_Job is AnalysisType range Victor .. Riposte; MaxErrorStringIndex : constant Integer := 100; subtype ErrorStringIndex is Integer range 1 .. MaxErrorStringIndex; subtype ErrorString is String (ErrorStringIndex); NullErrorString : constant ErrorString := ErrorString'(others => ' '); subtype Job_Index is Natural; subtype Worker_ID is Natural; AnyFailed : Boolean := False; -- Keep track of the status of jobs in the list. -- At present this is not really used for anything, but it could be -- useful info if we add a monitor thread at a later date. type JobStatus is (Pending, InProgress, Finished, Failed); -- Record for details of each job. type Work_Package is record File_Name : GNAT.OS_Lib.String_Access; File_Size : Ada.Streams.Stream_IO.Count; Analysis : AnalysisType; Status : JobStatus := Pending; Worker : Worker_ID := 0; WhyFailed : ErrorString; end record; -- Work_List manages the list of jobs to be done. 
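   --  Typical use, as driven by SPARKSimp's main program; a sketch only,
   --  with an illustrative file name:
   --
   --     Jobs.Add_Work_Package (File => "p.vcg", JobType => Simplify);
   --     Jobs.Sort_Files_By_Size;
   --     while Jobs.Number_Of_Files (Of_Status => Pending) > 0 loop
   --        Jobs.GetNextJob (Job_ID);
   --        --  ... hand Job_ID to a worker, which later reports back
   --        --  via Jobs.JobFinished or Jobs.JobFailed.
   --     end loop;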
package Jobs is procedure Add_Work_Package (File : in String; JobType : in AnalysisType); procedure GetNextJob (Job_ID : out Job_Index); procedure JobFinished (Job : in Job_Index); procedure JobFailed (Job : in Job_Index; FailReason : in ErrorString); procedure Sort_Files_By_Size; procedure Display_Status_Banner; function Total_Number_Of_Files return Job_Index; function Number_Of_Files (Of_Status : in JobStatus) return Job_Index; procedure List_Jobs; function Get_File_Name (Job : in Job_Index) return String; function Get_Simple_File_Name (Job : in Job_Index) return String; function Get_Analysis_Type (Job : in Job_Index) return AnalysisType; function Get_HasFailed (Job : in Job_Index) return Boolean; function Get_WhyFailed (Job : in Job_Index) return ErrorString; procedure Clear; end Jobs; private package Work_Table is new GNAT.Table ( Table_Component_Type => Work_Package, Table_Index_Type => Job_Index, Table_Low_Bound => 1, Table_Initial => 1000, Table_Increment => 100); end Work_Manager; spark-2012.0.deb/sparksimp/log_files.adb0000644000175000017500000000464711753202340017073 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Unchecked_Deallocation; package body Log_Files is procedure Initialize (File_Set : out Log_File_Set) is begin for I in File_Set'Range loop File_Set (I).In_Use := False; -- No initial value available for the File_Type component. 
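            --  The FT access component defaults to null; Open allocates
            --  the underlying Ada.Text_IO.File_Type when the slot is
            --  first taken into use.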
end loop; end Initialize; procedure Open (File_Name : in String; Index : out Positive; File_Set : in out Log_File_Set) is procedure Set_Next_Free is begin Index := 1; while File_Set (Index).In_Use loop Index := Index + 1; end loop; end Set_Next_Free; begin Set_Next_Free; File_Set (Index).FT := new Ada.Text_IO.File_Type; Ada.Text_IO.Create (File_Set (Index).FT.all, Ada.Text_IO.Out_File, File_Name); File_Set (Index).In_Use := True; end Open; procedure Close (Index : in Positive; File_Set : in out Log_File_Set) is procedure Free is new Ada.Unchecked_Deallocation (Ada.Text_IO.File_Type, Ada_Text_IO_File_Type_Ptr); begin Ada.Text_IO.Close (File_Set (Index).FT.all); Free (File_Set (Index).FT); File_Set (Index).In_Use := False; end Close; function File_Type (Index : Positive; File_Set : Log_File_Set) return access Ada.Text_IO.File_Type is begin return File_Set (Index).FT; end File_Type; end Log_Files; spark-2012.0.deb/sparksimp/log_files.ads0000644000175000017500000000400311753202340017076 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Text_IO; -- This package is needed because we want to store a reference to the log -- file for each Simplifier. If the File_Type was stored with the other -- data, that record would become limited private and we could not copy the -- data (e.g. when compacting the Worker_Set). package Log_Files is type Log_File is private; type Log_File_Set is array (Positive range <>) of Log_File; procedure Initialize (File_Set : out Log_File_Set); procedure Open (File_Name : in String; Index : out Positive; File_Set : in out Log_File_Set); procedure Close (Index : in Positive; File_Set : in out Log_File_Set); function File_Type (Index : Positive; File_Set : Log_File_Set) return access Ada.Text_IO.File_Type; private type Ada_Text_IO_File_Type_Ptr is access Ada.Text_IO.File_Type; type Log_File is record In_Use : Boolean; FT : Ada_Text_IO_File_Type_Ptr; end record; end Log_Files; spark-2012.0.deb/sparksimp/g-trasym.adb0000644000175000017500000000255211753202340016664 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= package body GNAT.Traceback.Symbolic is ------------------------ -- Symbolic_Traceback -- ------------------------ function Symbolic_Traceback (Traceback : Tracebacks_Array) return String is begin return ""; end Symbolic_Traceback; function Symbolic_Traceback (E : Exception_Occurrence) return String is begin return ""; end Symbolic_Traceback; end GNAT.Traceback.Symbolic; spark-2012.0.deb/sparksimp/Makefile0000644000175000017500000000466011753202341016114 0ustar eugeneugen# ----------------------------------------------------------------------------- # (C) Altran Praxis Limited # ----------------------------------------------------------------------------- # # The SPARK toolset is free software; you can redistribute it and/or modify it # under terms of the GNU General Public License as published by the Free # Software Foundation; either version 3, or (at your option) any later # version. The SPARK toolset is distributed in the hope that it will be # useful, but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the GNU # General Public License distributed with the SPARK toolset; see file # COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of # the license. # # ============================================================================= ################################################################################ # PURPOSE #------------------------------------------------------------------------------- # Makefile for SPARKSimp # For Linux, Windows, Solaris or Mac OS X (aka Darwin). ################################################################################ ################################################################################ # BUILD CONFIGURATION ################################################################################ # Name of the output program. OUTPUT_NAME:=sparksimp # Location of root. ROOT:=.. # Location of common. 
COMMON:=${ROOT}/common include ${COMMON}/Makefile.inc ################################################################################ # PLATFORM INDEPENDENT CONFIGURATION ################################################################################ LINK_ARGS:=new_expect.o ################################################################################ # TARGETS ################################################################################ all: ${OUTPUT_NAME}${EXE_EXTN} ${OUTPUT_NAME}${EXE_EXTN}: new_expect.o gnatmake -j${SPARKCPUS} ${GNATMAKE_OPTS} ${OUTPUT_NAME} -o $@ -bargs ${BIND_OPTS} -largs ${LINK_ARGS} new_expect.o: new_expect.c gcc -c -g -O2 -D${PREP_TARGET} $< # Cleaning code base # ================== clean: standardclean reallyclean: clean targetclean ################################################################################ # END-OF-FILE spark-2012.0.deb/sparksimp/sparksimp.adb0000644000175000017500000005073111753202340017134 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ------------------------------------------------------------------------------ -- SPARKSimp -- -- -- -- This program traverses a directory and all of its subdirectories trying -- -- to find any files (typically VCG or DPC files) that need to be processed -- by the specified analysis tool(s) (for instance Simplifier, ZombieScope, -- Victor). -- -- -- This program can only be compiled with GNAT 5.04 or later, since it -- -- relies on several of the GNAT.* predefined library units. 
-- ------------------------------------------------------------------------------ with Ada.Real_Time; with Ada.Exceptions; with Ada.Text_IO; with GNAT.Regpat; with GNAT.Directory_Operations; with GNAT.IO_Aux; with GNAT.OS_Lib; with GNAT.Traceback.Symbolic; with Version; with CMD; with Work_Manager; with Utility; with Workers; use type Ada.Real_Time.Time; use type GNAT.OS_Lib.String_Access; use type GNAT.OS_Lib.Argument_List; use type GNAT.Regpat.Match_Location; use type Work_Manager.AnalysisType; procedure SPARKSimp is --------------------------------------------------------------------------- -- Utility subprogams -- --------------------------------------------------------------------------- -- Print tool banner to Standard_Output procedure Banner is begin Ada.Text_IO.Put_Line ("SPARKSimp " & Version.Toolset_Banner_Line); Ada.Text_IO.Put_Line (Version.Toolset_Copyright); -- Report location of simplifier, ZombieScope and Victor binaries - -- this should prevent "running the wrong simplifier by accident" -- problems if Workers.Spadesimp_Exe /= null then Ada.Text_IO.Put_Line ("Simplifier binary located at: " & Workers.Spadesimp_Exe.all); end if; if Workers.ZombieScope_Exe /= null then Ada.Text_IO.Put_Line ("ZombieScope binary located at: " & Workers.ZombieScope_Exe.all); end if; if CMD.Run_Victor then if Workers.Victor_Exe /= null then Ada.Text_IO.Put_Line ("Victor binary located at: " & Workers.Victor_Exe.all); end if; end if; if CMD.Run_Riposte then if Workers.Riposte_Exe /= null then Ada.Text_IO.Put_Line ("Riposte located at: " & Workers.Riposte_Exe.all); end if; if Workers.Python_Exe /= null then Ada.Text_IO.Put_Line ("Python located at: " & Workers.Python_Exe.all); end if; end if; Ada.Text_IO.New_Line; end Banner; -- returns True if Left is older than (i.e. preceeds) Right function Is_Older (Left, Right : in GNAT.OS_Lib.OS_Time) return Boolean is Result : Boolean; use GNAT.OS_Lib; begin if GM_Year (Left) = GM_Year (Right) then if GM_Month (Left) = GM_Month (Right) then if GM_Day (Left) = GM_Day (Right) then if GM_Hour (Left) = GM_Hour (Right) then if GM_Minute (Left) = GM_Minute (Right) then Result := GM_Second (Left) < GM_Second (Right); else Result := GM_Minute (Left) < GM_Minute (Right); end if; else Result := GM_Hour (Left) < GM_Hour (Right); end if; else Result := GM_Day (Left) < GM_Day (Right); end if; else Result := GM_Month (Left) < GM_Month (Right); end if; else Result := GM_Year (Left) < GM_Year (Right); end if; return Result; end Is_Older; -- Traverses all files and directories rooted at current-working -- directory, find VCG and DPC files that need analysis, and put -- them into Work_Manager. -- -- A VCG or DPC file need analysis (simplification) if either: -- 1) The CMD.All_Files flag is True, or -- 2) The VCG (DPC) file has no corresponding SIV (SDP) file, or -- 3) The VCG (DPC) file has a corresponding SIV (SDP) file, but the -- SIV (SDP) file's time stamp os older than that of the -- VCG (DPC) file. -- A VCG file also needs analysis (victor) if CMD.Run_Victor is -- true and if either: -- 1) The VCT file does not exist yet, or -- 2) The VCT file is older than the VCG file, or -- 3) The VCT file is older than the SIV file. -- Finally, if Cmd.Run_Riposte is true and: -- 1) The RSM file does not exist yet, or -- 2) The RSM file is older than the VCG file, or -- 3) The RSM file is older than the SIV file. 
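   --  Worked instance of the first rule, with illustrative file names:
   --     "stack.vcg" and no "stack.siv"                    => simplify
   --     "stack.siv" older than "stack.vcg"                => simplify
   --     "stack.siv" up to date, CMD.All_Files not set     => skip
   --  The Victor and Riposte rules follow the same pattern, comparing
   --  "stack.vct" / "stack.rsm" against "stack.vcg" and, when the
   --  Simplifier also runs, against "stack.siv".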
procedure Find_Files_To_Analyse is use GNAT.Directory_Operations; procedure Scan_Directory (Dir : in Dir_Name_Str); function File_Needs_Analysis (File : in String) return Boolean is Result : Boolean; begin if Utility.Is_A_VCG_File (File) or Utility.Is_A_DPC_File (File) then if CMD.All_Files then Result := True; else declare Simplified_File : String (1 .. File'Length); Victored_File : String (1 .. File'Length); Riposte_Summary_File : String (1 .. File'Length); File_Time : GNAT.OS_Lib.OS_Time; Simplified_File_Time : GNAT.OS_Lib.OS_Time; Victored_File_Time : GNAT.OS_Lib.OS_Time; Riposte_File_Time : GNAT.OS_Lib.OS_Time; begin Simplified_File := File; Victored_File := File; Riposte_Summary_File := File; -- Simplified VCG files end in ".siv", while -- simplified DPC files end in ".sdp", so... Simplified_File (Simplified_File'Last - 2) := 's'; if Utility.Is_A_VCG_File (File) then Simplified_File (Simplified_File'Last - 1) := 'i'; Simplified_File (Simplified_File'Last) := 'v'; -- Victored files end in ".vct" Victored_File (Victored_File'Last) := 't'; -- And riposte files end with ".rsm" Riposte_Summary_File (Riposte_Summary_File'Last - 2) := 'r'; Riposte_Summary_File (Riposte_Summary_File'Last - 1) := 's'; Riposte_Summary_File (Riposte_Summary_File'Last) := 'm'; else Simplified_File (Simplified_File'Last - 1) := 'd'; Simplified_File (Simplified_File'Last) := 'p'; end if; if GNAT.IO_Aux.File_Exists (Simplified_File) then File_Time := GNAT.OS_Lib.File_Time_Stamp (File); Simplified_File_Time := GNAT.OS_Lib.File_Time_Stamp (Simplified_File); Result := Is_Older (Simplified_File_Time, File_Time); else -- Simplified file does not exist, so we definitely need -- to simplify the VCG or DPC file Result := True; end if; -- Check if we need to run victor. We only do this -- for VCG files. if not Result and then Utility.Is_A_VCG_File (File) and then CMD.Run_Victor then if GNAT.IO_Aux.File_Exists (Victored_File) then Victored_File_Time := GNAT.OS_Lib.File_Time_Stamp (Victored_File); Result := Is_Older (Victored_File_Time, File_Time); -- If we run both victor AND the simplifier, -- then a simplified file older than the -- victored file is also reason to re-run -- victor. if CMD.Run_Simplifier then Result := Result or Is_Older (Victored_File_Time, Simplified_File_Time); end if; else -- If the vct file does not exist, we need to -- run victor anyway. Result := True; end if; end if; -- Similar, for Riposte... if not Result and then Utility.Is_A_VCG_File (File) and then CMD.Run_Riposte then if GNAT.IO_Aux.File_Exists (Riposte_Summary_File) then Riposte_File_Time := GNAT.OS_Lib.File_Time_Stamp (Riposte_Summary_File); Result := Is_Older (Riposte_File_Time, File_Time); -- If we run both riposte AND the simplifier, -- then a simplified file older than the -- riposte summary file is also reason to -- re-run victor. if CMD.Run_Simplifier then Result := Result or Is_Older (Riposte_File_Time, Simplified_File_Time); end if; else -- If the rsm file does not exist, we need to -- run Riposte anyway. Result := True; end if; end if; end; end if; else Result := False; end if; return Result; end File_Needs_Analysis; procedure Scan_Directory (Dir : in Dir_Name_Str) is D : Dir_Type; Str : String (1 .. 1024); Last : Natural; begin Open (D, Dir); loop Read (D, Str, Last); exit when Last = 0; declare F : constant String := Dir & Str (1 .. Last); begin if GNAT.OS_Lib.Is_Directory (F) then Utility.Debug ("Found a directory : " & F); -- Ignore "." and ".." if ((Last = 1) and then (Str (1) = '.')) or ((Last = 2) and then (Str (1) = '.' 
and Str (2) = '.')) then null; else -- Recurse here Scan_Directory (F & GNAT.OS_Lib.Directory_Separator); end if; elsif File_Needs_Analysis (F) then if Utility.Is_A_VCG_File (F) then Utility.Debug ("Found a VCG file : " & F); if CMD.Run_Simplifier then Work_Manager.Jobs.Add_Work_Package (F, Work_Manager.Simplify); end if; -- Only add victor and riposte jobs directly if -- we don't run the simplifier. Otherwise each -- simplifier job will also kick off a victor -- and/or riposte job. if not CMD.Run_Simplifier and CMD.Run_Victor then Work_Manager.Jobs.Add_Work_Package (F, Work_Manager.Victor); end if; if not CMD.Run_Simplifier and CMD.Run_Riposte then Work_Manager.Jobs.Add_Work_Package (F, Work_Manager.Riposte); end if; elsif Utility.Is_A_DPC_File (F) then Utility.Debug ("Found a DPC file : " & F); if CMD.Run_Zombiescope then Work_Manager.Jobs.Add_Work_Package (F, Work_Manager.Zombiescope); end if; end if; end if; end; end loop; Close (D); exception when others => Close (D); raise; end Scan_Directory; CWD : constant Dir_Name_Str := Get_Current_Dir; begin Scan_Directory (CWD); exception when others => Ada.Text_IO.Put_Line ("Error scanning directories."); Work_Manager.Jobs.Clear; end Find_Files_To_Analyse; -- Run the desired analyses of all files in Work_Manager procedure Analyse_Files is Start_Time : Ada.Real_Time.Time; End_Time : Ada.Real_Time.Time; Elapsed_Time : Duration; Working_Set : Workers.Worker_Set (CMD.Processes); Job_ID : Work_Manager.Job_Index; TF, PF, IPF, CF, FF : Work_Manager.Job_Index; WA : Natural; begin -- Do we have any work? if Work_Manager.Jobs.Total_Number_Of_Files /= 0 then Start_Time := Ada.Real_Time.Clock; Workers.Initialize (Working_Set, CMD.SArgs, CMD.ZArgs, CMD.VArgs, CMD.RArgs); loop while Workers.Workers_Available (Working_Set) > 0 and Work_Manager.Jobs.Number_Of_Files (Of_Status => Work_Manager.Pending) /= 0 loop Work_Manager.Jobs.GetNextJob (Job_ID); Utility.Debug ("Starting Job with JobID =" & Integer'Image (Job_ID)); Workers.Start_Analysis (Job_ID, Working_Set); end loop; Workers.Run_Analysis (Working_Set); TF := Work_Manager.Jobs.Total_Number_Of_Files; PF := Work_Manager.Jobs.Number_Of_Files (Work_Manager.Pending); IPF := Work_Manager.Jobs.Number_Of_Files (Work_Manager.InProgress); CF := Work_Manager.Jobs.Number_Of_Files (Work_Manager.Finished); FF := Work_Manager.Jobs.Number_Of_Files (Work_Manager.Failed); WA := Workers.Workers_Available (Working_Set); Utility.Debug ("Job finished..."); Utility.Debug ("Total " & Integer'Image (TF)); Utility.Debug ("Pending " & Integer'Image (PF)); Utility.Debug ("InProgress " & Integer'Image (IPF)); Utility.Debug ("Finished " & Integer'Image (CF)); Utility.Debug ("Failed " & Integer'Image (FF)); Utility.Debug ("WA is " & Integer'Image (WA)); exit when (CF + FF = TF) and (PF = 0) and (IPF = 0); end loop; End_Time := Ada.Real_Time.Clock; Elapsed_Time := Ada.Real_Time.To_Duration (End_Time - Start_Time); Utility.Put_Message_With_Duration (Message => "Total elapsed time: ", D => Elapsed_Time); end if; end Analyse_Files; -- put any errors found onto the screen procedure Report_Errors is Last_Job : Work_Manager.Job_Index; begin Last_Job := Work_Manager.Jobs.Total_Number_Of_Files; if Work_Manager.AnyFailed then Ada.Text_IO.New_Line; Ada.Text_IO.Put_Line ("The following files reported an error during simplification:"); Ada.Text_IO.New_Line; -- number of files is always > 0 when report_errors is called for Job_Id in Natural range 1 .. 
Last_Job loop if Work_Manager.Jobs.Get_HasFailed (Job_Id) then Ada.Text_IO.Put_Line (Work_Manager.Jobs.Get_File_Name (Job_Id)); Ada.Text_IO.Put_Line (" " & Work_Manager.Jobs.Get_WhyFailed (Job_Id)); end if; end loop; end if; end Report_Errors; begin -- Code of SPARKSimp -- SPARKSimp now always uses '-' for all switches since release 8.1.5 CMD.Process_Command_Line ('-'); if CMD.Version_Requested then Banner; elsif CMD.Valid then Workers.Locate_Binaries; Banner; declare Found_Required_Binaries : Boolean := True; begin -- If we need the Simplifier, but can't find it, then complain if CMD.Run_Simplifier and Workers.Spadesimp_Exe = null then Found_Required_Binaries := False; Ada.Text_IO.Put ("Error: Can't locate "); if CMD.Simplifier_Exe_Switch = null then Ada.Text_IO.Put_Line (Workers.Spadesimp_Command & " binary on PATH"); else Ada.Text_IO.Put ("simplifier executable specified by "); Ada.Text_IO.Put_Line ("-x=" & CMD.Simplifier_Exe_Switch.all); end if; end if; -- If we need ZombieScope, but can't find it, then complain if CMD.Run_Zombiescope and Workers.ZombieScope_Exe = null then Found_Required_Binaries := False; Ada.Text_IO.Put ("Error: Can't locate "); if CMD.ZombieScope_Exe_Switch = null then Ada.Text_IO.Put_Line (Workers.ZombieScope_Command & " binary on PATH"); else Ada.Text_IO.Put ("ZombieScope executable specified by "); Ada.Text_IO.Put_Line ("-z=" & CMD.ZombieScope_Exe_Switch.all); end if; end if; -- If we need Victor (and Alt-Ergo), but can't find it, then complain if CMD.Run_Victor then if Workers.Victor_Exe = null then Found_Required_Binaries := False; Ada.Text_IO.Put ("Error: Can't locate "); Ada.Text_IO.Put_Line (Workers.Victor_Command & " binary on PATH"); end if; end if; -- If we need Riposte (and python), but can't find it, then complain if CMD.Run_Riposte then if Workers.Riposte_Exe = null then Found_Required_Binaries := False; Ada.Text_IO.Put ("Error: Can't locate "); Ada.Text_IO.Put_Line (Workers.Riposte_Command & " binary on PATH"); end if; if Workers.Python_Exe = null then Found_Required_Binaries := False; Ada.Text_IO.Put ("Error: Can't locate "); Ada.Text_IO.Put_Line (Workers.Python_Command & " binary on PATH"); end if; end if; if Found_Required_Binaries then Find_Files_To_Analyse; Work_Manager.Jobs.List_Jobs; if Work_Manager.Jobs.Total_Number_Of_Files > 0 then if CMD.Dry_Run then Ada.Text_IO.Put_Line ("Dry run mode - no simplifications performed"); else Work_Manager.Jobs.Sort_Files_By_Size; Work_Manager.Jobs.Display_Status_Banner; Analyse_Files; Report_Errors; end if; end if; end if; end; else Banner; CMD.Usage; end if; exception when E : others => Ada.Text_IO.New_Line; Ada.Text_IO.Put_Line ("Unhandled Exception in SPARKSimp."); Ada.Text_IO.New_Line; Ada.Text_IO.Put_Line (Version.Toolset_Support_Line1); Ada.Text_IO.Put_Line (Version.Toolset_Support_Line2); Ada.Text_IO.Put_Line (Version.Toolset_Support_Line3); Ada.Text_IO.Put_Line (Version.Toolset_Support_Line4); Ada.Text_IO.New_Line; Ada.Text_IO.Put_Line ("Exception information:"); Ada.Text_IO.Put_Line (Ada.Exceptions.Exception_Information (E)); Ada.Text_IO.Put_Line ("Traceback:"); Ada.Text_IO.Put_Line (GNAT.Traceback.Symbolic.Symbolic_Traceback (E)); end SPARKSimp; spark-2012.0.deb/sparksimp/workers.ads0000644000175000017500000000724311753202340016640 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it 
and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Real_Time; with GNAT.OS_Lib; with SPARK.Expect; with Work_Manager; with Log_Files; --------------------------------------------------------------------------- -- Workers package specification -- -- -- -- This package provides the Start_Analysis and Run_Analysis procedures to -- -- run multiple simplifications in parallel. The number of Workers is -- -- set by the -p option in the command line. -- --------------------------------------------------------------------------- package Workers is Path : GNAT.OS_Lib.String_Access; Spadesimp_Exe : GNAT.OS_Lib.String_Access; ZombieScope_Exe : GNAT.OS_Lib.String_Access; Victor_Exe : GNAT.OS_Lib.String_Access; Riposte_Exe : GNAT.OS_Lib.String_Access; Python_Exe : GNAT.OS_Lib.String_Access; Spadesimp_Command : constant String := "spadesimp"; ZombieScope_Command : constant String := "zombiescope"; Victor_Command : constant String := "victor"; Riposte_Command : constant String := "riposte.py"; Python_Command : constant String := "python"; -- Sets Path, Spadesimp_Exe, ZombieScope_Exe, Victor_Exe and Alt-Ergo -- depending on setting of -x flag, -z flag, and PATH environment procedure Locate_Binaries; type Worker_Set (Size : Positive) is limited private; procedure Initialize (Work_Set : in out Worker_Set; S_Options : in GNAT.OS_Lib.Argument_List_Access; Z_Options : in GNAT.OS_Lib.Argument_List_Access; V_Options : in GNAT.OS_Lib.Argument_List_Access; R_Options : in GNAT.OS_Lib.Argument_List_Access); procedure Start_Analysis (The_Job : in Work_Manager.Job_Index; Work_Set : in out Worker_Set); function Workers_Available (Work_Set : Worker_Set) return Natural; procedure Run_Analysis (Work_Set : in out Worker_Set); private type Work_Data is record Job_ID : Work_Manager.Job_Index; Start_Time : Ada.Real_Time.Time; End_Time : Ada.Real_Time.Time; Elapsed_Time : Duration; OK : Boolean; WhyFailed : Work_Manager.ErrorString; OP : Positive; end record; type Work_Set_Data is array (Positive range <>) of Work_Data; type Worker_Set (Size : Positive) is record Worker_Count : Positive; Working_Count : Natural; Files : Work_Set_Data (1 .. Size); Procs : SPARK.Expect.Multiprocess_Regexp_Array (1 .. Size); Logs : Log_Files.Log_File_Set (1 .. Size); end record; end Workers; spark-2012.0.deb/sparksimp/workers.adb0000644000175000017500000004663211753202340016624 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Real_Time; with Ada.Text_IO; with Ada.IO_Exceptions; with Ada.Strings.Fixed; with Ada.Characters; with Ada.Characters.Latin_1; with GNAT.Regpat; with GNAT.Directory_Operations; with GNAT.OS_Lib; with Utility; with Wrap; with CMD; with Work_Manager; with SPARK.Expect; use type Ada.Real_Time.Time; use type GNAT.OS_Lib.String_Access; use type GNAT.OS_Lib.Argument_List; use type GNAT.Regpat.Match_Location; use type SPARK.Expect.Process_Descriptor_Access; use type SPARK.Expect.Expect_Match; use type Work_Manager.AnalysisType; package body Workers is Simplifier_Options : GNAT.OS_Lib.Argument_List_Access; ZombieScope_Options : GNAT.OS_Lib.Argument_List_Access; Victor_Options : GNAT.OS_Lib.Argument_List_Access; Riposte_Options : GNAT.OS_Lib.Argument_List_Access; Pat : constant GNAT.Regpat.Pattern_Matcher := GNAT.Regpat.Compile ("^.*\n", GNAT.Regpat.Multiple_Lines); Pat_Access : constant SPARK.Expect.Pattern_Matcher_Access := new GNAT.Regpat.Pattern_Matcher'(Pat); procedure Locate_Binaries is begin Path := GNAT.OS_Lib.Getenv ("PATH"); if Path = null then Path := GNAT.OS_Lib.Getenv ("path"); end if; -- Find Simplifier binary - check -x switch first. -- If that's not specified, then look on PATH if CMD.Simplifier_Exe_Switch /= null then -- simplifer executable specified by -x= switch Spadesimp_Exe := GNAT.OS_Lib.Locate_Exec_On_Path (CMD.Simplifier_Exe_Switch.all); else if Path = null then Ada.Text_IO.Put_Line ("Error: can't find PATH environment variable"); Spadesimp_Exe := null; else Spadesimp_Exe := GNAT.OS_Lib.Locate_Exec_On_Path (Spadesimp_Command); end if; end if; if CMD.ZombieScope_Exe_Switch /= null then -- ZombieScope executable specified by -z= switch ZombieScope_Exe := GNAT.OS_Lib.Locate_Exec_On_Path (CMD.ZombieScope_Exe_Switch.all); else if Path = null then Ada.Text_IO.Put_Line ("Error: can't find PATH environment variable"); ZombieScope_Exe := null; else ZombieScope_Exe := GNAT.OS_Lib.Locate_Exec_On_Path (ZombieScope_Command); end if; end if; if CMD.Victor_Exe_Switch /= null then -- Victor executable specified by -c= switch Victor_Exe := GNAT.OS_Lib.Locate_Exec_On_Path (CMD.Victor_Exe_Switch.all); else if Path = null then Ada.Text_IO.Put_Line ("Error: can't find PATH environment variable"); Victor_Exe := null; else Victor_Exe := GNAT.OS_Lib.Locate_Exec_On_Path (Victor_Command); end if; end if; if Path = null then Riposte_Exe := null; Python_Exe := null; else Riposte_Exe := GNAT.OS_Lib.Locate_Exec_On_Path (Riposte_Command); Python_Exe := GNAT.OS_Lib.Locate_Exec_On_Path (Python_Command); end if; end Locate_Binaries; procedure Initialize (Work_Set : in out Worker_Set; S_Options : in GNAT.OS_Lib.Argument_List_Access; Z_Options : in GNAT.OS_Lib.Argument_List_Access; V_Options : in GNAT.OS_Lib.Argument_List_Access; R_Options : in GNAT.OS_Lib.Argument_List_Access) is begin Locate_Binaries; Simplifier_Options := S_Options; ZombieScope_Options := Z_Options; Victor_Options := V_Options; Riposte_Options := R_Options; Work_Set.Worker_Count := 
Work_Set.Procs'Length; Work_Set.Working_Count := 0; for I in Work_Set.Procs'Range loop Work_Set.Procs (I) := SPARK.Expect.Multiprocess_Regexp' (Descriptor => null, Regexp => Pat_Access); end loop; -- No need to initialize Work_Set.Files (access to it is controlled -- by Working_Count). Log_Files.Initialize (Work_Set.Logs); end Initialize; procedure Start_Analysis (The_Job : in Work_Manager.Job_Index; Work_Set : in out Worker_Set) is procedure Create_Log_File (For_Worker : Natural); Worker : Natural; On_File : constant String := Work_Manager.Jobs.Get_File_Name (The_Job); -- Find the first directory separator from the right hand -- end of File_Name, so we can split into the directory, -- and the plain file name Dir_Index : constant Natural := Ada.Strings.Fixed.Index (On_File, Utility.String_1'(1 => GNAT.OS_Lib.Directory_Separator), Ada.Strings.Backward); -- Directory in which to run simplifier Dir : constant String := On_File (1 .. Dir_Index); -- Simple file name of file to be processed, with 4-char suffix -- (e.g. ".vcg" or ".dpc") removed Simple_File_Name : aliased constant String := Work_Manager.Jobs.Get_Simple_File_Name (The_Job); SF : constant GNAT.OS_Lib.String_Access := new String'(Simple_File_Name); Simplifier_Expect_Args : constant GNAT.OS_Lib.Argument_List := (1 => SF) & Simplifier_Options.all; ZombieScope_Expect_Args : constant GNAT.OS_Lib.Argument_List := (1 => SF) & ZombieScope_Options.all; Victor_Expect_Args : constant GNAT.OS_Lib.Argument_List := (1 => SF) & Victor_Options.all; -- See note below in Start_Analysis why this is different. Riposte_Expect_Args : constant GNAT.OS_Lib.Argument_List := (1 => Riposte_Exe, 2 => SF) & Riposte_Options.all; procedure Create_Log_File (For_Worker : Natural) is OK : Boolean := False; Suffix : Utility.String_3; begin -- Decide the correct suffix if Utility.Is_A_VCG_File (On_File) then Suffix := "log"; else Suffix := "zsl"; end if; declare Log_File : constant String := On_File (1 .. On_File'Last - 3) & Suffix; begin loop begin Log_Files.Open (Log_File, Work_Set.Files (For_Worker).OP, Work_Set.Logs); OK := True; exception when Ada.IO_Exceptions.Use_Error | Ada.IO_Exceptions.Name_Error => Ada.Text_IO.Put_Line ("Create failed - trying again..."); OK := False; end; exit when OK; end loop; end; end Create_Log_File; FD_Access : constant SPARK.Expect.Process_Descriptor_Access := new SPARK.Expect.Process_Descriptor; Job_Type : Work_Manager.AnalysisType; -- Result : SPARK.Expect.Expect_Match; begin -- Start_Analysis Job_Type := Work_Manager.Jobs.Get_Analysis_Type (The_Job); Work_Set.Working_Count := Work_Set.Working_Count + 1; Worker := Work_Set.Working_Count; if CMD.Log_Output and Job_Type /= Work_Manager.Victor then -- and this particlular job is not a victor job Create_Log_File (Worker); end if; declare FN : constant String := Work_Manager.Jobs.Get_File_Name (The_Job); Job_Str : constant String := Utility.Format_Int (Item => The_Job, Width => 6); begin -- Print a message to indicate the job has started. Ada.Text_IO.Put_Line (Job_Str & " Started - " & Work_Manager.AnalysisType'Image (Job_Type) & " - " & FN); -- Create a Process_Descriptor that can be accessed via a -- Process_Descriptor_Access. If one already exists then it was -- created for a previous simplification and recycled in Compact. -- This ensures that only the minimum number of objects are created -- on the heap and we don't need to worry about Free'ing them. 
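      --  (The recycled pointer comes from Compact in Run_Analysis, which
      --  parks the finished worker's descriptor access value in the slot
      --  that the next call to Start_Analysis will claim here.)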
if Work_Set.Procs (Worker).Descriptor = null then Work_Set.Procs (Worker).Descriptor := new SPARK.Expect.Process_Descriptor; end if; Work_Set.Files (Worker).Start_Time := Ada.Real_Time.Clock; GNAT.Directory_Operations.Change_Dir (Dir); -- if Is_A_VCG_File (FN) then case Job_Type is when Work_Manager.Simplify => SPARK.Expect.Non_Blocking_Spawn (FD_Access.all, Spadesimp_Exe.all, Simplifier_Expect_Args, 0, False); when Work_Manager.Zombiescope => SPARK.Expect.Non_Blocking_Spawn (FD_Access.all, ZombieScope_Exe.all, ZombieScope_Expect_Args, 0, False); when Work_Manager.Victor => SPARK.Expect.Non_Blocking_Spawn (FD_Access.all, Victor_Exe.all, Victor_Expect_Args, 0, True); when Work_Manager.Riposte => -- Note that this invocation is different to the other -- tools above; we say "python riposte.py " -- instead of "riposte.py ". The reason for -- this that on Windows spawning processes is -- different from a shell invocation. The chosen -- workaround is portable on all supported platforms. SPARK.Expect.Non_Blocking_Spawn (FD_Access.all, Python_Exe.all, Riposte_Expect_Args, 0, True); end case; end; Work_Set.Procs (Worker).Descriptor := FD_Access; Work_Set.Files (Worker).Job_ID := The_Job; Work_Set.Files (Worker).OK := True; Work_Set.Files (Worker).WhyFailed := Work_Manager.NullErrorString; end Start_Analysis; function Workers_Available (Work_Set : Worker_Set) return Natural is begin return Work_Set.Worker_Count - Work_Set.Working_Count; end Workers_Available; procedure Run_Analysis (Work_Set : in out Worker_Set) is procedure Compact (Removing : in Positive); procedure Close_Log_File; Result : SPARK.Expect.Expect_Match; Never_Timeout : constant Integer := -1; Worker : Natural; Job_ID : Work_Manager.Job_Index; procedure Close_Log_File is OK : Boolean := False; begin loop begin Log_Files.Close (Work_Set.Files (Worker).OP, Work_Set.Logs); OK := True; exception when Ada.IO_Exceptions.Device_Error => -- if OP is still open, then try again! if Ada.Text_IO.Is_Open (Log_Files.File_Type (Work_Set.Files (Worker).OP, Work_Set.Logs).all) then Ada.Text_IO.Put_Line ("Close failed with Device_Error - try again..."); OK := False; else Ada.Text_IO.Put_Line ("Close failed with Device_Error - aborting..."); OK := True; end if; when Storage_Error => Ada.Text_IO.Put_Line ("Close failed with Storage_Error - aborting..."); OK := True; end; exit when OK; end loop; end Close_Log_File; procedure Compact (Removing : in Positive) is -- We need to preserve the pointer to the process descriptor -- so that it can be re-used in a later simplification. PD_Acc : constant SPARK.Expect.Process_Descriptor_Access := Work_Set.Procs (Removing).Descriptor; begin for I in Removing .. Work_Set.Working_Count - 1 loop Work_Set.Files (I) := Work_Set.Files (I + 1); Work_Set.Procs (I) := Work_Set.Procs (I + 1); end loop; Work_Set.Procs (Work_Set.Working_Count).Descriptor := PD_Acc; Work_Set.Working_Count := Work_Set.Working_Count - 1; end Compact; --------------------------------------------------------- -- This string is prodced by the Simplifier at the start -- of a line to signal a critical error. This string -- must match that in spade/simplifier/src/utilities.pl -- in the clause write_error_preamble/0 --------------------------------------------------------- Error_Preamble : constant String := "*** ERROR - "; begin -- Code of Run_Analysis loop SPARK.Expect.Expect (Result, Worker, Work_Set.Procs (1 .. Work_Set.Working_Count), Never_Timeout); if Result in 1 .. 
SPARK.Expect.Expect_Match (Work_Set.Working_Count) then Worker := Integer (Result); declare S : constant String := SPARK.Expect.Expect_Out_Match (Work_Set.Procs (Worker).Descriptor.all); Final_Char : Natural; begin Job_ID := Work_Set.Files (Worker).Job_ID; -- On NT, we want to turn the CR/LF sequence -- coming from the Simplifier back into a -- standard line-ending sequence, so... if S'Length >= 2 and then (S (S'Last) = Ada.Characters.Latin_1.LF and S (S'Last - 1) = Ada.Characters.Latin_1.CR) then Final_Char := S'Last - 2; -- On Other platforms, the line might end in just -- a single LF, so strip that as well if the case -- above didn't apply. elsif S'Length >= 1 and then S (S'Last) = Ada.Characters.Latin_1.LF then Final_Char := S'Last - 1; else Final_Char := S'Last; end if; if CMD.Log_Output and Work_Manager.Jobs.Get_Analysis_Type (Job_ID) /= Work_Manager.Victor then -- wrap each line to the log file Wrap.CopyAndMaybeWrapLine (Log_Files.File_Type (Work_Set.Files (Worker).OP, Work_Set.Logs).all, S (S'First .. Final_Char)); end if; if CMD.Echo_Output then Wrap.CopyAndMaybeWrapLine (Ada.Text_IO.Standard_Output, S (S'First .. Final_Char)); end if; -- if an error is found pass out why if S'Length >= Error_Preamble'Length and then S (1 .. Error_Preamble'Length) = Error_Preamble then Work_Set.Files (Worker).OK := False; if S'Length <= Work_Manager.MaxErrorStringIndex then Work_Set.Files (Worker).WhyFailed (1 .. S'Length) := S; else Work_Set.Files (Worker).WhyFailed := S (1 .. Work_Manager.MaxErrorStringIndex); end if; end if; end; elsif Result = SPARK.Expect.Expect_Timeout then -- Timeout is OK - go round again... Utility.Debug ("Expect timeout"); elsif Result = SPARK.Expect.Expect_Full_Buffer then Utility.Debug ("Expect Full Buffer"); exit; elsif Result = SPARK.Expect.Expect_Process_Died then Utility.Debug ("Expect Process Died with Worker = " & Integer'Image (Worker)); exit; else Utility.Debug ("Got an unexpected exception from Expect"); exit; end if; end loop; -- Tidy up when an analysis has finished. if Worker = 0 then -- Exit from Run_Analysis with error message; Utility.Debug ("Can't find completed Simplifier process."); return; end if; Job_ID := Work_Set.Files (Worker).Job_ID; Work_Set.Files (Worker).End_Time := Ada.Real_Time.Clock; Work_Set.Files (Worker).Elapsed_Time := Ada.Real_Time.To_Duration (Work_Set.Files (Worker).End_Time - Work_Set.Files (Worker).Start_Time); SPARK.Expect.Close (Work_Set.Procs (Worker).Descriptor.all); if CMD.Log_Output and Work_Manager.Jobs.Get_Analysis_Type (Job_ID) not in Work_Manager.Logless_Job then Utility.Debug ("Closing Log File"); Close_Log_File; end if; -- If we are a simplification job, follow up with Victor and Riposte. if Work_Manager.Jobs.Get_Analysis_Type (Job_ID) = Work_Manager.Simplify then if CMD.Run_Victor then Work_Manager.Jobs.Add_Work_Package (Work_Manager.Jobs.Get_File_Name (Work_Set.Files (Worker).Job_ID), Work_Manager.Victor); end if; if CMD.Run_Riposte then Work_Manager.Jobs.Add_Work_Package (Work_Manager.Jobs.Get_File_Name (Work_Set.Files (Worker).Job_ID), Work_Manager.Riposte); end if; end if; Utility.Debug ("Worker is " & Integer'Image (Worker)); Utility.Debug ("Job_ID is " & Integer'Image (Job_ID)); if Work_Set.Files (Worker).OK then Work_Manager.Jobs.JobFinished (Job_ID); else Work_Manager.Jobs.JobFailed (Job_ID, Work_Set.Files (Worker).WhyFailed); end if; -- Display a message that the job is finished. 
declare Job_Str : constant String := Utility.Format_Int (Item => Job_ID, Width => 6); begin Utility.Put_Message_With_Duration (Message => Job_Str & " Finished ", D => Work_Set.Files (Worker).Elapsed_Time); end; Compact (Removing => Worker); end Run_Analysis; end Workers; spark-2012.0.deb/sparksimp/spark-expect.ads0000644000175000017500000006621511753202340017556 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ------------------------------------------------------------------------------ -- This file has been derived from the standard version of this -- -- package body in the GNAT runtime library. Below is the copyright notice -- -- from g-expect.ads -- -- -- -- Copyright (C) 2000-2009, AdaCore -- -- -- -- GNAT is free software; you can redistribute it and/or modify it under -- -- terms of the GNU General Public License as published by the Free Soft- -- -- ware Foundation; either version 2, or (at your option) any later ver- -- -- sion. GNAT is distributed in the hope that it will be useful, but WITH- -- -- OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY -- -- or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License -- -- for more details. You should have received a copy of the GNU General -- -- Public License distributed with GNAT; see file COPYING. If not, write -- -- to the Free Software Foundation, 51 Franklin Street, Fifth Floor, -- -- Boston, MA 02110-1301, USA. -- -- -- -- As a special exception, if other files instantiate generics from this -- -- unit, or you link this unit with other files to produce an executable, -- -- this unit does not by itself cause the resulting executable to be -- -- covered by the GNU General Public License. This exception does not -- -- however invalidate any other reasons why the executable file might be -- -- covered by the GNU Public License. -- -- -- -- GNAT was originally developed by the GNAT team at New York University. -- -- Extensive contributions were provided by Ada Core Technologies Inc. -- ------------------------------------------------------------------------------ -- ----------- -- -- Usage -- -- ----------- -- This package provides a set of subprograms similar to what is available -- with the standard Tcl Expect tool. -- It allows you to easily spawn and communicate with an external process. -- You can send commands or inputs to the process, and compare the output -- with some expected regular expression. 
-- Usage example: -- Non_Blocking_Spawn -- (Fd, "ftp", -- (1 => new String' ("machine@domain"))); -- Timeout := 10000; -- 10 seconds -- Expect (Fd, Result, Regexp_Array'(+"\(user\)", +"\(passwd\)"), -- Timeout); -- case Result is -- when 1 => Send (Fd, "my_name"); -- matched "user" -- when 2 => Send (Fd, "my_passwd"); -- matched "passwd" -- when Expect_Timeout => null; -- timeout -- when others => null; -- end case; -- Close (Fd); -- You can also combine multiple regular expressions together, and get the -- specific string matching a parenthesis pair by doing something like. If you -- expect either "lang=optional ada" or "lang=ada" from the external process, -- you can group the two together, which is more efficient, and simply get the -- name of the language by doing: -- declare -- Matched : Match_Array (0 .. 2); -- begin -- Expect (Fd, Result, "lang=(optional)? ([a-z]+)", Matched); -- Put_Line ("Seen: " & -- Expect_Out (Fd) (Matched (2).First .. Matched (2).Last)); -- end; -- Alternatively, you might choose to use a lower-level interface to the -- processes, where you can give your own input and output filters every -- time characters are read from or written to the process. -- procedure My_Filter -- (Descriptor : Process_Descriptor'Class; -- Str : String; -- User_Data : System.Address) -- is -- begin -- Put_Line (Str); -- end; -- Non_Blocking_Spawn -- (Fd, "tail", -- (new String' ("-f"), new String' ("a_file"))); -- Add_Filter (Fd, My_Filter'Access, Output); -- Expect (Fd, Result, "", 0); -- wait forever -- The above example should probably be run in a separate task, since it is -- blocking on the call to Expect. -- Both examples can be combined, for instance to systematically print the -- output seen by expect, even though you still want to let Expect do the -- filtering. You can use the Trace_Filter subprogram for such a filter. -- If you want to get the output of a simple command, and ignore any previous -- existing output, it is recommended to do something like: -- Expect (Fd, Result, ".*", Timeout => 0); -- -- Empty the buffer, by matching everything (after checking -- -- if there was any input). -- Send (Fd, "command"); -- Expect (Fd, Result, ".."); -- match only on the output of command -- ----------------- -- -- Task Safety -- -- ----------------- -- This package is not task-safe: there should be not concurrent calls to -- the functions defined in this package. In other words, separate tasks -- may not access the facilities of this package without synchronization -- that serializes access. with System; with GNAT.OS_Lib; with GNAT.Regpat; package SPARK.Expect is type Process_Id is new Integer; Invalid_Pid : constant Process_Id := -1; Null_Pid : constant Process_Id := 0; type Filter_Type is (Output, Input, Died); -- The signals that are emitted by the Process_Descriptor upon state -- changed in the child. One can connect to any of this signal through -- the Add_Filter subprograms. -- -- Output => Every time new characters are read from the process -- associated with Descriptor, the filter is called with -- these new characters in argument. -- -- Note that output is only generated when the program is -- blocked in a call to Expect. -- -- Input => Every time new characters are written to the process -- associated with Descriptor, the filter is called with -- these new characters in argument. -- Note that input is only generated by calls to Send. 
-- -- Died => The child process has died, or was explicitly killed type Process_Descriptor is tagged private; -- Contains all the components needed to describe a process handled -- in this package, including a process identifier, file descriptors -- associated with the standard input, output and error, and the buffer -- needed to handle the expect calls. type Process_Descriptor_Access is access Process_Descriptor'Class; ------------------------ -- Spawning a process -- ------------------------ procedure Non_Blocking_Spawn (Descriptor : out Process_Descriptor'Class; Command : String; Args : GNAT.OS_Lib.Argument_List; Buffer_Size : Natural := 4096; Err_To_Out : Boolean := False); -- This call spawns a new process and allows sending commands to -- the process and/or automatic parsing of the output. -- -- The expect buffer associated with that process can contain at most -- Buffer_Size characters. Older characters are simply discarded when -- this buffer is full. Beware that if the buffer is too big, this could -- slow down the Expect calls if not output is matched, since Expect has -- to match all the regexp against all the characters in the buffer. -- If Buffer_Size is 0, there is no limit (ie all the characters are kept -- till Expect matches), but this is slower. -- -- If Err_To_Out is True, then the standard error of the spawned process is -- connected to the standard output. This is the only way to get the -- Except subprograms also match on output on standard error. -- -- Invalid_Process is raised if the process could not be spawned. procedure Close (Descriptor : in out Process_Descriptor); -- Terminate the process and close the pipes to it. It implicitly -- does the 'wait' command required to clean up the process table. -- This also frees the buffer associated with the process id. procedure Close (Descriptor : in out Process_Descriptor; Status : out Integer); -- Same as above, but also returns the exit status of the process, -- as set for example by the procedure GNAT.OS_Lib.OS_Exit. procedure Send_Signal (Descriptor : Process_Descriptor; Signal : Integer); -- Send a given signal to the process procedure Interrupt (Descriptor : in out Process_Descriptor); -- Interrupt the process (the equivalent of Ctrl-C on unix and windows) -- and call close if the process dies. function Get_Input_Fd (Descriptor : Process_Descriptor) return GNAT.OS_Lib.File_Descriptor; -- Return the input file descriptor associated with Descriptor function Get_Output_Fd (Descriptor : Process_Descriptor) return GNAT.OS_Lib.File_Descriptor; -- Return the output file descriptor associated with Descriptor function Get_Error_Fd (Descriptor : Process_Descriptor) return GNAT.OS_Lib.File_Descriptor; -- Return the error output file descriptor associated with Descriptor function Get_Pid (Descriptor : Process_Descriptor) return Process_Id; -- Return the process id assocated with a given process descriptor function Get_Command_Output (Command : String; Arguments : GNAT.OS_Lib.Argument_List; Input : String; Status : access Integer; Err_To_Out : Boolean := False) return String; -- Execute Command with the specified Arguments and Input, and return the -- generated standard output data as a single string. If Err_To_Out is -- True, generated standard error output is included as well. On return, -- Status is set to the command's exit status. -------------------- -- Adding filters -- -------------------- -- This is a rather low-level interface to subprocesses, since basically -- the filtering is left entirely to the user. 
See the Expect subprograms -- below for higher level functions. type Filter_Function is access procedure (Descriptor : Process_Descriptor'Class; Str : String; User_Data : System.Address := System.Null_Address); -- Function called every time new characters are read from or written -- to the process. -- -- Str is a string of all these characters. -- -- User_Data, if specified, is a user specific data that will be passed to -- the filter. Note that no checks are done on this parameter that should -- be used with cautiousness. procedure Add_Filter (Descriptor : in out Process_Descriptor; Filter : Filter_Function; Filter_On : Filter_Type := Output; User_Data : System.Address := System.Null_Address; After : Boolean := False); -- Add a new filter for one of the filter type. This filter will be -- run before all the existing filters, unless After is set True, -- in which case it will be run after existing filters. User_Data -- is passed as is to the filter procedure. procedure Remove_Filter (Descriptor : in out Process_Descriptor; Filter : Filter_Function); -- Remove a filter from the list of filters (whatever the type of the -- filter). procedure Trace_Filter (Descriptor : Process_Descriptor'Class; Str : String; User_Data : System.Address := System.Null_Address); -- Function that can be used a filter and that simply outputs Str on -- Standard_Output. This is mainly used for debugging purposes. -- User_Data is ignored. procedure Lock_Filters (Descriptor : in out Process_Descriptor); -- Temporarily disables all output and input filters. They will be -- reactivated only when Unlock_Filters has been called as many times as -- Lock_Filters; procedure Unlock_Filters (Descriptor : in out Process_Descriptor); -- Unlocks the filters. They are reactivated only if Unlock_Filters -- has been called as many times as Lock_Filters. ------------------ -- Sending data -- ------------------ procedure Send (Descriptor : in out Process_Descriptor; Str : String; Add_LF : Boolean := True; Empty_Buffer : Boolean := False); -- Send a string to the file descriptor. -- -- The string is not formatted in any way, except if Add_LF is True, -- in which case an Latin_1.LF is added at the end, so that Str is -- recognized as a command by the external process. -- -- If Empty_Buffer is True, any input waiting from the process (or in the -- buffer) is first discarded before the command is sent. The output -- filters are of course called as usual. ----------------------------------------------------------- -- Working on the output (single process, simple regexp) -- ----------------------------------------------------------- type Expect_Match is new Integer; Expect_Full_Buffer : constant Expect_Match := -1; -- If the buffer was full and some characters were discarded Expect_Timeout : constant Expect_Match := -2; -- If not output matching the regexps was found before the timeout Expect_Process_Died : constant Expect_Match := -3; -- A sub-process terminated function "+" (S : String) return GNAT.OS_Lib.String_Access; -- Allocate some memory for the string. This is merely a convenience -- function to help create the array of regexps in the call to Expect. procedure Expect (Descriptor : in out Process_Descriptor; Result : out Expect_Match; Regexp : String; Timeout : Integer := 10000; Full_Buffer : Boolean := False); -- Wait till a string matching Fd can be read from Fd, and return 1 -- if a match was found. 
   --
   -- It consumes all the characters read from Fd until a match is found,
   -- and then sets the return values for the subprograms Expect_Out and
   -- Expect_Out_Match.
   --
   -- The empty string "" will never match, and can be used if you only
   -- want to match after a specific timeout. Beware that if Timeout is -1
   -- at the time, the current task will be blocked forever.
   --
   -- This command times out after Timeout milliseconds (or never if
   -- Timeout is -1). In that case, Expect_Timeout is returned. The values
   -- returned by Expect_Out and Expect_Out_Match are meaningless in that
   -- case.
   --
   -- Note that using a timeout of 0ms leads to unpredictable behavior,
   -- since the result depends on whether the process has already sent
   -- some output the first time Expect checks, and this depends on the
   -- operating system.
   --
   -- The regular expression must obey the syntax described in GNAT.Regpat.
   --
   -- If Full_Buffer is True, then Expect will match if the buffer was too
   -- small and some characters were about to be discarded. In that case,
   -- Expect_Full_Buffer is returned.

   procedure Expect
     (Descriptor  : in out Process_Descriptor;
      Result      : out Expect_Match;
      Regexp      : GNAT.Regpat.Pattern_Matcher;
      Timeout     : Integer := 10000;
      Full_Buffer : Boolean := False);
   -- Same as the previous one, but with a precompiled regular expression.
   -- This is more efficient, especially if you are using this expression
   -- multiple times, since this package won't need to recompile the
   -- regexp every time.

   procedure Expect
     (Descriptor  : in out Process_Descriptor;
      Result      : out Expect_Match;
      Regexp      : String;
      Matched     : out GNAT.Regpat.Match_Array;
      Timeout     : Integer := 10000;
      Full_Buffer : Boolean := False);
   -- Same as above, but it is now possible to get the indexes of the
   -- substrings for the parentheses in the regexp (see the example at the
   -- top of this package, as well as the documentation in the package
   -- GNAT.Regpat).
   --
   -- Matched'First should be 0, and this index will contain the indexes
   -- for the whole string that was matched. The index 1 will contain the
   -- indexes for the first parentheses-pair, and so on.

   ------------
   -- Expect --
   ------------

   procedure Expect
     (Descriptor  : in out Process_Descriptor;
      Result      : out Expect_Match;
      Regexp      : GNAT.Regpat.Pattern_Matcher;
      Matched     : out GNAT.Regpat.Match_Array;
      Timeout     : Integer := 10000;
      Full_Buffer : Boolean := False);
   -- Same as above, but with a precompiled regular expression

   -------------------------------------------------------------
   -- Working on the output (single process, multiple regexp) --
   -------------------------------------------------------------

   type Regexp_Array is array (Positive range <>)
     of GNAT.OS_Lib.String_Access;

   type Pattern_Matcher_Access is access GNAT.Regpat.Pattern_Matcher;

   type Compiled_Regexp_Array is array (Positive range <>)
     of Pattern_Matcher_Access;

   function "+"
     (P : GNAT.Regpat.Pattern_Matcher) return Pattern_Matcher_Access;
   -- Allocate some memory for the pattern matcher.
   -- This is only a convenience function to help create the array of
   -- compiled regular expressions.

   procedure Expect
     (Descriptor  : in out Process_Descriptor;
      Result      : out Expect_Match;
      Regexps     : Regexp_Array;
      Timeout     : Integer := 10000;
      Full_Buffer : Boolean := False);
   -- Wait until a string matching one of the regular expressions in
   -- Regexps is found. Result is set to the index of the regexp that
   -- matched. This command is blocking, but will timeout after Timeout
   -- milliseconds. In that case, Expect_Timeout is returned.
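   -- The following commented example is an editorial usage sketch, not
   -- part of the original source: it shows one way to combine
   -- Non_Blocking_Spawn, the String/Match_Array form of Expect declared
   -- above, Expect_Out and Close. The procedure name Demo_Single, the
   -- spawned command "/bin/sh" and the pattern are illustrative
   -- assumptions only.
   --
   --    with GNAT.Regpat;
   --    with SPARK.Expect; use SPARK.Expect;
   --
   --    procedure Demo_Single is
   --       Pd      : Process_Descriptor;
   --       Result  : Expect_Match;
   --       Matched : GNAT.Regpat.Match_Array (0 .. 1);
   --    begin
   --       Non_Blocking_Spawn
   --         (Pd, "/bin/sh", (1 => +"-c", 2 => +"echo value=42"));
   --
   --       -- Wait up to 5 seconds for "value=<digits>" on the output
   --       Expect (Pd, Result, "value=([0-9]+)", Matched, Timeout => 5000);
   --
   --       if Result = 1 then
   --          declare
   --             -- Slice of the buffer holding the first parenthesis
   --             -- group, here "42"
   --             Value : constant String :=
   --               Expect_Out (Pd) (Matched (1).First .. Matched (1).Last);
   --          begin
   --             null;  -- use Value as required
   --          end;
   --       end if;
   --
   --       Close (Pd);
   --    exception
   --       when Process_Died =>
   --          Close (Pd);  -- the command exited before the pattern was seen
   --    end Demo_Single;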
   procedure Expect
     (Descriptor  : in out Process_Descriptor;
      Result      : out Expect_Match;
      Regexps     : Compiled_Regexp_Array;
      Timeout     : Integer := 10000;
      Full_Buffer : Boolean := False);
   -- Same as the previous one, but with precompiled regular expressions.
   -- This can be much faster if you are using them multiple times.

   procedure Expect
     (Descriptor  : in out Process_Descriptor;
      Result      : out Expect_Match;
      Regexps     : Regexp_Array;
      Matched     : out GNAT.Regpat.Match_Array;
      Timeout     : Integer := 10000;
      Full_Buffer : Boolean := False);
   -- Same as above, except that you can also access the parenthesis
   -- groups inside the matching regular expression.
   -- The first index in Matched must be 0, or Constraint_Error will be
   -- raised. The index 0 contains the indexes for the whole string that
   -- was matched, the index 1 contains the indexes for the first
   -- parentheses pair, and so on.

   procedure Expect
     (Descriptor  : in out Process_Descriptor;
      Result      : out Expect_Match;
      Regexps     : Compiled_Regexp_Array;
      Matched     : out GNAT.Regpat.Match_Array;
      Timeout     : Integer := 10000;
      Full_Buffer : Boolean := False);
   -- Same as above, but with precompiled regular expressions.
   -- The first index in Matched must be 0, or Constraint_Error will be
   -- raised.

   -------------------------------------------
   -- Working on the output (multi-process) --
   -------------------------------------------

   type Multiprocess_Regexp is record
      Descriptor : Process_Descriptor_Access;
      Regexp     : Pattern_Matcher_Access;
   end record;

   type Multiprocess_Regexp_Array is array (Positive range <>)
     of Multiprocess_Regexp;

   procedure Expect
     (Result      : out Expect_Match;
      Regexps     : Multiprocess_Regexp_Array;
      Matched     : out GNAT.Regpat.Match_Array;
      Timeout     : Integer := 10000;
      Full_Buffer : Boolean := False);
   -- Same as above, but for multiple processes

   procedure Expect
     (Result      : out Expect_Match;
      Regexps     : Multiprocess_Regexp_Array;
      Timeout     : Integer := 10000;
      Full_Buffer : Boolean := False);
   -- Same as the previous one, but for multiple processes.
   -- This procedure finds the first regexp that matches the associated
   -- process.

   procedure Expect
     (Result        : out Expect_Match;
      Process_Index : out Positive;
      Regexps       : in Multiprocess_Regexp_Array;
      Timeout       : in Integer := 10000;
      Full_Buffer   : in Boolean := False);
   -- As above, but returns the index of the matched (or died...) process
   -- in Process_Index. This procedure is new in SPARK.Expect.

   ------------------------
   -- Getting the output --
   ------------------------

   procedure Flush
     (Descriptor : in out Process_Descriptor;
      Timeout    : Integer := 0);
   -- Discard all output waiting from the process.
   --
   -- This output is simply discarded, and no filter is called. This
   -- output will also not be visible by the next call to Expect, nor will
   -- any output currently buffered.
   --
   -- Timeout is the delay for which we wait for output to be available
   -- from the process. If 0, we only get what is immediately available.

   function Expect_Out (Descriptor : Process_Descriptor) return String;
   -- Return the string matched by the last Expect call.
   --
   -- The returned string is in fact the concatenation of all the strings
   -- read from the file descriptor up to, and including, the characters
   -- that matched the regular expression.
   --
   -- For instance, with an input "philosophic", and a regular expression
   -- "hi" in the call to Expect, the strings returned the first and
   -- second time would be respectively "phi" and "losophi".

   function Expect_Out_Match (Descriptor : Process_Descriptor) return String;
   -- Return the string matched by the last Expect call.
   --
   -- The returned string includes only the characters that matched the
   -- specific regular expression. All the characters that came before are
   -- simply discarded.
   --
   -- For instance, with an input "philosophic", and a regular expression
   -- "hi" in the call to Expect, the strings returned the first and
   -- second time would both be "hi".

   ----------------
   -- Exceptions --
   ----------------

   Invalid_Process : exception;
   -- Raised by most subprograms above when the parameter Descriptor is
   -- not a valid process or is a closed process.

   Process_Died : exception;
   -- Raised by all the expect subprograms if Descriptor was originally a
   -- valid process that died while Expect was executing. It is also
   -- raised when Expect receives an end-of-file.

private

   type Filter_List_Elem;
   type Filter_List is access Filter_List_Elem;
   type Filter_List_Elem is record
      Filter    : Filter_Function;
      User_Data : System.Address;
      Filter_On : Filter_Type;
      Next      : Filter_List;
   end record;

   type Pipe_Type is record
      Input, Output : GNAT.OS_Lib.File_Descriptor;
   end record;
   -- This type represents a pipe, used to communicate between two
   -- processes

   procedure Set_Up_Communications
     (Pid        : in out Process_Descriptor;
      Err_To_Out : Boolean;
      Pipe1      : access Pipe_Type;
      Pipe2      : access Pipe_Type;
      Pipe3      : access Pipe_Type);
   -- Set up all the communication pipes and file descriptors prior to
   -- spawning the child process.

   procedure Set_Up_Parent_Communications
     (Pid   : in out Process_Descriptor;
      Pipe1 : in out Pipe_Type;
      Pipe2 : in out Pipe_Type;
      Pipe3 : in out Pipe_Type);
   -- Finish the set up of the pipes while in the parent process

   procedure Set_Up_Child_Communications
     (Pid   : in out Process_Descriptor;
      Pipe1 : in out Pipe_Type;
      Pipe2 : in out Pipe_Type;
      Pipe3 : in out Pipe_Type;
      Cmd   : String;
      Args  : System.Address);
   -- Finish the set up of the pipes while in the child process
   -- This also spawns the child process (based on Cmd).
   -- On systems that support fork, this procedure is executed inside the
   -- newly created process.

   type Process_Descriptor is tagged record
      Pid              : aliased Process_Id := Invalid_Pid;
      Input_Fd         : GNAT.OS_Lib.File_Descriptor := GNAT.OS_Lib.Invalid_FD;
      Output_Fd        : GNAT.OS_Lib.File_Descriptor := GNAT.OS_Lib.Invalid_FD;
      Error_Fd         : GNAT.OS_Lib.File_Descriptor := GNAT.OS_Lib.Invalid_FD;
      Filters_Lock     : Integer := 0;
      Filters          : Filter_List := null;
      Buffer           : GNAT.OS_Lib.String_Access := null;
      Buffer_Size      : Natural := 0;
      Buffer_Index     : Natural := 0;
      Last_Match_Start : Natural := 0;
      Last_Match_End   : Natural := 0;
   end record;

   -- The following subprogram is provided for use in the body, and also
   -- possibly in future child units providing extensions to this package.

   procedure Portable_Execvp
     (Pid  : access Process_Id;
      Cmd  : String;
      Args : System.Address);
   pragma Import (C, Portable_Execvp, "__gnat_expect_portable_execvp");
   -- Executes, in a portable way, the command Cmd (full path must be
   -- specified), with the given Args. Args must be an array of string
   -- pointers. Note that the first element in Args must be the executable
   -- name, and the last element must be a null pointer. The returned
   -- value in Pid is the process ID, or zero if not supported on the
   -- platform.
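   -- The following commented example is an editorial usage sketch, not
   -- part of the original source: it illustrates the SPARK-specific
   -- multi-process Expect with Process_Index declared above. The
   -- procedure name Demo_Multi and the spawned shell commands are
   -- illustrative assumptions only.
   --
   --    with GNAT.Regpat; use GNAT.Regpat;
   --    with SPARK.Expect; use SPARK.Expect;
   --
   --    procedure Demo_Multi is
   --       Workers : Multiprocess_Regexp_Array (1 .. 2);
   --       Result  : Expect_Match;
   --       Which   : Positive;
   --    begin
   --       -- Spawn two children and attach the pattern to wait for
   --       for J in Workers'Range loop
   --          Workers (J).Descriptor := new Process_Descriptor;
   --          Non_Blocking_Spawn
   --            (Workers (J).Descriptor.all,
   --             "/bin/sh", (1 => +"-c", 2 => +"echo done"));
   --          Workers (J).Regexp := +Compile ("done");
   --       end loop;
   --
   --       -- Wait for the first child to match, die or time out
   --       Expect (Result, Which, Workers, Timeout => 60000);
   --
   --       if Result = Expect_Process_Died then
   --          null;  -- Workers (Which) terminated before matching
   --       elsif Result /= Expect_Timeout then
   --          null;  -- Workers (Which) printed a line matching "done"
   --       end if;
   --
   --       for J in Workers'Range loop
   --          Close (Workers (J).Descriptor.all);
   --       end loop;
   --    end Demo_Multi;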
end SPARK.Expect; spark-2012.0.deb/sparksimp/cmd.adb0000644000175000017500000002515211753202340015665 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Characters.Latin_1; with Ada.Text_IO; with GNAT.Command_Line; with Version; package body CMD is -- helper to workaround GNAT.Command_Line.Getopt special case (TN JB18-019), -- this needs to stay in synch with Section_Delimiters parameter in call to -- GNAT.Command_Line.Initialize_Option_Scan below function Is_Section_Delimiter (S : String) return Boolean is begin return S = "-sargs" or S = "-zargs" or S = "-vargs" or S = "-rargs"; end Is_Section_Delimiter; function Number_Of_Args_In (Section : in String) return Natural is Count : Natural := 0; begin GNAT.Command_Line.Goto_Section (Section); loop case GNAT.Command_Line.Getopt ("*") is when Ada.Characters.Latin_1.NUL => exit; when '*' => -- If the section is empty Getopt ("*") may return the -- delimiter itself, this check may be removed if/when -- GNAT.Command_Line.Getopt is fixed, see TN JB18-019 if Count = 0 and then Is_Section_Delimiter (GNAT.Command_Line.Full_Switch) then exit; end if; Count := Count + 1; when others => null; end case; end loop; return Count; end Number_Of_Args_In; -- Returns the arguments in the specified section of the command -- line. The returned array has exactly the number of elements -- required (can have no elements). procedure Populate_Arg_List (Section : in String; Arg_List : out GNAT.OS_Lib.Argument_List_Access) is NA : constant Natural := Number_Of_Args_In (Section); subtype Arg_Index is Natural range 1 .. 
NA; subtype Args is GNAT.OS_Lib.Argument_List (Arg_Index); I : Positive := 1; begin Arg_List := new Args'(others => null); GNAT.Command_Line.Goto_Section (Section); loop case GNAT.Command_Line.Getopt ("*") is when Ada.Characters.Latin_1.NUL => exit; when '*' => -- If the section is empty Getopt ("*") may return the -- delimiter itself, this check may be removed if/when -- GNAT.Command_Line.Getopt is fixed, see TN JB18-019 if I = 1 and then Is_Section_Delimiter (GNAT.Command_Line.Full_Switch) then exit; end if; Arg_List.all (I) := new String'(GNAT.Command_Line.Full_Switch); I := I + 1; when others => null; end case; end loop; GNAT.OS_Lib.Normalize_Arguments (Arg_List.all); end Populate_Arg_List; procedure Process_Command_Line (Switch_Char : in Character) is begin Valid := True; -- See documentation of GNAT.Command_Line in -- GNAT library source in g-comlin.ads begin GNAT.Command_Line.Initialize_Option_Scan (Switch_Char => Switch_Char, Stop_At_First_Non_Switch => False, Section_Delimiters => "sargs zargs vargs rargs"); loop case GNAT.Command_Line.Getopt ("a v victor riposte V l e n ns nz t r p= x= z= c=") is when Ada.Characters.Latin_1.NUL => -- Must be start of next section... exit; when 'a' => All_Files := True; when 'v' => if GNAT.Command_Line.Full_Switch = "victor" then Run_Victor := True; else Version_Requested := True; end if; when 'V' => Verbose := True; when 't' => Sort_VCGs := True; when 'r' => if GNAT.Command_Line.Full_Switch = "riposte" then Run_Riposte := True; else Reverse_Order := True; end if; when 'l' => Log_Output := True; when 'e' => Echo_Output := True; when 'n' => if GNAT.Command_Line.Full_Switch = "ns" then Run_Simplifier := False; elsif GNAT.Command_Line.Full_Switch = "nz" then Run_Zombiescope := False; elsif GNAT.Command_Line.Full_Switch = "n" then Dry_Run := True; else null; end if; when 'p' => Processes := Positive'Value (GNAT.Command_Line.Parameter); when 'x' => Simplifier_Exe_Switch := new String'(GNAT.Command_Line.Parameter); when 'z' => ZombieScope_Exe_Switch := new String'(GNAT.Command_Line.Parameter); when 'c' => Victor_Exe_Switch := new String'(GNAT.Command_Line.Parameter); when others => null; end case; end loop; loop declare S : constant String := GNAT.Command_Line.Get_Argument (Do_Expansion => True); begin exit when S'Length = 0; -- If we get any argument at all, we know the commandline is bad. Valid := False; end; end loop; -- If we have more than one thread running it makes no sense to send -- simplifier output to the screen. 
if Processes > 1 and Echo_Output then Ada.Text_IO.Put_Line ("Simplifier output cannot be echoed to the screen"); Ada.Text_IO.Put_Line ("when more than 1 concurrent process is used"); Valid := False; end if; -- Section arguments here must agree with the strings used in -- Is_Section_Delimiter and in call to Initialize_Option_Scan -- Pick up the sargs section Populate_Arg_List (Section => "sargs", Arg_List => SArgs); -- Pick up the zargs section Populate_Arg_List (Section => "zargs", Arg_List => ZArgs); -- Pick up the vargs section Populate_Arg_List (Section => "vargs", Arg_List => VArgs); -- Pick up the rargs section Populate_Arg_List (Section => "rargs", Arg_List => RArgs); exception when GNAT.Command_Line.Invalid_Parameter => Valid := False; Ada.Text_IO.Put_Line ("Invalid Parameter " & GNAT.Command_Line.Full_Switch); when GNAT.Command_Line.Invalid_Switch => Valid := False; Ada.Text_IO.Put_Line ("Invalid Switch " & GNAT.Command_Line.Full_Switch); end; end Process_Command_Line; procedure Usage is begin Ada.Text_IO.Put_Line ("Usage: sparksimp [-a] [-v] [-V] [-n] [-ns] [-nz] [-t] [-r] [-l] [-e] [-p=N]"); Ada.Text_IO.Put_Line (" [-victor]"); Ada.Text_IO.Put_Line (" [-riposte]"); Ada.Text_IO.Put_Line (" [-x=Sexec] [-z=Zexec] [-c=Vexec]"); Ada.Text_IO.Put_Line (" [-sargs {simplifier_options}]"); Ada.Text_IO.Put_Line (" [-zargs {zombiescope_options}]"); Ada.Text_IO.Put_Line (" [-vargs {victor_options}]"); Ada.Text_IO.Put_Line (" [-rargs {riposte_options}]"); Ada.Text_IO.New_Line; Ada.Text_IO.Put_Line ("Options: -a all - ignore time stamps, so process all eligible files"); Ada.Text_IO.Put_Line (" -v report version and terminate"); Ada.Text_IO.Put_Line (" -V verbose output"); Ada.Text_IO.Put_Line (" -n dry run - print list of files found and then stop"); Ada.Text_IO.Put_Line (" -ns do not run the Simplifier"); Ada.Text_IO.Put_Line (" -nz do not run ZombieScope"); Ada.Text_IO.Put_Line (" -victor run Victor/Alt-Ergo on VCs left unproven by Simplifier"); Ada.Text_IO.Put_Line (" (If -ns is specified then this will run Victor on all VCs.)"); Ada.Text_IO.Put_Line (" -riposte run Riposte on VCs left unproven by Simplifier"); Ada.Text_IO.Put_Line (" (If -ns is specified then this will run Riposte on all VCs.)"); Ada.Text_IO.Put_Line (" -t sort VCG files, largest first"); Ada.Text_IO.Put_Line (" -r reverse simplification order"); Ada.Text_IO.Put_Line (" -l log output for XXX.vcg to XXX.log and for YYY.dpc to YYY.zsl"); Ada.Text_IO.Put_Line (" -e echo Simplifier output to screen"); Ada.Text_IO.Put_Line (" -p=N use N concurrent processes"); Ada.Text_IO.Put_Line (" -x=Sexec, specifies an alternative Simplifier executable"); Ada.Text_IO.Put_Line (" -z=Zexec, specifies an alternative ZombieScope executable"); Ada.Text_IO.Put_Line (" -c=Vexec, specifies an alternative Victor executable"); Ada.Text_IO.Put_Line (" -sargs pass following options in this section to Simplifier"); Ada.Text_IO.Put_Line (" -zargs pass following options in this section to ZombieScope"); Ada.Text_IO.Put_Line (" -vargs pass following options in this section to Victor"); Ada.Text_IO.Put_Line (" -rargs pass following options in this section to Riposte"); Ada.Text_IO.New_Line; Ada.Text_IO.Put_Line (Version.Toolset_Support_Line1); Ada.Text_IO.Put_Line (Version.Toolset_Support_Line2); Ada.Text_IO.Put_Line (Version.Toolset_Support_Line3); Ada.Text_IO.Put_Line (Version.Toolset_Support_Line4); end Usage; end CMD; spark-2012.0.deb/sparksimp/utility.adb0000644000175000017500000000734411753202340016630 0ustar 
eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Characters.Handling; with Ada.Integer_Text_IO; with Ada.Text_IO; with CMD; package body Utility is function Format_Int (Item : in Integer; Width : in Integer) return String is Temp_String : String (1 .. Width); begin Ada.Integer_Text_IO.Put (To => Temp_String, Item => Item, Base => 10); return Temp_String; end Format_Int; function Get_File_Size (File : in String) return Ada.Streams.Stream_IO.Count is package SIO renames Ada.Streams.Stream_IO; S : SIO.Count; F : SIO.File_Type; begin SIO.Open (F, SIO.In_File, File, ""); S := SIO.Size (F); SIO.Close (F); return S; exception when others => if SIO.Is_Open (F) then SIO.Close (F); end if; return 0; end Get_File_Size; procedure Put_Message_With_Duration (Message : in String; D : in Duration) is package Duration_IO is new Ada.Text_IO.Fixed_IO (Duration); Hours : Natural; Minutes : Natural; Seconds : Duration; Residue : Long_Float; LD : Long_Float; begin LD := Long_Float (D); Hours := Natural (Long_Float'Floor (LD / 3600.0)); Residue := LD - (Long_Float (Hours) * 3600.0); Minutes := Natural (Long_Float'Floor (Residue / 60.0)); Seconds := Duration (Residue - (Long_Float (Minutes) * 60.0)); Ada.Text_IO.Put (Message); Ada.Integer_Text_IO.Put (Item => Hours, Width => 4, Base => 10); Ada.Text_IO.Put (':'); Ada.Integer_Text_IO.Put (Item => Minutes, Width => 2, Base => 10); Ada.Text_IO.Put (':'); Duration_IO.Put (Item => Seconds, Fore => 2, Aft => 2, Exp => 0); Ada.Text_IO.New_Line; end Put_Message_With_Duration; procedure Debug (Str : in String) is begin if CMD.Verbose then Ada.Text_IO.Put_Line (Str); end if; end Debug; function Is_A_VCG_File (File : in String) return Boolean is T : constant String := Ada.Characters.Handling.To_Lower (File); begin return (T'Length >= 5) and then (T (T'Last - 3) = '.' and T (T'Last - 2) = 'v' and T (T'Last - 1) = 'c' and T (T'Last) = 'g'); end Is_A_VCG_File; function Is_A_DPC_File (File : in String) return Boolean is T : constant String := Ada.Characters.Handling.To_Lower (File); begin return (T'Length >= 5) and then (T (T'Last - 3) = '.' 
and T (T'Last - 2) = 'd' and T (T'Last - 1) = 'p' and T (T'Last) = 'c'); end Is_A_DPC_File; end Utility; spark-2012.0.deb/sparksimp/cmd.ads0000644000175000017500000000603511753202340015705 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with GNAT.OS_Lib; package CMD is Valid : Boolean := False; All_Files : Boolean := False; -- -a flag Verbose : Boolean := False; -- -V flag Version_Requested : Boolean := False; -- -v flag Run_Simplifier : Boolean := True; -- made False by -ns Run_Zombiescope : Boolean := True; -- made False by -nz Run_Victor : Boolean := False; -- made True by -victor Run_Riposte : Boolean := False; -- made True by -riposte Dry_Run : Boolean := False; -- -n flag Log_Output : Boolean := False; -- -l flag Echo_Output : Boolean := False; -- -e flag Sort_VCGs : Boolean := False; -- -t flag Reverse_Order : Boolean := False; -- -r flag Processes : Positive := 1; -- -p=N flag -- References to strings for the binaries Simplifier_Exe_Switch : GNAT.OS_Lib.String_Access := null; ZombieScope_Exe_Switch : GNAT.OS_Lib.String_Access := null; Victor_Exe_Switch : GNAT.OS_Lib.String_Access := null; Riposte_Exe_Switch : GNAT.OS_Lib.String_Access := null; -- Returns the arguments in the "sargs" section of the command -- line. The returned array has exactly the number of elements -- required. The returned array can be null (i.e. no elements) SArgs : GNAT.OS_Lib.Argument_List_Access := null; -- Ditto for the "zargs" section ZArgs : GNAT.OS_Lib.Argument_List_Access := null; -- Ditto for the "vargs" section VArgs : GNAT.OS_Lib.Argument_List_Access := null; -- Ditto for the "rargs" section RArgs : GNAT.OS_Lib.Argument_List_Access := null; ------------------------------------------------ -- Reads and parses the command line and sets -- -- program-wide flags as appropriate. 
-- ------------------------------------------------ procedure Process_Command_Line (Switch_Char : in Character); -------------------------------------------- -- Print command name, options and brief -- -- description of each to Standard_Output -- -------------------------------------------- procedure Usage; end CMD; spark-2012.0.deb/sparksimp/new_expect.c0000644000175000017500000000742411753202341016762 0ustar eugeneugen//----------------------------------------------------------------------------- // (C) Altran Praxis Limited //----------------------------------------------------------------------------- // // The SPARK toolset is free software; you can redistribute it and/or modify it // under terms of the GNU General Public License as published by the Free // Software Foundation; either version 3, or (at your option) any later // version. The SPARK toolset is distributed in the hope that it will be // useful, but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General // Public License for more details. You should have received a copy of the GNU // General Public License distributed with the SPARK toolset; see file // COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of // the license. // //============================================================================= #ifdef Darwin /* Work around the fact that gcc/cpp does not define "__unix__" on Darwin. */ #define __unix__ #endif #ifdef _WIN32 #include #include int __new_expect_poll (int *fd, int num_fd, int timeout, int *is_set) { #define MAX_DELAY 100 int i, delay, infinite = 0; DWORD avail; HANDLE handles[num_fd]; for (i = 0; i < num_fd; i++) is_set[i] = 0; for (i = 0; i < num_fd; i++) handles[i] = (HANDLE) _get_osfhandle (fd [i]); /* Start with small delays, and then increase them, to avoid polling too much when waiting a long time */ delay = 5; if (timeout < 0) infinite = 1; while (1) { for (i = 0; i < num_fd; i++) { if (!PeekNamedPipe (handles [i], NULL, 0, NULL, &avail, NULL)) return -(i + 1); if (avail > 0) { is_set[i] = 1; return 1; } } if (!infinite && timeout <= 0) return 0; Sleep (delay); timeout -= delay; if (delay < MAX_DELAY) delay += 10; } } #elif defined (__unix__) #include #include #include #include #ifndef NO_FD_SET #define SELECT_MASK fd_set #else /* !NO_FD_SET */ #ifndef _AIX typedef long fd_mask; #endif /* _AIX */ #ifdef _IBMR2 #define SELECT_MASK void #else /* !_IBMR2 */ #define SELECT_MASK int #endif /* !_IBMR2 */ #endif /* !NO_FD_SET */ int __new_expect_poll (int *fd, int num_fd, int timeout, int *is_set) { struct timeval tv; SELECT_MASK rset; SELECT_MASK eset; int max_fd = 0; int ready; int i; int received; tv.tv_sec = timeout / 1000; tv.tv_usec = (timeout % 1000) * 1000; do { FD_ZERO (&rset); FD_ZERO (&eset); for (i = 0; i < num_fd; i++) { FD_SET (fd[i], &rset); FD_SET (fd[i], &eset); if (fd[i] > max_fd) max_fd = fd[i]; } ready = select (max_fd + 1, &rset, NULL, &eset, timeout == -1 ? 
NULL : &tv); if (ready > 0) { received = 0; for (i = 0; i < num_fd; i++) { if (FD_ISSET (fd[i], &rset)) { is_set[i] = 1; received = 1; } else is_set[i] = 0; } #ifdef __hpux__ for (i = 0; i < num_fd; i++) { if (FD_ISSET (fd[i], &eset)) { struct request_info ei; /* Only query and reset error state if no file descriptor is ready to be read, otherwise we will be signalling a died process too early */ if (!received) { ioctl (fd[i], TIOCREQCHECK, &ei); if (ei.request == TIOCCLOSE) { ioctl (fd[i], TIOCREQSET, &ei); return -1; } ioctl (fd[i], TIOCREQSET, &ei); } ready--; } } #endif } } while (timeout == -1 && ready == 0); return ready; } #endif spark-2012.0.deb/sparksimp/spark-expect.adb0000644000175000017500000015421011753202340017526 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ------------------------------------------------------------------------------ -- This file has been derived from the standard version of this -- -- package body in the GNAT runtime library. -- -- -- -- This package extends the standard package to support the multi-process -- -- functionality required by SPARKSimp. -- -- -- -- This package is a stop-gap measure until the multi-process functionality -- -- is supported in the standard GNAT sources. -- -- Below is the copyright notice from g-expect.adb -- -- -- -- Copyright (C) 2000-2009, AdaCore -- -- -- -- GNAT is free software; you can redistribute it and/or modify it under -- -- terms of the GNU General Public License as published by the Free Soft- -- -- ware Foundation; either version 2, or (at your option) any later ver- -- -- sion. GNAT is distributed in the hope that it will be useful, but WITH- -- -- OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY -- -- or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License -- -- for more details. You should have received a copy of the GNU General -- -- Public License distributed with GNAT; see file COPYING. If not, write -- -- to the Free Software Foundation, 51 Franklin Street, Fifth Floor, -- -- Boston, MA 02110-1301, USA. -- -- -- -- As a special exception, if other files instantiate generics from this -- -- unit, or you link this unit with other files to produce an executable, -- -- this unit does not by itself cause the resulting executable to be -- -- covered by the GNU General Public License. This exception does not -- -- however invalidate any other reasons why the executable file might be -- -- covered by the GNU Public License. -- -- -- -- GNAT was originally developed by the GNAT team at New York University. 
-- -- Extensive contributions were provided by Ada Core Technologies Inc. -- ------------------------------------------------------------------------------ with System; use System; with Ada.Calendar; use Ada.Calendar; with Ada.Characters.Latin_1; with GNAT.IO; with GNAT.OS_Lib; with GNAT.Regpat; use GNAT.Regpat; with Unchecked_Deallocation; use type GNAT.OS_Lib.File_Descriptor; use type GNAT.OS_Lib.String_Access; package body SPARK.Expect is type Array_Of_Pd is array (Positive range <>) of Process_Descriptor_Access; procedure Expect_Internal (Descriptors : in out Array_Of_Pd; Result : out Expect_Match; Timeout : Integer; Full_Buffer : Boolean); -- Internal function used to read from the process Descriptor. -- -- Three outputs are possible: -- Result=Expect_Timeout, if no output was available before the timeout -- expired. -- Result=Expect_Full_Buffer, if Full_Buffer is True and some characters -- had to be discarded from the internal buffer of Descriptor. -- Result=, indicates how many characters were added to the -- internal buffer. These characters are from indexes -- Descriptor.Buffer_Index - Result + 1 .. Descriptor.Buffer_Index -- Process_Died is raised if the process is no longer valid. procedure New_Expect_Internal (Descriptors : in out Array_Of_Pd; Result : out Expect_Match; Process_Index : out Positive; Timeout : in Integer; Full_Buffer : in Boolean); -- Internal function used by New_Expect below -- -- Three outputs are possible: -- Result=Expect_Timeout, if no output was available before the timeout -- expired. -- Result=Expect_Full_Buffer, if Full_Buffer is True and some characters -- had to be discarded from the internal buffer of Descriptor. -- Result=, indicates how many characters were added to the -- internal buffer. These characters are from indexes -- Descriptor.Buffer_Index - Result + 1 .. Descriptor.Buffer_Index -- Result=Expect_Process_Died -- When a process dies, this implementation return its descriptor -- index in Process_Index. No exception is raised in this case. procedure New_Expect (Result : out Expect_Match; Process_Index : out Positive; Regexps : in Multiprocess_Regexp_Array; Matched : out GNAT.Regpat.Match_Array; Timeout : in Integer := 10000; Full_Buffer : in Boolean := False); -- General purpose Expect, but return the index of the matched or -- terminated Process in Process_Index. No exception is -- raised in this case. procedure Reinitialize_Buffer (Descriptor : in out Process_Descriptor'Class); -- Reinitialize the internal buffer. -- The buffer is deleted up to the end of the last match. procedure Free is new Unchecked_Deallocation (Pattern_Matcher, Pattern_Matcher_Access); procedure Free is new Unchecked_Deallocation (Filter_List_Elem, Filter_List); procedure Call_Filters (Pid : Process_Descriptor'Class; Str : String; Filter_On : Filter_Type); -- Call all the filters that have the appropriate type. 
-- This function does nothing if the filters are locked ------------------------------ -- Target dependent section -- ------------------------------ function Dup (Fd : GNAT.OS_Lib.File_Descriptor) return GNAT.OS_Lib.File_Descriptor; pragma Import (C, Dup); procedure Dup2 (Old_Fd, New_Fd : GNAT.OS_Lib.File_Descriptor); pragma Import (C, Dup2); procedure Kill (Pid : Process_Id; Sig_Num : Integer; Close : Integer); pragma Import (C, Kill, "__gnat_kill"); -- if Close is set to 1 all OS resources used by the Pid must be freed function Create_Pipe (Pipe : access Pipe_Type) return Integer; pragma Import (C, Create_Pipe, "__gnat_pipe"); function Poll (Fds : System.Address; Num_Fds : Integer; Timeout : Integer; Is_Set : System.Address) return Integer; pragma Import (C, Poll, "__gnat_expect_poll"); -- Check whether there is any data waiting on the file descriptor -- Out_fd, and wait if there is none, at most Timeout milliseconds -- Returns -1 in case of error, 0 if the timeout expired before -- data became available. -- -- Out_Is_Set is set to 1 if data was available, 0 otherwise. function New_Poll (Fds : System.Address; Num_Fds : Integer; Timeout : Integer; Is_Set : System.Address) return Integer; pragma Import (C, New_Poll, "__new_expect_poll"); -- Check whether there is any data waiting on the file descriptors -- denoted by Fds, and wait if there is none, at most Timeout milliseconds -- -- The meaning of the return value differs between UNIX systems -- (which _do_ bother to implement select() properly) and Windows -- (which never has!) ------------------- -- On UNIX platforms (e.g. Solaris, Linux, OS X/Darwin) -- -- Returns a positive number indicating that number of descriptors are -- ready, and Is_Set (N)=1 if data is available on descriptor N. -- -- If a sub-process N dies, then Is_Set (N)=1 BUT -- a subsequent call to GNAT.OS_Lib.Read on that descriptor will -- indicate End_of_File. -- -- Returns 0 is no descriptors are ready AND the Timeout expires. -- -- Returns -1 in the event of some other error. ------------------- -- On Windows platforms -- -- Returns 1 and Is_Set(N)=1 when data is available on descriptor N. -- -- Returns 0 is no descriptors are ready AND the Timeout expires. -- -- Returns -N in the case where the sub-process connected to -- descriptor N has died. 
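   -- The fragment below is an editorial sketch, not part of the original
   -- source: it summarises how a caller can interpret New_Poll results
   -- given the platform differences described above. Ready, Fds and
   -- Is_Set stand for the kind of locals used in New_Expect_Internal
   -- further down.
   --
   --    Ready := New_Poll (Fds'Address, Fds'Length, Timeout, Is_Set'Address);
   --
   --    if Ready > 0 then
   --       null;  -- scan Is_Set: entries equal to 1 have data (or EOF) ready
   --    elsif Ready = 0 then
   --       null;  -- the timeout expired with no data available
   --    else
   --       null;  -- on Windows, -Ready is the slot of the child that died;
   --              -- on Unix, a negative value means the poll itself failed
   --    end if;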
------------------- function Waitpid (Pid : Process_Id) return Integer; pragma Import (C, Waitpid, "__gnat_waitpid"); -- Wait for a specific process id, and return its exit code --------- -- "+" -- --------- function "+" (S : String) return GNAT.OS_Lib.String_Access is begin return new String'(S); end "+"; --------- -- "+" -- --------- function "+" (P : GNAT.Regpat.Pattern_Matcher) return Pattern_Matcher_Access is begin return new GNAT.Regpat.Pattern_Matcher'(P); end "+"; ---------------- -- Add_Filter -- ---------------- procedure Add_Filter (Descriptor : in out Process_Descriptor; Filter : Filter_Function; Filter_On : Filter_Type := Output; User_Data : System.Address := System.Null_Address; After : Boolean := False) is Current : Filter_List := Descriptor.Filters; begin if After then while Current /= null and then Current.Next /= null loop Current := Current.Next; end loop; if Current = null then Descriptor.Filters := new Filter_List_Elem'(Filter => Filter, Filter_On => Filter_On, User_Data => User_Data, Next => null); else Current.Next := new Filter_List_Elem'(Filter => Filter, Filter_On => Filter_On, User_Data => User_Data, Next => null); end if; else Descriptor.Filters := new Filter_List_Elem'(Filter => Filter, Filter_On => Filter_On, User_Data => User_Data, Next => Descriptor.Filters); end if; end Add_Filter; ------------------ -- Call_Filters -- ------------------ procedure Call_Filters (Pid : Process_Descriptor'Class; Str : String; Filter_On : Filter_Type) is Current_Filter : Filter_List; begin if Pid.Filters_Lock = 0 then Current_Filter := Pid.Filters; while Current_Filter /= null loop if Current_Filter.Filter_On = Filter_On then Current_Filter.Filter (Pid, Str, Current_Filter.User_Data); end if; Current_Filter := Current_Filter.Next; end loop; end if; end Call_Filters; ----------- -- Close -- ----------- procedure Close (Descriptor : in out Process_Descriptor; Status : out Integer) is Current_Filter : Filter_List; Next_Filter : Filter_List; begin GNAT.OS_Lib.Close (Descriptor.Input_Fd); if Descriptor.Error_Fd /= Descriptor.Output_Fd then GNAT.OS_Lib.Close (Descriptor.Error_Fd); end if; GNAT.OS_Lib.Close (Descriptor.Output_Fd); -- ??? 
Should have timeouts for different signals Kill (Descriptor.Pid, 9, 0); GNAT.OS_Lib.Free (Descriptor.Buffer); Descriptor.Buffer_Size := 0; Current_Filter := Descriptor.Filters; while Current_Filter /= null loop Next_Filter := Current_Filter.Next; Free (Current_Filter); Current_Filter := Next_Filter; end loop; Descriptor.Filters := null; Status := Waitpid (Descriptor.Pid); end Close; procedure Close (Descriptor : in out Process_Descriptor) is Status : Integer; begin Close (Descriptor, Status); end Close; ------------ -- Expect -- ------------ procedure Expect (Descriptor : in out Process_Descriptor; Result : out Expect_Match; Regexp : String; Timeout : Integer := 10000; Full_Buffer : Boolean := False) is begin if Regexp = "" then Expect (Descriptor, Result, Never_Match, Timeout, Full_Buffer); else Expect (Descriptor, Result, Compile (Regexp), Timeout, Full_Buffer); end if; end Expect; procedure Expect (Descriptor : in out Process_Descriptor; Result : out Expect_Match; Regexp : String; Matched : out GNAT.Regpat.Match_Array; Timeout : Integer := 10000; Full_Buffer : Boolean := False) is begin pragma Assert (Matched'First = 0); if Regexp = "" then Expect (Descriptor, Result, Never_Match, Matched, Timeout, Full_Buffer); else Expect (Descriptor, Result, Compile (Regexp), Matched, Timeout, Full_Buffer); end if; end Expect; procedure Expect (Descriptor : in out Process_Descriptor; Result : out Expect_Match; Regexp : GNAT.Regpat.Pattern_Matcher; Timeout : Integer := 10000; Full_Buffer : Boolean := False) is Matched : GNAT.Regpat.Match_Array (0 .. 0); begin Expect (Descriptor, Result, Regexp, Matched, Timeout, Full_Buffer); end Expect; procedure Expect (Descriptor : in out Process_Descriptor; Result : out Expect_Match; Regexp : GNAT.Regpat.Pattern_Matcher; Matched : out GNAT.Regpat.Match_Array; Timeout : Integer := 10000; Full_Buffer : Boolean := False) is N : Expect_Match; Descriptors : Array_Of_Pd := (1 => Descriptor'Unrestricted_Access); Try_Until : constant Time := Clock + Duration (Timeout) / 1000.0; Timeout_Tmp : Integer := Timeout; begin pragma Assert (Matched'First = 0); Reinitialize_Buffer (Descriptor); loop -- First, test if what is already in the buffer matches (This is -- required if this package is used in multi-task mode, since one of -- the tasks might have added something in the buffer, and we don't -- want other tasks to wait for new input to be available before -- checking the regexps). Match (Regexp, Descriptor.Buffer (1 .. Descriptor.Buffer_Index), Matched); if Descriptor.Buffer_Index >= 1 and then Matched (0).First /= 0 then Result := 1; Descriptor.Last_Match_Start := Matched (0).First; Descriptor.Last_Match_End := Matched (0).Last; return; end if; -- Else try to read new input Expect_Internal (Descriptors, N, Timeout_Tmp, Full_Buffer); if N = Expect_Timeout or else N = Expect_Full_Buffer then Result := N; return; end if; -- Calculate the timeout for the next turn -- Note that Timeout is, from the caller's perspective, the maximum -- time until a match, not the maximum time until some output is -- read, and thus cannot be reused as is for Expect_Internal. if Timeout /= -1 then Timeout_Tmp := Integer (Try_Until - Clock) * 1000; if Timeout_Tmp < 0 then Result := Expect_Timeout; exit; end if; end if; end loop; -- Even if we had the general timeout above, we have to test that the -- last test we read from the external process didn't match. Match (Regexp, Descriptor.Buffer (1 .. 
Descriptor.Buffer_Index), Matched); if Matched (0).First /= 0 then Result := 1; Descriptor.Last_Match_Start := Matched (0).First; Descriptor.Last_Match_End := Matched (0).Last; return; end if; end Expect; procedure Expect (Descriptor : in out Process_Descriptor; Result : out Expect_Match; Regexps : Regexp_Array; Timeout : Integer := 10000; Full_Buffer : Boolean := False) is Patterns : Compiled_Regexp_Array (Regexps'Range); Matched : GNAT.Regpat.Match_Array (0 .. 0); begin for J in Regexps'Range loop Patterns (J) := new Pattern_Matcher'(Compile (Regexps (J).all)); end loop; Expect (Descriptor, Result, Patterns, Matched, Timeout, Full_Buffer); for J in Regexps'Range loop Free (Patterns (J)); end loop; end Expect; procedure Expect (Descriptor : in out Process_Descriptor; Result : out Expect_Match; Regexps : Compiled_Regexp_Array; Timeout : Integer := 10000; Full_Buffer : Boolean := False) is Matched : GNAT.Regpat.Match_Array (0 .. 0); begin Expect (Descriptor, Result, Regexps, Matched, Timeout, Full_Buffer); end Expect; procedure Expect (Result : out Expect_Match; Regexps : Multiprocess_Regexp_Array; Timeout : Integer := 10000; Full_Buffer : Boolean := False) is Matched : GNAT.Regpat.Match_Array (0 .. 0); begin Expect (Result, Regexps, Matched, Timeout, Full_Buffer); end Expect; procedure Expect (Result : out Expect_Match; Process_Index : out Positive; Regexps : in Multiprocess_Regexp_Array; Timeout : in Integer := 10000; Full_Buffer : in Boolean := False) is Matched : GNAT.Regpat.Match_Array (0 .. 0); begin New_Expect (Result, Process_Index, Regexps, Matched, Timeout, Full_Buffer); end Expect; procedure Expect (Descriptor : in out Process_Descriptor; Result : out Expect_Match; Regexps : Regexp_Array; Matched : out GNAT.Regpat.Match_Array; Timeout : Integer := 10000; Full_Buffer : Boolean := False) is Patterns : Compiled_Regexp_Array (Regexps'Range); begin pragma Assert (Matched'First = 0); for J in Regexps'Range loop Patterns (J) := new Pattern_Matcher'(Compile (Regexps (J).all)); end loop; Expect (Descriptor, Result, Patterns, Matched, Timeout, Full_Buffer); for J in Regexps'Range loop Free (Patterns (J)); end loop; end Expect; procedure Expect (Descriptor : in out Process_Descriptor; Result : out Expect_Match; Regexps : Compiled_Regexp_Array; Matched : out GNAT.Regpat.Match_Array; Timeout : Integer := 10000; Full_Buffer : Boolean := False) is N : Expect_Match; Descriptors : Array_Of_Pd := (1 => Descriptor'Unrestricted_Access); begin pragma Assert (Matched'First = 0); Reinitialize_Buffer (Descriptor); loop -- First, test if what is already in the buffer matches (This is -- required if this package is used in multi-task mode, since one of -- the tasks might have added something in the buffer, and we don't -- want other tasks to wait for new input to be available before -- checking the regexps). if Descriptor.Buffer /= null then for J in Regexps'Range loop Match (Regexps (J).all, Descriptor.Buffer (1 .. 
Descriptor.Buffer_Index), Matched); if Matched (0) /= No_Match then Result := Expect_Match (J); Descriptor.Last_Match_Start := Matched (0).First; Descriptor.Last_Match_End := Matched (0).Last; return; end if; end loop; end if; Expect_Internal (Descriptors, N, Timeout, Full_Buffer); if N = Expect_Timeout or else N = Expect_Full_Buffer then Result := N; return; end if; end loop; end Expect; procedure Expect (Result : out Expect_Match; Regexps : Multiprocess_Regexp_Array; Matched : out GNAT.Regpat.Match_Array; Timeout : Integer := 10000; Full_Buffer : Boolean := False) is N : Expect_Match; Descriptors : Array_Of_Pd (Regexps'Range); begin pragma Assert (Matched'First = 0); for J in Descriptors'Range loop Descriptors (J) := Regexps (J).Descriptor; Reinitialize_Buffer (Regexps (J).Descriptor.all); end loop; loop -- First, test if what is already in the buffer matches (This is -- required if this package is used in multi-task mode, since one of -- the tasks might have added something in the buffer, and we don't -- want other tasks to wait for new input to be available before -- checking the regexps). for J in Regexps'Range loop Match (Regexps (J).Regexp.all, Regexps (J).Descriptor.Buffer (1 .. Regexps (J).Descriptor.Buffer_Index), Matched); if Matched (0) /= No_Match then Result := Expect_Match (J); Regexps (J).Descriptor.Last_Match_Start := Matched (0).First; Regexps (J).Descriptor.Last_Match_End := Matched (0).Last; return; end if; end loop; Expect_Internal (Descriptors, N, Timeout, Full_Buffer); if N = Expect_Timeout or else N = Expect_Full_Buffer then Result := N; return; end if; end loop; end Expect; ---------------- -- New_Expect -- ---------------- procedure New_Expect (Result : out Expect_Match; Process_Index : out Positive; Regexps : in Multiprocess_Regexp_Array; Matched : out GNAT.Regpat.Match_Array; Timeout : in Integer := 10000; Full_Buffer : in Boolean := False) is N : Expect_Match; Descriptors : Array_Of_Pd (Regexps'Range); begin pragma Assert (Matched'First = 0); for J in Descriptors'Range loop Descriptors (J) := Regexps (J).Descriptor; Reinitialize_Buffer (Regexps (J).Descriptor.all); end loop; loop -- First, test if what is already in the buffer matches (This is -- required if this package is used in multi-task mode, since one of -- the tasks might have added something in the buffer, and we don't -- want other tasks to wait for new input to be available before -- checking the regexps). for J in Regexps'Range loop Match (Regexps (J).Regexp.all, Regexps (J).Descriptor.Buffer (1 .. 
Regexps (J).Descriptor.Buffer_Index), Matched); if Matched (0) /= No_Match then Result := Expect_Match (J); Process_Index := J; Regexps (J).Descriptor.Last_Match_Start := Matched (0).First; Regexps (J).Descriptor.Last_Match_End := Matched (0).Last; return; end if; end loop; New_Expect_Internal (Descriptors, N, Process_Index, Timeout, Full_Buffer); -- Repeat until Timeout, full buffer, or process(es) died if N = Expect_Timeout or else N = Expect_Full_Buffer or else N = Expect_Process_Died then Result := N; return; end if; end loop; end New_Expect; --------------------- -- Expect_Internal -- --------------------- procedure Expect_Internal (Descriptors : in out Array_Of_Pd; Result : out Expect_Match; Timeout : Integer; Full_Buffer : Boolean) is Num_Descriptors : Integer; Buffer_Size : Integer := 0; N : Integer; type File_Descriptor_Array is array (Descriptors'Range) of GNAT.OS_Lib.File_Descriptor; Fds : aliased File_Descriptor_Array; type Integer_Array is array (Descriptors'Range) of Integer; Is_Set : aliased Integer_Array; begin for J in Descriptors'Range loop Fds (J) := Descriptors (J).Output_Fd; if Descriptors (J).Buffer_Size = 0 then Buffer_Size := Integer'Max (Buffer_Size, 4096); else Buffer_Size := Integer'Max (Buffer_Size, Descriptors (J).Buffer_Size); end if; end loop; declare Buffer : aliased String (1 .. Buffer_Size); -- Buffer used for input. This is allocated only once, not for -- every iteration of the loop begin -- Loop until we match or we have a timeout loop Num_Descriptors := Poll (Fds'Address, Fds'Length, Timeout, Is_Set'Address); case Num_Descriptors is -- Error? when -1 => raise Process_Died; -- Timeout? when 0 => Result := Expect_Timeout; return; -- Some input when others => for J in Descriptors'Range loop if Is_Set (J) = 1 then Buffer_Size := Descriptors (J).Buffer_Size; if Buffer_Size = 0 then Buffer_Size := 4096; end if; N := GNAT.OS_Lib.Read (Descriptors (J).Output_Fd, Buffer'Address, Buffer_Size); -- Error or End of file if N <= 0 then -- ??? Note that ddd tries again up to three times -- in that case. See LiterateA.C:174 raise Process_Died; else -- If there is no limit to the buffer size if Descriptors (J).Buffer_Size = 0 then declare Tmp : GNAT.OS_Lib.String_Access := Descriptors (J).Buffer; begin if Tmp /= null then Descriptors (J).Buffer := new String (1 .. Tmp'Length + N); Descriptors (J).Buffer (1 .. Tmp'Length) := Tmp.all; Descriptors (J).Buffer (Tmp'Length + 1 .. Tmp'Length + N) := Buffer (1 .. N); GNAT.OS_Lib.Free (Tmp); Descriptors (J).Buffer_Index := Descriptors (J).Buffer'Last; else Descriptors (J).Buffer := new String (1 .. N); Descriptors (J).Buffer.all := Buffer (1 .. N); Descriptors (J).Buffer_Index := N; end if; end; else -- Add what we read to the buffer if Descriptors (J).Buffer_Index + N - 1 > Descriptors (J).Buffer_Size then -- If the user wants to know when we have -- read more than the buffer can contain. if Full_Buffer then Result := Expect_Full_Buffer; return; end if; -- Keep as much as possible from the buffer, -- and forget old characters. Descriptors (J).Buffer (1 .. Descriptors (J).Buffer_Size - N) := Descriptors (J).Buffer ( N - Descriptors (J).Buffer_Size + Descriptors (J).Buffer_Index + 1 .. Descriptors (J).Buffer_Index); Descriptors (J).Buffer_Index := Descriptors (J).Buffer_Size - N; end if; -- Keep what we read in the buffer Descriptors (J).Buffer (Descriptors (J).Buffer_Index + 1 .. Descriptors (J).Buffer_Index + N) := Buffer (1 .. 
N); Descriptors (J).Buffer_Index := Descriptors (J).Buffer_Index + N; end if; -- Call each of the output filter with what we -- read. Call_Filters (Descriptors (J).all, Buffer (1 .. N), Output); Result := Expect_Match (N); return; end if; end if; end loop; end case; end loop; end; end Expect_Internal; ------------------------- -- New_Expect_Internal -- ------------------------- procedure New_Expect_Internal (Descriptors : in out Array_Of_Pd; Result : out Expect_Match; Process_Index : out Positive; Timeout : in Integer; Full_Buffer : in Boolean) is Num_Descriptors : Integer; Buffer_Size : Integer := 0; N : Integer; type File_Descriptor_Array is array (Descriptors'Range) of GNAT.OS_Lib.File_Descriptor; Fds : aliased File_Descriptor_Array; type Integer_Array is array (Descriptors'Range) of Integer; Is_Set : aliased Integer_Array; begin Process_Index := Positive'First; for J in Descriptors'Range loop Fds (J) := Descriptors (J).Output_Fd; if Descriptors (J).Buffer_Size = 0 then Buffer_Size := Integer'Max (Buffer_Size, 4096); else Buffer_Size := Integer'Max (Buffer_Size, Descriptors (J).Buffer_Size); end if; end loop; declare Buffer : aliased String (1 .. Buffer_Size); -- Buffer used for input. This is allocated only once, not for -- every iteration of the loop begin -- Loop until we match or we have a timeout loop Num_Descriptors := New_Poll (Fds'Address, Fds'Length, Timeout, Is_Set'Address); case Num_Descriptors is -- Error? when Integer'First .. -1 => -- On Windows, a negative number -N indicates that process -- in slot N has died. Return this info to the caller. Result := Expect_Process_Died; Process_Index := -Num_Descriptors; return; -- Timeout? when 0 => Result := Expect_Timeout; return; -- Some input when others => for J in Descriptors'Range loop if Is_Set (J) = 1 then Buffer_Size := Descriptors (J).Buffer_Size; if Buffer_Size = 0 then Buffer_Size := 4096; end if; N := GNAT.OS_Lib.Read (Descriptors (J).Output_Fd, Buffer'Address, Buffer_Size); if N <= 0 then -- On Unix platforms, the read can fail here -- if a subprocess has died. -- Indicate this back to the caller via Result -- and Process_Index Result := Expect_Process_Died; Process_Index := J; return; else -- If there is no limit to the buffer size if Descriptors (J).Buffer_Size = 0 then declare Tmp : GNAT.OS_Lib.String_Access := Descriptors (J).Buffer; begin if Tmp /= null then Descriptors (J).Buffer := new String (1 .. Tmp'Length + N); Descriptors (J).Buffer (1 .. Tmp'Length) := Tmp.all; Descriptors (J).Buffer (Tmp'Length + 1 .. Tmp'Length + N) := Buffer (1 .. N); GNAT.OS_Lib.Free (Tmp); Descriptors (J).Buffer_Index := Descriptors (J).Buffer'Last; else Descriptors (J).Buffer := new String (1 .. N); Descriptors (J).Buffer.all := Buffer (1 .. N); Descriptors (J).Buffer_Index := N; end if; end; else -- Add what we read to the buffer if Descriptors (J).Buffer_Index + N - 1 > Descriptors (J).Buffer_Size then -- If the user wants to know when we have -- read more than the buffer can contain. if Full_Buffer then Result := Expect_Full_Buffer; Process_Index := J; return; end if; -- Keep as much as possible from the buffer, -- and forget old characters. Descriptors (J).Buffer (1 .. Descriptors (J).Buffer_Size - N) := Descriptors (J).Buffer ( N - Descriptors (J).Buffer_Size + Descriptors (J).Buffer_Index + 1 .. Descriptors (J).Buffer_Index); Descriptors (J).Buffer_Index := Descriptors (J).Buffer_Size - N; end if; -- Keep what we read in the buffer Descriptors (J).Buffer (Descriptors (J).Buffer_Index + 1 .. 
Descriptors (J).Buffer_Index + N) := Buffer (1 .. N); Descriptors (J).Buffer_Index := Descriptors (J).Buffer_Index + N; end if; -- Call each of the output filter with what we -- read. Call_Filters (Descriptors (J).all, Buffer (1 .. N), Output); Result := Expect_Match (N); Process_Index := J; return; end if; end if; end loop; end case; end loop; end; end New_Expect_Internal; ---------------- -- Expect_Out -- ---------------- function Expect_Out (Descriptor : Process_Descriptor) return String is begin return Descriptor.Buffer (1 .. Descriptor.Last_Match_End); end Expect_Out; ---------------------- -- Expect_Out_Match -- ---------------------- function Expect_Out_Match (Descriptor : Process_Descriptor) return String is begin return Descriptor.Buffer (Descriptor.Last_Match_Start .. Descriptor.Last_Match_End); end Expect_Out_Match; ----------- -- Flush -- ----------- procedure Flush (Descriptor : in out Process_Descriptor; Timeout : Integer := 0) is Buffer_Size : constant Integer := 8192; Num_Descriptors : Integer; N : Integer; Is_Set : aliased Integer; Buffer : aliased String (1 .. Buffer_Size); begin -- Empty the current buffer Descriptor.Last_Match_End := Descriptor.Buffer_Index; Reinitialize_Buffer (Descriptor); -- Read everything from the process to flush its output loop Num_Descriptors := Poll (Descriptor.Output_Fd'Address, 1, Timeout, Is_Set'Address); case Num_Descriptors is -- Error ? when -1 => raise Process_Died; -- Timeout => End of flush when 0 => return; -- Some input when others => if Is_Set = 1 then N := GNAT.OS_Lib.Read (Descriptor.Output_Fd, Buffer'Address, Buffer_Size); if N = -1 then raise Process_Died; elsif N = 0 then return; end if; end if; end case; end loop; end Flush; ------------------------ -- Get_Command_Output -- ------------------------ function Get_Command_Output (Command : String; Arguments : GNAT.OS_Lib.Argument_List; Input : String; Status : access Integer; Err_To_Out : Boolean := False) return String is Process : Process_Descriptor; Output : GNAT.OS_Lib.String_Access := new String (1 .. 1024); -- Buffer used to accumulate standard output from the launched -- command, expanded as necessary during execution. Last : Integer := 0; -- Index of the last used character within Output begin Non_Blocking_Spawn (Descriptor => Process, Command => Command, Args => Arguments, Err_To_Out => Err_To_Out); if Input'Length > 0 then Send (Process, Input); end if; GNAT.OS_Lib.Close (Get_Input_Fd (Process)); declare Result : Expect_Match; begin -- This loop runs until the call to Expect raises Process_Died loop Expect (Process, Result, ".+"); declare NOutput : GNAT.OS_Lib.String_Access; S : constant String := Expect_Out (Process); pragma Assert (S'Length > 0); begin -- Expand buffer if we need more space if Last + S'Length > Output'Last then NOutput := new String (1 .. 2 * Output'Last); NOutput (Output'Range) := Output.all; GNAT.OS_Lib.Free (Output); -- Here if current buffer size is OK else NOutput := Output; end if; NOutput (Last + 1 .. Last + S'Length) := S; Last := Last + S'Length; Output := NOutput; end; end loop; exception when Process_Died => Close (Process, Status.all); end; if Last = 0 then return ""; end if; declare S : constant String := Output (1 .. 
Last); begin GNAT.OS_Lib.Free (Output); return S; end; end Get_Command_Output; ------------------ -- Get_Error_Fd -- ------------------ function Get_Error_Fd (Descriptor : Process_Descriptor) return GNAT.OS_Lib.File_Descriptor is begin return Descriptor.Error_Fd; end Get_Error_Fd; ------------------ -- Get_Input_Fd -- ------------------ function Get_Input_Fd (Descriptor : Process_Descriptor) return GNAT.OS_Lib.File_Descriptor is begin return Descriptor.Input_Fd; end Get_Input_Fd; ------------------- -- Get_Output_Fd -- ------------------- function Get_Output_Fd (Descriptor : Process_Descriptor) return GNAT.OS_Lib.File_Descriptor is begin return Descriptor.Output_Fd; end Get_Output_Fd; ------------- -- Get_Pid -- ------------- function Get_Pid (Descriptor : Process_Descriptor) return Process_Id is begin return Descriptor.Pid; end Get_Pid; --------------- -- Interrupt -- --------------- procedure Interrupt (Descriptor : in out Process_Descriptor) is SIGINT : constant := 2; begin Send_Signal (Descriptor, SIGINT); end Interrupt; ------------------ -- Lock_Filters -- ------------------ procedure Lock_Filters (Descriptor : in out Process_Descriptor) is begin Descriptor.Filters_Lock := Descriptor.Filters_Lock + 1; end Lock_Filters; ------------------------ -- Non_Blocking_Spawn -- ------------------------ procedure Non_Blocking_Spawn (Descriptor : out Process_Descriptor'Class; Command : String; Args : GNAT.OS_Lib.Argument_List; Buffer_Size : Natural := 4096; Err_To_Out : Boolean := False) is function Fork return Process_Id; pragma Import (C, Fork, "__gnat_expect_fork"); -- Starts a new process if possible. See the Unix command fork for more -- information. On systems that do not support this capability (such as -- Windows...), this command does nothing, and Fork will return -- Null_Pid. Pipe1, Pipe2, Pipe3 : aliased Pipe_Type; Arg : GNAT.OS_Lib.String_Access; Arg_List : GNAT.OS_Lib.String_List (1 .. Args'Length + 2); C_Arg_List : aliased array (1 .. Args'Length + 2) of System.Address; Command_With_Path : GNAT.OS_Lib.String_Access; begin -- Create the rest of the pipes Set_Up_Communications (Descriptor, Err_To_Out, Pipe1'Access, Pipe2'Access, Pipe3'Access); Command_With_Path := GNAT.OS_Lib.Locate_Exec_On_Path (Command); if Command_With_Path = null then raise Invalid_Process; end if; -- Fork a new process Descriptor.Pid := Fork; -- Are we now in the child (or, for Windows, still in the common -- process). if Descriptor.Pid = Null_Pid then -- Prepare an array of arguments to pass to C Arg := new String (1 .. Command_With_Path'Length + 1); Arg (1 .. Command_With_Path'Length) := Command_With_Path.all; Arg (Arg'Last) := Ada.Characters.Latin_1.NUL; Arg_List (1) := Arg; for J in Args'Range loop Arg := new String (1 .. Args (J)'Length + 1); Arg (1 .. Args (J)'Length) := Args (J).all; Arg (Arg'Last) := Ada.Characters.Latin_1.NUL; Arg_List (J + 2 - Args'First) := Arg.all'Access; end loop; Arg_List (Arg_List'Last) := null; -- Make sure all arguments are compatible with OS conventions GNAT.OS_Lib.Normalize_Arguments (Arg_List); -- Prepare low-level argument list from the normalized arguments for K in Arg_List'Range loop if Arg_List (K) /= null then C_Arg_List (K) := Arg_List (K).all'Address; else C_Arg_List (K) := System.Null_Address; end if; end loop; -- This does not return on Unix systems Set_Up_Child_Communications (Descriptor, Pipe1, Pipe2, Pipe3, Command_With_Path.all, C_Arg_List'Address); end if; GNAT.OS_Lib.Free (Command_With_Path); -- Did we have an error when spawning the child ? 
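      -- (Recap of the Fork/Pid convention used just below: a value less than
      -- Null_Pid means the fork/spawn failed, Null_Pid means we are executing
      -- in the child (or, on Windows, still in the common process), and a
      -- value greater than Null_Pid is the child's process id as seen by the
      -- parent.)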
if Descriptor.Pid < Null_Pid then raise Invalid_Process; else -- We are now in the parent process Set_Up_Parent_Communications (Descriptor, Pipe1, Pipe2, Pipe3); end if; -- Create the buffer Descriptor.Buffer_Size := Buffer_Size; if Buffer_Size /= 0 then Descriptor.Buffer := new String (1 .. Positive (Buffer_Size)); end if; -- Initialize the filters Descriptor.Filters := null; end Non_Blocking_Spawn; ------------------------- -- Reinitialize_Buffer -- ------------------------- procedure Reinitialize_Buffer (Descriptor : in out Process_Descriptor'Class) is begin if Descriptor.Buffer_Size = 0 then declare Tmp : GNAT.OS_Lib.String_Access := Descriptor.Buffer; begin Descriptor.Buffer := new String (1 .. Descriptor.Buffer_Index - Descriptor.Last_Match_End); if Tmp /= null then Descriptor.Buffer.all := Tmp (Descriptor.Last_Match_End + 1 .. Descriptor.Buffer_Index); GNAT.OS_Lib.Free (Tmp); end if; end; Descriptor.Buffer_Index := Descriptor.Buffer'Last; else Descriptor.Buffer (1 .. Descriptor.Buffer_Index - Descriptor.Last_Match_End) := Descriptor.Buffer (Descriptor.Last_Match_End + 1 .. Descriptor.Buffer_Index); if Descriptor.Buffer_Index > Descriptor.Last_Match_End then Descriptor.Buffer_Index := Descriptor.Buffer_Index - Descriptor.Last_Match_End; else Descriptor.Buffer_Index := 0; end if; end if; Descriptor.Last_Match_Start := 0; Descriptor.Last_Match_End := 0; end Reinitialize_Buffer; ------------------- -- Remove_Filter -- ------------------- procedure Remove_Filter (Descriptor : in out Process_Descriptor; Filter : Filter_Function) is Previous : Filter_List := null; Current : Filter_List := Descriptor.Filters; begin while Current /= null loop if Current.Filter = Filter then if Previous = null then Descriptor.Filters := Current.Next; else Previous.Next := Current.Next; end if; end if; Previous := Current; Current := Current.Next; end loop; end Remove_Filter; ---------- -- Send -- ---------- procedure Send (Descriptor : in out Process_Descriptor; Str : String; Add_LF : Boolean := True; Empty_Buffer : Boolean := False) is Full_Str : constant String := Str & Ada.Characters.Latin_1.LF; Last : Natural; Result : Expect_Match; Descriptors : Array_Of_Pd := (1 => Descriptor'Unrestricted_Access); Dummy : Natural; pragma Unreferenced (Dummy); begin if Empty_Buffer then -- Force a read on the process if there is anything waiting Expect_Internal (Descriptors => Descriptors, Result => Result, Timeout => 0, Full_Buffer => False); Descriptor.Last_Match_End := Descriptor.Buffer_Index; -- Empty the buffer Reinitialize_Buffer (Descriptor); end if; if Add_LF then Last := Full_Str'Last; else Last := Full_Str'Last - 1; end if; Call_Filters (Descriptor, Full_Str (Full_Str'First .. Last), Input); Dummy := GNAT.OS_Lib.Write (Descriptor.Input_Fd, Full_Str'Address, Last - Full_Str'First + 1); end Send; ----------------- -- Send_Signal -- ----------------- procedure Send_Signal (Descriptor : Process_Descriptor; Signal : Integer) is begin Kill (Descriptor.Pid, Signal, 1); -- ??? 
Need to check process status here end Send_Signal; --------------------------------- -- Set_Up_Child_Communications -- --------------------------------- procedure Set_Up_Child_Communications (Pid : in out Process_Descriptor; Pipe1 : in out Pipe_Type; Pipe2 : in out Pipe_Type; Pipe3 : in out Pipe_Type; Cmd : String; Args : System.Address) is pragma Warnings (Off, Pid); Input : GNAT.OS_Lib.File_Descriptor; Output : GNAT.OS_Lib.File_Descriptor; Error : GNAT.OS_Lib.File_Descriptor; begin -- Since Windows does not have a separate fork/exec, we need to -- perform the following actions: -- - save stdin, stdout, stderr -- - replace them by our pipes -- - create the child with process handle inheritance -- - revert to the previous stdin, stdout and stderr. Input := Dup (GNAT.OS_Lib.Standin); Output := Dup (GNAT.OS_Lib.Standout); Error := Dup (GNAT.OS_Lib.Standerr); -- Since we are still called from the parent process, there is no way -- currently we can cleanly close the unneeded ends of the pipes, but -- this doesn't really matter. -- We could close Pipe1.Output, Pipe2.Input, Pipe3.Input. Dup2 (Pipe1.Input, GNAT.OS_Lib.Standin); Dup2 (Pipe2.Output, GNAT.OS_Lib.Standout); Dup2 (Pipe3.Output, GNAT.OS_Lib.Standerr); Portable_Execvp (Pid.Pid'Access, Cmd & Ada.Characters.Latin_1.NUL, Args); -- The following commands are not executed on Unix systems, and are -- only required for Windows systems. We are now in the parent process. -- Restore the old descriptors Dup2 (Input, GNAT.OS_Lib.Standin); Dup2 (Output, GNAT.OS_Lib.Standout); Dup2 (Error, GNAT.OS_Lib.Standerr); GNAT.OS_Lib.Close (Input); GNAT.OS_Lib.Close (Output); GNAT.OS_Lib.Close (Error); end Set_Up_Child_Communications; --------------------------- -- Set_Up_Communications -- --------------------------- procedure Set_Up_Communications (Pid : in out Process_Descriptor; Err_To_Out : Boolean; Pipe1 : access Pipe_Type; Pipe2 : access Pipe_Type; Pipe3 : access Pipe_Type) is procedure Set_Close_On_Exec (FD : GNAT.OS_Lib.File_Descriptor; Close_On_Exec : Boolean; Status : out Boolean); procedure Set_Close_On_Exec (FD : GNAT.OS_Lib.File_Descriptor; Close_On_Exec : Boolean; Status : out Boolean) is begin GNAT.OS_Lib.Set_Close_On_Exec (FD, Close_On_Exec, Status); end Set_Close_On_Exec; Status : Boolean; pragma Unreferenced (Status); begin -- Create the pipes if Create_Pipe (Pipe1) /= 0 then return; end if; if Create_Pipe (Pipe2) /= 0 then return; end if; -- Record the 'parent' end of the two pipes in Pid: -- Child stdin is connected to the 'write' end of Pipe1; -- Child stdout is connected to the 'read' end of Pipe2. -- We do not want these descriptors to remain open in the child -- process, so we mark them close-on-exec/non-inheritable. Pid.Input_Fd := Pipe1.Output; Set_Close_On_Exec (Pipe1.Output, True, Status); Pid.Output_Fd := Pipe2.Input; Set_Close_On_Exec (Pipe2.Input, True, Status); if Err_To_Out then -- Reuse the standard output pipe for standard error Pipe3.all := Pipe2.all; else -- Create a separate pipe for standard error if Create_Pipe (Pipe3) /= 0 then return; end if; end if; -- As above, we record the proper fd for the child's -- standard error stream. 
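      -- Summary of the wiring established here and in
      -- Set_Up_Child_Communications:
      --   Pipe1: parent writes Pipe1.Output -> child reads stdin  from Pipe1.Input
      --   Pipe2: child writes stdout to Pipe2.Output -> parent reads Pipe2.Input
      --   Pipe3: child writes stderr to Pipe3.Output -> parent reads Pipe3.Input
      --          (Pipe3 is simply a copy of Pipe2 when Err_To_Out is set)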
Pid.Error_Fd := Pipe3.Input; Set_Close_On_Exec (Pipe3.Input, True, Status); end Set_Up_Communications; ---------------------------------- -- Set_Up_Parent_Communications -- ---------------------------------- procedure Set_Up_Parent_Communications (Pid : in out Process_Descriptor; Pipe1 : in out Pipe_Type; Pipe2 : in out Pipe_Type; Pipe3 : in out Pipe_Type) is pragma Warnings (Off, Pid); begin GNAT.OS_Lib.Close (Pipe1.Input); GNAT.OS_Lib.Close (Pipe2.Output); GNAT.OS_Lib.Close (Pipe3.Output); end Set_Up_Parent_Communications; ------------------ -- Trace_Filter -- ------------------ procedure Trace_Filter (Descriptor : Process_Descriptor'Class; Str : String; User_Data : System.Address := System.Null_Address) is pragma Warnings (Off, Descriptor); pragma Warnings (Off, User_Data); begin GNAT.IO.Put (Str); end Trace_Filter; -------------------- -- Unlock_Filters -- -------------------- procedure Unlock_Filters (Descriptor : in out Process_Descriptor) is begin if Descriptor.Filters_Lock > 0 then Descriptor.Filters_Lock := Descriptor.Filters_Lock - 1; end if; end Unlock_Filters; end SPARK.Expect; spark-2012.0.deb/sparksimp/wrap.ads0000644000175000017500000000254211753202340016112 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Text_IO; package Wrap is -- Copies SimpLine to the given File, wrapping the output -- at or near 80 columns, splitting onto multiple lines -- as needed. A line may be wrapped at any of the following -- characters: ' ', '(', ')', '[', or ']' procedure CopyAndMaybeWrapLine (File : in Ada.Text_IO.File_Type; SimpLine : in String); end Wrap; spark-2012.0.deb/sparksimp/wrap.adb0000644000175000017500000001367111753202340016076 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Ada.Characters.Latin_1; package body Wrap is --------------------------------------------------------- -- Most of this code has been borrowed and adapted from -- spade/simplifier/utils/wraps. Functionally, it's -- the same as wrap_utility, but this code has been -- brought up to date for Ada95, GNAT, and -gnaty style --------------------------------------------------------- -- Maximum no. of output columns normally permitted MaxCols : constant := 80; -- Columns to indent for 2nd. & subsequent wraps Indentation : constant := 10; SubsequentLineWidth : constant := MaxCols - Indentation; -- Copies a line, wrapping if it appears to be necessary. procedure CopyAndMaybeWrapLine (File : in Ada.Text_IO.File_Type; SimpLine : in String) is function IsALongLine (L : in String) return Boolean; -- Writes out the given slice of L procedure OutPartOfLine (L : in String; FromCol, ToCol : in Positive); -- Writes out line-buffer L unchanged, without any line-wrapping. procedure OutUnchangedLine (L : in String); -- Write out line-buffer L, wrapping where necessary. procedure OutWrappedLine (L : in String); -- True if we need to wrap the line around - i.e. more than MaxCols. function IsALongLine (L : in String) return Boolean is begin return L'Length > MaxCols; end IsALongLine; procedure OutPartOfLine (L : in String; FromCol, ToCol : in Positive) is begin Ada.Text_IO.Put (File, L (FromCol .. ToCol)); end OutPartOfLine; procedure OutUnchangedLine (L : in String) is begin Ada.Text_IO.Put_Line (File, L); end OutUnchangedLine; procedure OutWrappedLine (L : in String) is procedure OutNextPartOfLine (L : in String; FromCol : in out Positive; InWidth : in Positive); procedure Indent; OnCol, Width : Positive; procedure OutNextPartOfLine (L : in String; FromCol : in out Positive; InWidth : in Positive) is function OKSplitChar (C : in Character) return Boolean; ToCol : Natural; -- Returns true if C is a space, parenthesis or bracket. function OKSplitChar (C : in Character) return Boolean is use Ada.Characters.Latin_1; begin return (C = Space) or else (C = Left_Parenthesis) or else (C = Right_Parenthesis) or else (C = Left_Square_Bracket) or else (C = Right_Square_Bracket); end OKSplitChar; begin -- OutNextPartOfLine ToCol := FromCol + InWidth - 1; -- if line can't be split at the exact length you want it, -- search left along the text for the first feasible place if not OKSplitChar (L (ToCol)) then -- drat! loop ToCol := ToCol - 1; exit when OKSplitChar (L (ToCol)) or else (ToCol = FromCol); end loop; -- if the line can't be split at any point then search right -- along the text i.e. the line will be longer than you want if ToCol = FromCol then -- double drat! ToCol := FromCol + InWidth - 1; loop exit when (ToCol >= L'Length) or else OKSplitChar (L (ToCol)); ToCol := ToCol + 1; end loop; end if; end if; OutPartOfLine (L, FromCol, ToCol); FromCol := ToCol + 1; end OutNextPartOfLine; procedure Indent is subtype IndentIndex is Positive range 1 .. 
Indentation; subtype IndentString is String (IndentIndex); IndentC : constant IndentString := IndentString'(others => ' '); begin Ada.Text_IO.Put (File, IndentC); end Indent; begin -- OutWrappedLine OnCol := 1; -- Start at column 1 Width := MaxCols; -- To start with while OnCol + Width <= L'Length loop OutNextPartOfLine (L, OnCol, Width); Width := SubsequentLineWidth; Ada.Text_IO.New_Line (File, 1); Indent; end loop; if OnCol <= L'Length then OutPartOfLine (L, OnCol, L'Length); Ada.Text_IO.New_Line (File, 1); end if; end OutWrappedLine; begin -- CopyAndMaybeWrapLine -- if line is too long, wrap it, otherwise output it unchanged if IsALongLine (SimpLine) then OutWrappedLine (SimpLine); else OutUnchangedLine (SimpLine); end if; end CopyAndMaybeWrapLine; end Wrap; spark-2012.0.deb/sparksimp/utility.ads0000644000175000017500000000437311753202340016650 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Streams.Stream_IO; package Utility is subtype String_1_Index is Positive range 1 .. 1; subtype String_1 is String (String_1_Index); subtype String_3_Index is Positive range 1 .. 3; subtype String_3 is String (String_3_Index); -- Subprograms Format_Int, Get_File_Size and Put_Message_With_Duration -- were previously in Safe_IO. -- returns Item as a String of just the right given Width function Format_Int (Item : in Integer; Width : in Integer) return String; -- Read and return size of File, using Ada.Streams.Stream_IO -- On a 32-bit machine that allows files >2GB, I doubt this will -- work, but if anyone has a 2GB VCG file, then that's their -- problem! 
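   -- Illustrative call (hypothetical caller; "foo.vcg" is a made-up name):
   --    Size : constant Ada.Streams.Stream_IO.Count :=
   --              Utility.Get_File_Size ("foo.vcg");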
function Get_File_Size (File : in String) return Ada.Streams.Stream_IO.Count; procedure Put_Message_With_Duration (Message : in String; D : in Duration); -- Print Str to Standard_Output if CMD.Verbose is True procedure Debug (Str : in String); -- True iff File is >= 5 chars and ends in ".vcg" (case insensitive) function Is_A_VCG_File (File : in String) return Boolean; -- True iff File is >= 5 chars and ends in ".dpc" (case insensitive) function Is_A_DPC_File (File : in String) return Boolean; end Utility; spark-2012.0.deb/sparksimp/work_manager.adb0000644000175000017500000002067011753202340017576 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Strings; with Ada.Strings.Fixed; with Ada.Text_IO; with GNAT.Heap_Sort_G; with CMD; with Utility; package body Work_Manager is package body Jobs is type Count_Per_Status is array (JobStatus) of Job_Index; Next_Job : Job_Index := 1; Job_Counts : Count_Per_Status := Count_Per_Status'(others => 0); procedure Check_Invariant is Total : Job_Index := 0; begin for I in JobStatus loop Total := Total + Job_Counts (I); end loop; if Total /= Work_Table.Last then Utility.Debug ("Invariant failure in Work_Manager"); for I in JobStatus loop Utility.Debug (Natural'Image (Job_Counts (I))); end loop; Utility.Debug (Natural'Image (Work_Table.Last)); end if; end Check_Invariant; procedure Add_Work_Package (File : in String; JobType : in AnalysisType) is begin Work_Table.Increment_Last; Job_Counts (Pending) := Job_Counts (Pending) + 1; Work_Table.Table (Work_Table.Last) := Work_Package' (File_Name => new String'(File), File_Size => Utility.Get_File_Size (File), Analysis => JobType, Status => Pending, Worker => 0, WhyFailed => NullErrorString); Check_Invariant; end Add_Work_Package; procedure GetNextJob (Job_ID : out Job_Index) is begin -- Get the next job. Job_ID := Next_Job; -- Set the status of the job, and the worker. 
Work_Table.Table (Next_Job).Status := InProgress; Job_Counts (InProgress) := Job_Counts (InProgress) + 1; Job_Counts (Pending) := Job_Counts (Pending) - 1; Next_Job := Next_Job + 1; Check_Invariant; end GetNextJob; function Get_File_Name (Job : in Job_Index) return String is begin return Work_Table.Table (Job).File_Name.all; end Get_File_Name; function Get_Simple_File_Name (Job : in Job_Index) return String is On_File : constant String := Get_File_Name (Job); -- Find the first directory separator from the right hand -- end of File_Name, so we can split into the directory, -- and the plain file name Dir_Index : constant Natural := Ada.Strings.Fixed.Index (On_File, Utility.String_1'(1 => GNAT.OS_Lib.Directory_Separator), Ada.Strings.Backward); -- Simple file name of file to be processed, with 4-char suffix -- (e.g. ".vcg" or ".dpc") removed SF : constant String := On_File (Dir_Index + 1 .. (On_File'Last - 4)); begin return SF; end Get_Simple_File_Name; function Get_Analysis_Type (Job : in Job_Index) return AnalysisType is begin return Work_Table.Table (Job).Analysis; end Get_Analysis_Type; function Get_HasFailed (Job : in Job_Index) return Boolean is begin return Work_Table.Table (Job).Status = Failed; end Get_HasFailed; function Get_WhyFailed (Job : in Job_Index) return ErrorString is begin return Work_Table.Table (Job).WhyFailed; end Get_WhyFailed; procedure JobFinished (Job : in Job_Index) is begin -- Mark the job as completed. Work_Table.Table (Job).Status := Finished; Job_Counts (Finished) := Job_Counts (Finished) + 1; Job_Counts (InProgress) := Job_Counts (InProgress) - 1; Check_Invariant; end JobFinished; -- Signal that a job has failed and record the reason. procedure JobFailed (Job : in Job_Index; FailReason : in ErrorString) is begin -- Mark the job as failed. 
Work_Table.Table (Job).Status := Failed; Work_Table.Table (Job).WhyFailed := FailReason; Work_Manager.AnyFailed := True; Job_Counts (Failed) := Job_Counts (Failed) + 1; Job_Counts (InProgress) := Job_Counts (InProgress) - 1; Check_Invariant; end JobFailed; procedure Sort_Files_By_Size is use Ada.Streams.Stream_IO; Heap_Sort_Temp : Work_Package; procedure Move_Work_Package (From : in Natural; To : in Natural) is begin if From = 0 then Work_Table.Table (To) := Heap_Sort_Temp; elsif To = 0 then Heap_Sort_Temp := Work_Table.Table (From); else Work_Table.Table (To) := Work_Table.Table (From); end if; end Move_Work_Package; function Lt_Work_Package (Op1 : in Natural; Op2 : in Natural) return Boolean is File_Size_Op1 : Ada.Streams.Stream_IO.Count; File_Size_Op2 : Ada.Streams.Stream_IO.Count; begin if Op1 = 0 then File_Size_Op1 := Heap_Sort_Temp.File_Size; else File_Size_Op1 := Work_Table.Table (Op1).File_Size; end if; if Op2 = 0 then File_Size_Op2 := Heap_Sort_Temp.File_Size; else File_Size_Op2 := Work_Table.Table (Op2).File_Size; end if; return File_Size_Op1 < File_Size_Op2; end Lt_Work_Package; function Lt_Work_Package_Reversed (Op1 : in Natural; Op2 : in Natural) return Boolean is begin return not Lt_Work_Package (Op1, Op2); end Lt_Work_Package_Reversed; package WP_Heap_Sort_Asc is new GNAT.Heap_Sort_G (Move_Work_Package, Lt_Work_Package); package WP_Heap_Sort_Dsc is new GNAT.Heap_Sort_G (Move_Work_Package, Lt_Work_Package_Reversed); begin if CMD.Sort_VCGs then if CMD.Reverse_Order then WP_Heap_Sort_Asc.Sort (Work_Table.Last); else WP_Heap_Sort_Dsc.Sort (Work_Table.Last); end if; end if; end Sort_Files_By_Size; procedure Display_Status_Banner is begin Ada.Text_IO.Put_Line ("Job-ID Status Filename"); Ada.Text_IO.Put_Line ("====== ====== ========"); end Display_Status_Banner; function Total_Number_Of_Files return Job_Index is begin return Work_Table.Last; end Total_Number_Of_Files; function Number_Of_Files (Of_Status : in JobStatus) return Job_Index is begin return Job_Counts (Of_Status); end Number_Of_Files; -- Display the list of jobs to do, to the screen. procedure List_Jobs is begin if Total_Number_Of_Files = 0 then Ada.Text_IO.Put_Line ("No VCG or DPC files were found that require processing"); else Ada.Text_IO.Put_Line ("Files to be simplified are:"); for I in Natural range 1 .. Total_Number_Of_Files loop Ada.Text_IO.Put_Line (Work_Table.Table (I).File_Name.all & "," & Ada.Streams.Stream_IO.Count'Image (Work_Table.Table (I).File_Size) & " bytes"); end loop; Ada.Text_IO.New_Line; Ada.Text_IO.Put_Line (Natural'Image (Total_Number_Of_Files) & " files require processing"); end if; Ada.Text_IO.New_Line; end List_Jobs; procedure Clear is begin Work_Table.Set_Last (0); end Clear; end Jobs; end Work_Manager; spark-2012.0.deb/victor_wrapper/0000755000175000017500000000000011753203760015511 5ustar eugeneugenspark-2012.0.deb/victor_wrapper/victor_wrapper.adb0000644000175000017500000005612111753202341021226 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with GNAT.OS_Lib; with GNAT.Strings; with SPARK_IO; with SPARK.Ada.Strings.Unbounded.Not_SPARK; with Command_Line; with Ada.Containers.Vectors; use type SPARK_IO.File_Status; use type Command_Line.Solver_T; package body Victor_Wrapper is type Environment_T is record Victor_Binary : E_Strings.T; -- Absolute path for vct. Prover_Binary : E_Strings.T; -- Absolute path for alt-ergo. Library_Directory : E_Strings.T; -- Absolute path for the directory containing prelude.fdl, -- divmod.fdl and divmod.rul. Vcg_File_Exists : Boolean; -- True if a vcg file is present. Siv_File_Exists : Boolean; -- True if a siv file is present. end record; -- A wrapper around GNAT.OS_Lib.Locate_Exe_On_Path, which also -- generates a useful error message if the specified executable -- cannot be found. procedure Find_On_Path (Executable_Name : in String; Found : out Boolean; Error : out E_Strings.T; Actual_Path : out E_Strings.T) is --# hide Find_On_Path; use type GNAT.OS_Lib.String_Access; Tmp : GNAT.OS_Lib.String_Access; begin Error := E_Strings.Empty_String; Tmp := GNAT.OS_Lib.Locate_Exec_On_Path (Executable_Name); Found := Tmp /= null; -- Try again, with .exe appended. if not Found then Tmp := GNAT.OS_Lib.Locate_Exec_On_Path (Executable_Name & ".exe"); Found := Tmp /= null; end if; if not Found then Error := E_Strings.Copy_String ("Could not locate '"); E_Strings.Append_String (Error, Executable_Name); E_Strings.Append_String (Error, "' on path."); Actual_Path := E_Strings.Empty_String; else Actual_Path := E_Strings.Copy_String (Tmp.all); end if; end Find_On_Path; -- Tries to open and close the given file and sets is_valid -- depending on success. The name of the file opened is -- 'Basename & Extension', either of which can be blank. procedure Valid_File (Basename : in E_Strings.T; Extension : in String; Is_Valid : out Boolean) --# global in out SPARK_IO.File_Sys; is Tmp_File : SPARK_IO.File_Type := SPARK_IO.Null_File; Tmp_Status : SPARK_IO.File_Status; Tmp_Name : E_Strings.T; begin Tmp_Name := Basename; E_Strings.Append_String (Tmp_Name, Extension); E_Strings.Open (File => Tmp_File, Mode_Of_File => SPARK_IO.In_File, Name_Of_File => Tmp_Name, Form_Of_File => "", Status => Tmp_Status); Is_Valid := Tmp_Status = SPARK_IO.Ok; if Is_Valid then --# accept Flow, 10, Tmp_File, "We're closing the file, so we don't care."; SPARK_IO.Close (Tmp_File, Tmp_Status); --# end accept; Is_Valid := Tmp_Status = SPARK_IO.Ok; end if; end Valid_File; -- Given a path, for example foo/bar/baz.vcg or foo/bar/, this -- function returns the underlying directory. Thus, in both cases, -- it will return "foo/bar/". Note the trailing slash. function Dirname (Filename : E_Strings.T) return E_Strings.T is Retval : E_Strings.T; begin Retval := Filename; if E_Strings.Get_Length (Filename) > 0 then for I in reverse E_Strings.Positions range E_Strings.Positions'First .. 
E_Strings.Get_Length (Filename) loop --# assert I <= E_Strings.Get_Length (Filename) and E_Strings.Get_Length (Filename) > 0; if E_Strings.Get_Element (Filename, I) = '/' or E_Strings.Get_Element (Filename, I) = '\' then Retval := E_Strings.Section (E_Str => Filename, Start_Pos => E_Strings.Positions'First, Length => I); exit; end if; end loop; end if; return Retval; end Dirname; -- This procedure will determine the location of the various -- binaries, libraries and other files necessary to run vct. If -- anything is missing, the error string will be set accordingly. procedure Check_Environment (CL : in Command_Line.Command_Line_Data_T; Ok : out Boolean; Error : out E_Strings.T; Environment : out Environment_T) --# global in out SPARK_IO.File_Sys; is Tmp : Boolean; -- A helper procedure to require a particular file. If it cannot -- be found then Ok and Error of the enclosing procedure are -- set accordingly. procedure Require_File (Basename : in E_Strings.T; Extension : in String) --# global in out Error; --# in out Ok; --# in out SPARK_IO.File_Sys; is begin if Ok then Valid_File (Basename, Extension, Ok); if not Ok then Error := E_Strings.Copy_String ("Could not find required file '"); E_Strings.Append_Examiner_String (Error, Basename); E_Strings.Append_String (Error, Extension); E_Strings.Append_String (Error, "'"); end if; end if; end Require_File; begin -- Initialise with some defaults. Environment := Environment_T' (Victor_Binary => E_Strings.Empty_String, Prover_Binary => E_Strings.Empty_String, Library_Directory => E_Strings.Empty_String, Vcg_File_Exists => False, Siv_File_Exists => False); Error := E_Strings.Empty_String; Ok := True; -- Check for the existence of the vcg and siv files. Valid_File (CL.Unit_Name, ".vcg", Environment.Vcg_File_Exists); Valid_File (CL.Unit_Name, ".siv", Environment.Siv_File_Exists); -- Check that we've got at least a valid vcg file. if not Environment.Vcg_File_Exists then Ok := False; Error := E_Strings.Copy_String ("Could not find a vcg file for unit '"); E_Strings.Append_Examiner_String (Error, CL.Unit_Name); E_Strings.Append_Char (Error, '''); end if; --# assert True; -- Find vct if Ok then Find_On_Path (Executable_Name => "vct", Found => Ok, Error => Error, Actual_Path => Environment.Victor_Binary); end if; --# assert True; -- Find alt-ergo (or other supported solvers) if Ok then case CL.Solver is when Command_Line.Alt_Ergo => Find_On_Path (Executable_Name => "alt-ergo", Found => Ok, Error => Error, Actual_Path => Environment.Prover_Binary); when Command_Line.CVC3 => Find_On_Path (Executable_Name => "cvc3", Found => Ok, Error => Error, Actual_Path => Environment.Prover_Binary); when Command_Line.CVC4_SMTLIB1 | Command_Line.CVC4 => Find_On_Path (Executable_Name => "cvc4", Found => Ok, Error => Error, Actual_Path => Environment.Prover_Binary); when Command_Line.Yices => Find_On_Path (Executable_Name => "yices", Found => Ok, Error => Error, Actual_Path => Environment.Prover_Binary); when Command_Line.Z3 => Find_On_Path (Executable_Name => "z3", Found => Ok, Error => Error, Actual_Path => Environment.Prover_Binary); end case; end if; --# assert True; -- Check for prelude.fdl, prelude.rul and divmod.rul if Ok then -- First we check for the files in their 'compiled' -- location. Environment.Library_Directory := Dirname (Environment.Victor_Binary); E_Strings.Append_String (Environment.Library_Directory, "../run/"); Valid_File (Environment.Library_Directory, "prelude.fdl", Tmp); if not Tmp then -- If the prelude isn't there, the rest won't be -- either.
Let's look into the 'installed' location -- instead. Environment.Library_Directory := Dirname (Environment.Victor_Binary); E_Strings.Append_String (Environment.Library_Directory, "../share/spark/"); end if; Require_File (Environment.Library_Directory, "prelude.fdl"); Require_File (Environment.Library_Directory, "prelude.rul"); Require_File (Environment.Library_Directory, "divmod.rul"); end if; end Check_Environment; procedure Execute (CL : in Command_Line.Command_Line_Data_T; Ok : out Boolean; Error : out E_Strings.T) is Environment : Environment_T; -- This hidden procedure asseembles the Argument_List used to -- call vct from Environment and CL and then calls Spawn. procedure Do_Spawn --# global in CL; --# in Environment; --# in out Error; --# in out Ok; is --# hide Do_Spawn; use type GNAT.Strings.String_List; use type GNAT.Strings.String_Access; type Interface_Mode_T is (SMTLIB, SMTLIB2); package SVec is new Ada.Containers.Vectors (Element_Type => GNAT.Strings.String_Access, Index_Type => Positive); -- Vector for all arguments passed to vct. Vct_Args : SVec.Vector := SVec.Empty_Vector; -- Directory containing prelude.fdl, etc. Victor_Library : constant String := SPARK.Ada.Strings.Unbounded.Not_SPARK.To_String (E_Strings.To_Unbounded_String (Environment.Library_Directory)); -- The name of the unit to translate with vct. Unit_Name : constant String := SPARK.Ada.Strings.Unbounded.Not_SPARK.To_String (E_Strings.To_Unbounded_String (CL.Unit_Name)); -- The actual invocation of the solver. Solver_Invocation : E_Strings.T; Interface_Mode : Interface_Mode_T; begin -- The most important argument, the solver. We also work out -- what kind of interface to use. Solver_Invocation := Environment.Prover_Binary; case CL.Solver is when Command_Line.Alt_Ergo => if CL.Proof_Steps > 0 then E_Strings.Append_String (Solver_Invocation, " -steps " & Natural'Image (CL.Proof_Steps)); end if; Interface_Mode := SMTLIB; when Command_Line.CVC3 => E_Strings.Append_String (Solver_Invocation, " -lang smt"); Interface_Mode := SMTLIB; when Command_Line.CVC4_SMTLIB1 => E_Strings.Append_String (Solver_Invocation, " --lang smtlib"); Interface_Mode := SMTLIB; when Command_Line.CVC4 => E_Strings.Append_String (Solver_Invocation, " --lang smtlib2"); Interface_Mode := SMTLIB2; when Command_Line.Yices => E_Strings.Append_String (Solver_Invocation, " -smt"); Interface_Mode := SMTLIB; when Command_Line.Z3 => E_Strings.Append_String (Solver_Invocation, " -smt"); Interface_Mode := SMTLIB; end case; SVec.Append (Vct_Args, new String'("-prover-command=" & SPARK.Ada.Strings.Unbounded.Not_SPARK.To_String (E_Strings.To_Unbounded_String (Solver_Invocation)))); -- Standard arguments. 
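      -- (Orientation note, illustrative only: with the default options and
      -- the alt-ergo solver, the spawned command ends up looking roughly like
      --    vct -prover-command="<alt-ergo> -steps 5000" -fuse-concls
      --        -decls=<lib>prelude.fdl -rules=<lib>prelude.rul ...
      --        -report=<unit> <unit>
      -- where <alt-ergo>, <lib> and <unit> stand for the prover path, the
      -- library directory and the unit name; every switch shown is one of
      -- those appended in this procedure, and the exact set depends on the
      -- interface mode and command-line options.)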
SVec.Append (Vct_Args, new String'("-fuse-concls")); SVec.Append (Vct_Args, new String'("-decls=" & Victor_Library & "prelude.fdl")); SVec.Append (Vct_Args, new String'("-rules=" & Victor_Library & "prelude.rul")); SVec.Append (Vct_Args, new String'("-rules=" & Victor_Library & "divmod.rul")); SVec.Append (Vct_Args, new String'("-elim-enums")); SVec.Append (Vct_Args, new String'("-ground-eval-exp")); SVec.Append (Vct_Args, new String'("-abstract-exp")); SVec.Append (Vct_Args, new String'("-abstract-divmod")); SVec.Append (Vct_Args, new String'("-utick")); SVec.Append (Vct_Args, new String'("-gtick")); SVec.Append (Vct_Args, new String'("-gstime-inc-setup")); SVec.Append (Vct_Args, new String'("-longtick")); SVec.Append (Vct_Args, new String'("-echo-final-stats")); SVec.Append (Vct_Args, new String'("-csv-reports-include-unit-kind")); SVec.Append (Vct_Args, new String'("-level=warning")); -- General SMTLIB options. See vct/run/Makefile-prelude.mk -- for a mapping and ideas. -- A-D are only included in SMTLIB1; for SMTLIB2 they are -- skipped entirely. if Interface_Mode = SMTLIB then -- A SVec.Append (Vct_Args, new String'("-refine-bit-type-as-int-subtype")); -- B SVec.Append (Vct_Args, new String'("-refine-bit-eq-equiv")); -- C -- None. -- D -- None. end if; -- E SVec.Append (Vct_Args, new String'("-abstract-arrays-records-late")); -- F SVec.Append (Vct_Args, new String'("-elim-record-constructors")); SVec.Append (Vct_Args, new String'("-add-record-select-update-axioms")); SVec.Append (Vct_Args, new String'("-abstract-record-selects-updates")); -- G -- None. SVec.Append (Vct_Args, new String'("-logic=AUFNIRA")); -- Again, see vct/run/Makefile-prelude.mk case Interface_Mode is when SMTLIB => SVec.Append (Vct_Args, new String'("-interface-mode=smtlib")); SVec.Append (Vct_Args, new String'("-bit-type")); SVec.Append (Vct_Args, new String'("-bit-type-bool-eq-to-iff")); SVec.Append (Vct_Args, new String'("-refine-types")); SVec.Append (Vct_Args, new String'("-refine-int-subrange-type")); SVec.Append (Vct_Args, new String'("-elim-array-constructors")); SVec.Append (Vct_Args, new String'("-add-array-select-box-update-axioms")); SVec.Append (Vct_Args, new String'("-abstract-array-box-updates")); SVec.Append (Vct_Args, new String'("-add-array-select-update-axioms")); SVec.Append (Vct_Args, new String'("-abstract-array-select-updates")); SVec.Append (Vct_Args, new String'("-abstract-array-types")); SVec.Append (Vct_Args, new String'("-abstract-record-types")); SVec.Append (Vct_Args, new String'("-abstract-bit-ops")); SVec.Append (Vct_Args, new String'("-abstract-bit-valued-eqs")); SVec.Append (Vct_Args, new String'("-abstract-bit-valued-int-le")); SVec.Append (Vct_Args, new String'("-elim-bit-type-and-consts")); SVec.Append (Vct_Args, new String'("-abstract-reals")); SVec.Append (Vct_Args, new String'("-lift-quants")); SVec.Append (Vct_Args, new String'("-strip-quantifier-patterns")); SVec.Append (Vct_Args, new String'("-elim-type-aliases")); -- Extra arguments for the various provers. See -- vct/run/Makefile-prelude.mk for more information. 
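            -- (For Yices this abstracts non-linear multiplication and selects
            -- the weaker AUFLIA logic, which appears to take precedence over
            -- the AUFNIRA default requested above.)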
case CL.Solver is when Command_Line.Yices => SVec.Append (Vct_Args, new String'("-abstract-nonlin-times")); SVec.Append (Vct_Args, new String'("-logic=AUFLIA")); when others => null; end case; when SMTLIB2 => SVec.Append (Vct_Args, new String'("-interface-mode=smtlib2")); SVec.Append (Vct_Args, new String'("-refine-types")); SVec.Append (Vct_Args, new String'("-refine-int-subrange-type")); SVec.Append (Vct_Args, new String'("-elim-array-constructors")); SVec.Append (Vct_Args, new String'("-add-array-select-box-update-axioms")); SVec.Append (Vct_Args, new String'("-abstract-array-box-updates")); SVec.Append (Vct_Args, new String'("-add-array-select-update-axioms")); SVec.Append (Vct_Args, new String'("-abstract-array-select-updates")); SVec.Append (Vct_Args, new String'("-abstract-array-types")); SVec.Append (Vct_Args, new String'("-abstract-record-types")); SVec.Append (Vct_Args, new String'("-lift-quants")); SVec.Append (Vct_Args, new String'("-strip-quantifier-patterns")); -- Extra arguments for the various provers. See -- vct/run/Makefile-prelude.mk for more information. case CL.Solver is when Command_Line.Alt_Ergo => SVec.Append (Vct_Args, new String'("-elim-type-aliases")); SVec.Append (Vct_Args, new String'("-smtlib2-add-to_real-decl")); when Command_Line.CVC3 => SVec.Append (Vct_Args, new String'("-elim-type-aliases")); SVec.Append (Vct_Args, new String'("-smtlib2-omit-set-option-command")); SVec.Append (Vct_Args, new String'("-smtlib2-add-to_real-decl")); when others => null; end case; end case; if CL.Time_Out > 0 then SVec.Append (Vct_Args, new String'("-ulimit-timeout=" & Natural'Image (CL.Time_Out))); end if; if CL.Memory_Limit > 0 then -- Remember, we store the memory limit in megabytes, so -- we need to multiply by 1024 here. This multiplication -- is safe because the range of Memory_Limit_T is chosen -- appropriately. SVec.Append (Vct_Args, new String'("-ulimit-memory=" & Natural'Image (CL.Memory_Limit * 1024))); end if; -- If there is a siv file and -v has not been specified, we -- should look at that instead. if Environment.Siv_File_Exists and not CL.Ignore_SIV then SVec.Append (Vct_Args, new String'("-siv")); end if; if CL.Plain then SVec.Append (Vct_Args, new String'("-plain")); else SVec.Append (Vct_Args, new String'("-gstime")); end if; if CL.User_Rules then SVec.Append (Vct_Args, new String'("-read-unit-rlu-files")); SVec.Append (Vct_Args, new String'("-read-directory-rlu-files")); -- Global rules likely contain rules with identifiers -- which are relevant to only some programs, so we better -- delete the ones we don't know about. (Otherwise vct -- will abort with an FDL normalisation error). SVec.Append (Vct_Args, new String'("-delete-rules-with-undeclared-ids")); end if; if CL.Keep_Temp then SVec.Append (Vct_Args, new String'("-flat-working-files")); SVec.Append (Vct_Args, new String'("-working-dir=.")); else SVec.Append (Vct_Args, new String'("-unique-working-files")); SVec.Append (Vct_Args, new String'("-delete-working-files")); end if; -- Finally, we add the unit to analyse to the arguments. SVec.Append (Vct_Args, new String'("-report=" & Unit_Name)); SVec.Append (Vct_Args, new String'(Unit_Name)); -- Other misc. setup below. if CL.Solver = Command_Line.Alt_Ergo then -- In order for alt-ergo to avoid using the hard-coded -- path, we set the ERGOLIB environment variable to point -- to the same location where the prelude.fdl, etc. files -- are contained. 
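         -- (Setenv alters the environment of this wrapper process; the
         -- spawned vct and alt-ergo processes inherit it, which is how the
         -- setting reaches alt-ergo.)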
GNAT.OS_Lib.Setenv (Name => "ERGOLIB", Value => Victor_Library); end if; -- At last; run vct with the given arguments. declare subtype Arg_Index is Positive range 1 .. Positive (SVec.Length (Vct_Args)); subtype Arg_List_T is GNAT.OS_Lib.Argument_List (Arg_Index); Args : Arg_List_T; begin -- Copy the argument list. for I in Arg_Index loop Args (I) := SVec.Element (Vct_Args, I); end loop; -- This is not strictly necessary since Spawn itself -- calls this, but if the Spawn call fails we want to -- print out the arguments exactly as used by Spawn. GNAT.OS_Lib.Normalize_Arguments (Args); GNAT.OS_Lib.Spawn (Program_Name => SPARK.Ada.Strings.Unbounded.Not_SPARK.To_String (E_Strings.To_Unbounded_String (Environment.Victor_Binary)), Args => Args, Success => Ok); -- Set a useful error message, containing all the -- arguments used. if not Ok then Error := E_Strings.Copy_String ("Program '"); E_Strings.Append_Examiner_String (Error, Environment.Victor_Binary); E_Strings.Append_String (Error, "' could not be spawned. Arguments were:"); for I in Positive range Args'Range loop E_Strings.Append_Char (Error, ' '); E_Strings.Append_String (Error, Args (I).all); end loop; end if; end; end Do_Spawn; begin -- Check some basic stuff, including presence of binaries and vcg/siv files. Check_Environment (CL => CL, Ok => Ok, Error => Error, Environment => Environment); -- Proceed only if everything is in order. if Ok then Do_Spawn; end if; end Execute; end Victor_Wrapper; spark-2012.0.deb/victor_wrapper/all.wrn0000644000175000017500000000013211753202341016777 0ustar eugeneugen-- Warning control file for the ViCToR Wrapper hidden_parts notes pragma all with_clauses spark-2012.0.deb/victor_wrapper/Makefile0000644000175000017500000000466511753202341017156 0ustar eugeneugen# ----------------------------------------------------------------------------- # (C) Altran Praxis Limited # ----------------------------------------------------------------------------- # # The SPARK toolset is free software; you can redistribute it and/or modify it # under terms of the GNU General Public License as published by the Free # Software Foundation; either version 3, or (at your option) any later # version. The SPARK toolset is distributed in the hope that it will be # useful, but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the GNU # General Public License distributed with the SPARK toolset; see file # COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of # the license. # # ============================================================================= ################################################################################ # PURPOSE #------------------------------------------------------------------------------- # Makefile for the Victor Wrapper # For Linux, Windows, Solaris or Mac OS X (aka Darwin). ################################################################################ ################################################################################ # BUILD CONFIGURATION ################################################################################ # Name of the output program. OUTPUT_NAME:=victor # Location of root. ROOT:=.. # Location of common. 
COMMON:=${ROOT}/common include ${COMMON}/Makefile.inc ################################################################################ # TARGETS ################################################################################ all: ${OUTPUT_NAME}${EXE_EXTN} ${OUTPUT_NAME}${EXE_EXTN}: preamble prep gnatmake -j${SPARKCPUS} ${GNATMAKE_OPTS} ${OUTPUT_NAME} -o $@ -bargs ${BIND_OPTS} self-analysis: preamble prep -spark -plain @${OUTPUT_NAME}.smf # Initialisations # =============== preamble: $(MAKE) -C ${ROOT}/examiner clean # Platform specific prepping # ========================== prep: $(MAKE) -C ${ROOT}/examiner prep # Cleaning code base # ================== clean: standardclean reallyclean: clean targetclean vcclean preamble $(MAKE) -C ${ROOT}/examiner reallyclean ################################################################################ # END-OF-FILE spark-2012.0.deb/victor_wrapper/spark.sw0000644000175000017500000000025211753202341017175 0ustar eugeneugen-sparklib -output_directory=vcg -config_file=../common/gnat.cfg -listing_extension=ls_ -casing -index_file=victor.idx -report=victor.rep -flow=data -vcg -dpc -rules=lazy spark-2012.0.deb/victor_wrapper/victor.idx0000644000175000017500000000204711753202341017522 0ustar eugeneugen------------------------------------------------------------------------------ -- Components from Examiner ------------------------------------------------------------------------------ commonstring specification is in ../examiner/commonstring.ads commonstringutilities specification is in ../examiner/commonstringutilities.ads e_strings specification is in ../examiner/e_strings.ads examinerconstants specification is in ../examiner/examinerconstants.ads spark_io specification is in ../examiner/spark_io.ads ------------------------------------------------------------------------------ -- Version ------------------------------------------------------------------------------ version specification is in ../common/versioning/version.ads ------------------------------------------------------------------------------ -- ViCToR Wrapper ------------------------------------------------------------------------------ command_line specification is in command_line.ads banner specification is in banner.ads victor_wrapper specification is in victor_wrapper.ads spark-2012.0.deb/victor_wrapper/victor_wrapper.ads0000644000175000017500000000312411753202341021242 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Command_Line; with E_Strings; --# inherit Command_Line, --# E_Strings, --# SPARK.Ada.Strings.Unbounded, --# SPARK_IO; package Victor_Wrapper is -- This procedure does two things: -- -- 1. It first checks that the environment is OK, i.e. all -- relevant binaries, libraries and vcg/siv files are present. -- -- 2. It then assembles the argument list to vct and spawns. procedure Execute (CL : in Command_Line.Command_Line_Data_T; Ok : out Boolean; Error : out E_Strings.T); --# global in out SPARK_IO.File_Sys; end Victor_Wrapper; spark-2012.0.deb/victor_wrapper/command_line.ads0000644000175000017500000000471011753202341020623 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with E_Strings; --# inherit E_Strings; package Command_Line is subtype Memory_Limit_T is Natural range Natural'First .. Natural'Last / 1024; type Solver_T is ( Alt_Ergo, CVC3, CVC4_SMTLIB1, CVC4, Yices, Z3); type Command_Line_Data_T is record Unit_Name : E_Strings.T; Proof_Steps : Natural; Time_Out : Natural; Memory_Limit : Memory_Limit_T; -- In megabytes as opposed to kbytes Ignore_SIV : Boolean; Plain : Boolean; Show_Help : Boolean; Solver : Solver_T; User_Rules : Boolean; Keep_Temp : Boolean; end record; -- If you change any of those, please also update the help text in banner.adb. Default_Options : constant Command_Line_Data_T := Command_Line_Data_T' (Unit_Name => E_Strings.Empty_String, Proof_Steps => 5000, Time_Out => 0, Memory_Limit => 0, Ignore_SIV => False, Plain => False, Show_Help => False, Solver => Alt_Ergo, User_Rules => True, Keep_Temp => False); -- Parse command line arguments and options. procedure Initialize (Data : out Command_Line_Data_T; Ok : out Boolean; Error : out E_Strings.T); -- Set the exit status of the program to a non-zero value. 
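   -- Typical use from the main program (illustrative sketch only; the real
   -- driver is victor.adb, listed in victor.smf):
   --    Command_Line.Initialize (Data, Ok, Error);
   --    if not Ok then
   --       -- report Error, then:
   --       Command_Line.Set_Exit_Status_Error;
   --    end if;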
procedure Set_Exit_Status_Error; end Command_Line; spark-2012.0.deb/victor_wrapper/victor.smf0000644000175000017500000000007211753202341017517 0ustar eugeneugenvictor.adb command_line.adb banner.adb victor_wrapper.adb spark-2012.0.deb/victor_wrapper/vcg/0000755000175000017500000000000011753203760016270 5ustar eugeneugenspark-2012.0.deb/victor_wrapper/command_line.adb0000644000175000017500000002023511753202341020602 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Command_Line; with GNAT.Command_Line; package body Command_Line is procedure Int_From_Commandline (E_Str : in E_Strings.T; Limit_Low : in Integer; Limit_High : in Integer; Output : out Integer; Ok : out Boolean; Error : out E_Strings.T) --# pre Limit_Low <= Limit_High; --# post Output >= Limit_Low and Output <= Limit_High; is Tmp_Str : E_Strings.T; Tmp_Nat : Natural; begin -- Chop off the '=' at the start, if present if E_Strings.Get_Length (E_Str) >= 2 and then E_Strings.Get_Element (E_Str, 1) = '=' then Tmp_Str := E_Strings.Section (E_Str => E_Str, Start_Pos => 2, Length => E_Strings.Get_Length (E_Str) - 1); else Tmp_Str := E_Str; end if; E_Strings.Get_Int_From_String (Source => Tmp_Str, Item => Output, Start_Pt => 1, Stop => Tmp_Nat); if Tmp_Nat = E_Strings.Get_Length (Tmp_Str) and Output >= Limit_Low and Output <= Limit_High then Ok := True; Error := E_Strings.Empty_String; else Output := Limit_Low; Ok := False; Error := E_Strings.Copy_String ("Parameter must be a valid number. 
Got '"); E_Strings.Append_Examiner_String (Error, Tmp_Str); E_Strings.Append_String (Error, "' instead."); end if; end Int_From_Commandline; procedure Solver_From_Commandline (E_Str : in E_Strings.T; Output : out Solver_T; Ok : out Boolean; Error : out E_Strings.T) is Tmp_Str : E_Strings.T; begin -- Chop off the '=' at the start, if present if E_Strings.Get_Length (E_Str) >= 2 and then E_Strings.Get_Element (E_Str, 1) = '=' then Tmp_Str := E_Strings.Section (E_Str => E_Str, Start_Pos => 2, Length => E_Strings.Get_Length (E_Str) - 1); else Tmp_Str := E_Str; end if; Ok := True; Error := E_Strings.Empty_String; if E_Strings.Eq1_String (Tmp_Str, "alt-ergo") then Output := Alt_Ergo; elsif E_Strings.Eq1_String (Tmp_Str, "cvc3") then Output := CVC3; elsif E_Strings.Eq1_String (Tmp_Str, "cvc4_smtlib1") then Output := CVC4_SMTLIB1; elsif E_Strings.Eq1_String (Tmp_Str, "cvc4") then Output := CVC4; elsif E_Strings.Eq1_String (Tmp_Str, "yices") then Output := Yices; elsif E_Strings.Eq1_String (Tmp_Str, "z3") then Output := Z3; else Output := Alt_Ergo; Ok := False; Error := E_Strings.Copy_String ("Invalid/unsupported solver."); end if; end Solver_From_Commandline; procedure Initialize (Data : out Command_Line_Data_T; Ok : out Boolean; Error : out E_Strings.T) is --# hide Initialize; Tmp_Int : Integer; begin Data := Default_Options; Ok := True; Error := E_Strings.Empty_String; begin loop case GNAT.Command_Line.Getopt ("h help m: plain solver: steps: t: v nouserrules keep") is when ASCII.NUL => exit; when 'h' => -- Both the -h and -help options. Data.Show_Help := True; when 'k' => Data.Keep_Temp := True; when 'm' => Int_From_Commandline (E_Str => E_Strings.Copy_String (GNAT.Command_Line.Parameter), Limit_Low => Memory_Limit_T'First, Limit_High => Memory_Limit_T'Last, Output => Tmp_Int, Ok => Ok, Error => Error); if Ok then Data.Memory_Limit := Memory_Limit_T'(Tmp_Int); end if; when 'n' => Data.User_Rules := False; when 'p' => Data.Plain := True; when 's' => if GNAT.Command_Line.Full_Switch = "steps" then Int_From_Commandline (E_Str => E_Strings.Copy_String (GNAT.Command_Line.Parameter), Limit_Low => Natural'First, Limit_High => Natural'Last, Output => Tmp_Int, Ok => Ok, Error => Error); if Ok then Data.Proof_Steps := Natural'(Tmp_Int); end if; elsif GNAT.Command_Line.Full_Switch = "solver" then Solver_From_Commandline (E_Str => E_Strings.Copy_String (GNAT.Command_Line.Parameter), Output => Data.Solver, Ok => Ok, Error => Error); else null; end if; when 't' => Int_From_Commandline (E_Str => E_Strings.Copy_String (GNAT.Command_Line.Parameter), Limit_Low => Natural'First, Limit_High => Natural'Last, Output => Tmp_Int, Ok => Ok, Error => Error); if Ok then Data.Time_Out := Natural'(Tmp_Int); end if; when 'v' => Data.Ignore_SIV := True; when others => Ok := False; Error := E_Strings.Copy_String ("Could not parse commandline."); exit; end case; end loop; loop declare S : constant String := GNAT.Command_Line.Get_Argument (Do_Expansion => True); begin exit when S'Length = 0; if E_Strings.Is_Empty (Data.Unit_Name) then Data.Unit_Name := E_Strings.Copy_String (S); else Ok := False; Error := E_Strings.Copy_String ("You can only specify a single unit."); end if; end; end loop; exception when GNAT.Command_Line.Invalid_Switch => Ok := False; Error := E_Strings.Copy_String ("Invalid commandline switch " & GNAT.Command_Line.Full_Switch); when GNAT.Command_Line.Invalid_Parameter => Ok := False; Error := E_Strings.Copy_String ("No parameter for " & GNAT.Command_Line.Full_Switch); end; -- Finally, check that a unit is 
given. If not, we just show -- the help text. if E_Strings.Is_Empty (Data.Unit_Name) then Data.Show_Help := True; end if; end Initialize; procedure Set_Exit_Status_Error is --# hide Set_Exit_Status_Error; begin Ada.Command_Line.Set_Exit_Status (1); end Set_Exit_Status_Error; end Command_Line; spark-2012.0.deb/victor_wrapper/banner.ads0000644000175000017500000000237511753202341017450 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= --# inherit Command_Line, --# E_Strings, --# SPARK_IO, --# Version; package Banner is procedure Show_Banner (Plain_Mode : in Boolean); --# global in out SPARK_IO.File_Sys; procedure Show_Help (Plain_Mode : in Boolean); --# global in out SPARK_IO.File_Sys; end Banner; spark-2012.0.deb/victor_wrapper/banner.adb0000644000175000017500000001132711753202341017424 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with SPARK_IO; with Version; with Command_Line; package body Banner is procedure Put_Str (S : in String) --# global in out SPARK_IO.File_Sys; is begin SPARK_IO.Put_String (SPARK_IO.Standard_Output, S, 0); end Put_Str; procedure Put_Char (C : in Character) --# global in out SPARK_IO.File_Sys; is begin SPARK_IO.Put_Char (SPARK_IO.Standard_Output, C); end Put_Char; procedure Put_Int (I : in Integer) --# global in out SPARK_IO.File_Sys; is begin SPARK_IO.Put_Integer (File => SPARK_IO.Standard_Output, Item => I, Width => 0, Base => 10); end Put_Int; procedure Put_Line (S : in String) --# global in out SPARK_IO.File_Sys; is begin SPARK_IO.Put_Line (SPARK_IO.Standard_Output, S, 0); end Put_Line; procedure New_Line --# global in out SPARK_IO.File_Sys; is begin SPARK_IO.New_Line (SPARK_IO.Standard_Output, Positive'First); end New_Line; procedure Show_Banner (Plain_Mode : in Boolean) is begin Put_Str ("Victor Wrapper"); if not Plain_Mode then Put_Char (' '); Put_Str (Version.Toolset_Banner_Line); end if; New_Line; Put_Line ("Using ViCToR (vct) which is currently an unsupported experimental feature."); end Show_Banner; procedure Show_Help (Plain_Mode : in Boolean) is --# for Command_Line.Default_Options declare rule; begin Put_Line ("Usage: victor [OPTIONS] UNIT_NAME"); New_Line; Put_Line ("UNIT_NAME is set of VCs to prove. For example, for the VC's"); Put_Line (" contained in foo/bar.vcg, specify 'foo/bar' as the unit name."); New_Line; Put_Line ("OPTIONS can be one or more of the below:"); Put_Line (" -h / -help Show this help message."); Put_Line (" -v Ignore .siv files and only use .vcg files."); Put_Line (" -plain Plain mode - supress timings and versions."); Put_Line (" -nouserrules Do not use user rules."); Put_Line (" -keep Keep intermediate SMT files in the current directory."); --# check Command_Line.Default_Options.Solver = Command_Line.Alt_Ergo; Put_Line (" -solver=X Select a different SMT solver. Supported values are:"); Put_Line (" alt-ergo [default] (Shipped with SPARK)"); Put_Line (" cvc3 (http://www.cs.nyu.edu/acsys/cvc3)"); -- Very experimental. -- Put_Line (" cvc4_smtlib1 (http://cs.nyu.edu/acsys/cvc4)"); -- Put_Line (" cvc4"); Put_Line (" yices (http://yices.csl.sri.com)"); -- License is rather problematic. -- Put_Line (" z3 (http://research.microsoft.com/projects/z3)"); New_Line; Put_Line ("OPTIONS available for alt-ergo only:"); Put_Str (" -steps=N Stop after N proof steps. Default is "); Put_Int (Command_Line.Default_Options.Proof_Steps); Put_Line (" steps."); New_Line; Put_Line ("OPTIONS available on GNU/Linux only:"); --# check Command_Line.Default_Options.Time_Out = 0; Put_Line (" -t=N Time-out SMT solver after N seconds. Default is no timeout."); --# check Command_Line.Default_Options.Memory_Limit = 0; Put_Line (" -m=X Limit SMT solver to X megabytes of virtual memory. 
Default is no limit."); if not Plain_Mode then New_Line; Put_Line (Version.Toolset_Support_Line1); Put_Line (Version.Toolset_Support_Line2); Put_Line (Version.Toolset_Support_Line3); Put_Line (Version.Toolset_Support_Line4); end if; end Show_Help; end Banner; spark-2012.0.deb/victor_wrapper/victor.adb0000644000175000017500000000464211753202341017467 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Command_Line; with SPARK_IO; with E_Strings; with Banner; with Victor_Wrapper; --# inherit Banner, --# Command_Line, --# E_Strings, --# SPARK_IO, --# Victor_Wrapper; --# main_program; procedure Victor --# global in out SPARK_IO.File_Sys; is Command_Line_Data : Command_Line.Command_Line_Data_T; Command_Line_Ok : Boolean; Victor_Ok : Boolean := True; Error_String : E_Strings.T; begin Command_Line.Initialize (Data => Command_Line_Data, Ok => Command_Line_Ok, Error => Error_String); -- Note that Initialize will correctly set Plain regardless Banner.Show_Banner (Plain_Mode => Command_Line_Data.Plain); --# accept Flow, 22, "Command_Line_Ok is not invariant."; if Command_Line_Ok and not Command_Line_Data.Show_Help then -- Call vct only if no errors in the command line and we don't -- want to show the help text. Victor_Wrapper.Execute (CL => Command_Line_Data, Ok => Victor_Ok, Error => Error_String); else -- Otherwise show the help text. Banner.Show_Help (Plain_Mode => Command_Line_Data.Plain); end if; if not (Command_Line_Ok and Victor_Ok) then SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Error: ", 0); E_Strings.Put_Line (SPARK_IO.Standard_Output, Error_String); Command_Line.Set_Exit_Status_Error; end if; end Victor; spark-2012.0.deb/examiner/0000755000175000017500000000000011753203755014257 5ustar eugeneugenspark-2012.0.deb/examiner/lextokenlists.adb0000644000175000017500000001144511753202336017636 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SystemErrors; package body LexTokenLists is function Prefix_Unit (Poss_Prefix, Prefixed : Lists) return Boolean is Result : Boolean; begin if Poss_Prefix.Length <= Prefixed.Length then Result := True; for I in Positions range 1 .. Poss_Prefix.Length loop Result := Result and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Poss_Prefix.Content (I), Lex_Str2 => Prefixed.Content (I)) = LexTokenManager.Str_Eq; exit when not Result; end loop; else Result := False; end if; return Result; end Prefix_Unit; function Eq_Unit (First_Item, Second : Lists) return Boolean is Result : Boolean; begin if First_Item.Length = Second.Length then Result := True; for I in Positions range 1 .. First_Item.Length loop Result := Result and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => First_Item.Content (I), Lex_Str2 => Second.Content (I)) = LexTokenManager.Str_Eq; exit when not Result; end loop; else Result := False; end if; return Result; end Eq_Unit; procedure Append (List : in out Lists; Item : in LexTokenManager.Lex_String) is begin if List.Length = ExaminerConstants.Lex_Token_Lists_Max_Length then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Unit_Nesting_Too_Deep, Msg => "in LexTokenLists"); end if; List.Length := List.Length + 1; List.Content (List.Length) := Item; end Append; procedure Print_List (File : in SPARK_IO.File_Type; List : in Lists) is begin for I in Positions range 1 .. List.Length loop E_Strings.Put_String (File => File, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => List.Content (I))); if I < List.Length then SPARK_IO.Put_Char (File => File, Item => '.'); end if; end loop; end Print_List; function Token_List_To_String (Token_List : Lists) return E_Strings.T is Full_Str, E_Str : E_Strings.T; begin if Token_List.Length > 0 then Full_Str := LexTokenManager.Lex_String_To_String (Lex_Str => Token_List.Content (1)); for I in Positions range 2 .. 
Token_List.Length loop E_Str := LexTokenManager.Lex_String_To_String (Lex_Str => Token_List.Content (I)); E_Strings.Append_String (E_Str => Full_Str, Str => "."); E_Strings.Append_Examiner_String (E_Str1 => Full_Str, E_Str2 => E_Str); end loop; else Full_Str := E_Strings.Empty_String; end if; return Full_Str; end Token_List_To_String; function Get_Length (List : Lists) return Lengths is begin return List.Length; end Get_Length; function Get_Element (List : Lists; Pos : Positions) return LexTokenManager.Lex_String is Result : LexTokenManager.Lex_String := LexTokenManager.Null_String; begin if Pos <= List.Length then Result := List.Content (Pos); end if; return Result; end Get_Element; procedure Pop (List : in out Lists; Item : out LexTokenManager.Lex_String) is begin if List.Length > 0 then Item := List.Content (List.Length); List.Content (List.Length) := LexTokenManager.Null_String; List.Length := List.Length - 1; else Item := LexTokenManager.Null_String; end if; end Pop; end LexTokenLists; spark-2012.0.deb/examiner/date_time.adb0000644000175000017500000002534111753202336016661 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= package body Date_Time is type Time_T is record Day : Days_T; Month : Months_T; Year : Years_T; Hour : Hours_T; Minutes : Minutes_T; Seconds : Seconds_T; end record; -- This function parses the given 3-character month and returns -- its number. On error, this function returns 0. This function -- assumes the month abbreviation is in English. function Month_To_Number (E_Str : E_Strings.T) return Natural --# return X => X = 0 or X in Months_T; is Result : Natural; begin Result := 0; for I in Months_T loop if E_Strings.Eq1_String (E_Str, Month_Names (I)) then Result := I; end if; --# assert Result = 0 or Result = I; exit when Result > 0; end loop; return Result; end Month_To_Number; -- This function returns true iff all characters in E_Str in -- the given range are digits. function Is_Digit_Range (E_Str : E_Strings.T; Low : E_Strings.Positions; High : E_Strings.Positions) return Boolean --# pre Low <= High and High <= E_Strings.Get_Length (E_Str); -- Once we use victor, we can also prove the below. The loop -- invariant below will also require the obvious adjustment. -- return (for all I in E_Strings.Positions range Low .. High -> -- (E_Strings.Get_Element (E_Str, I) in '0' .. '9')); is Result : Boolean; begin for I in E_Strings.Positions range Low .. High loop Result := E_Strings.Get_Element (E_Str, I) in '0' .. '9'; exit when not Result; --# assert Result and I in Low .. 
High and Low = Low% and High = High%; end loop; --# accept F, 501, Result, "The value will always be defined", --# "since the loop range cannot be empty." & --# F, 602, Result, "Result is never undefined."; return Result; end Is_Digit_Range; -- This procedure parses a timestamp given in the DD-MMM-YYYY, -- HH:MM:SS format (where the comma is optional). On any -- parsing error Success is set to False, otherwise Time will -- contain the parsed timestamp. This function assumes the -- month abbreviation is in English. procedure Parse_Time (Timestamp : in E_Strings.T; Success : out Boolean; Time : out Time_T) --# derives Success, --# Time from Timestamp; is Dummy : Natural; Tmp : Integer; Len : E_Strings.Lengths; Start_Of_Time : E_Strings.Positions; begin Time := Time_T' (Day => Days_T'First, Month => Months_T'First, Year => Years_T'First, Hour => Hours_T'First, Minutes => Minutes_T'First, Seconds => Seconds_T'First); -- Cache the string length. Len := E_Strings.Get_Length (Timestamp); --# accept F, 10, Dummy, "We do not care about any assignment to the dummy variable." & --# F, 33, Dummy, "We don't use the dummy variable to derive anything."; -- The date is always 10 characters long. if Len >= 11 then Success := Is_Digit_Range (Timestamp, 1, 2) and then E_Strings.Get_Element (Timestamp, 3) = '-' and then E_Strings.Get_Element (Timestamp, 7) = '-' and then Is_Digit_Range (Timestamp, 8, 11); if Success then -- Obtain the day. E_Strings.Get_Int_From_String (Source => E_Strings.Section (E_Str => Timestamp, Start_Pos => 1, Length => 2), Item => Tmp, Start_Pt => 1, Stop => Dummy); if Tmp in Days_T then Time.Day := Days_T'(Tmp); else Success := False; end if; --# assert Len = E_Strings.Get_Length (Timestamp); -- Obtain the month. Since Month_To_Number returns -- zero on error, we need to check for that. Tmp := Month_To_Number (E_Strings.Section (E_Str => Timestamp, Start_Pos => 4, Length => 3)); if Tmp in Months_T then Time.Month := Months_T'(Tmp); else Success := False; end if; --# assert Len = E_Strings.Get_Length (Timestamp); -- Obtain the year. E_Strings.Get_Int_From_String (Source => E_Strings.Section (E_Str => Timestamp, Start_Pos => 8, Length => 4), Item => Tmp, Start_Pt => 1, Stop => Dummy); if Tmp in Years_T then Time.Year := Years_T'(Tmp); else Success := False; end if; end if; else Success := False; end if; --# assert Len = E_Strings.Get_Length (Timestamp); if Success and Len >= 12 then if E_Strings.Get_Element (Timestamp, 12) = ',' then Start_Of_Time := 13; else Start_Of_Time := 12; end if; else Success := False; Start_Of_Time := E_Strings.Positions'First; end if; --# assert Len = E_Strings.Get_Length (Timestamp) and Start_Of_Time <= 13; -- The actual time consists out 8 more characters after a -- mandatory space. if Success and Len >= Start_Of_Time + 8 then Success := E_Strings.Get_Element (Timestamp, Start_Of_Time) = ' ' and then Is_Digit_Range (Timestamp, Start_Of_Time + 1, Start_Of_Time + 2) and then E_Strings.Get_Element (Timestamp, Start_Of_Time + 3) = ':' and then Is_Digit_Range (Timestamp, Start_Of_Time + 4, Start_Of_Time + 5) and then E_Strings.Get_Element (Timestamp, Start_Of_Time + 6) = ':' and then Is_Digit_Range (Timestamp, Start_Of_Time + 7, Start_Of_Time + 8); if Success then -- Obtain the hour. 
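-- Illustrative note (not from the original source): for a timestamp such
-- as "01-JAN-2012 12:34:56", Start_Of_Time indexes the mandatory space,
-- so positions Start_Of_Time + 1 .. Start_Of_Time + 2 hold the hour,
-- + 4 .. + 5 the minutes and + 7 .. + 8 the seconds, matching the three
-- Get_Int_From_String calls below.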
E_Strings.Get_Int_From_String (Source => E_Strings.Section (E_Str => Timestamp, Start_Pos => Start_Of_Time + 1, Length => 2), Item => Tmp, Start_Pt => 1, Stop => Dummy); if Tmp in Hours_T then Time.Hour := Hours_T'(Tmp); else Success := False; end if; --# assert Len = E_Strings.Get_Length (Timestamp) and Start_Of_Time <= 13; -- Obtain the minutes. E_Strings.Get_Int_From_String (Source => E_Strings.Section (E_Str => Timestamp, Start_Pos => Start_Of_Time + 4, Length => 2), Item => Tmp, Start_Pt => 1, Stop => Dummy); if Tmp in Minutes_T then Time.Minutes := Minutes_T'(Tmp); else Success := False; end if; --# assert Len = E_Strings.Get_Length (Timestamp) and Start_Of_Time <= 13; -- Obtain the seconds. E_Strings.Get_Int_From_String (Source => E_Strings.Section (E_Str => Timestamp, Start_Pos => Start_Of_Time + 7, Length => 2), Item => Tmp, Start_Pt => 1, Stop => Dummy); if Tmp in Seconds_T then Time.Seconds := Seconds_T'(Tmp); else Success := False; end if; end if; else Success := False; end if; end Parse_Time; function Compare_Timestamps (Timestamp_A : E_Strings.T; Timestamp_B : E_Strings.T) return Timestamp_Comparison_Result_T is -- This can be used to loop over all fields in Time_T, above. type One_To_Six is range 1 .. 6; Time_A, Time_B : Time_T; Success : Boolean; Result : Timestamp_Comparison_Result_T; Tmp_A, Tmp_B : Natural; begin Result := Malformed_Timestamps; Parse_Time (Timestamp_A, Success, Time_A); if Success then Parse_Time (Timestamp_B, Success, Time_B); if Success then Result := A_Equals_B; for I in One_To_Six loop case I is when 1 => Tmp_A := Time_A.Year; Tmp_B := Time_B.Year; when 2 => Tmp_A := Time_A.Month; Tmp_B := Time_B.Month; when 3 => Tmp_A := Time_A.Day; Tmp_B := Time_B.Day; when 4 => Tmp_A := Time_A.Hour; Tmp_B := Time_B.Hour; when 5 => Tmp_A := Time_A.Minutes; Tmp_B := Time_B.Minutes; when 6 => Tmp_A := Time_A.Seconds; Tmp_B := Time_B.Seconds; end case; --# assert Result = A_Equals_B; if Tmp_A > Tmp_B then Result := A_Greater_Than_B; elsif Tmp_A < Tmp_B then Result := A_Less_Than_B; end if; exit when Result /= A_Equals_B; end loop; end if; end if; return Result; end Compare_Timestamps; end Date_Time; spark-2012.0.deb/examiner/sli.adb0000644000175000017500000033155311753202336015522 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with CommandLineData; with ContextManager.Ops; with E_Strings; with FileSystem; with IndexManager; with LexTokenLists; with SLI.IO; with SLI.Xref; with SPARK_IO; with SP_Symbols; with SystemErrors; with Version; use type CommandLineData.Concurrency_Profiles; use type CommandLineData.Language_Profiles; use type SP_Symbols.SP_Symbol; package body SLI --# own State is SLI.Xref.State, --# out SLI.IO.Stream_Buffer; is function "=" (Left, Right : IO.File_Status) return Boolean renames IO."="; procedure Create_File (File_Descriptor : in ContextManager.FileDescriptors) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# out IO.Stream_Buffer; --# derives IO.Stream_Buffer from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# File_Descriptor, --# LexTokenManager.State & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# File_Descriptor, --# LexTokenManager.State; is SLI_File_Status : IO.File_Status; SLI_File_Name : E_Strings.T; begin -- Build the SLI filename. SLI_File_Name := FileSystem.Just_File (Fn => LexTokenManager.Lex_String_To_String (Lex_Str => ContextManager.Ops.GetSourceFileName (Descriptor => File_Descriptor)), Ext => False); E_Strings.Append_String (E_Str => SLI_File_Name, Str => ".sli"); CommandLineData.Normalize_File_Name_To_Output_Directory (F => SLI_File_Name); -- Create the SLI file. IO.Create_File (Name_Of_File => SLI_File_Name, Status => SLI_File_Status); if SLI_File_Status /= IO.Ok then SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "Unable to open SLI file: ", Stop => 0); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => SLI_File_Name); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => ""); end if; end Create_File; procedure Close_File --# global out IO.Stream_Buffer; --# derives IO.Stream_Buffer from ; is begin -- Close the SLI file. 
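-- Note (assumption, not stated in the original source): IO.Close is
-- expected to flush any output still held in IO.Stream_Buffer before
-- releasing the .sli file that Create_File opened.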
IO.Close; end Close_File; procedure Header (File_Descriptor : in ContextManager.FileDescriptors) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in Dictionary.Dict; --# in STree.Table; --# in out ContextManager.Ops.Unit_Heap; --# in out ErrorHandler.Error_Context; --# in out IndexManager.State; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# out IO.Stream_Buffer; --# derives ContextManager.Ops.Unit_Heap from * & --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# File_Descriptor, --# IndexManager.State, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table & --# IO.Stream_Buffer from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# File_Descriptor, --# IndexManager.State, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table, --# XMLReport.State & --# XMLReport.State from *, --# CommandLineData.Content; is It : ContextManager.UnitDescriptors; Unit_Pointer_Spec : ContextManager.UnitDescriptors; Unit_Pointer_Body : ContextManager.UnitDescriptors; Unit_Name : LexTokenLists.Lists; Unit_Type : ContextManager.UnitTypes; Str : E_Strings.T; Lex_String_Source_File_Name : LexTokenManager.Lex_String; Source_File_Name : E_Strings.T; Actual_Unit_Type : ContextManager.UnitTypes; Found : Boolean; Line_Number : Positive; -- Write the A section (see A section of the ALI format) -- containing all the arguments given to the Examiner in -- the SLI file. procedure Print_Args --# global in CommandLineData.Content; --# in SPARK_IO.File_Sys; --# in out XMLReport.State; --# out IO.Stream_Buffer; --# derives IO.Stream_Buffer from CommandLineData.Content, --# SPARK_IO.File_Sys, --# XMLReport.State & --# XMLReport.State from *, --# CommandLineData.Content; is Option_Str : E_Strings.T; begin CommandLineData.Output_Command_Line (Prefix => "A -", XML => False, Option_Str => Option_Str); IO.E_Strings_Put_String (E_Str => Option_Str); end Print_Args; -- Write a dotted name identifier (List) in the SLI file. procedure Print_List (List : in LexTokenLists.Lists) --# global in LexTokenManager.State; --# out IO.Stream_Buffer; --# derives IO.Stream_Buffer from LexTokenManager.State, --# List; is begin for I in LexTokenLists.Positions range 1 .. LexTokenLists.Get_Length (List => List) loop IO.E_Strings_Put_String (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => LexTokenLists.Get_Element (List => List, Pos => I))); if I < LexTokenLists.Get_Length (List => List) then IO.Put_Char (Item => '.'); end if; end loop; end Print_List; begin -- Header -- Write the V, A, P, R sections. if CommandLineData.Content.Plain_Output then IO.Put_Line (Item => "V ""SPARK 0.0.0"""); else IO.Put_Line (Item => "V ""SPARK " & Version.Toolset_Version & """"); end if; Print_Args; IO.Put_Line (Item => "P"); IO.Put_Line (Item => "R"); IO.New_Line; -- Find the Unit_Pointer_Body of the File_Descriptor. ContextManager.Ops.Get_Unit (Descriptor => File_Descriptor, Unit_Descriptor => Unit_Pointer_Body); -- Write the U section. 
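-- Illustrative example of the unit lines written below (names invented):
-- for a package body Foo compiled from foo.adb, with its specification
-- held in foo.ads, the output is
--   U foo%b foo.adb
--   U foo%s foo.ads
-- i.e. the dotted unit name, a %s/%b suffix and the simple file name.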
ContextManager.Ops.GetUnitName (Descriptor => Unit_Pointer_Body, UnitName => Unit_Name, UnitType => Unit_Type); IO.Put_String (Item => "U "); Print_List (List => Unit_Name); Str := LexTokenManager.Lex_String_To_String (Lex_Str => ContextManager.Ops.GetSourceFileName (Descriptor => File_Descriptor)); if Unit_Type = ContextManager.PackageSpecification then -- It is an Ada package specification. Unit_Pointer_Spec := Unit_Pointer_Body; IO.Put_String (Item => "%s "); IO.E_Strings_Put_Line (E_Str => FileSystem.Just_File (Fn => Str, Ext => True)); elsif Unit_Type = ContextManager.PackageBody then -- It is an Ada package body. IO.Put_String (Item => "%b "); IO.E_Strings_Put_Line (E_Str => FileSystem.Just_File (Fn => Str, Ext => True)); -- Find the specification of the Unit_Pointer_Body. ContextManager.Ops.GetUnitByName (UnitName => Unit_Name, UnitTypeSet => ContextManager.PackageSpecificationSet, Descriptor => Unit_Pointer_Spec); IO.Put_String (Item => "U "); Print_List (List => Unit_Name); IO.Put_String (Item => "%s "); IO.E_Strings_Put_Line (E_Str => FileSystem.Just_File (Fn => LexTokenManager.Lex_String_To_String (Lex_Str => ContextManager.Ops.GetSourceFileName (Descriptor => ContextManager.Ops.Get_File_Descriptor (Unit_Descriptor => Unit_Pointer_Spec))), Ext => True)); elsif Unit_Type = ContextManager.SubUnit or else Unit_Type = ContextManager.MainProgram or else Unit_Type = ContextManager.GenericSubprogramBody then -- It is an Ada separate unit or an Ada main program. Unit_Pointer_Spec := ContextManager.NullUnit; IO.Put_String (Item => "%b "); IO.E_Strings_Put_Line (E_Str => FileSystem.Just_File (Fn => Str, Ext => True)); else Unit_Pointer_Spec := ContextManager.NullUnit; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.HEADER PROGRAM ERROR"); end if; -- Write the W section. It := ContextManager.Ops.FirstUnitDescriptor; while It /= ContextManager.NullUnit loop ContextManager.Ops.GetUnitName (Descriptor => It, UnitName => Unit_Name, UnitType => Unit_Type); if Unit_Type = ContextManager.PackageSpecification and then ContextManager.Ops.In_Closure (Descriptor => It) and then It /= Unit_Pointer_Spec then -- It is an Ada package specification in the closure. IO.Put_String (Item => "W "); Print_List (List => Unit_Name); IO.Put_String (Item => "%s "); --# accept Flow, 10, Actual_Unit_Type, "Expected ineffective assignment to Actual_Unit_Type"; IndexManager.Look_Up (Required_Unit => Unit_Name, Possible_Unit_Types => ContextManager.SubUnitSet, Source_Filename => Lex_String_Source_File_Name, Actual_Unit_Type => Actual_Unit_Type, Found => Found); --# end accept; if not Found then -- The SLI file is not associated with an Ada package -- body or an Ada separate unit. Lex_String_Source_File_Name := ContextManager.Ops.GetSourceFileName (Descriptor => ContextManager.Ops.Get_File_Descriptor (Unit_Descriptor => It)); end if; Source_File_Name := LexTokenManager.Lex_String_To_String (Lex_Str => Lex_String_Source_File_Name); IO.E_Strings_Put_String (E_Str => FileSystem.Just_File (Fn => Source_File_Name, Ext => True)); IO.Put_Char (Item => ' '); IO.E_Strings_Put_String (E_Str => FileSystem.Just_File (Fn => Source_File_Name, Ext => False)); IO.Put_Line (Item => ".sli"); end if; It := ContextManager.Ops.NextUnitDescriptor (Descriptor => It); end loop; IO.New_Line; -- Write the D section. 
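-- Illustrative example (file name invented): each unit in the closure
-- contributes one dependency line of the form
--   D foo.ads 00000000000000 00000000
-- where the timestamp and checksum fields are written as fixed zero
-- placeholders.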
Line_Number := 1; It := ContextManager.Ops.FirstUnitDescriptor; while It /= ContextManager.NullUnit loop if ContextManager.Ops.In_Closure (Descriptor => It) then -- It is an Ada package in the closure. IO.Put_String (Item => "D "); IO.E_Strings_Put_String (E_Str => FileSystem.Just_File (Fn => LexTokenManager.Lex_String_To_String (Lex_Str => ContextManager.Ops.GetSourceFileName (Descriptor => ContextManager.Ops.Get_File_Descriptor (Unit_Descriptor => It))), Ext => True)); IO.Put_Line (Item => " 00000000000000 00000000"); -- Keep the line number in the SLI file. ContextManager.Ops.Set_Line_Number (Descriptor => It, Line_Number => Line_Number); Line_Number := Line_Number + 1; end if; It := ContextManager.Ops.NextUnitDescriptor (Descriptor => It); end loop; --# accept Flow, 33, Actual_Unit_Type, "Expected Actual_Unit_Type to be neither referenced nor exported"; end Header; procedure Dump_Xref (File_Descriptor : in ContextManager.FileDescriptors) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Xref.State; --# out IO.Stream_Buffer; --# derives ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# File_Descriptor, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table & --# IO.Stream_Buffer from ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# File_Descriptor, --# LexTokenManager.State, --# Xref.State & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# File_Descriptor, --# LexTokenManager.State, --# STree.Table, --# Xref.State & --# Xref.State from *, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# File_Descriptor, --# LexTokenManager.State; is Unit_Descriptor : ContextManager.UnitDescriptors; begin ContextManager.Ops.Get_Unit (Descriptor => File_Descriptor, Unit_Descriptor => Unit_Descriptor); Xref.Dump (Comp_Unit => Unit_Descriptor); end Dump_Xref; procedure Look_Up (Prefix : in out Dictionary.Symbol; Scope : in Dictionary.Scopes; Subprog_Sym : in Dictionary.Symbol; Lex_Str : in LexTokenManager.Lex_String; Pos : in LexTokenManager.Token_Position; Full_Package_Name : in Boolean) is Prefix_Str : E_Strings.T; Sym : Dictionary.Symbol; begin if Dictionary.Is_Null_Symbol (Prefix) then -- Try to find the symbol definition in the dictionary. Sym := Dictionary.LookupItem (Name => Lex_Str, Scope => Scope, Context => Dictionary.ProofContext, Full_Package_Name => Full_Package_Name); if Dictionary.Is_Null_Symbol (Sym) and then not Dictionary.Is_Null_Symbol (Subprog_Sym) then -- Try to find the symbol definition with the -- procedure/function name as prefix in the dictionary. Sym := Dictionary.LookupSelectedItem (Prefix => Subprog_Sym, Selector => Lex_Str, Scope => Scope, Context => Dictionary.ProofContext); end if; Prefix_Str := E_Strings.Empty_String; else -- Try to find the symbol definition with a prefix in the -- dictionary. 
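-- Illustrative note (names invented): for a selected name such as P.X,
-- Prefix already holds the symbol for P and Lex_Str holds "X", so
-- LookupSelectedItem resolves X within P; Prefix_Str is only built up
-- here for the debug output below.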
Sym := Dictionary.LookupSelectedItem (Prefix => Prefix, Selector => Lex_Str, Scope => Scope, Context => Dictionary.ProofContext); Prefix_Str := Dictionary.GenerateSimpleName (Item => Prefix, Separator => "."); E_Strings.Append_String (E_Str => Prefix_Str, Str => "."); end if; if Dictionary.Is_Null_Symbol (Sym) and then CommandLineData.Content.Debug.SLI then -- Symbol not found -> show the error. SPARK_IO.Put_Char (File => SPARK_IO.Standard_Output, Item => ' '); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => Prefix_Str); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Lex_Str)); SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => " (", Stop => 0); SPARK_IO.Put_Integer (File => SPARK_IO.Standard_Output, Item => Integer (Pos.Start_Line_No), Width => 0, Base => 10); SPARK_IO.Put_Char (File => SPARK_IO.Standard_Output, Item => ':'); SPARK_IO.Put_Integer (File => SPARK_IO.Standard_Output, Item => Pos.Start_Pos, Width => 0, Base => 10); SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => ") SYMBOL NOT FOUND", Stop => 0); end if; Prefix := Sym; end Look_Up; -- Add a cross-reference for the symbol (Sym) located at the node -- (Current_Node). The symbol is used in the compilation unit -- (Comp_Unit). The type of reference is Ref_Type. procedure Add_Usage (Comp_Unit : in ContextManager.UnitDescriptors; Sym : in Dictionary.Symbol; Current_Node : in STree.SyntaxNode; Ref_Type : in Character) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out Xref.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Current_Node, --# Dictionary.Dict, --# LexTokenManager.State, --# Ref_Type, --# STree.Table, --# Sym, --# Xref.State & --# Xref.State from *, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Current_Node, --# Dictionary.Dict, --# Ref_Type, --# STree.Table, --# Sym; is Node_Pos : LexTokenManager.Token_Position; Decl_Pos : LexTokenManager.Token_Position; Str : E_Strings.T; Unit_Des : ContextManager.UnitDescriptors; Node_File_Des : ContextManager.FileDescriptors; Decl_File_Des : ContextManager.FileDescriptors; begin Node_Pos := STree.Node_Position (Node => Current_Node); Decl_Pos := Dictionary.Get_Symbol_Location (Item => Sym); Unit_Des := Dictionary.Get_Symbol_Compilation_Unit (Item => Sym); if Unit_Des /= ContextManager.NullUnit then Decl_File_Des := ContextManager.Ops.Get_File_Descriptor (Unit_Descriptor => Unit_Des); else Decl_File_Des := ContextManager.NullFile; end if; if Comp_Unit /= ContextManager.NullUnit then Node_File_Des := ContextManager.Ops.Get_File_Descriptor (Unit_Descriptor => Comp_Unit); else Node_File_Des := ContextManager.NullFile; end if; if Node_Pos /= Decl_Pos or Decl_File_Des /= Node_File_Des then Xref.Add_Usage (Decl_Comp_Unit => Unit_Des, Sym => Sym, Usage_Comp_Unit => Comp_Unit, Pos => Node_Pos, Ref_Type => Ref_Type); if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_Char (File => SPARK_IO.Standard_Output, Item => ' '); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => Dictionary.GenerateSimpleName (Item => Sym, Separator => ".")); SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => " (", Stop => 0); E_Strings.Put_String (File => SPARK_IO.Standard_Output, 
E_Str => FileSystem.Just_File (Fn => LexTokenManager.Lex_String_To_String (Lex_Str => ContextManager.Ops.GetSourceFileName (Descriptor => ContextManager.Ops.Get_File_Descriptor (Unit_Descriptor => Comp_Unit))), Ext => True)); SPARK_IO.Put_Char (File => SPARK_IO.Standard_Output, Item => ':'); SPARK_IO.Put_Integer (File => SPARK_IO.Standard_Output, Item => Integer (Node_Pos.Start_Line_No), Width => 0, Base => 10); SPARK_IO.Put_Char (File => SPARK_IO.Standard_Output, Item => Ref_Type); SPARK_IO.Put_Integer (File => SPARK_IO.Standard_Output, Item => Node_Pos.Start_Pos, Width => 0, Base => 10); SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => ") FOUND AT ", Stop => 0); if Unit_Des /= ContextManager.NullUnit then Str := LexTokenManager.Lex_String_To_String (Lex_Str => ContextManager.Ops.GetSourceFileName (Descriptor => ContextManager.Ops.Get_File_Descriptor (Unit_Descriptor => Unit_Des))); else Str := E_Strings.Copy_String (Str => "no_file.ada"); end if; E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => FileSystem.Just_File (Fn => Str, Ext => True)); SPARK_IO.Put_Char (File => SPARK_IO.Standard_Output, Item => ':'); SPARK_IO.Put_Integer (File => SPARK_IO.Standard_Output, Item => Integer (Decl_Pos.Start_Line_No), Width => 0, Base => 10); SPARK_IO.Put_Char (File => SPARK_IO.Standard_Output, Item => ','); SPARK_IO.Put_Integer (File => SPARK_IO.Standard_Output, Item => Decl_Pos.Start_Pos, Width => 0, Base => 10); SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "!!!", Stop => 0); end if; end if; end Add_Usage; -- Add cross-references for the dotted name identifier (Node) -- giving a procedure/function name (Subprog_Sym) and a scope -- (Scope). The dotted name identifier is used in the compilation -- unit (Comp_Unit). The type of reference is Ref_Type. procedure Generate_Xref_List (Comp_Unit : in ContextManager.UnitDescriptors; Node : in STree.SyntaxNode; Ref_Type : in Character; Scope : in Dictionary.Scopes; Subprog_Sym : in Dictionary.Symbol; Full_Package_Name : in Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out Xref.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# Full_Package_Name, --# LexTokenManager.State, --# Node, --# Ref_Type, --# Scope, --# STree.Table, --# Subprog_Sym, --# Xref.State & --# Xref.State from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# Full_Package_Name, --# LexTokenManager.State, --# Node, --# Ref_Type, --# Scope, --# STree.Table, --# Subprog_Sym; is Prefix_Sym : Dictionary.Symbol; It : STree.Iterator; Current_Node : STree.SyntaxNode; begin It := STree.Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Node, In_Direction => STree.Down); if It /= STree.NullIterator then if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "!!! 
XREF", Stop => 0); end if; Prefix_Sym := Dictionary.NullSymbol; while It /= STree.NullIterator loop Current_Node := STree.Get_Node (It => It); Look_Up (Prefix => Prefix_Sym, Scope => Scope, Subprog_Sym => Subprog_Sym, Lex_Str => STree.Node_Lex_String (Node => Current_Node), Pos => STree.Node_Position (Node => Current_Node), Full_Package_Name => Full_Package_Name); if not Dictionary.Is_Null_Symbol (Prefix_Sym) then Add_Usage (Comp_Unit => Comp_Unit, Sym => Prefix_Sym, Current_Node => Current_Node, Ref_Type => Ref_Type); It := STree.NextNode (It => It); else It := STree.NullIterator; end if; end loop; if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.New_Line (File => SPARK_IO.Standard_Output, Spacing => 1); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_LIST PROGRAM ERROR"); end if; end Generate_Xref_List; -- Add cross-reference for the identifier (Node) giving a -- procedure/function name (Subprog_Sym) and a scope (Scope). The -- identifier is used in the compilation unit (Comp_Unit). procedure Generate_Xref_Identifier (Comp_Unit : in ContextManager.UnitDescriptors; Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Subprog_Sym : in Dictionary.Symbol; Full_Package_Name : in Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out Xref.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# Full_Package_Name, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# Subprog_Sym, --# Xref.State & --# Xref.State from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# Full_Package_Name, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# Subprog_Sym; is Current_Node : STree.SyntaxNode; Sym : Dictionary.Symbol; begin Current_Node := STree.Get_Node (It => STree.Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Node, In_Direction => STree.Down)); if Current_Node /= STree.NullNode then Sym := Dictionary.NullSymbol; if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "!!! 
XREF", Stop => 0); end if; Look_Up (Prefix => Sym, Scope => Scope, Subprog_Sym => Subprog_Sym, Lex_Str => STree.Node_Lex_String (Node => Current_Node), Pos => STree.Node_Position (Node => Current_Node), Full_Package_Name => Full_Package_Name); if not Dictionary.Is_Null_Symbol (Sym) then Add_Usage (Comp_Unit => Comp_Unit, Sym => Sym, Current_Node => Current_Node, Ref_Type => 'r'); end if; if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.New_Line (File => SPARK_IO.Standard_Output, Spacing => 1); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_IDENTIFIER PROGRAM ERROR"); end if; end Generate_Xref_Identifier; procedure Generate_Xref_Inherit (Comp_Unit : in ContextManager.UnitDescriptors; Parse_Tree : in STree.SyntaxNode; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out Xref.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table, --# Xref.State & --# Xref.State from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table; is Current_Node : STree.SyntaxNode; It : STree.Iterator; begin -- XRef for the inherit clause. Current_Node := STree.Get_Node (It => STree.Find_First_Node (Node_Kind => SP_Symbols.inherit_clause, From_Root => Parse_Tree, In_Direction => STree.Down)); if Current_Node /= STree.NullNode then if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => "!!! XREF INHERIT CLAUSE !!!", Stop => 0); end if; It := STree.Find_First_Node (Node_Kind => SP_Symbols.dotted_simple_name, From_Root => Current_Node, In_Direction => STree.Down); if It /= STree.NullIterator then while It /= STree.NullIterator loop -- inherit P ! w ! 
package P Generate_Xref_List (Comp_Unit => Comp_Unit, Node => STree.Get_Node (It => It), Ref_Type => 'w', Scope => Scope, Subprog_Sym => Dictionary.NullSymbol, Full_Package_Name => False); It := STree.NextNode (It => It); end loop; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_INHERIT PROGRAM ERROR"); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_INHERIT PROGRAM ERROR"); end if; end Generate_Xref_Inherit; procedure Generate_Xref_Suspends_Protects (Comp_Unit : in ContextManager.UnitDescriptors; Parse_Tree : in STree.SyntaxNode; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out Xref.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table, --# Xref.State & --# Xref.State from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table; is It : STree.Iterator; begin -- XRef for suspends or protects property. It := STree.Find_First_Node (Node_Kind => SP_Symbols.annotation_primary, From_Root => Parse_Tree, In_Direction => STree.Down); if It /= STree.NullIterator then if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => "!!! XREF PROPERTY LIST : SUSPENDS PROTECTS !!!", Stop => 0); end if; while It /= STree.NullIterator loop -- declare ... => P... ! r ! package P -- declare ... => V ! r ! [own] V : [in|out] T -- own [protected|task] ... (... => P...) ! r ! package P -- own [protected|task] ... (... => V) ! r ! [own] V : [in|out] T -- own [protected|task] ... (... => Proc) ! r ! procedure/entry Proc -- own [protected|task] ... (... => F) ! r ! function F Generate_Xref_List (Comp_Unit => Comp_Unit, Node => STree.Get_Node (It => It), Ref_Type => 'r', Scope => Scope, Subprog_Sym => Dictionary.NullSymbol, Full_Package_Name => False); It := STree.NextNode (It => It); end loop; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_SUSPENDS_PROTECTS PROGRAM ERROR"); end if; end Generate_Xref_Suspends_Protects; procedure Generate_Xref_Interrupt (Comp_Unit : in ContextManager.UnitDescriptors; Parse_Tree : in STree.SyntaxNode; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out Xref.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table, --# Xref.State & --# Xref.State from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table; is It : STree.Iterator; begin -- XRef for interrupt property. 
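-- Sketch of the references recorded below (own commentary, names
-- invented): each handler named on the left of an association in the
-- interrupt property, as in
--   own protected PO (interrupt => (Handler => ...));
-- is added as a plain read reference ('r') against its declaration.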
It := STree.Find_First_Node (Node_Kind => SP_Symbols.annotation_aggregate_choice_rep, From_Root => Parse_Tree, In_Direction => STree.Down); if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => "!!! XREF PROPERTY LIST : INTERRUPT !!!", Stop => 0); end if; while It /= STree.NullIterator loop -- declare interrupt => (Proc => ...) ! r ! procedure/entry Proc -- declare interrupt => (F => ...) ! r ! function F -- own [protected|task] ... (interrupt => (Proc => ...)) ! r ! procedure/entry Proc -- own [protected|task] ... (interrupt => (F => ...)) ! r ! function F Generate_Xref_Identifier (Comp_Unit => Comp_Unit, Node => STree.Get_Node (It => It), Scope => Scope, Subprog_Sym => Dictionary.NullSymbol, Full_Package_Name => False); It := STree.NextNode (It => It); end loop; end Generate_Xref_Interrupt; procedure Generate_Xref_Own (Comp_Unit : in ContextManager.UnitDescriptors; Parse_Tree : in STree.SyntaxNode; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out Xref.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table, --# Xref.State & --# Xref.State from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table; is Current_Node : STree.SyntaxNode; Prev_Node : STree.SyntaxNode; Value_Node : STree.SyntaxNode; It : STree.Iterator; It_Id : STree.Iterator; It_Prop : STree.Iterator; Sym : Dictionary.Symbol; PO_Scope : Dictionary.Scopes; Name_Node : STree.SyntaxNode; Name_Str : E_Strings.T; begin -- XRef for the own variable clause. Current_Node := STree.Get_Node (It => STree.Find_First_Node (Node_Kind => SP_Symbols.own_variable_clause, From_Root => Parse_Tree, In_Direction => STree.Down)); if Current_Node /= STree.NullNode then Prev_Node := Current_Node; if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => "!!! XREF OWN VARIABLE CLAUSE !!!", Stop => 0); end if; -- XRef for the own variable. It := STree.Find_First_Node (Node_Kind => SP_Symbols.own_variable_list, From_Root => Prev_Node, In_Direction => STree.Down); if It /= STree.NullIterator then while It /= STree.NullIterator loop --# accept F, 41, "Stable expression expected here"; if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => "!!! XREF OWN VARIABLE LIST !!!", Stop => 0); end if; --# end accept; It_Id := STree.Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => STree.Get_Node (It => It), In_Direction => STree.Down); if It_Id /= STree.NullIterator then while It_Id /= STree.NullIterator loop -- own [in|out|protected|task] V : T ! o ! V : T --# accept F, 41, "Stable expression expected here"; if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "!!! 
XREF", Stop => 0); end if; --# end accept; Current_Node := STree.Get_Node (It => It_Id); Sym := Dictionary.NullSymbol; Look_Up (Prefix => Sym, Scope => Scope, Subprog_Sym => Dictionary.NullSymbol, Lex_Str => STree.Node_Lex_String (Node => Current_Node), Pos => STree.Node_Position (Node => Current_Node), Full_Package_Name => False); if not Dictionary.Is_Null_Symbol (Sym) then Add_Usage (Comp_Unit => Comp_Unit, Sym => Sym, Current_Node => Current_Node, Ref_Type => 'o'); end if; It_Id := STree.NextNode (It => It_Id); --# accept F, 41, "Stable expression expected here"; if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.New_Line (File => SPARK_IO.Standard_Output, Spacing => 1); end if; --# end accept; end loop; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_OWN PROGRAM ERROR"); end if; It := STree.NextNode (It => It); end loop; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_OWN PROGRAM ERROR"); end if; -- XRef for the type. It := STree.Find_First_Node (Node_Kind => SP_Symbols.type_mark, From_Root => Prev_Node, In_Direction => STree.Down); while It /= STree.NullIterator loop --# accept F, 41, "Stable expression expected here"; if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => "!!! XREF TYPE MARK !!!", Stop => 0); end if; --# end accept; Current_Node := STree.Get_Node (It => STree.Find_First_Node (Node_Kind => SP_Symbols.dotted_simple_name, From_Root => STree.Get_Node (It => It), In_Direction => STree.Down)); if Current_Node /= STree.NullNode then -- own ... : P... ! r ! package P -- own ... : T ! r ! [protected|task] type T is ... / type T is abstract Generate_Xref_List (Comp_Unit => Comp_Unit, Node => Current_Node, Ref_Type => 'r', Scope => Scope, Subprog_Sym => Dictionary.NullSymbol, Full_Package_Name => False); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_OWN PROGRAM ERROR"); end if; It := STree.NextNode (It => It); end loop; -- XRef for property list. It := STree.Find_First_Node (Node_Kind => SP_Symbols.own_variable_specification, From_Root => Prev_Node, In_Direction => STree.Down); while It /= STree.NullIterator loop It_Prop := STree.Find_First_Node (Node_Kind => SP_Symbols.property, From_Root => STree.Get_Node (It => It), In_Direction => STree.Down); while It_Prop /= STree.NullIterator loop Name_Node := STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => STree.Get_Node (It => It_Prop))); Value_Node := STree.Next_Sibling (Current_Node => Name_Node); Name_Str := LexTokenManager.Lex_String_To_String (Lex_Str => STree.Node_Lex_String (Name_Node)); if E_Strings.Eq1_String (E_Str => Name_Str, Str => "protects") then Current_Node := STree.Get_Node (It => STree.Find_First_Node (Node_Kind => SP_Symbols.annotation_primary, From_Root => Value_Node, In_Direction => STree.Down)); if Current_Node /= STree.NullNode then -- Update the scope. 
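-- Commentary on the steps below (not from the original source): the
-- identifiers of the own variable's type mark are looked up one by one
-- and PO_Scope is narrowed to the private view of that protected or
-- task type, so that the names listed in the protects property resolve
-- inside the protected object rather than in the enclosing package scope.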
Current_Node := STree.Get_Node (It => STree.Find_First_Node (Node_Kind => SP_Symbols.type_mark, From_Root => STree.Get_Node (It => It), In_Direction => STree.Down)); --# accept F, 41, "Stable expression expected here"; if Current_Node /= STree.NullNode then PO_Scope := Scope; It_Id := STree.Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Current_Node, In_Direction => STree.Down); while It_Id /= STree.NullIterator loop Sym := Dictionary.LookupImmediateScope (Name => STree.Node_Lex_String (Node => STree.Get_Node (It => It_Id)), Scope => PO_Scope, Context => Dictionary.ProofContext); if not Dictionary.Is_Null_Symbol (Sym) then PO_Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Privat, The_Unit => Sym); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_OWN PROGRAM ERROR"); end if; It_Id := STree.NextNode (It => It_Id); end loop; else PO_Scope := Scope; end if; --# end accept; Generate_Xref_Suspends_Protects (Comp_Unit => Comp_Unit, Parse_Tree => Value_Node, Scope => PO_Scope); end if; elsif E_Strings.Eq1_String (E_Str => Name_Str, Str => "interrupt") then Current_Node := STree.Get_Node (It => STree.Find_First_Node (Node_Kind => SP_Symbols.annotation_aggregate_choice_rep, From_Root => Value_Node, In_Direction => STree.Down)); if Current_Node /= STree.NullNode then -- Update the scope. Current_Node := STree.Get_Node (It => STree.Find_First_Node (Node_Kind => SP_Symbols.type_mark, From_Root => STree.Get_Node (It => It), In_Direction => STree.Down)); --# accept F, 41, "Stable expression expected here"; if Current_Node /= STree.NullNode then PO_Scope := Scope; It_Id := STree.Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Current_Node, In_Direction => STree.Down); while It_Id /= STree.NullIterator loop Sym := Dictionary.LookupImmediateScope (Name => STree.Node_Lex_String (Node => STree.Get_Node (It => It_Id)), Scope => PO_Scope, Context => Dictionary.ProofContext); if not Dictionary.Is_Null_Symbol (Sym) then PO_Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Privat, The_Unit => Sym); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_OWN PROGRAM ERROR"); end if; It_Id := STree.NextNode (It => It_Id); end loop; else PO_Scope := Scope; end if; --# end accept; Generate_Xref_Interrupt (Comp_Unit => Comp_Unit, Parse_Tree => Value_Node, Scope => PO_Scope); end if; end if; It_Prop := STree.NextNode (It => It_Prop); end loop; It := STree.NextNode (It => It); end loop; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_OWN PROGRAM ERROR"); end if; end Generate_Xref_Own; procedure Generate_Xref_Refinement (Comp_Unit : in ContextManager.UnitDescriptors; Parse_Tree : in STree.SyntaxNode; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out Xref.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table, --# Xref.State & --# Xref.State from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# 
STree.Table; is Current_Node : STree.SyntaxNode; It : STree.Iterator; It_Id : STree.Iterator; Sym : Dictionary.Symbol; begin -- XRef for the own refinement. Current_Node := STree.Get_Node (It => STree.Find_First_Node (Node_Kind => SP_Symbols.refinement_definition, From_Root => Parse_Tree, In_Direction => STree.Down)); if Current_Node /= STree.NullNode then if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => "!!! XREF OWN REFINEMENT DEFINITION !!!", Stop => 0); end if; It := STree.Find_First_Node (Node_Kind => SP_Symbols.refinement_clause, From_Root => Current_Node, In_Direction => STree.Down); if It /= STree.NullIterator then while It /= STree.NullIterator loop --# accept F, 41, "Stable expression expected here"; if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => "!!! XREF OWN REFINEMENT CLAUSE !!!", Stop => 0); end if; --# end accept; Current_Node := STree.Get_Node (It => STree.Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => STree.Get_Node (It => It), In_Direction => STree.Down)); if Current_Node /= STree.NullNode then -- own S is ... ! c ! own S --# accept F, 41, "Stable expression expected here"; if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "!!! XREF", Stop => 0); end if; --# end accept; Sym := Dictionary.NullSymbol; Look_Up (Prefix => Sym, Scope => Scope, Subprog_Sym => Dictionary.NullSymbol, Lex_Str => STree.Node_Lex_String (Node => Current_Node), Pos => STree.Node_Position (Node => Current_Node), Full_Package_Name => False); if not Dictionary.Is_Null_Symbol (Sym) then Add_Usage (Comp_Unit => Comp_Unit, Sym => Sym, Current_Node => Current_Node, Ref_Type => 'c'); end if; --# accept F, 41, "Stable expression expected here"; if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.New_Line (File => SPARK_IO.Standard_Output, Spacing => 1); end if; --# end accept; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_REFINEMENT PROGRAM ERROR"); end if; It_Id := STree.Find_First_Node (Node_Kind => SP_Symbols.dotted_simple_name, From_Root => STree.Get_Node (It => It), In_Direction => STree.Down); if It_Id /= STree.NullIterator then while It_Id /= STree.NullIterator loop -- own ... is [in|out] P... ! o ! package P -- own ... is [in|out] V ! o ! 
V : T Generate_Xref_List (Comp_Unit => Comp_Unit, Node => STree.Get_Node (It => It_Id), Ref_Type => 'o', Scope => Scope, Subprog_Sym => Dictionary.NullSymbol, Full_Package_Name => False); It_Id := STree.NextNode (It => It_Id); end loop; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_REFINEMENT PROGRAM ERROR"); end if; It := STree.NextNode (It => It); end loop; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_REFINEMENT PROGRAM ERROR"); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_REFINEMENT PROGRAM ERROR"); end if; end Generate_Xref_Refinement; procedure Generate_Xref_Initializes (Comp_Unit : in ContextManager.UnitDescriptors; Parse_Tree : in STree.SyntaxNode; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out Xref.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table, --# Xref.State & --# Xref.State from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table; is Current_Node : STree.SyntaxNode; It : STree.Iterator; Sym : Dictionary.Symbol; begin -- XRef for the initializes. Current_Node := STree.Get_Node (It => STree.Find_First_Node (Node_Kind => SP_Symbols.initialization_specification, From_Root => Parse_Tree, In_Direction => STree.Down)); if Current_Node /= STree.NullNode then if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => "!!! XREF INITIALIZATION_SPECIFICATION !!!", Stop => 0); end if; It := STree.Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Current_Node, In_Direction => STree.Down); if It /= STree.NullIterator then while It /= STree.NullIterator loop -- initializes S ! r ! own S -- initializes V ! r ! V : T --# accept F, 41, "Stable expression expected here"; if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "!!! 
XREF", Stop => 0); end if; --# end accept; Current_Node := STree.Get_Node (It => It); Sym := Dictionary.NullSymbol; Look_Up (Prefix => Sym, Scope => Scope, Subprog_Sym => Dictionary.NullSymbol, Lex_Str => STree.Node_Lex_String (Node => Current_Node), Pos => STree.Node_Position (Node => Current_Node), Full_Package_Name => False); if not Dictionary.Is_Null_Symbol (Sym) then Add_Usage (Comp_Unit => Comp_Unit, Sym => Sym, Current_Node => Current_Node, Ref_Type => 'r'); end if; It := STree.NextNode (It => It); --# accept F, 41, "Stable expression expected here"; if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.New_Line (File => SPARK_IO.Standard_Output, Spacing => 1); end if; --# end accept; end loop; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_INITIALIZES PROGRAM ERROR"); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_INITIALIZES PROGRAM ERROR"); end if; end Generate_Xref_Initializes; procedure Generate_Xref_Global (Comp_Unit : in ContextManager.UnitDescriptors; Parse_Tree : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Subprog_Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out Xref.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table, --# Subprog_Sym, --# Xref.State & --# Xref.State from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table, --# Subprog_Sym; is Current_Node : STree.SyntaxNode; It : STree.Iterator; It_Id : STree.Iterator; Mode : Dictionary.Modes; begin -- XRef for the globals. It := STree.Find_First_Node (Node_Kind => SP_Symbols.global_variable_clause, From_Root => Parse_Tree, In_Direction => STree.Down); if It /= STree.NullIterator then if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => "!!! XREF GLOBAL VARIABLE CLAUSE !!!", Stop => 0); end if; while It /= STree.NullIterator loop Current_Node := STree.Child_Node (Current_Node => STree.Get_Node (It => STree.Find_First_Node (Node_Kind => SP_Symbols.mode, From_Root => STree.Get_Node (It => It), In_Direction => STree.Down))); -- Get the mode (in/out/in out) of the global. if Current_Node /= STree.NullNode then if STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.in_mode then Mode := Dictionary.InMode; elsif STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.inout_mode then Mode := Dictionary.InOutMode; elsif STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.out_mode then Mode := Dictionary.OutMode; else Mode := Dictionary.InvalidMode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_GLOBAL PROGRAM ERROR"); end if; else -- Limitation will assume IN as default mode. No -- consequence for cross-references. 
Mode := Dictionary.DefaultMode; end if; It_Id := STree.Find_First_Node (Node_Kind => SP_Symbols.dotted_simple_name, From_Root => STree.Get_Node (It => It), In_Direction => STree.Down); if It_Id /= STree.NullIterator then while It_Id /= STree.NullIterator loop -- global [in|out] P... ! r ! package P -- global [in|out] S ! r ! own S -- global [in|out] V ! r ! [own] V : [in|out] T / {protected|task} type V is ... Generate_Xref_List (Comp_Unit => Comp_Unit, Node => STree.Get_Node (It => It_Id), Ref_Type => 'r', Scope => Scope, Subprog_Sym => Dictionary.NullSymbol, Full_Package_Name => False); -- global [in|out] S ! >/=/< ! procedure/entry Proc -- global [in|out] V ! >/=/< ! procedure/entry Proc -- global [in|out] S ! > ! function F -- global [in|out] V ! > ! function F --# accept F, 41, "Stable expression expected here"; if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "!!! XREF", Stop => 0); end if; --# end accept; --# accept Flow, 41, "Expected stable expression"; if Mode = Dictionary.InMode or Mode = Dictionary.DefaultMode then Add_Usage (Comp_Unit => Comp_Unit, Sym => Subprog_Sym, Current_Node => STree.Last_Sibling_Of (Start_Node => STree.Child_Node (Current_Node => STree.Get_Node (It => It_Id))), Ref_Type => '>'); elsif Mode = Dictionary.InOutMode then Add_Usage (Comp_Unit => Comp_Unit, Sym => Subprog_Sym, Current_Node => STree.Last_Sibling_Of (Start_Node => STree.Child_Node (Current_Node => STree.Get_Node (It => It_Id))), Ref_Type => '='); elsif Mode = Dictionary.OutMode then Add_Usage (Comp_Unit => Comp_Unit, Sym => Subprog_Sym, Current_Node => STree.Last_Sibling_Of (Start_Node => STree.Child_Node (Current_Node => STree.Get_Node (It => It_Id))), Ref_Type => '<'); end if; --# end accept; --# accept F, 41, "Stable expression expected here"; if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.New_Line (File => SPARK_IO.Standard_Output, Spacing => 1); end if; --# end accept; It_Id := STree.NextNode (It => It_Id); end loop; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_GLOBAL PROGRAM ERROR"); end if; It := STree.NextNode (It => It); end loop; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_GLOBAL PROGRAM ERROR"); end if; end Generate_Xref_Global; procedure Generate_Xref_Derives (Comp_Unit : in ContextManager.UnitDescriptors; Parse_Tree : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Subprog_Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out Xref.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table, --# Subprog_Sym, --# Xref.State & --# Xref.State from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table, --# Subprog_Sym; is Current_Node : STree.SyntaxNode; It : STree.Iterator; It_Id : STree.Iterator; begin -- XRef for the derives. if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => "!!! 
XREF GLOBAL DEPENDENCY CLAUSE !!!", Stop => 0); end if; It := STree.Find_First_Node (Node_Kind => SP_Symbols.dependency_clause, From_Root => Parse_Tree, In_Direction => STree.Down); while It /= STree.NullIterator loop -- XRef for the derives before the 'from'. Current_Node := STree.Child_Node (Current_Node => STree.Get_Node (It => It)); if Current_Node /= STree.NullNode then It_Id := STree.Find_First_Node (Node_Kind => SP_Symbols.dotted_simple_name, From_Root => Current_Node, In_Direction => STree.Down); if It_Id /= STree.NullIterator then while It_Id /= STree.NullIterator loop -- derives P... from ... ! m ! package P -- derives S from ... ! m ! own S -- derives V from ... ! m ! [own] V : [in|out] T/{protected|task} type V is ... Generate_Xref_List (Comp_Unit => Comp_Unit, Node => STree.Get_Node (It => It_Id), Ref_Type => 'm', Scope => Scope, Subprog_Sym => Subprog_Sym, Full_Package_Name => False); It_Id := STree.NextNode (It => It_Id); end loop; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_DERIVES PROGRAM ERROR"); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_DERIVES PROGRAM ERROR"); end if; -- XRef for the derives after the 'from'. Current_Node := STree.Get_Node (It => STree.Find_First_Node (Node_Kind => SP_Symbols.dependency_clause_opt, From_Root => STree.Get_Node (It => It), In_Direction => STree.Down)); if Current_Node /= STree.NullNode then It_Id := STree.Find_First_Node (Node_Kind => SP_Symbols.dotted_simple_name, From_Root => Current_Node, In_Direction => STree.Down); while It_Id /= STree.NullIterator loop -- derives ... from P... ! r ! package P -- derives ... from S ! r ! own S -- derives ... from V ! r ! [own] V : [in|out] T/{protected|task} type V is ... Generate_Xref_List (Comp_Unit => Comp_Unit, Node => STree.Get_Node (It => It_Id), Ref_Type => 'r', Scope => Scope, Subprog_Sym => Subprog_Sym, Full_Package_Name => False); It_Id := STree.NextNode (It => It_Id); end loop; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_DERIVES PROGRAM ERROR"); end if; It := STree.NextNode (It => It); end loop; -- XRef for null import list. Current_Node := STree.Get_Node (It => STree.Find_First_Node (Node_Kind => SP_Symbols.null_import_list, From_Root => Parse_Tree, In_Direction => STree.Down)); if Current_Node /= STree.NullNode then -- XRef for the derives after the 'from'. Current_Node := STree.Get_Node (It => STree.Find_First_Node (Node_Kind => SP_Symbols.dependency_clause_optrep, From_Root => Current_Node, In_Direction => STree.Down)); if Current_Node /= STree.NullNode then It_Id := STree.Find_First_Node (Node_Kind => SP_Symbols.dotted_simple_name, From_Root => Current_Node, In_Direction => STree.Down); while It_Id /= STree.NullIterator loop -- derives null from P... ! r ! package P -- derives null from S ! r ! own S -- derives null from V ! r ! [own] V : [in|out] T/{protected|task} type V is ... 
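                           -- Hypothetical example (names assumed) of a null import list reaching
                           -- the call below; each item after "from" gets an 'r' cross-reference
                           -- entry:
                           --
                           --#   derives null from Clock.Ticks, Seed;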
Generate_Xref_List (Comp_Unit => Comp_Unit, Node => STree.Get_Node (It => It_Id), Ref_Type => 'r', Scope => Scope, Subprog_Sym => Subprog_Sym, Full_Package_Name => False); It_Id := STree.NextNode (It => It_Id); end loop; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_DERIVES PROGRAM ERROR"); end if; end if; end Generate_Xref_Derives; procedure Generate_Xref_Justification (Comp_Unit : in ContextManager.UnitDescriptors; Parse_Tree : in STree.SyntaxNode; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out Xref.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table, --# Xref.State & --# Xref.State from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table; is Current_Node : STree.SyntaxNode; It : STree.Iterator; begin -- XRef for the justification clause. Current_Node := STree.Get_Node (It => STree.Find_First_Node (Node_Kind => SP_Symbols.justification_statement, From_Root => Parse_Tree, In_Direction => STree.Down)); if Current_Node /= STree.NullNode then if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => "!!! XREF JUSTIFICATION CLAUSE !!!", Stop => 0); end if; It := STree.Find_First_Node (Node_Kind => SP_Symbols.dotted_simple_name, From_Root => Current_Node, In_Direction => STree.Down); while It /= STree.NullIterator loop -- accept P ! r ! package P -- accept V ! r ! [own] V : [in|out] T -- accept T ! r ! [protected|task] type T is ... Generate_Xref_List (Comp_Unit => Comp_Unit, Node => STree.Get_Node (It => It), Ref_Type => 'r', Scope => Scope, Subprog_Sym => Dictionary.NullSymbol, Full_Package_Name => False); It := STree.NextNode (It => It); end loop; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_JUSTIFICATION PROGRAM ERROR"); end if; end Generate_Xref_Justification; procedure Generate_Xref_Proof_Function (Comp_Unit : in ContextManager.UnitDescriptors; Parse_Tree : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Subprog_Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out Xref.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table, --# Subprog_Sym, --# Xref.State & --# Xref.State from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table, --# Subprog_Sym; is Top_Node : STree.SyntaxNode; Current_Node : STree.SyntaxNode; It : STree.Iterator; It_Id : STree.Iterator; begin -- XRef for the proof function. 
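      -- Hedged example (hypothetical declaration, names assumed) of the proof-function
      -- annotations walked here:
      --
      --#   function Is_Sorted (T : Table) return Boolean;
      --
      -- Each formal parameter name yields a '>' entry against the function symbol, while the
      -- parameter type mark and the return type mark each yield an 'r' entry.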
Top_Node := STree.Get_Node (It => STree.Find_First_Node (Node_Kind => SP_Symbols.function_specification, From_Root => Parse_Tree, In_Direction => STree.Down)); if Top_Node /= STree.NullNode then Current_Node := STree.Get_Node (It => STree.Find_First_Node (Node_Kind => SP_Symbols.formal_part, From_Root => Top_Node, In_Direction => STree.Down)); if Current_Node /= STree.NullNode then -- XRef for the formal parameters of a proof function. if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => "!!! XREF FORMAL PARAMETERS !!!", Stop => 0); end if; It := STree.Find_First_Node (Node_Kind => SP_Symbols.parameter_specification, From_Root => Current_Node, In_Direction => STree.Down); if It /= STree.NullIterator then while It /= STree.NullIterator loop -- XRef for the formal parameter name. Current_Node := STree.Child_Node (Current_Node => STree.Get_Node (It => It)); if Current_Node /= STree.NullNode then --# accept F, 41, "Stable expression expected here"; if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => "!!! XREF FORMAL PARAMETER NAMES !!!", Stop => 0); end if; --# end accept; It_Id := STree.Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Current_Node, In_Direction => STree.Down); if It_Id /= STree.NullIterator then while It_Id /= STree.NullIterator loop --# accept F, 41, "Stable expression expected here"; if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "!!! XREF", Stop => 0); end if; --# end accept; -- function F (V : ...) return ... ! > ! function F Add_Usage (Comp_Unit => Comp_Unit, Sym => Subprog_Sym, Current_Node => STree.Get_Node (It => It_Id), Ref_Type => '>'); --# accept F, 41, "Stable expression expected here"; if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.New_Line (File => SPARK_IO.Standard_Output, Spacing => 1); end if; --# end accept; It_Id := STree.NextNode (It => It_Id); end loop; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_PROOF_FUNCTION PROGRAM ERROR"); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_PROOF_FUNCTION PROGRAM ERROR"); end if; -- XRef for the formal parameter type. Current_Node := STree.Get_Node (It => STree.Find_First_Node (Node_Kind => SP_Symbols.dotted_simple_name, From_Root => STree.Last_Sibling_Of (Start_Node => Current_Node), In_Direction => STree.Down)); if Current_Node /= STree.NullNode then --# accept F, 41, "Stable expression expected here"; if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => "!!! XREF FORMAL PARAMETER TYPE !!!", Stop => 0); end if; --# end accept; -- function ... (... : P...) return ... ! r ! package P -- function ... (... : T) return ... ! r ! type T is ... 
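                           -- e.g. a dotted type mark in a hypothetical
                           -- "--# function F (X : Pkg.Index) return Boolean;" also records an 'r'
                           -- entry for package Pkg through the call below.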
Generate_Xref_List (Comp_Unit => Comp_Unit, Node => Current_Node, Ref_Type => 'r', Scope => Scope, Subprog_Sym => Dictionary.NullSymbol, Full_Package_Name => False); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_PROOF_FUNCTION PROGRAM ERROR"); end if; It := STree.NextNode (It => It); end loop; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_PROOF_FUNCTION PROGRAM ERROR"); end if; end if; Current_Node := STree.Last_Sibling_Of (Start_Node => STree.Child_Node (Current_Node => Top_Node)); if Current_Node /= STree.NullNode then if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => "!!! XREF RETURN TYPE !!!", Stop => 0); end if; Current_Node := STree.Get_Node (It => STree.Find_First_Node (Node_Kind => SP_Symbols.dotted_simple_name, From_Root => Current_Node, In_Direction => STree.Down)); if Current_Node /= STree.NullNode then -- function ... (... : ...) return P... ! r ! package P -- function ... (... : ...) return T ! r ! type T is ... Generate_Xref_List (Comp_Unit => Comp_Unit, Node => Current_Node, Ref_Type => 'r', Scope => Scope, Subprog_Sym => Dictionary.NullSymbol, Full_Package_Name => False); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_PROOF_FUNCTION PROGRAM ERROR"); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_PROOF_FUNCTION PROGRAM ERROR"); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_PROOF_FUNCTION PROGRAM ERROR"); end if; end Generate_Xref_Proof_Function; procedure Generate_Xref_Object_Assertion (Comp_Unit : in ContextManager.UnitDescriptors; Parse_Tree : in STree.SyntaxNode; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out Xref.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table, --# Xref.State & --# Xref.State from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table; is Current_Node : STree.SyntaxNode; It : STree.Iterator; begin -- XRef for the object assertion. Current_Node := STree.Get_Node (It => STree.Find_First_Node (Node_Kind => SP_Symbols.object_assertion, From_Root => Parse_Tree, In_Direction => STree.Down)); if Current_Node /= STree.NullNode then It := STree.Find_First_Node (Node_Kind => SP_Symbols.dotted_simple_name, From_Root => STree.Child_Node (Current_Node => Current_Node), In_Direction => STree.Down); if It /= STree.NullIterator then while It /= STree.NullIterator loop -- for P... declare [rule|norule] ! r ! package P -- for V declare [rule|norule] ! r ! V : const T := ... 
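                  -- Hypothetical object assertions (names assumed) of the forms handled here;
                  -- each named object or package prefix yields an 'r' entry via the call below:
                  --
                  --#   for Max_Size declare rule;
                  --#   for Limits.Upper_Bound declare norule;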
Generate_Xref_List (Comp_Unit => Comp_Unit, Node => STree.Get_Node (It => It), Ref_Type => 'r', Scope => Scope, Subprog_Sym => Dictionary.NullSymbol, Full_Package_Name => False); It := STree.NextNode (It => It); end loop; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_OBJECT_ASSERTION PROGRAM ERROR"); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_OBJECT_ASSERTION PROGRAM ERROR"); end if; end Generate_Xref_Object_Assertion; procedure Generate_Xref_Symbol (Comp_Unit : in ContextManager.UnitDescriptors; Parse_Tree : in STree.SyntaxNode; Symbol : in Dictionary.Symbol; Is_Declaration : in Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out Xref.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# Is_Declaration, --# LexTokenManager.State, --# Parse_Tree, --# STree.Table, --# Symbol, --# Xref.State & --# Xref.State from *, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# Is_Declaration, --# Parse_Tree, --# STree.Table, --# Symbol; is Current_Node : STree.SyntaxNode; begin -- XRef for the precondition -- XRef for the postcondition -- XRef for the return -- XRef for the assert -- XRef for the check Current_Node := STree.Get_Node (It => STree.Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Parse_Tree, In_Direction => STree.Down)); if Current_Node /= STree.NullNode then if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => "!!! XREF PREDICATE !!!", Stop => 0); SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "!!! XREF", Stop => 0); end if; -- pre ... P ... ! r ! package P -- pre ... F ... ! r ! function F -- pre ... S ... ! r ! own S -- pre ... V ... ! r ! [own] V : [in|out] T -- pre ... T ... ! r ! [protected|task] type T is ... -- post ... P ... ! r ! package P -- post ... F ... ! r ! function F -- post ... S ... ! r ! own S -- post ... V ... ! r ! [own] V : [in|out] T -- post ... T ... ! r ! [protected|task] type T is ... -- return ... P ... ! r ! package P -- return ... F ... ! r ! function F -- return ... S ... ! r ! own S -- return ... V ... ! r ! [own] V : [in|out] T -- return ... T ... ! r ! [protected|task] type T is ... -- assert ... P ... ! r ! package P -- assert ... F ... ! r ! function F -- assert ... S ... ! r ! own S -- assert ... V ... ! r ! [own] V : [in|out] T -- assert ... T ... ! r ! [protected|task] type T is ... -- check ... P ... ! r ! package P -- check ... F ... ! r ! function F -- check ... S ... ! r ! own S -- check ... V ... ! r ! [own] V : [in|out] T -- check ... T ... ! r ! [protected|task] type T is ... -- V : ... ! O ! own V : ... -- type T is ... ! O ! own ... 
: T if Is_Declaration then Add_Usage (Comp_Unit => Comp_Unit, Sym => Symbol, Current_Node => Current_Node, Ref_Type => 'O'); else Add_Usage (Comp_Unit => Comp_Unit, Sym => Symbol, Current_Node => Current_Node, Ref_Type => 'r'); end if; if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.New_Line (File => SPARK_IO.Standard_Output, Spacing => 1); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.GENERATE_XREF_SYMBOL PROGRAM ERROR"); end if; end Generate_Xref_Symbol; procedure Cleanup --# global in out Xref.State; --# derives Xref.State from *; is begin Xref.Cleanup_Decl_Comp_Unit; end Cleanup; procedure Increment_Nb_Separates (Comp_Unit : in ContextManager.UnitDescriptors; Parse_Tree : in STree.SyntaxNode) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Heap; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out Xref.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# LexTokenManager.State, --# Parse_Tree, --# STree.Table & --# Xref.State from *, --# Comp_Unit, --# Parse_Tree, --# STree.Table; is Current_Node : STree.SyntaxNode; Unit_Name : LexTokenLists.Lists; Dummy_Unit_Type : ContextManager.UnitTypes; Lex_Str : LexTokenManager.Lex_String; begin -- Count number of separates. Current_Node := STree.Get_Node (It => STree.Find_First_Node (Node_Kind => SP_Symbols.body_stub, From_Root => Parse_Tree, In_Direction => STree.Down)); -- Try to find if the SP_Symbols.body_stub is a separate. The -- keyword "separate" (SP_Symbols.RWseparate) is not kept into -- the tree. But, from the SPARK.LLA file, if the last child is -- a pragma (SP_Symbols.apragma) then it is never a separate. if Current_Node /= STree.NullNode and then STree.Syntax_Node_Type (Node => STree.Last_Child_Of (Start_Node => Current_Node)) /= SP_Symbols.apragma then if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "!!! 
ADD SEPARATE IN ", Stop => 0); --# accept F, 10, Dummy_Unit_Type, "Ineffective assignment here OK"; ContextManager.Ops.GetUnitName (Descriptor => Comp_Unit, UnitName => Unit_Name, UnitType => Dummy_Unit_Type); --# end accept; LexTokenLists.Print_List (File => SPARK_IO.Standard_Output, List => Unit_Name); SPARK_IO.Put_Char (File => SPARK_IO.Standard_Output, Item => '.'); Current_Node := STree.Get_Node (It => STree.Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Parse_Tree, In_Direction => STree.Down)); if Current_Node /= STree.NullNode then Lex_Str := STree.Node_Lex_String (Node => Current_Node); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Lex_Str)); end if; SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => " !!!", Stop => 0); end if; Xref.Increment_Nb_Separates (Comp_Unit => Comp_Unit); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.INCREMENT_NB_SEPARATES PROGRAM ERROR"); end if; --# accept F, 33,Dummy_Unit_Type, "Dummy_Unit_Type not referenced here"; end Increment_Nb_Separates; end SLI; spark-2012.0.deb/examiner/dag-buildgraph.adb0000644000175000017500000105366311753202336017611 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Clists; with DAG_IO; with Debug; with ExaminerConstants; with SeqAlgebra; with SystemErrors; separate (DAG) procedure BuildGraph (StartNode : in STree.SyntaxNode; SubprogSym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; OutputFile : in SPARK_IO.File_Type; EndPosition : in LexTokenManager.Token_Position; VCGFailure : in out Boolean; VCGHeap : in out Cells.Heap_Record; FlowHeap : in out Heap.HeapRecord; Semantic_Error_In_Subprogram : in Boolean; DataFlowErrorInSubprogram : in Boolean; Type_Check_Exports : in Boolean) is InitialRecord : StmtStack.StmtRecord; LastNode, Local_Node, Node : STree.SyntaxNode; NodeType : SP_Symbols.SP_Symbol; StmtLabel : Labels.Label; LineNmbr : Integer; LScope : Dictionary.Scopes; SubprogramCalls : Natural; ShortCircuitStack : CStacks.Stack; CheckStack : CStacks.Stack; ContainsReals : Boolean; PreConstraints : Cells.Cell; ImportConstraints : Cells.Cell := Cells.Null_Cell; -- global used to distinguish Run_Time_Check from PreConCheck KindOfStackedCheck : Graph.Proof_Context_Type := Graph.Run_Time_Check; DoAssumeLocalRvalues : Boolean; LoopStack : LoopContext.T; ------------------------------------------------------------------------- -- Cell Creation Utilities ------------------------------------------------------------------------- procedure SubstituteTwiddled (Abstraction : in Dictionary.Abstractions; SubProg : in Dictionary.Symbol; ConstraintRoot : in Cells.Cell) -- replace import-export variables by their twiddled version -- in a precondition. -- This has been plagiarised from SubstituteParameters in -- ModelPrecondition. --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives Statistics.TableUsage, --# VCGHeap from *, --# Abstraction, --# ConstraintRoot, --# Dictionary.Dict, --# SubProg, --# VCGHeap; is P : Cells.Cell; S : CStacks.Stack; VarSym : Dictionary.Symbol; begin -- DAG traversal algorithm of D.E. 
Knuth, Fundamental Algorithms, p.317; CStacks.CreateStack (S); P := ConstraintRoot; loop loop exit when Cells.Is_Null_Cell (P); CStacks.Push (VCGHeap, P, S); if Is_Leaf (Node => P, VCG_Heap => VCGHeap) then P := Cells.Null_Cell; else P := LeftPtr (VCGHeap, P); end if; end loop; exit when CStacks.IsEmpty (S); P := CStacks.Top (VCGHeap, S); CStacks.Pop (VCGHeap, S); if Is_Leaf (Node => P, VCG_Heap => VCGHeap) then if Cells.Get_Kind (VCGHeap, P) = Cell_Storage.Reference then VarSym := Cells.Get_Symbol_Value (VCGHeap, P); if Dictionary.IsImportExport (Abstraction, SubProg, VarSym) then SetTilde (P, VCGHeap); end if; end if; P := Cells.Null_Cell; else P := RightPtr (VCGHeap, P); end if; end loop; end SubstituteTwiddled; --------------------------------------------------------------------- -- More cell creation utilities --------------------------------------------------------------------- procedure Advance (NodeNmbr : in Natural) --# global in out StmtStack.S; --# derives StmtStack.S from *, --# NodeNmbr; is S : StmtStack.StmtRecord; begin S := StmtStack.Top; StmtStack.Pop; S.StmtNmbr := NodeNmbr; StmtStack.Push (S); end Advance; ------------------------------------------------------------------------ -- function return true if VarSym is visible in Ada context from Scope function IsDirectlyVisible (VarSym : Dictionary.Symbol; Scope : Dictionary.Scopes) return Boolean --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; is Result : Boolean; begin -- first check to see if we can see the VarSym directly without prefixing Result := Dictionary.LookupItem (Name => Dictionary.GetSimpleName (VarSym), Scope => Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False) = VarSym; -- if not look it up in the package it belongs to if not Result then Result := Dictionary.LookupSelectedItem (Dictionary.GetRegion (Dictionary.GetScope (VarSym)), Dictionary.GetSimpleName (VarSym), Scope, Dictionary.ProgramContext) = VarSym; end if; return Result; end IsDirectlyVisible; --883 procedure ConjoinParamConstraint (Type_Sym : in Dictionary.Symbol; Var_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Consider_Always_Valid : in Boolean; DAGCell : in out Cells.Cell) --# global in out ContainsReals; --# in out Dictionary.Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out VCGFailure; --# in out VCGHeap; --# derives ContainsReals, --# DAGCell, --# Dictionary.Dict, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# VCGFailure from *, --# Consider_Always_Valid, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# Type_Sym, --# Var_Sym, --# VCGHeap & --# Statistics.TableUsage, --# VCGHeap from *, --# Consider_Always_Valid, --# DAGCell, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# Type_Sym, --# Var_Sym, --# VCGHeap; is Constr, VarCell : Cells.Cell; begin CreateReferenceCell (VarCell, VCGHeap, Var_Sym); Type_Constraint.Make (The_Type => Type_Sym, The_Expression => VarCell, Scope => Scope, Consider_Always_Valid => Consider_Always_Valid, The_Constraint => Constr, VCG_Heap => VCGHeap, VC_Contains_Reals => ContainsReals, VC_Failure => VCGFailure); if not Cells.Is_Null_Cell (Constr) then Cells.Utility.Conjoin (VCGHeap, Constr, DAGCell); end if; end ConjoinParamConstraint; procedure AssumeTypesOfFormalImportParams (Abstraction : in Dictionary.Abstractions; SubProg : in Dictionary.Symbol; Scope : in Dictionary.Scopes; DAGCell : in out Cells.Cell) --# global in out ContainsReals; 
--# in out Dictionary.Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out VCGFailure; --# in out VCGHeap; --# derives ContainsReals, --# DAGCell, --# Dictionary.Dict, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Statistics.TableUsage, --# VCGFailure, --# VCGHeap from *, --# Abstraction, --# DAGCell, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# SubProg, --# VCGHeap; is NumOfParams : Natural; FormalParam : Dictionary.Symbol; Type_Sym : Dictionary.Symbol; begin NumOfParams := Dictionary.GetNumberOfSubprogramParameters (SubProg); for ParamNum in Natural range 1 .. NumOfParams loop FormalParam := Dictionary.GetSubprogramParameter (SubProg, ParamNum); if Dictionary.IsFunction (SubProg) or else Dictionary.IsImport (Abstraction, SubProg, FormalParam) then Type_Sym := Dictionary.GetType (FormalParam); -- suppress assumption for private types if not Dictionary.IsPrivateType (Type_Sym, Scope) or else Dictionary.IsPredefinedTimeType (Type_Sym) then ConjoinParamConstraint (Type_Sym, FormalParam, Scope, False, DAGCell); end if; end if; end loop; end AssumeTypesOfFormalImportParams; procedure AssumeTypesOfImportGlobals (Abstraction : in Dictionary.Abstractions; SubProg : in Dictionary.Symbol; Scope : in Dictionary.Scopes; DAGCell : in out Cells.Cell) --# global in CommandLineData.Content; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out VCGFailure; --# in out VCGHeap; --# derives ContainsReals, --# DAGCell, --# Dictionary.Dict, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Statistics.TableUsage, --# VCGFailure, --# VCGHeap from *, --# Abstraction, --# CommandLineData.Content, --# DAGCell, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# SubProg, --# VCGHeap; is It : Dictionary.Iterator; GlobalVar : Dictionary.Symbol; Type_Sym : Dictionary.Symbol; begin It := Dictionary.FirstGlobalVariable (Abstraction, SubProg); while not Dictionary.IsNullIterator (It) loop GlobalVar := Dictionary.CurrentSymbol (It); if Dictionary.IsFunction (SubProg) or else Dictionary.IsImport (Abstraction, SubProg, GlobalVar) then -- In mode stream are treated with normal variables, since we now -- use the MarkedValid bit in the symbol table to remember if they -- produce values in-type, or not. The test occurs as the -- hypotheses are generated, since it can vary subcomponent by -- subcomponent with in-mode record variables. if Dictionary.GetOwnVariableOrConstituentMode (GlobalVar) = Dictionary.DefaultMode or else (Dictionary.GetOwnVariableOrConstituentMode (GlobalVar) = Dictionary.InMode) then if IsDirectlyVisible (GlobalVar, Scope) then Type_Sym := Dictionary.GetType (GlobalVar); if not Dictionary.IsPrivateType (Type_Sym, Scope) or else Dictionary.IsPredefinedTimeType (Type_Sym) then -- Hack to avoid generating extra true hypotheses in most -- cases - now a true hypothesis will only be generated in -- record variables with some valid fields (new case, and -- hence OK), or when a whole stream in record is invalid. 
-- Stream-in invalid discretes are short-cut here if Dictionary.IsRecordTypeMark (Type_Sym, Scope) or else Dictionary.IsArrayTypeMark (Type_Sym, Scope) or else ((Dictionary.TypeIsScalar (Type_Sym) or else Dictionary.IsPredefinedTimeType (Type_Sym)) and then Dictionary.VariableOrSubcomponentIsMarkedValid (GlobalVar)) then ConjoinParamConstraint (Type_Sym, GlobalVar, Scope, True, DAGCell); end if; end if; end if; end if; end if; It := Dictionary.NextSymbol (It); end loop; end AssumeTypesOfImportGlobals; ------------------------------------------------------------------------ procedure ModelCheckStmt (Expn_Node : in STree.SyntaxNode) --# global in CommandLineData.Content; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in STree.Table; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives ContainsReals, --# Dictionary.Dict, --# LexTokenManager.State, --# VCGFailure from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Expn_Node, --# LexTokenManager.State, --# LoopStack, --# LScope, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Expn_Node, --# LexTokenManager.State, --# LoopStack, --# LScope, --# SPARK_IO.File_Sys, --# STree.Table, --# VCGHeap & --# Graph.Table, --# Statistics.TableUsage, --# StmtStack.S, --# VCGHeap from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Expn_Node, --# Graph.Table, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# StmtStack.S, --# STree.Table, --# VCGHeap; is Conjunct, DAG_Cell : Cells.Cell; DAG_Cell_Copy : Cells.Cell; Function_Defs : CStacks.Stack; begin CStacks.CreateStack (Function_Defs); Build_Annotation_Expression (Exp_Node => Expn_Node, Instantiated_Subprogram => Dictionary.NullSymbol, Scope => LScope, Calling_Scope => LScope, Force_Abstract => False, Loop_Stack => LoopStack, Generate_Function_Instantiations => True, VC_Failure => VCGFailure, VC_Contains_Reals => ContainsReals, VCG_Heap => VCGHeap, DAG_Root => DAG_Cell, Function_Defs => Function_Defs); -- Create a copy as the DAG is used in both the check and the assumption Structures.CopyStructure (Heap => VCGHeap, Root => DAG_Cell, RootCopy => DAG_Cell_Copy); if not CStacks.IsEmpty (Function_Defs) then -- Use null statement as place holder for function definitions ModelNullStmt (VCGHeap); -- Conjoin all the function definitions on the stack --# accept F, 10, Function_Defs, "The stack has been emptied"; Join_And (Stack => Function_Defs, Conjunct => Conjunct, VCG_Heap => VCGHeap); --# end accept; -- Assume the function definitions from the point of the null statement IncorporateAssumption (VCGHeap, Conjunct); end if; -- Use null statement to hold the check predicate ModelNullStmt (VCGHeap); Graph.Set_Proof_Context (X => Graph.Check_Statement); Graph.Set_Text_Line_Nmbr (X => LineNmbr); Graph.Set_Assertion_Locn (X => DAG_Cell); -- Use a null statement as a place holder for the assumption that the -- check predicate is true. 
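      -- Net effect, for a hypothetical "--# check X > 0;": one null statement carries the
      -- predicate as a proof obligation (the Check_Statement context set above), and the null
      -- statement created below lets the same predicate be assumed as a hypothesis for the
      -- VCs that follow it.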
ModelNullStmt (VCGHeap); IncorporateAssumption (VCGHeap, DAG_Cell_Copy); end ModelCheckStmt; ------------------------------------------------------------------------ procedure Model_Assume_Statement (Expn_Node : in STree.SyntaxNode) --# global in CommandLineData.Content; --# in LoopStack; --# in LScope; --# in STree.Table; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives ContainsReals, --# Dictionary.Dict, --# LexTokenManager.State, --# VCGFailure from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Expn_Node, --# LexTokenManager.State, --# LoopStack, --# LScope, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Expn_Node, --# LexTokenManager.State, --# LoopStack, --# LScope, --# SPARK_IO.File_Sys, --# STree.Table, --# VCGHeap & --# Graph.Table, --# Statistics.TableUsage, --# StmtStack.S, --# VCGHeap from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Expn_Node, --# Graph.Table, --# LexTokenManager.State, --# LoopStack, --# LScope, --# StmtStack.S, --# STree.Table, --# VCGHeap; is Conjunct, DAG_Cell : Cells.Cell; Function_Defs : CStacks.Stack; begin CStacks.CreateStack (Function_Defs); Build_Annotation_Expression (Exp_Node => Expn_Node, Instantiated_Subprogram => Dictionary.NullSymbol, Scope => LScope, Calling_Scope => LScope, Force_Abstract => False, Loop_Stack => LoopStack, Generate_Function_Instantiations => True, VC_Failure => VCGFailure, VC_Contains_Reals => ContainsReals, VCG_Heap => VCGHeap, DAG_Root => DAG_Cell, Function_Defs => Function_Defs); if not CStacks.IsEmpty (Function_Defs) then -- Use null statement as place holder for function definitions ModelNullStmt (VCGHeap); -- Conjoin all the function definitions on the stack --# accept F, 10, Function_Defs, "The stack has been emptied"; Join_And (Stack => Function_Defs, Conjunct => Conjunct, VCG_Heap => VCGHeap); --# end accept; -- Assume the function definitions from the point of the null statement IncorporateAssumption (VCGHeap, Conjunct); end if; -- Use a null statement as a place holder for the assumption -- that the assume predicate is true. 
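      -- In contrast to a check, a hypothetical "--# assume X in Index;" generates no proof
      -- obligation at all here: the predicate is only attached to the null statement created
      -- below as an assumption.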
ModelNullStmt (VCGHeap); IncorporateAssumption (VCGHeap, DAG_Cell); end Model_Assume_Statement; ------------------------------------------------------------------------- -- procedure used by ModelAssertStmt and CheckPlantAssert to provide RTC -- information about for loop counters procedure AssertForLoopCounterProperties (StartingScope : in Dictionary.Scopes; CellToConjoin : in out Cells.Cell) --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives CellToConjoin, --# Statistics.TableUsage, --# VCGHeap from *, --# CellToConjoin, --# Dictionary.Dict, --# LexTokenManager.State, --# StartingScope, --# VCGHeap; is LocalScope : Dictionary.Scopes; TheLoop : Dictionary.Symbol; LoopCounterCell : Cells.Cell; LoopCounterCheck : Cells.Cell; LoopCounterSym : Dictionary.Symbol; -- following used to assert that loop counter variable <= loop exit expression OpCell : Cells.Cell; ExitExpn : Cells.Cell; EntryExpn : Cells.Cell; -- following used to assert that vars used in for loop exit expn are in type ExitExpnVarsIt : Dictionary.Iterator; ExitVarInTypeCheck : Cells.Cell; VarInExitExpn : Cells.Cell; VarSym : Dictionary.Symbol; VarTypeSym : Dictionary.Symbol; -- Plant assertions that: -- (1) Loop counter is in its type (except Boolean) -- (2a) Loop counter is >= EntryExpn or <= EntryExpn (for reverse loops) (except for Boolean) -- (2b) Loop counter is <= ExitExpn or >= ExitExpn (for reverse loops) (except for Boolean) -- (2c) Loop counter is >= A'First if A is an unconstrained formal array parameter -- (3) That each variable that appears in the exit expression (and is therefore frozen in the -- loop body is in its type ----------------------------------------- procedure PossiblyCreateLoopCounterInitialValueAssertion (ExitExpn : in Cells.Cell; LoopCounterSym : in Dictionary.Symbol; CellToConjoin : in out Cells.Cell) --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives CellToConjoin, --# Statistics.TableUsage, --# VCGHeap from *, --# CellToConjoin, --# Dictionary.Dict, --# ExitExpn, --# LexTokenManager.State, --# LoopCounterSym, --# VCGHeap; is ConstraintSymbol : Dictionary.Symbol; ExitAttribute : LexTokenManager.Lex_String; CounterInitialValueAssertion : Cells.Cell; EntryAttribute : Cells.Cell; EntryExpression : Cells.Cell; EntryConstraint : Cells.Cell; LoopCounterCell : Cells.Cell; begin -- We first check that the exits condition is in terms of an attribute. Nothing else -- happens unless this is the case. 
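         -- Worked illustration (hypothetical code, names assumed): for a formal parameter A of
         -- an unconstrained array type and a loop
         --
         --    for I in Index range A'Range loop
         --
         -- the frozen exit expression is the 'Last attribute of A's parameter constraint, so the
         -- extra hypothesis "I >= A'First" (or "I <= A'Last" for a reverse loop) is built below
         -- to strengthen the plain subtype assertion on I.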
if Cells.Get_Kind (VCGHeap, ExitExpn) = Cell_Storage.Op and then (Cells.Get_Op_Symbol (VCGHeap, ExitExpn) = SP_Symbols.apostrophe) then -- Obtain the prefix of the attribute ConstraintSymbol := Cells.Get_Symbol_Value (VCGHeap, LeftPtr (VCGHeap, ExitExpn)); -- Again, nothing happens unless this prefix is a Dictionary.ParameterConstraintSymbol if Dictionary.IsParameterConstraint (ConstraintSymbol) then -- Obtain the attribute value used in the exit condition of the loop ExitAttribute := Cells.Get_Lex_Str (VCGHeap, RightPtr (VCGHeap, ExitExpn)); -- We now have everything we need to construct the additional constraint on the loop counter -- the prefix of the entry constraint is the same as that of the exit CreateFixedVarCell (EntryConstraint, VCGHeap, ConstraintSymbol); -- the attribute value is the "opposite" of that used in the exit expression if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => ExitAttribute, Lex_Str2 => LexTokenManager.First_Token) = LexTokenManager.Str_Eq then CreateAttribValueCell (EntryAttribute, VCGHeap, LexTokenManager.Last_Token); else CreateAttribValueCell (EntryAttribute, VCGHeap, LexTokenManager.First_Token); end if; -- the expression itself is the ' operator joining the prefix and the attribute CreateOpCell (EntryExpression, VCGHeap, SP_Symbols.apostrophe); -- Assemble attribute expression SetLeftArgument (EntryExpression, EntryConstraint, VCGHeap); SetRightArgument (EntryExpression, EntryAttribute, VCGHeap); -- the actual assertion is that the loop counter is >= this expresson (<= for reverse loops) if Dictionary.LoopParameterMovesInReverse (LoopCounterSym) then CreateOpCell (CounterInitialValueAssertion, VCGHeap, SP_Symbols.less_or_equal); else -- forward CreateOpCell (CounterInitialValueAssertion, VCGHeap, SP_Symbols.greater_or_equal); end if; CreateReferenceCell (LoopCounterCell, VCGHeap, LoopCounterSym); -- Construct the assertion SetLeftArgument (CounterInitialValueAssertion, LoopCounterCell, VCGHeap); SetRightArgument (CounterInitialValueAssertion, EntryExpression, VCGHeap); -- And add it to the overall assertion Cells.Utility.Conjoin (VCGHeap, CounterInitialValueAssertion, CellToConjoin); end if; end if; end PossiblyCreateLoopCounterInitialValueAssertion; ----------------------------------------- begin -- AssertForLoopCounterProperties LocalScope := StartingScope; TheLoop := Dictionary.GetRegion (LocalScope); while Dictionary.IsLoop (TheLoop) loop LoopCounterSym := Dictionary.GetLoopParameter (TheLoop); if not Dictionary.Is_Null_Symbol (LoopCounterSym) then if not Dictionary.TypeIsBoolean (Dictionary.GetType (LoopCounterSym)) then -- (1) ---------------------------------------------------------------------------- -- it's a for loop so add assertion that counter is in type (unless Boolean) CreateReferenceCell (LoopCounterCell, VCGHeap, LoopCounterSym); Type_Constraint.Process_Discrete (The_Type => Dictionary.GetType (LoopCounterSym), The_Expression => LoopCounterCell, The_Constraint => LoopCounterCheck, VCG_Heap => VCGHeap); Cells.Utility.Conjoin (VCGHeap, LoopCounterCheck, CellToConjoin); -- (2a) --------------------------------------------------------------------------- -- Loop counter >= EntryExpn (<= for reverse loops) if Dictionary.LoopParameterMovesInReverse (LoopCounterSym) then CreateOpCell (OpCell, VCGHeap, SP_Symbols.less_or_equal); else -- forward CreateOpCell (OpCell, VCGHeap, SP_Symbols.greater_or_equal); end if; CreateReferenceCell (LoopCounterCell, VCGHeap, LoopCounterSym); -- Recover entry expression put in Dict by 
AssignLoopCounterEntryValue EntryExpn := Cells.Cell (Dictionary.GetLoopEntryExpn (TheLoop)); -- There should always be an expression to get since it is planted by -- AssignLoopCounterEntryValue which is always called by StartForStmtModel. -- Therefore, the following assertion should never fail SystemErrors.RT_Assert (C => not Cells.Is_Null_Cell (EntryExpn), Sys_Err => SystemErrors.Assertion_Failure, Msg => "EntryExpn Null in AssertForLoopCounterProperties"); SetLeftArgument (OpCell, LoopCounterCell, VCGHeap); SetRightArgument (OpCell, EntryExpn, VCGHeap); Cells.Utility.Conjoin (VCGHeap, OpCell, CellToConjoin); -- (2b) --------------------------------------------------------------------------- -- Loop counter <= ExitExpn (>= for reverse loops) if Dictionary.LoopParameterMovesInReverse (LoopCounterSym) then CreateOpCell (OpCell, VCGHeap, SP_Symbols.greater_or_equal); else -- forward CreateOpCell (OpCell, VCGHeap, SP_Symbols.less_or_equal); end if; CreateReferenceCell (LoopCounterCell, VCGHeap, LoopCounterSym); -- Recover exit expression put in Dict by AssignLoopBoundsToExitVariable ExitExpn := Cells.Cell (Dictionary.GetLoopExitExpn (TheLoop)); -- There should always be an expression to get since it is planted by -- AssignLoopBoundsToExitVariable which is always called by StartForStmtModel. -- Therefore, the following assertion should never fail SystemErrors.RT_Assert (C => not Cells.Is_Null_Cell (ExitExpn), Sys_Err => SystemErrors.Assertion_Failure, Msg => "ExitExpn Null in AssertForLoopCounterProperties"); SetLeftArgument (OpCell, LoopCounterCell, VCGHeap); SetRightArgument (OpCell, ExitExpn, VCGHeap); Cells.Utility.Conjoin (VCGHeap, OpCell, CellToConjoin); -- (2c) --------------------------------------------------------------------------- -- if, and only if, the exit condition is in the form of an attribute of a -- Dictionary.ParameterConstraintSymbol then that indicates that we are looping -- over an unconstrained formal parameter. In this case, the assertion that the -- for loop counter is in its type (Step 1 above) and <= exit value (code of step 2 above) is -- too weak: we also know that the loop counter is >= the first value of the ParameterConstraintSymbol PossiblyCreateLoopCounterInitialValueAssertion (ExitExpn, LoopCounterSym, CellToConjoin); end if; -- Boolean loop counter -- (3) -------------------------------------------------------------------------------- -- Any variable used in the for loop exit expn must be in type too. -- We can do this check even if the for loop is over Boolean because it might still -- contain non-Boolean variables in its exit condition (hence this block is outside -- the Boolean loop counter if statement just above) -- (e.g. "for i in Boolean range False .. (X > Y)") -- Get each variable used in for loop exit expression. These have already been -- turned in to special % variables by FreezeExitExpn. 
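               -- Illustration (hypothetical loop, name assumed): in
               -- "for I in Positive range 1 .. N loop" the bound N is frozen on entry as N%, and
               -- the loop below asserts that each such frozen variable of a checkable discrete
               -- type is within its subtype.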
ExitExpnVarsIt := Dictionary.FirstLoopOnEntryVar (TheLoop); while not Dictionary.IsNullIterator (ExitExpnVarsIt) loop VarSym := Dictionary.CurrentSymbol (ExitExpnVarsIt); VarTypeSym := Dictionary.GetType (VarSym); -- build check for suitable scalars only if DiscreteTypeWithCheck (VarTypeSym, LocalScope) then CreateReferenceCell (VarInExitExpn, VCGHeap, VarSym); Type_Constraint.Process_Discrete (The_Type => Dictionary.GetType (VarSym), The_Expression => VarInExitExpn, The_Constraint => ExitVarInTypeCheck, VCG_Heap => VCGHeap); Cells.Utility.Conjoin (VCGHeap, ExitVarInTypeCheck, CellToConjoin); end if; ExitExpnVarsIt := Dictionary.NextSymbol (ExitExpnVarsIt); end loop; -- end of (3) -------------------------------------------- end if; -- its a for loop LocalScope := Dictionary.GetEnclosingScope (LocalScope); TheLoop := Dictionary.GetRegion (LocalScope); end loop; end AssertForLoopCounterProperties; ------------------------------------------------------------------------- procedure ModelAssertStmt (Expn_Node : in STree.SyntaxNode) --# global in CommandLineData.Content; --# in ImportConstraints; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in PreConstraints; --# in STree.Table; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives ContainsReals, --# Dictionary.Dict, --# LexTokenManager.State, --# VCGFailure from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Expn_Node, --# LexTokenManager.State, --# LoopStack, --# LScope, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Expn_Node, --# LexTokenManager.State, --# LoopStack, --# LScope, --# SPARK_IO.File_Sys, --# STree.Table, --# VCGHeap & --# Graph.Table, --# StmtStack.S, --# VCGHeap from CommandLineData.Content, --# Dictionary.Dict, --# Expn_Node, --# Graph.Table, --# ImportConstraints, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# PreConstraints, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# Statistics.TableUsage from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Expn_Node, --# Graph.Table, --# ImportConstraints, --# LexTokenManager.State, --# LoopStack, --# LScope, --# PreConstraints, --# StmtStack.S, --# STree.Table, --# VCGHeap; is Conjoined_Function_Defs : Cells.Cell; Copy_Conjoined_Function_Defs : Cells.Cell; Copy_Import_Constraints : Cells.Cell; Copy_Pre_Constraints : Cells.Cell; DAG_Cell : Cells.Cell; Function_Defs : CStacks.Stack; begin CStacks.CreateStack (Function_Defs); Build_Annotation_Expression (Exp_Node => Expn_Node, Instantiated_Subprogram => Dictionary.NullSymbol, Scope => LScope, Calling_Scope => LScope, Force_Abstract => False, Loop_Stack => LoopStack, Generate_Function_Instantiations => True, VC_Failure => VCGFailure, VC_Contains_Reals => ContainsReals, VCG_Heap => VCGHeap, DAG_Root => DAG_Cell, Function_Defs => Function_Defs); if not CStacks.IsEmpty (Function_Defs) then -- Use null statement as place holder for function definitions ModelNullStmt (VCGHeap); -- Conjoin all the function definitions on the stack --# accept F, 10, Function_Defs, "The stack has been emptied"; Join_And (Stack => Function_Defs, Conjunct => Conjoined_Function_Defs, VCG_Heap => VCGHeap); --# end accept; -- Assume the function 
definitions from the point of the null statement IncorporateAssumption (VCGHeap, Conjoined_Function_Defs); else Conjoined_Function_Defs := Cells.Null_Cell; end if; -- Use null statement to hold the assertion ModelNullStmt (VCGHeap); Graph.Set_Proof_Context (X => Graph.Assertion); Graph.Set_Text_Line_Nmbr (X => LineNmbr); if not Cells.Is_Null_Cell (ImportConstraints) then -- propagate import constraints in assertion Structures.CopyStructure (VCGHeap, ImportConstraints, Copy_Import_Constraints); Cells.Utility.Conjoin (VCGHeap, Copy_Import_Constraints, DAG_Cell); end if; if not Cells.Is_Null_Cell (PreConstraints) then -- propagate precondition in assertion Structures.CopyStructure (VCGHeap, PreConstraints, Copy_Pre_Constraints); Cells.Utility.Conjoin (VCGHeap, Copy_Pre_Constraints, DAG_Cell); end if; if not Cells.Is_Null_Cell (Conjoined_Function_Defs) then -- propagate function defs in assertion Structures.CopyStructure (VCGHeap, Conjoined_Function_Defs, Copy_Conjoined_Function_Defs); Cells.Utility.Conjoin (VCGHeap, Copy_Conjoined_Function_Defs, DAG_Cell); end if; -- see if we have a for loop and append loop counter in range info if so AssertForLoopCounterProperties (LScope, DAG_Cell); Graph.Set_Assertion_Locn (X => DAG_Cell); ModelNullStmt (VCGHeap); end ModelAssertStmt; ------------------------------------------------------------------------ procedure ModelAssignmentStmt --# global in CommandLineData.Content; --# in DoAssumeLocalRvalues; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in Node; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Node, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Node, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# VCGHeap; is separate; ------------------------------------------------------------------------ procedure ModelInitialisedVariables --# global in CommandLineData.Content; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in Scope; --# in STree.Table; --# in SubprogSym; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in 
out VCGHeap; --# derives CheckStack, --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# SubprogSym, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# SubprogSym, --# VCGHeap; is It : Dictionary.Iterator; procedure ModelOneVar (VarSym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VarSym, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# VarSym, --# VCGHeap; is DAGRoot, VarCell, StmtCell, ModList : Cells.Cell; StmtLabel : Labels.Label; begin -- ModelOneVar PrepareLabel (VCGHeap, StmtLabel, StmtCell); Clists.CreateList (VCGHeap, ModList); CreateModifiedCell (VarCell, VCGHeap, VarSym); Clists.AppendCell (VCGHeap, VarCell, ModList); BuildExpnDAG (StartNode => STree.RefToNode (Dictionary.GetVariableExpNode (VarSym)), ExpnScope => LScope, Scope => Scope, LineNmbr => LineNmbr, DoRtc => False, AssumeRvalues => False, LoopStack => LoopStack, FlowHeap => FlowHeap, VCGHeap => VCGHeap, ContainsReals => ContainsReals, VCGFailure => VCGFailure, ShortCircuitStack => ShortCircuitStack, CheckStack => CheckStack, KindOfStackedCheck => KindOfStackedCheck, DAGRoot => DAGRoot); SetRightArgument (VarCell, DAGRoot, VCGHeap); SetAuxPtr (StmtCell, ModList, VCGHeap); Chain (StmtLabel, VCGHeap); end ModelOneVar; begin -- ModelInitialisedVariables It := Dictionary.First_Initialized_Variable (Subprogram => SubprogSym); while not Dictionary.IsNullIterator (It) loop ModelOneVar (Dictionary.CurrentSymbol (It)); It := Dictionary.NextSymbol (It); end loop; end 
ModelInitialisedVariables; -------------------------------------------------------------------- procedure ModelProcedureCall --# global in CommandLineData.Content; --# in DoAssumeLocalRvalues; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in Node; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out SubprogramCalls; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# ContainsReals, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGHeap from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Node, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# SubprogramCalls, --# VCGHeap & --# Dictionary.Dict, --# FlowHeap, --# VCGFailure from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Node, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Node, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# SubprogramCalls from *; is separate; ------------------------------------------------------------------------ procedure CreateSwitch (DAGRoot : in Cells.Cell; FalseBranchLabel, TrueBranchLabel : out Labels.Label) --# global in out Statistics.TableUsage; --# in out VCGHeap; --# derives FalseBranchLabel, --# VCGHeap from DAGRoot, --# VCGHeap & --# Statistics.TableUsage from *, --# DAGRoot, --# VCGHeap & --# TrueBranchLabel from VCGHeap; is LocalTrueBranchLabel : Labels.Label; DAGRootCopy, FalseBranchLabelHead, TrueBranchPairCell, Negation : Cells.Cell; begin PrepareLabel (VCGHeap, LocalTrueBranchLabel, TrueBranchPairCell); TrueBranchLabel := LocalTrueBranchLabel; SetRightArgument (TrueBranchPairCell, DAGRoot, VCGHeap); Structures.CopyStructure (VCGHeap, Labels.LabelHead (LocalTrueBranchLabel), FalseBranchLabelHead); FalseBranchLabel := Labels.CellToLabel (FalseBranchLabelHead); DAGRootCopy := RightPtr (VCGHeap, LeftPtr (VCGHeap, FalseBranchLabelHead)); CreateOpCell (Negation, VCGHeap, SP_Symbols.RWnot); SetRightArgument (Negation, DAGRootCopy, VCGHeap); SetRightArgument (LeftPtr (VCGHeap, FalseBranchLabelHead), Negation, VCGHeap); end CreateSwitch; procedure StartIfModel --# global in CommandLineData.Content; --# in DoAssumeLocalRvalues; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in Node; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out 
KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Node, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Node, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# VCGHeap; is DAGRoot : Cells.Cell; FalseBranchLabel, TrueBranchLabel : Labels.Label; R : StmtStack.StmtRecord; Predecessor : Graph.Matrix_Index; begin BuildExpnDAG (StartNode => Node, ExpnScope => LScope, Scope => Scope, LineNmbr => LineNmbr, DoRtc => True, AssumeRvalues => DoAssumeLocalRvalues, LoopStack => LoopStack, FlowHeap => FlowHeap, VCGHeap => VCGHeap, ContainsReals => ContainsReals, VCGFailure => VCGFailure, ShortCircuitStack => ShortCircuitStack, CheckStack => CheckStack, KindOfStackedCheck => KindOfStackedCheck, DAGRoot => DAGRoot); UnStackRtcs (LineNmbr, VCGHeap, CheckStack, KindOfStackedCheck); CreateSwitch (DAGRoot, FalseBranchLabel, TrueBranchLabel); Predecessor := StmtStack.Top.StmtNmbr; -- push record holding nmbr of tail (exit) of if-statement; Graph.Inc_Nmbr_Of_Stmts; R.StmtNmbr := Graph.Get_Nmbr_Of_Stmts; R.Kind := StmtStack.IfStart; StmtStack.Push (R); -- push False branch; Graph.Inc_Nmbr_Of_Stmts; Graph.Create_Coeff (Heap => VCGHeap, I => Predecessor, J => Graph.Get_Nmbr_Of_Stmts, K => FalseBranchLabel); R.StmtNmbr := Graph.Get_Nmbr_Of_Stmts; R.Kind := StmtStack.IfFalseBranch; StmtStack.Push (R); -- push True branch; Graph.Inc_Nmbr_Of_Stmts; Graph.Create_Coeff (Heap => VCGHeap, I => Predecessor, J => Graph.Get_Nmbr_Of_Stmts, K => TrueBranchLabel); R.StmtNmbr := Graph.Get_Nmbr_Of_Stmts; R.Kind := StmtStack.IfTrueBranch; StmtStack.Push (R); end StartIfModel; procedure StartElseModel --# global in out Graph.Table; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGHeap; --# derives Graph.Table, --# Statistics.TableUsage from *, --# StmtStack.S, --# VCGHeap & --# StmtStack.S from * & --# VCGHeap from *, --# Graph.Table, --# StmtStack.S; is NullStmtLabel : Labels.Label; R : StmtStack.StmtRecord; InitialEndPoint, IfTail : Graph.Matrix_Index; begin if StmtStack.Top.Kind = StmtStack.LoopExit then StmtStack.Pop; else -- Here the stmtstack contains the final node of the last if or elsif branch. 
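-- Sketch of the StmtStack layout expected at this point, inferred from the pops below
-- (illustrative, top of stack first):
--    1. end node of the if/elsif branch just completed   (read into InitialEndPoint, then popped)
--    2. record for the else branch about to be modelled  (popped and pushed back unchanged)
--    3. join node (tail) of the whole if-statement        (read into IfTail)
-- The null arc created below links node 1 to node 3.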
InitialEndPoint := StmtStack.Top.StmtNmbr; -- tail of if-branch; StmtStack.Pop; -- eject if-branch data; R := StmtStack.Top; -- temporarily remove and store else-branch, StmtStack.Pop; -- to expose if-statement tail; IfTail := StmtStack.Top.StmtNmbr; StmtStack.Push (R); CreateUnitLabel (NullStmtLabel, VCGHeap); Graph.Create_Coeff (Heap => VCGHeap, I => InitialEndPoint, J => IfTail, K => NullStmtLabel); end if; end StartElseModel; procedure StartElsIfModel --# global in CommandLineData.Content; --# in DoAssumeLocalRvalues; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in Node; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Node, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Node, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# VCGHeap; is DAGRoot : Cells.Cell; FalseBranchLabel, TrueBranchLabel, NullStmtLabel : Labels.Label; R : StmtStack.StmtRecord; IfTail, InitialEndPoint, Predecessor : Graph.Matrix_Index; begin -- Here the stmtstack contains the final node of the last if or elsif branch. 
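-- Illustrative summary of StartElsIfModel (a sketch, not normative): for
--    if C1 then ... elsif C2 then ...
-- the statements below first join the end of the C1 branch to the if-statement tail with a
-- null arc, and then attach two new arcs, labelled "C2" and "not C2", to the node that is
-- reached when C1 evaluates to False, re-using CreateSwitch just as StartIfModel does.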
InitialEndPoint := StmtStack.Top.StmtNmbr; -- tail of current if- or elsif-branch; StmtStack.Pop; -- eject if-branch data; R := StmtStack.Top; -- temporarily remove and store else-branch, StmtStack.Pop; -- to expose if-statement tail; IfTail := StmtStack.Top.StmtNmbr; StmtStack.Push (R); CreateUnitLabel (NullStmtLabel, VCGHeap); Graph.Create_Coeff (Heap => VCGHeap, I => InitialEndPoint, J => IfTail, K => NullStmtLabel); BuildExpnDAG (StartNode => Node, ExpnScope => LScope, Scope => Scope, LineNmbr => LineNmbr, DoRtc => True, AssumeRvalues => DoAssumeLocalRvalues, LoopStack => LoopStack, FlowHeap => FlowHeap, VCGHeap => VCGHeap, ContainsReals => ContainsReals, VCGFailure => VCGFailure, ShortCircuitStack => ShortCircuitStack, CheckStack => CheckStack, KindOfStackedCheck => KindOfStackedCheck, DAGRoot => DAGRoot); UnStackRtcs (LineNmbr, VCGHeap, CheckStack, KindOfStackedCheck); CreateSwitch (DAGRoot, FalseBranchLabel, TrueBranchLabel); Predecessor := StmtStack.Top.StmtNmbr; StmtStack.Pop; -- push False branch; Graph.Inc_Nmbr_Of_Stmts; Graph.Create_Coeff (Heap => VCGHeap, I => Predecessor, J => Graph.Get_Nmbr_Of_Stmts, K => FalseBranchLabel); R.StmtNmbr := Graph.Get_Nmbr_Of_Stmts; R.Kind := StmtStack.IfFalseBranch; StmtStack.Push (R); -- push True branch; Graph.Inc_Nmbr_Of_Stmts; Graph.Create_Coeff (Heap => VCGHeap, I => Predecessor, J => Graph.Get_Nmbr_Of_Stmts, K => TrueBranchLabel); R.StmtNmbr := Graph.Get_Nmbr_Of_Stmts; R.Kind := StmtStack.IfTrueBranch; StmtStack.Push (R); end StartElsIfModel; procedure StartCaseModel --# global in CommandLineData.Content; --# in DoAssumeLocalRvalues; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in Node; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Node, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Node, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# VCGHeap; is R : StmtStack.StmtRecord; Predecessor : Graph.Matrix_Index; CaseExpn : Cells.Cell; begin Predecessor := StmtStack.Top.StmtNmbr; -- push record holding nmbr of exit of case-statement; Graph.Inc_Nmbr_Of_Stmts; --263 R.StmtNmbr := Graph.Get_Nmbr_Of_Stmts; -- ignore flow error about undefined R R.Kind := StmtStack.CaseExit; StmtStack.Push (R); -- push record holding nmbr of start of case-statement; R.StmtNmbr := Predecessor; R.Kind := StmtStack.CaseStart; StmtStack.Push (R); -- 
Build expn DAG just to generate VCs; so Examiner will detect -- that assignment to CaseExpn is ineffective. --# accept F, 10, CaseExpn, "CaseExpn unused here" & --# F, 33, CaseExpn, "CaseExpn unused here"; BuildExpnDAG (StartNode => Node, ExpnScope => LScope, Scope => Scope, LineNmbr => LineNmbr, DoRtc => True, AssumeRvalues => DoAssumeLocalRvalues, LoopStack => LoopStack, FlowHeap => FlowHeap, VCGHeap => VCGHeap, ContainsReals => ContainsReals, VCGFailure => VCGFailure, ShortCircuitStack => ShortCircuitStack, CheckStack => CheckStack, KindOfStackedCheck => KindOfStackedCheck, DAGRoot => CaseExpn); UnStackRtcs (LineNmbr, VCGHeap, CheckStack, KindOfStackedCheck); end StartCaseModel; -- Expect CaseExpn unused procedure ModelAlternative --# global in CommandLineData.Content; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in Node; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Node, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Node, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# VCGHeap; is AlternativeLabel, UnitLabel : Labels.Label; AlternativeDAG, CaseExpnDAG, ChoiceDAG, CopyCaseExpnDAG, PairCell : Cells.Cell; FirstChoice : Boolean; LocalNode : STree.SyntaxNode; R, S : StmtStack.StmtRecord; procedure BuildChoiceDAG (CaseExpnDAG : in Cells.Cell; CaseChoiceNode : in STree.SyntaxNode; ChoiceDAG : out Cells.Cell) --# global in CommandLineData.Content; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# CaseChoiceNode, --# CaseExpnDAG, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Scope, --# ShortCircuitStack, --# StmtStack.S, 
--# STree.Table, --# VCGHeap & --# ChoiceDAG from CaseChoiceNode, --# CaseExpnDAG, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CaseChoiceNode, --# CaseExpnDAG, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# VCGHeap; is CaseChoice, CopyCaseExpnDAG, FirstCell, LastCell, FirstApost, SecondApost, LTECell, GTECell, LocalDAG : Cells.Cell; procedure BuildDiscreteRange (LHSnode : in STree.SyntaxNode; RangeDAG : out Cells.Cell) --# global in CaseExpnDAG; --# in CommandLineData.Content; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# CaseExpnDAG, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LHSnode, --# LineNmbr, --# LoopStack, --# LScope, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CaseExpnDAG, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LHSnode, --# LineNmbr, --# LoopStack, --# LScope, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# RangeDAG from CaseExpnDAG, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LHSnode, --# LineNmbr, --# LoopStack, --# LScope, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap; is CaseChoice, CopyCaseExpnDAG, LTECell, GTECell, LocalDAG : Cells.Cell; begin -- BuildDiscreteRange Structures.CopyStructure (VCGHeap, CaseExpnDAG, CopyCaseExpnDAG); CreateOpCell (GTECell, VCGHeap, SP_Symbols.greater_or_equal); SetLeftArgument (GTECell, CaseExpnDAG, VCGHeap); -- This call to BuildExpnDAG will not generate any checks. 
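-- Illustrative sketch (assumed example): for a case choice such as "when 1 .. 10 =>" with
-- case expression E, the DAG assembled here has the shape
--    (E >= 1) and (E <= 10)
-- using the original case-expression DAG for one comparison and a copy of it for the other.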
BuildExpnDAG (StartNode => LHSnode, ExpnScope => LScope, Scope => Scope, LineNmbr => LineNmbr, DoRtc => False, AssumeRvalues => False, LoopStack => LoopStack, FlowHeap => FlowHeap, VCGHeap => VCGHeap, ContainsReals => ContainsReals, VCGFailure => VCGFailure, ShortCircuitStack => ShortCircuitStack, CheckStack => CheckStack, KindOfStackedCheck => KindOfStackedCheck, DAGRoot => CaseChoice); SetRightArgument (GTECell, CaseChoice, VCGHeap); CreateOpCell (LTECell, VCGHeap, SP_Symbols.less_or_equal); SetLeftArgument (LTECell, CopyCaseExpnDAG, VCGHeap); -- This call to BuildExpnDAG will not generate any checks. BuildExpnDAG (StartNode => STree.Next_Sibling (Current_Node => LHSnode), ExpnScope => LScope, Scope => Scope, LineNmbr => LineNmbr, DoRtc => False, AssumeRvalues => False, LoopStack => LoopStack, FlowHeap => FlowHeap, VCGHeap => VCGHeap, ContainsReals => ContainsReals, VCGFailure => VCGFailure, ShortCircuitStack => ShortCircuitStack, CheckStack => CheckStack, KindOfStackedCheck => KindOfStackedCheck, DAGRoot => CaseChoice); SetRightArgument (LTECell, CaseChoice, VCGHeap); CreateOpCell (LocalDAG, VCGHeap, SP_Symbols.RWand); SetLeftArgument (LocalDAG, GTECell, VCGHeap); SetRightArgument (LocalDAG, LTECell, VCGHeap); RangeDAG := LocalDAG; end BuildDiscreteRange; begin -- BuildChoiceDAG -- CaseChoiceNode is a derivative of case_choice, of one of the following forms: -- simple_expression -- simple_expression range_constraint -- simple_expression double_dot simple_expression if STree.Next_Sibling (Current_Node => CaseChoiceNode) = STree.NullNode then -- choice is a simple_expression; -- it might be a type mark or a value -- This call to BuildExpnDAG will not generate any checks. BuildExpnDAG (StartNode => CaseChoiceNode, ExpnScope => LScope, Scope => Scope, LineNmbr => LineNmbr, DoRtc => False, AssumeRvalues => False, LoopStack => LoopStack, FlowHeap => FlowHeap, VCGHeap => VCGHeap, ContainsReals => ContainsReals, VCGFailure => VCGFailure, ShortCircuitStack => ShortCircuitStack, CheckStack => CheckStack, KindOfStackedCheck => KindOfStackedCheck, DAGRoot => CaseChoice); -- if the choice is a type mark then CaseChoice will be a Fixed_Var cell and its Sym will -- be a type mark symbol.
Such choice needs a range model, any other is equality if Cells.Get_Kind (VCGHeap, CaseChoice) = Cell_Storage.Fixed_Var and then Dictionary.IsTypeMark (Cells.Get_Symbol_Value (VCGHeap, CaseChoice)) then -- process as range Structures.CopyStructure (VCGHeap, CaseExpnDAG, CopyCaseExpnDAG); CreateOpCell (FirstApost, VCGHeap, SP_Symbols.apostrophe); CreateOpCell (SecondApost, VCGHeap, SP_Symbols.apostrophe); CreateCellKind (FirstCell, VCGHeap, Cell_Storage.Attrib_Value); CreateCellKind (LastCell, VCGHeap, Cell_Storage.Attrib_Value); Cells.Set_Lex_Str (VCGHeap, FirstCell, LexTokenManager.First_Token); Cells.Set_Lex_Str (VCGHeap, LastCell, LexTokenManager.Last_Token); SetRightArgument (FirstApost, FirstCell, VCGHeap); SetLeftArgument (FirstApost, CaseChoice, VCGHeap); SetRightArgument (SecondApost, LastCell, VCGHeap); SetLeftArgument (SecondApost, CaseChoice, VCGHeap); CreateOpCell (GTECell, VCGHeap, SP_Symbols.greater_or_equal); CreateOpCell (LTECell, VCGHeap, SP_Symbols.less_or_equal); SetRightArgument (GTECell, FirstApost, VCGHeap); SetLeftArgument (GTECell, CaseExpnDAG, VCGHeap); SetRightArgument (LTECell, SecondApost, VCGHeap); SetLeftArgument (LTECell, CopyCaseExpnDAG, VCGHeap); CreateOpCell (LocalDAG, VCGHeap, SP_Symbols.RWand); SetRightArgument (LocalDAG, GTECell, VCGHeap); SetLeftArgument (LocalDAG, LTECell, VCGHeap); else -- process as equality CreateOpCell (LocalDAG, VCGHeap, SP_Symbols.equals); SetLeftArgument (LocalDAG, CaseExpnDAG, VCGHeap); SetRightArgument (LocalDAG, CaseChoice, VCGHeap); end if; elsif STree.Syntax_Node_Type (Node => STree.Next_Sibling (Current_Node => CaseChoiceNode)) = SP_Symbols.range_constraint then -- of the form "subtypemark range rangeconstraint" -- in this case the type mark adds no extra information so we just need to process -- the range. -- -- The range can grammatically be a range attribute or simpex .. 
simpex; since it -- must be static in a case statement we can assume it is the latter and just dig -- down to the LHS simple_expression BuildDiscreteRange (STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => STree.Next_Sibling (Current_Node => CaseChoiceNode))), -- to get LocalDAG); else -- choice is of form simple_expression double_dot simple_expression BuildDiscreteRange (CaseChoiceNode, -- to get LocalDAG); end if; ChoiceDAG := LocalDAG; end BuildChoiceDAG; procedure BindChoiceDAGs --# global in ChoiceDAG; --# in FirstChoice; --# in out AlternativeDAG; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives AlternativeDAG from ChoiceDAG, --# FirstChoice, --# VCGHeap & --# Statistics.TableUsage from *, --# FirstChoice, --# VCGHeap & --# VCGHeap from *, --# AlternativeDAG, --# ChoiceDAG, --# FirstChoice; is OrCell : Cells.Cell; begin if FirstChoice then AlternativeDAG := ChoiceDAG; else CreateOpCell (OrCell, VCGHeap, SP_Symbols.RWor); SetLeftArgument (OrCell, ChoiceDAG, VCGHeap); SetRightArgument (OrCell, AlternativeDAG, VCGHeap); AlternativeDAG := OrCell; end if; end BindChoiceDAGs; begin -- ModelAlternative if StmtStack.Top.Kind = StmtStack.CaseBranch then -- close existing alternative; R := StmtStack.Top; -- existing alternative; StmtStack.Pop; S := StmtStack.Top; -- start of case_statement; StmtStack.Pop; -- top of stack holds exit of case-statement; CreateUnitLabel (UnitLabel, VCGHeap); Graph.Create_Coeff (Heap => VCGHeap, I => R.StmtNmbr, J => StmtStack.Top.StmtNmbr, K => UnitLabel); StmtStack.Push (S); end if; -- start new alternative; -- top of stack holds start of case-statement; -- Node is case_statement_alternative_rep; PrepareLabel (VCGHeap, AlternativeLabel, PairCell); LocalNode := Node; loop exit when STree.Syntax_Node_Type (Node => LocalNode) = SP_Symbols.case_statement; LocalNode := STree.Parent_Node (Current_Node => LocalNode); end loop; LocalNode := STree.Child_Node (Current_Node => LocalNode); -- LocalNode is case expression node; -- Do not generate checks with this call BuildExpnDAG (StartNode => LocalNode, ExpnScope => LScope, Scope => Scope, LineNmbr => LineNmbr, DoRtc => False, AssumeRvalues => False, LoopStack => LoopStack, FlowHeap => FlowHeap, VCGHeap => VCGHeap, ContainsReals => ContainsReals, VCGFailure => VCGFailure, ShortCircuitStack => ShortCircuitStack, CheckStack => CheckStack, KindOfStackedCheck => KindOfStackedCheck, DAGRoot => CaseExpnDAG); LocalNode := STree.Child_Node (Current_Node => Node); -- LocalNode is case_statement_alternative_rep; FirstChoice := True; AlternativeDAG := Cells.Null_Cell; -- to avoid DFA on first call to BuildChoiceDAGs loop exit when STree.Syntax_Node_Type (Node => LocalNode) = SP_Symbols.case_choice; Structures.CopyStructure (VCGHeap, CaseExpnDAG, CopyCaseExpnDAG); BuildChoiceDAG (CopyCaseExpnDAG, STree.Child_Node (Current_Node => STree.Next_Sibling (Current_Node => LocalNode)), ChoiceDAG); BindChoiceDAGs; FirstChoice := False; LocalNode := STree.Child_Node (Current_Node => LocalNode); end loop; BuildChoiceDAG (CaseExpnDAG, STree.Child_Node (Current_Node => LocalNode), ChoiceDAG); BindChoiceDAGs; SetRightArgument (PairCell, AlternativeDAG, VCGHeap); Graph.Inc_Nmbr_Of_Stmts; Graph.Create_Coeff (Heap => VCGHeap, I => StmtStack.Top.StmtNmbr, J => Graph.Get_Nmbr_Of_Stmts, K => AlternativeLabel); R := StmtStack.StmtRecord'(Graph.Get_Nmbr_Of_Stmts, StmtStack.CaseBranch); StmtStack.Push (R); end ModelAlternative; procedure ModelOthersPart --# global in Dictionary.Dict; --# in out Graph.Table; --# in out 
Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGHeap; --# derives Graph.Table, --# Statistics.TableUsage, --# StmtStack.S, --# VCGHeap from *, --# Dictionary.Dict, --# Graph.Table, --# StmtStack.S, --# VCGHeap; is OthersLabel, UnitLabel : Labels.Label; OthersDAG, PairCell : Cells.Cell; R, S : StmtStack.StmtRecord; procedure FormOthersDAG --# global in Dictionary.Dict; --# in Graph.Table; --# in StmtStack.S; --# in out Statistics.TableUsage; --# in out VCGHeap; --# out OthersDAG; --# derives OthersDAG, --# VCGHeap from Dictionary.Dict, --# Graph.Table, --# StmtStack.S, --# VCGHeap & --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Graph.Table, --# StmtStack.S, --# VCGHeap; is FirstLabel : Boolean; CopyDAG, NewDAG : Cells.Cell; begin OthersDAG := Cells.Null_Cell; -- in case we fall right through proc FirstLabel := True; for I in Graph.Matrix_Index range 1 .. Graph.Get_Nmbr_Of_Stmts loop if not Labels.IsNull (Graph.Coefficient (VCGHeap, StmtStack.Top.StmtNmbr, I)) then Structures.CopyStructure (VCGHeap, Cells.Get_B_Ptr (VCGHeap, Pairs.PairHead (Labels.FirstPair (VCGHeap, Graph.Coefficient (VCGHeap, StmtStack.Top.StmtNmbr, I)))), CopyDAG); CreateOpCell (NewDAG, VCGHeap, SP_Symbols.RWnot); SetRightArgument (NewDAG, CopyDAG, VCGHeap); if FirstLabel then FirstLabel := False; OthersDAG := NewDAG; else Cells.Utility.Conjoin (VCGHeap, NewDAG, OthersDAG); end if; end if; end loop; end FormOthersDAG; begin -- ModelOthersPart if StmtStack.Top.Kind = StmtStack.CaseBranch then -- close existing alternative; R := StmtStack.Top; -- existing alternative; StmtStack.Pop; S := StmtStack.Top; -- start of case_statement; StmtStack.Pop; -- top of stack holds exit of case-statement; CreateUnitLabel (UnitLabel, VCGHeap); Graph.Create_Coeff (Heap => VCGHeap, I => R.StmtNmbr, J => StmtStack.Top.StmtNmbr, K => UnitLabel); StmtStack.Push (S); end if; -- start others part; -- top of stack holds start of case-statement; PrepareLabel (VCGHeap, OthersLabel, PairCell); FormOthersDAG; SetRightArgument (PairCell, OthersDAG, VCGHeap); Graph.Inc_Nmbr_Of_Stmts; Graph.Create_Coeff (Heap => VCGHeap, I => StmtStack.Top.StmtNmbr, J => Graph.Get_Nmbr_Of_Stmts, K => OthersLabel); R := StmtStack.StmtRecord'(Graph.Get_Nmbr_Of_Stmts, StmtStack.CaseBranch); StmtStack.Push (R); end ModelOthersPart; ---------------------------------------------------------------------- -- Loops ---------------------------------------------------------------------- procedure StartLoopModel (Node : in STree.SyntaxNode) --# global in CommandLineData.Content; --# in DoAssumeLocalRvalues; --# in ImportConstraints; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in PreConstraints; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# FlowHeap, --# Graph.Table, --# ImportConstraints, --# KindOfStackedCheck, --# LexTokenManager.State, --# 
LineNmbr, --# LoopStack, --# LScope, --# Node, --# PreConstraints, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# ImportConstraints, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Node, --# PreConstraints, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# VCGHeap; is LocalNode, LoopInvariantNode : STree.SyntaxNode; LoopHead : Graph.Matrix_Index; R : StmtStack.StmtRecord; procedure StartLoopBody --# global in out Graph.Table; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGHeap; --# derives Graph.Table, --# StmtStack.S, --# VCGHeap from Graph.Table, --# StmtStack.S, --# VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap; is LoopHead : Graph.Matrix_Index; R : StmtStack.StmtRecord; StmtLabel : Labels.Label; begin R := StmtStack.Top; -- store loop tail record; StmtStack.Pop; -- pop loop tail to expose loop head; LoopHead := StmtStack.Top.StmtNmbr; StmtStack.Push (R); -- restore loop tail record; Graph.Inc_Nmbr_Of_Stmts; CreateUnitLabel (StmtLabel, VCGHeap); -- create null arc in body; Graph.Create_Coeff (Heap => VCGHeap, I => LoopHead, J => Graph.Get_Nmbr_Of_Stmts, K => StmtLabel); R.StmtNmbr := Graph.Get_Nmbr_Of_Stmts; -- create loop body record; R.Kind := StmtStack.Elementary; StmtStack.Push (R); end StartLoopBody; procedure ModelWhile --# global in CommandLineData.Content; --# in DoAssumeLocalRvalues; --# in LineNmbr; --# in LocalNode; --# in LoopStack; --# in LScope; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LocalNode, --# LoopStack, --# LScope, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LocalNode, --# LoopStack, --# LScope, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# VCGHeap; is DAGRoot : Cells.Cell; FalseBranchLabel, TrueBranchLabel : Labels.Label; R : StmtStack.StmtRecord; LoopTail, SwitchNode : Graph.Matrix_Index; begin BuildExpnDAG (StartNode => LocalNode, ExpnScope => LScope, Scope => Scope, LineNmbr => LineNmbr, DoRtc => True, AssumeRvalues => DoAssumeLocalRvalues, LoopStack => LoopStack, FlowHeap => FlowHeap, VCGHeap => VCGHeap, ContainsReals => 
ContainsReals, VCGFailure => VCGFailure, ShortCircuitStack => ShortCircuitStack, CheckStack => CheckStack, KindOfStackedCheck => KindOfStackedCheck, DAGRoot => DAGRoot); UnStackRtcs (LineNmbr, VCGHeap, CheckStack, KindOfStackedCheck); SwitchNode := StmtStack.Top.StmtNmbr; StmtStack.Pop; -- pop last statement in loop body; LoopTail := StmtStack.Top.StmtNmbr; CreateSwitch (DAGRoot, FalseBranchLabel, TrueBranchLabel); Graph.Create_Coeff (Heap => VCGHeap, I => SwitchNode, J => LoopTail, K => FalseBranchLabel); -- push True branch; Graph.Inc_Nmbr_Of_Stmts; Graph.Create_Coeff (Heap => VCGHeap, I => SwitchNode, J => Graph.Get_Nmbr_Of_Stmts, K => TrueBranchLabel); R := StmtStack.StmtRecord'(Graph.Get_Nmbr_Of_Stmts, StmtStack.Elementary); StmtStack.Push (R); end ModelWhile; function FindAssert (LoopNode : STree.SyntaxNode) return Boolean --# global in STree.Table; is LastNode, StartNode, LocNode, Node : STree.SyntaxNode; NodeType : SP_Symbols.SP_Symbol; foundAssert : Boolean; begin -- LoopNode is a loop_statement_opt -- set StartNode to parent loop node StartNode := STree.Parent_Node (Current_Node => LoopNode); if STree.Syntax_Node_Type (Node => StartNode) /= SP_Symbols.loop_statement then StartNode := STree.Parent_Node (Current_Node => StartNode); end if; foundAssert := False; Node := StartNode; loop ---------------------------down loop------------------------------ LastNode := Node; NodeType := STree.Syntax_Node_Type (Node => Node); case NodeType is when SP_Symbols.loop_statement => if Node = StartNode then Node := STree.Child_Node (Current_Node => Node); else -- skip over any nested loops Node := STree.NullNode; end if; when SP_Symbols.sequence_of_statements | SP_Symbols.simple_statement | SP_Symbols.compound_statement | SP_Symbols.elsif_part | SP_Symbols.else_part | SP_Symbols.statement | SP_Symbols.alternatives => Node := STree.Child_Node (Current_Node => Node); when SP_Symbols.simple_name | SP_Symbols.apragma | SP_Symbols.null_statement | SP_Symbols.assignment_statement | SP_Symbols.procedure_call_statement => Node := STree.NullNode; when SP_Symbols.proof_statement => if STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => Node)) = SP_Symbols.assert_statement then foundAssert := True; else Node := STree.NullNode; end if; when SP_Symbols.return_statement | SP_Symbols.exit_statement | SP_Symbols.delay_statement | SP_Symbols.condition => Node := STree.NullNode; when SP_Symbols.if_statement | SP_Symbols.case_statement => Node := STree.Child_Node (Current_Node => Node); LastNode := Node; Node := STree.NullNode; when SP_Symbols.case_statement_alternative => Node := STree.Child_Node (Current_Node => Node); Node := STree.Next_Sibling (Current_Node => Node); when SP_Symbols.others_part => if STree.Child_Node (Current_Node => Node) /= STree.NullNode then Node := STree.Child_Node (Current_Node => Node); else Node := STree.NullNode; end if; when SP_Symbols.loop_statement_opt => -- advance to iteration_scheme; LocNode := STree.Child_Node (Current_Node => Node); if LocNode /= STree.NullNode then -- loop has an iteration_scheme -- localnode is of type iteration_scheme if STree.Next_Sibling (Current_Node => LocNode) /= STree.NullNode then -- iteration_scheme has an invariant foundAssert := True; end if; end if; Node := STree.NullNode; when others => Node := STree.NullNode; end case; if not foundAssert and Node = STree.NullNode and LastNode /= StartNode then loop -----------------------up loop----------------------- Node := STree.Next_Sibling (Current_Node => LastNode); exit when Node /= 
STree.NullNode; Node := STree.Parent_Node (Current_Node => LastNode); exit when Node = STree.NullNode or Node = StartNode; LastNode := Node; end loop; ----------------------------up---------------- end if; exit when foundAssert or Node = STree.NullNode or Node = StartNode; end loop; ------------------down---------------------- return foundAssert; end FindAssert; procedure CheckPlantAssert --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in ImportConstraints; --# in LexTokenManager.State; --# in LineNmbr; --# in LoopStack; --# in Node; --# in PreConstraints; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGHeap; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# LineNmbr, --# Node, --# SPARK_IO.File_Sys, --# STree.Table & --# Graph.Table, --# StmtStack.S, --# VCGHeap from Dictionary.Dict, --# Graph.Table, --# ImportConstraints, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# Node, --# PreConstraints, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Graph.Table, --# ImportConstraints, --# LexTokenManager.State, --# LoopStack, --# Node, --# PreConstraints, --# StmtStack.S, --# STree.Table, --# VCGHeap; is CpPreConstraints, InsertedAssert : Cells.Cell; procedure DebugPrintInvariant --# derives ; is --# hide DebugPrintInvariant; begin if CommandLineData.Content.Debug.Invariants then SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Default loop invariant inserted: ", 0); SPARK_IO.Put_String (SPARK_IO.Standard_Output, " ", 0); DAG_IO.PrintDag (VCGHeap, SPARK_IO.Standard_Output, InsertedAssert, Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => LoopContext.CurrentLoopSym (LoopStack, VCGHeap)), DAG_IO.Default_Wrap_Limit); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); end if; end DebugPrintInvariant; begin -- CheckPlantAssert if not FindAssert (Node) then ErrorHandler.Add_Cut_Point (At_Line => LexTokenManager.Line_Numbers (LineNmbr)); ModelNullStmt (VCGHeap); Graph.Set_Proof_Context (X => Graph.Default_Assertion); Graph.Set_Text_Line_Nmbr (X => LineNmbr); if Cells.Is_Null_Cell (ImportConstraints) and Cells.Is_Null_Cell (PreConstraints) then CreateTrueCell (VCGHeap, InsertedAssert); elsif Cells.Is_Null_Cell (ImportConstraints) then Structures.CopyStructure (VCGHeap, PreConstraints, InsertedAssert); elsif Cells.Is_Null_Cell (PreConstraints) then Structures.CopyStructure (VCGHeap, ImportConstraints, InsertedAssert); else Structures.CopyStructure (VCGHeap, ImportConstraints, InsertedAssert); Structures.CopyStructure (VCGHeap, PreConstraints, CpPreConstraints); Cells.Utility.Conjoin (VCGHeap, CpPreConstraints, InsertedAssert); end if; -- if the loop being cut is a for a loop (including nested loops) then assert -- that the loop counter must be in its type AssertForLoopCounterProperties (Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => LoopContext.CurrentLoopSym (LoopStack, VCGHeap)), InsertedAssert); DebugPrintInvariant; Graph.Set_Assertion_Locn (X => InsertedAssert); ModelNullStmt (VCGHeap); end if; end CheckPlantAssert; begin -- StartLoopModel LoopHead := StmtStack.Top.StmtNmbr; -- push record holding nmbr of exit of loop-statement; Graph.Inc_Nmbr_Of_Stmts; R := 
StmtStack.StmtRecord'(Graph.Get_Nmbr_Of_Stmts, StmtStack.LoopExit); StmtStack.Push (R); Graph.Insert_Text_Line_Nmbr (Index => LoopHead, X => LineNmbr); StartLoopBody; CheckPlantAssert; -- advance to iteration_scheme; LocalNode := STree.Child_Node (Current_Node => Node); if LocalNode /= STree.NullNode then -- loop has an iteration_scheme; -- localnode is of type iteration_scheme; LoopInvariantNode := STree.Next_Sibling (Current_Node => LocalNode); LocalNode := STree.Child_Node (Current_Node => LocalNode); if LoopInvariantNode /= STree.NullNode then ModelAssertStmt (STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => LoopInvariantNode)))); end if; case STree.Syntax_Node_Type (Node => LocalNode) is when SP_Symbols.condition => ModelWhile; when others => null; end case; end if; end StartLoopModel; ---------------------------------------------------------------------- procedure ModelExit --# global in CommandLineData.Content; --# in DoAssumeLocalRvalues; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in Node; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Node, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Node, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# VCGHeap; is DAGRoot : Cells.Cell; FalseBranchLabel, TrueBranchLabel, UnitLabel : Labels.Label; Q, R, S, T : StmtStack.StmtRecord; IfTail, LoopTail, Switchnode : Graph.Matrix_Index; begin if Node /= STree.NullNode then -- exit occurs in a statement of the form "exit when ..."; BuildExpnDAG (StartNode => Node, ExpnScope => LScope, Scope => Scope, LineNmbr => LineNmbr, DoRtc => True, AssumeRvalues => DoAssumeLocalRvalues, LoopStack => LoopStack, FlowHeap => FlowHeap, VCGHeap => VCGHeap, ContainsReals => ContainsReals, VCGFailure => VCGFailure, ShortCircuitStack => ShortCircuitStack, CheckStack => CheckStack, KindOfStackedCheck => KindOfStackedCheck, DAGRoot => DAGRoot); UnStackRtcs (LineNmbr, VCGHeap, CheckStack, KindOfStackedCheck); CreateSwitch (DAGRoot, FalseBranchLabel, TrueBranchLabel); Switchnode := StmtStack.Top.StmtNmbr; StmtStack.Pop; -- pop last statement in loop body, LoopTail := StmtStack.Top.StmtNmbr; Graph.Create_Coeff (Heap => VCGHeap, I => Switchnode, J => LoopTail, K => TrueBranchLabel); -- push False branch; Graph.Inc_Nmbr_Of_Stmts; --263 Graph.Create_Coeff 
(Heap => VCGHeap, I => Switchnode, J => Graph.Get_Nmbr_Of_Stmts, K => FalseBranchLabel); R := StmtStack.StmtRecord'(Graph.Get_Nmbr_Of_Stmts, StmtStack.Elementary); StmtStack.Push (R); else -- exit occurs in a statement of the form if ... then ... exit; Q := StmtStack.Top; -- store if-branch record; StmtStack.Pop; -- pop if_branch record; IfTail := Q.StmtNmbr; -- store else-branch record; -- pop else_branch record; top of stack now holds if-statement tail; R := StmtStack.Top; StmtStack.Pop; -- pop else_branch record; -- top of stack now holds if-statement tail; S := StmtStack.Top; -- temporarily store and StmtStack.Pop; -- pop if-stmt tail; -- top of stack is now current first statement in loop body; T := StmtStack.Top; -- temporarily store and StmtStack.Pop; -- pop first stmt of loop body; LoopTail := StmtStack.Top.StmtNmbr; StmtStack.Push (T); -- restore loop stmt; StmtStack.Push (S); -- restore if-stmt tail; StmtStack.Push (R); -- restore else-branch; Q.Kind := StmtStack.LoopExit; -- prepare for StartElseModel; StmtStack.Push (Q); -- restore modified if-branch; CreateUnitLabel (UnitLabel, VCGHeap); Graph.Create_Coeff (Heap => VCGHeap, I => IfTail, J => LoopTail, K => UnitLabel); end if; end ModelExit; ---------------------------------------------------------------------- -- procedure to plant a notional "exit when false" at the end of any -- loop that does not have an exit statement or an iteration scheme. procedure ModelDefaultExit --# global in Dictionary.Dict; --# in out Graph.Table; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGHeap; --# derives Graph.Table, --# Statistics.TableUsage, --# StmtStack.S, --# VCGHeap from *, --# Dictionary.Dict, --# Graph.Table, --# StmtStack.S, --# VCGHeap; is DAGRoot : Cells.Cell; FalseBranchLabel, TrueBranchLabel : Labels.Label; R : StmtStack.StmtRecord; LoopTail, Switchnode : Graph.Matrix_Index; begin -- ModelDefaultExit -- create False exit expression CreateNamedConstCell (DAGRoot, VCGHeap, Dictionary.GetFalse); -- build switch CreateSwitch (DAGRoot, FalseBranchLabel, TrueBranchLabel); Switchnode := StmtStack.Top.StmtNmbr; StmtStack.Pop; -- pop last statement in loop body, LoopTail := StmtStack.Top.StmtNmbr; Graph.Create_Coeff (Heap => VCGHeap, I => Switchnode, J => LoopTail, K => TrueBranchLabel); -- push False branch; Graph.Inc_Nmbr_Of_Stmts; Graph.Create_Coeff (Heap => VCGHeap, I => Switchnode, J => Graph.Get_Nmbr_Of_Stmts, K => FalseBranchLabel); R := StmtStack.StmtRecord'(Graph.Get_Nmbr_Of_Stmts, StmtStack.Elementary); StmtStack.Push (R); end ModelDefaultExit; ---------------------------------------------------------------------- procedure CloseIfStmt --# global in out Graph.Table; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGHeap; --# derives Graph.Table, --# VCGHeap from Graph.Table, --# StmtStack.S, --# VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap & --# StmtStack.S from *; is NullStmtLabel : Labels.Label; ElsePartEndPoint, JoinNode : Graph.Matrix_Index; begin -- Here, the StmtStack is expected to have two entries -- at the top. 
-- The top-most entry represents the tail of the DAG for the final "else" part -- The next entry represents the node where the alternatives "join" -- -- So...take a note of the statement numbers of these two entries, and -- pop them off: ElsePartEndPoint := StmtStack.Top.StmtNmbr; StmtStack.Pop; JoinNode := StmtStack.Top.StmtNmbr; -- final node of if_statement; StmtStack.Pop; -- and then create a null arc from the end of the "else" part to the Join node CreateUnitLabel (NullStmtLabel, VCGHeap); Graph.Create_Coeff (Heap => VCGHeap, I => ElsePartEndPoint, J => JoinNode, K => NullStmtLabel); Advance (JoinNode); end CloseIfStmt; ---------------------------------------------------------------------- procedure CloseForStmtEnclosingIf --# global in out Graph.Table; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGHeap; --# derives Graph.Table, --# Statistics.TableUsage, --# VCGHeap from *, --# Graph.Table, --# StmtStack.S, --# VCGHeap & --# StmtStack.S from *; is NullStmtLabel : Labels.Label; NullStmtLabel2 : Labels.Label; ThenBranchEndPoint, ElseBranchEndPoint, JoinNode : Graph.Matrix_Index; begin -- Here, the StmtStack is expected to have three entries -- at the top. -- The top-most entry represents the tail of the DAG for the "then" part -- (the path that _does_ enter the loop) -- The next entry represents the tail of the DAG for the "else" part -- (the path that _doesn't_ enter the loop at all owing to a null range) -- The next entry represents the node where the these alternative "join" -- -- So...take a note of the statement numbers of these three entries, and -- pop them off... ThenBranchEndPoint := StmtStack.Top.StmtNmbr; StmtStack.Pop; ElseBranchEndPoint := StmtStack.Top.StmtNmbr; StmtStack.Pop; JoinNode := StmtStack.Top.StmtNmbr; -- final node of if_statement; StmtStack.Pop; -- ...then link both the ThenBranch and the ElseBranch to JoinNode CreateUnitLabel (NullStmtLabel, VCGHeap); CreateUnitLabel (NullStmtLabel2, VCGHeap); Graph.Create_Coeff (Heap => VCGHeap, I => ThenBranchEndPoint, J => JoinNode, K => NullStmtLabel); Graph.Create_Coeff (Heap => VCGHeap, I => ElseBranchEndPoint, J => JoinNode, K => NullStmtLabel2); Advance (JoinNode); end CloseForStmtEnclosingIf; ---------------------------------------------------------------------- procedure CombineCases --# global in out Graph.Table; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGHeap; --# derives Graph.Table, --# VCGHeap from Graph.Table, --# StmtStack.S, --# VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap & --# StmtStack.S from *; is UnitLabel : Labels.Label; R, S : StmtStack.StmtRecord; begin R := StmtStack.Top; -- existing alternative branch, to be closed; StmtStack.Pop; -- remove record of alternative branch; StmtStack.Pop; -- remove record of fanout node; S := StmtStack.Top; -- top record holds tail of case_statement; StmtStack.Pop; CreateUnitLabel (UnitLabel, VCGHeap); Graph.Create_Coeff (Heap => VCGHeap, I => R.StmtNmbr, J => S.StmtNmbr, K => UnitLabel); Advance (S.StmtNmbr); end CombineCases; ---------------------------------------------------------------------- procedure CloseLoop --# global in out Graph.Table; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGHeap; --# derives Graph.Table, --# VCGHeap from Graph.Table, --# StmtStack.S, --# VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap & --# StmtStack.S from *; is BodyTail, LoopHead, LoopTail : Graph.Matrix_Index; NullStmtLabel : Labels.Label; begin BodyTail := StmtStack.Top.StmtNmbr; 
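-- Sketch of the stack and graph manipulation performed by CloseLoop (illustrative,
-- inferred from the statements around this point):
--    top  : last node of the loop body   (BodyTail, read just above and popped next)
--    next : loop exit node               (LoopTail, where the model continues)
--    next : loop head node               (LoopHead, target of the back arc)
-- A null-labelled arc BodyTail -> LoopHead closes the cycle before Advance moves on
-- from LoopTail.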
StmtStack.Pop; -- pop last statement of loop body; LoopTail := StmtStack.Top.StmtNmbr; StmtStack.Pop; -- pop record of loop tail; LoopHead := StmtStack.Top.StmtNmbr; CreateUnitLabel (NullStmtLabel, VCGHeap); Graph.Create_Coeff (Heap => VCGHeap, I => BodyTail, J => LoopHead, K => NullStmtLabel); Advance (LoopTail); end CloseLoop; ---------------------------------------------------------------------- procedure ModelDelayStmt --# global in CommandLineData.Content; --# in DoAssumeLocalRvalues; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in Node; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Node, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Node, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# VCGHeap; is DAGRoot : Cells.Cell; ExpnNode : STree.SyntaxNode; begin -- The delay statement itself is modelled like the null statement. ModelNullStmt (VCGHeap); ExpnNode := STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node)); -- Build expn DAG is called just to generate VCs for the delay expression. -- The Examiner will detect that assignment to DAGRoot is ineffective. 
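-- The accept annotations below tell the Examiner that the resulting flow messages for DAGRoot
-- are expected here and are justified rather than reported as errors.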
--# accept F, 10, DAGRoot, "DAGRoot not used here" & --# F, 33, DAGRoot, "DAGRoot not used here"; BuildExpnDAG (StartNode => ExpnNode, ExpnScope => LScope, Scope => Scope, LineNmbr => LineNmbr, DoRtc => True, AssumeRvalues => DoAssumeLocalRvalues, LoopStack => LoopStack, FlowHeap => FlowHeap, VCGHeap => VCGHeap, ContainsReals => ContainsReals, VCGFailure => VCGFailure, ShortCircuitStack => ShortCircuitStack, CheckStack => CheckStack, KindOfStackedCheck => KindOfStackedCheck, DAGRoot => DAGRoot); UnStackRtcs (LineNmbr, VCGHeap, CheckStack, KindOfStackedCheck); end ModelDelayStmt; -- Expect flow error for DAGRoot unused ---------------------------------------------------------------------- -- For Loops ---------------------------------------------------------------------- function RangeMayBeNull (RangeNode : STree.SyntaxNode) return Boolean --# global in Dictionary.Dict; --# in STree.Table; is Result : Boolean; TypeSymbolPlantedAtRangeNode : Dictionary.Symbol; begin -- RangeMayBeNull Result := False; if (RangeNode /= STree.NullNode) then -- it is possible for the range to be null if STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => RangeNode)) = SP_Symbols.simple_expression then -- we have an explicit range of the form lo .. hi, and hi may be < lo Result := True; else -- Otherwise the range may be a 'Range attribute, which can only be empty if the type is String (because -- the actual parameter might be the string literal ""). -- We obtain the type of the range constraint planted by the wellformation checker TypeSymbolPlantedAtRangeNode := STree.NodeSymbol (STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => RangeNode)))); -- If the planted type is a special constraint type, indicating that the attribute is of an -- unconstrained array object, then we check to see if the associated parameter is a string. -- The loop may be empty if and only if that condition holds.
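-- For example (illustrative source only): given a formal parameter S : in String, a loop
-- written as "for I in Positive range S'Range loop" may execute zero times, because S may be
-- the empty string; an attribute range over an ordinary constrained index type is never null here.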
if Dictionary.IsParameterConstraint (TypeSymbolPlantedAtRangeNode) then Result := Dictionary.IsPredefinedStringType (Dictionary.GetType (Dictionary.GetParameterAssociatedWithParameterConstraint (TypeSymbolPlantedAtRangeNode))); else -- an ordinary index type has been planted and that can't be empty Result := False; end if; end if; end if; return Result; end RangeMayBeNull; -------------------------------------------------------- procedure StartForStmtModel (Node, LoopNode : in STree.SyntaxNode) --# global in CommandLineData.Content; --# in DoAssumeLocalRvalues; --# in ImportConstraints; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in PreConstraints; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# FlowHeap, --# Graph.Table, --# ImportConstraints, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopNode, --# LoopStack, --# LScope, --# Node, --# PreConstraints, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# ImportConstraints, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopNode, --# LoopStack, --# LScope, --# Node, --# PreConstraints, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# VCGHeap; is ControlVarNode, DirectionNode, TypeNode, ArangeNode : STree.SyntaxNode; ---------------------------------------------- -- Called for all FOR loops. Walks the range boundary expression -- and assigns it to the ExitBound local variable associated with the current loop. Where the -- range boundary expression contains variables, these are stored in special "on_entry" -- copies and these copies are substituted in the expression to "freeze" it.
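-- Illustration (hypothetical example; the variable names only indicate the scheme): for a
-- source loop such as "for I in Integer range 1 .. X + Y loop", the exit bound X + Y is frozen
-- by modelling assignments of X and Y to their "on entry" copies and substituting those copies
-- into the stored exit expression, so that assignments to X or Y inside the loop body cannot
-- change the modelled exit value.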
procedure AssignLoopBoundsToExitVariable (RangeNode : in STree.SyntaxNode) --# global in CommandLineData.Content; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# RangeNode, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# RangeNode, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# VCGHeap; is ExitExpnDAG, CopyExitExpnDAG : Cells.Cell; procedure BuildExitExpn (DAGRoot : out Cells.Cell) --# global in CommandLineData.Content; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in RangeNode; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# RangeNode, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# DAGRoot from CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# RangeNode, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# RangeNode, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# VCGHeap; is ExitValueRoot : Cells.Cell; LocalNode : STree.SyntaxNode; TypeMarkCell, LASTCell : Cells.Cell; begin -- BuildExitExpn -- Handles ranges in the form of: -- (1) A type mark with no 
range constraint -- (2) A range in the form L .. R; -- (3) A range in the form of an attribute (e.g. for I in Index range A'Range) -- (1) --------------------------------------------------------- if RangeNode = STree.NullNode then -- statement is of form "for ControlVar in [reverse] ControlVarRange" CreateFixedVarCell (TypeMarkCell, VCGHeap, Dictionary.GetType (LoopContext.CurrentLoopParameterSym (LoopStack, VCGHeap))); CreateCellKind (LASTCell, VCGHeap, Cell_Storage.Attrib_Value); if LoopContext.CurrentLoopMovesInReverse (LoopStack, VCGHeap) then Cells.Set_Lex_Str (VCGHeap, LASTCell, LexTokenManager.First_Token); else Cells.Set_Lex_Str (VCGHeap, LASTCell, LexTokenManager.Last_Token); end if; CreateOpCell (ExitValueRoot, VCGHeap, SP_Symbols.apostrophe); SetLeftArgument (ExitValueRoot, TypeMarkCell, VCGHeap); SetRightArgument (ExitValueRoot, LASTCell, VCGHeap); else LocalNode := STree.Child_Node (Current_Node => RangeNode); -- (2) ------------------------------------------------------------ if STree.Syntax_Node_Type (Node => LocalNode) = SP_Symbols.simple_expression then -- exit condition is given in terms of an expression; if not LoopContext.CurrentLoopMovesInReverse (LoopStack, VCGHeap) then -- move to RH side of L .. R LocalNode := STree.Next_Sibling (Current_Node => LocalNode); end if; -- Do not generate checks BuildExpnDAG (StartNode => LocalNode, ExpnScope => LScope, Scope => Scope, LineNmbr => LineNmbr, DoRtc => False, AssumeRvalues => False, LoopStack => LoopStack, FlowHeap => FlowHeap, VCGHeap => VCGHeap, ContainsReals => ContainsReals, VCGFailure => VCGFailure, ShortCircuitStack => ShortCircuitStack, CheckStack => CheckStack, KindOfStackedCheck => KindOfStackedCheck, DAGRoot => ExitValueRoot); else -- (3) ---------------------------------------------------------- -- exit condition is given in terms of an attribute; -- This call to BuildExpnDAG will not create any checks. 
BuildExpnDAG (StartNode => LocalNode, ExpnScope => LScope, Scope => Scope, LineNmbr => LineNmbr, DoRtc => False, AssumeRvalues => False, LoopStack => LoopStack, FlowHeap => FlowHeap, VCGHeap => VCGHeap, ContainsReals => ContainsReals, VCGFailure => VCGFailure, ShortCircuitStack => ShortCircuitStack, CheckStack => CheckStack, KindOfStackedCheck => KindOfStackedCheck, DAGRoot => ExitValueRoot); if LoopContext.CurrentLoopMovesInReverse (LoopStack, VCGHeap) then ExitValueRoot := LeftPtr (VCGHeap, ExitValueRoot); else ExitValueRoot := RightPtr (VCGHeap, ExitValueRoot); end if; end if; end if; DAGRoot := ExitValueRoot; end BuildExitExpn; --------------------- procedure BuildAssignment (Destination, Source : in Cells.Cell) --# global in out Graph.Table; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGHeap; --# derives Graph.Table, --# StmtStack.S, --# VCGHeap from Destination, --# Graph.Table, --# Source, --# StmtStack.S, --# VCGHeap & --# Statistics.TableUsage from *, --# Destination, --# Source, --# VCGHeap; is StmtCell, ModList : Cells.Cell; StmtLabel : Labels.Label; begin -- BuildAssignment PrepareLabel (VCGHeap, StmtLabel, StmtCell); Clists.CreateList (VCGHeap, ModList); Clists.AppendCell (VCGHeap, Destination, ModList); SetRightArgument (Destination, Source, VCGHeap); SetAuxPtr (StmtCell, ModList, VCGHeap); Chain (StmtLabel, VCGHeap); end BuildAssignment; --------------------- procedure FreezeExitExpn --# global in ExitExpnDAG; --# in LoopStack; --# in out Dictionary.Dict; --# in out Graph.Table; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGHeap; --# derives Dictionary.Dict, --# Graph.Table, --# Statistics.TableUsage, --# StmtStack.S, --# VCGHeap from *, --# Dictionary.Dict, --# ExitExpnDAG, --# Graph.Table, --# LoopStack, --# StmtStack.S, --# VCGHeap; is P : Cells.Cell; S : CStacks.Stack; VarSym : Dictionary.Symbol; LoopEntryVarSym : Dictionary.Symbol; ---------------------------------- procedure SaveEntryValue (TheVariable, TheLoopEntryVariable : in Dictionary.Symbol) --# global in out Graph.Table; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGHeap; --# derives Graph.Table, --# StmtStack.S, --# VCGHeap from Graph.Table, --# StmtStack.S, --# TheLoopEntryVariable, --# TheVariable, --# VCGHeap & --# Statistics.TableUsage from *, --# TheLoopEntryVariable, --# TheVariable, --# VCGHeap; is Source, Dest : Cells.Cell; begin -- SaveEntryValue CreateModifiedCell (Dest, VCGHeap, TheLoopEntryVariable); CreateReferenceCell (Source, VCGHeap, TheVariable); BuildAssignment (Dest, Source); end SaveEntryValue; begin -- FreezeExitExpn; -- Traverse the loop exit expression. For each variable found, create -- or get a special OnLoopEntryVariable. Also, build an assignment from -- the original value of the variable to its special OnEntry equivalent. -- DAG traversal algorithm of D.E. 
Knuth, Fundamental -- Algorithms, p.317; CStacks.CreateStack (S); P := ExitExpnDAG; loop loop exit when Cells.Is_Null_Cell (P); CStacks.Push (VCGHeap, P, S); if Is_Leaf (Node => P, VCG_Heap => VCGHeap) then P := Cells.Null_Cell; else P := LeftPtr (VCGHeap, P); end if; end loop; exit when CStacks.IsEmpty (S); P := CStacks.Top (VCGHeap, S); CStacks.Pop (VCGHeap, S); if Is_Leaf (Node => P, VCG_Heap => VCGHeap) then if Cells.Get_Kind (VCGHeap, P) = Cell_Storage.Reference then VarSym := Cells.Get_Symbol_Value (VCGHeap, P); if Dictionary.Is_Variable (VarSym) then -- We create a special variable in which to store the value -- on entry into the loop and substitute this new var into -- the exit expression DAG we are walking here Dictionary.IdempotentCreateLoopOnEntryVariable (OriginalVariable => VarSym, TheLoop => LoopContext.CurrentLoopSym (LoopStack, VCGHeap), OnEntryVariable => LoopEntryVarSym); SaveEntryValue (VarSym, LoopEntryVarSym); Cells.Set_Symbol_Value (VCGHeap, P, LoopEntryVarSym); end if; -- IsVariable end if; -- is RefCell P := Cells.Null_Cell; else P := RightPtr (VCGHeap, P); end if; end loop; end FreezeExitExpn; begin -- AssignLoopBoundsToExitVariable BuildExitExpn (DAGRoot => ExitExpnDAG); -- for each variable X in the exit expression: -- (1) construct the assignment X_on_entry := X; -- (2) substitute X_on_entry for X in exit expression FreezeExitExpn; -- Force exit expn cell into Dictionary so we can recover it when building loop invariants -- First deep copy expression Structures.CopyStructure (VCGHeap, ExitExpnDAG, CopyExitExpnDAG); -- Then save in Dictionary. associated with loop Dictionary.SetLoopExitExpn (LoopContext.CurrentLoopSym (LoopStack, VCGHeap), Natural (CopyExitExpnDAG)); end AssignLoopBoundsToExitVariable; procedure AssignLoopCounterEntryValue (RangeNode : in STree.SyntaxNode) --# global in CommandLineData.Content; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# RangeNode, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# RangeNode, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# VCGHeap; is EntryExpnDAG, CopyEntryExpnDAG : Cells.Cell; procedure BuildEntryExpn (DAGRoot : out Cells.Cell) --# global in CommandLineData.Content; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in RangeNode; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out 
ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# RangeNode, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# DAGRoot from CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# RangeNode, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# RangeNode, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# VCGHeap; is EntryValueRoot : Cells.Cell; LocalNode : STree.SyntaxNode; TypeMarkCell, LASTCell : Cells.Cell; begin -- BuildEntryExpn -- Handles ranges in the form of: -- (1) A type mark with no range constraint -- (2) A range in the form L .. R; -- (3) A range in the form of an attribute (e.g. for I in Index range A'Range) -- (1) --------------------------------------------------------- if RangeNode = STree.NullNode then -- statement is of form "for ControlVar in [reverse] ControlVarRange" CreateFixedVarCell (TypeMarkCell, VCGHeap, Dictionary.GetType (LoopContext.CurrentLoopParameterSym (LoopStack, VCGHeap))); CreateCellKind (LASTCell, VCGHeap, Cell_Storage.Attrib_Value); if LoopContext.CurrentLoopMovesInReverse (LoopStack, VCGHeap) then Cells.Set_Lex_Str (VCGHeap, LASTCell, LexTokenManager.Last_Token); else Cells.Set_Lex_Str (VCGHeap, LASTCell, LexTokenManager.First_Token); end if; CreateOpCell (EntryValueRoot, VCGHeap, SP_Symbols.apostrophe); SetLeftArgument (EntryValueRoot, TypeMarkCell, VCGHeap); SetRightArgument (EntryValueRoot, LASTCell, VCGHeap); else LocalNode := STree.Child_Node (Current_Node => RangeNode); -- (2) ------------------------------------------------------------ if STree.Syntax_Node_Type (Node => LocalNode) = SP_Symbols.simple_expression then -- entry value is given in terms of a simple_expression if LoopContext.CurrentLoopMovesInReverse (LoopStack, VCGHeap) then -- move to RH side of L .. 
R LocalNode := STree.Next_Sibling (Current_Node => LocalNode); end if; -- Do not generate checks BuildExpnDAG (StartNode => LocalNode, ExpnScope => LScope, Scope => Scope, LineNmbr => LineNmbr, DoRtc => False, AssumeRvalues => False, LoopStack => LoopStack, FlowHeap => FlowHeap, VCGHeap => VCGHeap, ContainsReals => ContainsReals, VCGFailure => VCGFailure, ShortCircuitStack => ShortCircuitStack, CheckStack => CheckStack, KindOfStackedCheck => KindOfStackedCheck, DAGRoot => EntryValueRoot); else -- (3) ---------------------------------------------------------- -- entry value is given in terms of a 'Range attribute; -- This call to BuildExpnDAG will not create any checks. BuildExpnDAG (StartNode => LocalNode, ExpnScope => LScope, Scope => Scope, LineNmbr => LineNmbr, DoRtc => False, AssumeRvalues => False, LoopStack => LoopStack, FlowHeap => FlowHeap, VCGHeap => VCGHeap, ContainsReals => ContainsReals, VCGFailure => VCGFailure, ShortCircuitStack => ShortCircuitStack, CheckStack => CheckStack, KindOfStackedCheck => KindOfStackedCheck, DAGRoot => EntryValueRoot); -- EntryValueRoot now points at "double_dot" Cell containing the -- DAG for L .. R. For a normal "for" loop, the initial value -- of the loop counter is L, which is in the LeftPtr cell from -- EntryValueRoot. -- -- For a "reverse for" loop, R is the initial value, so... if LoopContext.CurrentLoopMovesInReverse (LoopStack, VCGHeap) then EntryValueRoot := RightPtr (VCGHeap, EntryValueRoot); else EntryValueRoot := LeftPtr (VCGHeap, EntryValueRoot); end if; end if; end if; DAGRoot := EntryValueRoot; end BuildEntryExpn; begin BuildEntryExpn (DAGRoot => EntryExpnDAG); -- Force entry expn cell into Dictionary so we can recover it when building loop invariants -- First deep copy expression Structures.CopyStructure (VCGHeap, EntryExpnDAG, CopyEntryExpnDAG); -- Then save in Dictionary. 
associated with loop Dictionary.SetLoopEntryExpn (LoopContext.CurrentLoopSym (LoopStack, VCGHeap), Natural (CopyEntryExpnDAG)); end AssignLoopCounterEntryValue; ---------------------------------------------- procedure StartForStmtEnclosingIf --# global in ArangeNode; --# in CommandLineData.Content; --# in DoAssumeLocalRvalues; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in Scope; --# in STree.Table; --# in TypeNode; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# Graph.Table, --# KindOfStackedCheck, --# Statistics.TableUsage, --# StmtStack.S, --# VCGHeap from *, --# ArangeNode, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# TypeNode, --# VCGHeap & --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# LexTokenManager.State, --# ShortCircuitStack, --# VCGFailure from *, --# ArangeNode, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from ArangeNode, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# VCGHeap; is DAGRoot : Cells.Cell; FalseBranchLabel, TrueBranchLabel : Labels.Label; R : StmtStack.StmtRecord; Predecessor : Graph.Matrix_Index; ------------------------------------------ procedure CreateSwitchExpn --# global in ArangeNode; --# in CommandLineData.Content; --# in DoAssumeLocalRvalues; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in Scope; --# in STree.Table; --# in TypeNode; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# out DAGRoot; --# derives CheckStack, --# Graph.Table, --# KindOfStackedCheck, --# Statistics.TableUsage, --# StmtStack.S, --# VCGHeap from *, --# ArangeNode, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# TypeNode, --# VCGHeap & --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# LexTokenManager.State, --# ShortCircuitStack, --# VCGFailure from *, --# ArangeNode, --# CheckStack, --# 
CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# DAGRoot from ArangeNode, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# TypeNode, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from ArangeNode, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# VCGHeap; is NotCell : Cells.Cell; AttributeExpression, ExpnRoot1, ExpnRoot2 : Cells.Cell; LocalNode : STree.SyntaxNode; begin LocalNode := STree.Child_Node (Current_Node => ArangeNode); -- model simple expression or attribute as appropriate if STree.Syntax_Node_Type (Node => LocalNode) = SP_Symbols.simple_expression then -- range is of form simpexp .. simpexp BuildExpnDAG (StartNode => LocalNode, ExpnScope => LScope, Scope => Scope, LineNmbr => LineNmbr, DoRtc => False, AssumeRvalues => DoAssumeLocalRvalues, LoopStack => LoopStack, FlowHeap => FlowHeap, VCGHeap => VCGHeap, ContainsReals => ContainsReals, VCGFailure => VCGFailure, ShortCircuitStack => ShortCircuitStack, CheckStack => CheckStack, KindOfStackedCheck => KindOfStackedCheck, DAGRoot => ExpnRoot1); LocalNode := STree.Next_Sibling (Current_Node => LocalNode); BuildExpnDAG (StartNode => LocalNode, ExpnScope => LScope, Scope => Scope, LineNmbr => LineNmbr, DoRtc => False, AssumeRvalues => DoAssumeLocalRvalues, LoopStack => LoopStack, FlowHeap => FlowHeap, VCGHeap => VCGHeap, ContainsReals => ContainsReals, VCGFailure => VCGFailure, ShortCircuitStack => ShortCircuitStack, CheckStack => CheckStack, KindOfStackedCheck => KindOfStackedCheck, DAGRoot => ExpnRoot2); else -- attribute case BuildExpnDAG (StartNode => LocalNode, ExpnScope => LScope, Scope => Scope, LineNmbr => LineNmbr, DoRtc => False, AssumeRvalues => DoAssumeLocalRvalues, LoopStack => LoopStack, FlowHeap => FlowHeap, VCGHeap => VCGHeap, ContainsReals => ContainsReals, VCGFailure => VCGFailure, ShortCircuitStack => ShortCircuitStack, CheckStack => CheckStack, KindOfStackedCheck => KindOfStackedCheck, DAGRoot => AttributeExpression); Structures.CopyStructure (VCGHeap, LeftPtr (VCGHeap, AttributeExpression), ExpnRoot1); Structures.CopyStructure (VCGHeap, RightPtr (VCGHeap, AttributeExpression), ExpnRoot2); end if; if Dictionary.TypeIsBoolean (STree.NodeSymbol (TypeNode)) then -- special expression needed to avoid Boolean inequalities -- Boolean equivalent of lower <= upper is -- not Lower or upper CreateOpCell (DAGRoot, VCGHeap, SP_Symbols.RWor); CreateOpCell (NotCell, VCGHeap, SP_Symbols.RWnot); SetRightArgument (NotCell, ExpnRoot1, VCGHeap); SetLeftArgument (DAGRoot, NotCell, VCGHeap); SetRightArgument (DAGRoot, ExpnRoot2, VCGHeap); else -- proceed as before CreateOpCell (DAGRoot, VCGHeap, SP_Symbols.less_or_equal); SetLeftArgument (DAGRoot, ExpnRoot1, VCGHeap); SetRightArgument (DAGRoot, ExpnRoot2, VCGHeap); end if; UnStackRtcs (LineNmbr, VCGHeap, CheckStack, 
KindOfStackedCheck); end CreateSwitchExpn; ------------------------------------------ begin -- StartForStmtEnclosingIf CreateSwitchExpn; CreateSwitch (DAGRoot, FalseBranchLabel, TrueBranchLabel); Predecessor := StmtStack.Top.StmtNmbr; -- push record holding nmbr of tail (exit) of if-statement; Graph.Inc_Nmbr_Of_Stmts; R := StmtStack.StmtRecord'(Graph.Get_Nmbr_Of_Stmts, StmtStack.IfStart); StmtStack.Push (R); -- push False branch; Graph.Inc_Nmbr_Of_Stmts; Graph.Create_Coeff (Heap => VCGHeap, I => Predecessor, J => Graph.Get_Nmbr_Of_Stmts, K => FalseBranchLabel); R.StmtNmbr := Graph.Get_Nmbr_Of_Stmts; R.Kind := StmtStack.IfFalseBranch; StmtStack.Push (R); -- push True branch; Graph.Inc_Nmbr_Of_Stmts; Graph.Create_Coeff (Heap => VCGHeap, I => Predecessor, J => Graph.Get_Nmbr_Of_Stmts, K => TrueBranchLabel); R.StmtNmbr := Graph.Get_Nmbr_Of_Stmts; R.Kind := StmtStack.IfTrueBranch; StmtStack.Push (R); end StartForStmtEnclosingIf; ------------------------------- procedure CheckRangeBounds (TypeNode, RangeNode : in STree.SyntaxNode) --# global in CommandLineData.Content; --# in DoAssumeLocalRvalues; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# RangeNode, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# TypeNode, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# RangeNode, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# TypeNode, --# VCGHeap; is BaseTypeSym, TypeSym : Dictionary.Symbol; LocalNode : STree.SyntaxNode; LowerBound, RangeBoundDAG : Cells.Cell; procedure BuildImpliedSubtypeCheck (Type_Sym : in Dictionary.Symbol; LowExpr, HighExpr : in Cells.Cell) --# global in Dictionary.Dict; --# in out CheckStack; --# in out ShortCircuitStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives CheckStack, --# ShortCircuitStack, --# Statistics.TableUsage, --# VCGHeap from *, --# CheckStack, --# Dictionary.Dict, --# HighExpr, --# LowExpr, --# ShortCircuitStack, --# Type_Sym, --# VCGHeap; is Lhs, Rhs, Implies1, Implies2, LessThan, ImpliedExpression1, ImpliedExpression2 : Cells.Cell; begin -- BuildImpliedSubtypeCheck -- build lhs of implies expression (lower <= upper) Structures.CopyStructure (VCGHeap, LowExpr, Lhs); Structures.CopyStructure (VCGHeap, HighExpr, Rhs); CreateOpCell (LessThan, VCGHeap, SP_Symbols.less_or_equal); SetLeftArgument (LessThan, Lhs, VCGHeap); 
SetRightArgument (LessThan, Rhs, VCGHeap); -- create X in T model for LowExpr Type_Constraint.Process_Discrete (The_Type => Type_Sym, The_Expression => LowExpr, The_Constraint => ImpliedExpression1, VCG_Heap => VCGHeap); -- and for HighExpr Type_Constraint.Process_Discrete (The_Type => Type_Sym, The_Expression => HighExpr, The_Constraint => ImpliedExpression2, VCG_Heap => VCGHeap); -- make --> operators CreateOpCell (Implies1, VCGHeap, SP_Symbols.implies); CreateOpCell (Implies2, VCGHeap, SP_Symbols.implies); -- assemble entire X <= Y -> X in T expressions SetLeftArgument (Implies1, LessThan, VCGHeap); SetRightArgument (Implies1, ImpliedExpression1, VCGHeap); SetLeftArgument (Implies2, LessThan, VCGHeap); SetRightArgument (Implies2, ImpliedExpression2, VCGHeap); -- plant checks PlantCheckStatement (Implies1, VCGHeap, ShortCircuitStack, CheckStack); PlantCheckStatement (Implies2, VCGHeap, ShortCircuitStack, CheckStack); end BuildImpliedSubtypeCheck; --------------------------------------- begin -- CheckRangeBounds if RangeNode /= STree.NullNode then -- something to do TypeSym := STree.NodeSymbol (TypeNode); -- recover from wff planting -- Guard generation of checks so that they are not generated for Booleans if not Dictionary.TypeIsBoolean (TypeSym) then -- something to do BaseTypeSym := Dictionary.GetRootType (TypeSym); -- note that if TypeSym is not a subtype then TypeSym = BaseTypeSym -- we either have simpex .. simpex or an attribute to deal with LocalNode := STree.Child_Node (Current_Node => RangeNode); if STree.Syntax_Node_Type (Node => LocalNode) = SP_Symbols.simple_expression then -- range is of form simpexp .. simpexp -- LocalNode points at LHS of range at this point -- first check that bounds are in base type BuildExpnDAG (StartNode => LocalNode, ExpnScope => LScope, Scope => Scope, LineNmbr => LineNmbr, DoRtc => True, AssumeRvalues => DoAssumeLocalRvalues, LoopStack => LoopStack, FlowHeap => FlowHeap, VCGHeap => VCGHeap, ContainsReals => ContainsReals, VCGFailure => VCGFailure, ShortCircuitStack => ShortCircuitStack, CheckStack => CheckStack, KindOfStackedCheck => KindOfStackedCheck, DAGRoot => RangeBoundDAG); CheckConstraintRunTimeError (BaseTypeSym, RangeBoundDAG, Scope, VCGHeap, ShortCircuitStack, CheckStack, ContainsReals); LowerBound := RangeBoundDAG; -- now do RHS of range LocalNode := STree.Next_Sibling (Current_Node => LocalNode); BuildExpnDAG (StartNode => LocalNode, ExpnScope => LScope, Scope => Scope, LineNmbr => LineNmbr, DoRtc => True, AssumeRvalues => DoAssumeLocalRvalues, LoopStack => LoopStack, FlowHeap => FlowHeap, VCGHeap => VCGHeap, ContainsReals => ContainsReals, VCGFailure => VCGFailure, ShortCircuitStack => ShortCircuitStack, CheckStack => CheckStack, KindOfStackedCheck => KindOfStackedCheck, DAGRoot => RangeBoundDAG); CheckConstraintRunTimeError (BaseTypeSym, RangeBoundDAG, Scope, VCGHeap, ShortCircuitStack, CheckStack, ContainsReals); -- generate separate VC for range bounds in base type before constructing implied check UnStackRtcs (LineNmbr, VCGHeap, CheckStack, KindOfStackedCheck); -- now construct implication check that range is in subtype BuildImpliedSubtypeCheck (TypeSym, LowerBound, RangeBoundDAG); else -- range is in the form of an attribute -- first check that bounds are in base type BuildExpnDAG (StartNode => LocalNode, ExpnScope => LScope, Scope => Scope, LineNmbr => LineNmbr, DoRtc => False, AssumeRvalues => DoAssumeLocalRvalues, LoopStack => LoopStack, FlowHeap => FlowHeap, VCGHeap => VCGHeap, ContainsReals => ContainsReals, VCGFailure => 
VCGFailure, ShortCircuitStack => ShortCircuitStack, CheckStack => CheckStack, KindOfStackedCheck => KindOfStackedCheck, DAGRoot => RangeBoundDAG); CheckConstraintRunTimeError (BaseTypeSym, LeftPtr (VCGHeap, RangeBoundDAG), Scope, VCGHeap, ShortCircuitStack, CheckStack, ContainsReals); -- Upper bound CheckConstraintRunTimeError (BaseTypeSym, RightPtr (VCGHeap, RangeBoundDAG), Scope, VCGHeap, ShortCircuitStack, CheckStack, ContainsReals); -- now construct implication check that range is in subtype BuildImpliedSubtypeCheck (TypeSym, LeftPtr (VCGHeap, RangeBoundDAG), RightPtr (VCGHeap, RangeBoundDAG)); end if; UnStackRtcs (LineNmbr, VCGHeap, CheckStack, KindOfStackedCheck); end if; end if; end CheckRangeBounds; ------------------------------- procedure InitializeControlVar --# global in ArangeNode; --# in CommandLineData.Content; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# ArangeNode, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from ArangeNode, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# VCGHeap; is AssignedVarCell, DAGRoot, StmtCell, ModList : Cells.Cell; StmtLabel : Labels.Label; -------------------------------------- procedure CreateInitialExpn --# global in ArangeNode; --# in CommandLineData.Content; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# out DAGRoot; --# derives CheckStack, --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# ArangeNode, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# DAGRoot from ArangeNode, --# CheckStack, --# CommandLineData.Content, --# 
Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from ArangeNode, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# VCGHeap; is LocalNode : STree.SyntaxNode; TypeMarkCell, FIRSTCell : Cells.Cell; begin -- CreateInitialExpn if ArangeNode = STree.NullNode then -- statement is of form "for ControlVar in [reverse] ControlVarRange" CreateFixedVarCell (TypeMarkCell, VCGHeap, Dictionary.GetType (LoopContext.CurrentLoopParameterSym (LoopStack, VCGHeap))); CreateCellKind (FIRSTCell, VCGHeap, Cell_Storage.Attrib_Value); if LoopContext.CurrentLoopMovesInReverse (LoopStack, VCGHeap) then Cells.Set_Lex_Str (VCGHeap, FIRSTCell, LexTokenManager.Last_Token); else Cells.Set_Lex_Str (VCGHeap, FIRSTCell, LexTokenManager.First_Token); end if; CreateOpCell (DAGRoot, VCGHeap, SP_Symbols.apostrophe); SetLeftArgument (DAGRoot, TypeMarkCell, VCGHeap); SetRightArgument (DAGRoot, FIRSTCell, VCGHeap); else LocalNode := STree.Child_Node (Current_Node => ArangeNode); if STree.Syntax_Node_Type (Node => LocalNode) = SP_Symbols.simple_expression then -- range is of form simpexp .. simpexp if LoopContext.CurrentLoopMovesInReverse (LoopStack, VCGHeap) then LocalNode := STree.Next_Sibling (Current_Node => LocalNode); end if; -- Do not generate checks with this call BuildExpnDAG (StartNode => LocalNode, ExpnScope => LScope, Scope => Scope, LineNmbr => LineNmbr, DoRtc => False, AssumeRvalues => False, LoopStack => LoopStack, FlowHeap => FlowHeap, VCGHeap => VCGHeap, ContainsReals => ContainsReals, VCGFailure => VCGFailure, ShortCircuitStack => ShortCircuitStack, CheckStack => CheckStack, KindOfStackedCheck => KindOfStackedCheck, DAGRoot => DAGRoot); else -- LocalNode is range attribute -- This call to BuildExpnDAG will not generate any checks. BuildExpnDAG (StartNode => LocalNode, ExpnScope => LScope, Scope => Scope, LineNmbr => LineNmbr, DoRtc => False, AssumeRvalues => False, LoopStack => LoopStack, FlowHeap => FlowHeap, VCGHeap => VCGHeap, ContainsReals => ContainsReals, VCGFailure => VCGFailure, ShortCircuitStack => ShortCircuitStack, CheckStack => CheckStack, KindOfStackedCheck => KindOfStackedCheck, DAGRoot => DAGRoot); if LoopContext.CurrentLoopMovesInReverse (LoopStack, VCGHeap) then DAGRoot := RightPtr (VCGHeap, DAGRoot); else DAGRoot := LeftPtr (VCGHeap, DAGRoot); end if; end if; end if; end CreateInitialExpn; ------------------------------------------------------ begin -- InitializeControlVar PrepareLabel (VCGHeap, StmtLabel, StmtCell); Clists.CreateList (VCGHeap, ModList); CreateModifiedCell (AssignedVarCell, VCGHeap, LoopContext.CurrentLoopParameterSym (LoopStack, VCGHeap)); Clists.AppendCell (VCGHeap, AssignedVarCell, ModList); CreateInitialExpn; SetRightArgument (AssignedVarCell, DAGRoot, VCGHeap); SetAuxPtr (StmtCell, ModList, VCGHeap); Chain (StmtLabel, VCGHeap); end InitializeControlVar; -------------------------------------------------------- begin -- StartForStmtModel -- FOR statement is modelled as follows: -- -- for I in Type range L .. 
R loop -- LoopBody; -- end loop; -- -- Determine exit condition which is either L or R; or Type'First or Type'Last; or a range attribute -- depending on whether loop is forward or backward. -- Build an expression DAG for the exit expression -- For each variable X in the expression: save X in specially created X_On_Entry variable -- Substitute X_On_entry for X in the exit expression -- Store the exit expression in the Dictionary for use in loop invariant generation -- -- Build model: -- -- -- AssignLoopBoundsToExitVariable -- R_vars_on_entry := R_vars -- as described above -- -- StartForStmtEnclosingIf -- if L <= R then -- omitted if loop must be entered -- -- InitializeControlVar -- I := L; -- loop -- StartLoopModel -- -- AssertForLoopCounterProperties -- --# assert I <= R_with_on_entry_substitution -- BodyOfLoop; -- exit when I = R_with_on_entry_substitution -- ModelForExit -- I := I + 1; -- AdvanceControlVar -- end loop; -- CloseFor Stmt -- end if; -- Node is loop_parameter_specification; ControlVarNode := STree.Child_Node (Current_Node => Node); DirectionNode := STree.Next_Sibling (Current_Node => ControlVarNode); TypeNode := STree.Next_Sibling (Current_Node => DirectionNode); ArangeNode := STree.Next_Sibling (Current_Node => TypeNode); -- Following call does two things (see description above): -- (1) It creates a model of the exit expression of the for loop in terms of the variables' state -- on entry to the loop and stores that expression in the dictionary so that we can use it -- when generating loop invariant. -- (2) It assigns that expression to the loop's exit variable which is also stored in the dictionary. -- Construction of the exit switch statement is made in terms of this variable rather than in terms -- of the exit expression. AssignLoopBoundsToExitVariable (ArangeNode); -- Now produce a DAG for the initial value of the loop counter, -- and save that in the Dictionary as well. This is used later -- in producing the default loop invariant. AssignLoopCounterEntryValue (ArangeNode); -- check any explicit range is in control var subtype CheckRangeBounds (TypeNode, ArangeNode); -- test whether range could possibly be null (i.e. empty). 
If so, -- then we need to generate an "If" statement that encloses the loop if RangeMayBeNull (ArangeNode) then StartForStmtEnclosingIf; end if; InitializeControlVar; StartLoopModel (LoopNode); end StartForStmtModel; -------------------------------------------------------------------------- procedure CloseForStmt (Node : in STree.SyntaxNode) --# global in Dictionary.Dict; --# in LoopStack; --# in STree.Table; --# in out Graph.Table; --# in out LexTokenManager.State; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGHeap; --# derives Graph.Table, --# Statistics.TableUsage, --# StmtStack.S, --# VCGHeap from *, --# Dictionary.Dict, --# Graph.Table, --# LexTokenManager.State, --# LoopStack, --# Node, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# LexTokenManager.State from *, --# Dictionary.Dict, --# Graph.Table, --# LoopStack, --# StmtStack.S, --# VCGHeap; is ControlVarNode, DirectionNode, ArangeNode : STree.SyntaxNode; ----------------------------- procedure ModelForExit --# global in Dictionary.Dict; --# in LoopStack; --# in out Graph.Table; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGHeap; --# derives Graph.Table, --# Statistics.TableUsage, --# StmtStack.S, --# VCGHeap from *, --# Dictionary.Dict, --# Graph.Table, --# LoopStack, --# StmtStack.S, --# VCGHeap; is DAGRoot : Cells.Cell; FalseBranchLabel, TrueBranchLabel : Labels.Label; R : StmtStack.StmtRecord; LoopTail, Predecessor : Graph.Matrix_Index; ----------------------------------------- procedure CreateExitExpn --# global in Dictionary.Dict; --# in LoopStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# out DAGRoot; --# derives DAGRoot, --# VCGHeap from Dictionary.Dict, --# LoopStack, --# VCGHeap & --# Statistics.TableUsage from *, --# Dictionary.Dict, --# LoopStack, --# VCGHeap; is ControlVarRefCell, ExitValueRoot : Cells.Cell; begin -- CreateExitExpn (e.g. I = ) CreateReferenceCell (ControlVarRefCell, VCGHeap, LoopContext.CurrentLoopParameterSym (LoopStack, VCGHeap)); -- select = or <-> as appropriate if Dictionary.TypeIsBoolean (Dictionary.GetType (LoopContext.CurrentLoopParameterSym (LoopStack, VCGHeap))) then CreateOpCell (DAGRoot, VCGHeap, SP_Symbols.is_equivalent_to); else CreateOpCell (DAGRoot, VCGHeap, SP_Symbols.equals); end if; SetLeftArgument (DAGRoot, ControlVarRefCell, VCGHeap); -- Recover exit expression put in Dict by AssignLoopBoundsToExitVariable ExitValueRoot := Cells.Cell (Dictionary.GetLoopExitExpn (LoopContext.CurrentLoopSym (LoopStack, VCGHeap))); -- There should always be an expression to get since it is planted by -- AssignLoopBoundsToExitVariable which is always called by StartForStmtModel. 
-- Therefore, the following assertion should never fail SystemErrors.RT_Assert (C => not Cells.Is_Null_Cell (ExitValueRoot), Sys_Err => SystemErrors.Assertion_Failure, Msg => "ExitExpn Null in CreateExitExpn"); SetRightArgument (DAGRoot, ExitValueRoot, VCGHeap); end CreateExitExpn; ------------------------------------------------- begin -- ModelForExit CreateExitExpn; CreateSwitch (DAGRoot, FalseBranchLabel, TrueBranchLabel); Predecessor := StmtStack.Top.StmtNmbr; StmtStack.Pop; -- pop last statement in loop body; LoopTail := StmtStack.Top.StmtNmbr; Graph.Create_Coeff (Heap => VCGHeap, I => Predecessor, J => LoopTail, K => TrueBranchLabel); -- push False branch; Graph.Inc_Nmbr_Of_Stmts; Graph.Create_Coeff (Heap => VCGHeap, I => Predecessor, J => Graph.Get_Nmbr_Of_Stmts, K => FalseBranchLabel); R := StmtStack.StmtRecord'(Graph.Get_Nmbr_Of_Stmts, StmtStack.Elementary); StmtStack.Push (R); end ModelForExit; ----------------------------------------------------- procedure AdvanceControlVar --# global in Dictionary.Dict; --# in LoopStack; --# in out Graph.Table; --# in out LexTokenManager.State; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGHeap; --# derives Graph.Table, --# StmtStack.S, --# VCGHeap from Dictionary.Dict, --# Graph.Table, --# LexTokenManager.State, --# LoopStack, --# StmtStack.S, --# VCGHeap & --# LexTokenManager.State, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# LexTokenManager.State, --# LoopStack, --# VCGHeap; is AssignedVarCell, DAGRoot, StmtCell, ModList : Cells.Cell; StmtLabel : Labels.Label; -------------------------------------------- procedure CreateAdvancingExpn --# global in Dictionary.Dict; --# in LoopStack; --# in out LexTokenManager.State; --# in out Statistics.TableUsage; --# in out VCGHeap; --# out DAGRoot; --# derives DAGRoot from Dictionary.Dict, --# LoopStack, --# VCGHeap & --# LexTokenManager.State, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# LoopStack, --# VCGHeap & --# VCGHeap from *, --# Dictionary.Dict, --# LexTokenManager.State, --# LoopStack; is LexStr : LexTokenManager.Lex_String; ControlVarRefCell : Cells.Cell; UnityCell : Cells.Cell; begin CreateReferenceCell (ControlVarRefCell, VCGHeap, LoopContext.CurrentLoopParameterSym (LoopStack, VCGHeap)); if Dictionary.TypeIsNumeric (Dictionary.GetType (LoopContext.CurrentLoopParameterSym (LoopStack, VCGHeap))) then -- define advance of control variable using plus or minus; if not LoopContext.CurrentLoopMovesInReverse (LoopStack, VCGHeap) then CreateOpCell (DAGRoot, VCGHeap, SP_Symbols.plus); else CreateOpCell (DAGRoot, VCGHeap, SP_Symbols.minus); end if; SetLeftArgument (DAGRoot, ControlVarRefCell, VCGHeap); LexTokenManager.Insert_Nat (N => 1, Lex_Str => LexStr); CreateManifestConstCell (UnityCell, VCGHeap, LexStr); SetRightArgument (DAGRoot, UnityCell, VCGHeap); -- insertion to handle Boolean loop counters elsif Dictionary.TypeIsBoolean (Dictionary.GetType (LoopContext.CurrentLoopParameterSym (LoopStack, VCGHeap))) then -- for a well formed Boolean expression both Succ and Pred are the equivalent -- of "not" CreateOpCell (DAGRoot, VCGHeap, SP_Symbols.RWnot); SetRightArgument (DAGRoot, ControlVarRefCell, VCGHeap); --continue for non-Boolean enumerated types as before else -- define advance of control variable using succ or pred; Cells.Create_Cell (VCGHeap, DAGRoot); if not LoopContext.CurrentLoopMovesInReverse (LoopStack, VCGHeap) then Cells.Set_Kind (VCGHeap, DAGRoot, Cell_Storage.Succ_Function); else Cells.Set_Kind (VCGHeap, DAGRoot, 
Cell_Storage.Pred_Function); end if; SetRightArgument (DAGRoot, ControlVarRefCell, VCGHeap); end if; end CreateAdvancingExpn; -------------------------------------------- begin -- AdvanceControlVar PrepareLabel (VCGHeap, StmtLabel, StmtCell); Clists.CreateList (VCGHeap, ModList); CreateModifiedCell (AssignedVarCell, VCGHeap, LoopContext.CurrentLoopParameterSym (LoopStack, VCGHeap)); Clists.AppendCell (VCGHeap, AssignedVarCell, ModList); CreateAdvancingExpn; SetRightArgument (AssignedVarCell, DAGRoot, VCGHeap); SetAuxPtr (StmtCell, ModList, VCGHeap); Chain (StmtLabel, VCGHeap); end AdvanceControlVar; ------------------------------------------------------------ begin -- CloseForStmt -- Node is loop_parameter_specification; ControlVarNode := STree.Child_Node (Current_Node => Node); DirectionNode := STree.Next_Sibling (Current_Node => ControlVarNode); ArangeNode := STree.Next_Sibling (Current_Node => STree.Next_Sibling (Current_Node => DirectionNode)); -- test whether control variable has reached limiting value; ModelForExit; AdvanceControlVar; CloseLoop; -- If we previously generated an enclosing "if" statement to -- deal with the possibility of a null range, then we need to -- close it here. if RangeMayBeNull (ArangeNode) then CloseForStmtEnclosingIf; end if; end CloseForStmt; ----------------------------------------------------------------------- procedure DefineTwiddledVars --# global in Dictionary.Dict; --# in Scope; --# in SubprogSym; --# in out Graph.Table; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGHeap; --# derives Graph.Table, --# StmtStack.S, --# VCGHeap from Dictionary.Dict, --# Graph.Table, --# Scope, --# StmtStack.S, --# SubprogSym, --# VCGHeap & --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Scope, --# SubprogSym, --# VCGHeap; is Abstraction : Dictionary.Abstractions; ExportIt : Dictionary.Iterator; MappingStmtCreated : Boolean; StmtLabel : Labels.Label; ModList : Cells.Cell; procedure CreateMappingStmt --# global in out Statistics.TableUsage; --# in out VCGHeap; --# out ModList; --# out StmtLabel; --# derives ModList, --# StmtLabel, --# VCGHeap from VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap; is StmtPair : Cells.Cell; begin PrepareLabel (VCGHeap, StmtLabel, StmtPair); Clists.CreateList (VCGHeap, ModList); SetAuxPtr (StmtPair, ModList, VCGHeap); end CreateMappingStmt; procedure TwiddledVarAssignment (Sym : Dictionary.Symbol) --# global in ModList; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives Statistics.TableUsage, --# VCGHeap from *, --# ModList, --# Sym, --# VCGHeap; is ImpExpVarCell, AssignedVarCell : Cells.Cell; begin CreateModifiedCell (AssignedVarCell, VCGHeap, Sym); SetTilde (AssignedVarCell, VCGHeap); Clists.AppendCell (VCGHeap, AssignedVarCell, ModList); CreateReferenceCell (ImpExpVarCell, VCGHeap, Sym); SetRightArgument (AssignedVarCell, ImpExpVarCell, VCGHeap); end TwiddledVarAssignment; begin -- DefineTwiddledVars -- Set up series of assignment of the form X~ = X; at the start of the sequence of statements model. -- These have the effect of preserving imported values thus allowing effective use of tilde. We only -- do this for things that are imported AND exported (but that includes implicit exportation of "streams". 
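-- Illustration (hypothetical subprogram): for a procedure whose derives annotation includes
-- "X from X, Y", the export X is also an import, so the model begins with the assignment
-- X~ := X; later hypotheses and conclusions can then refer to the initial value of X through
-- the tilde notation.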
ModList := Cells.Null_Cell; -- initialized in case we bypass the while loop that follows StmtLabel := Labels.CellToLabel (Cells.Null_Cell); -- to avoid flow-sensitive DFA later MappingStmtCreated := False; Abstraction := Dictionary.GetAbstraction (SubprogSym, Scope); -- For functions, we need to set up twiddles for any external var that the function references; this -- is because reading the external var will have an implicit side effect of removing the head from the -- external sequence and we may need to be able to refer to the initial value by using tilde if Dictionary.IsFunction (SubprogSym) then -- each function global is a potential "export" ExportIt := Dictionary.FirstGlobalVariable (Abstraction, SubprogSym); while not Dictionary.IsNullIterator (ExportIt) loop if Dictionary.IsOwnVariableOrConstituentWithMode (Dictionary.CurrentSymbol (ExportIt)) then -- it's an external variable the importation of which implies an export for modelling purposes if not MappingStmtCreated then CreateMappingStmt; MappingStmtCreated := True; end if; TwiddledVarAssignment (Dictionary.CurrentSymbol (ExportIt)); end if; ExportIt := Dictionary.NextSymbol (ExportIt); end loop; else -- For procedures, we can use the import/export list to decide what twiddles need to be set up. Note that -- the list works for streams as well as other import/exports because we patch the derives annotation -- appropriately when procedures reference or update streams. ExportIt := Dictionary.FirstExport (Abstraction, SubprogSym); while not Dictionary.IsNullIterator (ExportIt) loop if Dictionary.IsImport (Abstraction, SubprogSym, Dictionary.CurrentSymbol (ExportIt)) then if not MappingStmtCreated then CreateMappingStmt; MappingStmtCreated := True; end if; TwiddledVarAssignment (Dictionary.CurrentSymbol (ExportIt)); end if; ExportIt := Dictionary.NextSymbol (ExportIt); end loop; end if; if MappingStmtCreated then Chain (StmtLabel, VCGHeap); else ModelNullStmt (VCGHeap); -- SIMPLIFY?
end if; end DefineTwiddledVars; ---------------------------------------------------------------------- procedure IncorporateProofStmt --# global in CommandLineData.Content; --# in ImportConstraints; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in Node; --# in PreConstraints; --# in STree.Table; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives ContainsReals, --# Dictionary.Dict, --# LexTokenManager.State, --# VCGFailure from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# LoopStack, --# LScope, --# Node, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# LoopStack, --# LScope, --# Node, --# SPARK_IO.File_Sys, --# STree.Table, --# VCGHeap & --# Graph.Table, --# Statistics.TableUsage, --# StmtStack.S, --# VCGHeap from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Graph.Table, --# ImportConstraints, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Node, --# PreConstraints, --# StmtStack.S, --# STree.Table, --# VCGHeap; is ExpnNode, LocalNode : STree.SyntaxNode; LocalNodeType : SP_Symbols.SP_Symbol; begin LocalNode := STree.Child_Node (Current_Node => Node); LocalNodeType := STree.Syntax_Node_Type (Node => LocalNode); case LocalNodeType is when SP_Symbols.assert_statement => ExpnNode := STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => LocalNode)); ModelAssertStmt (ExpnNode); when SP_Symbols.assume_statement => ExpnNode := STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => LocalNode)); Model_Assume_Statement (ExpnNode); when SP_Symbols.check_statement => ExpnNode := STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => LocalNode)); ModelCheckStmt (ExpnNode); when others => null; end case; end IncorporateProofStmt; procedure IncorporateConstraints (Semantic_Error_In_Subprogram : in Boolean; Type_Check_Exports : in Boolean; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in DoAssumeLocalRvalues; --# in ImportConstraints; --# in LineNmbr; --# in LoopStack; --# in StartNode; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# FlowHeap, --# KindOfStackedCheck, --# ShortCircuitStack, --# StmtStack.S from CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# FlowHeap, --# Graph.Table, --# ImportConstraints, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# Scope, --# Semantic_Error_In_Subprogram, --# ShortCircuitStack, --# StartNode, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ContainsReals, --# Dictionary.Dict, --# Graph.Table, --# LexTokenManager.State, --# Statistics.TableUsage, --# VCGFailure, --# VCGHeap from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# 
FlowHeap, --# Graph.Table, --# ImportConstraints, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# Scope, --# Semantic_Error_In_Subprogram, --# ShortCircuitStack, --# StartNode, --# StmtStack.S, --# STree.Table, --# Type_Check_Exports, --# VCGHeap & --# ErrorHandler.Error_Context from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# FlowHeap, --# Graph.Table, --# ImportConstraints, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# Scope, --# Semantic_Error_In_Subprogram, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StartNode, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# SPARK_IO.File_Sys from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# ImportConstraints, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# Scope, --# Semantic_Error_In_Subprogram, --# ShortCircuitStack, --# StartNode, --# StmtStack.S, --# STree.Table, --# Type_Check_Exports, --# VCGHeap; is separate; procedure CollectImportConstraints (Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out VCGFailure; --# in out VCGHeap; --# out ImportConstraints; --# derives ContainsReals, --# Dictionary.Dict, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Statistics.TableUsage, --# VCGFailure, --# VCGHeap from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# VCGHeap & --# ImportConstraints from CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# VCGHeap; is Abstraction : Dictionary.Abstractions; SubProg : Dictionary.Symbol; BarrierCell : Cells.Cell; begin SubProg := Dictionary.GetRegion (Scope); ImportConstraints := Cells.Null_Cell; if Dictionary.Is_Subprogram (SubProg) or else Dictionary.IsTaskType (SubProg) then Abstraction := Dictionary.GetAbstraction (SubProg, Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => SubProg)); -- Collect type constraints on imported parameters and globals AssumeTypesOfFormalImportParams (Abstraction, SubProg, Scope, ImportConstraints); AssumeTypesOfImportGlobals (Abstraction, SubProg, Scope, ImportConstraints); -- if the SubProg is an Entry we can also assert its Barrier is True if Dictionary.IsEntry (SubProg) then CreateModifiedCell (BarrierCell, VCGHeap, Dictionary.GetSubprogramEntryBarrier (SubProg)); Cells.Utility.Conjoin (VCGHeap, BarrierCell, ImportConstraints); end if; end if; end CollectImportConstraints; procedure CollectPreConstraints --# global in CommandLineData.Content; --# in LoopStack; --# in Scope; --# in STree.Table; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out VCGFailure; --# in out VCGHeap; --# out PreConstraints; --# derives ContainsReals, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# VCGFailure, --# VCGHeap from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# LoopStack, --# Scope, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# 
Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# LoopStack, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# VCGHeap & --# PreConstraints from CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# LoopStack, --# Scope, --# STree.Table, --# VCGHeap; is Precondition_Node : STree.SyntaxNode; Abstraction : Dictionary.Abstractions; Subprog : Dictionary.Symbol; Function_Defs_Not_Used : CStacks.Stack; begin CStacks.CreateStack (Function_Defs_Not_Used); Subprog := Dictionary.GetRegion (Scope); PreConstraints := Cells.Null_Cell; if Dictionary.Is_Subprogram (Subprog) then -- Build precondition DAG - only a subprogram can have one (not package elab. or task) Abstraction := Dictionary.GetConstraintAbstraction (Subprog, Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Subprog)); Precondition_Node := STree.RefToNode (Dictionary.GetPrecondition (Abstraction, Subprog)); if Precondition_Node /= STree.NullNode then -- We do not need the function defs because they are regenerated each -- time that a function is applied within a proof context -- so Generate_Function_Instantiations is False --# accept F, 10, Function_Defs_Not_Used, "The function defs are needed here"; Build_Annotation_Expression (Exp_Node => Precondition_Node, Instantiated_Subprogram => Dictionary.NullSymbol, Scope => Scope, Calling_Scope => Scope, Force_Abstract => False, Loop_Stack => LoopStack, Generate_Function_Instantiations => False, VC_Failure => VCGFailure, VC_Contains_Reals => ContainsReals, VCG_Heap => VCGHeap, DAG_Root => PreConstraints, Function_Defs => Function_Defs_Not_Used); --# end accept; -- substitute import-export variables by the twiddled version if Dictionary.IsProcedure (Subprog) then SubstituteTwiddled (Abstraction, Subprog, PreConstraints); end if; end if; end if; end CollectPreConstraints; -------------------------------------------------------- procedure SetDoAssumeLocalRvalues --# global in DataFlowErrorInSubprogram; --# in Dictionary.Dict; --# in Scope; --# out DoAssumeLocalRvalues; --# derives DoAssumeLocalRvalues from DataFlowErrorInSubprogram, --# Dictionary.Dict, --# Scope; is function UsesUncheckedConversion return Boolean --# global in Dictionary.Dict; --# in Scope; is begin return Dictionary.UsesUncheckedConversion (Dictionary.GetEnclosingCompilationUnit (Scope)); end UsesUncheckedConversion; function AssignsFromExternal return Boolean --# global in Dictionary.Dict; --# in Scope; is begin return Dictionary.AssignsFromExternal (Dictionary.GetEnclosingCompilationUnit (Scope)); end AssignsFromExternal; begin -- SetDoAssumeLocalRvalues DoAssumeLocalRvalues := not (AssignsFromExternal or else UsesUncheckedConversion) and then not DataFlowErrorInSubprogram; end SetDoAssumeLocalRvalues; -------------------------------------------------------- begin -- BuildGraph LineNmbr := 0; SubprogramCalls := 0; LScope := Scope; CStacks.CreateStack (ShortCircuitStack); CStacks.CreateStack (CheckStack); ContainsReals := False; StmtStack.Clear; LoopContext.Initialize (LoopStack); SetDoAssumeLocalRvalues; CreateUnitLabel (StmtLabel, VCGHeap); Graph.Create_Coeff (Heap => VCGHeap, I => 1, J => 2, K => StmtLabel); InitialRecord := StmtStack.StmtRecord'(StmtNmbr => 2, Kind => StmtStack.Elementary); StmtStack.Push (InitialRecord); Graph.Set_Nmbr_Of_Stmts (N => 2); -- Guard all statement syntax tree walking in case of semantic errors in the subprgoram being modelled -- Any graph model of invalid SPARK is useless to us so we don't do it. 
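-- As an illustrative sketch of the walk that follows (hypothetical fragment): for a body
--    if C then S1; else S2; end if;
-- the downward loop reaches the if_statement node and calls StartIfModel, models the
-- statements of S1, calls StartElseModel at the else_part and models S2; the upward
-- loop then revisits the if_statement node on the way back towards StartNode and calls
-- CloseIfStmt to join the two branches of the statement graph. Case statements and
-- loops are opened and closed by the same down/up pattern (StartCaseModel then
-- CombineCases, StartLoopModel or StartForStmtModel then CloseLoop or CloseForStmt).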
if not Semantic_Error_In_Subprogram then DefineTwiddledVars; -- collect import type constraints to be used in particular to -- propagate the information across assertions. CollectImportConstraints (Scope => Scope); CollectPreConstraints; ModelInitialisedVariables; -- CheckStack always defined Node := StartNode; loop -- Down Loop ----------------------------------------------------- LastNode := Node; NodeType := STree.Syntax_Node_Type (Node => Node); case NodeType is when SP_Symbols.sequence_of_statements | SP_Symbols.simple_statement | SP_Symbols.compound_statement | SP_Symbols.elsif_part | SP_Symbols.loop_statement | SP_Symbols.alternatives => Node := STree.Child_Node (Current_Node => Node); when SP_Symbols.statement => LineNmbr := Integer (STree.Node_Position (Node => Node).Start_Line_No); Node := STree.Child_Node (Current_Node => Node); when SP_Symbols.simple_name | SP_Symbols.sequence_of_labels | SP_Symbols.label => Node := STree.NullNode; -- to ignore labels or optional loop labels when SP_Symbols.apragma | SP_Symbols.null_statement | SP_Symbols.justification_statement => ModelNullStmt (VCGHeap); Node := STree.NullNode; when SP_Symbols.proof_statement => IncorporateProofStmt; Node := STree.NullNode; when SP_Symbols.assignment_statement => ModelAssignmentStmt; -- CheckStack always defined Node := STree.NullNode; when SP_Symbols.procedure_call_statement => ModelProcedureCall; -- CheckStack always defined Node := STree.NullNode; when SP_Symbols.if_statement => Node := STree.Child_Node (Current_Node => Node); LastNode := Node; StartIfModel; -- CheckStack always defined Node := STree.NullNode; when SP_Symbols.condition => -- there is no Statement associated with the condition of the elsif so we need to get -- get the line number from the condition node rather than base it on whatever the -- last statement line number happend to be LineNmbr := Integer (STree.Node_Position (Node => Node).Start_Line_No); StartElsIfModel; -- CheckStack always defined Node := STree.NullNode; when SP_Symbols.else_part => StartElseModel; Node := STree.Child_Node (Current_Node => Node); ModelNullStmt (VCGHeap); when SP_Symbols.case_statement => Node := STree.Child_Node (Current_Node => Node); LastNode := Node; StartCaseModel; -- CheckStack always defined Node := STree.NullNode; when SP_Symbols.case_statement_alternative => Node := STree.Child_Node (Current_Node => Node); ModelAlternative; -- CheckStack always defined Node := STree.Next_Sibling (Current_Node => Node); when SP_Symbols.others_part => if STree.Child_Node (Current_Node => Node) /= STree.NullNode then ModelOthersPart; Node := STree.Child_Node (Current_Node => Node); else Node := STree.NullNode; end if; when SP_Symbols.return_statement => Node := STree.NullNode; when SP_Symbols.loop_statement_opt => LoopContext.EnterLoop (Scope, LoopStack, VCGHeap, -- to get LScope); -- advance to iteration_scheme; Local_Node := STree.Child_Node (Current_Node => Node); if Local_Node = STree.NullNode then ModelNullStmt (VCGHeap); StartLoopModel (Node); else -- loop has an iteration_scheme; -- localnode is of type iteration_scheme; Local_Node := STree.Child_Node (Current_Node => Local_Node); case STree.Syntax_Node_Type (Node => Local_Node) is when SP_Symbols.loop_parameter_specification => StartForStmtModel (Local_Node, Node); when others => ModelNullStmt (VCGHeap); StartLoopModel (Node); end case; end if; Node := STree.NullNode; when SP_Symbols.exit_statement => Node := STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node)); if STree.Syntax_Node_Type 
(Node => Node) = SP_Symbols.simple_name then -- Exit statement has a label, which we must skip -- to get to the condition. Node := STree.Next_Sibling (Current_Node => Node); end if; ModelExit; -- CheckStack is always defined Node := STree.NullNode; when SP_Symbols.end_of_loop => Node := STree.NullNode; when SP_Symbols.delay_statement => ModelDelayStmt; -- CheckStack is always defined Node := STree.NullNode; when others => -- HTML Directives --! --! --! unexpected-node-kind-in-main-tree Node := STree.NullNode; SPARK_IO.New_Line (OutputFile, 1); SPARK_IO.Put_Line (OutputFile, "!!! Unexpected node kind in main tree", 0); VCGFailure := True; --! !!! Unexpected node kind in main tree --! This message indicates corruption of the syntax tree being --! processed by the VC Generator. It should not be seen in normal --! operation. end case; if Node = STree.NullNode and LastNode /= StartNode then loop -- Up Loop ------------------------------------------ Node := STree.Next_Sibling (Current_Node => LastNode); exit when Node /= STree.NullNode; Node := STree.Parent_Node (Current_Node => LastNode); exit when Node = STree.NullNode or Node = StartNode; NodeType := STree.Syntax_Node_Type (Node => Node); case NodeType is when SP_Symbols.if_statement => CloseIfStmt; when SP_Symbols.case_statement => CombineCases; when SP_Symbols.loop_statement => Local_Node := STree.Child_Node (Current_Node => Node); -- Local_Node is simple_name or loop_statement_opt; if STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.simple_name then Local_Node := STree.Next_Sibling (Current_Node => Local_Node); end if; -- Local_Node is loop_statement_opt; Local_Node := STree.Child_Node (Current_Node => Local_Node); if Local_Node = STree.NullNode then -- if the loop has no exits then add in a default "exit when false" if not Dictionary.GetLoopHasExits (Dictionary.GetRegion (LScope)) then ModelDefaultExit; end if; CloseLoop; else -- loop has an iteration_scheme; -- Local_Node is of type iteration_scheme; Local_Node := STree.Child_Node (Current_Node => Local_Node); case STree.Syntax_Node_Type (Node => Local_Node) is when SP_Symbols.loop_parameter_specification => CloseForStmt (Local_Node); -- CheckStack is always defined when others => CloseLoop; end case; end if; LoopContext.ExitLoop (LoopStack, VCGHeap, LScope); when others => null; end case; LastNode := Node; end loop; -- Up ------------------------------------------ end if; exit when Node = STree.NullNode or Node = StartNode; end loop; -- Down ------------------------------------ end if; -- add "finish" node with arc to it; CreateUnitLabel (StmtLabel, VCGHeap); Graph.Inc_Nmbr_Of_Stmts; Graph.Create_Coeff (Heap => VCGHeap, I => StmtStack.Top.StmtNmbr, J => Graph.Get_Nmbr_Of_Stmts, K => StmtLabel); -- We pass Semantic_Error_In_Subprogram to IncorporateConstraints so that it can create a FALSE VC -- for malformed subprogram bodies. -- CheckStack is always defined -- ImportConstraints is defined if RedType.RTC -- Any last assignment to QuantNum, CheckStack and ShortCircuitStack -- within IncorporateConstraints will be reported as ineffective. 
--# accept F, 10, KindOfStackedCheck, "Ineffective assignment here OK" & --# F, 10, CheckStack, "Ineffective assignment here OK" & --# F, 10, ShortCircuitStack, "Ineffective assignment here OK"; IncorporateConstraints (Semantic_Error_In_Subprogram => Semantic_Error_In_Subprogram, Type_Check_Exports => Type_Check_Exports, Scope => Scope); --# end accept; if Semantic_Error_In_Subprogram then SPARK_IO.New_Line (OutputFile, 1); SPARK_IO.Put_Line (OutputFile, "/* False VC generated due to semantic errors in subprogram */", 0); end if; if ContainsReals then ErrorHandler.Semantic_Warning (Err_Num => 405, Position => EndPosition, Id_Str => LexTokenManager.Null_String); end if; end BuildGraph; spark-2012.0.deb/examiner/errorhandler-getfileline.adb0000644000175000017500000000417611753202336021705 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (ErrorHandler) procedure GetFileLine is Cur_Line : E_Strings.T; Line_No_Before_Get : LexTokenManager.Line_Numbers; begin -- Handling of line containing eight-bit characters if SPARK_IO.End_Of_Line (Error_Context_Rec.Source) then SPARK_IO.Skip_Line (Error_Context_Rec.Source, 1); Error_Context_Rec.Line_No := Error_Context_Rec.Line_No + 1; Error_Context_Rec.Current_Line := E_Strings.Empty_String; else Line_No_Before_Get := LexTokenManager.Line_Numbers (SPARK_IO.Line (Error_Context_Rec.Source)); E_Strings.Get_Line (File => Error_Context_Rec.Source, E_Str => Cur_Line); Error_Context_Rec.Current_Line := Cur_Line; Error_Context_Rec.Line_No := Error_Context_Rec.Line_No + 1; if E_Strings.Get_Length (E_Str => Error_Context_Rec.Current_Line) = 0 then Error_Context_Rec.Current_Line := E_Strings.Copy_String (Str => " -- *** ILLEGAL LINE REPLACED *** --"); end if; if Line_No_Before_Get = LexTokenManager.Line_Numbers (SPARK_IO.Line (Error_Context_Rec.Source)) then SPARK_IO.Skip_Line (Error_Context_Rec.Source, 1); end if; end if; end GetFileLine; spark-2012.0.deb/examiner/sp_relations-sp_left_corner.adb0000644000175000017500000000324211753202336022426 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- packed version of SPLeftCorner. separate (SP_Relations) function SP_Left_Corner (Parent : SP_Symbols.SP_Symbol; Child : SP_Symbols.SP_Symbol) return Boolean is Low_Index, High_Index : Left_Corner_Range; Found : Boolean; begin if Parent in SP_Symbols.SP_Terminal then Found := False; else Low_Index := Left_Corner_Range ((Rel_Tab (Parent) / Low) mod Low_Lim); High_Index := Left_Corner_Range ((Rel_Tab (Parent) / High) mod High_Lim); loop Found := Left_Corner (Low_Index) = Child; exit when Found or else Low_Index = High_Index; Low_Index := Low_Index + 1; end loop; end if; return Found; end SP_Left_Corner; spark-2012.0.deb/examiner/dag-buildgraph-incorporateconstraints.adb0000644000175000017500000032246311753202336024420 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with E_Strings; separate (DAG.BuildGraph) procedure IncorporateConstraints (Semantic_Error_In_Subprogram : in Boolean; Type_Check_Exports : in Boolean; Scope : in Dictionary.Scopes) is type Equalities is (UseEquals, UseImplication); PreConNode, PostConNode : STree.SyntaxNode; -------------------------------- -- returns IsRefined if we are dealing with a subprogram that has a second, -- refined annotation, otherwise returns IsAbstract. 
-- Note that we are interested here in whether it has a second proof annotation -- or a second flow annotation, either is enough to trigger refinement VCs -- Note: When we do package initialization constraits we may need to extend this -- function to handle that case as well function Which_Abstraction (Scope : Dictionary.Scopes) return Dictionary.Abstractions --# global in Dictionary.Dict; is Result : Dictionary.Abstractions := Dictionary.IsAbstract; Sym : Dictionary.Symbol; begin Sym := Dictionary.GetRegion (Scope); if Dictionary.Is_Subprogram (Sym) or else Dictionary.IsTaskType (Sym) then Result := Dictionary.GetConstraintAbstraction (Sym, Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Sym)); end if; return Result; end Which_Abstraction; -------------------------------- procedure Plant_Precondition (Node : in STree.SyntaxNode; Semantic_Error_In_Subprogram : in Boolean; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ImportConstraints; --# in LoopStack; --# in STree.Table; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out VCGFailure; --# in out VCGHeap; --# derives ContainsReals, --# Dictionary.Dict, --# LexTokenManager.State, --# VCGFailure from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# LoopStack, --# Node, --# Scope, --# Semantic_Error_In_Subprogram, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# LoopStack, --# Node, --# Scope, --# Semantic_Error_In_Subprogram, --# SPARK_IO.File_Sys, --# STree.Table, --# VCGHeap & --# Graph.Table, --# Statistics.TableUsage, --# VCGHeap from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ImportConstraints, --# LexTokenManager.State, --# LoopStack, --# Node, --# Scope, --# Semantic_Error_In_Subprogram, --# STree.Table, --# VCGHeap; is DAG_Cell : Cells.Cell; Conjoined_Function_Defs : Cells.Cell; Function_Defs : CStacks.Stack; begin Graph.Set_First_Proof_Context (X => Graph.Precondition); -- if there is no precondition, or the subprogram contains semantic errors, then -- we force the precondition to TRUE if Semantic_Error_In_Subprogram or else Node = STree.NullNode then CreateTrueCell (VCGHeap, DAG_Cell); else -- otherwise we traverse the predicate and build a DAG for it. 
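-- As an illustrative sketch (hypothetical annotation): for
--    --# pre X > 0;
-- with an imported parameter X of subtype Positive, the hypothesis planted here is,
-- roughly, x > 0 conjoined with the import constraints collected earlier, e.g.
-- x >= positive__first and x <= positive__last. The defining equations of any proof
-- functions called in the precondition are conjoined as well.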
-- Initialize the function definition stack CStacks.CreateStack (Function_Defs); Build_Annotation_Expression (Exp_Node => Node, Instantiated_Subprogram => Dictionary.NullSymbol, Scope => Scope, Calling_Scope => Scope, Force_Abstract => False, Loop_Stack => LoopStack, Generate_Function_Instantiations => True, VC_Failure => VCGFailure, VC_Contains_Reals => ContainsReals, VCG_Heap => VCGHeap, DAG_Root => DAG_Cell, Function_Defs => Function_Defs); if not CStacks.IsEmpty (Function_Defs) then -- Functions are called within the precondition of the subprogram -- Conjoin all the function definitions on the stack --# accept F, 10, Function_Defs, "The stack has been emptied"; Join_And (Stack => Function_Defs, Conjunct => Conjoined_Function_Defs, VCG_Heap => VCGHeap); --# end accept; -- conjoin the function definitions with the precondition Cells.Utility.Conjoin (VCGHeap, Conjoined_Function_Defs, DAG_Cell); end if; end if; if not Cells.Is_Null_Cell (ImportConstraints) then Cells.Utility.Conjoin (VCGHeap, ImportConstraints, DAG_Cell); end if; Graph.Set_First_Assertion_Locn (X => DAG_Cell); end Plant_Precondition; -------------------------------- -- SubstituteImplicit replaces each occurrence of the implicit -- variable (Implicit_Var) in a function return annotation (Return_Anno_DAG). -- by the expression of the return statement (referenced by RetExpDAG). -- The return annotation is updated in place. procedure SubstituteImplicit (Return_Anno_DAG : in out Cells.Cell; Implicit_Var : in Dictionary.Symbol; RetExpDAG : in Cells.Cell) -- the thing to substitute --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives Return_Anno_DAG, --# Statistics.TableUsage, --# VCGHeap from *, --# Dictionary.Dict, --# Implicit_Var, --# RetExpDAG, --# Return_Anno_DAG, --# VCGHeap; is P : Cells.Cell; S : CStacks.Stack; The_Substitution : Cells.Cell; begin -- We need to take a copy of the thing we're substituting to -- ensure we don't tangle up the graphs :) Structures.CopyStructure (VCGHeap, RetExpDAG, The_Substitution); -- DAG traversal algorithm of D.E. 
Knuth, Fundamental Algorithms, p.317; CStacks.CreateStack (S); P := Return_Anno_DAG; loop loop exit when Cells.Is_Null_Cell (P); CStacks.Push (VCGHeap, P, S); if Is_Leaf (Node => P, VCG_Heap => VCGHeap) then P := Cells.Null_Cell; else P := LeftPtr (VCGHeap, P); end if; end loop; exit when CStacks.IsEmpty (S); P := CStacks.Top (VCGHeap, S); CStacks.Pop (VCGHeap, S); if Is_Leaf (Node => P, VCG_Heap => VCGHeap) then if Dictionary.IsImplicitReturnVariable (Cells.Get_Symbol_Value (VCGHeap, P)) and then Dictionary.Implicit_Return_Variables_Are_Equal (Left_Symbol => Cells.Get_Symbol_Value (VCGHeap, P), Right_Symbol => Implicit_Var) then Cells.Copy_Contents (VCGHeap, The_Substitution, P); end if; P := Cells.Null_Cell; else P := RightPtr (VCGHeap, P); end if; end loop; --# accept F, 31, Return_Anno_DAG, "Return_Anno_DAG is indirectly updated via the local pointer P" & --# F, 50, Return_Anno_DAG, Implicit_Var, "Return_Anno_DAG indirectly references Implicit_Var via local pointer P" & --# F, 50, Return_Anno_DAG, RetExpDAG, "Return_Anno_DAG indirectly references RetExpDAG via local pointer P" & --# F, 50, Return_Anno_DAG, VCGHeap, "Return_Anno_DAG indirectly references VCGHeap via local pointer P" & --# F, 50, Return_Anno_DAG, Dictionary.Dict, "Return_Anno_DAG indirectly references Dictionary.Dict via local pointer P" & --# W, 3, "Suppress warnings on ReturnAnnoDAG"; -- Return_Anno_DAG appears to be only an input to Ada but it is -- effectively a pointer to a data structure which is updated. pragma Warnings (Off, Return_Anno_DAG); end SubstituteImplicit; ----------------------------------------------------------------------------------- -- In general we don't need to RT check procedure exports because any expression -- used to assign to them will have been checked; however, where stream variables -- are involved we may have rogue values returned from the environment. To avoid -- a large number of unnecessary checks we need to detect the special cases where -- streams are involved. 
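-- As an illustrative sketch (hypothetical names): given an in-mode external own
-- variable Port and a procedure whose derives list gives export X from Port, where X
-- is an ordinary (non-moded) export, the value of X ultimately comes from the
-- environment, so a constraint of the form
--    x >= integer__first and x <= integer__last
-- is conjoined with the postcondition by Check_Type_Of_Exports below. Exports that do
-- not depend on any in-mode stream are not checked here, because the expressions
-- assigned to them have already been range checked at the point of assignment.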
----------------------------------------------------------------------------------- procedure Plant_Postcondition (Node : in STree.SyntaxNode; Subprog : in Dictionary.Symbol; Abstraction : in Dictionary.Abstractions; Semantic_Error_In_Subprogram : in Boolean; Type_Check_Exports : in Boolean; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in LoopStack; --# in STree.Table; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives ContainsReals, --# Dictionary.Dict, --# Graph.Table, --# LexTokenManager.State, --# Statistics.TableUsage, --# VCGFailure, --# VCGHeap from *, --# Abstraction, --# CommandLineData.Content, --# Dictionary.Dict, --# Graph.Table, --# LexTokenManager.State, --# LoopStack, --# Node, --# Scope, --# Semantic_Error_In_Subprogram, --# StmtStack.S, --# STree.Table, --# Subprog, --# Type_Check_Exports, --# VCGHeap & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# LoopStack, --# Node, --# Scope, --# Semantic_Error_In_Subprogram, --# SPARK_IO.File_Sys, --# STree.Table, --# VCGHeap & --# SPARK_IO.File_Sys from *, --# Abstraction, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Graph.Table, --# LexTokenManager.State, --# LoopStack, --# Node, --# Scope, --# Semantic_Error_In_Subprogram, --# StmtStack.S, --# STree.Table, --# Subprog, --# Type_Check_Exports, --# VCGHeap & --# StmtStack.S from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Graph.Table, --# LexTokenManager.State, --# LoopStack, --# Node, --# Scope, --# Semantic_Error_In_Subprogram, --# STree.Table, --# VCGHeap; is DAG_Cell : Cells.Cell; Conjoined_Function_Defs : Cells.Cell; Function_Defs : CStacks.Stack; ---------------------------------------------------------------------------- procedure Check_Type_Of_Exports (Abstraction : in Dictionary.Abstractions; SubProg : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Conjuncts : in out Cells.Cell) --# global in CommandLineData.Content; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out VCGFailure; --# in out VCGHeap; --# derives Conjuncts, --# ContainsReals, --# Dictionary.Dict, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Statistics.TableUsage, --# VCGFailure, --# VCGHeap from *, --# Abstraction, --# CommandLineData.Content, --# Conjuncts, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# SubProg, --# VCGHeap; is It : Dictionary.Iterator; ExportVar : Dictionary.Symbol; Type_Sym : Dictionary.Symbol; Data_View : Dictionary.Abstractions; function ExportDependsOnStream (Export : Dictionary.Symbol; Abstraction : Dictionary.Abstractions; SubProg : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is ImportIt : Dictionary.Iterator; Result : Boolean := False; begin -- ExportDependsOnStream ImportIt := Dictionary.FirstDependency (Abstraction, SubProg, Export); while not Dictionary.IsNullIterator (ImportIt) loop if Dictionary.GetOwnVariableOrConstituentMode (Dictionary.CurrentSymbol (ImportIt)) = Dictionary.InMode then Result := True; exit; end if; ImportIt := Dictionary.NextSymbol (ImportIt); end loop; return Result; end ExportDependsOnStream; begin -- 
Check_Type_Of_Exports if Abstraction = Dictionary.IsRefined then -- The view of the pre and post condition is refined. -- Determine whether the view of the data should be -- Abstract (Proof Refinement only) or -- Refined (Data Refinement is present). if Dictionary.IsNullIterator (Dictionary.FirstGlobalVariable (Dictionary.IsRefined, SubProg)) then -- No Data Refinement. Data_View := Dictionary.IsAbstract; else -- Data Refinement is present. Data_View := Dictionary.IsRefined; end if; else -- Only proof refinement is present. Data_View := Dictionary.IsAbstract; end if; It := Dictionary.FirstExport (Data_View, SubProg); while not Dictionary.IsNullIterator (It) loop ExportVar := Dictionary.CurrentSymbol (It); if ExportDependsOnStream (Export => ExportVar, Abstraction => Abstraction, SubProg => SubProg) and then -- RTC only for exports which are dependent on streams IsDirectlyVisible (ExportVar, Scope) and then -- and which are visible in Ada context (Dictionary.GetOwnVariableOrConstituentMode (ExportVar) = Dictionary.DefaultMode) then -- no RTC unless stream has been assigned to some non-moded export --903 Type_Sym := Dictionary.GetType (ExportVar); -- cannot plant the type constraint check directly because it -- refers to the postcond. Need to conjoin it with -- the rest of the postcond instead if not Dictionary.IsPrivateType (Type_Sym, Scope) or else Dictionary.IsPredefinedTimeType (Type_Sym) then ConjoinParamConstraint (Type_Sym, ExportVar, Scope, False, Conjuncts); end if; end if; It := Dictionary.NextSymbol (It); end loop; end Check_Type_Of_Exports; begin -- Plant_Postcondition if Semantic_Error_In_Subprogram then -- If the subprogram has semantic errors then force the postcondition to FALSE to help the user notice the problem CreateFalseCell (VCGHeap, DAG_Cell); else -- Otherwise build TRUE or actual postcondition if Node = STree.NullNode then CreateTrueCell (VCGHeap, DAG_Cell); if Type_Check_Exports then Check_Type_Of_Exports (Abstraction => Abstraction, SubProg => Subprog, Scope => Scope, Conjuncts => DAG_Cell); end if; else -- Initialize the function definition stack CStacks.CreateStack (Function_Defs); Build_Annotation_Expression (Exp_Node => Node, Instantiated_Subprogram => Dictionary.NullSymbol, Scope => Scope, Calling_Scope => Scope, Force_Abstract => False, Loop_Stack => LoopStack, Generate_Function_Instantiations => True, VC_Failure => VCGFailure, VC_Contains_Reals => ContainsReals, VCG_Heap => VCGHeap, DAG_Root => DAG_Cell, Function_Defs => Function_Defs); if not CStacks.IsEmpty (Function_Defs) then -- Functions are called within the postcondition of the subprogram -- Conjoin all the function definitions on the stack --# accept F, 10, Function_Defs, "The stack has been emptied"; Join_And (Stack => Function_Defs, Conjunct => Conjoined_Function_Defs, VCG_Heap => VCGHeap); --# end accept; -- Insert a null statement as a place holder for the conjoined -- function defs; ModelNullStmt (VCGHeap); -- and attach the function definitions to the null statement IncorporateAssumption (VCGHeap, Conjoined_Function_Defs); end if; if Type_Check_Exports then Check_Type_Of_Exports (Abstraction => Abstraction, SubProg => Subprog, Scope => Scope, Conjuncts => DAG_Cell); end if; end if; end if; Graph.Set_Proof_Context (X => Graph.Postcondition); Graph.Set_Assertion_Locn (X => DAG_Cell); end Plant_Postcondition; -------------------------------- -- Plant_Return_Expression is called to insert constraints determined from -- the return annotation and the return type of a function which must be -- proven to be satisfied by 
the body of the function. -- Plant_Return_Expression traverses the syntax tree for a sequnece of -- statements of the function subprogram to locate the -- (only) return statement which will be the last statement of the sequence. -- Once the return statement has been located the return expression subtree -- is obtained and is used as the basis of a new DAG node which is a -- translation of the return annotation. The DAG node represents: -- for an explicit return annotation the expression -- return_expression <-> return_annotation -- for a Boolean return type -- return_expression = return_annotation -- for other return types -- for an implicit return annotation -- Each instance of the implicit_variable in the return-annotation is -- substituted with the return_expression. -- when there is no return annotation present the DAG node represents -- the expression "true". -- Once the DAG node has been associated with the appropriate expression -- a further constraint to check that the value of the return expression is -- in-type is conjoined to the DAG node. procedure Plant_Return_Expression (Return_Type : in Dictionary.Symbol; Node : in STree.SyntaxNode; Semantic_Error_In_Subprogram : in Boolean; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in DoAssumeLocalRvalues; --# in LineNmbr; --# in LoopStack; --# in StartNode; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# FlowHeap, --# KindOfStackedCheck, --# ShortCircuitStack from CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# Scope, --# Semantic_Error_In_Subprogram, --# ShortCircuitStack, --# StartNode, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ContainsReals, --# Dictionary.Dict, --# LexTokenManager.State, --# VCGFailure from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# Node, --# Scope, --# Semantic_Error_In_Subprogram, --# ShortCircuitStack, --# StartNode, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# Node, --# Scope, --# Semantic_Error_In_Subprogram, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StartNode, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# Graph.Table, --# Statistics.TableUsage, --# StmtStack.S, --# VCGHeap from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# Node, --# Return_Type, --# Scope, --# Semantic_Error_In_Subprogram, --# ShortCircuitStack, --# StartNode, --# StmtStack.S, --# STree.Table, --# VCGHeap; is Return_Expression_Node : 
STree.SyntaxNode; RetExpDAG, AnnoRetExpDAG, Postcondition_DAG : Cells.Cell; Implicit_Var : Dictionary.Symbol; TheAnnoDAG, CopyRetExpDAG : Cells.Cell; Conjoined_Function_Defs : Cells.Cell; Function_Defs : CStacks.Stack; procedure ConjoinToReturnExpr (Type_Sym : in Dictionary.Symbol; Expr : in Cells.Cell; Scope : in Dictionary.Scopes; DAG_Cell : in out Cells.Cell) --# global in Dictionary.Dict; --# in out ContainsReals; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives ContainsReals from *, --# Dictionary.Dict, --# Type_Sym & --# DAG_Cell, --# Statistics.TableUsage, --# VCGHeap from *, --# DAG_Cell, --# Dictionary.Dict, --# Expr, --# Scope, --# Type_Sym, --# VCGHeap; is Conjunct_Cell : Cells.Cell; begin if DiscreteTypeWithCheck (Type_Sym, Scope) then Type_Constraint.Process_Discrete (The_Type => Type_Sym, The_Expression => Expr, The_Constraint => Conjunct_Cell, VCG_Heap => VCGHeap); Cells.Utility.Conjoin (VCGHeap, Conjunct_Cell, DAG_Cell); end if; if IsRealType (Type_Sym) then ContainsReals := True; end if; end ConjoinToReturnExpr; begin -- Plant_Return_Expression if Semantic_Error_In_Subprogram then -- If subprogram body has semantic errors then force postcondition to FALSE Graph.Set_Proof_Context (X => Graph.Postcondition); CreateFalseCell (VCGHeap, TheAnnoDAG); Graph.Set_Assertion_Locn (X => TheAnnoDAG); else -- otherwise traverse return expression and construct predicate DAG Return_Expression_Node := STree.Child_Node (Current_Node => StartNode); -- Now we have a sequence_of_statements which can be reduced to: -- sequence_of_statements statement | statement ; -- (See SPARK.LLA) -- If the sequence_of_statements is a sequence_of_statements followed by -- a statement then skip to the statement (which will be the final statement -- in the subprogram). if STree.Syntax_Node_Type (Node => Return_Expression_Node) = SP_Symbols.sequence_of_statements then Return_Expression_Node := STree.Next_Sibling (Current_Node => Return_Expression_Node); end if; -- The final statement in the subprogram should be a return statement, but we -- need to cater for labels because a statement can be reduced to: -- simple_statement | sequence_of_labels simple_statement ... -- (and a simple_statement can be reduced to a return_statement). -- The child node will either be a simple_statement or a sequence_of_labels Return_Expression_Node := STree.Child_Node (Current_Node => Return_Expression_Node); -- Skip the label(s) if present. if STree.Syntax_Node_Type (Node => Return_Expression_Node) = SP_Symbols.sequence_of_labels then Return_Expression_Node := STree.Next_Sibling (Current_Node => Return_Expression_Node); end if; -- Now we have reached the final statement in the subprogram. This should be -- a return statement. Locate the return expression... 
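-- As an illustrative sketch (hypothetical function): for
--    function Inc (X : Integer) return Integer;
--    --# return X + 1;
-- the model built below equates the DAG of the actual return expression with the DAG
-- of the annotation, giving roughly return_expression = x + 1 (with <-> rather than =
-- for a Boolean result). For an implicit annotation of the form
--    --# return R => R >= X;
-- every occurrence of R is replaced by the return expression DAG via SubstituteImplicit.
-- In both cases an in-type check on the return expression is conjoined afterwards by
-- ConjoinToReturnExpr.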
while STree.Syntax_Node_Type (Node => Return_Expression_Node) /= SP_Symbols.expression loop -- SEQUENCE_OF_LABELS Return_Expression_Node := STree.Child_Node (Current_Node => Return_Expression_Node); end loop; -- create DAG for SPARK return expression BuildExpnDAG (Return_Expression_Node, Scope, Scope, LineNmbr, True, DoAssumeLocalRvalues, LoopStack, FlowHeap, VCGHeap, ContainsReals, VCGFailure, ShortCircuitStack, CheckStack, KindOfStackedCheck, -- to get RetExpDAG); UnStackRtcs (LineNmbr, VCGHeap, CheckStack, KindOfStackedCheck); Structures.CopyStructure (VCGHeap, RetExpDAG, CopyRetExpDAG); -- now deal with return annotation if there is one if Node /= STree.NullNode then -- set up the function definition stack CStacks.CreateStack (Function_Defs); if STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_expression then -- build post condition for simple return expression Build_Annotation_Expression (Exp_Node => Node, Instantiated_Subprogram => Dictionary.NullSymbol, Scope => Scope, Calling_Scope => Scope, Force_Abstract => False, Loop_Stack => LoopStack, Generate_Function_Instantiations => True, VC_Failure => VCGFailure, VC_Contains_Reals => ContainsReals, VCG_Heap => VCGHeap, DAG_Root => AnnoRetExpDAG, Function_Defs => Function_Defs); -- set two DAGs to be equal -- use <-> for boolean functions otherwise use = if Dictionary.TypeIsBoolean (Return_Type) then CreateOpCell (Postcondition_DAG, VCGHeap, SP_Symbols.is_equivalent_to); else CreateOpCell (Postcondition_DAG, VCGHeap, SP_Symbols.equals); end if; SetLeftArgument (Postcondition_DAG, RetExpDAG, VCGHeap); SetRightArgument (Postcondition_DAG, AnnoRetExpDAG, VCGHeap); TheAnnoDAG := Postcondition_DAG; -- Indicate that there is no implicit var associated with this -- return annotation. Implicit_Var := Dictionary.NullSymbol; else -- build post condition for implicit return expression -- get symbol of implicit return var Implicit_Var := Dictionary.GetImplicitReturnVariable (Which_Abstraction (Scope => Scope), Dictionary.GetRegion (Scope)); -- walk predicate part of implicit return statement Build_Annotation_Expression (Exp_Node => STree.Next_Sibling (Current_Node => Node), Instantiated_Subprogram => Dictionary.NullSymbol, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Implicit_Var), Calling_Scope => Scope, Force_Abstract => False, Loop_Stack => LoopStack, Generate_Function_Instantiations => True, VC_Failure => VCGFailure, VC_Contains_Reals => ContainsReals, VCG_Heap => VCGHeap, DAG_Root => AnnoRetExpDAG, Function_Defs => Function_Defs); -- now subsitute actual return expression DAG for occurrences of -- implicit return variable SubstituteImplicit (AnnoRetExpDAG, Implicit_Var, RetExpDAG); TheAnnoDAG := AnnoRetExpDAG; end if; if not CStacks.IsEmpty (Function_Defs) then -- Functions are called within the return annotation of the subprogram -- Conjoin all the function definitions on the stack --# accept F, 10, Function_Defs, "The stack has been emptied"; Join_And (Stack => Function_Defs, Conjunct => Conjoined_Function_Defs, VCG_Heap => VCGHeap); --# end accept; -- Check to see if we need to substitute an implicit variable if not Dictionary.Is_Null_Symbol (Implicit_Var) then -- The return anno does have an implicit variable we must -- do a subsitution SubstituteImplicit (Conjoined_Function_Defs, Implicit_Var, RetExpDAG); end if; -- Insert a null statement as a place holder for the conjoined -- function defs; ModelNullStmt (VCGHeap); -- and attach the function definitions to the null statement 
IncorporateAssumption (VCGHeap, Conjoined_Function_Defs); end if; else -- no return expression given so plant true CreateTrueCell (VCGHeap, TheAnnoDAG); end if; -- We still want to do the check that the return expression is in type, -- in contrast to CheckTypeofExports in ModelPostCondition. ConjoinToReturnExpr (Type_Sym => Dictionary.GetType (Dictionary.GetRegion (Scope)), Expr => CopyRetExpDAG, Scope => Scope, DAG_Cell => TheAnnoDAG); Graph.Set_Proof_Context (X => Graph.Postcondition); Graph.Set_Assertion_Locn (X => TheAnnoDAG); end if; end Plant_Return_Expression; -------------------------------- -- Procedure, used for generation of refinement and subclass checks, for creating -- required anno DAGs; created so we can deal with absence of explicit pre/post con -- and create a "true" cell instead procedure CreateAnnotationDAG (Start_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Force_Abstract : in Boolean; Conjoined_Function_Defs : out Cells.Cell; DAG_Root : out Cells.Cell) --# global in CommandLineData.Content; --# in LoopStack; --# in STree.Table; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out VCGFailure; --# in out VCGHeap; --# derives Conjoined_Function_Defs, --# DAG_Root, --# Dictionary.Dict, --# LexTokenManager.State, --# VCGHeap from CommandLineData.Content, --# Dictionary.Dict, --# Force_Abstract, --# LexTokenManager.State, --# LoopStack, --# Scope, --# Start_Node, --# STree.Table, --# VCGHeap & --# ContainsReals, --# Statistics.TableUsage, --# VCGFailure from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Force_Abstract, --# LexTokenManager.State, --# LoopStack, --# Scope, --# Start_Node, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Force_Abstract, --# LexTokenManager.State, --# LoopStack, --# Scope, --# SPARK_IO.File_Sys, --# Start_Node, --# STree.Table, --# VCGHeap; is Function_Defs : CStacks.Stack; begin if Start_Node = STree.NullNode then -- The expression is null so assume True for both the expression -- and the called function definitions CreateTrueCell (VCGHeap, DAG_Root); CreateTrueCell (VCGHeap, Conjoined_Function_Defs); else -- Create stack for the definition of any functions called within the -- annotation expression CStacks.CreateStack (Function_Defs); Build_Annotation_Expression (Exp_Node => Start_Node, Instantiated_Subprogram => Dictionary.NullSymbol, Scope => Scope, Calling_Scope => Scope, Force_Abstract => Force_Abstract, Loop_Stack => LoopStack, Generate_Function_Instantiations => True, VC_Failure => VCGFailure, VC_Contains_Reals => ContainsReals, VCG_Heap => VCGHeap, DAG_Root => DAG_Root, Function_Defs => Function_Defs); -- Functions are called within the annotation expression -- conjoin their definitions. --# accept F, 10, Function_Defs, "The stack has been emptied"; Join_And (Stack => Function_Defs, Conjunct => Conjoined_Function_Defs, VCG_Heap => VCGHeap); --# end accept; end if; end CreateAnnotationDAG; ---------------------------------------------------------------------- -- Performs similar function to CreateAnnotationDAG but deals with function -- return annotations (both explicit and implicit). This procedure used to be -- local to Plant_Refinement_Checks but has been moved out so that it can also -- be used by Plant_Sub_Class_Checks. 
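-- As an illustrative sketch of its behaviour: for an explicit annotation expression E,
-- the resulting DAG is Return_Cell = E when EqualityOperator is UseEquals, and
-- Return_Cell <-> E otherwise; for an implicit return annotation the implicit return
-- variable is replaced by Return_Cell, both in the annotation DAG and in the conjoined
-- definitions of any proof functions it calls; when no annotation is present a "true"
-- cell is produced.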
procedure CreateFunctionAnnotationDAG (Sub_Prog : in Dictionary.Symbol; StartNode : in STree.SyntaxNode; Scope : in Dictionary.Scopes; ForceAbstract : in Boolean; Return_Cell : in Cells.Cell; EqualityOperator : in Equalities; Abstraction : in Dictionary.Abstractions; Conjoined_Function_Defs : out Cells.Cell; DAGRoot : out Cells.Cell) --# global in CommandLineData.Content; --# in LoopStack; --# in STree.Table; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out VCGFailure; --# in out VCGHeap; --# derives Conjoined_Function_Defs from Abstraction, --# CommandLineData.Content, --# Dictionary.Dict, --# ForceAbstract, --# LexTokenManager.State, --# LoopStack, --# Return_Cell, --# Scope, --# StartNode, --# STree.Table, --# Sub_Prog, --# VCGHeap & --# ContainsReals, --# Dictionary.Dict, --# LexTokenManager.State, --# VCGFailure from *, --# Abstraction, --# CommandLineData.Content, --# Dictionary.Dict, --# ForceAbstract, --# LexTokenManager.State, --# LoopStack, --# Scope, --# StartNode, --# STree.Table, --# Sub_Prog, --# VCGHeap & --# DAGRoot, --# VCGHeap from Abstraction, --# CommandLineData.Content, --# Dictionary.Dict, --# EqualityOperator, --# ForceAbstract, --# LexTokenManager.State, --# LoopStack, --# Return_Cell, --# Scope, --# StartNode, --# STree.Table, --# Sub_Prog, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# ForceAbstract, --# LexTokenManager.State, --# LoopStack, --# Scope, --# SPARK_IO.File_Sys, --# StartNode, --# STree.Table, --# Sub_Prog, --# VCGHeap & --# Statistics.TableUsage from *, --# Abstraction, --# CommandLineData.Content, --# Dictionary.Dict, --# EqualityOperator, --# ForceAbstract, --# LexTokenManager.State, --# LoopStack, --# Return_Cell, --# Scope, --# StartNode, --# STree.Table, --# Sub_Prog, --# VCGHeap; is DAGRootLocal, ExpnDAGRoot : Cells.Cell; Implicit_Return_Var : Dictionary.Symbol; begin -- CreateFunctionAnnotationDAG if StartNode = STree.NullNode then CreateTrueCell (VCGHeap, DAGRootLocal); Conjoined_Function_Defs := Cells.Null_Cell; elsif STree.Syntax_Node_Type (Node => StartNode) = SP_Symbols.annotation_expression then -- build post condition for simple return expression CreateAnnotationDAG (Start_Node => StartNode, Scope => Scope, Force_Abstract => ForceAbstract, Conjoined_Function_Defs => Conjoined_Function_Defs, DAG_Root => ExpnDAGRoot); -- now we need to equate the ReturnCell with the ExpnDAG -- first set up the desired operator if EqualityOperator = UseEquals then CreateOpCell (DAGRootLocal, VCGHeap, SP_Symbols.equals); else CreateOpCell (DAGRootLocal, VCGHeap, SP_Symbols.is_equivalent_to); end if; -- and build expression SetLeftArgument (DAGRootLocal, Return_Cell, VCGHeap); SetRightArgument (DAGRootLocal, ExpnDAGRoot, VCGHeap); else -- build post condition for implicit return expression Implicit_Return_Var := Dictionary.GetImplicitReturnVariable (Abstraction, Sub_Prog); CreateAnnotationDAG (Start_Node => STree.Next_Sibling (Current_Node => StartNode), Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Implicit_Return_Var), Force_Abstract => ForceAbstract, Conjoined_Function_Defs => Conjoined_Function_Defs, DAG_Root => DAGRootLocal); -- substitute ReturnCell for all locations in the above expression where -- the original annotation had an implicit 
return variable SubstituteImplicit (DAGRootLocal, Implicit_Return_Var, Return_Cell); -- Also substitute the implicit return variable in all function definitions. SubstituteImplicit (Conjoined_Function_Defs, Implicit_Return_Var, Return_Cell); end if; DAGRoot := DAGRootLocal; end CreateFunctionAnnotationDAG; ---------------------------------- procedure Plant_Refinement_Checks (Scope : Dictionary.Scopes) --# global in CommandLineData.Content; --# in ImportConstraints; --# in LoopStack; --# in STree.Table; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out VCGFailure; --# in out VCGHeap; --# derives ContainsReals, --# Dictionary.Dict, --# Graph.Table, --# LexTokenManager.State, --# Statistics.TableUsage, --# VCGFailure, --# VCGHeap from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ImportConstraints, --# LexTokenManager.State, --# LoopStack, --# Scope, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# ImportConstraints, --# LexTokenManager.State, --# LoopStack, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# VCGHeap; is Sub_Prog : Dictionary.Symbol; PreCheck : Cells.Cell; AbstractPre : Cells.Cell; RefinedPre : Cells.Cell; PreCheckHyp : Cells.Cell; CommonHypotheses : Cells.Cell; DataRefinement : Cells.Cell; Conjoined_Function_Defs : Cells.Cell; Data_View : Dictionary.Abstractions; ---------------------------------------------------------------------- procedure Create_Data_Refinement (Sub_Prog : in Dictionary.Symbol; In_Post_Con : in Boolean; The_DAG : out Cells.Cell) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out LexTokenManager.State; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives LexTokenManager.State from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Sub_Prog & --# Statistics.TableUsage, --# VCGHeap from *, --# CommandLineData.Content, --# Dictionary.Dict, --# In_Post_Con, --# LexTokenManager.State, --# Sub_Prog, --# VCGHeap & --# The_DAG from CommandLineData.Content, --# Dictionary.Dict, --# In_Post_Con, --# LexTokenManager.State, --# Sub_Prog, --# VCGHeap; is type Passes is (ProcessGlobals, ProcessParameters); pragma Unreferenced (ProcessParameters); It : Dictionary.Iterator; Abstract_It : Dictionary.Iterator; Abstract_Sym, Current_Sym : Dictionary.Symbol; First_Item : Boolean := True; DAG_Root, DAG_Local : Cells.Cell; --------------------------- procedure Build_One_Refinement (Refined_Sym : in Dictionary.Symbol; In_Post_Con, Is_Import, Is_Export : in Boolean; Sub_Prog : in Dictionary.Symbol; The_DAG : out Cells.Cell) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out LexTokenManager.State; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives LexTokenManager.State from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Refined_Sym, --# Sub_Prog & --# Statistics.TableUsage, --# VCGHeap from *, --# CommandLineData.Content, --# Dictionary.Dict, --# In_Post_Con, --# Is_Export, --# Is_Import, --# LexTokenManager.State, --# Refined_Sym, --# Sub_Prog, --# VCGHeap & --# The_DAG from CommandLineData.Content, --# Dictionary.Dict, --# In_Post_Con, --# Is_Export, --# Is_Import, --# LexTokenManager.State, --# Refined_Sym, --# Sub_Prog, --# VCGHeap; is Tilde_Version, Operator_Cell, Field_Cell, 
Concrete_Cell, Abstract_Cell : Cells.Cell; Field_Name : LexTokenManager.Lex_String; procedure Build_Field_Name --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in Refined_Sym; --# in Sub_Prog; --# in out LexTokenManager.State; --# out Field_Name; --# derives Field_Name, --# LexTokenManager.State from CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Refined_Sym, --# Sub_Prog; is Prefix : E_Strings.T; begin -- Build_Field_Name Prefix := Dictionary.GetAnyPrefixNeeded (Sym => Refined_Sym, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Dictionary.GetEnclosingPackage (Dictionary.GetScope (Sub_Prog))), Separator => "__"); if E_Strings.Get_Length (E_Str => Prefix) > 0 then E_Strings.Append_String (E_Str => Prefix, Str => "__"); end if; E_Strings.Append_Examiner_String (E_Str1 => Prefix, E_Str2 => LexTokenManager.Lex_String_To_String (Lex_Str => Dictionary.GetSimpleName (Refined_Sym))); LexTokenManager.Insert_Examiner_String (Str => Prefix, Lex_Str => Field_Name); end Build_Field_Name; begin -- Build_One_Refinement -- first define the operator to use CreateOpCell (Operator_Cell, VCGHeap, SP_Symbols.equals); if Dictionary.TypeIsBoolean (Dictionary.GetType (Refined_Sym)) then Cells.Set_Op_Symbol (VCGHeap, Operator_Cell, SP_Symbols.is_equivalent_to); end if; -- now create lhs CreateReferenceCell (Concrete_Cell, VCGHeap, Refined_Sym); -- now rhs CreateCellKind (Field_Cell, VCGHeap, Cell_Storage.Field_Access_Function); Cells.Set_Symbol_Value (VCGHeap, Field_Cell, Refined_Sym); Build_Field_Name; Cells.Set_Lex_Str (VCGHeap, Field_Cell, Field_Name); CreateReferenceCell (Abstract_Cell, VCGHeap, Dictionary.GetSubject (Refined_Sym)); SetRightArgument (Field_Cell, Abstract_Cell, VCGHeap); -- assemble SetLeftArgument (Operator_Cell, Concrete_Cell, VCGHeap); SetRightArgument (Operator_Cell, Field_Cell, VCGHeap); -- mark as initial values if needed if In_Post_Con then -- might need some tildes else definitely don't if Is_Import then -- must be either a pure import or an import/export either of which -- needs a tilded version of some sort. 
Whatever form this takes, the -- abstract side will be tilded Structures.CopyStructure (VCGHeap, Operator_Cell, Tilde_Version); SetTilde (Abstract_Cell, VCGHeap); if Is_Export then -- item is both imported and exported so we need to tilde both sides SetTilde (Concrete_Cell, VCGHeap); end if; -- Is_Export Cells.Utility.Conjoin (VCGHeap, Tilde_Version, Operator_Cell); end if; -- Is_Import end if; -- In_Post_Condition -- return The_DAG := Operator_Cell; end Build_One_Refinement; ------------- function Requires_Refinement_Relation (Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; --# in Sub_Prog; is begin return Dictionary.IsConstituent (Sym) and then Dictionary.IsPackage (Dictionary.GetOwner (Dictionary.GetSubject (Sym))) and then Dictionary.Packages_Are_Equal (Left_Symbol => Dictionary.GetOwner (Dictionary.GetSubject (Sym)), Right_Symbol => Dictionary.GetEnclosingPackage (Dictionary.GetScope (Sub_Prog))); end Requires_Refinement_Relation; ------------- function Is_Abstract_Own_Variable_Of_This_Package (Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; --# in Sub_Prog; is begin return Dictionary.IsRefinedOwnVariable (Sym) and then Dictionary.IsPackage (Dictionary.GetOwner (Sym)) and then Dictionary.Packages_Are_Equal (Left_Symbol => Dictionary.GetOwner (Sym), Right_Symbol => Dictionary.GetEnclosingPackage (Dictionary.GetScope (Sub_Prog))); end Is_Abstract_Own_Variable_Of_This_Package; begin -- Create_Data_Refinement DAG_Root := Cells.Null_Cell; for I in Passes loop if I = ProcessGlobals then It := Dictionary.FirstGlobalVariable (Dictionary.IsRefined, Sub_Prog); else It := Dictionary.FirstSubprogramParameter (Sub_Prog); end if; while not Dictionary.IsNullIterator (It) loop Current_Sym := Dictionary.CurrentSymbol (It); -- we are interested if Current_Sym is a refinement constituent of a -- locally-declared abstract own variable if Requires_Refinement_Relation (Sym => Current_Sym) then Build_One_Refinement (Refined_Sym => Current_Sym, In_Post_Con => In_Post_Con, Is_Import => Dictionary.IsImport (Dictionary.IsRefined, Sub_Prog, Current_Sym), Is_Export => Dictionary.IsExport (Dictionary.IsRefined, Sub_Prog, Current_Sym), Sub_Prog => Sub_Prog, The_DAG => DAG_Local); if First_Item then DAG_Root := DAG_Local; First_Item := False; else Cells.Utility.Conjoin (VCGHeap, DAG_Local, DAG_Root); end if; end if; It := Dictionary.NextSymbol (It); end loop; end loop; -- now loop through all constituents of the abstract own variables involved and -- treat their constituents as imports (i.e. read but unchanged) if they are not -- referenced at all by the subprogram. Clearly if they are not referenced at all -- then they must be unchanged. 
We need hypotheses to this effect to complete the -- data refinement Abstract_It := Dictionary.FirstGlobalVariable (Dictionary.IsAbstract, Sub_Prog); while not Dictionary.IsNullIterator (Abstract_It) loop Abstract_Sym := Dictionary.CurrentSymbol (Abstract_It); if Is_Abstract_Own_Variable_Of_This_Package (Sym => Abstract_Sym) then It := Dictionary.FirstConstituent (Abstract_Sym); while not Dictionary.IsNullIterator (It) loop Current_Sym := Dictionary.CurrentSymbol (It); -- we only need to do something if this constituent is neither an import or an -- export in the refined view, otherwise it will already have been handled by -- code above if not (Dictionary.IsImport (Dictionary.IsRefined, Sub_Prog, Current_Sym) or Dictionary.IsExport (Dictionary.IsRefined, Sub_Prog, Current_Sym)) then Build_One_Refinement (Refined_Sym => Current_Sym, In_Post_Con => In_Post_Con, Is_Import => True, Is_Export => False, Sub_Prog => Sub_Prog, The_DAG => DAG_Local); if First_Item then DAG_Root := DAG_Local; First_Item := False; else Cells.Utility.Conjoin (VCGHeap, DAG_Local, DAG_Root); end if; end if; It := Dictionary.NextSymbol (It); end loop; end if; Abstract_It := Dictionary.NextSymbol (Abstract_It); end loop; The_DAG := DAG_Root; end Create_Data_Refinement; ---------------------------------------------------------------------- -- procedure for use in creating both procedure and function -- refinement post condition checks; this procedure creates the -- things which are common to all cases such as twiddled pre cons procedure CreateCommonHypotheses (Data_View : in Dictionary.Abstractions; AbstractPre : in Cells.Cell; RefinedPre : in Cells.Cell; CommonHypotheses : out Cells.Cell) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in ImportConstraints; --# in Sub_Prog; --# in out LexTokenManager.State; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives CommonHypotheses, --# VCGHeap from AbstractPre, --# CommandLineData.Content, --# Data_View, --# Dictionary.Dict, --# ImportConstraints, --# LexTokenManager.State, --# RefinedPre, --# Sub_Prog, --# VCGHeap & --# LexTokenManager.State from *, --# CommandLineData.Content, --# Data_View, --# Dictionary.Dict, --# Sub_Prog & --# Statistics.TableUsage from *, --# AbstractPre, --# CommandLineData.Content, --# Data_View, --# Dictionary.Dict, --# ImportConstraints, --# LexTokenManager.State, --# RefinedPre, --# Sub_Prog, --# VCGHeap; is CopyOfImportConstraints, CopyOfAbstractPre, CopyOfRefinedPre, DataRefinement : Cells.Cell; begin Structures.CopyStructure (VCGHeap, AbstractPre, CopyOfAbstractPre); Structures.CopyStructure (VCGHeap, RefinedPre, CopyOfRefinedPre); -- The two precondition components of the hypothesis need any -- import/exports to be twiddled. -- Note that the refined precondition may have an abstract or refined -- data view depending on whether Data Refinement is present. SubstituteTwiddled (Dictionary.IsAbstract, Sub_Prog, CopyOfAbstractPre); SubstituteTwiddled (Data_View, Sub_Prog, CopyOfRefinedPre); -- Now assemble common parts of hypotheses Cells.Utility.Conjoin (VCGHeap, CopyOfRefinedPre, CopyOfAbstractPre); if Data_View = Dictionary.IsRefined then -- Data refinement is present and so add it to the common hypotheses. Create_Data_Refinement (Sub_Prog => Sub_Prog, In_Post_Con => True, The_DAG => DataRefinement); Cells.Utility.Conjoin (VCGHeap, DataRefinement, CopyOfAbstractPre); end if; -- Obtain a copy of the import constraints for use in the refinement VCs. 
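-- Note: the tilde substitution further below is applied to this copy rather than to ImportConstraints itself, so the shared ImportConstraints structure (a read-only global here) is left unchanged.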
Structures.CopyStructure (VCGHeap, ImportConstraints, CopyOfImportConstraints); -- Add in import type information if it exists -- Note that import types may have an abstract or refined -- data view depending on whether Data Refinement is present. if not Cells.Is_Null_Cell (CopyOfImportConstraints) then SubstituteTwiddled (Data_View, Sub_Prog, CopyOfImportConstraints); Cells.Utility.Conjoin (VCGHeap, CopyOfImportConstraints, CopyOfAbstractPre); end if; -- and return result CommonHypotheses := CopyOfAbstractPre; end CreateCommonHypotheses; ---------------------------------------------------------------------- procedure ProcedureRefinementPostCheck (Data_View : in Dictionary.Abstractions; CommonHypotheses : in Cells.Cell; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in LoopStack; --# in STree.Table; --# in Sub_Prog; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out VCGFailure; --# in out VCGHeap; --# derives ContainsReals, --# Dictionary.Dict, --# LexTokenManager.State, --# VCGFailure from *, --# CommandLineData.Content, --# Data_View, --# Dictionary.Dict, --# LexTokenManager.State, --# LoopStack, --# Scope, --# STree.Table, --# Sub_Prog, --# VCGHeap & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# LoopStack, --# SPARK_IO.File_Sys, --# STree.Table, --# Sub_Prog, --# VCGHeap & --# Graph.Table, --# Statistics.TableUsage, --# VCGHeap from *, --# CommandLineData.Content, --# CommonHypotheses, --# Data_View, --# Dictionary.Dict, --# LexTokenManager.State, --# LoopStack, --# Scope, --# STree.Table, --# Sub_Prog, --# VCGHeap & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Data_View, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# LoopStack, --# Scope, --# STree.Table, --# Sub_Prog, --# VCGHeap; is PostCheck : Cells.Cell; PostHypotheses : Cells.Cell; AbstractPost : Cells.Cell; RefinedPost : Cells.Cell; ExportConstraints : Cells.Cell; Conjoined_Function_Defs : Cells.Cell; Initial_Function_Defs : Cells.Cell; procedure GetExportConstraints (Data_View : in Dictionary.Abstractions; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Sub_Prog; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out VCGFailure; --# in out VCGHeap; --# out ExportConstraints; --# derives ContainsReals, --# Dictionary.Dict, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Statistics.TableUsage, --# VCGFailure, --# VCGHeap from *, --# CommandLineData.Content, --# Data_View, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# Sub_Prog, --# VCGHeap & --# ExportConstraints from CommandLineData.Content, --# Data_View, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# Sub_Prog, --# VCGHeap; is ExportIt : Dictionary.Iterator; VarSym, TypeSym : Dictionary.Symbol; begin -- GetExportConstraints ExportConstraints := Cells.Null_Cell; -- The data view of the Exports may be Abstract or Refined -- dependent on the use of Data Refinement. 
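-- The loop below conjoins an in-type constraint for each directly visible export of the chosen data view, skipping private types unless they are predefined time types.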
ExportIt := Dictionary.FirstExport (Data_View, Sub_Prog); while not Dictionary.IsNullIterator (ExportIt) loop VarSym := Dictionary.CurrentSymbol (ExportIt); if IsDirectlyVisible (VarSym, Scope) then TypeSym := Dictionary.GetType (VarSym); if not Dictionary.IsPrivateType (TypeSym, Scope) or else Dictionary.IsPredefinedTimeType (TypeSym) then ConjoinParamConstraint (TypeSym, VarSym, Scope, False, ExportConstraints); end if; end if; ExportIt := Dictionary.NextSymbol (ExportIt); end loop; end GetExportConstraints; begin -- ProcedureRefinementPostCheck CreateAnnotationDAG (Start_Node => STree.RefToNode (Dictionary.GetPostcondition (Dictionary.IsAbstract, Sub_Prog)), Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Privat, The_Unit => Sub_Prog), Force_Abstract => True, Conjoined_Function_Defs => Initial_Function_Defs, DAG_Root => AbstractPost); CreateAnnotationDAG (Start_Node => STree.RefToNode (Dictionary.GetPostcondition (Dictionary.IsRefined, Sub_Prog)), Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Sub_Prog), Force_Abstract => False, Conjoined_Function_Defs => Conjoined_Function_Defs, DAG_Root => RefinedPost); -- Conjoin all of the called function definitions. -- True if there are none. Cells.Utility.Conjoin (VCGHeap, Initial_Function_Defs, Conjoined_Function_Defs); PostHypotheses := CommonHypotheses; -- build in info that exports must be in type GetExportConstraints (Data_View => Data_View, Scope => Scope); if not Cells.Is_Null_Cell (ExportConstraints) then Cells.Utility.Conjoin (VCGHeap, ExportConstraints, PostHypotheses); end if; Cells.Utility.Conjoin (VCGHeap, RefinedPost, PostHypotheses); -- conjoin called function definitions to the Post Hypotheses Cells.Utility.Conjoin (VCGHeap, Conjoined_Function_Defs, PostHypotheses); Cells.Create_Cell (VCGHeap, PostCheck); SetRightArgument (PostCheck, PostHypotheses, VCGHeap); -- set up hypothesis SetAuxPtr (PostCheck, AbstractPost, VCGHeap); -- set up conclusion Graph.Set_Refinement_Post_Check (X => PostCheck); -- pass it to graph end ProcedureRefinementPostCheck; ---------------------------------------------------------------------- procedure FunctionRefinementPostCheck (CommonHypotheses : in Cells.Cell; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in LoopStack; --# in STree.Table; --# in Sub_Prog; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out VCGFailure; --# in out VCGHeap; --# derives ContainsReals, --# Dictionary.Dict, --# LexTokenManager.State, --# VCGFailure from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# LoopStack, --# Scope, --# STree.Table, --# Sub_Prog, --# VCGHeap & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# LoopStack, --# SPARK_IO.File_Sys, --# STree.Table, --# Sub_Prog, --# VCGHeap & --# Graph.Table, --# Statistics.TableUsage, --# VCGHeap from *, --# CommandLineData.Content, --# CommonHypotheses, --# Dictionary.Dict, --# LexTokenManager.State, --# LoopStack, --# Scope, --# STree.Table, --# Sub_Prog, --# VCGHeap & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# LoopStack, --# Scope, --# STree.Table, --# Sub_Prog, --# VCGHeap; is Return_Type : Dictionary.Symbol; 
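-- Return_Cell stands for the function's return value in the generated refinement VCs.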
Return_Cell : Cells.Cell; EqualityOperator : Equalities; ExportConstraint : Cells.Cell; PostCheck : Cells.Cell; PostHypotheses : Cells.Cell; AbstractPost : Cells.Cell; RefinedPost : Cells.Cell; Conjoined_Function_Defs : Cells.Cell; Initial_Function_Defs : Cells.Cell; begin -- identify function return type Return_Type := Dictionary.GetType (Sub_Prog); -- create a ReturnVar cell containing the function return type CreateCellKind (Return_Cell, VCGHeap, Cell_Storage.Return_Var); Cells.Set_Symbol_Value (VCGHeap, Return_Cell, Return_Type); -- for explicit returns we need to equate the ReturnVar to the returned expression -- so we need to choose the appropriate operator if Dictionary.TypeIsBoolean (Return_Type) then EqualityOperator := UseImplication; else EqualityOperator := UseEquals; end if; -- now build abstract and refined "post conditions" CreateFunctionAnnotationDAG (Sub_Prog => Sub_Prog, StartNode => STree.RefToNode (Dictionary.GetPostcondition (Dictionary.IsAbstract, Sub_Prog)), Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Privat, The_Unit => Sub_Prog), ForceAbstract => True, Return_Cell => Return_Cell, EqualityOperator => EqualityOperator, Abstraction => Dictionary.IsAbstract, Conjoined_Function_Defs => Initial_Function_Defs, DAGRoot => AbstractPost); CreateFunctionAnnotationDAG (Sub_Prog => Sub_Prog, StartNode => STree.RefToNode (Dictionary.GetPostcondition (Dictionary.IsRefined, Sub_Prog)), Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Sub_Prog), ForceAbstract => False, Return_Cell => Return_Cell, EqualityOperator => EqualityOperator, Abstraction => Dictionary.IsRefined, Conjoined_Function_Defs => Conjoined_Function_Defs, DAGRoot => RefinedPost); -- Conjoin all of the called function definitions. -- True if there are none. Cells.Utility.Conjoin (VCGHeap, Initial_Function_Defs, Conjoined_Function_Defs); PostHypotheses := CommonHypotheses; -- add in constraint that returned value must be in type Type_Constraint.Make (The_Type => Return_Type, The_Expression => Return_Cell, Scope => Scope, Consider_Always_Valid => False, The_Constraint => ExportConstraint, VCG_Heap => VCGHeap, VC_Contains_Reals => ContainsReals, VC_Failure => VCGFailure); Cells.Utility.Conjoin (VCGHeap, ExportConstraint, PostHypotheses); Cells.Utility.Conjoin (VCGHeap, RefinedPost, PostHypotheses); -- conjoin called function definitions to the Post Hypotheses Cells.Utility.Conjoin (VCGHeap, Conjoined_Function_Defs, PostHypotheses); Cells.Create_Cell (VCGHeap, PostCheck); SetRightArgument (PostCheck, PostHypotheses, VCGHeap); -- set up hypothesis SetAuxPtr (PostCheck, AbstractPost, VCGHeap); -- set up conclusion Graph.Set_Refinement_Post_Check (X => PostCheck); -- pass it to graph end FunctionRefinementPostCheck; begin -- Plant_Refinement_Checks Sub_Prog := Dictionary.GetRegion (Scope); -- first create VC: abstract pre --> refined pre CreateAnnotationDAG (Start_Node => STree.RefToNode (Dictionary.GetPrecondition (Dictionary.IsAbstract, Sub_Prog)), Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Privat, The_Unit => Sub_Prog), Force_Abstract => True, Conjoined_Function_Defs => Conjoined_Function_Defs, DAG_Root => PreCheckHyp); Structures.CopyStructure (VCGHeap, PreCheckHyp, AbstractPre); -- Determine whether the view of the data should be -- Abstract (Proof Refinement only) or -- Refined (Data Refinement is present). 
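-- Create_Data_Refinement yields a null cell when the subprogram involves no refinement constituents of this package's abstract own variables; in that case only proof refinement is present and the abstract data view is used.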
Create_Data_Refinement (Sub_Prog => Sub_Prog, In_Post_Con => False, The_DAG => DataRefinement); if Cells.Is_Null_Cell (DataRefinement) then -- No Data Refinement. Data_View := Dictionary.IsAbstract; else -- Data Refinement is present. Data_View := Dictionary.IsRefined; end if; if Data_View = Dictionary.IsRefined then -- Data Refinement is present and so add it in to the hypotheses Cells.Utility.Conjoin (VCGHeap, DataRefinement, PreCheckHyp); end if; if not Cells.Is_Null_Cell (ImportConstraints) then -- add in import type information to hypotheses if these exist Cells.Utility.Conjoin (VCGHeap, ImportConstraints, PreCheckHyp); end if; -- Conjoin any function definitions called in the abstract pre condition -- to the Pre check Hypotheses Cells.Utility.Conjoin (VCGHeap, Conjoined_Function_Defs, PreCheckHyp); CreateAnnotationDAG (Start_Node => STree.RefToNode (Dictionary.GetPrecondition (Dictionary.IsRefined, Sub_Prog)), Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Sub_Prog), Force_Abstract => False, Conjoined_Function_Defs => Conjoined_Function_Defs, DAG_Root => RefinedPre); -- Conjoin any function definitions called in the refined pre condition -- to the Pre check Hypotheses Cells.Utility.Conjoin (VCGHeap, Conjoined_Function_Defs, PreCheckHyp); Cells.Create_Cell (VCGHeap, PreCheck); SetRightArgument (PreCheck, PreCheckHyp, VCGHeap); -- set up hypothesis SetAuxPtr (PreCheck, RefinedPre, VCGHeap); -- set up conclusion Graph.Set_Refinement_Pre_Check (X => PreCheck); -- pass to graph -- now create VC: refined post /\ refined pre /\ abstract pre --> abstract post CreateCommonHypotheses (Data_View, AbstractPre, RefinedPre, CommonHypotheses); if Dictionary.IsFunction (Dictionary.GetRegion (Scope)) then FunctionRefinementPostCheck (CommonHypotheses => CommonHypotheses, Scope => Scope); else ProcedureRefinementPostCheck (Data_View => Data_View, CommonHypotheses => CommonHypotheses, Scope => Scope); end if; end Plant_Refinement_Checks; -------------------------------- procedure Plant_Sub_Class_Checks (Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in LoopStack; --# in STree.Table; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out VCGFailure; --# in out VCGHeap; --# derives ContainsReals, --# Dictionary.Dict, --# Graph.Table, --# LexTokenManager.State, --# Statistics.TableUsage, --# VCGFailure, --# VCGHeap from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# LoopStack, --# Scope, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# LoopStack, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# VCGHeap; is Sub_Prog, Root_Sub_Prog : Dictionary.Symbol; Pre_Cell, Post_Cell, RootPre, RootPost, ExtPre, ExtPost : Cells.Cell; Return_Type : Dictionary.Symbol; Return_Cell : Cells.Cell; Conjoined_Function_Defs : Cells.Cell; EqualityOperator : Equalities; procedure SubstituteParameters (Subprog, RootSubprog : in Dictionary.Symbol; RootConstraintRoot : in Cells.Cell) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives Statistics.TableUsage, --# VCGHeap from *, --# Dictionary.Dict, --# RootConstraintRoot, --# RootSubprog, --# Subprog, --# VCGHeap; is P, 
NewP : Cells.Cell; S : CStacks.Stack; VarSym : Dictionary.Symbol; begin -- DAG traversal algorithm of D.E. Knuth, Fundamental Algorithms, p.317; CStacks.CreateStack (S); P := RootConstraintRoot; loop loop exit when Cells.Is_Null_Cell (P); CStacks.Push (VCGHeap, P, S); if Is_Leaf (Node => P, VCG_Heap => VCGHeap) then P := Cells.Null_Cell; else P := LeftPtr (VCGHeap, P); end if; end loop; exit when CStacks.IsEmpty (S); P := CStacks.Top (VCGHeap, S); CStacks.Pop (VCGHeap, S); if Is_Leaf (Node => P, VCG_Heap => VCGHeap) then if Cells.Get_Kind (VCGHeap, P) = Cell_Storage.Reference then VarSym := Cells.Get_Symbol_Value (VCGHeap, P); if Dictionary.IsSubprogramParameter (VarSym) then -- Substitute name. Cells.Set_Symbol_Value (VCGHeap, P, Dictionary.GetSubprogramParameter (Subprog, Dictionary.GetSubprogramParameterNumber (VarSym))); if Dictionary.Types_Are_Equal (Left_Symbol => Dictionary.GetType (VarSym), Right_Symbol => Dictionary.GetSubprogramControllingType (RootSubprog), Full_Range_Subtype => False) then -- Substitute an inherit dereference to the parameter used in Subprog (because we are -- constructing VC in terms of Sub_Prog not Root_Sub_Prog so references to the controlling -- parameter of Root_Sub_Prog must be replaced by fld_inherit ("controlling param of Sub_Prog") Cells.Create_Cell (VCGHeap, NewP); Cells.Copy_Contents (VCGHeap, P, NewP); InsertInheritDeReference (Cells.Get_Symbol_Value (VCGHeap, P), VCGHeap, NewP); Cells.Copy_Contents (VCGHeap, NewP, P); end if; end if; end if; P := Cells.Null_Cell; else P := RightPtr (VCGHeap, P); end if; end loop; end SubstituteParameters; begin -- Plant_Sub_Class_Checks Sub_Prog := Dictionary.GetRegion (Scope); if Dictionary.Is_Subprogram (Sub_Prog) then Root_Sub_Prog := Dictionary.GetOverriddenSubprogram (Sub_Prog); if not Dictionary.Is_Null_Symbol (Root_Sub_Prog) then -- subclass integrity checks required -- first do precondition check: abstract root pre -> abstract extended pre -- create abstract root pre CreateAnnotationDAG (Start_Node => STree.RefToNode (Dictionary.GetPrecondition (Dictionary.IsAbstract, Root_Sub_Prog)), Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Root_Sub_Prog), Force_Abstract => True, Conjoined_Function_Defs => Conjoined_Function_Defs, DAG_Root => RootPre); -- Conjoin the called function definitions from the root precondition -- with the hypotheses for the root precondition Cells.Utility.Conjoin (VCGHeap, Conjoined_Function_Defs, RootPre); -- change parameter names in root op to match those in extended op SubstituteParameters (Sub_Prog, Root_Sub_Prog, RootPre); -- create abstract extended pre CreateAnnotationDAG (Start_Node => STree.RefToNode (Dictionary.GetPrecondition (Dictionary.IsAbstract, Sub_Prog)), Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Sub_Prog), Force_Abstract => True, Conjoined_Function_Defs => Conjoined_Function_Defs, DAG_Root => ExtPre); -- Conjoin the called function definitions from the extended precondition -- with the hypotheses for the root precondition Cells.Utility.Conjoin (VCGHeap, Conjoined_Function_Defs, RootPre); -- assemble check Cells.Create_Cell (VCGHeap, Pre_Cell); SetRightArgument (Pre_Cell, RootPre, VCGHeap); -- set up hypothesis SetAuxPtr (Pre_Cell, ExtPre, VCGHeap); -- set up conclusion Graph.Set_Subclass_Pre_Check (X => Pre_Cell); -- pass to graph -- go on to do post check here if Dictionary.IsFunction (Sub_Prog) then Return_Type := Dictionary.GetType (Sub_Prog); -- create a ReturnVar cell containing the 
function return type CreateCellKind (Return_Cell, VCGHeap, Cell_Storage.Return_Var); Cells.Set_Symbol_Value (VCGHeap, Return_Cell, Return_Type); -- for explicit returns we need to equate the ReturnVar to the returned expression -- so we need to choose the appropriate operator if Dictionary.TypeIsBoolean (Return_Type) then EqualityOperator := UseImplication; else EqualityOperator := UseEquals; end if; -- now create "postcondition" DAGs CreateFunctionAnnotationDAG (Sub_Prog, STree.RefToNode (Dictionary.GetPostcondition (Dictionary.IsAbstract, Sub_Prog)), Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Sub_Prog), True, Return_Cell, EqualityOperator, Dictionary.IsAbstract, -- to get Conjoined_Function_Defs, ExtPost); -- Conjoin the called function definitions from the extended postcondition -- with the hypotheses for the extended postcondition Cells.Utility.Conjoin (VCGHeap, Conjoined_Function_Defs, ExtPost); CreateFunctionAnnotationDAG (Root_Sub_Prog, STree.RefToNode (Dictionary.GetPostcondition (Dictionary.IsAbstract, Root_Sub_Prog)), Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Root_Sub_Prog), True, Return_Cell, EqualityOperator, Dictionary.IsAbstract, -- to get Conjoined_Function_Defs, RootPost); -- Conjoin the called function definitions from the root postcondition -- with the hypotheses for the extended postcondition Cells.Utility.Conjoin (VCGHeap, Conjoined_Function_Defs, ExtPost); else -- procedure -- create abstract extended post CreateAnnotationDAG (Start_Node => STree.RefToNode (Dictionary.GetPostcondition (Dictionary.IsAbstract, Sub_Prog)), Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Sub_Prog), Force_Abstract => True, Conjoined_Function_Defs => Conjoined_Function_Defs, DAG_Root => ExtPost); -- Conjoin the called function definitions from the extended postcondition -- with the hypotheses for the extended postcondition Cells.Utility.Conjoin (VCGHeap, Conjoined_Function_Defs, ExtPost); -- create abstract root post CreateAnnotationDAG (Start_Node => STree.RefToNode (Dictionary.GetPostcondition (Dictionary.IsAbstract, Root_Sub_Prog)), Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Root_Sub_Prog), Force_Abstract => True, Conjoined_Function_Defs => Conjoined_Function_Defs, DAG_Root => RootPost); -- Conjoin the called function definitions from the root postcondition -- with the hypotheses for the extended postcondition Cells.Utility.Conjoin (VCGHeap, Conjoined_Function_Defs, ExtPost); end if; SubstituteParameters (Sub_Prog, Root_Sub_Prog, RootPost); -- assemble check Cells.Create_Cell (VCGHeap, Post_Cell); SetRightArgument (Post_Cell, ExtPost, VCGHeap); -- set up hypothesis SetAuxPtr (Post_Cell, RootPost, VCGHeap); -- set up conclusion Graph.Set_Subclass_Post_Check (X => Post_Cell); -- pass to graph end if; end if; end Plant_Sub_Class_Checks; begin -- IncorporateConstraints -- first do Pre Condition if Subprogram if Dictionary.Is_Subprogram (Dictionary.GetRegion (Scope)) or else Dictionary.IsTaskType (Dictionary.GetRegion (Scope)) then PreConNode := STree.RefToNode (Dictionary.GetPrecondition (Which_Abstraction (Scope => Scope), Dictionary.GetRegion (Scope))); Plant_Precondition (Node => PreConNode, Semantic_Error_In_Subprogram => Semantic_Error_In_Subprogram, Scope => Scope); if not Semantic_Error_In_Subprogram then -- Only create these VCs in the absence of semantic errors in the subprogram body if Which_Abstraction (Scope => Scope) = Dictionary.IsRefined 
then -- then a second annotation has been found so refinement proof is needed Plant_Refinement_Checks (Scope => Scope); end if; Plant_Sub_Class_Checks (Scope => Scope); end if; end if; -- then Post Condition for subprogram or package init PostConNode := STree.RefToNode (Dictionary.GetPostcondition (Which_Abstraction (Scope => Scope), Dictionary.GetRegion (Scope))); if Dictionary.IsFunction (Dictionary.GetRegion (Scope)) then Plant_Return_Expression (Return_Type => Dictionary.GetType (Dictionary.GetRegion (Scope)), Node => PostConNode, Semantic_Error_In_Subprogram => Semantic_Error_In_Subprogram, Scope => Scope); else Plant_Postcondition (Node => PostConNode, Subprog => Dictionary.GetRegion (Scope), Abstraction => Which_Abstraction (Scope => Scope), Semantic_Error_In_Subprogram => Semantic_Error_In_Subprogram, Type_Check_Exports => Type_Check_Exports, Scope => Scope); end if; end IncorporateConstraints; spark-2012.0.deb/examiner/dictionary-writeoperatorrenamingdeclaration.adb0000644000175000017500000001370611753202336025730 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Dictionary) procedure WriteOperatorRenamingDeclaration (The_Operator : in RawDict.Operator_Info_Ref; Scope : in Scopes) is procedure Write_Operator_Name (File : in SPARK_IO.File_Type; Name : in SP_Symbols.SP_Symbol) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# Name; is begin case Name is when SP_Symbols.RWand => Write_String (File, "and"); when SP_Symbols.RWor => Write_String (File, "or"); when SP_Symbols.RWxor => Write_String (File, "xor"); when SP_Symbols.plus => Write_String (File, "+"); when SP_Symbols.minus => Write_String (File, "-"); when SP_Symbols.RWmod => Write_String (File, "mod"); when SP_Symbols.equals => Write_String (File, "="); when SP_Symbols.less_than => Write_String (File, "<"); when SP_Symbols.less_or_equal => Write_String (File, "<="); when SP_Symbols.greater_than => Write_String (File, ">"); when SP_Symbols.greater_or_equal => Write_String (File, ">="); when SP_Symbols.multiply => Write_String (File, "*"); when SP_Symbols.divide => Write_String (File, "/"); when SP_Symbols.double_star => Write_String (File, "**"); when SP_Symbols.RWabs => Write_String (File, "abs"); when others => Write_String (File, "not"); end case; end Write_Operator_Name; -------------------------------------------------------------------------------- procedure Write_Unary_Operator_Renaming_Declaration (Name : in SP_Symbols.SP_Symbol; Operand : in RawDict.Type_Info_Ref; Scope : in Scopes) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# LexTokenManager.State, --# Name, --# Operand, --# Scope; is begin Write_String (Dict.TemporaryFile, "unary operator "); Write_Operator_Name (File => Dict.TemporaryFile, Name => Name); Write_String (Dict.TemporaryFile, " "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Type_Symbol (Operand)); Write_String (Dict.TemporaryFile, " is renamed in "); Write_Scope (Dict.TemporaryFile, Scope); Write_Line (Dict.TemporaryFile, " ;"); end Write_Unary_Operator_Renaming_Declaration; -------------------------------------------------------------------------------- procedure Write_Binary_Operator_Renaming_Declaration (Name : in SP_Symbols.SP_Symbol; Left : in RawDict.Type_Info_Ref; Right : in RawDict.Type_Info_Ref; Scope : in Scopes) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# Left, --# LexTokenManager.State, --# Name, --# Right, --# Scope; is begin Write_String (Dict.TemporaryFile, "binary operator "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Type_Symbol (Left)); Write_String (Dict.TemporaryFile, " "); Write_Operator_Name (File => Dict.TemporaryFile, Name => Name); Write_String (Dict.TemporaryFile, " "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Type_Symbol (Right)); Write_String (Dict.TemporaryFile, " is renamed in "); Write_Scope (Dict.TemporaryFile, Scope); Write_Line (Dict.TemporaryFile, " ;"); end Write_Binary_Operator_Renaming_Declaration; begin -- WriteOperatorRenamingDeclaration case RawDict.Get_Operator_Is_Binary (The_Operator => The_Operator) is when False => Write_Unary_Operator_Renaming_Declaration (Name => RawDict.Get_Operator_Name (The_Operator => The_Operator), Operand => RawDict.Get_Operator_Operand (The_Operator => The_Operator), Scope => Scope); when True => Write_Binary_Operator_Renaming_Declaration (Name => 
RawDict.Get_Operator_Name (The_Operator => The_Operator), Left => RawDict.Get_Operator_Left_Operand (The_Operator => The_Operator), Right => RawDict.Get_Operator_Right_Operand (The_Operator => The_Operator), Scope => Scope); end case; end WriteOperatorRenamingDeclaration; spark-2012.0.deb/examiner/errorhandler-conversions-tostring-semanticerr-semanticerrexpl.adb0000644000175000017500000041016111753202337031346 0ustar eugeneugenseparate (ErrorHandler.Conversions.ToString.SemanticErr) procedure SemanticErrExpl (E_Str : in out E_Strings.T) is begin case Err_Num.ErrorNum is when 1 => E_Strings.Append_String (E_Str => E_Str, Str => "If the identifier is declared in a" & " separate (or parent) package, the package must be included in an inherit clause" & " and the identifier prefixed with the package name." & " Ensure that there are no errors in the declaration of the identifier"); when 5 => E_Strings.Append_String (E_Str => E_Str, Str => "Usually associated with the use of an identifier other than a package" & " name as a prefix in a selected component."); when 8 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when the selector in a selected component of a record" & " references a non-existent field."); when 9 => E_Strings.Append_String (E_Str => E_Str, Str => "Occurs if the prefix to a selected component representing a procedure" & " in a procedure call statement or a type mark is not a package. Also" & " occurs if a selector is applied in an expression to an object which" & " is not a record variable."); when 11 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if a package body is encountered for which there is no" & " package specification."); when 12 => E_Strings.Append_String (E_Str => E_Str, Str => "If the object in question is really a constant, then remove it from" & " the enclosing package's own variable annotation."); when 14 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if the name in a ""separate"" clause of a subunit does not" & " correctly identify a compilation unit. Common causes of this error" & " are a syntax error in the parent unit or omitting the" & " parent unit specification and/or parent unit body entries from the index file"); when 15 => E_Strings.Append_String (E_Str => E_Str, Str => "Common causes of this error are an error in the declaration" & " of the stub or the omission of the parent unit body from the index file"); when 20 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if a renaming declaration contains a non-existent operator."); when 21 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if an attempt is made to index into a name which does not" & " represent an array."); when 22 => E_Strings.Append_String (E_Str => E_Str, Str => "Occurs when the type given in the Ada declaration of an own variable" & " differs from that" & " ""announced"" in the package's own variable clause."); when 24 => E_Strings.Append_String (E_Str => E_Str, Str => "When a procedure is called any global variables exported by that" & " procedure must be visible at the point of call. This error message" & " indicates that the global variable concerned is not visible." 
& " It may be that it needs to be added to the global annotation of" & " the procedure containing the call (or some further enclosing subprogram)" & " or it may be that an inherit clause is missing from the package containing" & " the call."); when 25 => E_Strings.Append_String (E_Str => E_Str, Str => "When a procedure is called any global variables imported by that" & " procedure must be visible at the point of call. This error message" & " indicates that the global variable concerned is not visible." & " It may be that it needs to be added to the global annotation of" & " the subprogram containing the call (or some further enclosing subprogram)" & " or it may be that an inherit clause is missing from the package containing" & " the call."); when 26 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued at the end of a package specification if no full declaration" & " has been supplied for a deferred constant declared in the package" & " specification."); when 27 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued at the end of a package specification if no full declaration" & " has been supplied for a private type declared in the package specification."); when 28 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued at the end of a package body if an own variable announced in the" & " package specification has neither been given an Ada declaration nor refined."); when 30 => E_Strings.Append_String (E_Str => E_Str, Str => "The attribute is identified in Annex K of the SPARK 95 report as a" & " valid SPARK 95" & " attribute but the Examiner does not currently support it." & " It is" & " possible to work round the omission by putting the use of the attribute" & " inside a" & " suitable function which is hidden from the Examiner."); when 32 => E_Strings.Append_String (E_Str => E_Str, Str => "Likely causes are type conversions involving record types or" & " non-convertible arrays."); when 33 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if the prefix of an aggregate is not a composite type."); when 34 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if a call is made to a user-defined subprogram in a" & " package initialization part."); when 35 => E_Strings.Append_String (E_Str => E_Str, Str => "Indicates use of an undeclared binary operator; this message means that" & " the type on each side" & " of the operator cannot appear with the operator used. e.g." & " attempting to add an integer to" & " an enumeration literal."); when 39 => E_Strings.Append_String (E_Str => E_Str, Str => "An unconstrained array type or variable of such a type is" & " illegally used. Use of" & " unconstrained arrays in SPARK is limited to passing them as" & " parameters, indexing into them" & " and taking attributes of them. This message also arises if a string" & " literal is used as an actual parameter where the formal parameter is a string subtype." 
& " In this case, the error can be removed by qualifying the string literal with the subtype" & " name."); when 40 => E_Strings.Append_String (E_Str => E_Str, Str => "This operator is only defined for numeric types and, if the" & " Ravenscar Profile is" & " selected, for type Ada.Real_Time.Time_Span."); when 41 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if a subtype declaration taking the form of a constrained" & " subtype of an" & " unconstrained array type is encountered but with a type mark which" & " does not represent an" & " array."); when 42 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when a name represents an object which is not of the required type."); when 44 => E_Strings.Append_String (E_Str => E_Str, Str => "The bounds of an explicit range must be scalar types."); when 47 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if an array type definition is encountered where one or" & " more of the index types used in the definition contained errors in its original declaration. For" & " example, SPARK requires array index bounds to be constant (known at compile time) so an attempt" & " to use an illegal subtype with variable bounds as an array index will generate this message."); when 49 => E_Strings.Append_String (E_Str => E_Str, Str => "Only SPARK 95 attributes 'Min and 'Max require two arguments."); when 50 => E_Strings.Append_String (E_Str => E_Str, Str => "To assign a non-constant expression to a variable, an assignment statement" & " in the body of the program unit (following the 'begin') must be used."); when 51 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if an ordering operator such as ""<"" is encountered between" & " objects of an array" & " type other than string or a constrained subtype of string."); when 52 => E_Strings.Append_String (E_Str => E_Str, Str => "Ordering operators are only defined for scalar types and type" & " String plus, if the" & " Ravenscar Profile is selected, types Time and Time_Span in package" & " Ada.Real_Time."); when 53 => E_Strings.Append_String (E_Str => E_Str, Str => "In SPARK record aggregates may not contain an others clause."); when 54 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when an attribute not supported by SPARK is used."); when 58 => E_Strings.Append_String (E_Str => E_Str, Str => "Occurs at the end of a package, subprogram, protected type, task type" & " or loop if the terminal identifier does not" & " match the name or label originally given."); when 59 => E_Strings.Append_String (E_Str => E_Str, Str => "A subtype of the form applicable to a subrange of a scalar type has" & " been encountered" & " but the type provided is not a scalar type."); when 60 => E_Strings.Append_String (E_Str => E_Str, Str => "In SPARK record aggregates may not contain multiple choices, each" & " field must be" & " assigned a value individually."); when 62 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if an embedded package declares subprograms or own variables" & " and no body is" & " provided."); when 65 => E_Strings.Append_String (E_Str => E_Str, Str => "The names of the parameters used in renaming declarations may not" & " be altered from Left," & " Right for binary operators and Right for unary operators. 
These are" & " the names given for" & " the parameters in the ARM and the SPARK Definition requires that" & " parameter names are not" & " changed."); when 66 => E_Strings.Append_String (E_Str => E_Str, Str => "Either the package does not have an initializes annotation or all" & " the own variables" & " requiring initialization were given values at the point of declaration."); when 67 => E_Strings.Append_String (E_Str => E_Str, Str => "This is an Ada 83 rule. Machine code can only be used in procedures."); when 68 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if an attempt is made to rename an operator using a subtype" & " of the type for" & " which it was originally implicitly declared."); when 71 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if the name supplied in a pragma interface, import or attach_handler" & " does not match the name of the associated subprogram or variable."); when 72 => E_Strings.Append_String (E_Str => E_Str, Str => "Pragma import may only occur in a body stub, or immediately" & " after a subprogram declaration in the visible part of a package," & " or immediately after a variable declaration." & " Pragma interface may only occur in a body stub or immediately" & " after a subprogram declaration in the visible part of a package."); when 73 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if an Ada declaration is given for an own variable which" & " has been refined, or in" & " a refinement clause if an own variable is refined more than once."); when 74 => E_Strings.Append_String (E_Str => E_Str, Str => "A subject of a refinement definition of a package must be an own" & " variable of that" & " package."); when 75 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if an attempt is made to refine an own variable onto an own" & " variable of a" & " non-embedded package."); when 76 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when a refinement clause in a package body attempts to name" & " an embedded package" & " own variable as a refinement constituent and the name given for the" & " embedded package is" & " already in use."); when 77 => E_Strings.Append_String (E_Str => E_Str, Str => "Occurs in the own variable clause of a package embedded in another" & " package if an own" & " variable which is a refinement constituent of an own variable of the" & " enclosing package is" & " omitted."); when 78 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if an own variable occurs in the initialization clause of" & " an embedded package" & " and the own variable concerned is a refinement constituent of another" & " own variable which" & " is not listed in the initialization specification of its package."); when 79 => E_Strings.Append_String (E_Str => E_Str, Str => "Occurs in the initialization clause of a package embedded in another" & " package if an own" & " variable which is a refinement constituent of an initialized own variable" & " of the enclosing" & " package is omitted."); when 82 => E_Strings.Append_String (E_Str => E_Str, Str => "An own variable initialization clause and that of its refinement" & " constituents must be" & " consistent."); when 83 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if an own variable does not occur in the initialization" & " clause of an embedded" & " package and the own variable concerned is a refinement constituent" & " of another own variable" & " which is listed in the initialization clause of its package."); when 84 => E_Strings.Append_String (E_Str => 
E_Str, Str => "Issued at the end of a package if a refinement constituent of a" & " refined own variable" & " has not been given an Ada declaration or further refined."); when 85 => E_Strings.Append_String (E_Str => E_Str, Str => "A variable XXX which has occurred in a refined global annotation" & " is neither a variable" & " that occurred in the earlier global definition nor a refinement" & " constituent of any such" & " variable."); when 86 => E_Strings.Append_String (E_Str => E_Str, Str => "If the global annotation of a procedure specification contains" & " an own variable and that" & " own variable is later refined then at least one refinement constituent" & " of the own variable" & " shall appear in the second global annotation supplied for the procedure" & " body."); when 87 => E_Strings.Append_String (E_Str => E_Str, Str => "A global definition containing abstract own variables was given in" & " the definition for" & " subprogram XXX, in a package specification. A refined global definition" & " is required in the" & " package body."); when 94 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued anywhere a boolean expression is required (e.g. in if, " & " exit and while statements) and the expression provided" & " is not of type boolean."); when 97 => E_Strings.Append_String (E_Str => E_Str, Str => "'BASE may only be used as a prefix to another attribute."); when 99 => E_Strings.Append_String (E_Str => E_Str, Str => "Occurs if a subtype is declared of an array which is already constrained."); when 113 => E_Strings.Append_String (E_Str => E_Str, Str => "Private subprograms would not be callable in SPARK 83 and are therefore not" & " permitted; they may be declared and called in SPARK 95."); when 114 => E_Strings.Append_String (E_Str => E_Str, Str => "A subtype mark or an explicit Range attribute may not be used in a context" & " where a simple expression is expected."); when 115 => E_Strings.Append_String (E_Str => E_Str, Str => "Annotation should be of the form 'own S is A, B, C;'"); when 117 => E_Strings.Append_String (E_Str => E_Str, Str => "Aggregates are qualified expressions so they must be prefixed with a" & " subtype mark. An exception is made in the case of aggregate assignments to" & " unconstrained arrays as the rules of Ada do not permit unconstrained array" & " aggregates to be qualified"); when 118 => E_Strings.Append_String (E_Str => E_Str, Str => "Unqualified aggregates may only be used in assignments to one-dimensional unconstrained" & " arrays. SPARK does not permit aggregate assignment to multi-dimensional unconstrained" & " arrays"); when 119 => E_Strings.Append_String (E_Str => E_Str, Str => "Indicates use of an undeclared unary operator; this message means that" & " the type on the right hand side" & " of the operator cannot appear with the operator used. e.g." & " attempting to negate an enumeration literal."); when 121 => E_Strings.Append_String (E_Str => E_Str, Str => "The identifier indicating what kind of message to justify must be either" & " 'Flow_Message' or 'Warning_Message' or some unique abbreviation of them such as" & " 'Fl' or even 'F'. Case is ignored."); when 122 => E_Strings.Append_String (E_Str => E_Str, Str => "This item should be an integer literal representing the error or warning message that" & " is being marked as expected."); when 123 => E_Strings.Append_String (E_Str => E_Str, Str => "It does not make sense to allow certain warnings to be justified with the accept annotation." 
& " In particular, attempting to justify warnings raised by the justification system itself" & " could lead to some special kind of recursive hell that we would not wish to enter."); when 124 => E_Strings.Append_String (E_Str => E_Str, Str => "This class of error does not reference any variables, and therefore requires" & " no names."); when 125 => E_Strings.Append_String (E_Str => E_Str, Str => "This class of error references one variable, and therefore requires" & " one name."); when 126 => E_Strings.Append_String (E_Str => E_Str, Str => "This class of error references two variables, and therefore requires" & " two names. Two names are need to justify expected information" & " flow messages such as ""X is not derived from Y""." & " Note that for messages of this kind the accept annotation should list the names in the order" & " ""export, import"""); when 127 => E_Strings.Append_String (E_Str => E_Str, Str => "This class of error references either zero or one variable, and therefore" & " requires either zero or one name. An ineffective assignment error requires" & " the name of variable being assigned to. An ineffective statement error" & " has no name associated with it."); when 128 => E_Strings.Append_String (E_Str => E_Str, Str => "This class of error references either one or two variables, and therefore" & " requires either one or two names. One name is required when the export" & " is a function return value."); when 132 => E_Strings.Append_String (E_Str => E_Str, Str => "A package should not 'with' itself and a subunit" & " should not 'with' the package (or main program) which declares its stub"); when 135 => E_Strings.Append_String (E_Str => E_Str, Str => "Possible causes of this error are" & " an error in the inherited package specification or" & " omitting an entry for the package specification from the index file or" & " circular inheritance"); when 136 => E_Strings.Append_String (E_Str => E_Str, Str => "A refinement clause of a package body defines the constituent" & " parts of own variables given in the own variable clause of the" & " corresponding package declaration"); when 137 => E_Strings.Append_String (E_Str => E_Str, Str => "Possible causes of this error are" & " an error in the child package specification or" & " omitting the child from the parent's component list in the index file" & " or omitting the child specification entry from the index file"); when 138 => E_Strings.Append_String (E_Str => E_Str, Str => "A constituent of a refinement clause which is defined in a" & " child package must be an own variable of the child package"); when 139 => E_Strings.Append_String (E_Str => E_Str, Str => "A package can only initialize variables" & " declared in its own variable clause"); when 140 => E_Strings.Append_String (E_Str => E_Str, Str => "The parent of a child package must be a library package and" & " must be declared prior to a child package. If using an index file" & " the parent must have an entry in the index file and the child package" & " must be listed as a component of the parent package"); when 142 => E_Strings.Append_String (E_Str => E_Str, Str => "The prefix should appear in the inherit clause of the current package"); when 144 => E_Strings.Append_String (E_Str => E_Str, Str => "The name must denote an entire variable or an own variable" & " of a package. 
If the variable or own variable is declared in a" & " separate (or parent) package, the package must be included in an inherit clause" & " and the identifier prefixed with the package name"); when 145 => E_Strings.Append_String (E_Str => E_Str, Str => "The identifier should be a typemark. If the typemark is declared in a" & " separate (or parent) package, the package must be included in an inherit clause" & " and the identifier prefixed with the package name." & " Ensure that there are no errors in the declaration of the typemark"); when 148 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variables may be ""type announced"" as being of an abstract proof" & " type only where" & " that type is declared later in the same package. Thus --# own State :" & " T; is legal if" & " --# type T is abstract; appears later in the package; however, --# own" & " State : P.T; is" & " illegal if T is an abstract proof type declared in remote package P."); when 149 => E_Strings.Append_String (E_Str => E_Str, Str => "Occurs when an own variable clause announces more than one own variable" & " as being of" & " a type XXX and XXX is later declared as being of an abstract proof type." & " Each abstract" & " own variable must be of a unique type."); when 150 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when the name of a constant is found in a mandatory annotation such as" & " a global or derives annotation. Constants should not appear in such annotations."); when 151 => E_Strings.Append_String (E_Str => E_Str, Str => "A variable declared in a package must have been previously announced" & " as either an own" & " variable or as a concrete refinement constituent of an own variable."); when 153 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if the name, type or parameter mode of a parameter is different" & " in the" & " subprogram body declaration from that declared originally."); when 154 => E_Strings.Append_String (E_Str => E_Str, Str => "A subprogram or task body must have a global annotation if it references" & " global variables; a" & " procedure or task body must have a dependency relation to perform" & " information flow analysis."); when 155 => E_Strings.Append_String (E_Str => E_Str, Str => "Do not repeat global or derives annotations in the body" & " (or body stub) of a subprogram, entry or task except for" & " state (own variable) refinement."); when 156 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when an identifier which SPARK requires to be an entire" & " variable represents" & " something other than this. Most commonly this message occurs when" & " a component of a" & " structured variable appears in a core annotation."); when 158 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued in a global annotation if it names a formal parameter of the" & " subprogram."); when 161 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if a parameter appears as an export to a procedure when it is of" & " parameter mode" & " in."); when 162 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if a parameter appears as an import to a procedure when it is of" & " parameter mode" & " out."); when 163 => E_Strings.Append_String (E_Str => E_Str, Str => "SPARK contains rules to prevent construction of programs containing" & " recursive" & " subprogram calls; this error message occurs if a procedure or function" & " is called before" & " its body has been declared. 
Re-ordering of subprogram bodies in the" & " package concerned will" & " be required."); when 165 => E_Strings.Append_String (E_Str => E_Str, Str => "Violation of the anti-aliasing rule."); when 166 => E_Strings.Append_String (E_Str => E_Str, Str => "Violation of the anti-aliasing rule."); when 167 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when an own variable which is imported into the main program" & " procedure (or a task when the Ravenscar profile is enabled) has not" & " been declared as being initialized by its package. At the main program" & " level the only" & " imports that are permitted are initialized own variables of inherited" & " packages. There are" & " two possible cases to consider: (1) the main program should be importing" & " the variable in" & " which case it should be annotated in its package with --# initializes" & " (and, of course, actually" & " initialized in some way) or be an external variable or protected variable" & " which is implicitly" & " initialized; or (2) the own variable concerned is not initialized at" & " elaboration," & " should not therefore be considered an import to the main program and" & " should be removed from the" & " main program's import list."); when 169 => E_Strings.Append_String (E_Str => E_Str, Str => "It is an important property of SPARK that functions cannot have" & " side-effects, therefore" & " only the reading of global variable is permitted. It is usually" & " convenient to omit" & " modes from function global annotations but use of mode 'in' is" & " permitted."); when 172 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if an actual parameter which is an array element is associated" & " with an exported" & " formal parameter in a procedure call. Exported parameters must be either" & " entire variables" & " or a record field."); when 173 => E_Strings.Append_String (E_Str => E_Str, Str => "Violation of the anti-aliasing rule."); when 174 => E_Strings.Append_String (E_Str => E_Str, Str => "Occurs in initialization specifications if something other than a" & " variable is listed as" & " being initialized."); when 175 => E_Strings.Append_String (E_Str => E_Str, Str => "A justification of an error requires the actual variables named in" & " the error message to be referenced. The keyword ""all"" can only" & " be used with language profiles for auto-code generators such as SCADE KCG." & " Such profiles are only available with the SPARK Pro Toolset."); when 176 => E_Strings.Append_String (E_Str => E_Str, Str => "When analysing with flow=auto, a procedure or entry without a derives annotation" & " may not be called by a procedure, task or entry with a derives annotation. This is because" & " the body of the caller must be checked against its derives annotation." & " In order to calculate the correct dependency relation for the body of the caller" & " there must be derives annotations present on all called procedures or entries."); when 180 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when an identifier which SPARK requires to be an entire" & " composite constant represents something other than this."); when 182 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when a rule policy has already been declared for this" & " constant within this declarative region. 
This rule policy will" & " be ineffective."); when 200 => E_Strings.Append_String (E_Str => E_Str, Str => "Each formal parameter of a subprogram shall be imported or exported or both."); when 201 => E_Strings.Append_String (E_Str => E_Str, Str => "Every variable in a global definition must also appear in the associated derives annotation" & " where it will be either imported or exported or both."); when 252 => E_Strings.Append_String (E_Str => E_Str, Str => "The value of 'Size must be static and must be of" & " an integer type"); when 253 => E_Strings.Append_String (E_Str => E_Str, Str => "The value of 'Size must be a positive integer or zero"); when 254 => E_Strings.Append_String (E_Str => E_Str, Str => "Setting 'Size for a user-defined non-first subtype is not permitted." & " See Ada95 LRM 13.3(48)"); when 255 => E_Strings.Append_String (E_Str => E_Str, Str => "Ada95 LRM Annex N.31 defines a program unit to be either a package, a task unit," & " a protected unit, a protected entry, a generic unit, or an explicitly" & " declared subprogram other than an enumeration literal."); when 273 => E_Strings.Append_String (E_Str => E_Str, Str => "Where a type mark is included in an own variable declaration it" & " indicates that the own" & " variable will either be of a concrete type of that name (which may" & " be either already" & " declared or be declared later in the package) or of an abstract proof" & " type declared in" & " the package specification. In the former case the refinement is" & " illegal because own" & " variables of concrete Ada types may not be refined. In the latter case" & " it is legal;" & " however, no suitable proof type declaration has been found in this case."); when 300 => E_Strings.Append_String (E_Str => E_Str, Str => "A renaming declaration must be the first declarative item of a package" & " body or main" & " program or it must be placed immediately after the declaration of" & " an embedded package."); when 301 => E_Strings.Append_String (E_Str => E_Str, Str => "A renaming declaration may be placed immediately after the declaration" & " of an embedded" & " package; in this case it may only rename subprograms declared in that" & " package."); when 302 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued in a subprogram renaming declaration if it contains parameter" & " names, numbers or" & " types which differ from those originally declared."); when 303 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if a renaming declaration has a different operator on each" & " side of the reserved" & " word RENAMES."); when 305 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued in an operator renaming declaration if it contains types" & " which differ from those" & " applicable to the operator being renamed."); when 306 => E_Strings.Append_String (E_Str => E_Str, Str => "Occurs in an operator renaming declaration if an attempt is made" & " to rename an operator" & " which is already visible. (The message will also appear as a secondary" & " consequence of" & " trying to rename an operator between undeclared types.)"); when 308 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if an attempt is made to assign a variable of a type which is" & " limited or which" & " contains a limited type."); when 309 => E_Strings.Append_String (E_Str => E_Str, Str => "This message means that the operator exists between the types on each" & " side of it but" & " that it is not visible. 
The most likely cause is that the types" & " concerned are defined in" & " another package and that renaming is required to make the operator visible."); when 310 => E_Strings.Append_String (E_Str => E_Str, Str => "The % operator is used to indicate the value of a variable on entry to a for loop. This is because" & " the variable may be used in the exit expression of the loop and may also be modified in the body" & " of the loop. Since the semantics of Ada require the exit expression to be fixed after evaluation" & " we require a way of reasoning about the original value of a variable prior to any alteration in" & " the loop body. No other situation requires this value so % may not be used anywhere else."); when 311 => E_Strings.Append_String (E_Str => E_Str, Str => "Where an own variable is announced as being of some type," & " SPARK requires that type" & " to be declared; the declaration cannot be in the form of an" & " unconstrained array because" & " SPARK prohibits unconstrained variables."); when 314 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if an attempt is made to use, in a second annotation, an own" & " variable which has" & " been refined. Second annotations should use the appropriate refinement" & " constituents of the" & " own variable."); when 315 => E_Strings.Append_String (E_Str => E_Str, Str => "Certain proof contexts have been included in the syntax of SPARK but" & " are not yet" & " supported; this error message results if one is found."); when 316 => E_Strings.Append_String (E_Str => E_Str, Str => "If a type is private, then record field selectors may not be used." & " In pre- and post-conditions, a proof function can be declared to" & " yield the required attribute of a private type."); when 317 => E_Strings.Append_String (E_Str => E_Str, Str => "The tilde decoration indicates the initial value of a variable or" & " parameter which is both imported and exported. A function may not have an explicit side effect" & " on a program variable and so cannot be regarded as exporting such a variable. For modelling purposes" & " a read of an external (stream) variable is regarded as having a side effect (outside the SPARK" & " boundary). Since it may be necessary to refer to the initial value of the external variable, before" & " this implicit side effect occurs, the use of tilde is allowed only for external variables of mode IN which" & " are globally referenced by function."); when 318 => E_Strings.Append_String (E_Str => E_Str, Str => "The tilde decoration indicates the initial value of a variable or" & " parameter which is" & " both imported and exported. Percent indicates the value of a variable" & " on entry to a for loop; this message occurs if either operator is applied" & " to any other object."); when 319 => E_Strings.Append_String (E_Str => E_Str, Str => "The tilde decoration indicates the initial value of a variable or" & " parameter which is" & " both imported and exported; this message occurs if the variable" & " concerned is either" & " exported only or imported only in which case no distinction between" & " its initial and final" & " value is required."); when 320 => E_Strings.Append_String (E_Str => E_Str, Str => "Tilde (and %) may not be applied to an element of an array or field of a record." & " e.g. 
to" & " indicate the initial value of the Ith element of array V use V~(I)" & " not V(I)~."); when 321 => E_Strings.Append_String (E_Str => E_Str, Str => "Since it does not make sense to refer to anything other than the" & " initial value of a" & " variable in a pre-condition there is no need to use tilde to distinguish" & " initial from" & " final values."); when 322 => E_Strings.Append_String (E_Str => E_Str, Str => "Pre-conditions are concerned with the initial values of information" & " carried into a" & " subprogram. Since only imports can do this only imports can appear in" & " pre-condition" & " expressions."); when 323 => E_Strings.Append_String (E_Str => E_Str, Str => "The extended SPARK update syntax is only used to express changes to" & " components of a" & " structured variable."); when 324 => E_Strings.Append_String (E_Str => E_Str, Str => "When using the extended SPARK update syntax for a record, you can not" & " update more than" & " one element in each clause of the update. For example, you cannot use" & " [x,y => z], you" & " must instead use [x => z; y => z]."); when 325 => E_Strings.Append_String (E_Str => E_Str, Str => "Occurs if a type is ""announced"" as part of an own variable" & " clause and the end" & " of the package is reached without an Ada declaration for a type of" & " this name being found."); when 326 => E_Strings.Append_String (E_Str => E_Str, Str => "Occurs anywhere where a proof context is found not to be a boolean" & " expression."); when 328 => E_Strings.Append_String (E_Str => E_Str, Str => "Occurs if a function calls a procedure which exports a global variable;" & " this would" & " create an illegal side-effect of the function."); when 329 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if a call is made to a user-defined subprogram in a package" & " initialization part."); when 330 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if an attempt is made, in a package initialization part, to" & " update an own" & " variable of a non-enclosing package."); when 332 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued where the type contains a component which is a limited private" & " type, but where" & " the declaration of this type in the visible part of the package does" & " not specify that the" & " type is limited."); when 333 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when an own variable is initialized either by assignment or" & " by having a pragma Import attached to it when initialization of the variable" & " is not announced in its package's own variable initialization specification."); when 335 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if a function is declared in a package specification without an" & " annotation but" & " one is then supplied on the function body."); when 337 => E_Strings.Append_String (E_Str => E_Str, Str => "Selected component notation may not be used in places where an item is" & " directly" & " visible."); when 338 => E_Strings.Append_String (E_Str => E_Str, Str => "Parameters passed as mode in out must be listed as imports in the" & " subprogram's" & " dependency relation if they are of scalar types. 
The rule also applies" & " to a parameter of a" & " private type if its full declaration is scalar."); when 340 => E_Strings.Append_String (E_Str => E_Str, Str => "User-declared subprograms may not be renamed in package specifications" & " although the" & " implicitly declared function subprograms associated with operators may be."); when 341 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if a range is found where a single value is expected, for example," & " if an array" & " slice is constructed."); when 342 => E_Strings.Append_String (E_Str => E_Str, Str => "Like global and derives annotations, proof annotations should be placed" & " on the first" & " appearance of a subprogram. There may also be a requirement for a" & " second proof annotation" & " on a subprogram body where it references an abstract own variable."); when 343 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if a second proof annotation for a subprogram is found but" & " the subprogram does" & " not reference any abstract own variables. A second annotation is" & " only required where it" & " is necessary to express both an abstract (external) and a refined" & " (internal) view of an" & " operation."); when 399 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if a proof annotation contains an expression that would cause" & " a constraint error" & " if it were in an executable Ada statement. For example: ""--# post X =" & " T'Succ(T'Last);""" & " VCs generated from such malformed predicates would always be unprovable."); when 400 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when a static expression, evaluated using perfect arithmetic," & " is found to" & " contain a division by zero."); when 401 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when a numeric literal is illegal because it contains, for example," & " digits not" & " compatible with its number base."); when 402 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued whenever a static expression would cause a constraint error." & " e.g. assigning a" & " value to a constant outside the constant's type range. 
In SPARK a static" & " expression may" & " not yield a value which violates a range constraint."); when 403 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when an array attribute containing an argument is found and the" & " value of the" & " argument is inconsistent with the number of dimensions of the array" & " type to which it is" & " being applied."); when 407 => E_Strings.Append_String (E_Str => E_Str, Str => "Choices in case statements and array aggregates may not overlap."); when 408 => E_Strings.Append_String (E_Str => E_Str, Str => "A case statement must either explicitly supply choices to cover the" & " whole range of the" & " (sub)type of the controlling expression, or it must supply an others choice."); when 409 => E_Strings.Append_String (E_Str => E_Str, Str => "In SPARK, no static range is permitted to be null."); when 410 => E_Strings.Append_String (E_Str => E_Str, Str => "The choices in case statements and array aggregates must be within" & " the constraints of" & " the appropriate (sub)type."); when 411 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued where an others clause is required to satisfy the Ada language rules."); when 413 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued where a range constraint is outside the range of the (sub)type to" & " which the" & " constraint applies."); when 414 => E_Strings.Append_String (E_Str => E_Str, Str => "An array aggregate must either explicitly supply values for all array" & " elements or" & " provide an others clause."); when 415 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued where an array aggregate using positional association contains" & " more entries than" & " required by the array index type."); when 418 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued where assignment, association or type conversion is attempted" & " between two" & " different constrained subtypes of the same unconstrained array type, and" & " where the index" & " bounds do not match."); when 419 => E_Strings.Append_String (E_Str => E_Str, Str => "When an entity is renamed, the fully qualified name is no longer visible," & " and so must not be used."); when 420 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when an attempt is made to convert between two arrays whose indexes" & " are neither" & " of the same type nor numeric."); when 421 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when a type conversion attempts to convert between two array types" & " whose" & " components are of different types."); when 422 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when a type conversion attempts to convert between two array types" & " whose" & " components are of the same type but do not have constraints which can be" & " statically" & " determined to be identical."); when 423 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when attempting to convert between two array types which have" & " different numbers" & " of dimensions."); when 424 => E_Strings.Append_String (E_Str => E_Str, Str => "Character attributes such as 'Val, 'Pos, 'Succ and 'Pred are not" & " permitted below a concatentation operator in a String expression."); when 425 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if the argument of a type conversion is a string literal. 
A common" & " cause is an" & " attempt to type qualify a string and accidentally omitting the tick character."); when 500 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when performing data flow analysis only where a subprogram has no" & " dependency" & " clause and its global variables have not been given modes in the global" & " annotation."); when 501 => E_Strings.Append_String (E_Str => E_Str, Str => "A dependency relation is required for each procedure if information flow" & " analysis is to" & " be performed."); when 502 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when a procedure has both a global annotation with modes and a" & " dependency" & " relation, and a global of mode in is listed as an export in the dependency" & " relation."); when 503 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when a procedure has both a global annotation with modes and a" & " dependency" & " relation, and a global of mode out is listed as an import in the" & " dependency relation."); when 505 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued where a procedure has both a global annotation with modes and" & " a dependency" & " relation, and a global variable of mode in out is not listed as an" & " import in the" & " dependency relation."); when 507 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued where a procedure has both a global annotation with modes" & " and a dependency" & " relation, and a global variable of mode in out is not listed as an" & " export in the" & " dependency relation."); when 509 => E_Strings.Append_String (E_Str => E_Str, Str => "When using refinement in automatic flow analysis mode, if there is a" & " dependency relation on the subprogram specification then there must" & " also be one on the body. Similarly, if there is no dependency relation" & " on the specification then the body is not permitted to have one."); when 604 => E_Strings.Append_String (E_Str => E_Str, Str => "In Ada 95, a package body is illegal unless it is required for the" & " purpose of providing" & " a subprogram body, or unless this pragma is used. This error is" & " issued where a package" & " body is found for a package whose specification does not require a body."); when 606 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when the parameter to a pragma Elaborate_Body is invalid."); when 607 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued where a package specification contains no subprogram declarations," & " but whose own" & " variables (as specified in the package annotation) are not all declared" & " (and initialized" & " where appropriate) in the package specification. This is because such a" & " package is not" & " allowed a body in Ada 95 unless either the pragma is given or a" & " subprogram declared."); when 613 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if a dotted name appears in a body stub as in ""package body" & " P.Q is" & " separate"". No legal stub could ever have such a name."); when 614 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if an attempt is made to declare a child package which is" & " embedded in a package" & " or subprogram."); when 615 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if the closing identifier of a package has a different number of" & " identifiers" & " from the name originally given for the package. For example ""package" & " P.Q is ... 
end" & " P.Q.R;"""); when 616 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if an attempt is made to with or inherit a private package" & " from the visible part" & " of a public package."); when 617 => E_Strings.Append_String (E_Str => E_Str, Str => "Arises from attempting to inherit a public sibling child package" & " from a private child" & " package."); when 618 => E_Strings.Append_String (E_Str => E_Str, Str => "A private descendent (although it may be a public package) can only" & " inherit a remote package if its parent also inherits" & " it; this is a analogous to the behaviour of embedded packages which" & " may also only inherit" & " a remote package if their enclosing package also does so."); when 619 => E_Strings.Append_String (E_Str => E_Str, Str => "This message indicates an attempt to claim that own variables of a" & " package other than a" & " private child package of the current package are refinement constituents" & " of an abstract" & " own variable of the current package."); when 620 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variables of private child packages must appear as refinement" & " constituents of the" & " package which owns the child. If the Examiner has seen the owner" & " package body before" & " processing the child and has not found the required refinement" & " constituent then this" & " message results on processing the child."); when 621 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variables of private child packages must appear as refinement" & " constituents of the" & " package which owns the child. If the Examiner has seen a child package" & " which declares an" & " own variable before examining its owner’s body then this message" & " is issued if the" & " owner lacks the required refinement constituent declaration."); when 622 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if an own variable appears in an initialization clause and is" & " also a refinement" & " constituent of an own variable which is not marked as initialized."); when 623 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if an own variable does not appear in an initialization clause" & " and is also a" & " refinement constituent of an own variable that is marked as initialized."); when 624 => E_Strings.Append_String (E_Str => E_Str, Str => "A package must appear in a with clause before types declared in" & " it can be specified in a use type clause."); when 630 => E_Strings.Append_String (E_Str => E_Str, Str => "Only generic subprogram can be instantiated."); when 631 => E_Strings.Append_String (E_Str => E_Str, Str => "Subprogram kind of generic and its instantiation must match"); when 632 => E_Strings.Append_String (E_Str => E_Str, Str => "Subprogram kind of generic and its instantiation must match"); when 635 => E_Strings.Append_String (E_Str => E_Str, Str => "The number of generic formal and actual parameters must match exactly"); when 636 => E_Strings.Append_String (E_Str => E_Str, Str => "See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type" & " which is of a compatible class. Note that SPARK does not have default values for such" & " associations"); when 637 => E_Strings.Append_String (E_Str => E_Str, Str => "There are weaknesses in the generic type model of Ada 83 that prevent the implementation" & " of a safe subset of generics in SPARK 83. These deficiencies are overcome in Ada 95." 
& " SPARK 83 users may employ the predefined unit Unchecked_Conversion only."); when 638 => E_Strings.Append_String (E_Str => E_Str, Str => "A standalone generic subprogram may not have a global annotation. Note that a subprogram in a" & " generic package may have a global annotation as long as it only refers to own variables that" & " are local to the package."); when 639 => E_Strings.Append_String (E_Str => E_Str, Str => "SPARK restricts formal objects to being constants in order to avoid concealed information" & " flows."); when 640 => E_Strings.Append_String (E_Str => E_Str, Str => "SPARK restricts formal objects to being constants in order to avoid concealed information" & " flows."); when 641 => E_Strings.Append_String (E_Str => E_Str, Str => "A generic body must be preceded by a generic declaration of the same name."); when 645 => E_Strings.Append_String (E_Str => E_Str, Str => "See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type" & " which is of a compatible class. Note that SPARK does not have default values for such" & " associations"); when 646 => E_Strings.Append_String (E_Str => E_Str, Str => "See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type" & " which is of a compatible class. Note that SPARK does not have default values for such" & " associations"); when 647 => E_Strings.Append_String (E_Str => E_Str, Str => "See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type" & " which is of a compatible class. Note that SPARK does not have default values for such" & " associations"); when 648 => E_Strings.Append_String (E_Str => E_Str, Str => "See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type" & " which is of a compatible class. Note that SPARK does not have default values for such" & " associations"); when 649 => E_Strings.Append_String (E_Str => E_Str, Str => "See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type" & " which is of a compatible class. Note that SPARK does not have default values for such" & " associations"); when 650 => E_Strings.Append_String (E_Str => E_Str, Str => "See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type" & " which is of a compatible class. Note that SPARK does not have default values for such" & " associations"); when 651 => E_Strings.Append_String (E_Str => E_Str, Str => "In non-generic code we statically know the value being assigned to the variable and can" & " check that it is in range. In the case of a generic we cannot do this because we do not" & " know the bounds of the variable's type. The variable may, however, be assigned to in the sequence" & " of statements in the generic body because generation of run-time checks will provide" & " suitable protection from out-of-range values."); when 652 => E_Strings.Append_String (E_Str => E_Str, Str => "In non-generic code we statically know the values being used as the range bounds for" & " a subtype and can check that they are in range. In the case of a generic we cannot" & " do this because we do not know the bounds of the variable's type."); when 653 => E_Strings.Append_String (E_Str => E_Str, Str => "In non-generic code we statically know the value being assigned to the constant and can" & " check that it is in range. In the case of a generic we cannot do this because we do not" & " know the bounds of the constant's type. 
A variable, assigned to in the sequence" & " of statements in the generic body, may be a suitable substitute for such a constant."); when 654 => E_Strings.Append_String (E_Str => E_Str, Str => "Generic units provide a template for creating callable units and are not directly" & " callable."); when 655 => E_Strings.Append_String (E_Str => E_Str, Str => "Components of generic packages cannot be accessed directly. First instantiate the package" & " and then access components of the instantiation."); when 657 => E_Strings.Append_String (E_Str => E_Str, Str => "The 'Always_Valid assertion can only be applied to variables or" & " to components of record variables."); when 658 => E_Strings.Append_String (E_Str => E_Str, Str => "The 'Always_Valid assertion can only be applied to objects which are:" & " (1) of a scalar type," & " (2) a one dimensional array of scalar components," & " (3) an entire record variable of a non-tagged type with all" & " components that are either scalar or an array of scalar components," & " (4) an array variable whose components are records satisfying (3)." & " Additionally a field of a record satisfying these constraints may" & " be marked individually as always valid."); when 662 => E_Strings.Append_String (E_Str => E_Str, Str => "The 'Always_Valid assertion can only be applied to variables which are" & " own variables with the mode in, or to subcomponents of records" & " which are mode in own variables."); when 700 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variables may be given a mode to indicate that they are system level" & " inputs" & " or outputs (i.e. they obtain values from or pass values to the external" & " environment). Since effective SPARK design strictly separates inputs from" & " outputs the mode 'in out' is not permitted."); when 701 => E_Strings.Append_String (E_Str => E_Str, Str => "If an abstract own variable is given a mode then its refinement" & " constituents must" & " all be of the same mode."); when 702 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if an own variable of an embedded package is not given the" & " same mode as" & " the earlier refinement constituent that announced it would exist."); when 703 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if an own variable of an embedded package is not given the same" & " mode as" & " the earlier refinement constituent that announced it would exist."); when 704 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if an own variable of an embedded package is given a mode when" & " the earlier refinement constituent that announced it would exist did not" & " have one."); when 705 => E_Strings.Append_String (E_Str => E_Str, Str => "If a refinement constituent is an own variable of a private package then the" & " constituent must have the same mode as the own variable to which it refers."); when 706 => E_Strings.Append_String (E_Str => E_Str, Str => "If a refinement constituent is an own variable of a private package then the" & " constituent must have the same mode as the own variable to which it refers."); when 707 => E_Strings.Append_String (E_Str => E_Str, Str => "If a refinement constituent is an own variable of a private package then the" & " constituent can only be given a mode if the own variable to which it" & " refers has one."); when 708 => E_Strings.Append_String (E_Str => E_Str, Str => "Mode own variables (stream variables) are implicitly initialized by the" & " environment" & " to which they are connected and may not appear in initializes 
clauses" & " since this" & " would require their explicit initialization."); when 709 => E_Strings.Append_String (E_Str => E_Str, Str => "Functions are permitted to reference own variables that are either unmoded" & " or of" & " mode 'in'. Since mode 'out' own variables represent outputs to the" & " environment," & " reading them in a function does not make sense and is not allowed."); when 710 => E_Strings.Append_String (E_Str => E_Str, Str => "Global modes, if given, must be consistent with the modes of own variables" & " that appear" & " in the global list."); when 711 => E_Strings.Append_String (E_Str => E_Str, Str => "Global modes, if given, must be consistent with the modes of own" & " variables that appear" & " in the global list."); when 712 => E_Strings.Append_String (E_Str => E_Str, Str => "Global modes, if given, must be consistent with the modes of own variables" & " that appear" & " in the global list."); when 713 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variables with mode 'in' denote system-level inputs; their exportation" & " is" & " not allowed."); when 714 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variables with mode 'out' denote system-level outputs; their" & " importation is" & " not allowed."); when 715 => E_Strings.Append_String (E_Str => E_Str, Str => "To avoid ordering effects, functions which globally access own" & " variables which" & " have modes (indicating that they are connected to the external" & " environment) may" & " only appear directly in assignment or return statements." & " They may not appear as" & " actual parameters or in any other form of expression."); when 716 => E_Strings.Append_String (E_Str => E_Str, Str => "To avoid ordering effects, own variables which" & " have modes (indicating that they are connected to the external" & " environment) may" & " only appear directly in assignment or return statements. They may not" & " appear as" & " actual parameters (other than to instantiations of Unchecked_Conversion) or in any other form of expression."); when 717 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variables with mode 'in' represent inputs to the system from the" & " external" & " environment. As such, assigning to them does not make sense and is not" & " permitted."); when 718 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variables with mode 'out' represent outputs to the external" & " environment from the system. As such, referencing them does not make sense" & " and is not permitted."); when 719 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variables with modes represent inputs and outputs between the external" & " environment and the system. Referencing or updating them during package" & " elaboration would introduce ordering effects and is not permitted."); when 720 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variables with modes represent inputs and outputs between the external" & " environment and the system. Referencing or updating them during package" & " elaboration would introduce ordering effects and is not permitted."); when 721 => E_Strings.Append_String (E_Str => E_Str, Str => "Functions may be used to reference external (stream) variables and the" & " Examiner" & " generates the appropriate information flow to show that the value returned" & " by" & " the function is 'volatile'. If the abstract view of the same function" & " shows it" & " referencing an own variable which is not an external stream then the" & " volatility" & " of the function is concealed. 
The error can be removed either by making the" & " abstract own variable a mode 'in' stream or by using a procedure instead" & " of a" & " function to read the refined stream variable."); when 722 => E_Strings.Append_String (E_Str => E_Str, Str => "Where a procedure references an external (stream) variable of mode 'in' the" & " Examiner constructs appropriate information flow to show that the input" & " stream" & " is 'volatile'. If the abstract view shows that the procedure obtains its" & " result" & " by simply reading an own variable which is not an external stream then the" & " volatility is concealed. The error can be removed either by making the" & " global" & " mode of XXX 'in out' or making XXX an external (stream) variable of mode" & " 'in'."); when 723 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when a global variable which is present in the first (abstract)" & " global annotation" & " is omitted from the second (refined) one."); when 724 => E_Strings.Append_String (E_Str => E_Str, Str => "If an exit statement names a loop label, then the most closely enclosing" & " loop statement must have a matching label"); when 725 => E_Strings.Append_String (E_Str => E_Str, Str => "To avoid ordering effects, protected functions may" & " only appear directly in assignment or return statements. They may not" & " appear as" & " actual parameters or in any other form of expression. Ordering effects" & " occur" & " because the global state referenced by the protected function may be updated" & " by another process during expression evaluation."); when 730 => E_Strings.Append_String (E_Str => E_Str, Str => "If a loop has neither an iteration scheme nor any exit statements then it" & " will" & " run forever. Any statements following it will be unreachable. 
SPARK only" & " allows" & " one such loop which must be the last statement of the main program"); when 750 => E_Strings.Append_String (E_Str => E_Str, Str => "The type mark used for the index of an array type declaration must not" & " be the same as the name of the array type being declared"); when 751 => E_Strings.Append_String (E_Str => E_Str, Str => "The type mark given for a field in a record type declaration must" & " not be the same as the name of the record type being declared"); when 752 => E_Strings.Append_String (E_Str => E_Str, Str => "For an identifier to appear legally as an import in a derives annotation," & " it must be a formal parameter or must appear legally in a" & " preceding global annotation and must be of mode 'in' or mode 'in out'"); when 753 => E_Strings.Append_String (E_Str => E_Str, Str => "For an identifier to appear legally as an export in a derives annotation," & " it must be a formal parameter or must appear legally in a" & " preceding global annotation and must be of mode 'out' or mode 'in out'"); when 754 => E_Strings.Append_String (E_Str => E_Str, Str => "For a package name to be visible in Ada context, it must appear in" & " both the inherit clause and the with clause of the enclosing package"); when 755 => E_Strings.Append_String (E_Str => E_Str, Str => "A parent of a child package must be inherited (but not withed)" & " to be visible in that child."); when 756 => E_Strings.Append_String (E_Str => E_Str, Str => "A grandparent of a child package should not be included in prefixes" & " referencing a declaration of the child package"); when 770 => E_Strings.Append_String (E_Str => E_Str, Str => "If the type Any_Priority is defined in package System, then the subtypes" & " Priority and" & " Interrupt_Priority must also be defined; if support for tasking is not" & " required, then" & " the definition of Any_Priority may be removed"); when 771 => E_Strings.Append_String (E_Str => E_Str, Str => "Ada 95 requires that both Priority and Interrupt_Priority be immediate" & " subtypes" & " of Any_Priority."); when 772 => E_Strings.Append_String (E_Str => E_Str, Str => "Ada 95 requires that the range of the subtype Priority include at least" & " 30 values;" & " this requirement is stated in the Ada 95 Language Reference Manual at" & " D.1(26)"); when 773 => E_Strings.Append_String (E_Str => E_Str, Str => "Ada 95 requires that task priority types meet the following criteria," & " the second of which is relevant to this error:" & " subtype Any_Priority is Integer range implementation-defined;" & " subtype Priority is Any_Priority range Any_Priority'First .. implementation-defined;" & " subtype Interrupt_Priority is Any_Priority range Priority'Last+1 .. Any_Priority'Last"); when 774 => E_Strings.Append_String (E_Str => E_Str, Str => "Ada 95 requires that task priority types meet the following criteria," & " the third of which is relevant to this error:" & " subtype Any_Priority is Integer range implementation-defined;" & " subtype Priority is Any_Priority range Any_Priority'First .. implementation-defined;" & " subtype Interrupt_Priority is Any_Priority range Priority'Last+1 .. Any_Priority'Last"); when 775 => E_Strings.Append_String (E_Str => E_Str, Str => "Ada 95 requires that task priority types meet the following criteria," & " the third of which is relevant to this error:" & " subtype Any_Priority is Integer range implementation-defined;" & " subtype Priority is Any_Priority range Any_Priority'First .. 
implementation-defined;" & " subtype Interrupt_Priority is Any_Priority range Priority'Last+1 .. Any_Priority'Last"); when 776 => E_Strings.Append_String (E_Str => E_Str, Str => "In SPARK95 mode, the packages that may be specified in the target" & " configuration file are: Standard, System, Ada.Real_Time and Ada.Interrupts." & " The latter two are ignored unless the Ravenscar profile is selected."); when 777 => E_Strings.Append_String (E_Str => E_Str, Str => "Ada 95, and hence SPARK95, defines Priority as being an immediate" & " subtype of Integer"); when 778 => E_Strings.Append_String (E_Str => E_Str, Str => "The specified identifier cannot be used here; it is most probably" & " either not valid in the target configuration file at all, or might" & " be valid in a different package, but not here."); when 779 => E_Strings.Append_String (E_Str => E_Str, Str => "In SPARK83 mode, only package Standard may be specified in the" & " target configuration file."); when 780 => E_Strings.Append_String (E_Str => E_Str, Str => "This type may only be declared as private in the target configuration" & " file."); when 781 => E_Strings.Append_String (E_Str => E_Str, Str => "This error can only be generated in SPARK95 mode when the configuration" & " file specifies" & " a value for System.Min_Int"); when 782 => E_Strings.Append_String (E_Str => E_Str, Str => "This error can only be generated in SPARK95 mode when the configuration" & " file specifies" & " a value for System.Max_Int"); when 783 => E_Strings.Append_String (E_Str => E_Str, Str => "This error can only be generated in SPARK95 mode when the configuration" & " file specifies" & " a value for System.Max_Binary_Modulus"); when 785 => E_Strings.Append_String (E_Str => E_Str, Str => "The maximum decimal precision for a floating point type, where" & " a range specification has not been included, is defined" & " by System.Max_Digits"); when 786 => E_Strings.Append_String (E_Str => E_Str, Str => "The maximum decimal precision for a floating point type, where" & " a range specification has been included, is defined" & " by System.Max_Base_Digits"); when 791 => E_Strings.Append_String (E_Str => E_Str, Str => "Predefined types are those defined either by the language, or in package" & " Standard, using the configuration file mechanism"); when 793 => E_Strings.Append_String (E_Str => E_Str, Str => "If a predefined type is to be used in a base type assertion or in" & " a derived type declaration, then it must appear in the configuration" & " file and have a well-defined range."); when 796 => E_Strings.Append_String (E_Str => E_Str, Str => "A base type assertion can only be given exactly once. Explicitly derived" & " scalar types and predefined types never need a base type assertion."); when 800 => E_Strings.Append_String (E_Str => E_Str, Str => "In SPARK, modular types must have a modulus which is a positive" & " power of 2"); when 801 => E_Strings.Append_String (E_Str => E_Str, Str => "Ada83 (and hence SPARK83) does not include modular types"); when 803 => E_Strings.Append_String (E_Str => E_Str, Str => "Unary arithmetic operators are of little value. The ""abs"" and ""+"" operators" & " have no effect for modular types, and so are not required. The unary minus" & " operator is a source of potential confusion, and so is not permitted in" & " SPARK"); when 804 => E_Strings.Append_String (E_Str => E_Str, Str => "A universal expression cannot be used as the left hand operand of a binary" & " operator if the right hand operand is of a modular type. 
Qualification of" & " the" & " left hand expression is required in this case."); when 805 => E_Strings.Append_String (E_Str => E_Str, Str => "A universal expression cannot be used as the right hand operand of a binary" & " operator if the left hand operand is of a modular type. Qualification of" & " the" & " right hand expression is required in this case."); when 806 => E_Strings.Append_String (E_Str => E_Str, Str => "A universal expression cannot be used as operand of an unary ""not""" & " operator if no type can be determined from the context of the expression." & " Qualification of the operand is required in this case."); when 814 => E_Strings.Append_String (E_Str => E_Str, Str => "The only possible type for the constant System.Default_Bit_Order" & " is System.Bit_Order when it appears in the configuration file"); when 815 => E_Strings.Append_String (E_Str => E_Str, Str => "System.Bit_Order is implicity declared in package System when a configuration" & " file is given. This is an enumeration type with only two literals" & " Low_Order_First and High_Order_First"); when 820 => E_Strings.Append_String (E_Str => E_Str, Str => "Only non-abstract tagged types are currently supported. It is hoped to" & " lift this" & " restriction in a future Examiner release."); when 821 => E_Strings.Append_String (E_Str => E_Str, Str => "If a type is declared as ""tagged private"" then its full declaration must" & " be a tagged" & " record."); when 822 => E_Strings.Append_String (E_Str => E_Str, Str => "In SPARK, ""new"" can only be used to declare a type extension; other" & " derived types are not permitted."); when 823 => E_Strings.Append_String (E_Str => E_Str, Str => "SPARK only permits types from another library package to be extended." & " This rule prevents" & " overloading of inherited operations."); when 824 => E_Strings.Append_String (E_Str => E_Str, Str => "SPARK only permits one type extension per package. This rule prevents" & " overloading of inherited operations."); when 825 => E_Strings.Append_String (E_Str => E_Str, Str => "Since SPARK only permits one type extension per package it follows that" & " the declaration" & " ""new XXX with private"" in a package visible part must be paired with ""new" & " XXX with record...""" & " in its private part. The ancestor type XXX must be the same in both" & " declarations."); when 826 => E_Strings.Append_String (E_Str => E_Str, Str => "Type extension is an Ada 95 feature not included in Ada or SPARK 83."); when 827 => E_Strings.Append_String (E_Str => E_Str, Str => "There are several reasons for this SPARK rule. Firstly, Ada requires" & " tagged parameters to be" & " passed by reference and so an object must exist at least implicitly." & " Secondly, in order to" & " perform flow analysis of inherited subprogram calls, the Examiner needs" & " identify what subset of the" & " information available at the point of call" & " is passed to and from the called subprogram. 
Since information can only" & " flow through objects it" & " follows that actual parameter must be an object."); when 828 => E_Strings.Append_String (E_Str => E_Str, Str => "This SPARK rule facilitates the main uses of tagged types while greatly" & " simplifying visibility rules."); when 829 => E_Strings.Append_String (E_Str => E_Str, Str => "To avoid overloading, SPARK prohibits more than one potentially visible" & " subprogram having the" & " same name."); when 830 => E_Strings.Append_String (E_Str => E_Str, Str => "This rule means that a private type can only be implemented as a tagged" & " type if the private" & " type itself is tagged."); when 831 => E_Strings.Append_String (E_Str => E_Str, Str => "This is an Ada rule: type conversions simply omit unused fields of the" & " extended type. It follows" & " that conversions must be in the direction of the root type."); when 832 => E_Strings.Append_String (E_Str => E_Str, Str => "For flow analysis purposes the Examiner needs to know what subset of the" & " information in" & " the unconverted view is available in the converted view. Since" & " information can only flow" & " through objects it follows that only objects can be converted."); when 833 => E_Strings.Append_String (E_Str => E_Str, Str => "If an extended type has a private ancestor then an extension aggregate" & " must be used rather" & " than a normal aggregate."); when 834 => E_Strings.Append_String (E_Str => E_Str, Str => "An empty record can have no use in a SPARK program others than as a" & " root type from which" & " other types can be derived and extended. For this reason, null records" & " are only allowed" & " if they are tagged."); when 835 => E_Strings.Append_String (E_Str => E_Str, Str => "An extension aggregate is only appropriate if the record type it is" & " defining is" & " an extended record. A normal aggregate should be used for other record" & " (and array)" & " types."); when 836 => E_Strings.Append_String (E_Str => E_Str, Str => "The expression before the reserved word ""with"" must be of an ancestor type" & " of the overall aggregate" & " type. In SPARK, the ancestor expression may not be a subtype mark."); when 837 => E_Strings.Append_String (E_Str => E_Str, Str => "The ancestor type can be an tagged type with a private extension;" & " however, there must be no private" & " extensions between the ancestor type and the type of the aggregate."); when 838 => E_Strings.Append_String (E_Str => E_Str, Str => "The aggregate form ""with null record"" can only be used if the type of" & " the aggregate is a null record" & " extension of the ancestor type. If any fields are added between the" & " ancestor type and the aggregate type then" & " values need to be supplied for them so ""null record"" is inappropriate."); when 839 => E_Strings.Append_String (E_Str => E_Str, Str => "SPARK permits one root tagged type or one tagged type extension per" & " package, but not both. This rule prevents" & " the declaration of illegal operations with more than one controlling" & " parameter."); when 840 => E_Strings.Append_String (E_Str => E_Str, Str => "A primitive function controlled by its return result would be almost" & " unusable in SPARK because a data" & " flow error would occur wherever it was used."); when 841 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when there is an error in the return type on a function's" & " initial declaration. In this situation" & " we cannot be sure what return type is expected in the function's body." 
& " It would be misleading to simply" & " report a type mismatch since the types might match perfectly and both" & " be wrong. Instead, the Examiner reports" & " the above error and refuses to analyse the function body until its" & " specification is corrected."); when 844 => E_Strings.Append_String (E_Str => E_Str, Str => "In SPARK2005, an operation which successfully overrides a parent operation" & " must be specified as Overriding."); when 845 => E_Strings.Append_String (E_Str => E_Str, Str => "In SPARK2005, an overriding operation must successfully override an" & " operation inherited from the parent."); when 850 => E_Strings.Append_String (E_Str => E_Str, Str => "Support for concurrent features of the SPARK language, including" & " protected objects," & " tasking, etc. are only supported when the Ravenscar profile is selected."); when 851 => E_Strings.Append_String (E_Str => E_Str, Str => "The parameter to pragma Atomic must be a simple_name; and may not be" & " passed using" & " a named association"); when 852 => E_Strings.Append_String (E_Str => E_Str, Str => "This is an Ada rule (pragma Atomic takes a local name see LRM 13.1(1))." & " Note that this precludes the use of pragma Atomic on a predefined type"); when 853 => E_Strings.Append_String (E_Str => E_Str, Str => "pragma Atomic may only be applied to base types that are scalar. (i.e." & " enumeration types, integer types, real types, modular types) or a non-tagged" & " record type with a single field which is a predefined scalar type, such" & " as Integer, Character or Boolean. As an additional special case, a" & " record type with a single field of type System.Address is also allowed."); when 855 => E_Strings.Append_String (E_Str => E_Str, Str => "An own variable with a task modifier must be of a task type. A task own" & " variable must have the task modifier. An own variable with a protected" & " modifier must be a protected object, suspension object or pragma" & " atomic type. 
A protected or suspension object own variable must" & " have the protected modifier."); when 858 => E_Strings.Append_String (E_Str => E_Str, Str => "A variable in a protects list is effectively protected and hence" & " cannot be refined."); when 859 => E_Strings.Append_String (E_Str => E_Str, Str => "Protected state cannot be refined or be used as refinement constituents"); when 860 => E_Strings.Append_String (E_Str => E_Str, Str => "Protected state (including all refinement constituents) must be initialized."); when 861 => E_Strings.Append_String (E_Str => E_Str, Str => "If an abstract own variable has an Integrity property, then so must" & " all its refinement constituents, and vice-versa."); when 862 => E_Strings.Append_String (E_Str => E_Str, Str => "If both an abstract own variable and a refinement constituent have" & " Integrity properties specified, then the value of the Integrity" & " must be the same."); when 863 => E_Strings.Append_String (E_Str => E_Str, Str => "Protected own variables must always be initialized, and should not appear in" & " initializes annotations."); when 864 => E_Strings.Append_String (E_Str => E_Str, Str => "An own variable initialization clause and that of its refinement" & " constituents must be" & " consistent."); when 865 => E_Strings.Append_String (E_Str => E_Str, Str => "An extension aggregate must supply values for all fields that are part of" & " the overall aggregate" & " type but not those which are part of the ancestor part."); when 866 => E_Strings.Append_String (E_Str => E_Str, Str => "When the Ravenscar Profile is selected, the delay until statement may be" & " used." & " The argument of this statement must be of type Ada.Real_Time.Time."); when 867 => E_Strings.Append_String (E_Str => E_Str, Str => "Any subprogram that may call delay until must have a delay property in a" & " declare annotation. Your subprogram is directly or indirectly making a" & " call to delay until"); when 868 => E_Strings.Append_String (E_Str => E_Str, Str => "This error message is issued if a type mark representing a protected type" & " appears anywhere other than in" & " a library level variable declaration or library-level own variable type" & " announcement."); when 869 => E_Strings.Append_String (E_Str => E_Str, Str => "The Ravenscar profile prohibits a protected type from declaring more than" & " one entry."); when 870 => E_Strings.Append_String (E_Str => E_Str, Str => "A protected type which provides no operations can never be used so SPARK" & " requires the" & " declaration of at least one."); when 872 => E_Strings.Append_String (E_Str => E_Str, Str => "In order to avoid the possibility of shared data corruption," & " SPARK prohibits protected operations" & " from accessing unprotected data items."); when 873 => E_Strings.Append_String (E_Str => E_Str, Str => "In order to statically-detect certain bounded errors defined by the" & " Ravenscar profile, SPARK" & " requires every visible operation of protected type to globally" & " reference the abstract state of" & " the type."); when 874 => E_Strings.Append_String (E_Str => E_Str, Str => "Because there is no guarantee that a concurrent thread that initializes a" & " protected object will be executed before one that reads it, the only way" & " we can be sure that a protected object is properly initialized is to do" & " so at the point of declaration. 
You have either declared some protected" & " state and not included an initialization or you have tried to initialize" & " some protected state in package body elaboration."); when 875 => E_Strings.Append_String (E_Str => E_Str, Str => "Access discriminants have been allowed in SPARK solely to allow devices" & " made up of co-operating" & " Ravenscar-compliant units to be constructed. For this reason only" & " protected types may appear in" & " access discriminants."); when 876 => E_Strings.Append_String (E_Str => E_Str, Str => "To allow the static detection of certain bounded errors defined by the" & " Ravenscar profile, SPARK requires" & " an explicitly-set priority for each protected type, task type or object" & " of those types. The System.Default_Priority" & " may used explicitly provided package System has been defined in the" & " configuration file."); when 878 => E_Strings.Append_String (E_Str => E_Str, Str => "To allow the static detection of certain bounded errors defined by the" & " Ravenscar profile, SPARK requires" & " an explicitly-set priority for each protected type or object." & " The System.Default_Priority may used explicitly" & " provided package System has been defined in the configuration file."); when 879 => E_Strings.Append_String (E_Str => E_Str, Str => "pragma Interrupt_Priority must be the first item in" & " a protected type declaration or task type declaration; pragma Priority" & " must be the first item in" & " a protected type declaration, task type declaration or the main program."); when 880 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when a pragma other than Priority or Interrupt_Priority appears as" & " the first item in" & " a protected type or task type declaration."); when 881 => E_Strings.Append_String (E_Str => E_Str, Str => "See LRM D.1(17)."); when 883 => E_Strings.Append_String (E_Str => E_Str, Str => "Interrupt_Handler is of no use unless dynamic attachment of interrupt" & " handlers is to be used."); when 884 => E_Strings.Append_String (E_Str => E_Str, Str => "Pragma Attach_Handler may only be used within a protected type" & " declaration. Furthermore, it must" & " immediately follow a protected procedure declaration with the same name" & " as the first argument to the pragma."); when 885 => E_Strings.Append_String (E_Str => E_Str, Str => "See LRM C.3.1(5)."); when 887 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when a task or protected type priority is set using an expression" & " involving a discriminant. The use" & " of such an expression greatly complicates the static evaluation of the" & " priority of task or protected subtypes" & " thus preventing the static elimination of certain Ravenscar bounded errors."); when 888 => E_Strings.Append_String (E_Str => E_Str, Str => "A procedure may only have a maximum of one delay annotation."); when 889 => E_Strings.Append_String (E_Str => E_Str, Str => "The type used to declare this object must be a protected type with and" & " entry or a suspension object type"); when 890 => E_Strings.Append_String (E_Str => E_Str, Str => "Items may not appear more than once in an a suspends list."); when 891 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued in a subtype declaration where the constraint is a discriminant" & " constraint. 
Only task and protected types" & " may take a discriminant constraint as part of a subtype declaration."); when 892 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued in a subtype declaration where the constraint is a either a" & " discriminant constraint or an index" & " constraint (these two forms cannot always be distinguished" & " syntactically). Only task and protected types" & " may take a discriminant constraint and only array types may" & " take an index constraint as part of a" & " subtype declaration."); when 893 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued in a subtype declaration if too many or two few discriminant" & " constraints are supplied."); when 894 => E_Strings.Append_String (E_Str => E_Str, Str => "SPARK supports the keyword aliased in variable declarations only so" & " that protected and task types can support" & " access discriminants. Since it has no other purpose it may not be" & " used except in a protected object declaration."); when 895 => E_Strings.Append_String (E_Str => E_Str, Str => "This is a slightly annoying Ada issue. Marking a variable as aliased" & " prevents it being placed in a register" & " which would make pointing at it hazardous; however, SPARK only permits" & " 'Access on protected types which are" & " limited and therefore always passed by reference anyway and immune from" & " register optimization. Requiring" & " aliased on protected objects that will appear in discriminant" & " constraints is therefore unwanted syntactic sugar" & " only."); when 896 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued at the end of a package body if a task type declared in its" & " specification contains neither a body" & " nor a body stub for it."); when 897 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued at the end of a package body if a protected type declared in" & " its specification contains neither a body" & " nor a body stub for it."); when 898 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if a body or body stub for a task or protected type is" & " encountered and there is no matching specification."); when 899 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if a body or body stub for a task or protected type is" & " encountered and an earlier body" & " has already been encountered."); when 901 => E_Strings.Append_String (E_Str => E_Str, Str => "Suspension objects must be declared at library level. They cannot" & " be used in protected type state or as local variables in subprograms."); when 903 => E_Strings.Append_String (E_Str => E_Str, Str => "Protected and suspension objects are used to ensure integrity of shared" & " objects. If it is necessary to share constant data then these constructs" & " should not be used."); when 904 => E_Strings.Append_String (E_Str => E_Str, Str => "SPARK does not currently support this feature."); when 905 => E_Strings.Append_String (E_Str => E_Str, Str => "SPARK does not currently support this feature."); when 906 => E_Strings.Append_String (E_Str => E_Str, Str => "Protected and suspension objects cannot be used in record or array" & " structures."); when 907 => E_Strings.Append_String (E_Str => E_Str, Str => "You are calling delay until from an invalid construct." & " Any construct that calls delay until must have a delay property in the" & " declare annotation. 
This construct must be one of a task or procedure body"); when 908 => E_Strings.Append_String (E_Str => E_Str, Str => "Procedures in protected scope must not block and therefore blocking" & " properties are prohibited"); when 909 => E_Strings.Append_String (E_Str => E_Str, Str => "You are either applying the suspendable property to an own variable" & " that cannot suspend or you have declared a variable (whose own variable" & " has the suspendable property) which cannot suspend. Or you have used an" & " item in a suspends list that does not have the suspendable property." & " An object can only suspend if it is a suspension object or a protected" & " type with an entry."); when 910 => E_Strings.Append_String (E_Str => E_Str, Str => "Protected entry calls and calls to" & " Ada.Synchronous_Task_Control.Suspend_Until_True may block the currently" & " executing task. SPARK requires you announce this fact by placing the" & " actual callee name in the suspends list for the enclosing unit."); when 911 => E_Strings.Append_String (E_Str => E_Str, Str => "If the main program priority is not an integer literal then you" & " should declare a constant that has the required value in the" & " declarative part of the main program prior to the position of the pragma."); when 912 => E_Strings.Append_String (E_Str => E_Str, Str => "The call being made has a declare annotation that contains a delay" & " property. SPARK requires that this property is propagated up" & " the call chain and hence must appear in a declare annotation" & " for the enclosing unit."); when 913 => E_Strings.Append_String (E_Str => E_Str, Str => "The call being made has a declare annotation that contains a suspends" & " list. SPARK requires that the entire list is propagated up the call" & " chain and hence must appear in a declare annotation for the enclosing unit."); when 914 => E_Strings.Append_String (E_Str => E_Str, Str => "You have specified the name of a protected or suspension object in" & " the suspends list that can never be called by this procedure or task."); when 915 => E_Strings.Append_String (E_Str => E_Str, Str => "You have specified a delay property for this procedure but delay" & " until can never be called from it."); when 916 => E_Strings.Append_String (E_Str => E_Str, Str => "The type of the protected object mentions the protected object name in" & " the derives list for the given subprogram"); when 917 => E_Strings.Append_String (E_Str => E_Str, Str => "The procedure being called may block and hence cannot be called from" & " a protected action."); when 918 => E_Strings.Append_String (E_Str => E_Str, Str => "The delay property may only be applied to a procedure"); when 919 => E_Strings.Append_String (E_Str => E_Str, Str => "The priority property can only be applied to protected own variables" & " which are type announced. 
If the type has been declared it must be a" & " protected type"); when 920 => E_Strings.Append_String (E_Str => E_Str, Str => "The suspends property may only be applied to task type specifications" & " and procedures"); when 921 => E_Strings.Append_String (E_Str => E_Str, Str => "The property list can only specify the reserved word delay, suspends or" & " priority."); when 922 => E_Strings.Append_String (E_Str => E_Str, Str => "In order to perform the ceiling priority checks the priority property must" & " be given to all own variables of protected type."); when 923 => E_Strings.Append_String (E_Str => E_Str, Str => "Blocking is seen as a side effect and hence procedures that potentially" & " block cannot be called from functions."); when 924 => E_Strings.Append_String (E_Str => E_Str, Str => "Objects that suspend must be declared as own protected variables"); when 925 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variables of protected type and own tasks must have a type announcement"); when 926 => E_Strings.Append_String (E_Str => E_Str, Str => "Task objects must be declared in library level package specifications or" & " bodies."); when 927 => E_Strings.Append_String (E_Str => E_Str, Str => "The task type declaration has name XXX in its list and this must appear" & " in the own task annotation"); when 928 => E_Strings.Append_String (E_Str => E_Str, Str => "Protected type XXX has been used to declare a protected, moded own variable." & " Protected, moded own variables are refined onto a set of virtual elements with" & " the same mode. As such private elements are not allowed."); when 929 => E_Strings.Append_String (E_Str => E_Str, Str => "Declare annotations cannot appear on the procedure body if it appears" & " on the procedure specification"); when 930 => E_Strings.Append_String (E_Str => E_Str, Str => "A task has been declared that is not specified as an own task of the" & " package."); when 931 => E_Strings.Append_String (E_Str => E_Str, Str => "A task name appears in the own task annotation for this package but" & " is never declared"); when 932 => E_Strings.Append_String (E_Str => E_Str, Str => "The priority given in the priority property must match that given in" & " the protected type."); when 933 => E_Strings.Append_String (E_Str => E_Str, Str => "When SPARK profile Ravenscar is selected, all tasks, protected objects" & " and the main program must explicitly be assigned a priority."); when 934 => E_Strings.Append_String (E_Str => E_Str, Str => "The active priority of a task is the higher of its base priority" & " and the ceiling priorities of all protected objects that it is" & " executing. The active priority at the point of a call to a" & " protected operation must not exceed the ceiling priority of the" & " callee."); when 935 => E_Strings.Append_String (E_Str => E_Str, Str => "An own variable has been declared using a protected type with a pragma" & " attach handler. Such objects are used in interrupt processing and must" & " have the interrupt property specified in their own variable declaration"); when 936 => E_Strings.Append_String (E_Str => E_Str, Str => "The interrupt property can only be applied to protected own variables" & " that are type announced. If the type is declared then it must be a" & " protected type that contains an attach handler"); when 937 => E_Strings.Append_String (E_Str => E_Str, Str => "The protects property can only be applied to protected own variables" & " that are type announced. 
If the type is declared then it must be a" & " protected type."); when 938 => E_Strings.Append_String (E_Str => E_Str, Str => "XXX is an unprotected variable that appears in the global list of the" & " threads YYY and ZZZ. Unprotected variables cannot be shared between" & " threads in SPARK. A thread is one of: the main program, a task, an" & " interrupt handler."); when 939 => E_Strings.Append_String (E_Str => E_Str, Str => "XXX is an own variable with the suspends property that appears in the" & " suspends list of the threads YYY and ZZZ. SPARK prohibits this to" & " prevent more than one thread being suspended on the same item at any" & " one time. A thread is one of: the main program, a task, an interrupt" & " handler."); when 940 => E_Strings.Append_String (E_Str => E_Str, Str => "The use of protected variables in pre and postconditions or other proof annotations is not (currently)" & " supported. Protected variables are volatile because they can be changed at any time by another program" & " thread and this may invalidate some common proof techniques. The prohibition of protected variables" & " does not prevent proof of absence of run-time errors nor proof of protected operation bodies. See the" & " manual ""SPARK Proof Manual"" for more details."); when 941 => E_Strings.Append_String (E_Str => E_Str, Str => "The type used to an announce an own variable with a protects property" & " must be declared in the same package."); when 942 => E_Strings.Append_String (E_Str => E_Str, Str => "Type XXX has a protects property. This means there can be only one object" & " in the package that has this type or any subtype of this type."); when 943 => E_Strings.Append_String (E_Str => E_Str, Str => "All items in a protects list must be unprotected own variables owned by" & " this package"); when 944 => E_Strings.Append_String (E_Str => E_Str, Str => "The name XXX appears in more than one protects list. The first time" & " it appeared was for own variable YYY. XXX should appear in at most" & " one protects list."); when 945 => E_Strings.Append_String (E_Str => E_Str, Str => "This property can only accept a static expression."); when 946 => E_Strings.Append_String (E_Str => E_Str, Str => "The own variable XXX is protected by the protected type YYY and hence" & " must never be accessed from anywhere else."); when 947 => E_Strings.Append_String (E_Str => E_Str, Str => "The protected type YYY claims to protect XXX via a protects property." & " However, the variable XXX is not used by any operation in YYY."); when 948 => E_Strings.Append_String (E_Str => E_Str, Str => "Own tasks and protected own variables of a protected type must be" & " announced using the base type. The" & " subsequent variable declaration may be a subtype of the base type"); when 949 => E_Strings.Append_String (E_Str => E_Str, Str => "When the sequential SPARK profile is selected, the global and derives" & " annotation on the main program describes" & " the entire program's behaviour. No additional, partition annotation" & " is required or permitted. Note that an" & " annotation must appear here if the Ravenscar profile is selected."); when 950 => E_Strings.Append_String (E_Str => E_Str, Str => "When the Ravenscar profile is selected the global and derives annotation" & " on the main program describes the" & " behaviour of the environment task only, not the entire program." 
& " An additional annotation, called the" & " partition annotation, is required to describe the entire program's" & " behaviour; this annotation follows" & " immediately after 'main_program;'"); when 951 => E_Strings.Append_String (E_Str => E_Str, Str => "In order to ensure that a Ravenscar program is complete, SPARK requires" & " that all 'active' packages" & " inherited by the environment task also appear in a corresponding" & " with clause. This check ensures that" & " any program entities described in the partition annotation are also" & " linked into the program itself."); when 952 => E_Strings.Append_String (E_Str => E_Str, Str => "Interrupt handler operations cannot be called."); when 953 => E_Strings.Append_String (E_Str => E_Str, Str => "The handler names in an interrupt property must match one in the" & " protected type of the own variable."); when 954 => E_Strings.Append_String (E_Str => E_Str, Str => "The stream name must be unprefixed and not already in use within the" & " scope of the package."); when 955 => E_Strings.Append_String (E_Str => E_Str, Str => "Interrupt stream variables are used only to enhance the partition" & " wide flow annotation and must not be used elsewhere."); when 956 => E_Strings.Append_String (E_Str => E_Str, Str => "An interrupt handler can be mapped onto exactly one interrupt stream" & " variable. An interrupt stream variable may be mapped onto many interrupt" & " handlers."); when 957 => E_Strings.Append_String (E_Str => E_Str, Str => "A protected variable cannot be updated without direct reference to its" & " preceding value more than once within a subprogram or task." & " Each update of a protected variable may have a wider effect than" & " just the change of value of the protected variable. The overall" & " change is considered to be the accumulation of all updates and" & " reads of the protected variable and to preseve this information flow" & " successive updates must directly depend on the preceding value of" & " the variable"); when 958 => E_Strings.Append_String (E_Str => E_Str, Str => "A task may not import unprotected state unless it is mode in." & " This is because under the concurrent elaboration policy, the task cannot" & " rely on the state being initialized before it is run."); when 959 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variable XXX is being accessed by a task. The order in which the task" & " is run and the own variable initialized is non-deterministic under a" & " concurrent elaboration policy. In this case SPARK forces the task to" & " perform the initialization and as such the own variable must not appear" & " in an initializes clause."); when 960 => E_Strings.Append_String (E_Str => E_Str, Str => "To avoid ordering effects, functions which globally access own" & " variables which have modes (indicating that they are connected to" & " the external environment) may only appear directly in assignment or" & " return statements. They may not appear as actual parameters or in any" & " other form of expression." & " SPARK relaxes the illegal use of function calls in elaboration code in" & " the case of the function Ada.Real_Time.Clock. 
However the function can only" & " be used to directly initialize a constant value."); when 961 => E_Strings.Append_String (E_Str => E_Str, Str => "Please check the user manual for valid property value formats."); when 962 => E_Strings.Append_String (E_Str => E_Str, Str => "This message is echoed to the screen if an unrecoverable" & " error occurs which" & " makes the generation of VCs for the current subprogram impossible." & " Another message more precisely identifying the problem will be placed in" & " the .vcg file."); when 986 => E_Strings.Append_String (E_Str => E_Str, Str => "See LRM 9.5.1 (2). A protected function has read access to the" & " protected elements of the type whereas" & " the called procedure has read-write access. There is no way in which" & " an Ada compiler can determine whether" & " the procedure will illegally update the protected state or not so the" & " call is prohibited by the rules of Ada." & " (Of course, in SPARK, we know there is no function side effect but the" & " rules of Ada must prevail nonetheless)."); when 987 => E_Strings.Append_String (E_Str => E_Str, Str => "The Examiner performs certain important checks at the whole program level" & " such as detection of illegal sharing of" & " unprotected state and partition-level information flow analysis." & " These checks require visibility of task" & " types and protected types (especially those containing interrupt" & " handlers). SPARK therefore requires these" & " types to be declare in package specifications. Subtypes and objects" & " of task types, protected types and their" & " subtypes may be declared in package bodies."); when 988 => E_Strings.Append_String (E_Str => E_Str, Str => "SPARK does not allow the re-use of operation names which are already" & " in use in a directly visible" & " protected type. The restriction is necessary to avoid overload resolution" & " issues in the protected" & " body. For example, type PT in package P declares operation K." & " Package P also declares an operation K." & " From inside the body of PT, a call to K could refer to either of the" & " two Ks since both are directly visible."); when 989 => E_Strings.Append_String (E_Str => E_Str, Str => "To prevent any possibility of a task terminating (which can lead to a" & " bounded error), SPARK requires" & " each task to end with a non-terminating loop. The environment task (or" & " ""main program"") does not need" & " to end in a plain loop provided the program closure includes at least" & " one other task. 
If there are" & " no other tasks, then the environment task must be made non-terminating" & " with a plain loop."); when 990 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if a pre, post or declare annotation is attached to a task body."); when 991 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if task body is encountered for which there is no preceding" & " declaration."); when 992 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if a duplicate body or body stub is encountered for a task."); when 993 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if a body is found for a protected types for which there is" & " no preceding declaration."); when 994 => E_Strings.Append_String (E_Str => E_Str, Str => "The SPARK Ravenscar rules require a simple Boolean guard which must" & " be one of the protected elements" & " of the type declaring the entry."); when 995 => E_Strings.Append_String (E_Str => E_Str, Str => "Local entries are not permitted so a protected body can declare at" & " most one entry body and that must have" & " declared in the protected type specification."); when 996 => E_Strings.Append_String (E_Str => E_Str, Str => "Each exported protected operation must have a matching implementation" & " in the associated protected body."); when 997 => E_Strings.Append_String (E_Str => E_Str, Str => "Each protected type declaration must have exactly one matching" & " protected body or body stub."); when 998 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if a protected body or body stub is found and there is no" & " matching declaration for it."); when 999 => E_Strings.Append_String (E_Str => E_Str, Str => "Generics are currently limited to instantiation of Unchecked_Conversion."); when others => null; end case; end SemanticErrExpl; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_positional_argument_association.adb0000644000175000017500000011256011753202336030415 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Walk_Expression_P) procedure Wf_Positional_Argument_Association (Node : in out STree.SyntaxNode; Scope : in Dictionary.Scopes; Ref_Var : in SeqAlgebra.Seq; E_Stack : in out Exp_Stack.Exp_Stack_Type; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) is Exp_Result, Type_Info : Sem.Exp_Record; Expected_Type, Sym, Param_Sym : Dictionary.Symbol; Types_Are_Convertable : Boolean; Exp_Value, Unused_Value : Maths.Value; Error_Found : Boolean := False; -------------------------------------------------------------- procedure Chain_Up_To_Name_List (Node : in out STree.SyntaxNode) --# global in STree.Table; --# derives Node from *, --# STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.positional_argument_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_positional_argument_association; --# post STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.name_argument_list or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_name_argument_list; is begin while STree.Syntax_Node_Type (Node => Node) /= SP_Symbols.name_argument_list and then STree.Syntax_Node_Type (Node => Node) /= SP_Symbols.annotation_name_argument_list loop --# assert STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.positional_argument_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_positional_argument_association; Node := STree.Parent_Node (Current_Node => Node); -- ASSUME Node = annotation_positional_argument_association OR annotation_name_argument_list OR -- positional_argument_association OR name_argument_list SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.name_argument_list or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.positional_argument_association or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_name_argument_list or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_positional_argument_association, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = annotation_positional_argument_association OR " & "annotation_name_argument_list OR positional_argument_association OR " & "name_argument_list in Chain_Up_To_Name_List"); end loop; end Chain_Up_To_Name_List; -------------------------------------------------------------- procedure Check_Types_Are_Convertable (Node : in STree.SyntaxNode; Target, Source : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Ok : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# Source, --# SPARK_IO.File_Sys, --# STree.Table, --# Target & --# Ok from Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# Source, --# STree.Table, --# Target; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.positional_argument_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_positional_argument_association; is Undefined : Boolean := False; --------------------------------------- function Dimensions_Match (Target, Source : Dictionary.Symbol) return Boolean --# global in 
Dictionary.Dict; is begin return Dictionary.GetNumberOfDimensions (Target) = Dictionary.GetNumberOfDimensions (Source); end Dimensions_Match; --------------------------------------- function Indexes_Are_Convertible (Target, Source : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is Tgt_It, Src_It : Dictionary.Iterator; Ok : Boolean; --------------------------------------- function Convertible (Src, Tgt : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return (Dictionary.TypeIsNumeric (Src) and then Dictionary.TypeIsNumeric (Tgt)) or else Dictionary.Types_Are_Equal (Left_Symbol => Dictionary.GetRootType (Src), Right_Symbol => Dictionary.GetRootType (Tgt), Full_Range_Subtype => False); end Convertible; begin -- Indexes_Are_Convertible Ok := True; Tgt_It := Dictionary.FirstArrayIndex (Target); Src_It := Dictionary.FirstArrayIndex (Source); while not Dictionary.IsNullIterator (Tgt_It) loop if not Convertible (Src => Dictionary.CurrentSymbol (Src_It), Tgt => Dictionary.CurrentSymbol (Tgt_It)) then Ok := False; exit; end if; Tgt_It := Dictionary.NextSymbol (Tgt_It); Src_It := Dictionary.NextSymbol (Src_It); end loop; return Ok; end Indexes_Are_Convertible; --------------------------------------- function Components_Same_Type (Target, Source : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return Dictionary.Types_Are_Equal (Left_Symbol => Dictionary.GetRootType (Dictionary.GetArrayComponent (Target)), Right_Symbol => Dictionary.GetRootType (Dictionary.GetArrayComponent (Source)), Full_Range_Subtype => False); end Components_Same_Type; --------------------------------------- function Components_Constraints_Match (Target, Source : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; --# in LexTokenManager.State; is Tgt_Component, Src_Component : Dictionary.Symbol; Result : Boolean; function Scalar_Bounds_Match (Tgt_Sym, Src_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; --# in LexTokenManager.State; is begin return LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Dictionary.GetScalarAttributeValue (False, LexTokenManager.First_Token, Src_Sym), Lex_Str2 => Dictionary.GetScalarAttributeValue (False, LexTokenManager.First_Token, Tgt_Sym)) = LexTokenManager.Str_Eq and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Dictionary.GetScalarAttributeValue (False, LexTokenManager.Last_Token, Src_Sym), Lex_Str2 => Dictionary.GetScalarAttributeValue (False, LexTokenManager.Last_Token, Tgt_Sym)) = LexTokenManager.Str_Eq; end Scalar_Bounds_Match; begin -- Components_Constraints_Match Tgt_Component := Dictionary.GetArrayComponent (Target); Src_Component := Dictionary.GetArrayComponent (Source); if Dictionary.TypeIsScalar (Tgt_Component) then Result := Scalar_Bounds_Match (Tgt_Sym => Tgt_Component, Src_Sym => Src_Component); elsif Dictionary.TypeIsArray (Tgt_Component) then Result := Sem.Indexes_Match (Target => Tgt_Component, Source => Src_Component); else Result := Dictionary.TypeIsRecord (Tgt_Component); end if; return Result; end Components_Constraints_Match; begin -- Check_Types_Are_Convertable -- UnknownTypes considered convertable to stop error propagation if Dictionary.IsUnknownTypeMark (Target) or else Dictionary.IsUnknownTypeMark (Source) then Ok := True; Undefined := True; elsif STree.Syntax_Node_Type (Node => Node) = SP_Symbols.positional_argument_association and then (Dictionary.IsPrivateType (Source, Scope) or else Dictionary.IsPrivateType (Target, Scope)) 
and then not Dictionary.Types_Are_Equal (Left_Symbol => Target, Right_Symbol => Source, Full_Range_Subtype => False) then Ok := False; elsif Dictionary.TypeIsNumeric (Target) and then Dictionary.TypeIsNumeric (Source) then Ok := True; elsif Dictionary.TypeIsArray (Target) and then Dictionary.TypeIsArray (Source) then Ok := True; if not Dimensions_Match (Target => Target, Source => Source) then ErrorHandler.Semantic_Error (Err_Num => 423, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); Ok := False; elsif not Indexes_Are_Convertible (Target => Target, Source => Source) then ErrorHandler.Semantic_Error (Err_Num => 420, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); Ok := False; elsif not Sem.Indexes_Match (Target => Target, Source => Source) then ErrorHandler.Semantic_Error (Err_Num => 418, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); Ok := False; elsif not Components_Same_Type (Target => Target, Source => Source) then ErrorHandler.Semantic_Error (Err_Num => 421, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); Ok := False; elsif not Components_Constraints_Match (Target => Target, Source => Source) then ErrorHandler.Semantic_Error (Err_Num => 422, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); Ok := False; end if; -- allow unnecessary conversions, warning will be produced by later if clause else Ok := Dictionary.Types_Are_Equal (Left_Symbol => Dictionary.GetRootType (Target), Right_Symbol => Dictionary.GetRootType (Source), Full_Range_Subtype => False); end if; -- if legal (other than undefined case, check if necessary) if Ok and then not Undefined and then Dictionary.Types_Are_Equal (Left_Symbol => Dictionary.GetRootType (Target), Right_Symbol => Dictionary.GetRootType (Source), Full_Range_Subtype => False) then ErrorHandler.Semantic_Warning (Err_Num => 309, Position => STree.Node_Position (Node => STree.Parent_Node (Current_Node => STree.Parent_Node (Current_Node => STree.Parent_Node (Current_Node => Node)))), Id_Str => LexTokenManager.Null_String); end if; end Check_Types_Are_Convertable; -------------------------------------------------------------- procedure Do_Tagged_Type_Conversion (Node : in STree.SyntaxNode; Ref_Var : in SeqAlgebra.Seq; Target : in out Sem.Exp_Record; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord; Source : in Sem.Exp_Record) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in STree.Table; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Component_Data, --# Dictionary.Dict from Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Node, --# Source, --# STree.Table, --# Target, --# The_Heap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Source, --# SPARK_IO.File_Sys, --# STree.Table, --# Target & --# Statistics.TableUsage, --# The_Heap from *, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# 
Node, --# Ref_Var, --# Source, --# STree.Table, --# Target, --# The_Heap & --# Target from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Node, --# Source, --# STree.Table, --# The_Heap; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.positional_argument_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_positional_argument_association; is procedure Raise_Error (Node : in STree.SyntaxNode; Err_No : in Natural; Target : out Sem.Exp_Record) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Err_No, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table & --# Target from Dictionary.Dict; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.positional_argument_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_positional_argument_association; is begin Target := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => Err_No, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => STree.Parent_Node (Current_Node => STree.Parent_Node (Current_Node => STree.Parent_Node (Current_Node => Node)))), Id_Str => LexTokenManager.Null_String); end Raise_Error; begin -- Do_Tagged_Type_Conversion -- On entry we know Target.TypeSymbol is tagged. If Source.TypeSymbol is not then we have some grossly -- malformed type conversion if not Dictionary.TypeIsTagged (Source.Type_Symbol) then Raise_Error (Node => Node, Err_No => 32, Target => Target); elsif not Dictionary.IsAnExtensionOf (Target.Type_Symbol, Source.Type_Symbol) then Raise_Error (Node => Node, Err_No => 831, Target => Target); else -- We have two tagged types and the target is an ancestor of the source; basically ok if Source.Is_AVariable or else Source.Is_Constant then -- we have an object to convert if STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_positional_argument_association or else Source.Is_Constant then -- In an annotation, or for a constant, all we need to is change -- the result type to that expected. Target.Sort := Sem.Type_Result; Target.Is_Constant := Source.Is_Constant; Target.Is_Static := Source.Is_Static and then CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83; Target.Other_Symbol := Dictionary.NullSymbol; Target.Variable_Symbol := Source.Variable_Symbol; Target.Is_AVariable := Source.Is_AVariable; Target.Is_An_Entire_Variable := False; else -- In a normal expression with a variable and we need to convert -- appropriate record subcomponent symbols. 
-- We can't replace X with X.Inherit unless we add X's subcomponents first Sem.Add_Record_Sub_Components (Record_Var_Sym => Source.Variable_Symbol, Record_Type_Sym => Dictionary.GetType (Source.Variable_Symbol), Component_Data => Component_Data, The_Heap => The_Heap); -- Set up Sem.Exp_Record Target.Variable_Symbol := Sem.Convert_Tagged_Actual (Actual => Source.Variable_Symbol, Tagged_Parameter_Sym => Target.Type_Symbol); Target.Sort := Sem.Type_Result; Target.Is_Constant := Source.Is_Constant; Target.Is_Static := Source.Is_Static and then CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83; Target.Other_Symbol := Dictionary.NullSymbol; Target.Is_AVariable := Source.Is_AVariable; Target.Is_An_Entire_Variable := False; -- Substitute reference variables to show we only used a subcomponent of Source. -- We have to look for the source variable because there may be other items in -- the Ref_Var list if, for example, the type conversion forms part of a larger -- expression such as a function call. SeqAlgebra.RemoveMember (The_Heap, Ref_Var, Natural (Dictionary.SymbolRef (Source.Variable_Symbol))); SeqAlgebra.AddMember (The_Heap, Ref_Var, Natural (Dictionary.SymbolRef (Target.Variable_Symbol))); end if; else -- not an object Raise_Error (Node => Node, Err_No => 832, Target => Target); end if; end if; end Do_Tagged_Type_Conversion; -------------------------------------------------------------- function Convert_Value (Target : Dictionary.Symbol; Exp : Sem.Exp_Record) return Maths.Value --# global in CommandLineData.Content; --# in Dictionary.Dict; is Val : Maths.Value; begin Val := Exp.Value; if not Maths.HasNoValue (Val) then if Dictionary.IsUnknownTypeMark (Target) then Val := Maths.NoValue; elsif Dictionary.TypeIsReal (Target) then Maths.ConvertToReal (Val); elsif Dictionary.TypeIsInteger (Target) and then Dictionary.TypeIsReal (Exp.Type_Symbol) then case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => Val := Maths.NoValue; -- can't do real to integer safely when CommandLineData.SPARK95_Onwards => Val := Maths.Ada95RealToInteger (Val); end case; end if; end if; return Val; end Convert_Value; -------------------------------------------------------------- function Expression_Location (Node : STree.SyntaxNode) return LexTokenManager.Token_Position --# global in STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.positional_argument_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_positional_argument_association; is Result : STree.SyntaxNode; begin Result := STree.Child_Node (Current_Node => Node); -- ASSUME Result = annotation_positional_argument_association OR annotation_expression OR -- positional_argument_association OR expression if STree.Syntax_Node_Type (Node => Result) = SP_Symbols.expression or else STree.Syntax_Node_Type (Node => Result) = SP_Symbols.annotation_expression then -- ASSUME Result = expression OR annotation_expression Result := Node; -- ASSUME Result = positional_argument_association OR annotation_positional_argument_association elsif STree.Syntax_Node_Type (Node => Result) = SP_Symbols.positional_argument_association or else STree.Syntax_Node_Type (Node => Result) = SP_Symbols.annotation_positional_argument_association then -- ASSUME Result = positional_argument_association OR annotation_positional_argument_association Result := STree.Next_Sibling (Current_Node => Result); -- ASSUME Result = expression OR annotation_expression SystemErrors.RT_Assert (C => 
STree.Syntax_Node_Type (Node => Result) = SP_Symbols.expression or else STree.Syntax_Node_Type (Node => Result) = SP_Symbols.annotation_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = annotation_expression OR expression in Expression_Location"); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = annotation_positional_argument_association OR annotation_expression OR " & "positional_argument_association OR expression in Expression_Location"); end if; return STree.Node_Position (Node => Result); end Expression_Location; -------------------------------------------------------------- function Get_Expected_Array_Index_Type (Var_Sym : Dictionary.Symbol; Type_Sym : Dictionary.Symbol; Dimension : Positive; Scope : Dictionary.Scopes) return Dictionary.Symbol --# global in Dictionary.Dict; --# return S => (Dictionary.Is_Null_Symbol (S) or --# Dictionary.IsTypeMark (S, Dictionary.Dict) or --# Dictionary.IsParameterConstraint (S, Dictionary.Dict)); is Result : Dictionary.Symbol; begin -- This function determines what type to plant in the syntax tree so that the VCG can check -- that array accesses are in bounds. For a constrained object it is easy - we plant the -- appropriate index type for the dimension being accessed. For indexing into unconstrained -- objects we plant a symbol of a special kind (ParameterConstraintSymbol) associated with -- the array object (rather than its type); this special symbol represents "the index as -- constrained by 'something' at this point". Typically we will not know the actual bounds -- of the constraint represented by this symbol. if Dictionary.Is_Unconstrained_Array_Type_Mark (Type_Sym, Scope) then -- For unconstrained arrays, obtain the implicitly declared constraint symbol for the array object Result := Dictionary.GetSubprogramParameterConstraint (Var_Sym, Dimension); if Dictionary.Is_Null_Symbol (Result) then Result := Dictionary.GetUnknownTypeMark; end if; else -- For constrained arrays, obtain the appropriate index for the array type; this is what the VCG needs Result := Dictionary.GetArrayIndex (Type_Sym, Dimension); end if; return Result; end Get_Expected_Array_Index_Type; begin -- Wf_Positional_Argument_Association Exp_Stack.Pop (Item => Exp_Result, Stack => E_Stack); Exp_Stack.Pop (Item => Type_Info, Stack => E_Stack); Sym := Type_Info.Other_Symbol; case Type_Info.Sort is when Sem.Is_Type_Mark => -- seed syntax tree with type for run-time check STree.Add_Node_Symbol (Node => Node, Sym => Exp_Result.Type_Symbol); if Dictionary.Is_Unconstrained_Array_Type_Mark (Type_Info.Type_Symbol, Scope) then ErrorHandler.Semantic_Error (Err_Num => 39, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => STree.Parent_Node (Current_Node => STree.Parent_Node (Current_Node => STree.Parent_Node (Current_Node => Node)))), Id_Str => LexTokenManager.Null_String); Type_Info := Sem.Unknown_Type_Record; -- special handling for type conversion of string literals. 
elsif Dictionary.IsPredefinedStringType (Exp_Result.Type_Symbol) and then Exp_Result.Range_RHS /= Maths.NoValue then ErrorHandler.Semantic_Error (Err_Num => 425, Reference => 22, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); Type_Info := Sem.Unknown_Type_Record; elsif Dictionary.TypeIsTagged (Type_Info.Type_Symbol) then Do_Tagged_Type_Conversion (Node => Node, Ref_Var => Ref_Var, Target => Type_Info, Component_Data => Component_Data, The_Heap => The_Heap, Source => Exp_Result); else -- some "normal" conversion case if Exp_Result.Is_ARange then -- Type conversion of a range is illegal. This also -- catches the illegal case of type-conversion of a -- subtype mark, such as Integer (Natural) ErrorHandler.Semantic_Error (Err_Num => 114, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => STree.Parent_Node (Current_Node => Node)), Id_Str => LexTokenManager.Null_String); Type_Info := Sem.Unknown_Type_Record; else Check_Types_Are_Convertable (Node => Node, Target => Type_Info.Type_Symbol, Source => Exp_Result.Type_Symbol, Scope => Scope, Ok => Types_Are_Convertable); if Types_Are_Convertable then Sem.Constraint_Check (Val => Exp_Result.Value, New_Val => Exp_Value, Is_Annotation => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_positional_argument_association, Typ => Type_Info.Type_Symbol, Position => STree.Node_Position (Node => Node)); Exp_Result.Value := Exp_Value; Type_Info.Sort := Sem.Type_Result; Type_Info.Is_Constant := Exp_Result.Is_Constant; Type_Info.Is_Static := Exp_Result.Is_Static and then CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83; Type_Info.Other_Symbol := Dictionary.NullSymbol; Type_Info.Value := Convert_Value (Target => Type_Info.Type_Symbol, Exp => Exp_Result); Type_Info.Variable_Symbol := Exp_Result.Variable_Symbol; Type_Info.Is_AVariable := False; Type_Info.Is_An_Entire_Variable := False; else ErrorHandler.Semantic_Error (Err_Num => 32, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => STree.Parent_Node (Current_Node => STree.Parent_Node (Current_Node => STree.Parent_Node (Current_Node => Node)))), Id_Str => LexTokenManager.Null_String); Type_Info := Sem.Unknown_Type_Record; end if; end if; end if; when Sem.Is_Function => if Type_Info.Param_Count >= Dictionary.GetNumberOfSubprogramParameters (Sym) then Type_Info := Unknown_Symbol_Record; ErrorHandler.Semantic_Error (Err_Num => 3, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => Dictionary.GetSimpleName (Sym)); Chain_Up_To_Name_List (Node => Node); else Type_Info.Param_Count := Type_Info.Param_Count + 1; Param_Sym := Dictionary.GetSubprogramParameter (Sym, Type_Info.Param_Count); Expected_Type := Dictionary.GetType (Param_Sym); -- Seed syntax tree with expected type for run-time check; -- but, don't do this for instantiation of unchecked_conversion -- because we don't want any RTCs for association of those parameters -- (provided the function parameter subtype and actual subtype match) if not (Dictionary.IsAnUncheckedConversion (Sym) and then Dictionary.Types_Are_Equal (Left_Symbol => Exp_Result.Type_Symbol, Right_Symbol => Expected_Type, Full_Range_Subtype => False)) then STree.Add_Node_Symbol (Node => Node, Sym => Expected_Type); end if; -- There is a special case involving functions an stream variables. 
We allow a stream -- variable to be a parameter to an Unchecked_Conversion but need to ensure that -- the function inherits the restrictions associated with referencing a stream -- (e.g. cannot be used in general expression). We can do this here by checking -- the StreamSymbol of the parameter expression (there will only be one if we are -- talking about an unchecked conversion) and if it is non-null then setting the -- stream symbol of the function result record (now an object) to the function symbol. -- Note that this clause will only be executed for an unchecked conversion because -- a parameter which is a stream would hav ebeen rejected at wf_primary in all other -- cases if not Dictionary.Is_Null_Symbol (Exp_Result.Stream_Symbol) then Type_Info.Stream_Symbol := Sym; end if; Sem.Wf_Argument_Association (Node => Node, Scope => Scope, Param_Type => Dictionary.GetType (Param_Sym), Position => Expression_Location (Node => Node), Exp_Result => Exp_Result, Fun_Info => Type_Info, Error_Found => Error_Found); end if; when Sem.Is_Object => if Type_Info.Param_Count >= Dictionary.GetNumberOfDimensions (Type_Info.Type_Symbol) then ErrorHandler.Semantic_Error (Err_Num => 93, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => Dictionary.GetSimpleName (Sym)); Type_Info := Unknown_Symbol_Record; Chain_Up_To_Name_List (Node => Node); else Type_Info.Param_Count := Type_Info.Param_Count + 1; -- Expected_Type is used to ensure that indexing expression is well-typed Expected_Type := Dictionary.GetArrayIndex (Type_Info.Type_Symbol, Type_Info.Param_Count); -- VCG_Type is used to tell VCG what indexing type to expect. Same as Expected type for a -- constrained array but different for unconstrained. See comment in function Get_Expected_Array_Index_Type -- seed syntax tree with expected type for run-time check STree.Add_Node_Symbol (Node => Node, Sym => Get_Expected_Array_Index_Type (Var_Sym => Type_Info.Other_Symbol, Type_Sym => Type_Info.Type_Symbol, Dimension => Type_Info.Param_Count, Scope => Scope)); Sem.Range_Check (A_Range => Exp_Result.Is_ARange, Position => Expression_Location (Node => Node), Error_Found => Error_Found); Type_Info.Is_Constant := Type_Info.Is_Constant and then Exp_Result.Is_Constant; if Dictionary.CompatibleTypes (Scope, Expected_Type, Exp_Result.Type_Symbol) then --# accept Flow, 10, Unused_Value, "Expected ineffective assignment"; Sem.Constraint_Check (Val => Exp_Result.Value, New_Val => Unused_Value, Is_Annotation => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_positional_argument_association, Typ => Expected_Type, Position => Expression_Location (Node => Node)); --# end accept; else Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 38, Reference => ErrorHandler.No_Reference, Position => Expression_Location (Node => Node), Id_Str => LexTokenManager.Null_String); end if; end if; when others => null; end case; Type_Info.Errors_In_Expression := Error_Found or else Type_Info.Errors_In_Expression or else Exp_Result.Errors_In_Expression; Exp_Stack.Push (X => Type_Info, Stack => E_Stack); --# accept Flow, 33, Unused_Value, "Expected to be neither referenced nor exported"; end Wf_Positional_Argument_Association; spark-2012.0.deb/examiner/sem.smf0000644000175000017500000002575611753202337015564 0ustar eugeneugensem.adb -vcg sem-add_derives_stream_effects.adb -vcg sem-add_record_sub_components.adb -vcg sem-aggregate_stack.adb -vcg sem-assignment_check.adb -vcg sem-check_announced_types_declared.adb -vcg 
sem-check_ceiling_priority.adb -vcg sem-check_closing_identifier.adb -vcg sem-check_interrupt_property_consistency.adb -vcg sem-check_named_association.adb -vcg sem-check_no_overloading_from_tagged_ops.adb -vcg sem-check_no_overloading_from_tagged_ops-successfully_overrides.adb -vcg sem-check_package_prefix.adb -vcg sem-check_priority_property_consistency.adb -vcg sem-check_priority_range.adb -vcg sem-check_protected_modifier_consistency.adb -vcg sem-check_suspendable_property_consistency.adb -vcg sem-check_task_modifier_consistency.adb -vcg sem-check_valid_ident.adb -vcg sem-compunit.adb -vcg sem-compunit-checkembedbodies.adb -vcg sem-compunit-checkpackageneedsbody.adb -vcg sem-compunit-checksuspendslistaccountedfor.adb -vcg sem-compunit-stack.adb -vcg sem-compunit-up_wf_package_body.adb -vcg sem-compunit-up_wf_protected_body.adb -vcg sem-compunit-up_wf_subprogram_body.adb -vcg sem-compunit-up_wf_task_body.adb -vcg sem-compunit-walkstatements.adb -vcg sem-compunit-walkstatements-checkformutuallyexclusivebranches.adb -vcg sem-compunit-walkstatements-down_loop.adb -vcg sem-compunit-walkstatements-up_case.adb -vcg sem-compunit-walkstatements-up_loop.adb -vcg sem-compunit-walkstatements-variableupdatehistory.adb -vcg sem-compunit-walkstatements-wf_assign.adb -vcg sem-compunit-walkstatements-wf_case.adb -vcg sem-compunit-walkstatements-wf_case_choice.adb -vcg sem-compunit-walkstatements-wf_condition.adb -vcg sem-compunit-walkstatements-wf_delay_until.adb -vcg sem-compunit-walkstatements-wf_exit.adb -vcg sem-compunit-walkstatements-wf_loop_param.adb -vcg sem-compunit-walkstatements-wf_proc_call.adb -vcg sem-compunit-walkstatements-wf_return.adb -vcg sem-compunit-wf_body_stub.adb -vcg sem-compunit-wf_entry_body.adb -vcg sem-compunit-wf_generic_declaration.adb -vcg sem-compunit-wf_generic_package_instantiation.adb -vcg sem-compunit-wf_machine_code_insertion.adb -vcg sem-compunit-wf_package_body.adb -vcg sem-compunit-wf_package_body-wf_refine.adb -vcg sem-compunit-wf_package_body-wf_refine-wf_clause.adb -vcg sem-compunit-wf_package_initialization.adb -vcg sem-compunit-wf_proof_function_declaration.adb -vcg sem-compunit-wf_proof_renaming_declaration.adb -vcg sem-compunit-wf_protected_body.adb -vcg sem-compunit-wf_subprogram_body.adb -vcg sem-compunit-wf_subprogram_body-processpartitionannotation.adb -vcg sem-compunit-wf_subunit.adb -vcg sem-compunit-wf_task_body.adb -vcg sem-compunit-wf_use_type_clause.adb -vcg sem-constraint_check.adb -vcg sem-convert_tagged_actual.adb -vcg sem-create_implicit_positive_subtype.adb -vcg sem-create_interrupt_stream_variable.adb -vcg sem-dependency_relation.adb -vcg sem-dependency_relation-check_derives_consistency.adb -vcg sem-dependency_relation-create_full_dependency.adb -vcg sem-dependency_relation-create_full_subprog_dependency.adb -vcg sem-dependency_relation-wf_dependency_relation.adb -vcg sem-find_actual_node.adb -vcg sem-find_previous_package.adb -vcg sem-get_literal_value.adb -vcg sem-get_subprogram_anno_key_nodes.adb -vcg sem-get_type_bounds.adb -vcg sem-illegal_unconstrained.adb -vcg sem-indexes_match.adb -vcg sem-in_package_initialization.adb -vcg sem-is_enclosing_package.adb -vcg sem-is_external_interface.adb -vcg sem-needs_synthetic_dependency.adb -vcg sem-plant_constraining_type.adb -vcg sem-range_check.adb -vcg sem-subprogram_specification.adb -vcg sem-substitute_protected_type_self_reference.adb -vcg sem-unexpected_initialization.adb -vcg sem-unknown_type_record.adb -vcg sem-walk_expression_p.adb -vcg sem-walk_expression_p-add_name.adb -vcg 
sem-walk_expression_p-attribute_designator_type_from_context.adb -vcg sem-walk_expression_p-calc_binary_operator.adb -vcg sem-walk_expression_p-check_binary_operator.adb -vcg sem-walk_expression_p-check_binary_operator-homo_impl_type_conv.adb -vcg sem-walk_expression_p-create_name_list.adb -vcg sem-walk_expression_p-dispose_of_name_list.adb -vcg sem-walk_expression_p-down_wf_aggregate.adb -vcg sem-walk_expression_p-down_wf_aggregate_or_expression.adb -vcg sem-walk_expression_p-down_wf_name_argument_list.adb -vcg sem-walk_expression_p-expression_type_from_context.adb -vcg sem-walk_expression_p-exp_stack.adb -vcg sem-walk_expression_p-find_named_argument_association_parameter.adb -vcg sem-walk_expression_p-get_character_literal.adb -vcg sem-walk_expression_p-get_string_literal_length.adb -vcg sem-walk_expression_p-null_parameter_record.adb -vcg sem-walk_expression_p-null_type_record.adb -vcg sem-walk_expression_p-ops_are_same_and_commutative.adb -vcg sem-walk_expression_p-primary_type_from_context.adb -vcg sem-walk_expression_p-put_exp_record.adb -vcg sem-walk_expression_p-range_constraint_type_from_context.adb -vcg sem-walk_expression_p-simple_expression_type_from_context.adb -vcg sem-walk_expression_p-stack_identifier.adb -vcg sem-walk_expression_p-type_context_stack.adb -vcg sem-walk_expression_p-unknown_symbol_record.adb -vcg sem-walk_expression_p-up_wf_aggregate.adb -vcg sem-walk_expression_p-up_wf_aggregate_or_expression.adb -vcg sem-walk_expression_p-up_wf_name_argument_list.adb -vcg sem-walk_expression_p-walk_annotation_expression.adb -vcg sem-walk_expression_p-walk_annotation_expression-down_wf_quantifier.adb -vcg sem-walk_expression_p-walk_annotation_expression-down_wf_store.adb -vcg sem-walk_expression_p-walk_annotation_expression-down_wf_store_list.adb -vcg sem-walk_expression_p-walk_annotation_expression-up_wf_quantifier.adb -vcg sem-walk_expression_p-walk_annotation_expression-up_wf_store.adb -vcg sem-walk_expression_p-walk_annotation_expression-up_wf_store_list.adb -vcg sem-walk_expression_p-walk_expression.adb -vcg sem-walk_expression_p-wf_aggregate_choice.adb -vcg sem-walk_expression_p-wf_aggregate_choice_rep.adb -vcg sem-walk_expression_p-wf_ancestor_part.adb -vcg sem-walk_expression_p-wf_arange.adb -vcg sem-walk_expression_p-wf_attribute.adb -vcg sem-walk_expression_p-wf_attribute_designator.adb -vcg sem-walk_expression_p-wf_attribute_designator-calc_attribute.adb -vcg sem-walk_expression_p-wf_component_association.adb -vcg sem-walk_expression_p-wf_expression.adb -vcg sem-walk_expression_p-wf_factor.adb -vcg sem-walk_expression_p-wf_identifier.adb -vcg sem-walk_expression_p-wf_named_argument_association.adb -vcg sem-walk_expression_p-wf_named_association_rep.adb -vcg sem-walk_expression_p-wf_named_record_component_association.adb -vcg sem-walk_expression_p-wf_percent.adb -vcg sem-walk_expression_p-wf_positional_argument_association.adb -vcg sem-walk_expression_p-wf_positional_association.adb -vcg sem-walk_expression_p-wf_positional_record_component_association.adb -vcg sem-walk_expression_p-wf_primary.adb -vcg sem-walk_expression_p-wf_primary-protected_references_by.adb -vcg sem-walk_expression_p-wf_qualified_expression.adb -vcg sem-walk_expression_p-wf_record_component_selector_name.adb -vcg sem-walk_expression_p-wf_relation.adb -vcg sem-walk_expression_p-wf_selected_component.adb -vcg sem-walk_expression_p-wf_simple_expression.adb -vcg sem-walk_expression_p-wf_simple_expression_opt.adb -vcg sem-walk_expression_p-wf_term.adb -vcg sem-walk_expression_p-wf_tilde.adb -vcg 
sem-walk_name.adb -vcg sem-wf_argument_association.adb -vcg sem-wf_argument_association-tagged_actual_must_be_object_check.adb -vcg sem-wf_array_type_definition.adb -vcg sem-wf_basic_declarative_item.adb -vcg sem-wf_basic_declarative_item-check_subtype_against_basetype_bounds.adb -vcg sem-wf_basic_declarative_item-wf_basic_declaration.adb -vcg sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration.adb -vcg sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_derived.adb -vcg sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_enum.adb -vcg sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_integer.adb -vcg sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_known_discriminant_part.adb -vcg sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_modular.adb -vcg sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_priority_pragma.adb -vcg sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_protected_type_declaration.adb -vcg sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_protected_type_declaration-wf_protected_op_dec.adb -vcg sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_real.adb -vcg sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_record.adb -vcg sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_task_type_declaration.adb -vcg sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_type_extension.adb -vcg sem-wf_basic_declarative_item-wf_basic_declaration-wf_subtype_declaration.adb -vcg sem-wf_basic_declarative_item-wf_basic_declaration-wf_subtype_declaration-wf_ravenscar_subtype.adb -vcg sem-wf_basic_declarative_item-wf_basic_declaration-wf_variable_declaration.adb -vcg sem-wf_context_clause.adb -vcg sem-wf_context_clause_package_body.adb -vcg sem-wf_context_clause_package_body-use_clause.adb -vcg sem-wf_context_clause_package_body-with_clause.adb -vcg sem-wf_declare_annotation.adb -vcg sem-wf_entire_variable.adb -vcg sem-wf_external_interface.adb -vcg sem-wf_formal_part.adb -vcg sem-wf_formal_part-wf_param.adb -vcg sem-wf_generic_formal_part.adb -vcg sem-wf_generic_subprogram_instantiation.adb -vcg sem-wf_generic_subprogram_instantiation-wf_generic_actual_part.adb -vcg sem-wf_global_definition.adb -vcg sem-wf_inherit_clause.adb -vcg sem-wf_justification_statement.adb -vcg sem-wf_package_declaration.adb -vcg sem-wf_package_declaration-add_child.adb -vcg sem-wf_package_declaration-get_package_declaration_key_nodes.adb -vcg sem-wf_package_declaration-wf_package_specification.adb -vcg sem-wf_package_declaration-wf_package_specification-check_modes.adb -vcg sem-wf_package_declaration-wf_package_specification-check_state_can_be_initialized.adb -vcg sem-wf_package_declaration-wf_package_specification-check_types_can_be_used.adb -vcg sem-wf_package_declaration-wf_package_specification-wf_anno.adb -vcg sem-wf_package_declaration-wf_package_specification-wf_anno-wf_init_spec.adb -vcg sem-wf_package_declaration-wf_package_specification-wf_anno-wf_own.adb -vcg sem-wf_package_declaration-wf_package_specification-wf_private.adb -vcg sem-wf_package_declaration-wf_package_specification-wf_visible.adb -vcg sem-wf_package_declaration-wf_package_specification-wf_visible-wf_deferred.adb -vcg 
sem-wf_package_declaration-wf_package_specification-wf_visible-wf_private_type_declaration.adb -vcg sem-wf_pragma.adb -vcg sem-wf_pragma-wf_attach_handler.adb -vcg sem-wf_pragma-wf_elaborate_body.adb -vcg sem-wf_predicate.adb -vcg sem-wf_priority_value.adb -vcg sem-wf_property_list.adb -vcg sem-wf_renaming_declaration.adb -vcg sem-wf_subprogram_annotation.adb -vcg sem-wf_subprogram_constraint.adb -vcg sem-wf_subprogram_declaration.adb -vcg sem-wf_type_mark.adb -vcg spark-2012.0.deb/examiner/sem-check_valid_ident.adb0000644000175000017500000001057111753202336021126 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) procedure Check_Valid_Ident (Ident_Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Ok : out Boolean) is Ident_Str : LexTokenManager.Lex_String; ------------------------------------------------------------------------ -- If we are declaring a subprogram in a package spec and the spec contains -- protected types we search each of these to detect re-use of the subprogram -- name. If we don't trap such re-use at this point then we end up with a -- legal package spec for which no legal body could be written (since its -- implementation would inevitably involve overload resolution of calls made from -- within the protected body. e.g. type PT in package P declares operation K. Package -- P also declares an operation K. From inside the body of PT, a call to K could refer -- to either of the two Ks since both are directly visible. 
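  -- An illustrative sketch of the kind of spec this check rejects (hypothetical
  -- example only, not part of the Examiner sources), using the P/PT/K names from
  -- the note above:
  --
  --    package P is
  --       protected type PT is
  --          procedure K;       -- operation K declared inside PT
  --       end PT;
  --       procedure K;          -- re-use of K is rejected here (error 988): a
  --                             -- call to K from the body of PT would be
  --                             -- ambiguous between the two directly visible Ks.
  --    end P;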
function Is_Defined_In_Protected_Type (Name : LexTokenManager.Lex_String; Current_Scope : Dictionary.Scopes; Is_Private : Boolean) return Boolean --# global in Dictionary.Dict; --# in LexTokenManager.State; is Result : Boolean := False; It : Dictionary.Iterator; begin if Dictionary.IsPackage (Dictionary.GetRegion (Current_Scope)) then if Is_Private then It := Dictionary.First_Private_Protected_Type (The_Package => Dictionary.GetRegion (Current_Scope)); else It := Dictionary.First_Visible_Protected_Type (The_Package => Dictionary.GetRegion (Current_Scope)); end if; while not Dictionary.IsNullIterator (It) loop Result := Dictionary.IsDirectlyDefined (Name => Name, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Dictionary.CurrentSymbol (It)), Context => Dictionary.ProofContext); exit when Result; It := Dictionary.NextSymbol (It); end loop; end if; return Result; end Is_Defined_In_Protected_Type; begin -- Check_Valid_Ident -- check that name is not already in use Ident_Str := Node_Lex_String (Node => Ident_Node); if Dictionary.IsDefined (Name => Ident_Str, Scope => Current_Scope, Context => Dictionary.ProofContext, Full_Package_Name => False) then Ok := False; ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); elsif Is_Defined_In_Protected_Type (Name => Ident_Str, Current_Scope => Current_Scope, Is_Private => False) or else Is_Defined_In_Protected_Type (Name => Ident_Str, Current_Scope => Current_Scope, Is_Private => True) then Ok := False; ErrorHandler.Semantic_Error (Err_Num => 988, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); else Ok := True; end if; end Check_Valid_Ident; spark-2012.0.deb/examiner/errorhandler-errorbuffer.adb0000644000175000017500000004725011753202336021741 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (ErrorHandler) package body ErrorBuffer --# own Buffer is Current_Flow_Error, --# The_Buffer; is subtype Buff_Index is Integer range 1 .. ExaminerConstants.ErrorBufferSize; subtype Buff_Ptr is Integer range 0 .. 
ExaminerConstants.ErrorBufferSize; type Error_Array is array (Buff_Index) of Error_Types.NumericError; type Buffers is record ErrorList : Error_Array; ErrPtr : Buff_Ptr; end record; The_Buffer : Buffers; Current_Flow_Error : Error_Types.NumericError; -------------------------------------------------------------------------- -- Local Procedures -------------------------------------------------------------------------- function Null_Error return Error_Types.NumericError --# global in Dictionary.Dict; is begin return Error_Types.NumericError' (ErrorType => Error_Types.NoErr, Position => LexTokenManager.Token_Position'(Start_Line_No => 0, Start_Pos => 0), Scope => Dictionary.GlobalScope, ErrorNum => 0, Reference => 0, Name1 => Error_Types.NoName, Name2 => Error_Types.NoName, Name3 => Error_Types.NoName); end Null_Error; -------------------------------------------------------------------------- -- Exported Procedures -------------------------------------------------------------------------- procedure Flush (Err_File : in out Error_IO.File_Type) --# global in Dictionary.Dict; --# in out SPARK_IO.File_Sys; --# in out The_Buffer; --# derives Err_File from *, --# SPARK_IO.File_Sys, --# The_Buffer & --# SPARK_IO.File_Sys from *, --# Dictionary.Dict, --# Err_File, --# The_Buffer & --# The_Buffer from ; is function Is_Less_Than (One, Two : LexTokenManager.Token_Position) return Boolean is Less_Than : Boolean; begin if One.Start_Line_No = Two.Start_Line_No then Less_Than := One.Start_Pos < Two.Start_Pos; else Less_Than := One.Start_Line_No < Two.Start_Line_No; end if; return Less_Than; end Is_Less_Than; -------------------------- procedure Sort_Buff --# global in out The_Buffer; --# derives The_Buffer from *; is procedure Swap (X, Y : in Buff_Ptr) --# global in out The_Buffer; --# derives The_Buffer from *, --# X, --# Y; is T : Error_Types.NumericError; begin T := The_Buffer.ErrorList (X); The_Buffer.ErrorList (X) := The_Buffer.ErrorList (Y); The_Buffer.ErrorList (Y) := T; end Swap; begin --Sort_Buff for I in Buff_Ptr range 1 .. The_Buffer.ErrPtr loop for J in Buff_Ptr range I .. 
The_Buffer.ErrPtr loop if Is_Less_Than (One => The_Buffer.ErrorList (J).Position, Two => The_Buffer.ErrorList (I).Position) then Swap (X => I, Y => J); end if; end loop; end loop; end Sort_Buff; ------------------------------------------ procedure Merge --# global in Dictionary.Dict; --# in The_Buffer; --# in out Err_File; --# in out SPARK_IO.File_Sys; --# derives Err_File from *, --# SPARK_IO.File_Sys, --# The_Buffer & --# SPARK_IO.File_Sys from *, --# Dictionary.Dict, --# Err_File, --# The_Buffer; is New_File : Error_IO.File_Type; Ptr : Buff_Ptr; Buf_Empty, File_Empty : Boolean; Buf_Ent, File_Ent : Error_Types.NumericError; -------------------------------------------------------------------------- procedure Create_Temp (F : out Error_IO.File_Type) --# global in out SPARK_IO.File_Sys; --# derives F, --# SPARK_IO.File_Sys from SPARK_IO.File_Sys; is OK : SPARK_IO.File_Status; Local_F : Error_IO.File_Type := Error_IO.Null_File; begin Error_IO.Create (Local_F, OK); if OK /= SPARK_IO.Ok then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Error_Handler_Temporary_Files, Msg => "in ErrorBuffer.Create_Temp"); end if; F := Local_F; end Create_Temp; -------------------------------------------------------------------------- procedure Reset_Temp (F : in out Error_IO.File_Type) --# global in out SPARK_IO.File_Sys; --# derives F, --# SPARK_IO.File_Sys from *, --# F; is OK : SPARK_IO.File_Status; begin Error_IO.Reset (F, SPARK_IO.In_File, OK); if OK /= SPARK_IO.Ok then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Error_Handler_Temporary_Files, Msg => "in ErrorBuffer.Reset_Temp"); end if; end Reset_Temp; -------------------------------------------------------------------------- procedure Close_Temp (F : in out Error_IO.File_Type) --# global in out SPARK_IO.File_Sys; --# derives F, --# SPARK_IO.File_Sys from *, --# F; is OK : SPARK_IO.File_Status; begin Error_IO.Close (F, OK); if OK /= SPARK_IO.Ok then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Error_Handler_Temporary_Files, Msg => "in ErrorBuffer.Close_Temp"); end if; end Close_Temp; -------------------------------------------------------------------------- procedure Get_Buffer_Entry (Ent : out Error_Types.NumericError; Empty : out Boolean) --# global in Dictionary.Dict; --# in The_Buffer; --# in out Ptr; --# derives Empty, --# Ptr from Ptr, --# The_Buffer & --# Ent from Dictionary.Dict, --# Ptr, --# The_Buffer; is begin if Ptr = The_Buffer.ErrPtr then Ent := Null_Error; Empty := True; else Ptr := Ptr + 1; Ent := The_Buffer.ErrorList (Ptr); Empty := False; end if; end Get_Buffer_Entry; begin --Merge Ptr := 0; Get_Buffer_Entry (Ent => Buf_Ent, Empty => Buf_Empty); if not Buf_Empty then --only merge sort if buffer contains some entries Create_Temp (F => New_File); Reset_Temp (F => Err_File); Error_IO.Get_Numeric_Error (Err_File, File_Ent); File_Empty := (File_Ent = Error_Types.Empty_NumericError); while not (Buf_Empty and File_Empty) loop if File_Empty then Error_IO.Put_Numeric_Error (New_File, Buf_Ent); Get_Buffer_Entry (Ent => Buf_Ent, Empty => Buf_Empty); elsif Buf_Empty then Error_IO.Put_Numeric_Error (New_File, File_Ent); Error_IO.Get_Numeric_Error (Err_File, File_Ent); File_Empty := (File_Ent = Error_Types.Empty_NumericError); else --neither empty if Is_Less_Than (One => Buf_Ent.Position, Two => File_Ent.Position) then Error_IO.Put_Numeric_Error (New_File, Buf_Ent); Get_Buffer_Entry (Ent => Buf_Ent, Empty => Buf_Empty); else Error_IO.Put_Numeric_Error (New_File, File_Ent); Error_IO.Get_Numeric_Error (Err_File, File_Ent); File_Empty := 
(File_Ent = Error_Types.Empty_NumericError); end if; end if; end loop; --# accept Flow, 10, Err_File, "Expected ineffective assignment"; Close_Temp (F => Err_File); --# end accept; Err_File := New_File; end if; end Merge; ------------------------------------------ procedure Init_Buff --# global out The_Buffer; --# derives The_Buffer from ; is begin The_Buffer.ErrPtr := 0; --intentional failure to initialize array will cause flow error here --# accept F, 31, The_Buffer.ErrorList, "Intentional incomplete initialization" & --# F, 32, The_Buffer.ErrorList, "Intentional incomplete initialization" & --# F, 602, The_Buffer, The_Buffer.ErrorList, "Intentional incomplete initialization"; end Init_Buff; -- Init. is partial but effecive. Expect 2 errs + 1 warning begin -- Flush Sort_Buff; Merge; Init_Buff; end Flush; -------------------------------------------------------------------------- procedure Add (Err_File : in out Error_IO.File_Type; Err_Type : in Error_Types.Error_Class; Pos : in LexTokenManager.Token_Position; Scope : in Dictionary.Scopes; Error_Number : in Natural; Reference : in Natural; Name1, Name2, Name3 : in Error_Types.Names; Echo_Str : out Error_Types.StringError) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Conversions.State; --# in out Current_Flow_Error; --# in out SPARK_IO.File_Sys; --# in out The_Buffer; --# derives Conversions.State, --# The_Buffer from *, --# CommandLineData.Content, --# Current_Flow_Error, --# Error_Number, --# Err_Type, --# Name1, --# Name2, --# Name3, --# Pos, --# Reference, --# Scope & --# Current_Flow_Error from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Error_Number, --# Err_Type, --# Name1, --# Name2, --# Name3, --# Pos, --# Reference, --# Scope & --# Echo_Str from CommandLineData.Content, --# Conversions.State, --# Current_Flow_Error, --# Dictionary.Dict, --# Error_Number, --# Err_Type, --# LexTokenManager.State, --# Name1, --# Name2, --# Name3, --# Pos, --# Reference, --# Scope & --# Err_File from *, --# CommandLineData.Content, --# Current_Flow_Error, --# Error_Number, --# Err_Type, --# Name1, --# Name2, --# Name3, --# Pos, --# Reference, --# Scope, --# SPARK_IO.File_Sys, --# The_Buffer & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Current_Flow_Error, --# Dictionary.Dict, --# Error_Number, --# Err_File, --# Err_Type, --# Name1, --# Name2, --# Name3, --# Pos, --# Reference, --# Scope, --# The_Buffer; is New_Entry : Error_Types.NumericError; procedure Set_To_New_Errors (New_Entry : in out Error_Types.NumericError) --# derives New_Entry from *; is begin if (New_Entry.ErrorNum = ErrorHandler.Dependency_Err_Number (Err_Type => ErrorHandler.May_Be_Used) and New_Entry.ErrorType = Error_Types.CondlDependencyErr) then New_Entry.ErrorNum := ErrorHandler.Dependency_Err_Number (Err_Type => ErrorHandler.May_Be_Used_New); elsif (New_Entry.ErrorNum = ErrorHandler.Dependency_Err_Number (Err_Type => ErrorHandler.Not_Used) and New_Entry.ErrorType = Error_Types.UncondDependencyErr) then New_Entry.ErrorNum := ErrorHandler.Dependency_Err_Number (Err_Type => ErrorHandler.Not_Used_New); end if; end Set_To_New_Errors; procedure Reset_Error_Num (Err_Num : in out Error_Types.NumericError) --# global in Dictionary.Dict; --# in out Current_Flow_Error; --# derives Current_Flow_Error from *, --# Dictionary.Dict, --# Err_Num & --# Err_Num from *, --# Current_Flow_Error; is begin case Err_Num.ErrorType is when Error_Types.UncondDependencyErr => if Err_Num.ErrorNum = 
ErrorHandler.Dependency_Err_Number (Err_Type => ErrorHandler.Not_Used_New) then if Current_Flow_Error.ErrorNum = ErrorHandler.Dependency_Err_Number (Err_Type => ErrorHandler.Not_Used_New) and then (Err_Num.Name2 = Current_Flow_Error.Name2 and Err_Num.Scope = Current_Flow_Error.Scope and Err_Num.Position = Current_Flow_Error.Position) then -- Continuation Err_Num.ErrorNum := ErrorHandler.Dependency_Err_Number (Err_Type => ErrorHandler.Not_Used_Continue); else -- New Error; Current_Flow_Error := Err_Num; end if; else Current_Flow_Error := Null_Error; end if; when Error_Types.CondlDependencyErr => if Err_Num.ErrorNum = ErrorHandler.Dependency_Err_Number (Err_Type => ErrorHandler.May_Be_Used_New) then if Current_Flow_Error.ErrorNum = ErrorHandler.Dependency_Err_Number (Err_Type => ErrorHandler.May_Be_Used_New) and then (Err_Num.Name2 = Current_Flow_Error.Name2 and Err_Num.Scope = Current_Flow_Error.Scope and Err_Num.Position = Current_Flow_Error.Position) then -- Continuation Err_Num.ErrorNum := ErrorHandler.Dependency_Err_Number (Err_Type => ErrorHandler.May_Be_Used_Continue); else -- New Error; Current_Flow_Error := Err_Num; end if; else Current_Flow_Error := Null_Error; end if; when others => Current_Flow_Error := Null_Error; end case; end Reset_Error_Num; begin New_Entry := Error_Types.NumericError' (ErrorType => Err_Type, Position => Pos, Scope => Scope, ErrorNum => Error_Number, Reference => Reference, Name1 => Name1, Name2 => Name2, Name3 => Name3); if not CommandLineData.Content.Legacy_Errors then Set_To_New_Errors (New_Entry => New_Entry); end if; Reset_Error_Num (Err_Num => New_Entry); Conversions.ToString (New_Entry, Error_Types.ForScreen, Echo_Str); The_Buffer.ErrPtr := The_Buffer.ErrPtr + 1; The_Buffer.ErrorList (The_Buffer.ErrPtr) := New_Entry; if The_Buffer.ErrPtr = ExaminerConstants.ErrorBufferSize then Flush (Err_File => Err_File); end if; end Add; begin --init The_Buffer.ErrPtr := 0; Current_Flow_Error.ErrorType := Error_Types.NoErr; Current_Flow_Error.Position := LexTokenManager.Token_Position'(Start_Line_No => 0, Start_Pos => 0); Current_Flow_Error.ErrorNum := 0; Current_Flow_Error.Reference := 0; Current_Flow_Error.Name1 := Error_Types.NoName; Current_Flow_Error.Name2 := Error_Types.NoName; Current_Flow_Error.Name3 := Error_Types.NoName; --intentional non-initialization of array will cause flow error here --# accept F, 31, The_Buffer.ErrorList, "Intentional incomplete initialization" & --# F, 32, The_Buffer.ErrorList, "Intentional incomplete initialization" & --# F, 602, The_Buffer, The_Buffer.ErrorList, "Intentional incomplete initialization" & --# F, 31, Current_Flow_Error.Scope, "Intentional incomplete initialization" & --# F, 32, Current_Flow_Error.Scope, "Intentional incomplete initialization" & --# F, 602, Current_Flow_Error, Current_Flow_Error.Scope, "Intentional incomplete initialization"; end ErrorBuffer; -- Init. is partial but effective. Expect 4 errs + 2 warnings. spark-2012.0.deb/examiner/dictionary-lookupselecteditem.adb0000644000175000017500000007103511753202336022773 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Dictionary) function LookupSelectedItem (Prefix : Symbol; Selector : LexTokenManager.Lex_String; Scope : Scopes; Context : Contexts) return Symbol is Item : Symbol; -------------------------------------------------------------------------------- function Lookup_Selected_Item_In_Package (The_Package : RawDict.Package_Info_Ref; Name : LexTokenManager.Lex_String; Scope : Scopes; Context : Contexts) return Symbol --# global in CommandLineData.Content; --# in Dict; --# in LexTokenManager.State; is Region : Symbol; The_Variable : RawDict.Variable_Info_Ref; Item : Symbol; Library_Package : RawDict.Package_Info_Ref; Is_Visible : Boolean; ------------------------------------------------------------------------------ procedure Lookup_Global_Variables (Abstraction : in Abstractions; The_Package : in RawDict.Package_Info_Ref; Name : in LexTokenManager.Lex_String; Subprogram : in Symbol; Context : in Contexts; The_Variable : out RawDict.Variable_Info_Ref; Is_Visible : out Boolean) --# global in Dict; --# in LexTokenManager.State; --# in Scope; --# derives Is_Visible from Abstraction, --# Context, --# Dict, --# LexTokenManager.State, --# Name, --# Scope, --# Subprogram, --# The_Package & --# The_Variable from Abstraction, --# Dict, --# LexTokenManager.State, --# Name, --# Subprogram, --# The_Package; is The_Global_Variable : RawDict.Global_Variable_Info_Ref; function First_Global (Subprogram : Symbol; Abstraction : Abstractions) return RawDict.Global_Variable_Info_Ref --# global in Dict; is The_Global_Variable : RawDict.Global_Variable_Info_Ref; begin case RawDict.GetSymbolDiscriminant (Subprogram) is when Subprogram_Symbol => The_Global_Variable := RawDict.Get_Subprogram_First_Global_Variable (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Subprogram), Abstraction => Abstraction); when Type_Symbol => -- must be a task type SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Subprogram)), Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.First_Global"); The_Global_Variable := RawDict.Get_Task_Type_First_Global_Variable (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => Subprogram), Abstraction => Abstraction); when others => -- non-exec code The_Global_Variable := RawDict.Null_Global_Variable_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.First_Global"); end case; return The_Global_Variable; end First_Global; -- Two function lifted out to simplify Boolean expression in main procedure function Is_Directly_Visible (The_Variable : RawDict.Variable_Info_Ref; The_Package : RawDict.Package_Info_Ref) return Boolean --# global in Dict; is begin return Get_Variable_Scope (The_Variable => The_Variable) = Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (The_Package)); end Is_Directly_Visible; function Is_Visible_To_A_Child (The_Variable : RawDict.Variable_Info_Ref; The_Package, Library_Package : 
RawDict.Package_Info_Ref) return Boolean --# global in Dict; is begin return Get_Variable_Scope (The_Variable => The_Variable) = Set_Visibility (The_Visibility => Privat, The_Unit => RawDict.Get_Package_Symbol (The_Package)) and then Is_Proper_Descendent (Inner_Package => Library_Package, Outer_Package => The_Package); end Is_Visible_To_A_Child; begin -- Lookup_Global_Variables The_Global_Variable := First_Global (Subprogram => Subprogram, Abstraction => Abstraction); loop if The_Global_Variable = RawDict.Null_Global_Variable_Info_Ref then The_Variable := RawDict.Null_Variable_Info_Ref; Is_Visible := False; exit; end if; if RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) = RawDict.Subprogram_Variable_Item or else RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) = RawDict.Task_Type_Variable_Item then The_Variable := RawDict.Get_Global_Variable_Variable (The_Global_Variable => The_Global_Variable); else The_Variable := RawDict.Null_Variable_Info_Ref; end if; if The_Variable /= RawDict.Null_Variable_Info_Ref and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => RawDict.Get_Variable_Name (The_Variable => The_Variable), Lex_Str2 => Name) = LexTokenManager.Str_Eq and then RawDict.Get_Global_Variable_Prefix_Needed (The_Global_Variable => The_Global_Variable) and then RawDict.GetSymbolDiscriminant (Get_Owner (The_Variable => The_Variable)) = Package_Symbol and then RawDict.Get_Package_Info_Ref (Item => Get_Owner (The_Variable => The_Variable)) = The_Package then if Context = ProofContext then Is_Visible := True; elsif Variable_Is_Declared (The_Variable => The_Variable) then Is_Visible := Is_Directly_Visible (The_Variable => The_Variable, The_Package => The_Package) or else Is_Visible_To_A_Child (The_Variable => The_Variable, The_Package => The_Package, Library_Package => Get_Library_Package (Scope => Scope)) or else IsLocal (Scope, Set_Visibility (The_Visibility => Local, The_Unit => RawDict.Get_Package_Symbol (The_Package))); else Is_Visible := False; end if; exit; end if; The_Global_Variable := RawDict.Get_Next_Global_Variable (The_Global_Variable => The_Global_Variable); end loop; end Lookup_Global_Variables; ------------------------------------------------------------------------------ function Lookup_Children (Sort : PackageSort; The_Package : RawDict.Package_Info_Ref; Name : LexTokenManager.Lex_String; Scope : Scopes; Context : Contexts) return RawDict.Package_Info_Ref --# global in Dict; --# in LexTokenManager.State; is Current_Package : RawDict.Package_Info_Ref; Result : RawDict.Package_Info_Ref := RawDict.Null_Package_Info_Ref; ------------------------------------------------------------------------------ function Check_Is_Inherited (The_Package : RawDict.Package_Info_Ref; Scope : Scopes) return RawDict.Package_Info_Ref --# global in Dict; is Current_Scope : Scopes; Region : Symbol; Result : RawDict.Package_Info_Ref; begin Current_Scope := Scope; loop Region := GetRegion (Current_Scope); if RawDict.GetSymbolDiscriminant (Region) = Package_Symbol then if Is_Package_Inherited (The_Inherited_Symbol => RawDict.Get_Package_Symbol (The_Package), The_Package => RawDict.Get_Package_Info_Ref (Item => Region)) then Result := The_Package; else Result := RawDict.Null_Package_Info_Ref; end if; exit; end if; if RawDict.GetSymbolDiscriminant (Region) = Subprogram_Symbol and then Is_Main_Program (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Region)) then if Is_Subprogram_Inherited (The_Inherited_Symbol => 
RawDict.Get_Package_Symbol (The_Package), The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Region)) then Result := The_Package; else Result := RawDict.Null_Package_Info_Ref; end if; exit; end if; Current_Scope := GetEnclosingScope (Current_Scope); end loop; return Result; -- *NB* the inheritance check also must succeed in the case where -- a private child package is with'd by its parent; since -- the context required for this check is not available here, -- this case is handled during wf checking of the with clause -- by insertion of a 'fake' inherit reference. end Check_Is_Inherited; ------------------------------------------------------------------------------ function Check_Is_Withed (The_Package : RawDict.Package_Info_Ref; Scope : Scopes; Context : Contexts) return RawDict.Package_Info_Ref --# global in Dict; is Current, Last1 : Scopes; Ancestor, Result : RawDict.Package_Info_Ref; begin if Context = ProofContext then Result := The_Package; else Current := Scope; Last1 := Current; loop exit when (RawDict.GetSymbolDiscriminant (GetRegion (Current)) = Package_Symbol and then RawDict.Get_Package_Info_Ref (Item => GetRegion (Current)) = Get_Predefined_Package_Standard) or else Is_Withed (The_Withed_Symbol => RawDict.Get_Package_Symbol (The_Package), Scope => Current); Last1 := Current; Current := GetEnclosingScope (Current); end loop; if RawDict.GetSymbolDiscriminant (GetRegion (Current)) = Package_Symbol and then RawDict.Get_Package_Info_Ref (Item => GetRegion (Current)) = Get_Predefined_Package_Standard then Result := RawDict.Null_Package_Info_Ref; if Last1 /= Current and then RawDict.GetSymbolDiscriminant (GetRegion (Last1)) = Package_Symbol then -- search through ancestors Ancestor := RawDict.Get_Package_Parent (The_Package => RawDict.Get_Package_Info_Ref (Item => GetRegion (Last1))); loop exit when Ancestor = RawDict.Null_Package_Info_Ref or else Is_Withed (The_Withed_Symbol => RawDict.Get_Package_Symbol (The_Package), Scope => Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (Ancestor))); Ancestor := RawDict.Get_Package_Parent (The_Package => Ancestor); end loop; if Ancestor /= RawDict.Null_Package_Info_Ref then Result := The_Package; end if; end if; else Result := The_Package; end if; end if; return Result; end Check_Is_Withed; begin -- Lookup_Children case Sort is when Public => Current_Package := RawDict.Get_Package_First_Public_Child (The_Package => The_Package); when PrivateChild => Current_Package := RawDict.Get_Package_First_Private_Child (The_Package => The_Package); end case; loop exit when Current_Package = RawDict.Null_Package_Info_Ref; if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => RawDict.Get_Package_Name (The_Package => Current_Package), Lex_Str2 => Name) = LexTokenManager.Str_Eq then Result := Current_Package; exit; end if; Current_Package := RawDict.Get_Package_Next_Sibling (The_Package => Current_Package); end loop; if Result /= RawDict.Null_Package_Info_Ref then if not IsGlobalScope (Scope) then Result := Check_Is_Inherited (The_Package => Result, Scope => Scope); end if; if Result /= RawDict.Null_Package_Info_Ref then Result := Check_Is_Withed (The_Package => Result, Scope => Scope, Context => Context); end if; end if; return Result; end Lookup_Children; ------------------------------------------------------------------------------ -- CFR 806 Adds this function. Needed so scan for enclosing region -- ignores enclosing loop statements. 
SEPRS 889 and 1083 function GetEnclosingNonLoopRegion (Scope : Scopes) return Symbol --# global in Dict; is Current_Scope : Scopes; Region : Symbol; begin Current_Scope := Scope; loop Region := GetRegion (Current_Scope); exit when RawDict.GetSymbolDiscriminant (Region) /= LoopSymbol; Current_Scope := GetEnclosingScope (Current_Scope); end loop; return Region; end GetEnclosingNonLoopRegion; begin -- Lookup_Selected_Item_In_Package Trace_Lex_Str (Msg => "In LookUpSelectedItemInPackage seeking ", L => Name); Region := GetEnclosingNonLoopRegion (Scope); if Is_Subprogram (Region) then Lookup_Global_Variables (Abstraction => Get_Subprogram_Abstraction (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Region), Scope => Scope), The_Package => The_Package, Name => Name, Subprogram => Region, Context => Context, The_Variable => The_Variable, Is_Visible => Is_Visible); elsif RawDict.GetSymbolDiscriminant (Region) = Type_Symbol and then Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Region)) then Lookup_Global_Variables (Abstraction => Get_Task_Type_Abstraction (The_Task_Type => RawDict.Get_Type_Info_Ref (Region), Scope => Scope), The_Package => The_Package, Name => Name, Subprogram => Region, Context => Context, The_Variable => The_Variable, Is_Visible => Is_Visible); else The_Variable := RawDict.Null_Variable_Info_Ref; Is_Visible := False; end if; Item := RawDict.Get_Variable_Symbol (The_Variable); --# assert True; if Item = NullSymbol then LookupScope (Name => Name, Stop_At => LexTokenManager.Null_String, Scope => Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (The_Package)), Calling_Scope => Scope, Context => Context, Item => Item, Is_Visible => Is_Visible); if Item = NullSymbol then Library_Package := Get_Library_Package (Scope => Scope); if Is_Proper_Descendent (Inner_Package => Library_Package, Outer_Package => The_Package) then if Scope /= Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (Library_Package)) or else Descendent_Is_Private (Inner_Package => Library_Package, Outer_Package => The_Package) then LookupScope (Name => Name, Stop_At => LexTokenManager.Null_String, Scope => Set_Visibility (The_Visibility => Privat, The_Unit => RawDict.Get_Package_Symbol (The_Package)), Calling_Scope => Scope, Context => Context, Item => Item, Is_Visible => Is_Visible); end if; elsif IsLocal (Scope, Set_Visibility (The_Visibility => Local, The_Unit => RawDict.Get_Package_Symbol (The_Package))) then Item := LookupImmediateScope (Name, Set_Visibility (The_Visibility => Local, The_Unit => RawDict.Get_Package_Symbol (The_Package)), Context); Is_Visible := Item /= NullSymbol; end if; elsif Context = ProofContext and then RawDict.GetSymbolDiscriminant (Item) = Variable_Symbol and then Is_Own_Variable (The_Variable => RawDict.Get_Variable_Info_Ref (Item => Item)) then Library_Package := Get_Library_Package (Scope => Scope); if Is_Descendent_Of_Private_Child (Candidate => Library_Package, The_Package => The_Package) then Item := LookupImmediateScope (Name, Set_Visibility (The_Visibility => Privat, The_Unit => RawDict.Get_Package_Symbol (The_Package)), ProgramContext); Is_Visible := Item /= NullSymbol; end if; end if; --# assert True; if Is_Variable (Item) and then (Is_Subprogram (Region) or else (RawDict.GetSymbolDiscriminant (Region) = Type_Symbol and then Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Region)))) then -- Region is equal to Scope.Region if not within a loop (see start of subprog) -- and second arg 
is equivalent to Scope -- otherwise it is nearest enclosing non-loop region (subprogram) Is_Visible := IsLocal (Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (The_Package)), Set_Visibility (The_Visibility => Get_Visibility (Scope => Scope), The_Unit => Region)); elsif RawDict.GetSymbolDiscriminant (Item) = Subprogram_Symbol then Is_Visible := not Is_Renamed_Local (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Item), Scope => Scope); elsif RawDict.GetSymbolDiscriminant (Item) = ImplicitProofFunctionSymbol then Is_Visible := not Is_Renamed_Local (The_Subprogram => RawDict.GetImplicitProofFunctionAdaFunction (Item), Scope => Scope); end if; end if; --# assert True; if not Is_Visible then Item := NullSymbol; end if; --# assert True; if Item = NullSymbol then case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => null; when CommandLineData.SPARK95_Onwards => -- look up children, if relevant Library_Package := Get_Library_Package (Scope => Scope); if not Is_Proper_Descendent (Inner_Package => Library_Package, Outer_Package => The_Package) then Item := RawDict.Get_Package_Symbol (Lookup_Children (Sort => Public, The_Package => The_Package, Name => Name, Scope => Scope, Context => Context)); if Item = NullSymbol and then IsGlobalScope (Scope) then Item := RawDict.Get_Package_Symbol (Lookup_Children (Sort => PrivateChild, The_Package => The_Package, Name => Name, Scope => Scope, Context => Context)); end if; end if; end case; end if; return Item; end Lookup_Selected_Item_In_Package; -------------------------------------------------------------------------------- function Lookup_Selected_Item_In_Type_Mark (Type_Mark : RawDict.Type_Info_Ref; Name : LexTokenManager.Lex_String; Context : Contexts) return Symbol --# global in Dict; --# in LexTokenManager.State; is begin Trace_Lex_Str (Msg => "In LookUpSelectedItemInTypeMark seeking ", L => Name); -- The Get_Root_Type in the following statement is needed to handle -- lookup of items of protected subtypes and record subtypes. 
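      -- For example (hypothetical declarations, for illustration only): given
      --    type R is record F : Integer; end record;
      --    subtype S is R;
      -- a selected component X.F, where X is of subtype S, must be found in the
      -- declaration of the root type R; likewise operations of a protected
      -- subtype live in the root protected type. Hence the lookup below is made
      -- against Get_Root_Type rather than against Type_Mark itself.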
return LookupImmediateScope (Name, Set_Visibility (The_Visibility => Local, The_Unit => RawDict.Get_Type_Symbol (Get_Root_Type (Type_Mark => Type_Mark))), Context); end Lookup_Selected_Item_In_Type_Mark; -------------------------------------------------------------------------------- function LookupSelectedItemInObject (Object : Symbol; Name : LexTokenManager.Lex_String; Context : Contexts) return Symbol --# global in Dict; --# in LexTokenManager.State; is Result : Symbol; -------------------------------------------------------------------------------- function Lookup_Protected_Operation (Op_Name : LexTokenManager.Lex_String; The_Protected_Type : RawDict.Type_Info_Ref) return Symbol --# global in Dict; --# in LexTokenManager.State; is It : Iterator; Result : Symbol := NullSymbol; -- default answer for failure case begin It := First_Protected_Type_Visible_Subprogram (The_Protected_Type => The_Protected_Type); while not IsNullIterator (It) loop if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => GetSimpleName (CurrentSymbol (It)), Lex_Str2 => Op_Name) = LexTokenManager.Str_Eq then -- success Result := CurrentSymbol (It); exit; end if; It := NextSymbol (It); end loop; return Result; end Lookup_Protected_Operation; begin -- LookupSelectedItemInObject Trace_Lex_Str (Msg => "In LookUpSelectedItemInObject seeking ", L => Name); if RawDict.GetSymbolDiscriminant (Object) = Constant_Symbol then Result := Lookup_Selected_Item_In_Type_Mark (Type_Mark => RawDict.Get_Constant_Type (The_Constant => RawDict.Get_Constant_Info_Ref (Item => Object)), Name => Name, Context => Context); else -- We need to distinguish between a record object where we will seek a field -- in local scope of the object and a protected object where we will need to -- seek an operation in the visible part of the type of the protected object if RawDict.Get_Type_Discriminant (Type_Mark => Get_Type (Object)) = Record_Type_Item then Result := LookupImmediateScope (Name, Set_Visibility (The_Visibility => Local, The_Unit => Object), Context); elsif RawDict.Get_Type_Discriminant (Type_Mark => Get_Type (Object)) = Protected_Type_Item then Result := Lookup_Protected_Operation (Op_Name => Name, The_Protected_Type => Get_Root_Type (Type_Mark => Get_Type (Object))); else Result := NullSymbol; -- to avoid DF error SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "LookupSelectedItemInObject called with prefix which is not record or protected"); end if; end if; return Result; end LookupSelectedItemInObject; begin -- LookupSelectedItem Trace_Lex_Str (Msg => "In LookUpSelectedItem seeking ", L => Selector); Trace_Sym (Msg => " in ", Sym => Prefix, Scope => Scope); case RawDict.GetSymbolDiscriminant (Prefix) is when Package_Symbol => Item := Lookup_Selected_Item_In_Package (The_Package => RawDict.Get_Package_Info_Ref (Item => Prefix), Name => Selector, Scope => Scope, Context => Context); when Type_Symbol => Item := Lookup_Selected_Item_In_Type_Mark (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Prefix), Name => Selector, Context => Context); when Variable_Symbol | Constant_Symbol | Subcomponent_Symbol | -- record object field may also have fields Subprogram_Parameter_Symbol => Item := LookupSelectedItemInObject (Prefix, Selector, Context); when Subprogram_Symbol => Item := LookupImmediateScope (Selector, Set_Visibility (The_Visibility => Local, The_Unit => Prefix), Context); when others => -- non-exec code Item := NullSymbol; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg 
=> "in Dictionary.LookUpSelectedItem"); end case; return Item; end LookupSelectedItem; spark-2012.0.deb/examiner/screenecho.adb0000644000175000017500000001623111753202336017042 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= package body ScreenEcho is procedure Put_Char (Item : in Character) is begin SPARK_IO.Put_Char (SPARK_IO.Standard_Output, Item); end Put_Char; ---------------------------------------------------------------------------- procedure Put_Integer (Item : in Integer; Width : in Natural; Base : in SPARK_IO.Number_Base) is begin SPARK_IO.Put_Integer (SPARK_IO.Standard_Output, Item, Width, Base); end Put_Integer; ---------------------------------------------------------------------------- procedure Put_String (Item : in String) is begin SPARK_IO.Put_String (SPARK_IO.Standard_Output, Item, 0); end Put_String; ---------------------------------------------------------------------------- procedure Put_Line (Item : in String) is begin SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Item, 0); end Put_Line; ---------------------------------------------------------------------------- procedure Put_StringWithLength (Item : in String; Stop : in Natural) is begin SPARK_IO.Put_String (SPARK_IO.Standard_Output, Item, Stop); end Put_StringWithLength; ---------------------------------------------------------------------------- procedure Put_LineWithLength (Item : in String; Stop : in Natural) is begin SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Item, Stop); end Put_LineWithLength; ---------------------------------------------------------------------------- procedure New_Line (Spacing : in Positive) is begin SPARK_IO.New_Line (SPARK_IO.Standard_Output, Spacing); end New_Line; ---------------------------------------------------------------------------- procedure Set_Col (Posn : in Positive) is begin SPARK_IO.Set_Col (SPARK_IO.Standard_Output, Posn); end Set_Col; ---------------------------------------------------------------------------- procedure Put_ExaminerString (Item : in E_Strings.T) is begin E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => Item); end Put_ExaminerString; ---------------------------------------------------------------------------- procedure Put_ExaminerLine (Item : in E_Strings.T) is begin E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Item); end Put_ExaminerLine; ---------------------------------------------------------------------------- procedure Echo (Str : in E_Strings.T) is procedure Print_Line (Start_Pos, End_Pos, Indent : in Natural; Line : in E_Strings.T) --# global in out SPARK_IO.File_Sys; --# derives 
SPARK_IO.File_Sys from *, --# End_Pos, --# Indent, --# Line, --# Start_Pos; is Pos, Current_Line_End, Current_Line_Start : Natural; procedure Print_Current_Line --# global in Current_Line_End; --# in Current_Line_Start; --# in Line; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Current_Line_End, --# Current_Line_Start, --# Line; is begin E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => E_Strings.Section (E_Str => Line, Start_Pos => Current_Line_Start, Length => (Current_Line_End - Current_Line_Start) + 1)); end Print_Current_Line; procedure Find_Current_Line_End (Current_Line_Pos : in Natural) --# global in Current_Line_Start; --# in End_Pos; --# in Line; --# in Pos; --# out Current_Line_End; --# derives Current_Line_End from Current_Line_Pos, --# Current_Line_Start, --# End_Pos, --# Line, --# Pos; is Next_Space_Pos, Current_Space_Pos : Integer; function Find_Next_Space (Curr_Pos : in E_Strings.Positions) return Natural --# global in Line; is Found : Boolean; Next_Pos : Natural; begin E_Strings.Find_Char_After (E_Str => Line, Search_Start => Curr_Pos, Search_Char => ' ', Char_Found => Found, Char_Pos => Next_Pos); if not Found then Next_Pos := E_Strings.Get_Length (E_Str => Line); end if; return Next_Pos; end Find_Next_Space; begin Current_Space_Pos := Find_Next_Space (Curr_Pos => Current_Line_Pos + 1); loop exit when Current_Space_Pos = E_Strings.Get_Length (E_Str => Line); Next_Space_Pos := Find_Next_Space (Curr_Pos => Current_Space_Pos + 1); exit when (Pos + Next_Space_Pos) - Current_Line_Start > End_Pos; Current_Space_Pos := Next_Space_Pos; end loop; Current_Line_End := Current_Space_Pos; end Find_Current_Line_End; begin Current_Line_Start := 1; Pos := Start_Pos; Find_Current_Line_End (0); loop Print_Current_Line; exit when Current_Line_End = E_Strings.Get_Length (E_Str => Line); New_Line (1); Set_Col (Indent); Pos := Indent; Current_Line_Start := Current_Line_End + 1; Find_Current_Line_End (Current_Line_Start); end loop; end Print_Line; begin New_Line (1); Set_Col (12); Print_Line (12, 80, 12, Str); Put_Line (" ..."); end Echo; end ScreenEcho; ././@LongLink0000000000000000000000000000017100000000000011564 Lustar rootrootspark-2012.0.deb/examiner/sem-wf_package_declaration-wf_package_specification-wf_visible-wf_private_type_declaration.adbspark-2012.0.deb/examiner/sem-wf_package_declaration-wf_package_specification-wf_visible-wf_private_0000644000175000017500000001534711753202336033043 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Wf_Package_Declaration.Wf_Package_Specification.Wf_Visible) procedure Wf_Private_Type_Declaration (Node : in STree.SyntaxNode; Pack_Sym : in Dictionary.Symbol; Current_Scope : in Dictionary.Scopes) is Is_Limited : Boolean; Ident_Node, Next_Node, Tag_Option_Node : STree.SyntaxNode; Ident_Str : LexTokenManager.Lex_String; Sym : Dictionary.Symbol; Is_Abstract : Boolean; Is_Tagged : Boolean; ------------------------------------------------------------------------ procedure Set_Tag_Status (Tag_Option_Node : in STree.SyntaxNode; Is_Abstract : out Boolean; Is_Tagged : out Boolean) --# global in STree.Table; --# derives Is_Abstract, --# Is_Tagged from STree.Table, --# Tag_Option_Node; --# pre Syntax_Node_Type (Tag_Option_Node, STree.Table) = SP_Symbols.non_abstract_tagged or --# Syntax_Node_Type (Tag_Option_Node, STree.Table) = SP_Symbols.abstract_tagged or --# Syntax_Node_Type (Tag_Option_Node, STree.Table) = SP_Symbols.non_tagged; is begin Is_Abstract := Syntax_Node_Type (Node => Tag_Option_Node) = SP_Symbols.abstract_tagged; Is_Tagged := Is_Abstract or else Syntax_Node_Type (Node => Tag_Option_Node) = SP_Symbols.non_abstract_tagged; end Set_Tag_Status; begin -- Wf_Private_Type_Declaration Next_Node := Child_Node (Current_Node => Node); -- ASSUME Next_Node = non_limited_private_type_declaration OR limited_private_type_declaration SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Next_Node) = SP_Symbols.non_limited_private_type_declaration or else Syntax_Node_Type (Node => Next_Node) = SP_Symbols.limited_private_type_declaration, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = non_limited_private_type_declaration OR " & "limited_private_type_declaration in Wf_Private_Type_Declaration"); Is_Limited := (Syntax_Node_Type (Node => Next_Node) = SP_Symbols.limited_private_type_declaration); Ident_Node := Child_Node (Current_Node => Next_Node); -- ASSUME Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Wf_Private_Type_Declaration"); Ident_Str := Node_Lex_String (Node => Ident_Node); Tag_Option_Node := Child_Node (Current_Node => Next_Sibling (Current_Node => Ident_Node)); SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Tag_Option_Node) = SP_Symbols.non_abstract_tagged or else Syntax_Node_Type (Node => Tag_Option_Node) = SP_Symbols.abstract_tagged or else Syntax_Node_Type (Node => Tag_Option_Node) = SP_Symbols.non_tagged, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Tag_Option_Node = non_abstract_tagged OR abstract_tagged OR non_tagged in Wf_Private_Type_Declaration"); Set_Tag_Status (Tag_Option_Node => Tag_Option_Node, Is_Abstract => Is_Abstract, Is_Tagged => Is_Tagged); -- temporary prevention of use of abstract types if Is_Abstract then ErrorHandler.Semantic_Error (Err_Num => 820, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => LexTokenManager.Null_String); end if; if Is_Tagged and then (Dictionary.PackageDeclaresTaggedType (Dictionary.GetRegion (Current_Scope)) or else Dictionary.PackageExtendsAnotherPackage (Dictionary.GetRegion (Current_Scope))) then -- illegal second private tagged type declaration ErrorHandler.Semantic_Error (Err_Num => 839, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => 
LexTokenManager.Null_String); else Sym := Dictionary.LookupItem (Name => Ident_Str, Scope => Current_Scope, Context => Dictionary.ProofContext, Full_Package_Name => False); if Dictionary.Is_Null_Symbol (Sym) or else (Dictionary.IsTypeMark (Sym) and then Dictionary.TypeIsAnnounced (TheType => Sym) and then not Dictionary.Is_Declared (Item => Sym) -- already declared, non private and then not Dictionary.TypeIsPrivate (TheType => Sym)) then -- already declared, private if not Dictionary.Is_Null_Symbol (Sym) then STree.Set_Node_Lex_String (Sym => Sym, Node => Ident_Node); end if; Dictionary.Add_Private_Type (Name => Ident_Str, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Ident_Node), End_Position => Node_Position (Node => Ident_Node)), The_Package => Pack_Sym, Is_Limited => Is_Limited, Is_Tagged_Type => Is_Tagged, Extends => Dictionary.NullSymbol, The_Type => Sym); STree.Add_Node_Symbol (Node => Ident_Node, Sym => Sym); else ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); end if; end if; end Wf_Private_Type_Declaration; spark-2012.0.deb/examiner/heap.adb0000644000175000017500000002137511753202336015646 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Exceptions; with Statistics; with SystemErrors; package body Heap is Initial_Heap_Capacity : constant := 2 ** 10; Empty_Atom : constant Heap_Storage.Atom_Descriptor := Heap_Storage.Atom_Descriptor'(PointerA => NullAtom, PointerB => NullAtom, ValueA => 0, ValueB => 0); Empty_Heap : constant HeapRecord := HeapRecord'(ListOfAtoms => Heap_Storage.Empty_Vector, HighMark => NullAtom, NextFreeAtom => NullAtom); -------------------------------------------------------------------- procedure Reset (TheHeap : in out HeapRecord) is begin -- Rest content ready for IFA of a fresh program unit TheHeap.HighMark := NullAtom; TheHeap.NextFreeAtom := NullAtom; Heap_Storage.Set_Element (V => TheHeap.ListOfAtoms, Index => Atom'First, Value => Empty_Atom); end Reset; procedure Initialize (TheHeap : out HeapRecord) is begin -- Complete initialization to make IFA happy. 
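      -- Assigning the Empty_Heap aggregate below gives every component of TheHeap
      -- a defined value, so information flow analysis sees the record as fully
      -- initialised before real vector storage is allocated and the content reset.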
TheHeap := Empty_Heap; -- Now allocate some storage Heap_Storage.Initialize (Initial_Heap_Capacity, TheHeap.ListOfAtoms); -- and reset the initial content Reset (TheHeap); end Initialize; -------------------------------------------------------------------- procedure CreateAtom (TheHeap : in out HeapRecord; NewAtom : out Atom) is A : Atom; begin if TheHeap.NextFreeAtom /= NullAtom then -- There are atoms in the returned free list, -- so recycle the first Atom off the free list. A := TheHeap.NextFreeAtom; TheHeap.NextFreeAtom := Heap_Storage.Get_Element (TheHeap.ListOfAtoms, TheHeap.NextFreeAtom).PointerA; Heap_Storage.Set_Element (V => TheHeap.ListOfAtoms, Index => A, Value => Empty_Atom); NewAtom := A; elsif TheHeap.HighMark < Heap_Storage.Last_Index (TheHeap.ListOfAtoms) then -- Still rooom in the array no need to extend TheHeap.HighMark := TheHeap.HighMark + 1; A := TheHeap.HighMark; Heap_Storage.Set_Element (TheHeap.ListOfAtoms, A, Empty_Atom); NewAtom := A; elsif TheHeap.HighMark < Atom'Last then --All the current array elements have been used - extend by appending TheHeap.HighMark := TheHeap.HighMark + 1; A := TheHeap.HighMark; Heap_Storage.Append (TheHeap.ListOfAtoms, Empty_Atom); NewAtom := A; else -- TheHeap.HighMark = Atom'Last, so -- Array and returned atoms in free list both used up -- and set usage to 100% before exiting Statistics.SetTableUsage (Statistics.RelationTable, Integer (Atom'Last)); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Empty_Heap, Msg => "in Heap.CreateAtom"); NewAtom := NullAtom; end if; exception --# hide CreateAtom; when Storage_Error => -- Heap_Storage.Append really has run out of memory Statistics.SetTableUsage (Statistics.RelationTable, Integer (Atom'Last)); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Empty_Heap, Msg => "in Heap.CreateAtom - Storage_Error in attempt to extend Heap"); NewAtom := NullAtom; when E : others => -- Something else has gone wrong Statistics.SetTableUsage (Statistics.RelationTable, Integer (Atom'Last)); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Empty_Heap, Msg => "in Heap.CreateAtom - " & Ada.Exceptions.Exception_Name (E) & " - " & Ada.Exceptions.Exception_Message (E)); NewAtom := NullAtom; end CreateAtom; -------------------------------------------------------------------- procedure DisposeOfAtom (TheHeap : in out HeapRecord; OldAtom : in Atom) is The_Atom : Heap_Storage.Atom_Descriptor; begin The_Atom := Heap_Storage.Get_Element (TheHeap.ListOfAtoms, OldAtom); The_Atom.PointerA := TheHeap.NextFreeAtom; Heap_Storage.Set_Element (TheHeap.ListOfAtoms, OldAtom, The_Atom); TheHeap.NextFreeAtom := OldAtom; end DisposeOfAtom; -------------------------------------------------------------------- function APointer (TheHeap : HeapRecord; A : Atom) return Atom is begin return Heap_Storage.Get_Element (TheHeap.ListOfAtoms, A).PointerA; end APointer; -------------------------------------------------------------------- function BPointer (TheHeap : HeapRecord; A : Atom) return Atom is begin return Heap_Storage.Get_Element (TheHeap.ListOfAtoms, A).PointerB; end BPointer; -------------------------------------------------------------------- function AValue (TheHeap : HeapRecord; A : Atom) return Natural is begin return Heap_Storage.Get_Element (TheHeap.ListOfAtoms, A).ValueA; end AValue; -------------------------------------------------------------------- function BValue (TheHeap : HeapRecord; A : Atom) return Natural is begin return Heap_Storage.Get_Element (TheHeap.ListOfAtoms, A).ValueB; end BValue; 
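   -- Illustrative sketch (hypothetical client code, not part of this package) of
   -- the allocation protocol implemented by CreateAtom and DisposeOfAtom above:
   -- disposed atoms are chained onto the free list through their PointerA field
   -- and are recycled before HighMark is advanced or the vector is extended.
   --
   --    CreateAtom (TheHeap, A);       -- fresh atom: HighMark advances
   --    CreateAtom (TheHeap, B);
   --    DisposeOfAtom (TheHeap, A);    -- A becomes the head of the free list
   --    CreateAtom (TheHeap, C);       -- C reuses A's slot; HighMark unchanged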
-------------------------------------------------------------------- procedure UpdateAPointer (TheHeap : in out HeapRecord; A, Pointer : in Atom) is The_Atom : Heap_Storage.Atom_Descriptor; begin The_Atom := Heap_Storage.Get_Element (TheHeap.ListOfAtoms, A); The_Atom.PointerA := Pointer; Heap_Storage.Set_Element (TheHeap.ListOfAtoms, A, The_Atom); end UpdateAPointer; -------------------------------------------------------------------- procedure UpdateBPointer (TheHeap : in out HeapRecord; A, Pointer : in Atom) is The_Atom : Heap_Storage.Atom_Descriptor; begin The_Atom := Heap_Storage.Get_Element (TheHeap.ListOfAtoms, A); The_Atom.PointerB := Pointer; Heap_Storage.Set_Element (TheHeap.ListOfAtoms, A, The_Atom); end UpdateBPointer; -------------------------------------------------------------------- procedure UpdateAValue (TheHeap : in out HeapRecord; A : in Atom; Value : in Natural) is The_Atom : Heap_Storage.Atom_Descriptor; begin The_Atom := Heap_Storage.Get_Element (TheHeap.ListOfAtoms, A); The_Atom.ValueA := Value; Heap_Storage.Set_Element (TheHeap.ListOfAtoms, A, The_Atom); end UpdateAValue; -------------------------------------------------------------------- procedure UpdateBValue (TheHeap : in out HeapRecord; A : in Atom; Value : in Natural) is The_Atom : Heap_Storage.Atom_Descriptor; begin The_Atom := Heap_Storage.Get_Element (TheHeap.ListOfAtoms, A); The_Atom.ValueB := Value; Heap_Storage.Set_Element (TheHeap.ListOfAtoms, A, The_Atom); end UpdateBValue; -------------------------------------------------------------------- function IsNullPointer (A : Atom) return Boolean is begin return A = NullAtom; end IsNullPointer; procedure ReportUsage (TheHeap : in HeapRecord) is begin -- As the heap now uses the free list before increasing HighMark, -- the max usage is HighMark Statistics.SetTableUsage (Statistics.RelationTable, Integer (TheHeap.HighMark)); end ReportUsage; end Heap; spark-2012.0.deb/examiner/sem-walk_expression_p-null_type_record.adb0000644000175000017500000000373311753202336024614 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Walk_Expression_P) function Null_Type_Record return Sem.Exp_Record is begin return Sem.Exp_Record' (Type_Symbol => Dictionary.GetUnknownTypeMark, Other_Symbol => Dictionary.NullSymbol, Stream_Symbol => Dictionary.NullSymbol, Tagged_Parameter_Symbol => Dictionary.NullSymbol, Variable_Symbol => Dictionary.NullSymbol, Param_Count => 0, Param_List => Lists.Null_List, Sort => Sem.Type_Result, Arg_List_Found => False, Is_AVariable => False, Is_An_Entire_Variable => False, Errors_In_Expression => False, Has_Operators => False, Is_Static => False, Is_Constant => False, Is_ARange => False, String_Value => LexTokenManager.Null_String, Value => Maths.NoValue, Range_RHS => Maths.NoValue); end Null_Type_Record; spark-2012.0.deb/examiner/fatal.ads0000644000175000017500000000226711753202336016040 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= package Fatal is Static_Limit : exception; --table overflows Operating_System_Limit : exception; --file handles etc Index_Manager : exception; --index manager fatal error Internal_Error : exception; --anything else end Fatal; spark-2012.0.deb/examiner/spark.idx0000644000175000017500000000003211753202337016073 0ustar eugeneugensuperindex is in main.idx spark-2012.0.deb/examiner/sem-add_record_sub_components.adb0000644000175000017500000000756611753202336022725 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem) procedure Add_Record_Sub_Components (Record_Var_Sym : in Dictionary.Symbol; Record_Type_Sym : in Dictionary.Symbol; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) is Type_Component : Dictionary.Symbol; Var_Component : Dictionary.Symbol; Prefix_Component_Node : ComponentManager.Component; Current_Record_Var : Dictionary.Symbol; Current_Record_Type : Dictionary.Symbol; Number_Of_Non_Extended_Components : Natural; begin Current_Record_Var := Record_Var_Sym; Current_Record_Type := Record_Type_Sym; if Dictionary.IsSubtype (Current_Record_Type) then Current_Record_Type := Dictionary.GetRootType (Current_Record_Type); end if; Prefix_Component_Node := ComponentManager.GetComponentNode (Component_Data, Current_Record_Var); -- For non tagged records the outer loop only runs once. For extended tagged -- records we loop through all the inherited fields and add them too. -- A private extended tagged type where the private part is hidden -- might result in Prefix_Component_Node being Null here, in which -- case nothing can be done. Number_Of_Non_Extended_Components := Dictionary.GetNumberOfNonExtendedComponents (Current_Record_Type); loop for I in Natural range 1 .. Number_Of_Non_Extended_Components loop Type_Component := Dictionary.GetNonExtendedRecordComponent (Current_Record_Type, I); Dictionary.AddRecordSubcomponent (Prefix => Current_Record_Var, Component => Type_Component, Comp_Unit => ContextManager.Ops.Current_Unit, Subcomponent => Var_Component); Prefix_Component_Node := ComponentManager.GetComponentNode (Component_Data, Current_Record_Var); ComponentManager.AddNextChild (Component_Data, The_Heap, Prefix_Component_Node, Var_Component); end loop; Current_Record_Type := Dictionary.GetRootOfExtendedType (Current_Record_Type); -- stop when there are no more inherited fields exit when Dictionary.Is_Null_Symbol (Current_Record_Type); -- if the record has no components (perhaps it is a private type and the private -- part is hidden) then we cannot add any subcomponents Number_Of_Non_Extended_Components := Dictionary.GetNumberOfNonExtendedComponents (Current_Record_Type); exit when Number_Of_Non_Extended_Components = 0; -- Similarly, if the Prefix_Component_Node is null, then we -- can't find its FirstChild, so we must be done. exit when ComponentManager.IsNullComponent (Prefix_Component_Node); Current_Record_Var := ComponentManager.GetName (Component_Data, ComponentManager.GetFirstChild (Component_Data, Prefix_Component_Node)); end loop; ComponentManager.Dump_All_Component_Trees (Data => Component_Data); end Add_Record_Sub_Components; spark-2012.0.deb/examiner/sem-compunit-up_wf_subprogram_body.adb0000644000175000017500000003022211753202336023734 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details.
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.CompUnit) procedure Up_Wf_Subprogram_Body (Node : in STree.SyntaxNode; Scope : in out Dictionary.Scopes) is Ident_Node, End_Desig_Node : STree.SyntaxNode; Ident_Str : LexTokenManager.Lex_String; SubProg_Sym : Dictionary.Symbol; Abstraction : Dictionary.Abstractions; --------------------------------------------------------- procedure Check_Global_Imports_Are_Initialized (Sym : in Dictionary.Symbol; Node_Pos : in LexTokenManager.Token_Position; Abstraction : in Dictionary.Abstractions) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# SPARK_IO.File_Sys, --# Sym; is It : Dictionary.Iterator; Var_Sym : Dictionary.Symbol; begin It := Dictionary.FirstGlobalVariable (Abstraction, Sym); while not Dictionary.IsNullIterator (It) loop Var_Sym := Dictionary.CurrentSymbol (It); if Dictionary.IsImport (Abstraction, Sym, Var_Sym) and then not Dictionary.OwnVariableIsInitialized (Var_Sym) and then Dictionary.GetOwnVariableOrConstituentMode (Var_Sym) = Dictionary.DefaultMode then ErrorHandler.Semantic_Error (Err_Num => 167, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (Var_Sym)); end if; It := Dictionary.NextSymbol (It); end loop; end Check_Global_Imports_Are_Initialized; --------------------------------------------------------- procedure Main_Program_Ceiling_Priority_Check (Sym : in Dictionary.Symbol; Node_Pos : in LexTokenManager.Token_Position; Abstraction : in Dictionary.Abstractions) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Scope; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# Scope, --# SPARK_IO.File_Sys, --# Sym; is It : Dictionary.Iterator; Main_Program_Priority_Lex : LexTokenManager.Lex_String; begin if Dictionary.MainProgramPrioritySupplied then Main_Program_Priority_Lex := Dictionary.GetMainProgramPriority; if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Main_Program_Priority_Lex, Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq then -- We have a valid value for the priority. This will have been range-checked if -- Priority has been supplied in the Config file. We can do the ceiling check -- irrespective of whether the range check was performed or not, as long as the -- priority values are known not to be out of any supplied range. -- The Lex value was created using StorageRep in CheckPriorityPragma, so we can -- convert it back to a Value using ValueRep. 
It := Dictionary.FirstGlobalVariable (Abstraction, Sym); Check_Ceiling_Priority (Sym => Sym, Scope => Scope, Check_List => It, Priority_Lex_Value => Main_Program_Priority_Lex, Error_Node_Pos => Node_Pos); else -- An out of range Priority value was supplied for Main. This will have already -- been reported as a semantic error, so we don't need any further errors or -- warnings here, but of course we can't do the ceiling check. null; end if; elsif Dictionary.BodyIsHidden (Sym) then -- Pragma priority may be there but is unavailable. ErrorHandler.Semantic_Warning (Err_Num => 311, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (Sym)); else -- "A pragma Priority is required for the main program" ErrorHandler.Semantic_Error (Err_Num => 933, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => LexTokenManager.Null_String); end if; end Main_Program_Ceiling_Priority_Check; --------------------------------------------------------- procedure Check_Delay_Property_Accounted_For (Proc_Or_Task : in Dictionary.Symbol; Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# Proc_Or_Task, --# SPARK_IO.File_Sys; is begin if Dictionary.HasDelayProperty (Proc_Or_Task) and then (not Dictionary.DelayPropertyIsAccountedFor (Proc_Or_Task)) and then (not Dictionary.BodyIsHidden (Proc_Or_Task)) then ErrorHandler.Semantic_Error (Err_Num => 915, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (Proc_Or_Task)); end if; end Check_Delay_Property_Accounted_For; begin -- Up_Wf_Subprogram_Body SubProg_Sym := Dictionary.GetRegion (Scope); -- determine which annotation to use Abstraction := Dictionary.GetAbstraction (SubProg_Sym, Scope); -- If the overriding_indicator is present then the Ident_Node is the -- Last_Child_Of the next sibling of the -- overriding_indicator node (Child_Node (Node)). 
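-- Sketch of the two syntax-tree shapes handled below (illustrative only; the
-- authoritative grammar lives in SP_Symbols):
--
--    subprogram_body --> procedure_specification | function_specification
--    subprogram_body --> overriding_indicator,
--                        procedure_specification | function_specification
--
-- In both cases the defining identifier is reached by applying Last_Child_Of
-- to the specification node.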
Ident_Node := Child_Node (Current_Node => Node); -- ASSUME Ident_Node = overriding_indicator OR procedure_specification OR function_specification if Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.overriding_indicator then -- ASSUME Ident_Node = overriding_indicator Ident_Node := Next_Sibling (Current_Node => Ident_Node); elsif Syntax_Node_Type (Node => Ident_Node) /= SP_Symbols.procedure_specification and then Syntax_Node_Type (Node => Ident_Node) /= SP_Symbols.function_specification then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = overriding_indicator OR " & "procedure_specification OR function_specification in Up_Wf_Subprogram_Body"); end if; -- ASSUME Ident_Node = procedure_specification OR function_specification SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.procedure_specification or else Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.function_specification, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = procedure_specification OR function_specification in Up_Wf_Subprogram_Body"); Ident_Node := Last_Child_Of (Start_Node => Ident_Node); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Up_Wf_Subprogram_Body"); Ident_Str := Node_Lex_String (Node => Ident_Node); End_Desig_Node := Last_Sibling_Of (Start_Node => Child_Node (Current_Node => Last_Sibling_Of (Start_Node => Child_Node (Current_Node => Node)))); -- ASSUME End_Desig_Node = designator OR hidden_part if Syntax_Node_Type (Node => End_Desig_Node) = SP_Symbols.designator then -- ASSUME End_Desig_Node = designator if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => Node_Lex_String (Node => Child_Node (End_Desig_Node))) /= LexTokenManager.Str_Eq then ErrorHandler.Semantic_Error (Err_Num => 58, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => End_Desig_Node), Id_Str => Ident_Str); end if; elsif Syntax_Node_Type (Node => End_Desig_Node) /= SP_Symbols.hidden_part then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect End_Desig_Node = designator OR hidden_part in Up_Wf_Subprogram_Body"); end if; if Dictionary.IsMainProgram (SubProg_Sym) then -- check that global imports are initialized has been done in -- wf_dependency_clause for procedure main programs but a check is -- needed here for the (very unlikely) case of a function main prog if Dictionary.IsFunction (SubProg_Sym) then Check_Global_Imports_Are_Initialized (Sym => SubProg_Sym, Node_Pos => Node_Position (Node => End_Desig_Node), Abstraction => Abstraction); end if; if CommandLineData.Ravenscar_Selected then -- For Ravenscar, perform the ceiling priority check for the main program PO calls. 
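-- Illustrative note: under ceiling locking the caller's active priority must
-- not exceed the ceiling priority of any protected object it calls, so e.g. a
-- main program with pragma Priority (10) may only call protected objects whose
-- ceiling is 10 or higher. The actual check is performed by
-- Check_Ceiling_Priority, called from Main_Program_Ceiling_Priority_Check above.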
Main_Program_Ceiling_Priority_Check (Sym => SubProg_Sym, Node_Pos => Node_Position (Node => Node), Abstraction => Abstraction); end if; end if; CheckEmbedBodies (Comp_Sym => SubProg_Sym, Node_Pos => Node_Position (Node => End_Desig_Node)); Check_Delay_Property_Accounted_For (Proc_Or_Task => SubProg_Sym, Node_Pos => Node_Position (Node => Node)); CheckSuspendsListAccountedFor (Proc_Or_Task => SubProg_Sym, Node_Pos => Node_Position (Node => Node)); Scope := Dictionary.GetEnclosingScope (Scope); end Up_Wf_Subprogram_Body; spark-2012.0.deb/examiner/sem-aggregate_stack.adb0000644000175000017500000002211211753202336020614 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) package body Aggregate_Stack --# own State is S, --# Top_Ptr; is type Typ_Entry is record Type_Sym : Dictionary.Symbol; Lower_Bound : Sem.Typ_Type_Bound; Upper_Bound : Sem.Typ_Type_Bound; Agg_Flags : Sem.Typ_Agg_Flags; Counter : Natural; Complete_Rec : CompleteCheck.T; end record; Null_Entry : constant Typ_Entry := Typ_Entry' (Type_Sym => Dictionary.NullSymbol, Lower_Bound => Sem.Unknown_Type_Bound, Upper_Bound => Sem.Unknown_Type_Bound, Agg_Flags => Sem.Null_Typ_Agg_Flags, Counter => Natural'First, Complete_Rec => CompleteCheck.NullT); subtype Index_Range is Integer range 1 .. ExaminerConstants.AggregateStackMax; type Stack_Array is array (Index_Range) of Typ_Entry; subtype Top_Range is Integer range 0 .. ExaminerConstants.AggregateStackMax; S : Stack_Array; Top_Ptr : Top_Range; procedure Init --# global in Dictionary.Dict; --# out S; --# out Top_Ptr; --# derives S, --# Top_Ptr from & --# null from Dictionary.Dict; --# post (for all I in Index_Range range Index_Range'First .. 
Top_Ptr => --# ((Dictionary.Is_Null_Symbol (S(I).Type_Sym) or Dictionary.IsTypeMark (S(I).Type_Sym, Dictionary.Dict)) and --# ((S(I).Lower_Bound.Is_Defined and S(I).Upper_Bound.Is_Defined) -> --# (S(I).Lower_Bound.Value <= S(I).Upper_Bound.Value)) and --# (S(I).Complete_Rec.ActualUpperBound - S(I).Complete_Rec.LowerBound < ExaminerConstants.CompleteCheckSize))); is begin Top_Ptr := 0; S := Stack_Array'(others => Null_Entry); --# accept Flow, 30, Dictionary.Dict, "Variable not referenced nor exported OK"; end Init; procedure Push (Type_Sym : in Dictionary.Symbol; Lower_Bound : in Sem.Typ_Type_Bound; Upper_Bound : in Sem.Typ_Type_Bound; Agg_Flags : in Sem.Typ_Agg_Flags; Counter : in Natural; Complete_Rec : in CompleteCheck.T) --# global in Dictionary.Dict; --# in out S; --# in out Top_Ptr; --# derives S from *, --# Agg_Flags, --# Complete_Rec, --# Counter, --# Lower_Bound, --# Top_Ptr, --# Type_Sym, --# Upper_Bound & --# Top_Ptr from * & --# null from Dictionary.Dict; --# pre (for all I in Index_Range range Index_Range'First .. Top_Ptr => --# ((Dictionary.Is_Null_Symbol (S(I).Type_Sym) or Dictionary.IsTypeMark (S(I).Type_Sym, Dictionary.Dict)) and --# ((S(I).Lower_Bound.Is_Defined and S(I).Upper_Bound.Is_Defined) -> --# (S(I).Lower_Bound.Value <= S(I).Upper_Bound.Value)) and --# (S(I).Complete_Rec.ActualUpperBound - S(I).Complete_Rec.LowerBound < ExaminerConstants.CompleteCheckSize))) and --# (Dictionary.Is_Null_Symbol (Type_Sym) or Dictionary.IsTypeMark (Type_Sym, Dictionary.Dict)) and --# ((Lower_Bound.Is_Defined and Upper_Bound.Is_Defined) -> (Lower_Bound.Value <= Upper_Bound.Value)) and --# (Complete_Rec.ActualUpperBound - Complete_Rec.LowerBound < ExaminerConstants.CompleteCheckSize); --# post (for all I in Index_Range range Index_Range'First .. Top_Ptr => --# ((Dictionary.Is_Null_Symbol (S(I).Type_Sym) or Dictionary.IsTypeMark (S(I).Type_Sym, Dictionary.Dict)) and --# ((S(I).Lower_Bound.Is_Defined and S(I).Upper_Bound.Is_Defined) -> --# (S(I).Lower_Bound.Value <= S(I).Upper_Bound.Value)) and --# (S(I).Complete_Rec.ActualUpperBound - S(I).Complete_Rec.LowerBound < ExaminerConstants.CompleteCheckSize))); is begin if Top_Ptr = ExaminerConstants.AggregateStackMax then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Aggregate_Stack_Over_Flow, Msg => "in Aggregate_Stack.Push"); end if; --# check Top_Ptr < ExaminerConstants.AggregateStackMax; Top_Ptr := Top_Ptr + 1; S (Top_Ptr) := Typ_Entry' (Type_Sym => Type_Sym, Lower_Bound => Lower_Bound, Upper_Bound => Upper_Bound, Agg_Flags => Agg_Flags, Counter => Counter, Complete_Rec => Complete_Rec); --# accept Flow, 30, Dictionary.Dict, "Variable not referenced nor exported OK"; end Push; procedure Pop (Type_Sym : out Dictionary.Symbol; Lower_Bound : out Sem.Typ_Type_Bound; Upper_Bound : out Sem.Typ_Type_Bound; Agg_Flags : out Sem.Typ_Agg_Flags; Counter : out Natural; Complete_Rec : out CompleteCheck.T) --# global in Dictionary.Dict; --# in S; --# in out Top_Ptr; --# derives Agg_Flags, --# Complete_Rec, --# Counter, --# Lower_Bound, --# Type_Sym, --# Upper_Bound from S, --# Top_Ptr & --# Top_Ptr from * & --# null from Dictionary.Dict; --# pre (for all I in Index_Range range Index_Range'First .. 
Top_Ptr => --# ((Dictionary.Is_Null_Symbol (S(I).Type_Sym) or Dictionary.IsTypeMark (S(I).Type_Sym, Dictionary.Dict)) and --# ((S(I).Lower_Bound.Is_Defined and S(I).Upper_Bound.Is_Defined) -> --# (S(I).Lower_Bound.Value <= S(I).Upper_Bound.Value)) and --# (S(I).Complete_Rec.ActualUpperBound - S(I).Complete_Rec.LowerBound < ExaminerConstants.CompleteCheckSize))); --# post (for all I in Index_Range range Index_Range'First .. Top_Ptr => --# ((Dictionary.Is_Null_Symbol (S(I).Type_Sym) or Dictionary.IsTypeMark (S(I).Type_Sym, Dictionary.Dict)) and --# ((S(I).Lower_Bound.Is_Defined and S(I).Upper_Bound.Is_Defined) -> --# (S(I).Lower_Bound.Value <= S(I).Upper_Bound.Value)) and --# (S(I).Complete_Rec.ActualUpperBound - S(I).Complete_Rec.LowerBound < ExaminerConstants.CompleteCheckSize))) and --# (Dictionary.Is_Null_Symbol (Type_Sym) or Dictionary.IsTypeMark (Type_Sym, Dictionary.Dict)) and --# ((Lower_Bound.Is_Defined and Upper_Bound.Is_Defined) -> (Lower_Bound.Value <= Upper_Bound.Value)) and --# (Complete_Rec.ActualUpperBound - Complete_Rec.LowerBound < ExaminerConstants.CompleteCheckSize); is begin if Top_Ptr = 0 then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Aggregate_Stack_Under_Flow, Msg => "in Aggregate_Stack.Pop"); end if; --# check Top_Ptr > 0; Type_Sym := S (Top_Ptr).Type_Sym; Lower_Bound := S (Top_Ptr).Lower_Bound; Upper_Bound := S (Top_Ptr).Upper_Bound; Agg_Flags := S (Top_Ptr).Agg_Flags; Counter := S (Top_Ptr).Counter; Complete_Rec := S (Top_Ptr).Complete_Rec; Top_Ptr := Top_Ptr - 1; --# accept Flow, 30, Dictionary.Dict, "Variable not referenced nor exported OK"; end Pop; function Top_Type_Sym return Dictionary.Symbol --# global in Dictionary.Dict; --# in S; --# in Top_Ptr; --# pre (for all I in Index_Range range Index_Range'First .. Top_Ptr => --# ((Dictionary.Is_Null_Symbol (S(I).Type_Sym) or Dictionary.IsTypeMark (S(I).Type_Sym, Dictionary.Dict)) and --# ((S(I).Lower_Bound.Is_Defined and S(I).Upper_Bound.Is_Defined) -> --# (S(I).Lower_Bound.Value <= S(I).Upper_Bound.Value)) and --# (S(I).Complete_Rec.ActualUpperBound - S(I).Complete_Rec.LowerBound < ExaminerConstants.CompleteCheckSize))); --# return Type_Sym => (Dictionary.Is_Null_Symbol (Type_Sym) or Dictionary.IsTypeMark (Type_Sym, Dictionary.Dict)); is Result : Dictionary.Symbol := Dictionary.NullSymbol; begin if Top_Ptr > 0 then Result := S (Top_Ptr).Type_Sym; end if; --# accept Flow, 30, Dictionary.Dict, "Variable not referenced nor exported OK" & --# Flow, 50, Dictionary.Dict, "Value is not derived from the imported value OK"; return Result; end Top_Type_Sym; end Aggregate_Stack; spark-2012.0.deb/examiner/relationalgebra-debug.ads0000644000175000017500000000443711753202336021171 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. 
If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- This package is for use in debugging the Examiner. It facilitates the -- printing of the flow relations generated during flow analysis. -- The package only need be with'd and inherited when debugging. -- The body of this package is not SPARK and need not be included in analysis. -- The entire body is hidden. --# inherit Heap, --# RelationAlgebra; package RelationAlgebra.Debug is -- Print out the Rho relation to standard output. procedure Print_Rho (Msg : in String; Rho : in RelationAlgebra.Relation; TheHeap : in Heap.HeapRecord); --# derives null from Msg, --# Rho, --# TheHeap; -- Print out the Mu relation to standard output. procedure Print_Mu (Msg : in String; Mu : in RelationAlgebra.Relation; TheHeap : in Heap.HeapRecord); --# derives null from Msg, --# Mu, --# TheHeap; -- Print out the Lambda relation to standard output. procedure Print_Lambda (Msg : in String; Lambda : in RelationAlgebra.Relation; TheHeap : in Heap.HeapRecord); --# derives null from Lambda, --# Msg, --# TheHeap; end RelationAlgebra.Debug; spark-2012.0.deb/examiner/sem-compunit-wf_machine_code_insertion.adb0000644000175000017500000000360511753202336024527 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= -- Node is SP_Symbols.machine_code_insertion -- If Scope is body of a function then machine code is illegal in SPARK 83 -- In all other cases, issue warning separate (Sem.CompUnit) procedure Wf_Machine_Code_Insertion (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) is Subprog_Sym : Dictionary.Symbol; begin Subprog_Sym := Dictionary.GetRegion (Scope); if CommandLineData.Content.Language_Profile = CommandLineData.SPARK83 and then Dictionary.IsFunction (Subprog_Sym) then ErrorHandler.Semantic_Error (Err_Num => 67, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); else ErrorHandler.Semantic_Warning (Err_Num => 6, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; end Wf_Machine_Code_Insertion; spark-2012.0.deb/examiner/dag.ads0000644000175000017500000002216511753202336015503 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Cells; with Dictionary; with Heap; with LexTokenManager; with SPARK_IO; with STree; use type Cells.Cell_Kind; use type Dictionary.Abstractions; use type Dictionary.Modes; use type Dictionary.Scopes; use type Dictionary.Symbol; use type Dictionary.Visibility; use type LexTokenManager.Str_Comp_Result; use type SPARK_IO.File_Status; use type STree.SyntaxNode; --# inherit Cells, --# Cells.Utility, --# Cells.Utility.List, --# Cell_Storage, --# Clists, --# CommandLineData, --# ContextManager, --# CStacks, --# DAG_IO, --# Debug, --# Dictionary, --# ErrorHandler, --# ExaminerConstants, --# E_Strings, --# FileSystem, --# Graph, --# Heap, --# Labels, --# LexTokenManager, --# Maths, --# Pairs, --# ScreenEcho, --# SeqAlgebra, --# SPARK_IO, --# SP_Symbols, --# Statistics, --# StmtStack, --# STree, --# Structures, --# Symbol_Set, --# SystemErrors; package DAG is -- Traverses the Syntax Tree of a subprogram and produces a complete -- labelled Basic Path Graph in the state Graph.Table procedure BuildGraph (StartNode : in STree.SyntaxNode; SubprogSym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; OutputFile : in SPARK_IO.File_Type; EndPosition : in LexTokenManager.Token_Position; VCGFailure : in out Boolean; VCGHeap : in out Cells.Heap_Record; FlowHeap : in out Heap.HeapRecord; Semantic_Error_In_Subprogram : in Boolean; DataFlowErrorInSubprogram : in Boolean; Type_Check_Exports : in Boolean); --# global in CommandLineData.Content; --# in STree.Table; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# out StmtStack.S; --# derives Dictionary.Dict, --# Graph.Table, --# LexTokenManager.State, --# Statistics.TableUsage, --# VCGFailure, --# VCGHeap from *, --# CommandLineData.Content, --# DataFlowErrorInSubprogram, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# LexTokenManager.State, --# Scope, --# Semantic_Error_In_Subprogram, --# StartNode, --# STree.Table, --# SubprogSym, --# Type_Check_Exports, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# DataFlowErrorInSubprogram, --# Dictionary.Dict, --# EndPosition, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# LexTokenManager.State, --# OutputFile, --# Scope, --# Semantic_Error_In_Subprogram, --# SPARK_IO.File_Sys, --# StartNode, --# STree.Table, --# SubprogSym, --# Type_Check_Exports, --# VCGHeap & --# FlowHeap, --# StmtStack.S from CommandLineData.Content, --# DataFlowErrorInSubprogram, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# LexTokenManager.State, --# Scope, --# Semantic_Error_In_Subprogram, --# StartNode, --# STree.Table, --# SubprogSym, --# VCGHeap; -- Builds a DAG representing the expression (which might -- be a complete aggregate) that initializes a constant. This is -- used to build the FDL replacement rule for composite constants, -- as controlled by the -rules switch. 
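-- For example (hypothetical user code, for illustration only), given
--
--    type Pair is record X, Y : Integer; end record;
--    Origin : constant Pair := Pair'(X => 0, Y => 0);
--
-- the DAG built here represents the aggregate Pair'(X => 0, Y => 0), from
-- which a replacement rule for Origin can later be generated.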
procedure BuildConstantInitializationDAG (StartNode : in STree.SyntaxNode; Scope : in Dictionary.Scopes; TheHeap : in out Cells.Heap_Record; FlowHeap : in out Heap.HeapRecord; DAGRoot : out Cells.Cell); --# global in CommandLineData.Content; --# in STree.Table; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# derives DAGRoot, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# LexTokenManager.State, --# StmtStack.S, --# TheHeap from CommandLineData.Content, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# LexTokenManager.State, --# Scope, --# StartNode, --# StmtStack.S, --# STree.Table, --# TheHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# StartNode, --# StmtStack.S, --# STree.Table, --# TheHeap & --# Statistics.TableUsage from *, --# CommandLineData.Content, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# LexTokenManager.State, --# Scope, --# StartNode, --# StmtStack.S, --# STree.Table, --# TheHeap; end DAG; spark-2012.0.deb/examiner/sem-compunit-up_wf_task_body.adb0000644000175000017500000002603111753202336022520 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.CompUnit) procedure Up_Wf_Task_Body (Node : in STree.SyntaxNode; Scope : in out Dictionary.Scopes) is Task_Sym : Dictionary.Symbol; End_Node : STree.SyntaxNode; procedure Check_For_Endless_Loop (Node : in STree.SyntaxNode; Task_Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table, --# Task_Sym; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.declarative_part or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.sequence_of_statements; is Current_Node : STree.SyntaxNode; Last_Loop : Dictionary.Symbol; function Last_Statement (Node : STree.SyntaxNode) return STree.SyntaxNode --# global in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.declarative_part or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.sequence_of_statements; --# return Result => Syntax_Node_Type (Result, STree.Table) = SP_Symbols.statement; is Result : STree.SyntaxNode; begin Result := Node; if Syntax_Node_Type (Node => Result) = SP_Symbols.declarative_part then -- ASSUME Result = declarative_part Result := Next_Sibling (Current_Node => Result); end if; -- ASSUME Result = sequence_of_statements SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Result) = SP_Symbols.sequence_of_statements, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = sequence_of_statements in Last_Statement"); Result := Child_Node (Current_Node => Result); -- ASSUME Result = sequence_of_statements OR statement if Syntax_Node_Type (Node => Result) = SP_Symbols.sequence_of_statements then -- ASSUME Result = sequence_of_statements if Syntax_Node_Type (Node => Child_Node (Current_Node => Next_Sibling (Current_Node => Result))) = SP_Symbols.justification_statement then -- ASSUME Child_Node (Current_Node => Next_Sibling (Current_Node => Result)) = justification_statement Result := Child_Node (Current_Node => Result); -- ASSUME Result = sequence_of_statements OR statement SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Result) = SP_Symbols.sequence_of_statements or else Syntax_Node_Type (Node => Result) = SP_Symbols.statement, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = sequence_of_statements OR statement in Last_Statement"); end if; elsif Syntax_Node_Type (Node => Result) /= SP_Symbols.statement then Result := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = sequence_of_statements OR statement in Last_Statement"); end if; --# check Syntax_Node_Type (Result, STree.Table) = SP_Symbols.sequence_of_statements or --# Syntax_Node_Type (Result, STree.Table) = SP_Symbols.statement; if Syntax_Node_Type (Node => Result) = SP_Symbols.sequence_of_statements then -- ASSUME Result = sequence_of_statements Result := Next_Sibling (Current_Node => Result); -- ASSUME Result = statement SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Result) = SP_Symbols.statement, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = statement in Last_Statement"); end if; return Result; end Last_Statement; begin -- Check_For_Endless_Loop Current_Node 
:= Last_Statement (Node => Node); --# check Syntax_Node_Type (Current_Node, STree.Table) = SP_Symbols.statement; Current_Node := Child_Node (Current_Node => Current_Node); -- ASSUME Current_Node = sequence_of_labels OR simple_statement OR compound_statement OR -- proof_statement OR justification_statement OR apragma if Syntax_Node_Type (Node => Current_Node) = SP_Symbols.sequence_of_labels then -- ASSUME Current_Node = sequence_of_labels Current_Node := Next_Sibling (Current_Node => Current_Node); elsif Syntax_Node_Type (Node => Current_Node) /= SP_Symbols.simple_statement and then Syntax_Node_Type (Node => Current_Node) /= SP_Symbols.compound_statement and then Syntax_Node_Type (Node => Current_Node) /= SP_Symbols.proof_statement and then Syntax_Node_Type (Node => Current_Node) /= SP_Symbols.justification_statement and then Syntax_Node_Type (Node => Current_Node) /= SP_Symbols.apragma then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = sequence_of_labels OR simple_statement OR compound_statement OR " & "proof_statement OR justification_statement OR apragma in Check_For_Endless_Loop"); end if; -- ASSUME Current_Node = simple_statement OR compound_statement OR -- proof_statement OR justification_statement OR apragma if Syntax_Node_Type (Node => Current_Node) = SP_Symbols.compound_statement then -- ASSUME Current_Node = compound_statement Current_Node := Child_Node (Current_Node => Current_Node); -- ASSUME Current_Node = if_statement OR case_statement OR loop_statement if Syntax_Node_Type (Node => Current_Node) = SP_Symbols.loop_statement then -- ASSUME Current_Node = loop_statement -- check loop has no exits Last_Loop := Dictionary.LastMostEnclosingLoop (Task_Sym); if not Dictionary.Is_Null_Symbol (Last_Loop) and then Dictionary.GetLoopHasExits (Last_Loop) then ErrorHandler.Semantic_Error (Err_Num => 989, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Last_Sibling_Of (Start_Node => Node)), Id_Str => LexTokenManager.Null_String); end if; elsif Syntax_Node_Type (Node => Current_Node) = SP_Symbols.if_statement or else Syntax_Node_Type (Node => Current_Node) = SP_Symbols.case_statement then -- ASSUME Current_Node = if_statement OR case_statement -- last statement not a loop ErrorHandler.Semantic_Error (Err_Num => 989, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Last_Sibling_Of (Start_Node => Node)), Id_Str => LexTokenManager.Null_String); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = if_statement OR case_statement OR loop_statement in Check_For_Endless_Loop"); end if; elsif Syntax_Node_Type (Node => Current_Node) = SP_Symbols.simple_statement or else Syntax_Node_Type (Node => Current_Node) = SP_Symbols.proof_statement or else Syntax_Node_Type (Node => Current_Node) = SP_Symbols.justification_statement or else Syntax_Node_Type (Node => Current_Node) = SP_Symbols.apragma then -- ASSUME Current_Node = simple_statement OR proof_statement OR justification_statement OR apragma -- last statement not a loop ErrorHandler.Semantic_Error (Err_Num => 989, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Last_Sibling_Of (Start_Node => Node)), Id_Str => LexTokenManager.Null_String); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = simple_statement OR compound_statement OR " & "proof_statement OR justification_statement OR apragma in 
Check_For_Endless_Loop"); end if; end Check_For_Endless_Loop; begin -- Up_Wf_Task_Body Task_Sym := Dictionary.GetRegion (Scope); End_Node := Next_Sibling (Current_Node => Child_Node (Current_Node => Next_Sibling (Current_Node => Next_Sibling (Current_Node => Child_Node (Current_Node => Node))))); -- ASSUME End_Node = declarative_part OR sequence_of_statements OR code_insertion OR hidden_part if Syntax_Node_Type (Node => End_Node) = SP_Symbols.declarative_part or else Syntax_Node_Type (Node => End_Node) = SP_Symbols.sequence_of_statements then -- ASSUME End_Node = declarative_part OR sequence_of_statements Check_For_Endless_Loop (Node => End_Node, Task_Sym => Task_Sym); elsif Syntax_Node_Type (Node => End_Node) /= SP_Symbols.code_insertion and then Syntax_Node_Type (Node => End_Node) /= SP_Symbols.hidden_part then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect End_Node = declarative_part OR sequence_of_statements OR " & "code_insertion OR hidden_part in Up_Wf_Task_Body"); end if; CheckSuspendsListAccountedFor (Proc_Or_Task => Task_Sym, Node_Pos => Node_Position (Node => Node)); -- step out to enclosing scope for continued tree walk Scope := Dictionary.GetEnclosingScope (Scope); end Up_Wf_Task_Body; spark-2012.0.deb/examiner/flows.smf0000644000175000017500000000076111753202337016117 0ustar eugeneugenflowanalyser.adb -vcg flowanalyser-ifa_stack.adb -vcg flowanalyser-flowanalyse.adb flowanalyser-flowanalyse-analyserelations.adb flowanalyser-flowanalyse-analyserelations-checkexpressions.adb flowanalyser-flowanalyse-analyserelations-checkusages.adb -vcg flowanalyser-flowanalyse-analyserelations-checkdependencies.adb -vcg flowanalyser-flowanalyse-analyserelations-checkunused.adb -vcg flowanalyser-flowanalyse-analyserelations-mergeandhandleerrors.adb flowanalyser-flowanalysepartition.adb -vcg spark-2012.0.deb/examiner/sem-walk_expression_p-null_parameter_record.adb0000644000175000017500000000375211753202336025614 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Walk_Expression_P) function Null_Parameter_Record return Sem.Exp_Record is begin return Sem.Exp_Record' (Type_Symbol => Dictionary.GetUnknownTypeMark, Other_Symbol => Dictionary.NullSymbol, Stream_Symbol => Dictionary.NullSymbol, Tagged_Parameter_Symbol => Dictionary.NullSymbol, Variable_Symbol => Dictionary.NullSymbol, Param_Count => 0, Param_List => Lists.Null_List, Sort => Sem.Is_Parameter_Name, Arg_List_Found => False, Is_AVariable => False, Is_An_Entire_Variable => False, Errors_In_Expression => True, Has_Operators => False, Is_Static => False, Is_Constant => False, Is_ARange => False, String_Value => LexTokenManager.Null_String, Value => Maths.NoValue, Range_RHS => Maths.NoValue); end Null_Parameter_Record; spark-2012.0.deb/examiner/spparser.ads0000644000175000017500000000511411753202336016602 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with SPARK_IO; --# inherit CommandLineData, --# Dictionary, --# ErrorHandler, --# ExaminerConstants, --# LexTokenManager, --# SparkLex, --# SPARK_IO, --# SP_Expected_Symbols, --# SP_Parser_Actions, --# SP_Parser_Goto, --# SP_Productions, --# SP_Relations, --# SP_Symbols, --# STree, --# SystemErrors; package SPParser is procedure SPParse (ProgText : in SPARK_IO.File_Type; MaxStackSize : out Natural; FileEnd : out Boolean); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# FileEnd, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# STree.Table from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# ProgText, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# STree.Table & --# MaxStackSize from ; end SPParser; spark-2012.0.deb/examiner/errorhandler-conversions-tostring-usageerror-usageerrorexpl.adb0000644000175000017500000000471011753202337031051 0ustar eugeneugenseparate (ErrorHandler.Conversions.ToString.UsageError) procedure UsageErrorExpl (E_Str : in out E_Strings.T) is begin case Err_Type is when ErrorHandler.Redefined_Import => E_Strings.Append_String (E_Str => E_Str, Str => "The updating of imported-only variables is forbidden under all" & " circumstances."); when ErrorHandler.Ineffective_Import => E_Strings.Append_String (E_Str => E_Str, Str => "The meaning of this message is explained in Section 4.2 of Appendix A"); when ErrorHandler.Referenced_But_Not_In_Partition => E_Strings.Append_String (E_Str => E_Str, Str => "This message is only issued when processing the partition annotation. The partition annotation" & " must describe all the actions of the tasks and interrupt handlers making up the program. Therefore," & " if a variable is imported somewhere in the program by a task or interrupt handler, then it must" & " also be an import at the partition level. As well as the omission of explicit imports, this message is also" & " generated if the implicit imports of tasks and interrupt handlers are omitted. For tasks this means" & " any variable the task suspends on and for interrupt handlers it means the name of the protected" & " object containing the handler or, if given, the name of the interrupt stream associated with the" & " handler."); when ErrorHandler.Updated_But_Not_In_Partition => E_Strings.Append_String (E_Str => E_Str, Str => "This message is only issued when processing the partition annotation. The partition annotation" & " must describe all the actions of the tasks and interrupt handlers making up the program. 
Therefore," & " if a variable is exported somewhere in the program by a task or interrupt handler, then it must" & " also be an export at the partition level."); when ErrorHandler.Uninitialized_Protected_Element => E_Strings.Append_String (E_Str => E_Str, Str => "To avoid potential race conditions during program startup, all" & " elements of a protected type must be initialized with a constant value" & " at the point of declaration."); when others => null; end case; end UsageErrorExpl; spark-2012.0.deb/examiner/errorhandler-echoerrorentry.adb0000644000175000017500000002035611753202336022466 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (ErrorHandler) procedure EchoErrorEntry (Echo_File : in SPARK_IO.File_Type; Error : in Error_Types.StringError) is subtype Big_String_I is Positive range 1 .. 1024; subtype Big_String is String (Big_String_I); Source_File_Name : Big_String; Source_File_Stop : Natural; Source_Filename : E_Strings.T; Err_Struct : Error_Struct; New_Start : Natural; Was_Active : Boolean; Line_Length : Natural; New_Line_Count : Natural; Local_Error : Error_Types.StringError; Explanation : E_Strings.T; procedure Echo_Source_Line (Echo_File : in SPARK_IO.File_Type; Err_Line_No : in LexTokenManager.Line_Numbers) --# global in CommandLineData.Content; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# derives Error_Context_Rec from *, --# Err_Line_No, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Echo_File, --# Error_Context_Rec, --# Err_Line_No; is Source_File : SPARK_IO.File_Type; Success : SPARK_IO.File_Status; begin if Error_Context_Rec.Line_No > Err_Line_No then Error_Context_Rec.Line_No := 0; Error_Context_Rec.Current_Line := E_Strings.Empty_String; Source_File := Error_Context_Rec.Source; --# accept Flow, 10, Success, "Expected ineffective assignment to Success"; SPARK_IO.Reset (Source_File, SPARK_IO.In_File, Success); --# end accept; Error_Context_Rec.Source := Source_File; end if; if Err_Line_No > 0 and then Error_Context_Rec.Line_No < Err_Line_No - 1 then SPARK_IO.Skip_Line (Error_Context_Rec.Source, Integer ((Err_Line_No - Error_Context_Rec.Line_No) - 1)); end if; GetFileLine; Error_Context_Rec.Line_No := Err_Line_No; New_Line (Echo_File, 1); Print_Source_Line (To_File => Echo_File); --# accept Flow, 33, Success, "Expected Success to be neither referenced nor exported"; end Echo_Source_Line; procedure Put_Error_Pointer (Echo_File : in SPARK_IO.File_Type; Err_Pos : in Natural) --# global in Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Echo_File, 
--# Error_Context_Rec, --# Err_Pos; is begin Move_To_Indent (Source_File => Echo_File, Line => Error_Context_Rec.Current_Line, Indent => Source_Line_Indent, Position => Err_Pos - 1); Put_Char (Echo_File, '^'); New_Line (SPARK_IO.Standard_Output, 1); end Put_Error_Pointer; function Need_Source_Line (Error : Error_Types.StringError) return Boolean --# global in Error_Context_Rec; is begin return Error.Position.Start_Line_No /= Error_Context_Rec.Line_No and Error.ErrorType /= Error_Types.NoErr; end Need_Source_Line; begin if Error_Context_Rec.Echo then if CommandLineData.Content.Brief then Line_Length := 0; New_Line_Count := 1; else Line_Length := Error_Line_Length; New_Line_Count := 1; end if; Err_Struct := Error_Struct'(Err_Num => 0, Error => Error); Was_Active := ErrorAccumulator.Is_Active (This => Echo_Accumulator); if ErrorAccumulator.Is_Active (This => Echo_Accumulator) then ErrorAccumulator.Add (Echo_Accumulator, Err_Struct, Line_Length, 11, Echo_File); end if; --# assert True; if not ErrorAccumulator.Is_Active (This => Echo_Accumulator) then if Was_Active then New_Line (Echo_File, New_Line_Count); end if; if CommandLineData.Content.Brief then SPARK_IO.Name (Error_Context_Rec.Source, Source_File_Name, Source_File_Stop); Source_Filename := E_Strings.Section (E_Strings.Copy_String (Str => Source_File_Name), 1, Source_File_Stop); case CommandLineData.Content.Brief_Option is when CommandLineData.No_Path => E_Strings.Put_String (File => Echo_File, E_Str => FileSystem.Base_Name (Path => Source_Filename, Suffix => "")); when CommandLineData.Full_Path => E_Strings.Put_String (File => Echo_File, E_Str => Source_Filename); end case; Put_Char (Echo_File, ':'); Put_Integer (Echo_File, Integer (Error.Position.Start_Line_No), 0, 10); Put_Char (Echo_File, ':'); Put_Integer (Echo_File, Error.Position.Start_Pos, 0, 10); Put_Char (Echo_File, ':'); Put_Char (Echo_File, ' '); Output_Brief_Error_Marker (File => Echo_File, Err_Type => Error.ErrorType, Message_Id => Error.MessageId); else if Need_Source_Line (Error => Error) then Echo_Source_Line (Echo_File => Echo_File, Err_Line_No => Error.Position.Start_Line_No); end if; if Error_Has_Position_Inline (Err_Type => Error.ErrorType) then Put_Error_Pointer (Echo_File => Echo_File, Err_Pos => Error.Position.Start_Pos); else New_Line (Echo_File, 1); end if; Output_Error_Marker (File => Echo_File, Err_Type => Error.ErrorType, Message_Id => Error.MessageId, Err_Count => 0); end if; if ErrorAccumulator.Is_Error_Start (The_Error => Error) then -- extract explanation from message here and pass it into ErrorAccumulator for later Flush --PNA Local_Error := Error; Split_String_At_Explanation (E_Str => Local_Error.Message, Explanation => Explanation); PrintLine (Echo_File, 31, Line_Length, 11, Local_Error.Message, False, New_Start); ErrorAccumulator.Start_Msg (This => Echo_Accumulator, Start_Error => Err_Struct, Start_Indent => New_Start, Explanation => Explanation, Line_Length => Line_Length, Indent => 11); -- in rather than recalculate them in error accumulator else -- leave message text unchanged and display it --# accept Flow, 10, New_Start, "Expected ineffective assignment to New_Start"; PrintLine (Echo_File, 31, Line_Length, 11, -- Expect ineff assignment to New_Start Error.Message, False, New_Start); --# end accept; New_Line (Echo_File, New_Line_Count); end if; end if; end if; end EchoErrorEntry; spark-2012.0.deb/examiner/sem-compunit-walkstatements-wf_exit.adb0000644000175000017500000002112011753202336024044 0ustar 
eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.CompUnit.WalkStatements) procedure Wf_Exit (Node : in STree.SyntaxNode; The_Loop : in Dictionary.Symbol; Condition_Node : out STree.SyntaxNode) is If_Node, Local_Node : STree.SyntaxNode; Exit_Label : STree.SyntaxNode; begin -- The procedure checks that the conditions -- of Section 5.7 of the SPARK Definition apply to the exit statement. Local_Node := Next_Sibling (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME Local_Node = condition OR simple_name OR NULL if Local_Node = STree.NullNode then -- ASSUME Local_Node = NULL -- A simple exit statement - no label identifier and no exit condition Condition_Node := STree.NullNode; Exit_Label := STree.NullNode; elsif Syntax_Node_Type (Node => Local_Node) = SP_Symbols.simple_name then -- ASSUME Local_Node = simple_name -- Exit has a label name Condition_Node := Next_Sibling (Current_Node => Local_Node); -- get the exit condition Exit_Label := Child_Node (Current_Node => Local_Node); -- get the label identifier elsif Syntax_Node_Type (Node => Local_Node) = SP_Symbols.condition then -- ASSUME Local_Node = condition -- Must be an exit with a condition but no label Condition_Node := Local_Node; Exit_Label := STree.NullNode; else Condition_Node := STree.NullNode; Exit_Label := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = condition OR simple_name OR NULL in Wf_Exit"); end if; -- ASSUME Exit_Label = identifier OR NULL if Syntax_Node_Type (Node => Exit_Label) = SP_Symbols.identifier then -- ASSUME Exit_Label = identifier -- Exit names a loop label. It must match the label attached to the -- most closely enclosing loop statement. 
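-- For illustration only (hypothetical user code, not part of the Examiner):
-- the label on the exit must name the loop that immediately encloses it, as in
--
--    Outer_Loop : loop
--       exit Outer_Loop when Done;   -- "Done" stands for any Boolean condition
--    end loop Outer_Loop;
--
-- whereas writing "exit Outer_Loop" inside a nested inner loop names a loop that
-- is not the closest enclosing one, and is reported via error 724 below.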
if (not Dictionary.LoopHasName (The_Loop)) or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Dictionary.GetSimpleName (The_Loop), Lex_Str2 => Node_Lex_String (Node => Exit_Label)) /= LexTokenManager.Str_Eq then -- Enclosing loop does not have a label, or labels -- are present, but do not match ErrorHandler.Semantic_Error (Err_Num => 724, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Exit_Label), Id_Str => LexTokenManager.Null_String); end if; elsif Exit_Label /= STree.NullNode then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Exit_Label = identifier OR NULL in Wf_Exit"); end if; -- determine whether exit statement contains a when clause; -- ASSUME Condition_Node = condition OR NULL if Condition_Node = STree.NullNode then -- ASSUME Condition_Node = NULL -- exit statement is in an if_statement, therefore check that condition (3) -- of SPARK Definition Section 5.7 applies: -- check that exit-statement is last in its sequence of statements; if Is_Last_In_Sequence (Node => Node) then -- check that closest containing compound statement is an if_statement; Local_Node := Parent_Of_Sequence (Node => Node); if Syntax_Node_Type (Node => Local_Node) = SP_Symbols.if_statement then -- ASSUME Local_Node = if_statement -- check remainder of condition (3); If_Node := Local_Node; -- ASSUME If_Node = if_statement Local_Node := Child_Node (Current_Node => Local_Node); -- ASSUME Local_Node = condition SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Local_Node) = SP_Symbols.condition, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = condition in Wf_Exit"); Local_Node := Next_Sibling (Current_Node => Local_Node); -- ASSUME Local_Node = sequence_of_statements SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Local_Node) = SP_Symbols.sequence_of_statements, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = sequence_of_statements in Wf_Exit"); Local_Node := Next_Sibling (Current_Node => Local_Node); -- ASSUME Local_Node = elsif_part SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Local_Node) = SP_Symbols.elsif_part, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = elsif_part in Wf_Exit"); -- check that elsif_part is null; if Child_Node (Current_Node => Local_Node) = STree.NullNode then -- ASSUME Child_Node (Current_Node => Local_Node) = NULL Local_Node := Next_Sibling (Current_Node => Local_Node); -- ASSUME Local_Node = else_part SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Local_Node) = SP_Symbols.else_part, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = else_part in Wf_Exit"); -- check that else_part is null; if Child_Node (Current_Node => Local_Node) = STree.NullNode then -- ASSUME Child_Node (Current_Node => Local_Node) = NULL -- check that closest-containing compound statement is a loop statement; Local_Node := Parent_Of_Sequence (Node => If_Node); if Syntax_Node_Type (Node => Local_Node) /= SP_Symbols.loop_statement then -- ASSUME Local_Node /= loop_statement ErrorHandler.Control_Flow_Error (Err_Type => ErrorHandler.Misplaced_Exit, Position => Node_Position (Node => Node)); end if; else ErrorHandler.Control_Flow_Error (Err_Type => ErrorHandler.Misplaced_Exit, Position => Node_Position (Node => Node)); end if; else ErrorHandler.Control_Flow_Error (Err_Type => ErrorHandler.Misplaced_Exit, Position => Node_Position (Node => Node)); end if; else ErrorHandler.Control_Flow_Error (Err_Type => 
ErrorHandler.Misplaced_Exit, Position => Node_Position (Node => Node)); end if; else ErrorHandler.Control_Flow_Error (Err_Type => ErrorHandler.Misplaced_Exit, Position => Node_Position (Node => Node)); end if; elsif Syntax_Node_Type (Node => Condition_Node) = SP_Symbols.condition then -- ASSUME Condition_Node = condition -- exit statement contains a when clause, therefore check that condition (2) -- of SPARK Definition Section 5.7 applies, i.e. check that closest- -- containing compound statement is a loop statement; Local_Node := Parent_Of_Sequence (Node => Node); if Syntax_Node_Type (Node => Local_Node) /= SP_Symbols.loop_statement then -- ASSUME Local_Node /= loop_statement ErrorHandler.Control_Flow_Error (Err_Type => ErrorHandler.Misplaced_Exit, Position => Node_Position (Node => Node)); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Condition_Node = condition OR NULL in Wf_Exit"); end if; end Wf_Exit; spark-2012.0.deb/examiner/sem-compunit-walkstatements-down_loop.adb0000644000175000017500000001040711753202336024405 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.CompUnit.WalkStatements) procedure Down_Loop (Node : in STree.SyntaxNode; Scope : in out Dictionary.Scopes) is Ident_Node : STree.SyntaxNode; Iteration_Scheme_Node : STree.SyntaxNode; Loop_Sym : Dictionary.Symbol; Loop_Ident : LexTokenManager.Lex_String; begin Dictionary.AddLoop (Scope => Scope, Comp_Unit => ContextManager.Ops.Current_Unit, LoopStatement => Dictionary.Location'(Start_Position => Node_Position (Node => Node), End_Position => Node_Position (Node => Node)), TheLoop => Loop_Sym); Iteration_Scheme_Node := Child_Node (Current_Node => Node); -- ASSUME Iteration_Scheme_Node = simple_name OR loop_statement_opt if Syntax_Node_Type (Node => Iteration_Scheme_Node) = SP_Symbols.simple_name then -- ASSUME Iteration_Scheme_Node = simple_name Ident_Node := Child_Node (Current_Node => Iteration_Scheme_Node); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Down_Loop"); Loop_Ident := Node_Lex_String (Node => Ident_Node); if Dictionary.IsDefined (Name => Loop_Ident, Scope => Scope, Context => Dictionary.ProofContext, Full_Package_Name => False) then ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Loop_Ident); else Dictionary.AddLoopName (Loop_Ident, Loop_Sym); end if; Iteration_Scheme_Node := Next_Sibling (Current_Node => Iteration_Scheme_Node); -- skip loop name elsif Syntax_Node_Type (Node => Iteration_Scheme_Node) /= SP_Symbols.loop_statement_opt then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Iteration_Scheme_Node = simple_name OR loop_statement_opt in Down_Loop"); end if; -- ASSUME Iteration_Scheme_Node = loop_statement_opt SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Iteration_Scheme_Node) = SP_Symbols.loop_statement_opt, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Iteration_Scheme_Node = loop_statement_opt in Down_Loop"); -- check for an iteration scheme, if one is found then mark loop as having exits Iteration_Scheme_Node := Child_Node (Current_Node => Iteration_Scheme_Node); -- ASSUME Iteration_Scheme_Node = iteration_scheme OR NULL if Syntax_Node_Type (Node => Iteration_Scheme_Node) = SP_Symbols.iteration_scheme then -- ASSUME Iteration_Scheme_Node = iteration_scheme Dictionary.MarkLoopHasExits (Loop_Sym); elsif Iteration_Scheme_Node /= STree.NullNode then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Iteration_Scheme_Node = iteration_scheme OR NULL in Down_Loop"); end if; Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Loop_Sym); end Down_Loop; spark-2012.0.deb/examiner/errorhandler-conversions-tostring-stabilityerror-stabilityerrorexpl.adb0000644000175000017500000000436311753202337032655 0ustar eugeneugenseparate (ErrorHandler.Conversions.ToString.StabilityError) procedure StabilityErrorExpl (E_Str : in out E_Strings.T) is begin case Stab_Typ is when ErrorHandler.Stable_Exit_Cond => E_Strings.Append_String (E_Str => E_Str, Str => "In these cases the (loop) exit condition occurs in an iteration scheme," & " an exit statement, or an if-statement whose (unique) sequence of" & " statements ends with an unconditional exit statement - see the SPARK" & " Definition. 
The concept of loop stability is explained in Section" & " 4.4 of Appendix A. A loop exit condition which is stable of index 0" & " takes the same value at every iteration around the loop, and with a" & " stability index of 1, it always takes the same value after the first" & " iteration. Stability with indices greater" & " than 0 does not necessarily indicate a program error, but the" & " conditions for loop termination require careful consideration"); when ErrorHandler.Stable_Fork_Cond => E_Strings.Append_String (E_Str => E_Str, Str => "The expression, occurring within a loop, is either a case expression" & " or a condition (Boolean-valued expression) associated with an" & " if-statement, whose value determines the path taken through the body" & " of the loop, but does not (directly) cause loop termination." & " Information flow analysis shows that the expression does not vary" & " as the loop is executed, so the same branch of the case or if statement will" & " be taken on every loop iteration. An Index of 0 means that the expression is" & " immediately stable, 1 means it becomes stable after the first pass through the loop and so on." & " The stability index is given with reference to the loop most" & " closely-containing the expression. Stable conditionals are not necessarily" & " an error but do require careful evaluation; they can often be removed by lifting them" & " outside the loop."); end case; end StabilityErrorExpl; spark-2012.0.deb/examiner/sem-substitute_protected_type_self_reference.adb0000644000175000017500000000417511753202336026066 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) function Substitute_Protected_Type_Self_Reference (Sym, Prefix_Symbol : Dictionary.Symbol) return Dictionary.Symbol is Result : Dictionary.Symbol; begin -- if Sym is the implicitly-declared own variable of a protected type -- then we must replace it with the "current instance of the protected object" -- before checking whether it is visible. -- Background: given protected type PT its operations will globally reference and -- derive PT meaning, in this case, "myself". -- If an object PO of type PT (or a subtype of PT) is declared then calls to its -- operations will take the form PO.Op and the calling environment will be annotated -- in terms of PO. Therefore, when checking that the globals necessary for the call -- PO.Op are visible (for example), we need to replace all references to PT into -- references to PO before making the check. The Prefix Symbol of the call is the -- symbol we need to substitute in. 
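-- As a sketch (hypothetical declarations, not part of this unit): given
--
--    protected type PT is ... end PT;   -- operations global-annotate PT itself
--    PO : PT;                           -- callers annotate in terms of PO
--
-- a call PO.Op reaches this point with Prefix_Symbol = PO, and any reference to
-- PT's implicitly-declared own variable is replaced by PO below, before the
-- visibility check is made.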
Result := Sym; if Dictionary.IsOwnVariable (Sym) and then Dictionary.IsProtectedType (Dictionary.GetOwner (Sym)) then Result := Prefix_Symbol; end if; return Result; end Substitute_Protected_Type_Self_Reference; spark-2012.0.deb/examiner/sem-walk_expression_p-down_wf_name_argument_list.adb0000644000175000017500000002313411753202336026640 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SPARK_IO; separate (Sem.Walk_Expression_P) procedure Down_Wf_Name_Argument_List (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; Heap_Param : in out Lists.List_Heap; Next_Node : out STree.SyntaxNode) is Type_Info : Sem.Exp_Record; Ptr : Lists.List; Child, Child_Child : STree.SyntaxNode; begin Exp_Stack.Pop (Item => Type_Info, Stack => E_Stack); case Type_Info.Sort is when Sem.Is_Unknown => Type_Info := Unknown_Symbol_Record; Next_Node := STree.NullNode; when Sem.Is_Type_Mark => Child := STree.Child_Node (Current_Node => Node); -- ASSUME Child = named_argument_association OR positional_argument_association OR -- annotation_named_argument_association OR annotation_positional_argument_association if STree.Syntax_Node_Type (Node => Child) = SP_Symbols.positional_argument_association or else STree.Syntax_Node_Type (Node => Child) = SP_Symbols.annotation_positional_argument_association then -- ASSUME Child = positional_argument_association OR annotation_positional_argument_association Child_Child := STree.Child_Node (Current_Node => Child); -- ASSUME Child_Child = positional_argument_association OR expression OR -- annotation_positional_argument_association OR annotation_expression if STree.Syntax_Node_Type (Node => Child_Child) = SP_Symbols.expression or else STree.Syntax_Node_Type (Node => Child_Child) = SP_Symbols.annotation_expression then -- ASSUME Child_Child = expression OR annotation_expression Next_Node := Child; elsif STree.Syntax_Node_Type (Node => Child_Child) = SP_Symbols.positional_argument_association or else STree.Syntax_Node_Type (Node => Child_Child) = SP_Symbols.annotation_positional_argument_association then -- ASSUME Child_Child = positional_argument_association OR annotation_positional_argument_association Type_Info := Unknown_Symbol_Record; ErrorHandler.Semantic_Error (Err_Num => 32, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => STree.Parent_Node (Current_Node => Node)), Id_Str => LexTokenManager.Null_String); Next_Node := STree.NullNode; else Next_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child_Child = 
positional_argument_association OR expression OR " & "annotation_positional_argument_association OR annotation_expression in Down_Wf_Name_Argument_List"); end if; elsif STree.Syntax_Node_Type (Node => Child) = SP_Symbols.named_argument_association or else STree.Syntax_Node_Type (Node => Child) = SP_Symbols.annotation_named_argument_association then -- ASSUME Child = named_argument_association OR annotation_named_argument_association Type_Info := Unknown_Symbol_Record; ErrorHandler.Semantic_Error (Err_Num => 32, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => STree.Parent_Node (Current_Node => Node)), Id_Str => LexTokenManager.Null_String); Next_Node := STree.NullNode; else Next_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child = named_argument_association OR positional_argument_association OR " & "annotation_named_argument_association OR annotation_positional_argument_association " & "in Down_Wf_Name_Argument_List"); end if; when Sem.Is_Function => Child := STree.Child_Node (Current_Node => Node); -- ASSUME Child = named_argument_association OR positional_argument_association OR -- annotation_named_argument_association OR annotation_positional_argument_association if STree.Syntax_Node_Type (Node => Child) = SP_Symbols.positional_argument_association or else STree.Syntax_Node_Type (Node => Child) = SP_Symbols.annotation_positional_argument_association then -- ASSUME Child = positional_argument_association OR annotation_positional_argument_association Type_Info.Param_Count := 0; elsif STree.Syntax_Node_Type (Node => Child) = SP_Symbols.named_argument_association or else STree.Syntax_Node_Type (Node => Child) = SP_Symbols.annotation_named_argument_association then -- ASSUME Child = named_argument_association OR annotation_named_argument_association Create_Name_List (List => Ptr, Heap_Param => Heap_Param); Type_Info.Param_List := Ptr; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child = named_argument_association OR positional_argument_association OR " & "annotation_named_argument_association OR annotation_positional_argument_association " & "in Down_Wf_Name_Argument_List"); end if; if STree.Syntax_Node_Type (Node => Node) = SP_Symbols.name_argument_list then Type_Info.Arg_List_Found := True; end if; Next_Node := Child; when Sem.Is_Object => if Dictionary.IsArrayTypeMark (Type_Info.Type_Symbol, Scope) then Child := STree.Child_Node (Current_Node => Node); -- ASSUME Child = named_argument_association OR positional_argument_association OR -- annotation_named_argument_association OR annotation_positional_argument_association if STree.Syntax_Node_Type (Node => Child) = SP_Symbols.positional_argument_association or else STree.Syntax_Node_Type (Node => Child) = SP_Symbols.annotation_positional_argument_association then -- ASSUME Child = positional_argument_association OR annotation_positional_argument_association Type_Info.Param_Count := 0; Type_Info.Is_Static := False; if STree.Syntax_Node_Type (Node => Node) = SP_Symbols.name_argument_list then Type_Info.Arg_List_Found := True; end if; Next_Node := Child; elsif STree.Syntax_Node_Type (Node => Child) = SP_Symbols.named_argument_association or else STree.Syntax_Node_Type (Node => Child) = SP_Symbols.annotation_named_argument_association then -- ASSUME Child = named_argument_association OR annotation_named_argument_association Type_Info := Unknown_Symbol_Record; ErrorHandler.Semantic_Error (Err_Num => 92, 
Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); Next_Node := STree.NullNode; else Next_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child = named_argument_association OR positional_argument_association OR " & "annotation_named_argument_association OR annotation_positional_argument_association " & "in Down_Wf_Name_Argument_List"); end if; else Type_Info := Unknown_Symbol_Record; ErrorHandler.Semantic_Error (Err_Num => 21, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => STree.Parent_Node (Current_Node => Node)), Id_Str => LexTokenManager.Null_String); Next_Node := STree.NullNode; end if; when others => Type_Info := Unknown_Symbol_Record; ErrorHandler.Semantic_Error (Err_Num => 21, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => STree.Parent_Node (Current_Node => Node)), Id_Str => LexTokenManager.Null_String); Next_Node := STree.NullNode; end case; Exp_Stack.Push (X => Type_Info, Stack => E_Stack); end Down_Wf_Name_Argument_List; spark-2012.0.deb/examiner/errorhandler-conversions-tostring-depsemanticerr.adb0000644000175000017500000001606711753202336026642 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (ErrorHandler.Conversions.ToString) procedure DepSemanticErr (Err_Num : in Error_Types.NumericError; With_Explanation : in Boolean; E_Str : in out E_Strings.T) is procedure DepSemanticErrExpl (E_Str : in out E_Strings.T) --# global in Err_Num; --# derives E_Str from *, --# Err_Num; is separate; -- Note that the parameter names for this subunit are chosen to make it as easy as -- possible to auto-generate the subunit from this, its parent, file. The -- generation requires copying the case statement below, stripping out the -- current Append'Thing' statements and adding an AppendString for the -- explanatory text that is delineated by --! comments. 
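-- As a sketch of that generation (the generated subunit itself is not shown
-- here): the "--!" text attached to "when 2 =>" below would reappear in
-- DepSemanticErrExpl as a branch of the form
--
--    when 2 =>
--       E_Strings.Append_String
--         (E_Str => E_Str,
--          Str   => "A refinement constituent of XXX occurs as an export ...");
--
-- with this parent's Append_Name and Append_String calls stripped out.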
procedure Append_Explanation --# global in Err_Num; --# in With_Explanation; --# in out E_Str; --# derives E_Str from *, --# Err_Num, --# With_Explanation; is Explanation_String : E_Strings.T := E_Strings.Empty_String; begin if With_Explanation then -- we need to at least look for an explanation DepSemanticErrExpl (Explanation_String); if E_Strings.Get_Length (E_Str => Explanation_String) > 0 then -- there actually is one E_Strings.Append_String (E_Str => E_Str, Str => ErrorHandler.Explanation_Prefix); E_Strings.Append_Examiner_String (E_Str1 => E_Str, E_Str2 => Explanation_String); E_Strings.Append_String (E_Str => E_Str, Str => ErrorHandler.Explanation_Postfix); end if; end if; end Append_Explanation; begin case Err_Num.ErrorNum is -- HTML Directives --! <"flow-"> --! <"!!! Flow Error : "><" : "> when 1 => E_Strings.Append_String (E_Str => E_Str, Str => "The previously stated updating of "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has been omitted"); --! XXX occurred as an export in the earlier dependency relation but --! neither XXX nor any refinement constituent of it occurs in the --! refined dependency relation. when 2 => E_Strings.Append_String (E_Str => E_Str, Str => "The updating of "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has not been previously stated"); --! A refinement constituent of XXX occurs as an export in the --! refined dependency relation but XXX does not occur as an export in --! the earlier dependency relation. when 3 => E_Strings.Append_String (E_Str => E_Str, Str => "The previously stated dependency of the exported value of "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " on the imported value of "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has been omitted"); --! The dependency of the exported value of XXX on the imported value --! of YYY occurs in the earlier dependency relation but in the refined --! dependency relation, no constituents of XXX --! depend on any constituents of YYY. when 4 => E_Strings.Append_String (E_Str => E_Str, Str => "The dependency of the exported value of "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " on the imported value of "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has not been previously stated"); --! A refined dependency relation states a dependency of XXX or a --! constituent of XXX on YYY or a constituent of YYY, but in the --! earlier relation, no dependency of XXX on YYY is stated. when 5 => E_Strings.Append_String (E_Str => E_Str, Str => "The (possibly implicit) dependency of the exported value of "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " on its imported value"); E_Strings.Append_String (E_Str => E_Str, Str => " has not been previously stated"); --! Either a dependency of a constituent of XXX on at least one --! constituent of XXX occurs in the refined dependency relation, or --! not all the constituents of XXX occur as exports in --! the refined dependency relation. However, the dependency of XXX on --! 
itself does not occur in the earlier dependency relation. when others => E_Strings.Append_String (E_Str => E_Str, Str => "UNKNOWN ERROR NUMBER PASSED TO DepSemanticErr"); end case; Append_Explanation; E_Strings.Append_String (E_Str => E_Str, Str => "."); end DepSemanticErr; spark-2012.0.deb/examiner/sem-compunit-wf_subunit.adb0000644000175000017500000001403711753202336021531 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.CompUnit) procedure Wf_Subunit (Node : in STree.SyntaxNode; Scope : in out Dictionary.Scopes) is Next_Node, Child_Next_Node : STree.SyntaxNode; Sym : Dictionary.Symbol; Parent : Dictionary.Symbol; Ok : Boolean; --------------------------------------------------- procedure Check_Sym (Node : in STree.SyntaxNode; Sym : in Dictionary.Symbol; Ok : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table, --# Sym & --# Ok from Dictionary.Dict, --# Sym; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.identifier; is begin if Dictionary.Is_Null_Symbol (Sym) or else not Dictionary.IsCompilationUnit (Sym) then Ok := False; ErrorHandler.Semantic_Error (Err_Num => 14, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Node_Lex_String (Node => Node)); else Ok := True; end if; end Check_Sym; begin -- Wf_Subunit Next_Node := Parent_Node (Current_Node => Last_Child_Of (Start_Node => Node)); -- ASSUME Next_Node = simple_name SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Next_Node) = SP_Symbols.simple_name, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = simple_name in Wf_Subunit"); -- Next_Node is now bottom simple_name in tree Child_Next_Node := Child_Node (Current_Node => Next_Node); -- ASSUME Child_Next_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Child_Next_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child_Next_Node = identifier in Wf_Subunit"); Sym := Dictionary.LookupItem (Name => Node_Lex_String (Node => Child_Next_Node), Scope => Dictionary.GlobalScope, Context => Dictionary.ProgramContext, Full_Package_Name => False); Check_Sym (Node => Child_Next_Node, Sym => Sym, Ok => Ok); loop --# assert Syntax_Node_Type (Next_Node, STree.Table) = 
SP_Symbols.simple_name and --# Syntax_Node_Type (Child_Next_Node, STree.Table) = SP_Symbols.identifier and --# STree.Table = STree.Table~; exit when not Ok; STree.Set_Node_Lex_String (Sym => Sym, Node => Child_Next_Node); Next_Node := Next_Sibling (Current_Node => Parent_Node (Current_Node => Next_Node)); exit when Syntax_Node_Type (Node => Next_Node) /= SP_Symbols.simple_name; Child_Next_Node := Child_Node (Current_Node => Next_Node); -- ASSUME Child_Next_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Child_Next_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child_Next_Node = identifier in Wf_Subunit"); Parent := Sym; Sym := Dictionary.LookupSelectedItem (Prefix => Parent, Selector => Node_Lex_String (Node => Child_Next_Node), Scope => Dictionary.GlobalScope, Context => Dictionary.ProofContext); if Dictionary.Is_Null_Symbol (Sym) then Sym := Dictionary.LookupImmediateScope (Name => Node_Lex_String (Node => Child_Next_Node), Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Parent), Context => Dictionary.ProgramContext); end if; -- The above look up may return the symbol of an implicit proof function associated with an -- Ada function. If so, we want to process the subunit in the context of the Ada function if Dictionary.IsProofFunction (Sym) then Sym := Dictionary.GetAdaFunction (Sym); end if; Check_Sym (Node => Child_Next_Node, Sym => Sym, Ok => Ok); end loop; if Ok then Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Sym); end if; end Wf_Subunit; spark-2012.0.deb/examiner/clists.adb0000644000175000017500000001472711753202335016234 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= package body Clists is -- A_Ptr fields join the cells of a list. -- B_Ptr of a listhead is the last cell in its list. 
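-- For illustration, a three-cell list (cell names C1..C3 are hypothetical):
--
--    ListName --A_Ptr--> C1 --A_Ptr--> C2 --A_Ptr--> C3 --A_Ptr--> null
--    ListName --B_Ptr--> C3
--
-- so FirstCell and NextCell follow A_Ptr pointers, LastCell reads the
-- listhead's B_Ptr, and AppendCell updates both when adding at the end.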
-------------------------------------------------------------------------- function FirstCell (Heap : Cells.Heap_Record; ListName : Cells.Cell) return Cells.Cell is begin return Cells.Get_A_Ptr (Heap, ListName); end FirstCell; -------------------------------------------------------------------------- function LastCell (Heap : Cells.Heap_Record; ListName : Cells.Cell) return Cells.Cell is begin return Cells.Get_B_Ptr (Heap, ListName); end LastCell; -------------------------------------------------------------------------- function NextCell (Heap : Cells.Heap_Record; CellName : Cells.Cell) return Cells.Cell is begin return Cells.Get_A_Ptr (Heap, CellName); end NextCell; -------------------------------------------------------------------------- function IsEmptyList (Heap : Cells.Heap_Record; ListName : Cells.Cell) return Boolean is begin return Cells.Is_Null_Cell (FirstCell (Heap, ListName)); end IsEmptyList; -------------------------------------------------------------------------- procedure AppendCell (Heap : in out Cells.Heap_Record; CellName : in Cells.Cell; ListName : in Cells.Cell) is begin -- prepare new cell; Cells.Set_A_Ptr (Heap, CellName, Cells.Null_Cell); -- modify existing pointers of list; if IsEmptyList (Heap, ListName) then Cells.Set_A_Ptr (Heap, ListName, CellName); else Cells.Set_A_Ptr (Heap, LastCell (Heap, ListName), CellName); end if; Cells.Set_B_Ptr (Heap, ListName, CellName); end AppendCell; -------------------------------------------------------------------------- procedure Concatenate (Heap : in out Cells.Heap_Record; List_1 : in Cells.Cell; List_2 : in Cells.Cell) is begin if not IsEmptyList (Heap, List_2) then if IsEmptyList (Heap, List_1) then Cells.Set_A_Ptr (Heap, List_1, FirstCell (Heap, List_2)); else Cells.Set_A_Ptr (Heap, LastCell (Heap, List_1), FirstCell (Heap, List_2)); end if; Cells.Set_B_Ptr (Heap, List_1, LastCell (Heap, List_2)); end if; Cells.Dispose_Of_Cell (Heap, List_2); end Concatenate; -------------------------------------------------------------------------- procedure InsertCell (Heap : in out Cells.Heap_Record; CellName : in Cells.Cell; ListName : in Cells.Cell) is M, N : Cells.Cell; GivenVarName : Natural; begin M := ListName; N := FirstCell (Heap, ListName); GivenVarName := Cells.Get_Natural_Value (Heap, CellName); loop if Cells.Is_Null_Cell (N) then Cells.Set_B_Ptr (Heap, ListName, CellName); exit; end if; if Cells.Get_Natural_Value (Heap, N) > GivenVarName then Cells.Set_A_Ptr (Heap, CellName, N); exit; end if; M := N; N := NextCell (Heap, N); end loop; Cells.Set_A_Ptr (Heap, M, CellName); end InsertCell; -------------------------------------------------------------------------- procedure TransferCells (Heap : in out Cells.Heap_Record; List_1 : in Cells.Cell; List_2 : in Cells.Cell) is begin if not IsEmptyList (Heap, List_1) then if IsEmptyList (Heap, List_2) then Cells.Set_A_Ptr (Heap, List_2, FirstCell (Heap, List_1)); else Cells.Set_A_Ptr (Heap, LastCell (Heap, List_2), FirstCell (Heap, List_1)); end if; Cells.Set_B_Ptr (Heap, List_2, LastCell (Heap, List_1)); end if; end TransferCells; -------------------------------------------------------------------------- procedure CreateList (Heap : in out Cells.Heap_Record; ListName : out Cells.Cell) is begin -- create listhead, with null A_Ptr and B_Ptr; Cells.Create_Cell (Heap, ListName); end CreateList; -------------------------------------------------------------------------- procedure DisposeOfList (Heap : in out Cells.Heap_Record; ListName : in Cells.Cell) is ListCell, NextListCell : 
Cells.Cell; begin ListCell := FirstCell (Heap, ListName); -- dispose of listhead; Cells.Dispose_Of_Cell (Heap, ListName); -- dispose of list cells; loop exit when Cells.Is_Null_Cell (ListCell); NextListCell := NextCell (Heap, ListCell); Cells.Dispose_Of_Cell (Heap, ListCell); ListCell := NextListCell; end loop; end DisposeOfList; -------------------------------------------------------------------------- procedure RemoveLeader (Heap : in out Cells.Heap_Record; ListName : in Cells.Cell) is begin if Cells.Are_Identical (FirstCell (Heap, ListName), LastCell (Heap, ListName)) then Cells.Set_A_Ptr (Heap, ListName, Cells.Null_Cell); Cells.Set_B_Ptr (Heap, ListName, Cells.Null_Cell); else Cells.Set_A_Ptr (Heap, ListName, NextCell (Heap, FirstCell (Heap, ListName))); end if; end RemoveLeader; end Clists; spark-2012.0.deb/examiner/commandlinehandler.adb0000644000175000017500000054403511753202335020557 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Ada.Characters.Handling; with Ada.Characters.Latin_1; with Ada.Command_Line; with CommandLineData; with FileSystem; with ScreenEcho; with SPARK_IO; with ExaminerConstants; use type CommandLineData.Concurrency_Profiles; use type CommandLineData.Flow_Analysis_Options; use type CommandLineData.Info_Flow_Policies; use type CommandLineData.Language_Profiles; use type SPARK_IO.File_Status; package body CommandLineHandler is type Command_Line_Errors is (ES_Index, ES_IndexName, ES_NoIndex, ES_SourceExt, ES_ListingExt, ES_NoReport, ES_Report, ES_ReportName, ES_Rules, ES_RuleUnknown, ES_NoEcho, ES_NoDict, ES_NoDuration, ES_Dict, ES_InvalidOption, ES_ListingFile, ES_NoComma, ES_Source, EW_Overlap, EW_Listing, EW_Index, EW_Report, EW_Source, ES_VCGandFDL, ES_DPCandFDL, ES_VCGRepeated, ES_DPCRepeated, ES_NoMorePFS, ES_NoMoreRTC, ES_NoMoreEXP, ES_NoMoreReal, ES_ErrorExplanations, EW_Too_Many, ES_Warning, ES_WarningName, ES_NoWarning, EW_Warning, ES_OutputDir, ES_OutputDirRepeated, ES_OutputDirNotFound, SOFstyle, ES_Syntax, ES_Statistics, ES_NoStatistics, ES_FDLoption, ES_InformationFlow, ES_TargetData, ES_TargetDataName, ES_NoTargetData, EW_Target, ES_ConfigFile, ES_ConfigFileName, ES_NoConfigFile, EW_Config, ES_LanguageRepeated, ES_LanguageProfile, ES_NoRavenscarInSPARK83, ES_NoSPARKLibInSPARK83, ES_NoAutoFlowInSPARK83, ES_Debug, ES_Casing, ES_Anno, ES_Html, ES_PlainOutput, ES_Brief, ES_BriefOption, ES_XML, ES_XMLandHTML, ES_XMLnorep, ES_HTMLnorep, ES_DataFlowAndInfoFlowPolicy, ES_OriginalFlowErrors, ES_ErrorExplanationOption, ES_JustificationRepeated, ES_JustificationOption, ES_Profile, ES_ProfileOption, ES_NoListings, ES_DataAndConfig, ES_Makefile, ES_Makefile_Repeated); function Is_White_Space (Space_Char : Character) return Boolean is begin return (Space_Char = ' ') or else (Space_Char = Ada.Characters.Latin_1.HT) or else (Space_Char = Ada.Characters.Latin_1.CR); end Is_White_Space; procedure Output_Error (E : in Command_Line_Errors) --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# E; is Check_Msg : constant String := " (check default switch file)"; begin case E is when ES_Index => ScreenEcho.Put_String ("Index option already specified or NoIndex option already specified" & Check_Msg); when ES_IndexName => ScreenEcho.Put_String ("Index option incorrectly specified"); when ES_Source => ScreenEcho.Put_String ("Source file incorrectly specified"); when ES_NoIndex => ScreenEcho.Put_String ("NoIndex option already specified or Index option already specified" & Check_Msg); when ES_SourceExt => ScreenEcho.Put_String ("Source extension option already specified or incorrectly specified"); when ES_ListingExt => ScreenEcho.Put_String ("Listing extension option already specified or incorrectly specified"); when ES_NoReport => ScreenEcho.Put_String ("NoReport option already specified, or contradictory report option found" & Check_Msg); when ES_Report => ScreenEcho.Put_String ("Report option already specified or NoReport option already specified" & Check_Msg); when ES_ReportName => ScreenEcho.Put_String ("Report option incorrectly specified"); when ES_Rules => ScreenEcho.Put_String ("Rules option may not appear more than once" & Check_Msg); when ES_RuleUnknown => ScreenEcho.Put_String ("Rules option not recognised: legal values are: " & CommandLineData.Option_Rules_None & ", " & CommandLineData.Option_Rules_Lazy & ", " & 
CommandLineData.Option_Rules_Keen & ", " & CommandLineData.Option_Rules_All); when ES_NoEcho => ScreenEcho.Put_String ("NoEcho option may not appear more than once" & Check_Msg); when ES_NoDict => ScreenEcho.Put_String ("NoDictionary option already specified or Dictionary option already specified (check default switch file)"); when ES_NoDuration => ScreenEcho.Put_String ("NoDuration option may not appear more than once" & Check_Msg); when ES_Dict => ScreenEcho.Put_String ("Dictionary option already specified or NoDictionary option already specified (check default switch file)"); when ES_InvalidOption => ScreenEcho.Put_String ("Invalid command line option:"); when ES_ListingFile => ScreenEcho.Put_String ("Listing file option incorrect"); when ES_NoComma => ScreenEcho.Put_String ("Comma missing in line"); when ES_VCGandFDL => ScreenEcho.Put_String ("-" & CommandLineData.Option_Vcg & " and -" & CommandLineData.Option_Fdl_Identifiers & "=" & CommandLineData.Option_Fdl_Identifiers_Accept & " cannot be used together" & Check_Msg); when ES_DPCandFDL => ScreenEcho.Put_String ("-" & CommandLineData.Option_Dpc & " and -" & CommandLineData.Option_Fdl_Identifiers & "=" & CommandLineData.Option_Fdl_Identifiers_Accept & " cannot be used together" & Check_Msg); when ES_VCGRepeated => ScreenEcho.Put_String ("-" & CommandLineData.Option_Vcg & " already specified" & Check_Msg); when ES_DPCRepeated => ScreenEcho.Put_String ("-" & CommandLineData.Option_Dpc & " already specified" & Check_Msg); when ES_NoMorePFS => ScreenEcho.Put_String ("PFS option is no longer supported" & Check_Msg); when ES_NoMoreRTC => ScreenEcho.Put_String ("RTC option is no longer supported. Please use -" & CommandLineData.Option_Vcg & " instead " & Check_Msg); when ES_NoMoreEXP => ScreenEcho.Put_String ("EXP option is no longer supported. Please use -" & CommandLineData.Option_Vcg & " instead " & Check_Msg); when ES_NoMoreReal => ScreenEcho.Put_String ("Real_RTCs option is no longer supported. Please use -" & CommandLineData.Option_Vcg & " instead " & Check_Msg); when ES_ErrorExplanations => ScreenEcho.Put_String ("Error_Explanations option may not appear more than once" & Check_Msg); when EW_Overlap => ScreenEcho.Put_String ("File argument is repeated:"); when EW_Listing => ScreenEcho.Put_String ("Cannot create file"); when EW_Index => ScreenEcho.Put_String ("Cannot open index file"); when EW_Report => ScreenEcho.Put_String ("Cannot create file"); when EW_Source => ScreenEcho.Put_String ("Cannot open source file"); when EW_Too_Many => ScreenEcho.Put_String ("Too many source files on command line"); when ES_WarningName => ScreenEcho.Put_String ("Warning option incorrectly specified"); when ES_Warning => ScreenEcho.Put_String ("Warning option already specified" & Check_Msg); when ES_NoWarning => ScreenEcho.Put_String ("NoWarning option already specified" & Check_Msg); when EW_Warning => ScreenEcho.Put_String ("Cannot open warning control file"); when ES_OutputDir => ScreenEcho.Put_String ("Output directory option must have exactly one argument"); when ES_OutputDirRepeated => ScreenEcho.Put_String ("Output directory option already specified" & Check_Msg); when ES_OutputDirNotFound => ScreenEcho.Put_String ("Cannot find or write to output directory"); when SOFstyle => ScreenEcho.Put_String ("-concat is no longer supported.
-tree is now the default and only option" & Check_Msg); when ES_Syntax => ScreenEcho.Put_String ("Syntax check option may not appear more than once" & Check_Msg); when ES_Statistics => ScreenEcho.Put_String ("Statistics option already specified, or NoStatistics option already specified" & Check_Msg); when ES_NoStatistics => ScreenEcho.Put_String ("NoStatistics option already specified, or Statistics option already specified" & Check_Msg); when ES_FDLoption => ScreenEcho.Put_String ("FDL identifier option already specified, or contradictory option found" & Check_Msg); when ES_InformationFlow => ScreenEcho.Put_String ("Flow analysis option already specified or incorrectly specified:"); when ES_TargetDataName => ScreenEcho.Put_String ("Target compiler data option incorrectly specified"); when ES_TargetData => ScreenEcho.Put_String ("Target compiler data option already specified or NoTarget compiler data option already specified" & Check_Msg); when ES_NoTargetData => ScreenEcho.Put_String ("NoTarget compiler data option already specified or Target compiler data option already specified" & Check_Msg); when EW_Target => ScreenEcho.Put_String ("Cannot open target data file"); when ES_ConfigFileName => ScreenEcho.Put_String ("Configuration file incorrectly specified"); when ES_ConfigFile => ScreenEcho.Put_String ("Configuration file already specified or NoConfiguration file already specified" & Check_Msg); when ES_NoConfigFile => ScreenEcho.Put_String ("NoConfiguration file already specified or Configuration file already specified" & Check_Msg); when EW_Config => ScreenEcho.Put_String ("Cannot open configuration file"); when ES_LanguageRepeated => ScreenEcho.Put_String ("Language profile already specified" & Check_Msg); when ES_LanguageProfile => ScreenEcho.Put_String ("Language profile incorrect. 
Value must be one of " & CommandLineData.Option_Language_83 & ", " & CommandLineData.Option_Language_95 & ", "); if CommandLineData.Content.Distribution_Is_Pro then ScreenEcho.Put_String (CommandLineData.Option_Language_2005 & ", or " & CommandLineData.Option_Language_KCG); else ScreenEcho.Put_String ("or " & CommandLineData.Option_Language_2005); end if; when ES_NoRavenscarInSPARK83 => ScreenEcho.Put_String ("Ravenscar concurrency profile is not permitted in SPARK83"); when ES_NoSPARKLibInSPARK83 => ScreenEcho.Put_String ("-sparklib is not permitted in SPARK83"); when ES_NoAutoFlowInSPARK83 => ScreenEcho.Put_String ("-flow=auto is not permitted in SPARK83"); when ES_Debug => ScreenEcho.Put_String ("Debugging option already specified, or incorrectly specified" & Check_Msg); when ES_Casing => ScreenEcho.Put_String ("Casing option already specified, or incorrectly specified" & Check_Msg); when ES_Anno => ScreenEcho.Put_String ("Annotation character option may not appear more than once" & Check_Msg); when ES_Html => ScreenEcho.Put_String ("HTML option may not appear more than once" & Check_Msg); when ES_PlainOutput => ScreenEcho.Put_String ("Plain output option may not appear more than once" & Check_Msg); when ES_Brief => ScreenEcho.Put_String ("Brief output option may not appear more than once" & Check_Msg); when ES_BriefOption => ScreenEcho.Put_String ("Brief option value must be one of: " & CommandLineData.Option_Brief_No_Path & " or " & CommandLineData.Option_Brief_Full_Path); when ES_OriginalFlowErrors => ScreenEcho.Put_String ("OriginalFlowErrors output option may not appear more than once" & Check_Msg); when ES_Profile => ScreenEcho.Put_String ("Analysis profile option may not appear more than once" & Check_Msg); when ES_ProfileOption => ScreenEcho.Put_String ("Analysis profile option incorrect:"); when ES_ErrorExplanationOption => ScreenEcho.Put_String ("Error_Explanation option incorrect:"); when ES_DataAndConfig => ScreenEcho.Put_String ("Target data and config file cannot be specified together" & Check_Msg); when ES_XML => ScreenEcho.Put_String ("XML option may not appear more than once" & Check_Msg); when ES_XMLandHTML => ScreenEcho.Put_String ("XML and HTML cannot be specified together" & Check_Msg); when ES_HTMLnorep => ScreenEcho.Put_String ("HTML and noreport cannot be specified together" & Check_Msg); when ES_XMLnorep => ScreenEcho.Put_String ("XML and noreport cannot be specified together" & Check_Msg); when ES_NoListings => ScreenEcho.Put_String ("No listings option may not be specified more than once" & Check_Msg); when ES_DataFlowAndInfoFlowPolicy => ScreenEcho.Put_String ("Information flow policy requires " & CommandLineData.Option_Flow_Analysis & "=" & CommandLineData.Option_Flow_Analysis_Information); when ES_JustificationRepeated => ScreenEcho.Put_String ("Justification_Option may not appear more than once" & Check_Msg); when ES_JustificationOption => ScreenEcho.Put_String ("Justification_Option value must be one of: " & CommandLineData.Option_Justification_Option_Ignore & ", " & CommandLineData.Option_Justification_Option_Full & ", or " & CommandLineData.Option_Justification_Option_Brief); when ES_Makefile => ScreenEcho.Put_String ("The -" & CommandLineData.Option_Makefile_Mode & " option requires -" & CommandLineData.Option_Brief & " and is incompatible with -" & CommandLineData.Option_No_Echo & "."); when ES_Makefile_Repeated => ScreenEcho.Put_String ("The -" & CommandLineData.Option_Makefile_Mode & " option may not be specified more than once" & Check_Msg); end 
case; end Output_Error; procedure Possible_Error (E : in Command_Line_Errors) --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# E; is begin if not CommandLineData.Content.Valid then Output_Error (E => E); ScreenEcho.New_Line (1); end if; end Possible_Error; procedure Possible_Error2 (E : in Command_Line_Errors; F : in E_Strings.T) --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# E, --# F; is begin if not CommandLineData.Content.Valid then Output_Error (E => E); ScreenEcho.Put_Char (' '); if CommandLineData.Content.Plain_Output then ScreenEcho.Put_ExaminerLine (E_Strings.Lower_Case (E_Str => F)); else ScreenEcho.Put_ExaminerLine (F); end if; end if; end Possible_Error2; procedure Read_Default_Switches (Default_Switches_Found : out Boolean; Cmd_Line : out E_Strings.T) --# global in out SPARK_IO.File_Sys; --# derives Cmd_Line, --# Default_Switches_Found, --# SPARK_IO.File_Sys from SPARK_IO.File_Sys; -- post (not Default_Switches_Found and Cmd_Line_Ptr = 1) or -- (Default_Switches_Found and Cmd_Line_Ptr = Cmd_Line'Last + 2); is Switch_File_Found : Boolean; Switch_File : SPARK_IO.File_Type; Cmd_Line_Local : E_Strings.T; Unused : SPARK_IO.File_Status; procedure Open_File (Switch_File_Found : out Boolean; Switch_File : out SPARK_IO.File_Type) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys, --# Switch_File, --# Switch_File_Found from SPARK_IO.File_Sys; is Switch_File_Local : SPARK_IO.File_Type := SPARK_IO.Null_File; File_Status : SPARK_IO.File_Status; begin SPARK_IO.Open (Switch_File_Local, SPARK_IO.In_File, 8, "spark.sw", "", File_Status); if File_Status = SPARK_IO.Ok then Switch_File_Found := True; Switch_File := Switch_File_Local; else Switch_File_Found := False; Switch_File := SPARK_IO.Null_File; end if; end Open_File; procedure Process_Switch_File (Switch_File : in SPARK_IO.File_Type; Cmd_Line : in out E_Strings.T) --# global in out SPARK_IO.File_Sys; --# derives Cmd_Line, --# SPARK_IO.File_Sys from *, --# SPARK_IO.File_Sys, --# Switch_File; is Current_Line : E_Strings.T; In_Quoted_String : Boolean; function Strip_Leading_Spaces (S : E_Strings.T) return E_Strings.T is Ptr : E_Strings.Positions := 1; begin loop exit when Ptr > E_Strings.Get_Length (E_Str => S); exit when not Is_White_Space (Space_Char => E_Strings.Get_Element (E_Str => S, Pos => Ptr)); Ptr := Ptr + 1; end loop; return E_Strings.Section (E_Str => S, Start_Pos => Ptr, Length => (E_Strings.Get_Length (E_Str => S) - Ptr) + 1); end Strip_Leading_Spaces; function Strip_Comments (S : E_Strings.T) return E_Strings.T is Res : E_Strings.T; begin if E_Strings.Get_Element (E_Str => S, Pos => 1) = '-' and then E_Strings.Get_Element (E_Str => S, Pos => 2) = '-' then Res := E_Strings.Empty_String; else Res := S; end if; return Res; end Strip_Comments; function Line_Is_Empty (S : E_Strings.T) return Boolean is begin return E_Strings.Get_Length (E_Str => S) = 0; end Line_Is_Empty; begin -- Process_Switch_File In_Quoted_String := False; loop -- look for non-empty lines in switch file and process them exit when SPARK_IO.End_Of_File (Switch_File); E_Strings.Get_Line (File => Switch_File, E_Str => Current_Line); Current_Line := Strip_Comments (S => Strip_Leading_Spaces (S => Current_Line)); if not Line_Is_Empty (S => Current_Line) then for I in E_Strings.Lengths range 1 .. 
E_Strings.Get_Length (E_Str => Current_Line) loop -- premature exit if comment found in line exit when E_Strings.Get_Element (E_Str => Current_Line, Pos => I) = '-' and then I < E_Strings.Get_Length (E_Str => Current_Line) and then E_Strings.Get_Element (E_Str => Current_Line, Pos => I + 1) = '-'; if E_Strings.Get_Element (E_Str => Current_Line, Pos => I) = Ada.Characters.Latin_1.Quotation then In_Quoted_String := not In_Quoted_String; end if; if In_Quoted_String then -- In a quoted string, we leave the character -- alone. E_Strings.Append_Char (E_Str => Cmd_Line, Ch => E_Strings.Get_Element (E_Str => Current_Line, Pos => I)); elsif Is_White_Space (Space_Char => E_Strings.Get_Element (E_Str => Current_Line, Pos => I)) then -- Not in a quoted string, so whitespace gets -- replaced by the host's preferred argument -- separator character E_Strings.Append_Char (E_Str => Cmd_Line, Ch => FileSystem.Argument_Separator); else -- We just append the character as is in any -- other case. E_Strings.Append_Char (E_Str => Cmd_Line, Ch => E_Strings.Get_Element (E_Str => Current_Line, Pos => I)); end if; end loop; E_Strings.Append_Char (E_Str => Cmd_Line, Ch => FileSystem.Argument_Separator); end if; end loop; end Process_Switch_File; begin -- Read_Default_Switches Cmd_Line_Local := E_Strings.Empty_String; Open_File (Switch_File_Found => Switch_File_Found, Switch_File => Switch_File); if Switch_File_Found then Process_Switch_File (Switch_File => Switch_File, Cmd_Line => Cmd_Line_Local); --# accept F, 10, Unused, "Returned status not used here" & --# F, 10, Switch_File, "Returned handle not used here"; SPARK_IO.Close (Switch_File, Unused); --expect flow error end if; Default_Switches_Found := Switch_File_Found; Cmd_Line := Cmd_Line_Local; --# accept F, 33, Unused, "Unused not referenced here"; end Read_Default_Switches; procedure Skip_Spaces (Command_String : in out Command_Strings) is begin loop exit when Command_String.Current_Position > E_Strings.Get_Length (E_Str => Command_String.Contents); exit when not ((E_Strings.Get_Element (E_Str => Command_String.Contents, Pos => Command_String.Current_Position) = ' ') or else (E_Strings.Get_Element (E_Str => Command_String.Contents, Pos => Command_String.Current_Position) = FileSystem.Argument_Separator)); Command_String.Current_Position := Command_String.Current_Position + 1; end loop; end Skip_Spaces; procedure Read_The_String (Command_String : in out Command_Strings; Next_Symbol : in out Symbols) is Current_Ch : Character; begin Next_Symbol.The_String := E_Strings.Empty_String; Current_Ch := E_Strings.Get_Element (E_Str => Command_String.Contents, Pos => Command_String.Current_Position); loop -- Skip over quotes, but allow spaces in the string if Current_Ch /= Ada.Characters.Latin_1.Quotation then E_Strings.Append_Char (E_Str => Next_Symbol.The_String, Ch => Current_Ch); end if; Command_String.Current_Position := Command_String.Current_Position + 1; Current_Ch := E_Strings.Get_Element (E_Str => Command_String.Contents, Pos => Command_String.Current_Position); exit when Command_String.Current_Position > E_Strings.Get_Length (E_Str => Command_String.Contents) or else FileSystem.Is_An_Argument_Terminator (Ch => Current_Ch); end loop; end Read_The_String; procedure Get_Next_Symbol (Command_String : in out Command_Strings; Next_Symbol : out Symbols) --# derives Command_String, --# Next_Symbol from Command_String; is Local_Next_Symbol : Symbols; begin -- This procedure is intended to return Next_Symbol; however, if the -- symbol is not a string then the string 
field is not set. Although -- it is not used in these circumstances its lack of definition -- causes so many flow errors that it is better to use an aggregate to -- initialize Next_Symbol here, and then assign the final value -- to Next_Symbol (for compatibility with the Ada83 "out parameter" rule). Local_Next_Symbol := Symbols'(Typ => S_Empty, The_String => E_Strings.Empty_String); Skip_Spaces (Command_String => Command_String); if Command_String.Current_Position <= E_Strings.Get_Length (E_Str => Command_String.Contents) then case E_Strings.Get_Element (E_Str => Command_String.Contents, Pos => Command_String.Current_Position) is when '=' => Local_Next_Symbol.Typ := S_Equal; Command_String.Current_Position := Command_String.Current_Position + 1; when ',' => --this condition is invariant for any particular system, we are actually --simulating conditional compilation for different target platforms. --Intended behaviour is correct despite flow error that will result. --# accept F, 22, "Stable expression here OK"; if FileSystem.Use_Unix_Command_Line then Local_Next_Symbol.Typ := S_String; Read_The_String (Command_String => Command_String, Next_Symbol => Local_Next_Symbol); else -- Windows Local_Next_Symbol.Typ := S_Comma; Command_String.Current_Position := Command_String.Current_Position + 1; end if; --# end accept; when '-' => -- On all platforms '-' is the switch character. Local_Next_Symbol.Typ := S_Switch_Character; Command_String.Current_Position := Command_String.Current_Position + 1; when others => Local_Next_Symbol.Typ := S_String; Read_The_String (Command_String => Command_String, Next_Symbol => Local_Next_Symbol); end case; else -- Exceeded maximum command line length Local_Next_Symbol.Typ := S_Empty; end if; Next_Symbol := Local_Next_Symbol; end Get_Next_Symbol; procedure Read_Option (Opt_Name : out E_Strings.T; Opt_Name_OK : out Boolean; Opt_Val : out E_Strings.T; Opt_Val_OK : out Boolean; Command_String : in out Command_Strings; Next_Symbol : out Symbols) --# derives Command_String, --# Next_Symbol, --# Opt_Name, --# Opt_Name_OK, --# Opt_Val, --# Opt_Val_OK from Command_String; -- pre Next_Symbol.Typ = SSlash; is Opt_Name_OK_Local : Boolean; Local_Next_Symbol : Symbols; begin Opt_Val := E_Strings.Empty_String; Get_Next_Symbol (Command_String => Command_String, Next_Symbol => Local_Next_Symbol); Opt_Name_OK_Local := Local_Next_Symbol.Typ = S_String; Opt_Name := Local_Next_Symbol.The_String; Get_Next_Symbol (Command_String => Command_String, Next_Symbol => Local_Next_Symbol); if Opt_Name_OK_Local and Local_Next_Symbol.Typ = S_Equal then Get_Next_Symbol (Command_String => Command_String, Next_Symbol => Local_Next_Symbol); Opt_Val_OK := Local_Next_Symbol.Typ = S_String; Opt_Val := Local_Next_Symbol.The_String; Get_Next_Symbol (Command_String => Command_String, Next_Symbol => Local_Next_Symbol); else Opt_Val_OK := False; end if; Opt_Name_OK := Opt_Name_OK_Local; Next_Symbol := Local_Next_Symbol; end Read_Option; function Check_Option_Name (Opt_Name : E_Strings.T; Str : String) return Boolean is OK : Boolean := False; begin if E_Strings.Get_Length (E_Str => Opt_Name) <= Str'Length then for I in E_Strings.Lengths range 1 ..
E_Strings.Get_Length (E_Str => Opt_Name) loop OK := Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Name, Pos => I)) = Ada.Characters.Handling.To_Lower (Str (I)); exit when not OK; end loop; end if; return OK; end Check_Option_Name; procedure Parse_Command_Options (Command_String : in out Command_Strings; Next_Symbol : in out Symbols) --# global in out CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# Command_String, --# Next_Symbol, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Command_String, --# Next_Symbol; is IndexFound : Boolean := False; OutputDirectoryFound : Boolean := False; WarningFound : Boolean := False; NoWarningFound : Boolean := False; SourceFound : Boolean := False; ListingFound : Boolean := False; OutputFound : Boolean := False; ReportFound : Boolean := False; ErrorExplanationsFound : Boolean := False; JustificationOptionFound : Boolean := False; RulesFound : Boolean := False; VCGFound : Boolean := False; DPCFound : Boolean := False; NoReportOptionFound : Boolean := False; SyntaxCheckFound : Boolean := False; StatisticsFound : Boolean := False; FDLreserveOptionFound : Boolean := False; FDLignoreOptionFound : Boolean := False; FlowOptionFound : Boolean := False; TargetDataFound : Boolean := False; ConfigFileFound : Boolean := False; LanguageProfileFound : Boolean := False; DebugEnabledFound : Boolean := False; CasingEnabledFound : Boolean := False; AnnotationCharacterFound : Boolean := False; HTMLFound : Boolean := False; PlainOutputFound : Boolean := False; NoDurationFound : Boolean := False; BriefFound : Boolean := False; XMLFound : Boolean := False; OriginalFlowErrorsFound : Boolean := False; NoListingsFound : Boolean := False; DictFound : Boolean := False; ConcurrencyProfileFound : Boolean := False; Makefile_Found : Boolean := False; Opt_Name, Opt_Val : E_Strings.T; Opt_Name_OK, Opt_Val_OK : Boolean; procedure Process_A --# global in FlowOptionFound; --# in Opt_Name; --# in Opt_Val; --# in out AnnotationCharacterFound; --# in out CommandLineData.Content; --# in out LanguageProfileFound; --# in out SPARK_IO.File_Sys; --# derives AnnotationCharacterFound, --# LanguageProfileFound from *, --# Opt_Name & --# CommandLineData.Content from *, --# AnnotationCharacterFound, --# FlowOptionFound, --# LanguageProfileFound, --# Opt_Name, --# Opt_Val & --# SPARK_IO.File_Sys from *, --# AnnotationCharacterFound, --# CommandLineData.Content, --# LanguageProfileFound, --# Opt_Name; is begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Flow_Option, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Language_Profile, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Anno_Char, "Direct updates OK here"; case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Name, Pos => 2)) is when 'd' => -- 2nd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => "ada83") then CommandLineData.Content.Valid := not LanguageProfileFound; Possible_Error (E => ES_LanguageRepeated); LanguageProfileFound := True; CommandLineData.Content.Language_Profile := CommandLineData.SPARK83; if not FlowOptionFound then CommandLineData.Content.Flow_Option := CommandLineData.Info_Flow; end if; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'n' => -- 2nd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Annotation_Character) then 
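-- The annotation character option may appear at most once; the first
-- character of its value becomes the new annotation character.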
CommandLineData.Content.Valid := not AnnotationCharacterFound; Possible_Error (E => ES_Anno); AnnotationCharacterFound := True; CommandLineData.Content.Anno_Char := E_Strings.Get_Element (E_Str => Opt_Val, Pos => 1); else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when others => CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; --# end accept; end Process_A; procedure Process_B --# global in Opt_Name; --# in Opt_Val; --# in Opt_Val_OK; --# in out BriefFound; --# in out CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives BriefFound from *, --# Opt_Name & --# CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# BriefFound, --# CommandLineData.Content, --# Opt_Name, --# Opt_Val, --# Opt_Val_OK; is begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Brief_Option, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Brief, "Direct updates OK here"; if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Brief) then CommandLineData.Content.Valid := not BriefFound; Possible_Error (E => ES_Brief); BriefFound := True; CommandLineData.Content.Brief := True; if Opt_Val_OK then -- Overriding the default brief option: "-brief=something" case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Val, Pos => 1)) is when 'n' => -- 1st letter of option if Check_Option_Name (Opt_Name => Opt_Val, Str => CommandLineData.Option_Brief_No_Path) then CommandLineData.Content.Brief_Option := CommandLineData.No_Path; else -- error involving 'nopath' CommandLineData.Content.Valid := False; Possible_Error (E => ES_BriefOption); end if; when 'f' => -- 1st letter of option if Check_Option_Name (Opt_Name => Opt_Val, Str => CommandLineData.Option_Brief_Full_Path) then CommandLineData.Content.Brief_Option := CommandLineData.Full_Path; else -- error involving 'fullpath' CommandLineData.Content.Valid := False; Possible_Error (E => ES_BriefOption); end if; when others => CommandLineData.Content.Valid := False; Possible_Error (E => ES_BriefOption); end case; end if; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; --# end accept; end Process_B; procedure Process_C --# global in Opt_Name; --# in Opt_Val; --# in Opt_Val_OK; --# in TargetDataFound; --# in out CasingEnabledFound; --# in out CommandLineData.Content; --# in out ConfigFileFound; --# in out SPARK_IO.File_Sys; --# derives CasingEnabledFound from *, --# Opt_Name & --# CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# CasingEnabledFound, --# CommandLineData.Content, --# ConfigFileFound, --# Opt_Name, --# Opt_Val, --# Opt_Val_OK, --# TargetDataFound & --# ConfigFileFound from *, --# Opt_Name, --# Opt_Val_OK, --# TargetDataFound; is File_Name : E_Strings.T; begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Casing_Standard, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Casing_Identifier, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Target_Config, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Target_Config_File, "Direct updates OK here"; case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Name, Pos => 2)) is when 'a' => if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Casing) then
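-- The casing option may appear at most once. Each character of its value
-- selects one casing check (standard or identifier); repeating a selection
-- is an error, and giving the option with no value enables both checks.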
CommandLineData.Content.Valid := not CasingEnabledFound; Possible_Error (E => ES_Casing); CasingEnabledFound := True; if Opt_Val_OK then for I in Integer range 1 .. E_Strings.Get_Length (E_Str => Opt_Val) loop case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Val, Pos => I)) is when CommandLineData.Option_Casing_Standard => if CommandLineData.Content.Casing_Standard then CommandLineData.Content.Valid := False; Possible_Error (E => ES_Casing); else CommandLineData.Content.Casing_Standard := True; end if; when CommandLineData.Option_Casing_Identifier => if CommandLineData.Content.Casing_Identifier then CommandLineData.Content.Valid := False; Possible_Error (E => ES_Casing); else CommandLineData.Content.Casing_Identifier := True; end if; when others => CommandLineData.Content.Valid := False; Possible_Error (E => ES_Casing); end case; end loop; else CommandLineData.Content.Casing_Standard := True; CommandLineData.Content.Casing_Identifier := True; end if; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'o' => -- 2nd letter case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Name, Pos => 3)) is -- 3rd letter when 'n' => case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Name, Pos => 4)) is -- 4th letter when 'f' => if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Config_File) then if TargetDataFound then CommandLineData.Content.Valid := False; Possible_Error (E => ES_DataAndConfig); else CommandLineData.Content.Valid := Opt_Val_OK; Possible_Error (E => ES_ConfigFileName); if Opt_Val_OK then CommandLineData.Content.Valid := not ConfigFileFound; Possible_Error (E => ES_ConfigFile); end if; if CommandLineData.Content.Valid then ConfigFileFound := True; CommandLineData.Content.Target_Config := True; File_Name := Opt_Val; FileSystem.Check_Extension (Fn => File_Name, Ext => E_Strings.Copy_String (Str => CommandLineData.Default_Config_Extension)); CommandLineData.Content.Target_Config_File := File_Name; end if; end if; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'c' => -- 4th letter -- -concat is removed in release 8.1.1 and above, but -- recognize it here and warn the user. 
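-- Recognized but rejected: report the withdrawn switch explicitly rather
-- than as a generic invalid option.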
if Check_Option_Name (Opt_Name => Opt_Name, Str => "concatenate") then CommandLineData.Content.Valid := False; Possible_Error (E => SOFstyle); else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when others => CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; when others => CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; when others => CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; --# end accept; end Process_C; procedure Process_D --# global in FDLignoreOptionFound; --# in Opt_Name; --# in Opt_Val; --# in Opt_Val_OK; --# in out CommandLineData.Content; --# in out DebugEnabledFound; --# in out DictFound; --# in out DPCFound; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# DebugEnabledFound, --# DictFound, --# DPCFound, --# FDLignoreOptionFound, --# Opt_Name, --# Opt_Val, --# Opt_Val_OK & --# DebugEnabledFound, --# DPCFound from *, --# Opt_Name & --# DictFound from *, --# Opt_Name, --# Opt_Val_OK; is Default_Dict_Extension : constant String := "dic"; File_Name : E_Strings.T; begin --# accept W, 169, CommandLineData.Content.DPC, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Valid, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Write_Dict, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Dict_File_Name, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Debug.Enabled, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Debug.HTML, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Debug.Expressions, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Debug.Lookup_Trace, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Debug.File_Names, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Debug.Units, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Debug.Invariants, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Debug.Components, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Debug.Rho, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Debug.FDL_Ranking, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Debug.VCG, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Debug.Parser, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Debug.SLI, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Debug.VCG_All, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Debug.Extra_Stats, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Debug.DAG, "Direct updates OK here"; case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Name, Pos => 2)) is when 'i' => -- 2nd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Dictionary_File) then CommandLineData.Content.Valid := Opt_Val_OK and then not DictFound; Possible_Error (E => ES_Dict); if CommandLineData.Content.Valid then DictFound := True; CommandLineData.Content.Write_Dict := True; File_Name := Opt_Val; FileSystem.Check_Extension (Fn => File_Name, Ext => E_Strings.Copy_String (Str => Default_Dict_Extension)); CommandLineData.Content.Dict_File_Name := File_Name; end if; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => 
Opt_Name); end if; when 'e' => -- 2nd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Debug) then CommandLineData.Content.Valid := not DebugEnabledFound; Possible_Error (E => ES_Debug); DebugEnabledFound := True; CommandLineData.Content.Debug.Enabled := True; -- The -debug switch may also have a parameter which -- is a list of character-codes for specific debug -- options. These are: -- c - Print component manager state -- e - Debug Expresion syntax and tree walking -- f - Trace filename storage/open/create -- h - Debug HTML generation -- i - Print default loop invariants -- k - Trace ranking and printing of FDL declarations -- l - Trace dictionary Look-Ups -- p - Print parser state on detection of syntax Error -- r - Print computed Rho relation for each subprogram -- u - Trace required units and index lookups -- v - Print VCG State and BPG after DAG.BuildGraph -- V - As v, but also print BPG during iteration of Graph.GenVCs -- x - Print cross-reference debug -- d - Print FDL DAG following BuildExpnDAG or -- BuildAnnotationExpnDAG -- These codes may be combined, so -- -debug=eh and -- -debug=he are both allowed and are equivalent if Opt_Val_OK then for I in Integer range 1 .. E_Strings.Get_Length (E_Str => Opt_Val) loop case E_Strings.Get_Element (E_Str => Opt_Val, Pos => I) is when CommandLineData.Option_Debug_C => CommandLineData.Content.Debug.Components := True; when CommandLineData.Option_Debug_D => CommandLineData.Content.Debug.DAG := True; when CommandLineData.Option_Debug_E => CommandLineData.Content.Debug.Expressions := True; when CommandLineData.Option_Debug_F => CommandLineData.Content.Debug.File_Names := True; when CommandLineData.Option_Debug_H => CommandLineData.Content.Debug.HTML := True; when CommandLineData.Option_Debug_I => CommandLineData.Content.Debug.Invariants := True; when CommandLineData.Option_Debug_K => CommandLineData.Content.Debug.FDL_Ranking := True; when CommandLineData.Option_Debug_L => CommandLineData.Content.Debug.Lookup_Trace := True; when CommandLineData.Option_Debug_P => CommandLineData.Content.Debug.Parser := True; when CommandLineData.Option_Debug_R => CommandLineData.Content.Debug.Rho := True; when CommandLineData.Option_Debug_T => CommandLineData.Content.Debug.Extra_Stats := True; when CommandLineData.Option_Debug_U => CommandLineData.Content.Debug.Units := True; when CommandLineData.Option_Debug_V => CommandLineData.Content.Debug.VCG := True; when CommandLineData.Option_Debug_V_Upper => CommandLineData.Content.Debug.VCG := True; CommandLineData.Content.Debug.VCG_All := True; when CommandLineData.Option_Debug_X => CommandLineData.Content.Debug.SLI := True; when others => CommandLineData.Content.Valid := False; end case; end loop; Possible_Error (E => ES_Debug); end if; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'p' => -- 2nd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Dpc) then if DPCFound then CommandLineData.Content.Valid := False; Possible_Error (E => ES_DPCRepeated); elsif FDLignoreOptionFound and then E_Strings.Is_Empty (E_Str => CommandLineData.Content.FDL_Mangle) then CommandLineData.Content.Valid := False; Possible_Error (E => ES_DPCandFDL); else CommandLineData.Content.Valid := True; end if; DPCFound := True; CommandLineData.Content.DPC := True; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when others => -- starts "d" but not valid 
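-- Anything else beginning with 'd' is rejected as an invalid option.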
CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; --# end accept; end Process_D; procedure Process_E --# global in Opt_Name; --# in Opt_Val; --# in out CommandLineData.Content; --# in out ErrorExplanationsFound; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ErrorExplanationsFound, --# Opt_Name, --# Opt_Val & --# ErrorExplanationsFound from *, --# Opt_Name; is begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Error_Explanation, "Direct updates OK here"; case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Name, Pos => 2)) is when 'x' => -- 2nd letter CommandLineData.Content.Valid := False; if Check_Option_Name (Opt_Name => Opt_Name, Str => "exp_checks") then Possible_Error (E => ES_NoMoreEXP); else Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'r' => -- 2nd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Error_Explanations) then CommandLineData.Content.Valid := not ErrorExplanationsFound; Possible_Error (E => ES_ErrorExplanations); if CommandLineData.Content.Valid then ErrorExplanationsFound := True; -- now check argument - must be one of Off, FirstOccurrence, EveryOccurrence case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Val, Pos => 1)) is when 'o' => -- 1st letter of option if Check_Option_Name (Opt_Name => Opt_Val, Str => CommandLineData.Option_Error_Explanations_Off) then CommandLineData.Content.Error_Explanation := CommandLineData.Off; else -- error involving 'off' CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_ErrorExplanationOption, F => Opt_Val); end if; when 'f' => -- 1st letter of option if Check_Option_Name (Opt_Name => Opt_Val, Str => CommandLineData.Option_Error_Explanations_First_Occurrence) then CommandLineData.Content.Error_Explanation := CommandLineData.First_Occurrence; else -- error involving 'first_occurrence' CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_ErrorExplanationOption, F => Opt_Val); end if; when 'e' => -- 1st letter of option if Check_Option_Name (Opt_Name => Opt_Val, Str => CommandLineData.Option_Error_Explanations_Every_Occurrence) then CommandLineData.Content.Error_Explanation := CommandLineData.Every_Occurrence; else -- error involving 'every_occurrence' CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_ErrorExplanationOption, F => Opt_Val); end if; when others => CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_ErrorExplanationOption, F => Opt_Val); end case; end if; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when others => CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; --# end accept; end Process_E; procedure Process_F --# global in DPCFound; --# in Opt_Name; --# in Opt_Val; --# in Opt_Val_OK; --# in VCGFound; --# in out CommandLineData.Content; --# in out FDLignoreOptionFound; --# in out FDLreserveOptionFound; --# in out FlowOptionFound; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# DPCFound, --# FDLignoreOptionFound, --# FDLreserveOptionFound, --# FlowOptionFound, --# Opt_Name, --# Opt_Val, --# Opt_Val_OK, --# VCGFound & --# FDLignoreOptionFound, --# FDLreserveOptionFound 
from *, --# Opt_Name, --# Opt_Val, --# Opt_Val_OK & --# FlowOptionFound from *, --# Opt_Name, --# Opt_Val_OK; is begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here" & --# W, 169, CommandLineData.Content.FDL_Mangle, "Direct updates OK here" & --# W, 169, CommandLineData.Content.FDL_Reserved, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Flow_Option, "Direct updates OK here"; case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Name, Pos => 2)) is when 'd' => -- 2nd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Fdl_Identifiers) then CommandLineData.Content.Valid := not (FDLreserveOptionFound or FDLignoreOptionFound); Possible_Error (E => ES_FDLoption); if Opt_Val_OK then if Check_Option_Name (Opt_Name => Opt_Val, Str => CommandLineData.Option_Fdl_Identifiers_Accept) then CommandLineData.Content.FDL_Reserved := False; CommandLineData.Content.Valid := not VCGFound and not DPCFound; if VCGFound then Possible_Error (E => ES_VCGandFDL); elsif DPCFound then Possible_Error (E => ES_DPCandFDL); end if; FDLignoreOptionFound := True; else if Check_Option_Name (Opt_Name => Opt_Val, Str => CommandLineData.Option_Fdl_Identifiers_Reject) then CommandLineData.Content.FDL_Reserved := True; FDLreserveOptionFound := True; else CommandLineData.Content.FDL_Reserved := False; -- Will not reject fdl reserved words CommandLineData.Content.FDL_Mangle := Opt_Val; -- But will mangle them FDLignoreOptionFound := True; end if; end if; else ScreenEcho.Put_Line ("Warning: The -fdl option is now deprecated. Please use -fdl=reject"); FDLreserveOptionFound := True; CommandLineData.Content.FDL_Reserved := True; end if; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'l' => -- 2nd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Flow_Analysis) then CommandLineData.Content.Valid := Opt_Val_OK and then not FlowOptionFound; Possible_Error2 (E => ES_InformationFlow, F => Opt_Val); if CommandLineData.Content.Valid then --go on to check selection FlowOptionFound := True; case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Val, Pos => 1)) is when 'a' => if Check_Option_Name (Opt_Name => Opt_Val, Str => CommandLineData.Option_Flow_Analysis_Auto) then CommandLineData.Content.Flow_Option := CommandLineData.Auto_Flow; else -- something which begins with 'a' other than auto CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InformationFlow, F => Opt_Val); end if; when 'i' => if Check_Option_Name (Opt_Name => Opt_Val, Str => CommandLineData.Option_Flow_Analysis_Information) then CommandLineData.Content.Flow_Option := CommandLineData.Info_Flow; else -- something which begins with 'i' other than information CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InformationFlow, F => Opt_Val); end if; when 'd' => if Check_Option_Name (Opt_Name => Opt_Val, Str => CommandLineData.Option_Flow_Analysis_Data) then -- Here we have a potentially valid selection flow_analysis=data. 
-- This is only allowed if information flow integrity checking has -- not been selected if (CommandLineData.Content.Info_Flow_Policy = CommandLineData.None) then CommandLineData.Content.Flow_Option := CommandLineData.Data_Flow; else CommandLineData.Content.Valid := False; Possible_Error (E => ES_DataFlowAndInfoFlowPolicy); end if; else -- something which begins with 'd' other than data CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InformationFlow, F => Opt_Val); end if; when others => -- doesn't begin with 'i' or 'd' CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InformationFlow, F => Opt_Val); end case; end if; else -- begins with 'f' but not fdl... or flow... CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when others => CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; --# end accept; end Process_F; procedure Process_G --# global in Opt_Name; --# in out CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Opt_Name; is begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Distribution_Is_Pro, "Direct updates OK here" & --# W, 169, CommandLineData.Content.GPL_Switch, "Direct updates OK here"; if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_GPL) then CommandLineData.Content.Valid := True; CommandLineData.Content.Distribution_Is_Pro := False; CommandLineData.Content.GPL_Switch := True; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; end Process_G; procedure Process_H --# global in NoReportOptionFound; --# in Opt_Name; --# in Opt_Val; --# in Opt_Val_OK; --# in XMLFound; --# in out CommandLineData.Content; --# in out HTMLFound; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content from *, --# HTMLFound, --# NoReportOptionFound, --# Opt_Name, --# Opt_Val, --# Opt_Val_OK, --# XMLFound & --# HTMLFound from *, --# Opt_Name & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# HTMLFound, --# NoReportOptionFound, --# Opt_Name, --# XMLFound; is begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here" & --# W, 169, CommandLineData.Content.HTML, "Direct updates OK here" & --# W, 169, CommandLineData.Content.HTML_Directory, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Plain_Output, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Help_Requested, "Direct updates OK here"; case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Name, Pos => 2)) is when 't' => -- 2nd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Html) then CommandLineData.Content.Valid := not (HTMLFound or else NoReportOptionFound or else XMLFound); if XMLFound then Possible_Error (E => ES_XMLandHTML); elsif NoReportOptionFound then Possible_Error (E => ES_HTMLnorep); else Possible_Error (E => ES_Html); end if; HTMLFound := True; CommandLineData.Content.HTML := True; if Opt_Val_OK then CommandLineData.Content.HTML_Directory := Opt_Val; end if; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'e' => -- 2nd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Help) then -- Examiner Help requested, so abandon all further -- 
option processing by setting Valid = False. CommandLineData.Content.Valid := False; CommandLineData.Content.Help_Requested := True; -- Override setting of PlainOutput so Help information -- always appears with Examiner version number and date. CommandLineData.Content.Plain_Output := False; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when others => CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; --# end accept; end Process_H; procedure Process_I --# global in Opt_Name; --# in Opt_Val; --# in Opt_Val_OK; --# in out CommandLineData.Content; --# in out IndexFound; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content from *, --# IndexFound, --# Opt_Name, --# Opt_Val, --# Opt_Val_OK & --# IndexFound from *, --# Opt_Name, --# Opt_Val_OK & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# IndexFound, --# Opt_Name, --# Opt_Val_OK; is File_Name : E_Strings.T; begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Index, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Index_File_Name, "Direct updates OK here"; if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Index_File) then CommandLineData.Content.Valid := Opt_Val_OK; Possible_Error (E => ES_IndexName); if Opt_Val_OK then CommandLineData.Content.Valid := not IndexFound; Possible_Error (E => ES_Index); end if; if CommandLineData.Content.Valid then IndexFound := True; CommandLineData.Content.Index := True; File_Name := Opt_Val; FileSystem.Check_Extension (Fn => File_Name, Ext => E_Strings.Copy_String (Str => CommandLineData.Default_Index_Extension)); CommandLineData.Content.Index_File_Name := File_Name; end if; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; --# end accept; end Process_I; procedure Process_J --# global in Opt_Name; --# in Opt_Val; --# in out CommandLineData.Content; --# in out JustificationOptionFound; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# JustificationOptionFound, --# Opt_Name, --# Opt_Val & --# JustificationOptionFound from *, --# Opt_Name; is begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Justification_Option, "Direct updates OK here"; if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Justification_Option) then CommandLineData.Content.Valid := not JustificationOptionFound; Possible_Error (E => ES_JustificationRepeated); if CommandLineData.Content.Valid then JustificationOptionFound := True; -- now check argument - must be one of Ignore, Full, or Brief case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Val, Pos => 1)) is when 'i' => -- 1st letter of option if Check_Option_Name (Opt_Name => Opt_Val, Str => CommandLineData.Option_Justification_Option_Ignore) then CommandLineData.Content.Justification_Option := CommandLineData.Ignore; else -- error involving 'ignore' CommandLineData.Content.Valid := False; Possible_Error (E => ES_JustificationOption); end if; when 'f' => -- 1st letter of option if Check_Option_Name (Opt_Name => Opt_Val, Str => CommandLineData.Option_Justification_Option_Full) then CommandLineData.Content.Justification_Option := CommandLineData.Full; else -- error involving 'full' 
CommandLineData.Content.Valid := False; Possible_Error (E => ES_JustificationOption); end if; when 'b' => -- 1st letter of option if Check_Option_Name (Opt_Name => Opt_Val, Str => CommandLineData.Option_Justification_Option_Brief) then CommandLineData.Content.Justification_Option := CommandLineData.Brief; else -- error involving 'brief' CommandLineData.Content.Valid := False; Possible_Error (E => ES_JustificationOption); end if; when others => CommandLineData.Content.Valid := False; Possible_Error (E => ES_JustificationOption); end case; end if; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; --# end accept; end Process_J; procedure Process_L --# global in FlowOptionFound; --# in Opt_Name; --# in Opt_Val; --# in Opt_Val_OK; --# in out CommandLineData.Content; --# in out LanguageProfileFound; --# in out ListingFound; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content from *, --# FlowOptionFound, --# LanguageProfileFound, --# ListingFound, --# Opt_Name, --# Opt_Val, --# Opt_Val_OK & --# LanguageProfileFound, --# ListingFound from *, --# Opt_Name & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# LanguageProfileFound, --# ListingFound, --# Opt_Name, --# Opt_Val, --# Opt_Val_OK; is begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Flow_Option, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Language_Profile, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Listing_Extension, "Direct updates OK here"; case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Name, Pos => 2)) is when 'i' => -- 2nd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Listing_Extension) then CommandLineData.Content.Valid := Opt_Val_OK and then not ListingFound; Possible_Error (E => ES_ListingExt); ListingFound := True; CommandLineData.Content.Listing_Extension := Opt_Val; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'a' => -- 2nd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Language) then CommandLineData.Content.Valid := not LanguageProfileFound; Possible_Error (E => ES_LanguageRepeated); if CommandLineData.Content.Valid then LanguageProfileFound := True; -- now check argument - must be one of 83, 95, 2005, or KCG case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Val, Pos => 1)) is when '8' => -- 1st letter of option if Check_Option_Name (Opt_Name => Opt_Val, Str => CommandLineData.Option_Language_83) then CommandLineData.Content.Language_Profile := CommandLineData.SPARK83; if not FlowOptionFound then CommandLineData.Content.Flow_Option := CommandLineData.Info_Flow; end if; else -- error involving '83' CommandLineData.Content.Valid := False; Possible_Error (E => ES_LanguageProfile); end if; when '9' => if Check_Option_Name (Opt_Name => Opt_Val, Str => CommandLineData.Option_Language_95) then CommandLineData.Content.Language_Profile := CommandLineData.SPARK95; else -- error involving '95' CommandLineData.Content.Valid := False; Possible_Error (E => ES_LanguageProfile); end if; when '2' => if Check_Option_Name (Opt_Name => Opt_Val, Str => CommandLineData.Option_Language_2005) then CommandLineData.Content.Language_Profile := CommandLineData.SPARK2005; else -- error involving '2005' CommandLineData.Content.Valid := False; Possible_Error (E => 
ES_LanguageProfile); end if; when 'k' => if Check_Option_Name (Opt_Name => Opt_Val, Str => CommandLineData.Option_Language_KCG) and then CommandLineData.Content.Distribution_Is_Pro then CommandLineData.Content.Language_Profile := CommandLineData.KCG; else -- error involving 'KCG' CommandLineData.Content.Valid := False; Possible_Error (E => ES_LanguageProfile); end if; when others => CommandLineData.Content.Valid := False; Possible_Error (E => ES_LanguageProfile); end case; end if; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when others => CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; --# end accept; end Process_L; procedure Process_M --# global in Opt_Name; --# in out CommandLineData.Content; --# in out Makefile_Found; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Makefile_Found, --# Opt_Name & --# Makefile_Found from *, --# Opt_Name; is begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Makefile_Mode, "Direct updates OK here"; -- There is only a single option starting with m. if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Makefile_Mode) then CommandLineData.Content.Valid := not Makefile_Found; Possible_Error (E => ES_Makefile_Repeated); if CommandLineData.Content.Valid then Makefile_Found := True; CommandLineData.Content.Makefile_Mode := True; end if; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; end Process_M; procedure Process_N --# global in DPCFound; --# in FDLreserveOptionFound; --# in HTMLFound; --# in Opt_Name; --# in VCGFound; --# in XMLFound; --# in out CommandLineData.Content; --# in out ConfigFileFound; --# in out DictFound; --# in out FDLignoreOptionFound; --# in out IndexFound; --# in out NoDurationFound; --# in out NoListingsFound; --# in out NoReportOptionFound; --# in out NoWarningFound; --# in out OutputFound; --# in out ReportFound; --# in out SPARK_IO.File_Sys; --# in out StatisticsFound; --# in out TargetDataFound; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ConfigFileFound, --# DictFound, --# DPCFound, --# FDLignoreOptionFound, --# FDLreserveOptionFound, --# HTMLFound, --# IndexFound, --# NoDurationFound, --# NoListingsFound, --# NoWarningFound, --# Opt_Name, --# OutputFound, --# ReportFound, --# StatisticsFound, --# TargetDataFound, --# VCGFound, --# XMLFound & --# ConfigFileFound, --# DictFound, --# FDLignoreOptionFound, --# IndexFound, --# NoDurationFound, --# NoListingsFound, --# NoReportOptionFound, --# NoWarningFound, --# OutputFound, --# ReportFound, --# StatisticsFound, --# TargetDataFound from *, --# Opt_Name; is begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Index, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Warning, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Echo, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Generate_SLI, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Report, "Direct updates OK here" & --# W, 169, CommandLineData.Content.No_Duration, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Write_Dict, "Direct updates OK here" & --# W, 169, 
CommandLineData.Content.Write_Statistics, "Direct updates OK here" & --# W, 169, CommandLineData.Content.FDL_Reserved, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Target_Data , "Direct updates OK here" & --# W, 169, CommandLineData.Content.Target_Config, "Direct updates OK here" & --# W, 169, CommandLineData.Content.No_Listings, "Direct updates OK here"; case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Name, Pos => 3)) is when 'i' => -- 3rd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => "no" & CommandLineData.Option_Index_File) then CommandLineData.Content.Valid := not IndexFound; Possible_Error (E => ES_NoIndex); IndexFound := True; CommandLineData.Content.Index := False; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'w' => -- 3rd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => "no" & CommandLineData.Option_Warning_File) then CommandLineData.Content.Valid := not NoWarningFound; Possible_Error (E => ES_NoWarning); NoWarningFound := True; CommandLineData.Content.Warning := False; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'e' => -- 3rd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_No_Echo) then CommandLineData.Content.Valid := not OutputFound; Possible_Error (E => ES_NoEcho); OutputFound := True; CommandLineData.Content.Echo := False; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'r' => -- 3rd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => "no" & CommandLineData.Option_Report_File) then CommandLineData.Content.Valid := not (ReportFound or else HTMLFound or else XMLFound); Possible_Error (E => ES_NoReport); ReportFound := True; NoReportOptionFound := True; CommandLineData.Content.Report := False; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'd' => -- 3rd letter case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Name, Pos => 4)) is when 'i' => -- 4th letter if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_No_Dictionary) then CommandLineData.Content.Valid := not DictFound; Possible_Error (E => ES_NoDict); DictFound := True; CommandLineData.Content.Write_Dict := False; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'u' => -- 4th letter if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_No_Duration) then CommandLineData.Content.Valid := not NoDurationFound; Possible_Error (E => ES_NoDuration); NoDurationFound := True; CommandLineData.Content.No_Duration := True; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when others => CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; when 's' => -- 3rd letter case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Name, Pos => 4)) is when 'l' => -- 4th letter if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_No_Sli) then CommandLineData.Content.Generate_SLI := False; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 't' => -- 4th letter if Check_Option_Name (Opt_Name => Opt_Name, Str => "no" & 
CommandLineData.Option_Statistics) then CommandLineData.Content.Valid := not StatisticsFound; Possible_Error (E => ES_NoStatistics); StatisticsFound := True; CommandLineData.Content.Write_Statistics := False; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'w' => -- 4th Letter -- -noswitch -- -- Already processed by Ignore_Default_Switch_File below, -- so allow but ignore here. if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_No_Switch) then null; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when others => CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; when 'f' => -- 3rd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => "no" & CommandLineData.Option_Fdl_Identifiers) then CommandLineData.Content.Valid := not (FDLreserveOptionFound or else FDLignoreOptionFound or else DPCFound or else VCGFound); Possible_Error (E => ES_FDLoption); FDLignoreOptionFound := True; CommandLineData.Content.FDL_Reserved := False; ScreenEcho.Put_Line ("Warning: the -nofdl option is now deprecated. Please use -fdl=accept"); else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 't' => -- 3rd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => "no" & CommandLineData.Option_Target_Compiler_Data) then CommandLineData.Content.Valid := not TargetDataFound; Possible_Error (E => ES_NoTargetData); TargetDataFound := True; CommandLineData.Content.Target_Data := False; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'c' => -- 3rd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => "no" & CommandLineData.Option_Config_File) then CommandLineData.Content.Valid := not ConfigFileFound; Possible_Error (E => ES_NoConfigFile); ConfigFileFound := True; CommandLineData.Content.Target_Config := False; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'l' => -- 3rd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_No_Listings) then CommandLineData.Content.Valid := not NoListingsFound; Possible_Error (E => ES_NoListings); NoListingsFound := True; CommandLineData.Content.No_Listings := True; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when others => CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; --# end accept; end Process_N; procedure Process_O --# global in Opt_Name; --# in Opt_Val; --# in Opt_Val_OK; --# in out CommandLineData.Content; --# in out OriginalFlowErrorsFound; --# in out OutputDirectoryFound; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content from *, --# Opt_Name, --# Opt_Val, --# Opt_Val_OK, --# OriginalFlowErrorsFound, --# OutputDirectoryFound & --# OriginalFlowErrorsFound from *, --# Opt_Name & --# OutputDirectoryFound from *, --# Opt_Name, --# Opt_Val_OK & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Opt_Name, --# Opt_Val_OK, --# OriginalFlowErrorsFound, --# OutputDirectoryFound; is begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Output_Directory, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Output_Directory_Name, "Direct updates OK 
here" & --# W, 169, CommandLineData.Content.Legacy_Errors, "Direct updates OK here"; case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Name, Pos => 2)) is when 'r' => -- 2nd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Original_Flow_Errors) then CommandLineData.Content.Valid := not OriginalFlowErrorsFound; Possible_Error (E => ES_OriginalFlowErrors); OriginalFlowErrorsFound := True; CommandLineData.Content.Legacy_Errors := True; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'u' => -- 2nd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Output_Directory) then CommandLineData.Content.Valid := Opt_Val_OK; Possible_Error (E => ES_OutputDir); if Opt_Val_OK then CommandLineData.Content.Valid := not OutputDirectoryFound; Possible_Error (E => ES_OutputDirRepeated); end if; if CommandLineData.Content.Valid then OutputDirectoryFound := True; CommandLineData.Content.Output_Directory := True; CommandLineData.Content.Output_Directory_Name := Opt_Val; end if; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when others => CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; --# end accept; end Process_O; procedure Process_P --# global in Opt_Name; --# in Opt_Val; --# in out CommandLineData.Content; --# in out ConcurrencyProfileFound; --# in out PlainOutputFound; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ConcurrencyProfileFound, --# Opt_Name, --# Opt_Val, --# PlainOutputFound & --# ConcurrencyProfileFound, --# PlainOutputFound from *, --# Opt_Name; is begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Plain_Output, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Info_Flow_Policy, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Concurrency_Profile, "Direct updates OK here"; case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Name, Pos => 2)) is when 'f' => -- 2nd letter CommandLineData.Content.Valid := False; if Check_Option_Name (Opt_Name => Opt_Name, Str => "pfs") then Possible_Error (E => ES_NoMorePFS); else Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'l' => -- 2nd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Plain_Output) then CommandLineData.Content.Valid := not PlainOutputFound; Possible_Error (E => ES_PlainOutput); PlainOutputFound := True; CommandLineData.Content.Plain_Output := True; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'r' => -- 2nd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Profile) then CommandLineData.Content.Valid := not ConcurrencyProfileFound; Possible_Error (E => ES_Profile); if CommandLineData.Content.Valid then ConcurrencyProfileFound := True; case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Val, Pos => 1)) is when 'r' => -- 1st letter of option if Check_Option_Name (Opt_Name => Opt_Val, Str => CommandLineData.Option_Profile_Ravenscar) then CommandLineData.Content.Concurrency_Profile := CommandLineData.Ravenscar; else -- other error involving ravenscar CommandLineData.Content.Valid := False; Possible_Error2 (E => 
ES_ProfileOption, F => Opt_Val); end if; when 's' => -- 1st letter of option if Check_Option_Name (Opt_Name => Opt_Val, Str => CommandLineData.Option_Profile_Sequential) then CommandLineData.Content.Concurrency_Profile := CommandLineData.Sequential; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_ProfileOption, F => Opt_Val); end if; when others => CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_ProfileOption, F => Opt_Val); end case; end if; else -- begins with "pr" but not "profile" CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'o' => -- 2nd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Policy) then CommandLineData.Content.Valid := CommandLineData.Content.Flow_Option = CommandLineData.Info_Flow; Possible_Error (E => ES_DataFlowAndInfoFlowPolicy); if CommandLineData.Content.Valid then case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Val, Pos => 1)) is when 's' => -- 1st letter of option case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Val, Pos => 2)) is when 'e' => if Check_Option_Name (Opt_Name => Opt_Val, Str => CommandLineData.Option_Policy_Security) then CommandLineData.Content.Info_Flow_Policy := CommandLineData.Security; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_ProfileOption, F => Opt_Val); end if; when 'a' => if Check_Option_Name (Opt_Name => Opt_Val, Str => CommandLineData.Option_Policy_Safety) then CommandLineData.Content.Info_Flow_Policy := CommandLineData.Safety; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_ProfileOption, F => Opt_Val); end if; when others => CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_ProfileOption, F => Opt_Val); end case; when others => CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_ProfileOption, F => Opt_Val); end case; end if; end if; -- begin with "p" but not a valid option when others => CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; --# end accept; end Process_P; procedure Process_R --# global in Opt_Name; --# in Opt_Val; --# in Opt_Val_OK; --# in out CommandLineData.Content; --# in out ReportFound; --# in out RulesFound; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Opt_Name, --# Opt_Val, --# Opt_Val_OK, --# ReportFound, --# RulesFound & --# ReportFound from *, --# Opt_Name, --# Opt_Val_OK & --# RulesFound from *, --# Opt_Name; is Default_Report_Extension : constant String := "rep"; File_Name : E_Strings.T; begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Report, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Report_File_Name, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Constant_Rules, "Direct updates OK here"; case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Name, Pos => 2)) is when 'e' => -- 2nd letter case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Name, Pos => 3)) is when 'p' => -- 3rd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Report_File) then CommandLineData.Content.Valid := Opt_Val_OK; Possible_Error (E => ES_ReportName); if Opt_Val_OK then CommandLineData.Content.Valid := not ReportFound; Possible_Error (E => ES_Report); end if; if 
CommandLineData.Content.Valid then ReportFound := True; CommandLineData.Content.Report := True; File_Name := Opt_Val; FileSystem.Check_Extension (Fn => File_Name, Ext => E_Strings.Copy_String (Str => Default_Report_Extension)); CommandLineData.Content.Report_File_Name := File_Name; end if; else -- starts "rep" but is not valid CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'a' => -- 3rd letter CommandLineData.Content.Valid := False; if Check_Option_Name (Opt_Name => Opt_Name, Str => "realrtcs") then Possible_Error (E => ES_NoMoreReal); else Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when others => -- starts "re" but isn't valid CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; -- 3rd letter when 't' => -- 2nd letter CommandLineData.Content.Valid := False; if Check_Option_Name (Opt_Name => Opt_Name, Str => "rtc") then Possible_Error (E => ES_NoMoreRTC); else Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'u' => -- 2nd letter u - should be "rules" switch if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Rules) then if RulesFound then CommandLineData.Content.Valid := False; Possible_Error (E => ES_Rules); else RulesFound := True; CommandLineData.Content.Valid := Opt_Val_OK; if CommandLineData.Content.Valid then case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Val, Pos => 1)) is when 'n' => -- 1st letter of option if Check_Option_Name (Opt_Name => Opt_Val, Str => CommandLineData.Option_Rules_None) then CommandLineData.Content.Constant_Rules := CommandLineData.No_Rules; else CommandLineData.Content.Valid := False; Possible_Error (E => ES_RuleUnknown); end if; when 'l' => -- 1st letter of option if Check_Option_Name (Opt_Name => Opt_Val, Str => CommandLineData.Option_Rules_Lazy) then CommandLineData.Content.Constant_Rules := CommandLineData.Lazy; else CommandLineData.Content.Valid := False; Possible_Error (E => ES_RuleUnknown); end if; when 'k' => -- 1st letter of option if Check_Option_Name (Opt_Name => Opt_Val, Str => CommandLineData.Option_Rules_Keen) then CommandLineData.Content.Constant_Rules := CommandLineData.Keen; else CommandLineData.Content.Valid := False; Possible_Error (E => ES_RuleUnknown); end if; when 'a' => -- 1st letter of option if Check_Option_Name (Opt_Name => Opt_Val, Str => CommandLineData.Option_Rules_All) then CommandLineData.Content.Constant_Rules := CommandLineData.All_Rules; else CommandLineData.Content.Valid := False; Possible_Error (E => ES_RuleUnknown); end if; when others => CommandLineData.Content.Valid := False; Possible_Error (E => ES_RuleUnknown); end case; else CommandLineData.Content.Valid := False; Possible_Error (E => ES_RuleUnknown); end if; end if; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when others => -- starts "r" but not valid CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; --# end accept; end Process_R; procedure Process_S --# global in Opt_Name; --# in Opt_Val; --# in Opt_Val_OK; --# in out CommandLineData.Content; --# in out SourceFound; --# in out SPARK_IO.File_Sys; --# in out StatisticsFound; --# in out SyntaxCheckFound; --# derives CommandLineData.Content from *, --# Opt_Name, --# Opt_Val, --# Opt_Val_OK, --# SourceFound, --# StatisticsFound, --# SyntaxCheckFound & --# SourceFound, --# StatisticsFound, --# 
SyntaxCheckFound from *, --# Opt_Name & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Opt_Name, --# Opt_Val_OK, --# SourceFound, --# StatisticsFound, --# SyntaxCheckFound; is begin --# accept W, 169, CommandLineData.Content.Write_Statistics, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Source_Extension, "Direct updates OK here" & --# W, 169, CommandLineData.Content.SPARK_Lib, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Syntax_Only, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Valid, "Direct updates OK here"; case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Name, Pos => 2)) is when 'o' => if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Source_Extension) then CommandLineData.Content.Valid := Opt_Val_OK and then not SourceFound; Possible_Error (E => ES_SourceExt); SourceFound := True; CommandLineData.Content.Source_Extension := Opt_Val; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'p' => if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_SPARK_Lib) then CommandLineData.Content.SPARK_Lib := True; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 't' => if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Statistics) then CommandLineData.Content.Valid := not StatisticsFound; Possible_Error (E => ES_Statistics); StatisticsFound := True; CommandLineData.Content.Write_Statistics := True; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'y' => if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Syntax_Check) then CommandLineData.Content.Valid := not SyntaxCheckFound; Possible_Error (E => ES_Syntax); SyntaxCheckFound := True; CommandLineData.Content.Syntax_Only := True; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when others => CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; --# end accept; end Process_S; procedure Process_T --# global in ConfigFileFound; --# in Opt_Name; --# in Opt_Val; --# in Opt_Val_OK; --# in out CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# in out TargetDataFound; --# derives CommandLineData.Content from *, --# ConfigFileFound, --# Opt_Name, --# Opt_Val, --# Opt_Val_OK, --# TargetDataFound & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ConfigFileFound, --# Opt_Name, --# Opt_Val_OK, --# TargetDataFound & --# TargetDataFound from *, --# ConfigFileFound, --# Opt_Name, --# Opt_Val_OK; is File_Name : E_Strings.T; begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Target_Data, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Target_Data_File, "Direct updates OK here"; case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Name, Pos => 2)) is -- -tree is removed in release 8.1.1 and above, but it remains -- the default, so at least recognize it here and take no action -- for backward compatibility with users' existing scripts. 
when 'r' => -- 2nd letter if not Check_Option_Name (Opt_Name => Opt_Name, Str => "tree") then CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'a' => -- 2nd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Target_Compiler_Data) then if ConfigFileFound then CommandLineData.Content.Valid := False; Possible_Error (E => ES_DataAndConfig); else CommandLineData.Content.Valid := Opt_Val_OK; Possible_Error (E => ES_TargetDataName); if Opt_Val_OK then CommandLineData.Content.Valid := not TargetDataFound; Possible_Error (E => ES_TargetData); end if; if CommandLineData.Content.Valid then TargetDataFound := True; CommandLineData.Content.Target_Data := True; File_Name := Opt_Val; FileSystem.Check_Extension (Fn => File_Name, Ext => E_Strings.Copy_String (Str => CommandLineData.Default_Data_Extension)); CommandLineData.Content.Target_Data_File := File_Name; end if; end if; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when others => CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; --# end accept; end Process_T; procedure Process_V --# global in FDLignoreOptionFound; --# in Opt_Name; --# in out CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# in out VCGFound; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# FDLignoreOptionFound, --# Opt_Name, --# VCGFound & --# VCGFound from *, --# Opt_Name; is begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here" & --# W, 169, CommandLineData.Content.VCG, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Plain_Output, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Version_Requested, "Direct updates OK here"; case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Name, Pos => 2)) is when 'c' => -- 2nd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Vcg) then if VCGFound then CommandLineData.Content.Valid := False; Possible_Error (E => ES_VCGRepeated); elsif FDLignoreOptionFound and then E_Strings.Is_Empty (E_Str => CommandLineData.Content.FDL_Mangle) then CommandLineData.Content.Valid := False; Possible_Error (E => ES_VCGandFDL); else CommandLineData.Content.Valid := True; end if; VCGFound := True; CommandLineData.Content.VCG := True; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'e' => -- 2nd letter if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Version) then -- Examiner version requested, so abandon all further -- option processing by setting Valid = False. 
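-- Plain_Output is also cleared so that the version information always appears in full (as is done when Help is requested).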
CommandLineData.Content.Valid := False; CommandLineData.Content.Version_Requested := True; CommandLineData.Content.Plain_Output := False; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when others => CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; --# end accept; end Process_V; procedure Process_W --# global in Opt_Name; --# in Opt_Val; --# in Opt_Val_OK; --# in out CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# in out WarningFound; --# derives CommandLineData.Content from *, --# Opt_Name, --# Opt_Val, --# Opt_Val_OK, --# WarningFound & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Opt_Name, --# Opt_Val_OK, --# WarningFound & --# WarningFound from *, --# Opt_Name, --# Opt_Val_OK; is File_Name : E_Strings.T; begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Warning, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Warning_File_Name, "Direct updates OK here"; if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Warning_File) then CommandLineData.Content.Valid := Opt_Val_OK; Possible_Error (E => ES_WarningName); if Opt_Val_OK then CommandLineData.Content.Valid := not WarningFound; Possible_Error (E => ES_Warning); end if; if CommandLineData.Content.Valid then WarningFound := True; CommandLineData.Content.Warning := True; File_Name := Opt_Val; FileSystem.Check_Extension (Fn => File_Name, Ext => E_Strings.Copy_String (Str => CommandLineData.Default_Warning_Extension)); CommandLineData.Content.Warning_File_Name := File_Name; end if; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; --# end accept; end Process_W; procedure Process_X --# global in HTMLFound; --# in NoReportOptionFound; --# in Opt_Name; --# in out CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# in out XMLFound; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# HTMLFound, --# NoReportOptionFound, --# Opt_Name, --# XMLFound & --# XMLFound from *, --# Opt_Name; is begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here" & --# W, 169, CommandLineData.Content.XML, "Direct updates OK here"; if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Xml) then CommandLineData.Content.Valid := not (XMLFound or else NoReportOptionFound or else HTMLFound); if HTMLFound then Possible_Error (E => ES_XMLandHTML); elsif XMLFound then Possible_Error (E => ES_XML); else Possible_Error (E => ES_XMLnorep); end if; XMLFound := True; if CommandLineData.Content.Valid then CommandLineData.Content.XML := True; end if; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; --# end accept; end Process_X; begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here"; loop exit when Next_Symbol.Typ /= S_Switch_Character; Read_Option (Opt_Name => Opt_Name, Opt_Name_OK => Opt_Name_OK, Opt_Val => Opt_Val, Opt_Val_OK => Opt_Val_OK, Command_String => Command_String, Next_Symbol => Next_Symbol); if Opt_Name_OK then case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Name, Pos => 1)) is when 'a' => Process_A; when 'b' => Process_B; when 'c' => Process_C; when 'd' => Process_D; when 'e' => Process_E; when 'f' => Process_F; when 'h' => Process_H; when 'g' => Process_G; 
when 'i' => Process_I; when 'j' => Process_J; when 'l' => Process_L; when 'm' => Process_M; when 'n' => Process_N; when 'o' => Process_O; when 'p' => Process_P; when 'r' => Process_R; when 's' => Process_S; when 't' => Process_T; when 'v' => Process_V; when 'w' => Process_W; when 'x' => Process_X; when others => CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; exit when not CommandLineData.Content.Valid; end loop; --# end accept; end Parse_Command_Options; procedure Parse_Command_Line (Command_String : in Command_Strings) --# global in out CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Command_String; is Next_Symbol : Symbols; Local_Command_String : Command_Strings; procedure Parse_Arguments (Command_String : in Command_Strings; Next_Symbol : in Symbols) --# global in out CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Command_String, --# Next_Symbol; -- pre Next_Symbol.Typ in {SString, SEmpty}; is Local_Next_Symbol : Symbols; Local_Command_String : Command_Strings; procedure Parse_File_Entry (Command_String : in out Command_Strings; Next_Symbol : in out Symbols) --# global in out CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Command_String, --# Next_Symbol & --# Command_String, --# Next_Symbol from Command_String, --# Next_Symbol; -- pre Next_Symbol.Typ = SString; -- post Next_Symbol.Typ in {SComma, SEmpty}; is File_Name : E_Strings.T; procedure Parse_Argument_Option (Command_String : in out Command_Strings; Next_Symbol : in out Symbols) --# global in out CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Command_String, --# Next_Symbol & --# Command_String, --# Next_Symbol from Command_String, --# Next_Symbol; is File_Name : E_Strings.T; Do_List : Boolean; OK : Boolean; procedure Read_Argument_Option (Listing_File_Name : out E_Strings.T; Listing : out Boolean; OK : out Boolean; Command_String : in out Command_Strings; Next_Symbol : out Symbols) --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives Command_String, --# Listing, --# Listing_File_Name, --# Next_Symbol, --# OK from Command_String & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Command_String; -- pre Next_Symbol.Typ = SSlash; -- post Next_Symbol.Typ in {SComma, SEmpty}; is Opt_Name : E_Strings.T; Opt_Val : E_Strings.T; Opt_Name_OK : Boolean; Opt_Val_OK : Boolean; begin Listing_File_Name := E_Strings.Empty_String; Listing := False; Read_Option (Opt_Name => Opt_Name, Opt_Name_OK => Opt_Name_OK, Opt_Val => Opt_Val, Opt_Val_OK => Opt_Val_OK, Command_String => Command_String, Next_Symbol => Next_Symbol); case Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Opt_Name, Pos => 1)) is when 'l' => if Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_Listing_File) then OK := Opt_Name_OK and then Opt_Val_OK; Listing := True; Listing_File_Name := Opt_Val; else OK := False; Listing := False; Output_Error (E => ES_InvalidOption); ScreenEcho.New_Line (1); end if; when 'n' => if 
Check_Option_Name (Opt_Name => Opt_Name, Str => CommandLineData.Option_No_Listing_File) then OK := Opt_Name_OK and then not Opt_Val_OK; else OK := False; Output_Error (E => ES_InvalidOption); ScreenEcho.New_Line (1); end if; Listing := False; when others => OK := False; end case; end Read_Argument_Option; begin --# accept W, 169, CommandLineData.Content.Source_File_List, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Valid, "Direct updates OK here"; if Next_Symbol.Typ = S_Switch_Character then Read_Argument_Option (Listing_File_Name => File_Name, Listing => Do_List, OK => OK, Command_String => Command_String, Next_Symbol => Next_Symbol); CommandLineData.Content.Source_File_List (CommandLineData.Content.Number_Source).Listing_File_Name := File_Name; CommandLineData.Content.Source_File_List (CommandLineData.Content.Number_Source).Listing := Do_List; CommandLineData.Content.Valid := OK; Possible_Error (E => ES_ListingFile); else CommandLineData.Content.Source_File_List (CommandLineData.Content.Number_Source).Listing := True; CommandLineData.Content.Source_File_List (CommandLineData.Content.Number_Source).Listing_File_Name := FileSystem.Just_File (Fn => CommandLineData.Content.Source_File_List (CommandLineData.Content.Number_Source).Source_File_Name, Ext => False); end if; if CommandLineData.Content.Valid and then CommandLineData.Content.Source_File_List (CommandLineData.Content.Number_Source).Listing then File_Name := CommandLineData.Content.Source_File_List (CommandLineData.Content.Number_Source).Listing_File_Name; FileSystem.Check_Listing_Extension (Source_Name => CommandLineData.Content.Source_File_List (CommandLineData.Content.Number_Source).Source_File_Name, Fn => File_Name, Ext => CommandLineData.Content.Listing_Extension); CommandLineData.Content.Source_File_List (CommandLineData.Content.Number_Source).Listing_File_Name := File_Name; end if; --# end accept; end Parse_Argument_Option; begin --# accept W, 169, CommandLineData.Content.Source_File_List, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Valid, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Number_Source, "Direct updates OK here"; if Next_Symbol.Typ = S_String then CommandLineData.Content.Number_Source := CommandLineData.Content.Number_Source + 1; File_Name := Next_Symbol.The_String; if E_Strings.Get_Element (E_Str => File_Name, Pos => 1) = '@' then FileSystem.Check_Extension (Fn => File_Name, Ext => E_Strings.Copy_String (Str => CommandLineData.Meta_File_Extension)); CommandLineData.Content.Source_File_List (CommandLineData.Content.Number_Source).Source_File_Name := File_Name; CommandLineData.Content.Source_File_List (CommandLineData.Content.Number_Source).Listing := False; Get_Next_Symbol (Command_String => Command_String, Next_Symbol => Next_Symbol); else FileSystem.Check_Extension (Fn => File_Name, Ext => CommandLineData.Content.Source_Extension); CommandLineData.Content.Source_File_List (CommandLineData.Content.Number_Source).Source_File_Name := File_Name; Get_Next_Symbol (Command_String => Command_String, Next_Symbol => Next_Symbol); Parse_Argument_Option (Command_String => Command_String, Next_Symbol => Next_Symbol); end if; else CommandLineData.Content.Valid := False; Possible_Error (E => ES_Source); end if; --# end accept; end Parse_File_Entry; begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Number_Source, "Direct updates OK here"; Local_Next_Symbol := Next_Symbol; Local_Command_String := 
Command_String; CommandLineData.Content.Number_Source := 0; loop Parse_File_Entry (Command_String => Local_Command_String, Next_Symbol => Local_Next_Symbol); exit when Local_Next_Symbol.Typ = S_Empty or else not CommandLineData.Content.Valid; if CommandLineData.Content.Number_Source = ExaminerConstants.MaxFilesOnCommandLine then CommandLineData.Content.Valid := False; Output_Error (E => EW_Too_Many); exit; end if; if FileSystem.Use_Windows_Command_Line and then Local_Next_Symbol.Typ = S_Comma then Get_Next_Symbol (Command_String => Local_Command_String, Next_Symbol => Local_Next_Symbol); end if; end loop; --# end accept; end Parse_Arguments; begin Local_Command_String := Command_String; Get_Next_Symbol (Command_String => Local_Command_String, Next_Symbol => Next_Symbol); Parse_Command_Options (Command_String => Local_Command_String, Next_Symbol => Next_Symbol); if CommandLineData.Content.Valid then Parse_Arguments (Command_String => Local_Command_String, Next_Symbol => Next_Symbol); end if; end Parse_Command_Line; ---------------------------------------------------------------------- -- Does a quick "look ahead" through all the command-line -- switches to check for the presence of "-noswitch", "/noswitch" -- or any unambiguous abbreviation thereof. -- -- Uses Ada.Command_Line directly, so is hidden from SPARK. ---------------------------------------------------------------------- function Ignore_Default_Switch_File return Boolean --# global in SPARK_IO.File_Sys; is --# hide Ignore_Default_Switch_File; Result : Boolean; Arg_Count : constant Natural := Ada.Command_Line.Argument_Count; begin Result := False; for I in Positive range 1 .. Arg_Count loop declare Arg : constant String := Ada.Command_Line.Argument (I); begin if Arg'Length >= 5 then declare Prefix : constant Character := Arg (1); Switch : constant String := String (Arg (2 .. Arg'Last)); Prefix_Legal : Boolean; begin Prefix_Legal := (Prefix = '-'); if Prefix_Legal and then (Switch = "noswitch" or else Switch = "noswitc" or else Switch = "noswit" or else Switch = "noswi" or else Switch = "nosw") then Result := True; exit; end if; end; end if; end; end loop; return Result; end Ignore_Default_Switch_File; procedure Process is Command_String : Command_Strings; procedure Wff_Command_Line --# global in out CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from CommandLineData.Content, --# SPARK_IO.File_Sys; is procedure Wff_Non_Overlapping --# global in out CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content; is function Exists_In (F : E_Strings.T; L : CommandLineData.Source_File_Lists; Start_Pos : CommandLineData.Source_File_Positions; Finish_Pos : CommandLineData.Source_File_Positions) return Boolean is Result : Boolean; begin Result := False; for Ix in CommandLineData.Source_File_Positions range Start_Pos .. 
Finish_Pos loop Result := FileSystem.Same_File (F1 => F, F2 => L (Ix).Source_File_Name); exit when Result; if L (Ix).Listing then Result := FileSystem.Same_File (F1 => F, F2 => L (Ix).Listing_File_Name); end if; exit when Result; end loop; return Result; end Exists_In; procedure Index_Non_Overlapping --# global in out CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content; is begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here"; if CommandLineData.Content.Index then CommandLineData.Content.Valid := not FileSystem.Same_File (F1 => CommandLineData.Content.Index_File_Name, F2 => CommandLineData.Content.Report_File_Name) and then not (CommandLineData.Content.Warning and then FileSystem.Same_File (F1 => CommandLineData.Content.Index_File_Name, F2 => CommandLineData.Content.Warning_File_Name)) and then not Exists_In (F => CommandLineData.Content.Index_File_Name, L => CommandLineData.Content.Source_File_List, Start_Pos => 1, Finish_Pos => CommandLineData.Content.Number_Source); Possible_Error2 (E => EW_Overlap, F => CommandLineData.Content.Index_File_Name); end if; --# end accept; end Index_Non_Overlapping; procedure Warning_Non_Overlapping --# global in out CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content; is begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here"; if CommandLineData.Content.Warning then CommandLineData.Content.Valid := not FileSystem.Same_File (F1 => CommandLineData.Content.Warning_File_Name, F2 => CommandLineData.Content.Report_File_Name) and then not Exists_In (F => CommandLineData.Content.Warning_File_Name, L => CommandLineData.Content.Source_File_List, Start_Pos => 1, Finish_Pos => CommandLineData.Content.Number_Source); Possible_Error2 (E => EW_Overlap, F => CommandLineData.Content.Warning_File_Name); end if; --# end accept; end Warning_Non_Overlapping; procedure Report_Non_Overlapping --# global in out CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content; is begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here"; if CommandLineData.Content.Report then CommandLineData.Content.Valid := not Exists_In (F => CommandLineData.Content.Report_File_Name, L => CommandLineData.Content.Source_File_List, Start_Pos => 1, Finish_Pos => CommandLineData.Content.Number_Source); Possible_Error2 (E => EW_Overlap, F => CommandLineData.Content.Report_File_Name); end if; --# end accept; end Report_Non_Overlapping; procedure Source_File_List_Non_Overlapping --# global in out CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content; is begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here"; for Ix in CommandLineData.Source_File_Positions range 1 .. 
(CommandLineData.Content.Number_Source - 1) loop CommandLineData.Content.Valid := not Exists_In (F => CommandLineData.Content.Source_File_List (Ix).Source_File_Name, L => CommandLineData.Content.Source_File_List, Start_Pos => Ix + 1, Finish_Pos => CommandLineData.Content.Number_Source); Possible_Error2 (E => EW_Overlap, F => CommandLineData.Content.Source_File_List (Ix).Source_File_Name); exit when not CommandLineData.Content.Valid; if CommandLineData.Content.Source_File_List (Ix).Listing then CommandLineData.Content.Valid := not FileSystem.Same_File (F1 => CommandLineData.Content.Source_File_List (Ix).Source_File_Name, F2 => CommandLineData.Content.Source_File_List (Ix).Listing_File_Name) and then not Exists_In (F => CommandLineData.Content.Source_File_List (Ix).Source_File_Name, L => CommandLineData.Content.Source_File_List, Start_Pos => Ix + 1, Finish_Pos => CommandLineData.Content.Number_Source); Possible_Error2 (E => EW_Overlap, F => CommandLineData.Content.Source_File_List (Ix).Source_File_Name); end if; exit when not CommandLineData.Content.Valid; end loop; if CommandLineData.Content.Valid and then CommandLineData.Content.Number_Source /= 0 then CommandLineData.Content.Valid := not FileSystem.Same_File (F1 => CommandLineData.Content.Source_File_List (CommandLineData.Content.Number_Source).Source_File_Name, F2 => CommandLineData.Content.Source_File_List (CommandLineData.Content.Number_Source).Listing_File_Name); Possible_Error2 (E => EW_Overlap, F => CommandLineData.Content.Source_File_List (CommandLineData.Content.Number_Source).Source_File_Name); end if; --# end accept; end Source_File_List_Non_Overlapping; begin Index_Non_Overlapping; if CommandLineData.Content.Valid then Warning_Non_Overlapping; end if; if CommandLineData.Content.Valid then Report_Non_Overlapping; end if; if CommandLineData.Content.Valid then Source_File_List_Non_Overlapping; end if; end Wff_Non_Overlapping; function Strip_Any_At (S : E_Strings.T) return E_Strings.T is Result : E_Strings.T; begin if E_Strings.Get_Element (E_Str => S, Pos => 1) = '@' then Result := E_Strings.Section (E_Str => S, Start_Pos => 2, Length => E_Strings.Get_Length (E_Str => S) - 1); else Result := S; end if; return Result; end Strip_Any_At; function Adjust_Filename (S : E_Strings.T) return E_Strings.T --# global in CommandLineData.Content; is R : E_Strings.T; begin if CommandLineData.Content.Plain_Output then R := FileSystem.Just_File (Fn => S, Ext => True); else R := S; end if; return R; end Adjust_Filename; procedure Check_Output_Directory_Exists --# global in out CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from CommandLineData.Content, --# SPARK_IO.File_Sys; is begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here" & --# W, 169, CommandLineData.Content, "Direct updates OK here"; if CommandLineData.Content.Valid and then CommandLineData.Content.Output_Directory then if FileSystem.Is_Directory (F => CommandLineData.Content.Output_Directory_Name) then -- Make sure it ends with an EndOfPath separator, so it can be used -- later in a call to FileSystem.InterpretRelative FileSystem.Append_End_Of_Path_If_Needed (D => CommandLineData.Content.Output_Directory_Name); else CommandLineData.Content.Valid := False; Possible_Error2 (E => ES_OutputDirNotFound, F => Adjust_Filename (S => CommandLineData.Content.Output_Directory_Name)); end if; end if; --# end accept; end Check_Output_Directory_Exists; procedure Check_Language_Profile --# global in out 
CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content; is begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here"; if CommandLineData.Content.Language_Profile = CommandLineData.SPARK83 then -- Ravenscar is not permitted in SPARK83 mode if CommandLineData.Content.Concurrency_Profile = CommandLineData.Ravenscar then CommandLineData.Content.Valid := False; Possible_Error (E => ES_NoRavenscarInSPARK83); end if; -- -sparklib is not permitted in SPARK83 mode if CommandLineData.Content.SPARK_Lib then CommandLineData.Content.Valid := False; Possible_Error (E => ES_NoSPARKLibInSPARK83); end if; -- -flow=auto is not permitted in SPARK83 mode if CommandLineData.Content.Flow_Option = CommandLineData.Auto_Flow then CommandLineData.Content.Valid := False; Possible_Error (E => ES_NoAutoFlowInSPARK83); end if; end if; --# end accept; end Check_Language_Profile; begin --# accept W, 169, CommandLineData.Content.Valid, "Direct updates OK here"; -- Check index file readable if CommandLineData.Content.Valid and then CommandLineData.Content.Index then CommandLineData.Content.Valid := FileSystem.Is_Readable_File (F => CommandLineData.Content.Index_File_Name); Possible_Error2 (E => EW_Index, F => Adjust_Filename (S => CommandLineData.Content.Index_File_Name)); end if; --# assert True; -- Check warning control file readable if CommandLineData.Content.Valid and then CommandLineData.Content.Warning then CommandLineData.Content.Valid := FileSystem.Is_Readable_File (F => CommandLineData.Content.Warning_File_Name); Possible_Error2 (E => EW_Warning, F => Adjust_Filename (S => CommandLineData.Content.Warning_File_Name)); end if; --# assert True; -- Check target data file readable if CommandLineData.Content.Valid and then CommandLineData.Content.Target_Data then CommandLineData.Content.Valid := FileSystem.Is_Readable_File (F => CommandLineData.Content.Target_Data_File); Possible_Error2 (E => EW_Target, F => Adjust_Filename (S => CommandLineData.Content.Target_Data_File)); end if; --# assert True; -- Check config file readable if CommandLineData.Content.Valid and then CommandLineData.Content.Target_Config then CommandLineData.Content.Valid := FileSystem.Is_Readable_File (F => CommandLineData.Content.Target_Config_File); Possible_Error2 (E => EW_Config, F => Adjust_Filename (S => CommandLineData.Content.Target_Config_File)); end if; --# assert True; -- Check source files readable if CommandLineData.Content.Valid then for Ix in CommandLineData.Source_File_Positions range 1 .. CommandLineData.Content.Number_Source loop CommandLineData.Content.Valid := FileSystem.Is_Readable_File (F => Strip_Any_At (S => CommandLineData.Content.Source_File_List (Ix).Source_File_Name)); Possible_Error2 (E => EW_Source, F => Adjust_Filename (S => CommandLineData.Content.Source_File_List (Ix).Source_File_Name)); exit when not CommandLineData.Content.Valid; end loop; end if; --# assert True; -- The -makefile option can only be specified in certain -- circumstances. 
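-- Specifically, the check below accepts -makefile only when both Echo and Brief are also selected; otherwise ES_Makefile is reported.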
if CommandLineData.Content.Valid and then CommandLineData.Content.Makefile_Mode then CommandLineData.Content.Valid := CommandLineData.Content.Echo and CommandLineData.Content.Brief; Possible_Error (ES_Makefile); end if; --# assert True; Check_Output_Directory_Exists; Check_Language_Profile; if CommandLineData.Content.Valid then Wff_Non_Overlapping; end if; --# end accept; end Wff_Command_Line; procedure Read_Command_Line (Command_String : out Command_Strings) --# global in out CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# SPARK_IO.File_Sys & --# Command_String from SPARK_IO.File_Sys; is Cmd_Line_Found : Boolean; Default_Switches_Found : Boolean; Cmd_Line : E_Strings.T; Tmp : E_Strings.T; begin --# accept W, 169, CommandLineData.Content.Default_Switch_File, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Help_Requested, "Direct updates OK here" & --# W, 169, CommandLineData.Content.Plain_Output, "Direct updates OK here"; Cmd_Line := E_Strings.Empty_String; if Ignore_Default_Switch_File then CommandLineData.Content.Default_Switch_File := False; else Read_Default_Switches (Default_Switches_Found => Default_Switches_Found, Cmd_Line => Cmd_Line); if Default_Switches_Found then CommandLineData.Content.Default_Switch_File := True; end if; end if; -- Read commandline and append to the `switch line' (if any) FileSystem.Read_Cmd_Line (Cmd_Line_Found => Cmd_Line_Found, Cmd_Line => Tmp); E_Strings.Append_Examiner_String (E_Str1 => Cmd_Line, E_Str2 => Tmp); Command_String := Command_Strings'(Current_Position => 1, Contents => Cmd_Line); if not Cmd_Line_Found then -- Nothing on command-line so abandon processing and -- indicate that the user needs Help! CommandLineData.Content.Help_Requested := True; -- Override setting of PlainOutput so Help information -- always appears with Examiner version number and date. CommandLineData.Content.Plain_Output := False; end if; --# end accept; end Read_Command_Line; begin -- Process Read_Command_Line (Command_String => Command_String); -- If there's no command line at all, then we assume Help -- is needed, so don't bother to parse the remainder or default switches. 
if not CommandLineData.Content.Help_Requested then Parse_Command_Line (Command_String => Command_String); if CommandLineData.Content.Valid then Wff_Command_Line; end if; end if; end Process; procedure Process_Defaults_From_Switch_File is Command_String : Command_Strings; procedure Read_Switch_File (Command_String : out Command_Strings) --# global in out CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# SPARK_IO.File_Sys & --# Command_String from SPARK_IO.File_Sys; is Default_Switches_Found : Boolean; Switch_Line : E_Strings.T; Command_String_Content : E_Strings.T; begin Command_String_Content := E_Strings.Empty_String; Read_Default_Switches (Default_Switches_Found => Default_Switches_Found, Cmd_Line => Switch_Line); if Default_Switches_Found then --# accept W, 169, CommandLineData.Content.Default_Switch_File, "Direct updates OK here"; CommandLineData.Content.Default_Switch_File := True; --# end accept; Command_String_Content := Switch_Line; end if; Command_String := Command_Strings'(1, Command_String_Content); end Read_Switch_File; procedure Parse_Switch_File (Command_String : in Command_Strings) --# global in out CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Command_String; is Next_Symbol : Symbols; Local_Command_String : Command_Strings; begin Local_Command_String := Command_String; Get_Next_Symbol (Command_String => Local_Command_String, Next_Symbol => Next_Symbol); --# accept F, 10, Next_Symbol, "Next_Symbol not used here" & --# F, 10, Local_Command_String, "Local_Command_String not used here"; Parse_Command_Options (Command_String => Local_Command_String, Next_Symbol => Next_Symbol); end Parse_Switch_File; begin -- Process_Defaults_From_Switch_File if Ignore_Default_Switch_File then null; else Read_Switch_File (Command_String => Command_String); Parse_Switch_File (Command_String => Command_String); end if; end Process_Defaults_From_Switch_File; end CommandLineHandler; spark-2012.0.deb/examiner/heap_storage.ads0000644000175000017500000000636611753202336017416 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- Heap_Storage -- -- Purpose: -- This package supports the Heap package, providing an abstraction -- of an automatically-extending array of Atom_Descriptors, indexed -- by the type Atom. 
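--
-- A minimal usage sketch (illustrative only: the declare block and the
-- literal initial length below are hypothetical, not taken from the
-- Examiner sources), showing how a client creates, extends and updates
-- a Vector with the operations declared below:
--
--    declare
--       V : Heap_Storage.Vector;
--       D : Heap_Storage.Atom_Descriptor;
--    begin
--       Heap_Storage.Initialize (Initial_Length => 64, V => V);
--       -- Append grows the capacity automatically when required.
--       Heap_Storage.Append
--         (V     => V,
--          Value => Heap_Storage.Atom_Descriptor'
--            (ValueA => 1, ValueB => 2, PointerA => 0, PointerB => 0));
--       -- Elements are then read and updated in place by index.
--       D := Heap_Storage.Get_Element
--              (V => V, Index => Heap_Storage.Last_Index (V));
--       Heap_Storage.Set_Element
--         (V => V, Index => Heap_Storage.Last_Index (V), Value => D);
--    end;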
-- -- In the past, the Examiner used a fixed-size array in Heap to -- offer this abstraction, but this had the drawback of a fixed -- capacity. This package, introduced in Release 10.1 solves -- that by offering an implementation where the Vector automatically -- extends its capacity as required. -- -- Implementation -- Uses Ada.Containers.Vectors in the (hidden) private part and -- body. -------------------------------------------------------------------------------- with Ada.Containers; with Ada.Containers.Vectors; with SPARK.Ada.Containers; --# inherit SPARK.Ada.Containers; package Heap_Storage is type Atom is range 0 .. SPARK.Ada.Containers.Count_Type'Last; --# assert Atom'Base is Integer; type Vector is private; Empty_Vector : constant Vector; type Atom_Descriptor is record ValueA, ValueB : Natural; PointerA, PointerB : Atom; end record; procedure Initialize (Initial_Length : in SPARK.Ada.Containers.Count_Type; V : out Vector); --# derives V from Initial_Length; function Last_Index (V : Vector) return Atom; function Get_Element (V : in Vector; Index : in Atom) return Atom_Descriptor; procedure Set_Element (V : in out Vector; Index : in Atom; Value : in Atom_Descriptor); --# derives V from *, --# Index, --# Value; -- Appends Value to V, extending the capacity of V if necessary procedure Append (V : in out Vector; Value : in Atom_Descriptor); --# derives V from *, --# Value; private --# hide Heap_Storage; package Vectors is new Ada.Containers.Vectors (Index_Type => Atom, Element_Type => Atom_Descriptor); type Vector is record Vec : Vectors.Vector; end record; Empty_Vector : constant Vector := Vector'(Vec => Vectors.Empty_Vector); end Heap_Storage; spark-2012.0.deb/examiner/dictionary-get_scalar_attribute_value.adb0000644000175000017500000001432011753202336024447 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Dictionary) function Get_Scalar_Attribute_Value (Base : Boolean; Name : LexTokenManager.Lex_String; Type_Mark : RawDict.Type_Info_Ref) return LexTokenManager.Lex_String is Result : LexTokenManager.Lex_String; -------------------------------------------------------------------------------- function Get_Base_Attribute_Value (Name : LexTokenManager.Lex_String; Type_Mark : RawDict.Type_Info_Ref) return LexTokenManager.Lex_String --# global in Dict; --# in LexTokenManager.State; is Result : LexTokenManager.Lex_String; Base_Type : RawDict.Type_Info_Ref; begin -- set default case; override for various successes Result := LexTokenManager.Null_String; Base_Type := Get_Base_Type (Type_Mark => Type_Mark); if Base_Type /= RawDict.Null_Type_Info_Ref then if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.First_Token) = LexTokenManager.Str_Eq then if RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Integer_Type_Item or else RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Enumeration_Type_Item or else RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Modular_Type_Item or else RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Floating_Point_Type_Item then Result := RawDict.Get_Type_Lower (Type_Mark => Base_Type); end if; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Last_Token) = LexTokenManager.Str_Eq then if RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Integer_Type_Item or else RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Enumeration_Type_Item or else RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Modular_Type_Item or else RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Floating_Point_Type_Item then Result := RawDict.Get_Type_Upper (Type_Mark => Base_Type); end if; end if; end if; return Result; end Get_Base_Attribute_Value; -------------------------------------------------------------------------------- function Get_Attribute_Value (Name : LexTokenManager.Lex_String; Type_Mark : RawDict.Type_Info_Ref) return LexTokenManager.Lex_String --# global in Dict; --# in LexTokenManager.State; is Result : LexTokenManager.Lex_String; begin if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Delta_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Digits_Token) = LexTokenManager.Str_Eq then Result := RawDict.Get_Type_Error_Bound (Type_Mark => Type_Mark); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.First_Token) = LexTokenManager.Str_Eq then if Type_Is_Scalar (Type_Mark => Type_Mark) then Result := RawDict.Get_Type_Lower (Type_Mark => Type_Mark); else Result := RawDict.Get_Type_Lower (Type_Mark => Get_Array_Index (Type_Mark => Type_Mark, Dimension => 1)); end if; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Last_Token) = LexTokenManager.Str_Eq then if Type_Is_Scalar (Type_Mark => Type_Mark) then Result := RawDict.Get_Type_Upper (Type_Mark => Type_Mark); else Result := RawDict.Get_Type_Upper (Type_Mark => Get_Array_Index (Type_Mark => Type_Mark, Dimension => 1)); end if; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Modulus_Token) = 
LexTokenManager.Str_Eq then Result := RawDict.Get_Type_Modulus (Type_Mark => Type_Mark); else Result := LexTokenManager.Null_String; end if; return Result; end Get_Attribute_Value; begin -- Get_Scalar_Attribute_Value if Type_Mark = Get_Unknown_Type_Mark then Result := LexTokenManager.Null_String; elsif Base then Result := Get_Base_Attribute_Value (Name => Name, Type_Mark => Get_Root_Type (Type_Mark => Type_Mark)); else Result := Get_Attribute_Value (Name => Name, Type_Mark => Type_Mark); end if; return Result; end Get_Scalar_Attribute_Value; spark-2012.0.deb/examiner/sem-walk_expression_p-down_wf_aggregate.adb0000644000175000017500000006770011753202336024720 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Walk_Expression_P) procedure Down_Wf_Aggregate (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Next_Node : out STree.SyntaxNode; E_Stack : in out Exp_Stack.Exp_Stack_Type; Heap_Param : in out Lists.List_Heap) is Has_Others_Part : Boolean; Association_Type : Sem.Typ_Agg_Association_Type; Name_Exp : Sem.Exp_Record; Child, Parent, Others_Node : STree.SyntaxNode; Ptr : Lists.List; Unknown_Or_Indiscrete_Found : Boolean; Index_Type_Mark : Dictionary.Symbol; Error_Found : Boolean := False; Number_Of_Dimensions : Positive; -------------------------------------------------------------------- procedure Create_Aggregate_Stack_Entry (Index_Type_Symbol : in Dictionary.Symbol; Association_Type : in Sem.Typ_Agg_Association_Type; Has_Others_Part : in Boolean; Scope : in Dictionary.Scopes) -- this procedure discriminates between the cases listed in S.P0468.53.11 -- and sets up the stack entry accordingly -- -- preconditions to entry to this procedure: -- aggregate is an array aggregate -- -- NB aggregate may be a lone others clause --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in out Aggregate_Stack.State; --# derives Aggregate_Stack.State from *, --# Association_Type, --# Dictionary.Dict, --# Has_Others_Part, --# Index_Type_Symbol, --# LexTokenManager.State, --# Scope; --# pre (Dictionary.Is_Null_Symbol (Index_Type_Symbol) or Dictionary.IsTypeMark (Index_Type_Symbol, Dictionary.Dict)) and --# Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State); --# post Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State); is Complete_Check_Range_From : Integer; Complete_Check_Range_To : Integer; Complete_Check_Range_State : CompleteCheck.TypRangeState; Type_Lower_Bound : Sem.Typ_Type_Bound; Type_Upper_Bound : Sem.Typ_Type_Bound; Complete_Rec : CompleteCheck.T; Check_Completeness : Boolean; Warn_No_Others : Boolean; Check_Overlap : Boolean; 
Signal_Out_Of_Range : Boolean; begin -- if index type unknown or not discrete then cannot do much at all if Dictionary.IsUnknownTypeMark (Index_Type_Symbol) or else not Dictionary.IsDiscreteTypeMark (Index_Type_Symbol, Scope) then Type_Lower_Bound := Sem.Unknown_Type_Bound; Type_Upper_Bound := Sem.Unknown_Type_Bound; Complete_Check_Range_From := -(ExaminerConstants.CompleteCheckSize / 2); Complete_Check_Range_To := (Complete_Check_Range_From + ExaminerConstants.CompleteCheckSize) - 1; --NB we 'know' that Complete_Check_Range_State will return RangeDoesFit, -- so the value is ignored, giving a flow error --# accept Flow, 10, Complete_Check_Range_State, "Expected ineffective assignment to Complete_Check_Range_State"; CompleteCheck.Init (Complete_Rec, Complete_Check_Range_From, Complete_Check_Range_To, Complete_Check_Range_State); --# end accept; Check_Completeness := False; Warn_No_Others := False; Signal_Out_Of_Range := False; if Dictionary.IsUnknownTypeMark (Index_Type_Symbol) and then Association_Type = Sem.Aggregate_Is_Named then Check_Overlap := True; else Check_Overlap := False; end if; else -- get bounds from dictionary Sem.Get_Type_Bounds (Type_Symbol => Index_Type_Symbol, Lower_Bound => Type_Lower_Bound, Upper_Bound => Type_Upper_Bound); if not (Type_Lower_Bound.Is_Defined and then Type_Upper_Bound.Is_Defined) then -- one or other bound is unknown to the dictionary -- set flags accordingly Check_Completeness := False; Warn_No_Others := True; if Association_Type = Sem.Aggregate_Is_Positional then Check_Overlap := False; Signal_Out_Of_Range := False; else Check_Overlap := True; Signal_Out_Of_Range := True; end if; -- set up range for completeness checker -- if both bounds unknown use symmetric range if (not Type_Lower_Bound.Is_Defined) and then (not Type_Upper_Bound.Is_Defined) then Complete_Check_Range_From := -(ExaminerConstants.CompleteCheckSize / 2); Complete_Check_Range_To := (Complete_Check_Range_From + ExaminerConstants.CompleteCheckSize) - 1; -- otherwise use range extending from known bound elsif Type_Lower_Bound.Is_Defined then Complete_Check_Range_From := Type_Lower_Bound.Value; if Complete_Check_Range_From <= (Integer'Last - ExaminerConstants.CompleteCheckSize) then Complete_Check_Range_To := (Complete_Check_Range_From + ExaminerConstants.CompleteCheckSize) - 1; else Complete_Check_Range_To := Integer'Last; end if; else -- Type_Upper_Bound.IsDefined Complete_Check_Range_To := Type_Upper_Bound.Value; if Complete_Check_Range_To >= (Integer'First + ExaminerConstants.CompleteCheckSize) then Complete_Check_Range_From := (Complete_Check_Range_To - ExaminerConstants.CompleteCheckSize) + 1; else Complete_Check_Range_From := Integer'First; end if; end if; --NB we 'know' that Complete_Check_Range_State will return RangeDoesFit, -- so the value is ignored, giving a flow error --# accept Flow, 10, Complete_Check_Range_State, "Expected ineffective assignment to Complete_Check_Range_State"; CompleteCheck.Init (Complete_Rec, Complete_Check_Range_From, Complete_Check_Range_To, Complete_Check_Range_State); --# end accept; else -- both bounds known to dictionary -- set up completeness checker CompleteCheck.Init (Complete_Rec, Type_Lower_Bound.Value, Type_Upper_Bound.Value, Complete_Check_Range_State); -- for positional association, the question of whether the -- type is too big for the completeness checker is irrelevant if Association_Type = Sem.Aggregate_Is_Positional then Check_Completeness := True; Warn_No_Others := False; Check_Overlap := False; Signal_Out_Of_Range := False; else -- set 
flags according to whether range fits in completeness checker if Complete_Check_Range_State = CompleteCheck.RangeDoesFit then Check_Completeness := True; Warn_No_Others := False; Check_Overlap := True; Signal_Out_Of_Range := False; else Check_Completeness := False; Warn_No_Others := True; Check_Overlap := True; Signal_Out_Of_Range := True; end if; end if; end if; end if; Aggregate_Stack.Push (Type_Sym => Index_Type_Symbol, Lower_Bound => Type_Lower_Bound, Upper_Bound => Type_Upper_Bound, Agg_Flags => Sem.Typ_Agg_Flags'(Check_Completeness => Check_Completeness, Warn_No_Others => Warn_No_Others, Check_Overlap => Check_Overlap, Signal_Out_Of_Range => Signal_Out_Of_Range, Out_Of_Range_Seen => False, More_Entries_Than_Natural => False, Has_Others_Part => Has_Others_Part, Association_Type => Association_Type), Counter => 0, Complete_Rec => Complete_Rec); end Create_Aggregate_Stack_Entry; begin -- Down_Wf_Aggregate -- code to determine association type enhanced to detect the -- occurrence of a 'lone' others clause, and moved here so that -- the information is available to anonymous aggregates -- code determining presence of others part moved here for same reason Child := STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => Node)); -- ASSUME Child = aggregate_or_expression OR named_association OR positional_association OR -- annotation_aggregate_or_expression OR annotation_named_association OR annotation_positional_association case STree.Syntax_Node_Type (Node => Child) is when SP_Symbols.positional_association | SP_Symbols.annotation_positional_association => -- ASSUME Child = positional_association OR annotation_positional_association Association_Type := Sem.Aggregate_Is_Positional; Others_Node := STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Child)); -- ASSUME Others_Node = aggregate_or_expression OR annotation_aggregate_or_expression OR NULL if STree.Syntax_Node_Type (Node => Others_Node) = SP_Symbols.aggregate_or_expression or else STree.Syntax_Node_Type (Node => Others_Node) = SP_Symbols.annotation_aggregate_or_expression then -- ASSUME Others_Node = aggregate_or_expression OR annotation_aggregate_or_expression Has_Others_Part := True; elsif Others_Node = STree.NullNode then -- ASSUME Others_Node = NULL Has_Others_Part := False; else Has_Others_Part := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Others_Node = aggregate_or_expression OR annotation_aggregate_or_expression OR " & "NULL in Down_Wf_Aggregate"); end if; when SP_Symbols.named_association | SP_Symbols.annotation_named_association => -- ASSUME Child = named_association OR annotation_named_association Association_Type := Sem.Aggregate_Is_Named; Others_Node := STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Child)); -- ASSUME Others_Node = aggregate_or_expression OR annotation_aggregate_or_expression OR NULL if STree.Syntax_Node_Type (Node => Others_Node) = SP_Symbols.aggregate_or_expression or else STree.Syntax_Node_Type (Node => Others_Node) = SP_Symbols.annotation_aggregate_or_expression then -- ASSUME Others_Node = aggregate_or_expression OR annotation_aggregate_or_expression Has_Others_Part := True; elsif Others_Node = STree.NullNode then -- ASSUME Others_Node = NULL Has_Others_Part := False; else Has_Others_Part := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Others_Node = aggregate_or_expression OR annotation_aggregate_or_expression OR " & "NULL in Down_Wf_Aggregate"); end 
if; when SP_Symbols.aggregate_or_expression | SP_Symbols.annotation_aggregate_or_expression => -- ASSUME Child = aggregate_or_expression OR annotation_aggregate_or_expression Association_Type := Sem.Aggregate_Is_Lone_Others; Others_Node := STree.Child_Node (Current_Node => Child); -- ASSUME Others_Node = aggregate OR expression OR -- annotation_aggregate OR annotation_expression SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Others_Node) = SP_Symbols.aggregate or else STree.Syntax_Node_Type (Node => Others_Node) = SP_Symbols.expression or else STree.Syntax_Node_Type (Node => Others_Node) = SP_Symbols.annotation_aggregate or else STree.Syntax_Node_Type (Node => Others_Node) = SP_Symbols.annotation_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Others_Node = aggregate OR expression OR " & "annotation_aggregate OR annotation_expression in Down_Wf_Aggregate"); Has_Others_Part := True; when others => Association_Type := Sem.Aggregate_Is_Lone_Others; Others_Node := STree.NullNode; Has_Others_Part := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child = aggregate_or_expression OR named_association OR positional_association OR " & "annotation_aggregate_or_expression OR annotation_named_association OR annotation_positional_association " & "in Down_Wf_Aggregate"); end case; --# assert (STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.aggregate or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_aggregate) and --# Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State) and --# (STree.Syntax_Node_Type (Others_Node, STree.Table) = SP_Symbols.aggregate_or_expression or --# STree.Syntax_Node_Type (Others_Node, STree.Table) = SP_Symbols.annotation_aggregate_or_expression or --# STree.Syntax_Node_Type (Others_Node, STree.Table) = SP_Symbols.aggregate or --# STree.Syntax_Node_Type (Others_Node, STree.Table) = SP_Symbols.expression or --# STree.Syntax_Node_Type (Others_Node, STree.Table) = SP_Symbols.annotation_aggregate or --# STree.Syntax_Node_Type (Others_Node, STree.Table) = SP_Symbols.annotation_expression or --# Others_Node = STree.NullNode); Parent := STree.Parent_Node (Current_Node => Node); -- ASSUME Parent = enumeration_representation_clause OR code_statement OR -- aggregate_or_expression OR qualified_expression OR -- annotation_aggregate_or_expression OR annotation_qualified_expression if STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.qualified_expression or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.annotation_qualified_expression then -- ASSUME Parent = qualified_expression OR annotation_qualified_expression -- this is a top level, not embedded, aggregate Exp_Stack.Pop (Item => Name_Exp, Stack => E_Stack); case Name_Exp.Sort is when Sem.Is_Type_Mark => Name_Exp.Is_Constant := True; if Dictionary.IsArrayTypeMark (Name_Exp.Type_Symbol, Scope) then if Dictionary.Is_Unconstrained_Array_Type_Mark (Name_Exp.Type_Symbol, Scope) and then STree.Syntax_Node_Type (Node => Node) = SP_Symbols.aggregate then -- Qualified aggregates of unconstrained array types only permitted in -- annotation context. -- So you are allowed to say, for example, "post X = T'(others => 0)" for a -- subprogram that initializes an unconstrained array or "post X /= Y" where -- both X and Y are unconstrained array types. 
Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 39, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Parent), Id_Str => LexTokenManager.Null_String); end if; --# assert (STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.aggregate or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_aggregate) and --# Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State) and --# (STree.Syntax_Node_Type (Parent, STree.Table) = SP_Symbols.qualified_expression or --# STree.Syntax_Node_Type (Parent, STree.Table) = SP_Symbols.annotation_qualified_expression); Name_Exp.Param_Count := 1; -- used to record depth of dimension reached Create_Aggregate_Stack_Entry (Index_Type_Symbol => Dictionary.GetArrayIndex (Name_Exp.Type_Symbol, 1), Association_Type => Association_Type, Has_Others_Part => Has_Others_Part, Scope => Scope); -- check types of all array dimensions, and warn if checking -- may be incomplete because any of the index types is unknown -- or indiscrete Unknown_Or_Indiscrete_Found := False; Number_Of_Dimensions := Dictionary.GetNumberOfDimensions (Name_Exp.Type_Symbol); for I in Positive range 1 .. Number_Of_Dimensions --# assert (STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.aggregate or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_aggregate) and --# Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State) and --# (STree.Syntax_Node_Type (Parent, STree.Table) = SP_Symbols.qualified_expression or --# STree.Syntax_Node_Type (Parent, STree.Table) = SP_Symbols.annotation_qualified_expression); loop Index_Type_Mark := Dictionary.GetArrayIndex (Name_Exp.Type_Symbol, I); if Dictionary.IsUnknownTypeMark (Index_Type_Mark) or else (not Dictionary.IsDiscreteTypeMark (Index_Type_Mark, Scope)) then Unknown_Or_Indiscrete_Found := True; end if; end loop; --# assert (STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.aggregate or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_aggregate) and --# Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State) and --# (STree.Syntax_Node_Type (Parent, STree.Table) = SP_Symbols.qualified_expression or --# STree.Syntax_Node_Type (Parent, STree.Table) = SP_Symbols.annotation_qualified_expression); if Unknown_Or_Indiscrete_Found then Error_Found := True; ErrorHandler.Semantic_Warning (Err_Num => 307, Position => STree.Node_Position (Node => Parent), Id_Str => LexTokenManager.Null_String); end if; Name_Exp.Errors_In_Expression := Name_Exp.Errors_In_Expression or else Error_Found; Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); Next_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Next_Node = component_association OR annotation_component_association SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.component_association or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_component_association, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = component_association OR annotation_component_association in Down_Wf_Aggregate"); elsif Dictionary.IsRecordTypeMark (Name_Exp.Type_Symbol, Scope) then if Has_Others_Part then Exp_Stack.Push (X => Sem.Unknown_Type_Record, Stack => E_Stack); Next_Node := STree.NullNode; ErrorHandler.Semantic_Error (Err_Num => 53, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Others_Node), Id_Str => LexTokenManager.Null_String); elsif Dictionary.TypeIsExtendedTagged (Name_Exp.Type_Symbol) and then 
Dictionary.ExtendedTaggedHasPrivateAncestors (Name_Exp.Type_Symbol, Scope) then Exp_Stack.Push (X => Sem.Unknown_Type_Record, Stack => E_Stack); Next_Node := STree.NullNode; ErrorHandler.Semantic_Error_Sym (Err_Num => 833, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Parent), Sym => Name_Exp.Type_Symbol, Scope => Scope); else -- OK, not illegal tagged record and has no others clause -- Name_Exp.TypeSymbol here might denote a record subtype. For subsequent -- checking of the aggregate, we need the root record type, so... Name_Exp.Type_Symbol := Dictionary.GetRootType (Name_Exp.Type_Symbol); if Association_Type = Sem.Aggregate_Is_Named then Create_Name_List (List => Ptr, Heap_Param => Heap_Param); Name_Exp.Param_List := Ptr; Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); Next_Node := STree.Child_Node (Current_Node => Node); else -- positional association Name_Exp.Param_Count := 0; Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); Next_Node := STree.Child_Node (Current_Node => Node); end if; -- ASSUME Next_Node = component_association OR annotation_component_association SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.component_association or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_component_association, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = component_association OR annotation_component_association " & "in Down_Wf_Aggregate"); end if; else -- not a record or array Exp_Stack.Push (X => Sem.Unknown_Type_Record, Stack => E_Stack); ErrorHandler.Semantic_Error (Err_Num => 33, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); Next_Node := STree.NullNode; end if; when Sem.Is_Unknown => -- illegal name prefix but we can continue walk to check internal -- validity of any expressions that follow. 
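            -- Pushing Unknown_Type_Record keeps the expression stack consistent and
            -- avoids knock-on type errors arising from the illegal prefix.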
Exp_Stack.Push (X => Sem.Unknown_Type_Record, Stack => E_Stack); Next_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Next_Node = component_association OR annotation_component_association SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.component_association or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_component_association, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = component_association OR annotation_component_association in Down_Wf_Aggregate"); when others => Exp_Stack.Push (X => Sem.Unknown_Type_Record, Stack => E_Stack); ErrorHandler.Semantic_Error (Err_Num => 95, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Parent), Id_Str => LexTokenManager.Null_String); Next_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Next_Node = component_association OR annotation_component_association SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.component_association or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_component_association, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = component_association OR annotation_component_association in Down_Wf_Aggregate"); end case; elsif STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.enumeration_representation_clause or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.code_statement or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.aggregate_or_expression or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.annotation_aggregate_or_expression then -- ASSUME Parent = enumeration_representation_clause OR code_statement OR -- aggregate_or_expression OR annotation_aggregate_or_expression -- it is an embedded aggregate of a multi-dim array Exp_Stack.Pop (Item => Name_Exp, Stack => E_Stack); -- increase depth of dimension count if Name_Exp.Param_Count < Natural'Last then Name_Exp.Param_Count := Name_Exp.Param_Count + 1; Create_Aggregate_Stack_Entry (Index_Type_Symbol => Dictionary.GetArrayIndex (Name_Exp.Type_Symbol, Name_Exp.Param_Count), Association_Type => Association_Type, Has_Others_Part => Has_Others_Part, Scope => Scope); else Name_Exp := Sem.Unknown_Type_Record; end if; Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); Next_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Next_Node = component_association OR annotation_component_association SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.component_association or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_component_association, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = component_association OR annotation_component_association in Down_Wf_Aggregate"); else Next_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Parent = enumeration_representation_clause OR code_statement OR " & "aggregate_or_expression OR qualified_expression OR " & "annotation_aggregate_or_expression OR annotation_qualified_expression in Down_Wf_Aggregate"); end if; end Down_Wf_Aggregate; spark-2012.0.deb/examiner/contextmanager.ads0000644000175000017500000000747111753202336017772 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free 
software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with ExaminerConstants; --# inherit ExaminerConstants; package ContextManager is type UnitTypes is ( MainProgram, PackageSpecification, -- use this one when we have a package body and know we need spec PackageBody, SubUnit, InterUnitPragma, GenericSubprogramDeclaration, -- use this one when we have a generic body and know we need spec GenericSubprogramBody, InvalidUnit); -- this one is for inherited items where we don't know which it is type UnitTypeSets is array (UnitTypes) of Boolean; -- set of inheritable things (was previously only packages) InheritableItem : constant UnitTypeSets := UnitTypeSets'(PackageSpecification => True, GenericSubprogramDeclaration => True, others => False); PackageSpecificationSet : constant UnitTypeSets := UnitTypeSets'(PackageSpecification => True, others => False); -- this is for generic subprogram declarations, where we know that is what we want because we found a generic body GenericDeclarationSet : constant UnitTypeSets := UnitTypeSets'(GenericSubprogramDeclaration => True, others => False); BodySet : constant UnitTypeSets := UnitTypeSets'(MainProgram | PackageBody => True, others => False); SubUnitSet : constant UnitTypeSets := UnitTypeSets'(SubUnit | PackageBody => True, others => False); type UnitDescriptors is private; type FileDescriptors is private; NullUnit : constant UnitDescriptors; NullFile : constant FileDescriptors; type UnitStatus is ( NoUnitEntry, UnitCreated, UnitParsed, UnitDeferred, UnitAnalysed, NoIndexFile, NotInIndexFile, CannotOpenFile, UnableToLocate); subtype UnitNotFound is UnitStatus range NoIndexFile .. UnableToLocate; type FileStatus is (NoFileEntry, FileCreated, FileOpen, FileEnd, UnableToOpen); private type UnitDescriptors is range 0 .. ExaminerConstants.ContextManagerMaxUnits; --# assert UnitDescriptors'Base is Short_Integer; -- for GNAT type FileDescriptors is range 0 .. ExaminerConstants.ContextManagerMaxFiles; --# assert FileDescriptors'Base is Short_Integer; -- for GNAT NullUnit : constant UnitDescriptors := 0; NullFile : constant FileDescriptors := 0; end ContextManager; spark-2012.0.deb/examiner/pairs.ads0000644000175000017500000001723511753202336016070 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Cell_Storage; with Cells; --# inherit Cells, --# Cell_Storage, --# Clists, --# CStacks, --# SPARK_IO, --# SP_Symbols, --# Statistics, --# Structures; package Pairs is --------------------------------------------------------------------------- -- This package provides an ADT for a "Pair" structure built within -- the Cells.Heap_Record data structure. -- -- A "Pair" is a 2-valued tuple, which can either be: -- -- A "predicate" and an "action" -- OR -- A Verification Condition, consisting of a Hypotheses and Conclusions -- (both of which are actually just an FDL predicate) -- -- Whether a particular Pair represents a predicate/action or a VC -- depends on the context of the calling unit. --------------------------------------------------------------------------- type Pair is private; type CombinationKind is (PredicateTransformation, ActionComposition); ---------------------------------------------------------------------------- -- A NullPair is synonymous with Cells.Null_Cell ---------------------------------------------------------------------------- function IsNullPair (P : Pair) return Boolean; ---------------------------------------------------------------------------- -- Returns True if the predicate or hypotheses part is Null ---------------------------------------------------------------------------- function IsTrue (Heap : Cells.Heap_Record; P : Pair) return Boolean; ---------------------------------------------------------------------------- -- Returns True if the Action part is Null ---------------------------------------------------------------------------- function IsUnitAction (Heap : Cells.Heap_Record; P : Pair) return Boolean; ---------------------------------------------------------------------------- -- Conversion function to/from Cells.Cell ---------------------------------------------------------------------------- function CellToPair (C : Cells.Cell) return Pair; function PairHead (P : Pair) return Cells.Cell; ---------------------------------------------------------------------------- -- Deep copy Original to Copy, following B_Ptr and C_Ptr -- fields, but ignoring A_Ptr field. 
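   -- (The A_Ptr field serves only to link a Pair into a list of Pairs, as noted in
   -- the representation description in the private part, so it is deliberately not
   -- followed when copying.)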
---------------------------------------------------------------------------- procedure CopyPair (Heap : in out Cells.Heap_Record; Original : in Pair; Copy : out Pair); --# global in out Statistics.TableUsage; --# derives Copy, --# Heap from Heap, --# Original & --# Statistics.TableUsage from *, --# Heap, --# Original; -- Action_R is the structure of an action R -- Predicate_q is the structure of a predicate q procedure CombinePredicateWithAction (Heap : in out Cells.Heap_Record; Action_R, Predicate_q : in Cells.Cell; Result : out Cells.Cell); --# global in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Action_R, --# Heap, --# Predicate_q & --# Result from Predicate_q; -- Action_R is the structure of an action R -- Action_S is the structure of an action S procedure ComposeActions (Heap : in out Cells.Heap_Record; Action_R, Action_S : in Cells.Cell; Result : out Cells.Cell); --# global in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Action_R, --# Action_S, --# Heap & --# Result from Action_R; ---------------------------------------------------------------------------- -- FormConjunction -- -- Returns Result = Predicate_1 and Predicate_2 -- -- Note the Result refers to the original Predicates in-place - a deep -- copy of them is NOT taken. -- -- NOTE: this subprogram doesn't really involve Pairs at all, -- so it might be better if located in Cells package instead. ---------------------------------------------------------------------------- procedure FormConjunction (Heap : in out Cells.Heap_Record; Predicate_1, Predicate_2 : in Cells.Cell; Result : out Cells.Cell); --# global in out Statistics.TableUsage; --# derives Heap from *, --# Predicate_1, --# Predicate_2 & --# Result from Heap & --# Statistics.TableUsage from *, --# Heap; ---------------------------------------------------------------------------- -- MultiplyPairs -- -- Given two predicate-action pairs (p, R) and (q, S) this procedure forms -- their product (p and q!R, R.S). -- -- q!R denotes predicate q with assignments in action R substituted -- a la Hoare's assignment axiom. -- -- R.S denotes the "action composition" of actions R and S ---------------------------------------------------------------------------- procedure MultiplyPairs (Heap : in out Cells.Heap_Record; Multiplicand, Multiplier : in Pair; Product : out Pair); --# global in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Heap, --# Multiplicand, --# Multiplier & --# Product from Heap; private ---------------------------------------------------------------------------- -- A Pair is actually just a Cells.Cell value, but made -- distinct here to prevent unintentional mixing of -- Cells and Pairs. -- -- Representation: -- -- The A_Ptr field is used to link together lists of Pairs. -- -- The B_Ptr field of a Pair references the "predicate" or "hypotheses" part -- -- The C_Ptr field of a Pair references the "action" or "conclusions" part ---------------------------------------------------------------------------- type Pair is range 0 .. 
Cell_Storage.Cell'Last; --# assert Pair'Base is Integer; end Pairs; spark-2012.0.deb/examiner/sem-wf_entire_variable.adb0000644000175000017500000002030611753202336021333 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) procedure Wf_Entire_Variable (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Error_Hint : in Visibility_Error_Hint; Var_Sym : out Dictionary.Symbol; Dotted : out Boolean) is Id_Node : STree.SyntaxNode; It : STree.Iterator; P_Id_Str, Id_Str : LexTokenManager.Lex_String; Sym_So_Far : Dictionary.Symbol; Prefix_Ok : Boolean; begin -- Node is the root of any variable (dotted or otherwise). P_Id_Str := LexTokenManager.Null_String; Var_Sym := Dictionary.NullSymbol; -- This procedure traverses (depth first) the tree under the root looking for -- identifiers. Var_Sym is made up of all the identifiers found. 
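   -- For a dotted name such as P.X the loop below first looks up P, checks that it
   -- denotes a visible package, and then looks up X as a selected item within it;
   -- an undotted name must resolve directly to a variable.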
It := Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Node, In_Direction => STree.Down); --# check Syntax_Node_Type (Get_Node (It), STree.Table) = SP_Symbols.identifier or --# STree.IsNull (It); if not STree.IsNull (It => It) then -- ASSUME It = identifier Id_Node := Get_Node (It => It); --# check Syntax_Node_Type (Id_Node, STree.Table) = SP_Symbols.identifier; Id_Str := Node_Lex_String (Node => Id_Node); Var_Sym := Dictionary.LookupItem (Name => Id_Str, Scope => Scope, Context => Dictionary.ProofContext, Full_Package_Name => False); loop --# assert STree.Table = STree.Table~ and --# (Syntax_Node_Type (Node, STree.Table) = SP_Symbols.entire_variable or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_primary) and --# Syntax_Node_Type (Id_Node, STree.Table) = SP_Symbols.identifier and --# Id_Node = Get_Node (It); if Dictionary.Is_Null_Symbol (Var_Sym) then case Error_Hint is when No_Hint => ErrorHandler.Semantic_Error2 (Err_Num => 1, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str1 => Id_Str, Id_Str2 => P_Id_Str); when In_Global_List => ErrorHandler.Semantic_Error2 (Err_Num => 144, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str1 => Id_Str, Id_Str2 => P_Id_Str); when In_Derives_Import_List => ErrorHandler.Semantic_Error2 (Err_Num => 752, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str1 => Id_Str, Id_Str2 => P_Id_Str); when In_Derives_Export_List => ErrorHandler.Semantic_Error2 (Err_Num => 753, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str1 => Id_Str, Id_Str2 => P_Id_Str); when In_Suspends_List => ErrorHandler.Semantic_Error2 (Err_Num => 755, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str1 => Id_Str, Id_Str2 => P_Id_Str); end case; exit; end if; It := STree.NextNode (It); --# check Syntax_Node_Type (Get_Node (It), STree.Table) = SP_Symbols.identifier or --# STree.IsNull (It); if Dictionary.Is_Variable (Var_Sym) then -- at this point Sym is a variable, final check that there is no dotted -- part to the right of it as there would be if a record field was there if STree.IsNull (It => It) then -- ASSUME It = NULL STree.Set_Node_Lex_String (Sym => Var_Sym, Node => Id_Node); elsif Syntax_Node_Type (Node => Get_Node (It => It)) = SP_Symbols.identifier then -- ASSUME It = identifier ErrorHandler.Semantic_Error (Err_Num => 156, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); Var_Sym := Dictionary.NullSymbol; end if; exit; end if; if Dictionary.Is_Constant (Var_Sym) then ErrorHandler.Semantic_Error (Err_Num => 150, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); Var_Sym := Dictionary.NullSymbol; exit; end if; if not Dictionary.IsPackage (Var_Sym) then ErrorHandler.Semantic_Error (Err_Num => 156, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); Var_Sym := Dictionary.NullSymbol; exit; end if; if STree.IsNull (It => It) then -- ASSUME It = NULL -- package without a selected component ErrorHandler.Semantic_Error (Err_Num => 156, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); Var_Sym := Dictionary.NullSymbol; exit; end if; Check_Package_Prefix (Node_Pos => Node_Position (Node => 
Id_Node), Pack_Sym => Var_Sym, Scope => Scope, OK => Prefix_Ok); if not Prefix_Ok then Var_Sym := Dictionary.NullSymbol; exit; end if; STree.Set_Node_Lex_String (Sym => Var_Sym, Node => Id_Node); P_Id_Str := Id_Str; Id_Node := Get_Node (It => It); Id_Str := Node_Lex_String (Node => Id_Node); Sym_So_Far := Var_Sym; Var_Sym := Dictionary.LookupSelectedItem (Prefix => Var_Sym, Selector => Id_Str, Scope => Scope, Context => Dictionary.ProofContext); -- check to see if we are getting the same symbol over and again if Var_Sym = Sym_So_Far then -- P.P.P.P.X case Var_Sym := Dictionary.NullSymbol; -- to cause "Not visible" error at top of loop end if; end loop; end if; Dotted := LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => P_Id_Str, Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq and then not Dictionary.Is_Null_Symbol (Var_Sym); end Wf_Entire_Variable; spark-2012.0.deb/examiner/spark_io.adb0000644000175000017500000005242111753202336016534 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Ada.Characters.Latin_1; with CommandLineData; with E_Strings; with FileSystem; with IO_Exceptions; with SystemErrors; with Text_IO; with Unchecked_Deallocation; package body SPARK_IO is --# hide SPARK_IO; -- File Management type File_Descriptor is record File_Ref : Text_IO.File_Type; Is_Temp : Boolean; end record; File_System_Standard_Input : aliased File_Descriptor := File_Descriptor'(File_Ref => Text_IO.Standard_Input, Is_Temp => False); File_System_Standard_Output : aliased File_Descriptor := File_Descriptor'(File_Ref => Text_IO.Standard_Output, Is_Temp => False); type File_System is record Standard_Input : File_Type; Standard_Output : File_Type; end record; File_Sys : constant File_System := File_System'(Standard_Input => File_System_Standard_Input'Access, Standard_Output => File_System_Standard_Output'Access); function Normalize (S : in String) return E_Strings.T is E_Str : E_Strings.T; begin E_Str := E_Strings.Copy_String (S); if CommandLineData.Content.Plain_Output then E_Str := FileSystem.Just_File (Fn => E_Str, Ext => True); if FileSystem.Use_Windows_Command_Line then E_Str := E_Strings.Lower_Case (E_Str); end if; end if; return E_Str; end Normalize; function Standard_Input return File_Type is begin return File_Sys.Standard_Input; end Standard_Input; function Standard_Output return File_Type is begin return File_Sys.Standard_Output; end Standard_Output; procedure Dispose is new Unchecked_Deallocation (File_Descriptor, File_Type); procedure Create (File : in out File_Type; Name_Length : in Natural; Name_Of_File : in String; Form_Of_File : in String; Status : out File_Status) is begin File := new File_Descriptor; File.Is_Temp := (Name_Of_File = ""); if CommandLineData.Content.Debug.File_Names then Text_IO.Put ("SPARK_IO.Create - "); if File.Is_Temp then Text_IO.Put (""); else E_Strings.Put_String (Standard_Output, Normalize (Name_Of_File (Name_Of_File'First .. Name_Length))); end if; if Form_Of_File = "" then Text_IO.New_Line; else Text_IO.Put_Line (", " & Form_Of_File); end if; end if; Text_IO.Create (File.File_Ref, Text_IO.Out_File, Name_Of_File (Name_Of_File'First .. Name_Length), Form_Of_File); Status := Ok; exception when Text_IO.Status_Error => Status := Status_Error; Dispose (File); when Text_IO.Name_Error => Status := Name_Error; Dispose (File); when Text_IO.Use_Error => Status := Use_Error; Dispose (File); when Text_IO.Device_Error => Status := Device_Error; Dispose (File); end Create; procedure Open (File : in out File_Type; Mode_Of_File : in File_Mode; Name_Length : in Natural; Name_Of_File : in String; Form_Of_File : in String; Status : out File_Status) is F_Mode : Text_IO.File_Mode; begin if CommandLineData.Content.Debug.File_Names then Text_IO.Put ("SPARK_IO.Open - "); if Name_Length = 0 then Text_IO.Put ("WARNING - Name_Length is Zero"); else E_Strings.Put_String (Standard_Output, Normalize (Name_Of_File (Name_Of_File'First .. Name_Length))); end if; Text_IO.Put (", " & File_Mode'Image (Mode_Of_File)); if Form_Of_File = "" then Text_IO.New_Line; else Text_IO.Put_Line (", " & Form_Of_File); end if; end if; File := new File_Descriptor; case Mode_Of_File is when In_File => F_Mode := Text_IO.In_File; when Out_File => F_Mode := Text_IO.Out_File; when Append_File => F_Mode := Text_IO.Append_File; end case; Text_IO.Open (File.File_Ref, F_Mode, Name_Of_File (Name_Of_File'First .. 
Name_Length), Form_Of_File); File.Is_Temp := False; Status := Ok; exception when Text_IO.Status_Error => Status := Status_Error; Dispose (File); when Text_IO.Name_Error => Status := Name_Error; Dispose (File); when Text_IO.Use_Error => Status := Use_Error; Dispose (File); when Text_IO.Device_Error => Status := Device_Error; Dispose (File); end Open; procedure Close (File : in out File_Type; Status : out File_Status) is begin if File = null then Status := Status_Error; else if CommandLineData.Content.Debug.File_Names then Text_IO.Put ("SPARK_IO.Close - "); if CommandLineData.Content.Plain_Output and File.Is_Temp then Text_IO.Put (""); else E_Strings.Put_String (Standard_Output, Normalize (Text_IO.Name (File => File.File_Ref))); end if; Text_IO.Put (", " & Text_IO.File_Mode'Image (Text_IO.Mode (File => File.File_Ref))); if Text_IO.Form (File => File.File_Ref) = "" then Text_IO.New_Line; else Text_IO.Put_Line (", " & Text_IO.Form (File => File.File_Ref)); end if; end if; Text_IO.Close (File.File_Ref); Dispose (File); Status := Ok; end if; exception when Text_IO.Status_Error => Status := Status_Error; Dispose (File); when Text_IO.Device_Error => Status := Device_Error; Dispose (File); end Close; procedure Delete (File : in out File_Type; Status : out File_Status) is begin if File = null then Status := Status_Error; else Text_IO.Delete (File.File_Ref); Dispose (File); Status := Ok; end if; exception when Text_IO.Status_Error => Status := Status_Error; Dispose (File); when Text_IO.Use_Error => Status := Use_Error; Dispose (File); when Text_IO.Device_Error => Status := Device_Error; Dispose (File); end Delete; procedure Reset (File : in out File_Type; Mode_Of_File : in File_Mode; Status : out File_Status) is F_Mode : Text_IO.File_Mode; begin if File = null then Status := Status_Error; else case Mode_Of_File is when In_File => F_Mode := Text_IO.In_File; when Out_File => F_Mode := Text_IO.Out_File; when Append_File => F_Mode := Text_IO.Append_File; end case; Text_IO.Reset (File.File_Ref, F_Mode); Status := Ok; end if; exception when Text_IO.Status_Error => Status := Status_Error; Dispose (File); when Text_IO.Use_Error => Status := Use_Error; Dispose (File); when Text_IO.Device_Error => Status := Device_Error; Dispose (File); end Reset; function Valid_File (File : File_Type) return Boolean is begin return File /= null; end Valid_File; function Is_Open (File : File_Type) return Boolean is begin return Valid_File (File) and then Text_IO.Is_Open (File.File_Ref); end Is_Open; function Mode (File : File_Type) return File_Mode is F_Mode : File_Mode; begin if Is_Open (File) and then Text_IO.Is_Open (File.File_Ref) then case Text_IO.Mode (File.File_Ref) is when Text_IO.In_File => F_Mode := In_File; when Text_IO.Out_File => F_Mode := Out_File; when Text_IO.Append_File => F_Mode := Append_File; end case; else F_Mode := In_File; end if; return F_Mode; end Mode; function Is_In (File : File_Type) return Boolean is begin return Is_Open (File) and then Mode (File) = In_File; end Is_In; function Is_Out (File : File_Type) return Boolean is begin return Is_Open (File) and then (Mode (File) = Out_File or Mode (File) = Append_File); end Is_Out; procedure Name (File : in File_Type; Name_Of_File : out String; Stop : out Natural) is begin if Is_Open (File) then declare FN : constant String := Text_IO.Name (File.File_Ref); begin if Name_Of_File'Length >= FN'Length then Name_Of_File (FN'Range) := FN; Stop := FN'Length; else Name_Of_File := FN (Name_Of_File'Range); Stop := Name_Of_File'Length; end if; end; else Stop := 
Name_Of_File'First - 1; end if; exception when others => Stop := Name_Of_File'First - 1; end Name; procedure Form (File : in File_Type; Form_Of_File : out String; Stop : out Natural) is begin if Is_Open (File) then declare FM : constant String := Text_IO.Form (File.File_Ref); begin if Form_Of_File'Length >= FM'Length then Form_Of_File (FM'Range) := FM; Stop := FM'Length; else Form_Of_File := FM (Form_Of_File'Range); Stop := Form_Of_File'Length; end if; end; else Stop := Form_Of_File'First - 1; end if; exception when others => Stop := Form_Of_File'First - 1; end Form; -- Line and file terminator control function P_To_PC (P : Positive) return Text_IO.Positive_Count is begin return Text_IO.Positive_Count (P); end P_To_PC; function PC_To_P (PC : Text_IO.Positive_Count) return Positive is begin return Positive (PC); end PC_To_P; procedure New_Line (File : in File_Type; Spacing : in Positive) is Gap : Text_IO.Positive_Count; begin if Is_Out (File) then Gap := P_To_PC (Spacing); Text_IO.New_Line (File.File_Ref, Gap); end if; exception when others => null; end New_Line; procedure Skip_Line (File : in File_Type; Spacing : in Positive) is Gap : Text_IO.Positive_Count; begin if Is_In (File) then Gap := P_To_PC (Spacing); Text_IO.Skip_Line (File.File_Ref, Gap); end if; exception when others => null; end Skip_Line; procedure New_Page (File : in File_Type) is begin if Is_Out (File) then Text_IO.New_Page (File.File_Ref); end if; exception when others => null; end New_Page; function End_Of_Line (File : File_Type) return Boolean is Ch : Character; EOLN : Boolean; begin if Is_In (File) then EOLN := Text_IO.End_Of_Line (File.File_Ref); -- Support DOS line ending. if not EOLN then Text_IO.Look_Ahead (File.File_Ref, Ch, EOLN); if Ch = Ada.Characters.Latin_1.CR then Text_IO.Get (File.File_Ref, Ch); EOLN := Text_IO.End_Of_Line (File.File_Ref); else EOLN := False; end if; end if; else EOLN := False; end if; return EOLN; end End_Of_Line; function End_Of_File (File : File_Type) return Boolean is Ch : Character; EOF : Boolean; begin if Is_In (File) then EOF := Text_IO.End_Of_File (File.File_Ref); -- Support DOS line ending. 
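         -- If the next character is a carriage return (as left behind by a DOS CR/LF
         -- line ending), it is consumed and the end-of-file test is repeated, so that
         -- a trailing CR on the last line is not mistaken for further input.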
if not EOF then Text_IO.Look_Ahead (File.File_Ref, Ch, EOF); if Ch = Ada.Characters.Latin_1.CR then Text_IO.Get (File.File_Ref, Ch); EOF := Text_IO.End_Of_File (File.File_Ref); else EOF := False; end if; end if; else EOF := True; end if; return EOF; end End_Of_File; procedure Set_Col (File : in File_Type; Posn : in Positive) is Col : Text_IO.Positive_Count; begin if Is_Open (File) then Col := P_To_PC (Posn); Text_IO.Set_Col (File.File_Ref, Col); end if; exception when others => null; end Set_Col; function Col (File : File_Type) return Positive is Posn : Positive; Col : Text_IO.Positive_Count; begin if Is_Open (File) then Col := Text_IO.Col (File.File_Ref); Posn := PC_To_P (Col); else Posn := 1; end if; return Posn; exception when Text_IO.Status_Error => return 1; when Text_IO.Layout_Error => return PC_To_P (Text_IO.Count'Last); when Text_IO.Device_Error => return 1; end Col; function Line (File : File_Type) return Positive is Posn : Positive; Line : Text_IO.Positive_Count; begin if Is_Open (File) then Line := Text_IO.Line (File.File_Ref); Posn := PC_To_P (Line); else Posn := 1; end if; return Posn; exception when Text_IO.Status_Error => return 1; when Text_IO.Layout_Error => return PC_To_P (Text_IO.Count'Last); when Text_IO.Device_Error => return 1; end Line; -- Character IO procedure Get_Char (File : in File_Type; Item : out Character) is begin if Is_In (File) then Text_IO.Get (File.File_Ref, Item); else Item := Character'First; end if; exception when others => null; end Get_Char; procedure Put_Char (File : in File_Type; Item : in Character) is begin if Is_Out (File) then Text_IO.Put (File.File_Ref, Item); end if; exception when IO_Exceptions.Use_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Disk_Full_Error, Msg => "in Put_Char"); when others => null; end Put_Char; -- String IO procedure Get_String (File : in File_Type; Item : out String; Stop : out Natural) is LSTP : Natural; begin if Is_In (File) then LSTP := Item'First - 1; loop exit when End_Of_File (File); LSTP := LSTP + 1; Get_Char (File, Item (LSTP)); exit when LSTP = Item'Last; end loop; Stop := LSTP; else Stop := Item'First - 1; end if; end Get_String; procedure Put_String (File : in File_Type; Item : in String; Stop : in Natural) is Pad : Natural; begin if Is_Out (File) then if Stop = 0 then Text_IO.Put (File.File_Ref, Item); elsif Stop <= Item'Last then Text_IO.Put (File.File_Ref, Item (Item'First .. Stop)); else Pad := Stop - Item'Last; Text_IO.Put (File.File_Ref, Item); while Pad > 0 loop Text_IO.Put (File.File_Ref, ' '); Pad := Pad - 1; end loop; end if; end if; exception when IO_Exceptions.Use_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Disk_Full_Error, Msg => "in Put_String"); when others => null; end Put_String; procedure Get_Line (File : in File_Type; Item : out String; Stop : out Natural) is begin if Is_In (File) then Text_IO.Get_Line (File.File_Ref, Item, Stop); -- Support DOS line ending. if Stop in Item'Range and then Item (Stop) = Ada.Characters.Latin_1.CR then Stop := Stop - 1; end if; else Stop := Item'First - 1; end if; exception when others => Stop := Item'First - 1; end Get_Line; procedure Put_Line (File : in File_Type; Item : in String; Stop : in Natural) is ES : Positive; begin if Stop = 0 then ES := Item'Last; else ES := Stop; end if; if Is_Out (File) then Text_IO.Put_Line (File.File_Ref, Item (Item'First .. 
ES)); end if; exception when IO_Exceptions.Use_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Disk_Full_Error, Msg => "in Put_Line"); when others => null; end Put_Line; -- Integer IO package Integer_IO is new Text_IO.Integer_IO (Integer); procedure Get_Integer (File : in File_Type; Item : out Integer; Width : in Natural; Read : out Boolean) is begin if Is_In (File) then Integer_IO.Get (File.File_Ref, Item, Width); Read := True; else Read := False; end if; exception when others => Read := False; end Get_Integer; procedure Put_Integer (File : in File_Type; Item : in Integer; Width : in Natural; Base : in Number_Base) is begin if Is_Out (File) then Integer_IO.Put (File.File_Ref, Item, Width, Base); end if; exception when IO_Exceptions.Use_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Disk_Full_Error, Msg => "in Put_Integer"); when others => null; end Put_Integer; procedure Get_Int_From_String (Source : in String; Item : out Integer; Start_Pos : in Positive; Stop : out Natural) is begin Integer_IO.Get (Source (Start_Pos .. Source'Last), Item, Stop); exception when others => Stop := Start_Pos - 1; end Get_Int_From_String; -- Float IO package Real_IO is new Text_IO.Float_IO (Float); procedure Get_Float (File : in File_Type; Item : out Float; Width : in Natural; Read : out Boolean) is begin if Is_In (File) then Real_IO.Get (File.File_Ref, Item, Width); Read := True; else Read := False; end if; exception when others => Read := False; end Get_Float; procedure Put_Float (File : in File_Type; Item : in Float; Fore : in Natural; Aft : in Natural; Exp : in Natural) is begin if Is_Out (File) then Real_IO.Put (File.File_Ref, Item, Fore, Aft, Exp); end if; exception when IO_Exceptions.Use_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Disk_Full_Error, Msg => "in Put_Float"); when others => null; end Put_Float; procedure Get_Float_From_String (Source : in String; Item : out Float; Start_Pos : in Positive; Stop : out Natural) is begin Real_IO.Get (Source (Start_Pos .. Source'Last), Item, Stop); exception when others => Stop := Start_Pos - 1; end Get_Float_From_String; procedure Put_Float_To_String (Dest : in out String; Item : in Float; Start_Pos : in Positive; Aft : in Natural; Exp : in Natural) is begin Real_IO.Put (Dest (Start_Pos .. Dest'Last), Item, Aft, Exp); exception when others => null; end Put_Float_To_String; end SPARK_IO; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_factor.adb0000644000175000017500000005637311753202336023225 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= ---------------------------------------------------------------------------- -- Overview: Called to check validity of a -- factor node. Replaces calls to StaticTerm, Base_TypeTerm and CheckTypeTerm ---------------------------------------------------------------------------- separate (Sem.Walk_Expression_P) procedure Wf_Factor (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; T_Stack : in Type_Context_Stack.T_Stack_Type) is Op_Node : STree.SyntaxNode; --------------------------------------------------------------- procedure Do_Abs_Or_Not (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; T_Stack : in Type_Context_Stack.T_Stack_Type) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# T_Stack & --# E_Stack from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# T_Stack & --# STree.Table from *, --# Dictionary.Dict, --# E_Stack, --# Node, --# T_Stack; --# pre (STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.factor or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_factor) and --# Type_Context_Stack.Stack_Is_Valid (T_Stack); --# post STree.Table = STree.Table~; is Result : Sem.Exp_Record; Base_Type : Dictionary.Symbol; Op_Node : STree.SyntaxNode; Operator : SP_Symbols.SP_Symbol; Val : Maths.Value; Error : Boolean; procedure Resolve_Universal (Node : in STree.SyntaxNode; Operator : in SP_Symbols.SP_Symbol; T_Stack : in Type_Context_Stack.T_Stack_Type; Val : in Maths.Value; T : in out Dictionary.Symbol; Error : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error from Dictionary.Dict, --# LexTokenManager.State, --# Operator, --# T, --# T_Stack, --# Val & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Operator, --# SPARK_IO.File_Sys, --# STree.Table, --# T, --# T_Stack, --# Val & --# T from *, --# Dictionary.Dict, --# T_Stack; --# pre (STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.factor or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_factor) and --# Type_Context_Stack.Stack_Is_Valid (T_Stack) and --# (Dictionary.Is_Null_Symbol (T) or Dictionary.IsTypeMark (T, Dictionary.Dict)); --# post Dictionary.Is_Null_Symbol (T) or Dictionary.IsTypeMark (T, Dictionary.Dict); is Base_Type : Dictionary.Symbol; Val_Check : Maths.Value; procedure Debug_Print --# derives ; is --# hide Debug_Print; begin if CommandLineData.Content.Debug.Expressions then SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Wf_Factor encounters a universal expression. 
Resolving by context to type ", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Dictionary.GetSimpleName (Type_Context_Stack.Top (Stack => T_Stack)))); end if; end Debug_Print; begin -- Resolve_Universal Error := False; if Dictionary.IsUniversalRealType (T) then -- We want to convert but not check T := Type_Context_Stack.Top (Stack => T_Stack); Debug_Print; elsif Dictionary.IsUniversalIntegerType (T) then T := Type_Context_Stack.Top (Stack => T_Stack); Debug_Print; Val_Check := Val; if Operator = SP_Symbols.RWabs then -- Check against T'Base Base_Type := Dictionary.GetBaseType (T); if not Dictionary.Is_Null_Symbol (Base_Type) then Sem.Constraint_Check (Val => Val, New_Val => Val_Check, Is_Annotation => False, Typ => Base_Type, Position => STree.Node_Position (Node => STree.Last_Child_Of (Start_Node => STree.Last_Sibling_Of (Start_Node => STree.Last_Child_Of (Start_Node => Node))))); end if; elsif Operator = SP_Symbols.RWnot then -- Check against T if Dictionary.IsUnknownTypeMark (T) then -- Trying to resolve from Universal_Integer to Unknown means we don't have -- enough context to resolve the type or value of this expression, so SPARK -- requires qualification. ErrorHandler.Semantic_Error (Err_Num => 806, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); else Sem.Constraint_Check (Val => Val, New_Val => Val_Check, Is_Annotation => False, Typ => T, Position => STree.Node_Position (Node => STree.Last_Child_Of (Start_Node => STree.Last_Sibling_Of (Start_Node => STree.Last_Child_Of (Start_Node => Node))))); end if; end if; if Maths.HasNoValue (Val_Check) then Error := True; end if; end if; end Resolve_Universal; procedure Calc_Abs_Or_Not (Base_Type : in Dictionary.Symbol; Operator : in SP_Symbols.SP_Symbol; Val : in out Maths.Value) --# global in Dictionary.Dict; --# in LexTokenManager.State; --# derives Val from *, --# Base_Type, --# Dictionary.Dict, --# LexTokenManager.State, --# Operator; is begin if Operator = SP_Symbols.RWabs then Maths.Absolute (Val); elsif Operator = SP_Symbols.RWnot then if Dictionary.TypeIsModular (Base_Type) then Maths.ModularNotOp (Val, Maths.ValueRep (Dictionary.GetScalarAttributeValue (False, LexTokenManager.Modulus_Token, Base_Type))); else Maths.NotOp (Val); end if; end if; end Calc_Abs_Or_Not; begin -- Do_Abs_Or_Not Exp_Stack.Pop (Item => Result, Stack => E_Stack); -- same result type and flags Op_Node := STree.Child_Node (Current_Node => Node); Operator := STree.Syntax_Node_Type (Node => Op_Node); -- ASSUME Operator = RWabs OR RWnot SystemErrors.RT_Assert (C => Operator = SP_Symbols.RWabs or else Operator = SP_Symbols.RWnot, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Operator = RWabs OR RWnot in Do_Abs_Or_Not"); Val := Result.Value; Resolve_Universal (Node => Node, Operator => Operator, T_Stack => T_Stack, Val => Val, T => Result.Type_Symbol, Error => Error); Base_Type := Dictionary.GetRootType (Result.Type_Symbol); STree.Add_Node_Symbol (Node => Op_Node, Sym => Result.Type_Symbol); if Error then Result := Sem.Unknown_Type_Record; end if; if not Dictionary.UnaryOperatorIsDefined (Operator, Base_Type) then Result := Sem.Unknown_Type_Record; if Operator = SP_Symbols.RWabs then if Dictionary.IsModularType (Base_Type, Scope) then ErrorHandler.Semantic_Error (Err_Num => 803, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Op_Node), Id_Str => LexTokenManager.Null_String); 
else ErrorHandler.Semantic_Error (Err_Num => 40, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => STree.Next_Sibling (Op_Node)), Id_Str => LexTokenManager.Null_String); end if; elsif Operator = SP_Symbols.RWnot then ErrorHandler.Semantic_Error_Sym (Err_Num => 119, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Op_Node), Sym => Base_Type, Scope => Scope); end if; elsif STree.Syntax_Node_Type (Node => Node) = SP_Symbols.factor and then not Dictionary.UnaryOperatorIsVisible (Operator, Base_Type, Scope) then Result := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 309, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Op_Node), Id_Str => LexTokenManager.Null_String); else -- check for misuse of unconstrained boolean array if Dictionary.Is_Unconstrained_Array_Type_Mark (Result.Type_Symbol, Scope) then Result.Errors_In_Expression := True; ErrorHandler.Semantic_Error (Err_Num => 39, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => STree.Next_Sibling (Op_Node)), Id_Str => LexTokenManager.Null_String); end if; Val := Result.Value; Calc_Abs_Or_Not (Base_Type => Base_Type, Operator => Operator, Val => Val); Result.Value := Val; if Dictionary.TypeIsScalar (Result.Type_Symbol) then Result.Type_Symbol := Base_Type; end if; Result.Variable_Symbol := Dictionary.NullSymbol; Result.Is_AVariable := False; Result.Is_An_Entire_Variable := False; Result.Has_Operators := True; end if; -- OtherSymbol may carry a function symbol in the case of uses of unchecked_conversion. -- This symbol is used (by wf_Assign) to convery information to the VCG to supress -- checks when an unchecked_conversion is assigned to something of the same subtype. -- We do not want this mechanism if the unchecked_conversion is used in any other context -- than a direct assignment. 
Therefore we clear OtherSymbol here: Result.Other_Symbol := Dictionary.NullSymbol; Exp_Stack.Push (X => Result, Stack => E_Stack); end Do_Abs_Or_Not; --------------------------------------------------------------- procedure Do_Star_Star (Node, Op_Node : in STree.SyntaxNode; Left_Node_Pos, Right_Node_Pos : in LexTokenManager.Token_Position; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; T_Stack : in Type_Context_Stack.T_Stack_Type) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# Left_Node_Pos, --# LexTokenManager.State, --# Node, --# Op_Node, --# Right_Node_Pos, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# T_Stack & --# E_Stack from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# T_Stack & --# STree.Table from *, --# CommandLineData.Content, --# Dictionary.Dict, --# E_Stack, --# Node, --# Op_Node, --# Scope, --# T_Stack; --# pre (STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.factor or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_factor) and --# STree.Syntax_Node_Type (Op_Node, STree.Table) = SP_Symbols.double_star and --# Type_Context_Stack.Stack_Is_Valid (T_Stack); --# post STree.Table = STree.Table~; is Left, Right, Result : Sem.Exp_Record; The_Child_Node : STree.SyntaxNode; begin Exp_Stack.Pop (Item => Right, Stack => E_Stack); Exp_Stack.Pop (Item => Left, Stack => E_Stack); Result := Null_Type_Record; -- safety: we may not set all fields below -- do static checks first Result.Is_Constant := Left.Is_Constant and then Right.Is_Constant; Result.Is_Static := Left.Is_Static and then Right.Is_Static; Result.Has_Operators := True; if Left.Is_ARange or else Right.Is_ARange then Result := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 90, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Op_Node), Id_Str => LexTokenManager.Null_String); else -- neither are ranges -- Now do type compat and operator visibility checks First, -- implicit type conversion -- Implicitly convert the Left operand if its type is -- Universal, and the context is not unknown. if (Dictionary.IsUniversalIntegerType (Left.Type_Symbol) or else Dictionary.IsUniversalRealType (Left.Type_Symbol)) and then not Dictionary.IsUnknownTypeMark (Type_Context_Stack.Top (Stack => T_Stack)) then Left.Type_Symbol := Type_Context_Stack.Top (Stack => T_Stack); end if; -- The right operand of ** is always predefined Integer, so this -- does not depend upon the context stack. 
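         -- For example, in X ** 2 the literal 2 is resolved against predefined Integer
         -- here, whatever type the enclosing expression expects for X ** 2 itself.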
if Dictionary.IsUniversalIntegerType (Right.Type_Symbol) then Right.Type_Symbol := Dictionary.GetPredefinedIntegerType; end if; The_Child_Node := STree.Child_Node (Current_Node => Node); -- ASSUME The_Child_Node = primary OR annotation_primary OR RWabs OR RWnot SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => The_Child_Node) = SP_Symbols.primary or else STree.Syntax_Node_Type (Node => The_Child_Node) = SP_Symbols.annotation_primary or else STree.Syntax_Node_Type (Node => The_Child_Node) = SP_Symbols.RWabs or else STree.Syntax_Node_Type (Node => The_Child_Node) = SP_Symbols.RWnot, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect The_Child_Node = primary OR annotation_primary OR RWabs OR RWnot in Do_Star_Star"); -- add type of LHS to syntax tree for use by VCG in run-time checks STree.Add_Node_Symbol (Node => The_Child_Node, Sym => Left.Type_Symbol); -- then, operator visibility Check_Binary_Operator (Operator => SP_Symbols.double_star, Left => Left, Right => Right, Scope => Scope, T_Stack => T_Stack, Op_Pos => STree.Node_Position (Node => Op_Node), Left_Pos => Left_Node_Pos, Right_Pos => Right_Node_Pos, Convert => False, Is_Annotation => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_factor, Result => Result); STree.Add_Node_Symbol (Node => Op_Node, Sym => Result.Type_Symbol); Calc_Binary_Operator (Node_Pos => STree.Node_Position (Node => Node), Operator => SP_Symbols.double_star, Left_Val => Left.Value, Right_Val => Right.Value, Is_Annotation => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_factor, Result => Result); end if; Result.Errors_In_Expression := Result.Errors_In_Expression or else Left.Errors_In_Expression or else Right.Errors_In_Expression; -- OtherSymbol may carry a function symbol in the case of uses of unchecked_conversion. -- This symbol is used (by wf_Assign) to convery information to the VCG to supress -- checks when an unchecked_conversion is assigned to something of the same subtype. -- We do not want this mechanism if the unchecked_conversion is sued in any other context -- than a direct assignment. 
Therefore we clear OtherSymbol here: Result.Other_Symbol := Dictionary.NullSymbol; Exp_Stack.Push (X => Result, Stack => E_Stack); end Do_Star_Star; begin -- Wf_Factor Op_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Op_Node = RWabs OR RWnot OR primary OR annotation_primary if STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.RWabs or else STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.RWnot then -- ASSUME Op_Node = RWabs OR RWnot Do_Abs_Or_Not (Node => Node, Scope => Scope, E_Stack => E_Stack, T_Stack => T_Stack); elsif STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.primary or else STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.annotation_primary then -- ASSUME Op_Node = primary OR annotation_primary Op_Node := STree.Next_Sibling (Current_Node => Op_Node); -- ASSUME Op_Node = double_star OR NULL if STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.double_star then -- ASSUME Op_Node = double_star Do_Star_Star (Node => Node, Op_Node => Op_Node, Left_Node_Pos => STree.Node_Position (Node => STree.Child_Node (Current_Node => Node)), Right_Node_Pos => STree.Node_Position (Node => STree.Next_Sibling (Current_Node => Op_Node)), Scope => Scope, E_Stack => E_Stack, T_Stack => T_Stack); elsif Op_Node /= STree.NullNode then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Op_Node = double_star OR NULL in Wf_Factor"); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Op_Node = RWabs OR RWnot OR primary OR annotation_primary in Wf_Factor"); end if; end Wf_Factor; spark-2012.0.deb/examiner/indexmanager.idx0000644000175000017500000000070711753202337017426 0ustar eugeneugenindexmanager components are indexmanager.cache, indexmanager.index_table_p indexmanager specification is in indexmanager.ads indexmanager body is in indexmanager.adb indexmanager.cache specification is in indexmanager-cache.ads indexmanager.cache body is in indexmanager-cache.shb indexmanager.index_table_p specification is in indexmanager-index_table_p.ads indexmanager.index_table_p body is in indexmanager-index_table_p.adb spark-2012.0.deb/examiner/filesystem.ads0000644000175000017500000001617011753202336017133 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with E_Strings; with SPARK_IO; --# inherit ExaminerConstants, --# E_Strings, --# SPARK_IO; package FileSystem is ----------------------------------------------------------------- -- Constants, types, and functions relating to reading -- command-line arguments ----------------------------------------------------------------- function Argument_Separator return Character; function Is_An_Argument_Terminator (Ch : Character) return Boolean; function Use_Windows_Command_Line return Boolean; function Use_Unix_Command_Line return Boolean; procedure Read_Cmd_Line (Cmd_Line_Found : out Boolean; Cmd_Line : out E_Strings.T); --# global in out SPARK_IO.File_Sys; --# derives Cmd_Line, --# Cmd_Line_Found, --# SPARK_IO.File_Sys from SPARK_IO.File_Sys; ----------------------------------------------------------------- -- Constants, types, and functions relating to the casing -- of filenames ----------------------------------------------------------------- -- Select default filename casing when Opening files function Case_Of_Files_For_Open (E_Str : E_Strings.T) return E_Strings.T; -- Select default filename casing when Creating files function Case_Of_Files_For_Create (E_Str : E_Strings.T) return E_Strings.T; ----------------------------------------------------------------- -- Constants, types, and functions relating to Source files ----------------------------------------------------------------- -- Opens a source file for reading, but using the Form required -- by the host compiler to enable sharing of the file. procedure Open_Source_File (File : in out SPARK_IO.File_Type; Name : in E_Strings.T; Status : out SPARK_IO.File_Status); --# global in out SPARK_IO.File_Sys; --# derives File, --# SPARK_IO.File_Sys, --# Status from File, --# Name, --# SPARK_IO.File_Sys; ----------------------------------------------------------------- -- Constants, types, and subprograms relating to filenames and -- extensions. ----------------------------------------------------------------- type Typ_File_Spec_Status is (File_Found, File_Missing, File_Invalid); -- Returns File_Name fully normalized relative to the second argument. -- If the second argument is an empty string, then the filename -- is interpretated relative to current working directory function Interpret_Relative (File_Name, Relative_To_Directory : E_Strings.T) return E_Strings.T; -- Find the fully normalized pathname for File_Spec relative -- to current working directory and returns it in Full_File_Name. -- File_Status is set to File_Found if this pathname denotes -- an existing regular file, or File_Missing otherwise. procedure Find_Full_File_Name (File_Spec : in E_Strings.T; File_Status : out Typ_File_Spec_Status; Full_File_Name : out E_Strings.T); --# global in SPARK_IO.File_Sys; --# derives File_Status, --# Full_File_Name from File_Spec, --# SPARK_IO.File_Sys; -- Returns the basename of Fn, with or without the extension. For example: -- Just_File ("a/b/c/f.dat", True) = "f.dat" -- Just_File ("a/b/c/g.exe", False) = "g" function Just_File (Fn : E_Strings.T; Ext : Boolean) return E_Strings.T; -- Any directory prefix is removed. If Suffix is non-empty and is a -- suffix of Fn (1 .. Stop), it is removed. This implements the -- same semantics as GNAT.Directory_Operations.Base_Name on systems -- where this library is available, but is legal SPARK. 
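   -- For example (illustrative paths):
   --    Base_Name ("a/b/c/f.adb", ".adb") = "f"
   --    Base_Name ("a/b/c/f.adb", ".txt") = "f.adb"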
function Base_Name (Path : E_Strings.T; Suffix : String) return E_Strings.T; procedure Check_Extension (Fn : in out E_Strings.T; Ext : in E_Strings.T); --# derives Fn from *, --# Ext; --780--special version of Check_Extension just for listing files procedure Check_Listing_Extension (Source_Name : in E_Strings.T; Fn : in out E_Strings.T; Ext : in E_Strings.T); --# derives Fn from *, --# Ext, --# Source_Name; function Is_Readable_File (F : E_Strings.T) return Boolean; --# global in SPARK_IO.File_Sys; function Same_File (F1, F2 : E_Strings.T) return Boolean; function Is_Directory (F : E_Strings.T) return Boolean; --# global in SPARK_IO.File_Sys; function Start_Of_Directory return E_Strings.T; function Directory_Separator return E_Strings.T; -- if directory name D ends with an End_Of_Path string, then -- no change, otherwise append End_Of_Path procedure Append_End_Of_Path_If_Needed (D : in out E_Strings.T); --# derives D from *; -- if directory name D ends with an End_Of_Path string, then -- remove it, otherwise no change procedure Remove_End_Of_Path_If_Present (D : in out E_Strings.T); --# derives D from *; procedure Idempotent_Create_Subdirectory (Path : in E_Strings.T; Ok : out Boolean); --# global in out SPARK_IO.File_Sys; --# derives Ok, --# SPARK_IO.File_Sys from Path, --# SPARK_IO.File_Sys; -- For Unix/NT does not return a trailing / or \ function Working_Directory return E_Strings.T; --# global in SPARK_IO.File_Sys; -- For Unix/NT does not return a trailing / or \ function Examiner_Lib_Directory return E_Strings.T; --# global in SPARK_IO.File_Sys; -- For Unix/NT does not return a trailing / or \ function Examiner_SPARK_Lib_Directory return E_Strings.T; --# global in SPARK_IO.File_Sys; function Get_Environment_Variable (Env_Var_Name : String) return E_Strings.T; --# global in SPARK_IO.File_Sys; function Get_Relative_Path (Full_Path : E_Strings.T; Prefix : E_Strings.T) return E_Strings.T; function End_Of_Line return E_Strings.T; end FileSystem; spark-2012.0.deb/examiner/dictionary.adb0000644000175000017500000445744011753202336017110 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- -- Rules of the Dictionary: -- -- 1. Each subprogram callable from outside of Dictionary should have 2 versions: -- - the version without underscore used for types conversion ONLY from/to Dictionary.Symbol. 
-- - the version with underscore or terminated by "_Local" containing the actual implementation on the relevant type
--   (NOT Dictionary.Symbol)
-- Example:
--    function Foo_Bar (A_B : RawDict.A_B_Info_ref;
--                      Arg : T) return RawDict.C_D
--    --# global in Dict;
--    is
--       return [THE_ACTUAL_IMPLEMENTATION];
--    end Foo_Bar;
--
--    function FooBar (AB  : Symbol;
--                     Arg : T) return Symbol is
--    begin
--       return RawDict.Get_C_D_Symbol (Foo_Bar (A_B => RawDict.Get_A_B_Info_Ref (AB),
--                                                Arg => Arg));
--    end FooBar;
--
-- 2. For each type A_B in RawDict, there are 2 functions for types conversion:
--    - RawDict.Get_A_B_Info_Ref converts a Dictionary.Symbol into a RawDict.A_B_Info_Ref. This function may raise a
--      SystemErrors.Fatal_Error if the Symbol is not of the expected type.
--    - RawDict.Get_A_B_Symbol converts a RawDict.A_B_Info_Ref into a Dictionary.Symbol. This function always succeeds.
--
-- 3. If a types conversion RawDict.Get_A_B_Info_Ref is safe (protected by a case/if statement on
--    RawDict.GetSymbolDiscriminant (Symbol)), the named argument is used; otherwise the positional argument is used.
--    Example:
--    case RawDict.GetSymbolDiscriminant (Symbol) is
--       when A_B_Symbol =>
--          R := Foo_Bar (A_B => RawDict.Get_A_B_Info_Ref (Item => Symbol),
--                        Arg => Arg);
--       when others =>
--          ...
--    end case;
--
-- 4. The subprogram callable from outside (the one without underscore or "_Local") should never be called from inside Dictionary.
--    When a subprogram from Dictionary has to call another subprogram from Dictionary, the one with underscore or "_Local" has to
--    be used. This version is safer (no risk of SystemErrors.Fatal_Error linked with types conversion) and faster (prevents 2
--    type conversions) because it is strongly typed.
--
-- 5. If a subprogram callable from outside may get different types of Dictionary.Symbol, a case statement will be used with the
--    following pattern:
--    function FooBar (Sym : Symbol) return T is
--       R : T;
--    begin
--       case RawDict.GetSymbolDiscriminant (Symb) is
--          when A_B_Symbol =>
--             R := F (A_B => RawDict.Get_A_B_Info_Ref (Item => Sym));
--          when C_D_Symbol =>
--             R := G (C_D => RawDict.Get_C_D_Info_Ref (Item => Sym));
--          when others =>
--             R := Null_T;
--             SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table,
--                                       Msg     => "in FooBar");
--       end case;
--       return R;
--    end FooBar;
--
--    The "when others =>" must raise a SystemErrors.Fatal_Error! This enforces knowing what type of Dictionary.Symbol is
--    in use.
--
-- 6. If a subprogram is ONLY called from outside of Dictionary, the subprogram is called purely external.
--    Each line containing types conversion on the arguments/return variable of the subprogram will be terminated by
--    "-- GAA External". This allows "grep" filtering only on relevant types conversion inside Dictionary.
--    Let us assume that the function FooBar is never called from inside Dictionary; the function will be:
--
--    function FooBar (Sym : Symbol) return Symbol is
--       R : RawDict.C_D;
--    begin
--       case RawDict.GetSymbolDiscriminant (Symb) is
--          when A_B_Symbol =>
--             R := F (A_B => RawDict.Get_A_B_Info_Ref (Item => Sym)); -- GAA External
--          when C_D_Symbol =>
--             R := G (C_D => RawDict.Get_C_D_Info_Ref (Item => Sym)); -- GAA External
--          when others =>
--             R := Null_T;
--             SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table,
--                                       Msg     => "in FooBar");
--       end case;
--       return RawDict.Get_C_D_Symbol (R); -- GAA External
--    end FooBar;
--
-- 7. The case statement is preferred over the if/elsif/else statement. 
-- Example: -- case RawDict.GetSymbolDiscriminant (Symb) is -- when A_B_Symbol => -- R := F (A_B => RawDict.Get_A_B_Info_Ref (Item => Sym)); -- when C_D_Symbol => -- R := G (C_D => RawDict.Get_C_D_Info_Ref (Item => Sym)); -- when others => -- R := Null_T; -- SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, -- Msg => "in FooBar"); -- end case; -- -- is preferred over -- -- if RawDict.GetSymbolDiscriminant (Symb) = A_B_Symbol then -- R := F (A_B => RawDict.Get_A_B_Info_Ref (Item => Sym)); -- elsif RawDict.GetSymbolDiscriminant (Symb) = C_D_Symbol then -- R := G (C_D => RawDict.Get_C_D_Info_Ref (Item => Sym)); -- else -- R := Null_T; -- SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, -- Msg => "in FooBar"); -- end if; -- -------------------------------------------------------------------------------- with CommandLineData; with FileSystem; with Maths; with Statistics; with SystemErrors; with Ada.Containers.Vectors; use type CommandLineData.Language_Profiles; use type Maths.ErrorCode; package body Dictionary is ----------------------------------------------------------------------------- -- TYPES -- ----------------------------------------------------------------------------- type SymbolDiscriminant is ( Null_Symbol, Declaration_Symbol, Enumeration_Literal_Symbol, Array_Index_Symbol, Record_Component_Symbol, Subcomponent_Symbol, Type_Symbol, Constant_Symbol, Variable_Symbol, Own_Variable_Symbol, Global_Variable_Symbol, Quantified_Variable_Symbol, Implicit_Return_Variable_Symbol, Implicit_In_Stream_Symbol, -- notional volatile source for reads of protected vars Rule_Policy_Symbol, Constituent_Symbol, Context_Clause_Symbol, Use_Type_Clause_Symbol, Parameter_Constraint_Symbol, -- index information for unconstrained formal parameters Subprogram_Parameter_Symbol, Subprogram_Symbol, Operator_Symbol, Dependency_Symbol, Package_Symbol, Generic_Parameter_Symbol, Generic_Unit_Symbol, Generic_Association_Symbol, OwnTaskSymbol, ImplicitProofFunctionSymbol, SuspendsListItemSymbol, InterruptStreamMappingSymbol, VirtualElementSymbol, LoopSymbol, LoopParameterSymbol, LoopEntryVariableSymbol, ProtectedInfoSymbol, TaskInfoSymbol, KnownDiscriminantSymbol, SubtypeSymbol, -- extra info on Task/Protected subtypes DiscriminantConstraintSymbol); -- constrains for above type Type_Discriminant is ( Unknown_Type_Item, Enumeration_Type_Item, Integer_Type_Item, Modular_Type_Item, Floating_Point_Type_Item, Fixed_Point_Type_Item, Array_Type_Item, Record_Type_Item, Abstract_Proof_Type_Item, Protected_Type_Item, Task_Type_Item, Access_Type_Item, Generic_Type_Item); type Generic_Type_Discriminant is ( Invalid_Generic_Type, Generic_Private_Type, Generic_Discrete_Type, Generic_Integer_Type, Generic_Modular_Type, Generic_Floating_Point_Type, Generic_Fixed_Point_Type, Generic_Array_Type); type TriState is (Never, Sometimes, Always); -- RefType needs to be big enough to be suitable for Unchecked_Conversion -- from/to an access type, so... type Ref_Type is range -2 ** (ExaminerConstants.Address_Size - 1) .. 2 ** (ExaminerConstants.Address_Size - 1) - 1; -- If conducting proof, it may be useful to assert the base type of RefType. -- This base type will be platform dependent. 
On 32 bit systems the correct -- assertion may look like: -- --# assert RefType'Base is Integer; -- While on 64 bit systems the correct assertion may look like: -- --# assert RefType'Base is Long_Long_Integer; for Ref_Type'Size use ExaminerConstants.Address_Size; --# inherit ContextManager, --# Dictionary, --# LexTokenManager, --# SystemErrors; package Dynamic_Symbol_Table is subtype Valid_Symbol is Dictionary.Symbol range 1 .. Dictionary.Symbol'Last; type T is private; procedure Initialize (The_Table : out T); --# derives The_Table from ; function Get_Current_Usage (The_Table : in T) return Natural; procedure Add_Symbol (The_Table : in out T; Discriminant : in Dictionary.SymbolDiscriminant; Ref : in Dictionary.Ref_Type; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; Item : out Dictionary.Symbol); --# derives Item from The_Table & --# The_Table from *, --# Comp_Unit, --# Discriminant, --# Loc, --# Ref; function Get_Symbol_Discriminant (The_Table : in T; Item : in Dictionary.Symbol) return Dictionary.SymbolDiscriminant; function Get_Symbol_Ref (The_Table : in T; Item : in Dictionary.Symbol) return Dictionary.Ref_Type; function Get_Symbol_Compilation_Unit (The_Table : in T; Item : in Dictionary.Symbol) return ContextManager.UnitDescriptors; function Get_Symbol_Location (The_Table : in T; Item : in Dictionary.Symbol) return LexTokenManager.Token_Position; procedure Set_Symbol_Location (The_Table : in out T; Item : in Dictionary.Symbol; Location : in LexTokenManager.Token_Position); --# derives The_Table from *, --# Item, --# Location; -- This type should mode in the private part of the package when "hide" is removed. type Symbol_Info is record Discriminant : Dictionary.SymbolDiscriminant; Comp_Unit : ContextManager.UnitDescriptors; Loc : LexTokenManager.Token_Position; Ref : Dictionary.Ref_Type; end record; private --# hide Dynamic_Symbol_Table; package ST_Vec is new Ada.Containers.Vectors (Index_Type => Valid_Symbol, Element_Type => Symbol_Info); type T is record Vec : ST_Vec.Vector; end record; end Dynamic_Symbol_Table; -------------------------------------------------------------------------------- -- RAW DICTIONARY -- -------------------------------------------------------------------------------- --# inherit ContextManager, --# Dictionary, --# Dynamic_Symbol_Table, --# ExaminerConstants, --# LexTokenManager, --# SP_Symbols; package RawDict is type Declaration_Info_Ref is private; Null_Declaration_Info_Ref : constant Declaration_Info_Ref; -------------------------------------------------------------------------------- type Enumeration_Literal_Info_Ref is private; Null_Enumeration_Literal_Info_Ref : constant Enumeration_Literal_Info_Ref; -------------------------------------------------------------------------------- type Array_Index_Info_Ref is private; Null_Array_Index_Info_Ref : constant Array_Index_Info_Ref; -------------------------------------------------------------------------------- type Record_Component_Info_Ref is private; Null_Record_Component_Info_Ref : constant Record_Component_Info_Ref; -------------------------------------------------------------------------------- type Subcomponent_Info_Ref is private; Null_Subcomponent_Info_Ref : constant Subcomponent_Info_Ref; -------------------------------------------------------------------------------- type Type_Info_Ref is private; Null_Type_Info_Ref : constant Type_Info_Ref; -------------------------------------------------------------------------------- type Constant_Info_Ref is private; 
Null_Constant_Info_Ref : constant Constant_Info_Ref; -------------------------------------------------------------------------------- type Variable_Info_Ref is private; Null_Variable_Info_Ref : constant Variable_Info_Ref; -------------------------------------------------------------------------------- type Own_Variable_Info_Ref is private; Null_Own_Variable_Info_Ref : constant Own_Variable_Info_Ref; -------------------------------------------------------------------------------- type Global_Variable_Info_Ref is private; Null_Global_Variable_Info_Ref : constant Global_Variable_Info_Ref; -------------------------------------------------------------------------------- type Quantified_Variable_Info_Ref is private; Null_Quantified_Variable_Info_Ref : constant Quantified_Variable_Info_Ref; -------------------------------------------------------------------------------- type Implicit_Return_Variable_Info_Ref is private; Null_Implicit_Return_Variable_Info_Ref : constant Implicit_Return_Variable_Info_Ref; -------------------------------------------------------------------------------- type Implicit_In_Stream_Info_Ref is private; Null_Implicit_In_Stream_Info_Ref : constant Implicit_In_Stream_Info_Ref; -------------------------------------------------------------------------------- type Rule_Policy_Info_Ref is private; Null_Rule_Policy_Info_Ref : constant Rule_Policy_Info_Ref; -------------------------------------------------------------------------------- type Constituent_Info_Ref is private; Null_Constituent_Info_Ref : constant Constituent_Info_Ref; -------------------------------------------------------------------------------- type Context_Clause_Info_Ref is private; Null_Context_Clause_Info_Ref : constant Context_Clause_Info_Ref; -------------------------------------------------------------------------------- type Use_Type_Clause_Info_Ref is private; Null_Use_Type_Clause_Info_Ref : constant Use_Type_Clause_Info_Ref; -------------------------------------------------------------------------------- type Parameter_Constraint_Info_Ref is private; Null_Parameter_Constraint_Info_Ref : constant Parameter_Constraint_Info_Ref; -------------------------------------------------------------------------------- type Subprogram_Parameter_Info_Ref is private; Null_Subprogram_Parameter_Info_Ref : constant Subprogram_Parameter_Info_Ref; -------------------------------------------------------------------------------- type Subprogram_Info_Ref is private; Null_Subprogram_Info_Ref : constant Subprogram_Info_Ref; -------------------------------------------------------------------------------- type Operator_Info_Ref is private; Null_Operator_Info_Ref : constant Operator_Info_Ref; -------------------------------------------------------------------------------- type Dependency_Info_Ref is private; Null_Dependency_Info_Ref : constant Dependency_Info_Ref; -------------------------------------------------------------------------------- type Package_Info_Ref is private; Null_Package_Info_Ref : constant Package_Info_Ref; -------------------------------------------------------------------------------- type Generic_Parameter_Info_Ref is private; Null_Generic_Parameter_Info_Ref : constant Generic_Parameter_Info_Ref; -------------------------------------------------------------------------------- type Generic_Unit_Info_Ref is private; Null_Generic_Unit_Info_Ref : constant Generic_Unit_Info_Ref; -------------------------------------------------------------------------------- type Generic_Association_Info_Ref is private; 
Null_Generic_Association_Info_Ref : constant Generic_Association_Info_Ref; -------------------------------------------------------------------------------- function GetSymbolDiscriminant (Item : Dictionary.Symbol) return Dictionary.SymbolDiscriminant; --# global in Dictionary.Dict; function Get_Symbol_Compilation_Unit (Item : Dictionary.Symbol) return ContextManager.UnitDescriptors; --# global in Dictionary.Dict; function Get_Symbol_Location (Item : Dictionary.Symbol) return LexTokenManager.Token_Position; --# global in Dictionary.Dict; procedure Set_Symbol_Location (Item : in Dictionary.Symbol; Location : in LexTokenManager.Token_Position); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Item, --# Location; -------------------------------------------------------------------------------- -- Declaration_Info -------------------------------------------------------------------------------- function Get_Declaration_Info_Ref (Item : Dictionary.Symbol) return Declaration_Info_Ref; --# global in Dictionary.Dict; --# return S => (Item = Dictionary.NullSymbol <-> S = Null_Declaration_Info_Ref); procedure Create_Declaration (Context : in Dictionary.Contexts; Scope : in Dictionary.Scopes; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Declaration : out Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# The_Declaration from Comp_Unit, --# Context, --# Dictionary.Dict, --# Loc, --# Scope; --# post The_Declaration /= Null_Declaration_Info_Ref; procedure Set_Declaration_Context (The_Declaration : in Declaration_Info_Ref; Context : in Dictionary.Contexts); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Context, --# The_Declaration; procedure Set_Declaration_Item (The_Declaration : in Declaration_Info_Ref; Item : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Item, --# The_Declaration; procedure Set_Next_Declaration (The_Declaration, Next : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Next, --# The_Declaration; function Get_Declaration_Symbol (The_Declaration : Declaration_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; --# return S => ((S = Dictionary.NullSymbol <-> The_Declaration = Null_Declaration_Info_Ref) and --# (S = Dictionary.NullSymbol or GetSymbolDiscriminant (S, Dictionary.Dict) = Dictionary.Declaration_Symbol)); function Get_Declaration_Context (The_Declaration : Declaration_Info_Ref) return Dictionary.Contexts; --# global in Dictionary.Dict; function Get_Declaration_Scope (The_Declaration : Declaration_Info_Ref) return Dictionary.Scopes; --# global in Dictionary.Dict; function Get_Declaration_Item (The_Declaration : Declaration_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; function Get_Next_Declaration (The_Declaration : Declaration_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -- Enumeration_Literal_Info -------------------------------------------------------------------------------- function Get_Enumeration_Literal_Info_Ref (Item : Dictionary.Symbol) return Enumeration_Literal_Info_Ref; --# global in Dictionary.Dict; --# return S => (Item = Dictionary.NullSymbol <-> S = Null_Enumeration_Literal_Info_Ref); procedure Create_Enumeration_Literal (Name : in LexTokenManager.Lex_String; Position : in 
LexTokenManager.Lex_String; Enumeration_Type : in Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Enumeration_Literal : out Enumeration_Literal_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# The_Enumeration_Literal from Comp_Unit, --# Dictionary.Dict, --# Enumeration_Type, --# Loc, --# Name, --# Position; --# post The_Enumeration_Literal /= Null_Enumeration_Literal_Info_Ref; procedure Set_Next_Enumeration_Literal (The_Enumeration_Literal, Next : in Enumeration_Literal_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Next, --# The_Enumeration_Literal; function Get_Enumeration_Literal_Symbol (The_Enumeration_Literal : Enumeration_Literal_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; --# return S => ((S = Dictionary.NullSymbol <-> The_Enumeration_Literal = Null_Enumeration_Literal_Info_Ref) and --# (S = Dictionary.NullSymbol or GetSymbolDiscriminant (S, Dictionary.Dict) = Dictionary.Enumeration_Literal_Symbol)); function Get_Enumeration_Literal_Name (The_Enumeration_Literal : Enumeration_Literal_Info_Ref) return LexTokenManager.Lex_String; --# global in Dictionary.Dict; function Get_Enumeration_Literal_Position (The_Enumeration_Literal : Enumeration_Literal_Info_Ref) return LexTokenManager.Lex_String; --# global in Dictionary.Dict; function Get_Enumeration_Literal_Type (The_Enumeration_Literal : Enumeration_Literal_Info_Ref) return Type_Info_Ref; --# global in Dictionary.Dict; function Get_Next_Enumeration_Literal (The_Enumeration_Literal : Enumeration_Literal_Info_Ref) return Enumeration_Literal_Info_Ref; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -- Array_Index_Info -------------------------------------------------------------------------------- function Get_Array_Index_Info_Ref (Item : Dictionary.Symbol) return Array_Index_Info_Ref; --# global in Dictionary.Dict; --# return S => (Item = Dictionary.NullSymbol <-> S = Null_Array_Index_Info_Ref); procedure Create_Array_Index (Index_Type : in Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Array_Index : out Array_Index_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# The_Array_Index from Comp_Unit, --# Dictionary.Dict, --# Index_Type, --# Loc; --# post The_Array_Index /= Null_Array_Index_Info_Ref; procedure Set_Next_Array_Index (The_Array_Index, Next : in Array_Index_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Next, --# The_Array_Index; function Get_Array_Index_Symbol (The_Array_Index : Array_Index_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; --# return S => ((S = Dictionary.NullSymbol <-> The_Array_Index = Null_Array_Index_Info_Ref) and --# (S = Dictionary.NullSymbol or GetSymbolDiscriminant (S, Dictionary.Dict) = Dictionary.Array_Index_Symbol)); function Get_Array_Index_Type (The_Array_Index : Array_Index_Info_Ref) return Type_Info_Ref; --# global in Dictionary.Dict; function Get_Next_Array_Index (The_Array_Index : Array_Index_Info_Ref) return Array_Index_Info_Ref; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -- Record_Component_Info -------------------------------------------------------------------------------- function Get_Record_Component_Info_Ref (Item : Dictionary.Symbol) return Record_Component_Info_Ref; 
--# global in Dictionary.Dict; --# return S => (Item = Dictionary.NullSymbol <-> S = Null_Record_Component_Info_Ref); procedure Create_Record_Component (Name : in LexTokenManager.Lex_String; Record_Type : in Type_Info_Ref; Component_Type : in Type_Info_Ref; Inherited_Field : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Record_Component : out Record_Component_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# The_Record_Component from Component_Type, --# Comp_Unit, --# Dictionary.Dict, --# Inherited_Field, --# Loc, --# Name, --# Record_Type; --# post The_Record_Component /= Null_Record_Component_Info_Ref; procedure Set_Next_Record_Component (The_Record_Component, Next : in Record_Component_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Next, --# The_Record_Component; function Get_Record_Component_Symbol (The_Record_Component : Record_Component_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; --# return S => ((S = Dictionary.NullSymbol <-> The_Record_Component = Null_Record_Component_Info_Ref) and --# (S = Dictionary.NullSymbol or GetSymbolDiscriminant (S, Dictionary.Dict) = Dictionary.Record_Component_Symbol)); function Get_Record_Component_Name (The_Record_Component : Record_Component_Info_Ref) return LexTokenManager.Lex_String; --# global in Dictionary.Dict; function Get_Record_Component_Record_Type (The_Record_Component : Record_Component_Info_Ref) return Type_Info_Ref; --# global in Dictionary.Dict; function Get_Record_Component_Type (The_Record_Component : Record_Component_Info_Ref) return Type_Info_Ref; --# global in Dictionary.Dict; function Get_Record_Component_Inherited_Field (The_Record_Component : Record_Component_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Next_Record_Component (The_Record_Component : Record_Component_Info_Ref) return Record_Component_Info_Ref; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -- Subcomponent_Info -------------------------------------------------------------------------------- function Get_Subcomponent_Info_Ref (Item : Dictionary.Symbol) return Subcomponent_Info_Ref; --# global in Dictionary.Dict; --# return S => (Item = Dictionary.NullSymbol <-> S = Null_Subcomponent_Info_Ref); procedure Create_Subcomponent (Object : in Dictionary.Symbol; Record_Component : in Record_Component_Info_Ref; Marked_Valid : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Subcomponent : out Subcomponent_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# The_Subcomponent from Comp_Unit, --# Dictionary.Dict, --# Loc, --# Marked_Valid, --# Object, --# Record_Component; --# post The_Subcomponent /= Null_Subcomponent_Info_Ref; procedure Set_Subcomponent_Subcomponents (The_Subcomponent : in Subcomponent_Info_Ref; Sibling : in Subcomponent_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Sibling, --# The_Subcomponent; procedure Set_Subcomponent_Marked_Valid (The_Subcomponent : in Subcomponent_Info_Ref; Marked_Valid : in Boolean); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Marked_Valid, --# The_Subcomponent; procedure Set_Next_Subcomponent (The_Subcomponent, Next : in Subcomponent_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Next, --# The_Subcomponent; 
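      -- A minimal usage sketch, illustrative only: Obj_Sym, The_Component, Comp_Unit, Loc,
      -- Last_Sub and New_Sub are assumed caller-side names, not part of this package.
      -- It shows how the Create_... and Set_Next_... operations above can be combined to
      -- chain subcomponent records together:
      --
      --    RawDict.Create_Subcomponent
      --      (Object           => Obj_Sym,
      --       Record_Component => The_Component,
      --       Marked_Valid     => True,
      --       Comp_Unit        => Comp_Unit,
      --       Loc              => Loc,
      --       The_Subcomponent => New_Sub);
      --    RawDict.Set_Next_Subcomponent (The_Subcomponent => Last_Sub,
      --                                   Next             => New_Sub);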
function Get_Subcomponent_Symbol (The_Subcomponent : Subcomponent_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; --# return S => ((S = Dictionary.NullSymbol <-> The_Subcomponent = Null_Subcomponent_Info_Ref) and --# (S = Dictionary.NullSymbol or GetSymbolDiscriminant (S, Dictionary.Dict) = Dictionary.Subcomponent_Symbol)); function Get_Subcomponent_Object (The_Subcomponent : Subcomponent_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; function Get_Subcomponent_Record_Component (The_Subcomponent : Subcomponent_Info_Ref) return Record_Component_Info_Ref; --# global in Dictionary.Dict; function Get_Subcomponent_Subcomponents (The_Subcomponent : Subcomponent_Info_Ref) return Subcomponent_Info_Ref; --# global in Dictionary.Dict; function Get_Subcomponent_Marked_Valid (The_Subcomponent : Subcomponent_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Next_Subcomponent (The_Subcomponent : Subcomponent_Info_Ref) return Subcomponent_Info_Ref; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -- Type_Info -------------------------------------------------------------------------------- function Get_Type_Info_Ref (Item : Dictionary.Symbol) return Type_Info_Ref; --# global in Dictionary.Dict; --# return S => (Item = Dictionary.NullSymbol <-> S = Null_Type_Info_Ref); procedure Create_Type (Name : in LexTokenManager.Lex_String; The_Declaration : in Declaration_Info_Ref; Is_Private : in Boolean; Is_Announcement : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; Type_Mark : out Type_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# Type_Mark from Comp_Unit, --# Dictionary.Dict, --# Is_Announcement, --# Is_Private, --# Loc, --# Name, --# The_Declaration; --# post Type_Mark /= Null_Type_Info_Ref; procedure Set_Type_Parent (Type_Mark : in Type_Info_Ref; Parent : in Type_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Parent, --# Type_Mark; procedure Set_Type_Declaration (Type_Mark : in Type_Info_Ref; The_Declaration : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Declaration, --# Type_Mark; procedure Set_Type_Is_Full_Range_Subtype (Type_Mark : in Type_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Type_Mark; procedure Set_Type_Discriminant (Type_Mark : in Type_Info_Ref; Discriminant : in Dictionary.Type_Discriminant); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Discriminant, --# Type_Mark; procedure Set_Type_Private (Type_Mark : in Type_Info_Ref; Is_Private : in Dictionary.TriState); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Is_Private, --# Type_Mark; procedure Set_Type_Limited (Type_Mark : in Type_Info_Ref; Is_Limited : in Dictionary.TriState); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Is_Limited, --# Type_Mark; procedure Set_Type_Is_Tagged (Type_Mark : in Type_Info_Ref; Is_Tagged : in Boolean); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Is_Tagged, --# Type_Mark; procedure Set_Type_Is_Own_Var_Type (Type_Mark : in Type_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Type_Mark; procedure Set_Type_Extends (Type_Mark : in Type_Info_Ref; Root_Type : in Type_Info_Ref); --# global in out Dictionary.Dict; --# derives 
Dictionary.Dict from *, --# Root_Type, --# Type_Mark; procedure Set_Type_Accesses (Type_Mark : in Type_Info_Ref; The_Access : in Type_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Access, --# Type_Mark; procedure Set_Type_Limited_Private (Type_Mark : in Type_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Type_Mark; procedure Set_Type_Derived (Type_Mark : in Type_Info_Ref; Is_Derived : in Boolean); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Is_Derived, --# Type_Mark; procedure Set_Type_Equality_Defined (Type_Mark : in Type_Info_Ref; Equality_Defined : in Boolean); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Equality_Defined, --# Type_Mark; procedure Set_Type_Contains_Float (Type_Mark : in Type_Info_Ref; Contains_Float : in Boolean); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Contains_Float, --# Type_Mark; procedure Set_Type_Constrained (Type_Mark : in Type_Info_Ref; Constrained : in Boolean); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Constrained, --# Type_Mark; procedure Set_Type_Static (Type_Mark : in Type_Info_Ref; Static : in Boolean); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Static, --# Type_Mark; procedure Set_Type_Wellformed (Type_Mark : in Type_Info_Ref; Wellformed : in Boolean); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Type_Mark, --# Wellformed; procedure Set_Type_Lower (Type_Mark : in Type_Info_Ref; Lower : in LexTokenManager.Lex_String); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Lower, --# Type_Mark; procedure Set_Type_Upper (Type_Mark : in Type_Info_Ref; Upper : in LexTokenManager.Lex_String); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Type_Mark, --# Upper; procedure Set_Type_Modulus (Type_Mark : in Type_Info_Ref; Modulus : in LexTokenManager.Lex_String); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Modulus, --# Type_Mark; procedure Set_Type_Error_Bound (Type_Mark : in Type_Info_Ref; Error_Bound : in LexTokenManager.Lex_String); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Error_Bound, --# Type_Mark; procedure Set_Type_Base_Type (Type_Mark : in Type_Info_Ref; Base_Type : in Type_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Base_Type, --# Type_Mark; procedure Set_Type_First_Enumeration_Literal (Type_Mark : in Type_Info_Ref; Enumeration_Literal : in Enumeration_Literal_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Enumeration_Literal, --# Type_Mark; procedure Set_Type_Last_Enumeration_Literal (Type_Mark : in Type_Info_Ref; Enumeration_Literal : in Enumeration_Literal_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Enumeration_Literal, --# Type_Mark; procedure Set_Type_First_Array_Index (Type_Mark : in Type_Info_Ref; Array_Index : in Array_Index_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Array_Index, --# Type_Mark; procedure Set_Type_Last_Array_Index (Type_Mark : in Type_Info_Ref; Array_Index : in Array_Index_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Array_Index, --# Type_Mark; procedure Set_Type_Array_Component (Type_Mark : in Type_Info_Ref; Component_Type : in Type_Info_Ref); --# 
global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Component_Type, --# Type_Mark; procedure Set_Type_First_Record_Component (Type_Mark : in Type_Info_Ref; Record_Component : in Record_Component_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Record_Component, --# Type_Mark; procedure Set_Type_Last_Record_Component (Type_Mark : in Type_Info_Ref; Record_Component : in Record_Component_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Record_Component, --# Type_Mark; procedure Set_Type_Ancillary_Fields (Type_Mark : in Type_Info_Ref; The_Declaration : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Declaration, --# Type_Mark; procedure Set_Type_Size_Attribute (Type_Mark : in Type_Info_Ref; Size_Val : in LexTokenManager.Lex_String); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Size_Val, --# Type_Mark; procedure Set_Type_Atomic (Type_Mark : in Type_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Type_Mark; procedure Set_Type_Virtual_Element_List (Type_Mark : in Type_Info_Ref; The_List : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_List, --# Type_Mark; procedure Set_Type_Private_Type_Declaration (Type_Mark : in Type_Info_Ref; The_Declaration : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Declaration, --# Type_Mark; procedure Set_Type_Kind_Of_Generic (Type_Mark : in Type_Info_Ref; Kind_Of_Generic : in Dictionary.Generic_Type_Discriminant); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Kind_Of_Generic, --# Type_Mark; function Get_Type_Symbol (Type_Mark : Type_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; --# return S => ((S = Dictionary.NullSymbol <-> Type_Mark = Null_Type_Info_Ref) and --# (S = Dictionary.NullSymbol or GetSymbolDiscriminant (S, Dictionary.Dict) = Dictionary.Type_Symbol)); function Get_Type_Name (Type_Mark : Type_Info_Ref) return LexTokenManager.Lex_String; --# global in Dictionary.Dict; function Get_Type_Parent (Type_Mark : Type_Info_Ref) return Type_Info_Ref; --# global in Dictionary.Dict; function Get_Type_Declaration (Type_Mark : Type_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Type_Is_Full_Range_Subtype (Type_Mark : Type_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_First_Constrained_Subtype (Type_Mark : Type_Info_Ref) return Type_Info_Ref; --# global in Dictionary.Dict; function Get_Type_Discriminant (Type_Mark : Type_Info_Ref) return Dictionary.Type_Discriminant; --# global in Dictionary.Dict; function Get_Type_Private (Type_Mark : Type_Info_Ref) return Dictionary.TriState; --# global in Dictionary.Dict; function Get_Type_Limited (Type_Mark : Type_Info_Ref) return Dictionary.TriState; --# global in Dictionary.Dict; function Get_Type_Limited_Private (Type_Mark : Type_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Type_Derived (Type_Mark : Type_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Type_Is_Tagged (Type_Mark : Type_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Type_Is_Own_Var_Type (Type_Mark : Type_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Type_Extends (Type_Mark : Type_Info_Ref) return Type_Info_Ref; --# global in Dictionary.Dict; 
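      -- A minimal sketch, illustrative only (TypeIsTagged is a hypothetical exported name),
      -- of how the "Rules of the Dictionary" at the top of this package body relate to the
      -- getters above: an exported query converts the Symbol once via Get_Type_Info_Ref and
      -- then delegates to the strongly typed accessor:
      --
      --    function TypeIsTagged (TheType : Symbol) return Boolean is
      --    begin
      --       return RawDict.Get_Type_Is_Tagged (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheType));
      --    end TypeIsTagged;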
function Get_Type_Accesses (Type_Mark : Type_Info_Ref) return Type_Info_Ref; --# global in Dictionary.Dict; function Get_Type_Equality_Defined (Type_Mark : Type_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Type_Contains_Float (Type_Mark : Type_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Type_Constrained (Type_Mark : Type_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Type_Static (Type_Mark : Type_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Type_Wellformed (Type_Mark : Type_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Type_Base_Type (Type_Mark : Type_Info_Ref) return Type_Info_Ref; --# global in Dictionary.Dict; function Get_Type_Lower (Type_Mark : Type_Info_Ref) return LexTokenManager.Lex_String; --# global in Dictionary.Dict; function Get_Type_Upper (Type_Mark : Type_Info_Ref) return LexTokenManager.Lex_String; --# global in Dictionary.Dict; function Get_Type_Modulus (Type_Mark : Type_Info_Ref) return LexTokenManager.Lex_String; --# global in Dictionary.Dict; function Get_Type_Error_Bound (Type_Mark : Type_Info_Ref) return LexTokenManager.Lex_String; --# global in Dictionary.Dict; function Get_Type_First_Enumeration_Literal (Type_Mark : Type_Info_Ref) return Enumeration_Literal_Info_Ref; --# global in Dictionary.Dict; function Get_Type_Last_Enumeration_Literal (Type_Mark : Type_Info_Ref) return Enumeration_Literal_Info_Ref; --# global in Dictionary.Dict; function Get_Type_First_Array_Index (Type_Mark : Type_Info_Ref) return Array_Index_Info_Ref; --# global in Dictionary.Dict; function Get_Type_Last_Array_Index (Type_Mark : Type_Info_Ref) return Array_Index_Info_Ref; --# global in Dictionary.Dict; function Get_Type_Array_Component (Type_Mark : Type_Info_Ref) return Type_Info_Ref; --# global in Dictionary.Dict; function Get_Type_First_Record_Component (Type_Mark : Type_Info_Ref) return Record_Component_Info_Ref; --# global in Dictionary.Dict; function Get_Type_Last_Record_Component (Type_Mark : Type_Info_Ref) return Record_Component_Info_Ref; --# global in Dictionary.Dict; function Get_Type_Announcement (Type_Mark : Type_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Type_Private_Type_Declaration (Type_Mark : Type_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Type_Ancillary_Fields (Type_Mark : Type_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; function Get_Type_Size_Attribute (Type_Mark : Type_Info_Ref) return LexTokenManager.Lex_String; --# global in Dictionary.Dict; function Get_Type_Atomic (Type_Mark : Type_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Type_Virtual_Element_List (Type_Mark : Type_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; function Get_Type_Kind_Of_Generic (Type_Mark : Type_Info_Ref) return Dictionary.Generic_Type_Discriminant; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -- Protected_Type_Info -------------------------------------------------------------------------------- procedure Set_Protected_Type_Own_Variable (The_Protected_Type : in Type_Info_Ref; Own_Variable : in Own_Variable_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Own_Variable, --# The_Protected_Type; procedure Set_Protected_Type_Elements_Hidden (The_Protected_Type : in Type_Info_Ref); --# global in out Dictionary.Dict; --# derives 
Dictionary.Dict from *, --# The_Protected_Type; procedure Set_Protected_Type_First_Visible_Declaration (The_Protected_Type : in Type_Info_Ref; The_Declaration : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Declaration, --# The_Protected_Type; procedure Set_Protected_Type_Last_Visible_Declaration (The_Protected_Type : in Type_Info_Ref; The_Declaration : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Declaration, --# The_Protected_Type; procedure Set_Protected_Type_First_Private_Declaration (The_Protected_Type : in Type_Info_Ref; The_Declaration : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Declaration, --# The_Protected_Type; procedure Set_Protected_Type_Last_Private_Declaration (The_Protected_Type : in Type_Info_Ref; The_Declaration : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Declaration, --# The_Protected_Type; procedure Set_Protected_Type_First_Local_Declaration (The_Protected_Type : in Type_Info_Ref; The_Declaration : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Declaration, --# The_Protected_Type; procedure Set_Protected_Type_Last_Local_Declaration (The_Protected_Type : in Type_Info_Ref; The_Declaration : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Declaration, --# The_Protected_Type; procedure Set_Protected_Type_The_Entry (The_Protected_Type : in Type_Info_Ref; The_Entry : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Entry, --# The_Protected_Type; procedure Set_Protected_Type_First_Discriminant (The_Protected_Type : in Type_Info_Ref; Discriminant : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Discriminant, --# The_Protected_Type; procedure Set_Protected_Type_Last_Discriminant (The_Protected_Type : in Type_Info_Ref; Discriminant : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Discriminant, --# The_Protected_Type; procedure Set_Protected_Type_Has_Pragma (The_Protected_Type : in Type_Info_Ref; The_Pragma : in Dictionary.RavenscarPragmas); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Pragma, --# The_Protected_Type; procedure Set_Protected_Type_Pragma_Value (The_Protected_Type : in Type_Info_Ref; The_Pragma : in Dictionary.RavenscarPragmasWithValue; The_Value : in LexTokenManager.Lex_String); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Pragma, --# The_Protected_Type, --# The_Value; procedure Set_Protected_Type_Body (The_Protected_Type : in Type_Info_Ref; The_Body : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Body, --# The_Protected_Type; procedure Set_Protected_Type_Has_Proper_Body (The_Protected_Type : in Type_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Protected_Type; procedure Set_Protected_Type_With_Clauses (The_Protected_Type : in Type_Info_Ref; The_Context_Clause : in Context_Clause_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Context_Clause, --# The_Protected_Type; procedure Set_Protected_Type_Use_Type_Clauses (The_Protected_Type : in Type_Info_Ref; 
The_Use_Type_Clause : in Use_Type_Clause_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Protected_Type, --# The_Use_Type_Clause; function Get_Protected_Type_Own_Variable (The_Protected_Type : Type_Info_Ref) return Own_Variable_Info_Ref; --# global in Dictionary.Dict; function Get_Protected_Type_Elements_Hidden (The_Protected_Type : Type_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Protected_Type_Has_Entry (The_Protected_Type : Type_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Protected_Type_First_Visible_Declaration (The_Protected_Type : Type_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Protected_Type_Last_Visible_Declaration (The_Protected_Type : Type_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Protected_Type_First_Private_Declaration (The_Protected_Type : Type_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Protected_Type_Last_Private_Declaration (The_Protected_Type : Type_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Protected_Type_First_Local_Declaration (The_Protected_Type : Type_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Protected_Type_Last_Local_Declaration (The_Protected_Type : Type_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Protected_Type_First_Discriminant (The_Protected_Type : Type_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; function Get_Protected_Type_Last_Discriminant (The_Protected_Type : Type_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; function Get_Protected_Type_Has_Pragma (The_Protected_Type : Type_Info_Ref; The_Pragma : Dictionary.RavenscarPragmas) return Boolean; --# global in Dictionary.Dict; function Get_Protected_Type_Pragma_Value (The_Protected_Type : Type_Info_Ref; The_Pragma : Dictionary.RavenscarPragmasWithValue) return LexTokenManager.Lex_String; --# global in Dictionary.Dict; function Get_Protected_Type_Body (The_Protected_Type : Type_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Protected_Type_Has_Proper_Body (The_Protected_Type : Type_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Protected_Type_With_Clauses (The_Protected_Type : Type_Info_Ref) return Context_Clause_Info_Ref; --# global in Dictionary.Dict; function Get_Protected_Type_Use_Type_Clauses (The_Protected_Type : Type_Info_Ref) return Use_Type_Clause_Info_Ref; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -- Task_Type_Info -------------------------------------------------------------------------------- procedure Set_Task_Type_Signature_Not_Wellformed (The_Task_Type : in Type_Info_Ref; Abstraction : in Dictionary.Abstractions); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Abstraction, --# The_Task_Type; procedure Set_Task_Type_Has_Second_Annotation (The_Task_Type : in Type_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Task_Type; procedure Set_Task_Type_Has_Derives_Annotation (The_Task_Type : in Type_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Task_Type; procedure Set_Task_Type_First_Local_Declaration (The_Task_Type : in Type_Info_Ref; The_Declaration : in Declaration_Info_Ref); --# global in out 
Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Declaration, --# The_Task_Type; procedure Set_Task_Type_Last_Local_Declaration (The_Task_Type : in Type_Info_Ref; The_Declaration : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Declaration, --# The_Task_Type; procedure Set_Task_Type_With_Clauses (The_Task_Type : in Type_Info_Ref; The_Context_Clause : in Context_Clause_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Context_Clause, --# The_Task_Type; procedure Set_Task_Type_Use_Type_Clauses (The_Task_Type : in Type_Info_Ref; The_Use_Type_Clause : in Use_Type_Clause_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Task_Type, --# The_Use_Type_Clause; procedure Set_Task_Type_First_Discriminant (The_Task_Type : in Type_Info_Ref; Discriminant : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Discriminant, --# The_Task_Type; procedure Set_Task_Type_Last_Discriminant (The_Task_Type : in Type_Info_Ref; Discriminant : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Discriminant, --# The_Task_Type; procedure Set_Task_Type_First_Global_Variable (The_Task_Type : in Type_Info_Ref; Abstraction : in Dictionary.Abstractions; The_Global_Variable : in Global_Variable_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Abstraction, --# The_Global_Variable, --# The_Task_Type; procedure Set_Task_Type_Last_Global_Variable (The_Task_Type : in Type_Info_Ref; Abstraction : in Dictionary.Abstractions; The_Global_Variable : in Global_Variable_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Abstraction, --# The_Global_Variable, --# The_Task_Type; procedure Set_Task_Type_Has_Pragma (The_Task_Type : in Type_Info_Ref; The_Pragma : in Dictionary.RavenscarPragmas); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Pragma, --# The_Task_Type; procedure Set_Task_Type_Pragma_Value (The_Task_Type : in Type_Info_Ref; The_Pragma : in Dictionary.RavenscarPragmasWithValue; The_Value : in LexTokenManager.Lex_String); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Pragma, --# The_Task_Type, --# The_Value; procedure Set_Task_Type_First_Loop (The_Task_Type : in Type_Info_Ref; The_Loop : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Loop, --# The_Task_Type; procedure Set_Task_Type_Last_Loop (The_Task_Type : in Type_Info_Ref; The_Loop : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Loop, --# The_Task_Type; procedure Set_Task_Type_Suspends_List (The_Task_Type : in Type_Info_Ref; The_Suspends_List : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Suspends_List, --# The_Task_Type; procedure Set_Task_Type_Body (The_Task_Type : in Type_Info_Ref; The_Body : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Body, --# The_Task_Type; procedure Set_Task_Type_Has_Proper_Body (The_Task_Type : in Type_Info_Ref; Is_Hidden : in Boolean); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Is_Hidden, --# The_Task_Type; procedure Set_Task_Type_Uses_Unprotected_Variables (The_Task_Type : in Type_Info_Ref); --# global in out 
Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Task_Type; procedure Set_Task_Type_Uses_Unchecked_Conversion (The_Task_Type : in Type_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Task_Type; procedure Set_Task_Type_Assigns_From_External (The_Task_Type : in Type_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Task_Type; function Get_Task_Type_Signature_Is_Wellformed (The_Task_Type : Type_Info_Ref; Abstraction : Dictionary.Abstractions) return Boolean; --# global in Dictionary.Dict; function Get_Task_Type_Has_Second_Annotation (The_Task_Type : Type_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Task_Type_Has_Derives_Annotation (The_Task_Type : Type_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Task_Type_First_Local_Declaration (The_Task_Type : Type_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Task_Type_Last_Local_Declaration (The_Task_Type : Type_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Task_Type_With_Clauses (The_Task_Type : Type_Info_Ref) return Context_Clause_Info_Ref; --# global in Dictionary.Dict; function Get_Task_Type_Use_Type_Clauses (The_Task_Type : Type_Info_Ref) return Use_Type_Clause_Info_Ref; --# global in Dictionary.Dict; function Get_Task_Type_First_Discriminant (The_Task_Type : Type_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; function Get_Task_Type_Last_Discriminant (The_Task_Type : Type_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; function Get_Task_Type_First_Global_Variable (The_Task_Type : Type_Info_Ref; Abstraction : Dictionary.Abstractions) return Global_Variable_Info_Ref; --# global in Dictionary.Dict; function Get_Task_Type_Last_Global_Variable (The_Task_Type : Type_Info_Ref; Abstraction : Dictionary.Abstractions) return Global_Variable_Info_Ref; --# global in Dictionary.Dict; function Get_Task_Type_Has_Pragma (The_Task_Type : Type_Info_Ref; The_Pragma : Dictionary.RavenscarPragmas) return Boolean; --# global in Dictionary.Dict; function Get_Task_Type_Pragma_Value (The_Task_Type : Type_Info_Ref; The_Pragma : Dictionary.RavenscarPragmasWithValue) return LexTokenManager.Lex_String; --# global in Dictionary.Dict; function Get_Task_Type_First_Loop (The_Task_Type : Type_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; function Get_Task_Type_Last_Loop (The_Task_Type : Type_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; function Get_Task_Type_Suspends_List (The_Task_Type : Type_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; function Get_Task_Type_Body (The_Task_Type : Type_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Task_Type_Has_Proper_Body (The_Task_Type : Type_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Task_Type_Uses_Unprotected_Variables (The_Task_Type : Type_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Task_Type_Uses_Unchecked_Conversion (The_Task_Type : Type_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Task_Type_Assigns_From_External (The_Task_Type : Type_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Task_Type_Body_Is_Hidden (The_Task_Type : Type_Info_Ref) return Boolean; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -- Constant_Info 
-------------------------------------------------------------------------------- function Get_Constant_Info_Ref (Item : Dictionary.Symbol) return Constant_Info_Ref; --# global in Dictionary.Dict; --# return S => (Item = Dictionary.NullSymbol <-> S = Null_Constant_Info_Ref); procedure Create_Constant (Name : in LexTokenManager.Lex_String; Type_Mark : in Type_Info_Ref; Static : in Boolean; The_Declaration : in Declaration_Info_Ref; Is_Deferred : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Constant : out Constant_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# The_Constant from Comp_Unit, --# Dictionary.Dict, --# Is_Deferred, --# Loc, --# Name, --# Static, --# The_Declaration, --# Type_Mark; --# post The_Constant /= Null_Constant_Info_Ref; procedure Set_Constant_Value (The_Constant : in Constant_Info_Ref; Value : in LexTokenManager.Lex_String); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Constant, --# Value; procedure Set_Constant_Exp_Node (The_Constant : in Constant_Info_Ref; Exp_Is_Wellformed : in Boolean; Exp_Node : in ExaminerConstants.RefType); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Exp_Is_Wellformed, --# Exp_Node, --# The_Constant; procedure Set_Constant_Static (The_Constant : in Constant_Info_Ref; Static : in Boolean); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Static, --# The_Constant; procedure Set_Constant_Declaration (The_Constant : in Constant_Info_Ref; The_Declaration : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Constant, --# The_Declaration; procedure Set_Constant_Deferred_Declaration (The_Constant : in Constant_Info_Ref; The_Declaration : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Constant, --# The_Declaration; procedure Set_Constant_Associated_Generic_Parameter (The_Constant : in Constant_Info_Ref; The_Generic_Parameter : in Generic_Parameter_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Constant, --# The_Generic_Parameter; procedure Set_Constant_First_Rule_Policy (The_Constant : in Constant_Info_Ref; The_Rule_Policy : in Rule_Policy_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Constant, --# The_Rule_Policy; procedure Set_Constant_Last_Rule_Policy (The_Constant : in Constant_Info_Ref; The_Rule_Policy : in Rule_Policy_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Constant, --# The_Rule_Policy; function Get_Constant_Symbol (The_Constant : Constant_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; --# return S => ((S = Dictionary.NullSymbol <-> The_Constant = Null_Constant_Info_Ref) and --# (S = Dictionary.NullSymbol or GetSymbolDiscriminant (S, Dictionary.Dict) = Dictionary.Constant_Symbol)); function Get_Constant_Name (The_Constant : Constant_Info_Ref) return LexTokenManager.Lex_String; --# global in Dictionary.Dict; function Get_Constant_Type (The_Constant : Constant_Info_Ref) return Type_Info_Ref; --# global in Dictionary.Dict; function Get_Constant_Value (The_Constant : Constant_Info_Ref) return LexTokenManager.Lex_String; --# global in Dictionary.Dict; function Get_Constant_First_Rule_Policy (The_Constant : Constant_Info_Ref) return Rule_Policy_Info_Ref; --# global in Dictionary.Dict; function 
   function Get_Constant_Last_Rule_Policy (The_Constant : Constant_Info_Ref) return Rule_Policy_Info_Ref;
   --# global in Dictionary.Dict;

   function Get_Constant_Exp_Node (The_Constant : Constant_Info_Ref) return ExaminerConstants.RefType;
   --# global in Dictionary.Dict;

   function Get_Constant_Exp_Is_Wellformed (The_Constant : Constant_Info_Ref) return Boolean;
   --# global in Dictionary.Dict;

   function Get_Constant_Static (The_Constant : Constant_Info_Ref) return Boolean;
   --# global in Dictionary.Dict;

   function Get_Constant_Declaration (The_Constant : Constant_Info_Ref) return Declaration_Info_Ref;
   --# global in Dictionary.Dict;

   function Get_Constant_Deferred_Declaration (The_Constant : Constant_Info_Ref) return Declaration_Info_Ref;
   --# global in Dictionary.Dict;

   function Get_Constant_Associated_Generic_Parameter (The_Constant : Constant_Info_Ref) return Generic_Parameter_Info_Ref;
   --# global in Dictionary.Dict;

   --------------------------------------------------------------------------------
   -- Variable_Info
   --------------------------------------------------------------------------------

   function Get_Variable_Info_Ref (Item : Dictionary.Symbol) return Variable_Info_Ref;
   --# global in Dictionary.Dict;
   --# return S => (Item = Dictionary.NullSymbol <-> S = Null_Variable_Info_Ref);

   procedure Create_Variable
     (Name         : in     LexTokenManager.Lex_String;
      Comp_Unit    : in     ContextManager.UnitDescriptors;
      Loc          : in     LexTokenManager.Token_Position;
      The_Variable :    out Variable_Info_Ref);
   --# global in out Dictionary.Dict;
   --# derives Dictionary.Dict,
   --#         The_Variable from Comp_Unit,
   --#                           Dictionary.Dict,
   --#                           Loc,
   --#                           Name;
   --# post The_Variable /= Null_Variable_Info_Ref;

   procedure Set_Variable_Type (The_Variable : in Variable_Info_Ref; Type_Mark : in Type_Info_Ref);
   --# global in out Dictionary.Dict;
   --# derives Dictionary.Dict from *,
   --#                              The_Variable,
   --#                              Type_Mark;

   procedure Set_Variable_Abstract_Type (The_Variable : in Variable_Info_Ref; Abstract_Type_Mark : in Type_Info_Ref);
   --# global in out Dictionary.Dict;
   --# derives Dictionary.Dict from *,
   --#                              Abstract_Type_Mark,
   --#                              The_Variable;

   procedure Set_Variable_Initialized (The_Variable : in Variable_Info_Ref);
   --# global in out Dictionary.Dict;
   --# derives Dictionary.Dict from *,
   --#                              The_Variable;

   procedure Set_Variable_Has_Address_Clause (The_Variable : in Variable_Info_Ref);
   --# global in out Dictionary.Dict;
   --# derives Dictionary.Dict from *,
   --#                              The_Variable;

   procedure Set_Variable_Has_Pragma_Import (The_Variable : in Variable_Info_Ref);
   --# global in out Dictionary.Dict;
   --# derives Dictionary.Dict from *,
   --#                              The_Variable;

   procedure Set_Variable_Is_Aliased (The_Variable : in Variable_Info_Ref);
   --# global in out Dictionary.Dict;
   --# derives Dictionary.Dict from *,
   --#                              The_Variable;

   procedure Set_Variable_Marked_Valid (The_Variable : in Variable_Info_Ref; Val : in Boolean);
   --# global in out Dictionary.Dict;
   --# derives Dictionary.Dict from *,
   --#                              The_Variable,
   --#                              Val;

   procedure Set_Variable_Declaration (The_Variable : in Variable_Info_Ref; The_Declaration : in Declaration_Info_Ref);
   --# global in out Dictionary.Dict;
   --# derives Dictionary.Dict from *,
   --#                              The_Declaration,
   --#                              The_Variable;

   procedure Set_Variable_Exp_Node (The_Variable : in Variable_Info_Ref; Exp_Node : in ExaminerConstants.RefType);
   --# global in out Dictionary.Dict;
   --# derives Dictionary.Dict from *,
   --#                              Exp_Node,
   --#                              The_Variable;

   procedure Set_Variable_Own_Task (The_Variable : in Variable_Info_Ref; Own_Task : in Dictionary.Symbol);
   --# global in out Dictionary.Dict;
   --# derives Dictionary.Dict
from *, --# Own_Task, --# The_Variable; procedure Set_Variable_Virtual_Element (The_Variable : in Variable_Info_Ref; Virtual_Element : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Variable, --# Virtual_Element; procedure Set_Variable_Global_References (The_Variable : in Variable_Info_Ref; Abstraction : in Dictionary.Abstractions; Reference : in Global_Variable_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Abstraction, --# Reference, --# The_Variable; procedure Set_Variable_Own_Variable (The_Variable : in Variable_Info_Ref; Own_Variable : in Own_Variable_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Own_Variable, --# The_Variable; procedure Set_Variable_Constituent (The_Variable : in Variable_Info_Ref; The_Constituent : in Constituent_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Constituent, --# The_Variable; procedure Set_Variable_Subcomponents (The_Variable : in Variable_Info_Ref; Subcomponents : in Subcomponent_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Subcomponents, --# The_Variable; function Get_Variable_Symbol (The_Variable : Variable_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; --# return S => ((S = Dictionary.NullSymbol <-> The_Variable = Null_Variable_Info_Ref) and --# (S = Dictionary.NullSymbol or GetSymbolDiscriminant (S, Dictionary.Dict) = Dictionary.Variable_Symbol)); function Get_Variable_Name (The_Variable : Variable_Info_Ref) return LexTokenManager.Lex_String; --# global in Dictionary.Dict; function Get_Variable_Type (The_Variable : Variable_Info_Ref) return Type_Info_Ref; --# global in Dictionary.Dict; function Get_Variable_Abstract_Type (The_Variable : Variable_Info_Ref) return Type_Info_Ref; --# global in Dictionary.Dict; function Get_Variable_Initialized (The_Variable : Variable_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Variable_Has_Address_Clause (The_Variable : Variable_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Variable_Has_Pragma_Import (The_Variable : Variable_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Variable_Is_Aliased (The_Variable : Variable_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Variable_Marked_Valid (The_Variable : Variable_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Variable_Declaration (The_Variable : Variable_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Variable_Exp_Node (The_Variable : Variable_Info_Ref) return ExaminerConstants.RefType; --# global in Dictionary.Dict; function Get_Variable_Global_References (The_Variable : Variable_Info_Ref; Abstraction : Dictionary.Abstractions) return Global_Variable_Info_Ref; --# global in Dictionary.Dict; function Get_Variable_Own_Variable (The_Variable : Variable_Info_Ref) return Own_Variable_Info_Ref; --# global in Dictionary.Dict; function Get_Variable_Own_Task (The_Variable : Variable_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; function Get_Variable_Constituent (The_Variable : Variable_Info_Ref) return Constituent_Info_Ref; --# global in Dictionary.Dict; function Get_Variable_Subcomponents (The_Variable : Variable_Info_Ref) return Subcomponent_Info_Ref; --# global in Dictionary.Dict; function Get_Variable_Virtual_Element (The_Variable : Variable_Info_Ref) return 
Dictionary.Symbol; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -- Global_Variable_Info -------------------------------------------------------------------------------- type Kind_Of_Global_Variable_T is (Subprogram_Parameter_Item, Subprogram_Variable_Item, Task_Type_Variable_Item); function Get_Global_Variable_Info_Ref (Item : Dictionary.Symbol) return Global_Variable_Info_Ref; --# global in Dictionary.Dict; --# return S => (Item = Dictionary.NullSymbol <-> S = Null_Global_Variable_Info_Ref); procedure Create_Global_Variable (Mode : in Dictionary.Modes; Prefix_Needed : in Boolean; The_Subprogram : in Subprogram_Info_Ref; The_Task_Type : in Type_Info_Ref; Subprogram_Parameter : in Subprogram_Parameter_Info_Ref; Variable : in Variable_Info_Ref; Next_Subprogram : in Global_Variable_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Global_Variable : out Global_Variable_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# The_Global_Variable from Comp_Unit, --# Dictionary.Dict, --# Loc, --# Mode, --# Next_Subprogram, --# Prefix_Needed, --# Subprogram_Parameter, --# The_Subprogram, --# The_Task_Type, --# Variable; --# post The_Global_Variable /= Null_Global_Variable_Info_Ref; procedure Set_Global_Variable_Exported (The_Global_Variable : in Global_Variable_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Global_Variable; procedure Set_Global_Variable_Imported (The_Global_Variable : in Global_Variable_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Global_Variable; procedure Set_Global_Variable_Dependencies (The_Global_Variable : in Global_Variable_Info_Ref; Abstraction : in Dictionary.Abstractions; Dependency : in Dependency_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Abstraction, --# Dependency, --# The_Global_Variable; procedure Set_Next_Global_Variable (The_Global_Variable, Next : in Global_Variable_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Next, --# The_Global_Variable; function Get_Global_Variable_Symbol (The_Global_Variable : Global_Variable_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; --# return S => ((S = Dictionary.NullSymbol <-> The_Global_Variable = Null_Global_Variable_Info_Ref) and --# (S = Dictionary.NullSymbol or GetSymbolDiscriminant (S, Dictionary.Dict) = Dictionary.Global_Variable_Symbol)); function Get_Global_Variable_Mode (The_Global_Variable : Global_Variable_Info_Ref) return Dictionary.Modes; --# global in Dictionary.Dict; function Get_Global_Variable_Exported (The_Global_Variable : Global_Variable_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Global_Variable_Imported (The_Global_Variable : Global_Variable_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Global_Variable_Prefix_Needed (The_Global_Variable : Global_Variable_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Global_Variable_Dependencies (The_Global_Variable : Global_Variable_Info_Ref; Abstraction : Dictionary.Abstractions) return Dependency_Info_Ref; --# global in Dictionary.Dict; function Get_Global_Variable_Next_Subprogram (The_Global_Variable : Global_Variable_Info_Ref) return Global_Variable_Info_Ref; --# global in Dictionary.Dict; function Get_Global_Variable_Subprogram (The_Global_Variable : 
Global_Variable_Info_Ref) return Subprogram_Info_Ref; --# global in Dictionary.Dict; function Get_Global_Variable_Task_Type (The_Global_Variable : Global_Variable_Info_Ref) return Type_Info_Ref; --# global in Dictionary.Dict; function Get_Kind_Of_Global_Variable (The_Global_Variable : Global_Variable_Info_Ref) return Kind_Of_Global_Variable_T; --# global in Dictionary.Dict; function Get_Global_Variable_Variable (The_Global_Variable : Global_Variable_Info_Ref) return Variable_Info_Ref; --# global in Dictionary.Dict; function Get_Global_Variable_Parameter (The_Global_Variable : Global_Variable_Info_Ref) return Subprogram_Parameter_Info_Ref; --# global in Dictionary.Dict; function Get_Next_Global_Variable (The_Global_Variable : Global_Variable_Info_Ref) return Global_Variable_Info_Ref; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -- Own_Variable_Info -------------------------------------------------------------------------------- function Get_Own_Variable_Info_Ref (Item : Dictionary.Symbol) return Own_Variable_Info_Ref; --# global in Dictionary.Dict; --# return S => (Item = Dictionary.NullSymbol <-> S = Null_Own_Variable_Info_Ref); procedure Create_Own_Variable (Variable : in Variable_Info_Ref; Owner : in Dictionary.Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Own_Variable : out Own_Variable_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# The_Own_Variable from Comp_Unit, --# Dictionary.Dict, --# Loc, --# Owner, --# Variable; --# post The_Own_Variable /= Null_Own_Variable_Info_Ref; procedure Set_Own_Variable_Announced (The_Own_Variable : in Own_Variable_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Own_Variable; procedure Set_Own_Variable_Typed (The_Own_Variable : in Own_Variable_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Own_Variable; procedure Set_Own_Variable_Initialized (The_Own_Variable : in Own_Variable_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Own_Variable; procedure Set_Own_Variable_Constituents (The_Own_Variable : in Own_Variable_Info_Ref; The_Constituent : in Constituent_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Constituent, --# The_Own_Variable; procedure Set_Own_Variable_Mode (The_Own_Variable : in Own_Variable_Info_Ref; Mode : in Dictionary.Modes); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Mode, --# The_Own_Variable; procedure Set_Own_Variable_Protected (The_Own_Variable : in Own_Variable_Info_Ref; Is_Protected : in Boolean); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Is_Protected, --# The_Own_Variable; procedure Set_Next_Own_Variable (The_Own_Variable, Next : in Own_Variable_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Next, --# The_Own_Variable; procedure Set_Own_Variable_Interrupt_Stream_Mappings (The_Own_Variable : in Own_Variable_Info_Ref; The_Interrupt_Stream_Mappings : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Interrupt_Stream_Mappings, --# The_Own_Variable; procedure Set_Own_Variable_Implicit_In_Stream (The_Own_Variable : in Own_Variable_Info_Ref; The_Implicit_In_Stream : in Implicit_In_Stream_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, 
--# The_Implicit_In_Stream, --# The_Own_Variable; procedure Set_Own_Variable_Is_Interrupt_Stream (The_Own_Variable : in Own_Variable_Info_Ref; Is_Interrupt_Stream : in Boolean); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Is_Interrupt_Stream, --# The_Own_Variable; procedure Set_Own_Variable_Unprotected_Reference (The_Own_Variable : in Own_Variable_Info_Ref; By_Thread : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# By_Thread, --# The_Own_Variable; procedure Set_Own_Variable_Suspends_Reference (The_Own_Variable : in Own_Variable_Info_Ref; By_Thread : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# By_Thread, --# The_Own_Variable; procedure Set_Own_Variable_Is_Suspendable (The_Own_Variable : in Own_Variable_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Own_Variable; procedure Set_Own_Variable_Has_Interrupt_Property (The_Own_Variable : in Own_Variable_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Own_Variable; procedure Set_Own_Variable_Priority_Property (The_Own_Variable : in Own_Variable_Info_Ref; The_Value : in LexTokenManager.Lex_String); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Own_Variable, --# The_Value; procedure Set_Own_Variable_Integrity_Property (The_Own_Variable : in Own_Variable_Info_Ref; The_Value : in LexTokenManager.Lex_String); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Own_Variable, --# The_Value; function Get_Own_Variable_Symbol (The_Own_Variable : Own_Variable_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; --# return S => ((S = Dictionary.NullSymbol <-> The_Own_Variable = Null_Own_Variable_Info_Ref) and --# (S = Dictionary.NullSymbol or GetSymbolDiscriminant (S, Dictionary.Dict) = Dictionary.Own_Variable_Symbol)); function Get_Own_Variable_Variable (The_Own_Variable : Own_Variable_Info_Ref) return Variable_Info_Ref; --# global in Dictionary.Dict; function Get_Own_Variable_Owner (The_Own_Variable : Own_Variable_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; function Get_Own_Variable_Announced (The_Own_Variable : Own_Variable_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Own_Variable_Typed (The_Own_Variable : Own_Variable_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Own_Variable_Initialized (The_Own_Variable : Own_Variable_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Own_Variable_Constituents (The_Own_Variable : Own_Variable_Info_Ref) return Constituent_Info_Ref; --# global in Dictionary.Dict; function Get_Own_Variable_Mode (The_Own_Variable : Own_Variable_Info_Ref) return Dictionary.Modes; --# global in Dictionary.Dict; function Get_Own_Variable_Protected (The_Own_Variable : Own_Variable_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Own_Variable_Implicit_In_Stream (The_Own_Variable : Own_Variable_Info_Ref) return Implicit_In_Stream_Info_Ref; --# global in Dictionary.Dict; function Get_Next_Own_Variable (The_Own_Variable : Own_Variable_Info_Ref) return Own_Variable_Info_Ref; --# global in Dictionary.Dict; function Get_Own_Variable_Interrupt_Stream_Mappings (The_Own_Variable : Own_Variable_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; function Get_Own_Variable_Is_Interrupt_Stream (The_Own_Variable : Own_Variable_Info_Ref) return 
Boolean; --# global in Dictionary.Dict; function Get_Own_Variable_Is_Suspendable (The_Own_Variable : Own_Variable_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Own_Variable_Has_Interrupt_Property (The_Own_Variable : Own_Variable_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Own_Variable_Priority_Property (The_Own_Variable : Own_Variable_Info_Ref) return LexTokenManager.Lex_String; --# global in Dictionary.Dict; function Get_Own_Variable_Integrity_Property (The_Own_Variable : Own_Variable_Info_Ref) return LexTokenManager.Lex_String; --# global in Dictionary.Dict; function Get_Own_Variable_Unprotected_Reference (The_Own_Variable : Own_Variable_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; function Get_Own_Variable_Suspends_Reference (The_Own_Variable : Own_Variable_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; function Get_Own_Variable_Has_Valid_Priority_Property (The_Own_Variable : Own_Variable_Info_Ref) return Boolean; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -- Quantified_Variable_Info -------------------------------------------------------------------------------- function Get_Quantified_Variable_Info_Ref (Item : Dictionary.Symbol) return Quantified_Variable_Info_Ref; --# global in Dictionary.Dict; --# return S => (Item = Dictionary.NullSymbol <-> S = Null_Quantified_Variable_Info_Ref); procedure Create_Quantified_Variable (Name : in LexTokenManager.Lex_String; Type_Mark : in Type_Info_Ref; The_Parameter_Constraint : in Parameter_Constraint_Info_Ref; Region : in Dictionary.Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Quantified_Variable : out Quantified_Variable_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# The_Quantified_Variable from Comp_Unit, --# Dictionary.Dict, --# Loc, --# Name, --# Region, --# The_Parameter_Constraint, --# Type_Mark; --# post The_Quantified_Variable /= Null_Quantified_Variable_Info_Ref; function Get_Quantified_Variable_Symbol (The_Quantified_Variable : Quantified_Variable_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; --# return S => ((S = Dictionary.NullSymbol <-> The_Quantified_Variable = Null_Quantified_Variable_Info_Ref) and --# (S = Dictionary.NullSymbol or GetSymbolDiscriminant (S, Dictionary.Dict) = Dictionary.Quantified_Variable_Symbol)); function Get_Quantified_Variable_Name (The_Quantified_Variable : Quantified_Variable_Info_Ref) return LexTokenManager.Lex_String; --# global in Dictionary.Dict; function Get_Quantified_Variable_Type (The_Quantified_Variable : Quantified_Variable_Info_Ref) return Type_Info_Ref; --# global in Dictionary.Dict; function Get_Quantified_Variable_Constraint (The_Quantified_Variable : Quantified_Variable_Info_Ref) return Parameter_Constraint_Info_Ref; --# global in Dictionary.Dict; function Get_Quantified_Variable_Region (The_Quantified_Variable : Quantified_Variable_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -- Implicit_Return_Variable_Info -------------------------------------------------------------------------------- function Get_Implicit_Return_Variable_Info_Ref (Item : Dictionary.Symbol) return Implicit_Return_Variable_Info_Ref; --# global in Dictionary.Dict; --# return S => (Item = Dictionary.NullSymbol <-> S = Null_Implicit_Return_Variable_Info_Ref); 
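   --  Usage sketch (illustrative only, not part of the original spec): using
   --  Create_Implicit_Return_Variable, declared immediately below, a caller
   --  might record the implicit return variable of a function and later
   --  recover the owning function via Get_Implicit_Return_Variable_Function
   --  (also declared below).  "RawDict" is a placeholder for this package's
   --  actual name; Return_Name, The_Function, Comp_Unit, Loc and Return_Var
   --  are assumed caller-side objects of the corresponding parameter types.
   --
   --     RawDict.Create_Implicit_Return_Variable
   --       (Name                         => Return_Name,
   --        The_Function                 => The_Function,
   --        Comp_Unit                    => Comp_Unit,
   --        Loc                          => Loc,
   --        The_Implicit_Return_Variable => Return_Var);
   --     pragma Assert
   --       (RawDict.Get_Implicit_Return_Variable_Function (Return_Var) = The_Function);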
procedure Create_Implicit_Return_Variable (Name : in LexTokenManager.Lex_String; The_Function : in Subprogram_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Implicit_Return_Variable : out Implicit_Return_Variable_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# The_Implicit_Return_Variable from Comp_Unit, --# Dictionary.Dict, --# Loc, --# Name, --# The_Function; --# post The_Implicit_Return_Variable /= Null_Implicit_Return_Variable_Info_Ref; function Get_Implicit_Return_Variable_Symbol (The_Implicit_Return_Variable : Implicit_Return_Variable_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; --# return S => ((S = Dictionary.NullSymbol <-> The_Implicit_Return_Variable = Null_Implicit_Return_Variable_Info_Ref) and --# (S = Dictionary.NullSymbol or GetSymbolDiscriminant (S, Dictionary.Dict) = Dictionary.Implicit_Return_Variable_Symbol)); function Get_Implicit_Return_Variable_Name (The_Implicit_Return_Variable : Implicit_Return_Variable_Info_Ref) return LexTokenManager.Lex_String; --# global in Dictionary.Dict; function Get_Implicit_Return_Variable_Function (The_Implicit_Return_Variable : Implicit_Return_Variable_Info_Ref) return Subprogram_Info_Ref; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -- Implicit_In_Stream_Info -------------------------------------------------------------------------------- function Get_Implicit_In_Stream_Info_Ref (Item : Dictionary.Symbol) return Implicit_In_Stream_Info_Ref; --# global in Dictionary.Dict; --# return S => (Item = Dictionary.NullSymbol <-> S = Null_Implicit_In_Stream_Info_Ref); procedure Create_Implicit_In_Stream (The_Own_Variable : in Own_Variable_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Implicit_In_Stream : out Implicit_In_Stream_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# The_Implicit_In_Stream from Comp_Unit, --# Dictionary.Dict, --# Loc, --# The_Own_Variable; --# post The_Implicit_In_Stream /= Null_Implicit_In_Stream_Info_Ref; function Get_Implicit_In_Stream_Symbol (The_Implicit_In_Stream : Implicit_In_Stream_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; --# return S => ((S = Dictionary.NullSymbol <-> The_Implicit_In_Stream = Null_Implicit_In_Stream_Info_Ref) and --# (S = Dictionary.NullSymbol or GetSymbolDiscriminant (S, Dictionary.Dict) = Dictionary.Implicit_In_Stream_Symbol)); function Get_Implicit_In_Stream_Own_Variable (The_Implicit_In_Stream : Implicit_In_Stream_Info_Ref) return Own_Variable_Info_Ref; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -- Rule_Policy_Info -------------------------------------------------------------------------------- procedure Create_Rule_Policy (Scope : in Dictionary.Scopes; Value : in Dictionary.Rule_Policies; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Rule_Policy : out Rule_Policy_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# The_Rule_Policy from Comp_Unit, --# Dictionary.Dict, --# Loc, --# Scope, --# Value; --# post The_Rule_Policy /= Null_Rule_Policy_Info_Ref; procedure Set_Next_Rule_Policy (The_Rule_Policy, Next : in Rule_Policy_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Next, --# The_Rule_Policy; function Get_Rule_Policy_Symbol 
(The_Rule_Policy : Rule_Policy_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; --# return S => ((S = Dictionary.NullSymbol <-> The_Rule_Policy = Null_Rule_Policy_Info_Ref) and --# (S = Dictionary.NullSymbol or GetSymbolDiscriminant (S, Dictionary.Dict) = Dictionary.Rule_Policy_Symbol)); function Get_Rule_Policy_Scope (The_Rule_Policy : Rule_Policy_Info_Ref) return Dictionary.Scopes; --# global in Dictionary.Dict; function Get_Rule_Policy_Value (The_Rule_Policy : Rule_Policy_Info_Ref) return Dictionary.Rule_Policies; --# global in Dictionary.Dict; function Get_Next_Rule_Policy (The_Rule_Policy : Rule_Policy_Info_Ref) return Rule_Policy_Info_Ref; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -- Constituent_Info -------------------------------------------------------------------------------- function Get_Constituent_Info_Ref (Item : Dictionary.Symbol) return Constituent_Info_Ref; --# global in Dictionary.Dict; --# return S => (Item = Dictionary.NullSymbol <-> S = Null_Constituent_Info_Ref); procedure Create_Constituent (The_Own_Variable : in Own_Variable_Info_Ref; The_Variable : in Variable_Info_Ref; Mode : in Dictionary.Modes; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Constituent : out Constituent_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# The_Constituent from Comp_Unit, --# Dictionary.Dict, --# Loc, --# Mode, --# The_Own_Variable, --# The_Variable; --# post The_Constituent /= Null_Constituent_Info_Ref; procedure Set_Next_Constituent (The_Constituent, Next : in Constituent_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Next, --# The_Constituent; function Get_Constituent_Symbol (The_Constituent : Constituent_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; --# return S => ((S = Dictionary.NullSymbol <-> The_Constituent = Null_Constituent_Info_Ref) and --# (S = Dictionary.NullSymbol or GetSymbolDiscriminant (S, Dictionary.Dict) = Dictionary.Constituent_Symbol)); function Get_Constituent_Own_Variable (The_Constituent : Constituent_Info_Ref) return Own_Variable_Info_Ref; --# global in Dictionary.Dict; function Get_Constituent_Variable (The_Constituent : Constituent_Info_Ref) return Variable_Info_Ref; --# global in Dictionary.Dict; function Get_Constituent_Mode (The_Constituent : Constituent_Info_Ref) return Dictionary.Modes; --# global in Dictionary.Dict; function Get_Next_Constituent (The_Constituent : Constituent_Info_Ref) return Constituent_Info_Ref; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -- Context_Clause_Info -------------------------------------------------------------------------------- function Get_Context_Clause_Info_Ref (Item : Dictionary.Symbol) return Context_Clause_Info_Ref; --# global in Dictionary.Dict; --# return S => (Item = Dictionary.NullSymbol <-> S = Null_Context_Clause_Info_Ref); procedure Create_Context_Clause (The_Package : in Package_Info_Ref; The_Subprogram : in Subprogram_Info_Ref; Explicit : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Context_Clause : out Context_Clause_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# The_Context_Clause from Comp_Unit, --# Dictionary.Dict, --# Explicit, --# Loc, --# The_Package, --# The_Subprogram; --# post The_Context_Clause /= 
Null_Context_Clause_Info_Ref; procedure Set_Context_Clause_Explicit (The_Context_Clause : in Context_Clause_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Context_Clause; procedure Set_Next_Context_Clause (The_Context_Clause, Next : in Context_Clause_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Next, --# The_Context_Clause; function Get_Context_Clause_Symbol (The_Context_Clause : Context_Clause_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; --# return S => ((S = Dictionary.NullSymbol <-> The_Context_Clause = Null_Context_Clause_Info_Ref) and --# (S = Dictionary.NullSymbol or GetSymbolDiscriminant (S, Dictionary.Dict) = Dictionary.Context_Clause_Symbol)); function Get_Context_Clause_Is_Subprogram (The_Context_Clause : Context_Clause_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Context_Clause_Package (The_Context_Clause : Context_Clause_Info_Ref) return Package_Info_Ref; --# global in Dictionary.Dict; function Get_Context_Clause_Subprogram (The_Context_Clause : Context_Clause_Info_Ref) return Subprogram_Info_Ref; --# global in Dictionary.Dict; function Get_Context_Clause_Explicit (The_Context_Clause : Context_Clause_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Next_Context_Clause (The_Context_Clause : Context_Clause_Info_Ref) return Context_Clause_Info_Ref; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -- Use_Type_Clause_Info -------------------------------------------------------------------------------- procedure Create_Use_Type_Clause (Type_Mark : in Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Use_Type_Clause : out Use_Type_Clause_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# The_Use_Type_Clause from Comp_Unit, --# Dictionary.Dict, --# Loc, --# Type_Mark; --# post The_Use_Type_Clause /= Null_Use_Type_Clause_Info_Ref; procedure Set_Next_Use_Type_Clause (The_Use_Type_Clause, Next : in Use_Type_Clause_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Next, --# The_Use_Type_Clause; function Get_Use_Type_Clause_Type (The_Use_Type_Clause : Use_Type_Clause_Info_Ref) return Type_Info_Ref; --# global in Dictionary.Dict; function Get_Next_Use_Type_Clause (The_Use_Type_Clause : Use_Type_Clause_Info_Ref) return Use_Type_Clause_Info_Ref; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -- Parameter_Constraint_Info_Ref -------------------------------------------------------------------------------- function Get_Parameter_Constraint_Info_Ref (Item : Dictionary.Symbol) return Parameter_Constraint_Info_Ref; --# global in Dictionary.Dict; --# return S => (Item = Dictionary.NullSymbol <-> S = Null_Parameter_Constraint_Info_Ref); procedure Create_Parameter_Constraint (The_Subprogram_Parameter : in Subprogram_Parameter_Info_Ref; Dimension : in Positive; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Parameter_Constraint : out Parameter_Constraint_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# The_Parameter_Constraint from Comp_Unit, --# Dictionary.Dict, --# Dimension, --# Loc, --# The_Subprogram_Parameter; --# post The_Parameter_Constraint /= Null_Parameter_Constraint_Info_Ref; procedure Set_Next_Parameter_Constraint 
(The_Parameter_Constraint, Next : in Parameter_Constraint_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Next, --# The_Parameter_Constraint; function Get_Parameter_Constraint_Symbol (The_Parameter_Constraint : Parameter_Constraint_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; --# return S => ((S = Dictionary.NullSymbol <-> The_Parameter_Constraint = Null_Parameter_Constraint_Info_Ref) and --# (S = Dictionary.NullSymbol or GetSymbolDiscriminant (S, Dictionary.Dict) = Dictionary.Parameter_Constraint_Symbol)); function Get_Parameter_Constraint_Subprogram_Parameter (The_Parameter_Constraint : Parameter_Constraint_Info_Ref) return Subprogram_Parameter_Info_Ref; --# global in Dictionary.Dict; function Get_Parameter_Constraint_Dimension (The_Parameter_Constraint : Parameter_Constraint_Info_Ref) return Positive; --# global in Dictionary.Dict; function Get_Next_Parameter_Constraint (The_Parameter_Constraint : Parameter_Constraint_Info_Ref) return Parameter_Constraint_Info_Ref; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -- Subprogram_Parameter_Info -------------------------------------------------------------------------------- function Get_Subprogram_Parameter_Info_Ref (Item : Dictionary.Symbol) return Subprogram_Parameter_Info_Ref; --# global in Dictionary.Dict; --# return S => (Item = Dictionary.NullSymbol <-> S = Null_Subprogram_Parameter_Info_Ref); procedure Create_Subprogram_Parameter (Name : in LexTokenManager.Lex_String; The_Subprogram : in Subprogram_Info_Ref; Type_Mark : in Type_Info_Ref; Mode : in Dictionary.Modes; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Subprogram_Parameter : out Subprogram_Parameter_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# The_Subprogram_Parameter from Comp_Unit, --# Dictionary.Dict, --# Loc, --# Mode, --# Name, --# The_Subprogram, --# Type_Mark; --# post The_Subprogram_Parameter /= Null_Subprogram_Parameter_Info_Ref; procedure Set_Subprogram_Parameter_Exported (The_Subprogram_Parameter : in Subprogram_Parameter_Info_Ref; Abstraction : in Dictionary.Abstractions); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Abstraction, --# The_Subprogram_Parameter; procedure Set_Subprogram_Parameter_Imported (The_Subprogram_Parameter : in Subprogram_Parameter_Info_Ref; Abstraction : in Dictionary.Abstractions); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Abstraction, --# The_Subprogram_Parameter; procedure Set_Subprogram_Parameter_Dependencies (The_Subprogram_Parameter : in Subprogram_Parameter_Info_Ref; Abstraction : in Dictionary.Abstractions; Dependency : in Dependency_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Abstraction, --# Dependency, --# The_Subprogram_Parameter; procedure Set_Subprogram_Parameter_Global_References (The_Subprogram_Parameter : in Subprogram_Parameter_Info_Ref; Abstraction : in Dictionary.Abstractions; Reference : in Global_Variable_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Abstraction, --# Reference, --# The_Subprogram_Parameter; procedure Set_Subprogram_Parameter_Subcomponents (The_Subprogram_Parameter : in Subprogram_Parameter_Info_Ref; Subcomponents : in Subcomponent_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Subcomponents, --# The_Subprogram_Parameter; 
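   --  Usage sketch (illustrative only, not part of the original spec): a
   --  caller adding a formal parameter to a subprogram's signature might
   --  combine Create_Subprogram_Parameter with the setters declared above
   --  roughly as follows.  "RawDict" is a placeholder for this package's
   --  actual name; Param_Name, The_Subprogram, Param_Type, Param_Mode (a
   --  Dictionary.Modes value), The_Abstraction (a Dictionary.Abstractions
   --  value), Comp_Unit, Loc and The_Parameter are assumed caller-side
   --  objects of the corresponding parameter types.
   --
   --     RawDict.Create_Subprogram_Parameter
   --       (Name                     => Param_Name,
   --        The_Subprogram           => The_Subprogram,
   --        Type_Mark                => Param_Type,
   --        Mode                     => Param_Mode,
   --        Comp_Unit                => Comp_Unit,
   --        Loc                      => Loc,
   --        The_Subprogram_Parameter => The_Parameter);
   --     RawDict.Set_Subprogram_Parameter_Imported
   --       (The_Subprogram_Parameter => The_Parameter,
   --        Abstraction              => The_Abstraction);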
procedure Set_Subprogram_Parameter_Index_Constraints (The_Subprogram_Parameter : in Subprogram_Parameter_Info_Ref; The_Index_Constraints : in Parameter_Constraint_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Index_Constraints, --# The_Subprogram_Parameter; procedure Set_Next_Subprogram_Parameter (The_Subprogram_Parameter, Next : in Subprogram_Parameter_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Next, --# The_Subprogram_Parameter; function Get_Subprogram_Parameter_Symbol (The_Subprogram_Parameter : Subprogram_Parameter_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; --# return S => ((S = Dictionary.NullSymbol <-> The_Subprogram_Parameter = Null_Subprogram_Parameter_Info_Ref) and --# (S = Dictionary.NullSymbol or GetSymbolDiscriminant (S, Dictionary.Dict) = Dictionary.Subprogram_Parameter_Symbol)); function Get_Subprogram_Parameter_Name (The_Subprogram_Parameter : Subprogram_Parameter_Info_Ref) return LexTokenManager.Lex_String; --# global in Dictionary.Dict; function Get_Subprogram_Parameter_Subprogram (The_Subprogram_Parameter : Subprogram_Parameter_Info_Ref) return Subprogram_Info_Ref; --# global in Dictionary.Dict; function Get_Subprogram_Parameter_Type (The_Subprogram_Parameter : Subprogram_Parameter_Info_Ref) return Type_Info_Ref; --# global in Dictionary.Dict; function Get_Subprogram_Parameter_Mode (The_Subprogram_Parameter : Subprogram_Parameter_Info_Ref) return Dictionary.Modes; --# global in Dictionary.Dict; function Get_Subprogram_Parameter_Exported (The_Subprogram_Parameter : Subprogram_Parameter_Info_Ref; Abstraction : Dictionary.Abstractions) return Boolean; --# global in Dictionary.Dict; function Get_Subprogram_Parameter_Imported (The_Subprogram_Parameter : Subprogram_Parameter_Info_Ref; Abstraction : Dictionary.Abstractions) return Boolean; --# global in Dictionary.Dict; function Get_Subprogram_Parameter_Dependencies (The_Subprogram_Parameter : Subprogram_Parameter_Info_Ref; Abstraction : Dictionary.Abstractions) return Dependency_Info_Ref; --# global in Dictionary.Dict; function Get_Subprogram_Parameter_Global_References (The_Subprogram_Parameter : Subprogram_Parameter_Info_Ref; Abstraction : Dictionary.Abstractions) return Global_Variable_Info_Ref; --# global in Dictionary.Dict; function Get_Subprogram_Parameter_Subcomponents (The_Subprogram_Parameter : Subprogram_Parameter_Info_Ref) return Subcomponent_Info_Ref; --# global in Dictionary.Dict; function Get_Subprogram_Parameter_Index_Constraints (The_Subprogram_Parameter : Subprogram_Parameter_Info_Ref) return Parameter_Constraint_Info_Ref; --# global in Dictionary.Dict; function Get_Next_Subprogram_Parameter (The_Subprogram_Parameter : Subprogram_Parameter_Info_Ref) return Subprogram_Parameter_Info_Ref; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -- Subprogram_Info -------------------------------------------------------------------------------- function Get_Subprogram_Info_Ref (Item : Dictionary.Symbol) return Subprogram_Info_Ref; --# global in Dictionary.Dict; --# return S => (Item = Dictionary.NullSymbol <-> S = Null_Subprogram_Info_Ref); procedure Create_Subprogram (Name : in LexTokenManager.Lex_String; The_Declaration : in Declaration_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Subprogram : out Subprogram_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# 
The_Subprogram from Comp_Unit, --# Dictionary.Dict, --# Loc, --# Name, --# The_Declaration; --# post The_Subprogram /= Null_Subprogram_Info_Ref; procedure Set_Subprogram_Implicit_Proof_Function (The_Subprogram : in Subprogram_Info_Ref; Abstraction : in Dictionary.Abstractions; The_Proof_Function : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Abstraction, --# The_Proof_Function, --# The_Subprogram; procedure Set_Subprogram_Implicit_Return_Variable (The_Subprogram : in Subprogram_Info_Ref; Abstraction : in Dictionary.Abstractions; The_Implicit_Return_Variable : in Implicit_Return_Variable_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Abstraction, --# The_Implicit_Return_Variable, --# The_Subprogram; procedure Set_Subprogram_Signature_Not_Wellformed (The_Subprogram : in Subprogram_Info_Ref; Abstraction : in Dictionary.Abstractions); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Abstraction, --# The_Subprogram; procedure Set_Subprogram_Precondition (The_Subprogram : in Subprogram_Info_Ref; Abstraction : in Dictionary.Abstractions; Precondition : in ExaminerConstants.RefType); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Abstraction, --# Precondition, --# The_Subprogram; procedure Set_Subprogram_Postcondition (The_Subprogram : in Subprogram_Info_Ref; Abstraction : in Dictionary.Abstractions; Postcondition : in ExaminerConstants.RefType); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Abstraction, --# Postcondition, --# The_Subprogram; procedure Set_Subprogram_Body (The_Subprogram : in Subprogram_Info_Ref; The_Body : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Body, --# The_Subprogram; procedure Set_Subprogram_Has_Proper_Body (The_Subprogram : in Subprogram_Info_Ref; Is_Hidden : in Boolean); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Is_Hidden, --# The_Subprogram; procedure Set_Subprogram_Has_Second_Annotation (The_Subprogram : in Subprogram_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Subprogram; procedure Set_Subprogram_Has_Second_Constraint (The_Subprogram : in Subprogram_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Subprogram; procedure Set_Subprogram_Has_Derives_Annotation (The_Subprogram : in Subprogram_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Subprogram; procedure Set_Subprogram_Has_Delay_Property (The_Subprogram : in Subprogram_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Subprogram; procedure Set_Subprogram_Mark_Accounts_For_Delay (The_Subprogram : in Subprogram_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Subprogram; procedure Set_Subprogram_Generic_Unit (The_Subprogram : in Subprogram_Info_Ref; The_Generic_Unit : in Generic_Unit_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Generic_Unit, --# The_Subprogram; procedure Set_Subprogram_Instantiation_Of (The_Subprogram : in Subprogram_Info_Ref; The_Generic : in Subprogram_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Generic, --# The_Subprogram; procedure Set_Subprogram_First_Generic_Association (The_Subprogram : in Subprogram_Info_Ref; The_Generic_Association 
: in Generic_Association_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Generic_Association, --# The_Subprogram; procedure Set_Subprogram_Last_Generic_Association (The_Subprogram : in Subprogram_Info_Ref; The_Generic_Association : in Generic_Association_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Generic_Association, --# The_Subprogram; procedure Set_Subprogram_Suspends_List (The_Subprogram : in Subprogram_Info_Ref; The_Suspends_List : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Subprogram, --# The_Suspends_List; procedure Set_Subprogram_Is_Entry (The_Subprogram : in Subprogram_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Subprogram; procedure Set_Subprogram_Entry_Barrier (The_Subprogram : in Subprogram_Info_Ref; The_Barrier : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Barrier, --# The_Subprogram; procedure Set_Subprogram_Is_Interrupt_Handler (The_Subprogram : in Subprogram_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Subprogram; procedure Set_Subprogram_Uses_Unprotected_Variables (The_Subprogram : in Subprogram_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Subprogram; procedure Set_Subprogram_Uses_Unchecked_Conversion (The_Subprogram : in Subprogram_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Subprogram; procedure Set_Subprogram_Assigns_From_External (The_Subprogram : in Subprogram_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Subprogram; procedure Set_Subprogram_Return_Type (The_Subprogram : in Subprogram_Info_Ref; Type_Mark : in Type_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Subprogram, --# Type_Mark; procedure Set_Subprogram_With_Clauses (The_Subprogram : in Subprogram_Info_Ref; The_Context_Clause : in Context_Clause_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Context_Clause, --# The_Subprogram; procedure Set_Subprogram_Use_Type_Clauses (The_Subprogram : in Subprogram_Info_Ref; The_Use_Type_Clause : in Use_Type_Clause_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Subprogram, --# The_Use_Type_Clause; procedure Set_Subprogram_Inherit_Clauses (The_Subprogram : in Subprogram_Info_Ref; The_Inherit_Clause : in Context_Clause_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Inherit_Clause, --# The_Subprogram; procedure Set_Subprogram_First_Parameter (The_Subprogram : in Subprogram_Info_Ref; The_Subprogram_Parameter : in Subprogram_Parameter_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Subprogram, --# The_Subprogram_Parameter; procedure Set_Subprogram_Last_Parameter (The_Subprogram : in Subprogram_Info_Ref; The_Subprogram_Parameter : in Subprogram_Parameter_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Subprogram, --# The_Subprogram_Parameter; procedure Set_Subprogram_First_Global_Variable (The_Subprogram : in Subprogram_Info_Ref; Abstraction : in Dictionary.Abstractions; The_Global_Variable : in Global_Variable_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Abstraction, 
--# The_Global_Variable, --# The_Subprogram; procedure Set_Subprogram_Last_Global_Variable (The_Subprogram : in Subprogram_Info_Ref; Abstraction : in Dictionary.Abstractions; The_Global_Variable : in Global_Variable_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Abstraction, --# The_Global_Variable, --# The_Subprogram; procedure Set_Subprogram_Renaming_Declarations (The_Subprogram : in Subprogram_Info_Ref; The_Declaration : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Declaration, --# The_Subprogram; procedure Set_Subprogram_First_Declaration (The_Subprogram : in Subprogram_Info_Ref; The_Declaration : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Declaration, --# The_Subprogram; procedure Set_Subprogram_Last_Declaration (The_Subprogram : in Subprogram_Info_Ref; The_Declaration : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Declaration, --# The_Subprogram; procedure Set_Subprogram_First_Loop (The_Subprogram : in Subprogram_Info_Ref; The_Loop : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Loop, --# The_Subprogram; procedure Set_Subprogram_Last_Loop (The_Subprogram : in Subprogram_Info_Ref; The_Loop : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Loop, --# The_Subprogram; function Get_Subprogram_Symbol (The_Subprogram : Subprogram_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; --# return S => ((S = Dictionary.NullSymbol <-> The_Subprogram = Null_Subprogram_Info_Ref) and --# (S = Dictionary.NullSymbol or GetSymbolDiscriminant (S, Dictionary.Dict) = Dictionary.Subprogram_Symbol)); function Get_Subprogram_Name (The_Subprogram : Subprogram_Info_Ref) return LexTokenManager.Lex_String; --# global in Dictionary.Dict; function Get_Subprogram_Implicit_Proof_Function (The_Subprogram : Subprogram_Info_Ref; Abstraction : Dictionary.Abstractions) return Dictionary.Symbol; --# global in Dictionary.Dict; function Get_Subprogram_Implicit_Return_Variable (The_Subprogram : Subprogram_Info_Ref; Abstraction : Dictionary.Abstractions) return Implicit_Return_Variable_Info_Ref; --# global in Dictionary.Dict; function Get_Subprogram_Signature_Is_Wellformed (The_Subprogram : Subprogram_Info_Ref; Abstraction : Dictionary.Abstractions) return Boolean; --# global in Dictionary.Dict; function Get_Subprogram_Precondition (The_Subprogram : Subprogram_Info_Ref; Abstraction : Dictionary.Abstractions) return ExaminerConstants.RefType; --# global in Dictionary.Dict; function Get_Subprogram_Postcondition (The_Subprogram : Subprogram_Info_Ref; Abstraction : Dictionary.Abstractions) return ExaminerConstants.RefType; --# global in Dictionary.Dict; function Get_Subprogram_Body (The_Subprogram : Subprogram_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Subprogram_Has_Proper_Body (The_Subprogram : Subprogram_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Subprogram_Body_Is_Hidden (The_Subprogram : Subprogram_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Subprogram_Has_Second_Annotation (The_Subprogram : Subprogram_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Subprogram_Has_Second_Constraint (The_Subprogram : Subprogram_Info_Ref) return Boolean; --# global in Dictionary.Dict; function 
Get_Subprogram_Has_Derives_Annotation (The_Subprogram : Subprogram_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Subprogram_Has_Delay_Property (The_Subprogram : Subprogram_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Subprogram_Delay_Property_Is_Accounted_For (The_Subprogram : Subprogram_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Subprogram_Generic_Unit (The_Subprogram : Subprogram_Info_Ref) return Generic_Unit_Info_Ref; --# global in Dictionary.Dict; function Get_Subprogram_Instantiation_Of (The_Subprogram : Subprogram_Info_Ref) return Subprogram_Info_Ref; --# global in Dictionary.Dict; function Get_Subprogram_First_Generic_Association (The_Subprogram : Subprogram_Info_Ref) return Generic_Association_Info_Ref; --# global in Dictionary.Dict; function Get_Subprogram_Last_Generic_Association (The_Subprogram : Subprogram_Info_Ref) return Generic_Association_Info_Ref; --# global in Dictionary.Dict; function Get_Subprogram_Suspends_List (The_Subprogram : Subprogram_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; function Get_Subprogram_Is_Entry (The_Subprogram : Subprogram_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Subprogram_Entry_Barrier (The_Subprogram : Subprogram_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; function Get_Subprogram_Is_Interrupt_Handler (The_Subprogram : Subprogram_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Subprogram_Uses_Unprotected_Variables (The_Subprogram : Subprogram_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Subprogram_Uses_Unchecked_Conversion (The_Subprogram : Subprogram_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Subprogram_Assigns_From_External (The_Subprogram : Subprogram_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Subprogram_Return_Type (The_Subprogram : Subprogram_Info_Ref) return Type_Info_Ref; --# global in Dictionary.Dict; function Get_Subprogram_With_Clauses (The_Subprogram : Subprogram_Info_Ref) return Context_Clause_Info_Ref; --# global in Dictionary.Dict; function Get_Subprogram_Use_Type_Clauses (The_Subprogram : Subprogram_Info_Ref) return Use_Type_Clause_Info_Ref; --# global in Dictionary.Dict; function Get_Subprogram_Inherit_Clauses (The_Subprogram : Subprogram_Info_Ref) return Context_Clause_Info_Ref; --# global in Dictionary.Dict; function Get_Subprogram_First_Parameter (The_Subprogram : Subprogram_Info_Ref) return Subprogram_Parameter_Info_Ref; --# global in Dictionary.Dict; function Get_Subprogram_Last_Parameter (The_Subprogram : Subprogram_Info_Ref) return Subprogram_Parameter_Info_Ref; --# global in Dictionary.Dict; function Get_Subprogram_First_Global_Variable (The_Subprogram : Subprogram_Info_Ref; Abstraction : Dictionary.Abstractions) return Global_Variable_Info_Ref; --# global in Dictionary.Dict; function Get_Subprogram_Last_Global_Variable (The_Subprogram : Subprogram_Info_Ref; Abstraction : Dictionary.Abstractions) return Global_Variable_Info_Ref; --# global in Dictionary.Dict; function Get_Subprogram_Renaming_Declarations (The_Subprogram : Subprogram_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Subprogram_First_Declaration (The_Subprogram : Subprogram_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Subprogram_Last_Declaration (The_Subprogram : Subprogram_Info_Ref) return Declaration_Info_Ref; --# global in 
Dictionary.Dict; function Get_Subprogram_First_Loop (The_Subprogram : Subprogram_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; function Get_Subprogram_Last_Loop (The_Subprogram : Subprogram_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; function Get_Subprogram_Specification (The_Subprogram : Subprogram_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -- Operator_Info -------------------------------------------------------------------------------- function Get_Operator_Info_Ref (Item : Dictionary.Symbol) return Operator_Info_Ref; --# global in Dictionary.Dict; --# return S => (Item = Dictionary.NullSymbol <-> S = Null_Operator_Info_Ref); procedure Create_Unary_Operator (Name : in SP_Symbols.SP_Symbol; Operand : in Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Operator : out Operator_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# The_Operator from Comp_Unit, --# Dictionary.Dict, --# Loc, --# Name, --# Operand; --# post The_Operator /= Null_Operator_Info_Ref; procedure Create_Binary_Operator (Name : in SP_Symbols.SP_Symbol; Left, Right : in Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Operator : out Operator_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# The_Operator from Comp_Unit, --# Dictionary.Dict, --# Left, --# Loc, --# Name, --# Right; --# post The_Operator /= Null_Operator_Info_Ref; function Get_Operator_Symbol (The_Operator : Operator_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; --# return S => ((S = Dictionary.NullSymbol <-> The_Operator = Null_Operator_Info_Ref) and --# (S = Dictionary.NullSymbol or GetSymbolDiscriminant (S, Dictionary.Dict) = Dictionary.Operator_Symbol)); function Get_Operator_Name (The_Operator : Operator_Info_Ref) return SP_Symbols.SP_Symbol; --# global in Dictionary.Dict; function Get_Operator_Is_Binary (The_Operator : Operator_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Operator_Operand (The_Operator : Operator_Info_Ref) return Type_Info_Ref; --# global in Dictionary.Dict; function Get_Operator_Left_Operand (The_Operator : Operator_Info_Ref) return Type_Info_Ref; --# global in Dictionary.Dict; function Get_Operator_Right_Operand (The_Operator : Operator_Info_Ref) return Type_Info_Ref; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -- Dependency_Info -------------------------------------------------------------------------------- type Kind_Of_Dependency_T is (Dependency_Parameter_Item, Dependency_Variable_Item); function Get_Dependency_Info_Ref (Item : Dictionary.Symbol) return Dependency_Info_Ref; --# global in Dictionary.Dict; --# return S => (Item = Dictionary.NullSymbol <-> S = Null_Dependency_Info_Ref); procedure Create_Dependency (The_Import_Parameter : in Subprogram_Parameter_Info_Ref; The_Import_Variable : in Variable_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Dependency : out Dependency_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# The_Dependency from Comp_Unit, --# Dictionary.Dict, --# Loc, --# The_Import_Parameter, --# The_Import_Variable; --# post The_Dependency /= Null_Dependency_Info_Ref; procedure Set_Next_Dependency 
(The_Dependency, Next : in Dependency_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Next, --# The_Dependency; function Get_Dependency_Symbol (The_Dependency : Dependency_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; --# return S => ((S = Dictionary.NullSymbol <-> The_Dependency = Null_Dependency_Info_Ref) and --# (S = Dictionary.NullSymbol or GetSymbolDiscriminant (S, Dictionary.Dict) = Dictionary.Dependency_Symbol)); function Get_Kind_Of_Dependency (The_Dependency : Dependency_Info_Ref) return Kind_Of_Dependency_T; --# global in Dictionary.Dict; function Get_Dependency_Import_Parameter (The_Dependency : Dependency_Info_Ref) return Subprogram_Parameter_Info_Ref; --# global in Dictionary.Dict; function Get_Dependency_Import_Variable (The_Dependency : Dependency_Info_Ref) return Variable_Info_Ref; --# global in Dictionary.Dict; function Get_Next_Dependency (The_Dependency : Dependency_Info_Ref) return Dependency_Info_Ref; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -- Package_Info -------------------------------------------------------------------------------- function Get_Package_Info_Ref (Item : Dictionary.Symbol) return Package_Info_Ref; --# global in Dictionary.Dict; --# return S => (Item = Dictionary.NullSymbol <-> S = Null_Package_Info_Ref); procedure Create_Package (Name : in LexTokenManager.Lex_String; The_Declaration : in Declaration_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Package : out Package_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# The_Package from Comp_Unit, --# Dictionary.Dict, --# Loc, --# Name, --# The_Declaration; --# post The_Package /= Null_Package_Info_Ref; procedure Set_Package_Body (The_Package : in Package_Info_Ref; The_Body : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Body, --# The_Package; procedure Set_Package_Has_Proper_Body (The_Package : in Package_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Package; procedure Set_Package_Inherit_Clauses (The_Package : in Package_Info_Ref; The_Context_Clause : in Context_Clause_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Context_Clause, --# The_Package; procedure Set_Package_Own_Variables (The_Package : in Package_Info_Ref; Own_Variables : in Own_Variable_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Own_Variables, --# The_Package; procedure Set_Package_Task_List (The_Package : in Package_Info_Ref; Task_List : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Task_List, --# The_Package; procedure Set_Package_First_Loop (The_Package : in Package_Info_Ref; The_Loop : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Loop, --# The_Package; procedure Set_Package_Last_Loop (The_Package : in Package_Info_Ref; The_Loop : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Loop, --# The_Package; procedure Set_Package_Visible_With_Clauses (The_Package : in Package_Info_Ref; The_Context_Clause : in Context_Clause_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Context_Clause, --# The_Package; procedure 
Set_Package_Visible_Use_Type_Clauses (The_Package : in Package_Info_Ref; The_Use_Type_Clause : in Use_Type_Clause_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Package, --# The_Use_Type_Clause; procedure Set_Package_Visible_Renaming_Declarations (The_Package : in Package_Info_Ref; The_Declaration : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Declaration, --# The_Package; procedure Set_Package_First_Visible_Declaration (The_Package : in Package_Info_Ref; The_Declaration : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Declaration, --# The_Package; procedure Set_Package_Last_Visible_Declaration (The_Package : in Package_Info_Ref; The_Declaration : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Declaration, --# The_Package; procedure Set_Package_Local_With_Clauses (The_Package : in Package_Info_Ref; The_Context_Clause : in Context_Clause_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Context_Clause, --# The_Package; procedure Set_Package_Local_Use_Type_Clauses (The_Package : in Package_Info_Ref; The_Use_Type_Clause : in Use_Type_Clause_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Package, --# The_Use_Type_Clause; procedure Set_Package_Local_Renaming_Declarations (The_Package : in Package_Info_Ref; The_Declaration : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Declaration, --# The_Package; procedure Set_Package_First_Local_Declaration (The_Package : in Package_Info_Ref; The_Declaration : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Declaration, --# The_Package; procedure Set_Package_Last_Local_Declaration (The_Package : in Package_Info_Ref; The_Declaration : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Declaration, --# The_Package; procedure Set_Package_First_Private_Declaration (The_Package : in Package_Info_Ref; The_Declaration : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Declaration, --# The_Package; procedure Set_Package_Last_Private_Declaration (The_Package : in Package_Info_Ref; The_Declaration : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Declaration, --# The_Package; procedure Set_Package_Elaborate_Body_Found (The_Package : in Package_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Package; procedure Set_Package_Parent (The_Package : in Package_Info_Ref; The_Parent : in Package_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Package, --# The_Parent; procedure Set_Package_Next_Sibling (The_Package : in Package_Info_Ref; The_Sibling : in Package_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Package, --# The_Sibling; procedure Set_Package_Extends (The_Package : in Package_Info_Ref; The_Extends : in Package_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Extends, --# The_Package; procedure Set_Package_Declares_Tagged_Type (The_Package : in Package_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict 
from *, --# The_Package; procedure Set_Package_First_Private_Child (The_Package : in Package_Info_Ref; The_Child : in Package_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Child, --# The_Package; procedure Set_Package_Last_Private_Child (The_Package : in Package_Info_Ref; The_Child : in Package_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Child, --# The_Package; procedure Set_Package_First_Public_Child (The_Package : in Package_Info_Ref; The_Child : in Package_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Child, --# The_Package; procedure Set_Package_Last_Public_Child (The_Package : in Package_Info_Ref; The_Child : in Package_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Child, --# The_Package; procedure Set_Package_Is_Private (The_Package : in Package_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Package; procedure Set_Package_Generic_Unit (The_Package : in Package_Info_Ref; The_Generic_Unit : in Generic_Unit_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Generic_Unit, --# The_Package; function Get_Package_Symbol (The_Package : Package_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; --# return S => ((S = Dictionary.NullSymbol <-> The_Package = Null_Package_Info_Ref) and --# (S = Dictionary.NullSymbol or GetSymbolDiscriminant (S, Dictionary.Dict) = Dictionary.Package_Symbol)); function Get_Package_Name (The_Package : Package_Info_Ref) return LexTokenManager.Lex_String; --# global in Dictionary.Dict; function Get_Package_Body (The_Package : Package_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Package_Has_Proper_Body (The_Package : Package_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Package_Inherit_Clauses (The_Package : Package_Info_Ref) return Context_Clause_Info_Ref; --# global in Dictionary.Dict; function Get_Package_Own_Variables (The_Package : Package_Info_Ref) return Own_Variable_Info_Ref; --# global in Dictionary.Dict; function Get_Package_Task_List (The_Package : Package_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; function Get_Package_First_Loop (The_Package : Package_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; function Get_Package_Last_Loop (The_Package : Package_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; function Get_Package_Specification (The_Package : Package_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Package_Visible_With_Clauses (The_Package : Package_Info_Ref) return Context_Clause_Info_Ref; --# global in Dictionary.Dict; function Get_Package_Visible_Use_Type_Clauses (The_Package : Package_Info_Ref) return Use_Type_Clause_Info_Ref; --# global in Dictionary.Dict; function Get_Package_Visible_Renaming_Declarations (The_Package : Package_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Package_First_Visible_Declaration (The_Package : Package_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Package_Last_Visible_Declaration (The_Package : Package_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Package_Local_With_Clauses (The_Package : Package_Info_Ref) return Context_Clause_Info_Ref; --# global in Dictionary.Dict; function 
Get_Package_Local_Use_Type_Clauses (The_Package : Package_Info_Ref) return Use_Type_Clause_Info_Ref; --# global in Dictionary.Dict; function Get_Package_Local_Renaming_Declarations (The_Package : Package_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Package_First_Local_Declaration (The_Package : Package_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Package_Last_Local_Declaration (The_Package : Package_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Package_First_Private_Declaration (The_Package : Package_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Package_Last_Private_Declaration (The_Package : Package_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Package_Elaborate_Body_Found (The_Package : Package_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Package_Parent (The_Package : Package_Info_Ref) return Package_Info_Ref; --# global in Dictionary.Dict; function Get_Package_Next_Sibling (The_Package : Package_Info_Ref) return Package_Info_Ref; --# global in Dictionary.Dict; function Get_Package_Extends (The_Package : Package_Info_Ref) return Package_Info_Ref; --# global in Dictionary.Dict; function Get_Package_Declares_Tagged_Type (The_Package : Package_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Package_First_Private_Child (The_Package : Package_Info_Ref) return Package_Info_Ref; --# global in Dictionary.Dict; function Get_Package_Last_Private_Child (The_Package : Package_Info_Ref) return Package_Info_Ref; --# global in Dictionary.Dict; function Get_Package_First_Public_Child (The_Package : Package_Info_Ref) return Package_Info_Ref; --# global in Dictionary.Dict; function Get_Package_Last_Public_Child (The_Package : Package_Info_Ref) return Package_Info_Ref; --# global in Dictionary.Dict; function Get_Package_Is_Private (The_Package : Package_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Package_Generic_Unit (The_Package : Package_Info_Ref) return Generic_Unit_Info_Ref; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -- Generic_Parameter_Info -------------------------------------------------------------------------------- function Get_Generic_Parameter_Info_Ref (Item : Dictionary.Symbol) return Generic_Parameter_Info_Ref; --# global in Dictionary.Dict; --# return S => (Item = Dictionary.NullSymbol <-> S = Null_Generic_Parameter_Info_Ref); procedure Create_Generic_Parameter (Owning_Generic : in Generic_Unit_Info_Ref; Type_Mark : in Type_Info_Ref; Object : in Constant_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Generic_Parameter : out Generic_Parameter_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# The_Generic_Parameter from Comp_Unit, --# Dictionary.Dict, --# Loc, --# Object, --# Owning_Generic, --# Type_Mark; --# post The_Generic_Parameter /= Null_Generic_Parameter_Info_Ref; procedure Set_Next_Generic_Parameter (The_Generic_Parameter, Next : in Generic_Parameter_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Next, --# The_Generic_Parameter; function Get_Generic_Parameter_Symbol (The_Generic_Parameter : Generic_Parameter_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; --# return S => ((S = Dictionary.NullSymbol <-> 
The_Generic_Parameter = Null_Generic_Parameter_Info_Ref) and --# (S = Dictionary.NullSymbol or GetSymbolDiscriminant (S, Dictionary.Dict) = Dictionary.Generic_Parameter_Symbol)); function Get_Generic_Parameter_Owning_Generic (The_Generic_Parameter : Generic_Parameter_Info_Ref) return Generic_Unit_Info_Ref; --# global in Dictionary.Dict; function Get_Generic_Parameter_Kind (The_Generic_Parameter : Generic_Parameter_Info_Ref) return Dictionary.Generic_Parameter_Kind; --# global in Dictionary.Dict; function Get_Generic_Parameter_Type (The_Generic_Parameter : Generic_Parameter_Info_Ref) return Type_Info_Ref; --# global in Dictionary.Dict; function Get_Generic_Parameter_Object (The_Generic_Parameter : Generic_Parameter_Info_Ref) return Constant_Info_Ref; --# global in Dictionary.Dict; function Get_Next_Generic_Parameter (The_Generic_Parameter : Generic_Parameter_Info_Ref) return Generic_Parameter_Info_Ref; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -- Generic_Unit_Info -------------------------------------------------------------------------------- function Get_Generic_Unit_Info_Ref (Item : Dictionary.Symbol) return Generic_Unit_Info_Ref; --# global in Dictionary.Dict; --# return S => (Item = Dictionary.NullSymbol <-> S = Null_Generic_Unit_Info_Ref); procedure Create_Generic_Unit (Kind : in Dictionary.Generic_Kind; Scope : in Dictionary.Scopes; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Generic_Unit : out Generic_Unit_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# The_Generic_Unit from Comp_Unit, --# Dictionary.Dict, --# Kind, --# Loc, --# Scope; --# post The_Generic_Unit /= Null_Generic_Unit_Info_Ref; procedure Set_Generic_Unit_First_Declaration (The_Generic_Unit : in Generic_Unit_Info_Ref; The_Declaration : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Declaration, --# The_Generic_Unit; procedure Set_Generic_Unit_Last_Declaration (The_Generic_Unit : in Generic_Unit_Info_Ref; The_Declaration : in Declaration_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Declaration, --# The_Generic_Unit; procedure Set_Generic_Unit_First_Generic_Parameter (The_Generic_Unit : in Generic_Unit_Info_Ref; The_Generic_Parameter : in Generic_Parameter_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Generic_Parameter, --# The_Generic_Unit; procedure Set_Generic_Unit_Last_Generic_Parameter (The_Generic_Unit : in Generic_Unit_Info_Ref; The_Generic_Parameter : in Generic_Parameter_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Generic_Parameter, --# The_Generic_Unit; procedure Set_Generic_Unit_Owning_Subprogram (The_Generic_Unit : in Generic_Unit_Info_Ref; The_Subprogram : in Subprogram_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Generic_Unit, --# The_Subprogram; procedure Set_Generic_Unit_Owning_Package (The_Generic_Unit : in Generic_Unit_Info_Ref; The_Package : in Package_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# The_Generic_Unit, --# The_Package; function Get_Generic_Unit_Symbol (The_Generic_Unit : Generic_Unit_Info_Ref) return Dictionary.Symbol; --# global in Dictionary.Dict; --# return S => ((S = Dictionary.NullSymbol <-> The_Generic_Unit = Null_Generic_Unit_Info_Ref) and --# (S = 
Dictionary.NullSymbol or GetSymbolDiscriminant (S, Dictionary.Dict) = Dictionary.Generic_Unit_Symbol)); function Get_Generic_Unit_Scope (The_Generic_Unit : Generic_Unit_Info_Ref) return Dictionary.Scopes; --# global in Dictionary.Dict; function Get_Generic_Unit_First_Declaration (The_Generic_Unit : Generic_Unit_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Generic_Unit_Last_Declaration (The_Generic_Unit : Generic_Unit_Info_Ref) return Declaration_Info_Ref; --# global in Dictionary.Dict; function Get_Generic_Unit_First_Generic_Parameter (The_Generic_Unit : Generic_Unit_Info_Ref) return Generic_Parameter_Info_Ref; --# global in Dictionary.Dict; function Get_Generic_Unit_Last_Generic_Parameter (The_Generic_Unit : Generic_Unit_Info_Ref) return Generic_Parameter_Info_Ref; --# global in Dictionary.Dict; function Get_Generic_Unit_Kind (The_Generic_Unit : Generic_Unit_Info_Ref) return Dictionary.Generic_Kind; --# global in Dictionary.Dict; function Get_Generic_Unit_Owning_Subprogram (The_Generic_Unit : Generic_Unit_Info_Ref) return Subprogram_Info_Ref; --# global in Dictionary.Dict; function Get_Generic_Unit_Owning_Package (The_Generic_Unit : Generic_Unit_Info_Ref) return Package_Info_Ref; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -- Generic_Association_Info -------------------------------------------------------------------------------- procedure Create_Generic_Type_Association (Formal_Type : in Type_Info_Ref; Actual_Type : in Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Generic_Association : out Generic_Association_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# The_Generic_Association from Actual_Type, --# Comp_Unit, --# Dictionary.Dict, --# Formal_Type, --# Loc; --# post The_Generic_Association /= Null_Generic_Association_Info_Ref; procedure Create_Generic_Object_Association (Formal_Object : in Constant_Info_Ref; Actual_Object : in Constant_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Generic_Association : out Generic_Association_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# The_Generic_Association from Actual_Object, --# Comp_Unit, --# Dictionary.Dict, --# Formal_Object, --# Loc; --# post The_Generic_Association /= Null_Generic_Association_Info_Ref; procedure Set_Next_Generic_Association (The_Generic_Association, Next : in Generic_Association_Info_Ref); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Next, --# The_Generic_Association; function Get_Generic_Association_Is_Object (The_Generic_Association : Generic_Association_Info_Ref) return Boolean; --# global in Dictionary.Dict; function Get_Generic_Association_Formal_Type (The_Generic_Association : Generic_Association_Info_Ref) return Type_Info_Ref; --# global in Dictionary.Dict; function Get_Generic_Association_Formal_Object (The_Generic_Association : Generic_Association_Info_Ref) return Constant_Info_Ref; --# global in Dictionary.Dict; function Get_Generic_Association_Actual_Type (The_Generic_Association : Generic_Association_Info_Ref) return Type_Info_Ref; --# global in Dictionary.Dict; function Get_Generic_Association_Actual_Object (The_Generic_Association : Generic_Association_Info_Ref) return Constant_Info_Ref; --# global in Dictionary.Dict; function Get_Next_Generic_Association (The_Generic_Association : 
Generic_Association_Info_Ref) return Generic_Association_Info_Ref; --# global in Dictionary.Dict; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- procedure CreateOwnTask (Variable : in Variable_Info_Ref; Owner : in Package_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; OwnTask : out Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Comp_Unit, --# Loc, --# Owner, --# Variable & --# OwnTask from Comp_Unit, --# Dictionary.Dict, --# Loc; procedure SetNextOwnTask (Current, Next : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Current, --# Next; function GetNextOwnTask (Current : Dictionary.Symbol) return Dictionary.Symbol; --# global in Dictionary.Dict; function GetOwnTaskVariable (OwnTask : Dictionary.Symbol) return Variable_Info_Ref; --# global in Dictionary.Dict; function GetOwnTaskOwner (OwnTask : Dictionary.Symbol) return Package_Info_Ref; --# global in Dictionary.Dict; procedure CreateImplicitProofFunction (Ada_Function : in Subprogram_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; ProofFunction : out Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Ada_Function, --# Comp_Unit, --# Loc & --# ProofFunction from Comp_Unit, --# Dictionary.Dict, --# Loc; function GetImplicitProofFunctionAdaFunction (ProofFunction : Dictionary.Symbol) return Subprogram_Info_Ref; --# global in Dictionary.Dict; -- discriminants ----------------------------------------------------------------------- procedure CreateKnownDiscriminant (Name : in LexTokenManager.Lex_String; Protected_Type : in Type_Info_Ref; Type_Mark : in Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; KnownDiscriminant : out Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# KnownDiscriminant from Comp_Unit, --# Dictionary.Dict, --# Loc, --# Name, --# Protected_Type, --# Type_Mark; procedure SetNextDiscriminant (Current, Next : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Current, --# Next; procedure SetDiscriminantSetsPriority (Discriminant : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Discriminant; function GetNextDiscriminant (Discriminant : Dictionary.Symbol) return Dictionary.Symbol; --# global in Dictionary.Dict; function GetDiscriminantName (Discriminant : Dictionary.Symbol) return LexTokenManager.Lex_String; --# global in Dictionary.Dict; function GetDiscriminantTypeMark (Discriminant : Dictionary.Symbol) return Type_Info_Ref; --# global in Dictionary.Dict; function GetDiscriminantProtectedType (Discriminant : Dictionary.Symbol) return Type_Info_Ref; --# global in Dictionary.Dict; function GetDiscriminantSetsPriority (Discriminant : Dictionary.Symbol) return Boolean; --# global in Dictionary.Dict; -- "subtype" here means the extra info held for task and protected subtypes only procedure CreateSubtype (Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; TheSubtype : out Dictionary.Symbol); --# global in out
Dictionary.Dict; --# derives Dictionary.Dict, --# TheSubtype from Comp_Unit, --# Dictionary.Dict, --# Loc; procedure SetSubtypeInfoPriority (TheSubtype : in Dictionary.Symbol; Priority : in LexTokenManager.Lex_String); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Priority, --# TheSubtype; procedure SetSubtypeInfoFirstConstraint (TheSubtype : in Dictionary.Symbol; TheConstraint : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# TheConstraint, --# TheSubtype; procedure SetSubtypeInfoLastConstraint (TheSubtype : in Dictionary.Symbol; TheConstraint : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# TheConstraint, --# TheSubtype; function GetSubtypeInfoPriority (TheSubtype : Dictionary.Symbol) return LexTokenManager.Lex_String; --# global in Dictionary.Dict; function GetSubtypeInfoFirstConstraint (TheSubtype : Dictionary.Symbol) return Dictionary.Symbol; --# global in Dictionary.Dict; function GetSubtypeInfoLastConstraint (TheSubtype : Dictionary.Symbol) return Dictionary.Symbol; --# global in Dictionary.Dict; procedure CreateDiscriminantConstraint (Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; TheDiscriminantConstraint : out Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# TheDiscriminantConstraint from Comp_Unit, --# Dictionary.Dict, --# Loc; procedure SetDiscriminantConstraintStaticValue (TheConstraint : in Dictionary.Symbol; TheValue : in LexTokenManager.Lex_String); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# TheConstraint, --# TheValue; procedure SetDiscriminantConstraintAccessedObject (TheConstraint : in Dictionary.Symbol; TheObject : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# TheConstraint, --# TheObject; procedure SetNextDiscriminantConstraint (TheConstraint : in Dictionary.Symbol; Next : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Next, --# TheConstraint; function GetDiscriminantConstraintStaticValue (TheConstraint : Dictionary.Symbol) return LexTokenManager.Lex_String; --# global in Dictionary.Dict; function GetDiscriminantConstraintAccessedObject (TheConstraint : Dictionary.Symbol) return Dictionary.Symbol; --# global in Dictionary.Dict; function GetNextDiscriminantConstraint (TheConstraint : Dictionary.Symbol) return Dictionary.Symbol; --# global in Dictionary.Dict; procedure CreateInterruptStreamMapping (TheHandler : in LexTokenManager.Lex_String; TheInterruptStream : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; TheMapping : out Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Comp_Unit, --# Loc, --# TheHandler, --# TheInterruptStream & --# TheMapping from Comp_Unit, --# Dictionary.Dict, --# Loc; procedure SetNextInterruptStreamMapping (Current, Next : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Current, --# Next; function GetNextInterruptStreamMapping (TheMapping : Dictionary.Symbol) return Dictionary.Symbol; --# global in Dictionary.Dict; function GetInterruptStreamMappingHandler (TheMapping : Dictionary.Symbol) return LexTokenManager.Lex_String; function GetInterruptStreamMappingStream (TheMapping : Dictionary.Symbol) return LexTokenManager.Lex_String; procedure 
CreateProtectedInfo (Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; InfoSym : out Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# InfoSym from Comp_Unit, --# Dictionary.Dict, --# Loc; procedure CreateSuspendsListItem (ThePOorSO : in Dictionary.Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; SuspendsListItem : out Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Comp_Unit, --# Loc, --# ThePOorSO & --# SuspendsListItem from Comp_Unit, --# Dictionary.Dict, --# Loc; procedure SetNextSuspendsListItem (Current, Next : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Current, --# Next; function GetSuspendsListItem (SuspendsListItem : Dictionary.Symbol) return Dictionary.Symbol; --# global in Dictionary.Dict; procedure SetSuspendsListItemIsAccountedFor (SuspendsListItem : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# SuspendsListItem; function GetSuspendsListItemIsAccountedFor (SuspendsListItem : Dictionary.Symbol) return Boolean; --# global in Dictionary.Dict; function GetNextSuspendsListItem (SuspendsListItem : Dictionary.Symbol) return Dictionary.Symbol; --# global in Dictionary.Dict; procedure CreateVirtualElement (The_Variable : in Variable_Info_Ref; TheOwner : in Dictionary.Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; TheVirtualElement : out Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Comp_Unit, --# Loc, --# TheOwner, --# The_Variable & --# TheVirtualElement from Comp_Unit, --# Dictionary.Dict, --# Loc; function GetVirtualElementVariable (VirtualElement : Dictionary.Symbol) return Variable_Info_Ref; --# global in Dictionary.Dict; function GetVirtualElementOwner (VirtualElement : Dictionary.Symbol) return Dictionary.Symbol; --# global in Dictionary.Dict; procedure SetVirtualElementSeenByOwner (VirtualElement : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# VirtualElement; function GetVirtualElementSeenByOwner (VirtualElement : Dictionary.Symbol) return Boolean; --# global in Dictionary.Dict; procedure SetNextVirtualElement (Current, Next : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Current, --# Next; function GetNextVirtualElement (VirtualElement : Dictionary.Symbol) return Dictionary.Symbol; --# global in Dictionary.Dict; procedure CreateLoop (Region : in Dictionary.Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; TheLoop : out Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Comp_Unit, --# Loc, --# Region & --# TheLoop from Comp_Unit, --# Dictionary.Dict, --# Loc; procedure SetLoopName (Name : in LexTokenManager.Lex_String; TheLoop : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Name, --# TheLoop; procedure SetLoopParameter (ForLoop, LoopParameter : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# ForLoop, --# LoopParameter; procedure SetLoopOnEntryVars (ForLoop, OnEntryVars : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# ForLoop, --# OnEntryVars; procedure SetLoopExitExpn 
(ForLoop : in Dictionary.Symbol; Expn : in Natural); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Expn, --# ForLoop; procedure SetLoopEntryExpn (ForLoop : in Dictionary.Symbol; Expn : in Natural); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Expn, --# ForLoop; procedure SetLoopHasExits (TheLoop : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# TheLoop; procedure SetNextLoop (Current, Next : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Current, --# Next; function GetLoopName (TheLoop : Dictionary.Symbol) return LexTokenManager.Lex_String; --# global in Dictionary.Dict; function GetLoopRegion (TheLoop : Dictionary.Symbol) return Dictionary.Symbol; --# global in Dictionary.Dict; function GetLoopOnEntryVars (TheLoop : Dictionary.Symbol) return Dictionary.Symbol; --# global in Dictionary.Dict; function GetLoopExitExpn (TheLoop : Dictionary.Symbol) return Natural; --# global in Dictionary.Dict; function GetLoopEntryExpn (TheLoop : Dictionary.Symbol) return Natural; --# global in Dictionary.Dict; function GetLoopParameter (TheLoop : Dictionary.Symbol) return Dictionary.Symbol; --# global in Dictionary.Dict; function GetLoopHasExits (TheLoop : Dictionary.Symbol) return Boolean; --# global in Dictionary.Dict; function GetNextLoop (TheLoop : Dictionary.Symbol) return Dictionary.Symbol; --# global in Dictionary.Dict; procedure CreateTaskInfo (Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; InfoSym : out Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# InfoSym from Comp_Unit, --# Dictionary.Dict, --# Loc; procedure CreateLoopParameter (Name : in LexTokenManager.Lex_String; Type_Mark : in Type_Info_Ref; TheLoop : in Dictionary.Symbol; HasStaticRange : in Boolean; IsReverse : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; LoopParameter : out Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict, --# LoopParameter from Comp_Unit, --# Dictionary.Dict, --# HasStaticRange, --# IsReverse, --# Loc, --# Name, --# TheLoop, --# Type_Mark; function GetLoopParameterName (LoopParameter : Dictionary.Symbol) return LexTokenManager.Lex_String; --# global in Dictionary.Dict; function GetLoopParameterType (LoopParameter : Dictionary.Symbol) return Type_Info_Ref; --# global in Dictionary.Dict; function GetLoopParameterHasStaticRange (LoopParameter : Dictionary.Symbol) return Boolean; --# global in Dictionary.Dict; function GetLoopParameterIsReverse (LoopParameter : Dictionary.Symbol) return Boolean; --# global in Dictionary.Dict; function GetLoopParameterLoop (LoopParameter : Dictionary.Symbol) return Dictionary.Symbol; --# global in Dictionary.Dict; procedure CreateLoopEntryVariable (OriginalVar : in Dictionary.Symbol; TheLoop : in Dictionary.Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; LoopEntryVariable : out Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Comp_Unit, --# Loc, --# OriginalVar, --# TheLoop & --# LoopEntryVariable from Comp_Unit, --# Dictionary.Dict, --# Loc; procedure SetLoopEntryVariableNext (LoopEntryVariable : in Dictionary.Symbol; Next : in Dictionary.Symbol); --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# LoopEntryVariable, --# Next; function 
GetLoopEntryVariableOriginalVar (LoopEntryVariable : Dictionary.Symbol) return Dictionary.Symbol; --# global in Dictionary.Dict; function GetLoopEntryVariableTheLoop (LoopEntryVariable : Dictionary.Symbol) return Dictionary.Symbol; --# global in Dictionary.Dict; function GetLoopEntryVariableNext (LoopEntryVariable : Dictionary.Symbol) return Dictionary.Symbol; --# global in Dictionary.Dict; private --# hide RawDict; type Declaration_Info; type Declaration_Info_Ref is access all Declaration_Info; Null_Declaration_Info_Ref : constant Declaration_Info_Ref := null; -------------------------------------------------------------------------------- type Enumeration_Literal_Info; type Enumeration_Literal_Info_Ref is access all Enumeration_Literal_Info; Null_Enumeration_Literal_Info_Ref : constant Enumeration_Literal_Info_Ref := null; -------------------------------------------------------------------------------- type Array_Index_Info; type Array_Index_Info_Ref is access all Array_Index_Info; Null_Array_Index_Info_Ref : constant Array_Index_Info_Ref := null; -------------------------------------------------------------------------------- type Record_Component_Info; type Record_Component_Info_Ref is access all Record_Component_Info; Null_Record_Component_Info_Ref : constant Record_Component_Info_Ref := null; -------------------------------------------------------------------------------- type Subcomponent_Info; type Subcomponent_Info_Ref is access all Subcomponent_Info; Null_Subcomponent_Info_Ref : constant Subcomponent_Info_Ref := null; -------------------------------------------------------------------------------- type Type_Info; type Type_Info_Ref is access all Type_Info; Null_Type_Info_Ref : constant Type_Info_Ref := null; -------------------------------------------------------------------------------- type Constant_Info; type Constant_Info_Ref is access all Constant_Info; Null_Constant_Info_Ref : constant Constant_Info_Ref := null; -------------------------------------------------------------------------------- type Variable_Info; type Variable_Info_Ref is access all Variable_Info; Null_Variable_Info_Ref : constant Variable_Info_Ref := null; -------------------------------------------------------------------------------- type Global_Variable_Info; type Global_Variable_Info_Ref is access all Global_Variable_Info; Null_Global_Variable_Info_Ref : constant Global_Variable_Info_Ref := null; -------------------------------------------------------------------------------- type Own_Variable_Info; type Own_Variable_Info_Ref is access all Own_Variable_Info; Null_Own_Variable_Info_Ref : constant Own_Variable_Info_Ref := null; -------------------------------------------------------------------------------- type Quantified_Variable_Info; type Quantified_Variable_Info_Ref is access all Quantified_Variable_Info; Null_Quantified_Variable_Info_Ref : constant Quantified_Variable_Info_Ref := null; -------------------------------------------------------------------------------- type Implicit_Return_Variable_Info; type Implicit_Return_Variable_Info_Ref is access all Implicit_Return_Variable_Info; Null_Implicit_Return_Variable_Info_Ref : constant Implicit_Return_Variable_Info_Ref := null; -------------------------------------------------------------------------------- type Implicit_In_Stream_Info; type Implicit_In_Stream_Info_Ref is access all Implicit_In_Stream_Info; Null_Implicit_In_Stream_Info_Ref : constant Implicit_In_Stream_Info_Ref := null; 
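--  Note: an Implicit_In_Stream_Info record is only associated with an unmoded,
--  protected own variable; see the Implicit_In_Stream component of
--  Own_Variable_Info and the full declaration of Implicit_In_Stream_Info below.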
-------------------------------------------------------------------------------- type Rule_Policy_Info; type Rule_Policy_Info_Ref is access all Rule_Policy_Info; Null_Rule_Policy_Info_Ref : constant Rule_Policy_Info_Ref := null; -------------------------------------------------------------------------------- type Constituent_Info; type Constituent_Info_Ref is access all Constituent_Info; Null_Constituent_Info_Ref : constant Constituent_Info_Ref := null; -------------------------------------------------------------------------------- type Context_Clause_Info; type Context_Clause_Info_Ref is access all Context_Clause_Info; Null_Context_Clause_Info_Ref : constant Context_Clause_Info_Ref := null; -------------------------------------------------------------------------------- type Use_Type_Clause_Info; type Use_Type_Clause_Info_Ref is access all Use_Type_Clause_Info; Null_Use_Type_Clause_Info_Ref : constant Use_Type_Clause_Info_Ref := null; -------------------------------------------------------------------------------- type Parameter_Constraint_Info; type Parameter_Constraint_Info_Ref is access all Parameter_Constraint_Info; Null_Parameter_Constraint_Info_Ref : constant Parameter_Constraint_Info_Ref := null; -------------------------------------------------------------------------------- type Subprogram_Parameter_Info; type Subprogram_Parameter_Info_Ref is access all Subprogram_Parameter_Info; Null_Subprogram_Parameter_Info_Ref : constant Subprogram_Parameter_Info_Ref := null; -------------------------------------------------------------------------------- type Subprogram_Info; type Subprogram_Info_Ref is access all Subprogram_Info; Null_Subprogram_Info_Ref : constant Subprogram_Info_Ref := null; -------------------------------------------------------------------------------- type Operator_Info; type Operator_Info_Ref is access all Operator_Info; Null_Operator_Info_Ref : constant Operator_Info_Ref := null; -------------------------------------------------------------------------------- type Dependency_Info; type Dependency_Info_Ref is access all Dependency_Info; Null_Dependency_Info_Ref : constant Dependency_Info_Ref := null; -------------------------------------------------------------------------------- type Package_Info; type Package_Info_Ref is access all Package_Info; Null_Package_Info_Ref : constant Package_Info_Ref := null; -------------------------------------------------------------------------------- type Generic_Parameter_Info; type Generic_Parameter_Info_Ref is access all Generic_Parameter_Info; Null_Generic_Parameter_Info_Ref : constant Generic_Parameter_Info_Ref := null; -------------------------------------------------------------------------------- type Generic_Unit_Info; type Generic_Unit_Info_Ref is access all Generic_Unit_Info; Null_Generic_Unit_Info_Ref : constant Generic_Unit_Info_Ref := null; -------------------------------------------------------------------------------- type Generic_Association_Info; type Generic_Association_Info_Ref is access all Generic_Association_Info; Null_Generic_Association_Info_Ref : constant Generic_Association_Info_Ref := null; -------------------------------------------------------------------------------- type Symbols is array (Dictionary.Abstractions) of Dictionary.Symbol; Null_Symbols : constant Symbols := Symbols'(Dictionary.Abstractions => Dictionary.NullSymbol); -------------------------------------------------------------------------------- type Booleans is array (Dictionary.Abstractions) of Boolean; type Constraints is array 
(Dictionary.Abstractions) of ExaminerConstants.RefType; type Global_Variables_T is array (Dictionary.Abstractions) of Global_Variable_Info_Ref; type Implicit_Return_Variables_T is array (Dictionary.Abstractions) of Implicit_Return_Variable_Info_Ref; type Dependencies_T is array (Dictionary.Abstractions) of Dependency_Info_Ref; -------------------------------------------------------------------------------- type Declaration_Info is record Self : Dictionary.Symbol; Context : Dictionary.Contexts; Scope : Dictionary.Scopes; Item : Dictionary.Symbol; Next : Declaration_Info_Ref; end record; type Enumeration_Literal_Info is record Self : Dictionary.Symbol; Name : LexTokenManager.Lex_String; Position : LexTokenManager.Lex_String; The_Type : Type_Info_Ref; Next : Enumeration_Literal_Info_Ref; end record; type Array_Index_Info is record Self : Dictionary.Symbol; Index_Type : Type_Info_Ref; Next : Array_Index_Info_Ref; end record; type Record_Component_Info is record Self : Dictionary.Symbol; Name : LexTokenManager.Lex_String; Record_Type : Type_Info_Ref; Component_Type : Type_Info_Ref; Inherited_Field : Boolean; -- tag Next : Record_Component_Info_Ref; end record; type Subcomponent_Info is record Self : Dictionary.Symbol; Object : Dictionary.Symbol; Record_Component : Record_Component_Info_Ref; Subcomponents : Subcomponent_Info_Ref; Marked_Valid : Boolean; Next : Subcomponent_Info_Ref; end record; type Type_Info (Is_Announcement_Or_Private : Boolean) is record Self : Dictionary.Symbol; Name : LexTokenManager.Lex_String; Parent : Type_Info_Ref; Declaration : Declaration_Info_Ref; Is_Full_Range_Subtype : Boolean; Discriminant : Dictionary.Type_Discriminant; Is_Private : Dictionary.TriState; Is_Limited : Dictionary.TriState; Is_Limited_Private : Boolean; Is_Derived : Boolean; Is_Tagged : Boolean; Is_Own_Var_Type : Boolean; -- type can be used for abstract view of own variable Is_Atomic : Boolean; Equality_Defined : Boolean; Contains_Float : Boolean; Constrained : Boolean; Static : Boolean; Wellformed : Boolean; Base_Type : Type_Info_Ref; Extends : Type_Info_Ref; -- root type for tagged type extension Accesses : Type_Info_Ref; Lower : LexTokenManager.Lex_String; Upper : LexTokenManager.Lex_String; Modulus : LexTokenManager.Lex_String; Error_Bound : LexTokenManager.Lex_String; -- linked list of indexes for an array type, or record -- components for a record type Head : Dictionary.Symbol; Tail : Dictionary.Symbol; Component_Type : Type_Info_Ref; The_Virtual_Element_List : Dictionary.Symbol; Ancillary_Fields : Dictionary.Symbol; -- points to ProtectedInfo, TaskInfo or SubtypeInfo record Size_Attribute : LexTokenManager.Lex_String; case Is_Announcement_Or_Private is when True => Is_Announcement : Boolean; Announcement_Declaration : Declaration_Info_Ref; Private_Declaration : Declaration_Info_Ref; when False => Kind_Of_Generic : Dictionary.Generic_Type_Discriminant; end case; end record; type Constant_Info (Is_Deferred : Boolean) is record Self : Dictionary.Symbol; Name : LexTokenManager.Lex_String; Static : Boolean; Type_Mark : Type_Info_Ref; Value : LexTokenManager.Lex_String; Exp_Is_Wellformed : Boolean; Exp_Node : ExaminerConstants.RefType; -- points back to GenericParameterInfo if const is generic object Associated_Generic_Parameter : Generic_Parameter_Info_Ref; First_Rule_Policy : Rule_Policy_Info_Ref; Last_Rule_Policy : Rule_Policy_Info_Ref; Declaration : Declaration_Info_Ref; case Is_Deferred is when True => Deferred_Declaration : Declaration_Info_Ref; when False => null; end case; end record; type 
Variable_Info is record Self : Dictionary.Symbol; Name : LexTokenManager.Lex_String; Type_Mark : Type_Info_Ref; Abstract_Type_Mark : Type_Info_Ref; Initialized : Boolean; Has_Address_Clause : Boolean; Has_Pragma_Import : Boolean; Is_Aliased : Boolean; Marked_Valid : Boolean; Exp_Node : ExaminerConstants.RefType; Declaration : Declaration_Info_Ref; Global_References : Global_Variables_T; Own_Variable : Own_Variable_Info_Ref; Own_Task : Dictionary.Symbol; Virtual_Element : Dictionary.Symbol; Constituent : Constituent_Info_Ref; Subcomponents : Subcomponent_Info_Ref; end record; type Global_Variable_Info (Kind_Of_Global_Variable : Kind_Of_Global_Variable_T) is record Self : Dictionary.Symbol; Mode : Modes; Exported : Boolean; Imported : Boolean; Prefix_Needed : Boolean; Dependencies : Dependencies_T; Next_Subprogram : Global_Variable_Info_Ref; Next_Variable : Global_Variable_Info_Ref; case Kind_Of_Global_Variable is when Subprogram_Parameter_Item => Subprogram_With_Parameter : Subprogram_Info_Ref; Subprogram_Parameter : Subprogram_Parameter_Info_Ref; when Subprogram_Variable_Item => Subprogram_With_Variable : Subprogram_Info_Ref; Variable : Variable_Info_Ref; when Task_Type_Variable_Item => Task_Type : Type_Info_Ref; Task_Type_Variable : Variable_Info_Ref; end case; end record; type Own_Variable_Info is record Self : Dictionary.Symbol; Variable : Variable_Info_Ref; Owner : Dictionary.Symbol; Announced : Boolean; Typed : Boolean; Initialized : Boolean; Mode : Dictionary.Modes; Is_Protected : Boolean; Is_Interrupt_Stream : Boolean; Interrupt_Stream_Mappings : Dictionary.Symbol; Unprotected_Reference : Dictionary.Symbol; Suspends_Reference : Dictionary.Symbol; Implicit_In_Stream : Implicit_In_Stream_Info_Ref; -- only set for un-moded, protected own variables Priority : LexTokenManager.Lex_String; -- StorageRep of number Integrity : LexTokenManager.Lex_String; -- StorageRep of number Suspendable : Boolean; Interruptable : Boolean; Constituents : Constituent_Info_Ref; Next : Own_Variable_Info_Ref; end record; type Quantified_Variable_Info is record Self : Dictionary.Symbol; Name : LexTokenManager.Lex_String; Type_Mark : Type_Info_Ref; Parameter_Constraint : Parameter_Constraint_Info_Ref; Region : Dictionary.Symbol; end record; type Implicit_Return_Variable_Info is record Self : Dictionary.Symbol; Name : LexTokenManager.Lex_String; The_Function : Subprogram_Info_Ref; end record; type Implicit_In_Stream_Info is record Self : Dictionary.Symbol; Own_Variable : Own_Variable_Info_Ref; -- points back to the own variable with which this stream is associated end record; type Rule_Policy_Info is record Self : Dictionary.Symbol; Scope : Dictionary.Scopes; Value : Dictionary.Rule_Policies; Next : Rule_Policy_Info_Ref; end record; type Constituent_Info is record Self : Dictionary.Symbol; Own_Variable : Own_Variable_Info_Ref; Variable : Variable_Info_Ref; Mode : Modes; Next : Constituent_Info_Ref; end record; type Context_Clause_Info (Is_Subprogram : Boolean) is record Self : Dictionary.Symbol; Explicit : Boolean; -- To allow for implicit withing of parents when child with'd Next : Context_Clause_Info_Ref; case Is_Subprogram is when False => The_Package : Package_Info_Ref; when True => The_Subprogram : Subprogram_Info_Ref; end case; end record; type Use_Type_Clause_Info is record The_Type : Type_Info_Ref; Next : Use_Type_Clause_Info_Ref; end record; -- If a subprogram parameter is unconstrained we need symbols to represent the anonymous constraints that -- are provided by the calling environment. 
These are Parameter_Constraint symbols and they are in a linked list -- because the unconstrained formal parameter may have more than one dimension. type Parameter_Constraint_Info is record Self : Dictionary.Symbol; Subprogram_Parameter : Subprogram_Parameter_Info_Ref; -- links back to SubprogramParameterInfo Dimension : Positive; Next : Parameter_Constraint_Info_Ref; -- links to another ParameterConstraintInfo for next dimension end record; type Subprogram_Parameter_Info is record Self : Dictionary.Symbol; Name : LexTokenManager.Lex_String; Subprogram : Subprogram_Info_Ref; Type_Mark : Type_Info_Ref; Mode : Modes; Exported : Booleans; Imported : Booleans; Dependencies : Dependencies_T; Global_References : Global_Variables_T; Subcomponents : Subcomponent_Info_Ref; Index_Constraints : Parameter_Constraint_Info_Ref; -- links to Parameter_Constraint_Info record Next : Subprogram_Parameter_Info_Ref; end record; type Subprogram_Info is record Self : Dictionary.Symbol; Name : LexTokenManager.Lex_String; Implicit_Proof_Functions : Symbols; Implicit_Return_Variables : Implicit_Return_Variables_T; Signature_Is_Wellformed : Booleans; Precondition : Constraints; Postcondition : Constraints; Subprogram_Body : Declaration_Info_Ref; Has_Proper_Body : Boolean; Body_Is_Hidden : Boolean; Has_Second_Annotation : Boolean; Has_Second_Constraint : Boolean; -- true for procedures or entries with no explicit derives annotation Has_Derives_Annotation : Boolean; Has_Delay_Property : Boolean; Delay_Property_Is_Accounted_For : Boolean; -- set for generic subprogram Generic_Unit : Generic_Unit_Info_Ref; -- points to generic subprogram symbol if this subprogram is an instantiation Instantiation_Of : Subprogram_Info_Ref; -- linked list of generic formal/actual pairings, only used if Instantiation_Of is not null First_Generic_Association : Generic_Association_Info_Ref; Last_Generic_Association : Generic_Association_Info_Ref; Suspends_List : Dictionary.Symbol; Is_Entry : Boolean; Entry_Barrier : Dictionary.Symbol; Is_Interrupt_Handler : Boolean; Uses_Unprotected_Variables : Boolean; Uses_Unchecked_Conversion : Boolean; Assigns_From_External : Boolean; Type_Mark : Type_Info_Ref; With_Clauses : Context_Clause_Info_Ref; Use_Type_Clauses : Use_Type_Clause_Info_Ref; Inherit_Clauses : Context_Clause_Info_Ref; First_Subprogram_Parameter : Subprogram_Parameter_Info_Ref; Last_Subprogram_Parameter : Subprogram_Parameter_Info_Ref; First_Global_Variable : Global_Variables_T; Last_Global_Variable : Global_Variables_T; Renaming_Declarations : Declaration_Info_Ref; First_Declaration : Declaration_Info_Ref; Last_Declaration : Declaration_Info_Ref; First_Loop : Dictionary.Symbol; Last_Loop : Dictionary.Symbol; Specification : Declaration_Info_Ref; end record; type Operator_Info (Is_Binary : Boolean) is record Self : Dictionary.Symbol; Name : SP_Symbols.SP_Symbol; case Is_Binary is when False => Operand : Type_Info_Ref; when True => Left_Operand : Type_Info_Ref; Right_Operand : Type_Info_Ref; end case; end record; type Dependency_Info (Kind_Of_Dependency : Kind_Of_Dependency_T) is record Self : Dictionary.Symbol; Next : Dependency_Info_Ref; case Kind_Of_Dependency is when Dependency_Parameter_Item => Import_Parameter : Subprogram_Parameter_Info_Ref; when Dependency_Variable_Item => Import_Variable : Variable_Info_Ref; end case; end record; type Part_Info is record With_Clauses : Context_Clause_Info_Ref; Use_Type_Clauses : Use_Type_Clause_Info_Ref; Renaming_Declarations : Declaration_Info_Ref; First_Declaration : Declaration_Info_Ref; 
Last_Declaration : Declaration_Info_Ref; end record; type Child_Info is record First_Child : Package_Info_Ref; Last_Child : Package_Info_Ref; end record; type Package_Info is record Self : Dictionary.Symbol; Name : LexTokenManager.Lex_String; Package_Body : Declaration_Info_Ref; Package_Has_Proper_Body : Boolean; Inherit_Clauses : Context_Clause_Info_Ref; Own_Variables : Own_Variable_Info_Ref; Task_List : Dictionary.Symbol; First_Loop : Dictionary.Symbol; Last_Loop : Dictionary.Symbol; Specification : Declaration_Info_Ref; Visible_Part : Part_Info; Local_Part : Part_Info; Private_Part : Part_Info; Elaborate_Body_Found : Boolean; Parent : Package_Info_Ref; Next_Sibling : Package_Info_Ref; Extends : Package_Info_Ref; -- type extension/tagged types Declares_Tagged_Type : Boolean; Private_Children : Child_Info; Public_Children : Child_Info; Is_Private : Boolean; Generic_Unit : Generic_Unit_Info_Ref; -- set for generic package end record; type Generic_Parameter_Info (Kind : Dictionary.Generic_Parameter_Kind) is record Self : Dictionary.Symbol; Owning_Generic : Generic_Unit_Info_Ref; Next : Generic_Parameter_Info_Ref; case Kind is when Dictionary.Generic_Type_Parameter => Type_Mark : Type_Info_Ref; when Dictionary.Generic_Object_Parameter => Object : Constant_Info_Ref; end case; end record; type Generic_Unit_Info (Kind : Dictionary.Generic_Kind) is record Self : Dictionary.Symbol; Scope : Scopes; First_Declaration : Declaration_Info_Ref; Last_Declaration : Declaration_Info_Ref; -- linked list of generic formals First_Generic_Parameter : Generic_Parameter_Info_Ref; Last_Generic_Parameter : Generic_Parameter_Info_Ref; case Kind is when Dictionary.Generic_Of_Subprogram => Owning_Subprogram : Subprogram_Info_Ref; when Dictionary.Generic_Of_Package => Owning_Package : Package_Info_Ref; end case; end record; type Generic_Association_Info (Is_Object : Boolean) is record Next : Generic_Association_Info_Ref; case Is_Object is when False => Formal_Type : Type_Info_Ref; Actual_Type : Type_Info_Ref; when True => Formal_Object : Constant_Info_Ref; Actual_Object : Constant_Info_Ref; end case; end record; end RawDict; function "=" (Left, Right : RawDict.Kind_Of_Global_Variable_T) return Boolean renames RawDict."="; function "=" (Left, Right : RawDict.Declaration_Info_Ref) return Boolean renames RawDict."="; function "=" (Left, Right : RawDict.Enumeration_Literal_Info_Ref) return Boolean renames RawDict."="; function "=" (Left, Right : RawDict.Array_Index_Info_Ref) return Boolean renames RawDict."="; function "=" (Left, Right : RawDict.Record_Component_Info_Ref) return Boolean renames RawDict."="; function "=" (Left, Right : RawDict.Subcomponent_Info_Ref) return Boolean renames RawDict."="; function "=" (Left, Right : RawDict.Type_Info_Ref) return Boolean renames RawDict."="; function "=" (Left, Right : RawDict.Constant_Info_Ref) return Boolean renames RawDict."="; function "=" (Left, Right : RawDict.Variable_Info_Ref) return Boolean renames RawDict."="; function "=" (Left, Right : RawDict.Own_Variable_Info_Ref) return Boolean renames RawDict."="; function "=" (Left, Right : RawDict.Global_Variable_Info_Ref) return Boolean renames RawDict."="; function "=" (Left, Right : RawDict.Quantified_Variable_Info_Ref) return Boolean renames RawDict."="; function "=" (Left, Right : RawDict.Implicit_Return_Variable_Info_Ref) return Boolean renames RawDict."="; function "=" (Left, Right : RawDict.Implicit_In_Stream_Info_Ref) return Boolean renames RawDict."="; function "=" (Left, Right : RawDict.Rule_Policy_Info_Ref) return 
Boolean renames RawDict."="; function "=" (Left, Right : RawDict.Constituent_Info_Ref) return Boolean renames RawDict."="; function "=" (Left, Right : RawDict.Context_Clause_Info_Ref) return Boolean renames RawDict."="; function "=" (Left, Right : RawDict.Use_Type_Clause_Info_Ref) return Boolean renames RawDict."="; function "=" (Left, Right : RawDict.Parameter_Constraint_Info_Ref) return Boolean renames RawDict."="; function "=" (Left, Right : RawDict.Subprogram_Parameter_Info_Ref) return Boolean renames RawDict."="; function "=" (Left, Right : RawDict.Subprogram_Info_Ref) return Boolean renames RawDict."="; function "=" (Left, Right : RawDict.Operator_Info_Ref) return Boolean renames RawDict."="; function "=" (Left, Right : RawDict.Dependency_Info_Ref) return Boolean renames RawDict."="; function "=" (Left, Right : RawDict.Package_Info_Ref) return Boolean renames RawDict."="; function "=" (Left, Right : RawDict.Generic_Parameter_Info_Ref) return Boolean renames RawDict."="; function "=" (Left, Right : RawDict.Generic_Unit_Info_Ref) return Boolean renames RawDict."="; function "=" (Left, Right : RawDict.Generic_Association_Info_Ref) return Boolean renames RawDict."="; type DeclarationDiscriminant is (TypeAnnouncement, TypeDeclaration, PrivateTypeDeclaration); type ProofStatementDiscriminant is (AssertStatement, CheckStatement); type ReferenceDiscriminant is (ReadRef, WriteRef, CallRef, OtherRef); type Universal_Types is (Universal_Integer_Type, Universal_Fixed_Type, Universal_Real_Type); type Universal_Type_List is array (Universal_Types) of RawDict.Type_Info_Ref; type Predefined_Types is ( Predefined_Boolean_Type, Predefined_Integer_Type, Predefined_Long_Integer_Type, Predefined_Float_Type, Predefined_Long_Float_Type, Predefined_Character_Type, Predefined_String_Type, Predefined_Natural_Subtype, Predefined_Positive_Subtype, Predefined_Duration_Type, Predefined_Time_Type, Predefined_Time_Span_Type, Predefined_Suspension_Object_Type); type Predefined_Type_List is array (Predefined_Types) of RawDict.Type_Info_Ref; type Predefined_Literals is (Predefined_False, Predefined_True); type Predefined_Literal_List is array (Predefined_Literals) of RawDict.Enumeration_Literal_Info_Ref; type Type_Set is record Unknown_Type_Mark : RawDict.Type_Info_Ref; The_Universal_Types : Universal_Type_List; The_Predefined_Types : Predefined_Type_List; The_Predefined_Literals : Predefined_Literal_List; end record; type Main_Program_Set is record Subprogram : RawDict.Subprogram_Info_Ref; Priority_Given : Boolean; The_Priority : LexTokenManager.Lex_String; end record; ------------------------------------------------------------------ -- Symbols for the predefined packages. In SPARK83 mode, -- these are set to NullSymbol where such packages do not -- exist. -- -- Note - package System is NOT included here, since some -- older SPARK83 projects supply it explicitly as a shadow -- specification which gets read in via the normal index file -- mechanism. More recent projects specify System in the config -- file. 
In that case, System is added in configfile.adb ------------------------------------------------------------------ type Package_Set is record Package_Standard : RawDict.Package_Info_Ref; -- All Package_ASCII : RawDict.Package_Info_Ref; -- All (but considered obsolete in SPARK95) Package_Ada : RawDict.Package_Info_Ref; -- SPARK95 and RavenSPARK only Package_Ada_Characters : RawDict.Package_Info_Ref; -- SPARK95 and RavenSPARK only Package_Ada_Characters_Latin1 : RawDict.Package_Info_Ref; -- SPARK95 and RavenSPARK only Package_Real_Time : RawDict.Package_Info_Ref; -- RavenSPARK only Package_Synchronous_Task_Control : RawDict.Package_Info_Ref; -- RavenSPARK only Package_Interrupts : RawDict.Package_Info_Ref; -- RavenSPARK only end record; type Subprogram_Set is record STC_Suspend_Until_True : RawDict.Subprogram_Info_Ref; -- STC: Synchronous Task Control Ada_Real_Time_Clock : RawDict.Subprogram_Info_Ref; Unchecked_Conversion : RawDict.Subprogram_Info_Ref; end record; type Dictionaries is record File_Name : E_Strings.T; TemporaryFile : SPARK_IO.File_Type; Symbols : Dynamic_Symbol_Table.T; Types : Type_Set; Main : Main_Program_Set; Packages : Package_Set; Subprograms : Subprogram_Set; Null_Variable : RawDict.Variable_Info_Ref; -- an own variable of package standard used as "data sink" The_Partition : RawDict.Subprogram_Info_Ref; -- a subprogram record used to store the partition-wide flow relation end record; -------------------------------------------------------------------------------- -- VARIABLES -- -------------------------------------------------------------------------------- Dict : Dictionaries; ------------------------------------------------------------------------ --# inherit CommandLineData, --# CommandLineHandler, --# Dictionary, --# E_Strings, --# FileSystem, --# LexTokenManager, --# Maths, --# RawDict, --# ScreenEcho, --# SPARK_IO, --# SystemErrors, --# XMLReport; package TargetData is procedure Read_Target_Data_File; --# global in CommandLineData.Content; --# in out Dictionary.Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict, --# LexTokenManager.State, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# LexTokenManager.State, --# SPARK_IO.File_Sys; procedure Output_Target_Data_File (To_File : in SPARK_IO.File_Type); --# global in CommandLineData.Content; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives LexTokenManager.State, --# SPARK_IO.File_Sys, --# XMLReport.State from CommandLineData.Content, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# To_File, --# XMLReport.State; end TargetData; --------------------------------------------------------------------------- --# inherit CommandLineData, --# Dictionary, --# RawDict, --# SP_Symbols, --# SystemErrors; package Operator_Is_Visible is function Unary_Operator_Is_Visible (Name : SP_Symbols.SP_Symbol; Type_Mark : RawDict.Type_Info_Ref; Scope : Dictionary.Scopes) return Boolean; --# global in CommandLineData.Content; --# in Dictionary.Dict; function Binary_Operator_Is_Visible (Name : SP_Symbols.SP_Symbol; The_Left_Type : RawDict.Type_Info_Ref; The_Right_Type : RawDict.Type_Info_Ref; Scope : Dictionary.Scopes) return Boolean; --# global in CommandLineData.Content; --# in Dictionary.Dict; end Operator_Is_Visible; -------------------------------------------------------------------------------- package body Dynamic_Symbol_Table is separate; package body RawDict is separate;
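--  Illustrative sketch only: the RawDict operations declared above follow a
--  regular Create_X / Set_X_Y / Get_X_Y pattern.  The commented fragment below
--  shows how a package entry might typically be created, completed and then
--  queried for its symbol; the actuals shown (Pkg_Name, The_Decl, The_Body,
--  Comp_Unit, Loc, The_Pkg, Pkg_Sym) are hypothetical locals, not part of
--  this unit.
--
--     RawDict.Create_Package (Name            => Pkg_Name,
--                             The_Declaration => The_Decl,
--                             Comp_Unit       => Comp_Unit,
--                             Loc             => Loc,
--                             The_Package     => The_Pkg);
--     RawDict.Set_Package_Body (The_Package => The_Pkg,
--                               The_Body    => The_Body);
--     Pkg_Sym := RawDict.Get_Package_Symbol (The_Pkg);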
-------------------------------------------------------------------------------- -- SYMBOLS and ORDERING -- -------------------------------------------------------------------------------- function Declared_Before (Left, Right : Symbol) return Boolean is begin return Left < Right; end Declared_Before; -------------------------------------------------------------------------------- -- SUBPROGRAMS -- -------------------------------------------------------------------------------- function Is_Null_Symbol (TheSymbol : Symbol) return Boolean is begin return TheSymbol = NullSymbol; end Is_Null_Symbol; -------------------------------------------------------------------------------- function IsDeclaration (TheSymbol : Symbol) return Boolean --# return RawDict.GetSymbolDiscriminant (TheSymbol, Dict) = Declaration_Symbol; is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Declaration_Symbol; end IsDeclaration; -------------------------------------------------------------------------------- function IsEnumerationLiteral (TheSymbol : Symbol) return Boolean --# return RawDict.GetSymbolDiscriminant (TheSymbol, Dict) = Enumeration_Literal_Symbol; is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Enumeration_Literal_Symbol; end IsEnumerationLiteral; -------------------------------------------------------------------------------- function IsArrayIndex (TheSymbol : Symbol) return Boolean --# return RawDict.GetSymbolDiscriminant (TheSymbol, Dict) = Array_Index_Symbol; is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Array_Index_Symbol; end IsArrayIndex; -------------------------------------------------------------------------------- function IsSubcomponent (TheSymbol : Symbol) return Boolean --# return RawDict.GetSymbolDiscriminant (TheSymbol, Dict) = Subcomponent_Symbol; is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Subcomponent_Symbol; end IsSubcomponent; -------------------------------------------------------------------------------- function IsTypeMark (TheSymbol : Symbol) return Boolean --# return RawDict.GetSymbolDiscriminant (TheSymbol, Dict) = Type_Symbol; is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol; end IsTypeMark; -------------------------------------------------------------------------------- function IsConstant (TheSymbol : Symbol) return Boolean --# return RawDict.GetSymbolDiscriminant (TheSymbol, Dict) = Constant_Symbol; is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Constant_Symbol; end IsConstant; -------------------------------------------------------------------------------- function IsVariable (TheSymbol : Symbol) return Boolean --# return RawDict.GetSymbolDiscriminant (TheSymbol, Dict) = Variable_Symbol; is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Variable_Symbol; end IsVariable; -------------------------------------------------------------------------------- function IsGlobalVariable (TheSymbol : Symbol) return Boolean --# return RawDict.GetSymbolDiscriminant (TheSymbol, Dict) = Global_Variable_Symbol; is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Global_Variable_Symbol; end IsGlobalVariable; -------------------------------------------------------------------------------- function IsQuantifiedVariable (TheSymbol : Symbol) return Boolean --# return RawDict.GetSymbolDiscriminant (TheSymbol, Dict) = Quantified_Variable_Symbol; is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Quantified_Variable_Symbol; end IsQuantifiedVariable; 
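-- Descriptive note: the Is<Kind> predicates above and below all follow the same
-- pattern: compare RawDict.GetSymbolDiscriminant of the symbol with the relevant
-- discriminant value, with the --# return annotation stating the same condition
-- against Dict.  Illustrative sketch only (Sym is a hypothetical Symbol value):
-- a caller would typically guard a view conversion in the style of
--    if IsVariable (Sym) then ... RawDict.Get_Variable_Info_Ref (Item => Sym) ...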
-------------------------------------------------------------------------------- function IsImplicitReturnVariable (TheSymbol : Symbol) return Boolean --# return RawDict.GetSymbolDiscriminant (TheSymbol, Dict) = Implicit_Return_Variable_Symbol; is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Implicit_Return_Variable_Symbol; end IsImplicitReturnVariable; -------------------------------------------------------------------------------- function IsImplicitInStream (TheSymbol : Symbol) return Boolean --# return RawDict.GetSymbolDiscriminant (TheSymbol, Dict) = Implicit_In_Stream_Symbol; is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Implicit_In_Stream_Symbol; end IsImplicitInStream; -------------------------------------------------------------------------------- function IsRulePolicy (TheSymbol : Symbol) return Boolean --# return RawDict.GetSymbolDiscriminant (TheSymbol, Dict) = Rule_Policy_Symbol; is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Rule_Policy_Symbol; end IsRulePolicy; -------------------------------------------------------------------------------- function IsParameterConstraint (TheSymbol : Symbol) return Boolean --# return RawDict.GetSymbolDiscriminant (TheSymbol, Dict) = Parameter_Constraint_Symbol; is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Parameter_Constraint_Symbol; end IsParameterConstraint; -------------------------------------------------------------------------------- function IsSubprogramParameter (TheSymbol : Symbol) return Boolean --# return RawDict.GetSymbolDiscriminant (TheSymbol, Dict) = Subprogram_Parameter_Symbol; is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Subprogram_Parameter_Symbol; end IsSubprogramParameter; -------------------------------------------------------------------------------- function IsSubprogram (TheSymbol : Symbol) return Boolean --# return RawDict.GetSymbolDiscriminant (TheSymbol, Dict) = Subprogram_Symbol; is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Subprogram_Symbol; end IsSubprogram; -------------------------------------------------------------------------------- function IsOperator (TheSymbol : Symbol) return Boolean --# return RawDict.GetSymbolDiscriminant (TheSymbol, Dict) = Operator_Symbol; is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Operator_Symbol; end IsOperator; -------------------------------------------------------------------------------- function IsDependency (TheSymbol : Symbol) return Boolean --# return RawDict.GetSymbolDiscriminant (TheSymbol, Dict) = Dependency_Symbol; is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Dependency_Symbol; end IsDependency; -------------------------------------------------------------------------------- function IsPackage (TheSymbol : Symbol) return Boolean --# return RawDict.GetSymbolDiscriminant (TheSymbol, Dict) = Package_Symbol; is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Package_Symbol; end IsPackage; -------------------------------------------------------------------------------- function IsGenericParameterSymbol (TheSymbol : Symbol) return Boolean --# return RawDict.GetSymbolDiscriminant (TheSymbol, Dict) = Generic_Parameter_Symbol; is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Generic_Parameter_Symbol; end IsGenericParameterSymbol; -------------------------------------------------------------------------------- function IsKnownDiscriminant (TheSymbol : Symbol) return Boolean --# return RawDict.GetSymbolDiscriminant (TheSymbol, Dict) = 
KnownDiscriminantSymbol; is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = KnownDiscriminantSymbol; end IsKnownDiscriminant; -------------------------------------------------------------------------------- function IsLoop (TheSymbol : Symbol) return Boolean --# return RawDict.GetSymbolDiscriminant (TheSymbol, Dict) = LoopSymbol; is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = LoopSymbol; end IsLoop; -------------------------------------------------------------------------------- function IsImplicitProofFunction (TheSymbol : Symbol) return Boolean --# return RawDict.GetSymbolDiscriminant (TheSymbol, Dict) = ImplicitProofFunctionSymbol; is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = ImplicitProofFunctionSymbol; end IsImplicitProofFunction; -------------------------------------------------------------------------------- function IsLoopParameter (TheSymbol : Symbol) return Boolean --# return RawDict.GetSymbolDiscriminant (TheSymbol, Dict) = LoopParameterSymbol; is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = LoopParameterSymbol; end IsLoopParameter; -------------------------------------------------------------------------------- -- Are_Equal -------------------------------------------------------------------------------- function Enumeration_Literals_Are_Equal (Left_Symbol, Right_Symbol : Symbol) return Boolean is begin SystemErrors.RT_Assert (C => (Left_Symbol = NullSymbol or else RawDict.GetSymbolDiscriminant (Left_Symbol) = Enumeration_Literal_Symbol) and then (Right_Symbol = NullSymbol or else RawDict.GetSymbolDiscriminant (Right_Symbol) = Enumeration_Literal_Symbol), Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Enumeration_Literals_Are_Equal"); --# accept Flow, 35, Dict, "Importation of the initial value is ineffective OK" & --# Flow, 50, Dict, "Value is not derived from the imported value OK"; return Left_Symbol = Right_Symbol; end Enumeration_Literals_Are_Equal; -------------------------------------------------------------------------------- function Record_Components_Are_Equal (Left_Symbol, Right_Symbol : Symbol) return Boolean is begin SystemErrors.RT_Assert (C => (Left_Symbol = NullSymbol or else RawDict.GetSymbolDiscriminant (Left_Symbol) = Record_Component_Symbol) and then (Right_Symbol = NullSymbol or else RawDict.GetSymbolDiscriminant (Right_Symbol) = Record_Component_Symbol), Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Record_Components_Are_Equal"); --# accept Flow, 35, Dict, "Importation of the initial value is ineffective OK" & --# Flow, 50, Dict, "Value is not derived from the imported value OK"; return Left_Symbol = Right_Symbol; end Record_Components_Are_Equal; -------------------------------------------------------------------------------- function Types_Are_Equal (Left_Symbol, Right_Symbol : Symbol; Full_Range_Subtype : Boolean) return Boolean is Result : Boolean; begin if Full_Range_Subtype then Result := RawDict.Get_First_Constrained_Subtype (Type_Mark => RawDict.Get_Type_Info_Ref (Left_Symbol)) = -- GAA External RawDict.Get_First_Constrained_Subtype (Type_Mark => RawDict.Get_Type_Info_Ref (Right_Symbol)); -- GAA External else SystemErrors.RT_Assert (C => (Left_Symbol = NullSymbol or else RawDict.GetSymbolDiscriminant (Left_Symbol) = Type_Symbol) and then (Right_Symbol = NullSymbol or else RawDict.GetSymbolDiscriminant (Right_Symbol) = Type_Symbol), Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Types_Are_Equal"); Result := Left_Symbol = Right_Symbol; end 
if; return Result; end Types_Are_Equal; -------------------------------------------------------------------------------- function Variables_Are_Equal (Left_Symbol, Right_Symbol : Symbol) return Boolean is begin SystemErrors.RT_Assert (C => (Left_Symbol = NullSymbol or else RawDict.GetSymbolDiscriminant (Left_Symbol) = Variable_Symbol) and then (Right_Symbol = NullSymbol or else RawDict.GetSymbolDiscriminant (Right_Symbol) = Variable_Symbol), Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Variables_Are_Equal"); --# accept Flow, 35, Dict, "Importation of the initial value is ineffective OK" & --# Flow, 50, Dict, "Value is not derived from the imported value OK"; return Left_Symbol = Right_Symbol; end Variables_Are_Equal; -------------------------------------------------------------------------------- function Implicit_Return_Variables_Are_Equal (Left_Symbol, Right_Symbol : Symbol) return Boolean is begin SystemErrors.RT_Assert (C => (Left_Symbol = NullSymbol or else RawDict.GetSymbolDiscriminant (Left_Symbol) = Implicit_Return_Variable_Symbol) and then (Right_Symbol = NullSymbol or else RawDict.GetSymbolDiscriminant (Right_Symbol) = Implicit_Return_Variable_Symbol), Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Implicit_Return_Variables_Are_Equal"); --# accept Flow, 35, Dict, "Importation of the initial value is ineffective OK" & --# Flow, 50, Dict, "Value is not derived from the imported value OK"; return Left_Symbol = Right_Symbol; end Implicit_Return_Variables_Are_Equal; -------------------------------------------------------------------------------- function Subprograms_Are_Equal (Left_Symbol, Right_Symbol : Symbol) return Boolean is begin SystemErrors.RT_Assert (C => (Left_Symbol = NullSymbol or else RawDict.GetSymbolDiscriminant (Left_Symbol) = Subprogram_Symbol) and then (Right_Symbol = NullSymbol or else RawDict.GetSymbolDiscriminant (Right_Symbol) = Subprogram_Symbol), Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Subprograms_Are_Equal"); --# accept Flow, 35, Dict, "Importation of the initial value is ineffective OK" & --# Flow, 50, Dict, "Value is not derived from the imported value OK"; return Left_Symbol = Right_Symbol; end Subprograms_Are_Equal; -------------------------------------------------------------------------------- function Packages_Are_Equal (Left_Symbol, Right_Symbol : Symbol) return Boolean is begin SystemErrors.RT_Assert (C => (Left_Symbol = NullSymbol or else RawDict.GetSymbolDiscriminant (Left_Symbol) = Package_Symbol) and then (Right_Symbol = NullSymbol or else RawDict.GetSymbolDiscriminant (Right_Symbol) = Package_Symbol), Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Packages_Are_Equal"); --# accept Flow, 35, Dict, "Importation of the initial value is ineffective OK" & --# Flow, 50, Dict, "Value is not derived from the imported value OK"; return Left_Symbol = Right_Symbol; end Packages_Are_Equal; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- procedure Set_Current_File_Name (File_Name : in E_Strings.T) is begin if CommandLineData.Content.Plain_Output then Dict.File_Name := FileSystem.Base_Name (Path => File_Name, Suffix => ""); else Dict.File_Name := File_Name; end if; end Set_Current_File_Name; -------------------------------------------------------------------------------- function GetRegion (Scope : Scopes) return Symbol is begin return Scope.The_Unit; end 
GetRegion; -------------------------------------------------------------------------------- -- Get_Unknown_Type_Mark -------------------------------------------------------------------------------- function Get_Unknown_Type_Mark return RawDict.Type_Info_Ref --# global in Dict; is begin return Dict.Types.Unknown_Type_Mark; end Get_Unknown_Type_Mark; -------------------------------------------------------------------------------- function GetUnknownTypeMark return Symbol is begin return RawDict.Get_Type_Symbol (Get_Unknown_Type_Mark); -- GAA External end GetUnknownTypeMark; -------------------------------------------------------------------------------- function IsUnknownTypeMark (TheSymbol : Symbol) return Boolean is begin return RawDict.Get_Type_Info_Ref (TheSymbol) = Get_Unknown_Type_Mark; -- GAA External end IsUnknownTypeMark; ------------------------------------------------------------------------------ -- Is_Subtype ------------------------------------------------------------------------------ function Is_Subtype (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Type_Parent (Type_Mark => Type_Mark) /= RawDict.Null_Type_Info_Ref; end Is_Subtype; -------------------------------------------------------------------------------- function IsSubtype (TheSymbol : Symbol) return Boolean is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then Is_Subtype (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheSymbol)); -- GAA External end IsSubtype; -------------------------------------------------------------------------------- -- Is_Type -------------------------------------------------------------------------------- function Is_Type (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global Dict; is begin return Type_Mark /= Get_Unknown_Type_Mark and then not Is_Subtype (Type_Mark => Type_Mark); end Is_Type; -------------------------------------------------------------------------------- function IsType (TheSymbol : Symbol) return Boolean is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then Is_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheSymbol)); -- GAA External end IsType; -------------------------------------------------------------------------------- -- Is_Task_Type -------------------------------------------------------------------------------- function Is_Task_Type (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return Is_Type (Type_Mark => Type_Mark) and then RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Task_Type_Item; end Is_Task_Type; -------------------------------------------------------------------------------- function IsTaskType (TheSymbol : Symbol) return Boolean is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheSymbol)); -- GAA External end IsTaskType; -------------------------------------------------------------------------------- -- Is_Protected_Type -------------------------------------------------------------------------------- function Is_Protected_Type (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return Is_Type (Type_Mark => Type_Mark) and then RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Protected_Type_Item; end Is_Protected_Type; -------------------------------------------------------------------------------- function IsProtectedType (TheSymbol : Symbol) return Boolean is 
begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then Is_Protected_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheSymbol)); -- GAA External end IsProtectedType; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function IsProtectedTypeMark (TheSymbol : Symbol) return Boolean is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheSymbol)) = Protected_Type_Item; -- GAA External end IsProtectedTypeMark; -------------------------------------------------------------------------------- function Set_Visibility (The_Visibility : Visibility; The_Unit : Symbol) return Scopes is begin return Scopes'(The_Visibility => The_Visibility, The_Unit => The_Unit); end Set_Visibility; -------------------------------------------------------------------------------- function Get_Visibility (Scope : Scopes) return Visibility is begin return Scope.The_Visibility; end Get_Visibility; -------------------------------------------------------------------------------- procedure Add_Declaration (Comp_Unit : in ContextManager.UnitDescriptors; Loc : in Location; Scope : in Scopes; Context : in Contexts; The_Declaration : out RawDict.Declaration_Info_Ref) --# global in out Dict; --# derives Dict, --# The_Declaration from Comp_Unit, --# Context, --# Dict, --# Loc, --# Scope; --# post The_Declaration /= RawDict.Null_Declaration_Info_Ref; is separate; -------------------------------------------------------------------------------- -- Type_Is_Private -------------------------------------------------------------------------------- function Type_Is_Private (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Type_Private_Type_Declaration (Type_Mark => RawDict.Get_First_Constrained_Subtype (Type_Mark => Type_Mark)) /= RawDict.Null_Declaration_Info_Ref; end Type_Is_Private; -------------------------------------------------------------------------------- function TypeIsPrivate (TheType : Symbol) return Boolean is begin return Type_Is_Private (Type_Mark => RawDict.Get_Type_Info_Ref (TheType)); -- GAA External end TypeIsPrivate; -------------------------------------------------------------------------------- -- Type_Is_Tagged -------------------------------------------------------------------------------- function Type_Is_Tagged (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Type_Is_Tagged (Type_Mark => Type_Mark) or else RawDict.Get_Type_Extends (Type_Mark => Type_Mark) /= RawDict.Null_Type_Info_Ref; -- derived type of tagged type also tagged end Type_Is_Tagged; -------------------------------------------------------------------------------- function TypeIsTagged (TheType : Symbol) return Boolean is begin return Type_Is_Tagged (Type_Mark => RawDict.Get_Type_Info_Ref (TheType)); -- GAA External end TypeIsTagged; -------------------------------------------------------------------------------- -- Type_Is_Extended_Tagged -------------------------------------------------------------------------------- function Type_Is_Extended_Tagged (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Type_Extends (Type_Mark => Type_Mark) /= RawDict.Null_Type_Info_Ref; end Type_Is_Extended_Tagged; 
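-- Descriptive note: taken together, Type_Is_Tagged holds both for a type declared
-- as tagged and for one that extends another type (Get_Type_Extends is non-null),
-- whereas Type_Is_Extended_Tagged holds only in the extension case.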
-------------------------------------------------------------------------------- function TypeIsExtendedTagged (TheType : Symbol) return Boolean is begin return Type_Is_Extended_Tagged (Type_Mark => RawDict.Get_Type_Info_Ref (TheType)); -- GAA External end TypeIsExtendedTagged; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function TypeIsTask (TheType : Symbol) return Boolean is begin return RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (TheType)) = Task_Type_Item; -- GAA External end TypeIsTask; -------------------------------------------------------------------------------- function TypeIsAccess (TheType : Symbol) return Boolean is begin return RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (TheType)) = Access_Type_Item; -- GAA External end TypeIsAccess; -------------------------------------------------------------------------------- -- Is_An_Extension_Of -------------------------------------------------------------------------------- function Is_An_Extension_Of (Root_Type, Extended_Type : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is Result : Boolean := False; Extended_Local : RawDict.Type_Info_Ref; begin if Type_Is_Tagged (Type_Mark => Root_Type) and then -- to trap null symbol Type_Is_Extended_Tagged (Type_Mark => Extended_Type) then Extended_Local := Extended_Type; loop Result := Root_Type = Extended_Local; exit when Result; Extended_Local := RawDict.Get_Type_Extends (Type_Mark => Extended_Local); exit when Extended_Local = RawDict.Null_Type_Info_Ref; end loop; end if; return Result; end Is_An_Extension_Of; -------------------------------------------------------------------------------- function IsAnExtensionOf (RootType, ExtendedType : Symbol) return Boolean is begin return RawDict.GetSymbolDiscriminant (ExtendedType) = Type_Symbol and then Is_An_Extension_Of (Root_Type => RawDict.Get_Type_Info_Ref (RootType), -- GAA External Extended_Type => RawDict.Get_Type_Info_Ref (Item => ExtendedType)); -- GAA External end IsAnExtensionOf; -------------------------------------------------------------------------------- -- TypeIsAnnounced -------------------------------------------------------------------------------- function TypeIsAnnounced (TheType : Symbol) return Boolean is function Type_Is_Announced (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Type_Announcement (Type_Mark => Type_Mark) /= RawDict.Null_Declaration_Info_Ref; end Type_Is_Announced; begin -- TypeIsAnnounced return Type_Is_Announced (Type_Mark => RawDict.Get_Type_Info_Ref (TheType)); -- GAA External end TypeIsAnnounced; -------------------------------------------------------------------------------- -- Constant_Is_Deferred -------------------------------------------------------------------------------- function Constant_Is_Deferred (The_Constant : RawDict.Constant_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Constant_Deferred_Declaration (The_Constant => The_Constant) /= RawDict.Null_Declaration_Info_Ref; end Constant_Is_Deferred; -------------------------------------------------------------------------------- function IsDeferredConstant (TheSymbol : Symbol) return Boolean is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Constant_Symbol and then Constant_Is_Deferred (The_Constant => RawDict.Get_Constant_Info_Ref (Item => TheSymbol)); -- GAA External end 
IsDeferredConstant; -------------------------------------------------------------------------------- -- Get_Declaration -------------------------------------------------------------------------------- function Get_Type_Declaration (Type_Mark : RawDict.Type_Info_Ref) return RawDict.Declaration_Info_Ref --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; begin -- for a protected or task type we want to return the Ada declaration if available if (RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Protected_Type_Item or else RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Task_Type_Item) and then RawDict.Get_Type_Declaration (Type_Mark => Type_Mark) /= RawDict.Null_Declaration_Info_Ref then The_Declaration := RawDict.Get_Type_Declaration (Type_Mark => Type_Mark); -- if not one of these types, or if there is no Ada declaration, -- return the type announcement if available elsif RawDict.Get_Type_Announcement (Type_Mark => Type_Mark) /= RawDict.Null_Declaration_Info_Ref and then RawDict.Get_Type_Declaration (Type_Mark => Type_Mark) = RawDict.Null_Declaration_Info_Ref then -- we have a type announcement but no Ada declaration The_Declaration := RawDict.Get_Type_Announcement (Type_Mark => Type_Mark); -- or proceed to check for private/visible Ada declarations elsif RawDict.Get_Type_Private_Type_Declaration (Type_Mark => Type_Mark) /= RawDict.Null_Declaration_Info_Ref then The_Declaration := RawDict.Get_Type_Private_Type_Declaration (Type_Mark => Type_Mark); else -- this case now also handles a type that is announced _and_ declared The_Declaration := RawDict.Get_Type_Declaration (Type_Mark => Type_Mark); end if; return The_Declaration; end Get_Type_Declaration; -------------------------------------------------------------------------------- function Get_Constant_Declaration (The_Constant : RawDict.Constant_Info_Ref) return RawDict.Declaration_Info_Ref --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; begin if Constant_Is_Deferred (The_Constant => The_Constant) then The_Declaration := RawDict.Get_Constant_Deferred_Declaration (The_Constant => The_Constant); else The_Declaration := RawDict.Get_Constant_Declaration (The_Constant => The_Constant); end if; return The_Declaration; end Get_Constant_Declaration; -------------------------------------------------------------------------------- function SubprogramSignatureIsWellformed (Abstraction : Abstractions; Subprogram : Symbol) return Boolean is Result : Boolean; begin case RawDict.GetSymbolDiscriminant (Subprogram) is when Subprogram_Symbol => Result := RawDict.Get_Subprogram_Signature_Is_Wellformed (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Subprogram), -- GAA External Abstraction => Abstraction); when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Subprogram)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.SubprogramSignatureIsWellformed"); Result := RawDict.Get_Task_Type_Signature_Is_Wellformed (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => Subprogram), -- GAA External Abstraction => Abstraction); when others => -- non-exec code Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.SubprogramSignatureIsWellformed"); end case; return Result; end SubprogramSignatureIsWellformed; -------------------------------------------------------------------------------- function GetAdaFunction (ProofFunction : Symbol) return Symbol is begin 
return RawDict.Get_Subprogram_Symbol (RawDict.GetImplicitProofFunctionAdaFunction (ProofFunction)); -- GAA External end GetAdaFunction; -------------------------------------------------------------------------------- function GetImplicitProofFunction (Abstraction : Abstractions; TheFunction : Symbol) return Symbol is begin return RawDict.Get_Subprogram_Implicit_Proof_Function (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (TheFunction), -- GAA External Abstraction => Abstraction); end GetImplicitProofFunction; -------------------------------------------------------------------------------- -- Has_Implicit_Return_Variable -------------------------------------------------------------------------------- function HasImplicitReturnVariable (Abstraction : Abstractions; TheFunction : Symbol) return Boolean is function Has_Implicit_Return_Variable (The_Function : RawDict.Subprogram_Info_Ref; Abstraction : Abstractions) return Boolean --# global in Dict; is begin return RawDict.Get_Subprogram_Implicit_Return_Variable (The_Subprogram => The_Function, Abstraction => Abstraction) /= RawDict.Null_Implicit_Return_Variable_Info_Ref; end Has_Implicit_Return_Variable; begin -- HasImplicitReturnVariable return Has_Implicit_Return_Variable (The_Function => RawDict.Get_Subprogram_Info_Ref (TheFunction), -- GAA External Abstraction => Abstraction); end HasImplicitReturnVariable; -------------------------------------------------------------------------------- -- Get_Implicit_Return_Variable -------------------------------------------------------------------------------- function GetImplicitReturnVariable (Abstraction : Abstractions; TheFunction : Symbol) return Symbol is function Get_Implicit_Return_Variable (The_Function : RawDict.Subprogram_Info_Ref; Abstraction : Abstractions) return RawDict.Implicit_Return_Variable_Info_Ref --# global in Dict; is Result : RawDict.Implicit_Return_Variable_Info_Ref; begin Result := RawDict.Get_Subprogram_Implicit_Return_Variable (The_Subprogram => The_Function, Abstraction => Abstraction); if Result = RawDict.Null_Implicit_Return_Variable_Info_Ref and then RawDict.Get_Subprogram_Instantiation_Of (The_Subprogram => The_Function) /= RawDict.Null_Subprogram_Info_Ref and then RawDict.Get_Subprogram_Postcondition (The_Subprogram => The_Function, Abstraction => Abstraction) = 0 then Result := RawDict.Get_Subprogram_Implicit_Return_Variable (The_Subprogram => RawDict.Get_Subprogram_Instantiation_Of (The_Subprogram => The_Function), Abstraction => Abstraction); end if; return Result; end Get_Implicit_Return_Variable; begin -- GetImplicitReturnVariable return RawDict.Get_Implicit_Return_Variable_Symbol -- GAA External (Get_Implicit_Return_Variable (The_Function => RawDict.Get_Subprogram_Info_Ref (TheFunction), -- GAA External Abstraction => Abstraction)); end GetImplicitReturnVariable; -------------------------------------------------------------------------------- -- Get_Type -------------------------------------------------------------------------------- function Get_Subcomponent_Type (The_Subcomponent : RawDict.Subcomponent_Info_Ref) return RawDict.Type_Info_Ref --# global in Dict; is begin return RawDict.Get_Record_Component_Type (The_Record_Component => RawDict.Get_Subcomponent_Record_Component (The_Subcomponent => The_Subcomponent)); end Get_Subcomponent_Type; -------------------------------------------------------------------------------- function Get_Parameter_Constraint_Type (The_Parameter_Constraint : RawDict.Parameter_Constraint_Info_Ref) return RawDict.Type_Info_Ref 
--# global in Dict; is The_Dimension : Positive; The_Array_Index : RawDict.Array_Index_Info_Ref; begin -- Parameter constraint symbols are only used to communicate information about -- unconstrained parameters between the WFFs and the VCG. As such they don't really -- have a type other than for FDL declaration purposes where they behave as if they -- were subtypes. For this purpose it is enough to return the type of the matching index of the -- unconstrained type of the object associated with the constraint symbol. -- So if parameter X is an unconstrained array of type T then the type of X__index__subtype__2 is -- the type of the 2nd dimension of T. -- The rather cumbersome use of RawDict calls here is caused by ordering problems between Get_Type and the -- higher-level Dictionary iterator calls that could otherwise be used. The_Dimension := RawDict.Get_Parameter_Constraint_Dimension (The_Parameter_Constraint => The_Parameter_Constraint); -- Follow linked list of indexes The_Array_Index := RawDict.Get_Type_First_Array_Index (Type_Mark => RawDict.Get_Subprogram_Parameter_Type (The_Subprogram_Parameter => RawDict.Get_Parameter_Constraint_Subprogram_Parameter (The_Parameter_Constraint => The_Parameter_Constraint))); while The_Dimension > 1 loop The_Array_Index := RawDict.Get_Next_Array_Index (The_Array_Index => The_Array_Index); The_Dimension := The_Dimension - 1; end loop; return RawDict.Get_Array_Index_Type (The_Array_Index => The_Array_Index); end Get_Parameter_Constraint_Type; function Get_Type (The_Symbol : Symbol) return RawDict.Type_Info_Ref --# global in Dict; is Result : RawDict.Type_Info_Ref; Local_Symbol : Symbol; -------------------------------------------------------------------------------- function Get_Implicit_Return_Variable_Type (The_Implicit_Return_Variable : RawDict.Implicit_Return_Variable_Info_Ref) return RawDict.Type_Info_Ref --# global in Dict; is begin return RawDict.Get_Subprogram_Return_Type (The_Subprogram => RawDict.Get_Implicit_Return_Variable_Function (The_Implicit_Return_Variable => The_Implicit_Return_Variable)); end Get_Implicit_Return_Variable_Type; -------------------------------------------------------------------------------- function Get_Generic_Parameter_Type (The_Generic_Parameter : RawDict.Generic_Parameter_Info_Ref) return RawDict.Type_Info_Ref --# global in Dict; is Result : RawDict.Type_Info_Ref; begin case RawDict.Get_Generic_Parameter_Kind (The_Generic_Parameter => The_Generic_Parameter) is when Generic_Type_Parameter => -- if it's a type parameter then this is enough, we have the type mark Result := RawDict.Get_Generic_Parameter_Type (The_Generic_Parameter => The_Generic_Parameter); when Generic_Object_Parameter => -- it's an object, so the above gave us a constant and the next line gets its type Result := RawDict.Get_Constant_Type (The_Constant => RawDict.Get_Generic_Parameter_Object (The_Generic_Parameter => The_Generic_Parameter)); end case; return Result; end Get_Generic_Parameter_Type; begin -- Get_Type -- If the symbol is a special variable used to store values on entry to a for loop -- then we "dereference" it back to the original variable before seeking the type if RawDict.GetSymbolDiscriminant (The_Symbol) = LoopEntryVariableSymbol then Local_Symbol := RawDict.GetLoopEntryVariableOriginalVar (The_Symbol); else Local_Symbol := The_Symbol; end if; case RawDict.GetSymbolDiscriminant (Local_Symbol) is when Type_Symbol => Result := RawDict.Get_Type_Parent (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Local_Symbol)); when Enumeration_Literal_Symbol => 
Result := RawDict.Get_Enumeration_Literal_Type (The_Enumeration_Literal => RawDict.Get_Enumeration_Literal_Info_Ref (Item => Local_Symbol)); when Record_Component_Symbol => Result := RawDict.Get_Record_Component_Type (The_Record_Component => RawDict.Get_Record_Component_Info_Ref (Item => Local_Symbol)); when Subcomponent_Symbol => Result := Get_Subcomponent_Type (The_Subcomponent => RawDict.Get_Subcomponent_Info_Ref (Item => Local_Symbol)); when Variable_Symbol => Result := RawDict.Get_Variable_Type (The_Variable => RawDict.Get_Variable_Info_Ref (Item => Local_Symbol)); when Quantified_Variable_Symbol => Result := RawDict.Get_Quantified_Variable_Type (The_Quantified_Variable => RawDict.Get_Quantified_Variable_Info_Ref (Item => Local_Symbol)); when Implicit_Return_Variable_Symbol => Result := Get_Implicit_Return_Variable_Type (The_Implicit_Return_Variable => RawDict.Get_Implicit_Return_Variable_Info_Ref (Item => Local_Symbol)); when Constant_Symbol => Result := RawDict.Get_Constant_Type (The_Constant => RawDict.Get_Constant_Info_Ref (Item => Local_Symbol)); when Subprogram_Symbol => Result := RawDict.Get_Subprogram_Return_Type (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Local_Symbol)); when ImplicitProofFunctionSymbol => Result := RawDict.Get_Subprogram_Return_Type (The_Subprogram => RawDict.GetImplicitProofFunctionAdaFunction (Local_Symbol)); when Subprogram_Parameter_Symbol => Result := RawDict.Get_Subprogram_Parameter_Type (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => Local_Symbol)); when Parameter_Constraint_Symbol => Result := Get_Parameter_Constraint_Type (The_Parameter_Constraint => RawDict.Get_Parameter_Constraint_Info_Ref (Item => Local_Symbol)); when Generic_Parameter_Symbol => Result := Get_Generic_Parameter_Type (The_Generic_Parameter => RawDict.Get_Generic_Parameter_Info_Ref (Item => Local_Symbol)); when LoopParameterSymbol => Result := RawDict.GetLoopParameterType (Local_Symbol); when KnownDiscriminantSymbol => Result := RawDict.GetDiscriminantTypeMark (Local_Symbol); when others => -- non-exec code Result := RawDict.Null_Type_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Get_Type"); end case; return Result; end Get_Type; ------------------------------------------------------------------------------ function GetType (TheSymbol : Symbol) return Symbol is begin return RawDict.Get_Type_Symbol (Get_Type (The_Symbol => TheSymbol)); -- GAA External end GetType; -------------------------------------------------------------------------------- -- Get_Root_Type -------------------------------------------------------------------------------- function Get_Root_Type (Type_Mark : RawDict.Type_Info_Ref) return RawDict.Type_Info_Ref --# global in Dict; is Current : RawDict.Type_Info_Ref; begin Current := Type_Mark; -- loop through possible chain of subtype to get actual type while Current /= Get_Unknown_Type_Mark and then Is_Subtype (Type_Mark => Current) loop Current := RawDict.Get_Type_Parent (Type_Mark => Current); end loop; return Current; end Get_Root_Type; -------------------------------------------------------------------------------- function GetRootType (TypeMark : Symbol) return Symbol is Current : RawDict.Type_Info_Ref; begin case RawDict.GetSymbolDiscriminant (TypeMark) is when Parameter_Constraint_Symbol => -- synthetic index type of unconst array params, first get type of the constraint Current := Get_Parameter_Constraint_Type (The_Parameter_Constraint => 
RawDict.Get_Parameter_Constraint_Info_Ref (Item => TypeMark)); -- GAA External when Type_Symbol => Current := RawDict.Get_Type_Info_Ref (Item => TypeMark); -- GAA External when others => Current := RawDict.Null_Type_Info_Ref; end case; return RawDict.Get_Type_Symbol (Get_Root_Type (Type_Mark => Current)); -- GAA External end GetRootType; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function Is_Variable (TheSymbol : Symbol) return Boolean is Result : Boolean; begin case RawDict.GetSymbolDiscriminant (TheSymbol) is when Variable_Symbol | LoopEntryVariableSymbol | Quantified_Variable_Symbol | Implicit_Return_Variable_Symbol | Subprogram_Parameter_Symbol | LoopParameterSymbol => Result := True; when others => Result := False; end case; return Result; end Is_Variable; -------------------------------------------------------------------------------- -- Type_Is_Atomic -------------------------------------------------------------------------------- function Type_Is_Atomic (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return -- If this is a subtype then check the base type (Is_Subtype (Type_Mark => Type_Mark) and then RawDict.Get_Type_Atomic (Type_Mark => Get_Root_Type (Type_Mark => Type_Mark))) or else RawDict.Get_Type_Atomic (Type_Mark => Type_Mark); -- Otherwise just check the type itself end Type_Is_Atomic; ------------------------------------------------------------------------------ function IsAtomic (TheSymbol : Symbol) return Boolean is begin return (Is_Variable (TheSymbol) and then Type_Is_Atomic (Get_Type (The_Symbol => TheSymbol))) or else (RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then Type_Is_Atomic (RawDict.Get_Type_Info_Ref (Item => TheSymbol))); end IsAtomic; ------------------------------------------------------------------------------ -- Is_Predefined_Suspension_Object_Type ------------------------------------------------------------------------------ function Is_Predefined_Suspension_Object_Type (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in CommandLineData.Content; --# in Dict; is begin return CommandLineData.Ravenscar_Selected and then Type_Mark = Dict.Types.The_Predefined_Types (Predefined_Suspension_Object_Type); end Is_Predefined_Suspension_Object_Type; -------------------------------------------------------------------------------- function IsPredefinedSuspensionObjectType (TheSymbol : Symbol) return Boolean is begin return Is_Predefined_Suspension_Object_Type (Type_Mark => RawDict.Get_Type_Info_Ref (TheSymbol)); -- GAA External end IsPredefinedSuspensionObjectType; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function IsPragmaAtomic (TheSymbol : Symbol) return Boolean is begin return IsAtomic (TheSymbol) and then not (RawDict.Get_Type_Discriminant (Type_Mark => Get_Type (The_Symbol => TheSymbol)) = Protected_Type_Item) and then not Is_Predefined_Suspension_Object_Type (Type_Mark => Get_Type (The_Symbol => TheSymbol)); end IsPragmaAtomic; ------------------------------------------------------------------------------ function Get_Unconstrained_Array_Index (TheSymbol : Symbol) return Symbol is begin return RawDict.Get_Parameter_Constraint_Symbol -- GAA External (RawDict.Get_Quantified_Variable_Constraint (The_Quantified_Variable => RawDict.Get_Quantified_Variable_Info_Ref 
(TheSymbol))); -- GAA External end Get_Unconstrained_Array_Index; ------------------------------------------------------------------------------ function GetAccess (TheProtectedType : Symbol) return Symbol is begin return RawDict.Get_Type_Symbol -- GAA External (RawDict.Get_Type_Accesses (Type_Mark => RawDict.Get_Type_Info_Ref (TheProtectedType))); -- GAA External end GetAccess; ------------------------------------------------------------------------------ -- Get_Context ------------------------------------------------------------------------------ function Get_Type_Context (Type_Mark : RawDict.Type_Info_Ref) return Contexts --# global in Dict; is begin return RawDict.Get_Declaration_Context (The_Declaration => Get_Type_Declaration (Type_Mark => Type_Mark)); end Get_Type_Context; ------------------------------------------------------------------------------ function Get_Constant_Context (The_Constant : RawDict.Constant_Info_Ref) return Contexts --# global in Dict; is begin return RawDict.Get_Declaration_Context (The_Declaration => Get_Constant_Declaration (The_Constant => The_Constant)); end Get_Constant_Context; ------------------------------------------------------------------------------ function Get_Subprogram_Context (The_Subprogram : RawDict.Subprogram_Info_Ref) return Contexts --# global in Dict; is begin return RawDict.Get_Declaration_Context (The_Declaration => RawDict.Get_Subprogram_Specification (The_Subprogram => The_Subprogram)); end Get_Subprogram_Context; ------------------------------------------------------------------------------ function GetContext (TheSymbol : Symbol) return Contexts is Context : Contexts; ------------------------------------------------------------------------------ function Get_Package_Context (The_Package : RawDict.Package_Info_Ref) return Contexts --# global in Dict; is begin return RawDict.Get_Declaration_Context (The_Declaration => RawDict.Get_Package_Specification (The_Package => The_Package)); end Get_Package_Context; ------------------------------------------------------------------------------ function Get_Variable_Context (The_Variable : RawDict.Variable_Info_Ref) return Contexts --# global in Dict; is Context : Contexts; begin if RawDict.Get_Variable_Declaration (The_Variable => The_Variable) = RawDict.Null_Declaration_Info_Ref then Context := ProofContext; else Context := RawDict.Get_Declaration_Context (The_Declaration => RawDict.Get_Variable_Declaration (The_Variable => The_Variable)); end if; return Context; end Get_Variable_Context; ------------------------------------------------------------------------------ function Get_Enumeration_Literal_Context (The_Enumeration_Literal : RawDict.Enumeration_Literal_Info_Ref) return Contexts --# global in Dict; is begin return RawDict.Get_Declaration_Context (The_Declaration => Get_Type_Declaration (Type_Mark => RawDict.Get_Enumeration_Literal_Type (The_Enumeration_Literal => The_Enumeration_Literal))); end Get_Enumeration_Literal_Context; ------------------------------------------------------------------------------ function Get_Record_Component_Context (The_Record_Component : RawDict.Record_Component_Info_Ref) return Contexts --# global in Dict; is begin return RawDict.Get_Declaration_Context (The_Declaration => Get_Type_Declaration (Type_Mark => RawDict.Get_Record_Component_Type (The_Record_Component => The_Record_Component))); end Get_Record_Component_Context; ------------------------------------------------------------------------------ function Get_Subprogram_Parameter_Context 
(The_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref) return Contexts --# global in Dict; is begin return RawDict.Get_Declaration_Context (The_Declaration => RawDict.Get_Subprogram_Specification (The_Subprogram => RawDict.Get_Subprogram_Parameter_Subprogram (The_Subprogram_Parameter => The_Subprogram_Parameter))); end Get_Subprogram_Parameter_Context; begin -- GetContext case RawDict.GetSymbolDiscriminant (TheSymbol) is when Type_Symbol => Context := Get_Type_Context (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheSymbol)); -- GAA External when Constant_Symbol => Context := Get_Constant_Context (The_Constant => RawDict.Get_Constant_Info_Ref (Item => TheSymbol)); -- GAA External when Package_Symbol => Context := Get_Package_Context (The_Package => RawDict.Get_Package_Info_Ref (Item => TheSymbol)); -- GAA External when Subprogram_Symbol => Context := Get_Subprogram_Context (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheSymbol)); -- GAA External when Variable_Symbol => Context := Get_Variable_Context (The_Variable => RawDict.Get_Variable_Info_Ref (Item => TheSymbol)); -- GAA External when Quantified_Variable_Symbol | ImplicitProofFunctionSymbol | Implicit_Return_Variable_Symbol => Context := ProofContext; when Enumeration_Literal_Symbol => Context := Get_Enumeration_Literal_Context (The_Enumeration_Literal => RawDict.Get_Enumeration_Literal_Info_Ref (Item => TheSymbol)); -- GAA External when Record_Component_Symbol => Context := Get_Record_Component_Context (The_Record_Component => RawDict.Get_Record_Component_Info_Ref (Item => TheSymbol)); -- GAA External when LoopSymbol | LoopParameterSymbol => Context := ProgramContext; when Subprogram_Parameter_Symbol => Context := Get_Subprogram_Parameter_Context (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => TheSymbol)); -- GAA External when others => Context := ProgramContext; end case; return Context; end GetContext; -------------------------------------------------------------------------------- -- Get_Root_Package -------------------------------------------------------------------------------- function Get_Root_Package (The_Package : RawDict.Package_Info_Ref) return RawDict.Package_Info_Ref --# global in Dict; is Current_Package, Next_Package : RawDict.Package_Info_Ref; begin Next_Package := The_Package; loop Current_Package := Next_Package; Next_Package := RawDict.Get_Package_Parent (The_Package => Current_Package); exit when Next_Package = RawDict.Null_Package_Info_Ref; end loop; return Current_Package; end Get_Root_Package; -------------------------------------------------------------------------------- function GetRootPackage (ThePackage : Symbol) return Symbol is begin return RawDict.Get_Package_Symbol (Get_Root_Package (The_Package => RawDict.Get_Package_Info_Ref (ThePackage))); -- GAA External end GetRootPackage; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function IsPrivatePackage (ThePackage : Symbol) return Boolean is begin return RawDict.Get_Package_Is_Private (The_Package => RawDict.Get_Package_Info_Ref (ThePackage)); -- GAA External end IsPrivatePackage; -------------------------------------------------------------------------------- function GetPackageParent (ThePackage : Symbol) return Symbol is begin return RawDict.Get_Package_Symbol (RawDict.Get_Package_Parent (The_Package => RawDict.Get_Package_Info_Ref (ThePackage))); -- GAA External end 
GetPackageParent; -------------------------------------------------------------------------------- function PackageDeclaresTaggedType (ThePackage : Symbol) return Boolean is begin return RawDict.Get_Package_Declares_Tagged_Type (The_Package => RawDict.Get_Package_Info_Ref (ThePackage)); -- GAA External end PackageDeclaresTaggedType; -------------------------------------------------------------------------------- -- Package_Extends_Another_Package -------------------------------------------------------------------------------- function PackageExtendsAnotherPackage (ThePackage : Symbol) return Boolean is function Package_Extends_Another_Package (The_Package : RawDict.Package_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Package_Extends (The_Package => The_Package) /= RawDict.Null_Package_Info_Ref; end Package_Extends_Another_Package; begin -- PackageExtendsAnotherPackage return Package_Extends_Another_Package (The_Package => RawDict.Get_Package_Info_Ref (ThePackage)); -- GAA External end PackageExtendsAnotherPackage; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function GetPackageThatIsExtended (ThePackage : Symbol) return Symbol is begin return RawDict.Get_Package_Symbol (RawDict.Get_Package_Extends (The_Package => RawDict.Get_Package_Info_Ref (ThePackage))); -- GAA External end GetPackageThatIsExtended; -------------------------------------------------------------------------------- -- Is_Proper_Descendent -------------------------------------------------------------------------------- function Is_Proper_Descendent (Inner_Package, Outer_Package : RawDict.Package_Info_Ref) return Boolean --# global in Dict; is Current_Package : RawDict.Package_Info_Ref; begin Current_Package := Inner_Package; loop Current_Package := RawDict.Get_Package_Parent (The_Package => Current_Package); exit when Current_Package = Outer_Package or else Current_Package = RawDict.Null_Package_Info_Ref; end loop; return Current_Package = Outer_Package; end Is_Proper_Descendent; -------------------------------------------------------------------------------- function IsProperDescendent (Inner, Outer : Symbol) return Boolean is begin return Is_Proper_Descendent (Inner_Package => RawDict.Get_Package_Info_Ref (Inner), -- GAA External Outer_Package => RawDict.Get_Package_Info_Ref (Outer)); -- GAA External end IsProperDescendent; -------------------------------------------------------------------------------- -- Is_Private_Descendent -------------------------------------------------------------------------------- function IsPrivateDescendent (Inner, Outer : Symbol) return Boolean is function Is_Private_Descendent (Inner_Package, Outer_Package : RawDict.Package_Info_Ref) return Boolean --# global in Dict; is Current_Package : RawDict.Package_Info_Ref; Is_Private : Boolean := False; begin Current_Package := Inner_Package; loop Is_Private := Is_Private or else RawDict.Get_Package_Is_Private (The_Package => Current_Package); Current_Package := RawDict.Get_Package_Parent (The_Package => Current_Package); exit when Current_Package = Outer_Package or else Current_Package = RawDict.Null_Package_Info_Ref; end loop; return Current_Package = Outer_Package and then Is_Private; end Is_Private_Descendent; begin -- IsPrivateDescendent return Is_Private_Descendent (Inner_Package => RawDict.Get_Package_Info_Ref (Inner), -- GAA External Outer_Package => RawDict.Get_Package_Info_Ref (Outer)); -- GAA 
External end IsPrivateDescendent; -------------------------------------------------------------------------------- -- Is_Descendent_Of_Private_Sibling -------------------------------------------------------------------------------- function IsDescendentOfPrivateSibling (Candidate, ThePackage : Symbol) return Boolean is function Is_Descendent_Of_Private_Sibling (Candidate, The_Package : RawDict.Package_Info_Ref) return Boolean --# global in Dict; is Parent_Package, Current_Package, Next_Package : RawDict.Package_Info_Ref; begin Parent_Package := RawDict.Get_Package_Parent (The_Package => The_Package); Current_Package := Candidate; loop Next_Package := RawDict.Get_Package_Parent (The_Package => Current_Package); exit when Next_Package = Parent_Package or else Next_Package = RawDict.Null_Package_Info_Ref; Current_Package := Next_Package; end loop; return Next_Package = Parent_Package and then RawDict.Get_Package_Is_Private (The_Package => Current_Package); end Is_Descendent_Of_Private_Sibling; begin -- IsDescendentOfPrivateSibling return Is_Descendent_Of_Private_Sibling (Candidate => RawDict.Get_Package_Info_Ref (Candidate), -- GAA External The_Package => RawDict.Get_Package_Info_Ref (ThePackage)); -- GAA External end IsDescendentOfPrivateSibling; -------------------------------------------------------------------------------- -- Is_Declared -------------------------------------------------------------------------------- function Variable_Is_Declared (The_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Variable_Declaration (The_Variable => The_Variable) /= RawDict.Null_Declaration_Info_Ref; end Variable_Is_Declared; -------------------------------------------------------------------------------- function Is_Declared (Item : Symbol) return Boolean is Result : Boolean; -------------------------------------------------------------------------------- function Type_Is_Declared (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Type_Declaration (Type_Mark => Type_Mark) /= RawDict.Null_Declaration_Info_Ref; end Type_Is_Declared; -------------------------------------------------------------------------------- function Constant_Is_Declared (The_Constant : RawDict.Constant_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Constant_Declaration (The_Constant => The_Constant) /= RawDict.Null_Declaration_Info_Ref; end Constant_Is_Declared; begin -- Is_Declared case RawDict.GetSymbolDiscriminant (Item) is when Type_Symbol => Result := Type_Is_Declared (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Item)); -- GAA External when Variable_Symbol => Result := Variable_Is_Declared (The_Variable => RawDict.Get_Variable_Info_Ref (Item => Item)); -- GAA External when Constant_Symbol => Result := Constant_Is_Declared (The_Constant => RawDict.Get_Constant_Info_Ref (Item => Item)); -- GAA External when Quantified_Variable_Symbol | Subprogram_Symbol | Package_Symbol | LoopParameterSymbol | Enumeration_Literal_Symbol | Record_Component_Symbol | ImplicitProofFunctionSymbol | Subcomponent_Symbol | Implicit_Return_Variable_Symbol | Subprogram_Parameter_Symbol => Result := True; when others => Result := False; end case; return Result; end Is_Declared; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function Get_Value (The_Constant : Symbol) return LexTokenManager.Lex_String is Result : 
LexTokenManager.Lex_String := LexTokenManager.Null_String; begin if RawDict.GetSymbolDiscriminant (The_Constant) = Constant_Symbol then Result := RawDict.Get_Constant_Value (The_Constant => RawDict.Get_Constant_Info_Ref (Item => The_Constant)); -- GAA External end if; return Result; end Get_Value; -------------------------------------------------------------------------------- function GetConstantExpNode (TheConstant : Symbol) return ExaminerConstants.RefType is begin return RawDict.Get_Constant_Exp_Node (The_Constant => RawDict.Get_Constant_Info_Ref (TheConstant)); -- GAA External end GetConstantExpNode; -------------------------------------------------------------------------------- function ConstantExpIsWellformed (TheConstant : Symbol) return Boolean is begin return RawDict.Get_Constant_Exp_Is_Wellformed (The_Constant => RawDict.Get_Constant_Info_Ref (TheConstant)); -- GAA External end ConstantExpIsWellformed; -------------------------------------------------------------------------------- -- Is_Own_Variable -------------------------------------------------------------------------------- function Is_Own_Variable (The_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Variable_Own_Variable (The_Variable => The_Variable) /= RawDict.Null_Own_Variable_Info_Ref; end Is_Own_Variable; -------------------------------------------------------------------------------- function IsOwnVariable (Variable : Symbol) return Boolean is begin return RawDict.GetSymbolDiscriminant (Variable) = Variable_Symbol and then Is_Own_Variable (The_Variable => RawDict.Get_Variable_Info_Ref (Item => Variable)); -- GAA External end IsOwnVariable; -------------------------------------------------------------------------------- -- Is_Own_Task -------------------------------------------------------------------------------- function Is_Own_Task (The_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Variable_Own_Task (The_Variable => The_Variable) /= NullSymbol; end Is_Own_Task; -------------------------------------------------------------------------------- function IsOwnTask (Variable : Symbol) return Boolean is begin return RawDict.GetSymbolDiscriminant (Variable) = Variable_Symbol and then Is_Own_Task (The_Variable => RawDict.Get_Variable_Info_Ref (Item => Variable)); -- GAA External end IsOwnTask; -------------------------------------------------------------------------------- -- Get_Owner -------------------------------------------------------------------------------- function Get_Owner (The_Variable : RawDict.Variable_Info_Ref) return Symbol --# global in Dict; is Result : Symbol; begin if Is_Own_Variable (The_Variable => The_Variable) then Result := RawDict.Get_Own_Variable_Owner (The_Own_Variable => RawDict.Get_Variable_Own_Variable (The_Variable => The_Variable)); elsif Is_Own_Task (The_Variable => The_Variable) then Result := RawDict.Get_Package_Symbol (RawDict.GetOwnTaskOwner (RawDict.Get_Variable_Own_Task (The_Variable => The_Variable))); else Result := RawDict.Get_Own_Variable_Owner (The_Own_Variable => RawDict.Get_Constituent_Own_Variable (The_Constituent => RawDict.Get_Variable_Constituent (The_Variable => The_Variable))); end if; return Result; end Get_Owner; -------------------------------------------------------------------------------- function GetOwner (Variable : Symbol) return Symbol is begin return Get_Owner (The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External end GetOwner; 
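-- Descriptive note: Get_Owner above resolves the owning package in one of three
-- ways: via the variable's own-variable record, via its own-task record, or,
-- failing both, by treating the variable as a refinement constituent and taking
-- the owner of the own variable that the constituent refines.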
-------------------------------------------------------------------------------- -- Type_Is_Enumeration -------------------------------------------------------------------------------- function Type_Is_Enumeration (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Enumeration_Type_Item or else (RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Generic_Type_Item and then RawDict.Get_Type_Kind_Of_Generic (Type_Mark => Type_Mark) = Generic_Discrete_Type); end Type_Is_Enumeration; -------------------------------------------------------------------------------- function TypeIsEnumeration (TypeMark : Symbol) return Boolean is begin return Type_Is_Enumeration (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark)); -- GAA External end TypeIsEnumeration; -------------------------------------------------------------------------------- -- Type_Is_Integer -------------------------------------------------------------------------------- function Type_Is_Integer (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Integer_Type_Item or else (RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Generic_Type_Item and then RawDict.Get_Type_Kind_Of_Generic (Type_Mark => Type_Mark) = Generic_Integer_Type); end Type_Is_Integer; -------------------------------------------------------------------------------- function TypeIsInteger (TypeMark : Symbol) return Boolean is begin return Type_Is_Integer (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark)); -- GAA External end TypeIsInteger; -------------------------------------------------------------------------------- -- Type_Is_Modular -------------------------------------------------------------------------------- function Type_Is_Modular (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Modular_Type_Item or else (RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Generic_Type_Item and then RawDict.Get_Type_Kind_Of_Generic (Type_Mark => Type_Mark) = Generic_Modular_Type); end Type_Is_Modular; -------------------------------------------------------------------------------- function TypeIsModular (TypeMark : Symbol) return Boolean is begin return Type_Is_Modular (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark)); -- GAA External end TypeIsModular; -------------------------------------------------------------------------------- -- Type_Is_Floating_Point -------------------------------------------------------------------------------- function Type_Is_Floating_Point (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Floating_Point_Type_Item or else (RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Generic_Type_Item and then RawDict.Get_Type_Kind_Of_Generic (Type_Mark => Type_Mark) = Generic_Floating_Point_Type); end Type_Is_Floating_Point; -------------------------------------------------------------------------------- function TypeIsFloatingPoint (TypeMark : Symbol) return Boolean is begin return Type_Is_Floating_Point (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark)); -- GAA External end TypeIsFloatingPoint; -------------------------------------------------------------------------------- -- Type_Is_Fixed_Point 
-------------------------------------------------------------------------------- function Type_Is_Fixed_Point (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Fixed_Point_Type_Item or else (RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Generic_Type_Item and then RawDict.Get_Type_Kind_Of_Generic (Type_Mark => Type_Mark) = Generic_Fixed_Point_Type); end Type_Is_Fixed_Point; -------------------------------------------------------------------------------- function TypeIsFixedPoint (TypeMark : Symbol) return Boolean is begin return Type_Is_Fixed_Point (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark)); -- GAA External end TypeIsFixedPoint; -------------------------------------------------------------------------------- -- Type_Is_Array -------------------------------------------------------------------------------- function Type_Is_Array (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Array_Type_Item or else (RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Generic_Type_Item and then RawDict.Get_Type_Kind_Of_Generic (Type_Mark => Type_Mark) = Generic_Array_Type); end Type_Is_Array; -------------------------------------------------------------------------------- function TypeIsArray (TypeMark : Symbol) return Boolean is begin return Type_Is_Array (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark)); -- GAA External end TypeIsArray; -------------------------------------------------------------------------------- -- Get_Predefined_Boolean_Type -------------------------------------------------------------------------------- function Get_Predefined_Boolean_Type return RawDict.Type_Info_Ref --# global in Dict; is begin return Dict.Types.The_Predefined_Types (Predefined_Boolean_Type); end Get_Predefined_Boolean_Type; -------------------------------------------------------------------------------- function GetPredefinedBooleanType return Symbol is begin return RawDict.Get_Type_Symbol (Get_Predefined_Boolean_Type); -- GAA External end GetPredefinedBooleanType; -------------------------------------------------------------------------------- function IsPredefinedBooleanType (TheSymbol : Symbol) return Boolean is begin return RawDict.Get_Type_Info_Ref (TheSymbol) = Get_Predefined_Boolean_Type; -- GAA External end IsPredefinedBooleanType; -------------------------------------------------------------------------------- -- Get_False -------------------------------------------------------------------------------- function Get_False return RawDict.Enumeration_Literal_Info_Ref --# global in Dict; is begin return Dict.Types.The_Predefined_Literals (Predefined_False); end Get_False; -------------------------------------------------------------------------------- function GetFalse return Symbol is begin return RawDict.Get_Enumeration_Literal_Symbol (Get_False); -- GAA External end GetFalse; -------------------------------------------------------------------------------- -- Get_True -------------------------------------------------------------------------------- function Get_True return RawDict.Enumeration_Literal_Info_Ref --# global in Dict; is begin return Dict.Types.The_Predefined_Literals (Predefined_True); end Get_True; -------------------------------------------------------------------------------- function GetTrue return Symbol is begin return RawDict.Get_Enumeration_Literal_Symbol (Get_True); -- GAA 
External end GetTrue; -------------------------------------------------------------------------------- -- Get_Universal_Real_Type -------------------------------------------------------------------------------- function Get_Universal_Real_Type return RawDict.Type_Info_Ref --# global in Dict; is begin return Dict.Types.The_Universal_Types (Universal_Real_Type); end Get_Universal_Real_Type; -------------------------------------------------------------------------------- function GetUniversalRealType return Symbol is begin return RawDict.Get_Type_Symbol (Get_Universal_Real_Type); -- GAA External end GetUniversalRealType; -------------------------------------------------------------------------------- function IsUniversalRealType (TheSymbol : Symbol) return Boolean is begin return RawDict.Get_Type_Info_Ref (TheSymbol) = Get_Universal_Real_Type; -- GAA External end IsUniversalRealType; -------------------------------------------------------------------------------- -- Get_Universal_Fixed_Type -------------------------------------------------------------------------------- function Get_Universal_Fixed_Type return RawDict.Type_Info_Ref --# global in Dict; is begin return Dict.Types.The_Universal_Types (Universal_Fixed_Type); end Get_Universal_Fixed_Type; -------------------------------------------------------------------------------- function GetUniversalFixedType return Symbol is begin return RawDict.Get_Type_Symbol (Get_Universal_Fixed_Type); -- GAA External end GetUniversalFixedType; -------------------------------------------------------------------------------- function IsUniversalFixedType (TheSymbol : Symbol) return Boolean is begin return RawDict.Get_Type_Info_Ref (TheSymbol) = Get_Universal_Fixed_Type; -- GAA External end IsUniversalFixedType; -------------------------------------------------------------------------------- -- Get_Universal_Integer_Type -------------------------------------------------------------------------------- function Get_Universal_Integer_Type return RawDict.Type_Info_Ref --# global in Dict; is begin return Dict.Types.The_Universal_Types (Universal_Integer_Type); end Get_Universal_Integer_Type; -------------------------------------------------------------------------------- function GetUniversalIntegerType return Symbol is begin return RawDict.Get_Type_Symbol (Get_Universal_Integer_Type); -- GAA External end GetUniversalIntegerType; -------------------------------------------------------------------------------- function IsUniversalIntegerType (TheSymbol : Symbol) return Boolean is begin return RawDict.Get_Type_Info_Ref (TheSymbol) = Get_Universal_Integer_Type; -- GAA External end IsUniversalIntegerType; -------------------------------------------------------------------------------- -- Get_Predefined_Integer_Type -------------------------------------------------------------------------------- function Get_Predefined_Integer_Type return RawDict.Type_Info_Ref --# global in Dict; is begin return Dict.Types.The_Predefined_Types (Predefined_Integer_Type); end Get_Predefined_Integer_Type; -------------------------------------------------------------------------------- function GetPredefinedIntegerType return Symbol is begin return RawDict.Get_Type_Symbol (Get_Predefined_Integer_Type); -- GAA External end GetPredefinedIntegerType; -------------------------------------------------------------------------------- function IsPredefinedIntegerType (TheSymbol : Symbol) return Boolean is begin return RawDict.Get_Type_Info_Ref (TheSymbol) = Get_Predefined_Integer_Type; -- GAA 
External end IsPredefinedIntegerType; -------------------------------------------------------------------------------- -- Get_Predefined_Long_Integer_Type -------------------------------------------------------------------------------- function Get_Predefined_Long_Integer_Type return RawDict.Type_Info_Ref --# global in Dict; is begin return Dict.Types.The_Predefined_Types (Predefined_Long_Integer_Type); end Get_Predefined_Long_Integer_Type; -------------------------------------------------------------------------------- function GetPredefinedLongIntegerType return Symbol is begin return RawDict.Get_Type_Symbol (Get_Predefined_Long_Integer_Type); -- GAA External end GetPredefinedLongIntegerType; -------------------------------------------------------------------------------- -- Get_Predefined_Float_Type -------------------------------------------------------------------------------- function Get_Predefined_Float_Type return RawDict.Type_Info_Ref --# global in Dict; is begin return Dict.Types.The_Predefined_Types (Predefined_Float_Type); end Get_Predefined_Float_Type; -------------------------------------------------------------------------------- function IsPredefinedFloatType (TheSymbol : Symbol) return Boolean is begin return RawDict.Get_Type_Info_Ref (TheSymbol) = Get_Predefined_Float_Type; -- GAA External end IsPredefinedFloatType; -------------------------------------------------------------------------------- -- Get_Predefined_Time_Type -------------------------------------------------------------------------------- function Get_Predefined_Time_Type return RawDict.Type_Info_Ref --# global in Dict; is begin return Dict.Types.The_Predefined_Types (Predefined_Time_Type); end Get_Predefined_Time_Type; -------------------------------------------------------------------------------- function GetPredefinedTimeType return Symbol is begin return RawDict.Get_Type_Symbol (Get_Predefined_Time_Type); -- GAA External end GetPredefinedTimeType; -------------------------------------------------------------------------------- -- Get_Predefined_Time_Span_Type -------------------------------------------------------------------------------- function Get_Predefined_Time_Span_Type return RawDict.Type_Info_Ref --# global in Dict; is begin return Dict.Types.The_Predefined_Types (Predefined_Time_Span_Type); end Get_Predefined_Time_Span_Type; -------------------------------------------------------------------------------- function IsPredefinedTimeSpanType (TheSymbol : Symbol) return Boolean is begin return RawDict.Get_Type_Info_Ref (TheSymbol) = Get_Predefined_Time_Span_Type; -- GAA External end IsPredefinedTimeSpanType; -------------------------------------------------------------------------------- -- Get_Null_Variable -------------------------------------------------------------------------------- function Get_Null_Variable return RawDict.Variable_Info_Ref --# global in Dict; is begin return Dict.Null_Variable; end Get_Null_Variable; -------------------------------------------------------------------------------- function GetNullVariable return Symbol is begin return RawDict.Get_Variable_Symbol (Get_Null_Variable); -- GAA External end GetNullVariable; -------------------------------------------------------------------------------- function Is_Null_Variable (TheVariable : Symbol) return Boolean is begin return RawDict.GetSymbolDiscriminant (TheVariable) = Variable_Symbol and then RawDict.Get_Variable_Info_Ref (TheVariable) = Get_Null_Variable; -- GAA External end Is_Null_Variable; 
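   -- Illustrative sketch (not from the original source; Type_Sym and
   -- Is_Integer_Valued are assumed example names).  The predefined- and
   -- universal-type queries above are typically combined when classifying a
   -- type symbol, e.g.:
   --
   --    Is_Integer_Valued : constant Boolean :=
   --      Dictionary.IsPredefinedIntegerType (Type_Sym)
   --        or else Dictionary.IsUniversalIntegerType (Type_Sym);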
-------------------------------------------------------------------------------- -- Get_The_Partition -------------------------------------------------------------------------------- function Get_The_Partition return RawDict.Subprogram_Info_Ref --# global in Dict; is begin return Dict.The_Partition; end Get_The_Partition; -------------------------------------------------------------------------------- function GetThePartition return Symbol is begin return RawDict.Get_Subprogram_Symbol (Get_The_Partition); -- GAA External end GetThePartition; -------------------------------------------------------------------------------- -- Is_Predefined_Time_Type -------------------------------------------------------------------------------- function Is_Predefined_Time_Type (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return Type_Mark = Get_Predefined_Time_Type or else Type_Mark = Get_Predefined_Time_Span_Type; end Is_Predefined_Time_Type; -------------------------------------------------------------------------------- function IsPredefinedTimeType (TheSymbol : Symbol) return Boolean is begin return Is_Predefined_Time_Type (Type_Mark => RawDict.Get_Type_Info_Ref (TheSymbol)); -- GAA External end IsPredefinedTimeType; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function CurrentSymbol (CurrentIterator : Iterator) return Symbol is Result : Symbol; begin case CurrentIterator.Discriminant is when NullSymIterator .. EmbeddedPackageIterator => Result := CurrentIterator.Current; when OwnVariableIterator .. AbstractOwnVariableIterator => Result := RawDict.Get_Variable_Symbol (RawDict.Get_Own_Variable_Variable (The_Own_Variable => RawDict.Get_Own_Variable_Info_Ref (CurrentIterator.Current))); when ConstituentIterator => Result := RawDict.Get_Variable_Symbol (RawDict.Get_Constituent_Variable (The_Constituent => RawDict.Get_Constituent_Info_Ref (CurrentIterator.Current))); end case; return Result; end CurrentSymbol; -------------------------------------------------------------------------------- function GetEnclosingObject (Object : Symbol) return Symbol is begin return RawDict.Get_Subcomponent_Object (The_Subcomponent => RawDict.Get_Subcomponent_Info_Ref (Object)); -- GAA External end GetEnclosingObject; -------------------------------------------------------------------------------- function GetMostEnclosingObject (Object : Symbol) return Symbol is Current : Symbol; begin Current := Object; while RawDict.GetSymbolDiscriminant (Current) = Subcomponent_Symbol loop Current := RawDict.Get_Subcomponent_Object (The_Subcomponent => RawDict.Get_Subcomponent_Info_Ref (Item => Current)); end loop; return Current; end GetMostEnclosingObject; -------------------------------------------------------------------------------- function Is_Constant (TheSymbol : Symbol) return Boolean is Result : Boolean; begin case RawDict.GetSymbolDiscriminant (TheSymbol) is when Constant_Symbol => Result := True; when Generic_Parameter_Symbol => -- we can regard a generic formal object as a constant since we will only allow -- it to be instantiated with a constant Result := RawDict.Get_Generic_Parameter_Kind (The_Generic_Parameter => RawDict.Get_Generic_Parameter_Info_Ref (Item => TheSymbol)) = -- GAA External Generic_Object_Parameter; when others => Result := False; end case; return Result; end Is_Constant; -------------------------------------------------------------------------------- -- 
First_Global_Variable -------------------------------------------------------------------------------- function First_Subprogram_Global_Variable (The_Subprogram : RawDict.Subprogram_Info_Ref; Abstraction : Abstractions) return Iterator --# global in Dict; is function First_Global_Variable (The_Global_Variable : RawDict.Global_Variable_Info_Ref) return Iterator --# global in Dict; is Global_Variables : Iterator := NullIterator; begin if The_Global_Variable /= RawDict.Null_Global_Variable_Info_Ref then case RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) is when RawDict.Subprogram_Variable_Item => Global_Variables := Iterator' (GlobalVariableIterator, IsAbstract, RawDict.Get_Variable_Symbol (RawDict.Get_Global_Variable_Variable (The_Global_Variable => The_Global_Variable)), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); when RawDict.Subprogram_Parameter_Item => Global_Variables := Iterator' (GlobalVariableIterator, IsAbstract, RawDict.Get_Subprogram_Parameter_Symbol (RawDict.Get_Global_Variable_Parameter (The_Global_Variable => The_Global_Variable)), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); when others => -- non-exec code Global_Variables := NullIterator; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.First_Global_Variable"); end case; end if; return Global_Variables; end First_Global_Variable; begin -- First_Subprogram_Global_Variable return First_Global_Variable (The_Global_Variable => RawDict.Get_Subprogram_First_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction)); end First_Subprogram_Global_Variable; -------------------------------------------------------------------------------- function First_Task_Type_Global_Variable (The_Task_Type : RawDict.Type_Info_Ref; Abstraction : Abstractions) return Iterator --# global in Dict; is function First_Global_Variable (The_Global_Variable : RawDict.Global_Variable_Info_Ref) return Iterator --# global in Dict; is Global_Variables : Iterator := NullIterator; begin if The_Global_Variable /= RawDict.Null_Global_Variable_Info_Ref then Global_Variables := Iterator' (GlobalVariableIterator, IsAbstract, RawDict.Get_Variable_Symbol (RawDict.Get_Global_Variable_Variable (The_Global_Variable => The_Global_Variable)), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); end if; return Global_Variables; end First_Global_Variable; begin -- First_Task_Type_Global_Variable return First_Global_Variable (The_Global_Variable => RawDict.Get_Task_Type_First_Global_Variable (The_Task_Type => The_Task_Type, Abstraction => Abstraction)); end First_Task_Type_Global_Variable; -------------------------------------------------------------------------------- function FirstGlobalVariable (Abstraction : Abstractions; Subprogram : Symbol) return Iterator is Global_Variables : Iterator; begin case RawDict.GetSymbolDiscriminant (Subprogram) is when Subprogram_Symbol => Global_Variables := First_Subprogram_Global_Variable (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Subprogram), -- GAA External Abstraction => Abstraction); when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Subprogram)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.FirstGlobalVariable"); Global_Variables := First_Task_Type_Global_Variable (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => Subprogram), -- GAA External Abstraction => Abstraction); when others => -- 
non-exec code Global_Variables := NullIterator; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.FirstGlobalVariable"); end case; return Global_Variables; end FirstGlobalVariable; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function IsVariableOrSubcomponent (TheSymbol : Symbol) return Boolean is begin return Is_Variable (TheSymbol) or else RawDict.GetSymbolDiscriminant (TheSymbol) = Subcomponent_Symbol; end IsVariableOrSubcomponent; -------------------------------------------------------------------------------- -- Get_First_Record_Subcomponent -------------------------------------------------------------------------------- function GetFirstRecordSubcomponent (TheSymbol : Symbol) return Symbol is function Get_First_Record_Subcomponent (TheSymbol : Symbol) return RawDict.Subcomponent_Info_Ref --# global in Dict; is The_Subcomponent : RawDict.Subcomponent_Info_Ref; begin case RawDict.GetSymbolDiscriminant (TheSymbol) is when Subcomponent_Symbol => The_Subcomponent := RawDict.Get_Subcomponent_Subcomponents (The_Subcomponent => RawDict.Get_Subcomponent_Info_Ref (Item => TheSymbol)); -- GAA External when Variable_Symbol => The_Subcomponent := RawDict.Get_Variable_Subcomponents (The_Variable => RawDict.Get_Variable_Info_Ref (Item => TheSymbol)); -- GAA External when Subprogram_Parameter_Symbol => The_Subcomponent := RawDict.Get_Subprogram_Parameter_Subcomponents (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => TheSymbol)); -- GAA External when others => The_Subcomponent := RawDict.Null_Subcomponent_Info_Ref; end case; return The_Subcomponent; end Get_First_Record_Subcomponent; begin -- GetFirstRecordSubcomponent return RawDict.Get_Subcomponent_Symbol (Get_First_Record_Subcomponent (TheSymbol)); -- GAA External end GetFirstRecordSubcomponent; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function GetNextRecordSubcomponent (TheSubcomponent : Symbol) return Symbol is begin return RawDict.Get_Subcomponent_Symbol -- GAA External (RawDict.Get_Next_Subcomponent (The_Subcomponent => RawDict.Get_Subcomponent_Info_Ref (TheSubcomponent))); -- GAA External end GetNextRecordSubcomponent; -------------------------------------------------------------------------------- function VariableIsInitialized (Variable : Symbol) return Boolean is begin return RawDict.Get_Variable_Initialized (The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External end VariableIsInitialized; -------------------------------------------------------------------------------- function VariableHasAddressClause (Variable : Symbol) return Boolean is begin return RawDict.Get_Variable_Has_Address_Clause (The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External end VariableHasAddressClause; -------------------------------------------------------------------------------- function VariableHasPragmaImport (Variable : Symbol) return Boolean is begin return RawDict.Get_Variable_Has_Pragma_Import (The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External end VariableHasPragmaImport; -------------------------------------------------------------------------------- function TypeSizeAttribute (TypeMark : Symbol) return LexTokenManager.Lex_String is begin return RawDict.Get_Type_Size_Attribute 
        (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark)); -- GAA External
   end TypeSizeAttribute;

   --------------------------------------------------------------------------------

   function VariableIsAliased (Variable : Symbol) return Boolean is
   begin
      return RawDict.Get_Variable_Is_Aliased (The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External
   end VariableIsAliased;

   --------------------------------------------------------------------------------

   function VariableIsMarkedValid (TheVariable : Symbol) return Boolean is
   begin
      return RawDict.Get_Variable_Marked_Valid (The_Variable => RawDict.Get_Variable_Info_Ref (TheVariable)); -- GAA External
   end VariableIsMarkedValid;

   --------------------------------------------------------------------------------

   function SubcomponentIsMarkedValid (TheSubcomponent : Symbol) return Boolean is
   begin
      return RawDict.Get_Subcomponent_Marked_Valid
        (The_Subcomponent => RawDict.Get_Subcomponent_Info_Ref (TheSubcomponent)); -- GAA External
   end SubcomponentIsMarkedValid;

   --------------------------------------------------------------------------------

   function VariableOrSubcomponentIsMarkedValid (TheSym : Symbol) return Boolean is
      Result : Boolean;
   begin
      case RawDict.GetSymbolDiscriminant (TheSym) is
         when Variable_Symbol =>
            Result := RawDict.Get_Variable_Marked_Valid (The_Variable => RawDict.Get_Variable_Info_Ref (Item => TheSym)); -- GAA External
         when Subcomponent_Symbol =>
            Result := RawDict.Get_Subcomponent_Marked_Valid
              (The_Subcomponent => RawDict.Get_Subcomponent_Info_Ref (Item => TheSym)); -- GAA External
         when LoopEntryVariableSymbol | Quantified_Variable_Symbol | Implicit_Return_Variable_Symbol |
           Subprogram_Parameter_Symbol | LoopParameterSymbol =>
            Result := True;
         when others => -- non-exec code
            Result := False;
            SystemErrors.Fatal_Error
              (Sys_Err => SystemErrors.Invalid_Symbol_Table,
               Msg     => "in Dictionary.VariableOrSubcomponentIsMarkedValid");
      end case;
      return Result;
   end VariableOrSubcomponentIsMarkedValid;

   --------------------------------------------------------------------------------

   function SetsPriority (TheDiscriminant : Symbol) return Boolean is
   begin
      return RawDict.GetDiscriminantSetsPriority (TheDiscriminant);
   end SetsPriority;

   --------------------------------------------------------------------------------

   function IsFormalParameter (Subprogram, Parameter : Symbol) return Boolean is
   begin
      return RawDict.GetSymbolDiscriminant (Parameter) = Subprogram_Parameter_Symbol
        and then RawDict.GetSymbolDiscriminant (Subprogram) = Subprogram_Symbol
        and then RawDict.Get_Subprogram_Parameter_Subprogram
        (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => Parameter)) = -- GAA External
        RawDict.Get_Subprogram_Info_Ref (Item => Subprogram); -- GAA External
   end IsFormalParameter;

   --------------------------------------------------------------------------------

   function IsGenericFormalParameter (TheGeneric, Parameter : Symbol) return Boolean is
      Parameter_Or_Constant                 : RawDict.Generic_Parameter_Info_Ref;
      Associated_Generic_Formal_Of_Constant : RawDict.Generic_Parameter_Info_Ref;
   begin
      -- if the generic formal is an object then a constant will have been declared in the visible
      -- scope of the generic unit and linked to the list of its generic formal parameters. In many
      -- cases, e.g. in the VCG, the symbol that we will find when looking up a generic formal object parameter
      -- will be the symbol of this implicitly-declared constant. We want such constants to be correctly identified
      -- as generic formal parameters.
      -- The following test checks for this case before going on to check whether
      -- the parameter supplied is the one we are looking for.
      case RawDict.GetSymbolDiscriminant (Parameter) is
         when Constant_Symbol =>
            -- if it's a constant then it might be an implicitly-declared constant associated with a generic formal
            Associated_Generic_Formal_Of_Constant :=
              RawDict.Get_Constant_Associated_Generic_Parameter
              (The_Constant => RawDict.Get_Constant_Info_Ref (Item => Parameter)); -- GAA External
            if Associated_Generic_Formal_Of_Constant /= RawDict.Null_Generic_Parameter_Info_Ref then
               -- then it really is an implicitly-declared constant associated with the generic formal
               Parameter_Or_Constant := Associated_Generic_Formal_Of_Constant;
            else
               Parameter_Or_Constant := RawDict.Null_Generic_Parameter_Info_Ref;
            end if;
         when Generic_Parameter_Symbol =>
            Parameter_Or_Constant := RawDict.Get_Generic_Parameter_Info_Ref (Item => Parameter); -- GAA External
         when others =>
            Parameter_Or_Constant := RawDict.Null_Generic_Parameter_Info_Ref;
      end case;

      -- now check whether Parameter, perhaps modified by code above, is actually a generic formal of the TheGeneric
      return Parameter_Or_Constant /= RawDict.Null_Generic_Parameter_Info_Ref
        and then RawDict.Get_Generic_Unit_Owning_Subprogram
        (The_Generic_Unit => RawDict.Get_Generic_Parameter_Owning_Generic (The_Generic_Parameter => Parameter_Or_Constant)) =
        RawDict.Get_Subprogram_Info_Ref (TheGeneric); -- GAA External
   end IsGenericFormalParameter;

   --------------------------------------------------------------------------------

   function Is_Subprogram (TheSymbol : Symbol) return Boolean
   --# return RawDict.GetSymbolDiscriminant (TheSymbol, Dict) = Subprogram_Symbol or
   --#        RawDict.GetSymbolDiscriminant (TheSymbol, Dict) = ImplicitProofFunctionSymbol;
   is
   begin
      return RawDict.GetSymbolDiscriminant (TheSymbol) = Subprogram_Symbol
        or else RawDict.GetSymbolDiscriminant (TheSymbol) = ImplicitProofFunctionSymbol;
   end Is_Subprogram;

   --------------------------------------------------------------------------------

   function Is_Generic_Subprogram (The_Symbol : Symbol) return Boolean is
      The_Subprogram : RawDict.Subprogram_Info_Ref;
   begin
      case RawDict.GetSymbolDiscriminant (The_Symbol) is
         when Subprogram_Symbol =>
            The_Subprogram := RawDict.Get_Subprogram_Info_Ref (Item => The_Symbol);
         when ImplicitProofFunctionSymbol =>
            The_Subprogram := RawDict.GetImplicitProofFunctionAdaFunction (The_Symbol);
         when others =>
            The_Subprogram := RawDict.Null_Subprogram_Info_Ref;
      end case;
      return RawDict.Get_Subprogram_Generic_Unit (The_Subprogram => The_Subprogram) /= RawDict.Null_Generic_Unit_Info_Ref;
   end Is_Generic_Subprogram;

   --------------------------------------------------------------------------------
   -- Get_Precondition
   --------------------------------------------------------------------------------

   function GetPrecondition (Abstraction : Abstractions; Subprogram : Symbol) return ExaminerConstants.RefType is
      Precondition : ExaminerConstants.RefType;

      --------------------------------------------------------------------------------

      function Get_Precondition
        (The_Subprogram : RawDict.Subprogram_Info_Ref;
         Abstraction    : Abstractions) return ExaminerConstants.RefType
      --# global in Dict;
      is
         Precondition : ExaminerConstants.RefType;
      begin
         Precondition := RawDict.Get_Subprogram_Precondition (The_Subprogram => The_Subprogram, Abstraction => Abstraction);
         if Precondition = 0
           and then RawDict.Get_Subprogram_Instantiation_Of (The_Subprogram => The_Subprogram) /= RawDict.Null_Subprogram_Info_Ref then
            Precondition :=
RawDict.Get_Subprogram_Precondition (The_Subprogram => RawDict.Get_Subprogram_Instantiation_Of (The_Subprogram => The_Subprogram), Abstraction => Abstraction); end if; return Precondition; end Get_Precondition; begin -- GetPrecondition case RawDict.GetSymbolDiscriminant (Subprogram) is when Subprogram_Symbol => Precondition := Get_Precondition (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Subprogram), -- GAA External Abstraction => Abstraction); when ImplicitProofFunctionSymbol => Precondition := Get_Precondition (The_Subprogram => RawDict.GetImplicitProofFunctionAdaFunction (Subprogram), Abstraction => Abstraction); when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Subprogram)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.GetPrecondition"); Precondition := 0; -- valid but pointless request for a task's precondition when others => -- non-exec code Precondition := 0; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.GetPrecondition"); end case; return Precondition; end GetPrecondition; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function HasPrecondition (Abstraction : Abstractions; Subprogram : Symbol) return Boolean is begin return GetPrecondition (Abstraction, Subprogram) /= 0; end HasPrecondition; -------------------------------------------------------------------------------- -- Get_Postcondition -------------------------------------------------------------------------------- function GetPostcondition (Abstraction : Abstractions; Subprogram : Symbol) return ExaminerConstants.RefType is Postcondition : ExaminerConstants.RefType; -------------------------------------------------------------------------------- function Get_Postcondition (The_Subprogram : RawDict.Subprogram_Info_Ref; Abstraction : Abstractions) return ExaminerConstants.RefType --# global in Dict; is Postcondition : ExaminerConstants.RefType; begin Postcondition := RawDict.Get_Subprogram_Postcondition (The_Subprogram => The_Subprogram, Abstraction => Abstraction); if Postcondition = 0 and then RawDict.Get_Subprogram_Instantiation_Of (The_Subprogram => The_Subprogram) /= RawDict.Null_Subprogram_Info_Ref then Postcondition := RawDict.Get_Subprogram_Postcondition (The_Subprogram => RawDict.Get_Subprogram_Instantiation_Of (The_Subprogram => The_Subprogram), Abstraction => Abstraction); end if; return Postcondition; end Get_Postcondition; begin -- GetPostcondition case RawDict.GetSymbolDiscriminant (Subprogram) is when Subprogram_Symbol => Postcondition := Get_Postcondition (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Subprogram), -- GAA External Abstraction => Abstraction); when ImplicitProofFunctionSymbol => Postcondition := Get_Postcondition (The_Subprogram => RawDict.GetImplicitProofFunctionAdaFunction (Subprogram), Abstraction => Abstraction); when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Subprogram)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.GetPostcondition"); Postcondition := 0; -- valid but pointless request for a task's postcondition when others => -- non-exec code Postcondition := 0; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.GetPostcondition"); end case; return Postcondition; 
end GetPostcondition; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function HasPostcondition (Abstraction : Abstractions; Subprogram : Symbol) return Boolean is begin return GetPostcondition (Abstraction, Subprogram) /= 0; end HasPostcondition; -------------------------------------------------------------------------------- -- Is_Import -------------------------------------------------------------------------------- function Is_Imported_Variable (The_Global_Variable : RawDict.Global_Variable_Info_Ref; The_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is Tmp_Global_Variable : RawDict.Global_Variable_Info_Ref; Result : Boolean; begin Tmp_Global_Variable := The_Global_Variable; loop if Tmp_Global_Variable = RawDict.Null_Global_Variable_Info_Ref then Result := False; exit; end if; if (RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => Tmp_Global_Variable) = RawDict.Subprogram_Variable_Item or else RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => Tmp_Global_Variable) = RawDict.Task_Type_Variable_Item) and then RawDict.Get_Global_Variable_Variable (The_Global_Variable => Tmp_Global_Variable) = The_Variable then Result := RawDict.Get_Global_Variable_Imported (The_Global_Variable => Tmp_Global_Variable); exit; end if; Tmp_Global_Variable := RawDict.Get_Next_Global_Variable (The_Global_Variable => Tmp_Global_Variable); end loop; return Result; end Is_Imported_Variable; -------------------------------------------------------------------------------- function Is_Imported_Subprogram_Variable (The_Subprogram : RawDict.Subprogram_Info_Ref; Abstraction : Abstractions; The_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is begin return Is_Imported_Variable (The_Global_Variable => RawDict.Get_Subprogram_First_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction), The_Variable => The_Variable); end Is_Imported_Subprogram_Variable; -------------------------------------------------------------------------------- function Is_Imported_Subprogram_Subprogram_Parameter (The_Subprogram : RawDict.Subprogram_Info_Ref; Abstraction : Abstractions; The_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref; Is_Implicit_Proof_Function : Boolean) return Boolean --# global in Dict; is Result : Boolean; -------------------------------------------------------------------------------- function Is_Imported_Subprogram_Parameter (The_Global_Variable : RawDict.Global_Variable_Info_Ref; The_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref) return Boolean --# global in Dict; is Tmp_Global_Variable : RawDict.Global_Variable_Info_Ref; Result : Boolean; begin Tmp_Global_Variable := The_Global_Variable; loop if Tmp_Global_Variable = RawDict.Null_Global_Variable_Info_Ref then Result := False; exit; end if; if RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => Tmp_Global_Variable) = RawDict.Subprogram_Parameter_Item and then RawDict.Get_Global_Variable_Parameter (The_Global_Variable => Tmp_Global_Variable) = The_Subprogram_Parameter then Result := RawDict.Get_Global_Variable_Imported (The_Global_Variable => Tmp_Global_Variable); exit; end if; Tmp_Global_Variable := RawDict.Get_Next_Global_Variable (The_Global_Variable => Tmp_Global_Variable); end loop; return Result; end Is_Imported_Subprogram_Parameter; begin -- Is_Imported_Subprogram_Subprogram_Parameter if Is_Implicit_Proof_Function then Result := 
False; else Result := RawDict.Get_Subprogram_Parameter_Subprogram (The_Subprogram_Parameter => The_Subprogram_Parameter) = The_Subprogram and then RawDict.Get_Subprogram_Parameter_Imported (The_Subprogram_Parameter => The_Subprogram_Parameter, Abstraction => Abstraction); end if; if not Result then Result := Is_Imported_Subprogram_Parameter (The_Global_Variable => RawDict.Get_Subprogram_First_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction), The_Subprogram_Parameter => The_Subprogram_Parameter); end if; return Result; end Is_Imported_Subprogram_Subprogram_Parameter; -------------------------------------------------------------------------------- function Is_Imported_Task_Type_Variable (The_Task_Type : RawDict.Type_Info_Ref; Abstraction : Abstractions; The_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is begin return Is_Imported_Variable (The_Global_Variable => RawDict.Get_Task_Type_First_Global_Variable (The_Task_Type => The_Task_Type, Abstraction => Abstraction), The_Variable => The_Variable); end Is_Imported_Task_Type_Variable; -------------------------------------------------------------------------------- function IsImport (Abstraction : Abstractions; TheProcedure : Symbol; Variable : Symbol) return Boolean is Result : Boolean; begin -- allow use with task type as well as procedure case RawDict.GetSymbolDiscriminant (TheProcedure) is when Subprogram_Symbol => case RawDict.GetSymbolDiscriminant (Variable) is when Subprogram_Parameter_Symbol => Result := Is_Imported_Subprogram_Subprogram_Parameter (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheProcedure), -- GAA External Abstraction => Abstraction, The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => Variable), -- GAA External Is_Implicit_Proof_Function => False); when Variable_Symbol => Result := Is_Imported_Subprogram_Variable (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheProcedure), -- GAA External Abstraction => Abstraction, The_Variable => RawDict.Get_Variable_Info_Ref (Item => Variable)); -- GAA External when others => -- non-exec code Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.IsImport"); end case; when ImplicitProofFunctionSymbol => case RawDict.GetSymbolDiscriminant (Variable) is when Subprogram_Parameter_Symbol => Result := Is_Imported_Subprogram_Subprogram_Parameter (The_Subprogram => RawDict.GetImplicitProofFunctionAdaFunction (TheProcedure), Abstraction => Abstraction, The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => Variable), -- GAA External Is_Implicit_Proof_Function => True); when Variable_Symbol => Result := Is_Imported_Subprogram_Variable (The_Subprogram => RawDict.GetImplicitProofFunctionAdaFunction (TheProcedure), Abstraction => Abstraction, The_Variable => RawDict.Get_Variable_Info_Ref (Item => Variable)); -- GAA External when others => -- non-exec code Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.IsImport"); end case; when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheProcedure)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.IsImport"); Result := Is_Imported_Task_Type_Variable (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => TheProcedure), -- GAA External Abstraction => Abstraction, The_Variable => RawDict.Get_Variable_Info_Ref 
(Variable)); -- GAA External when others => -- non-exec code Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.IsImport"); end case; return Result; end IsImport; -------------------------------------------------------------------------------- -- Is_Export -------------------------------------------------------------------------------- function Is_Exported_Variable (The_Global_Variable : RawDict.Global_Variable_Info_Ref; The_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is Tmp_Global_Variable : RawDict.Global_Variable_Info_Ref; Result : Boolean; begin Tmp_Global_Variable := The_Global_Variable; loop if Tmp_Global_Variable = RawDict.Null_Global_Variable_Info_Ref then Result := False; exit; end if; if (RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => Tmp_Global_Variable) = RawDict.Subprogram_Variable_Item or else RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => Tmp_Global_Variable) = RawDict.Task_Type_Variable_Item) and then RawDict.Get_Global_Variable_Variable (The_Global_Variable => Tmp_Global_Variable) = The_Variable then Result := RawDict.Get_Global_Variable_Exported (The_Global_Variable => Tmp_Global_Variable); exit; end if; Tmp_Global_Variable := RawDict.Get_Next_Global_Variable (The_Global_Variable => Tmp_Global_Variable); end loop; return Result; end Is_Exported_Variable; -------------------------------------------------------------------------------- function Is_Exported_Subprogram_Variable (The_Subprogram : RawDict.Subprogram_Info_Ref; Abstraction : Abstractions; The_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is begin return Is_Exported_Variable (The_Global_Variable => RawDict.Get_Subprogram_First_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction), The_Variable => The_Variable); end Is_Exported_Subprogram_Variable; -------------------------------------------------------------------------------- function Is_Exported_Subprogram_Subprogram_Parameter (The_Subprogram : RawDict.Subprogram_Info_Ref; Abstraction : Abstractions; The_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref; Is_Implicit_Proof_Function : Boolean) return Boolean --# global in Dict; is Result : Boolean; -------------------------------------------------------------------------------- function Is_Exported_Subprogram_Parameter (The_Global_Variable : RawDict.Global_Variable_Info_Ref; The_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref) return Boolean --# global in Dict; is Tmp_Global_Variable : RawDict.Global_Variable_Info_Ref; Result : Boolean; begin Tmp_Global_Variable := The_Global_Variable; loop if Tmp_Global_Variable = RawDict.Null_Global_Variable_Info_Ref then Result := False; exit; end if; if RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => Tmp_Global_Variable) = RawDict.Subprogram_Parameter_Item and then RawDict.Get_Global_Variable_Parameter (The_Global_Variable => Tmp_Global_Variable) = The_Subprogram_Parameter then Result := RawDict.Get_Global_Variable_Exported (The_Global_Variable => Tmp_Global_Variable); exit; end if; Tmp_Global_Variable := RawDict.Get_Next_Global_Variable (The_Global_Variable => Tmp_Global_Variable); end loop; return Result; end Is_Exported_Subprogram_Parameter; begin -- Is_Exported_Subprogram_Subprogram_Parameter if Is_Implicit_Proof_Function then Result := False; else Result := RawDict.Get_Subprogram_Parameter_Subprogram (The_Subprogram_Parameter => The_Subprogram_Parameter) = The_Subprogram 
and then RawDict.Get_Subprogram_Parameter_Exported (The_Subprogram_Parameter => The_Subprogram_Parameter, Abstraction => Abstraction); end if; if not Result then Result := Is_Exported_Subprogram_Parameter (The_Global_Variable => RawDict.Get_Subprogram_First_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction), The_Subprogram_Parameter => The_Subprogram_Parameter); end if; return Result; end Is_Exported_Subprogram_Subprogram_Parameter; -------------------------------------------------------------------------------- function Is_Exported_Task_Type_Variable (The_Task_Type : RawDict.Type_Info_Ref; Abstraction : Abstractions; The_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is begin return Is_Exported_Variable (The_Global_Variable => RawDict.Get_Task_Type_First_Global_Variable (The_Task_Type => The_Task_Type, Abstraction => Abstraction), The_Variable => The_Variable); end Is_Exported_Task_Type_Variable; -------------------------------------------------------------------------------- function IsExport (Abstraction : Abstractions; TheProcedure : Symbol; Variable : Symbol) return Boolean is Result : Boolean; begin -- allow use with task type as well as procedure case RawDict.GetSymbolDiscriminant (TheProcedure) is when Subprogram_Symbol => case RawDict.GetSymbolDiscriminant (Variable) is when Subprogram_Parameter_Symbol => Result := Is_Exported_Subprogram_Subprogram_Parameter (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheProcedure), -- GAA External Abstraction => Abstraction, The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => Variable), Is_Implicit_Proof_Function => False); when Variable_Symbol => Result := Is_Exported_Subprogram_Variable (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheProcedure), -- GAA External Abstraction => Abstraction, The_Variable => RawDict.Get_Variable_Info_Ref (Item => Variable)); -- GAA External when Parameter_Constraint_Symbol | Quantified_Variable_Symbol | Type_Symbol => Result := False; when LoopParameterSymbol => Result := False; when others => -- non-exec code Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.IsExport"); end case; when ImplicitProofFunctionSymbol => case RawDict.GetSymbolDiscriminant (Variable) is when Subprogram_Parameter_Symbol => Result := Is_Exported_Subprogram_Subprogram_Parameter (The_Subprogram => RawDict.GetImplicitProofFunctionAdaFunction (TheProcedure), Abstraction => Abstraction, The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => Variable), -- GAA External Is_Implicit_Proof_Function => True); when Variable_Symbol => Result := Is_Exported_Subprogram_Variable (The_Subprogram => RawDict.GetImplicitProofFunctionAdaFunction (TheProcedure), Abstraction => Abstraction, The_Variable => RawDict.Get_Variable_Info_Ref (Item => Variable)); -- GAA External when Parameter_Constraint_Symbol | Quantified_Variable_Symbol | Type_Symbol => Result := False; when others => -- non-exec code Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.IsExport"); end case; when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheProcedure)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.IsExport"); Result := Is_Exported_Task_Type_Variable (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => TheProcedure), -- GAA External 
Abstraction => Abstraction, The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External when others => -- non-exec code Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.IsExport"); end case; return Result; end IsExport; -------------------------------------------------------------------------------- -- Is_Import_Export -------------------------------------------------------------------------------- function Is_Subprogram_Import_Export_Variable (The_Subprogram : RawDict.Subprogram_Info_Ref; Abstraction : Abstractions; The_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is begin return Is_Imported_Subprogram_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Variable => The_Variable) and then Is_Exported_Subprogram_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Variable => The_Variable); end Is_Subprogram_Import_Export_Variable; -------------------------------------------------------------------------------- function Is_Subprogram_Import_Export_Subprogram_Parameter (The_Subprogram : RawDict.Subprogram_Info_Ref; Abstraction : Abstractions; The_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref; Is_Implicit_Proof_Function : Boolean) return Boolean --# global in Dict; is begin return Is_Imported_Subprogram_Subprogram_Parameter (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Subprogram_Parameter => The_Subprogram_Parameter, Is_Implicit_Proof_Function => Is_Implicit_Proof_Function) and then Is_Exported_Subprogram_Subprogram_Parameter (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Subprogram_Parameter => The_Subprogram_Parameter, Is_Implicit_Proof_Function => Is_Implicit_Proof_Function); end Is_Subprogram_Import_Export_Subprogram_Parameter; -------------------------------------------------------------------------------- function Is_Task_Type_Import_Export (The_Task_Type : RawDict.Type_Info_Ref; Abstraction : Abstractions; The_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is begin return Is_Imported_Task_Type_Variable (The_Task_Type => The_Task_Type, Abstraction => Abstraction, The_Variable => The_Variable) and then Is_Exported_Task_Type_Variable (The_Task_Type => The_Task_Type, Abstraction => Abstraction, The_Variable => The_Variable); end Is_Task_Type_Import_Export; -------------------------------------------------------------------------------- function IsImportExport (Abstraction : Abstractions; TheProcedure : Symbol; Variable : Symbol) return Boolean is Result : Boolean; begin case RawDict.GetSymbolDiscriminant (TheProcedure) is when Subprogram_Symbol => case RawDict.GetSymbolDiscriminant (Variable) is when Subprogram_Parameter_Symbol => Result := Is_Subprogram_Import_Export_Subprogram_Parameter (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheProcedure), -- GAA External Abstraction => Abstraction, The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => Variable), -- GAA External Is_Implicit_Proof_Function => False); when Variable_Symbol => Result := Is_Subprogram_Import_Export_Variable (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheProcedure), -- GAA External Abstraction => Abstraction, The_Variable => RawDict.Get_Variable_Info_Ref (Item => Variable)); -- GAA External when Parameter_Constraint_Symbol | Quantified_Variable_Symbol | Type_Symbol => Result := False; when others => -- non-exec code Result := False; 
SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.IsImportExport"); end case; when ImplicitProofFunctionSymbol => case RawDict.GetSymbolDiscriminant (Variable) is when Subprogram_Parameter_Symbol => Result := Is_Subprogram_Import_Export_Subprogram_Parameter (The_Subprogram => RawDict.GetImplicitProofFunctionAdaFunction (TheProcedure), Abstraction => Abstraction, The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => Variable), -- GAA External Is_Implicit_Proof_Function => True); when Variable_Symbol => Result := Is_Subprogram_Import_Export_Variable (The_Subprogram => RawDict.GetImplicitProofFunctionAdaFunction (TheProcedure), Abstraction => Abstraction, The_Variable => RawDict.Get_Variable_Info_Ref (Item => Variable)); -- GAA External when Parameter_Constraint_Symbol | Quantified_Variable_Symbol | Type_Symbol => Result := False; when others => -- non-exec code Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.IsImportExport"); end case; when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheProcedure)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.IsImportExport"); Result := Is_Task_Type_Import_Export (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => TheProcedure), -- GAA External Abstraction => Abstraction, The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External when others => -- non-exec code Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.IsImportExport"); end case; return Result; end IsImportExport; -------------------------------------------------------------------------------- -- First_Record_Component -------------------------------------------------------------------------------- function First_Record_Component (Type_Mark : RawDict.Type_Info_Ref) return Iterator --# global in Dict; is The_Record_Component : RawDict.Record_Component_Info_Ref; Components : Iterator := NullIterator; begin The_Record_Component := RawDict.Get_Type_First_Record_Component (Type_Mark => Type_Mark); if The_Record_Component /= RawDict.Null_Record_Component_Info_Ref then Components := Iterator' (RecordComponentIterator, IsAbstract, RawDict.Get_Record_Component_Symbol (The_Record_Component), NullSymbol); end if; return Components; end First_Record_Component; -------------------------------------------------------------------------------- function FirstRecordComponent (TheRecordType : Symbol) return Iterator is begin return First_Record_Component (Type_Mark => RawDict.Get_Type_Info_Ref (TheRecordType)); -- GAA External end FirstRecordComponent; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- -- This function is to do with iterating over the fields of extended tagged records. -- It is declared here because it is used by FirstExtendedRecordComponent and NextSymbol function Back_Track_Up_Inherit_Chain (Start_Sym, Stop_Sym : RawDict.Type_Info_Ref) return RawDict.Type_Info_Ref --# global in Dict; is Result : RawDict.Type_Info_Ref; begin -- Starting at extended record Start_Sym follow the Inherit pointers to the -- record _preceding_ Stop_Sym. If Start_Sym = Stop_Sym return NullSymbol. 
if Start_Sym = Stop_Sym then Result := RawDict.Null_Type_Info_Ref; else Result := Start_Sym; loop exit when Get_Type (The_Symbol => CurrentSymbol (First_Record_Component (Type_Mark => Result))) = Stop_Sym; Result := Get_Type (The_Symbol => CurrentSymbol (First_Record_Component (Type_Mark => Result))); end loop; end if; return Result; end Back_Track_Up_Inherit_Chain; -------------------------------------------------------------------------------- -- First_Enumeration_Literal -------------------------------------------------------------------------------- function First_Enumeration_Literal (Type_Mark : RawDict.Type_Info_Ref) return Iterator --# global in Dict; is The_Enumeration_Literal : RawDict.Enumeration_Literal_Info_Ref; Literals : Iterator := NullIterator; begin The_Enumeration_Literal := RawDict.Get_Type_First_Enumeration_Literal (Type_Mark => Type_Mark); if The_Enumeration_Literal /= RawDict.Null_Enumeration_Literal_Info_Ref then Literals := Iterator' (EnumerationLiteralIterator, IsAbstract, RawDict.Get_Enumeration_Literal_Symbol (The_Enumeration_Literal), NullSymbol); end if; return Literals; end First_Enumeration_Literal; -------------------------------------------------------------------------------- function FirstEnumerationLiteral (EnumerationType : Symbol) return Iterator is begin return First_Enumeration_Literal (Type_Mark => RawDict.Get_Type_Info_Ref (EnumerationType)); -- GAA External end FirstEnumerationLiteral; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function GetPositionNumber (Literal : Symbol) return LexTokenManager.Lex_String is begin return RawDict.Get_Enumeration_Literal_Position (The_Enumeration_Literal => RawDict.Get_Enumeration_Literal_Info_Ref (Literal)); -- GAA External end GetPositionNumber; -------------------------------------------------------------------------------- -- Get_Record_Type -------------------------------------------------------------------------------- function GetRecordType (Component : Symbol) return Symbol is function Get_Record_Type (Component : Symbol) return RawDict.Type_Info_Ref --# global in Dict; is The_Record_Component : RawDict.Record_Component_Info_Ref; begin case RawDict.GetSymbolDiscriminant (Component) is when Record_Component_Symbol => The_Record_Component := RawDict.Get_Record_Component_Info_Ref (Item => Component); -- GAA External when Subcomponent_Symbol => The_Record_Component := RawDict.Get_Subcomponent_Record_Component (The_Subcomponent => RawDict.Get_Subcomponent_Info_Ref (Item => Component)); -- GAA External when others => The_Record_Component := RawDict.Null_Record_Component_Info_Ref; end case; return RawDict.Get_Record_Component_Record_Type (The_Record_Component => The_Record_Component); end Get_Record_Type; begin -- GetRecordType return RawDict.Get_Type_Symbol (Get_Record_Type (Component)); -- GAA External end GetRecordType; -------------------------------------------------------------------------------- -- Get_Predefined_Package_Standard -------------------------------------------------------------------------------- function Get_Predefined_Package_Standard return RawDict.Package_Info_Ref --# global in Dict; is begin return Dict.Packages.Package_Standard; end Get_Predefined_Package_Standard; -------------------------------------------------------------------------------- function GetPredefinedPackageStandard return Symbol is begin return RawDict.Get_Package_Symbol (Get_Predefined_Package_Standard); 
-- GAA External end GetPredefinedPackageStandard; -------------------------------------------------------------------------------- function IsPredefinedPackageStandard (TheSymbol : Symbol) return Boolean is begin return RawDict.Get_Package_Info_Ref (TheSymbol) = Get_Predefined_Package_Standard; -- GAA External end IsPredefinedPackageStandard; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function Get_Predefined_Package_Ada return RawDict.Package_Info_Ref --# global in Dict; is begin return Dict.Packages.Package_Ada; end Get_Predefined_Package_Ada; -------------------------------------------------------------------------------- function Predefined_Scope return Scopes --# global in Dict; is begin return Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (Get_Predefined_Package_Standard)); end Predefined_Scope; -------------------------------------------------------------------------------- function GetLocalScope (Scope : Scopes) return Scopes is begin return Set_Visibility (The_Visibility => Local, The_Unit => GetRegion (Scope)); end GetLocalScope; -------------------------------------------------------------------------------- function GlobalScope return Scopes is begin return Set_Visibility (The_Visibility => Local, The_Unit => RawDict.Get_Package_Symbol (Get_Predefined_Package_Standard)); end GlobalScope; -------------------------------------------------------------------------------- -- Get_Scope -------------------------------------------------------------------------------- function Get_Type_Scope (Type_Mark : RawDict.Type_Info_Ref) return Scopes --# global in Dict; is begin return RawDict.Get_Declaration_Scope (The_Declaration => Get_Type_Declaration (Type_Mark => Type_Mark)); end Get_Type_Scope; -------------------------------------------------------------------------------- function Get_Constant_Scope (The_Constant : RawDict.Constant_Info_Ref) return Scopes --# global in Dict; is begin return RawDict.Get_Declaration_Scope (The_Declaration => Get_Constant_Declaration (The_Constant => The_Constant)); end Get_Constant_Scope; -------------------------------------------------------------------------------- function Get_Subprogram_Scope (The_Subprogram : RawDict.Subprogram_Info_Ref) return Scopes --# global in Dict; is begin return RawDict.Get_Declaration_Scope (The_Declaration => RawDict.Get_Subprogram_Specification (The_Subprogram => The_Subprogram)); end Get_Subprogram_Scope; -------------------------------------------------------------------------------- function Get_Package_Scope (The_Package : RawDict.Package_Info_Ref) return Scopes --# global in Dict; is begin return RawDict.Get_Declaration_Scope (The_Declaration => RawDict.Get_Package_Specification (The_Package => The_Package)); end Get_Package_Scope; -------------------------------------------------------------------------------- function Get_Variable_Scope (The_Variable : RawDict.Variable_Info_Ref) return Scopes --# global in Dict; is Scope : Scopes; The_Package : Symbol; begin if RawDict.Get_Variable_Declaration (The_Variable => The_Variable) = RawDict.Null_Declaration_Info_Ref then The_Package := Get_Owner (The_Variable => The_Variable); if RawDict.Get_Variable_Own_Variable (The_Variable => The_Variable) = RawDict.Null_Own_Variable_Info_Ref and then RawDict.Get_Variable_Own_Task (The_Variable => The_Variable) = NullSymbol then Scope := Set_Visibility (The_Visibility => Local, 
The_Unit => The_Package); else Scope := Set_Visibility (The_Visibility => Visible, The_Unit => The_Package); end if; else Scope := RawDict.Get_Declaration_Scope (The_Declaration => RawDict.Get_Variable_Declaration (The_Variable => The_Variable)); end if; return Scope; end Get_Variable_Scope; -------------------------------------------------------------------------------- function GetScope (Item : Symbol) return Scopes is separate; -------------------------------------------------------------------------------- function NextSymbol (Previous : Iterator) return Iterator is separate; -------------------------------------------------------------------------------- function GetEnclosingScope (Scope : Scopes) return Scopes is Region : Symbol; Enclosing_Scope : Scopes; begin Region := GetRegion (Scope); Enclosing_Scope := GetScope (Region); if Is_Subprogram (Region) then if Get_Visibility (Scope => Scope) = Local and then (Get_Visibility (Scope => Enclosing_Scope) = Visible or else Get_Visibility (Scope => Enclosing_Scope) = Privat) then Enclosing_Scope := GetLocalScope (Enclosing_Scope); elsif Is_Generic_Subprogram (Region) then Enclosing_Scope := Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Generic_Unit_Symbol (RawDict.Get_Subprogram_Generic_Unit (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Region)))); end if; elsif RawDict.GetSymbolDiscriminant (Region) = Type_Symbol and then (Is_Protected_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Region)) or else Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Region))) and then Get_Visibility (Scope => Scope) = Local then Enclosing_Scope := GetLocalScope (Enclosing_Scope); elsif RawDict.GetSymbolDiscriminant (Region) = Package_Symbol and then RawDict.Get_Package_Generic_Unit (The_Package => RawDict.Get_Package_Info_Ref (Item => Region)) /= RawDict.Null_Generic_Unit_Info_Ref then Enclosing_Scope := Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Generic_Unit_Symbol (RawDict.Get_Package_Generic_Unit (The_Package => RawDict.Get_Package_Info_Ref (Item => Region)))); end if; return Enclosing_Scope; end GetEnclosingScope; -------------------------------------------------------------------------------- function IsPredefinedScope (Scope : Scopes) return Boolean is begin return Scope = Predefined_Scope; end IsPredefinedScope; -------------------------------------------------------------------------------- function IsGlobalScope (Scope : Scopes) return Boolean is begin return Scope = GlobalScope; end IsGlobalScope; -------------------------------------------------------------------------------- function IsLibraryLevel (Scope : Scopes) return Boolean is begin return RawDict.GetSymbolDiscriminant (GetRegion (Scope)) = Package_Symbol and then (RawDict.Get_Package_Info_Ref (Item => GetRegion (Scope)) = Get_Predefined_Package_Standard or else (RawDict.GetSymbolDiscriminant (GetRegion (GetEnclosingScope (Scope))) = Package_Symbol and then RawDict.Get_Package_Info_Ref (Item => GetRegion (GetEnclosingScope (Scope))) = Get_Predefined_Package_Standard)); end IsLibraryLevel; -------------------------------------------------------------------------------- function IsLocal (Inner, Outer : Scopes) return Boolean is Current_Scope : Scopes; Current_Region : Symbol; Outer_Region : Symbol; begin Current_Scope := Inner; Outer_Region := GetRegion (Outer); loop Current_Region := GetRegion (Current_Scope); exit when Current_Region = Outer_Region or else (RawDict.GetSymbolDiscriminant (Current_Region) = 
Package_Symbol and then RawDict.Get_Package_Info_Ref (Item => Current_Region) = Get_Predefined_Package_Standard); Current_Scope := GetEnclosingScope (Current_Scope); end loop; return Current_Region = Outer_Region; end IsLocal;
-------------------------------------------------------------------------------- -- Get_Enclosing_Package -------------------------------------------------------------------------------- function Get_Enclosing_Package (Scope : Scopes) return RawDict.Package_Info_Ref --# global in Dict; is Current : Scopes; Region : Symbol; begin Current := Scope; loop Region := GetRegion (Current); exit when RawDict.GetSymbolDiscriminant (Region) = Package_Symbol; Current := GetEnclosingScope (Current); end loop; return RawDict.Get_Package_Info_Ref (Item => Region); end Get_Enclosing_Package;
-------------------------------------------------------------------------------- function GetEnclosingPackage (Scope : Scopes) return Symbol is begin return RawDict.Get_Package_Symbol (Get_Enclosing_Package (Scope => Scope)); -- GAA External end GetEnclosingPackage;
-------------------------------------------------------------------------------- -- Get_Library_Package -------------------------------------------------------------------------------- function Get_Library_Package (Scope : Scopes) return RawDict.Package_Info_Ref --# global in Dict; is Current_Package, Enclosing_Package : RawDict.Package_Info_Ref; begin Current_Package := Get_Enclosing_Package (Scope => Scope); if Current_Package /= Get_Predefined_Package_Standard then loop exit when RawDict.Get_Package_Parent (The_Package => Current_Package) /= RawDict.Null_Package_Info_Ref; Enclosing_Package := Get_Enclosing_Package (Scope => Get_Package_Scope (The_Package => Current_Package)); exit when Enclosing_Package = Get_Predefined_Package_Standard; Current_Package := Enclosing_Package; end loop; end if; return Current_Package; end Get_Library_Package;
-------------------------------------------------------------------------------- function GetLibraryPackage (Scope : Scopes) return Symbol is begin return RawDict.Get_Package_Symbol (Get_Library_Package (Scope => Scope)); -- GAA External end GetLibraryPackage;
-------------------------------------------------------------------------------- -- Get_Package_Owner -------------------------------------------------------------------------------- -- The owner of a package (if any) is defined to be -- the parent of its closest private ancestor, where the term ancestor is -- as defined in 10.1.1(11) of the Ada 95 LRM [2], to include the package itself.
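-- Illustrative sketch (hypothetical package hierarchy, not part of the dictionary): given a
-- public root package P, a private child package P.Q and a public grandchild P.Q.R, the
-- closest private ancestor of both P.Q and P.Q.R is P.Q, so GetPackageOwner below reports P
-- as the owner of each. P itself has no private ancestor, so the search runs out of parents
-- and the null package is returned instead.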
function GetPackageOwner (ThePackage : Symbol) return Symbol is function Get_Package_Owner (The_Package : RawDict.Package_Info_Ref) return RawDict.Package_Info_Ref --# global in Dict; is Current_Package : RawDict.Package_Info_Ref; begin Current_Package := The_Package; loop exit when RawDict.Get_Package_Is_Private (The_Package => Current_Package); Current_Package := RawDict.Get_Package_Parent (The_Package => Current_Package); exit when Current_Package = RawDict.Null_Package_Info_Ref; end loop; if Current_Package /= RawDict.Null_Package_Info_Ref then Current_Package := RawDict.Get_Package_Parent (The_Package => Current_Package); end if; return Current_Package; end Get_Package_Owner; begin -- GetPackageOwner return RawDict.Get_Package_Symbol (Get_Package_Owner (The_Package => RawDict.Get_Package_Info_Ref (ThePackage))); -- GAA External end GetPackageOwner;
-------------------------------------------------------------------------------- -- Get_Constant_Rule_Policy_For_Current_Scope -------------------------------------------------------------------------------- function Get_Constant_Rule_Policy_For_Current_Scope (The_Constant : RawDict.Constant_Info_Ref; The_Scope : Scopes) return Rule_Policies --# global in Dict; is The_Rule_Policy : RawDict.Rule_Policy_Info_Ref; Rule_Policy : Rule_Policies := Unspecified; begin The_Rule_Policy := RawDict.Get_Constant_First_Rule_Policy (The_Constant => The_Constant); while The_Rule_Policy /= RawDict.Null_Rule_Policy_Info_Ref loop if RawDict.Get_Rule_Policy_Scope (The_Rule_Policy => The_Rule_Policy) = The_Scope then Rule_Policy := RawDict.Get_Rule_Policy_Value (The_Rule_Policy => The_Rule_Policy); exit; end if; The_Rule_Policy := RawDict.Get_Next_Rule_Policy (The_Rule_Policy => The_Rule_Policy); end loop; return Rule_Policy; end Get_Constant_Rule_Policy_For_Current_Scope;
-------------------------------------------------------------------------------- function IsConstantRulePolicyPresent (TheConstant : Symbol; TheScope : Scopes) return Boolean is begin return Get_Constant_Rule_Policy_For_Current_Scope (The_Constant => RawDict.Get_Constant_Info_Ref (TheConstant), -- GAA External The_Scope => TheScope) /= Unspecified; end IsConstantRulePolicyPresent;
-------------------------------------------------------------------------------- -- Get_Constant_Rule_Policy -------------------------------------------------------------------------------- function GetConstantRulePolicy (TheConstant : Symbol; TheScope : Scopes) return Rule_Policies is function Get_Constant_Rule_Policy (The_Constant : RawDict.Constant_Info_Ref; The_Scope : Scopes) return Rule_Policies --# global in Dict; is Current_Scope : Scopes; Rule_Policy : Rule_Policies; begin Current_Scope := The_Scope; -- check the list of policies for the current scope and any enclosing scope. loop Rule_Policy := Get_Constant_Rule_Policy_For_Current_Scope (The_Constant => The_Constant, The_Scope => Current_Scope); exit when Rule_Policy /= Unspecified; exit when IsPredefinedScope (Current_Scope); -- if we have a local package scope check the visible scope for this package next if Get_Visibility (Scope => Current_Scope) = Local and then RawDict.GetSymbolDiscriminant (GetRegion (Current_Scope)) = Package_Symbol then Current_Scope := Set_Visibility (The_Visibility => Visible, The_Unit => GetRegion (Current_Scope)); else Current_Scope := GetEnclosingScope (Current_Scope); end if; end loop; -- if still unspecified then check the declarative region for the constant.
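-- Recap of the search order implemented above and below (an informal summary, assuming the
-- usual scope chain): the current scope is tried first, then each enclosing scope in turn,
-- switching from a package's local scope to its visible scope before leaving that package,
-- and stopping once the predefined scope has been tried; only if every scope on that chain
-- leaves the policy Unspecified is the constant's own declarative region consulted.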
if Rule_Policy = Unspecified then Current_Scope := Get_Constant_Scope (The_Constant => The_Constant); Rule_Policy := Get_Constant_Rule_Policy_For_Current_Scope (The_Constant => The_Constant, The_Scope => Current_Scope); end if; return Rule_Policy; end Get_Constant_Rule_Policy; begin -- GetConstantRulePolicy return Get_Constant_Rule_Policy (The_Constant => RawDict.Get_Constant_Info_Ref (TheConstant), -- GAA External The_Scope => TheScope); end GetConstantRulePolicy; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function TypeIsWellformed (TheType : Symbol) return Boolean is begin return RawDict.Get_Type_Wellformed (Type_Mark => RawDict.Get_Type_Info_Ref (TheType)); -- GAA External end TypeIsWellformed; -------------------------------------------------------------------------------- function Descendent_Is_Private (Inner_Package, Outer_Package : RawDict.Package_Info_Ref) return Boolean --# global in Dict; is Current_Package : RawDict.Package_Info_Ref; begin Current_Package := Inner_Package; loop exit when RawDict.Get_Package_Is_Private (The_Package => Current_Package); Current_Package := RawDict.Get_Package_Parent (The_Package => Current_Package); exit when Current_Package = Outer_Package or else Current_Package = RawDict.Null_Package_Info_Ref; end loop; return Current_Package /= Outer_Package and then Current_Package /= RawDict.Null_Package_Info_Ref; end Descendent_Is_Private; -------------------------------------------------------------------------------- function Is_Private_Seeing_Descendent (Scope : Scopes; The_Package : RawDict.Package_Info_Ref) return Boolean --# global in Dict; is Library_Package : RawDict.Package_Info_Ref; begin Library_Package := Get_Library_Package (Scope => Scope); return Is_Proper_Descendent (Inner_Package => Library_Package, Outer_Package => The_Package) and then (Scope /= Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (Library_Package)) or else Descendent_Is_Private (Inner_Package => Library_Package, Outer_Package => The_Package)); end Is_Private_Seeing_Descendent; -------------------------------------------------------------------------------- -- Type_Is_Incomplete_Here -------------------------------------------------------------------------------- function Type_Is_Incomplete_Here (Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is Result : Boolean; begin if RawDict.Get_Type_Declaration (Type_Mark => Type_Mark) /= RawDict.Null_Declaration_Info_Ref then Result := (not ((not Type_Is_Private (Type_Mark => Type_Mark) -- Type_Mark is declared in the visible part of package spec and then Get_Visibility (Scope => Get_Type_Scope (Type_Mark => Type_Mark)) = Visible) -- and isn't private or else IsLocal (Scope, Get_Type_Scope (Type_Mark => RawDict.Get_First_Constrained_Subtype (Type_Mark => Type_Mark))) -- Scope is Local relative to Type_Mark (e.g. 
in same package body) or else Is_Private_Seeing_Descendent (Scope => Scope, The_Package => RawDict.Get_Package_Info_Ref (GetRegion (Get_Type_Scope (Type_Mark => RawDict.Get_First_Constrained_Subtype (Type_Mark => Type_Mark)))))) or else -- if we are in the visible scope of the place where the type is declared it can't be complete yet Scope = Set_Visibility (The_Visibility => Visible, The_Unit => GetRegion (Get_Type_Scope (Type_Mark => Type_Mark)))); else -- No Declaration - must be an announced type with no completion Result := True; end if; return Result; end Type_Is_Incomplete_Here; -------------------------------------------------------------------------------- function TypeIsIncompleteHere (TypeMark : Symbol; Scope : Scopes) return Boolean is begin return Type_Is_Incomplete_Here (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark), -- GAA External Scope => Scope); end TypeIsIncompleteHere; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function Type_Is_Private_Here (Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is begin return Type_Is_Private (Type_Mark => Type_Mark) and then -- type must be private to be private here! Type_Is_Incomplete_Here (Type_Mark => Type_Mark, Scope => Scope); end Type_Is_Private_Here; ------------------------------------------------------------------------------- -- Type_Is_Real ------------------------------------------------------------------------------- function Type_Is_Real (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return Type_Is_Fixed_Point (Type_Mark => Type_Mark) or else Type_Is_Floating_Point (Type_Mark => Type_Mark); end Type_Is_Real; -------------------------------------------------------------------------------- function TypeIsReal (TypeMark : Symbol) return Boolean is begin return Type_Is_Real (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark)); -- GAA External end TypeIsReal; -------------------------------------------------------------------------------- -- Is_Real_Type_Mark -------------------------------------------------------------------------------- function Is_Real_Type_Mark (Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is begin return Type_Is_Real (Type_Mark => Type_Mark) and then not Type_Is_Private_Here (Type_Mark => Type_Mark, Scope => Scope); end Is_Real_Type_Mark; -------------------------------------------------------------------------------- function IsRealTypeMark (TheSymbol : Symbol; Scope : Scopes) return Boolean is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then Is_Real_Type_Mark (Type_Mark => RawDict.Get_Type_Info_Ref (TheSymbol), -- GAA External Scope => Scope); end IsRealTypeMark; -------------------------------------------------------------------------------- -- Get_Base_Type -------------------------------------------------------------------------------- function Get_Base_Type (Type_Mark : RawDict.Type_Info_Ref) return RawDict.Type_Info_Ref --# global in Dict; is Result : RawDict.Type_Info_Ref; begin Result := Get_Root_Type (Type_Mark => Type_Mark); -- go to first named subtype if RawDict.Get_Type_Derived (Type_Mark => Result) then -- find base type Result := RawDict.Get_Type_Base_Type (Type_Mark => Result); end if; return Result; end Get_Base_Type; -------------------------------------------------------------------------------- function GetBaseType (TypeMark : 
Symbol) return Symbol is begin return RawDict.Get_Type_Symbol (Get_Base_Type (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark))); -- GAA External end GetBaseType; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function GetRootOfExtendedType (TypeMark : Symbol) return Symbol is begin return RawDict.Get_Type_Symbol (RawDict.Get_Type_Extends (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark))); -- GAA External end GetRootOfExtendedType; -------------------------------------------------------------------------------- -- Get_Enumeration_Literal -------------------------------------------------------------------------------- function Get_Enumeration_Literal (Type_Mark : RawDict.Type_Info_Ref; Position : LexTokenManager.Lex_String) return Symbol --# global in Dict; --# in LexTokenManager.State; is Literals : Iterator; begin Literals := First_Enumeration_Literal (Type_Mark => Get_Root_Type (Type_Mark => Type_Mark)); loop exit when LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => RawDict.Get_Enumeration_Literal_Position (The_Enumeration_Literal => RawDict.Get_Enumeration_Literal_Info_Ref (CurrentSymbol (Literals))), Lex_Str2 => Position) = LexTokenManager.Str_Eq; Literals := NextSymbol (Literals); end loop; return CurrentSymbol (Literals); end Get_Enumeration_Literal; -------------------------------------------------------------------------------- function GetEnumerationLiteral (EnumerationType : Symbol; Position : LexTokenManager.Lex_String) return Symbol is begin return Get_Enumeration_Literal (Type_Mark => RawDict.Get_Type_Info_Ref (EnumerationType), -- GAA External Position => Position); end GetEnumerationLiteral; -------------------------------------------------------------------------------- -- Is_Integer_Type_Mark -------------------------------------------------------------------------------- function Is_Integer_Type_Mark (Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is begin return Type_Is_Integer (Type_Mark => Type_Mark) and then not Type_Is_Private_Here (Type_Mark => Type_Mark, Scope => Scope); end Is_Integer_Type_Mark; -------------------------------------------------------------------------------- function IsIntegerTypeMark (TheSymbol : Symbol; Scope : Scopes) return Boolean is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then Is_Integer_Type_Mark (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheSymbol), -- GAA External Scope => Scope); end IsIntegerTypeMark; -------------------------------------------------------------------------------- -- Is_Modular_Type_Mark -------------------------------------------------------------------------------- function Is_Modular_Type_Mark (Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is begin return Type_Is_Modular (Type_Mark => Type_Mark) and then not Type_Is_Private_Here (Type_Mark => Type_Mark, Scope => Scope); end Is_Modular_Type_Mark; -------------------------------------------------------------------------------- function IsModularTypeMark (TheSymbol : Symbol; Scope : Scopes) return Boolean is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then Is_Modular_Type_Mark (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheSymbol), -- GAA External Scope => Scope); end IsModularTypeMark; -------------------------------------------------------------------------------- -- 
Is_Floating_Point_Type_Mark -------------------------------------------------------------------------------- function Is_Floating_Point_Type_Mark (Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is begin return Type_Is_Floating_Point (Type_Mark => Type_Mark) and then not Type_Is_Private_Here (Type_Mark => Type_Mark, Scope => Scope); end Is_Floating_Point_Type_Mark; -------------------------------------------------------------------------------- function IsFloatingPointTypeMark (TheSymbol : Symbol; Scope : Scopes) return Boolean is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then Is_Floating_Point_Type_Mark (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheSymbol), -- GAA External Scope => Scope); end IsFloatingPointTypeMark; -------------------------------------------------------------------------------- -- Is_Fixed_Point_Type_Mark -------------------------------------------------------------------------------- function Is_Fixed_Point_Type_Mark (Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is begin return Type_Is_Fixed_Point (Type_Mark => Type_Mark) and then not Type_Is_Private_Here (Type_Mark => Type_Mark, Scope => Scope); end Is_Fixed_Point_Type_Mark; -------------------------------------------------------------------------------- function IsFixedPointTypeMark (TheSymbol : Symbol; Scope : Scopes) return Boolean is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then Is_Fixed_Point_Type_Mark (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheSymbol), -- GAA External Scope => Scope); end IsFixedPointTypeMark; -------------------------------------------------------------------------------- -- Is_Array_Type_Mark -------------------------------------------------------------------------------- function Is_Array_Type_Mark (Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is begin return Type_Is_Array (Type_Mark => Type_Mark) and then not Type_Is_Private_Here (Type_Mark => Type_Mark, Scope => Scope); end Is_Array_Type_Mark; -------------------------------------------------------------------------------- function IsArrayTypeMark (TheSymbol : Symbol; Scope : Scopes) return Boolean is begin return Is_Array_Type_Mark (Type_Mark => RawDict.Get_Type_Info_Ref (TheSymbol), -- GAA External Scope => Scope); end IsArrayTypeMark; -------------------------------------------------------------------------------- -- Is_Integer_Type -------------------------------------------------------------------------------- function IsIntegerType (TheSymbol : Symbol; Scope : Scopes) return Boolean is function Is_Integer_Type (Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is begin return Is_Type (Type_Mark => Type_Mark) and then Is_Integer_Type_Mark (Type_Mark => Type_Mark, Scope => Scope); end Is_Integer_Type; begin -- IsIntegerType return RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then Is_Integer_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheSymbol), -- GAA External Scope => Scope); end IsIntegerType; -------------------------------------------------------------------------------- -- Is_Modular_Type -------------------------------------------------------------------------------- function IsModularType (TheSymbol : Symbol; Scope : Scopes) return Boolean is function Is_Modular_Type (Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is begin return 
Is_Type (Type_Mark => Type_Mark) and then Is_Modular_Type_Mark (Type_Mark => Type_Mark, Scope => Scope); end Is_Modular_Type; begin -- IsModularType return RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then Is_Modular_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheSymbol), -- GAA External Scope => Scope); end IsModularType; -------------------------------------------------------------------------------- -- Is_Floating_Point_Type -------------------------------------------------------------------------------- function IsFloatingPointType (TheSymbol : Symbol; Scope : Scopes) return Boolean is function Is_Floating_Point_Type (Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is begin return Is_Type (Type_Mark => Type_Mark) and then Is_Floating_Point_Type_Mark (Type_Mark => Type_Mark, Scope => Scope); end Is_Floating_Point_Type; begin -- IsFloatingPointType return RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then Is_Floating_Point_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheSymbol), -- GAA External Scope => Scope); end IsFloatingPointType; -------------------------------------------------------------------------------- -- Is_Fixed_Point_Type -------------------------------------------------------------------------------- function IsFixedPointType (TheSymbol : Symbol; Scope : Scopes) return Boolean is function Is_Fixed_Point_Type (Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is begin return Is_Type (Type_Mark => Type_Mark) and then Is_Fixed_Point_Type_Mark (Type_Mark => Type_Mark, Scope => Scope); end Is_Fixed_Point_Type; begin -- IsFixedPointType return RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then Is_Fixed_Point_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheSymbol), -- GAA External Scope => Scope); end IsFixedPointType; -------------------------------------------------------------------------------- -- Compatible_Types -------------------------------------------------------------------------------- function CompatibleTypes (Scope : Scopes; Left : Symbol; Right : Symbol) return Boolean is function Compatible_Types (Scope : Scopes; The_Left_Type : RawDict.Type_Info_Ref; The_Right_Type : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is Base1, Base2 : RawDict.Type_Info_Ref; Result : Boolean; begin if The_Left_Type = Get_Unknown_Type_Mark or else The_Right_Type = Get_Unknown_Type_Mark then Result := True; else Base1 := Get_Root_Type (Type_Mark => The_Left_Type); Base2 := Get_Root_Type (Type_Mark => The_Right_Type); if Base1 = Base2 then Result := True; elsif Base1 = Get_Universal_Integer_Type then Result := Is_Integer_Type_Mark (Type_Mark => Base2, Scope => Scope) or else Is_Modular_Type_Mark (Type_Mark => Base2, Scope => Scope); elsif Base2 = Get_Universal_Integer_Type then Result := Is_Integer_Type_Mark (Type_Mark => Base1, Scope => Scope) or else Is_Modular_Type_Mark (Type_Mark => Base1, Scope => Scope); elsif Base1 = Get_Universal_Real_Type then Result := Is_Real_Type_Mark (Type_Mark => Base2, Scope => Scope); elsif Base2 = Get_Universal_Real_Type then Result := Is_Real_Type_Mark (Type_Mark => Base1, Scope => Scope); else Result := False; end if; end if; return Result; end Compatible_Types; begin -- CompatibleTypes return Compatible_Types (Scope => Scope, The_Left_Type => RawDict.Get_Type_Info_Ref (Left), -- GAA External The_Right_Type => RawDict.Get_Type_Info_Ref (Right)); -- GAA External end CompatibleTypes; 
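-- Illustrative consequences of the rules in Compatible_Types above (a sketch only, using
-- hypothetical user-defined types): two type marks sharing the same root type are compatible;
-- the universal integer type is compatible with any integer or modular type mark that is not
-- private in the given scope; the universal real type is likewise compatible with any real
-- type mark there; and the unknown type mark is compatible with anything, which presumably
-- stops earlier semantic errors from cascading.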
-------------------------------------------------------------------------------- -- Get_Predefined_Character_Type -------------------------------------------------------------------------------- function Get_Predefined_Character_Type return RawDict.Type_Info_Ref --# global in Dict; is begin return Dict.Types.The_Predefined_Types (Predefined_Character_Type); end Get_Predefined_Character_Type; -------------------------------------------------------------------------------- function GetPredefinedCharacterType return Symbol is begin return RawDict.Get_Type_Symbol (Get_Predefined_Character_Type); -- GAA External end GetPredefinedCharacterType; -------------------------------------------------------------------------------- function IsPredefinedCharacterType (TheSymbol : Symbol) return Boolean is begin return RawDict.Get_Type_Info_Ref (TheSymbol) = Get_Predefined_Character_Type; -- GAA External end IsPredefinedCharacterType; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function TypeIsGeneric (TheType : Symbol) return Boolean is begin return RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (TheType)) = Generic_Type_Item; -- GAA External end TypeIsGeneric; -------------------------------------------------------------------------------- -- Type_Is_Discrete -------------------------------------------------------------------------------- function Type_Is_Discrete (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return Type_Is_Enumeration (Type_Mark => Type_Mark) or else Type_Is_Integer (Type_Mark => Type_Mark) or else Type_Is_Modular (Type_Mark => Type_Mark); end Type_Is_Discrete; -------------------------------------------------------------------------------- function TypeIsDiscrete (TypeMark : Symbol) return Boolean is begin return Type_Is_Discrete (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark)); -- GAA External end TypeIsDiscrete; -------------------------------------------------------------------------------- -- Type_Is_Numeric -------------------------------------------------------------------------------- function Type_Is_Numeric (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return Type_Is_Integer (Type_Mark => Type_Mark) or else Type_Is_Modular (Type_Mark => Type_Mark) or else Type_Is_Real (Type_Mark => Type_Mark); end Type_Is_Numeric; -------------------------------------------------------------------------------- function TypeIsNumeric (TypeMark : Symbol) return Boolean is begin return Type_Is_Numeric (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark)); -- GAA External end TypeIsNumeric; -------------------------------------------------------------------------------- -- Type_Is_Scalar -------------------------------------------------------------------------------- function Type_Is_Scalar (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return Type_Is_Discrete (Type_Mark => Type_Mark) or else Type_Is_Numeric (Type_Mark => Type_Mark); end Type_Is_Scalar; -------------------------------------------------------------------------------- function TypeIsScalar (TypeMark : Symbol) return Boolean is begin return Type_Is_Scalar (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark)); -- GAA External end TypeIsScalar; -------------------------------------------------------------------------------- -- First_Array_Index 
-------------------------------------------------------------------------------- function First_Array_Index (Type_Mark : RawDict.Type_Info_Ref) return Iterator --# global in Dict; is The_Array_Index : RawDict.Array_Index_Info_Ref; Array_Indices : Iterator := NullIterator; begin The_Array_Index := RawDict.Get_Type_First_Array_Index (Type_Mark => Type_Mark); if The_Array_Index /= RawDict.Null_Array_Index_Info_Ref then Array_Indices := Iterator' (ArrayIndexIterator, IsAbstract, RawDict.Get_Type_Symbol (RawDict.Get_Array_Index_Type (The_Array_Index => The_Array_Index)), RawDict.Get_Array_Index_Symbol (The_Array_Index)); end if; return Array_Indices; end First_Array_Index; -------------------------------------------------------------------------------- function FirstArrayIndex (TypeMark : Symbol) return Iterator is begin return First_Array_Index (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark)); -- GAA External end FirstArrayIndex; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function IsPrivateTypeMark (TheSymbol : Symbol; Scope : Scopes) return Boolean is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then Type_Is_Private_Here (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheSymbol), -- GAA External Scope => Scope); end IsPrivateTypeMark; -------------------------------------------------------------------------------- -- Is_Private_Type -------------------------------------------------------------------------------- function IsPrivateType (TheSymbol : Symbol; Scope : Scopes) return Boolean is function Is_Private_Type (Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is begin return Is_Type (Type_Mark => Type_Mark) and then Type_Is_Private_Here (Type_Mark => Type_Mark, Scope => Scope); end Is_Private_Type; begin -- IsPrivateType return RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then Is_Private_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheSymbol), -- GAA External Scope => Scope); end IsPrivateType; -------------------------------------------------------------------------------- -- Is_Limited_Private_Type -------------------------------------------------------------------------------- function IsLimitedPrivateType (TheSymbol : Symbol) return Boolean is function Is_Limited_Private_Type (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return Is_Type (Type_Mark => Type_Mark) and then RawDict.Get_Type_Limited_Private (Type_Mark => Type_Mark); end Is_Limited_Private_Type; begin -- IsLimitedPrivateType return RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then Is_Limited_Private_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheSymbol)); -- GAA External end IsLimitedPrivateType; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function TypeIsAbstractProof (TypeMark : Symbol) return Boolean is begin return RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark)) = -- GAA External Abstract_Proof_Type_Item; end TypeIsAbstractProof; -------------------------------------------------------------------------------- function Is_Proof_Type (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return not Is_Subtype (Type_Mark => Type_Mark) and then (Get_Type_Context (Type_Mark => Type_Mark) 
= ProofContext or else RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Abstract_Proof_Type_Item); end Is_Proof_Type; -------------------------------------------------------------------------------- -- Type_Is_Own_Abstract_Here -------------------------------------------------------------------------------- -- This function determines whether the declaration of TypeMark is -- not visible from the current viewpoint (Scope) i.e. "abstract here". -- In the case of TypeMark being overloaded to mean both a concrete -- type and a type that has been "announced" in an own variable announcement -- the function determines which is the right view of TypeMark in the -- given scope (abstract or concrete). function TypeIsOwnAbstractHere (TypeMark : Symbol; Scope : Scopes) return Boolean is function Type_Is_Own_Abstract_Here (Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is DeclScope : Scopes; DeclarationVisibleHere, Result : Boolean; begin DeclScope := Get_Type_Scope (Type_Mark => Type_Mark); DeclarationVisibleHere := IsLocal (Scope, DeclScope) or else (RawDict.GetSymbolDiscriminant (GetRegion (Get_Type_Scope (Type_Mark => Type_Mark))) = Package_Symbol and then Is_Private_Seeing_Descendent (Scope => Scope, The_Package => RawDict.Get_Package_Info_Ref (GetRegion (DeclScope)))) or else (not Type_Is_Private (Type_Mark => Type_Mark) and then Get_Visibility (Scope => Get_Type_Scope (Type_Mark => Type_Mark)) = Visible); Result := RawDict.Get_Type_Is_Own_Var_Type (Type_Mark => Type_Mark) and then (Is_Proof_Type (Type_Mark => Type_Mark) or else not DeclarationVisibleHere); return Result; end Type_Is_Own_Abstract_Here; begin -- TypeIsOwnAbstractHere return Type_Is_Own_Abstract_Here (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark), -- GAA External Scope => Scope); end TypeIsOwnAbstractHere; -------------------------------------------------------------------------------- -- Is_Unconstrained_Array_Type -------------------------------------------------------------------------------- function Is_Unconstrained_Array_Type (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return Type_Is_Array (Type_Mark => Type_Mark) and then not RawDict.Get_Type_Constrained (Type_Mark => Type_Mark); end Is_Unconstrained_Array_Type; -------------------------------------------------------------------------------- function IsUnconstrainedArrayType (TheSymbol : Symbol) return Boolean is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then Is_Unconstrained_Array_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheSymbol)); -- GAA External end IsUnconstrainedArrayType; -------------------------------------------------------------------------------- -- Is_Unconstrained_Task_Type -------------------------------------------------------------------------------- function IsUnconstrainedTaskType (TheSymbol : Symbol) return Boolean is function Is_Unconstrained_Task_Type (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return Is_Task_Type (Type_Mark => Type_Mark) and then not RawDict.Get_Type_Constrained (Type_Mark => Type_Mark); end Is_Unconstrained_Task_Type; begin -- IsUnconstrainedTaskType return RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then Is_Unconstrained_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheSymbol)); -- GAA External end IsUnconstrainedTaskType; -------------------------------------------------------------------------------- -- Is_Unconstrained_Protected_Type 
-------------------------------------------------------------------------------- function IsUnconstrainedProtectedType (TheSymbol : Symbol) return Boolean is function Is_Unconstrained_Protected_Type (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return Is_Protected_Type (Type_Mark => Type_Mark) and then not RawDict.Get_Type_Constrained (Type_Mark => Type_Mark); end Is_Unconstrained_Protected_Type; begin -- IsUnconstrainedProtectedType return RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then Is_Unconstrained_Protected_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheSymbol)); -- GAA External end IsUnconstrainedProtectedType; -------------------------------------------------------------------------------- -- Is_Array_Attribute -------------------------------------------------------------------------------- function IsArrayAttribute (Name : LexTokenManager.Lex_String; TypeMark : Symbol) return Boolean is function Is_Array_Attribute (Name : LexTokenManager.Lex_String; Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; --# in LexTokenManager.State; is Result : Boolean; begin if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.First_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Last_Token) = LexTokenManager.Str_Eq then Result := Type_Is_Array (Type_Mark => Type_Mark); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Length_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Range_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Component_Size_Token) = LexTokenManager.Str_Eq then Result := True; else Result := False; end if; return Result; end Is_Array_Attribute; begin -- IsArrayAttribute return Is_Array_Attribute (Name => Name, Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark)); -- GAA External end IsArrayAttribute; -------------------------------------------------------------------------------- -- Own_Variable_Has_Type -------------------------------------------------------------------------------- -- Note that this function returns false if the type of the own variable is -- an abstract proof type. Note also that it may return true (and does) -- even if the variable is not declared (Is_Declared). The function checks -- for visibility of type declaration from the current scope, but does not -- take into account whether the declaration itself (which may not exist) -- is visible from the given scope or whether the viewpoint is a child -- package seeing private declarations. See also GetOwnVariableTypeHere and -- TypeIsOwnAbstractHere. 
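-- Illustrative sketch (hypothetical annotation; the reading of the dictionary flags is an
-- assumption): for an own variable announced as
--    --# own State : T;
-- the function below reports True once T denotes a usable Ada type, i.e. the own variable is
-- recorded as typed, or T's declaration sits in a visible part, or T is declared locally to
-- the querying scope. It reports False while T is the unknown type or an abstract proof type.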
function OwnVariableHasType (OwnVariable : Symbol; Scope : Scopes) return Boolean is function Own_Variable_Has_Type (Own_Variable : RawDict.Variable_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is Type_Mark : RawDict.Type_Info_Ref; Result : Boolean; begin Type_Mark := RawDict.Get_Variable_Type (The_Variable => Own_Variable); if Type_Mark = Get_Unknown_Type_Mark then Result := False; elsif RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Abstract_Proof_Type_Item then Result := False; elsif RawDict.Get_Own_Variable_Typed (The_Own_Variable => RawDict.Get_Variable_Own_Variable (The_Variable => Own_Variable)) then Result := True; else Result := Get_Visibility (Scope => Get_Type_Scope (Type_Mark => Type_Mark)) = Visible or else IsLocal (Scope, Get_Type_Scope (Type_Mark => Type_Mark)); end if; return Result; end Own_Variable_Has_Type; begin -- OwnVariableHasType return Own_Variable_Has_Type (Own_Variable => RawDict.Get_Variable_Info_Ref (OwnVariable), -- GAA External Scope => Scope); end OwnVariableHasType; -------------------------------------------------------------------------------- -- Own_Variable_Is_Announced -------------------------------------------------------------------------------- function OwnVariableIsAnnounced (Variable : Symbol) return Boolean is function Own_Variable_Is_Announced (The_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Own_Variable_Announced (The_Own_Variable => RawDict.Get_Variable_Own_Variable (The_Variable => The_Variable)); end Own_Variable_Is_Announced; begin -- OwnVariableIsAnnounced return Own_Variable_Is_Announced (The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External end OwnVariableIsAnnounced; -------------------------------------------------------------------------------- -- Own_Variable_Is_Initialized -------------------------------------------------------------------------------- function Own_Variable_Is_Initialized (The_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Own_Variable_Initialized (The_Own_Variable => RawDict.Get_Variable_Own_Variable (The_Variable => The_Variable)); end Own_Variable_Is_Initialized; -------------------------------------------------------------------------------- function OwnVariableIsInitialized (Variable : Symbol) return Boolean is begin return Own_Variable_Is_Initialized (The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External end OwnVariableIsInitialized; -------------------------------------------------------------------------------- -- Own_Variable_Has_Constituents -------------------------------------------------------------------------------- function Own_Variable_Has_Constituents (The_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Own_Variable_Constituents (The_Own_Variable => RawDict.Get_Variable_Own_Variable (The_Variable => The_Variable)) /= RawDict.Null_Constituent_Info_Ref; end Own_Variable_Has_Constituents; -------------------------------------------------------------------------------- function OwnVariableHasConstituents (Variable : Symbol) return Boolean is begin return Own_Variable_Has_Constituents (The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External end OwnVariableHasConstituents; -------------------------------------------------------------------------------- -- First_Constituent -------------------------------------------------------------------------------- 
function First_Constituent (The_Variable : RawDict.Variable_Info_Ref) return Iterator --# global in Dict; is The_Constituent : RawDict.Constituent_Info_Ref; Constituents : Iterator := NullIterator; begin The_Constituent := RawDict.Get_Own_Variable_Constituents (The_Own_Variable => RawDict.Get_Variable_Own_Variable (The_Variable => The_Variable)); if The_Constituent /= RawDict.Null_Constituent_Info_Ref then Constituents := Iterator' (ConstituentIterator, IsAbstract, RawDict.Get_Constituent_Symbol (The_Constituent), NullSymbol); end if; return Constituents; end First_Constituent; -------------------------------------------------------------------------------- function FirstConstituent (Subject : Symbol) return Iterator is begin return First_Constituent (The_Variable => RawDict.Get_Variable_Info_Ref (Subject)); -- GAA External end FirstConstituent; -------------------------------------------------------------------------------- -- Get_Main_Program -------------------------------------------------------------------------------- function Get_Main_Program return RawDict.Subprogram_Info_Ref --# global in Dict; is begin return Dict.Main.Subprogram; end Get_Main_Program; -------------------------------------------------------------------------------- function GetMainProgram return Symbol is begin return RawDict.Get_Subprogram_Symbol (Get_Main_Program); -- GAA External end GetMainProgram; -------------------------------------------------------------------------------- -- Is_Main_Program -------------------------------------------------------------------------------- -- special handling that affects the main subprogram also affects the "subprogram" -- that handles partition-wide stuff so this function returns true for either function Is_Main_Program (The_Subprogram : RawDict.Subprogram_Info_Ref) return Boolean --# global in Dict; is begin return The_Subprogram = Get_Main_Program or else The_Subprogram = Get_The_Partition; end Is_Main_Program; -------------------------------------------------------------------------------- function IsMainProgram (Subprogram : Symbol) return Boolean is begin return RawDict.GetSymbolDiscriminant (Subprogram) = Subprogram_Symbol and then Is_Main_Program (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Subprogram)); -- GAA External end IsMainProgram; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- -- this can be used to distinguish the main program symbol from the partition sym. 
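-- Illustrative use (a sketch only, hypothetical caller): a symbol accepted by IsMainProgram
-- can still be split into its two cases, for example
--    if IsThePartition (Subprogram) then
--       -- the notional partition-wide "subprogram"
--    else
--       -- the real main program
--    end if;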
function IsThePartition (Subprogram : Symbol) return Boolean is begin return RawDict.Get_Subprogram_Info_Ref (Subprogram) = Get_The_Partition; -- GAA External end IsThePartition; -------------------------------------------------------------------------------- function FirstInheritsClause (Sym : Symbol) return Iterator is Inherit_Clause : RawDict.Context_Clause_Info_Ref; Inherit_Clauses : Iterator := NullIterator; begin case RawDict.GetSymbolDiscriminant (Sym) is when Package_Symbol => Inherit_Clause := RawDict.Get_Package_Inherit_Clauses (The_Package => RawDict.Get_Package_Info_Ref (Item => Sym)); -- GAA External when Subprogram_Symbol => if Is_Main_Program (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Sym)) then -- GAA External Inherit_Clause := RawDict.Get_Subprogram_Inherit_Clauses (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Sym)); -- GAA External else Inherit_Clause := RawDict.Null_Context_Clause_Info_Ref; end if; when others => -- non-exec code Inherit_Clause := RawDict.Null_Context_Clause_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.FirstInheritsClause"); end case; if Inherit_Clause /= RawDict.Null_Context_Clause_Info_Ref then case RawDict.Get_Context_Clause_Is_Subprogram (The_Context_Clause => Inherit_Clause) is when False => Inherit_Clauses := Iterator' (InheritedPackageIterator, IsAbstract, RawDict.Get_Package_Symbol (RawDict.Get_Context_Clause_Package (The_Context_Clause => Inherit_Clause)), RawDict.Get_Context_Clause_Symbol (Inherit_Clause)); when True => Inherit_Clauses := Iterator' (InheritedPackageIterator, IsAbstract, RawDict.Get_Subprogram_Symbol (RawDict.Get_Context_Clause_Subprogram (The_Context_Clause => Inherit_Clause)), RawDict.Get_Context_Clause_Symbol (Inherit_Clause)); end case; end if; return Inherit_Clauses; end FirstInheritsClause; -------------------------------------------------------------------------------- -- First_Local_Variable -------------------------------------------------------------------------------- function First_Local_Variable (Subprogram : Symbol) return Iterator is Variables : Iterator; -------------------------------------------------------------------------------- function First_Local_Variable_Local (The_Declaration : RawDict.Declaration_Info_Ref) return Iterator --# global in Dict; is Tmp_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol; Variables : Iterator := NullIterator; begin Tmp_Declaration := The_Declaration; while Tmp_Declaration /= RawDict.Null_Declaration_Info_Ref loop Item := RawDict.Get_Declaration_Item (The_Declaration => Tmp_Declaration); if Is_Variable (Item) then Variables := Iterator' (LocalVariableIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (Tmp_Declaration)); Tmp_Declaration := RawDict.Null_Declaration_Info_Ref; else Tmp_Declaration := RawDict.Get_Next_Declaration (The_Declaration => Tmp_Declaration); end if; end loop; return Variables; end First_Local_Variable_Local; begin -- First_Local_Variable case RawDict.GetSymbolDiscriminant (Subprogram) is when Subprogram_Symbol => Variables := First_Local_Variable_Local (The_Declaration => RawDict.Get_Subprogram_First_Declaration (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Subprogram))); -- GAA External when Type_Symbol => case RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Subprogram)) is -- GAA External when Task_Type_Item => Variables := First_Local_Variable_Local (The_Declaration => 
RawDict.Get_Task_Type_First_Local_Declaration (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => Subprogram))); -- GAA External when Protected_Type_Item => Variables := NullIterator; when others => -- non-exec code Variables := NullIterator; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.First_Local_Variable"); end case; when others => -- non-exec code Variables := NullIterator; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.First_Local_Variable"); end case; return Variables; end First_Local_Variable; -------------------------------------------------------------------------------- -- First_Initialized_Variable -------------------------------------------------------------------------------- function First_Initialized_Variable (Subprogram : Symbol) return Iterator is Variables : Iterator; -------------------------------------------------------------------------------- function First_Initialized_Variable_Local (The_Declaration : RawDict.Declaration_Info_Ref) return Iterator --# global in Dict; is Tmp_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol; Variables : Iterator := NullIterator; begin Tmp_Declaration := The_Declaration; while Tmp_Declaration /= RawDict.Null_Declaration_Info_Ref loop Item := RawDict.Get_Declaration_Item (The_Declaration => Tmp_Declaration); if Is_Variable (Item) and then RawDict.Get_Variable_Initialized (The_Variable => RawDict.Get_Variable_Info_Ref (Item)) then Variables := Iterator' (InitializedVariableIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (Tmp_Declaration)); Tmp_Declaration := RawDict.Null_Declaration_Info_Ref; else Tmp_Declaration := RawDict.Get_Next_Declaration (The_Declaration => Tmp_Declaration); end if; end loop; return Variables; end First_Initialized_Variable_Local; begin -- First_Initialized_Variable case RawDict.GetSymbolDiscriminant (Subprogram) is when Subprogram_Symbol => Variables := First_Initialized_Variable_Local (The_Declaration => RawDict.Get_Subprogram_First_Declaration (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Subprogram))); -- GAA External when Type_Symbol => case RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Subprogram)) is -- GAA External when Task_Type_Item => Variables := First_Initialized_Variable_Local (The_Declaration => RawDict.Get_Task_Type_First_Local_Declaration (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => Subprogram))); -- GAA External when Protected_Type_Item => Variables := NullIterator; when others => -- non-exec code Variables := NullIterator; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.First_Initialized_Variable"); end case; when others => -- non-exec code Variables := NullIterator; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.First_Initialized_Variable"); end case; return Variables; end First_Initialized_Variable; -------------------------------------------------------------------------------- function IsNullIterator (Current : Iterator) return Boolean is begin return Current.Discriminant = NullSymIterator; end IsNullIterator; -------------------------------------------------------------------------------- function GetNumberOfGlobalVariables (Abstraction : Abstractions; Subprogram : Symbol) return Natural is GlobalVariable : Iterator; Count : Natural := Natural'First; begin GlobalVariable := FirstGlobalVariable (Abstraction, Subprogram); while not 
IsNullIterator (GlobalVariable) and then Count < Natural'Last loop GlobalVariable := NextSymbol (GlobalVariable); Count := Count + 1; end loop; return Count; end GetNumberOfGlobalVariables; -------------------------------------------------------------------------------- -- Get_Array_Index -------------------------------------------------------------------------------- function Get_Array_Index (Type_Mark : RawDict.Type_Info_Ref; Dimension : Positive) return RawDict.Type_Info_Ref --# global in Dict; is Array_Index : Iterator; begin Array_Index := First_Array_Index (Type_Mark => Type_Mark); for Dim in Positive range 1 .. Dimension - 1 loop Array_Index := NextSymbol (Array_Index); end loop; return RawDict.Get_Type_Info_Ref (CurrentSymbol (Array_Index)); end Get_Array_Index; -------------------------------------------------------------------------------- function GetArrayIndex (TypeMark : Symbol; Dimension : Positive) return Symbol is begin return RawDict.Get_Type_Symbol -- GAA External (Get_Array_Index (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark), -- GAA External Dimension => Dimension)); end GetArrayIndex; -------------------------------------------------------------------------------- -- Get_Simple_Name -------------------------------------------------------------------------------- function Get_Subcomponent_Simple_Name (The_Subcomponent : RawDict.Subcomponent_Info_Ref) return LexTokenManager.Lex_String --# global Dict; is begin return RawDict.Get_Record_Component_Name (The_Record_Component => RawDict.Get_Subcomponent_Record_Component (The_Subcomponent => The_Subcomponent)); end Get_Subcomponent_Simple_Name; -------------------------------------------------------------------------------- function Get_Parameter_Constraint_Simple_Name (The_Parameter_Constraint : RawDict.Parameter_Constraint_Info_Ref) return LexTokenManager.Lex_String --# global Dict; is begin return RawDict.Get_Subprogram_Parameter_Name (The_Subprogram_Parameter => RawDict.Get_Parameter_Constraint_Subprogram_Parameter (The_Parameter_Constraint => The_Parameter_Constraint)); end Get_Parameter_Constraint_Simple_Name; -------------------------------------------------------------------------------- function GetSimpleName (Item : Symbol) return LexTokenManager.Lex_String is Name : LexTokenManager.Lex_String; Item_Local : Symbol; -------------------------------------------------------------------------------- function Get_Generic_Parameter_Simple_Name (The_Generic_Parameter : RawDict.Generic_Parameter_Info_Ref) return LexTokenManager.Lex_String --# global Dict; is Result : LexTokenManager.Lex_String; begin -- A generic parameter might be a type or an object. The Kind field tells us which. The name -- lives in either the type or the object pointed at. 
case RawDict.Get_Generic_Parameter_Kind (The_Generic_Parameter => The_Generic_Parameter) is when Generic_Type_Parameter => Result := RawDict.Get_Type_Name (Type_Mark => RawDict.Get_Generic_Parameter_Type (The_Generic_Parameter => The_Generic_Parameter)); when Generic_Object_Parameter => Result := RawDict.Get_Constant_Name (The_Constant => RawDict.Get_Generic_Parameter_Object (The_Generic_Parameter => The_Generic_Parameter)); end case; return Result; end Get_Generic_Parameter_Simple_Name; -------------------------------------------------------------------------------- function Get_Constituent_Simple_Name (The_Constituent : RawDict.Constituent_Info_Ref) return LexTokenManager.Lex_String --# global Dict; is begin return RawDict.Get_Variable_Name (The_Variable => RawDict.Get_Constituent_Variable (The_Constituent => The_Constituent)); end Get_Constituent_Simple_Name; -------------------------------------------------------------------------------- function Get_Generic_Unit_Simple_Name (The_Generic_Unit : RawDict.Generic_Unit_Info_Ref) return LexTokenManager.Lex_String --# global Dict; is Name : LexTokenManager.Lex_String; begin case RawDict.Get_Generic_Unit_Kind (The_Generic_Unit => The_Generic_Unit) is when Generic_Of_Package => Name := RawDict.Get_Package_Name (The_Package => RawDict.Get_Generic_Unit_Owning_Package (The_Generic_Unit => The_Generic_Unit)); when Generic_Of_Subprogram => Name := RawDict.Get_Subprogram_Name (The_Subprogram => RawDict.Get_Generic_Unit_Owning_Subprogram (The_Generic_Unit => The_Generic_Unit)); end case; return Name; end Get_Generic_Unit_Simple_Name; begin -- GetSimpleName Item_Local := Item; -- A special symbol kind is used to store the value of variables on entry to a for -- loop. We "dereference" these back to the original variable before attempting -- to retrieve the name if RawDict.GetSymbolDiscriminant (Item_Local) = LoopEntryVariableSymbol then Item_Local := RawDict.GetLoopEntryVariableOriginalVar (Item); end if; case RawDict.GetSymbolDiscriminant (Item_Local) is when Null_Symbol => Name := LexTokenManager.Null_String; when Type_Symbol => Name := RawDict.Get_Type_Name (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Item_Local)); when Enumeration_Literal_Symbol => Name := RawDict.Get_Enumeration_Literal_Name (The_Enumeration_Literal => RawDict.Get_Enumeration_Literal_Info_Ref (Item => Item_Local)); when Record_Component_Symbol => Name := RawDict.Get_Record_Component_Name (The_Record_Component => RawDict.Get_Record_Component_Info_Ref (Item => Item_Local)); when Subcomponent_Symbol => Name := Get_Subcomponent_Simple_Name (The_Subcomponent => RawDict.Get_Subcomponent_Info_Ref (Item => Item_Local)); when Variable_Symbol => Name := RawDict.Get_Variable_Name (The_Variable => RawDict.Get_Variable_Info_Ref (Item => Item_Local)); when Quantified_Variable_Symbol => Name := RawDict.Get_Quantified_Variable_Name (The_Quantified_Variable => RawDict.Get_Quantified_Variable_Info_Ref (Item => Item_Local)); when Constant_Symbol => Name := RawDict.Get_Constant_Name (The_Constant => RawDict.Get_Constant_Info_Ref (Item => Item_Local)); when Package_Symbol => Name := RawDict.Get_Package_Name (The_Package => RawDict.Get_Package_Info_Ref (Item => Item_Local)); when Subprogram_Symbol => Name := RawDict.Get_Subprogram_Name (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Item_Local)); when ImplicitProofFunctionSymbol => Name := RawDict.Get_Subprogram_Name (The_Subprogram => RawDict.GetImplicitProofFunctionAdaFunction (Item_Local)); when Implicit_Return_Variable_Symbol => 
Name := RawDict.Get_Implicit_Return_Variable_Name (The_Implicit_Return_Variable => RawDict.Get_Implicit_Return_Variable_Info_Ref (Item => Item_Local)); when Subprogram_Parameter_Symbol => Name := RawDict.Get_Subprogram_Parameter_Name (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => Item_Local)); when Parameter_Constraint_Symbol => -- Parameter constraint symbols don't have a name of their own. We return the name of the parameter -- with which they are associated. Dictionary subunit GenerateSimpleName creates a synthetic name -- for the constraint using the parameter name as a prefix Name := Get_Parameter_Constraint_Simple_Name (The_Parameter_Constraint => RawDict.Get_Parameter_Constraint_Info_Ref (Item => Item_Local)); when LoopSymbol => Name := RawDict.GetLoopName (Item_Local); when KnownDiscriminantSymbol => Name := RawDict.GetDiscriminantName (Item_Local); when LoopParameterSymbol => Name := RawDict.GetLoopParameterName (Item_Local); when Generic_Parameter_Symbol => Name := Get_Generic_Parameter_Simple_Name (The_Generic_Parameter => RawDict.Get_Generic_Parameter_Info_Ref (Item => Item_Local)); --1606 Item below added by JEB as part of generic package work when Constituent_Symbol => Name := Get_Constituent_Simple_Name (The_Constituent => RawDict.Get_Constituent_Info_Ref (Item => Item_Local)); when Generic_Unit_Symbol => Name := Get_Generic_Unit_Simple_Name (The_Generic_Unit => RawDict.Get_Generic_Unit_Info_Ref (Item => Item_Local)); when others => -- non-exec code Name := LexTokenManager.Null_String; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.GetSimpleName"); end case; return Name; end GetSimpleName; -------------------------------------------------------------------------------- -- Get_Scalar_Attribute_Type -------------------------------------------------------------------------------- function Get_Scalar_Attribute_Type (Name : LexTokenManager.Lex_String; Type_Mark : RawDict.Type_Info_Ref) return RawDict.Type_Info_Ref --# global in Dict; --# in LexTokenManager.State; is separate; -------------------------------------------------------------------------------- function GetScalarAttributeType (Name : LexTokenManager.Lex_String; TypeMark : Symbol) return Symbol is begin return RawDict.Get_Type_Symbol -- GAA External (Get_Scalar_Attribute_Type (Name => Name, Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark))); -- GAA External end GetScalarAttributeType; -------------------------------------------------------------------------------- -- Get_Scalar_Attribute_Value -------------------------------------------------------------------------------- function Get_Scalar_Attribute_Value (Base : Boolean; Name : LexTokenManager.Lex_String; Type_Mark : RawDict.Type_Info_Ref) return LexTokenManager.Lex_String --# global in Dict; --# in LexTokenManager.State; is separate; -------------------------------------------------------------------------------- function GetScalarAttributeValue (Base : Boolean; Name : LexTokenManager.Lex_String; TypeMark : Symbol) return LexTokenManager.Lex_String is begin return Get_Scalar_Attribute_Value (Base => Base, Name => Name, Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark)); -- GAA External end GetScalarAttributeValue; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function ContainsFloat (TypeMark : Symbol) return Boolean is begin return RawDict.Get_Type_Contains_Float (Type_Mark 
=> RawDict.Get_Type_Info_Ref (TypeMark)); -- GAA External end ContainsFloat; -------------------------------------------------------------------------------- -- Type_Is_Limited -------------------------------------------------------------------------------- function Type_Is_Limited (Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is Result : Boolean; begin case RawDict.Get_Type_Limited (Type_Mark => Type_Mark) is when Never => Result := False; when Sometimes => Result := not IsLocal (Scope, Get_Type_Scope (Type_Mark => Type_Mark)); when Always => Result := True; end case; return Result; end Type_Is_Limited; -------------------------------------------------------------------------------- function TypeIsLimited (TypeMark : Symbol; Scope : Scopes) return Boolean is begin return Type_Is_Limited (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark), -- GAA External Scope => Scope); end TypeIsLimited; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- procedure Add_Integer_Type_Mark (Type_Mark : in RawDict.Type_Info_Ref; Static : in Boolean; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String) --# global in out Dict; --# derives Dict from *, --# Lower, --# Static, --# Type_Mark, --# Upper; is begin RawDict.Set_Type_Discriminant (Type_Mark => Type_Mark, Discriminant => Integer_Type_Item); RawDict.Set_Type_Static (Type_Mark => Type_Mark, Static => Static); RawDict.Set_Type_Lower (Type_Mark => Type_Mark, Lower => Lower); RawDict.Set_Type_Upper (Type_Mark => Type_Mark, Upper => Upper); end Add_Integer_Type_Mark; -------------------------------------------------------------------------------- procedure Add_Modular_Type_Mark (Type_Mark : in RawDict.Type_Info_Ref; Static : in Boolean; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Modulus : in LexTokenManager.Lex_String) --# global in out Dict; --# derives Dict from *, --# Lower, --# Modulus, --# Static, --# Type_Mark, --# Upper; is begin RawDict.Set_Type_Discriminant (Type_Mark => Type_Mark, Discriminant => Modular_Type_Item); RawDict.Set_Type_Static (Type_Mark => Type_Mark, Static => Static); RawDict.Set_Type_Lower (Type_Mark => Type_Mark, Lower => Lower); RawDict.Set_Type_Upper (Type_Mark => Type_Mark, Upper => Upper); RawDict.Set_Type_Modulus (Type_Mark => Type_Mark, Modulus => Modulus); end Add_Modular_Type_Mark; -------------------------------------------------------------------------------- procedure Add_Floating_Point_Type_Mark (Type_Mark : in RawDict.Type_Info_Ref; Static : in Boolean) --# global in out Dict; --# derives Dict from *, --# Static, --# Type_Mark; is begin RawDict.Set_Type_Discriminant (Type_Mark => Type_Mark, Discriminant => Floating_Point_Type_Item); RawDict.Set_Type_Static (Type_Mark => Type_Mark, Static => Static); RawDict.Set_Type_Contains_Float (Type_Mark => Type_Mark, Contains_Float => True); end Add_Floating_Point_Type_Mark; -------------------------------------------------------------------------------- procedure Add_Fixed_Point_Type_Mark (Type_Mark : in RawDict.Type_Info_Ref; Static : in Boolean) --# global in out Dict; --# derives Dict from *, --# Static, --# Type_Mark; is begin RawDict.Set_Type_Discriminant (Type_Mark => Type_Mark, Discriminant => Fixed_Point_Type_Item); RawDict.Set_Type_Static (Type_Mark => Type_Mark, Static => Static); end Add_Fixed_Point_Type_Mark; 
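--------------------------------------------------------------------------------
-- A minimal usage sketch of the Add_*_Type_Mark family above, assuming a
-- caller that has just created a type record and wishes to classify it as a
-- static signed integer type. New_Type, Lower_Bound_Lex and Upper_Bound_Lex
-- are hypothetical names used only for illustration:
--
--    Add_Integer_Type_Mark
--      (Type_Mark => New_Type,        -- a RawDict.Type_Info_Ref (assumed)
--       Static    => True,
--       Lower     => Lower_Bound_Lex, -- LexTokenManager.Lex_String bounds (assumed)
--       Upper     => Upper_Bound_Lex);
--
-- Each procedure in the family sets only the discriminant, staticness and the
-- fields relevant to its kind (bounds, modulus, Contains_Float, and so on).
--------------------------------------------------------------------------------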
-------------------------------------------------------------------------------- procedure Add_Array_Type_Mark (Type_Mark : in RawDict.Type_Info_Ref; Component_Type : in RawDict.Type_Info_Ref; Static : in Boolean) --# global in out Dict; --# derives Dict from *, --# Component_Type, --# Static, --# Type_Mark; is Is_Private, Is_Limited : TriState; begin case RawDict.Get_Type_Private (Type_Mark => Component_Type) is when Never => Is_Private := Never; when Sometimes => if IsLocal (Get_Type_Scope (Type_Mark => Type_Mark), Get_Type_Scope (Type_Mark => Component_Type)) then Is_Private := Sometimes; else Is_Private := Always; end if; when Always => Is_Private := Always; end case; case RawDict.Get_Type_Limited (Type_Mark => Component_Type) is when Never => Is_Limited := Never; when Sometimes => if IsLocal (Get_Type_Scope (Type_Mark => Type_Mark), Get_Type_Scope (Type_Mark => Component_Type)) then Is_Limited := Sometimes; else Is_Limited := Always; end if; when Always => Is_Limited := Always; end case; RawDict.Set_Type_Discriminant (Type_Mark => Type_Mark, Discriminant => Array_Type_Item); RawDict.Set_Type_Static (Type_Mark => Type_Mark, Static => Static); RawDict.Set_Type_Private (Type_Mark => Type_Mark, Is_Private => Is_Private); RawDict.Set_Type_Limited (Type_Mark => Type_Mark, Is_Limited => Is_Limited); RawDict.Set_Type_Equality_Defined (Type_Mark => Type_Mark, Equality_Defined => RawDict.Get_Type_Equality_Defined (Type_Mark => Component_Type)); RawDict.Set_Type_Contains_Float (Type_Mark => Type_Mark, Contains_Float => RawDict.Get_Type_Contains_Float (Type_Mark => Component_Type)); RawDict.Set_Type_Array_Component (Type_Mark => Type_Mark, Component_Type => Component_Type); end Add_Array_Type_Mark; -------------------------------------------------------------------------------- procedure Add_Enumeration_Type_Mark (Type_Mark : in RawDict.Type_Info_Ref; Static : in Boolean) --# global in out Dict; --# derives Dict from *, --# Static, --# Type_Mark; is begin RawDict.Set_Type_Discriminant (Type_Mark => Type_Mark, Discriminant => Enumeration_Type_Item); RawDict.Set_Type_Static (Type_Mark => Type_Mark, Static => Static); end Add_Enumeration_Type_Mark; -------------------------------------------------------------------------------- -- Add_Enumeration_Literal -------------------------------------------------------------------------------- procedure Add_Enumeration_Literal (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Position : in LexTokenManager.Lex_String; Type_Mark : in RawDict.Type_Info_Ref; The_Enumeration_Literal : out RawDict.Enumeration_Literal_Info_Ref) --# global in out Dict; --# derives Dict, --# The_Enumeration_Literal from Comp_Unit, --# Declaration, --# Dict, --# Name, --# Position, --# Type_Mark; --# post The_Enumeration_Literal /= RawDict.Null_Enumeration_Literal_Info_Ref; is Current : RawDict.Enumeration_Literal_Info_Ref; begin RawDict.Create_Enumeration_Literal (Name => Name, Position => Position, Enumeration_Type => Type_Mark, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, The_Enumeration_Literal => The_Enumeration_Literal); Current := RawDict.Get_Type_Last_Enumeration_Literal (Type_Mark => Type_Mark); if Current = RawDict.Null_Enumeration_Literal_Info_Ref then RawDict.Set_Type_First_Enumeration_Literal (Type_Mark => Type_Mark, Enumeration_Literal => The_Enumeration_Literal); RawDict.Set_Type_Lower (Type_Mark => Type_Mark, Lower => Position); else RawDict.Set_Next_Enumeration_Literal (The_Enumeration_Literal 
=> Current, Next => The_Enumeration_Literal); end if; RawDict.Set_Type_Last_Enumeration_Literal (Type_Mark => Type_Mark, Enumeration_Literal => The_Enumeration_Literal); RawDict.Set_Type_Upper (Type_Mark => Type_Mark, Upper => Position); end Add_Enumeration_Literal; -------------------------------------------------------------------------------- procedure AddEnumerationLiteral (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Position : in LexTokenManager.Lex_String; The_Type : in Symbol; TheEnumerationLiteral : out Symbol) is The_Enumeration_Literal : RawDict.Enumeration_Literal_Info_Ref; begin Add_Enumeration_Literal (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Position => Position, Type_Mark => RawDict.Get_Type_Info_Ref (The_Type), -- GAA External The_Enumeration_Literal => The_Enumeration_Literal); TheEnumerationLiteral := RawDict.Get_Enumeration_Literal_Symbol (The_Enumeration_Literal); -- GAA External end AddEnumerationLiteral; -------------------------------------------------------------------------------- -- Get_Predefined_Positive_Subtype -------------------------------------------------------------------------------- function Get_Predefined_Positive_Subtype return RawDict.Type_Info_Ref --# global in Dict; is begin return Dict.Types.The_Predefined_Types (Predefined_Positive_Subtype); end Get_Predefined_Positive_Subtype; -------------------------------------------------------------------------------- function GetPredefinedPositiveSubtype return Symbol is begin return RawDict.Get_Type_Symbol (Get_Predefined_Positive_Subtype); -- GAA External end GetPredefinedPositiveSubtype; -------------------------------------------------------------------------------- -- Get_Predefined_Package_ASCII -------------------------------------------------------------------------------- function Get_Predefined_Package_ASCII return RawDict.Package_Info_Ref --# global in Dict; is begin return Dict.Packages.Package_ASCII; end Get_Predefined_Package_ASCII; -------------------------------------------------------------------------------- function GetPredefinedPackageASCII return Symbol is begin return RawDict.Get_Package_Symbol (Get_Predefined_Package_ASCII); -- GAA External end GetPredefinedPackageASCII; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function TypeIsRecord (TypeMark : Symbol) return Boolean is begin return RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark)) = Record_Type_Item; -- GAA External end TypeIsRecord; -------------------------------------------------------------------------------- -- Get_Array_Component -------------------------------------------------------------------------------- function Get_Array_Component (Type_Mark : RawDict.Type_Info_Ref) return RawDict.Type_Info_Ref --# global in Dict; is Result : RawDict.Type_Info_Ref; begin if Type_Mark = Get_Unknown_Type_Mark then Result := Type_Mark; else Result := RawDict.Get_Type_Array_Component (Type_Mark => Get_Root_Type (Type_Mark => Type_Mark)); end if; return Result; end Get_Array_Component; -------------------------------------------------------------------------------- function GetArrayComponent (TypeMark : Symbol) return Symbol is begin return RawDict.Get_Type_Symbol (Get_Array_Component (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark))); -- GAA External end GetArrayComponent; 
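--------------------------------------------------------------------------------
-- A minimal sketch of how Add_Enumeration_Literal above is driven when an
-- enumeration type is declared, assuming one call per literal in declaration
-- order. Lit_Name, Unit, Decl_Loc, Pos_Lex, Enum_Type and New_Lit are
-- hypothetical names used only for illustration:
--
--    Add_Enumeration_Literal
--      (Name                    => Lit_Name,   -- Lex_String of the literal
--       Comp_Unit               => Unit,
--       Declaration             => Decl_Loc,
--       Position                => Pos_Lex,    -- "0", "1", ... as Lex_Strings
--       Type_Mark               => Enum_Type,
--       The_Enumeration_Literal => New_Lit);
--
-- The first call records the type's first literal and its Lower bound; every
-- call re-points the last-literal link and the Upper bound at the newest
-- literal, so the literals form a singly linked list in declaration order.
--------------------------------------------------------------------------------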
-------------------------------------------------------------------------------- -- Get_Number_Of_Dimensions -------------------------------------------------------------------------------- function Get_Number_Of_Dimensions (Type_Mark : RawDict.Type_Info_Ref) return Positive --# global in Dict; is Array_Index : Iterator; Count : Positive := Positive'First; begin -- If Type_Mark denotes a subtype, then find its root type Array_Index := First_Array_Index (Type_Mark => Get_Root_Type (Type_Mark => Type_Mark)); loop Array_Index := NextSymbol (Array_Index); exit when IsNullIterator (Array_Index) or else Count = Positive'Last; Count := Count + 1; end loop; return Count; end Get_Number_Of_Dimensions; -------------------------------------------------------------------------------- function GetNumberOfDimensions (TypeMark : Symbol) return Positive is begin return Get_Number_Of_Dimensions (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark)); -- GAA External end GetNumberOfDimensions; -------------------------------------------------------------------------------- -- Type_Is_Boolean -------------------------------------------------------------------------------- function Type_Is_Boolean (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return Get_Root_Type (Type_Mark => Type_Mark) = Get_Predefined_Boolean_Type; end Type_Is_Boolean; -------------------------------------------------------------------------------- function TypeIsBoolean (TypeMark : Symbol) return Boolean is begin return Type_Is_Boolean (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark)); -- GAA External end TypeIsBoolean; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function Type_Is_Boolean_Array (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return Type_Is_Array (Type_Mark => Type_Mark) and then Type_Is_Boolean (Type_Mark => Get_Array_Component (Type_Mark => Type_Mark)) and then Get_Number_Of_Dimensions (Type_Mark => Type_Mark) = 1; end Type_Is_Boolean_Array; -------------------------------------------------------------------------------- procedure Write_Space (File : in SPARK_IO.File_Type) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File; is begin SPARK_IO.Put_Char (File, ' '); end Write_Space; -------------------------------------------------------------------------------- procedure Write_String (File : in SPARK_IO.File_Type; Str : in String) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# Str; is begin SPARK_IO.Put_String (File, Str, 0); end Write_String; -------------------------------------------------------------------------------- procedure Write_Line (File : in SPARK_IO.File_Type; Str : in String) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# Str; is begin SPARK_IO.Put_Line (File, Str, 0); end Write_Line; -------------------------------------------------------------------------------- function LoopHasName (TheLoop : Symbol) return Boolean is begin return LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => GetSimpleName (TheLoop), Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq; end LoopHasName; -------------------------------------------------------------------------------- function First_Loop (CompilationUnit : Symbol) return Iterator --# global in Dict; is The_Generic_Unit : 
RawDict.Generic_Unit_Info_Ref; TheLoop : Symbol; Loops : Iterator; begin case RawDict.GetSymbolDiscriminant (CompilationUnit) is when Subprogram_Symbol => TheLoop := RawDict.Get_Subprogram_First_Loop (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => CompilationUnit)); when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => CompilationUnit)), Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.First_Loop"); TheLoop := RawDict.Get_Task_Type_First_Loop (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => CompilationUnit)); when Package_Symbol => TheLoop := RawDict.Get_Package_First_Loop (The_Package => RawDict.Get_Package_Info_Ref (Item => CompilationUnit)); when Generic_Unit_Symbol => The_Generic_Unit := RawDict.Get_Generic_Unit_Info_Ref (Item => CompilationUnit); case RawDict.Get_Generic_Unit_Kind (The_Generic_Unit => The_Generic_Unit) is when Generic_Of_Package => TheLoop := RawDict.Get_Package_First_Loop (The_Package => RawDict.Get_Generic_Unit_Owning_Package (The_Generic_Unit => The_Generic_Unit)); when Generic_Of_Subprogram => TheLoop := RawDict.Get_Subprogram_First_Loop (The_Subprogram => RawDict.Get_Generic_Unit_Owning_Subprogram (The_Generic_Unit => The_Generic_Unit)); end case; when others => -- non-exec code TheLoop := NullSymbol; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.First_Loop"); end case; if TheLoop = NullSymbol then Loops := NullIterator; else Loops := Iterator'(LoopIterator, IsAbstract, TheLoop, NullSymbol); end if; return Loops; end First_Loop; -------------------------------------------------------------------------------- function FirstLoopOnEntryVar (TheLoop : Symbol) return Iterator is OnEntryVars : Iterator; TheVar : Symbol; begin TheVar := RawDict.GetLoopOnEntryVars (TheLoop); if TheVar = NullSymbol then OnEntryVars := NullIterator; else OnEntryVars := Iterator'(LoopOnEntryVarIterator, IsAbstract, TheVar, NullSymbol); end if; return OnEntryVars; end FirstLoopOnEntryVar; -------------------------------------------------------------------------------- function IsCompilationUnit (TheSymbol : Symbol) return Boolean is begin return Is_Subprogram (TheSymbol) or else RawDict.GetSymbolDiscriminant (TheSymbol) = Package_Symbol or else RawDict.GetSymbolDiscriminant (TheSymbol) = Generic_Unit_Symbol; end IsCompilationUnit; -------------------------------------------------------------------------------- procedure AddUsesUncheckedConversion (TheUnit : in Symbol) is begin case RawDict.GetSymbolDiscriminant (TheUnit) is when Subprogram_Symbol => -- covers procedures, functions and entries RawDict.Set_Subprogram_Uses_Unchecked_Conversion (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheUnit)); -- GAA External when Type_Symbol => -- covers task bodies SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheUnit)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in AddUsesUncheckedConversion"); RawDict.Set_Task_Type_Uses_Unchecked_Conversion (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => TheUnit)); -- GAA External when Package_Symbol => -- a package could only use U_C in elaboration code where it would be illegal null; when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AddUsesUncheckedConversion"); end case; end AddUsesUncheckedConversion; 
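--------------------------------------------------------------------------------
-- A minimal sketch of the iterator protocol used throughout this package,
-- here applied to First_Loop above. The_Loops and Unit_Sym are hypothetical
-- names used only for illustration, and the loop body is elided:
--
--    The_Loops := First_Loop (CompilationUnit => Unit_Sym);
--    while not IsNullIterator (The_Loops) loop
--       --  CurrentSymbol (The_Loops) yields the next loop symbol
--       The_Loops := NextSymbol (The_Loops);
--    end loop;
--
-- GetLoopNumber and Is_Loop_Name below follow this pattern.
--------------------------------------------------------------------------------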
-------------------------------------------------------------------------------- procedure AddAssignsFromExternal (TheUnit : in Symbol) is begin case RawDict.GetSymbolDiscriminant (TheUnit) is when Subprogram_Symbol => -- covers procedures, functions and entries RawDict.Set_Subprogram_Assigns_From_External (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheUnit)); -- GAA External when Type_Symbol => -- covers task bodies SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheUnit)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in AddAssignsFromExternal"); RawDict.Set_Task_Type_Assigns_From_External (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => TheUnit)); -- GAA External when Package_Symbol => -- a package could only read an external variable in elaboration code -- where it would be illegal null; when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AddAssignsFromExternal"); end case; end AddAssignsFromExternal; -------------------------------------------------------------------------------- function LastMostEnclosingLoop (CompilationUnit : Symbol) return Symbol is Sym : Symbol; TheResult : Symbol; begin case RawDict.GetSymbolDiscriminant (CompilationUnit) is when Subprogram_Symbol => Sym := RawDict.Get_Subprogram_Last_Loop (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => CompilationUnit)); -- GAA External when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => CompilationUnit)), Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in LastMostEnclosingLoop"); Sym := RawDict.Get_Task_Type_Last_Loop (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => CompilationUnit)); -- GAA External when Package_Symbol => Sym := RawDict.Get_Package_Last_Loop (RawDict.Get_Package_Info_Ref (Item => CompilationUnit)); -- GAA External when others => -- non-exec code Sym := NullSymbol; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.LastMostEnclosingLoop"); end case; TheResult := Sym; while not IsCompilationUnit (Sym) loop Sym := GetRegion (GetScope (Sym)); if RawDict.GetSymbolDiscriminant (Sym) = LoopSymbol then TheResult := Sym; end if; end loop; return TheResult; end LastMostEnclosingLoop; -------------------------------------------------------------------------------- function GetEnclosingCompilationUnit (Scope : Scopes) return Symbol is Current : Scopes; Region : Symbol; begin Current := Scope; loop Region := GetRegion (Current); exit when IsCompilationUnit (Region) or else (RawDict.GetSymbolDiscriminant (Region) = Type_Symbol -- to deal with task bodies and then Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Region))); Current := GetEnclosingScope (Current); end loop; return Region; end GetEnclosingCompilationUnit; -------------------------------------------------------------------------------- function GetLoopNumber (TheLoop : Symbol) return Positive is Loops : Iterator; Number : Positive := Positive'First; begin Loops := First_Loop (CompilationUnit => GetEnclosingCompilationUnit (Set_Visibility (The_Visibility => Local, The_Unit => TheLoop))); while CurrentSymbol (Loops) /= TheLoop and then Number < Positive'Last loop Loops := NextSymbol (Loops); Number := Number + 1; end loop; return Number; end GetLoopNumber; -------------------------------------------------------------------------------- function RecordComponentIsInherited 
(TheComponent : Symbol) return Boolean is begin return RawDict.Get_Record_Component_Inherited_Field (The_Record_Component => RawDict.Get_Record_Component_Info_Ref (TheComponent)); -- GAA External end RecordComponentIsInherited; -------------------------------------------------------------------------------- function Get_Own_Variable_Of_Protected_Implicit_In_Stream (The_Implicit_In_Stream : RawDict.Implicit_In_Stream_Info_Ref) return RawDict.Variable_Info_Ref --# global in Dict; is begin return RawDict.Get_Own_Variable_Variable (The_Own_Variable => RawDict.Get_Implicit_In_Stream_Own_Variable (The_Implicit_In_Stream => The_Implicit_In_Stream)); end Get_Own_Variable_Of_Protected_Implicit_In_Stream; -------------------------------------------------------------------------------- function GetSubprogramParameterConstraintDimension (TheConstraint : Symbol) return Positive is begin return RawDict.Get_Parameter_Constraint_Dimension (The_Parameter_Constraint => RawDict.Get_Parameter_Constraint_Info_Ref (TheConstraint)); -- GAA External end GetSubprogramParameterConstraintDimension; -------------------------------------------------------------------------------- -- Generate_Simple_Name -------------------------------------------------------------------------------- function Fetch_Simple_Name (Type_Mark : RawDict.Type_Info_Ref) return E_Strings.T --# global in Dict; --# in LexTokenManager.State; is Type_Mark_Local : RawDict.Type_Info_Ref; begin Type_Mark_Local := Type_Mark; if Is_Type (Type_Mark => Type_Mark_Local) and then RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark_Local) = Access_Type_Item then Type_Mark_Local := RawDict.Get_Type_Accesses (Type_Mark => Type_Mark_Local); end if; return LexTokenManager.Lex_String_To_String (Lex_Str => RawDict.Get_Type_Name (Type_Mark => Type_Mark_Local)); end Fetch_Simple_Name; -------------------------------------------------------------------------------- function GenerateSimpleName (Item : Symbol; Separator : String) return E_Strings.T is separate; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function GetAnyPrefixNeeded (Sym : Symbol; Scope : Scopes; Separator : String) return E_Strings.T is separate; -------------------------------------------------------------------------------- -- Is_Valid_Generic_Type_Association -------------------------------------------------------------------------------- function IsValidGenericTypeAssociation (Formal, Actual : Symbol; Scope : Scopes) return Boolean is function Is_Valid_Generic_Type_Association (Formal_Type, Actual_Type : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is type Array_Actual_Type is array (Type_Discriminant) of Boolean; type Array_Formal_Type is array (Generic_Type_Discriminant) of Array_Actual_Type; pragma Style_Checks (Off); The_Table : constant Array_Formal_Type := Array_Formal_Type' -- Actual type Unknown, Enumeration, Integer, Modular, Float, Fixed, Array, Record, Abstract, Protected, Task, Access, Generic -- Generic type (Array_Actual_Type'(False, False, False, False, False, False, False, False, False, False, False, False, False), -- Invalid_Generic_Type Array_Actual_Type'( True, True, True, True, True, True, True, True, False, True, True, True, False), -- Generic_Private_Type Array_Actual_Type'(False, True, True, True, False, False, False, False, False, False, False, False, False), -- Generic_Discrete_Type Array_Actual_Type'(False, False, True, False, 
False, False, False, False, False, False, False, False, False), -- Generic_Integer_Type Array_Actual_Type'(False, False, False, True, False, False, False, False, False, False, False, False, False), -- Generic_Modular_Type Array_Actual_Type'(False, False, False, False, True, False, False, False, False, False, False, False, False), -- Generic_Floating_Point_Type Array_Actual_Type'(False, False, False, False, False, True, False, False, False, False, False, False, False), -- Generic_Fixed_Point_Type Array_Actual_Type'(False, False, False, False, False, False, False, False, False, False, False, False, False)); -- Generic_Array_Type pragma Style_Checks (On); begin return RawDict.Get_Type_Discriminant (Type_Mark => Formal_Type) = Generic_Type_Item and then ((Type_Is_Limited (Type_Mark => Actual_Type, Scope => Scope) and then RawDict.Get_Type_Limited_Private (Type_Mark => Formal_Type)) or else (not Type_Is_Limited (Type_Mark => Actual_Type, Scope => Scope) and then not (Type_Is_Boolean (Type_Mark => Actual_Type) and then RawDict.Get_Type_Kind_Of_Generic (Type_Mark => Formal_Type) = Generic_Discrete_Type) and then not Is_Unconstrained_Array_Type (Type_Mark => Actual_Type) and then The_Table (RawDict.Get_Type_Kind_Of_Generic (Type_Mark => Formal_Type)) (RawDict.Get_Type_Discriminant (Type_Mark => Actual_Type)))); end Is_Valid_Generic_Type_Association; begin -- IsValidGenericTypeAssociation return Is_Valid_Generic_Type_Association (Formal_Type => RawDict.Get_Type_Info_Ref (Formal), -- GAA External Actual_Type => RawDict.Get_Type_Info_Ref (Actual), -- GAA External Scope => Scope); end IsValidGenericTypeAssociation; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function Get_Symbol_Compilation_Unit (Item : Symbol) return ContextManager.UnitDescriptors is begin return RawDict.Get_Symbol_Compilation_Unit (Item); end Get_Symbol_Compilation_Unit; -------------------------------------------------------------------------------- function Get_Symbol_Location (Item : Symbol) return LexTokenManager.Token_Position is begin return RawDict.Get_Symbol_Location (Item); end Get_Symbol_Location; -------------------------------------------------------------------------------- procedure Get_SLI_Type (Item : in Symbol; Result : out SLI_Type) is Type_Mark : RawDict.Type_Info_Ref; procedure Get_Type_Discriminant (Type_Mark : in RawDict.Type_Info_Ref; Is_A_Type : in Boolean; Result : out SLI_Type) --# global in CommandLineData.Content; --# in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Result from Dict, --# Is_A_Type, --# Type_Mark & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dict, --# Is_A_Type, --# LexTokenManager.State, --# Type_Mark; is begin if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => " Type symbol = ", Stop => 0); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => Fetch_Simple_Name (Type_Mark => Type_Mark)); SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => " : ", Stop => 0); end if; case RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) is when Unknown_Type_Item => if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "Unknown_Type", Stop => 0); end if; Result := SLI_Unknown_Type; when Enumeration_Type_Item => if Is_A_Type then if CommandLineData.Content.Debug.SLI then -- Debug 
SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "Enumeration_Type", Stop => 0); end if; Result := SLI_Enumeration_Type; else if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "Enumeration_Object", Stop => 0); end if; Result := SLI_Enumeration_Object; end if; when Integer_Type_Item => if Is_A_Type then if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "Signed_Integer_Type", Stop => 0); end if; Result := SLI_Signed_Integer_Type; else if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "Signed_Integer_Object", Stop => 0); end if; Result := SLI_Signed_Integer_Object; end if; when Modular_Type_Item => if Is_A_Type then if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "Modular_Integer_Type", Stop => 0); end if; Result := SLI_Modular_Integer_Type; else if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "Modular_Integer_Object", Stop => 0); end if; Result := SLI_Modular_Integer_Object; end if; when Floating_Point_Type_Item => if Is_A_Type then if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "Floating_Point_Type", Stop => 0); end if; Result := SLI_Floating_Point_Type; else if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "Floating_Point_Object", Stop => 0); end if; Result := SLI_Floating_Point_Object; end if; when Fixed_Point_Type_Item => if Is_A_Type then if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "Fixed_Point_Type", Stop => 0); end if; Result := SLI_Fixed_Point_Type; else if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "Fixed_Point_Object", Stop => 0); end if; Result := SLI_Fixed_Point_Object; end if; when Array_Type_Item => if Is_A_Type then if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "Array_Type", Stop => 0); end if; Result := SLI_Array_Type; else if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "Array_Object", Stop => 0); end if; Result := SLI_Array_Object; end if; when Record_Type_Item => if Is_A_Type then if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "Record_Type", Stop => 0); end if; Result := SLI_Record_Type; else if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "Record_Object", Stop => 0); end if; Result := SLI_Record_Object; end if; when Abstract_Proof_Type_Item => if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "Abstract_Type", Stop => 0); end if; Result := SLI_Abstract_Type; when Protected_Type_Item => if Is_A_Type then if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "Protected_Type", Stop => 0); end if; Result := SLI_Protected_Type; else if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "Protected_Object", Stop => 0); end if; Result := 
SLI_Protected_Object; end if; when Task_Type_Item => if Is_A_Type then if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "Task_Type", Stop => 0); end if; Result := SLI_Task_Type; else if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "Task_Object", Stop => 0); end if; Result := SLI_Task_Object; end if; when Generic_Type_Item => if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "Generic_Formal_Parameter", Stop => 0); end if; Result := SLI_Generic_Formal_Parameter; when others => if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "Unknown_Type", Stop => 0); end if; Result := SLI_Unknown_Type; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "DICTIONARY.GET_TYPE_DISCRIMINANT : PROGRAM ERROR"); end case; end Get_Type_Discriminant; begin -- Get_SLI_Type if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => " [DICTIONARY.GET_SLI_TYPE : Symbol = ", Stop => 0); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => GenerateSimpleName (Item => Item, Separator => ".")); end if; case RawDict.GetSymbolDiscriminant (Item => Item) is when Type_Symbol => Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Item), -- GAA External Is_A_Type => True, Result => Result); when Enumeration_Literal_Symbol => if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => " : Enumeration_Literal", Stop => 0); end if; Result := SLI_Enumeration_Literal; when Record_Component_Symbol => Get_Type_Discriminant (Type_Mark => RawDict.Get_Record_Component_Type (The_Record_Component => RawDict.Get_Record_Component_Info_Ref (Item => Item)), -- GAA External Is_A_Type => False, Result => Result); when Subcomponent_Symbol => Get_Type_Discriminant (Type_Mark => RawDict.Get_Record_Component_Type (The_Record_Component => RawDict.Get_Subcomponent_Record_Component (The_Subcomponent => RawDict.Get_Subcomponent_Info_Ref (Item => Item))), -- GAA External Is_A_Type => False, Result => Result); when Variable_Symbol => Get_Type_Discriminant (Type_Mark => RawDict.Get_Variable_Type (The_Variable => RawDict.Get_Variable_Info_Ref (Item => Item)), -- GAA External Is_A_Type => False, Result => Result); when Quantified_Variable_Symbol => Get_Type_Discriminant (Type_Mark => RawDict.Get_Quantified_Variable_Type (The_Quantified_Variable => RawDict.Get_Quantified_Variable_Info_Ref (Item => Item)), -- GAA External Is_A_Type => False, Result => Result); when Implicit_Return_Variable_Symbol => Get_Type_Discriminant (Type_Mark => RawDict.Get_Subprogram_Return_Type (The_Subprogram => RawDict.Get_Implicit_Return_Variable_Function (The_Implicit_Return_Variable => RawDict.Get_Implicit_Return_Variable_Info_Ref (Item => Item))), -- GAA External Is_A_Type => False, Result => Result); when Own_Variable_Symbol => if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => " : Record_Object", Stop => 0); end if; Result := SLI_Record_Object; when OwnTaskSymbol => if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => " : Task_Object", Stop => 0); end if; Result := SLI_Task_Object; when Constant_Symbol => Type_Mark := 
RawDict.Get_Constant_Type (The_Constant => RawDict.Get_Constant_Info_Ref (Item => Item)); -- GAA External if Type_Mark = Get_Universal_Integer_Type or else Type_Mark = Get_Universal_Real_Type then if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => " : Named_Number", Stop => 0); end if; Result := SLI_Named_Number; elsif Type_Mark = Get_Unknown_Type_Mark then if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => " : Unknown_Type", Stop => 0); end if; Result := SLI_Unknown_Type; else Get_Type_Discriminant (Type_Mark => Type_Mark, Is_A_Type => False, Result => Result); end if; when Package_Symbol => if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => " : Package_Type", Stop => 0); end if; Result := SLI_Package_Type; when Subprogram_Symbol => if RawDict.Get_Subprogram_Return_Type (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Item)) = -- GAA External RawDict.Null_Type_Info_Ref then if RawDict.Get_Subprogram_Generic_Unit (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Item)) /= -- GAA External RawDict.Null_Generic_Unit_Info_Ref then if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => " : Generic_Procedure_Type", Stop => 0); end if; Result := SLI_Generic_Procedure_Type; else if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => " : Procedure_Type", Stop => 0); end if; Result := SLI_Procedure_Type; end if; else if RawDict.Get_Subprogram_Generic_Unit (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Item)) /= -- GAA External RawDict.Null_Generic_Unit_Info_Ref then if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => " : Generic_Function_Op", Stop => 0); end if; Result := SLI_Generic_Function_Op; else if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => " : Function_Op", Stop => 0); end if; Result := SLI_Function_Op; end if; end if; when ImplicitProofFunctionSymbol => if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => " : Function_Op", Stop => 0); end if; Result := SLI_Function_Op; when Subprogram_Parameter_Symbol => Get_Type_Discriminant (Type_Mark => RawDict.Get_Subprogram_Parameter_Type (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => Item)), -- GAA External Is_A_Type => False, Result => Result); when Operator_Symbol => if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => " : Function_Op", Stop => 0); end if; Result := SLI_Function_Op; when others => if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => " : Unknown_Type", Stop => 0); end if; Result := SLI_Unknown_Type; end case; if CommandLineData.Content.Debug.SLI then SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "] ", Stop => 0); end if; end Get_SLI_Type; -------------------------------------------------------------------------------- procedure Write_Simple_Name (File : in SPARK_IO.File_Type; Item : in Symbol) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# Item, --# 
LexTokenManager.State; is begin E_Strings.Put_String (File => File, E_Str => GenerateSimpleName (Item => Item, Separator => ".")); end Write_Simple_Name; -------------------------------------------------------------------------------- procedure Write_Name (File : in SPARK_IO.File_Type; Item : in Symbol) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# Item, --# LexTokenManager.State; is Local_Item : Symbol; procedure Write_Prefix (File : in SPARK_IO.File_Type; Item : in Symbol) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# Item, --# LexTokenManager.State; is Prefix : E_Strings.T; Region : Symbol; Scope : Scopes; function Append_Name (Prefix, Suffix : E_Strings.T) return E_Strings.T is Name : E_Strings.T; begin Name := Prefix; E_Strings.Append_String (E_Str => Name, Str => "."); E_Strings.Append_Examiner_String (E_Str1 => Name, E_Str2 => Suffix); return Name; end Append_Name; begin -- Write_Prefix Prefix := E_Strings.Empty_String; if RawDict.GetSymbolDiscriminant (Item) /= Package_Symbol or else RawDict.Get_Package_Info_Ref (Item => Item) /= Get_Predefined_Package_Standard then -- don't try and get a prefix for this! Scope := GetScope (Item); loop Region := GetRegion (Scope); exit when RawDict.GetSymbolDiscriminant (Region) = Package_Symbol and then RawDict.Get_Package_Info_Ref (Item => Region) = Get_Predefined_Package_Standard; Prefix := Append_Name (Prefix => GenerateSimpleName (Item => Region, Separator => "."), Suffix => Prefix); Scope := GetEnclosingScope (Scope); end loop; end if; E_Strings.Put_String (File => File, E_Str => Prefix); end Write_Prefix; -- Init of Prefix is partial but effective. 
begin -- Write_Name Local_Item := Item; if RawDict.GetSymbolDiscriminant (Local_Item) = Type_Symbol and then Is_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Local_Item)) and then RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Local_Item)) = Access_Type_Item then SPARK_IO.Put_String (File, "access ", 0); Local_Item := RawDict.Get_Type_Symbol (RawDict.Get_Type_Accesses (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Local_Item))); end if; if RawDict.GetSymbolDiscriminant (Item) /= Type_Symbol or else RawDict.Get_Type_Info_Ref (Item => Item) /= Get_Unknown_Type_Mark then Write_Prefix (File => File, Item => Local_Item); end if; Write_Simple_Name (File => File, Item => Local_Item); end Write_Name; -------------------------------------------------------------------------------- procedure Write_Scope (File : in SPARK_IO.File_Type; Scope : in Scopes) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# Scope; is begin if Get_Visibility (Scope => Scope) = Visible then Write_String (File, "specification"); elsif Get_Visibility (Scope => Scope) = Privat then Write_String (File, "private part of"); else Write_String (File, "body"); end if; Write_String (File, " of "); Write_Name (File => File, Item => GetRegion (Scope)); end Write_Scope; -------------------------------------------------------------------------------- procedure Write_Integer (File : in SPARK_IO.File_Type; Int : in Integer) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# Int; is begin SPARK_IO.Put_Integer (File, Int, 0, 10); end Write_Integer; -------------------------------------------------------------------------------- -- First_Own_Variable -------------------------------------------------------------------------------- function First_Own_Variable (The_Package : RawDict.Package_Info_Ref) return Iterator --# global in Dict; is Own_Variable : RawDict.Own_Variable_Info_Ref; Own_Variables : Iterator; begin Own_Variable := RawDict.Get_Package_Own_Variables (The_Package => The_Package); if Own_Variable = RawDict.Null_Own_Variable_Info_Ref then Own_Variables := NullIterator; else Own_Variables := Iterator'(OwnVariableIterator, IsAbstract, RawDict.Get_Own_Variable_Symbol (Own_Variable), NullSymbol); end if; return Own_Variables; end First_Own_Variable; -------------------------------------------------------------------------------- function FirstOwnVariable (ThePackage : Symbol) return Iterator is begin return First_Own_Variable (The_Package => RawDict.Get_Package_Info_Ref (ThePackage)); -- GAA External end FirstOwnVariable; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function First_Abstract_Own_Variable (The_Package : RawDict.Package_Info_Ref) return Iterator --# global in Dict; is Own_Variables, Abstract_Own_Variables : Iterator; begin Own_Variables := First_Own_Variable (The_Package => The_Package); loop if IsNullIterator (Own_Variables) then Abstract_Own_Variables := NullIterator; exit; end if; if Own_Variable_Has_Constituents (The_Variable => RawDict.Get_Variable_Info_Ref (CurrentSymbol (Own_Variables))) then Abstract_Own_Variables := Iterator'(AbstractOwnVariableIterator, IsAbstract, Own_Variables.Current, NullSymbol); exit; end if; Own_Variables := NextSymbol (Own_Variables); end loop; return Abstract_Own_Variables; end 
First_Abstract_Own_Variable; -------------------------------------------------------------------------------- function GetLoopParameter (TheLoop : Symbol) return Symbol is begin return RawDict.GetLoopParameter (TheLoop); end GetLoopParameter; -------------------------------------------------------------------------------- function GetLoopExitExpn (TheLoop : Symbol) return Natural is begin return RawDict.GetLoopExitExpn (TheLoop); end GetLoopExitExpn; -------------------------------------------------------------------------------- function GetLoopEntryExpn (TheLoop : Symbol) return Natural is begin return RawDict.GetLoopEntryExpn (TheLoop); end GetLoopEntryExpn; -------------------------------------------------------------------------------- function GetLoopHasExits (TheLoop : Symbol) return Boolean is begin return RawDict.GetLoopHasExits (TheLoop); end GetLoopHasExits; -------------------------------------------------------------------------------- function Is_For_Loop (TheSymbol : Symbol) return Boolean --# global in Dict; is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = LoopSymbol and then RawDict.GetLoopParameter (TheSymbol) /= NullSymbol; end Is_For_Loop; -------------------------------------------------------------------------------- function LoopParameterHasStaticRange (TheLoopParameter : Symbol) return Boolean is begin return RawDict.GetLoopParameterHasStaticRange (TheLoopParameter); end LoopParameterHasStaticRange; -------------------------------------------------------------------------------- function LoopParameterMovesInReverse (TheLoopParameter : Symbol) return Boolean is begin return RawDict.GetLoopParameterIsReverse (TheLoopParameter); end LoopParameterMovesInReverse; -------------------------------------------------------------------------------- -- First_Generic_Formal_Subprogram_Parameter -------------------------------------------------------------------------------- function First_Generic_Formal_Subprogram_Parameter (The_Subprogram : RawDict.Subprogram_Info_Ref) return Iterator --# global in Dict; is The_Generic_Unit : RawDict.Generic_Unit_Info_Ref; The_Generic_Parameter : RawDict.Generic_Parameter_Info_Ref; FormalParameters : Iterator; begin The_Generic_Unit := RawDict.Get_Subprogram_Generic_Unit (The_Subprogram => The_Subprogram); SystemErrors.RT_Assert (C => The_Generic_Unit /= RawDict.Null_Generic_Unit_Info_Ref, Sys_Err => SystemErrors.Precondition_Failure, Msg => "Wrong unit kind in Dictionary.First_Generic_Formal_Subprogram_Parameter"); The_Generic_Parameter := RawDict.Get_Generic_Unit_First_Generic_Parameter (The_Generic_Unit => The_Generic_Unit); if The_Generic_Parameter = RawDict.Null_Generic_Parameter_Info_Ref then FormalParameters := NullIterator; else FormalParameters := Iterator' (GenericFormalParameterIterator, IsAbstract, RawDict.Get_Generic_Parameter_Symbol (The_Generic_Parameter), NullSymbol); end if; return FormalParameters; end First_Generic_Formal_Subprogram_Parameter; -------------------------------------------------------------------------------- function FirstGenericFormalParameter (TheGeneric : Symbol) return Iterator is FormalParameters : Iterator; -- initialization strictly not needed, fatal error occurs if attempt to -- return an undefined value occurs begin case RawDict.GetSymbolDiscriminant (TheGeneric) is when Subprogram_Symbol => FormalParameters := First_Generic_Formal_Subprogram_Parameter (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheGeneric)); -- GAA External when ImplicitProofFunctionSymbol => 
FormalParameters := First_Generic_Formal_Subprogram_Parameter (The_Subprogram => RawDict.GetImplicitProofFunctionAdaFunction (TheGeneric)); when others => -- non-exec code FormalParameters := NullIterator; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.FirstGenericFormalParameter"); end case; return FormalParameters; end FirstGenericFormalParameter; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function Is_Loop_Name (Name : LexTokenManager.Lex_String; Scope : Scopes) return Boolean --# global in Dict; --# in LexTokenManager.State; is Current : Iterator; begin Current := First_Loop (CompilationUnit => GetEnclosingCompilationUnit (Scope)); loop exit when IsNullIterator (Current); exit when LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => GetSimpleName (CurrentSymbol (Current)), Lex_Str2 => Name) = LexTokenManager.Str_Eq; Current := NextSymbol (Current); end loop; return not IsNullIterator (Current); end Is_Loop_Name; -------------------------------------------------------------------------------- -- Is_Refined_Own_Variable -------------------------------------------------------------------------------- function IsRefinedOwnVariable (Variable : Symbol) return Boolean is function Is_Refined_Own_Variable (The_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is begin return Is_Own_Variable (The_Variable => The_Variable) and then Own_Variable_Has_Constituents (The_Variable => The_Variable); end Is_Refined_Own_Variable; begin -- IsRefinedOwnVariable return RawDict.GetSymbolDiscriminant (Variable) = Variable_Symbol and then Is_Refined_Own_Variable (The_Variable => RawDict.Get_Variable_Info_Ref (Item => Variable)); -- GAA External end IsRefinedOwnVariable; -------------------------------------------------------------------------------- -- Is_Procedure -------------------------------------------------------------------------------- function Is_Procedure (The_Subprogram : RawDict.Subprogram_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Subprogram_Return_Type (The_Subprogram => The_Subprogram) = RawDict.Null_Type_Info_Ref; end Is_Procedure; -------------------------------------------------------------------------------- function IsProcedure (TheSymbol : Symbol) return Boolean is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Subprogram_Symbol and then Is_Procedure (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheSymbol)); -- GAA External end IsProcedure; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function GetLoop (CompilationUnit : Symbol; Number : Positive) return Symbol is TheLoop : Iterator; begin TheLoop := First_Loop (CompilationUnit => CompilationUnit); for No in Positive range 1 .. 
Number - 1 loop TheLoop := NextSymbol (TheLoop); end loop; return CurrentSymbol (TheLoop); end GetLoop; -------------------------------------------------------------------------------- -- Get_Abstraction -------------------------------------------------------------------------------- function Get_Subprogram_Abstraction (The_Subprogram : RawDict.Subprogram_Info_Ref; Scope : Scopes) return Abstractions --# global Dict; is Declared_Scope : Scopes; Abstraction : Abstractions; begin if RawDict.Get_Subprogram_Has_Second_Annotation (The_Subprogram => The_Subprogram) then Declared_Scope := Get_Subprogram_Scope (The_Subprogram => The_Subprogram); if (Get_Visibility (Scope => Declared_Scope) = Visible or else Get_Visibility (Scope => Declared_Scope) = Privat) and then IsLocal (Scope, GetLocalScope (Declared_Scope)) then Abstraction := IsRefined; else Abstraction := IsAbstract; end if; else Abstraction := IsAbstract; end if; return Abstraction; end Get_Subprogram_Abstraction; -------------------------------------------------------------------------------- function Get_Task_Type_Abstraction (The_Task_Type : RawDict.Type_Info_Ref; Scope : Scopes) return Abstractions --# global Dict; is Declared_Scope : Scopes; Abstraction : Abstractions; begin if RawDict.Get_Task_Type_Has_Second_Annotation (The_Task_Type => The_Task_Type) then Declared_Scope := Get_Type_Scope (Type_Mark => The_Task_Type); if (Get_Visibility (Scope => Declared_Scope) = Visible or else Get_Visibility (Scope => Declared_Scope) = Privat) and then IsLocal (Scope, GetLocalScope (Declared_Scope)) then Abstraction := IsRefined; else Abstraction := IsAbstract; end if; else Abstraction := IsAbstract; end if; return Abstraction; end Get_Task_Type_Abstraction; -------------------------------------------------------------------------------- function GetAbstraction (Subprogram : Symbol; Scope : Scopes) return Abstractions is Abstraction : Abstractions; begin case RawDict.GetSymbolDiscriminant (Subprogram) is when Subprogram_Symbol => Abstraction := Get_Subprogram_Abstraction (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Subprogram), -- GAA External Scope => Scope); when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Subprogram)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.GetAbstraction"); Abstraction := Get_Task_Type_Abstraction (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => Subprogram), -- GAA External Scope => Scope); when others => -- non-exec code Abstraction := IsAbstract; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.GetAbstraction"); end case; return Abstraction; end GetAbstraction; -------------------------------------------------------------------------------- -- Similar to preceding function but is concerned with whether there is a second -- constraint rather than a second flow annotation. If there is a second flow -- anno then it is assumed there is a second constraint (even if the user has just -- left it as True). 
If there is NO second flow anno then we still need to check -- whether there is a second constraint (to cover the private type refinement case) function GetConstraintAbstraction (Subprogram : Symbol; Scope : Scopes) return Abstractions is DeclaredScope : Scopes; Abstraction : Abstractions; begin Abstraction := GetAbstraction (Subprogram, Scope); if Abstraction = IsAbstract and then RawDict.GetSymbolDiscriminant (Subprogram) = Subprogram_Symbol then -- No second flow anno, so check for second constraint. Second half of guard protects against -- looking for a second proof annotation on a task or protected type if RawDict.Get_Subprogram_Has_Second_Constraint (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Subprogram)) then -- GAA External DeclaredScope := Get_Subprogram_Scope (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Subprogram)); -- GAA External if (Get_Visibility (Scope => DeclaredScope) = Visible or else Get_Visibility (Scope => DeclaredScope) = Privat) and then IsLocal (Scope, GetLocalScope (DeclaredScope)) then Abstraction := IsRefined; else Abstraction := IsAbstract; end if; else Abstraction := IsAbstract; end if; end if; return Abstraction; end GetConstraintAbstraction; -------------------------------------------------------------------------------- procedure AdjustTypeUpperBound (TypeMark : Symbol; NewBound : LexTokenManager.Lex_String) is begin RawDict.Set_Type_Upper (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark), -- GAA External Upper => NewBound); end AdjustTypeUpperBound; -------------------------------------------------------------------------------- procedure AdjustTypeLowerBound (TypeMark : Symbol; NewBound : LexTokenManager.Lex_String) is begin RawDict.Set_Type_Lower (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark), -- GAA External Lower => NewBound); end AdjustTypeLowerBound; -------------------------------------------------------------------------------- procedure AdjustTypeErrorBound (TypeMark : Symbol; NewBound : LexTokenManager.Lex_String) is begin RawDict.Set_Type_Error_Bound (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark), -- GAA External Error_Bound => NewBound); end AdjustTypeErrorBound; -------------------------------------------------------------------------------- function GetMainProgramPriority return LexTokenManager.Lex_String is begin return Dict.Main.The_Priority; -- NullString if not PriorityGiven or invalid value end GetMainProgramPriority; -------------------------------------------------------------------------------- function IsFunction (TheSymbol : Symbol) return Boolean is begin return Is_Subprogram (TheSymbol) and then Get_Type (The_Symbol => TheSymbol) /= RawDict.Null_Type_Info_Ref; end IsFunction; -------------------------------------------------------------------------------- function IsProtectedFunction (TheSymbol : Symbol) return Boolean is begin return IsFunction (TheSymbol) and then RawDict.GetSymbolDiscriminant (GetRegion (GetScope (TheSymbol))) = Type_Symbol and then Is_Protected_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => GetRegion (GetScope (TheSymbol)))); end IsProtectedFunction; -------------------------------------------------------------------------------- function IsEntry (TheSymbol : Symbol) return Boolean is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Subprogram_Symbol and then RawDict.Get_Subprogram_Is_Entry (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheSymbol)); -- GAA External end IsEntry; 
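-- For illustration (hypothetical example): the abstract/refined distinction
-- used by GetAbstraction and GetConstraintAbstraction corresponds to SPARK
-- source along these lines:
--    package P
--    --# own State;
--    is
--       procedure Op;
--       --# global in out State;      -- first (abstract) annotation
--    end P;
--
--    package body P
--    --# own State is A, B;           -- refinement of the own variable
--    is
--       A, B : Integer;
--       procedure Op
--       --# global in out A, B;       -- second (refined) annotation
--       is begin ... end Op;
--    end P;
-- Broadly, inside the package body, where the refinement is visible, the
-- refined view (IsRefined) is selected; elsewhere the abstract view
-- (IsAbstract) applies.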
-------------------------------------------------------------------------------- function IsProgramUnit (Sym : Symbol) return Boolean is begin return Sym /= NullSymbol and then (RawDict.GetSymbolDiscriminant (Sym) = Package_Symbol or else (RawDict.GetSymbolDiscriminant (Sym) = Type_Symbol and then (Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Sym)) -- GAA External or else Is_Protected_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Sym)))) -- GAA External or else (RawDict.GetSymbolDiscriminant (Sym) = Subprogram_Symbol and then (RawDict.Get_Subprogram_Is_Entry (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Sym)) -- GAA External or else RawDict.Get_Subprogram_Generic_Unit (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Sym)) /= -- GAA External RawDict.Null_Generic_Unit_Info_Ref)) or else Is_Subprogram (Sym)); end IsProgramUnit; -------------------------------------------------------------------------------- function GetSubprogramEntryBarrier (Subprogram : Symbol) return Symbol is Result : Symbol := NullSymbol; begin if RawDict.GetSymbolDiscriminant (Subprogram) = Subprogram_Symbol and then RawDict.Get_Subprogram_Is_Entry (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Subprogram)) then -- GAA External Result := RawDict.Get_Subprogram_Entry_Barrier (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Subprogram)); -- GAA External end if; return Result; end GetSubprogramEntryBarrier; -------------------------------------------------------------------------------- function IsInterruptHandler (TheSymbol : Symbol) return Boolean is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Subprogram_Symbol and then RawDict.Get_Subprogram_Is_Interrupt_Handler (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheSymbol)); -- GAA External end IsInterruptHandler; -------------------------------------------------------------------------------- function IsAdaFunction (TheSymbol : Symbol) return Boolean is begin return IsFunction (TheSymbol) and then RawDict.GetSymbolDiscriminant (TheSymbol) = Subprogram_Symbol and then Get_Subprogram_Context (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheSymbol)) = -- GAA External ProgramContext; end IsAdaFunction; -------------------------------------------------------------------------------- function IsAnUncheckedConversion (TheSymbol : Symbol) return Boolean is begin return IsAdaFunction (TheSymbol) and then RawDict.Get_Subprogram_Instantiation_Of (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (TheSymbol)) = -- GAA External Dict.Subprograms.Unchecked_Conversion; end IsAnUncheckedConversion; -------------------------------------------------------------------------------- function UsesUncheckedConversion (TheUnit : Symbol) return Boolean is Result : Boolean; begin case RawDict.GetSymbolDiscriminant (TheUnit) is when Subprogram_Symbol => -- covers procedures, functions and entries Result := RawDict.Get_Subprogram_Uses_Unchecked_Conversion (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheUnit)); -- GAA External when Type_Symbol => -- covers task bodies SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheUnit)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.UsesUncheckedConversion"); Result := RawDict.Get_Task_Type_Uses_Unchecked_Conversion (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => TheUnit)); -- GAA External when Package_Symbol => -- a package could only use U_C in 
elaboration code where it would be illegal Result := False; when others => -- non-exec code Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.UsesUncheckedConversion"); end case; return Result; end UsesUncheckedConversion; -------------------------------------------------------------------------------- function AssignsFromExternal (TheUnit : Symbol) return Boolean is Result : Boolean; begin case RawDict.GetSymbolDiscriminant (TheUnit) is when Subprogram_Symbol => -- covers procedures, functions and entries Result := RawDict.Get_Subprogram_Assigns_From_External (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheUnit)); -- GAA External when Type_Symbol => -- covers task bodies SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheUnit)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AssignsFromExternal"); Result := RawDict.Get_Task_Type_Assigns_From_External (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => TheUnit)); -- GAA External when Package_Symbol => -- a package could only read an external variable in elaboration code -- where it would be illegal Result := False; when others => -- non-exec code Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AssignsFromExternal"); end case; return Result; end AssignsFromExternal; -------------------------------------------------------------------------------- function IsObject (TheSymbol : Symbol) return Boolean is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Constant_Symbol or else Is_Variable (TheSymbol); end IsObject; -------------------------------------------------------------------------------- function PrefixAllowed (Prefix : Symbol; Scope : Scopes) return Boolean is Result : Boolean; begin case RawDict.GetSymbolDiscriminant (Prefix) is when Package_Symbol => Result := RawDict.Get_Package_Info_Ref (Item => Prefix) /= Get_Enclosing_Package (Scope => Scope); -- GAA External when Type_Symbol => Result := RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Prefix)) = -- GAA External Record_Type_Item; when others => if IsObject (Prefix) then Result := RawDict.Get_Type_Discriminant (Type_Mark => Get_Type (The_Symbol => Prefix)) = Record_Type_Item or else RawDict.Get_Type_Discriminant (Type_Mark => Get_Type (The_Symbol => Prefix)) = Protected_Type_Item; else Result := False; end if; end case; return Result; end PrefixAllowed; -------------------------------------------------------------------------------- function PrefixRequired (Item : Symbol; Scope : Scopes) return Boolean is begin return RawDict.GetSymbolDiscriminant (GetRegion (GetScope (Item))) = Package_Symbol and then RawDict.Get_Package_Info_Ref (Item => GetRegion (GetScope (Item))) /= Get_Enclosing_Package (Scope => Scope); -- GAA External end PrefixRequired; -------------------------------------------------------------------------------- function GetPrefix (Item : Symbol) return LexTokenManager.Lex_String is begin return GetSimpleName (GetRegion (GetScope (Item))); end GetPrefix; -------------------------------------------------------------------------------- function GetEnclosingProtectedRegion (Scope : Scopes) return Symbol is CurrentRegion : Symbol; CurrentScope : Scopes; Result : Symbol; begin Result := NullSymbol; CurrentScope := Scope; loop CurrentRegion := GetRegion (CurrentScope); if RawDict.GetSymbolDiscriminant (CurrentRegion) =
Type_Symbol and then RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => CurrentRegion)) = Protected_Type_Item then Result := CurrentRegion; exit; end if; exit when IsLibraryLevel (CurrentScope); CurrentScope := GetEnclosingScope (CurrentScope); end loop; return Result; end GetEnclosingProtectedRegion; -------------------------------------------------------------------------------- function IsOrIsInProtectedScope (Scope : Scopes) return Boolean is begin return GetEnclosingProtectedRegion (Scope) /= NullSymbol; end IsOrIsInProtectedScope; -------------------------------------------------------------------------------- -- First_Protected_Element -------------------------------------------------------------------------------- function First_Protected_Element (The_Protected_Type : RawDict.Type_Info_Ref) return Iterator --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Protected_Elements : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Protected_Type_First_Private_Declaration (The_Protected_Type => The_Protected_Type); if The_Declaration /= RawDict.Null_Declaration_Info_Ref then Protected_Elements := Iterator' (ProtectedElementIterator, IsAbstract, RawDict.Get_Declaration_Item (The_Declaration => The_Declaration), RawDict.Get_Declaration_Symbol (The_Declaration)); end if; return Protected_Elements; end First_Protected_Element; -------------------------------------------------------------------------------- function FirstProtectedElement (The_Protected_Type : Symbol) return Iterator is begin return First_Protected_Element (The_Protected_Type => RawDict.Get_Type_Info_Ref (The_Protected_Type)); -- GAA External end FirstProtectedElement; -------------------------------------------------------------------------------- -- Get_Protected_Type_Own_Variable -------------------------------------------------------------------------------- function Get_Protected_Type_Own_Variable (The_Protected_Type : RawDict.Type_Info_Ref) return RawDict.Variable_Info_Ref --# global in Dict; is begin return RawDict.Get_Own_Variable_Variable (The_Own_Variable => RawDict.Get_Protected_Type_Own_Variable (The_Protected_Type => The_Protected_Type)); end Get_Protected_Type_Own_Variable; -------------------------------------------------------------------------------- function GetProtectedTypeOwnVariable (TheProtectedType : Symbol) return Symbol is begin return RawDict.Get_Variable_Symbol -- GAA External (Get_Protected_Type_Own_Variable (The_Protected_Type => RawDict.Get_Type_Info_Ref (TheProtectedType))); -- GAA External end GetProtectedTypeOwnVariable; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- -- Trace routines for dictionary look up operations. These are invisible to the -- flow analyser. 
They are enabled by -debug=l (for Lookup) procedure TraceMsg (Msg : in String) --# derives null from Msg; is --# hide TraceMsg; begin if CommandLineData.Content.Debug.Lookup_Trace then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Msg, 0); end if; end TraceMsg; procedure Trace_Lex_Str (Msg : in String; L : in LexTokenManager.Lex_String) --# derives null from L, --# Msg; is --# hide Trace_Lex_Str; begin if CommandLineData.Content.Debug.Lookup_Trace then SPARK_IO.Put_String (SPARK_IO.Standard_Output, Msg, 0); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => L, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Null String", 0); else E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => L)); end if; end if; end Trace_Lex_Str; procedure Trace_Sym (Msg : in String; Sym : in Symbol; Scope : in Scopes) --# derives null from Msg, --# Scope, --# Sym; is --# hide Trace_Sym; begin if CommandLineData.Content.Debug.Lookup_Trace then SPARK_IO.Put_String (SPARK_IO.Standard_Output, Msg, 0); if Sym = NullSymbol then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Null Symbol", 0); else E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => GetAnyPrefixNeeded (Sym => Sym, Scope => Scope, Separator => ".")); SPARK_IO.Put_Char (SPARK_IO.Standard_Output, '.'); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => GenerateSimpleName (Item => Sym, Separator => ".")); end if; end if; end Trace_Sym; -------------------------------------------------------------------------------- procedure LookupScope (Name : in LexTokenManager.Lex_String; Stop_At : in LexTokenManager.Lex_String; Scope : in Scopes; Calling_Scope : in Scopes; Context : in Contexts; Item : out Symbol; Is_Visible : out Boolean) --# global in Dict; --# in LexTokenManager.State; --# derives Is_Visible, --# Item from Calling_Scope, --# Context, --# Dict, --# LexTokenManager.State, --# Name, --# Scope, --# Stop_At; is separate; -------------------------------------------------------------------------------- function LookupImmediateScope (Name : LexTokenManager.Lex_String; Scope : Scopes; Context : Contexts) return Symbol is Item : Symbol; IsVisible : Boolean; begin Trace_Lex_Str (Msg => "In LookupImmediateScope, seeking ", L => Name); LookupScope (Name => Name, Stop_At => LexTokenManager.Null_String, Scope => Scope, Calling_Scope => Scope, Context => Context, Item => Item, Is_Visible => IsVisible); if not IsVisible then Item := NullSymbol; end if; return Item; end LookupImmediateScope; -------------------------------------------------------------------------------- function IsDirectlyDefined (Name : LexTokenManager.Lex_String; Scope : Scopes; Context : Contexts) return Boolean is begin return Is_Loop_Name (Name => Name, Scope => Scope) or else LookupImmediateScope (Name => Name, Scope => Scope, Context => Context) /= NullSymbol; end IsDirectlyDefined; -------------------------------------------------------------------------------- -- Is_Withed_Locally -------------------------------------------------------------------------------- function Is_Withed_Locally (The_Withed_Symbol : Symbol; Scope : Scopes) return Boolean is Region : Symbol; The_Generic_Unit : RawDict.Generic_Unit_Info_Ref; The_Context_Clause : RawDict.Context_Clause_Info_Ref; The_Withed_Package : RawDict.Package_Info_Ref; The_Withed_Subprogram : RawDict.Subprogram_Info_Ref; begin Region := GetRegion (Scope); case Get_Visibility (Scope => 
Scope) is when Visible | Privat => case RawDict.GetSymbolDiscriminant (Region) is when Package_Symbol => The_Context_Clause := RawDict.Get_Package_Visible_With_Clauses (The_Package => RawDict.Get_Package_Info_Ref (Item => Region)); when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Region)) or else Is_Protected_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Region)), Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Is_Withed_Locally"); The_Context_Clause := RawDict.Get_Package_Visible_With_Clauses (The_Package => Get_Enclosing_Package (Scope => Scope)); when Generic_Unit_Symbol => The_Generic_Unit := RawDict.Get_Generic_Unit_Info_Ref (Item => Region); case RawDict.Get_Generic_Unit_Kind (The_Generic_Unit => The_Generic_Unit) is when Generic_Of_Package => The_Context_Clause := RawDict.Get_Package_Visible_With_Clauses (The_Package => RawDict.Get_Generic_Unit_Owning_Package (The_Generic_Unit => The_Generic_Unit)); when Generic_Of_Subprogram => The_Context_Clause := RawDict.Get_Subprogram_With_Clauses (The_Subprogram => RawDict.Get_Generic_Unit_Owning_Subprogram (The_Generic_Unit => The_Generic_Unit)); end case; when others => -- non-exec code The_Context_Clause := RawDict.Null_Context_Clause_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Is_Withed_Locally"); end case; when Local => case RawDict.GetSymbolDiscriminant (Region) is when Package_Symbol => The_Context_Clause := RawDict.Get_Package_Local_With_Clauses (The_Package => RawDict.Get_Package_Info_Ref (Item => Region)); when Subprogram_Symbol => The_Context_Clause := RawDict.Get_Subprogram_With_Clauses (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Region)); when Type_Symbol => case RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Region)) is when Protected_Type_Item => The_Context_Clause := RawDict.Get_Protected_Type_With_Clauses (The_Protected_Type => RawDict.Get_Type_Info_Ref (Item => Region)); when Task_Type_Item => The_Context_Clause := RawDict.Get_Task_Type_With_Clauses (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => Region)); when others => -- non-exec code The_Context_Clause := RawDict.Null_Context_Clause_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Is_Withed_Locally"); end case; when LoopSymbol => The_Context_Clause := RawDict.Null_Context_Clause_Info_Ref; when others => -- non-exec code The_Context_Clause := RawDict.Null_Context_Clause_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Is_Withed_Locally"); end case; end case; case RawDict.GetSymbolDiscriminant (The_Withed_Symbol) is when Package_Symbol => The_Withed_Package := RawDict.Get_Package_Info_Ref (Item => The_Withed_Symbol); while The_Context_Clause /= RawDict.Null_Context_Clause_Info_Ref and then (RawDict.Get_Context_Clause_Is_Subprogram (The_Context_Clause => The_Context_Clause) or else RawDict.Get_Context_Clause_Package (The_Context_Clause => The_Context_Clause) /= The_Withed_Package) loop The_Context_Clause := RawDict.Get_Next_Context_Clause (The_Context_Clause => The_Context_Clause); end loop; when Subprogram_Symbol => The_Withed_Subprogram := RawDict.Get_Subprogram_Info_Ref (Item => The_Withed_Symbol); while The_Context_Clause /= RawDict.Null_Context_Clause_Info_Ref and then (not RawDict.Get_Context_Clause_Is_Subprogram (The_Context_Clause => The_Context_Clause) or 
else RawDict.Get_Context_Clause_Subprogram (The_Context_Clause => The_Context_Clause) /= The_Withed_Subprogram) loop The_Context_Clause := RawDict.Get_Next_Context_Clause (The_Context_Clause => The_Context_Clause); end loop; when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Is_Withed_Locally"); end case; return The_Context_Clause /= RawDict.Null_Context_Clause_Info_Ref; end Is_Withed_Locally; -------------------------------------------------------------------------------- -- Is_Withed -------------------------------------------------------------------------------- function Is_Withed (The_Withed_Symbol : Symbol; Scope : Scopes) return Boolean is Region : Symbol; The_Context_Clause : RawDict.Context_Clause_Info_Ref; The_Withed_Package : RawDict.Package_Info_Ref; The_Withed_Subprogram : RawDict.Subprogram_Info_Ref; Result : Boolean; begin Region := GetRegion (Scope); Result := Is_Withed_Locally (The_Withed_Symbol => The_Withed_Symbol, Scope => Scope); if not Result and then RawDict.GetSymbolDiscriminant (Region) = Package_Symbol and then Get_Visibility (Scope => Scope) = Local then The_Context_Clause := RawDict.Get_Package_Visible_With_Clauses (The_Package => RawDict.Get_Package_Info_Ref (Item => Region)); case RawDict.GetSymbolDiscriminant (The_Withed_Symbol) is when Package_Symbol => The_Withed_Package := RawDict.Get_Package_Info_Ref (Item => The_Withed_Symbol); while The_Context_Clause /= RawDict.Null_Context_Clause_Info_Ref and then (RawDict.Get_Context_Clause_Is_Subprogram (The_Context_Clause => The_Context_Clause) or else RawDict.Get_Context_Clause_Package (The_Context_Clause => The_Context_Clause) /= The_Withed_Package) loop The_Context_Clause := RawDict.Get_Next_Context_Clause (The_Context_Clause => The_Context_Clause); end loop; when Subprogram_Symbol => The_Withed_Subprogram := RawDict.Get_Subprogram_Info_Ref (Item => The_Withed_Symbol); while The_Context_Clause /= RawDict.Null_Context_Clause_Info_Ref and then (not RawDict.Get_Context_Clause_Is_Subprogram (The_Context_Clause => The_Context_Clause) or else RawDict.Get_Context_Clause_Subprogram (The_Context_Clause => The_Context_Clause) /= The_Withed_Subprogram) loop The_Context_Clause := RawDict.Get_Next_Context_Clause (The_Context_Clause => The_Context_Clause); end loop; when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Is_Withed"); end case; Result := The_Context_Clause /= RawDict.Null_Context_Clause_Info_Ref; end if; return Result; end Is_Withed; -------------------------------------------------------------------------------- -- Is_Used_Locally -------------------------------------------------------------------------------- function Is_Used_Locally (Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global Dict; is Region : Symbol; The_Generic_Unit : RawDict.Generic_Unit_Info_Ref; The_Use_Type_Clause : RawDict.Use_Type_Clause_Info_Ref; begin Region := GetRegion (Scope); case Get_Visibility (Scope => Scope) is when Visible | Privat => case RawDict.GetSymbolDiscriminant (Region) is when Package_Symbol => The_Use_Type_Clause := RawDict.Get_Package_Visible_Use_Type_Clauses (The_Package => RawDict.Get_Package_Info_Ref (Item => Region)); when Type_Symbol => case RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Region)) is when Protected_Type_Item => The_Use_Type_Clause := RawDict.Get_Package_Visible_Use_Type_Clauses (The_Package => 
Get_Enclosing_Package (Scope => Scope)); when Task_Type_Item => The_Use_Type_Clause := RawDict.Get_Package_Visible_Use_Type_Clauses (The_Package => Get_Enclosing_Package (Scope => Scope)); when others => -- non-exec code The_Use_Type_Clause := RawDict.Null_Use_Type_Clause_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Is_Used_Locally"); end case; when Generic_Unit_Symbol => The_Generic_Unit := RawDict.Get_Generic_Unit_Info_Ref (Item => Region); case RawDict.Get_Generic_Unit_Kind (The_Generic_Unit => The_Generic_Unit) is when Generic_Of_Package => The_Use_Type_Clause := RawDict.Get_Package_Visible_Use_Type_Clauses (The_Package => RawDict.Get_Generic_Unit_Owning_Package (The_Generic_Unit => The_Generic_Unit)); when Generic_Of_Subprogram => The_Use_Type_Clause := RawDict.Get_Subprogram_Use_Type_Clauses (The_Subprogram => RawDict.Get_Generic_Unit_Owning_Subprogram (The_Generic_Unit => The_Generic_Unit)); end case; when others => -- non-exec code The_Use_Type_Clause := RawDict.Null_Use_Type_Clause_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Is_Used_Locally"); end case; when Local => case RawDict.GetSymbolDiscriminant (Region) is when Package_Symbol => The_Use_Type_Clause := RawDict.Get_Package_Local_Use_Type_Clauses (The_Package => RawDict.Get_Package_Info_Ref (Item => Region)); when Subprogram_Symbol => The_Use_Type_Clause := RawDict.Get_Subprogram_Use_Type_Clauses (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Region)); when Type_Symbol => case RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Region)) is when Protected_Type_Item => The_Use_Type_Clause := RawDict.Get_Protected_Type_Use_Type_Clauses (The_Protected_Type => RawDict.Get_Type_Info_Ref (Item => Region)); when Task_Type_Item => The_Use_Type_Clause := RawDict.Get_Task_Type_Use_Type_Clauses (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => Region)); when others => -- non-exec code The_Use_Type_Clause := RawDict.Null_Use_Type_Clause_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Is_Used_Locally"); end case; when others => -- non-exec code The_Use_Type_Clause := RawDict.Null_Use_Type_Clause_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Is_Used_Locally"); end case; end case; while The_Use_Type_Clause /= RawDict.Null_Use_Type_Clause_Info_Ref and then RawDict.Get_Use_Type_Clause_Type (The_Use_Type_Clause => The_Use_Type_Clause) /= Type_Mark loop The_Use_Type_Clause := RawDict.Get_Next_Use_Type_Clause (The_Use_Type_Clause => The_Use_Type_Clause); end loop; return The_Use_Type_Clause /= RawDict.Null_Use_Type_Clause_Info_Ref; end Is_Used_Locally; -------------------------------------------------------------------------------- function IsUsedLocally (TheType : Symbol; Scope : Scopes) return Boolean is begin return Is_Used_Locally (Type_Mark => RawDict.Get_Type_Info_Ref (TheType), -- GAA External Scope => Scope); end IsUsedLocally; -------------------------------------------------------------------------------- -- Is_Embedded_Package -------------------------------------------------------------------------------- function Is_Embedded_Package (The_Package : RawDict.Package_Info_Ref) return Boolean --# global in Dict; is begin return GetRegion (Get_Package_Scope (The_Package => The_Package)) /= RawDict.Get_Package_Symbol (Get_Predefined_Package_Standard); end 
Is_Embedded_Package; -------------------------------------------------------------------------------- function IsEmbeddedPackage (TheSymbol : Symbol) return Boolean is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Package_Symbol and then Is_Embedded_Package (The_Package => RawDict.Get_Package_Info_Ref (Item => TheSymbol)); -- GAA External end IsEmbeddedPackage; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function Is_Descendent_Of_Private_Child (Candidate, The_Package : RawDict.Package_Info_Ref) return Boolean --# global in Dict; is Current_Package, Next_Package : RawDict.Package_Info_Ref; begin Current_Package := Candidate; loop Next_Package := RawDict.Get_Package_Parent (The_Package => Current_Package); exit when Next_Package = The_Package or else Next_Package = RawDict.Null_Package_Info_Ref; Current_Package := Next_Package; end loop; return Next_Package = The_Package and then RawDict.Get_Package_Is_Private (The_Package => Current_Package); end Is_Descendent_Of_Private_Child; -------------------------------------------------------------------------------- function Is_Global_Variable (Abstraction : Abstractions; Subprogram : Symbol; Variable : Symbol) return Boolean is The_Global_Variable : RawDict.Global_Variable_Info_Ref; begin case RawDict.GetSymbolDiscriminant (Subprogram) is when Subprogram_Symbol => The_Global_Variable := RawDict.Get_Subprogram_First_Global_Variable (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Subprogram), -- GAA External Abstraction => Abstraction); when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Subprogram)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Is_Global_Variable"); The_Global_Variable := RawDict.Get_Task_Type_First_Global_Variable (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => Subprogram), -- GAA External Abstraction => Abstraction); when others => -- non-exec code The_Global_Variable := RawDict.Null_Global_Variable_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Is_Global_Variable"); end case; case RawDict.GetSymbolDiscriminant (Variable) is when Subprogram_Parameter_Symbol => while The_Global_Variable /= RawDict.Null_Global_Variable_Info_Ref and then (RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) = RawDict.Subprogram_Variable_Item or else RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) = RawDict.Task_Type_Variable_Item or else RawDict.Get_Global_Variable_Parameter (The_Global_Variable => The_Global_Variable) /= RawDict.Get_Subprogram_Parameter_Info_Ref (Item => Variable)) loop -- GAA External The_Global_Variable := RawDict.Get_Next_Global_Variable (The_Global_Variable => The_Global_Variable); end loop; when Variable_Symbol => while The_Global_Variable /= RawDict.Null_Global_Variable_Info_Ref and then (RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) = RawDict.Subprogram_Parameter_Item or else RawDict.Get_Global_Variable_Variable (The_Global_Variable => The_Global_Variable) /= RawDict.Get_Variable_Info_Ref (Item => Variable)) loop -- GAA External The_Global_Variable := RawDict.Get_Next_Global_Variable (The_Global_Variable => The_Global_Variable); end loop; when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => 
SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Is_Global_Variable"); end case; return The_Global_Variable /= RawDict.Null_Global_Variable_Info_Ref; end Is_Global_Variable; -------------------------------------------------------------------------------- function LookupItem (Name : LexTokenManager.Lex_String; Scope : Scopes; Context : Contexts; Full_Package_Name : Boolean) return Symbol is separate; -------------------------------------------------------------------------------- -- Is_Renamed -------------------------------------------------------------------------------- function Is_Renamed_Local (The_Subprogram : RawDict.Subprogram_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is separate; -------------------------------------------------------------------------------- function Is_Renamed (Subprogram : Symbol; Scope : Scopes) return Boolean is begin return Is_Renamed_Local (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Subprogram), -- GAA External Scope => Scope); end Is_Renamed; -------------------------------------------------------------------------------- -- Is_Callable -------------------------------------------------------------------------------- function Is_Callable (The_Subprogram : RawDict.Subprogram_Info_Ref; Prefix_Needed : Boolean; Scope : Scopes) return Boolean --# global in Dict; is separate; -------------------------------------------------------------------------------- function IsCallable (Subprogram : Symbol; PrefixNeeded : Boolean; Scope : Scopes) return Boolean is begin return Is_Callable (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Subprogram), -- GAA External Prefix_Needed => PrefixNeeded, Scope => Scope); end IsCallable; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function IsDefined (Name : LexTokenManager.Lex_String; Scope : Scopes; Context : Contexts; Full_Package_Name : Boolean) return Boolean is begin return Is_Loop_Name (Name => Name, Scope => Scope) or else LookupItem (Name => Name, Scope => Scope, Context => Context, Full_Package_Name => Full_Package_Name) /= NullSymbol; end IsDefined; -------------------------------------------------------------------------------- -- Unary_Operator_Is_Defined -------------------------------------------------------------------------------- function UnaryOperatorIsDefined (Name : SP_Symbols.SP_Symbol; Operand : Symbol) return Boolean is function Unary_Operator_Is_Defined (Name : SP_Symbols.SP_Symbol; Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in CommandLineData.Content; --# in Dict; is Result : Boolean; begin case Name is when SP_Symbols.plus | SP_Symbols.minus | SP_Symbols.RWabs => -- Unary arithmetic operators on modular types are not -- allowed in SPARK at present. "abs" and "+" are just no-op -- for modular type, so they're useless. Unary "-" is a potential -- danger, since "-1" for a modular type is confusing and can always -- be written as T'Last or 16#FFFF# or similar. -- -- For time type Time_Span, unary "abs" and "-" are defined but "+" is not. Result := Type_Is_Integer (Type_Mark => Type_Mark) or else Type_Is_Real (Type_Mark => Type_Mark) or else (CommandLineData.Ravenscar_Selected and then Type_Mark = Get_Predefined_Time_Span_Type and then Name /= SP_Symbols.plus) or else Type_Mark = Get_Unknown_Type_Mark; when SP_Symbols.RWnot => -- Note - unary "not" is allowed for modular types. 
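-- For illustration (hypothetical example), given a user-declared modular type
--    type Byte is mod 2**8;
--    B : Byte := 16#80#;
-- the expression "not B" is accepted under this rule, whereas "abs B", "+B"
-- and "-B" are rejected; a wrapped-around value such as -1 must instead be
-- written explicitly, for example as Byte'Last or 16#FF#.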
Result := Type_Is_Boolean (Type_Mark => Type_Mark) or else Type_Is_Modular (Type_Mark => Type_Mark) or else Type_Mark = Get_Universal_Integer_Type or else Type_Is_Boolean_Array (Type_Mark => Type_Mark) or else Type_Mark = Get_Unknown_Type_Mark; when others => Result := False; end case; return Result; end Unary_Operator_Is_Defined; begin -- UnaryOperatorIsDefined return Unary_Operator_Is_Defined (Name => Name, Type_Mark => RawDict.Get_Type_Info_Ref (Operand)); -- GAA External end UnaryOperatorIsDefined; -------------------------------------------------------------------------------- -- Get_Predefined_String_Type -------------------------------------------------------------------------------- function Get_Predefined_String_Type return RawDict.Type_Info_Ref --# global in Dict; is begin return Dict.Types.The_Predefined_Types (Predefined_String_Type); end Get_Predefined_String_Type; -------------------------------------------------------------------------------- function GetPredefinedStringType return Symbol is begin return RawDict.Get_Type_Symbol (Get_Predefined_String_Type); -- GAA External end GetPredefinedStringType; -------------------------------------------------------------------------------- function IsPredefinedStringType (TheSymbol : Symbol) return Boolean is begin return RawDict.Get_Type_Info_Ref (TheSymbol) = Get_Predefined_String_Type; -- GAA External end IsPredefinedStringType; -------------------------------------------------------------------------------- -- Get_Predefined_Natural_Subtype -------------------------------------------------------------------------------- function Get_Predefined_Natural_Subtype return RawDict.Type_Info_Ref --# global in Dict; is begin return Dict.Types.The_Predefined_Types (Predefined_Natural_Subtype); end Get_Predefined_Natural_Subtype; -------------------------------------------------------------------------------- function GetPredefinedNaturalSubtype return Symbol is begin return RawDict.Get_Type_Symbol (Get_Predefined_Natural_Subtype); -- GAA External end GetPredefinedNaturalSubtype; -------------------------------------------------------------------------------- -- Get_Binary_Operator_Type -------------------------------------------------------------------------------- function Get_Binary_Operator_Type_Local (Name : SP_Symbols.SP_Symbol; The_Left_Type : RawDict.Type_Info_Ref; The_Right_Type : RawDict.Type_Info_Ref) return RawDict.Type_Info_Ref --# global in CommandLineData.Content; --# in Dict; is separate; -------------------------------------------------------------------------------- function Get_Binary_Operator_Type (Name : SP_Symbols.SP_Symbol; Left : Symbol; Right : Symbol) return Symbol is begin return RawDict.Get_Type_Symbol -- GAA External (Get_Binary_Operator_Type_Local (Name => Name, The_Left_Type => RawDict.Get_Type_Info_Ref (Left), -- GAA External The_Right_Type => RawDict.Get_Type_Info_Ref (Right))); -- GAA External end Get_Binary_Operator_Type; -------------------------------------------------------------------------------- -- Defined_In_Package_Standard -------------------------------------------------------------------------------- function Defined_In_Package_Standard (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is Result : Boolean; begin if Type_Mark = Get_Unknown_Type_Mark then Result := False; else Result := Get_Type_Scope (Type_Mark => Type_Mark) = Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (Get_Predefined_Package_Standard)); end if; return Result; end 
Defined_In_Package_Standard; -------------------------------------------------------------------------------- function DefinedInPackageStandard (TheSymbol : Symbol) return Boolean is begin return Defined_In_Package_Standard (Type_Mark => RawDict.Get_Type_Info_Ref (TheSymbol)); -- GAA External end DefinedInPackageStandard; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function IsPredefined (TheSymbol : Symbol) return Boolean is Result : Boolean; Scope : Scopes; begin if RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then RawDict.Get_Type_Info_Ref (Item => TheSymbol) = Get_Unknown_Type_Mark then -- GAA External Result := False; else Scope := GetScope (TheSymbol); case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => Result := Scope = Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (Get_Predefined_Package_Standard)) or else Scope = Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (Get_Predefined_Package_ASCII)); when CommandLineData.SPARK95_Onwards => Result := Scope = Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (Get_Predefined_Package_Standard)) or else Scope = Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (Get_Predefined_Package_ASCII)) or else Scope = Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (Get_Predefined_Package_Ada)) or else Scope = Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (Dict.Packages.Package_Ada_Characters)) or else Scope = Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (Dict.Packages.Package_Ada_Characters_Latin1)) or else (CommandLineData.Ravenscar_Selected and then (Scope = Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (Dict.Packages.Package_Synchronous_Task_Control)) or else Scope = Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (Dict.Packages.Package_Real_Time)) or else Scope = Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (Dict.Packages.Package_Interrupts)))); end case; end if; return Result; end IsPredefined; -------------------------------------------------------------------------------- -- Operator_Is_Visible -------------------------------------------------------------------------------- package body Operator_Is_Visible is separate; -------------------------------------------------------------------------------- function UnaryOperatorIsVisible (Name : SP_Symbols.SP_Symbol; Operand : Symbol; Scope : Scopes) return Boolean is begin return Operator_Is_Visible.Unary_Operator_Is_Visible (Name => Name, Type_Mark => RawDict.Get_Type_Info_Ref (Operand), -- GAA External Scope => Scope); end UnaryOperatorIsVisible; -------------------------------------------------------------------------------- function BinaryOperatorIsVisible (Name : SP_Symbols.SP_Symbol; Left : Symbol; Right : Symbol; Scope : Scopes) return Boolean is begin return Operator_Is_Visible.Binary_Operator_Is_Visible (Name => Name, The_Left_Type => RawDict.Get_Type_Info_Ref (Left), -- GAA External The_Right_Type => RawDict.Get_Type_Info_Ref (Right), -- GAA External Scope => Scope); end BinaryOperatorIsVisible; -------------------------------------------------------------------------------- -- 
Is_Scalar_Type_Mark -------------------------------------------------------------------------------- function Is_Scalar_Type_Mark (Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is begin return Type_Is_Scalar (Type_Mark => Type_Mark) and then not Type_Is_Private_Here (Type_Mark => Type_Mark, Scope => Scope); end Is_Scalar_Type_Mark; -------------------------------------------------------------------------------- function IsScalarTypeMark (TheSymbol : Symbol; Scope : Scopes) return Boolean is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then Is_Scalar_Type_Mark (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheSymbol), -- GAA External Scope => Scope); end IsScalarTypeMark; -------------------------------------------------------------------------------- -- Is_Discrete_Type_Mark -------------------------------------------------------------------------------- function IsDiscreteTypeMark (TheSymbol : Symbol; Scope : Scopes) return Boolean is function Is_Discrete_Type_Mark (Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is begin return Type_Is_Discrete (Type_Mark => Type_Mark) and then not Type_Is_Private_Here (Type_Mark => Type_Mark, Scope => Scope); end Is_Discrete_Type_Mark; begin -- IsDiscreteTypeMark return RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then Is_Discrete_Type_Mark (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheSymbol), -- GAA External Scope => Scope); end IsDiscreteTypeMark; -------------------------------------------------------------------------------- -- Is_Constrained_Array_Type_Mark -------------------------------------------------------------------------------- function Is_Constrained_Array_Type_Mark (TheSymbol : Symbol; Scope : Scopes) return Boolean is function Is_Constrained_Array_Type_Mark_Local (Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is begin return Is_Array_Type_Mark (Type_Mark => Type_Mark, Scope => Scope) and then RawDict.Get_Type_Constrained (Type_Mark => Type_Mark); end Is_Constrained_Array_Type_Mark_Local; begin -- Is_Constrained_Array_Type_Mark return Is_Constrained_Array_Type_Mark_Local (Type_Mark => RawDict.Get_Type_Info_Ref (TheSymbol), -- GAA External Scope => Scope); end Is_Constrained_Array_Type_Mark; -------------------------------------------------------------------------------- -- Is_Unconstrained_Array_Type_Mark -------------------------------------------------------------------------------- function Is_Unconstrained_Array_Type_Mark (TheSymbol : Symbol; Scope : Scopes) return Boolean is function Is_Unconstrained_Array_Type_Mark_Local (Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is begin return Is_Array_Type_Mark (Type_Mark => Type_Mark, Scope => Scope) and then not RawDict.Get_Type_Constrained (Type_Mark => Type_Mark); end Is_Unconstrained_Array_Type_Mark_Local; begin -- Is_Unconstrained_Array_Type_Mark return Is_Unconstrained_Array_Type_Mark_Local (Type_Mark => RawDict.Get_Type_Info_Ref (TheSymbol), -- GAA External Scope => Scope); end Is_Unconstrained_Array_Type_Mark; -------------------------------------------------------------------------------- -- Attribute_Is_Visible -------------------------------------------------------------------------------- function Attribute_Is_Visible (Name : LexTokenManager.Lex_String; Prefix : PrefixSort; Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in 
CommandLineData.Content; --# in Dict; --# in LexTokenManager.State; is separate; -------------------------------------------------------------------------------- function AttributeIsVisible (Name : LexTokenManager.Lex_String; Prefix : PrefixSort; TypeMark : Symbol; Scope : Scopes) return Boolean is begin return Attribute_Is_Visible (Name => Name, Prefix => Prefix, Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark), -- GAA External Scope => Scope); end AttributeIsVisible; -------------------------------------------------------------------------------- -- Attribute_Is_Visible_But_Obsolete -------------------------------------------------------------------------------- function Attribute_Is_Visible_But_Obsolete_Local (Name : LexTokenManager.Lex_String; Prefix : PrefixSort; Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in CommandLineData.Content; --# in Dict; --# in LexTokenManager.State; is separate; -------------------------------------------------------------------------------- function Attribute_Is_Visible_But_Obsolete (Name : LexTokenManager.Lex_String; Prefix : PrefixSort; TypeMark : Symbol; Scope : Scopes) return Boolean is begin return Attribute_Is_Visible_But_Obsolete_Local (Name => Name, Prefix => Prefix, Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark), -- GAA External Scope => Scope); end Attribute_Is_Visible_But_Obsolete; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- procedure Write_Location (File : in SPARK_IO.File_Type; Loc : in Location) --# global in Dict; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# Loc; is procedure Write_Token_Position (File : in SPARK_IO.File_Type; Pos : in LexTokenManager.Token_Position) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# Pos; is procedure Write_Line_Number (File : in SPARK_IO.File_Type; Value : in LexTokenManager.Line_Numbers) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# Value; is begin Write_Integer (File, Integer (Value)); end Write_Line_Number; begin -- Write_Token_Position Write_String (File, "line "); Write_Line_Number (File => File, Value => Pos.Start_Line_No); Write_String (File, " column "); Write_Integer (File, Pos.Start_Pos); end Write_Token_Position; begin -- Write_Location E_Strings.Put_String (File => File, E_Str => Dict.File_Name); Write_Space (File => File); Write_Token_Position (File => File, Pos => Loc.Start_Position); Write_String (File, " to "); Write_Token_Position (File => File, Pos => Loc.End_Position); end Write_Location; -------------------------------------------------------------------------------- procedure Write_Reference (Discriminant : in ReferenceDiscriminant; Item : in Symbol; CompilationUnit : in Symbol; Reference : in Location) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CompilationUnit, --# Dict, --# Discriminant, --# Item, --# LexTokenManager.State, --# Reference; is procedure Write_Discriminant (File : in SPARK_IO.File_Type; Discriminant : in ReferenceDiscriminant) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Discriminant, --# File; is begin case Discriminant is when ReadRef => Write_String (File, "read"); when WriteRef => Write_String (File, "write"); when CallRef => Write_String (File, "call"); when OtherRef => 
Write_String (File, "other"); end case; end Write_Discriminant; begin -- Write_Reference if SPARK_IO.Is_Open (Dict.TemporaryFile) then Write_Discriminant (File => Dict.TemporaryFile, Discriminant => Discriminant); Write_String (Dict.TemporaryFile, " reference to "); Write_Name (File => Dict.TemporaryFile, Item => Item); Write_String (Dict.TemporaryFile, " in "); Write_Name (File => Dict.TemporaryFile, Item => CompilationUnit); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => Reference); Write_Line (Dict.TemporaryFile, " ;"); end if; end Write_Reference; -------------------------------------------------------------------------------- procedure AddOtherReference (Item, CompilationUnit : in Symbol; Reference : in Location) is begin Write_Reference (Discriminant => OtherRef, Item => Item, CompilationUnit => CompilationUnit, Reference => Reference); end AddOtherReference; -------------------------------------------------------------------------------- -- Write_Declaration -------------------------------------------------------------------------------- procedure Write_Constant_Declaration (The_Constant : in RawDict.Constant_Info_Ref; Is_Deferred : in Boolean; Declaration : in Location) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Declaration, --# Dict, --# Is_Deferred, --# LexTokenManager.State, --# The_Constant; is begin if SPARK_IO.Is_Open (Dict.TemporaryFile) then if Is_Deferred then Write_String (Dict.TemporaryFile, "deferred "); end if; Write_String (Dict.TemporaryFile, "constant declaration of "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Constant_Symbol (The_Constant)); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => Declaration); Write_Line (Dict.TemporaryFile, " ;"); end if; end Write_Constant_Declaration; -------------------------------------------------------------------------------- procedure Write_Variable_Declaration (The_Variable : in RawDict.Variable_Info_Ref; Declaration : in Location) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Declaration, --# Dict, --# LexTokenManager.State, --# The_Variable; is begin if SPARK_IO.Is_Open (Dict.TemporaryFile) then Write_String (Dict.TemporaryFile, "variable declaration of "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Variable_Symbol (The_Variable)); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => Declaration); Write_Line (Dict.TemporaryFile, " ;"); end if; end Write_Variable_Declaration; -------------------------------------------------------------------------------- procedure Write_Type_Declaration (Type_Mark : in RawDict.Type_Info_Ref; Discriminant : in DeclarationDiscriminant; Declaration : in Location) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Declaration, --# Dict, --# Discriminant, --# LexTokenManager.State, --# Type_Mark; is begin if SPARK_IO.Is_Open (Dict.TemporaryFile) then case Discriminant is when TypeDeclaration => Write_String (Dict.TemporaryFile, "type declaration of "); when TypeAnnouncement => Write_String (Dict.TemporaryFile, "type announcement of "); when PrivateTypeDeclaration => Write_String (Dict.TemporaryFile, "private type declaration of "); end case; Write_Name (File => Dict.TemporaryFile, Item => 
RawDict.Get_Type_Symbol (Type_Mark)); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => Declaration); Write_Line (Dict.TemporaryFile, " ;"); end if; end Write_Type_Declaration; -------------------------------------------------------------------------------- procedure Write_Subprogram_Declaration (The_Subprogram : in RawDict.Subprogram_Info_Ref; Is_Renaming : in Boolean; Scope : in Scopes; Declaration : in Location) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Declaration, --# Dict, --# Is_Renaming, --# LexTokenManager.State, --# Scope, --# The_Subprogram; is begin if SPARK_IO.Is_Open (Dict.TemporaryFile) then if Is_Renaming then Write_String (Dict.TemporaryFile, "subprogram "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Subprogram_Symbol (The_Subprogram)); Write_String (Dict.TemporaryFile, " is renamed in "); Write_Scope (Dict.TemporaryFile, Scope); else Write_String (Dict.TemporaryFile, "subprogram specification of "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Subprogram_Symbol (The_Subprogram)); Write_String (Dict.TemporaryFile, " is"); end if; Write_String (Dict.TemporaryFile, " at "); Write_Location (File => Dict.TemporaryFile, Loc => Declaration); Write_Line (Dict.TemporaryFile, " ;"); end if; end Write_Subprogram_Declaration; -------------------------------------------------------------------------------- -- Add_Deferred_Constant -------------------------------------------------------------------------------- procedure Add_Deferred_Constant_Local (Name : in LexTokenManager.Lex_String; Type_Mark : in RawDict.Type_Info_Ref; Type_Reference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; The_Package : in RawDict.Package_Info_Ref; The_Constant : out RawDict.Constant_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# The_Constant from Comp_Unit, --# Declaration, --# Dict, --# Name, --# The_Package, --# Type_Mark & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# The_Package, --# Type_Mark, --# Type_Reference; --# post The_Constant /= RawDict.Null_Constant_Info_Ref; is The_Declaration : RawDict.Declaration_Info_Ref; begin Add_Declaration (Comp_Unit => Comp_Unit, Loc => Declaration, Scope => Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (The_Package)), Context => ProgramContext, The_Declaration => The_Declaration); RawDict.Create_Constant (Name => Name, Type_Mark => Type_Mark, Static => False, The_Declaration => The_Declaration, Is_Deferred => True, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, The_Constant => The_Constant); if Type_Mark /= Get_Unknown_Type_Mark then AddOtherReference (RawDict.Get_Type_Symbol (Type_Mark), RawDict.Get_Package_Symbol (The_Package), Type_Reference); end if; Write_Constant_Declaration (The_Constant => The_Constant, Is_Deferred => True, Declaration => Declaration); end Add_Deferred_Constant_Local; -------------------------------------------------------------------------------- procedure Add_Deferred_Constant (Name : in LexTokenManager.Lex_String; Type_Mark : in Symbol; Type_Reference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; The_Package : in Symbol; TheConstant : out Symbol) is The_Constant : RawDict.Constant_Info_Ref; begin 
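      -- This wrapper follows the conversion pattern marked "-- GAA External"
      -- throughout this package: the externally visible subprogram takes plain
      -- Symbol values and converts them to the internal RawDict view before
      -- delegating to Add_Deferred_Constant_Local.  A minimal, hypothetical
      -- usage sketch (Const_Name_Token, Type_Sym, Pack_Sym, Const_Sym and the
      -- Location values are illustrative placeholders, not names from this
      -- code base):
      --
      --    Add_Deferred_Constant
      --      (Name           => Const_Name_Token,
      --       Type_Mark      => Type_Sym,       -- a Symbol, not a Type_Info_Ref
      --       Type_Reference => Type_Mark_Loc,
      --       Comp_Unit      => Current_Unit,
      --       Declaration    => Decl_Loc,
      --       The_Package    => Pack_Sym,
      --       TheConstant    => Const_Sym);     -- returned as a Symbol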
Add_Deferred_Constant_Local (Name => Name, Type_Mark => RawDict.Get_Type_Info_Ref (Type_Mark), -- GAA External Type_Reference => Type_Reference, Comp_Unit => Comp_Unit, Declaration => Declaration, The_Package => RawDict.Get_Package_Info_Ref (The_Package), -- GAA External The_Constant => The_Constant); TheConstant := RawDict.Get_Constant_Symbol (The_Constant); -- GAA External end Add_Deferred_Constant; -------------------------------------------------------------------------------- -- Add_Constant -------------------------------------------------------------------------------- procedure Add_Constant_Local (Name : in LexTokenManager.Lex_String; Type_Mark : in RawDict.Type_Info_Ref; Static : in Boolean; The_Declaration : in RawDict.Declaration_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Value : in LexTokenManager.Lex_String; Exp_Is_Wellformed : in Boolean; Exp_Node : in ExaminerConstants.RefType; The_Constant : in out RawDict.Constant_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# Exp_Is_Wellformed, --# Exp_Node, --# Name, --# Static, --# The_Constant, --# The_Declaration, --# Type_Mark, --# Value & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# Exp_Is_Wellformed, --# Exp_Node, --# LexTokenManager.State, --# Name, --# Static, --# The_Constant, --# The_Declaration, --# Type_Mark, --# Value & --# The_Constant from *, --# Comp_Unit, --# Declaration, --# Dict, --# Name, --# Static, --# The_Declaration, --# Type_Mark; --# post The_Constant /= RawDict.Null_Constant_Info_Ref; is begin if The_Constant = RawDict.Null_Constant_Info_Ref then RawDict.Create_Constant (Name => Name, Type_Mark => Type_Mark, Static => Static, The_Declaration => The_Declaration, Is_Deferred => False, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, The_Constant => The_Constant); else RawDict.Set_Constant_Static (The_Constant => The_Constant, Static => Static); RawDict.Set_Constant_Declaration (The_Constant => The_Constant, The_Declaration => The_Declaration); end if; RawDict.Set_Constant_Value (The_Constant => The_Constant, Value => Value); RawDict.Set_Constant_Exp_Node (The_Constant => The_Constant, Exp_Is_Wellformed => Exp_Is_Wellformed, Exp_Node => Exp_Node); Write_Constant_Declaration (The_Constant => The_Constant, Is_Deferred => False, Declaration => Declaration); end Add_Constant_Local; -------------------------------------------------------------------------------- procedure Add_Constant (Name : in LexTokenManager.Lex_String; The_Type : in Symbol; Static : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Value : in LexTokenManager.Lex_String; Exp_Is_Wellformed : in Boolean; Exp_Node : in ExaminerConstants.RefType; Constant_Sym : in out Symbol) is The_Constant : RawDict.Constant_Info_Ref; begin The_Constant := RawDict.Get_Constant_Info_Ref (Constant_Sym); -- GAA External Add_Constant_Local (Name => Name, Type_Mark => RawDict.Get_Type_Info_Ref (The_Type), -- GAA External Static => Static, The_Declaration => RawDict.Null_Declaration_Info_Ref, Comp_Unit => Comp_Unit, Declaration => Declaration, Value => Value, Exp_Is_Wellformed => Exp_Is_Wellformed, Exp_Node => Exp_Node, The_Constant => The_Constant); Constant_Sym := RawDict.Get_Constant_Symbol (The_Constant); -- GAA External end Add_Constant; -------------------------------------------------------------------------------- -- Add_Constant_Declaration 
-------------------------------------------------------------------------------- procedure Add_Constant_Declaration (Name : in LexTokenManager.Lex_String; Type_Mark : in Symbol; Type_Reference : in Location; Value : in LexTokenManager.Lex_String; Exp_Is_Wellformed : in Boolean; Exp_Node : in ExaminerConstants.RefType; Static : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; TheConstant : out Symbol) is The_Constant : RawDict.Constant_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Constant_Declaration_Local (Name : in LexTokenManager.Lex_String; Type_Mark : in RawDict.Type_Info_Ref; Type_Reference : in Location; Value : in LexTokenManager.Lex_String; Exp_Is_Wellformed : in Boolean; Exp_Node : in ExaminerConstants.RefType; Static : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; The_Constant : out RawDict.Constant_Info_Ref) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# CommandLineData.Content, --# Comp_Unit, --# Context, --# Declaration, --# Exp_Is_Wellformed, --# Exp_Node, --# LexTokenManager.State, --# Name, --# Scope, --# Static, --# Type_Mark, --# Value & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# Exp_Is_Wellformed, --# Exp_Node, --# LexTokenManager.State, --# Name, --# Scope, --# Static, --# Type_Mark, --# Type_Reference, --# Value & --# The_Constant from CommandLineData.Content, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope, --# Static, --# Type_Mark; --# post The_Constant /= RawDict.Null_Constant_Info_Ref; is Region : Symbol; The_Declaration : RawDict.Declaration_Info_Ref; begin Region := GetRegion (Scope); if (Type_Is_Private (Type_Mark => Get_Root_Type (Type_Mark => Type_Mark)) or else CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83) and then Get_Visibility (Scope => Scope) = Privat and then RawDict.GetSymbolDiscriminant (Region) = Package_Symbol and then Context = ProgramContext then The_Constant := RawDict.Get_Constant_Info_Ref (LookupImmediateScope (Name, Set_Visibility (The_Visibility => Visible, The_Unit => Region), Context)); else The_Constant := RawDict.Null_Constant_Info_Ref; end if; Add_Declaration (Comp_Unit => Comp_Unit, Loc => Declaration, Scope => Scope, Context => Context, The_Declaration => The_Declaration); Add_Constant_Local (Name => Name, Type_Mark => Type_Mark, Static => Static, The_Declaration => The_Declaration, Comp_Unit => Comp_Unit, Declaration => Declaration, Value => Value, Exp_Is_Wellformed => Exp_Is_Wellformed, Exp_Node => Exp_Node, The_Constant => The_Constant); if Type_Mark /= Get_Unknown_Type_Mark then AddOtherReference (RawDict.Get_Type_Symbol (Type_Mark), Region, Type_Reference); end if; end Add_Constant_Declaration_Local; begin -- Add_Constant_Declaration Add_Constant_Declaration_Local (Name => Name, Type_Mark => RawDict.Get_Type_Info_Ref (Type_Mark), -- GAA External Type_Reference => Type_Reference, Value => Value, Exp_Is_Wellformed => Exp_Is_Wellformed, Exp_Node => Exp_Node, Static => Static, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, The_Constant => The_Constant); TheConstant := RawDict.Get_Constant_Symbol (The_Constant); -- GAA External end 
Add_Constant_Declaration; -------------------------------------------------------------------------------- -- Promote_Deferred_To_Full_Constant -------------------------------------------------------------------------------- -- This procedure is only use in 1 special case - to promote the -- standard declaration of System.Default_Bit_Order from a deferred -- constant to a normal constant in the case where the full declaration -- is supplied in the configuration file. (Some compilers, particularly GNAT, -- do give such a full constant declaration) procedure Promote_Deferred_To_Full_Constant (Constant_Sym : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Value : in LexTokenManager.Lex_String; Exp_Node : in ExaminerConstants.RefType; The_Package : in Symbol) is procedure Promote_Deferred_To_Full_Constant_Local (The_Constant : in RawDict.Constant_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Value : in LexTokenManager.Lex_String; Exp_Node : in ExaminerConstants.RefType; The_Package : in Symbol) --# global in out Dict; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# Exp_Node, --# The_Constant, --# The_Package, --# Value; is The_Declaration : RawDict.Declaration_Info_Ref; begin Add_Declaration (Comp_Unit => Comp_Unit, Loc => Declaration, Scope => Set_Visibility (The_Visibility => Visible, The_Unit => The_Package), Context => ProgramContext, The_Declaration => The_Declaration); RawDict.Set_Declaration_Item (The_Declaration => The_Declaration, Item => RawDict.Get_Constant_Symbol (The_Constant)); RawDict.Set_Constant_Value (The_Constant => The_Constant, Value => Value); RawDict.Set_Constant_Exp_Node (The_Constant => The_Constant, Exp_Is_Wellformed => True, Exp_Node => Exp_Node); RawDict.Set_Constant_Static (The_Constant => The_Constant, Static => True); RawDict.Set_Constant_Declaration (The_Constant => The_Constant, The_Declaration => The_Declaration); RawDict.Set_Constant_Deferred_Declaration (The_Constant => The_Constant, The_Declaration => RawDict.Null_Declaration_Info_Ref); end Promote_Deferred_To_Full_Constant_Local; begin -- Promote_Deferred_To_Full_Constant Promote_Deferred_To_Full_Constant_Local (The_Constant => RawDict.Get_Constant_Info_Ref (Constant_Sym), -- GAA External Comp_Unit => Comp_Unit, Declaration => Declaration, Value => Value, Exp_Node => Exp_Node, The_Package => The_Package); end Promote_Deferred_To_Full_Constant; -------------------------------------------------------------------------------- -- Add_Constant_Rule_Policy -------------------------------------------------------------------------------- procedure AddConstantRulePolicy (TheConstant : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; TheScope : in Scopes; ThePolicy : in Rule_Policies; TheRulePolicy : out Symbol) is The_Rule_Policy : RawDict.Rule_Policy_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Constant_Rule_Policy (The_Constant : in RawDict.Constant_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; The_Policy : in Rule_Policies; The_Rule_Policy : out RawDict.Rule_Policy_Info_Ref) --# global in out Dict; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# Scope, --# The_Constant, --# The_Policy & --# The_Rule_Policy from Comp_Unit, --# Declaration, --# Dict, --# Scope, --# The_Policy; --# post The_Rule_Policy /= RawDict.Null_Rule_Policy_Info_Ref; is begin 
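         -- The rule policies attached to a constant form a singly linked list in
         -- the Dictionary: Get_Constant_First_Rule_Policy yields the head,
         -- Get_Constant_Last_Rule_Policy the tail, and Set_Next_Rule_Policy links
         -- successive nodes.  The statements below append the newly created
         -- policy at the tail in constant time, updating the head as well when
         -- the list is still empty.  Intended shape after several additions
         -- (P1 .. Pn are illustrative node names only):
         --
         --    First_Rule_Policy --> P1 --> P2 --> ... --> Pn <-- Last_Rule_Policy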
RawDict.Create_Rule_Policy (Scope => Scope, Value => The_Policy, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, The_Rule_Policy => The_Rule_Policy); if RawDict.Get_Constant_First_Rule_Policy (The_Constant => The_Constant) = RawDict.Null_Rule_Policy_Info_Ref then RawDict.Set_Constant_First_Rule_Policy (The_Constant => The_Constant, The_Rule_Policy => The_Rule_Policy); else RawDict.Set_Next_Rule_Policy (The_Rule_Policy => RawDict.Get_Constant_Last_Rule_Policy (The_Constant => The_Constant), Next => The_Rule_Policy); end if; RawDict.Set_Constant_Last_Rule_Policy (The_Constant => The_Constant, The_Rule_Policy => The_Rule_Policy); end Add_Constant_Rule_Policy; begin -- AddConstantRulePolicy Add_Constant_Rule_Policy (The_Constant => RawDict.Get_Constant_Info_Ref (TheConstant), -- GAA External Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => TheScope, The_Policy => ThePolicy, The_Rule_Policy => The_Rule_Policy); TheRulePolicy := RawDict.Get_Rule_Policy_Symbol (The_Rule_Policy); -- GAA External end AddConstantRulePolicy; ------------------------------------------------------------------------------- -- Add_Variable_Declaration ------------------------------------------------------------------------------- procedure Add_Variable_Declaration_Local (The_Variable : in RawDict.Variable_Info_Ref; Type_Mark : in RawDict.Type_Info_Ref; Initialized : in Boolean; Is_Aliased : in Boolean; Exp_Node : in ExaminerConstants.RefType; Type_Reference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; The_Declaration : out RawDict.Declaration_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# Exp_Node, --# Initialized, --# Is_Aliased, --# Scope, --# The_Variable, --# Type_Mark & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# Exp_Node, --# Initialized, --# Is_Aliased, --# LexTokenManager.State, --# Scope, --# The_Variable, --# Type_Mark, --# Type_Reference & --# The_Declaration from Comp_Unit, --# Declaration, --# Dict, --# Scope, --# The_Variable; --# post The_Declaration /= RawDict.Null_Declaration_Info_Ref; is begin if RawDict.Get_Symbol_Compilation_Unit (Item => RawDict.Get_Variable_Symbol (The_Variable)) = Comp_Unit then -- There was a usage of the variable before its actual -- declaration -> update the declaration position. The -- declaration must be in the same compilation unit as the -- usage (in the package specification) to prevent to get a -- cross-reference from the package specification to the -- package body. 
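         -- A hypothetical illustration of the case described above: an own
         -- variable annotation naming State in a package specification
         -- introduces State before its Ada declaration "State : T;" is
         -- reached.  Both occurrences are in the same compilation unit, so the
         -- recorded location is moved to the Ada declaration; had the Ada
         -- declaration appeared in the package body (a different compilation
         -- unit), the location would be left unchanged.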
RawDict.Set_Symbol_Location (Item => RawDict.Get_Variable_Symbol (The_Variable), Location => Declaration.Start_Position); end if; Add_Declaration (Comp_Unit => Comp_Unit, Loc => Declaration, Scope => Scope, Context => ProgramContext, The_Declaration => The_Declaration); RawDict.Set_Variable_Declaration (The_Variable => The_Variable, The_Declaration => The_Declaration); RawDict.Set_Variable_Exp_Node (The_Variable => The_Variable, Exp_Node => Exp_Node); RawDict.Set_Variable_Type (The_Variable => The_Variable, Type_Mark => Type_Mark); if Is_Aliased then RawDict.Set_Variable_Is_Aliased (The_Variable => The_Variable); end if; if Initialized then RawDict.Set_Variable_Initialized (The_Variable => The_Variable); end if; if Type_Mark /= Get_Unknown_Type_Mark then AddOtherReference (RawDict.Get_Type_Symbol (Type_Mark), GetRegion (Scope), Type_Reference); end if; Write_Variable_Declaration (The_Variable => The_Variable, Declaration => Declaration); end Add_Variable_Declaration_Local; ------------------------------------------------------------------------------- procedure Add_Variable_Declaration (Variable_Sym : in Symbol; The_Type : in Symbol; Initialized : in Boolean; Is_Aliased : in Boolean; Exp_Node : in ExaminerConstants.RefType; Type_Reference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Declaration_Symbol : out Symbol) is The_Declaration : RawDict.Declaration_Info_Ref; begin Add_Variable_Declaration_Local (The_Variable => RawDict.Get_Variable_Info_Ref (Variable_Sym), -- GAA External Type_Mark => RawDict.Get_Type_Info_Ref (The_Type), -- GAA External Initialized => Initialized, Is_Aliased => Is_Aliased, Exp_Node => Exp_Node, Type_Reference => Type_Reference, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, The_Declaration => The_Declaration); Declaration_Symbol := RawDict.Get_Declaration_Symbol (The_Declaration); -- GAA External end Add_Variable_Declaration; ------------------------------------------------------------------------------- -- Add_Variable ------------------------------------------------------------------------------- procedure Add_Variable (Name : in LexTokenManager.Lex_String; The_Type : in Symbol; Initialized : in Boolean; Is_Aliased : in Boolean; Exp_Node : in ExaminerConstants.RefType; Type_Reference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Declaration_Symbol : out Symbol; Variable_Symbol : out Symbol) is The_Declaration : RawDict.Declaration_Info_Ref; The_Variable : RawDict.Variable_Info_Ref; ------------------------------------------------------------------------------- procedure Add_Variable_Local (Name : in LexTokenManager.Lex_String; Type_Mark : in RawDict.Type_Info_Ref; Initialized : in Boolean; Is_Aliased : in Boolean; Exp_Node : in ExaminerConstants.RefType; Type_Reference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; The_Declaration : out RawDict.Declaration_Info_Ref; The_Variable : out RawDict.Variable_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# Exp_Node, --# Initialized, --# Is_Aliased, --# Name, --# Scope, --# Type_Mark & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# Exp_Node, --# Initialized, --# Is_Aliased, --# LexTokenManager.State, --# Name, --# Scope, --# Type_Mark, --# Type_Reference & --# The_Declaration from 
Comp_Unit, --# Declaration, --# Dict, --# Name, --# Scope & --# The_Variable from Comp_Unit, --# Declaration, --# Dict, --# Name; --# post The_Declaration /= RawDict.Null_Declaration_Info_Ref and --# The_Variable /= RawDict.Null_Variable_Info_Ref; is begin RawDict.Create_Variable (Name => Name, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, The_Variable => The_Variable); Add_Variable_Declaration_Local (The_Variable => The_Variable, Type_Mark => Type_Mark, Initialized => Initialized, Is_Aliased => Is_Aliased, Exp_Node => Exp_Node, Type_Reference => Type_Reference, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, The_Declaration => The_Declaration); end Add_Variable_Local; begin -- Add_Variable Add_Variable_Local (Name => Name, Type_Mark => RawDict.Get_Type_Info_Ref (The_Type), -- GAA External Initialized => Initialized, Is_Aliased => Is_Aliased, Exp_Node => Exp_Node, Type_Reference => Type_Reference, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, The_Declaration => The_Declaration, The_Variable => The_Variable); Declaration_Symbol := RawDict.Get_Declaration_Symbol (The_Declaration); -- GAA External Variable_Symbol := RawDict.Get_Variable_Symbol (The_Variable); -- GAA External end Add_Variable; ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- procedure AddVariableAddressClause (Variable : in Symbol) is begin RawDict.Set_Variable_Has_Address_Clause (The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External end AddVariableAddressClause; ------------------------------------------------------------------------------- -- Add_Variable_Pragma_Import ------------------------------------------------------------------------------- procedure AddVariablePragmaImport (Variable : in Symbol) is procedure Add_Variable_Pragma_Import (The_Variable : in RawDict.Variable_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Variable; is begin RawDict.Set_Variable_Has_Pragma_Import (The_Variable => The_Variable); -- Ada LRM states that variable with pragma import is deemed to be initialized by external code segment RawDict.Set_Variable_Initialized (The_Variable => The_Variable); -- so we can mark it as initialized end Add_Variable_Pragma_Import; begin -- AddVariablePragmaImport Add_Variable_Pragma_Import (The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External end AddVariablePragmaImport; ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- procedure AddTypeSizeAttribute (TypeMark : in Symbol; SizeVal : in LexTokenManager.Lex_String) is begin RawDict.Set_Type_Size_Attribute (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark), -- GAA External Size_Val => SizeVal); end AddTypeSizeAttribute; ------------------------------------------------------------------------------- procedure SetVariableMarkedValid (Variable : in Symbol; Val : in Boolean) is begin RawDict.Set_Variable_Marked_Valid (The_Variable => RawDict.Get_Variable_Info_Ref (Variable), -- GAA External Val => Val); end SetVariableMarkedValid; ------------------------------------------------------------------------------- procedure SetSubcomponentMarkedValid (Subcomponent : in Symbol; Val : in Boolean) is begin RawDict.Set_Subcomponent_Marked_Valid (The_Subcomponent => RawDict.Get_Subcomponent_Info_Ref (Subcomponent), -- GAA External Marked_Valid 
=> Val); end SetSubcomponentMarkedValid; ------------------------------------------------------------------------------- procedure SetTypeAtomic (TypeMark : in Symbol) is begin RawDict.Set_Type_Atomic (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark)); -- GAA External end SetTypeAtomic; ------------------------------------------------------------------------------- -- Add_Record_Subcomponent ------------------------------------------------------------------------------- procedure Add_Record_Subcomponent (Prefix : in Symbol; The_Record_Component : in RawDict.Record_Component_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; The_Subcomponent : out RawDict.Subcomponent_Info_Ref) --# global in out Dict; --# derives Dict, --# The_Subcomponent from Comp_Unit, --# Dict, --# Prefix, --# The_Record_Component; --# post The_Subcomponent /= RawDict.Null_Subcomponent_Info_Ref; is separate; ------------------------------------------------------------------------------- procedure AddRecordSubcomponent (Prefix : in Symbol; Component : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Subcomponent : out Symbol) is The_Subcomponent : RawDict.Subcomponent_Info_Ref; begin Add_Record_Subcomponent (Prefix => Prefix, The_Record_Component => RawDict.Get_Record_Component_Info_Ref (Component), -- GAA External Comp_Unit => Comp_Unit, The_Subcomponent => The_Subcomponent); Subcomponent := RawDict.Get_Subcomponent_Symbol (The_Subcomponent); -- GAA External end AddRecordSubcomponent; ------------------------------------------------------------------------------- -- Add_Quantified_Variable ------------------------------------------------------------------------------- procedure AddQuantifiedVariable (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; TypeMark : in Symbol; TheConstraint : in Symbol; Region : in Symbol; Variable : out Symbol) is The_Quantified_Variable : RawDict.Quantified_Variable_Info_Ref; ------------------------------------------------------------------------------- procedure Add_Quantified_Variable (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Type_Mark : in RawDict.Type_Info_Ref; The_Parameter_Constraint : in RawDict.Parameter_Constraint_Info_Ref; Region : in Symbol; The_Quantified_Variable : out RawDict.Quantified_Variable_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# The_Quantified_Variable from Comp_Unit, --# Declaration, --# Dict, --# Name, --# Region, --# The_Parameter_Constraint, --# Type_Mark & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Region, --# The_Parameter_Constraint, --# Type_Mark; --# post The_Quantified_Variable /= RawDict.Null_Quantified_Variable_Info_Ref; is begin RawDict.Create_Quantified_Variable (Name => Name, Type_Mark => Type_Mark, The_Parameter_Constraint => The_Parameter_Constraint, Region => Region, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, The_Quantified_Variable => The_Quantified_Variable); if Type_Mark /= RawDict.Null_Type_Info_Ref then if Type_Mark /= Get_Unknown_Type_Mark then AddOtherReference (RawDict.Get_Type_Symbol (Type_Mark), Region, Declaration); end if; elsif The_Parameter_Constraint /= RawDict.Null_Parameter_Constraint_Info_Ref then AddOtherReference (RawDict.Get_Parameter_Constraint_Symbol (The_Parameter_Constraint), Region, Declaration); end if; end 
Add_Quantified_Variable; begin -- AddQuantifiedVariable Add_Quantified_Variable (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark), -- GAA External The_Parameter_Constraint => RawDict.Get_Parameter_Constraint_Info_Ref (TheConstraint), -- GAA External Region => Region, The_Quantified_Variable => The_Quantified_Variable); Variable := RawDict.Get_Quantified_Variable_Symbol (The_Quantified_Variable); -- GAA External end AddQuantifiedVariable; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- procedure Add_Record_Type_Mark (Type_Mark : in RawDict.Type_Info_Ref; Is_Tagged_Type : in Boolean; Extends : in RawDict.Type_Info_Ref) --# global in out Dict; --# derives Dict from *, --# Extends, --# Is_Tagged_Type, --# Type_Mark; is begin RawDict.Set_Type_Discriminant (Type_Mark => Type_Mark, Discriminant => Record_Type_Item); RawDict.Set_Type_Static (Type_Mark => Type_Mark, Static => False); RawDict.Set_Type_Is_Tagged (Type_Mark => Type_Mark, Is_Tagged => Is_Tagged_Type); RawDict.Set_Type_Extends (Type_Mark => Type_Mark, Root_Type => Extends); end Add_Record_Type_Mark; -------------------------------------------------------------------------------- -- Add_Type_Announcement -------------------------------------------------------------------------------- procedure Add_Type_Announcement (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; The_Package : in Symbol; The_Type : out Symbol) is Type_Mark : RawDict.Type_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Type_Announcement_Local (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; The_Package : in Symbol; Type_Mark : out RawDict.Type_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# Type_Mark from Comp_Unit, --# Declaration, --# Dict, --# Name, --# The_Package & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# The_Package; --# post Type_Mark /= RawDict.Null_Type_Info_Ref; is The_Declaration : RawDict.Declaration_Info_Ref; begin Add_Declaration (Comp_Unit => Comp_Unit, Loc => Declaration, Scope => Set_Visibility (The_Visibility => Visible, The_Unit => The_Package), Context => ProofContext, The_Declaration => The_Declaration); RawDict.Create_Type (Name => Name, The_Declaration => The_Declaration, Is_Private => False, Is_Announcement => True, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, Type_Mark => Type_Mark); Write_Type_Declaration (Type_Mark => Type_Mark, Discriminant => TypeAnnouncement, Declaration => Declaration); end Add_Type_Announcement_Local; begin -- Add_Type_Announcement Add_Type_Announcement_Local (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, The_Package => The_Package, Type_Mark => Type_Mark); The_Type := RawDict.Get_Type_Symbol (Type_Mark); -- GAA External end Add_Type_Announcement; -------------------------------------------------------------------------------- -- Add_Private_Type -------------------------------------------------------------------------------- procedure Add_Private_Type_Local (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; The_Package : in 
RawDict.Package_Info_Ref; Is_Limited : in Boolean; Is_Tagged_Type : in Boolean; Extends : in RawDict.Type_Info_Ref; Type_Mark : out RawDict.Type_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# Extends, --# Is_Limited, --# Is_Tagged_Type, --# LexTokenManager.State, --# Name, --# The_Package & --# Type_Mark from Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# The_Package; --# post Type_Mark /= RawDict.Null_Type_Info_Ref; is The_Declaration : RawDict.Declaration_Info_Ref; begin Type_Mark := RawDict.Get_Type_Info_Ref (LookupImmediateScope (Name, Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (The_Package)), ProofContext)); Add_Declaration (Comp_Unit => Comp_Unit, Loc => Declaration, Scope => Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (The_Package)), Context => ProgramContext, The_Declaration => The_Declaration); if Type_Mark = RawDict.Null_Type_Info_Ref then RawDict.Create_Type (Name => Name, The_Declaration => The_Declaration, Is_Private => True, Is_Announcement => False, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, Type_Mark => Type_Mark); elsif RawDict.Get_Symbol_Compilation_Unit (Item => RawDict.Get_Type_Symbol (Type_Mark)) = Comp_Unit then -- There was a usage of the type before its actual -- declaration -> update the declaration position. The -- declaration must be in the same compilation unit as the -- usage (in the package specification) to prevent to get a -- cross-reference from the package specification to the -- package body. RawDict.Set_Symbol_Location (Item => RawDict.Get_Type_Symbol (Type_Mark), Location => Declaration.Start_Position); RawDict.Set_Type_Private_Type_Declaration (Type_Mark => Type_Mark, The_Declaration => The_Declaration); RawDict.Set_Declaration_Item (The_Declaration => The_Declaration, Item => RawDict.Get_Type_Symbol (Type_Mark)); end if; RawDict.Set_Type_Extends (Type_Mark => Type_Mark, Root_Type => Extends); if Is_Limited then RawDict.Set_Type_Limited (Type_Mark => Type_Mark, Is_Limited => Sometimes); RawDict.Set_Type_Limited_Private (Type_Mark => Type_Mark); else RawDict.Set_Type_Limited (Type_Mark => Type_Mark, Is_Limited => Never); end if; if Is_Tagged_Type then RawDict.Set_Package_Declares_Tagged_Type (The_Package => The_Package); end if; RawDict.Set_Type_Is_Tagged (Type_Mark => Type_Mark, Is_Tagged => Is_Tagged_Type); Write_Type_Declaration (Type_Mark => Type_Mark, Discriminant => PrivateTypeDeclaration, Declaration => Declaration); end Add_Private_Type_Local; -------------------------------------------------------------------------------- procedure Add_Private_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; The_Package : in Symbol; Is_Limited : in Boolean; Is_Tagged_Type : in Boolean; Extends : in Symbol; The_Type : out Symbol) is Type_Mark : RawDict.Type_Info_Ref; begin Add_Private_Type_Local (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, The_Package => RawDict.Get_Package_Info_Ref (The_Package), -- GAA External Is_Limited => Is_Limited, Is_Tagged_Type => Is_Tagged_Type, Extends => RawDict.Get_Type_Info_Ref (Extends), -- GAA External Type_Mark => Type_Mark); The_Type := RawDict.Get_Type_Symbol (Type_Mark); -- GAA External end Add_Private_Type; 
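   -- A minimal, hypothetical sketch of the expected call sequence for a
   -- private type and its later completion (T_Token, T_Sym and the elided
   -- parameters are illustrative placeholders):
   --
   --    --  visible part:   type T is private;
   --    Add_Private_Type (Name => T_Token, ..., The_Type => T_Sym);
   --
   --    --  private part:   type T is range 1 .. 10;
   --    Add_Integer_Type (Name => T_Token, ..., The_Type => T_Sym);
   --
   -- The full-view routines reach Add_Type_Declaration below, which looks the
   -- name up with LookupImmediateScope and completes the existing private (or
   -- announced) view instead of creating a second entry.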
   --------------------------------------------------------------------------------
   --------------------------------------------------------------------------------

   procedure Add_Type_Declaration
     (Name        : in     LexTokenManager.Lex_String;
      Comp_Unit   : in     ContextManager.UnitDescriptors;
      Declaration : in     Location;
      Scope       : in     Scopes;
      Context     : in     Contexts;
      Type_Mark   :    out RawDict.Type_Info_Ref)
   --# global in     LexTokenManager.State;
   --#        in out Dict;
   --#        in out SPARK_IO.File_Sys;
   --# derives Dict,
   --#         SPARK_IO.File_Sys from *,
   --#                                Comp_Unit,
   --#                                Context,
   --#                                Declaration,
   --#                                Dict,
   --#                                LexTokenManager.State,
   --#                                Name,
   --#                                Scope &
   --#         Type_Mark         from Comp_Unit,
   --#                                Context,
   --#                                Declaration,
   --#                                Dict,
   --#                                LexTokenManager.State,
   --#                                Name,
   --#                                Scope;
   --# post Type_Mark /= RawDict.Null_Type_Info_Ref;
   is
      Region          : Symbol;
      The_Declaration : RawDict.Declaration_Info_Ref;
   begin
      Region := GetRegion (Scope);
      if RawDict.GetSymbolDiscriminant (Region) = Package_Symbol and then Context = ProgramContext then
         -------------------------------------------------------------
         -- If declaring a type within a package in ProgramContext,
         -- then that type may have already been declared by way of a
         -- private type declaration, a type announcement, or both.
         -------------------------------------------------------------
         Type_Mark :=
           RawDict.Get_Type_Info_Ref
             (LookupImmediateScope (Name, Set_Visibility (The_Visibility => Visible, The_Unit => Region), ProofContext));
      else
         Type_Mark := RawDict.Null_Type_Info_Ref;
      end if;
      Add_Declaration
        (Comp_Unit       => Comp_Unit,
         Loc             => Declaration,
         Scope           => Scope,
         Context         => Context,
         The_Declaration => The_Declaration);
      if Type_Mark = RawDict.Null_Type_Info_Ref then
         ------------------------------------------------------------
         -- Type does not already exist in the Dictionary, so create it.
         ------------------------------------------------------------
         RawDict.Create_Type
           (Name            => Name,
            The_Declaration => The_Declaration,
            Is_Private      => False,
            Is_Announcement => False,
            Comp_Unit       => Comp_Unit,
            Loc             => Declaration.Start_Position,
            Type_Mark       => Type_Mark);
      else
         ------------------------------------------------------------
         -- Type already exists in the Dictionary, so it MUST be
         -- either private, announced or both.
         ------------------------------------------------------------
         SystemErrors.RT_Assert
           (C       => RawDict.Get_Type_Announcement (Type_Mark => Type_Mark) /= RawDict.Null_Declaration_Info_Ref
              or else Type_Is_Private (Type_Mark => Type_Mark),
            Sys_Err => SystemErrors.Assertion_Failure,
            Msg     => "Add_Type_Declaration : Re-declaration of a non-announced, non-private type");
         RawDict.Set_Type_Declaration (Type_Mark => Type_Mark, The_Declaration => The_Declaration);
         if RawDict.Get_Symbol_Compilation_Unit (Item => RawDict.Get_Type_Symbol (Type_Mark)) = Comp_Unit then
            -- There was a usage of the type before its actual
            -- declaration, so update the declaration position.  The
            -- declaration must be in the same compilation unit as
            -- the usage (in the package specification) to prevent a
            -- cross-reference from the package specification to the
            -- package body.
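            -- For instance (hypothetical): an own variable annotation in a
            -- package specification can announce a type name, say Stack_Type,
            -- before the Ada declaration "type Stack_Type is ..." appears later
            -- in the same specification; Add_Type_Announcement created the
            -- symbol at the annotation, and the recorded location is now moved
            -- to the proper Ada declaration.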
RawDict.Set_Symbol_Location (Item => RawDict.Get_Type_Symbol (Type_Mark), Location => Declaration.Start_Position); end if; end if; Write_Type_Declaration (Type_Mark => Type_Mark, Discriminant => TypeDeclaration, Declaration => Declaration); end Add_Type_Declaration; -------------------------------------------------------------------------------- -- Add_Enumeration_Type -------------------------------------------------------------------------------- procedure Add_Enumeration_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; The_Type : out Symbol) is Type_Mark : RawDict.Type_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Enumeration_Type_Local (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; Type_Mark : out RawDict.Type_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope & --# Type_Mark from Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post Type_Mark /= RawDict.Null_Type_Info_Ref; is begin Add_Type_Declaration (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); Add_Enumeration_Type_Mark (Type_Mark => Type_Mark, Static => True); end Add_Enumeration_Type_Local; begin -- Add_Enumeration_Type Add_Enumeration_Type_Local (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); The_Type := RawDict.Get_Type_Symbol (Type_Mark); -- GAA External end Add_Enumeration_Type; -------------------------------------------------------------------------------- -- Add_Protected_Type -------------------------------------------------------------------------------- procedure Add_Protected_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; Mode : in Modes; Constrained : in Boolean; The_Type : out Symbol) is Type_Mark : RawDict.Type_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Protected_Type_Local (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; Mode : in Modes; Constrained : in Boolean; Type_Mark : out RawDict.Type_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Constrained, --# Context, --# Declaration, --# LexTokenManager.State, --# Mode, --# Name, --# Scope & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope & --# Type_Mark from Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post Type_Mark /= RawDict.Null_Type_Info_Ref; is The_Variable : RawDict.Variable_Info_Ref; The_Own_Variable : RawDict.Own_Variable_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Protected_Type_Mark (Type_Mark : in 
RawDict.Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location) --# global in out Dict; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# Type_Mark; is Protected_Info_Sym : Symbol; begin RawDict.CreateProtectedInfo (Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, InfoSym => Protected_Info_Sym); RawDict.Set_Type_Ancillary_Fields (Type_Mark => Type_Mark, The_Declaration => Protected_Info_Sym); RawDict.Set_Type_Discriminant (Type_Mark => Type_Mark, Discriminant => Protected_Type_Item); RawDict.Set_Type_Static (Type_Mark => Type_Mark, Static => False); RawDict.Set_Type_Equality_Defined (Type_Mark => Type_Mark, Equality_Defined => False); RawDict.Set_Type_Limited (Type_Mark => Type_Mark, Is_Limited => Always); RawDict.Set_Type_Atomic (Type_Mark => Type_Mark); end Add_Protected_Type_Mark; -------------------------------------------------------------------------------- procedure Add_Access_Type (Accessed_Type : in RawDict.Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location) --# global in out Dict; --# derives Dict from *, --# Accessed_Type, --# Comp_Unit, --# Declaration; is New_Access_Type : RawDict.Type_Info_Ref; begin -- Create a type record with the name "Access" to ensure we never look it up -- by mistake (access being reserved word). RawDict.Create_Type (Name => LexTokenManager.Access_Token, The_Declaration => RawDict.Null_Declaration_Info_Ref, Is_Private => False, Is_Announcement => False, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, Type_Mark => New_Access_Type); -- mark it as an access type RawDict.Set_Type_Discriminant (Type_Mark => New_Access_Type, Discriminant => Access_Type_Item); -- set other fields to "safe" values RawDict.Set_Type_Static (Type_Mark => New_Access_Type, Static => False); RawDict.Set_Type_Equality_Defined (Type_Mark => New_Access_Type, Equality_Defined => False); RawDict.Set_Type_Limited (Type_Mark => New_Access_Type, Is_Limited => Always); -- link type and access type together RawDict.Set_Type_Accesses (Type_Mark => Accessed_Type, The_Access => New_Access_Type); RawDict.Set_Type_Accesses (Type_Mark => New_Access_Type, The_Access => Accessed_Type); end Add_Access_Type; begin -- Add_Protected_Type_Local Add_Type_Declaration (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); Add_Protected_Type_Mark (Type_Mark => Type_Mark, Comp_Unit => Comp_Unit, Declaration => Declaration); -- We also declare an abstract own variable with the same name as the protected type -- at this point. 
The existence of such a variable simplifies the use of the protected -- type name in the annotations of its operations RawDict.Create_Variable (Name => Name, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, The_Variable => The_Variable); RawDict.Create_Own_Variable (Variable => The_Variable, Owner => RawDict.Get_Type_Symbol (Type_Mark), Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, The_Own_Variable => The_Own_Variable); RawDict.Set_Variable_Own_Variable (The_Variable => The_Variable, Own_Variable => The_Own_Variable); RawDict.Set_Protected_Type_Own_Variable (The_Protected_Type => Type_Mark, Own_Variable => The_Own_Variable); RawDict.Set_Type_Constrained (Type_Mark => Type_Mark, Constrained => Constrained); -- the type of this fictitious abstract own variable is the type itself RawDict.Set_Variable_Type (The_Variable => The_Variable, Type_Mark => Type_Mark); RawDict.Set_Variable_Abstract_Type (The_Variable => The_Variable, Abstract_Type_Mark => Type_Mark); RawDict.Set_Own_Variable_Typed (The_Own_Variable => The_Own_Variable); -- set some sensible values for the new own variable RawDict.Set_Own_Variable_Announced (The_Own_Variable => The_Own_Variable); RawDict.Set_Own_Variable_Initialized (The_Own_Variable => The_Own_Variable); RawDict.Set_Own_Variable_Mode (The_Own_Variable => The_Own_Variable, Mode => Mode); RawDict.Set_Own_Variable_Protected (The_Own_Variable => The_Own_Variable, Is_Protected => True); -- implicitly declare an access type to go with the PT Add_Access_Type (Accessed_Type => Type_Mark, Comp_Unit => Comp_Unit, Declaration => Declaration); end Add_Protected_Type_Local; begin -- Add_Protected_Type Add_Protected_Type_Local (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Mode => Mode, Constrained => Constrained, Type_Mark => Type_Mark); The_Type := RawDict.Get_Type_Symbol (Type_Mark); -- GAA External end Add_Protected_Type; -------------------------------------------------------------------------------- -- Add_Task_Type -------------------------------------------------------------------------------- procedure Add_Task_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; Constrained : in Boolean; The_Type : out Symbol) is Type_Mark : RawDict.Type_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Task_Type_Local (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; Constrained : in Boolean; Type_Mark : out RawDict.Type_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Constrained, --# Context, --# Declaration, --# LexTokenManager.State, --# Name, --# Scope & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope & --# Type_Mark from Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post Type_Mark /= RawDict.Null_Type_Info_Ref; is procedure Add_Task_Type_Mark (Type_Mark : in RawDict.Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location) --# global in out Dict; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# Type_Mark; is Task_Info_Sym : Symbol; begin RawDict.CreateTaskInfo (Comp_Unit => 
Comp_Unit, Loc => Declaration.Start_Position, InfoSym => Task_Info_Sym); RawDict.Set_Type_Ancillary_Fields (Type_Mark => Type_Mark, The_Declaration => Task_Info_Sym); RawDict.Set_Type_Discriminant (Type_Mark => Type_Mark, Discriminant => Task_Type_Item); RawDict.Set_Type_Static (Type_Mark => Type_Mark, Static => False); RawDict.Set_Type_Equality_Defined (Type_Mark => Type_Mark, Equality_Defined => False); RawDict.Set_Type_Limited (Type_Mark => Type_Mark, Is_Limited => Always); end Add_Task_Type_Mark; begin -- Add_Task_Type_Local Add_Type_Declaration (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); Add_Task_Type_Mark (Type_Mark => Type_Mark, Comp_Unit => Comp_Unit, Declaration => Declaration); RawDict.Set_Type_Constrained (Type_Mark => Type_Mark, Constrained => Constrained); end Add_Task_Type_Local; begin -- Add_Task_Type Add_Task_Type_Local (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Constrained => Constrained, Type_Mark => Type_Mark); The_Type := RawDict.Get_Type_Symbol (Type_Mark); -- GAA External end Add_Task_Type; -------------------------------------------------------------------------------- -- Add_Task_Or_Protected_Subtype -------------------------------------------------------------------------------- procedure Add_Task_Or_Protected_Subtype (Name : in LexTokenManager.Lex_String; Parent : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; The_Subtype : out Symbol) is Type_Mark : RawDict.Type_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Task_Or_Protected_Subtype_Local (Name : in LexTokenManager.Lex_String; Parent_Type : in RawDict.Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; Type_Mark : out RawDict.Type_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Parent_Type, --# Scope & --# Type_Mark from Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post Type_Mark /= RawDict.Null_Type_Info_Ref; is Extra_Info : Symbol; begin SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => Parent_Type) or else Is_Protected_Type (Type_Mark => Parent_Type), Sys_Err => SystemErrors.Precondition_Failure, Msg => "In call to AddTaskOrProtectedSubtype"); -- add a "basic" type record making sure we use any existing announced type Add_Type_Declaration (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); -- set its fields to suitable values: -- subtype is same sort of thing as its parent RawDict.Set_Type_Discriminant (Type_Mark => Type_Mark, Discriminant => RawDict.Get_Type_Discriminant (Type_Mark => Parent_Type)); -- make types as limited as possible RawDict.Set_Type_Static (Type_Mark => Type_Mark, Static => False); RawDict.Set_Type_Equality_Defined (Type_Mark => Type_Mark, Equality_Defined => False); RawDict.Set_Type_Limited (Type_Mark => Type_Mark, Is_Limited => Always); if Type_Is_Atomic (Parent_Type) then RawDict.Set_Type_Atomic (Type_Mark => Type_Mark); end if; -- link subtype to parent RawDict.Set_Type_Parent (Type_Mark => Type_Mark, 
Parent => Parent_Type); -- a legal subtype must have had its discriminants constrained RawDict.Set_Type_Constrained (Type_Mark => Type_Mark, Constrained => True); -- create subtype info record and hook it to Ancillary_Fields of subtype RawDict.CreateSubtype (Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, TheSubtype => Extra_Info); RawDict.Set_Type_Ancillary_Fields (Type_Mark => Type_Mark, The_Declaration => Extra_Info); Write_Type_Declaration (Type_Mark => Type_Mark, Discriminant => TypeDeclaration, Declaration => Declaration); end Add_Task_Or_Protected_Subtype_Local; begin -- Add_Task_Or_Protected_Subtype Add_Task_Or_Protected_Subtype_Local (Name => Name, Parent_Type => RawDict.Get_Type_Info_Ref (Parent), -- GAA External Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); The_Subtype := RawDict.Get_Type_Symbol (Type_Mark); -- GAA External end Add_Task_Or_Protected_Subtype; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- procedure Add_Representation_Clause (The_Type : in Symbol; Clause : in Location) is procedure Write_Representation_Clause (Type_Mark : in RawDict.Type_Info_Ref; Clause : in Location) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Clause, --# Dict, --# LexTokenManager.State, --# Type_Mark; is begin if SPARK_IO.Is_Open (Dict.TemporaryFile) then Write_String (Dict.TemporaryFile, "representation clause of "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Type_Symbol (Type_Mark)); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => Clause); Write_Line (Dict.TemporaryFile, " ;"); end if; end Write_Representation_Clause; begin -- Add_Representation_Clause Write_Representation_Clause (Type_Mark => RawDict.Get_Type_Info_Ref (The_Type), -- GAA External Clause => Clause); end Add_Representation_Clause; -------------------------------------------------------------------------------- procedure AddEnumerationLiteralRepresentation (Literal : in Symbol; Code : in Integer) is begin if SPARK_IO.Is_Open (Dict.TemporaryFile) then Write_String (Dict.TemporaryFile, "enumeration literal representation of "); Write_Name (File => Dict.TemporaryFile, Item => Literal); Write_String (Dict.TemporaryFile, " is "); Write_Integer (Dict.TemporaryFile, Code); Write_Line (Dict.TemporaryFile, " ;"); end if; end AddEnumerationLiteralRepresentation; -------------------------------------------------------------------------------- -- Add_Integer_Type -------------------------------------------------------------------------------- procedure Add_Integer_Type_Local (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Scope : in Scopes; Context : in Contexts; Type_Mark : out RawDict.Type_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Context, --# Declaration, --# LexTokenManager.State, --# Lower, --# Name, --# Scope, --# Upper & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope & --# Type_Mark from Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; 
--# post Type_Mark /= RawDict.Null_Type_Info_Ref; is begin Add_Type_Declaration (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); Add_Integer_Type_Mark (Type_Mark => Type_Mark, Static => True, Lower => Lower, Upper => Upper); RawDict.Set_Type_Derived (Type_Mark => Type_Mark, Is_Derived => True); end Add_Integer_Type_Local; -------------------------------------------------------------------------------- procedure Add_Integer_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Scope : in Scopes; Context : in Contexts; The_Type : out Symbol) is Type_Mark : RawDict.Type_Info_Ref; begin Add_Integer_Type_Local (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Lower => Lower, Upper => Upper, Scope => Scope, Context => Context, Type_Mark => Type_Mark); The_Type := RawDict.Get_Type_Symbol (Type_Mark); -- GAA External end Add_Integer_Type; -------------------------------------------------------------------------------- -- Add_Predef_Integer_Type -------------------------------------------------------------------------------- procedure Add_Predef_Integer_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Scope : in Scopes; Context : in Contexts) is Type_Mark : RawDict.Type_Info_Ref; begin Add_Type_Declaration (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); Add_Integer_Type_Mark (Type_Mark => Type_Mark, Static => True, Lower => Lower, Upper => Upper); end Add_Predef_Integer_Type; -------------------------------------------------------------------------------- -- Add_Modular_Type -------------------------------------------------------------------------------- procedure Add_Modular_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Modulus : in LexTokenManager.Lex_String; Scope : in Scopes; Context : in Contexts; The_Type : out Symbol) is Type_Mark : RawDict.Type_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Modular_Type_Local (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Modulus : in LexTokenManager.Lex_String; Scope : in Scopes; Context : in Contexts; Type_Mark : out RawDict.Type_Info_Ref) --# global in out Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Context, --# Declaration, --# LexTokenManager.State, --# Modulus, --# Name, --# Scope & --# LexTokenManager.State from *, --# Modulus & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope & --# Type_Mark from Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post Type_Mark /= RawDict.Null_Type_Info_Ref; is Upper : LexTokenManager.Lex_String; Upper_Value : Maths.Value; Ok : Maths.ErrorCode; begin Add_Type_Declaration (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); -- T'Last is (T'Modulus - 1), so... 
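      -- For example (illustrative only): "type Byte is mod 256;" has
      -- T'Modulus = 256, so the subtraction below yields T'Last = 255, while
      -- T'First of a modular type is always 0 (hence
      -- Lower => LexTokenManager.Zero_Value in the call to
      -- Add_Modular_Type_Mark).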
Maths.Subtract (Maths.ValueRep (Modulus), Maths.OneInteger, -- to get Upper_Value, Ok); if Ok = Maths.NoError then Maths.StorageRep (Upper_Value, Upper); else Upper := LexTokenManager.Null_String; end if; Add_Modular_Type_Mark (Type_Mark => Type_Mark, Static => True, Lower => LexTokenManager.Zero_Value, Upper => Upper, Modulus => Modulus); end Add_Modular_Type_Local; begin -- Add_Modular_Type Add_Modular_Type_Local (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Modulus => Modulus, Scope => Scope, Context => Context, Type_Mark => Type_Mark); The_Type := RawDict.Get_Type_Symbol (Type_Mark); -- GAA External end Add_Modular_Type; -------------------------------------------------------------------------------- -- Add_Abstract_Proof_Type -------------------------------------------------------------------------------- procedure Add_Abstract_Proof_Type_Local (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Type_Mark : out RawDict.Type_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope & --# Type_Mark from Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post Type_Mark /= RawDict.Null_Type_Info_Ref; is begin Add_Type_Declaration (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => ProgramContext, Type_Mark => Type_Mark); RawDict.Set_Type_Discriminant (Type_Mark => Type_Mark, Discriminant => Abstract_Proof_Type_Item); end Add_Abstract_Proof_Type_Local; -------------------------------------------------------------------------------- procedure Add_Abstract_Proof_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; The_Type : out Symbol) is Type_Mark : RawDict.Type_Info_Ref; begin Add_Abstract_Proof_Type_Local (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Type_Mark => Type_Mark); The_Type := RawDict.Get_Type_Symbol (Type_Mark); -- GAA External end Add_Abstract_Proof_Type; -------------------------------------------------------------------------------- -- Add_Default_Abstract_Proof_Type -------------------------------------------------------------------------------- procedure Add_Default_Abstract_Proof_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; The_Type : out Symbol) is Type_Mark : RawDict.Type_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Default_Abstract_Proof_Type_Local (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Type_Mark : out RawDict.Type_Info_Ref) --# global in out Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Dict, --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope & --# LexTokenManager.State from *, --# Name & --# Type_Mark from Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post Type_Mark /= RawDict.Null_Type_Info_Ref; is E_Name : E_Strings.T; Type_Name : LexTokenManager.Lex_String; begin -- Create name for type by appending to __type 
to own variable name (e.g. state__type) E_Name := LexTokenManager.Lex_String_To_String (Lex_Str => Name); E_Strings.Append_String (E_Str => E_Name, Str => "__type"); LexTokenManager.Insert_Examiner_String (Str => E_Name, Lex_Str => Type_Name); Add_Abstract_Proof_Type_Local (Name => Type_Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Type_Mark => Type_Mark); end Add_Default_Abstract_Proof_Type_Local; begin -- Add_Default_Abstract_Proof_Type Add_Default_Abstract_Proof_Type_Local (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Type_Mark => Type_Mark); The_Type := RawDict.Get_Type_Symbol (Type_Mark); -- GAA External end Add_Default_Abstract_Proof_Type; -------------------------------------------------------------------------------- -- Add_Floating_Point_Type -------------------------------------------------------------------------------- procedure Add_Floating_Point_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Error_Bound : in LexTokenManager.Lex_String; Scope : in Scopes; Context : in Contexts; The_Type : out Symbol) is Type_Mark : RawDict.Type_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Floating_Point_Type_Local (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Error_Bound : in LexTokenManager.Lex_String; Scope : in Scopes; Context : in Contexts; Type_Mark : out RawDict.Type_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Context, --# Declaration, --# Error_Bound, --# LexTokenManager.State, --# Lower, --# Name, --# Scope, --# Upper & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope & --# Type_Mark from Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post Type_Mark /= RawDict.Null_Type_Info_Ref; is begin Add_Type_Declaration (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); Add_Floating_Point_Type_Mark (Type_Mark => Type_Mark, Static => True); RawDict.Set_Type_Lower (Type_Mark => Type_Mark, Lower => Lower); RawDict.Set_Type_Upper (Type_Mark => Type_Mark, Upper => Upper); RawDict.Set_Type_Error_Bound (Type_Mark => Type_Mark, Error_Bound => Error_Bound); RawDict.Set_Type_Derived (Type_Mark => Type_Mark, Is_Derived => True); end Add_Floating_Point_Type_Local; begin -- Add_Floating_Point_Type Add_Floating_Point_Type_Local (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Lower => Lower, Upper => Upper, Error_Bound => Error_Bound, Scope => Scope, Context => Context, Type_Mark => Type_Mark); The_Type := RawDict.Get_Type_Symbol (Type_Mark); -- GAA External end Add_Floating_Point_Type; -------------------------------------------------------------------------------- -- Add_Predef_Floating_Point_Type -------------------------------------------------------------------------------- procedure Add_Predef_Floating_Point_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Lower : in LexTokenManager.Lex_String; Upper : 
in LexTokenManager.Lex_String; Error_Bound : in LexTokenManager.Lex_String; Scope : in Scopes; Context : in Contexts) is Type_Mark : RawDict.Type_Info_Ref; begin Add_Type_Declaration (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); Add_Floating_Point_Type_Mark (Type_Mark => Type_Mark, Static => True); RawDict.Set_Type_Lower (Type_Mark => Type_Mark, Lower => Lower); RawDict.Set_Type_Upper (Type_Mark => Type_Mark, Upper => Upper); RawDict.Set_Type_Error_Bound (Type_Mark => Type_Mark, Error_Bound => Error_Bound); end Add_Predef_Floating_Point_Type; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- procedure SetTypeIsWellformed (TypeMark : in Symbol; Wellformed : in Boolean) is begin RawDict.Set_Type_Wellformed (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark), -- GAA External Wellformed => Wellformed); end SetTypeIsWellformed; -------------------------------------------------------------------------------- procedure SetBaseType (TypeMark, BaseType : in Symbol) is begin RawDict.Set_Type_Base_Type (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark), -- GAA External Base_Type => RawDict.Get_Type_Info_Ref (BaseType)); -- GAA External end SetBaseType; -------------------------------------------------------------------------------- -- Add_Fixed_Point_Type -------------------------------------------------------------------------------- procedure Add_Fixed_Point_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Error_Bound : in LexTokenManager.Lex_String; Scope : in Scopes; Context : in Contexts; The_Type : out Symbol) is Type_Mark : RawDict.Type_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Fixed_Point_Type_Local (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Error_Bound : in LexTokenManager.Lex_String; Scope : in Scopes; Context : in Contexts; Type_Mark : out RawDict.Type_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Context, --# Declaration, --# Error_Bound, --# LexTokenManager.State, --# Lower, --# Name, --# Scope, --# Upper & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope & --# Type_Mark from Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post Type_Mark /= RawDict.Null_Type_Info_Ref; is begin Add_Type_Declaration (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); Add_Fixed_Point_Type_Mark (Type_Mark => Type_Mark, Static => True); RawDict.Set_Type_Lower (Type_Mark => Type_Mark, Lower => Lower); RawDict.Set_Type_Upper (Type_Mark => Type_Mark, Upper => Upper); RawDict.Set_Type_Error_Bound (Type_Mark => Type_Mark, Error_Bound => Error_Bound); end Add_Fixed_Point_Type_Local; begin -- Add_Fixed_Point_Type Add_Fixed_Point_Type_Local (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Lower => Lower, Upper => Upper, Error_Bound => Error_Bound, Scope => 
Scope, Context => Context, Type_Mark => Type_Mark); The_Type := RawDict.Get_Type_Symbol (Type_Mark); -- GAA External end Add_Fixed_Point_Type; -------------------------------------------------------------------------------- -- Add_Array_Type -------------------------------------------------------------------------------- procedure Add_Array_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; Constrained : in Boolean; Component_Type : in Symbol; Component_Type_Reference : in Location; The_Type : out Symbol) is Type_Mark : RawDict.Type_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Array_Type_Local (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; Constrained : in Boolean; The_Component_Type : in RawDict.Type_Info_Ref; Component_Type_Reference : in Location; Type_Mark : out RawDict.Type_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Constrained, --# Context, --# Declaration, --# LexTokenManager.State, --# Name, --# Scope, --# The_Component_Type & --# SPARK_IO.File_Sys from *, --# Component_Type_Reference, --# Comp_Unit, --# Constrained, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope, --# The_Component_Type & --# Type_Mark from Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post Type_Mark /= RawDict.Null_Type_Info_Ref; is begin Add_Type_Declaration (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); Add_Array_Type_Mark (Type_Mark => Type_Mark, Component_Type => The_Component_Type, Static => False); RawDict.Set_Type_Constrained (Type_Mark => Type_Mark, Constrained => Constrained); if The_Component_Type /= Get_Unknown_Type_Mark then AddOtherReference (RawDict.Get_Type_Symbol (The_Component_Type), GetRegion (Scope), Component_Type_Reference); end if; end Add_Array_Type_Local; begin -- Add_Array_Type Add_Array_Type_Local (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Constrained => Constrained, The_Component_Type => RawDict.Get_Type_Info_Ref (Component_Type), -- GAA External Component_Type_Reference => Component_Type_Reference, Type_Mark => Type_Mark); The_Type := RawDict.Get_Type_Symbol (Type_Mark); -- GAA External end Add_Array_Type; -------------------------------------------------------------------------------- -- Add_Array_Index -------------------------------------------------------------------------------- procedure Add_Array_Index (The_Array_Type : in RawDict.Type_Info_Ref; Index_Type : in RawDict.Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; The_Array_Index : out RawDict.Array_Index_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# Index_Type, --# The_Array_Type & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# Index_Type, --# LexTokenManager.State, --# The_Array_Type & --# The_Array_Index from Comp_Unit, --# Declaration, --# Dict, --# Index_Type; --# post The_Array_Index /= RawDict.Null_Array_Index_Info_Ref; is Current : 
RawDict.Array_Index_Info_Ref; begin RawDict.Create_Array_Index (Index_Type => Index_Type, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, The_Array_Index => The_Array_Index); Current := RawDict.Get_Type_Last_Array_Index (Type_Mark => The_Array_Type); if Current = RawDict.Null_Array_Index_Info_Ref then RawDict.Set_Type_First_Array_Index (Type_Mark => The_Array_Type, Array_Index => The_Array_Index); else RawDict.Set_Next_Array_Index (The_Array_Index => Current, Next => The_Array_Index); end if; RawDict.Set_Type_Last_Array_Index (Type_Mark => The_Array_Type, Array_Index => The_Array_Index); if Index_Type /= Get_Unknown_Type_Mark then AddOtherReference (RawDict.Get_Type_Symbol (Index_Type), GetRegion (Get_Type_Scope (The_Array_Type)), Declaration); end if; end Add_Array_Index; procedure AddArrayIndex (TheArrayType : in Symbol; IndexType : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; TheArrayIndex : out Symbol) is The_Array_Index : RawDict.Array_Index_Info_Ref; begin Add_Array_Index (The_Array_Type => RawDict.Get_Type_Info_Ref (TheArrayType), -- GAA External Index_Type => RawDict.Get_Type_Info_Ref (IndexType), -- GAA External Comp_Unit => Comp_Unit, Declaration => Declaration, The_Array_Index => The_Array_Index); TheArrayIndex := RawDict.Get_Array_Index_Symbol (The_Array_Index); -- GAA External end AddArrayIndex; -------------------------------------------------------------------------------- -- Add_Record_Type -------------------------------------------------------------------------------- procedure Add_Record_Type (Name : in LexTokenManager.Lex_String; Is_Tagged_Type : in Boolean; Extends : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; The_Type : out Symbol) is Type_Mark : RawDict.Type_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Record_Type_Local (Name : in LexTokenManager.Lex_String; Is_Tagged_Type : in Boolean; Extends : in RawDict.Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; Type_Mark : out RawDict.Type_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Context, --# Declaration, --# Extends, --# Is_Tagged_Type, --# LexTokenManager.State, --# Name, --# Scope & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope & --# Type_Mark from Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post Type_Mark /= RawDict.Null_Type_Info_Ref; is begin Add_Type_Declaration (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); Add_Record_Type_Mark (Type_Mark => Type_Mark, Is_Tagged_Type => Is_Tagged_Type, Extends => Extends); -- mark package as containing a tagged type if necessary if Is_Tagged_Type then RawDict.Set_Package_Declares_Tagged_Type (The_Package => RawDict.Get_Package_Info_Ref (GetRegion (Scope))); end if; end Add_Record_Type_Local; begin -- Add_Record_Type Add_Record_Type_Local (Name => Name, Is_Tagged_Type => Is_Tagged_Type, Extends => RawDict.Get_Type_Info_Ref (Extends), -- GAA External Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); The_Type := RawDict.Get_Type_Symbol 
(Type_Mark); -- GAA External end Add_Record_Type; -------------------------------------------------------------------------------- -- Add_Record_Component -------------------------------------------------------------------------------- procedure Add_Record_Component (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; The_Record_Type : in RawDict.Type_Info_Ref; The_Component_Type : in RawDict.Type_Info_Ref; InheritedField : in Boolean; ComponentTypeReference : in Location) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# InheritedField, --# Name, --# The_Component_Type, --# The_Record_Type & --# SPARK_IO.File_Sys from *, --# ComponentTypeReference, --# Comp_Unit, --# Declaration, --# Dict, --# InheritedField, --# LexTokenManager.State, --# Name, --# The_Component_Type, --# The_Record_Type; is separate; -------------------------------------------------------------------------------- procedure AddRecordComponent (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; TheRecordType : in Symbol; TheComponentType : in Symbol; InheritedField : in Boolean; ComponentTypeReference : in Location) is begin Add_Record_Component (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, The_Record_Type => RawDict.Get_Type_Info_Ref (TheRecordType), -- GAA External The_Component_Type => RawDict.Get_Type_Info_Ref (TheComponentType), -- GAA External InheritedField => InheritedField, ComponentTypeReference => ComponentTypeReference); end AddRecordComponent; -------------------------------------------------------------------------------- -- Add_Generic_Type -------------------------------------------------------------------------------- procedure Add_Generic_Type_Local (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Type_Mark : out RawDict.Type_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope & --# Type_Mark from Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post Type_Mark /= RawDict.Null_Type_Info_Ref; is begin Add_Type_Declaration (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => ProgramContext, Type_Mark => Type_Mark); RawDict.Set_Type_Discriminant (Type_Mark => Type_Mark, Discriminant => Generic_Type_Item); end Add_Generic_Type_Local; -------------------------------------------------------------------------------- procedure Add_Generic_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; The_Type : out Symbol) is Type_Mark : RawDict.Type_Info_Ref; begin Add_Generic_Type_Local (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Type_Mark => Type_Mark); The_Type := RawDict.Get_Type_Symbol (Type_Mark); -- GAA External end Add_Generic_Type; -------------------------------------------------------------------------------- -- Set_Generic_Private_Type -------------------------------------------------------------------------------- procedure Set_Generic_Private_Type_Local (Type_Mark : in RawDict.Type_Info_Ref; Is_Limited : in Boolean) 
--# global in out Dict; --# derives Dict from *, --# Is_Limited, --# Type_Mark; is begin RawDict.Set_Type_Kind_Of_Generic (Type_Mark => Type_Mark, Kind_Of_Generic => Generic_Private_Type); if Is_Limited then RawDict.Set_Type_Limited (Type_Mark => Type_Mark, Is_Limited => Sometimes); RawDict.Set_Type_Limited_Private (Type_Mark => Type_Mark); else RawDict.Set_Type_Limited (Type_Mark => Type_Mark, Is_Limited => Never); end if; end Set_Generic_Private_Type_Local; -------------------------------------------------------------------------------- procedure Set_Generic_Private_Type (The_Type : in Symbol; Is_Limited : in Boolean) is begin Set_Generic_Private_Type_Local (Type_Mark => RawDict.Get_Type_Info_Ref (The_Type), -- GAA External Is_Limited => Is_Limited); end Set_Generic_Private_Type; -------------------------------------------------------------------------------- -- Set_Generic_Discrete_Type -------------------------------------------------------------------------------- procedure Set_Generic_Discrete_Type (The_Type : in Symbol) is begin RawDict.Set_Type_Kind_Of_Generic (Type_Mark => RawDict.Get_Type_Info_Ref (The_Type), -- GAA External Kind_Of_Generic => Generic_Discrete_Type); end Set_Generic_Discrete_Type; -------------------------------------------------------------------------------- -- Set_Generic_Integer_Type -------------------------------------------------------------------------------- procedure Set_Generic_Integer_Type (The_Type : in Symbol) is begin RawDict.Set_Type_Kind_Of_Generic (Type_Mark => RawDict.Get_Type_Info_Ref (The_Type), -- GAA External Kind_Of_Generic => Generic_Integer_Type); end Set_Generic_Integer_Type; -------------------------------------------------------------------------------- -- Set_Generic_Modular_Type -------------------------------------------------------------------------------- procedure Set_Generic_Modular_Type (The_Type : in Symbol) is begin RawDict.Set_Type_Kind_Of_Generic (Type_Mark => RawDict.Get_Type_Info_Ref (The_Type), -- GAA External Kind_Of_Generic => Generic_Modular_Type); end Set_Generic_Modular_Type; -------------------------------------------------------------------------------- -- Set_Generic_Floating_Point_Type -------------------------------------------------------------------------------- procedure Set_Generic_Floating_Point_Type (The_Type : in Symbol) is begin RawDict.Set_Type_Kind_Of_Generic (Type_Mark => RawDict.Get_Type_Info_Ref (The_Type), -- GAA External Kind_Of_Generic => Generic_Floating_Point_Type); end Set_Generic_Floating_Point_Type; -------------------------------------------------------------------------------- -- Set_Generic_Fixed_Point_Type -------------------------------------------------------------------------------- procedure Set_Generic_Fixed_Point_Type (The_Type : in Symbol) is begin RawDict.Set_Type_Kind_Of_Generic (Type_Mark => RawDict.Get_Type_Info_Ref (The_Type), -- GAA External Kind_Of_Generic => Generic_Fixed_Point_Type); end Set_Generic_Fixed_Point_Type; -------------------------------------------------------------------------------- -- Set_Generic_Array_Type -------------------------------------------------------------------------------- procedure Set_Generic_Array_Type (The_Type : in Symbol) is procedure Set_Generic_Array_Type_Local (Type_Mark : in RawDict.Type_Info_Ref) --# global in out Dict; --# derives Dict from *, --# Type_Mark; is begin RawDict.Set_Type_Discriminant (Type_Mark => Type_Mark, Discriminant => Generic_Type_Item); RawDict.Set_Type_Kind_Of_Generic (Type_Mark => Type_Mark, Kind_Of_Generic 
=> Generic_Array_Type); end Set_Generic_Array_Type_Local; begin -- Set_Generic_Array_Type Set_Generic_Array_Type_Local (Type_Mark => RawDict.Get_Type_Info_Ref (The_Type)); -- GAA External end Set_Generic_Array_Type; -------------------------------------------------------------------------------- -- Add_Generic_Object -------------------------------------------------------------------------------- procedure Add_Generic_Object (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; The_Type : in Symbol; The_Object_Sym : out Symbol) is The_Object : RawDict.Constant_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Generic_Object_Local (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Type_Mark : in RawDict.Type_Info_Ref; The_Object : out RawDict.Constant_Info_Ref) --# global in out Dict; --# derives Dict, --# The_Object from Comp_Unit, --# Declaration, --# Dict, --# Name, --# Scope, --# Type_Mark; --# post The_Object /= RawDict.Null_Constant_Info_Ref; is The_Declaration : RawDict.Declaration_Info_Ref; begin Add_Declaration (Comp_Unit => Comp_Unit, Loc => Declaration, Scope => Scope, Context => ProgramContext, The_Declaration => The_Declaration); RawDict.Create_Constant (Name => Name, Type_Mark => Type_Mark, Static => False, The_Declaration => The_Declaration, Is_Deferred => False, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, The_Constant => The_Object); end Add_Generic_Object_Local; begin -- Add_Generic_Object Add_Generic_Object_Local (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Type_Mark => RawDict.Get_Type_Info_Ref (The_Type), -- GAA External The_Object => The_Object); The_Object_Sym := RawDict.Get_Constant_Symbol (The_Object); -- GAA External end Add_Generic_Object; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- procedure AddRecordComponentRepresentation (Component : in Symbol; Clause : in Location; RelativeAddress : in Natural; FirstBitPosition : in Natural; LastBitPosition : in Natural) is begin if SPARK_IO.Is_Open (Dict.TemporaryFile) then Write_String (Dict.TemporaryFile, "record component representation of "); Write_Name (File => Dict.TemporaryFile, Item => Component); Write_Space (File => Dict.TemporaryFile); Write_Integer (Dict.TemporaryFile, RelativeAddress); Write_String (Dict.TemporaryFile, " range "); Write_Integer (Dict.TemporaryFile, FirstBitPosition); Write_String (Dict.TemporaryFile, " ... 
"); Write_Integer (Dict.TemporaryFile, LastBitPosition); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => Clause); Write_Line (Dict.TemporaryFile, " ;"); end if; end AddRecordComponentRepresentation; -------------------------------------------------------------------------------- procedure AddAlignmentClause (TheType : in Symbol; Clause : in Location) is begin if SPARK_IO.Is_Open (Dict.TemporaryFile) then Write_String (Dict.TemporaryFile, "alignment clause of "); Write_Name (File => Dict.TemporaryFile, Item => TheType); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => Clause); Write_Line (Dict.TemporaryFile, " ;"); end if; end AddAlignmentClause; -------------------------------------------------------------------------------- procedure AddLoop (Scope : in Scopes; Comp_Unit : in ContextManager.UnitDescriptors; LoopStatement : in Location; TheLoop : out Symbol) is separate; -------------------------------------------------------------------------------- procedure AddLoopName (Name : in LexTokenManager.Lex_String; TheLoop : in Symbol) is begin RawDict.SetLoopName (Name, TheLoop); end AddLoopName; -------------------------------------------------------------------------------- -- AddLoopParameter -------------------------------------------------------------------------------- procedure AddLoopParameter (TheLoop : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Name : in LexTokenManager.Lex_String; TypeMark : in Symbol; StaticRange : in Boolean; IsReverse : in Boolean; TypeReference : in Location) is procedure Add_Loop_Parameter (The_Loop : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Name : in LexTokenManager.Lex_String; Type_Mark : in RawDict.Type_Info_Ref; Static_Range : in Boolean; Is_Reverse : in Boolean; Type_Reference : in Location) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# Is_Reverse, --# Name, --# Static_Range, --# The_Loop, --# Type_Mark & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# Is_Reverse, --# LexTokenManager.State, --# Name, --# Static_Range, --# The_Loop, --# Type_Mark, --# Type_Reference; is Loop_Parameter : Symbol; begin RawDict.CreateLoopParameter (Name => Name, Type_Mark => Type_Mark, TheLoop => The_Loop, HasStaticRange => Static_Range, IsReverse => Is_Reverse, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, LoopParameter => Loop_Parameter); RawDict.SetLoopParameter (The_Loop, Loop_Parameter); if Type_Mark /= Get_Unknown_Type_Mark then AddOtherReference (RawDict.Get_Type_Symbol (Type_Mark), The_Loop, Type_Reference); end if; end Add_Loop_Parameter; begin -- AddLoopParameter Add_Loop_Parameter (The_Loop => TheLoop, Comp_Unit => Comp_Unit, Declaration => Declaration, Name => Name, Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark), -- GAA External Static_Range => StaticRange, Is_Reverse => IsReverse, Type_Reference => TypeReference); end AddLoopParameter; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- procedure MarkLoopHasExits (TheLoop : in Symbol) is begin if RawDict.GetSymbolDiscriminant (TheLoop) = LoopSymbol then RawDict.SetLoopHasExits (TheLoop); end if; -- if it is not a loop then the exit statement must be illegally placed so we do nothing end 
MarkLoopHasExits; -------------------------------------------------------------------------------- procedure SetLoopExitExpn (ForLoop : in Symbol; Expn : in Natural) is begin RawDict.SetLoopExitExpn (ForLoop, Expn); end SetLoopExitExpn; -------------------------------------------------------------------------------- procedure SetLoopEntryExpn (ForLoop : in Symbol; Expn : in Natural) is begin RawDict.SetLoopEntryExpn (ForLoop, Expn); end SetLoopEntryExpn; -------------------------------------------------------------------------------- function Loop_Var_Only_An_Import (OriginalVariable : Symbol; TheLoop : Symbol) return Boolean --# global in Dict; is CurrentRegion : Symbol; IsOnlyAnImport : Boolean := False; Stop : Boolean := False; begin CurrentRegion := RawDict.GetLoopRegion (TheLoop); while not Stop loop case RawDict.GetSymbolDiscriminant (CurrentRegion) is when Subprogram_Symbol => --# accept Flow, 41, "Stable expression expected here"; case RawDict.GetSymbolDiscriminant (OriginalVariable) is when Variable_Symbol => IsOnlyAnImport := (Is_Imported_Subprogram_Variable (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => CurrentRegion), Abstraction => IsAbstract, The_Variable => RawDict.Get_Variable_Info_Ref (Item => OriginalVariable)) or else Is_Imported_Subprogram_Variable (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => CurrentRegion), Abstraction => IsRefined, The_Variable => RawDict.Get_Variable_Info_Ref (Item => OriginalVariable))) and then not (Is_Exported_Subprogram_Variable (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => CurrentRegion), Abstraction => IsAbstract, The_Variable => RawDict.Get_Variable_Info_Ref (Item => OriginalVariable)) or else Is_Exported_Subprogram_Variable (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => CurrentRegion), Abstraction => IsRefined, The_Variable => RawDict.Get_Variable_Info_Ref (Item => OriginalVariable))); when Subprogram_Parameter_Symbol => IsOnlyAnImport := (Is_Imported_Subprogram_Subprogram_Parameter (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => CurrentRegion), Abstraction => IsAbstract, The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => OriginalVariable), Is_Implicit_Proof_Function => False) or else Is_Imported_Subprogram_Subprogram_Parameter (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => CurrentRegion), Abstraction => IsRefined, The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => OriginalVariable), Is_Implicit_Proof_Function => False)) and then not (Is_Exported_Subprogram_Subprogram_Parameter (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => CurrentRegion), Abstraction => IsAbstract, The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => OriginalVariable), Is_Implicit_Proof_Function => False) or else Is_Exported_Subprogram_Subprogram_Parameter (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => CurrentRegion), Abstraction => IsRefined, The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => OriginalVariable), Is_Implicit_Proof_Function => False)); when others => -- non-exec code IsOnlyAnImport := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Loop_Var_Only_An_Import"); end case; --# end accept; Stop := True; when ImplicitProofFunctionSymbol => --# accept Flow, 41, "Stable expression expected here"; case RawDict.GetSymbolDiscriminant (OriginalVariable) is when Variable_Symbol => IsOnlyAnImport := 
(Is_Imported_Subprogram_Variable (The_Subprogram => RawDict.GetImplicitProofFunctionAdaFunction (CurrentRegion), Abstraction => IsAbstract, The_Variable => RawDict.Get_Variable_Info_Ref (Item => OriginalVariable)) or else Is_Imported_Subprogram_Variable (The_Subprogram => RawDict.GetImplicitProofFunctionAdaFunction (CurrentRegion), Abstraction => IsRefined, The_Variable => RawDict.Get_Variable_Info_Ref (Item => OriginalVariable))) and then not (Is_Exported_Subprogram_Variable (The_Subprogram => RawDict.GetImplicitProofFunctionAdaFunction (CurrentRegion), Abstraction => IsAbstract, The_Variable => RawDict.Get_Variable_Info_Ref (Item => OriginalVariable)) or else Is_Exported_Subprogram_Variable (The_Subprogram => RawDict.GetImplicitProofFunctionAdaFunction (CurrentRegion), Abstraction => IsRefined, The_Variable => RawDict.Get_Variable_Info_Ref (Item => OriginalVariable))); when Subprogram_Parameter_Symbol => IsOnlyAnImport := (Is_Imported_Subprogram_Subprogram_Parameter (The_Subprogram => RawDict.GetImplicitProofFunctionAdaFunction (CurrentRegion), Abstraction => IsAbstract, The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => OriginalVariable), Is_Implicit_Proof_Function => True) or else Is_Imported_Subprogram_Subprogram_Parameter (The_Subprogram => RawDict.GetImplicitProofFunctionAdaFunction (CurrentRegion), Abstraction => IsRefined, The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => OriginalVariable), Is_Implicit_Proof_Function => True)) and then not (Is_Exported_Subprogram_Subprogram_Parameter (The_Subprogram => RawDict.GetImplicitProofFunctionAdaFunction (CurrentRegion), Abstraction => IsAbstract, The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => OriginalVariable), Is_Implicit_Proof_Function => True) or else Is_Exported_Subprogram_Subprogram_Parameter (The_Subprogram => RawDict.GetImplicitProofFunctionAdaFunction (CurrentRegion), Abstraction => IsRefined, The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => OriginalVariable), Is_Implicit_Proof_Function => True)); when others => -- non-exec code IsOnlyAnImport := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Loop_Var_Only_An_Import"); end case; --# end accept; Stop := True; when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => CurrentRegion)), Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Loop_Var_Only_An_Import"); IsOnlyAnImport := (Is_Imported_Task_Type_Variable (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => CurrentRegion), Abstraction => IsAbstract, The_Variable => RawDict.Get_Variable_Info_Ref (OriginalVariable)) or else Is_Imported_Task_Type_Variable (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => CurrentRegion), Abstraction => IsRefined, The_Variable => RawDict.Get_Variable_Info_Ref (OriginalVariable))) and then not (Is_Exported_Task_Type_Variable (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => CurrentRegion), Abstraction => IsAbstract, The_Variable => RawDict.Get_Variable_Info_Ref (OriginalVariable)) or else Is_Exported_Task_Type_Variable (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => CurrentRegion), Abstraction => IsRefined, The_Variable => RawDict.Get_Variable_Info_Ref (OriginalVariable))); Stop := True; when others => CurrentRegion := RawDict.GetLoopRegion (CurrentRegion); end case; end loop; return IsOnlyAnImport; end Loop_Var_Only_An_Import; 
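-- Note (descriptive comment on Loop_Var_Only_An_Import above): it yields True when the
-- given variable or parameter is an import, but not an export, of the region enclosing
-- the loop (subprogram, implicit proof function or task type), in either the abstract
-- or the refined view.  The loop "on entry" routines that follow use it to decide
-- whether the original variable can stand for its own on-entry value, since an
-- import-only variable is not modified in the loop and needs no separate
-- on-entry variable.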
-------------------------------------------------------------------------------- procedure IdempotentCreateLoopOnEntryVariable (OriginalVariable : in Symbol; TheLoop : in Symbol; OnEntryVariable : out Symbol) is Found : Boolean := False; OnEntryVariableToTry : Symbol; NewOnEntryVariable : Symbol; begin -- Before creating a new variable, check whether the standard variable -- itself is acceptable in this context due to non-modification. if (RawDict.GetSymbolDiscriminant (OriginalVariable) = Variable_Symbol or else RawDict.GetSymbolDiscriminant (OriginalVariable) = Subprogram_Parameter_Symbol) and then Loop_Var_Only_An_Import (OriginalVariable => OriginalVariable, TheLoop => TheLoop) then OnEntryVariable := OriginalVariable; else -- Search linked list of OnEntryVariables associated with TheLoop. If an entry matching -- OriginalVariable is found then stop at the current search position; this is the -- required OnEntryVariable symbol. Otherwise, create a new entry and link it in to the list. OnEntryVariableToTry := RawDict.GetLoopOnEntryVars (TheLoop); while OnEntryVariableToTry /= NullSymbol loop Found := RawDict.GetLoopEntryVariableOriginalVar (OnEntryVariableToTry) = OriginalVariable; exit when Found; OnEntryVariableToTry := RawDict.GetLoopEntryVariableNext (OnEntryVariableToTry); end loop; if Found then -- Return existing var OnEntryVariable := OnEntryVariableToTry; else RawDict.CreateLoopEntryVariable (OriginalVar => OriginalVariable, TheLoop => TheLoop, Comp_Unit => ContextManager.NullUnit, Loc => LexTokenManager.Null_Token_Position, LoopEntryVariable => NewOnEntryVariable); -- Insert it at the head of the list RawDict.SetLoopEntryVariableNext (NewOnEntryVariable, RawDict.GetLoopOnEntryVars (TheLoop)); RawDict.SetLoopOnEntryVars (TheLoop, NewOnEntryVariable); -- Return newly created var OnEntryVariable := NewOnEntryVariable; end if; end if; end IdempotentCreateLoopOnEntryVariable; -------------------------------------------------------------------------------- function GetLoopOnEntryVariable (OriginalVariable : Symbol; TheLoop : Symbol) return Symbol is Result : Symbol := NullSymbol; -- Default result for error case only OnEntryVariableToTry : Symbol; begin if (RawDict.GetSymbolDiscriminant (OriginalVariable) = Variable_Symbol or else RawDict.GetSymbolDiscriminant (OriginalVariable) = Subprogram_Parameter_Symbol) and then Loop_Var_Only_An_Import (OriginalVariable => OriginalVariable, TheLoop => TheLoop) then Result := OriginalVariable; else OnEntryVariableToTry := RawDict.GetLoopOnEntryVars (TheLoop); while OnEntryVariableToTry /= NullSymbol loop if RawDict.GetLoopEntryVariableOriginalVar (OnEntryVariableToTry) = OriginalVariable then Result := OnEntryVariableToTry; exit; end if; OnEntryVariableToTry := RawDict.GetLoopEntryVariableNext (OnEntryVariableToTry); end loop; end if; return Result; end GetLoopOnEntryVariable; -------------------------------------------------------------------------------- procedure Add_Subtype_Declaration (Name : in LexTokenManager.Lex_String; Is_Full_Range_Subtype : in Boolean; Parent : in RawDict.Type_Info_Ref; Parent_Reference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; Type_Mark : out RawDict.Type_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Context, --# Declaration, --# Is_Full_Range_Subtype, --# LexTokenManager.State, --# Name, --# Parent, --# Scope & --# SPARK_IO.File_Sys from *, --#
Comp_Unit, --# Context, --# Declaration, --# Dict, --# Is_Full_Range_Subtype, --# LexTokenManager.State, --# Name, --# Parent, --# Parent_Reference, --# Scope & --# Type_Mark from Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post Type_Mark /= RawDict.Null_Type_Info_Ref; is begin Add_Type_Declaration (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); if Is_Full_Range_Subtype then RawDict.Set_Type_Is_Full_Range_Subtype (Type_Mark => Type_Mark); end if; RawDict.Set_Type_Parent (Type_Mark => Type_Mark, Parent => Parent); if Parent /= Get_Unknown_Type_Mark then AddOtherReference (RawDict.Get_Type_Symbol (Parent), GetRegion (Scope), Parent_Reference); end if; end Add_Subtype_Declaration; -------------------------------------------------------------------------------- -- Add_Full_Range_Subtype -------------------------------------------------------------------------------- procedure Add_Full_Range_Subtype (Name : in LexTokenManager.Lex_String; Parent : in Symbol; Parent_Reference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; The_Subtype : out Symbol) is Type_Mark : RawDict.Type_Info_Ref; begin Add_Subtype_Declaration (Name => Name, Is_Full_Range_Subtype => True, Parent => RawDict.Get_Type_Info_Ref (Parent), -- GAA External Parent_Reference => Parent_Reference, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); The_Subtype := RawDict.Get_Type_Symbol (Type_Mark); -- GAA External end Add_Full_Range_Subtype; -------------------------------------------------------------------------------- -- Add_Enumeration_Subtype -------------------------------------------------------------------------------- procedure Add_Enumeration_Subtype (Name : in LexTokenManager.Lex_String; Static : in Boolean; Parent : in Symbol; Parent_Reference : in Location; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; The_Subtype : out Symbol) is Type_Mark : RawDict.Type_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Enumeration_Subtype_Local (Name : in LexTokenManager.Lex_String; Static : in Boolean; Parent : in RawDict.Type_Info_Ref; Parent_Reference : in Location; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; Type_Mark : out RawDict.Type_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Context, --# Declaration, --# LexTokenManager.State, --# Lower, --# Name, --# Parent, --# Scope, --# Static, --# Upper & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Parent, --# Parent_Reference, --# Scope & --# Type_Mark from Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post Type_Mark /= RawDict.Null_Type_Info_Ref; is begin Add_Subtype_Declaration (Name => Name, Is_Full_Range_Subtype => False, Parent => Parent, Parent_Reference => Parent_Reference, Comp_Unit => Comp_Unit, Declaration => Declaration, 
Scope => Scope, Context => Context, Type_Mark => Type_Mark); Add_Enumeration_Type_Mark (Type_Mark => Type_Mark, Static => Static); RawDict.Set_Type_Lower (Type_Mark => Type_Mark, Lower => Lower); RawDict.Set_Type_Upper (Type_Mark => Type_Mark, Upper => Upper); end Add_Enumeration_Subtype_Local; begin -- Add_Enumeration_Subtype Add_Enumeration_Subtype_Local (Name => Name, Static => Static, Parent => RawDict.Get_Type_Info_Ref (Parent), -- GAA External Parent_Reference => Parent_Reference, Lower => Lower, Upper => Upper, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); The_Subtype := RawDict.Get_Type_Symbol (Type_Mark); -- GAA External end Add_Enumeration_Subtype; -------------------------------------------------------------------------------- -- Add_Integer_Subtype -------------------------------------------------------------------------------- procedure Add_Integer_Subtype (Name : in LexTokenManager.Lex_String; Static : in Boolean; Parent : in Symbol; Parent_Reference : in Location; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; The_Subtype : out Symbol) is Type_Mark : RawDict.Type_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Integer_Subtype_Local (Name : in LexTokenManager.Lex_String; Static : in Boolean; Parent : in RawDict.Type_Info_Ref; Parent_Reference : in Location; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; Type_Mark : out RawDict.Type_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Context, --# Declaration, --# LexTokenManager.State, --# Lower, --# Name, --# Parent, --# Scope, --# Static, --# Upper & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Parent, --# Parent_Reference, --# Scope & --# Type_Mark from Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post Type_Mark /= RawDict.Null_Type_Info_Ref; is begin Add_Subtype_Declaration (Name => Name, Is_Full_Range_Subtype => False, Parent => Parent, Parent_Reference => Parent_Reference, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); Add_Integer_Type_Mark (Type_Mark => Type_Mark, Static => Static, Lower => Lower, Upper => Upper); end Add_Integer_Subtype_Local; begin -- Add_Integer_Subtype Add_Integer_Subtype_Local (Name => Name, Static => Static, Parent => RawDict.Get_Type_Info_Ref (Parent), -- GAA External Parent_Reference => Parent_Reference, Lower => Lower, Upper => Upper, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); The_Subtype := RawDict.Get_Type_Symbol (Type_Mark); -- GAA External end Add_Integer_Subtype; -------------------------------------------------------------------------------- -- Add_Modular_Subtype -------------------------------------------------------------------------------- procedure Add_Modular_Subtype (Name : in LexTokenManager.Lex_String; Parent : in Symbol; Parent_Reference : in Location; Lower : in LexTokenManager.Lex_String; Upper : in 
LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; The_Subtype : out Symbol) is Type_Mark : RawDict.Type_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Modular_Subtype_Local (Name : in LexTokenManager.Lex_String; Parent : in RawDict.Type_Info_Ref; Parent_Reference : in Location; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; Type_Mark : out RawDict.Type_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Context, --# Declaration, --# LexTokenManager.State, --# Lower, --# Name, --# Parent, --# Scope, --# Upper & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Parent, --# Parent_Reference, --# Scope & --# Type_Mark from Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post Type_Mark /= RawDict.Null_Type_Info_Ref; is begin Add_Subtype_Declaration (Name => Name, Is_Full_Range_Subtype => False, Parent => Parent, Parent_Reference => Parent_Reference, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); Add_Modular_Type_Mark (Type_Mark => Type_Mark, Static => True, Lower => Lower, Upper => Upper, Modulus => Get_Scalar_Attribute_Value (False, LexTokenManager.Modulus_Token, Parent)); end Add_Modular_Subtype_Local; begin -- Add_Modular_Subtype Add_Modular_Subtype_Local (Name => Name, Parent => RawDict.Get_Type_Info_Ref (Parent), -- GAA External Parent_Reference => Parent_Reference, Lower => Lower, Upper => Upper, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); The_Subtype := RawDict.Get_Type_Symbol (Type_Mark); -- GAA External end Add_Modular_Subtype; -------------------------------------------------------------------------------- -- Add_Floating_Point_Subtype -------------------------------------------------------------------------------- procedure Add_Floating_Point_Subtype (Name : in LexTokenManager.Lex_String; Static : in Boolean; Parent : in Symbol; Parent_Reference : in Location; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Error_Bound : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; The_Subtype : out Symbol) is Type_Mark : RawDict.Type_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Floating_Point_Subtype_Local (Name : in LexTokenManager.Lex_String; Static : in Boolean; Parent : in RawDict.Type_Info_Ref; Parent_Reference : in Location; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Error_Bound : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; Type_Mark : out RawDict.Type_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Context, --# Declaration, --# Error_Bound, --# LexTokenManager.State, --# Lower, --# Name, --# Parent, --# Scope, --# Static, --# Upper & --# 
SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Parent, --# Parent_Reference, --# Scope & --# Type_Mark from Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post Type_Mark /= RawDict.Null_Type_Info_Ref; is begin Add_Subtype_Declaration (Name => Name, Is_Full_Range_Subtype => False, Parent => Parent, Parent_Reference => Parent_Reference, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); Add_Floating_Point_Type_Mark (Type_Mark => Type_Mark, Static => Static); RawDict.Set_Type_Lower (Type_Mark => Type_Mark, Lower => Lower); RawDict.Set_Type_Upper (Type_Mark => Type_Mark, Upper => Upper); RawDict.Set_Type_Error_Bound (Type_Mark => Type_Mark, Error_Bound => Error_Bound); end Add_Floating_Point_Subtype_Local; begin -- Add_Floating_Point_Subtype Add_Floating_Point_Subtype_Local (Name => Name, Static => Static, Parent => RawDict.Get_Type_Info_Ref (Parent), -- GAA External Parent_Reference => Parent_Reference, Lower => Lower, Upper => Upper, Error_Bound => Error_Bound, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); The_Subtype := RawDict.Get_Type_Symbol (Type_Mark); -- GAA External end Add_Floating_Point_Subtype; -------------------------------------------------------------------------------- -- Add_Fixed_Point_Subtype -------------------------------------------------------------------------------- procedure Add_Fixed_Point_Subtype (Name : in LexTokenManager.Lex_String; Static : in Boolean; Parent : in Symbol; Parent_Reference : in Location; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Error_Bound : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; The_Subtype : out Symbol) is Type_Mark : RawDict.Type_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Fixed_Point_Subtype_Local (Name : in LexTokenManager.Lex_String; Static : in Boolean; Parent : in RawDict.Type_Info_Ref; Parent_Reference : in Location; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Error_Bound : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; Type_Mark : out RawDict.Type_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Context, --# Declaration, --# Error_Bound, --# LexTokenManager.State, --# Lower, --# Name, --# Parent, --# Scope, --# Static, --# Upper & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Parent, --# Parent_Reference, --# Scope & --# Type_Mark from Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post Type_Mark /= RawDict.Null_Type_Info_Ref; is begin Add_Subtype_Declaration (Name => Name, Is_Full_Range_Subtype => False, Parent => Parent, Parent_Reference => Parent_Reference, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); Add_Fixed_Point_Type_Mark (Type_Mark => Type_Mark, Static => Static); RawDict.Set_Type_Lower (Type_Mark => Type_Mark, Lower => Lower); RawDict.Set_Type_Upper 
(Type_Mark => Type_Mark, Upper => Upper); RawDict.Set_Type_Error_Bound (Type_Mark => Type_Mark, Error_Bound => Error_Bound); end Add_Fixed_Point_Subtype_Local; begin -- Add_Fixed_Point_Subtype Add_Fixed_Point_Subtype_Local (Name => Name, Static => Static, Parent => RawDict.Get_Type_Info_Ref (Parent), -- GAA External Parent_Reference => Parent_Reference, Lower => Lower, Upper => Upper, Error_Bound => Error_Bound, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); The_Subtype := RawDict.Get_Type_Symbol (Type_Mark); -- GAA External end Add_Fixed_Point_Subtype; -------------------------------------------------------------------------------- -- Add_Array_Subtype -------------------------------------------------------------------------------- procedure Add_Array_Subtype (Name : in LexTokenManager.Lex_String; Parent : in Symbol; Parent_Reference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; Static : in Boolean; The_Subtype : out Symbol) is Type_Mark : RawDict.Type_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Array_Subtype_Local (Name : in LexTokenManager.Lex_String; Parent : in RawDict.Type_Info_Ref; Parent_Reference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; Static : in Boolean; Type_Mark : out RawDict.Type_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Context, --# Declaration, --# LexTokenManager.State, --# Name, --# Parent, --# Scope, --# Static & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Parent, --# Parent_Reference, --# Scope & --# Type_Mark from Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post Type_Mark /= RawDict.Null_Type_Info_Ref; is begin Add_Subtype_Declaration (Name => Name, Is_Full_Range_Subtype => False, Parent => Parent, Parent_Reference => Parent_Reference, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Type_Mark => Type_Mark); Add_Array_Type_Mark (Type_Mark => Type_Mark, Component_Type => Get_Array_Component (Type_Mark => RawDict.Get_Type_Parent (Type_Mark => Type_Mark)), Static => Static); RawDict.Set_Type_Constrained (Type_Mark => Type_Mark, Constrained => True); end Add_Array_Subtype_Local; begin -- Add_Array_Subtype Add_Array_Subtype_Local (Name => Name, Parent => RawDict.Get_Type_Info_Ref (Parent), -- GAA External Parent_Reference => Parent_Reference, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope, Context => Context, Static => Static, Type_Mark => Type_Mark); The_Subtype := RawDict.Get_Type_Symbol (Type_Mark); -- GAA External end Add_Array_Subtype; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- procedure Write_Proof_Statement (Discriminant : in ProofStatementDiscriminant; Statement : in Location) --# global in Dict; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# Discriminant, --# Statement; is procedure Write_Discriminant (File : in SPARK_IO.File_Type; Discriminant : in ProofStatementDiscriminant) --# global in out SPARK_IO.File_Sys; --# derives 
SPARK_IO.File_Sys from *, --# Discriminant, --# File; is begin case Discriminant is when AssertStatement => Write_String (File, "assert statement"); when CheckStatement => Write_String (File, "check statement"); end case; end Write_Discriminant; begin -- Write_Proof_Statement if SPARK_IO.Is_Open (Dict.TemporaryFile) then Write_Discriminant (File => Dict.TemporaryFile, Discriminant => Discriminant); Write_String (Dict.TemporaryFile, " at "); Write_Location (File => Dict.TemporaryFile, Loc => Statement); Write_Line (Dict.TemporaryFile, " ;"); end if; end Write_Proof_Statement; -------------------------------------------------------------------------------- procedure AddAssertStatement (Statement : in Location) is begin Write_Proof_Statement (Discriminant => AssertStatement, Statement => Statement); end AddAssertStatement; -------------------------------------------------------------------------------- procedure AddCheckStatement (Statement : in Location) is begin Write_Proof_Statement (Discriminant => CheckStatement, Statement => Statement); end AddCheckStatement; -------------------------------------------------------------------------------- -- Add_Precondition -------------------------------------------------------------------------------- procedure AddPrecondition (Abstraction : in Abstractions; Subprogram : in Symbol; Predicate : in ExaminerConstants.RefType; Precondition : in Location) is procedure Add_Precondition (Abstraction : in Abstractions; The_Subprogram : in RawDict.Subprogram_Info_Ref; Predicate : in ExaminerConstants.RefType; Precondition : in Location) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Abstraction, --# Predicate, --# The_Subprogram & --# SPARK_IO.File_Sys from *, --# Abstraction, --# Dict, --# LexTokenManager.State, --# Precondition, --# Predicate, --# The_Subprogram; is begin -- mark subprog as having second proof constraint (independent of whether it has second flow anno) if Abstraction = IsRefined then RawDict.Set_Subprogram_Has_Second_Constraint (The_Subprogram => The_Subprogram); end if; RawDict.Set_Subprogram_Precondition (The_Subprogram => The_Subprogram, Abstraction => Abstraction, Precondition => Predicate); if SPARK_IO.Is_Open (Dict.TemporaryFile) then if Abstraction = IsRefined then Write_String (Dict.TemporaryFile, "refined "); end if; Write_String (Dict.TemporaryFile, "precondition of "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Subprogram_Symbol (The_Subprogram)); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => Precondition); Write_Line (Dict.TemporaryFile, " ;"); end if; end Add_Precondition; begin -- AddPrecondition Add_Precondition (Abstraction => Abstraction, The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Subprogram), -- GAA External Predicate => Predicate, Precondition => Precondition); end AddPrecondition; -------------------------------------------------------------------------------- -- Add_Postcondition -------------------------------------------------------------------------------- procedure AddPostcondition (Abstraction : in Abstractions; Subprogram : in Symbol; Predicate : in ExaminerConstants.RefType; Postcondition : in Location) is procedure Add_Postcondition (Abstraction : in Abstractions; The_Subprogram : in RawDict.Subprogram_Info_Ref; Predicate : in ExaminerConstants.RefType; Postcondition : in Location) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# 
derives Dict from *, --# Abstraction, --# Predicate, --# The_Subprogram & --# SPARK_IO.File_Sys from *, --# Abstraction, --# Dict, --# LexTokenManager.State, --# Postcondition, --# Predicate, --# The_Subprogram; is begin -- mark subprog as having second proof constraint (independent of whether it has second flow anno) if Abstraction = IsRefined then RawDict.Set_Subprogram_Has_Second_Constraint (The_Subprogram => The_Subprogram); end if; RawDict.Set_Subprogram_Postcondition (The_Subprogram => The_Subprogram, Abstraction => Abstraction, Postcondition => Predicate); if SPARK_IO.Is_Open (Dict.TemporaryFile) then if Abstraction = IsRefined then Write_String (Dict.TemporaryFile, "refined "); end if; Write_String (Dict.TemporaryFile, "postcondition of "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Subprogram_Symbol (The_Subprogram)); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => Postcondition); Write_Line (Dict.TemporaryFile, " ;"); end if; end Add_Postcondition; begin -- AddPostcondition Add_Postcondition (Abstraction => Abstraction, The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Subprogram), -- GAA External Predicate => Predicate, Postcondition => Postcondition); end AddPostcondition; -------------------------------------------------------------------------------- -- Add_Subprogram -------------------------------------------------------------------------------- procedure Add_Subprogram (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Specification : in Location; Scope : in Scopes; Context : in Contexts; The_Subprogram : out RawDict.Subprogram_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# The_Subprogram from Comp_Unit, --# Context, --# Dict, --# Name, --# Scope, --# Specification & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Dict, --# LexTokenManager.State, --# Name, --# Scope, --# Specification; --# post The_Subprogram /= RawDict.Null_Subprogram_Info_Ref; is The_Declaration : RawDict.Declaration_Info_Ref; begin Add_Declaration (Comp_Unit => Comp_Unit, Loc => Specification, Scope => Scope, Context => Context, The_Declaration => The_Declaration); RawDict.Create_Subprogram (Name => Name, The_Declaration => The_Declaration, Comp_Unit => Comp_Unit, Loc => Specification.Start_Position, The_Subprogram => The_Subprogram); Write_Subprogram_Declaration (The_Subprogram => The_Subprogram, Is_Renaming => False, Scope => Scope, Declaration => Specification); end Add_Subprogram; -------------------------------------------------------------------------------- procedure AddSubprogram (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Specification : in Location; Scope : in Scopes; Context : in Contexts; Subprogram : out Symbol) is The_Subprogram : RawDict.Subprogram_Info_Ref; begin Add_Subprogram (Name => Name, Comp_Unit => Comp_Unit, Specification => Specification, Scope => Scope, Context => Context, The_Subprogram => The_Subprogram); Subprogram := RawDict.Get_Subprogram_Symbol (The_Subprogram); -- GAA External end AddSubprogram; -------------------------------------------------------------------------------- -- Actual_Of_Generic_Formal_Type -------------------------------------------------------------------------------- function Actual_Of_Generic_Formal_Type (The_Generic_Formal_Type : RawDict.Type_Info_Ref; Actual_Subprogram : RawDict.Subprogram_Info_Ref; Not_Found_Expected : 
Boolean) return RawDict.Type_Info_Ref --# global in Dict; --# return Result => (Not_Found_Expected or Result /= RawDict.Null_Type_Info_Ref); is Current_Generic_Association : RawDict.Generic_Association_Info_Ref; Result : RawDict.Type_Info_Ref := RawDict.Null_Type_Info_Ref; begin -- now that we have the actual generic formal type we can look for it in the linked list of parameters Current_Generic_Association := RawDict.Get_Subprogram_First_Generic_Association (The_Subprogram => Actual_Subprogram); while Current_Generic_Association /= RawDict.Null_Generic_Association_Info_Ref loop if not RawDict.Get_Generic_Association_Is_Object (The_Generic_Association => Current_Generic_Association) and then RawDict.Get_Generic_Association_Formal_Type (The_Generic_Association => Current_Generic_Association) = The_Generic_Formal_Type then -- match found Result := RawDict.Get_Generic_Association_Actual_Type (The_Generic_Association => Current_Generic_Association); exit; end if; Current_Generic_Association := RawDict.Get_Next_Generic_Association (The_Generic_Association => Current_Generic_Association); end loop; SystemErrors.RT_Assert (C => Not_Found_Expected or else Result /= RawDict.Null_Type_Info_Ref, Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Actual_Of_Generic_Formal_Type"); --# accept Flow, 35, Not_Found_Expected, "Importation of the initial value is ineffective OK" & --# Flow, 50, Not_Found_Expected, "Value is not derived from the imported value OK"; return Result; end Actual_Of_Generic_Formal_Type; -------------------------------------------------------------------------------- function ActualOfGenericFormalType (TheGenericFormalSym : Symbol; ActualSubprogramSym : Symbol) return Symbol is begin return RawDict.Get_Type_Symbol -- GAA External (Actual_Of_Generic_Formal_Type (The_Generic_Formal_Type => RawDict.Get_Type_Info_Ref (TheGenericFormalSym), -- GAA External Actual_Subprogram => RawDict.Get_Subprogram_Info_Ref (ActualSubprogramSym), -- GAA External Not_Found_Expected => False)); end ActualOfGenericFormalType; -------------------------------------------------------------------------------- -- Actual_Of_Generic_Formal_Object -------------------------------------------------------------------------------- function ActualOfGenericFormalObject (TheGenericFormalSym : Symbol; ActualSubprogramSym : Symbol) return Symbol is function Actual_Of_Generic_Formal_Object (The_Generic_Formal_Object : RawDict.Constant_Info_Ref; Actual_Subprogram : RawDict.Subprogram_Info_Ref) return RawDict.Constant_Info_Ref --# global in Dict; --# return Result => (Result /= RawDict.Null_Constant_Info_Ref); is Current_Generic_Association : RawDict.Generic_Association_Info_Ref; Result : RawDict.Constant_Info_Ref := RawDict.Null_Constant_Info_Ref; begin -- now that we have the actual generic formal object we can look for it in the linked list of parameters Current_Generic_Association := RawDict.Get_Subprogram_First_Generic_Association (The_Subprogram => Actual_Subprogram); while Current_Generic_Association /= RawDict.Null_Generic_Association_Info_Ref loop if RawDict.Get_Generic_Association_Is_Object (The_Generic_Association => Current_Generic_Association) and then RawDict.Get_Generic_Association_Formal_Object (The_Generic_Association => Current_Generic_Association) = The_Generic_Formal_Object then -- match found Result := RawDict.Get_Generic_Association_Actual_Object (The_Generic_Association => Current_Generic_Association); exit; end if; Current_Generic_Association := RawDict.Get_Next_Generic_Association 
(The_Generic_Association => Current_Generic_Association); end loop; SystemErrors.RT_Assert (C => Result /= RawDict.Null_Constant_Info_Ref, Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Actual_Of_Generic_Formal_Object"); return Result; end Actual_Of_Generic_Formal_Object; begin -- ActualOfGenericFormalObject return RawDict.Get_Constant_Symbol -- GAA External (Actual_Of_Generic_Formal_Object (The_Generic_Formal_Object => RawDict.Get_Constant_Info_Ref (TheGenericFormalSym), -- GAA External Actual_Subprogram => RawDict.Get_Subprogram_Info_Ref (ActualSubprogramSym))); -- GAA External end ActualOfGenericFormalObject; -------------------------------------------------------------------------------- -- Get_Generic_Of_Instantiation -------------------------------------------------------------------------------- function Get_Generic_Of_Instantiation (The_Subprogram : Symbol) return RawDict.Subprogram_Info_Ref --# global in Dict; is Result : RawDict.Subprogram_Info_Ref; begin case RawDict.GetSymbolDiscriminant (The_Subprogram) is when Subprogram_Symbol => Result := RawDict.Get_Subprogram_Instantiation_Of (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => The_Subprogram)); when ImplicitProofFunctionSymbol => Result := RawDict.Get_Subprogram_Instantiation_Of (The_Subprogram => RawDict.GetImplicitProofFunctionAdaFunction (The_Subprogram)); when others => -- non-exec code Result := RawDict.Null_Subprogram_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Get_Generic_Of_Instantiation"); end case; return Result; end Get_Generic_Of_Instantiation; -------------------------------------------------------------------------------- function GetGenericOfInstantiation (PackageOrSubProgram : Symbol) return Symbol is begin return RawDict.Get_Subprogram_Symbol (Get_Generic_Of_Instantiation (The_Subprogram => PackageOrSubProgram)); -- GAA External end GetGenericOfInstantiation; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function IsInstantiation (PackageOrSubProgram : Symbol) return Boolean is begin return Get_Generic_Of_Instantiation (The_Subprogram => PackageOrSubProgram) /= RawDict.Null_Subprogram_Info_Ref; end IsInstantiation; -------------------------------------------------------------------------------- procedure SetSubprogramSignatureNotWellformed (Abstraction : in Abstractions; Subprogram : in Symbol) is begin case RawDict.GetSymbolDiscriminant (Subprogram) is when Subprogram_Symbol => RawDict.Set_Subprogram_Signature_Not_Wellformed (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Subprogram), -- GAA External Abstraction => Abstraction); when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Subprogram)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.SetSubprogramSignatureNotWellformed"); RawDict.Set_Task_Type_Signature_Not_Wellformed (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => Subprogram), -- GAA External Abstraction => Abstraction); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.SetSubprogramSignatureNotWellformed"); end case; end SetSubprogramSignatureNotWellformed; -------------------------------------------------------------------------------- procedure SetSubprogramIsEntry (Subprogram : in Symbol) is begin 
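-- Mark the subprogram as a protected entry in the underlying RawDict record; the external
-- Symbol is first converted to the internal Subprogram_Info_Ref before delegating.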
RawDict.Set_Subprogram_Is_Entry (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Subprogram)); -- GAA External end SetSubprogramIsEntry; -------------------------------------------------------------------------------- procedure Set_Package_Generic_Unit (Pack_Sym : in Symbol; Generic_Unit : in Symbol) is begin RawDict.Set_Package_Generic_Unit (The_Package => RawDict.Get_Package_Info_Ref (Pack_Sym), -- GAA External The_Generic_Unit => RawDict.Get_Generic_Unit_Info_Ref (Generic_Unit)); -- GAA External end Set_Package_Generic_Unit; -------------------------------------------------------------------------------- procedure Set_Subprogram_Generic_Unit (Subprogram : in Symbol; Generic_Unit : in Symbol) is begin RawDict.Set_Subprogram_Generic_Unit (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Subprogram), -- GAA External The_Generic_Unit => RawDict.Get_Generic_Unit_Info_Ref (Generic_Unit)); -- GAA External end Set_Subprogram_Generic_Unit; -------------------------------------------------------------------------------- procedure Set_Generic_Unit_Owning_Package (Generic_Unit : in Symbol; Pack_Sym : in Symbol) is begin RawDict.Set_Generic_Unit_Owning_Package (The_Generic_Unit => RawDict.Get_Generic_Unit_Info_Ref (Generic_Unit), -- GAA External The_Package => RawDict.Get_Package_Info_Ref (Pack_Sym)); -- GAA External end Set_Generic_Unit_Owning_Package; -------------------------------------------------------------------------------- procedure Set_Generic_Unit_Owning_Subprogram (Generic_Unit : in Symbol; Subprogram : in Symbol) is begin RawDict.Set_Generic_Unit_Owning_Subprogram (The_Generic_Unit => RawDict.Get_Generic_Unit_Info_Ref (Generic_Unit), -- GAA External The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Subprogram)); -- GAA External end Set_Generic_Unit_Owning_Subprogram; -------------------------------------------------------------------------------- procedure SetHasDerivesAnnotation (Task_Or_Proc : in Symbol) is begin case RawDict.GetSymbolDiscriminant (Task_Or_Proc) is when Subprogram_Symbol => RawDict.Set_Subprogram_Has_Derives_Annotation (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Task_Or_Proc)); -- GAA External when ImplicitProofFunctionSymbol => RawDict.Set_Subprogram_Has_Derives_Annotation (The_Subprogram => RawDict.GetImplicitProofFunctionAdaFunction (Task_Or_Proc)); when Type_Symbol => -- The only other possibility is a task. 
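-- Guard: assert that this Type_Symbol really denotes a task type before the derives
-- annotation is recorded against it; any other type symbol here indicates a corrupted
-- symbol table.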
SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Task_Or_Proc)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.SetHasDerivesAnnotation"); RawDict.Set_Task_Type_Has_Derives_Annotation (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => Task_Or_Proc)); -- GAA External when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.SetHasDerivesAnnotation"); end case; end SetHasDerivesAnnotation; -------------------------------------------------------------------------------- procedure SetSubprogramEntryBarrier (Subprogram, TheBarrier : in Symbol) is begin RawDict.Set_Subprogram_Entry_Barrier (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Subprogram), -- GAA External The_Barrier => TheBarrier); end SetSubprogramEntryBarrier; -------------------------------------------------------------------------------- procedure SetIsInterruptHandler (Subprogram : in Symbol) is begin RawDict.Set_Subprogram_Is_Interrupt_Handler (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Subprogram)); -- GAA External end SetIsInterruptHandler; -------------------------------------------------------------------------------- procedure SetProtectedTypeElementsHidden (TheProtectedType : in Symbol) is begin RawDict.Set_Protected_Type_Elements_Hidden (The_Protected_Type => RawDict.Get_Type_Info_Ref (TheProtectedType)); -- GAA External end SetProtectedTypeElementsHidden; -------------------------------------------------------------------------------- procedure SetProtectedTypeEntry (TheProtectedType, TheEntry : in Symbol) is begin RawDict.Set_Protected_Type_The_Entry (The_Protected_Type => RawDict.Get_Type_Info_Ref (TheProtectedType), -- GAA External The_Entry => TheEntry); end SetProtectedTypeEntry; -------------------------------------------------------------------------------- -- Set_Type_Has_Pragma -------------------------------------------------------------------------------- procedure SetTypeHasPragma (TheProtectedOrTaskType : in Symbol; ThePragma : in RavenscarPragmas) is procedure Set_Type_Has_Pragma (The_Protected_Or_Task_Type : in RawDict.Type_Info_Ref; The_Pragma : in RavenscarPragmas) --# global in out Dict; --# derives Dict from *, --# The_Pragma, --# The_Protected_Or_Task_Type; is begin case RawDict.Get_Type_Discriminant (Type_Mark => The_Protected_Or_Task_Type) is when Protected_Type_Item => RawDict.Set_Protected_Type_Has_Pragma (The_Protected_Type => The_Protected_Or_Task_Type, The_Pragma => The_Pragma); when Task_Type_Item => RawDict.Set_Task_Type_Has_Pragma (The_Task_Type => The_Protected_Or_Task_Type, The_Pragma => The_Pragma); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.SetTypeHasPragma"); end case; end Set_Type_Has_Pragma; begin -- SetTypeHasPragma Set_Type_Has_Pragma (The_Protected_Or_Task_Type => RawDict.Get_Type_Info_Ref (TheProtectedOrTaskType), -- GAA External The_Pragma => ThePragma); end SetTypeHasPragma; -------------------------------------------------------------------------------- -- Set_Type_Pragma_Value -------------------------------------------------------------------------------- procedure SetTypePragmaValue (TheProtectedOrTaskType : in Symbol; ThePragma : in RavenscarPragmasWithValue; TheValue : in LexTokenManager.Lex_String) is procedure Set_Type_Pragma_Value (The_Protected_Or_Task_Type : in RawDict.Type_Info_Ref; The_Pragma : in RavenscarPragmasWithValue; 
The_Value : in LexTokenManager.Lex_String) --# global in out Dict; --# derives Dict from *, --# The_Pragma, --# The_Protected_Or_Task_Type, --# The_Value; is begin case RawDict.Get_Type_Discriminant (Type_Mark => The_Protected_Or_Task_Type) is when Protected_Type_Item => RawDict.Set_Protected_Type_Pragma_Value (The_Protected_Type => The_Protected_Or_Task_Type, The_Pragma => The_Pragma, The_Value => The_Value); when Task_Type_Item => RawDict.Set_Task_Type_Pragma_Value (The_Task_Type => The_Protected_Or_Task_Type, The_Pragma => The_Pragma, The_Value => The_Value); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.SetTypePragmaValue"); end case; end Set_Type_Pragma_Value; begin -- SetTypePragmaValue Set_Type_Pragma_Value (The_Protected_Or_Task_Type => RawDict.Get_Type_Info_Ref (TheProtectedOrTaskType), -- GAA External The_Pragma => ThePragma, The_Value => TheValue); end SetTypePragmaValue; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- procedure SetMainProgramPriority (TheValue : in LexTokenManager.Lex_String) is begin Dict.Main.Priority_Given := True; Dict.Main.The_Priority := TheValue; null; end SetMainProgramPriority; -------------------------------------------------------------------------------- function IsPredefinedSuspendUntilTrueOperation (TheProcedure : Symbol) return Boolean is begin return RawDict.Get_Subprogram_Info_Ref (TheProcedure) = Dict.Subprograms.STC_Suspend_Until_True; -- GAA External end IsPredefinedSuspendUntilTrueOperation; -------------------------------------------------------------------------------- function IsPredefinedRealTimeClockOperation (TheProcedure : Symbol) return Boolean is begin return RawDict.GetSymbolDiscriminant (TheProcedure) = Subprogram_Symbol and then RawDict.Get_Subprogram_Info_Ref (TheProcedure) = Dict.Subprograms.Ada_Real_Time_Clock; -- GAA External end IsPredefinedRealTimeClockOperation; -------------------------------------------------------------------------------- -- Add_Return_Type -------------------------------------------------------------------------------- procedure Add_Return_Type (The_Function : in RawDict.Subprogram_Info_Ref; Type_Mark : in RawDict.Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Type_Reference : in Location) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# The_Function, --# Type_Mark, --# Type_Reference & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Dict, --# LexTokenManager.State, --# The_Function, --# Type_Mark, --# Type_Reference; is The_Proof_Function : Symbol; begin RawDict.Set_Subprogram_Return_Type (The_Subprogram => The_Function, Type_Mark => Type_Mark); RawDict.Set_Subprogram_Has_Derives_Annotation (The_Subprogram => The_Function); if Get_Subprogram_Context (The_Subprogram => The_Function) = ProgramContext then RawDict.CreateImplicitProofFunction (Ada_Function => The_Function, Comp_Unit => Comp_Unit, Loc => Type_Reference.Start_Position, ProofFunction => The_Proof_Function); RawDict.Set_Subprogram_Implicit_Proof_Function (The_Subprogram => The_Function, Abstraction => IsAbstract, The_Proof_Function => The_Proof_Function); end if; if Type_Mark /= Get_Unknown_Type_Mark then AddOtherReference (RawDict.Get_Type_Symbol (Type_Mark), RawDict.Get_Subprogram_Symbol (The_Function), Type_Reference); end if; end Add_Return_Type; 
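-- The Symbol-based subprograms that follow (AddReturnType, AddImplicitReturnVariable,
-- AddSubprogramParameter and so on) are thin external wrappers: each converts its Symbol
-- arguments to the corresponding internal Info_Ref views via the RawDict accessors (the
-- conversions are marked "-- GAA External") and then delegates to the Info_Ref-based
-- operation.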
------------------------------------------------------------------------------- procedure AddReturnType (TheFunction : in Symbol; TypeMark : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; TypeReference : in Location) is begin Add_Return_Type (The_Function => RawDict.Get_Subprogram_Info_Ref (TheFunction), -- GAA External Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark), -- GAA External Comp_Unit => Comp_Unit, Type_Reference => TypeReference); end AddReturnType; ------------------------------------------------------------------------------- -- Add_Implicit_Return_Variable ------------------------------------------------------------------------------- procedure AddImplicitReturnVariable (Abstraction : in Abstractions; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Name : in LexTokenManager.Lex_String; TheFunction : in Symbol; Variable : out Symbol) is The_Implicit_Return_Variable : RawDict.Implicit_Return_Variable_Info_Ref; ------------------------------------------------------------------------------- procedure Add_Implicit_Return_Variable (Abstraction : in Abstractions; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Name : in LexTokenManager.Lex_String; The_Function : in RawDict.Subprogram_Info_Ref; The_Implicit_Return_Variable : out RawDict.Implicit_Return_Variable_Info_Ref) --# global in out Dict; --# derives Dict from *, --# Abstraction, --# Comp_Unit, --# Declaration, --# Name, --# The_Function & --# The_Implicit_Return_Variable from Comp_Unit, --# Declaration, --# Dict, --# Name, --# The_Function; --# post The_Implicit_Return_Variable /= RawDict.Null_Implicit_Return_Variable_Info_Ref; is begin RawDict.Create_Implicit_Return_Variable (Name => Name, The_Function => The_Function, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, The_Implicit_Return_Variable => The_Implicit_Return_Variable); RawDict.Set_Subprogram_Implicit_Return_Variable (The_Subprogram => The_Function, Abstraction => Abstraction, The_Implicit_Return_Variable => The_Implicit_Return_Variable); end Add_Implicit_Return_Variable; begin -- AddImplicitReturnVariable Add_Implicit_Return_Variable (Abstraction => Abstraction, Comp_Unit => Comp_Unit, Declaration => Declaration, Name => Name, The_Function => RawDict.Get_Subprogram_Info_Ref (TheFunction), -- GAA External The_Implicit_Return_Variable => The_Implicit_Return_Variable); Variable := RawDict.Get_Implicit_Return_Variable_Symbol (The_Implicit_Return_Variable); -- GAA External end AddImplicitReturnVariable; -------------------------------------------------------------------------------- -- Add_Subprogram_Parameter -------------------------------------------------------------------------------- procedure Add_Subprogram_Parameter (Name : in LexTokenManager.Lex_String; The_Subprogram : in RawDict.Subprogram_Info_Ref; Type_Mark : in RawDict.Type_Info_Ref; Type_Reference : in Location; Mode : in Modes; Comp_Unit : in ContextManager.UnitDescriptors; Specification : in Location) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Mode, --# Name, --# Specification, --# The_Subprogram, --# Type_Mark & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Dict, --# LexTokenManager.State, --# Mode, --# Name, --# Specification, --# The_Subprogram, --# Type_Mark, --# Type_Reference; is separate; -------------------------------------------------------------------------------- procedure AddSubprogramParameter (Name : in LexTokenManager.Lex_String; 
Subprogram : in Symbol; TypeMark : in Symbol; TypeReference : in Location; Mode : in Modes; Comp_Unit : in ContextManager.UnitDescriptors; Specification : in Location) is begin Add_Subprogram_Parameter (Name => Name, The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Subprogram), -- GAA External Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark), -- GAA External Type_Reference => TypeReference, Mode => Mode, Comp_Unit => Comp_Unit, Specification => Specification); end AddSubprogramParameter; -------------------------------------------------------------------------------- -- Add_Generic_Formal_Parameter -------------------------------------------------------------------------------- procedure Add_Generic_Formal_Parameter_Local (Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; The_Generic_Unit : in RawDict.Generic_Unit_Info_Ref; Type_Mark : in RawDict.Type_Info_Ref; The_Object : in RawDict.Constant_Info_Ref; The_Generic_Parameter : out RawDict.Generic_Parameter_Info_Ref) --# global in out Dict; --# derives Dict, --# The_Generic_Parameter from Comp_Unit, --# Declaration, --# Dict, --# The_Generic_Unit, --# The_Object, --# Type_Mark; is separate; -------------------------------------------------------------------------------- procedure Add_Generic_Formal_Parameter (Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Generic_Unit : in Symbol; The_Type : in Symbol; The_Object : in Symbol) is The_Generic_Parameter : RawDict.Generic_Parameter_Info_Ref; pragma Unreferenced (The_Generic_Parameter); begin --# accept Flow, 10, The_Generic_Parameter, "Expected ineffective assignment to The_Generic_Parameter"; Add_Generic_Formal_Parameter_Local (Comp_Unit => Comp_Unit, Declaration => Declaration, The_Generic_Unit => RawDict.Get_Generic_Unit_Info_Ref (Generic_Unit), -- GAA External Type_Mark => RawDict.Get_Type_Info_Ref (The_Type), -- GAA External The_Object => RawDict.Get_Constant_Info_Ref (The_Object), -- GAA External The_Generic_Parameter => The_Generic_Parameter); --# end accept; --# accept Flow, 33, The_Generic_Parameter, "Expected The_Generic_Parameter to be neither referenced nor exported"; end Add_Generic_Formal_Parameter; -------------------------------------------------------------------------------- -- Add_Generic_Type_Association -------------------------------------------------------------------------------- procedure AddGenericTypeAssociation (SubprogramOrPackage : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; FormalSym : in Symbol; ActualSym : in Symbol) is procedure Add_Generic_Type_Association (The_Subprogram : in RawDict.Subprogram_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Formal_Type : in RawDict.Type_Info_Ref; Actual_Type : in RawDict.Type_Info_Ref) --# global in out Dict; --# derives Dict from *, --# Actual_Type, --# Comp_Unit, --# Declaration, --# Formal_Type, --# The_Subprogram; is The_Generic_Association, Previous : RawDict.Generic_Association_Info_Ref; begin RawDict.Create_Generic_Type_Association (Formal_Type => Formal_Type, Actual_Type => Actual_Type, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, The_Generic_Association => The_Generic_Association); Previous := RawDict.Get_Subprogram_Last_Generic_Association (The_Subprogram => The_Subprogram); if Previous = RawDict.Null_Generic_Association_Info_Ref then RawDict.Set_Subprogram_First_Generic_Association (The_Subprogram => The_Subprogram, The_Generic_Association => The_Generic_Association); 
else RawDict.Set_Next_Generic_Association (The_Generic_Association => Previous, Next => The_Generic_Association); end if; RawDict.Set_Subprogram_Last_Generic_Association (The_Subprogram => The_Subprogram, The_Generic_Association => The_Generic_Association); end Add_Generic_Type_Association; begin -- AddGenericTypeAssociation Add_Generic_Type_Association (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (SubprogramOrPackage), -- GAA External Comp_Unit => Comp_Unit, Declaration => Declaration, Formal_Type => RawDict.Get_Type_Info_Ref (FormalSym), -- GAA External Actual_Type => RawDict.Get_Type_Info_Ref (ActualSym)); -- GAA External end AddGenericTypeAssociation; -------------------------------------------------------------------------------- -- Add_Generic_Object_Association -------------------------------------------------------------------------------- procedure AddGenericObjectAssociation (SubprogramOrPackage : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; FormalSym : in Symbol; ActualSym : in Symbol) is procedure Add_Generic_Object_Association (The_Subprogram : in RawDict.Subprogram_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Formal_Object : in RawDict.Generic_Parameter_Info_Ref; Actual_Object : in RawDict.Constant_Info_Ref) --# global in out Dict; --# derives Dict from *, --# Actual_Object, --# Comp_Unit, --# Declaration, --# Formal_Object, --# The_Subprogram; is The_Generic_Association, Previous : RawDict.Generic_Association_Info_Ref; begin RawDict.Create_Generic_Object_Association (Formal_Object => RawDict.Get_Generic_Parameter_Object (The_Generic_Parameter => Formal_Object), Actual_Object => Actual_Object, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, The_Generic_Association => The_Generic_Association); Previous := RawDict.Get_Subprogram_Last_Generic_Association (The_Subprogram => The_Subprogram); if Previous = RawDict.Null_Generic_Association_Info_Ref then RawDict.Set_Subprogram_First_Generic_Association (The_Subprogram => The_Subprogram, The_Generic_Association => The_Generic_Association); else RawDict.Set_Next_Generic_Association (The_Generic_Association => Previous, Next => The_Generic_Association); end if; RawDict.Set_Subprogram_Last_Generic_Association (The_Subprogram => The_Subprogram, The_Generic_Association => The_Generic_Association); end Add_Generic_Object_Association; begin -- AddGenericObjectAssociation Add_Generic_Object_Association (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (SubprogramOrPackage), -- GAA External Comp_Unit => Comp_Unit, Declaration => Declaration, Formal_Object => RawDict.Get_Generic_Parameter_Info_Ref (FormalSym), -- GAA External Actual_Object => RawDict.Get_Constant_Info_Ref (ActualSym)); -- GAA External end AddGenericObjectAssociation; -------------------------------------------------------------------------------- -- Add_Known_Discriminant -------------------------------------------------------------------------------- procedure AddKnownDiscriminant (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; ProtectedOrTaskType : in Symbol; TypeMark : in Symbol) is procedure Add_Known_Discriminant (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; The_Protected_Or_Task_Type : in RawDict.Type_Info_Ref; Type_Mark : in RawDict.Type_Info_Ref) --# global in out Dict; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# Name, --# 
The_Protected_Or_Task_Type, --# Type_Mark; is Discriminant, Previous : Symbol; begin RawDict.CreateKnownDiscriminant (Name => Name, Protected_Type => The_Protected_Or_Task_Type, Type_Mark => Type_Mark, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, KnownDiscriminant => Discriminant); case RawDict.Get_Type_Discriminant (Type_Mark => The_Protected_Or_Task_Type) is when Protected_Type_Item => Previous := RawDict.Get_Protected_Type_Last_Discriminant (The_Protected_Type => The_Protected_Or_Task_Type); if Previous = NullSymbol then RawDict.Set_Protected_Type_First_Discriminant (The_Protected_Type => The_Protected_Or_Task_Type, Discriminant => Discriminant); else RawDict.SetNextDiscriminant (Previous, Discriminant); end if; RawDict.Set_Protected_Type_Last_Discriminant (The_Protected_Type => The_Protected_Or_Task_Type, Discriminant => Discriminant); when Task_Type_Item => Previous := RawDict.Get_Task_Type_Last_Discriminant (The_Task_Type => The_Protected_Or_Task_Type); if Previous = NullSymbol then RawDict.Set_Task_Type_First_Discriminant (The_Task_Type => The_Protected_Or_Task_Type, Discriminant => Discriminant); else RawDict.SetNextDiscriminant (Previous, Discriminant); end if; RawDict.Set_Task_Type_Last_Discriminant (The_Task_Type => The_Protected_Or_Task_Type, Discriminant => Discriminant); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AddKnownDiscriminant"); end case; end Add_Known_Discriminant; begin -- AddKnownDiscriminant Add_Known_Discriminant (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, The_Protected_Or_Task_Type => RawDict.Get_Type_Info_Ref (ProtectedOrTaskType), -- GAA External Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark)); -- GAA External end AddKnownDiscriminant; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- procedure SetDiscriminantSetsPriority (TheDiscriminant : in Symbol) is begin RawDict.SetDiscriminantSetsPriority (TheDiscriminant); end SetDiscriminantSetsPriority; -------------------------------------------------------------------------------- -- Add_Discriminant_Constraint_Static_Value -------------------------------------------------------------------------------- procedure AddDiscriminantConstraintStaticValue (ProtectedOrTaskSubtype : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; TheValue : in LexTokenManager.Lex_String) is procedure Add_Discriminant_Constraint_Static_Value (The_Protected_Or_Task_Subtype : in RawDict.Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; The_Value : in LexTokenManager.Lex_String) --# global in out Dict; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# The_Protected_Or_Task_Subtype, --# The_Value; is DiscriminantConstraint : Symbol; SubtypeInfo : Symbol; Previous : Symbol; begin -- create DiscriminantConstraint record RawDict.CreateDiscriminantConstraint (Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, TheDiscriminantConstraint => DiscriminantConstraint); RawDict.SetDiscriminantConstraintStaticValue (DiscriminantConstraint, The_Value); -- link to subtype SubtypeInfo := RawDict.Get_Type_Ancillary_Fields (Type_Mark => The_Protected_Or_Task_Subtype); -- assuming the precondition is met, SubtypeInfo is the symbol of the record containing the priority, linked -- list of constraints etc. 
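-- Append the new constraint to the subtype's constraint list: if the list is empty the
-- first-constraint field of SubtypeInfo is set, otherwise the constraint is chained onto
-- the current last element; the last-constraint field is then updated in either case.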
Previous := RawDict.GetSubtypeInfoLastConstraint (SubtypeInfo); if Previous = NullSymbol then RawDict.SetSubtypeInfoFirstConstraint (SubtypeInfo, DiscriminantConstraint); else RawDict.SetNextDiscriminantConstraint (Previous, DiscriminantConstraint); end if; RawDict.SetSubtypeInfoLastConstraint (SubtypeInfo, DiscriminantConstraint); end Add_Discriminant_Constraint_Static_Value; begin -- AddDiscriminantConstraintStaticValue Add_Discriminant_Constraint_Static_Value (The_Protected_Or_Task_Subtype => RawDict.Get_Type_Info_Ref (ProtectedOrTaskSubtype), -- GAA External Comp_Unit => Comp_Unit, Declaration => Declaration, The_Value => TheValue); end AddDiscriminantConstraintStaticValue; -------------------------------------------------------------------------------- -- Add_Discriminant_Constraint_Accessed_Object -------------------------------------------------------------------------------- procedure AddDiscriminantConstraintAccessedObject (ProtectedOrTaskSubtype : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; TheObject : in Symbol) is procedure Add_Discriminant_Constraint_Accessed_Object (The_Protected_Or_Task_Subtype : in RawDict.Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; The_Object : in Symbol) --# global in out Dict; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# The_Object, --# The_Protected_Or_Task_Subtype; is DiscriminantConstraint : Symbol; SubtypeInfo : Symbol; Previous : Symbol; begin -- create DiscriminantConstraint record RawDict.CreateDiscriminantConstraint (Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, TheDiscriminantConstraint => DiscriminantConstraint); RawDict.SetDiscriminantConstraintAccessedObject (DiscriminantConstraint, The_Object); -- link to subtype SubtypeInfo := RawDict.Get_Type_Ancillary_Fields (Type_Mark => The_Protected_Or_Task_Subtype); -- assuming the precondition is met, SubtypeInfo is the symbol of the record containing the priority, linked -- list of constraints etc. 
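-- Append to the constraint list using the same first/last-pointer idiom as in
-- Add_Discriminant_Constraint_Static_Value above.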
Previous := RawDict.GetSubtypeInfoLastConstraint (SubtypeInfo); if Previous = NullSymbol then RawDict.SetSubtypeInfoFirstConstraint (SubtypeInfo, DiscriminantConstraint); else RawDict.SetNextDiscriminantConstraint (Previous, DiscriminantConstraint); end if; RawDict.SetSubtypeInfoLastConstraint (SubtypeInfo, DiscriminantConstraint); end Add_Discriminant_Constraint_Accessed_Object; begin -- AddDiscriminantConstraintAccessedObject Add_Discriminant_Constraint_Accessed_Object (The_Protected_Or_Task_Subtype => RawDict.Get_Type_Info_Ref (ProtectedOrTaskSubtype), -- GAA External Comp_Unit => Comp_Unit, Declaration => Declaration, The_Object => TheObject); end AddDiscriminantConstraintAccessedObject; -------------------------------------------------------------------------------- -- Set_Subtype_Priority -------------------------------------------------------------------------------- procedure SetSubtypePriority (ProtectedOrTaskSubtype : in Symbol; ThePriority : in LexTokenManager.Lex_String) is procedure Set_Subtype_Priority (The_Protected_Or_Task_Subtype : in RawDict.Type_Info_Ref; The_Priority : in LexTokenManager.Lex_String) --# global in out Dict; --# derives Dict from *, --# The_Priority, --# The_Protected_Or_Task_Subtype; is SubtypeInfo : Symbol; begin SubtypeInfo := RawDict.Get_Type_Ancillary_Fields (Type_Mark => The_Protected_Or_Task_Subtype); RawDict.SetSubtypeInfoPriority (SubtypeInfo, The_Priority); end Set_Subtype_Priority; begin -- SetSubtypePriority Set_Subtype_Priority (The_Protected_Or_Task_Subtype => RawDict.Get_Type_Info_Ref (ProtectedOrTaskSubtype), -- GAA External The_Priority => ThePriority); end SetSubtypePriority; -------------------------------------------------------------------------------- -- Has_Body_Stub -------------------------------------------------------------------------------- function Has_Package_Body_Stub (The_Package : RawDict.Package_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Package_Body (The_Package => The_Package) /= RawDict.Null_Declaration_Info_Ref and then not RawDict.Get_Package_Has_Proper_Body (The_Package => The_Package); end Has_Package_Body_Stub; -------------------------------------------------------------------------------- function Has_Protected_Type_Body_Stub (The_Protected_Type : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Protected_Type_Body (The_Protected_Type => The_Protected_Type) /= RawDict.Null_Declaration_Info_Ref and then not RawDict.Get_Protected_Type_Has_Proper_Body (The_Protected_Type => The_Protected_Type); end Has_Protected_Type_Body_Stub; -------------------------------------------------------------------------------- function Has_Task_Type_Body_Stub (The_Task_Type : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Task_Type_Body (The_Task_Type => The_Task_Type) /= RawDict.Null_Declaration_Info_Ref and then not RawDict.Get_Task_Type_Has_Proper_Body (The_Task_Type => The_Task_Type); end Has_Task_Type_Body_Stub; -------------------------------------------------------------------------------- function Has_Subprogram_Body_Stub (The_Subprogram : RawDict.Subprogram_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Subprogram_Body (The_Subprogram => The_Subprogram) /= RawDict.Null_Declaration_Info_Ref and then not RawDict.Get_Subprogram_Has_Proper_Body (The_Subprogram => The_Subprogram); end Has_Subprogram_Body_Stub; -------------------------------------------------------------------------------- function 
HasBodyStub (CompilationUnit : Symbol) return Boolean is Result : Boolean; begin case RawDict.GetSymbolDiscriminant (CompilationUnit) is when Package_Symbol => Result := Has_Package_Body_Stub (The_Package => RawDict.Get_Package_Info_Ref (Item => CompilationUnit)); -- GAA External when Type_Symbol => case RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => CompilationUnit)) is -- GAA External when Protected_Type_Item => Result := Has_Protected_Type_Body_Stub (The_Protected_Type => RawDict.Get_Type_Info_Ref (Item => CompilationUnit)); -- GAA External when Task_Type_Item => Result := Has_Task_Type_Body_Stub (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => CompilationUnit)); -- GAA External when others => -- non-exec code Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.HasBodyStub"); end case; when Subprogram_Symbol => Result := Has_Subprogram_Body_Stub (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => CompilationUnit)); -- GAA External when Variable_Symbol => Result := False; when others => -- non-exec code Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.HasBodyStub"); end case; return Result; end HasBodyStub; -------------------------------------------------------------------------------- -- Add_Body -------------------------------------------------------------------------------- procedure Add_Subprogram_Body (The_Subprogram : in RawDict.Subprogram_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; The_Body : in Location; Hidden : in Boolean) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Hidden, --# The_Body, --# The_Subprogram & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Dict, --# Hidden, --# LexTokenManager.State, --# The_Body, --# The_Subprogram; is Scope : Scopes; The_Declaration : RawDict.Declaration_Info_Ref; begin if not Has_Subprogram_Body_Stub (The_Subprogram => The_Subprogram) then Scope := Get_Subprogram_Scope (The_Subprogram => The_Subprogram); if Get_Visibility (Scope => Scope) = Visible or else Get_Visibility (Scope => Scope) = Privat then Scope := GetLocalScope (Scope); end if; Add_Declaration (Comp_Unit => Comp_Unit, Loc => The_Body, Scope => Scope, Context => ProgramContext, The_Declaration => The_Declaration); RawDict.Set_Declaration_Item (The_Declaration => The_Declaration, Item => RawDict.Get_Subprogram_Symbol (The_Subprogram)); RawDict.Set_Subprogram_Body (The_Subprogram => The_Subprogram, The_Body => The_Declaration); end if; RawDict.Set_Subprogram_Has_Proper_Body (The_Subprogram => The_Subprogram, Is_Hidden => Hidden); if SPARK_IO.Is_Open (Dict.TemporaryFile) then if Hidden then Write_String (Dict.TemporaryFile, "hidden "); end if; Write_String (Dict.TemporaryFile, "body of "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Subprogram_Symbol (The_Subprogram)); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => The_Body); Write_Line (Dict.TemporaryFile, " ;"); end if; end Add_Subprogram_Body; -------------------------------------------------------------------------------- procedure AddBody (CompilationUnit : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; TheBody : in Location; Hidden : in Boolean) is procedure Add_Package_Body (The_Package : in RawDict.Package_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; The_Body : in Location; 
Hidden : in Boolean) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# The_Body, --# The_Package & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Dict, --# Hidden, --# LexTokenManager.State, --# The_Body, --# The_Package; is Scope : Scopes; The_Declaration : RawDict.Declaration_Info_Ref; begin if not Has_Package_Body_Stub (The_Package => The_Package) then Scope := Get_Package_Scope (The_Package => The_Package); if RawDict.Get_Package_Parent (The_Package => The_Package) /= RawDict.Null_Package_Info_Ref then -- adding child package body RawDict.Create_Declaration (Context => ProgramContext, Scope => Scope, Comp_Unit => Comp_Unit, Loc => The_Body.Start_Position, The_Declaration => The_Declaration); else if Get_Visibility (Scope => Scope) = Visible or else Get_Visibility (Scope => Scope) = Privat then Scope := GetLocalScope (Scope); end if; Add_Declaration (Comp_Unit => Comp_Unit, Loc => The_Body, Scope => Scope, Context => ProgramContext, The_Declaration => The_Declaration); end if; RawDict.Set_Declaration_Item (The_Declaration => The_Declaration, Item => RawDict.Get_Package_Symbol (The_Package)); RawDict.Set_Package_Body (The_Package => The_Package, The_Body => The_Declaration); end if; RawDict.Set_Package_Has_Proper_Body (The_Package => The_Package); if SPARK_IO.Is_Open (Dict.TemporaryFile) then if Hidden then Write_String (Dict.TemporaryFile, "hidden "); end if; Write_String (Dict.TemporaryFile, "body of "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Package_Symbol (The_Package)); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => The_Body); Write_Line (Dict.TemporaryFile, " ;"); end if; end Add_Package_Body; -------------------------------------------------------------------------------- procedure Add_Protected_Type_Body (The_Protected_Type : in RawDict.Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; The_Body : in Location; Hidden : in Boolean) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# The_Body, --# The_Protected_Type & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Dict, --# Hidden, --# LexTokenManager.State, --# The_Body, --# The_Protected_Type; is Scope : Scopes; The_Declaration : RawDict.Declaration_Info_Ref; begin if not Has_Protected_Type_Body_Stub (The_Protected_Type => The_Protected_Type) then Scope := Get_Type_Scope (Type_Mark => The_Protected_Type); if Get_Visibility (Scope => Scope) = Visible or else Get_Visibility (Scope => Scope) = Privat then Scope := GetLocalScope (Scope); end if; Add_Declaration (Comp_Unit => Comp_Unit, Loc => The_Body, Scope => Scope, Context => ProgramContext, The_Declaration => The_Declaration); RawDict.Set_Declaration_Item (The_Declaration => The_Declaration, Item => RawDict.Get_Type_Symbol (The_Protected_Type)); RawDict.Set_Protected_Type_Body (The_Protected_Type => The_Protected_Type, The_Body => The_Declaration); end if; RawDict.Set_Protected_Type_Has_Proper_Body (The_Protected_Type => The_Protected_Type); if SPARK_IO.Is_Open (Dict.TemporaryFile) then if Hidden then Write_String (Dict.TemporaryFile, "hidden "); end if; Write_String (Dict.TemporaryFile, "body of "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Type_Symbol (The_Protected_Type)); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => The_Body); Write_Line (Dict.TemporaryFile, " ;"); end 
if; end Add_Protected_Type_Body; -------------------------------------------------------------------------------- procedure Add_Task_Type_Body (The_Task_Type : in RawDict.Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; The_Body : in Location; Hidden : in Boolean) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Hidden, --# The_Body, --# The_Task_Type & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Dict, --# Hidden, --# LexTokenManager.State, --# The_Body, --# The_Task_Type; is Scope : Scopes; The_Declaration : RawDict.Declaration_Info_Ref; begin if not Has_Task_Type_Body_Stub (The_Task_Type => The_Task_Type) then Scope := Get_Type_Scope (Type_Mark => The_Task_Type); if Get_Visibility (Scope => Scope) = Visible or else Get_Visibility (Scope => Scope) = Privat then Scope := GetLocalScope (Scope); end if; Add_Declaration (Comp_Unit => Comp_Unit, Loc => The_Body, Scope => Scope, Context => ProgramContext, The_Declaration => The_Declaration); RawDict.Set_Declaration_Item (The_Declaration => The_Declaration, Item => RawDict.Get_Type_Symbol (The_Task_Type)); RawDict.Set_Task_Type_Body (The_Task_Type => The_Task_Type, The_Body => The_Declaration); end if; RawDict.Set_Task_Type_Has_Proper_Body (The_Task_Type => The_Task_Type, Is_Hidden => Hidden); if SPARK_IO.Is_Open (Dict.TemporaryFile) then if Hidden then Write_String (Dict.TemporaryFile, "hidden "); end if; Write_String (Dict.TemporaryFile, "body of "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Type_Symbol (The_Task_Type)); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => The_Body); Write_Line (Dict.TemporaryFile, " ;"); end if; end Add_Task_Type_Body; begin -- AddBody case RawDict.GetSymbolDiscriminant (CompilationUnit) is when Package_Symbol => Add_Package_Body (The_Package => RawDict.Get_Package_Info_Ref (Item => CompilationUnit), -- GAA External Comp_Unit => Comp_Unit, The_Body => TheBody, Hidden => Hidden); when Type_Symbol => case RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => CompilationUnit)) is -- GAA External when Protected_Type_Item => Add_Protected_Type_Body (The_Protected_Type => RawDict.Get_Type_Info_Ref (Item => CompilationUnit), -- GAA External Comp_Unit => Comp_Unit, The_Body => TheBody, Hidden => Hidden); when Task_Type_Item => Add_Task_Type_Body (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => CompilationUnit), -- GAA External Comp_Unit => Comp_Unit, The_Body => TheBody, Hidden => Hidden); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AddBody"); end case; when Subprogram_Symbol => Add_Subprogram_Body (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => CompilationUnit), -- GAA External Comp_Unit => Comp_Unit, The_Body => TheBody, Hidden => Hidden); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AddBody"); end case; end AddBody; -------------------------------------------------------------------------------- -- Add_Body_Stub -------------------------------------------------------------------------------- procedure AddBodyStub (CompilationUnit : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; BodyStub : in Location) is procedure Add_Package_Body_Stub (The_Package : in RawDict.Package_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Body_Stub : in 
Location) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Body_Stub, --# Comp_Unit, --# The_Package & --# SPARK_IO.File_Sys from *, --# Body_Stub, --# Comp_Unit, --# Dict, --# LexTokenManager.State, --# The_Package; is Scope : Scopes; The_Declaration : RawDict.Declaration_Info_Ref; begin Scope := Get_Package_Scope (The_Package => The_Package); if Get_Visibility (Scope => Scope) = Visible or else Get_Visibility (Scope => Scope) = Privat then Scope := GetLocalScope (Scope); end if; Add_Declaration (Comp_Unit => Comp_Unit, Loc => Body_Stub, Scope => Scope, Context => ProgramContext, The_Declaration => The_Declaration); RawDict.Set_Declaration_Item (The_Declaration => The_Declaration, Item => RawDict.Get_Package_Symbol (The_Package)); RawDict.Set_Package_Body (The_Package => The_Package, The_Body => The_Declaration); if SPARK_IO.Is_Open (Dict.TemporaryFile) then Write_String (Dict.TemporaryFile, "body stub of "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Package_Symbol (The_Package)); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => Body_Stub); Write_Line (Dict.TemporaryFile, " ;"); end if; end Add_Package_Body_Stub; -------------------------------------------------------------------------------- procedure Add_Protected_Type_Body_Stub (The_Protected_Type : in RawDict.Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Body_Stub : in Location) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Body_Stub, --# Comp_Unit, --# The_Protected_Type & --# SPARK_IO.File_Sys from *, --# Body_Stub, --# Comp_Unit, --# Dict, --# LexTokenManager.State, --# The_Protected_Type; is Scope : Scopes; The_Declaration : RawDict.Declaration_Info_Ref; begin Scope := Get_Type_Scope (Type_Mark => The_Protected_Type); if Get_Visibility (Scope => Scope) = Visible or else Get_Visibility (Scope => Scope) = Privat then Scope := GetLocalScope (Scope); end if; Add_Declaration (Comp_Unit => Comp_Unit, Loc => Body_Stub, Scope => Scope, Context => ProgramContext, The_Declaration => The_Declaration); RawDict.Set_Declaration_Item (The_Declaration => The_Declaration, Item => RawDict.Get_Type_Symbol (The_Protected_Type)); RawDict.Set_Protected_Type_Body (The_Protected_Type => The_Protected_Type, The_Body => The_Declaration); if SPARK_IO.Is_Open (Dict.TemporaryFile) then Write_String (Dict.TemporaryFile, "body stub of "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Type_Symbol (The_Protected_Type)); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => Body_Stub); Write_Line (Dict.TemporaryFile, " ;"); end if; end Add_Protected_Type_Body_Stub; -------------------------------------------------------------------------------- procedure Add_Task_Type_Body_Stub (The_Task_Type : in RawDict.Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Body_Stub : in Location) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Body_Stub, --# Comp_Unit, --# The_Task_Type & --# SPARK_IO.File_Sys from *, --# Body_Stub, --# Comp_Unit, --# Dict, --# LexTokenManager.State, --# The_Task_Type; is Scope : Scopes; The_Declaration : RawDict.Declaration_Info_Ref; begin Scope := Get_Type_Scope (Type_Mark => The_Task_Type); if Get_Visibility (Scope => Scope) = Visible or else Get_Visibility (Scope => Scope) = Privat then Scope := 
GetLocalScope (Scope); end if; Add_Declaration (Comp_Unit => Comp_Unit, Loc => Body_Stub, Scope => Scope, Context => ProgramContext, The_Declaration => The_Declaration); RawDict.Set_Declaration_Item (The_Declaration => The_Declaration, Item => RawDict.Get_Type_Symbol (The_Task_Type)); RawDict.Set_Task_Type_Body (The_Task_Type => The_Task_Type, The_Body => The_Declaration); if SPARK_IO.Is_Open (Dict.TemporaryFile) then Write_String (Dict.TemporaryFile, "body stub of "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Type_Symbol (The_Task_Type)); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => Body_Stub); Write_Line (Dict.TemporaryFile, " ;"); end if; end Add_Task_Type_Body_Stub; -------------------------------------------------------------------------------- procedure Add_Subprogram_Body_Stub (The_Subprogram : in RawDict.Subprogram_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Body_Stub : in Location) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Body_Stub, --# Comp_Unit, --# The_Subprogram & --# SPARK_IO.File_Sys from *, --# Body_Stub, --# Comp_Unit, --# Dict, --# LexTokenManager.State, --# The_Subprogram; is Scope : Scopes; The_Declaration : RawDict.Declaration_Info_Ref; begin Scope := Get_Subprogram_Scope (The_Subprogram => The_Subprogram); if Get_Visibility (Scope => Scope) = Visible or else Get_Visibility (Scope => Scope) = Privat then Scope := GetLocalScope (Scope); end if; Add_Declaration (Comp_Unit => Comp_Unit, Loc => Body_Stub, Scope => Scope, Context => ProgramContext, The_Declaration => The_Declaration); RawDict.Set_Declaration_Item (The_Declaration => The_Declaration, Item => RawDict.Get_Subprogram_Symbol (The_Subprogram)); RawDict.Set_Subprogram_Body (The_Subprogram => The_Subprogram, The_Body => The_Declaration); if SPARK_IO.Is_Open (Dict.TemporaryFile) then Write_String (Dict.TemporaryFile, "body stub of "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Subprogram_Symbol (The_Subprogram)); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => Body_Stub); Write_Line (Dict.TemporaryFile, " ;"); end if; end Add_Subprogram_Body_Stub; begin -- AddBodyStub case RawDict.GetSymbolDiscriminant (CompilationUnit) is when Package_Symbol => Add_Package_Body_Stub (The_Package => RawDict.Get_Package_Info_Ref (Item => CompilationUnit), -- GAA External Comp_Unit => Comp_Unit, Body_Stub => BodyStub); when Type_Symbol => case RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => CompilationUnit)) is -- GAA External when Protected_Type_Item => Add_Protected_Type_Body_Stub (The_Protected_Type => RawDict.Get_Type_Info_Ref (Item => CompilationUnit), -- GAA External Comp_Unit => Comp_Unit, Body_Stub => BodyStub); when Task_Type_Item => Add_Task_Type_Body_Stub (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => CompilationUnit), -- GAA External Comp_Unit => Comp_Unit, Body_Stub => BodyStub); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AddBodyStub"); end case; when Subprogram_Symbol => Add_Subprogram_Body_Stub (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => CompilationUnit), -- GAA External Comp_Unit => Comp_Unit, Body_Stub => BodyStub); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AddBodyStub"); end case; end 
AddBodyStub; -------------------------------------------------------------------------------- -- Add_Subprogram_Instantiation -------------------------------------------------------------------------------- procedure AddSubprogramInstantiation (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; TheGeneric : in Symbol; Specification : in Location; Scope : in Scopes; Context : in Contexts; Subprogram : out Symbol) is The_Subprogram : RawDict.Subprogram_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Subprogram_Instantiation (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; The_Generic_Subprogram : in RawDict.Subprogram_Info_Ref; Specification : in Location; Scope : in Scopes; Context : in Contexts; The_Subprogram : out RawDict.Subprogram_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Context, --# Declaration, --# Name, --# Scope, --# Specification, --# The_Generic_Subprogram & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope, --# Specification, --# The_Generic_Subprogram & --# The_Subprogram from Comp_Unit, --# Context, --# Dict, --# Name, --# Scope, --# Specification; --# post The_Subprogram /= RawDict.Null_Subprogram_Info_Ref; is begin Add_Subprogram (Name => Name, Comp_Unit => Comp_Unit, Specification => Specification, Scope => Scope, Context => Context, The_Subprogram => The_Subprogram); RawDict.Set_Subprogram_Instantiation_Of (The_Subprogram => The_Subprogram, The_Generic => The_Generic_Subprogram); if RawDict.Get_Subprogram_Has_Derives_Annotation (The_Subprogram => The_Generic_Subprogram) then RawDict.Set_Subprogram_Has_Derives_Annotation (The_Subprogram => The_Subprogram); end if; -- Mark as having hidden body so Examiner won't demand a body later on Add_Subprogram_Body (The_Subprogram => The_Subprogram, Comp_Unit => Comp_Unit, The_Body => Declaration, Hidden => True); end Add_Subprogram_Instantiation; begin -- AddSubprogramInstantiation Add_Subprogram_Instantiation (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, The_Generic_Subprogram => RawDict.Get_Subprogram_Info_Ref (TheGeneric), -- GAA External Specification => Specification, Scope => Scope, Context => Context, The_Subprogram => The_Subprogram); Subprogram := RawDict.Get_Subprogram_Symbol (The_Subprogram); -- GAA External end AddSubprogramInstantiation; -------------------------------------------------------------------------------- -- Add_Main_Program -------------------------------------------------------------------------------- procedure AddMainProgram (Subprogram : in Symbol; Annotation : in Location) is procedure Add_Main_Program (The_Subprogram : in RawDict.Subprogram_Info_Ref; Annotation : in Location) --# global in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# The_Subprogram & --# SPARK_IO.File_Sys from *, --# Annotation, --# Dict, --# The_Subprogram; is begin Dict.Main.Subprogram := The_Subprogram; if SPARK_IO.Is_Open (Dict.TemporaryFile) then Write_String (Dict.TemporaryFile, "main program annotation is at "); Write_Location (File => Dict.TemporaryFile, Loc => Annotation); Write_Line (Dict.TemporaryFile, " ;"); end if; end Add_Main_Program; begin -- AddMainProgram Add_Main_Program (The_Subprogram => 
RawDict.Get_Subprogram_Info_Ref (Subprogram), -- GAA External Annotation => Annotation); end AddMainProgram; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- procedure AddDependencyRelation (Abstraction : in Abstractions; TheProcedure : in Symbol; DependencyRelation : in Location) is begin if Abstraction = IsRefined then case RawDict.GetSymbolDiscriminant (TheProcedure) is when Subprogram_Symbol => RawDict.Set_Subprogram_Has_Second_Annotation (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheProcedure)); -- GAA External when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheProcedure)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AddDependencyRelation"); RawDict.Set_Task_Type_Has_Second_Annotation (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => TheProcedure)); -- GAA External when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AddDependencyRelation"); end case; end if; if SPARK_IO.Is_Open (Dict.TemporaryFile) then if Abstraction = IsRefined then Write_String (Dict.TemporaryFile, "refined "); end if; Write_String (Dict.TemporaryFile, "dependency relation of "); Write_Name (File => Dict.TemporaryFile, Item => TheProcedure); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => DependencyRelation); Write_Line (Dict.TemporaryFile, " ;"); end if; end AddDependencyRelation; -------------------------------------------------------------------------------- procedure Add_Renaming_Declaration (The_Subprogram : in RawDict.Subprogram_Info_Ref; The_Operator : in RawDict.Operator_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes) --# global in out Dict; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# Scope, --# The_Operator, --# The_Subprogram; is separate; -------------------------------------------------------------------------------- -- Rename_Subprogram -------------------------------------------------------------------------------- procedure RenameSubprogram (Subprogram : in Symbol; SubprogramReference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes) is procedure Rename_Subprogram (The_Subprogram : in RawDict.Subprogram_Info_Ref; Subprogram_Reference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# Scope, --# The_Subprogram & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# Scope, --# Subprogram_Reference, --# The_Subprogram; is begin Add_Renaming_Declaration (The_Subprogram => The_Subprogram, The_Operator => RawDict.Null_Operator_Info_Ref, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope); Write_Subprogram_Declaration (The_Subprogram => The_Subprogram, Is_Renaming => True, Scope => Scope, Declaration => Declaration); AddOtherReference (RawDict.Get_Subprogram_Symbol (The_Subprogram), GetRegion (Scope), Subprogram_Reference); end Rename_Subprogram; begin -- RenameSubprogram Rename_Subprogram (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Subprogram), -- GAA 
External Subprogram_Reference => SubprogramReference, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope); end RenameSubprogram; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- procedure WriteOperatorRenamingDeclaration (The_Operator : in RawDict.Operator_Info_Ref; Scope : in Scopes) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# LexTokenManager.State, --# Scope, --# The_Operator; is separate; -------------------------------------------------------------------------------- -- Rename_Unary_Operator -------------------------------------------------------------------------------- procedure RenameUnaryOperator (Name : in SP_Symbols.SP_Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Operand : in Symbol; Scope : in Scopes; Op_Sym : out Symbol) is The_Operator : RawDict.Operator_Info_Ref; -------------------------------------------------------------------------------- procedure Rename_Unary_Operator (Name : in SP_Symbols.SP_Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Operand : in RawDict.Type_Info_Ref; Scope : in Scopes; The_Operator : out RawDict.Operator_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# Name, --# Operand, --# Scope & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Operand, --# Scope & --# The_Operator from Comp_Unit, --# Declaration, --# Dict, --# Name, --# Operand; --# post The_Operator /= RawDict.Null_Operator_Info_Ref; is begin RawDict.Create_Unary_Operator (Name => Name, Operand => Operand, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, The_Operator => The_Operator); Add_Renaming_Declaration (The_Subprogram => RawDict.Null_Subprogram_Info_Ref, The_Operator => The_Operator, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope); WriteOperatorRenamingDeclaration (The_Operator => The_Operator, Scope => Scope); end Rename_Unary_Operator; begin -- RenameUnaryOperator Rename_Unary_Operator (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Operand => RawDict.Get_Type_Info_Ref (Operand), -- GAA External Scope => Scope, The_Operator => The_Operator); Op_Sym := RawDict.Get_Operator_Symbol (The_Operator); -- GAA External end RenameUnaryOperator; -------------------------------------------------------------------------------- -- RenameBinaryOperator -------------------------------------------------------------------------------- procedure RenameBinaryOperator (Name : in SP_Symbols.SP_Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Left : in Symbol; Right : in Symbol; Scope : in Scopes; Op_Sym : out Symbol) is The_Operator : RawDict.Operator_Info_Ref; -------------------------------------------------------------------------------- procedure Rename_Binary_Operator (Name : in SP_Symbols.SP_Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Left : in RawDict.Type_Info_Ref; Right : in RawDict.Type_Info_Ref; Scope : in Scopes; The_Operator : out RawDict.Operator_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# Left, --# Name, --# 
Right, --# Scope & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# Left, --# LexTokenManager.State, --# Name, --# Right, --# Scope & --# The_Operator from Comp_Unit, --# Declaration, --# Dict, --# Left, --# Name, --# Right; --# post The_Operator /= RawDict.Null_Operator_Info_Ref; is begin RawDict.Create_Binary_Operator (Name => Name, Left => Left, Right => Right, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, The_Operator => The_Operator); Add_Renaming_Declaration (The_Subprogram => RawDict.Null_Subprogram_Info_Ref, The_Operator => The_Operator, Comp_Unit => Comp_Unit, Declaration => Declaration, Scope => Scope); WriteOperatorRenamingDeclaration (The_Operator => The_Operator, Scope => Scope); end Rename_Binary_Operator; begin -- RenameBinaryOperator Rename_Binary_Operator (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Left => RawDict.Get_Type_Info_Ref (Left), -- GAA External Right => RawDict.Get_Type_Info_Ref (Right), -- GAA External Scope => Scope, The_Operator => The_Operator); Op_Sym := RawDict.Get_Operator_Symbol (The_Operator); -- GAA External end RenameBinaryOperator; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- procedure AddGlobalAnnotation (Abstraction : in Abstractions; Subprogram : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Annotation : in Location) is The_Subprogram : RawDict.Subprogram_Info_Ref; The_Proof_Function : Symbol; begin if Abstraction = IsRefined and then RawDict.GetSymbolDiscriminant (Subprogram) = Subprogram_Symbol then The_Subprogram := RawDict.Get_Subprogram_Info_Ref (Item => Subprogram); -- GAA External if not RawDict.Get_Subprogram_Has_Second_Annotation (The_Subprogram => The_Subprogram) then RawDict.Set_Subprogram_Has_Second_Annotation (The_Subprogram => The_Subprogram); if IsFunction (Subprogram) and then Get_Subprogram_Context (The_Subprogram => The_Subprogram) = ProgramContext then RawDict.CreateImplicitProofFunction (Ada_Function => The_Subprogram, Comp_Unit => Comp_Unit, Loc => Annotation.Start_Position, ProofFunction => The_Proof_Function); RawDict.Set_Subprogram_Implicit_Proof_Function (The_Subprogram => The_Subprogram, Abstraction => IsRefined, The_Proof_Function => The_Proof_Function); end if; end if; end if; if SPARK_IO.Is_Open (Dict.TemporaryFile) then if Abstraction = IsRefined then Write_String (Dict.TemporaryFile, "refined "); end if; Write_String (Dict.TemporaryFile, "global annotation of "); Write_Name (File => Dict.TemporaryFile, Item => Subprogram); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => Annotation); Write_Line (Dict.TemporaryFile, " ;"); end if; end AddGlobalAnnotation; -------------------------------------------------------------------------------- -- Add_Global_Variable -------------------------------------------------------------------------------- procedure Add_Subprogram_Global_Variable (Abstraction : in Abstractions; The_Subprogram : in RawDict.Subprogram_Info_Ref; The_Variable : in RawDict.Variable_Info_Ref; Mode : in Modes; Prefix_Needed : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Variable_Reference : in Location; The_Global_Variable : out RawDict.Global_Variable_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# The_Global_Variable from Abstraction, --# Comp_Unit, --# Dict, --# Mode, --# Prefix_Needed, --# 
The_Subprogram, --# The_Variable, --# Variable_Reference & --# SPARK_IO.File_Sys from *, --# Abstraction, --# Comp_Unit, --# Dict, --# LexTokenManager.State, --# Mode, --# Prefix_Needed, --# The_Subprogram, --# The_Variable, --# Variable_Reference; --# post The_Global_Variable /= RawDict.Null_Global_Variable_Info_Ref; is begin RawDict.Create_Global_Variable (Mode => Mode, Prefix_Needed => Prefix_Needed, The_Subprogram => The_Subprogram, The_Task_Type => RawDict.Null_Type_Info_Ref, Subprogram_Parameter => RawDict.Null_Subprogram_Parameter_Info_Ref, Variable => The_Variable, Next_Subprogram => RawDict.Get_Variable_Global_References (The_Variable => The_Variable, Abstraction => Abstraction), Comp_Unit => Comp_Unit, Loc => Variable_Reference.Start_Position, The_Global_Variable => The_Global_Variable); RawDict.Set_Variable_Global_References (The_Variable => The_Variable, Abstraction => Abstraction, Reference => The_Global_Variable); if RawDict.Get_Subprogram_First_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction) = RawDict.Null_Global_Variable_Info_Ref then RawDict.Set_Subprogram_First_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Global_Variable => The_Global_Variable); else RawDict.Set_Next_Global_Variable (The_Global_Variable => RawDict.Get_Subprogram_Last_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction), Next => The_Global_Variable); end if; RawDict.Set_Subprogram_Last_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Global_Variable => The_Global_Variable); AddOtherReference (RawDict.Get_Variable_Symbol (The_Variable), RawDict.Get_Subprogram_Symbol (The_Subprogram), Variable_Reference); end Add_Subprogram_Global_Variable; -------------------------------------------------------------------------------- procedure AddGlobalVariable (Abstraction : in Abstractions; Subprogram : in Symbol; Variable : in Symbol; Mode : in Modes; PrefixNeeded : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; VariableReference : in Location; Global_Variable_Sym : out Symbol) is The_Global_Variable : RawDict.Global_Variable_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Subprogram_Global_Parameter (Abstraction : in Abstractions; The_Subprogram : in RawDict.Subprogram_Info_Ref; The_Subprogram_Parameter : in RawDict.Subprogram_Parameter_Info_Ref; Mode : in Modes; Prefix_Needed : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Variable_Reference : in Location; The_Global_Variable : out RawDict.Global_Variable_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# The_Global_Variable from Abstraction, --# Comp_Unit, --# Dict, --# Mode, --# Prefix_Needed, --# The_Subprogram, --# The_Subprogram_Parameter, --# Variable_Reference & --# SPARK_IO.File_Sys from *, --# Abstraction, --# Comp_Unit, --# Dict, --# LexTokenManager.State, --# Mode, --# Prefix_Needed, --# The_Subprogram, --# The_Subprogram_Parameter, --# Variable_Reference; --# post The_Global_Variable /= RawDict.Null_Global_Variable_Info_Ref; is begin RawDict.Create_Global_Variable (Mode => Mode, Prefix_Needed => Prefix_Needed, The_Subprogram => The_Subprogram, The_Task_Type => RawDict.Null_Type_Info_Ref, Subprogram_Parameter => The_Subprogram_Parameter, Variable => RawDict.Null_Variable_Info_Ref, Next_Subprogram => RawDict.Get_Subprogram_Parameter_Global_References (The_Subprogram_Parameter => 
The_Subprogram_Parameter, Abstraction => Abstraction), Comp_Unit => Comp_Unit, Loc => Variable_Reference.Start_Position, The_Global_Variable => The_Global_Variable); RawDict.Set_Subprogram_Parameter_Global_References (The_Subprogram_Parameter => The_Subprogram_Parameter, Abstraction => Abstraction, Reference => The_Global_Variable); if RawDict.Get_Subprogram_First_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction) = RawDict.Null_Global_Variable_Info_Ref then RawDict.Set_Subprogram_First_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Global_Variable => The_Global_Variable); else RawDict.Set_Next_Global_Variable (The_Global_Variable => RawDict.Get_Subprogram_Last_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction), Next => The_Global_Variable); end if; RawDict.Set_Subprogram_Last_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Global_Variable => The_Global_Variable); AddOtherReference (RawDict.Get_Subprogram_Parameter_Symbol (The_Subprogram_Parameter), RawDict.Get_Subprogram_Symbol (The_Subprogram), Variable_Reference); end Add_Subprogram_Global_Parameter; -------------------------------------------------------------------------------- procedure Add_Task_Type_Global_Variable (Abstraction : in Abstractions; The_Task_Type : in RawDict.Type_Info_Ref; The_Variable : in RawDict.Variable_Info_Ref; Mode : in Modes; Prefix_Needed : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Variable_Reference : in Location; The_Global_Variable : out RawDict.Global_Variable_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# The_Global_Variable from Abstraction, --# Comp_Unit, --# Dict, --# Mode, --# Prefix_Needed, --# The_Task_Type, --# The_Variable, --# Variable_Reference & --# SPARK_IO.File_Sys from *, --# Abstraction, --# Comp_Unit, --# Dict, --# LexTokenManager.State, --# Mode, --# Prefix_Needed, --# The_Task_Type, --# The_Variable, --# Variable_Reference; --# post The_Global_Variable /= RawDict.Null_Global_Variable_Info_Ref; is begin RawDict.Create_Global_Variable (Mode => Mode, Prefix_Needed => Prefix_Needed, The_Subprogram => RawDict.Null_Subprogram_Info_Ref, The_Task_Type => The_Task_Type, Subprogram_Parameter => RawDict.Null_Subprogram_Parameter_Info_Ref, Variable => The_Variable, Next_Subprogram => RawDict.Get_Variable_Global_References (The_Variable => The_Variable, Abstraction => Abstraction), Comp_Unit => Comp_Unit, Loc => Variable_Reference.Start_Position, The_Global_Variable => The_Global_Variable); RawDict.Set_Variable_Global_References (The_Variable => The_Variable, Abstraction => Abstraction, Reference => The_Global_Variable); if RawDict.Get_Task_Type_First_Global_Variable (The_Task_Type => The_Task_Type, Abstraction => Abstraction) = RawDict.Null_Global_Variable_Info_Ref then RawDict.Set_Task_Type_First_Global_Variable (The_Task_Type => The_Task_Type, Abstraction => Abstraction, The_Global_Variable => The_Global_Variable); else RawDict.Set_Next_Global_Variable (The_Global_Variable => RawDict.Get_Task_Type_Last_Global_Variable (The_Task_Type => The_Task_Type, Abstraction => Abstraction), Next => The_Global_Variable); end if; RawDict.Set_Task_Type_Last_Global_Variable (The_Task_Type => The_Task_Type, Abstraction => Abstraction, The_Global_Variable => The_Global_Variable); AddOtherReference (RawDict.Get_Variable_Symbol (The_Variable), RawDict.Get_Type_Symbol (The_Task_Type), Variable_Reference); 
end Add_Task_Type_Global_Variable; begin -- AddGlobalVariable case RawDict.GetSymbolDiscriminant (Subprogram) is when Subprogram_Symbol => case RawDict.GetSymbolDiscriminant (Variable) is when Variable_Symbol => Add_Subprogram_Global_Variable (Abstraction => Abstraction, The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Subprogram), -- GAA External The_Variable => RawDict.Get_Variable_Info_Ref (Item => Variable), -- GAA External Mode => Mode, Prefix_Needed => PrefixNeeded, Comp_Unit => Comp_Unit, Variable_Reference => VariableReference, The_Global_Variable => The_Global_Variable); when Subprogram_Parameter_Symbol => Add_Subprogram_Global_Parameter (Abstraction => Abstraction, The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Subprogram), -- GAA External The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => Variable), -- GAA External Mode => Mode, Prefix_Needed => PrefixNeeded, Comp_Unit => Comp_Unit, Variable_Reference => VariableReference, The_Global_Variable => The_Global_Variable); when others => -- non-exec code The_Global_Variable := RawDict.Null_Global_Variable_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AddGlobalVariable"); end case; when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Subprogram)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AddGlobalVariable"); Add_Task_Type_Global_Variable (Abstraction => Abstraction, The_Task_Type => RawDict.Get_Type_Info_Ref (Item => Subprogram), -- GAA External The_Variable => RawDict.Get_Variable_Info_Ref (Item => Variable), -- GAA External Mode => Mode, Prefix_Needed => PrefixNeeded, Comp_Unit => Comp_Unit, Variable_Reference => VariableReference, The_Global_Variable => The_Global_Variable); when others => -- non-exec code The_Global_Variable := RawDict.Null_Global_Variable_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AddGlobalVariable"); end case; Global_Variable_Sym := RawDict.Get_Global_Variable_Symbol (The_Global_Variable); -- GAA External end AddGlobalVariable; -------------------------------------------------------------------------------- -- Get_Global_Variable -------------------------------------------------------------------------------- function Get_Subprogram_Variable_Global_Variable (The_Subprogram : RawDict.Subprogram_Info_Ref; Abstraction : Abstractions; The_Variable : RawDict.Variable_Info_Ref) return RawDict.Global_Variable_Info_Ref --# global in Dict; is The_Global_Variable : RawDict.Global_Variable_Info_Ref; begin The_Global_Variable := RawDict.Get_Variable_Global_References (The_Variable => The_Variable, Abstraction => Abstraction); loop exit when (RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) = RawDict.Subprogram_Parameter_Item or else RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) = RawDict.Subprogram_Variable_Item) and then RawDict.Get_Global_Variable_Subprogram (The_Global_Variable => The_Global_Variable) = The_Subprogram; The_Global_Variable := RawDict.Get_Global_Variable_Next_Subprogram (The_Global_Variable => The_Global_Variable); end loop; return The_Global_Variable; end Get_Subprogram_Variable_Global_Variable; -------------------------------------------------------------------------------- function Get_Subprogram_Parameter_Global_Variable (The_Subprogram : RawDict.Subprogram_Info_Ref; 
Abstraction : Abstractions; The_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref) return RawDict.Global_Variable_Info_Ref --# global in Dict; is The_Global_Variable : RawDict.Global_Variable_Info_Ref; begin The_Global_Variable := RawDict.Get_Subprogram_Parameter_Global_References (The_Subprogram_Parameter => The_Subprogram_Parameter, Abstraction => Abstraction); loop exit when (RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) = RawDict.Subprogram_Parameter_Item or else RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) = RawDict.Subprogram_Variable_Item) and then RawDict.Get_Global_Variable_Subprogram (The_Global_Variable => The_Global_Variable) = The_Subprogram; The_Global_Variable := RawDict.Get_Global_Variable_Next_Subprogram (The_Global_Variable => The_Global_Variable); end loop; return The_Global_Variable; end Get_Subprogram_Parameter_Global_Variable; -------------------------------------------------------------------------------- function Get_Task_Type_Variable_Global_Variable (The_Task_Type : RawDict.Type_Info_Ref; Abstraction : Abstractions; The_Variable : RawDict.Variable_Info_Ref) return RawDict.Global_Variable_Info_Ref --# global in Dict; is The_Global_Variable : RawDict.Global_Variable_Info_Ref; begin The_Global_Variable := RawDict.Get_Variable_Global_References (The_Variable => The_Variable, Abstraction => Abstraction); loop exit when RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) = RawDict.Task_Type_Variable_Item and then RawDict.Get_Global_Variable_Task_Type (The_Global_Variable => The_Global_Variable) = The_Task_Type; The_Global_Variable := RawDict.Get_Global_Variable_Next_Subprogram (The_Global_Variable => The_Global_Variable); end loop; return The_Global_Variable; end Get_Task_Type_Variable_Global_Variable; -------------------------------------------------------------------------------- -- Get_Global_Mode -------------------------------------------------------------------------------- function Get_Subprogram_Variable_Global_Mode (The_Subprogram : RawDict.Subprogram_Info_Ref; Abstraction : Abstractions; The_Variable : RawDict.Variable_Info_Ref) return Modes --# global in Dict; is begin return RawDict.Get_Global_Variable_Mode (The_Global_Variable => Get_Subprogram_Variable_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Variable => The_Variable)); end Get_Subprogram_Variable_Global_Mode; -------------------------------------------------------------------------------- function Get_Subprogram_Parameter_Global_Mode (The_Subprogram : RawDict.Subprogram_Info_Ref; Abstraction : Abstractions; The_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref) return Modes --# global in Dict; is begin return RawDict.Get_Global_Variable_Mode (The_Global_Variable => Get_Subprogram_Parameter_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Subprogram_Parameter => The_Subprogram_Parameter)); end Get_Subprogram_Parameter_Global_Mode; -------------------------------------------------------------------------------- function Get_Task_Type_Variable_Global_Mode (The_Task_Type : RawDict.Type_Info_Ref; Abstraction : Abstractions; The_Variable : RawDict.Variable_Info_Ref) return Modes --# global in Dict; is begin return RawDict.Get_Global_Variable_Mode (The_Global_Variable => Get_Task_Type_Variable_Global_Variable (The_Task_Type => The_Task_Type, Abstraction => Abstraction, The_Variable => The_Variable)); end 
Get_Task_Type_Variable_Global_Mode; -------------------------------------------------------------------------------- function GetGlobalMode (Abstraction : Abstractions; Subprogram : Symbol; Variable : Symbol) return Modes is Result : Modes; begin case RawDict.GetSymbolDiscriminant (Subprogram) is when Subprogram_Symbol => case RawDict.GetSymbolDiscriminant (Variable) is when Variable_Symbol => Result := Get_Subprogram_Variable_Global_Mode (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Subprogram), -- GAA External Abstraction => Abstraction, The_Variable => RawDict.Get_Variable_Info_Ref (Item => Variable)); -- GAA External when Subprogram_Parameter_Symbol => Result := Get_Subprogram_Parameter_Global_Mode (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Subprogram), -- GAA External Abstraction => Abstraction, The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => Variable)); -- GAA External when others => -- non-exec code Result := InvalidMode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.GetGlobalMode"); end case; when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Subprogram)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.GetGlobalMode"); Result := Get_Task_Type_Variable_Global_Mode (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => Subprogram), -- GAA External Abstraction => Abstraction, The_Variable => RawDict.Get_Variable_Info_Ref (Item => Variable)); -- GAA External when others => -- non-exec code Result := InvalidMode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.GetGlobalMode"); end case; return Result; end GetGlobalMode; -------------------------------------------------------------------------------- -- Get_Own_Variable_Mode -------------------------------------------------------------------------------- function Get_Own_Variable_Mode (The_Variable : RawDict.Variable_Info_Ref) return Modes --# global in Dict; is begin return RawDict.Get_Own_Variable_Mode (The_Own_Variable => RawDict.Get_Variable_Own_Variable (The_Variable => The_Variable)); end Get_Own_Variable_Mode; -------------------------------------------------------------------------------- function GetOwnVariableMode (Variable : Symbol) return Modes is begin return Get_Own_Variable_Mode (The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External end GetOwnVariableMode; -------------------------------------------------------------------------------- -- Get_Own_Variable_Protected -------------------------------------------------------------------------------- function Get_Own_Variable_Protected (The_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is begin SystemErrors.RT_Assert (C => Is_Own_Variable (The_Variable => The_Variable), Sys_Err => SystemErrors.Precondition_Failure, Msg => "In call to Get_Own_Variable_Protected"); return RawDict.Get_Own_Variable_Protected (The_Own_Variable => RawDict.Get_Variable_Own_Variable (The_Variable => The_Variable)); end Get_Own_Variable_Protected; -------------------------------------------------------------------------------- function GetOwnVariableProtected (Variable : Symbol) return Boolean is begin return Get_Own_Variable_Protected (The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External end GetOwnVariableProtected; 
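--  Usage sketch for GetGlobalMode above: a minimal, hypothetical call as a
--  flow-analysis client might make it.  Proc_Sym and Var_Sym are assumed to
--  be a subprogram symbol and a global variable symbol obtained from earlier
--  Dictionary look-ups; they are illustrative names only, not part of this
--  package.
--
--     declare
--        The_Mode : Modes;
--     begin
--        The_Mode := GetGlobalMode (Abstraction => IsRefined,
--                                   Subprogram  => Proc_Sym,
--                                   Variable    => Var_Sym);
--        --  The_Mode now reflects the mode recorded for Var_Sym in the
--        --  (refined) global annotation of Proc_Sym.
--     end;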
-------------------------------------------------------------------------------- -- Get_Own_Variable_Is_Interrupt_Stream -------------------------------------------------------------------------------- function GetOwnVariableIsInterruptStream (Variable : Symbol) return Boolean is function Get_Own_Variable_Is_Interrupt_Stream (The_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is begin SystemErrors.RT_Assert (C => Is_Own_Variable (The_Variable => The_Variable), Sys_Err => SystemErrors.Precondition_Failure, Msg => "in call to Get_OwnVariable_Is_Interrupt_Stream"); return RawDict.Get_Own_Variable_Is_Interrupt_Stream (The_Own_Variable => RawDict.Get_Variable_Own_Variable (The_Variable => The_Variable)); end Get_Own_Variable_Is_Interrupt_Stream; begin -- GetOwnVariableIsInterruptStream return Get_Own_Variable_Is_Interrupt_Stream (The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External end GetOwnVariableIsInterruptStream; ------------------------------------------------------------------------------ -- Get_Own_Variable_Type_Here ------------------------------------------------------------------------------ -- This function extends the Dictionary.Get_Type (which does not use the -- scope to determine the type of a variable) for own variables. An own -- variable has different type depending on viewpoint (scope), abstract vs -- concrete view. The right type is determined by -- 1. Scope of viewpoint of the own variable (local, child package, or -- outside) -- 2. Announcement of the own variable type -- 3. Declaration scope of the corresponding concrete (Ada) variable function GetOwnVariableTypeHere (OwnVariable : Symbol; Scope : Scopes) return Symbol is function Get_Own_Variable_Type_Here (Own_Variable : RawDict.Variable_Info_Ref; Scope : Scopes) return RawDict.Type_Info_Ref --# global in Dict; is Decl_Scope : Scopes; Result_Type_Mark : RawDict.Type_Info_Ref; begin Decl_Scope := Get_Variable_Scope (The_Variable => Own_Variable); if Variable_Is_Declared (The_Variable => Own_Variable) and then (Get_Visibility (Scope => Decl_Scope) = Visible or else IsLocal (Scope, Decl_Scope) or else (RawDict.GetSymbolDiscriminant (GetRegion (Decl_Scope)) = Package_Symbol and then Is_Private_Seeing_Descendent (Scope => Scope, The_Package => RawDict.Get_Package_Info_Ref (GetRegion (Decl_Scope))))) then Result_Type_Mark := RawDict.Get_Variable_Type (The_Variable => Own_Variable); else Result_Type_Mark := RawDict.Get_Variable_Abstract_Type (The_Variable => Own_Variable); end if; return Result_Type_Mark; end Get_Own_Variable_Type_Here; begin -- GetOwnVariableTypeHere return RawDict.Get_Type_Symbol -- GAA External (Get_Own_Variable_Type_Here (Own_Variable => RawDict.Get_Variable_Info_Ref (OwnVariable), -- GAA External Scope => Scope)); end GetOwnVariableTypeHere; -------------------------------------------------------------------------------- -- Get_Constituent_Mode -------------------------------------------------------------------------------- function Get_Constituent_Mode (The_Variable : RawDict.Variable_Info_Ref) return Modes --# global in Dict; is begin return RawDict.Get_Constituent_Mode (The_Constituent => RawDict.Get_Variable_Constituent (The_Variable => The_Variable)); end Get_Constituent_Mode; -------------------------------------------------------------------------------- function GetConstituentMode (Variable : Symbol) return Modes is begin return Get_Constituent_Mode (The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External end GetConstituentMode; 
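--  Worked sketch of the view-dependent typing implemented by
--  GetOwnVariableTypeHere above.  State_Sym and Current_Scope are
--  hypothetical names: State_Sym stands for an own variable announced with
--  an abstract type and later declared with a concrete Ada type, and
--  Current_Scope is the scope of the code being analysed.
--
--     Type_Here : Symbol;
--     ...
--     Type_Here := GetOwnVariableTypeHere (OwnVariable => State_Sym,
--                                          Scope       => Current_Scope);
--     --  When the concrete declaration is visible from Current_Scope
--     --  (declared in the visible part, local to it, or seen by a
--     --  private-seeing descendent), the concrete declared type is
--     --  returned; from any other viewpoint the announced abstract type
--     --  is returned instead.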
-------------------------------------------------------------------------------- -- Add_Export -------------------------------------------------------------------------------- procedure Add_Subprogram_Export_Variable (The_Subprogram : in RawDict.Subprogram_Info_Ref; Abstraction : in Abstractions; The_Export : in RawDict.Variable_Info_Ref; Export_Reference : in Location; Annotation : in Location) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Abstraction, --# The_Export, --# The_Subprogram & --# SPARK_IO.File_Sys from *, --# Abstraction, --# Annotation, --# Dict, --# Export_Reference, --# LexTokenManager.State, --# The_Export, --# The_Subprogram; is begin RawDict.Set_Global_Variable_Exported (The_Global_Variable => Get_Subprogram_Variable_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Variable => The_Export)); AddOtherReference (RawDict.Get_Variable_Symbol (The_Export), RawDict.Get_Subprogram_Symbol (The_Subprogram), Export_Reference); if SPARK_IO.Is_Open (Dict.TemporaryFile) then if Abstraction = IsRefined then Write_String (Dict.TemporaryFile, "refined "); end if; Write_String (Dict.TemporaryFile, "export of "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Variable_Symbol (The_Export)); Write_String (Dict.TemporaryFile, " from "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Subprogram_Symbol (The_Subprogram)); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => Annotation); Write_Line (Dict.TemporaryFile, " ;"); end if; end Add_Subprogram_Export_Variable; -------------------------------------------------------------------------------- procedure Add_Subprogram_Export_Parameter (The_Subprogram : in RawDict.Subprogram_Info_Ref; Abstraction : in Abstractions; The_Export : in RawDict.Subprogram_Parameter_Info_Ref; Export_Reference : in Location; Annotation : in Location) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Abstraction, --# The_Export, --# The_Subprogram & --# SPARK_IO.File_Sys from *, --# Abstraction, --# Annotation, --# Dict, --# Export_Reference, --# LexTokenManager.State, --# The_Export, --# The_Subprogram; is begin if RawDict.Get_Subprogram_Parameter_Subprogram (The_Subprogram_Parameter => The_Export) = The_Subprogram then RawDict.Set_Subprogram_Parameter_Exported (The_Subprogram_Parameter => The_Export, Abstraction => Abstraction); else RawDict.Set_Global_Variable_Exported (The_Global_Variable => Get_Subprogram_Parameter_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Subprogram_Parameter => The_Export)); end if; AddOtherReference (RawDict.Get_Subprogram_Parameter_Symbol (The_Export), RawDict.Get_Subprogram_Symbol (The_Subprogram), Export_Reference); if SPARK_IO.Is_Open (Dict.TemporaryFile) then if Abstraction = IsRefined then Write_String (Dict.TemporaryFile, "refined "); end if; Write_String (Dict.TemporaryFile, "export of "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Subprogram_Parameter_Symbol (The_Export)); Write_String (Dict.TemporaryFile, " from "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Subprogram_Symbol (The_Subprogram)); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => Annotation); Write_Line (Dict.TemporaryFile, " ;"); end if; end Add_Subprogram_Export_Parameter; 
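--  The two helpers above are normally reached through AddExport, declared
--  next: it dispatches on the kinds of the procedure and export symbols,
--  marking a formal parameter of the subprogram itself as exported directly
--  and marking the corresponding global-variable record otherwise.  A
--  minimal, hypothetical call is sketched below; Proc_Sym, Export_Sym,
--  Export_Loc and Anno_Loc are assumed to come from earlier wellformedness
--  checking and are illustrative names only.
--
--     AddExport (Abstraction     => IsRefined,
--                TheProcedure    => Proc_Sym,
--                TheExport       => Export_Sym,
--                ExportReference => Export_Loc,
--                Annotation      => Anno_Loc);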
-------------------------------------------------------------------------------- procedure AddExport (Abstraction : in Abstractions; TheProcedure : in Symbol; TheExport : in Symbol; ExportReference : in Location; Annotation : in Location) is procedure Add_Task_Type_Export (The_Task_Type : in RawDict.Type_Info_Ref; Abstraction : in Abstractions; The_Export : in RawDict.Variable_Info_Ref; Export_Reference : in Location; Annotation : in Location) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Abstraction, --# The_Export, --# The_Task_Type & --# SPARK_IO.File_Sys from *, --# Abstraction, --# Annotation, --# Dict, --# Export_Reference, --# LexTokenManager.State, --# The_Export, --# The_Task_Type; is begin RawDict.Set_Global_Variable_Exported (The_Global_Variable => Get_Task_Type_Variable_Global_Variable (The_Task_Type => The_Task_Type, Abstraction => Abstraction, The_Variable => The_Export)); AddOtherReference (RawDict.Get_Variable_Symbol (The_Export), RawDict.Get_Type_Symbol (The_Task_Type), Export_Reference); if SPARK_IO.Is_Open (Dict.TemporaryFile) then if Abstraction = IsRefined then Write_String (Dict.TemporaryFile, "refined "); end if; Write_String (Dict.TemporaryFile, "export of "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Variable_Symbol (The_Export)); Write_String (Dict.TemporaryFile, " from "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Type_Symbol (The_Task_Type)); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => Annotation); Write_Line (Dict.TemporaryFile, " ;"); end if; end Add_Task_Type_Export; begin -- AddExport case RawDict.GetSymbolDiscriminant (TheProcedure) is when Subprogram_Symbol => case RawDict.GetSymbolDiscriminant (TheExport) is when Variable_Symbol => Add_Subprogram_Export_Variable (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheProcedure), -- GAA External Abstraction => Abstraction, The_Export => RawDict.Get_Variable_Info_Ref (Item => TheExport), -- GAA External Export_Reference => ExportReference, Annotation => Annotation); when Subprogram_Parameter_Symbol => Add_Subprogram_Export_Parameter (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheProcedure), -- GAA External Abstraction => Abstraction, The_Export => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => TheExport), -- GAA External Export_Reference => ExportReference, Annotation => Annotation); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AddExport"); end case; when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheProcedure)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AddExport"); Add_Task_Type_Export (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => TheProcedure), -- GAA External Abstraction => Abstraction, The_Export => RawDict.Get_Variable_Info_Ref (TheExport), -- GAA External Export_Reference => ExportReference, Annotation => Annotation); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AddExport"); end case; end AddExport; -------------------------------------------------------------------------------- -- Force_Import -------------------------------------------------------------------------------- procedure Force_Subprogram_Import (The_Subprogram : in 
RawDict.Subprogram_Info_Ref; Abstraction : in Abstractions; The_Import : in Symbol; Import_Reference : in Location) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Abstraction, --# The_Import, --# The_Subprogram & --# SPARK_IO.File_Sys from *, --# Abstraction, --# Dict, --# Import_Reference, --# LexTokenManager.State, --# The_Import, --# The_Subprogram; is begin if RawDict.GetSymbolDiscriminant (The_Import) = Subprogram_Parameter_Symbol and then RawDict.Get_Subprogram_Parameter_Subprogram (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => The_Import)) = The_Subprogram then RawDict.Set_Subprogram_Parameter_Imported (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => The_Import), Abstraction => Abstraction); else case RawDict.GetSymbolDiscriminant (The_Import) is when Variable_Symbol => RawDict.Set_Global_Variable_Imported (The_Global_Variable => Get_Subprogram_Variable_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Variable => RawDict.Get_Variable_Info_Ref (Item => The_Import))); when Subprogram_Parameter_Symbol => RawDict.Set_Global_Variable_Imported (The_Global_Variable => Get_Subprogram_Parameter_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => The_Import))); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Force_Subprogram_Import"); end case; end if; AddOtherReference (The_Import, RawDict.Get_Subprogram_Symbol (The_Subprogram), Import_Reference); end Force_Subprogram_Import; -------------------------------------------------------------------------------- procedure Force_Task_Type_Import (The_Task_Type : in RawDict.Type_Info_Ref; Abstraction : in Abstractions; The_Import : in RawDict.Variable_Info_Ref; Import_Reference : in Location) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Abstraction, --# The_Import, --# The_Task_Type & --# SPARK_IO.File_Sys from *, --# Abstraction, --# Dict, --# Import_Reference, --# LexTokenManager.State, --# The_Import, --# The_Task_Type; is begin RawDict.Set_Global_Variable_Imported (The_Global_Variable => Get_Task_Type_Variable_Global_Variable (The_Task_Type => The_Task_Type, Abstraction => Abstraction, The_Variable => The_Import)); AddOtherReference (RawDict.Get_Variable_Symbol (The_Import), RawDict.Get_Type_Symbol (The_Task_Type), Import_Reference); end Force_Task_Type_Import; -------------------------------------------------------------------------------- procedure ForceImport (Abstraction : in Abstractions; TheProcedure : in Symbol; TheImport : in Symbol; ImportReference : in Location) is begin case RawDict.GetSymbolDiscriminant (TheProcedure) is when Subprogram_Symbol => Force_Subprogram_Import (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheProcedure), -- GAA External Abstraction => Abstraction, The_Import => TheImport, Import_Reference => ImportReference); when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheProcedure)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.ForceImport"); Force_Task_Type_Import (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => TheProcedure), -- GAA External Abstraction => Abstraction, The_Import => 
RawDict.Get_Variable_Info_Ref (TheImport), Import_Reference => ImportReference); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.ForceImport"); end case; end ForceImport; -------------------------------------------------------------------------------- -- Add_Dependency -------------------------------------------------------------------------------- procedure Add_Subprogram_Dependency (Abstraction : in Abstractions; Comp_Unit : in ContextManager.UnitDescriptors; The_Subprogram : in RawDict.Subprogram_Info_Ref; The_Export : in Symbol; The_Import : in Symbol; Import_Reference : in Location) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Abstraction, --# Comp_Unit, --# Import_Reference, --# The_Export, --# The_Import, --# The_Subprogram & --# SPARK_IO.File_Sys from *, --# Abstraction, --# Comp_Unit, --# Dict, --# Import_Reference, --# LexTokenManager.State, --# The_Export, --# The_Import, --# The_Subprogram; is The_Dependency : RawDict.Dependency_Info_Ref; The_Global_Variable : RawDict.Global_Variable_Info_Ref; begin case RawDict.GetSymbolDiscriminant (The_Import) is when Variable_Symbol => RawDict.Create_Dependency (The_Import_Parameter => RawDict.Null_Subprogram_Parameter_Info_Ref, The_Import_Variable => RawDict.Get_Variable_Info_Ref (Item => The_Import), Comp_Unit => Comp_Unit, Loc => Import_Reference.Start_Position, The_Dependency => The_Dependency); when Subprogram_Parameter_Symbol => RawDict.Create_Dependency (The_Import_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => The_Import), The_Import_Variable => RawDict.Null_Variable_Info_Ref, Comp_Unit => Comp_Unit, Loc => Import_Reference.Start_Position, The_Dependency => The_Dependency); when others => The_Dependency := RawDict.Null_Dependency_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Add_Subprogram_Dependency"); end case; if RawDict.GetSymbolDiscriminant (The_Export) = Subprogram_Parameter_Symbol and then RawDict.Get_Subprogram_Parameter_Subprogram (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => The_Export)) = The_Subprogram then RawDict.Set_Next_Dependency (The_Dependency => The_Dependency, Next => RawDict.Get_Subprogram_Parameter_Dependencies (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => The_Export), Abstraction => Abstraction)); RawDict.Set_Subprogram_Parameter_Dependencies (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => The_Export), Abstraction => Abstraction, Dependency => The_Dependency); Force_Subprogram_Import (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Import => The_Import, Import_Reference => Import_Reference); else case RawDict.GetSymbolDiscriminant (The_Export) is when Variable_Symbol => The_Global_Variable := Get_Subprogram_Variable_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Variable => RawDict.Get_Variable_Info_Ref (Item => The_Export)); when Subprogram_Parameter_Symbol => The_Global_Variable := Get_Subprogram_Parameter_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => The_Export)); when others => -- non-exec code The_Global_Variable := RawDict.Null_Global_Variable_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, 
Msg => "in Dictionary.Add_Subprogram_Dependency"); end case; RawDict.Set_Next_Dependency (The_Dependency => The_Dependency, Next => RawDict.Get_Global_Variable_Dependencies (The_Global_Variable => The_Global_Variable, Abstraction => Abstraction)); RawDict.Set_Global_Variable_Dependencies (The_Global_Variable => The_Global_Variable, Abstraction => Abstraction, Dependency => The_Dependency); Force_Subprogram_Import (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Import => The_Import, Import_Reference => Import_Reference); end if; end Add_Subprogram_Dependency; -------------------------------------------------------------------------------- procedure AddDependency (Abstraction : in Abstractions; Comp_Unit : in ContextManager.UnitDescriptors; TheProcedure : in Symbol; TheExport : in Symbol; TheImport : in Symbol; ImportReference : in Location) is procedure Add_Task_Type_Dependency (Abstraction : in Abstractions; Comp_Unit : in ContextManager.UnitDescriptors; The_Task_Type : in RawDict.Type_Info_Ref; The_Export : in RawDict.Variable_Info_Ref; The_Import : in RawDict.Variable_Info_Ref; Import_Reference : in Location) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Abstraction, --# Comp_Unit, --# Import_Reference, --# The_Export, --# The_Import, --# The_Task_Type & --# SPARK_IO.File_Sys from *, --# Abstraction, --# Comp_Unit, --# Dict, --# Import_Reference, --# LexTokenManager.State, --# The_Export, --# The_Import, --# The_Task_Type; is The_Dependency : RawDict.Dependency_Info_Ref; The_Global_Variable : RawDict.Global_Variable_Info_Ref; begin RawDict.Create_Dependency (The_Import_Parameter => RawDict.Null_Subprogram_Parameter_Info_Ref, The_Import_Variable => The_Import, Comp_Unit => Comp_Unit, Loc => Import_Reference.Start_Position, The_Dependency => The_Dependency); The_Global_Variable := Get_Task_Type_Variable_Global_Variable (The_Task_Type => The_Task_Type, Abstraction => Abstraction, The_Variable => The_Export); RawDict.Set_Next_Dependency (The_Dependency => The_Dependency, Next => RawDict.Get_Global_Variable_Dependencies (The_Global_Variable => The_Global_Variable, Abstraction => Abstraction)); RawDict.Set_Global_Variable_Dependencies (The_Global_Variable => The_Global_Variable, Abstraction => Abstraction, Dependency => The_Dependency); Force_Task_Type_Import (The_Task_Type => The_Task_Type, Abstraction => Abstraction, The_Import => The_Import, Import_Reference => Import_Reference); end Add_Task_Type_Dependency; begin -- AddDependency case RawDict.GetSymbolDiscriminant (TheProcedure) is when Subprogram_Symbol => Add_Subprogram_Dependency (Abstraction => Abstraction, Comp_Unit => Comp_Unit, The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheProcedure), -- GAA External The_Export => TheExport, The_Import => TheImport, Import_Reference => ImportReference); when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheProcedure)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AddDependency"); Add_Task_Type_Dependency (Abstraction => Abstraction, Comp_Unit => Comp_Unit, The_Task_Type => RawDict.Get_Type_Info_Ref (Item => TheProcedure), -- GAA External The_Export => RawDict.Get_Variable_Info_Ref (TheExport), The_Import => RawDict.Get_Variable_Info_Ref (TheImport), Import_Reference => ImportReference); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in 
Dictionary.AddDependency"); end case; end AddDependency; -------------------------------------------------------------------------------- -- Add_Virtual_Element -------------------------------------------------------------------------------- procedure AddVirtualElement (ToProtectedType : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; TheVirtualElement : in Symbol; TheOwner : in Symbol) is procedure Add_Virtual_Element (The_Protected_Type : in RawDict.Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; The_Virtual_Element : in RawDict.Variable_Info_Ref; TheOwner : in Symbol) --# global in out Dict; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# TheOwner, --# The_Protected_Type, --# The_Virtual_Element; is NewVirtualElement : Symbol; begin RawDict.CreateVirtualElement (The_Variable => The_Virtual_Element, TheOwner => TheOwner, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, TheVirtualElement => NewVirtualElement); RawDict.SetNextVirtualElement (NewVirtualElement, RawDict.Get_Type_Virtual_Element_List (Type_Mark => The_Protected_Type)); RawDict.Set_Type_Virtual_Element_List (Type_Mark => The_Protected_Type, The_List => NewVirtualElement); RawDict.Set_Variable_Virtual_Element (The_Variable => The_Virtual_Element, Virtual_Element => NewVirtualElement); end Add_Virtual_Element; begin -- AddVirtualElement Add_Virtual_Element (The_Protected_Type => RawDict.Get_Type_Info_Ref (ToProtectedType), -- GAA External Comp_Unit => Comp_Unit, Declaration => Declaration, The_Virtual_Element => RawDict.Get_Variable_Info_Ref (TheVirtualElement), -- GAA External TheOwner => TheOwner); end AddVirtualElement; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- procedure AddPOorSOToSuspendsList (TheTaskOrProc : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; ThePOorSO : in Symbol) is The_Suspends_List : Symbol; The_Subprogram : RawDict.Subprogram_Info_Ref; The_Task_Type : RawDict.Type_Info_Ref; begin RawDict.CreateSuspendsListItem (ThePOorSO => ThePOorSO, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, SuspendsListItem => The_Suspends_List); case RawDict.GetSymbolDiscriminant (TheTaskOrProc) is when Subprogram_Symbol => The_Subprogram := RawDict.Get_Subprogram_Info_Ref (Item => TheTaskOrProc); -- GAA External RawDict.SetNextSuspendsListItem (The_Suspends_List, RawDict.Get_Subprogram_Suspends_List (The_Subprogram => The_Subprogram)); RawDict.Set_Subprogram_Suspends_List (The_Subprogram => The_Subprogram, The_Suspends_List => The_Suspends_List); when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheTaskOrProc)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AddPOorSOToSuspendsList"); The_Task_Type := RawDict.Get_Type_Info_Ref (Item => TheTaskOrProc); -- GAA External RawDict.SetNextSuspendsListItem (The_Suspends_List, RawDict.Get_Task_Type_Suspends_List (The_Task_Type => The_Task_Type)); RawDict.Set_Task_Type_Suspends_List (The_Task_Type => The_Task_Type, The_Suspends_List => The_Suspends_List); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AddPOorSOToSuspendsList"); end case; end AddPOorSOToSuspendsList; -------------------------------------------------------------------------------- -- 
Add_Interrupt_Stream_Mapping -------------------------------------------------------------------------------- procedure AddInterruptStreamMapping (Subject : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; TheInterruptHandler : in LexTokenManager.Lex_String; TheInterruptStream : in LexTokenManager.Lex_String) is procedure Add_Interrupt_Stream_Mapping (The_Variable : in RawDict.Variable_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; The_Interrupt_Handler : in LexTokenManager.Lex_String; The_Interrupt_Stream : in LexTokenManager.Lex_String) --# global in out Dict; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# The_Interrupt_Handler, --# The_Interrupt_Stream, --# The_Variable; is NewSym : Symbol; The_Own_Variable : RawDict.Own_Variable_Info_Ref; begin The_Own_Variable := RawDict.Get_Variable_Own_Variable (The_Variable => The_Variable); RawDict.CreateInterruptStreamMapping (TheHandler => The_Interrupt_Handler, TheInterruptStream => The_Interrupt_Stream, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, TheMapping => NewSym); RawDict.SetNextInterruptStreamMapping (Current => NewSym, Next => RawDict.Get_Own_Variable_Interrupt_Stream_Mappings (The_Own_Variable => The_Own_Variable)); RawDict.Set_Own_Variable_Interrupt_Stream_Mappings (The_Own_Variable => The_Own_Variable, The_Interrupt_Stream_Mappings => NewSym); end Add_Interrupt_Stream_Mapping; begin -- AddInterruptStreamMapping Add_Interrupt_Stream_Mapping (The_Variable => RawDict.Get_Variable_Info_Ref (Subject), -- GAA External Comp_Unit => Comp_Unit, Declaration => Declaration, The_Interrupt_Handler => TheInterruptHandler, The_Interrupt_Stream => TheInterruptStream); end AddInterruptStreamMapping; -------------------------------------------------------------------------------- -- Copy_Dependency_List -------------------------------------------------------------------------------- procedure CopyDependencyList (Abstraction : in Abstractions; TheProcedure : in Symbol; FromExport : in Symbol; ToExport : in Symbol) is procedure Copy_Subprogram_Dependency_List (The_Subprogram : in RawDict.Subprogram_Info_Ref; Abstraction : in Abstractions; FromExport : in Symbol; ToExport : in Symbol) --# global in out Dict; --# derives Dict from *, --# Abstraction, --# FromExport, --# The_Subprogram, --# ToExport; is The_Dependency : RawDict.Dependency_Info_Ref; begin if RawDict.GetSymbolDiscriminant (FromExport) = Subprogram_Parameter_Symbol and then RawDict.Get_Subprogram_Parameter_Subprogram (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => FromExport)) = The_Subprogram then The_Dependency := RawDict.Get_Subprogram_Parameter_Dependencies (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => FromExport), Abstraction => Abstraction); else case RawDict.GetSymbolDiscriminant (FromExport) is when Variable_Symbol => The_Dependency := RawDict.Get_Global_Variable_Dependencies (The_Global_Variable => Get_Subprogram_Variable_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Variable => RawDict.Get_Variable_Info_Ref (Item => FromExport)), Abstraction => Abstraction); when Subprogram_Parameter_Symbol => The_Dependency := RawDict.Get_Global_Variable_Dependencies (The_Global_Variable => Get_Subprogram_Parameter_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => FromExport)), Abstraction 
=> Abstraction); when others => -- non-exec code The_Dependency := RawDict.Null_Dependency_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Copy_Subprogram_Dependency_List"); end case; end if; if RawDict.GetSymbolDiscriminant (ToExport) = Subprogram_Parameter_Symbol and then RawDict.Get_Subprogram_Parameter_Subprogram (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => ToExport)) = The_Subprogram then RawDict.Set_Subprogram_Parameter_Dependencies (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => ToExport), Abstraction => Abstraction, Dependency => The_Dependency); else case RawDict.GetSymbolDiscriminant (ToExport) is when Variable_Symbol => RawDict.Set_Global_Variable_Dependencies (The_Global_Variable => Get_Subprogram_Variable_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Variable => RawDict.Get_Variable_Info_Ref (Item => ToExport)), Abstraction => Abstraction, Dependency => The_Dependency); when Subprogram_Parameter_Symbol => RawDict.Set_Global_Variable_Dependencies (The_Global_Variable => Get_Subprogram_Parameter_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => ToExport)), Abstraction => Abstraction, Dependency => The_Dependency); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Copy_Subprogram_Dependency_List"); end case; end if; end Copy_Subprogram_Dependency_List; -------------------------------------------------------------------------------- procedure Copy_Task_Type_Dependency_List (The_Task_Type : in RawDict.Type_Info_Ref; Abstraction : in Abstractions; From_Export : in RawDict.Variable_Info_Ref; To_Export : in RawDict.Variable_Info_Ref) --# global in out Dict; --# derives Dict from *, --# Abstraction, --# From_Export, --# The_Task_Type, --# To_Export; is begin RawDict.Set_Global_Variable_Dependencies (The_Global_Variable => Get_Task_Type_Variable_Global_Variable (The_Task_Type => The_Task_Type, Abstraction => Abstraction, The_Variable => To_Export), Abstraction => Abstraction, Dependency => RawDict.Get_Global_Variable_Dependencies (The_Global_Variable => Get_Task_Type_Variable_Global_Variable (The_Task_Type => The_Task_Type, Abstraction => Abstraction, The_Variable => From_Export), Abstraction => Abstraction)); end Copy_Task_Type_Dependency_List; begin -- CopyDependencyList case RawDict.GetSymbolDiscriminant (TheProcedure) is when Subprogram_Symbol => Copy_Subprogram_Dependency_List (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheProcedure), -- GAA External Abstraction => Abstraction, FromExport => FromExport, ToExport => ToExport); when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheProcedure)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.CopyDependencyList"); Copy_Task_Type_Dependency_List (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => TheProcedure), -- GAA External Abstraction => Abstraction, From_Export => RawDict.Get_Variable_Info_Ref (FromExport), -- GAA External To_Export => RawDict.Get_Variable_Info_Ref (ToExport)); -- GAA External when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.CopyDependencyList"); end case; end CopyDependencyList; 
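   --------------------------------------------------------------------------------
   -- Note on representation: derives (dependency) information is held as a singly
   -- linked list of Dependency records attached to each exported global variable
   -- (or exported parameter).  Add_Subprogram_Dependency and
   -- Add_Task_Type_Dependency above link each new dependency in at the head of
   -- the relevant list, and the Copy_*_Dependency_List operations copy the head
   -- reference of one export's list to another export.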
-------------------------------------------------------------------------------- -------------------------------------------------------------------------------- procedure Add_New_Package (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; The_Parent : in RawDict.Package_Info_Ref; Child_Sort : in PackageSort; The_Package : out RawDict.Package_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# SPARK_IO.File_Sys from *, --# Child_Sort, --# Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope, --# The_Parent & --# The_Package from Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope, --# The_Parent; --# post The_Package /= RawDict.Null_Package_Info_Ref; is The_Specification : RawDict.Declaration_Info_Ref; procedure Make_Child (The_Parent, The_Package : in RawDict.Package_Info_Ref; Child_Sort : in PackageSort) --# global in out Dict; --# derives Dict from *, --# Child_Sort, --# The_Package, --# The_Parent; is begin RawDict.Set_Package_Parent (The_Package => The_Package, The_Parent => The_Parent); case Child_Sort is when Public => if RawDict.Get_Package_First_Public_Child (The_Package => The_Parent) = RawDict.Null_Package_Info_Ref then -- Adding first public child RawDict.Set_Package_First_Public_Child (The_Package => The_Parent, The_Child => The_Package); RawDict.Set_Package_Last_Public_Child (The_Package => The_Parent, The_Child => The_Package); else -- Adding a sibling to an existing public child RawDict.Set_Package_Next_Sibling (The_Package => RawDict.Get_Package_Last_Public_Child (The_Package => The_Parent), The_Sibling => The_Package); RawDict.Set_Package_Last_Public_Child (The_Package => The_Parent, The_Child => The_Package); end if; when PrivateChild => if RawDict.Get_Package_First_Private_Child (The_Package => The_Parent) = RawDict.Null_Package_Info_Ref then -- Adding first private child RawDict.Set_Package_First_Private_Child (The_Package => The_Parent, The_Child => The_Package); RawDict.Set_Package_Last_Private_Child (The_Package => The_Parent, The_Child => The_Package); else -- Adding a sibling to an existing private child RawDict.Set_Package_Next_Sibling (The_Package => RawDict.Get_Package_Last_Private_Child (The_Package => The_Parent), The_Sibling => The_Package); RawDict.Set_Package_Last_Private_Child (The_Package => The_Parent, The_Child => The_Package); end if; end case; end Make_Child; -------------------------------------------------------------------------------- procedure Write_Package_Declaration (The_Package : in RawDict.Package_Info_Ref; Declaration : in Location) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Declaration, --# Dict, --# LexTokenManager.State, --# The_Package; is begin if SPARK_IO.Is_Open (Dict.TemporaryFile) then Write_String (Dict.TemporaryFile, "package specification of "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Package_Symbol (The_Package)); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => Declaration); Write_Line (Dict.TemporaryFile, " ;"); end if; end Write_Package_Declaration; begin -- Add_New_Package if The_Parent = RawDict.Null_Package_Info_Ref and then Get_Visibility (Scope => Scope) = Local then The_Package := RawDict.Get_Package_Info_Ref (LookupImmediateScope (Name, Scope, ProofContext)); else 
            The_Package := RawDict.Null_Package_Info_Ref;
         end if;
         if The_Package = RawDict.Null_Package_Info_Ref then
            if The_Parent = RawDict.Null_Package_Info_Ref then
               Add_Declaration (Comp_Unit       => Comp_Unit,
                                Loc             => Declaration,
                                Scope           => Scope,
                                Context         => ProgramContext,
                                The_Declaration => The_Specification);
            else
               RawDict.Create_Declaration (Context         => ProgramContext,
                                           Scope           => Scope,
                                           Comp_Unit       => Comp_Unit,
                                           Loc             => Declaration.Start_Position,
                                           The_Declaration => The_Specification);
            end if;
            RawDict.Create_Package (Name            => Name,
                                    The_Declaration => The_Specification,
                                    Comp_Unit       => Comp_Unit,
                                    Loc             => Declaration.Start_Position,
                                    The_Package     => The_Package);
         else
            RawDict.Set_Declaration_Context
              (The_Declaration => RawDict.Get_Package_Specification (The_Package => The_Package),
               Context         => ProgramContext);
            if RawDict.Get_Symbol_Compilation_Unit (Item => RawDict.Get_Package_Symbol (The_Package)) = Comp_Unit then
               -- There was a usage of the package before its actual
               -- declaration -> update the declaration position.  The
               -- declaration must be in the same compilation unit as the
               -- usage (in the package specification) to prevent getting a
               -- cross-reference from the package specification to the
               -- package body.
               RawDict.Set_Symbol_Location (Item     => RawDict.Get_Package_Symbol (The_Package),
                                            Location => Declaration.Start_Position);
            end if;
         end if;
         if The_Parent /= RawDict.Null_Package_Info_Ref then
            Make_Child (The_Parent  => The_Parent,
                        The_Package => The_Package,
                        Child_Sort  => Child_Sort);
         end if;
         if Child_Sort = PrivateChild then
            RawDict.Set_Package_Is_Private (The_Package => The_Package);
         end if;
         Write_Package_Declaration (The_Package => The_Package, Declaration => Declaration);
      end Add_New_Package;
   --------------------------------------------------------------------------------
   procedure Add_Package
     (Name          : in LexTokenManager.Lex_String;
      Comp_Unit     : in ContextManager.UnitDescriptors;
      Specification : in Location;
      Scope         : in Scopes;
      ThePackage    : out Symbol)
   is
      The_Package : RawDict.Package_Info_Ref;
   begin
      Add_New_Package (Name        => Name,
                       Comp_Unit   => Comp_Unit,
                       Declaration => Specification,
                       Scope       => Scope,
                       The_Parent  => RawDict.Null_Package_Info_Ref,
                       Child_Sort  => Public,
                       The_Package => The_Package);
      ThePackage := RawDict.Get_Package_Symbol (The_Package); -- GAA External
   end Add_Package;
   --------------------------------------------------------------------------------
   procedure AddChildPackage
     (TheParent     : in Symbol;
      Sort          : in PackageSort;
      Name          : in LexTokenManager.Lex_String;
      Comp_Unit     : in ContextManager.UnitDescriptors;
      Specification : in Location;
      Scope         : in Scopes;
      ThePackage    : out Symbol)
   is
      The_Package : RawDict.Package_Info_Ref;
   begin
      Add_New_Package (Name        => Name,
                       Comp_Unit   => Comp_Unit,
                       Declaration => Specification,
                       Scope       => Scope,
                       The_Parent  => RawDict.Get_Package_Info_Ref (TheParent), -- GAA External
                       Child_Sort  => Sort,
                       The_Package => The_Package);
      ThePackage := RawDict.Get_Package_Symbol (The_Package); -- GAA External
   end AddChildPackage;
   --------------------------------------------------------------------------------
   procedure AddPrivatePackage
     (Name          : in LexTokenManager.Lex_String;
      Comp_Unit     : in ContextManager.UnitDescriptors;
      Specification : in Location;
      Scope         : in Scopes;
      ThePackage    : out Symbol)
   is
      The_Package : RawDict.Package_Info_Ref;
   begin
      Add_New_Package (Name        => Name,
                       Comp_Unit   => Comp_Unit,
                       Declaration => Specification,
                       Scope       => Scope,
                       The_Parent  => RawDict.Null_Package_Info_Ref,
                       Child_Sort  => PrivateChild,
                       The_Package => The_Package);
      ThePackage := RawDict.Get_Package_Symbol (The_Package); --
GAA External end AddPrivatePackage; -------------------------------------------------------------------------------- procedure SetPackageElaborateBodyFound (ThePackage : in Symbol) is begin RawDict.Set_Package_Elaborate_Body_Found (The_Package => RawDict.Get_Package_Info_Ref (ThePackage)); -- GAA External end SetPackageElaborateBodyFound; -------------------------------------------------------------------------------- procedure SetPackageAsExtendingAnother (ThePackage : in Symbol; ThePackageItExtends : in Symbol) is begin RawDict.Set_Package_Extends (The_Package => RawDict.Get_Package_Info_Ref (ThePackage), -- GAA External The_Extends => RawDict.Get_Package_Info_Ref (ThePackageItExtends)); -- GAA External end SetPackageAsExtendingAnother; -------------------------------------------------------------------------------- procedure AddPrivatePart (ThePackage : in Symbol; PrivatePart : in Location; Hidden : in Boolean) is begin if SPARK_IO.Is_Open (Dict.TemporaryFile) then if Hidden then Write_String (Dict.TemporaryFile, "hidden "); end if; Write_String (Dict.TemporaryFile, "private part of "); Write_Name (File => Dict.TemporaryFile, Item => ThePackage); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => PrivatePart); Write_Line (Dict.TemporaryFile, " ;"); end if; end AddPrivatePart; -------------------------------------------------------------------------------- procedure AddOwnAnnotation (ThePackage : in Symbol; Annotation : in Location) is begin if SPARK_IO.Is_Open (Dict.TemporaryFile) then Write_String (Dict.TemporaryFile, "own annotation of "); Write_Name (File => Dict.TemporaryFile, Item => ThePackage); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => Annotation); Write_Line (Dict.TemporaryFile, " ;"); end if; end AddOwnAnnotation; -------------------------------------------------------------------------------- -- Add_Own_Variable -------------------------------------------------------------------------------- procedure Add_Own_Variable_Local (Name : in LexTokenManager.Lex_String; The_Package : in RawDict.Package_Info_Ref; Mode : in Modes; Is_Protected : in Boolean; Is_Interrupt_Stream : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; The_Variable : out RawDict.Variable_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# Is_Interrupt_Stream, --# Is_Protected, --# LexTokenManager.State, --# Mode, --# Name, --# The_Package & --# The_Variable from Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# The_Package; --# post The_Variable /= RawDict.Null_Variable_Info_Ref; is The_Implicit_In_Stream : RawDict.Implicit_In_Stream_Info_Ref; The_Own_Variable : RawDict.Own_Variable_Info_Ref; begin case Get_Visibility (Scope => Get_Package_Scope (The_Package => The_Package)) is when Visible => The_Variable := RawDict.Null_Variable_Info_Ref; when Privat | Local => The_Variable := RawDict.Get_Variable_Info_Ref (LookupImmediateScope (Name, Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (The_Package)), ProofContext)); end case; if The_Variable = RawDict.Null_Variable_Info_Ref then RawDict.Create_Variable (Name => Name, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, The_Variable => The_Variable); RawDict.Create_Own_Variable (Variable => The_Variable, Owner => 
            RawDict.Get_Package_Symbol (The_Package),
                                        Comp_Unit        => Comp_Unit,
                                        Loc              => Declaration.Start_Position,
                                        The_Own_Variable => The_Own_Variable);
         RawDict.Set_Variable_Own_Variable (The_Variable => The_Variable, Own_Variable => The_Own_Variable);
         RawDict.Set_Next_Own_Variable (The_Own_Variable => The_Own_Variable,
                                        Next             => RawDict.Get_Package_Own_Variables (The_Package => The_Package));
         RawDict.Set_Package_Own_Variables (The_Package => The_Package, Own_Variables => The_Own_Variable);
      else
         if RawDict.Get_Symbol_Compilation_Unit (Item => RawDict.Get_Variable_Symbol (The_Variable)) = Comp_Unit then
            -- There was a usage of the variable before its actual
            -- declaration -> update the declaration position.  The
            -- declaration must be in the same compilation unit as the usage
            -- (in the package specification) to prevent getting a
            -- cross-reference from the package specification to the package
            -- body.
            RawDict.Set_Symbol_Location (Item     => RawDict.Get_Variable_Symbol (The_Variable),
                                         Location => Declaration.Start_Position);
         end if;
         The_Own_Variable := RawDict.Get_Variable_Own_Variable (The_Variable => The_Variable);
      end if;
      RawDict.Set_Own_Variable_Announced (The_Own_Variable => The_Own_Variable);
      RawDict.Set_Own_Variable_Mode (The_Own_Variable => The_Own_Variable, Mode => Mode);
      RawDict.Set_Own_Variable_Protected (The_Own_Variable => The_Own_Variable, Is_Protected => Is_Protected);
      RawDict.Set_Own_Variable_Is_Interrupt_Stream (The_Own_Variable    => The_Own_Variable,
                                                    Is_Interrupt_Stream => Is_Interrupt_Stream);
      AddOtherReference (RawDict.Get_Variable_Symbol (The_Variable), RawDict.Get_Package_Symbol (The_Package), Declaration);
      -- For protected state that has no mode, we need to create an implicit in stream
      -- from which reads of the protected state are deemed to come; this facilitates
      -- effective flow analysis of shared protected state.
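      -- For example, protected state announced without a mode in its own
      -- variable annotation, say
      --    --# own protected State;
      -- falls into this case and is given an implicit in stream below, whereas
      -- protected state announced with an explicit in or out mode is not,
      -- since its reads or writes are already modelled through that mode.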
if Is_Protected and then Mode = DefaultMode then RawDict.Create_Implicit_In_Stream (The_Own_Variable => The_Own_Variable, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, The_Implicit_In_Stream => The_Implicit_In_Stream); RawDict.Set_Own_Variable_Implicit_In_Stream (The_Own_Variable => The_Own_Variable, The_Implicit_In_Stream => The_Implicit_In_Stream); end if; end Add_Own_Variable_Local; -------------------------------------------------------------------------------- procedure Add_Own_Variable (Name : in LexTokenManager.Lex_String; The_Package : in Symbol; Mode : in Modes; Is_Protected : in Boolean; Is_Interrupt_Stream : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Var_Symbol : out Symbol) is The_Variable : RawDict.Variable_Info_Ref; begin Add_Own_Variable_Local (Name => Name, The_Package => RawDict.Get_Package_Info_Ref (The_Package), -- GAA External Mode => Mode, Is_Protected => Is_Protected, Is_Interrupt_Stream => Is_Interrupt_Stream, Comp_Unit => Comp_Unit, Declaration => Declaration, The_Variable => The_Variable); Var_Symbol := RawDict.Get_Variable_Symbol (The_Variable); -- GAA External end Add_Own_Variable; -------------------------------------------------------------------------------- -- Add_Own_Variable_Type -------------------------------------------------------------------------------- procedure Add_Own_Variable_Type (Own_Variable : in RawDict.Variable_Info_Ref; Type_Mark : in RawDict.Type_Info_Ref; Type_Reference : in Location) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Own_Variable, --# Type_Mark & --# SPARK_IO.File_Sys from *, --# Dict, --# LexTokenManager.State, --# Own_Variable, --# Type_Mark, --# Type_Reference; is begin RawDict.Set_Variable_Type (The_Variable => Own_Variable, Type_Mark => Type_Mark); -- Remember the abstract type, needed for spec-only view of own variable -- type, since the typeinfo gets overwritten during examination of body. 
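      -- (It is the type set by Set_Variable_Type above that gets overwritten
      -- when the body is examined; the abstract copy recorded below is what
      -- remains visible to a spec-only view of the own variable.)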
RawDict.Set_Variable_Abstract_Type (The_Variable => Own_Variable, Abstract_Type_Mark => Type_Mark); RawDict.Set_Type_Is_Own_Var_Type (Type_Mark => Type_Mark); RawDict.Set_Own_Variable_Typed (The_Own_Variable => RawDict.Get_Variable_Own_Variable (The_Variable => Own_Variable)); if Type_Mark /= Get_Unknown_Type_Mark then AddOtherReference (RawDict.Get_Type_Symbol (Type_Mark), Get_Owner (The_Variable => Own_Variable), Type_Reference); end if; end Add_Own_Variable_Type; -------------------------------------------------------------------------------- procedure AddOwnVariableType (OwnVariable : in Symbol; TypeMark : in Symbol; TypeReference : in Location) is begin Add_Own_Variable_Type (Own_Variable => RawDict.Get_Variable_Info_Ref (OwnVariable), -- GAA External Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark), -- GAA External Type_Reference => TypeReference); end AddOwnVariableType; -------------------------------------------------------------------------------- -- Add_Own_Task -------------------------------------------------------------------------------- procedure AddOwnTask (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; TypeMark : in Symbol; ThePackage : in Symbol; TaskSym : out Symbol) is The_Task : RawDict.Variable_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Own_Task (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Type_Mark : in RawDict.Type_Info_Ref; The_Package : in RawDict.Package_Info_Ref; The_Task : out RawDict.Variable_Info_Ref) --# global in out Dict; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# Name, --# The_Package, --# Type_Mark & --# The_Task from Comp_Unit, --# Declaration, --# Dict, --# Name; --# post The_Task /= RawDict.Null_Variable_Info_Ref; is OwnTask : Symbol; begin RawDict.Create_Variable (Name => Name, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, The_Variable => The_Task); RawDict.Set_Variable_Type (The_Variable => The_Task, Type_Mark => Type_Mark); RawDict.CreateOwnTask (Variable => The_Task, Owner => The_Package, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, OwnTask => OwnTask); RawDict.Set_Variable_Own_Task (The_Variable => The_Task, Own_Task => OwnTask); RawDict.SetNextOwnTask (OwnTask, RawDict.Get_Package_Task_List (The_Package => The_Package)); RawDict.Set_Package_Task_List (The_Package => The_Package, Task_List => OwnTask); end Add_Own_Task; begin -- AddOwnTask Add_Own_Task (Name => Name, Comp_Unit => Comp_Unit, Declaration => Declaration, Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark), -- GAA External The_Package => RawDict.Get_Package_Info_Ref (ThePackage), -- GAA External The_Task => The_Task); TaskSym := RawDict.Get_Variable_Symbol (The_Task); -- GAA External end AddOwnTask; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- procedure AddRefinementDefinition (ThePackage : in Symbol; Annotation : in Location) is begin if SPARK_IO.Is_Open (Dict.TemporaryFile) then Write_String (Dict.TemporaryFile, "refinement definition of "); Write_Name (File => Dict.TemporaryFile, Item => ThePackage); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => Annotation); Write_Line (Dict.TemporaryFile, " ;"); end if; end AddRefinementDefinition; 
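   --------------------------------------------------------------------------------
   -- Note on representation: the refinement constituents of an own variable are
   -- held as a singly linked list hung off its Own_Variable record.  Each of the
   -- Add*Constituent* operations that follow creates a Constituent record, links
   -- it to the current head of that list with Set_Next_Constituent and then makes
   -- it the new head with Set_Own_Variable_Constituents, so the list holds the
   -- constituents in reverse order of addition.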
-------------------------------------------------------------------------------- -- Add_Constituent_Sym -------------------------------------------------------------------------------- procedure AddConstituentSym (ConstituentVariable : in Symbol; Subject : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; ConstituentReference : in Location) is procedure Add_Constituent_Sym (Constituent_Variable : in RawDict.Variable_Info_Ref; The_Variable : in RawDict.Variable_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Constituent_Reference : in Location) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Constituent_Reference, --# Constituent_Variable, --# The_Variable & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Constituent_Reference, --# Constituent_Variable, --# Dict, --# LexTokenManager.State, --# The_Variable; is ThePackage : Symbol; The_Own_Variable : RawDict.Own_Variable_Info_Ref; The_Constituent : RawDict.Constituent_Info_Ref; begin The_Own_Variable := RawDict.Get_Variable_Own_Variable (The_Variable => The_Variable); ThePackage := RawDict.Get_Own_Variable_Owner (The_Own_Variable => The_Own_Variable); RawDict.Create_Constituent (The_Own_Variable => The_Own_Variable, The_Variable => Constituent_Variable, Mode => Get_Own_Variable_Mode (The_Variable => Constituent_Variable), Comp_Unit => Comp_Unit, Loc => Constituent_Reference.Start_Position, The_Constituent => The_Constituent); RawDict.Set_Variable_Constituent (The_Variable => Constituent_Variable, The_Constituent => The_Constituent); RawDict.Set_Next_Constituent (The_Constituent => The_Constituent, Next => RawDict.Get_Own_Variable_Constituents (The_Own_Variable => The_Own_Variable)); RawDict.Set_Own_Variable_Constituents (The_Own_Variable => The_Own_Variable, The_Constituent => The_Constituent); AddOtherReference (RawDict.Get_Variable_Symbol (The_Variable), ThePackage, Null_Location); AddOtherReference (RawDict.Get_Variable_Symbol (Constituent_Variable), ThePackage, Constituent_Reference); end Add_Constituent_Sym; begin -- AddConstituentSym Add_Constituent_Sym (Constituent_Variable => RawDict.Get_Variable_Info_Ref (ConstituentVariable), -- GAA External The_Variable => RawDict.Get_Variable_Info_Ref (Subject), -- GAA External Comp_Unit => Comp_Unit, Constituent_Reference => ConstituentReference); end AddConstituentSym; -------------------------------------------------------------------------------- -- Add_Constituent -------------------------------------------------------------------------------- procedure AddConstituent (Name : in LexTokenManager.Lex_String; Subject : in Symbol; Mode : in Modes; SubjectReference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; ConstituentReference : in Location) is procedure Add_Constituent (Name : in LexTokenManager.Lex_String; The_Subject : in RawDict.Variable_Info_Ref; Mode : in Modes; Subject_Reference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; Constituent_Reference : in Location) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Constituent_Reference, --# Mode, --# Name, --# The_Subject & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Constituent_Reference, --# Dict, --# LexTokenManager.State, --# Name, --# Subject_Reference, --# The_Subject; is ThePackage : Symbol; The_Own_Variable : RawDict.Own_Variable_Info_Ref; The_Variable : RawDict.Variable_Info_Ref; The_Constituent : 
RawDict.Constituent_Info_Ref; begin The_Own_Variable := RawDict.Get_Variable_Own_Variable (The_Variable => The_Subject); ThePackage := RawDict.Get_Own_Variable_Owner (The_Own_Variable => The_Own_Variable); RawDict.Create_Variable (Name => Name, Comp_Unit => Comp_Unit, Loc => Constituent_Reference.Start_Position, The_Variable => The_Variable); RawDict.Create_Constituent (The_Own_Variable => The_Own_Variable, The_Variable => The_Variable, Mode => Mode, Comp_Unit => Comp_Unit, Loc => Constituent_Reference.Start_Position, The_Constituent => The_Constituent); RawDict.Set_Variable_Constituent (The_Variable => The_Variable, The_Constituent => The_Constituent); RawDict.Set_Next_Constituent (The_Constituent => The_Constituent, Next => RawDict.Get_Own_Variable_Constituents (The_Own_Variable => The_Own_Variable)); RawDict.Set_Own_Variable_Constituents (The_Own_Variable => The_Own_Variable, The_Constituent => The_Constituent); AddOtherReference (RawDict.Get_Variable_Symbol (The_Subject), ThePackage, Subject_Reference); AddOtherReference (RawDict.Get_Variable_Symbol (The_Variable), ThePackage, Constituent_Reference); --# accept Flow, 601, SPARK_IO.File_Sys, Mode, "Spurious data coupling via Dictionary"; end Add_Constituent; begin -- AddConstituent Add_Constituent (Name => Name, The_Subject => RawDict.Get_Variable_Info_Ref (Subject), -- GAA External Mode => Mode, Subject_Reference => SubjectReference, Comp_Unit => Comp_Unit, Constituent_Reference => ConstituentReference); end AddConstituent; -------------------------------------------------------------------------------- -- Add_Embedded_Constituent -------------------------------------------------------------------------------- procedure AddEmbeddedConstituent (PackageName : in LexTokenManager.Lex_String; VariableName : in LexTokenManager.Lex_String; Subject : in Symbol; Mode : in Modes; SubjectReference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; ConstituentReference : in Location) is procedure Add_Embedded_Constituent (Package_Name : in LexTokenManager.Lex_String; Variable_Name : in LexTokenManager.Lex_String; The_Subject : in RawDict.Variable_Info_Ref; Mode : in Modes; Subject_Reference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; Constituent_Reference : in Location) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Constituent_Reference, --# LexTokenManager.State, --# Mode, --# Package_Name, --# The_Subject, --# Variable_Name & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Constituent_Reference, --# Dict, --# LexTokenManager.State, --# Package_Name, --# Subject_Reference, --# The_Subject, --# Variable_Name; is Outer_Package : Symbol; Inner_Package : RawDict.Package_Info_Ref; The_Specification : RawDict.Declaration_Info_Ref; The_Own_Variable, The_Constituent_Own_Variable : RawDict.Own_Variable_Info_Ref; The_Constituent_Variable : RawDict.Variable_Info_Ref; The_Constituent : RawDict.Constituent_Info_Ref; begin The_Own_Variable := RawDict.Get_Variable_Own_Variable (The_Variable => The_Subject); Outer_Package := RawDict.Get_Own_Variable_Owner (The_Own_Variable => The_Own_Variable); Inner_Package := RawDict.Get_Package_Info_Ref (LookupImmediateScope (Package_Name, Set_Visibility (The_Visibility => Local, The_Unit => Outer_Package), ProofContext)); if Inner_Package = RawDict.Null_Package_Info_Ref then Add_Declaration (Comp_Unit => Comp_Unit, Loc => Constituent_Reference, Scope => Set_Visibility (The_Visibility => Local, The_Unit => 
Outer_Package), Context => ProofContext, The_Declaration => The_Specification); RawDict.Create_Package (Name => Package_Name, The_Declaration => The_Specification, Comp_Unit => Comp_Unit, Loc => Constituent_Reference.Start_Position, The_Package => Inner_Package); end if; RawDict.Create_Variable (Name => Variable_Name, Comp_Unit => Comp_Unit, Loc => Constituent_Reference.Start_Position, The_Variable => The_Constituent_Variable); RawDict.Create_Own_Variable (Variable => The_Constituent_Variable, Owner => RawDict.Get_Package_Symbol (Inner_Package), Comp_Unit => Comp_Unit, Loc => Constituent_Reference.Start_Position, The_Own_Variable => The_Constituent_Own_Variable); RawDict.Set_Variable_Own_Variable (The_Variable => The_Constituent_Variable, Own_Variable => The_Constituent_Own_Variable); RawDict.Set_Next_Own_Variable (The_Own_Variable => The_Constituent_Own_Variable, Next => RawDict.Get_Package_Own_Variables (The_Package => Inner_Package)); RawDict.Set_Package_Own_Variables (The_Package => Inner_Package, Own_Variables => The_Constituent_Own_Variable); RawDict.Create_Constituent (The_Own_Variable => The_Own_Variable, The_Variable => The_Constituent_Variable, Mode => Mode, Comp_Unit => Comp_Unit, Loc => Constituent_Reference.Start_Position, The_Constituent => The_Constituent); RawDict.Set_Variable_Constituent (The_Variable => The_Constituent_Variable, The_Constituent => The_Constituent); RawDict.Set_Next_Constituent (The_Constituent => The_Constituent, Next => RawDict.Get_Own_Variable_Constituents (The_Own_Variable => The_Own_Variable)); RawDict.Set_Own_Variable_Constituents (The_Own_Variable => The_Own_Variable, The_Constituent => The_Constituent); AddOtherReference (RawDict.Get_Variable_Symbol (The_Subject), Outer_Package, Subject_Reference); AddOtherReference (RawDict.Get_Constituent_Symbol (The_Constituent), Outer_Package, Constituent_Reference); --# accept Flow, 601, SPARK_IO.File_Sys, Mode, "Spurious data coupling via Dictionary"; end Add_Embedded_Constituent; begin -- AddEmbeddedConstituent Add_Embedded_Constituent (Package_Name => PackageName, Variable_Name => VariableName, The_Subject => RawDict.Get_Variable_Info_Ref (Subject), -- GAA External Mode => Mode, Subject_Reference => SubjectReference, Comp_Unit => Comp_Unit, Constituent_Reference => ConstituentReference); end AddEmbeddedConstituent; -------------------------------------------------------------------------------- -- Add_Child_Constituent -------------------------------------------------------------------------------- procedure AddChildConstituent (Variable : in Symbol; Subject : in Symbol; Mode : in Modes; SubjectReference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; ConstituentReference : in Location) is procedure Add_Child_Constituent (The_Variable : in RawDict.Variable_Info_Ref; The_Subject : in RawDict.Variable_Info_Ref; Mode : in Modes; Subject_Reference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; Constituent_Reference : in Location) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Constituent_Reference, --# Mode, --# The_Subject, --# The_Variable & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Constituent_Reference, --# Dict, --# LexTokenManager.State, --# Subject_Reference, --# The_Subject, --# The_Variable; is ThePackage : Symbol; The_Own_Variable : RawDict.Own_Variable_Info_Ref; The_Constituent : RawDict.Constituent_Info_Ref; begin The_Own_Variable := RawDict.Get_Variable_Own_Variable (The_Variable => 
The_Subject); ThePackage := RawDict.Get_Own_Variable_Owner (The_Own_Variable => The_Own_Variable); RawDict.Create_Constituent (The_Own_Variable => The_Own_Variable, The_Variable => The_Variable, Mode => Mode, Comp_Unit => Comp_Unit, Loc => Constituent_Reference.Start_Position, The_Constituent => The_Constituent); RawDict.Set_Variable_Constituent (The_Variable => The_Variable, The_Constituent => The_Constituent); RawDict.Set_Next_Constituent (The_Constituent => The_Constituent, Next => RawDict.Get_Own_Variable_Constituents (The_Own_Variable => The_Own_Variable)); RawDict.Set_Own_Variable_Constituents (The_Own_Variable => The_Own_Variable, The_Constituent => The_Constituent); AddOtherReference (RawDict.Get_Variable_Symbol (The_Subject), ThePackage, Subject_Reference); AddOtherReference (RawDict.Get_Variable_Symbol (The_Variable), ThePackage, Constituent_Reference); --# accept Flow, 601, SPARK_IO.File_Sys, Mode, "Spurious data coupling via Dictionary"; end Add_Child_Constituent; begin -- AddChildConstituent Add_Child_Constituent (The_Variable => RawDict.Get_Variable_Info_Ref (Variable), -- GAA External The_Subject => RawDict.Get_Variable_Info_Ref (Subject), -- GAA External Mode => Mode, Subject_Reference => SubjectReference, Comp_Unit => Comp_Unit, Constituent_Reference => ConstituentReference); end AddChildConstituent; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- procedure AddInitializationSpecification (ThePackage : in Symbol; Annotation : in Location) is begin if SPARK_IO.Is_Open (Dict.TemporaryFile) then Write_String (Dict.TemporaryFile, "initialization specification of "); Write_Name (File => Dict.TemporaryFile, Item => ThePackage); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => Annotation); Write_Line (Dict.TemporaryFile, " ;"); end if; end AddInitializationSpecification; -------------------------------------------------------------------------------- -- Add_Initialized_Own_Variable -------------------------------------------------------------------------------- procedure AddInitializedOwnVariable (Variable : in Symbol; VariableReference : in Location) is procedure Add_Initialized_Own_Variable (The_Variable : in RawDict.Variable_Info_Ref; Variable_Reference : in Location) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# The_Variable & --# SPARK_IO.File_Sys from *, --# Dict, --# LexTokenManager.State, --# The_Variable, --# Variable_Reference; is The_Own_Variable : RawDict.Own_Variable_Info_Ref; begin The_Own_Variable := RawDict.Get_Variable_Own_Variable (The_Variable => The_Variable); RawDict.Set_Own_Variable_Initialized (The_Own_Variable => The_Own_Variable); AddOtherReference (RawDict.Get_Variable_Symbol (The_Variable), RawDict.Get_Own_Variable_Owner (The_Own_Variable => The_Own_Variable), Variable_Reference); end Add_Initialized_Own_Variable; begin -- AddInitializedOwnVariable Add_Initialized_Own_Variable (The_Variable => RawDict.Get_Variable_Info_Ref (Variable), -- GAA External Variable_Reference => VariableReference); end AddInitializedOwnVariable; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- procedure AddPackageInitialization (ThePackage : in Symbol; Initialization : in Location; Hidden : in Boolean) is begin if SPARK_IO.Is_Open 
(Dict.TemporaryFile) then if Hidden then Write_String (Dict.TemporaryFile, "hidden "); end if; Write_String (Dict.TemporaryFile, "package initialization of "); Write_Name (File => Dict.TemporaryFile, Item => ThePackage); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => Initialization); Write_Line (Dict.TemporaryFile, " ;"); end if; end AddPackageInitialization; -------------------------------------------------------------------------------- procedure AddWithReference (The_Visibility : in Visibility; The_Unit : in Symbol; The_Withed_Symbol : in Symbol; Explicit : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Already_Present : out Boolean) is separate; -------------------------------------------------------------------------------- -- Add_Use_Type_Reference -------------------------------------------------------------------------------- procedure Add_Use_Type_Reference (The_Visibility : in Visibility; The_Unit : in Symbol; Type_Mark : in RawDict.Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# The_Unit, --# The_Visibility, --# Type_Mark & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# The_Unit, --# The_Visibility, --# Type_Mark; is separate; -------------------------------------------------------------------------------- procedure AddUseTypeReference (The_Visibility : in Visibility; The_Unit : in Symbol; TheType : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location) is begin Add_Use_Type_Reference (The_Visibility => The_Visibility, The_Unit => The_Unit, Type_Mark => RawDict.Get_Type_Info_Ref (TheType), -- GAA External Comp_Unit => Comp_Unit, Declaration => Declaration); end AddUseTypeReference; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- procedure AddInheritsReference (The_Unit : in Symbol; The_Inherited_Symbol : in Symbol; Explicit : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Already_Present : out Boolean) is separate; -------------------------------------------------------------------------------- procedure AddWriteReference (Variable, CompilationUnit : in Symbol; Reference : in Location) is begin Write_Reference (Discriminant => WriteRef, Item => Variable, CompilationUnit => CompilationUnit, Reference => Reference); end AddWriteReference; -------------------------------------------------------------------------------- procedure AddReadReference (Object, CompilationUnit : in Symbol; Reference : in Location) is begin Write_Reference (Discriminant => ReadRef, Item => Object, CompilationUnit => CompilationUnit, Reference => Reference); end AddReadReference; -------------------------------------------------------------------------------- procedure AddSubprogramCall (Subprogram, CompilationUnit : in Symbol; Call : in Location) is begin Write_Reference (Discriminant => CallRef, Item => Subprogram, CompilationUnit => CompilationUnit, Reference => Call); end AddSubprogramCall; -------------------------------------------------------------------------------- function SymbolRef (Item : Symbol) return ExaminerConstants.RefType is begin return ExaminerConstants.RefType (Item); end 
SymbolRef; -------------------------------------------------------------------------------- function ConvertSymbolRef (Ref : ExaminerConstants.RefType) return Symbol is begin return Symbol (Ref); end ConvertSymbolRef; -------------------------------------------------------------------------------- -- First_Extended_Record_Component -------------------------------------------------------------------------------- function FirstExtendedRecordComponent (TheRecordType : Symbol) return Iterator is function First_Extended_Record_Component (The_Record_Type : RawDict.Type_Info_Ref) return Iterator --# global in Dict; is The_Record_Component : RawDict.Record_Component_Info_Ref; Components : Iterator; Current_Record : RawDict.Type_Info_Ref; begin Current_Record := The_Record_Type; -- find root record loop Current_Record := Get_Type (The_Symbol => CurrentSymbol (First_Record_Component (Type_Mark => Current_Record))); exit when not Type_Is_Extended_Tagged (Type_Mark => Current_Record); end loop; -- Current_Record now points at Root record -- Assuming this record has some fields, we need the first one. The_Record_Component := RawDict.Get_Type_First_Record_Component (Type_Mark => Current_Record); loop if The_Record_Component /= RawDict.Null_Record_Component_Info_Ref then -- found a field Components := Iterator' (ExtendedRecordComponentIterator, IsAbstract, RawDict.Get_Record_Component_Symbol (The_Record_Component), RawDict.Get_Type_Symbol (The_Record_Type)); exit; end if; -- if we get here then the root record had no fields so we need to try the 1st extension Current_Record := Back_Track_Up_Inherit_Chain (Start_Sym => The_Record_Type, Stop_Sym => Current_Record); if Current_Record = RawDict.Null_Type_Info_Ref then -- we didn't find ANY fields in the entire extended record structure! Components := NullIterator; exit; end if; -- if we get here then we have successfully backtracked one place up the inherit chain -- and can have another go. 
Since we are now in an extension we need the second field -- so as to skip the Inherit field The_Record_Component := RawDict.Get_Next_Record_Component (The_Record_Component => RawDict.Get_Type_First_Record_Component (Type_Mark => Current_Record)); end loop; return Components; end First_Extended_Record_Component; begin -- FirstExtendedRecordComponent return First_Extended_Record_Component (The_Record_Type => RawDict.Get_Type_Info_Ref (TheRecordType)); -- GAA External end FirstExtendedRecordComponent; -------------------------------------------------------------------------------- -- Extended_Tagged_Has_Private_Ancestors -------------------------------------------------------------------------------- function ExtendedTaggedHasPrivateAncestors (TheType : Symbol; Scope : Scopes) return Boolean is function Extended_Tagged_Has_Private_Ancestors (Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is Result : Boolean := False; Current_Record : Symbol; Current_Type : RawDict.Type_Info_Ref; begin Current_Type := Type_Mark; loop -- follow chain of Inherit field pointers Current_Record := CurrentSymbol (First_Record_Component (Type_Mark => Current_Type)); exit when Current_Record = NullSymbol; -- root record is a null record Current_Type := Get_Type (The_Symbol => Current_Record); exit when not Type_Is_Tagged (Type_Mark => Current_Type); -- all fields checked - false result exit if Type_Is_Private_Here (Type_Mark => Current_Type, Scope => Scope) then Result := True; exit; -- true result exit end if; end loop; return Result; end Extended_Tagged_Has_Private_Ancestors; begin -- ExtendedTaggedHasPrivateAncestors return Extended_Tagged_Has_Private_Ancestors (Type_Mark => RawDict.Get_Type_Info_Ref (TheType), -- GAA External Scope => Scope); end ExtendedTaggedHasPrivateAncestors; -------------------------------------------------------------------------------- -- Get_Number_Of_Non_Extended_Components -------------------------------------------------------------------------------- function Get_Number_Of_Non_Extended_Components (The_Record_Type : RawDict.Type_Info_Ref) return Natural --# global in Dict; is Component : Iterator; Count : Natural := Natural'First; begin Component := First_Record_Component (Type_Mark => The_Record_Type); -- record has no components perhaps because it is a private type -- and the private part is hidden or perhaps because it is a -- tagged null record if CurrentSymbol (Component) /= NullSymbol then while not IsNullIterator (Component) and then Count < Natural'Last loop Component := NextSymbol (Component); Count := Count + 1; end loop; end if; return Count; end Get_Number_Of_Non_Extended_Components; -------------------------------------------------------------------------------- function GetNumberOfNonExtendedComponents (TheRecordType : Symbol) return Natural is begin return Get_Number_Of_Non_Extended_Components (The_Record_Type => RawDict.Get_Type_Info_Ref (TheRecordType)); -- GAA External end GetNumberOfNonExtendedComponents; -------------------------------------------------------------------------------- -- Get_Number_Of_Actual_Components -------------------------------------------------------------------------------- -- As above but ignore Inherit fields of extended tagged records function Get_Number_Of_Actual_Components (The_Record_Type : RawDict.Type_Info_Ref) return Natural --# global in Dict; is Result : Natural; begin Result := Get_Number_Of_Non_Extended_Components (The_Record_Type => The_Record_Type); if Type_Is_Extended_Tagged (Type_Mark 
=> The_Record_Type) then if Result > 0 then Result := Result - 1; -- ignore Inherit field else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "in Dictionary.Get_Number_Of_Actual_Components"); end if; end if; return Result; end Get_Number_Of_Actual_Components; -------------------------------------------------------------------------------- function GetNumberOfActualComponents (TheRecordType : Symbol) return Natural is begin return Get_Number_Of_Actual_Components (The_Record_Type => RawDict.Get_Type_Info_Ref (TheRecordType)); -- GAA External end GetNumberOfActualComponents; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function Get_Number_Of_Extended_Components (The_Record_Type : RawDict.Type_Info_Ref) return Natural --# global in Dict; is Current_Type : RawDict.Type_Info_Ref; Current_Record : Symbol; Number_Of_Actual_Components : Natural; Count : Natural := Natural'First; begin Current_Type := The_Record_Type; loop Number_Of_Actual_Components := Get_Number_Of_Actual_Components (The_Record_Type => Current_Type); exit when Number_Of_Actual_Components > Natural'Last - Count; Count := Count + Number_Of_Actual_Components; -- Move down chain of Inherit pointers Current_Record := CurrentSymbol (First_Record_Component (Type_Mark => Current_Type)); exit when Current_Record = NullSymbol; -- root record is null record Current_Type := Get_Type (The_Symbol => Current_Record); exit when not Type_Is_Tagged (Type_Mark => Current_Type); -- all inherited fields checked end loop; return Count; end Get_Number_Of_Extended_Components; -------------------------------------------------------------------------------- -- Get_Number_Of_Components -------------------------------------------------------------------------------- function GetNumberOfComponents (TheRecordType : Symbol) return Natural is function Get_Number_Of_Components (The_Record_Type : RawDict.Type_Info_Ref) return Natural --# global in Dict; is Result : Natural; begin if Type_Is_Extended_Tagged (Type_Mark => The_Record_Type) then Result := Get_Number_Of_Extended_Components (The_Record_Type => The_Record_Type); else Result := Get_Number_Of_Non_Extended_Components (The_Record_Type => The_Record_Type); end if; return Result; end Get_Number_Of_Components; begin -- GetNumberOfComponents return Get_Number_Of_Components (The_Record_Type => RawDict.Get_Type_Info_Ref (TheRecordType)); -- GAA External end GetNumberOfComponents; -------------------------------------------------------------------------------- -- No_Fields_Below_This_Record -------------------------------------------------------------------------------- function No_Fields_Below_This_Record (The_Record_Type : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is Result : Boolean := True; Current_Record : RawDict.Type_Info_Ref; begin Current_Record := RawDict.Get_Type_Extends (Type_Mark => The_Record_Type); loop -- stop if not an extended record and therefore nowhere else to check exit when Current_Record = RawDict.Null_Type_Info_Ref; -- if we find some fields then function returns false if Get_Number_Of_Actual_Components (The_Record_Type => Current_Record) > 0 then Result := False; exit; end if; -- move down inherit chain Current_Record := RawDict.Get_Type_Extends (Type_Mark => Current_Record); end loop; return Result; end No_Fields_Below_This_Record; -------------------------------------------------------------------------------- function 
   NoFieldsBelowThisRecord (RecordSym : Symbol) return Boolean is
   begin
      return No_Fields_Below_This_Record (The_Record_Type => RawDict.Get_Type_Info_Ref (RecordSym)); -- GAA External
   end NoFieldsBelowThisRecord;
   --------------------------------------------------------------------------------
   -- Record_Has_Some_Fields
   --------------------------------------------------------------------------------
   function RecordHasSomeFields (RecordSym : Symbol) return Boolean is
      function Record_Has_Some_Fields (Type_Mark : RawDict.Type_Info_Ref) return Boolean
      --# global in Dict;
      is
         Root_Type : RawDict.Type_Info_Ref;
         Result    : Boolean;
      begin
         -- a record needs to be declared if:
         -- (1) it has some actual fields; or
         -- (2) it is an extended record and an inherited record has some actual fields
         --
         -- The inverse is easier to calculate.  We don't need a declaration if:
         -- (1) the record has no fields (even an Inherit one); or
         -- (2) it is extended and has no actual fields and there are no fields below it.
         -- Type_Mark might be a record subtype, so...
         Root_Type := Get_Root_Type (Type_Mark => Type_Mark);
         Result := (Get_Number_Of_Non_Extended_Components (The_Record_Type => Root_Type) = 0)
           or else (Type_Is_Extended_Tagged (Type_Mark => Root_Type)
                      and then Get_Number_Of_Actual_Components (The_Record_Type => Root_Type) = 0
                      and then No_Fields_Below_This_Record (The_Record_Type => Root_Type));
         return not Result;
      end Record_Has_Some_Fields;
   begin -- RecordHasSomeFields
      return Record_Has_Some_Fields (Type_Mark => RawDict.Get_Type_Info_Ref (RecordSym)); -- GAA External
   end RecordHasSomeFields;
   --------------------------------------------------------------------------------
   -- Get_Non_Extended_Record_Component
   --------------------------------------------------------------------------------
   function Get_Non_Extended_Record_Component (The_Record_Type : RawDict.Type_Info_Ref; Number : Positive) return Symbol
   --# global in Dict;
   is
      Component : Iterator;
   begin
      Component := First_Record_Component (Type_Mark => The_Record_Type);
      for No in Positive range 1 ..
Number - 1 loop Component := NextSymbol (Component); end loop; return CurrentSymbol (Component); end Get_Non_Extended_Record_Component; -------------------------------------------------------------------------------- function GetNonExtendedRecordComponent (TheRecordType : Symbol; Number : Positive) return Symbol is begin return Get_Non_Extended_Record_Component (The_Record_Type => RawDict.Get_Type_Info_Ref (TheRecordType), -- GAA External Number => Number); end GetNonExtendedRecordComponent; -------------------------------------------------------------------------------- -- Get_Record_Component -------------------------------------------------------------------------------- function Get_Record_Component (The_Record_Type : RawDict.Type_Info_Ref; Number : Positive) return Symbol --# global in Dict; is separate; -------------------------------------------------------------------------------- function GetRecordComponent (TheRecordType : Symbol; Number : Positive) return Symbol is begin return Get_Record_Component (The_Record_Type => RawDict.Get_Type_Info_Ref (TheRecordType), -- GAA External Number => Number); end GetRecordComponent; -------------------------------------------------------------------------------- -- First_Subprogram_Parameter -------------------------------------------------------------------------------- function First_Ada_Subprogram_Parameter (The_Subprogram : RawDict.Subprogram_Info_Ref) return Iterator --# global in Dict; is The_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref; SubprogramParameters : Iterator; begin The_Subprogram_Parameter := RawDict.Get_Subprogram_First_Parameter (The_Subprogram => The_Subprogram); if The_Subprogram_Parameter = RawDict.Null_Subprogram_Parameter_Info_Ref then SubprogramParameters := NullIterator; else SubprogramParameters := Iterator' (SubprogramParameterIterator, IsAbstract, RawDict.Get_Subprogram_Parameter_Symbol (The_Subprogram_Parameter), NullSymbol); end if; return SubprogramParameters; end First_Ada_Subprogram_Parameter; -------------------------------------------------------------------------------- function First_Implicit_Proof_Function_Parameter (ProofFunction : Symbol) return Iterator --# global in Dict; is Ada_Function : RawDict.Subprogram_Info_Ref; SubprogramParameters : Iterator; Abstraction : Abstractions; Global_Variables : Iterator; begin Ada_Function := RawDict.GetImplicitProofFunctionAdaFunction (ProofFunction); SubprogramParameters := First_Ada_Subprogram_Parameter (The_Subprogram => Ada_Function); if RawDict.Get_Subprogram_Implicit_Proof_Function (The_Subprogram => Ada_Function, Abstraction => IsAbstract) = ProofFunction then Abstraction := IsAbstract; else Abstraction := IsRefined; end if; if IsNullIterator (SubprogramParameters) then Global_Variables := First_Subprogram_Global_Variable (The_Subprogram => Ada_Function, Abstraction => Abstraction); if not IsNullIterator (Global_Variables) then SubprogramParameters := Iterator' (ImplicitProofFunctionGlobalIterator, Abstraction, CurrentSymbol (Global_Variables), Global_Variables.Context); end if; else SubprogramParameters := Iterator'(ImplicitProofFunctionParameterIterator, Abstraction, CurrentSymbol (SubprogramParameters), ProofFunction); end if; return SubprogramParameters; end First_Implicit_Proof_Function_Parameter; -------------------------------------------------------------------------------- function FirstSubprogramParameter (Subprogram : Symbol) return Iterator is SubprogramParameters : Iterator; begin case RawDict.GetSymbolDiscriminant (Subprogram) 
is when Subprogram_Symbol => SubprogramParameters := First_Ada_Subprogram_Parameter (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Subprogram)); when Type_Symbol => -- a task type has no parameters SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Subprogram)), Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.FirstSubprogramParameter"); SubprogramParameters := NullIterator; when ImplicitProofFunctionSymbol => SubprogramParameters := First_Implicit_Proof_Function_Parameter (ProofFunction => Subprogram); when others => -- non-exec code SubprogramParameters := NullIterator; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.FirstSubprogramParameter"); end case; return SubprogramParameters; end FirstSubprogramParameter; -------------------------------------------------------------------------------- -- Actual_Of_Generic_Parameter -------------------------------------------------------------------------------- function Actual_Of_Generic_Parameter (The_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref; Actual_Subprogram : RawDict.Subprogram_Info_Ref) return RawDict.Subprogram_Parameter_Info_Ref --# global in Dict; --# return Result => (Result /= RawDict.Null_Subprogram_Parameter_Info_Ref); is Generic_It, Actual_It : Iterator; Result : RawDict.Subprogram_Parameter_Info_Ref := RawDict.Null_Subprogram_Parameter_Info_Ref; begin -- traverse generic and instantiation subprogram parameters lists in parallel Generic_It := First_Ada_Subprogram_Parameter (The_Subprogram => RawDict.Get_Subprogram_Parameter_Subprogram (The_Subprogram_Parameter => The_Subprogram_Parameter)); Actual_It := First_Ada_Subprogram_Parameter (The_Subprogram => Actual_Subprogram); while not IsNullIterator (Generic_It) and then not IsNullIterator (Actual_It) loop if RawDict.Get_Subprogram_Parameter_Info_Ref (Item => CurrentSymbol (Generic_It)) = The_Subprogram_Parameter then -- found it Result := RawDict.Get_Subprogram_Parameter_Info_Ref (Item => CurrentSymbol (Actual_It)); exit; end if; Generic_It := NextSymbol (Generic_It); Actual_It := NextSymbol (Actual_It); SystemErrors.RT_Assert (C => not (IsNullIterator (Generic_It) xor IsNullIterator (Actual_It)), Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Actual_Of_Generic_Parameter"); end loop; SystemErrors.RT_Assert (C => Result /= RawDict.Null_Subprogram_Parameter_Info_Ref, Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Actual_Of_Generic_Parameter"); return Result; end Actual_Of_Generic_Parameter; -------------------------------------------------------------------------------- function ActualOfGenericParameter (TheParameter : Symbol; ActualSubprogramSym : Symbol) return Symbol is begin return RawDict.Get_Subprogram_Parameter_Symbol -- GAA External (Actual_Of_Generic_Parameter (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (TheParameter), -- GAA External Actual_Subprogram => RawDict.Get_Subprogram_Info_Ref (ActualSubprogramSym))); -- GAA External end ActualOfGenericParameter; -------------------------------------------------------------------------------- -- Actual_Of_Parameter_Constraint -------------------------------------------------------------------------------- function ActualOfParameterConstraint (TheParameter : Symbol; ActualSubprogramSym : Symbol) return Symbol is function Actual_Of_Parameter_Constraint (The_Parameter_Constraint : RawDict.Parameter_Constraint_Info_Ref; Actual_Subprogram 
: RawDict.Subprogram_Info_Ref) return RawDict.Parameter_Constraint_Info_Ref --# global in Dict; --# return Result => (Result /= RawDict.Null_Parameter_Constraint_Info_Ref); is Actual_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref; Result : RawDict.Parameter_Constraint_Info_Ref; begin Actual_Subprogram_Parameter := Actual_Of_Generic_Parameter (The_Subprogram_Parameter => RawDict.Get_Parameter_Constraint_Subprogram_Parameter (The_Parameter_Constraint), Actual_Subprogram => Actual_Subprogram); Result := RawDict.Get_Subprogram_Parameter_Index_Constraints (The_Subprogram_Parameter => Actual_Subprogram_Parameter); while Result /= RawDict.Null_Parameter_Constraint_Info_Ref and then RawDict.Get_Parameter_Constraint_Dimension (The_Parameter_Constraint => Result) /= RawDict.Get_Parameter_Constraint_Dimension (The_Parameter_Constraint => The_Parameter_Constraint) loop Result := RawDict.Get_Next_Parameter_Constraint (The_Parameter_Constraint => Result); end loop; SystemErrors.RT_Assert (C => Result /= RawDict.Null_Parameter_Constraint_Info_Ref, Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Actual_Of_Parameter_Constraint"); return Result; end Actual_Of_Parameter_Constraint; begin -- ActualOfParameterConstraint return RawDict.Get_Parameter_Constraint_Symbol -- GAA External (Actual_Of_Parameter_Constraint (The_Parameter_Constraint => RawDict.Get_Parameter_Constraint_Info_Ref (TheParameter), -- GAA External Actual_Subprogram => RawDict.Get_Subprogram_Info_Ref (ActualSubprogramSym))); -- GAA External end ActualOfParameterConstraint; -------------------------------------------------------------------------------- -- First_Known_Discriminant -------------------------------------------------------------------------------- function First_Known_Discriminant (Protected_Or_Task_Type : RawDict.Type_Info_Ref) return Iterator --# global in Dict; is Discriminant : Symbol; Discriminants : Iterator; begin case RawDict.Get_Type_Discriminant (Type_Mark => Protected_Or_Task_Type) is when Protected_Type_Item => Discriminant := RawDict.Get_Protected_Type_First_Discriminant (The_Protected_Type => Protected_Or_Task_Type); when Task_Type_Item => Discriminant := RawDict.Get_Task_Type_First_Discriminant (The_Task_Type => Protected_Or_Task_Type); when others => -- non-exec code Discriminant := NullSymbol; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.First_Known_Discriminant"); end case; if Discriminant = NullSymbol then Discriminants := NullIterator; else Discriminants := Iterator'(KnownDiscriminantIterator, IsAbstract, Discriminant, NullSymbol); end if; return Discriminants; end First_Known_Discriminant; -------------------------------------------------------------------------------- function FirstKnownDiscriminant (ProtectedOrTaskType : Symbol) return Iterator is begin return First_Known_Discriminant (Protected_Or_Task_Type => RawDict.Get_Type_Info_Ref (ProtectedOrTaskType)); -- GAA External end FirstKnownDiscriminant; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function First_Discriminant_Constraint (Protected_Or_Task_Subtype : RawDict.Type_Info_Ref) return Iterator --# global in Dict; is Discriminant : Symbol; Extra_Info : Symbol; Discriminants : Iterator; begin Extra_Info := RawDict.Get_Type_Ancillary_Fields (Type_Mark => Protected_Or_Task_Subtype); Discriminant := RawDict.GetSubtypeInfoFirstConstraint (Extra_Info); if 
Discriminant = NullSymbol then Discriminants := NullIterator; else Discriminants := Iterator'(DiscriminantConstraintIterator, IsAbstract, Discriminant, NullSymbol); end if; return Discriminants; end First_Discriminant_Constraint; -------------------------------------------------------------------------------- -- Is_Constituent -------------------------------------------------------------------------------- function Is_Constituent (The_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Variable_Constituent (The_Variable => The_Variable) /= RawDict.Null_Constituent_Info_Ref; end Is_Constituent; -------------------------------------------------------------------------------- function IsConstituent (Variable : Symbol) return Boolean is begin return RawDict.GetSymbolDiscriminant (Variable) = Variable_Symbol and then Is_Constituent (The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External end IsConstituent; -------------------------------------------------------------------------------- -- Get_Own_Variable_Or_Constituent_Mode -------------------------------------------------------------------------------- function Get_Own_Variable_Or_Constituent_Mode (The_Variable : RawDict.Variable_Info_Ref) return Modes --# global in Dict; is Result : Modes; begin if Is_Constituent (The_Variable => The_Variable) then Result := Get_Constituent_Mode (The_Variable => The_Variable); elsif Is_Own_Variable (The_Variable => The_Variable) then Result := Get_Own_Variable_Mode (The_Variable => The_Variable); else Result := DefaultMode; end if; return Result; end Get_Own_Variable_Or_Constituent_Mode; -------------------------------------------------------------------------------- function GetOwnVariableOrConstituentMode (Variable : Symbol) return Modes is Result : Modes; begin case RawDict.GetSymbolDiscriminant (Variable) is when Variable_Symbol => Result := Get_Own_Variable_Or_Constituent_Mode (The_Variable => RawDict.Get_Variable_Info_Ref (Item => Variable)); -- GAA External when others => Result := DefaultMode; end case; return Result; end GetOwnVariableOrConstituentMode; -------------------------------------------------------------------------------- -- First_Dependency -------------------------------------------------------------------------------- function First_Subprogram_Dependency (The_Subprogram : RawDict.Subprogram_Info_Ref; Abstraction : Abstractions; The_Export : Symbol) return Iterator --# global in Dict; is The_Dependency : RawDict.Dependency_Info_Ref; Dependencies : Iterator; begin -- look up dependencies as before if RawDict.GetSymbolDiscriminant (The_Export) = Subprogram_Parameter_Symbol and then RawDict.Get_Subprogram_Parameter_Subprogram (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => The_Export)) = The_Subprogram then The_Dependency := RawDict.Get_Subprogram_Parameter_Dependencies (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => The_Export), Abstraction => Abstraction); else case RawDict.GetSymbolDiscriminant (The_Export) is when Variable_Symbol => The_Dependency := RawDict.Get_Global_Variable_Dependencies (The_Global_Variable => Get_Subprogram_Variable_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Variable => RawDict.Get_Variable_Info_Ref (Item => The_Export)), Abstraction => Abstraction); when Subprogram_Parameter_Symbol => The_Dependency := RawDict.Get_Global_Variable_Dependencies (The_Global_Variable => 
Get_Subprogram_Parameter_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => The_Export)), Abstraction => Abstraction); when others => -- non-exec code The_Dependency := RawDict.Null_Dependency_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.First_Subprogram_Dependency"); end case; end if; if The_Dependency = RawDict.Null_Dependency_Info_Ref then Dependencies := NullIterator; else case RawDict.Get_Kind_Of_Dependency (The_Dependency => The_Dependency) is when RawDict.Dependency_Parameter_Item => Dependencies := Iterator' (DependencyIterator, IsAbstract, RawDict.Get_Subprogram_Parameter_Symbol (RawDict.Get_Dependency_Import_Parameter (The_Dependency => The_Dependency)), RawDict.Get_Dependency_Symbol (The_Dependency)); when RawDict.Dependency_Variable_Item => Dependencies := Iterator' (DependencyIterator, IsAbstract, RawDict.Get_Variable_Symbol (RawDict.Get_Dependency_Import_Variable (The_Dependency => The_Dependency)), RawDict.Get_Dependency_Symbol (The_Dependency)); end case; end if; return Dependencies; end First_Subprogram_Dependency; -------------------------------------------------------------------------------- function First_Task_Type_Dependency (The_Task_Type : RawDict.Type_Info_Ref; Abstraction : Abstractions; The_Export : RawDict.Variable_Info_Ref) return Iterator --# global in Dict; is The_Dependency : RawDict.Dependency_Info_Ref; Dependencies : Iterator; begin -- look up dependencies as before The_Dependency := RawDict.Get_Global_Variable_Dependencies (The_Global_Variable => Get_Task_Type_Variable_Global_Variable (The_Task_Type => The_Task_Type, Abstraction => Abstraction, The_Variable => The_Export), Abstraction => Abstraction); if The_Dependency = RawDict.Null_Dependency_Info_Ref then Dependencies := NullIterator; else Dependencies := Iterator' (DependencyIterator, IsAbstract, RawDict.Get_Variable_Symbol (RawDict.Get_Dependency_Import_Variable (The_Dependency => The_Dependency)), RawDict.Get_Dependency_Symbol (The_Dependency)); end if; return Dependencies; end First_Task_Type_Dependency; -------------------------------------------------------------------------------- function FirstDependency (Abstraction : Abstractions; TheProcedure : Symbol; TheExport : Symbol) return Iterator is Result : Iterator; begin case RawDict.GetSymbolDiscriminant (TheProcedure) is when Subprogram_Symbol => Result := First_Subprogram_Dependency (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheProcedure), -- GAA External Abstraction => Abstraction, The_Export => TheExport); when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheProcedure)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.FirstDependency"); Result := First_Task_Type_Dependency (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => TheProcedure), -- GAA External Abstraction => Abstraction, The_Export => RawDict.Get_Variable_Info_Ref (TheExport)); -- GAA External when others => -- non-exec code Result := NullIterator; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.FirstDependency"); end case; return Result; end FirstDependency; -------------------------------------------------------------------------------- -- First_Export -------------------------------------------------------------------------------- function 
First_Subprogram_Export (The_Subprogram : RawDict.Subprogram_Info_Ref; Abstraction : Abstractions) return Iterator --# global Dict; is The_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref; The_Global_Variable : RawDict.Global_Variable_Info_Ref; The_Variable : RawDict.Variable_Info_Ref; Exports : Iterator; begin The_Subprogram_Parameter := RawDict.Get_Subprogram_First_Parameter (The_Subprogram => The_Subprogram); if The_Subprogram_Parameter /= RawDict.Null_Subprogram_Parameter_Info_Ref then Exports := Iterator'(ExportIterator, Abstraction, RawDict.Get_Subprogram_Parameter_Symbol (The_Subprogram_Parameter), NullSymbol); if not Is_Exported_Subprogram_Subprogram_Parameter (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Subprogram_Parameter => The_Subprogram_Parameter, Is_Implicit_Proof_Function => False) then Exports := NextSymbol (Exports); end if; else The_Global_Variable := RawDict.Get_Subprogram_First_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction); if The_Global_Variable /= RawDict.Null_Global_Variable_Info_Ref then case RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) is when RawDict.Subprogram_Variable_Item => The_Variable := RawDict.Get_Global_Variable_Variable (The_Global_Variable => The_Global_Variable); Exports := Iterator' (ExportIterator, Abstraction, RawDict.Get_Variable_Symbol (The_Variable), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); if not Is_Exported_Subprogram_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Variable => The_Variable) then Exports := NextSymbol (Exports); end if; when RawDict.Subprogram_Parameter_Item => The_Subprogram_Parameter := RawDict.Get_Global_Variable_Parameter (The_Global_Variable => The_Global_Variable); Exports := Iterator' (ExportIterator, Abstraction, RawDict.Get_Subprogram_Parameter_Symbol (The_Subprogram_Parameter), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); if not Is_Exported_Subprogram_Subprogram_Parameter (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Subprogram_Parameter => The_Subprogram_Parameter, Is_Implicit_Proof_Function => False) then Exports := NextSymbol (Exports); end if; when others => -- non-exec code Exports := NullIterator; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.First_Subprogram_Export"); end case; else Exports := NullIterator; end if; end if; return Exports; end First_Subprogram_Export; -------------------------------------------------------------------------------- function First_Task_Type_Export (The_Task_Type : RawDict.Type_Info_Ref; Abstraction : Abstractions) return Iterator --# global Dict; is The_Global_Variable : RawDict.Global_Variable_Info_Ref; The_Variable : RawDict.Variable_Info_Ref; Exports : Iterator; begin The_Global_Variable := RawDict.Get_Task_Type_First_Global_Variable (The_Task_Type => The_Task_Type, Abstraction => Abstraction); if The_Global_Variable /= RawDict.Null_Global_Variable_Info_Ref then The_Variable := RawDict.Get_Global_Variable_Variable (The_Global_Variable => The_Global_Variable); Exports := Iterator' (ExportIterator, Abstraction, RawDict.Get_Variable_Symbol (The_Variable), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); if not Is_Exported_Task_Type_Variable (The_Task_Type => The_Task_Type, Abstraction => Abstraction, The_Variable => The_Variable) then Exports := NextSymbol (Exports); end if; else Exports := NullIterator; end if; return Exports; end 
First_Task_Type_Export; -------------------------------------------------------------------------------- function FirstExport (Abstraction : Abstractions; TheProcedure : Symbol) return Iterator is Exports : Iterator; begin -- can now be called for task types as well and these have no parameters case RawDict.GetSymbolDiscriminant (TheProcedure) is when Subprogram_Symbol => -- seek exports etc. as before Exports := First_Subprogram_Export (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheProcedure), -- GAA External Abstraction => Abstraction); when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheProcedure)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.FirstExport"); Exports := First_Task_Type_Export (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => TheProcedure), -- GAA External Abstraction => Abstraction); when others => -- non-exec code Exports := NullIterator; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.FirstExport"); end case; return Exports; end FirstExport; -------------------------------------------------------------------------------- -- First_Suspends_List_Item -------------------------------------------------------------------------------- function First_Task_Type_Suspends_List_Item (The_Task_Type : RawDict.Type_Info_Ref) return Iterator --# global Dict; is SuspendsList : Symbol; It : Iterator; begin SuspendsList := RawDict.Get_Task_Type_Suspends_List (The_Task_Type => The_Task_Type); if SuspendsList = NullSymbol then It := NullIterator; else It := Iterator'(SuspendsListItemIterator, IsAbstract, RawDict.GetSuspendsListItem (SuspendsList), SuspendsList); end if; return It; end First_Task_Type_Suspends_List_Item; -------------------------------------------------------------------------------- function First_Subprogram_Suspends_List_Item (The_Subprogram : RawDict.Subprogram_Info_Ref) return Iterator --# global Dict; is SuspendsList : Symbol; It : Iterator; begin SuspendsList := RawDict.Get_Subprogram_Suspends_List (The_Subprogram => The_Subprogram); if SuspendsList = NullSymbol then It := NullIterator; else It := Iterator'(SuspendsListItemIterator, IsAbstract, RawDict.GetSuspendsListItem (SuspendsList), SuspendsList); end if; return It; end First_Subprogram_Suspends_List_Item; -------------------------------------------------------------------------------- function FirstSuspendsListItem (TheTaskOrProc : Symbol) return Iterator is It : Iterator; begin case RawDict.GetSymbolDiscriminant (TheTaskOrProc) is when Subprogram_Symbol => It := First_Subprogram_Suspends_List_Item (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheTaskOrProc)); -- GAA External when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheTaskOrProc)), Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.FirstSuspendsListItem"); It := First_Task_Type_Suspends_List_Item (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => TheTaskOrProc)); -- GAA External when others => -- non-exec code It := NullIterator; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.FirstSuspendsListItem"); end case; return It; end FirstSuspendsListItem; -------------------------------------------------------------------------------- -- First_Embedded_Package -------------------------------------------------------------------------------- function 
First_Embedded_Package_In_Package (The_Package : RawDict.Package_Info_Ref) return Iterator --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol; Embedded_Packages : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Package_First_Local_Declaration (The_Package => The_Package); while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); if RawDict.GetSymbolDiscriminant (Item) = Package_Symbol and then The_Declaration /= RawDict.Get_Package_Body (The_Package => RawDict.Get_Package_Info_Ref (Item => Item)) then Embedded_Packages := Iterator'(EmbeddedPackageIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (The_Declaration)); The_Declaration := RawDict.Null_Declaration_Info_Ref; else The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; return Embedded_Packages; end First_Embedded_Package_In_Package; -------------------------------------------------------------------------------- function First_Embedded_Package (Compilation_Unit : Symbol) return Iterator is Embedded_Packages : Iterator; -------------------------------------------------------------------------------- function First_Embedded_Package_In_Subprogram (The_Subprogram : RawDict.Subprogram_Info_Ref) return Iterator --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol; Embedded_Packages : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Subprogram_First_Declaration (The_Subprogram => The_Subprogram); while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); if RawDict.GetSymbolDiscriminant (Item) = Package_Symbol and then The_Declaration /= RawDict.Get_Package_Body (The_Package => RawDict.Get_Package_Info_Ref (Item => Item)) then Embedded_Packages := Iterator'(EmbeddedPackageIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (The_Declaration)); The_Declaration := RawDict.Null_Declaration_Info_Ref; else The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; return Embedded_Packages; end First_Embedded_Package_In_Subprogram; -------------------------------------------------------------------------------- function First_Embedded_Package_In_Task_Type (The_Task_Type : RawDict.Type_Info_Ref) return Iterator --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol; Embedded_Packages : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Task_Type_First_Local_Declaration (The_Task_Type => The_Task_Type); while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); if RawDict.GetSymbolDiscriminant (Item) = Package_Symbol and then The_Declaration /= RawDict.Get_Package_Body (The_Package => RawDict.Get_Package_Info_Ref (Item => Item)) then Embedded_Packages := Iterator'(EmbeddedPackageIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (The_Declaration)); The_Declaration := RawDict.Null_Declaration_Info_Ref; else The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; return Embedded_Packages; end First_Embedded_Package_In_Task_Type; begin -- First_Embedded_Package case RawDict.GetSymbolDiscriminant (Compilation_Unit) is when Package_Symbol => Embedded_Packages := First_Embedded_Package_In_Package (The_Package => 
RawDict.Get_Package_Info_Ref (Item => Compilation_Unit)); -- GAA External when Subprogram_Symbol => Embedded_Packages := First_Embedded_Package_In_Subprogram (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Compilation_Unit)); -- GAA External when Type_Symbol => if RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Compilation_Unit)) = -- GAA External Task_Type_Item then Embedded_Packages := First_Embedded_Package_In_Task_Type (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => Compilation_Unit)); -- GAA External else Embedded_Packages := NullIterator; end if; when others => -- non-exec code Embedded_Packages := NullIterator; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.First_Embedded_Package"); end case; return Embedded_Packages; end First_Embedded_Package; -------------------------------------------------------------------------------- -- Get_Type_Has_Pragma -------------------------------------------------------------------------------- function Get_Type_Has_Pragma (Protected_Or_Task_Type : RawDict.Type_Info_Ref; The_Pragma : RavenscarPragmas) return Boolean --# global in Dict; is Result : Boolean; begin case RawDict.Get_Type_Discriminant (Type_Mark => Protected_Or_Task_Type) is when Protected_Type_Item => Result := RawDict.Get_Protected_Type_Has_Pragma (The_Protected_Type => Protected_Or_Task_Type, The_Pragma => The_Pragma); when Task_Type_Item => Result := RawDict.Get_Task_Type_Has_Pragma (The_Task_Type => Protected_Or_Task_Type, The_Pragma => The_Pragma); when others => -- non-exec code Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.Get_Type_Has_Pragma"); end case; return Result; end Get_Type_Has_Pragma; -------------------------------------------------------------------------------- function GetTypeHasPragma (TheProtectedOrTaskType : Symbol; ThePragma : RavenscarPragmas) return Boolean is begin return Get_Type_Has_Pragma (Protected_Or_Task_Type => RawDict.Get_Type_Info_Ref (TheProtectedOrTaskType), -- GAA External The_Pragma => ThePragma); end GetTypeHasPragma; -------------------------------------------------------------------------------- -- Get_Type_Pragma_Value -------------------------------------------------------------------------------- function Get_Type_Pragma_Value (Protected_Or_Task_Type : RawDict.Type_Info_Ref; The_Pragma : RavenscarPragmasWithValue) return LexTokenManager.Lex_String --# global in Dict; is Result : LexTokenManager.Lex_String; begin case RawDict.Get_Type_Discriminant (Type_Mark => Protected_Or_Task_Type) is when Protected_Type_Item => Result := RawDict.Get_Protected_Type_Pragma_Value (The_Protected_Type => Protected_Or_Task_Type, The_Pragma => The_Pragma); when Task_Type_Item => Result := RawDict.Get_Task_Type_Pragma_Value (The_Task_Type => Protected_Or_Task_Type, The_Pragma => The_Pragma); when others => -- non-exec code Result := LexTokenManager.Null_String; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.Get_Type_Pragma_Value"); end case; return Result; end Get_Type_Pragma_Value; -------------------------------------------------------------------------------- function GetTypePragmaValue (TheProtectedOrTaskType : Symbol; ThePragma : RavenscarPragmasWithValue) return LexTokenManager.Lex_String is begin return Get_Type_Pragma_Value (Protected_Or_Task_Type => RawDict.Get_Type_Info_Ref (TheProtectedOrTaskType), -- GAA External The_Pragma => ThePragma); end 
GetTypePragmaValue; -------------------------------------------------------------------------------- -- Get_Type_Priority -------------------------------------------------------------------------------- function Get_Type_Priority (Protected_Or_Task_Type : RawDict.Type_Info_Ref) return LexTokenManager.Lex_String --# global in Dict; is Result : LexTokenManager.Lex_String; begin if RawDict.Get_Type_Parent (Type_Mark => Protected_Or_Task_Type) = RawDict.Null_Type_Info_Ref then -- get priority of type if Get_Type_Has_Pragma (Protected_Or_Task_Type => Protected_Or_Task_Type, The_Pragma => Priority) then Result := Get_Type_Pragma_Value (Protected_Or_Task_Type => Protected_Or_Task_Type, The_Pragma => Priority); else -- must be Result := Get_Type_Pragma_Value (Protected_Or_Task_Type => Protected_Or_Task_Type, The_Pragma => InterruptPriority); end if; else -- get priority of subtype Result := RawDict.GetSubtypeInfoPriority (RawDict.Get_Type_Ancillary_Fields (Type_Mark => Protected_Or_Task_Type)); end if; return Result; end Get_Type_Priority; -------------------------------------------------------------------------------- function GetTypePriority (TheProtectedOrTaskType : Symbol) return LexTokenManager.Lex_String is begin return Get_Type_Priority (Protected_Or_Task_Type => RawDict.Get_Type_Info_Ref (TheProtectedOrTaskType)); -- GAA External end GetTypePriority; -------------------------------------------------------------------------------- -- Constant_Is_Deferred_Here -------------------------------------------------------------------------------- function Constant_Is_Deferred_Here (The_Constant : RawDict.Constant_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is begin return Constant_Is_Deferred (The_Constant => The_Constant) and then not IsLocal (Scope, Get_Constant_Scope (The_Constant => The_Constant)); end Constant_Is_Deferred_Here; -------------------------------------------------------------------------------- function ConstantIsDeferredHere (TheConstant : Symbol; Scope : Scopes) return Boolean is begin return Constant_Is_Deferred_Here (The_Constant => RawDict.Get_Constant_Info_Ref (TheConstant), -- GAA External Scope => Scope); end ConstantIsDeferredHere; -------------------------------------------------------------------------------- -- Is_Static -------------------------------------------------------------------------------- function Is_Static_Constant (The_Constant : RawDict.Constant_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is begin return RawDict.Get_Constant_Static (The_Constant => The_Constant) and then not Constant_Is_Deferred_Here (The_Constant => The_Constant, Scope => Scope); end Is_Static_Constant; -------------------------------------------------------------------------------- function Is_Static_Type (Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is begin return RawDict.Get_Type_Static (Type_Mark => Type_Mark) and then not Type_Is_Private_Here (Type_Mark => Type_Mark, Scope => Scope); end Is_Static_Type; -------------------------------------------------------------------------------- function IsStatic (Item : Symbol; Scope : Scopes) return Boolean is Static : Boolean; begin case RawDict.GetSymbolDiscriminant (Item) is when Constant_Symbol => Static := Is_Static_Constant (The_Constant => RawDict.Get_Constant_Info_Ref (Item => Item), -- GAA External Scope => Scope); when Type_Symbol => Static := Is_Static_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Item), -- GAA External Scope => Scope); when others 
=> Static := False; end case; return Static; end IsStatic; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function IsProofConstant (TheSymbol : Symbol) return Boolean is begin return RawDict.GetSymbolDiscriminant (TheSymbol) = Constant_Symbol and then Get_Constant_Context (The_Constant => RawDict.Get_Constant_Info_Ref (Item => TheSymbol)) = -- GAA External ProofContext; end IsProofConstant; -------------------------------------------------------------------------------- function IsProofFunction (TheSymbol : Symbol) return Boolean is begin return IsFunction (TheSymbol) and then (RawDict.GetSymbolDiscriminant (TheSymbol) = ImplicitProofFunctionSymbol or else Get_Subprogram_Context (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheSymbol)) = ProofContext); end IsProofFunction; -------------------------------------------------------------------------------- function GetSubprogramParameterMode (Parameter : Symbol) return Modes is begin return RawDict.Get_Subprogram_Parameter_Mode (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Parameter)); -- GAA External end GetSubprogramParameterMode; -------------------------------------------------------------------------------- function GetVariableExpNode (Variable : Symbol) return ExaminerConstants.RefType is begin return RawDict.Get_Variable_Exp_Node (The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External end GetVariableExpNode; -------------------------------------------------------------------------------- -- Get_Subprogram_Parameter_Number -------------------------------------------------------------------------------- function GetSubprogramParameterNumber (Parameter : Symbol) return Positive is function Get_Subprogram_Parameter_Number (The_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref) return Positive --# global in Dict; is Current : Iterator; Number : Positive := Positive'First; begin Current := First_Ada_Subprogram_Parameter (The_Subprogram => RawDict.Get_Subprogram_Parameter_Subprogram (The_Subprogram_Parameter => The_Subprogram_Parameter)); while (RawDict.GetSymbolDiscriminant (CurrentSymbol (Current)) /= Subprogram_Parameter_Symbol or else RawDict.Get_Subprogram_Parameter_Info_Ref (Item => CurrentSymbol (Current)) /= The_Subprogram_Parameter) and then Number < Positive'Last loop Current := NextSymbol (Current); Number := Number + 1; end loop; return Number; end Get_Subprogram_Parameter_Number; begin -- GetSubprogramParameterNumber return Get_Subprogram_Parameter_Number (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Parameter)); -- GAA External end GetSubprogramParameterNumber; -------------------------------------------------------------------------------- -- Get_Subprogram_Parameter_Constraint -------------------------------------------------------------------------------- function GetSubprogramParameterConstraint (Parameter : Symbol; Dimension : Positive) return Symbol is function Get_Subprogram_Parameter_Constraint (Parameter : Symbol; Dimension : Positive) return RawDict.Parameter_Constraint_Info_Ref --# global in Dict; is The_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref; The_Parameter_Constraint : RawDict.Parameter_Constraint_Info_Ref; begin if RawDict.GetSymbolDiscriminant (Parameter) = Subprogram_Parameter_Symbol then -- Precondition is met so: -- traverse linked list of constraints until right dimension found 
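-- Editorial note (not part of the original source): a subprogram parameter may carry a
-- singly linked list of Parameter_Constraint records, one per array dimension, threaded
-- through Get_Next_Parameter_Constraint.  The walk below stops either at the node whose
-- dimension equals Dimension or, if no matching node exists, at
-- Null_Parameter_Constraint_Info_Ref; the enclosing GetSubprogramParameterConstraint then
-- converts whatever is returned with Get_Parameter_Constraint_Symbol.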
The_Subprogram_Parameter := RawDict.Get_Subprogram_Parameter_Info_Ref (Item => Parameter); -- GAA External The_Parameter_Constraint := RawDict.Get_Subprogram_Parameter_Index_Constraints (The_Subprogram_Parameter => The_Subprogram_Parameter); while The_Parameter_Constraint /= RawDict.Null_Parameter_Constraint_Info_Ref and then RawDict.Get_Parameter_Constraint_Dimension (The_Parameter_Constraint => The_Parameter_Constraint) /= Dimension loop The_Parameter_Constraint := RawDict.Get_Next_Parameter_Constraint (The_Parameter_Constraint => The_Parameter_Constraint); end loop; else -- Precondition not met - only possible if source being processed by Examiner contains semantic error The_Parameter_Constraint := RawDict.Null_Parameter_Constraint_Info_Ref; end if; return The_Parameter_Constraint; end Get_Subprogram_Parameter_Constraint; begin -- GetSubprogramParameterConstraint return RawDict.Get_Parameter_Constraint_Symbol -- GAA External (Get_Subprogram_Parameter_Constraint (Parameter => Parameter, Dimension => Dimension)); end GetSubprogramParameterConstraint; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function GetNumberOfSubprogramParameters (Subprogram : Symbol) return Natural is Parameter : Iterator; Count : Natural := Natural'First; begin Parameter := FirstSubprogramParameter (Subprogram); while not IsNullIterator (Parameter) and then Count < Natural'Last loop Parameter := NextSymbol (Parameter); Count := Count + 1; end loop; return Count; end GetNumberOfSubprogramParameters; -------------------------------------------------------------------------------- function GetNumberOfGenericFormalParameters (TheGeneric : Symbol) return Natural is Parameter : Iterator; Count : Natural := Natural'First; begin Parameter := FirstGenericFormalParameter (TheGeneric); while not IsNullIterator (Parameter) and then Count < Natural'Last loop Parameter := NextSymbol (Parameter); Count := Count + 1; end loop; return Count; end GetNumberOfGenericFormalParameters; -------------------------------------------------------------------------------- function GetGenericFormalParameterKind (TheGenericFormalParameter : Symbol) return Generic_Parameter_Kind is begin return RawDict.Get_Generic_Parameter_Kind (The_Generic_Parameter => RawDict.Get_Generic_Parameter_Info_Ref (TheGenericFormalParameter)); -- GAA External end GetGenericFormalParameterKind; -------------------------------------------------------------------------------- function GetParameterAssociatedWithParameterConstraint (TheConstraint : Symbol) return Symbol is begin return RawDict.Get_Subprogram_Parameter_Symbol -- GAA External (RawDict.Get_Parameter_Constraint_Subprogram_Parameter (The_Parameter_Constraint => RawDict.Get_Parameter_Constraint_Info_Ref (TheConstraint))); -- GAA External end GetParameterAssociatedWithParameterConstraint; -------------------------------------------------------------------------------- function GetSubprogramParameter (Subprogram : Symbol; Number : Positive) return Symbol is Parameter : Iterator; begin Parameter := FirstSubprogramParameter (Subprogram); for No in Positive range 1 .. 
Number - 1 loop Parameter := NextSymbol (Parameter); end loop; return CurrentSymbol (Parameter); end GetSubprogramParameter; -------------------------------------------------------------------------------- -- Get_Array_Attribute_Type -------------------------------------------------------------------------------- function GetArrayAttributeType (Name : LexTokenManager.Lex_String; TypeMark : Symbol; Dimension : Positive) return Symbol is function Get_Array_Attribute_Type (Name : LexTokenManager.Lex_String; Type_Mark : RawDict.Type_Info_Ref; Dimension : Positive) return RawDict.Type_Info_Ref --# global in Dict; --# in LexTokenManager.State; is Result : RawDict.Type_Info_Ref; begin if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Length_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Component_Size_Token) = LexTokenManager.Str_Eq then Result := Get_Universal_Integer_Type; else Result := Get_Array_Index (Type_Mark => Type_Mark, Dimension => Dimension); end if; return Result; end Get_Array_Attribute_Type; begin -- GetArrayAttributeType return RawDict.Get_Type_Symbol -- GAA External (Get_Array_Attribute_Type (Name => Name, Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark), -- GAA External Dimension => Dimension)); end GetArrayAttributeType; -------------------------------------------------------------------------------- -- Get_Array_Attribute_Value -------------------------------------------------------------------------------- function GetArrayAttributeValue (Name : LexTokenManager.Lex_String; TypeMark : Symbol; Dimension : Positive) return LexTokenManager.Lex_String is function Get_Array_Attribute_Value (Name : LexTokenManager.Lex_String; Type_Mark : RawDict.Type_Info_Ref; Dimension : Positive) return LexTokenManager.Lex_String --# global in Dict; --# in LexTokenManager.State; is Index : RawDict.Type_Info_Ref; Result : LexTokenManager.Lex_String; begin Index := Get_Array_Index (Type_Mark => Type_Mark, Dimension => Dimension); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.First_Token) = LexTokenManager.Str_Eq then Result := RawDict.Get_Type_Lower (Type_Mark => Index); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Last_Token) = LexTokenManager.Str_Eq then Result := RawDict.Get_Type_Upper (Type_Mark => Index); else Result := LexTokenManager.Null_String; end if; return Result; end Get_Array_Attribute_Value; begin -- GetArrayAttributeValue return Get_Array_Attribute_Value (Name => Name, Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark), -- GAA External Dimension => Dimension); end GetArrayAttributeValue; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function HasBody (CompilationUnit : Symbol) return Boolean is Result : Boolean; begin case RawDict.GetSymbolDiscriminant (CompilationUnit) is when Package_Symbol => Result := RawDict.Get_Package_Has_Proper_Body (The_Package => RawDict.Get_Package_Info_Ref (Item => CompilationUnit)); -- GAA External when Type_Symbol => case RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => CompilationUnit)) is -- GAA External when Protected_Type_Item => Result := RawDict.Get_Protected_Type_Has_Proper_Body (The_Protected_Type => RawDict.Get_Type_Info_Ref (Item => 
CompilationUnit)); -- GAA External when Task_Type_Item => Result := RawDict.Get_Task_Type_Has_Proper_Body (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => CompilationUnit)); -- GAA External when others => -- non-exec code Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.HasBody"); end case; when Subprogram_Symbol => Result := RawDict.Get_Subprogram_Has_Proper_Body (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => CompilationUnit)); -- GAA External when others => Result := False; end case; return Result; end HasBody; -------------------------------------------------------------------------------- -- Type_Is_Character -------------------------------------------------------------------------------- function TypeIsCharacter (TypeMark : Symbol) return Boolean is function Type_Is_Character (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return Get_Root_Type (Type_Mark => Type_Mark) = Get_Predefined_Character_Type; end Type_Is_Character; begin -- TypeIsCharacter return Type_Is_Character (Type_Mark => RawDict.Get_Type_Info_Ref (TypeMark)); -- GAA External end TypeIsCharacter; -------------------------------------------------------------------------------- -- Is_Record_Type_Mark -------------------------------------------------------------------------------- function IsRecordTypeMark (TheSymbol : Symbol; Scope : Scopes) return Boolean is function Is_Record_Type_Mark (Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is begin return RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Record_Type_Item and then not Type_Is_Private_Here (Type_Mark => Type_Mark, Scope => Scope); end Is_Record_Type_Mark; begin -- IsRecordTypeMark return RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then Is_Record_Type_Mark (Type_Mark => RawDict.Get_Type_Info_Ref (TheSymbol), -- GAA External Scope => Scope); end IsRecordTypeMark; -------------------------------------------------------------------------------- -- Is_Scalar_Type -------------------------------------------------------------------------------- function IsScalarType (TheSymbol : Symbol; Scope : Scopes) return Boolean is function Is_Scalar_Type (Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is begin return Is_Type (Type_Mark => Type_Mark) and then Is_Scalar_Type_Mark (Type_Mark => Type_Mark, Scope => Scope); end Is_Scalar_Type; begin -- IsScalarType return RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then Is_Scalar_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheSymbol), -- GAA External Scope => Scope); end IsScalarType; -------------------------------------------------------------------------------- -- Is_Numeric_Type -------------------------------------------------------------------------------- function IsNumericType (TheSymbol : Symbol; Scope : Scopes) return Boolean is function Is_Numeric_Type (Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is begin return Is_Type (Type_Mark => Type_Mark) and then Type_Is_Numeric (Type_Mark => Type_Mark) and then not Type_Is_Private_Here (Type_Mark => Type_Mark, Scope => Scope); end Is_Numeric_Type; begin -- IsNumericType return RawDict.GetSymbolDiscriminant (TheSymbol) = Type_Symbol and then Is_Numeric_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheSymbol), -- GAA External Scope => Scope); end IsNumericType; 
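--------------------------------------------------------------------------------
-- Editorial note (not part of the original source): a minimal, commented-out sketch of
-- how a client typically combines the classification predicates above with the iterator
-- protocol used throughout this unit.  "Sym", "Subprogram_Sym", "Scope", "It" and
-- "Process" are hypothetical names, and the sketch assumes that FirstSubprogramParameter,
-- IsNullIterator, CurrentSymbol and NextSymbol are exported by the Dictionary spec, as
-- their use in this body suggests.
--
--    if Dictionary.IsScalarType (Sym, Scope) then
--       ...  -- scalar-specific handling
--    end if;
--
--    It := Dictionary.FirstSubprogramParameter (Subprogram_Sym);
--    while not Dictionary.IsNullIterator (It) loop
--       Process (Dictionary.CurrentSymbol (It));  -- Process is hypothetical
--       It := Dictionary.NextSymbol (It);
--    end loop;
--------------------------------------------------------------------------------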
-------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function IsRecordComponent (TheSymbol : Symbol) return Boolean is Result : Boolean; begin case RawDict.GetSymbolDiscriminant (TheSymbol) is when Record_Component_Symbol | Subcomponent_Symbol => Result := True; when others => Result := False; end case; return Result; end IsRecordComponent; -------------------------------------------------------------------------------- -- Is_Inherited -------------------------------------------------------------------------------- function Is_Package_Inherited (The_Inherited_Symbol : Symbol; The_Package : RawDict.Package_Info_Ref) return Boolean --# global Dict; is The_Inherit_Clause : RawDict.Context_Clause_Info_Ref; The_Inherited_Package : RawDict.Package_Info_Ref; The_Inherited_Subprogram : RawDict.Subprogram_Info_Ref; begin The_Inherit_Clause := RawDict.Get_Package_Inherit_Clauses (The_Package => The_Package); case RawDict.GetSymbolDiscriminant (The_Inherited_Symbol) is when Package_Symbol => The_Inherited_Package := RawDict.Get_Package_Info_Ref (Item => The_Inherited_Symbol); while The_Inherit_Clause /= RawDict.Null_Context_Clause_Info_Ref and then (RawDict.Get_Context_Clause_Is_Subprogram (The_Context_Clause => The_Inherit_Clause) or else RawDict.Get_Context_Clause_Package (The_Context_Clause => The_Inherit_Clause) /= The_Inherited_Package) loop The_Inherit_Clause := RawDict.Get_Next_Context_Clause (The_Context_Clause => The_Inherit_Clause); end loop; when Subprogram_Symbol => The_Inherited_Subprogram := RawDict.Get_Subprogram_Info_Ref (Item => The_Inherited_Symbol); while The_Inherit_Clause /= RawDict.Null_Context_Clause_Info_Ref and then (not RawDict.Get_Context_Clause_Is_Subprogram (The_Context_Clause => The_Inherit_Clause) or else RawDict.Get_Context_Clause_Subprogram (The_Context_Clause => The_Inherit_Clause) /= The_Inherited_Subprogram) loop The_Inherit_Clause := RawDict.Get_Next_Context_Clause (The_Context_Clause => The_Inherit_Clause); end loop; when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Is_Package_Inherited"); end case; return The_Inherit_Clause /= RawDict.Null_Context_Clause_Info_Ref; end Is_Package_Inherited; -------------------------------------------------------------------------------- function Is_Subprogram_Inherited (The_Inherited_Symbol : Symbol; The_Subprogram : RawDict.Subprogram_Info_Ref) return Boolean --# global Dict; is The_Inherit_Clause : RawDict.Context_Clause_Info_Ref; The_Inherited_Package : RawDict.Package_Info_Ref; The_Inherited_Subprogram : RawDict.Subprogram_Info_Ref; begin if Is_Main_Program (The_Subprogram => The_Subprogram) then The_Inherit_Clause := RawDict.Get_Subprogram_Inherit_Clauses (The_Subprogram => The_Subprogram); case RawDict.GetSymbolDiscriminant (The_Inherited_Symbol) is when Package_Symbol => The_Inherited_Package := RawDict.Get_Package_Info_Ref (Item => The_Inherited_Symbol); while The_Inherit_Clause /= RawDict.Null_Context_Clause_Info_Ref and then (RawDict.Get_Context_Clause_Is_Subprogram (The_Context_Clause => The_Inherit_Clause) or else RawDict.Get_Context_Clause_Package (The_Context_Clause => The_Inherit_Clause) /= The_Inherited_Package) loop The_Inherit_Clause := RawDict.Get_Next_Context_Clause (The_Context_Clause => The_Inherit_Clause); end loop; when Subprogram_Symbol => The_Inherited_Subprogram := RawDict.Get_Subprogram_Info_Ref (Item => 
The_Inherited_Symbol); while The_Inherit_Clause /= RawDict.Null_Context_Clause_Info_Ref and then (not RawDict.Get_Context_Clause_Is_Subprogram (The_Context_Clause => The_Inherit_Clause) or else RawDict.Get_Context_Clause_Subprogram (The_Context_Clause => The_Inherit_Clause) /= The_Inherited_Subprogram) loop The_Inherit_Clause := RawDict.Get_Next_Context_Clause (The_Context_Clause => The_Inherit_Clause); end loop; when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Is_Subprogram_Inherited"); end case; else The_Inherit_Clause := RawDict.Null_Context_Clause_Info_Ref; end if; return The_Inherit_Clause /= RawDict.Null_Context_Clause_Info_Ref; end Is_Subprogram_Inherited; -------------------------------------------------------------------------------- function IsInherited (ThePackage, CompilationUnit : Symbol) return Boolean is The_Inherited_Symbol : Symbol; Result : Boolean; begin case RawDict.GetSymbolDiscriminant (ThePackage) is when Package_Symbol | Subprogram_Symbol => The_Inherited_Symbol := ThePackage; when ImplicitProofFunctionSymbol => The_Inherited_Symbol := RawDict.Get_Subprogram_Symbol (RawDict.GetImplicitProofFunctionAdaFunction (ThePackage)); when others => -- non-exec code The_Inherited_Symbol := NullSymbol; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.IsInherited"); end case; case RawDict.GetSymbolDiscriminant (CompilationUnit) is when Package_Symbol => Result := Is_Package_Inherited (The_Inherited_Symbol => The_Inherited_Symbol, The_Package => RawDict.Get_Package_Info_Ref (Item => CompilationUnit)); -- GAA External when Subprogram_Symbol => Result := Is_Subprogram_Inherited (The_Inherited_Symbol => The_Inherited_Symbol, The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => CompilationUnit)); -- GAA External when others => -- non-exec code Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.IsInherited"); end case; return Result; end IsInherited; -------------------------------------------------------------------------------- -- First_Visible_Subprogram -------------------------------------------------------------------------------- function First_Package_Visible_Subprogram (The_Package : RawDict.Package_Info_Ref) return Iterator --# global Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol; Visible_Subprograms : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Package_First_Visible_Declaration (The_Package => The_Package); while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); if RawDict.GetSymbolDiscriminant (Item) = Subprogram_Symbol and then The_Declaration /= RawDict.Get_Subprogram_Body (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Item)) then Visible_Subprograms := Iterator'(VisibleSubprogramIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (The_Declaration)); The_Declaration := RawDict.Null_Declaration_Info_Ref; else The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; return Visible_Subprograms; end First_Package_Visible_Subprogram; -------------------------------------------------------------------------------- function First_Protected_Type_Visible_Subprogram (The_Protected_Type : RawDict.Type_Info_Ref) return Iterator --# global Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Item : 
Symbol; Visible_Subprograms : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Protected_Type_First_Visible_Declaration (The_Protected_Type => The_Protected_Type); while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); if RawDict.GetSymbolDiscriminant (Item) = Subprogram_Symbol and then The_Declaration /= RawDict.Get_Subprogram_Body (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Item)) then Visible_Subprograms := Iterator'(VisibleSubprogramIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (The_Declaration)); The_Declaration := RawDict.Null_Declaration_Info_Ref; else The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; return Visible_Subprograms; end First_Protected_Type_Visible_Subprogram; -------------------------------------------------------------------------------- function First_Visible_Subprogram (The_Package_Or_Type : Symbol) return Iterator is Visible_Subprograms : Iterator; begin case RawDict.GetSymbolDiscriminant (The_Package_Or_Type) is when Package_Symbol => Visible_Subprograms := First_Package_Visible_Subprogram (The_Package => RawDict.Get_Package_Info_Ref (Item => The_Package_Or_Type)); -- GAA External when Type_Symbol => -- precondition means it must be a protected type SystemErrors.RT_Assert (C => Is_Protected_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => The_Package_Or_Type)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.First_Visible_Subprogram"); Visible_Subprograms := First_Protected_Type_Visible_Subprogram (The_Protected_Type => RawDict.Get_Type_Info_Ref (Item => The_Package_Or_Type)); -- GAA External when others => Visible_Subprograms := NullIterator; end case; return Visible_Subprograms; end First_Visible_Subprogram; -------------------------------------------------------------------------------- -- First_Owned_Package -------------------------------------------------------------------------------- function First_Owned_Package (The_Package : RawDict.Package_Info_Ref) return Iterator --# global in Dict; is Item, Descendent : RawDict.Package_Info_Ref; Owned_Packages : Iterator; begin Item := RawDict.Get_Package_First_Private_Child (The_Package => The_Package); if Item = RawDict.Null_Package_Info_Ref then Owned_Packages := NullIterator; else loop Descendent := RawDict.Get_Package_First_Public_Child (The_Package => Item); exit when Descendent = RawDict.Null_Package_Info_Ref; Item := Descendent; end loop; Owned_Packages := Iterator' (OwnedPackageIterator, IsAbstract, RawDict.Get_Package_Symbol (Item), RawDict.Get_Package_Symbol (The_Package)); end if; return Owned_Packages; end First_Owned_Package; -------------------------------------------------------------------------------- function FirstOwnedPackage (ThePackage : Symbol) return Iterator is begin return First_Owned_Package (The_Package => RawDict.Get_Package_Info_Ref (ThePackage)); end FirstOwnedPackage; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- procedure Write (File_Name : in E_Strings.T; Status : out SPARK_IO.File_Status) is separate; -------------------------------------------------------------------------------- -- First_Private_Subprogram -------------------------------------------------------------------------------- function First_Private_Subprogram_Local (The_Package :
RawDict.Package_Info_Ref) return Iterator --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol; Private_Subprograms : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Package_First_Private_Declaration (The_Package => The_Package); while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); if RawDict.GetSymbolDiscriminant (Item) = Subprogram_Symbol and then The_Declaration /= RawDict.Get_Subprogram_Body (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Item)) then Private_Subprograms := Iterator'(PrivateSubprogramIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (The_Declaration)); The_Declaration := RawDict.Null_Declaration_Info_Ref; else The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; return Private_Subprograms; end First_Private_Subprogram_Local; -------------------------------------------------------------------------------- function First_Private_Subprogram (The_Package : Symbol) return Iterator is begin return First_Private_Subprogram_Local (The_Package => RawDict.Get_Package_Info_Ref (The_Package)); -- GAA External end First_Private_Subprogram; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function LookupSelectedItem (Prefix : Symbol; Selector : LexTokenManager.Lex_String; Scope : Scopes; Context : Contexts) return Symbol is separate; -------------------------------------------------------------------------------- -- Search_For_Inherited_Operations -------------------------------------------------------------------------------- procedure Search_For_Inherited_Operations (Name : in LexTokenManager.Lex_String; Scope : in Scopes; Prefix : in RawDict.Package_Info_Ref; Context : in Contexts; OpSym : out Symbol; Actual_Tagged_Type : out RawDict.Type_Info_Ref) --# global in CommandLineData.Content; --# in Dict; --# in LexTokenManager.State; --# derives Actual_Tagged_Type, --# OpSym from CommandLineData.Content, --# Context, --# Dict, --# LexTokenManager.State, --# Name, --# Prefix, --# Scope; --# post Is_Null_Symbol (OpSym) or Is_Subprogram (OpSym, Dict); is separate; -------------------------------------------------------------------------------- procedure SearchForInheritedOperations (Name : in LexTokenManager.Lex_String; Scope : in Scopes; Prefix : in Symbol; Context : in Contexts; OpSym : out Symbol; ActualTaggedType : out Symbol) is Actual_Tagged_Type : RawDict.Type_Info_Ref; begin Search_For_Inherited_Operations (Name => Name, Scope => Scope, Prefix => RawDict.Get_Package_Info_Ref (Prefix), -- GAA External Context => Context, OpSym => OpSym, Actual_Tagged_Type => Actual_Tagged_Type); ActualTaggedType := RawDict.Get_Type_Symbol (Actual_Tagged_Type); -- GAA External end SearchForInheritedOperations; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function GetSubprogramControllingType (Subprogram : Symbol) return Symbol is function Operation_Can_Be_Inherited (TheOpSym : Symbol) return RawDict.Type_Info_Ref --# global in Dict; is It : Iterator; The_Tagged_Type : RawDict.Type_Info_Ref; Current_Type : RawDict.Type_Info_Ref; function Is_Locally_Declared (Type_Mark : RawDict.Type_Info_Ref; TheOpSym : Symbol) return Boolean --# global in Dict; is begin return GetRegion 
(Get_Type_Scope (Type_Mark => Type_Mark)) = GetRegion (GetScope (TheOpSym)); end Is_Locally_Declared; begin -- Operation_Can_Be_Inherited The_Tagged_Type := RawDict.Null_Type_Info_Ref; -- default -- a subprogram is suitable for inheritance if it has a -- parameter of a tagged type declared in the same package It := FirstSubprogramParameter (TheOpSym); while not IsNullIterator (It) loop Current_Type := Get_Type (The_Symbol => CurrentSymbol (It)); if Type_Is_Tagged (Type_Mark => Current_Type) and then Is_Locally_Declared (Current_Type, TheOpSym) then The_Tagged_Type := Current_Type; exit; end if; It := NextSymbol (It); end loop; return The_Tagged_Type; end Operation_Can_Be_Inherited; begin -- GetSubprogramControllingType return RawDict.Get_Type_Symbol (Operation_Can_Be_Inherited (Subprogram)); -- GAA External end GetSubprogramControllingType; -------------------------------------------------------------------------------- function GetOverriddenSubprogram (Subprogram : Symbol) return Symbol is DeclaringRegion : Symbol; Result : Symbol := NullSymbol; begin SystemErrors.RT_Assert (C => Is_Subprogram (Subprogram), Sys_Err => SystemErrors.Precondition_Failure, Msg => "In call to GetOverriddenSubprogram"); DeclaringRegion := GetRegion (GetScope (Subprogram)); if RawDict.GetSymbolDiscriminant (DeclaringRegion) = Package_Symbol and then RawDict.Get_Package_Extends (The_Package => RawDict.Get_Package_Info_Ref (Item => DeclaringRegion)) /= RawDict.Null_Package_Info_Ref then -- It is possible that the subprogram overrides since it -- is declared in a package that extends another. -- Overriding has taken place if there is a subprogram of -- the same name in the package which has been extended; this -- test is strong enough because we know that a name cannot -- be re-used unless the overriding is successful (overloading -- not being permitted).
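-- The lookup below therefore searches only the visible part of the extended package, in program context.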
Result := LookupImmediateScope (GetSimpleName (Subprogram), Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (RawDict.Get_Package_Extends (The_Package => RawDict.Get_Package_Info_Ref (Item => DeclaringRegion)))), ProgramContext); -- Result will either be the overridden subprogram if present or null symbol if not end if; return Result; end GetOverriddenSubprogram; -------------------------------------------------------------------------------- -- Is_Concrete_Own_Variable -------------------------------------------------------------------------------- function IsConcreteOwnVariable (Variable : Symbol) return Boolean is function Is_Concrete_Own_Variable (The_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is begin return Is_Own_Variable (The_Variable => The_Variable) and then Variable_Is_Declared (The_Variable => The_Variable); end Is_Concrete_Own_Variable; begin -- IsConcreteOwnVariable return Is_Concrete_Own_Variable (The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External end IsConcreteOwnVariable; -------------------------------------------------------------------------------- -- Get_Subject -------------------------------------------------------------------------------- function Get_Subject (The_Variable : RawDict.Variable_Info_Ref) return RawDict.Variable_Info_Ref --# global in Dict; is begin return RawDict.Get_Own_Variable_Variable (The_Own_Variable => RawDict.Get_Constituent_Own_Variable (The_Constituent => RawDict.Get_Variable_Constituent (The_Variable => The_Variable))); end Get_Subject; -------------------------------------------------------------------------------- function GetSubject (Constituent : Symbol) return Symbol is begin return RawDict.Get_Variable_Symbol (Get_Subject (The_Variable => RawDict.Get_Variable_Info_Ref (Constituent))); -- GAA External end GetSubject; -------------------------------------------------------------------------------- -- Is_Refinement -------------------------------------------------------------------------------- function Is_Refinement (Subject, Constituent : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is begin return Is_Constituent (The_Variable => Constituent) and then Get_Subject (The_Variable => Constituent) = Subject; end Is_Refinement; -------------------------------------------------------------------------------- function IsRefinement (Subject, Constituent : Symbol) return Boolean is begin return Is_Refinement (Subject => RawDict.Get_Variable_Info_Ref (Subject), -- GAA External Constituent => RawDict.Get_Variable_Info_Ref (Constituent)); -- GAA External end IsRefinement; -------------------------------------------------------------------------------- -- Is_Refinement_Constituent -------------------------------------------------------------------------------- function IsRefinementConstituent (ThePackage, Variable : Symbol) return Boolean is function Is_Refinement_Constituent (ThePackage : Symbol; The_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is begin return Is_Constituent (The_Variable => The_Variable) and then Get_Owner (The_Variable => Get_Subject (The_Variable => The_Variable)) = ThePackage; end Is_Refinement_Constituent; begin -- IsRefinementConstituent return Is_Refinement_Constituent (ThePackage => ThePackage, The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External end IsRefinementConstituent; -------------------------------------------------------------------------------- -- 
Is_Own_Variable_Or_Constituent_With_Mode -------------------------------------------------------------------------------- function IsOwnVariableOrConstituentWithMode (Variable : Symbol) return Boolean is function Is_Own_Variable_Or_Constituent_With_Mode (The_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is begin return Get_Own_Variable_Or_Constituent_Mode (The_Variable => The_Variable) /= DefaultMode; end Is_Own_Variable_Or_Constituent_With_Mode; begin -- IsOwnVariableOrConstituentWithMode return RawDict.GetSymbolDiscriminant (Variable) = Variable_Symbol and then Is_Own_Variable_Or_Constituent_With_Mode (The_Variable => RawDict.Get_Variable_Info_Ref (Item => Variable)); -- GAA External end IsOwnVariableOrConstituentWithMode; -------------------------------------------------------------------------------- -- Is_Unmoded_Protected_Own_Variable -------------------------------------------------------------------------------- function IsUnmodedProtectedOwnVariable (Sym : Symbol) return Boolean is function Is_Unmoded_Protected_Own_Variable (The_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is begin return Is_Own_Variable (The_Variable => The_Variable) and then Get_Own_Variable_Protected (The_Variable => The_Variable) and then Get_Own_Variable_Mode (The_Variable => The_Variable) = DefaultMode; end Is_Unmoded_Protected_Own_Variable; begin -- IsUnmodedProtectedOwnVariable return RawDict.GetSymbolDiscriminant (Sym) = Variable_Symbol and then Is_Unmoded_Protected_Own_Variable (The_Variable => RawDict.Get_Variable_Info_Ref (Item => Sym)); -- GAA External end IsUnmodedProtectedOwnVariable; -------------------------------------------------------------------------------- -- Get_Protected_Implicit_In_Stream -------------------------------------------------------------------------------- function GetProtectedImplicitInStream (TheProtectedOwnVar : Symbol) return Symbol is function Get_Protected_Implicit_In_Stream (The_Protected_Own_Variable : RawDict.Variable_Info_Ref) return RawDict.Implicit_In_Stream_Info_Ref --# global in Dict; is begin return RawDict.Get_Own_Variable_Implicit_In_Stream (The_Own_Variable => RawDict.Get_Variable_Own_Variable (The_Variable => The_Protected_Own_Variable)); end Get_Protected_Implicit_In_Stream; begin -- GetProtectedImplicitInStream return RawDict.Get_Implicit_In_Stream_Symbol -- GAA External (Get_Protected_Implicit_In_Stream (The_Protected_Own_Variable => RawDict.Get_Variable_Info_Ref (TheProtectedOwnVar))); -- GAA External end GetProtectedImplicitInStream; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function GetProtectedTypeHasEntry (TheProtectedType : Symbol) return Boolean is begin return RawDict.Get_Protected_Type_Has_Entry (The_Protected_Type => RawDict.Get_Type_Info_Ref (TheProtectedType)); -- GAA External end GetProtectedTypeHasEntry; -------------------------------------------------------------------------------- function MainProgramExists return Boolean is begin return Get_Main_Program /= RawDict.Null_Subprogram_Info_Ref; end MainProgramExists; -------------------------------------------------------------------------------- function MainProgramPrioritySupplied return Boolean is begin return Dict.Main.Priority_Given; end MainProgramPrioritySupplied; -------------------------------------------------------------------------------- -- First_Deferred_Constant 
-------------------------------------------------------------------------------- function First_Deferred_Constant (The_Package : Symbol) return Iterator is function First_Deferred_Constant_Local (The_Package : RawDict.Package_Info_Ref) return Iterator --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol; Deferred_Constants : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Package_First_Visible_Declaration (The_Package => The_Package); while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); if RawDict.GetSymbolDiscriminant (Item) = Constant_Symbol and then Constant_Is_Deferred (The_Constant => RawDict.Get_Constant_Info_Ref (Item => Item)) then Deferred_Constants := Iterator'(DeferredConstantIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (The_Declaration)); The_Declaration := RawDict.Null_Declaration_Info_Ref; else The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; return Deferred_Constants; end First_Deferred_Constant_Local; begin -- First_Deferred_Constant return First_Deferred_Constant_Local (The_Package => RawDict.Get_Package_Info_Ref (The_Package)); -- GAA External end First_Deferred_Constant; -------------------------------------------------------------------------------- -- First_Undeclared_Type -------------------------------------------------------------------------------- function First_Undeclared_Type (The_Package : Symbol) return Iterator is function First_Undeclared_Type_Local (The_Package : RawDict.Package_Info_Ref) return Iterator --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol; Undeclared_Types : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Package_First_Visible_Declaration (The_Package => The_Package); while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); if RawDict.GetSymbolDiscriminant (Item) = Type_Symbol and then Is_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Item)) and then RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Item)) = Unknown_Type_Item then Undeclared_Types := Iterator'(UndeclaredTypeIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (The_Declaration)); The_Declaration := RawDict.Null_Declaration_Info_Ref; else The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; return Undeclared_Types; end First_Undeclared_Type_Local; begin -- First_Undeclared_Type return First_Undeclared_Type_Local (The_Package => RawDict.Get_Package_Info_Ref (The_Package)); -- GAA External end First_Undeclared_Type; -------------------------------------------------------------------------------- -- First_Private_Type -------------------------------------------------------------------------------- function First_Private_Type (The_Package : Symbol) return Iterator is function First_Private_Type_Local (The_Package : RawDict.Package_Info_Ref) return Iterator --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol; Private_Types : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Package_First_Visible_Declaration (The_Package => The_Package); while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); if RawDict.GetSymbolDiscriminant (Item) = 
Type_Symbol and then Is_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Item)) and then Type_Is_Private (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Item)) and then RawDict.Get_Declaration_Context (The_Declaration => The_Declaration) = ProgramContext then Private_Types := Iterator'(PrivateTypeIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (The_Declaration)); The_Declaration := RawDict.Null_Declaration_Info_Ref; else The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; return Private_Types; end First_Private_Type_Local; begin -- First_Private_Type return First_Private_Type_Local (The_Package => RawDict.Get_Package_Info_Ref (The_Package)); -- GAA External end First_Private_Type; -------------------------------------------------------------------------------- -- First_Visible_Task_Type -------------------------------------------------------------------------------- function First_Visible_Task_Type_Local (The_Package : RawDict.Package_Info_Ref) return Iterator --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol; Task_Types : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Package_First_Visible_Declaration (The_Package => The_Package); while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); if RawDict.GetSymbolDiscriminant (Item) = Type_Symbol and then Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Item)) then Task_Types := Iterator'(TaskTypeIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (The_Declaration)); The_Declaration := RawDict.Null_Declaration_Info_Ref; else The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; return Task_Types; end First_Visible_Task_Type_Local; -------------------------------------------------------------------------------- function First_Visible_Task_Type (The_Package : Symbol) return Iterator is begin return First_Visible_Task_Type_Local (The_Package => RawDict.Get_Package_Info_Ref (The_Package)); -- GAA External end First_Visible_Task_Type; -------------------------------------------------------------------------------- -- First_Private_Task_Type -------------------------------------------------------------------------------- function First_Private_Task_Type_Local (The_Package : RawDict.Package_Info_Ref) return Iterator --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol; Task_Types : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Package_First_Private_Declaration (The_Package => The_Package); while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); if RawDict.GetSymbolDiscriminant (Item) = Type_Symbol and then Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Item)) then Task_Types := Iterator'(TaskTypeIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (The_Declaration)); The_Declaration := RawDict.Null_Declaration_Info_Ref; else The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; return Task_Types; end First_Private_Task_Type_Local; -------------------------------------------------------------------------------- function First_Private_Task_Type (The_Package : Symbol) return Iterator is begin return First_Private_Task_Type_Local (The_Package => RawDict.Get_Package_Info_Ref (The_Package)); 
-- GAA External end First_Private_Task_Type; -------------------------------------------------------------------------------- -- First_Own_Task -------------------------------------------------------------------------------- function FirstOwnTask (ThePackage : Symbol) return Iterator is function First_Own_Task (The_Package : RawDict.Package_Info_Ref) return Iterator --# global in Dict; is Context : Symbol; Result : Iterator; begin Context := RawDict.Get_Package_Task_List (The_Package => The_Package); if Context = NullSymbol then Result := NullIterator; else Result := Iterator' (Discriminant => OwnTaskIterator, Abstraction => IsAbstract, Current => RawDict.Get_Variable_Symbol (RawDict.GetOwnTaskVariable (Context)), Context => Context); end if; return Result; end First_Own_Task; begin -- FirstOwnTask return First_Own_Task (The_Package => RawDict.Get_Package_Info_Ref (ThePackage)); -- GAA External end FirstOwnTask; -------------------------------------------------------------------------------- -- First_Visible_Protected_Type -------------------------------------------------------------------------------- function First_Visible_Protected_Type_Local (The_Package : RawDict.Package_Info_Ref) return Iterator --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol; Protected_Types : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Package_First_Visible_Declaration (The_Package => The_Package); while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); -- filter items to leave just protected types that are actually declared (not just announced) if RawDict.GetSymbolDiscriminant (Item) = Type_Symbol and then Is_Protected_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Item)) and then Get_Visibility (Scope => Get_Type_Scope (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Item))) = Visible then Protected_Types := Iterator'(ProtectedTypeIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (The_Declaration)); The_Declaration := RawDict.Null_Declaration_Info_Ref; else The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; return Protected_Types; end First_Visible_Protected_Type_Local; -------------------------------------------------------------------------------- function First_Visible_Protected_Type (The_Package : Symbol) return Iterator is begin return First_Visible_Protected_Type_Local (The_Package => RawDict.Get_Package_Info_Ref (The_Package)); -- GAA External end First_Visible_Protected_Type; -------------------------------------------------------------------------------- -- First_Private_Protected_Type -------------------------------------------------------------------------------- function First_Private_Protected_Type_Local (The_Package : RawDict.Package_Info_Ref) return Iterator --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol; Protected_Types : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Package_First_Private_Declaration (The_Package => The_Package); while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); if RawDict.GetSymbolDiscriminant (Item) = Type_Symbol and then Is_Protected_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Item)) then Protected_Types := Iterator'(ProtectedTypeIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (The_Declaration)); The_Declaration 
:= RawDict.Null_Declaration_Info_Ref; else The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; return Protected_Types; end First_Private_Protected_Type_Local; -------------------------------------------------------------------------------- function First_Private_Protected_Type (The_Package : Symbol) return Iterator is begin return First_Private_Protected_Type_Local (The_Package => RawDict.Get_Package_Info_Ref (The_Package)); -- GAA External end First_Private_Protected_Type; -------------------------------------------------------------------------------- -- Package_Requires_Body -------------------------------------------------------------------------------- function PackageRequiresBody (ThePackage : Symbol) return Boolean is function Package_Requires_Body (The_Package : RawDict.Package_Info_Ref) return Boolean --# global in CommandLineData.Content; --# in Dict; is It : Iterator; SubProgSym : Symbol; Result : Boolean; begin -- look for a visible subprogram that needs a body if RawDict.Get_Package_Elaborate_Body_Found (The_Package => The_Package) then Result := True; else Result := False; It := First_Package_Visible_Subprogram (The_Package => The_Package); while not IsNullIterator (It) loop SubProgSym := CurrentSymbol (It); if not (IsProofFunction (SubProgSym) or else RawDict.Get_Subprogram_Has_Proper_Body (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => SubProgSym))) then Result := True; exit; end if; It := NextSymbol (It); end loop; end if; if not Result then -- look for a private subprogram that needs a body It := First_Private_Subprogram_Local (The_Package => The_Package); while not IsNullIterator (It) loop SubProgSym := CurrentSymbol (It); if not (IsProofFunction (SubProgSym) or else RawDict.Get_Subprogram_Has_Proper_Body (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => SubProgSym))) then Result := True; exit; end if; It := NextSymbol (It); end loop; end if; -- if we have not already seen enough to know a body is needed and we are -- in Ravenscar profile then see if there are any task or protected types to -- complete if not Result and then CommandLineData.Ravenscar_Selected then Result := First_Visible_Task_Type_Local (The_Package => The_Package) /= NullIterator or else First_Private_Task_Type_Local (The_Package => The_Package) /= NullIterator or else First_Visible_Protected_Type_Local (The_Package => The_Package) /= NullIterator or else First_Private_Protected_Type_Local (The_Package => The_Package) /= NullIterator; end if; return Result; end Package_Requires_Body; begin -- PackageRequiresBody return Package_Requires_Body (The_Package => RawDict.Get_Package_Info_Ref (ThePackage)); -- GAA External end PackageRequiresBody; -------------------------------------------------------------------------------- -- First_Interrupt_Stream_Mapping -------------------------------------------------------------------------------- function First_Interrupt_Stream_Mapping (The_Variable : RawDict.Variable_Info_Ref) return Iterator --# global in Dict; is The_Mappings : Symbol; It : Iterator; begin The_Mappings := RawDict.Get_Own_Variable_Interrupt_Stream_Mappings (The_Own_Variable => RawDict.Get_Variable_Own_Variable (The_Variable => The_Variable)); if The_Mappings = NullSymbol then It := NullIterator; else It := Iterator'(InterruptStreamMappingIterator, IsAbstract, The_Mappings, The_Mappings); end if; return It; end First_Interrupt_Stream_Mapping; --------------------------------------------------------------------------------
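-- Symbol-based wrapper for external callers: converts to the internal Variable_Info_Ref and delegates to First_Interrupt_Stream_Mapping above.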
function FirstInterruptStreamMapping (Sym : Symbol) return Iterator is begin return First_Interrupt_Stream_Mapping (The_Variable => RawDict.Get_Variable_Info_Ref (Sym)); -- GAA External end FirstInterruptStreamMapping; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function GetInterruptStreamMappingHandler (TheMapping : Symbol) return LexTokenManager.Lex_String is begin return RawDict.GetInterruptStreamMappingHandler (TheMapping); end GetInterruptStreamMappingHandler; -------------------------------------------------------------------------------- function GetInterruptStreamMappingStream (TheMapping : Symbol) return LexTokenManager.Lex_String is begin return RawDict.GetInterruptStreamMappingStream (TheMapping); end GetInterruptStreamMappingStream; -------------------------------------------------------------------------------- -- Get_Interrupt_Stream_Variable -------------------------------------------------------------------------------- function GetInterruptStreamVariable (ProtectedObject : Symbol; InterruptHandler : Symbol) return Symbol is function Get_Interrupt_Stream_Variable (The_Protected_Object : RawDict.Variable_Info_Ref; Interrupt_Handler : RawDict.Subprogram_Info_Ref) return Symbol --# global in CommandLineData.Content; --# in Dict; --# in LexTokenManager.State; is It : Iterator; Current_Symbol : Symbol := NullSymbol; begin -- Check the PO/interrupt handler pair is valid if RawDict.Get_Type_Symbol (RawDict.Get_Variable_Type (The_Variable => The_Protected_Object)) = GetRegion (Get_Subprogram_Scope (The_Subprogram => Interrupt_Handler)) and then RawDict.Get_Subprogram_Is_Interrupt_Handler (The_Subprogram => Interrupt_Handler) then -- Look for the handler name in the interrupt property mappings It := First_Interrupt_Stream_Mapping (The_Variable => The_Protected_Object); while not IsNullIterator (It) loop if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => RawDict.GetInterruptStreamMappingHandler (CurrentSymbol (It)), Lex_Str2 => RawDict.Get_Subprogram_Name (The_Subprogram => Interrupt_Handler)) = LexTokenManager.Str_Eq then -- Found it. Look up the stream variable name. Current_Symbol := LookupItem (Name => RawDict.GetInterruptStreamMappingStream (CurrentSymbol (It)), Scope => Get_Variable_Scope (The_Variable => The_Protected_Object), Context => ProofContext, Full_Package_Name => False); exit; end if; It := NextSymbol (It); end loop; if Current_Symbol = NullSymbol then -- The default return value is the protected object itself. -- This is used if no mapping exists for the given interrupt handler.
Current_Symbol := RawDict.Get_Variable_Symbol (The_Protected_Object); -- GAA External end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "in GetInterruptStreamVariable"); end if; return Current_Symbol; end Get_Interrupt_Stream_Variable; begin -- GetInterruptStreamVariable return Get_Interrupt_Stream_Variable (The_Protected_Object => RawDict.Get_Variable_Info_Ref (ProtectedObject), -- GAA External Interrupt_Handler => RawDict.Get_Subprogram_Info_Ref (Item => InterruptHandler)); -- GAA External end GetInterruptStreamVariable; -------------------------------------------------------------------------------- -- First_Import_Export -------------------------------------------------------------------------------- -- TBD not yet usable for task types function FirstImportExport (Abstraction : Abstractions; TheProcedure : Symbol) return Iterator is function First_Import_Export (Abstraction : Abstractions; The_Subprogram : RawDict.Subprogram_Info_Ref) return Iterator --# global in Dict; is The_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref; The_Global_Variable : RawDict.Global_Variable_Info_Ref; The_Variable : RawDict.Variable_Info_Ref; ImportExports : Iterator := NullIterator; Stop : Boolean := False; begin The_Subprogram_Parameter := RawDict.Get_Subprogram_First_Parameter (The_Subprogram => The_Subprogram); while The_Subprogram_Parameter /= RawDict.Null_Subprogram_Parameter_Info_Ref and then not Is_Subprogram_Import_Export_Subprogram_Parameter (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Subprogram_Parameter => The_Subprogram_Parameter, Is_Implicit_Proof_Function => False) loop The_Subprogram_Parameter := RawDict.Get_Next_Subprogram_Parameter (The_Subprogram_Parameter => The_Subprogram_Parameter); end loop; if The_Subprogram_Parameter = RawDict.Null_Subprogram_Parameter_Info_Ref then The_Global_Variable := RawDict.Get_Subprogram_First_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction); while not Stop loop if The_Global_Variable = RawDict.Null_Global_Variable_Info_Ref then ImportExports := NullIterator; exit; end if; case RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) is when RawDict.Subprogram_Variable_Item => The_Variable := RawDict.Get_Global_Variable_Variable (The_Global_Variable => The_Global_Variable); if Is_Subprogram_Import_Export_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Variable => The_Variable) then ImportExports := Iterator' (ImportExportIterator, Abstraction, RawDict.Get_Variable_Symbol (The_Variable), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); Stop := True; end if; when RawDict.Subprogram_Parameter_Item => The_Subprogram_Parameter := RawDict.Get_Global_Variable_Parameter (The_Global_Variable => The_Global_Variable); if Is_Subprogram_Import_Export_Subprogram_Parameter (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Subprogram_Parameter => The_Subprogram_Parameter, Is_Implicit_Proof_Function => False) then ImportExports := Iterator' (ImportExportIterator, Abstraction, RawDict.Get_Subprogram_Parameter_Symbol (The_Subprogram_Parameter), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); Stop := True; end if; when others => -- non-exec code ImportExports := NullIterator; Stop := True; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.FirstImportExport"); end case; The_Global_Variable := RawDict.Get_Next_Global_Variable (The_Global_Variable 
=> The_Global_Variable); end loop; else ImportExports := Iterator' (ImportExportIterator, Abstraction, RawDict.Get_Subprogram_Parameter_Symbol (The_Subprogram_Parameter), NullSymbol); end if; return ImportExports; end First_Import_Export; begin -- FirstImportExport return First_Import_Export (Abstraction => Abstraction, The_Subprogram => RawDict.Get_Subprogram_Info_Ref (TheProcedure)); -- GAA External end FirstImportExport; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function FirstImport (Abstraction : Abstractions; TheProcedure : Symbol) return Iterator is The_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref; The_Global_Variable : RawDict.Global_Variable_Info_Ref; The_Subprogram : RawDict.Subprogram_Info_Ref; The_Task_Type : RawDict.Type_Info_Ref; The_Variable : RawDict.Variable_Info_Ref; Imports : Iterator; begin -- can now be called for task types as well and these have no parameters case RawDict.GetSymbolDiscriminant (TheProcedure) is when Subprogram_Symbol => The_Subprogram := RawDict.Get_Subprogram_Info_Ref (Item => TheProcedure); -- GAA External The_Subprogram_Parameter := RawDict.Get_Subprogram_First_Parameter (The_Subprogram => The_Subprogram); if The_Subprogram_Parameter /= RawDict.Null_Subprogram_Parameter_Info_Ref then Imports := Iterator' (ImportIterator, Abstraction, RawDict.Get_Subprogram_Parameter_Symbol (The_Subprogram_Parameter), NullSymbol); if not Is_Imported_Subprogram_Subprogram_Parameter (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Subprogram_Parameter => The_Subprogram_Parameter, Is_Implicit_Proof_Function => False) then Imports := NextSymbol (Imports); end if; else The_Global_Variable := RawDict.Get_Subprogram_First_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction); if The_Global_Variable /= RawDict.Null_Global_Variable_Info_Ref then case RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) is when RawDict.Subprogram_Variable_Item => The_Variable := RawDict.Get_Global_Variable_Variable (The_Global_Variable => The_Global_Variable); Imports := Iterator' (ImportIterator, Abstraction, RawDict.Get_Variable_Symbol (The_Variable), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); if not Is_Imported_Subprogram_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Variable => The_Variable) then Imports := NextSymbol (Imports); end if; when RawDict.Subprogram_Parameter_Item => The_Subprogram_Parameter := RawDict.Get_Global_Variable_Parameter (The_Global_Variable => The_Global_Variable); Imports := Iterator' (ImportIterator, Abstraction, RawDict.Get_Subprogram_Parameter_Symbol (The_Subprogram_Parameter), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); if not Is_Imported_Subprogram_Subprogram_Parameter (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Subprogram_Parameter => The_Subprogram_Parameter, Is_Implicit_Proof_Function => False) then Imports := NextSymbol (Imports); end if; when others => -- non-exec code Imports := NullIterator; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.FirstImport"); end case; else Imports := NullIterator; end if; end if; when Type_Symbol => -- task type SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheProcedure)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => 
"Dictionary.FirstImport"); The_Task_Type := RawDict.Get_Type_Info_Ref (Item => TheProcedure); -- GAA External The_Global_Variable := RawDict.Get_Task_Type_First_Global_Variable (The_Task_Type => The_Task_Type, Abstraction => Abstraction); if The_Global_Variable /= RawDict.Null_Global_Variable_Info_Ref then The_Variable := RawDict.Get_Global_Variable_Variable (The_Global_Variable => The_Global_Variable); Imports := Iterator' (ImportIterator, Abstraction, RawDict.Get_Variable_Symbol (The_Variable), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); if not Is_Imported_Task_Type_Variable (The_Task_Type => The_Task_Type, Abstraction => Abstraction, The_Variable => The_Variable) then Imports := NextSymbol (Imports); end if; else Imports := NullIterator; end if; when others => -- non-exec code Imports := NullIterator; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.FirstImport"); end case; return Imports; end FirstImport; -------------------------------------------------------------------------------- -- Instantiate_Subprogram_Parameters -------------------------------------------------------------------------------- procedure Instantiate_Subprogram_Parameters (Actual_Subprogram : in RawDict.Subprogram_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Actual_Subprogram, --# Comp_Unit, --# Declaration & --# SPARK_IO.File_Sys from *, --# Actual_Subprogram, --# Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State; is separate; -------------------------------------------------------------------------------- procedure InstantiateSubprogramParameters (ActualSubprogramSym : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location) is begin Instantiate_Subprogram_Parameters (Actual_Subprogram => RawDict.Get_Subprogram_Info_Ref (ActualSubprogramSym), -- GAA External Comp_Unit => Comp_Unit, Declaration => Declaration); end InstantiateSubprogramParameters; -------------------------------------------------------------------------------- -- First_Initialized_Own_Variable -------------------------------------------------------------------------------- function FirstInitializedOwnVariable (ThePackage : Symbol) return Iterator is function First_Initialized_Own_Variable (The_Package : RawDict.Package_Info_Ref) return Iterator --# global in Dict; is Own_Variables, InitializedOwnVariables : Iterator; begin Own_Variables := First_Own_Variable (The_Package => The_Package); loop if IsNullIterator (Own_Variables) then InitializedOwnVariables := NullIterator; exit; end if; if Own_Variable_Is_Initialized (The_Variable => RawDict.Get_Variable_Info_Ref (CurrentSymbol (Own_Variables))) then InitializedOwnVariables := Iterator'(InitializedOwnVariableIterator, IsAbstract, Own_Variables.Current, NullSymbol); exit; end if; Own_Variables := NextSymbol (Own_Variables); end loop; return InitializedOwnVariables; end First_Initialized_Own_Variable; begin -- FirstInitializedOwnVariable return First_Initialized_Own_Variable (The_Package => RawDict.Get_Package_Info_Ref (ThePackage)); -- GAA External end FirstInitializedOwnVariable; -------------------------------------------------------------------------------- -- First_Virtual_Element -------------------------------------------------------------------------------- function FirstVirtualElement (TheProtectedType : Symbol) return Iterator is function 
First_Virtual_Element (The_Protected_Type : RawDict.Type_Info_Ref) return Iterator --# global in Dict; is VirtualElementList : Symbol; It : Iterator; begin VirtualElementList := RawDict.Get_Type_Virtual_Element_List (Type_Mark => The_Protected_Type); if VirtualElementList = NullSymbol then It := NullIterator; else It := Iterator' (VirtualElementIterator, IsAbstract, RawDict.Get_Variable_Symbol (RawDict.GetVirtualElementVariable (VirtualElementList)), VirtualElementList); end if; return It; end First_Virtual_Element; begin -- FirstVirtualElement return First_Virtual_Element (The_Protected_Type => RawDict.Get_Type_Info_Ref (TheProtectedType)); -- GAA External end FirstVirtualElement; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- procedure SetHasDelayProperty (TheProcedure : in Symbol) is begin RawDict.Set_Subprogram_Has_Delay_Property (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (TheProcedure)); -- GAA External end SetHasDelayProperty; -------------------------------------------------------------------------------- procedure SetUsesUnprotectedVariables (Sym : in Symbol) is begin case RawDict.GetSymbolDiscriminant (Sym) is when Subprogram_Symbol => RawDict.Set_Subprogram_Uses_Unprotected_Variables (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Sym)); -- GAA External when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Sym)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.SetUsesUnprotectedVariables"); RawDict.Set_Task_Type_Uses_Unprotected_Variables (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => Sym)); -- GAA External when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.SetUsesUnprotectedVariables"); end case; end SetUsesUnprotectedVariables; -------------------------------------------------------------------------------- -- Set_Unprotected_Reference -------------------------------------------------------------------------------- procedure SetUnprotectedReference (Variable : in Symbol; ByThread : in Symbol) is procedure Set_Unprotected_Reference (The_Variable : in RawDict.Variable_Info_Ref; By_Thread : in Symbol) --# global in out Dict; --# derives Dict from *, --# By_Thread, --# The_Variable; is begin RawDict.Set_Own_Variable_Unprotected_Reference (The_Own_Variable => RawDict.Get_Variable_Own_Variable (The_Variable => The_Variable), By_Thread => By_Thread); end Set_Unprotected_Reference; begin -- SetUnprotectedReference Set_Unprotected_Reference (The_Variable => RawDict.Get_Variable_Info_Ref (Variable), -- GAA External By_Thread => ByThread); end SetUnprotectedReference; -------------------------------------------------------------------------------- -- Set_Suspends_Reference -------------------------------------------------------------------------------- procedure SetSuspendsReference (Variable : in Symbol; ByThread : in Symbol) is procedure Set_Suspends_Reference (The_Variable : in RawDict.Variable_Info_Ref; By_Thread : in Symbol) --# global in out Dict; --# derives Dict from *, --# By_Thread, --# The_Variable; is begin RawDict.Set_Own_Variable_Suspends_Reference (The_Own_Variable => RawDict.Get_Variable_Own_Variable (The_Variable => The_Variable), By_Thread => By_Thread); end Set_Suspends_Reference; begin -- SetSuspendsReference Set_Suspends_Reference (The_Variable => 
RawDict.Get_Variable_Info_Ref (Variable), -- GAA External By_Thread => ByThread); end SetSuspendsReference; -------------------------------------------------------------------------------- -- Set_Virtual_Element_Seen_By_Owner -------------------------------------------------------------------------------- procedure SetVirtualElementSeenByOwner (TheVariable : in Symbol) is procedure Set_Virtual_Element_Seen_By_Owner (The_Variable : in RawDict.Variable_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Variable; is begin RawDict.SetVirtualElementSeenByOwner (RawDict.Get_Variable_Virtual_Element (The_Variable => The_Variable)); end Set_Virtual_Element_Seen_By_Owner; begin -- SetVirtualElementSeenByOwner Set_Virtual_Element_Seen_By_Owner (The_Variable => RawDict.Get_Variable_Info_Ref (TheVariable)); -- GAA External end SetVirtualElementSeenByOwner; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function HasDelayProperty (TheProcedure : Symbol) return Boolean is begin return RawDict.Get_Subprogram_Has_Delay_Property (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (TheProcedure)); -- GAA External end HasDelayProperty; -------------------------------------------------------------------------------- -- Subprogram_May_Block -------------------------------------------------------------------------------- function SubprogramMayBlock (Subprogram : Symbol) return Boolean is function Subprogram_May_Block (The_Subprogram : RawDict.Subprogram_Info_Ref) return Boolean --# global in Dict; is begin return The_Subprogram = Dict.Subprograms.STC_Suspend_Until_True or else RawDict.Get_Subprogram_Is_Entry (The_Subprogram => The_Subprogram) or else RawDict.Get_Subprogram_Has_Delay_Property (The_Subprogram => The_Subprogram) or else not IsNullIterator (First_Subprogram_Suspends_List_Item (The_Subprogram => The_Subprogram)); end Subprogram_May_Block; begin -- SubprogramMayBlock return Subprogram_May_Block (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Subprogram)); -- GAA External end SubprogramMayBlock; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function BodyIsHidden (Sym : Symbol) return Boolean is Result : Boolean; begin case RawDict.GetSymbolDiscriminant (Sym) is when Subprogram_Symbol => Result := RawDict.Get_Subprogram_Body_Is_Hidden (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Sym)); -- GAA External when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Sym)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.BodyIsHidden"); Result := RawDict.Get_Task_Type_Body_Is_Hidden (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => Sym)); -- GAA External when others => -- non-exec code Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.BodyIsHidden"); end case; return Result; end BodyIsHidden; -------------------------------------------------------------------------------- function GetHasDerivesAnnotation (Task_Or_Proc : Symbol) return Boolean is Result : Boolean; begin case RawDict.GetSymbolDiscriminant (Task_Or_Proc) is when Subprogram_Symbol => Result := RawDict.Get_Subprogram_Has_Derives_Annotation (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Task_Or_Proc)); -- GAA External when 
ImplicitProofFunctionSymbol => Result := RawDict.Get_Subprogram_Has_Derives_Annotation (The_Subprogram => RawDict.GetImplicitProofFunctionAdaFunction (Task_Or_Proc)); when Type_Symbol => -- The only other possibility is a task. We could explicitly check this and raise -- a fatal error here, but the same check will be done by GetTaskRef. SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Task_Or_Proc)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.GetHasDerivesAnnotation"); Result := RawDict.Get_Task_Type_Has_Derives_Annotation (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => Task_Or_Proc)); -- GAA External when others => -- non-exec code Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.GetHasDerivesAnnotation"); end case; return Result; end GetHasDerivesAnnotation; -------------------------------------------------------------------------------- function DelayPropertyIsAccountedFor (TheProcedure : Symbol) return Boolean is begin return RawDict.Get_Subprogram_Delay_Property_Is_Accounted_For (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (TheProcedure)); -- GAA External end DelayPropertyIsAccountedFor; -------------------------------------------------------------------------------- function Ultimate_Subject (Own_Variable : RawDict.Variable_Info_Ref) return RawDict.Variable_Info_Ref --# global in Dict; is The_Variable : RawDict.Variable_Info_Ref; begin The_Variable := Own_Variable; while Is_Constituent (The_Variable => The_Variable) loop The_Variable := Get_Subject (The_Variable => The_Variable); end loop; return The_Variable; end Ultimate_Subject; -------------------------------------------------------------------------------- -- Set_Is_Suspendable -------------------------------------------------------------------------------- procedure SetIsSuspendable (Variable : in Symbol) is procedure Set_Is_Suspendable (The_Variable : in RawDict.Variable_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Variable; is begin RawDict.Set_Own_Variable_Is_Suspendable (The_Own_Variable => RawDict.Get_Variable_Own_Variable (The_Variable => The_Variable)); end Set_Is_Suspendable; begin -- SetIsSuspendable Set_Is_Suspendable (The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External end SetIsSuspendable; -------------------------------------------------------------------------------- -- Get_Is_Suspendable -------------------------------------------------------------------------------- function GetIsSuspendable (Variable : Symbol) return Boolean is function Get_Is_Suspendable (Own_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Own_Variable_Is_Suspendable (The_Own_Variable => RawDict.Get_Variable_Own_Variable (The_Variable => Ultimate_Subject (Own_Variable => Own_Variable))); end Get_Is_Suspendable; begin -- GetIsSuspendable return Get_Is_Suspendable (Own_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External end GetIsSuspendable; -------------------------------------------------------------------------------- -- Set_Has_Interrupt_Property -------------------------------------------------------------------------------- procedure SetHasInterruptProperty (Variable : in Symbol) is procedure Set_Has_Interrupt_Property (The_Variable : in RawDict.Variable_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Variable; is begin 
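-- Record the interrupt property on the own variable entry behind this variable.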
RawDict.Set_Own_Variable_Has_Interrupt_Property (The_Own_Variable => RawDict.Get_Variable_Own_Variable (The_Variable => The_Variable)); end Set_Has_Interrupt_Property; begin -- SetHasInterruptProperty Set_Has_Interrupt_Property (The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External end SetHasInterruptProperty; -------------------------------------------------------------------------------- -- Get_Has_Interrupt_Property -------------------------------------------------------------------------------- function GetHasInterruptProperty (Variable : Symbol) return Boolean is function Get_Has_Interrupt_Property (Own_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Own_Variable_Has_Interrupt_Property (The_Own_Variable => RawDict.Get_Variable_Own_Variable (The_Variable => Ultimate_Subject (Own_Variable => Own_Variable))); end Get_Has_Interrupt_Property; begin -- GetHasInterruptProperty return Get_Has_Interrupt_Property (Own_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External end GetHasInterruptProperty; -------------------------------------------------------------------------------- -- Get_Virtual_Element_Owner -------------------------------------------------------------------------------- function Get_Virtual_Element_Owner (The_Variable : RawDict.Variable_Info_Ref) return Symbol --# global in Dict; is begin return RawDict.GetVirtualElementOwner (RawDict.Get_Variable_Virtual_Element (The_Variable => The_Variable)); end Get_Virtual_Element_Owner; -------------------------------------------------------------------------------- function GetVirtualElementOwner (Variable : Symbol) return Symbol is begin return Get_Virtual_Element_Owner (The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External end GetVirtualElementOwner; -------------------------------------------------------------------------------- -- Is_Virtual_Element -------------------------------------------------------------------------------- function IsVirtualElement (Variable : Symbol) return Boolean is function Is_Virtual_Element (The_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Variable_Virtual_Element (The_Variable => The_Variable) /= NullSymbol; end Is_Virtual_Element; begin -- IsVirtualElement return Is_Virtual_Element (The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External end IsVirtualElement; -------------------------------------------------------------------------------- -- Is_Virtual_Element_For_Type -------------------------------------------------------------------------------- function IsVirtualElementForType (TheVariable : Symbol; TheProtectedType : Symbol) return Boolean is function Is_Virtual_Element_For_Type (The_Variable : RawDict.Variable_Info_Ref; The_Protected_Type : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Variable_Virtual_Element (The_Variable => The_Variable) /= NullSymbol and then Get_Root_Type (Type_Mark => Get_Type (The_Symbol => Get_Virtual_Element_Owner (The_Variable))) = The_Protected_Type; end Is_Virtual_Element_For_Type; begin -- IsVirtualElementForType return Is_Virtual_Element_For_Type (The_Variable => RawDict.Get_Variable_Info_Ref (TheVariable), -- GAA External The_Protected_Type => RawDict.Get_Type_Info_Ref (TheProtectedType)); -- GAA External end IsVirtualElementForType; -------------------------------------------------------------------------------- -- 
Virtual_Element_Seen_By_Owner -------------------------------------------------------------------------------- function VirtualElementSeenByOwner (Variable : Symbol) return Boolean is function Virtual_Element_Seen_By_Owner (The_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.GetVirtualElementSeenByOwner (RawDict.Get_Variable_Virtual_Element (The_Variable => The_Variable)); end Virtual_Element_Seen_By_Owner; begin -- VirtualElementSeenByOwner return Virtual_Element_Seen_By_Owner (The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External end VirtualElementSeenByOwner; -------------------------------------------------------------------------------- -- Set_Priority_Property -------------------------------------------------------------------------------- procedure SetPriorityProperty (OwnVariable : in Symbol; TheValue : in LexTokenManager.Lex_String) is procedure Set_Priority_Property (Own_Variable : in RawDict.Variable_Info_Ref; The_Value : in LexTokenManager.Lex_String) --# global in out Dict; --# derives Dict from *, --# Own_Variable, --# The_Value; is begin RawDict.Set_Own_Variable_Priority_Property (The_Own_Variable => RawDict.Get_Variable_Own_Variable (The_Variable => Own_Variable), The_Value => The_Value); end Set_Priority_Property; begin -- SetPriorityProperty Set_Priority_Property (Own_Variable => RawDict.Get_Variable_Info_Ref (OwnVariable), -- GAA External The_Value => TheValue); end SetPriorityProperty; -------------------------------------------------------------------------------- -- Set_Integrity_Property -------------------------------------------------------------------------------- procedure SetIntegrityProperty (OwnVariable : in Symbol; TheValue : in LexTokenManager.Lex_String) is procedure Set_Integrity_Property (The_Variable : in RawDict.Variable_Info_Ref; The_Value : in LexTokenManager.Lex_String) --# global in out Dict; --# derives Dict from *, --# The_Value, --# The_Variable; is begin RawDict.Set_Own_Variable_Integrity_Property (The_Own_Variable => RawDict.Get_Variable_Own_Variable (The_Variable => The_Variable), The_Value => The_Value); end Set_Integrity_Property; begin -- SetIntegrityProperty Set_Integrity_Property (The_Variable => RawDict.Get_Variable_Info_Ref (OwnVariable), -- GAA External The_Value => TheValue); end SetIntegrityProperty; -------------------------------------------------------------------------------- -- Get_Priority_Property -------------------------------------------------------------------------------- function GetPriorityProperty (OwnVariable : Symbol) return LexTokenManager.Lex_String is function Get_Priority_Property (Own_Variable : RawDict.Variable_Info_Ref) return LexTokenManager.Lex_String --# global in Dict; is begin return RawDict.Get_Own_Variable_Priority_Property (The_Own_Variable => RawDict.Get_Variable_Own_Variable (The_Variable => Ultimate_Subject (Own_Variable => Own_Variable))); end Get_Priority_Property; begin -- GetPriorityProperty return Get_Priority_Property (Own_Variable => RawDict.Get_Variable_Info_Ref (OwnVariable)); -- GAA External end GetPriorityProperty; -------------------------------------------------------------------------------- -- Get_Integrity_Property -------------------------------------------------------------------------------- function Get_Integrity_Property (The_Variable : RawDict.Variable_Info_Ref) return LexTokenManager.Lex_String --# global in Dict; is Result : LexTokenManager.Lex_String; begin if Is_Own_Variable (The_Variable => The_Variable) 
then Result := RawDict.Get_Own_Variable_Integrity_Property (The_Own_Variable => RawDict.Get_Variable_Own_Variable (The_Variable => The_Variable)); else Result := LexTokenManager.Null_String; end if; return Result; end Get_Integrity_Property; -------------------------------------------------------------------------------- function GetIntegrityProperty (S : Symbol) return LexTokenManager.Lex_String is begin return Get_Integrity_Property (The_Variable => RawDict.Get_Variable_Info_Ref (S)); -- GAA External end GetIntegrityProperty; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function RelationViolatesInfoFlowPolicy (TheExport : Symbol; TheImport : Symbol) return Boolean is ExpILexToken, ImpILexToken : LexTokenManager.Lex_String; ExpIVal, ImpIVal : Maths.Value; ExpI, ImpI : Integer; ExpN, ImpN : Natural; Ok : Maths.ErrorCode; Result : Boolean; DefinedPolicy : CommandLineData.Defined_Info_Flow_Policies; type DefaultIntegrities is array (CommandLineData.Defined_Info_Flow_Policies) of Natural; DefaultExportIntegrities : constant DefaultIntegrities := DefaultIntegrities'(CommandLineData.Safety => Natural'Last, CommandLineData.Security => Natural'First); DefaultImportIntegrities : constant DefaultIntegrities := DefaultIntegrities'(CommandLineData.Safety => Natural'First, CommandLineData.Security => Natural'Last); begin -- Only compare integrity levels for own variables. This avoids -- generating many spurious errors for interior, hidden -- information flows involving local variables. These flows, of course, -- exhibit themselves in the checking of a subprogram's imports -- and exports, so are not lost. if RawDict.GetSymbolDiscriminant (TheImport) = Variable_Symbol and then Is_Own_Variable (The_Variable => RawDict.Get_Variable_Info_Ref (Item => TheImport)) -- GAA External and then RawDict.GetSymbolDiscriminant (TheExport) = Variable_Symbol and then Is_Own_Variable (The_Variable => RawDict.Get_Variable_Info_Ref (Item => TheExport)) then -- GAA External case CommandLineData.Content.Info_Flow_Policy is when CommandLineData.None => Result := False; when CommandLineData.Safety | CommandLineData.Security => DefinedPolicy := CommandLineData.Content.Info_Flow_Policy; ExpILexToken := Get_Integrity_Property (The_Variable => RawDict.Get_Variable_Info_Ref (Item => TheExport)); -- GAA External ImpILexToken := Get_Integrity_Property (The_Variable => RawDict.Get_Variable_Info_Ref (Item => TheImport)); -- GAA External if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => ExpILexToken, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => ImpILexToken, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq then -- Both unspecified local variables, so anything goes...
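-- (neither side constrains the flow, so this pair cannot violate the policy)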
Result := False; else Result := False; if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => ExpILexToken, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq then ExpN := DefaultExportIntegrities (DefinedPolicy); else ExpIVal := Maths.ValueRep (ExpILexToken); Maths.ValueToInteger (ExpIVal, ExpI, Ok); if Ok = Maths.NoError and then ExpI >= 0 then ExpN := ExpI; else ExpN := 0; Result := True; end if; end if; if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => ImpILexToken, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq then ImpN := DefaultImportIntegrities (DefinedPolicy); else ImpIVal := Maths.ValueRep (ImpILexToken); Maths.ValueToInteger (ImpIVal, ImpI, Ok); if Ok = Maths.NoError and then ImpI >= 0 then ImpN := ImpI; else ImpN := 0; Result := True; end if; end if; if not Result then case DefinedPolicy is when CommandLineData.Security => Result := ExpN < ImpN; when CommandLineData.Safety => Result := ExpN > ImpN; end case; end if; end if; end case; else Result := False; end if; return Result; end RelationViolatesInfoFlowPolicy; -------------------------------------------------------------------------------- -- Has_Valid_Priority_Property -------------------------------------------------------------------------------- function HasValidPriorityProperty (OwnVariable : Symbol) return Boolean is function Has_Valid_Priority_Property (Own_Variable : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Own_Variable_Has_Valid_Priority_Property (The_Own_Variable => RawDict.Get_Variable_Own_Variable (The_Variable => Ultimate_Subject (Own_Variable => Own_Variable))); end Has_Valid_Priority_Property; begin -- HasValidPriorityProperty return Has_Valid_Priority_Property (Own_Variable => RawDict.Get_Variable_Info_Ref (OwnVariable)); -- GAA External end HasValidPriorityProperty; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function IsThread (Sym : Symbol) return Boolean is begin return (RawDict.GetSymbolDiscriminant (Sym) = Type_Symbol and then Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Sym))) -- GAA External or else (RawDict.GetSymbolDiscriminant (Sym) = Subprogram_Symbol and then (Is_Main_Program (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Sym)) -- GAA External or else RawDict.Get_Subprogram_Is_Interrupt_Handler (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Sym)))); -- GAA External end IsThread; -------------------------------------------------------------------------------- function UsesUnprotectedVariables (Sym : Symbol) return Boolean is Result : Boolean; begin case RawDict.GetSymbolDiscriminant (Sym) is when Subprogram_Symbol => Result := RawDict.Get_Subprogram_Uses_Unprotected_Variables (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Sym)); -- GAA External when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Sym)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.UsesUnprotectedVariables"); Result := RawDict.Get_Task_Type_Uses_Unprotected_Variables (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => Sym)); -- GAA External when others => -- non-exec code Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.UsesUnprotectedVariables"); end case; return Result; end 
UsesUnprotectedVariables; -------------------------------------------------------------------------------- -- Get_Unprotected_Reference -------------------------------------------------------------------------------- function GetUnprotectedReference (Variable : Symbol) return Symbol is function Get_Unprotected_Reference (The_Variable : RawDict.Variable_Info_Ref) return Symbol --# global in Dict; is begin return RawDict.Get_Own_Variable_Unprotected_Reference (The_Own_Variable => RawDict.Get_Variable_Own_Variable (The_Variable => The_Variable)); end Get_Unprotected_Reference; begin -- GetUnprotectedReference return Get_Unprotected_Reference (The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External end GetUnprotectedReference; -------------------------------------------------------------------------------- -- Get_Suspends_Reference -------------------------------------------------------------------------------- function GetSuspendsReference (Variable : Symbol) return Symbol is function Get_Suspends_Reference (The_Variable : RawDict.Variable_Info_Ref) return Symbol --# global in Dict; is begin return RawDict.Get_Own_Variable_Suspends_Reference (The_Own_Variable => RawDict.Get_Variable_Own_Variable (The_Variable => The_Variable)); end Get_Suspends_Reference; begin -- GetSuspendsReference return Get_Suspends_Reference (The_Variable => RawDict.Get_Variable_Info_Ref (Variable)); -- GAA External end GetSuspendsReference; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function Is_Or_Is_Refinement (The_Variable1, The_Variable2 : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; -- Returns True if Sym1 and Sym2 are the same symbol or Sym2 is a refinement of Sym1 is begin return (The_Variable1 = The_Variable2) or else (Is_Own_Variable (The_Variable => The_Variable1) and then Is_Refinement (Subject => The_Variable1, Constituent => The_Variable2)); end Is_Or_Is_Refinement; -------------------------------------------------------------------------------- -- Suspends_On -------------------------------------------------------------------------------- function Suspends_On_Procedure (The_Subprogram : RawDict.Subprogram_Info_Ref; The_PO_Or_SO : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is Result : Boolean := False; It : Iterator; begin It := First_Subprogram_Suspends_List_Item (The_Subprogram => The_Subprogram); while not IsNullIterator (It) loop Result := Is_Or_Is_Refinement (The_Variable1 => RawDict.Get_Variable_Info_Ref (CurrentSymbol (It)), The_Variable2 => The_PO_Or_SO); exit when Result; It := NextSymbol (It); end loop; return Result; end Suspends_On_Procedure; -------------------------------------------------------------------------------- function Suspends_On_Task_Type (The_Task_Type : RawDict.Type_Info_Ref; The_PO_Or_SO : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is Result : Boolean := False; It : Iterator; begin It := First_Task_Type_Suspends_List_Item (The_Task_Type => The_Task_Type); while not IsNullIterator (It) loop Result := Is_Or_Is_Refinement (The_Variable1 => RawDict.Get_Variable_Info_Ref (CurrentSymbol (It)), The_Variable2 => The_PO_Or_SO); exit when Result; It := NextSymbol (It); end loop; return Result; end Suspends_On_Task_Type; -------------------------------------------------------------------------------- function SuspendsOn (TheTaskOrProc : Symbol; ThePOorSO : Symbol) return Boolean is Result : Boolean; 
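-- Note: dispatches on the symbol kind: procedures are checked with Suspends_On_Procedure,
-- task types with Suspends_On_Task_Type; any other kind is reported as a corrupt symbol table.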
begin case RawDict.GetSymbolDiscriminant (TheTaskOrProc) is when Subprogram_Symbol => Result := Suspends_On_Procedure (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheTaskOrProc), -- GAA External The_PO_Or_SO => RawDict.Get_Variable_Info_Ref (ThePOorSO)); -- GAA External when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheTaskOrProc)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.SuspendsOn"); Result := Suspends_On_Task_Type (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => TheTaskOrProc), -- GAA External The_PO_Or_SO => RawDict.Get_Variable_Info_Ref (ThePOorSO)); -- GAA External when others => -- non-exec code Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.SuspendsOn"); end case; return Result; end SuspendsOn; -------------------------------------------------------------------------------- -- Suspends_List_Item_Is_Accounted_For -------------------------------------------------------------------------------- function SuspendsListItemIsAccountedFor (TheTaskOrProc : Symbol; ThePOorSO : Symbol) return Boolean is function Suspends_List_Item_Is_Accounted_For (TheTaskOrProc : Symbol; The_PO_Or_SO : RawDict.Variable_Info_Ref) return Boolean --# global in Dict; is Result : Boolean; It : Iterator; begin Result := False; case RawDict.GetSymbolDiscriminant (TheTaskOrProc) is when Subprogram_Symbol => It := First_Subprogram_Suspends_List_Item (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheTaskOrProc)); -- GAA External when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheTaskOrProc)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.SuspendsListItemIsAccountedFor"); It := First_Task_Type_Suspends_List_Item (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => TheTaskOrProc)); -- GAA External when others => -- non-exec code It := NullIterator; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.SuspendsListItemIsAccountedFor"); end case; while not IsNullIterator (It) loop if Is_Or_Is_Refinement (The_Variable1 => RawDict.Get_Variable_Info_Ref (CurrentSymbol (It)), The_Variable2 => The_PO_Or_SO) then Result := RawDict.GetSuspendsListItemIsAccountedFor (It.Context); exit; end if; It := NextSymbol (It); end loop; return Result; end Suspends_List_Item_Is_Accounted_For; begin -- SuspendsListItemIsAccountedFor return Suspends_List_Item_Is_Accounted_For (TheTaskOrProc => TheTaskOrProc, The_PO_Or_SO => RawDict.Get_Variable_Info_Ref (ThePOorSO)); -- GAA External end SuspendsListItemIsAccountedFor; -------------------------------------------------------------------------------- -- Mark_Accounts_For_Suspends_List_Item -------------------------------------------------------------------------------- procedure Mark_Procedure_Accounts_For_Suspends_List_Item (The_Subprogram : in RawDict.Subprogram_Info_Ref; The_PO_Or_SO : in RawDict.Variable_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_PO_Or_SO, --# The_Subprogram; is It : Iterator; begin It := First_Subprogram_Suspends_List_Item (The_Subprogram => The_Subprogram); while not IsNullIterator (It) loop if Is_Or_Is_Refinement (The_Variable1 => RawDict.Get_Variable_Info_Ref (CurrentSymbol (It)), The_Variable2 => The_PO_Or_SO) then RawDict.SetSuspendsListItemIsAccountedFor (It.Context); exit; end if; It := NextSymbol (It); end loop; end 
Mark_Procedure_Accounts_For_Suspends_List_Item; -------------------------------------------------------------------------------- procedure Mark_Task_Type_Accounts_For_Suspends_List_Item (The_Task_Type : in RawDict.Type_Info_Ref; The_PO_Or_SO : in RawDict.Variable_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_PO_Or_SO, --# The_Task_Type; is It : Iterator; begin It := First_Task_Type_Suspends_List_Item (The_Task_Type => The_Task_Type); while not IsNullIterator (It) loop if Is_Or_Is_Refinement (The_Variable1 => RawDict.Get_Variable_Info_Ref (CurrentSymbol (It)), The_Variable2 => The_PO_Or_SO) then RawDict.SetSuspendsListItemIsAccountedFor (It.Context); exit; end if; It := NextSymbol (It); end loop; end Mark_Task_Type_Accounts_For_Suspends_List_Item; -------------------------------------------------------------------------------- -- Mark_Accounts_For_Suspends_List_Item -------------------------------------------------------------------------------- procedure MarkAccountsForSuspendsListItem (TheTaskOrProc : in Symbol; ThePOorSO : in Symbol) is procedure Mark_Accounts_For_Suspends_List_Item (TheTaskOrProc : in Symbol; The_PO_Or_SO : in RawDict.Variable_Info_Ref) --# global in out Dict; --# derives Dict from *, --# TheTaskOrProc, --# The_PO_Or_SO; is begin case RawDict.GetSymbolDiscriminant (TheTaskOrProc) is when Subprogram_Symbol => Mark_Procedure_Accounts_For_Suspends_List_Item (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheTaskOrProc), -- GAA External The_PO_Or_SO => The_PO_Or_SO); when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheTaskOrProc)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.MarkAccountsForSuspendsListItem"); Mark_Task_Type_Accounts_For_Suspends_List_Item (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => TheTaskOrProc), -- GAA External The_PO_Or_SO => The_PO_Or_SO); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.MarkAccountsForSuspendsListItem"); end case; end Mark_Accounts_For_Suspends_List_Item; begin -- MarkAccountsForSuspendsListItem Mark_Accounts_For_Suspends_List_Item (TheTaskOrProc => TheTaskOrProc, The_PO_Or_SO => RawDict.Get_Variable_Info_Ref (ThePOorSO)); -- GAA External end MarkAccountsForSuspendsListItem; -------------------------------------------------------------------------------- -- Mark_Accounts_For_Suspends_List_Items -------------------------------------------------------------------------------- procedure MarkAccountsForSuspendsListItems (TheTaskOrProc : in Symbol; TheItemsInProcedure : in Symbol) is procedure Mark_Accounts_For_Suspends_List_Items (TheTaskOrProc : in Symbol; The_Items_In_Procedure : in RawDict.Subprogram_Info_Ref) --# global in out Dict; --# derives Dict from *, --# TheTaskOrProc, --# The_Items_In_Procedure; is It : Iterator; begin It := First_Subprogram_Suspends_List_Item (The_Subprogram => The_Items_In_Procedure); while not IsNullIterator (It) loop case RawDict.GetSymbolDiscriminant (TheTaskOrProc) is when Subprogram_Symbol => Mark_Procedure_Accounts_For_Suspends_List_Item (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => TheTaskOrProc), -- GAA External The_PO_Or_SO => RawDict.Get_Variable_Info_Ref (CurrentSymbol (It))); when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheTaskOrProc)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => 
"Dictionary.MarkAccountsForSuspendsListItems"); Mark_Task_Type_Accounts_For_Suspends_List_Item (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => TheTaskOrProc), -- GAA External The_PO_Or_SO => RawDict.Get_Variable_Info_Ref (CurrentSymbol (It))); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.MarkAccountsForSuspendsListItems"); end case; It := NextSymbol (It); end loop; end Mark_Accounts_For_Suspends_List_Items; begin -- MarkAccountsForSuspendsListItems Mark_Accounts_For_Suspends_List_Items (TheTaskOrProc => TheTaskOrProc, The_Items_In_Procedure => RawDict.Get_Subprogram_Info_Ref (Item => TheItemsInProcedure)); -- GAA External end MarkAccountsForSuspendsListItems; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- procedure MarkAccountsForDelay (TheProcedure : in Symbol) is begin RawDict.Set_Subprogram_Mark_Accounts_For_Delay (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (TheProcedure)); -- GAA External end MarkAccountsForDelay; -------------------------------------------------------------------------------- -- Suspends_List_Is_Propagated -------------------------------------------------------------------------------- function SuspendsListIsPropagated (FromProcedure : Symbol; ToTaskOrProc : Symbol) return Boolean is function Suspends_List_Is_Propagated (From_Procedure : RawDict.Subprogram_Info_Ref; ToTaskOrProc : Symbol) return Boolean --# global in Dict; is Result : Boolean; It : Iterator; begin Result := True; It := First_Subprogram_Suspends_List_Item (The_Subprogram => From_Procedure); while not IsNullIterator (It) loop --# accept Flow, 41, "Stable expression expected here"; case RawDict.GetSymbolDiscriminant (ToTaskOrProc) is when Subprogram_Symbol => Result := Suspends_On_Procedure (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => ToTaskOrProc), -- GAA External The_PO_Or_SO => RawDict.Get_Variable_Info_Ref (CurrentSymbol (It))); when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => ToTaskOrProc)), -- GAA External Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.SuspendsListIsPropagated"); Result := Suspends_On_Task_Type (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => ToTaskOrProc), -- GAA External The_PO_Or_SO => RawDict.Get_Variable_Info_Ref (CurrentSymbol (It))); when others => -- non-exec code Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.SuspendsListIsPropagated"); end case; --# end accept; exit when not Result; It := NextSymbol (It); end loop; return Result; end Suspends_List_Is_Propagated; begin -- SuspendsListIsPropagated return Suspends_List_Is_Propagated (From_Procedure => RawDict.Get_Subprogram_Info_Ref (Item => FromProcedure), -- GAA External ToTaskOrProc => ToTaskOrProc); end SuspendsListIsPropagated; -------------------------------------------------------------------------------- -- Get_Inherit_Depth -------------------------------------------------------------------------------- function GetInheritDepth (FieldName : LexTokenManager.Lex_String; RecordType : Symbol) return Natural is function Get_Inherit_Depth (Field_Name : LexTokenManager.Lex_String; Record_Type : RawDict.Type_Info_Ref) return Natural --# global in Dict; --# in LexTokenManager.State; is The_Record_Component : RawDict.Record_Component_Info_Ref; 
The_First_Record_Component : RawDict.Record_Component_Info_Ref; Current_Record : RawDict.Type_Info_Ref; Result : Natural := Natural'First; begin Current_Record := Record_Type; loop The_First_Record_Component := RawDict.Get_Type_First_Record_Component (Type_Mark => Current_Record); The_Record_Component := The_First_Record_Component; while The_Record_Component /= RawDict.Null_Record_Component_Info_Ref -- did not find and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => RawDict.Get_Record_Component_Name (The_Record_Component => The_Record_Component), Lex_Str2 => Field_Name) /= LexTokenManager.Str_Eq loop -- found The_Record_Component := RawDict.Get_Next_Record_Component (The_Record_Component => The_Record_Component); end loop; exit when The_Record_Component /= RawDict.Null_Record_Component_Info_Ref -- carry success out of outer loop -- if we get here we failed to find field in local declarations -- so we search in inherited fields if there are any -- exit when no inherited fields found or else not RawDict.Get_Record_Component_Inherited_Field (The_Record_Component => The_First_Record_Component) or else Result = Natural'Last; -- restart search in inherited fields Current_Record := RawDict.Get_Record_Component_Type (The_Record_Component => The_First_Record_Component); Result := Result + 1; end loop; return Result; end Get_Inherit_Depth; begin -- GetInheritDepth return Get_Inherit_Depth (Field_Name => FieldName, Record_Type => RawDict.Get_Type_Info_Ref (RecordType)); -- GAA External end GetInheritDepth; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- procedure Add_Generic_Unit (Kind : in Generic_Kind; Scope : in Scopes; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Generic_Unit : out Symbol) is The_Generic_Unit : RawDict.Generic_Unit_Info_Ref; begin RawDict.Create_Generic_Unit (Kind => Kind, Scope => Scope, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, The_Generic_Unit => The_Generic_Unit); Generic_Unit := RawDict.Get_Generic_Unit_Symbol (The_Generic_Unit); -- GAA External end Add_Generic_Unit; -------------------------------------------------------------------------------- procedure ReportUsage is begin -- dictionary symbols are never recycled, so the 'Current' symbol -- gives the maximum usage Statistics.SetTableUsage (Statistics.SymbolTable, Dynamic_Symbol_Table.Get_Current_Usage (Dict.Symbols)); end ReportUsage; -------------------------------------------------------------------------------- procedure Initialize (Write_To_File : in Boolean) is separate; -------------------------------------------------------------------------------- package body TargetData is separate; -------------------------------------------------------------------------------- procedure Read_Target_Data_File is begin TargetData.Read_Target_Data_File; end Read_Target_Data_File; procedure Output_Target_Data_File (To_File : in SPARK_IO.File_Type) is begin TargetData.Output_Target_Data_File (To_File => To_File); end Output_Target_Data_File; end Dictionary; spark-2012.0.deb/examiner/sem-get_type_bounds.adb0000644000175000017500000000514411753202336020701 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms 
of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) procedure Get_Type_Bounds (Type_Symbol : in Dictionary.Symbol; Lower_Bound, Upper_Bound : out Typ_Type_Bound) is Maths_Error : Maths.ErrorCode; Bound_Val : Integer; begin if Dictionary.TypeIsBoolean (Type_Symbol) then Lower_Bound := Typ_Type_Bound'(Is_Defined => True, Value => 0); Upper_Bound := Typ_Type_Bound'(Is_Defined => True, Value => 1); else Maths.ValueToInteger (Maths.ValueRep (Dictionary.GetScalarAttributeValue (False, -- don't want base type LexTokenManager.First_Token, Type_Symbol)), Bound_Val, -- to get Maths_Error); -- to get Lower_Bound := Typ_Type_Bound'(Is_Defined => (Maths_Error = Maths.NoError), Value => Bound_Val); Maths.ValueToInteger (Maths.ValueRep (Dictionary.GetScalarAttributeValue (False, -- don't want base type LexTokenManager.Last_Token, Type_Symbol)), Bound_Val, -- to get Maths_Error); -- to get Upper_Bound := Typ_Type_Bound'(Is_Defined => (Maths_Error = Maths.NoError), Value => Bound_Val); SystemErrors.RT_Assert (C => not (Lower_Bound.Is_Defined and then Upper_Bound.Is_Defined) or else (Lower_Bound.Value <= Upper_Bound.Value), Sys_Err => SystemErrors.Math_Error, Msg => "Lower bound > Upper bound in Get_Type_Bounds"); end if; end Get_Type_Bounds; spark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration.adb0000644000175000017500000011476711753202336027001 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Wf_Basic_Declarative_Item) procedure Wf_Basic_Declaration (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) is Node_To_Check : STree.SyntaxNode; procedure Wf_Constant_Declaration (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# out Aggregate_Stack.State; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.constant_declaration; --# post STree.Table = STree.Table~; is Ident_Node, Type_Node, Exp_Node : STree.SyntaxNode; Type_Sym : Dictionary.Symbol; Exp_Type : Exp_Record; Unwanted_Seq : SeqAlgebra.Seq; Store_Val : LexTokenManager.Lex_String; Unused_Component_Data : ComponentManager.ComponentData; ------------------------------------------------------------------------- procedure Create_Implicit_String_Subtype (String_Length : in Maths.Value; Location : in Dictionary.Location; The_String_Subtype : out Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Location, --# String_Length & --# LexTokenManager.State from *, --# CommandLineData.Content, --# Dictionary.Dict, --# String_Length & --# The_String_Subtype from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Location, --# String_Length; is String_Subtype_Str : E_Strings.T; String_Subtype_Lex_Str : LexTokenManager.Lex_String; Index_Constraint, The_String_Subtype_Local, The_Array_Index : Dictionary.Symbol; begin -- First create an index subtype of the form positive__n Create_Implicit_Positive_Subtype (String_Length => String_Length, Location => Location, Index_Constraint => Index_Constraint); -- Create an ExaminerString of the form "String__n" where n is the string length String_Subtype_Str := E_Strings.Copy_String (Str => "String__"); -- The value of "n" will not exceed a size that can be printed 
within an ExaminerString -- so the conversion will not truncate here. E_Strings.Append_Examiner_String (E_Str1 => String_Subtype_Str, E_Str2 => Maths.ValueToString (String_Length)); -- Insert this name into the string table; either we add it an get the LexStr back or, -- if it is already there, we get the existing LexStr back -- Insert this name into the string table; either we add it and get the LexStr back or, -- if it is already there, we get the existing LexStr back LexTokenManager.Insert_Examiner_String (Str => String_Subtype_Str, Lex_Str => String_Subtype_Lex_Str); -- Look up type in Dictionary in case it has previously been added -- Note that we put these implicit subtype in Standard (where Positive and String themselves live) -- and that we declare them in proof rather than Ada context The_String_Subtype_Local := Dictionary.LookupItem (Name => String_Subtype_Lex_Str, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Dictionary.GetPredefinedPackageStandard), Context => Dictionary.ProofContext, Full_Package_Name => False); if Dictionary.Is_Null_Symbol (The_String_Subtype_Local) then -- not previously added, so we add a new subtype here Dictionary.Add_Array_Subtype (Name => String_Subtype_Lex_Str, Parent => Dictionary.GetPredefinedStringType, Parent_Reference => Location, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Location, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Dictionary.GetPredefinedPackageStandard), Context => Dictionary.ProofContext, Static => CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83, The_Subtype => The_String_Subtype_Local); --# accept Flow, 10, The_Array_Index, "Expected ineffective assignment to OK"; Dictionary.AddArrayIndex (TheArrayType => The_String_Subtype_Local, IndexType => Index_Constraint, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Location, TheArrayIndex => The_Array_Index); --# end accept; end if; The_String_Subtype := The_String_Subtype_Local; --# accept Flow, 33, The_Array_Index, "Expected to be neither referenced nor exported"; end Create_Implicit_String_Subtype; ----------------------------------------------------------------------------------- function Valid_Named_Number_Type (Sym : Dictionary.Symbol) return Dictionary.Symbol --# global in CommandLineData.Content; --# in Dictionary.Dict; -- This returns universal_real or universal_integer if a valid (according to tool -- variant) type is supplied or the unknown type mark if it not is Result : Dictionary.Symbol; begin case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => if Dictionary.IsUniversalIntegerType (Sym) then Result := Sym; elsif Dictionary.IsUniversalRealType (Sym) then Result := Sym; else Result := Dictionary.GetUnknownTypeMark; end if; when CommandLineData.SPARK95_Onwards => if Dictionary.TypeIsInteger (Sym) or Dictionary.TypeIsModular (Sym) then Result := Dictionary.GetUniversalIntegerType; elsif Dictionary.TypeIsReal (Sym) then Result := Dictionary.GetUniversalRealType; else Result := Dictionary.GetUnknownTypeMark; end if; end case; return Result; end Valid_Named_Number_Type; ------------------------------------------------------------------------- procedure Do_Identifier_List (Node, Exp_Node : in STree.SyntaxNode; Type_Node_Pos : in LexTokenManager.Token_Position; Type_Sym : in Dictionary.Symbol; Current_Scope : in Dictionary.Scopes; Exp_Is_Wellformed : in Boolean; Static : in Boolean; Store_Val : in LexTokenManager.Lex_String) 
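-- Declares each identifier in the list as a constant of Type_Sym; a name that already
-- denotes a deferred constant from the visible part is completed here (in a private part)
-- provided the types match, and any other clash is reported as a re-declaration.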
--# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# Exp_Is_Wellformed, --# Exp_Node, --# LexTokenManager.State, --# Node, --# Static, --# Store_Val, --# STree.Table, --# Type_Sym & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Exp_Is_Wellformed, --# Exp_Node, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# Static, --# Store_Val, --# STree.Table, --# Type_Node_Pos, --# Type_Sym; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.identifier_list and --# Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.expression; --# post STree.Table = STree.Table~; is Next_Node : STree.SyntaxNode; It : STree.Iterator; Ident_Str, Store_Val_Local : LexTokenManager.Lex_String; Sym, The_Constant : Dictionary.Symbol; OK_To_Add, Static_Local : Boolean; Type_Location, Ident_Location : Dictionary.Location; ------------------------------------------------------------------------- function Is_Deferred_Constant_Resolution (Sym : Dictionary.Symbol; Scope : Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; is begin return not Dictionary.Is_Declared (Item => Sym) and then Dictionary.Get_Visibility (Scope => Scope) = Dictionary.Privat and then Dictionary.IsDeferredConstant (Sym) and then Dictionary.GetRegion (Scope) = Dictionary.GetRegion (Dictionary.GetScope (Sym)); end Is_Deferred_Constant_Resolution; begin -- Do_Identifier_List OK_To_Add := False; Type_Location := Dictionary.Location'(Start_Position => Type_Node_Pos, End_Position => Type_Node_Pos); It := Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.expression and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.identifier and --# Next_Node = Get_Node (It); Ident_Str := Node_Lex_String (Node => Next_Node); Sym := Dictionary.LookupItem (Name => Ident_Str, Scope => Current_Scope, Context => Dictionary.ProofContext, Full_Package_Name => False); if Dictionary.Is_Null_Symbol (Sym) then OK_To_Add := True; elsif Is_Deferred_Constant_Resolution (Sym => Sym, Scope => Current_Scope) then if Dictionary.Types_Are_Equal (Left_Symbol => Type_Sym, Right_Symbol => Dictionary.GetType (Sym), Full_Range_Subtype => False) then STree.Set_Node_Lex_String (Sym => Sym, Node => Next_Node); OK_To_Add := True; else ErrorHandler.Semantic_Error (Err_Num => 22, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Ident_Str); end if; else -- already exists but is not a deferred constant completion if Dictionary.IsOwnVariable (Sym) or Dictionary.IsConstituent (Sym) then -- A common mistake - trying to complete an own variable with -- a constant declaration. Spot this to give a better error -- message here. 
ErrorHandler.Semantic_Error (Err_Num => 12, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Ident_Str); else ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Ident_Str); end if; end if; if OK_To_Add then Ident_Location := Dictionary.Location' (Start_Position => Node_Position (Node => Next_Node), End_Position => Node_Position (Node => Next_Node)); if Dictionary.IsPrivateTypeMark (Type_Sym, Current_Scope) then Store_Val_Local := Store_Val; Static_Local := False; elsif Dictionary.IsRecordTypeMark (Type_Sym, Current_Scope) or else Dictionary.IsProtectedTypeMark (Type_Sym) then Store_Val_Local := LexTokenManager.Null_String; Static_Local := False; else Store_Val_Local := Store_Val; Static_Local := Static; end if; Dictionary.Add_Constant_Declaration (Name => Ident_Str, Type_Mark => Type_Sym, Type_Reference => Type_Location, Value => Store_Val_Local, Exp_Is_Wellformed => Exp_Is_Wellformed, Exp_Node => STree.NodeToRef (Exp_Node), Static => Static_Local, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Ident_Location, Scope => Current_Scope, Context => Dictionary.ProgramContext, TheConstant => The_Constant); STree.Add_Node_Symbol (Node => Next_Node, Sym => The_Constant); end if; It := STree.NextNode (It); end loop; end Do_Identifier_List; begin -- Wf_Constant_Declaration Heap.Reset (The_Heap); Ident_Node := Child_Node (Current_Node => Node); -- ASSUME Ident_Node = identifier_list SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier_list, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier_list in Wf_Constant_Declaration"); Type_Node := Next_Sibling (Current_Node => Ident_Node); -- ASSUME Type_Node = type_mark OR expression if Syntax_Node_Type (Node => Type_Node) = SP_Symbols.expression then -- ASSUME Type_Node = expression Exp_Node := Type_Node; Type_Node := STree.NullNode; Type_Sym := Dictionary.GetUnknownTypeMark; elsif Syntax_Node_Type (Node => Type_Node) = SP_Symbols.type_mark then -- ASSUME Type_Node = type_mark Exp_Node := Next_Sibling (Current_Node => Type_Node); Wf_Type_Mark (Node => Type_Node, Current_Scope => Current_Scope, Context => Dictionary.ProgramContext, Type_Sym => Type_Sym); if Dictionary.Is_Unconstrained_Array_Type_Mark (Type_Sym, Current_Scope) and then not Dictionary.IsPredefinedStringType (Type_Sym) then -- allow string constants ErrorHandler.Semantic_Error (Err_Num => 39, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Type_Node), Id_Str => LexTokenManager.Null_String); elsif Dictionary.IsPredefinedSuspensionObjectType (Type_Sym) or else Dictionary.IsProtectedTypeMark (Type_Sym) then ErrorHandler.Semantic_Error (Err_Num => 903, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Type_Node), Id_Str => LexTokenManager.Null_String); elsif Dictionary.TypeIsGeneric (Type_Sym) then ErrorHandler.Semantic_Error (Err_Num => 653, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Type_Node), Id_Str => LexTokenManager.Null_String); end if; else Exp_Node := STree.NullNode; Type_Sym := Dictionary.NullSymbol; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Type_Node = type_mark OR expression in Wf_Constant_Declaration"); end if; -- ASSUME Type_Node = type_mark OR NULL SystemErrors.RT_Assert (C => Type_Node = STree.NullNode or else Syntax_Node_Type (Node => 
Type_Node) = SP_Symbols.type_mark, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Type_Node = type_mark OR NULL in Wf_Constant_Declaration"); -- ASSUME Exp_Node = expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Exp_Node = expression in Wf_Constant_Declaration"); SeqAlgebra.CreateSeq (The_Heap, Unwanted_Seq); ComponentManager.Initialise (Unused_Component_Data); --# accept Flow, 10, Unused_Component_Data, "Expected ineffective assignment"; Walk_Expression_P.Walk_Expression (Exp_Node => Exp_Node, Scope => Current_Scope, Type_Context => Type_Sym, Context_Requires_Static => False, Ref_Var => Unwanted_Seq, Result => Exp_Type, Component_Data => Unused_Component_Data, The_Heap => The_Heap); --# end accept; SeqAlgebra.DisposeOfSeq (The_Heap, Unwanted_Seq); Maths.StorageRep (Exp_Type.Value, Store_Val); -- scalar value if needed later if Type_Node = STree.NullNode then -- must be a named number if Exp_Type.Is_ARange then ErrorHandler.Semantic_Error (Err_Num => 114, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); else if not Exp_Type.Is_Constant then ErrorHandler.Semantic_Error (Err_Num => 37, Reference => 13, Position => Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); end if; Type_Sym := Valid_Named_Number_Type (Sym => Exp_Type.Type_Symbol); if Dictionary.IsUnknownTypeMark (Type_Sym) then ErrorHandler.Semantic_Error (Err_Num => 38, Reference => 10, Position => Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); end if; end if; else -- end of named number checks -- If it's a constant of a constrained String subtype like -- C : constant String_2 := "xx"; -- or a String constant constrained by initialization, like -- C : constant String := "Wibble"; -- then we need to grab the value of the initializing expression returned -- from WalkExpression and make sure it gets stored in the Dictionary. if Dictionary.IsPredefinedStringType (Dictionary.GetRootType (Type_Sym)) then -- If it's constrained by initialization, then we also need to create an implicit -- subtype for it, thus: if Dictionary.IsPredefinedStringType (Type_Sym) and then not Maths.HasNoValue (Exp_Type.Range_RHS) then -- but only create subtype if range known -- We have a constant of type string, implicitly constrained by its initializing -- string literal. In this case we create a string subtype of the right length -- and substitute this subtype for string before adding the constant. Create_Implicit_String_Subtype (String_Length => Exp_Type.Range_RHS, Location => Dictionary.Location'(Start_Position => Node_Position (Node => Type_Node), End_Position => Node_Position (Node => Type_Node)), The_String_Subtype => Type_Sym); end if; -- Grab the value of the initializing expression return from WalkExpression and -- record in Store_Val. 
This is used to populate the Dictionary later on in -- Do_Identifier_List Store_Val := Exp_Type.String_Value; end if; SystemErrors.RT_Assert (C => Dictionary.Is_Null_Symbol (Exp_Type.Type_Symbol) or else Dictionary.IsTypeMark (Exp_Type.Type_Symbol), Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Exp_Type.Type_Symbol to be a type in Wf_Constant_Declaration"); Assignment_Check (Position => Node_Position (Node => Exp_Node), Scope => Current_Scope, Target_Type => Type_Sym, Exp_Result => Exp_Type); if not Exp_Type.Is_Constant then ErrorHandler.Semantic_Error (Err_Num => 37, Reference => 13, Position => Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); end if; end if; Do_Identifier_List (Node => Ident_Node, Exp_Node => Exp_Node, Type_Node_Pos => Node_Position (Node => Type_Node), Type_Sym => Type_Sym, Current_Scope => Current_Scope, Exp_Is_Wellformed => not Exp_Type.Errors_In_Expression, Static => Dictionary.IsStatic (Type_Sym, Current_Scope) and then Exp_Type.Is_Static, Store_Val => Store_Val); Heap.ReportUsage (The_Heap); end Wf_Constant_Declaration; ------------------------------------------------------------------- -- Note: Enclosing_Unit_Scope represents the scope of the enclosing program -- unit, whilst Declaration_Scope represents the scope of the current -- declaration. These are normally the same. The exception is the special -- case of a declaration within a protected type, where they may differ. -- For example, the declaration could be in the private part of the PT but -- the PT itself could be in the visible part of the enclosing unit. procedure Wf_Variable_Declaration (Node : in STree.SyntaxNode; Enclosing_Unit_Scope : in Dictionary.Scopes; Declaration_Scope : in Dictionary.Scopes; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Declaration_Scope, --# Dictionary.Dict, --# Enclosing_Unit_Scope, --# LexTokenManager.State, --# Node, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Declaration_Scope, --# Dictionary.Dict, --# Enclosing_Unit_Scope, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.variable_declaration; --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------- procedure Wf_Full_Type_Declaration (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out 
ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.full_type_declaration; --# post STree.Table = STree.Table~; is separate; -------------------------------------------------------------------- procedure Wf_Subtype_Declaration (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.subtype_declaration; --# post STree.Table = STree.Table~; is separate; begin -- Wf_Basic_Declaration Node_To_Check := Child_Node (Current_Node => Node); -- ASSUME Node_To_Check = object_declaration OR full_type_declaration OR subtype_declaration if Syntax_Node_Type (Node => Node_To_Check) = SP_Symbols.object_declaration then -- ASSUME Node_To_Check = object_declaration Node_To_Check := Child_Node (Current_Node => Node_To_Check); -- ASSUME Node_To_Check = constant_declaration OR variable_declaration if Syntax_Node_Type (Node => Node_To_Check) = SP_Symbols.variable_declaration then -- ASSUME Node_To_Check = variable_declaration -- In the case of a basic variable declaration the scope of the -- enclosing program unit and the scope of the declaration are -- the same, hence Current_Scope is used for both parameters. 
Wf_Variable_Declaration (Node => Node_To_Check, Enclosing_Unit_Scope => Current_Scope, Declaration_Scope => Current_Scope, The_Heap => The_Heap); elsif Syntax_Node_Type (Node => Node_To_Check) = SP_Symbols.constant_declaration then -- ASSUME Node_To_Check = constant_declaration Wf_Constant_Declaration (Node => Node_To_Check, Current_Scope => Current_Scope, The_Heap => The_Heap); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node_To_Check = constant_declaration OR variable_declaration in Wf_Basic_Declaration"); end if; elsif Syntax_Node_Type (Node => Node_To_Check) = SP_Symbols.full_type_declaration then -- ASSUME Node_To_Check = full_type_declaration Wf_Full_Type_Declaration (Node => Node_To_Check, Scope => Current_Scope, Component_Data => Component_Data, The_Heap => The_Heap); elsif Syntax_Node_Type (Node => Node_To_Check) = SP_Symbols.subtype_declaration then -- ASSUME Node_To_Check = subtype_declaration Wf_Subtype_Declaration (Node => Node_To_Check, Scope => Current_Scope, The_Heap => The_Heap); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node_To_Check = object_declaration OR full_type_declaration OR " & "subtype_declaration in Wf_Basic_Declaration"); end if; end Wf_Basic_Declaration; spark-2012.0.deb/examiner/cstacks.ads0000644000175000017500000000737411753202336016410 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Cell_Storage; with Cells; use type Cells.Cell_Kind; --# inherit Cells, --# Cell_Storage, --# SPARK_IO, --# Statistics; package CStacks is --------------------------------------------------------------- -- A Stack of Cells.Cell values, built within a Cell Heap. 
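-- A Stack value is simply the cell number of its top element (see the private part),
-- so the stack's contents are held within the cell heap itself rather than in a
-- separate table.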
--------------------------------------------------------------- type Stack is private; function IsEmpty (S : Stack) return Boolean; function Top (Heap : Cells.Heap_Record; S : Stack) return Cells.Cell; --# pre not IsEmpty(S); procedure CreateStack (S : out Stack); --# derives S from ; --# post IsEmpty (S); procedure Pop (Heap : in out Cells.Heap_Record; S : in out Stack); --# derives Heap, --# S from Heap, --# S; --# pre not IsEmpty(S); -- New procedure (shorthand for Top followed by Pop) procedure PopOff (Heap : in out Cells.Heap_Record; S : in out Stack; C : out Cells.Cell); --# derives C, --# Heap, --# S from Heap, --# S; --# pre not IsEmpty(S); procedure Push (Heap : in out Cells.Heap_Record; CellName : in Cells.Cell; S : in out Stack); --# global in out Statistics.TableUsage; --# derives Heap from *, --# CellName, --# S & --# S from Heap & --# Statistics.TableUsage from *, --# Heap; --# post not IsEmpty (S); -- Leave Stack unchanged but return a "new stack" that starts one item lower function NonDestructivePop (Heap : Cells.Heap_Record; S : Stack) return Stack; -- New function which peeks down stack to find kind the first -- IncompleteAggregate cell. Used in Build[Annotation]ExpnDAG when processing aggregates. function FindAggregateCell (Heap : Cells.Heap_Record; S : Stack) return Cells.Cell; --# return M => Cells.Get_Kind (Heap, M) = Cell_Storage.Incomplete_Aggregate; -- AND M is the FIRST such element in S. -- New function which peeks down stack to find kind the first -- AggregateCounter cell. Used in Build[Annotation]ExpnDAG when processing aggregates. function FindAggregateCounter (Heap : Cells.Heap_Record; S : Stack) return Cells.Cell; --# return M => Cells.Get_Kind (Heap, M) = Cell_Storage.Aggregate_Counter; -- AND M is the FIRST such element in S. private -- A Stack is the Cell Number of the top of the stack type Stack is range 0 .. Cell_Storage.Cell'Last; --# assert Stack'Base is Integer; end CStacks; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_primary.adb0000644000175000017500000006342511753202336023426 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Walk_Expression_P) procedure Wf_Primary (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Ref_Var : in SeqAlgebra.Seq; E_Stack : in out Exp_Stack.Exp_Stack_Type; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) is Type_Info : Sem.Exp_Record; Sym : Dictionary.Symbol; Child_Primary : STree.SyntaxNode; ----------------------------------------------------------------- procedure Check_Readability (Sym : in Dictionary.Symbol; Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# SPARK_IO.File_Sys, --# Sym; is Enclosing_Sym : Dictionary.Symbol; begin Enclosing_Sym := Dictionary.GetMostEnclosingObject (Sym); if CommandLineData.Content.Language_Profile = CommandLineData.SPARK83 and then Dictionary.IsSubprogramParameter (Enclosing_Sym) and then Dictionary.GetSubprogramParameterMode (Enclosing_Sym) = Dictionary.OutMode then ErrorHandler.Semantic_Error (Err_Num => 171, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (Enclosing_Sym)); end if; end Check_Readability; ----------------------------------------------------------------- procedure Check_Invalid_Stream_Use (Primary_Node : in STree.SyntaxNode; Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; E_Stack : in Exp_Stack.Exp_Stack_Type) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Primary_Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Sym & --# STree.Table from *, --# Dictionary.Dict, --# E_Stack, --# Primary_Node, --# Scope, --# Sym; --# pre (STree.Syntax_Node_Type (Primary_Node, STree.Table) = SP_Symbols.primary) and --# (Dictionary.Is_Null_Symbol (Sym) or --# Dictionary.IsFunction (Sym, Dictionary.Dict) or --# Dictionary.IsOwnVariableOrConstituentWithMode (Sym, Dictionary.Dict)); --# post STree.Table = STree.Table~; is Current_Node : STree.SyntaxNode; Assignment_Or_Return, Parameter_To_Unchecked_Conversion, Constant_Declaration, Has_Branches : Boolean; Error_Number : Natural := 0; begin if not Dictionary.Is_Null_Symbol (Sym) then -- a stream has been referenced -- now start search for illegal uses -- track up syntax tree until expression or simple_expression -- below arange is found Current_Node := Primary_Node; -- ASSUME Current_Node = primary loop --# assert STree.Table = STree.Table~; exit when (STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.expression) or else (STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.simple_expression and then STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => Current_Node)) = SP_Symbols.arange); Current_Node := STree.Parent_Node (Current_Node => Current_Node); end loop; -- ASSUME Current_Node = expression OR simple_expression -- to be valid the expression node just found must be below an assignment or 
return Assignment_Or_Return := STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.expression and then (STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => Current_Node)) = SP_Symbols.return_statement or else STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => Current_Node)) = SP_Symbols.assignment_statement); -- or, the single actual parameter to an instance of unchecked conversion. -- If the expression is below a name_argument_list then we see if there is a record representing the -- not-yet-complete invocation of an unchecked_conversion, thus: Parameter_To_Unchecked_Conversion := STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.expression and then STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => STree.Parent_Node (Current_Node => Current_Node))) = SP_Symbols.name_argument_list and then not Exp_Stack.Is_Empty (Stack => E_Stack) and then Dictionary.IsAnUncheckedConversion (Exp_Stack.Top (Stack => E_Stack).Other_Symbol); Constant_Declaration := STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.expression and then STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => Current_Node)) = SP_Symbols.constant_declaration; -- track back down tree to primary node checking for right branches which show -- some kind of expression which is not allowed in this context Has_Branches := False; loop --# assert STree.Table = STree.Table~; if STree.Next_Sibling (Current_Node => Current_Node) /= STree.NullNode then Has_Branches := True; exit; end if; exit when Current_Node = Primary_Node; Current_Node := STree.Child_Node (Current_Node => Current_Node); end loop; if Dictionary.IsPredefinedRealTimeClockOperation (Sym) then -- Special case for Ada.Real_Time.Clock. -- This function MUST be used -- 1. directly if Has_Branches -- 2. in a library level constant declaration. or else (Dictionary.IsLibraryLevel (Scope) and then not Constant_Declaration) -- 3. in an assignment or return statement.. 
or else (not Dictionary.IsLibraryLevel (Scope) and then not Assignment_Or_Return) then Error_Number := 960; end if; elsif Has_Branches or else not (Assignment_Or_Return or else Parameter_To_Unchecked_Conversion) then -- illegal use of stream variable or function if Dictionary.IsFunction (Sym) then Error_Number := 715; else -- variable Error_Number := 716; end if; end if; if Error_Number = 0 then -- put symbol of stream into primary node of syntax tree for -- use by vcg-producevcs-buildgraph-modelassignmentstatement STree.Add_Node_Symbol (Node => Primary_Node, Sym => Sym); else ErrorHandler.Semantic_Error_Sym (Err_Num => Error_Number, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Primary_Node), Sym => Sym, Scope => Scope); end if; end if; end Check_Invalid_Stream_Use; ----------------------------------------------------------------- procedure Check_Invalid_Protected_Function_Use (Primary_Node : in STree.SyntaxNode; Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Primary_Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Sym; --# pre STree.Syntax_Node_Type (Primary_Node, STree.Table) = SP_Symbols.primary; is Current_Node : STree.SyntaxNode; Assignment_Or_Return, Has_Branches : Boolean; begin if not Dictionary.Is_Null_Symbol (Sym) then -- a protected state has been referenced -- track up syntax tree until expression found Current_Node := Primary_Node; -- ASSUME Current_Node = primary while STree.Syntax_Node_Type (Node => Current_Node) /= SP_Symbols.expression loop --# assert STree.Syntax_Node_Type (Current_Node, STree.Table) /= SP_Symbols.expression; Current_Node := STree.Parent_Node (Current_Node => Current_Node); end loop; -- ASSUME Current_Node = expression -- to be valid the expression node just found must be below an assignment or return Assignment_Or_Return := STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => Current_Node)) = SP_Symbols.return_statement or else STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => Current_Node)) = SP_Symbols.assignment_statement; -- track back down tree to primary node checking for right branches which show -- some kind of expression which is not allowed in this context Has_Branches := False; loop if STree.Next_Sibling (Current_Node => Current_Node) /= STree.NullNode then Has_Branches := True; exit; end if; exit when Current_Node = Primary_Node; Current_Node := STree.Child_Node (Current_Node => Current_Node); end loop; if Has_Branches or else not Assignment_Or_Return then ErrorHandler.Semantic_Error_Sym (Err_Num => 725, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Primary_Node), Sym => Sym, Scope => Scope); end if; end if; end Check_Invalid_Protected_Function_Use; ----------------------------------------------------------------- function Root_Wise_Conversion_Required (Node : STree.SyntaxNode; Actual_Sym : Dictionary.Symbol; E_Stack : Exp_Stack.Exp_Stack_Type) return Boolean --# global in Dictionary.Dict; --# in STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.primary; is Result : Boolean := False; Possible_Function_Sym : Dictionary.Symbol; Controlling_Type 
: Dictionary.Symbol; ----------------------------------------------------------------- function Is_Direct_Function_Parameter (Node : STree.SyntaxNode) return Boolean --# global in STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.primary; is Local_Node : STree.SyntaxNode; Result : Boolean := False; --------------- function Valid_Positional_Argument (Node : STree.SyntaxNode) return Boolean --# global in STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.expression; is begin return STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => Node)) = SP_Symbols.positional_argument_association; end Valid_Positional_Argument; --------------- function Valid_Named_Argument (Node : STree.SyntaxNode) return Boolean --# global in STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.expression; is begin return STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => Node)) = SP_Symbols.named_argument_association; end Valid_Named_Argument; begin -- Is_Direct_Function_Parameter Local_Node := Node; loop -- Success case is when we find an expression which has an argument -- association above it. In any case we don't continue looking once -- an expression has been found if STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.expression then -- ASSUME Local_Node = expression Result := Valid_Positional_Argument (Node => Local_Node) or else Valid_Named_Argument (Node => Local_Node); exit; end if; -- failure condition: right branches exist showing that primary -- is part of an expresssion exit when STree.Next_Sibling (Current_Node => Local_Node) /= STree.NullNode; -- failure condition: we are part of a right branch showing that primary -- is part of an expresssion exit when STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => STree.Parent_Node (Current_Node => Local_Node))) /= STree.NullNode; Local_Node := STree.Parent_Node (Current_Node => Local_Node); end loop; return Result; end Is_Direct_Function_Parameter; begin -- Root_Wise_Conversion_Required -- conversion is required iff -- (1) Actual_Sym is an object or subcomponent of an object -- (2) If it is a direct function parameter (i.e. not an expression) -- (3) This primary is being processed in the context of function parameter list -- (4) The function has a controlling type -- (5) The type of Actual_Sym is an extension of the controlling type if Dictionary.IsVariableOrSubcomponent (Actual_Sym) and then Is_Direct_Function_Parameter (Node => Node) and then not Exp_Stack.Is_Empty (Stack => E_Stack) then -- there may function info available Possible_Function_Sym := Exp_Stack.Top (Stack => E_Stack).Other_Symbol; if Dictionary.IsFunction (Possible_Function_Sym) then Controlling_Type := Dictionary.GetSubprogramControllingType (Possible_Function_Sym); Result := not Dictionary.Is_Null_Symbol (Controlling_Type) and then Dictionary.IsAnExtensionOf (Controlling_Type, Dictionary.GetType (Actual_Sym)); end if; end if; return Result; end Root_Wise_Conversion_Required; --------------------------------------------------------------- -- this function checks if the symbol passed is a protected variable or -- a function which globally accesses a protected variable. 
If it is and -- the protected variable is not owned by the region we are calling from -- it returns the symbol otherwise it returns NullSymbol function Protected_References_By (Sym : Dictionary.Symbol; Scope : Dictionary.Scopes) return Dictionary.Symbol --# global in Dictionary.Dict; is separate; begin -- Wf_Primary Exp_Stack.Pop (Item => Type_Info, Stack => E_Stack); Sym := Type_Info.Other_Symbol; case Type_Info.Sort is when Sem.Is_Unknown => Type_Info := Sem.Unknown_Type_Record; when Sem.Type_Result => null; when Sem.Is_Package => Type_Info := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 5, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => STree.Last_Child_Of (Start_Node => Node)), Id_Str => Dictionary.GetSimpleName (Sym)); when Sem.Is_Function => ErrorHandler.Semantic_Error (Err_Num => 3, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => STree.Last_Child_Of (Start_Node => Node)), Id_Str => Dictionary.GetSimpleName (Sym)); Type_Info.Is_Static := False; Type_Info.Is_Constant := False; Type_Info.Is_ARange := False; Type_Info.Errors_In_Expression := True; when Sem.Is_Object => Type_Info.Is_ARange := False; if STree.Syntax_Node_Type (Node => Node) = SP_Symbols.primary then -- ASSUME Node = primary Check_Readability (Sym => Sym, Node_Pos => STree.Node_Position (Node => Node)); if Root_Wise_Conversion_Required (Node => Node, Actual_Sym => Sym, E_Stack => E_Stack) then -- Actual parameter is a variable so we can select the subset of subcomponents -- applicable to the root view required. -- We can't convert actual to the appropriate subcomponent unless we add them first Sem.Add_Record_Sub_Components (Record_Var_Sym => Sym, Record_Type_Sym => Dictionary.GetType (Sym), Component_Data => Component_Data, The_Heap => The_Heap); Sym := Sem.Convert_Tagged_Actual (Actual => Sym, Tagged_Parameter_Sym => Dictionary.GetSubprogramControllingType (Exp_Stack.Top (Stack => E_Stack).Other_Symbol)); SystemErrors.RT_Assert (C => Dictionary.Is_Null_Symbol (Type_Info.Stream_Symbol) or else Dictionary.IsFunction (Type_Info.Stream_Symbol) or else Dictionary.IsOwnVariableOrConstituentWithMode (Type_Info.Stream_Symbol), Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Unexpected Type_Info.Stream_Symbol in Wf_Primary"); end if; if Dictionary.IsVariableOrSubcomponent (Sym) then SeqAlgebra.AddMember (The_Heap, Ref_Var, Natural (Dictionary.SymbolRef (Sym))); end if; elsif STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_primary then -- ASSUME Node = annotation_primary Type_Info.Is_Static := Dictionary.IsStatic (Sym, Scope); Type_Info.Is_Constant := Dictionary.Is_Constant (Sym); end if; when Sem.Is_Type_Mark => if Dictionary.IsScalarTypeMark (Sym, Scope) then Type_Info.Is_Static := Dictionary.IsStatic (Sym, Scope); Type_Info.Is_Constant := True; Type_Info.Is_ARange := True; Type_Info.Value := Maths.ValueRep (Dictionary.GetScalarAttributeValue (False, -- don't want base type LexTokenManager.First_Token, Sym)); Type_Info.Range_RHS := Maths.ValueRep (Dictionary.GetScalarAttributeValue (False, -- don't want base type LexTokenManager.Last_Token, Sym)); else Type_Info := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 5, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => Dictionary.GetSimpleName (Sym)); end if; when Sem.Is_Parameter_Name => null; -- should never occur end case; -- if a primary references an external stream variable we need to check that it -- has done 
so in a simple assignment statement only (no branches in syntax tree) if STree.Syntax_Node_Type (Node => Node) = SP_Symbols.primary then -- ASSUME Node = primary Check_Invalid_Stream_Use (Primary_Node => Node, Sym => Type_Info.Stream_Symbol, Scope => Scope, E_Stack => E_Stack); -- similarly check that protected function is only used directly in assignment statement Check_Invalid_Protected_Function_Use (Primary_Node => Node, Sym => Protected_References_By (Sym => Type_Info.Other_Symbol, Scope => Scope), Scope => Scope); end if; Type_Info.Sort := Sem.Type_Result; Type_Info.Param_Count := 0; Type_Info.Param_List := Lists.Null_List; -- Normally we set OtherSymbol to null at this point because we have finished with it; -- however, if the OtherSymbol represents an in instantiation of unchecked conversion then -- we leave it alone. This allows wf_assign to know that the assigned expression is -- an unchecked conversion and for it to seed the syntax tree so that the VCG knows as well. -- We need to do this in order to suppress RTCs associated with assignment of unchecked -- conversions to an object of the same subtype. The process is identical to that used -- for the assignment of external in variables. -- -- Similarly, we need to keep the component information when processing -- the assignment of an in stream variable, in case it is marked 'Always_Valid if (STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_primary or else not Dictionary.IsAnUncheckedConversion (Type_Info.Other_Symbol)) and then not (Type_Info.Is_AVariable and then Dictionary.GetOwnVariableOrConstituentMode (Type_Info.Variable_Symbol) = Dictionary.InMode) then Type_Info.Other_Symbol := Dictionary.NullSymbol; end if; if STree.Syntax_Node_Type (Node => Node) = SP_Symbols.primary then -- ASSUME Node = primary Type_Info.Arg_List_Found := False; Child_Primary := STree.Child_Node (Current_Node => Node); -- ASSUME Child_Primary = numeric_literal OR character_literal OR string_literal OR name OR -- qualified_expression OR expression OR attribute if STree.Syntax_Node_Type (Node => Child_Primary) = SP_Symbols.numeric_literal or else STree.Syntax_Node_Type (Node => Child_Primary) = SP_Symbols.character_literal or else STree.Syntax_Node_Type (Node => Child_Primary) = SP_Symbols.string_literal or else STree.Syntax_Node_Type (Node => Child_Primary) = SP_Symbols.qualified_expression or else STree.Syntax_Node_Type (Node => Child_Primary) = SP_Symbols.expression or else STree.Syntax_Node_Type (Node => Child_Primary) = SP_Symbols.attribute then -- ASSUME Child_Primary = numeric_literal OR character_literal OR string_literal OR qualified_expression OR -- expression OR attribute Type_Info.Is_AVariable := False; Type_Info.Is_An_Entire_Variable := False; elsif STree.Syntax_Node_Type (Node => Child_Primary) /= SP_Symbols.name then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child_Primary = numeric_literal OR character_literal OR string_literal OR name OR " & "qualified_expression OR expression OR attribute in Wf_Primary"); end if; end if; Exp_Stack.Push (X => Type_Info, Stack => E_Stack); --# accept F, 601, ErrorHandler.Error_Context, The_Heap, "False coupling through SPARK_IO" & --# F, 601, E_Stack, The_Heap, "False coupling through SPARK_IO" & --# F, 601, E_Stack, Component_Data, "False coupling through SPARK_IO"; end Wf_Primary; spark-2012.0.deb/examiner/sem-unexpected_initialization.adb0000644000175000017500000000305611753202336022762 0ustar 
eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) function Unexpected_Initialization (Sym : Dictionary.Symbol) return Boolean is begin return -- not an initialized own variable ((Dictionary.IsOwnVariable (Sym) and then not Dictionary.OwnVariableIsInitialized (Sym)) or else -- not an initialized constituent (Dictionary.IsConstituent (Sym) and then not Dictionary.OwnVariableIsInitialized (Dictionary.GetSubject (Sym)))) and then -- not moded (this last limb to suppress error 333 for stream vars Dictionary.GetOwnVariableOrConstituentMode (Sym) = Dictionary.DefaultMode; end Unexpected_Initialization; spark-2012.0.deb/examiner/structures.adb0000644000175000017500000001413311753202336017146 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
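-- Illustrative sketch (not part of the original source): how the predicate
-- Unexpected_Initialization above classifies two hypothetical own variables,
-- each shown with the package annotation that would declare it.
--
--    --# own State;       -- unmoded and not named in an initializes clause:
--                         --   Unexpected_Initialization (State) = True
--    --# own in Sensor;   -- moded external (stream) variable, so its mode is
--                         --   not DefaultMode: Unexpected_Initialization = False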
-- --============================================================================= with CStacks; package body Structures is procedure CopyStructure (Heap : in out Cells.Heap_Record; Root : in Cells.Cell; RootCopy : out Cells.Cell) is CopiedCellStack, UnexploredCellStack : CStacks.Stack; TopCell, TopCellCopy : Cells.Cell; ------------------------------------------- procedure Mark (C : in Cells.Cell) --# global in out CopiedCellStack; --# in out Heap; --# in out Statistics.TableUsage; --# in out UnexploredCellStack; --# derives CopiedCellStack, --# UnexploredCellStack from *, --# C, --# CopiedCellStack, --# Heap & --# Heap, --# Statistics.TableUsage from *, --# C, --# CopiedCellStack, --# Heap, --# UnexploredCellStack; is begin if not Cells.Is_Null_Cell (C) then if not Cells.Is_Marked (Heap, C) then Cells.Mark_Cell (Heap, C); CStacks.Push (Heap, C, CopiedCellStack); CStacks.Push (Heap, C, UnexploredCellStack); Cells.Create_Copy (Heap, C); end if; end if; end Mark; ---------------------------------------------- begin -- CopyStructure; CStacks.CreateStack (UnexploredCellStack); CStacks.CreateStack (CopiedCellStack); Mark (Root); RootCopy := Cells.Get_Copy (Heap, Root); loop exit when CStacks.IsEmpty (UnexploredCellStack); TopCell := CStacks.Top (Heap, UnexploredCellStack); CStacks.Pop (Heap, UnexploredCellStack); Mark (Cells.Get_A_Ptr (Heap, TopCell)); Mark (Cells.Get_B_Ptr (Heap, TopCell)); Mark (Cells.Get_C_Ptr (Heap, TopCell)); end loop; loop exit when CStacks.IsEmpty (CopiedCellStack); TopCell := CStacks.Top (Heap, CopiedCellStack); TopCellCopy := Cells.Get_Copy (Heap, TopCell); if not Cells.Is_Null_Cell (Cells.Get_A_Ptr (Heap, TopCell)) then Cells.Set_A_Ptr (Heap, TopCellCopy, Cells.Get_Copy (Heap, Cells.Get_A_Ptr (Heap, TopCell))); end if; if not Cells.Is_Null_Cell (Cells.Get_B_Ptr (Heap, TopCell)) then Cells.Set_B_Ptr (Heap, TopCellCopy, Cells.Get_Copy (Heap, Cells.Get_B_Ptr (Heap, TopCell))); end if; if not Cells.Is_Null_Cell (Cells.Get_C_Ptr (Heap, TopCell)) then Cells.Set_C_Ptr (Heap, TopCellCopy, Cells.Get_Copy (Heap, Cells.Get_C_Ptr (Heap, TopCell))); end if; Cells.UnMark_Cell (Heap, TopCell); CStacks.Pop (Heap, CopiedCellStack); end loop; end CopyStructure; -------------------------------------------------------------------------- procedure DisposeOfStructure (Heap : in out Cells.Heap_Record; Root : in Cells.Cell) is DefunctCellStack, UnexploredCellStack : CStacks.Stack; TopCell : Cells.Cell; ----------------------------------------------- procedure MarkAndPush (C : in Cells.Cell) --# global in out DefunctCellStack; --# in out Heap; --# in out Statistics.TableUsage; --# in out UnexploredCellStack; --# derives DefunctCellStack, --# Statistics.TableUsage, --# UnexploredCellStack from *, --# C, --# DefunctCellStack, --# Heap & --# Heap from *, --# C, --# DefunctCellStack, --# UnexploredCellStack; is begin if not Cells.Is_Null_Cell (C) then if not Cells.Is_Marked (Heap, C) then Cells.Mark_Cell (Heap, C); CStacks.Push (Heap, C, DefunctCellStack); CStacks.Push (Heap, C, UnexploredCellStack); end if; end if; end MarkAndPush; --------------------------------------------------- begin -- DisposeOfStructure; CStacks.CreateStack (UnexploredCellStack); CStacks.CreateStack (DefunctCellStack); MarkAndPush (Root); loop exit when CStacks.IsEmpty (UnexploredCellStack); TopCell := CStacks.Top (Heap, UnexploredCellStack); CStacks.Pop (Heap, UnexploredCellStack); MarkAndPush (Cells.Get_A_Ptr (Heap, TopCell)); MarkAndPush (Cells.Get_B_Ptr (Heap, TopCell)); MarkAndPush (Cells.Get_C_Ptr (Heap, TopCell)); 
end loop; loop exit when CStacks.IsEmpty (DefunctCellStack); Cells.Dispose_Of_Cell (Heap, CStacks.Top (Heap, DefunctCellStack)); CStacks.Pop (Heap, DefunctCellStack); end loop; end DisposeOfStructure; end Structures; spark-2012.0.deb/examiner/completecheck.ads0000644000175000017500000001503711753202335017555 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- CompleteCheck -- -- Purpose: -- This file implements the completeness checker component described in -- S.P0468.41.1 and S.P0468.50.3, as part of work package S.P0468.47.1 -- -- The completeness checker supplies an ADT which keeps track of the elements -- of a type which have been defined in an array aggregate or case -- statement. -- -- Clients: -- Most notable Sem.Compunit and all its subunits that handle -- Aggregates and Case Statements. -- -- Use: -- 1. Initialization -- 2. Indicate that a single element has been seen -- 3. Indicate that a range of elements has been seen -- Repeat 2 - 3 as necessary, then -- 4. Indicate that an others clause has been seen -- 5. Check whether the whole type has been covered -- -- Extension: -- This package is currently limited to checking sets whose -- cardinality does not exceed ExaminerConstants.CompleteCheckSize. -- -- A future implementation should remove this restriction. -------------------------------------------------------------------------------- with ExaminerConstants; --# inherit ExaminerConstants, --# Maths; package CompleteCheck is -- the following are the types used to return status values to -- the calling environment type TypElementState is (Seen, NotSeen); type TypOverlapState is (Overlap, NoOverlap); type TypRangeState is (RangeTooBig, RangeDoesFit); type TypCompleteState is (Complete, Incomplete); subtype ElementArrayIndex is Integer range 0 .. 
ExaminerConstants.CompleteCheckSize - 1; type ElementArray is array (ElementArrayIndex) of Boolean; pragma Pack (ElementArray); type T is record LowerBound : Integer; ActualUpperBound : Integer; OthersClause : TypElementState; Elements : ElementArray; Undeterminable : Boolean; end record; NullT : constant T := T' (LowerBound => 0, ActualUpperBound => 0, OthersClause => NotSeen, Elements => ElementArray'(others => False), Undeterminable => False); ------------------------------------------------------------------------------ -- initialize a data type to check completeness -- corresponds to the Init schema in the Z spec procedure Init (Data : out T; RangeFrom : in Integer; RangeTo : in Integer; RangeState : out TypRangeState); --# derives Data, --# RangeState from RangeFrom, --# RangeTo; --# pre RangeFrom <= RangeTo; --# post ((RangeState = RangeDoesFit) <-> --# (RangeTo - RangeFrom + 1) <= ExaminerConstants.CompleteCheckSize) --# and ((RangeState = RangeTooBig) <-> --# (RangeTo - RangeFrom + 1) > ExaminerConstants.CompleteCheckSize) --# and (Data.ActualUpperBound - Data.LowerBound < --# ExaminerConstants.CompleteCheckSize); ------------------------------------------------------------------------------ -- indicate that a single element has been seen -- corresponds to the SeenElement schema. -- The OutOfRangeSeen flag indicates whether part or all of the range -- imported is outside the range over which the completeness checker is -- operating procedure SeenElement (Data : in out T; ElementNum : in Integer; OutOfRangeSeen : out Boolean; OverlapState : out TypOverlapState); --# derives Data, --# OutOfRangeSeen, --# OverlapState from Data, --# ElementNum; --# pre (Data.ActualUpperBound - Data.LowerBound < ExaminerConstants.CompleteCheckSize); --# post (Data.ActualUpperBound - Data.LowerBound < ExaminerConstants.CompleteCheckSize); ------------------------------------------------------------------------------ -- indicate that a range has been seen -- corresponds to the SeenRange schema. 
-- The OutOfRangeSeen flag indicates whether part or all of the range -- imported is outside the range over which the completeness checker is -- operating procedure SeenRange (Data : in out T; RangeFrom : in Integer; RangeTo : in Integer; OutOfRangeSeen : out Boolean; OverlapState : out TypOverlapState); --# derives Data, --# OutOfRangeSeen, --# OverlapState from Data, --# RangeFrom, --# RangeTo; --# pre (RangeFrom <= RangeTo) and --# (Data.ActualUpperBound - Data.LowerBound < ExaminerConstants.CompleteCheckSize); --# post (Data.ActualUpperBound - Data.LowerBound < ExaminerConstants.CompleteCheckSize); ------------------------------------------------------------------------------ -- indicate that an others clause has been seen -- corresponds to the SeenOthers schema procedure SeenOthers (Data : in out T); --# derives Data from *; --# pre (Data.ActualUpperBound - Data.LowerBound < ExaminerConstants.CompleteCheckSize); --# post (Data.ActualUpperBound - Data.LowerBound < ExaminerConstants.CompleteCheckSize); ------------------------------------------------------------------------------ -- report whether the type has been completely covered -- corresponds to the IsComplete schema function IsComplete (Data : T) return TypCompleteState; --# pre (Data.ActualUpperBound - Data.LowerBound < --# ExaminerConstants.CompleteCheckSize) or (Data.OthersClause = Seen); ------------------------------------------------------------------------------ end CompleteCheck; spark-2012.0.deb/examiner/vcg.adb0000644000175000017500000013214311753202337015505 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
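-- Illustrative sketch (not part of the original source): the five-step
-- CompleteCheck protocol described in the package above, applied to a
-- hypothetical discrete range 1 .. 10.
--
--    declare
--       Data       : CompleteCheck.T;
--       Fits       : CompleteCheck.TypRangeState;
--       OutOfRange : Boolean;
--       Overlap    : CompleteCheck.TypOverlapState;
--    begin
--       CompleteCheck.Init (Data, 1, 10, Fits);                     -- 1. initialise
--       CompleteCheck.SeenElement (Data, 3, OutOfRange, Overlap);   -- 2. single element
--       CompleteCheck.SeenRange (Data, 5, 7, OutOfRange, Overlap);  -- 3. a range
--       CompleteCheck.SeenOthers (Data);                            -- 4. others clause
--       if CompleteCheck.IsComplete (Data) = CompleteCheck.Complete then
--          null;                                                    -- 5. all values covered
--       end if;
--    end;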
-- --============================================================================= with Cells; with Debug; with Declarations; with ErrorHandler; with SystemErrors; with E_Strings; with Fatal; with File_Utils; with FileSystem; with Graph; with LexTokenLists; with ScreenEcho; with SPARK_IO; use type SPARK_IO.File_Status; use type File_Utils.File_Types; package body VCG is ------------------------------------------------------------------------------ procedure ProduceVCs (VCG_Heap : in out Cells.Heap_Record; Start_Node : in STree.SyntaxNode; Subprog_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; VCG_Output_File : in SPARK_IO.File_Type; DPC_Output_File : in SPARK_IO.File_Type; Output_Filename : in E_Strings.T; End_Position : in LexTokenManager.Token_Position; Flow_Heap : in out Heap.HeapRecord; Semantic_Error_In_Subprogram : in Boolean; Data_Flow_Error_In_Subprogram : in Boolean; Type_Check_Exports : in Boolean) --# global in CommandLineData.Content; --# in STree.Table; --# in out Declarations.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# out Graph.Table; --# out StmtStack.S; --# derives Declarations.State, --# Dictionary.Dict, --# Flow_Heap, --# Graph.Table, --# LexTokenManager.State, --# StmtStack.S, --# VCG_Heap from CommandLineData.Content, --# Data_Flow_Error_In_Subprogram, --# Declarations.State, --# Dictionary.Dict, --# Flow_Heap, --# LexTokenManager.State, --# Scope, --# Semantic_Error_In_Subprogram, --# Start_Node, --# STree.Table, --# Subprog_Sym, --# Type_Check_Exports, --# VCG_Heap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Data_Flow_Error_In_Subprogram, --# Declarations.State, --# Dictionary.Dict, --# DPC_Output_File, --# End_Position, --# ErrorHandler.Error_Context, --# Flow_Heap, --# LexTokenManager.State, --# Scope, --# Semantic_Error_In_Subprogram, --# SPARK_IO.File_Sys, --# Start_Node, --# STree.Table, --# Subprog_Sym, --# Type_Check_Exports, --# VCG_Heap, --# VCG_Output_File & --# Statistics.TableUsage from *, --# CommandLineData.Content, --# Data_Flow_Error_In_Subprogram, --# Declarations.State, --# Dictionary.Dict, --# Flow_Heap, --# LexTokenManager.State, --# Scope, --# Semantic_Error_In_Subprogram, --# Start_Node, --# STree.Table, --# Subprog_Sym, --# Type_Check_Exports, --# VCG_Heap & --# null from Output_Filename; is separate; ------------------------------------------------------------------------------ procedure Full_Symbol_Name (Item : in Dictionary.Symbol; Ancestors : out LexTokenLists.Lists; List : out LexTokenLists.Lists) --# global in Dictionary.Dict; --# derives Ancestors, --# List from Dictionary.Dict, --# Item; is Reverse_Prefix_List, Full_Symbol_Name_List : LexTokenLists.Lists; Ancestor_List : LexTokenLists.Lists; Pack_Sym : Dictionary.Symbol; Scope : Dictionary.Scopes; Lex_Token_Item : LexTokenManager.Lex_String; begin Reverse_Prefix_List := LexTokenLists.Null_List; Scope := Dictionary.GetScope (Item); loop exit when Dictionary.IsGlobalScope (Scope) or else Dictionary.IsPredefinedScope (Scope); LexTokenLists.Append (Reverse_Prefix_List, Dictionary.GetSimpleName (Dictionary.GetRegion (Scope))); Scope := Dictionary.GetEnclosingScope (Scope); end loop; Full_Symbol_Name_List := LexTokenLists.Null_List; while LexTokenLists.Get_Length (List => Reverse_Prefix_List) > 0 loop LexTokenLists.Pop (List => Reverse_Prefix_List, Item => Lex_Token_Item); LexTokenLists.Append (List => 
Full_Symbol_Name_List, Item => Lex_Token_Item); end loop; LexTokenLists.Append (Full_Symbol_Name_List, Dictionary.GetSimpleName (Item)); List := Full_Symbol_Name_List; Ancestor_List := LexTokenLists.Null_List; Pack_Sym := Dictionary.GetLibraryPackage (Dictionary.GetScope (Item)); if not Dictionary.IsPredefinedPackageStandard (Pack_Sym) then Reverse_Prefix_List := LexTokenLists.Null_List; loop Pack_Sym := Dictionary.GetPackageParent (Pack_Sym); exit when Dictionary.Is_Null_Symbol (Pack_Sym); LexTokenLists.Append (Reverse_Prefix_List, Dictionary.GetSimpleName (Pack_Sym)); end loop; while LexTokenLists.Get_Length (List => Reverse_Prefix_List) > 0 loop LexTokenLists.Pop (List => Reverse_Prefix_List, Item => Lex_Token_Item); LexTokenLists.Append (List => Ancestor_List, Item => Lex_Token_Item); end loop; end if; Ancestors := Ancestor_List; end Full_Symbol_Name; -------------------------------------------- procedure Produce_Output_Files (Subprog_Sym : in Dictionary.Symbol; VCG_Output_File : in out SPARK_IO.File_Type; DPC_Output_File : in out SPARK_IO.File_Type; Declarations_File : in out SPARK_IO.File_Type; Rule_File : in out SPARK_IO.File_Type; Output_Filename : out E_Strings.T; OK : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Declarations_File, --# DPC_Output_File, --# Rule_File, --# VCG_Output_File from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Subprog_Sym & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# LexTokenManager.State from *, --# Dictionary.Dict, --# Subprog_Sym & --# OK, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Declarations_File, --# Dictionary.Dict, --# DPC_Output_File, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Rule_File, --# SPARK_IO.File_Sys, --# Subprog_Sym, --# VCG_Output_File & --# Output_Filename from CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Subprog_Sym; is VCG_Extension : constant String := "vcg"; DPC_Extension : constant String := "dpc"; FDL_Extension : constant String := "fdl"; RLS_Extension : constant String := "rls"; Local_OK : Boolean; Unit_Name : LexTokenLists.Lists; Ancestor_Name : LexTokenLists.Lists; Filename : E_Strings.T; -------------------------------------------- procedure Check_Windows_Filename_Length_Bug --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in Filename; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Filename, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# LexTokenManager.State from *, --# Filename; is Abs_Filename : E_Strings.T := E_Strings.Empty_String; Tmp : LexTokenManager.Lex_String; begin --# accept F, 22, "We are aware that Use_Windows_Command_Line can't change."; if FileSystem.Use_Windows_Command_Line then --# end accept; -- We assemble the full name of the vcg file to be -- generated out of the working directory... E_Strings.Append_Examiner_String (Abs_Filename, FileSystem.Working_Directory); E_Strings.Append_Examiner_String (Abs_Filename, FileSystem.Directory_Separator); -- and the unit's name... 
E_Strings.Append_Examiner_String (Abs_Filename, Filename); -- followed by the extension. E_Strings.Append_Char (Abs_Filename, '.'); E_Strings.Append_String (Abs_Filename, VCG_Extension); if E_Strings.Get_Length (Abs_Filename) >= 255 then -- This is a workaround as we can't give ErrorHandler an E_String. LexTokenManager.Insert_Examiner_String (Abs_Filename, Tmp); ErrorHandler.Semantic_Warning (Err_Num => 495, Position => LexTokenManager.Token_Position'(Start_Line_No => 0, Start_Pos => E_Strings.Lengths'First), Id_Str => Tmp); end if; end if; --# accept F, 601, LexTokenManager.State, SPARK_IO.File_Sys, "False coupling through SPARK_IO."; end Check_Windows_Filename_Length_Bug; procedure Build_Filename_Nest --# global in Ancestor_Name; --# in CommandLineData.Content; --# in LexTokenManager.State; --# in Unit_Name; --# in out SPARK_IO.File_Sys; --# out Filename; --# out Local_OK; --# derives Filename from Ancestor_Name, --# LexTokenManager.State, --# Unit_Name & --# Local_OK, --# SPARK_IO.File_Sys from Ancestor_Name, --# CommandLineData.Content, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Unit_Name; is Pos : LexTokenLists.Lengths; Str : E_Strings.T; Adjusted_Dir : E_Strings.T; begin Local_OK := True; Filename := E_Strings.Empty_String; if LexTokenLists.Get_Length (List => Unit_Name) /= 1 then -- is not a main program so build nested dirs E_Strings.Append_Examiner_String (E_Str1 => Filename, E_Str2 => FileSystem.Start_Of_Directory); Pos := 1; loop exit when Pos > LexTokenLists.Get_Length (List => Ancestor_Name); Str := LexTokenManager.Lex_String_To_String (Lex_Str => LexTokenLists.Get_Element (List => Ancestor_Name, Pos => Pos)); -- Note that directories for VCG files are always created -- using lower-case names on all platforms. E_Strings.Append_Examiner_String (E_Str1 => Filename, E_Str2 => E_Strings.Lower_Case (E_Str => Str)); E_Strings.Append_String (E_Str => Filename, Str => "_"); if Local_OK then -- If the user has asked for an alterative output directory, then start -- there, otherwise start at current working directory Adjusted_Dir := Filename; CommandLineData.Normalize_File_Name_To_Output_Directory (F => Adjusted_Dir); FileSystem.Idempotent_Create_Subdirectory (Adjusted_Dir, Local_OK); end if; E_Strings.Append_Examiner_String (E_Str1 => Filename, E_Str2 => FileSystem.Directory_Separator); Pos := Pos + 1; end loop; Pos := 1; loop Str := LexTokenManager.Lex_String_To_String (Lex_Str => LexTokenLists.Get_Element (List => Unit_Name, Pos => Pos)); -- Note that directories for VCG files are always created -- using lower-case names on all platforms. E_Strings.Append_Examiner_String (E_Str1 => Filename, E_Str2 => E_Strings.Lower_Case (E_Str => Str)); -- If the user has asked for an alterative output directory, then start -- there, otherwise start at current working directory. Note that we -- must preserve the case of the user-specified directory. 
Adjusted_Dir := Filename; CommandLineData.Normalize_File_Name_To_Output_Directory (F => Adjusted_Dir); FileSystem.Idempotent_Create_Subdirectory (Adjusted_Dir, Local_OK); exit when Pos = LexTokenLists.Get_Length (List => Unit_Name) - 1; E_Strings.Append_Examiner_String (E_Str1 => Filename, E_Str2 => FileSystem.Directory_Separator); Pos := Pos + 1; end loop; E_Strings.Append_Examiner_String (E_Str1 => Filename, E_Str2 => FileSystem.Directory_Separator); end if; Str := LexTokenManager.Lex_String_To_String (Lex_Str => LexTokenLists.Get_Element (List => Unit_Name, Pos => LexTokenLists.Get_Length (List => Unit_Name))); E_Strings.Append_Examiner_String (E_Str1 => Filename, E_Str2 => E_Strings.Lower_Case (E_Str => Str)); end Build_Filename_Nest; -------------------------------------------- procedure Put_Subprogram_Name (File : in SPARK_IO.File_Type; Sym : in Dictionary.Symbol; File_Type : in File_Utils.File_Types) --# global in Ancestor_Name; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Unit_Name; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Ancestor_Name, --# Dictionary.Dict, --# File, --# File_Type, --# LexTokenManager.State, --# Sym, --# Unit_Name; is Head_Line : E_Strings.T; Page_Width : constant Natural := 78; begin if Dictionary.IsFunction (Sym) then Head_Line := E_Strings.Copy_String (Str => "function "); elsif Dictionary.IsProcedure (Sym) then Head_Line := E_Strings.Copy_String (Str => "procedure "); elsif Dictionary.IsTaskType (Sym) then Head_Line := E_Strings.Copy_String (Str => "task_type "); else -- Placeholder for package body initialization. We do not -- expect to reach this currently. Head_Line := E_Strings.Copy_String (Str => "initialization_of "); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Assertion_Failure, Msg => "Expected symbol to be either a function, procedure or task_type."); end if; if LexTokenLists.Get_Length (Ancestor_Name) /= 0 then E_Strings.Append_Examiner_String (E_Str1 => Head_Line, E_Str2 => LexTokenLists.Token_List_To_String (Token_List => Ancestor_Name)); E_Strings.Append_String (Head_Line, "."); end if; E_Strings.Append_Examiner_String (E_Str1 => Head_Line, E_Str2 => LexTokenLists.Token_List_To_String (Token_List => Unit_Name)); if (E_Strings.Get_Length (E_Str => Head_Line) + 1) < Page_Width then SPARK_IO.Set_Col (File, (Page_Width - E_Strings.Get_Length (E_Str => Head_Line)) / 2); end if; if File_Type = File_Utils.Dec_File then SPARK_IO.Put_Char (File, '{'); E_Strings.Put_String (File => File, E_Str => Head_Line); SPARK_IO.Put_Char (File, '}'); SPARK_IO.New_Line (File, 1); elsif File_Type = File_Utils.Rule_File then SPARK_IO.Put_String (File, "/*", 0); E_Strings.Put_String (File => File, E_Str => Head_Line); SPARK_IO.Put_String (File, "*/", 0); SPARK_IO.New_Line (File, 1); else E_Strings.Put_Line (File => File, E_Str => Head_Line); end if; SPARK_IO.New_Line (File, 2); end Put_Subprogram_Name; --------------------------------------------------------- procedure Produce_VCG_Output_File --# global in Ancestor_Name; --# in CommandLineData.Content; --# in Dictionary.Dict; --# in Filename; --# in LexTokenManager.State; --# in Subprog_Sym; --# in Unit_Name; --# in out SPARK_IO.File_Sys; --# in out VCG_Output_File; --# out Local_OK; --# out Output_Filename; --# derives Local_OK, --# VCG_Output_File from Filename, --# SPARK_IO.File_Sys, --# VCG_Output_File & --# Output_Filename from Filename & --# SPARK_IO.File_Sys from *, --# Ancestor_Name, --# CommandLineData.Content, --# Dictionary.Dict, --# Filename, --# 
LexTokenManager.State, --# Subprog_Sym, --# Unit_Name, --# VCG_Output_File; is Success : SPARK_IO.File_Status; begin Output_Filename := Filename; FileSystem.Check_Extension (Fn => Output_Filename, Ext => E_Strings.Copy_String (Str => VCG_Extension)); E_Strings.Create (File => VCG_Output_File, Name_Of_File => Output_Filename, Form_Of_File => "", Status => Success); Local_OK := Success = SPARK_IO.Ok; if Local_OK then File_Utils.Print_A_Header (File => VCG_Output_File, Header_Line => "Semantic Analysis of SPARK Text", File_Type => File_Utils.Other_File); Put_Subprogram_Name (File => VCG_Output_File, Sym => Subprog_Sym, File_Type => File_Utils.Other_File); end if; end Produce_VCG_Output_File; --------------------------------------------------------- procedure Produce_DPC_Output_File --# global in Ancestor_Name; --# in CommandLineData.Content; --# in Dictionary.Dict; --# in Filename; --# in LexTokenManager.State; --# in Subprog_Sym; --# in Unit_Name; --# in out DPC_Output_File; --# in out SPARK_IO.File_Sys; --# out Local_OK; --# derives DPC_Output_File, --# Local_OK from DPC_Output_File, --# Filename, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# Ancestor_Name, --# CommandLineData.Content, --# Dictionary.Dict, --# DPC_Output_File, --# Filename, --# LexTokenManager.State, --# Subprog_Sym, --# Unit_Name; is Success : SPARK_IO.File_Status; DPC_Output_Filename : E_Strings.T; begin DPC_Output_Filename := Filename; FileSystem.Check_Extension (Fn => DPC_Output_Filename, Ext => E_Strings.Copy_String (Str => DPC_Extension)); E_Strings.Create (File => DPC_Output_File, Name_Of_File => DPC_Output_Filename, Form_Of_File => "", Status => Success); Local_OK := Success = SPARK_IO.Ok; if Local_OK then File_Utils.Print_A_Header (File => DPC_Output_File, Header_Line => "Semantic Analysis of SPARK Text", File_Type => File_Utils.Other_File); Put_Subprogram_Name (File => DPC_Output_File, Sym => Subprog_Sym, File_Type => File_Utils.Other_File); end if; end Produce_DPC_Output_File; ------------------------------------------------------------ procedure Produce_Declarations_File --# global in Ancestor_Name; --# in CommandLineData.Content; --# in Dictionary.Dict; --# in Filename; --# in LexTokenManager.State; --# in Subprog_Sym; --# in Unit_Name; --# in out Declarations_File; --# in out SPARK_IO.File_Sys; --# out Local_OK; --# derives Declarations_File, --# Local_OK from Declarations_File, --# Filename, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# Ancestor_Name, --# CommandLineData.Content, --# Declarations_File, --# Dictionary.Dict, --# Filename, --# LexTokenManager.State, --# Subprog_Sym, --# Unit_Name; is Declarations_Filename : E_Strings.T; Success : SPARK_IO.File_Status; begin Declarations_Filename := Filename; FileSystem.Check_Extension (Fn => Declarations_Filename, Ext => E_Strings.Copy_String (Str => FDL_Extension)); E_Strings.Create (File => Declarations_File, Name_Of_File => Declarations_Filename, Form_Of_File => "", Status => Success); Local_OK := Success = SPARK_IO.Ok; if Local_OK then File_Utils.Print_A_Header (File => Declarations_File, Header_Line => "FDL Declarations", File_Type => File_Utils.Dec_File); end if; Put_Subprogram_Name (File => Declarations_File, Sym => Subprog_Sym, File_Type => File_Utils.Dec_File); end Produce_Declarations_File; ------------------------------------------------------------------ procedure Produce_Rule_File --# global in Ancestor_Name; --# in CommandLineData.Content; --# in Dictionary.Dict; --# in Filename; --# in LexTokenManager.State; --# in 
Subprog_Sym; --# in Unit_Name; --# in out Rule_File; --# in out SPARK_IO.File_Sys; --# out Local_OK; --# derives Local_OK, --# Rule_File from Filename, --# Rule_File, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# Ancestor_Name, --# CommandLineData.Content, --# Dictionary.Dict, --# Filename, --# LexTokenManager.State, --# Rule_File, --# Subprog_Sym, --# Unit_Name; is Rule_Filename : E_Strings.T; Success : SPARK_IO.File_Status; begin Rule_Filename := Filename; FileSystem.Check_Extension (Fn => Rule_Filename, Ext => E_Strings.Copy_String (Str => RLS_Extension)); E_Strings.Create (File => Rule_File, Name_Of_File => Rule_Filename, Form_Of_File => "", Status => Success); Local_OK := Success = SPARK_IO.Ok; if Local_OK then File_Utils.Print_A_Header (File => Rule_File, Header_Line => "Proof Rule Declarations", File_Type => File_Utils.Rule_File); end if; Put_Subprogram_Name (File => Rule_File, Sym => Subprog_Sym, File_Type => File_Utils.Rule_File); end Produce_Rule_File; ------------------------------------------------------------------ begin -- Produce_Output_Files Full_Symbol_Name (Item => Subprog_Sym, Ancestors => Ancestor_Name, List => Unit_Name); Build_Filename_Nest; -- We need to check for files with a total name longer than 255 -- characters as this will cause problems on Windows. Check_Windows_Filename_Length_Bug; -- Filename now contains the basename of the required output file(s) -- without an extension. It is also a relative to the current working -- directory - for example for subprogram P.Q, we end up with Filename -- being "p/q" -- -- If the user has requested an alternative output directory, then we adjust -- Filename now CommandLineData.Normalize_File_Name_To_Output_Directory (F => Filename); Output_Filename := E_Strings.Empty_String; if Local_OK then Produce_Declarations_File; if Local_OK then Produce_Rule_File; if Local_OK and CommandLineData.Content.VCG then Produce_VCG_Output_File; end if; if Local_OK and CommandLineData.Content.DPC then Produce_DPC_Output_File; end if; end if; end if; OK := Local_OK; --# accept Flow, 601, VCG_Output_File, Declarations_File, "ignore data coupling between files thro' SPARK_IO" & --# Flow, 601, VCG_Output_File, Rule_File, "ignore data coupling between files thro' SPARK_IO" & --# Flow, 601, DPC_Output_File, Rule_File, "ignore data coupling between files thro' SPARK_IO" & --# Flow, 601, DPC_Output_File, Declarations_File, "ignore data coupling between files thro' SPARK_IO" & --# Flow, 601, DPC_Output_File, VCG_Output_File, "ignore data coupling between files thro' SPARK_IO" & --# Flow, 601, Output_Filename, Rule_File, "ignore data coupling between files thro' SPARK_IO" & --# Flow, 601, Output_Filename, Declarations_File, "ignore data coupling between files thro' SPARK_IO" & --# Flow, 601, Rule_File, Declarations_File, "ignore data coupling between files thro' SPARK_IO" & --# Flow, 601, ErrorHandler.Error_Context, Subprog_Sym, "false coupling through SPARK_IO" & --# Flow, 601, VCG_Output_File, ErrorHandler.Error_Context, "false coupling through SPARK_IO" & --# Flow, 601, DPC_Output_File, ErrorHandler.Error_Context, "false coupling through SPARK_IO" & --# Flow, 601, Declarations_File, ErrorHandler.Error_Context, "false coupling through SPARK_IO" & --# Flow, 601, Rule_File, ErrorHandler.Error_Context, "false coupling through SPARK_IO" & --# Flow, 601, Output_Filename, ErrorHandler.Error_Context, "false coupling through SPARK_IO"; end Produce_Output_Files; --------------------------------------------------------------------------- procedure 
Generate_VCs_Local (Start_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; End_Position : in LexTokenManager.Token_Position; Flow_Heap : in out Heap.HeapRecord; Semantic_Error_In_Subprogram : in Boolean; Data_Flow_Error_In_Subprogram : in Boolean; Type_Check_Exports : in Boolean) --# global in CommandLineData.Content; --# in STree.Table; --# in out Declarations.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# derives Declarations.State, --# Dictionary.Dict, --# Flow_Heap, --# Graph.Table, --# LexTokenManager.State, --# Statistics.TableUsage, --# StmtStack.S from *, --# CommandLineData.Content, --# Data_Flow_Error_In_Subprogram, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Flow_Heap, --# LexTokenManager.State, --# Scope, --# Semantic_Error_In_Subprogram, --# SPARK_IO.File_Sys, --# Start_Node, --# STree.Table, --# Type_Check_Exports & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Data_Flow_Error_In_Subprogram, --# Dictionary.Dict, --# End_Position, --# ErrorHandler.Error_Context, --# Flow_Heap, --# LexTokenManager.State, --# Scope, --# Semantic_Error_In_Subprogram, --# SPARK_IO.File_Sys, --# Start_Node, --# STree.Table, --# Type_Check_Exports; is Subprog_Sym : Dictionary.Symbol; VCG_Output_File : SPARK_IO.File_Type; DPC_Output_File : SPARK_IO.File_Type; Output_Filename : E_Strings.T; Declarations_File : SPARK_IO.File_Type; Rule_File : SPARK_IO.File_Type; OK : Boolean; Success : SPARK_IO.File_Status; VCG_Heap : Cells.Heap_Record; -- In case of a fatal error, we generate a single "False" VC in the -- VCG or DPC file using this procedure. 
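-- For reference, the text written by Generate_False_VC below has the
-- following shape (shown for a hypothetical procedure P; the actual name and
-- kind prefix are derived from Subprog_Sym):
--
--    /* False VC generated due to VCG heap exhausted */
--
--    For path(s) from start to finish:
--
--    procedure_p_1.
--    H1: true .
--     ->
--    C1: false .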
procedure Generate_False_VC (Output_File : in SPARK_IO.File_Type) --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in Subprog_Sym; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dictionary.Dict, --# LexTokenManager.State, --# Output_File, --# Subprog_Sym; is Head_Line : E_Strings.T; Unit_Name : LexTokenLists.Lists; Ancestor_Name : LexTokenLists.Lists; begin --# accept F, 10, Ancestor_Name, "Ineffective assignment here OK"; Full_Symbol_Name (Item => Subprog_Sym, Ancestors => Ancestor_Name, List => Unit_Name); --# end accept; -- The header of the VC has to have the correct name and prefix -- for POGS, so we have to deduce this here from Subprog_Sym if Dictionary.IsFunction (Subprog_Sym) then Head_Line := E_Strings.Copy_String (Str => "function_"); elsif Dictionary.IsTaskType (Subprog_Sym) then Head_Line := E_Strings.Copy_String (Str => "task_type_"); else -- must be a procedure Head_Line := E_Strings.Copy_String (Str => "procedure_"); end if; E_Strings.Append_Examiner_String (E_Str1 => Head_Line, E_Str2 => E_Strings.Lower_Case (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => LexTokenLists.Get_Element (List => Unit_Name, Pos => LexTokenLists.Get_Length (List => Unit_Name))))); E_Strings.Append_String (E_Str => Head_Line, Str => "_1."); SPARK_IO.New_Line (Output_File, 1); SPARK_IO.Put_Line (Output_File, "/* False VC generated due to VCG heap exhausted */", 0); SPARK_IO.New_Line (Output_File, 2); SPARK_IO.Put_Line (Output_File, "For path(s) from start to finish:", 0); SPARK_IO.New_Line (Output_File, 1); E_Strings.Put_Line (File => Output_File, E_Str => Head_Line); SPARK_IO.Put_Line (Output_File, "H1: true .", 0); SPARK_IO.Put_Line (Output_File, " ->", 0); SPARK_IO.Put_Line (Output_File, "C1: false .", 0); SPARK_IO.New_Line (Output_File, 2); --# accept F, 33, Ancestor_Name, "Ancestor_Name not referenced here OK"; end Generate_False_VC; begin VCG_Output_File := SPARK_IO.Null_File; DPC_Output_File := SPARK_IO.Null_File; Declarations_File := SPARK_IO.Null_File; Rule_File := SPARK_IO.Null_File; Subprog_Sym := Dictionary.GetRegion (Scope); Produce_Output_Files (Subprog_Sym => Subprog_Sym, VCG_Output_File => VCG_Output_File, DPC_Output_File => DPC_Output_File, Declarations_File => Declarations_File, Rule_File => Rule_File, Output_Filename => Output_Filename, OK => OK); if OK then Cells.Initialize (VCG_Heap); Declarations.StartProcessing (VCG_Heap); ProduceVCs (VCG_Heap => VCG_Heap, Start_Node => Start_Node, Subprog_Sym => Subprog_Sym, Scope => Scope, VCG_Output_File => VCG_Output_File, DPC_Output_File => DPC_Output_File, Output_Filename => Output_Filename, End_Position => End_Position, Flow_Heap => Flow_Heap, Semantic_Error_In_Subprogram => Semantic_Error_In_Subprogram, Data_Flow_Error_In_Subprogram => Data_Flow_Error_In_Subprogram, Type_Check_Exports => Type_Check_Exports); Declarations.OutputDeclarations (VCG_Heap, Declarations_File, Rule_File, Scope, True, End_Position); Cells.Report_Usage (VCG_Heap); else -- Unable to create output files ErrorHandler.Semantic_Warning (406, End_Position, LexTokenManager.Null_String); end if; --# accept Flow, 10, Success, "Expected ineffective assignment to Success" & --# Flow, 10, VCG_Output_File, "Expected ineffective assignment to VCG_Output_File" & --# Flow, 10, DPC_Output_File, "Expected ineffective assignment to VCG_Output_File" & --# Flow, 10, Declarations_File, "Expected ineffective assignment to Declarations_File" & --# Flow, 10, Rule_File, "Expected ineffective assignment to Rule_File"; SPARK_IO.Close 
(Declarations_File, Success); SPARK_IO.Close (Rule_File, Success); if SPARK_IO.Is_Open (VCG_Output_File) then SPARK_IO.Close (VCG_Output_File, Success); end if; if SPARK_IO.Is_Open (DPC_Output_File) then SPARK_IO.Close (DPC_Output_File, Success); end if; --# end accept; --# accept Flow, 33, Success, "Expected Success to be neither referenced nor exported"; exception --# hide Generate_VCs_Local; when Fatal.Static_Limit => -- Here owing to a VCG Heap/table exhausted. -- We need to close open files, making sure they are at least -- syntactically legal for the Simplifier. -- We insert an explicitly False VC here, so it is sure -- to be undischarged and picked up by POGS ErrorHandler.Semantic_Warning (409, End_Position, LexTokenManager.Null_String); if SPARK_IO.Is_Open (VCG_Output_File) then Generate_False_VC (Output_File => VCG_Output_File); SPARK_IO.Close (VCG_Output_File, Success); end if; if SPARK_IO.Is_Open (DPC_Output_File) then Generate_False_VC (Output_File => DPC_Output_File); SPARK_IO.Close (DPC_Output_File, Success); end if; if SPARK_IO.Is_Open (Rule_File) then SPARK_IO.Close (Rule_File, Success); end if; if SPARK_IO.Is_Open (Declarations_File) then -- Make sure the FDL file is termianted properly before closing it Declarations.PrintDeclarationTail (Declarations_File); SPARK_IO.Close (Declarations_File, Success); end if; -- We DONT'T re-raise here - there may be other subprograms -- requiring VC Generation in the enclosing unit, so we -- carry on. when others => -- Any other exception reaching here. -- We need to close open files, then re-raise if SPARK_IO.Is_Open (VCG_Output_File) then SPARK_IO.Close (VCG_Output_File, Success); end if; if SPARK_IO.Is_Open (DPC_Output_File) then SPARK_IO.Close (DPC_Output_File, Success); end if; if SPARK_IO.Is_Open (Rule_File) then SPARK_IO.Close (Rule_File, Success); end if; if SPARK_IO.Is_Open (Declarations_File) then -- Make sure the FDL file is termianted properly before closing it Declarations.PrintDeclarationTail (Declarations_File); SPARK_IO.Close (Declarations_File, Success); end if; raise; end Generate_VCs_Local; procedure Generate_VCs (Start_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Do_VCG : in Boolean; End_Position : in LexTokenManager.Token_Position; Flow_Heap : in out Heap.HeapRecord; Semantic_Error_In_Subprogram : in Boolean; Data_Flow_Error_In_Subprogram : in Boolean; Type_Check_Exports : in Boolean) is Errors_In_Subprogram_Or_Its_Signature : Boolean; begin if Do_VCG then Invoked := True; Errors_In_Subprogram_Or_Its_Signature := Semantic_Error_In_Subprogram or (not Dictionary.SubprogramSignatureIsWellformed (Dictionary.IsAbstract, Dictionary.GetRegion (Scope))); if Errors_In_Subprogram_Or_Its_Signature then ErrorHandler.Semantic_Warning (408, End_Position, LexTokenManager.Null_String); end if; Generate_VCs_Local (Start_Node => Start_Node, Scope => Scope, End_Position => End_Position, Flow_Heap => Flow_Heap, Semantic_Error_In_Subprogram => Errors_In_Subprogram_Or_Its_Signature, Data_Flow_Error_In_Subprogram => Data_Flow_Error_In_Subprogram, Type_Check_Exports => Type_Check_Exports); end if; end Generate_VCs; end VCG; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_identifier.adb0000644000175000017500000001260011753202336024052 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- 
under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SLI; separate (Sem.Walk_Expression_P) procedure Wf_Identifier (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; The_Heap : in out Heap.HeapRecord; Ref_Var : in SeqAlgebra.Seq; Context : in Sem.Tilde_Context) is Sym : Dictionary.Symbol; Next_Node : STree.SyntaxNode; begin if Context = Sem.Code then Sym := Dictionary.LookupItem (Name => STree.Node_Lex_String (Node => Node), Scope => Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); else Sym := Dictionary.LookupItem (Name => STree.Node_Lex_String (Node => Node), Scope => Scope, Context => Dictionary.ProofContext, Full_Package_Name => False); if not Dictionary.Is_Null_Symbol (Sym) and then ErrorHandler.Generate_SLI then SLI.Generate_Xref_Symbol (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Node, Symbol => Sym, Is_Declaration => False); end if; end if; if not Dictionary.Is_Null_Symbol (Sym) then STree.Set_Node_Lex_String (Sym => Sym, Node => Node); end if; if Dictionary.IsPackage (Sym) then Exp_Stack.Push (X => Sem.Exp_Record'(Type_Symbol => Dictionary.GetUnknownTypeMark, Other_Symbol => Sym, Stream_Symbol => Dictionary.NullSymbol, Tagged_Parameter_Symbol => Dictionary.NullSymbol, Variable_Symbol => Dictionary.NullSymbol, Param_Count => 0, Param_List => Lists.Null_List, Sort => Sem.Is_Package, Arg_List_Found => False, Is_AVariable => False, Is_An_Entire_Variable => False, Errors_In_Expression => False, Has_Operators => False, Is_Static => False, Is_Constant => False, Is_ARange => False, String_Value => LexTokenManager.Null_String, Value => Maths.NoValue, Range_RHS => Maths.NoValue), Stack => E_Stack); else Stack_Identifier (Sym => Sym, Id_Str => STree.Node_Lex_String (Node => Node), Node => Node, Prefix => Dictionary.NullSymbol, Scope => Scope, E_Stack => E_Stack, The_Heap => The_Heap, Ref_Var => Ref_Var, Dotted => False, Context => Context, Is_Annotation => Context /= Sem.Code); end if; if Context /= Sem.Code then Next_Node := STree.Next_Sibling (Current_Node => Node); -- ASSUME Next_Node = tilde OR percent OR NULL if Next_Node /= STree.NullNode then -- ASSUME Next_Node = tilde OR percent -- identifier may have ~ or % after it case STree.Syntax_Node_Type (Node => Next_Node) is when SP_Symbols.tilde => -- ASSUME Next_Node = tilde Wf_Tilde (Node_Pos => STree.Node_Position (Node => Next_Node), Scope => Scope, E_Stack => E_Stack, Context => Context); when SP_Symbols.percent => -- ASSUME Next_Node = percent Wf_Percent (Node_Pos => STree.Node_Position (Node => Next_Node), Scope => Scope, E_Stack => E_Stack); when others => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = tilde OR percent OR NULL in Wf_Identifier"); end case; end if; end if; end Wf_Identifier; 
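   -- Illustrative note on the tilde/percent handling above (a sketch only,
   -- assuming standard SPARK 2005 annotation syntax): the suffixes can only
   -- follow an identifier when walking an annotation expression
   -- (Context /= Sem.Code), for example
   --
   --    --# post Total = Total~ + X;   -- Total~ : initial value of the import Total
   --    --# assert T = T%;             -- T%     : value of T on entry to the for loop
   --
   -- Wf_Tilde and Wf_Percent then validate the expression-stack entry that
   -- the identifier pushed.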
spark-2012.0.deb/examiner/lextokenmanager-relation_algebra.ads0000644000175000017500000001551211753202336023422 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Heap; with LexTokenManager; with RelationAlgebra; with SeqAlgebra; with LexTokenManager.Seq_Algebra; use type RelationAlgebra.ColLeader; use type RelationAlgebra.Pair; use type RelationAlgebra.RowLeader; --# inherit Heap, --# LexTokenManager, --# LexTokenManager.Seq_Algebra, --# RelationAlgebra, --# SeqAlgebra, --# Statistics, --# SystemErrors; -- This package extends the package RelationalAlgebra to support -- Lex_String. This package uses case insensitive comparison betweeen -- strings. package LexTokenManager.Relation_Algebra is type Relation is private; procedure Create_Relation (The_Heap : in out Heap.HeapRecord; R : out Relation); --# global in out Statistics.TableUsage; --# derives R, --# The_Heap from The_Heap & --# Statistics.TableUsage from *, --# The_Heap; -- Objects of type Relations utilize storage managed by the package Heap. -- The storage used by a relation R must be returned to the Heap by calling -- DisposeOfRelation before R goes out of scope. procedure Dispose_Of_Relation (The_Heap : in out Heap.HeapRecord; R : in Relation); --# derives The_Heap from *, --# R; -- InsertPair provides the basic means to build a relation. -- Each pair in the relation R represented by the row value, I, and the -- column value J may be inserted individually. The pair is only inserted -- if it is not already present. There are no duplicates. procedure Insert_Pair (The_Heap : in out Heap.HeapRecord; R : in Relation; I : in Natural; J : in LexTokenManager.Lex_String); --# global in LexTokenManager.State; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# The_Heap from *, --# I, --# J, --# LexTokenManager.State, --# R, --# The_Heap; -- Creates a new set S containing all the column value -- entries for the row of a relation R specified by GivenIndex. -- If a row specified by the GivenIndex is not present in R, -- S is the empty set. procedure Row_Extraction (The_Heap : in out Heap.HeapRecord; R : in Relation; Given_Index : in Natural; S : out Seq_Algebra.Seq); --# global in out Statistics.TableUsage; --# derives S from The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# Given_Index, --# R, --# The_Heap; -- AddRow adds an entire row to a relation R. -- The row index I is applied to each of the values in the set S to obtain -- a set of pairs that are added to the relation R if they are not already -- present in R. 
-- R and S must have been initialized using their corresponding Create ops. procedure Add_Row (The_Heap : in out Heap.HeapRecord; R : in Relation; I : in Natural; S : in Seq_Algebra.Seq); --# global in LexTokenManager.State; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# The_Heap from *, --# I, --# LexTokenManager.State, --# R, --# S, --# The_Heap; -- AddCol adds an entire column to a relation R. -- The column index J is applied to each of the values in the set S to obtain -- a set of pairs that are added to the relation R if they are not already -- present in R. -- R and S must have been initialized using their corresponding Create ops. procedure Add_Col (The_Heap : in out Heap.HeapRecord; R : in Relation; J : in LexTokenManager.Lex_String; S : in SeqAlgebra.Seq); --# global in LexTokenManager.State; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# The_Heap from *, --# J, --# LexTokenManager.State, --# R, --# S, --# The_Heap; private type Relation is record The_Relation : RelationAlgebra.Relation; end record; function Column_Value (The_Heap : Heap.HeapRecord; P : RelationAlgebra.Pair) return LexTokenManager.Lex_String; function Convert_To_Relation (R : RelationAlgebra.Relation) return Relation; procedure Insert_Col_Leader (The_Heap : in out Heap.HeapRecord; R : in Relation; J : in LexTokenManager.Lex_String; Cache : in out RelationAlgebra.Caches); --# global in LexTokenManager.State; --# in out Statistics.TableUsage; --# derives Cache, --# The_Heap from Cache, --# J, --# LexTokenManager.State, --# R, --# The_Heap & --# Statistics.TableUsage from *, --# Cache, --# J, --# LexTokenManager.State, --# The_Heap; end LexTokenManager.Relation_Algebra; spark-2012.0.deb/examiner/sem-check_closing_identifier.adb0000644000175000017500000000646111753202336022507 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem) procedure Check_Closing_Identifier (End_Name_Node, Ident_Node : in STree.SyntaxNode) is Start_Ident_Node : STree.SyntaxNode; End_Ident_Node : STree.SyntaxNode; begin Start_Ident_Node := Ident_Node; End_Ident_Node := Last_Child_Of (Start_Node => End_Name_Node); -- ASSUME End_Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => End_Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect End_Ident_Node = identifier in Check_Closing_Identifier"); loop --# assert Syntax_Node_Type (End_Name_Node, STree.Table) = SP_Symbols.dotted_simple_name and --# Syntax_Node_Type (Start_Ident_Node, STree.Table) = SP_Symbols.identifier and --# Syntax_Node_Type (End_Ident_Node, STree.Table) = SP_Symbols.identifier; -- check identifiers at current positions: if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Node => Start_Ident_Node), Lex_Str2 => Node_Lex_String (Node => End_Ident_Node)) /= LexTokenManager.Str_Eq then ErrorHandler.Semantic_Error (Err_Num => 58, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => End_Ident_Node), Id_Str => Node_Lex_String (Node => Start_Ident_Node)); exit; end if; -- move on to next identifiers: Start_Ident_Node := Next_Sibling (Current_Node => Parent_Node (Current_Node => Start_Ident_Node)); End_Ident_Node := Next_Sibling (Current_Node => Parent_Node (Current_Node => End_Ident_Node)); -- finished when both exhausted: exit when Syntax_Node_Type (Node => Start_Ident_Node) /= SP_Symbols.identifier and then Syntax_Node_Type (Node => End_Ident_Node) /= SP_Symbols.identifier; -- check if only one exhausted (length mismatch): if Syntax_Node_Type (Node => Start_Ident_Node) /= SP_Symbols.identifier or else Syntax_Node_Type (Node => End_Ident_Node) /= SP_Symbols.identifier then ErrorHandler.Semantic_Error (Err_Num => 615, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => End_Name_Node), Id_Str => LexTokenManager.Null_String); exit; end if; end loop; end Check_Closing_Identifier; spark-2012.0.deb/examiner/xmlreport.adb0000644000175000017500000016425711753202337016775 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with FileSystem; with SystemErrors; package body XMLReport --# own State is File_Depth, --# Schema, --# Schema_State, --# Section_Depths, --# Tag_IDs, --# Tag_Strings; is -- Set the version of the report file schema. This should be the CVS version of the -- sparkreport.xsd file. 
Schema_Version : constant String := "1.8"; type My_Tag is ( MT_Brief_Justifications, MT_Commandline, MT_Compiler_Data, MT_Compiler_Item, MT_Cyclic_Requirements, MT_File, MT_Filename, MT_Full_Justification, MT_Full_Justifications, MT_Index, MT_Indexes, MT_Justifications_Section, MT_Listing, MT_Message, MT_Messages, MT_Metafile, MT_Metafiles, MT_Option, MT_Prologue, MT_Report, MT_Results, MT_Scope, MT_Suppressed, MT_Suppressed_Pragma, MT_Symbol, MT_Target_Config, MT_Unit, MT_Units_Notfound, MT_Warnings_Config, MT_Units_In_File); type Tag_String_Array is array (My_Tag) of E_Strings.T; type Tag_IDArray is array (My_Tag) of SPARK_XML.Tag_ID; type Section_Depth_Array is array (Sections) of SPARK_XML.Tag_Depth; ------------------- -- Own Variables -- ------------------- -- Dictionary for the schema Tag_Strings : Tag_String_Array; Tag_IDs : Tag_IDArray; -- The schema Schema : SPARK_XML.Schema_Record; Schema_State : SPARK_XML.Schema_State_Record; -- Additional schema state info File_Depth : SPARK_XML.Tag_Depth; Section_Depths : Section_Depth_Array; -------------------- -- Error handling -- -------------------- procedure Handle_Error (Status : in SPARK_XML.Schema_Status) --# global in Schema; --# in Schema_State; --# derives null from Schema, --# Schema_State, --# Status; is begin if SPARK_XML.Is_Error (Status) then SPARK_XML.Print_Schema_Error (Status); SPARK_XML.Print_Working_State (Schema, Schema_State); case Status is when SPARK_XML.SS_To_Many_Attributes => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.XML_Generation_Error, Msg => "Exceeded Attribute Limit"); when SPARK_XML.SS_Stack_Full => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.XML_Generation_Error, Msg => "Exceeded XML Stack limit"); when SPARK_XML.SS_Tag_Incomplete => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Illegal_XML_Generation_Attempt, Msg => "Attempted to gerenate incomplete tag"); when SPARK_XML.SS_Invalid_Depth => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Illegal_XML_Generation_Attempt, Msg => "Invalid depth value for tag closure"); when SPARK_XML.SS_Stack_Empty => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Illegal_XML_Generation_Attempt, Msg => "Tag stack empty"); when SPARK_XML.SS_Wrong_Content_Type => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Illegal_XML_Generation_Attempt, Msg => "Incorrect content type for attribute"); when SPARK_XML.SS_Invalid_Attribute => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Illegal_XML_Generation_Attempt, Msg => "Invalid attribute for tag"); when SPARK_XML.SS_Invalid_Tag => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Illegal_XML_Generation_Attempt, Msg => "Invalid tag"); when SPARK_XML.SS_No_Such_Tag => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Illegal_XML_Generation_Attempt, Msg => "No such tag defined in schema"); when SPARK_XML.SS_Tag_Not_Found => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Illegal_XML_Generation_Attempt, Msg => "Could not find instance of the tag to close"); when SPARK_XML.SS_OK => -- We should never get to this case SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Illegal_XML_Generation_Attempt, Msg => "Everything is fine"); end case; end if; end Handle_Error; procedure Handle_Schema_Error (Success : in Boolean; Msg : in String) --# derives null from Msg, --# Success; is --# hide Handle_Schema_Error; begin if not Success then SPARK_XML.Print_Working_State (Schema, Schema_State); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.XML_Schema_Error, Msg => Msg); end if; end Handle_Schema_Error; 
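   -- Illustrative sketch of the report structure encoded by the schema that
   -- Init builds below (element and attribute names are taken from Load_Tags,
   -- Load_Attributes and Build_Hierarchy; the concrete values shown, such as
   -- "main.adb", are invented for illustration only):
   --
   --    <report version="1.8">
   --      <prologue>
   --        <commandline> <filename>main.adb</filename> <option>-vcg</option> </commandline>
   --        ...
   --      </prologue>
   --      <results>
   --        <file filename="main.adb">
   --          <message class="warning" code="1" line="10" offset="3"> ... </message>
   --        </file>
   --      </results>
   --    </report>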
-------------------- -- Initialisation -- -------------------- -- Build the dictionary, then create the schema representation for SPARK_XML procedure Init --# global out File_Depth; --# out Schema; --# out Schema_State; --# out Section_Depths; --# out Tag_IDs; --# out Tag_Strings; --# derives File_Depth, --# Schema, --# Schema_State, --# Section_Depths, --# Tag_IDs, --# Tag_Strings from ; is -- Load the tag definitions in to the Schema procedure Load_Tags --# global in out Schema; --# out Tag_IDs; --# out Tag_Strings; --# derives Schema, --# Tag_IDs from Schema & --# Tag_Strings from ; is Tmp_Tag : SPARK_XML.Tag_ID; begin Tag_Strings := Tag_String_Array' (MT_Brief_Justifications => E_Strings.Copy_String (Str => "brief_justifications"), MT_Commandline => E_Strings.Copy_String (Str => "commandline"), MT_Compiler_Data => E_Strings.Copy_String (Str => "compiler_data"), MT_Compiler_Item => E_Strings.Copy_String (Str => "compiler_item"), MT_Cyclic_Requirements => E_Strings.Copy_String (Str => "cyclic_requirements"), MT_File => E_Strings.Copy_String (Str => "file"), MT_Filename => E_Strings.Copy_String (Str => "filename"), MT_Full_Justification => E_Strings.Copy_String (Str => "full_justification"), MT_Full_Justifications => E_Strings.Copy_String (Str => "full_justifications"), MT_Index => E_Strings.Copy_String (Str => "index"), MT_Indexes => E_Strings.Copy_String (Str => "indexes"), MT_Justifications_Section => E_Strings.Copy_String (Str => "justifications_section"), MT_Listing => E_Strings.Copy_String (Str => "listing"), MT_Message => E_Strings.Copy_String (Str => "message"), MT_Messages => E_Strings.Copy_String (Str => "messages"), MT_Metafile => E_Strings.Copy_String (Str => "metafile"), MT_Metafiles => E_Strings.Copy_String (Str => "metafiles"), MT_Option => E_Strings.Copy_String (Str => "option"), MT_Prologue => E_Strings.Copy_String (Str => "prologue"), MT_Report => E_Strings.Copy_String (Str => "report"), MT_Results => E_Strings.Copy_String (Str => "results"), MT_Scope => E_Strings.Copy_String (Str => "scope"), MT_Suppressed => E_Strings.Copy_String (Str => "suppressed"), MT_Suppressed_Pragma => E_Strings.Copy_String (Str => "pragma"), MT_Symbol => E_Strings.Copy_String (Str => "symbol"), MT_Target_Config => E_Strings.Copy_String (Str => "target_config"), MT_Unit => E_Strings.Copy_String (Str => "unit"), MT_Units_Notfound => E_Strings.Copy_String (Str => "units_notfound"), MT_Warnings_Config => E_Strings.Copy_String (Str => "warnings_config"), MT_Units_In_File => E_Strings.Copy_String (Str => "units")); Tag_IDs := Tag_IDArray'(others => SPARK_XML.Null_Tag); for I in My_Tag loop SPARK_XML.Add_Tag (Schema, Tag_Strings (I), Tmp_Tag); Tag_IDs (I) := Tmp_Tag; if SPARK_XML.Is_Null_Tag (Tmp_Tag) then -- Run out of room, so generate an error. Handle_Schema_Error (Success => False, Msg => "Failure adding tag"); exit; end if; end loop; end Load_Tags; -- Load the attribute definitions procedure Load_Attributes --# global in Tag_IDs; --# in out Schema; --# derives Schema from *, --# Tag_IDs; is -- Set Total_Attributes to the total number of attributes to be added -- to tags in the Schema. Total_Attributes : constant Integer := 25; type Attribute is record ID : SPARK_XML.Tag_ID; Name : E_Strings.T; Typ : SPARK_XML.Attribute_Type; Req : Boolean; end record; type Attribute_Index is range 1 .. 
Total_Attributes; type Attribute_List is array (Attribute_Index) of Attribute; Tmp_Attrib : SPARK_XML.Attribute_ID; Tmp_Success : Boolean; Attributes : Attribute_List; begin -- This table encodes the legal attributes of each XML element, as specified -- in the SPARKReport Schema Description in sparkreport.xsd Attributes := Attribute_List' (Attribute'(Tag_IDs (MT_Report), E_Strings.Copy_String (Str => "version"), SPARK_XML.At_String, True), Attribute'(Tag_IDs (MT_Symbol), E_Strings.Copy_String (Str => "id"), SPARK_XML.At_String, True), Attribute'(Tag_IDs (MT_Option), E_Strings.Copy_String (Str => "id"), SPARK_XML.At_String, False), -- Remove? Attribute'(Tag_IDs (MT_Unit), E_Strings.Copy_String (Str => "name"), SPARK_XML.At_String, True), Attribute'(Tag_IDs (MT_Unit), E_Strings.Copy_String (Str => "type"), SPARK_XML.At_String, True), Attribute'(Tag_IDs (MT_Unit), E_Strings.Copy_String (Str => "status"), SPARK_XML.At_String, False), Attribute'(Tag_IDs (MT_Message), E_Strings.Copy_String (Str => "class"), SPARK_XML.At_String, True), Attribute'(Tag_IDs (MT_Message), E_Strings.Copy_String (Str => "code"), SPARK_XML.At_Integer, True), Attribute'(Tag_IDs (MT_Message), E_Strings.Copy_String (Str => "line"), SPARK_XML.At_Integer, True), Attribute'(Tag_IDs (MT_Message), E_Strings.Copy_String (Str => "offset"), SPARK_XML.At_Integer, True), Attribute'(Tag_IDs (MT_Full_Justification), E_Strings.Copy_String (Str => "class"), SPARK_XML.At_String, True), Attribute'(Tag_IDs (MT_Full_Justification), E_Strings.Copy_String (Str => "code"), SPARK_XML.At_Integer, True), Attribute'(Tag_IDs (MT_Full_Justification), E_Strings.Copy_String (Str => "line_from"), SPARK_XML.At_Integer, True), Attribute'(Tag_IDs (MT_Full_Justification), E_Strings.Copy_String (Str => "line_to"), SPARK_XML.At_String, True), Attribute'(Tag_IDs (MT_Full_Justification), E_Strings.Copy_String (Str => "match_no"), SPARK_XML.At_Integer, True), Attribute'(Tag_IDs (MT_Full_Justification), E_Strings.Copy_String (Str => "match_line"), SPARK_XML.At_Integer, True), Attribute'(Tag_IDs (MT_Brief_Justifications), E_Strings.Copy_String (Str => "matched"), SPARK_XML.At_Integer, True), Attribute'(Tag_IDs (MT_Brief_Justifications), E_Strings.Copy_String (Str => "unmatched"), SPARK_XML.At_Integer, False), Attribute'(Tag_IDs (MT_Metafile), E_Strings.Copy_String (Str => "name"), SPARK_XML.At_String, True), Attribute'(Tag_IDs (MT_File), E_Strings.Copy_String (Str => "unit"), SPARK_XML.At_String, False), Attribute'(Tag_IDs (MT_File), E_Strings.Copy_String (Str => "type"), SPARK_XML.At_String, False), Attribute'(Tag_IDs (MT_File), E_Strings.Copy_String (Str => "filename"), SPARK_XML.At_String, True), Attribute'(Tag_IDs (MT_Compiler_Item), E_Strings.Copy_String (Str => "item"), SPARK_XML.At_String, True), Attribute'(Tag_IDs (MT_Scope), E_Strings.Copy_String (Str => "id"), SPARK_XML.At_String, True), Attribute'(Tag_IDs (MT_Scope), E_Strings.Copy_String (Str => "type"), SPARK_XML.At_String, True)); for I in Attribute_Index loop --# accept Flow, 10, Tmp_Attrib, "Expected ineffective assignment to Tmp_Attrib"; SPARK_XML.Add_Attribute_To_Tag (Schema, -- Expect ineffective statement Attributes (I).ID, Attributes (I).Name, Attributes (I).Typ, Attributes (I).Req, Tmp_Attrib, Tmp_Success); --# end accept; Handle_Schema_Error (Success => Tmp_Success, Msg => "Failure adding attribute to tag"); exit when not Tmp_Success; end loop; --# accept Flow, 33, Tmp_Attrib, "Expected to be neither referenced nor exported"; end Load_Attributes; -- Load the hierarchy definitions procedure 
Build_Hierarchy --# global in Tag_IDs; --# in out Schema; --# derives Schema from *, --# Tag_IDs; is Max_Relations : constant Integer := 37; type Tag_Rel is record Parent : SPARK_XML.Tag_ID; Child : SPARK_XML.Tag_ID; Required : Boolean; end record; subtype Rel_Index is Integer range 1 .. Max_Relations; type Rel_Array is array (Rel_Index) of Tag_Rel; Relations : Rel_Array; Tmp_Success : Boolean; begin -- This table encodes the legal nesting of XML elements, as specified -- in the SPARKReport Schema Description in sparkreport.xsd Relations := Rel_Array' (Tag_Rel'(SPARK_XML.Null_Tag, Tag_IDs (MT_Report), False), Tag_Rel'(Tag_IDs (MT_Scope), Tag_IDs (MT_Scope), False), Tag_Rel'(Tag_IDs (MT_Scope), Tag_IDs (MT_Message), False), Tag_Rel'(Tag_IDs (MT_Message), Tag_IDs (MT_Symbol), False), Tag_Rel'(Tag_IDs (MT_Report), Tag_IDs (MT_Prologue), False), Tag_Rel'(Tag_IDs (MT_Report), Tag_IDs (MT_Results), False), Tag_Rel'(Tag_IDs (MT_Prologue), Tag_IDs (MT_Commandline), False), Tag_Rel'(Tag_IDs (MT_Prologue), Tag_IDs (MT_Indexes), False), Tag_Rel'(Tag_IDs (MT_Prologue), Tag_IDs (MT_Metafiles), False), Tag_Rel'(Tag_IDs (MT_Prologue), Tag_IDs (MT_Warnings_Config), False), Tag_Rel'(Tag_IDs (MT_Prologue), Tag_IDs (MT_Compiler_Data), False), Tag_Rel'(Tag_IDs (MT_Prologue), Tag_IDs (MT_Target_Config), False), Tag_Rel'(Tag_IDs (MT_Prologue), Tag_IDs (MT_Units_Notfound), False), Tag_Rel'(Tag_IDs (MT_Prologue), Tag_IDs (MT_Cyclic_Requirements), False), Tag_Rel'(Tag_IDs (MT_Commandline), Tag_IDs (MT_Filename), False), Tag_Rel'(Tag_IDs (MT_Commandline), Tag_IDs (MT_Option), False), Tag_Rel'(Tag_IDs (MT_Compiler_Data), Tag_IDs (MT_Compiler_Item), False), Tag_Rel'(Tag_IDs (MT_Cyclic_Requirements), Tag_IDs (MT_Unit), False), Tag_Rel'(Tag_IDs (MT_Indexes), Tag_IDs (MT_Index), False), Tag_Rel'(Tag_IDs (MT_Target_Config), Tag_IDs (MT_Filename), False), Tag_Rel'(Tag_IDs (MT_Target_Config), Tag_IDs (MT_Messages), False), Tag_Rel'(Tag_IDs (MT_Messages), Tag_IDs (MT_Message), False), Tag_Rel'(Tag_IDs (MT_Metafiles), Tag_IDs (MT_Metafile), False), Tag_Rel'(Tag_IDs (MT_Metafile), Tag_IDs (MT_Metafile), False), Tag_Rel'(Tag_IDs (MT_Metafile), Tag_IDs (MT_Filename), False), Tag_Rel'(Tag_IDs (MT_Units_Notfound), Tag_IDs (MT_Unit), False), Tag_Rel'(Tag_IDs (MT_Warnings_Config), Tag_IDs (MT_Suppressed), False), Tag_Rel'(Tag_IDs (MT_Warnings_Config), Tag_IDs (MT_Suppressed_Pragma), False), Tag_Rel'(Tag_IDs (MT_File), Tag_IDs (MT_Units_In_File), False), Tag_Rel'(Tag_IDs (MT_File), Tag_IDs (MT_Scope), False), Tag_Rel'(Tag_IDs (MT_File), Tag_IDs (MT_Message), False), Tag_Rel'(Tag_IDs (MT_File), Tag_IDs (MT_Justifications_Section), False), Tag_Rel'(Tag_IDs (MT_Justifications_Section), Tag_IDs (MT_Full_Justifications), False), Tag_Rel'(Tag_IDs (MT_Justifications_Section), Tag_IDs (MT_Brief_Justifications), False), Tag_Rel'(Tag_IDs (MT_Full_Justifications), Tag_IDs (MT_Full_Justification), False), Tag_Rel'(Tag_IDs (MT_Units_In_File), Tag_IDs (MT_Unit), False), Tag_Rel'(Tag_IDs (MT_Results), Tag_IDs (MT_File), False)); for I in Rel_Index loop SPARK_XML.Add_Child_Tag (Schema, Relations (I).Parent, Relations (I).Child, Relations (I).Required, Tmp_Success); Handle_Schema_Error (Success => Tmp_Success, Msg => "Failure loading tag hierarchy"); exit when not Tmp_Success; end loop; end Build_Hierarchy; begin Section_Depths := Section_Depth_Array'(others => SPARK_XML.Tag_Depth'First); File_Depth := SPARK_XML.Tag_Depth'First; SPARK_XML.Init_Schema (Schema); SPARK_XML.Init_Schema_State (Schema_State); Load_Tags; Load_Attributes; Build_Hierarchy; 
end Init; ------------------------------------------------------------------------ -- Tag producing functions -- ------------------------------------------------------------------------ ------------------------------------------------------------------------ -- Producers for simple container tags. -- -- These have no attributes of their own and only contain other tags. -- -- The only exception is the Listing tag, which contains large -- -- amounts of text, and is also included. -- ------------------------------------------------------------------------ procedure Start_Section (Section : in Sections; Report : in SPARK_IO.File_Type) --# global in Schema; --# in Tag_IDs; --# in out Schema_State; --# in out Section_Depths; --# in out SPARK_IO.File_Sys; --# derives Schema_State, --# Section_Depths from *, --# Schema, --# Schema_State, --# Section, --# Tag_IDs & --# SPARK_IO.File_Sys from *, --# Report, --# Schema, --# Schema_State, --# Section, --# Tag_IDs; is Tag : SPARK_XML.Tag_ID; Status : SPARK_XML.Schema_Status; Depth : SPARK_XML.Tag_Depth; XML : E_Strings.T; begin case Section is when S_Report => Tag := Tag_IDs (MT_Report); when S_Prologue => Tag := Tag_IDs (MT_Prologue); when S_Commandline => Tag := Tag_IDs (MT_Commandline); when S_Compiler_Data => Tag := Tag_IDs (MT_Compiler_Data); when S_Cyclic_Requirements => Tag := Tag_IDs (MT_Cyclic_Requirements); when S_Indexes => Tag := Tag_IDs (MT_Indexes); when S_Target_Config => Tag := Tag_IDs (MT_Target_Config); when S_Messages => Tag := Tag_IDs (MT_Messages); when S_Meta_Files => Tag := Tag_IDs (MT_Metafiles); when S_Units_Not_Found => Tag := Tag_IDs (MT_Units_Notfound); when S_Warnings_Config => Tag := Tag_IDs (MT_Warnings_Config); when S_Results => Tag := Tag_IDs (MT_Results); when S_Listing => Tag := Tag_IDs (MT_Listing); when S_Units_In_File => Tag := Tag_IDs (MT_Units_In_File); when S_Justifications => Tag := Tag_IDs (MT_Justifications_Section); when S_Full_Justifications => Tag := Tag_IDs (MT_Full_Justifications); end case; SPARK_XML.Init_Opening_Tag_By_ID (Schema => Schema, Schema_State => Schema_State, TID => Tag, Status => Status); Handle_Error (Status => Status); if Section = S_Report then SPARK_XML.Add_Attribute_Str (Schema => Schema, Schema_State => Schema_State, Name => E_Strings.Copy_String (Str => "version"), Value => E_Strings.Copy_String (Str => Schema_Version), Status => Status); Handle_Error (Status => Status); end if; SPARK_XML.Output_Opening_Tag (Schema => Schema, Schema_State => Schema_State, XML => XML, Depth => Depth, Status => Status); Section_Depths (Section) := Depth; Handle_Error (Status => Status); E_Strings.Put_String (File => Report, E_Str => XML); end Start_Section; procedure End_Section (Section : in Sections; Report : in SPARK_IO.File_Type) --# global in Schema; --# in Section_Depths; --# in out Schema_State; --# in out SPARK_IO.File_Sys; --# derives Schema_State from *, --# Section, --# Section_Depths & --# SPARK_IO.File_Sys from *, --# Report, --# Schema, --# Schema_State, --# Section, --# Section_Depths; is Status : SPARK_XML.Schema_Status; XML : E_Strings.T; begin SPARK_XML.Close_Tag (Schema => Schema, Schema_State => Schema_State, Depth => Section_Depths (Section), XML => XML, Status => Status); Handle_Error (Status => Status); E_Strings.Put_String (File => Report, E_Str => XML); end End_Section; -- Simple tags, ones whose opening and closing tags are generated -- at the same time, with simple text contents -- E.g. 
file.txt procedure Simple_Tag (Tag : in SPARK_XML.Tag_ID; Contents : in out E_Strings.T) --# global in Schema; --# in out Schema_State; --# derives Contents, --# Schema_State from *, --# Schema, --# Schema_State, --# Tag; is Acc_XML, Tmp_XML : E_Strings.T; -- XML accumulator and temp variable. Depth : SPARK_XML.Tag_Depth; Status : SPARK_XML.Schema_Status; begin SPARK_XML.Init_Opening_Tag_By_ID (Schema => Schema, Schema_State => Schema_State, TID => Tag, Status => Status); Handle_Error (Status => Status); SPARK_XML.Output_Opening_Tag (Schema => Schema, Schema_State => Schema_State, XML => Acc_XML, Depth => Depth, Status => Status); Handle_Error (Status => Status); E_Strings.Append_Examiner_String (E_Str1 => Acc_XML, E_Str2 => SPARK_XML.Filter_String (Str => Contents)); SPARK_XML.Close_Tag (Schema => Schema, Schema_State => Schema_State, Depth => Depth, XML => Tmp_XML, Status => Status); E_Strings.Append_Examiner_String (E_Str1 => Acc_XML, E_Str2 => Tmp_XML); Handle_Error (Status => Status); Contents := Acc_XML; end Simple_Tag; procedure Filename (Plain_Output : in Boolean; File : in out E_Strings.T) --# global in Schema; --# in Tag_IDs; --# in out Schema_State; --# derives File from *, --# Plain_Output, --# Schema, --# Schema_State, --# Tag_IDs & --# Schema_State from *, --# Schema, --# Tag_IDs; is begin if Plain_Output then File := FileSystem.Just_File (Fn => File, Ext => True); end if; Simple_Tag (Tag => Tag_IDs (MT_Filename), Contents => File); end Filename; procedure Index (Plain_Output : in Boolean; Idx : in out E_Strings.T) --# global in Schema; --# in Tag_IDs; --# in out Schema_State; --# derives Idx from *, --# Plain_Output, --# Schema, --# Schema_State, --# Tag_IDs & --# Schema_State from *, --# Schema, --# Tag_IDs; is begin if Plain_Output then Idx := FileSystem.Just_File (Idx, True); end if; Simple_Tag (Tag => Tag_IDs (MT_Index), Contents => Idx); end Index; procedure Option (Opt : in out E_Strings.T) --# global in Schema; --# in Tag_IDs; --# in out Schema_State; --# derives Opt, --# Schema_State from *, --# Schema, --# Schema_State, --# Tag_IDs; is begin Simple_Tag (Tag => Tag_IDs (MT_Option), Contents => Opt); end Option; procedure Suppressed (Item : in out E_Strings.T) --# global in Schema; --# in Tag_IDs; --# in out Schema_State; --# derives Item, --# Schema_State from *, --# Schema, --# Schema_State, --# Tag_IDs; is begin Simple_Tag (Tag => Tag_IDs (MT_Suppressed), Contents => Item); end Suppressed; procedure Suppressed_Pragma (Item : in out E_Strings.T) --# global in Schema; --# in Tag_IDs; --# in out Schema_State; --# derives Item, --# Schema_State from *, --# Schema, --# Schema_State, --# Tag_IDs; is begin Simple_Tag (Tag => Tag_IDs (MT_Suppressed_Pragma), Contents => Item); end Suppressed_Pragma; --------------------------------------------- -- More complex tags that have attributes. -- --------------------------------------------- procedure Open_Compiler_Item (Item : in E_Strings.T; Depth : out SPARK_XML.Tag_Depth; Report : in SPARK_IO.File_Type) --# global in Schema; --# in Tag_IDs; --# in out Schema_State; --# in out SPARK_IO.File_Sys; --# derives Depth, --# Schema_State from Item, --# Schema, --# Schema_State, --# Tag_IDs & --# SPARK_IO.File_Sys from *, --# Item, --# Report, --# Schema, --# Schema_State, --# Tag_IDs; is Acc_XML : E_Strings.T; -- XML accumulator and temp variable. 
Stat : SPARK_XML.Schema_Status; begin SPARK_XML.Init_Opening_Tag_By_ID (Schema => Schema, Schema_State => Schema_State, TID => Tag_IDs (MT_Compiler_Item), Status => Stat); Handle_Error (Status => Stat); -- Add the attributes SPARK_XML.Add_Attribute_Str (Schema => Schema, Schema_State => Schema_State, Name => E_Strings.Copy_String (Str => "item"), Value => Item, Status => Stat); Handle_Error (Status => Stat); SPARK_XML.Output_Opening_Tag (Schema => Schema, Schema_State => Schema_State, XML => Acc_XML, Depth => Depth, Status => Stat); Handle_Error (Status => Stat); E_Strings.Put_String (File => Report, E_Str => Acc_XML); -- Output the opening tag end Open_Compiler_Item; procedure Close_Compiler_Item (Depth : in SPARK_XML.Tag_Depth; Report : in SPARK_IO.File_Type) --# global in Schema; --# in out Schema_State; --# in out SPARK_IO.File_Sys; --# derives Schema_State from *, --# Depth & --# SPARK_IO.File_Sys from *, --# Depth, --# Report, --# Schema, --# Schema_State; is Acc_XML : E_Strings.T; -- XML accumulator and temp variable. Stat : SPARK_XML.Schema_Status; begin SPARK_XML.Close_Tag (Schema => Schema, Schema_State => Schema_State, Depth => Depth, XML => Acc_XML, Status => Stat); Handle_Error (Status => Stat); E_Strings.Put_String (File => Report, E_Str => Acc_XML); end Close_Compiler_Item; procedure Compiler_Item (Item : in E_Strings.T; Val : in E_Strings.T; Report : in SPARK_IO.File_Type) --# global in Schema; --# in Tag_IDs; --# in out Schema_State; --# in out SPARK_IO.File_Sys; --# derives Schema_State from *, --# Item, --# Schema, --# Tag_IDs & --# SPARK_IO.File_Sys from *, --# Item, --# Report, --# Schema, --# Schema_State, --# Tag_IDs, --# Val; is Depth : SPARK_XML.Tag_Depth; begin Open_Compiler_Item (Item => Item, Depth => Depth, Report => Report); E_Strings.Put_String (File => Report, E_Str => SPARK_XML.Filter_String (Str => Val)); -- Output the value Close_Compiler_Item (Depth => Depth, Report => Report); end Compiler_Item; procedure Unit (Name : in E_Strings.T; Typ : in E_Strings.T; Report : in SPARK_IO.File_Type) --# global in Schema; --# in Tag_IDs; --# in out Schema_State; --# in out SPARK_IO.File_Sys; --# derives Schema_State from *, --# Name, --# Schema, --# Tag_IDs, --# Typ & --# SPARK_IO.File_Sys from *, --# Name, --# Report, --# Schema, --# Schema_State, --# Tag_IDs, --# Typ; is Acc_XML, Tmp_XML : E_Strings.T; -- XML accumulator and temp variable. 
Depth : SPARK_XML.Tag_Depth; Stat : SPARK_XML.Schema_Status; begin SPARK_XML.Init_Opening_Tag_By_ID (Schema => Schema, Schema_State => Schema_State, TID => Tag_IDs (MT_Unit), Status => Stat); Handle_Error (Status => Stat); -- Add the attributes SPARK_XML.Add_Attribute_Str (Schema => Schema, Schema_State => Schema_State, Name => E_Strings.Copy_String (Str => "name"), Value => Name, Status => Stat); Handle_Error (Status => Stat); SPARK_XML.Add_Attribute_Str (Schema => Schema, Schema_State => Schema_State, Name => E_Strings.Copy_String (Str => "type"), Value => Typ, Status => Stat); Handle_Error (Status => Stat); SPARK_XML.Output_Opening_Tag (Schema => Schema, Schema_State => Schema_State, XML => Acc_XML, Depth => Depth, Status => Stat); Handle_Error (Status => Stat); SPARK_XML.Close_Tag (Schema => Schema, Schema_State => Schema_State, Depth => Depth, XML => Tmp_XML, Status => Stat); E_Strings.Append_Examiner_String (E_Str1 => Acc_XML, E_Str2 => Tmp_XML); Handle_Error (Status => Stat); E_Strings.Put_String (File => Report, E_Str => Acc_XML); end Unit; procedure Ada_Unit (Name : in E_Strings.T; Typ : in E_Strings.T; Unit_Status : in E_Strings.T; Report : in SPARK_IO.File_Type) --# global in Schema; --# in Tag_IDs; --# in out Schema_State; --# in out SPARK_IO.File_Sys; --# derives Schema_State from *, --# Name, --# Schema, --# Tag_IDs, --# Typ, --# Unit_Status & --# SPARK_IO.File_Sys from *, --# Name, --# Report, --# Schema, --# Schema_State, --# Tag_IDs, --# Typ, --# Unit_Status; is Acc_XML : E_Strings.T; -- XML accumulator. Depth : SPARK_XML.Tag_Depth; -- Dummy variable. Stat : SPARK_XML.Schema_Status; begin SPARK_XML.Init_Opening_Tag_By_ID (Schema => Schema, Schema_State => Schema_State, TID => Tag_IDs (MT_Unit), Status => Stat); Handle_Error (Status => Stat); -- Add the attributes SPARK_XML.Add_Attribute_Str (Schema => Schema, Schema_State => Schema_State, Name => E_Strings.Copy_String (Str => "name"), Value => Name, Status => Stat); Handle_Error (Status => Stat); SPARK_XML.Add_Attribute_Str (Schema => Schema, Schema_State => Schema_State, Name => E_Strings.Copy_String (Str => "type"), Value => Typ, Status => Stat); Handle_Error (Status => Stat); SPARK_XML.Add_Attribute_Str (Schema => Schema, Schema_State => Schema_State, Name => E_Strings.Copy_String (Str => "status"), Value => Unit_Status, Status => Stat); Handle_Error (Status => Stat); SPARK_XML.Output_Opening_Tag (Schema => Schema, Schema_State => Schema_State, XML => Acc_XML, Depth => Depth, Status => Stat); Handle_Error (Status => Stat); E_Strings.Put_String (File => Report, E_Str => Acc_XML); SPARK_XML.Close_Tag (Schema => Schema, Schema_State => Schema_State, Depth => Depth, XML => Acc_XML, Status => Stat); Handle_Error (Status => Stat); E_Strings.Put_String (File => Report, E_Str => Acc_XML); end Ada_Unit; -- Depth is neither referenced or exported. --------------------------- -- Message construction. 
-- --------------------------- procedure Start_Message (Class : in E_Strings.T; Code : in Integer; Line : in Integer; Offset : in Integer; Report : in SPARK_IO.File_Type) --# global in Schema; --# in Tag_IDs; --# in out Schema_State; --# in out SPARK_IO.File_Sys; --# derives Schema_State from *, --# Class, --# Code, --# Line, --# Offset, --# Schema, --# Tag_IDs & --# SPARK_IO.File_Sys from *, --# Class, --# Code, --# Line, --# Offset, --# Report, --# Schema, --# Schema_State, --# Tag_IDs; is Depth : SPARK_XML.Tag_Depth; Stat : SPARK_XML.Schema_Status; XML : E_Strings.T; begin SPARK_XML.Init_Opening_Tag_No_Check (Schema_State => Schema_State, TID => Tag_IDs (MT_Message), Status => Stat); Handle_Error (Status => Stat); -- Add the attributes SPARK_XML.Add_Attribute_Str (Schema => Schema, Schema_State => Schema_State, Name => E_Strings.Copy_String (Str => "class"), Value => Class, Status => Stat); Handle_Error (Status => Stat); SPARK_XML.Add_Attribute_Int (Schema, Schema_State, E_Strings.Copy_String (Str => "code"), Code, Stat); Handle_Error (Status => Stat); SPARK_XML.Add_Attribute_Int (Schema, Schema_State, E_Strings.Copy_String (Str => "line"), Line, Stat); Handle_Error (Status => Stat); SPARK_XML.Add_Attribute_Int (Schema, Schema_State, E_Strings.Copy_String (Str => "offset"), Offset, Stat); Handle_Error (Status => Stat); --# accept Flow, 10, Depth, "Expected ineffective assignment to Depth"; SPARK_XML.Output_Opening_Tag (Schema => Schema, Schema_State => Schema_State, XML => XML, Depth => Depth, Status => Stat); --# end accept; Handle_Error (Status => Stat); E_Strings.Put_String (File => Report, E_Str => XML); --# accept Flow, 33, Depth, "Expected Depth to be neither referenced nor exported"; end Start_Message; procedure Symbol (Sym : in E_Strings.T; Sym_Num : in Integer; Report : in SPARK_IO.File_Type) --# global in Schema; --# in Tag_IDs; --# in out Schema_State; --# in out SPARK_IO.File_Sys; --# derives Schema_State from *, --# Schema, --# Sym_Num, --# Tag_IDs & --# SPARK_IO.File_Sys from *, --# Report, --# Schema, --# Schema_State, --# Sym, --# Sym_Num, --# Tag_IDs; is Acc_XML, Tmp_XML : E_Strings.T; -- XML accumulator and temp variable. Depth : SPARK_XML.Tag_Depth; Stat : SPARK_XML.Schema_Status; begin SPARK_XML.Init_Opening_Tag_By_ID (Schema => Schema, Schema_State => Schema_State, TID => Tag_IDs (MT_Symbol), Status => Stat); Handle_Error (Status => Stat); -- Add the attributes SPARK_XML.Add_Attribute_Int (Schema, Schema_State, E_Strings.Copy_String (Str => "id"), Sym_Num, Stat); Handle_Error (Status => Stat); SPARK_XML.Output_Opening_Tag (Schema => Schema, Schema_State => Schema_State, XML => Acc_XML, Depth => Depth, Status => Stat); Handle_Error (Status => Stat); -- Add the symbol contents. E_Strings.Append_Examiner_String (E_Str1 => Acc_XML, E_Str2 => SPARK_XML.Filter_String (Str => Sym)); -- Close the tag. 
SPARK_XML.Close_Tag (Schema => Schema, Schema_State => Schema_State, Depth => Depth, XML => Tmp_XML, Status => Stat); Handle_Error (Status => Stat); E_Strings.Append_Examiner_String (E_Str1 => Acc_XML, E_Str2 => Tmp_XML); E_Strings.Put_String (File => Report, E_Str => Acc_XML); end Symbol; -- End_Message will close the open message (of which there will be one -- in the hierarchy since only a can be placed in a procedure End_Message (Report : in SPARK_IO.File_Type) --# global in Schema; --# in Tag_IDs; --# in out Schema_State; --# in out SPARK_IO.File_Sys; --# derives Schema_State from *, --# Tag_IDs & --# SPARK_IO.File_Sys from *, --# Report, --# Schema, --# Schema_State, --# Tag_IDs; is Stat : SPARK_XML.Schema_Status; XML : E_Strings.T; begin SPARK_XML.Close_Tag_By_ID (Schema, Schema_State, Tag_IDs (MT_Message), XML, Stat); Handle_Error (Status => Stat); E_Strings.Put_String (File => Report, E_Str => XML); end End_Message; -------------------------- -- Metafile contruction -- -------------------------- procedure Start_Meta_File (Name : in E_Strings.T; Report : in SPARK_IO.File_Type) --# global in Schema; --# in Tag_IDs; --# in out Schema_State; --# in out SPARK_IO.File_Sys; --# derives Schema_State from *, --# Name, --# Schema, --# Tag_IDs & --# SPARK_IO.File_Sys from *, --# Name, --# Report, --# Schema, --# Schema_State, --# Tag_IDs; is Stat : SPARK_XML.Schema_Status; XML : E_Strings.T; Id : SPARK_XML.Tag_Depth; begin -- Generate the XML SPARK_XML.Init_Opening_Tag_By_ID (Schema => Schema, Schema_State => Schema_State, TID => Tag_IDs (MT_Metafile), Status => Stat); Handle_Error (Status => Stat); -- Add the attributes SPARK_XML.Add_Attribute_Str (Schema => Schema, Schema_State => Schema_State, Name => E_Strings.Copy_String (Str => "name"), Value => Name, Status => Stat); Handle_Error (Status => Stat); --# accept Flow, 10, Id, "Expected ineffective assignment to Id"; SPARK_XML.Output_Opening_Tag (Schema => Schema, Schema_State => Schema_State, XML => XML, Depth => Id, Status => Stat); --# end accept; Handle_Error (Status => Stat); E_Strings.Put_String (File => Report, E_Str => XML); --# accept Flow, 33, Id, "Expected Id to be neither referenced nor exported"; end Start_Meta_File; procedure End_Meta_File (Report : in SPARK_IO.File_Type) --# global in Schema; --# in Tag_IDs; --# in out Schema_State; --# in out SPARK_IO.File_Sys; --# derives Schema_State from *, --# Tag_IDs & --# SPARK_IO.File_Sys from *, --# Report, --# Schema, --# Schema_State, --# Tag_IDs; is Stat : SPARK_XML.Schema_Status; XML : E_Strings.T; begin SPARK_XML.Close_Top_Tag_By_ID (Schema, Schema_State, Tag_IDs (MT_Metafile), XML, Stat); Handle_Error (Status => Stat); E_Strings.Put_String (File => Report, E_Str => XML); end End_Meta_File; ------------------------------ -- Results Section contents -- ------------------------------ procedure Start_File (Plain_Output : in Boolean; F_Name : in E_Strings.T; Report : in SPARK_IO.File_Type) --# global in Schema; --# in Tag_IDs; --# in out Schema_State; --# in out SPARK_IO.File_Sys; --# out File_Depth; --# derives File_Depth, --# Schema_State from F_Name, --# Plain_Output, --# Schema, --# Schema_State, --# Tag_IDs & --# SPARK_IO.File_Sys from *, --# F_Name, --# Plain_Output, --# Report, --# Schema, --# Schema_State, --# Tag_IDs; is Stat : SPARK_XML.Schema_Status; XML : E_Strings.T; begin SPARK_XML.Init_Opening_Tag_No_Check (Schema_State => Schema_State, TID => Tag_IDs (MT_File), Status => Stat); Handle_Error (Status => Stat); -- Add the attributes if Plain_Output then 
SPARK_XML.Add_Attribute_Str (Schema => Schema, Schema_State => Schema_State, Name => E_Strings.Copy_String (Str => "filename"), Value => FileSystem.Just_File (Fn => F_Name, Ext => True), Status => Stat); else SPARK_XML.Add_Attribute_Str (Schema => Schema, Schema_State => Schema_State, Name => E_Strings.Copy_String (Str => "filename"), Value => F_Name, Status => Stat); end if; Handle_Error (Status => Stat); SPARK_XML.Output_Opening_Tag (Schema => Schema, Schema_State => Schema_State, XML => XML, Depth => File_Depth, Status => Stat); Handle_Error (Status => Stat); E_Strings.Put_String (File => Report, E_Str => XML); end Start_File; procedure End_File (Report : in SPARK_IO.File_Type) --# global in File_Depth; --# in Schema; --# in out Schema_State; --# in out SPARK_IO.File_Sys; --# derives Schema_State from *, --# File_Depth & --# SPARK_IO.File_Sys from *, --# File_Depth, --# Report, --# Schema, --# Schema_State; is Stat : SPARK_XML.Schema_Status; XML : E_Strings.T; begin -- Close the tag. SPARK_XML.Close_Tag (Schema => Schema, Schema_State => Schema_State, Depth => File_Depth, XML => XML, Status => Stat); Handle_Error (Status => Stat); E_Strings.Put_String (File => Report, E_Str => XML); end End_File; -------------------------- -- Justification output -- -------------------------- procedure Brief_Justifications (Matched : in Natural; Unmatched : in Natural; Report : in SPARK_IO.File_Type) --# global in Schema; --# in Tag_IDs; --# in out Schema_State; --# in out SPARK_IO.File_Sys; --# derives Schema_State from *, --# Matched, --# Schema, --# Tag_IDs, --# Unmatched & --# SPARK_IO.File_Sys from *, --# Matched, --# Report, --# Schema, --# Schema_State, --# Tag_IDs, --# Unmatched; is Depth : SPARK_XML.Tag_Depth; Stat : SPARK_XML.Schema_Status; XML : E_Strings.T; begin SPARK_XML.Init_Opening_Tag_By_ID (Schema => Schema, Schema_State => Schema_State, TID => Tag_IDs (MT_Brief_Justifications), Status => Stat); Handle_Error (Status => Stat); -- Add the attributes SPARK_XML.Add_Attribute_Int (Schema, Schema_State, E_Strings.Copy_String (Str => "matched"), Matched, Stat); Handle_Error (Status => Stat); if Unmatched > 0 then SPARK_XML.Add_Attribute_Int (Schema, Schema_State, E_Strings.Copy_String (Str => "unmatched"), Unmatched, Stat); Handle_Error (Status => Stat); end if; --# accept Flow, 10, Depth, "Expected ineffective assignment to Depth"; SPARK_XML.Output_Opening_Tag (Schema => Schema, Schema_State => Schema_State, XML => XML, Depth => Depth, Status => Stat); --# end accept; Handle_Error (Status => Stat); E_Strings.Put_String (File => Report, E_Str => XML); SPARK_XML.Close_Tag_By_ID (Schema, Schema_State, Tag_IDs (MT_Brief_Justifications), XML, Stat); Handle_Error (Status => Stat); E_Strings.Put_String (File => Report, E_Str => XML); --# accept Flow, 33, Depth, "Expected Depth to be neither referenced nor exported"; end Brief_Justifications; procedure Start_Full_Justification (Class : in E_Strings.T; Code : in Integer; Line_From : in Integer; Line_To : in E_Strings.T; Match_No : in Integer; Match_Line : in Integer; Report : in SPARK_IO.File_Type) --# global in Schema; --# in Tag_IDs; --# in out Schema_State; --# in out SPARK_IO.File_Sys; --# derives Schema_State from *, --# Class, --# Code, --# Line_From, --# Line_To, --# Match_Line, --# Match_No, --# Schema, --# Tag_IDs & --# SPARK_IO.File_Sys from *, --# Class, --# Code, --# Line_From, --# Line_To, --# Match_Line, --# Match_No, --# Report, --# Schema, --# Schema_State, --# Tag_IDs; is Depth : SPARK_XML.Tag_Depth; Stat : SPARK_XML.Schema_Status; 
XML : E_Strings.T; begin SPARK_XML.Init_Opening_Tag_By_ID (Schema => Schema, Schema_State => Schema_State, TID => Tag_IDs (MT_Full_Justification), Status => Stat); Handle_Error (Status => Stat); -- Add the attributes SPARK_XML.Add_Attribute_Str (Schema => Schema, Schema_State => Schema_State, Name => E_Strings.Copy_String (Str => "class"), Value => Class, Status => Stat); Handle_Error (Status => Stat); SPARK_XML.Add_Attribute_Int (Schema, Schema_State, E_Strings.Copy_String (Str => "code"), Code, Stat); Handle_Error (Status => Stat); SPARK_XML.Add_Attribute_Int (Schema, Schema_State, E_Strings.Copy_String (Str => "line_from"), Line_From, Stat); Handle_Error (Status => Stat); SPARK_XML.Add_Attribute_Str (Schema => Schema, Schema_State => Schema_State, Name => E_Strings.Copy_String (Str => "line_to"), Value => Line_To, Status => Stat); Handle_Error (Status => Stat); SPARK_XML.Add_Attribute_Int (Schema, Schema_State, E_Strings.Copy_String (Str => "match_no"), Match_No, Stat); Handle_Error (Status => Stat); SPARK_XML.Add_Attribute_Int (Schema, Schema_State, E_Strings.Copy_String (Str => "match_line"), Match_Line, Stat); Handle_Error (Status => Stat); --# accept Flow, 10, Depth, "Expected ineffective assignment to Depth"; SPARK_XML.Output_Opening_Tag (Schema => Schema, Schema_State => Schema_State, XML => XML, Depth => Depth, Status => Stat); --# end accept; Handle_Error (Status => Stat); E_Strings.Put_String (File => Report, E_Str => XML); --# accept Flow, 33, Depth, "Expected Depth to be neither referenced nor exported"; end Start_Full_Justification; procedure End_Full_Justification (Report : in SPARK_IO.File_Type) --# global in Schema; --# in Tag_IDs; --# in out Schema_State; --# in out SPARK_IO.File_Sys; --# derives Schema_State from *, --# Tag_IDs & --# SPARK_IO.File_Sys from *, --# Report, --# Schema, --# Schema_State, --# Tag_IDs; is Stat : SPARK_XML.Schema_Status; XML : E_Strings.T; begin SPARK_XML.Close_Tag_By_ID (Schema, Schema_State, Tag_IDs (MT_Full_Justification), XML, Stat); Handle_Error (Status => Stat); E_Strings.Put_String (File => Report, E_Str => XML); end End_Full_Justification; end XMLReport; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_named_association_rep.adb0000644000175000017500000003252211753202336026263 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Walk_Expression_P) procedure Wf_Named_Association_Rep (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; Heap_Param : in out Lists.List_Heap) is Name_Exp, Field_Name, Exp_Result : Sem.Exp_Record; Doing_Record : Boolean; Expected_Type : Dictionary.Symbol; Error_Found : Boolean := False; Next_Node : STree.SyntaxNode; --------------------------------------------------------- function Expression_Location (Node : STree.SyntaxNode) return STree.SyntaxNode --# global in STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.named_association_rep or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_named_association_rep; --# return Exp_Node => (STree.Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.aggregate or --# STree.Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.expression or --# STree.Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.annotation_aggregate or --# STree.Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.annotation_expression); is Exp_Node : STree.SyntaxNode; begin Exp_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Exp_Node = named_association_rep OR aggregate_choice_rep OR -- annotation_named_association_rep OR annotation_aggregate_choice_rep if STree.Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.named_association_rep or else STree.Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.annotation_named_association_rep then -- ASSUME Exp_Node = named_association_rep OR annotation_named_association_rep Exp_Node := STree.Next_Sibling (Current_Node => Exp_Node); elsif STree.Syntax_Node_Type (Node => Exp_Node) /= SP_Symbols.aggregate_choice_rep and then STree.Syntax_Node_Type (Node => Exp_Node) /= SP_Symbols.annotation_aggregate_choice_rep then Exp_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Exp_Node = named_association_rep OR aggregate_choice_rep OR " & "annotation_named_association_rep OR annotation_aggregate_choice_rep in Doing_Embedded_Aggregate"); end if; -- ASSUME Exp_Node = aggregate_choice_rep OR annotation_aggregate_choice_rep SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.aggregate_choice_rep or else STree.Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.annotation_aggregate_choice_rep, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Exp_Node = aggregate_choice_rep OR annotation_aggregate_choice_rep in Doing_Embedded_Aggregate"); Exp_Node := STree.Next_Sibling (Current_Node => Exp_Node); -- ASSUME Exp_Node = aggregate_or_expression OR annotation_aggregate_or_expression SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.aggregate_or_expression or else STree.Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.annotation_aggregate_or_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Exp_Node = aggregate_or_expression OR annotation_aggregate_or_expression in Doing_Embedded_Aggregate"); Exp_Node := STree.Child_Node (Current_Node => Exp_Node); -- ASSUME Exp_Node = aggregate OR expression OR -- annotation_aggregate OR annotation_expression SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.aggregate or else STree.Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.expression or else STree.Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.annotation_aggregate or else 
STree.Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.annotation_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Exp_Node = aggregate OR expression OR " & "annotation_aggregate OR annotation_expression in Doing_Embedded_Aggregate"); return Exp_Node; end Expression_Location; --------------------------------------------------------- function Doing_Embedded_Aggregate (Node : STree.SyntaxNode) return Boolean --# global in STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.named_association_rep or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_named_association_rep; is Exp_Node : STree.SyntaxNode; begin Exp_Node := Expression_Location (Node => Node); --# check STree.Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.aggregate or --# STree.Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.expression or --# STree.Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.annotation_aggregate or --# STree.Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.annotation_expression; return STree.Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.aggregate or else STree.Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.annotation_aggregate; end Doing_Embedded_Aggregate; --------------------------------------------------------- procedure Check_Record_Completeness (Name_Exp : in out Sem.Exp_Record; Node : in STree.SyntaxNode; Heap_Param : in out Lists.List_Heap; Error_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Heap_Param, --# LexTokenManager.State, --# Name_Exp, --# Node, --# SPARK_IO.File_Sys, --# STree.Table & --# Error_Found from *, --# Dictionary.Dict, --# Heap_Param, --# LexTokenManager.State, --# Name_Exp & --# Heap_Param from *, --# LexTokenManager.State, --# Name_Exp & --# Name_Exp from *; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.named_association_rep or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_named_association_rep; is It : Dictionary.Iterator; Field_Str : LexTokenManager.Lex_String; Error_Pos : LexTokenManager.Token_Position; Ptr : Lists.List; begin Error_Pos := STree.Node_Position (Node => Expression_Location (Node => Node)); if Dictionary.TypeIsExtendedTagged (Name_Exp.Type_Symbol) then It := Dictionary.FirstExtendedRecordComponent (Name_Exp.Type_Symbol); else It := Dictionary.FirstRecordComponent (Name_Exp.Type_Symbol); end if; while not Dictionary.IsNullIterator (It) loop Field_Str := Dictionary.GetSimpleName (Dictionary.CurrentSymbol (It)); if not Lists.Is_Member (Heap => Heap_Param, The_List => Name_Exp.Param_List, Str => Field_Str) then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 104, Reference => ErrorHandler.No_Reference, Position => Error_Pos, Id_Str => Field_Str); end if; It := Dictionary.NextSymbol (It); end loop; Ptr := Name_Exp.Param_List; Dispose_Of_Name_List (List => Ptr, Heap_Param => Heap_Param); Name_Exp.Param_List := Ptr; end Check_Record_Completeness; begin -- Wf_Named_Association_Rep if not Doing_Embedded_Aggregate (Node => Node) then Exp_Stack.Pop (Item => Exp_Result, Stack => E_Stack); Exp_Stack.Pop (Item => Name_Exp, Stack => E_Stack); if Name_Exp.Sort = Sem.Is_Parameter_Name then Doing_Record := True; Field_Name := 
Name_Exp; Exp_Stack.Pop (Item => Name_Exp, Stack => E_Stack); else Doing_Record := False; Field_Name := Sem.Unknown_Type_Record; -- actually ineffective but removes spurious errs end if; if Dictionary.IsUnknownTypeMark (Name_Exp.Type_Symbol) then -- all we have been doing in this case is checking internal -- consistency of expression. We can't actually do anything -- with the result because the aggregate type is unknown. null; else -- we are dealing with an array or record if Doing_Record then if not Dictionary.Is_Null_Symbol (Field_Name.Other_Symbol) then Expected_Type := Dictionary.GetType (Field_Name.Other_Symbol); STree.Add_Node_Symbol (Node => Node, Sym => Expected_Type); Sem.Assignment_Check (Position => STree.Node_Position (Node => Expression_Location (Node => Node)), Scope => Scope, Target_Type => Expected_Type, Exp_Result => Exp_Result); Name_Exp.Is_Constant := Name_Exp.Is_Constant and then Exp_Result.Is_Constant; end if; Next_Node := STree.Next_Sibling (Current_Node => Node); -- ASSUME Next_Node = aggregate_or_expression OR aggregate_choice_rep OR NULL OR -- annotation_aggregate_or_expression OR annotation_aggregate_choice_rep if Next_Node = STree.NullNode then -- ASSUME Next_Node = NULL -- this is the last named association so we need to check that -- all fields have been given a value Check_Record_Completeness (Name_Exp => Name_Exp, Node => Node, Heap_Param => Heap_Param, Error_Found => Error_Found); elsif STree.Syntax_Node_Type (Node => Next_Node) /= SP_Symbols.aggregate_or_expression and then STree.Syntax_Node_Type (Node => Next_Node) /= SP_Symbols.aggregate_choice_rep and then STree.Syntax_Node_Type (Node => Next_Node) /= SP_Symbols.annotation_aggregate_or_expression and then STree.Syntax_Node_Type (Node => Next_Node) /= SP_Symbols.annotation_aggregate_choice_rep then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = aggregate_or_expression OR aggregate_choice_rep OR NULL OR " & "annotation_aggregate_or_expression OR annotation_aggregate_choice_rep " & "in Wf_Named_Association_Rep"); end if; else -- must be array Expected_Type := Dictionary.GetArrayComponent (Name_Exp.Type_Symbol); STree.Add_Node_Symbol (Node => Node, Sym => Expected_Type); Sem.Assignment_Check (Position => STree.Node_Position (Node => Expression_Location (Node => Node)), Scope => Scope, Target_Type => Expected_Type, Exp_Result => Exp_Result); Name_Exp.Is_Constant := Name_Exp.Is_Constant and then Exp_Result.Is_Constant; end if; end if; Name_Exp.Errors_In_Expression := Error_Found or else Name_Exp.Errors_In_Expression or else Exp_Result.Errors_In_Expression; Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); end if; end Wf_Named_Association_Rep; ././@LongLink0000000000000000000000000000015200000000000011563 Lustar rootrootspark-2012.0.deb/examiner/sem-wf_package_declaration-wf_package_specification-check_types_can_be_used.adbspark-2012.0.deb/examiner/sem-wf_package_declaration-wf_package_specification-check_types_can_be_use0000644000175000017500000005124711753202336033052 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Wf_Package_Declaration.Wf_Package_Specification) procedure Check_Types_Can_Be_Used (Pack_Sym : in Dictionary.Symbol; Err_Node_Pos : in LexTokenManager.Token_Position) is Private_Type_It : Dictionary.Iterator; Private_Type : Dictionary.Symbol; ------------------------------------------------------------------------------------------------------ -- main procedures ------------------------------------------------------------------------------------------------------ procedure Check_One_Private_Type (Pack_Sym, Sym : in Dictionary.Symbol; Err_Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Err_Node_Pos, --# LexTokenManager.State, --# Pack_Sym, --# SPARK_IO.File_Sys, --# Sym; is Ok : Boolean; -- Following function detects a special case and is used by both InitializingProc and -- InitializingFuncExists. The idea is that although we are generally looking for -- a subprogram that exports soemthing of the private type without importing it such -- an import is acceptable if it takes the form of a global which is an initialized -- (or mode IN) own variables function Import_Is_Initialized_Or_Mode_In_Own_Var (Pack_Sym, Import_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return Dictionary.IsOwnVariable (Import_Sym) and then Dictionary.Packages_Are_Equal (Left_Symbol => Dictionary.GetOwner (Import_Sym), Right_Symbol => Pack_Sym) and then (Dictionary.OwnVariableIsInitialized (Import_Sym) or else (Dictionary.GetOwnVariableOrConstituentMode (Import_Sym) = Dictionary.InMode)); end Import_Is_Initialized_Or_Mode_In_Own_Var; ------------------------------------------------------------------------------------------------------ -- search for constructor procedures ------------------------------------------------------------------------------------------------------ -- function used by both Initializing_Procedure_Exists and Initializing_Protected_Procedure_Exists function Is_Suitable_Procedure (Pack_Sym, Subprog_Sym, Type_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is function Type_Is_Exported (Subprog_Sym, Type_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is Result : Boolean := False; Export_It : Dictionary.Iterator; Export_Sym : Dictionary.Symbol; begin Export_It := Dictionary.FirstExport (Dictionary.IsAbstract, Subprog_Sym); while not Dictionary.IsNullIterator (Export_It) loop Export_Sym := Dictionary.CurrentSymbol (Export_It); Result := Dictionary.Types_Are_Equal (Left_Symbol => Dictionary.GetType (Export_Sym), Right_Symbol => Type_Sym, Full_Range_Subtype => False) and then not Dictionary.IsImport (Dictionary.IsAbstract, -- check for IN OUT case Subprog_Sym, Export_Sym); exit 
when Result; Export_It := Dictionary.NextSymbol (Export_It); end loop; return Result; end Type_Is_Exported; function Type_Is_Imported (Pack_Sym, Subprog_Sym, Type_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is Result : Boolean := False; Import_It : Dictionary.Iterator; Import_Sym : Dictionary.Symbol; begin Import_It := Dictionary.FirstImport (Dictionary.IsAbstract, Subprog_Sym); while not Dictionary.IsNullIterator (Import_It) loop Import_Sym := Dictionary.CurrentSymbol (Import_It); Result := Dictionary.Types_Are_Equal (Left_Symbol => Dictionary.GetType (Import_Sym), Right_Symbol => Type_Sym, Full_Range_Subtype => False) and then not Import_Is_Initialized_Or_Mode_In_Own_Var (Pack_Sym => Pack_Sym, Import_Sym => Import_Sym); exit when Result; Import_It := Dictionary.NextSymbol (Import_It); end loop; return Result; end Type_Is_Imported; begin -- Is_Suitable_Procedure return Dictionary.IsProcedure (Subprog_Sym) and then Type_Is_Exported (Subprog_Sym => Subprog_Sym, Type_Sym => Type_Sym) and then not Type_Is_Imported (Pack_Sym => Pack_Sym, Subprog_Sym => Subprog_Sym, Type_Sym => Type_Sym); end Is_Suitable_Procedure; ------------------------------------------------------------------------------------------------------ -- search for constructor functions ------------------------------------------------------------------------------------------------------ -- function used by Initializing_Function_Exists and Initializing_Protected_Function_Exists function Is_Suitable_Function (Pack_Sym, Subprog_Sym, Type_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is function Type_Is_Parameter (Subprog_Sym, Type_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is Result : Boolean := False; Import_It : Dictionary.Iterator; Import_Sym : Dictionary.Symbol; begin Import_It := Dictionary.FirstSubprogramParameter (Subprog_Sym); while not Dictionary.IsNullIterator (Import_It) loop Import_Sym := Dictionary.CurrentSymbol (Import_It); Result := Dictionary.Types_Are_Equal (Left_Symbol => Dictionary.GetType (Import_Sym), Right_Symbol => Type_Sym, Full_Range_Subtype => False); exit when Result; Import_It := Dictionary.NextSymbol (Import_It); end loop; return Result; end Type_Is_Parameter; function Type_Is_Global (Pack_Sym, Subprog_Sym, Type_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is Result : Boolean := False; Import_It : Dictionary.Iterator; Import_Sym : Dictionary.Symbol; begin Import_It := Dictionary.FirstGlobalVariable (Dictionary.IsAbstract, Subprog_Sym); while not Dictionary.IsNullIterator (Import_It) loop Import_Sym := Dictionary.CurrentSymbol (Import_It); Result := Dictionary.Types_Are_Equal (Left_Symbol => Dictionary.GetType (Import_Sym), Right_Symbol => Type_Sym, Full_Range_Subtype => False) and then not Import_Is_Initialized_Or_Mode_In_Own_Var (Pack_Sym => Pack_Sym, Import_Sym => Import_Sym); exit when Result; Import_It := Dictionary.NextSymbol (Import_It); end loop; return Result; end Type_Is_Global; begin -- Is_Suitable_Function return Dictionary.IsFunction (Subprog_Sym) and then Dictionary.Types_Are_Equal (Left_Symbol => Dictionary.GetType (Subprog_Sym), Right_Symbol => Type_Sym, Full_Range_Subtype => False) and then not Type_Is_Parameter (Subprog_Sym => Subprog_Sym, Type_Sym => Type_Sym) and then not Type_Is_Global (Pack_Sym => Pack_Sym, Subprog_Sym => Subprog_Sym, Type_Sym => Type_Sym); end Is_Suitable_Function; 
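-- Illustrative sketch of the kind of user code the predicates above are looking
-- for; the names Stacks, Stack, Create and Empty are hypothetical and chosen
-- only for illustration.
--
--    package Stacks is
--       type Stack is private;              -- the private type being checked
--       procedure Create (S : out Stack);   -- accepted by Is_Suitable_Procedure:
--                                           -- exports a Stack, imports none
--       function Empty return Stack;        -- accepted by Is_Suitable_Function:
--                                           -- returns a Stack and has no Stack
--                                           -- parameter or global
--    private
--       type Stack is record ... end record;
--    end Stacks;
--
-- A procedure taking the type as "in out" is rejected, because the export is
-- also an import; an import that is an initialized or mode-in own variable of
-- the enclosing package is tolerated, as explained in the comment above
-- Import_Is_Initialized_Or_Mode_In_Own_Var.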
------------------------------------------------------------------------------------------------------ -- search for constructor constants ------------------------------------------------------------------------------------------------------ function Initializing_Procedure_Exists (Pack_Sym, Type_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is function Initializing_Procedure_Exists_Local (Subprog_It : Dictionary.Iterator; Pack_Sym, Type_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is It : Dictionary.Iterator; Result : Boolean := False; Subprog_Sym : Dictionary.Symbol; begin It := Subprog_It; while not Dictionary.IsNullIterator (It) loop Subprog_Sym := Dictionary.CurrentSymbol (It); Result := Is_Suitable_Procedure (Pack_Sym => Pack_Sym, Subprog_Sym => Subprog_Sym, Type_Sym => Type_Sym); exit when Result; It := Dictionary.NextSymbol (It); end loop; return Result; end Initializing_Procedure_Exists_Local; begin -- Initializing_Procedure_Exists return Initializing_Procedure_Exists_Local (Subprog_It => Dictionary.First_Visible_Subprogram (The_Package_Or_Type => Pack_Sym), Pack_Sym => Pack_Sym, Type_Sym => Type_Sym) or else Initializing_Procedure_Exists_Local (Subprog_It => Dictionary.First_Private_Subprogram (The_Package => Pack_Sym), Pack_Sym => Pack_Sym, Type_Sym => Type_Sym); end Initializing_Procedure_Exists; function Initializing_Protected_Procedure_Exists (Pack_Sym, Type_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is Result : Boolean := False; procedure Check_Ops_In_Protected_Types (Pack_Sym, Type_Sym : in Dictionary.Symbol; It : in Dictionary.Iterator; Result : in out Boolean) --# global in Dictionary.Dict; --# derives Result from *, --# Dictionary.Dict, --# It, --# Pack_Sym, --# Type_Sym; is Op_It, Type_It : Dictionary.Iterator; begin Type_It := It; while not Dictionary.IsNullIterator (Type_It) loop Op_It := Dictionary.First_Visible_Subprogram (The_Package_Or_Type => Dictionary.CurrentSymbol (Type_It)); while not Dictionary.IsNullIterator (Op_It) loop Result := Is_Suitable_Procedure (Pack_Sym => Pack_Sym, Subprog_Sym => Dictionary.CurrentSymbol (Op_It), Type_Sym => Type_Sym); exit when Result; Op_It := Dictionary.NextSymbol (Op_It); end loop; exit when Result; Type_It := Dictionary.NextSymbol (Type_It); end loop; end Check_Ops_In_Protected_Types; begin -- Initializing_Protected_Procedure_Exists Check_Ops_In_Protected_Types (Pack_Sym => Pack_Sym, Type_Sym => Type_Sym, It => Dictionary.First_Visible_Protected_Type (The_Package => Pack_Sym), Result => Result); if not Result then Check_Ops_In_Protected_Types (Pack_Sym => Pack_Sym, Type_Sym => Type_Sym, It => Dictionary.First_Private_Protected_Type (The_Package => Pack_Sym), Result => Result); end if; return Result; end Initializing_Protected_Procedure_Exists; function Initializing_Function_Exists (Pack_Sym, Type_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is function Initializing_Function_Exists_Local (Subprog_It : Dictionary.Iterator; Pack_Sym, Type_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is Result : Boolean := False; Subprog_Sym : Dictionary.Symbol; It : Dictionary.Iterator; begin It := Subprog_It; while not Dictionary.IsNullIterator (It) loop Subprog_Sym := Dictionary.CurrentSymbol (It); Result := Is_Suitable_Function (Pack_Sym => Pack_Sym, Subprog_Sym => Subprog_Sym, Type_Sym => Type_Sym); exit when Result; It := Dictionary.NextSymbol (It); end loop; return Result; end Initializing_Function_Exists_Local; 
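-- Illustrative note: the body below searches the visible part of the package
-- specification first and then its private part, so a constructor function
-- declared only in the private part still satisfies this check.  A hypothetical
-- example (names are illustrative only):
--
--    package Stacks is
--       type Stack is private;
--    private
--       function Initial_Stack return Stack;   -- found via First_Private_Subprogram
--       ...
--    end Stacks;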
begin -- Initializing_Function_Exists return Initializing_Function_Exists_Local (Subprog_It => Dictionary.First_Visible_Subprogram (The_Package_Or_Type => Pack_Sym), Pack_Sym => Pack_Sym, Type_Sym => Type_Sym) or else Initializing_Function_Exists_Local (Subprog_It => Dictionary.First_Private_Subprogram (The_Package => Pack_Sym), Pack_Sym => Pack_Sym, Type_Sym => Type_Sym); end Initializing_Function_Exists; function Initializing_Protected_Function_Exists (Pack_Sym, Type_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is Result : Boolean := False; procedure Check_Ops_In_Protected_Types (Pack_Sym, Type_Sym : in Dictionary.Symbol; It : in Dictionary.Iterator; Result : in out Boolean) --# global in Dictionary.Dict; --# derives Result from *, --# Dictionary.Dict, --# It, --# Pack_Sym, --# Type_Sym; is Op_It, Type_It : Dictionary.Iterator; begin Type_It := It; while not Dictionary.IsNullIterator (Type_It) loop Op_It := Dictionary.First_Visible_Subprogram (The_Package_Or_Type => Dictionary.CurrentSymbol (Type_It)); while not Dictionary.IsNullIterator (Op_It) loop Result := Is_Suitable_Function (Pack_Sym => Pack_Sym, Subprog_Sym => Dictionary.CurrentSymbol (Op_It), Type_Sym => Type_Sym); exit when Result; Op_It := Dictionary.NextSymbol (Op_It); end loop; exit when Result; Type_It := Dictionary.NextSymbol (Type_It); end loop; end Check_Ops_In_Protected_Types; begin -- Initializing_Protected_Function_Exists Check_Ops_In_Protected_Types (Pack_Sym => Pack_Sym, Type_Sym => Type_Sym, It => Dictionary.First_Visible_Protected_Type (The_Package => Pack_Sym), Result => Result); if not Result then Check_Ops_In_Protected_Types (Pack_Sym => Pack_Sym, Type_Sym => Type_Sym, It => Dictionary.First_Private_Protected_Type (The_Package => Pack_Sym), Result => Result); end if; return Result; end Initializing_Protected_Function_Exists; function Initializing_Constant_Exists (Pack_Sym, Type_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is DefConIt : Dictionary.Iterator; Result : Boolean := False; begin DefConIt := Dictionary.First_Deferred_Constant (The_Package => Pack_Sym); while not Dictionary.IsNullIterator (DefConIt) loop Result := Dictionary.Types_Are_Equal (Left_Symbol => Dictionary.GetType (Dictionary.CurrentSymbol (DefConIt)), Right_Symbol => Type_Sym, Full_Range_Subtype => False); exit when Result; DefConIt := Dictionary.NextSymbol (DefConIt); end loop; return Result; end Initializing_Constant_Exists; begin -- Check_One_Private_Type if Dictionary.IsLimitedPrivateType (Sym) then Ok := Initializing_Procedure_Exists (Pack_Sym => Pack_Sym, Type_Sym => Sym) or else Initializing_Protected_Procedure_Exists (Pack_Sym => Pack_Sym, Type_Sym => Sym); else -- private, not limited Ok := Initializing_Procedure_Exists (Pack_Sym => Pack_Sym, Type_Sym => Sym) or else Initializing_Protected_Procedure_Exists (Pack_Sym => Pack_Sym, Type_Sym => Sym) or else Initializing_Function_Exists (Pack_Sym => Pack_Sym, Type_Sym => Sym) or else Initializing_Protected_Function_Exists (Pack_Sym => Pack_Sym, Type_Sym => Sym) or else Initializing_Constant_Exists (Pack_Sym => Pack_Sym, Type_Sym => Sym); end if; if not Ok then case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => ErrorHandler.Semantic_Warning (Err_Num => 397, Position => Err_Node_Pos, Id_Str => Dictionary.GetSimpleName (Sym)); when CommandLineData.SPARK95_Onwards => -- SPARK 95 onwards, weaker warning because of child packages ErrorHandler.Semantic_Warning (Err_Num => 394, Position => Err_Node_Pos, Id_Str => 
Dictionary.GetSimpleName (Sym)); end case; end if; end Check_One_Private_Type; begin -- Check_Types_Can_Be_Used Private_Type_It := Dictionary.First_Private_Type (The_Package => Pack_Sym); while not Dictionary.IsNullIterator (Private_Type_It) -- exit when no more private types loop Private_Type := Dictionary.CurrentSymbol (Private_Type_It); Check_One_Private_Type (Pack_Sym => Pack_Sym, Sym => Private_Type, Err_Node_Pos => Err_Node_Pos); Private_Type_It := Dictionary.NextSymbol (Private_Type_It); end loop; end Check_Types_Can_Be_Used; spark-2012.0.deb/examiner/sem-get_subprogram_anno_key_nodes.adb0000644000175000017500000001661611753202336023610 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) procedure Get_Subprogram_Anno_Key_Nodes (Node : in STree.SyntaxNode; Global_Node : out STree.SyntaxNode; Dependency_Node : out STree.SyntaxNode; Declare_Node : out STree.SyntaxNode; Constraint_Node : out STree.SyntaxNode) is begin Constraint_Node := Child_Node (Current_Node => Node); -- ASSUME Constraint_Node = moded_global_definition OR dependency_relation OR declare_annotation OR -- procedure_constraint OR function_constraint if Syntax_Node_Type (Node => Constraint_Node) = SP_Symbols.function_constraint or else Syntax_Node_Type (Node => Constraint_Node) = SP_Symbols.procedure_constraint then -- ASSUME Constraint_Node = function_constraint OR procedure_constraint Global_Node := STree.NullNode; Dependency_Node := STree.NullNode; Declare_Node := STree.NullNode; -- only a constraint found elsif Syntax_Node_Type (Node => Constraint_Node) = SP_Symbols.declare_annotation then -- ASSUME Constraint_Node = declare_annotation Global_Node := STree.NullNode; Dependency_Node := STree.NullNode; Declare_Node := Constraint_Node; Constraint_Node := Last_Sibling_Of (Start_Node => Constraint_Node); elsif Syntax_Node_Type (Node => Constraint_Node) = SP_Symbols.dependency_relation then -- ASSUME Constraint_Node = dependency_relation Global_Node := STree.NullNode; Dependency_Node := Constraint_Node; Constraint_Node := Last_Sibling_Of (Start_Node => Constraint_Node); Declare_Node := Next_Sibling (Current_Node => Dependency_Node); -- ASSUME Declare_Node = declare_annotation OR procedure_constraint if Syntax_Node_Type (Node => Declare_Node) = SP_Symbols.procedure_constraint then -- ASSUME Declare_Node = procedure_constraint Declare_Node := STree.NullNode; elsif Syntax_Node_Type (Node => Declare_Node) /= SP_Symbols.declare_annotation then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Declare_Node = declare_annotation OR procedure_constraint in 
Get_Subprogram_Anno_Key_Nodes"); end if; elsif Syntax_Node_Type (Node => Constraint_Node) = SP_Symbols.moded_global_definition then -- ASSUME Constraint_Node = moded_global_definition Global_Node := Constraint_Node; Constraint_Node := Last_Sibling_Of (Start_Node => Constraint_Node); Dependency_Node := Next_Sibling (Current_Node => Global_Node); -- ASSUME Dependency_Node = dependency_relation OR declare_annotation OR -- procedure_constraint OR function_constraint if Syntax_Node_Type (Node => Dependency_Node) = SP_Symbols.procedure_constraint or else Syntax_Node_Type (Node => Dependency_Node) = SP_Symbols.function_constraint then -- ASSUME Dependency_Node = procedure_constraint OR function_constraint Dependency_Node := STree.NullNode; Declare_Node := STree.NullNode; elsif Syntax_Node_Type (Node => Dependency_Node) = SP_Symbols.declare_annotation then -- ASSUME Dependency_Node = declare_annotation Declare_Node := Dependency_Node; Dependency_Node := STree.NullNode; elsif Syntax_Node_Type (Node => Dependency_Node) = SP_Symbols.dependency_relation then -- ASSUME Dependency_Node = dependency_relation Declare_Node := Next_Sibling (Current_Node => Dependency_Node); -- ASSUME Declare_Node = declare_annotation OR procedure_constraint if Syntax_Node_Type (Node => Declare_Node) = SP_Symbols.procedure_constraint then -- ASSUME Declare_Node = procedure_constraint Declare_Node := STree.NullNode; elsif Syntax_Node_Type (Node => Declare_Node) /= SP_Symbols.declare_annotation then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Declare_Node = declare_annotation OR procedure_constraint in Get_Subprogram_Anno_Key_Nodes"); end if; else Declare_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Dependency_Node = dependency_relation OR declare_annotation OR " & "procedure_constraint OR function_constraint in Get_Subprogram_Anno_Key_Nodes"); end if; else Global_Node := STree.NullNode; Dependency_Node := STree.NullNode; Declare_Node := STree.NullNode; Constraint_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Constraint_Node = moded_global_definition OR dependency_relation OR " & "declare_annotation OR procedure_constraint OR function_constraint in Get_Subprogram_Anno_Key_Nodes"); end if; -- ASSUME Global_Node = moded_global_definition OR NULL SystemErrors.RT_Assert (C => Global_Node = STree.NullNode or else Syntax_Node_Type (Node => Global_Node) = SP_Symbols.moded_global_definition, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Global_Node = moded_global_definition OR NULL in Get_Subprogram_Anno_Key_Nodes"); -- ASSUME Dependency_Node = dependency_relation OR NULL SystemErrors.RT_Assert (C => Dependency_Node = STree.NullNode or else Syntax_Node_Type (Node => Dependency_Node) = SP_Symbols.dependency_relation, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Dependency_Node = dependency_relation OR NULL in Get_Subprogram_Anno_Key_Nodes"); -- ASSUME Declare_Node = declare_annotation OR NULL SystemErrors.RT_Assert (C => Declare_Node = STree.NullNode or else Syntax_Node_Type (Node => Declare_Node) = SP_Symbols.declare_annotation, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Declare_Node = declare_annotation OR NULL in Get_Subprogram_Anno_Key_Nodes"); -- ASSUME Constraint_Node = function_constraint OR procedure_constraint SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Constraint_Node) = SP_Symbols.function_constraint 
or else Syntax_Node_Type (Node => Constraint_Node) = SP_Symbols.procedure_constraint, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Constraint_Node = function_constraint OR procedure_constraint in Get_Subprogram_Anno_Key_Nodes"); end Get_Subprogram_Anno_Key_Nodes; spark-2012.0.deb/examiner/lextokenstacks.ads0000644000175000017500000000353411753202336020011 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with LexTokenManager; --# inherit LexTokenManager, --# SPARK_IO, --# SystemErrors; package LexTokenStacks is type Stacks is private; function IsEmpty (S : Stacks) return Boolean; procedure Clear (S : out Stacks); --# derives S from ; --# post IsEmpty(S); procedure Push (S : in out Stacks; Item : in LexTokenManager.Lex_String); --# derives S from *, --# Item; procedure Pop (S : in out Stacks; Item : out LexTokenManager.Lex_String); --# derives Item, --# S from S; private MaxDepth : constant Integer := 100; subtype Ptrs is Integer range 0 .. MaxDepth; subtype Slots is Integer range 1 .. MaxDepth; type Vectors is array (Slots) of LexTokenManager.Lex_String; type Stacks is record Vector : Vectors; Ptr : Ptrs; end record; end LexTokenStacks; spark-2012.0.deb/examiner/indexmanager.adb0000644000175000017500000032421311753202336017370 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= -- Options for handling generic units are marked with -- GenOption -- -- With the code commented out, generic declarations are indexed as "specification" -- With the code included, they are indicated by "generic_declaration" -- The former is probably better because if a package body is found, there is no way -- of knowing whether a generic spec or normal spec is required. We know this for a -- generic subprgoram body because any subprogram body that is not a main program must -- be generic. It seems uneccesarily complicated to change the language of index files -- so as to say something about generic suprgorams that we can't say about generic -- packages. Therefore overloading "specification" seems the best option. with Ada.Characters.Latin_1; with CommandLineData; with FileSystem; with IndexManager.Cache; with IndexManager.Index_Table_P; with SystemErrors; package body IndexManager --# own State is IndexManager.Cache.The_Unit_Hash, --# IndexManager.Index_Table_P.Fatal_Error, --# IndexManager.Index_Table_P.Index_Table; is procedure Look_In_File (Lookup_Components : in Boolean; Required_Unit : in LexTokenLists.Lists; Possible_Unit_Types : in ContextManager.UnitTypeSets; Top_Filename : in LexTokenManager.Lex_String; Returned_Filename : out LexTokenManager.Lex_String; Actual_Unit_Type : out ContextManager.UnitTypes; Components : out Component_Lists; Found : out Boolean) --# global in CommandLineData.Content; --# in out Cache.The_Unit_Hash; --# in out ErrorHandler.Error_Context; --# in out Index_Table_P.Fatal_Error; --# in out Index_Table_P.Index_Table; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Actual_Unit_Type, --# Cache.The_Unit_Hash, --# Components, --# ErrorHandler.Error_Context, --# Found, --# Index_Table_P.Index_Table, --# LexTokenManager.State, --# Returned_Filename, --# SPARK_IO.File_Sys from Cache.The_Unit_Hash, --# CommandLineData.Content, --# ErrorHandler.Error_Context, --# Index_Table_P.Index_Table, --# LexTokenManager.State, --# Lookup_Components, --# Possible_Unit_Types, --# Required_Unit, --# SPARK_IO.File_Sys, --# Top_Filename & --# Index_Table_P.Fatal_Error from *, --# Cache.The_Unit_Hash, --# CommandLineData.Content, --# ErrorHandler.Error_Context, --# Index_Table_P.Index_Table, --# LexTokenManager.State, --# Lookup_Components, --# Possible_Unit_Types, --# Required_Unit, --# SPARK_IO.File_Sys, --# Top_Filename; is L_Returned_Filename : E_Strings.T; L_Actual_Unit_Type : ContextManager.UnitTypes; L_Components : Component_Lists; L_Found : Boolean; Entry_Unit : LexTokenLists.Lists; Entry_Type : Entry_Types; Entry_Filename : E_Strings.T; Entry_Components : Component_Lists; Valid_Entry : Boolean; First_In_File : Boolean; Position : File_Position; Last_Filename : LexTokenManager.Lex_String; Poss_Error_Type : Library_Manager_Errors; In_Aux_Index : Boolean; Done : Boolean; File_Spec_Status : FileSystem.Typ_File_Spec_Status; Index_File : SPARK_IO.File_Type; File_Status : SPARK_IO.File_Status; Aux_Index_Unit : LexTokenLists.Lists; Super_Index_Found : Boolean; Index_Filename : E_Strings.T; Current_Index_Filename : LexTokenManager.Lex_String; Source_Position : File_Position; Last_Char : Character; SPARK_Library_Unit : LexTokenLists.Lists; Interfaces_Library_Unit : LexTokenLists.Lists; Ada_Library_Unit : LexTokenLists.Lists; function Is_End_Of_Buffered_Line (File : SPARK_IO.File_Type; Last_Char : Character) return Boolean --# global in SPARK_IO.File_Sys; is Answer : Boolean; 
begin Answer := False; if SPARK_IO.End_Of_Line (File) and then Last_Char = Ada.Characters.Latin_1.NUL then Answer := True; end if; return Answer; end Is_End_Of_Buffered_Line; function Is_End_Of_Buffered_File (File : SPARK_IO.File_Type; Last_Char : Character) return Boolean --# global in SPARK_IO.File_Sys; is Answer : Boolean; begin Answer := False; if SPARK_IO.End_Of_File (File) and then Last_Char = Ada.Characters.Latin_1.NUL then Answer := True; end if; return Answer; end Is_End_Of_Buffered_File; procedure Read_Buffered_Char (File : in SPARK_IO.File_Type; Index_Filename : in LexTokenManager.Lex_String; Last_Char : in out Character; Ch : out Character) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out Index_Table_P.Fatal_Error; --# in out SPARK_IO.File_Sys; --# derives Ch, --# Last_Char from File, --# Last_Char, --# SPARK_IO.File_Sys & --# ErrorHandler.Error_Context, --# Index_Table_P.Fatal_Error from *, --# File, --# Last_Char, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ErrorHandler.Error_Context, --# File, --# Index_Filename, --# Last_Char, --# LexTokenManager.State; is Token_Position : File_Position; begin if Last_Char = Ada.Characters.Latin_1.NUL then -- No buffered character SPARK_IO.Get_Char (File, Ch); if Ch = Ada.Characters.Latin_1.HT or else Ch = Ada.Characters.Latin_1.CR then Ch := ' '; end if; else -- Read the buffered character Ch := Last_Char; Last_Char := Ada.Characters.Latin_1.NUL; end if; -- Check for comments; skip line if found if Ch = '-' then if not Is_End_Of_Buffered_File (File => File, Last_Char => Last_Char) then SPARK_IO.Get_Char (File, Ch); if Ch = '-' then -- it is a comment so skip to end of line SPARK_IO.Skip_Line (File, 1); Ch := ' '; else -- only one '-' so its not a comment Last_Char := Ch; Ch := '-'; end if; else -- A single dash then EOF - nonsense -- error reading; assume no file can end with '-' -- In priciple calls to Line and Col may cause a run-time exception -- if Line or Col > Count'Last defined in the TEXT_IO package -- but in practice this is exception will not be raised because, for -- instance, in gnat Count'Last = Natural'Last Token_Position := File_Position'(Line => SPARK_IO.Line (File), Col => SPARK_IO.Col (File)); Index_Table_P.Output_Error (E => ES_Comment, Source_File => Index_Filename, Token_Position => Token_Position, Token_String => E_Strings.Empty_String); end if; end if; end Read_Buffered_Char; -- Get all the contiguous whitespace (including newlines) -- up to end of file or a non-whitespace char. Unget -- non-whitespace. 
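-- Illustrative note on the scanning scheme used here: Last_Char acts as a
-- one-character push-back buffer, with Ada.Characters.Latin_1.NUL meaning
-- "nothing buffered".  A caller that reads one character too far puts it back
-- by assigning it to Last_Char, for example (hypothetical fragment):
--
--    Read_Buffered_Char (File, Index_Filename, Last_Char, Ch);
--    if Ch /= ' ' then
--       Last_Char := Ch;   -- unget: the next buffered read returns Ch again
--    end if;
--
-- Skip_White_Space below and the other readers in this package rely on this
-- convention.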
procedure Skip_White_Space (File : in SPARK_IO.File_Type; Index_Filename : in LexTokenManager.Lex_String; Last_Char : in out Character) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out Index_Table_P.Fatal_Error; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# Index_Table_P.Fatal_Error, --# Last_Char, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ErrorHandler.Error_Context, --# File, --# Index_Filename, --# Last_Char, --# LexTokenManager.State, --# SPARK_IO.File_Sys; is My_Char : Character; begin while Is_End_Of_Buffered_Line (File => File, Last_Char => Last_Char) and then (not Is_End_Of_Buffered_File (File => File, Last_Char => Last_Char)) loop SPARK_IO.Skip_Line (File, 1); end loop; if not Is_End_Of_Buffered_File (File => File, Last_Char => Last_Char) then Read_Buffered_Char (File => File, Index_Filename => Index_Filename, Last_Char => Last_Char, Ch => My_Char); if (My_Char /= ' ') then Last_Char := My_Char; end if; while My_Char = ' ' and then (not Is_End_Of_Buffered_File (File => File, Last_Char => Last_Char)) loop if Is_End_Of_Buffered_Line (File => File, Last_Char => Last_Char) then SPARK_IO.Skip_Line (File, 1); else -- Read the next char, as long as it's whitespace Read_Buffered_Char (File => File, Index_Filename => Index_Filename, Last_Char => Last_Char, Ch => My_Char); if (My_Char /= ' ') then Last_Char := My_Char; end if; end if; end loop; end if; end Skip_White_Space; procedure Get_Token_Position (Index_File : in SPARK_IO.File_Type; Index_Filename : in LexTokenManager.Lex_String; Last_Char : in out Character; Position : out File_Position) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out Index_Table_P.Fatal_Error; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# Index_Table_P.Fatal_Error, --# Last_Char, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ErrorHandler.Error_Context, --# Index_File, --# Index_Filename, --# Last_Char, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# Position from CommandLineData.Content, --# ErrorHandler.Error_Context, --# Index_File, --# Index_Filename, --# Last_Char, --# LexTokenManager.State, --# SPARK_IO.File_Sys; is begin Skip_White_Space (File => Index_File, Index_Filename => Index_Filename, Last_Char => Last_Char); Position := File_Position'(Line => SPARK_IO.Line (Index_File), Col => SPARK_IO.Col (Index_File)); end Get_Token_Position; procedure Read_Entry (Index_File : in SPARK_IO.File_Type; Index_Filename : in LexTokenManager.Lex_String; Last_Char : in out Character; Entry_Unit_Export : out LexTokenLists.Lists; Entry_Type : out Entry_Types; Entry_Filename : out E_Strings.T; Entry_Components : out Component_Lists; Valid_Entry : out Boolean) --# global in CommandLineData.Content; --# in out ErrorHandler.Error_Context; --# in out Index_Table_P.Fatal_Error; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Entry_Components, --# Entry_Filename, --# Entry_Type, --# Entry_Unit_Export, --# ErrorHandler.Error_Context, --# Last_Char, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Valid_Entry from CommandLineData.Content, --# ErrorHandler.Error_Context, --# Index_File, --# Index_Filename, --# Last_Char, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# Index_Table_P.Fatal_Error from *, --# CommandLineData.Content, --# ErrorHandler.Error_Context, --# Index_File, --# Index_Filename, --# Last_Char, 
--# LexTokenManager.State, --# SPARK_IO.File_Sys; is Entry_Name_Valid, Is_In_Valid, Components_Valid, File_Location_Valid : Boolean; Entry_Type_Loc : Entry_Types; Current_Position : File_Position; -- Get the next single character; unget any whitespace procedure Get_Single_Char (File : in SPARK_IO.File_Type; Index_Filename : in LexTokenManager.Lex_String; In_Quoted_String : in Boolean; Last_Char : in out Character; Ch : out Character; OK : out Boolean) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out Index_Table_P.Fatal_Error; --# in out SPARK_IO.File_Sys; --# derives Ch, --# OK from File, --# Last_Char, --# SPARK_IO.File_Sys & --# ErrorHandler.Error_Context, --# Index_Table_P.Fatal_Error from *, --# File, --# Last_Char, --# SPARK_IO.File_Sys & --# Last_Char from *, --# File, --# In_Quoted_String, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ErrorHandler.Error_Context, --# File, --# Index_Filename, --# Last_Char, --# LexTokenManager.State; is begin if Is_End_Of_Buffered_File (File => File, Last_Char => Last_Char) then Ch := ' '; OK := False; elsif Is_End_Of_Buffered_Line (File => File, Last_Char => Last_Char) then Ch := ' '; OK := True; else Read_Buffered_Char (File => File, Index_Filename => Index_Filename, Last_Char => Last_Char, Ch => Ch); -- Allow for quoted strings containing spaces if Ch = ' ' and then not In_Quoted_String then Last_Char := Ch; end if; OK := True; end if; end Get_Single_Char; procedure Read_An_Identifier (Index_File : in SPARK_IO.File_Type; Index_Filename : in LexTokenManager.Lex_String; Identifier : in out E_Strings.T; Last_Char : in out Character; Valid : out Boolean; Follower : out Character) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out Index_Table_P.Fatal_Error; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# Identifier, --# Index_Table_P.Fatal_Error, --# Last_Char, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ErrorHandler.Error_Context, --# Index_File, --# Index_Filename, --# Last_Char, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# Follower, --# Valid from CommandLineData.Content, --# ErrorHandler.Error_Context, --# Index_File, --# Index_Filename, --# Last_Char, --# LexTokenManager.State, --# SPARK_IO.File_Sys; is Ch : Character; Read_OK : Boolean; begin Skip_White_Space (File => Index_File, Index_Filename => Index_Filename, Last_Char => Last_Char); Get_Single_Char (File => Index_File, Index_Filename => Index_Filename, In_Quoted_String => False, Last_Char => Last_Char, Ch => Ch, OK => Read_OK); if Read_OK and then (Ch in 'a' .. 'z' or else Ch in 'A' .. 'Z') then Identifier := E_Strings.Empty_String; loop E_Strings.Append_Char (E_Str => Identifier, Ch => Ch); Get_Single_Char (File => Index_File, Index_Filename => Index_Filename, In_Quoted_String => False, Last_Char => Last_Char, Ch => Ch, OK => Read_OK); exit when not Read_OK or else (Ch not in 'A' .. 'Z' and then Ch not in 'a' .. 'z' and then Ch not in '0' .. 
'9' and then Ch /= '_'); end loop; Valid := Is_End_Of_Buffered_File (File => Index_File, Last_Char => Last_Char) or else Read_OK; Follower := Ch; else Valid := False; Follower := ' '; end if; end Read_An_Identifier; procedure Read_An_Entry_Name (Index_File : in SPARK_IO.File_Type; Index_Filename : in LexTokenManager.Lex_String; Last_Char : in out Character; Entry_Unit_Export : out LexTokenLists.Lists; Entry_Type : out Entry_Types; Entry_Name_Valid : out Boolean) --# global in CommandLineData.Content; --# in out ErrorHandler.Error_Context; --# in out Index_Table_P.Fatal_Error; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Entry_Name_Valid, --# Entry_Type, --# Entry_Unit_Export, --# ErrorHandler.Error_Context, --# Last_Char, --# LexTokenManager.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ErrorHandler.Error_Context, --# Index_File, --# Index_Filename, --# Last_Char, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# Index_Table_P.Fatal_Error from *, --# CommandLineData.Content, --# ErrorHandler.Error_Context, --# Index_File, --# Index_Filename, --# Last_Char, --# LexTokenManager.State, --# SPARK_IO.File_Sys; is Unit_Valid : Boolean; A_String : E_Strings.T; String_Valid : Boolean; Next_Ch : Character; Position : File_Position; procedure Read_A_Unit_Name (Index_File : in SPARK_IO.File_Type; Index_Filename : in LexTokenManager.Lex_String; Last_Char : in out Character; Unit_Name : out LexTokenLists.Lists; Valid : out Boolean) --# global in CommandLineData.Content; --# in out ErrorHandler.Error_Context; --# in out Index_Table_P.Fatal_Error; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# Last_Char, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Unit_Name, --# Valid from CommandLineData.Content, --# ErrorHandler.Error_Context, --# Index_File, --# Index_Filename, --# Last_Char, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# Index_Table_P.Fatal_Error from *, --# CommandLineData.Content, --# ErrorHandler.Error_Context, --# Index_File, --# Index_Filename, --# Last_Char, --# LexTokenManager.State, --# SPARK_IO.File_Sys; is Id : E_Strings.T; Valid_Identifier : Boolean; Next_Ch : Character; Token : LexTokenManager.Lex_String; begin Id := E_Strings.Empty_String; Unit_Name := LexTokenLists.Null_List; loop Read_An_Identifier (Index_File => Index_File, Index_Filename => Index_Filename, Identifier => Id, Last_Char => Last_Char, Valid => Valid_Identifier, Follower => Next_Ch); exit when not Valid_Identifier; LexTokenManager.Insert_Examiner_String (Str => Id, Lex_Str => Token); LexTokenLists.Append (List => Unit_Name, Item => Token); exit when Next_Ch /= '.'; end loop; Valid := Valid_Identifier and then Next_Ch = ' '; end Read_A_Unit_Name; begin Entry_Type := Invalid_Entry_Type; A_String := E_Strings.Empty_String; -- Get position of start of entry Get_Token_Position (Index_File => Index_File, Index_Filename => Index_Filename, Last_Char => Last_Char, Position => Position); Read_A_Unit_Name (Index_File => Index_File, Index_Filename => Index_Filename, Last_Char => Last_Char, Unit_Name => Entry_Unit_Export, Valid => Unit_Valid); if Unit_Valid then if LexTokenLists.Get_Length (List => Entry_Unit_Export) = 1 and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => LexTokenLists.Get_Element (List => Entry_Unit_Export, Pos => 1), Lex_Str2 => LexTokenManager.Super_Index_Token) = LexTokenManager.Str_Eq then Entry_Type := Super_Index; Entry_Name_Valid := True; else -- Get the 
position of the start of the unit type Get_Token_Position (Index_File => Index_File, Index_Filename => Index_Filename, Last_Char => Last_Char, Position => Position); Read_An_Identifier (Index_File => Index_File, Index_Filename => Index_Filename, Identifier => A_String, Last_Char => Last_Char, Valid => String_Valid, Follower => Next_Ch); if String_Valid and then Next_Ch = ' ' then if E_Strings.Eq1_String (E_Str => A_String, Str => "auxindex") then Entry_Type := Aux_Index; Entry_Name_Valid := True; elsif E_Strings.Eq1_String (E_Str => A_String, Str => "main_program") then Entry_Type := Main_P; Entry_Name_Valid := True; elsif E_Strings.Eq1_String (E_Str => A_String, Str => "specification") or else E_Strings.Eq1_String (E_Str => A_String, Str => "spec") then Entry_Type := P_Spec; Entry_Name_Valid := True; elsif E_Strings.Eq1_String (E_Str => A_String, Str => "body") then Entry_Type := P_Bodi; Entry_Name_Valid := True; elsif E_Strings.Eq1_String (E_Str => A_String, Str => "subunit") then Entry_Type := Subunit; Entry_Name_Valid := True; elsif E_Strings.Eq1_String (E_Str => A_String, Str => "components") then Entry_Type := Component_List; Entry_Name_Valid := True; else Entry_Name_Valid := False; Index_Table_P.Output_Error (E => ES_UnitEntry, Source_File => Index_Filename, Token_Position => Position, Token_String => A_String); end if; else Entry_Name_Valid := False; Index_Table_P.Output_Error (E => ES_UnitEntry, Source_File => Index_Filename, Token_Position => Position, Token_String => E_Strings.Empty_String); end if; end if; else Entry_Name_Valid := False; Index_Table_P.Output_Error (E => EW_IllegalUnitName, Source_File => Index_Filename, Token_Position => Position, Token_String => E_Strings.Empty_String); end if; end Read_An_Entry_Name; procedure Skip_Is_In (Index_File : in SPARK_IO.File_Type; Index_Filename : in LexTokenManager.Lex_String; Last_Char : in out Character; Is_In_Valid : out Boolean) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out Index_Table_P.Fatal_Error; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# Index_Table_P.Fatal_Error, --# Last_Char, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ErrorHandler.Error_Context, --# Index_File, --# Index_Filename, --# Last_Char, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# Is_In_Valid from CommandLineData.Content, --# ErrorHandler.Error_Context, --# Index_File, --# Index_Filename, --# Last_Char, --# LexTokenManager.State, --# SPARK_IO.File_Sys; is Id : E_Strings.T; Id_Valid : Boolean; Is_Valid : Boolean; In_Valid : Boolean; Next_Ch : Character; begin Id := E_Strings.Empty_String; Read_An_Identifier (Index_File => Index_File, Index_Filename => Index_Filename, Identifier => Id, Last_Char => Last_Char, Valid => Id_Valid, Follower => Next_Ch); if Id_Valid and then Next_Ch = ' ' then Is_Valid := E_Strings.Eq1_String (E_Str => Id, Str => "is"); else Is_Valid := False; end if; if Is_Valid then Read_An_Identifier (Index_File => Index_File, Index_Filename => Index_Filename, Identifier => Id, Last_Char => Last_Char, Valid => Id_Valid, Follower => Next_Ch); if Id_Valid and then Next_Ch = ' ' then In_Valid := E_Strings.Eq1_String (E_Str => Id, Str => "in"); else In_Valid := False; end if; if In_Valid then Is_In_Valid := True; else Is_In_Valid := False; end if; else Is_In_Valid := False; end if; end Skip_Is_In; procedure Read_A_File_Location (Index_File : in SPARK_IO.File_Type; Index_Filename : in LexTokenManager.Lex_String; Last_Char 
: in out Character; Entry_Filename : out E_Strings.T; File_Location_Valid : out Boolean) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out Index_Table_P.Fatal_Error; --# in out SPARK_IO.File_Sys; --# derives Entry_Filename, --# ErrorHandler.Error_Context, --# File_Location_Valid, --# Last_Char, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ErrorHandler.Error_Context, --# Index_File, --# Index_Filename, --# Last_Char, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# Index_Table_P.Fatal_Error from *, --# CommandLineData.Content, --# ErrorHandler.Error_Context, --# Index_File, --# Index_Filename, --# Last_Char, --# LexTokenManager.State, --# SPARK_IO.File_Sys; is Ch : Character; Read_OK : Boolean; E_Str : E_Strings.T; In_Quoted_String : Boolean; begin In_Quoted_String := False; E_Str := E_Strings.Empty_String; Skip_White_Space (File => Index_File, Index_Filename => Index_Filename, Last_Char => Last_Char); Get_Single_Char (File => Index_File, Index_Filename => Index_Filename, In_Quoted_String => False, Last_Char => Last_Char, Ch => Ch, OK => Read_OK); loop -- Allow for quoted strings containing spaces exit when not Read_OK or else (Ch = ' ' and then not In_Quoted_String); if Ch = Ada.Characters.Latin_1.Quotation then In_Quoted_String := not In_Quoted_String; else E_Strings.Append_Char (E_Str => E_Str, Ch => Ch); end if; Get_Single_Char (File => Index_File, Index_Filename => Index_Filename, In_Quoted_String => In_Quoted_String, Last_Char => Last_Char, Ch => Ch, OK => Read_OK); end loop; Skip_White_Space (File => Index_File, Index_Filename => Index_Filename, Last_Char => Last_Char); Entry_Filename := E_Str; File_Location_Valid := E_Strings.Get_Length (E_Str => E_Str) > 0; end Read_A_File_Location; procedure Read_Components (Index_File : in SPARK_IO.File_Type; Index_Filename : in LexTokenManager.Lex_String; Last_Char : in out Character; The_Components : out Component_Lists; Entry_Valid : out Boolean) --# global in CommandLineData.Content; --# in out ErrorHandler.Error_Context; --# in out Index_Table_P.Fatal_Error; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Entry_Valid, --# ErrorHandler.Error_Context, --# Last_Char, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# The_Components from CommandLineData.Content, --# ErrorHandler.Error_Context, --# Index_File, --# Index_Filename, --# Last_Char, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# Index_Table_P.Fatal_Error from *, --# CommandLineData.Content, --# ErrorHandler.Error_Context, --# Index_File, --# Index_Filename, --# Last_Char, --# LexTokenManager.State, --# SPARK_IO.File_Sys; is Id : E_Strings.T; Token : LexTokenManager.Lex_String; Index : Component_Index; Component : LexTokenLists.Lists; Valid : Boolean; Next_Ch : Character; Current_Position : File_Position; begin The_Components := Component_Lists'(others => LexTokenLists.Null_List); Index := Component_Index'First; Id := E_Strings.Empty_String; -- Locate the position of the "are" Get_Token_Position (Index_File => Index_File, Index_Filename => Index_Filename, Last_Char => Last_Char, Position => Current_Position); Read_An_Identifier (Index_File => Index_File, Index_Filename => Index_Filename, Identifier => Id, Last_Char => Last_Char, Valid => Valid, Follower => Next_Ch); Valid := Valid and then Next_Ch = ' ' and then E_Strings.Eq1_String (E_Str => Id, Str => "are"); if Valid then loop -- over components Id := E_Strings.Empty_String; Component := 
LexTokenLists.Null_List; loop -- over component identifiers -- Locate the position of the next identifier Get_Token_Position (Index_File => Index_File, Index_Filename => Index_Filename, Last_Char => Last_Char, Position => Current_Position); Read_An_Identifier (Index_File => Index_File, Index_Filename => Index_Filename, Identifier => Id, Last_Char => Last_Char, Valid => Valid, Follower => Next_Ch); exit when not Valid; LexTokenManager.Insert_Examiner_String (Str => Id, Lex_Str => Token); LexTokenLists.Append (List => Component, Item => Token); exit when Next_Ch /= '.'; end loop; exit when not Valid; The_Components (Index) := Component; if Index < Component_Index'Last then Index := Component_Index'Succ (Index); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Index_Component_List_Full, Msg => ""); end if; exit when Next_Ch /= ','; end loop; Valid := Valid and then Next_Ch = ' '; if not Valid then Index_Table_P.Output_Error (E => ES_Components, Source_File => Index_Filename, Token_Position => Current_Position, Token_String => E_Strings.Empty_String); end if; else Index_Table_P.Output_Error (E => ES_Are, Source_File => Index_Filename, Token_Position => Current_Position, Token_String => E_Strings.Empty_String); end if; The_Components (Index) := LexTokenLists.Null_List; -- list terminator Entry_Valid := Valid; end Read_Components; begin -- Read_Entry Entry_Components := Component_Lists'(others => LexTokenLists.Null_List); Read_An_Entry_Name (Index_File => Index_File, Index_Filename => Index_Filename, Last_Char => Last_Char, Entry_Unit_Export => Entry_Unit_Export, Entry_Type => Entry_Type_Loc, Entry_Name_Valid => Entry_Name_Valid); Entry_Type := Entry_Type_Loc; if Entry_Name_Valid then if Entry_Type_Loc = Component_List then Entry_Filename := E_Strings.Empty_String; Read_Components (Index_File => Index_File, Index_Filename => Index_Filename, Last_Char => Last_Char, The_Components => Entry_Components, Entry_Valid => Components_Valid); if Components_Valid then Valid_Entry := True; else Valid_Entry := False; -- Errors are reported by Read_Components SPARK_IO.Skip_Line (Index_File, 1); end if; else -- Locate the start of "is in" Get_Token_Position (Index_File => Index_File, Index_Filename => Index_Filename, Last_Char => Last_Char, Position => Current_Position); Skip_Is_In (Index_File => Index_File, Index_Filename => Index_Filename, Last_Char => Last_Char, Is_In_Valid => Is_In_Valid); if Is_In_Valid then -- Locate the start of file location Get_Token_Position (Index_File => Index_File, Index_Filename => Index_Filename, Last_Char => Last_Char, Position => Current_Position); Read_A_File_Location (Index_File => Index_File, Index_Filename => Index_Filename, Last_Char => Last_Char, Entry_Filename => Entry_Filename, File_Location_Valid => File_Location_Valid); if File_Location_Valid then Valid_Entry := True; else Valid_Entry := False; Entry_Filename := E_Strings.Empty_String; Index_Table_P.Output_Error (E => ES_FileLocation, Source_File => Index_Filename, Token_Position => Current_Position, Token_String => E_Strings.Empty_String); end if; else Valid_Entry := False; Entry_Filename := E_Strings.Empty_String; Index_Table_P.Output_Error (E => ES_IsIn, Source_File => Index_Filename, Token_Position => Current_Position, Token_String => E_Strings.Empty_String); SPARK_IO.Skip_Line (Index_File, 1); end if; end if; else Valid_Entry := False; Entry_Filename := E_Strings.Empty_String; -- Error reported in call to ReadAnEntry SPARK_IO.Skip_Line (Index_File, 1); end if; end Read_Entry; procedure Handle_Main_P 
(Entry_Unit : in LexTokenLists.Lists; Required_Unit : in LexTokenLists.Lists; Entry_Filename : in E_Strings.T; Possible_Unit_Types : in ContextManager.UnitTypeSets; Found : in out Boolean; Returned_Filename : out E_Strings.T; Actual_Unit_Type : out ContextManager.UnitTypes) --# global in LexTokenManager.State; --# derives Actual_Unit_Type from Entry_Unit, --# LexTokenManager.State, --# Possible_Unit_Types, --# Required_Unit & --# Found from *, --# Entry_Unit, --# LexTokenManager.State, --# Possible_Unit_Types, --# Required_Unit & --# Returned_Filename from Entry_Filename, --# Entry_Unit, --# LexTokenManager.State, --# Possible_Unit_Types, --# Required_Unit; is begin if Possible_Unit_Types (ContextManager.MainProgram) and then LexTokenLists.Eq_Unit (First_Item => Entry_Unit, Second => Required_Unit) then Found := True; Returned_Filename := Entry_Filename; Actual_Unit_Type := ContextManager.MainProgram; else Returned_Filename := E_Strings.Empty_String; Actual_Unit_Type := ContextManager.InvalidUnit; end if; end Handle_Main_P; procedure Handle_P_Spec (Entry_Unit : in LexTokenLists.Lists; Required_Unit : in LexTokenLists.Lists; Entry_Filename : in E_Strings.T; Possible_Unit_Types : in ContextManager.UnitTypeSets; Found : in out Boolean; Returned_Filename : out E_Strings.T; Actual_Unit_Type : out ContextManager.UnitTypes) --# global in LexTokenManager.State; --# derives Actual_Unit_Type from Entry_Unit, --# LexTokenManager.State, --# Possible_Unit_Types, --# Required_Unit & --# Found from *, --# Entry_Unit, --# LexTokenManager.State, --# Possible_Unit_Types, --# Required_Unit & --# Returned_Filename from Entry_Filename, --# Entry_Unit, --# LexTokenManager.State, --# Possible_Unit_Types, --# Required_Unit; is begin if Possible_Unit_Types (ContextManager.PackageSpecification) and then LexTokenLists.Eq_Unit (First_Item => Entry_Unit, Second => Required_Unit) then Found := True; Returned_Filename := Entry_Filename; Actual_Unit_Type := ContextManager.PackageSpecification; elsif Possible_Unit_Types (ContextManager.GenericSubprogramDeclaration) and then LexTokenLists.Eq_Unit (First_Item => Entry_Unit, Second => Required_Unit) then Found := True; Returned_Filename := Entry_Filename; Actual_Unit_Type := ContextManager.GenericSubprogramDeclaration; else Returned_Filename := E_Strings.Empty_String; Actual_Unit_Type := ContextManager.InvalidUnit; end if; end Handle_P_Spec; procedure Handle_P_Bodi (Entry_Unit : in LexTokenLists.Lists; Required_Unit : in LexTokenLists.Lists; Entry_Filename : in E_Strings.T; Possible_Unit_Types : in ContextManager.UnitTypeSets; Found : in out Boolean; Returned_Filename : out E_Strings.T; Actual_Unit_Type : out ContextManager.UnitTypes) --# global in LexTokenManager.State; --# derives Actual_Unit_Type from Entry_Unit, --# LexTokenManager.State, --# Possible_Unit_Types, --# Required_Unit & --# Found from *, --# Entry_Unit, --# LexTokenManager.State, --# Possible_Unit_Types, --# Required_Unit & --# Returned_Filename from Entry_Filename, --# Entry_Unit, --# LexTokenManager.State, --# Possible_Unit_Types, --# Required_Unit; is begin if Possible_Unit_Types (ContextManager.PackageBody) and then LexTokenLists.Eq_Unit (First_Item => Entry_Unit, Second => Required_Unit) then Found := True; Returned_Filename := Entry_Filename; Actual_Unit_Type := ContextManager.PackageBody; else Returned_Filename := E_Strings.Empty_String; Actual_Unit_Type := ContextManager.InvalidUnit; end if; end Handle_P_Bodi; procedure Handle_Subunit (Entry_Unit : in LexTokenLists.Lists; Required_Unit : in 
LexTokenLists.Lists; Entry_Filename : in E_Strings.T; Possible_Unit_Types : in ContextManager.UnitTypeSets; Found : in out Boolean; Returned_Filename : out E_Strings.T; Actual_Unit_Type : out ContextManager.UnitTypes) --# global in LexTokenManager.State; --# derives Actual_Unit_Type from Entry_Unit, --# LexTokenManager.State, --# Possible_Unit_Types, --# Required_Unit & --# Found from *, --# Entry_Unit, --# LexTokenManager.State, --# Possible_Unit_Types, --# Required_Unit & --# Returned_Filename from Entry_Filename, --# Entry_Unit, --# LexTokenManager.State, --# Possible_Unit_Types, --# Required_Unit; is begin if Possible_Unit_Types (ContextManager.SubUnit) and then LexTokenLists.Eq_Unit (First_Item => Entry_Unit, Second => Required_Unit) then Found := True; Returned_Filename := Entry_Filename; Actual_Unit_Type := ContextManager.SubUnit; else Returned_Filename := E_Strings.Empty_String; Actual_Unit_Type := ContextManager.InvalidUnit; end if; end Handle_Subunit; procedure Handle_Components (Entry_Unit : in LexTokenLists.Lists; Required_Unit : in LexTokenLists.Lists; Entry_Components : in Component_Lists; Components : in out Component_Lists; Found : in out Boolean) --# global in LexTokenManager.State; --# derives Components from *, --# Entry_Components, --# Entry_Unit, --# LexTokenManager.State, --# Required_Unit & --# Found from *, --# Entry_Unit, --# LexTokenManager.State, --# Required_Unit; is begin if LexTokenLists.Eq_Unit (First_Item => Entry_Unit, Second => Required_Unit) then Found := True; Components := Entry_Components; end if; end Handle_Components; function Get_SPARK_Lib_Index_Filename (Index_Base_Name : in String) return E_Strings.T --# global in SPARK_IO.File_Sys; is Index_Filename : E_Strings.T; begin Index_Filename := FileSystem.Examiner_SPARK_Lib_Directory; E_Strings.Append_Examiner_String (E_Str1 => Index_Filename, E_Str2 => FileSystem.Directory_Separator); E_Strings.Append_String (E_Str => Index_Filename, Str => Index_Base_Name); return Index_Filename; end Get_SPARK_Lib_Index_Filename; begin -- Look_In_File Returned_Filename := LexTokenManager.Null_String; Actual_Unit_Type := ContextManager.InvalidUnit; Components := Component_Lists'(others => LexTokenLists.Null_List); Found := False; if CommandLineData.Content.Index then -- We never assume that the list of index file has been -- initialized. 
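-- Sketch of what follows: the index file named on the command line is resolved
-- to a full path, given the default index extension if none was supplied, and
-- registered with Index_Table_P as the first index file to be read, before any
-- SPARK-library auxiliary indexes are added.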
--# accept F, 10, File_Spec_Status, "Expect File_Spec_Status Unused"; FileSystem.Find_Full_File_Name (File_Spec => CommandLineData.Content.Index_File_Name, File_Status => File_Spec_Status, Full_File_Name => Index_Filename); --# end accept; FileSystem.Check_Extension (Fn => Index_Filename, Ext => E_Strings.Copy_String (Str => CommandLineData.Default_Index_Extension)); Index_Table_P.Add_Index_File (Filename => FileSystem.Interpret_Relative (File_Name => Index_Filename, Relative_To_Directory => Index_Filename)); if CommandLineData.Content.SPARK_Lib then -- make the SPARK library visible LexTokenManager.Insert_Examiner_String (Str => Index_Filename, Lex_Str => Current_Index_Filename); -- Add "SPARK auxindex is in /spark.idx" to the user's index file SPARK_Library_Unit := LexTokenLists.Null_List; LexTokenLists.Append (List => SPARK_Library_Unit, Item => LexTokenManager.SPARK_Token); Index_Table_P.Add_Aux_Index_File (Filename => Get_SPARK_Lib_Index_Filename ("spark.idx"), Unit => SPARK_Library_Unit, Position => File_Position'(Line => 1, Col => 1), Source_File => Current_Index_Filename); -- Add "Interfaces auxindex is in /interfaces.idx" to the user's index file Interfaces_Library_Unit := LexTokenLists.Null_List; LexTokenLists.Append (List => Interfaces_Library_Unit, Item => LexTokenManager.Interfaces_Token); Index_Table_P.Add_Aux_Index_File (Filename => Get_SPARK_Lib_Index_Filename ("interfaces.idx"), Unit => Interfaces_Library_Unit, Position => File_Position'(Line => 1, Col => 1), Source_File => Current_Index_Filename); -- Add "Ada auxindex is in /ada.idx" to the user's index file Ada_Library_Unit := LexTokenLists.Null_List; LexTokenLists.Append (List => Ada_Library_Unit, Item => LexTokenManager.Ada_Token); Index_Table_P.Add_Aux_Index_File (Filename => Get_SPARK_Lib_Index_Filename ("ada.idx"), Unit => Ada_Library_Unit, Position => File_Position'(Line => 1, Col => 1), Source_File => Current_Index_Filename); end if; elsif CommandLineData.Content.SPARK_Lib then -- No index file provided by the user but make SPARK library visible -- In this case, we set the (sole) index file as sparklib.idx which is -- assumed to contain references or auxiliary indexes for the entire -- SPARK Library. 
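-- For reference, an index file is a plain-text list of entries of the form
-- seen in dag.idx later in this archive, for example:
--    dag specification is in dag.ads
--    dag.buildgraph subunit is in dag-buildgraph.adb
-- plus "auxindex" entries (like the SPARK library ones added above) and
-- super-index entries that pull in further index files.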
Index_Table_P.Add_Index_File (Filename => Get_SPARK_Lib_Index_Filename ("sparklib.idx")); end if; if CommandLineData.Content.Index or else CommandLineData.Content.SPARK_Lib then Index_Table_P.Get_Next_Index_File (Unit => Required_Unit, Top_Filename => Top_Filename, Filename => Current_Index_Filename, File_Type => Entry_Type, Aux_Index_Unit => Aux_Index_Unit, Position => Source_Position); if CommandLineData.Content.Debug.File_Names then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "INDEX MANAGER GET INDEX FILE = ", Stop => 0); Index_Table_P.Debug_Put_E_Str (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Current_Index_Filename), New_Line => True); end if; In_Aux_Index := Entry_Type = Aux_Index; case Entry_Type is when Super_Index => Poss_Error_Type := EW_Super; when others => Poss_Error_Type := EW_Index; end case; if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Current_Index_Filename, Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq then Last_Filename := LexTokenManager.Null_String; Done := False; loop Index_File := SPARK_IO.Null_File; E_Strings.Open (File => Index_File, Mode_Of_File => SPARK_IO.In_File, Name_Of_File => LexTokenManager.Lex_String_To_String (Lex_Str => Current_Index_Filename), Form_Of_File => "", Status => File_Status); --# accept F, 41, "Stable expression expected here"; if CommandLineData.Content.Debug.File_Names then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "INDEX MANAGER OPEN INDEX FILE = ", Stop => 0); Index_Table_P.Debug_Put_E_Str (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Current_Index_Filename), New_Line => True); end if; --# end accept; if File_Status = SPARK_IO.Ok then L_Returned_Filename := E_Strings.Empty_String; L_Actual_Unit_Type := ContextManager.InvalidUnit; L_Components := Component_Lists'(others => LexTokenLists.Null_List); L_Found := False; Super_Index_Found := False; First_In_File := True; Last_Char := Ada.Characters.Latin_1.NUL; loop exit when SPARK_IO.End_Of_File (Index_File); -- Get Start of Entry Get_Token_Position (Index_File => Index_File, Index_Filename => Current_Index_Filename, Last_Char => Last_Char, Position => Position); Read_Entry (Index_File => Index_File, Index_Filename => Current_Index_Filename, Last_Char => Last_Char, Entry_Unit_Export => Entry_Unit, Entry_Type => Entry_Type, Entry_Filename => Entry_Filename, Entry_Components => Entry_Components, Valid_Entry => Valid_Entry); if Valid_Entry then if In_Aux_Index and then not LexTokenLists.Prefix_Unit (Poss_Prefix => Aux_Index_Unit, Prefixed => Entry_Unit) then Index_Table_P.Output_Error (E => EW_Aux, Source_File => Current_Index_Filename, Token_Position => Position, Token_String => LexTokenLists.Token_List_To_String (Token_List => Aux_Index_Unit)); end if; case Entry_Type is when Super_Index => if In_Aux_Index or Super_Index_Found or not First_In_File then Index_Table_P.Output_Error (E => EW_UnexpectedSuper, Source_File => Current_Index_Filename, Token_Position => Position, Token_String => E_Strings.Empty_String); else Super_Index_Found := True; FileSystem.Check_Extension (Fn => Entry_Filename, Ext => E_Strings.Copy_String (Str => CommandLineData.Default_Index_Extension)); Index_Table_P.Add_Super_Index_File (Filename => FileSystem.Interpret_Relative (File_Name => Entry_Filename, Relative_To_Directory => LexTokenManager.Lex_String_To_String (Lex_Str => Current_Index_Filename)), Position => Position, Source_File => Current_Index_Filename); --# accept F, 41, "Stable 
expression expected here"; if CommandLineData.Content.Debug.File_Names then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "INDEX MANAGER ADD SUPER INDEX FILE = ", Stop => 0); Index_Table_P.Debug_Put_E_Str (E_Str => FileSystem.Interpret_Relative (File_Name => Entry_Filename, Relative_To_Directory => LexTokenManager.Lex_String_To_String (Lex_Str => Current_Index_Filename)), New_Line => True); end if; --# end accept; end if; when Aux_Index => FileSystem.Check_Extension (Fn => Entry_Filename, Ext => E_Strings.Copy_String (Str => CommandLineData.Default_Index_Extension)); Index_Table_P.Add_Aux_Index_File (Filename => FileSystem.Interpret_Relative (File_Name => Entry_Filename, Relative_To_Directory => LexTokenManager.Lex_String_To_String (Lex_Str => Current_Index_Filename)), Unit => Entry_Unit, Position => Position, Source_File => Current_Index_Filename); --# accept F, 41, "Stable expression expected here"; if CommandLineData.Content.Debug.File_Names then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "INDEX MANAGER ADD AUX INDEX FILE = ", Stop => 0); Index_Table_P.Debug_Put_E_Str (E_Str => FileSystem.Interpret_Relative (File_Name => Entry_Filename, Relative_To_Directory => LexTokenManager.Lex_String_To_String (Lex_Str => Current_Index_Filename)), New_Line => True); end if; --# end accept; when Main_P => --# accept F, 41, "Stable expression expected here"; if not Lookup_Components then Handle_Main_P (Entry_Unit => Entry_Unit, Required_Unit => Required_Unit, Entry_Filename => Entry_Filename, Possible_Unit_Types => Possible_Unit_Types, Found => L_Found, Returned_Filename => L_Returned_Filename, Actual_Unit_Type => L_Actual_Unit_Type); end if; --# end accept; FileSystem.Check_Extension (Fn => Entry_Filename, Ext => CommandLineData.Content.Source_Extension); Cache.Add_Unit (Unit => Entry_Unit, Unit_Types => ContextManager.MainProgram, Source_Filename => FileSystem.Interpret_Relative (File_Name => Entry_Filename, Relative_To_Directory => LexTokenManager.Lex_String_To_String (Lex_Str => Current_Index_Filename)), Index_Filename => Current_Index_Filename, Index_Position => Position); when P_Spec => --# accept F, 41, "Stable expression expected here"; if not Lookup_Components then Handle_P_Spec (Entry_Unit => Entry_Unit, Required_Unit => Required_Unit, Entry_Filename => Entry_Filename, Possible_Unit_Types => Possible_Unit_Types, Found => L_Found, Returned_Filename => L_Returned_Filename, Actual_Unit_Type => L_Actual_Unit_Type); end if; --# end accept; FileSystem.Check_Extension (Fn => Entry_Filename, Ext => CommandLineData.Content.Source_Extension); Cache.Add_Unit (Unit => Entry_Unit, Unit_Types => ContextManager.PackageSpecification, Source_Filename => FileSystem.Interpret_Relative (File_Name => Entry_Filename, Relative_To_Directory => LexTokenManager.Lex_String_To_String (Lex_Str => Current_Index_Filename)), Index_Filename => Current_Index_Filename, Index_Position => Position); -- GenOption -- when GenericDec => HandleGenericDec; when P_Bodi => --# accept F, 41, "Stable expression expected here"; if not Lookup_Components then Handle_P_Bodi (Entry_Unit => Entry_Unit, Required_Unit => Required_Unit, Entry_Filename => Entry_Filename, Possible_Unit_Types => Possible_Unit_Types, Found => L_Found, Returned_Filename => L_Returned_Filename, Actual_Unit_Type => L_Actual_Unit_Type); end if; --# end accept; FileSystem.Check_Extension (Fn => Entry_Filename, Ext => CommandLineData.Content.Source_Extension); Cache.Add_Unit (Unit => Entry_Unit, Unit_Types => 
ContextManager.PackageBody, Source_Filename => FileSystem.Interpret_Relative (File_Name => Entry_Filename, Relative_To_Directory => LexTokenManager.Lex_String_To_String (Lex_Str => Current_Index_Filename)), Index_Filename => Current_Index_Filename, Index_Position => Position); when Subunit => --# accept F, 41, "Stable expression expected here"; if not Lookup_Components then Handle_Subunit (Entry_Unit => Entry_Unit, Required_Unit => Required_Unit, Entry_Filename => Entry_Filename, Possible_Unit_Types => Possible_Unit_Types, Found => L_Found, Returned_Filename => L_Returned_Filename, Actual_Unit_Type => L_Actual_Unit_Type); end if; --# end accept; FileSystem.Check_Extension (Fn => Entry_Filename, Ext => CommandLineData.Content.Source_Extension); Cache.Add_Unit (Unit => Entry_Unit, Unit_Types => ContextManager.SubUnit, Source_Filename => FileSystem.Interpret_Relative (File_Name => Entry_Filename, Relative_To_Directory => LexTokenManager.Lex_String_To_String (Lex_Str => Current_Index_Filename)), Index_Filename => Current_Index_Filename, Index_Position => Position); when Component_List => --# accept F, 41, "Stable expression expected here"; if Lookup_Components then Handle_Components (Entry_Unit => Entry_Unit, Required_Unit => Required_Unit, Entry_Components => Entry_Components, Components => L_Components, Found => L_Found); end if; --# end accept; Cache.Add_Components (Unit => Entry_Unit, Components => Entry_Components, Index_Filename => Current_Index_Filename, Index_Position => Position); when Invalid_Entry_Type => null; end case; end if; if L_Found then Actual_Unit_Type := L_Actual_Unit_Type; Components := L_Components; Found := True; --# accept F, 41, "Stable expression expected here"; if not Lookup_Components then FileSystem.Check_Extension (Fn => L_Returned_Filename, Ext => CommandLineData.Content.Source_Extension); LexTokenManager.Insert_Examiner_String (Str => FileSystem.Interpret_Relative (File_Name => L_Returned_Filename, Relative_To_Directory => LexTokenManager.Lex_String_To_String (Lex_Str => Current_Index_Filename)), Lex_Str => Returned_Filename); end if; --# end accept; --# accept F, 41, "Stable expression expected here"; if CommandLineData.Content.Debug.File_Names then -- Debug if Lookup_Components then SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "INDEXMANAGER FOUND ", Stop => 0); for I in Component_Index loop if Components (I) /= LexTokenLists.Null_List then LexTokenLists.Print_List (File => SPARK_IO.Standard_Output, List => Components (I)); end if; end loop; else SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "INDEXMANAGER FOUND IN ", Stop => 0); Index_Table_P.Debug_Put_E_Str (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Returned_Filename), New_Line => False); SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => " FOR TYPE ", Stop => 0); Cache.Context_Manager_Unit_Types_Image (Unit_Type => Actual_Unit_Type); end if; SPARK_IO.New_Line (File => SPARK_IO.Standard_Output, Spacing => 1); end if; --# end accept; end if; L_Found := False; First_In_File := False; end loop; Last_Filename := Current_Index_Filename; Index_Table_P.Index_File_Done (Filename => Current_Index_Filename); --# accept F, 10, Index_File, "Expect ineffective assignment" & --# F, 10, File_Status, "Expect ineffective assignment"; SPARK_IO.Close (Index_File, File_Status); --# end accept; --# accept F, 41, "Stable expression expected here"; if CommandLineData.Content.Debug.File_Names then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => 
"INDEX MANAGER CLOSE INDEX FILE = ", Stop => 0); Index_Table_P.Debug_Put_E_Str (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Current_Index_Filename), New_Line => True); end if; --# end accept; Index_Table_P.Stop_SPARK; if Found then Done := True; else Index_Table_P.Get_Next_Index_File (Unit => Required_Unit, Top_Filename => Top_Filename, Filename => Current_Index_Filename, File_Type => Entry_Type, Aux_Index_Unit => Aux_Index_Unit, Position => Source_Position); --# accept F, 41, "Stable expression expected here"; if CommandLineData.Content.Debug.File_Names then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "INDEX MANAGER GET INDEX FILE = ", Stop => 0); Index_Table_P.Debug_Put_E_Str (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Current_Index_Filename), New_Line => True); end if; --# end accept; In_Aux_Index := Entry_Type = Aux_Index; case Entry_Type is when Super_Index => Poss_Error_Type := EW_Super; when others => Poss_Error_Type := EW_Index; end case; if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Current_Index_Filename, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq then Found := False; Done := True; end if; end if; else Found := False; Done := True; Index_Table_P.Index_File_Done (Filename => Current_Index_Filename); Index_Table_P.Output_Error (E => Poss_Error_Type, Source_File => Last_Filename, Token_Position => Source_Position, Token_String => LexTokenManager.Lex_String_To_String (Lex_Str => Current_Index_Filename)); end if; exit when Done; end loop; end if; end if; --# accept F, 33, File_Spec_Status, "Expect File_Spec_Status unused"; end Look_In_File; procedure Look_Up (Required_Unit : in LexTokenLists.Lists; Possible_Unit_Types : in ContextManager.UnitTypeSets; Source_Filename : out LexTokenManager.Lex_String; Actual_Unit_Type : out ContextManager.UnitTypes; Found : out Boolean) --# global in CommandLineData.Content; --# in out Cache.The_Unit_Hash; --# in out ErrorHandler.Error_Context; --# in out Index_Table_P.Fatal_Error; --# in out Index_Table_P.Index_Table; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Actual_Unit_Type, --# Cache.The_Unit_Hash, --# ErrorHandler.Error_Context, --# Found, --# Index_Table_P.Index_Table, --# LexTokenManager.State, --# Source_Filename, --# SPARK_IO.File_Sys from Cache.The_Unit_Hash, --# CommandLineData.Content, --# ErrorHandler.Error_Context, --# Index_Table_P.Index_Table, --# LexTokenManager.State, --# Possible_Unit_Types, --# Required_Unit, --# SPARK_IO.File_Sys & --# Index_Table_P.Fatal_Error from *, --# Cache.The_Unit_Hash, --# CommandLineData.Content, --# ErrorHandler.Error_Context, --# Index_Table_P.Index_Table, --# LexTokenManager.State, --# Possible_Unit_Types, --# Required_Unit, --# SPARK_IO.File_Sys; is Components : Component_Lists; Source_Filename_From_Cache : LexTokenManager.Lex_String; Source_Filename_From_File : LexTokenManager.Lex_String; Actual_Unit_Type_From_Cache : ContextManager.UnitTypes; Actual_Unit_Type_From_File : ContextManager.UnitTypes; Found_From_Cache : Boolean; Found_From_File : Boolean; Need_To_Look_In_File : Boolean; Index_Filename_From_Cache : LexTokenManager.Lex_String; Index_Filename : LexTokenManager.Lex_String; File_Type : Entry_Types; Dummy_Aux_Index_Unit : LexTokenLists.Lists; Dummy_Source_Position : File_Position; procedure Trace (S : in LexTokenManager.Lex_String; M : in String) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives 
SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# LexTokenManager.State, --# M, --# S; is begin if CommandLineData.Content.Debug.File_Names then SPARK_IO.Put_String (SPARK_IO.Standard_Output, M, 0); if CommandLineData.Content.Plain_Output then --# accept F, 22, "Stable expression here OK"; if FileSystem.Use_Windows_Command_Line then --# end accept; E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => E_Strings.Lower_Case (E_Str => FileSystem.Just_File (Fn => LexTokenManager.Lex_String_To_String (Lex_Str => S), Ext => True))); else E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => FileSystem.Just_File (Fn => LexTokenManager.Lex_String_To_String (Lex_Str => S), Ext => True)); end if; else E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => S)); end if; end if; end Trace; begin -- Look_Up if CommandLineData.Content.Debug.File_Names then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "INDEX MANAGER LOOKING FOR ", Stop => 0); LexTokenLists.Print_List (File => SPARK_IO.Standard_Output, List => Required_Unit); SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => " IN", Stop => 0); end if; -- Try to find the required unit in the cache. Source_Filename_From_Cache := LexTokenManager.Null_String; Actual_Unit_Type_From_Cache := ContextManager.InvalidUnit; Index_Filename_From_Cache := LexTokenManager.Null_String; Found_From_Cache := False; Found_From_File := False; for I in ContextManager.UnitTypes loop if Possible_Unit_Types (I) then Actual_Unit_Type_From_Cache := I; Cache.Get_Unit (Required_Unit => Required_Unit, Unit_Types => Actual_Unit_Type_From_Cache, Source_Filename => Source_Filename_From_Cache, Index_Filename => Index_Filename_From_Cache, Found => Found_From_Cache); --# accept F, 41, "Stable expression expected here"; if CommandLineData.Content.Debug.File_Names then -- Debug SPARK_IO.Put_Char (File => SPARK_IO.Standard_Output, Item => ' '); Cache.Context_Manager_Unit_Types_Image (Unit_Type => Actual_Unit_Type_From_Cache); end if; --# end accept; end if; exit when Found_From_Cache; end loop; if CommandLineData.Content.Debug.File_Names then -- Debug SPARK_IO.New_Line (File => SPARK_IO.Standard_Output, Spacing => 1); end if; if Found_From_Cache then -- The required unit has been found in the cache => check if -- a more local definition of the required unit may exist in -- a not yet parsed auxiliary index file. -- -- The Filename and Aux_Index_Unit values returned from the -- call to Index_Table_P.Get_Next_Index_File, rather the -- Index_Filename_From_Cache is retained as the most global -- index file for Look_In_File. --# accept F, 10, Dummy_Aux_Index_Unit, "Ineffective assignment here OK" & --# F, 10, Dummy_Source_Position, "Ineffective assignment here OK"; Index_Table_P.Get_Next_Index_File (Unit => Required_Unit, Top_Filename => Index_Filename_From_Cache, Filename => Index_Filename, File_Type => File_Type, Aux_Index_Unit => Dummy_Aux_Index_Unit, Position => Dummy_Source_Position); --# end accept; Need_To_Look_In_File := File_Type = Aux_Index; if CommandLineData.Content.Debug.File_Names then -- Debug -- Index_Table_P.Get_Next_Index may return an yet -- unparsed auxiliary index or super index. 
SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "INDEX FILENAME NOT YET PARSED : ", Stop => 0); Index_Table_P.Debug_Put_E_Str (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Index_Filename), New_Line => True); end if; else -- The required unit is not in the cache => need to lookup -- in the index files. Index_Filename_From_Cache := LexTokenManager.Null_String; Need_To_Look_In_File := True; end if; if Need_To_Look_In_File then -- The required unit has not been found in the cache => need -- to lookup in the index files -- -- or -- -- a more local definition of the required unit may exist in -- a not yet parsed index file => need to lookup in the more -- local index files only. --# accept F, 10, Components, "Ineffective assignment here OK"; Look_In_File (Lookup_Components => False, Required_Unit => Required_Unit, Possible_Unit_Types => Possible_Unit_Types, Top_Filename => Index_Filename_From_Cache, Returned_Filename => Source_Filename_From_File, Actual_Unit_Type => Actual_Unit_Type_From_File, Components => Components, Found => Found_From_File); --# end accept; if Found_From_File then -- The required unit has been found in the index files or -- a more local definition of the required unit has been -- found in a previously unparsed auxiliary index file => -- the answer is the value coming from the index file. Source_Filename := Source_Filename_From_File; Actual_Unit_Type := Actual_Unit_Type_From_File; elsif Found_From_Cache then -- A more local definition of the required unit has not -- been found in the index files => the answer is the -- value stored in the cache. Source_Filename := Source_Filename_From_Cache; Actual_Unit_Type := Actual_Unit_Type_From_Cache; else -- The required unit has not been found either in the -- cache or in the index files => the required unit can -- not be located. Source_Filename := LexTokenManager.Null_String; Actual_Unit_Type := ContextManager.InvalidUnit; end if; else -- The required unit has been found in the cache and no more -- local definition of the required unit can exist => the -- answer is the value stored in the cache. 
Source_Filename := Source_Filename_From_Cache; Actual_Unit_Type := Actual_Unit_Type_From_Cache; end if; Found := Found_From_Cache or Found_From_File; if Found then Trace (S => Source_Filename, M => "IndexManager.Lookup Source_Filename is:"); end if; --# accept F, 33, Components, "Expect Components unused" & --# F, 33, Dummy_Aux_Index_Unit, "Expect Dummy_Aux_Index_Unit unused" & --# F, 33, Dummy_Source_Position, "Expect Dummy_Source_Position unused"; end Look_Up; procedure Look_Up_Components (Required_Unit : in LexTokenLists.Lists; Components : out Component_Lists) --# global in CommandLineData.Content; --# in out Cache.The_Unit_Hash; --# in out ErrorHandler.Error_Context; --# in out Index_Table_P.Fatal_Error; --# in out Index_Table_P.Index_Table; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Cache.The_Unit_Hash, --# Components, --# ErrorHandler.Error_Context, --# Index_Table_P.Index_Table, --# LexTokenManager.State, --# SPARK_IO.File_Sys from Cache.The_Unit_Hash, --# CommandLineData.Content, --# ErrorHandler.Error_Context, --# Index_Table_P.Index_Table, --# LexTokenManager.State, --# Required_Unit, --# SPARK_IO.File_Sys & --# Index_Table_P.Fatal_Error from *, --# Cache.The_Unit_Hash, --# CommandLineData.Content, --# ErrorHandler.Error_Context, --# Index_Table_P.Index_Table, --# LexTokenManager.State, --# Required_Unit, --# SPARK_IO.File_Sys; is Possible_Unit_Types : constant ContextManager.UnitTypeSets := ContextManager.UnitTypeSets'(ContextManager.InvalidUnit => True, others => False); Source_Filename : LexTokenManager.Lex_String; Actual_Unit_Type : ContextManager.UnitTypes; Components_From_Cache : Component_Lists; Components_From_File : Component_Lists; Found_From_Cache : Boolean; Found_From_File : Boolean; Need_To_Look_In_File : Boolean; Index_Filename_From_Cache : LexTokenManager.Lex_String; Index_Filename : LexTokenManager.Lex_String; File_Type : Entry_Types; Dummy_Aux_Index_Unit : LexTokenLists.Lists; Dummy_Source_Position : File_Position; begin if CommandLineData.Content.Debug.File_Names then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "INDEX MANAGER LOOKING UP COMPONENTS FOR ", Stop => 0); LexTokenLists.Print_List (File => SPARK_IO.Standard_Output, List => Required_Unit); SPARK_IO.New_Line (File => SPARK_IO.Standard_Output, Spacing => 1); end if; -- Try to find the required unit in the cache. Cache.Get_Components (Required_Unit => Required_Unit, Components => Components_From_Cache, Index_Filename => Index_Filename_From_Cache, Found => Found_From_Cache); if Found_From_Cache then -- The required unit has been found in the cache => check if -- a more local definition of the required unit may exist in -- a not yet parsed auxiliary index file. --# accept F, 10, Dummy_Aux_Index_Unit, "Ineffective assignment here OK" & --# F, 10, Dummy_Source_Position, "Ineffective assignment here OK"; Index_Table_P.Get_Next_Index_File (Unit => Required_Unit, Top_Filename => Index_Filename_From_Cache, Filename => Index_Filename, File_Type => File_Type, Aux_Index_Unit => Dummy_Aux_Index_Unit, Position => Dummy_Source_Position); --# end accept; Need_To_Look_In_File := File_Type = Aux_Index; if CommandLineData.Content.Debug.File_Names then -- Debug -- Index_Table_P.Get_Next_Index may return an yet -- unparsed auxiliary index or super index. 
SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "INDEX FILENAME NOT YET PARSED : ", Stop => 0); Index_Table_P.Debug_Put_E_Str (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Index_Filename), New_Line => True); end if; else -- The required unit is not in the cache => need to lookup -- in the index files. Index_Filename_From_Cache := LexTokenManager.Null_String; Need_To_Look_In_File := True; end if; if Need_To_Look_In_File then -- The required unit has not been found in the cache => need -- to lookup in the index files -- -- or -- -- a more local definition of the required unit may exist in -- a not yet parsed index file => need to lookup in the more -- local index files only. --# accept F, 10, Source_Filename, "Ineffective assignment here OK" & --# F, 10, Actual_Unit_Type, "Ineffective assignment here OK"; Look_In_File (Lookup_Components => True, Required_Unit => Required_Unit, Possible_Unit_Types => Possible_Unit_Types, Top_Filename => Index_Filename_From_Cache, Returned_Filename => Source_Filename, Actual_Unit_Type => Actual_Unit_Type, Components => Components_From_File, Found => Found_From_File); --# end accept; if Found_From_File then -- The required unit has been found in the index files or -- a more local definition of the required unit has been -- found in a previously unparsed auxiliary index file => -- the answer is the value coming from the index file. Components := Components_From_File; elsif Found_From_Cache then -- A more local definition of the required unit has not -- been found in the index files => the answer is the -- value stored in the cache. Components := Components_From_Cache; else -- The required unit has not been found either in the -- cache or in the index files => the required unit can -- not be located. Components := Component_Lists'(others => LexTokenLists.Null_List); end if; else -- The required unit has been found in the cache and no more -- local definition of the required unit can exist => the -- answer is the value stored in the cache. 
Components := Components_From_Cache; end if; --# accept F, 33, Source_Filename, "Expect Actual_Unit_Type unused" & --# F, 33, Actual_Unit_Type, "Expect Actual_Unit_Type unused" & --# F, 33, Dummy_Aux_Index_Unit, "Expect Dummy_Aux_Index_Unit unused" & --# F, 33, Dummy_Source_Position, "Expect Dummy_Source_Position unused"; end Look_Up_Components; procedure Add_Unit (Unit : in LexTokenLists.Lists; Unit_Types : in ContextManager.UnitTypes; Source_Filename : in E_Strings.T) --# global in CommandLineData.Content; --# in Index_Table_P.Index_Table; --# in out Cache.The_Unit_Hash; --# in out ErrorHandler.Error_Context; --# in out Index_Table_P.Fatal_Error; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Cache.The_Unit_Hash, --# ErrorHandler.Error_Context, --# Index_Table_P.Fatal_Error from *, --# Cache.The_Unit_Hash, --# Index_Table_P.Index_Table, --# LexTokenManager.State, --# Source_Filename, --# Unit, --# Unit_Types & --# LexTokenManager.State from *, --# Source_Filename & --# SPARK_IO.File_Sys from *, --# Cache.The_Unit_Hash, --# CommandLineData.Content, --# ErrorHandler.Error_Context, --# Index_Table_P.Index_Table, --# LexTokenManager.State, --# Source_Filename, --# Unit, --# Unit_Types; is begin Cache.Add_Unit (Unit => Unit, Unit_Types => Unit_Types, Source_Filename => Source_Filename, Index_Filename => LexTokenManager.Null_String, Index_Position => File_Position'(Line => 1, Col => 1)); end Add_Unit; procedure List_Index_File (Report_File : in SPARK_IO.File_Type) --# global in CommandLineData.Content; --# in Index_Table_P.Index_Table; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Index_Table_P.Index_Table, --# LexTokenManager.State, --# Report_File, --# XMLReport.State & --# XMLReport.State from *, --# CommandLineData.Content, --# Index_Table_P.Index_Table; is begin Index_Table_P.List_Index_File (Report_File => Report_File); end List_Index_File; end IndexManager; spark-2012.0.deb/examiner/sem-walk_expression_p-put_exp_record.adb0000644000175000017500000000736011753202336024265 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Walk_Expression_P) procedure Put_Exp_Record (R : in Sem.Exp_Record) is --# hide Put_Exp_Record; F : SPARK_IO.File_Type; begin F := SPARK_IO.Standard_Output; SPARK_IO.Put_String (F, "Sort => ", 0); SPARK_IO.Put_Line (F, Exp_Record_Sort'Image (R.Sort), 0); SPARK_IO.Put_String (F, "Type_Symbol => ", 0); E_Strings.Put_Line (File => F, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Dictionary.GetSimpleName (R.Type_Symbol))); SPARK_IO.Put_String (F, "Other_Symbol => ", 0); E_Strings.Put_Line (File => F, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Dictionary.GetSimpleName (R.Other_Symbol))); SPARK_IO.Put_String (F, "String_Value => ", 0); E_Strings.Put_Line (File => F, E_Str => LexTokenManager.Lex_String_To_String (R.String_Value)); SPARK_IO.Put_String (F, "Value => ", 0); E_Strings.Put_Line (File => F, E_Str => Maths.ValueToString (R.Value)); SPARK_IO.Put_String (F, "Arg_List_Found => ", 0); SPARK_IO.Put_Line (F, Boolean'Image (R.Arg_List_Found), 0); SPARK_IO.Put_String (F, "Range_RHS => ", 0); E_Strings.Put_Line (File => F, E_Str => Maths.ValueToString (R.Range_RHS)); SPARK_IO.Put_String (F, "Param_Count =>", 0); SPARK_IO.Put_Line (F, Natural'Image (R.Param_Count), 0); SPARK_IO.Put_Line (F, "Param_List => (...not printed...)", 0); SPARK_IO.Put_String (F, "Is_Static => ", 0); SPARK_IO.Put_Line (F, Boolean'Image (R.Is_Static), 0); SPARK_IO.Put_String (F, "Is_Constant => ", 0); SPARK_IO.Put_Line (F, Boolean'Image (R.Is_Constant), 0); SPARK_IO.Put_String (F, "Is_ARange => ", 0); SPARK_IO.Put_Line (F, Boolean'Image (R.Is_ARange), 0); SPARK_IO.Put_String (F, "Variable_Symbol => ", 0); E_Strings.Put_Line (File => F, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Dictionary.GetSimpleName (R.Variable_Symbol))); SPARK_IO.Put_String (F, "Is_AVariable => ", 0); SPARK_IO.Put_Line (F, Boolean'Image (R.Is_AVariable), 0); SPARK_IO.Put_String (F, "Is_An_Entire_Variable => ", 0); SPARK_IO.Put_Line (F, Boolean'Image (R.Is_An_Entire_Variable), 0); SPARK_IO.Put_String (F, "Errors_In_Expression => ", 0); SPARK_IO.Put_Line (F, Boolean'Image (R.Errors_In_Expression), 0); SPARK_IO.Put_String (F, "Has_Operators => ", 0); SPARK_IO.Put_Line (F, Boolean'Image (R.Has_Operators), 0); SPARK_IO.Put_String (F, "Stream_Symbol => ", 0); E_Strings.Put_Line (File => F, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Dictionary.GetSimpleName (R.Stream_Symbol))); end Put_Exp_Record; spark-2012.0.deb/examiner/dictionary-add_generic_formal_parameter_local.adb0000644000175000017500000000554011753202336026066 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Dictionary) procedure Add_Generic_Formal_Parameter_Local (Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; The_Generic_Unit : in RawDict.Generic_Unit_Info_Ref; Type_Mark : in RawDict.Type_Info_Ref; The_Object : in RawDict.Constant_Info_Ref; The_Generic_Parameter : out RawDict.Generic_Parameter_Info_Ref) is Previous : RawDict.Generic_Parameter_Info_Ref; begin RawDict.Create_Generic_Parameter (Owning_Generic => The_Generic_Unit, Type_Mark => Type_Mark, Object => The_Object, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, The_Generic_Parameter => The_Generic_Parameter); if The_Object /= RawDict.Null_Constant_Info_Ref then -- Set a back pointer from the constant so that we know that we created it to implement a generic object -- parameter and know which generic parameter the constant belongs to RawDict.Set_Constant_Associated_Generic_Parameter (The_Constant => The_Object, The_Generic_Parameter => The_Generic_Parameter); end if; Previous := RawDict.Get_Generic_Unit_Last_Generic_Parameter (The_Generic_Unit => The_Generic_Unit); if Previous = RawDict.Null_Generic_Parameter_Info_Ref then RawDict.Set_Generic_Unit_First_Generic_Parameter (The_Generic_Unit => The_Generic_Unit, The_Generic_Parameter => The_Generic_Parameter); else RawDict.Set_Next_Generic_Parameter (The_Generic_Parameter => Previous, Next => The_Generic_Parameter); end if; RawDict.Set_Generic_Unit_Last_Generic_Parameter (The_Generic_Unit => The_Generic_Unit, The_Generic_Parameter => The_Generic_Parameter); end Add_Generic_Formal_Parameter_Local; spark-2012.0.deb/examiner/dictionary-get_record_component.adb0000644000175000017500000000630611753202336023270 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Dictionary) function Get_Record_Component (The_Record_Type : RawDict.Type_Info_Ref; Number : Positive) return Symbol is Result : Symbol; -------------------------------------------------------------------------------- function Get_Extended_Record_Component (The_Record_Type : RawDict.Type_Info_Ref; Number : Positive) return Symbol --# global in Dict; is Component : Iterator; Current_Record : RawDict.Type_Info_Ref; Low, High : Positive; begin Current_Record := The_Record_Type; High := Get_Number_Of_Extended_Components (The_Record_Type => Current_Record); loop Low := (High - Get_Number_Of_Actual_Components (The_Record_Type => Current_Record)) + 1; exit when Number in Low .. 
High; -- found which bit of the record model holds field -- move down Inherit chain Current_Record := Get_Type (The_Symbol => CurrentSymbol (First_Record_Component (Type_Mark => Current_Record))); High := Low - 1; end loop; -- on exit, Current_Record indicates the record that holds field number is the -- range Low .. High Component := First_Record_Component (Type_Mark => Current_Record); -- if type is extended then the above returns the Inherit field which we don't -- want so we skip it thus: if Type_Is_Extended_Tagged (Type_Mark => Current_Record) then Component := NextSymbol (Component); end if; -- now loop through to find desired field for I in Positive range Low .. Number - 1 loop Component := NextSymbol (Component); end loop; return CurrentSymbol (Component); end Get_Extended_Record_Component; begin -- Get_Record_Component if Type_Is_Extended_Tagged (Type_Mark => The_Record_Type) then Result := Get_Extended_Record_Component (The_Record_Type => The_Record_Type, Number => Number); else Result := Get_Non_Extended_Record_Component (The_Record_Type => The_Record_Type, Number => Number); end if; return Result; end Get_Record_Component; spark-2012.0.deb/examiner/indexmanager.ads0000644000175000017500000001665511753202336017421 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- IndexManager -- -- Purpose: -- -- The goal of this package is to manage the link between a package -- name or a procedure/function name and the filename that contains -- the subunit. -- -------------------------------------------------------------------------------- with ContextManager; with ExaminerConstants; with E_Strings; with LexTokenLists; with LexTokenManager; with SPARK_IO; use type SPARK_IO.File_Status; use type LexTokenLists.Lists; use type LexTokenManager.Str_Comp_Result; --# inherit Ada.Characters.Latin_1, --# CommandLineData, --# ContextManager, --# ErrorHandler, --# ExaminerConstants, --# E_Strings, --# FileSystem, --# LexTokenLists, --# LexTokenManager, --# ScreenEcho, --# SPARK_IO, --# SystemErrors, --# XMLReport; package IndexManager --# own State; --# initializes State; is subtype Component_Index is Integer range 1 .. 
ExaminerConstants.MaxIndexComponents; type Component_Lists is array (Component_Index) of LexTokenLists.Lists; procedure Look_Up (Required_Unit : in LexTokenLists.Lists; Possible_Unit_Types : in ContextManager.UnitTypeSets; Source_Filename : out LexTokenManager.Lex_String; Actual_Unit_Type : out ContextManager.UnitTypes; Found : out Boolean); --# global in CommandLineData.Content; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out State; --# derives Actual_Unit_Type, --# ErrorHandler.Error_Context, --# Found, --# LexTokenManager.State, --# Source_Filename, --# SPARK_IO.File_Sys, --# State from CommandLineData.Content, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Possible_Unit_Types, --# Required_Unit, --# SPARK_IO.File_Sys, --# State; procedure Look_Up_Components (Required_Unit : in LexTokenLists.Lists; Components : out Component_Lists); --# global in CommandLineData.Content; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out State; --# derives Components, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# State from CommandLineData.Content, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Required_Unit, --# SPARK_IO.File_Sys, --# State; procedure Add_Unit (Unit : in LexTokenLists.Lists; Unit_Types : in ContextManager.UnitTypes; Source_Filename : in E_Strings.T); --# global in CommandLineData.Content; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out State; --# derives ErrorHandler.Error_Context, --# State from *, --# LexTokenManager.State, --# Source_Filename, --# State, --# Unit, --# Unit_Types & --# LexTokenManager.State from *, --# Source_Filename & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Source_Filename, --# State, --# Unit, --# Unit_Types; procedure List_Index_File (Report_File : in SPARK_IO.File_Type); --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in State; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# LexTokenManager.State, --# Report_File, --# State, --# XMLReport.State & --# XMLReport.State from *, --# CommandLineData.Content, --# State; private type Entry_Types is ( Super_Index, Aux_Index, Main_P, P_Spec, -- GenOption -- GenericDec, -- See comment in body P_Bodi, Subunit, Component_List, Invalid_Entry_Type); type Library_Manager_Errors is ( ES_FileLocation, ES_IsIn, ES_UnitEntry, EW_Index, EW_UnexpectedSuper, ES_Comment, ES_Are, ES_Components, EW_IllegalUnitName, EW_Aux, EW_Super, ES_Recursion, EF_Contradiction, EW_Duplicate, EW_DuplicateAux); type File_Position is record Line : Positive; Col : Positive; end record; end IndexManager; spark-2012.0.deb/examiner/sem-walk_expression_p-up_wf_name_argument_list.adb0000644000175000017500000002024711753202336026317 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Walk_Expression_P) procedure Up_Wf_Name_Argument_List (Node : in STree.SyntaxNode; E_Stack : in out Exp_Stack.Exp_Stack_Type; Heap_Param : in out Lists.List_Heap) is Type_Info : Sem.Exp_Record; Sym : Dictionary.Symbol; All_Found : Boolean; Ptr : Lists.List; Unused_Value : Maths.Value; Next_Node : STree.SyntaxNode; ----------------------------------------------------------------------- procedure Check_Names_Are_All_There (Node_Pos : in LexTokenManager.Token_Position; Fun_Sym : in Dictionary.Symbol; List : in Lists.List; Heap_Param : in Lists.List_Heap; All_There : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives All_There from Dictionary.Dict, --# Fun_Sym, --# Heap_Param, --# LexTokenManager.State, --# List & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Fun_Sym, --# Heap_Param, --# LexTokenManager.State, --# List, --# Node_Pos, --# SPARK_IO.File_Sys; is It : Dictionary.Iterator; Param_Str : LexTokenManager.Lex_String; begin All_There := True; It := Dictionary.FirstSubprogramParameter (Fun_Sym); while not Dictionary.IsNullIterator (It) loop Param_Str := Dictionary.GetSimpleName (Dictionary.CurrentSymbol (It)); if not Lists.Is_Member (Heap => Heap_Param, The_List => List, Str => Param_Str) then All_There := False; ErrorHandler.Semantic_Error (Err_Num => 23, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Param_Str); end if; It := Dictionary.NextSymbol (It); end loop; end Check_Names_Are_All_There; begin -- Up_Wf_Name_Argument_List Exp_Stack.Pop (Item => Type_Info, Stack => E_Stack); Sym := Type_Info.Other_Symbol; case Type_Info.Sort is when Sem.Is_Function => Next_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Next_Node = named_argument_association OR positional_argument_association OR -- annotation_named_argument_association OR annotation_positional_argument_association if STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.positional_argument_association or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_positional_argument_association then -- ASSUME Next_Node = positional_argument_association OR annotation_positional_argument_association if Type_Info.Param_Count = Dictionary.GetNumberOfSubprogramParameters (Sym) then Type_Info.Sort := Sem.Is_Object; else Type_Info := Unknown_Symbol_Record; ErrorHandler.Semantic_Error (Err_Num => 3, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => Dictionary.GetSimpleName (Sym)); end if; elsif STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.named_argument_association or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_named_argument_association then -- ASSUME Next_Node = named_argument_association OR annotation_named_argument_association 
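-- Every formal parameter of the function must be named in the association
-- list: Check_Names_Are_All_There reports semantic error 23 for each formal
-- that is missing, in which case the expression record is downgraded to
-- Unknown_Symbol_Record below.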
Check_Names_Are_All_There (Node_Pos => STree.Node_Position (Node => Node), Fun_Sym => Sym, List => Type_Info.Param_List, Heap_Param => Heap_Param, All_There => All_Found); Ptr := Type_Info.Param_List; Dispose_Of_Name_List (List => Ptr, Heap_Param => Heap_Param); Type_Info.Param_List := Ptr; if All_Found then Type_Info.Sort := Sem.Is_Object; else Type_Info := Unknown_Symbol_Record; end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = named_argument_association OR positional_argument_association OR " & "annotation_named_argument_association OR annotation_positional_argument_association in " & "Up_Wf_Name_Argument_List"); end if; when Sem.Is_Object => if Type_Info.Param_Count = Dictionary.GetNumberOfDimensions (Type_Info.Type_Symbol) then Type_Info.Type_Symbol := Dictionary.GetArrayComponent (Type_Info.Type_Symbol); if STree.Syntax_Node_Type (Node => Node) = SP_Symbols.name_argument_list then Type_Info.Is_An_Entire_Variable := False; end if; Type_Info.Is_Constant := False; else Type_Info := Unknown_Symbol_Record; ErrorHandler.Semantic_Error (Err_Num => 93, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => Dictionary.GetSimpleName (Sym)); end if; when Sem.Type_Result => -- Must be a type conversion expression. -- If the type is scalar, and the argument is static, then -- we can do a ConstraintCheck here in SPARK95 or 2005 modes. -- Type conversions are never considered static in SPARK83, -- so we've no business checking them here in SPARK83 mode. if CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83 and then Type_Info.Is_Static and then Dictionary.TypeIsScalar (Type_Info.Type_Symbol) then --# accept F, 10, Unused_Value, "Unused_Value not needed here."; Sem.Constraint_Check (Val => Type_Info.Value, New_Val => Unused_Value, Is_Annotation => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_name_argument_list, Typ => Type_Info.Type_Symbol, Position => STree.Node_Position (Node => Node)); --# end accept; end if; when others => null; end case; Exp_Stack.Push (X => Type_Info, Stack => E_Stack); --# accept F, 33, Unused_Value, "Unused_Value not needed here."; end Up_Wf_Name_Argument_List; spark-2012.0.deb/examiner/sem-walk_expression_p-primary_type_from_context.adb0000644000175000017500000000610711753202336026554 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Walk_Expression_P) function Primary_Type_From_Context (Node : STree.SyntaxNode; T_Stack : Type_Context_Stack.T_Stack_Type) return Dictionary.Symbol is New_Context_Type : Dictionary.Symbol; Parent, Op_Node : STree.SyntaxNode; begin Parent := STree.Parent_Node (Current_Node => Node); -- ASSUME Parent = factor OR annotation_factor SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.factor or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.annotation_factor, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Parent = factor OR annotation_factor in Primary_Type_From_Context"); Op_Node := STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Parent)); -- ASSUME Op_Node = primary OR annotation_primary OR double_star OR NULL -- The context is unchanged for a primary, except in the case where -- this node is the right-hand operand of an exponentiation operator, -- in which case the context is Integer. if STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.double_star then -- ASSUME Op_Node = double_star if STree.Next_Sibling (Current_Node => Op_Node) = Node then -- It's a ** operator and this primary node is the RHS operand New_Context_Type := Dictionary.GetPredefinedIntegerType; else New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); end if; elsif Op_Node = STree.NullNode or else STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.primary or else STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.annotation_primary then -- ASSUME Op_Node = primary OR annotation_primary OR NULL New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); else New_Context_Type := Dictionary.NullSymbol; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Op_Node = primary OR annotation_primary OR double_star OR NULL in Primary_Type_From_Context"); end if; return New_Context_Type; end Primary_Type_From_Context; spark-2012.0.deb/examiner/dag.idx0000644000175000017500000000060511753202337015514 0ustar eugeneugendag specification is in dag.ads dag body is in dag.adb dag.buildannotationexpndag subunit is in dag-buildannotationexpndag.adb dag.buildexpndag subunit is in dag-buildexpndag.adb dag.buildgraph subunit is in dag-buildgraph.adb dag.loopcontext subunit is in dag-loopcontext.adb dag.type_constraint subunit is in dag-type_constraint.adb dag.substitutions subunit is in dag-substitutions.adb spark-2012.0.deb/examiner/main.smf0000644000175000017500000000036111753202337015705 0ustar eugeneugen@support.smf @sparklex.smf @spparser.smf @dict.smf @errorhandler.smf @flows.smf @declarations.smf @dag.smf @vcg.smf @sem.smf @../lib/spark/current/spark.smf sli.adb sli-io.adb -vcg sli-xref.shb mainloop.adb examiner.adb -vcg casing.adb -vcg ././@LongLink0000000000000000000000000000016500000000000011567 Lustar rootrootspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_derived.adbspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaratio0000644000175000017500000002421611753202336033125 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as 
published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SLI; separate (Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Full_Type_Declaration) procedure Wf_Derived (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Ident_Node : in STree.SyntaxNode; Dec_Loc : in LexTokenManager.Token_Position; The_Heap : in out Heap.HeapRecord) is Base_Type_Node : STree.SyntaxNode; Range_Constraint_Node : STree.SyntaxNode; Base_Type_Symbol : Dictionary.Symbol; New_Type_Symbol : Dictionary.Symbol; Left_Exp_Type : Exp_Record; New_First, New_Last : LexTokenManager.Lex_String := LexTokenManager.Null_String; New_Digits : LexTokenManager.Lex_String; Unwanted_Seq : SeqAlgebra.Seq; Unused_Component_Data : ComponentManager.ComponentData; Errors : Boolean := False; begin Base_Type_Node := Child_Node (Current_Node => Node); -- ASSUME Base_Type_Node = type_mark SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Base_Type_Node) = SP_Symbols.type_mark, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Base_Type_Node = type_mark in Wf_Derived"); Range_Constraint_Node := Next_Sibling (Current_Node => Base_Type_Node); -- ASSUME Range_Constraint_Node = range_constraint OR floating_point_constraint OR NULL if Range_Constraint_Node = STree.NullNode or else Syntax_Node_Type (Node => Range_Constraint_Node) = SP_Symbols.range_constraint then -- ASSUME Range_Constraint_Node = range_constraint OR NULL Wf_Type_Mark (Node => Base_Type_Node, Current_Scope => Scope, Context => Dictionary.ProgramContext, Type_Sym => Base_Type_Symbol); if Dictionary.IsType (Base_Type_Symbol) then if Dictionary.DefinedInPackageStandard (Base_Type_Symbol) and then (Dictionary.TypeIsInteger (Base_Type_Symbol) or else Dictionary.TypeIsFloatingPoint (Base_Type_Symbol)) then if Range_Constraint_Node = STree.NullNode then -- ASSUME Range_Constraint_Node = NULL -- No range constraint, so pick up First and Last of new type -- directly from those of base type. New_First := Dictionary.GetScalarAttributeValue (Base => False, Name => LexTokenManager.First_Token, TypeMark => Base_Type_Symbol); New_Last := Dictionary.GetScalarAttributeValue (Base => False, Name => LexTokenManager.Last_Token, TypeMark => Base_Type_Symbol); elsif Syntax_Node_Type (Node => Range_Constraint_Node) = SP_Symbols.range_constraint then -- ASSUME Range_Constraint_Node = range_constraint -- Range constraint specified, so we need to walk and evaluate those -- expressions. 
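-- The bounds are obtained by walking the range expression with the base type
-- as the type context; the result must be static and of the same class
-- (integer or floating point) as the base type, otherwise error 45 or 38
-- respectively is reported and the derived type is not added to the dictionary.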
SeqAlgebra.CreateSeq (The_Heap, Unwanted_Seq); ComponentManager.Initialise (Unused_Component_Data); --# accept Flow, 10, Unused_Component_Data, "Expected ineffective assignment"; Walk_Expression_P.Walk_Expression (Exp_Node => Range_Constraint_Node, Scope => Scope, Type_Context => Base_Type_Symbol, Context_Requires_Static => True, Ref_Var => Unwanted_Seq, Result => Left_Exp_Type, Component_Data => Unused_Component_Data, The_Heap => The_Heap); --# end accept; SeqAlgebra.DisposeOfSeq (The_Heap, Unwanted_Seq); Maths.StorageRep (Left_Exp_Type.Value, New_First); Maths.StorageRep (Left_Exp_Type.Range_RHS, New_Last); if Left_Exp_Type.Is_Static then if (Dictionary.TypeIsInteger (Base_Type_Symbol) and then Dictionary.TypeIsInteger (Left_Exp_Type.Type_Symbol)) or else (Dictionary.TypeIsFloatingPoint (Base_Type_Symbol) and then Dictionary.TypeIsFloatingPoint (Left_Exp_Type.Type_Symbol)) then Errors := Left_Exp_Type.Errors_In_Expression; else ErrorHandler.Semantic_Error (Err_Num => 38, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Range_Constraint_Node), Id_Str => LexTokenManager.Null_String); Errors := True; end if; else -- not static ErrorHandler.Semantic_Error (Err_Num => 45, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Range_Constraint_Node), Id_Str => LexTokenManager.Null_String); Errors := True; end if; end if; -- If the base type is floating point, then inherit 'Digits from it. if Dictionary.TypeIsFloatingPoint (Base_Type_Symbol) then New_Digits := Dictionary.GetScalarAttributeValue (Base => False, Name => LexTokenManager.Digits_Token, TypeMark => Base_Type_Symbol); else New_Digits := LexTokenManager.Null_String; end if; Check_Subtype_Against_Basetype_Bounds (Base_Type_Sym => Base_Type_Symbol, Subtype_First => New_First, Subtype_Last => New_Last, Ident_Node_Pos => Node_Position (Node => Base_Type_Node), Range_Node_Pos => Node_Position (Node => Range_Constraint_Node), Errors => Errors); if not Errors then if Dictionary.TypeIsInteger (Base_Type_Symbol) then Dictionary.Add_Integer_Type (Name => Node_Lex_String (Node => Ident_Node), Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Dec_Loc, End_Position => Dec_Loc), Lower => New_First, Upper => New_Last, Scope => Scope, Context => Dictionary.ProgramContext, The_Type => New_Type_Symbol); else -- must be floating point Dictionary.Add_Floating_Point_Type (Name => Node_Lex_String (Node => Ident_Node), Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Dec_Loc, End_Position => Dec_Loc), Lower => New_First, Upper => New_Last, Error_Bound => New_Digits, Scope => Scope, Context => Dictionary.ProgramContext, The_Type => New_Type_Symbol); end if; STree.Add_Node_Symbol (Node => Ident_Node, Sym => New_Type_Symbol); Dictionary.SetBaseType (TypeMark => New_Type_Symbol, BaseType => Base_Type_Symbol); if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Symbol (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Ident_Node, Symbol => New_Type_Symbol, Is_Declaration => True); end if; end if; else -- Not a predefined integer or floating point type, so... ErrorHandler.Semantic_Error (Err_Num => 871, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Base_Type_Node), Id_Str => LexTokenManager.Null_String); end if; else -- Not a type mark, so... 
ErrorHandler.Semantic_Error (Err_Num => 871, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Base_Type_Node), Id_Str => LexTokenManager.Null_String); end if; elsif Syntax_Node_Type (Node => Range_Constraint_Node) = SP_Symbols.floating_point_constraint then -- ASSUME Range_Constraint_Node = floating_point_constraint ErrorHandler.Semantic_Error (Err_Num => 608, Reference => 9, Position => Node_Position (Node => Ident_Node), Id_Str => LexTokenManager.Null_String); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Range_Constraint_Node = range_constraint OR floating_point_constraint OR NULL in Wf_Derived"); end if; end Wf_Derived; spark-2012.0.deb/examiner/sem-wf_generic_subprogram_instantiation.adb0000644000175000017500000006660411753202336025034 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) procedure Wf_Generic_Subprogram_Instantiation (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) is type Kind_Of_Generic_T is (Generic_Procedure, Generic_Function); Generic_Sym, Instantiation_Sym : Dictionary.Symbol; Errors_Found, Ok : Boolean; Next_Node, Instantiation_Ident_Node, Constraint_Node : STree.SyntaxNode; Is_Overriding : Boolean := False; Kind_Of_Generic : Kind_Of_Generic_T; ---------------------------------------------------------- -- Checks that the identifier after "is new" represents a visible generic unit of the -- appropriate kind. Returns symbol of this generic unit if legal or a null symbol -- otherwise. 
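-- As an illustration (all names here are invented), an instantiation
-- such as:
--
--    procedure Swap_Integers is new Generic_Swap (Item => Integer);
--
-- causes Check_Generic to be applied to the dotted_simple_name
-- "Generic_Swap", which must denote a visible generic procedure (or a
-- visible generic function when a function is being instantiated).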
procedure Check_Generic (Ident_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Kind : in Kind_Of_Generic_T; Generic_Sym : out Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node, --# Kind, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# Generic_Sym from CommandLineData.Content, --# Dictionary.Dict, --# Ident_Node, --# Kind, --# LexTokenManager.State, --# Scope, --# STree.Table & --# STree.Table from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Ident_Node, --# LexTokenManager.State, --# Scope; --# pre Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.dotted_simple_name; --# post STree.Table = STree.Table~; is Current_Node : STree.SyntaxNode; begin Current_Node := Last_Child_Of (Start_Node => Ident_Node); -- ASSUME Current_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Current_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = identifier in Check_Generic"); Generic_Sym := Dictionary.LookupItem (Name => Node_Lex_String (Node => Current_Node), Scope => Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); loop --# assert STree.Table = STree.Table~; if Dictionary.Is_Null_Symbol (Generic_Sym) then ErrorHandler.Semantic_Error (Err_Num => 1, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Current_Node), Id_Str => Node_Lex_String (Node => Current_Node)); exit; end if; STree.Set_Node_Lex_String (Sym => Generic_Sym, Node => Current_Node); Current_Node := Parent_Node (Current_Node => Current_Node); -- ASSUME Current_Node = dotted_simple_name SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Current_Node) = SP_Symbols.dotted_simple_name, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = dotted_simple_name in Check_Generic"); exit when Current_Node = Ident_Node; if not Dictionary.IsPackage (Generic_Sym) then ErrorHandler.Semantic_Error (Err_Num => 18, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Current_Node), Id_Str => Node_Lex_String (Node => Current_Node)); Generic_Sym := Dictionary.NullSymbol; exit; end if; Current_Node := Next_Sibling (Current_Node => Current_Node); -- ASSUME Current_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Current_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = identifier in Check_Generic"); Generic_Sym := Dictionary.LookupSelectedItem (Prefix => Generic_Sym, Selector => Node_Lex_String (Node => Current_Node), Scope => Scope, Context => Dictionary.ProgramContext); end loop; -- Validate generic unit symbol if not Dictionary.Is_Null_Symbol (Generic_Sym) then -- Kludge Alert -- If I am honest, I don't really understand why this is needed. We have looked -- up the generic in program context so I would expect to get the Ada function -- symbol. A test case showed otherwise, hence this is in for now. 
PNA 13/1/4 if Dictionary.IsImplicitProofFunction (Generic_Sym) then Generic_Sym := Dictionary.GetAdaFunction (Generic_Sym); end if; if Dictionary.Is_Generic_Subprogram (The_Symbol => Generic_Sym) then if Kind = Generic_Procedure and then Dictionary.IsFunction (Generic_Sym) then -- wrong sort of subprogram ErrorHandler.Semantic_Error (Err_Num => 631, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Current_Node), Id_Str => LexTokenManager.Null_String); Generic_Sym := Dictionary.NullSymbol; elsif Kind = Generic_Function and then Dictionary.IsProcedure (Generic_Sym) then -- wrong sort of subprogram ErrorHandler.Semantic_Error (Err_Num => 632, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Current_Node), Id_Str => LexTokenManager.Null_String); Generic_Sym := Dictionary.NullSymbol; elsif not Dictionary.SubprogramSignatureIsWellformed (Dictionary.IsAbstract, Generic_Sym) then -- right sort of subprogram, but generic declaration had errors ErrorHandler.Semantic_Warning (Err_Num => 390, Position => Node_Position (Node => Current_Node), Id_Str => LexTokenManager.Null_String); Generic_Sym := Dictionary.NullSymbol; end if; else -- not a generic subprogram at all ErrorHandler.Semantic_Error_Sym (Err_Num => 630, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Current_Node), Sym => Generic_Sym, Scope => Scope); Generic_Sym := Dictionary.NullSymbol; end if; end if; end Check_Generic; ---------------------------------------------------------- procedure Wf_Generic_Actual_Part (Actual_Part_Node : in STree.SyntaxNode; Instantiation_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord; Error_Found : out Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# Actual_Part_Node, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Instantiation_Sym, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from Actual_Part_Node, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Instantiation_Sym, --# LexTokenManager.State, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# Error_Found from Actual_Part_Node, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Instantiation_Sym, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Actual_Part_Node, STree.Table) = SP_Symbols.generic_actual_part; --# post STree.Table = STree.Table~; is separate; -------------------------------------------------------------------- procedure Process_Generic_Procedure_Instantiation (Node : in out STree.SyntaxNode; Scope : in 
Dictionary.Scopes; Instantiation_Ident_Node : out STree.SyntaxNode; Constraint_Node : out STree.SyntaxNode; Ok : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Constraint_Node, --# Instantiation_Ident_Node, --# Node from Node, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# Ok from CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.procedure_specification; --# post Syntax_Node_Type (Instantiation_Ident_Node, STree.Table) = SP_Symbols.identifier and --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.procedure_annotation and --# Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.procedure_constraint; is Formal_Part_Node, Global_Node, Dependency_Node, Declare_Node : STree.SyntaxNode; begin Instantiation_Ident_Node := Child_Node (Current_Node => Node); -- ASSUME Instantiation_Ident_Node = simple_name SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Instantiation_Ident_Node) = SP_Symbols.simple_name, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Instantiation_Ident_Node = simple_name in Process_Generic_Procedure_Instantiation"); Formal_Part_Node := Next_Sibling (Current_Node => Instantiation_Ident_Node); Instantiation_Ident_Node := Child_Node (Current_Node => Instantiation_Ident_Node); -- ASSUME Instantiation_Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Instantiation_Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Instantiation_Ident_Node = identifier in Process_Generic_Procedure_Instantiation"); Check_Valid_Ident (Ident_Node => Instantiation_Ident_Node, Current_Scope => Scope, Ok => Ok); -- ASSUME Formal_Part_Node = formal_part OR NULL if Syntax_Node_Type (Node => Formal_Part_Node) = SP_Symbols.formal_part then -- ASSUME Formal_Part_Node = formal_part Ok := False; ErrorHandler.Semantic_Error (Err_Num => 628, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Formal_Part_Node), Id_Str => Node_Lex_String (Node => Instantiation_Ident_Node)); elsif Formal_Part_Node /= STree.NullNode then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Formal_Part_Node = formal_part OR NULL in Process_Generic_Procedure_Instantiation"); end if; Node := Next_Sibling (Current_Node => Node); -- ASSUME Node = procedure_annotation SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Node) = SP_Symbols.procedure_annotation, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = procedure_annotation in Process_Generic_Procedure_Instantiation"); Get_Subprogram_Anno_Key_Nodes (Node => Node, Global_Node => Global_Node, Dependency_Node => Dependency_Node, Declare_Node => Declare_Node, Constraint_Node => Constraint_Node); -- ASSUME Global_Node = moded_global_definition OR NULL -- ASSUME Dependency_Node = dependency_relation OR NULL -- ASSUME Declare_Node = declare_annotation OR NULL if Syntax_Node_Type (Node => Global_Node) = SP_Symbols.moded_global_definition or else Syntax_Node_Type (Node => Dependency_Node) = SP_Symbols.dependency_relation or else Syntax_Node_Type 
(Node => Declare_Node) = SP_Symbols.declare_annotation then -- ASSUME Global_Node = moded_global_definition Ok := False; ErrorHandler.Semantic_Error (Err_Num => 626, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Node_Lex_String (Node => Instantiation_Ident_Node)); end if; -- ASSUME Constraint_Node = procedure_constraint SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Constraint_Node) = SP_Symbols.procedure_constraint, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Constraint_Node = procedure_constraint in Process_Generic_Procedure_Instantiation"); end Process_Generic_Procedure_Instantiation; ---------------------------------------------------- procedure Process_Generic_Function_Instantiation (Node : in out STree.SyntaxNode; Scope : in Dictionary.Scopes; Instantiation_Ident_Node : out STree.SyntaxNode; Ok : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# Instantiation_Ident_Node, --# Node from Node, --# STree.Table & --# Ok from CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.simple_name; --# post Syntax_Node_Type (Instantiation_Ident_Node, STree.Table) = SP_Symbols.identifier and --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.function_constraint; is begin Instantiation_Ident_Node := Child_Node (Current_Node => Node); -- ASSUME Instantiation_Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Instantiation_Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Instantiation_Ident_Node = identifier in Process_Generic_Function_Instantiation"); Check_Valid_Ident (Ident_Node => Instantiation_Ident_Node, Current_Scope => Scope, Ok => Ok); Node := Next_Sibling (Current_Node => Node); -- ASSUME Node = function_constraint SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Node) = SP_Symbols.function_constraint, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = function_constraint in Process_Generic_Function_Instantiation"); end Process_Generic_Function_Instantiation; begin -- Wf_Generic_Subprogram_Instantiation Next_Node := Child_Node (Current_Node => Node); -- ASSUME Next_Node = overriding_indicator OR procedure_specification OR simple_name if Syntax_Node_Type (Node => Next_Node) = SP_Symbols.overriding_indicator then -- ASSUME Next_Node = overriding_indicator -- ASSUME Child_Node (Current_Node => Next_Node) = RWoverriding OR RWnot if Syntax_Node_Type (Node => Child_Node (Current_Node => Next_Node)) = SP_Symbols.RWoverriding then -- ASSUME Child_Node (Current_Node => Next_Node) = RWoverriding Is_Overriding := True; elsif Syntax_Node_Type (Node => Child_Node (Current_Node => Next_Node)) /= SP_Symbols.RWnot then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child_Node (Current_Node => Next_Node) = RWoverriding OR RWnot " & "in Wf_Generic_Subprogram_Instantiation"); end if; Next_Node := Next_Sibling (Current_Node => Next_Node); elsif Syntax_Node_Type (Node => Next_Node) /= 
SP_Symbols.procedure_specification and then Syntax_Node_Type (Node => Next_Node) /= SP_Symbols.simple_name then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = overriding_indicator OR procedure_specification OR simple_name " & "in Wf_Generic_Subprogram_Instantiation"); end if; -- ASSUME Next_Node = procedure_specification OR simple_name if Syntax_Node_Type (Node => Next_Node) = SP_Symbols.procedure_specification then -- ASSUME Next_Node = procedure_specification Process_Generic_Procedure_Instantiation (Node => Next_Node, Scope => Scope, Instantiation_Ident_Node => Instantiation_Ident_Node, Constraint_Node => Constraint_Node, Ok => Ok); Kind_Of_Generic := Generic_Procedure; elsif Syntax_Node_Type (Node => Next_Node) = SP_Symbols.simple_name then -- ASSUME Next_Node = simple_name Process_Generic_Function_Instantiation (Node => Next_Node, Scope => Scope, Instantiation_Ident_Node => Instantiation_Ident_Node, Ok => Ok); Constraint_Node := Next_Node; Kind_Of_Generic := Generic_Function; else Instantiation_Ident_Node := STree.NullNode; Constraint_Node := STree.NullNode; Ok := False; Kind_Of_Generic := Generic_Function; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = procedure_specification OR simple_name in Wf_Generic_Subprogram_Instantiation"); end if; Next_Node := Next_Sibling (Current_Node => Next_Node); -- ASSUME Next_Node = dotted_simple_name SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Next_Node) = SP_Symbols.dotted_simple_name, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = dotted_simple_name in Wf_Generic_Subprogram_Instantiation"); if Ok then Check_Generic (Ident_Node => Next_Node, Scope => Scope, Kind => Kind_Of_Generic, Generic_Sym => Generic_Sym); if not Dictionary.Is_Null_Symbol (Generic_Sym) then -- add the instantiation Dictionary.AddSubprogramInstantiation (Name => Node_Lex_String (Node => Instantiation_Ident_Node), Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Instantiation_Ident_Node), End_Position => Node_Position (Node => Instantiation_Ident_Node)), TheGeneric => Generic_Sym, Specification => Dictionary.Location'(Start_Position => Node_Position (Node => Node), End_Position => Node_Position (Node => Node)), Scope => Scope, Context => Dictionary.ProgramContext, Subprogram => Instantiation_Sym); STree.Add_Node_Symbol (Node => Instantiation_Ident_Node, Sym => Instantiation_Sym); Next_Node := Next_Sibling (Current_Node => Next_Node); -- ASSUME Next_Node = generic_actual_part SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Next_Node) = SP_Symbols.generic_actual_part, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = generic_actual_part in Wf_Generic_Subprogram_Instantiation"); -- check parameters etc. 
Wf_Generic_Actual_Part (Actual_Part_Node => Next_Node, Instantiation_Sym => Instantiation_Sym, Scope => Scope, Component_Data => Component_Data, The_Heap => The_Heap, Error_Found => Errors_Found); if Errors_Found then Dictionary.SetSubprogramSignatureNotWellformed (Dictionary.IsAbstract, Instantiation_Sym); else -- do formal/actual substitutions Dictionary.InstantiateSubprogramParameters (ActualSubprogramSym => Instantiation_Sym, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Instantiation_Ident_Node), End_Position => Node_Position (Node => Instantiation_Ident_Node))); Check_No_Overloading_From_Tagged_Ops (Ident_Node => Instantiation_Ident_Node, Subprog_Sym => Instantiation_Sym, Scope => Scope, Abstraction => Dictionary.IsRefined, Is_Overriding => Is_Overriding); Wf_Subprogram_Constraint (Node => Constraint_Node, Subprogram_Sym => Instantiation_Sym, First_Seen => True, Component_Data => Component_Data, The_Heap => The_Heap); end if; end if; end if; end Wf_Generic_Subprogram_Instantiation; spark-2012.0.deb/examiner/declarations-outputdeclarations-printdeclarations.adb0000644000175000017500000062560611753202336027062 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with AdjustFDL_RWs; with CommandLineData; with DAG_IO; with Debug; with E_Strings; with Lists; with Maths; with Symbol_Set; separate (Declarations.OutputDeclarations) procedure PrintDeclarations (Heap : in out Cells.Heap_Record; File : in SPARK_IO.File_Type; Rule_File : in SPARK_IO.File_Type; Needed_Symbols : in Cells.Cell; Scope : in Dictionary.Scopes; Write_Rules : in Boolean; End_Position : in LexTokenManager.Token_Position) is Sym : Dictionary.Symbol; Declare_List : Cells.Cell; Indent : constant Integer := 3; Max_Column : constant Integer := 78; -- Type_List records types that require proof rules to be -- generated for them. Prior to Examiner 7p2d08, only scalar -- type were added to this list, since these were the only -- types that could have replacement rules generated for them. -- Following 7p2d08, this list now records all types to allow -- for replacement rules for 'Size of non-scalar types. 
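-- As a rough illustration (the rule family name, type name and value
-- below are invented), a replacement rule emitted for a type recorded
-- in Type_List might take the form:
--
--    my_subprog_rules(4): my_type__size may_be_replaced_by 32.
--
-- following the "<family>(<n>): <lhs> may_be_replaced_by <rhs>."
-- pattern produced by Print_Rule_Name and Print_Replacement_Rule
-- below.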
L_Heap : Lists.List_Heap; Type_List : Lists.List; Rule_Family_Name : E_Strings.T; Rule_Counter : Natural; Max_Rank : Cells.Cell_Rank; ---------------------------------------------------------------------- procedure Debug_Rank_Sym (Msg : in String; Sym : in Dictionary.Symbol) --# derives null from Msg, --# Sym; is --# hide Debug_Rank_Sym; begin if CommandLineData.Content.Debug.FDL_Ranking then Debug.Print_Sym (Msg => Msg, Sym => Sym); end if; end Debug_Rank_Sym; procedure Debug_Rank_Int (Msg : in String; N : in Integer) --# derives null from Msg, --# N; is --# hide Debug_Rank_Int; begin if CommandLineData.Content.Debug.FDL_Ranking then Debug.PrintInt (Msg, N); end if; end Debug_Rank_Int; ---------------------------------------------------------------------- function Get_Name (Sym : Dictionary.Symbol; Scope : Dictionary.Scopes) return E_Strings.T --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; is Prefix : E_Strings.T; Simple_Name : E_Strings.T; Result : E_Strings.T; begin -- if VCs are generated from code with semantic errors in type declarations then -- null symbols etc. might get as far as here and cause an internal error. The first if -- provides protection against this. if Dictionary.Is_Null_Symbol (Sym) then Result := E_Strings.Copy_String (Str => "unexpected_null_symbol"); elsif Dictionary.IsTypeMark (Sym) and then Dictionary.IsUnknownTypeMark (Sym) then Result := E_Strings.Copy_String (Str => "unknown_type"); else if Dictionary.IsRecordComponent (Sym) then Prefix := E_Strings.Empty_String; else Prefix := Dictionary.GetAnyPrefixNeeded (Sym => Sym, Scope => Scope, Separator => "__"); end if; Simple_Name := Dictionary.GenerateSimpleName (Item => Sym, Separator => "__"); if E_Strings.Get_Length (E_Str => Prefix) > 0 then Result := Prefix; E_Strings.Append_String (E_Str => Result, Str => "__"); E_Strings.Append_Examiner_String (E_Str1 => Result, E_Str2 => Simple_Name); else if not E_Strings.Is_Empty (E_Str => CommandLineData.Content.FDL_Mangle) then AdjustFDL_RWs.Possibly_Adjust (E_Str => Simple_Name, Prefix => CommandLineData.Content.FDL_Mangle); end if; Result := Simple_Name; end if; Result := E_Strings.Lower_Case (E_Str => Result); end if; return Result; end Get_Name; ---------------------------------------------------------------------- -- Print Sym, without wrapping or white-space procedure Print_Symbol_No_Wrap (File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# LexTokenManager.State, --# Scope, --# Sym; is begin E_Strings.Put_String (File => File, E_Str => Get_Name (Sym => Sym, Scope => Scope)); end Print_Symbol_No_Wrap; ---------------------------------------------------------------------- -- Print Name, possibly wrapping and indenting to the next line procedure Print_Name (File : in SPARK_IO.File_Type; Name : in E_Strings.T) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# Name; is begin if SPARK_IO.Col (File) /= Indent and then (((SPARK_IO.Col (File) + E_Strings.Get_Length (E_Str => Name)) + 12) > Max_Column) then SPARK_IO.New_Line (File, 1); SPARK_IO.Set_Col (File, 2 * Indent); end if; E_Strings.Put_String (File => File, E_Str => Name); end Print_Name; ---------------------------------------------------------------------- -- Print Sym, 
possibly wrapping and indenting to the next line procedure Print_Symbol (File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# LexTokenManager.State, --# Scope, --# Sym; is Name : E_Strings.T; begin Name := Get_Name (Sym => Sym, Scope => Scope); Print_Name (File, Name); end Print_Symbol; -- Print Sym in upper-case, possibly wrapping and indenting to the next line procedure Print_Symbol_Upper_Case (File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# LexTokenManager.State, --# Scope, --# Sym; is Name : E_Strings.T; begin Name := Get_Name (Sym => Sym, Scope => Scope); Name := E_Strings.Upper_Case (Name); Print_Name (File, Name); end Print_Symbol_Upper_Case; ---------------------------------------------------------------------- procedure Print_Symbol_Type (File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Sub_Type : in Boolean; Component : in Boolean; Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Component, --# Dictionary.Dict, --# File, --# LexTokenManager.State, --# Scope, --# Sub_Type, --# Sym; is Type_Sym : Dictionary.Symbol; ---------------------------------------------------------------------- -- Function to calculate the required type symbol to be printed for a -- given symbol. Consider making this global and using it prior to all -- calls to Print_Symbol_Type (except when printing arrays and records) -- thus eliminating the need for the 'Component' flag. ---------------------------------------------------------------------- function Get_Type_Sym (Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) return Dictionary.Symbol --# global in Dictionary.Dict; is Type_Sym : Dictionary.Symbol; begin if Dictionary.IsTypeMark (Sym) and not Dictionary.IsSubtype (Sym) then Type_Sym := Dictionary.GetRootType (Sym); elsif Dictionary.Is_Variable (Sym) and then Dictionary.IsOwnVariable (Sym) then Type_Sym := Dictionary.GetRootType (Dictionary.GetOwnVariableTypeHere (OwnVariable => Sym, Scope => Scope)); else Type_Sym := Dictionary.GetRootType (Dictionary.GetType (Sym)); end if; return Type_Sym; end Get_Type_Sym; begin -- Print_Symbol_Type -- Sym is the symbol whose type is to be printed, and Type_Sym is the symbol of its (sub)type if Component then Type_Sym := Sym; -- When printing record component, array index or element types the Type_Sym is the Sym. else Type_Sym := Get_Type_Sym (Sym, Scope); end if; -- Print out the type symbol if possible, or just the basic type of integer, character, real or -- boolean otherwise. Note that the Simplifier cannot currently handle transitivity in subtype -- declarations, eg if we say "type t = integer; type st = t; type a = array[st] of ..." then the -- Simplifier rejects VCs that index into array 'a' using type st. Hence the flag Sub_Type is -- used to avoid this behaviour and print "type st = integer" in such cases. 
It may be possible to -- remove this flag in future if the Simplifier is modified to handle subtypes transitively. See -- discussion under [K401-011]. if (Dictionary.TypeIsInteger (Type_Sym) or else Dictionary.TypeIsModular (Type_Sym)) and then not Dictionary.IsPrivateType (Type_Sym, Scope) and then (Dictionary.IsUniversalIntegerType (Type_Sym) or Sub_Type) then SPARK_IO.Put_String (File, "integer", 0); elsif Dictionary.TypeIsCharacter (Type_Sym) then SPARK_IO.Put_String (File, "character", 0); elsif Dictionary.TypeIsReal (Type_Sym) then SPARK_IO.Put_String (File, "real", 0); elsif Dictionary.TypeIsBoolean (Type_Sym) then SPARK_IO.Put_String (File, "boolean", 0); else Print_Symbol (File => File, Scope => Scope, Sym => Type_Sym); end if; end Print_Symbol_Type; ---------------------------------------------------------------------- -- routines to print rules go here because they need to be seen by -- Print_Constant_Declaration and they use Print_Symbol ---------------------------------------------------------------------- procedure Print_Rule_Name (Rule_File : in SPARK_IO.File_Type) --# global in Rule_Family_Name; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# derives Rule_Counter from * & --# SPARK_IO.File_Sys from *, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File; is begin Rule_Counter := Rule_Counter + 1; E_Strings.Put_String (File => Rule_File, E_Str => Rule_Family_Name); SPARK_IO.Put_Char (Rule_File, '('); SPARK_IO.Put_Integer (Rule_File, Rule_Counter, 0, 10); SPARK_IO.Put_String (Rule_File, "): ", 0); end Print_Rule_Name; ---------------------------------------------- procedure End_A_Rule (Rule_File : in SPARK_IO.File_Type) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Rule_File; is begin SPARK_IO.Put_String (Rule_File, ".", 0); SPARK_IO.New_Line (Rule_File, 1); end End_A_Rule; ---------------------------------------------- function Get_Value (Store_Val : LexTokenManager.Lex_String; Type_Mark : Dictionary.Symbol; Scope : Dictionary.Scopes) return E_Strings.T --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; is Str : E_Strings.T; begin if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Store_Val, Lex_Str2 => LexTokenManager.True_Token) = LexTokenManager.Str_Eq then Str := E_Strings.Copy_String (Str => "true"); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Store_Val, Lex_Str2 => LexTokenManager.False_Token) = LexTokenManager.Str_Eq then Str := E_Strings.Copy_String (Str => "false"); elsif Dictionary.TypeIsNumeric (Type_Mark) or else Dictionary.TypeIsCharacter (Type_Mark) then Str := Maths.ValueToString (Maths.ValueRep (Store_Val)); else Str := Get_Name (Sym => Dictionary.GetEnumerationLiteral (Type_Mark, Store_Val), Scope => Scope); end if; return Str; end Get_Value; ---------------------------------------------- procedure Print_Replacement_Rule (Rule_File : in SPARK_IO.File_Type; Store_Val : in LexTokenManager.Lex_String; Type_Mark : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Rule_File, --# Scope, --# Store_Val, --# Type_Mark; is begin SPARK_IO.Put_String (Rule_File, " may_be_replaced_by ", 0); E_Strings.Put_String (File => Rule_File, E_Str => Get_Value (Store_Val => Store_Val, Type_Mark => Type_Mark, Scope 
=> Scope)); End_A_Rule (Rule_File => Rule_File); end Print_Replacement_Rule; ---------------------------------------------- procedure PrintRuleHeader (Write_Rules : in Boolean; Rule_File : in SPARK_IO.File_Type) --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in Scope; --# in out L_Heap; --# in out SPARK_IO.File_Sys; --# out Rule_Counter; --# out Rule_Family_Name; --# out Type_List; --# derives L_Heap, --# Type_List from L_Heap & --# Rule_Counter from & --# Rule_Family_Name from Dictionary.Dict, --# LexTokenManager.State, --# Scope & --# SPARK_IO.File_Sys from *, --# Dictionary.Dict, --# LexTokenManager.State, --# Rule_File, --# Scope, --# Write_Rules; is separate; ---------------------------------------------- procedure PrintTypeRules (Write_Rules : in Boolean; Rule_File : in SPARK_IO.File_Type) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Rule_Family_Name; --# in Scope; --# in out L_Heap; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# in out Type_List; --# derives L_Heap, --# Type_List from LexTokenManager.State, --# L_Heap, --# Type_List, --# Write_Rules & --# Rule_Counter from *, --# Dictionary.Dict, --# LexTokenManager.State, --# L_Heap, --# Scope, --# Type_List, --# Write_Rules & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# L_Heap, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# Type_List, --# Write_Rules; is separate; ---------------------------------------------------------------------- procedure PrintConstantRules (Write_Rules : in Boolean; Sym : in Dictionary.Symbol; Rule_File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; End_Position : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Rule_Family_Name; --# in out ErrorHandler.Error_Context; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# End_Position, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# SPARK_IO.File_Sys, --# Sym, --# Write_Rules & --# Rule_Counter from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# Sym, --# Write_Rules; is separate; procedure PrintConstantReplacementRule (Write_Rules : in Boolean; Sym : in Dictionary.Symbol; DAG : in Cells.Cell; Rule_File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Rule_Family_Name; --# in out Heap; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# DAG, --# Heap, --# Write_Rules & --# Rule_Counter from *, --# Write_Rules & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# DAG, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# Sym, --# Write_Rules; is begin if Write_Rules then Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, " may_be_replaced_by ", 0); DAG_IO.PrintDag (Heap, Rule_File, DAG, Scope, DAG_IO.Default_Wrap_Limit); End_A_Rule (Rule_File => Rule_File); end if; end 
PrintConstantReplacementRule; ---------------------------------------------------------------------- -- Produces FDL rules that are common to ALL Examiner-generated rule -- files. procedure PrintStandardRules (Write_Rules : in Boolean; Rule_File : in SPARK_IO.File_Type) --# global in Rule_Family_Name; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# derives Rule_Counter from *, --# Write_Rules & --# SPARK_IO.File_Sys from *, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Write_Rules; is --# hide PrintStandardRules; pragma Unreferenced (Write_Rules); pragma Unreferenced (Rule_File); begin -- Currently, there are no such standard rules. -- This used to produce rules for Character'Pos and 'Val, but -- these are no longer needed following CFR 1804. -- We leave the contract of this unit as-was, though, in case -- of future re-instatement of this procedure. null; end PrintStandardRules; ---------------------------------------------------------------------- procedure Print_Declaration_Head (File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes) --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dictionary.Dict, --# File, --# LexTokenManager.State, --# Scope; is Subprog_Sym : Dictionary.Symbol; begin -- Note that in the declarations header, we always print the -- _simple_ name of the subprogram, so it always matches the name -- output as the prefix of each VC generated by Graph.PrintVCs Subprog_Sym := Dictionary.GetRegion (Scope); SPARK_IO.Put_String (File, "title ", 0); if Dictionary.IsFunction (Subprog_Sym) then SPARK_IO.Put_String (File, "function ", 0); elsif Dictionary.IsProcedure (Subprog_Sym) then SPARK_IO.Put_String (File, "procedure ", 0); elsif Dictionary.IsTaskType (Subprog_Sym) then SPARK_IO.Put_String (File, "task_type ", 0); else null; end if; E_Strings.Put_String (File => File, E_Str => E_Strings.Lower_Case (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Dictionary.GetSimpleName (Subprog_Sym)))); SPARK_IO.Put_Char (File, ';'); SPARK_IO.New_Line (File, 2); end Print_Declaration_Head; ---------------------------------------------------------------------- function Find_Own_Var_Matching_This_Type (Sym : Dictionary.Symbol; Scope : Dictionary.Scopes) return Dictionary.Symbol --# global in Dictionary.Dict; is It : Dictionary.Iterator; Result : Dictionary.Symbol := Dictionary.NullSymbol; Current_Own_Var : Dictionary.Symbol; begin It := Dictionary.FirstOwnVariable (Dictionary.GetEnclosingPackage (Scope)); while not Dictionary.IsNullIterator (It) loop Current_Own_Var := Dictionary.CurrentSymbol (It); if Dictionary.Types_Are_Equal (Left_Symbol => Dictionary.GetType (Current_Own_Var), Right_Symbol => Sym, Full_Range_Subtype => False) then Result := Current_Own_Var; exit; end if; It := Dictionary.NextSymbol (It); end loop; return Result; end Find_Own_Var_Matching_This_Type; ---------------------------------------------------------------------- procedure Print_Declaration (File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Needed_Symbols; --# in Rule_Family_Name; --# in Rule_File; --# in out Heap; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Heap from *, --# Dictionary.Dict, --# Scope, --# Sym & --# Rule_Counter from *, --# Dictionary.Dict, --# Heap, --# Needed_Symbols, --# Sym & --# 
SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# Heap, --# LexTokenManager.State, --# Needed_Symbols, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# Sym & --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Heap, --# Needed_Symbols, --# Scope, --# Sym; is procedure Print_Variable_Declaration (File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# LexTokenManager.State, --# Scope, --# Sym; is begin SPARK_IO.Set_Col (File, Indent); SPARK_IO.Put_String (File, "var ", 0); Print_Symbol (File => File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (File, " : ", 0); Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => False, Sym => Sym); SPARK_IO.Put_Line (File, ";", 0); end Print_Variable_Declaration; ---------------------------------------------------------------------- procedure Print_Constant_Declaration (File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# LexTokenManager.State, --# Scope, --# Sym; is begin if not Dictionary.IsEnumerationLiteral (Sym) then SPARK_IO.Set_Col (File, Indent); SPARK_IO.Put_String (File, "const ", 0); Print_Symbol (File => File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (File, " : ", 0); Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => False, Sym => Sym); SPARK_IO.Put_String (File, " = pending", 0); SPARK_IO.Put_Line (File, ";", 0); end if; end Print_Constant_Declaration; ---------------------------------------------------------------------- procedure Print_Type_Declaration (File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# LexTokenManager.State, --# Scope, --# Sym; is Typ : Dictionary.Symbol; -------------------------- procedure Print_Private_Type (File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# LexTokenManager.State, --# Scope, --# Sym; is begin SPARK_IO.Set_Col (File, Indent); SPARK_IO.Put_String (File, "type ", 0); Print_Symbol (File => File, Scope => Scope, Sym => Sym); SPARK_IO.Put_Line (File, " = pending;", 0); end Print_Private_Type; ------------------------------ -- Same as Print_Private_Type for now, but kept separate in case we -- implement further support for proof of protected types -- in future. 
procedure Print_Protected_Type (File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# LexTokenManager.State, --# Scope, --# Sym; is begin SPARK_IO.Set_Col (File, Indent); SPARK_IO.Put_String (File, "type ", 0); Print_Symbol (File => File, Scope => Scope, Sym => Sym); SPARK_IO.Put_Line (File, " = pending;", 0); end Print_Protected_Type; ------------------------------ -- Same as Print_Private_Type for now, but kept separate for clarity procedure Print_Incomplete_Type (File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# LexTokenManager.State, --# Scope, --# Sym; is begin SPARK_IO.Set_Col (File, Indent); SPARK_IO.Put_String (File, "type ", 0); Print_Symbol (File => File, Scope => Scope, Sym => Sym); SPARK_IO.Put_Line (File, " = pending;", 0); end Print_Incomplete_Type; ------------------------------ procedure Print_Sub_Type (File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# LexTokenManager.State, --# Scope, --# Sym; is begin SPARK_IO.Set_Col (File, Indent); SPARK_IO.Put_String (File, "type ", 0); Print_Symbol (File => File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (File, " = ", 0); Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => True, Component => False, Sym => Sym); SPARK_IO.Put_Line (File, ";", 0); end Print_Sub_Type; -------------------------------------- procedure Print_Time_Type (File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# LexTokenManager.State, --# Scope, --# Sym; is begin -- Ada95 LRM D.8 (27) says that time types (and their -- operators) behave as if they were integers. 
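-- As an illustration (the mangled name below is indicative only), the
-- statements that follow declare a predefined time type to the proof
-- tools as:
--
--    type ada__real_time__time = integer;
--
-- so that its values, and its arithmetic and relational operators, are
-- modelled over mathematical integers.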
SPARK_IO.Set_Col (File, Indent); SPARK_IO.Put_String (File, "type ", 0); Print_Symbol (File => File, Scope => Scope, Sym => Sym); SPARK_IO.Put_Line (File, " = integer;", 0); end Print_Time_Type; -------------------------------------- procedure Print_Enumerated_Type (File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# LexTokenManager.State, --# Scope, --# Sym; is EnumIt : Dictionary.Iterator; First_One : Boolean; begin SPARK_IO.Set_Col (File, Indent); SPARK_IO.Put_String (File, "type ", 0); Print_Symbol (File => File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (File, " = ", 0); SPARK_IO.Put_Char (File, '('); if Dictionary.TypeIsGeneric (Sym) then -- we can't print the actual enumeration literals because there aren't any -- so we synthesise some to tell the proof tools this is an enumeration -- the synthetic literals will never appear in VCs and can't be used -- format is type T = (generic__t__lower, generic__t__upper); SPARK_IO.Put_String (File, "generic__", 0); Print_Symbol_No_Wrap (File => File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (File, "__lower, ", 0); SPARK_IO.Put_String (File, "generic__", 0); Print_Symbol_No_Wrap (File => File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (File, "__upper", 0); else -- not generic, get literals from Dictionary First_One := True; EnumIt := Dictionary.FirstEnumerationLiteral (Sym); while not Dictionary.IsNullIterator (EnumIt) loop if First_One then First_One := False; else SPARK_IO.Put_String (File, ", ", 0); end if; Print_Symbol (File => File, Scope => Scope, Sym => Dictionary.CurrentSymbol (EnumIt)); EnumIt := Dictionary.NextSymbol (EnumIt); end loop; end if; SPARK_IO.Put_Char (File, ')'); SPARK_IO.Put_Line (File, ";", 0); end Print_Enumerated_Type; --------------------------- procedure Print_Character_Type (File : in SPARK_IO.File_Type) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File; is begin SPARK_IO.Set_Col (File, Indent); SPARK_IO.Put_Line (File, "type character = integer;", 0); end Print_Character_Type; --------------------------- procedure Print_Record_Type (File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Sym : in Dictionary.Symbol; Has_Private_Fields : in Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# Has_Private_Fields, --# LexTokenManager.State, --# Scope, --# Sym; is Component_It : Dictionary.Iterator; First_One : Boolean; Component_Sym : Dictionary.Symbol; begin -- If the root of an extended record is a null record then we don't want a declaration. 
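-- As a sketch of the output produced here (record and field names are
-- invented), a SPARK declaration such as:
--
--    type Pair is record X : Integer; Y : Integer; end record;
--
-- is rendered for the proof tools roughly as:
--
--    type pair = record
--          x : integer;
--          y : integer
--       end;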
if Dictionary.RecordHasSomeFields (Sym) then SPARK_IO.Set_Col (File, Indent); SPARK_IO.Put_String (File, "type ", 0); Print_Symbol (File => File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (File, " = ", 0); SPARK_IO.Put_Line (File, "record", 0); if Has_Private_Fields then -- just declare inherit field SPARK_IO.Set_Col (File, 3 * Indent); SPARK_IO.Put_String (File, "inherit : ", 0); Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => False, Sym => Dictionary.GetRootOfExtendedType (Sym)); else -- declare all fields Component_It := Dictionary.FirstRecordComponent (Sym); -- If all ancestors of an extended record are null records then we don't want -- a declaration of an Inherit field referencing first of them. if Dictionary.TypeIsExtendedTagged (Sym) and then Dictionary.NoFieldsBelowThisRecord (Sym) then --skip inherit field Component_It := Dictionary.NextSymbol (Component_It); end if; First_One := True; while not Dictionary.IsNullIterator (Component_It) loop if First_One then First_One := False; else SPARK_IO.Put_Line (File, ";", 0); end if; SPARK_IO.Set_Col (File, 3 * Indent); Print_Symbol_No_Wrap (File => File, Scope => Scope, Sym => Dictionary.CurrentSymbol (Component_It)); SPARK_IO.Put_String (File, " : ", 0); Component_Sym := Dictionary.GetType (Dictionary.CurrentSymbol (Component_It)); if Dictionary.Is_Constrained_Array_Type_Mark (Component_Sym, Scope) or else (Dictionary.TypeIsRecord (Component_Sym) and then Dictionary.IsSubtype (Component_Sym)) then -- If the record component type is a constrained subtype of an unconstrained -- array then print the unconstrained array type. It may be possible to enhance -- this in future and print the constrained subtype here but there are two -- issues that would need to be addressed: -- a. Making the Simplifier deal with transitivity of subtypes properly. -- b. When printing out the rules for functions that take unconstrained arrays -- as parameters, the Examiner would need to print out a version of the -- function that took the constrained array as a parameter, or the Simplifier -- would need to be modified to understand that a constrained subtype can be -- passed to such a function in place of the unconstrained type. -- The same applies to record subtypes. Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => True, Sym => Dictionary.GetRootType (Component_Sym)); else -- The normal case is to print the type of the record component. 
Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => True, Sym => Component_Sym); end if; Component_It := Dictionary.NextSymbol (Component_It); end loop; end if; SPARK_IO.New_Line (File, 1); SPARK_IO.Set_Col (File, 2 * Indent); SPARK_IO.Put_String (File, "end", 0); SPARK_IO.Put_Line (File, ";", 0); end if; end Print_Record_Type; ------------------------------- procedure Print_Array_Type (File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# LexTokenManager.State, --# Scope, --# Sym; is Index_It : Dictionary.Iterator; First_One : Boolean; Component_Sym : Dictionary.Symbol; begin SPARK_IO.Set_Col (File, Indent); SPARK_IO.Put_String (File, "type ", 0); Print_Symbol (File => File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (File, " = ", 0); SPARK_IO.Put_String (File, "array [", 0); Index_It := Dictionary.FirstArrayIndex (Sym); First_One := True; while not Dictionary.IsNullIterator (Index_It) loop if First_One then First_One := False; else SPARK_IO.Put_String (File, ",", 0); end if; Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => True, Sym => Dictionary.CurrentSymbol (Index_It)); Index_It := Dictionary.NextSymbol (Index_It); end loop; SPARK_IO.Put_String (File, "] of ", 0); Component_Sym := Dictionary.GetArrayComponent (Sym); if Dictionary.Is_Constrained_Array_Type_Mark (Component_Sym, Scope) or else (Dictionary.TypeIsRecord (Component_Sym) and then Dictionary.IsSubtype (Component_Sym)) then -- If the array component type is a constrained subtype of an unconstrained -- array then print the unconstrained array type. It may be possible to enhance -- this in future and print the constrained subtype here but there are two -- issues that would need to be addressed: -- a. Making the Simplifier deal with transitivity of subtypes properly. -- b. When printing out the rules for functions that take unconstrained arrays -- as parameters, the Examiner would need to print out a version of the -- function that took the constrained array as a parameter, or the Simplifier -- would need to be modified to understand that a constrained subtype can be -- passed to such a function in place of the unconstrained type. -- The same applies to record subtypes. Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => True, Sym => Dictionary.GetRootType (Component_Sym)); else -- The normal case is to print the type of the array component. 
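-- To summarise the branch above with an invented example: for a
-- component whose type is declared as
--
--    subtype Name_T is String (1 .. 10);
--
-- the component type written to FDL is the unconstrained root type
-- "string" rather than "name_t"; otherwise the call below prints the
-- component's own type.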
Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => True, Sym => Component_Sym); end if; SPARK_IO.Put_Line (File, ";", 0); -- Wherever type String is declared, we also produce a -- declaration of a deferred constant that represents the -- Null String literal "" if Dictionary.IsPredefinedStringType (Sym) then SPARK_IO.Set_Col (File, Indent); SPARK_IO.Put_String (File, "const ", 0); SPARK_IO.Put_String (File, DAG_IO.Null_String_Literal_Name, 0); SPARK_IO.Put_Line (File, " : string = pending;", 0); end if; end Print_Array_Type; ------------------------- -- Procedure to print abstract types: either "pending" -- or an FDL record declaration is printed depending on scope procedure Print_Abstract_Type (File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# LexTokenManager.State, --# Scope, --# Sym; is Own_Var_Sym : Dictionary.Symbol; procedure Print_Refinement_Record (File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# LexTokenManager.State, --# Scope, --# Sym; is Component_It : Dictionary.Iterator; First_One : Boolean; begin SPARK_IO.Set_Col (File, Indent); SPARK_IO.Put_String (File, "type ", 0); Print_Symbol (File => File, Scope => Scope, Sym => Dictionary.GetType (Sym)); SPARK_IO.Put_String (File, " = ", 0); SPARK_IO.Put_Line (File, "record", 0); Component_It := Dictionary.FirstConstituent (Sym); First_One := True; while not Dictionary.IsNullIterator (Component_It) loop if First_One then First_One := False; else SPARK_IO.Put_Line (File, ";", 0); end if; SPARK_IO.Set_Col (File, 3 * Indent); Print_Symbol (File => File, Scope => Scope, Sym => Dictionary.CurrentSymbol (Component_It)); SPARK_IO.Put_String (File, " : ", 0); Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => False, Sym => Dictionary.CurrentSymbol (Component_It)); Component_It := Dictionary.NextSymbol (Component_It); end loop; SPARK_IO.New_Line (File, 1); SPARK_IO.Set_Col (File, 2 * Indent); SPARK_IO.Put_String (File, "end", 0); SPARK_IO.Put_Line (File, ";", 0); end Print_Refinement_Record; ------------------------- begin --Print_Abstract_Type Own_Var_Sym := Find_Own_Var_Matching_This_Type (Sym => Sym, Scope => Scope); if IsLocalOwnVariableWithRefinement (Own_Var_Sym, Scope) then Print_Refinement_Record (File => File, Scope => Scope, Sym => Own_Var_Sym); else SPARK_IO.Set_Col (File, Indent); SPARK_IO.Put_String (File, "type ", 0); Print_Symbol (File => File, Scope => Scope, Sym => Sym); SPARK_IO.Put_Line (File, " = pending;", 0); end if; end Print_Abstract_Type; begin -- Print_Type_Declaration -- Time types are private from the point of view of the -- type system, but need special handling in FDL so that -- their relational and arithmetic operators are well-defined, -- so we deal with them first. 
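-- For the branches below that call Print_Record_Type and Print_Array_Type
-- (defined above), a purely illustrative sketch of the FDL emitted, with
-- all names invented:
--    type pkg__rec = record
--       f1 : integer;
--       f2 : pkg__elem
--    end;
--    type pkg__arr = array [integer] of pkg__elem;
-- Abstract proof types with no local own-variable refinement are simply
-- declared as "type pkg__abs = pending;".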
if Dictionary.IsPredefinedTimeType (Sym) or else Dictionary.IsPredefinedTimeSpanType (Sym) then Print_Time_Type (File => File, Scope => Scope, Sym => Sym); elsif Dictionary.IsPrivateType (Sym, Scope) and then not Dictionary.TypeIsExtendedTagged (Sym) then Print_Private_Type (File => File, Scope => Scope, Sym => Sym); elsif Dictionary.TypeIsAbstractProof (Sym) then Print_Abstract_Type (File => File, Scope => Scope, Sym => Sym); elsif Dictionary.IsSubtype (Sym) and then not Dictionary.TypeIsRecord (Dictionary.GetRootType (Sym)) and then not Dictionary.TypeIsArray (Dictionary.GetRootType (Sym)) then -- Subtypes are those which actually appear in -- the VC, since only base types are ever added -- by generatesuccessors. These can only occur in attributes -- so we want the actual type not the base type. -- For record subtypes (which are always full range in SPARK) don't -- print the subtype as the root type is always used in FDL. -- Similarly for array subtypes (which are either full range, or are -- constrained subtypes of unconstrained arrays) don't print subtype -- because the root type is always used in FDL. Print_Sub_Type (File => File, Scope => Scope, Sym => Sym); elsif Dictionary.TypeIsIncompleteHere (Sym, Scope) and then not Dictionary.TypeIsExtendedTagged (Sym) then Print_Incomplete_Type (File => File, Scope => Scope, Sym => Sym); elsif Dictionary.IsParameterConstraint (Sym) then -- subprogram parameter constraint symbols are used to communicate information about -- unconstrained array formal parameters. They behave as subtypes for VC declaration purposes Print_Sub_Type (File => File, Scope => Scope, Sym => Sym); else Typ := Dictionary.GetRootType (Sym); if Dictionary.TypeIsCharacter (Typ) then Print_Character_Type (File => File); elsif Dictionary.IsPredefinedIntegerType (Typ) or Dictionary.IsPredefinedFloatType (Typ) or Dictionary.TypeIsBoolean (Typ) then -- No action, since "integer", "real" and "boolean" are -- predefined in FDL null; elsif Dictionary.TypeIsEnumeration (Typ) then Print_Enumerated_Type (File => File, Scope => Scope, Sym => Typ); elsif (Dictionary.TypeIsRecord (Typ) or Dictionary.TypeIsExtendedTagged (Sym)) then if Dictionary.IsSubtype (Sym) then -- Don't print out record subtypes. We always use the root type in FDL, and -- that will be printed separately. null; else Print_Record_Type (File => File, Scope => Scope, Sym => Typ, Has_Private_Fields => Dictionary.IsPrivateType (Sym, Scope)); end if; elsif Dictionary.TypeIsArray (Typ) then if Dictionary.IsSubtype (Sym) then -- Don't print out array subtypes. We always use the root type in FDL, and -- that will be printed separately. null; else Print_Array_Type (File => File, Scope => Scope, Sym => Typ); end if; elsif Dictionary.TypeIsInteger (Typ) or Dictionary.TypeIsReal (Typ) or Dictionary.TypeIsModular (Typ) then Print_Sub_Type (File => File, Scope => Scope, Sym => Typ); elsif Dictionary.IsProtectedType (Typ) then -- Protected types need at least a "pending" declaration in FDL, -- so that later proof functions and variables make sense. 
Print_Protected_Type (File => File, Scope => Scope, Sym => Typ); else Debug_Rank_Sym (Msg => "Print_Type_Declaration does not know how to deal with ", Sym => Typ); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Assertion_Failure, Msg => "Unknown type in Print_Type_Declaration"); end if; end if; end Print_Type_Declaration; -------------------------------------------- procedure Print_Procedure_Declaration (File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Heap; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Heap, --# Scope, --# Sym & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# Heap, --# LexTokenManager.State, --# Scope, --# Sym; is Exp_It, Imp_It : Dictionary.Iterator; First_One : Boolean; Abstraction : Dictionary.Abstractions; Export_Sym, Import_Sym : Dictionary.Symbol; -- These stacks are needed to reverse the order of both exports and imports Import_Stack : CStacks.Stack; Temp_Cell : Cells.Cell; --stack member function Export_Name (Subprog_Sym, Export_Sym : Dictionary.Symbol; Scope : Dictionary.Scopes) return E_Strings.T --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; is Result : E_Strings.T; begin if Dictionary.IsFormalParameter (Subprog_Sym, Export_Sym) then -- No prefix needed Result := LexTokenManager.Lex_String_To_String (Lex_Str => Dictionary.GetSimpleName (Export_Sym)); else -- It is global and a prefix may be needed Result := Get_Name (Sym => Export_Sym, Scope => Scope); end if; return E_Strings.Lower_Case (E_Str => Result); end Export_Name; ------------------------------------------ begin -- Print_Procedure_Declaration -- Use stack to reverse order of exports CStacks.CreateStack (Import_Stack); Abstraction := Dictionary.GetAbstraction (Sym, Scope); Exp_It := Dictionary.FirstExport (Abstraction, Sym); while not Dictionary.IsNullIterator (Exp_It) loop Export_Sym := Dictionary.CurrentSymbol (Exp_It); SPARK_IO.Set_Col (File, Indent); SPARK_IO.Put_String (File, "function ", 0); Print_Symbol (File => File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (File, "__", 0); E_Strings.Put_String (File => File, E_Str => Export_Name (Subprog_Sym => Sym, Export_Sym => Export_Sym, Scope => Scope)); Imp_It := Dictionary.FirstDependency (Abstraction, Sym, Export_Sym); while not Dictionary.IsNullIterator (Imp_It) loop Cells.Create_Cell (Heap, Temp_Cell); Cells.Set_Symbol_Value (Heap, Temp_Cell, Dictionary.CurrentSymbol (Imp_It)); CStacks.Push (Heap, Temp_Cell, Import_Stack); Imp_It := Dictionary.NextSymbol (Imp_It); end loop; --now pop them one at a time and build an argument list for function First_One := True; while not CStacks.IsEmpty (Import_Stack) loop Import_Sym := Cells.Get_Symbol_Value (Heap, CStacks.Top (Heap, Import_Stack)); CStacks.Pop (Heap, Import_Stack); if First_One then SPARK_IO.Put_String (File, "(", 0); First_One := False; else SPARK_IO.Put_String (File, ", ", 0); end if; Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => False, Sym => Import_Sym); end loop; if not First_One then SPARK_IO.Put_String (File, ")", 0); end if; SPARK_IO.Put_String (File, " : ", 0); Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => False, Sym => Export_Sym); SPARK_IO.Put_Line (File, ";", 0); Exp_It := 
Dictionary.NextSymbol (Exp_It); end loop; end Print_Procedure_Declaration; --------------------------------------------------------- procedure Print_Function_Declaration (File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in Heap; --# in LexTokenManager.State; --# in Needed_Symbols; --# in Rule_Family_Name; --# in Rule_File; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Rule_Counter, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Heap, --# Needed_Symbols, --# Sym & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# Heap, --# LexTokenManager.State, --# Needed_Symbols, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# Sym; is Argument_It : Dictionary.Iterator; First_One : Boolean; Ada_Sym : Dictionary.Symbol; procedure Print_Proof_Function_Refinement (Ada_Function : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Rule_File; --# in Scope; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives SPARK_IO.File_Sys from *, --# Ada_Function, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Rule_File, --# Scope & --# Statistics.TableUsage from *, --# Ada_Function, --# Dictionary.Dict; is Abstract_Args : Symbol_Set.T; -- Sym is an abstract proof function. For each of its own -- variable parameter X, print the check -- "goal(checktype(X,))". procedure Print_Own_Type_Checks (Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Rule_File; --# in Scope; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Rule_File, --# Scope, --# Sym; is Argument_It : Dictionary.Iterator; First_One : Boolean; Param : Dictionary.Symbol; begin First_One := True; Argument_It := Dictionary.FirstSubprogramParameter (Sym); while not Dictionary.IsNullIterator (Argument_It) loop Param := Dictionary.CurrentSymbol (Argument_It); if Dictionary.IsOwnVariable (Param) then if First_One then First_One := False; else SPARK_IO.Put_String (Rule_File, ", ", 0); end if; SPARK_IO.Put_String (Rule_File, "goal(checktype(", 0); Print_Symbol_Upper_Case (File => Rule_File, Scope => Scope, Sym => Param); SPARK_IO.Put_String (Rule_File, ", ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Dictionary.GetType (Param)); SPARK_IO.Put_String (Rule_File, "))", 0); end if; Argument_It := Dictionary.NextSymbol (Argument_It); end loop; end Print_Own_Type_Checks; -- Print parameter Sym for a call to the abstract or refined -- proof function. If Sym is a constituent c1 of own -- variable x (for the refined call), and we are looking at -- the refined function, then print "fld_c1(X)". Otherwise, -- the parameter is just printed "X". 
procedure Print_Proof_Function_Argument (Sym : in Dictionary.Symbol; Abstraction : in Dictionary.Abstractions) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Rule_File; --# in Scope; --# in out Abstract_Args; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Abstract_Args, --# Statistics.TableUsage from *, --# Abstraction, --# Abstract_Args, --# Sym & --# SPARK_IO.File_Sys from *, --# Abstraction, --# Abstract_Args, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Rule_File, --# Scope, --# Sym; is Do_Refinement : Boolean; begin -- This procedure is called in two contexts; first for -- each argument for the abstract version of a -- function and second for each argument of the -- refined version. When printing the arguments for -- the abstract version we: -- * Always print the abstract, non-refined version -- * Make a note of the symbol -- When printing the arguments for the refined version -- it is a bit more complicated: -- * Arguments that are not part of any own variable -- are printed as-is (i.e. WIBBLE) -- * Arguments that are part of an own variable -- are printed as-is if we have seen them -- before (during the abstract call) (i.e. WIBBLE) -- * Arguments that are part of an own variable -- are printed using the refined view if we have not -- seen them before (i.e. fld_foo(WIBBLE)). case Abstraction is when Dictionary.IsAbstract => Do_Refinement := False; Symbol_Set.Add (Abstract_Args, Sym); when Dictionary.IsRefined => Do_Refinement := (Dictionary.IsConstituent (Sym) and not Symbol_Set.Contains (Abstract_Args, Sym)); end case; if Do_Refinement then SPARK_IO.Put_String (Rule_File, "fld_", 0); Print_Symbol_No_Wrap (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "(", 0); Print_Symbol_Upper_Case (File => Rule_File, Scope => Scope, Sym => Dictionary.GetSubject (Sym)); SPARK_IO.Put_String (Rule_File, ")", 0); else Print_Symbol_Upper_Case (File => Rule_File, Scope => Scope, Sym => Sym); end if; end Print_Proof_Function_Argument; -- Print a call to the abstract or refined proof function -- for the Ada function Sym. Parameters are all universally -- quantified (thus upper-case), and refined own parameters -- are derived from the corresponding abstract own -- parameters with a suitable call to a "fld_" getter -- function. 
procedure Print_Proof_Function_Pattern (Sym : in Dictionary.Symbol; Abstraction : in Dictionary.Abstractions) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Rule_File; --# in Scope; --# in out Abstract_Args; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Abstract_Args, --# Statistics.TableUsage from *, --# Abstraction, --# Abstract_Args, --# Dictionary.Dict, --# Sym & --# SPARK_IO.File_Sys from *, --# Abstraction, --# Abstract_Args, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Rule_File, --# Scope, --# Sym; is Argument_It : Dictionary.Iterator; First_One : Boolean; Proof_Sym : Dictionary.Symbol; begin Proof_Sym := Dictionary.GetImplicitProofFunction (Abstraction, Sym); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Proof_Sym); First_One := True; Argument_It := Dictionary.FirstSubprogramParameter (Proof_Sym); while not Dictionary.IsNullIterator (Argument_It) loop if First_One then SPARK_IO.Put_String (Rule_File, "(", 0); First_One := False; else SPARK_IO.Put_String (Rule_File, ", ", 0); end if; Print_Proof_Function_Argument (Sym => Dictionary.CurrentSymbol (Argument_It), Abstraction => Abstraction); Argument_It := Dictionary.NextSymbol (Argument_It); end loop; if not First_One then SPARK_IO.Put_String (Rule_File, ")", 0); end if; end Print_Proof_Function_Pattern; begin Symbol_Set.Initialise (Abstract_Args); Print_Proof_Function_Pattern (Ada_Function, Dictionary.IsAbstract); SPARK_IO.Put_String (Rule_File, " may_be_replaced_by ", 0); --# accept F, 10, Abstract_Args, "We are not actually assigning here anymore, just reading."; Print_Proof_Function_Pattern (Ada_Function, Dictionary.IsRefined); --# end accept; SPARK_IO.Put_String (Rule_File, " if [", 0); Print_Own_Type_Checks (Dictionary.GetImplicitProofFunction (Dictionary.IsAbstract, Ada_Function)); SPARK_IO.Put_Line (Rule_File, "].", 0); end Print_Proof_Function_Refinement; begin -- Print_Function_Declaration -- In the case of data refinement, if Sym is a refined proof -- function, and the corresponding abstract proof function -- is needed, then generate a rule stating that a call to -- the abstract proof function can be rewritten in a call to -- the refined proof function. 
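-- Purely illustrative (names invented; the actual identifiers are produced
-- by Print_Symbol): for an Ada function F whose abstract own variable State
-- is refined onto a constituent C, the rule generated below has the shape
--    f(STATE) may_be_replaced_by f__refined(fld_c(STATE))
--       if [ goal(checktype(STATE, state_type)) ].
-- i.e. the abstract call, "may_be_replaced_by", the refined call, followed
-- by the checktype side conditions from Print_Own_Type_Checks.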
if Dictionary.IsImplicitProofFunction (Sym) then Ada_Sym := Dictionary.GetAdaFunction (Sym); -- This is a case of data refinement if not Dictionary.IsNullIterator (Dictionary.FirstGlobalVariable (Dictionary.IsAbstract, Ada_Sym)) -- Sym is a refined proof function and then Dictionary.GetImplicitProofFunction (Dictionary.IsRefined, Ada_Sym) = Sym -- The corresponding abstract proof function is needed and then Pile.Contains (Heap => Heap, Symbol => Dictionary.GetImplicitProofFunction (Dictionary.IsAbstract, Ada_Sym), Node => Needed_Symbols) then Print_Rule_Name (Rule_File); Print_Proof_Function_Refinement (Ada_Sym); end if; end if; if Dictionary.GetRegion (Scope) = Sym then SPARK_IO.Set_Col (File, Indent); SPARK_IO.Put_String (File, "var ", 0); Print_Symbol (File => File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (File, " : ", 0); Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => False, Sym => Sym); SPARK_IO.Put_Line (File, ";", 0); else SPARK_IO.Set_Col (File, Indent); SPARK_IO.Put_String (File, "function ", 0); Print_Symbol (File => File, Scope => Scope, Sym => Sym); First_One := True; Argument_It := Dictionary.FirstSubprogramParameter (Sym); while not Dictionary.IsNullIterator (Argument_It) loop if First_One then SPARK_IO.Put_String (File, "(", 0); First_One := False; else SPARK_IO.Put_String (File, ", ", 0); end if; Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => False, Sym => Dictionary.CurrentSymbol (Argument_It)); Argument_It := Dictionary.NextSymbol (Argument_It); end loop; if not First_One then SPARK_IO.Put_String (File, ")", 0); end if; SPARK_IO.Put_String (File, " : ", 0); Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => False, Sym => Sym); SPARK_IO.Put_Line (File, ";", 0); end if; end Print_Function_Declaration; ---------------------------------------------- begin --Print_Declaration if Dictionary.Is_Variable (Sym) then Print_Variable_Declaration (File => File, Scope => Scope, Sym => Sym); elsif Dictionary.Is_Constant (Sym) then Print_Constant_Declaration (File => File, Scope => Scope, Sym => Sym); elsif Dictionary.IsKnownDiscriminant (Sym) then Print_Constant_Declaration (File => File, Scope => Scope, Sym => Sym); elsif Dictionary.IsTypeMark (Sym) then Print_Type_Declaration (File => File, Scope => Scope, Sym => Sym); elsif Dictionary.IsProcedure (Sym) then Print_Procedure_Declaration (File => File, Scope => Scope, Sym => Sym); elsif Dictionary.IsFunction (Sym) then Print_Function_Declaration (File => File, Scope => Scope, Sym => Sym); else null; end if; end Print_Declaration; ---------------------------------------------- procedure Print_Attribute_Declarations (File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes) --# global in AttributeList; --# in CommandLineData.Content; --# in Dictionary.Dict; --# in Heap; --# in LexTokenManager.State; --# in Rule_Family_Name; --# in Rule_File; --# in Type_List; --# in Write_Rules; --# in out L_Heap; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# derives L_Heap from *, --# AttributeList, --# Heap, --# LexTokenManager.State, --# Type_List, --# Write_Rules & --# Rule_Counter from *, --# AttributeList, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Scope, --# Write_Rules & --# SPARK_IO.File_Sys from *, --# AttributeList, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# Heap, --# LexTokenManager.State, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# Write_Rules; is 
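-- Illustrative only (names invented): attribute constants emitted by
-- Print_Attribute_Constant below take the form
--    const pkg__t__first : integer = pending;
--    const pkg__t__base__last : integer = pending;
-- while attributes such as 'Valid, 'Always_Valid, 'Min/'Max, 'Pos and 'Val
-- are declared as FDL functions by Print_Attribute_Function, together with
-- their replacement/deduction rules when Write_Rules is set.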
Current_Attrib, Prev_Attrib : Cells.Cell; ---------------------------- procedure Print_One_Attribute (Tick_Cell : in Cells.Cell; Prev_Cell : in Cells.Cell) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in File; --# in Heap; --# in LexTokenManager.State; --# in Rule_Family_Name; --# in Rule_File; --# in Scope; --# in Type_List; --# in Write_Rules; --# in out L_Heap; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# derives L_Heap from *, --# Heap, --# LexTokenManager.State, --# Tick_Cell, --# Type_List, --# Write_Rules & --# Rule_Counter from *, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Prev_Cell, --# Scope, --# Tick_Cell, --# Write_Rules & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# Heap, --# LexTokenManager.State, --# Prev_Cell, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# Tick_Cell, --# Write_Rules; is procedure Print_Attribute_Constant --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in File; --# in Heap; --# in LexTokenManager.State; --# in Scope; --# in Tick_Cell; --# in Type_List; --# in Write_Rules; --# in out L_Heap; --# in out SPARK_IO.File_Sys; --# derives L_Heap from *, --# Heap, --# LexTokenManager.State, --# Tick_Cell, --# Type_List, --# Write_Rules & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# Heap, --# LexTokenManager.State, --# Scope, --# Tick_Cell; is Lex_Str : LexTokenManager.Lex_String; Prefix_Cell : Cells.Cell; Has_Base : Boolean; Already_Present, Ok : Boolean; procedure Print_Attribute_Type (File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Sym : in Dictionary.Symbol; Attrib : in LexTokenManager.Lex_String) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Attrib, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# LexTokenManager.State, --# Scope, --# Sym; is The_Type : Dictionary.Symbol; begin if Dictionary.IsTypeMark (Sym) then The_Type := Sym; else The_Type := Dictionary.GetType (Sym); end if; if Dictionary.IsArrayAttribute (Attrib, The_Type) then Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => False, Sym => Dictionary.GetArrayAttributeType (Attrib, The_Type, 1)); --note assumption of first index here pending expression evaluation else Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => False, Sym => Dictionary.GetScalarAttributeType (Attrib, The_Type)); end if; end Print_Attribute_Type; begin -- Print_Attribute_Constant Has_Base := False; Prefix_Cell := Cells.Get_A_Ptr (Heap, Tick_Cell); if Cells.Get_Kind (Heap, Prefix_Cell) = Cell_Storage.Op then Has_Base := True; Prefix_Cell := Cells.Get_A_Ptr (Heap, Prefix_Cell); end if; SPARK_IO.Set_Col (File, Indent); SPARK_IO.Put_String (File, "const ", 0); -- Make copy of symbol in list of type for later rule generation if Write_Rules then --# accept Flow, 10, Ok, "Expected ineffective assignment to Ok" & --# Flow, 10, Already_Present, "Expected ineffective assignment to Already_Present"; --The Heap contains the full details of each referenced entity --(integer, myvariable, etc.), including information that --allows working out if the entity involves a base part --(__base) or the form of any any tick part (__first, __last, --etc.). With this fine grain information, FDL is generated --only for the specific entities that are referenced. 
--However, below, only the referenced entity name is recorded --and passed to rule generation (by referencing the cell --entity name in a list). With only this larger grain information, --every rule is generated for an entity (all base forms and --tick forms). Care needs to be taken to ensure that FDL is --generated for each entity that will be referenced in the --generated rules. Lists.Add_Symbol (Heap => L_Heap, The_List => Type_List, Symbol => Cells.Get_Symbol_Value (Heap, Prefix_Cell), Already_Present => Already_Present, Ok => Ok); --# end accept; end if; Print_Symbol (File => File, Scope => Scope, Sym => Cells.Get_Symbol_Value (Heap, Prefix_Cell)); if Has_Base then SPARK_IO.Put_String (File, "__base", 0); end if; SPARK_IO.Put_String (File, "__", 0); Lex_Str := Cells.Get_Lex_Str (Heap, Cells.Get_B_Ptr (Heap, Tick_Cell)); E_Strings.Put_String (File => File, E_Str => E_Strings.Lower_Case (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Lex_Str))); SPARK_IO.Put_String (File, " : ", 0); Print_Attribute_Type (File => File, Scope => Scope, Sym => Cells.Get_Symbol_Value (Heap, Prefix_Cell), Attrib => Lex_Str); SPARK_IO.Put_Line (File, " = pending; ", 0); --# accept Flow, 33, Ok, "Expected Ok to be neither referenced nor exported" & --# Flow, 33, Already_Present, "Expected Already_Present to be neither referenced nor exported"; end Print_Attribute_Constant; ------------------ procedure Print_Attribute_Function --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in File; --# in Heap; --# in LexTokenManager.State; --# in Prev_Cell; --# in Rule_Family_Name; --# in Rule_File; --# in Scope; --# in Tick_Cell; --# in Write_Rules; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# derives Rule_Counter from *, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Prev_Cell, --# Scope, --# Tick_Cell, --# Write_Rules & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# Heap, --# LexTokenManager.State, --# Prev_Cell, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# Tick_Cell, --# Write_Rules; is type Bounds is (Lower_Bound, Upper_Bound); Lex_Str : LexTokenManager.Lex_String; Ex_Str : E_Strings.T; Selector : E_Strings.T; Brackets : E_Strings.T; Component_Type, Index_Type, Rec_Variable, Type_Sym : Dictionary.Symbol; procedure Format_Record_Selector (Field_Name : in Dictionary.Symbol; Selector : out E_Strings.T; Brackets : out E_Strings.T; Variable : out Dictionary.Symbol) -- Analyse the fieldname, creating a field selector to access it, -- and a balancing set of close brackets -- also pick out the variable name -- This is also safe to call when the variable is not a record --# global in Dictionary.Dict; --# in LexTokenManager.State; --# derives Brackets, --# Variable from Dictionary.Dict, --# Field_Name & --# Selector from Dictionary.Dict, --# Field_Name, --# LexTokenManager.State; is Field : Dictionary.Symbol; -- the current field Sel : E_Strings.T; -- the field selector Brk : E_Strings.T; -- the close bracket list begin Field := Field_Name; Sel := E_Strings.Empty_String; Brk := E_Strings.Empty_String; loop -- exit when we've reached the variable name exit when not Dictionary.IsSubcomponent (Field); -- add the field selector to the end - note this serendipitously -- generates the nested selectors in the correct order E_Strings.Append_String (E_Str => Sel, Str => "fld_"); E_Strings.Append_Examiner_String (E_Str1 => Sel, E_Str2 => E_Strings.Lower_Case (E_Str => LexTokenManager.Lex_String_To_String 
(Lex_Str => Dictionary.GetSimpleName (Field)))); -- add a closing bracket, to match E_Strings.Append_String (E_Str => Sel, Str => "("); E_Strings.Append_String (E_Str => Brk, Str => ")"); -- get the enclosing field/variable Field := Dictionary.GetEnclosingObject (Field); end loop; Selector := Sel; Brackets := Brk; Variable := Field; end Format_Record_Selector; procedure Print_Function_Header -- print the common function declaration header --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in Ex_Str; --# in File; --# in Heap; --# in LexTokenManager.State; --# in Scope; --# in Tick_Cell; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Ex_Str, --# File, --# Heap, --# LexTokenManager.State, --# Scope, --# Tick_Cell; is begin SPARK_IO.Set_Col (File, Indent); SPARK_IO.Put_String (File, "function ", 0); Print_Symbol (File => File, Scope => Scope, Sym => Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell))); SPARK_IO.Put_String (File, "__", 0); E_Strings.Put_String (File => File, E_Str => E_Strings.Lower_Case (E_Str => Ex_Str)); end Print_Function_Header; -- When Bound = Upper_Bound -> -- <= __last. -- When Bound = Lower_Bound -> -- >= __first. procedure Print_A_Bound_Constraint (Rule_File : in SPARK_IO.File_Type; Free_Var : in String; Type_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Bound : in Bounds) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Bound, --# CommandLineData.Content, --# Dictionary.Dict, --# Free_Var, --# LexTokenManager.State, --# Rule_File, --# Scope, --# Type_Sym; is begin if Bound = Lower_Bound then SPARK_IO.Put_String (File => Rule_File, Item => Free_Var, Stop => 0); SPARK_IO.Put_String (File => Rule_File, Item => " >= ", Stop => 0); else SPARK_IO.Put_String (File => Rule_File, Item => Free_Var, Stop => 0); SPARK_IO.Put_String (File => Rule_File, Item => " <= ", Stop => 0); end if; Print_Symbol (File => Rule_File, Scope => Scope, Sym => Type_Sym); if Bound = Lower_Bound then SPARK_IO.Put_String (File => Rule_File, Item => "__first", Stop => 0); else SPARK_IO.Put_String (File => Rule_File, Item => "__last", Stop => 0); end if; end Print_A_Bound_Constraint; -- Generically of the form dependent on Attribute, Bound and Complete: -- <= __last may_be_deduced_from -- [ __ ] . 
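-- Worked example of the shape sketched above (invented names): with
-- Free_Var = "X", Type_Sym = pkg__t, Bound = Upper_Bound,
-- Attribute = "valid(X)" and Attr_Type = pkg__t, Print_A_Bound_Rule emits
--    X <= pkg__t__last may_be_deduced_from
--       [ pkg__t__valid(X) ].
-- and, when Attribute is the empty string, the rule is closed immediately:
--    X <= pkg__t__last may_be_deduced.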
procedure Print_A_Bound_Rule (Rule_File : in SPARK_IO.File_Type; Free_Var : in String; Type_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Bound : in Bounds; Attribute : in String; Attr_Type : in Dictionary.Symbol; Complete : in Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Attribute, --# Attr_Type, --# Bound, --# CommandLineData.Content, --# Complete, --# Dictionary.Dict, --# Free_Var, --# LexTokenManager.State, --# Rule_File, --# Scope, --# Type_Sym; is begin Print_A_Bound_Constraint (Rule_File => Rule_File, Free_Var => Free_Var, Type_Sym => Type_Sym, Scope => Scope, Bound => Bound); SPARK_IO.Put_String (File => Rule_File, Item => " may_be_deduced", Stop => 0); if Attribute'Length > 0 then SPARK_IO.Put_Line (File => Rule_File, Item => "_from", Stop => 0); SPARK_IO.Put_String (File => Rule_File, Item => " [ ", Stop => 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Attr_Type); SPARK_IO.Put_String (File => Rule_File, Item => "__", Stop => 0); SPARK_IO.Put_String (File => Rule_File, Item => Attribute, Stop => 0); -- Complete determines whether to close off the predicate -- or allow it to be extended. if Complete then SPARK_IO.Put_Line (File => Rule_File, Item => " ].", Stop => 0); end if; else SPARK_IO.Put_Line (File => Rule_File, Item => ".", Stop => 0); end if; end Print_A_Bound_Rule; -- Used to print side conditions to constrain that an index is in range. -- <= __last, -- >= __first procedure Print_An_Index_Constraint (Rule_File : in SPARK_IO.File_Type; Free_Var : in String; Type_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Complete : in Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Complete, --# Dictionary.Dict, --# Free_Var, --# LexTokenManager.State, --# Rule_File, --# Scope, --# Type_Sym; is begin SPARK_IO.Put_String (File => Rule_File, Item => " ", Stop => 0); Print_A_Bound_Constraint (Rule_File => Rule_File, Free_Var => Free_Var, Type_Sym => Type_Sym, Scope => Scope, Bound => Upper_Bound); SPARK_IO.Put_Line (File => Rule_File, Item => ",", Stop => 0); SPARK_IO.Put_String (File => Rule_File, Item => " ", Stop => 0); Print_A_Bound_Constraint (Rule_File => Rule_File, Free_Var => Free_Var, Type_Sym => Type_Sym, Scope => Scope, Bound => Lower_Bound); if Complete then SPARK_IO.Put_Line (File => Rule_File, Item => " ].", Stop => 0); end if; end Print_An_Index_Constraint; -- Prints a rule of the form, dependent on Bound, -- (Bound = Upper_Bound) in example: -- element(A, [I]) <= __last -- may_be_deduced_from [ __always_valid(A), -- I <= __last, -- I >= __first ]. 
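-- Concrete (hypothetical) instance of the rule above, for an array type
-- pkg__arr of component pkg__elem indexed by pkg__index:
--    element(A, [I]) <= pkg__elem__last may_be_deduced_from
--       [ pkg__arr__always_valid(A),
--         I <= pkg__index__last,
--         I >= pkg__index__first ].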
procedure Print_A_Scalar_Array_Element_Rule (Rule_File : in SPARK_IO.File_Type; Component_Type : in Dictionary.Symbol; Index_Type : in Dictionary.Symbol; Array_Type : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Bound : in Bounds) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Array_Type, --# Bound, --# CommandLineData.Content, --# Component_Type, --# Dictionary.Dict, --# Index_Type, --# LexTokenManager.State, --# Rule_File, --# Scope; is begin Print_A_Bound_Rule (Rule_File => Rule_File, Free_Var => "element(A, [I])", Type_Sym => Component_Type, Scope => Scope, Bound => Bound, Attribute => "always_valid(A)", Attr_Type => Array_Type, Complete => False); if not Dictionary.TypeIsBoolean (Index_Type) then -- Extend the bound rule with index constraints. SPARK_IO.Put_Line (File => Rule_File, Item => ",", Stop => 0); Print_An_Index_Constraint (Rule_File => Rule_File, Free_Var => "I", Type_Sym => Index_Type, Scope => Scope, Complete => False); end if; -- Complete the rule. SPARK_IO.Put_Line (File => Rule_File, Item => " ].", Stop => 0); end Print_A_Scalar_Array_Element_Rule; -- Prints a rule of the form, dependent on Bound, -- (Bound = Upper_Bound) in example: -- fld_(R) <= __last -- [ may_be_deduced_from __always_valid(R) ]. procedure Print_A_Scalar_Record_Component_Rule (Rule_File : in SPARK_IO.File_Type; Constrained_Var : in String; Applied_Var : in String; Record_Type : in Dictionary.Symbol; Field_Name : in Dictionary.Symbol; Field_Type : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Bound : in Bounds; Complete : in Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Applied_Var, --# Bound, --# CommandLineData.Content, --# Complete, --# Constrained_Var, --# Dictionary.Dict, --# Field_Name, --# Field_Type, --# LexTokenManager.State, --# Record_Type, --# Rule_File, --# Scope; is begin SPARK_IO.Put_String (File => Rule_File, Item => "fld_", Stop => 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Field_Name); Print_A_Bound_Rule (Rule_File => Rule_File, Free_Var => Constrained_Var, Type_Sym => Field_Type, Scope => Scope, Bound => Bound, Attribute => Applied_Var, Attr_Type => Record_Type, Complete => Complete); end Print_A_Scalar_Record_Component_Rule; -- Prints a rule of the form, dependent on Bound, -- (Bound = Upper_Bound) in example: -- element(fld_(R), [I]) <= __last -- may_be_deduced_from -- [ __always_valid(R), -- I <= __last, -- I >= __first ]. 
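-- Concrete (hypothetical) instance of the rule above, for a record type
-- pkg__rec whose array field f has component pkg__elem and index pkg__index:
--    element(fld_f(R), [I]) <= pkg__elem__last may_be_deduced_from
--       [ pkg__rec__always_valid(R),
--         I <= pkg__index__last,
--         I >= pkg__index__first ].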
procedure Print_An_Array_Record_Component_Rule (Rule_File : in SPARK_IO.File_Type; Constrained_Var : in String; Applied_Var : in String; Record_Type : in Dictionary.Symbol; Field_Name : in Dictionary.Symbol; Array_Comp_Type : in Dictionary.Symbol; Index_Type : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Bound : in Bounds; Complete : in Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Applied_Var, --# Array_Comp_Type, --# Bound, --# CommandLineData.Content, --# Complete, --# Constrained_Var, --# Dictionary.Dict, --# Field_Name, --# Index_Type, --# LexTokenManager.State, --# Record_Type, --# Rule_File, --# Scope; is begin SPARK_IO.Put_String (File => Rule_File, Item => "element(fld_", Stop => 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Field_Name); Print_A_Bound_Rule (Rule_File => Rule_File, Free_Var => Constrained_Var, Type_Sym => Array_Comp_Type, Scope => Scope, Bound => Bound, Attribute => Applied_Var, Attr_Type => Record_Type, Complete => False); if not Dictionary.TypeIsBoolean (Index_Type) then -- Extend the bound rule with index constraints. SPARK_IO.Put_Line (File => Rule_File, Item => ",", Stop => 0); Print_An_Index_Constraint (Rule_File => Rule_File, Free_Var => "I", Type_Sym => Index_Type, Scope => Scope, Complete => False); end if; if Complete then SPARK_IO.Put_Line (File => Rule_File, Item => " ].", Stop => 0); end if; end Print_An_Array_Record_Component_Rule; -- Prints the rules related to all components of a record -- using the procedures Print_A_Scalar_Record_Component_Rule and -- Print_An_Array_Record_Component_Rule as appropriate. procedure Print_Record_Rules (Rule_File : in SPARK_IO.File_Type; Record_Type : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Rule_Family_Name; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# derives Rule_Counter from *, --# Dictionary.Dict, --# Record_Type, --# Scope & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Record_Type, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope; is Iterator : Dictionary.Iterator; Array_Comp_Type, Curr_Symbol, Index_Type, Rec_Comp_Type : Dictionary.Symbol; begin Iterator := Dictionary.FirstRecordComponent (Record_Type); while not Dictionary.IsNullIterator (Iterator) loop Curr_Symbol := Dictionary.CurrentSymbol (Iterator); Rec_Comp_Type := Dictionary.GetType (Curr_Symbol); if not Dictionary.Is_Null_Symbol (Rec_Comp_Type) then if Dictionary.IsScalarTypeMark (Rec_Comp_Type, Scope) then Print_Rule_Name (Rule_File => Rule_File); Print_A_Scalar_Record_Component_Rule (Rule_File => Rule_File, Constrained_Var => "(R)", Applied_Var => "always_valid(R)", Record_Type => Record_Type, Field_Name => Curr_Symbol, Field_Type => Rec_Comp_Type, Scope => Scope, Bound => Upper_Bound, Complete => True); Print_Rule_Name (Rule_File => Rule_File); Print_A_Scalar_Record_Component_Rule (Rule_File => Rule_File, Constrained_Var => "(R)", Applied_Var => "always_valid(R)", Record_Type => Record_Type, Field_Name => Curr_Symbol, Field_Type => Rec_Comp_Type, Scope => Scope, Bound => Lower_Bound, Complete => True); elsif Dictionary.IsArrayTypeMark (Rec_Comp_Type, Scope) then -- Semantic checks ensure that it is an array of scalars -- It is an array type mark so get the component type -- and index type. 
Array_Comp_Type := Dictionary.GetArrayComponent (Rec_Comp_Type); Index_Type := Dictionary.GetArrayIndex (Rec_Comp_Type, 1); -- We assume that the array is one-dimensional -- and the index type is discrete: -- these constraints are enforced by static semantic checks. -- Check if the components of the array are scalar and -- non Boolean. if Dictionary.IsScalarTypeMark (Array_Comp_Type, Scope) and not Dictionary.TypeIsBoolean (Array_Comp_Type) then Print_Rule_Name (Rule_File => Rule_File); Print_An_Array_Record_Component_Rule (Rule_File => Rule_File, Constrained_Var => "(R), [I])", Applied_Var => "always_valid(R)", Record_Type => Record_Type, Field_Name => Curr_Symbol, Array_Comp_Type => Array_Comp_Type, Index_Type => Index_Type, Scope => Scope, Bound => Upper_Bound, Complete => True); Print_Rule_Name (Rule_File => Rule_File); Print_An_Array_Record_Component_Rule (Rule_File => Rule_File, Constrained_Var => "(R), [I])", Applied_Var => "always_valid(R)", Record_Type => Record_Type, Field_Name => Curr_Symbol, Array_Comp_Type => Array_Comp_Type, Index_Type => Index_Type, Scope => Scope, Bound => Lower_Bound, Complete => True); end if; end if; end if; Iterator := Dictionary.NextSymbol (Iterator); end loop; end Print_Record_Rules; -- Prints the rules related to all components of a record -- which is an element of an array, A, -- using the procedures Print_A_Scalar_Record_Component_Rule and -- Print_An_Array_Record_Component_Rule as appropriate in conjunction -- with Print_An_Index_Constraint to cover the index of A. procedure Print_Array_Of_Record_Rules (Rule_File : in SPARK_IO.File_Type; Array_Type : in Dictionary.Symbol; Outer_Index : in Dictionary.Symbol; Record_Type : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Rule_Family_Name; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# derives Rule_Counter from *, --# Dictionary.Dict, --# Record_Type, --# Scope & --# SPARK_IO.File_Sys from *, --# Array_Type, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Outer_Index, --# Record_Type, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope; is Iterator : Dictionary.Iterator; Array_Comp_Type, Curr_Symbol, Index_Type, Rec_Comp_Type : Dictionary.Symbol; begin Iterator := Dictionary.FirstRecordComponent (Record_Type); while not Dictionary.IsNullIterator (Iterator) loop Curr_Symbol := Dictionary.CurrentSymbol (Iterator); Rec_Comp_Type := Dictionary.GetType (Curr_Symbol); if not Dictionary.Is_Null_Symbol (Rec_Comp_Type) then if Dictionary.IsScalarTypeMark (Rec_Comp_Type, Scope) then Print_Rule_Name (Rule_File => Rule_File); Print_A_Scalar_Record_Component_Rule (Rule_File => Rule_File, Constrained_Var => "(element(A, [I]))", Applied_Var => "always_valid(A)", Record_Type => Array_Type, Field_Name => Curr_Symbol, Field_Type => Rec_Comp_Type, Scope => Scope, Bound => Upper_Bound, Complete => False); SPARK_IO.Put_Line (File => Rule_File, Item => ",", Stop => 0); Print_An_Index_Constraint (Rule_File => Rule_File, Free_Var => "I", Type_Sym => Outer_Index, Scope => Scope, Complete => True); Print_Rule_Name (Rule_File => Rule_File); Print_A_Scalar_Record_Component_Rule (Rule_File => Rule_File, Constrained_Var => "(element(A, [I]))", Applied_Var => "always_valid(A)", Record_Type => Array_Type, Field_Name => Curr_Symbol, Field_Type => Rec_Comp_Type, Scope => Scope, Bound => Lower_Bound, Complete => False); SPARK_IO.Put_Line (File => Rule_File, Item => ",", Stop 
=> 0); Print_An_Index_Constraint (Rule_File => Rule_File, Free_Var => "I", Type_Sym => Outer_Index, Scope => Scope, Complete => True); elsif Dictionary.IsArrayTypeMark (Rec_Comp_Type, Scope) then -- Semantic checks ensure that it is an array of scalars -- It is an array type mark so get the component type -- and index type. Array_Comp_Type := Dictionary.GetArrayComponent (Rec_Comp_Type); Index_Type := Dictionary.GetArrayIndex (Rec_Comp_Type, 1); -- We assume that the array is one-dimensional -- and the index type is discrete: -- these constraints are enforced by static semantic checks. -- Check if the components of the array are scalar and -- non Boolean. if Dictionary.IsScalarTypeMark (Array_Comp_Type, Scope) and not Dictionary.TypeIsBoolean (Array_Comp_Type) then Print_Rule_Name (Rule_File => Rule_File); Print_An_Array_Record_Component_Rule (Rule_File => Rule_File, Constrained_Var => "(element(A, [I])), [J])", Applied_Var => "always_valid(A)", Record_Type => Array_Type, Field_Name => Curr_Symbol, Array_Comp_Type => Array_Comp_Type, Index_Type => Index_Type, Scope => Scope, Bound => Upper_Bound, Complete => False); SPARK_IO.Put_Line (File => Rule_File, Item => ",", Stop => 0); Print_An_Index_Constraint (Rule_File => Rule_File, Free_Var => "J", Type_Sym => Outer_Index, Scope => Scope, Complete => True); Print_Rule_Name (Rule_File => Rule_File); Print_An_Array_Record_Component_Rule (Rule_File => Rule_File, Constrained_Var => "(element(A, [I])), [J])", Applied_Var => "always_valid(A)", Record_Type => Array_Type, Field_Name => Curr_Symbol, Array_Comp_Type => Array_Comp_Type, Index_Type => Index_Type, Scope => Scope, Bound => Lower_Bound, Complete => False); SPARK_IO.Put_Line (File => Rule_File, Item => ",", Stop => 0); Print_An_Index_Constraint (Rule_File => Rule_File, Free_Var => "J", Type_Sym => Outer_Index, Scope => Scope, Complete => True); end if; end if; end if; Iterator := Dictionary.NextSymbol (Iterator); end loop; end Print_Array_Of_Record_Rules; begin -- Print_Attribute_Function -- Function attributes will have had 'BASE eliminated in model build Lex_Str := Cells.Get_Lex_Str (Heap, Cells.Get_B_Ptr (Heap, Tick_Cell)); Ex_Str := LexTokenManager.Lex_String_To_String (Lex_Str => Lex_Str); -- Support for 'Valid in SPARK95 if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Lex_Str, Lex_Str2 => LexTokenManager.Valid_Token) = LexTokenManager.Str_Eq then Print_Function_Header; Type_Sym := Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell)); SPARK_IO.Put_Char (File, '('); Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => False, Sym => Type_Sym); SPARK_IO.Put_String (File, ") : boolean;", 0); -- and associated rules if Write_Rules then -- first rule Print_Rule_Name (Rule_File => Rule_File); Print_A_Bound_Rule (Rule_File => Rule_File, Free_Var => "X", Type_Sym => Type_Sym, Scope => Scope, Bound => Upper_Bound, Attribute => "valid(X)", Attr_Type => Type_Sym, Complete => True); -- second rule Print_Rule_Name (Rule_File => Rule_File); Print_A_Bound_Rule (Rule_File => Rule_File, Free_Var => "X", Type_Sym => Type_Sym, Scope => Scope, Bound => Lower_Bound, Attribute => "valid(X)", Attr_Type => Type_Sym, Complete => True); end if; -- Support for 'Always_Valid elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Lex_Str, Lex_Str2 => LexTokenManager.Always_Valid_Token) = LexTokenManager.Str_Eq then Type_Sym := Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell)); -- Only output the declarations and generic rules 
if this is the -- first occurance of 'Always_Valid for this type -- The simplification rule is always output, since it contains -- the variable name, and possibly the field selectors if Cells.Is_Null_Cell (Prev_Cell) or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Cells.Get_Lex_Str (Heap, Cells.Get_B_Ptr (Heap, Prev_Cell)), Lex_Str2 => LexTokenManager.Always_Valid_Token) /= LexTokenManager.Str_Eq or else Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell)) /= Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Prev_Cell)) then Print_Function_Header; SPARK_IO.Put_Char (File, '('); Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => False, Sym => Type_Sym); SPARK_IO.Put_String (File, ") : boolean;", 0); -- and the associated rules that must only be output once per type if Write_Rules then if Dictionary.IsScalarTypeMark (Type_Sym, Scope) then -- first rule: X <= type__last may_be_deduced_from [ type__always_valid(X) ]. Print_Rule_Name (Rule_File => Rule_File); Print_A_Bound_Rule (Rule_File => Rule_File, Free_Var => "X", Type_Sym => Type_Sym, Scope => Scope, Bound => Upper_Bound, Attribute => "always_valid(X)", Attr_Type => Type_Sym, Complete => True); -- second rule: X >= type__first may_be_deduced_from [ type__always_valid(X) ]. Print_Rule_Name (Rule_File => Rule_File); Print_A_Bound_Rule (Rule_File => Rule_File, Free_Var => "X", Type_Sym => Type_Sym, Scope => Scope, Bound => Lower_Bound, Attribute => "always_valid(X)", Attr_Type => Type_Sym, Complete => True); elsif -- If an array is always valid then all of its components -- are always valid: -- First check if the symbol is an array type mark of which -- the complete type declaration is visible. not Dictionary.Is_Null_Symbol (Type_Sym) and then (not Dictionary.IsPrivateType (Type_Sym, Scope)) and then Dictionary.IsArrayTypeMark (Type_Sym, Scope) then -- It is an array type mark so get the component type -- and index type. Component_Type := Dictionary.GetArrayComponent (Type_Sym); Index_Type := Dictionary.GetArrayIndex (Type_Sym, 1); -- We assume that the array is one-dimensional -- and the index type is discrete: -- these constraints are enforced by static semantic checks. -- Check if the components of the array are scalar if Dictionary.IsScalarTypeMark (Component_Type, Scope) and not Dictionary.TypeIsBoolean (Component_Type) then -- The components of the array are scalar -- Generate deduduction rules that all scalar, -- non-Boolean components are in type, eg, -- element(A, [I]) >= safe_value_type__first -- may_be_deduced_from -- [array_type__always_valid(A), -- I >= array_range__first, I <= array_range__last] . -- and -- element(A, [I]) <= safe_value_type__last -- may_be_deduced_from -- [array_type__always_valid(A), -- I >= array_range__first, I <= array_range__last] . 
Print_Rule_Name (Rule_File => Rule_File); Print_A_Scalar_Array_Element_Rule (Rule_File => Rule_File, Component_Type => Component_Type, Index_Type => Index_Type, Array_Type => Type_Sym, Scope => Scope, Bound => Upper_Bound); Print_Rule_Name (Rule_File => Rule_File); Print_A_Scalar_Array_Element_Rule (Rule_File => Rule_File, Component_Type => Component_Type, Index_Type => Index_Type, Array_Type => Type_Sym, Scope => Scope, Bound => Lower_Bound); elsif Dictionary.IsRecordTypeMark (Component_Type, Scope) then Print_Array_Of_Record_Rules (Rule_File => Rule_File, Array_Type => Type_Sym, Outer_Index => Index_Type, Record_Type => Component_Type, Scope => Scope); end if; elsif -- If a record is always valid then all of its components -- are always valid: -- First check if the symbol is a record type mark of which -- the complete type declaration is visible. not Dictionary.Is_Null_Symbol (Type_Sym) and then (not Dictionary.IsPrivateType (Type_Sym, Scope)) and then Dictionary.IsRecordTypeMark (Type_Sym, Scope) then Print_Record_Rules (Rule_File => Rule_File, Record_Type => Type_Sym, Scope => Scope); end if; end if; end if; if Write_Rules then -- third rule: type__always_valid(variable__tail(X)) -- may_be_deduced_from type__always_valid(X). Format_Record_Selector (Field_Name => Cells.Get_Assoc_Var (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell)), Selector => Selector, Brackets => Brackets, Variable => Rec_Variable); -- Debug.PrintMsg ("Format_Record_Selector: ", False); -- E_Strings.PutString (SPARK_IO.Standard_Output, Selector); -- Debug.PrintMsg ("", True); -- Debug.PrintSym ("Format_Record_Selector Sym: ", Rec_Variable); Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell))); SPARK_IO.Put_String (Rule_File, "__always_valid(", 0); E_Strings.Put_String (File => Rule_File, E_Str => Selector); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Rec_Variable); SPARK_IO.Put_String (Rule_File, "__tail(X))", 0); E_Strings.Put_String (File => Rule_File, E_Str => Brackets); SPARK_IO.New_Line (Rule_File, 1); SPARK_IO.Put_String (Rule_File, " may_be_deduced_from [", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell))); SPARK_IO.Put_String (Rule_File, "__always_valid(", 0); E_Strings.Put_String (File => Rule_File, E_Str => Selector); SPARK_IO.Put_String (Rule_File, "X)", 0); E_Strings.Put_String (File => Rule_File, E_Str => Brackets); SPARK_IO.Put_Line (Rule_File, "].", 0); end if; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Lex_Str, Lex_Str2 => LexTokenManager.Min_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Lex_Str, Lex_Str2 => LexTokenManager.Max_Token) = LexTokenManager.Str_Eq then Print_Function_Header; SPARK_IO.Put_Char (File, '('); Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => False, Sym => Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell))); SPARK_IO.Put_String (File, ", ", 0); Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => False, Sym => Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell))); SPARK_IO.Put_String (File, ") : ", 0); Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => False, Sym => Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell))); SPARK_IO.Put_String (File, ";", 0); -- Now produce rules for 
Min and Max. -- The Ada LRM states that S'Max denotes a function with the following specification: -- function S'Max(Left, Right : S'Base) -- return S'Base; -- (and similarly for S'Min). if Write_Rules then -- first case Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell))); SPARK_IO.Put_String (Rule_File, "__", 0); Lex_Str := Cells.Get_Lex_Str (Heap, Cells.Get_B_Ptr (Heap, Tick_Cell)); E_Strings.Put_String (File => Rule_File, E_Str => E_Strings.Lower_Case (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Lex_Str))); SPARK_IO.Put_String (Rule_File, "(X, Y) may_be_replaced_by ", 0); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Lex_Str, Lex_Str2 => LexTokenManager.Max_Token) = LexTokenManager.Str_Eq then SPARK_IO.Put_Char (Rule_File, 'X'); else SPARK_IO.Put_Char (Rule_File, 'Y'); end if; SPARK_IO.Put_Line (Rule_File, " if [X >= Y].", 0); -- second case Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell))); SPARK_IO.Put_String (Rule_File, "__", 0); Lex_Str := Cells.Get_Lex_Str (Heap, Cells.Get_B_Ptr (Heap, Tick_Cell)); E_Strings.Put_String (File => Rule_File, E_Str => E_Strings.Lower_Case (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Lex_Str))); SPARK_IO.Put_String (Rule_File, "(X, Y) may_be_replaced_by ", 0); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Lex_Str, Lex_Str2 => LexTokenManager.Max_Token) = LexTokenManager.Str_Eq then SPARK_IO.Put_Char (Rule_File, 'Y'); else SPARK_IO.Put_Char (Rule_File, 'X'); end if; SPARK_IO.Put_Line (Rule_File, " if [Y >= X].", 0); -- type range of output -- lower bound Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell))); SPARK_IO.Put_String (Rule_File, "__", 0); Lex_Str := Cells.Get_Lex_Str (Heap, Cells.Get_B_Ptr (Heap, Tick_Cell)); E_Strings.Put_String (File => Rule_File, E_Str => E_Strings.Lower_Case (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Lex_Str))); SPARK_IO.Put_String (Rule_File, "(X, Y) >= ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell))); SPARK_IO.Put_Line (Rule_File, "__base__first may_be_deduced.", 0); -- upper bound Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell))); SPARK_IO.Put_String (Rule_File, "__", 0); Lex_Str := Cells.Get_Lex_Str (Heap, Cells.Get_B_Ptr (Heap, Tick_Cell)); E_Strings.Put_String (File => Rule_File, E_Str => E_Strings.Lower_Case (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Lex_Str))); SPARK_IO.Put_String (Rule_File, "(X, Y) <= ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell))); SPARK_IO.Put_Line (Rule_File, "__base__last may_be_deduced.", 0); end if; -- Min and Max rules -- side effect modelling attributes for stream reads elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Lex_Str, Lex_Str2 => LexTokenManager.Tail_Token) = LexTokenManager.Str_Eq then Print_Function_Header; SPARK_IO.Put_Char (File, '('); Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => False, Sym => Cells.Get_Symbol_Value 
(Heap, Cells.Get_A_Ptr (Heap, Tick_Cell))); SPARK_IO.Put_String (File, ") : ", 0); Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => False, Sym => Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell))); SPARK_IO.Put_String (File, ";", 0); -- side effect modelling attributes for streams writes elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Lex_Str, Lex_Str2 => LexTokenManager.Append_Token) = LexTokenManager.Str_Eq then Print_Function_Header; SPARK_IO.Put_Char (File, '('); Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => False, Sym => Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell))); SPARK_IO.Put_String (File, ", ", 0); Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => False, Sym => Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell))); SPARK_IO.Put_String (File, ") : ", 0); Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => False, Sym => Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell))); SPARK_IO.Put_String (File, ";", 0); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Lex_Str, Lex_Str2 => LexTokenManager.Pos_Token) = LexTokenManager.Str_Eq then Print_Function_Header; SPARK_IO.Put_Char (File, '('); Print_Symbol (File => File, Scope => Scope, Sym => Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell))); SPARK_IO.Put_String (File, ") : integer;", 0); if Write_Rules then if Dictionary.TypeIsInteger (Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell))) or else Dictionary.TypeIsModular (Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell))) then Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell))); SPARK_IO.Put_Line (Rule_File, "__pos(X) may_be_replaced_by X .", 0); end if; end if; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Lex_Str, Lex_Str2 => LexTokenManager.Ceiling_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Lex_Str, Lex_Str2 => LexTokenManager.Floor_Token) = LexTokenManager.Str_Eq then Print_Function_Header; SPARK_IO.Put_String (File, "(real) : real;", 0); else -- Must be 'Val Print_Function_Header; SPARK_IO.Put_String (File, "(integer) : ", 0); Print_Symbol (File => File, Scope => Scope, Sym => Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell))); SPARK_IO.Put_Char (File, ';'); if Write_Rules then if Dictionary.TypeIsInteger (Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell))) or else Dictionary.TypeIsModular (Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell))) then Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Cells.Get_Symbol_Value (Heap, Cells.Get_A_Ptr (Heap, Tick_Cell))); SPARK_IO.Put_Line (Rule_File, "__val(X) may_be_replaced_by X .", 0); end if; end if; end if; end Print_Attribute_Function; begin -- Print_One_Attribute if Cells.Get_Kind (Heap, Cells.Get_B_Ptr (Heap, Tick_Cell)) = Cell_Storage.Attrib_Value then Print_Attribute_Constant; else Print_Attribute_Function; end if; end Print_One_Attribute; begin -- Print_Attribute_Declarations Current_Attrib := AttributeList; Prev_Attrib := Cells.Null_Cell; while not Cells.Is_Null_Cell (Cells.Get_A_Ptr (Heap, Current_Attrib)) loop Current_Attrib := Cells.Get_A_Ptr (Heap, Current_Attrib); 
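-- Prev_Attrib trails Current_Attrib by one element so that
-- Print_One_Attribute can compare the current attribute with the previous
-- one; Print_Attribute_Function uses this (via Prev_Cell) to emit the
-- 'Always_Valid declaration and its generic rules only once per type.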
Print_One_Attribute (Tick_Cell => Cells.Get_C_Ptr (Heap, Current_Attrib), Prev_Cell => Cells.Get_C_Ptr (Heap, Prev_Attrib)); Prev_Attrib := Current_Attrib; end loop; end Print_Attribute_Declarations; ---------------------------------------------- procedure Print_Bitwise_Op_Declarations (File, Rule_File : in SPARK_IO.File_Type; Write_Rules : in Boolean; Scope : in Dictionary.Scopes) --# global in BitwiseOpList; --# in CommandLineData.Content; --# in Dictionary.Dict; --# in Heap; --# in LexTokenManager.State; --# in Rule_Family_Name; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# derives Rule_Counter from *, --# BitwiseOpList, --# Dictionary.Dict, --# Heap, --# Write_Rules & --# SPARK_IO.File_Sys from *, --# BitwiseOpList, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# Heap, --# LexTokenManager.State, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# Write_Rules; is Current_Op : Cells.Cell; ---------------------------- procedure Print_One_Bitwise_Op (Op_Cell : in Cells.Cell) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in File; --# in Heap; --# in LexTokenManager.State; --# in Rule_Family_Name; --# in Rule_File; --# in Scope; --# in Write_Rules; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# derives Rule_Counter from *, --# Dictionary.Dict, --# Heap, --# Op_Cell, --# Write_Rules & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# Heap, --# LexTokenManager.State, --# Op_Cell, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# Write_Rules; is IndexSym : Dictionary.Symbol; IndexFirst, IndexLast : LexTokenManager.Lex_String; -- procedure to print the common part of rules such as "thetype__and(X,Y)" procedure Print_Bitwise_Op_Function (Type_Sym : in Dictionary.Symbol; Operator : in SP_Symbols.SP_Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Rule_File; --# in Scope; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Operator, --# Rule_File, --# Scope, --# Type_Sym; is begin Print_Symbol (File => Rule_File, Scope => Scope, Sym => Type_Sym); SPARK_IO.Put_String (Rule_File, "__", 0); case Operator is when SP_Symbols.RWand => SPARK_IO.Put_String (Rule_File, "and(", 0); when SP_Symbols.RWor => SPARK_IO.Put_String (Rule_File, "or(", 0); when SP_Symbols.RWxor => SPARK_IO.Put_String (Rule_File, "xor(", 0); when SP_Symbols.RWnot => SPARK_IO.Put_String (Rule_File, "not(", 0); when others => SPARK_IO.Put_String (Rule_File, "undef_op_value(", 0); end case; SPARK_IO.Put_String (Rule_File, "X", 0); if Operator /= SP_Symbols.RWnot then SPARK_IO.Put_String (Rule_File, ", Y", 0); end if; SPARK_IO.Put_String (Rule_File, ") ", 0); end Print_Bitwise_Op_Function; begin -- Print_One_Bitwise_Op if Dictionary.TypeIsArray (Cells.Get_Symbol_Value (Heap, Op_Cell)) then SPARK_IO.Set_Col (File, Indent); SPARK_IO.Put_String (File, "function ", 0); Print_Symbol (File => File, Scope => Scope, Sym => Cells.Get_Symbol_Value (Heap, Op_Cell)); SPARK_IO.Put_String (File, "__", 0); case Cells.Get_Op_Symbol (Heap, Op_Cell) is when SP_Symbols.RWand => SPARK_IO.Put_String (File, "and(", 0); when SP_Symbols.RWor => SPARK_IO.Put_String (File, "or(", 0); when SP_Symbols.RWxor => SPARK_IO.Put_String (File, "xor(", 0); when SP_Symbols.RWnot => SPARK_IO.Put_String (File, "not(", 0); when others => SPARK_IO.Put_String (File, "undef_op_value(", 
0); end case; --# assert True; Print_Symbol (File => File, Scope => Scope, Sym => Cells.Get_Symbol_Value (Heap, Op_Cell)); if Cells.Get_Op_Symbol (Heap, Op_Cell) /= SP_Symbols.RWnot then SPARK_IO.Put_String (File, ", ", 0); Print_Symbol (File => File, Scope => Scope, Sym => Cells.Get_Symbol_Value (Heap, Op_Cell)); end if; SPARK_IO.Put_String (File, ") : ", 0); Print_Symbol (File => File, Scope => Scope, Sym => Cells.Get_Symbol_Value (Heap, Op_Cell)); SPARK_IO.Put_Char (File, ';'); end if; --# assert True; -- Now do rules for it if Write_Rules then -- we want rules for array bitwise ops if Dictionary.TypeIsArray (Cells.Get_Symbol_Value (Heap, Op_Cell)) then -- do array bitwise ops IndexSym := Dictionary.GetArrayIndex (Cells.Get_Symbol_Value (Heap, Op_Cell), 1); IndexFirst := Dictionary.GetScalarAttributeValue (False, LexTokenManager.First_Token, IndexSym); IndexLast := Dictionary.GetScalarAttributeValue (False, LexTokenManager.Last_Token, IndexSym); Print_Rule_Name (Rule_File => Rule_File); SPARK_IO.Put_String (Rule_File, "element(", 0); Print_Bitwise_Op_Function (Type_Sym => Cells.Get_Symbol_Value (Heap, Op_Cell), Operator => Cells.Get_Op_Symbol (Heap, Op_Cell)); SPARK_IO.Put_Line (Rule_File, ", [I]) may_be_replaced_by ", 0); --# assert True; if Cells.Get_Op_Symbol (Heap, Op_Cell) = SP_Symbols.RWnot then SPARK_IO.Put_String (Rule_File, " not element(X, [I]) ", 0); elsif Cells.Get_Op_Symbol (Heap, Op_Cell) = SP_Symbols.RWxor then SPARK_IO.Put_String (Rule_File, " (element(X, [I]) or element(Y, [I])) and (not (element(X, [I]) and element(Y, [I])))", 0); else -- And or Or binary op directly reproducible in fdl SPARK_IO.Put_String (Rule_File, " element(X, [I]) ", 0); case Cells.Get_Op_Symbol (Heap, Op_Cell) is when SP_Symbols.RWand => SPARK_IO.Put_String (Rule_File, "and ", 0); when SP_Symbols.RWor => SPARK_IO.Put_String (Rule_File, "or ", 0); when others => SPARK_IO.Put_String (Rule_File, "undef_op_value ", 0); end case; SPARK_IO.Put_String (Rule_File, "element(Y, [I]) ", 0); end if; SPARK_IO.New_Line (Rule_File, 1); SPARK_IO.Put_String (Rule_File, " if [", 0); E_Strings.Put_String (File => Rule_File, E_Str => Get_Value (Store_Val => IndexFirst, Type_Mark => IndexSym, Scope => Scope)); SPARK_IO.Put_String (Rule_File, " <= I, I <= ", 0); E_Strings.Put_String (File => Rule_File, E_Str => Get_Value (Store_Val => IndexLast, Type_Mark => IndexSym, Scope => Scope)); SPARK_IO.Put_String (Rule_File, "]", 0); End_A_Rule (Rule_File => Rule_File); end if; end if; end Print_One_Bitwise_Op; ---------------------------- begin --Print_Bitwise_Op_Declarations Current_Op := BitwiseOpList; while not Cells.Is_Null_Cell (Cells.Get_A_Ptr (Heap, Current_Op)) loop Current_Op := Cells.Get_A_Ptr (Heap, Current_Op); Print_One_Bitwise_Op (Op_Cell => Cells.Get_C_Ptr (Heap, Current_Op)); end loop; end Print_Bitwise_Op_Declarations; ---------------------------------------------- procedure Print_Export_Variable_Declarations (File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in Heap; --# in LexTokenManager.State; --# in ProcedureExportList; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# Heap, --# LexTokenManager.State, --# ProcedureExportList, --# Scope; is List_Element : Cells.Cell; begin List_Element := Cells.Get_A_Ptr (Heap, ProcedureExportList); while not Cells.Is_Null_Cell (List_Element) loop SPARK_IO.Set_Col (File, Indent); SPARK_IO.Put_String (File, "var ", 
0); Print_Symbol (File => File, Scope => Scope, Sym => Cells.Get_Symbol_Value (Heap, List_Element)); SPARK_IO.Put_String (File, "__", 0); E_Strings.Put_String (File => File, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Cells.Get_Lex_Str (Heap, List_Element))); SPARK_IO.Put_String (File, " : ", 0); Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => False, Sym => Cells.Get_Symbol_Value (Heap, List_Element)); SPARK_IO.Put_Line (File, ";", 0); List_Element := Cells.Get_A_Ptr (Heap, List_Element); end loop; end Print_Export_Variable_Declarations; ---------------------------------------------- procedure PrintReturnSymbol (File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in Heap; --# in LexTokenManager.State; --# in ReturnSymbol; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# Heap, --# LexTokenManager.State, --# ReturnSymbol, --# Scope; is begin if ReturnSymbol /= Cells.Null_Cell then SPARK_IO.Set_Col (File, Indent); SPARK_IO.Put_String (File, "var return : ", 0); Print_Symbol_Type (File => File, Scope => Scope, Sub_Type => False, Component => False, Sym => Cells.Get_Symbol_Value (Heap, ReturnSymbol)); SPARK_IO.Put_Line (File, ";", 0); end if; end PrintReturnSymbol; ---------------------------------------------- -- print pending constants for Min_Int and Max_Int but only if they appear in VC procedure PrintRootIntegerDeclaration (File : in SPARK_IO.File_Type) --# global in RootIntegerUsed; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# RootIntegerUsed; is begin if RootIntegerUsed then SPARK_IO.Set_Col (File, Indent); SPARK_IO.Put_Line (File, "const system__min_int : integer = pending;", 0); SPARK_IO.Set_Col (File, Indent); SPARK_IO.Put_Line (File, "const system__max_int : integer = pending;", 0); end if; end PrintRootIntegerDeclaration; ---------------------------------------------- -- print replacement rules for Min_Int/Max_Int if they have been used procedure PrintRootIntegerRules (Rule_File : in SPARK_IO.File_Type; Write_Rules : in Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in RootIntegerUsed; --# in Rule_Family_Name; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# derives Rule_Counter from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# RootIntegerUsed, --# Write_Rules & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# RootIntegerUsed, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Write_Rules; is SystemSym, MinMaxSym : Dictionary.Symbol; procedure PrintRule (Sym : in Dictionary.Symbol; MinOrMax : in String) --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in Rule_Family_Name; --# in Rule_File; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# derives Rule_Counter from *, --# Dictionary.Dict, --# LexTokenManager.State, --# Sym & --# SPARK_IO.File_Sys from *, --# Dictionary.Dict, --# LexTokenManager.State, --# MinOrMax, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Sym; is Store_Val : LexTokenManager.Lex_String; begin if not Dictionary.Is_Null_Symbol (Sym) then Store_Val := Dictionary.Get_Value (The_Constant => Sym); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Store_Val, Lex_Str2 => LexTokenManager.Null_String) /= 
LexTokenManager.Str_Eq then Print_Rule_Name (Rule_File => Rule_File); SPARK_IO.Put_String (Rule_File, "system__", 0); SPARK_IO.Put_String (Rule_File, MinOrMax, 0); SPARK_IO.Put_String (Rule_File, "_int may_be_replaced_by ", 0); E_Strings.Put_String (File => Rule_File, E_Str => Maths.ValueToString (Maths.ValueRep (Store_Val))); End_A_Rule (Rule_File => Rule_File); end if; end if; end PrintRule; begin -- PrintRootIntegerRules if RootIntegerUsed and Write_Rules then -- get symbol for Min_Int SystemSym := Dictionary.LookupItem (Name => LexTokenManager.System_Token, Scope => Dictionary.GlobalScope, Context => Dictionary.ProgramContext, Full_Package_Name => False); if not Dictionary.Is_Null_Symbol (SystemSym) then -- Package System has been defined in a config file -- or a shadow specification, so look for Min_Int in it MinMaxSym := Dictionary.LookupItem (Name => LexTokenManager.Min_Int_Token, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => SystemSym), Context => Dictionary.ProgramContext, Full_Package_Name => False); PrintRule (MinMaxSym, "min"); -- and then for Max_Int MinMaxSym := Dictionary.LookupItem (Name => LexTokenManager.Max_Int_Token, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => SystemSym), Context => Dictionary.ProgramContext, Full_Package_Name => False); PrintRule (MinMaxSym, "max"); end if; end if; end PrintRootIntegerRules; procedure PrintConstantsInDeclarationOrder --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in End_Position; --# in LexTokenManager.State; --# in Needed_Symbols; --# in Rule_Family_Name; --# in Rule_File; --# in Scope; --# in Write_Rules; --# in out ErrorHandler.Error_Context; --# in out Heap; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# End_Position, --# ErrorHandler.Error_Context, --# Heap, --# LexTokenManager.State, --# Needed_Symbols, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# SPARK_IO.File_Sys, --# Write_Rules & --# Heap, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Heap, --# Needed_Symbols, --# Write_Rules & --# Rule_Counter from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Needed_Symbols, --# Scope, --# Write_Rules; is LSym : Dictionary.Symbol; L_Declare_List : Cells.Cell; begin L_Declare_List := Needed_Symbols; loop exit when Pile.IsNull (L_Declare_List); LSym := Pile.NodeSymbol (Heap, L_Declare_List); if Dictionary.Is_Constant (LSym) or else Dictionary.IsKnownDiscriminant (LSym) then PrintConstantRules (Write_Rules, LSym, Rule_File, Scope, End_Position); if not Cells.Is_Null_Cell (Pile.DAG (Heap, L_Declare_List)) then PrintConstantReplacementRule (Write_Rules, LSym, Pile.DAG (Heap, L_Declare_List), Rule_File, Scope); end if; end if; L_Declare_List := Pile.Sibling (Heap, L_Declare_List); end loop; end PrintConstantsInDeclarationOrder; -- Same as above, but prints rules in reverse-declaration order. -- Contract is _identical_ to above. Implementation is recursive, -- though, so hidden from SPARK! This implementation is provided, -- but unused at present. 
procedure PrintConstantsInReverseOrder --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in End_Position; --# in LexTokenManager.State; --# in Needed_Symbols; --# in Rule_Family_Name; --# in Rule_File; --# in Scope; --# in Write_Rules; --# in out ErrorHandler.Error_Context; --# in out Heap; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# End_Position, --# ErrorHandler.Error_Context, --# Heap, --# LexTokenManager.State, --# Needed_Symbols, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# SPARK_IO.File_Sys, --# Write_Rules & --# Heap, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Heap, --# Needed_Symbols, --# Write_Rules & --# Rule_Counter from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Needed_Symbols, --# Scope, --# Write_Rules; is --# hide PrintConstantsInReverseOrder; -- Prints List C in reverse order - recursive algorithm, -- so hidden from SPARK. procedure PrintConstantsList (C : in Cells.Cell) is LSym : Dictionary.Symbol; begin if Pile.IsNull (C) then null; else PrintConstantsList (Pile.Sibling (Heap, C)); LSym := Pile.NodeSymbol (Heap, C); if Dictionary.Is_Constant (LSym) or else Dictionary.IsKnownDiscriminant (LSym) then PrintConstantRules (Write_Rules, LSym, Rule_File, Scope, End_Position); if not Cells.Is_Null_Cell (Pile.DAG (Heap, C)) then PrintConstantReplacementRule (Write_Rules, LSym, Pile.DAG (Heap, C), Rule_File, Scope); end if; end if; end if; end PrintConstantsList; begin PrintConstantsList (Needed_Symbols); end PrintConstantsInReverseOrder; pragma Unreferenced (PrintConstantsInReverseOrder); ---------------------------------------------- procedure RankDeclarations (Max_Rank : out Cells.Cell_Rank) --# global in Dictionary.Dict; --# in Needed_Symbols; --# in Scope; --# in out Heap; --# derives Heap, --# Max_Rank from Dictionary.Dict, --# Heap, --# Needed_Symbols, --# Scope; is AllDeclarationsRanked : Boolean; Overall_Max_Rank : Cells.Cell_Rank; ----------------------------------------------------------------------------- -- RankDeclarationList -- -- Ranks as many cells in the Pile rooted at Needed_Symbols as it can. -- The Pile is not guaranteed to be in strict declaration-before-use order, -- so the rank of some cells may depend on the ranks of other Cells which -- are currently unranked. In this case, the offending Cell is left -- with Unknown_Rank and AllDeclarationsRanked returns as False, -- to be completed by a subsequent iteration of RankDeclarations. -- When all Cells are ranked successfully, AllDeclarationsRanked returns -- as True, and Overall_Max_Rank is set to the highest rank used in the Pile. ----------------------------------------------------------------------------- procedure RankDeclarationList --# global in Dictionary.Dict; --# in Needed_Symbols; --# in Scope; --# in out Heap; --# in out Overall_Max_Rank; --# out AllDeclarationsRanked; --# derives AllDeclarationsRanked, --# Heap from Dictionary.Dict, --# Heap, --# Needed_Symbols, --# Scope & --# Overall_Max_Rank from *, --# Dictionary.Dict, --# Heap, --# Needed_Symbols, --# Scope; is Declare_List : Cells.Cell; -------------------------------------------------------------------------- -- RankDeclaration -- -- Tries to set the rank of a single type. 
Basically, scalar types have -- rank 1, array types have rank 1 more than the rank of their component -- type, and records have rank 1 more than the maximum rank of all their -- fields' types. AllDeclarationsRanked and Overall_Max_Rank are set -- as described above. -------------------------------------------------------------------------- procedure RankDeclaration --# global in Declare_List; --# in Dictionary.Dict; --# in Needed_Symbols; --# in Scope; --# in out AllDeclarationsRanked; --# in out Heap; --# in out Overall_Max_Rank; --# derives AllDeclarationsRanked, --# Heap, --# Overall_Max_Rank from *, --# Declare_List, --# Dictionary.Dict, --# Heap, --# Needed_Symbols, --# Scope; is Sym : Dictionary.Symbol; Own_Var_Sym : Dictionary.Symbol; NewRank : Cells.Cell_Rank; MaxFieldRank : Cells.Cell_Rank; RecordIt : Dictionary.Iterator; Component_It : Dictionary.Iterator; AllFieldsRanked : Boolean; -- Increment rank X, but saturate at Cell_Rank'Last function IncRank (X : in Cells.Cell_Rank) return Cells.Cell_Rank is R : Cells.Cell_Rank; begin if X < Cells.Cell_Rank'Last then R := X + 1; else R := Cells.Cell_Rank'Last; end if; return R; end IncRank; -- Searches the entire Pile rooted at Needed_Symbols and returns -- the Rank of the Cell containing the given Sym. If no match is -- found, then Unknown_Rank is returned. function FindRankOf (Sym : in Dictionary.Symbol) return Cells.Cell_Rank --# global in Dictionary.Dict; --# in Heap; --# in Needed_Symbols; is CurrentCell : Cells.Cell; Result : Cells.Cell_Rank; begin if Dictionary.IsPredefinedBooleanType (Sym) or else Dictionary.IsUnknownTypeMark (Sym) then -- Special cases - Boolean is not entered in the Pile, so we -- won't find it there. It's scalar, so has rank 1. -- Secondly, Unknown type can appear in the Pile following -- a semantic error in a type declaration, so we need -- to return something or we will fail to terminate. Result := 1; elsif Dictionary.TypeIsScalar (Sym) then -- All scalar types have rank 1, so don't bother to -- search for them... Result := 1; elsif Dictionary.IsProtectedType (Sym) then -- Protected types always appear as "pending" in FDL, -- so we give them rank 1 Result := 1; else CurrentCell := Needed_Symbols; Result := Cells.Unknown_Rank; loop exit when Pile.IsNull (CurrentCell); if Dictionary.IsTypeMark (Pile.NodeSymbol (Heap, CurrentCell)) and then Dictionary.Types_Are_Equal (Left_Symbol => Sym, Right_Symbol => Pile.NodeSymbol (Heap, CurrentCell), Full_Range_Subtype => False) then Result := Cells.Get_Rank (Heap, CurrentCell); exit; end if; CurrentCell := Pile.Sibling (Heap, CurrentCell); end loop; end if; return Result; end FindRankOf; begin if Cells.Get_Rank (Heap, Declare_List) = Cells.Unknown_Rank then Sym := Pile.NodeSymbol (Heap, Declare_List); if Dictionary.IsType (Sym) or Dictionary.IsSubtype (Sym) then Debug_Rank_Sym (Msg => "Trying to rank Symbol ", Sym => Sym); if Dictionary.IsPrivateType (Sym, Scope) then -- All scalar types and abstract proof types have rank 1, since they -- don't depend on any other type. If a type is a private extension -- of a tagged type, then its rank is one more than the rank of its -- "inherit" field. 
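-- Editorial illustration (not from the original source; the type names
-- below are invented for the example): given
--    type Parent is tagged record A : Integer; end record;   -- rank 2
--    type Child is new Parent with private;
-- the private extension Child is ranked IncRank (FindRankOf (Parent)) = 3,
-- so its FDL declaration is only printed after Parent's has been printed.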
if Dictionary.TypeIsExtendedTagged (Sym) then Sym := Dictionary.GetRootOfExtendedType (Sym); Debug_Rank_Sym (Msg => "Is extended tagged with root type ", Sym => Sym); NewRank := FindRankOf (Sym); if NewRank = Cells.Unknown_Rank then Debug_Rank_Int (Msg => "Extended tagged record, but parent has unknown rank", N => Integer (NewRank)); AllDeclarationsRanked := False; else NewRank := IncRank (NewRank); Debug_Rank_Int (Msg => "Extended tagged record, so setting rank to", N => Integer (NewRank)); Cells.Set_Rank (Heap, Declare_List, NewRank); if NewRank > Overall_Max_Rank then Overall_Max_Rank := NewRank; end if; end if; else Debug_Rank_Int (Msg => "Private here, so setting rank to", N => 1); Cells.Set_Rank (Heap, Declare_List, 1); end if; elsif Dictionary.TypeIsIncompleteHere (Sym, Scope) then -- Not complete, so appears as "pending" in FDL, -- so we give them rank 1 Debug_Rank_Int (Msg => "Incomplete type, so setting rank to", N => 1); Cells.Set_Rank (Heap, Declare_List, 1); elsif Dictionary.TypeIsScalar (Sym) then -- All scalar types and abstract proof types have rank 1, since they -- don't depend on any other type. Debug_Rank_Int (Msg => "Scalar, so setting rank to", N => 1); Cells.Set_Rank (Heap, Declare_List, 1); elsif Dictionary.IsProtectedType (Sym) then -- Protected types always appear as "pending" in FDL, -- so we give them rank 1 Debug_Rank_Int (Msg => "Protected, so setting rank to", N => 1); Cells.Set_Rank (Heap, Declare_List, 1); elsif Dictionary.TypeIsAbstractProof (Sym) then -- Abstract proof types are either "pending" or a record type -- depending on the Scope. Pending types don't depend on anything, -- and so have rank 1. Own_Var_Sym := Find_Own_Var_Matching_This_Type (Sym => Sym, Scope => Scope); if IsLocalOwnVariableWithRefinement (Own_Var_Sym, Scope) then Component_It := Dictionary.FirstConstituent (Own_Var_Sym); MaxFieldRank := 1; AllFieldsRanked := True; loop exit when Dictionary.IsNullIterator (Component_It); Sym := Dictionary.GetRootType (Dictionary.GetType (Dictionary.CurrentSymbol (Component_It))); NewRank := FindRankOf (Sym); if NewRank = Cells.Unknown_Rank then AllDeclarationsRanked := False; AllFieldsRanked := False; exit; end if; if NewRank > MaxFieldRank then MaxFieldRank := NewRank; end if; Component_It := Dictionary.NextSymbol (Component_It); end loop; if AllFieldsRanked then NewRank := IncRank (MaxFieldRank); Debug_Rank_Int (Msg => "Refinement record, so setting rank to", N => Integer (NewRank)); Cells.Set_Rank (Heap, Declare_List, NewRank); if NewRank > Overall_Max_Rank then Overall_Max_Rank := NewRank; end if; end if; else Debug_Rank_Int (Msg => "Pending abstract proof, so setting rank to", N => 1); Cells.Set_Rank (Heap, Declare_List, 1); end if; elsif Dictionary.TypeIsRecord (Sym) then -- The rank of record type is one more than the -- maximum rank of all its field types. 
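-- Editorial worked example (not from the original source; the type names
-- below are invented): with
--    type Vec is array (Index_T) of Integer;
--    type Rec is record N : Integer; V : Vec; end record;
-- Integer is scalar (rank 1), Vec is ranked IncRank (1) = 2, and Rec is
-- ranked IncRank of its maximum field rank (2), i.e. 3, so Vec's FDL
-- declaration is guaranteed to precede Rec's when printing by rank.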
MaxFieldRank := 1; AllFieldsRanked := True; RecordIt := Dictionary.FirstRecordComponent (Sym); loop exit when Dictionary.IsNullIterator (RecordIt); Sym := Dictionary.GetRootType (Dictionary.GetType (Dictionary.CurrentSymbol (RecordIt))); NewRank := FindRankOf (Sym); if NewRank = Cells.Unknown_Rank then AllDeclarationsRanked := False; AllFieldsRanked := False; exit; end if; if NewRank > MaxFieldRank then MaxFieldRank := NewRank; end if; RecordIt := Dictionary.NextSymbol (RecordIt); end loop; if AllFieldsRanked then NewRank := IncRank (MaxFieldRank); Debug_Rank_Int (Msg => "Record, so setting rank to", N => Integer (NewRank)); Cells.Set_Rank (Heap, Declare_List, NewRank); if NewRank > Overall_Max_Rank then Overall_Max_Rank := NewRank; end if; end if; elsif Dictionary.TypeIsArray (Sym) then -- Array index types are always discrete, so will always -- have rank 1. The rank of an array type is therefore one -- more than the rank of its component type. NewRank := FindRankOf (Dictionary.GetRootType (Dictionary.GetArrayComponent (Sym))); if NewRank = Cells.Unknown_Rank then AllDeclarationsRanked := False; else NewRank := IncRank (NewRank); Debug_Rank_Int (Msg => "Array, so setting rank to", N => Integer (NewRank)); Cells.Set_Rank (Heap, Declare_List, NewRank); if NewRank > Overall_Max_Rank then Overall_Max_Rank := NewRank; end if; end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Assertion_Failure, Msg => "RankDeclaration - unknown type"); end if; end if; end if; end RankDeclaration; begin AllDeclarationsRanked := True; Declare_List := Needed_Symbols; loop exit when Pile.IsNull (Declare_List); RankDeclaration; Declare_List := Pile.Sibling (Heap, Declare_List); end loop; end RankDeclarationList; begin -- RankDeclarations Overall_Max_Rank := 1; loop RankDeclarationList; exit when AllDeclarationsRanked; end loop; Max_Rank := Overall_Max_Rank; end RankDeclarations; procedure Print_Type_Declarations (Up_To_Rank : in Cells.Cell_Rank) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in File; --# in LexTokenManager.State; --# in Needed_Symbols; --# in Rule_Family_Name; --# in Rule_File; --# in Scope; --# in Type_List; --# in out Heap; --# in out L_Heap; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Heap, --# Rule_Counter, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Heap, --# Needed_Symbols, --# Scope, --# Up_To_Rank & --# L_Heap from *, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Needed_Symbols, --# Scope, --# Type_List, --# Up_To_Rank & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# Heap, --# LexTokenManager.State, --# Needed_Symbols, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# Up_To_Rank; is Declare_List : Cells.Cell; Sym : Dictionary.Symbol; Already_Present : Boolean; OK : Boolean; begin for CurrentRank in Cells.Cell_Rank range 1 .. 
Up_To_Rank loop Debug_Rank_Int (Msg => "Printing type declarations at rank", N => Integer (CurrentRank)); Declare_List := Needed_Symbols; loop exit when Pile.IsNull (Declare_List); Sym := Pile.NodeSymbol (Heap, Declare_List); if Dictionary.IsType (Sym) or Dictionary.IsSubtype (Sym) then if Cells.Get_Rank (Heap, Declare_List) = CurrentRank then Debug_Rank_Sym (Msg => " Printing type declaration for ", Sym => Sym); Print_Declaration (File, Scope, Sym); -- If Sym denotes a full view of a record type or a private view of -- an extended tagged record, then add it to Type_List so -- we later have the option of generating proof rules for that type. if Dictionary.TypeIsRecord (Sym) then if ((Dictionary.TypeIsExtendedTagged (Sym) or not Dictionary.IsPrivateType (Sym, Scope))) and not Dictionary.TypeIsOwnAbstractHere (Sym, Scope) then --# accept Flow, 10, OK, "Expected ineffective assignment to Ok" & --# Flow, 10, Already_Present, "Expected ineffective assignment to Already_Present"; Lists.Add_Symbol (Heap => L_Heap, The_List => Type_List, Symbol => Sym, Already_Present => Already_Present, Ok => OK); --# end accept; end if; end if; end if; end if; Declare_List := Pile.Sibling (Heap, Declare_List); end loop; end loop; --# accept Flow, 33, OK, "Expected ineffective assignment to Ok" & --# Flow, 33, Already_Present, "Expected ineffective assignment to Already_Present"; end Print_Type_Declarations; begin -- PrintDeclarations; Lists.Init (L_Heap); Print_Declaration_Head (File => File, Scope => Scope); SPARK_IO.Put_Line (File, " function round__(real) : integer;", 0); PrintRuleHeader (Write_Rules, Rule_File); PrintStandardRules (Write_Rules, Rule_File); RankDeclarations (Max_Rank); Print_Type_Declarations (Up_To_Rank => Max_Rank); -- Print constant and discriminant declarations Declare_List := Needed_Symbols; loop exit when Pile.IsNull (Declare_List); Sym := Pile.NodeSymbol (Heap, Declare_List); if Dictionary.Is_Constant (Sym) or else Dictionary.IsKnownDiscriminant (Sym) then Print_Declaration (File, Scope, Sym); end if; Declare_List := Pile.Sibling (Heap, Declare_List); end loop; -- Print rules for constants PrintConstantsInDeclarationOrder; -- It may be better to print constant replacement rules in reverse -- declaration order, so that composites that refer to scalars come -- out first, and thus get replaced in that order by the Simplifier. -- Further study needed, though, in co-operation with further work -- on the Simplifier. 
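-- Editorial illustration (not from the original source; the names below
-- are invented): for
--    S : constant Integer := 3;
--    C : constant Vec     := Vec'(others => S);
-- declaration order emits the replacement rule for S before the rule for
-- C, whereas reverse order would emit the composite rule for C first and
-- leave the scalar rule for S until last.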
-- PrintConstantsInReverseOrder; PrintRootIntegerDeclaration (File); PrintRootIntegerRules (Rule_File, Write_Rules); Print_Attribute_Declarations (File => File, Scope => Scope); Print_Bitwise_Op_Declarations (File => File, Rule_File => Rule_File, Write_Rules => Write_Rules, Scope => Scope); --# accept Flow, 10, L_Heap, "Expected ineffective assignment to L_Heap" & --# Flow, 10, Type_List, "Expected ineffective assignment to Type_List"; PrintTypeRules (Write_Rules, Rule_File); --# end accept; Declare_List := Needed_Symbols; loop exit when Pile.IsNull (Declare_List); Sym := Pile.NodeSymbol (Heap, Declare_List); if Dictionary.Is_Variable (Sym) then Print_Declaration (File, Scope, Sym); end if; Declare_List := Pile.Sibling (Heap, Declare_List); end loop; -- Print declaration of return variable here, after other vars and -- before the function declarations PrintReturnSymbol (File, Scope); Declare_List := Needed_Symbols; loop exit when Pile.IsNull (Declare_List); Sym := Pile.NodeSymbol (Heap, Declare_List); if Dictionary.Is_Subprogram (Sym) then Print_Declaration (File, Scope, Sym); end if; Declare_List := Pile.Sibling (Heap, Declare_List); end loop; Print_Export_Variable_Declarations (File => File, Scope => Scope); PrintDeclarationTail (File); end PrintDeclarations; spark-2012.0.deb/examiner/e_strings-not_spark.adb0000644000175000017500000000241011753202336020711 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SPARK.Ada.Strings.Unbounded.Not_SPARK; package body E_Strings.Not_SPARK is --# hide E_Strings.Not_SPARK; function Get_String (E_Str : E_Strings.T) return String is begin return SPARK.Ada.Strings.Unbounded.Not_SPARK.To_String (Source => E_Str.Content); end Get_String; end E_Strings.Not_SPARK; spark-2012.0.deb/examiner/lists.ads0000644000175000017500000001044111753202336016100 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Dictionary; with ExaminerConstants; with LexTokenManager; use type Dictionary.Symbol; use type ExaminerConstants.RefType; use type LexTokenManager.Str_Comp_Result; --# inherit Dictionary, --# ExaminerConstants, --# LexTokenManager; package Lists is type List_Heap is private; type List is private; Null_List : constant List; procedure Init (Heap : out List_Heap); --# derives Heap from ; procedure New_List (Heap : in out List_Heap; The_List : out List; OK : out Boolean); --# derives Heap, --# OK, --# The_List from Heap; procedure Add_Name (Heap : in out List_Heap; The_List : in List; Name : in LexTokenManager.Lex_String; Already_Present : out Boolean; Ok : out Boolean); --# global in LexTokenManager.State; --# derives Already_Present, --# Heap, --# Ok from Heap, --# LexTokenManager.State, --# Name, --# The_List; procedure Add_Symbol (Heap : in out List_Heap; The_List : in List; Symbol : in Dictionary.Symbol; Already_Present : out Boolean; Ok : out Boolean); --# global in LexTokenManager.State; --# derives Already_Present, --# Heap, --# Ok from Heap, --# LexTokenManager.State, --# Symbol, --# The_List; procedure Get_First (Heap : in out List_Heap; The_List : in out List; Symbol : out Dictionary.Symbol; Empty : out Boolean; Ok : out Boolean); --# global in LexTokenManager.State; --# derives Empty, --# Heap, --# Ok, --# Symbol, --# The_List from Heap, --# LexTokenManager.State, --# The_List; -- Removes head of list and returns it. Ok is false if the list is illegal. -- Empty is true if there is no value to retrieve. -- List changes as a result of call but is never deleted even when empty. procedure Delete_List (Heap : in out List_Heap; The_List : in out List); --# global in LexTokenManager.State; --# derives Heap from *, --# LexTokenManager.State, --# The_List & --# The_List from ; function Is_Member (Heap : List_Heap; The_List : List; Str : LexTokenManager.Lex_String) return Boolean; --# global in LexTokenManager.State; private type List is range 0 .. ExaminerConstants.ListsHeapSize; --# assert List'Base is Integer; -- for the "Large" Examiner type Heap_Element is record -- case ??? is -- when ??? => Name : LexTokenManager.Lex_String; -- when ??? => Symbol : Dictionary.Symbol; -- end case; Next : List; end record; type Heap_Arrays is array (List) of Heap_Element; type List_Heap is record Heap_Array : Heap_Arrays; High_Mark : List; First_Free : List; end record; Null_List : constant List := 0; end Lists; spark-2012.0.deb/examiner/dictionary-getscope.adb0000644000175000017500000002543111753202336020702 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Dictionary) function GetScope (Item : Symbol) return Scopes is Scope : Scopes; Item_Local : Symbol; -------------------------------------------------------------------------------- function Get_Implicit_Proof_Function_Scope (Proof_Function : Symbol) return Scopes --# global in Dict; is Ada_Function : RawDict.Subprogram_Info_Ref; Scope : Scopes; begin Ada_Function := RawDict.GetImplicitProofFunctionAdaFunction (Proof_Function); Scope := RawDict.Get_Declaration_Scope (The_Declaration => RawDict.Get_Subprogram_Specification (The_Subprogram => Ada_Function)); if RawDict.Get_Subprogram_Implicit_Proof_Function (The_Subprogram => Ada_Function, Abstraction => IsRefined) = Proof_Function then Scope := GetLocalScope (Scope); end if; return Scope; end Get_Implicit_Proof_Function_Scope; -------------------------------------------------------------------------------- function Get_Subprogram_Parameter_Scope (The_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref) return Scopes --# global in Dict; is begin return Set_Visibility (The_Visibility => Local, The_Unit => RawDict.Get_Subprogram_Symbol (RawDict.Get_Subprogram_Parameter_Subprogram (The_Subprogram_Parameter => The_Subprogram_Parameter))); end Get_Subprogram_Parameter_Scope; -------------------------------------------------------------------------------- function Get_Parameter_Constraint_Scope (The_Parameter_Constraint : RawDict.Parameter_Constraint_Info_Ref) return Scopes --# global in Dict; is begin return Get_Subprogram_Parameter_Scope (The_Subprogram_Parameter => RawDict.Get_Parameter_Constraint_Subprogram_Parameter (The_Parameter_Constraint => The_Parameter_Constraint)); end Get_Parameter_Constraint_Scope; -------------------------------------------------------------------------------- function Get_Generic_Parameter_Scope (The_Generic_Parameter : RawDict.Generic_Parameter_Info_Ref) return Scopes --# global in Dict; is begin return Set_Visibility (The_Visibility => Local, The_Unit => RawDict.Get_Generic_Unit_Symbol (RawDict.Get_Generic_Parameter_Owning_Generic (The_Generic_Parameter => The_Generic_Parameter))); end Get_Generic_Parameter_Scope; -------------------------------------------------------------------------------- function Get_Generic_Unit_Scope (The_Generic_Unit : RawDict.Generic_Unit_Info_Ref) return Scopes --# global in Dict; is begin return RawDict.Get_Generic_Unit_Scope (The_Generic_Unit => The_Generic_Unit); end Get_Generic_Unit_Scope; -------------------------------------------------------------------------------- function Get_Implicit_Return_Variable_Scope (The_Implicit_Return_Variable : RawDict.Implicit_Return_Variable_Info_Ref) return Scopes --# global in Dict; is begin return Set_Visibility (The_Visibility => Local, The_Unit => RawDict.Get_Subprogram_Symbol (RawDict.Get_Implicit_Return_Variable_Function (The_Implicit_Return_Variable => The_Implicit_Return_Variable))); end Get_Implicit_Return_Variable_Scope; -------------------------------------------------------------------------------- function Get_Quantified_Variable_Scope (The_Quantified_Variable : RawDict.Quantified_Variable_Info_Ref) return Scopes --# global in Dict; is begin return Set_Visibility (The_Visibility => Local, The_Unit => 
RawDict.Get_Quantified_Variable_Region (The_Quantified_Variable => The_Quantified_Variable)); end Get_Quantified_Variable_Scope; -------------------------------------------------------------------------------- function Get_Loop_Scope (The_Loop : Symbol) return Scopes --# global in Dict; is begin return Set_Visibility (The_Visibility => Local, The_Unit => RawDict.GetLoopRegion (The_Loop)); end Get_Loop_Scope; -------------------------------------------------------------------------------- function Get_Loop_Parameter_Scope (The_Parameter : Symbol) return Scopes --# global in Dict; is begin return Set_Visibility (The_Visibility => Local, The_Unit => RawDict.GetLoopParameterLoop (The_Parameter)); end Get_Loop_Parameter_Scope; -------------------------------------------------------------------------------- function Get_Enumeration_Literal_Scope (The_Enumeration_Literal : RawDict.Enumeration_Literal_Info_Ref) return Scopes --# global in Dict; is begin return RawDict.Get_Declaration_Scope (The_Declaration => Get_Type_Declaration (Type_Mark => RawDict.Get_Enumeration_Literal_Type (The_Enumeration_Literal => The_Enumeration_Literal))); end Get_Enumeration_Literal_Scope; -------------------------------------------------------------------------------- function Get_Record_Component_Scope (The_Record_Component : RawDict.Record_Component_Info_Ref) return Scopes --# global in Dict; is begin return Set_Visibility (The_Visibility => Local, The_Unit => RawDict.Get_Type_Symbol (RawDict.Get_Record_Component_Record_Type (The_Record_Component => The_Record_Component))); end Get_Record_Component_Scope; -------------------------------------------------------------------------------- function Get_Known_Discriminant_Scope (The_Discriminant : Symbol) return Scopes --# global in Dict; is begin return Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Type_Symbol (RawDict.GetDiscriminantProtectedType (The_Discriminant))); end Get_Known_Discriminant_Scope; -------------------------------------------------------------------------------- function Get_Constituent_Scope (The_Constituent : RawDict.Constituent_Info_Ref) return Scopes --# global in Dict; is begin return Get_Variable_Scope (The_Variable => RawDict.Get_Constituent_Variable (The_Constituent => The_Constituent)); end Get_Constituent_Scope; begin -- GetScope Item_Local := Item; -- if the symbol is a special on loop entry variable we use the original variable -- to determine whether a prefix is needed if RawDict.GetSymbolDiscriminant (Item_Local) = LoopEntryVariableSymbol then Item_Local := RawDict.GetLoopEntryVariableOriginalVar (Item_Local); end if; -- Useful statements for investigating an InvalidGetScopeRequest failure -- Debug.PrintMsg ("Discriminant on entry to GetScope case statement is ", False); -- Debug.PrintMsg (SymbolDiscriminant'Image (RawDict.GetSymbolDiscriminant (Item_Local)), True); case RawDict.GetSymbolDiscriminant (Item_Local) is when Type_Symbol => Scope := Get_Type_Scope (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Item_Local)); when Constant_Symbol => Scope := Get_Constant_Scope (The_Constant => RawDict.Get_Constant_Info_Ref (Item => Item_Local)); when Subprogram_Symbol => Scope := Get_Subprogram_Scope (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Item_Local)); when Package_Symbol => Scope := Get_Package_Scope (The_Package => RawDict.Get_Package_Info_Ref (Item => Item_Local)); when ImplicitProofFunctionSymbol => Scope := Get_Implicit_Proof_Function_Scope (Proof_Function => Item_Local); when 
Subprogram_Parameter_Symbol => Scope := Get_Subprogram_Parameter_Scope (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => Item_Local)); when Parameter_Constraint_Symbol => Scope := Get_Parameter_Constraint_Scope (The_Parameter_Constraint => RawDict.Get_Parameter_Constraint_Info_Ref (Item => Item_Local)); when Generic_Parameter_Symbol => Scope := Get_Generic_Parameter_Scope (The_Generic_Parameter => RawDict.Get_Generic_Parameter_Info_Ref (Item => Item_Local)); when Generic_Unit_Symbol => Scope := Get_Generic_Unit_Scope (The_Generic_Unit => RawDict.Get_Generic_Unit_Info_Ref (Item => Item_Local)); when Implicit_Return_Variable_Symbol => Scope := Get_Implicit_Return_Variable_Scope (The_Implicit_Return_Variable => RawDict.Get_Implicit_Return_Variable_Info_Ref (Item => Item_Local)); when Variable_Symbol => Scope := Get_Variable_Scope (The_Variable => RawDict.Get_Variable_Info_Ref (Item => Item_Local)); when Quantified_Variable_Symbol => Scope := Get_Quantified_Variable_Scope (The_Quantified_Variable => RawDict.Get_Quantified_Variable_Info_Ref (Item => Item_Local)); when LoopSymbol => Scope := Get_Loop_Scope (The_Loop => Item_Local); when LoopParameterSymbol => Scope := Get_Loop_Parameter_Scope (The_Parameter => Item_Local); when Enumeration_Literal_Symbol => Scope := Get_Enumeration_Literal_Scope (The_Enumeration_Literal => RawDict.Get_Enumeration_Literal_Info_Ref (Item => Item_Local)); when Record_Component_Symbol => Scope := Get_Record_Component_Scope (The_Record_Component => RawDict.Get_Record_Component_Info_Ref (Item => Item_Local)); when KnownDiscriminantSymbol => Scope := Get_Known_Discriminant_Scope (The_Discriminant => Item_Local); when Constituent_Symbol => Scope := Get_Constituent_Scope (The_Constituent => RawDict.Get_Constituent_Info_Ref (Item => Item_Local)); when others => -- non-exec code Scope := NullScope; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.GetScope"); end case; return Scope; end GetScope; spark-2012.0.deb/examiner/SPARK.LLA0000644000175000017500000016331611753202337015476 0ustar eugeneugen(*----------------------------------------------------------------------------- (* (C) Altran Praxis Limited (*----------------------------------------------------------------------------- (* (* The SPARK toolset is free software; you can redistribute it and/or modify it (* under terms of the GNU General Public License as published by the Free (* Software Foundation; either version 3, or (at your option) any later (* version. The SPARK toolset is distributed in the hope that it will be (* useful, but WITHOUT ANY WARRANTY; without even the implied warranty of (* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General (* Public License for more details. You should have received a copy of the GNU (* General Public License distributed with the SPARK toolset; see file (* COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of (* the license. (* (*============================================================================= ------------------------------------------------------------------------------*) (* SPARK.LLA Purpose: This file contains an LALR (1) grammar for the SPARK Language. It is used by the SPARK parser-generator SparkLALR to automatically construct a parser which creates a syntax tree as a SPARK text is read. The automatically generated parser requires a lexical analyser to partition a SPARK text into a sequence of terminal symbols. 
For the Examiner SparkLex serves this purpose. The grammar file has the following conventions: 1. Comments are multi-line and are designated as in Pascal. See the comment block containing this documentation. Comments cannot be nested, hence the comment delimiters cannot be given explicitly here. 2. The grammar file has two main sections a. The terminal definitions designated by the string "&TERM" followed by a list of terminal symbols separated by commas. b. The grammar rules designated by the string "&GRAM" followed by a list of production rules separated by semicolons. The rules define the non-terminal symbols of the grammar. 3. A grammar rule is defined in BNF format with ":" symbol replacing "::=". 4. Comments containing section numbers refer to the SPARK language definition: SPARK 95 - The SPADE Ada 95 Kernel (including RAVENSPARK). Clients: SparkLALR generates the parser SPParser which is then used by the Examiner MainLoop. Use: The SPARK.LLA grammar file is an input to the SPARK parser-generator SparkLALR which is used to create a parser for SPARK texts, SPParser. A SPARK package defining all the terminal and non-terminal symbols of the grammar as an enumerated type is required for SPParser and the lexical analyser SparkLex. In the Examiner the package SP_Symbols contains the declaration of the enumerated type SPSymbol. The symbols enumerated in the declaration of SPSymbol must be consistent with those appearing in the grammar file. Extension: SPARK.LLA should be updated with extreme caution as the parser generated from it creates a syntax tree which is directly used by the Examiner for its analysis. If the structure of the grammar and therefore the syntax tree is changed the Examiner will not behave correctly until it is updated to recognise the changed syntax tree structure. It is possible to extend the grammar without affecting the existing Examiner functionality providing the basic structure of the productions remains unchanged. The following sorts of extensions are possible with caution 1. Add a new terminal symbol (update SP_Symbols correspondingly). 2. Add a new non-terminal symbol via a new production (update SP_Symbols correspondingly). 3. Add one or more further options to an existing production. Any extra productions or extended productions must maintain the constraints placed on an LALR(1) grammar. 
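    Editorial illustration (not part of the original file; the production
    and symbol names below are invented): a new production added under
    extension 2 above would follow the same BNF conventions as the
    existing rules, for example

        new_item_list : new_item_list comma new_item | new_item ;

    with new_item_list, new_item and any new terminals also added to the
    SPSymbol enumeration in SP_Symbols.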
------------------------------------------------------------------------------*) &TERM (* Spark Terminal Symbols (From Lexical Analyser) *) predefined_FDL_identifier, (* 2.2 *) ampersand, apostrophe, left_paren, right_paren, multiply, plus, comma, minus, point, divide, colon, semicolon, less_than, equals, greater_than, vertical_bar, tilde, percent, arrow, double_dot, double_star, becomes, not_equal, greater_or_equal, less_or_equal, left_label_paren, right_label_paren, box, (* FDL operators *) implies, is_equivalent_to, square_open, square_close, (* 2.3 *) identifier, (* 2.4 *) (* numeric_literal, *) integer_number, real_number, based_integer, based_real, (* 2.5 *) character_literal, (* 2.6 *) string_literal, (* 2.7 *) comment, (* 2.9 *) RWabort, RWabs, RWabstract, RWaccept, RWaccess, RWaliased, RWall, RWand, RWandthen, RWany, RWarray, RWassert, RWassume, RWat, RWbegin, RWbody, RWcase, RWcheck, RWconstant, RWdeclare, RWdelay, RWdelta, RWderives, RWdigits, RWdo, RWelse, RWelsif, RWend, RWentry, RWexception, RWexit, RWfrom, RWfor, RWforall, RWforsome, RWfunction, RWgeneric, RWglobal, RWgoto, RWhide, RWif, RWin, RWinherit, RWinitializes, RWis, RWinterface, RWlimited, RWloop, RWmain_program, RWmod, RWnew, RWnot, RWnotin, RWnull, RWof, RWor, RWorelse, RWothers, RWout, RWoverriding, RWown, RWpackage, RWpost, RWpragma, RWpre, RWprivate, RWprocedure, RWprotected, RWraise, RWrange, RWrecord, RWrem, RWrenames, RWrequeue, RWreturn, RWreverse, RWselect, RWseparate, RWsome RWsubtype, RWsynchronized, RWtagged, RWtask, RWterminate, RWthen, RWtype, RWuntil, RWuse, RWwhen, RWwhile, RWwith, RWxor, (* 2.11 *) annotation_end, annotation_start, annotation_continuation, proof_context, hide_directive, (* 4.1.4 *) attribute_ident, (* Illegal Tokens *) illegal_id, illegal_number, illegal_token, illegal_comment, unterminated_string, &GRAM (* SPARK Production Rules *) (* 10.1 *) (* b *) compilation_unit : context_clause library_unit | library_unit | context_clause secondary_unit | secondary_unit | apragma | ; (* context_clause cannot be null *) (* 2.4 *) (* a *) numeric_literal : decimal_literal | based_literal ; (* 2.4.1 *) (* a *) decimal_literal : integer_number | real_number ; (* 2.4.2 *) (* a *) based_literal : based_integer | based_real ; (* 2.8 *) (* a *) pragma_rep : pragma_rep apragma | ; apragma : RWpragma identifier semicolon | RWpragma identifier left_paren argument_association_rep right_paren semicolon | assert_pragma ; (* special production since "assert" is a reserved word in SPARK *) assert_pragma : RWpragma RWassert left_paren argument_association_rep right_paren semicolon ; argument_association_rep : argument_association_rep comma argument_association | argument_association ; (* b *) argument_association : identifier arrow ADA_expression | ADA_expression ; (* 3.0 *) (* ------------------------------------------------------------------ The following rule is not used in the grammar definition declaration : program_declaration | proof_declaration ------------------------------------------------------------------*) (* 3.1 *) (* a *) basic_declaration : object_declaration | full_type_declaration | subtype_declaration ; (* 3.2 *) (* a *) object_declaration : constant_declaration | variable_declaration ; (* b *) constant_declaration : identifier_list colon RWconstant type_mark becomes expression semicolon | identifier_list colon RWconstant becomes expression semicolon ; (* c *) variable_declaration : identifier_list colon type_mark semicolon | identifier_list colon type_mark becomes expression semicolon | 
identifier_list colon RWaliased type_mark semicolon | identifier_list colon RWaliased type_mark becomes expression semicolon ; (* d *) identifier_list : identifier_list comma identifier | identifier ; (* 3.3.1 *) (* b *) full_type_declaration : RWtype identifier RWis type_definition semicolon | task_type_declaration | protected_type_declaration ; (* c *) type_definition : enumeration_type_definition | integer_type_definition | derived_type_definition | real_type_definition | array_type_definition | record_type_definition | modular_type_definition | type_extension ; (*tag*) (*tag*) type_extension : private_type_extension | record_type_extension ; (*tag*) private_type_extension : RWnew type_mark RWwith RWprivate ; (*tag*) record_type_extension : RWnew type_mark RWwith record_definition ; (* 3.3.2 *) (* a *) subtype_declaration : RWsubtype identifier RWis subtype_indication semicolon ; (* b *) subtype_indication : type_mark constraint | type_mark ; (* c *) type_mark : dotted_simple_name ; (* d *) constraint : range_constraint | floating_point_constraint | fixed_point_constraint | index_or_discriminant_constraint ; (* 3.4 *) (* 3.5 *) (* a *) range_constraint : RWrange arange ; (* b *) arange : attribute | simple_expression double_dot simple_expression ; (* 3.5.1 *) (* a *) enumeration_type_definition : left_paren enumeration_literal_specification comma enumeration_type_definition_rep right_paren | left_paren enumeration_literal_specification right_paren ; enumeration_type_definition_rep : enumeration_type_definition_rep comma enumeration_literal_specification | enumeration_literal_specification ; (* b *) enumeration_literal_specification : enumeration_literal ; (* c *) enumeration_literal : identifier ; (* 3.5.4 *) (* a *) integer_type_definition : range_constraint ; derived_type_definition : RWnew type_mark | RWnew type_mark range_constraint | RWnew type_mark floating_point_constraint ; (* allow in grammar, but reject in Sem *) modular_type_definition : RWmod simple_expression ; (* 3.5.6 *) (* a *) real_type_definition : floating_point_constraint | fixed_point_constraint ; (* 3.5.7 *) (* a *) floating_point_constraint : floating_accuracy_definition | floating_accuracy_definition range_constraint ; (* b *) floating_accuracy_definition : RWdigits simple_expression ; (* 3.5.9 *) (* a *) fixed_point_constraint : fixed_accuracy_definition | fixed_accuracy_definition range_constraint ; (* b *) fixed_accuracy_definition : RWdelta simple_expression ; (* 3.6 *) (* a *) array_type_definition : unconstrained_array_definition | constrained_array_definition ; (* b *) unconstrained_array_definition : RWarray left_paren unconstrained_array_definition_rep right_paren RWof type_mark ; unconstrained_array_definition_rep : unconstrained_array_definition_rep comma index_subtype_definition | index_subtype_definition ; (* c *) constrained_array_definition : RWarray index_constraint RWof type_mark ; (* d *) index_subtype_definition : type_mark RWrange box ; (* e *) index_constraint : left_paren index_constraint_rep right_paren ; index_constraint_rep : index_constraint_rep comma type_mark | type_mark ; (* 3.7 *) (* discriminants for protected declarations *) known_discriminant_part : left_paren known_discriminant_part_rep right_paren ; known_discriminant_part_rep : known_discriminant_part_rep semicolon discriminant_specification | discriminant_specification ; discriminant_specification : identifier_list colon type_mark | identifier_list colon access_definition ; access_definition : (* 3.10 *) RWaccess type_mark ; 
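(* Editorial note (illustration only, not part of the original grammar):
   a discriminated protected declaration such as
      protected type PT (Ceiling : Priority) is ...
   has its discriminant part "(Ceiling : Priority)" matched by
   known_discriminant_part above, with each item matched by
   discriminant_specification as identifier_list colon type_mark. *)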
(* 3.7.1 *) index_or_discriminant_constraint : left_paren discriminant_association right_paren ; discriminant_association : named_argument_association | positional_argument_association ; (* 3.8 *) record_type_definition : tag_option record_definition ; record_definition : RWrecord component_list RWend RWrecord | null_record ; null_record : RWnull RWrecord | RWrecord RWnull semicolon RWend RWrecord ; component_list : component_list component_declaration | component_declaration ; (* c *) component_declaration : identifier_list colon component_subtype_definition semicolon ; (* d *) component_subtype_definition : type_mark ; (* 3.9 *) (* a *) declarative_part : renaming_declaration_rep initial_declarative_item_rep later_declarative_item_rep | initial_declarative_item_rep later_declarative_item_rep | renaming_declaration_rep initial_declarative_item_rep | initial_declarative_item_rep | renaming_declaration_rep later_declarative_item_rep | later_declarative_item_rep | renaming_declaration_rep ; initial_declarative_item_rep : initial_declarative_item_rep basic_declarative_item | basic_declarative_item | initial_declarative_item_rep package_declaration | package_declaration | initial_declarative_item_rep renaming_declaration | initial_declarative_item_rep use_type_clause | initial_declarative_item_rep proof_renaming_declaration | initial_declarative_item_rep apragma | generic_package_instantiation ; (* b *) basic_declarative_item : basic_declaration | justification_statement | representation_clause | basic_proof_declaration ; later_declarative_item_rep : later_declarative_item_rep later_declarative_item | later_declarative_item | later_declarative_item_rep apragma ; later_declarative_item : abody | proof_function_declaration | generic_subprogram_instantiation ; abody : proper_body | body_stub ; proper_body : subprogram_body | package_body | task_body | protected_body ; basic_proof_declaration : proof_type_declaration | type_assertion | object_assertion | proof_constant_declaration ; type_assertion : base_type_assertion | alwaysvalid_variable_assertion ; base_type_assertion : proof_context RWassert dotted_simple_name attribute_ident RWis identifier semicolon annotation_end ; alwaysvalid_variable_assertion : proof_context RWassert dotted_simple_name attribute_ident semicolon annotation_end ; object_assertion : proof_context RWfor simple_name_rep RWdeclare identifier semicolon annotation_end ; proof_type_declaration : proof_context RWtype identifier RWis proof_type_definition annotation_end ; proof_type_definition : RWabstract semicolon ; proof_constant_declaration : proof_context identifier_list colon RWconstant type_mark becomes annotation_expression semicolon annotation_end ; proof_function_declaration : proof_context function_specification semicolon annotation_end function_constraint ; annotation_range_constraint : RWrange annotation_arange ; annotation_arange : annotation_attribute | annotation_simple_expression double_dot annotation_simple_expression ; (*------------------------Annotation Expressions------------------------*) annotation_name : annotation_simple_name | annotation_selected_component | annotation_name left_paren annotation_name_argument_list right_paren | composite_update ; composite_update : annotation_name square_open store_rep square_close ; store_rep : store_rep semicolon store | store ; store : store_list arrow annotation_expression ; store_list : store_list comma annotation_expression | annotation_expression ; (* b *) annotation_simple_name : identifier | identifier tilde | 
identifier percent ; (* c *) annotation_prefix : annotation_name ; annotation_name_argument_list : annotation_named_argument_association | annotation_positional_argument_association ; annotation_named_argument_association : annotation_named_argument_association comma annotation_simple_name arrow annotation_expression | annotation_simple_name arrow annotation_expression ; annotation_positional_argument_association : annotation_positional_argument_association comma annotation_expression | annotation_expression ; annotation_selected_component : annotation_prefix point annotation_selector ; annotation_selector : annotation_simple_name ; annotation_attribute : annotation_prefix annotation_attribute_designator ; annotation_attribute_designator : annotation_attribute_designator attribute_ident annotation_attribute_designator_opt | attribute_ident annotation_attribute_designator_opt ; annotation_attribute_designator_opt : left_paren annotation_expression right_paren | left_paren annotation_expression comma annotation_expression right_paren | ; annotation_aggregate : left_paren annotation_component_association right_paren ; annotation_component_association : RWothers arrow annotation_aggregate_or_expression | annotation_named_association | annotation_positional_association ; annotation_named_association : annotation_named_association_rep comma RWothers arrow annotation_aggregate_or_expression | annotation_named_association_rep ; annotation_named_association_rep : annotation_named_association_rep comma annotation_aggregate_choice_rep arrow annotation_aggregate_or_expression | annotation_aggregate_choice_rep arrow annotation_aggregate_or_expression ; annotation_aggregate_choice_rep : annotation_aggregate_choice_rep vertical_bar annotation_aggregate_choice | annotation_aggregate_choice ; annotation_aggregate_choice : annotation_simple_expression | annotation_simple_expression annotation_range_constraint | annotation_simple_expression double_dot annotation_simple_expression ; annotation_positional_association : annotation_aggregate_or_expression comma RWothers arrow annotation_aggregate_or_expression | annotation_positional_association_rep comma RWothers arrow annotation_aggregate_or_expression | annotation_positional_association_rep ; annotation_positional_association_rep : annotation_positional_association_rep comma annotation_aggregate_or_expression | annotation_aggregate_or_expression comma annotation_aggregate_or_expression ; annotation_aggregate_or_expression : annotation_aggregate | annotation_expression ; annotation_expression : annotation_relation | annotation_relation RWand annotation_expression_rep1 | annotation_relation RWandthen annotation_expression_rep2 | annotation_relation RWor annotation_expression_rep3 | annotation_relation RWorelse annotation_expression_rep4 | annotation_relation RWxor annotation_expression_rep5 | annotation_relation implies annotation_expression_rep6 | annotation_relation is_equivalent_to annotation_expression_rep7 | quantified_expression ; annotation_expression_rep1 : annotation_expression_rep1 RWand annotation_relation | annotation_relation ; annotation_expression_rep2 : annotation_expression_rep2 RWandthen annotation_relation | annotation_relation ; annotation_expression_rep3 : annotation_expression_rep3 RWor annotation_relation | annotation_relation ; annotation_expression_rep4 : annotation_expression_rep4 RWorelse annotation_relation | annotation_relation ; annotation_expression_rep5 : annotation_expression_rep5 RWxor annotation_relation | annotation_relation 
; annotation_expression_rep6 : annotation_expression_rep6 implies annotation_relation | annotation_relation ; annotation_expression_rep7 : annotation_expression_rep7 RWxor annotation_relation | annotation_relation ; annotation_relation : annotation_simple_expression | annotation_simple_expression relational_operator annotation_simple_expression | annotation_simple_expression inside annotation_arange | annotation_simple_expression outside annotation_arange | annotation_simple_expression inside annotation_name | annotation_simple_expression outside annotation_name ; annotation_simple_expression : annotation_simple_expression binary_adding_operator annotation_term | annotation_simple_expression_opt ; annotation_simple_expression_opt : unary_adding_operator annotation_term | annotation_term ; annotation_term : annotation_term multiplying_operator annotation_factor | annotation_factor ; annotation_factor : annotation_primary | annotation_primary double_star annotation_primary | RWabs annotation_primary | RWnot annotation_primary ; annotation_primary : numeric_literal | character_literal | string_literal | annotation_name | annotation_qualified_expression | left_paren annotation_expression right_paren | annotation_attribute ; annotation_qualified_expression : annotation_name apostrophe annotation_aggregate | annotation_name apostrophe annotation_extension_aggregate | annotation_name apostrophe left_paren annotation_expression right_paren ; annotation_extension_aggregate : left_paren annotation_ancestor_part RWwith annotation_record_component_association right_paren | left_paren annotation_ancestor_part RWwith RWnull RWrecord right_paren ; annotation_ancestor_part : annotation_expression ; annotation_record_component_association : annotation_named_record_component_association | annotation_positional_record_component_association ; annotation_positional_record_component_association : annotation_positional_record_component_association comma annotation_expression | annotation_expression ; annotation_named_record_component_association : annotation_named_record_component_association comma record_component_selector_name arrow annotation_expression | record_component_selector_name arrow annotation_expression ; record_component_selector_name : identifier ; (*----------------------------------------------------------------------*) (* 4.1 *) (* a *) name : simple_name | selected_component | name left_paren name_argument_list right_paren ; (* b *) simple_name : identifier ; dotted_simple_name : dotted_simple_name point identifier | identifier ; (* c *) prefix : name ; name_argument_list : named_argument_association | positional_argument_association ; named_argument_association : named_argument_association comma simple_name arrow expression | simple_name arrow expression ; positional_argument_association : positional_argument_association comma expression | expression ; (* 4.1.1 *) (* a *) (* 4.1.3 *) (* a *) selected_component : prefix point selector ; (* b *) selector : simple_name ; (* 4.1.4 *) (* a *) attribute : prefix attribute_designator | character_literal attribute_designator ; (* b *) attribute_designator : attribute_designator attribute_ident attribute_designator_opt | attribute_ident attribute_designator_opt ; attribute_designator_opt : left_paren expression right_paren | left_paren expression comma expression right_paren | ; (* 4.3 *) (* a *) aggregate : left_paren component_association right_paren ; (* b *) component_association : RWothers arrow aggregate_or_expression | named_association | 
positional_association ; (* c *) named_association : named_association_rep comma RWothers arrow aggregate_or_expression | named_association_rep ; named_association_rep : named_association_rep comma aggregate_choice_rep arrow aggregate_or_expression | aggregate_choice_rep arrow aggregate_or_expression ; aggregate_choice_rep : aggregate_choice_rep vertical_bar aggregate_choice | aggregate_choice ; (* d *) aggregate_choice : simple_expression | simple_expression range_constraint | simple_expression double_dot simple_expression ; (* e *) positional_association : aggregate_or_expression comma RWothers arrow aggregate_or_expression | positional_association_rep comma RWothers arrow aggregate_or_expression | positional_association_rep ; positional_association_rep : positional_association_rep comma aggregate_or_expression | aggregate_or_expression comma aggregate_or_expression ; aggregate_or_expression : aggregate | expression ; (* 4.4 *) (* a *) expression : relation | relation RWand expression_rep1 | relation RWandthen expression_rep2 | relation RWor expression_rep3 | relation RWorelse expression_rep4 | relation RWxor expression_rep5 ; expression_rep1 : expression_rep1 RWand relation | relation ; expression_rep2 : expression_rep2 RWandthen relation | relation ; expression_rep3 : expression_rep3 RWor relation | relation ; expression_rep4 : expression_rep4 RWorelse relation | relation ; expression_rep5 : expression_rep5 RWxor relation | relation ; (* b *) relation : simple_expression | simple_expression relational_operator simple_expression | simple_expression inside arange | simple_expression outside arange | simple_expression inside name | simple_expression outside name ; inside : RWin ; outside : RWnotin ; (* c *) simple_expression : simple_expression binary_adding_operator term | simple_expression_opt ; simple_expression_opt : unary_adding_operator term | term ; (* d *) term : term multiplying_operator factor | factor ; (* e *) factor : primary | primary double_star primary | RWabs primary | RWnot primary ; (* f *) primary : numeric_literal | character_literal | string_literal | name | qualified_expression | left_paren expression right_paren | attribute ; (* 4.5 *) (* a *) (* b *) relational_operator : equals | not_equal | less_than | less_or_equal | greater_than | greater_or_equal ; (* c *) binary_adding_operator : plus | minus | ampersand ; (* d *) unary_adding_operator : plus | minus ; (* e *) multiplying_operator : multiply | divide | RWmod | RWrem ; (* f *) (* 4.6 *) (* a *) (* 4.7 *) (* a *) qualified_expression : name apostrophe aggregate | name apostrophe extension_aggregate | name apostrophe left_paren expression right_paren ; extension_aggregate : left_paren ancestor_part RWwith record_component_association right_paren | left_paren ancestor_part RWwith RWnull RWrecord right_paren ; ancestor_part : expression ; record_component_association : named_record_component_association | positional_record_component_association ; positional_record_component_association : positional_record_component_association comma expression | expression ; named_record_component_association : named_record_component_association comma record_component_selector_name arrow expression | record_component_selector_name arrow expression ; (* 5. 
*) (* a *) sequence_of_statements : sequence_of_statements statement | statement ; (* b *) label : left_label_paren simple_name right_label_paren ; sequence_of_labels : sequence_of_labels label | label ; statement : sequence_of_labels simple_statement | simple_statement | sequence_of_labels compound_statement | compound_statement | proof_statement | justification_statement | sequence_of_labels apragma | apragma ; (* 5.1 *) (* a *) simple_statement : null_statement | assignment_statement | procedure_call_statement | exit_statement | return_statement | delay_statement ; (* b *) compound_statement : if_statement | case_statement | loop_statement ; (* c *) null_statement : RWnull semicolon ; (* 5.2 *) (* a *) (* The unconstrained_array_assignment is a special case where we allow an unqualified aggregate only where it is being used to fully assign to an out parameter of an unconstrained array type. *) unconstrained_array_assignment : name becomes left_paren RWothers arrow expression right_paren semicolon ; assignment_statement : name becomes expression semicolon | unconstrained_array_assignment ; (* 5.3 *) (* a *) if_statement : RWif condition RWthen sequence_of_statements elsif_part else_part RWend RWif semicolon ; elsif_part : elsif_part RWelsif condition RWthen sequence_of_statements | ; else_part : RWelse sequence_of_statements | ; (* b *) condition : expression ; (* 5.4 *) (* a *) case_statement : RWcase expression RWis alternatives others_part RWend RWcase semicolon ; alternatives : alternatives case_statement_alternative | case_statement_alternative ; (* b *) case_statement_alternative : RWwhen case_statement_alternative_rep arrow sequence_of_statements ; case_statement_alternative_rep : case_statement_alternative_rep vertical_bar case_choice | case_choice ; (* c *) case_choice : simple_expression | simple_expression range_constraint | simple_expression double_dot simple_expression ; others_part : RWwhen RWothers arrow sequence_of_statements | ; (* 5.5 *) (* a *) (* null production to provide a placeholder in infinite loops where we can model a syntactic exit to keep the flow analyser happy *) end_of_loop : ; loop_statement : simple_name colon loop_statement_opt RWloop sequence_of_statements end_of_loop RWend RWloop simple_name semicolon | loop_statement_opt RWloop sequence_of_statements end_of_loop RWend RWloop semicolon ; loop_statement_opt : iteration_scheme | iteration_scheme loop_invariant | ; (* b *) iteration_scheme : RWwhile condition | RWfor loop_parameter_specification ; (* c *) loop_parameter_specification : identifier forward type_mark | identifier forward type_mark RWrange arange | identifier backward type_mark | identifier backward type_mark RWrange arange ; forward : RWin ; backward : RWin RWreverse ; (* d *) loop_invariant : assert_statement ; (* 5.7 *) (* a *) exit_statement : RWexit semicolon | RWexit RWwhen condition semicolon | RWexit simple_name semicolon | RWexit simple_name RWwhen condition semicolon ; (* 5.8 *) (* a *) return_statement : RWreturn expression semicolon ; (* 5.10 *) proof_statement : assert_statement | assume_statement | check_statement ; assert_statement : proof_context RWassert predicate semicolon annotation_end ; assume_statement : proof_context RWassume predicate semicolon annotation_end ; check_statement : proof_context RWcheck predicate semicolon annotation_end ; (* justification of expected warnings *) (*PNA--------------------*) justification_statement : start_justification | end_justification ; start_justification : proof_context RWaccept 
justification_rep semicolon annotation_end ; justification_rep : justification_rep ampersand justification_clause | justification_clause ; justification_clause : identifier comma numeric_literal comma justification_opt justification_string | identifier comma numeric_literal comma justification_all justification_string ; dotted_simple_name_list : dotted_simple_name_list comma dotted_simple_name_or_null | dotted_simple_name_or_null ; dotted_simple_name_or_null : dotted_simple_name | null_name ; null_name : RWnull ; justification_opt : dotted_simple_name_list comma | ; justification_all : RWall comma ; justification_string : string_literal | string_literal comma justification_string ; end_justification : proof_context RWend RWaccept semicolon annotation_end ; (*------------------PNA *) (* 6.1 *) (* a *) overriding_indicator : RWnot RWoverriding | RWoverriding ; subprogram_declaration : overriding_indicator procedure_specification semicolon procedure_annotation | procedure_specification semicolon procedure_annotation | overriding_indicator function_specification semicolon function_annotation | function_specification semicolon function_annotation | proof_function_declaration ; not_overriding_subprogram_declaration : procedure_specification semicolon procedure_annotation | function_specification semicolon function_annotation | proof_function_declaration ; (* b *) subprogram_specification : procedure_specification | function_specification ; (* c *) procedure_specification : RWprocedure simple_name | RWprocedure simple_name formal_part ; (* d *) function_specification : RWfunction designator formal_part RWreturn type_mark | RWfunction designator RWreturn type_mark ; (* e *) designator : identifier ; (* f *) operator_symbol : string_literal ; (* g *) formal_part : left_paren formal_part_rep right_paren ; formal_part_rep : formal_part_rep semicolon parameter_specification | parameter_specification ; (* h *) parameter_specification : identifier_list colon mode type_mark ; (* i *) mode : in_mode | inout_mode | out_mode | ; in_mode : RWin ; inout_mode : RWin RWout ; out_mode : RWout ; (* 6.1.1 *) (* a *) procedure_annotation : procedure_constraint | declare_annotation procedure_constraint | dependency_relation procedure_constraint | dependency_relation declare_annotation procedure_constraint | moded_global_definition procedure_constraint | moded_global_definition declare_annotation procedure_constraint | moded_global_definition dependency_relation procedure_constraint | moded_global_definition dependency_relation declare_annotation procedure_constraint ; (* b *) function_annotation : function_constraint | moded_global_definition function_constraint ; (* 6.1.2 *) (* a *) moded_global_definition : annotation_start RWglobal global_definition_rep annotation_end ; global_definition_rep : global_definition_rep global_variable_clause | global_variable_clause ; global_variable_clause : mode global_variable_list semicolon ; (* b *) global_variable_list : global_variable_list comma global_variable | global_variable ; (* c *) global_variable : entire_variable ; (* d *) entire_variable : dotted_simple_name ; (* e *) dependency_relation : annotation_start RWderives dependency_relation_opt semicolon annotation_end ; dependency_relation_opt : dependency_relation_rep | dependency_relation_rep ampersand null_import_list | null_import_list | ; dependency_relation_rep : dependency_relation_rep ampersand dependency_clause | dependency_clause ; (* f *) dependency_clause : dependency_clause_optrep RWfrom 
dependency_clause_opt ; dependency_clause_opt : multiply comma dependency_clause_optrep | dependency_clause_optrep | multiply | ; dependency_clause_optrep : dependency_clause_optrep comma entire_variable | entire_variable ; null_import_list : RWnull RWfrom dependency_clause_optrep ; (* 6.1.3 *) (* c *) precondition : annotation_start RWpre predicate semicolon annotation_end ; (* d *) postcondition : annotation_start RWpost predicate semicolon annotation_end ; procedure_constraint : precondition postcondition | precondition | postcondition | ; function_constraint : precondition return_expression | precondition | return_expression | ; return_expression : annotation_start RWreturn annotation_expression semicolon annotation_end | annotation_start RWreturn simple_name arrow predicate semicolon annotation_end ; (* 6.1.4 *) (* a *) (* Note we use RWdeclare here to avoid potential ambiguity between this and base_type_assertion, which uses RWassert *) declare_annotation : annotation_start RWdeclare property_list semicolon annotation_end ; (* 6.3 *) (* a *) subprogram_body : overriding_indicator procedure_specification procedure_annotation RWis subprogram_implementation | overriding_indicator function_specification function_annotation RWis subprogram_implementation | procedure_specification procedure_annotation RWis subprogram_implementation | function_specification function_annotation RWis subprogram_implementation ; not_overriding_subprogram_body : procedure_specification procedure_annotation RWis subprogram_implementation | function_specification function_annotation RWis subprogram_implementation ; (* b *) subprogram_implementation : pragma_rep declarative_part RWbegin sequence_of_statements RWend designator semicolon | pragma_rep declarative_part RWbegin sequence_of_statements RWexception hidden_part semicolon | pragma_rep RWbegin sequence_of_statements RWend designator semicolon | pragma_rep RWbegin sequence_of_statements RWexception hidden_part semicolon | pragma_rep RWbegin code_insertion RWend designator semicolon | pragma_rep hidden_part semicolon ; (* c *) code_insertion : code_insertion apragma | code_insertion code_statement | code_statement ; (* d *) hidden_part : hide_directive ; (* 6.4 *) (* a *) procedure_call_statement : name semicolon ; (* c *) (* 7.1 *) (* a *) package_declaration : inherit_clause package_specification semicolon | package_specification semicolon ; private_package_declaration : inherit_clause RWprivate package_specification semicolon | RWprivate package_specification semicolon ; (* b *) package_specification : RWpackage dotted_simple_name package_annotation RWis visible_part RWprivate private_part | RWpackage dotted_simple_name package_annotation RWis visible_part RWend dotted_simple_name ; (* c *) visible_part : visible_part_rep ; visible_part_rep : visible_part_rep basic_declarative_item | visible_part_rep private_type_declaration | visible_part_rep deferred_constant_declaration | visible_part_rep subprogram_declaration | visible_part_rep generic_subprogram_instantiation | visible_part_rep apragma | visible_part_rep renaming_declaration | ; (* d *) private_part : basic_declarative_item_rep RWend dotted_simple_name | hidden_part ; basic_declarative_item_rep : basic_declarative_item_rep subprogram_declaration | basic_declarative_item_rep generic_subprogram_instantiation | basic_declarative_item_rep basic_declarative_item | basic_declarative_item_rep apragma | ; (* e *) package_body : RWpackage RWbody dotted_simple_name RWis package_implementation semicolon | 
RWpackage RWbody dotted_simple_name refinement_definition RWis package_implementation semicolon ; refinement_definition : annotation_start RWown refinement_clause_rep semicolon annotation_end ; refinement_clause_rep : refinement_clause_rep ampersand refinement_clause | refinement_clause ; refinement_clause : identifier RWis constituent_list | identifier ; (* illegal, but accept here and rejected in wellformedness *) constituent_list : constituent_list comma own_variable_modifier entire_variable | own_variable_modifier entire_variable ; (* f *) package_implementation : pragma_rep declarative_part RWend dotted_simple_name | pragma_rep RWend dotted_simple_name | pragma_rep declarative_part RWbegin package_initialization | pragma_rep RWbegin package_initialization | hidden_part ; (* g *) package_initialization : sequence_of_statements RWend dotted_simple_name | hidden_part ; use_type_clause : RWuse RWtype use_type_rep semicolon ; use_type_rep : use_type_rep comma type_mark | type_mark ; (* 7.2.1 *) (* a *) inherit_clause : annotation_start RWinherit inherit_clause_rep semicolon annotation_end ; inherit_clause_rep : inherit_clause_rep comma dotted_simple_name | dotted_simple_name ; (* 7.2.2 *) (* a *) package_annotation : own_variable_clause initialization_specification | own_variable_clause | ; (* 7.2.3 *) (* a *) own_variable_clause : annotation_start RWown own_variable_clause_rep annotation_end ; own_variable_clause_rep : own_variable_clause_rep own_variable_specification | own_variable_specification ; (* b *) own_variable_specification : own_variable_list semicolon | own_variable_list colon type_mark semicolon | own_variable_list left_paren property_list right_paren semicolon | own_variable_list colon type_mark left_paren property_list right_paren semicolon ; (* c *) own_variable_list : own_variable_list comma own_variable_modifier own_variable | own_variable_modifier own_variable ; own_variable_modifier : mode | protected_modifier | protected_moded_modifier | task_modifier ; protected_moded_modifier : protected_modifier in_mode | protected_modifier out_mode ; protected_modifier : RWprotected ; task_modifier : RWtask ; (* d *) own_variable : simple_name ; property_list : property_rep ; property_rep : property_rep comma property | property ; property : name_property | name_value_property ; name_property : RWdelay | identifier ; name_value_property : identifier arrow annotation_aggregate_or_expression ; (* 7.2.4 *) (* a *) initialization_specification : annotation_start RWinitializes package_variable_list semicolon annotation_end | annotation_start RWinitializes package_variable_list semicolon annotation_end postcondition ; (* b *) package_variable_list : package_variable_list comma own_variable | own_variable ; (* 7.4 *) (* a *) private_type_declaration : non_limited_private_type_declaration | limited_private_type_declaration ; non_limited_private_type_declaration : RWtype identifier RWis tag_option RWprivate semicolon ; (*tag*) limited_private_type_declaration : RWtype identifier RWis tag_option RWlimited RWprivate semicolon ; (*tag*) (*tag*) tag_option : non_abstract_tagged | abstract_tagged | non_tagged ; non_abstract_tagged : RWtagged ; abstract_tagged : RWabstract RWtagged ; non_tagged : ; (* b *) deferred_constant_declaration : identifier_list colon RWconstant type_mark semicolon ; (* 8.5 *) renaming_declaration_rep : renaming_declaration_rep renaming_declaration | renaming_declaration_rep proof_renaming_declaration | renaming_declaration_rep apragma | renaming_declaration | 
proof_renaming_declaration ; proof_renaming_declaration : proof_context function_specification RWrenames dotted_simple_name point simple_name semicolon annotation_end ; (* a *) renaming_declaration : RWfunction operator_symbol formal_part RWreturn type_mark RWrenames dotted_simple_name point operator_symbol semicolon | subprogram_specification RWrenames dotted_simple_name point simple_name semicolon | package_renaming_declaration ; package_renaming_declaration : RWpackage identifier RWrenames dotted_simple_name semicolon ; (* 9.1 *) task_type_declaration : RWtask RWtype identifier task_type_annotation task_definition | RWtask RWtype identifier known_discriminant_part task_type_annotation task_definition ; task_type_annotation : moded_global_definition dependency_relation declare_annotation | moded_global_definition dependency_relation | moded_global_definition declare_annotation | moded_global_definition ; task_definition : RWis task_pragma RWend identifier semicolon ; task_pragma : task_pragma apragma | priority_pragma ; task_body : RWtask RWbody identifier procedure_annotation RWis subprogram_implementation ; (* 9.4 *) protected_type_declaration : RWprotected RWtype identifier RWis protected_definition | RWprotected RWtype identifier known_discriminant_part RWis protected_definition ; protected_definition : protected_operation_declaration RWprivate protected_element_declaration RWend identifier semicolon | protected_operation_declaration RWend identifier semicolon | protected_operation_declaration RWprivate hidden_part semicolon ; protected_operation_declaration : priority_pragma entry_or_subprogram protected_operation_declaration_rep ; priority_pragma : RWpragma identifier left_paren expression right_paren semicolon ; entry_or_subprogram : not_overriding_subprogram_declaration | entry_declaration ; protected_operation_declaration_rep : protected_operation_declaration_rep apragma | protected_operation_declaration_rep not_overriding_subprogram_declaration | protected_operation_declaration_rep entry_declaration | ; entry_declaration : entry_specification semicolon procedure_annotation ; entry_specification : RWentry designator | RWentry designator formal_part ; protected_element_declaration : protected_element_list ; protected_element_list : protected_element_list protected_element | protected_element ; protected_element : variable_declaration | justification_statement ; protected_body : RWprotected RWbody identifier RWis protected_operation_item RWend identifier semicolon ; protected_operation_item : protected_operation_item not_overriding_subprogram_body | protected_operation_item entry_body | ; entry_body : entry_specification RWwhen identifier procedure_annotation RWis subprogram_implementation ; (* 9.6 *) delay_statement : RWdelay RWuntil expression semicolon ; (* 10.1 *) (* c *) library_unit : package_declaration | private_package_declaration | main_program_declaration | generic_declaration | generic_package_instantiation ; (* d *) secondary_unit : library_unit_body | subunit ; (* e *) library_unit_body : package_body | generic_subprogram_body ; generic_subprogram_body : not_overriding_subprogram_body ; (* f *) main_program_declaration : inherit_clause main_program_annotation not_overriding_subprogram_body | main_program_annotation not_overriding_subprogram_body | inherit_clause main_program_annotation moded_global_definition dependency_relation not_overriding_subprogram_body | inherit_clause main_program_annotation moded_global_definition not_overriding_subprogram_body | 
main_program_annotation moded_global_definition dependency_relation not_overriding_subprogram_body | main_program_annotation moded_global_definition not_overriding_subprogram_body ; (* g *) main_program_annotation : annotation_start RWmain_program annotation_end | annotation_start RWmain_program semicolon annotation_end ; (* 10.1.1 *) (* a *) context_clause : with_clause context_clause_rep pragma_rep ; (* b *) with_clause : RWwith simple_name_rep semicolon ; simple_name_rep : simple_name_rep comma dotted_simple_name | dotted_simple_name ; context_clause_rep : context_clause_rep with_clause | context_clause_rep use_type_clause | ; (* 10.2 *) (* a *) body_stub : procedure_specification procedure_annotation RWis RWseparate semicolon | overriding_indicator procedure_specification procedure_annotation RWis RWseparate semicolon | procedure_specification semicolon procedure_annotation apragma | function_specification function_annotation RWis RWseparate semicolon | overriding_indicator function_specification function_annotation RWis RWseparate semicolon | function_specification semicolon function_annotation apragma | RWpackage RWbody dotted_simple_name RWis RWseparate semicolon | task_stub semicolon | protected_stub semicolon ; task_stub : RWtask RWbody identifier procedure_annotation RWis RWseparate ; protected_stub : RWprotected RWbody identifier RWis RWseparate ; (* b *) subunit : RWseparate left_paren parent_unit_name right_paren proper_body ; parent_unit_name : parent_unit_name point simple_name | simple_name ; (* 12 *) generic_declaration : generic_subprogram_declaration | generic_package_declaration ; generic_subprogram_declaration : inherit_clause generic_formal_part not_overriding_subprogram_declaration | generic_formal_part not_overriding_subprogram_declaration ; generic_package_declaration : inherit_clause generic_formal_part package_specification semicolon | generic_formal_part package_specification semicolon ; generic_formal_part : RWgeneric generic_formal_part_rep ; generic_formal_part_rep : generic_formal_part_rep formal_object_declaration | generic_formal_part_rep formal_type_declaration | ; formal_object_declaration : identifier_list colon mode type_mark semicolon ; formal_type_declaration : RWtype identifier RWis formal_type_definition semicolon ; formal_type_definition : formal_private_type_definition | formal_discrete_type_definition | formal_signed_integer_type_definition | formal_modular_type_definition | formal_floating_point_definition | formal_ordinary_fixed_point_definition | formal_array_type_definition ; formal_private_type_definition : RWprivate | RWlimited RWprivate ; formal_discrete_type_definition : left_paren box right_paren ; formal_signed_integer_type_definition : RWrange box ; formal_modular_type_definition : RWmod box ; formal_floating_point_definition : RWdigits box ; formal_ordinary_fixed_point_definition : RWdelta box ; formal_array_type_definition : array_type_definition ; generic_package_instantiation : RWpackage dotted_simple_name package_annotation RWis RWnew identifier generic_actual_part semicolon | RWpackage dotted_simple_name package_annotation RWis RWnew identifier semicolon ; generic_subprogram_instantiation : overriding_indicator procedure_specification procedure_annotation RWis RWnew dotted_simple_name generic_actual_part semicolon | procedure_specification procedure_annotation RWis RWnew dotted_simple_name generic_actual_part semicolon | overriding_indicator RWfunction simple_name function_constraint RWis RWnew dotted_simple_name 
generic_actual_part semicolon | RWfunction simple_name function_constraint RWis RWnew dotted_simple_name generic_actual_part semicolon ; generic_actual_part : left_paren name_argument_list right_paren ; (* 13.1 *) (* a *) representation_clause : attribute_definition_clause | enumeration_representation_clause | record_representation_clause | at_clause ; (* 13.2 *) (* a *) attribute_definition_clause : RWfor attribute RWuse simple_expression semicolon ; (* 13.3 *) (* a *) enumeration_representation_clause : RWfor simple_name RWuse aggregate semicolon ; (* 13.4 *) (* a *) record_representation_clause : RWfor simple_name RWuse RWrecord record_representation_clause_rep RWend RWrecord semicolon | RWfor simple_name RWuse RWrecord mod_clause record_representation_clause_rep RWend RWrecord semicolon ; record_representation_clause_rep : record_representation_clause_rep component_clause | ; (* b *) mod_clause : RWat RWmod simple_expression semicolon ; (* c *) component_clause : simple_name RWat simple_expression RWrange arange semicolon ; (* 13.5 *) (* a *) at_clause : RWfor simple_name RWuse RWat simple_expression semicolon ; (* 13.8 *) (* a *) code_statement : name apostrophe aggregate semicolon ; (* Full ADA expressions *) ADA_name : identifier | character_literal | string_literal | ADA_name left_paren ADA_association_list right_paren | ADA_name point ADA_selector | ADA_name attribute_ident ; ADA_selector : identifier | character_literal | string_literal | RWall ; ADA_association_list : ADA_association_list comma ADA_association | ADA_association ; ADA_association : ADA_choice_list arrow ADA_range | RWothers arrow ADA_range | ADA_range ; ADA_choice_list : ADA_choice_list vertical_bar ADA_range | ADA_range ; ADA_range : ADA_name RWrange ADA_name | ADA_name RWrange ADA_simple_expression double_dot ADA_simple_expression | ADA_simple_expression | ADA_simple_expression double_dot ADA_simple_expression ; ADA_expression : ADA_relation | ADA_relation RWand ADA_expression_rep1 | ADA_relation RWandthen ADA_expression_rep2 | ADA_relation RWor ADA_expression_rep3 | ADA_relation RWorelse ADA_expression_rep4 | ADA_relation RWxor ADA_expression_rep5 ; ADA_expression_rep1 : ADA_expression_rep1 RWand ADA_relation | ADA_relation ; ADA_expression_rep2 : ADA_expression_rep2 RWandthen ADA_relation | ADA_relation ; ADA_expression_rep3 : ADA_expression_rep3 RWor ADA_relation | ADA_relation ; ADA_expression_rep4 : ADA_expression_rep4 RWorelse ADA_relation | ADA_relation ; ADA_expression_rep5 : ADA_expression_rep5 RWxor ADA_relation | ADA_relation ; ADA_relation : ADA_simple_expression | ADA_simple_expression relational_operator ADA_simple_expression | ADA_simple_expression inside ADA_range | ADA_simple_expression outside ADA_range ; ADA_simple_expression : ADA_simple_expression_opt | ADA_simple_expression binary_adding_operator ADA_term ; ADA_simple_expression_opt : unary_adding_operator ADA_term | ADA_term ; ADA_term : ADA_term multiplying_operator ADA_factor | ADA_factor ; ADA_factor : ADA_primary | ADA_primary double_star ADA_primary | RWabs ADA_primary | RWnot ADA_primary ; (* This production gives rise to a reduce/reduce conflict here, since a conflict-free *) (* grammar can't differentiate between a one-element positional unqualified aggregate *) (* and a parnentiesized expression. e.g. "(A)" could be either. *) (* We allow the conflict here, since an ADA_expression isn't semantically analyzed *) (* by the Examiner anyway, and it's more important to allow (legal) Ada syntax here. 
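The conflict is presumably between the two parenthesised alternatives of ADA_primary below, namely left_paren ADA_association_list right_paren and left_paren ADA_expression right_paren, which cannot be told apart for a single-element input such as (A) without semantic analysis.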
*) ADA_primary : numeric_literal | left_paren ADA_association_list right_paren | left_paren ADA_expression right_paren | ADA_name | ADA_name apostrophe left_paren ADA_association_list right_paren | RWnull ; (* Annotation Expressions *) predicate : annotation_expression ; quantified_expression : RWforall identifier RWin type_mark RWrange annotation_arange arrow left_paren predicate right_paren | RWforall identifier RWin type_mark arrow left_paren predicate right_paren | RWforsome identifier RWin type_mark RWrange annotation_arange arrow left_paren predicate right_paren | RWforsome identifier RWin type_mark arrow left_paren predicate right_paren ; spark-2012.0.deb/examiner/sem-check_package_prefix.adb0000644000175000017500000000276511753202336021622 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) procedure Check_Package_Prefix (Node_Pos : in LexTokenManager.Token_Position; Pack_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; OK : out Boolean) is begin if Dictionary.PrefixAllowed (Pack_Sym, Scope) then OK := True; else OK := False; ErrorHandler.Semantic_Error (Err_Num => 337, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (Pack_Sym)); end if; end Check_Package_Prefix; spark-2012.0.deb/examiner/sem-compunit-walkstatements-variableupdatehistory.adb0000644000175000017500000001463311753202336027024 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.CompUnit.WalkStatements) -------------------------------------------------------------------- -- VariableUpdateHistory -- -- Implementation Notes: -- The History_T ADT is implmented as a list of pairs using atoms -- from a given Heap.HeapRecord. See heap.ads. -------------------------------------------------------------------- package body VariableUpdateHistory is -------------------------------------------------------------------- -- Create_History -- -- Implementation Notes: -- Assumes that The_Heap is initialised using Heap.Initialize. -- Sets the A and B values of the initial atom to 0. The A and B -- pointers are set to 0 by Heap.CreateAtom. -------------------------------------------------------------------- procedure Create_History (The_Heap : in out Heap.HeapRecord; History : out History_T) is Atom : Heap.Atom; begin Heap.CreateAtom (The_Heap, Atom); Heap.UpdateAValue (The_Heap, Atom, 0); Heap.UpdateBValue (The_Heap, Atom, 0); History := History_T (Atom); end Create_History; -------------------------------------------------------------------- -- Dispose_Of_History -- -- Implementation Notes: -- The last atom in the list has a null A pointer. -------------------------------------------------------------------- procedure Dispose_Of_History (The_Heap : in out Heap.HeapRecord; History : in History_T) is Atom : Heap.Atom; Next_Atom : Heap.Atom; begin Atom := Heap.Atom (History); loop Next_Atom := Heap.APointer (The_Heap, Atom); Heap.DisposeOfAtom (The_Heap, Atom); Atom := Next_Atom; exit when Heap.IsNullPointer (Next_Atom); end loop; end Dispose_Of_History; -------------------------------------------------------------------- -- Add_Update -- -- Implementation Notes: -- A linear search is used to locate the specified Variable. -- If the end of the list is reached without finding the Variable -- a new atom is created and added to the end of the list. -- The list cannot contain duplicate variables. -------------------------------------------------------------------- procedure Add_Update (The_Heap : in out Heap.HeapRecord; History : in out History_T; Variable : in Natural; Node : in STree.SyntaxNode) is Atom : Heap.Atom; New_Atom : Heap.Atom; Value : Natural; begin Atom := Heap.Atom (History); Value := Heap.AValue (The_Heap, Atom); while Value /= Variable and not Heap.IsNullPointer (Heap.APointer (The_Heap, Atom)) loop Atom := Heap.APointer (The_Heap, Atom); Value := Heap.AValue (The_Heap, Atom); end loop; if Value /= Variable then -- Variable not found, add a new atom to the end of the list. Heap.CreateAtom (The_Heap, New_Atom); Heap.UpdateAValue (The_Heap, New_Atom, Variable); Heap.UpdateBValue (The_Heap, New_Atom, Natural (STree.NodeToRef (Node))); Heap.UpdateAPointer (The_Heap, Atom, New_Atom); else -- The variable is already in the history, update the previous node. Heap.UpdateBValue (The_Heap, Atom, Natural (STree.NodeToRef (Node))); end if; --# accept F, 30, STree.Table, "Used for precondition only" & --# F, 31, History, "History is logically updated but is represented by a pointer." & --# F, 50, History, The_Heap, "History is logically dependent on the contents of The_Heap." & --# F, 50, History, Variable, "History is logically dependent on Variable." 
& --# F, 50, History, Node, "History is logically dependent on Node."; end Add_Update; -------------------------------------------------------------------- -- Get_Last_Update -- -- Implementation Notes: -- A linear search is used to locate the specified Variable. -- If the end of the list is reached without finding the Variable -- the Result is set to STree.NullNode -------------------------------------------------------------------- procedure Get_Last_Update (The_Heap : in Heap.HeapRecord; History : in History_T; Variable : in Natural; Node : out STree.SyntaxNode) is Atom : Heap.Atom; Value : Natural; begin Atom := Heap.Atom (History); Value := Heap.AValue (The_Heap, Atom); while Value /= Variable and not Heap.IsNullPointer (Heap.APointer (The_Heap, Atom)) loop Atom := Heap.APointer (The_Heap, Atom); Value := Heap.AValue (The_Heap, Atom); end loop; if Value /= Variable then -- No previous update, return NullNode. Node := STree.NullNode; else Node := STree.RefToNode (ExaminerConstants.RefType (Heap.BValue (The_Heap, Atom))); end if; -- ASSUME Node = assignment_statement OR procedure_call_statement OR NULL SystemErrors.RT_Assert (C => Node = STree.NullNode or else Sem.Syntax_Node_Type (Node => Node) = SP_Symbols.assignment_statement or else Sem.Syntax_Node_Type (Node => Node) = SP_Symbols.procedure_call_statement, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = assignment_statement OR procedure_call_statement OR NULL in Get_Last_Update"); end Get_Last_Update; end VariableUpdateHistory; spark-2012.0.deb/examiner/sem-walk_expression_p-attribute_designator_type_from_context.adb0000644000175000017500000001744711753202336031324 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Walk_Expression_P) function Attribute_Designator_Type_From_Context (Exp_Node : STree.SyntaxNode; E_Stack : Exp_Stack.Exp_Stack_Type; T_Stack : Type_Context_Stack.T_Stack_Type) return Dictionary.Symbol is Ident_Node : STree.SyntaxNode; Arg_Exp_Node : STree.SyntaxNode; New_Context_Type : Dictionary.Symbol; Top_Of_Exp_Stack : Sem.Exp_Record; Ident_Str : LexTokenManager.Lex_String; begin Top_Of_Exp_Stack := Exp_Stack.Top (Stack => E_Stack); -- Find the attribute identifier (e.g. 
"Val" or "Max") Ident_Node := STree.Child_Node (Current_Node => Exp_Node); -- ASSUME Ident_Node = attribute_designator OR annotation_attribute_designator OR attribute_ident if STree.Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.attribute_designator or else STree.Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.annotation_attribute_designator then -- ASSUME Ident_Node = attribute_designator OR annotation_attribute_designator Ident_Node := STree.Next_Sibling (Current_Node => Ident_Node); elsif STree.Syntax_Node_Type (Node => Ident_Node) /= SP_Symbols.attribute_ident then Ident_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = attribute_ident in Attribute_Designator_Type_From_Context"); end if; -- ASSUME Ident_Node = attribute_ident SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.attribute_ident, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = attribute_ident in Attribute_Designator_Type_From_Context"); Ident_Str := STree.Node_Lex_String (Node => Ident_Node); -- Find the (possibly non-existant) first argument. Arg_Exp_Node := STree.Child_Node (Current_Node => STree.Next_Sibling (Current_Node => Ident_Node)); -- ASSUME Arg_Exp_Node = expression OR annotation_expression OR NULL if Arg_Exp_Node = STree.NullNode then -- ASSUME Arg_Exp_Node = NULL -- No arguments for this attribute, so no change in context New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); elsif STree.Syntax_Node_Type (Node => Arg_Exp_Node) = SP_Symbols.expression or else STree.Syntax_Node_Type (Node => Arg_Exp_Node) = SP_Symbols.annotation_expression then -- ASSUME Arg_Exp_Node = expression OR annotation_expression -- This attribute has 1 or 2 arguments. The context for them -- is always the same (phew!), but depends on the prefix if (LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Val_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.First_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Last_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Length_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Range_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Mod_Token) = LexTokenManager.Str_Eq) then -- 'Val takes any integer, modular, or universal integer as argument -- in SPARK95, or universal integer only in SPARK83. -- Array attributes (when they have an argument) likewise. -- There is no context available. 
New_Context_Type := Dictionary.GetUniversalIntegerType; elsif (LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Tail_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Append_Token) = LexTokenManager.Str_Eq) then -- Tail and Append never change context New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); elsif (LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Pos_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Pred_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Succ_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Min_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Max_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Floor_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Ceiling_Token) = LexTokenManager.Str_Eq) then -- Other attributes with arguments - -- context is the type given by the prefix. New_Context_Type := Top_Of_Exp_Stack.Type_Symbol; else -- Any other attribute with an argument must be an error, -- which will be picked up later on in wf_attribute_designator, but -- we still have to push something, so... New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); end if; else New_Context_Type := Dictionary.NullSymbol; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Arg_Exp_Node = expression OR annotation_expression OR NULL in Attribute_Designator_Type_From_Context"); end if; return New_Context_Type; end Attribute_Designator_Type_From_Context; spark-2012.0.deb/examiner/pile.ads0000644000175000017500000001265311753202336015702 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Cells; with Dictionary; use type Dictionary.Symbol; --# inherit Cells, --# Dictionary, --# SPARK_IO, --# Statistics; package Pile is --------------------------------------------------------------------- -- A "Pile" is an ordered set of Cells built within a Heap structure -- -- Each entry contains a Value of type Dictionary.Symbol -- -- The pile is a set, so there are no repeated entries. -- -- The pile is ordered according to the Dictionary.Declared_Before -- relation on Symbols, so that iterating over the content of a Pile -- yields the Symbols in that order. --------------------------------------------------------------------- procedure Obtain (Heap : in out Cells.Heap_Record; Node : out Cells.Cell); --# global in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Heap & --# Node from Heap; procedure Free (Heap : in out Cells.Heap_Record; Node : in Cells.Cell); --# derives Heap from *, --# Node; procedure SetParent (Heap : in out Cells.Heap_Record; Node : in Cells.Cell; ParentNode : in Cells.Cell); --# derives Heap from *, --# Node, --# ParentNode; procedure SetSibling (Heap : in out Cells.Heap_Record; Node : in Cells.Cell; SiblingNode : in Cells.Cell); --# derives Heap from *, --# Node, --# SiblingNode; -- Some Nodes in the Pile have a DAG associated with them. -- This is used, for example, to store the DAG associated -- with the initializing aggregate of a composite constant. procedure SetDAG (Heap : in out Cells.Heap_Record; Node : in Cells.Cell; DAGNode : in Cells.Cell); --# derives Heap from *, --# DAGNode, --# Node; procedure SetNodeSymbol (Heap : in out Cells.Heap_Record; Node : in Cells.Cell; Symbol : in Dictionary.Symbol); --# derives Heap from *, --# Node, --# Symbol; -- Inserts Symbol with DAG into the pile. If Symbol is already -- present, then no action. The "Declared_Before" ordering -- of the Pile is maintained. procedure Insert (Heap : in out Cells.Heap_Record; Symbol : in Dictionary.Symbol; DAG : in Cells.Cell; Node : in out Cells.Cell); --# global in out Statistics.TableUsage; --# derives Heap from *, --# DAG, --# Node, --# Symbol & --# Node, --# Statistics.TableUsage from *, --# Heap, --# Node, --# Symbol; -- Determine if Symbol is already in the pile function Contains (Heap : in Cells.Heap_Record; Symbol : in Dictionary.Symbol; Node : in Cells.Cell) return Boolean; function IsNull (Node : Cells.Cell) return Boolean; function Parent (Heap : Cells.Heap_Record; Node : Cells.Cell) return Cells.Cell; function Sibling (Heap : Cells.Heap_Record; Node : Cells.Cell) return Cells.Cell; function DAG (Heap : Cells.Heap_Record; Node : Cells.Cell) return Cells.Cell; function NodeSymbol (Heap : Cells.Heap_Record; Node : Cells.Cell) return Dictionary.Symbol; ------------------------------------------------------- -- Assertion Checking and Debugging subprograms ------------------------------------------------------- -- Print a Pile starting at Node in textual format -- to Standard_Output procedure PrintPile (Heap : Cells.Heap_Record; Node : Cells.Cell); --# derives null from Heap, --# Node; -- Returns True iff the Pile starting at Node is in -- order given by Dictionary.Declared_Before over the Symbol -- field of each node. 
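   -- Illustrative note (the names below are hypothetical, not part of this
   -- specification): OrderOK is intended for assertion checking and
   -- debugging, for instance a check of the form
   --    pragma Assert (Pile.OrderOK (Heap, Root));
   -- placed after a sequence of Insert calls, where Heap is the enclosing
   -- Cells.Heap_Record and Root is the first cell of the pile.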
function OrderOK (Heap : Cells.Heap_Record; Node : Cells.Cell) return Boolean; end Pile; spark-2012.0.deb/examiner/sem-wf_type_mark.adb0000644000175000017500000004432111753202336020176 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- Overview: -- Starting at node type mark there are 2 possibilities: type mark indicates -- a type T or a package and type P.T -- Case 1: type T. WF if T is a type and T is visible. -- Case 2: type P.T WF if P is visible and P is a package and P.T is -- visible and P.T is a type. -- Possible errors: Not visible (semantic error 1 or 754 or 755) -- Not a type (semantic error 63) -- Not a package (semantic error 9) -------------------------------------------------------------------------------- separate (Sem) procedure Wf_Type_Mark (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Context : in Dictionary.Contexts; Type_Sym : out Dictionary.Symbol) is Current_Node : STree.SyntaxNode; ------------------------------------------------------- procedure Check_Symbol (Node_Pos : in LexTokenManager.Token_Position; Id_Node : in STree.SyntaxNode; Sym : in Dictionary.Symbol; Prefix : in LexTokenManager.Lex_String; Type_Sym : out Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Id_Node, --# LexTokenManager.State, --# Node_Pos, --# Prefix, --# SPARK_IO.File_Sys, --# STree.Table, --# Sym & --# STree.Table from *, --# Dictionary.Dict, --# Id_Node, --# Sym & --# Type_Sym from Dictionary.Dict, --# Sym; --# pre Syntax_Node_Type (Id_Node, STree.Table) = SP_Symbols.identifier; --# post (Dictionary.Is_Null_Symbol (Type_Sym) or Dictionary.IsTypeMark (Type_Sym, Dictionary.Dict)) and STree.Table = STree.Table~; is begin if Dictionary.Is_Null_Symbol (Sym) then -- not declared or visible Type_Sym := Dictionary.GetUnknownTypeMark; ErrorHandler.Semantic_Error2 (Err_Num => 145, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str1 => Node_Lex_String (Node => Id_Node), Id_Str2 => Prefix); elsif Dictionary.IsTypeMark (Sym) then if Dictionary.TypeIsWellformed (Sym) then STree.Set_Node_Lex_String (Sym => Sym, Node => Id_Node); Type_Sym := Sym; else Type_Sym := Dictionary.GetUnknownTypeMark; ErrorHandler.Semantic_Error2 (Err_Num => 145, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str1 => Node_Lex_String (Node => Id_Node), 
Id_Str2 => Prefix); end if; else -- not a type Type_Sym := Dictionary.GetUnknownTypeMark; ErrorHandler.Semantic_Error (Err_Num => 63, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Id_Node), Id_Str => Node_Lex_String (Node => Id_Node)); end if; end Check_Symbol; ------------------------------------------------------- procedure Check_Type_Mark (Node_Pos : in LexTokenManager.Token_Position; Id_Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Context : in Dictionary.Contexts; Type_Sym : out Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Context, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Id_Node, --# LexTokenManager.State, --# Node_Pos, --# SPARK_IO.File_Sys, --# STree.Table & --# STree.Table, --# Type_Sym from CommandLineData.Content, --# Context, --# Current_Scope, --# Dictionary.Dict, --# Id_Node, --# LexTokenManager.State, --# STree.Table; --# pre Syntax_Node_Type (Id_Node, STree.Table) = SP_Symbols.identifier; --# post (Dictionary.Is_Null_Symbol (Type_Sym) or Dictionary.IsTypeMark (Type_Sym, Dictionary.Dict)) and STree.Table = STree.Table~; is begin Check_Symbol (Node_Pos => Node_Pos, Id_Node => Id_Node, Sym => Dictionary.LookupItem (Name => Node_Lex_String (Node => Id_Node), Scope => Current_Scope, Context => Context, Full_Package_Name => False), Prefix => LexTokenManager.Null_String, Type_Sym => Type_Sym); end Check_Type_Mark; ------------------------------------------------------- procedure Check_Dotted_Type_Mark (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Context : in Dictionary.Contexts; Type_Sym : out Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Context, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table & --# STree.Table, --# Type_Sym from CommandLineData.Content, --# Context, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.type_mark; --# post (Dictionary.Is_Null_Symbol (Type_Sym) or Dictionary.IsTypeMark (Type_Sym, Dictionary.Dict)) and STree.Table = STree.Table~; is Sym : Dictionary.Symbol; Current_Node : STree.SyntaxNode; Prefix : LexTokenManager.Lex_String; Prev_Prefix : LexTokenManager.Lex_String; Prefix_OK : Boolean; In_Prefix : Boolean; Err_Num : Natural; ---------------------------------------------------------------------------- -- In the case where a Prefix is not visible, we need to distinguish -- between two cases: -- 1) Where the Prefix appears in a public child package and might denote -- a parent of that package. In this case, we need to issue a message -- saying that the named package needs to be inherited (not NOT withed) -- to be visible. -- 2) Where the child package may be inherited but the prefix erroneously -- includes the grandparent package. -- 3) Otherwise. In these cases, we issue a message saying that the -- named package has to be BOTH inherited and withed. 
-- -- Example 1. Consider a type mark P.T appearing in a public child A.P.B.C -- In this case, we find that the prefix "P" _is_ a potential parent -- package, so P needs to be inherited. -- -- Example 2. Consider a type mark A.P.T appearing in a public child A.P.B.C -- In this case, we find that the prefix "P" _is_ a potential parent -- package but should not be prefixed by the garndparent A. -- -- Example 3. Consider a type mark P.T appearing in a public child X.Y.Z -- In this case, we find that P cannot be a parent, so P must be -- inherited AND withed. ---------------------------------------------------------------------------- function Prefix_Can_Denote_An_Ancestor (Prefix : LexTokenManager.Lex_String; Current_Scope : Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; --# in LexTokenManager.State; is EP : Dictionary.Symbol; -- Enclosing package where Prefix appears CP : Dictionary.Symbol; -- Current Parent package Result : Boolean; begin EP := Dictionary.GetEnclosingPackage (Current_Scope); CP := Dictionary.GetPackageParent (EP); Result := False; -- CP = NullSymbol when EP is a library-level package. while not Dictionary.Is_Null_Symbol (CP) loop -- If the Prefix matches the current parent, then we're done. -- If not, then look for the grand-parent and try again until we -- reach library level. if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Prefix, Lex_Str2 => Dictionary.GetSimpleName (CP)) = LexTokenManager.Str_Eq then Result := True; exit; end if; CP := Dictionary.GetPackageParent (CP); end loop; return Result; end Prefix_Can_Denote_An_Ancestor; begin -- Check_Dotted_Type_Mark In_Prefix := False; Current_Node := Last_Child_Of (Start_Node => Node); -- ASSUME Current_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Current_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = identifier in Check_Dotted_Type_Mark"); Prefix := Node_Lex_String (Node => Current_Node); Sym := Dictionary.LookupItem (Name => Prefix, Scope => Current_Scope, Context => Context, Full_Package_Name => False); loop -- we need a loop to handle multiple prefixes --# assert STree.Table = STree.Table~; if Dictionary.Is_Null_Symbol (Sym) then -- not declared or visible Type_Sym := Dictionary.GetUnknownTypeMark; if Prefix_Can_Denote_An_Ancestor (Prefix => Prefix, Current_Scope => Current_Scope) then --# accept F, 22, "Invariant expression OK here"; if In_Prefix then Err_Num := 756; else Err_Num := 755; end if; --# end accept; else Err_Num := 754; end if; ErrorHandler.Semantic_Error (Err_Num => Err_Num, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Current_Node), Id_Str => Prefix); exit; end if; if not Dictionary.IsPackage (Sym) then -- can't be dotted Type_Sym := Dictionary.GetUnknownTypeMark; ErrorHandler.Semantic_Error (Err_Num => 9, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Current_Node), Id_Str => Prefix); exit; end if; -- Prefix (Sym) is visible and it's a package Check_Package_Prefix (Node_Pos => Node_Position (Node => Current_Node), Pack_Sym => Sym, Scope => Current_Scope, OK => Prefix_OK); if not Prefix_OK then Type_Sym := Dictionary.GetUnknownTypeMark; exit; end if; STree.Set_Node_Lex_String (Sym => Sym, Node => Current_Node); Current_Node := Next_Sibling (Current_Node => Parent_Node (Current_Node => Current_Node)); -- ASSUME Current_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Current_Node) = SP_Symbols.identifier, 
Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = identifier in Check_Dotted_Type_Mark"); Sym := Dictionary.LookupSelectedItem (Prefix => Sym, Selector => Node_Lex_String (Node => Current_Node), Scope => Current_Scope, Context => Context); if Next_Sibling (Current_Node => Parent_Node (Current_Node => Current_Node)) = STree.NullNode then -- ASSUME Next_Sibling (Current_Node => Parent_Node (Current_Node => Current_Node)) = NULL -- no more identifiers to the right, so we should now have type name Check_Symbol (Node_Pos => Node_Position (Node => Node), Id_Node => Current_Node, Sym => Sym, Prefix => Prefix, Type_Sym => Type_Sym); exit; end if; -- elsif Syntax_Node_Type (Node => Next_Sibling (Current_Node => Parent_Node (Current_Node => Current_Node))) /= -- SP_Symbols.identifier then SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Next_Sibling (Current_Node => Parent_Node (Current_Node => Current_Node))) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Sibling (Current_Node => Parent_Node (Current_Node => Current_Node)) = identifier OR " & "NULL in Check_Dotted_Type_Mark"); -- end if; -- Check that there are not recursive layers of the -- same package name (e.g. A.A.B) as the Dictionary -- lookup above will always return the same A Prev_Prefix := Prefix; Prefix := Node_Lex_String (Node => Current_Node); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Prev_Prefix, Lex_Str2 => Prefix) = LexTokenManager.Str_Eq then ErrorHandler.Semantic_Error (Err_Num => 145, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Current_Node), Id_Str => Node_Lex_String (Node => Current_Node)); end if; -- otherwise go round again In_Prefix := True; end loop; end Check_Dotted_Type_Mark; begin -- Wf_Type_Mark Current_Node := Child_Node (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME Current_Node = identifier OR dotted_simple_name if Syntax_Node_Type (Node => Current_Node) = SP_Symbols.identifier then -- ASSUME Current_Node = identifier Check_Type_Mark (Node_Pos => Node_Position (Node => Node), Id_Node => Current_Node, Current_Scope => Current_Scope, Context => Context, Type_Sym => Type_Sym); elsif Syntax_Node_Type (Node => Current_Node) = SP_Symbols.dotted_simple_name then -- ASSUME Current_Node = dotted_simple_name Check_Dotted_Type_Mark (Node => Node, Current_Scope => Current_Scope, Context => Context, Type_Sym => Type_Sym); else Type_Sym := Dictionary.NullSymbol; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = identifier OR dotted_simple_name in Wf_Type_Mark"); end if; end Wf_Type_Mark; spark-2012.0.deb/examiner/flowanalyser.ads0000644000175000017500000001302711753202336017453 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with ComponentManager; with Dictionary; with Heap; with LexTokenManager; with RefList; with STree; use type Dictionary.Modes; use type Heap.Atom; use type STree.SyntaxNode; --# inherit CommandLineData, --# ComponentErrors, --# ComponentManager, --# Debug, --# Dictionary, --# ErrorHandler, --# ExaminerConstants, --# Heap, --# LexTokenManager, --# RefList, --# RelationAlgebra, --# RelationAlgebra.Debug, --# SeqAlgebra, --# SPARK_IO, --# SP_Symbols, --# Statistics, --# STree, --# SystemErrors; package FlowAnalyser is procedure FlowAnalyse (SubprogSym : in Dictionary.Symbol; StartNode : in STree.SyntaxNode; EndPosition : in LexTokenManager.Token_Position; ComponentData : in out ComponentManager.ComponentData; TheHeap : in out Heap.HeapRecord; Table : in RefList.HashTable; DataFlowErrorFound : out Boolean); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives ComponentData, --# Statistics.TableUsage, --# TheHeap from *, --# ComponentData, --# Dictionary.Dict, --# EndPosition, --# StartNode, --# STree.Table, --# SubprogSym, --# Table, --# TheHeap & --# DataFlowErrorFound from ComponentData, --# Dictionary.Dict, --# EndPosition, --# StartNode, --# STree.Table, --# SubprogSym, --# Table, --# TheHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ComponentData, --# Dictionary.Dict, --# EndPosition, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# StartNode, --# STree.Table, --# SubprogSym, --# Table, --# TheHeap; procedure FlowAnalysePartition (Node : in STree.SyntaxNode; TheHeap : in out Heap.HeapRecord); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table, --# TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# TheHeap; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.main_program_declaration; end FlowAnalyser; spark-2012.0.deb/examiner/sem.idx0000644000175000017500000001072511753202337015551 0ustar eugeneugensem specification is in sem.ads sem body is in sem.adb sem.check_no_overloading_from_tagged_ops subunit is in sem-check_no_overloading_from_tagged_ops.adb sem.compunit subunit is in sem-compunit.adb sem.compunit.walkstatements subunit is in sem-compunit-walkstatements.adb sem.compunit.wf_package_body subunit is in sem-compunit-wf_package_body.adb sem.compunit.wf_package_body.wf_refine subunit is in sem-compunit-wf_package_body-wf_refine.adb sem.compunit.wf_subprogram_body subunit is in sem-compunit-wf_subprogram_body.adb sem.dependency_relation subunit is in sem-dependency_relation.adb sem.walk_expression_p subunit is in sem-walk_expression_p.adb 
sem.walk_expression_p.check_binary_operator subunit is in sem-walk_expression_p-check_binary_operator.adb sem.walk_expression_p.walk_annotation_expression subunit is in sem-walk_expression_p-walk_annotation_expression.adb sem.walk_expression_p.wf_attribute_designator subunit is in sem-walk_expression_p-wf_attribute_designator.adb sem.walk_expression_p.wf_primary subunit is in sem-walk_expression_p-wf_primary.adb sem.wf_basic_declarative_item subunit is in sem-wf_basic_declarative_item.adb sem.wf_basic_declarative_item.wf_basic_declaration subunit is in sem-wf_basic_declarative_item-wf_basic_declaration.adb sem.wf_basic_declarative_item.wf_basic_declaration.wf_full_type_declaration subunit is in sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration.adb sem.wf_basic_declarative_item.wf_basic_declaration.wf_full_type_declaration.wf_protected_type_declaration subunit is in sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_protected_type_declaration.adb sem.wf_basic_declarative_item.wf_basic_declaration.wf_subtype_declaration subunit is in sem-wf_basic_declarative_item-wf_basic_declaration-wf_subtype_declaration.adb sem.wf_context_clause_package_body subunit is in sem-wf_context_clause_package_body.adb sem.wf_formal_part subunit is in sem-wf_formal_part.adb sem.wf_generic_subprogram_instantiation subunit is in sem-wf_generic_subprogram_instantiation.adb sem.wf_package_declaration subunit is in sem-wf_package_declaration.adb sem.wf_package_declaration.wf_package_specification subunit is in sem-wf_package_declaration-wf_package_specification.adb sem.wf_package_declaration.wf_package_specification.wf_anno subunit is in sem-wf_package_declaration-wf_package_specification-wf_anno.adb sem.wf_package_declaration.wf_package_specification.wf_visible subunit is in sem-wf_package_declaration-wf_package_specification-wf_visible.adb sem.wf_pragma subunit is in sem-wf_pragma.adb ././@LongLink0000000000000000000000000000016200000000000011564 Lustar rootrootspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_enum.adbspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaratio0000644000175000017500000001070511753202336033123 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with SLI; separate (Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Full_Type_Declaration) procedure Wf_Enum (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Ident_Node : in STree.SyntaxNode; Dec_Loc : in LexTokenManager.Token_Position) is It : STree.Iterator; Next_Node : STree.SyntaxNode; Enumeration_Symbol, Enumeration_Literal_Symbol : Dictionary.Symbol; Item_Str, Store_Rep : LexTokenManager.Lex_String; Pos_Number, Last_Pos_Number : Maths.Value; Unused_Err_Code : Maths.ErrorCode; begin Dictionary.Add_Enumeration_Type (Name => Node_Lex_String (Node => Ident_Node), Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Dec_Loc, End_Position => Dec_Loc), Scope => Scope, Context => Dictionary.ProgramContext, The_Type => Enumeration_Symbol); STree.Add_Node_Symbol (Node => Ident_Node, Sym => Enumeration_Symbol); if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Symbol (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Ident_Node, Symbol => Enumeration_Symbol, Is_Declaration => True); end if; Pos_Number := Maths.ZeroInteger; It := Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.identifier and --# Next_Node = Get_Node (It) and --# STree.Table = STree.Table~; Item_Str := Node_Lex_String (Node => Next_Node); if Dictionary.IsDefined (Name => Item_Str, Scope => Scope, Context => Dictionary.ProofContext, Full_Package_Name => False) then ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Item_Str); else Maths.StorageRep (Pos_Number, Store_Rep); Dictionary.AddEnumerationLiteral (Name => Item_Str, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Next_Node), End_Position => Node_Position (Node => Next_Node)), Position => Store_Rep, The_Type => Enumeration_Symbol, TheEnumerationLiteral => Enumeration_Literal_Symbol); STree.Add_Node_Symbol (Node => Next_Node, Sym => Enumeration_Literal_Symbol); Last_Pos_Number := Pos_Number; --# accept Flow, 10, Unused_Err_Code, "Expected ineffective assignment"; Maths.Add (Last_Pos_Number, Maths.OneInteger, --to get Pos_Number, Unused_Err_Code); --# end accept; end if; It := STree.NextNode (It); end loop; --# accept Flow, 33, Unused_Err_Code, "Expected to be neither referenced nor exported"; end Wf_Enum; spark-2012.0.deb/examiner/errorhandler-conversions-tostring-condldependency.adb0000644000175000017500000002122211753202336026760 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (ErrorHandler.Conversions.ToString) procedure CondlDependency (Err_Num : in Error_Types.NumericError; With_Explanation : in Boolean; E_Str : in out E_Strings.T) is Err_Type : ErrorHandler.Full_Depend_Err_Type; procedure CondlDependencyExpl (E_Str : in out E_Strings.T) --# global in Err_Type; --# derives E_Str from *, --# Err_Type; is separate; -- Note that the parameter names for this subunit are chosen to make it as easy as -- possible to auto-generate the subunit from this, its parent, file. The -- generation requires copying the case statement below, stripping out the -- current Append'Thing' statements and adding an Append_String for the -- explanatory text that is delineated by --! comments. procedure Append_Explanation --# global in Err_Type; --# in With_Explanation; --# in out E_Str; --# derives E_Str from *, --# Err_Type, --# With_Explanation; is Explanation_String : E_Strings.T := E_Strings.Empty_String; begin if With_Explanation then -- we need to at least look for an explanation CondlDependencyExpl (E_Str => Explanation_String); if E_Strings.Get_Length (E_Str => Explanation_String) > 0 then -- there actually is one E_Strings.Append_String (E_Str => E_Str, Str => ErrorHandler.Explanation_Prefix); E_Strings.Append_Examiner_String (E_Str1 => E_Str, E_Str2 => Explanation_String); E_Strings.Append_String (E_Str => E_Str, Str => ErrorHandler.Explanation_Postfix); end if; end if; end Append_Explanation; begin Err_Type := ErrorHandler.Dependency_Err_Type'Val (Err_Num.ErrorNum - Error_Types.CondDependencyErrorOffset); case Err_Type is -- HTML Directives --! <"flow-"> --! <"??? Flow Error : "><" : "> when ErrorHandler.May_Be_Used_New => --! 601 Append_Export_Var (E_Str, Err_Num.Name2, Err_Num.Scope, True); E_Strings.Append_String (E_Str => E_Str, Str => " may be derived from the imported value(s) of "); Append_Export_Var (E_Str, Err_Num.Name1, Err_Num.Scope, False); --! Here the item on the left of "may be derived from ..." is an exported variable and --! the item(s) on the right are imports of a procedure subprogram. --! The message reports a possible dependency, found in the code, which --! does not appear in the specified dependency relation (derives annotation). --! The discrepancy could be caused by an error in the subprogram code which implements --! an unintended dependency. It could also be in an error in the subprogram derives annotation --! which omits a necessary and intended dependency. Finally, the Examiner may be reporting --! a false coupling between two items resulting from a non-executable code path or the --! sharing of disjoint parts of structured or abstract data (e.g one variable writing to one element --! of an array and another variable reading back a different element). --! Unexpected dependencies should be investigated carefully and only accepted without modification --! of either code or annotation if it is certain they are of "false coupling" kind. when ErrorHandler.May_Be_Used => -- Legacy --! 
601 E_Strings.Append_String (E_Str => E_Str, Str => "The imported value of "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " may be used in the derivation of "); Append_Export_Var (E_Str, Err_Num.Name2, Err_Num.Scope, False); --! Here first item is an import and the second is an export of a procedure subprogram. --! The message reports a possible dependency, found in the code, which --! does not appear in the specified dependency relation. --! This version of the message has been retained for backward compatibility. when ErrorHandler.May_Be_Used_Continue => E_Strings.Append_String (E_Str => E_Str, Str => ", "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); when ErrorHandler.Uninitialised => --! 602 E_Strings.Append_String (E_Str => E_Str, Str => "The undefined initial value of "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " may be used in the derivation of "); Append_Export_Var (E_Str, Err_Num.Name2, Err_Num.Scope, False); --! Here XXX is a non-imported variable, and YYY is an export, of a --! procedure subprogram. when ErrorHandler.Integrity_Violation => --! 605 E_Strings.Append_String (E_Str => E_Str, Str => "Information flow from "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " to "); Append_Export_Var (E_Str, Err_Num.Name2, Err_Num.Scope, False); E_Strings.Append_String (E_Str => E_Str, Str => " violates information flow policy"); --! This message indicates a violation of security or safety policy, such --! as information flow from a Secret input to an Unclassified output. when ErrorHandler.May_Be_Integrity_Violation => --! 606 E_Strings.Append_String (E_Str => E_Str, Str => "The imported value of "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " may be used in the derivation of "); Append_Export_Var (E_Str, Err_Num.Name2, Err_Num.Scope, False); E_Strings.Append_String (E_Str => E_Str, Str => ". Furthermore, this information flow violates "); E_Strings.Append_String (E_Str => E_Str, Str => "information flow policy."); --! Here XXX is an import and YYY is an export of a procedure subprogram. --! The message reports a possible dependency, found in the code, which --! does not appear in the specified dependency relation. If this dependency --! did appear in the dependency relation, then it would also constitute --! an integrity violation. when others => E_Strings.Append_String (E_Str => E_Str, Str => "UNKNOWN ERROR NUMBER PASSED TO Condl_Dependency"); end case; Append_Explanation; if Err_Type /= ErrorHandler.May_Be_Used_Continue and Err_Type /= ErrorHandler.May_Be_Used_New then E_Strings.Append_String (E_Str => E_Str, Str => "."); end if; end CondlDependency; spark-2012.0.deb/examiner/dag-type_constraint.adb0000644000175000017500000007147211753202336020712 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with ContextManager; separate (DAG) package body Type_Constraint is ------------------------------------------------------------------------------ -- Utility procedures ------------------------------------------------------------------------------ -- The_Type xor The_Constraint must be non-null. We use the type -- if we have an actual index; we use the_constraint for the magic -- unconstrained array index. procedure Create_Quant_Ident (Quant_Ident : out Dictionary.Symbol; The_Type : in Dictionary.Symbol; The_Constraint : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Quant_Id_Number : in out Positive) --# global in out Dictionary.Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# post Quant_Id_Number = Quant_Id_Number~ + 1; is Ident_Str : LexTokenManager.Lex_String; ------------------------------------------------------------------------------ procedure Create_Ident_Str (Ident_Str : out LexTokenManager.Lex_String; Suffix : in Natural) --# global in out LexTokenManager.State; is Ex_Lin : E_Strings.T; Ex_Str : E_Strings.T; begin -- Add I___ to start of Ex_Lin Ex_Str := E_Strings.Copy_String (Str => "I___"); E_Strings.Put_Int_To_String (Dest => Ex_Lin, Item => Suffix, Start_Pt => 1, Base => 10); E_Strings.Append_Examiner_String (E_Str1 => Ex_Str, E_Str2 => Ex_Lin); LexTokenManager.Insert_Examiner_String (Str => Ex_Str, Lex_Str => Ident_Str); end Create_Ident_Str; begin -- Create_Quant_Ident Create_Ident_Str (Ident_Str => Ident_Str, Suffix => Quant_Id_Number); SystemErrors.RT_Assert (C => Quant_Id_Number < Positive'Last, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Seriously? 
You have positive__last number of nested quantifiers?"); Quant_Id_Number := Quant_Id_Number + 1; Dictionary.AddQuantifiedVariable (Name => Ident_Str, Comp_Unit => ContextManager.NullUnit, Declaration => Dictionary.Null_Location, TypeMark => The_Type, TheConstraint => The_Constraint, Region => Dictionary.GetRegion (Scope), Variable => Quant_Ident); end Create_Quant_Ident; procedure Conjoin_Comma (NewConjunct : in Cells.Cell; VCGHeap : in out Cells.Heap_Record; Conjuncts : in out Cells.Cell) --# global in out Statistics.TableUsage; is CommaCell : Cells.Cell; begin if Cells.Is_Null_Cell (Conjuncts) then Conjuncts := NewConjunct; else DAG.CreateOpCell (CommaCell, VCGHeap, SP_Symbols.comma); DAG.SetLeftArgument (CommaCell, Conjuncts, VCGHeap); DAG.SetRightArgument (CommaCell, NewConjunct, VCGHeap); Conjuncts := CommaCell; end if; end Conjoin_Comma; procedure Process_Always_Valid (The_Type : in Dictionary.Symbol; The_Expression : in Cells.Cell; Assoc_Var : in Dictionary.Symbol; The_Constraint : out Cells.Cell; VCG_Heap : in out Cells.Heap_Record) --# global in out Statistics.TableUsage; is Type_Cell : Cells.Cell; AV_Cell : Cells.Cell; begin Cells.Create_Cell (VCG_Heap, Type_Cell); Cells.Set_Kind (VCG_Heap, Type_Cell, Cell_Storage.Reference); Cells.Set_Symbol_Value (VCG_Heap, Type_Cell, The_Type); -- Set the type Cells.Set_Assoc_Var (VCG_Heap, Type_Cell, Assoc_Var); -- Set the variable Cells.Create_Cell (VCG_Heap, AV_Cell); Cells.Set_Kind (VCG_Heap, AV_Cell, Cell_Storage.Attrib_Function); Cells.Set_Lex_Str (VCG_Heap, AV_Cell, LexTokenManager.Always_Valid_Token); Cells.Set_Assoc_Var (VCG_Heap, AV_Cell, Assoc_Var); Cells.Set_B_Ptr (VCG_Heap, AV_Cell, The_Expression); Cells.Create_Cell (VCG_Heap, The_Constraint); Cells.Set_Kind (VCG_Heap, The_Constraint, Cell_Storage.Op); Cells.Set_Op_Symbol (VCG_Heap, The_Constraint, SP_Symbols.apostrophe); Cells.Set_A_Ptr (VCG_Heap, The_Constraint, Type_Cell); Cells.Set_B_Ptr (VCG_Heap, The_Constraint, AV_Cell); end Process_Always_Valid; ------------------------------------------------------------------------------ -- The actual meat ------------------------------------------------------------------------------ procedure Process_Type_Rec (The_Type : in Dictionary.Symbol; The_Expression : in Cells.Cell; Assoc_Var : in Dictionary.Symbol; Constraint_List : out Cells.Utility.List.Linked_List; VCG_Heap : in out Cells.Heap_Record; Context : in out Context_T) is --# hide Process_Type_Rec; begin Process_Type (The_Type => The_Type, The_Expression => The_Expression, Assoc_Var => Assoc_Var, Constraint_List => Constraint_List, VCG_Heap => VCG_Heap, Context => Context); end Process_Type_Rec; procedure Process_Discrete (The_Type : in Dictionary.Symbol; The_Expression : in Cells.Cell; The_Constraint : out Cells.Cell; VCG_Heap : in out Cells.Heap_Record) is Exp_Copy : Cells.Cell; Attr_First, Attr_Last : Cells.Cell; Lower_Bound, Upper_Bound : Cells.Cell; begin if Dictionary.IsTypeMark (The_Type) and then Dictionary.TypeIsBoolean (The_Type) then Cells.Utility.Create_Bool (VCG_Heap, True, The_Constraint); else -- Some callers of this may dispose of the original expression, -- so we best make a copy here. Structures.CopyStructure (VCG_Heap, The_Expression, Exp_Copy); -- Assemble the lower bound. 
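            -- The lower and upper bounds assembled below are conjoined, so the
            -- resulting constraint has the general shape
            --    The_Expression >= The_Type'First and The_Expression <= The_Type'Last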
Cells.Utility.Create_Type_Attribute (VCG_Heap => VCG_Heap, The_Type => The_Type, The_Attribute => Cells.Utility.Tick_First, Result => Attr_First); Cells.Utility.Create_Binary_Op_Cell (VCG_Heap => VCG_Heap, Left => Exp_Copy, Op => SP_Symbols.greater_or_equal, Right => Attr_First, Result => Lower_Bound); -- Assemble the upper bound. Cells.Utility.Create_Type_Attribute (VCG_Heap => VCG_Heap, The_Type => The_Type, The_Attribute => Cells.Utility.Tick_Last, Result => Attr_Last); Cells.Utility.Create_Binary_Op_Cell (VCG_Heap => VCG_Heap, Left => Exp_Copy, Op => SP_Symbols.less_or_equal, Right => Attr_Last, Result => Upper_Bound); -- Join the two together to form the constraint. Cells.Utility.Create_And (VCG_Heap => VCG_Heap, Left => Lower_Bound, Right => Upper_Bound, Conjunct => The_Constraint); end if; end Process_Discrete; -- The Assoc_Var is magical. If given, we no longer generate -- in-type constraints for the fields. Rather, the assumption is -- that we: -- -- * Only want to produce hypotheses. -- -- * All fields are assumed to never be in type. -- -- * Some fields marked valid will yield a 'always_valid magic -- function. procedure Process_Record (Record_Type : in Dictionary.Symbol; The_Expression : in Cells.Cell; Assoc_Var : in Dictionary.Symbol; Constraint_List : out Cells.Utility.List.Linked_List; VCG_Heap : in out Cells.Heap_Record; Context : in out Context_T) --# global in out Dictionary.Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; is Component : Dictionary.Symbol; Component_Type : Dictionary.Symbol; Iter : Dictionary.Iterator; Field_Cell : Cells.Cell; Tmp : Cells.Cell; Tmp_List : Cells.Utility.List.Linked_List; begin Cells.Utility.List.Create (VCG_Heap, Constraint_List); -- We have two cases here, and yes, this is nasty. If we have -- an assoc var then we must necessarily also have an own in -- and we use it to go through the record; if we only have an -- expected type we use that and generate actual type -- constraints. if not Dictionary.Is_Null_Symbol (Assoc_Var) then if Dictionary.VariableOrSubcomponentIsMarkedValid (Assoc_Var) then Process_Always_Valid (The_Type => Record_Type, The_Expression => The_Expression, Assoc_Var => Assoc_Var, The_Constraint => Tmp, VCG_Heap => VCG_Heap); Cells.Utility.List.Append (VCG_Heap, Constraint_List, Tmp); else Component := Dictionary.GetFirstRecordSubcomponent (Assoc_Var); while not Dictionary.Is_Null_Symbol (Component) loop Component_Type := Dictionary.GetType (Component); -- Create a field access to the record. Cells.Utility.Create_Record_Access (The_Record => The_Expression, The_Component => Component, The_Field => Field_Cell, VCG_Heap => VCG_Heap); -- Work out the constraint and join it onto The_Constraint. Process_Type_Rec (The_Type => Component_Type, The_Expression => Field_Cell, Assoc_Var => Component, Constraint_List => Tmp_List, VCG_Heap => VCG_Heap, Context => Context); Cells.Utility.List.Append_List (VCG_Heap, Constraint_List, Tmp_List); Component := Dictionary.GetNextRecordSubcomponent (Component); end loop; -- Note: We could check if all fields have been marked -- always valid and actually generate a `this record is -- always valid' instead of one for each field. end if; else Iter := Dictionary.FirstRecordComponent (Record_Type); while not Dictionary.IsNullIterator (Iter) loop Component := Dictionary.CurrentSymbol (Iter); Component_Type := Dictionary.GetType (Component); -- Create a field access to the record. 
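               -- The component's constraint is then computed recursively and
               -- appended, so a record contributes the conjunction of its
               -- components' constraints to Constraint_List.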
Cells.Utility.Create_Record_Access (The_Record => The_Expression, The_Component => Component, The_Field => Field_Cell, VCG_Heap => VCG_Heap); -- Work out the constraint and add it onto the constraint -- list. Process_Type_Rec (The_Type => Component_Type, The_Expression => Field_Cell, Assoc_Var => Dictionary.NullSymbol, Constraint_List => Tmp_List, VCG_Heap => VCG_Heap, Context => Context); Cells.Utility.List.Append_List (VCG_Heap, Constraint_List, Tmp_List); Iter := Dictionary.NextSymbol (Iter); end loop; end if; end Process_Record; -- Again, either The_Index_Type or The_P_Constraint must not be -- null. procedure Make_Quantifier (The_Index_Type : in Dictionary.Symbol; The_P_Constraint : in Dictionary.Symbol; The_Quantifier : out Cells.Cell; The_Body_Ptr : out Cells.Cell; The_Identifier : out Dictionary.Symbol; VCG_Heap : in out Cells.Heap_Record; Context : in out Context_T) --# global in out Dictionary.Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; is Decl_Cell : Cells.Cell; Comma_Cell : Cells.Cell; Range_Constraint : Cells.Cell; Ident_Ref : Cells.Cell; Type_Ref : Cells.Cell; Real_Index_Type : Dictionary.Symbol; begin -- Make the identifier. Create_Quant_Ident (Quant_Ident => The_Identifier, The_Constraint => The_P_Constraint, The_Type => The_Index_Type, Scope => Context.Scope, Quant_Id_Number => Context.Quant_Id_Number); -- Make the index in-type constraint: work out the bounds for -- the index and the fdl type of the quantified identifier. DAG.CreateReferenceCell (Ident_Ref, VCG_Heap, The_Identifier); if Dictionary.Is_Null_Symbol (The_Index_Type) then -- No actual index, this means we have an unconstrained -- array. Real_Index_Type := Dictionary.Get_Unconstrained_Array_Index (The_Identifier); DAG.CreateFixedVarCell (Type_Ref, VCG_Heap, Dictionary.GetRootType (Dictionary.GetType (Real_Index_Type))); else -- We do have an index, so just use that. Real_Index_Type := The_Index_Type; DAG.CreateFixedVarCell (Type_Ref, VCG_Heap, Dictionary.GetRootType (Real_Index_Type)); end if; -- We can only have arrays indexed by discrete -- types. Indexing by private types or similar is not -- allowed, so process_discrete is OK to use here. Process_Discrete (The_Type => Real_Index_Type, The_Expression => Ident_Ref, The_Constraint => Range_Constraint, VCG_Heap => VCG_Heap); if Cells.Is_Null_Cell (Range_Constraint) then DAG.CreateTrueCell (VCG_Heap, Range_Constraint); end if; -- Now we assemble the DAG for the quantifier. -- The_Identifier : The_Index_Type DAG.CreateOpCell (Decl_Cell, VCG_Heap, SP_Symbols.colon); DAG.SetLeftArgument (Decl_Cell, Ident_Ref, VCG_Heap); DAG.SetRightArgument (Decl_Cell, Type_Ref, VCG_Heap); -- [range constraint] -> ??? DAG.CreateOpCell (The_Body_Ptr, VCG_Heap, SP_Symbols.implies); DAG.SetLeftArgument (The_Body_Ptr, Range_Constraint, VCG_Heap); -- the right ptr will be filled in by the calling procedure. 
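      -- The complete structure built by this procedure is therefore
      --    for_all (I___n : <index type>, <range constraint> -> <predicate>)
      -- where <predicate> is supplied later through The_Body_Ptr.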
-- [decl_cell], [the_predicate] DAG.CreateOpCell (Comma_Cell, VCG_Heap, SP_Symbols.comma); DAG.SetLeftArgument (Comma_Cell, Decl_Cell, VCG_Heap); DAG.SetRightArgument (Comma_Cell, The_Body_Ptr, VCG_Heap); -- for_all [comma_cell] DAG.CreateOpCell (The_Quantifier, VCG_Heap, SP_Symbols.RWforall); DAG.SetRightArgument (The_Quantifier, Comma_Cell, VCG_Heap); end Make_Quantifier; procedure Process_Array (The_Type : in Dictionary.Symbol; The_Expression : in Cells.Cell; Constraint_List : out Cells.Utility.List.Linked_List; VCG_Heap : in out Cells.Heap_Record; Context : in out Context_T) --# global in out Dictionary.Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; is N_Dim : Positive; Body_Ptr : Cells.Cell; Tmp_Ident : Dictionary.Symbol; Tmp_Ident_Ref : Cells.Cell; Tmp_Quant : Cells.Cell; New_Body_Ptr : Cells.Cell; Element_Cell : Cells.Cell; Tmp_Constraint : Cells.Cell; Final_Index : Cells.Cell := Cells.Null_Cell; Quant_Root : Cells.Cell; Tmp_List : Cells.Utility.List.Linked_List; List_Iter : Cells.Utility.List.Iterator; Quant_Copy : Cells.Cell; function Get_Index_Type (The_Array_Type : in Dictionary.Symbol; Dimension : in Positive; Context : in Context_T) return Dictionary.Symbol --# global in Dictionary.Dict; is R : Dictionary.Symbol; begin if Dictionary.Is_Unconstrained_Array_Type_Mark (The_Array_Type, Context.Scope) then R := Dictionary.NullSymbol; else R := Dictionary.GetArrayIndex (The_Array_Type, Dimension); end if; return R; end Get_Index_Type; function Get_P_Constraint (The_Array_Type : in Dictionary.Symbol; Dimension : in Positive; Context : in Context_T) return Dictionary.Symbol --# global in Dictionary.Dict; is R : Dictionary.Symbol; begin if Dictionary.Is_Unconstrained_Array_Type_Mark (The_Array_Type, Context.Scope) then R := Dictionary.GetSubprogramParameterConstraint (Context.Initial_Var, Dimension); else R := Dictionary.NullSymbol; end if; return R; end Get_P_Constraint; begin N_Dim := Dictionary.GetNumberOfDimensions (The_Type); -- Create the first outermost quantifier. Make_Quantifier (The_Index_Type => Get_Index_Type (The_Type, 1, Context), The_P_Constraint => Get_P_Constraint (The_Type, 1, Context), The_Quantifier => Quant_Root, The_Body_Ptr => Body_Ptr, The_Identifier => Tmp_Ident, VCG_Heap => VCG_Heap, Context => Context); DAG.CreateReferenceCell (Tmp_Ident_Ref, VCG_Heap, Tmp_Ident); Conjoin_Comma (Tmp_Ident_Ref, VCG_Heap, Final_Index); -- Chain the other quantifiers for the higher dimensions. for Dim in Positive range 2 .. N_Dim loop Make_Quantifier (The_Index_Type => Get_Index_Type (The_Type, Dim, Context), The_P_Constraint => Get_P_Constraint (The_Type, Dim, Context), The_Quantifier => Tmp_Quant, The_Body_Ptr => New_Body_Ptr, The_Identifier => Tmp_Ident, VCG_Heap => VCG_Heap, Context => Context); DAG.CreateReferenceCell (Tmp_Ident_Ref, VCG_Heap, Tmp_Ident); Conjoin_Comma (Tmp_Ident_Ref, VCG_Heap, Final_Index); DAG.SetRightArgument (Body_Ptr, Tmp_Quant, VCG_Heap); Body_Ptr := New_Body_Ptr; end loop; -- We can now generate the constraint for each element. First -- we need an array access. Cells.Utility.Create_Array_Access (VCG_Heap => VCG_Heap, The_Array => The_Expression, The_Index => Final_Index, The_Element => Element_Cell); -- Now we create the constraint for the element type. 
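      -- Each non-trivial element constraint obtained here is substituted into
      -- the quantifier skeleton built above, and a copy of the whole
      -- quantified expression is appended to Constraint_List, giving one
      -- conjunct per element-type constraint.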
Process_Type_Rec (The_Type => Dictionary.GetArrayComponent (The_Type), The_Expression => Element_Cell, Assoc_Var => Dictionary.NullSymbol, Constraint_List => Tmp_List, VCG_Heap => VCG_Heap, Context => Context); Cells.Utility.List.Create (VCG_Heap, Constraint_List); List_Iter := Cells.Utility.List.First_Cell (VCG_Heap, Tmp_List); while not Cells.Utility.List.Is_Null_Iterator (List_Iter) loop -- Obtain a constraint. Tmp_Constraint := Cells.Utility.List.Current_Cell (VCG_Heap, List_Iter); if Cells.Utility.Is_True (VCG_Heap, Tmp_Constraint) then -- Do nothing; no need to add this to the list. null; else -- In our temp quantifier, substitute this constraint in. DAG.SetRightArgument (Body_Ptr, Tmp_Constraint, VCG_Heap); -- Now we copy the entire thing. This means in the next -- iteration we can again make use of Body_Ptr to just -- change the predicate. Structures.CopyStructure (VCG_Heap, Quant_Root, Quant_Copy); -- Finally we shove what we have in the constraint list -- returned by this procedure. Cells.Utility.List.Append (VCG_Heap, Constraint_List, Quant_Copy); end if; -- Advance the iterator. List_Iter := Cells.Utility.List.Next_Cell (VCG_Heap, List_Iter); end loop; -- TODO: Clean up the quantifier skeleton. end Process_Array; procedure Process_Type (The_Type : in Dictionary.Symbol; The_Expression : in Cells.Cell; Assoc_Var : in Dictionary.Symbol; Constraint_List : out Cells.Utility.List.Linked_List; VCG_Heap : in out Cells.Heap_Record; Context : in out Context_T) is The_Constraint : Cells.Cell; begin -- Try not change the order of these. The following constraints -- exist: -- -- * We need to check for private (and similar) types first. -- -- * We need to go through the special record case before we -- handle the other always_valid cases. -- -- * We need to check for always valid before we go through any -- other case. if -- The predefined time types are magical. not (Dictionary.IsPredefinedTimeType (The_Type) or Dictionary.IsPredefinedTimeSpanType (The_Type)) and then -- Check if we don't know anything about this type. (Dictionary.IsPrivateType (The_Type, Context.Scope) or Dictionary.TypeIsBoolean (The_Type) or Dictionary.TypeIsOwnAbstractHere (The_Type, Context.Scope) or Dictionary.IsProtectedType (The_Type)) then -- Booleans never have a typecheck. Abstract owns are just -- like private types, which also do not have a -- typecheck. We also don't typecheck protected types for -- the same reason. Cells.Utility.List.Create (VCG_Heap, Constraint_List); elsif DAG.RecordTypeWithCheck (The_Type, Context.Scope) then Process_Record (Record_Type => The_Type, The_Expression => The_Expression, Assoc_Var => Assoc_Var, Constraint_List => Constraint_List, VCG_Heap => VCG_Heap, Context => Context); elsif not Dictionary.Is_Null_Symbol (Assoc_Var) then -- If we have an assoc var, then this means that -- Consider_Always_Valid has been requested. This variable -- is an own in variable; if it is always valid we say so -- otherwise we generate just `true' as an in-type -- hypotheses. Cells.Utility.List.Create (VCG_Heap, Constraint_List); if Dictionary.VariableOrSubcomponentIsMarkedValid (Assoc_Var) then Process_Always_Valid (The_Type => The_Type, The_Expression => The_Expression, Assoc_Var => Assoc_Var, The_Constraint => The_Constraint, VCG_Heap => VCG_Heap); Cells.Utility.List.Append (VCG_Heap, Constraint_List, The_Constraint); else -- Do nothing. 
null; end if; elsif DAG.DiscreteTypeWithCheck (The_Type, Context.Scope) then Process_Discrete (The_Type => The_Type, The_Expression => The_Expression, The_Constraint => The_Constraint, VCG_Heap => VCG_Heap); Cells.Utility.List.Create (VCG_Heap, Constraint_List); Cells.Utility.List.Append (VCG_Heap, Constraint_List, The_Constraint); if DAG.IsRealType (The_Type) then Context.VC_Contains_Reals := True; end if; elsif DAG.ArrayTypeWithCheck (The_Type, Context.Scope) then Process_Array (The_Type => The_Type, The_Expression => The_Expression, Constraint_List => Constraint_List, VCG_Heap => VCG_Heap, Context => Context); elsif Dictionary.IsUnknownTypeMark (The_Type) then -- In the old world of CreateStuctConstraint we used to -- silently generate `true'. Now we fail with more noise. We -- can get here if, for example, we had semantic errors -- earlier in the analysis. Cells.Utility.List.Create (VCG_Heap, Constraint_List); Context.VC_Failure := True; else Cells.Utility.List.Create (VCG_Heap, Constraint_List); Context.VC_Failure := True; Debug.PrintMsg ("*****************************************************************************", True); Debug.PrintBool ("* istype: ", Dictionary.IsType (The_Type)); Debug.Print_Sym ("* offending type: ", The_Type); Debug.PrintBool ("* is_task: ", Dictionary.TypeIsTask (The_Type)); Debug.PrintBool ("* is_own: ", Dictionary.TypeIsOwnAbstractHere (The_Type, Context.Scope)); Debug.PrintDAG ("* exp: ", The_Expression, VCG_Heap, Context.Scope); SystemErrors.Fatal_Error (SystemErrors.Assertion_Failure, "unhandeled type in DAG.create_constraint"); end if; -- Debug.PrintDAG (">>> ", The_Constraint, VCG_Heap, Context.Scope); end Process_Type; procedure Make (The_Type : in Dictionary.Symbol; The_Expression : in Cells.Cell; Scope : in Dictionary.Scopes; Consider_Always_Valid : in Boolean; The_Constraint : out Cells.Cell; VCG_Heap : in out Cells.Heap_Record; VC_Contains_Reals : in out Boolean; VC_Failure : in out Boolean) is --# hide Make; -- So we can transition to flow=auto here. Context : Context_T; Assoc_Var : Dictionary.Symbol; Initial_Var : Dictionary.Symbol; Constraint_List : Cells.Utility.List.Linked_List; begin -- Debug.PrintMsg ("Creating contraint for:", True); -- Debug.PrintScope (" scope: ", Scope); -- Debug.Print_Sym (" type : ", The_Type); -- Debug.PrintDAG (" expr : ", The_Expression, VCG_Heap, Scope); -- The initial variable is used to deal with unconstrained -- arrays. if Cells.Get_Kind (VCG_Heap, The_Expression) = Cell_Storage.Fixed_Var or Cells.Is_Reference_Cell (VCG_Heap, The_Expression) then Initial_Var := Cells.Get_Symbol_Value (VCG_Heap, The_Expression); else Initial_Var := Dictionary.NullSymbol; end if; Context := Context_T' (VC_Contains_Reals => VC_Contains_Reals, VC_Failure => VC_Failure, Quant_Id_Number => 1, Scope => Scope, Initial_Var => Initial_Var); -- If we are asked to consider 'Always_Valid, we need to work -- out the actual variable that The_Expression represents. This -- significantly changes the behaviour of the code in this -- package. If the entire variable is marked always valid we -- generate a very special constraint. For records with -- individual fields marked always_valid, we do something very -- different. See Process_Record for a description. if Consider_Always_Valid then Assoc_Var := Initial_Var; if not (Dictionary.GetOwnVariableOrConstituentMode (Assoc_Var) = Dictionary.InMode) then -- If this is not actually an own in, we don't care. 
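            -- Clearing Assoc_Var here means that ordinary in-type constraints
            -- are generated for the variable rather than 'Always_Valid
            -- hypotheses.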
Assoc_Var := Dictionary.NullSymbol; end if; else Assoc_Var := Dictionary.NullSymbol; end if; Process_Type (The_Type => The_Type, The_Expression => The_Expression, Assoc_Var => Assoc_Var, Constraint_List => Constraint_List, VCG_Heap => VCG_Heap, Context => Context); Cells.Utility.List.Join_And (VCG_Heap, Constraint_List, The_Constraint); VC_Contains_Reals := Context.VC_Contains_Reals; VC_Failure := Context.VC_Failure; -- Debug.PrintDAG (" const: ", The_Constraint, VCG_Heap, Scope); end Make; end Type_Constraint; ././@LongLink0000000000000000000000000000014600000000000011566 Lustar rootrootspark-2012.0.deb/examiner/errorhandler-conversions-tostring-controlflowerror-controlflowerrorexpl.adbspark-2012.0.deb/examiner/errorhandler-conversions-tostring-controlflowerror-controlflowerrorexpl.ad0000644000175000017500000000205511753202337033417 0ustar eugeneugenseparate (ErrorHandler.Conversions.ToString.ControlFlowError) procedure ControlFlowErrorExpl (E_Str : in out E_Strings.T) is begin case Err_Type is when ErrorHandler.Misplaced_Exit => E_Strings.Append_String (E_Str => E_Str, Str => "Exit statements must be of the form ""exit when c;"" where the closest" & " enclosing statement is a loop or ""if c then S; exit;"" where the" & " if statement has no else part and its closest enclosing" & " statement is a loop. See the SPARK Definition for details."); when ErrorHandler.Misplaced_Return => E_Strings.Append_String (E_Str => E_Str, Str => "A return statement may only occur as the last statement of a function."); when ErrorHandler.Missing_Return => E_Strings.Append_String (E_Str => E_Str, Str => "SPARK requires that the last statement of a function be a return statement."); when others => null; end case; end ControlFlowErrorExpl; spark-2012.0.deb/examiner/errorhandler-conversions-tostring-condldependency-condldependencyexpl.adb0000644000175000017500000000575711753202337033025 0ustar eugeneugenseparate (ErrorHandler.Conversions.ToString.CondlDependency) procedure CondlDependencyExpl (E_Str : in out E_Strings.T) is begin case Err_Type is when ErrorHandler.May_Be_Used_New => E_Strings.Append_String (E_Str => E_Str, Str => "Here the item on the left of ""may be derived from ..."" is an exported variable and" & " the item(s) on the right are imports of a procedure subprogram." & " The message reports a possible dependency, found in the code, which" & " does not appear in the specified dependency relation (derives annotation)." & " The discrepancy could be caused by an error in the subprogram code which implements" & " an unintended dependency. It could also be in an error in the subprogram derives annotation" & " which omits a necessary and intended dependency. Finally, the Examiner may be reporting" & " a false coupling between two items resulting from a non-executable code path or the" & " sharing of disjoint parts of structured or abstract data (e.g one variable writing to one element" & " of an array and another variable reading back a different element)." & " Unexpected dependencies should be investigated carefully and only accepted without modification" & " of either code or annotation if it is certain they are of ""false coupling"" kind."); when ErrorHandler.May_Be_Used => E_Strings.Append_String (E_Str => E_Str, Str => "Here first item is an import and the second is an export of a procedure subprogram." & " The message reports a possible dependency, found in the code, which" & " does not appear in the specified dependency relation." 
& " This version of the message has been retained for backward compatibility."); when ErrorHandler.Uninitialised => E_Strings.Append_String (E_Str => E_Str, Str => "Here XXX is a non-imported variable, and YYY is an export, of a" & " procedure subprogram."); when ErrorHandler.Integrity_Violation => E_Strings.Append_String (E_Str => E_Str, Str => "This message indicates a violation of security or safety policy, such" & " as information flow from a Secret input to an Unclassified output."); when ErrorHandler.May_Be_Integrity_Violation => E_Strings.Append_String (E_Str => E_Str, Str => "Here XXX is an import and YYY is an export of a procedure subprogram." & " The message reports a possible dependency, found in the code, which" & " does not appear in the specified dependency relation. If this dependency" & " did appear in the dependency relation, then it would also constitute" & " an integrity violation."); when others => null; end case; end CondlDependencyExpl; spark-2012.0.deb/examiner/dictionary-addwithreference.adb0000644000175000017500000004703011753202336022373 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Dictionary) procedure AddWithReference (The_Visibility : in Visibility; The_Unit : in Symbol; The_Withed_Symbol : in Symbol; Explicit : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Already_Present : out Boolean) is The_Withed_Package : RawDict.Package_Info_Ref; The_Withed_Subprogram : RawDict.Subprogram_Info_Ref; The_Generic_Unit : RawDict.Generic_Unit_Info_Ref; The_Context_Clause : RawDict.Context_Clause_Info_Ref; Need_To_Add : Boolean; -------------------------------------------------------------------------------- procedure Add_With_Clause (The_Withed_Symbol : in Symbol; The_Unit : in Symbol; Declaration : in Location) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Declaration, --# Dict, --# LexTokenManager.State, --# The_Unit, --# The_Withed_Symbol; is begin if SPARK_IO.Is_Open (Dict.TemporaryFile) then Write_String (Dict.TemporaryFile, "with clause of "); Write_Name (File => Dict.TemporaryFile, Item => The_Withed_Symbol); Write_String (Dict.TemporaryFile, " in "); Write_Name (File => Dict.TemporaryFile, Item => The_Unit); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => Declaration); Write_Line (Dict.TemporaryFile, " ;"); end if; end Add_With_Clause; -------------------------------------------------------------------------------- procedure Add_Package_Visible_With_Reference (The_Context_Clause : in RawDict.Context_Clause_Info_Ref; The_Package : in RawDict.Package_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Context_Clause, --# The_Package; is begin RawDict.Set_Next_Context_Clause (The_Context_Clause => The_Context_Clause, Next => RawDict.Get_Package_Visible_With_Clauses (The_Package => The_Package)); RawDict.Set_Package_Visible_With_Clauses (The_Package => The_Package, The_Context_Clause => The_Context_Clause); end Add_Package_Visible_With_Reference; -------------------------------------------------------------------------------- procedure Add_Package_Local_With_Reference (The_Context_Clause : in RawDict.Context_Clause_Info_Ref; The_Package : in RawDict.Package_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Context_Clause, --# The_Package; is begin RawDict.Set_Next_Context_Clause (The_Context_Clause => The_Context_Clause, Next => RawDict.Get_Package_Local_With_Clauses (The_Package => The_Package)); RawDict.Set_Package_Local_With_Clauses (The_Package => The_Package, The_Context_Clause => The_Context_Clause); end Add_Package_Local_With_Reference; -------------------------------------------------------------------------------- procedure Add_Subprogram_With_Reference (The_Context_Clause : in RawDict.Context_Clause_Info_Ref; The_Subprogram : in RawDict.Subprogram_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Context_Clause, --# The_Subprogram; is begin RawDict.Set_Next_Context_Clause (The_Context_Clause => The_Context_Clause, Next => RawDict.Get_Subprogram_With_Clauses (The_Subprogram => The_Subprogram)); RawDict.Set_Subprogram_With_Clauses (The_Subprogram => The_Subprogram, The_Context_Clause => The_Context_Clause); end Add_Subprogram_With_Reference; -------------------------------------------------------------------------------- procedure Add_Protected_Type_With_Reference (The_Context_Clause : in RawDict.Context_Clause_Info_Ref; The_Protected_Type : in 
RawDict.Type_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Context_Clause, --# The_Protected_Type; is begin RawDict.Set_Next_Context_Clause (The_Context_Clause => The_Context_Clause, Next => RawDict.Get_Protected_Type_With_Clauses (The_Protected_Type => The_Protected_Type)); RawDict.Set_Protected_Type_With_Clauses (The_Protected_Type => The_Protected_Type, The_Context_Clause => The_Context_Clause); end Add_Protected_Type_With_Reference; -------------------------------------------------------------------------------- procedure Add_Task_Type_With_Reference (The_Context_Clause : in RawDict.Context_Clause_Info_Ref; The_Task_Type : in RawDict.Type_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Context_Clause, --# The_Task_Type; is begin RawDict.Set_Next_Context_Clause (The_Context_Clause => The_Context_Clause, Next => RawDict.Get_Task_Type_With_Clauses (The_Task_Type => The_Task_Type)); RawDict.Set_Task_Type_With_Clauses (The_Task_Type => The_Task_Type, The_Context_Clause => The_Context_Clause); end Add_Task_Type_With_Reference; -------------------------------------------------------------------------------- procedure Check_If_Already_Present (The_Withed_Symbol : in Symbol; The_Visibility : in Visibility; The_Unit : in Symbol; Explicit : in Boolean; Already_Present : out Boolean; Need_To_Add : out Boolean) --# global in out Dict; --# derives Already_Present, --# Dict from Dict, --# Explicit, --# The_Unit, --# The_Visibility, --# The_Withed_Symbol & --# Need_To_Add from Dict, --# The_Unit, --# The_Visibility, --# The_Withed_Symbol; is The_Withed_Package : RawDict.Package_Info_Ref; The_Withed_Subprogram : RawDict.Subprogram_Info_Ref; The_Generic_Unit : RawDict.Generic_Unit_Info_Ref; The_Context_Clause : RawDict.Context_Clause_Info_Ref; begin Already_Present := False; Need_To_Add := True; case The_Visibility is when Visible | Privat => case RawDict.GetSymbolDiscriminant (The_Unit) is when Package_Symbol => The_Context_Clause := RawDict.Get_Package_Visible_With_Clauses (The_Package => RawDict.Get_Package_Info_Ref (Item => The_Unit)); -- GAA External when Generic_Unit_Symbol => The_Generic_Unit := RawDict.Get_Generic_Unit_Info_Ref (Item => The_Unit); -- GAA External case RawDict.Get_Generic_Unit_Kind (The_Generic_Unit => The_Generic_Unit) is when Generic_Of_Package => The_Context_Clause := RawDict.Get_Package_Visible_With_Clauses (The_Package => RawDict.Get_Generic_Unit_Owning_Package (The_Generic_Unit => The_Generic_Unit)); when Generic_Of_Subprogram => The_Context_Clause := RawDict.Get_Subprogram_With_Clauses (The_Subprogram => RawDict.Get_Generic_Unit_Owning_Subprogram (The_Generic_Unit => The_Generic_Unit)); end case; when others => -- non-exec code The_Context_Clause := RawDict.Null_Context_Clause_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Check_If_Already_Present"); end case; when Local => case RawDict.GetSymbolDiscriminant (The_Unit) is when Package_Symbol => The_Context_Clause := RawDict.Get_Package_Local_With_Clauses (The_Package => RawDict.Get_Package_Info_Ref (Item => The_Unit)); -- GAA External when Subprogram_Symbol => The_Context_Clause := RawDict.Get_Subprogram_With_Clauses (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => The_Unit)); -- GAA External when Type_Symbol => case RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => The_Unit)) is -- GAA External when Protected_Type_Item => The_Context_Clause := RawDict.Get_Protected_Type_With_Clauses 
(The_Protected_Type => RawDict.Get_Type_Info_Ref (Item => The_Unit)); -- GAA External when Task_Type_Item => The_Context_Clause := RawDict.Get_Task_Type_With_Clauses (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => The_Unit)); -- GAA External when others => -- non-exec code The_Context_Clause := RawDict.Null_Context_Clause_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Check_If_Already_Present"); end case; when others => -- non-exec code The_Context_Clause := RawDict.Null_Context_Clause_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Check_If_Already_Present"); end case; end case; case RawDict.GetSymbolDiscriminant (The_Withed_Symbol) is when Package_Symbol => The_Withed_Package := RawDict.Get_Package_Info_Ref (Item => The_Withed_Symbol); -- GAA External loop exit when The_Context_Clause = RawDict.Null_Context_Clause_Info_Ref; if not RawDict.Get_Context_Clause_Is_Subprogram (The_Context_Clause => The_Context_Clause) and then RawDict.Get_Context_Clause_Package (The_Context_Clause => The_Context_Clause) = The_Withed_Package then Need_To_Add := False; if Explicit then if RawDict.Get_Context_Clause_Explicit (The_Context_Clause => The_Context_Clause) then Already_Present := True; else RawDict.Set_Context_Clause_Explicit (The_Context_Clause => The_Context_Clause); end if; end if; exit; end if; The_Context_Clause := RawDict.Get_Next_Context_Clause (The_Context_Clause => The_Context_Clause); end loop; when Subprogram_Symbol | ImplicitProofFunctionSymbol => case RawDict.GetSymbolDiscriminant (The_Withed_Symbol) is when Subprogram_Symbol => The_Withed_Subprogram := RawDict.Get_Subprogram_Info_Ref (Item => The_Withed_Symbol); -- GAA External when ImplicitProofFunctionSymbol => The_Withed_Subprogram := RawDict.GetImplicitProofFunctionAdaFunction (The_Withed_Symbol); when others => The_Withed_Subprogram := RawDict.Null_Subprogram_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Check_If_Already_Present"); end case; loop exit when The_Context_Clause = RawDict.Null_Context_Clause_Info_Ref; if RawDict.Get_Context_Clause_Is_Subprogram (The_Context_Clause => The_Context_Clause) and then RawDict.Get_Context_Clause_Subprogram (The_Context_Clause => The_Context_Clause) = The_Withed_Subprogram then Need_To_Add := False; if Explicit then if RawDict.Get_Context_Clause_Explicit (The_Context_Clause => The_Context_Clause) then Already_Present := True; else RawDict.Set_Context_Clause_Explicit (The_Context_Clause => The_Context_Clause); end if; end if; exit; end if; The_Context_Clause := RawDict.Get_Next_Context_Clause (The_Context_Clause => The_Context_Clause); end loop; when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Check_If_Already_Present"); end case; end Check_If_Already_Present; begin -- AddWithReference Check_If_Already_Present (The_Withed_Symbol => The_Withed_Symbol, The_Visibility => The_Visibility, The_Unit => The_Unit, Explicit => Explicit, Already_Present => Already_Present, Need_To_Add => Need_To_Add); if Need_To_Add then Add_With_Clause (The_Withed_Symbol => The_Withed_Symbol, The_Unit => The_Unit, Declaration => Declaration); case RawDict.GetSymbolDiscriminant (The_Withed_Symbol) is when Package_Symbol => The_Withed_Package := RawDict.Get_Package_Info_Ref (Item => The_Withed_Symbol); -- GAA External The_Withed_Subprogram := RawDict.Null_Subprogram_Info_Ref; 
when Subprogram_Symbol => The_Withed_Package := RawDict.Null_Package_Info_Ref; The_Withed_Subprogram := RawDict.Get_Subprogram_Info_Ref (Item => The_Withed_Symbol); -- GAA External when ImplicitProofFunctionSymbol => The_Withed_Package := RawDict.Null_Package_Info_Ref; The_Withed_Subprogram := RawDict.GetImplicitProofFunctionAdaFunction (The_Withed_Symbol); when others => -- non-exec code The_Withed_Package := RawDict.Null_Package_Info_Ref; The_Withed_Subprogram := RawDict.Null_Subprogram_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AddWithReference"); end case; RawDict.Create_Context_Clause (The_Package => The_Withed_Package, The_Subprogram => The_Withed_Subprogram, Explicit => Explicit, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, The_Context_Clause => The_Context_Clause); case The_Visibility is when Visible | Privat => case RawDict.GetSymbolDiscriminant (The_Unit) is when Package_Symbol => Add_Package_Visible_With_Reference (The_Context_Clause => The_Context_Clause, The_Package => RawDict.Get_Package_Info_Ref (Item => The_Unit)); -- GAA External when Generic_Unit_Symbol => The_Generic_Unit := RawDict.Get_Generic_Unit_Info_Ref (Item => The_Unit); -- GAA External case RawDict.Get_Generic_Unit_Kind (The_Generic_Unit => The_Generic_Unit) is when Generic_Of_Package => Add_Package_Visible_With_Reference (The_Context_Clause => The_Context_Clause, The_Package => RawDict.Get_Generic_Unit_Owning_Package (The_Generic_Unit => The_Generic_Unit)); when Generic_Of_Subprogram => Add_Subprogram_With_Reference (The_Context_Clause => The_Context_Clause, The_Subprogram => RawDict.Get_Generic_Unit_Owning_Subprogram (The_Generic_Unit => The_Generic_Unit)); end case; when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AddWithReference"); end case; when Local => case RawDict.GetSymbolDiscriminant (The_Unit) is when Package_Symbol => Add_Package_Local_With_Reference (The_Context_Clause => The_Context_Clause, The_Package => RawDict.Get_Package_Info_Ref (Item => The_Unit)); -- GAA External when Subprogram_Symbol => Add_Subprogram_With_Reference (The_Context_Clause => The_Context_Clause, The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => The_Unit)); -- GAA External when Type_Symbol => case RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => The_Unit)) is -- GAA External when Protected_Type_Item => Add_Protected_Type_With_Reference (The_Context_Clause => The_Context_Clause, The_Protected_Type => RawDict.Get_Type_Info_Ref (Item => The_Unit)); -- GAA External when Task_Type_Item => Add_Task_Type_With_Reference (The_Context_Clause => The_Context_Clause, The_Task_Type => RawDict.Get_Type_Info_Ref (Item => The_Unit)); -- GAA External when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AddWithReference"); end case; when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AddWithReference"); end case; end case; AddOtherReference (The_Withed_Symbol, The_Unit, Declaration); end if; end AddWithReference; spark-2012.0.deb/examiner/dag-buildexpndag.adb0000644000175000017500000060546111753202336020134 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- 
The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Debug; with ExaminerConstants; with E_Strings; with Labels; with SeqAlgebra; with SystemErrors; separate (DAG) procedure BuildExpnDAG (StartNode : in STree.SyntaxNode; ExpnScope : in Dictionary.Scopes; Scope : in Dictionary.Scopes; LineNmbr : in Integer; DoRtc : in Boolean; AssumeRvalues : in Boolean; LoopStack : in LoopContext.T; FlowHeap : in out Heap.HeapRecord; VCGHeap : in out Cells.Heap_Record; ContainsReals : in out Boolean; VCGFailure : in out Boolean; ShortCircuitStack : in out CStacks.Stack; CheckStack : in out CStacks.Stack; KindOfStackedCheck : in out Graph.Proof_Context_Type; DAGRoot : out Cells.Cell) -- This procedure traverses a syntax tree of an expression, which may be -- for example: -- - an expression of an assignment statement, -- - a condition of an if_statement (or elsif_part), -- - an expression of a case_statement, -- - a condition of an iteration scheme. -- - a subprogram parameter is DAGCell : Cells.Cell; ExpnStack : CStacks.Stack; NodeType : SP_Symbols.SP_Symbol; LastNode, Node : STree.SyntaxNode; ReferencedVars : SeqAlgebra.Seq; -- Set of rvalues of expression -- Populate set of r-values. This procedure is called from ProcessIdentifier and -- ProcessSelectedComponent whenever a variable is found. A set of referenced -- entire variables (R-values) is constructed by this means. procedure AddRvalueSymbol (TheHeap : in out Heap.HeapRecord; Sequ : in SeqAlgebra.Seq; Sym : in Dictionary.Symbol) --# global in AssumeRvalues; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Scope; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# TheHeap from *, --# AssumeRvalues, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# Sequ, --# Sym, --# TheHeap; is function IsLocalVariable return Boolean --# global in Dictionary.Dict; --# in Scope; --# in Sym; is begin -- A variable is "local" if its scope if that of the current -- subprogram, and it's not a formal parameter. return Dictionary.Is_Variable (Sym) and then Dictionary.GetScope (Sym) = Scope and then not Dictionary.IsSubprogramParameter (Sym); end IsLocalVariable; function IsOutModeFormalParameter return Boolean --# global in Dictionary.Dict; --# in Scope; --# in Sym; is begin return Dictionary.GetScope (Sym) = Scope and then Dictionary.IsSubprogramParameter (Sym) and then Dictionary.GetSubprogramParameterMode (Sym) = Dictionary.OutMode; end IsOutModeFormalParameter; function IsDeferredNonPrivateConstant return Boolean --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in Scope; --# in Sym; is begin -- If a non-private, scalar constant is referenced and we don't know its value -- then no rules will be generated. 
In that specific case it is worth asserting -- that the value is in type because that is the best we can do unless the type is universal. return Dictionary.Is_Constant (Sym) and then DiscreteTypeWithCheck (Dictionary.GetType (Sym), Scope) and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Dictionary.Get_Value (The_Constant => Sym), Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq and then -- i.e. no value known not (Dictionary.IsUniversalIntegerType (Dictionary.GetType (Sym)) or Dictionary.IsUniversalRealType (Dictionary.GetType (Sym))); end IsDeferredNonPrivateConstant; begin -- AddRvalueSymbol if AssumeRvalues then -- Only add local variables or (in SPARK95) an "out" mode formal parameter or deferred constant if IsLocalVariable or IsOutModeFormalParameter or IsDeferredNonPrivateConstant then SeqAlgebra.AddMember (TheHeap, Sequ, Natural (Dictionary.SymbolRef (Sym))); end if; end if; end AddRvalueSymbol; ------------------------------------------------------------------------ procedure ConjoinParamConstraint (Type_Sym : in Dictionary.Symbol; Var_Sym : in Dictionary.Symbol; DAGCell : in out Cells.Cell) --# global in Scope; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out VCGFailure; --# in out VCGHeap; --# derives ContainsReals, --# DAGCell, --# Dictionary.Dict, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# VCGFailure from *, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# Type_Sym, --# Var_Sym, --# VCGHeap & --# Statistics.TableUsage, --# VCGHeap from *, --# DAGCell, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# Type_Sym, --# Var_Sym, --# VCGHeap; is Constr, VarCell : Cells.Cell; begin CreateReferenceCell (VarCell, VCGHeap, Var_Sym); Type_Constraint.Make (The_Type => Type_Sym, The_Expression => VarCell, Scope => Scope, Consider_Always_Valid => False, The_Constraint => Constr, VCG_Heap => VCGHeap, VC_Contains_Reals => ContainsReals, VC_Failure => VCGFailure); if not Cells.Is_Null_Cell (Constr) then Cells.Utility.Conjoin (VCGHeap, Constr, DAGCell); end if; end ConjoinParamConstraint; -- Generate hypotheses that all variables in the -- ReferencedVars set are in their type procedure PlantRvalueAssumptions (ReferencedVars : in SeqAlgebra.Seq) --# global in Scope; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out FlowHeap; --# in out Graph.Table; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives ContainsReals, --# Dictionary.Dict, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Statistics.TableUsage, --# VCGFailure from *, --# Dictionary.Dict, --# FlowHeap, --# LexTokenManager.State, --# ReferencedVars, --# Scope, --# VCGHeap & --# FlowHeap from *, --# ReferencedVars & --# Graph.Table, --# StmtStack.S, --# VCGHeap from Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# LexTokenManager.State, --# ReferencedVars, --# Scope, --# StmtStack.S, --# VCGHeap; is DAGCell : Cells.Cell := Cells.Null_Cell; VarSym : Dictionary.Symbol; TypeSym : Dictionary.Symbol; X : SeqAlgebra.MemberOfSeq; StmtLabel : Labels.Label; StmtCell : Cells.Cell; begin -- ReferencedVars is a set of R-value leaves populated by BuildExpnDAG X := SeqAlgebra.FirstMember (FlowHeap, ReferencedVars); while not SeqAlgebra.IsNullMember (X) loop VarSym := Dictionary.ConvertSymbolRef 
(ExaminerConstants.RefType (SeqAlgebra.Value_Of_Member (The_Heap => FlowHeap, M => X))); TypeSym := Dictionary.GetType (VarSym); ConjoinParamConstraint (TypeSym, VarSym, DAGCell); -- remove each element after it is used to recover heap space and prevent -- unnecessary repetition of hypothese SeqAlgebra.RemoveMember (FlowHeap, ReferencedVars, SeqAlgebra.Value_Of_Member (The_Heap => FlowHeap, M => X)); X := SeqAlgebra.FirstMember (FlowHeap, ReferencedVars); -- original method replaced two lines baove with one below, doen't recover heap space -- X := SeqAlgebra.NextMember (FlowHeap, X); end loop; -- DAGCell is now a complete list of constraints that we need to plant as a set of hypotheses PrepareLabel (VCGHeap, StmtLabel, StmtCell); SetRightArgument (StmtCell, DAGCell, VCGHeap); Chain (StmtLabel, VCGHeap); end PlantRvalueAssumptions; ------------------------------------------------------------------------ -- Conditionally generate hypotheses that all variables in the -- ReferencedVars set are in their type procedure CheckPlantRvalueAssumptions --# global in AssumeRvalues; --# in ReferencedVars; --# in Scope; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out FlowHeap; --# in out Graph.Table; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives ContainsReals, --# Dictionary.Dict, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Statistics.TableUsage, --# VCGFailure from *, --# AssumeRvalues, --# Dictionary.Dict, --# FlowHeap, --# LexTokenManager.State, --# ReferencedVars, --# Scope, --# VCGHeap & --# FlowHeap from *, --# AssumeRvalues, --# ReferencedVars & --# Graph.Table, --# StmtStack.S, --# VCGHeap from AssumeRvalues, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# LexTokenManager.State, --# ReferencedVars, --# Scope, --# StmtStack.S, --# VCGHeap; is begin if AssumeRvalues then PlantRvalueAssumptions (ReferencedVars); end if; end CheckPlantRvalueAssumptions; --------------------------------------------------------------------- -- Concrete_Function is a reference to a concrete function declaration. -- Abstraction specifies whether the abstract or refined view of the -- precondition is required. -- The_Precondition is a DAG containing the prcondition expression - it will -- be null if the function does not have a precondition. 
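-- For example, given a concrete function declared as
--    function Add_One (X : Integer) return Integer;
--    --# pre X < Integer'Last;
-- The_Precondition delivered here is the DAG of the expression
-- "X < Integer'Last", still phrased in terms of the formal parameter X;
-- substitution of actual parameters is performed later by the caller
-- (see Plant_Precondition_Check).  Add_One is an illustrative name only.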
procedure Get_Precondition (Concrete_Function : in Dictionary.Symbol; Abstraction : in Dictionary.Abstractions; Scope : in Dictionary.Scopes; The_Precondition : out Cells.Cell; Function_Defs : in out CStacks.Stack) --# global in CommandLineData.Content; --# in LoopStack; --# in STree.Table; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out VCGFailure; --# in out VCGHeap; --# derives ContainsReals, --# Dictionary.Dict, --# Function_Defs, --# LexTokenManager.State, --# Statistics.TableUsage, --# VCGFailure, --# VCGHeap from *, --# Abstraction, --# CommandLineData.Content, --# Concrete_Function, --# Dictionary.Dict, --# Function_Defs, --# LexTokenManager.State, --# LoopStack, --# Scope, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# CommandLineData.Content, --# Concrete_Function, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Function_Defs, --# LexTokenManager.State, --# LoopStack, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# VCGHeap & --# The_Precondition from Abstraction, --# CommandLineData.Content, --# Concrete_Function, --# Dictionary.Dict, --# Function_Defs, --# LexTokenManager.State, --# LoopStack, --# Scope, --# STree.Table, --# VCGHeap; is Instantiated_Subprogram : Dictionary.Symbol; Local_Abstraction : Dictionary.Abstractions; Constraint : STree.SyntaxNode; begin if Dictionary.IsInstantiation (Concrete_Function) then Instantiated_Subprogram := Concrete_Function; Local_Abstraction := Dictionary.IsAbstract; else -- not generic Instantiated_Subprogram := Dictionary.NullSymbol; Local_Abstraction := Abstraction; end if; Constraint := STree.RefToNode (Dictionary.GetPrecondition (Local_Abstraction, Concrete_Function)); if Constraint /= STree.NullNode then Build_Annotation_Expression (Exp_Node => Constraint, Instantiated_Subprogram => Instantiated_Subprogram, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Concrete_Function), Calling_Scope => Scope, Force_Abstract => Abstraction = Dictionary.IsAbstract, Loop_Stack => LoopStack, Generate_Function_Instantiations => True, VC_Failure => VCGFailure, VC_Contains_Reals => ContainsReals, VCG_Heap => VCGHeap, DAG_Root => The_Precondition, Function_Defs => Function_Defs); else The_Precondition := Cells.Null_Cell; end if; end Get_Precondition; ------------------------------------------------------------------- -- Actual_Called_Function is data structure created by Setup_Function_Call -- for a called function and must refer to a concrete function. -- Abstraction specifies whether the abstract or refined view of the -- precondition is to be planted. 
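-- For example, for a call F (A + 1) where F is declared with
--    --# pre X /= 0;
-- the check stacked here is, after parameter substitution, "A + 1 /= 0",
-- which later appears as a precondition check in the generated VCs.
-- (F, A and X are illustrative names only.)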
procedure Plant_Precondition_Check (Actual_Function_Call : in Cells.Cell; Scope : in Dictionary.Scopes; Abstraction : in Dictionary.Abstractions) --# global in CommandLineData.Content; --# in LoopStack; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# Graph.Table, --# Statistics.TableUsage, --# StmtStack.S from *, --# Abstraction, --# Actual_Function_Call, --# CommandLineData.Content, --# Dictionary.Dict, --# Graph.Table, --# LexTokenManager.State, --# LoopStack, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ContainsReals, --# Dictionary.Dict, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# VCGFailure from *, --# Abstraction, --# Actual_Function_Call, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# LoopStack, --# Scope, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# Actual_Function_Call, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# LoopStack, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# VCGHeap & --# VCGHeap from *, --# Abstraction, --# Actual_Function_Call, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# Graph.Table, --# LexTokenManager.State, --# LoopStack, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table; is DAG_Cell : Cells.Cell; Conjoined_Function_Defs : Cells.Cell; Function_Symbol : Dictionary.Symbol; Function_Defs : CStacks.Stack; begin -- Stack of definitions of functions called within the precondition CStacks.CreateStack (Function_Defs); Function_Symbol := Cells.Get_Symbol_Value (VCGHeap, Actual_Function_Call); -- Function_Symbol must refer to a concrete function. Get_Precondition (Concrete_Function => Function_Symbol, Abstraction => Abstraction, Scope => Scope, The_Precondition => DAG_Cell, Function_Defs => Function_Defs); if not Cells.Is_Null_Cell (DAG_Cell) then AddAnyShortCircuitImplications (VCGHeap, DAG_Cell, ShortCircuitStack); Substitutions.Substitute_Parameters (Called_Function => Actual_Function_Call, Constraint => DAG_Cell, VCG_Heap => VCGHeap); if not CStacks.IsEmpty (Function_Defs) then -- There are called function definitions. -- Use null statement as place holder for them. ModelNullStmt (VCGHeap); -- Conjoin all the function definitions on the stack --# accept F, 10, Function_Defs, "The stack has been emptied"; Join_And (Stack => Function_Defs, Conjunct => Conjoined_Function_Defs, VCG_Heap => VCGHeap); --# end accept; -- Substitute actual parameters for formal parameters referenced in -- the function calls within the precondition Substitutions.Substitute_Parameters (Called_Function => Actual_Function_Call, Constraint => Conjoined_Function_Defs, VCG_Heap => VCGHeap); -- Assume the function definitions from the point of the null statement IncorporateAssumption (VCGHeap, Conjoined_Function_Defs); end if; StackCheckStatement (DAG_Cell, VCGHeap, CheckStack); KindOfStackedCheck := Graph.Precon_Check; end if; -- No need to check type of import globals because these must already -- be in type because of previous checks. 
end Plant_Precondition_Check; ------------------------------------------------------------------- -- Assume_Function_Return_Type plants an assumption in the DAG that a -- function return value is in-type provided it is not an -- unchecked conversion function. -- The Implicit_Function must refer to an implicitly declared proof function -- corresponding to a concrete function declaration. procedure Assume_Function_Return_Type (Implicit_Function : in Cells.Cell) --# global in Scope; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out Graph.Table; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives ContainsReals, --# Dictionary.Dict, --# LexTokenManager.State, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# VCGFailure from *, --# Dictionary.Dict, --# Implicit_Function, --# LexTokenManager.State, --# Scope, --# VCGHeap & --# Graph.Table, --# StmtStack.S, --# VCGHeap from Dictionary.Dict, --# Graph.Table, --# Implicit_Function, --# LexTokenManager.State, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# VCGHeap & --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Implicit_Function, --# LexTokenManager.State, --# Scope, --# ShortCircuitStack, --# VCGHeap; is Concrete_Function_Symbol, Proof_Function_Symbol : Dictionary.Symbol; StmtCell, RetCell : Cells.Cell; StmtLabel : Labels.Label; begin Proof_Function_Symbol := Cells.Get_Symbol_Value (VCGHeap, Implicit_Function); Concrete_Function_Symbol := Dictionary.GetAdaFunction (Proof_Function_Symbol); -- Don't assume return type is valid if it is an unchecked conversion if not Dictionary.IsAnUncheckedConversion (Concrete_Function_Symbol) then Type_Constraint.Make (The_Type => Dictionary.GetType (Proof_Function_Symbol), The_Expression => Implicit_Function, Scope => Scope, Consider_Always_Valid => False, The_Constraint => RetCell, VCG_Heap => VCGHeap, VC_Contains_Reals => ContainsReals, VC_Failure => VCGFailure); if not Cells.Is_Null_Cell (RetCell) then AddAnyShortCircuitImplications (VCGHeap, RetCell, ShortCircuitStack); PrepareLabel (VCGHeap, StmtLabel, StmtCell); SetRightArgument (StmtCell, RetCell, VCGHeap); Chain (StmtLabel, VCGHeap); end if; end if; end Assume_Function_Return_Type; ---------------------------------------------------------------------------------------------- -- Assume_Function_Return_Annotation plants an assumption of the translation -- of the function's return annotation. -- Concrete_Function is a reference to a concrete function declaration. -- Abstraction specifies whether the abstract or refined view of the -- precondition is required. 
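-- For example, for a call Inc (A) where Inc is declared with
--    --# return X + 1;
-- the planted assumption is, roughly, inc(a) = a + 1 (using <-> rather
-- than = when the function returns Boolean).  Inc, X and A are
-- illustrative names only.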
procedure Assume_Function_Return_Annotation (Actual_Function_Call : in Cells.Cell; Proof_Function_Call : in Cells.Cell; Scope : in Dictionary.Scopes; Abstraction : in Dictionary.Abstractions) --# global in CommandLineData.Content; --# in LoopStack; --# in STree.Table; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives ContainsReals, --# Dictionary.Dict, --# LexTokenManager.State, --# VCGFailure from *, --# Abstraction, --# Actual_Function_Call, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# LoopStack, --# Scope, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# Actual_Function_Call, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# LoopStack, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# VCGHeap & --# Graph.Table, --# StmtStack.S, --# VCGHeap from Abstraction, --# Actual_Function_Call, --# CommandLineData.Content, --# Dictionary.Dict, --# Graph.Table, --# LexTokenManager.State, --# LoopStack, --# Proof_Function_Call, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ShortCircuitStack, --# Statistics.TableUsage from *, --# Abstraction, --# Actual_Function_Call, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# LoopStack, --# Proof_Function_Call, --# Scope, --# ShortCircuitStack, --# STree.Table, --# VCGHeap; is Concrete_Function_Symbol : Dictionary.Symbol; Implicit_Var : Dictionary.Symbol; Assumed_Predicate : Cells.Cell; Assumed_Return_Anno : Cells.Cell; StmtCell : Cells.Cell; Persistent_Function_Call : Cells.Cell; Conjoined_Function_Defs : Cells.Cell; StmtLabel : Labels.Label; Function_Defs : CStacks.Stack; ------------------------------------------------------------------- -- Concrete_Function is a reference to a concrete function declaration. -- Abstraction specifies whether the abstract or refined view of the -- the return annotation is required. -- The_Return_Anno is a DAG containing the expression of the return -- annotation - it will be null if the function does not have a return -- annotation. -- Implicit_Var is the implicit variable of an implicit return annoatation. -- If the return annotation is explicit the Implicit_Var will be null. 
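-- The two forms of return annotation handled below are, for example:
--    --# return X + 1;         (explicit: Implicit_Var is the null symbol)
--    --# return R => R >= X;   (implicit: Implicit_Var denotes R)
-- In the implicit case the DAG built is that of the predicate "R >= X";
-- R is later substituted by the proof function call itself.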
procedure Get_Function_Return_Annotation (Concrete_Function : in Dictionary.Symbol; Abstraction : in Dictionary.Abstractions; Scope : in Dictionary.Scopes; The_Return_Anno : out Cells.Cell; Implicit_Var : out Dictionary.Symbol; Function_Defs : in out CStacks.Stack) --# global in CommandLineData.Content; --# in LoopStack; --# in STree.Table; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out VCGFailure; --# in out VCGHeap; --# derives ContainsReals, --# Dictionary.Dict, --# Function_Defs, --# LexTokenManager.State, --# Statistics.TableUsage, --# VCGFailure, --# VCGHeap from *, --# Abstraction, --# CommandLineData.Content, --# Concrete_Function, --# Dictionary.Dict, --# Function_Defs, --# LexTokenManager.State, --# LoopStack, --# Scope, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# CommandLineData.Content, --# Concrete_Function, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Function_Defs, --# LexTokenManager.State, --# LoopStack, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# VCGHeap & --# Implicit_Var from Abstraction, --# Concrete_Function, --# Dictionary.Dict, --# STree.Table & --# The_Return_Anno from Abstraction, --# CommandLineData.Content, --# Concrete_Function, --# Dictionary.Dict, --# Function_Defs, --# LexTokenManager.State, --# LoopStack, --# Scope, --# STree.Table, --# VCGHeap; is Force_Abstract : Boolean; Instantiated_Subprogram : Dictionary.Symbol; Local_Abstraction : Dictionary.Abstractions; Constraint : STree.SyntaxNode; begin if Abstraction = Dictionary.IsAbstract and then Dictionary.Get_Visibility (Scope => Dictionary.GetScope (Concrete_Function)) = Dictionary.Local then -- declaration is in body only so contract should not be abstract (not refined either... see TN JC17-035) Force_Abstract := False; else Force_Abstract := Abstraction = Dictionary.IsAbstract; end if; if Dictionary.IsInstantiation (Concrete_Function) then Instantiated_Subprogram := Concrete_Function; Local_Abstraction := Dictionary.IsAbstract; else -- not generic Instantiated_Subprogram := Dictionary.NullSymbol; Local_Abstraction := Abstraction; end if; Constraint := STree.RefToNode (Dictionary.GetPostcondition (Local_Abstraction, Concrete_Function)); if Constraint /= STree.NullNode then if STree.Syntax_Node_Type (Node => Constraint) = SP_Symbols.annotation_expression then -- It is an explicit return annotation and has no implicit -- variable. Implicit_Var := Dictionary.NullSymbol; Build_Annotation_Expression (Exp_Node => Constraint, Instantiated_Subprogram => Instantiated_Subprogram, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Concrete_Function), Calling_Scope => Scope, Force_Abstract => Force_Abstract, Loop_Stack => LoopStack, Generate_Function_Instantiations => True, VC_Failure => VCGFailure, VC_Contains_Reals => ContainsReals, VCG_Heap => VCGHeap, DAG_Root => The_Return_Anno, Function_Defs => Function_Defs); else -- It is an implicit return annotation - get the implicit -- variable. Implicit_Var := Dictionary.GetImplicitReturnVariable (Abstraction, Concrete_Function); -- Build s DAG of the expression containing the implicit -- variable. 
Build_Annotation_Expression (Exp_Node => STree.Next_Sibling (Current_Node => Constraint), Instantiated_Subprogram => Instantiated_Subprogram, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Implicit_Var), Calling_Scope => Scope, Force_Abstract => Force_Abstract, Loop_Stack => LoopStack, Generate_Function_Instantiations => True, VC_Failure => VCGFailure, VC_Contains_Reals => ContainsReals, VCG_Heap => VCGHeap, DAG_Root => The_Return_Anno, Function_Defs => Function_Defs);
end if;
else
The_Return_Anno := Cells.Null_Cell;
Implicit_Var := Dictionary.NullSymbol;
end if;
end Get_Function_Return_Annotation;
begin -- Assume_Function_Return_Annotation
-- Stack for definition of functions called within the return annotation
CStacks.CreateStack (Function_Defs);
Concrete_Function_Symbol := Cells.Get_Symbol_Value (VCGHeap, Actual_Function_Call);
Get_Function_Return_Annotation (Concrete_Function => Concrete_Function_Symbol, Abstraction => Abstraction, Scope => Scope, The_Return_Anno => Assumed_Return_Anno, Implicit_Var => Implicit_Var, Function_Defs => Function_Defs);
if not Cells.Is_Null_Cell (Assumed_Return_Anno) then
-- The function has a return annotation. Determine whether it is explicit or implicit and process accordingly.
if Dictionary.Is_Null_Symbol (Implicit_Var) then
-- It is an explicit return annotation. It needs to be made into a boolean expression (a predicate) by asserting that the function call equals the annotation expression.
if Dictionary.TypeIsBoolean (Dictionary.GetType (Concrete_Function_Symbol)) then
-- set two DAGs to be equal -- use <-> for boolean functions otherwise use =
CreateOpCell (Assumed_Predicate, VCGHeap, SP_Symbols.is_equivalent_to);
else
CreateOpCell (Assumed_Predicate, VCGHeap, SP_Symbols.equals);
end if;
-- The proof function view of a called function is always used in VCs. A persistent copy of the function call needs to be made from the temporary version on the ExprStack.
Structures.CopyStructure (Heap => VCGHeap, Root => Proof_Function_Call, RootCopy => Persistent_Function_Call);
SetLeftArgument (Assumed_Predicate, Persistent_Function_Call, VCGHeap);
SetRightArgument (Assumed_Predicate, Assumed_Return_Anno, VCGHeap);
AddAnyShortCircuitImplications (VCGHeap, Assumed_Predicate, ShortCircuitStack);
Substitutions.Substitute_Parameters (Called_Function => Actual_Function_Call, Constraint => Assumed_Predicate, VCG_Heap => VCGHeap);
else
-- It is an implicit return annotation and the Assumed_Return_Anno, with the implicit variables replaced by the proof function "call", is the boolean predicate.
Substitutions.Substitute_Implicit_Vars (Proof_Function => Proof_Function_Call, Implicit_Var => Implicit_Var, Implicit_Return_Expr => Assumed_Return_Anno, VCG_Heap => VCGHeap);
AddAnyShortCircuitImplications (VCGHeap, Assumed_Return_Anno, ShortCircuitStack);
Substitutions.Substitute_Parameters (Called_Function => Actual_Function_Call, Constraint => Assumed_Return_Anno, VCG_Heap => VCGHeap);
Assumed_Predicate := Assumed_Return_Anno;
end if;
if not CStacks.IsEmpty (Function_Defs) then
-- There are function definitions
--# accept F, 10, Function_Defs, "The stack has been emptied";
Join_And (Stack => Function_Defs, Conjunct => Conjoined_Function_Defs, VCG_Heap => VCGHeap);
--# end accept;
if not Dictionary.Is_Null_Symbol (Implicit_Var) then
-- It is an implicit return expression; substitute the implicit variable in the function definitions
Substitutions.Substitute_Implicit_Vars (Proof_Function => Proof_Function_Call, Implicit_Var => Implicit_Var, Implicit_Return_Expr => Conjoined_Function_Defs, VCG_Heap => VCGHeap);
end if;
Substitutions.Substitute_Parameters (Called_Function => Actual_Function_Call, Constraint => Conjoined_Function_Defs, VCG_Heap => VCGHeap);
-- Conjoin the function definitions with the return anno predicate
Cells.Utility.Conjoin (VCGHeap, Conjoined_Function_Defs, Assumed_Predicate);
end if;
PrepareLabel (VCGHeap, StmtLabel, StmtCell);
SetRightArgument (StmtCell, Assumed_Predicate, VCGHeap);
Chain (StmtLabel, VCGHeap);
end if;
end Assume_Function_Return_Annotation;
----------------------------------------------------------------------------------------------
-- Setup_Function_Call is called during the BuildExpnDAG "down-loop" and establishes a data structure to represent the actual parameters and globals of a function call.
-- It enters all the global variables of a function into the data structure; the parameters are entered during the "up-loop" by the procedure Process_Positional_Argument_Association or Process_Named_Argument_Association.
-- If it is a parameterless function (it may have globals) then this procedure completes the model of the function call by adding the pre-condition checks to the DAG and then adding an in-type assumption to the DAG for its return value. The function symbol is changed to its implicit proof function equivalent so that all globals become parameters of the function.
-- If the function has parameters then the completion of the function call model is performed by the procedure ProcessNameArgumentList during the BuildExpnDAG "up-loop".
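-- For example, given
--    function F (X : Integer) return Integer;
--    --# global in G;
-- Setup_Function_Call creates an argument list with two slots: the slot
-- for G is filled immediately on the down-loop by CopyInGlobals, while
-- the slot for the actual bound to X is filled on the up-loop by
-- Process_Positional_Argument_Association or
-- Process_Named_Argument_Association.  (F, X and G are illustrative.)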
procedure Setup_Function_Call (ThisScope : in Dictionary.Scopes; Prefix : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in DoRtc; --# in LineNmbr; --# in LoopStack; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out ExpnStack; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# KindOfStackedCheck from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Graph.Table, --# LexTokenManager.State, --# LoopStack, --# Prefix, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# ThisScope, --# VCGHeap & --# ContainsReals, --# Dictionary.Dict, --# Graph.Table, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# Prefix, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# ThisScope, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoRtc, --# ErrorHandler.Error_Context, --# ExpnStack, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# Prefix, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# ThisScope, --# VCGHeap & --# ExpnStack from *, --# Dictionary.Dict, --# ThisScope, --# VCGHeap; is -- Creates a data structure into which DAGs of actual parameters can be -- slotted. -- We end up with (after the empty data structure is populated by -- Process_Positional_Argument_Association (or the named equivalent)): -- function --- , --- , --- DAG -- | | -- DAG DAG -- -- (where DAG is a DAG of an actual parameter expression, a down bar is -- the A ptr and a right bar a B ptr). -- As well as the parameter DAGs, the structure will contain DAGs for any -- globals referenced by the function since, in the proof model, we don't -- have a concept of global variables. -- -- In addition, each DAG cell may have hanging off its C ptr, a cell -- conating an index cosntraint. This gets used in SubtituteParameters -- when dealing with attributes of unconstrained arrays where the actual -- parameter constrains the formal in some way. 
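-- Schematically, once populated for a call F (E1, E2) where F also
-- references a global G, the structure is roughly:
--    F ---> , ---> , ---> DAG (G)
--           |      |
--        DAG (E1)  DAG (E2)
-- with the last argument (here the global G) hanging directly off the
-- final B ptr.  (F, E1, E2 and G are illustrative names only.)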
NumberOfParameters, NumberOfGlobals, TotalArguments : Natural; Function_Symbol, Proof_Function_Symbol : Dictionary.Symbol; Abstraction : Dictionary.Abstractions; ConstraintAbstraction : Dictionary.Abstractions; Actual_Function_Call, Proof_Function_Call : Cells.Cell; ---------------------------- procedure CopyInGlobals (ParamCount : in Natural; Sym : in Dictionary.Symbol; Abstraction : in Dictionary.Abstractions) --# global in Dictionary.Dict; --# in ExpnStack; --# in Prefix; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives Statistics.TableUsage, --# VCGHeap from *, --# Abstraction, --# Dictionary.Dict, --# ExpnStack, --# ParamCount, --# Prefix, --# Sym, --# VCGHeap; is GlobalCell, StartPoint : Cells.Cell; Unused : Boolean; It : Dictionary.Iterator; function SubstituteProtectedTypeSelfReference (Sym : Dictionary.Symbol) return Dictionary.Symbol --# global in Dictionary.Dict; --# in Prefix; is Result : Dictionary.Symbol; begin -- if Sym is the implicitly-declared own variable of a protected -- type then we must replace it with the "current instance of the -- protected object" -- -- Background: given protected type PT its operations will globally -- reference and derive PT meaning, in this case, "myself". -- If an object PO of type PT (or a subtype of PT) is declared then -- calls to its operations will take the form PO.Op and the calling -- environment will be annotated in terms of PO. Therefore, when -- checking that the globals necessary for the call PO.Op are -- visible (for example), we need to replace all references to PT -- by references to PO before making the check. The Prefix Symbol -- of the call is the symbol we need to substitute in. Result := Sym; if not Dictionary.Is_Null_Symbol (Prefix) and then Dictionary.IsOwnVariable (Sym) and then Dictionary.IsProtectedType (Dictionary.GetOwner (Sym)) then Result := Prefix; end if; return Result; end SubstituteProtectedTypeSelfReference; begin -- CopyInGlobals --# accept F, 10, Unused, "Unused here OK"; CalculateInsertPoint (VCGHeap, ExpnStack, ParamCount, -- to get StartPoint, Unused); --# end accept; It := Dictionary.FirstGlobalVariable (Abstraction, Sym); while not Dictionary.IsNullIterator (It) loop CreateReferenceCell (GlobalCell, VCGHeap, SubstituteProtectedTypeSelfReference (Dictionary.CurrentSymbol (It))); if Cells.Is_Null_Cell (RightPtr (VCGHeap, StartPoint)) then SetRightArgument (StartPoint, GlobalCell, VCGHeap); else StartPoint := RightPtr (VCGHeap, StartPoint); SetLeftArgument (StartPoint, GlobalCell, VCGHeap); end if; It := Dictionary.NextSymbol (It); end loop; --# accept F, 33, Unused, "Unused here OK"; end CopyInGlobals; ------------------------------------------ begin -- Setup_Function_Call Function_Symbol := Cells.Get_Symbol_Value (VCGHeap, CStacks.Top (VCGHeap, ExpnStack)); -- The function call may have either an abstract or refined signature -- depending on where it is called. The refinement may be due to data -- refinement of an own variable or, a private data type refinement in -- which case only the pre and return annotations are refined. -- This abstraction relates to the global list and dependency relations. Abstraction := Dictionary.GetAbstraction (Function_Symbol, ThisScope); -- This abstraction relates to the pre and return annotations. 
ConstraintAbstraction := Dictionary.GetConstraintAbstraction (Function_Symbol, ThisScope);
NumberOfParameters := Dictionary.GetNumberOfSubprogramParameters (Function_Symbol);
NumberOfGlobals := Dictionary.GetNumberOfGlobalVariables (Abstraction, Function_Symbol);
TotalArguments := NumberOfParameters + NumberOfGlobals;
-- Create an empty data structure as described above to hold the actual parameters of the function call and any globals of the function, and make it the right-hand argument of the function call cell on the top of the ExpnStack.
CreateEmptyList (TotalArguments, VCGHeap, ExpnStack);
if NumberOfGlobals > 0 then
-- The function has globals so copy them into the data structure. They will appear in the data structure after the function parameters.
CopyInGlobals (NumberOfParameters, Function_Symbol, Abstraction);
end if;
-- If the function is parameterless then the function model has to be completed here on the down-loop because ProcessNameArgumentList will not be called on the up-loop to complete the function model.
-- The function model is completed by setting the Cell.Kind to a proof function and the Cell.SymbolValue to the implicitly declared proof function corresponding to the concrete function.
if NumberOfParameters = 0 then
Cells.Set_Kind (VCGHeap, CStacks.Top (VCGHeap, ExpnStack), Cell_Storage.Proof_Function);
Proof_Function_Symbol := Dictionary.GetImplicitProofFunction (Abstraction, Function_Symbol);
if DoRtc then
-- If the function is parameterless the function model needs to be completed by inserting the precondition checks into the DAG and then adding an assumption that the return value from the function is in-type.
-- Only the proof functions appear in VCGs and so the function symbol is replaced by its proof function equivalent where all the globals copied in above appear as parameters of the function.
-- Make a copy of the concrete function call from the ExpnStack because Assume_Function_Return_Annotation requires references to both the concrete and proof views of the function call
Cells.Create_Cell (Heap => VCGHeap, CellName => Actual_Function_Call);
Cells.Copy_Contents (Heap => VCGHeap, Source => CStacks.Top (VCGHeap, ExpnStack), Destination => Actual_Function_Call);
-- Set the current top of ExpnStack to reference the implicitly declared proof function
Cells.Set_Symbol_Value (VCGHeap, CStacks.Top (VCGHeap, ExpnStack), Proof_Function_Symbol);
Proof_Function_Call := CStacks.Top (VCGHeap, ExpnStack);
-- Here Proof_Function_Call refers to the proof function "call" and a list of parameters (globals have been made parameters).
-- Plant_Precondition_Check must be called with the Cell referencing the concrete function, i.e., the actual function call.
Plant_Precondition_Check (Actual_Function_Call => Actual_Function_Call, Scope => Scope, Abstraction => ConstraintAbstraction);
UnStackRtcs (LineNmbr, VCGHeap, CheckStack, KindOfStackedCheck);
-- Assume_Function_Return_Type must be called with the Cell referencing the implicitly declared proof function, i.e., the implicit function.
Assume_Function_Return_Type (Implicit_Function => Proof_Function_Call);
-- Assume_Function_Return_Annotation requires both the concrete and proof views of the function call.
Assume_Function_Return_Annotation (Actual_Function_Call => Actual_Function_Call, Proof_Function_Call => Proof_Function_Call, Scope => Scope, Abstraction => ConstraintAbstraction); Cells.Dispose_Of_Cell (Heap => VCGHeap, CellName => Actual_Function_Call); else -- Even if run time checks are not being generated the ExpnStack -- referencing the function still has to be updated to refer to the -- corresponding implictly declared proof function. Cells.Set_Symbol_Value (VCGHeap, CStacks.Top (VCGHeap, ExpnStack), Proof_Function_Symbol); end if; end if; end Setup_Function_Call; ----------------------------------------------------------------------- procedure SetUpArrayAccess --# global in Dictionary.Dict; --# in out ExpnStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# Dictionary.Dict, --# ExpnStack, --# VCGHeap; is NumberOfDimensions : Positive; --ExpressionCell, DAGCell : Cells.Cell; TypeSym : Dictionary.Symbol; begin TypeSym := Cells.Get_Symbol_Value (VCGHeap, CStacks.Top (VCGHeap, ExpnStack)); if Dictionary.IsTypeMark (TypeSym) then NumberOfDimensions := Dictionary.GetNumberOfDimensions (TypeSym); else NumberOfDimensions := Dictionary.GetNumberOfDimensions (Dictionary.GetType (TypeSym)); end if; CreateCellKind (DAGCell, VCGHeap, Cell_Storage.List_Function); CStacks.Push (VCGHeap, DAGCell, ExpnStack); CreateEmptyList (NumberOfDimensions, VCGHeap, ExpnStack); end SetUpArrayAccess; ----------------------------------------------------------------------- -- This procedure is called during the "up-loop" of BuildDAGExpnDAG for each -- positional argument encountered (either in a function call, -- array access, or type conversion). -- A function is identified by a Cell Kind of Pending_Function on the -- Expression stack. -- It populates the data structure for the arguments established during -- the down loop traversal -- (e.g., by Setup_Function_Call for function parameters). procedure Process_Positional_Argument_Association (Node : in STree.SyntaxNode) --# global in Dictionary.Dict; --# in DoRtc; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out ExpnStack; --# in out ShortCircuitStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives CheckStack, --# ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# CheckStack, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Node, --# Scope, --# ShortCircuitStack, --# STree.Table, --# VCGHeap & --# ContainsReals from *, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Node, --# STree.Table, --# VCGHeap & --# ShortCircuitStack from *, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Node, --# Scope, --# STree.Table, --# VCGHeap; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.positional_argument_association; is ExpressionCell : Cells.Cell; TOSkind : Cells.Cell_Kind; ConversionTargetType, ConversionSourceType : Dictionary.Symbol; ConstraintCell : Cells.Cell; ConstraintIndex : Dictionary.Symbol; begin CStacks.PopOff (VCGHeap, ExpnStack, ExpressionCell); TOSkind := Cells.Get_Kind (VCGHeap, CStacks.Top (VCGHeap, ExpnStack)); if TOSkind = Cell_Storage.Pending_Function then if DoRtc then -- the wffs have provided the expected type. We extract that and use it to -- constraint check the parameter. If the function is an unchecked conversion -- and the wffs have determined that the subtype expected and given are identical, -- then no type symbol is planted and no check is generated. 
CheckConstraintRunTimeError (STree.NodeSymbol (Node), ExpressionCell, Scope, VCGHeap, ShortCircuitStack, CheckStack, ContainsReals); end if; -- We may need to convert the actual parameter by inserting some inherit -- derefences in front of it; conversion is required if we have called -- an inherited root function. The parameter in this case must be an -- object. ConvertTaggedActualIfNecessary (Cells.Get_Symbol_Value (VCGHeap, CStacks.Top (VCGHeap, ExpnStack)), VCGHeap, ExpressionCell); -- function symbol -- If the formal parameter is unconstrained and the actual is a constrained subtype, then -- the wffs will have planted a constraining type at the expression node. If we find such a -- type, we link it into the actual parameter expression DAG but using the expression's -- auxialliary (C) ptr. Linking it in this way means that it is not part of the DAG itself and won't -- be printed; however, it will be available when we want to substitute actuals for formals in -- any check of the called function's precondition. ConstraintIndex := STree.NodeSymbol (STree.Expression_From_Positional_Argument_Association (Node => Node)); if not Dictionary.Is_Null_Symbol (ConstraintIndex) then CreateCellKind (ConstraintCell, VCGHeap, Cell_Storage.Constraining_Index); Cells.Set_Symbol_Value (VCGHeap, ConstraintCell, ConstraintIndex); SetAuxPtr (ExpressionCell, ConstraintCell, VCGHeap); end if; InsertParameterInNextFreeSlot (CStacks.Top (VCGHeap, ExpnStack), ExpressionCell, VCGHeap); elsif TOSkind = Cell_Storage.List_Function then if DoRtc then CheckConstraintRunTimeError (STree.NodeSymbol (Node), ExpressionCell, Scope, VCGHeap, ShortCircuitStack, CheckStack, ContainsReals); end if; InsertParameterInNextFreeSlot (CStacks.Top (VCGHeap, ExpnStack), ExpressionCell, VCGHeap); elsif TOSkind = Cell_Storage.Fixed_Var then ConversionSourceType := STree.NodeSymbol (Node); ConversionTargetType := Cells.Get_Symbol_Value (VCGHeap, CStacks.Top (VCGHeap, ExpnStack)); CStacks.Pop (VCGHeap, ExpnStack); -- get rid of type mark CStacks.Push (VCGHeap, ExpressionCell, ExpnStack); -- restore expression if IsRealType (ConversionSourceType) and then (IsIntegerType (ConversionTargetType) or else IsModularType (ConversionTargetType)) then PushFunction (Cell_Storage.Trunc_Function, VCGHeap, ExpnStack); end if; if DoRtc then if IsScalarType (ConversionTargetType) then CheckConstraintRunTimeError (ConversionTargetType, CStacks.Top (VCGHeap, ExpnStack), Scope, VCGHeap, ShortCircuitStack, CheckStack, ContainsReals); end if; end if; else -- must be dealing with first indexed expression of array access if DoRtc then CheckConstraintRunTimeError (STree.NodeSymbol (Node), ExpressionCell, Scope, VCGHeap, ShortCircuitStack, CheckStack, ContainsReals); end if; SetUpArrayAccess; InsertParameterInNextFreeSlot (CStacks.Top (VCGHeap, ExpnStack), ExpressionCell, VCGHeap); end if; end Process_Positional_Argument_Association; ----------------------------------------------------------------------- -- This procedure is called during the "up-loop" of BuildDAGExpnDAG for each -- named argument encountered in a function call. -- It populates the data structure for the arguments (actual parameters) -- established during the down loop traversal by Setup_Function_Call. 
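-- For example, for a call F (B => E2, A => E1) the formal name in each
-- association is mapped to its position by GetParamNumber, so DAG (E1)
-- is inserted in slot 1 and DAG (E2) in slot 2; the slots always follow
-- the declaration order of the formal parameters, not the textual order
-- of the call.  (F, A, B, E1 and E2 are illustrative names only.)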
procedure Process_Named_Argument_Association (Node : in STree.SyntaxNode) --# global in Dictionary.Dict; --# in DoRtc; --# in LexTokenManager.State; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out ExpnStack; --# in out ShortCircuitStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives CheckStack, --# Statistics.TableUsage from *, --# CheckStack, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Node, --# Scope, --# ShortCircuitStack, --# STree.Table, --# VCGHeap & --# ContainsReals from *, --# Dictionary.Dict, --# DoRtc, --# Node, --# STree.Table & --# ExpnStack from *, --# VCGHeap & --# ShortCircuitStack from *, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Node, --# Scope, --# STree.Table, --# VCGHeap & --# VCGHeap from *, --# CheckStack, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# LexTokenManager.State, --# Node, --# Scope, --# ShortCircuitStack, --# STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.named_argument_association; is InsertPoint, ExpressionCell : Cells.Cell; FunctionSym : Dictionary.Symbol; ParamPos : Positive; LastOne : Boolean; ConstraintCell : Cells.Cell; ConstraintIndex : Dictionary.Symbol; ---------------------------------------------------------------- function FindIdentifier (Node : STree.SyntaxNode) return STree.SyntaxNode --# global in STree.Table; is IdentNode : STree.SyntaxNode; begin if STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => Node)) = SP_Symbols.simple_name then IdentNode := STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => Node)); else IdentNode := STree.Child_Node (Current_Node => STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node))); end if; return IdentNode; end FindIdentifier; -------------------------------------------------------------- procedure GetParamNumber (Name : in LexTokenManager.Lex_String; FunSym : in Dictionary.Symbol; ParamNo : out Positive) --# global in Dictionary.Dict; --# in LexTokenManager.State; --# derives ParamNo from Dictionary.Dict, --# FunSym, --# LexTokenManager.State, --# Name; is It : Dictionary.Iterator; Sym : Dictionary.Symbol; begin It := Dictionary.FirstSubprogramParameter (FunSym); SystemErrors.RT_Assert (C => not Dictionary.IsNullIterator (It), Sys_Err => SystemErrors.Precondition_Failure, Msg => "Can't find first subprogram parameter in BuildExpnDAG.GetParamNumber"); loop Sym := Dictionary.CurrentSymbol (It); exit when LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Dictionary.GetSimpleName (Sym), Lex_Str2 => Name) = LexTokenManager.Str_Eq; It := Dictionary.NextSymbol (It); exit when Dictionary.IsNullIterator (It); end loop; ParamNo := Dictionary.GetSubprogramParameterNumber (Sym); end GetParamNumber; begin -- Process_Named_Argument_Association --we must be dealing with a function call CStacks.PopOff (VCGHeap, ExpnStack, ExpressionCell); if DoRtc then CheckConstraintRunTimeError (STree.NodeSymbol (Node), ExpressionCell, Scope, VCGHeap, ShortCircuitStack, CheckStack, ContainsReals); end if; FunctionSym := Cells.Get_Symbol_Value (VCGHeap, CStacks.Top (VCGHeap, ExpnStack)); -- We may need to convert the actual parameter by inserting some inherit -- derefences in front of it; conversion is required if we have called -- an inherited root function. The parameter in this case must be an -- object. 
      ConvertTaggedActualIfNecessary (FunctionSym, VCGHeap, ExpressionCell);
      -- If the formal parameter is unconstrained and the actual is a constrained subtype, then
      -- the wffs will have planted a constraining type at the expression node. If we find such a
      -- type, we link it into the actual parameter expression DAG but using the expression's
      -- auxiliary (C) ptr. Linking it in this way means that it is not part of the DAG itself and won't
      -- be printed; however, it will be available when we want to substitute actuals for formals in
      -- any check of the called function's precondition.
      ConstraintIndex := STree.NodeSymbol (STree.Expression_From_Named_Argument_Association (Node => Node));
      if not Dictionary.Is_Null_Symbol (ConstraintIndex) then
         CreateCellKind (ConstraintCell, VCGHeap, Cell_Storage.Constraining_Index);
         Cells.Set_Symbol_Value (VCGHeap, ConstraintCell, ConstraintIndex);
         SetAuxPtr (ExpressionCell, ConstraintCell, VCGHeap);
      end if;
      GetParamNumber (STree.Node_Lex_String (Node => FindIdentifier (Node)),
                      FunctionSym, -- to get
                      ParamPos);
      CalculateInsertPoint (VCGHeap, ExpnStack, ParamPos, -- to get
                            InsertPoint, LastOne);
      if LastOne then
         SetRightArgument (InsertPoint, ExpressionCell, VCGHeap);
      else
         SetLeftArgument (InsertPoint, ExpressionCell, VCGHeap);
      end if;
   end Process_Named_Argument_Association;

   -----------------------------------------------------------------------

   -- This procedure is called during the "up-loop" of BuildDAGExpnDAG once
   -- all of the arguments (with positional or named association) have been
   -- processed by Process_Positional_Argument_Association or
   -- Process_Named_Argument_Association respectively.
   -- A function is identified by a Cell Kind of Pending_Function on the
   -- Expression stack and an array aggregate a Cell Kind of List_Function.
   -- It completes the model of a function call or an array access started
   -- on the down loop by Setup_Function_Call or SetUpArrayAccess.
   -- A function call model is completed by:
   --    planting a check corresponding to its precondition
   --    changing the function symbol to its implicit proof function equivalent
   --    (this is done to replace global variables by parameters as is
   --    required for FDL functions)
   --    planting assumptions that all its parameters are in type after the call
   --    planting an assumption that the result of the function is in-type.
   -- An array access model is completed by:
   --    translating the array access to an FDL element function
   --    associating the index type with the array access for potential use with
   --    translating unconstrained array attributes.
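   -- Illustrative sketch of the completed models: a call F (X), where F has a
   -- global G, is finished off as an application of the implicit proof function,
   -- roughly f (X, G), while an indexed name A (I) is finished off using the FDL
   -- element function, roughly element (A, [I]).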
procedure ProcessNameArgumentList --# global in AssumeRvalues; --# in CommandLineData.Content; --# in DoRtc; --# in ExpnScope; --# in LineNmbr; --# in LoopStack; --# in ReferencedVars; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out ExpnStack; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# KindOfStackedCheck from *, --# AssumeRvalues, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoRtc, --# ExpnScope, --# ExpnStack, --# FlowHeap, --# Graph.Table, --# LexTokenManager.State, --# LoopStack, --# ReferencedVars, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ContainsReals, --# Dictionary.Dict, --# Graph.Table, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# AssumeRvalues, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoRtc, --# ExpnScope, --# ExpnStack, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# ReferencedVars, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from AssumeRvalues, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoRtc, --# ErrorHandler.Error_Context, --# ExpnScope, --# ExpnStack, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# ReferencedVars, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ExpnStack from *, --# VCGHeap & --# FlowHeap from *, --# AssumeRvalues, --# DoRtc, --# ExpnStack, --# ReferencedVars, --# VCGHeap; is TOSkind : Cells.Cell_Kind; Actual_Function_Call : Cells.Cell; Proof_Function_Call : Cells.Cell; Temp : Cells.Cell; TypeSym : Dictionary.Symbol; FunctionSym : Dictionary.Symbol; Abstraction : Dictionary.Abstractions; ConstraintAbstraction : Dictionary.Abstractions; begin TOSkind := Cells.Get_Kind (VCGHeap, CStacks.Top (VCGHeap, ExpnStack)); if TOSkind = Cell_Storage.Pending_Function then Cells.Set_Kind (VCGHeap, CStacks.Top (VCGHeap, ExpnStack), Cell_Storage.Proof_Function); -- Make a copy of the concrete function call from the -- ExpnStack because Assume_Function_Return_Annotation requires -- references to both the concrete and proof views of the of the -- function call Cells.Create_Cell (Heap => VCGHeap, CellName => Actual_Function_Call); Cells.Copy_Contents (Heap => VCGHeap, Source => CStacks.Top (VCGHeap, ExpnStack), Destination => Actual_Function_Call); FunctionSym := Cells.Get_Symbol_Value (VCGHeap, Actual_Function_Call); -- We need to know whether we are constructing the function call model using the -- abstract or refined signature for it. Note that we need to consider this separately -- for flow annotations and proof annotations; this is because SEPR 1694 introduced the -- use of a second, refined constraint in the case of subprograms that manipulate a private -- type. 
         -- In such cases, where there is no own variable refinement, we may use the abstract flow
         -- annotation and the refined proof annotation - so two abstractions are involved, thus:
         Abstraction           := Dictionary.GetAbstraction (FunctionSym, ExpnScope);
         ConstraintAbstraction := Dictionary.GetConstraintAbstraction (FunctionSym, ExpnScope);
         -- The function call model is completed by changing the function view
         -- from the concrete to the implicitly declared proof function
         -- equivalent
         Cells.Set_Symbol_Value (VCGHeap, CStacks.Top (VCGHeap, ExpnStack), Dictionary.GetImplicitProofFunction (Abstraction, FunctionSym));
         Proof_Function_Call := CStacks.Top (VCGHeap, ExpnStack);
         -- Here Proof_Function_Call refers to the proof function "call"
         -- and a list of parameters (globals have been made parameters).
         if DoRtc then
            Plant_Precondition_Check (Actual_Function_Call => Actual_Function_Call, Scope => Scope, Abstraction => ConstraintAbstraction);
            -- Before unstacking any RTCs associated with the now complete
            -- function call, we plant hypotheses to assume that all R-values
            -- (including the function's parameters) are in type.
            CheckPlantRvalueAssumptions;
            -- then unstack checks
            UnStackRtcs (LineNmbr, VCGHeap, CheckStack, KindOfStackedCheck);
            -- Assume_Function_Return_Type must be called with the
            -- Cell referencing the implicitly declared proof function, i.e.,
            -- the implicit function.
            Assume_Function_Return_Type (Implicit_Function => Proof_Function_Call);
            -- Assume_Function_Return_Annotation requires both the concrete and
            -- proof views of the function call.
            Assume_Function_Return_Annotation
              (Actual_Function_Call => Actual_Function_Call,
               Proof_Function_Call  => Proof_Function_Call,
               Scope                => Scope,
               Abstraction          => ConstraintAbstraction);
         end if;
         Cells.Dispose_Of_Cell (Heap => VCGHeap, CellName => Actual_Function_Call);
      elsif TOSkind = Cell_Storage.List_Function then
         -- complete element model and store type so far in case of further
         -- indexing (to handle array of arrays case)
         CStacks.PopOff (VCGHeap, ExpnStack, Temp);
         TypeSym := Cells.Get_Symbol_Value (VCGHeap, CStacks.Top (VCGHeap, ExpnStack));
         if Dictionary.IsTypeMark (TypeSym) then
            TypeSym := Dictionary.GetArrayComponent (TypeSym);
         else
            TypeSym := Dictionary.GetArrayComponent (Dictionary.GetType (TypeSym));
         end if;
         CStacks.Push (VCGHeap, Temp, ExpnStack);
         PushOperator (Binary, SP_Symbols.comma, VCGHeap, ExpnStack);
         PushFunction (Cell_Storage.Element_Function, VCGHeap, ExpnStack);
         -- Note the TypeSym of the array component here.
This used later on -- in BuildGraph.ModelProcedureCall to get the type of an array element -- actual parameter Cells.Set_Symbol_Value (VCGHeap, CStacks.Top (VCGHeap, ExpnStack), TypeSym); -- elsif TOSkind = Cell_Storage.Fixed_Var -- then -- null; --type conversions not done yet end if; end ProcessNameArgumentList; ----------------------------------------------------------------------- procedure ModelQualifiedExpression (Node : in STree.SyntaxNode) --# global in Dictionary.Dict; --# in DoRtc; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out ExpnStack; --# in out ShortCircuitStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives CheckStack, --# ShortCircuitStack, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Node, --# Scope, --# ShortCircuitStack, --# STree.Table, --# VCGHeap & --# ContainsReals from *, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Node, --# STree.Table, --# VCGHeap & --# ExpnStack from *, --# Node, --# STree.Table, --# VCGHeap & --# VCGHeap from *, --# CheckStack, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Node, --# Scope, --# ShortCircuitStack, --# STree.Table; is ExpnCell, TypeMarkCell : Cells.Cell; begin if STree.Syntax_Node_Type (Node => STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node))) = SP_Symbols.expression then -- discard type indication and return its argument to top of stack; CStacks.PopOff (VCGHeap, ExpnStack, ExpnCell); -- the topmost stack cell contains the typemark; CStacks.PopOff (VCGHeap, ExpnStack, TypeMarkCell); CStacks.Push (VCGHeap, ExpnCell, ExpnStack); if DoRtc then CheckConstraintRunTimeError (Cells.Get_Symbol_Value (VCGHeap, TypeMarkCell), ExpnCell, Scope, VCGHeap, ShortCircuitStack, CheckStack, ContainsReals); end if; end if; end ModelQualifiedExpression; ---------------------------------------------------------------------- procedure UpProcessAggregateChoice (Node : in STree.SyntaxNode) --# global in STree.Table; --# in out ExpnStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# ExpnStack, --# Node, --# STree.Table, --# VCGHeap; is RangeNodeType : SP_Symbols.SP_Symbol; RangeExpression : Cells.Cell; begin RangeNodeType := STree.Syntax_Node_Type (Node => STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node))); if RangeNodeType = SP_Symbols.simple_expression then PushOperator (Binary, SP_Symbols.double_dot, VCGHeap, ExpnStack); elsif RangeNodeType = SP_Symbols.range_constraint then TransformRangeConstraint (VCGHeap, ExpnStack); CStacks.PopOff (VCGHeap, ExpnStack, RangeExpression); CStacks.Pop (VCGHeap, ExpnStack); -- discard type mark part of range CStacks.Push (VCGHeap, RangeExpression, ExpnStack); elsif Cells.Get_Kind (VCGHeap, CStacks.Top (VCGHeap, ExpnStack)) = Cell_Storage.Fixed_Var then -- type mark found TransformTypeName (VCGHeap, ExpnStack); end if; end UpProcessAggregateChoice; --------------------------------------------------------------------- procedure UpProcessNamedAssociationRep (Node : in STree.SyntaxNode) --# global in Dictionary.Dict; --# in DoRtc; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out ExpnStack; --# in out ShortCircuitStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives CheckStack, --# ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# CheckStack, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Node, 
--# Scope, --# ShortCircuitStack, --# STree.Table, --# VCGHeap & --# ContainsReals from *, --# Dictionary.Dict, --# DoRtc, --# Node, --# STree.Table & --# ShortCircuitStack from *, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Node, --# Scope, --# STree.Table, --# VCGHeap; is AggExp : Cells.Cell; begin -- check components of array & record aggregates using named assoc if DoRtc then CheckConstraintRunTimeError (STree.NodeSymbol (Node), CStacks.Top (VCGHeap, ExpnStack), Scope, VCGHeap, ShortCircuitStack, CheckStack, ContainsReals); end if; PushOperator (Binary, SP_Symbols.becomes, VCGHeap, ExpnStack); if DoingArrayAggregate (VCGHeap, ExpnStack) then if STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => Node)) = SP_Symbols.named_association_rep then PushOperator (Binary, SP_Symbols.comma, VCGHeap, ExpnStack); end if; else -- record CStacks.PopOff (VCGHeap, ExpnStack, AggExp); InsertAssociation (CStacks.Top (VCGHeap, ExpnStack), AggExp, VCGHeap); end if; end UpProcessNamedAssociationRep; --------------------------------------------------------------------- procedure UpProcessNamedRecordComponentAssociation (Node : in STree.SyntaxNode) --# global in Dictionary.Dict; --# in DoRtc; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out ExpnStack; --# in out ShortCircuitStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives CheckStack, --# ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# CheckStack, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Node, --# Scope, --# ShortCircuitStack, --# STree.Table, --# VCGHeap & --# ContainsReals from *, --# Dictionary.Dict, --# DoRtc, --# Node, --# STree.Table & --# ShortCircuitStack from *, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Node, --# Scope, --# STree.Table, --# VCGHeap; is AggExp : Cells.Cell; begin -- Node is named_record_component_association -- Direction is UP -- TOS is expression to be associated -- 2nd TOS is field name -- 3rd TOS is incomplete aggregate being constructed. 
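      -- Illustrative sketch: for an association Fld => Expr the "becomes" operator
      -- pushed below pairs the field name with the expression, roughly Fld := Expr,
      -- and InsertAssociation then attaches that pair to the incomplete aggregate
      -- sitting further down the stack.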
-- check components of array & record aggregates using named assoc if DoRtc then CheckConstraintRunTimeError (STree.NodeSymbol (Node), CStacks.Top (VCGHeap, ExpnStack), Scope, VCGHeap, ShortCircuitStack, CheckStack, ContainsReals); end if; -- associated field name with expression PushOperator (Binary, SP_Symbols.becomes, VCGHeap, ExpnStack); CStacks.PopOff (VCGHeap, ExpnStack, AggExp); InsertAssociation (CStacks.Top (VCGHeap, ExpnStack), AggExp, VCGHeap); end UpProcessNamedRecordComponentAssociation; --------------------------------------------------------------------- procedure UpProcessPositionalRecordComponentAssociation (Node : in STree.SyntaxNode) --# global in Dictionary.Dict; --# in DoRtc; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out ExpnStack; --# in out ShortCircuitStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives CheckStack, --# ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# CheckStack, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Node, --# Scope, --# ShortCircuitStack, --# STree.Table, --# VCGHeap & --# ContainsReals from *, --# Dictionary.Dict, --# DoRtc, --# Node, --# STree.Table & --# ShortCircuitStack from *, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Node, --# Scope, --# STree.Table, --# VCGHeap; is AggExp, TypeCell : Cells.Cell; begin -- Node is positional_record_component_association -- Direction is UP -- TOS is expression to be associated -- 2nd TOS is incomplete aggregate being constructed. -- 3rd TOS is agggregate counter giving current field number -- check components for RTCs if DoRtc then CheckConstraintRunTimeError (STree.NodeSymbol (Node), CStacks.Top (VCGHeap, ExpnStack), Scope, VCGHeap, ShortCircuitStack, CheckStack, ContainsReals); end if; CreateFixedVarCell (TypeCell, VCGHeap, Dictionary.GetRecordComponent (AggregateType (VCGHeap, ExpnStack), CurrentFieldOrIndex (VCGHeap, ExpnStack))); CStacks.Push (VCGHeap, TypeCell, ExpnStack); SwitchAndPush (SP_Symbols.becomes, VCGHeap, ExpnStack); IncCurrentFieldOrIndex (ExpnStack, VCGHeap); CStacks.PopOff (VCGHeap, ExpnStack, AggExp); InsertAssociation (CStacks.Top (VCGHeap, ExpnStack), AggExp, VCGHeap); end UpProcessPositionalRecordComponentAssociation; --------------------------------------------------------------------- procedure UpProcessAggregateOrExpression (Node : in STree.SyntaxNode) --# global in Dictionary.Dict; --# in DoRtc; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out ExpnStack; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives CheckStack, --# LexTokenManager.State from *, --# CheckStack, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Node, --# Scope, --# ShortCircuitStack, --# STree.Table, --# VCGHeap & --# ContainsReals from *, --# Dictionary.Dict, --# DoRtc, --# Node, --# STree.Table & --# ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# CheckStack, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# LexTokenManager.State, --# Node, --# Scope, --# ShortCircuitStack, --# STree.Table, --# VCGHeap & --# ShortCircuitStack from *, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Node, --# Scope, --# STree.Table, --# VCGHeap; is IndexType : Dictionary.Symbol; CounterCell, AttribCell, TypeCell : Cells.Cell; CounterString : LexTokenManager.Lex_String; AggExp : Cells.Cell; begin --UpProcessAggregateOrExpression if STree.Syntax_Node_Type (Node => STree.Parent_Node 
(Current_Node => Node)) = SP_Symbols.positional_association_rep or else STree.Next_Sibling (Current_Node => Node) /= STree.NullNode then -- check components of array & record aggregates using -- positional association, but not the others part if DoRtc then CheckConstraintRunTimeError (STree.NodeSymbol (Node), CStacks.Top (VCGHeap, ExpnStack), Scope, VCGHeap, ShortCircuitStack, CheckStack, ContainsReals); end if; if DoingArrayAggregate (VCGHeap, ExpnStack) then CreateCellKind (TypeCell, VCGHeap, Cell_Storage.Fixed_Var); IndexType := Dictionary.GetArrayIndex (AggregateType (VCGHeap, ExpnStack), 1); Cells.Set_Symbol_Value (VCGHeap, TypeCell, IndexType); CStacks.Push (VCGHeap, TypeCell, ExpnStack); CreateAttribValueCell (AttribCell, VCGHeap, LexTokenManager.First_Token); CStacks.Push (VCGHeap, AttribCell, ExpnStack); PushOperator (Binary, SP_Symbols.apostrophe, VCGHeap, ExpnStack); if Dictionary.TypeIsEnumeration (IndexType) then for I in Integer range 2 .. CurrentFieldOrIndex (VCGHeap, ExpnStack) loop --# accept F, 41, "Stable expression expected here"; if Dictionary.TypeIsBoolean (IndexType) then PushOperator (Unary, SP_Symbols.RWnot, VCGHeap, ExpnStack); else PushFunction (Cell_Storage.Succ_Function, VCGHeap, ExpnStack); end if; --# end accept; end loop; else --index type is numeric discrete if CurrentFieldOrIndex (VCGHeap, ExpnStack) > 1 then LexTokenManager.Insert_Nat (N => CurrentFieldOrIndex (VCGHeap, ExpnStack) - 1, Lex_Str => CounterString); CreateManifestConstCell (CounterCell, VCGHeap, CounterString); CStacks.Push (VCGHeap, CounterCell, ExpnStack); PushOperator (Binary, SP_Symbols.plus, VCGHeap, ExpnStack); end if; end if; PushFunction (Cell_Storage.List_Function, VCGHeap, ExpnStack); else --record aggregate CreateFixedVarCell (TypeCell, VCGHeap, Dictionary.GetRecordComponent (AggregateType (VCGHeap, ExpnStack), CurrentFieldOrIndex (VCGHeap, ExpnStack))); CStacks.Push (VCGHeap, TypeCell, ExpnStack); end if; SwitchAndPush (SP_Symbols.becomes, VCGHeap, ExpnStack); IncCurrentFieldOrIndex (ExpnStack, VCGHeap); if DoingArrayAggregate (VCGHeap, ExpnStack) then if STree.Next_Sibling (Current_Node => Node) = STree.NullNode then PushOperator (Binary, SP_Symbols.comma, VCGHeap, ExpnStack); end if; else -- record CStacks.PopOff (VCGHeap, ExpnStack, AggExp); InsertAssociation (CStacks.Top (VCGHeap, ExpnStack), AggExp, VCGHeap); end if; end if; end UpProcessAggregateOrExpression; --------------------------------------------------------------------- procedure UpProcessComponentAssociation (Node : in STree.SyntaxNode) --# global in Dictionary.Dict; --# in DoRtc; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out ExpnStack; --# in out ShortCircuitStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives CheckStack, --# ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# CheckStack, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Node, --# Scope, --# ShortCircuitStack, --# STree.Table, --# VCGHeap & --# ContainsReals from *, --# Dictionary.Dict, --# DoRtc, --# Node, --# STree.Table & --# ShortCircuitStack from *, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Node, --# Scope, --# STree.Table, --# VCGHeap; is begin if STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => Node))) /= STree.NullNode then -- check the others part of an array aggregate with either -- named or positional association; but does not cover a -- lone others part. 
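         -- Illustrative sketch: in an aggregate such as (1 => A, others => B) the
         -- expression B is checked by the code below, whereas a lone (others => B)
         -- aggregate is handled later, in UpProcessAggregate.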
if DoRtc then CheckConstraintRunTimeError (STree.NodeSymbol (Node), CStacks.Top (VCGHeap, ExpnStack), Scope, VCGHeap, ShortCircuitStack, CheckStack, ContainsReals); end if; SwitchAndPush (SP_Symbols.comma, VCGHeap, ExpnStack); end if; end UpProcessComponentAssociation; --------------------------------------------------------------------- procedure UpProcessAggregate --# global in Dictionary.Dict; --# in DoRtc; --# in Node; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out ExpnStack; --# in out ShortCircuitStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives CheckStack, --# ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# CheckStack, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Node, --# Scope, --# ShortCircuitStack, --# STree.Table, --# VCGHeap & --# ContainsReals from *, --# Dictionary.Dict, --# DoRtc, --# Node, --# STree.Table & --# ShortCircuitStack from *, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Node, --# Scope, --# STree.Table, --# VCGHeap; is TempAgg : Cells.Cell; begin --RTC--a lone others clause gets picked up here and will need a RTC. -- In this case TOS is the expn dag of the others expression and -- all that happens is that a mk_array get put on top of it so -- that (others => X) becomes mk_array (X). The expected type -- will be found at the component_association node (1 down) and -- the check is only needed if the node 2 down is an agg_or_exp. --RTC--added if statement to control whether RTC on lone others needed if STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => Node))) = SP_Symbols.aggregate_or_expression then --there is a lone others clause that needs a RTC and the expn is TOS if DoRtc then CheckConstraintRunTimeError (STree.NodeSymbol (STree.Child_Node (Current_Node => Node)), CStacks.Top (VCGHeap, ExpnStack), Scope, VCGHeap, ShortCircuitStack, CheckStack, ContainsReals); end if; end if; --RTC--end of new if clause -- Tidy up expression stack -- At this point the stack is rather confused (even for an ex-FORTH programmer. -- If we are doing a record then TOS is the IncompleteAggregate function and its arguments, -- 2nd TOS is the aggregate counter used for positional association. 
-- -- If we are doing an array then TOS is the comma-delimited list of arguments to the MkAggregate func, -- 2nd TOS is the IncompleteAggregate function itself, -- 3rd TOS is the aggregate counter -- CStacks.PopOff (VCGHeap, ExpnStack, TempAgg); -- hold the aggregate expression or list if Cells.Get_Kind (VCGHeap, CStacks.Top (VCGHeap, ExpnStack)) = Cell_Storage.Aggregate_Counter then -- we are doing a record and just need to get rid of the counter CStacks.Pop (VCGHeap, ExpnStack); -- get rid of counter else -- we are doing an array and TOS is the Incomplete function which needs to be connected to -- the comma-delimited list SetRightArgument (CStacks.Top (VCGHeap, ExpnStack), TempAgg, VCGHeap); -- hold the now complete aggregate expression and then get rid of the exposed counter CStacks.PopOff (VCGHeap, ExpnStack, TempAgg); CStacks.Pop (VCGHeap, ExpnStack); end if; -- Convert aggregate to a finished MkAggregate function Cells.Set_Kind (VCGHeap, TempAgg, Cell_Storage.Mk_Aggregate); -- Finally, restore aggregate DAG to TOS CStacks.Push (VCGHeap, TempAgg, ExpnStack); end UpProcessAggregate; --------------------------------------------------------------------- -- Attribute Processing -- --------------------------------------------------------------------- procedure DownProcessAttributeIdent (Node : in STree.SyntaxNode) --# global in STree.Table; --# in out ExpnStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# ExpnStack, --# Node, --# STree.Table, --# VCGHeap; is DAGCell : Cells.Cell; begin CreateAttribValueCell (DAGCell, VCGHeap, STree.Node_Lex_String (Node => Node)); CStacks.Push (VCGHeap, DAGCell, ExpnStack); PushOperator (Binary, SP_Symbols.apostrophe, VCGHeap, ExpnStack); end DownProcessAttributeIdent; --------------------------------------------------------------------- -- Note name here is different from similar subunit within BuildAnnotationExpnDAG -- do avoid clash with Ada83 "no identical subunit names" rule. procedure UpAttributeDesignator (Node : in STree.SyntaxNode) --# global in Dictionary.Dict; --# in DoRtc; --# in STree.Table; --# in out CheckStack; --# in out ExpnStack; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives CheckStack, --# ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# CheckStack, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# LexTokenManager.State, --# Node, --# ShortCircuitStack, --# STree.Table, --# VCGHeap & --# LexTokenManager.State from *, --# Dictionary.Dict, --# ExpnStack, --# Node, --# STree.Table, --# VCGHeap & --# ShortCircuitStack from *, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# LexTokenManager.State, --# Node, --# STree.Table, --# VCGHeap; is separate; --------------------------------------------------------------------- -- Identifier and Selected Components -- --------------------------------------------------------------------- -- Called whenever an identifer is encountered in the expression -- An identifier appears in many places in the grammar. It is paricularly -- interesting if it is a variable, a type mark or a function call. -- If it is a function call the procedure Setup_Function_Call establishes -- the environment to represent the function arguments and ProcessIdentifier -- creates a Cell to represent the function on the Expression Stack with a -- Cell Kind of Pending_Function. 
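   -- Illustrative summary of the cases handled below: a variable is pushed as a
   -- Reference cell (and recorded in the set of referenced variables), a function
   -- becomes a Pending_Function cell followed by Setup_Function_Call, a type mark
   -- becomes a Fixed_Var cell (with a record subtype replaced by its root type),
   -- and anything else is treated as a Named_Const.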
procedure ProcessIdentifier (Node : in STree.SyntaxNode; ThisScope : in Dictionary.Scopes) --# global in AssumeRvalues; --# in CommandLineData.Content; --# in DoRtc; --# in LineNmbr; --# in LoopStack; --# in ReferencedVars; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out ExpnStack; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# KindOfStackedCheck from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Graph.Table, --# LexTokenManager.State, --# LoopStack, --# Node, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# ThisScope, --# VCGHeap & --# ContainsReals, --# Dictionary.Dict, --# Graph.Table, --# LexTokenManager.State, --# ShortCircuitStack, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# Node, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# ThisScope, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoRtc, --# ErrorHandler.Error_Context, --# ExpnStack, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# Node, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# ThisScope, --# VCGHeap & --# ExpnStack from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# STree.Table, --# ThisScope, --# VCGHeap & --# FlowHeap from *, --# AssumeRvalues, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# ReferencedVars, --# Scope, --# STree.Table, --# ThisScope & --# Statistics.TableUsage from *, --# AssumeRvalues, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# Node, --# ReferencedVars, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# ThisScope, --# VCGHeap; is Sym : Dictionary.Symbol; DAGCell : Cells.Cell; begin Sym := Dictionary.LookupItem (Name => STree.Node_Lex_String (Node => Node), Scope => ThisScope, Context => Dictionary.ProgramContext, Full_Package_Name => False); -- If we call an inherited root function then the above call will fail -- to find it and returns a null symbol. In this case we can check the -- syntax tree for the symbol of the root operation that will have been -- planted by StackIdentifier. 
if Dictionary.Is_Null_Symbol (Sym) then Sym := STree.NodeSymbol (Node); end if; Cells.Create_Cell (VCGHeap, DAGCell); if Dictionary.Is_Variable (Sym) then -- each time we find a referenced variable we add it to the set of referenced vars AddRvalueSymbol (FlowHeap, ReferencedVars, Sym); Cells.Set_Kind (VCGHeap, DAGCell, Cell_Storage.Reference); Cells.Set_Symbol_Value (VCGHeap, DAGCell, Sym); CStacks.Push (VCGHeap, DAGCell, ExpnStack); elsif Dictionary.IsFunction (Sym) then Cells.Set_Kind (VCGHeap, DAGCell, Cell_Storage.Pending_Function); Cells.Set_Symbol_Value (VCGHeap, DAGCell, Sym); --LexTokenManager.InsertNat (1, LexStr); --Cells.Set_Lex_Str (VCGHeap, DAGCell, LexStr); CStacks.Push (VCGHeap, DAGCell, ExpnStack); Setup_Function_Call (ThisScope => ThisScope, Prefix => Dictionary.NullSymbol); elsif Dictionary.IsTypeMark (Sym) then -- If the identifier denotes a record subtype, then push its -- root type for subsequent VCG modelling... if Dictionary.TypeIsRecord (Sym) and then Dictionary.IsSubtype (Sym) then Sym := Dictionary.GetRootType (Sym); end if; Cells.Set_Kind (VCGHeap, DAGCell, Cell_Storage.Fixed_Var); Cells.Set_Symbol_Value (VCGHeap, DAGCell, Sym); CStacks.Push (VCGHeap, DAGCell, ExpnStack); else -- also check to see whether we need Rvalue for constants (see AddRValueSymbol for which sort) AddRvalueSymbol (FlowHeap, ReferencedVars, Sym); Cells.Set_Kind (VCGHeap, DAGCell, Cell_Storage.Named_Const); Cells.Set_Symbol_Value (VCGHeap, DAGCell, Sym); CStacks.Push (VCGHeap, DAGCell, ExpnStack); end if; end ProcessIdentifier; ------------------------------------------------------------------- procedure ModelRecordComponent (RecordType, Sym : in Dictionary.Symbol) --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in out ExpnStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# Dictionary.Dict, --# ExpnStack, --# LexTokenManager.State, --# RecordType, --# Sym, --# VCGHeap; is DAGCell : Cells.Cell; ExpnCell : Cells.Cell; begin CStacks.PopOff (VCGHeap, ExpnStack, ExpnCell); -- ExpnCell is a DAG representing an expression which is a record field -- Insert one or more "fld_inherit (" before the expression ModelInheritedFieldsOfTaggedRecord (Dictionary.GetSimpleName (Sym), RecordType, VCGHeap, ExpnCell); -- Then prefix it with fld_? ( CreateCellKind (DAGCell, VCGHeap, Cell_Storage.Field_Access_Function); Cells.Set_Symbol_Value (VCGHeap, DAGCell, Sym); Cells.Set_Lex_Str (VCGHeap, DAGCell, Dictionary.GetSimpleName (Sym)); --SetRightArgument (DAGCell, CStacks.Top (VCGHeap, ExpnStack)); --CStacks.Pop (VCGHeap, ExpnStack); SetRightArgument (DAGCell, ExpnCell, VCGHeap); CStacks.Push (VCGHeap, DAGCell, ExpnStack); end ModelRecordComponent; ------------------------------------------------------------------ -- Called whenever a selected_component is encountered in the expression -- A selected_component appears in many places in the grammar. -- This procedure deals with each of the places in which it may appear. -- If it is a function call the procedure Setup_Function_Call establishes -- the environment to represent the function arguments and -- ProcessSelectedComponent creates a Cell to represent the function on the -- Expression Stack with a Cell Kind of Pending_Function. 
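   -- Illustrative summary of the main cases handled below: a record component
   -- R.Fld is modelled with a field access function; a function call such as
   -- PO.F, where PO is a protected own variable, becomes a Pending_Function and
   -- PO is passed to Setup_Function_Call so that globals of F expressed in terms
   -- of the protected type are replaced by the object PO; when the selected item
   -- is itself a package, its symbol is simply recorded ready for the next lookup.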
procedure ProcessSelectedComponent (Node : in STree.SyntaxNode; ThisScope : in Dictionary.Scopes) --# global in AssumeRvalues; --# in CommandLineData.Content; --# in DoRtc; --# in LineNmbr; --# in LoopStack; --# in ReferencedVars; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out ExpnStack; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# KindOfStackedCheck from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Graph.Table, --# LexTokenManager.State, --# LoopStack, --# Node, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# ThisScope, --# VCGHeap & --# ContainsReals, --# Dictionary.Dict, --# Graph.Table, --# LexTokenManager.State, --# ShortCircuitStack, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# Node, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# ThisScope, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoRtc, --# ErrorHandler.Error_Context, --# ExpnStack, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# Node, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# ThisScope, --# VCGHeap & --# ExpnStack from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# STree.Table, --# ThisScope, --# VCGHeap & --# FlowHeap from *, --# AssumeRvalues, --# CommandLineData.Content, --# Dictionary.Dict, --# ExpnStack, --# LexTokenManager.State, --# Node, --# ReferencedVars, --# Scope, --# STree.Table, --# ThisScope, --# VCGHeap & --# Statistics.TableUsage from *, --# AssumeRvalues, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# Node, --# ReferencedVars, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# ThisScope, --# VCGHeap; is DAGCell : Cells.Cell; Sym : Dictionary.Symbol; IdentNode : STree.SyntaxNode; Prefix : Dictionary.Symbol; begin DAGCell := CStacks.Top (VCGHeap, ExpnStack); IdentNode := STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node)))); Prefix := GetTOStype (VCGHeap, ExpnStack); Sym := Dictionary.LookupSelectedItem (Prefix, --GetTOStype, STree.Node_Lex_String (Node => IdentNode), ThisScope, Dictionary.ProgramContext); -- If we call an inherited root function then the above call will fail -- to find it and returns a null symbol. In this case we can check the -- syntax tree for the symbol of the root operation that will have been -- planted by StackIdentifier. 
      if Dictionary.Is_Null_Symbol (Sym) then
         Sym := STree.NodeSymbol (Node);
      end if;
      if Dictionary.IsRecordComponent (Sym) then
         ModelRecordComponent (Prefix, Sym);
      elsif Dictionary.Is_Variable (Sym) then
         -- each time we find a referenced variable we add it to the set of referenced vars
         AddRvalueSymbol (FlowHeap, ReferencedVars, Sym);
         Cells.Set_Kind (VCGHeap, DAGCell, Cell_Storage.Reference);
         Cells.Set_Symbol_Value (VCGHeap, DAGCell, Sym);
      elsif Dictionary.IsFunction (Sym) then
         -- Before processing function, find the actual prefix symbol used
         Prefix := Cells.Get_Symbol_Value (VCGHeap, DAGCell);
         -- if Prefix is a protected object then we are handling a function call
         -- of the form PO.F. In this case we need to pass PO to Setup_Function_Call
         -- so that it can replace globals of F expressed in terms of the type PT
         -- with the object PO
         if not (Dictionary.IsOwnVariable (Prefix) and then Dictionary.GetOwnVariableProtected (Prefix)) then
            -- Prefix is NOT a protected object so we set it to null so that
            -- Setup_Function_Call won't do any substitutions. If it is a PO
            -- we leave it alone and it gets passed to Setup_Function_Call
            Prefix := Dictionary.NullSymbol;
         end if;
         -- now replace top of stack with the function
         Cells.Set_Kind (VCGHeap, DAGCell, Cell_Storage.Pending_Function);
         Cells.Set_Symbol_Value (VCGHeap, DAGCell, Sym);
         -- set up the parameter list (and copy in the globals)
         Setup_Function_Call (ThisScope => ThisScope, Prefix => Prefix);
      elsif Dictionary.IsTypeMark (Sym) then
         Cells.Set_Kind (VCGHeap, DAGCell, Cell_Storage.Fixed_Var);
         Cells.Set_Symbol_Value (VCGHeap, DAGCell, Sym);
      elsif Dictionary.IsPackage (Sym) then
         -- replace package symbol with the child ready for next lookup
         Cells.Set_Symbol_Value (VCGHeap, DAGCell, Sym);
      else
         -- check to see whether we need Rvalue for constants (see AddRValueSymbol for which sort)
         AddRvalueSymbol (FlowHeap, ReferencedVars, Sym);
         Cells.Set_Kind (VCGHeap, DAGCell, Cell_Storage.Named_Const);
         Cells.Set_Symbol_Value (VCGHeap, DAGCell, Sym);
      end if;
   end ProcessSelectedComponent;

   ---------------------------------------------------------------------
   --                           Expressions                           --
   ---------------------------------------------------------------------

   -- procedure to model XOR iaw B manual para 3.1.5
   procedure ModelXorOperator
   --# global in out ExpnStack;
   --#        in out Statistics.TableUsage;
   --#        in out VCGHeap;
   --# derives ExpnStack,
   --#         Statistics.TableUsage,
   --#         VCGHeap from *,
   --#                      ExpnStack,
   --#                      VCGHeap;
   is
      DAGCell, Left, Right : Cells.Cell;
   begin
      CStacks.PopOff (VCGHeap, ExpnStack, Right);
      CStacks.PopOff (VCGHeap, ExpnStack, Left);
      CreateOpCell (DAGCell, VCGHeap, SP_Symbols.RWor);
      SetRightArgument (DAGCell, Right, VCGHeap);
      SetLeftArgument (DAGCell, Left, VCGHeap);
      CStacks.Push (VCGHeap, DAGCell, ExpnStack);
      CreateOpCell (DAGCell, VCGHeap, SP_Symbols.RWand);
      SetRightArgument (DAGCell, Right, VCGHeap);
      SetLeftArgument (DAGCell, Left, VCGHeap);
      CStacks.Push (VCGHeap, DAGCell, ExpnStack);
      PushOperator (Unary, SP_Symbols.RWnot, VCGHeap, ExpnStack);
      PushOperator (Binary, SP_Symbols.RWand, VCGHeap, ExpnStack);
   end ModelXorOperator;

   -------------------------------------------------------------

   procedure ProcessExpression (Node : in STree.SyntaxNode)
   --# global in Dictionary.Dict;
   --#        in STree.Table;
   --#        in out ExpnStack;
   --#        in out Statistics.TableUsage;
   --#        in out VCGHeap;
   --# derives ExpnStack,
   --#         Statistics.TableUsage,
   --#         VCGHeap from *,
   --#                      Dictionary.Dict,
   --#                      ExpnStack,
   --#                      Node,
   --#                      STree.Table,
   --#                      VCGHeap;
   is
      Op_Node : STree.SyntaxNode;
      Operator : SP_Symbols.SP_Symbol;
      ResultType
: Dictionary.Symbol; procedure ModelBitwiseOperation (Operator : in SP_Symbols.SP_Symbol; TypeSym : in Dictionary.Symbol) --# global in out ExpnStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# ExpnStack, --# Operator, --# TypeSym, --# VCGHeap; is BoolOpCell : Cells.Cell; begin -- ModelBitwiseOperation CreateBoolOpCell (BoolOpCell, VCGHeap, TypeSym, Operator); -- on the stack are the arguments we want for this new function. PushOperator (Binary, SP_Symbols.comma, VCGHeap, ExpnStack); -- tos now has comma cell joining the two arguments SetRightArgument (BoolOpCell, CStacks.Top (VCGHeap, ExpnStack), VCGHeap); CStacks.Pop (VCGHeap, ExpnStack); CStacks.Push (VCGHeap, BoolOpCell, ExpnStack); -- modelling function is now on TOS end ModelBitwiseOperation; --------------------------------- begin -- ProcessExpression Op_Node := STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node)); if Op_Node /= STree.NullNode then Operator := STree.Syntax_Node_Type (Node => Op_Node); -- check to see if result type is an array and -- build special model if it is ResultType := STree.NodeSymbol (Op_Node); if Dictionary.IsTypeMark (ResultType) and then Dictionary.TypeIsArray (ResultType) then -- must be a Boolean array operation ModelBitwiseOperation (Operator, ResultType); elsif IsModularBitwiseOp (Operator, ResultType) then ModelBitwiseOperation (Operator, ResultType); else -- procede as before for scalar bool ops if Operator = SP_Symbols.RWxor then ModelXorOperator; elsif Operator = SP_Symbols.RWandthen or Operator = SP_Symbols.RWorelse then -- do nothing for AndThen's and OrElse's here as they have -- already been left-associated in ProcessRelation null; else PushOperator (Binary, Operator, VCGHeap, ExpnStack); end if; end if; end if; end ProcessExpression; ------------------------------------------------------------------- procedure ModelInClause (Node : in STree.SyntaxNode) --# global in Dictionary.Dict; --# in STree.Table; --# in out ExpnStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# Dictionary.Dict, --# ExpnStack, --# Node, --# STree.Table, --# VCGHeap; is LeftSideOfRange, RightSideOfRange, TypeMarkCell, AttribCell : Cells.Cell; RelOperationLHS, RelOperationRHS, MiddleOperator : SP_Symbols.SP_Symbol; InOperatorNode, RangeNode : STree.SyntaxNode; type StaticResults is (IsTrue, IsFalse, IsUnknown); StaticResult : StaticResults; type MembershipKinds is (Inside, Outside); MembershipKind : MembershipKinds; procedure CheckIfResultStaticallyKnown --# global in Dictionary.Dict; --# in InOperatorNode; --# in STree.Table; --# out StaticResult; --# derives StaticResult from Dictionary.Dict, --# InOperatorNode, --# STree.Table; is Sym : Dictionary.Symbol; begin Sym := STree.NodeSymbol (InOperatorNode); if Dictionary.IsEnumerationLiteral (Sym) then if Dictionary.Enumeration_Literals_Are_Equal (Left_Symbol => Sym, Right_Symbol => Dictionary.GetTrue) then StaticResult := IsTrue; elsif Dictionary.Enumeration_Literals_Are_Equal (Left_Symbol => Sym, Right_Symbol => Dictionary.GetFalse) then StaticResult := IsFalse; else StaticResult := IsUnknown; end if; else StaticResult := IsUnknown; end if; end CheckIfResultStaticallyKnown; ---------------------- procedure ModelStaticallyKnownResult --# global in Dictionary.Dict; --# in StaticResult; --# in out ExpnStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives ExpnStack 
from Dictionary.Dict, --# StaticResult, --# VCGHeap & --# Statistics.TableUsage from *, --# Dictionary.Dict, --# StaticResult, --# VCGHeap & --# VCGHeap from *, --# Dictionary.Dict, --# ExpnStack, --# StaticResult; is StaticResultCell : Cells.Cell; begin -- ModelStaticallyKnownResult CreateCellKind (StaticResultCell, VCGHeap, Cell_Storage.Named_Const); if StaticResult = IsTrue then Cells.Set_Symbol_Value (VCGHeap, StaticResultCell, Dictionary.GetTrue); else Cells.Set_Symbol_Value (VCGHeap, StaticResultCell, Dictionary.GetFalse); end if; CStacks.Push (VCGHeap, StaticResultCell, ExpnStack); end ModelStaticallyKnownResult; ---------------------- procedure CompleteInequalityModel --# global in LeftSideOfRange; --# in MiddleOperator; --# in RelOperationLHS; --# in RelOperationRHS; --# in RightSideOfRange; --# in out ExpnStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# ExpnStack, --# LeftSideOfRange, --# MiddleOperator, --# RelOperationLHS, --# RelOperationRHS, --# RightSideOfRange, --# VCGHeap; is LeftOperand : Cells.Cell; begin -- CompleteInequalityModel CStacks.PopOff (VCGHeap, ExpnStack, LeftOperand); -- restore stack keeping copy of LeftOperand CStacks.Push (VCGHeap, LeftOperand, ExpnStack); CStacks.Push (VCGHeap, LeftSideOfRange, ExpnStack); PushOperator (Binary, RelOperationLHS, VCGHeap, ExpnStack); CStacks.Push (VCGHeap, LeftOperand, ExpnStack); CStacks.Push (VCGHeap, RightSideOfRange, ExpnStack); PushOperator (Binary, RelOperationRHS, VCGHeap, ExpnStack); -- form conjunction of the two range constraints; PushOperator (Binary, MiddleOperator, VCGHeap, ExpnStack); end CompleteInequalityModel; ---------------------- function IsBooleanMembership return Boolean --# global in Dictionary.Dict; --# in InOperatorNode; --# in STree.Table; is Sym : Dictionary.Symbol; begin Sym := STree.NodeSymbol (InOperatorNode); return Dictionary.IsType (Sym) and then Dictionary.TypeIsBoolean (Sym); end IsBooleanMembership; ---------------------- procedure CompleteBooleanModel --# global in LeftSideOfRange; --# in MembershipKind; --# in RightSideOfRange; --# in out ExpnStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# ExpnStack, --# LeftSideOfRange, --# MembershipKind, --# RightSideOfRange, --# VCGHeap; is LeftOperand : Cells.Cell; begin -- CompleteBooleanModel -- model: for X in L .. 
R create (X and R) or (not X and not L) -- negate entire model if operator is 'not in' rather than 'in' CStacks.PopOff (VCGHeap, ExpnStack, LeftOperand); -- create not L CStacks.Push (VCGHeap, LeftSideOfRange, ExpnStack); PushOperator (Unary, SP_Symbols.RWnot, VCGHeap, ExpnStack); -- create not X (using copy of X) CStacks.Push (VCGHeap, LeftOperand, ExpnStack); PushOperator (Unary, SP_Symbols.RWnot, VCGHeap, ExpnStack); -- conjoin PushOperator (Binary, SP_Symbols.RWand, VCGHeap, ExpnStack); -- create X and R CStacks.Push (VCGHeap, RightSideOfRange, ExpnStack); CStacks.Push (VCGHeap, LeftOperand, ExpnStack); PushOperator (Binary, SP_Symbols.RWand, VCGHeap, ExpnStack); -- disjoin above two subexpressions PushOperator (Binary, SP_Symbols.RWor, VCGHeap, ExpnStack); -- finally, if outside rather than inside then invert answer if MembershipKind = Outside then PushOperator (Unary, SP_Symbols.RWnot, VCGHeap, ExpnStack); end if; end CompleteBooleanModel; ---------------------- begin -- ModelInClause InOperatorNode := STree.Next_Sibling (Current_Node => Node); if STree.Syntax_Node_Type (Node => InOperatorNode) = SP_Symbols.inside then MembershipKind := Inside; RelOperationLHS := SP_Symbols.greater_or_equal; RelOperationRHS := SP_Symbols.less_or_equal; MiddleOperator := SP_Symbols.RWand; else MembershipKind := Outside; RelOperationLHS := SP_Symbols.less_than; RelOperationRHS := SP_Symbols.greater_than; MiddleOperator := SP_Symbols.RWor; end if; RangeNode := STree.Next_Sibling (Current_Node => InOperatorNode); if STree.Syntax_Node_Type (Node => RangeNode) = SP_Symbols.arange then -- set is defined by a range, held in stack; if STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => RangeNode)) = SP_Symbols.attribute then -- range is defined by a range attribute on top of stack -- this has already been transformed by UpAttribute -- which has left Index'First .. Index'Last on stack LeftSideOfRange := LeftPtr (VCGHeap, CStacks.Top (VCGHeap, ExpnStack)); RightSideOfRange := RightPtr (VCGHeap, CStacks.Top (VCGHeap, ExpnStack)); CStacks.Pop (VCGHeap, ExpnStack); -- discard .. 
else -- range is defined by a pair of simple expressions; CStacks.PopOff (VCGHeap, ExpnStack, RightSideOfRange); CStacks.PopOff (VCGHeap, ExpnStack, LeftSideOfRange); end if; if IsBooleanMembership then CompleteBooleanModel; else CompleteInequalityModel; end if; else -- range is defined by a typemark on top of stack; -- form the right operands from this typemark, using FIRST and LAST; CheckIfResultStaticallyKnown; -- it will be static if type is non-scalar CStacks.PopOff (VCGHeap, ExpnStack, TypeMarkCell); if StaticResult = IsUnknown then -- not known so build attribute range from typemark CreateCellKind (AttribCell, VCGHeap, Cell_Storage.Attrib_Value); CreateOpCell (LeftSideOfRange, VCGHeap, SP_Symbols.apostrophe); SetLeftArgument (LeftSideOfRange, TypeMarkCell, VCGHeap); SetRightArgument (LeftSideOfRange, AttribCell, VCGHeap); Structures.CopyStructure (VCGHeap, LeftSideOfRange, RightSideOfRange); Cells.Set_Lex_Str (VCGHeap, RightPtr (VCGHeap, LeftSideOfRange), LexTokenManager.First_Token); Cells.Set_Lex_Str (VCGHeap, RightPtr (VCGHeap, RightSideOfRange), LexTokenManager.Last_Token); CompleteInequalityModel; else -- it is known get rid of expression from TOS CStacks.Pop (VCGHeap, ExpnStack); -- put True or False literal cell on stack ModelStaticallyKnownResult; end if; end if; end ModelInClause; ------------------------------------------------------------------------ procedure DownProcessRelation (Node : in STree.SyntaxNode) --# global in DoRtc; --# in ExpnStack; --# in STree.Table; --# in out ShortCircuitStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives ShortCircuitStack, --# Statistics.TableUsage from *, --# DoRtc, --# ExpnStack, --# Node, --# STree.Table, --# VCGHeap & --# VCGHeap from *, --# DoRtc, --# ExpnStack, --# Node, --# ShortCircuitStack, --# STree.Table; is NotLeftHand, LeftHand : Cells.Cell; begin if DoRtc and then STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => Node)) = SP_Symbols.expression_rep2 then -- take a copy of the top of ExpnStack which is the LHS of -- the andthen; and push it on ShortCircuitStack Structures.CopyStructure (VCGHeap, CStacks.Top (VCGHeap, ExpnStack), LeftHand); CStacks.Push (VCGHeap, LeftHand, ShortCircuitStack); elsif DoRtc and then STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => Node)) = SP_Symbols.expression_rep4 then -- take a copy of the top of ExpnStack which is the LHS of -- the orelse and negate it; and push it on ShortCircuitStack Structures.CopyStructure (VCGHeap, CStacks.Top (VCGHeap, ExpnStack), LeftHand); CreateOpCell (NotLeftHand, VCGHeap, SP_Symbols.RWnot); SetRightArgument (NotLeftHand, LeftHand, VCGHeap); CStacks.Push (VCGHeap, NotLeftHand, ShortCircuitStack); end if; end DownProcessRelation; procedure ModelAndThen --# global in out ExpnStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# ExpnStack, --# VCGHeap; is DAGCell, Left, Right : Cells.Cell; begin -- continue to model AndThen as And for the moment CStacks.PopOff (VCGHeap, ExpnStack, Right); CStacks.PopOff (VCGHeap, ExpnStack, Left); CreateOpCell (DAGCell, VCGHeap, SP_Symbols.RWand); SetRightArgument (DAGCell, Right, VCGHeap); SetLeftArgument (DAGCell, Left, VCGHeap); CStacks.Push (VCGHeap, DAGCell, ExpnStack); end ModelAndThen; procedure ModelOrElse --# global in out ExpnStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# ExpnStack, --# VCGHeap; is 
DAGCell, Left, Right : Cells.Cell; begin -- continue to model OrElse as Or for the moment CStacks.PopOff (VCGHeap, ExpnStack, Right); CStacks.PopOff (VCGHeap, ExpnStack, Left); CreateOpCell (DAGCell, VCGHeap, SP_Symbols.RWor); SetRightArgument (DAGCell, Right, VCGHeap); SetLeftArgument (DAGCell, Left, VCGHeap); CStacks.Push (VCGHeap, DAGCell, ExpnStack); end ModelOrElse; procedure ProcessRelation (Node : in STree.SyntaxNode) --# global in Dictionary.Dict; --# in DoRtc; --# in STree.Table; --# in out ExpnStack; --# in out ShortCircuitStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives ExpnStack, --# ShortCircuitStack, --# Statistics.TableUsage, --# VCGHeap from *, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Node, --# ShortCircuitStack, --# STree.Table, --# VCGHeap; is Op_Node : STree.SyntaxNode; begin Op_Node := STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node)); if Op_Node /= STree.NullNode then if STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.relational_operator then PushOperator (Binary, STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => Op_Node)), VCGHeap, ExpnStack); else ModelInClause (STree.Child_Node (Current_Node => Node)); end if; end if; -- detect any short-circuit forms if STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => Node)) = SP_Symbols.expression_rep2 then if DoRtc then CStacks.Pop (VCGHeap, ShortCircuitStack); end if; -- left associate and then's ModelAndThen; elsif STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => Node)) = SP_Symbols.expression_rep4 then if DoRtc then CStacks.Pop (VCGHeap, ShortCircuitStack); end if; -- left associate orelse's ModelOrElse; end if; end ProcessRelation; ------------------------------------------------------------------- procedure Process_Simple_Expression (Node : in STree.SyntaxNode) --# global in Dictionary.Dict; --# in DoRtc; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out ExpnStack; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives CheckStack, --# ShortCircuitStack, --# Statistics.TableUsage, --# VCGHeap from *, --# CheckStack, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# LexTokenManager.State, --# Node, --# Scope, --# ShortCircuitStack, --# STree.Table, --# VCGHeap & --# ContainsReals from *, --# Dictionary.Dict, --# DoRtc, --# Node, --# STree.Table & --# ExpnStack from *, --# CheckStack, --# Dictionary.Dict, --# DoRtc, --# LexTokenManager.State, --# Node, --# ShortCircuitStack, --# STree.Table, --# VCGHeap & --# LexTokenManager.State from *, --# Dictionary.Dict, --# ExpnStack, --# Node, --# STree.Table, --# VCGHeap; is Origin_Type : SP_Symbols.SP_Symbol; Rel_Op_Node : STree.SyntaxNode; Op_Node : STree.SyntaxNode; Op : SP_Symbols.SP_Symbol; procedure CreateNonZeroConstraint (Expr : in Cells.Cell; Check_Cell : out Cells.Cell) --# global in out Statistics.TableUsage; --# in out VCGHeap; --# derives Check_Cell from VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap & --# VCGHeap from *, --# Expr; is ZeroCell, NotEqualsCell : Cells.Cell; begin -- CreateNonZeroConstraint CreateManifestConstCell (ZeroCell, VCGHeap, LexTokenManager.Zero_Value); CreateOpCell (NotEqualsCell, VCGHeap, SP_Symbols.not_equal); SetRightArgument (NotEqualsCell, ZeroCell, VCGHeap); SetLeftArgument (NotEqualsCell, Expr, VCGHeap); Check_Cell := NotEqualsCell; end CreateNonZeroConstraint; procedure CheckDivideByZero (RightArg : 
in Cells.Cell) --# global in DoRtc; --# in out CheckStack; --# in out ShortCircuitStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives CheckStack, --# ShortCircuitStack, --# Statistics.TableUsage from *, --# DoRtc, --# RightArg, --# ShortCircuitStack, --# VCGHeap & --# VCGHeap from *, --# CheckStack, --# DoRtc, --# RightArg, --# ShortCircuitStack; is Check_Cell, CpRightArg : Cells.Cell; begin if DoRtc then Structures.CopyStructure (VCGHeap, RightArg, CpRightArg); CreateNonZeroConstraint (CpRightArg, Check_Cell); PlantCheckStatement (Check_Cell, VCGHeap, ShortCircuitStack, CheckStack); end if; end CheckDivideByZero; ---------------------------------------------------- procedure ModelDivide --# global in Dictionary.Dict; --# in DoRtc; --# in Op_Node; --# in STree.Table; --# in out CheckStack; --# in out ExpnStack; --# in out ShortCircuitStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives CheckStack, --# ShortCircuitStack, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Op_Node, --# ShortCircuitStack, --# STree.Table, --# VCGHeap & --# ExpnStack from *, --# Dictionary.Dict, --# Op_Node, --# STree.Table, --# VCGHeap & --# VCGHeap from *, --# CheckStack, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Op_Node, --# ShortCircuitStack, --# STree.Table; is OpCell : Cells.Cell; begin Cells.Create_Cell (VCGHeap, OpCell); if Dictionary.TypeIsReal (STree.NodeSymbol (Op_Node)) then Cells.Set_Kind (VCGHeap, OpCell, Cell_Storage.Op); Cells.Set_Op_Symbol (VCGHeap, OpCell, SP_Symbols.divide); else Cells.Set_Kind (VCGHeap, OpCell, Cell_Storage.FDL_Div_Op); end if; SetRightArgument (OpCell, CStacks.Top (VCGHeap, ExpnStack), VCGHeap); CStacks.Pop (VCGHeap, ExpnStack); SetLeftArgument (OpCell, CStacks.Top (VCGHeap, ExpnStack), VCGHeap); CStacks.Pop (VCGHeap, ExpnStack); CStacks.Push (VCGHeap, OpCell, ExpnStack); -- Should check for real types here? 
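      -- Illustrative sketch: for an expression X / Y the call below takes a copy
      -- of the DAG for Y and, when run-time checks are enabled, plants the check
      -- Y <> 0 via CreateNonZeroConstraint and PlantCheckStatement.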
CheckDivideByZero (RightPtr (VCGHeap, CStacks.Top (VCGHeap, ExpnStack))); end ModelDivide; ---------------------------------------- procedure ModelRem --# global in DoRtc; --# in out CheckStack; --# in out ExpnStack; --# in out ShortCircuitStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives CheckStack, --# ShortCircuitStack, --# Statistics.TableUsage from *, --# DoRtc, --# ExpnStack, --# ShortCircuitStack, --# VCGHeap & --# ExpnStack from *, --# VCGHeap & --# VCGHeap from *, --# CheckStack, --# DoRtc, --# ExpnStack, --# ShortCircuitStack; is DAGCell, Left, Right : Cells.Cell; begin -- modelling of I rem J as I - (I div J) * J -- J is top of stack and I is 2nd TOS CStacks.PopOff (VCGHeap, ExpnStack, Right); CStacks.PopOff (VCGHeap, ExpnStack, Left); CreateCellKind (DAGCell, VCGHeap, Cell_Storage.FDL_Div_Op); SetRightArgument (DAGCell, Right, VCGHeap); SetLeftArgument (DAGCell, Left, VCGHeap); CStacks.Push (VCGHeap, DAGCell, ExpnStack); CreateOpCell (DAGCell, VCGHeap, SP_Symbols.multiply); SetRightArgument (DAGCell, Right, VCGHeap); SetLeftArgument (DAGCell, CStacks.Top (VCGHeap, ExpnStack), VCGHeap); CStacks.Pop (VCGHeap, ExpnStack); CStacks.Push (VCGHeap, DAGCell, ExpnStack); CreateOpCell (DAGCell, VCGHeap, SP_Symbols.minus); SetRightArgument (DAGCell, CStacks.Top (VCGHeap, ExpnStack), VCGHeap); CStacks.Pop (VCGHeap, ExpnStack); SetLeftArgument (DAGCell, Left, VCGHeap); CStacks.Push (VCGHeap, DAGCell, ExpnStack); CheckDivideByZero (Right); end ModelRem; ---------------------------------------- begin -- Process_Simple_Expression Op_Node := STree.Child_Node (Current_Node => STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node))); if Op_Node /= STree.NullNode then -- detection of / and REM for special handling Op := STree.Syntax_Node_Type (Node => Op_Node); if Op = SP_Symbols.divide then ModelDivide; elsif Op = SP_Symbols.RWrem then ModelRem; elsif Op = SP_Symbols.ampersand then Model_Catenation (ExpnStack, VCGHeap); else PushOperator (Binary, Op, VCGHeap, ExpnStack); if Op = SP_Symbols.RWmod then CheckDivideByZero (RightPtr (VCGHeap, CStacks.Top (VCGHeap, ExpnStack))); end if; end if; ModularizeIfNeeded (STree.NodeSymbol (Op_Node), VCGHeap, ExpnStack); if DoRtc then if STree.Syntax_Node_Type (Node => Node) = SP_Symbols.simple_expression then Origin_Type := STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => -- relation STree.Parent_Node (Current_Node => -- expression STree.Parent_Node (Current_Node => Node)))); -- check for relational operator Rel_Op_Node := STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => STree.Parent_Node (Current_Node => Node))); else -- SP_Symbols.term Origin_Type := STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => -- simple_expression_opt STree.Parent_Node (Current_Node => -- simple_expression STree.Parent_Node (Current_Node => -- relation STree.Parent_Node (Current_Node => -- expression STree.Parent_Node (Current_Node => Node)))))); -- check for relational operator Rel_Op_Node := STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => STree.Parent_Node (Current_Node => STree.Parent_Node (Current_Node => STree.Parent_Node (Current_Node => Node))))); end if; if Origin_Type /= SP_Symbols.assignment_statement or else Rel_Op_Node /= STree.NullNode then -- suppress overflow check for outermost expression on rhs -- of assignment CheckOverflowRunTimeError (STree.NodeSymbol (Op_Node), CStacks.Top (VCGHeap, ExpnStack), Scope, VCGHeap, ShortCircuitStack, ContainsReals, 
CheckStack); end if; end if; end if; end Process_Simple_Expression; ------------------------------------------------------------------- procedure Process_Simple_Expression_Opt (Node : in STree.SyntaxNode) --# global in Dictionary.Dict; --# in DoRtc; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out ExpnStack; --# in out ShortCircuitStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives CheckStack, --# ShortCircuitStack, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Node, --# Scope, --# ShortCircuitStack, --# STree.Table, --# VCGHeap & --# ContainsReals from *, --# Dictionary.Dict, --# DoRtc, --# Node, --# STree.Table & --# ExpnStack from *, --# Node, --# STree.Table, --# VCGHeap & --# VCGHeap from *, --# CheckStack, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Node, --# Scope, --# ShortCircuitStack, --# STree.Table; is Origin_Type : SP_Symbols.SP_Symbol; Rel_Op_Node : STree.SyntaxNode; Op_Node : STree.SyntaxNode; begin Op_Node := STree.Child_Node (Current_Node => Node); if STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.unary_adding_operator then PushOperator (Unary, STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => Op_Node)), VCGHeap, ExpnStack); if DoRtc then Origin_Type := STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => -- simple_expression STree.Parent_Node (Current_Node => -- relation STree.Parent_Node (Current_Node => -- expression STree.Parent_Node (Current_Node => Node))))); -- check for relational operator Rel_Op_Node := STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => STree.Parent_Node (Current_Node => STree.Parent_Node (Current_Node => Node)))); if Origin_Type /= SP_Symbols.assignment_statement or else Rel_Op_Node /= STree.NullNode then -- suppress overflow check for outermost expression on rhs -- of assignment CheckOverflowRunTimeError (STree.NodeSymbol (Op_Node), CStacks.Top (VCGHeap, ExpnStack), Scope, VCGHeap, ShortCircuitStack, ContainsReals, CheckStack); end if; end if; end if; end Process_Simple_Expression_Opt; ------------------------------------------------------------------- procedure ProcessFactor (Node : in STree.SyntaxNode) --# global in Dictionary.Dict; --# in DoRtc; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out ExpnStack; --# in out ShortCircuitStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives CheckStack, --# ShortCircuitStack, --# Statistics.TableUsage, --# VCGHeap from *, --# CheckStack, --# Dictionary.Dict, --# DoRtc, --# ExpnStack, --# Node, --# Scope, --# ShortCircuitStack, --# STree.Table, --# VCGHeap & --# ContainsReals from *, --# Dictionary.Dict, --# DoRtc, --# Node, --# STree.Table & --# ExpnStack from *, --# CheckStack, --# Dictionary.Dict, --# DoRtc, --# Node, --# ShortCircuitStack, --# STree.Table, --# VCGHeap; is Origin_Type : SP_Symbols.SP_Symbol; Rel_Op_Node, OpNextNode, Op_Node : STree.SyntaxNode; BoolOpCell : Cells.Cell; ResultType : Dictionary.Symbol; procedure CreateGeZeroConstraint (Expr : in Cells.Cell; Check_Cell : out Cells.Cell) --# global in out Statistics.TableUsage; --# in out VCGHeap; --# derives Check_Cell from VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap & --# VCGHeap from *, --# Expr; is ZeroCell, GeCell : Cells.Cell; begin CreateManifestConstCell (ZeroCell, VCGHeap, LexTokenManager.Zero_Value); CreateOpCell (GeCell, VCGHeap, SP_Symbols.greater_or_equal); SetRightArgument (GeCell, 
ZeroCell, VCGHeap); SetLeftArgument (GeCell, Expr, VCGHeap); Check_Cell := GeCell; end CreateGeZeroConstraint; procedure CreateEqZeroConstraint (Expr : in Cells.Cell; Check_Cell : out Cells.Cell) --# global in out Statistics.TableUsage; --# in out VCGHeap; --# derives Check_Cell from VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap & --# VCGHeap from *, --# Expr; is ZeroCell, GeCell : Cells.Cell; begin -- CreateEqZeroConstraint CreateManifestConstCell (ZeroCell, VCGHeap, LexTokenManager.Zero_Value); CreateOpCell (GeCell, VCGHeap, SP_Symbols.equals); SetRightArgument (GeCell, ZeroCell, VCGHeap); SetLeftArgument (GeCell, Expr, VCGHeap); Check_Cell := GeCell; end CreateEqZeroConstraint; procedure CheckExponentConstraint (LhsTypeSym : in Dictionary.Symbol; LeftArg : in Cells.Cell; RightArg : in Cells.Cell) --# global in Dictionary.Dict; --# in DoRtc; --# in out CheckStack; --# in out ShortCircuitStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives CheckStack, --# ShortCircuitStack, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# DoRtc, --# LeftArg, --# LhsTypeSym, --# RightArg, --# ShortCircuitStack, --# VCGHeap & --# VCGHeap from *, --# CheckStack, --# Dictionary.Dict, --# DoRtc, --# LeftArg, --# LhsTypeSym, --# RightArg, --# ShortCircuitStack; is Check_Cell, EQcell, GEcell, CpLeftArg, CpRightArg : Cells.Cell; begin if DoRtc then if IsIntegerType (LhsTypeSym) or IsModularType (LhsTypeSym) then -- LRM95 4.5.6(8) says that RHS for signed integer or modular -- types must be subtype Natural - i.e. >= 0, so... Structures.CopyStructure (VCGHeap, RightArg, CpRightArg); CreateGeZeroConstraint (CpRightArg, Check_Cell); PlantCheckStatement (Check_Cell, VCGHeap, ShortCircuitStack, CheckStack); elsif IsRealType (LhsTypeSym) then -- detect 0.0 to negative power case -- N.B. This is not guarded by the RealRTC switch because it -- effectively a division by zero and we already regard that as -- a special case in the RTC, even for reals Structures.CopyStructure (VCGHeap, RightArg, CpRightArg); CreateGeZeroConstraint (CpRightArg, GEcell); Structures.CopyStructure (VCGHeap, LeftArg, CpLeftArg); CreateEqZeroConstraint (CpLeftArg, EQcell); CreateOpCell (Check_Cell, VCGHeap, SP_Symbols.implies); SetLeftArgument (Check_Cell, EQcell, VCGHeap); SetRightArgument (Check_Cell, GEcell, VCGHeap); PlantCheckStatement (Check_Cell, VCGHeap, ShortCircuitStack, CheckStack); end if; end if; end CheckExponentConstraint; -- Note, there is a similar version of this -- subprogram in BuildAnnotationExprDAG procedure ModelModularNotOperation --# global in ResultType; --# in out ExpnStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# ExpnStack, --# ResultType, --# VCGHeap; is MinusOpCell, TickCell, PrefixCell, ModulusCell : Cells.Cell; begin ---------------------------------------------------- -- LRM 4.5.6 (5) defines "not X" for a modular -- -- type T to be equivalent to T'Last - X. 
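-- For example, for a type with modulus 2**8 (so T'Last = 255),      --
-- "not 1" is modelled as 255 - 1 = 254.                             --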
-- ---------------------------------------------------- -- create ' operator CreateOpCell (TickCell, VCGHeap, SP_Symbols.apostrophe); -- create Last attribute name CreateAttribValueCell (ModulusCell, VCGHeap, LexTokenManager.Last_Token); -- Create prefix given by ResultType CreateFixedVarCell (PrefixCell, VCGHeap, ResultType); -- Assemble T'Last SetLeftArgument (TickCell, PrefixCell, VCGHeap); SetRightArgument (TickCell, ModulusCell, VCGHeap); -- create binary "-" operator CreateOpCell (MinusOpCell, VCGHeap, SP_Symbols.minus); -- Construct T'Last - X, where X is on the top-of-stack SetRightArgument (MinusOpCell, CStacks.Top (VCGHeap, ExpnStack), VCGHeap); SetLeftArgument (MinusOpCell, TickCell, VCGHeap); CStacks.Pop (VCGHeap, ExpnStack); CStacks.Push (VCGHeap, MinusOpCell, ExpnStack); end ModelModularNotOperation; begin -- ProcessFactor Op_Node := STree.Child_Node (Current_Node => Node); if STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.RWnot then -- check to see if result type is an array and -- build special model if it is ResultType := STree.NodeSymbol (Op_Node); if Dictionary.IsTypeMark (ResultType) then if Dictionary.TypeIsArray (ResultType) then -- must be a Boolean array "not" operation CreateBoolOpCell (BoolOpCell, VCGHeap, ResultType, SP_Symbols.RWnot); SetRightArgument (BoolOpCell, CStacks.Top (VCGHeap, ExpnStack), VCGHeap); CStacks.Pop (VCGHeap, ExpnStack); CStacks.Push (VCGHeap, BoolOpCell, ExpnStack); elsif Dictionary.TypeIsModular (ResultType) then -- must be a Modular "not" operation. ModelModularNotOperation; else -- proceed as before for scalar bool ops PushOperator (Unary, SP_Symbols.RWnot, VCGHeap, ExpnStack); end if; else -- proceed as before for scalar bool ops PushOperator (Unary, SP_Symbols.RWnot, VCGHeap, ExpnStack); end if; -- handle abs elsif STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.RWabs then PushFunction (Cell_Storage.Abs_Function, VCGHeap, ExpnStack); if DoRtc then Origin_Type := STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => -- term STree.Parent_Node (Current_Node => -- simple_expression_opt STree.Parent_Node (Current_Node => -- simple_expression STree.Parent_Node (Current_Node => -- relation STree.Parent_Node (Current_Node => -- expression STree.Parent_Node (Current_Node => Node))))))); -- check for relational operator Rel_Op_Node := STree.Next_Sibling (STree.Child_Node (Current_Node => STree.Parent_Node (Current_Node => STree.Parent_Node (Current_Node => STree.Parent_Node (Current_Node => STree.Parent_Node (Current_Node => Node)))))); if Origin_Type /= SP_Symbols.assignment_statement or else Rel_Op_Node /= STree.NullNode then -- suppress overflow check for outermost expression on rhs -- of assignment CheckOverflowRunTimeError (STree.NodeSymbol (Op_Node), CStacks.Top (VCGHeap, ExpnStack), Scope, VCGHeap, ShortCircuitStack, ContainsReals, CheckStack); end if; end if; else OpNextNode := STree.Next_Sibling (Current_Node => Op_Node); if OpNextNode /= STree.NullNode then PushOperator (Binary, SP_Symbols.double_star, VCGHeap, ExpnStack); CheckExponentConstraint (STree.NodeSymbol (Op_Node), LeftPtr (VCGHeap, CStacks.Top (VCGHeap, ExpnStack)), RightPtr (VCGHeap, CStacks.Top (VCGHeap, ExpnStack))); ModularizeIfNeeded (STree.NodeSymbol (Op_Node), VCGHeap, ExpnStack); if DoRtc then Origin_Type := STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => -- term STree.Parent_Node (Current_Node => -- simple_expression_opt STree.Parent_Node (Current_Node => -- simple_expression STree.Parent_Node (Current_Node => -- relation 
STree.Parent_Node (Current_Node => -- expression STree.Parent_Node (Current_Node => Node))))))); if Origin_Type /= SP_Symbols.assignment_statement then -- suppress overflow check for outermost expression on rhs -- of assignment CheckOverflowRunTimeError (STree.NodeSymbol (Op_Node), CStacks.Top (VCGHeap, ExpnStack), Scope, VCGHeap, ShortCircuitStack, ContainsReals, CheckStack); end if; end if; end if; end if; end ProcessFactor; ------------------------------------------------------------------- function ValidStartNode return Boolean --# global in StartNode; --# in STree.Table; is NodeType : SP_Symbols.SP_Symbol; begin NodeType := STree.Syntax_Node_Type (Node => StartNode); return NodeType = SP_Symbols.expression or else NodeType = SP_Symbols.name or else NodeType = SP_Symbols.selected_component or else NodeType = SP_Symbols.simple_expression or else NodeType = SP_Symbols.condition or else NodeType = SP_Symbols.simple_name or else NodeType = SP_Symbols.attribute; end ValidStartNode; ------------------------------------------------------------------- begin -- BuildExpnDAG SeqAlgebra.CreateSeq (FlowHeap, ReferencedVars); SystemErrors.RT_Assert (C => ValidStartNode, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Invalid start node passed to BuildExpnDAG"); CStacks.CreateStack (ExpnStack); Node := StartNode; -- The down-loop traverses the syntax tree from the given root to its leaves. loop --------------------------------down loop LastNode := Node; NodeType := STree.Syntax_Node_Type (Node => Node); case NodeType is -- prune at selector nodes so that only left most idents found when SP_Symbols.selector => Node := STree.NullNode; when SP_Symbols.numeric_literal => CreateManifestConstCell (DAGCell, VCGHeap, STree.Node_Lex_String (Node => STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => Node)))); CStacks.Push (VCGHeap, DAGCell, ExpnStack); Node := STree.NullNode; when SP_Symbols.character_literal | SP_Symbols.string_literal => CreateManifestConstCell (DAGCell, VCGHeap, STree.Node_Lex_String (Node => Node)); CStacks.Push (VCGHeap, DAGCell, ExpnStack); Node := STree.NullNode; when SP_Symbols.attribute_ident => DownProcessAttributeIdent (Node); Node := STree.NullNode; when SP_Symbols.identifier => ProcessIdentifier (Node, ExpnScope); Node := STree.NullNode; when SP_Symbols.simple_name => if STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => LastNode)) = SP_Symbols.named_argument_association then -- do not want look at parameter or field identifier Node := STree.NullNode; else Node := STree.Child_Node (Current_Node => Node); end if; when SP_Symbols.aggregate => DownProcessAggregate (SP_Symbols.qualified_expression, VCGHeap, Node, ExpnStack); when SP_Symbols.aggregate_choice_rep => DownProcessAggregateChoiceRep (LastNode, ExpnScope, VCGHeap, ExpnStack, -- to get Node); when SP_Symbols.record_component_selector_name => DownProcessRecordComponentSelectorName (LastNode, ExpnScope, VCGHeap, ExpnStack, -- to get Node); -- detect any short-circuit forms when SP_Symbols.relation => DownProcessRelation (Node); Node := STree.Child_Node (Current_Node => Node); when others => Node := STree.Child_Node (Current_Node => Node); end case; if Node = STree.NullNode and LastNode /= StartNode then -- The up-loop traverses the syntax tree from the leaves toward a -- given root. 
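-- Roughly: the down-loop has already pushed cells for the leaves
-- (literals and identifiers) onto ExpnStack; the up-loop below pops
-- completed operands and combines them into operator cells as each
-- production is finished.  For example (illustrative), for "A + B * C"
-- the identifiers A, B and C are pushed on the way down, the term node
-- replaces B, C by (B * C), and the simple_expression node finally
-- leaves the single DAG root (A + (B * C)) on the stack.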
loop ------------------------up loop--------------------- Node := STree.Next_Sibling (Current_Node => LastNode); exit when Node /= STree.NullNode; Node := STree.Parent_Node (Current_Node => LastNode); exit when Node = STree.NullNode; NodeType := STree.Syntax_Node_Type (Node => Node); case NodeType is when SP_Symbols.selected_component => ProcessSelectedComponent (Node, ExpnScope); when SP_Symbols.qualified_expression => ModelQualifiedExpression (Node); when SP_Symbols.aggregate => UpProcessAggregate; when SP_Symbols.extension_aggregate => UpProcessExtensionAggregate (VCGHeap, ExpnStack); when SP_Symbols.ancestor_part => ProcessAncestorPart (Node, VCGHeap, ExpnStack); when SP_Symbols.component_association => UpProcessComponentAssociation (Node); when SP_Symbols.named_association_rep => UpProcessNamedAssociationRep (Node); when SP_Symbols.named_record_component_association => UpProcessNamedRecordComponentAssociation (Node); when SP_Symbols.aggregate_choice_rep => UpProcessAggregateChoiceRep (Node, VCGHeap, ExpnStack); when SP_Symbols.aggregate_or_expression => UpProcessAggregateOrExpression (Node); when SP_Symbols.positional_record_component_association => UpProcessPositionalRecordComponentAssociation (Node); when SP_Symbols.aggregate_choice => UpProcessAggregateChoice (Node); when SP_Symbols.expression | SP_Symbols.expression_rep1 | SP_Symbols.expression_rep2 | SP_Symbols.expression_rep3 | SP_Symbols.expression_rep4 | SP_Symbols.expression_rep5 => ProcessExpression (Node); when SP_Symbols.relation => ProcessRelation (Node); when SP_Symbols.simple_expression | SP_Symbols.term => Process_Simple_Expression (Node => Node); when SP_Symbols.simple_expression_opt => Process_Simple_Expression_Opt (Node => Node); when SP_Symbols.factor => ProcessFactor (Node); when SP_Symbols.positional_argument_association => Process_Positional_Argument_Association (Node => Node); when SP_Symbols.named_argument_association => Process_Named_Argument_Association (Node => Node); when SP_Symbols.name_argument_list => ProcessNameArgumentList; when SP_Symbols.attribute_designator => UpAttributeDesignator (Node); when others => null; end case; exit when Node = StartNode; LastNode := Node; end loop; -----------------------------up loop-------------- end if; exit when Node = STree.NullNode or Node = StartNode; end loop; --------------------------down loop------------------ --# accept F, 10, ExpnStack, "Ineffective assignment here OK"; CStacks.PopOff (VCGHeap, ExpnStack, DAGRoot); --# end accept; -- at this point assume variables in set ReferencedVars are in their type CheckPlantRvalueAssumptions; if CommandLineData.Content.Debug.DAG then Debug_Print_DAG (Start_Node => StartNode, Scope => Scope, DAG_Root => DAGRoot, VCG_Heap => VCGHeap); end if; end BuildExpnDAG; ././@LongLink0000000000000000000000000000021000000000000011556 Lustar rootrootspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_protected_type_declaration.adbspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaratio0000644000175000017500000011252111753202336033122 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at 
your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SLI; separate (Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Full_Type_Declaration) procedure Wf_Protected_Type_Declaration (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) is Protected_Type_Sym : Dictionary.Symbol; Sym : Dictionary.Symbol; Protected_Scope : Dictionary.Scopes; Protected_Private_Scope : Dictionary.Scopes; Ident_Node : STree.SyntaxNode; Protected_Element_Decl_Node : STree.SyntaxNode; Protected_Element_Node : STree.SyntaxNode; Closing_Ident_Node : STree.SyntaxNode; Discriminant_Node : STree.SyntaxNode; Ident_Str : LexTokenManager.Lex_String; At_Least_One_Operation_Declared : Boolean; It : STree.Iterator; function Get_Discriminant_Node (Protected_Type_Declaration_Node : STree.SyntaxNode) return STree.SyntaxNode --# global in STree.Table; --# pre Syntax_Node_Type (Protected_Type_Declaration_Node, STree.Table) = SP_Symbols.protected_type_declaration; is Return_Node : STree.SyntaxNode; begin Return_Node := Next_Sibling (Current_Node => Child_Node (Current_Node => Protected_Type_Declaration_Node)); -- ASSUME Return_Node = known_discriminant_part OR protected_definition if Syntax_Node_Type (Node => Return_Node) = SP_Symbols.protected_definition then -- ASSUME Return_Node = protected_definition Return_Node := STree.NullNode; elsif Syntax_Node_Type (Node => Return_Node) /= SP_Symbols.known_discriminant_part then Return_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Return_Node = known_discriminant_part OR protected_definition in Get_Discriminant_Node"); end if; -- ASSUME Return_Node = known_discriminant_part OR NULL SystemErrors.RT_Assert (C => Return_Node = STree.NullNode or else Syntax_Node_Type (Node => Return_Node) = SP_Symbols.known_discriminant_part, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Return_Node = known_discriminant_part OR NULL in Get_Discriminant_Node"); return Return_Node; end Get_Discriminant_Node; ---------- function Get_Protected_Operations_Node (Protected_Type_Declaration_Node : STree.SyntaxNode) return STree.SyntaxNode --# global in STree.Table; --# pre Syntax_Node_Type (Protected_Type_Declaration_Node, STree.Table) = SP_Symbols.protected_type_declaration; --# return Node => Syntax_Node_Type (Node, STree.Table) = SP_Symbols.protected_operation_declaration; is Current_Node : STree.SyntaxNode; begin Current_Node := Child_Node (Current_Node => Last_Sibling_Of (Start_Node => Child_Node (Current_Node => Protected_Type_Declaration_Node))); -- ASSUME Current_Node = protected_operation_declaration SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Current_Node) = SP_Symbols.protected_operation_declaration, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = protected_operation_declaration in Get_Protected_Operations_Node"); return Current_Node; end Get_Protected_Operations_Node; ---------- 
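-- Illustrative shape of the construct these helper functions navigate
-- (names and details are examples only):
--
--    protected type PT is
--       pragma Priority (10);
--       procedure Push (X : in Integer);  -- protected_operation_declaration
--       entry Wait;
--    private
--       State : Integer := 0;             -- protected_element_declaration
--    end PT;                              -- closing identifier
--
-- with an optional known_discriminant_part after the type name.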
function Get_Protected_Element_Declaration_Node (Protected_Type_Declaration_Node : STree.SyntaxNode) return STree.SyntaxNode --# global in STree.Table; --# pre Syntax_Node_Type (Protected_Type_Declaration_Node, STree.Table) = SP_Symbols.protected_type_declaration; is Return_Node : STree.SyntaxNode; begin Return_Node := Next_Sibling (Current_Node => Get_Protected_Operations_Node (Protected_Type_Declaration_Node => Protected_Type_Declaration_Node)); -- ASSUME Return_Node = protected_element_declaration OR identifier OR hidden_part if Syntax_Node_Type (Node => Return_Node) = SP_Symbols.identifier or else Syntax_Node_Type (Node => Return_Node) = SP_Symbols.hidden_part then Return_Node := STree.NullNode; elsif Syntax_Node_Type (Node => Return_Node) /= SP_Symbols.protected_element_declaration then Return_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Return_Node = protected_element_declaration OR identifier OR " & "hidden_part in Get_Protected_Element_Declaration_Node"); end if; -- ASSUME Return_Node = protected_element_declaration OR NULL SystemErrors.RT_Assert (C => Return_Node = STree.NullNode or else Syntax_Node_Type (Node => Return_Node) = SP_Symbols.protected_element_declaration, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Return_Node = protected_element_declaration OR NULL in Get_Protected_Element_Declaration_Node"); return Return_Node; end Get_Protected_Element_Declaration_Node; ---------- function Get_Closing_Ident_Node (Protected_Type_Declaration_Node : STree.SyntaxNode) return STree.SyntaxNode --# global in STree.Table; --# pre Syntax_Node_Type (Protected_Type_Declaration_Node, STree.Table) = SP_Symbols.protected_type_declaration; is Return_Node : STree.SyntaxNode; begin Return_Node := Last_Sibling_Of (Start_Node => Get_Protected_Operations_Node (Protected_Type_Declaration_Node => Protected_Type_Declaration_Node)); -- ASSUME Return_Node = identifier OR hidden_part SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Return_Node) = SP_Symbols.hidden_part or else Syntax_Node_Type (Node => Return_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Return_Node = identifier OR hidden_part in Get_Closing_Ident_Node"); return Return_Node; end Get_Closing_Ident_Node; ---------- procedure Wf_Protected_Op_Dec (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord; Op_Found : out Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# 
ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# Op_Found from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.protected_operation_declaration; --# post STree.Table = STree.Table~; is separate; ---------- procedure Check_Pragma_Validity (End_Node_Position : in LexTokenManager.Token_Position; Protected_Type_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# End_Node_Position, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Protected_Type_Sym, --# Scope, --# SPARK_IO.File_Sys & --# LexTokenManager.State from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Protected_Type_Sym; is Priority_Found, Interrupt_Priority_Found, Attach_Handler_Found : Boolean; Unused_Value_Rep : LexTokenManager.Lex_String; begin Priority_Found := Dictionary.GetTypeHasPragma (Protected_Type_Sym, Dictionary.Priority); Interrupt_Priority_Found := Dictionary.GetTypeHasPragma (Protected_Type_Sym, Dictionary.InterruptPriority); Attach_Handler_Found := Dictionary.GetTypeHasPragma (Protected_Type_Sym, Dictionary.AttachHandler); -- There must be either Priority or Interrupt_Priority if not (Priority_Found or else Interrupt_Priority_Found) then ErrorHandler.Semantic_Error (Err_Num => 876, Reference => ErrorHandler.No_Reference, Position => End_Node_Position, Id_Str => LexTokenManager.Null_String); end if; -- If there is one or more Attach_Handler there must be Interrupt_Priority if Attach_Handler_Found then if Interrupt_Priority_Found then --# accept Flow, 10, Unused_Value_Rep, "Expected ineffective assignment"; Check_Priority_Range (Error_Sym => Protected_Type_Sym, Scope => Scope, Pragma_Kind => Dictionary.AttachHandler, Err_Pos => End_Node_Position, Value => Maths.ValueRep (Dictionary.GetTypePragmaValue (Protected_Type_Sym, Dictionary.InterruptPriority)), Value_Rep => Unused_Value_Rep); --# end accept; else ErrorHandler.Semantic_Error (Err_Num => 878, Reference => ErrorHandler.No_Reference, Position => End_Node_Position, Id_Str => LexTokenManager.Null_String); end if; end if; --# accept Flow, 33, Unused_Value_Rep, "Expected to be neither referenced nor exported"; end Check_Pragma_Validity; ---------- procedure Add_Virtual_Elements (Type_Sym : in Dictionary.Symbol) --# global in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# ContextManager.Ops.Unit_Stack, --# Type_Sym & --# SPARK_IO.File_Sys from *, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Type_Sym; is It : Dictionary.Iterator; begin It := Dictionary.FirstVirtualElement (Type_Sym); while It /= Dictionary.NullIterator loop -- Make the virtual element a refinement constituent of the implicit own -- variable associated with this protected type. 
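-- For example, an item named in the protects list of this type behaves
-- as if it were declared inside the protected type, so it is recorded
-- here as a constituent of the type's implicit own variable, just as an
-- ordinary refinement constituent would be.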
Dictionary.AddConstituentSym (ConstituentVariable => Dictionary.CurrentSymbol (It), Subject => Dictionary.GetProtectedTypeOwnVariable (Type_Sym), Comp_Unit => ContextManager.Ops.Current_Unit, ConstituentReference => Dictionary.Null_Location); It := Dictionary.NextSymbol (It); end loop; end Add_Virtual_Elements; ---------- procedure Add_Implicit_Interrupt_Stream_Variables (Protected_Type_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Error_Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# CommandLineData.Content, --# LexTokenManager.State, --# Protected_Type_Sym, --# Scope & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Node_Pos, --# LexTokenManager.State, --# Protected_Type_Sym, --# Scope, --# SPARK_IO.File_Sys; is The_Own_Variable : Dictionary.Symbol; It : Dictionary.Iterator; procedure Create_Interrupt_Stream_Variables (For_PO : in Dictionary.Symbol; Error_Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# CommandLineData.Content, --# For_PO, --# LexTokenManager.State & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Node_Pos, --# For_PO, --# LexTokenManager.State, --# SPARK_IO.File_Sys; is It : Dictionary.Iterator; The_Mapping : Dictionary.Symbol; begin It := Dictionary.FirstInterruptStreamMapping (For_PO); while not Dictionary.IsNullIterator (It) loop The_Mapping := Dictionary.CurrentSymbol (It); Create_Interrupt_Stream_Variable (For_PO => For_PO, The_Handler => Dictionary.GetInterruptStreamMappingHandler (The_Mapping), The_Stream_Variable => Dictionary.GetInterruptStreamMappingStream (The_Mapping), Error_Node_Pos => Error_Node_Pos); It := Dictionary.NextSymbol (It); end loop; end Create_Interrupt_Stream_Variables; begin -- Add_Implicit_Interrupt_Stream_Variables -- Go through all the own variables of this package It := Dictionary.FirstOwnVariable (ThePackage => Dictionary.GetRegion (Scope)); while It /= Dictionary.NullIterator loop The_Own_Variable := Dictionary.CurrentSymbol (It); -- If the variable is of type Protected_Type_Sym if Dictionary.OwnVariableHasType (OwnVariable => The_Own_Variable, Scope => Scope) and then Dictionary.Types_Are_Equal (Left_Symbol => Dictionary.GetType (The_Own_Variable), Right_Symbol => Protected_Type_Sym, Full_Range_Subtype => False) then -- Create interrupt stream variables as necessary Create_Interrupt_Stream_Variables (For_PO => The_Own_Variable, Error_Node_Pos => Error_Node_Pos); end if; It := Dictionary.NextSymbol (It); end loop; end Add_Implicit_Interrupt_Stream_Variables; ---------- function Get_Mode (For_Type : in LexTokenManager.Lex_String; Scope : in Dictionary.Scopes) return Dictionary.Modes --# global in Dictionary.Dict; --# in LexTokenManager.State; is It : Dictionary.Iterator; Own_Var_Sym : Dictionary.Symbol; Result : Dictionary.Modes; begin Result := Dictionary.DefaultMode; -- Go through all the own variables of this package looking for ones -- with this type. 
It := Dictionary.FirstOwnVariable (Dictionary.GetRegion (Scope)); while not Dictionary.IsNullIterator (It) loop Own_Var_Sym := Dictionary.CurrentSymbol (It); if Dictionary.OwnVariableHasType (OwnVariable => Own_Var_Sym, Scope => Scope) and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => For_Type, Lex_Str2 => Dictionary.GetSimpleName (Dictionary.GetType (Own_Var_Sym))) = LexTokenManager.Str_Eq then -- Found an own variable of the type. So use its mode. -- Note. If the own variable is moded then there can only be -- one instance of that that type. So we can exit when we find -- the first one. Result := Dictionary.GetOwnVariableMode (Own_Var_Sym); exit; end if; It := Dictionary.NextSymbol (It); end loop; return Result; end Get_Mode; procedure Check_Element_Initialization (Variable_Declaration_Node : in STree.SyntaxNode; Error_Node_Pos : in LexTokenManager.Token_Position; Current_Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Node_Pos, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Variable_Declaration_Node; --# pre Syntax_Node_Type (Variable_Declaration_Node, STree.Table) = SP_Symbols.variable_declaration; is It : STree.Iterator; Ident_List_Node : STree.SyntaxNode; Ident_Node : STree.SyntaxNode; Ident_Str : LexTokenManager.Lex_String; Type_Node : STree.SyntaxNode; Exp_Node : STree.SyntaxNode; Sym : Dictionary.Symbol; begin Ident_List_Node := Child_Node (Current_Node => Variable_Declaration_Node); -- ASSUME Ident_List_Node = identifier_list SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_List_Node) = SP_Symbols.identifier_list, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_List_Node = identifier_list in Check_Element_Initialization"); Type_Node := Next_Sibling (Current_Node => Ident_List_Node); -- ASSUME Type_Node = RWaliased OR type_mark if Syntax_Node_Type (Node => Type_Node) = SP_Symbols.RWaliased then -- ASSUME Type_Node = RWaliased Type_Node := Next_Sibling (Current_Node => Type_Node); elsif Syntax_Node_Type (Node => Type_Node) /= SP_Symbols.type_mark then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Type_Node = RWaliased OR type_mark in Check_Element_Initialization"); end if; -- ASSUME Type_Node = type_mark SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Type_Node) = SP_Symbols.type_mark, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Type_Node = type_mark in Check_Element_Initialization"); Exp_Node := Next_Sibling (Current_Node => Type_Node); -- ASSUME Exp_Node = expression OR NULL if Exp_Node = STree.NullNode then -- ASSUME Exp_Node = NULL -- Variable(s) ISN'T initialized - we raise an error -- at the node corresponsing to the end of the -- enclosing protected type declaration, so that the -- error may be justified -- Variable_Declaration_Node has a list of identifiers below -- it, so we need to raise an error for each of them. 
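-- For example (names illustrative): given the element declaration
-- "A, B : Counter_Type;" with no initializing expression, an
-- uninitialized-protected-element error is reported for both A and B,
-- positioned at the closing "end PT;" so that it can be justified there.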
It := Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Ident_List_Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Ident_Node := Get_Node (It => It); -- ASSUME Ident_Node = identifier Ident_Str := Node_Lex_String (Node => Ident_Node); Sym := Dictionary.LookupItem (Name => Ident_Str, Scope => Current_Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); -- If the protected element had a semantic error in its declaration, -- then LookupItem might return NullSymbol, so if not Dictionary.Is_Null_Symbol (Sym) then ErrorHandler.Usage_Error (Err_Type => ErrorHandler.Uninitialized_Protected_Element, Position => Error_Node_Pos, Var_Sym => Sym, Scope => Current_Scope); end if; It := STree.NextNode (It); end loop; elsif Syntax_Node_Type (Node => Exp_Node) /= SP_Symbols.expression then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Exp_Node = expression OR NULL in Check_Element_Initialization"); end if; end Check_Element_Initialization; begin -- Wf_Protected_Type_Declaration; Ident_Node := Child_Node (Current_Node => Node); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Wf_Protected_Type_Declaration"); Ident_Str := Node_Lex_String (Node => Ident_Node); Sym := Dictionary.LookupItem (Name => Ident_Str, Scope => Scope, Context => Dictionary.ProofContext, Full_Package_Name => False); if Dictionary.Is_Null_Symbol (Sym) or else (Dictionary.IsTypeMark (Sym) and then Dictionary.TypeIsAnnounced (TheType => Sym) and then not Dictionary.Is_Declared (Item => Sym)) then Discriminant_Node := Get_Discriminant_Node (Protected_Type_Declaration_Node => Node); -- ASSUME Discriminant_Node = known_discriminant_part OR NULL Closing_Ident_Node := Get_Closing_Ident_Node (Protected_Type_Declaration_Node => Node); -- ASSUME Closing_Ident_Node = identifier OR hidden_part SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Closing_Ident_Node) = SP_Symbols.hidden_part or else Syntax_Node_Type (Node => Closing_Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Closing_Ident_Node = identifier OR hidden_part in Wf_Protected_Type_Declaration"); if not Dictionary.Is_Null_Symbol (Sym) then STree.Set_Node_Lex_String (Sym => Sym, Node => Ident_Node); end if; Dictionary.Add_Protected_Type (Name => Ident_Str, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Node), End_Position => Node_Position (Node => Node)), Scope => Scope, Context => Dictionary.ProgramContext, Mode => Get_Mode (For_Type => Ident_Str, Scope => Scope), Constrained => (Syntax_Node_Type (Node => Discriminant_Node) /= SP_Symbols.known_discriminant_part), The_Type => Protected_Type_Sym); STree.Add_Node_Symbol (Node => Ident_Node, Sym => Protected_Type_Sym); if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Symbol (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Ident_Node, Symbol => Protected_Type_Sym, Is_Declaration => True); end if; if Syntax_Node_Type (Node => Discriminant_Node) = SP_Symbols.known_discriminant_part then -- ASSUME Discriminant_Node = known_discriminant_part Wf_Known_Discriminant_Part (Node => Discriminant_Node, Protected_Type_Sym => Protected_Type_Sym, Scope => Scope); elsif Discriminant_Node /= STree.NullNode then SystemErrors.Fatal_Error (Sys_Err => 
SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Discriminant_Node = known_discriminant_part OR NULL in Wf_Protected_Type_Declaration"); end if; -- wff protected ops Protected_Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Protected_Type_Sym); Wf_Protected_Op_Dec (Node => Get_Protected_Operations_Node (Protected_Type_Declaration_Node => Node), Scope => Protected_Scope, Component_Data => Component_Data, The_Heap => The_Heap, Op_Found => At_Least_One_Operation_Declared); -- wff protected elements Protected_Element_Decl_Node := Get_Protected_Element_Declaration_Node (Protected_Type_Declaration_Node => Node); -- ASSUME Protected_Element_Decl_Node = protected_element_declaration OR NULL if Syntax_Node_Type (Node => Protected_Element_Decl_Node) = SP_Symbols.protected_element_declaration then -- ASSUME Protected_Element_Decl_Node = protected_element_declaration if Get_Mode (For_Type => Ident_Str, Scope => Scope) /= Dictionary.DefaultMode then ErrorHandler.Semantic_Error_Sym (Err_Num => 928, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Protected_Element_Decl_Node), Sym => Protected_Type_Sym, Scope => Scope); else -- Element declarations are not hidden so we need to wf them -- Grammar ensures there is at least one declaration Protected_Private_Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Privat, The_Unit => Protected_Type_Sym); --------------------------------------------------------------------- -- We check protected elements in 2 passes. -- -- Pass 1 WFFs each variable_declaration or justification_statement -- -- Pass 2 checks each variable for initialization, raising errors -- at the "end PT;" node - this allows for initialization -- errors to be justified --------------------------------------------------------------------- -- Pass 1 It := Find_First_Node (Node_Kind => SP_Symbols.protected_element, From_Root => Protected_Element_Decl_Node, In_Direction => STree.Down); while not STree.IsNull (It) loop --# assert STree.Table = STree.Table~; Protected_Element_Node := Child_Node (Current_Node => Get_Node (It => It)); -- ASSUME Protected_Element_Node = variable_declaration OR justification_statement case Syntax_Node_Type (Node => Protected_Element_Node) is when SP_Symbols.variable_declaration => -- ASSUME Protected_Element_Node = variable_declaration -- Here we need to distinguish between the scope of the -- enclosing unit and the scope of the declaration, which -- may be different. 
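-- Roughly: the element is declared in the private part of the protected
-- type (Protected_Private_Scope), while its enclosing program unit is
-- still the unit in which the protected type itself is declared (Scope);
-- hence the two distinct scope parameters passed below.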
Wf_Variable_Declaration (Node => Protected_Element_Node, Enclosing_Unit_Scope => Scope, Declaration_Scope => Protected_Private_Scope, The_Heap => The_Heap); when SP_Symbols.justification_statement => -- ASSUME Protected_Element_Node = justification_statement Wf_Justification_Statement (Node => Protected_Element_Node, Scope => Protected_Private_Scope, Component_Data => Component_Data, The_Heap => The_Heap); when others => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Protected_Element_Node = variable_declaration OR " & "justification_statement in Wf_Protected_Type_Declaration"); end case; It := STree.NextNode (It); end loop; -- end of Pass 1 -- Pass 2 It := Find_First_Node (Node_Kind => SP_Symbols.protected_element, From_Root => Protected_Element_Decl_Node, In_Direction => STree.Down); while not STree.IsNull (It) loop --# assert STree.Table = STree.Table~; Protected_Element_Node := Child_Node (Current_Node => Get_Node (It => It)); case Syntax_Node_Type (Node => Protected_Element_Node) is when SP_Symbols.variable_declaration => -- ASSUME Protected_Element_Node = variable_declaration Check_Element_Initialization (Variable_Declaration_Node => Protected_Element_Node, Error_Node_Pos => Node_Position (Node => Closing_Ident_Node), Current_Scope => Protected_Private_Scope); when SP_Symbols.justification_statement => -- ASSUME Protected_Element_Node = justification_statement null; when others => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Protected_Element_Node = variable_declaration OR " & "justification_statement in Wf_Protected_Type_Declaration"); end case; It := STree.NextNode (It); end loop; -- end of Pass 2 end if; elsif Protected_Element_Decl_Node /= STree.NullNode then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Protected_Element_Decl_Node = protected_element_declaration OR " & "NULL in Wf_Protected_Type_Declaration"); end if; -- Add any virtual elements. The virtual elements are the items in the protects list -- for this type. They behave as if they were elements of the protected type and -- hence must be made constituents of the implicit own variable associated with the -- protected type. Add_Virtual_Elements (Type_Sym => Protected_Type_Sym); -- This call will creates any interrupt stream variables specified by local own variables -- of Protected_Type_Sym. 
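-- Roughly: for each own variable of the enclosing unit whose type is
-- this protected type, every handler-to-stream mapping recorded for it
-- gives rise to one implicit interrupt stream variable (see
-- Create_Interrupt_Stream_Variables above).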
Add_Implicit_Interrupt_Stream_Variables (Protected_Type_Sym => Protected_Type_Sym, Scope => Scope, Error_Node_Pos => Node_Position (Node => Node)); -- closing identifier must match initial if Syntax_Node_Type (Node => Closing_Ident_Node) = SP_Symbols.identifier then -- private part is not hidden so we need to check that the closing identifier is correct if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => Node_Lex_String (Node => Closing_Ident_Node)) /= LexTokenManager.Str_Eq then ErrorHandler.Semantic_Error (Err_Num => 58, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Closing_Ident_Node), Id_Str => Ident_Str); end if; else -- must be hidden Dictionary.SetProtectedTypeElementsHidden (Protected_Type_Sym); ErrorHandler.Hidden_Text (Position => Node_Position (Node => Closing_Ident_Node), Unit_Str => Ident_Str, Unit_Typ => SP_Symbols.protected_type_declaration); end if; -- protected type must declare at leat one operation if not At_Least_One_Operation_Declared then ErrorHandler.Semantic_Error (Err_Num => 870, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Closing_Ident_Node), Id_Str => Ident_Str); end if; -- there must be a valid combination of pragmas declared in PT Check_Pragma_Validity (End_Node_Position => Node_Position (Node => Closing_Ident_Node), Protected_Type_Sym => Protected_Type_Sym, Scope => Scope); else -- illegal redeclaration ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); end if; end Wf_Protected_Type_Declaration; spark-2012.0.deb/examiner/sem-wf_basic_declarative_item-check_subtype_against_basetype_bounds.adb0000644000175000017500000001053311753202336032445 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- Given a (sub)type S with range S'First .. S'Last -- and a base type T with range T'First .. T'Last -- -- First, checks that T'First and T'Last are well-defined, or raises sem error 793. -- Then, if S is real and unconstrained, then no further checks. -- Then, checks that (S'First >= T'First and S'Last <= T'Last) or raises sem error 794. 
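-- For example (values illustrative): with S declared as range -10 .. 10
-- and a base type T whose range is -128 .. 127, both checks pass; if S
-- were instead range 0 .. 200, S'Last would exceed T'Last and semantic
-- error 794 would be reported at the range.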
-- -- Used to check legality of derived numeric type declarations and base type assertions separate (Sem.Wf_Basic_Declarative_Item) procedure Check_Subtype_Against_Basetype_Bounds (Base_Type_Sym : in Dictionary.Symbol; Subtype_First : in LexTokenManager.Lex_String; Subtype_Last : in LexTokenManager.Lex_String; Ident_Node_Pos : in LexTokenManager.Token_Position; Range_Node_Pos : in LexTokenManager.Token_Position; Errors : in out Boolean) is Base_Type_First, Base_Type_Last : LexTokenManager.Lex_String; Type_First_Val, Type_Last_Val, Base_Type_First_Val, Base_Type_Last_Val, Comp_Val : Maths.Value; Bounds_OK : Boolean; Maths_Error1, Maths_Error2 : Maths.ErrorCode; begin -- check that there are defined bounds for the base type, and also that -- the range of the type fits within the range of the specified base type if not Errors and then Dictionary.IsTypeMark (Base_Type_Sym) then Base_Type_First := Dictionary.GetScalarAttributeValue (False, LexTokenManager.First_Token, Base_Type_Sym); Base_Type_Last := Dictionary.GetScalarAttributeValue (False, LexTokenManager.Last_Token, Base_Type_Sym); if Base_Type_First = LexTokenManager.Null_String or else Base_Type_Last = LexTokenManager.Null_String then if CommandLineData.Content.VCG then -- we require that the base type have defined bounds, ErrorHandler.Semantic_Error (Err_Num => 793, Reference => ErrorHandler.No_Reference, Position => Ident_Node_Pos, Id_Str => LexTokenManager.Null_String); Errors := True; end if; elsif Subtype_First = LexTokenManager.Null_String or else Subtype_Last = LexTokenManager.Null_String then -- no check possible with unconstrained ranges null; else -- check that the range of the base type is at least that of the type Type_First_Val := Maths.ValueRep (Subtype_First); Type_Last_Val := Maths.ValueRep (Subtype_Last); Base_Type_First_Val := Maths.ValueRep (Base_Type_First); Base_Type_Last_Val := Maths.ValueRep (Base_Type_Last); Maths.Lesser (Type_First_Val, Base_Type_First_Val, Comp_Val, Maths_Error1); Bounds_OK := (Comp_Val = Maths.FalseValue); Maths.Greater (Type_Last_Val, Base_Type_Last_Val, Comp_Val, Maths_Error2); Bounds_OK := Bounds_OK and then Comp_Val = Maths.FalseValue; if not Bounds_OK and then Maths_Error1 = Maths.NoError and then Maths_Error2 = Maths.NoError then ErrorHandler.Semantic_Error (Err_Num => 794, Reference => ErrorHandler.No_Reference, Position => Range_Node_Pos, Id_Str => LexTokenManager.Null_String); Errors := True; end if; end if; end if; end Check_Subtype_Against_Basetype_Bounds; spark-2012.0.deb/examiner/sp_expected_symbols-get_expected_symbols.adb0000644000175000017500000000636211753202336025211 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= -- packed version of Get_Expected_Symbols. separate (SP_Expected_Symbols) procedure Get_Expected_Symbols (Error_State : in SP_Productions.SP_State; No_Of_Terminals : out SP_Ess_Sym_Range; Terminal_List : out SP_Exp_Sym_List; No_Of_Non_Terminals : out SP_Ess_Sym_Range; Non_Terminal_List : out SP_Exp_Sym_List) is Loc_Entry : Ess_Symbol_Entry; Sym_List_Index : Exp_Sym_Range; Terminal_Count, Non_Terminal_Count : SP_Ess_Sym_Range; begin Loc_Entry := Expected_Symbols (Error_State); Sym_List_Index := Exp_Sym_Range ((Loc_Entry / Index) mod Index_Lim); Terminal_Count := SP_Ess_Sym_Range ((Loc_Entry / N_Terminals) mod Term_Lim); Non_Terminal_Count := SP_Ess_Sym_Range ((Loc_Entry / N_Non_Terminals) mod Non_Term_Lim); for CurrIndex in SP_Ess_Sym_Range range 1 .. Terminal_Count loop --# accept Flow, 23, Terminal_List, "Expected flow error"; Terminal_List (CurrIndex) := Essential_Symbols (Sym_List_Index); --# end accept; if CurrIndex < Terminal_Count then Sym_List_Index := Sym_List_Index + 1; end if; end loop; if Natural (Non_Terminal_Count) > 0 then Sym_List_Index := Exp_Sym_Range ((Natural ((Loc_Entry / Index) mod Index_Lim)) + Natural (Terminal_Count)); for CurrIndex in SP_Ess_Sym_Range range 1 .. Non_Terminal_Count loop --# accept Flow, 23, Non_Terminal_List, "Expected flow error"; Non_Terminal_List (CurrIndex) := Essential_Symbols (Sym_List_Index); --# end accept; if CurrIndex < Non_Terminal_Count then Sym_List_Index := Sym_List_Index + 1; end if; end loop; end if; if Error_State = 1 and then Terminal_Count = 0 and then Non_Terminal_Count = 0 then Non_Terminal_Count := 1; --# accept Flow, 504, Non_Terminal_List, "Expected flow error"; Non_Terminal_List (1) := SP_Symbols.SP_Non_Terminal'Succ (SP_Symbols.SP_Non_Terminal'First); --# end accept; end if; No_Of_Terminals := Terminal_Count; No_Of_Non_Terminals := Non_Terminal_Count; --# accept Flow, 602, Terminal_List, Terminal_List, "Expected flow error" & --# Flow, 602, Non_Terminal_List, Non_Terminal_List, "Expected flow error"; end Get_Expected_Symbols; spark-2012.0.deb/examiner/flowanalyser-flowanalyse-analyserelations-mergeandhandleerrors.adb0000644000175000017500000004462011753202336031523 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (FlowAnalyser.FlowAnalyse.AnalyseRelations) procedure MergeAndHandleErrors is RootNode : ComponentManager.Component; procedure ProcessRoot (RootNode : in ComponentManager.Component) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Scope; --# in TheErrorHeap; --# in out ComponentData; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives ComponentData, --# Statistics.TableUsage, --# TheHeap from *, --# ComponentData, --# RootNode, --# TheErrorHeap, --# TheHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ComponentData, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# RootNode, --# Scope, --# SPARK_IO.File_Sys, --# TheErrorHeap, --# TheHeap; is CurrentNode, LastNode : ComponentManager.Component; ErrList : SeqAlgebra.Seq; CurrentMember : SeqAlgebra.MemberOfSeq; ListOfErrorsToPassToParent : SeqAlgebra.Seq; MemberOfList : SeqAlgebra.MemberOfSeq; type ErrorSelections is (AllEntries, Table1); procedure AddPreviousNodesToError (PrevError, CurrError : in ComponentErrors.ComponentError) --# global in TheErrorHeap; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives Statistics.TableUsage, --# TheHeap from *, --# CurrError, --# PrevError, --# TheErrorHeap, --# TheHeap; is PrevNodeSeq, CurrNodeSeq : SeqAlgebra.Seq; begin CurrNodeSeq := ComponentErrors.AssociatedComponentNodesOfError (TheErrorHeap, CurrError); PrevNodeSeq := ComponentErrors.AssociatedComponentNodesOfError (TheErrorHeap, PrevError); SeqAlgebra.AugmentSeq (TheHeap, CurrNodeSeq, --with PrevNodeSeq); while not SeqAlgebra.IsEmptySeq (TheHeap, PrevNodeSeq) loop SeqAlgebra.EliminateAfter (TheHeap, SeqAlgebra.BeforeFirstMember (PrevNodeSeq)); end loop; end AddPreviousNodesToError; ---------------------------------------------------------------------------- procedure ReplaceNodesOfError (Node : in ComponentManager.Component; Error : in ComponentErrors.ComponentError) --# global in TheErrorHeap; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives Statistics.TableUsage, --# TheHeap from *, --# Error, --# Node, --# TheErrorHeap, --# TheHeap; is NodeSeq : SeqAlgebra.Seq; begin NodeSeq := ComponentErrors.AssociatedComponentNodesOfError (TheErrorHeap, Error); while not SeqAlgebra.IsEmptySeq (TheHeap, NodeSeq) loop SeqAlgebra.EliminateAfter (TheHeap, SeqAlgebra.BeforeFirstMember (NodeSeq)); end loop; SeqAlgebra.AddMember (TheHeap, NodeSeq, ComponentManager.ComponentToRef (Node)); end ReplaceNodesOfError; ---------------------------------------------------------------------------- procedure PassToErrorHandler (Error : in ComponentErrors.ComponentError; Sel : in ErrorSelections) --# global in CommandLineData.Content; --# in ComponentData; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Scope; --# in TheErrorHeap; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out TheHeap; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ComponentData, --# Dictionary.Dict, --# Error, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Scope, --# Sel, --# SPARK_IO.File_Sys, --# TheErrorHeap, --# TheHeap & --# TheHeap from *, --# Error, --# TheErrorHeap; is ErrClass : ComponentErrors.ErrorClass; ErrVal : Natural; ErrPosn : 
LexTokenManager.Token_Position; NameOfNode : Dictionary.Symbol; NodeSeq : SeqAlgebra.Seq; begin --PassToErrorHandler ErrClass := ComponentErrors.ClassOfError (TheErrorHeap, Error); ErrVal := ComponentErrors.ValueOfError (TheErrorHeap, Error); ErrPosn := ComponentErrors.PositionOfError (TheErrorHeap, Error); NodeSeq := ComponentErrors.AssociatedComponentNodesOfError (TheErrorHeap, Error); while not SeqAlgebra.IsEmptySeq (TheHeap, NodeSeq) loop NameOfNode := ComponentManager.GetName (ComponentData, ComponentManager.RefToComponent (SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => SeqAlgebra.FirstMember (TheHeap, NodeSeq)))); SeqAlgebra.EliminateAfter (TheHeap, SeqAlgebra.BeforeFirstMember (NodeSeq)); --# accept Flow, 41, "Expected stable expression"; case ErrClass is when ComponentErrors.DataFlow => --# end accept; ErrorHandler.Data_Flow_Error (Err_Type => ErrorHandler.Data_Flow_Err_Type'Val (ErrVal), Position => ErrPosn, Var_Sym => NameOfNode, Scope => Scope); when ComponentErrors.IneffectiveStmt => --# accept Flow, 41, "Expected stable expression"; if Sel = Table1 then --# end accept; null; else ErrorHandler.Ineffective_Stmt (Position => ErrPosn, Var_Sym => NameOfNode, Scope => Scope); end if; when ComponentErrors.IneffectiveFieldAssignment => ErrorHandler.Ineffective_Stmt (Position => ErrPosn, Var_Sym => NameOfNode, Scope => Scope); when ComponentErrors.Dependency => --# accept Flow, 41, "Expected stable expression"; if Sel = Table1 and then (ErrVal = ErrorHandler.Dependency_Err_Type'Pos (ErrorHandler.Ineff_Local_Init)) then --# end accept; null; else ErrorHandler.Dependency_Error (Err_Type => ErrorHandler.Dependency_Err_Type'Val (ErrVal), Position => ErrPosn, Import_Var_Sym => NameOfNode, Export_Var_Sym => ComponentErrors.SymOfError (TheErrorHeap, Error), Scope => Scope); end if; when ComponentErrors.Usage => --# accept Flow, 41, "Expected stable expression"; if Sel = Table1 and then (ErrVal = ErrorHandler.Usage_Err_Type'Pos (ErrorHandler.Unused_Import) or ErrVal = ErrorHandler.Usage_Err_Type'Pos (ErrorHandler.Unreferenced_Var) or ErrVal = ErrorHandler.Usage_Err_Type'Pos (ErrorHandler.Ineffective_Import)) then --# end accept; null; else ErrorHandler.Usage_Error (Err_Type => ErrorHandler.Usage_Err_Type'Val (ErrVal), Position => ErrPosn, Var_Sym => NameOfNode, Scope => Scope); end if; when ComponentErrors.SemanticWarning => --# accept Flow, 41, "Expected stable expression"; if Sel = AllEntries then --# end accept; ErrorHandler.Semantic_Warning (Err_Num => ErrVal, Position => ErrPosn, Id_Str => Dictionary.GetSimpleName (NameOfNode)); end if; end case; end loop; end PassToErrorHandler; ------------------ procedure CompoundErrors (CurrentNode, PreviousNode : in ComponentManager.Component) --# global in CommandLineData.Content; --# in ComponentData; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Scope; --# in TheErrorHeap; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ComponentData, --# CurrentNode, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# PreviousNode, --# Scope, --# SPARK_IO.File_Sys, --# TheErrorHeap, --# TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# ComponentData, --# CurrentNode, --# PreviousNode, --# TheErrorHeap, --# TheHeap; is CurrErrList, PrevErrList : SeqAlgebra.Seq; CurrIndex, LagCurrIndex, PrevIndex, LagPrevIndex : 
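-- Note on PassToErrorHandler above: the ErrorSelections parameter controls how
-- much is reported.  With Sel = Table1 (the value used from CompoundErrors
-- below while sibling components are being merged) the case statement
-- deliberately drops ineffective-statement messages, Ineff_Local_Init
-- dependency errors, Unused_Import / Unreferenced_Var / Ineffective_Import
-- usage errors and all semantic warnings; only the final flush performed with
-- Sel = AllEntries at the end of ProcessRoot reports every retained error.
-- Presumably these messages are held back at this stage because they only
-- become meaningful once every component of the variable has been considered.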
SeqAlgebra.MemberOfSeq; CurrErr, PrevErr : Natural; IsACommonError : Boolean; begin -- CompoundErrors CurrErrList := ComponentManager.GetListOfErrors (ComponentData, CurrentNode); PrevErrList := ComponentManager.GetListOfErrors (ComponentData, PreviousNode); LagCurrIndex := SeqAlgebra.BeforeFirstMember (CurrErrList); CurrIndex := SeqAlgebra.FirstMember (TheHeap, CurrErrList); loop exit when SeqAlgebra.IsNullMember (CurrIndex); CurrErr := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => CurrIndex); LagPrevIndex := SeqAlgebra.BeforeFirstMember (PrevErrList); PrevIndex := SeqAlgebra.FirstMember (TheHeap, PrevErrList); IsACommonError := False; --addition to aak algorithm loop exit when SeqAlgebra.IsNullMember (PrevIndex); PrevErr := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => PrevIndex); IsACommonError := ComponentErrors.IsSameError (TheErrorHeap, CurrErr, PrevErr); if IsACommonError then AddPreviousNodesToError (PrevErr, CurrErr); -- maintain set of affected nodes SeqAlgebra.EliminateAfter (TheHeap, LagPrevIndex); exit; end if; LagPrevIndex := PrevIndex; PrevIndex := SeqAlgebra.NextMember (TheHeap, PrevIndex); end loop; --inner if IsACommonError then LagCurrIndex := CurrIndex; CurrIndex := SeqAlgebra.NextMember (TheHeap, CurrIndex); else PassToErrorHandler (CurrErr, Table1); CurrIndex := SeqAlgebra.NextMember (TheHeap, CurrIndex); SeqAlgebra.EliminateAfter (TheHeap, LagCurrIndex); end if; end loop; --outer loop exit when SeqAlgebra.IsEmptySeq (TheHeap, PrevErrList); PrevErr := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => SeqAlgebra.FirstMember (TheHeap, PrevErrList)); PassToErrorHandler (PrevErr, Table1); SeqAlgebra.EliminateAfter (TheHeap, SeqAlgebra.BeforeFirstMember (PrevErrList)); end loop; end CompoundErrors; begin -- ProcessRoot CurrentNode := RootNode; loop --down LastNode := CurrentNode; if ComponentManager.IsALeaf (ComponentData, CurrentNode) and then not ComponentManager.IsNullComponent (ComponentManager.GetPreviousSibling (ComponentData, CurrentNode)) then CompoundErrors (CurrentNode, ComponentManager.GetPreviousSibling (ComponentData, CurrentNode)); end if; CurrentNode := ComponentManager.GetFirstChild (ComponentData, CurrentNode); if ComponentManager.IsNullComponent (CurrentNode) then loop --up CurrentNode := ComponentManager.GetNextSibling (ComponentData, LastNode); exit when not ComponentManager.IsNullComponent (CurrentNode); CurrentNode := ComponentManager.GetParent (ComponentData, LastNode); exit when ComponentManager.IsNullComponent (CurrentNode); -- before we pass the list of errors to the parent node we need to set the affected nodes -- of each error to the parent node (replacing any set of affected nodes -- accumulated during sibling merging of errors) ListOfErrorsToPassToParent := ComponentManager.GetListOfErrors (ComponentData, LastNode); MemberOfList := SeqAlgebra.FirstMember (TheHeap, ListOfErrorsToPassToParent); while not SeqAlgebra.IsNullMember (MemberOfList) loop ReplaceNodesOfError (CurrentNode, SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => MemberOfList)); MemberOfList := SeqAlgebra.NextMember (TheHeap, MemberOfList); end loop; ComponentManager.AddNewListOfErrors (TheHeap, ComponentData, CurrentNode, ComponentManager.GetListOfErrors (ComponentData, LastNode)); ComponentManager.EmptyListOfErrors (TheHeap, ComponentData, LastNode); if not ComponentManager.IsNullComponent (ComponentManager.GetPreviousSibling (ComponentData, CurrentNode)) then CompoundErrors (CurrentNode, ComponentManager.GetPreviousSibling (ComponentData, CurrentNode)); end if;
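-- At this point in the upward traversal the error list of LastNode has been
-- handed to its parent (CurrentNode): ReplaceNodesOfError re-attributed each
-- error to the parent, AddNewListOfErrors attached the list to the parent,
-- EmptyListOfErrors cleared the child, and, where the parent has a previous
-- sibling, shared errors have just been compounded by CompoundErrors.  The
-- assignment below makes the parent the new LastNode so the climb continues.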
LastNode := CurrentNode; end loop; --up end if; exit when ComponentManager.IsNullComponent (CurrentNode); end loop; --down ErrList := ComponentManager.GetListOfErrors (ComponentData, LastNode); CurrentMember := SeqAlgebra.FirstMember (TheHeap, ErrList); while not SeqAlgebra.IsNullMember (CurrentMember) loop PassToErrorHandler (SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => CurrentMember), AllEntries); CurrentMember := SeqAlgebra.NextMember (TheHeap, CurrentMember); end loop; end ProcessRoot; begin -- MergeAndHandleErrors RootNode := ComponentManager.GetFirstRoot (ComponentData); while not ComponentManager.IsNullComponent (RootNode) loop ProcessRoot (RootNode); RootNode := ComponentManager.GetNextRoot (ComponentData, RootNode); end loop; end MergeAndHandleErrors; spark-2012.0.deb/examiner/statistics.ads0000644000175000017500000000402511753202336017135 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SPARK_IO; --# inherit CommandLineData, --# ExaminerConstants, --# SPARK_IO, --# SystemErrors; package Statistics --# own TableUsage : Table_Usage_Array; --# initializes TableUsage; is type Table_Type is (RelationTable, VCGHeap, StringTable, SymbolTable, SyntaxTree, RecordFields, RecordErrors); procedure SetTableUsage (Table : in Table_Type; Size : in Natural); --# global in out TableUsage; --# derives TableUsage from *, --# Size, --# Table; procedure WriteOutput (File : in SPARK_IO.File_Type); --# global in CommandLineData.Content; --# in TableUsage; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# File, --# TableUsage; end Statistics; spark-2012.0.deb/examiner/examinerconstants.aps0000644000175000017500000000765711753202337020543 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= package ExaminerConstants is ------------------------------------------------------------------- -- General purpose RefType -- -- This type is used as a general purpose reference in the -- Examiner. It is used, in particular, as a general reference -- in an STree Node to refer to other useful info associated -- with that Node. -- -- It is declared here for ease of visibility to other units ------------------------------------------------------------------- type RefType is range 0 .. Natural'Last; --# assert RefType'Base is Integer; ------------------------------------------------------------------- -- Principal table sizes (as reported by Examiner -stat switch) ------------------------------------------------------------------- MaxRecordComponents : constant := 16_000; MaxRecordErrors : constant := 16_000; String_Table_Size : constant := 2_097_152; SyntaxTreeSize : constant := 3_200_000; ContextManagerMaxFiles : constant := 8192; ContextManagerMaxUnits : constant := 8192; FlowAnalysisMaxExpnNumber : constant := 32000; MaxErrorsPerFile : constant := 65535; MaxIndexComponents : constant := 256; MaxIndexNumber : constant := 256; VCGMatrixOrder : constant := 8000; StmtStackSize : constant := 1000; ------------------------------------------------------------------- -- Constants invariant across different sizes ------------------------------------------------------------------- String_Hash_Size : constant := 2**8; -- Must be power of 2 and -- less than String_Table_Size. SPStackSize : constant := 256; SPErrLookahead : constant := 6; SPLocalErrLookahead : constant := 3; Lex_Token_Lists_Max_Length : constant := 16; ListsHeapSize : constant := 10000; MaxFilesOnCommandLine : constant := 100; MaxErrorSetSize : constant := 9; MaxJustificationsPerFile : constant := 300; ErrorBufferSize : constant := 5000; MaxPragmasInWarningFile : constant := 120; FlowAnalysisMaxVarNumber : constant := 10_000; StackManagerStackSize : constant := 1000; SimpleListsSize : constant := 10000; WalkExpStackMax : constant := 1000; AggregateStackMax : constant := 20; WalkStmtStackMax : constant := 100; MaxProcCallParams : constant := 1000; WfCompilationUnitStackMax : constant := 100; CompleteCheckSize : constant := 32000; WalkAnnotationExpressionStackMax : constant := 1000; -- Address_Size is determined by GNATPREP definition file. -- Needs to be a typed static constant, so we can -- use it in a type declaration.
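-- Note: "$Address_Size" below is a gnatprep substitution marker, which is
-- presumably why this file is shipped as examinerconstants.aps while main.idx
-- expects the preprocessed specification in examinerconstants.ads.  As an
-- illustrative sketch only (the definitions file name and value here are
-- hypothetical, and the real build scripts are not part of this listing), a
-- definitions file examiner.def containing
--    Address_Size := 64
-- would be applied with something like
--    gnatprep examinerconstants.aps examinerconstants.ads examiner.def
-- turning the declaration below into "Address_Size : constant Positive := 64;".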
Address_Size : constant Positive := $Address_Size; end ExaminerConstants; spark-2012.0.deb/examiner/main.idx0000644000175000017500000001023611753202337015706 0ustar eugeneugensem auxindex is in sem.idx flowanalyser auxindex is in flowanalyser.idx vcg auxindex is in vcg.idx declarations auxindex is in declarations.idx errorhandler auxindex is in errorhandler.idx dag auxindex is in dag.idx indexmanager auxindex is in indexmanager.idx sli auxindex is in sli.idx adjustfdl_rws specification is in adjustfdl_rws.ads cell_storage specification is in cell_storage.ads cells specification is in cells.ads cells.utility specification is in cells-utility.ads cells.utility.list specification is in cells-utility-list.ads casing specification is in casing.ads clists specification is in clists.ads commandline specification is in commandline.ads commandlinedata specification is in commandlinedata.ads commandlinehandler specification is in commandlinehandler.ads completecheck specification is in completecheck.ads componenterrors specification is in componenterrors.ads componentmanager specification is in componentmanager.ads configfile specification is in configfile.ads contextmanager specification is in contextmanager.ads contextmanager.ops specification is in contextmanager-ops.ads cstacks specification is in cstacks.ads debug specification is in debug.ads dictionary specification is in dictionary.ads dictionary body is in dictionary.adb error_types specification is in error_types.ads examinerconstants specification is in examinerconstants.ads e_strings specification is in e_strings.ads filesystem specification is in filesystem.ads file_utils specification is in file_utils.ads spark_sha specification is in spark_sha.ads spark_xml specification is in spark_xml.ads graph specification is in graph.ads heap specification is in heap.ads heap_storage specification is in heap_storage.ads dag_io specification is in dag_io.ads fd specification is in fd.ads labels specification is in labels.ads lextokenlists specification is in lextokenlists.ads lextokenmanager specification is in lextokenmanager.ads lextokenmanager body is in lextokenmanager.adb lextokenmanager.relation_algebra specification is in lextokenmanager-relation_algebra.ads lextokenmanager.relation_algebra.string specification is in lextokenmanager-relation_algebra-string.ads lextokenmanager.seq_algebra specification is in lextokenmanager-seq_algebra.ads lextokenstacks specification is in lextokenstacks.ads lists specification is in lists.ads mainloop specification is in mainloop.ads maths specification is in maths.ads maths body is in maths.adb metafile specification is in metafile.ads pairs specification is in pairs.ads pile specification is in pile.ads reflist specification is in reflist.ads relationalgebra specification is in relationalgebra.ads relationalgebra.debug specification is in relationalgebra-debug.ads requiredunits specification is in requiredunits.ads screenecho specification is in screenecho.ads seqalgebra specification is in seqalgebra.ads simplelists specification is in simplelists.ads spark_io specification is in spark_io.ads sparkhtml specification is in sparkhtml.ads sparklex specification is in sparklex.ads sparklex body is in sparklex.adb sparklex.lex subunit is in sparklex-lex.adb SP_Expected_Symbols specification is in sp_expected_symbols.ads spparser specification is in spparser.ads SP_Parser_Actions specification is in sp_parser_actions.ads SP_Parser_Goto specification is in sp_parser_goto.ads SP_Productions specification is in 
sp_productions.ads SP_Relations Specification is in sp_relations.ads sprint specification is in sprint.ads SP_Symbols specification is in sp_symbols.ads statistics specification is in statistics.ads stmtstack specification is in stmtstack.ads structures specification is in structures.ads stree specification is in stree.ads stree body is in stree.adb Symbol_Set specification is in symbol_set.ads systemerrors specification is in systemerrors.ads error_io specification is in error_io.ads justification_io specification is in justification_io.ads system specification is in system_.ada calendar specification is in calendar_.ada xmlreport specification is in xmlreport.ads xmlreport body is in xmlreport.adb version specification is in ../common/versioning/version.ads Date_Time specification is in date_time.ads spark-2012.0.deb/examiner/sem-compunit-wf_generic_package_instantiation.adb0000644000175000017500000000324211753202336026067 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.CompUnit) procedure Wf_Generic_Package_Instantiation (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) is pragma Unreferenced (Scope); begin ErrorHandler.Semantic_Error (Err_Num => 999, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); --# accept Flow, 30, Scope, "Procedure body not implemented" & --# Flow, 50, SPARK_IO.File_Sys, Scope, "Procedure body not implemented" & --# Flow, 50, ErrorHandler.Error_Context, Scope, "Procedure body not implemented"; end Wf_Generic_Package_Instantiation; -- Scope not used for initial phase spark-2012.0.deb/examiner/errorhandler.adb0000644000175000017500000074751111753202336017427 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Ada.Characters.Latin_1; with CommandLineData; with Debug; with FileSystem; with ScreenEcho; with SPARK_XML; with SP_Relations; with SystemErrors; with XMLReport; use type CommandLineData.Justification_Options; package body ErrorHandler --# own Error_Context is Conversions.State, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# File_Open_Error, --# Potential_Invalid_Value, --# Stop_SLI, --# Total_Error_Count, --# WarningStatus.SuppressionList; is ---------------------------------------------------------------------- -- Centralized location for wrapper strings used when appending error explanations -- to error message strings. These are used by Conversions and in calls to ErrorAccumulator.Start_Msg Explanation_Prefix : constant String := " [Explanatory note: "; Explanation_Postfix : constant String := "]"; ---------------------------------------------------------------------- --# inherit CommandLineData, --# Dictionary, --# ErrorHandler, --# Error_IO, --# Error_Types, --# ExaminerConstants, --# E_Strings, --# LexTokenManager, --# SPARK_IO, --# SP_Symbols, --# SystemErrors; package Conversions --# own State; --# initializes State; is procedure ToString (Err_Num : in Error_Types.NumericError; Purpose : in Error_Types.ConversionRequestSource; Err_Str : out Error_Types.StringError); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out State; --# derives Err_Str from CommandLineData.Content, --# Dictionary.Dict, --# Err_Num, --# LexTokenManager.State, --# Purpose, --# State & --# State from *, --# CommandLineData.Content, --# Err_Num, --# Purpose; procedure Output_Reference_List (To_File : in SPARK_IO.File_Type); --# global in CommandLineData.Content; --# in State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# State, --# To_File; end Conversions; ---------------------------------------------------------------------- --# inherit CommandLineData, --# Conversions, --# Dictionary, --# ErrorHandler, --# Error_IO, --# Error_Types, --# ExaminerConstants, --# E_Strings, --# LexTokenManager, --# SPARK_IO, --# SP_Symbols, --# SystemErrors; package ErrorBuffer --# own Buffer; --# initializes Buffer; is -- this is a generic add routines called by more closely focussed procedure procedure Add (Err_File : in out Error_IO.File_Type; Err_Type : in Error_Types.Error_Class; Pos : in LexTokenManager.Token_Position; Scope : in Dictionary.Scopes; Error_Number : in Natural; Reference : in Natural; Name1, Name2, Name3 : in Error_Types.Names; Echo_Str : out Error_Types.StringError); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Buffer; --# in out Conversions.State; --# in out SPARK_IO.File_Sys; --# derives Buffer from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Error_Number, --# Err_Type, --# Name1, --# Name2, --# Name3, --# Pos, --# Reference, --# Scope & --# Conversions.State from *, --# Buffer, --# CommandLineData.Content, --# Error_Number, --# Err_Type, --# Name1, --# Name2, --# Name3, --# Pos, --# Reference, --# Scope & --# Echo_Str from Buffer, --# CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Error_Number, --# Err_Type, --# LexTokenManager.State, --# Name1, --# Name2, --# Name3, --# Pos, --# Reference, --# Scope & --# Err_File from *, --# Buffer, --# CommandLineData.Content, --# Error_Number, --# 
Err_Type, --# Name1, --# Name2, --# Name3, --# Pos, --# Reference, --# Scope, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# Buffer, --# CommandLineData.Content, --# Dictionary.Dict, --# Error_Number, --# Err_File, --# Err_Type, --# Name1, --# Name2, --# Name3, --# Pos, --# Reference, --# Scope; procedure Flush (Err_File : in out Error_IO.File_Type); --# global in Dictionary.Dict; --# in out Buffer; --# in out SPARK_IO.File_Sys; --# derives Buffer from * & --# Err_File from *, --# Buffer, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# Buffer, --# Dictionary.Dict, --# Err_File; end ErrorBuffer; ---------------------------------------------------------------------- -- This package provides a list of justification (accept) annotations found in the code. When errors are added, the list can be consulted to see whether the error -- should be displayed or not. The table is populated for the file currently being examined; -- when the error context changes, the table contents are saved to a temporary file (or memory-based -- simulation of such a file) so that we can list all the justifications in the report and listing files. --# inherit CommandLineData, --# Dictionary, --# ErrorHandler, --# E_Strings, --# LexTokenManager, --# SPARK_IO, --# SPARK_XML, --# SystemErrors, --# XMLReport; package Justifications is type Unmatched_Justification_Iterator is private; procedure Start_Unit (Which_Table : in out ErrorHandler.Justifications_Data_Tables); --# derives Which_Table from *; procedure Set_Current_Unit_Has_Semantic_Errors (Which_Table : in out ErrorHandler.Justifications_Data_Tables); --# derives Which_Table from *; procedure First_Unmatched_Justification (It : out Unmatched_Justification_Iterator; Which_Table : in ErrorHandler.Justifications_Data_Tables); --# global in CommandLineData.Content; --# derives It from CommandLineData.Content, --# Which_Table; procedure Next_Unmatched_Justification (It : in out Unmatched_Justification_Iterator; Which_Table : in ErrorHandler.Justifications_Data_Tables); --# global in CommandLineData.Content; --# derives It from *, --# CommandLineData.Content, --# Which_Table; function Error_Position (It : Unmatched_Justification_Iterator) return LexTokenManager.Token_Position; function Is_Null_Iterator (It : Unmatched_Justification_Iterator) return Boolean; procedure End_Unit (Which_Table : in out ErrorHandler.Justifications_Data_Tables); --# derives Which_Table from *; procedure Start_Justification (Which_Table : in out ErrorHandler.Justifications_Data_Tables; Position : in LexTokenManager.Token_Position; Line : in LexTokenManager.Line_Numbers; Kind : in ErrorHandler.Justification_Kinds; Err_Num : in Natural; Identifiers : in ErrorHandler.Justification_Identifiers; Applies_To_All : in Boolean; Explanation : in E_Strings.T; Maximum_Justifications_Reached : out Boolean); --# derives Maximum_Justifications_Reached from Which_Table & --# Which_Table from *, --# Applies_To_All, --# Err_Num, --# Explanation, --# Identifiers, --# Kind, --# Line, --# Position; procedure End_Justification (Which_Table : in out ErrorHandler.Justifications_Data_Tables; Line : in LexTokenManager.Line_Numbers; Unmatched_End : out Boolean); --# derives Unmatched_End, --# Which_Table from Line, --# Which_Table; procedure Check_Whether_Justified (Which_Table : in out ErrorHandler.Justifications_Data_Tables; Line : in LexTokenManager.Token_Position; Kind : in ErrorHandler.Justification_Kinds; Err_Num : in Natural; Identifiers : in ErrorHandler.Justification_Identifiers; Match_Found
: out Boolean); --# global in CommandLineData.Content; --# in LexTokenManager.State; --# derives Match_Found, --# Which_Table from CommandLineData.Content, --# Err_Num, --# Identifiers, --# Kind, --# LexTokenManager.State, --# Line, --# Which_Table; procedure Print_Justifications (Which_Table : in ErrorHandler.Justifications_Data_Tables; File : in SPARK_IO.File_Type); --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# File, --# Which_Table; procedure Print_Justifications_XML (Which_Table : in ErrorHandler.Justifications_Data_Tables; File : in SPARK_IO.File_Type); --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# File, --# Which_Table, --# XMLReport.State & --# XMLReport.State from *, --# CommandLineData.Content, --# Which_Table; private type Unmatched_Justification_Iterator is record Current_Table_Entry : ErrorHandler.Data_Table_Ptr; Current_Position : LexTokenManager.Token_Position; end record; end Justifications; ---------------------------------------------------------------------- --# inherit Ada.Characters.Handling, --# Ada.Characters.Latin_1, --# CommandLineData, --# CommandLineHandler, --# ErrorHandler, --# ExaminerConstants, --# E_Strings, --# FileSystem, --# LexTokenManager, --# ScreenEcho, --# SPARK_IO, --# SystemErrors, --# XMLReport; package WarningStatus --# own SuppressionList; --# initializes SuppressionList; is procedure ReadWarningFile; --# global in CommandLineData.Content; --# in out ErrorHandler.File_Open_Error; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out SuppressionList; --# derives ErrorHandler.File_Open_Error from *, --# CommandLineData.Content, --# SPARK_IO.File_Sys & --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# SuppressionList from CommandLineData.Content, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# SuppressionList; function Is_Suppressed (The_Element : ErrorHandler.Warning_Elements) return Boolean; --# global in SuppressionList; function Pragma_Is_Suppressed (Pragma_Name : LexTokenManager.Lex_String) return Boolean; --# global in LexTokenManager.State; --# in SuppressionList; procedure Output_Warning_List (To_File : in SPARK_IO.File_Type); --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in SuppressionList; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# LexTokenManager.State, --# SuppressionList, --# To_File, --# XMLReport.State & --# XMLReport.State from *, --# CommandLineData.Content, --# SuppressionList; procedure Report_Suppressed_Warnings (To_File : in SPARK_IO.File_Type; Counter : in ErrorHandler.Counters); --# global in SuppressionList; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Counter, --# SuppressionList, --# To_File; end WarningStatus; ---------------------------------------------------------------------- Stmt_Width : constant Positive := 4; Source_Line_Indent : constant Positive := 6; Error_Line_Length : constant Positive := 80; XML_Error_Line_Length : constant Positive := 256; subtype Error_Set_Lengths is Natural range 0 .. ExaminerConstants.MaxErrorSetSize; subtype Error_Set_Positions is Natural range 1 .. 
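-- Note: the Justifications package above backs the SPARK "accept" annotation
-- mechanism.  A justified region in user code looks like the pattern already
-- used in sp_expected_symbols earlier in this archive:
--    --# accept Flow, 23, Terminal_List, "Expected flow error";
--    Terminal_List (CurrIndex) := Essential_Symbols (Sym_List_Index);
--    --# end accept;
-- Start_Justification and End_Justification record such a region, and
-- Check_Whether_Justified is consulted when an error is about to be reported,
-- so a matching message is counted as an expected (justified) warning rather
-- than displayed.  The exact matching rules are in the separate subunit body,
-- which does not appear in this part of the listing.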
ExaminerConstants.MaxErrorSetSize; type Error_Struct is record Err_Num : Natural; Error : Error_Types.StringError; end record; type Error_Array is array (Error_Set_Positions) of Error_Struct; type Error_Sets is record Length : Error_Set_Lengths; Content : Error_Array; end record; Empty_Error_Struct : constant Error_Struct := Error_Struct'(0, Error_Types.Empty_StringError); Empty_Error_Array : constant Error_Array := Error_Array'(Error_Set_Positions => Empty_Error_Struct); Empty_Error_Set : constant Error_Sets := Error_Sets'(0, Empty_Error_Array); ---------------------------------------------------------------------- --# inherit CommandLineData, --# ErrorHandler, --# Error_Types, --# ExaminerConstants, --# E_Strings, --# SPARK_IO, --# SystemErrors, --# XMLReport; package ErrorAccumulator is type T is private; Clear : constant T; function Is_Error_Continuation (The_Error : Error_Types.StringError) return Boolean; function Is_Error_Start (The_Error : Error_Types.StringError) return Boolean; function Is_Active (This : T) return Boolean; procedure Start_Msg (This : out T; Start_Error : in ErrorHandler.Error_Struct; Start_Indent : in Natural; Explanation : in E_Strings.T; Line_Length : in Natural; Indent : in Natural); --# derives This from Explanation, --# Indent, --# Line_Length, --# Start_Error, --# Start_Indent; procedure Flush (This : in out T; Listing : in SPARK_IO.File_Type); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Listing, --# This & --# This from *; procedure Add (This : in out T; Error : in ErrorHandler.Error_Struct; End_Pos, Indent : in Natural; Listing : in SPARK_IO.File_Type); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# End_Pos, --# Error, --# Indent, --# Listing, --# This & --# This from *, --# End_Pos, --# Error, --# Indent; --# private type T is record Active : Boolean; Start_Error : ErrorHandler.Error_Struct; Start_Indent : Natural; -- following fields only concerned with appending explanation to message after Flush operation Explanation : E_Strings.T; Line_Length : Natural; Indent : Natural; end record; Clear : constant T := T' (Active => False, Start_Error => ErrorHandler.Empty_Error_Struct, Start_Indent => 0, Explanation => E_Strings.Empty_String, Line_Length => 0, Indent => 0); end ErrorAccumulator; ---------------------------------------------------------------------- Error_Context_Rec : Error_Contexts; Echo_Accumulator : ErrorAccumulator.T; Total_Error_Count : Total_Error_Counts; File_Open_Error : Boolean := False; Stop_SLI : Boolean := False; Potential_Invalid_Value : Boolean := False; ---------------------------------------------------------------------- function Dependency_Err_Number (Err_Type : Full_Depend_Err_Type) return Natural is Result : Natural; begin case Err_Type is -- Unconditional dependency errors when Not_Used | Not_Used_New | Not_Used_Continue | Ineff_Init | Ineff_Local_Init | Policy_Violation => Result := Dependency_Err_Type'Pos (Err_Type) + Error_Types.UncondDependencyErrorOffset; -- Conditional dependency errors when May_Be_Used | May_Be_Used_New | May_Be_Used_Continue | Uninitialised | Integrity_Violation | May_Be_Integrity_Violation => Result := Dependency_Err_Type'Pos (Err_Type) + Error_Types.CondDependencyErrorOffset; end case; return Result; end Dependency_Err_Number; ---------------------------------------------------------------------- package body Conversions is separate; ---------------------------------------------------------------------- package body ErrorBuffer is 
separate; ---------------------------------------------------------------------- package body WarningStatus is separate; ---------------------------------------------------------------------- package body Justifications is separate; ---------------------------------------------------------------------- function Symbol_To_Justification_Identifier (Sym : Dictionary.Symbol) return Justification_Identifier is begin return Justification_Identifier'(String_Form => LexTokenManager.Null_String, Symbol_Form => Sym); end Symbol_To_Justification_Identifier; function Lex_Str_To_Justification_Identifier (Str : LexTokenManager.Lex_String) return Justification_Identifier is begin return Justification_Identifier'(String_Form => Str, Symbol_Form => Dictionary.NullSymbol); end Lex_Str_To_Justification_Identifier; procedure Read_Warning_File --# global in CommandLineData.Content; --# in out File_Open_Error; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out WarningStatus.SuppressionList; --# derives File_Open_Error from *, --# CommandLineData.Content, --# SPARK_IO.File_Sys & --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# WarningStatus.SuppressionList from CommandLineData.Content, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# WarningStatus.SuppressionList; is begin WarningStatus.ReadWarningFile; end Read_Warning_File; procedure Output_Warning_List (To_File : in SPARK_IO.File_Type) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in WarningStatus.SuppressionList; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# LexTokenManager.State, --# To_File, --# WarningStatus.SuppressionList, --# XMLReport.State & --# XMLReport.State from *, --# CommandLineData.Content, --# WarningStatus.SuppressionList; is begin WarningStatus.Output_Warning_List (To_File => To_File); end Output_Warning_List; ---------------------------------------------------------------------- procedure Output_Reference_List (To_File : in SPARK_IO.File_Type) --# global in CommandLineData.Content; --# in Conversions.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Conversions.State, --# To_File; is begin Conversions.Output_Reference_List (To_File => To_File); end Output_Reference_List; -------------------------------------------------------------------------- function Lex_String_To_Name (Str : LexTokenManager.Lex_String) return Error_Types.Names --# global in LexTokenManager.State; is Result : Error_Types.Names; begin if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq then Result := Error_Types.NoName; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Main_Program_Token) = LexTokenManager.Str_Eq then Result := Error_Types.ThePartitionName; else Result := Error_Types.Names'(Name_Sort => Error_Types.LexString, Name_Sym => Dictionary.NullSymbol, Name_Str => Str, Pos => 0); end if; return Result; end Lex_String_To_Name; -------------------------------------------------------------------------- function Symbol_To_Name (Sym : Dictionary.Symbol) return Error_Types.Names is Result : Error_Types.Names; begin if Dictionary.Is_Null_Symbol (Sym) then Result := Error_Types.NoName; else Result := Error_Types.Names'(Name_Sort => Error_Types.Symbol, Name_Sym => Sym, Name_Str => LexTokenManager.Null_String, Pos => 0); end if; return 
Result; end Symbol_To_Name; -------------------------------------------------------------------------- function SP_Symbol_To_Name (Sym : SP_Symbols.SP_Symbol) return Error_Types.Names is begin return Error_Types.Names' (Name_Sort => Error_Types.ParserSymbol, Name_Sym => Dictionary.NullSymbol, Name_Str => LexTokenManager.Null_String, Pos => SP_Symbols.SP_Symbol'Pos (Sym)); end SP_Symbol_To_Name; -------------------------------------------------------------------------- procedure Append_String (E_Str : in out Error_Types.StringError; Str : in String) --# derives E_Str from *, --# Str; is pragma Inline (Append_String); begin E_Strings.Append_String (E_Str => E_Str.Message, Str => Str); end Append_String; ------------------------------------------------------------------------ procedure AppendSym (Error : in out Error_Types.StringError; Sym : in SP_Symbols.SP_Symbol) --# derives Error from *, --# Sym; is separate; ---------------------------------------------------------------------- procedure Append_Lex_String (E_Str : in out Error_Types.StringError; L_Str : in LexTokenManager.Lex_String) --# global in LexTokenManager.State; --# derives E_Str from *, --# LexTokenManager.State, --# L_Str; is pragma Inline (Append_Lex_String); begin E_Strings.Append_Examiner_String (E_Str1 => E_Str.Message, E_Str2 => LexTokenManager.Lex_String_To_String (Lex_Str => L_Str)); end Append_Lex_String; pragma Unreferenced (Append_Lex_String); -- unused at present ---------------------------------------------------------------------- procedure Set_Col (File : in SPARK_IO.File_Type; Posn : in Positive) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# Posn; is pragma Inline (Set_Col); begin if File = SPARK_IO.Standard_Output then ScreenEcho.Set_Col (Posn); else SPARK_IO.Set_Col (File, Posn); end if; end Set_Col; procedure Put_Char (File : in SPARK_IO.File_Type; Item : in Character) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# Item; is pragma Inline (Put_Char); begin if File = SPARK_IO.Standard_Output then ScreenEcho.Put_Char (Item); else SPARK_IO.Put_Char (File => File, Item => Item); end if; end Put_Char; procedure Put_Integer (File : in SPARK_IO.File_Type; Item : in Integer; Width : in Natural; Base : in SPARK_IO.Number_Base) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Base, --# File, --# Item, --# Width; is pragma Inline (Put_Integer); begin if File = SPARK_IO.Standard_Output then ScreenEcho.Put_Integer (Item, Width, Base); else SPARK_IO.Put_Integer (File => File, Item => Item, Width => Width, Base => Base); end if; end Put_Integer; procedure New_Line (File : in SPARK_IO.File_Type; Spacing : in Positive) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# Spacing; is pragma Inline (New_Line); begin if File = SPARK_IO.Standard_Output then ScreenEcho.New_Line (Spacing); else SPARK_IO.New_Line (File => File, Spacing => Spacing); end if; end New_Line; procedure Put_Line (File : in SPARK_IO.File_Type; Item : in String; Stop : in Natural) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# Item, --# Stop; is pragma Inline (Put_Line); begin if File = SPARK_IO.Standard_Output then ScreenEcho.Put_Line (Item); else SPARK_IO.Put_Line (File => File, Item => Item, Stop => Stop); end if; end Put_Line; procedure Put_E_String (File : in SPARK_IO.File_Type; E_Str : in E_Strings.T) --# global in out SPARK_IO.File_Sys; --# derives 
SPARK_IO.File_Sys from *, --# E_Str, --# File; is pragma Inline (Put_E_String); begin if File = SPARK_IO.Standard_Output then ScreenEcho.Put_ExaminerString (E_Str); else E_Strings.Put_String (File => File, E_Str => E_Str); end if; end Put_E_String; pragma Unreferenced (Put_E_String); -- unused at present procedure Put_E_Line (File : in SPARK_IO.File_Type; E_Str : in E_Strings.T) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# E_Str, --# File; is pragma Inline (Put_E_Line); begin if File = SPARK_IO.Standard_Output then ScreenEcho.Put_ExaminerLine (E_Str); else E_Strings.Put_Line (File => File, E_Str => E_Str); end if; end Put_E_Line; ---------------------------------------------------------------------- procedure Put_Spaces (File : in SPARK_IO.File_Type; N : in Natural) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# N; is begin for I in Natural range 1 .. N loop Put_Char (File => File, Item => ' '); end loop; end Put_Spaces; ---------------------------------------------------------------------- procedure PrintLine (Listing : in SPARK_IO.File_Type; Start_Pos, End_Pos, Indent : in Natural; Line : in E_Strings.T; Add_New_Line : in Boolean; New_Start : out Natural) --# global in out SPARK_IO.File_Sys; --# derives New_Start from End_Pos, --# Indent, --# Line, --# Start_Pos & --# SPARK_IO.File_Sys from *, --# Add_New_Line, --# End_Pos, --# Indent, --# Line, --# Listing, --# Start_Pos; is separate; ---------------------------------------------------------------------------- package body ErrorAccumulator is separate; ----------------------------------------------------------------------------- -- String utility used in calls to ErrorAccumulator.Start_Msg to remove explanations from -- strings being stored in the accumulator so that they can be kept and -- printed when the accumulator is flushed -- e.g. 
if "rage, rage against the dying of the light [Explanatory note: by Dylan Thomas]" is passed -- in in Estr and ErrorHandlerError.Prefix is set to " [Explanatory note: " then after the call, -- Estr has "rage, rage against the dying of the light" and -- Explanation has " [Explanatory note: by Dylan Thomas]" -- If there is no match for ErrorHandlerError.Prefix then E_Str is unchanged and Explanation -- is the EmptyString; procedure Split_String_At_Explanation (E_Str : in out E_Strings.T; Explanation : out E_Strings.T) --# derives Explanation, --# E_Str from E_Str; is Start_Point : Natural; Found_It : Boolean; begin E_Strings.Find_Sub_String (E_Str => E_Str, Search_String => Explanation_Prefix, String_Found => Found_It, String_Start => Start_Point); if Found_It then Explanation := E_Strings.Section (E_Str => E_Str, Start_Pos => Start_Point, Length => (E_Strings.Get_Length (E_Str => E_Str) - Start_Point) + 1); -- truncate E_Str to remove Explanation E_Str := E_Strings.Section (E_Str => E_Str, Start_Pos => 1, Length => Start_Point - 1); else Explanation := E_Strings.Empty_String; end if; end Split_String_At_Explanation; -- one DF error reported on Explanation.Content ----------------------------------------------------------------------------- procedure Flush_Buffer --# global in Dictionary.Dict; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# derives ErrorBuffer.Buffer from * & --# Error_Context_Rec from *, --# ErrorBuffer.Buffer, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# Dictionary.Dict, --# ErrorBuffer.Buffer, --# Error_Context_Rec; is Err_File : Error_IO.File_Type; begin Err_File := Error_Context_Rec.Errs; ErrorBuffer.Flush (Err_File => Err_File); Error_Context_Rec.Errs := Err_File; end Flush_Buffer; ---------------------------------------------------------------------- procedure Inc_Total_Justified_Warnings --# global in out Total_Error_Count; --# derives Total_Error_Count from *; is begin if Total_Error_Count.Justified_Warnings = Count'Last then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Too_Many_Errors, Msg => ""); end if; Total_Error_Count.Justified_Warnings := Total_Error_Count.Justified_Warnings + 1; end Inc_Total_Justified_Warnings; ---------------------------------------------------------------------- procedure Echo_Total_Error_Count --# global in CommandLineData.Content; --# in Total_Error_Count; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Total_Error_Count; is procedure Indent --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *; is begin ScreenEcho.Put_String (" "); end Indent; procedure End_Line (Is_Plural : in Boolean) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Is_Plural; is begin if Is_Plural then ScreenEcho.Put_Line ("s"); else ScreenEcho.New_Line (1); end if; end End_Line; begin -- Echo_Total_Error_Count if CommandLineData.Content.Echo and not CommandLineData.Content.Makefile_Mode then ScreenEcho.New_Line (1); -- Explicit and summarized warnings are handled separately -- First explicit counts if Total_Error_Count.Grand_Total = 0 then ScreenEcho.Put_Line (" No errors or warnings"); else ScreenEcho.Put_Integer (Integer (Total_Error_Count.Grand_Total), 5, 10); if Total_Error_Count.Grand_Total > 1 then ScreenEcho.Put_Line (" errors or warnings, comprising:"); else ScreenEcho.Put_Line (" error or warning, comprising:"); end if; -- List subtotals by category if Total_Error_Count.Explicit_Error_Count 
(Syntax_Or_Semantic) > 0 then Indent; ScreenEcho.Put_Integer (Integer (Total_Error_Count.Explicit_Error_Count (Syntax_Or_Semantic)), 5, 10); ScreenEcho.Put_String (" syntax or semantic error"); End_Line (Is_Plural => Total_Error_Count.Explicit_Error_Count (Syntax_Or_Semantic) > 1); end if; if Total_Error_Count.Explicit_Error_Count (Flow) > 0 then Indent; ScreenEcho.Put_Integer (Integer (Total_Error_Count.Explicit_Error_Count (Flow)), 5, 10); ScreenEcho.Put_String (" flow error"); End_Line (Is_Plural => Total_Error_Count.Explicit_Error_Count (Flow) > 1); end if; if Total_Error_Count.Explicit_Error_Count (Warning) > 0 then Indent; ScreenEcho.Put_Integer (Integer (Total_Error_Count.Explicit_Error_Count (Warning)), 5, 10); ScreenEcho.Put_String (" warning"); End_Line (Is_Plural => Total_Error_Count.Explicit_Error_Count (Warning) > 1); end if; end if; -- Then append summary of suppressed warnings (say nothing at all if total is 0) if Total_Error_Count.Suppressed_Warnings > 0 then ScreenEcho.Put_Integer (Integer (Total_Error_Count.Suppressed_Warnings), 5, 10); ScreenEcho.Put_String (" summarized warning"); End_Line (Is_Plural => Total_Error_Count.Suppressed_Warnings > 1); end if; -- Then append summary of justified warnings (say nothing at all if total is 0) if Total_Error_Count.Justified_Warnings > 0 then ScreenEcho.Put_Integer (Integer (Total_Error_Count.Justified_Warnings), 5, 10); ScreenEcho.Put_String (" expected (justified) warning"); End_Line (Is_Plural => Total_Error_Count.Justified_Warnings > 1); end if; end if; end Echo_Total_Error_Count; ---------------------------------------------------------------------- procedure Inc_Message_Count (Err_Type : in Error_Types.Error_Class) --# global in out Error_Context_Rec; --# in out Total_Error_Count; --# derives Error_Context_Rec, --# Total_Error_Count from *, --# Err_Type; is procedure Inc_Total_Errors_Or_Warnings (Kind : in Counted_Error_Kinds) --# global in out Total_Error_Count; --# derives Total_Error_Count from *, --# Kind; is begin if Total_Error_Count.Grand_Total = Count'Last then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Too_Many_Errors, Msg => ""); end if; Total_Error_Count.Grand_Total := Total_Error_Count.Grand_Total + 1; -- We don't need to guard the following increment because a subtotal can't -- overflow without the grand total (tested above) overflowing first Total_Error_Count.Explicit_Error_Count (Kind) := Total_Error_Count.Explicit_Error_Count (Kind) + 1; end Inc_Total_Errors_Or_Warnings; begin if Error_Context_Rec.Num_Message = Count'Last then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Too_Many_Errors, Msg => ""); end if; Error_Context_Rec.Num_Message := Error_Context_Rec.Num_Message + 1; if Err_Type /= Error_Types.NoErr then if Error_Context_Rec.Num_Errs = Count'Last then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Too_Many_Errors, Msg => ""); end if; Error_Context_Rec.Num_Errs := Error_Context_Rec.Num_Errs + 1; end if; -- Above lines maintain counts of errors per unit in the current ErrorContext record. 
-- We also need to maintain grand totals (in various subcategories) thus: case Err_Type is when Error_Types.LexErr | Error_Types.SyntaxErr | Error_Types.SyntaxRec | Error_Types.SemanticErr => Inc_Total_Errors_Or_Warnings (Kind => Syntax_Or_Semantic); when Error_Types.UncondFlowErr | Error_Types.CondlFlowErr | Error_Types.UncondDependencyErr | Error_Types.CondlDependencyErr | Error_Types.DepSemanticErr | -- refinement inconsistency, treat as a flow error Error_Types.ControlFlowErr | Error_Types.IneffectiveStat | Error_Types.StabilityErr | Error_Types.UsageErr => Inc_Total_Errors_Or_Warnings (Kind => Flow); when Error_Types.WarningWithPosition | Error_Types.WarningWithoutPosition | Error_Types.Note => Inc_Total_Errors_Or_Warnings (Kind => Warning); when Error_Types.NoErr => null; end case; end Inc_Message_Count; --------------------------------------------------------------------------- procedure Inc_Suppressed_Warning_Counter (Warning_Type : in Warning_Elements) --# global in out Error_Context_Rec; --# in out Total_Error_Count; --# derives Error_Context_Rec from *, --# Warning_Type & --# Total_Error_Count from *; is procedure Inc_Total_Suppressed_Warnings --# global in out Total_Error_Count; --# derives Total_Error_Count from *; is begin if Total_Error_Count.Suppressed_Warnings = Count'Last then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Too_Many_Errors, Msg => ""); end if; Total_Error_Count.Suppressed_Warnings := Total_Error_Count.Suppressed_Warnings + 1; end Inc_Total_Suppressed_Warnings; begin if Error_Context_Rec.Counter (Warning_Type) = Count'Last then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Too_Many_Suppressed_Warnings, Msg => ""); end if; Error_Context_Rec.Counter (Warning_Type) := Error_Context_Rec.Counter (Warning_Type) + 1; Inc_Total_Suppressed_Warnings; -- above is per file, per kind count; this one is grand total end Inc_Suppressed_Warning_Counter; ---------------------------------------------------------------------- procedure Move_To_Indent (Source_File : in SPARK_IO.File_Type; Line : in E_Strings.T; Indent : in Natural; Position : in Integer) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Indent, --# Line, --# Position, --# Source_File; is Limit : Integer; begin Put_Spaces (File => Source_File, N => Indent); if Position > E_Strings.Get_Length (E_Str => Line) then Limit := E_Strings.Get_Length (E_Str => Line); else Limit := Position; end if; for I in Natural range 1 .. Limit loop if E_Strings.Get_Element (E_Str => Line, Pos => I) = Ada.Characters.Latin_1.HT then Put_Char (File => Source_File, Item => Ada.Characters.Latin_1.HT); else Put_Char (File => Source_File, Item => ' '); end if; end loop; for i in Natural range 1 .. 
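-- Note on Move_To_Indent above: the first loop copies a horizontal tab
-- wherever the original source line has one, so that whatever is printed after
-- this indentation (presumably an error pointer beneath the offending source
-- line) stays aligned even when the line contains tab characters; the second
-- loop simply pads with spaces for any columns beyond the stored line length.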
Position - Limit loop Put_Char (File => Source_File, Item => ' '); end loop; end Move_To_Indent; ---------------------------------------------------------------------- procedure GetFileLine --# global in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# derives Error_Context_Rec, --# SPARK_IO.File_Sys from Error_Context_Rec, --# SPARK_IO.File_Sys; is separate; ---------------------------------------------------------------------- procedure Print_Source_Line (To_File : in SPARK_IO.File_Type) --# global in CommandLineData.Content; --# in Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Error_Context_Rec, --# To_File; is begin if not CommandLineData.Content.Plain_Output then Put_Integer (File => To_File, Item => Integer (Error_Context_Rec.Line_No), Width => Stmt_Width, Base => 10); end if; Set_Col (File => To_File, Posn => Source_Line_Indent + 1); Put_E_Line (File => To_File, E_Str => Error_Context_Rec.Current_Line); end Print_Source_Line; ---------------------------------------------------------------------- function Error_Has_Position_Inline (Err_Type : in Error_Types.Error_Class) return Boolean is begin return not (Err_Type = Error_Types.UncondDependencyErr or Err_Type = Error_Types.NoErr or Err_Type = Error_Types.CondlDependencyErr or Err_Type = Error_Types.DepSemanticErr or Err_Type = Error_Types.UsageErr or Err_Type = Error_Types.Note or Err_Type = Error_Types.WarningWithoutPosition); end Error_Has_Position_Inline; ---------------------------------------------------------------------- function Convert_Message_Id (Message_Id : Natural; Err_Type : Error_Types.Error_Class) return Natural is Id : Natural; begin case Err_Type is when Error_Types.CondlDependencyErr => if Message_Id = Dependency_Err_Number (May_Be_Used_New) then Id := Dependency_Err_Number (May_Be_Used); else Id := Message_Id; end if; when Error_Types.UncondDependencyErr => if Message_Id = Dependency_Err_Number (Not_Used_New) then Id := Dependency_Err_Number (Not_Used); else Id := Message_Id; end if; when others => Id := Message_Id; end case; return Id; end Convert_Message_Id; procedure Put_Message_Id (File : in SPARK_IO.File_Type; Message_Id : in Natural; Err_Type : in Error_Types.Error_Class) --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Err_Type, --# File, --# Message_Id; is begin if Message_Id = 0 then SPARK_IO.Put_Char (File => File, Item => ' '); else SPARK_IO.Put_Integer (File => File, Item => Convert_Message_Id (Message_Id => Message_Id, Err_Type => Err_Type), Width => 3, Base => 10); if CommandLineData.Content.Brief then SPARK_IO.Put_String (File => File, Item => " - ", Stop => 0); else SPARK_IO.Put_String (File => File, Item => ": ", Stop => 0); end if; end if; end Put_Message_Id; function Get_Error_Class (Err_Class : in Error_Types.Error_Class) return E_Strings.T is Tmp_String : E_Strings.T; begin case Err_Class is when Error_Types.LexErr => Tmp_String := E_Strings.Copy_String (Str => "Lexical Error"); when Error_Types.SyntaxErr => Tmp_String := E_Strings.Copy_String (Str => "Syntax Error"); when Error_Types.SyntaxRec => Tmp_String := E_Strings.Copy_String (Str => "Syntax Recovery"); when Error_Types.SemanticErr => Tmp_String := E_Strings.Copy_String (Str => "Semantic Error"); when Error_Types.ControlFlowErr => Tmp_String := E_Strings.Copy_String (Str => "Illegal Structure"); when Error_Types.UncondFlowErr | Error_Types.UncondDependencyErr | 
Error_Types.IneffectiveStat | Error_Types.StabilityErr | Error_Types.UsageErr | Error_Types.DepSemanticErr => Tmp_String := E_Strings.Copy_String (Str => "Flow Error"); when Error_Types.CondlFlowErr | Error_Types.CondlDependencyErr | Error_Types.WarningWithPosition | Error_Types.WarningWithoutPosition => Tmp_String := E_Strings.Copy_String (Str => "Warning"); when Error_Types.Note => Tmp_String := E_Strings.Copy_String (Str => "Note"); when Error_Types.NoErr => Tmp_String := E_Strings.Copy_String (Str => "No Error"); end case; return Tmp_String; end Get_Error_Class; procedure Output_Error_Marker (File : in SPARK_IO.File_Type; Err_Type : in Error_Types.Error_Class; Message_Id : in Natural; Err_Count : in Natural) --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Err_Count, --# Err_Type, --# File, --# Message_Id; is procedure Output_Full_Error_Name (File : in SPARK_IO.File_Type; Err_Type : in Error_Types.Error_Class) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Err_Type, --# File; is begin case Err_Type is when Error_Types.LexErr => SPARK_IO.Put_String (File => File, Item => "Lexical Error :", Stop => 0); when Error_Types.SyntaxErr => SPARK_IO.Put_String (File => File, Item => "Syntax Error :", Stop => 0); when Error_Types.SyntaxRec => SPARK_IO.Put_String (File => File, Item => "Syntax Recovery :", Stop => 0); when Error_Types.SemanticErr => SPARK_IO.Put_String (File => File, Item => "Semantic Error :", Stop => 0); when Error_Types.ControlFlowErr => SPARK_IO.Put_String (File => File, Item => "Illegal Structure :", Stop => 0); when Error_Types.CondlFlowErr | Error_Types.CondlDependencyErr | Error_Types.UncondFlowErr | Error_Types.UncondDependencyErr | Error_Types.IneffectiveStat | Error_Types.StabilityErr | Error_Types.UsageErr | Error_Types.DepSemanticErr => SPARK_IO.Put_String (File => File, Item => "Flow Error :", Stop => 0); when Error_Types.WarningWithPosition | Error_Types.WarningWithoutPosition => SPARK_IO.Put_String (File => File, Item => "Warning :", Stop => 0); when Error_Types.Note => SPARK_IO.Put_String (File => File, Item => "Note :", Stop => 0); when Error_Types.NoErr => null; end case; end Output_Full_Error_Name; procedure Output_Error_Flash (File : in SPARK_IO.File_Type; Err_Type : in Error_Types.Error_Class) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Err_Type, --# File; is begin case Err_Type is when Error_Types.LexErr | Error_Types.SyntaxErr | Error_Types.SyntaxRec | Error_Types.ControlFlowErr | Error_Types.SemanticErr => SPARK_IO.Put_String (File => File, Item => "***", Stop => 0); when Error_Types.UncondFlowErr | Error_Types.UncondDependencyErr | Error_Types.IneffectiveStat | Error_Types.StabilityErr | Error_Types.UsageErr | Error_Types.DepSemanticErr => SPARK_IO.Put_String (File => File, Item => "!!!", Stop => 0); when Error_Types.CondlFlowErr | Error_Types.CondlDependencyErr => SPARK_IO.Put_String (File => File, Item => "???", Stop => 0); when Error_Types.NoErr => SPARK_IO.Put_String (File => File, Item => "+++ ", Stop => 0); when Error_Types.WarningWithPosition | Error_Types.WarningWithoutPosition | Error_Types.Note => SPARK_IO.Put_String (File => File, Item => "---", Stop => 0); end case; end Output_Error_Flash; ---------------------------------------------------------------------- procedure Output_Error_Count (File : in SPARK_IO.File_Type; Err_Count : in Natural) --# global in CommandLineData.Content; --# in 
out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Err_Count, --# File; is begin if (Err_Count = 0) or CommandLineData.Content.Plain_Output then SPARK_IO.Put_String (File => File, Item => " ", Stop => 0); else SPARK_IO.Put_String (File => File, Item => " (", Stop => 0); Put_Integer (File => File, Item => Err_Count, Width => 3, Base => 10); SPARK_IO.Put_String (File => File, Item => ") ", Stop => 0); end if; end Output_Error_Count; begin --Output_Error_Marker Output_Error_Flash (File => File, Err_Type => Err_Type); if Err_Type /= Error_Types.NoErr then Output_Error_Count (File => File, Err_Count => Err_Count); Output_Full_Error_Name (File => File, Err_Type => Err_Type); Put_Message_Id (File => File, Message_Id => Message_Id, Err_Type => Err_Type); else Output_Full_Error_Name (File => File, Err_Type => Err_Type); end if; end Output_Error_Marker; procedure Output_Brief_Error_Marker (File : in SPARK_IO.File_Type; Err_Type : in Error_Types.Error_Class; Message_Id : in Natural) --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Err_Type, --# File, --# Message_Id; is procedure Output_Brief_Error_Name (File : in SPARK_IO.File_Type; Err_Type : in Error_Types.Error_Class) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Err_Type, --# File; is begin case Err_Type is when Error_Types.LexErr => SPARK_IO.Put_String (File => File, Item => "Lexical Error ", Stop => 0); when Error_Types.SyntaxErr => SPARK_IO.Put_String (File => File, Item => "Syntax Error ", Stop => 0); when Error_Types.SyntaxRec => SPARK_IO.Put_String (File => File, Item => "Syntax Recovery ", Stop => 0); when Error_Types.SemanticErr => SPARK_IO.Put_String (File => File, Item => "Semantic Error ", Stop => 0); when Error_Types.ControlFlowErr => SPARK_IO.Put_String (File => File, Item => "Illegal Structure ", Stop => 0); when Error_Types.CondlFlowErr | Error_Types.CondlDependencyErr | Error_Types.UncondFlowErr | Error_Types.UncondDependencyErr | Error_Types.IneffectiveStat | Error_Types.StabilityErr | Error_Types.UsageErr | Error_Types.DepSemanticErr => SPARK_IO.Put_String (File => File, Item => "Flow Error ", Stop => 0); when Error_Types.WarningWithPosition | Error_Types.WarningWithoutPosition => SPARK_IO.Put_String (File => File, Item => "Warning ", Stop => 0); when Error_Types.Note => SPARK_IO.Put_String (File => File, Item => "Note ", Stop => 0); when Error_Types.NoErr => null; end case; end Output_Brief_Error_Name; begin if Err_Type /= Error_Types.NoErr then Output_Brief_Error_Name (File => File, Err_Type => Err_Type); Put_Message_Id (File => File, Message_Id => Message_Id, Err_Type => Err_Type); end if; end Output_Brief_Error_Marker; --------------------------------------------------------------------------- procedure EchoErrorEntry (Echo_File : in SPARK_IO.File_Type; Error : in Error_Types.StringError) --# global in CommandLineData.Content; --# in out Echo_Accumulator; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# derives Echo_Accumulator from *, --# CommandLineData.Content, --# Error, --# Error_Context_Rec & --# Error_Context_Rec, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Echo_Accumulator, --# Echo_File, --# Error, --# Error_Context_Rec, --# SPARK_IO.File_Sys; is separate; --------------------------------------------------------------------------- procedure Flush_Echo_Messages --# global in out Echo_Accumulator; --# in out SPARK_IO.File_Sys; 
--# derives Echo_Accumulator, --# SPARK_IO.File_Sys from *, --# Echo_Accumulator; is Was_Active : Boolean; begin Was_Active := ErrorAccumulator.Is_Active (This => Echo_Accumulator); ErrorAccumulator.Flush (This => Echo_Accumulator, Listing => SPARK_IO.Standard_Output); if Was_Active then New_Line (File => SPARK_IO.Standard_Output, Spacing => 1); end if; end Flush_Echo_Messages; ---------------------------------------------------------------------- procedure Error_Init (Source_File_Name : in E_Strings.T; Echo : in Boolean) --# global in Dictionary.Dict; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# derives Echo_Accumulator, --# ErrorBuffer.Buffer from * & --# Error_Context_Rec from *, --# Dictionary.Dict, --# Echo, --# ErrorBuffer.Buffer, --# Source_File_Name, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Source_File_Name; is Source_File : SPARK_IO.File_Type := SPARK_IO.Null_File; Err_File : Error_IO.File_Type := Error_IO.Null_File; Success : SPARK_IO.File_Status; OK_Temp_File : Boolean; New_Context : Error_Contexts; begin Error_IO.Create (Err_File, Success); OK_Temp_File := Success = SPARK_IO.Ok; if not OK_Temp_File then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Error_Handler_Temporary_Files, Msg => "in Error_Init"); end if; New_Context.Errs := Err_File; New_Context.Line_No := 0; New_Context.Source_File_Name := Source_File_Name; New_Context.Num_Errs := 0; New_Context.Num_Message := 0; New_Context.Severity := No_Error; New_Context.Recovery_Messages := False; New_Context.Echo := Echo; New_Context.Current_Line := E_Strings.Empty_String; New_Context.Counter := Counters'(others => 0); New_Context.Source := SPARK_IO.Null_File; New_Context.Justifications_Data_Table := Empty_Justifications_Data_Table; --# accept Flow, 10, Error_Context_Rec, "Flush changes buffer but we need to initialize it anyway"; Flush_Buffer; --# end accept; Error_Context_Rec := New_Context; FileSystem.Open_Source_File (File => Source_File, Name => Source_File_Name, Status => Success); Error_Context_Rec.Source := Source_File; if not (Success = SPARK_IO.Ok) then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Error_Handler_Source, Msg => "in Error_Init"); end if; Flush_Echo_Messages; end Error_Init; --------------------------------------------------------------------------- procedure Spark_Make_Init --# global in Dictionary.Dict; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# derives Echo_Accumulator, --# ErrorBuffer.Buffer from * & --# Error_Context_Rec from SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec; is Err_File : Error_IO.File_Type := Error_IO.Null_File; Unused : SPARK_IO.File_Status; New_Context : Error_Contexts; begin --# accept Flow, 10, Unused, "Design decision not to check file status"; Error_IO.Create (File => Err_File, Status => Unused); --# end accept; New_Context.Errs := Err_File; New_Context.Line_No := 0; New_Context.Num_Errs := 0; New_Context.Num_Message := 0; New_Context.Severity := No_Error; New_Context.Source_File_Name := E_Strings.Empty_String; New_Context.Recovery_Messages := False; New_Context.Echo := False; --Echo; New_Context.Current_Line := E_Strings.Empty_String; New_Context.Counter := Counters'(others => 0); New_Context.Source := 
SPARK_IO.Null_File; New_Context.Justifications_Data_Table := Empty_Justifications_Data_Table; --# accept Flow, 10, Error_Context_Rec, "Flush changes buffer but we need to initialize it anyway"; Flush_Buffer; --# end accept; Error_Context_Rec := New_Context; Flush_Echo_Messages; --# accept Flow, 33, Unused, "Consequence of earlier deliberate non-use"; end Spark_Make_Init; ----------------------------------------------------------------------------- procedure Get_Error_Context (Context : out Error_Contexts) --# global in Dictionary.Dict; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# derives Context, --# Error_Context_Rec from ErrorBuffer.Buffer, --# Error_Context_Rec, --# SPARK_IO.File_Sys & --# Echo_Accumulator, --# ErrorBuffer.Buffer from * & --# SPARK_IO.File_Sys from *, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec; is Source_File : SPARK_IO.File_Type; Success : SPARK_IO.File_Status; begin Flush_Buffer; Source_File := Error_Context_Rec.Source; --# accept Flow, 10, Success, "Intentional non use of file return code"; SPARK_IO.Close (Source_File, Success); --# end accept; Error_Context_Rec.Source := Source_File; Context := Error_Context_Rec; Flush_Echo_Messages; --# accept Flow, 33, Success, "Consequence of earlier non-use"; end Get_Error_Context; ----------------------------------------------------------------------------- procedure Set_Error_Context (Context : in Error_Contexts) --# global in Dictionary.Dict; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# derives Echo_Accumulator, --# ErrorBuffer.Buffer from * & --# Error_Context_Rec from *, --# Context, --# Dictionary.Dict, --# ErrorBuffer.Buffer, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# Context, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec; is Success : SPARK_IO.File_Status; Source_File : SPARK_IO.File_Type; begin --# accept Flow, 10, Error_Context_Rec, "Flush changes buffer but we need to initialize it anyway"; Flush_Buffer; --# end accept; Error_Context_Rec := Context; Source_File := Error_Context_Rec.Source; --# accept Flow, 10, Success, "Intentional non use of file return code"; FileSystem.Open_Source_File (File => Source_File, Name => Error_Context_Rec.Source_File_Name, Status => Success); --# end accept; Error_Context_Rec.Source := Source_File; Error_Context_Rec.Line_No := 0; Error_Context_Rec.Current_Line := E_Strings.Empty_String; Flush_Echo_Messages; --# accept Flow, 33, Success, "Consequence of earlier non-use"; end Set_Error_Context; ----------------------------------------------------------------------------- procedure Error_Reset --# global in out Error_Context_Rec; --# out Potential_Invalid_Value; --# derives Error_Context_Rec from * & --# Potential_Invalid_Value from ; is begin Error_Context_Rec.Severity := No_Error; Potential_Invalid_Value := False; end Error_Reset; ----------------------------------------------------------------------------- procedure Get_Error_Severity (Severity : out Error_Level) --# global in Error_Context_Rec; --# derives Severity from Error_Context_Rec; is begin Severity := Error_Context_Rec.Severity; end Get_Error_Severity; --------------------------------------------------------------------------- function Syntax_Or_Semantic_Error return Boolean --# global in Total_Error_Count; is begin return Total_Error_Count.Explicit_Error_Count 
(Syntax_Or_Semantic) /= 0; end Syntax_Or_Semantic_Error; --------------------------------------------------------------------------- function Possibly_Invalid_Values return Boolean --# global in Potential_Invalid_Value; is begin return Potential_Invalid_Value; end Possibly_Invalid_Values; --------------------------------------------------------------------------- function Generate_SLI return Boolean --# global in CommandLineData.Content; --# in Stop_SLI; is begin return CommandLineData.Content.Generate_SLI and then not Stop_SLI; end Generate_SLI; ----------------------------------------------------------------------------- procedure Report_Success (Position : in LexTokenManager.Token_Position; Subprog_Str : in LexTokenManager.Lex_String; Err_Num : in Error_Types.ErrNumRange) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# in out Total_Error_Count; --# derives Conversions.State, --# ErrorBuffer.Buffer from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorBuffer.Buffer, --# Err_Num, --# LexTokenManager.State, --# Position, --# Subprog_Str & --# Echo_Accumulator, --# Error_Context_Rec, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Err_Num, --# LexTokenManager.State, --# Position, --# SPARK_IO.File_Sys, --# Subprog_Str & --# Total_Error_Count from *; is Error : Error_Types.StringError; File : Error_IO.File_Type; begin Inc_Message_Count (Err_Type => Error_Types.NoErr); File := Error_Context_Rec.Errs; ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.NoErr, Pos => Position, Scope => Dictionary.GlobalScope, Error_Number => Err_Num, Reference => No_Reference, Name1 => Lex_String_To_Name (Str => Subprog_Str), Name2 => Error_Types.NoName, Name3 => Error_Types.NoName, Echo_Str => Error); Error_Context_Rec.Errs := File; -- In "Brief" mode we don't echo success messages, since an -- IDE like GPS would interpret them as "errors", which they -- aren't! 
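-- Illustrative note (the exact brief-mode output format is an assumption,
-- not defined in this unit): -brief makes the Examiner emit terse one-line
-- diagnostics intended for tool consumption, so an echoed success message
-- would be picked up by an IDE's error parser as if it were a diagnostic.
-- The echo below is therefore limited to the default (non-brief) mode.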
if not CommandLineData.Content.Brief then EchoErrorEntry (SPARK_IO.Standard_Output, Error); end if; end Report_Success; --------------------------------------------------------------------------- procedure Hidden_Text (Position : in LexTokenManager.Token_Position; Unit_Str : in LexTokenManager.Lex_String; Unit_Typ : in SP_Symbols.SP_Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in WarningStatus.SuppressionList; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# in out Total_Error_Count; --# derives Conversions.State, --# ErrorBuffer.Buffer from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# LexTokenManager.State, --# Position, --# Unit_Str, --# Unit_Typ, --# WarningStatus.SuppressionList & --# Echo_Accumulator, --# Error_Context_Rec, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# LexTokenManager.State, --# Position, --# SPARK_IO.File_Sys, --# Unit_Str, --# Unit_Typ, --# WarningStatus.SuppressionList & --# Total_Error_Count from *, --# CommandLineData.Content, --# Error_Context_Rec, --# LexTokenManager.State, --# Position, --# WarningStatus.SuppressionList; is Error : Error_Types.StringError; File : Error_IO.File_Type; Error_Is_Justified : Boolean; begin Justifications.Check_Whether_Justified (Which_Table => Error_Context_Rec.Justifications_Data_Table, Line => Position, Kind => Warning_Message, Err_Num => 10, Identifiers => Null_Justification_Identifiers, Match_Found => Error_Is_Justified); if Error_Is_Justified then Inc_Total_Justified_Warnings; else if WarningStatus.Is_Suppressed (The_Element => Hidden_Parts) then Inc_Suppressed_Warning_Counter (Warning_Type => Hidden_Parts); else Inc_Message_Count (Err_Type => Error_Types.WarningWithoutPosition); File := Error_Context_Rec.Errs; ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.WarningWithoutPosition, Pos => Position, Scope => Dictionary.GlobalScope, Error_Number => 10, Reference => No_Reference, Name1 => Lex_String_To_Name (Str => Unit_Str), Name2 => SP_Symbol_To_Name (Sym => Unit_Typ), Name3 => Error_Types.NoName, Echo_Str => Error); Error_Context_Rec.Errs := File; EchoErrorEntry (SPARK_IO.Standard_Output, Error); end if; end if; end Hidden_Text; --------------------------------------------------------------------------- procedure Hidden_Handler (Position : in LexTokenManager.Token_Position; Unit_Str : in LexTokenManager.Lex_String; Unit_Typ : in SP_Symbols.SP_Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in WarningStatus.SuppressionList; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# in out Total_Error_Count; --# derives Conversions.State, --# ErrorBuffer.Buffer from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# LexTokenManager.State, --# Position, --# Unit_Str, --# Unit_Typ, --# WarningStatus.SuppressionList & --# Echo_Accumulator, --# Error_Context_Rec, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# LexTokenManager.State, --# Position, --# 
SPARK_IO.File_Sys, --# Unit_Str, --# Unit_Typ, --# WarningStatus.SuppressionList & --# Total_Error_Count from *, --# CommandLineData.Content, --# Error_Context_Rec, --# LexTokenManager.State, --# Position, --# WarningStatus.SuppressionList; is Error : Error_Types.StringError; File : Error_IO.File_Type; Error_Is_Justified : Boolean; begin Justifications.Check_Whether_Justified (Which_Table => Error_Context_Rec.Justifications_Data_Table, Line => Position, Kind => Warning_Message, Err_Num => 9, Identifiers => Null_Justification_Identifiers, Match_Found => Error_Is_Justified); if Error_Is_Justified then Inc_Total_Justified_Warnings; else if WarningStatus.Is_Suppressed (The_Element => Handler_Parts) then Inc_Suppressed_Warning_Counter (Warning_Type => Handler_Parts); else Inc_Message_Count (Err_Type => Error_Types.WarningWithoutPosition); File := Error_Context_Rec.Errs; ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.WarningWithoutPosition, Pos => Position, Scope => Dictionary.GlobalScope, Error_Number => 9, Reference => No_Reference, Name1 => Lex_String_To_Name (Str => Unit_Str), Name2 => SP_Symbol_To_Name (Sym => Unit_Typ), Name3 => Error_Types.NoName, Echo_Str => Error); Error_Context_Rec.Errs := File; EchoErrorEntry (SPARK_IO.Standard_Output, Error); end if; end if; end Hidden_Handler; --------------------------------------------------------------------------- procedure Representation_Clause (Position : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in WarningStatus.SuppressionList; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# in out Total_Error_Count; --# derives Conversions.State, --# ErrorBuffer.Buffer from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# LexTokenManager.State, --# Position, --# WarningStatus.SuppressionList & --# Echo_Accumulator, --# Error_Context_Rec, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# LexTokenManager.State, --# Position, --# SPARK_IO.File_Sys, --# WarningStatus.SuppressionList & --# Total_Error_Count from *, --# CommandLineData.Content, --# Error_Context_Rec, --# LexTokenManager.State, --# Position, --# WarningStatus.SuppressionList; is Error : Error_Types.StringError; File : Error_IO.File_Type; Error_Is_Justified : Boolean; begin Justifications.Check_Whether_Justified (Which_Table => Error_Context_Rec.Justifications_Data_Table, Line => Position, Kind => Warning_Message, Err_Num => 2, Identifiers => Null_Justification_Identifiers, Match_Found => Error_Is_Justified); if Error_Is_Justified then Inc_Total_Justified_Warnings; else if WarningStatus.Is_Suppressed (The_Element => Representation_Clauses) then Inc_Suppressed_Warning_Counter (Warning_Type => Representation_Clauses); else Inc_Message_Count (Err_Type => Error_Types.WarningWithPosition); File := Error_Context_Rec.Errs; ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.WarningWithPosition, Pos => Position, Scope => Dictionary.GlobalScope, Error_Number => 2, Reference => No_Reference, Name1 => Error_Types.NoName, Name2 => Error_Types.NoName, Name3 => Error_Types.NoName, Echo_Str => Error); Error_Context_Rec.Errs := File; EchoErrorEntry (SPARK_IO.Standard_Output, Error); end if; end if; end Representation_Clause; 
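-- The warning reporters in this region (Hidden_Text, Hidden_Handler and
-- Representation_Clause above, A_Pragma below) all follow the same pattern:
--   1. Justifications.Check_Whether_Justified - has this particular warning
--      been justified by an accept annotation at this position?  If so only
--      the justified-warnings counter is incremented.
--   2. WarningStatus - is the whole warning class suppressed (for example
--      via the warning control file)?  If so only the suppressed counter
--      for that class is incremented.
--   3. Otherwise the message count is incremented, the warning is added to
--      the error buffer with ErrorBuffer.Add and echoed to standard output.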
--------------------------------------------------------------------------- procedure A_Pragma (Pragma_Name : in LexTokenManager.Lex_String; Position : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in WarningStatus.SuppressionList; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# in out Total_Error_Count; --# derives Conversions.State, --# ErrorBuffer.Buffer from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# LexTokenManager.State, --# Position, --# Pragma_Name, --# WarningStatus.SuppressionList & --# Echo_Accumulator, --# Error_Context_Rec, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# LexTokenManager.State, --# Position, --# Pragma_Name, --# SPARK_IO.File_Sys, --# WarningStatus.SuppressionList & --# Total_Error_Count from *, --# CommandLineData.Content, --# Error_Context_Rec, --# LexTokenManager.State, --# Position, --# Pragma_Name, --# WarningStatus.SuppressionList; is Error : Error_Types.StringError; File : Error_IO.File_Type; Error_Is_Justified : Boolean; begin Justifications.Check_Whether_Justified (Which_Table => Error_Context_Rec.Justifications_Data_Table, Line => Position, Kind => Warning_Message, Err_Num => 3, Identifiers => Null_Justification_Identifiers, Match_Found => Error_Is_Justified); if Error_Is_Justified then Inc_Total_Justified_Warnings; else if WarningStatus.Pragma_Is_Suppressed (Pragma_Name => Pragma_Name) then Inc_Suppressed_Warning_Counter (Warning_Type => Pragmas); else Inc_Message_Count (Err_Type => Error_Types.WarningWithPosition); File := Error_Context_Rec.Errs; ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.WarningWithPosition, Pos => Position, Scope => Dictionary.GlobalScope, Error_Number => 3, Reference => No_Reference, Name1 => Error_Types.NoName, Name2 => Error_Types.NoName, Name3 => Error_Types.NoName, Echo_Str => Error); Error_Context_Rec.Errs := File; EchoErrorEntry (SPARK_IO.Standard_Output, Error); end if; end if; end A_Pragma; -------------------------------------------------------------------------- procedure Add_Cut_Point (At_Line : LexTokenManager.Line_Numbers) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in WarningStatus.SuppressionList; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# in out Total_Error_Count; --# derives Conversions.State, --# ErrorBuffer.Buffer from *, --# At_Line, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorBuffer.Buffer, --# WarningStatus.SuppressionList & --# Echo_Accumulator, --# Error_Context_Rec, --# SPARK_IO.File_Sys from At_Line, --# CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# WarningStatus.SuppressionList & --# Total_Error_Count from *, --# WarningStatus.SuppressionList; is Error : Error_Types.StringError; File : Error_IO.File_Type; begin if WarningStatus.Is_Suppressed (The_Element => Default_Loop_Assertions) then Inc_Suppressed_Warning_Counter (Warning_Type => Default_Loop_Assertions); else Inc_Message_Count (Err_Type => 
Error_Types.WarningWithoutPosition); File := Error_Context_Rec.Errs; ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.WarningWithoutPosition, Pos => LexTokenManager.Token_Position'(Start_Line_No => At_Line, Start_Pos => 0), Scope => Dictionary.GlobalScope, Error_Number => 402, Reference => No_Reference, Name1 => Error_Types.NoName, Name2 => Error_Types.NoName, Name3 => Error_Types.NoName, Echo_Str => Error); Error_Context_Rec.Errs := File; EchoErrorEntry (SPARK_IO.Standard_Output, Error); end if; end Add_Cut_Point; -------------------------------------------------------------------------- procedure Semantic_Warning_With_Position (Err_Num : in Natural; Position : in LexTokenManager.Token_Position; Id_Str : in LexTokenManager.Lex_String; Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in WarningStatus.SuppressionList; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out Potential_Invalid_Value; --# in out SPARK_IO.File_Sys; --# in out Total_Error_Count; --# derives Conversions.State, --# ErrorBuffer.Buffer from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorBuffer.Buffer, --# Err_Num, --# Id_Str, --# LexTokenManager.State, --# Position, --# Scope, --# Sym, --# WarningStatus.SuppressionList & --# Echo_Accumulator, --# Error_Context_Rec, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Err_Num, --# Id_Str, --# LexTokenManager.State, --# Position, --# Scope, --# SPARK_IO.File_Sys, --# Sym, --# WarningStatus.SuppressionList & --# Potential_Invalid_Value from *, --# CommandLineData.Content, --# Err_Num, --# Sym, --# WarningStatus.SuppressionList & --# Total_Error_Count from *, --# CommandLineData.Content, --# Err_Num, --# WarningStatus.SuppressionList; is Error : Error_Types.StringError; File : Error_IO.File_Type; begin if Err_Num = 1 and then WarningStatus.Is_Suppressed (The_Element => With_Clauses) then Inc_Suppressed_Warning_Counter (Warning_Type => With_Clauses); elsif Err_Num = 4 and then WarningStatus.Is_Suppressed (The_Element => Declare_Annotations) then Inc_Suppressed_Warning_Counter (Warning_Type => Declare_Annotations); elsif Err_Num = 5 and then WarningStatus.Is_Suppressed (The_Element => Interrupt_Handlers) then Inc_Suppressed_Warning_Counter (Warning_Type => Interrupt_Handlers); elsif Err_Num = 7 and then WarningStatus.Is_Suppressed (The_Element => Ada2005_Reserved_Words) then Inc_Suppressed_Warning_Counter (Warning_Type => Ada2005_Reserved_Words); elsif Err_Num = 11 and then WarningStatus.Is_Suppressed (The_Element => Others_Clauses) then Inc_Suppressed_Warning_Counter (Warning_Type => Others_Clauses); elsif Err_Num = 12 and then WarningStatus.Is_Suppressed (The_Element => Unchecked_Conversion) then Inc_Suppressed_Warning_Counter (Warning_Type => Unchecked_Conversion); elsif Err_Num = 169 and then WarningStatus.Is_Suppressed (The_Element => Direct_Updates) then Inc_Suppressed_Warning_Counter (Warning_Type => Direct_Updates); elsif (Err_Num = 200 or Err_Num = 201) and then WarningStatus.Is_Suppressed (The_Element => Static_Expressions) then Inc_Suppressed_Warning_Counter (Warning_Type => Static_Expressions); elsif Err_Num = 302 and then not CommandLineData.Content.VCG and then WarningStatus.Is_Suppressed (The_Element => Expression_Reordering) then -- Suppress 
ambiguous ordering message if not -- generating VCs for Overflow_Check Inc_Suppressed_Warning_Counter (Warning_Type => Expression_Reordering); elsif Err_Num = 309 and then WarningStatus.Is_Suppressed (The_Element => Type_Conversions) then Inc_Suppressed_Warning_Counter (Warning_Type => Type_Conversions); elsif Err_Num = 310 and then WarningStatus.Is_Suppressed (The_Element => Obsolescent_Features) then Inc_Suppressed_Warning_Counter (Warning_Type => Obsolescent_Features); elsif Err_Num = 320 and then WarningStatus.Is_Suppressed (The_Element => Proof_Function_Non_Boolean) then Inc_Suppressed_Warning_Counter (Warning_Type => Proof_Function_Non_Boolean); elsif Err_Num = 321 and then WarningStatus.Is_Suppressed (The_Element => Proof_Function_Implicit) then Inc_Suppressed_Warning_Counter (Warning_Type => Proof_Function_Implicit); elsif (Err_Num = 322 or Err_Num = 323) and then WarningStatus.Is_Suppressed (Proof_Function_Refinement) then Inc_Suppressed_Warning_Counter (Warning_Type => Proof_Function_Refinement); elsif Err_Num = 350 and then WarningStatus.Is_Suppressed (The_Element => Imported_Objects) then Inc_Suppressed_Warning_Counter (Warning_Type => Imported_Objects); elsif Err_Num = 392 and then WarningStatus.Is_Suppressed (The_Element => External_Variable_Assignment) then Inc_Suppressed_Warning_Counter (Warning_Type => External_Variable_Assignment); elsif Err_Num = 394 and then WarningStatus.Is_Suppressed (The_Element => Unuseable_Private_Types) then Inc_Suppressed_Warning_Counter (Warning_Type => Unuseable_Private_Types); -- following two separate warnings share the same supression key word elsif Err_Num = 396 and then -- non-moded variable with address clause WarningStatus.Is_Suppressed (The_Element => Unexpected_Address_Clauses) then Inc_Suppressed_Warning_Counter (Warning_Type => Unexpected_Address_Clauses); elsif Err_Num = 351 and then -- constant with address clause WarningStatus.Is_Suppressed (The_Element => Unexpected_Address_Clauses) then Inc_Suppressed_Warning_Counter (Warning_Type => Unexpected_Address_Clauses); elsif Err_Num = 380 and then WarningStatus.Is_Suppressed (The_Element => Style_Check_Casing) then Inc_Suppressed_Warning_Counter (Warning_Type => Style_Check_Casing); else if Err_Num = 399 then --special case, although just a warning we want this to suppress flow analysis Error_Context_Rec.Severity := Semantic_Errs; end if; Inc_Message_Count (Err_Type => Error_Types.WarningWithPosition); File := Error_Context_Rec.Errs; if Dictionary.Is_Null_Symbol (Sym) then ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.WarningWithPosition, Pos => Position, Scope => Dictionary.GlobalScope, Error_Number => Err_Num, Reference => No_Reference, Name1 => Lex_String_To_Name (Str => Id_Str), Name2 => Error_Types.NoName, Name3 => Error_Types.NoName, Echo_Str => Error); else Potential_Invalid_Value := Potential_Invalid_Value or else Err_Num = 392 -- Read of an external variable or else Err_Num = 12; -- Use of an unchecked conversion ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.WarningWithPosition, Pos => Position, Scope => Scope, Error_Number => Err_Num, Reference => No_Reference, Name1 => Symbol_To_Name (Sym => Sym), Name2 => Error_Types.NoName, Name3 => Error_Types.NoName, Echo_Str => Error); end if; Error_Context_Rec.Errs := File; EchoErrorEntry (SPARK_IO.Standard_Output, Error); end if; end Semantic_Warning_With_Position; procedure Semantic_Warning_Without_Position (Err_Num : in Natural; Position : in LexTokenManager.Token_Position; Id_Str : in 
LexTokenManager.Lex_String; Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in WarningStatus.SuppressionList; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# in out Total_Error_Count; --# derives Conversions.State, --# ErrorBuffer.Buffer from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorBuffer.Buffer, --# Err_Num, --# Id_Str, --# LexTokenManager.State, --# Position, --# Scope, --# Sym, --# WarningStatus.SuppressionList & --# Echo_Accumulator, --# Error_Context_Rec, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Err_Num, --# Id_Str, --# LexTokenManager.State, --# Position, --# Scope, --# SPARK_IO.File_Sys, --# Sym, --# WarningStatus.SuppressionList & --# Total_Error_Count from *, --# Err_Num, --# WarningStatus.SuppressionList; is Error : Error_Types.StringError; File : Error_IO.File_Type; begin if Err_Num = 400 and then WarningStatus.Is_Suppressed (The_Element => Unused_Variables) then Inc_Suppressed_Warning_Counter (Warning_Type => Unused_Variables); elsif Err_Num = 403 and then WarningStatus.Is_Suppressed (The_Element => Constant_Variables) then Inc_Suppressed_Warning_Counter (Warning_Type => Constant_Variables); elsif Err_Num = 405 and then WarningStatus.Is_Suppressed (The_Element => Real_RTCs) then Inc_Suppressed_Warning_Counter (Warning_Type => Real_RTCs); elsif Err_Num = 430 and then WarningStatus.Is_Suppressed (The_Element => SLI_Generation) then Inc_Suppressed_Warning_Counter (Warning_Type => SLI_Generation); elsif Err_Num = 431 and then WarningStatus.Is_Suppressed (The_Element => Main_Program_Precondition) then Inc_Suppressed_Warning_Counter (Warning_Type => Main_Program_Precondition); else Inc_Message_Count (Err_Type => Error_Types.WarningWithoutPosition); File := Error_Context_Rec.Errs; if Dictionary.Is_Null_Symbol (Sym) then ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.WarningWithoutPosition, Pos => Position, Scope => Dictionary.GlobalScope, Error_Number => Err_Num, Reference => No_Reference, Name1 => Lex_String_To_Name (Str => Id_Str), Name2 => Error_Types.NoName, Name3 => Error_Types.NoName, Echo_Str => Error); else ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.WarningWithoutPosition, Pos => Position, Scope => Scope, Error_Number => Err_Num, Reference => No_Reference, Name1 => Symbol_To_Name (Sym => Sym), Name2 => Error_Types.NoName, Name3 => Error_Types.NoName, Echo_Str => Error); end if; Error_Context_Rec.Errs := File; EchoErrorEntry (SPARK_IO.Standard_Output, Error); end if; end Semantic_Warning_Without_Position; function Is_Number_With_Position (Err_Num : in Natural) return Boolean is begin return Err_Num >= 1 and then Err_Num < 400; end Is_Number_With_Position; procedure Semantic_Warning (Err_Num : in Natural; Position : in LexTokenManager.Token_Position; Id_Str : in LexTokenManager.Lex_String) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in WarningStatus.SuppressionList; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out Potential_Invalid_Value; --# in out SPARK_IO.File_Sys; --# in out Total_Error_Count; --# derives Conversions.State, --# ErrorBuffer.Buffer from *, --# 
CommandLineData.Content, --# Dictionary.Dict, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Err_Num, --# Id_Str, --# LexTokenManager.State, --# Position, --# WarningStatus.SuppressionList & --# Echo_Accumulator, --# Error_Context_Rec, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Err_Num, --# Id_Str, --# LexTokenManager.State, --# Position, --# SPARK_IO.File_Sys, --# WarningStatus.SuppressionList & --# Potential_Invalid_Value, --# Total_Error_Count from *, --# CommandLineData.Content, --# Error_Context_Rec, --# Err_Num, --# Id_Str, --# LexTokenManager.State, --# Position, --# WarningStatus.SuppressionList; is Error_Is_Justified : Boolean; begin Justifications.Check_Whether_Justified (Which_Table => Error_Context_Rec.Justifications_Data_Table, Line => Position, Kind => Warning_Message, Err_Num => Err_Num, Identifiers => Justification_Identifiers'(1 => Lex_Str_To_Justification_Identifier (Str => Id_Str), others => Null_Justification_Identifier), Match_Found => Error_Is_Justified); if Error_Is_Justified then Inc_Total_Justified_Warnings; else if Is_Number_With_Position (Err_Num => Err_Num) then Semantic_Warning_With_Position (Err_Num => Err_Num, Position => Position, Id_Str => Id_Str, Sym => Dictionary.NullSymbol, Scope => Dictionary.GlobalScope); else Semantic_Warning_Without_Position (Err_Num => Err_Num, Position => Position, Id_Str => Id_Str, Sym => Dictionary.NullSymbol, Scope => Dictionary.GlobalScope); end if; end if; end Semantic_Warning; ---------------------------------------------------------------------------------- procedure SLI_Generation_Warning (Position : in LexTokenManager.Token_Position; Id_Str : in LexTokenManager.Lex_String) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in WarningStatus.SuppressionList; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out Potential_Invalid_Value; --# in out SPARK_IO.File_Sys; --# in out Stop_SLI; --# in out Total_Error_Count; --# derives Conversions.State, --# ErrorBuffer.Buffer from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Id_Str, --# LexTokenManager.State, --# Position, --# Stop_SLI, --# WarningStatus.SuppressionList & --# Echo_Accumulator, --# Error_Context_Rec, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Id_Str, --# LexTokenManager.State, --# Position, --# SPARK_IO.File_Sys, --# Stop_SLI, --# WarningStatus.SuppressionList & --# Potential_Invalid_Value, --# Total_Error_Count from *, --# CommandLineData.Content, --# Error_Context_Rec, --# Id_Str, --# LexTokenManager.State, --# Position, --# Stop_SLI, --# WarningStatus.SuppressionList & --# Stop_SLI from *, --# CommandLineData.Content; is begin if Generate_SLI then Stop_SLI := True; Semantic_Warning (Err_Num => 430, Position => Position, Id_Str => Id_Str); end if; end SLI_Generation_Warning; ---------------------------------------------------------------------------------- procedure Semantic_Warning_Sym (Err_Num : in Natural; Position : in LexTokenManager.Token_Position; Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in 
WarningStatus.SuppressionList; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out Potential_Invalid_Value; --# in out SPARK_IO.File_Sys; --# in out Total_Error_Count; --# derives Conversions.State, --# ErrorBuffer.Buffer from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Err_Num, --# LexTokenManager.State, --# Position, --# Scope, --# Sym, --# WarningStatus.SuppressionList & --# Echo_Accumulator, --# Error_Context_Rec, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Err_Num, --# LexTokenManager.State, --# Position, --# Scope, --# SPARK_IO.File_Sys, --# Sym, --# WarningStatus.SuppressionList & --# Potential_Invalid_Value, --# Total_Error_Count from *, --# CommandLineData.Content, --# Error_Context_Rec, --# Err_Num, --# LexTokenManager.State, --# Position, --# Sym, --# WarningStatus.SuppressionList; is Error_Is_Justified : Boolean; begin Justifications.Check_Whether_Justified (Which_Table => Error_Context_Rec.Justifications_Data_Table, Line => Position, Kind => Warning_Message, Err_Num => Err_Num, Identifiers => Justification_Identifiers'(1 => Symbol_To_Justification_Identifier (Sym => Sym), others => Null_Justification_Identifier), Match_Found => Error_Is_Justified); if Error_Is_Justified then Inc_Total_Justified_Warnings; else if Is_Number_With_Position (Err_Num => Err_Num) then Semantic_Warning_With_Position (Err_Num => Err_Num, Position => Position, Id_Str => LexTokenManager.Null_String, Sym => Sym, Scope => Scope); else Semantic_Warning_Without_Position (Err_Num => Err_Num, Position => Position, Id_Str => LexTokenManager.Null_String, Sym => Sym, Scope => Scope); end if; end if; end Semantic_Warning_Sym; ---------------------------------------------------------------------------------- procedure SLI_Generation_Warning_Sym (Position : in LexTokenManager.Token_Position; Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in WarningStatus.SuppressionList; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out Potential_Invalid_Value; --# in out SPARK_IO.File_Sys; --# in out Stop_SLI; --# in out Total_Error_Count; --# derives Conversions.State, --# ErrorBuffer.Buffer from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# LexTokenManager.State, --# Position, --# Scope, --# Stop_SLI, --# Sym, --# WarningStatus.SuppressionList & --# Echo_Accumulator, --# Error_Context_Rec, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# LexTokenManager.State, --# Position, --# Scope, --# SPARK_IO.File_Sys, --# Stop_SLI, --# Sym, --# WarningStatus.SuppressionList & --# Potential_Invalid_Value, --# Total_Error_Count from *, --# CommandLineData.Content, --# Error_Context_Rec, --# LexTokenManager.State, --# Position, --# Stop_SLI, --# Sym, --# WarningStatus.SuppressionList & --# Stop_SLI from *, --# CommandLineData.Content; is begin if Generate_SLI then Stop_SLI := True; Semantic_Warning_Sym (Err_Num => 430, Position => Position, Sym => Sym, Scope => Scope); end if; end SLI_Generation_Warning_Sym; 
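-- Note on the two SLI warning wrappers above: Stop_SLI behaves as a latch.
-- The first call that finds Generate_SLI true sets Stop_SLI and reports
-- warning 430; from then on Generate_SLI returns False, so any code that
-- consults it will skip SLI (cross-reference) generation for the rest of
-- the run and the warning is not repeated.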
---------------------------------------------------------------------------------- -- Previously, LexErrors, SyntaxErrors and SyntaxRecovery didn't use -- the error buffer mechanism and were written directly to the error -- file. This meant that attempts to NOT store strings anywhere in the -- error handler system were impossible. The three routines have been -- altered below in an elegant bodge to use ErrorBuffer.Add. -- The trick has been to build the error string as now but instead of storing -- it, we convert it to a lex string, place it in parameter Name1 and then use -- the ErrorBuffer.Add mechanism. This is not wasteful of LexTokenManager -- storage space because these errors are fatal anyway. -- -- The result is that all errors are added to the system via ErrorBuffer.Add -- and remain in numeric form until PrintErrors or AppendErrors converts them. procedure Lex_Error (Error_Message, Recovery_Message : in String; Error_Item : in LexTokenManager.Lex_Value) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in WarningStatus.SuppressionList; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out LexTokenManager.State; --# in out Potential_Invalid_Value; --# in out SPARK_IO.File_Sys; --# in out Stop_SLI; --# in out Total_Error_Count; --# derives Conversions.State, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Potential_Invalid_Value, --# SPARK_IO.File_Sys, --# Total_Error_Count from *, --# CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Error_Item, --# Error_Message, --# LexTokenManager.State, --# Recovery_Message, --# SPARK_IO.File_Sys, --# Stop_SLI, --# WarningStatus.SuppressionList & --# LexTokenManager.State from *, --# Error_Item, --# Error_Message, --# Recovery_Message & --# Stop_SLI from *, --# CommandLineData.Content; is Error : Error_Types.StringError; Error_Lex_String : LexTokenManager.Lex_String; --pna begin Error_Context_Rec.Severity := Fatal; Error := Error_Types.StringError'(0, Error_Types.LexErr, Error_Item.Position, E_Strings.Empty_String); Append_String (E_Str => Error, Str => Error_Message); Append_String (E_Str => Error, Str => " - "); Append_String (E_Str => Error, Str => Recovery_Message); Append_String (E_Str => Error, Str => "."); -- convert string to LexString so we can use ErrorBuffer.Add LexTokenManager.Insert_Examiner_String (Str => Error.Message, Lex_Str => Error_Lex_String); Inc_Message_Count (Err_Type => Error_Types.LexErr); ErrorBuffer.Add (Err_File => Error_Context_Rec.Errs, Err_Type => Error_Types.LexErr, Pos => Error_Item.Position, Scope => Dictionary.GlobalScope, Error_Number => 0, Reference => 0, Name1 => Lex_String_To_Name (Str => Error_Lex_String), Name2 => Error_Types.NoName, Name3 => Error_Types.NoName, Echo_Str => Error); EchoErrorEntry (SPARK_IO.Standard_Output, Error); SLI_Generation_Warning (Position => Error_Item.Position, Id_Str => Error_Lex_String); end Lex_Error; -------------------------------------------------------------------------- procedure Syntax_Error (Error_Item : in LexTokenManager.Lex_Value; Current_Sym, Entry_Symbol : in SP_Symbols.SP_Symbol; No_Of_Terminals, No_Of_Non_Terminals : in SP_Expected_Symbols.SP_Ess_Sym_Range; Terminal_List, Non_Terminal_List : in SP_Expected_Symbols.SP_Exp_Sym_List) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in WarningStatus.SuppressionList; --# in out
Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out LexTokenManager.State; --# in out Potential_Invalid_Value; --# in out SPARK_IO.File_Sys; --# in out Stop_SLI; --# in out Total_Error_Count; --# derives Conversions.State, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Potential_Invalid_Value, --# SPARK_IO.File_Sys, --# Total_Error_Count from *, --# CommandLineData.Content, --# Conversions.State, --# Current_Sym, --# Dictionary.Dict, --# Echo_Accumulator, --# Entry_Symbol, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Error_Item, --# LexTokenManager.State, --# Non_Terminal_List, --# No_Of_Non_Terminals, --# No_Of_Terminals, --# SPARK_IO.File_Sys, --# Stop_SLI, --# Terminal_List, --# WarningStatus.SuppressionList & --# LexTokenManager.State from *, --# Current_Sym, --# Entry_Symbol, --# Error_Item, --# Non_Terminal_List, --# No_Of_Non_Terminals, --# No_Of_Terminals, --# Terminal_List & --# Stop_SLI from *, --# CommandLineData.Content; is Error : Error_Types.StringError; Error_Lex_String : LexTokenManager.Lex_String; --pna procedure Append_Sym_List (Error : in out Error_Types.StringError; No_In_List : in SP_Expected_Symbols.SP_Ess_Sym_Range; List : in SP_Expected_Symbols.SP_Exp_Sym_List; Last_Sep : in String) --# derives Error from *, --# Last_Sep, --# List, --# No_In_List; is begin AppendSym (Error => Error, Sym => List (1)); for I in SP_Expected_Symbols.SP_Ess_Sym_Range range 2 .. No_In_List - 1 loop Append_String (E_Str => Error, Str => ", "); AppendSym (Error => Error, Sym => List (I)); end loop; if No_In_List > 1 then Append_String (E_Str => Error, Str => Last_Sep); AppendSym (Error => Error, Sym => List (No_In_List)); end if; end Append_Sym_List; procedure Append_Hint --# global in Current_Sym; --# in Entry_Symbol; --# in Non_Terminal_List; --# in No_Of_Non_Terminals; --# in No_Of_Terminals; --# in Terminal_List; --# in out Error; --# derives Error from *, --# Current_Sym, --# Entry_Symbol, --# Non_Terminal_List, --# No_Of_Non_Terminals, --# No_Of_Terminals, --# Terminal_List; is procedure Dump_State --# derives ; is --# hide Dump_State; begin if CommandLineData.Content.Debug.Parser then Debug.PrintMsg ("In ErrorHandler.Syntax_Error.Append_Hint we know:", True); Debug.PrintInt ("The Number of Terminals is ", Integer (No_Of_Terminals)); Debug.PrintMsg ("The Terminal List is:", True); for TLI in SP_Expected_Symbols.SP_Ess_Sym_Range range 1 .. No_Of_Terminals loop Debug.PrintMsg (SP_Symbols.SP_Symbol'Image (Terminal_List (TLI)), False); Debug.PrintMsg (" ", False); end loop; Debug.PrintMsg ("", True); Debug.PrintInt ("The Number of Non-Terminals is ", Integer (No_Of_Non_Terminals)); Debug.PrintMsg ("The Non-Terminal List is:", True); for NTLI in SP_Expected_Symbols.SP_Ess_Sym_Range range 1 .. 
No_Of_Non_Terminals loop Debug.PrintMsg (SP_Symbols.SP_Symbol'Image (Non_Terminal_List (NTLI)), False); Debug.PrintMsg (" ", False); end loop; Debug.PrintMsg ("", True); Debug.PrintMsg ("The Entry Symbol:", False); Debug.PrintMsg (SP_Symbols.SP_Symbol'Image (Entry_Symbol), True); Debug.PrintMsg ("The Current Symbol:", False); Debug.PrintMsg (SP_Symbols.SP_Symbol'Image (Current_Sym), True); Debug.Print_Lex_Str (Msg => "The current lex token is:", L => Error_Item.Token_Str); end if; end Dump_State; begin Dump_State; --------------------------------------------------------------------- -- Hints for special cases -- -- Here, we search for known special cases where we can give -- a more informative error message than that generated by the parser -- alone. In detecting these special cases, take great care to find -- _exactly_ the case you're expecting, since issuing a hint which -- is wrong is probably just as bad as not issuing the hint at all! -- -- This is a bit of a kludge - the long-term solution is to -- re-implement the parser generator to produce better errors in -- general. --------------------------------------------------------------------- if No_Of_Terminals = 0 and No_Of_Non_Terminals = 1 then --------------------------------------------------------------------- -- Case 1 - A protected type definition where the user forgets to -- put a Priority pragma. This is likely to be a common mistake for -- beginner RavenSPARK users, so giving a more informative error here -- is important. --------------------------------------------------------------------- if (Non_Terminal_List (1) = SP_Symbols.protected_definition) and (Current_Sym = SP_Symbols.RWprocedure or Current_Sym = SP_Symbols.RWentry or Current_Sym = SP_Symbols.RWfunction) then Append_String (E_Str => Error, Str => " Pragma Priority or Interrupt_Priority is required here."); --------------------------------------------------------------------- -- Case 2 - Nested procedure body has unexpected semicolon. Can be -- legal if followed by a pragma Import, but definitely wrong if -- followed by "is". --------------------------------------------------------------------- elsif (Non_Terminal_List (1) = SP_Symbols.apragma) and (Current_Sym = SP_Symbols.RWis) and (Entry_Symbol = SP_Symbols.procedure_annotation) then Append_String (E_Str => Error, Str => " Unexpected semicolon on preceding procedure specification."); --------------------------------------------------------------------- -- Case 11 - A comma has been used instead of a semicolon prior to -- an "IN" in a moded global definition. --------------------------------------------------------------------- elsif (Non_Terminal_List (1) = SP_Symbols.global_variable) and (Current_Sym = SP_Symbols.RWin) and (Entry_Symbol = SP_Symbols.comma) then Append_String (E_Str => Error, Str => " A semicolon and not a comma should precede an ""IN"" in a moded global definition."); --------------------------------------------------------------------- -- Case 15 - Empty parenthesis in a subprogram specification. --------------------------------------------------------------------- elsif (Non_Terminal_List (1) = SP_Symbols.formal_part_rep) and (Current_Sym = SP_Symbols.right_paren) and (Entry_Symbol = SP_Symbols.left_paren) then Append_String (E_Str => Error, Str => " A subprogram specification without parameters should not have parentheses."); --------------------------------------------------------------------- -- Case 16 - Empty parenthesis in a subprogram call. 
--------------------------------------------------------------------- elsif (Non_Terminal_List (1) = SP_Symbols.name_argument_list) and (Current_Sym = SP_Symbols.right_paren) and (Entry_Symbol = SP_Symbols.left_paren) then Append_String (E_Str => Error, Str => " A subprogram call without parameters should not have parentheses."); --------------------------------------------------------------------- -- Case 18 - Formal parameter list starts with an FDL identifier --------------------------------------------------------------------- elsif (Non_Terminal_List (1) = SP_Symbols.formal_part_rep) and (Current_Sym = SP_Symbols.predefined_FDL_identifier) and (Entry_Symbol = SP_Symbols.left_paren) then Append_String (E_Str => Error, Str => " This token is a predefined FDL identifier, "); Append_String (E_Str => Error, Str => "and therefore may not be used in this context."); --------------------------------------------------------------------- -- Case 22 - Attempt to use unqualified array aggregate in annotation --------------------------------------------------------------------- elsif (Non_Terminal_List (1) = SP_Symbols.annotation_expression) and (Current_Sym = SP_Symbols.RWothers) and (Entry_Symbol = SP_Symbols.left_paren) then Append_String (E_Str => Error, Str => " Aggregate expressions in annotations must always be qualified"); Append_String (E_Str => Error, Str => " with a subtype mark."); --------------------------------------------------------------------- -- Case 23 - "body" appears in package spec - almost certainly -- owing to a mis-placed inherit annotation preceding package -- body --------------------------------------------------------------------- elsif (Non_Terminal_List (1) = SP_Symbols.dotted_simple_name) and (Current_Sym = SP_Symbols.RWbody) and (Entry_Symbol = SP_Symbols.RWpackage) then Append_String (E_Str => Error, Str => " Package body cannot follow inherit annotation."); end if; elsif No_Of_Terminals = 1 and No_Of_Non_Terminals = 0 then --------------------------------------------------------------------- -- Case 3 - misplaced "is" following procedure or function annotation --------------------------------------------------------------------- if Terminal_List (1) = SP_Symbols.RWinherit and (Current_Sym = SP_Symbols.RWglobal or Current_Sym = SP_Symbols.RWderives) then Append_String (E_Str => Error, Str => " Subprogram annotation should precede reserved word ""IS""."); --------------------------------------------------------------------- -- Case 4 - Expecting Identifier, but got a predefined FDL identifier -- A common error for beginners! 
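--   (Illustrative, not taken from the original source: names such as
--   Element or Update are reserved by the FDL proof language, so using
--   one where an ordinary identifier is expected produces this hint.)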
--------------------------------------------------------------------- elsif Terminal_List (1) = SP_Symbols.identifier and Current_Sym = SP_Symbols.predefined_FDL_identifier then Append_String (E_Str => Error, Str => " This token is a predefined FDL identifier, "); Append_String (E_Str => Error, Str => "and therefore may not be used in this context."); --------------------------------------------------------------------- -- Case 9 - "own" annotation in package body following "is" --------------------------------------------------------------------- elsif Terminal_List (1) = SP_Symbols.RWinherit and Current_Sym = SP_Symbols.RWown then Append_String (E_Str => Error, Str => " Own annotation should precede reserved word ""IS""."); --------------------------------------------------------------------- -- Case 14 - Incorrect name given in hide directive --------------------------------------------------------------------- elsif Terminal_List (1) = SP_Symbols.semicolon and Entry_Symbol = SP_Symbols.hidden_part then Append_String (E_Str => Error, Str => " The name in a preceding ""hide"" directive is probably incorrect."); end if; elsif No_Of_Terminals = 0 and No_Of_Non_Terminals = 0 then --------------------------------------------------------------------- -- Case 5 - Misplaced "is" _before_ a mandatory annotation --------------------------------------------------------------------- if Current_Sym = SP_Symbols.annotation_start and Entry_Symbol = SP_Symbols.RWis then Append_String (E_Str => Error, Str => " For packages and subprograms, annotations must "); Append_String (E_Str => Error, Str => "precede ""IS""."); --------------------------------------------------------------------- -- Case 6 - semicolon missing at end of annotation --------------------------------------------------------------------- elsif Current_Sym = SP_Symbols.annotation_end and Entry_Symbol = SP_Symbols.dotted_simple_name then Append_String (E_Str => Error, Str => " Annotations must end with a ;"); --------------------------------------------------------------------- -- Case 10 - unexpected semicolon at end of subprogram specification --------------------------------------------------------------------- elsif Current_Sym = SP_Symbols.RWis and Entry_Symbol = SP_Symbols.semicolon then Append_String (E_Str => Error, Str => " Unexpected semicolon on preceding subprogram specification."); --------------------------------------------------------------------- -- Case 12 - reported message is "No complete DOTTED_SIMPLE_NAME can be followed by IDENTIFIER here," -- Rockwell-Collins raised this as a candidate for improvement in relation to -- imports in derives annotations but the possible occurrence of the -- syntax error is much wider than this so we have to be careful with -- the wording of the error message.
--------------------------------------------------------------------- elsif Current_Sym = SP_Symbols.identifier and Entry_Symbol = SP_Symbols.dotted_simple_name then Append_String (E_Str => Error, Str => " A separator is missing between two successive names."); --------------------------------------------------------------------- -- Case 17 - declaration of an object using an FDL identifier --------------------------------------------------------------------- elsif Current_Sym = SP_Symbols.predefined_FDL_identifier and (Entry_Symbol = SP_Symbols.visible_part_rep or Entry_Symbol = SP_Symbols.initial_declarative_item_rep) then Append_String (E_Str => Error, Str => " This token is a predefined FDL identifier, "); Append_String (E_Str => Error, Str => "and therefore may not be used in this context."); --------------------------------------------------------------------- -- Case 19 - Attempt to label a declaration --------------------------------------------------------------------- elsif Current_Sym = SP_Symbols.left_label_paren and (Entry_Symbol = SP_Symbols.initial_declarative_item_rep or Entry_Symbol = SP_Symbols.RWis) then Append_String (E_Str => Error, Str => " Declarations may not be labelled."); --------------------------------------------------------------------- -- Case 20 - Attempt to label a proof context --------------------------------------------------------------------- elsif Current_Sym = SP_Symbols.proof_context and Entry_Symbol = SP_Symbols.sequence_of_labels then Append_String (E_Str => Error, Str => " Annotations may not be labelled."); --------------------------------------------------------------------- -- Case 21 - Attempt to use unqualified aggregate expression --------------------------------------------------------------------- elsif Current_Sym = SP_Symbols.arrow and Entry_Symbol = SP_Symbols.simple_expression then Append_String (E_Str => Error, Str => " Aggregate expressions must be qualified with a"); Append_String (E_Str => Error, Str => " subtype mark or must specify an unconstrained"); Append_String (E_Str => Error, Str => " array with only an others choice. 
Named and"); Append_String (E_Str => Error, Str => " positional association may not be mixed."); --------------------------------------------------------------------- -- Cases 24 and 25 - type, subtype, or object following a later declarative item --------------------------------------------------------------------- elsif Entry_Symbol = SP_Symbols.later_declarative_item_rep then if (Current_Sym = SP_Symbols.RWsubtype or Current_Sym = SP_Symbols.RWtype) then Append_String (E_Str => Error, Str => " Types and subtypes must precede all later declarative items"); Append_String (E_Str => Error, Str => " such as bodies, stubs or instantiations."); elsif (Current_Sym = SP_Symbols.identifier) then Append_String (E_Str => Error, Str => " Object declarations must precede all later declarative items"); Append_String (E_Str => Error, Str => " such as bodies, stubs or instantiations."); end if; end if; elsif No_Of_Terminals = 4 and No_Of_Non_Terminals = 0 then --------------------------------------------------------------------- -- Case 7 - missing ; on the end of an own variable annotation --------------------------------------------------------------------- if Entry_Symbol = SP_Symbols.own_variable_list and Current_Sym = SP_Symbols.annotation_end and Terminal_List (1) = SP_Symbols.left_paren and Terminal_List (2) = SP_Symbols.comma and Terminal_List (3) = SP_Symbols.colon and Terminal_List (4) = SP_Symbols.semicolon then Append_String (E_Str => Error, Str => " Own variable annotation must end with a ;"); end if; elsif No_Of_Terminals = 2 and No_Of_Non_Terminals = 0 then --------------------------------------------------------------------- -- Case 8 - missing ; on the end of an initializes annotation --------------------------------------------------------------------- if Entry_Symbol = SP_Symbols.package_variable_list and Current_Sym = SP_Symbols.annotation_end and Terminal_List (1) = SP_Symbols.comma and Terminal_List (2) = SP_Symbols.semicolon then Append_String (E_Str => Error, Str => " Initializes annotation must end with a ;"); end if; --------------------------------------------------------------------- -- Case 13 - derives list applied to a function -- From the SPARK.LLA grammar, The two expected terminal symbols -- RWpre and RWreturn can only occur together in an annotation -- as a function constraint so we should be safe to assume that we -- are in an annotation associated with a function. --------------------------------------------------------------------- if Entry_Symbol = SP_Symbols.annotation_start and Current_Sym = SP_Symbols.RWderives and Terminal_List (1) = SP_Symbols.RWpre and Terminal_List (2) = SP_Symbols.RWreturn then Append_String (E_Str => Error, Str => " A function never has a ""derives"" annotation." & " It may have a global definition in which all the global variables are of mode in."); end if; end if; end Append_Hint; -- Terminal_List not ref at the moment, but likely to be needed in future. 
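      -- Sketch of the message forms assembled in the body below (the angle-bracketed
      -- parts stand for symbol names written out by AppendSym and Append_Sym_List;
      -- the concrete wording shown is illustrative only):
      --    "<terminal> or <terminal> expected."
      --    "Neither <non-terminal> nor <non-terminal> can start with <symbol>."
      --    "No complete <non-terminal> can be followed by <symbol> here."
      -- for example "No complete DOTTED_SIMPLE_NAME can be followed by IDENTIFIER here."
      -- as quoted in the Case 12 comment above.  Append_Hint then appends any
      -- applicable case-specific hint.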
begin Error_Context_Rec.Severity := Fatal; Error := Error_Types.StringError'(0, Error_Types.SyntaxErr, Error_Item.Position, E_Strings.Empty_String); if No_Of_Terminals /= 0 and No_Of_Non_Terminals = 0 then Append_Sym_List (Error => Error, No_In_List => No_Of_Terminals, List => Terminal_List, Last_Sep => " or "); Append_String (E_Str => Error, Str => " expected"); elsif No_Of_Terminals = 0 and No_Of_Non_Terminals = 1 then Append_String (E_Str => Error, Str => "No "); Append_Sym_List (Error => Error, No_In_List => No_Of_Non_Terminals, List => Non_Terminal_List, Last_Sep => " nor "); Append_String (E_Str => Error, Str => " can start with "); AppendSym (Error => Error, Sym => Current_Sym); elsif No_Of_Terminals = 0 and No_Of_Non_Terminals > 1 then Append_String (E_Str => Error, Str => "Neither "); Append_Sym_List (Error => Error, No_In_List => No_Of_Non_Terminals, List => Non_Terminal_List, Last_Sep => " nor "); Append_String (E_Str => Error, Str => " can start with "); AppendSym (Error => Error, Sym => Current_Sym); elsif No_Of_Terminals /= 0 and No_Of_Non_Terminals /= 0 then Append_Sym_List (Error => Error, No_In_List => No_Of_Terminals, List => Terminal_List, Last_Sep => ", "); Append_String (E_Str => Error, Str => " or start of "); Append_Sym_List (Error => Error, No_In_List => No_Of_Non_Terminals, List => Non_Terminal_List, Last_Sep => " or "); Append_String (E_Str => Error, Str => " expected"); else if SP_Relations.SP_Terminal_Like (Entry_Symbol) then AppendSym (Error => Error, Sym => Entry_Symbol); Append_String (E_Str => Error, Str => " cannot be followed by "); else Append_String (E_Str => Error, Str => "No complete "); AppendSym (Error => Error, Sym => Entry_Symbol); Append_String (E_Str => Error, Str => " can be followed by "); end if; AppendSym (Error => Error, Sym => Current_Sym); Append_String (E_Str => Error, Str => " here"); end if; Append_String (E_Str => Error, Str => "."); Append_Hint; -- convert string to LExString so we can use ErrorBuffer.Add LexTokenManager.Insert_Examiner_String (Str => Error.Message, Lex_Str => Error_Lex_String); Inc_Message_Count (Err_Type => Error_Types.SyntaxErr); ErrorBuffer.Add (Err_File => Error_Context_Rec.Errs, Err_Type => Error_Types.SyntaxErr, Pos => Error_Item.Position, Scope => Dictionary.GlobalScope, Error_Number => 0, Reference => 0, Name1 => Lex_String_To_Name (Str => Error_Lex_String), Name2 => Error_Types.NoName, Name3 => Error_Types.NoName, Echo_Str => Error); EchoErrorEntry (SPARK_IO.Standard_Output, Error); SLI_Generation_Warning (Position => Error_Item.Position, Id_Str => Error_Lex_String); end Syntax_Error; --------------------------------------------------------------------------- procedure Syntax_Recovery (Recovery_Posn : in LexTokenManager.Lex_Value; Replacement_Sym : in SP_Symbols.SP_Symbol; Next_Sym : in SP_Symbols.SP_Terminal; No_Of_Syms : in Natural; Sym_List : in Err_Sym_List) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in WarningStatus.SuppressionList; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out LexTokenManager.State; --# in out Potential_Invalid_Value; --# in out SPARK_IO.File_Sys; --# in out Stop_SLI; --# in out Total_Error_Count; --# derives Conversions.State, --# ErrorBuffer.Buffer, --# Potential_Invalid_Value, --# Total_Error_Count from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# LexTokenManager.State, --# Next_Sym, --# No_Of_Syms, --# Recovery_Posn, 
--# Replacement_Sym, --# SPARK_IO.File_Sys, --# Stop_SLI, --# Sym_List, --# WarningStatus.SuppressionList & --# Echo_Accumulator, --# Error_Context_Rec, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# LexTokenManager.State, --# Next_Sym, --# No_Of_Syms, --# Recovery_Posn, --# Replacement_Sym, --# SPARK_IO.File_Sys, --# Stop_SLI, --# Sym_List, --# WarningStatus.SuppressionList & --# LexTokenManager.State from *, --# Error_Context_Rec, --# Next_Sym, --# No_Of_Syms, --# Recovery_Posn, --# Replacement_Sym, --# Sym_List & --# Stop_SLI from *, --# CommandLineData.Content, --# Error_Context_Rec; is Error, Unused_Error : Error_Types.StringError; Error_Lex_String : LexTokenManager.Lex_String; Max_Index : Err_Sym_Range; begin if Error_Context_Rec.Recovery_Messages then Error := Error_Types.StringError'(0, Error_Types.SyntaxRec, Recovery_Posn.Position, E_Strings.Empty_String); Append_String (E_Str => Error, Str => "The recovery action was to "); if Replacement_Sym = SP_Symbols.SPDEFAULT then Append_String (E_Str => Error, Str => "delete "); AppendSym (Error => Error, Sym => Sym_List (1)); elsif Replacement_Sym in SP_Symbols.SP_Terminal then if No_Of_Syms = 0 then Append_String (E_Str => Error, Str => "insert "); else Append_String (E_Str => Error, Str => "replace "); AppendSym (Error => Error, Sym => Sym_List (1)); Append_String (E_Str => Error, Str => " with "); end if; AppendSym (Error => Error, Sym => Replacement_Sym); else if Next_Sym /= SP_Symbols.SPDEFAULT then if Next_Sym = SP_Symbols.SPEND then Append_String (E_Str => Error, Str => "ignore all remaining tokens and "); else Append_String (E_Str => Error, Str => "ignore all tokens up until "); AppendSym (Error => Error, Sym => Next_Sym); Append_String (E_Str => Error, Str => " and "); end if; end if; if No_Of_Syms /= 0 then Append_String (E_Str => Error, Str => "replace "); if No_Of_Syms > Natural (Err_Sym_List'Last) then Max_Index := Err_Sym_List'Last - 1; else Max_Index := Err_Sym_Range (No_Of_Syms); end if; for Index in Err_Sym_Range range 1 .. Max_Index loop AppendSym (Error => Error, Sym => Sym_List (Index)); Append_String (E_Str => Error, Str => " "); end loop; if No_Of_Syms > Natural (Err_Sym_List'Last) then Append_String (E_Str => Error, Str => " .. 
"); AppendSym (Error => Error, Sym => Sym_List (Err_Sym_List'Last)); end if; Append_String (E_Str => Error, Str => " by "); else Append_String (E_Str => Error, Str => "insert "); end if; AppendSym (Error => Error, Sym => Replacement_Sym); end if; Append_String (E_Str => Error, Str => "."); -- convert string to LexString so we can use ErrorBuffer.Add LexTokenManager.Insert_Examiner_String (Str => Error.Message, Lex_Str => Error_Lex_String); Inc_Message_Count (Err_Type => Error_Types.SyntaxRec); --# accept Flow, 10, Unused_Error, "Only used where we want to echo message to screen"; ErrorBuffer.Add (Err_File => Error_Context_Rec.Errs, Err_Type => Error_Types.SyntaxRec, Pos => Recovery_Posn.Position, Scope => Dictionary.GlobalScope, Error_Number => 0, Reference => 0, Name1 => Lex_String_To_Name (Str => Error_Lex_String), Name2 => Error_Types.NoName, Name3 => Error_Types.NoName, Echo_Str => Unused_Error); --# end accept; -- note, no echo back to screen in this case SLI_Generation_Warning (Position => Recovery_Posn.Position, Id_Str => Error_Lex_String); end if; --# accept Flow, 33, Unused_Error, "Consequence of earlier deliberate non-use"; end Syntax_Recovery; ------------------------------------------------------------------------- procedure Semantic_Note (Err_Num : in Natural; Position : in LexTokenManager.Token_Position; Id_Str : in LexTokenManager.Lex_String) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in WarningStatus.SuppressionList; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# in out Total_Error_Count; --# derives Conversions.State, --# ErrorBuffer.Buffer from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorBuffer.Buffer, --# Err_Num, --# Id_Str, --# LexTokenManager.State, --# Position, --# WarningStatus.SuppressionList & --# Echo_Accumulator, --# Error_Context_Rec, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Err_Num, --# Id_Str, --# LexTokenManager.State, --# Position, --# SPARK_IO.File_Sys, --# WarningStatus.SuppressionList & --# Total_Error_Count from *, --# WarningStatus.SuppressionList; is File : Error_IO.File_Type; Error : Error_Types.StringError; begin if WarningStatus.Is_Suppressed (The_Element => Notes) then Inc_Suppressed_Warning_Counter (Warning_Type => Notes); else Inc_Message_Count (Err_Type => Error_Types.Note); File := Error_Context_Rec.Errs; ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.Note, Pos => Position, Scope => Dictionary.GlobalScope, Error_Number => Err_Num, Reference => No_Reference, Name1 => Lex_String_To_Name (Str => Id_Str), Name2 => Error_Types.NoName, Name3 => Error_Types.NoName, Echo_Str => Error); Error_Context_Rec.Errs := File; EchoErrorEntry (SPARK_IO.Standard_Output, Error); end if; end Semantic_Note; -------------------------------------------------------------------------- procedure Dep_Semantic_Error (Err_Num : in Natural; Position : in LexTokenManager.Token_Position; Id_Str1 : in LexTokenManager.Lex_String; Id_Str2 : in LexTokenManager.Lex_String) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# in out Total_Error_Count; --# derives 
Conversions.State, --# ErrorBuffer.Buffer from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorBuffer.Buffer, --# Err_Num, --# Id_Str1, --# Id_Str2, --# LexTokenManager.State, --# Position & --# Echo_Accumulator, --# Error_Context_Rec, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Err_Num, --# Id_Str1, --# Id_Str2, --# LexTokenManager.State, --# Position, --# SPARK_IO.File_Sys & --# Total_Error_Count from *; is Error : Error_Types.StringError; File : Error_IO.File_Type; begin Inc_Message_Count (Err_Type => Error_Types.DepSemanticErr); File := Error_Context_Rec.Errs; ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.DepSemanticErr, Pos => Position, Scope => Dictionary.GlobalScope, Error_Number => Err_Num, Reference => No_Reference, Name1 => Lex_String_To_Name (Str => Id_Str1), Name2 => Lex_String_To_Name (Str => Id_Str2), Name3 => Error_Types.NoName, Echo_Str => Error); Error_Context_Rec.Errs := File; EchoErrorEntry (SPARK_IO.Standard_Output, Error); end Dep_Semantic_Error; --------------------------------------------------------------------------- procedure Dep_Semantic_Error_Sym (Err_Num : in Natural; Position : in LexTokenManager.Token_Position; Sym1 : in Dictionary.Symbol; Sym2 : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# in out Total_Error_Count; --# derives Conversions.State from *, --# CommandLineData.Content, --# ErrorBuffer.Buffer, --# Err_Num, --# Position, --# Scope, --# Sym1, --# Sym2 & --# Echo_Accumulator, --# Error_Context_Rec, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Err_Num, --# LexTokenManager.State, --# Position, --# Scope, --# SPARK_IO.File_Sys, --# Sym1, --# Sym2 & --# ErrorBuffer.Buffer from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Err_Num, --# Position, --# Scope, --# Sym1, --# Sym2 & --# Total_Error_Count from *; is Error : Error_Types.StringError; File : Error_IO.File_Type; begin Inc_Message_Count (Err_Type => Error_Types.DepSemanticErr); File := Error_Context_Rec.Errs; ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.DepSemanticErr, Pos => Position, Scope => Scope, Error_Number => Err_Num, Reference => No_Reference, Name1 => Symbol_To_Name (Sym => Sym1), Name2 => Symbol_To_Name (Sym => Sym2), Name3 => Error_Types.NoName, Echo_Str => Error); Error_Context_Rec.Errs := File; EchoErrorEntry (SPARK_IO.Standard_Output, Error); end Dep_Semantic_Error_Sym; --------------------------------------------------------------------------- procedure Semantic_Error (Err_Num : in Natural; Reference : in Natural; Position : in LexTokenManager.Token_Position; Id_Str : in LexTokenManager.Lex_String) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in WarningStatus.SuppressionList; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out Potential_Invalid_Value; --# in out SPARK_IO.File_Sys; --# in out Stop_SLI; --# in out Total_Error_Count; --# derives Conversions.State, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# 
Error_Context_Rec, --# Potential_Invalid_Value, --# SPARK_IO.File_Sys, --# Total_Error_Count from *, --# CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Err_Num, --# Id_Str, --# LexTokenManager.State, --# Position, --# Reference, --# SPARK_IO.File_Sys, --# Stop_SLI, --# WarningStatus.SuppressionList & --# Stop_SLI from *, --# CommandLineData.Content; is Error : Error_Types.StringError; File : Error_IO.File_Type; begin Error_Context_Rec.Severity := Semantic_Errs; Justifications.Set_Current_Unit_Has_Semantic_Errors (Which_Table => Error_Context_Rec.Justifications_Data_Table); Inc_Message_Count (Err_Type => Error_Types.SemanticErr); File := Error_Context_Rec.Errs; ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.SemanticErr, Pos => Position, Scope => Dictionary.GlobalScope, Error_Number => Err_Num, Reference => Reference, Name1 => Lex_String_To_Name (Str => Id_Str), Name2 => Error_Types.NoName, Name3 => Error_Types.NoName, Echo_Str => Error); Error_Context_Rec.Errs := File; EchoErrorEntry (SPARK_IO.Standard_Output, Error); SLI_Generation_Warning (Position => Position, Id_Str => Id_Str); end Semantic_Error; --------------------------------------------------------------------------- procedure Semantic_Error2 (Err_Num : in Natural; Reference : in Natural; Position : in LexTokenManager.Token_Position; Id_Str1 : in LexTokenManager.Lex_String; Id_Str2 : in LexTokenManager.Lex_String) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in WarningStatus.SuppressionList; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out Potential_Invalid_Value; --# in out SPARK_IO.File_Sys; --# in out Stop_SLI; --# in out Total_Error_Count; --# derives Conversions.State, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Potential_Invalid_Value, --# SPARK_IO.File_Sys, --# Total_Error_Count from *, --# CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Err_Num, --# Id_Str1, --# Id_Str2, --# LexTokenManager.State, --# Position, --# Reference, --# SPARK_IO.File_Sys, --# Stop_SLI, --# WarningStatus.SuppressionList & --# Stop_SLI from *, --# CommandLineData.Content; is Error : Error_Types.StringError; File : Error_IO.File_Type; begin Justifications.Set_Current_Unit_Has_Semantic_Errors (Which_Table => Error_Context_Rec.Justifications_Data_Table); Error_Context_Rec.Severity := Semantic_Errs; Inc_Message_Count (Err_Type => Error_Types.SemanticErr); File := Error_Context_Rec.Errs; ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.SemanticErr, Pos => Position, Scope => Dictionary.GlobalScope, Error_Number => Err_Num, Reference => Reference, Name1 => Lex_String_To_Name (Str => Id_Str1), Name2 => Lex_String_To_Name (Str => Id_Str2), Name3 => Error_Types.NoName, Echo_Str => Error); Error_Context_Rec.Errs := File; EchoErrorEntry (SPARK_IO.Standard_Output, Error); SLI_Generation_Warning (Position => Position, Id_Str => Id_Str1); end Semantic_Error2; --------------------------------------------------------------------------- procedure Semantic_Error_Sym (Err_Num : in Natural; Reference : in Natural; Position : in LexTokenManager.Token_Position; Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in 
LexTokenManager.State; --# in WarningStatus.SuppressionList; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out Potential_Invalid_Value; --# in out SPARK_IO.File_Sys; --# in out Stop_SLI; --# in out Total_Error_Count; --# derives Conversions.State, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Potential_Invalid_Value, --# SPARK_IO.File_Sys, --# Total_Error_Count from *, --# CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Err_Num, --# LexTokenManager.State, --# Position, --# Reference, --# Scope, --# SPARK_IO.File_Sys, --# Stop_SLI, --# Sym, --# WarningStatus.SuppressionList & --# Stop_SLI from *, --# CommandLineData.Content; is Error : Error_Types.StringError; File : Error_IO.File_Type; begin Justifications.Set_Current_Unit_Has_Semantic_Errors (Which_Table => Error_Context_Rec.Justifications_Data_Table); Error_Context_Rec.Severity := Semantic_Errs; Inc_Message_Count (Err_Type => Error_Types.SemanticErr); File := Error_Context_Rec.Errs; ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.SemanticErr, Pos => Position, Scope => Scope, Error_Number => Err_Num, Reference => Reference, Name1 => Symbol_To_Name (Sym => Sym), Name2 => Error_Types.NoName, Name3 => Error_Types.NoName, Echo_Str => Error); Error_Context_Rec.Errs := File; EchoErrorEntry (SPARK_IO.Standard_Output, Error); SLI_Generation_Warning_Sym (Position => Position, Sym => Sym, Scope => Scope); end Semantic_Error_Sym; --------------------------------------------------------------------------- procedure Semantic_Error_Sym2 (Err_Num : in Natural; Reference : in Natural; Position : in LexTokenManager.Token_Position; Sym : in Dictionary.Symbol; Sym2 : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in WarningStatus.SuppressionList; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out Potential_Invalid_Value; --# in out SPARK_IO.File_Sys; --# in out Stop_SLI; --# in out Total_Error_Count; --# derives Conversions.State, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Potential_Invalid_Value, --# SPARK_IO.File_Sys, --# Total_Error_Count from *, --# CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Err_Num, --# LexTokenManager.State, --# Position, --# Reference, --# Scope, --# SPARK_IO.File_Sys, --# Stop_SLI, --# Sym, --# Sym2, --# WarningStatus.SuppressionList & --# Stop_SLI from *, --# CommandLineData.Content; is Error : Error_Types.StringError; File : Error_IO.File_Type; begin Justifications.Set_Current_Unit_Has_Semantic_Errors (Which_Table => Error_Context_Rec.Justifications_Data_Table); Error_Context_Rec.Severity := Semantic_Errs; Inc_Message_Count (Err_Type => Error_Types.SemanticErr); File := Error_Context_Rec.Errs; ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.SemanticErr, Pos => Position, Scope => Scope, Error_Number => Err_Num, Reference => Reference, Name1 => Symbol_To_Name (Sym => Sym), Name2 => Symbol_To_Name (Sym => Sym2), Name3 => Error_Types.NoName, Echo_Str => Error); Error_Context_Rec.Errs := File; EchoErrorEntry (SPARK_IO.Standard_Output, Error); SLI_Generation_Warning_Sym (Position => Position, Sym => Sym, Scope 
=> Scope); end Semantic_Error_Sym2; --------------------------------------------------------------------------- procedure Semantic_Error_Lex1_Sym1 (Err_Num : in Natural; Reference : in Natural; Position : in LexTokenManager.Token_Position; Id_Str : in LexTokenManager.Lex_String; Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in WarningStatus.SuppressionList; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out Potential_Invalid_Value; --# in out SPARK_IO.File_Sys; --# in out Stop_SLI; --# in out Total_Error_Count; --# derives Conversions.State, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Potential_Invalid_Value, --# SPARK_IO.File_Sys, --# Total_Error_Count from *, --# CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Err_Num, --# Id_Str, --# LexTokenManager.State, --# Position, --# Reference, --# Scope, --# SPARK_IO.File_Sys, --# Stop_SLI, --# Sym, --# WarningStatus.SuppressionList & --# Stop_SLI from *, --# CommandLineData.Content; is Error : Error_Types.StringError; File : Error_IO.File_Type; begin Justifications.Set_Current_Unit_Has_Semantic_Errors (Which_Table => Error_Context_Rec.Justifications_Data_Table); Error_Context_Rec.Severity := Semantic_Errs; Inc_Message_Count (Err_Type => Error_Types.SemanticErr); File := Error_Context_Rec.Errs; ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.SemanticErr, Pos => Position, Scope => Scope, Error_Number => Err_Num, Reference => Reference, Name1 => Lex_String_To_Name (Str => Id_Str), Name2 => Symbol_To_Name (Sym => Sym), Name3 => Error_Types.NoName, Echo_Str => Error); Error_Context_Rec.Errs := File; EchoErrorEntry (SPARK_IO.Standard_Output, Error); SLI_Generation_Warning (Position => Position, Id_Str => Id_Str); end Semantic_Error_Lex1_Sym1; --------------------------------------------------------------------------- procedure Semantic_Error_Lex1_Sym2 (Err_Num : in Natural; Reference : in Natural; Position : in LexTokenManager.Token_Position; Id_Str : in LexTokenManager.Lex_String; Sym : in Dictionary.Symbol; Sym2 : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in WarningStatus.SuppressionList; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out Potential_Invalid_Value; --# in out SPARK_IO.File_Sys; --# in out Stop_SLI; --# in out Total_Error_Count; --# derives Conversions.State, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Potential_Invalid_Value, --# SPARK_IO.File_Sys, --# Total_Error_Count from *, --# CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Err_Num, --# Id_Str, --# LexTokenManager.State, --# Position, --# Reference, --# Scope, --# SPARK_IO.File_Sys, --# Stop_SLI, --# Sym, --# Sym2, --# WarningStatus.SuppressionList & --# Stop_SLI from *, --# CommandLineData.Content; is Error : Error_Types.StringError; File : Error_IO.File_Type; begin Justifications.Set_Current_Unit_Has_Semantic_Errors (Which_Table => Error_Context_Rec.Justifications_Data_Table); Error_Context_Rec.Severity := Semantic_Errs; Inc_Message_Count (Err_Type => 
Error_Types.SemanticErr); File := Error_Context_Rec.Errs; ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.SemanticErr, Pos => Position, Scope => Scope, Error_Number => Err_Num, Reference => Reference, Name1 => Lex_String_To_Name (Str => Id_Str), Name2 => Symbol_To_Name (Sym => Sym), Name3 => Symbol_To_Name (Sym => Sym2), Echo_Str => Error); Error_Context_Rec.Errs := File; EchoErrorEntry (SPARK_IO.Standard_Output, Error); SLI_Generation_Warning (Position => Position, Id_Str => Id_Str); end Semantic_Error_Lex1_Sym2; --------------------------------------------------------------------------- procedure Semantic_Error_Sym3 (Err_Num : in Natural; Reference : in Natural; Position : in LexTokenManager.Token_Position; Sym : in Dictionary.Symbol; Sym2 : in Dictionary.Symbol; Sym3 : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in WarningStatus.SuppressionList; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out Potential_Invalid_Value; --# in out SPARK_IO.File_Sys; --# in out Stop_SLI; --# in out Total_Error_Count; --# derives Conversions.State, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Potential_Invalid_Value, --# SPARK_IO.File_Sys, --# Total_Error_Count from *, --# CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Err_Num, --# LexTokenManager.State, --# Position, --# Reference, --# Scope, --# SPARK_IO.File_Sys, --# Stop_SLI, --# Sym, --# Sym2, --# Sym3, --# WarningStatus.SuppressionList & --# Stop_SLI from *, --# CommandLineData.Content; is Error : Error_Types.StringError; File : Error_IO.File_Type; begin Justifications.Set_Current_Unit_Has_Semantic_Errors (Which_Table => Error_Context_Rec.Justifications_Data_Table); Error_Context_Rec.Severity := Semantic_Errs; Inc_Message_Count (Err_Type => Error_Types.SemanticErr); File := Error_Context_Rec.Errs; ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.SemanticErr, Pos => Position, Scope => Scope, Error_Number => Err_Num, Reference => Reference, Name1 => Symbol_To_Name (Sym => Sym), Name2 => Symbol_To_Name (Sym => Sym2), Name3 => Symbol_To_Name (Sym => Sym3), Echo_Str => Error); Error_Context_Rec.Errs := File; EchoErrorEntry (SPARK_IO.Standard_Output, Error); SLI_Generation_Warning_Sym (Position => Position, Sym => Sym, Scope => Scope); end Semantic_Error_Sym3; --------------------------------------------------------------------------- procedure Control_Flow_Error (Err_Type : in Control_Flow_Err_Type; Position : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# in out Total_Error_Count; --# derives Conversions.State, --# ErrorBuffer.Buffer from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorBuffer.Buffer, --# Err_Type, --# Position & --# Echo_Accumulator, --# Error_Context_Rec, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Err_Type, --# LexTokenManager.State, --# Position, --# SPARK_IO.File_Sys & --# Total_Error_Count from *; is Error : Error_Types.StringError; File : 
Error_IO.File_Type; begin -- Note, no justification check here, these are serious structural errors in the code Error_Context_Rec.Severity := Flow_Errs; Inc_Message_Count (Err_Type => Error_Types.UncondFlowErr); File := Error_Context_Rec.Errs; ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.ControlFlowErr, Pos => Position, Scope => Dictionary.GlobalScope, Error_Number => Control_Flow_Err_Type'Pos (Err_Type) + Error_Types.ControlFlowErrOffset, Reference => No_Reference, Name1 => Error_Types.NoName, Name2 => Error_Types.NoName, Name3 => Error_Types.NoName, Echo_Str => Error); Error_Context_Rec.Errs := File; EchoErrorEntry (SPARK_IO.Standard_Output, Error); end Control_Flow_Error; ------------------------------------------------------------------------- procedure Data_Flow_Error (Err_Type : in Data_Flow_Err_Type; Position : in LexTokenManager.Token_Position; Var_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# in out Total_Error_Count; --# derives Conversions.State from *, --# CommandLineData.Content, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Err_Type, --# LexTokenManager.State, --# Position, --# Scope, --# Var_Sym & --# Echo_Accumulator, --# Error_Context_Rec, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Err_Type, --# LexTokenManager.State, --# Position, --# Scope, --# SPARK_IO.File_Sys, --# Var_Sym & --# ErrorBuffer.Buffer from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Error_Context_Rec, --# Err_Type, --# LexTokenManager.State, --# Position, --# Scope, --# Var_Sym & --# Total_Error_Count from *, --# CommandLineData.Content, --# Error_Context_Rec, --# Err_Type, --# LexTokenManager.State, --# Position, --# Var_Sym; is Error : Error_Types.StringError; File : Error_IO.File_Type; Error_Is_Justified : Boolean; function Obtain_Error_Number (Err_Type : Data_Flow_Err_Type) return Natural is Result : Natural; begin case Err_Type is when Expn_Undefined | Stmt_Undefined | Invariant_Exp => Result := Data_Flow_Err_Type'Pos (Err_Type) + Error_Types.UncondFlowErrorOffset; when Expn_May_Be_Undefined | Stmt_May_Be_Undefined => Result := Data_Flow_Err_Type'Pos (Err_Type) + Error_Types.CondFlowErrorOffset; end case; return Result; end Obtain_Error_Number; begin -- Data_Flow_Error Justifications.Check_Whether_Justified (Which_Table => Error_Context_Rec.Justifications_Data_Table, Line => Position, Kind => Flow_Message, Err_Num => Obtain_Error_Number (Err_Type => Err_Type), Identifiers => Justification_Identifiers'(1 => Symbol_To_Justification_Identifier (Sym => Var_Sym), others => Null_Justification_Identifier), Match_Found => Error_Is_Justified); if Error_Is_Justified then Inc_Total_Justified_Warnings; else File := Error_Context_Rec.Errs; case Err_Type is when Expn_Undefined | Stmt_Undefined | Invariant_Exp => Error_Context_Rec.Severity := Flow_Errs; Inc_Message_Count (Err_Type => Error_Types.UncondFlowErr); ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.UncondFlowErr, Pos => Position, Scope => Scope, Error_Number => Data_Flow_Err_Type'Pos (Err_Type) + Error_Types.UncondFlowErrorOffset, Reference => No_Reference, Name1 => Symbol_To_Name (Sym => Var_Sym), Name2 => Error_Types.NoName, Name3 => 
Error_Types.NoName, Echo_Str => Error); when Expn_May_Be_Undefined | Stmt_May_Be_Undefined => Error_Context_Rec.Severity := Flow_Warning; Inc_Message_Count (Err_Type => Error_Types.CondlFlowErr); ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.CondlFlowErr, Pos => Position, Scope => Scope, Error_Number => Data_Flow_Err_Type'Pos (Err_Type) + Error_Types.CondFlowErrorOffset, Reference => No_Reference, Name1 => Symbol_To_Name (Sym => Var_Sym), Name2 => Error_Types.NoName, Name3 => Error_Types.NoName, Echo_Str => Error); end case; Error_Context_Rec.Errs := File; EchoErrorEntry (SPARK_IO.Standard_Output, Error); end if; end Data_Flow_Error; ------------------------------------------------------------------------- procedure Ineffective_Stmt (Position : in LexTokenManager.Token_Position; Var_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# in out Total_Error_Count; --# derives Conversions.State from *, --# CommandLineData.Content, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# LexTokenManager.State, --# Position, --# Scope, --# Var_Sym & --# Echo_Accumulator, --# Error_Context_Rec, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# LexTokenManager.State, --# Position, --# Scope, --# SPARK_IO.File_Sys, --# Var_Sym & --# ErrorBuffer.Buffer from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Error_Context_Rec, --# LexTokenManager.State, --# Position, --# Scope, --# Var_Sym & --# Total_Error_Count from *, --# CommandLineData.Content, --# Error_Context_Rec, --# LexTokenManager.State, --# Position, --# Var_Sym; is Error : Error_Types.StringError; File : Error_IO.File_Type; Error_Is_Justified : Boolean; begin Justifications.Check_Whether_Justified (Which_Table => Error_Context_Rec.Justifications_Data_Table, Line => Position, Kind => Flow_Message, Err_Num => 10, Identifiers => Justification_Identifiers'(1 => Symbol_To_Justification_Identifier (Sym => Var_Sym), others => Null_Justification_Identifier), Match_Found => Error_Is_Justified); if Error_Is_Justified then Inc_Total_Justified_Warnings; else Error_Context_Rec.Severity := Flow_Errs; Inc_Message_Count (Err_Type => Error_Types.UncondFlowErr); File := Error_Context_Rec.Errs; ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.IneffectiveStat, Pos => Position, Scope => Scope, Error_Number => 10, Reference => No_Reference, Name1 => Symbol_To_Name (Sym => Var_Sym), Name2 => Error_Types.NoName, Name3 => Error_Types.NoName, Echo_Str => Error); Error_Context_Rec.Errs := File; EchoErrorEntry (SPARK_IO.Standard_Output, Error); end if; end Ineffective_Stmt; ---------------------------------------------------------------------------- procedure Stability_Error (Err_Type : in Stability_Err_Type; Position : in LexTokenManager.Token_Position; Stability_Index : in Index_Type) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# in out Total_Error_Count; --# derives Conversions.State, --# ErrorBuffer.Buffer from *, --# CommandLineData.Content, --# Dictionary.Dict, --# 
ErrorBuffer.Buffer, --# Error_Context_Rec, --# Err_Type, --# LexTokenManager.State, --# Position, --# Stability_Index & --# Echo_Accumulator, --# Error_Context_Rec, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Err_Type, --# LexTokenManager.State, --# Position, --# SPARK_IO.File_Sys, --# Stability_Index & --# Total_Error_Count from *, --# CommandLineData.Content, --# Error_Context_Rec, --# Err_Type, --# LexTokenManager.State, --# Position; is Error : Error_Types.StringError; File : Error_IO.File_Type; Error_Is_Justified : Boolean; begin Justifications.Check_Whether_Justified (Which_Table => Error_Context_Rec.Justifications_Data_Table, Line => Position, Kind => Flow_Message, Err_Num => Stability_Err_Type'Pos (Err_Type) + Error_Types.StabilityErrOffset, Identifiers => Null_Justification_Identifiers, Match_Found => Error_Is_Justified); if Error_Is_Justified then Inc_Total_Justified_Warnings; else Error_Context_Rec.Severity := Flow_Errs; Inc_Message_Count (Err_Type => Error_Types.UncondFlowErr); File := Error_Context_Rec.Errs; ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.StabilityErr, Pos => Position, Scope => Dictionary.GlobalScope, Error_Number => Stability_Err_Type'Pos (Err_Type) + Error_Types.StabilityErrOffset, Reference => No_Reference, Name1 => Error_Types.Names'(Name_Sort => Error_Types.StabilityIndex, Name_Sym => Dictionary.NullSymbol, Name_Str => LexTokenManager.Null_String, Pos => Index_Type'Pos (Stability_Index)), Name2 => Error_Types.NoName, Name3 => Error_Types.NoName, Echo_Str => Error); Error_Context_Rec.Errs := File; EchoErrorEntry (SPARK_IO.Standard_Output, Error); end if; end Stability_Error; ---------------------------------------------------------------------------- procedure Dependency_Error (Err_Type : in Dependency_Err_Type; Position : in LexTokenManager.Token_Position; Import_Var_Sym : in Dictionary.Symbol; Export_Var_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# in out Total_Error_Count; --# derives Conversions.State from *, --# CommandLineData.Content, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Err_Type, --# Export_Var_Sym, --# Import_Var_Sym, --# LexTokenManager.State, --# Position, --# Scope & --# Echo_Accumulator, --# Error_Context_Rec, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Err_Type, --# Export_Var_Sym, --# Import_Var_Sym, --# LexTokenManager.State, --# Position, --# Scope, --# SPARK_IO.File_Sys & --# ErrorBuffer.Buffer from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Error_Context_Rec, --# Err_Type, --# Export_Var_Sym, --# Import_Var_Sym, --# LexTokenManager.State, --# Position, --# Scope & --# Total_Error_Count from *, --# CommandLineData.Content, --# Error_Context_Rec, --# Err_Type, --# Export_Var_Sym, --# Import_Var_Sym, --# LexTokenManager.State, --# Position; is Error : Error_Types.StringError; File : Error_IO.File_Type; Error_Is_Justified : Boolean; begin case Err_Type is when Not_Used | Ineff_Init | Ineff_Local_Init | Policy_Violation => -- Unconditional error cases Justifications.Check_Whether_Justified (Which_Table 
=> Error_Context_Rec.Justifications_Data_Table, Line => Position, Kind => Flow_Message, Err_Num => Dependency_Err_Type'Pos (Err_Type) + Error_Types.UncondDependencyErrorOffset, Identifiers => Justification_Identifiers'(1 => Symbol_To_Justification_Identifier (Sym => Export_Var_Sym), 2 => Symbol_To_Justification_Identifier (Sym => Import_Var_Sym)), Match_Found => Error_Is_Justified); if Error_Is_Justified then Inc_Total_Justified_Warnings; else File := Error_Context_Rec.Errs; Error_Context_Rec.Severity := Flow_Errs; Inc_Message_Count (Err_Type => Error_Types.UncondDependencyErr); ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.UncondDependencyErr, Pos => Position, Scope => Scope, Error_Number => Dependency_Err_Type'Pos (Err_Type) + Error_Types.UncondDependencyErrorOffset, Reference => No_Reference, Name1 => Symbol_To_Name (Sym => Import_Var_Sym), Name2 => Symbol_To_Name (Sym => Export_Var_Sym), Name3 => Error_Types.NoName, Echo_Str => Error); Error_Context_Rec.Errs := File; EchoErrorEntry (SPARK_IO.Standard_Output, Error); end if; when May_Be_Used | Uninitialised | Integrity_Violation | May_Be_Integrity_Violation => -- Conditional error cases Justifications.Check_Whether_Justified (Which_Table => Error_Context_Rec.Justifications_Data_Table, Line => Position, Kind => Flow_Message, Err_Num => Dependency_Err_Type'Pos (Err_Type) + Error_Types.CondDependencyErrorOffset, Identifiers => Justification_Identifiers'(1 => Symbol_To_Justification_Identifier (Sym => Export_Var_Sym), 2 => Symbol_To_Justification_Identifier (Sym => Import_Var_Sym)), Match_Found => Error_Is_Justified); if Error_Is_Justified then Inc_Total_Justified_Warnings; else File := Error_Context_Rec.Errs; Error_Context_Rec.Severity := Flow_Warning; Inc_Message_Count (Err_Type => Error_Types.CondlDependencyErr); ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.CondlDependencyErr, Pos => Position, Scope => Scope, Error_Number => Dependency_Err_Type'Pos (Err_Type) + Error_Types.CondDependencyErrorOffset, Reference => No_Reference, Name1 => Symbol_To_Name (Sym => Import_Var_Sym), Name2 => Symbol_To_Name (Sym => Export_Var_Sym), Name3 => Error_Types.NoName, Echo_Str => Error); Error_Context_Rec.Errs := File; EchoErrorEntry (SPARK_IO.Standard_Output, Error); end if; end case; end Dependency_Error; -------------------------------------------------------------------------- procedure Usage_Error (Err_Type : in Usage_Err_Type; Position : in LexTokenManager.Token_Position; Var_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# in out Total_Error_Count; --# derives Conversions.State from *, --# CommandLineData.Content, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Err_Type, --# LexTokenManager.State, --# Position, --# Scope, --# Var_Sym & --# Echo_Accumulator, --# Error_Context_Rec, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Err_Type, --# LexTokenManager.State, --# Position, --# Scope, --# SPARK_IO.File_Sys, --# Var_Sym & --# ErrorBuffer.Buffer from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Error_Context_Rec, --# Err_Type, --# LexTokenManager.State, --# Position, --# Scope, --# Var_Sym & --# Total_Error_Count from *, --# 
CommandLineData.Content, --# Error_Context_Rec, --# Err_Type, --# LexTokenManager.State, --# Position, --# Var_Sym; is Error : Error_Types.StringError; File : Error_IO.File_Type; Error_Is_Justified : Boolean; begin Justifications.Check_Whether_Justified (Which_Table => Error_Context_Rec.Justifications_Data_Table, Line => Position, Kind => Flow_Message, Err_Num => Usage_Err_Type'Pos (Err_Type) + Error_Types.UsageErrOffset, Identifiers => Justification_Identifiers'(1 => Symbol_To_Justification_Identifier (Sym => Var_Sym), others => Null_Justification_Identifier), Match_Found => Error_Is_Justified); if Error_Is_Justified then Inc_Total_Justified_Warnings; else File := Error_Context_Rec.Errs; Error_Context_Rec.Severity := Flow_Errs; Inc_Message_Count (Err_Type => Error_Types.UncondDependencyErr); ErrorBuffer.Add (Err_File => File, Err_Type => Error_Types.UsageErr, Pos => Position, Scope => Scope, Error_Number => Usage_Err_Type'Pos (Err_Type) + Error_Types.UsageErrOffset, Reference => No_Reference, Name1 => Symbol_To_Name (Sym => Var_Sym), Name2 => Error_Types.NoName, Name3 => Error_Types.NoName, Echo_Str => Error); Error_Context_Rec.Errs := File; EchoErrorEntry (SPARK_IO.Standard_Output, Error); end if; end Usage_Error; ----------------------------------------------------------------------------- procedure Put_Error_Pointers (Listing : in SPARK_IO.File_Type; Errors : in Error_Sets) --# global in CommandLineData.Content; --# in Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Errors, --# Error_Context_Rec, --# Listing; is Err_Pos : Natural; Last_Err_Pos : Natural; Curr_Pos : Natural; Err_Num : Positive; First_One : Boolean; function Width_Of (Err_Count : Natural) return Positive is Size : Positive; begin if Err_Count < 10 then Size := 1; elsif Err_Count < 100 then Size := 2; elsif Err_Count < 1000 then Size := 3; else Size := 4; end if; return Size; end Width_Of; begin Last_Err_Pos := 0; Curr_Pos := 0; First_One := True; --# assert True; -- for RTC generation for I in Error_Set_Positions range 1 .. 
        Errors.Length loop
         --# assert True; -- for RTC generation
         if Error_Has_Position_Inline (Err_Type => Errors.Content (I).Error.ErrorType) then
            if First_One then
               Put_Spaces (File => Listing, N => Source_Line_Indent);
               First_One := False;
            end if;
            Err_Pos := Errors.Content (I).Error.Position.Start_Pos;
            if Err_Pos = 0 then
               Err_Pos := 1;
            end if;
            --# assert True; -- for RTC generation
            if Err_Pos = Last_Err_Pos then
               Put_Char (File => Listing, Item => ',');
            else
               if Curr_Pos > Err_Pos then
                  New_Line (File => Listing, Spacing => 1);
                  Move_To_Indent (Source_File => Listing, Line => Error_Context_Rec.Current_Line, Indent => Source_Line_Indent, Position => Err_Pos - 1);
               elsif Curr_Pos = 0 then
                  Move_To_Indent (Source_File => Listing, Line => Error_Context_Rec.Current_Line, Indent => 0, Position => Err_Pos - 1);
               else
                  Put_Spaces (File => Listing, N => Err_Pos - Curr_Pos);
               end if;
               Curr_Pos := Err_Pos;
               Put_Char (File => Listing, Item => '^');
            end if;
            Err_Num := Errors.Content (I).Err_Num;
            --# accept Flow, 41, "Mode-specific code";
            if CommandLineData.Content.Plain_Output then -- Expect stable condition
               Curr_Pos := Curr_Pos + 1;
            else
               Put_Integer (File => Listing, Item => Err_Num, Width => Width_Of (Err_Count => Err_Num), Base => 10);
               Curr_Pos := (Curr_Pos + Width_Of (Err_Count => Err_Num)) + 1;
            end if;
            --# end accept;
            Last_Err_Pos := Err_Pos;
         end if;
      end loop;
      --# assert True; -- for RTC generation
      if not First_One then
         New_Line (File => Listing, Spacing => 1);
      end if;
   end Put_Error_Pointers;

   procedure Put_Error_Messages (Listing : in SPARK_IO.File_Type; Errors : in Error_Sets; Start_Pos : in Natural; Accumulator : in out ErrorAccumulator.T)
   --# global in CommandLineData.Content;
   --#        in out SPARK_IO.File_Sys;
   --# derives Accumulator from *,
   --#                          Errors,
   --#                          Start_Pos &
   --#         SPARK_IO.File_Sys from *,
   --#                                Accumulator,
   --#                                CommandLineData.Content,
   --#                                Errors,
   --#                                Listing,
   --#                                Start_Pos;
   is
      The_Error      : Error_Struct;
      New_Start      : Natural;
      Started_Active : Boolean;
      Indent         : Natural;
      Line_Length    : Natural;
      Explanation    : E_Strings.T;
   begin
      Indent      := 11;
      Line_Length := Error_Line_Length;
      for I in Error_Set_Positions range 1 .. Errors.Length loop
         The_Error      := Errors.Content (I);
         Started_Active := ErrorAccumulator.Is_Active (This => Accumulator);
         if ErrorAccumulator.Is_Active (This => Accumulator) then
            ErrorAccumulator.Add (This => Accumulator, Error => The_Error, End_Pos => Line_Length, Indent => Indent, Listing => Listing);
         end if;
         if not ErrorAccumulator.Is_Active (This => Accumulator) then
            -- put a trailing blank line after compound messages.
            if Started_Active then
               New_Line (File => Listing, Spacing => 2);
            end if;
            Output_Error_Marker (File => Listing, Err_Type => The_Error.Error.ErrorType, Message_Id => The_Error.Error.MessageId, Err_Count => The_Error.Err_Num);
            if ErrorAccumulator.Is_Error_Start (The_Error => The_Error.Error) then
               -- Message is of a kind that will be "accumulated" before printing.  We don't want
               -- the explanation to appear over and over again, only once at the end of the completed
               -- message.  So we extract the explanation from the message here and pass it into
               -- ErrorAccumulator, where it is stored and appended to the error message when a later
               -- Flush operation is performed.
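               -- Sketch of the resulting behaviour (descriptive only, no additional
               -- processing is implied): the first fragment is printed by PrintLine
               -- below, ErrorAccumulator.Start_Msg stores the split-off explanation,
               -- later fragments are appended via ErrorAccumulator.Add above, and the
               -- explanation appears exactly once when the accumulator is flushed.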
               Split_String_At_Explanation (E_Str => The_Error.Error.Message, Explanation => Explanation);
               PrintLine (Listing => Listing, Start_Pos => Start_Pos, End_Pos => Line_Length, Indent => Indent, Line => The_Error.Error.Message, Add_New_Line => False, New_Start => New_Start);
               ErrorAccumulator.Start_Msg (This => Accumulator, Start_Error => The_Error, Start_Indent => New_Start, Explanation => Explanation, Line_Length => Line_Length, Indent => Indent);
               -- in rather than recalculate them in error accumulator
            else
               -- leave message text unchanged and display it
               --# accept Flow, 10, New_Start, "Returned parameter not needed in this case";
               PrintLine (Listing => Listing, Start_Pos => Start_Pos, End_Pos => Line_Length, Indent => Indent, Line => The_Error.Error.Message, Add_New_Line => False, New_Start => New_Start);
               --# end accept;
               -- Terminate message
               New_Line (File => Listing, Spacing => 1);
            end if;
         end if;
      end loop;
   end Put_Error_Messages;

   procedure Put_Error_Messages_XML (Listing : in SPARK_IO.File_Type; Errors : in Error_Sets; Start_Pos : in Natural; Accumulator : in out ErrorAccumulator.T)
   --# global in out SPARK_IO.File_Sys;
   --#        in out XMLReport.State;
   --# derives Accumulator,
   --#         XMLReport.State from *,
   --#                              Accumulator,
   --#                              Errors,
   --#                              Start_Pos &
   --#         SPARK_IO.File_Sys from *,
   --#                                Accumulator,
   --#                                Errors,
   --#                                Listing,
   --#                                Start_Pos,
   --#                                XMLReport.State;
   is
      The_Error      : Error_Struct;
      New_Start      : Natural;
      Started_Active : Boolean;
      Indent         : Natural;
      Line_Length    : Natural;
      Explanation    : E_Strings.T;
   begin
      Indent      := 0;
      Line_Length := XML_Error_Line_Length;
      for I in Error_Set_Positions range 1 .. Errors.Length loop
         The_Error               := Errors.Content (I);
         The_Error.Error.Message := SPARK_XML.Filter_String (Str => The_Error.Error.Message);
         Started_Active          := ErrorAccumulator.Is_Active (This => Accumulator);
         if ErrorAccumulator.Is_Active (This => Accumulator) then
            ErrorAccumulator.Add (This => Accumulator, Error => The_Error, End_Pos => Line_Length, Indent => Indent, Listing => Listing);
         end if;
         if not ErrorAccumulator.Is_Active (This => Accumulator) then
            if Started_Active then
               XMLReport.End_Message (Report => Listing);
               New_Line (File => Listing, Spacing => 1);
            end if;
            XMLReport.Start_Message
              (Class  => Get_Error_Class (Err_Class => The_Error.Error.ErrorType),
               Code   => Convert_Message_Id (Message_Id => The_Error.Error.MessageId, Err_Type => The_Error.Error.ErrorType),
               Line   => Integer (The_Error.Error.Position.Start_Line_No),
               Offset => The_Error.Error.Position.Start_Pos,
               Report => Listing);
            if ErrorAccumulator.Is_Error_Start (The_Error => The_Error.Error) then
               -- Message is of a kind that will be "accumulated" before printing.  We don't want
               -- the explanation to appear over and over again, only once at the end of the completed
               -- message.  So we extract the explanation from the message here and pass it into
               -- ErrorAccumulator, where it is stored and appended to the error message when a later
               -- Flush operation is performed.
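               -- Illustrative note (the concrete XML element and attribute names are
               -- assumptions, they are defined by XMLReport rather than here): each
               -- message in the report is bracketed by XMLReport.Start_Message and
               -- XMLReport.End_Message, carries its class, code, line and offset, and
               -- contains the message text after filtering by SPARK_XML.Filter_String.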
Split_String_At_Explanation (E_Str => The_Error.Error.Message, Explanation => Explanation); PrintLine (Listing => Listing, Start_Pos => Start_Pos, End_Pos => Line_Length, Indent => Indent, Line => The_Error.Error.Message, Add_New_Line => False, New_Start => New_Start); ErrorAccumulator.Start_Msg (This => Accumulator, Start_Error => The_Error, Start_Indent => New_Start, Explanation => Explanation, Line_Length => Line_Length, Indent => Indent); -- in rather than recalculate them in error accumulator else -- leave message text unchanged and display it --# accept Flow, 10, New_Start, "Returned parameter not needed in this case"; PrintLine (Listing => Listing, Start_Pos => Start_Pos, End_Pos => Line_Length, Indent => Indent, Line => The_Error.Error.Message, Add_New_Line => False, New_Start => New_Start); --# end accept; -- Terminate message XMLReport.End_Message (Report => Listing); end if; end if; end loop; end Put_Error_Messages_XML; procedure PrintErrors (Listing : in SPARK_IO.File_Type; Purpose : in Error_Types.ConversionRequestSource) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in WarningStatus.SuppressionList; --# in out Conversions.State; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# derives Conversions.State, --# Error_Context_Rec from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# LexTokenManager.State, --# Listing, --# Purpose, --# SPARK_IO.File_Sys & --# ErrorBuffer.Buffer from * & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# LexTokenManager.State, --# Listing, --# Purpose, --# WarningStatus.SuppressionList; is separate; procedure AppendErrors (Report : in SPARK_IO.File_Type; Purpose : in Error_Types.ConversionRequestSource) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in WarningStatus.SuppressionList; --# in out Conversions.State; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives Conversions.State, --# Error_Context_Rec, --# XMLReport.State from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Error_Context_Rec, --# LexTokenManager.State, --# Purpose, --# Report, --# SPARK_IO.File_Sys, --# XMLReport.State & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Error_Context_Rec, --# LexTokenManager.State, --# Purpose, --# Report, --# WarningStatus.SuppressionList, --# XMLReport.State; is separate; procedure Index_Manager_Error (S : in String; Source_File : in LexTokenManager.Lex_String; Line_No : in Positive; Col_No : in Positive; Token_String : in E_Strings.T; Is_Token_Filename : in Boolean; Is_Fatal : in Boolean) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in WarningStatus.SuppressionList; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# in out Total_Error_Count; --# derives Error_Context_Rec, --# Total_Error_Count from *, --# Is_Fatal, --# WarningStatus.SuppressionList & --# SPARK_IO.File_Sys from *, --# Col_No, --# CommandLineData.Content, --# Is_Fatal, --# Is_Token_Filename, --# LexTokenManager.State, --# Line_No, --# S, --# Source_File, --# Token_String, --# WarningStatus.SuppressionList; is Str : E_Strings.T; Tmp_Col_No : Positive; begin if not Is_Fatal and then 
WarningStatus.Is_Suppressed (The_Element => Index_Manager_Duplicates) then Inc_Suppressed_Warning_Counter (Warning_Type => Index_Manager_Duplicates); else if Is_Fatal then Inc_Message_Count (Err_Type => Error_Types.SyntaxErr); else Inc_Message_Count (Err_Type => Error_Types.WarningWithoutPosition); end if; -- Adjust column number to take account of lookahead if Col_No > 1 then Tmp_Col_No := Col_No - 1; else Tmp_Col_No := Col_No; end if; Str := LexTokenManager.Lex_String_To_String (Lex_Str => Source_File); if E_Strings.Get_Length (E_Str => Str) > 0 then if CommandLineData.Content.Plain_Output or else CommandLineData.Content.Brief then Str := FileSystem.Just_File (Fn => Str, Ext => True); end if; else Str := E_Strings.Empty_String; end if; if CommandLineData.Content.Brief then ScreenEcho.Put_ExaminerString (Str); ScreenEcho.Put_Char (':'); ScreenEcho.Put_Integer (Line_No, 0, 10); ScreenEcho.Put_Char (':'); ScreenEcho.Put_Integer (Tmp_Col_No, 0, 10); ScreenEcho.Put_String (": "); ScreenEcho.Put_String (S); else ScreenEcho.Put_String ("In index file "); ScreenEcho.Put_ExaminerString (Str); ScreenEcho.Put_String (" at line "); ScreenEcho.Put_Integer (Line_No, 0, 10); ScreenEcho.Put_String (" column "); ScreenEcho.Put_Integer (Tmp_Col_No, 0, 10); ScreenEcho.Put_String (": "); ScreenEcho.Put_String (S); end if; if E_Strings.Get_Length (E_Str => Token_String) > 0 then if Is_Token_Filename then if CommandLineData.Content.Plain_Output then Str := FileSystem.Just_File (Fn => Token_String, Ext => True); elsif CommandLineData.Content.Brief then Str := FileSystem.Just_File (Fn => Token_String, Ext => True); else Str := Token_String; end if; else Str := Token_String; end if; ScreenEcho.Put_String (": "); ScreenEcho.Put_ExaminerLine (Str); else ScreenEcho.New_Line (1); end if; end if; end Index_Manager_Error; -- exported operations concerned with error justification mechanism ------------------------------ procedure Start_Unit --# global in out Error_Context_Rec; --# derives Error_Context_Rec from *; is begin Justifications.Start_Unit (Which_Table => Error_Context_Rec.Justifications_Data_Table); end Start_Unit; procedure End_Unit --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in WarningStatus.SuppressionList; --# in out Conversions.State; --# in out Echo_Accumulator; --# in out ErrorBuffer.Buffer; --# in out Error_Context_Rec; --# in out Potential_Invalid_Value; --# in out SPARK_IO.File_Sys; --# in out Total_Error_Count; --# derives Conversions.State, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# Potential_Invalid_Value, --# SPARK_IO.File_Sys, --# Total_Error_Count from *, --# CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Echo_Accumulator, --# ErrorBuffer.Buffer, --# Error_Context_Rec, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# WarningStatus.SuppressionList; is It : Justifications.Unmatched_Justification_Iterator; begin -- First report all justifications in the unit just being ended that have failed to match -- But only if we aren't ignoring justifications if CommandLineData.Content.Justification_Option /= CommandLineData.Ignore then Justifications.First_Unmatched_Justification (It => It, Which_Table => Error_Context_Rec.Justifications_Data_Table); while not Justifications.Is_Null_Iterator (It => It) loop Semantic_Warning (Err_Num => 121, Position => Justifications.Error_Position (It => It), Id_Str => LexTokenManager.Null_String); Justifications.Next_Unmatched_Justification (It => It, 
Which_Table => Error_Context_Rec.Justifications_Data_Table); end loop; end if; -- Then discard data for now-completed unit Justifications.End_Unit (Which_Table => Error_Context_Rec.Justifications_Data_Table); end End_Unit; procedure Start_Justification (Position : in LexTokenManager.Token_Position; Line : in LexTokenManager.Line_Numbers; Kind : in Justification_Kinds; Err_Num : in Natural; Identifiers : in Justification_Identifiers; Applies_To_All : in Boolean; Explanation : in E_Strings.T; Maximum_Justifications_Reached : out Boolean) --# global in out Error_Context_Rec; --# derives Error_Context_Rec from *, --# Applies_To_All, --# Err_Num, --# Explanation, --# Identifiers, --# Kind, --# Line, --# Position & --# Maximum_Justifications_Reached from Error_Context_Rec; is begin Justifications.Start_Justification (Which_Table => Error_Context_Rec.Justifications_Data_Table, Position => Position, Line => Line, Kind => Kind, Err_Num => Err_Num, Identifiers => Identifiers, Applies_To_All => Applies_To_All, Explanation => Explanation, Maximum_Justifications_Reached => Maximum_Justifications_Reached); end Start_Justification; procedure End_Justification (Line : in LexTokenManager.Line_Numbers; Unmatched_End : out Boolean) --# global in out Error_Context_Rec; --# derives Error_Context_Rec, --# Unmatched_End from Error_Context_Rec, --# Line; is begin Justifications.End_Justification (Which_Table => Error_Context_Rec.Justifications_Data_Table, Line => Line, Unmatched_End => Unmatched_End); end End_Justification; procedure Set_File_Open_Error --# global out File_Open_Error; --# derives File_Open_Error from ; is begin File_Open_Error := True; end Set_File_Open_Error; function Get_Errors_Type return Exit_Code --# global in File_Open_Error; --# in Total_Error_Count; is Return_Value : Exit_Code := 0; begin if File_Open_Error then Return_Value := 8; elsif Total_Error_Count.Explicit_Error_Count (Syntax_Or_Semantic) /= 0 then Return_Value := 3; elsif Total_Error_Count.Explicit_Error_Count (Flow) /= 0 then Return_Value := 2; elsif Total_Error_Count.Explicit_Error_Count (Warning) /= 0 then Return_Value := 1; end if; return Return_Value; end Get_Errors_Type; begin Error_Context_Rec := Null_Error_Context; Echo_Accumulator := ErrorAccumulator.Clear; Total_Error_Count := Null_Total_Error_Counts; end ErrorHandler; spark-2012.0.deb/examiner/g-trasym.adb0000644000175000017500000000344711753202336016474 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- Run-time symbolic traceback support -- -- SPECIAL NULL VERSION FOR GCC 4.x on OSX/Darwin where this package -- is not currently supported. 
This body is only compiled on OSX/Darwin -- platforms. On all other GNAT Pro supported platforms the full, -- working version of this package is used from the standard GNAT runtime. -- -- This file has been derived from the standard version of this -- package body in the GNAT runtime library. package body GNAT.Traceback.Symbolic is ------------------------ -- Symbolic_Traceback -- ------------------------ function Symbolic_Traceback (Traceback : Tracebacks_Array) return String is begin return ""; end Symbolic_Traceback; function Symbolic_Traceback (E : Exception_Occurrence) return String is begin return ""; end Symbolic_Traceback; end GNAT.Traceback.Symbolic; spark-2012.0.deb/examiner/sem-wf_declare_annotation.adb0000644000175000017500000000511511753202336022032 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) procedure Wf_Declare_Annotation (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Task_Or_Proc : in Dictionary.Symbol; First_Seen : in Boolean; The_Heap : in out Heap.HeapRecord) is Next_Node : STree.SyntaxNode; begin -- Grammar permits the declare annotation on procedures and task types only so no need -- to check the context further. 
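-- Summary of the checks that follow (added for readability; it restates the
-- branches visible below rather than introducing new behaviour):
--   1. a procedure that already carries a declare annotation is reported as
--      semantic error 929;
--   2. a declare annotation seen when the Ravenscar profile is not selected
--      is reported as warning 4 and otherwise ignored;
--   3. otherwise the property_list child node is analysed by Wf_Property_List.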
if Dictionary.IsProcedure (Task_Or_Proc) and then not First_Seen then -- This procedure already has a declare annotation ErrorHandler.Semantic_Error (Err_Num => 929, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Dictionary.GetSimpleName (Task_Or_Proc)); elsif not CommandLineData.Ravenscar_Selected then -- Declare annotation ignored in Ada83 mode ErrorHandler.Semantic_Warning (Err_Num => 4, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); else Next_Node := Child_Node (Current_Node => Node); -- ASSUME Next_Node = property_list SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Next_Node) = SP_Symbols.property_list, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = property_list in Wf_Declare_Annotation"); Wf_Property_List (Node => Next_Node, Type_Node_Pos => LexTokenManager.Null_Token_Position, Scope => Scope, The_Owner => Task_Or_Proc, The_Heap => The_Heap); end if; end Wf_Declare_Annotation; spark-2012.0.deb/examiner/sem-wf_subprogram_declaration.adb0000644000175000017500000003015111753202336022725 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- Overview -- Checks sub-program declarations from node subprogram_declaration. These -- nodes occur only in package declarations therefore well formation of -- function_ and procedure_specifications are handled here as a special case -- rather than using the more complex and general-purpose -- wf_subprogram_specification. -- NOTE 11/6/02 -- Declarations also occur in protected types but this procedure can -- deal with those as well -------------------------------------------------------------------------------- separate (Sem) procedure Wf_Subprogram_Declaration (Node : in STree.SyntaxNode; Inherit_Node : in STree.SyntaxNode; Context_Node : in STree.SyntaxNode; Generic_Formal_Part_Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Generic_Unit : in Dictionary.Symbol; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord; Subprog_Sym : out Dictionary.Symbol) is Spec_Node : STree.SyntaxNode; Ident_Node : STree.SyntaxNode; Anno_Node : STree.SyntaxNode; Global_Node : STree.SyntaxNode; Dependency_Node : STree.SyntaxNode; Declare_Node : STree.SyntaxNode; Constraint_Node : STree.SyntaxNode; Is_Overriding : Boolean := False; begin -- Determine and record in the variable Overriding_Indicator -- if the procedure overrides a parent. -- In SPARK 2005 "not overriding Procedure ..." is equivalent -- to "Procedure ...". 
-- This differs from Ada 2005 where a procedure may override -- a parent procedure when no overriding_indicator is present. Spec_Node := Child_Node (Current_Node => Node); -- ASSUME Spec_Node = overriding_indicator OR procedure_specification OR function_specification OR -- proof_function_declaration OR entry_specification if Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.overriding_indicator then -- ASSUME Spec_Node = overriding_indicator -- ASSUME Child_Node (Current_Node => Spec_Node) = RWoverriding OR RWnot if Syntax_Node_Type (Node => Child_Node (Current_Node => Spec_Node)) = SP_Symbols.RWoverriding then -- ASSUME Child_Node (Current_Node => Spec_Node) = RWoverriding Is_Overriding := True; elsif Syntax_Node_Type (Node => Child_Node (Current_Node => Spec_Node)) /= SP_Symbols.RWnot then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child_Node (Current_Node => Spec_Node) = RWoverriding OR RWnot in Wf_Subprogram_Declaration"); end if; Spec_Node := Next_Sibling (Current_Node => Spec_Node); elsif Syntax_Node_Type (Node => Spec_Node) /= SP_Symbols.procedure_specification and then Syntax_Node_Type (Node => Spec_Node) /= SP_Symbols.function_specification and then Syntax_Node_Type (Node => Spec_Node) /= SP_Symbols.proof_function_declaration and then Syntax_Node_Type (Node => Spec_Node) /= SP_Symbols.entry_specification then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Spec_Node = overriding_indicator OR procedure_specification OR function_specification OR " & "proof_function_declaration OR entry_specification in Wf_Subprogram_Declaration"); end if; --# assert STree.Table = STree.Table~ and --# (Syntax_Node_Type (Node, STree.Table) = SP_Symbols.subprogram_declaration or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.not_overriding_subprogram_declaration or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.entry_declaration) and --# (Syntax_Node_Type (Context_Node, STree.Table) = SP_Symbols.context_clause or Context_Node = STree.NullNode) and --# (Syntax_Node_Type (Inherit_Node, STree.Table) = SP_Symbols.inherit_clause or Inherit_Node = STree.NullNode) and --# (Syntax_Node_Type (Generic_Formal_Part_Node, STree.Table) = SP_Symbols.generic_formal_part or --# Generic_Formal_Part_Node = STree.NullNode); -- ASSUME Spec_Node = procedure_specification OR function_specification OR proof_function_declaration OR entry_specification if Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.procedure_specification or else Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.function_specification or else Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.entry_specification then -- ASSUME Spec_Node = procedure_specification OR function_specification OR entry_specification Anno_Node := Next_Sibling (Current_Node => Spec_Node); -- ASSUME Anno_Node = procedure_annotation OR function_annotation SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Anno_Node) = SP_Symbols.procedure_annotation or else Syntax_Node_Type (Node => Anno_Node) = SP_Symbols.function_annotation, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Anno_Node = procedure_annotation OR function_annotation in Wf_Subprogram_Declaration"); --# accept Flow, 10, Global_Node, "Expected ineffective assignment" & --# Flow, 10, Dependency_Node, "Expected ineffective assignment" & --# Flow, 10, Declare_Node, "Expected ineffective assignment"; Get_Subprogram_Anno_Key_Nodes (Node => Anno_Node, Global_Node => Global_Node, Dependency_Node => Dependency_Node, 
Declare_Node => Declare_Node, Constraint_Node => Constraint_Node); --# end accept; elsif Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.proof_function_declaration then -- ASSUME Spec_Node = proof_function_declaration Anno_Node := STree.NullNode; Constraint_Node := Last_Sibling_Of (Start_Node => Child_Node (Current_Node => Spec_Node)); -- ASSUME Constraint_Node = function_constraint SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Constraint_Node) = SP_Symbols.function_constraint, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Constraint_Node = function_constraint in Wf_Subprogram_Declaration"); else Anno_Node := STree.NullNode; Constraint_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Spec_Node = procedure_specification OR function_specification OR " & "proof_function_declaration OR entry_specification in Wf_Subprogram_Declaration"); end if; --# assert STree.Table = STree.Table~ and --# (Syntax_Node_Type (Node, STree.Table) = SP_Symbols.subprogram_declaration or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.not_overriding_subprogram_declaration or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.entry_declaration) and --# (Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.procedure_specification or --# Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.function_specification or --# Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.proof_function_declaration or --# Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.entry_specification) and --# (Syntax_Node_Type (Anno_Node, STree.Table) = SP_Symbols.procedure_annotation or --# Syntax_Node_Type (Anno_Node, STree.Table) = SP_Symbols.function_annotation or --# Anno_Node = STree.NullNode) and --# (Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.function_constraint or --# Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.procedure_constraint) and --# (Syntax_Node_Type (Context_Node, STree.Table) = SP_Symbols.context_clause or Context_Node = STree.NullNode) and --# (Syntax_Node_Type (Inherit_Node, STree.Table) = SP_Symbols.inherit_clause or Inherit_Node = STree.NullNode) and --# (Syntax_Node_Type (Generic_Formal_Part_Node, STree.Table) = SP_Symbols.generic_formal_part or --# Generic_Formal_Part_Node = STree.NullNode); if Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.procedure_specification or else Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.function_specification or else Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.entry_specification then -- ASSUME Spec_Node = procedure_specification OR function_specification OR entry_specification Subprogram_Specification.Wf_Subprogram_Specification (Spec_Node => Spec_Node, Anno_Node => Anno_Node, Constraint_Node => Constraint_Node, Inherit_Node => Inherit_Node, Context_Node => Context_Node, Generic_Formal_Part_Node => Generic_Formal_Part_Node, Current_Scope => Current_Scope, Generic_Unit => Generic_Unit, Current_Context => Dictionary.ProgramContext, Component_Data => Component_Data, The_Heap => The_Heap, Subprog_Sym => Subprog_Sym); Ident_Node := Child_Node (Current_Node => Child_Node (Current_Node => Spec_Node)); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = identifier in Wf_Subprogram_Declaration"); Check_No_Overloading_From_Tagged_Ops (Ident_Node => Ident_Node, Subprog_Sym => Subprog_Sym, Scope => 
Current_Scope, Abstraction => Dictionary.IsAbstract, Is_Overriding => Is_Overriding); elsif Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.proof_function_declaration then -- ASSUME Spec_Node = proof_function_declaration Spec_Node := Child_Node (Current_Node => Spec_Node); -- ASSUME Spec_Node = function_specification SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.function_specification, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Spec_Node = function_specification in Wf_Subprogram_Declaration"); Subprogram_Specification.Wf_Subprogram_Specification (Spec_Node => Spec_Node, Anno_Node => Anno_Node, Constraint_Node => Constraint_Node, Inherit_Node => Inherit_Node, Context_Node => Context_Node, Generic_Formal_Part_Node => Generic_Formal_Part_Node, Current_Scope => Current_Scope, Generic_Unit => Generic_Unit, Current_Context => Dictionary.ProofContext, Component_Data => Component_Data, The_Heap => The_Heap, Subprog_Sym => Subprog_Sym); else Subprog_Sym := Dictionary.NullSymbol; end if; --# accept Flow, 33, Global_Node, "Expected to be neither referenced nor exported" & --# Flow, 33, Dependency_Node, "Expected to be neither referenced nor exported" & --# Flow, 33, Declare_Node, "Expected to be neither referenced nor exported"; end Wf_Subprogram_Declaration; spark-2012.0.deb/examiner/simplelists.ads0000644000175000017500000000531611753202336017317 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- SimpleLists -- -- Purpose: -- Provides an ADT for a bounded, unordered list of Naturals, -- duplicates permitted. The max size of a list is defined -- by ExaminerConstants.SimpleListsSize -- -- Clients: -- Used by Sem (specifically wf_proc_call) to match lists for formal -- and actual parameters. -- -- Use: -- Declare then Initialize with procedure Init. -- -- Extension: -- None planned. 
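-- Example:
--   Illustrative usage sketch only; the object names My_List, Item and Ok are
--   hypothetical and not part of this package:
--
--      My_List : SimpleLists.SimpleList;
--      Item    : Natural;
--      Ok      : Boolean;
--      ...
--      SimpleLists.Init (List => My_List);
--      SimpleLists.AddItem (Item => 3, List => My_List, Ok => Ok);
--      -- Ok is False if the list was already full.
--      SimpleLists.GetItem (List => My_List, Position => 1, Item => Item, Ok => Ok);
--      -- Ok is False if Position exceeds NumberOfItems (My_List).
--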
-------------------------------------------------------------------------------- with ExaminerConstants; --# inherit ExaminerConstants; package SimpleLists is type SimpleList is private; function NumberOfItems (List : SimpleList) return Natural; procedure Init (List : out SimpleList); --# derives List from ; --# post NumberOfItems (List) = 0; procedure AddItem (Item : in Natural; List : in out SimpleList; Ok : out Boolean); --# derives List from *, --# Item & --# Ok from List; --# post (Ok <-> NumberOfItems (List~) < ExaminerConstants.SimpleListsSize) and --# (Ok -> NumberOfItems (List) = NumberOfItems (List~) + 1); procedure GetItem (List : in SimpleList; Position : in Positive; Item : out Natural; Ok : out Boolean); --# derives Item, --# Ok from List, --# Position; --# post Ok <-> Position <= NumberOfItems (List); private subtype Index is Integer range 0 .. ExaminerConstants.SimpleListsSize; type ListArrays is array (Index) of Natural; type SimpleList is record ListArray : ListArrays; HighestOccupied : Index; end record; end SimpleLists; spark-2012.0.deb/examiner/all.wrn0000644000175000017500000000026511753202337015555 0ustar eugeneugen-- Warning control file for the Examiner declare_annotations default_loop_assertions handler_parts hidden_parts notes pragma all rep sli_generation static_expressions with_clauses spark-2012.0.deb/examiner/errorhandler-conversions-tostring-warningwithposition.adb0000644000175000017500000012656611753202336027771 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (ErrorHandler.Conversions.ToString) procedure WarningWithPosition (Err_Num : in Error_Types.NumericError; With_Explanation : in Boolean; E_Str : in out E_Strings.T) is procedure WarningWithPositionExpl (E_Str : in out E_Strings.T) --# global in Err_Num; --# derives E_Str from *, --# Err_Num; is separate; -- Note that the parameter names for this subunit are chosen to make it as easy as -- possible to auto-generate the subunit from this, its parent, file. The -- generation requires copying the case statement below, stripping out the -- current Append'Thing' statements and adding an AppendString for the -- explanatory text that is delineated by --! comments. 
procedure Append_Explanation --# global in Err_Num; --# in With_Explanation; --# in out E_Str; --# derives E_Str from *, --# Err_Num, --# With_Explanation; is Explanation_String : E_Strings.T := E_Strings.Empty_String; begin if With_Explanation then -- we need to at least look for an explanation WarningWithPositionExpl (Explanation_String); if E_Strings.Get_Length (E_Str => Explanation_String) > 0 then -- there actually is one E_Strings.Append_String (E_Str => E_Str, Str => ErrorHandler.Explanation_Prefix); E_Strings.Append_Examiner_String (E_Str1 => E_Str, E_Str2 => Explanation_String); E_Strings.Append_String (E_Str => E_Str, Str => ErrorHandler.Explanation_Postfix); end if; end if; end Append_Explanation; begin -- HTML Directives --! <"warning-"> --! <"--- Warning : "><" : "> case Err_Num.ErrorNum is when 1 => E_Strings.Append_String (E_Str => E_Str, Str => "The identifier "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is either undeclared or not visible at this point"); --! This warning will appear against an identifier in a with clause if --! it is not also present in an inherit clause. Such an identifier --! cannot be used in any non-hidden part of --! a SPARK program. The use of with without inherit is permitted to --! allow reference in hidden --! parts of the text to imported packages which are not legal SPARK. --! For example, the body of --! SPARK_IO is hidden and implements the exported operations of the --! package by use of package --! TEXT_IO. For this reason TEXT_IO must appear in the with clause of --! SPARK_IO. (warning control file keyword: with_clauses) when 2 => E_Strings.Append_String (E_Str => E_Str, Str => "Representation clause - ignored by the Examiner"); --! The significance of representation clauses cannot be assessed by the --! Examiner because --! it depends on the specific memory architecture of the target system. --! Like pragmas, --! representation clauses can change the meaning of a SPARK program and --! the warning --! highlights the need to ensure their correctness by other means. --! (warning control file --! keyword: representation_clauses) when 3 => E_Strings.Append_String (E_Str => E_Str, Str => "Pragma - ignored by the Examiner"); --! All pragmas encountered by the Examiner generate this warning. --! While many pragmas (e.g. --! pragma page) are harmless others can change a program's meaning, --! for example by causing --! two variables to share a single memory location. --! (warning control file keyword: pragma --! pragma_identifier or pragma all) when 4 => E_Strings.Append_String (E_Str => E_Str, Str => "declare annotation - ignored by the Examiner"); --! The declare annotation is ignored by the Examiner if the profile --! is not Ravenscar. (warning control file keyword: declare_annotations) when 5 => Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " contains interrupt handlers; it is important that an interrupt identifier is " & "not used by more than one handler"); --! Interrupt identifiers are implementation defined and the Examiner cannot check that values are --! used only once. Duplication can occur by declaring more than object of a single (sub)type where --! that type defines handlers. It may also occur if interrupt identifiers are set via discriminants --! and two or more actual discriminants generate the same value. --! 
(warning control file keyword: interrupt_handlers) when 6 => E_Strings.Append_String (E_Str => E_Str, Str => "Machine code insertion. Code insertions are ignored by the Examiner"); --! Machine code is inherently implementation dependent and cannot be analysed --! by the Examiner. Users are responsible for ensuring that the behaviour --! of the inserted machine code matches the annotation of the subprogram containing it. when 7 => E_Strings.Append_String (E_Str => E_Str, Str => "This identifier is an Ada2005 reserved word"); --! Such identifiers will be rejected by an Ada2005 compiler and by the SPARK --! Examiner for SPARK2005. It is recommended to rename --! such identifiers for future upward compatibility. --! (warning control file keyword: ada2005_reserved_words) when 11 => E_Strings.Append_String (E_Str => E_Str, Str => "Unnecessary others clause - case statement is already complete"); --! The others clause is non-executable because all case choices have --! already been --! covered explicitly. If the range of the case choice is altered later --! then the --! others clause may be executed with unexpected results. It is better --! to omit the --! others clause in which case any extension of the case range will result in a --! compilation error. when 12 => E_Strings.Append_String (E_Str => E_Str, Str => "Function "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is an instantiation of Unchecked_Conversion"); --! See ALRM 13.9. The use of Unchecked_Conversion can result in implementation-defined --! values being returned. The function should be used with great care. The principal --! use of Unchecked_Conversion is SPARK programs is the for the reading of external ports --! prior to performing a validity check; here the suppression of constraint checking prior --! to validation is useful. The Examiner does not assume that the value returned by --! an unchecked conversion is valid and so unprovable run-time check VCs will result if --! a suitable validity check is not carried out before the value is used. --! (warning control file keyword: unchecked_conversion) when 13 => E_Strings.Append_String (E_Str => E_Str, Str => "Function "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is an instantiation of Unchecked_Conversion returning a type for which run-time checks" & " are not generated. Users must take steps to ensure the validity of the returned value"); --! See ALRM 13.9. The use of Unchecked_Conversion can result in invalid --! values being returned. The function should be used with great care especially, as in --! this case, where the type returned does not generate Ada run-time checks nor SPARK --! run-time verification conditions. For such types, this warning is the ONLY reminder --! the Examiner generates that the generated value may have an invalid representation. --! For this reason the warning is NOT suppressed by the warning control file keyword --! unchecked_conversion. --! The principal use of Unchecked_Conversion is SPARK programs is the for the reading of external ports --! prior to performing a validity check; here the suppression of constraint checking prior --! to validation is useful. -- Warnings associated with the justification mechanism when 120 => E_Strings.Append_String (E_Str => E_Str, Str => "Unexpected unmatched 'end accept' annotation ignored"); --! 
This end accept annotation does not match any preceding start accept in this unit. when 121 => E_Strings.Append_String (E_Str => E_Str, Str => "No warning message matches this accept annotation"); --! The accept annotation is used to indicate that a particular flow error or semantic warning --! message is expected and can be justified. This error indicates that the expected message --! did not actually occur. Note that when matching any information flow error messages containing --! two variable names, the export should be placed first and the import second (the order --! in the error message may differ from this depending on the style of information flow --! error reporting selected). For example: --# accept Flow, 601, X, Y, "..."; justifies --! the message: "X may be derived from the imported value(s) of Y" or the alternative --! form: "Y may be used in the derivation of X". when 122 => E_Strings.Append_String (E_Str => E_Str, Str => "Maximum number of error or warning justifications reached, subsequent accept annotations will be ignored"); --! The number of justifications per source file is limited. If you reach this limit --! it is worth careful consideration of why the code generates so many warnings. -- end of warnings associated with justification mechanism when 169 => E_Strings.Append_String (E_Str => E_Str, Str => "Direct update of own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => ", which is an own variable of a non-enclosing package"); --! With the publication of Edition 3.1 of the SPARK Definition the --! previous restriction --! prohibiting the direct updating of own variables of non-enclosing --! packages was removed; however, the preferred use of packages as --! abstract state machines is compromised by such action which is --! therefore discouraged. (warning control file keyword: direct_updates) when 200 => E_Strings.Append_String (E_Str => E_Str, Str => "This static expression cannot be evaluated by the Examiner"); --! Issued if a static expression exceeds the internal limits of the Examiner --! because its --! value is, for example, too large to be evaluated using infinite precision --! arithmetic. No --! value will be recorded for the expression and this may limit the --! Examiner's ability to --! detect certain sorts of errors such as numeric constraints. --! (warning control file keyword: static_expressions) when 201 => E_Strings.Append_String (E_Str => E_Str, Str => "This expression cannot be evaluated statically because its value may be implementation-defined"); --! Raised, for example, when evaluating 'Size of a type --! that does not have an explicit Size representation clause. --! Attributes of implementation-defined types, such as --! Integer'Last may also be --! unknown to be Examiner if they are not specified in the --! configuration file --! (warning control file keyword: static_expressions) when 202 => E_Strings.Append_String (E_Str => E_Str, Str => "An arithmetic overflow has occurred. Constraint checks have not been performed"); --! Raised when comparing two real numbers. The examiner cannot deal --! with real numbers specified to --! such a high degree of precision. Consider reducing the precision --! of these numbers. when 300 => E_Strings.Append_String (E_Str => E_Str, Str => "VCs cannot be built for multi-dimensional array aggregates"); --! Issued when an aggregate of a multi-dimensional array is found. --! Suppresses generation --! of VCs for that subprogram. 
Can be worked round by using --! arrays of arrays. when 301 => E_Strings.Append_String (E_Str => E_Str, Str => "Called subprogram exports abstract types for which RTCs are not possible"); when 302 => E_Strings.Append_String (E_Str => E_Str, Str => "This expression may be re-ordered by a compiler. Add parentheses to remove ambiguity"); --! Issued when a potentially re-orderable expression is encountered. --! For example x := a + b + c; Whether --! intermediate sub-expression values overflow may depend on the --! order of evaluation which is --! compiler-dependent. --! Therefore, code generating this warning should be parenthesized to --! remove the ambiguity. --! e.g. x := (a + b) + c; when 303 => E_Strings.Append_String (E_Str => E_Str, Str => "Overlapping choices may not be detected"); --! Issued where choices in an array aggregate or case statement are --! outside the range --! which can be detected because of limits on the size of a table --! internal to the Examiner. when 304 => E_Strings.Append_String (E_Str => E_Str, Str => "Case statement may be incomplete"); --! Issued when the Examiner cannot determine the completeness of a --! case statement because --! the bounds of the type of the controlling expression exceed the --! size of the internal table --! used to perform the checks. when 305 => E_Strings.Append_String (E_Str => E_Str, Str => "Value too big for internal representation"); --! Issued when the Examiner cannot determine the completeness of an --! array aggregate or --! case statement because the number used in a choice exceed the size --! allowed in the internal --! table used to perform the checks. when 306 => E_Strings.Append_String (E_Str => E_Str, Str => "Aggregate may be incomplete"); --! Issued when the Examiner cannot determine the completeness of an --! array aggregate --! because its bounds exceed the size of the internal table used to --! perform the checks. when 307 => E_Strings.Append_String (E_Str => E_Str, Str => "Completeness checking incomplete: index type(s) undefined or not discrete"); --! Issued where the array index (sub)type is inappropriate: this is --! probably because there --! is an error in its definition, which will have been indicated by --! a previous error message. when 308 => E_Strings.Append_String (E_Str => E_Str, Str => "Use of equality operator with floating point type"); --! The use of this operator is discouraged in SPARK because of the --! difficulty in --! determining exactly what it means to say that two instances of a --! floating point number are --! equal when 309 => E_Strings.Append_String (E_Str => E_Str, Str => "Type conversion to own type, consider using type qualification instead"); --! Issued where a type conversion is either converting from a (sub)type --! to the same --! (sub)type or is converting between two subtypes of the same type. --! In the former case the --! type conversion may be safely removed because no constraint check --! is required; in the --! latter case the type conversion may be safely replaced by a type --! qualification which --! preserves the constraint check.(warning control file keyword: --! type_conversions) when 310 => E_Strings.Append_String (E_Str => E_Str, Str => "Use of obsolescent Ada 83 language feature"); --! Issued when a language feature defined by Ada 95 to be obsolescent is --! used. Use of such --! features is not recommended because compiler support for them cannot --! 
be guaranteed.(warning control file keyword:obsolescent_features) when 311 => E_Strings.Append_String (E_Str => E_Str, Str => "Priority pragma for "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is unavailable and has not been considered in the ceiling priority check"); when 312 => E_Strings.Append_String (E_Str => E_Str, Str => "Replacement rules cannot be built for multi-dimensional array constant "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! Issued when a VC or PF references a multi-dimensional array constant. --! Can be worked round by using arrays of arrays. when 313 => E_Strings.Append_String (E_Str => E_Str, Str => "The constant "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has semantic errors in its initializing expression or has " & "a hidden completion which prevent generation of a replacement rule"); --! Issued when replacement rules are requested for a composite constant which --! had semantic errors in its initializing expression, or is --! a deferred constant whose completion is hidden from the Examiner. --! Semantic errors must be eliminated before replacement rules can be generated. when 314 => E_Strings.Append_String (E_Str => E_Str, Str => "The constant "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has semantic errors in its type which prevent generation of rules"); --! Issued when an attempt is made to generate type deduction rules for a constant --! which has semantic errors in its type. These semantic errors --! must be eliminated before type deduction rules can be generated. when 315 => E_Strings.Append_String (E_Str => E_Str, Str => "The procedure "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " does not have a derives annotation. The analysis of this call assumes " & "that each of its exports is derived from all of its imports"); --! Issued in flow=auto mode when a function calls a procedure that does not have a --! derives annotation. In most cases this assumption will not affect --! the validity of the analysis, but if the called procedure derives --! null from an import this can have an impact. Note that functions --! are considered to have implicit derives annotations so this warning --! is not issued for calls to functions. when 320 => E_Strings.Append_String (E_Str => E_Str, Str => "The proof function "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has a non-boolean return and a return annotation. Please make sure " & "that the return is always in-type"); --! Any proof function with a non-bool return can introduce unsoundness if --! the result could overflow. For example a return of (x + 1) is not ok if --! x can take the value of integer'last. --! (warning control file keyword: proof_function_non_boolean) when 321 => E_Strings.Append_String (E_Str => E_Str, Str => "The proof function "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has an implicit return annotation. Please be careful not to introduce unsoundness"); --! Any proof function with an implicit return can easily introduce --! 
unsoundness as they do not have a body which we can check to --! expose any contradictions. For example: return B => False. --! (warning control file keyword: proof_function_implicit) when 322 => E_Strings.Append_String (E_Str => E_Str, Str => "The return refinement for proof function "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is assumed to hold as it is axiomatic and thus cannot be checked"); --! (warning control file keyword: proof_function_refinement) when 323 => E_Strings.Append_String (E_Str => E_Str, Str => "The precondition refinement for proof function "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is assumed to hold as it is axiomatic and thus cannot be checked"); --! (warning control file keyword: proof_function_refinement) when 350 => E_Strings.Append_String (E_Str => E_Str, Str => "Unexpected pragma Import. Variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not identified as an external (stream) variable"); --! The presence of a pragma Import makes it possible that the variable --! is connected --! to some external device. The behaviour of such variables is best --! captured by --! making them moded own variables (or "stream" variables). If variables --! connected --! to the external environment are treated as if they are normal program --! variables then --! misleading analysis results are inevitable. The use of pragma Import on local --! variables of subprograms is particularly deprecated. The warning --! may safely be --! disregarded if the variable is not associated with memory-mapped --! input/output --! or if the variable concerned is an own variable and the operations on it are --! suitably annotated to indicate volatile, stream-like behaviour. --! Where pragma Import is used, it is essential that the variable is properly --! initialized at the point from which it is imported. --! (warning control file keyword:imported_objects) when 351 => E_Strings.Append_String (E_Str => E_Str, Str => "Unexpected address clause. "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is a constant"); --! Great care is needed when attaching an address clause to a constant. The use --! of such a clause is safe if, and only if, the address supplied provides a valid --! value for the constant which does not vary during the execution life of the program, --! for example, mapping the constant to PROM data. --! If the address clause causes the constant to have a value which may alter, or worse, --! change dynamically under the influence of some device external to the program, then --! misleading or incorrect analysis is certain to result. --! If the intention is to create an input port of some kind, then a constant should not --! be used. Instead a moded own variable (or "stream" variables) should be used. --! 
(warning control file keyword: address_clauses) when 360 => E_Strings.Append_String (E_Str => E_Str, Str => "This pragma must have zero or one arguments"); when 361 => E_Strings.Append_String (E_Str => E_Str, Str => "This pragma must have exactly one argument"); when 362 => E_Strings.Append_String (E_Str => E_Str, Str => "This pragma must have exactly two arguments"); when 363 => E_Strings.Append_String (E_Str => E_Str, Str => "This pragma must have at least one argument"); when 364 => E_Strings.Append_String (E_Str => E_Str, Str => "This pragma must have between two and four arguments"); when 365 => E_Strings.Append_String (E_Str => E_Str, Str => "This pragma must have exactly zero arguments"); when 366 => E_Strings.Append_String (E_Str => E_Str, Str => "This pragma must have one or two arguments"); when 380 => E_Strings.Append_String (E_Str => E_Str, Str => "Casing inconsistent with declaration. Expected casing is "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! The Examiner checks the case used for an identifier against the --! declaration of that identifier and warns if they do not match. --! (warning control file keyword:style_check_casing) when 389 => E_Strings.Append_String (E_Str => E_Str, Str => "Generation of VCs for consistency of generic and instantiated subprogram constraints " & "is not yet supported. It will be supported in a future release of the Examiner"); when 390 => E_Strings.Append_String (E_Str => E_Str, Str => "This generic subprogram has semantic errors in its declaration which prevent instantiations of it"); --! Issued to inform the user that a generic subprogram instantiation --! cannot be completed because of earlier errors in the generic declaration. when 391 => E_Strings.Append_String (E_Str => E_Str, Str => "If the identifier "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " represents a package which " & "contains a task or an interrupt handler then the partition-level " & "analysis performed by the Examiner " & "will be incomplete. Such packages must be inherited as well as withed"); when 392 => E_Strings.Append_String (E_Str => E_Str, Str => "External variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " may have an invalid representation and its assignment may " & "cause a run-time exception which is outside the scope of " & "the absence of RTE proof"); --! Where values are read from external variables (i.e. variables --! connected to the external --! environment) there is no guarantee that the bit pattern read will --! be a valid representation for --! the type of the external variable. Unexpected behaviour may result --! if invalid values are used in expressions. --! If the code is compiled with Ada run-time checks enabled the --! assignment of an invalid value may (but need not) raise a --! run-time exception dependent on the compiler. --! A compiler may provide facilities to apply extended checking --! which may also raise a run-time exception if an invalid value is used. --! The SPARK Toolset does not check the validity of the external variable --! and therefore any possible exception arising from its assignment is --! outside the scope of proof of absence of RTE. --! To ensure that a run-time exception cannot occur make the type of --! the external variable such that any possible bit pattern that --! 
may be read from the external source is a valid value. --! If the desired type is such a type then the always_valid assertion --! may be applied to the external variable; otherwise use explicit tests --! to ensure it has a valid value for the desired type before converting --! to an object of the desired type. --! In SPARK 95 the 'Valid attribute (see ALRM 13.9.2) may be used to --! determine the validity of a value if it can be guaranteed that the --! assignment of an invalid value read from an external variable will --! not raise a run time exception, either by compiling the code with --! checks off or by ensuring the compiler does not apply constraint --! checks when assigning same subtype objects. --! Note that when the Examiner is used to generate run-time checks, it --! will not be possible to discharge those involving external variables --! unless one of the above steps is taken. --! More information on interfacing can be found in the INFORMED manual --! and the SPARK Proof Manual. --! (warning control file --! keyword: external_assignment) when 393 => E_Strings.Append_String (E_Str => E_Str, Str => "External variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " may have an invalid representation and is of a type for which run-time checks " & "are not generated but its assignment may cause a run-time exception. " & "Users must take steps to ensure the validity of the " & "assigned or returned value"); --! Where values are read from external variables (i.e. variables --! connected to the external --! environment) there is no guarantee that the bit pattern read will --! be a valid representation for --! the type of the external variable. Unexpected behaviour may result --! if invalid values are used in expressions. --! If the code is compiled with Ada run-time checks enabled the --! assignment of an invalid value may (but need not) raise a --! run-time exception dependent on the compiler. --! A compiler may provide facilities to apply extended checking --! which may also raise a run-time exception if an invalid value is used --! The SPARK Toolset does not check the validity of the external variable --! and therefore any possible exception arising from its assignment is --! outside the scope of proof of absence of RTE. --! Where, as in this case, the type is one for which --! Ada run-time checks need not be generated and SPARK run-time --! verification conditions are not generated, extra care is required. --! For such types, this warning is the ONLY reminder --! the Examiner generates that the external value may have an invalid --! representation. --! For this reason the warning is NOT suppressed by the warning --! control file keyword external_assignment. --! To ensure that a run-time exception cannot occur make the type of --! the external variable such that any possible bit pattern that --! may be read from the external source is a valid value. --! Explicit tests of the value may then be used to determine the --! value of an object of the desired type. --! In SPARK 95 the 'Valid attribute (see ALRM 13.9.2) may be used to --! determine the validity of a value if it can be guaranteed that the --! assignment of an invalid value read from an external variable will --! not raise a run time exception, either by compiling the code with --! checks off or by ensuring the compiler does not apply constraint --! checks when assigning same subtype objects. --! 
Boolean external variables require special care since the Examiner --! does not generate run-time checks --! for Boolean variables; use of 'Valid is essential when reading --! Boolean external variables. --! More information on interfacing can be found in the INFORMED manual --! and the SPARK Proof Manual. when 394 => E_Strings.Append_String (E_Str => E_Str, Str => "Variables of type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " cannot be initialized using the facilities of this package"); --! A variable of a private type can only be used (without generating --! a data flow error) if there is some way of --! giving it an initial value. For a limited private type only a --! procedure that has an export of that type --! and no imports of that type is suitable. For a private type either --! a procedure, function or (deferred) --! constant is required. The required facility may be placed in, or --! already available in, a public --! child package. --! (warning control file keyword: private_types) when 395 => E_Strings.Append_String (E_Str => E_Str, Str => "Variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is an external (stream) variable but does not have an address clause or a pragma import"); --! When own variables are given modes they are considered to be inputs --! from or outputs --! to the external environment. The Examiner regards them as being --! volatile (i.e. their --! values can change in ways not visible from an inspection of the --! source code). If --! a variable is declared in that way but it is actually an ordinary --! variable which is NOT --! connected to the environment then misleading analysis is inevitable. --! The Examiner --! expects to find an address clause or pragma import for variables of this kind to --! indicate that they --! are indeed memory-mapped input/output ports. This warning is issued --! if an address --! clause or pragma import is not found. when 396 => E_Strings.Append_String (E_Str => E_Str, Str => "Unexpected address clause. Variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not identified as an external (stream) variable"); --! The presence of an address clause makes it possible that the variable --! is connected --! to some external device. The behaviour of such variables is best --! captured by --! making them moded own variables (or "stream" variables). If variables --! connected --! to the external environment are treated as if they are normal program --! variables then --! misleading analysis results are inevitable. The use of address clauses --! on local --! variables of subprograms is particularly deprecated. The warning --! may safely be --! disregarded if the variable is not associated with memory-mapped --! input/output --! or if the variable concerned is an own variable and the operations on it are --! suitably annotated to indicate volatile, stream-like behaviour. --! (warning control file keyword: address_clauses) when 397 => E_Strings.Append_String (E_Str => E_Str, Str => "Variables of type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " can never be initialized before use"); --! A variable of a private type can only be used (without generating a data --! 
flow error) if there is some way of --! giving it an initial value. For a limited private type only a procedure --! that has an export of that type --! and no imports of that type is suitable. For a private type either a --! procedure, function or (deferred) --! constant is required. when 398 => E_Strings.Append_String (E_Str => E_Str, Str => "The own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " can never be initialized before use"); --! The own variable can only be used (without generating a data flow error) --! if there is some way of --! giving it an initial value. If it is --! initialized during package elaboration (or implicitly by the environment --! because it represents an --! input port) it should be placed in an "initializes" annotation. --! Otherwise there needs to be some way --! of assigning an initial value during program execution. Either the own --! variable needs to be declared --! in the visible part of the package so that a direct assignment can be --! made to it or, more usually, the --! package must declare at least one procedure for which the own variable --! is an export but not an import. --! Note that if the own variable is an abstract own variable with some --! constituents initialized --! during elaboration and some during program execution then it will never --! be possible correctly to --! initialize it; such abstract own variables must be divided into separate --! initialized and uninitialized --! components. when 399 => E_Strings.Append_String (E_Str => E_Str, Str => "The called subprogram has semantic errors in its interface " & "(parameters and/or " & "annotations) which prevent flow analysis of this call"); --! Issued to inform the user that flow analysis has been suppressed --! because of the error in the called subprogram's interface. when others => E_Strings.Append_String (E_Str => E_Str, Str => "UNKNOWN ERROR NUMBER PASSED TO WarningWithPosition"); end case; Append_Explanation; E_Strings.Append_String (E_Str => E_Str, Str => "."); end WarningWithPosition; spark-2012.0.deb/examiner/lextokenmanager-relation_algebra-string.ads0000644000175000017500000001302511753202336024723 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with LexTokenManager.Seq_Algebra; --# inherit Heap, --# LexTokenManager, --# LexTokenManager.Relation_Algebra, --# LexTokenManager.Seq_Algebra, --# RelationAlgebra, --# Statistics; package LexTokenManager.Relation_Algebra.String is type Relation is private; procedure Create_Relation (The_Heap : in out Heap.HeapRecord; R : out Relation); --# global in out Statistics.TableUsage; --# derives R, --# The_Heap from The_Heap & --# Statistics.TableUsage from *, --# The_Heap; -- Objects of type Relations utilize storage managed by the package Heap. -- The storage used by a relation R must be returned to the Heap by calling -- DisposeOfRelation before R goes out of scope. procedure Dispose_Of_Relation (The_Heap : in out Heap.HeapRecord; R : in Relation); --# derives The_Heap from *, --# R; -- InsertPair provides the basic means to build a relation. -- Each pair in the relation R represented by the row value, I, and the -- column value J may be inserted individually. The pair is only inserted -- if it is not already present. There are no duplicates. procedure Insert_Pair (The_Heap : in out Heap.HeapRecord; R : in Relation; I, J : in LexTokenManager.Lex_String); --# global in LexTokenManager.State; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# The_Heap from *, --# I, --# J, --# LexTokenManager.State, --# R, --# The_Heap; -- Creates a new set S containing all the column value -- entries for the row of a relation R specified by GivenIndex. -- If a row specified by the GivenIndex is not present in R, -- S is the empty set. procedure Row_Extraction (The_Heap : in out Heap.HeapRecord; R : in Relation; Given_Index : in LexTokenManager.Lex_String; S : out Seq_Algebra.Seq); --# global in LexTokenManager.State; --# in out Statistics.TableUsage; --# derives S from The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# Given_Index, --# LexTokenManager.State, --# R, --# The_Heap; -- AddCol adds an entire column to a relation R. -- The column index J is applied to each of the values in the set S to obtain -- a set of pairs that are added to the relation R if they are not already -- present in R. -- R and S must have been initialized using their corresponding Create ops. procedure Add_Col (The_Heap : in out Heap.HeapRecord; R : in Relation; J : in LexTokenManager.Lex_String; S : in Seq_Algebra.Seq); --# global in LexTokenManager.State; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# The_Heap from *, --# J, --# LexTokenManager.State, --# R, --# S, --# The_Heap; -- AugmentRelation performs the same operation as Sum except that it is -- performed in place and the augmented set is the final value of A. 
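-- Illustrative reading of the description above (assuming Sum, declared in the
-- parent RelationAlgebra package and not shown here, denotes the union of two
-- relations): if A and B have each been built with Create_Relation and populated
-- with Insert_Pair, then after Augment_Relation (The_Heap, A, B) the relation A
-- holds every pair that was present in either relation, while B is expected to
-- be left unchanged.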
procedure Augment_Relation (The_Heap : in out Heap.HeapRecord; A, B : in Relation); --# global in LexTokenManager.State; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# The_Heap from *, --# A, --# B, --# LexTokenManager.State, --# The_Heap; procedure Debug (The_Heap : in Heap.HeapRecord; R : in Relation); --# derives null from R, --# The_Heap; private type Relation is record The_Relation : RelationAlgebra.Relation; end record; end LexTokenManager.Relation_Algebra.String; spark-2012.0.deb/examiner/sem-range_check.adb0000644000175000017500000000264311753202336017741 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) procedure Range_Check (A_Range : in Boolean; Position : in LexTokenManager.Token_Position; Error_Found : in out Boolean) is begin if A_Range then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 341, Reference => ErrorHandler.No_Reference, Position => Position, Id_Str => LexTokenManager.Null_String); end if; end Range_Check; spark-2012.0.deb/examiner/screenecho.ads0000644000175000017500000000746111753202336017070 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
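-- Usage sketch for the relation operations declared in
-- LexTokenManager.Relation_Algebra.String above. The subprogram name is
-- hypothetical, the SPARK global/derives annotations are omitted for brevity,
-- and the Seq type is assumed to be the one exported by
-- LexTokenManager.Seq_Algebra:
--
--    with Heap, LexTokenManager.Relation_Algebra.String, LexTokenManager.Seq_Algebra;
--    procedure Build_Example
--      (The_Heap : in out Heap.HeapRecord;
--       Row, Col : in     LexTokenManager.Lex_String;
--       Columns  :    out LexTokenManager.Seq_Algebra.Seq)
--    is
--       R : LexTokenManager.Relation_Algebra.String.Relation;
--    begin
--       LexTokenManager.Relation_Algebra.String.Create_Relation (The_Heap, R);
--       LexTokenManager.Relation_Algebra.String.Insert_Pair (The_Heap, R, Row, Col);
--       LexTokenManager.Relation_Algebra.String.Row_Extraction (The_Heap, R, Row, Columns);
--       LexTokenManager.Relation_Algebra.String.Dispose_Of_Relation (The_Heap, R);
--    end Build_Example;
--
-- The relation is disposed of before it goes out of scope, as required by the
-- comment on Dispose_Of_Relation; the extracted set Columns is a new Seq whose
-- storage is then managed by the caller.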
-- --============================================================================= ------------------------------------------------------------------------- -- ScreenEcho -- -- Simple operations for printing Strings, Integers, ExaminerStrings -- and so on to SPARK_IO.Standard_Output ------------------------------------------------------------------------- with SPARK_IO, E_Strings; --# inherit E_Strings, --# SPARK_IO; package ScreenEcho is procedure Put_Char (Item : in Character); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Item; procedure Put_Integer (Item : in Integer; Width : in Natural; Base : in SPARK_IO.Number_Base); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Base, --# Item, --# Width; procedure Put_String (Item : in String); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Item; procedure Put_Line (Item : in String); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Item; procedure Put_StringWithLength (Item : in String; Stop : in Natural); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Item, --# Stop; procedure Put_LineWithLength (Item : in String; Stop : in Natural); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Item, --# Stop; procedure New_Line (Spacing : in Positive); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Spacing; procedure Set_Col (Posn : in Positive); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Posn; procedure Put_ExaminerString (Item : in E_Strings.T); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Item; procedure Put_ExaminerLine (Item : in E_Strings.T); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Item; -- Prints the given message to the screen, followed by " ..." -- indented at the 12th column, and possibly adding line-breaks at -- spaces. Used in MainLoop to generate the Examiner's analysis running -- commentary. procedure Echo (Str : in E_Strings.T); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Str; end ScreenEcho; spark-2012.0.deb/examiner/sem-compunit-wf_package_body-wf_refine.adb0000644000175000017500000000662111753202336024412 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
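-- The Wf_Refine subprogram below checks each refinement_clause of a package
-- body's own-variable refinement annotation. For reference, a refinement
-- annotation has the following shape (a sketch only, with hypothetical names):
--
--    package Stacks
--    --# own State;
--    is
--       procedure Clear;
--       --# global out State;
--       --# derives State from ;
--    end Stacks;
--
--    package body Stacks
--    --# own State is Pointer, Vector;   -- refinement with one refinement_clause
--    is
--       subtype Index_Range is Integer range 1 .. 100;
--       type Vector_Type is array (Index_Range) of Integer;
--       Pointer : Natural;
--       Vector  : Vector_Type;
--
--       procedure Clear
--       --# global out Pointer, Vector;          -- refined global annotation
--       --# derives Pointer, Vector from ;
--       is
--       begin
--          Pointer := 0;
--          Vector  := Vector_Type'(others => 0);
--       end Clear;
--    end Stacks;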
-- --============================================================================= separate (Sem.CompUnit.Wf_Package_Body) procedure Wf_Refine (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) is It : STree.Iterator; Next_Node : STree.SyntaxNode; ------------------------------------------- procedure Wf_Clause (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.refinement_clause; --# post STree.Table = STree.Table~; is separate; begin -- Wf_Refine It := Find_First_Node (Node_Kind => SP_Symbols.refinement_clause, From_Root => Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.refinement_clause and --# Next_Node = Get_Node (It) and --# STree.Table = STree.Table~; Wf_Clause (Node => Next_Node, Scope => Scope); It := STree.NextNode (It); end loop; end Wf_Refine; spark-2012.0.deb/examiner/e_strings-not_spark.ads0000644000175000017500000000207711753202336020743 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= package E_Strings.Not_SPARK is function Get_String (E_Str : E_Strings.T) return String; end E_Strings.Not_SPARK; spark-2012.0.deb/examiner/sem-create_interrupt_stream_variable.adb0000644000175000017500000001201711753202336024303 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) procedure Create_Interrupt_Stream_Variable (For_PO : in Dictionary.Symbol; The_Handler : in LexTokenManager.Lex_String; The_Stream_Variable : in LexTokenManager.Lex_String; Error_Node_Pos : in LexTokenManager.Token_Position) is Sym : Dictionary.Symbol; Type_Sym : Dictionary.Symbol; Subprog_Sym : Dictionary.Symbol; Unused_Symbol : Dictionary.Symbol; Scope : Dictionary.Scopes; Found : Boolean; It : Dictionary.Iterator; begin Type_Sym := Dictionary.GetType (For_PO); Scope := Dictionary.GetScope (For_PO); It := Dictionary.First_Visible_Subprogram (The_Package_Or_Type => Type_Sym); Found := False; -- For all the subprograms in protected type while not (Found or else Dictionary.IsNullIterator (It)) loop Subprog_Sym := Dictionary.CurrentSymbol (It); -- Is it an interrupt handler? if Dictionary.IsInterruptHandler (Subprog_Sym) then -- Does the interrupt handler name in the type match the one in the -- interrupt property? if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Dictionary.GetSimpleName (Subprog_Sym), Lex_Str2 => The_Handler) = LexTokenManager.Str_Eq then Found := True; -- Check the stream name to which it is mapped is not being used -- in this scope. Sym := Dictionary.LookupItem (Name => The_Stream_Variable, Scope => Scope, Context => Dictionary.ProofContext, Full_Package_Name => False); if Dictionary.Is_Null_Symbol (Sym) then -- Create the implicit own variable --# accept Flow, 10, Unused_Symbol, "Expected ineffective assignment"; Dictionary.Add_Own_Variable (Name => The_Stream_Variable, The_Package => Dictionary.GetOwner (For_PO), Mode => Dictionary.InMode, Is_Protected => False, Is_Interrupt_Stream => True, Comp_Unit => ContextManager.NullUnit, Declaration => Dictionary.Location'(Start_Position => LexTokenManager.Token_Position'(Start_Line_No => 0, Start_Pos => 0), End_Position => LexTokenManager.Token_Position'(Start_Line_No => 0, Start_Pos => 0)), Var_Symbol => Unused_Symbol); --# end accept; elsif not (Dictionary.IsOwnVariable (Sym) and then Dictionary.GetOwnVariableIsInterruptStream (Sym)) then -- The stream name is being used and is not an interrupt stream. Interrupt streams -- can be mapped to more than one handler. ErrorHandler.Semantic_Error2 (Err_Num => 954, Reference => ErrorHandler.No_Reference, Position => Error_Node_Pos, Id_Str1 => Dictionary.GetSimpleName (For_PO), Id_Str2 => The_Stream_Variable); end if; end if; end if; It := Dictionary.NextSymbol (It); end loop; if not Found then -- The handler name specified in the interrupt property is not an interrupt -- handler in the protected type. 
ErrorHandler.Semantic_Error_Lex1_Sym2 (Err_Num => 953, Reference => ErrorHandler.No_Reference, Position => Error_Node_Pos, Id_Str => The_Handler, Sym => For_PO, Sym2 => Type_Sym, Scope => Scope); end if; --# accept Flow, 33, Unused_Symbol, "Expected to be neither referenced nor exported"; end Create_Interrupt_Stream_Variable; spark-2012.0.deb/examiner/symbol_set.ads0000644000175000017500000000416511753202336017130 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Dictionary; with Heap; with SeqAlgebra; --# inherit Statistics, --# Dictionary, --# Heap, --# SeqAlgebra; -- This is a set of Dictionary.Symbol objects built on top of Heap -- and SeqAlgebra. package Symbol_Set is type T is private; procedure Initialise (The_Set : out T); --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage from * & --# The_Set from ; procedure Add (The_Set : in out T; Sym : in Dictionary.Symbol); --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# The_Set from *, --# Sym, --# The_Set; procedure Remove (The_Set : in out T; Sym : in Dictionary.Symbol); --# derives The_Set from *, --# Sym; function Contains (The_Set : in T; Sym : in Dictionary.Symbol) return Boolean; private type T is record The_Heap : Heap.HeapRecord; The_Symbols : SeqAlgebra.Seq; end record; end Symbol_Set; spark-2012.0.deb/examiner/sparklex-lex-hyphintro.adb0000644000175000017500000001225111753202336021363 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
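-- The HyphIntro routine below classifies source text that begins with a hyphen.
-- Assuming the default annotation character '#', the forms it distinguishes
-- include the following (a sketch with hypothetical names):
--
--    function Decrement (X : Integer) return Integer;
--    --# pre    X > Integer'First;   -- "--#" introduces an annotation
--    --# return X - 1;               -- a lone '-' inside it is the minus token
--    -- an ordinary "--" comment produces the comment token
--
-- In addition, "--# hide Some_Unit;" produces the hide_directive token, and
-- "->" within an annotation expression produces the implies token.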
-- --=============================================================================
separate (SparkLex.Lex)
procedure HyphIntro (Curr_Line : in out Line_Context; Token : out SP_Symbols.SP_Terminal) is
   Ch1, Ch2  : Character;
   Anno_Kind : Anno_Type;
begin
   -- Hyphen already recognised, but do not accept it in case it introduces the
   -- annotation symbol of an annotation which requires the end_of_annotation
   -- token to be inserted in front.
   LineManager.Lookahead_Char (Curr_Line => Curr_Line, Ch => Ch1);
   if Ch1 = '-' then
      LineManager.Lookahead_Char (Curr_Line => Curr_Line, Ch => Ch2);
      if Ch2 = CommandLineData.Content.Anno_Char then
         if Curr_Line.Anno_Context = Mid_Annotation then
            Check_Anno_Type (Curr_Line => Curr_Line, Unfinished_Anno => True, Anno_Kind => Anno_Kind);
         else
            Check_Anno_Type (Curr_Line => Curr_Line, Unfinished_Anno => False, Anno_Kind => Anno_Kind);
         end if;
         case Anno_Kind is
            when Hide_Anno =>
               if Curr_Line.Context = In_Annotation then
                  LineManager.Reject_Lookahead (Curr_Line => Curr_Line);
                  Token := SP_Symbols.annotation_end;
               else
                  LineManager.Accept_Lookahead (Curr_Line => Curr_Line);
                  Token := SP_Symbols.hide_directive;
               end if;
            when Start_Anno =>
               if Curr_Line.Context = In_Annotation then
                  LineManager.Reject_Lookahead (Curr_Line => Curr_Line);
                  Token := SP_Symbols.annotation_end;
               else
                  LineManager.Accept_Lookahead (Curr_Line => Curr_Line);
                  Token := SP_Symbols.annotation_start;
               end if;
            when Proof_Anno =>
               if Curr_Line.Context = In_Annotation then
                  LineManager.Reject_Lookahead (Curr_Line => Curr_Line);
                  Token := SP_Symbols.annotation_end;
               else
                  LineManager.Accept_Lookahead (Curr_Line => Curr_Line);
                  Token := SP_Symbols.proof_context;
               end if;
            when Other_Anno =>
               LineManager.Accept_Lookahead (Curr_Line => Curr_Line);
               -- Making the following assignment means that if the --# is
               -- followed by an identifier (i.e. we have a proof constant declaration)
               -- then the --# will be treated as beginning a proof context rather
               -- than an annotation.
Token := SP_Symbols.proof_context; when No_Anno => LineManager.Accept_Lookahead (Curr_Line => Curr_Line); Token := SP_Symbols.comment; end case; else LineManager.Reject_Lookahead (Curr_Line => Curr_Line); LineManager.Accept_Char (Curr_Line => Curr_Line); LineManager.Accept_Char (Curr_Line => Curr_Line); while (Curr_Line.Curr_Pos <= E_Strings.Get_Length (E_Str => Curr_Line.Conts) and then Ch2 /= End_Of_Text and then not Format_Effector (Ch => Ch2)) or else Ch2 = Ada.Characters.Latin_1.HT loop --# assert E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Curr_Pos > Curr_Line~.Curr_Pos and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos = Curr_Line.Curr_Pos and --# Curr_Line.Last_Token_Pos = Curr_Line~.Last_Token_Pos; LineManager.Accept_Char (Curr_Line => Curr_Line); Ch2 := E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos); end loop; Token := SP_Symbols.comment; end if; elsif Ch1 = '>' then LineManager.Accept_Lookahead (Curr_Line => Curr_Line); Token := SP_Symbols.implies; else LineManager.Reject_Lookahead (Curr_Line => Curr_Line); LineManager.Accept_Char (Curr_Line => Curr_Line); Token := SP_Symbols.minus; end if; end HyphIntro; spark-2012.0.deb/examiner/file_utils.adb0000644000175000017500000003333311753202336017065 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Calendar; with Date_Time; with CommandLineData; with E_Strings; with ScreenEcho; with Version; package body File_Utils is procedure Print_A_Header (File : in SPARK_IO.File_Type; Header_Line : in String; File_Type : in File_Types) is Len_Date_Time : constant Integer := 23; subtype Typ_Date_Time_Range is Integer range 1 .. Len_Date_Time; subtype Typ_Date_Time is String (Typ_Date_Time_Range); FDL_Comment_Open : constant Character := '{'; FDL_Comment_Close : constant Character := '}'; Extended_Date_String : E_Strings.T; Date_String : Typ_Date_Time; Star_Line : E_Strings.T; procedure Get_Current_Date_Time (Date_String : out Typ_Date_Time) --# derives Date_String from ; is --# hide Get_Current_Date_Time; -- -- FUNCTION : -- -- Returns a string indicating the current date and time as a 22 character -- string in the format 24-12-1988 12:00:00.00 -- -- OPERATION : -- -- Uses the standard package Calendar. -- Hours_In_Day : constant := Date_Time.Hours_T'Last + 1; Minutes_In_Hour : constant := Date_Time.Minutes_T'Last + 1; Seconds_In_Minute : constant := Date_Time.Seconds_T'Last + 1; Hundredths_In_Second : constant := 100; subtype Hundredths_Type is Natural range 0 .. 
Hundredths_In_Second - 1; subtype Digit is Natural range 0 .. 9; Year : Date_Time.Years_T; Month : Date_Time.Months_T; Day : Date_Time.Days_T; Hours : Date_Time.Hours_T; Minutes : Date_Time.Minutes_T; Seconds : Date_Time.Seconds_T; Hundredths : Hundredths_Type; procedure Decode_Time (Time : in Ada.Calendar.Time; Year : out Date_Time.Years_T; Month : out Date_Time.Months_T; Day : out Date_Time.Days_T; Hours : out Date_Time.Hours_T; Minutes : out Date_Time.Minutes_T; Seconds : out Date_Time.Seconds_T; Hundredths : out Hundredths_Type) is System_Time : Duration; procedure Decode_Duration (Time : in Duration; Hours : out Date_Time.Hours_T; Minutes : out Date_Time.Minutes_T; Seconds : out Date_Time.Seconds_T; Hundredths : out Hundredths_Type) is -- 0 .. 8_640_000, to ensure we cover the full range of Ada.Calendar.Day_Duration in subsequent calculations. type Time_Of_Day is range 0 .. Hours_In_Day * Minutes_In_Hour * Seconds_In_Minute * Hundredths_In_Second; System_Time : Time_Of_Day; begin System_Time := (Time_Of_Day (Time * Duration'(100.0))) mod Time_Of_Day'Last; -- limit result to 8_639_999 Hours := Date_Time.Hours_T (System_Time / (Minutes_In_Hour * Seconds_In_Minute * Hundredths_In_Second)); Minutes := Date_Time.Minutes_T (System_Time / (Seconds_In_Minute * Hundredths_In_Second) mod Minutes_In_Hour); Seconds := Date_Time.Seconds_T (System_Time / Hundredths_In_Second mod Seconds_In_Minute); Hundredths := Hundredths_Type (System_Time mod Hundredths_In_Second); end Decode_Duration; begin Ada.Calendar.Split (Date => Time, Year => Ada.Calendar.Year_Number (Year), Month => Ada.Calendar.Month_Number (Month), Day => Ada.Calendar.Day_Number (Day), Seconds => System_Time); Decode_Duration (Time => System_Time, Hours => Hours, Minutes => Minutes, Seconds => Seconds, Hundredths => Hundredths); end Decode_Time; -- Convert a single digit into a character. function Digit_Image (Value : Digit) return Character is type Digit_Images is array (Digit) of Character; Image : constant Digit_Images := Digit_Images'('0', '1', '2', '3', '4', '5', '6', '7', '8', '9'); begin return Image (Value); end Digit_Image; -- Format the given value in a zero-padded field of the -- given width. function Format_Number (Value : Natural; Width : Natural) return String is Num_Digits : constant Natural := Width; subtype Digit_Number is Positive range 1 .. Num_Digits; subtype Result_Type is String (Digit_Number); Result : Result_Type; function Get_Digit (Pos : Digit_Number) return Digit is begin return Digit ((Value / 10 ** (Digit_Number'Last - Pos)) mod 10); end Get_Digit; begin for I in Digit_Number loop Result (I) := Digit_Image (Get_Digit (I)); end loop; return Result; end Format_Number; function Date_Image (Day : Date_Time.Days_T; Month : Date_Time.Months_T; Year : Date_Time.Years_T) return String is begin return Format_Number (Day, 2) & '-' & Date_Time.Month_Names (Month) & '-' & Format_Number (Year, 4); end Date_Image; function Time_Image (Hours : Date_Time.Hours_T; Minutes : Date_Time.Minutes_T; Seconds : Date_Time.Seconds_T; Hundredths : Hundredths_Type) return String is begin return Format_Number (Hours, 2) & ':' & Format_Number (Minutes, 2) & ':' & Format_Number (Seconds, 2) & '.' 
& Format_Number (Hundredths, 2); end Time_Image; begin Decode_Time (Time => Ada.Calendar.Clock, Year => Year, Month => Month, Day => Day, Hours => Hours, Minutes => Minutes, Seconds => Seconds, Hundredths => Hundredths); Date_String := Date_Image (Day => Day, Month => Month, Year => Year) & ' ' & Time_Image (Hours => Hours, Minutes => Minutes, Seconds => Seconds, Hundredths => Hundredths); end Get_Current_Date_Time; procedure Set_Col (File : in SPARK_IO.File_Type; Posn : in Positive) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# Posn; is begin if File = SPARK_IO.Standard_Output then ScreenEcho.Set_Col (Posn); else SPARK_IO.Set_Col (File => File, Posn => Posn); end if; end Set_Col; procedure Put_Char (File : in SPARK_IO.File_Type; Item : in Character) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# Item; is begin if File = SPARK_IO.Standard_Output then ScreenEcho.Put_Char (Item); else SPARK_IO.Put_Char (File => File, Item => Item); end if; end Put_Char; procedure Put_String (File : in SPARK_IO.File_Type; Item : in String; Stop : in Natural) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# Item, --# Stop; is begin if File = SPARK_IO.Standard_Output then ScreenEcho.Put_String (Item); else SPARK_IO.Put_String (File => File, Item => Item, Stop => Stop); end if; end Put_String; procedure New_Line (File : in SPARK_IO.File_Type; Spacing : in Positive) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# Spacing; is begin if File = SPARK_IO.Standard_Output then ScreenEcho.New_Line (Spacing); else SPARK_IO.New_Line (File => File, Spacing => Spacing); end if; end New_Line; procedure Center (File : in SPARK_IO.File_Type; E_Str : in E_Strings.T) --# global in File_Type; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# E_Str, --# File, --# File_Type; is Page_Width : constant Natural := 80; begin Set_Col (File => File, Posn => (Page_Width - E_Strings.Get_Length (E_Str => E_Str)) / 2); if File_Type = Dec_File then Put_Char (File => File, Item => FDL_Comment_Open); E_Strings.Put_String (File => File, E_Str => E_Str); Put_Char (File => File, Item => FDL_Comment_Close); New_Line (File => File, Spacing => 1); elsif File_Type = Rule_File then Put_String (File => File, Item => "/*", Stop => 0); E_Strings.Put_String (File => File, E_Str => E_Str); Put_String (File => File, Item => "*/", Stop => 0); New_Line (File => File, Spacing => 1); else E_Strings.Put_String (File => File, E_Str => E_Str); New_Line (File => File, Spacing => 1); end if; end Center; procedure Output_Version_Line (File : SPARK_IO.File_Type) --# global in CommandLineData.Content; --# in File_Type; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# File, --# File_Type; is Version_Line : E_Strings.T; begin if CommandLineData.Content.Plain_Output then Version_Line := E_Strings.Copy_String (Str => "Examiner " & Version.Toolset_Distribution & " Edition"); else Version_Line := E_Strings.Copy_String (Str => "Examiner " & Version.Toolset_Banner_Line); end if; Center (File => File, E_Str => Version_Line); end Output_Version_Line; begin Extended_Date_String := E_Strings.Copy_String (Str => "DATE : "); Star_Line := E_Strings.Copy_String (Str => "*******************************************************"); Center (File => File, E_Str => Star_Line); if Header_Line'Length /= 0 then Center (File => File, E_Str => 
E_Strings.Copy_String (Str => Header_Line)); end if; Output_Version_Line (File => File); -- Display the copyright line if not in plain mode. -- Otherwise, display a blank line. if not CommandLineData.Content.Plain_Output then Center (File => File, E_Str => E_Strings.Copy_String (Str => Version.Toolset_Copyright)); else New_Line (File => File, Spacing => 1); end if; Center (File => File, E_Str => Star_Line); New_Line (File => File, Spacing => 2); if not CommandLineData.Content.Plain_Output then Get_Current_Date_Time (Date_String => Date_String); E_Strings.Append_String (E_Str => Extended_Date_String, Str => Date_String); Center (File => File, E_Str => Extended_Date_String); New_Line (File => File, Spacing => 1); end if; end Print_A_Header; end File_Utils; spark-2012.0.deb/examiner/sem-compunit-walkstatements-wf_case_choice.adb0000644000175000017500000005675311753202336025344 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
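-- The Wf_Case_Choice subprogram below distinguishes the three syntactic forms of
-- a case choice (see Case_Choice_Sorts) and checks the choices for completeness
-- and overlap against the bounds of the controlling type. A sketch of the three
-- forms, with hypothetical names:
--
--    procedure Classify (X : in Natural; Y : out Natural)
--    --# derives Y from X;
--    is
--    begin
--       case X is
--          when 0                    => Y := 0;   -- Single_Expression
--          when 1 .. 4               => Y := 1;   -- Explicit_Range
--          when Natural range 5 .. 9 => Y := 2;   -- Range_Constraint
--          when others               => Y := 3;
--       end case;
--    end Classify;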
-- --============================================================================= separate (Sem.CompUnit.WalkStatements) procedure Wf_Case_Choice (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Table : in out RefList.HashTable; Component_Data : in out ComponentManager.ComponentData) is type Case_Choice_Sorts is (Single_Expression, Explicit_Range, Range_Constraint); Case_Choice_Sort : Case_Choice_Sorts; First_Node, Second_Node : STree.SyntaxNode; First_Result, Second_Result : Exp_Record; Ref_Var : SeqAlgebra.Seq; Case_Flags : Typ_Case_Flags; Complete_ADT : CompleteCheck.T; Case_Type_Symbol : Dictionary.Symbol; Case_Type_Lower_Bound : Typ_Type_Bound; Case_Type_Upper_Bound : Typ_Type_Bound; Is_A_Range : Boolean; -- these refer to the value/range Choice_Lower_Maths_Value : Maths.Value; -- specified in the case choice Choice_Upper_Maths_Value : Maths.Value := Maths.NoValue; -- init to remove spurious flowerrs Choice_Lower_Bound, Choice_Upper_Bound : Typ_Type_Bound := Unknown_Type_Bound; -- init to remove spurious flowerrs Lower_Bound_Unknown : Boolean; Upper_Bound_Unknown : Boolean := False; Lower_Bound_Out_Of_Range : Boolean; Upper_Bound_Out_Of_Range : Boolean := False; Semantic_Errors_Found : Boolean := False; Out_Of_Range_Seen : Boolean; Overlap_Seen : CompleteCheck.TypOverlapState; Both_Choice_Bounds_Known : Boolean := False; Range_Constraint_Lower_Bound : Typ_Type_Bound; Range_Constraint_Upper_Bound : Typ_Type_Bound; ------------------------------------------------------------------------ procedure Convert_Choice_Bound (Maths_Value : in Maths.Value; Bound : out Typ_Type_Bound; Unknown_Bound : out Boolean; Bound_Out_Of_Range : out Boolean) --# derives Bound, --# Bound_Out_Of_Range, --# Unknown_Bound from Maths_Value; --# post Bound.Is_Defined <-> (not Unknown_Bound and not Bound_Out_Of_Range); is Int : Integer; Maths_Error : Maths.ErrorCode; begin if Maths.HasNoValue (Maths_Value) then Bound := Typ_Type_Bound'(Value => 0, Is_Defined => False); Unknown_Bound := True; Bound_Out_Of_Range := False; else Maths.ValueToInteger (Maths_Value, Int, Maths_Error); if Maths_Error = Maths.NoError then Bound := Typ_Type_Bound'(Value => Int, Is_Defined => True); Unknown_Bound := False; Bound_Out_Of_Range := False; else Bound := Typ_Type_Bound'(Value => 0, Is_Defined => False); Unknown_Bound := False; Bound_Out_Of_Range := True; end if; end if; end Convert_Choice_Bound; ------------------------------------------------------------------------ -- note: returns True if any of the bounds is undefined, unless the -- choice is not a range, in which case, Choice_Upper is unused function Is_Choice_In_Range (Choice_Lower : Typ_Type_Bound; Choice_Upper : Typ_Type_Bound; Choice_Is_Range : Boolean; Range_Lower : Typ_Type_Bound; Range_Upper : Typ_Type_Bound) return Boolean is Result : Boolean; begin if (Choice_Lower.Is_Defined and then Range_Lower.Is_Defined and then Choice_Lower.Value < Range_Lower.Value) or else (Choice_Lower.Is_Defined and then Range_Upper.Is_Defined and then Choice_Lower.Value > Range_Upper.Value) or else (Choice_Is_Range and then Choice_Upper.Is_Defined and then Range_Upper.Is_Defined and then Choice_Upper.Value > Range_Upper.Value) then Result := False; else Result := True; end if; return Result; end Is_Choice_In_Range; ------------------------------------------------------------------------ function Is_Range_Empty (Range_Lower : Typ_Type_Bound; Range_Upper : Typ_Type_Bound) return Boolean --# pre Range_Lower.Is_Defined and Range_Upper.Is_Defined; --# return not (Range_Lower.Value <= 
Range_Upper.Value); is begin return not (Range_Lower.Value <= Range_Upper.Value); end Is_Range_Empty; ----------------------------------------------------------------------- procedure Convert_Boolean_Maths_Value (Value : in out Maths.Value) --# derives Value from *; is begin if Value = Maths.FalseValue then Value := Maths.ZeroInteger; elsif Value = Maths.TrueValue then Value := Maths.OneInteger; end if; end Convert_Boolean_Maths_Value; begin -- Wf_Case_Choice Second_Result := Unknown_Type_Record; Case_Stack.Pop (Case_Flags => Case_Flags, Complete_ADT => Complete_ADT, Sym => Case_Type_Symbol, Lower_Bound => Case_Type_Lower_Bound, Upper_Bound => Case_Type_Upper_Bound); First_Node := Child_Node (Current_Node => Node); -- ASSUME First_Node = simple_expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => First_Node) = SP_Symbols.simple_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect First_Node = simple_expression in Wf_Case_Choice"); SeqAlgebra.CreateSeq (TheHeap, Ref_Var); Walk_Expression_P.Walk_Expression (Exp_Node => First_Node, Scope => Scope, Type_Context => Case_Type_Symbol, Context_Requires_Static => False, Ref_Var => Ref_Var, Result => First_Result, Component_Data => Component_Data, The_Heap => TheHeap); SystemErrors.RT_Assert (C => Dictionary.Is_Null_Symbol (Case_Type_Symbol) or else Dictionary.IsTypeMark (Case_Type_Symbol), Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Case_Type_Symbol to be a type in Wf_Case_Choice"); --# assert Syntax_Node_Type (First_Node, STree.Table) = SP_Symbols.simple_expression and --# STree.Table = STree.Table~ and --# Case_Stack.Stack_Is_Valid (Case_Stack.State) and --# (Complete_ADT.ActualUpperBound - Complete_ADT.LowerBound < ExaminerConstants.CompleteCheckSize) and --# (Dictionary.Is_Null_Symbol (Case_Type_Symbol) or Dictionary.IsTypeMark (Case_Type_Symbol, Dictionary.Dict)); Second_Node := Next_Sibling (Current_Node => First_Node); -- ASSUME Second_Node = range_constraint OR simple_expression OR NULL if Second_Node = STree.NullNode then -- ASSUME Second_Node = NULL Case_Choice_Sort := Single_Expression; else -- ASSUME Second_Node = range_constraint OR simple_expression if Syntax_Node_Type (Node => Second_Node) = SP_Symbols.simple_expression then -- ASSUME Second_Node = simple_expression Case_Choice_Sort := Explicit_Range; elsif Syntax_Node_Type (Node => Second_Node) = SP_Symbols.range_constraint then -- ASSUME Second_Node = range_constraint Case_Choice_Sort := Range_Constraint; else Case_Choice_Sort := Single_Expression; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Second_Node = range_constraint OR simple_expression in Wf_Case_Choice"); end if; Walk_Expression_P.Walk_Expression (Exp_Node => Second_Node, Scope => Scope, Type_Context => Case_Type_Symbol, Context_Requires_Static => False, Ref_Var => Ref_Var, Result => Second_Result, Component_Data => Component_Data, The_Heap => TheHeap); SystemErrors.RT_Assert (C => Dictionary.Is_Null_Symbol (Case_Type_Symbol) or else Dictionary.IsTypeMark (Case_Type_Symbol), Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Case_Type_Symbol to be a type in Wf_Case_Choice"); end if; --# assert Syntax_Node_Type (First_Node, STree.Table) = SP_Symbols.simple_expression and --# (Syntax_Node_Type (Second_Node, STree.Table) = SP_Symbols.simple_expression or --# Syntax_Node_Type (Second_Node, STree.Table) = SP_Symbols.range_constraint or --# Second_Node = STree.NullNode) and --# STree.Table = STree.Table~ and --# 
Case_Stack.Stack_Is_Valid (Case_Stack.State) and --# (Complete_ADT.ActualUpperBound - Complete_ADT.LowerBound < ExaminerConstants.CompleteCheckSize) and --# (Dictionary.Is_Null_Symbol (Case_Type_Symbol) or Dictionary.IsTypeMark (Case_Type_Symbol, Dictionary.Dict)); case Case_Choice_Sort is when Single_Expression => if not First_Result.Is_Static then ErrorHandler.Semantic_Error (Err_Num => 36, Reference => 1, Position => Node_Position (Node => First_Node), Id_Str => LexTokenManager.Null_String); Semantic_Errors_Found := True; end if; if not Dictionary.CompatibleTypes (Scope, First_Result.Type_Symbol, Case_Type_Symbol) then ErrorHandler.Semantic_Error (Err_Num => 38, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => First_Node), Id_Str => LexTokenManager.Null_String); Semantic_Errors_Found := True; end if; -- code to work out whether we have a single choice or a -- range and to collect the appropriate values -- note that these will be nonsense if semantic errors have been found Choice_Lower_Maths_Value := First_Result.Value; if First_Result.Is_ARange then Is_A_Range := True; Choice_Upper_Maths_Value := First_Result.Range_RHS; else Is_A_Range := False; end if; when Explicit_Range => if not Dictionary.CompatibleTypes (Scope, First_Result.Type_Symbol, Second_Result.Type_Symbol) then ErrorHandler.Semantic_Error (Err_Num => 42, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Second_Node), Id_Str => LexTokenManager.Null_String); Semantic_Errors_Found := True; elsif not Dictionary.CompatibleTypes (Scope, First_Result.Type_Symbol, Case_Type_Symbol) then ErrorHandler.Semantic_Error (Err_Num => 106, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => First_Node), Id_Str => LexTokenManager.Null_String); Semantic_Errors_Found := True; end if; if not (First_Result.Is_Static and then Second_Result.Is_Static) then ErrorHandler.Semantic_Error (Err_Num => 45, Reference => 1, Position => Node_Position (Node => First_Node), Id_Str => LexTokenManager.Null_String); Semantic_Errors_Found := True; end if; -- code to collect the appropriate values for the extent of the range -- note that these will be nonsense if semantic errors have been found Choice_Lower_Maths_Value := First_Result.Value; Choice_Upper_Maths_Value := Second_Result.Value; Is_A_Range := True; when Range_Constraint => if not Dictionary.CompatibleTypes (Scope, First_Result.Type_Symbol, Second_Result.Type_Symbol) then ErrorHandler.Semantic_Error (Err_Num => 42, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Second_Node), Id_Str => LexTokenManager.Null_String); Semantic_Errors_Found := True; elsif not Dictionary.CompatibleTypes (Scope, First_Result.Type_Symbol, Case_Type_Symbol) then ErrorHandler.Semantic_Error (Err_Num => 106, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => First_Node), Id_Str => LexTokenManager.Null_String); Semantic_Errors_Found := True; end if; if not (First_Result.Is_Constant and then First_Result.Is_ARange) then ErrorHandler.Semantic_Error (Err_Num => 95, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => First_Node), Id_Str => LexTokenManager.Null_String); Semantic_Errors_Found := True; end if; if not Second_Result.Is_Static then ErrorHandler.Semantic_Error (Err_Num => 45, Reference => 1, Position => Node_Position (Node => Second_Node), Id_Str => LexTokenManager.Null_String); Semantic_Errors_Found := True; end if; -- code to collect the appropriate values for the extent of the 
range -- note that these will be nonsense if semantic errors have been found Choice_Lower_Maths_Value := Second_Result.Value; Choice_Upper_Maths_Value := Second_Result.Range_RHS; Is_A_Range := True; -- somewhere need to check that Second_Result range is within the type -- given by First_Result end case; --# assert Syntax_Node_Type (First_Node, STree.Table) = SP_Symbols.simple_expression and --# (Syntax_Node_Type (Second_Node, STree.Table) = SP_Symbols.simple_expression or --# Syntax_Node_Type (Second_Node, STree.Table) = SP_Symbols.range_constraint or --# Second_Node = STree.NullNode) and --# STree.Table = STree.Table~ and --# Case_Stack.Stack_Is_Valid (Case_Stack.State) and --# (Complete_ADT.ActualUpperBound - Complete_ADT.LowerBound < ExaminerConstants.CompleteCheckSize) and --# (Dictionary.Is_Null_Symbol (Case_Type_Symbol) or Dictionary.IsTypeMark (Case_Type_Symbol, Dictionary.Dict)); if not Semantic_Errors_Found then Convert_Boolean_Maths_Value (Value => Choice_Lower_Maths_Value); Convert_Choice_Bound (Maths_Value => Choice_Lower_Maths_Value, Bound => Choice_Lower_Bound, Unknown_Bound => Lower_Bound_Unknown, Bound_Out_Of_Range => Lower_Bound_Out_Of_Range); if Is_A_Range then Convert_Boolean_Maths_Value (Value => Choice_Upper_Maths_Value); Convert_Choice_Bound (Maths_Value => Choice_Upper_Maths_Value, Bound => Choice_Upper_Bound, Unknown_Bound => Upper_Bound_Unknown, Bound_Out_Of_Range => Upper_Bound_Out_Of_Range); else Choice_Upper_Bound := Unknown_Type_Bound; end if; if Lower_Bound_Out_Of_Range or else (Is_A_Range and then Upper_Bound_Out_Of_Range) then Both_Choice_Bounds_Known := False; ErrorHandler.Semantic_Warning (Err_Num => 305, Position => Node_Position (Node => First_Node), Id_Str => LexTokenManager.Null_String); elsif Lower_Bound_Unknown or else (Is_A_Range and then Upper_Bound_Unknown) then Both_Choice_Bounds_Known := False; Complete_ADT.Undeterminable := True; ErrorHandler.Semantic_Warning (Err_Num => 200, Position => Node_Position (Node => First_Node), Id_Str => LexTokenManager.Null_String); else Both_Choice_Bounds_Known := True; end if; --# assert Syntax_Node_Type (First_Node, STree.Table) = SP_Symbols.simple_expression and --# (Syntax_Node_Type (Second_Node, STree.Table) = SP_Symbols.simple_expression or --# Syntax_Node_Type (Second_Node, STree.Table) = SP_Symbols.range_constraint or --# Second_Node = STree.NullNode) and --# STree.Table = STree.Table~ and --# not Semantic_Errors_Found and --# ((Both_Choice_Bounds_Known and Is_A_Range) -> (Choice_Lower_Bound.Is_Defined and Choice_Upper_Bound.Is_Defined)) and --# Case_Stack.Stack_Is_Valid (Case_Stack.State) and --# (Complete_ADT.ActualUpperBound - Complete_ADT.LowerBound < ExaminerConstants.CompleteCheckSize) and --# (Dictionary.Is_Null_Symbol (Case_Type_Symbol) or Dictionary.IsTypeMark (Case_Type_Symbol, Dictionary.Dict)); if Both_Choice_Bounds_Known then -- check the case choice lies within controlling type if not Is_Choice_In_Range (Choice_Lower => Choice_Lower_Bound, Choice_Upper => Choice_Upper_Bound, Choice_Is_Range => Is_A_Range, Range_Lower => Case_Type_Lower_Bound, Range_Upper => Case_Type_Upper_Bound) then if Case_Choice_Sort = Range_Constraint then ErrorHandler.Semantic_Error (Err_Num => 410, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Second_Node), Id_Str => LexTokenManager.Null_String); else ErrorHandler.Semantic_Error (Err_Num => 410, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => First_Node), Id_Str => LexTokenManager.Null_String); end if; 
Semantic_Errors_Found := True; elsif Is_A_Range and then Is_Range_Empty (Range_Lower => Choice_Lower_Bound, Range_Upper => Choice_Upper_Bound) then if Case_Choice_Sort = Range_Constraint then ErrorHandler.Semantic_Error (Err_Num => 409, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Second_Node), Id_Str => LexTokenManager.Null_String); else ErrorHandler.Semantic_Error (Err_Num => 409, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => First_Node), Id_Str => LexTokenManager.Null_String); end if; Semantic_Errors_Found := True; end if; --# assert Syntax_Node_Type (First_Node, STree.Table) = SP_Symbols.simple_expression and --# (Syntax_Node_Type (Second_Node, STree.Table) = SP_Symbols.simple_expression or --# Syntax_Node_Type (Second_Node, STree.Table) = SP_Symbols.range_constraint or --# Second_Node = STree.NullNode) and --# STree.Table = STree.Table~ and --# Both_Choice_Bounds_Known and --# ((not Semantic_Errors_Found and Is_A_Range) -> (Choice_Lower_Bound.Value <= Choice_Upper_Bound.Value)) and --# Case_Stack.Stack_Is_Valid (Case_Stack.State) and --# (Complete_ADT.ActualUpperBound - Complete_ADT.LowerBound < ExaminerConstants.CompleteCheckSize) and --# (Dictionary.Is_Null_Symbol (Case_Type_Symbol) or Dictionary.IsTypeMark (Case_Type_Symbol, Dictionary.Dict)); -- check the case choice lies within Range_Constraint type if Case_Choice_Sort = Range_Constraint then Get_Type_Bounds (Type_Symbol => First_Result.Type_Symbol, Lower_Bound => Range_Constraint_Lower_Bound, Upper_Bound => Range_Constraint_Upper_Bound); if not Is_Choice_In_Range (Choice_Lower => Choice_Lower_Bound, Choice_Upper => Choice_Upper_Bound, Choice_Is_Range => Is_A_Range, Range_Lower => Range_Constraint_Lower_Bound, Range_Upper => Range_Constraint_Upper_Bound) then ErrorHandler.Semantic_Error (Err_Num => 413, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Second_Node), Id_Str => LexTokenManager.Null_String); Semantic_Errors_Found := True; end if; end if; end if; end if; --# assert Syntax_Node_Type (First_Node, STree.Table) = SP_Symbols.simple_expression and --# STree.Table = STree.Table~ and --# ((not Semantic_Errors_Found and Both_Choice_Bounds_Known and Is_A_Range) -> --# (Choice_Lower_Bound.Value <= Choice_Upper_Bound.Value)) and --# Case_Stack.Stack_Is_Valid (Case_Stack.State) and --# (Complete_ADT.ActualUpperBound - Complete_ADT.LowerBound < ExaminerConstants.CompleteCheckSize) and --# (Dictionary.Is_Null_Symbol (Case_Type_Symbol) or Dictionary.IsTypeMark (Case_Type_Symbol, Dictionary.Dict)); if (not Semantic_Errors_Found) and then Both_Choice_Bounds_Known and then (Case_Flags.Check_Completeness or else Case_Flags.Check_Overlap) then if Is_A_Range then CompleteCheck.SeenRange (Complete_ADT, Choice_Lower_Bound.Value, Choice_Upper_Bound.Value, Out_Of_Range_Seen, Overlap_Seen); else CompleteCheck.SeenElement (Complete_ADT, Choice_Lower_Bound.Value, Out_Of_Range_Seen, Overlap_Seen); end if; if Out_Of_Range_Seen then Case_Flags.Out_Of_Range_Seen := True; end if; if Case_Flags.Check_Overlap and then Overlap_Seen = CompleteCheck.Overlap then ErrorHandler.Semantic_Error (Err_Num => 407, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => First_Node), Id_Str => LexTokenManager.Null_String); Semantic_Errors_Found := True; end if; end if; -- add reference variable list to RefList hash table RefList.AddRelation (Table, TheHeap, Node, Dictionary.NullSymbol, Ref_Var); --# assert STree.Table = STree.Table~ and --# 
Case_Stack.Stack_Is_Valid (Case_Stack.State) and --# (Complete_ADT.ActualUpperBound - Complete_ADT.LowerBound < ExaminerConstants.CompleteCheckSize) and --# (Dictionary.Is_Null_Symbol (Case_Type_Symbol) or Dictionary.IsTypeMark (Case_Type_Symbol, Dictionary.Dict)); if Semantic_Errors_Found then Case_Flags.Check_Completeness := False; end if; Case_Stack.Push (Case_Flags => Case_Flags, Complete_ADT => Complete_ADT, Sym => Case_Type_Symbol, Lower_Bound => Case_Type_Lower_Bound, Upper_Bound => Case_Type_Upper_Bound); end Wf_Case_Choice; spark-2012.0.deb/examiner/sem-walk_expression_p-walk_annotation_expression-down_wf_store_list.adb0000644000175000017500000000464611753202336032626 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Walk_Expression_P.Walk_Annotation_Expression) procedure Down_Wf_Store_List (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; Next_Node : out STree.SyntaxNode) is Type_Info : Sem.Exp_Record; begin -- this procedure does nothing if an array update is being processed other -- than set Next_Node to the STree.Child_Node of Node. For records it prunes -- so that the expression tree walk does not stumble into the field ident. -- Checks at down_wf_store ensure that we have either an array or record -- object at this point so we don not need any more checks of this. Exp_Stack.Pop (Item => Type_Info, Stack => E_Stack); if Dictionary.IsRecordTypeMark (Type_Info.Type_Symbol, Scope) then Next_Node := STree.NullNode; -- prune else Next_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Next_Node = store_list OR annotation_expression SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.store_list or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = store_list OR annotation_expression in Down_Wf_Store_List"); end if; Exp_Stack.Push (X => Type_Info, Stack => E_Stack); -- restore stack end Down_Wf_Store_List; spark-2012.0.deb/examiner/seqalgebra.adb0000644000175000017500000004742611753202336017044 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- SeqAlgebra -- -- Implementation Details: -- -- A Seq object represents a sequence or a set. -- Seqs are implemented as linked lists of Heap.Atoms using the storage -- facilities provided by package Heap. A linked list is a good representation -- for sequences but is a less than ideal but simple representation for -- general sets. -- -- An object which is a member of a Seq is type MemberOfSeq. [Actually, they -- are both simply numbers (Heap.Atoms), indexes of atoms on the -- heap currently being used.] -- -- The atoms of a Seq are allocated from the Heap package but only use the -- "A" values and pointers of Heap.Atoms. An "A" pointer P is the -- index of the following member of the Seq, or is Heap.IsNullPointer (P) -- to represent the end of the Seq. -- -- Note: In the current implementation of Heap.IsNullPointer (P) <=> P = 0 and -- there are instances where this implementation dependent equivalence is -- used directly in the code. Such implementation dependent code needs to be -- factored out in future revisions. -- -- A Seq object representing a sequence has an implied order and -- may have elements with duplicate values, whereas a set has no implied -- order and does not have elements with duplicate values. -- -- To represent a set within a Seq object the members of a set are kept in -- numerical order of the element value (lowest first). This allows -- detection of duplicate values and more efficient algorithms for the set -- operations. -- All is well if the operation Add member is used to add elements to a set as -- the ordering is maintained. Use AppendAfter (intended for sequences) -- and all bets are off. The ordering will be destroyed and the set operations -- will not work correctly. -- Use the AppendAfter operation to add members to a sequence but not a set. -------------------------------------------------------------------------------- package body SeqAlgebra is -------- Functions and operations for implementation of SeqAlgebra -------- function Is_Null_Seq (S : Seq) return Boolean is begin return S = Null_Seq; end Is_Null_Seq; function IsNullMember (M : MemberOfSeq) return Boolean is begin return M.Member = 0; end IsNullMember; function FirstMember (TheHeap : Heap.HeapRecord; S : Seq) return MemberOfSeq is begin return MemberOfSeq'(Member => Member_Range (Heap.APointer (TheHeap, Heap.Atom (S))), The_Seq => S); end FirstMember; function NextMember (TheHeap : Heap.HeapRecord; M : MemberOfSeq) return MemberOfSeq is begin return MemberOfSeq'(Member => Member_Range (Heap.APointer (TheHeap, Heap.Atom (M.Member))), The_Seq => M.The_Seq); end NextMember; function Value_Of_Member (The_Heap : Heap.HeapRecord; M : MemberOfSeq) return Natural is begin return Heap.AValue (The_Heap, Heap.Atom (M.Member)); end Value_Of_Member; -- Puts a marker atom (index S) onto TheHeap, -- with no members (A pointer is 0). 
procedure CreateSeq (TheHeap : in out Heap.HeapRecord; S : out Seq) is A : Heap.Atom; begin Heap.CreateAtom (TheHeap, A); S := Seq (A); end CreateSeq; function AreEqual (TheHeap : Heap.HeapRecord; S1, S2 : Seq) return Boolean is M1, M2 : MemberOfSeq; Result : Boolean; begin M1 := FirstMember (TheHeap, S1); M2 := FirstMember (TheHeap, S2); loop Result := IsNullMember (M1) and IsNullMember (M2); exit when Result; -- success, both lists finished at same time Result := not IsNullMember (M1); exit when not Result; -- fail case, first list finished before second Result := not IsNullMember (M2); exit when not Result; -- fail case, second list finished before first -- both list still have members, so compare them Result := Value_Of_Member (The_Heap => TheHeap, M => M1) = Value_Of_Member (The_Heap => TheHeap, M => M2); exit when not Result; --fail case, two different members found -- OK so far, still have values and equal so far, so compare next M1 := NextMember (TheHeap, M1); M2 := NextMember (TheHeap, M2); end loop; return Result; end AreEqual; function Length (TheHeap : Heap.HeapRecord; S : Seq) return Natural is Result : Natural := 0; M : MemberOfSeq; begin M := FirstMember (TheHeap, S); while not IsNullMember (M) loop Result := Result + 1; M := NextMember (TheHeap, M); end loop; return Result; end Length; function IsEmptySeq (TheHeap : Heap.HeapRecord; S : Seq) return Boolean is begin return IsNullMember (FirstMember (TheHeap, S)); end IsEmptySeq; -- Frees all the atoms on the heap relating to -- sequence S. procedure DisposeOfSeq (TheHeap : in out Heap.HeapRecord; S : in Seq) is M, N : MemberOfSeq; begin M := FirstMember (TheHeap, S); while not IsNullMember (M) loop N := NextMember (TheHeap, M); Heap.DisposeOfAtom (TheHeap, Heap.Atom (M.Member)); M := N; end loop; Heap.DisposeOfAtom (TheHeap, Heap.Atom (S)); end DisposeOfSeq; ---type conversion functions used in RefList------------ function SeqToNatural (S : Seq) return Natural is begin return Natural (S); end SeqToNatural; function NaturalToSeq (N : Natural) return Seq is begin return Seq (N); end NaturalToSeq; -------- Functions and operations intended for sequences -------- function BeforeFirstMember (S : Seq) return MemberOfSeq is begin return MemberOfSeq'(Member => Member_Range (S), The_Seq => S); end BeforeFirstMember; -- Note if this is used with a Seq representing a set this will -- destroy the numerical ordering of the set. procedure AppendAfter (TheHeap : in out Heap.HeapRecord; M : in out MemberOfSeq; GivenValue : in Natural) is LastAtom, NewAtom : Heap.Atom; begin LastAtom := Heap.Atom (M.Member); Heap.CreateAtom (TheHeap, NewAtom); Heap.UpdateAValue (TheHeap, NewAtom, GivenValue); Heap.UpdateAPointer (TheHeap, NewAtom, Heap.APointer (TheHeap, LastAtom)); Heap.UpdateAPointer (TheHeap, LastAtom, NewAtom); M.Member := Member_Range (NewAtom); end AppendAfter; -- Note frees the eliminated member from the heap procedure EliminateAfter (TheHeap : in out Heap.HeapRecord; M : in MemberOfSeq) is LastAtom, OldAtom : Heap.Atom; begin LastAtom := Heap.Atom (M.Member); OldAtom := Heap.APointer (TheHeap, LastAtom); Heap.UpdateAPointer (TheHeap, LastAtom, Heap.APointer (TheHeap, OldAtom)); Heap.DisposeOfAtom (TheHeap, OldAtom); end EliminateAfter; -------- Functions and operations intended for sets ---------- -- This operation uses the numerical ordering of a set. -- It might not find an element from a sequence even if the element exists. 
function IsMember (TheHeap : Heap.HeapRecord; S : Seq; GivenValue : Natural) return Boolean is MemberPresent : Boolean; N : MemberOfSeq; ValueOfN : Natural; begin MemberPresent := False; N := FirstMember (TheHeap, S); loop exit when IsNullMember (N); ValueOfN := Value_Of_Member (The_Heap => TheHeap, M => N); if ValueOfN = GivenValue then MemberPresent := True; end if; exit when ValueOfN >= GivenValue; N := NextMember (TheHeap, N); end loop; return MemberPresent; end IsMember; -- Preserves the numerical ordering of the set. -- Do not use with a Seq representing a sequence it may -- destroy the sequence order. procedure AddMember (TheHeap : in out Heap.HeapRecord; S : in Seq; GivenValue : in Natural) is MemberPresent : Boolean; M, N : MemberOfSeq; ValueOfN : Natural; begin MemberPresent := False; M := BeforeFirstMember (S); N := FirstMember (TheHeap, S); loop exit when IsNullMember (N); ValueOfN := Value_Of_Member (The_Heap => TheHeap, M => N); if ValueOfN = GivenValue then MemberPresent := True; end if; exit when ValueOfN >= GivenValue; M := N; N := NextMember (TheHeap, N); end loop; if not MemberPresent then -- we don't need the updated value of M in this case --# accept F, 10, M, "M unused here"; AppendAfter (TheHeap, M, GivenValue); --# end accept; end if; end AddMember; -- This operation uses the numerical ordering of a set. -- It might not remove an element from a sequence even if the element exists. procedure RemoveMember (TheHeap : in out Heap.HeapRecord; S : in Seq; GivenValue : in Natural) is MemberPresent : Boolean; M, N : MemberOfSeq; ValueOfN : Natural; begin MemberPresent := False; M := BeforeFirstMember (S); N := FirstMember (TheHeap, S); loop exit when IsNullMember (N); ValueOfN := Value_Of_Member (The_Heap => TheHeap, M => N); if ValueOfN = GivenValue then MemberPresent := True; end if; exit when ValueOfN >= GivenValue; M := N; N := NextMember (TheHeap, N); end loop; if MemberPresent then EliminateAfter (TheHeap, M); end if; end RemoveMember; -- Note this operation is intended for Seq objects representing sets. -- It currently also works for sequences because the comparison is commented out. -- The usefulness of the value returned is questionable -- because it will be the same if the last member has the given value -- or if no members match the given value. It would be better if 0 was -- returned if no match is found. function MemberIndex (TheHeap : Heap.HeapRecord; S : Seq; GivenValue : Natural) return Natural is N : MemberOfSeq; ValueOfN : Natural; Index : Natural; begin Index := 0; N := FirstMember (TheHeap, S); loop exit when IsNullMember (N); ValueOfN := Value_Of_Member (The_Heap => TheHeap, M => N); -- What should the value of index be if the GivenValue is not in the Seq -- exit when ValueOfN >= GivenValue; Index := Index + 1; exit when ValueOfN = GivenValue; N := NextMember (TheHeap, N); end loop; return Index; end MemberIndex; ----------- Set Operations on Seq representing Sets ----------- -- Assumes A and B are in numerical order, i.e. a set, in which case -- C will be set too. -- The operation is meaningless for a Seq representing a sequence. 
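-- Worked example, assuming both operands are ordered sets: the union of
-- A = {1, 3, 5} and B = {3, 4, 6} walks the two lists in step, appending
-- the smaller head each time and a single copy of equal heads, giving
-- C = {1, 3, 4, 5, 6}; the two trailing loops then copy whatever remains
-- of the operand that has not yet been exhausted.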
procedure Union (TheHeap : in out Heap.HeapRecord; A, B : in Seq; C : out Seq) is LocalC : Seq; M, N : MemberOfSeq; ValueofM, ValueofN : Natural; LastC : MemberOfSeq; begin CreateSeq (TheHeap, LocalC); LastC := BeforeFirstMember (LocalC); M := FirstMember (TheHeap, A); N := FirstMember (TheHeap, B); loop exit when IsNullMember (M) or IsNullMember (N); ValueofM := Value_Of_Member (The_Heap => TheHeap, M => M); ValueofN := Value_Of_Member (The_Heap => TheHeap, M => N); if ValueofM = ValueofN then AppendAfter (TheHeap, LastC, ValueofM); M := NextMember (TheHeap, M); N := NextMember (TheHeap, N); elsif ValueofM < ValueofN then AppendAfter (TheHeap, LastC, ValueofM); M := NextMember (TheHeap, M); else AppendAfter (TheHeap, LastC, ValueofN); N := NextMember (TheHeap, N); end if; end loop; loop exit when IsNullMember (M); AppendAfter (TheHeap, LastC, Value_Of_Member (The_Heap => TheHeap, M => M)); M := NextMember (TheHeap, M); end loop; loop exit when IsNullMember (N); AppendAfter (TheHeap, LastC, Value_Of_Member (The_Heap => TheHeap, M => N)); N := NextMember (TheHeap, N); end loop; C := LocalC; end Union; -- This operation uses the numerical ordering of a set. -- The operation is meaningless for a Seq representing a sequence. procedure AugmentSeq (TheHeap : in out Heap.HeapRecord; A, B : in Seq) is M, N : MemberOfSeq; ValueofM, ValueofN : Natural; LastM : MemberOfSeq; begin M := FirstMember (TheHeap, A); LastM := BeforeFirstMember (A); N := FirstMember (TheHeap, B); loop exit when IsNullMember (M) or IsNullMember (N); ValueofM := Value_Of_Member (The_Heap => TheHeap, M => M); ValueofN := Value_Of_Member (The_Heap => TheHeap, M => N); if ValueofM = ValueofN then LastM := M; M := NextMember (TheHeap, M); N := NextMember (TheHeap, N); elsif ValueofM < ValueofN then LastM := M; M := NextMember (TheHeap, M); else AppendAfter (TheHeap, LastM, ValueofN); N := NextMember (TheHeap, N); end if; end loop; loop exit when IsNullMember (N); AppendAfter (TheHeap, LastM, Value_Of_Member (The_Heap => TheHeap, M => N)); N := NextMember (TheHeap, N); end loop; end AugmentSeq; -- This operation uses the numerical ordering of a set. -- C is created as a new Seq on TheHeap. -- The operation is meaningless for a Seq representing a sequence. procedure Intersection (TheHeap : in out Heap.HeapRecord; A, B : in Seq; C : out Seq) is LocalC : Seq; LastC : MemberOfSeq; M, N : MemberOfSeq; ValueOfM, ValueOfN : Natural; begin CreateSeq (TheHeap, LocalC); LastC := BeforeFirstMember (LocalC); M := FirstMember (TheHeap, A); N := FirstMember (TheHeap, B); loop exit when IsNullMember (M) or IsNullMember (N); ValueOfM := Value_Of_Member (The_Heap => TheHeap, M => M); ValueOfN := Value_Of_Member (The_Heap => TheHeap, M => N); if ValueOfM < ValueOfN then M := NextMember (TheHeap, M); elsif ValueOfM > ValueOfN then N := NextMember (TheHeap, N); else AppendAfter (TheHeap, LastC, ValueOfM); M := NextMember (TheHeap, M); N := NextMember (TheHeap, N); end if; end loop; C := LocalC; end Intersection; -- This operation uses the numerical ordering of a set. -- C is created as a new Seq on TheHeap. -- The operation is meaningless for a Seq representing a sequence. 
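-- Worked example, assuming both operands are ordered sets: the complement
-- (set difference) of A = {1, 3, 5} and B = {3, 4} keeps the members of A
-- that have no match in B, giving C = {1, 5}; the trailing loop copies any
-- members of A that are left once B has been exhausted.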
procedure Complement (TheHeap : in out Heap.HeapRecord; A, B : in Seq; C : out Seq) is LocalC : Seq; M, N : MemberOfSeq; ValueOfM, ValueOfN : Natural; LastC : MemberOfSeq; begin CreateSeq (TheHeap, LocalC); LastC := BeforeFirstMember (LocalC); M := FirstMember (TheHeap, A); N := FirstMember (TheHeap, B); loop exit when IsNullMember (M) or IsNullMember (N); ValueOfM := Value_Of_Member (The_Heap => TheHeap, M => M); ValueOfN := Value_Of_Member (The_Heap => TheHeap, M => N); if ValueOfM = ValueOfN then M := NextMember (TheHeap, M); N := NextMember (TheHeap, N); elsif ValueOfM < ValueOfN then AppendAfter (TheHeap, LastC, ValueOfM); M := NextMember (TheHeap, M); else N := NextMember (TheHeap, N); end if; end loop; loop exit when IsNullMember (M); AppendAfter (TheHeap, LastC, Value_Of_Member (The_Heap => TheHeap, M => M)); M := NextMember (TheHeap, M); end loop; C := LocalC; end Complement; -- This operation uses the numerical ordering of a set. -- The operation is meaningless for a Seq representing a sequence. procedure Reduction (TheHeap : in out Heap.HeapRecord; A, B : in Seq) is M, N : MemberOfSeq; ValueOfM, ValueOfN : Natural; LastM : MemberOfSeq; begin M := FirstMember (TheHeap, A); LastM := BeforeFirstMember (A); N := FirstMember (TheHeap, B); loop exit when IsNullMember (M) or IsNullMember (N); ValueOfM := Value_Of_Member (The_Heap => TheHeap, M => M); ValueOfN := Value_Of_Member (The_Heap => TheHeap, M => N); if ValueOfM = ValueOfN then M := NextMember (TheHeap, M); N := NextMember (TheHeap, N); EliminateAfter (TheHeap, LastM); elsif ValueOfM < ValueOfN then LastM := M; M := NextMember (TheHeap, M); else N := NextMember (TheHeap, N); end if; end loop; end Reduction; function To_Atom (M : MemberOfSeq) return Heap.Atom is begin return Heap.Atom (M.Member); end To_Atom; end SeqAlgebra; spark-2012.0.deb/examiner/errorhandler-warningstatus.adb0000644000175000017500000006030211753202336022320 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (ErrorHandler) package body WarningStatus --# own SuppressionList is Pragma_List, --# Something_Suppressed, --# Suppressed_Element, --# Suppress_All_Pragmas; is subtype Pragma_Counts is Integer range 0 .. ExaminerConstants.MaxPragmasInWarningFile; subtype Pragma_Index is Integer range 1 .. 
ExaminerConstants.MaxPragmasInWarningFile; type Pragma_Arrays is array (Pragma_Index) of LexTokenManager.Lex_String; type Pragma_Lists is record Pragma_Array : Pragma_Arrays; Pragma_Count : Pragma_Counts; end record; type Suppressed_Element_Array is array (ErrorHandler.Warning_Elements) of Boolean; Suppressed_Element : Suppressed_Element_Array; Pragma_List : Pragma_Lists; Something_Suppressed : Boolean; Suppress_All_Pragmas : Boolean; --------------------------------------------------------------------------- function Pragma_Found (Pragma_Name : LexTokenManager.Lex_String) return Boolean --# global in LexTokenManager.State; --# in Pragma_List; is Look_At, Left, Right : Integer; Found : Boolean; Match_Res : LexTokenManager.Str_Comp_Result; function Match_Check (Pos : Integer) return LexTokenManager.Str_Comp_Result --# global in LexTokenManager.State; --# in Pragma_List; --# in Pragma_Name; is begin return LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_List.Pragma_Array (Pos), Lex_Str2 => Pragma_Name); end Match_Check; begin Left := 0; Right := Pragma_List.Pragma_Count + 1; Found := False; loop exit when (Left + 1) = Right; Look_At := (Left + Right) / 2; Match_Res := Match_Check (Look_At); if Match_Res = LexTokenManager.Str_Eq then Found := True; exit; end if; if Match_Res = LexTokenManager.Str_First then Left := Look_At; else Right := Look_At; end if; end loop; return Found; end Pragma_Found; --------------------------------------------------------------------------- procedure ReadWarningFile --# global in CommandLineData.Content; --# in out ErrorHandler.File_Open_Error; --# in out LexTokenManager.State; --# in out Pragma_List; --# in out Something_Suppressed; --# in out SPARK_IO.File_Sys; --# in out Suppressed_Element; --# in out Suppress_All_Pragmas; --# derives ErrorHandler.File_Open_Error from *, --# CommandLineData.Content, --# SPARK_IO.File_Sys & --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Suppressed_Element, --# Suppress_All_Pragmas from *, --# CommandLineData.Content, --# LexTokenManager.State, --# Pragma_List, --# SPARK_IO.File_Sys & --# Pragma_List from *, --# CommandLineData.Content, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Suppress_All_Pragmas & --# Something_Suppressed from *, --# CommandLineData.Content, --# LexTokenManager.State, --# Pragma_List, --# SPARK_IO.File_Sys, --# Suppressed_Element; is separate; --------------------------------------------------------------------------- function Is_Suppressed (The_Element : ErrorHandler.Warning_Elements) return Boolean --# global in Suppressed_Element; is begin return Suppressed_Element (The_Element); end Is_Suppressed; --------------------------------------------------------------------------- function Pragma_Is_Suppressed (Pragma_Name : LexTokenManager.Lex_String) return Boolean --# global in LexTokenManager.State; --# in Pragma_List; --# in Suppress_All_Pragmas; is Result : Boolean; begin Result := Suppress_All_Pragmas; if not Result then Result := Pragma_Found (Pragma_Name => Pragma_Name); end if; return Result; end Pragma_Is_Suppressed; --------------------------------------------------------------------------- function Get_Description (Item : in ErrorHandler.Warning_Elements; P1 : in String; P2 : in String) return E_Strings.T is Tmp_String, Result : E_Strings.T; Posn : E_Strings.Lengths; begin -- The messages are parmeterised for different message formats. -- The substring %%1 is substituted with the string P1 and -- the substring %%2 is substituted with the string P2. 
-- Some message formats require a simple "s£ character to represent a -- plural whereas others require the string "(s)". In one case in one -- format has a plural whereas another does not. -- This complex scheme is to maintain compatibility with an earlier -- message scheme. case Item is when ErrorHandler.Pragmas => Tmp_String := E_Strings.Copy_String (Str => "Pragma%%1"); when ErrorHandler.Hidden_Parts => Tmp_String := E_Strings.Copy_String (Str => "Hidden part%%1"); when ErrorHandler.Handler_Parts => Tmp_String := E_Strings.Copy_String (Str => "Hidden exception handler part%%1"); when ErrorHandler.Representation_Clauses => Tmp_String := E_Strings.Copy_String (Str => "Representation clause%%1"); when ErrorHandler.Direct_Updates => Tmp_String := E_Strings.Copy_String (Str => "Direct update%%1 of own variable(s) of non-enclosing package%%1"); when ErrorHandler.With_Clauses => Tmp_String := E_Strings.Copy_String (Str => "With clause%%1 lacking a supporting inherit"); when ErrorHandler.Static_Expressions => Tmp_String := E_Strings.Copy_String (Str => "Static expression%%1 too complex for Examiner"); when ErrorHandler.Style_Check_Casing => Tmp_String := E_Strings.Copy_String (Str => "Style check casing"); when ErrorHandler.Unused_Variables => Tmp_String := E_Strings.Copy_String (Str => "Variable%%1 declared but not used"); when ErrorHandler.Constant_Variables => Tmp_String := E_Strings.Copy_String (Str => "Variable%%1 used as constants"); when ErrorHandler.Type_Conversions => Tmp_String := E_Strings.Copy_String (Str => "Unnecessary type conversion%%1"); when ErrorHandler.SLI_Generation => Tmp_String := E_Strings.Copy_String (Str => "Stop SLI generation"); when ErrorHandler.Index_Manager_Duplicates => Tmp_String := E_Strings.Copy_String (Str => "Duplicate entry in index files"); when ErrorHandler.Others_Clauses => Tmp_String := E_Strings.Copy_String (Str => "Unnecessary others clause%%1"); when ErrorHandler.Imported_Objects => Tmp_String := E_Strings.Copy_String (Str => "Use%%1 of pragma Import on objects"); when ErrorHandler.Unexpected_Address_Clauses => Tmp_String := E_Strings.Copy_String (Str => "Unexpected address clause%%1"); when ErrorHandler.Main_Program_Precondition => Tmp_String := E_Strings.Copy_String (Str => "Precondition on main program"); when ErrorHandler.Proof_Function_Non_Boolean => Tmp_String := E_Strings.Copy_String (Str => "Non-boolean proof functions"); when ErrorHandler.Proof_Function_Implicit => Tmp_String := E_Strings.Copy_String (Str => "Proof functions with implicit return"); when ErrorHandler.Proof_Function_Refinement => Tmp_String := E_Strings.Copy_String (Str => "Refined proof functions"); when ErrorHandler.Expression_Reordering => Tmp_String := E_Strings.Copy_String (Str => "Reordering of expressions"); when ErrorHandler.Notes => Tmp_String := E_Strings.Copy_String (Str => "Note%%1"); when ErrorHandler.Unuseable_Private_Types => Tmp_String := E_Strings.Copy_String (Str => "Private type%%1 lacking method of initialization"); when ErrorHandler.External_Variable_Assignment => Tmp_String := E_Strings.Copy_String (Str => "Assignment%%2 or return%%2 of external variables"); when ErrorHandler.Declare_Annotations => Tmp_String := E_Strings.Copy_String (Str => "Declare annotations in non Ravenscar programs"); when ErrorHandler.Interrupt_Handlers => Tmp_String := E_Strings.Copy_String (Str => "Protected objects that include interrupt handlers"); when ErrorHandler.Unchecked_Conversion => Tmp_String := E_Strings.Copy_String (Str => "Use%%1 of instantiations of 
Unchecked_Conversion"); when ErrorHandler.Ada2005_Reserved_Words => Tmp_String := E_Strings.Copy_String (Str => "Use%%1 of Ada2005 reserved words"); when ErrorHandler.Obsolescent_Features => Tmp_String := E_Strings.Copy_String (Str => "Use%%1 of obsolete feature from Ada83 in SPARK 95 mode"); when ErrorHandler.Default_Loop_Assertions => Tmp_String := E_Strings.Copy_String (Str => "Generation of default loop assertions"); when ErrorHandler.Real_RTCs => Tmp_String := E_Strings.Copy_String (Str => "Generation of RTCs on real numbers"); end case; Posn := 0; Result := E_Strings.Empty_String; while Posn < E_Strings.Get_Length (E_Str => Tmp_String) loop Posn := Posn + 1; if Posn + 2 <= E_Strings.Get_Length (E_Str => Tmp_String) and then E_Strings.Get_Element (E_Str => Tmp_String, Pos => Posn) = '%' and then E_Strings.Get_Element (E_Str => Tmp_String, Pos => Posn + 1) = '%' and then (E_Strings.Get_Element (E_Str => Tmp_String, Pos => Posn + 2) = '1' or else E_Strings.Get_Element (E_Str => Tmp_String, Pos => Posn + 2) = '2') then if E_Strings.Get_Element (E_Str => Tmp_String, Pos => Posn + 2) = '1' then E_Strings.Append_String (E_Str => Result, Str => P1); else E_Strings.Append_String (E_Str => Result, Str => P2); end if; Posn := Posn + 2; else E_Strings.Append_Char (E_Str => Result, Ch => E_Strings.Get_Element (E_Str => Tmp_String, Pos => Posn)); end if; end loop; return Result; end Get_Description; --------------------------------------------------------------------------- procedure Output_Warning_List (To_File : in SPARK_IO.File_Type) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in Pragma_List; --# in Something_Suppressed; --# in Suppressed_Element; --# in Suppress_All_Pragmas; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# LexTokenManager.State, --# Pragma_List, --# Something_Suppressed, --# Suppressed_Element, --# Suppress_All_Pragmas, --# To_File, --# XMLReport.State & --# XMLReport.State from *, --# CommandLineData.Content, --# Pragma_List, --# Something_Suppressed, --# Suppressed_Element, --# Suppress_All_Pragmas; is Description : E_Strings.T; procedure Put_Pragmas --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in Pragma_List; --# in Suppressed_Element; --# in Suppress_All_Pragmas; --# in To_File; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# LexTokenManager.State, --# Pragma_List, --# Suppressed_Element, --# Suppress_All_Pragmas, --# To_File, --# XMLReport.State & --# XMLReport.State from *, --# CommandLineData.Content, --# Pragma_List, --# Suppressed_Element, --# Suppress_All_Pragmas; is Wrap : constant Integer := 72; Margin : constant Integer := 14; Column : Integer; Punct : Character; Pragma_Str : E_Strings.T; procedure Put_Pragma_Name (Str : LexTokenManager.Lex_String) --# global in LexTokenManager.State; --# in To_File; --# in out Column; --# in out Punct; --# in out SPARK_IO.File_Sys; --# derives Column from *, --# LexTokenManager.State, --# Str & --# Punct from & --# SPARK_IO.File_Sys from *, --# Column, --# LexTokenManager.State, --# Punct, --# Str, --# To_File; is Result : E_Strings.T; begin Result := LexTokenManager.Lex_String_To_String (Lex_Str => Str); SPARK_IO.Put_Char (To_File, Punct); SPARK_IO.Put_Char (To_File, ' '); Punct := ','; Column := Column + 2; if Column + E_Strings.Get_Length (E_Str => Result) > Wrap then SPARK_IO.New_Line (To_File, 1); 
SPARK_IO.Put_String (To_File, " ", 0); Column := Margin; end if; E_Strings.Put_String (File => To_File, E_Str => Result); Column := Column + E_Strings.Get_Length (E_Str => Result); end Put_Pragma_Name; function Get_Pragma_Name (Str : LexTokenManager.Lex_String) return E_Strings.T --# global in LexTokenManager.State; is begin return LexTokenManager.Lex_String_To_String (Lex_Str => Str); end Get_Pragma_Name; begin --Put_Pragmas if CommandLineData.Content.XML then if Suppress_All_Pragmas then Pragma_Str := E_Strings.Copy_String (Str => "all"); XMLReport.Suppressed_Pragma (Item => Pragma_Str); E_Strings.Put_String (File => To_File, E_Str => Pragma_Str); elsif Suppressed_Element (ErrorHandler.Pragmas) then for I in Integer range 1 .. Pragma_List.Pragma_Count loop Pragma_Str := Get_Pragma_Name (Str => Pragma_List.Pragma_Array (I)); XMLReport.Suppressed_Pragma (Item => Pragma_Str); E_Strings.Put_String (File => To_File, E_Str => Pragma_Str); end loop; end if; else if Suppress_All_Pragmas then SPARK_IO.Put_Line (To_File, " All pragmas", 0); elsif Suppressed_Element (ErrorHandler.Pragmas) then Column := Margin; Punct := ':'; SPARK_IO.Put_String (File => To_File, Item => " ", Stop => 0); E_Strings.Put_String (File => To_File, E_Str => Get_Description (Item => ErrorHandler.Pragmas, P1 => "s", P2 => "")); for I in Integer range 1 .. Pragma_List.Pragma_Count loop Put_Pragma_Name (Str => Pragma_List.Pragma_Array (I)); end loop; end if; SPARK_IO.New_Line (To_File, 1); end if; end Put_Pragmas; begin --Output_Warning_List if CommandLineData.Content.XML then XMLReport.Start_Section (Section => XMLReport.S_Warnings_Config, Report => To_File); if Something_Suppressed then for I in ErrorHandler.Warning_Elements range ErrorHandler.Hidden_Parts .. ErrorHandler.Warning_Elements'Last loop if Suppressed_Element (I) then Description := Get_Description (Item => I, P1 => "s", P2 => ""); XMLReport.Suppressed (Item => Description); E_Strings.Put_String (File => To_File, E_Str => Description); end if; end loop; Put_Pragmas; end if; XMLReport.End_Section (Section => XMLReport.S_Warnings_Config, Report => To_File); else SPARK_IO.New_Line (To_File, 2); if Something_Suppressed then SPARK_IO.Put_Line (To_File, "Summary warning reporting selected for:", 0); for I in ErrorHandler.Warning_Elements range ErrorHandler.Hidden_Parts .. 
ErrorHandler.Warning_Elements'Last loop if Suppressed_Element (I) then SPARK_IO.Put_String (File => To_File, Item => " ", Stop => 0); E_Strings.Put_Line (File => To_File, E_Str => Get_Description (Item => I, P1 => "s", P2 => "")); end if; end loop; Put_Pragmas; else SPARK_IO.Put_Line (To_File, "Full warning reporting selected", 0); end if; end if; end Output_Warning_List; --------------------------------------------------------------------------- procedure Report_Suppressed_Warnings (To_File : in SPARK_IO.File_Type; Counter : in ErrorHandler.Counters) --# global in Something_Suppressed; --# in Suppressed_Element; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Counter, --# Something_Suppressed, --# Suppressed_Element, --# To_File; is Indent : constant Integer := 6; Total_Warnings : Integer; Severe_Warning : Boolean := False; Tmp_String : E_Strings.T; procedure Put_Count (Count : in Integer; Width : in Integer) --# global in To_File; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Count, --# To_File, --# Width; is begin SPARK_IO.Put_Integer (To_File, Count, Width, 10); end Put_Count; begin --Report_Suppressed_Warnings if Something_Suppressed then Total_Warnings := 0; for I in ErrorHandler.Warning_Elements loop Total_Warnings := Total_Warnings + Integer (Counter (I)); end loop; if Total_Warnings = 0 then SPARK_IO.Put_Line (To_File, "No summarized warnings", 0); SPARK_IO.New_Line (To_File, 1); else Put_Count (Count => Total_Warnings, Width => 0); SPARK_IO.Put_Line (To_File, " summarized warning(s), comprising:", 0); for I in ErrorHandler.Warning_Elements loop if Suppressed_Element (I) and then Counter (I) > 0 then Put_Count (Count => Integer (Counter (I)), Width => Indent); SPARK_IO.Put_Char (File => To_File, Item => ' '); Tmp_String := Get_Description (Item => I, P1 => "(s)", P2 => "(s)"); if E_Strings.Get_Length (E_Str => Tmp_String) > 0 then Tmp_String := E_Strings.Lower_Case_Char (E_Str => Tmp_String, Pos => 1); end if; E_Strings.Put_String (File => To_File, E_Str => Tmp_String); if I in ErrorHandler.Severe_Warnings then SPARK_IO.Put_Char (To_File, '*'); Severe_Warning := True; end if; SPARK_IO.New_Line (To_File, 1); end if; end loop; if Severe_Warning then SPARK_IO.Put_Line (To_File, "(*Note: the above warnings may affect the validity of the analysis.)", 0); end if; SPARK_IO.New_Line (To_File, 1); end if; end if; end Report_Suppressed_Warnings; --------------------------------------------------------------------------- begin Suppressed_Element := Suppressed_Element_Array'(others => False); Pragma_List.Pragma_Count := 0; --will cause flow error Suppress_All_Pragmas := False; Something_Suppressed := False; --# accept Flow, 32, Pragma_List.Pragma_Array, "Init. is partial but effective." & --# Flow, 31, Pragma_List.Pragma_Array, "Init. is partial but effective." & --# Flow, 602, Pragma_List, Pragma_List.Pragma_Array, "Init. 
is partial but effective."; end WarningStatus; spark-2012.0.deb/examiner/errorhandler.idx0000644000175000017500000000355511753202337017457 0ustar eugeneugenerrorhandler specification is in errorhandler.ads errorhandler body is in errorhandler.adb errorhandler.warningstatus subunit is in errorhandler-warningstatus.adb errorhandler.errorbuffer subunit is in errorhandler-errorbuffer.adb errorhandler.conversions subunit is in errorhandler-conversions.adb errorhandler.conversions.tostring subunit is in errorhandler-conversions-tostring.adb errorhandler.conversions.tostring.semanticerr subunit is in errorhandler-conversions-tostring-semanticerr.adb errorhandler.conversions.tostring.uncondflowerr subunit is in errorhandler-conversions-tostring-uncondflowerr.adb errorhandler.conversions.tostring.condlflowerr subunit is in errorhandler-conversions-tostring-condlflowerr.adb errorhandler.conversions.tostring.unconddependency subunit is in errorhandler-conversions-tostring-unconddependency.adb errorhandler.conversions.tostring.condldependency subunit is in errorhandler-conversions-tostring-condldependency.adb errorhandler.conversions.tostring.depsemanticerr subunit is in errorhandler-conversions-tostring-depsemanticerr.adb errorhandler.conversions.tostring.warningwithposition subunit is in errorhandler-conversions-tostring-warningwithposition.adb errorhandler.conversions.tostring.warningwithoutposition subunit is in errorhandler-conversions-tostring-warningwithoutposition.adb errorhandler.conversions.tostring.controlflowerror subunit is in errorhandler-conversions-tostring-controlflowerror.adb errorhandler.conversions.tostring.ineffectivestatement subunit is in errorhandler-conversions-tostring-ineffectivestatement.adb errorhandler.conversions.tostring.stabilityerror subunit is in errorhandler-conversions-tostring-stabilityerror.adb errorhandler.conversions.tostring.usageerror subunit is in errorhandler-conversions-tostring-usageerror.adb errorhandler.conversions.tostring.note subunit is in errorhandler-conversions-tostring-note.adb spark-2012.0.deb/examiner/sem-walk_expression_p-dispose_of_name_list.adb0000644000175000017500000000235111753202336025423 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Walk_Expression_P) procedure Dispose_Of_Name_List (List : in out Lists.List; Heap_Param : in out Lists.List_Heap) is begin Lists.Delete_List (Heap => Heap_Param, The_List => List); end Dispose_Of_Name_List; spark-2012.0.deb/examiner/cstacks.adb0000644000175000017500000001071511753202336016360 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= package body CStacks is -------------------------------------------------------------------------- -- Reresentation of a Cell Stack -- -- A Stack is a singly linked list of Cells. In each Cell: -- -- A_Ptr field links the stack together -- C_Ptr field points to the data Cell of each stack element -------------------------------------------------------------------------- function IsEmpty (S : Stack) return Boolean is begin return Cells.Is_Null_Cell (Cells.Cell (S)); end IsEmpty; -------------------------------------------------------------------------- function Top (Heap : Cells.Heap_Record; S : Stack) return Cells.Cell is begin return Cells.Get_C_Ptr (Heap, Cells.Cell (S)); end Top; -------------------------------------------------------------------------- procedure CreateStack (S : out Stack) is begin S := Stack (Cells.Null_Cell); end CreateStack; -------------------------------------------------------------------------- procedure Pop (Heap : in out Cells.Heap_Record; S : in out Stack) is OldTopCell : Cells.Cell; begin OldTopCell := Cells.Cell (S); S := Stack (Cells.Get_A_Ptr (Heap, OldTopCell)); Cells.Dispose_Of_Cell (Heap, OldTopCell); end Pop; -------------------------------------------------------------------------- -- New procedure (shorthand for Top followed by Pop) procedure PopOff (Heap : in out Cells.Heap_Record; S : in out Stack; C : out Cells.Cell) is begin C := Top (Heap, S); Pop (Heap, S); end PopOff; -------------------------------------------------------------------------- procedure Push (Heap : in out Cells.Heap_Record; CellName : in Cells.Cell; S : in out Stack) is NewTopCell : Cells.Cell; begin Cells.Create_Cell (Heap, NewTopCell); Cells.Set_A_Ptr (Heap, NewTopCell, Cells.Cell (S)); Cells.Set_C_Ptr (Heap, NewTopCell, CellName); S := Stack (NewTopCell); end Push; -------------------------------------------------------------------------- function NonDestructivePop (Heap : Cells.Heap_Record; S : Stack) return Stack is begin return Stack (Cells.Get_A_Ptr (Heap, Cells.Cell (S))); end NonDestructivePop; -------------------------------------------------------------------------- function FindAggregateCell (Heap : 
Cells.Heap_Record; S : Stack) return Cells.Cell is Ptr : Cells.Cell; Kind : Cells.Cell_Kind; begin Ptr := Cells.Cell (S); loop Kind := Cells.Get_Kind (Heap, Cells.Get_C_Ptr (Heap, Ptr)); exit when Kind = Cell_Storage.Incomplete_Aggregate; Ptr := Cells.Get_A_Ptr (Heap, Ptr); end loop; Ptr := Cells.Get_C_Ptr (Heap, Ptr); return Ptr; end FindAggregateCell; -------------------------------------------------------------------------- function FindAggregateCounter (Heap : Cells.Heap_Record; S : Stack) return Cells.Cell is Ptr : Cells.Cell; begin Ptr := Cells.Cell (S); loop exit when Cells.Get_Kind (Heap, Cells.Get_C_Ptr (Heap, Ptr)) = Cell_Storage.Aggregate_Counter; Ptr := Cells.Get_A_Ptr (Heap, Ptr); end loop; Ptr := Cells.Get_C_Ptr (Heap, Ptr); return Ptr; end FindAggregateCounter; end CStacks; spark-2012.0.deb/examiner/dictionary-getanyprefixneeded.adb0000644000175000017500000001617211753202336022745 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with SystemErrors, LexTokenStacks; separate (Dictionary) function GetAnyPrefixNeeded (Sym : Symbol; Scope : Scopes; Separator : String) return E_Strings.T is Prefix : E_Strings.T; Declared_Scope, Current_Scope : Scopes; Sym_Local : Symbol; function Is_Abstract_Proof_Function_In_Local_Scope (Sym : Symbol) return Boolean --# global in Dict; is begin return RawDict.GetSymbolDiscriminant (Sym) = ImplicitProofFunctionSymbol and then RawDict.Get_Subprogram_Implicit_Proof_Function (The_Subprogram => RawDict.GetImplicitProofFunctionAdaFunction (Sym), Abstraction => IsAbstract) = Sym and then RawDict.Get_Subprogram_Implicit_Proof_Function (The_Subprogram => RawDict.GetImplicitProofFunctionAdaFunction (Sym), Abstraction => IsRefined) /= NullSymbol; end Is_Abstract_Proof_Function_In_Local_Scope; begin -- GetAnyPrefixNeeded if (RawDict.GetSymbolDiscriminant (Sym) = Type_Symbol and then RawDict.Get_Type_Info_Ref (Item => Sym) = Get_Unknown_Type_Mark) -- GAA External or else Sym = NullSymbol or else (RawDict.GetSymbolDiscriminant (Sym) = Variable_Symbol and then RawDict.Get_Variable_Info_Ref (Item => Sym) = Get_Null_Variable) then -- GAA External Prefix := E_Strings.Empty_String; elsif RawDict.GetSymbolDiscriminant (Sym) = Subprogram_Symbol and then RawDict.Get_Subprogram_Info_Ref (Item => Sym) = Dict.Subprograms.Unchecked_Conversion then -- GAA External -- special handling for Unchecked_Conversion which is a strange beast - -- child subprogram in 95 and 2005/KCG, library-level subprogram in 83 case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK95_Onwards => Prefix := E_Strings.Copy_String (Str => "Ada"); when CommandLineData.SPARK83 => Prefix := E_Strings.Empty_String; end case; else -- if the symbol is of an access type we dereference it first (access will get put back -- in GenerateSimpleName if RawDict.GetSymbolDiscriminant (Sym) = Type_Symbol and then Is_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Sym)) -- GAA External and then RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Sym)) = -- GAA External Access_Type_Item then Sym_Local := RawDict.Get_Type_Symbol (RawDict.Get_Type_Accesses (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Sym))); -- GAA External else Sym_Local := Sym; end if; -- if the symbol is an implicit in stream associated with a protected own variable -- then the prefix is that applicable to the associated own variable if RawDict.GetSymbolDiscriminant (Sym_Local) = Implicit_In_Stream_Symbol then Sym_Local := RawDict.Get_Variable_Symbol (Get_Own_Variable_Of_Protected_Implicit_In_Stream (The_Implicit_In_Stream => RawDict.Get_Implicit_In_Stream_Info_Ref (Item => Sym_Local))); end if; -- if the symbol is a special on loop entry variable we use the original variable -- to determine whether a prefix is needed if RawDict.GetSymbolDiscriminant (Sym_Local) = LoopEntryVariableSymbol then Sym_Local := RawDict.GetLoopEntryVariableOriginalVar (Sym_Local); end if; -- call to getmost enclosing object added so that when looking for -- prefix of X.F we do so in terms of X (the X.F part of the full name -- will be added by GenerateSimpleName Declared_Scope := GetScope (GetMostEnclosingObject (Sym_Local)); if IsPredefinedScope (Declared_Scope) or else RawDict.GetSymbolDiscriminant (Sym_Local) = Quantified_Variable_Symbol then -- no prefix needed Prefix := E_Strings.Empty_String; else -- prefix may be needed so do search Current_Scope := Scope; loop exit 
when Current_Scope = Declared_Scope; exit when IsGlobalScope (Current_Scope); exit when IsPredefinedScope (Current_Scope); if RawDict.GetSymbolDiscriminant (GetRegion (Current_Scope)) = Package_Symbol then Current_Scope := Set_Visibility (The_Visibility => Privat, The_Unit => GetRegion (Current_Scope)); end if; exit when Current_Scope = Declared_Scope; if RawDict.GetSymbolDiscriminant (GetRegion (Current_Scope)) = Package_Symbol then Current_Scope := Set_Visibility (The_Visibility => Visible, The_Unit => GetRegion (Current_Scope)); exit; end if; Current_Scope := GetEnclosingScope (Current_Scope); end loop; -- The new error handling scheme intrduced by SEPR 1883 defers generation of error strings until after -- an entire source file has been processed. This causes ones spceial-case problem: a package spec and -- its body are in a single file, the package declares an own variable, there is an error in the -- package spec involvignthe own variable, the own variable is declared in the package body. This -- combination mens that we might have Current_Scope equal to VISIBLE part of P and Declared_Scope -- being LOCAL scope of P. This then triggers unwanted generation of a prefix. -- In all other situations, this "inversion" can't happen. We don't want a prefix if both declaration -- and viewpoint are in the same REGION. The test that the SCOPES be identical is too strict. So we -- replace "if Current_Scope = Declared_Scope" with: if GetRegion (Current_Scope) = GetRegion (Declared_Scope) and then not Is_Abstract_Proof_Function_In_Local_Scope (Sym => Sym_Local) then Prefix := E_Strings.Empty_String; else Prefix := GenerateSimpleName (Item => GetRegion (GetScope (GetMostEnclosingObject (Sym_Local))), Separator => Separator); end if; end if; end if; return Prefix; end GetAnyPrefixNeeded; spark-2012.0.deb/examiner/requiredunits.ads0000644000175000017500000000365711753202336017660 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with ContextManager, STree, LexTokenLists; use type STree.SyntaxNode; --# inherit ContextManager, --# LexTokenLists, --# SP_Symbols, --# STree, --# SystemErrors; package RequiredUnits is procedure Init (Top_Node : in STree.SyntaxNode; Inherit_Ptr : out STree.SyntaxNode; Unit_Type : out ContextManager.UnitTypes; Unit_Name : out LexTokenLists.Lists); --# global in STree.Table; --# derives Inherit_Ptr, --# Unit_Name, --# Unit_Type from STree.Table, --# Top_Node; procedure Next (Inherit_Ptr : in out STree.SyntaxNode; Required_Unit : out LexTokenLists.Lists; Found : out Boolean); --# global in STree.Table; --# derives Found from Inherit_Ptr & --# Inherit_Ptr, --# Required_Unit from Inherit_Ptr, --# STree.Table; end RequiredUnits; spark-2012.0.deb/examiner/sparkhtml.adb0000644000175000017500000056370411753202336016745 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SPARK_IO; with Ada.Characters.Handling; with FileSystem; with ScreenEcho; with E_Strings; with CommandLineData; with ContextManager.Ops; use type SPARK_IO.File_Status; use type FileSystem.Typ_File_Spec_Status; package body SparkHTML is Error_Reference_Filename : constant String := "errors.htm"; -- These states are used in the parsing of the report file. 
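-- For example, parsing presumably begins in Report_Just_Started, moves to
-- Report_Banner_Started when the first banner line of the report is seen,
-- and advances through the later values as successive sections of the
-- report file are recognised, ending in Report_End_Of_Report_File.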
type Report_File_States is ( Report_Just_Started, Report_Banner_Started, Report_Banner_Finished, Report_Date_Found, Report_Options_Found, Report_Index_File_Found, Report_Warning_File_Found, Report_Target_Compiler_Data_Found, Report_Target_Config_File_Found, Report_Source_Extension_Found, Report_Listing_Extension_Found, Report_Dictionary_Found, Report_Report_File_Found, Report_HTML_Flag_Found, Report_Statistics_Flag_Found, Report_FDL_Identifiers_Found, Report_Flow_Analysis_Found, Report_Language_Option_Found, Report_Annotation_Character_Found, Report_Selected_Files_Started, Report_Index_Files_Started, Report_Meta_Files_Started, Report_Warning_List_Started, Report_Source_List_Started, Report_Missing_Files_Started, Report_Source_File_Started, Report_Listing_Filename_Found, Report_Unit_Name_Found, Report_No_Units_Found, Report_Unit_Type_Found, Report_Analysed_Message_Found, Report_Start_Of_Errors, Report_End_Of_Errors, Report_Justifications_Summary_Found, Report_Summarized_Warnings_Found, Report_Line_Header_Found, Report_Error_Source_Line_Found, Report_Error_Source_Pointer_Found, Report_Error_Message_Found, Report_Blank_After_Error_Found, Report_Target_Config_List_Started, Report_Target_Error_Line, Report_Target_Error_Next_Line, Report_End_Of_Report_File); type Listing_File_States is ( Listing_Just_Started, Listing_Banner_Started, Listing_Banner_Finished, Listing_Date_Found, Listing_Line_Heading_Found, Listing_Source_Line_Found, Listing_Error_Source_Pointer_Found, Listing_Error_Message_Found, Listing_End_Of_Listing_File); Generate_HTML : Boolean := True; -- Set to false if fatal HTML error occurs to prevent further HTML generation. HTML_Work_Dir : E_Strings.T; SPARK_Work_Dir : E_Strings.T; --------------------------------------------------------------------------- -- This function returns true if and only if C is a character representing -- a digit. function Digit (C : Character) return Boolean is begin return C in '0' .. '9'; end Digit; --------------------------------------------------------------------------- -- This function prepends the given string with the name of the selected -- direcotry into which HTML is being generated. The appropriate -- target-dependant directory separator is also added. -- -- No error checking is performed. If the string overflows it is -- truncated by the Append_String routines. function HTML_Filename (Filename : E_Strings.T) return E_Strings.T --# global in CommandLineData.Content; is Return_Filename : E_Strings.T; begin Return_Filename := FileSystem.Start_Of_Directory; if CommandLineData.Content.Output_Directory then E_Strings.Append_Examiner_String (E_Str1 => Return_Filename, E_Str2 => CommandLineData.Content.Output_Directory_Name); E_Strings.Append_Examiner_String (E_Str1 => Return_Filename, E_Str2 => FileSystem.Directory_Separator); end if; E_Strings.Append_Examiner_String (E_Str1 => Return_Filename, E_Str2 => CommandLineData.Content.HTML_Directory); E_Strings.Append_Examiner_String (E_Str1 => Return_Filename, E_Str2 => FileSystem.Directory_Separator); E_Strings.Append_Examiner_String (E_Str1 => Return_Filename, E_Str2 => Filename); return Return_Filename; end HTML_Filename; --------------------------------------------------------------------------- -- This function returns the index of the first character on a line that isn't -- a space. -- -- No error checking is necessary. The for loop ensures that bounds aren't -- exceeded. 
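-- Worked example: for the line "  with Foo;" (two leading spaces)
-- First_Char returns 3, the position of the 'w'; for a line consisting
-- entirely of spaces no assignment is made inside the loop and the
-- initial value E_Strings.Positions'First is returned.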
function First_Char (The_String : E_Strings.T) return E_Strings.Positions is Pos : E_Strings.Positions; begin Pos := E_Strings.Positions'First; for I in E_Strings.Positions range E_Strings.Positions'First .. E_Strings.Get_Length (E_Str => The_String) loop if E_Strings.Get_Element (E_Str => The_String, Pos => I) /= ' ' then Pos := I; exit; end if; end loop; return Pos; end First_Char; --------------------------------------------------------------------------- -- Applies HTML_Name_Char to each character in the string. -- -- No error checking is necessary. The for loop ensures that bounds aren't -- exceeded. function HTML_Name (E_Str : E_Strings.T) return E_Strings.T is Out_String : E_Strings.T; -- This function returns the given character if the character can be used in -- an HTML name and '_' otherwise. -- Characters the are allowed in HTML names are letters ([A-Za-z]), digits ([0-9] -- hyphens ("-"), underscores ("_"), colons (":") and periods ("."). -- -- No error checking necessary. function HTML_Name_Char (C : Character) return Character is Out_Char : Character; begin if C in 'A' .. 'Z' or C in 'a' .. 'z' or C in '0' .. '9' or C = '-' or C = '_' or C = ':' or C = '.' then Out_Char := C; else Out_Char := '_'; end if; return Out_Char; end HTML_Name_Char; begin Out_String := E_Strings.Empty_String; for I in E_Strings.Positions range E_Strings.Positions'First .. E_Strings.Get_Length (E_Str => E_Str) loop E_Strings.Append_Char (E_Str => Out_String, Ch => HTML_Name_Char (C => E_Strings.Get_Element (E_Str => E_Str, Pos => I))); end loop; return Out_String; end HTML_Name; --------------------------------------------------------------------------- -- This function removes all occurrences of special HTML characters and replaces -- them with the HTML codes required to display them. -- The conversions done are: -- -- > becomes > -- < becomes < -- & becomes & -- " becomes " -- -- The for loop ensures that bounds of the input string aren't exceeded. If -- string overflow occurs then the output string is truncated by the Append -- routines. function Convert_Special_HTML_Chars (Line : E_Strings.T) return E_Strings.T is Out_String : E_Strings.T; begin Out_String := E_Strings.Empty_String; for I in E_Strings.Positions range E_Strings.Positions'First .. E_Strings.Get_Length (E_Str => Line) loop case E_Strings.Get_Element (E_Str => Line, Pos => I) is when '<' => E_Strings.Append_String (E_Str => Out_String, Str => "<"); when '>' => E_Strings.Append_String (E_Str => Out_String, Str => ">"); when '&' => E_Strings.Append_String (E_Str => Out_String, Str => "&"); when '"' => E_Strings.Append_String (E_Str => Out_String, Str => """); when others => E_Strings.Append_Char (E_Str => Out_String, Ch => E_Strings.Get_Element (E_Str => Line, Pos => I)); end case; end loop; return Out_String; end Convert_Special_HTML_Chars; --------------------------------------------------------------------------- -- This function encloses the line given in HTML tags to make it bold type. -- -- If the line given is too long for the tags to be added then it is -- returned unchanged. 
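-- Worked example, assuming the usual HTML bold start and end tags: applied
-- to the line "Total" the function returns that text wrapped in the two
-- tags, while any line with fewer than eight characters of spare capacity
-- below E_Strings.Lengths'Last is returned unchanged by the guard below.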
function HTML_Embolden (Line : E_Strings.T) return E_Strings.T is Out_String : E_Strings.T; begin if E_Strings.Get_Length (E_Str => Line) < E_Strings.Lengths'Last - 7 then Out_String := E_Strings.Copy_String (Str => ""); E_Strings.Append_Examiner_String (E_Str1 => Out_String, E_Str2 => Line); E_Strings.Append_String (E_Str => Out_String, Str => ""); else Out_String := Line; end if; return Out_String; end HTML_Embolden; --------------------------------------------------------------------------- -- This function does a target-specific append of a subdirectory name. -- The function works with or without trailing separators. -- -- The path returned will always have a trailing separator. function Append_Directory_String (Path : E_Strings.T; Dir : E_Strings.T) return E_Strings.T is Path_Out : E_Strings.T; begin Path_Out := Path; -- Add trailing separator if necessary. if E_Strings.Get_Element (E_Str => Path_Out, Pos => E_Strings.Get_Length (E_Str => Path_Out)) /= E_Strings.Get_Element (E_Str => FileSystem.Directory_Separator, Pos => E_Strings.Get_Length (E_Str => FileSystem.Directory_Separator)) then E_Strings.Append_Examiner_String (E_Str1 => Path_Out, E_Str2 => FileSystem.Directory_Separator); end if; E_Strings.Append_Examiner_String (E_Str1 => Path_Out, E_Str2 => Dir); E_Strings.Append_Examiner_String (E_Str1 => Path_Out, E_Str2 => FileSystem.Directory_Separator); return Path_Out; end Append_Directory_String; --------------------------------------------------------------------------- -- This procedure takes two paths and returns the URI of the first path -- relative to the second path using the global value of the SPARK_Work_Dir as -- the current directory. -- -- The directories passed must have trailing separators. The directory returned has -- a trailing separator. A separator is defined as being the first character -- of the Examiner_String returned by FileSystem.Directory_Separator. -- -- For use on Windows, it is not possible to give a URI in all instances if -- the directories exist on two different drives. For example, if the -- paths given are "C:\foo\" and "D:\bar\". -- -- We could simply return the absolute path but if we then want to view -- the URI on a Unix system we will find that the path does not exist. -- -- I therefore have included a "Success" parameter which is set to false if -- the paths are on 2 different drives. -- procedure HTML_Relative_Dir (Dir_In, Relative_To_In : in E_Strings.T; Dir_Relative : out E_Strings.T; Success_Out : out Boolean) --# global in SPARK_Work_Dir; --# derives Dir_Relative, --# Success_Out from Dir_In, --# Relative_To_In, --# SPARK_Work_Dir; is Dir, Relative_To : E_Strings.T; Out_String : E_Strings.T; Remaining_Dir : E_Strings.T; Remaining_Relative_To : E_Strings.T; Popped_Directory : E_Strings.T; Working_Device : E_Strings.T; Dir_Device : E_Strings.T; Relative_To_Device : E_Strings.T; I : E_Strings.Positions; Directory_Found : Boolean; Working_Device_Found : Boolean; Dir_Device_Found : Boolean; Relative_To_Device_Found : Boolean; Success : Boolean; -- This function chops the first character from the Examiner_String. -- If the Empty_String is given then the Empty_String is returned. function Get_Rest (S : E_Strings.T) return E_Strings.T is Result : E_Strings.T; begin if E_Strings.Get_Length (E_Str => S) <= 1 then Result := E_Strings.Empty_String; else Result := E_Strings.Section (S, E_Strings.Positions'First + 1, E_Strings.Get_Length (E_Str => S) - 1); end if; return Result; end Get_Rest; -- This function looks for a device name and returns it. 
-- -- If no device name is present then the Empty_String is returned. -- -- A device name is ususally the name of a Windows drive (of the form "X:") function Get_Device_Prefix (Path : in E_Strings.T) return E_Strings.T is Colon_Found : Boolean; Pos : E_Strings.Positions; Device : E_Strings.T; begin if E_Strings.Get_Length (E_Str => Path) = 0 then Device := E_Strings.Empty_String; else Colon_Found := False; Pos := E_Strings.Get_Length (E_Str => Path); for I in E_Strings.Positions range E_Strings.Positions'First .. E_Strings.Get_Length (E_Str => Path) loop -- Have we found a colon? if E_Strings.Get_Element (E_Str => Path, Pos => I) = ':' then Colon_Found := True; Pos := I; exit; end if; -- If we find a directory separator or StartOf_Directory then -- exit the loop with Colon_Found = False. if E_Strings.Get_Element (E_Str => Path, Pos => I) = E_Strings.Get_Element (E_Str => FileSystem.Start_Of_Directory, Pos => 1) or else E_Strings.Get_Element (E_Str => Path, Pos => I) = E_Strings.Get_Element (E_Str => FileSystem.Directory_Separator, Pos => 1) then exit; -- Colon_Found is already False; end if; end loop; if Colon_Found then Device := E_Strings.Section (E_Str => Path, Start_Pos => E_Strings.Positions'First, Length => Pos); else -- Leave Path as it is. Set Device to empty string. Device := E_Strings.Empty_String; end if; end if; return Device; end Get_Device_Prefix; -- This procedure does a Get_Device_Prefix and removes the device name from -- the path given. procedure Chop_Device_Prefix (Path : in out E_Strings.T; Device : out E_Strings.T; Found : out Boolean) --# derives Device, --# Found, --# Path from Path; is Device_Name : E_Strings.T; begin Device_Name := Get_Device_Prefix (Path => Path); -- Get_Device_Prefix returns Empty_String if no device name was found. if E_Strings.Get_Length (E_Str => Device_Name) > 0 then Path := E_Strings.Section (Path, E_Strings.Get_Length (E_Str => Device_Name) + 1, E_Strings.Get_Length (E_Str => Path) - E_Strings.Get_Length (E_Str => Device_Name)); Device := Device_Name; Found := True; else Device := E_Strings.Empty_String; Found := False; end if; end Chop_Device_Prefix; -- This function takes a pathname (must NOT be preceded by an device name) parameter. -- If the pathname is absolute (begins with a Directory_Separator character) then it is -- returned unchanged. Otherwise it is returned with SPARK_Work_Dir prepended to it. -- The returned path will not have a device name. function Make_Absolute (Path : E_Strings.T) return E_Strings.T --# global in SPARK_Work_Dir; is Out_String : E_Strings.T; Device : E_Strings.T; begin if Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Path, Pos => 1)) = Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => FileSystem.Directory_Separator, Pos => 1)) then Out_String := Path; else -- Directory is relative to current - append Current_Dir Out_String := SPARK_Work_Dir; E_Strings.Append_Examiner_String (E_Str1 => Out_String, E_Str2 => Path); end if; -- Remove device name if it exists. Device := Get_Device_Prefix (Path => Out_String); -- Length of device name is 0 if no device was found. if E_Strings.Get_Length (E_Str => Device) > 0 then Out_String := E_Strings.Section (Out_String, E_Strings.Get_Length (E_Str => Device) + 1, E_Strings.Get_Length (E_Str => Out_String) - E_Strings.Get_Length (E_Str => Device)); end if; return Out_String; end Make_Absolute; -- This procedure returns and removes the first subdirectory name from -- the given path. 
The given path must not have an device name or a -- leading separator. The popped subdirectory name will also not have a -- leading separator. The remaining directory name has no leading -- separator. -- -- e.g. if the input path is "foo/bar/baz/" then the path returned -- will be "bar/baz/" and the subdirectory returned will be "foo". -- -- If the URL flag is set then the separator '/' is used rather than -- a target specific separator. -- -- Dir_Found is true if and only if a directory separator was found in the -- path. -- -- If the path has length 0 or a directory separator is not found then -- - Path is returned unchanged -- - Dir is ELStrings.Empty_String -- - Dirfound is false -- procedure Pop_Sub_Dir (Path : in out E_Strings.T; URL : in Boolean; Dir : out E_Strings.T; Dir_Found : out Boolean) --# derives Dir, --# Dir_Found, --# Path from Path, --# URL; is Between_Separator : E_Strings.T; End_Separator : E_Strings.T; Separator_Pos : E_Strings.Positions; Separator_Found : Boolean; begin if E_Strings.Get_Length (E_Str => Path) = 0 then Dir := E_Strings.Empty_String; Dir_Found := False; else if URL then Between_Separator := E_Strings.Copy_String (Str => "/"); End_Separator := Between_Separator; else Between_Separator := FileSystem.Directory_Separator; End_Separator := FileSystem.Directory_Separator; end if; E_Strings.Find_Examiner_Sub_String (E_Str => Path, Search_String => Between_Separator, String_Found => Separator_Found, String_Start => Separator_Pos); if not Separator_Found then -- Maybe last directory? E_Strings.Find_Examiner_Sub_String (E_Str => Path, Search_String => End_Separator, String_Found => Separator_Found, String_Start => Separator_Pos); end if; if Separator_Found then Dir := E_Strings.Section (Path, E_Strings.Positions'First, Separator_Pos - 1); Path := E_Strings.Section (Path, Separator_Pos + 1, E_Strings.Get_Length (E_Str => Path) - Separator_Pos); Dir_Found := True; else Dir := E_Strings.Empty_String; Dir_Found := False; end if; end if; end Pop_Sub_Dir; -- This function removes the last directory in the URL given. -- The input path should have a trailing separator and the output -- path will also have a trailing separator. -- -- If no separator is found then the string is returned unchanged. -- -- e.g. Remove_Last_Directory ("foo/bar/baz/") = "foo/bar/" -- -- This routine is used when removing "../" from pathnames. -- -- NOTE: This only works with URLs - the directory separator must be '/' -- function Remove_Last_Directory (Path : E_Strings.T) return E_Strings.T is Out_String : E_Strings.T; Pos : E_Strings.Lengths; begin Out_String := Path; Pos := E_Strings.Get_Length (E_Str => Path); -- Remember not to include the trailing Directory_Separator character in the search. for I in reverse E_Strings.Positions range E_Strings.Positions'First .. E_Strings.Get_Length (E_Str => Path) - 1 loop if E_Strings.Get_Element (E_Str => Path, Pos => I) = '/' then Pos := I; exit; end if; end loop; -- Pos will still equal Path.Length if separator was not found. if Pos /= E_Strings.Get_Length (E_Str => Path) then -- separator was found Out_String := E_Strings.Section (E_Str => Path, Start_Pos => E_Strings.Positions'First, Length => Pos); end if; return Out_String; end Remove_Last_Directory; -- This function resolves a URL containing references to the previous -- directory "../" and the current directory "./". The path given must have -- a trailing '/' character. -- The function works with or without a leading directory separator (this is -- copied if it exists. 
-- -- NOTE: This function only works with URL's. The directory separator -- used is always '/' and current and parent directories are -- "." and ".." respectively. function Remove_Dots (In_Path : E_Strings.T) return E_Strings.T is Sub_Dir_Found : Boolean; Path : E_Strings.T; Out_Path : E_Strings.T; Next_Dir : E_Strings.T; begin Path := In_Path; Out_Path := E_Strings.Empty_String; if E_Strings.Get_Element (E_Str => Path, Pos => E_Strings.Positions'First) = '/' then -- Copy the leading separator. Path := Get_Rest (S => Path); E_Strings.Append_Char (E_Str => Out_Path, Ch => '/'); end if; loop Pop_Sub_Dir (Path => Path, URL => True, Dir => Next_Dir, Dir_Found => Sub_Dir_Found); exit when not Sub_Dir_Found; if E_Strings.Eq1_String (E_Str => Next_Dir, Str => ".") then null; -- do nothing elsif E_Strings.Eq1_String (E_Str => Next_Dir, Str => "..") then Out_Path := Remove_Last_Directory (Path => Out_Path); else E_Strings.Append_Examiner_String (E_Str1 => Out_Path, E_Str2 => Next_Dir); E_Strings.Append_Char (E_Str => Out_Path, Ch => '/'); end if; end loop; return Out_Path; end Remove_Dots; -- This function converts a directory name into a URL. function Convert_To_URL (Path : E_Strings.T) return E_Strings.T is begin -- On Windows, pathnames might contain '\' which need -- to be transformed unto '/' to be a URL. On other -- platforms, this is a no-op. return E_Strings.Translate (E_Str => Path, From_Char => '\', To_Char => '/'); end Convert_To_URL; begin -- HTML_Relative_Dir -- Initialise variables Success := True; Out_String := E_Strings.Empty_String; -- Copy input parameters to local variables. Dir := Dir_In; Relative_To := Relative_To_In; -- Get device names. -- Device names are removed from the input directories. -- SPARK_Work_Dir is not to be modified. Working_Device := Get_Device_Prefix (Path => SPARK_Work_Dir); Working_Device_Found := E_Strings.Get_Length (E_Str => Working_Device) > 0; Chop_Device_Prefix (Path => Dir, Device => Dir_Device, Found => Dir_Device_Found); Chop_Device_Prefix (Path => Relative_To, Device => Relative_To_Device, Found => Relative_To_Device_Found); -- We cannot create links if files are on different NT drives or different -- VAX devices. if Dir_Device_Found then --Dir contains a device name. if Relative_To_Device_Found then -- Relative_To also contains a device name. Are they the same? -- Fail if different devices. if not E_Strings.Eq_String (E_Str1 => Dir_Device, E_Str2 => Relative_To_Device) then Success := False; -- Files are on different drives. end if; else -- Dir contains a device name and Relative_To is on the current drive. -- Is the current device equal to the device that Dir is on? if (not Working_Device_Found) or else (not E_Strings.Eq_String (E_Str1 => Dir_Device, E_Str2 => Working_Device)) then Success := False; end if; end if; else -- Dir does not contain a drive specification - does Relative_To? if Relative_To_Device_Found then -- Relative_To contains a device name and Dir is on the current drive. -- Is the current device equal to the device that Relative_To is on? if (not Working_Device_Found) or else (not E_Strings.Eq_String (E_Str1 => Relative_To_Device, E_Str2 => Working_Device)) then Success := False; end if; end if; end if; -- Do nothing (return empty string) if directories are identical, or if -- the previous checks failed. 
if (not E_Strings.Eq_String (E_Str1 => Dir, E_Str2 => Relative_To)) and then Success then -- Make directories absolute Dir := Make_Absolute (Path => Dir); Relative_To := Make_Absolute (Path => Relative_To); -- Convert directories to URL's. Dir := Convert_To_URL (Path => Dir); Relative_To := Convert_To_URL (Path => Relative_To); -- Remove "./" and "../" and make case-insensitive where required. Dir := Remove_Dots (In_Path => Dir); Relative_To := Remove_Dots (In_Path => Relative_To); -- Initialize counter. I := E_Strings.Positions'First; -- Skip common prefix. We want I to point to the character that begins -- the name of the first different subdirectory. e.g. if we have -- /foo/bar/ and /foo/baz/ we want to point to the 'b' loop exit when I > E_Strings.Get_Length (E_Str => Dir) or else I > E_Strings.Get_Length (E_Str => Relative_To) or else E_Strings.Get_Element (E_Str => Dir, Pos => I) /= E_Strings.Get_Element (E_Str => Relative_To, Pos => I); I := I + 1; end loop; if I < E_Strings.Get_Length (E_Str => Dir) and then I < E_Strings.Get_Length (E_Str => Relative_To) then -- Back up to previous directory separator (in case we're comparing, e.g. -- \foo\bar and \foo\baz, in which case the common prefix is c:\foo\ba) while E_Strings.Get_Element (E_Str => Dir, Pos => I) /= '/' and I > E_Strings.Positions'First loop I := I - 1; end loop; -- Now we want to point to just past the separator so I := I + 1; end if; -- Now remove the common ancestor directories. if I > E_Strings.Get_Length (E_Str => Dir) then Remaining_Dir := E_Strings.Empty_String; else Remaining_Dir := E_Strings.Section (Dir, I, (E_Strings.Get_Length (E_Str => Dir) - I) + 1); end if; if I > E_Strings.Get_Length (E_Str => Relative_To) then Remaining_Relative_To := E_Strings.Empty_String; else Remaining_Relative_To := E_Strings.Section (Relative_To, I, (E_Strings.Get_Length (E_Str => Relative_To) - I) + 1); end if; -- For each subdirectory remaining in Relative_To we add a "../" to Out_String loop --# accept Flow, 10, Popped_Directory, "Expected ineffective assignment to Popped_Directory"; Pop_Sub_Dir (Path => Remaining_Relative_To, URL => True, Dir => Popped_Directory, Dir_Found => Directory_Found); --# end accept; exit when not Directory_Found; E_Strings.Append_String (E_Str => Out_String, Str => "../"); end loop; -- For each subdirectory remaining in Dir we add "/" loop Pop_Sub_Dir (Path => Remaining_Dir, URL => True, Dir => Popped_Directory, Dir_Found => Directory_Found); exit when not Directory_Found; E_Strings.Append_Examiner_String (E_Str1 => Out_String, E_Str2 => Popped_Directory); E_Strings.Append_String (E_Str => Out_String, Str => "/"); end loop; end if; Dir_Relative := Out_String; Success_Out := Success; end HTML_Relative_Dir; -- Flow error expected. SPARK_Work_Dir is NOT updated. --------------------------------------------------------------------------- -- This procedure uses HTML_Relative_Dir to give the URI of a file relative -- to a given directory and the global SPARK_Work_Dir as the current directory. -- -- The procedure HTML_Relative_File (File, Relative_To : in E_Strings.T; File_Relative : out E_Strings.T; Success_Out : out Boolean) --# global in SPARK_Work_Dir; --# derives File_Relative, --# Success_Out from File, --# Relative_To, --# SPARK_Work_Dir; is Directory_Name : E_Strings.T; Relative_Dir_Name : E_Strings.T; Filename : E_Strings.T; Out_String : E_Strings.T; Separator_Pos : E_Strings.Positions; Success : Boolean; begin Separator_Pos := E_Strings.Get_Length (E_Str => File); -- Get the filename and extension. 
Filename might have a path -- or a drive name as a prefix. for I in reverse E_Strings.Positions range E_Strings.Positions'First .. E_Strings.Get_Length (E_Str => File) loop if E_Strings.Get_Element (E_Str => File, Pos => I) = E_Strings.Get_Element (E_Str => FileSystem.Directory_Separator, Pos => E_Strings.Get_Length (E_Str => FileSystem.Directory_Separator)) or else E_Strings.Get_Element (E_Str => File, Pos => I) = ':' then Separator_Pos := I; exit; end if; end loop; -- Separate the file and directory name. if Separator_Pos = E_Strings.Get_Length (E_Str => File) then -- no pathname given, just filename. Directory_Name := E_Strings.Empty_String; Filename := File; else Directory_Name := E_Strings.Section (File, E_Strings.Positions'First, Separator_Pos); Filename := E_Strings.Section (File, Separator_Pos + 1, E_Strings.Get_Length (E_Str => File) - Separator_Pos); end if; -- Interpret the directory name as a URL relative to Relative_To HTML_Relative_Dir (Dir_In => Directory_Name, Relative_To_In => Relative_To, Dir_Relative => Relative_Dir_Name, Success_Out => Success); -- If the relative directory operation was successful then we use the -- new directory name. Otherwise we simply return the filename given. if Success then Out_String := Relative_Dir_Name; E_Strings.Append_Examiner_String (E_Str1 => Out_String, E_Str2 => Filename); else Out_String := File; end if; File_Relative := Out_String; Success_Out := Success; end HTML_Relative_File; --------------------------------------------------------------------------- -- This procedure is global as it is used by both the report and listing file -- translators. -- -- This procedure processes an error message line. Some errors get wrapped over -- more than one line and this procedure should only be used in generating -- HTML for the first line of any error message. -- -- It assumes that the line is formatted as follows: -- -- (nnnn) : : -- -- The actual pattern matched is slightly different but we only expect lines -- of this type (or the special cases described below) and lines of this type -- will be parsed correctly. -- -- The and the error code are used to generate an HTML -- link of the form , where error_file is -- the (relative) location of the error messages file and error_link is the -- name of an anchor in the file, typically made from the first word of the -- error type (converted to lower case) and the error number, separated by a hyphen. -- Note that there are some special cases where the name of the anchor is not -- constructed in this way. -- -- The HTML link is placed before the start of the line (the closing tag is -- placed at the end of the line). The link is also returned in the Error_Link -- out parameter so that it can be saved for use in any continuations -- of this message line. -- -- For example, if the Line parameter is the Examiner_String: -- -- *** (419) Semantic Error : 1: Message -- -- The line output will be: -- -- *** (419) Semantic Error : 1: Message -- -- and the Error_Link parameter will be: -- -- -- -- If no tag is added then the Error_Link parameter is set to Empty_String. -- -- The special cases (i.e. those which don't fit the above format, mainly -- because they don't have error numbers) are handled by the procedure -- Handle_Special_Cases as described below. 
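-- Illustrative aside (not part of the Examiner sources): a minimal, stand-alone
-- sketch of the anchor-naming convention described above, written against
-- standard Ada strings rather than E_Strings.  The unit name Error_Link_Sketch,
-- the helper Make_Error_Link and the literal file name "errors.htm" are
-- assumptions made for this example; the real procedure below takes the file
-- name from Error_Reference_Filename and parses the error type and number out
-- of the report line itself.

with Ada.Text_IO;
with Ada.Strings.Fixed;
with Ada.Characters.Handling;

procedure Error_Link_Sketch is

   -- The anchor is the first word of the error type, lower-cased, a hyphen,
   -- and the error number, e.g. ("Semantic Error", "419") -> "#semantic-419".
   function Make_Error_Link (Error_Type : String;
                             Error_No   : String) return String is
      Space : Natural := Ada.Strings.Fixed.Index (Error_Type, " ");
   begin
      if Space = 0 then
         Space := Error_Type'Last + 1;
      end if;
      return "<A HREF=""errors.htm#"
        & Ada.Characters.Handling.To_Lower
            (Error_Type (Error_Type'First .. Space - 1))
        & "-" & Error_No
        & """ TARGET=""rbottom"">";
   end Make_Error_Link;

begin
   -- Prints: <A HREF="errors.htm#semantic-419" TARGET="rbottom">
   Ada.Text_IO.Put_Line (Make_Error_Link ("Semantic Error", "419"));
end Error_Link_Sketch;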
procedure Process_First_Error_Message_Line (Line : in out E_Strings.T; Lookahead : in E_Strings.T; Error_Link : out E_Strings.T) --# global in out SPARK_IO.File_Sys; --# derives Error_Link, --# Line from Line, --# Lookahead & --# SPARK_IO.File_Sys from *, --# Line, --# Lookahead; is Saved_Line : E_Strings.T; Error_Type : E_Strings.T; Error_No : E_Strings.T; Out_String : E_Strings.T; Char : Character; Char2 : Character; Char3 : Character; Error : Boolean; Special_Case : Boolean; -- The function Flash_Character is true if and only if the input is -- a character used by the Examiner in a "flash" at the start of -- an error message. function Flash_Character (Char : Character) return Boolean is begin return Char = '?' or else Char = '-' or else Char = '+' or else Char = '*' or else Char = '!'; end Flash_Character; pragma Inline (Flash_Character); -- This procedure handles the special cases of error messages that are not -- of the form described in the commentary for Process_First_Error_Message_Line. -- These special cases are mainly errors that don't have error numbers -- associated with them or that have no reference in the error reference -- file. -- -- The "special case" errors are as follows: -- -- *** Syntax Error : ";" expected. -- *** Syntax Error : No APRAGMA can be start with reserved word "IS". -- *** Syntax Error : No complete PROCEDURE_SPECIFICATION can be followed by ANNOTATION_START here. -- *** Syntax Error : No complete PROCEDURE_SPECIFICATION can be followed by reserved word "IS" here. -- *** Syntax Error : reserved word "INHERIT" expected. -- Any other syntax errors (these won't have links) -- Warning : No semantic checks carried out, text may not be legal SPARK. -- Any lexical errors (these won't have links) -- -- The line output is the line input but with an HTML link to the correct -- place in the error reference file (or no link if no such reference -- exists). -- -- The error_link parameter is set to the HTML tag used to generate the link -- or, if not link is generated, the empty string. -- -- The Special_Case parameter is set to true if and only if a special case -- was found. If this parameter is set then the caller should not try -- to process the error message line in the usual way. -- -- The procedure uses a fairly naive Sub_String search mechanism that could -- potentially incorrectly match a string and flag it as a Special_Case -- but this is unlikely because when this procedure is called we know that -- we have a line containing an error message and we know what the error -- messages are. procedure Handle_Special_Cases (Line : in out E_Strings.T; Lookahead : in E_Strings.T; Error_Link : out E_Strings.T; Special_Case : out Boolean) --# derives Error_Link, --# Line from Line, --# Lookahead & --# Special_Case from Line; is Add_Tag : Boolean; Syntax_Error_Found : Boolean; Warning_Found : Boolean; Lexical_Found : Boolean; Error_Link_Name : E_Strings.T; Error_Link_String : E_Strings.T; Out_String : E_Strings.T; String_Found : Boolean; String_Start : E_Strings.Positions; begin Add_Tag := False; Special_Case := False; Error_Link := E_Strings.Empty_String; Error_Link_Name := E_Strings.Empty_String; -- Check for the various error types that have special cases. 
--# accept F, 10, String_Start, "Ineffective assignment here OK"; E_Strings.Find_Sub_String (E_Str => Line, Search_String => "Syntax Error", String_Found => Syntax_Error_Found, String_Start => String_Start); E_Strings.Find_Sub_String (E_Str => Line, Search_String => "Warning", String_Found => Warning_Found, String_Start => String_Start); E_Strings.Find_Sub_String (E_Str => Line, Search_String => "Lexical Error", String_Found => Lexical_Found, String_Start => String_Start); --# end accept; if Syntax_Error_Found then -- HTML directives: --! <"syntax-"> --! <"*** Syntax Error : "> -- HTML output --! semicolon-expected --! ";" expected. --! If this is reported at the end of the input file it may well --! be caused by the misspelling of an identifier in a hide directive. --! The parser then skips all the following text looking for the --! misspelled identifier but finds the end of file first where it --! reports a syntax error. --! no-apragma --! No APRAGMA can be start with reserved word "IS" --! This can occur when a stub for an embedded subprogram is wrongly --! terminated by a semicolon. --! procedure-spec-annotation-start --! No complete PROCEDURE_SPECIFICATION can be followed by ANNOTATION_START here. --! This can occur when the reserved word body has been --! omitted from the declaration of a package body. This error --! will occur at the annotation placed between the --! specification and the reserved word is of the first --! subprogram. --! procedure-spec-is --! No complete PROCEDURE_SPECIFICATION can be followed by reserved word "IS" here. --! This can occur when the reserved word body has been omitted --! from the declaration of a package body. This error will occur at the --! reserved word is which introduces the body of the first subprogram. --! inherit-expected --! reserved word "INHERIT" expected. --! This occurs where the annotation on a subprogram body is placed after --! the reserved word is instead of before it. --! simple-expression-rbracket --! No complete SIMPLE_EXPRESSION can be followed by ")" here. --! This can occur in an aggregate expression when there is a mixure of --! named and positional association being used. --! simple-expression-comma --! No complete SIMPLE_EXPRESSION can be followed by "," here. --! This can occur in an aggregate expression when there is a mixure of --! named and positional association being used. -- All syntax errors are special cases. 
Special_Case := True; --# accept F, 10, String_Start, "Ineffective assignment here OK"; E_Strings.Find_Sub_String (E_Str => Line, Search_String => ";" expected", String_Found => String_Found, String_Start => String_Start); --# end accept; if String_Found then Error_Link_Name := E_Strings.Copy_String (Str => "syntax-semicolon-expected"); Add_Tag := True; end if; --# accept F, 10, String_Start, "Ineffective assignment here OK"; E_Strings.Find_Sub_String (E_Str => Line, Search_String => "No APRAGMA", String_Found => String_Found, String_Start => String_Start); --# end accept; if String_Found then Error_Link_Name := E_Strings.Copy_String (Str => "syntax-no-apragma"); Add_Tag := True; end if; --# accept F, 10, String_Start, "Ineffective assignment here OK"; E_Strings.Find_Sub_String (E_Str => Line, Search_String => "No complete PROCEDURE_SPECIFICATION", String_Found => String_Found, String_Start => String_Start); --# end accept; if String_Found then --# accept F, 10, String_Start, "Ineffective assignment here OK"; E_Strings.Find_Sub_String (E_Str => Lookahead, Search_String => "_START here.", String_Found => String_Found, String_Start => String_Start); --# end accept; if String_Found then Error_Link_Name := E_Strings.Copy_String (Str => "syntax-procedure-spec-annotation-start"); Add_Tag := True; end if; --# accept F, 10, String_Start, "Ineffective assignment here OK"; E_Strings.Find_Sub_String (E_Str => Lookahead, Search_String => ""IS" here.", String_Found => String_Found, String_Start => String_Start); --# end accept; if String_Found then Error_Link_Name := E_Strings.Copy_String (Str => "syntax-procedure-spec-is"); Add_Tag := True; end if; end if; --# accept F, 10, String_Start, "Ineffective assignment here OK"; E_Strings.Find_Sub_String (E_Str => Line, Search_String => "No complete SIMPLE_EXPRESSION", String_Found => String_Found, String_Start => String_Start); --# end accept; if String_Found then --# accept F, 10, String_Start, "Ineffective assignment here OK"; E_Strings.Find_Sub_String (E_Str => Lookahead, Search_String => "")" here.", String_Found => String_Found, String_Start => String_Start); --# end accept; if String_Found then Error_Link_Name := E_Strings.Copy_String (Str => "syntax-simple-expression-rbracket"); Add_Tag := True; end if; --# accept F, 10, String_Start, "Ineffective assignment here OK"; E_Strings.Find_Sub_String (E_Str => Lookahead, Search_String => ""," here.", String_Found => String_Found, String_Start => String_Start); --# end accept; if String_Found then Error_Link_Name := E_Strings.Copy_String (Str => "syntax-simple-expression-comma"); Add_Tag := True; end if; end if; --# accept F, 10, String_Start, "Ineffective assignment here OK"; E_Strings.Find_Sub_String (E_Str => Line, Search_String => "reserved word "INHERIT" expected", String_Found => String_Found, String_Start => String_Start); --# end accept; if String_Found then Error_Link_Name := E_Strings.Copy_String (Str => "syntax-inherit-expected"); Add_Tag := True; end if; elsif Warning_Found then -- Not all warnings are special cases - only set Special_Case to True if -- a special case is detected. 
--# accept F, 10, String_Start, "Ineffective assignment here OK"; E_Strings.Find_Sub_String (E_Str => Line, Search_String => "No semantic checks carried out,", String_Found => String_Found, String_Start => String_Start); --# end accept; if String_Found then Error_Link_Name := E_Strings.Copy_String (Str => "warning-no-semantic-checks"); Add_Tag := True; Special_Case := True; end if; elsif Lexical_Found then -- All lexical errors are special cases. Special_Case := True; -- Lexical errors are not included in the file of error explanations -- so no processing is done. end if; if Add_Tag then Error_Link_String := E_Strings.Copy_String (Str => " Error_Link_String, Str => Error_Reference_Filename); E_Strings.Append_String (E_Str => Error_Link_String, Str => "#"); E_Strings.Append_Examiner_String (E_Str1 => Error_Link_String, E_Str2 => Error_Link_Name); E_Strings.Append_String (E_Str => Error_Link_String, Str => """>"); Error_Link := Error_Link_String; Out_String := Error_Link_String; E_Strings.Append_Examiner_String (E_Str1 => Out_String, E_Str2 => Line); E_Strings.Append_String (E_Str => Out_String, Str => ""); Line := Out_String; end if; --# accept F, 33, String_Start, "Expect String_Start unused"; end Handle_Special_Cases; begin -- Process_First_Error_Message_Line Handle_Special_Cases (Line => Line, Lookahead => Lookahead, Error_Link => Error_Link, Special_Case => Special_Case); if not Special_Case then Error := False; Error_Type := E_Strings.Empty_String; Error_No := E_Strings.Empty_String; Saved_Line := Line; -- Get characters until flash is found (either ???, ---, +++, ***, !!!) loop E_Strings.Pop_Char (E_Str => Line, Char => Char); exit when Flash_Character (Char => Char) or else E_Strings.Is_Empty (E_Str => Line); end loop; -- Look for two more flash characters E_Strings.Pop_Char (E_Str => Line, Char => Char2); E_Strings.Pop_Char (E_Str => Line, Char => Char3); if Char2 /= Char or Char3 /= Char then Error := True; end if; -- Look for a space and a '(' E_Strings.Pop_Char (E_Str => Line, Char => Char2); E_Strings.Pop_Char (E_Str => Line, Char => Char3); if Char2 /= ' ' or Char3 /= '(' then Error := True; end if; -- Skip line number (up to next ')') loop E_Strings.Pop_Char (E_Str => Line, Char => Char); exit when Char = ')' or else E_Strings.Is_Empty (E_Str => Line); end loop; -- Skip whitespace loop E_Strings.Pop_Char (E_Str => Line, Char => Char); exit when Char /= ' ' or else E_Strings.Is_Empty (E_Str => Line); end loop; -- Char is first character of Error_Type E_Strings.Append_Char (E_Str => Error_Type, Ch => Char); -- Get rest of Error_Type (up to next ' ') loop E_Strings.Pop_Char (E_Str => Line, Char => Char); exit when Char = ' ' or else E_Strings.Is_Empty (E_Str => Line); E_Strings.Append_Char (E_Str => Error_Type, Ch => Char); end loop; -- Skip up to colon loop E_Strings.Pop_Char (E_Str => Line, Char => Char); exit when Char = ':' or else E_Strings.Is_Empty (E_Str => Line); end loop; -- Skip whitespace loop E_Strings.Pop_Char (E_Str => Line, Char => Char); exit when Char /= ' ' or else E_Strings.Is_Empty (E_Str => Line); end loop; -- Char is first character of Error_No E_Strings.Append_Char (E_Str => Error_No, Ch => Char); -- Get rest of Error_No (up to next ':') loop E_Strings.Pop_Char (E_Str => Line, Char => Char); exit when Char = ':' or else E_Strings.Is_Empty (E_Str => Line); E_Strings.Append_Char (E_Str => Error_No, Ch => Char); end loop; -- If an error occurred report this and show which line it occurred on. 
if Error or else E_Strings.Is_Empty (E_Str => Line) then ScreenEcho.Put_Line ("An error occurred while parsing the following error message line."); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Saved_Line); end if; Error_Type := E_Strings.Lower_Case (E_Str => Error_Type); -- Generate the output string. Out_String := E_Strings.Copy_String (Str => " Out_String, Str => Error_Reference_Filename); E_Strings.Append_String (E_Str => Out_String, Str => "#"); E_Strings.Append_Examiner_String (E_Str1 => Out_String, E_Str2 => Error_Type); E_Strings.Append_String (E_Str => Out_String, Str => "-"); E_Strings.Append_Examiner_String (E_Str1 => Out_String, E_Str2 => Error_No); E_Strings.Append_String (E_Str => Out_String, Str => """ TARGET=""rbottom"">"); Error_Link := Out_String; E_Strings.Append_Examiner_String (E_Str1 => Out_String, E_Str2 => Saved_Line); E_Strings.Append_String (E_Str => Out_String, Str => ""); Line := Out_String; end if; Line := HTML_Embolden (Line => Line); end Process_First_Error_Message_Line; -- This procedure processes subsequent error message lines. These contain -- word-wrapped portions of the error message, but no flash, error number -- and so on. We therefore need to use the tag stored in Saved_Error_Link -- as the link for this line. -- -- The line is assumed to have some spaces at the front of the it. Placing the -- tag before these turns the spaces into links which looks really silly, so -- the tag is placed immediately before the first non-space character. -- -- If Saved_Error_Link is Empty_String then no tag is added. procedure Process_Next_Error_Message_Line (Line : in out E_Strings.T; Link : in E_Strings.T) --# derives Line from *, --# Link; is Out_String : E_Strings.T; Char : Character; begin if not E_Strings.Is_Empty (E_Str => Line) then Out_String := E_Strings.Empty_String; -- Copy up to first non-space character loop E_Strings.Pop_Char (E_Str => Line, Char => Char); exit when Char /= ' '; E_Strings.Append_Char (E_Str => Out_String, Ch => Char); end loop; E_Strings.Append_Examiner_String (E_Str1 => Out_String, E_Str2 => Link); E_Strings.Append_Char (E_Str => Out_String, Ch => Char); E_Strings.Append_Examiner_String (E_Str1 => Out_String, E_Str2 => Line); E_Strings.Append_String (E_Str => Out_String, Str => ""); Line := Out_String; end if; Line := HTML_Embolden (Line => Line); end Process_Next_Error_Message_Line; -- -- ------------- -- Init_SPARK_HTML -- ------------- -- -- This procedure initialises the HTML output of the examiner. -- -- It is assumed that the HTML output flag has already been tested. The call to -- Init_SPARK_HTML should look something like: -- -- if CommandLineData.Content.HTML then -- SparkHTML.Init_SPARK_HTML; -- end if; -- -- The procedure does exactly the following: -- -- - creates a subdirectory of the current directory if one -- doesn't already exist. The directory name is given by -- CommandLineData.Content.HTML_Directory -- - prints a message to the screen informing the user that HTML output -- has been requested; -- - creates a top-level frames file in the HTML directory as described in -- the design; -- - creates an empty HTML file to fill the bottom frame (this is so that -- Netscape Navigator behaves correctly); -- - sets Generate_HTML to false if anything went wrong, to prevent further -- HTML generation. 
-- -- Error trapping: -- -- If anything unusual happens then the procedure will echo a message to the -- screen saying what went wrong and, in the case of errors which should stop -- HTML generation, Generate_HTML will be set to "false". -- -- Failed HTML initialisation should not affect the analysis. If intialisation -- fails, the Generate_HTML flag will stop calls to other SparkHTML routines -- from doing anything. It is therefore safe to make those calls. procedure Init_SPARK_HTML is New_Dir_OK : Boolean; Frame_File_OK : Boolean; Dir_Name : E_Strings.T; -- This procedure writes HTML content to the base frame file. -- The procedure does not open or close the frame file, it assumes -- that a file has been opened. -- The Frame_File parameter must be the handle of an open file. -- The Report_Filename parameter is the location of the file to -- go in the top frame, relative to the frame file. -- The Blankfile_Name parameter is the location of the file to go -- in the top frame, relative to the frame file. -- -- Error trapping: -- -- All error trapping is performed within SPARK_IO. The SPARK_IO -- procedures called do not return status information and so error -- trapping is not possible here. procedure Write_Frame_File (Frame_File : in SPARK_IO.File_Type; Report_Filename : in E_Strings.T; Blank_Filename : in E_Strings.T) --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Blank_Filename, --# CommandLineData.Content, --# Frame_File, --# Report_Filename; is Temp_Examiner_String : E_Strings.T; begin SPARK_IO.Put_Line (Frame_File, "", 59); SPARK_IO.Put_Line (Frame_File, "", 6); SPARK_IO.Put_Line (Frame_File, " ", 8); SPARK_IO.Put_Line (Frame_File, " SPARK HTML Output", 36); SPARK_IO.Put_Line (Frame_File, " ", 9); SPARK_IO.Put_Line (Frame_File, " ", 25); Temp_Examiner_String := E_Strings.Copy_String (Str => " Temp_Examiner_String, E_Str2 => FileSystem.Just_File (Fn => Report_Filename, Ext => True)); else E_Strings.Append_Examiner_String (E_Str1 => Temp_Examiner_String, E_Str2 => Report_Filename); end if; E_Strings.Append_String (E_Str => Temp_Examiner_String, Str => """>"); E_Strings.Put_Line (File => Frame_File, E_Str => Temp_Examiner_String); Temp_Examiner_String := E_Strings.Copy_String (Str => " Temp_Examiner_String, E_Str2 => Blank_Filename); E_Strings.Append_String (E_Str => Temp_Examiner_String, Str => """>"); E_Strings.Put_Line (File => Frame_File, E_Str => Temp_Examiner_String); SPARK_IO.Put_Line (Frame_File, " ", 13); SPARK_IO.Put_Line (Frame_File, "", 7); end Write_Frame_File; -- This procedure does the following: -- -- - creates a file called "spark.htm" in the HTML subdirectory -- of the current directory (the HTML directory is assumed to -- already exist), -- - derives the report file name from that held in -- CommandLineData.Content for inclusion in the frame file, -- - creates a blank file to fill the bottom frame, -- - calls Write_Frame_File to write the content of the base frame -- file, -- - closes the frame file and blank file, -- - returns a status value of True if no errors occurred, -- - returns a status value of False and displays a message to -- the user if an error occurred. -- -- Error Trapping: -- -- Where calls to procedures in SPARK_IO return status parameters the -- parameter is checked. If the status is not OK then a message is -- echoed to the screen to inform the user. The message does not -- display the type of error (but could in future) but explains what -- was happening when the error occurred. 
-- -- This procedure returns a status value itself. If the status value -- is False then it is suggested that no further HTML generation should -- take place. The caller should test the returned status and set the -- Generate_HTML flag appropriately. procedure Create_Frame_File (Success : out Boolean) --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys, --# Success from CommandLineData.Content, --# SPARK_IO.File_Sys; is Frame_File : SPARK_IO.File_Type; Blank_File : SPARK_IO.File_Type; Frame_File_Created_OK : SPARK_IO.File_Status; Frame_File_Closed_OK : SPARK_IO.File_Status; Blank_File_Created_OK : SPARK_IO.File_Status; Blank_File_Closed_OK : SPARK_IO.File_Status; HTML_Report_Filename : E_Strings.T; Frame_Filename : E_Strings.T; Blank_Filename : E_Strings.T; Full_Frame_Filename : E_Strings.T; Full_Blank_Filename : E_Strings.T; begin -- Initialise variables. Success := True; -- Set to false when fatal error occurs. Frame_File := SPARK_IO.Null_File; Blank_File := SPARK_IO.Null_File; Frame_Filename := E_Strings.Copy_String (Str => "spark.htm"); Blank_Filename := E_Strings.Copy_String (Str => "blank.htm"); -- These files both reside in the HTML directory so their -- names need to have the HTML directory name prepended. Full_Frame_Filename := FileSystem.Case_Of_Files_For_Create (E_Str => HTML_Filename (Filename => Frame_Filename)); Full_Blank_Filename := FileSystem.Case_Of_Files_For_Create (E_Str => HTML_Filename (Filename => Blank_Filename)); -- Get the name of the HTML report file for inclusion in the HTML. HTML_Report_Filename := E_Strings.Translate (E_Str => FileSystem.Just_File (CommandLineData.Content.Report_File_Name, True), From_Char => '.', To_Char => '_'); E_Strings.Append_String (E_Str => HTML_Report_Filename, Str => ".htm"); -- Create the frame file. E_Strings.Create (File => Frame_File, Name_Of_File => Full_Frame_Filename, Form_Of_File => "", Status => Frame_File_Created_OK); if Frame_File_Created_OK /= SPARK_IO.Ok then ScreenEcho.Put_Line ("An error occurred while attemping to create the HTML frame file."); Success := False; else E_Strings.Create (File => Blank_File, Name_Of_File => Full_Blank_Filename, Form_Of_File => "", Status => Blank_File_Created_OK); if Blank_File_Created_OK /= SPARK_IO.Ok then ScreenEcho.Put_Line ("An error occurred while attempting to create the blank HTML file."); -- There is not an else here as not being able to write the blank file is -- not fatal. Success remains true and we continue as we still have to -- close the Frame_File. end if; Write_Frame_File (Frame_File => Frame_File, Report_Filename => HTML_Report_Filename, Blank_Filename => Blank_Filename); --# accept Flow, 10, Frame_File, "Expected ineffective assignment to Frame_File"; SPARK_IO.Close (File => Frame_File, Status => Frame_File_Closed_OK); --# end accept; if Frame_File_Closed_OK /= SPARK_IO.Ok then ScreenEcho.Put_Line ("An error occurred while attempting to close the HTML frame file."); Success := False; end if; -- There is not an else here as we need to try and close the other file. --# accept Flow, 10, Blank_File, "Expected ineffective assignment to Blank_File"; SPARK_IO.Close (File => Blank_File, Status => Blank_File_Closed_OK); --# end accept; if Blank_File_Closed_OK /= SPARK_IO.Ok then ScreenEcho.Put_Line ("An error occurred while attempting to close the blank HTML file."); -- Not closing the blank file is non-fatal, Success remains true. 
end if; end if; end Create_Frame_File; -- Returns the name of the HTML subdirectory required for passing -- to FileSystem.IdempotentCreate_Subdir. function HTML_Sub_Dir_Name return E_Strings.T --# global in CommandLineData.Content; is Dir_Name : E_Strings.T; begin Dir_Name := FileSystem.Start_Of_Directory; if CommandLineData.Content.Output_Directory then E_Strings.Append_Examiner_String (E_Str1 => Dir_Name, E_Str2 => CommandLineData.Content.Output_Directory_Name); E_Strings.Append_Examiner_String (E_Str1 => Dir_Name, E_Str2 => FileSystem.Directory_Separator); end if; E_Strings.Append_Examiner_String (E_Str1 => Dir_Name, E_Str2 => CommandLineData.Content.HTML_Directory); -- IdempotentCreate_Subdir expects: -- on Unix/NT: just the name of the directory return Dir_Name; end HTML_Sub_Dir_Name; procedure Copy_Errors_File --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content; is Source_Filename : E_Strings.T; Source_Full_Filename : E_Strings.T; Source_File_Status : FileSystem.Typ_File_Spec_Status; Source_File : SPARK_IO.File_Type; Source_File_Open_OK : SPARK_IO.File_Status; Source_File_Closed_OK : SPARK_IO.File_Status; Dest_Filename : E_Strings.T; Dest_File : SPARK_IO.File_Type; Dest_File_Created_OK : SPARK_IO.File_Status; Dest_File_Closed_OK : SPARK_IO.File_Status; Copy_Buffer : E_Strings.T; begin Source_File := SPARK_IO.Null_File; Dest_File := SPARK_IO.Null_File; -- Start by trying to open output file. Dest_Filename := FileSystem.Case_Of_Files_For_Create (E_Str => HTML_Filename (Filename => E_Strings.Copy_String (Str => "errors.htm"))); E_Strings.Create (File => Dest_File, Name_Of_File => Dest_Filename, Form_Of_File => "", Status => Dest_File_Created_OK); if Dest_File_Created_OK /= SPARK_IO.Ok then ScreenEcho.Put_Line ("An error occurred while trying to create the HTML errors file."); else -- Now locate the input file. Source_Filename := FileSystem.Examiner_Lib_Directory; E_Strings.Append_Examiner_String (E_Str1 => Source_Filename, E_Str2 => FileSystem.Directory_Separator); E_Strings.Append_String (E_Str => Source_Filename, Str => "errors.htm"); FileSystem.Find_Full_File_Name (File_Spec => Source_Filename, File_Status => Source_File_Status, Full_File_Name => Source_Full_Filename); if Source_File_Status /= FileSystem.File_Found then -- Output simple message to destination file. SPARK_IO.Put_Line (Dest_File, "Sorry, could not locate errors.htm.", 35); --# accept Flow, 10, Dest_File_Closed_OK, "Expected ineffective assignment to Dest_File_Closed_OK"; SPARK_IO.Close (Dest_File, Dest_File_Closed_OK); -- Unused variable Dest_File_Closed_OK --Ignore error on close. --# end accept; else -- Open the file and copy it. 
E_Strings.Open (File => Source_File, Mode_Of_File => SPARK_IO.In_File, Name_Of_File => Source_Full_Filename, Form_Of_File => "", Status => Source_File_Open_OK); if Source_File_Open_OK /= SPARK_IO.Ok then ScreenEcho.Put_Line ("An error occurred while opening the HTML errors file"); else while not SPARK_IO.End_Of_File (Source_File) loop E_Strings.Get_Line (File => Source_File, E_Str => Copy_Buffer); E_Strings.Put_Line (File => Dest_File, E_Str => Copy_Buffer); end loop; end if; --# accept Flow, 10, Source_File, "Expected ineffective assignment to Source_File" & --# Flow, 10, Source_File_Closed_OK, "Expected ineffective assignment to Source_File_Closed_OK"; SPARK_IO.Close (Source_File, Source_File_Closed_OK); --# end accept; end if; end if; --# accept Flow, 10, Dest_File, "Expected ineffective assignment to Dest_File" & --# Flow, 10, Dest_File_Closed_OK, "Expected ineffective assignment to Dest_File_Closed_OK"; SPARK_IO.Close (Dest_File, Dest_File_Closed_OK); --# end accept; --# accept Flow, 33, Source_File_Closed_OK, "Expected Source_File_Closed_OK to be neither referenced nor exported" & --# Flow, 33, Dest_File_Closed_OK, "Expected Dest_File_Closed_OK to be neither referenced nor exported"; end Copy_Errors_File; -- 2 Flow errors OK begin -- Init_SPARK_HTML -- Initialise working directories - ensure trailing slashes for use in -- relative filename procedures. SPARK_Work_Dir := FileSystem.Working_Directory; -- Ensure trailing separator. if E_Strings.Get_Element (E_Str => SPARK_Work_Dir, Pos => E_Strings.Get_Length (E_Str => SPARK_Work_Dir)) /= E_Strings.Get_Element (E_Str => FileSystem.Directory_Separator, Pos => E_Strings.Get_Length (E_Str => FileSystem.Directory_Separator)) then E_Strings.Append_Examiner_String (E_Str1 => SPARK_Work_Dir, E_Str2 => FileSystem.Directory_Separator); end if; if CommandLineData.Content.Output_Directory then HTML_Work_Dir := Append_Directory_String (Path => FileSystem.Working_Directory, Dir => CommandLineData.Content.Output_Directory_Name); else HTML_Work_Dir := FileSystem.Working_Directory; end if; HTML_Work_Dir := Append_Directory_String (Path => HTML_Work_Dir, Dir => CommandLineData.Content.HTML_Directory); -- Get name for creating subdirectory. Dir_Name := HTML_Sub_Dir_Name; -- Create subdirectory. FileSystem.Idempotent_Create_Subdirectory (Path => Dir_Name, Ok => New_Dir_OK); Copy_Errors_File; Create_Frame_File (Success => Frame_File_OK); if not (New_Dir_OK and Frame_File_OK) then ScreenEcho.Put_Line ("Error ocurred while initialising HTML generation."); ScreenEcho.Put_Line ("No further HTML generation will occur."); Generate_HTML := False; -- Suppress further HTML generation. end if; -- flow errors expected due to false coupling through SPARK_IO -- and File_Status not being used. end Init_SPARK_HTML; --------------------------------------------------------------------------- -- -- ------------- -- Gen_Report_HTML -- ------------- -- -- This procedure generates the HTMLified report file from the plain text -- report file. -- -- It is assumed that the HTML output flag has already been tested. The call to -- Gen_Report_HTML should look something like: -- -- if CommandLineData.Content.HTML then -- SparkHTML.Gen_Report_HTML; -- end if; -- -- The checking of the Generate_HTML flag is done internally. -- -- If the Generate_HTML flag is false this procedure does nothing, otherwise, -- it does exactly the following: -- -- - creates an HTML file (named .htm where is the name -- of the report file with all '.' 
characters changed to '_' characters; -- - processes each line of the plain text report file using the -- Process_Report_Line procedure (the functionality of this is described -- at the declaration of Process_Report_Line); -- - writes the processed lines to the HTML report file; -- - sets Generate_HTML to false if anything goes wrong, to prevent further -- HTML generation. -- -- Error trapping: -- -- The HTML report file is essentially a guide to the HTMLified listings and -- so if something causes the HTML report file generation to fail we should -- suppress all following HTML generation by settign Generate_HTML to "false". -- -- If anything unusual happens then the procedure will echo a message to the -- screen saying what went wrong and, in the case of errors which should stop -- HTML generation, Generate_HTML will be set to "false". -- procedure Gen_Report_HTML is HTML_Report_File : SPARK_IO.File_Type; HTML_Report_Filename : E_Strings.T; HTML_Report_File_Created_OK : SPARK_IO.File_Status; HTML_Report_File_Closed_OK : SPARK_IO.File_Status; Plain_Report_File : SPARK_IO.File_Type; Plain_Report_Filename : E_Strings.T; Plain_Report_File_Open_OK : SPARK_IO.File_Status; Plain_Report_File_Closed_OK : SPARK_IO.File_Status; Line_Buffer : E_Strings.T; Lookahead_Buffer : E_Strings.T; Saved_Error_Link : E_Strings.T; Saved_Listing_File : E_Strings.T; Report_File_State : Report_File_States := Report_Just_Started; -- Sub Programs -- ------------ -- This sub-procedure writes HTML content to the start of the report file. -- It assumes that the file handle HTML_Report_File is the open HTML -- report file. This procedure will not open or close the file. -- -- The HTML written specifies the title of the page and some formatting -- tags. The formatting is
<PRE> (pre-processed text) which displays
      -- text exactly as given, and <TT> which sets the typewriter-text font;
      -- I use this because it usually results in a fixed-width font being
      -- used.
      --
      -- When writing the end of the report file these tags need to be closed,
      -- as do the <BODY> and <HTML> tags.  This should be done by calling
      -- the Write_HTML_Report_Footer procedure.
      --
      -- Error trapping:
      --
      -- All error trapping is performed within SPARK_IO.
      -- The SPARK_IO routines called do not return status parameters and so
      -- no error trapping can be done here.

      procedure Write_HTML_Report_Header
      --# global in     HTML_Report_File;
      --#        in out SPARK_IO.File_Sys;
      --# derives SPARK_IO.File_Sys from *,
      --#                                HTML_Report_File;
      is
      begin
         SPARK_IO.Put_Line (HTML_Report_File, "", 56);
         SPARK_IO.Put_Line (HTML_Report_File, "", 6);
         SPARK_IO.Put_Line (HTML_Report_File, "  ", 8);
         SPARK_IO.Put_Line (HTML_Report_File, "    Examiner HTML Report File", 44);
         SPARK_IO.Put_Line (HTML_Report_File, "  ", 9);
         SPARK_IO.Put_Line (HTML_Report_File, "  ", 8);
         SPARK_IO.Put_Line (HTML_Report_File, "    
", 9);
         SPARK_IO.Put_Line (HTML_Report_File, "      ", 10);
      end Write_HTML_Report_Header;
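
      -- Illustrative aside (not part of the Examiner sources): the same
      -- open-in-order / close-in-reverse-order wrapping pattern, written
      -- against Ada.Text_IO instead of SPARK_IO so that it compiles
      -- stand-alone.  The unit name Report_Wrapper_Sketch and the file name
      -- "sketch.htm" are assumptions made for this example.

      with Ada.Text_IO;

      procedure Report_Wrapper_Sketch is
         use Ada.Text_IO;
         F : File_Type;
      begin
         Create (F, Out_File, "sketch.htm");

         -- Header: open the formatting tags.
         Put_Line (F, "<HTML>");
         Put_Line (F, "  <HEAD>");
         Put_Line (F, "    <TITLE>Examiner HTML Report File</TITLE>");
         Put_Line (F, "  </HEAD>");
         Put_Line (F, "  <BODY>");
         Put_Line (F, "    <PRE>");
         Put_Line (F, "      <TT>");

         -- Body: the processed report lines go here, one per line.
         Put_Line (F, "        ...report lines...");

         -- Footer: close the tags in the reverse order they were opened.
         Put_Line (F, "      </TT>");
         Put_Line (F, "    </PRE>");
         Put_Line (F, "  </BODY>");
         Put_Line (F, "</HTML>");

         Close (F);
      end Report_Wrapper_Sketch;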

      -- This subprocedure writes HTML content to the end of the report file.
      -- It assumes that the file handle HTML_Report_File is the open HTML
      -- report file.  This procedure will not open or close the file.
      --
      -- The HTML written closes all the formatting tags that were opened
      -- by the call to Write_HTML_Report_Header.
      --
      -- Error trapping:
      --
      -- All error trapping is performed within SPARK_IO.
      -- The SPARK_IO routines called do not return status parameters and so
      -- no error trapping can be done here.

      procedure Write_HTML_Report_Footer
      --# global in     HTML_Report_File;
      --#        in out SPARK_IO.File_Sys;
      --# derives SPARK_IO.File_Sys from *,
      --#                                HTML_Report_File;
      is
      begin
         SPARK_IO.Put_Line (HTML_Report_File, "      ", 11);
         SPARK_IO.Put_Line (HTML_Report_File, "    
", 10); SPARK_IO.Put_Line (HTML_Report_File, " ", 9); SPARK_IO.Put_Line (HTML_Report_File, "", 7); end Write_HTML_Report_Footer; -- This procedure is used to convert a line of the report file into HTML. -- It is effectively a parser for the report file. -- -- The procedure design is based on a state machine. The global variable -- Report_File_State records our current location within the report file (in -- terms of what information has already been read). -- -- Given a line, we can determine the line's meaning from our current state -- and the contents of the line (all blank lines are ignored). For example, -- if we are reading the warning list and we discover a line starting with -- the string "Source Filename(s) used were:" then we know that we are now -- processing the source file list. -- -- We can use this method to parse almost all the information in the source -- file. Those bits that we don't parse are usually optional (such as the -- flag "rtc" in the options list) and require no translation to HTML anyway. -- -- Once the procedure understands what a line represents it updates the -- Report_File_State and processes the line by calling an appropriate -- subroutine. -- -- The procedure contains a debugging feature which reports each line that it -- finds to the screen along with a message for each line that it recognises. -- This only happens if the -debug switch is given on the commandline. procedure Process_Report_Line (Line : in out E_Strings.T; Lookahead : in E_Strings.T) --# global in CommandLineData.Content; --# in HTML_Work_Dir; --# in SPARK_Work_Dir; --# in out Report_File_State; --# in out Saved_Error_Link; --# in out Saved_Listing_File; --# in out SPARK_IO.File_Sys; --# derives Line from *, --# CommandLineData.Content, --# HTML_Work_Dir, --# Lookahead, --# Report_File_State, --# Saved_Error_Link, --# Saved_Listing_File, --# SPARK_Work_Dir & --# Report_File_State from *, --# Line & --# Saved_Error_Link from *, --# Line, --# Lookahead, --# Report_File_State & --# Saved_Listing_File from *, --# CommandLineData.Content, --# Line, --# Report_File_State & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Line, --# Lookahead, --# Report_File_State; is Start_Pos : Integer; Compare_String : E_Strings.T; Debug_Message : E_Strings.T; -- This function takes a filename and some text as input and returns the text -- enclosed in HTML tags that form a link to the file relative to the -- specified "Relative_To" directory. -- -- A filename beginning with an @ symbol is interpreted as a metafile and -- the @ is removed from the location placed in the tag. -- -- Error checking is assumed to occur within the string handling routines. -- If the HTML_Relative_File procedure can not generate the URI then -- the link is not added and the file name returned, followed by a -- message saying "[Link unavailable]". function Create_File_Link_Tag_With_Text (Filename_In : E_Strings.T; Relative_To : E_Strings.T; Text : E_Strings.T) return E_Strings.T --# global in SPARK_Work_Dir; is Out_String : E_Strings.T; Filename : E_Strings.T; Location : E_Strings.T; Success : Boolean; begin Filename := Filename_In; -- The Out_String is the complete string that will be output including -- the tags and the original filename. Out_String := E_Strings.Empty_String; -- Chop off the leading @ (if there is one). 
if E_Strings.Get_Element (E_Str => Filename, Pos => E_Strings.Positions'First) = '@' then Filename := E_Strings.Section (Filename, E_Strings.Positions'First + 1, E_Strings.Get_Length (E_Str => Filename) - 1); end if; -- Find the relative URI. HTML_Relative_File (File => Filename, Relative_To => Relative_To, File_Relative => Location, Success_Out => Success); if not Success then Out_String := Filename_In; E_Strings.Append_String (E_Str => Out_String, Str => " [Link unavailable] "); else -- Create the tag. E_Strings.Append_String (E_Str => Out_String, Str => " Out_String, E_Str2 => Location); E_Strings.Append_String (E_Str => Out_String, Str => """ TYPE=""x-text/spark"" TARGET=""rbottom"" >"); E_Strings.Append_Examiner_String (E_Str1 => Out_String, E_Str2 => Text); E_Strings.Append_String (E_Str => Out_String, Str => ""); end if; return Out_String; end Create_File_Link_Tag_With_Text; -- This function does the same as Create_File_Link_Tag_With_Text but the text -- used is the filename given. function Create_File_Link_Tag (Filename_In : E_Strings.T; Relative_To : E_Strings.T) return E_Strings.T --# global in SPARK_Work_Dir; is begin return Create_File_Link_Tag_With_Text (Filename_In => Filename_In, Relative_To => Relative_To, Text => Filename_In); end Create_File_Link_Tag; -- This file turns a line from the options section of the report file -- that specifies a file location and adds HTML tags to create a link -- to the file. -- -- An "Option Line" is defined to be a line from the options section -- of the report file. This line is of the form: -- -- = -- -- The procedure simply copies the line up to the '=' character -- and assumes that whatever follows the '=' is the filename. function Create_Option_File_Link (Line : E_Strings.T) return E_Strings.T --# global in HTML_Work_Dir; --# in SPARK_Work_Dir; is Temp_Line : E_Strings.T; I : E_Strings.Positions; Filename : E_Strings.T; File_Link : E_Strings.T; begin Temp_Line := E_Strings.Empty_String; Filename := E_Strings.Empty_String; I := E_Strings.Positions'First; -- Copy up to the '=' loop E_Strings.Append_Char (E_Str => Temp_Line, Ch => E_Strings.Get_Element (E_Str => Line, Pos => I)); exit when E_Strings.Get_Element (E_Str => Line, Pos => I) = '='; I := I + 1; end loop; -- Point to the first character of the filename. I := I + 1; -- Get the name of the index file by copying up to the end of line while I <= E_Strings.Get_Length (E_Str => Line) loop E_Strings.Append_Char (E_Str => Filename, Ch => E_Strings.Get_Element (E_Str => Line, Pos => I)); I := I + 1; end loop; -- Add HTML tag File_Link := Create_File_Link_Tag (Filename_In => Filename, Relative_To => HTML_Work_Dir); E_Strings.Append_Examiner_String (E_Str1 => Temp_Line, E_Str2 => File_Link); return Temp_Line; end Create_Option_File_Link; -- This function takes a line containing a number of spaces followed by -- a file name and puts HTML link tags in to form a link to the file. -- If the file starts with an @ symbol it is interpreted as a metafile -- and the @ is removed. -- -- This function is used to generate HTML for lines which just contain -- whitespace and filenames, such as the selected files list, index files -- list, and metafiles list. 
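-- Illustrative aside (not part of the Examiner sources): a stand-alone sketch
-- of the link construction just described, using standard Ada strings rather
-- than E_Strings and the relative-URI machinery.  The unit name
-- File_Link_Sketch, the helper Link_Line and the sample file name "@spark.smf"
-- are assumptions made for this example; the real code computes the HREF with
-- HTML_Relative_File rather than using the raw file name.

with Ada.Text_IO;
with Ada.Strings;
with Ada.Strings.Fixed;

procedure File_Link_Sketch is

   -- Keep the leading spaces outside the link so that they are not rendered
   -- as part of it, drop an optional '@' metafile marker, and use the file
   -- name as both the link target and the link text.
   function Link_Line (Line : String) return String is
      use Ada.Strings, Ada.Strings.Fixed;
      Trimmed : constant String := Trim (Line, Left);
      Indent  : constant String :=
        Line (Line'First .. Line'Last - Trimmed'Length);
      First   : Positive := Trimmed'First;
   begin
      if Trimmed'Length > 0 and then Trimmed (Trimmed'First) = '@' then
         First := Trimmed'First + 1;
      end if;
      return Indent
        & "<A HREF=""" & Trimmed (First .. Trimmed'Last)
        & """ TYPE=""x-text/spark"" TARGET=""rbottom"" >"
        & Trimmed (First .. Trimmed'Last) & "</A>";
   end Link_Line;

begin
   -- Prints:    <A HREF="spark.smf" TYPE="x-text/spark" TARGET="rbottom" >spark.smf</A>
   Ada.Text_IO.Put_Line (Link_Line ("   @spark.smf"));
end File_Link_Sketch;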
function Create_File_Link (Line : E_Strings.T) return E_Strings.T --# global in HTML_Work_Dir; --# in SPARK_Work_Dir; is Temp_Line : E_Strings.T; I : E_Strings.Positions; Filename : E_Strings.T; File_Link : E_Strings.T; begin Temp_Line := E_Strings.Empty_String; Filename := E_Strings.Empty_String; I := E_Strings.Positions'First; -- Copy whitespace up to the first character. loop exit when E_Strings.Get_Element (E_Str => Line, Pos => I) /= ' '; E_Strings.Append_Char (E_Str => Temp_Line, Ch => E_Strings.Get_Element (E_Str => Line, Pos => I)); I := I + 1; end loop; -- Get the name of the file by copying up to the end of line while I <= E_Strings.Get_Length (E_Str => Line) loop E_Strings.Append_Char (E_Str => Filename, Ch => E_Strings.Get_Element (E_Str => Line, Pos => I)); I := I + 1; end loop; -- Add HTML tag File_Link := Create_File_Link_Tag (Filename_In => Filename, Relative_To => HTML_Work_Dir); E_Strings.Append_Examiner_String (E_Str1 => Temp_Line, E_Str2 => File_Link); return Temp_Line; end Create_File_Link; -- This function is used to process a line from the source file list. -- The line is assumed to be some space characters followed by a -- file name. The function creates the file link as described in the -- commentary for Create_File_Link, and appends to it a link to the -- analysis section for that file. -- -- The link created is a link of the form: -- -- ... -- -- where html_name is the result of applying the HTML_Name function to -- the filename. -- -- The HTML_Name function must also be used to create the anchor that this -- link refers to. function Process_Source_List_Line (Line : E_Strings.T) return E_Strings.T --# global in HTML_Work_Dir; --# in SPARK_Work_Dir; is Out_String : E_Strings.T; Filename : E_Strings.T; I : E_Strings.Positions; begin I := First_Char (The_String => Line); Filename := E_Strings.Section (Line, I, (E_Strings.Get_Length (E_Str => Line) - I) + 1); Out_String := Create_File_Link (Line => Line); E_Strings.Append_String (E_Str => Out_String, Str => " [ Out_String, E_Str2 => HTML_Name (E_Str => Filename)); E_Strings.Append_String (E_Str => Out_String, Str => """>View analysis]"); return Out_String; end Process_Source_List_Line; -- This function processes a line from the main section of the report file -- of the form: -- -- some_text: file_name -- -- to produce a line of the form: -- -- some_text: file_link -- -- where: -- -- html_name is the result of applying the function HTML_Name to file_name -- to generate the anchor tag referenced from the source file -- list (see commentary for Process_Source_List_Line) -- file_link is the result of applying the function Create_File_Link_Tag to -- file_name to produce a link to the file -- -- This function should be used to produce HTML for the lines in the body -- of the report file that beign "Source Filename: " function Process_Source_Filename_Line (Line : E_Strings.T) return E_Strings.T --# global in HTML_Work_Dir; --# in SPARK_Work_Dir; is Out_String : E_Strings.T; I : E_Strings.Positions; Filename : E_Strings.T; Anchor : E_Strings.T; begin Out_String := E_Strings.Empty_String; I := E_Strings.Positions'First; -- Copy up to and including ':' loop E_Strings.Append_Char (E_Str => Out_String, Ch => E_Strings.Get_Element (E_Str => Line, Pos => I)); exit when E_Strings.Get_Element (E_Str => Line, Pos => I) = ':'; I := I + 1; end loop; -- Point to next character I := I + 1; -- Copy spaces up to first char of file name loop exit when E_Strings.Get_Element (E_Str => Line, Pos => I) /= ' '; E_Strings.Append_Char (E_Str => 
Out_String, Ch => E_Strings.Get_Element (E_Str => Line, Pos => I)); I := I + 1; end loop; -- Extract the filename Filename := E_Strings.Section (Line, I, (E_Strings.Get_Length (E_Str => Line) - I) + 1); -- Create the anchor tag Anchor := E_Strings.Copy_String (Str => " Anchor, E_Str2 => HTML_Name (E_Str => Filename)); E_Strings.Append_String (E_Str => Anchor, Str => """>"); -- Append the anchor tag to the output line E_Strings.Append_Examiner_String (E_Str1 => Out_String, E_Str2 => Anchor); -- Create the link to the source file and append that to the output line E_Strings.Append_Examiner_String (E_Str1 => Out_String, E_Str2 => Create_File_Link_Tag (Filename_In => Filename, Relative_To => HTML_Work_Dir)); return Out_String; end Process_Source_Filename_Line; -- This procedure processes a line from the main section of the report file -- of the form: -- -- some_text: file_name -- -- to produce a line of the form: -- -- some_text: file_link html_link -- -- where: -- -- file_link is the result of applying the function Create_File_Link_Tag to -- file_name to produce a link to the file -- -- html_link is the text "[View HTML]" enclosed in tags that make it a -- link to the HTML version of the above file. The HTML -- version is assumed to be the name (without directory) -- of the specified listing file, with all '.'s changed -- to '_'s, with a ".htm" extension and residing in the HTML -- subdirectory of the current directory; e.g. H:\foo\bar.lst -- becomes HTML/bar_lst.htm -- -- It also updates the value of Saved_Listing_File to be a reference to the -- HTML version of the listing for use in processing the source error lines. -- -- This function should be used to produce HTML for the lines in the body -- of the report file that beign "Listing Filename: " procedure Process_Listing_Filename_Line (Line : in out E_Strings.T) --# global in CommandLineData.Content; --# in HTML_Work_Dir; --# in SPARK_Work_Dir; --# out Saved_Listing_File; --# derives Line from *, --# CommandLineData.Content, --# HTML_Work_Dir, --# SPARK_Work_Dir & --# Saved_Listing_File from CommandLineData.Content, --# Line; is Out_String : E_Strings.T; I : E_Strings.Positions; Filename : E_Strings.T; HTML_File_Link : E_Strings.T; begin Out_String := E_Strings.Empty_String; I := E_Strings.Positions'First; -- Copy up to and including ':' loop E_Strings.Append_Char (E_Str => Out_String, Ch => E_Strings.Get_Element (E_Str => Line, Pos => I)); exit when E_Strings.Get_Element (E_Str => Line, Pos => I) = ':'; I := I + 1; end loop; -- Point to next character I := I + 1; -- Copy spaces up to first char of file name loop exit when E_Strings.Get_Element (E_Str => Line, Pos => I) /= ' '; E_Strings.Append_Char (E_Str => Out_String, Ch => E_Strings.Get_Element (E_Str => Line, Pos => I)); I := I + 1; end loop; -- Extract the filename Filename := E_Strings.Section (Line, I, (E_Strings.Get_Length (E_Str => Line) - I) + 1); -- Append link to plain text listing. 
E_Strings.Append_Examiner_String (E_Str1 => Out_String, E_Str2 => Create_File_Link_Tag (Filename_In => Filename, Relative_To => HTML_Work_Dir)); -- And add a space E_Strings.Append_Char (E_Str => Out_String, Ch => ' '); -- Create URL for HTML listing file in Saved_Listing_File Saved_Listing_File := CommandLineData.Content.HTML_Directory; E_Strings.Append_String (E_Str => Saved_Listing_File, Str => "/"); E_Strings.Append_Examiner_String (E_Str1 => Saved_Listing_File, E_Str2 => E_Strings.Translate (E_Str => FileSystem.Just_File (Fn => Filename, Ext => True), From_Char => '.', To_Char => '_')); E_Strings.Append_String (E_Str => Saved_Listing_File, Str => ".htm"); -- Create HTML tags. HTML_File_Link := Create_File_Link_Tag_With_Text (Filename_In => Saved_Listing_File, Relative_To => HTML_Work_Dir, Text => E_Strings.Copy_String (Str => "[View HTML]")); E_Strings.Append_Examiner_String (E_Str1 => Out_String, E_Str2 => HTML_File_Link); Line := Out_String; end Process_Listing_Filename_Line; -- *** Comment procedure Process_Error_Source_Line (Line : in out E_Strings.T) --# global in HTML_Work_Dir; --# in Saved_Listing_File; --# in SPARK_Work_Dir; --# derives Line from *, --# HTML_Work_Dir, --# Saved_Listing_File, --# SPARK_Work_Dir; is Out_String : E_Strings.T; I : E_Strings.Positions; Line_No : E_Strings.T; Relative_Listing : E_Strings.T; Link_Success : Boolean; Link : E_Strings.T; begin if E_Strings.Get_Length (E_Str => Saved_Listing_File) /= 0 then -- there is a listing file Out_String := E_Strings.Empty_String; I := E_Strings.Positions'First; Line_No := E_Strings.Empty_String; -- Copy up to the first non-space. loop exit when E_Strings.Get_Element (E_Str => Line, Pos => I) /= ' '; E_Strings.Append_Char (E_Str => Out_String, Ch => E_Strings.Get_Element (E_Str => Line, Pos => I)); I := I + 1; end loop; -- Copy digits to Line_No while Digit (C => E_Strings.Get_Element (E_Str => Line, Pos => I)) loop E_Strings.Append_Char (E_Str => Line_No, Ch => E_Strings.Get_Element (E_Str => Line, Pos => I)); I := I + 1; end loop; -- Create relative location of Saved_Listing_File HTML_Relative_File (File => Saved_Listing_File, Relative_To => HTML_Work_Dir, File_Relative => Relative_Listing, Success_Out => Link_Success); if Link_Success then -- Create link to listing based on saved listing file. Link := E_Strings.Copy_String (Str => " Link, E_Str2 => Relative_Listing); E_Strings.Append_String (E_Str => Link, Str => "#line"); E_Strings.Append_Examiner_String (E_Str1 => Link, E_Str2 => Line_No); E_Strings.Append_String (E_Str => Link, Str => """>"); E_Strings.Append_Examiner_String (E_Str1 => Link, E_Str2 => Line_No); E_Strings.Append_String (E_Str => Link, Str => ""); else -- The link is just the number. Link := Line_No; end if; -- Append link to Out_String E_Strings.Append_Examiner_String (E_Str1 => Out_String, E_Str2 => Link); -- Append rest of line E_Strings.Append_Examiner_String (E_Str1 => Out_String, E_Str2 => E_Strings.Section (Line, I, (E_Strings.Get_Length (E_Str => Line) - I) + 1)); Line := Out_String; end if; end Process_Error_Source_Line; begin -- Process_Report_Line if E_Strings.Get_Length (E_Str => Line) > 0 then Start_Pos := First_Char (The_String => Line); Debug_Message := E_Strings.Empty_String; if CommandLineData.Content.Debug.HTML then E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Line); -- Line used for debugging. 
end if; case Report_File_State is when Report_Just_Started => if E_Strings.Eq1_String (E_Str => E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 5), Str => "*****") then Report_File_State := Report_Banner_Started; Debug_Message := E_Strings.Copy_String (Str => "Report_Banner_Started"); end if; when Report_Banner_Started => if E_Strings.Eq1_String (E_Str => E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 5), Str => "*****") then Report_File_State := Report_Banner_Finished; Debug_Message := E_Strings.Copy_String (Str => "Report_Banner_Finished"); end if; when Report_Banner_Finished => if E_Strings.Eq1_String (E_Str => E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 7), Str => "DATE : ") then Report_File_State := Report_Date_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_Date_Found"); end if; when Report_Date_Found => if E_Strings.Eq1_String (E_Str => E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 8), Str => "Options:") then Report_File_State := Report_Options_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_Options_Found"); end if; when Report_Options_Found => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 11); if E_Strings.Eq1_String (E_Str => Compare_String, Str => CommandLineData.Option_Index_File & "=") then Report_File_State := Report_Index_File_Found; Line := Create_Option_File_Link (Line => Line); Debug_Message := E_Strings.Copy_String (Str => "Report_Index_File_Found"); elsif E_Strings.Eq_String (E_Str1 => Compare_String, E_Str2 => E_Strings.Section (E_Str => E_Strings.Copy_String (Str => "no" & CommandLineData.Option_Index_File), Start_Pos => E_Strings.Positions'First, Length => 11)) then Report_File_State := Report_Index_File_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_Index_File_Found"); end if; when Report_Index_File_Found => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 13); if E_Strings.Eq1_String (E_Str => Compare_String, Str => CommandLineData.Option_Warning_File & "=") then Report_File_State := Report_Warning_File_Found; Line := Create_Option_File_Link (Line => Line); Debug_Message := E_Strings.Copy_String (Str => "Report_Warning_File_Found"); elsif E_Strings.Eq_String (E_Str1 => Compare_String, E_Str2 => E_Strings.Section (E_Str => E_Strings.Copy_String (Str => "no" & CommandLineData.Option_Warning_File), Start_Pos => E_Strings.Positions'First, Length => 13)) then Report_File_State := Report_Warning_File_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_Warning_File_Found"); end if; when Report_Warning_File_Found => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 21); if E_Strings.Eq1_String (E_Str => Compare_String, Str => CommandLineData.Option_Target_Compiler_Data & "=") then Report_File_State := Report_Target_Compiler_Data_Found; Line := Create_Option_File_Link (Line => Line); Debug_Message := E_Strings.Copy_String (Str => "Report_Target_Compiler_Data_Found"); elsif E_Strings.Eq_String (E_Str1 => Compare_String, E_Str2 => E_Strings.Section (E_Str => E_Strings.Copy_String (Str => "no" & CommandLineData.Option_Target_Compiler_Data), Start_Pos => E_Strings.Positions'First, Length => 21)) then Report_File_State := Report_Target_Compiler_Data_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_Target_Compiler_Data_Found"); end if; when Report_Target_Compiler_Data_Found => Compare_String := E_Strings.Section (E_Str 
=> Line, Start_Pos => Start_Pos, Length => 12); if E_Strings.Eq1_String (E_Str => Compare_String, Str => CommandLineData.Option_Config_File & "=") then Report_File_State := Report_Target_Config_File_Found; Line := Create_Option_File_Link (Line => Line); Debug_Message := E_Strings.Copy_String (Str => "Report_Target_Config_File_Found"); elsif E_Strings.Eq_String (E_Str1 => Compare_String, E_Str2 => E_Strings.Section (E_Str => E_Strings.Copy_String (Str => "no" & CommandLineData.Option_Config_File), Start_Pos => E_Strings.Positions'First, Length => 12)) then Report_File_State := Report_Target_Config_File_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_Target_Config_File_Found"); end if; when Report_Target_Config_File_Found => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 17); if E_Strings.Eq1_String (E_Str => Compare_String, Str => CommandLineData.Option_Source_Extension & "=") then Report_File_State := Report_Source_Extension_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_Source_Extension_Found"); end if; when Report_Source_Extension_Found => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 18); if E_Strings.Eq1_String (E_Str => Compare_String, Str => CommandLineData.Option_Listing_Extension & "=") then Report_File_State := Report_Listing_Extension_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_Listing_Extension_Found"); end if; when Report_Listing_Extension_Found => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 12); if E_Strings.Eq_String (E_Str1 => Compare_String, E_Str2 => E_Strings.Section (E_Str => E_Strings.Copy_String (Str => CommandLineData.Option_Dictionary_File), Start_Pos => E_Strings.Positions'First, Length => 12)) then Report_File_State := Report_Dictionary_Found; Line := Create_Option_File_Link (Line => Line); Debug_Message := E_Strings.Copy_String (Str => "Report_Dictionary_Found"); elsif E_Strings.Eq1_String (E_Str => Compare_String, Str => CommandLineData.Option_No_Dictionary) then Report_File_State := Report_Dictionary_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_Dictionary_Found"); end if; when Report_Dictionary_Found => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 12); if E_Strings.Eq1_String (E_Str => Compare_String, Str => CommandLineData.Option_Report_File & "=") then Report_File_State := Report_Report_File_Found; Line := Create_Option_File_Link (Line => Line); Debug_Message := E_Strings.Copy_String (Str => "Report_Report_File_Found"); end if; -- NOTE: checking for "noreport_file" is a stupid thing to do! when Report_Report_File_Found => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 4); if E_Strings.Eq1_String (E_Str => Compare_String, Str => CommandLineData.Option_Html) then -- NOTE: checking for nohtml is an equally stupid thing to do! Report_File_State := Report_HTML_Flag_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_HTML_Flag_Found"); end if; when Report_HTML_Flag_Found => -- The RTC and VCG options appear here but as they are optional -- we shall ignore them. 
Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 10); if E_Strings.Eq1_String (E_Str => Compare_String, Str => CommandLineData.Option_Statistics) or else E_Strings.Eq_String (E_Str1 => Compare_String, E_Str2 => E_Strings.Section (E_Str => E_Strings.Copy_String (Str => "no" & CommandLineData.Option_Statistics), Start_Pos => E_Strings.Positions'First, Length => 10)) then Report_File_State := Report_Statistics_Flag_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_Statistics_Flag_Found"); end if; when Report_Statistics_Flag_Found => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 15); if E_Strings.Eq1_String (E_Str => Compare_String, Str => CommandLineData.Option_Fdl_Identifiers) or else E_Strings.Eq_String (E_Str1 => Compare_String, E_Str2 => E_Strings.Section (E_Str => E_Strings.Copy_String (Str => "no" & CommandLineData.Option_Fdl_Identifiers), Start_Pos => E_Strings.Positions'First, Length => 15)) then Report_File_State := Report_FDL_Identifiers_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_FDL_Identifiers_Found"); end if; when Report_FDL_Identifiers_Found => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 14); if E_Strings.Eq1_String (E_Str => Compare_String, Str => CommandLineData.Option_Flow_Analysis & "=") then Report_File_State := Report_Flow_Analysis_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_Flow_Analysis_Found"); end if; when Report_Flow_Analysis_Found => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 5); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "ada83") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "ada95") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "ada20") then Report_File_State := Report_Language_Option_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_Language_Option_Found"); end if; when Report_Language_Option_Found => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 21); if E_Strings.Eq1_String (E_Str => Compare_String, Str => CommandLineData.Option_Annotation_Character & "=") then Report_File_State := Report_Annotation_Character_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_Annotation_Character_Found"); end if; when Report_Annotation_Character_Found => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 15); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "Selected files:") then Report_File_State := Report_Selected_Files_Started; Debug_Message := E_Strings.Copy_String (Str => "Report_Selected_Files_Started"); end if; when Report_Selected_Files_Started => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 24); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "Index Filename(s) used w") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "No Index files were used") then Report_File_State := Report_Index_Files_Started; Debug_Message := E_Strings.Copy_String (Str => "Report_Index_Files_Started"); else Line := Create_File_Link (Line => Line); end if; when Report_Index_Files_Started => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 18); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "Meta File(s) used ") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "No Meta Files used") then Report_File_State := 
Report_Meta_Files_Started; Debug_Message := E_Strings.Copy_String (Str => "Report_Meta_Files_Started"); else Line := Create_File_Link (Line => Line); end if; when Report_Meta_Files_Started => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 22); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "Full warning reporting") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "Summary warning report") then Report_File_State := Report_Warning_List_Started; Debug_Message := E_Strings.Copy_String (Str => "Report_Warning_List_Started"); else Line := Create_File_Link (Line => Line); end if; when Report_Warning_List_Started => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 26); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "Target configuration file:") then Report_File_State := Report_Target_Config_List_Started; Debug_Message := E_Strings.Copy_String (Str => "Report_Target_Config_List_Started"); elsif E_Strings.Eq1_String (E_Str => Compare_String, Str => "Source Filename(s) used we") then Report_File_State := Report_Source_List_Started; Debug_Message := E_Strings.Copy_String (Str => "Report_Source_List_Started"); end if; when Report_Target_Config_List_Started => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 4); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "Line") then Report_File_State := Report_Target_Error_Line; Debug_Message := E_Strings.Copy_String (Str => "Report_Target_Error_Line"); end if; when Report_Target_Error_Line => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 3); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "!!!") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "***") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "---") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "???") then Process_First_Error_Message_Line (Line => Line, Lookahead => Lookahead, Error_Link => Saved_Error_Link); Report_File_State := Report_Target_Error_Next_Line; Debug_Message := E_Strings.Copy_String (Str => "Report_Target_Error_Next_Line"); else Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 29); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "Source Filename(s) used were:") then Report_File_State := Report_Source_List_Started; Debug_Message := E_Strings.Copy_String (Str => "Report_Source_List_Started"); end if; end if; when Report_Target_Error_Next_Line => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 29); if Start_Pos > 10 then -- Extra lines of the error message will be indented by 11 characters Process_Next_Error_Message_Line (Line => Line, Link => Saved_Error_Link); elsif E_Strings.Eq1_String (E_Str => Compare_String, Str => "Source Filename(s) used were:") then Report_File_State := Report_Source_List_Started; Debug_Message := E_Strings.Copy_String (Str => "Report_Source_List_Started"); else Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 3); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "!!!") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "***") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "---") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "???") then Process_First_Error_Message_Line (Line => Line, Lookahead => Lookahead, Error_Link => Saved_Error_Link); Report_File_State := 
Report_Target_Error_Next_Line; Debug_Message := E_Strings.Copy_String (Str => "Report_Target_Error_Next_Line"); else Report_File_State := Report_Target_Error_Line; Debug_Message := E_Strings.Copy_String (Str => "Report_Target_Error_Line"); end if; end if; when Report_Source_List_Started => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 16); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "Source Filename:") then Line := Process_Source_Filename_Line (Line => Line); Report_File_State := Report_Source_File_Started; Debug_Message := E_Strings.Copy_String (Str => "Report_Source_File_Started"); elsif E_Strings.Eq1_String (E_Str => Compare_String, Str => "The following we") then Report_File_State := Report_Missing_Files_Started; Debug_Message := E_Strings.Copy_String (Str => "Report_Missing_Files_Started"); else Line := Process_Source_List_Line (Line => Line); end if; when Report_Missing_Files_Started => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 16); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "Source Filename:") then Line := Process_Source_Filename_Line (Line => Line); Report_File_State := Report_Source_File_Started; Debug_Message := E_Strings.Copy_String (Str => "Report_Source_File_Started"); end if; when Report_Source_File_Started => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 15); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "Listing Filenam") then Process_Listing_Filename_Line (Line => Line); Report_File_State := Report_Listing_Filename_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_Listing_Filename_Found"); elsif E_Strings.Eq1_String (E_Str => Compare_String, Str => "No Listing File") then Saved_Listing_File := E_Strings.Empty_String; Report_File_State := Report_Listing_Filename_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_Listing_Filename_Found"); elsif E_Strings.Eq1_String (E_Str => Compare_String, Str => "*** no unit") then Report_File_State := Report_No_Units_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_No_Units_Found"); end if; when Report_Listing_Filename_Found => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 10); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "Unit name:") then Report_File_State := Report_Unit_Name_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_Unit_Name_Found"); elsif E_Strings.Eq1_String (E_Str => Compare_String, Str => "*** no") then Report_File_State := Report_No_Units_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_No_Units_Found"); end if; when Report_Unit_Name_Found => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 10); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "Unit type:") then Report_File_State := Report_Unit_Type_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_Unit_Type_Found"); end if; when Report_Unit_Type_Found => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 22); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "Unit has been analysed") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "Unit has been parsed o") then Report_File_State := Report_Analysed_Message_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_Analysed_Message_Found"); end if; when Report_Analysed_Message_Found | Report_No_Units_Found => Compare_String := 
E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 10); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "No errors ") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "No summari") then Report_File_State := Report_End_Of_Errors; Debug_Message := E_Strings.Copy_String (Str => "Report_End_Of_Errors"); elsif E_Strings.Eq1_String (E_Str => Compare_String, Str => "Unit name:") then Report_File_State := Report_Unit_Name_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_Unit_Name_Found"); elsif E_Strings.Eq1_String (E_Str => Compare_String, Str => "*** Un") then Report_File_State := Report_End_Of_Errors; Debug_Message := E_Strings.Copy_String (Str => "Report_End_Of_Errors"); else Compare_String := E_Strings.Section (Line, E_Strings.Get_Length (E_Str => Line) - 9, 10); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "warning(s)") then Report_File_State := Report_Start_Of_Errors; Debug_Message := E_Strings.Copy_String (Str => "Report_Start_Of_Errors"); end if; end if; when Report_Start_Of_Errors => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 4); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "Line") then Report_File_State := Report_Line_Header_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_Line_Header_Found"); end if; when Report_Line_Header_Found => Process_Error_Source_Line (Line => Line); Report_File_State := Report_Error_Source_Line_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_Error_Source_Line_Found"); when Report_Error_Source_Line_Found => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 1); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "^") then -- Process Error Pointer Line (do nothing?) 
Report_File_State := Report_Error_Source_Pointer_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_Error_Source_Pointer_Found"); else -- Some errors don't have pointers Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 3); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "!!!") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "***") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "---") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "???") then Process_First_Error_Message_Line (Line => Line, Lookahead => Lookahead, Error_Link => Saved_Error_Link); Report_File_State := Report_Error_Message_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_Error_Message_Found"); end if; end if; when Report_Error_Source_Pointer_Found => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 3); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "!!!") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "***") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "---") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "???") then Process_First_Error_Message_Line (Line => Line, Lookahead => Lookahead, Error_Link => Saved_Error_Link); Report_File_State := Report_Error_Message_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_Error_Message_Found"); end if; when Report_Error_Message_Found | Report_End_Of_Errors => if Start_Pos > 10 then -- Extra lines of the error message will be indented by 11 characters Process_Next_Error_Message_Line (Line => Line, Link => Saved_Error_Link); Debug_Message := E_Strings.Copy_String (Str => "Report_Next_Error_Message_Line_Found"); else Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 16); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "Source Filename:") then Line := Process_Source_Filename_Line (Line => Line); Report_File_State := Report_Source_File_Started; Debug_Message := E_Strings.Copy_String (Str => "Report_Source_File_Started"); elsif E_Strings.Eq1_String (E_Str => Compare_String, Str => "No summarized wa") then Report_File_State := Report_End_Of_Errors; Debug_Message := E_Strings.Copy_String (Str => "Report_End_Of_Errors_Found"); elsif E_Strings.Eq1_String (E_Str => Compare_String, Str => "--End of file---") then Report_File_State := Report_End_Of_Report_File; Debug_Message := E_Strings.Copy_String (Str => "Report_End_Of_Report_File"); elsif E_Strings.Eq1_String (E_Str => Compare_String, Str => "Expected message") then Report_File_State := Report_Justifications_Summary_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_Justifications_Summary_Found"); else -- next error source line found, next error found or -- summarized warnings Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 3); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "!!!") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "***") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "---") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "???") then Process_First_Error_Message_Line (Line => Line, Lookahead => Lookahead, Error_Link => Saved_Error_Link); Report_File_State := Report_Error_Message_Found; Debug_Message := E_Strings.Copy_String (Str => "Report_Error_Message_Found"); else -- error source line or summarized warnings if E_Strings.Get_Length (E_Str => Line) > 9 then Compare_String := 
E_Strings.Section (Line, E_Strings.Get_Length (E_Str => Line) - 9, 10); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "omprising:") then Report_File_State := Report_Summarized_Warnings_Found; else Report_File_State := Report_Error_Source_Line_Found; end if; else Report_File_State := Report_Error_Source_Line_Found; end if; if Report_File_State = Report_Summarized_Warnings_Found then Debug_Message := E_Strings.Copy_String (Str => "Report_Summarized_Warnings_Found"); else Process_Error_Source_Line (Line => Line); Debug_Message := E_Strings.Copy_String (Str => "Report_Next_Error_Source_Line_Found"); end if; end if; end if; end if; when Report_Justifications_Summary_Found => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 16); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "Source Filename:") then Line := Process_Source_Filename_Line (Line => Line); Report_File_State := Report_Source_File_Started; Debug_Message := E_Strings.Copy_String (Str => "Report_Source_File_Started"); elsif E_Strings.Eq1_String (E_Str => Compare_String, Str => "No summarized wa") then Report_File_State := Report_End_Of_Errors; Debug_Message := E_Strings.Copy_String (Str => "Report_End_Of_Errors_Found"); elsif E_Strings.Eq1_String (E_Str => Compare_String, Str => "--End of file---") then Report_File_State := Report_End_Of_Report_File; Debug_Message := E_Strings.Copy_String (Str => "Report_End_Of_Report_File"); else -- Here, we could process the justification table line to include -- HTML links the relevant source files, lines of code, and error -- messages. TBD! -- Both "Brief" and "Full" justifications mode need to be dealt with here. null; end if; when Report_Summarized_Warnings_Found => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 7); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "(*Note:") then Report_File_State := Report_End_Of_Errors; Debug_Message := E_Strings.Copy_String (Str => "Report_End_Of_Errors_Found"); elsif E_Strings.Eq1_String (E_Str => Compare_String, Str => "Source ") then Line := Process_Source_Filename_Line (Line => Line); Report_File_State := Report_Source_File_Started; Debug_Message := E_Strings.Copy_String (Str => "Report_Source_File_Started"); end if; when Report_Blank_After_Error_Found | Report_End_Of_Report_File => ScreenEcho.Put_Line ("An error occurred during HTML report file generation: Invalid Report_File_State"); end case; if CommandLineData.Content.Debug.HTML then E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Debug_Message); -- Line used for debugging. end if; end if; end Process_Report_Line; begin -- Gen_Report_HTML -- Do nothing if previous HTML generation failed in some way. if Generate_HTML then -- Inform user that HTML generation is taking place. if CommandLineData.Content.Echo and not CommandLineData.Content.Brief then ScreenEcho.Echo (E_Strings.Copy_String (Str => "Generating report file HTML")); end if; -- Initialise Saved_Error_Link and Saved_Listing_File. Saved_Error_Link := E_Strings.Empty_String; Saved_Listing_File := E_Strings.Empty_String; -- Initialise file handles. 
HTML_Report_File := SPARK_IO.Null_File; Plain_Report_File := SPARK_IO.Null_File; -- Generate filename of the form HTML/.htm Plain_Report_Filename := FileSystem.Just_File (CommandLineData.Content.Report_File_Name, True); HTML_Report_Filename := E_Strings.Translate (E_Str => Plain_Report_Filename, From_Char => '.', To_Char => '_'); E_Strings.Append_String (E_Str => HTML_Report_Filename, Str => ".htm"); HTML_Report_Filename := FileSystem.Case_Of_Files_For_Create (E_Str => HTML_Filename (Filename => HTML_Report_Filename)); -- Create HTML report file. E_Strings.Create (File => HTML_Report_File, Name_Of_File => HTML_Report_Filename, Form_Of_File => "", Status => HTML_Report_File_Created_OK); -- Check for errors. Stop HTML generation if create failed. if HTML_Report_File_Created_OK /= SPARK_IO.Ok then ScreenEcho.Put_Line ("An error occurred while creating the HTML report file."); ScreenEcho.Put_Line ("No further HTML generation will occur."); Generate_HTML := False; else -- file created successfully. -- Open report file for input CommandLineData.Normalize_File_Name_To_Output_Directory (F => Plain_Report_Filename); -- Open report file for input E_Strings.Open (File => Plain_Report_File, Mode_Of_File => SPARK_IO.In_File, Name_Of_File => Plain_Report_Filename, Form_Of_File => "", Status => Plain_Report_File_Open_OK); -- Check for errors. Stop HTML generation if open failed. if Plain_Report_File_Open_OK /= SPARK_IO.Ok then ScreenEcho.Put_Line ("An error occurred while opening the report file for HTML generation."); ScreenEcho.Put_Line ("No further HTML generation will occur."); Generate_HTML := False; else -- file opened successfully. Write_HTML_Report_Header; -- Fill Line_Buffer and Lookahead_Buffer if not SPARK_IO.End_Of_File (Plain_Report_File) then E_Strings.Get_Line (File => Plain_Report_File, E_Str => Line_Buffer); Line_Buffer := Convert_Special_HTML_Chars (Line => Line_Buffer); if not SPARK_IO.End_Of_File (Plain_Report_File) then E_Strings.Get_Line (File => Plain_Report_File, E_Str => Lookahead_Buffer); Lookahead_Buffer := Convert_Special_HTML_Chars (Line => Lookahead_Buffer); -- Process first line Process_Report_Line (Line => Line_Buffer, Lookahead => Lookahead_Buffer); -- Write line and process rest of file. loop E_Strings.Put_Line (File => HTML_Report_File, E_Str => Line_Buffer); if SPARK_IO.End_Of_File (Plain_Report_File) then -- Process and output the lookahead buffer. --# accept Flow, 10, Saved_Error_Link, "Expected ineffective assignment to Saved_Error_Link" & --# Flow, 10, Saved_Listing_File, "Expected ineffective assignment to Saved_Listing_File" & --# Flow, 10, Report_File_State, "Expected ineffective assignment to Report_File_State"; Process_Report_Line (Line => Lookahead_Buffer, Lookahead => E_Strings.Empty_String); --# end accept; -- this is the last call and so the saved values will not be used. E_Strings.Put_Line (File => HTML_Report_File, E_Str => Lookahead_Buffer); exit; end if; Line_Buffer := Lookahead_Buffer; E_Strings.Get_Line (File => Plain_Report_File, E_Str => Lookahead_Buffer); Lookahead_Buffer := Convert_Special_HTML_Chars (Line => Lookahead_Buffer); Process_Report_Line (Line => Line_Buffer, Lookahead => Lookahead_Buffer); end loop; end if; end if; Write_HTML_Report_Footer; -- Close input report file. --# accept Flow, 10, Plain_Report_File, "Expected ineffective assignment to Plain_Report_File"; SPARK_IO.Close (File => Plain_Report_File, Status => Plain_Report_File_Closed_OK); --# end accept; -- Check for errors. Stop HTML generation if close failed. 
if Plain_Report_File_Closed_OK /= SPARK_IO.Ok then ScreenEcho.Put_Line ("An error occurred while closing the report file after HTML generation."); ScreenEcho.Put_Line ("No further HTML generation will occur."); Generate_HTML := False; end if; -- We don't use an else here as we need to try and close the HTML file too. -- Close HTML output file. --# accept Flow, 10, HTML_Report_File, "Expected ineffective assignment to HTML_Report_File"; SPARK_IO.Close (File => HTML_Report_File, Status => HTML_Report_File_Closed_OK); --# end accept; -- Check for errors. Stop HTML generation if close failed. if HTML_Report_File_Closed_OK /= SPARK_IO.Ok then ScreenEcho.Put_Line ("An error occurred while closing the HTML report file after HTML generation."); ScreenEcho.Put_Line ("No further HTML generation will occur."); Generate_HTML := False; end if; end if; end if; end if; --# accept Flow, 601, Generate_HTML, HTML_Work_Dir, "False coupling in SPARK_IO" & --# Flow, 601, Generate_HTML, SPARK_Work_Dir, "False coupling in SPARK_IO"; end Gen_Report_HTML; -- Flow errors expected due to false coupling in SPARK_IO. ----------------------------------------------------------------------------- -- -- ------------- -- Gen_Listing_HTML -- ------------- -- -- This procedure generates the HTMLified listing file from the plain text -- listing file. -- -- It is assumed that the HTML output flag has already been tested. The call to -- Gen_Listing_HTML should look something like: -- -- if CommandLineData.Content.HTML then -- SparkHTML.Gen_Listing_HTML; -- end if; -- -- The checking of the Generate_HTML flag is done internally. -- -- If the Generate_HTML flag is false this procedure does nothing, otherwise, -- it does exactly the following: -- -- - creates an HTML file (named .htm where is the name -- of the listing file associated with the file descriptor passed as a -- parameter with all '.' characters changed to '_' characters; -- - processes each line of the plain text listing file using the -- Process_Listing_Line procedure (the functionality of this is described -- at the declaration of Process_Listing_Line); -- - writes the processed lines to the HTML listing file; -- -- Error trapping: -- -- Incorrect generation of a listing files should not affect further HTML generation. -- In fact, it is better that we try and generate HTML for as many listing files as -- possible. So if HTML generation fails in this procedure the Generate_HTML flag is -- not set to false. -- procedure Gen_Listing_HTML (File_Descriptor : in ContextManager.FileDescriptors) is Message : E_Strings.T; Saved_Error_Link : E_Strings.T; HTML_Listing_File : SPARK_IO.File_Type; Plain_Listing_File : SPARK_IO.File_Type; HTML_Listing_Filename : E_Strings.T; Plain_Listing_Filename : E_Strings.T; Echoed_Listing_Filename : E_Strings.T; HTML_Listing_File_Created_OK : SPARK_IO.File_Status; Plain_Listing_File_Open_OK : SPARK_IO.File_Status; HTML_Listing_File_Closed_OK : SPARK_IO.File_Status; Plain_Listing_File_Closed_OK : SPARK_IO.File_Status; Line_Buffer : E_Strings.T; Lookahead_Buffer : E_Strings.T; Listing_File_State : Listing_File_States := Listing_Just_Started; -- -- Subprograms -- -- This sub-procedure writes HTML content to the start of the listing file. -- It assumes that the file handle HTML_Listing_File is the open HTML -- listing file. This procedure will not open or close the file. -- -- The HTML written specifies the title of the page (using the filename -- specified as a parameter) and some formatting tags. The formatting -- is
      -- <pre> (preformatted text) which displays text exactly as given,
      -- and <tt> which sets the typewriter-text font; I use this because it
      -- usually results in a fixed-width font being used.
      --
      -- When writing the end of the listing file these tags need to be closed,
      -- as do the <body> and <html> tags.  This should be done by calling
      -- the Write_HTML_Listing_Footer procedure.
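      --
      -- For orientation, a sketch of the intended page skeleton follows.
      -- This is illustrative only; the exact markup is whatever this
      -- procedure and Write_HTML_Listing_Footer actually emit, and
      -- "foo.lst" stands for a hypothetical listing file name.
      --
      --   <html>
      --     <head>
      --       <title>Examiner HTML Listing File: foo.lst</title>
      --     </head>
      --     <body>
      --       <pre><tt>
      --         ... HTMLified listing lines are written here ...
      --       </tt></pre>
      --     </body>
      --   </html>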
      --
      -- Error trapping:
      --
      -- All error trapping is performed within SPARK_IO.
      -- The SPARK_IO routines called do not return status parameters and so
      -- no error trapping can be done here.

      procedure Write_HTML_Listing_Header (Filename : in E_Strings.T)
      --# global in     CommandLineData.Content;
      --#        in     HTML_Listing_File;
      --#        in out SPARK_IO.File_Sys;
      --# derives SPARK_IO.File_Sys from *,
      --#                                CommandLineData.Content,
      --#                                Filename,
      --#                                HTML_Listing_File;

      is
         Local_Filename : E_Strings.T;
      begin
         SPARK_IO.Put_Line (HTML_Listing_File, "", 56);
         SPARK_IO.Put_Line (HTML_Listing_File, "", 6);
         SPARK_IO.Put_Line (HTML_Listing_File, "  ", 8);
         SPARK_IO.Put_String (HTML_Listing_File, "    Examiner HTML Listing File: ", 39);
         if CommandLineData.Content.Plain_Output then
            Local_Filename := FileSystem.Just_File (Fn  => Filename,
                                                    Ext => True);
         else
            Local_Filename := Filename;
         end if;
         E_Strings.Put_String (File  => HTML_Listing_File,
                               E_Str => Local_Filename);
         SPARK_IO.Put_Line (HTML_Listing_File, "", 8);
         SPARK_IO.Put_Line (HTML_Listing_File, "  ", 9);
         SPARK_IO.Put_Line (HTML_Listing_File, "  ", 8);
         SPARK_IO.Put_Line (HTML_Listing_File, "    
", 9);
         SPARK_IO.Put_Line (HTML_Listing_File, "      ", 10);
      end Write_HTML_Listing_Header;
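
      -- Note: Write_HTML_Listing_Header and Write_HTML_Listing_Footer are
      -- each called once per listing file from the body of Gen_Listing_HTML,
      -- bracketing the line-by-line processing loop.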

      -- This subprocedure writes HTML content to the end of the listing file.
      -- It assumes that the file handle HTML_Listing_File is the open HTML
      -- listing file.  This procedure will not open or close the file.
      --
      -- The HTML written closes all the formatting tags that were opened
      -- by the call to Write_HTML_Listing_Header.
      --
      -- Error trapping:
      --
      -- All error trapping is performed within SPARK_IO.
      -- The SPARK_IO routines called do not return status parameters and so
      -- no error trapping can be done here.

      procedure Write_HTML_Listing_Footer
      --# global in     HTML_Listing_File;
      --#        in out SPARK_IO.File_Sys;
      --# derives SPARK_IO.File_Sys from *,
      --#                                HTML_Listing_File;

      is
      begin
         SPARK_IO.Put_Line (HTML_Listing_File, "      ", 11);
         SPARK_IO.Put_Line (HTML_Listing_File, "    
", 10); SPARK_IO.Put_Line (HTML_Listing_File, " ", 9); SPARK_IO.Put_Line (HTML_Listing_File, "", 7); end Write_HTML_Listing_Footer; -- This procedure is used to convert a line of the listing file into HTML. -- It is effectively a parser for the listing file. -- -- The procedure design is based on a state machine. The global variable -- Listing_File_State records our current location within the listing file (in -- terms of what information has already been read). -- -- Given a line, we can determine the line's meaning from our current state -- and the contents of the line (all blank lines are ignored). -- -- We can use this method to parse almost all the information in the source --*** -- file. Those bits that we don't parse are usually optional (such as the --*** -- flag "rtc" in the options list) and require no translation to HTML anyway. --*** -- -- Once the procedure understands what a line represents it updates the -- Listing_File_State and processes the line by calling an appropriate -- subroutine. -- -- The procedure contains a debugging feature which reports each line that it -- finds to the screen along with a message for each line that it recognises. -- This only happens if the -debug switch is given on the commandline. procedure Process_Listing_Line (Line : in out E_Strings.T; Lookahead : in E_Strings.T) --# global in CommandLineData.Content; --# in out Listing_File_State; --# in out Saved_Error_Link; --# in out SPARK_IO.File_Sys; --# derives Line, --# Saved_Error_Link from Line, --# Listing_File_State, --# Lookahead, --# Saved_Error_Link & --# Listing_File_State from *, --# Line & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Line, --# Listing_File_State, --# Lookahead; is Start_Pos : E_Strings.Positions; Compare_String : E_Strings.T; Debug_Message : E_Strings.T; -- This procedure processes a listing source line. This line should -- consist of some spaces followed by a number followed by some more -- spaces and the text of the source line. -- -- The line number is extracted and put into an HTML tag of the form -- XXX where XXX is the line number. The rest of the -- source line is unchanged. procedure Process_Listing_Source_Line (Line : in out E_Strings.T) --# derives Line from *; is Out_String : E_Strings.T; I : E_Strings.Positions; Line_No : E_Strings.T; Link : E_Strings.T; begin Out_String := E_Strings.Empty_String; I := E_Strings.Positions'First; Line_No := E_Strings.Empty_String; -- Copy up to the first non-space. 
loop exit when E_Strings.Get_Element (E_Str => Line, Pos => I) /= ' '; E_Strings.Append_Char (E_Str => Out_String, Ch => E_Strings.Get_Element (E_Str => Line, Pos => I)); I := I + 1; end loop; -- Copy digits to Line_No while Digit (C => E_Strings.Get_Element (E_Str => Line, Pos => I)) loop E_Strings.Append_Char (E_Str => Line_No, Ch => E_Strings.Get_Element (E_Str => Line, Pos => I)); I := I + 1; end loop; if E_Strings.Get_Length (E_Str => Line_No) > 0 then -- Create anchor based on number Link := E_Strings.Copy_String (Str => " Link, E_Str2 => Line_No); E_Strings.Append_String (E_Str => Link, Str => """>"); E_Strings.Append_Examiner_String (E_Str1 => Link, E_Str2 => Line_No); E_Strings.Append_String (E_Str => Link, Str => ""); -- Append link to Out_String E_Strings.Append_Examiner_String (E_Str1 => Out_String, E_Str2 => Link); end if; -- Append rest of line E_Strings.Append_Examiner_String (E_Str1 => Out_String, E_Str2 => E_Strings.Section (Line, I, (E_Strings.Get_Length (E_Str => Line) - I) + 1)); Line := Out_String; end Process_Listing_Source_Line; begin -- Process_Listing_Line if E_Strings.Get_Length (E_Str => Line) > 0 then Start_Pos := First_Char (The_String => Line); Debug_Message := E_Strings.Empty_String; if CommandLineData.Content.Debug.HTML then E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Line); -- Line used for debugging. end if; case Listing_File_State is when Listing_Just_Started => if E_Strings.Eq1_String (E_Str => E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 5), Str => "*****") then Listing_File_State := Listing_Banner_Started; Debug_Message := E_Strings.Copy_String (Str => "Listing_Banner_Started"); end if; when Listing_Banner_Started => if E_Strings.Eq1_String (E_Str => E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 5), Str => "*****") then Listing_File_State := Listing_Banner_Finished; Debug_Message := E_Strings.Copy_String (Str => "Listing_Banner_Finished"); end if; when Listing_Banner_Finished => if E_Strings.Eq1_String (E_Str => E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 7), Str => "DATE : ") then Listing_File_State := Listing_Date_Found; Debug_Message := E_Strings.Copy_String (Str => "Listing_Date_Found"); end if; when Listing_Date_Found => if E_Strings.Eq1_String (E_Str => E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 4), Str => "Line") then Listing_File_State := Listing_Line_Heading_Found; Debug_Message := E_Strings.Copy_String (Str => "Listing_Line_Heading_Found"); end if; when Listing_Line_Heading_Found | Listing_Source_Line_Found => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 1); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "^") then -- Process Error Pointer Line (do nothing?) 
Listing_File_State := Listing_Error_Source_Pointer_Found; Debug_Message := E_Strings.Copy_String (Str => "Listing_Error_Source_Pointer_Found"); else -- Some errors don't have pointers Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 3); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "!!!") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "***") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "---") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "???") then Process_First_Error_Message_Line (Line => Line, Lookahead => Lookahead, Error_Link => Saved_Error_Link); Listing_File_State := Listing_Error_Message_Found; Debug_Message := E_Strings.Copy_String (Str => "Listing_Error_Message_Found"); else Process_Listing_Source_Line (Line => Line); Listing_File_State := Listing_Source_Line_Found; Debug_Message := E_Strings.Copy_String (Str => "Listing_Source_Line_Found"); end if; end if; when Listing_Error_Source_Pointer_Found => Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 3); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "!!!") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "***") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "---") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "???") then Process_First_Error_Message_Line (Line => Line, Lookahead => Lookahead, Error_Link => Saved_Error_Link); Listing_File_State := Listing_Error_Message_Found; Debug_Message := E_Strings.Copy_String (Str => "Listing_Error_Message_Found"); end if; when Listing_Error_Message_Found => if Start_Pos > 5 then -- Extra lines of the error message will be indented by 11 characters Process_Next_Error_Message_Line (Line => Line, Link => Saved_Error_Link); Debug_Message := E_Strings.Copy_String (Str => "Listing_Next_Error_Message_Line_Found"); else Compare_String := E_Strings.Section (E_Str => Line, Start_Pos => Start_Pos, Length => 3); if E_Strings.Eq1_String (E_Str => Compare_String, Str => "!!!") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "***") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "---") or else E_Strings.Eq1_String (E_Str => Compare_String, Str => "???") then Process_First_Error_Message_Line (Line => Line, Lookahead => Lookahead, Error_Link => Saved_Error_Link); Listing_File_State := Listing_Error_Message_Found; Debug_Message := E_Strings.Copy_String (Str => "Listing_Error_Message_Found"); else -- error source line or summarized warnings Process_Listing_Source_Line (Line => Line); Listing_File_State := Listing_Source_Line_Found; Debug_Message := E_Strings.Copy_String (Str => "Listing_Next_Source_Line_Found"); end if; end if; when Listing_End_Of_Listing_File => ScreenEcho.Put_Line ("An error occurred during HTML listing file generation: Invalid Report_File_State"); end case; if CommandLineData.Content.Debug.HTML then E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Debug_Message); -- Line used for debugging. end if; end if; end Process_Listing_Line; begin if Generate_HTML then -- Get name of listing file. ContextManager.Ops.GetListingFileName (File_Descriptor, Plain_Listing_Filename); if CommandLineData.Content.Echo and not CommandLineData.Content.Brief then -- Echo message to screen. 
Message := E_Strings.Copy_String (Str => "Generating listing file HTML for "); if CommandLineData.Content.Plain_Output then Echoed_Listing_Filename := FileSystem.Just_File (Fn => Plain_Listing_Filename, Ext => True); else Echoed_Listing_Filename := Plain_Listing_Filename; end if; E_Strings.Append_Examiner_String (E_Str1 => Message, E_Str2 => Echoed_Listing_Filename); ScreenEcho.Echo (Message); end if; -- Initialise Saved_Error_Link. Saved_Error_Link := E_Strings.Empty_String; -- Initialise file handles. HTML_Listing_File := SPARK_IO.Null_File; Plain_Listing_File := SPARK_IO.Null_File; -- Generate filename of the form HTML/.htm HTML_Listing_Filename := E_Strings.Translate (E_Str => Plain_Listing_Filename, From_Char => '.', To_Char => '_'); E_Strings.Append_String (E_Str => HTML_Listing_Filename, Str => ".htm"); HTML_Listing_Filename := FileSystem.Case_Of_Files_For_Create (E_Str => HTML_Filename (Filename => HTML_Listing_Filename)); -- Create HTML listing file. E_Strings.Create (File => HTML_Listing_File, Name_Of_File => HTML_Listing_Filename, Form_Of_File => "", Status => HTML_Listing_File_Created_OK); -- Check for errors. if HTML_Listing_File_Created_OK /= SPARK_IO.Ok then ScreenEcho.Put_String ("An error occurred while creating the HTML listing file "); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => HTML_Listing_Filename); else -- file created successfully. CommandLineData.Normalize_File_Name_To_Output_Directory (F => Plain_Listing_Filename); -- Open listing file for input E_Strings.Open (File => Plain_Listing_File, Mode_Of_File => SPARK_IO.In_File, Name_Of_File => Plain_Listing_Filename, Form_Of_File => "", Status => Plain_Listing_File_Open_OK); -- Check for errors. if Plain_Listing_File_Open_OK /= SPARK_IO.Ok then ScreenEcho.Put_String ("An error occurred while opening the listing file "); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => Plain_Listing_Filename); ScreenEcho.Put_Line (" for HTML generation."); else -- file opened successfully. Write_HTML_Listing_Header (Filename => Plain_Listing_Filename); -- Fill Line_Buffer and Lookahead_Buffer if not SPARK_IO.End_Of_File (Plain_Listing_File) then E_Strings.Get_Line (File => Plain_Listing_File, E_Str => Line_Buffer); Line_Buffer := Convert_Special_HTML_Chars (Line => Line_Buffer); if not SPARK_IO.End_Of_File (Plain_Listing_File) then E_Strings.Get_Line (File => Plain_Listing_File, E_Str => Lookahead_Buffer); Lookahead_Buffer := Convert_Special_HTML_Chars (Line => Lookahead_Buffer); -- Process first line Process_Listing_Line (Line => Line_Buffer, Lookahead => Lookahead_Buffer); -- Write line and process rest of file. loop E_Strings.Put_Line (File => HTML_Listing_File, E_Str => Line_Buffer); if SPARK_IO.End_Of_File (Plain_Listing_File) then -- Process and output the lookahead buffer. --# accept Flow, 10, Listing_File_State, "Expected ineffective assignment to Listing_File_State" & --# Flow, 10, Saved_Error_Link, "Expected ineffective assignment to Saved_Error_Link"; Process_Listing_Line (Line => Lookahead_Buffer, Lookahead => E_Strings.Empty_String); --# end accept; -- this is the last call and so the saved values will not be used. 
E_Strings.Put_Line (File => HTML_Listing_File, E_Str => Lookahead_Buffer); exit; end if; Line_Buffer := Lookahead_Buffer; E_Strings.Get_Line (File => Plain_Listing_File, E_Str => Lookahead_Buffer); Lookahead_Buffer := Convert_Special_HTML_Chars (Line => Lookahead_Buffer); Process_Listing_Line (Line => Line_Buffer, Lookahead => Lookahead_Buffer); end loop; end if; end if; Write_HTML_Listing_Footer; -- Close input listing file. --# accept Flow, 10, Plain_Listing_File, "Expected ineffective assignment to Plain_Listing_File"; SPARK_IO.Close (File => Plain_Listing_File, Status => Plain_Listing_File_Closed_OK); --# end accept; -- Check for errors. if Plain_Listing_File_Closed_OK /= SPARK_IO.Ok then ScreenEcho.Put_String ("An error occurred while closing the listing file "); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => Plain_Listing_Filename); ScreenEcho.Put_Line (" after HTML generation."); end if; -- We don't use an else here as we need to try and close the HTML file too. -- Close HTML output file. --# accept Flow, 10, HTML_Listing_File, "Expected ineffective assignment to HTML_Listing_File"; SPARK_IO.Close (File => HTML_Listing_File, Status => HTML_Listing_File_Closed_OK); --# end accept; -- Check for errors. if HTML_Listing_File_Closed_OK /= SPARK_IO.Ok then ScreenEcho.Put_String ("An error occurred while closing the HTML listing file "); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => HTML_Listing_Filename); ScreenEcho.Put_Line (" after HTML generation."); end if; end if; end if; end if; end Gen_Listing_HTML; end SparkHTML; spark-2012.0.deb/examiner/dictionary-add_record_subcomponent.adb0000644000175000017500000001315411753202336023752 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Dictionary) procedure Add_Record_Subcomponent (Prefix : in Symbol; The_Record_Component : in RawDict.Record_Component_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; The_Subcomponent : out RawDict.Subcomponent_Info_Ref) is Current_Subcomponent, Next_Subcomponent : RawDict.Subcomponent_Info_Ref; Valid_State : Boolean; begin case RawDict.GetSymbolDiscriminant (Prefix) is when Variable_Symbol => Current_Subcomponent := RawDict.Get_Variable_Subcomponents (The_Variable => RawDict.Get_Variable_Info_Ref (Item => Prefix)); -- GAA External Valid_State := RawDict.Get_Variable_Marked_Valid (The_Variable => RawDict.Get_Variable_Info_Ref (Item => Prefix)); -- GAA External when Subprogram_Parameter_Symbol => Current_Subcomponent := RawDict.Get_Subprogram_Parameter_Subcomponents (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => Prefix)); -- GAA External Valid_State := True; when Subcomponent_Symbol => Current_Subcomponent := RawDict.Get_Subcomponent_Subcomponents (The_Subcomponent => RawDict.Get_Subcomponent_Info_Ref (Item => Prefix)); -- GAA External Valid_State := RawDict.Get_Subcomponent_Marked_Valid (The_Subcomponent => RawDict.Get_Subcomponent_Info_Ref (Item => Prefix)); -- GAA External when others => -- non-exec code Current_Subcomponent := RawDict.Null_Subcomponent_Info_Ref; Valid_State := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Add_Record_Subcomponent"); end case; if Current_Subcomponent = RawDict.Null_Subcomponent_Info_Ref then -- no subcomponents at all RawDict.Create_Subcomponent (Object => Prefix, Record_Component => The_Record_Component, Marked_Valid => Valid_State, Comp_Unit => Comp_Unit, Loc => RawDict.Get_Symbol_Location (RawDict.Get_Record_Component_Symbol (The_Record_Component)), The_Subcomponent => The_Subcomponent); case RawDict.GetSymbolDiscriminant (Prefix) is when Variable_Symbol => RawDict.Set_Variable_Subcomponents (The_Variable => RawDict.Get_Variable_Info_Ref (Item => Prefix), -- GAA External Subcomponents => The_Subcomponent); when Subprogram_Parameter_Symbol => RawDict.Set_Subprogram_Parameter_Subcomponents (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => Prefix), -- GAA External Subcomponents => The_Subcomponent); when Subcomponent_Symbol => RawDict.Set_Subcomponent_Subcomponents (The_Subcomponent => RawDict.Get_Subcomponent_Info_Ref (Item => Prefix), -- GAA External Sibling => The_Subcomponent); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Add_Record_Subcomponent"); end case; else -- at least one subcomponent already present loop --# assert Current_Subcomponent /= RawDict.Null_Subcomponent_Info_Ref; if RawDict.Get_Subcomponent_Record_Component (The_Subcomponent => Current_Subcomponent) = The_Record_Component then -- the one we are seeking to add is already present The_Subcomponent := Current_Subcomponent; exit; end if; Next_Subcomponent := RawDict.Get_Next_Subcomponent (The_Subcomponent => Current_Subcomponent); if Next_Subcomponent = RawDict.Null_Subcomponent_Info_Ref then -- checked all subcomponents and didn't find the one we want so add it RawDict.Create_Subcomponent (Object => Prefix, Record_Component => The_Record_Component, Marked_Valid => Valid_State, Comp_Unit => Comp_Unit, Loc => RawDict.Get_Symbol_Location (RawDict.Get_Subcomponent_Symbol (Current_Subcomponent)), 
The_Subcomponent => The_Subcomponent); RawDict.Set_Next_Subcomponent (The_Subcomponent => Current_Subcomponent, Next => The_Subcomponent); exit; end if; Current_Subcomponent := Next_Subcomponent; end loop; end if; end Add_Record_Subcomponent; spark-2012.0.deb/examiner/Makefile0000644000175000017500000001236711753202337015723 0ustar eugeneugen# ----------------------------------------------------------------------------- # (C) Altran Praxis Limited # ----------------------------------------------------------------------------- # # The SPARK toolset is free software; you can redistribute it and/or modify it # under terms of the GNU General Public License as published by the Free # Software Foundation; either version 3, or (at your option) any later # version. The SPARK toolset is distributed in the hope that it will be # useful, but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the GNU # General Public License distributed with the SPARK toolset; see file # COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of # the license. # # ============================================================================= ################################################################################ # PURPOSE #------------------------------------------------------------------------------- # Makefile for the Examiner # For Linux, Windows, Solaris or Mac OS X (aka Darwin). ################################################################################ ################################################################################ # BUILD CONFIGURATION ################################################################################ # Name of the output program. OUTPUT_NAME:=spark # Location of root. ROOT:=.. # Location of SPARKLALR parser generator binary SPARKLALR:=${ROOT}/sparklalr/sparklalr # Do not distribute files that must be prepped. SPARK_PREPED:=examinerconstants.ads filesystem.adb # Do not distribute files that are generated by the parser. SPARK_PARSER:=sp_symbols.ads sp_parser_goto.adb sp_parser_goto.ads sp_relations.adb \ sp_relations.ads sp_parser_actions.adb sp_parser_actions.ads \ sp_productions.ads sp_expected_symbols.adb sp_expected_symbols.ads SPARK.PAR FILES_GRAMMAR:=SPARK.LLA # Location of common. COMMON:=${ROOT}/common include ${COMMON}/Makefile.inc ################################################################################ # PLATFORM INDEPENDENT CONFIGURATION ################################################################################ # Files containing platform specific code that is handled by gnatprep PREP_TARGETS:=${SPARK_PREPED} \ indexmanager-cache.adb indexmanager-cache.shb \ sli-xref.adb sli-xref.shb # Debug info file produced by sparklalr (so they can be cleaned up) PARSER_DEBUG:=SPARK.DGN SPARK.DSC SPARK.EKO SPARK.SYM SPARK.ACT GCC_OPTS:=-fstack-check ################################################################################ # PLATFORM SPECIFIC CONFIGURATION ################################################################################ # Windows. ifeq (${TARGET},Windows) # The --stack option is Windows specific, and supports setting the # 'StackCommitSize' and 'StackReserveSize'. The interpretation of # these values is not treated consistently across different # Windows versions. 
Experience and testing on various Windows # versions has led to the current stack size being set at: # 0x10000000,0x100000 Do not change this value without considering # its full ramifications. LINK_ARGS:=-Xlinker --stack=0x10000000,0x100000 endif # Solaris. ifeq (${TARGET},SunOS) LINK_ARGS:= endif # Linux. ifeq (${TARGET},Linux) LINK_ARGS:= endif # Darwin (Mac OS X 10.5 or 10.6, 64-bit). ifeq (${TARGET},Darwin) LINK_ARGS:= endif ################################################################################ # TARGETS ################################################################################ all: ${OUTPUT_NAME}${EXE_EXTN} ${OUTPUT_NAME}${EXE_EXTN}: prep parser gnatmake -j${SPARKCPUS} ${GNATMAKE_OPTS} examiner -o $@ -cargs ${GCC_OPTS} -bargs ${BIND_OPTS} -largs ${LINK_ARGS} self-analysis: prep parser -spark -report=mainunits95.rep -plain -dictionary_file=examiner @${OUTPUT_NAME}.smf # Building parser # =============== parser: ${SPARK_PARSER} ${SPARK_PARSER}: ${FILES_GRAMMAR} # -p = Operate as parser # -s = Perform self-pack # -m = Accept multi comp # -v = Verbose # -du = Activate debug feature: dump memory ${SPARKLALR} SPARK -p -s -m -v -du gnatchop -w SPARK.PAR # Platform specific prepping # ========================== prep: ${PREP_TARGETS} %.ads: %.aps gnatprep ${PREP_OPTS} -DTarget=${PREP_TARGET} -DAddress_Size=${ADDRESS_SIZE} $< $@ %.adb: %.apb gnatprep ${PREP_OPTS} -DTarget=${PREP_TARGET} -DAddress_Size=${ADDRESS_SIZE} $< $@ indexmanager-cache.adb: indexmanager-cache.SHADOW.adb gnatprep ${PREP_OPTS} -r -DSPARK=False $< $@ indexmanager-cache.shb: indexmanager-cache.SHADOW.adb gnatprep ${PREP_OPTS} -DSPARK=True $< $@ sli-xref.adb: sli-xref.SHADOW.adb gnatprep ${PREP_OPTS} -r -DSPARK=False $< $@ sli-xref.shb: sli-xref.SHADOW.adb gnatprep ${PREP_OPTS} -DSPARK=True $< $@ # Cleaning code base # ================== clean: residueclean standardclean reallyclean: clean targetclean parserclean prepclean vcclean residueclean: rm -f ${PARSER_DEBUG} parserclean: rm -f ${SPARK_PARSER} prepclean: rm -f ${PREP_TARGETS} ################################################################################ # END-OF-FILE spark-2012.0.deb/examiner/spark_xml.ads0000644000175000017500000003337411753202336016754 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with E_Strings; --# inherit Ada.Characters.Latin_1, --# E_Strings, --# SPARK_IO; package SPARK_XML is Max_Tags : constant Integer := 100; Max_Attributes_Per_Tag : constant Integer := 7; Max_Attributes : constant Integer := Max_Tags * Max_Attributes_Per_Tag; subtype Tag_ID is Integer range 0 .. 
Max_Tags; subtype Attribute_ID is Integer range 0 .. Max_Attributes; Null_Tag : constant Tag_ID := Tag_ID'First; Null_Attribute : constant Attribute_ID := Attribute_ID'First; -- Simple attribute types. type Attribute_Type is (At_Null, At_String, At_Integer, At_Float); type Schema_Status is ( SS_OK, SS_Invalid_Attribute, SS_Invalid_Tag, SS_To_Many_Attributes, SS_Wrong_Content_Type, SS_Stack_Full, SS_Stack_Empty, SS_Tag_Incomplete, SS_Invalid_Depth, SS_No_Such_Tag, SS_Tag_Not_Found); type Up_Or_Down is (Up, Down); Max_Tag_Depth : constant Integer := 100; type Tag_Depth is range 0 .. Max_Tag_Depth; type Schema_Record is private; type Schema_State_Record is private; function X_Str (Str : in String) return E_Strings.T; function Filter_String (Str : in E_Strings.T) return E_Strings.T; --------------------- -- Schema creation -- --------------------- procedure Init_Schema (Schema : out Schema_Record); --# derives Schema from ; procedure Add_Tag (Schema : in out Schema_Record; Name : in E_Strings.T; ID : out Tag_ID); --# derives ID from Schema & --# Schema from *, --# Name; function Is_Null_Tag (TID : in Tag_ID) return Boolean; procedure Add_Attribute_To_Tag (Schema : in out Schema_Record; TID : in Tag_ID; Name : in E_Strings.T; Content_Type : in Attribute_Type; Required : in Boolean; ID : out Attribute_ID; Success : out Boolean); --# derives ID from Schema & --# Schema, --# Success from Content_Type, --# Name, --# Required, --# Schema, --# TID; procedure Add_Child_Tag (Schema : in out Schema_Record; TID : in Tag_ID; Child : in Tag_ID; Required : in Boolean; Success : out Boolean); --# derives Schema from *, --# Child, --# Required, --# TID & --# Success from Schema, --# TID; procedure Add_CDATA (Schema : in out Schema_Record; TID : in Tag_ID); --# derives Schema from *, --# TID; function CDATA (Schema : in Schema_Record; TID : in Tag_ID) return Boolean; ------------------ -- Tag Creation -- ------------------ procedure Init_Schema_State (Schema_State : out Schema_State_Record); --# derives Schema_State from ; -- Opening tags: -- 1) Initialise the opening tag -- 2) Add attributes to it -- 3) Call Output_Opening_Tag to return the string. 
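--
-- A minimal sketch of that sequence follows. It is illustrative only and is
-- not taken from an Examiner client: the tag name "trace", the attribute
-- name "file" and the objects My_Schema, My_State, Name, XML, Depth and
-- Status are assumed to be declared by the caller, and all checking of
-- Status is omitted.
--
--    SPARK_XML.Init_Opening_Tag (My_Schema, My_State, SPARK_XML.X_Str ("trace"), Status);
--    SPARK_XML.Add_Attribute_Str (My_Schema, My_State, SPARK_XML.X_Str ("file"),
--                                 SPARK_XML.Filter_String (Name), Status);
--    SPARK_XML.Output_Opening_Tag (My_Schema, My_State, XML, Depth, Status);
--    -- ... write XML to the output file, emit any child tags ...
--    SPARK_XML.Close_Tag (My_Schema, My_State, Depth, XML, Status);
--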
procedure Init_Opening_Tag (Schema : in Schema_Record; Schema_State : in out Schema_State_Record; Name : in E_Strings.T; Status : out Schema_Status); --# derives Schema_State, --# Status from Name, --# Schema, --# Schema_State; procedure Init_Opening_Tag_By_ID (Schema : in Schema_Record; Schema_State : in out Schema_State_Record; TID : in Tag_ID; Status : out Schema_Status); --# derives Schema_State, --# Status from Schema, --# Schema_State, --# TID; procedure Init_Opening_Tag_No_Check (Schema_State : in out Schema_State_Record; TID : in Tag_ID; Status : out Schema_Status); --# derives Schema_State, --# Status from Schema_State, --# TID; procedure Add_Attribute_Int (Schema : in Schema_Record; Schema_State : in out Schema_State_Record; Name : in E_Strings.T; Value : in Integer; Status : out Schema_Status); --# derives Schema_State from *, --# Name, --# Schema, --# Value & --# Status from Name, --# Schema, --# Schema_State; procedure Add_Attribute_Str (Schema : in Schema_Record; Schema_State : in out Schema_State_Record; Name : in E_Strings.T; Value : in E_Strings.T; Status : out Schema_Status); --# derives Schema_State from *, --# Name, --# Schema, --# Value & --# Status from Name, --# Schema, --# Schema_State; procedure Output_Opening_Tag (Schema : in Schema_Record; Schema_State : in out Schema_State_Record; XML : out E_Strings.T; Depth : out Tag_Depth; Status : out Schema_Status); --# derives Depth, --# Schema_State, --# Status, --# XML from Schema, --# Schema_State; -- Closing tags procedure Close_Tag (Schema : in Schema_Record; Schema_State : in out Schema_State_Record; Depth : in Tag_Depth; XML : out E_Strings.T; Status : out Schema_Status); --# derives Schema_State, --# Status from Depth, --# Schema_State & --# XML from Depth, --# Schema, --# Schema_State; procedure Close_Tag_By_ID (Schema : in Schema_Record; Schema_State : in out Schema_State_Record; TID : in Tag_ID; XML : out E_Strings.T; Status : out Schema_Status); --# derives Schema_State, --# Status from Schema_State, --# TID & --# XML from Schema, --# Schema_State, --# TID; procedure Close_Top_Tag_By_ID (Schema : in Schema_Record; Schema_State : in out Schema_State_Record; TID : in Tag_ID; XML : out E_Strings.T; Status : out Schema_Status); --# derives Schema_State, --# Status from Schema_State, --# TID & --# XML from Schema, --# Schema_State, --# TID; procedure Close_Tag_By_Name (Schema : in Schema_Record; Schema_State : in out Schema_State_Record; Name : in E_Strings.T; XML : out E_Strings.T; Status : out Schema_Status); --# derives Schema_State, --# Status, --# XML from Name, --# Schema, --# Schema_State; ----------- -- Debug -- ----------- function Is_Error (Error : in Schema_Status) return Boolean; procedure Print_Schema_Error (Error : in Schema_Status); --# derives null from Error; procedure Print_Working_State (Schema : in Schema_Record; Schema_State : in Schema_State_Record); --# derives null from Schema, --# Schema_State; private ----------------------- -- Schema Structures -- ----------------------- subtype Tag_Attribute_Array_Index is Integer range 0 .. Max_Attributes_Per_Tag; Max_Child_Tags : constant Integer := 10; subtype Child_Tag_Array_Index is Integer range 0 .. 
Max_Child_Tags; type Child_Tag is record Child : Tag_ID; Required : Boolean; end record; type Tag_Attribute_Array is array (Tag_Attribute_Array_Index) of Attribute_ID; type Child_Tag_Array is array (Child_Tag_Array_Index) of Child_Tag; type Tag is record Name : E_Strings.T; Tag_Attributes : Tag_Attribute_Array; Last_Tag_Attribute : Tag_Attribute_Array_Index; Child_Tags : Child_Tag_Array; Last_Child : Child_Tag_Array_Index; Allow_CDATA : Boolean; end record; Empty_Tag : constant Tag := Tag' (Name => E_Strings.Empty_String, Tag_Attributes => Tag_Attribute_Array'(others => Null_Attribute), Last_Tag_Attribute => Tag_Attribute_Array_Index'First, Child_Tags => Child_Tag_Array'(others => Child_Tag'(Child => Null_Tag, Required => False)), Last_Child => Child_Tag_Array_Index'First, Allow_CDATA => False); type Tag_Array_Type is array (Tag_ID) of Tag; type Tag_List is record Tag_Array : Tag_Array_Type; Last_Tag : Tag_ID; end record; Empty_Tag_List : constant Tag_List := Tag_List'(Tag_Array => Tag_Array_Type'(others => Empty_Tag), Last_Tag => 0); type Attribute is record Name : E_Strings.T; Content_Type : Attribute_Type; Required : Boolean; end record; type Attribute_Array_Type is array (Attribute_ID) of Attribute; type Attribute_List is record Attribute_Array : Attribute_Array_Type; Last_Attribute : Attribute_ID; end record; Empty_Attribute_List : constant Attribute_List := Attribute_List' (Attribute_Array => Attribute_Array_Type'(others => Attribute'(Name => E_Strings.Empty_String, Content_Type => At_Null, Required => False)), Last_Attribute => 0); type Schema_Record is record Attributes : Attribute_List; Tags : Tag_List; end record; Empty_Schema_Record : constant Schema_Record := Schema_Record'(Attributes => Empty_Attribute_List, Tags => Empty_Tag_List); ---------------------------- -- Schema_State Structures -- ---------------------------- -- Tag_Stack records the hierarchy from the present tag to the root. -- This allows us to enforce child tag relations. -- If a tag is closed that is not the immediate parent, we can iterate through the stack -- until we find which one it was and close all the intermediate tags. -- This isn't perfect: it will have problems with cycles, but will be fine with simple -- recursion with a single tag that can be the child of itself (A -> B -> B -> B) but not -- (A -> B -> A -> B ->). type Tag_Stack_Array is array (Tag_Depth) of Tag_ID; type Tag_Stack_Type is record Stack : Tag_Stack_Array; Current : Tag_Depth; end record; Empty_Tag_Stack : constant Tag_Stack_Type := Tag_Stack_Type'(Stack => Tag_Stack_Array'(others => 0), Current => 0); subtype Tag_Count is Integer range 0 ..
100; type Tag_Count_Array is array (Tag_ID) of Tag_Count; type Working_Attribute is record AID : Attribute_ID; Val : E_Strings.T; end record; type Working_Attribute_Array is array (Tag_Attribute_Array_Index) of Working_Attribute; type Working_Tag_Type is record TID : Tag_ID; Attribs : Working_Attribute_Array; end record; Empty_Working_Tag : constant Working_Tag_Type := Working_Tag_Type' (TID => Null_Tag, Attribs => Working_Attribute_Array'(others => Working_Attribute'(AID => Null_Attribute, Val => E_Strings.Empty_String))); type Schema_State_Record is record Tag_Stack : Tag_Stack_Type; Working_Tag : Working_Tag_Type; end record; Empty_Schema_State_Record : constant Schema_State_Record := Schema_State_Record'(Tag_Stack => Empty_Tag_Stack, Working_Tag => Empty_Working_Tag); end SPARK_XML; spark-2012.0.deb/examiner/sem-walk_expression_p-ops_are_same_and_commutative.adb0000644000175000017500000000230411753202336027130 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Walk_Expression_P) function Ops_Are_Same_And_Commutative (Op1, Op2 : SP_Symbols.SP_Symbol) return Boolean is begin return Op1 = Op2 and then (Op1 = SP_Symbols.plus or else Op1 = SP_Symbols.multiply); end Ops_Are_Same_And_Commutative; spark-2012.0.deb/examiner/seqalgebra.ads0000644000175000017500000004065611753202336017063 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- SeqAlgebra -- -- Purpose: -- SeqAlgebra is a utility package which defines a data type which provides -- simple sequence and set operations (methods) for use by the FlowAnalyser. 
-- It is a higher-level wrapper around the Heap package. -- -- Both sequences and sets are represented by the Seq abstract data type and -- members of a set or sequence are represented by the MemberOfSeq data type. -- -- Sequences maintain the order of insertion of members and allow duplicate -- values (they differ by their position in the sequence). Sets do not -- maintain the order of insertion and do not retain duplicate values. -- -- A member has a single value of type Natural which can be obtained -- via a getter function, Value_Of_Member. -- -- Members are created by using methods to either add a new value to a set -- or append a new value to a sequence. There are also methods to -- remove members. To locate a particular member, methods exist to locate -- the first and next member that allow iteration through all members. -- -- There is an important distinction between sets and sequences and some -- methods are only applicable to one or the other. See the Use section -- for a list of methods and whether they apply exclusively to a set or a -- sequence. -- -- For Seq objects that represent sets there are some high-level set -- operations such as Union and Intersection. -- -- Clients: -- SeqAlgebra is used extensively by the Examiner modules Sem, FlowAnalyser -- and DAG as well as some uses in other modules such as ComponentManager. -- -- Use: -- There are two groups of methods, one associated with objects of type Seq -- representing a set and another associated with objects representing a -- sequence. There are a number of common methods which are used for both -- sorts of object. Use of the two groups of methods should not be mixed, -- that is, an object of type Seq should not be used to represent both a -- sequence and a set. -- -- Common methods: -- CreateSeq -- DisposeOfSeq -- IsEmptySeq -- AreEqual -- Length -- IsNullMember -- FirstMember -- NextMember -- Value_Of_Member -- MemberIndex -- SeqToNatural -- NaturalToSeq -- Methods for sequences: -- BeforeFirstMember -- AppendAfter -- EliminateAfter -- Methods for sets: -- AddMember -- RemoveMember -- IsMember -- Union -- AugmentSeq -- Intersection -- Complement -- Reduction -- -- For an example of use as a sequence see -- Sem-CompUnit-WalkStatements-CheckForMutuallyExclusiveBranches.adb -- -- For an example of use as a set see -- FlowAnalyser-FlowAnalyse-AnalyseRelations-AnalyseRelations.adb -- -- Important principles are: -- -- 1. a Seq object must be Created before it is used; -- -- 2. a Seq should be Disposed when its use is complete. A Seq that -- has been disposed is recycled and returned to the Heap; -- -- 3. a MemberOfSeq object cannot be used until it has been defined by -- a call to an appropriate method of SeqAlgebra; -- -- 4. the same object should not be used to represent both a sequence and -- a set; -- -- 5. SeqAlgebra is a wrapper for Heap and if the Heap becomes exhausted -- of free storage elements an attempt to Create a Seq or to add a -- new member will cause termination with a fatal error. -- -- Extension: -- It is not expected that any extension will be made to this package. -- -------------------------------------------------------------------------------- with Heap; --# inherit Heap, --# Statistics, --# SystemErrors; package SeqAlgebra is -- A Seq object may be used to represent a sequence or a notionally unordered -- set as described in the above description. In general, when a Seq object -- is used as a set then only the common and set operations should be -- applied to the object.
When it is used as a sequence then only the -- common and sequence operations should be applied. type Seq is private; -- NullSeq represents an uninitialised Seq and may be used in an -- aggregate which initializes a composite object containing a Seq. Null_Seq : constant Seq; -- A MemberOfSeq object represents an element of a sequence or set. -- Each element contains a value of type Natural. type MemberOfSeq is private; -------- Functions and operations common to both sequences and sets -------- -- Returns true only if S is a null seq function Is_Null_Seq (S : Seq) return Boolean; -- Returns true only if M is a null member (not in any sequence or set) function IsNullMember (M : MemberOfSeq) return Boolean; -- Returns the first member of the given Seq S. The returned member -- can be used as a starting point to iterate through all members of the -- sequence or set. function FirstMember (TheHeap : Heap.HeapRecord; S : Seq) return MemberOfSeq; -- Next member of the sequence or set. A null member will be returned if the -- sequence or set has no more members. -- Successively calling Next_Member with the previously returned Member -- (starting with the member returned from First_Member) will iterate over -- all members of the sequence or set provided no elements are added or -- removed from the sequence or set during the iteration over its elements. function NextMember (TheHeap : Heap.HeapRecord; M : MemberOfSeq) return MemberOfSeq; -- Gets the (Natural) value of the member M. -- The Member must be non null. -- The value returned is undefined if M is the pseudo member obtained -- from a call to Before_First_Member. function Value_Of_Member (The_Heap : Heap.HeapRecord; M : MemberOfSeq) return Natural; -- Initializes Seq S ready for use. It must be called prior to any -- other Sequence or Set operation. procedure CreateSeq (TheHeap : in out Heap.HeapRecord; S : out Seq); --# global in out Statistics.TableUsage; --# derives S, --# TheHeap from TheHeap & --# Statistics.TableUsage from *, --# TheHeap; -- Return true only if both S1 and S2 are same length and have same members. function AreEqual (TheHeap : Heap.HeapRecord; S1, S2 : Seq) return Boolean; -- Return the length of a Seq object. function Length (TheHeap : Heap.HeapRecord; S : Seq) return Natural; -- Returns true only if the Sequence or Set S is empty. function IsEmptySeq (TheHeap : Heap.HeapRecord; S : Seq) return Boolean; -- SeqAlgebra uses the Heap package for storage. The storage must be -- released by calling Dispose before all references (there may be aliases) -- to the sequence or set within The_Heap are out of scope. -- WARNING: Disposing of a Seq S may leave Members of S with invalid -- references to non-existent elements. Do not use Members from a disposed -- Seq. -- As a rule Member objects of a set S should not have a larger scope than S. procedure DisposeOfSeq (TheHeap : in out Heap.HeapRecord; S : in Seq); --# derives TheHeap from *, --# S; -------- type conversion functions ------------ -- The following low-level type conversions should be used with extreme -- caution. -- Their purpose is to allow the building of sets of sets. function SeqToNatural (S : Seq) return Natural; function NaturalToSeq (N : Natural) return Seq; -------- Functions and operations intended for sequences -------- -- Returns a pseudo-member which can be used to prefix new elements to a -- sequence. Appending to the pseudo-member will place the appended member -- at the head of the sequence. 
-- The caller must ensure that the sequence S is properly Created. -- The returned member is guaranteed to be non null and deemed to refer -- to the pseudo element of S. function BeforeFirstMember (S : Seq) return MemberOfSeq; -- Inserts the GivenValue in a sequence referenced by M after member M. -- M must not be null. -- If the call is successful the new value of M refers to the appended -- element. procedure AppendAfter (TheHeap : in out Heap.HeapRecord; M : in out MemberOfSeq; GivenValue : in Natural); --# global in out Statistics.TableUsage; --# derives M, --# Statistics.TableUsage from *, --# TheHeap & --# TheHeap from *, --# GivenValue, --# M; -- Removes the element from a sequence following that referenced by Member M. -- M and the element to which M refers are not changed by the operation. -- M must not be null. -- WARNING: Eliminating an element of a sequence may leave other Members -- with invalid references to non-existent elements. Do not use Members which -- refer to an eliminated element. procedure EliminateAfter (TheHeap : in out Heap.HeapRecord; M : in MemberOfSeq); --# derives TheHeap from *, --# M; -------- Functions and operations intended for sets ---------- -- Return true only if there is an element in S with the value Given_Value. -- WARNING: IsMember is only defined for a Seq object representing a set. function IsMember (TheHeap : Heap.HeapRecord; S : Seq; GivenValue : Natural) return Boolean; -- If the Given_Value is not already an element of the set S add it to S. -- A Create operation must have been applied to S. -- WARNING: AddMember is only defined for a Seq object representing a set. -- If an element is added to a set it is in general indecidable as -- to whether it will be included in any current iteration over the elements -- of the set using Next_Member. procedure AddMember (TheHeap : in out Heap.HeapRecord; S : in Seq; GivenValue : in Natural); --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# TheHeap from *, --# GivenValue, --# S, --# TheHeap; -- If an element with Given_Value exists in S, remove it from S. -- A Create operation must have been applied to S. -- WARNING: RemoveMember is only defined for a Seq object representing a set. -- Removing an element from a Seq will render any members -- referencing the element invalid and they should not be used. -- There are no checks against this erroneous use. procedure RemoveMember (TheHeap : in out Heap.HeapRecord; S : in Seq; GivenValue : in Natural); --# derives TheHeap from *, --# GivenValue, --# S; -- This is a low-level function not intended for general use. It is a -- mapping between set elements and the Natural numbers. -- Each element of a non empty set S is mapped to a unique element number in -- the range 1 .. Cardinality of S. -- If the GivenValue is an element of S, Index is the unique element -- number of the GivenValue. -- If the element is not found, the index is the index of the last element -- in the sequence. Otherwise if the Seq is empty the Index is 0. function MemberIndex (TheHeap : Heap.HeapRecord; S : Seq; GivenValue : Natural) return Natural; ----------- Set Operations on Seq representing Sets ----------- -- Creates a new set C with all the values from set A and set B. -- The caller should not apply CreateSeq to C prior to invoking Union. -- Sets A and B are unchanged. -- Note if value Z is in A and B, it will only appear once in C. -- This is a set operation do not use with a Seq representing a sequence. 
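 --
 -- A minimal usage sketch for the set view of Seq (illustrative only, not
 -- taken from an Examiner client; The_Heap, A, B, C and M are assumed to be
 -- suitably declared by the caller, and B is assumed to have been Created
 -- and populated in the same way as A):
 --
 --    SeqAlgebra.CreateSeq (The_Heap, A);
 --    SeqAlgebra.AddMember (The_Heap, A, 1);
 --    SeqAlgebra.AddMember (The_Heap, A, 2);
 --    M := SeqAlgebra.FirstMember (The_Heap, A);
 --    while not SeqAlgebra.IsNullMember (M) loop
 --       -- ... use SeqAlgebra.Value_Of_Member (The_Heap, M) here ...
 --       M := SeqAlgebra.NextMember (The_Heap, M);
 --    end loop;
 --    SeqAlgebra.Union (The_Heap, A, B, C);  -- do not CreateSeq C beforehand
 --    SeqAlgebra.DisposeOfSeq (The_Heap, A);
 --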
procedure Union (TheHeap : in out Heap.HeapRecord; A, B : in Seq; C : out Seq); --# global in out Statistics.TableUsage; --# derives C from TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# A, --# B, --# TheHeap; -- An in place Union, all elements in set B are added to set -- A with no duplicates. B is unchanged. -- This is a set operation do not use with a Seq representing a sequence. procedure AugmentSeq (TheHeap : in out Heap.HeapRecord; A, B : in Seq); --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# TheHeap from *, --# A, --# B, --# TheHeap; -- Creates a new set C with the elements common to set A and set B. -- The caller should not apply CreateSeq to C prior to invoking Intersection. -- Sets A and B are unchanged. -- This is a set operation do not use with a Seq representing a sequence. procedure Intersection (TheHeap : in out Heap.HeapRecord; A, B : in Seq; C : out Seq); --# global in out Statistics.TableUsage; --# derives C from TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# A, --# B, --# TheHeap; -- Creates a new Seq set C, A and B which contains all the -- elements which are in set A but not in set B. -- The caller should not apply CreateSeq to C prior to invoking Complement. -- Sets A and B are unchanged. -- This is a set operation do not use with a Seq representing a sequence. procedure Complement (TheHeap : in out Heap.HeapRecord; A, B : in Seq; C : out Seq); --# global in out Statistics.TableUsage; --# derives C from TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# A, --# B, --# TheHeap; -- This is an "in-place" Complement: all elements -- are removed from A if they are also in B. B is unchanged. -- This is a set operation do not use with a Seq representing a sequence. procedure Reduction (TheHeap : in out Heap.HeapRecord; A, B : in Seq); --# derives TheHeap from *, --# A, --# B; function To_Atom (M : MemberOfSeq) return Heap.Atom; private type Seq is range 1 .. Heap.Atom'Last; --# assert Seq'Base is Integer; type Member_Range is range 0 .. Heap.Atom'Last; type MemberOfSeq is record Member : Member_Range; The_Seq : Seq; end record; Null_Seq : constant Seq := Seq'Last; end SeqAlgebra; spark-2012.0.deb/examiner/dictionary-add_declaration.adb0000644000175000017500000004303611753202336022167 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Dictionary) procedure Add_Declaration (Comp_Unit : in ContextManager.UnitDescriptors; Loc : in Location; Scope : in Scopes; Context : in Contexts; The_Declaration : out RawDict.Declaration_Info_Ref) is procedure Add_Visible_Declaration (The_Declaration : in RawDict.Declaration_Info_Ref; Region : in Symbol) --# global in out Dict; --# derives Dict from *, --# Region, --# The_Declaration; is procedure Add_Visible_Declaration_To_Package (The_Declaration : in RawDict.Declaration_Info_Ref; The_Package : in RawDict.Package_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Declaration, --# The_Package; is Previous : RawDict.Declaration_Info_Ref; begin Previous := RawDict.Get_Package_Last_Visible_Declaration (The_Package => The_Package); if Previous = RawDict.Null_Declaration_Info_Ref then RawDict.Set_Package_First_Visible_Declaration (The_Package => The_Package, The_Declaration => The_Declaration); else RawDict.Set_Next_Declaration (The_Declaration => Previous, Next => The_Declaration); end if; RawDict.Set_Package_Last_Visible_Declaration (The_Package => The_Package, The_Declaration => The_Declaration); end Add_Visible_Declaration_To_Package; -------------------------------------------------------------------------------- procedure Add_Visible_Declaration_To_Protected_Type (The_Declaration : in RawDict.Declaration_Info_Ref; The_Protected_Type : in RawDict.Type_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Declaration, --# The_Protected_Type; is Previous : RawDict.Declaration_Info_Ref; begin Previous := RawDict.Get_Protected_Type_Last_Visible_Declaration (The_Protected_Type => The_Protected_Type); if Previous = RawDict.Null_Declaration_Info_Ref then RawDict.Set_Protected_Type_First_Visible_Declaration (The_Protected_Type => The_Protected_Type, The_Declaration => The_Declaration); else RawDict.Set_Next_Declaration (The_Declaration => Previous, Next => The_Declaration); end if; RawDict.Set_Protected_Type_Last_Visible_Declaration (The_Protected_Type => The_Protected_Type, The_Declaration => The_Declaration); end Add_Visible_Declaration_To_Protected_Type; -------------------------------------------------------------------------------- procedure Add_Visible_Declaration_To_Generic_Unit (The_Declaration : in RawDict.Declaration_Info_Ref; The_Generic_Unit : in RawDict.Generic_Unit_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Declaration, --# The_Generic_Unit; is Previous : RawDict.Declaration_Info_Ref; begin Previous := RawDict.Get_Generic_Unit_Last_Declaration (The_Generic_Unit => The_Generic_Unit); if Previous = RawDict.Null_Declaration_Info_Ref then RawDict.Set_Generic_Unit_First_Declaration (The_Generic_Unit => The_Generic_Unit, The_Declaration => The_Declaration); else RawDict.Set_Next_Declaration (The_Declaration => Previous, Next => The_Declaration); end if; RawDict.Set_Generic_Unit_Last_Declaration (The_Generic_Unit => The_Generic_Unit, The_Declaration => The_Declaration); end Add_Visible_Declaration_To_Generic_Unit; begin -- Add_Visible_Declaration case RawDict.GetSymbolDiscriminant (Region) is when Package_Symbol => Add_Visible_Declaration_To_Package (The_Declaration => The_Declaration, The_Package => RawDict.Get_Package_Info_Ref (Item => Region)); when Type_Symbol => -- must be a protected type SystemErrors.RT_Assert (C => Is_Protected_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Region)), Sys_Err => SystemErrors.Invalid_Symbol_Table, 
Msg => "Dictionary.Add_Visible_Declaration"); Add_Visible_Declaration_To_Protected_Type (The_Declaration => The_Declaration, The_Protected_Type => RawDict.Get_Type_Info_Ref (Item => Region)); when Generic_Unit_Symbol => Add_Visible_Declaration_To_Generic_Unit (The_Declaration => The_Declaration, The_Generic_Unit => RawDict.Get_Generic_Unit_Info_Ref (Item => Region)); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.Add_Visible_Declaration"); end case; end Add_Visible_Declaration; -------------------------------------------------------------------------------- procedure Add_Local_Declaration (The_Declaration : in RawDict.Declaration_Info_Ref; Region : in Symbol) --# global in out Dict; --# derives Dict from *, --# Region, --# The_Declaration; is procedure Add_Local_Declaration_To_Package (The_Declaration : in RawDict.Declaration_Info_Ref; The_Package : in RawDict.Package_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Declaration, --# The_Package; is Previous : RawDict.Declaration_Info_Ref; begin Previous := RawDict.Get_Package_Last_Local_Declaration (The_Package => The_Package); if Previous = RawDict.Null_Declaration_Info_Ref then RawDict.Set_Package_First_Local_Declaration (The_Package => The_Package, The_Declaration => The_Declaration); else RawDict.Set_Next_Declaration (The_Declaration => Previous, Next => The_Declaration); end if; RawDict.Set_Package_Last_Local_Declaration (The_Package => The_Package, The_Declaration => The_Declaration); end Add_Local_Declaration_To_Package; -------------------------------------------------------------------------------- procedure Add_Local_Declaration_To_Subprogram (The_Declaration : in RawDict.Declaration_Info_Ref; The_Subprogram : in RawDict.Subprogram_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Declaration, --# The_Subprogram; is Previous : RawDict.Declaration_Info_Ref; begin Previous := RawDict.Get_Subprogram_Last_Declaration (The_Subprogram => The_Subprogram); if Previous = RawDict.Null_Declaration_Info_Ref then RawDict.Set_Subprogram_First_Declaration (The_Subprogram => The_Subprogram, The_Declaration => The_Declaration); else RawDict.Set_Next_Declaration (The_Declaration => Previous, Next => The_Declaration); end if; RawDict.Set_Subprogram_Last_Declaration (The_Subprogram => The_Subprogram, The_Declaration => The_Declaration); end Add_Local_Declaration_To_Subprogram; -------------------------------------------------------------------------------- procedure Add_Local_Declaration_To_Protected_Type (The_Declaration : in RawDict.Declaration_Info_Ref; The_Protected_Type : in RawDict.Type_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Declaration, --# The_Protected_Type; is Previous : RawDict.Declaration_Info_Ref; begin Previous := RawDict.Get_Protected_Type_Last_Local_Declaration (The_Protected_Type => The_Protected_Type); if Previous = RawDict.Null_Declaration_Info_Ref then RawDict.Set_Protected_Type_First_Local_Declaration (The_Protected_Type => The_Protected_Type, The_Declaration => The_Declaration); else RawDict.Set_Next_Declaration (The_Declaration => Previous, Next => The_Declaration); end if; RawDict.Set_Protected_Type_Last_Local_Declaration (The_Protected_Type => The_Protected_Type, The_Declaration => The_Declaration); end Add_Local_Declaration_To_Protected_Type; -------------------------------------------------------------------------------- procedure Add_Local_Declaration_To_Task_Type (The_Declaration : in 
RawDict.Declaration_Info_Ref; The_Task_Type : in RawDict.Type_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Declaration, --# The_Task_Type; is Previous : RawDict.Declaration_Info_Ref; begin Previous := RawDict.Get_Task_Type_Last_Local_Declaration (The_Task_Type => The_Task_Type); if Previous = RawDict.Null_Declaration_Info_Ref then RawDict.Set_Task_Type_First_Local_Declaration (The_Task_Type => The_Task_Type, The_Declaration => The_Declaration); else RawDict.Set_Next_Declaration (The_Declaration => Previous, Next => The_Declaration); end if; RawDict.Set_Task_Type_Last_Local_Declaration (The_Task_Type => The_Task_Type, The_Declaration => The_Declaration); end Add_Local_Declaration_To_Task_Type; begin -- Add_Local_Declaration case RawDict.GetSymbolDiscriminant (Region) is when Package_Symbol => Add_Local_Declaration_To_Package (The_Declaration => The_Declaration, The_Package => RawDict.Get_Package_Info_Ref (Item => Region)); when Type_Symbol => case RawDict.Get_Type_Discriminant (RawDict.Get_Type_Info_Ref (Item => Region)) is when Protected_Type_Item => Add_Local_Declaration_To_Protected_Type (The_Declaration => The_Declaration, The_Protected_Type => RawDict.Get_Type_Info_Ref (Item => Region)); when Task_Type_Item => Add_Local_Declaration_To_Task_Type (The_Declaration => The_Declaration, The_Task_Type => RawDict.Get_Type_Info_Ref (Item => Region)); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.Add_Local_Declaration"); end case; when Subprogram_Symbol => Add_Local_Declaration_To_Subprogram (The_Declaration => The_Declaration, The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Region)); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.Add_Local_Declaration"); end case; end Add_Local_Declaration; -------------------------------------------------------------------------------- procedure Add_Private_Declaration (The_Declaration : in RawDict.Declaration_Info_Ref; Region : in Symbol) --# global in out Dict; --# derives Dict from *, --# Region, --# The_Declaration; is procedure Add_Private_Declaration_To_Package (The_Declaration : in RawDict.Declaration_Info_Ref; The_Package : in RawDict.Package_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Declaration, --# The_Package; is Previous : RawDict.Declaration_Info_Ref; begin Previous := RawDict.Get_Package_Last_Private_Declaration (The_Package => The_Package); if Previous = RawDict.Null_Declaration_Info_Ref then RawDict.Set_Package_First_Private_Declaration (The_Package => The_Package, The_Declaration => The_Declaration); else RawDict.Set_Next_Declaration (The_Declaration => Previous, Next => The_Declaration); end if; RawDict.Set_Package_Last_Private_Declaration (The_Package => The_Package, The_Declaration => The_Declaration); end Add_Private_Declaration_To_Package; -------------------------------------------------------------------------------- procedure Add_Private_Declaration_To_Protected_Type (The_Declaration : in RawDict.Declaration_Info_Ref; The_Protected_Type : in RawDict.Type_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Declaration, --# The_Protected_Type; is Previous : RawDict.Declaration_Info_Ref; begin Previous := RawDict.Get_Protected_Type_Last_Private_Declaration (The_Protected_Type => The_Protected_Type); if Previous = RawDict.Null_Declaration_Info_Ref then RawDict.Set_Protected_Type_First_Private_Declaration 
(The_Protected_Type => The_Protected_Type, The_Declaration => The_Declaration); else RawDict.Set_Next_Declaration (The_Declaration => Previous, Next => The_Declaration); end if; RawDict.Set_Protected_Type_Last_Private_Declaration (The_Protected_Type => The_Protected_Type, The_Declaration => The_Declaration); end Add_Private_Declaration_To_Protected_Type; begin -- Add_Private_Declaration case RawDict.GetSymbolDiscriminant (Region) is when Package_Symbol => Add_Private_Declaration_To_Package (The_Declaration => The_Declaration, The_Package => RawDict.Get_Package_Info_Ref (Item => Region)); when Type_Symbol => -- must be a protected type SystemErrors.RT_Assert (C => Is_Protected_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Region)), Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.Add_Private_Declaration"); Add_Private_Declaration_To_Protected_Type (The_Declaration => The_Declaration, The_Protected_Type => RawDict.Get_Type_Info_Ref (Item => Region)); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.Add_Private_Declaration"); end case; end Add_Private_Declaration; begin -- Add_Declaration RawDict.Create_Declaration (Context => Context, Scope => Scope, Comp_Unit => Comp_Unit, Loc => Loc.Start_Position, The_Declaration => The_Declaration); case Get_Visibility (Scope => Scope) is when Visible => Add_Visible_Declaration (The_Declaration => The_Declaration, Region => GetRegion (Scope)); when Local => Add_Local_Declaration (The_Declaration => The_Declaration, Region => GetRegion (Scope)); when Privat => Add_Private_Declaration (The_Declaration => The_Declaration, Region => GetRegion (Scope)); end case; end Add_Declaration; spark-2012.0.deb/examiner/commandlinedata.ads0000644000175000017500000004034111753202335020063 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with E_Strings; with ExaminerConstants; --# inherit ExaminerConstants, --# E_Strings, --# FileSystem, --# SPARK_IO, --# Version, --# XMLReport; package CommandLineData --# own Content : Command_Line_Contents; is Meta_File_Extension : constant String := "smf"; Default_Index_Extension : constant String := "idx"; Default_Warning_Extension : constant String := "wrn"; Default_Data_Extension : constant String := "dat"; Default_Config_Extension : constant String := "cfg"; type Source_File_Entry is record Source_File_Name : E_Strings.T; Listing : Boolean; Listing_File_Name : E_Strings.T; end record; subtype Source_File_Counts is Integer range 0 .. 
ExaminerConstants.MaxFilesOnCommandLine; subtype Source_File_Positions is Integer range 0 .. ExaminerConstants.MaxFilesOnCommandLine; type Source_File_Lists is array (Source_File_Positions) of Source_File_Entry; --------------------------------------------------------------------- -- To Add a new debug flag: -- -- 1. Add a Boolean field to this record, and supply an initial value -- for it in the body of CommandLineData.Initialize, and add a -- constant Option_Debug_XXX below for the new option. -- -- 2. Update CommandLineHandler.Parse_Command_Options.Process_D to -- recognize and set the new flag. -- -- 3. Document the new flag in the Examiner's help output in -- examiner.adb -- -- 4. Document the new flag in the Examiner User Manual -- -- 5. Test and use the flag to produce the output required --------------------------------------------------------------------- type Debug_State is record Enabled : Boolean; -- any -debug flag is enabled. Expressions : Boolean; -- -debug=e - expression walk debugging HTML : Boolean; -- -debug=h - HTML generation debugging Lookup_Trace : Boolean; -- -debug=l - Dictionary look up call tracing File_Names : Boolean; -- -debug=f - Trace filename storage/open/create Units : Boolean; -- -debug=u - Trace required units and index lookup Invariants : Boolean; -- -debug=i - Print default loop invariants Components : Boolean; -- -debug=c - print state of component manager when adding subcomponents Rho : Boolean; -- -debug=r - Print computed Rho relation for subprograms Parser : Boolean; -- -debug=p - Print parser state on detection of syntax error FDL_Ranking : Boolean; -- -debug=k - trace ranking and printing of FDL declarations VCG : Boolean; -- -debug=v - print VCG state after DAG.BuildGraph VCG_All : Boolean; -- -debug=V - as VCG, but also print VCG state during each iteration of Graph.GenVCs DAG : Boolean; -- -debug=d - print FDL DAG following BuildExpnDAG SLI : Boolean; -- -debug=x - Print cross-reference debug Extra_Stats : Boolean; -- -debug=t - Print extra detail when -statistics is on end record; type Concurrency_Profiles is (Sequential, Ravenscar); type Language_Profiles is (SPARK83, SPARK95, SPARK2005, KCG); -- Useful subtypes of language profiles subtype SPARK95_Onwards is Language_Profiles range SPARK95 .. Language_Profiles'Last; -- KCG profile is identical to SPARK2005 profile except for accept statements. subtype SPARK2005_Profiles is Language_Profiles range SPARK2005 .. KCG; subtype Auto_Code_Generators is Language_Profiles range KCG .. KCG; type Info_Flow_Policies is (None, Safety, Security); -- A subtype of the above excluding "None" subtype Defined_Info_Flow_Policies is Info_Flow_Policies range Safety .. Info_Flow_Policies'Last; type Justification_Options is (Ignore, Full, Brief); type Brief_Options is (No_Path, Full_Path); type Flow_Analysis_Options is (Data_Flow, Info_Flow, Auto_Flow); ------------------------------------------------------------------------------------- -- Proof rule generation policies for composite constants. If this type -- changes, then the error message in CommandLineHandler.OutputError must be updated. -- -- Whether a proof rule gets generated or not for a particular constant depends on -- the seeting of this switch, and the presence (or absence) of an object_assertion -- annotation for that constant. 
-- -- The policies are as follows: -- No_Rules - NEVER generate composite proof rules, regardless of annotations -- All_Rules - ALWAYS generate composite proof rules, regardless of annotations -- Lazy - Generate rules for constants where an annotation requests it. -- If no annotation is given then no rule is generated. -- Keen - Generate rules for constants where an annotation requests it. -- If no annotation is given then a rule IS generated. ------------------------------------------------------------------------------------- type Rule_Generation_Policies is (No_Rules, Lazy, Keen, All_Rules); type Error_Explanations is (Off, First_Occurrence, Every_Occurrence); type Command_Line_Contents is record Valid : Boolean; Index : Boolean; Index_File_Name : E_Strings.T; Warning : Boolean; Warning_File_Name : E_Strings.T; Target_Data : Boolean; Target_Data_File : E_Strings.T; Target_Config : Boolean; Target_Config_File : E_Strings.T; Source_Extension : E_Strings.T; Number_Source : Source_File_Counts; Source_File_List : Source_File_Lists; Listing_Extension : E_Strings.T; No_Listings : Boolean; Report : Boolean; Report_File_Name : E_Strings.T; Write_Dict : Boolean; Dict_File_Name : E_Strings.T; FDL_Reserved : Boolean; FDL_Mangle : E_Strings.T; HTML : Boolean; HTML_Directory : E_Strings.T; Output_Directory : Boolean; Output_Directory_Name : E_Strings.T; VCG : Boolean; -- Generate Verification Conditions DPC : Boolean; -- Generate Dead-Path Conjectures Anno_Char : Character; Concurrency_Profile : Concurrency_Profiles; Language_Profile : Language_Profiles; Info_Flow_Policy : Info_Flow_Policies; Constant_Rules : Rule_Generation_Policies; Error_Explanation : Error_Explanations; Justification_Option : Justification_Options; Debug : Debug_State; Echo : Boolean; Makefile_Mode : Boolean; Syntax_Only : Boolean; Write_Statistics : Boolean; Flow_Option : Flow_Analysis_Options; Default_Switch_File : Boolean; Plain_Output : Boolean; Version_Requested : Boolean; Help_Requested : Boolean; VC_Finger_Prints : Boolean; No_Duration : Boolean; Brief : Boolean; Brief_Option : Brief_Options; XML : Boolean; Legacy_Errors : Boolean; Generate_SLI : Boolean; Casing_Standard : Boolean; Casing_Identifier : Boolean; SPARK_Lib : Boolean; Distribution_Is_Pro : Boolean; GPL_Switch : Boolean; end record; -- Keep this sorted. In emacs M-x sort-lines may be useful. 
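 --
 -- As an illustration of steps 1 and 2 of the "To Add a new debug flag"
 -- note above: a hypothetical flag -debug=q for query tracing would add a
 -- record component such as
 --    Queries : Boolean; -- -debug=q - Trace dictionary queries
 -- to Debug_State, a constant such as
 --    Option_Debug_Q : constant Character := 'q';
 -- to the sorted list below, and a corresponding branch in
 -- CommandLineHandler.Parse_Command_Options.Process_D to set the flag.
 --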
Option_Annotation_Character : constant String := "annotation_character"; Option_Brief : constant String := "brief"; Option_Brief_Full_Path : constant String := "fullpath"; Option_Brief_No_Path : constant String := "nopath"; Option_Casing : constant String := "casing"; Option_Casing_Identifier : constant Character := 'i'; Option_Casing_Standard : constant Character := 's'; Option_Config_File : constant String := "config_file"; Option_Debug : constant String := "debug"; Option_Debug_C : constant Character := 'c'; Option_Debug_D : constant Character := 'd'; Option_Debug_E : constant Character := 'e'; Option_Debug_F : constant Character := 'f'; Option_Debug_H : constant Character := 'h'; Option_Debug_I : constant Character := 'i'; Option_Debug_K : constant Character := 'k'; Option_Debug_L : constant Character := 'l'; Option_Debug_P : constant Character := 'p'; Option_Debug_R : constant Character := 'r'; Option_Debug_T : constant Character := 't'; Option_Debug_U : constant Character := 'u'; Option_Debug_V : constant Character := 'v'; Option_Debug_V_Upper : constant Character := 'V'; Option_Debug_X : constant Character := 'x'; Option_Dictionary_File : constant String := "dictionary_file"; Option_Dpc : constant String := "dpc"; Option_Error_Explanations : constant String := "error_explanations"; Option_Error_Explanations_Every_Occurrence : constant String := "every_occurrence"; Option_Error_Explanations_First_Occurrence : constant String := "first_occurrence"; Option_Error_Explanations_Off : constant String := "off"; Option_Fdl_Identifiers : constant String := "fdl_identifiers"; Option_Fdl_Identifiers_Accept : constant String := "accept"; Option_Fdl_Identifiers_Reject : constant String := "reject"; Option_Flow_Analysis : constant String := "flow_analysis"; Option_Flow_Analysis_Auto : constant String := "auto"; Option_Flow_Analysis_Data : constant String := "data"; Option_Flow_Analysis_Information : constant String := "information"; Option_GPL : constant String := "gpl"; Option_Help : constant String := "help"; Option_Html : constant String := "html"; Option_Index_File : constant String := "index_file"; Option_Justification_Option : constant String := "justification_option"; Option_Justification_Option_Brief : constant String := "brief"; Option_Justification_Option_Full : constant String := "full"; Option_Justification_Option_Ignore : constant String := "ignore"; Option_Language : constant String := "language"; Option_Language_2005 : constant String := "2005"; Option_Language_83 : constant String := "83"; Option_Language_95 : constant String := "95"; Option_Language_KCG : constant String := "KCG"; Option_Listing_Extension : constant String := "listing_extension"; Option_Listing_File : constant String := "listing_file"; Option_Makefile_Mode : constant String := "makefile"; Option_No_Dictionary : constant String := "nodictionary"; Option_No_Duration : constant String := "noduration"; Option_No_Echo : constant String := "noecho"; Option_No_Listing_File : constant String := "nolisting_file"; Option_No_Listings : constant String := "nolistings"; Option_No_Sli : constant String := "nosli"; Option_No_Switch : constant String := "noswitch"; Option_Original_Flow_Errors : constant String := "original_flow_errors"; Option_Output_Directory : constant String := "output_directory"; Option_Plain_Output : constant String := "plain_output"; Option_Policy : constant String := "policy"; Option_Policy_Safety : constant String := "safety"; Option_Policy_Security : constant String := "security"; Option_Profile : 
constant String := "profile"; Option_Profile_Ravenscar : constant String := "ravenscar"; Option_Profile_Sequential : constant String := "sequential"; Option_Report_File : constant String := "report_file"; Option_Rules : constant String := "rules"; Option_Rules_All : constant String := "all"; Option_Rules_Keen : constant String := "keen"; Option_Rules_Lazy : constant String := "lazy"; Option_Rules_None : constant String := "none"; Option_SPARK_Lib : constant String := "sparklib"; Option_Source_Extension : constant String := "source_extension"; Option_Statistics : constant String := "statistics"; Option_Syntax_Check : constant String := "syntax_check"; Option_Target_Compiler_Data : constant String := "target_compiler_data"; Option_Vcg : constant String := "vcg"; Option_Version : constant String := "version"; Option_Warning_File : constant String := "warning_file"; Option_Xml : constant String := "xml"; Content : Command_Line_Contents; procedure Initialize; --# global out Content; --# derives Content from ; -- If Content.Output_Directory is set, then interpret and return F relative -- to the specified output directory procedure Normalize_File_Name_To_Output_Directory (F : in out E_Strings.T); --# global in Content; --# derives F from *, --# Content; -- Echoes the various filenames in Content to the terminal, -- iff Content.Debug.File_Names has been selected. procedure Dump_File_Names; --# global in Content; --# derives null from Content; procedure Output_Command_Line (Prefix : in String; XML : in Boolean; Option_Str : out E_Strings.T); --# global in Content; --# in SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives Option_Str from Content, --# Prefix, --# SPARK_IO.File_Sys, --# XML, --# XMLReport.State & --# XMLReport.State from *, --# Content, --# XML; function Ravenscar_Selected return Boolean; --# global in Content; end CommandLineData; spark-2012.0.deb/examiner/dag_io.adb0000644000175000017500000022354111753202336016152 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with AdjustFDL_RWs; with Cell_Storage; with Clists; with CommandLineData; with CStacks; with E_Strings; with LexTokenManager; with Maths; with SP_Symbols; with SystemErrors; use type SP_Symbols.SP_Symbol; use type LexTokenManager.Str_Comp_Result; use type Maths.ErrorCode; package body DAG_IO is type Expn_Type is (Conclusion, Condition, Hypothesis); ---------------------------------------------------------------------------- procedure Put_Examiner_String (File : in SPARK_IO.File_Type; Item : in E_Strings.T) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# Item; is begin E_Strings.Put_String (File => File, E_Str => E_Strings.Lower_Case (E_Str => Item)); end Put_Examiner_String; ---------------------------------------------------------------------------- procedure Print_Cell_Contents (Heap : in Cells.Heap_Record; Output_File : in SPARK_IO.File_Type; Cell_Name : in Cells.Cell; Suppress_Wrap : in out Boolean; Scope : in Dictionary.Scopes; Wrap_Limit : in Positive; Escape_DOT : in Boolean) is This_Cell_Sym_Value : Dictionary.Symbol; This_Cell_Kind : Cells.Cell_Kind; This_Cell_String_Value : LexTokenManager.Lex_String; This_Cell_Op : SP_Symbols.SP_Symbol; E_String : E_Strings.T; ------------------------------------------------------- procedure Check_Wrap (Width : in Integer) --# global in Output_File; --# in Wrap_Limit; --# in out SPARK_IO.File_Sys; --# in out Suppress_Wrap; --# derives SPARK_IO.File_Sys from *, --# Output_File, --# Suppress_Wrap, --# Width, --# Wrap_Limit & --# Suppress_Wrap from *; is begin if Suppress_Wrap then Suppress_Wrap := False; else if SPARK_IO.Col (Output_File) + Width > Wrap_Limit then SPARK_IO.New_Line (Output_File, 1); SPARK_IO.Put_String (Output_File, " ", 0); end if; end if; end Check_Wrap; ------------------------------------------------------- procedure VCG_Put_String (Str : in E_Strings.T) --# global in Output_File; --# in Wrap_Limit; --# in out SPARK_IO.File_Sys; --# in out Suppress_Wrap; --# derives SPARK_IO.File_Sys from *, --# Output_File, --# Str, --# Suppress_Wrap, --# Wrap_Limit & --# Suppress_Wrap from *; is begin Check_Wrap (Width => E_Strings.Get_Length (E_Str => Str)); Put_Examiner_String (File => Output_File, Item => Str); end VCG_Put_String; ------------------------------------------------------- procedure VCG_Put_String_With_Prefix (Prefix : in String; Str : in E_Strings.T) --# global in Output_File; --# in Wrap_Limit; --# in out SPARK_IO.File_Sys; --# in out Suppress_Wrap; --# derives SPARK_IO.File_Sys from *, --# Output_File, --# Prefix, --# Str, --# Suppress_Wrap, --# Wrap_Limit & --# Suppress_Wrap from *; is Str_To_Put : E_Strings.T; begin Str_To_Put := E_Strings.Copy_String (Str => Prefix); E_Strings.Append_Examiner_String (E_Str1 => Str_To_Put, E_Str2 => Str); VCG_Put_String (Str => Str_To_Put); end VCG_Put_String_With_Prefix; ------------------------------------------------------- procedure VCG_Put_Integer (Value : in Integer) --# global in Output_File; --# in Wrap_Limit; --# in out SPARK_IO.File_Sys; --# in out Suppress_Wrap; --# derives SPARK_IO.File_Sys from *, --# Output_File, --# Suppress_Wrap, --# Value, --# Wrap_Limit & --# Suppress_Wrap from *; is function Width (N : Natural) return Natural is Num, Wid : Natural; begin Num := N; Wid := 0; loop Num := Num / 10; Wid := Wid + 1; exit when Num = 0; end loop; return Wid; end Width; begin --VCG_Put_Integer Check_Wrap (Width => Width (Value)); SPARK_IO.Put_Integer 
(Output_File, Value, 0, 10); end VCG_Put_Integer; ------------------------------------------------------- procedure Print_Symbol (File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Sym : in Dictionary.Symbol; Kind : in Cells.Cell_Kind) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Wrap_Limit; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# File, --# Kind, --# LexTokenManager.State, --# Scope, --# Sym, --# Wrap_Limit; is Prefix_Fn_Length : Natural; Package_Ex_Str : E_Strings.T; Ex_Str : E_Strings.T; procedure Possibly_Print_Underbar (S : in E_Strings.T) --# global in File; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# S; is --This horrible kludge is to add a trailing underbar to user-declared --quantifiers to prevent any capture by local variables of the same name. --The kludge is that we don't want to add one to quantifiers that have been --added by the Examiner for RTC purposes. These are characterized by --having names of the form xxxxx__nn where x are any chars and n are digits. --We add a trailing underbar if the terminal characters are not digits or --if the are not preceded by a double underbar subtype Numerals is Character range '0' .. '9'; I : E_Strings.Lengths; Underbar_Wanted : Boolean := True; begin --can't be the kind we are interested in unless last char is a numeral if E_Strings.Get_Element (E_Str => S, Pos => E_Strings.Get_Length (E_Str => S)) in Numerals then --now consume any other numerals that might be there I := E_Strings.Get_Length (E_Str => S) - 1; --penultimate char while I > 2 and then --2 is the lowest index that could be ok e.g. "x__" E_Strings.Get_Element (E_Str => S, Pos => I) in Numerals loop I := I - 1; end loop; --I now points at the first non-numeral from the back of the string --we don't need an underbar if Ith and (I-1)th characters are underbars if E_Strings.Get_Element (E_Str => S, Pos => I) = '_' and then E_Strings.Get_Element (E_Str => S, Pos => I - 1) = '_' then Underbar_Wanted := False; end if; end if; --print an underbar unless the above search has shown we don't want one if Underbar_Wanted then SPARK_IO.Put_Char (File, '_'); end if; end Possibly_Print_Underbar; begin -- Print_Symbol if Dictionary.Is_Null_Symbol (Sym) then SPARK_IO.Put_String (File, "unexpected_null_symbol", 0); elsif Dictionary.IsTypeMark (Sym) and then Dictionary.IsUnknownTypeMark (Sym) then SPARK_IO.Put_String (File, "unknown_type", 0); else case Kind is when Cell_Storage.Mk_Aggregate => Prefix_Fn_Length := 4; when others => Prefix_Fn_Length := 0; end case; if Dictionary.IsRecordComponent (Sym) then Package_Ex_Str := E_Strings.Empty_String; else Package_Ex_Str := Dictionary.GetAnyPrefixNeeded (Sym => Sym, Scope => Scope, Separator => "__"); end if; Ex_Str := Dictionary.GenerateSimpleName (Item => Sym, Separator => "__"); if ((((SPARK_IO.Col (File) + Prefix_Fn_Length) + E_Strings.Get_Length (E_Str => Package_Ex_Str)) + E_Strings.Get_Length (E_Str => Ex_Str)) + 12) > Wrap_Limit then SPARK_IO.New_Line (File, 1); SPARK_IO.Put_String (File, " ", 0); end if; case Kind is when Cell_Storage.Mk_Aggregate => SPARK_IO.Put_String (File, "mk__", 0); when others => null; end case; if E_Strings.Get_Length (E_Str => Package_Ex_Str) > 0 then Put_Examiner_String (File => File, Item => Package_Ex_Str); SPARK_IO.Put_String (File, "__", 0); elsif not E_Strings.Is_Empty (E_Str => CommandLineData.Content.FDL_Mangle) then 
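-- The unqualified name may clash with an FDL reserved identifier, in which
-- case it is mangled with the user-supplied prefix (cf. the same check in
-- Print_Procedure_Export below).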
AdjustFDL_RWs.Possibly_Adjust (E_Str => Ex_Str, Prefix => CommandLineData.Content.FDL_Mangle); end if; Put_Examiner_String (File => File, Item => Ex_Str); if Dictionary.IsQuantifiedVariable (Sym) then Possibly_Print_Underbar (S => Ex_Str); end if; end if; end Print_Symbol; ------------------------------------------------------- procedure Print_Procedure_Export (Cell_Name : in Cells.Cell; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in Heap; --# in LexTokenManager.State; --# in Output_File; --# in Wrap_Limit; --# in out SPARK_IO.File_Sys; --# in out Suppress_Wrap; --# derives SPARK_IO.File_Sys from *, --# Cell_Name, --# CommandLineData.Content, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Output_File, --# Scope, --# Suppress_Wrap, --# Wrap_Limit & --# Suppress_Wrap from *; is Pack_Ex_Str, Export_String : E_Strings.T; Var_Sym : Dictionary.Symbol; begin -- Get the exported variable symbol from the dictionary -- and convert its name to an E_String. Var_Sym := Cells.Get_Symbol_Value (Heap, Cell_Name); -- Determine whether there is a prefix required for the variable name. Pack_Ex_Str := Dictionary.GetAnyPrefixNeeded (Sym => Var_Sym, Scope => Scope, Separator => "__"); if E_Strings.Get_Length (E_Str => Pack_Ex_Str) > 0 then -- There is a prefix and so prepend "__" to the exported variable. Export_String := Pack_Ex_Str; E_Strings.Append_String (E_Str => Export_String, Str => "__"); E_Strings.Append_Examiner_String (E_Str1 => Export_String, E_Str2 => Dictionary.GenerateSimpleName (Item => Var_Sym, Separator => "_")); else Export_String := Dictionary.GenerateSimpleName (Item => Var_Sym, Separator => "_"); -- It is a simple name and may be an FDL reserved identifier -- so check whether the name has to be mangled. if not E_Strings.Is_Empty (E_Str => CommandLineData.Content.FDL_Mangle) then AdjustFDL_RWs.Possibly_Adjust (E_Str => Export_String, Prefix => CommandLineData.Content.FDL_Mangle); end if; end if; -- The export name needs to be extended with its instance number -- e.g., export_var__1, export_var__2, etc. E_Strings.Append_String (E_Str => Export_String, Str => "__"); -- The instance number is contained within the cell representing the -- exported variable (Cell_Name). E_Strings.Append_Examiner_String (E_Str1 => Export_String, E_Str2 => LexTokenManager.Lex_String_To_String (Lex_Str => Cells.Get_Lex_Str (Heap, Cell_Name))); -- Now the exported variable name can be output. 
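-- For example, an export X of package P with instance number 2 is emitted
-- (lower-cased) as p__x__2.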
VCG_Put_String (Str => Export_String); end Print_Procedure_Export; procedure Print_Manifest_Constant_Cell (Cell_Name : in Cells.Cell) --# global in Heap; --# in LexTokenManager.State; --# in Output_File; --# in Wrap_Limit; --# in out SPARK_IO.File_Sys; --# in out Suppress_Wrap; --# derives SPARK_IO.File_Sys from *, --# Cell_Name, --# Heap, --# LexTokenManager.State, --# Output_File, --# Suppress_Wrap, --# Wrap_Limit & --# Suppress_Wrap from *, --# Cell_Name, --# Heap, --# LexTokenManager.State; is E_Str : E_Strings.T; Value : Maths.Value; Err : Maths.ErrorCode; L_Str : LexTokenManager.Lex_String; procedure Print_String_Literal --# global in E_Str; --# in Output_File; --# in Wrap_Limit; --# in out SPARK_IO.File_Sys; --# in out Suppress_Wrap; --# derives SPARK_IO.File_Sys from *, --# E_Str, --# Output_File, --# Suppress_Wrap, --# Wrap_Limit & --# Suppress_Wrap from *, --# E_Str; is Separator : Character; Position : Positive; procedure Print_One_Element (Char_Code : in Integer) --# global in Output_File; --# in Position; --# in Separator; --# in Wrap_Limit; --# in out SPARK_IO.File_Sys; --# in out Suppress_Wrap; --# derives SPARK_IO.File_Sys from *, --# Char_Code, --# Output_File, --# Position, --# Separator, --# Suppress_Wrap, --# Wrap_Limit & --# Suppress_Wrap from *; is begin SPARK_IO.Put_Char (Output_File, Separator); SPARK_IO.Put_Char (Output_File, ' '); Check_Wrap (Width => 5); SPARK_IO.Put_Char (Output_File, '['); VCG_Put_Integer (Value => Position); SPARK_IO.Put_Char (Output_File, ']'); Check_Wrap (Width => 4); SPARK_IO.Put_String (Output_File, " := ", 0); VCG_Put_Integer (Value => Char_Code); end Print_One_Element; begin -- Print_String_Literal case E_Strings.Get_Length (E_Str => E_Str) is when 0 | 1 => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Precondition_Failure, Msg => "Mal-formed string literal in VCG Print_String_Literal"); when 2 => -- Must be "" - the null string literal. SystemErrors.RT_Assert (C => E_Strings.Eq1_String (E_Str => E_Str, Str => """"""), Sys_Err => SystemErrors.Precondition_Failure, Msg => "Mal-formed NULL string literal in VCG Print_String_Literal"); Check_Wrap (Width => 9); SPARK_IO.Put_String (Output_File, Null_String_Literal_Name, 0); when others => Check_Wrap (Width => 9); SPARK_IO.Put_String (Output_File, "mk__string", 0); Separator := '('; Position := 1; for I in E_Strings.Positions range 2 .. 
E_Strings.Get_Length (E_Str => E_Str) - 1 loop Print_One_Element (Char_Code => Character'Pos (E_Strings.Get_Element (E_Str => E_Str, Pos => I))); Separator := ','; Position := Position + 1; end loop; SPARK_IO.Put_Char (Output_File, ')'); end case; end Print_String_Literal; begin -- Print_Manifest_Constant_Cell L_Str := Cells.Get_Lex_Str (Heap, Cell_Name); E_Str := LexTokenManager.Lex_String_To_String (Lex_Str => L_Str); if E_Strings.Get_Element (E_Str => E_Str, Pos => 1) = ''' then --character literal VCG_Put_Integer (Value => Character'Pos (E_Strings.Get_Element (E_Str => E_Str, Pos => 2))); elsif E_Strings.Get_Element (E_Str => E_Str, Pos => 1) = '"' then --string literal Print_String_Literal; else -- should be a numeric Maths.LiteralToValue (L_Str, Value, Err); if Err = Maths.NoError then E_Str := Maths.ValueToString (Value); end if; VCG_Put_String (Str => E_Str); end if; end Print_Manifest_Constant_Cell; begin -- Print_Cell_Contents case Cells.Get_Kind (Heap, Cell_Name) is when Cell_Storage.Manifest_Const => Print_Manifest_Constant_Cell (Cell_Name => Cell_Name); when Cell_Storage.Attrib_Value | Cell_Storage.Attrib_Function => Put_Examiner_String (File => Output_File, Item => LexTokenManager.Lex_String_To_String (Lex_Str => Cells.Get_Lex_Str (Heap, Cell_Name))); when Cell_Storage.Field_Access_Function => E_String := LexTokenManager.Lex_String_To_String (Lex_Str => Cells.Get_Lex_Str (Heap, Cell_Name)); if not E_Strings.Is_Empty (E_Str => CommandLineData.Content.FDL_Mangle) then AdjustFDL_RWs.Possibly_Adjust (E_Str => E_String, Prefix => CommandLineData.Content.FDL_Mangle); end if; VCG_Put_String_With_Prefix (Prefix => "fld_", Str => E_String); when Cell_Storage.Field_Update_Function => E_String := LexTokenManager.Lex_String_To_String (Lex_Str => Cells.Get_Lex_Str (Heap, Cell_Name)); if not E_Strings.Is_Empty (E_Str => CommandLineData.Content.FDL_Mangle) then AdjustFDL_RWs.Possibly_Adjust (E_Str => E_String, Prefix => CommandLineData.Content.FDL_Mangle); end if; VCG_Put_String_With_Prefix (Prefix => "upf_", Str => E_String); when Cell_Storage.Element_Function => SPARK_IO.Put_String (Output_File, "element", 0); when Cell_Storage.Update_Function => SPARK_IO.Put_String (Output_File, "update", 0); when Cell_Storage.Pred_Function => SPARK_IO.Put_String (Output_File, "pred", 0); when Cell_Storage.Succ_Function => SPARK_IO.Put_String (Output_File, "succ", 0); when Cell_Storage.Abs_Function => SPARK_IO.Put_String (Output_File, "abs", 0); when Cell_Storage.Trunc_Function => SPARK_IO.Put_String (Output_File, "round__", 0); when Cell_Storage.List_Function => null; when Cell_Storage.FDL_Div_Op => SPARK_IO.Put_String (Output_File, " div ", 0); when Cell_Storage.Op => if SPARK_IO.Col (Output_File) > (Wrap_Limit + 3) and then Cells.Get_Op_Symbol (Heap, Cell_Name) /= SP_Symbols.apostrophe then SPARK_IO.New_Line (Output_File, 1); SPARK_IO.Put_String (Output_File, " ", 0); end if; case Cells.Get_Op_Symbol (Heap, Cell_Name) is when SP_Symbols.colon => SPARK_IO.Put_String (Output_File, ": ", 0); when SP_Symbols.comma => SPARK_IO.Put_String (Output_File, ", ", 0); when SP_Symbols.RWand | SP_Symbols.RWandthen => SPARK_IO.Put_String (Output_File, " and ", 0); when SP_Symbols.RWor | SP_Symbols.RWorelse => SPARK_IO.Put_String (Output_File, " or ", 0); when SP_Symbols.equals => SPARK_IO.Put_String (Output_File, " = ", 0); when SP_Symbols.not_equal => if Escape_DOT then SPARK_IO.Put_String (Output_File, " \<\> ", 0); else SPARK_IO.Put_String (Output_File, " <> ", 0); end if; when SP_Symbols.less_than => if 
Escape_DOT then SPARK_IO.Put_String (Output_File, " \< ", 0); else SPARK_IO.Put_String (Output_File, " < ", 0); end if; when SP_Symbols.less_or_equal => if Escape_DOT then SPARK_IO.Put_String (Output_File, " \<= ", 0); else SPARK_IO.Put_String (Output_File, " <= ", 0); end if; when SP_Symbols.greater_than => if Escape_DOT then SPARK_IO.Put_String (Output_File, " \> ", 0); else SPARK_IO.Put_String (Output_File, " > ", 0); end if; when SP_Symbols.greater_or_equal => if Escape_DOT then SPARK_IO.Put_String (Output_File, " \>= ", 0); else SPARK_IO.Put_String (Output_File, " >= ", 0); end if; when SP_Symbols.implies => if Escape_DOT then SPARK_IO.Put_String (Output_File, " -\> ", 0); else SPARK_IO.Put_String (Output_File, " -> ", 0); end if; when SP_Symbols.is_equivalent_to => if Escape_DOT then SPARK_IO.Put_String (Output_File, " \<-\> ", 0); else SPARK_IO.Put_String (Output_File, " <-> ", 0); end if; when SP_Symbols.plus => if Cells.Is_Null_Cell (Cells.Get_A_Ptr (Heap, Cell_Name)) then -- Unary + is semantically meaningless, so no point -- printing it. Suppress_Wrap := True; else SPARK_IO.Put_String (Output_File, " + ", 0); -- Binary end if; when SP_Symbols.minus => if Cells.Is_Null_Cell (Cells.Get_A_Ptr (Heap, Cell_Name)) then SPARK_IO.Put_String (Output_File, " -", 0); -- Unary Suppress_Wrap := True; else SPARK_IO.Put_String (Output_File, " - ", 0); -- Binary end if; when SP_Symbols.multiply => SPARK_IO.Put_String (Output_File, " * ", 0); when SP_Symbols.divide => SPARK_IO.Put_String (Output_File, " / ", 0); when SP_Symbols.RWmod => SPARK_IO.Put_String (Output_File, " mod ", 0); when SP_Symbols.double_star => SPARK_IO.Put_String (Output_File, " ** ", 0); when SP_Symbols.RWnot => SPARK_IO.Put_String (Output_File, "not ", 0); when SP_Symbols.apostrophe => --------------------------------- -- apostrophe becomes "__" in FDL --------------------------------- SPARK_IO.Put_String (Output_File, "__", 0); when SP_Symbols.ampersand => SPARK_IO.Put_String (Output_File, " & ", 0); when SP_Symbols.becomes => SPARK_IO.Put_String (Output_File, " := ", 0); when SP_Symbols.double_dot => SPARK_IO.Put_String (Output_File, " .. 
", 0); when SP_Symbols.RWforall => SPARK_IO.Put_String (Output_File, "for_all", 0); when SP_Symbols.RWforsome => SPARK_IO.Put_String (Output_File, "for_some", 0); -------------------------------------------------------------------- when SP_Symbols.right_paren => SPARK_IO.Put_String (Output_File, " Parenthesis_Requested", 0); when others => SPARK_IO.Put_String (Output_File, " undef_op_value ", 0); end case; when Cell_Storage.Internal_Natural => SPARK_IO.Put_Integer (File => Output_File, Item => Cells.Get_Natural_Value (Heap, Cell_Name), Width => Natural'First, Base => 10); when Cell_Storage.Internal_Scope => SPARK_IO.Put_String (Output_File, "(Dictionary.Scopes in Internal_Scope)", 0); when Cell_Storage.Named_Const | Cell_Storage.Declared_Function | Cell_Storage.Proof_Function | Cell_Storage.Modified_Op | Cell_Storage.Reference | Cell_Storage.Constraining_Index | Cell_Storage.Fixed_Var | Cell_Storage.Unconstrained_Attribute_Prefix | Cell_Storage.Mk_Aggregate => This_Cell_Sym_Value := Cells.Get_Symbol_Value (Heap, Cell_Name); This_Cell_Kind := Cells.Get_Kind (Heap, Cell_Name); This_Cell_String_Value := Cells.Get_Lex_Str (Heap, Cell_Name); This_Cell_Op := Cells.Get_Op_Symbol (Heap, Cell_Name); if This_Cell_Kind = Cell_Storage.Mk_Aggregate and then Dictionary.IsSubtype (This_Cell_Sym_Value) then This_Cell_Sym_Value := Dictionary.GetRootType (This_Cell_Sym_Value); end if; Print_Symbol (File => Output_File, Scope => Scope, Sym => This_Cell_Sym_Value, Kind => This_Cell_Kind); -- An unconstrained attribute prefix _might_ have a tilde, -- such as O~'First. This is allowed by the grammar, -- but removed here, since the attributes of an unconstrained -- array parameter cannot change during the lifetime of the -- object. Essentially, we know that O~'First = O'First, -- so we just print the latter. if This_Cell_Op = SP_Symbols.tilde and This_Cell_Kind /= Cell_Storage.Unconstrained_Attribute_Prefix then SPARK_IO.Put_String (Output_File, "~", 0); end if; -- Only print numeric suffix if number present in string field. if This_Cell_Kind = Cell_Storage.Declared_Function and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => This_Cell_String_Value, Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq then SPARK_IO.Put_String (Output_File, "__", 0); Put_Examiner_String (File => Output_File, Item => LexTokenManager.Lex_String_To_String (Lex_Str => This_Cell_String_Value)); end if; when Cell_Storage.Procedure_Export => Print_Procedure_Export (Cell_Name => Cell_Name, Scope => Scope); when Cell_Storage.Procedure_Name | Cell_Storage.Call_Counter => null; when Cell_Storage.Pending_Function => SPARK_IO.Put_String (Output_File, " Incomplete_Function", 0); when Cell_Storage.Aggregate_Counter | Cell_Storage.Incomplete_Aggregate => SPARK_IO.Put_String (Output_File, " Incomplete_Aggregate", 0); when Cell_Storage.Return_Var => SPARK_IO.Put_String (Output_File, "return", 0); when Cell_Storage.Root_Integer => SPARK_IO.Put_String (Output_File, "system__", 0); -- Put out min or max Put_Examiner_String (File => Output_File, Item => LexTokenManager.Lex_String_To_String (Lex_Str => Cells.Get_Lex_Str (Heap, Cell_Name))); -- and complete with _int. 
SPARK_IO.Put_String (Output_File, "_int", 0); when Cell_Storage.Bitwise_Op => if Dictionary.TypeIsArray (Cells.Get_Symbol_Value (Heap, Cell_Name)) then Print_Symbol (File => Output_File, Scope => Scope, Sym => Cells.Get_Symbol_Value (Heap, Cell_Name), Kind => Cells.Get_Kind (Heap, Cell_Name)); SPARK_IO.Put_String (Output_File, "__", 0); case Cells.Get_Op_Symbol (Heap, Cell_Name) is when SP_Symbols.RWand => SPARK_IO.Put_String (Output_File, "and", 0); when SP_Symbols.RWor => SPARK_IO.Put_String (Output_File, "or", 0); when SP_Symbols.RWxor => SPARK_IO.Put_String (Output_File, "xor", 0); when SP_Symbols.RWnot => SPARK_IO.Put_String (Output_File, "not", 0); when others => SPARK_IO.Put_String (Output_File, "undef_op_value", 0); end case; elsif Dictionary.TypeIsModular (Cells.Get_Symbol_Value (Heap, Cell_Name)) then case Cells.Get_Op_Symbol (Heap, Cell_Name) is -- Note bitwise "not" for modular types is expanded in the -- VCG.ProduceVCs.BuildGraph, and so should never appear here. when SP_Symbols.RWand => SPARK_IO.Put_String (Output_File, "bit__and", 0); when SP_Symbols.RWor => SPARK_IO.Put_String (Output_File, "bit__or", 0); when SP_Symbols.RWxor => SPARK_IO.Put_String (Output_File, "bit__xor", 0); when others => SPARK_IO.Put_String (Output_File, "undef_op_value", 0); end case; end if; when Cell_Storage.Function_Call_In_Proof_Context => SPARK_IO.Put_String (Output_File, "Function_Call_In_Proof_Context", 0); when Cell_Storage.Proof_Function_Obtain_Precondition => SPARK_IO.Put_String (Output_File, "Proof_Function_Obtain_Precondition", 0); when Cell_Storage.Proof_Function_Obtain_Return => SPARK_IO.Put_String (Output_File, "Proof_Function_Obtain_Return", 0); when Cell_Storage.Proof_Function_Syntax_Node => SPARK_IO.Put_String (Output_File, "Syntax_Node", 0); when Cell_Storage.Quantifier => SPARK_IO.Put_String (Output_File, "Quantifier", 0); when Cell_Storage.Unknown_Kind => SPARK_IO.Put_String (Output_File, " unknown_cell_kind ", 0); end case; end Print_Cell_Contents; ------------------------------------------------------------------------- function IsLeaf (Node : Cells.Cell; Heap : Cells.Heap_Record) return Boolean is begin return Cells.Is_Null_Cell (Cells.Get_B_Ptr (Heap, Node)); end IsLeaf; -------------------------------------------------------------- procedure PrintDagLocal (Heap : in out Cells.Heap_Record; Output_File : in SPARK_IO.File_Type; Root : in Cells.Cell; Scope : in Dictionary.Scopes; Wrap_Limit : in Positive) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Heap, --# Root & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Output_File, --# Root, --# Scope, --# Wrap_Limit; is P, ParenthesisCell, SqBracketCell : Cells.Cell; ParenthesisForm : SP_Symbols.SP_Symbol; ParReqd : Boolean; S : CStacks.Stack; Suppress_Wrap : Boolean; -------------------------------------------------------------- procedure Parenthesise (V : in Cells.Cell; LeftTree : in Boolean; ParReqd : out Boolean; ParenthesisForm : out SP_Symbols.SP_Symbol) --# global in Heap; --# derives ParenthesisForm from Heap, --# V & --# ParReqd from Heap, --# LeftTree, --# V; is VPrecedence, WPrecedence : Natural; Operand, W : Cells.Cell; V_Kind : Cells.Cell_Kind; -------------------------------------------- function PrecedenceValue (C : Cells.Cell) return Natural --# global in Heap; is PrecVal : 
Natural; begin if Cells.Get_Kind (Heap, C) = Cell_Storage.FDL_Div_Op then PrecVal := 5; else case Cells.Get_Op_Symbol (Heap, C) is when SP_Symbols.RWand | SP_Symbols.RWor | SP_Symbols.RWandthen | SP_Symbols.RWorelse | SP_Symbols.implies | SP_Symbols.RWnot | SP_Symbols.is_equivalent_to => PrecVal := 1; when SP_Symbols.equals | SP_Symbols.not_equal | SP_Symbols.less_than | SP_Symbols.less_or_equal | SP_Symbols.greater_than | SP_Symbols.greater_or_equal => PrecVal := 2; when SP_Symbols.plus | SP_Symbols.minus | SP_Symbols.ampersand => PrecVal := 3; -- arity is taken into account by examining node degrees of operator nodes -- (see body of procedure Parenthesise). -- when SP_Symbols.unary_plus | -- SP_Symbols.unary_minus => PrecVal := 4; when SP_Symbols.multiply | SP_Symbols.divide | SP_Symbols.RWmod => PrecVal := 5; when SP_Symbols.double_star => PrecVal := 6; when others => PrecVal := 7; end case; end if; return PrecVal; end PrecedenceValue; -------------------------------------------------------------------- begin -- Parenthesise; ParReqd := False; ParenthesisForm := SP_Symbols.left_paren; V_Kind := Cells.Get_Kind (Heap, V); if (V_Kind = Cell_Storage.Declared_Function) or (V_Kind = Cell_Storage.Proof_Function) or (V_Kind = Cell_Storage.Attrib_Function) or (V_Kind = Cell_Storage.Field_Access_Function) or (V_Kind = Cell_Storage.Mk_Aggregate) or (V_Kind = Cell_Storage.List_Function) or (V_Kind = Cell_Storage.Element_Function) or (V_Kind = Cell_Storage.Update_Function) or (V_Kind = Cell_Storage.Pred_Function) or (V_Kind = Cell_Storage.Succ_Function) or (V_Kind = Cell_Storage.Abs_Function) or (V_Kind = Cell_Storage.Trunc_Function) or (V_Kind = Cell_Storage.Field_Update_Function) or (V_Kind = Cell_Storage.Bitwise_Op) then ParReqd := True; if (V_Kind = Cell_Storage.List_Function) then ParenthesisForm := SP_Symbols.square_open; end if; elsif ((V_Kind = Cell_Storage.Op) -- TEMPORARY FIX until right_paren given its own kind and then ((Cells.Get_Op_Symbol (Heap, V) /= SP_Symbols.right_paren) -- END OF TEMPORARY FIX. and (Cells.Get_Op_Symbol (Heap, V) /= SP_Symbols.comma))) or else (V_Kind = Cell_Storage.FDL_Div_Op) then if Cells.Is_Null_Cell (Cells.Get_A_Ptr (Heap, V)) then -- V is a monadic operator; Operand := Cells.Get_B_Ptr (Heap, V); if not IsLeaf (Operand, Heap) then ParReqd := True; end if; else if LeftTree then W := Cells.Get_A_Ptr (Heap, V); else W := Cells.Get_B_Ptr (Heap, V); end if; if not Cells.Is_Null_Cell (W) then if (Cells.Get_Kind (Heap, W) = Cell_Storage.Op) or else (Cells.Get_Kind (Heap, W) = Cell_Storage.FDL_Div_Op) then VPrecedence := PrecedenceValue (V); WPrecedence := PrecedenceValue (W); -- general rule for constructing unambiguous expressions: ParReqd := (VPrecedence > WPrecedence) or ((VPrecedence = WPrecedence) and not LeftTree); -- supplementary rules, to improve clarity: -- 1) if the outer op is logical op OR -- 2) the inner op is a relational op (such as "<=") OR -- 3) both the inner and outer ops are "**" -- then add parentheses if (VPrecedence = 1) or -- V is a logical operation; (WPrecedence = 2) or -- subtree W is a relation; (VPrecedence = 6 and WPrecedence = 6) then -- both "**" ParReqd := True; end if; end if; end if; end if; end if; end Parenthesise; ----------------------------------------------------------------------- begin -- PrintDagLocal -- Algorithm of D.E. 
Knuth, Fundamental Algorithms, p.317; Suppress_Wrap := False; CStacks.CreateStack (S); Cells.Create_Cell (Heap, ParenthesisCell); Cells.Set_Kind (Heap, ParenthesisCell, Cell_Storage.Op); Cells.Set_Op_Symbol (Heap, ParenthesisCell, SP_Symbols.left_paren); Cells.Create_Cell (Heap, SqBracketCell); Cells.Set_Kind (Heap, SqBracketCell, Cell_Storage.Op); Cells.Set_Op_Symbol (Heap, SqBracketCell, SP_Symbols.square_open); P := Root; loop loop exit when Cells.Is_Null_Cell (P); CStacks.Push (Heap, P, S); if IsLeaf (P, Heap) then P := Cells.Null_Cell; else if (not Cells.Is_Null_Cell (Cells.Get_A_Ptr (Heap, P))) then Parenthesise (P, True, ParReqd, ParenthesisForm); if ParReqd then if ParenthesisForm = SP_Symbols.left_paren then SPARK_IO.Put_String (Output_File, "(", 0); CStacks.Push (Heap, ParenthesisCell, S); else CStacks.Push (Heap, SqBracketCell, S); end if; end if; end if; P := Cells.Get_A_Ptr (Heap, P); end if; end loop; exit when CStacks.IsEmpty (S); P := CStacks.Top (Heap, S); CStacks.Pop (Heap, S); Print_Cell_Contents (Heap => Heap, Output_File => Output_File, Cell_Name => P, Suppress_Wrap => Suppress_Wrap, Scope => Scope, Wrap_Limit => Wrap_Limit, Escape_DOT => False); if IsLeaf (P, Heap) then P := Cells.Null_Cell; loop exit when not ((Cells.Are_Identical (CStacks.Top (Heap, S), ParenthesisCell)) or (Cells.Are_Identical (CStacks.Top (Heap, S), SqBracketCell))); if (Cells.Are_Identical (CStacks.Top (Heap, S), ParenthesisCell)) then SPARK_IO.Put_String (Output_File, ")", 0); else SPARK_IO.Put_String (Output_File, "]", 0); end if; CStacks.Pop (Heap, S); end loop; else Parenthesise (P, False, ParReqd, ParenthesisForm); if ParReqd then if ParenthesisForm = SP_Symbols.left_paren then SPARK_IO.Put_String (Output_File, "(", 0); CStacks.Push (Heap, ParenthesisCell, S); else SPARK_IO.Put_String (Output_File, "[", 0); CStacks.Push (Heap, SqBracketCell, S); end if; end if; P := Cells.Get_B_Ptr (Heap, P); end if; end loop; end PrintDagLocal; ------------------------------------------------------------------------- procedure PrintDag (Heap : in out Cells.Heap_Record; Output_File : in SPARK_IO.File_Type; Root : in Cells.Cell; Scope : in Dictionary.Scopes; Wrap_Limit : in Positive) is begin PrintDagLocal (Heap, Output_File, Root, Scope, Wrap_Limit); end PrintDag; ------------------------------------------------------------------------- procedure Partition (Root : in Cells.Cell; SubExpnList : in Cells.Cell; Heap : in out Cells.Heap_Record) is P, SubExpn : Cells.Cell; S : CStacks.Stack; begin CStacks.CreateStack (S); P := Root; loop loop exit when Cells.Is_Null_Cell (P); CStacks.Push (Heap, P, S); if (Cells.Get_Kind (Heap, P) = Cell_Storage.Op) and then ((Cells.Get_Op_Symbol (Heap, P) = SP_Symbols.RWand) or (Cells.Get_Op_Symbol (Heap, P) = SP_Symbols.RWandthen)) then P := Cells.Get_A_Ptr (Heap, P); else Cells.Create_Cell (Heap, SubExpn); Cells.Set_B_Ptr (Heap, SubExpn, P); Clists.AppendCell (Heap, SubExpn, SubExpnList); P := Cells.Null_Cell; end if; end loop; exit when CStacks.IsEmpty (S); P := CStacks.Top (Heap, S); CStacks.Pop (Heap, S); if (Cells.Get_Kind (Heap, P) = Cell_Storage.Op) and then ((Cells.Get_Op_Symbol (Heap, P) = SP_Symbols.RWand) or (Cells.Get_Op_Symbol (Heap, P) = SP_Symbols.RWandthen)) then P := Cells.Get_B_Ptr (Heap, P); else P := Cells.Null_Cell; end if; end loop; end Partition; ------------------------------------------------------------------------- procedure PrintLogicalExpn (Heap : in out Cells.Heap_Record; Output_File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Root : in 
Cells.Cell; TypeOfExpn : in Expn_Type; Wrap_Limit : in Positive) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Heap, --# Root & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Output_File, --# Root, --# Scope, --# TypeOfExpn, --# Wrap_Limit; is SubExpnList : Cells.Cell; procedure PrintListOfExpns --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Output_File; --# in Scope; --# in SubExpnList; --# in TypeOfExpn; --# in Wrap_Limit; --# in out Heap; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Heap, --# SubExpnList & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Output_File, --# Scope, --# SubExpnList, --# TypeOfExpn, --# Wrap_Limit; is ClauseNmbr : Natural; ListMember : Cells.Cell; procedure PrintTypeOfExpn --# global in Output_File; --# in TypeOfExpn; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Output_File, --# TypeOfExpn; is begin case TypeOfExpn is when Conclusion => SPARK_IO.Put_String (Output_File, "C", 0); when Condition => SPARK_IO.Put_String (Output_File, " ", 0); when Hypothesis => SPARK_IO.Put_String (Output_File, "H", 0); end case; end PrintTypeOfExpn; begin -- This looks like the place to suppress multiple Trues in hypotheses -- and do something with trues in conclusions. ClauseNmbr := 0; ListMember := Clists.FirstCell (Heap, SubExpnList); loop PrintTypeOfExpn; ClauseNmbr := ClauseNmbr + 1; SPARK_IO.Put_Integer (Output_File, ClauseNmbr, 0, 10); SPARK_IO.Put_String (Output_File, ":", 0); SPARK_IO.Set_Col (Output_File, 8); PrintDag (Heap, Output_File, Cells.Get_B_Ptr (Heap, ListMember), Scope, Wrap_Limit); SPARK_IO.Put_Line (Output_File, " .", 0); ListMember := Clists.NextCell (Heap, ListMember); exit when Cells.Is_Null_Cell (ListMember); end loop; end PrintListOfExpns; begin -- PrintLogicalExpn Clists.CreateList (Heap, SubExpnList); Partition (Root, SubExpnList, Heap); PrintListOfExpns; Clists.DisposeOfList (Heap, SubExpnList); end PrintLogicalExpn; ------------------------------------------------------------------------- procedure PrintLabel (Heap : in out Cells.Heap_Record; Output_File : in SPARK_IO.File_Type; LabelName : in Labels.Label; Scope : in Dictionary.Scopes; Wrap_Limit : in Positive) is PairCount : Natural; CurrentPair : Pairs.Pair; ------------------------------------------------------- procedure PrintPair (Output_File : in SPARK_IO.File_Type; PairName : in Pairs.Pair) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Scope; --# in Wrap_Limit; --# in out Heap; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Heap, --# PairName & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Output_File, --# PairName, --# Scope, --# Wrap_Limit; -- prints a predicate-action pair; is Action, ModCell, Predicate : Cells.Cell; Suppress_Wrap : Boolean; begin -- PrintPair Suppress_Wrap := False; SPARK_IO.Put_Line (Output_File, " Traversal condition:", 0); if Pairs.IsTrue (Heap, PairName) then SPARK_IO.Put_Line (Output_File, " 1: true .", 0); 
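-- Otherwise partition the traversal condition into its conjuncts and print
-- each one as a numbered condition clause.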
else Predicate := Cells.Get_B_Ptr (Heap, Pairs.PairHead (PairName)); PrintLogicalExpn (Heap, Output_File, Scope, Predicate, Condition, Wrap_Limit); end if; SPARK_IO.Put_Line (Output_File, " Action:", 0); if Pairs.IsUnitAction (Heap, PairName) then SPARK_IO.Put_Line (Output_File, " null .", 0); else -- print action; Action := Cells.Get_C_Ptr (Heap, Pairs.PairHead (PairName)); ModCell := Clists.FirstCell (Heap, Action); SPARK_IO.Put_String (Output_File, " ", 0); Print_Cell_Contents (Heap => Heap, Output_File => Output_File, Cell_Name => ModCell, Suppress_Wrap => Suppress_Wrap, Scope => Scope, Wrap_Limit => Wrap_Limit, Escape_DOT => False); SPARK_IO.Put_String (Output_File, " := ", 0); PrintDag (Heap, Output_File, Cells.Get_B_Ptr (Heap, ModCell), Scope, Wrap_Limit); ModCell := Clists.NextCell (Heap, ModCell); loop exit when Cells.Is_Null_Cell (ModCell); SPARK_IO.Put_Line (Output_File, " &", 0); SPARK_IO.Put_String (Output_File, " ", 0); Print_Cell_Contents (Heap => Heap, Output_File => Output_File, Cell_Name => ModCell, Suppress_Wrap => Suppress_Wrap, Scope => Scope, Wrap_Limit => Wrap_Limit, Escape_DOT => False); SPARK_IO.Put_String (Output_File, " := ", 0); PrintDag (Heap, Output_File, Cells.Get_B_Ptr (Heap, ModCell), Scope, Wrap_Limit); ModCell := Clists.NextCell (Heap, ModCell); end loop; SPARK_IO.Put_Line (Output_File, " .", 0); end if; end PrintPair; begin -- PrintLabel PairCount := 1; CurrentPair := Labels.FirstPair (Heap, LabelName); loop exit when Pairs.IsNullPair (CurrentPair); SPARK_IO.Put_String (Output_File, " Path ", 0); SPARK_IO.Put_Integer (Output_File, PairCount, 2, 10); SPARK_IO.New_Line (Output_File, 1); PrintPair (Output_File, CurrentPair); PairCount := PairCount + 1; CurrentPair := Labels.NextPair (Heap, CurrentPair); end loop; end PrintLabel; ------------------------------------------------------------------------- procedure PrintVCFormula (Heap : in out Cells.Heap_Record; Output_File : in SPARK_IO.File_Type; PredicatePair : in Pairs.Pair; Scope : in Dictionary.Scopes; Wrap_Limit : in Positive) is ConclusionRoot, HypothesisRoot : Cells.Cell; -- GAA duplicated code from DECLARATIONS.ADB function IsTriviallyTrue (DAG : Cells.Cell) return Boolean --# global in Dictionary.Dict; --# in Heap; is CurrentCell : Cells.Cell; Result : Boolean := True; function IsTrueCell (TheCell : Cells.Cell) return Boolean --# global in Dictionary.Dict; --# in Heap; is begin return Cells.Get_Kind (Heap, TheCell) = Cell_Storage.Named_Const and then Dictionary.IsEnumerationLiteral (Cells.Get_Symbol_Value (Heap, TheCell)) and then Dictionary.Enumeration_Literals_Are_Equal (Left_Symbol => Cells.Get_Symbol_Value (Heap, TheCell), Right_Symbol => Dictionary.GetTrue); end IsTrueCell; function AppropriateBinaryOperator (OpSym : SP_Symbols.SP_Symbol) return Boolean is begin return OpSym = SP_Symbols.RWand or else OpSym = SP_Symbols.RWandthen or else OpSym = SP_Symbols.RWor or else OpSym = SP_Symbols.RWorelse or else OpSym = SP_Symbols.equals or else OpSym = SP_Symbols.implies or else OpSym = SP_Symbols.is_equivalent_to; end AppropriateBinaryOperator; begin --IsTriviallyTrue CurrentCell := DAG; loop exit when IsTrueCell (CurrentCell); --success condition --some expression other than an operator - fail if Cells.Get_Kind (Heap, CurrentCell) /= Cell_Storage.Op then Result := False; exit; end if; --inappropriate operator - fail if not AppropriateBinaryOperator (Cells.Get_Op_Symbol (Heap, CurrentCell)) then Result := False; exit; end if; --thing on left of operator is not true - fail if not IsTrueCell 
(Cells.Get_A_Ptr (Heap, CurrentCell)) then Result := False; exit; end if; --move down right hand chain of tree to get next sub-expression CurrentCell := Cells.Get_B_Ptr (Heap, CurrentCell); --fallen off the end - fail - (I think this check is redundant but safe) if Cells.Is_Null_Cell (CurrentCell) then Result := False; exit; end if; end loop; return Result; end IsTriviallyTrue; begin --PrintVCFormula HypothesisRoot := Cells.Get_B_Ptr (Heap, Pairs.PairHead (PredicatePair)); ConclusionRoot := Cells.Get_C_Ptr (Heap, Pairs.PairHead (PredicatePair)); if IsTriviallyTrue (ConclusionRoot) then SPARK_IO.Put_Line (Output_File, "*** true . /* trivially true VC removed by Examiner */", 0); SPARK_IO.New_Line (Output_File, 1); else PrintLogicalExpn (Heap, Output_File, Scope, HypothesisRoot, Hypothesis, Wrap_Limit); SPARK_IO.Put_Line (Output_File, " ->", 0); PrintLogicalExpn (Heap, Output_File, Scope, ConclusionRoot, Conclusion, Wrap_Limit); SPARK_IO.Put_Line (Output_File, " ", 0); end if; end PrintVCFormula; ------------------------------------------------------------------------- procedure PrintDPC (Heap : in out Cells.Heap_Record; Output_File : in SPARK_IO.File_Type; PredicatePair : in Pairs.Pair; Scope : in Dictionary.Scopes; Wrap_Limit : in Positive) is HypothesisRoot : Cells.Cell; begin -- PrintDPC HypothesisRoot := Cells.Get_B_Ptr (Heap, Pairs.PairHead (PredicatePair)); PrintLogicalExpn (Heap, Output_File, Scope, HypothesisRoot, Hypothesis, Wrap_Limit); SPARK_IO.Put_Line (Output_File, " ->", 0); SPARK_IO.Put_Line (Output_File, "C1: false .", 0); SPARK_IO.Put_Line (Output_File, " ", 0); end PrintDPC; ------------------------------------------------------------------------- procedure Print_DAG_Dot (Heap : in out Cells.Heap_Record; Output_File : in SPARK_IO.File_Type; Root : in Cells.Cell; Scope : in Dictionary.Scopes; Wrap_Limit : in Positive) is -- This procedure uses a recursive DAG traversal, so is not -- SPARK. 
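-- The output is a GraphViz DOT digraph: one record-shaped node per cell
-- (showing its kind, reference number and contents) and one labelled edge
-- per A/B pointer.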
--# hide Print_DAG_Dot; type Edge_Class is (A, B); function Cell_Kind_Image (K : in Cells.Cell_Kind) return String is --# hide Cell_Kind_Image; begin case K is when Cell_Storage.Manifest_Const => return "Manifest_Const"; when Cell_Storage.Attrib_Value => return "Attrib_Value"; when Cell_Storage.Attrib_Function => return "Attrib_Function"; when Cell_Storage.Field_Access_Function => return "Field_Access_Function"; when Cell_Storage.Field_Update_Function => return "Field_Update_Function"; when Cell_Storage.Element_Function => return "Element_Function"; when Cell_Storage.Update_Function => return "Update_Function"; when Cell_Storage.Pred_Function => return "Pred_Function"; when Cell_Storage.Succ_Function => return "Succ_Function"; when Cell_Storage.Abs_Function => return "Abs_Function"; when Cell_Storage.Trunc_Function => return "Trunc_Function"; when Cell_Storage.List_Function => return "List_Function"; when Cell_Storage.FDL_Div_Op => return "FDL_Div_Op"; when Cell_Storage.Op => return "Op"; when Cell_Storage.Named_Const => return "Named_Const"; when Cell_Storage.Declared_Function => return "Declared_Function"; when Cell_Storage.Proof_Function => return "Proof_Function"; when Cell_Storage.Modified_Op => return "Modified"; when Cell_Storage.Reference => return "Reference"; when Cell_Storage.Constraining_Index => return "Constraining_Index"; when Cell_Storage.Fixed_Var => return "Fixed_Var"; when Cell_Storage.Unconstrained_Attribute_Prefix => return "Uncon_Attribute_Prefix"; when Cell_Storage.Mk_Aggregate => return "Mk_Aggregate"; when Cell_Storage.Procedure_Export => return "Procedure_Export"; when Cell_Storage.Procedure_Name => return "Procedure_Name"; when Cell_Storage.Pending_Function => return "Pending_Function"; when Cell_Storage.Aggregate_Counter => return "Aggregate_Counter"; when Cell_Storage.Call_Counter => return "Call_Counter"; when Cell_Storage.Incomplete_Aggregate => return "Incomplete_Aggregate"; when Cell_Storage.Return_Var => return "Return_Var"; when Cell_Storage.Root_Integer => return "Root_Integer"; when Cell_Storage.Bitwise_Op => return "Bitwise_Op"; when Cell_Storage.Function_Call_In_Proof_Context => return "Function_Call_In_Proof_Context"; when Cell_Storage.Proof_Function_Obtain_Precondition => return "Proof_Function_Obtain_Precondition"; when Cell_Storage.Proof_Function_Obtain_Return => return "Proof_Function_Obtain_Return"; when Cell_Storage.Proof_Function_Syntax_Node => return "Proof_Function_Syntax_Node"; when Cell_Storage.Quantifier => return "Quantifier"; when Cell_Storage.Internal_Natural => return "Internal_Natural"; when Cell_Storage.Internal_Scope => return "Internal_Scope"; when Cell_Storage.Unknown_Kind => return "Unknown"; end case; end Cell_Kind_Image; procedure Print_Node (Root : in Cells.Cell; Rank : in Natural) is pragma Unreferenced (Rank); Suppress_Wrap : Boolean := False; begin SPARK_IO.Put_String (Output_File, Natural'Image (Cells.Cell_Ref (Root)) & " [shape=record,label=""{{", 0); SPARK_IO.Put_String (Output_File, Cell_Kind_Image (Cells.Get_Kind (Heap, Root)) & "\l|" & Natural'Image (Cells.Cell_Ref (Root)) & "\r}|", 0); Print_Cell_Contents (Heap => Heap, Output_File => Output_File, Cell_Name => Root, Suppress_Wrap => Suppress_Wrap, Scope => Scope, Wrap_Limit => Wrap_Limit, Escape_DOT => True); SPARK_IO.Put_Line (Output_File, "}""];", 0); end Print_Node; procedure Print_Edge (Head, Tail : in Cells.Cell; Class : in Edge_Class) is begin if not Cells.Is_Null_Cell (Tail) then SPARK_IO.Put_String (Output_File, Natural'Image (Cells.Cell_Ref (Head)) & " ->" 
& Natural'Image (Cells.Cell_Ref (Tail)) & "[style=solid,label=", 0); SPARK_IO.Put_String (Output_File, Edge_Class'Image (Class), 0); SPARK_IO.Put_Line (Output_File, "];", 0); end if; end Print_Edge; procedure Traverse_DAG (Root : in Cells.Cell; Rank : in Natural) is A_Child : Cells.Cell; B_Child : Cells.Cell; begin if Cells.Is_Null_Cell (Root) then null; else Print_Node (Root, Rank); A_Child := Cells.Get_A_Ptr (Heap, Root); B_Child := Cells.Get_B_Ptr (Heap, Root); -- If the A_Child is not null and not pointing -- at itself, then print the edge and that sub-dag. if (not Cells.Is_Null_Cell (A_Child)) and then (A_Child /= Root) then Print_Edge (Root, A_Child, A); Traverse_DAG (A_Child, Rank + 1); end if; -- If the B_Child is not null and not pointing -- at itself, then print the edge and that sub-dag. if (not Cells.Is_Null_Cell (B_Child)) and then (B_Child /= Root) then Print_Edge (Root, B_Child, B); Traverse_DAG (B_Child, Rank + 1); end if; end if; end Traverse_DAG; begin SPARK_IO.Put_Line (Output_File, "digraph DAG {", 0); SPARK_IO.Put_Line (Output_File, "ranksep=""1.0 equally"";", 0); SPARK_IO.Put_Line (Output_File, "nodesep=1.0;", 0); SPARK_IO.Put_Line (Output_File, "node [shape=box,fontname=helvetica];", 0); SPARK_IO.Put_Line (Output_File, "edge [labelfontname=helvetica,labelfontsize=10];", 0); Traverse_DAG (Root, 0); SPARK_IO.Put_Line (Output_File, "}", 0); end Print_DAG_Dot; procedure Print_Heap_Dot (Heap : in out Cells.Heap_Record; Output_File : in SPARK_IO.File_Type) is --# hide Print_Heap_Dot; type Edge_Class is (A, B); function Cell_Kind_Image (K : in Cells.Cell_Kind) return String is --# hide Cell_Kind_Image; begin case K is when Cell_Storage.Manifest_Const => return "Manifest_Const"; when Cell_Storage.Attrib_Value => return "Attrib_Value"; when Cell_Storage.Attrib_Function => return "Attrib_Function"; when Cell_Storage.Field_Access_Function => return "Field_Access_Function"; when Cell_Storage.Field_Update_Function => return "Field_Update_Function"; when Cell_Storage.Element_Function => return "Element_Function"; when Cell_Storage.Update_Function => return "Update_Function"; when Cell_Storage.Pred_Function => return "Pred_Function"; when Cell_Storage.Succ_Function => return "Succ_Function"; when Cell_Storage.Abs_Function => return "Abs_Function"; when Cell_Storage.Trunc_Function => return "Trunc_Function"; when Cell_Storage.List_Function => return "List_Function"; when Cell_Storage.FDL_Div_Op => return "FDL_Div_Op"; when Cell_Storage.Op => return "Op"; when Cell_Storage.Named_Const => return "Named_Const"; when Cell_Storage.Declared_Function => return "Declared_Function"; when Cell_Storage.Proof_Function => return "Proof_Function"; when Cell_Storage.Modified_Op => return "Modified"; when Cell_Storage.Reference => return "Reference"; when Cell_Storage.Constraining_Index => return "Constraining_Index"; when Cell_Storage.Fixed_Var => return "Fixed_Var"; when Cell_Storage.Unconstrained_Attribute_Prefix => return "Uncon_Attribute_Prefix"; when Cell_Storage.Mk_Aggregate => return "Mk_Aggregate"; when Cell_Storage.Procedure_Export => return "Procedure_Export"; when Cell_Storage.Procedure_Name => return "Procedure_Name"; when Cell_Storage.Pending_Function => return "Pending_Function"; when Cell_Storage.Aggregate_Counter => return "Aggregate_Counter"; when Cell_Storage.Call_Counter => return "Call_Counter"; when Cell_Storage.Incomplete_Aggregate => return "Incomplete_Aggregate"; when Cell_Storage.Return_Var => return "Return_Var"; when Cell_Storage.Root_Integer => return "Root_Integer"; 
when Cell_Storage.Bitwise_Op => return "Bitwise_Op"; when Cell_Storage.Function_Call_In_Proof_Context => return "Function_Call_In_Proof_Context"; when Cell_Storage.Proof_Function_Obtain_Precondition => return "Proof_Function_Obtain_Precondition"; when Cell_Storage.Proof_Function_Obtain_Return => return "Proof_Function_Obtain_Return"; when Cell_Storage.Proof_Function_Syntax_Node => return "Proof_Function_Syntax_Node"; when Cell_Storage.Quantifier => return "Quantifier"; when Cell_Storage.Internal_Natural => return "Internal_Natural"; when Cell_Storage.Internal_Scope => return "Internal_Scope"; when Cell_Storage.Unknown_Kind => return "Unknown"; end case; end Cell_Kind_Image; procedure Print_Node (Root : in Cells.Cell) is Suppress_Wrap : Boolean := True; begin SPARK_IO.Put_String (Output_File, Natural'Image (Cells.Cell_Ref (Root)) & " [shape=record,label=""{{", 0); SPARK_IO.Put_String (Output_File, Cell_Kind_Image (Cells.Get_Kind (Heap, Root)) & "\l|" & Natural'Image (Cells.Cell_Ref (Root)) & "\r}|", 0); Print_Cell_Contents (Heap => Heap, Output_File => Output_File, Cell_Name => Root, Suppress_Wrap => Suppress_Wrap, Scope => Dictionary.GlobalScope, Wrap_Limit => No_Wrap, Escape_DOT => True); SPARK_IO.Put_Line (Output_File, "}""];", 0); end Print_Node; procedure Print_Edge (Head, Tail : in Cells.Cell; Class : in Edge_Class) is begin if not Cells.Is_Null_Cell (Tail) then SPARK_IO.Put_String (Output_File, Natural'Image (Cells.Cell_Ref (Head)) & " ->" & Natural'Image (Cells.Cell_Ref (Tail)) & "[style=solid,label=", 0); SPARK_IO.Put_String (Output_File, Edge_Class'Image (Class), 0); SPARK_IO.Put_Line (Output_File, "];", 0); end if; end Print_Edge; procedure Print_Node_And_Edge (Root : in Cells.Cell) is A_Child : Cells.Cell; B_Child : Cells.Cell; begin if Cells.Is_Null_Cell (Root) then null; else Print_Node (Root); A_Child := Cells.Get_A_Ptr (Heap, Root); B_Child := Cells.Get_B_Ptr (Heap, Root); -- If the A_Child is not null and not pointing -- at itself, then print the edge and that sub-dag. if (not Cells.Is_Null_Cell (A_Child)) and then (A_Child /= Root) then Print_Edge (Root, A_Child, A); end if; -- If the B_Child is not null and not pointing -- at itself, then print the edge and that sub-dag. if (not Cells.Is_Null_Cell (B_Child)) and then (B_Child /= Root) then Print_Edge (Root, B_Child, B); end if; end if; end Print_Node_And_Edge; begin SPARK_IO.Put_Line (Output_File, "digraph DAG {", 0); SPARK_IO.Put_Line (Output_File, "ranksep=""1.0 equally"";", 0); SPARK_IO.Put_Line (Output_File, "nodesep=1.0;", 0); SPARK_IO.Put_Line (Output_File, "node [shape=box,fontname=helvetica];", 0); SPARK_IO.Put_Line (Output_File, "edge [labelfontname=helvetica,labelfontsize=10];", 0); for C in Cell_Storage.Cell range 0 .. Cells.Get_Heap_Size (Heap) loop if not Cells.Is_Free (Heap, C) then Print_Node_And_Edge (C); end if; end loop; SPARK_IO.Put_Line (Output_File, "}", 0); end Print_Heap_Dot; end DAG_IO; spark-2012.0.deb/examiner/lextokenmanager-insert_nat.adb0000644000175000017500000000265311753202336022257 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with E_Strings; separate (LexTokenManager) procedure Insert_Nat (N : in Natural; Lex_Str : out Lex_String) is Ex_Lin : E_Strings.T; begin E_Strings.Put_Int_To_String (Dest => Ex_Lin, Item => N, Start_Pt => 1, Base => 10); Insert_Examiner_String (Str => Ex_Lin, Lex_Str => Lex_Str); end Insert_Nat; ././@LongLink0000000000000000000000000000015100000000000011562 Lustar rootrootspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_variable_declaration.adbspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_variable_declaration0000644000175000017500000016467611753202336033104 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with SLI; separate (Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration) procedure Wf_Variable_Declaration (Node : in STree.SyntaxNode; Enclosing_Unit_Scope : in Dictionary.Scopes; Declaration_Scope : in Dictionary.Scopes; The_Heap : in out Heap.HeapRecord) is type Declaration_Sorts is (In_Package, In_Subprogram, In_Protected_Type); Declaration_Sort : Declaration_Sorts; Type_Node : STree.SyntaxNode; Exp_Node : STree.SyntaxNode; Alias_Node : STree.SyntaxNode; Var_Is_Init : Boolean; Is_Aliased : Boolean := False; Unwanted_Seq : SeqAlgebra.Seq; Exp_Type : Exp_Record; Type_Sym : Dictionary.Symbol; Unused_Component_Data : ComponentManager.ComponentData; ------------------------------------------------------------------------- function Get_Declaration_Sort (Scope : Dictionary.Scopes) return Declaration_Sorts --# global in Dictionary.Dict; is The_Region : Dictionary.Symbol; Result : Declaration_Sorts; begin The_Region := Dictionary.GetRegion (Scope); if Dictionary.IsPackage (The_Region) then Result := In_Package; elsif Dictionary.IsType (The_Region) and then Dictionary.IsProtectedTypeMark (The_Region) then Result := In_Protected_Type; else -- assume subprogram Result := In_Subprogram; end if; return Result; end Get_Declaration_Sort; ------------------------------------------------------------------------- procedure Alias_Check (Type_Sym : in Dictionary.Symbol; Alias_Node_Pos : in LexTokenManager.Token_Position; Is_Aliased : in Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Alias_Node_Pos, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Is_Aliased, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Type_Sym; is begin if Is_Aliased and then not Dictionary.IsProtectedTypeMark (Type_Sym) then ErrorHandler.Semantic_Error (Err_Num => 894, Reference => ErrorHandler.No_Reference, Position => Alias_Node_Pos, Id_Str => LexTokenManager.Null_String); end if; end Alias_Check; ------------------------------------------------------------------------- procedure Wf_Package_Variable (Node, Exp_Node : in STree.SyntaxNode; Alias_Node_Pos, Type_Node_Pos : in LexTokenManager.Token_Position; Type_Sym : in Dictionary.Symbol; Current_Scope : in Dictionary.Scopes; Is_Aliased, Var_Is_Init : in Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# Exp_Node, --# Is_Aliased, --# LexTokenManager.State, --# Node, --# STree.Table, --# Type_Sym, --# Var_Is_Init & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from Alias_Node_Pos, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Exp_Node, --# Is_Aliased, --# LexTokenManager.State, --# Node, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Type_Node_Pos, --# 
Type_Sym, --# Var_Is_Init; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.variable_declaration and --# (Exp_Node = STree.NullNode or Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.expression); --# post STree.Table = STree.Table~; is It : STree.Iterator; Next_Node : STree.SyntaxNode; Ident_Str : LexTokenManager.Lex_String; Root_Type_Sym : Dictionary.Symbol; Variable_Symbol : Dictionary.Symbol; Declaration_Symbol : Dictionary.Symbol; OK_To_Add : Boolean; ------------------------------------------------------------------------- procedure Check_Type_Consistency (Sym : in Dictionary.Symbol; Type_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Error_Node : in STree.SyntaxNode; OK_To_Add : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Node, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Sym, --# Type_Sym & --# OK_To_Add from Dictionary.Dict, --# Scope, --# Sym, --# Type_Sym; --# pre Syntax_Node_Type (Error_Node, STree.Table) = SP_Symbols.identifier; is Announced_Type : Dictionary.Symbol; Root_Type_Sym : Dictionary.Symbol; Consistent : Boolean; begin Announced_Type := Dictionary.GetType (Sym); Root_Type_Sym := Dictionary.GetRootType (Type_Sym); if Dictionary.IsOwnVariable (Sym) then if Dictionary.OwnVariableHasType (Sym, Scope) then if Dictionary.IsSubtype (Type_Sym) then if Dictionary.IsProtectedTypeMark (Root_Type_Sym) then -- Protected types may be declared using a subtype of -- the type used in the own variable declaration. if Dictionary.IsSubtype (Dictionary.GetType (Sym)) then -- The own variable was declared using a subtype. 
-- Report this error rather than the type mismatch error ErrorHandler.Semantic_Error (Err_Num => 948, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Error_Node), Id_Str => Dictionary.GetSimpleName (Sym)); Consistent := True; else Consistent := Dictionary.Types_Are_Equal (Left_Symbol => Root_Type_Sym, Right_Symbol => Announced_Type, Full_Range_Subtype => False); end if; else -- The types must be the same Consistent := Dictionary.Types_Are_Equal (Left_Symbol => Type_Sym, Right_Symbol => Announced_Type, Full_Range_Subtype => False); end if; else -- The types must be the same Consistent := Dictionary.Types_Are_Equal (Left_Symbol => Type_Sym, Right_Symbol => Announced_Type, Full_Range_Subtype => False); end if; else -- No announced type so consistent by default Consistent := True; end if; else -- This is an own task Consistent := Dictionary.Types_Are_Equal (Left_Symbol => Root_Type_Sym, Right_Symbol => Announced_Type, Full_Range_Subtype => False); end if; if not Consistent then -- Type mismatch between declaration and own variable annotation ErrorHandler.Semantic_Error (Err_Num => 22, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Error_Node), Id_Str => Node_Lex_String (Node => Error_Node)); end if; OK_To_Add := Consistent; end Check_Type_Consistency; ------------------------------------------------------------------------- function Is_A_Protected_Own_Variable (Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return Dictionary.IsOwnVariable (Sym) and then Dictionary.GetOwnVariableProtected (Sym); end Is_A_Protected_Own_Variable; ------------------------------------------------------------------------- procedure Check_PO_Declaration (Sym : in Dictionary.Symbol; Type_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Error_Node_Pos : in LexTokenManager.Token_Position; Valid : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Node_Pos, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# Sym, --# Type_Sym & --# Valid from Dictionary.Dict, --# Scope, --# Sym, --# Type_Sym; is procedure Check_For_Multiple_Instances (Sym : in Dictionary.Symbol; Type_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Error_Node_Pos : in LexTokenManager.Token_Position; Valid : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Node_Pos, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# Sym, --# Type_Sym & --# Valid from Dictionary.Dict, --# Sym, --# Type_Sym; is It : Dictionary.Iterator; Root_Type_Sym : Dictionary.Symbol; begin Valid := True; Root_Type_Sym := Dictionary.GetRootType (Type_Sym); It := Dictionary.FirstVirtualElement (Root_Type_Sym); if It /= Dictionary.NullIterator and then Dictionary.GetVirtualElementOwner (Dictionary.CurrentSymbol (It)) /= Sym then -- Illegal instance. This variable is not the one associated with -- the protects list in the own variable clause. 
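-- (A protected type that protects a particular own variable can be used to
-- declare only that variable, so a second object of the type is rejected.)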
Valid := False; ErrorHandler.Semantic_Error_Sym (Err_Num => 942, Reference => ErrorHandler.No_Reference, Position => Error_Node_Pos, Sym => Root_Type_Sym, Scope => Scope); end if; end Check_For_Multiple_Instances; ------------------------------------------------------------------------- procedure Check_PO_Operations (Sym : in Dictionary.Symbol; Type_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Error_Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Node_Pos, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# Sym, --# Type_Sym; is Subprogram_It : Dictionary.Iterator; Subprogram_Sym : Dictionary.Symbol; Global_Variable_It : Dictionary.Iterator; begin -- Get the first subprogram for the protected type. Subprogram_It := Dictionary.First_Visible_Subprogram (The_Package_Or_Type => Dictionary.GetRootType (Type_Sym)); while not Dictionary.IsNullIterator (Subprogram_It) loop Subprogram_Sym := Dictionary.CurrentSymbol (Subprogram_It); -- Get the first global variable for this subprogram. Global_Variable_It := Dictionary.FirstGlobalVariable (Dictionary.GetAbstraction (Subprogram_Sym, Scope), Subprogram_Sym); -- Check the ceiling priority Check_Ceiling_Priority (Sym => Sym, Scope => Scope, Check_List => Global_Variable_It, Priority_Lex_Value => Dictionary.GetTypePriority (Type_Sym), Error_Node_Pos => Error_Node_Pos); while not Dictionary.IsNullIterator (Global_Variable_It) loop if Dictionary.CurrentSymbol (Global_Variable_It) = Sym then -- A circular dependency has been found ErrorHandler.Semantic_Error2 (Err_Num => 916, Reference => ErrorHandler.No_Reference, Position => Error_Node_Pos, Id_Str1 => Dictionary.GetSimpleName (Sym), Id_Str2 => Dictionary.GetSimpleName (Subprogram_Sym)); end if; Global_Variable_It := Dictionary.NextSymbol (Global_Variable_It); end loop; -- Get the next subprogram. Subprogram_It := Dictionary.NextSymbol (Subprogram_It); end loop; end Check_PO_Operations; procedure Warn_Of_Interrupt_IDs (Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Error_Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Node_Pos, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# Sym; is begin if Dictionary.GetHasInterruptProperty (Sym) then ErrorHandler.Semantic_Warning_Sym (Err_Num => 5, Position => Error_Node_Pos, Sym => Sym, Scope => Scope); end if; end Warn_Of_Interrupt_IDs; begin -- Check_PO_Declaration if Dictionary.IsLibraryLevel (Scope) then if Dictionary.OwnVariableHasType (Sym, Scope) then Check_For_Multiple_Instances (Sym => Sym, Type_Sym => Type_Sym, Scope => Scope, Error_Node_Pos => Error_Node_Pos, Valid => Valid); Check_PO_Operations (Sym => Sym, Type_Sym => Type_Sym, Scope => Scope, Error_Node_Pos => Error_Node_Pos); Warn_Of_Interrupt_IDs (Sym => Sym, Scope => Scope, Error_Node_Pos => Error_Node_Pos); else -- The own variable was not type announced. 
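-- Illustrative sketch (hypothetical names): a declaration "PO : PT;" whose own variable -- annotation reads "--# own protected PO;" rather than "--# own protected PO : PT;" reaches -- this branch; protected objects require a type-announced own variable (error 925).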
Valid := False; ErrorHandler.Semantic_Error (Err_Num => 925, Reference => ErrorHandler.No_Reference, Position => Error_Node_Pos, Id_Str => Dictionary.GetSimpleName (Sym)); end if; else Valid := False; -- Illegal declaration of protected object ErrorHandler.Semantic_Error (Err_Num => 868, Reference => ErrorHandler.No_Reference, Position => Error_Node_Pos, Id_Str => Dictionary.GetSimpleName (Sym)); end if; end Check_PO_Declaration; begin -- Wf_Package_Variable Alias_Check (Type_Sym => Type_Sym, Alias_Node_Pos => Alias_Node_Pos, Is_Aliased => Is_Aliased); Root_Type_Sym := Dictionary.GetRootType (Type_Sym); --# assert STree.Table = STree.Table~; It := Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Child_Node (Current_Node => Node), In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.identifier and --# Next_Node = Get_Node (It); OK_To_Add := False; Ident_Str := Node_Lex_String (Node => Next_Node); Variable_Symbol := Dictionary.LookupItem (Name => Ident_Str, Scope => Current_Scope, Context => Dictionary.ProofContext, Full_Package_Name => False); if Dictionary.Is_Null_Symbol (Variable_Symbol) then -- package state variable not previously announced if Dictionary.TypeIsTask (Type_Sym) then ErrorHandler.Semantic_Error (Err_Num => 930, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Ident_Str); else ErrorHandler.Semantic_Error (Err_Num => 151, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Ident_Str); end if; elsif Dictionary.GetContext (Variable_Symbol) = Dictionary.ProgramContext then ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Ident_Str); elsif not (Dictionary.IsOwnVariable (Variable_Symbol) or else Dictionary.IsConstituent (Variable_Symbol) or else Dictionary.IsOwnTask (Variable_Symbol)) then ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Ident_Str); elsif Dictionary.IsRefinedOwnVariable (Variable_Symbol) then ErrorHandler.Semantic_Error (Err_Num => 73, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Ident_Str); else OK_To_Add := True; end if; --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.identifier and --# Next_Node = Get_Node (It) and --# (OK_To_Add -> (not Dictionary.Is_Null_Symbol (Variable_Symbol))); if OK_To_Add then Check_Task_Modifier_Consistency (The_Own_Var_Type => Dictionary.GetType (Variable_Symbol), The_Var_Type => Type_Sym, Modifier_Is_Task => Dictionary.IsOwnTask (Variable_Symbol), Error_Node => Next_Node, Consistent => OK_To_Add); end if; --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.identifier and --# Next_Node = Get_Node (It) and --# (OK_To_Add -> (not Dictionary.Is_Null_Symbol (Variable_Symbol))); if OK_To_Add then Check_Protected_Modifier_Consistency (The_Type => Type_Sym, Modifier_Is_Protected => Dictionary.IsOwnVariable (Variable_Symbol) and then Dictionary.GetOwnVariableProtected (Variable_Symbol), Error_Node => Next_Node, Consistent => OK_To_Add); end if; --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.identifier and 
--# Next_Node = Get_Node (It) and --# (OK_To_Add -> (not Dictionary.Is_Null_Symbol (Variable_Symbol))); if OK_To_Add and then ((Dictionary.IsOwnVariable (Variable_Symbol) or else (Dictionary.IsConstituent (Variable_Symbol)))) then Check_Suspendable_Property_Consistency (Sym => Variable_Symbol, Type_Sym => Type_Sym, Is_In_Suspends_List => Dictionary.GetIsSuspendable (Variable_Symbol), Error_Node_Pos => Node_Position (Node => Node), Consistent => OK_To_Add); end if; --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.identifier and --# Next_Node = Get_Node (It) and --# (OK_To_Add -> (not Dictionary.Is_Null_Symbol (Variable_Symbol))); if OK_To_Add and then (Dictionary.IsOwnVariable (Variable_Symbol) or else Dictionary.IsConstituent (Variable_Symbol)) then Check_Priority_Property_Consistency (Sym => Variable_Symbol, Type_Sym => Type_Sym, Priority_Property_Value => Dictionary.GetPriorityProperty (Variable_Symbol), Error_Node_Pos => Node_Position (Node => Next_Node), Consistent => OK_To_Add); end if; --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.identifier and --# Next_Node = Get_Node (It) and --# (OK_To_Add -> (not Dictionary.Is_Null_Symbol (Variable_Symbol))); if OK_To_Add and then (Dictionary.IsOwnVariable (Variable_Symbol) or else Dictionary.IsConstituent (Variable_Symbol)) then Check_Interrupt_Property_Consistency (Has_Interrupt_Property => Dictionary.GetHasInterruptProperty (Variable_Symbol), Sym => Variable_Symbol, Type_Sym => Type_Sym, Error_Node_Pos => Node_Position (Node => Next_Node), Consistent => OK_To_Add); end if; --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.identifier and --# Next_Node = Get_Node (It) and --# (OK_To_Add -> (not Dictionary.Is_Null_Symbol (Variable_Symbol))); if OK_To_Add and then ((Dictionary.IsOwnVariable (Variable_Symbol) and then Dictionary.OwnVariableHasType (Variable_Symbol, Current_Scope)) or else (Dictionary.IsOwnTask (Variable_Symbol))) then -- This is a type announced own variable or task own variable. -- Check that announced type matches the one used in the declaration. 
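-- For example (illustrative, hypothetical names): given "--# own State : Counter_Type;" in the -- package annotation, the declaration "State : Counter_Type := 0;" is consistent, whereas -- declaring State with any other type mark would be rejected by the check below (error 22).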
Check_Type_Consistency (Sym => Variable_Symbol, Type_Sym => Type_Sym, Scope => Current_Scope, Error_Node => Next_Node, OK_To_Add => OK_To_Add); end if; --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.identifier and --# Next_Node = Get_Node (It) and --# (OK_To_Add -> (not Dictionary.Is_Null_Symbol (Variable_Symbol))); if OK_To_Add and then Dictionary.TypeIsTask (Type_Sym) then Check_Ceiling_Priority (Sym => Variable_Symbol, Scope => Current_Scope, Check_List => Dictionary.FirstGlobalVariable (Dictionary.IsAbstract, Root_Type_Sym), Priority_Lex_Value => Dictionary.GetTypePriority (Type_Sym), Error_Node_Pos => Node_Position (Node => Next_Node)); end if; --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.identifier and --# Next_Node = Get_Node (It) and --# (OK_To_Add -> (not Dictionary.Is_Null_Symbol (Variable_Symbol))); if OK_To_Add and then Var_Is_Init and then Dictionary.IsOwnVariableOrConstituentWithMode (Variable_Symbol) and then not Is_A_Protected_Own_Variable (Sym => Variable_Symbol) then ErrorHandler.Semantic_Error (Err_Num => 720, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Ident_Str); OK_To_Add := False; end if; --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.identifier and --# Next_Node = Get_Node (It) and --# (OK_To_Add -> (not Dictionary.Is_Null_Symbol (Variable_Symbol))); if Dictionary.FirstVirtualElement (Root_Type_Sym) /= Dictionary.NullIterator and then not Dictionary.IsProtectedTypeMark (Root_Type_Sym) then -- A protects property has been given in an own variable declaration that -- announces a type that is not a protected type. OK_To_Add := False; ErrorHandler.Semantic_Error (Err_Num => 937, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Dictionary.GetSimpleName (Variable_Symbol)); end if; --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.identifier and --# Next_Node = Get_Node (It) and --# (OK_To_Add -> (not Dictionary.Is_Null_Symbol (Variable_Symbol))); if OK_To_Add and then Dictionary.IsProtectedTypeMark (Type_Sym) then Check_PO_Declaration (Sym => Variable_Symbol, Type_Sym => Type_Sym, Scope => Current_Scope, Error_Node_Pos => Node_Position (Node => Next_Node), Valid => OK_To_Add); end if; --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.identifier and --# Next_Node = Get_Node (It) and --# (OK_To_Add -> (not Dictionary.Is_Null_Symbol (Variable_Symbol))); if OK_To_Add and then Var_Is_Init and then Unexpected_Initialization (Sym => Variable_Symbol) then ErrorHandler.Semantic_Error (Err_Num => 333, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Ident_Str); end if; --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.identifier and --# Next_Node = Get_Node (It) and --# (OK_To_Add -> (not Dictionary.Is_Null_Symbol (Variable_Symbol))); if OK_To_Add and then not Var_Is_Init and then Dictionary.IsOwnVariable (Variable_Symbol) and then Dictionary.GetOwnVariableMode (Variable_Symbol) = Dictionary.DefaultMode and then (Dictionary.GetOwnVariableProtected (Variable_Symbol) or else Dictionary.IsVirtualElement (Variable_Symbol)) then -- Non moded protected state must be initialized at declaration. 
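-- Illustrative sketch (hypothetical names): an own variable announced as -- "--# own protected Flag : Boolean;" with no mode must be declared with an initializing -- expression, e.g. "Flag : Boolean := False;"; omitting ":= False" reaches this point (error 874).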
ErrorHandler.Semantic_Error (Err_Num => 874, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Ident_Str); end if; --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.identifier and --# Next_Node = Get_Node (It) and --# (OK_To_Add -> (not Dictionary.Is_Null_Symbol (Variable_Symbol))); if OK_To_Add then --# accept F, 10, Declaration_Symbol, "Expected ineffective assignment"; Dictionary.Add_Variable_Declaration (Variable_Sym => Variable_Symbol, The_Type => Type_Sym, Initialized => Var_Is_Init, Is_Aliased => Is_Aliased, Exp_Node => STree.NodeToRef (Exp_Node), Type_Reference => Dictionary.Location'(Start_Position => Type_Node_Pos, End_Position => Type_Node_Pos), Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Next_Node), End_Position => Node_Position (Node => Next_Node)), Scope => Current_Scope, Declaration_Symbol => Declaration_Symbol); --# end accept; SystemErrors.RT_Assert (C => Dictionary.IsVariable (Variable_Symbol), Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "in Wf_Package_Variable"); STree.Add_Node_Symbol (Node => Next_Node, Sym => Variable_Symbol); if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Symbol (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Next_Node, Symbol => Variable_Symbol, Is_Declaration => True); end if; -- if we have an in stream, then initially mark it as invalid if Dictionary.GetOwnVariableOrConstituentMode (Variable_Symbol) = Dictionary.InMode then Dictionary.SetVariableMarkedValid (Variable_Symbol, False); end if; STree.Set_Node_Lex_String (Sym => Variable_Symbol, Node => Next_Node); end if; It := STree.NextNode (It); end loop; --# accept F, 33, Declaration_Symbol, "Expected unused variable"; end Wf_Package_Variable; ------------------------------------------------------------------------- procedure Wf_Procedure_Variable (Node, Exp_Node : in STree.SyntaxNode; Alias_Node_Pos, Type_Node_Pos : in LexTokenManager.Token_Position; Type_Sym : in Dictionary.Symbol; Current_Scope : in Dictionary.Scopes; Is_Aliased, Var_Is_Init : in Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# Exp_Node, --# LexTokenManager.State, --# Node, --# STree.Table, --# Type_Sym, --# Var_Is_Init & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from Alias_Node_Pos, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Exp_Node, --# Is_Aliased, --# LexTokenManager.State, --# Node, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Type_Node_Pos, --# Type_Sym, --# Var_Is_Init; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.variable_declaration and --# (Exp_Node = STree.NullNode or Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.expression); --# post STree.Table = STree.Table~; is It : STree.Iterator; Next_Node : STree.SyntaxNode; Ident_Str : LexTokenManager.Lex_String; Sym : 
Dictionary.Symbol; Declaration_Symbol : Dictionary.Symbol; Variable_Symbol : Dictionary.Symbol; OK_To_Add : Boolean; begin Alias_Check (Type_Sym => Type_Sym, Alias_Node_Pos => Alias_Node_Pos, Is_Aliased => Is_Aliased); It := Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Child_Node (Current_Node => Node), In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.identifier and --# Next_Node = Get_Node (It); Ident_Str := Node_Lex_String (Node => Next_Node); Sym := Dictionary.LookupItem (Name => Ident_Str, Scope => Current_Scope, Context => Dictionary.ProofContext, Full_Package_Name => False); OK_To_Add := True; if Dictionary.IsProtectedTypeMark (Type_Sym) then OK_To_Add := False; -- Illegal declaration of protected object ErrorHandler.Semantic_Error (Err_Num => 868, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Ident_Str); elsif Dictionary.IsPredefinedSuspensionObjectType (Type_Sym) then OK_To_Add := False; -- Illegal declaration of suspension object ErrorHandler.Semantic_Error (Err_Num => 901, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Ident_Str); elsif Dictionary.TypeIsTask (Type_Sym) then OK_To_Add := False; -- illegal declaration of task object. ErrorHandler.Semantic_Error (Err_Num => 926, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Ident_Str); end if; if not Dictionary.Is_Null_Symbol (Sym) then OK_To_Add := False; -- already exists ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Ident_Str); end if; if OK_To_Add then Dictionary.Add_Variable (Name => Ident_Str, The_Type => Type_Sym, Initialized => Var_Is_Init, Is_Aliased => False, Exp_Node => STree.NodeToRef (Exp_Node), Type_Reference => Dictionary.Location'(Start_Position => Type_Node_Pos, End_Position => Type_Node_Pos), Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Next_Node), End_Position => Node_Position (Node => Next_Node)), Scope => Current_Scope, Declaration_Symbol => Declaration_Symbol, Variable_Symbol => Variable_Symbol); STree.Add_Node_Symbol (Node => Next_Node, Sym => Variable_Symbol); if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Symbol (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Next_Node, Symbol => Declaration_Symbol, Is_Declaration => True); end if; end if; It := STree.NextNode (It); end loop; end Wf_Procedure_Variable; ------------------------------------------------------------------------- procedure Wf_Protected_Element (Node, Exp_Node : in STree.SyntaxNode; Alias_Node_Pos, Type_Node_Pos : in LexTokenManager.Token_Position; Type_Sym : in Dictionary.Symbol; Current_Scope : in Dictionary.Scopes; Is_Aliased, Var_Is_Init : in Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# Exp_Node, --# 
LexTokenManager.State, --# Node, --# STree.Table, --# Type_Sym, --# Var_Is_Init & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from Alias_Node_Pos, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Exp_Node, --# Is_Aliased, --# LexTokenManager.State, --# Node, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Type_Node_Pos, --# Type_Sym, --# Var_Is_Init; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.variable_declaration and --# (Exp_Node = STree.NullNode or Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.expression); --# post STree.Table = STree.Table~; is It : STree.Iterator; Next_Node : STree.SyntaxNode; Ident_Str : LexTokenManager.Lex_String; Declaration_Symbol : Dictionary.Symbol; Variable_Symbol : Dictionary.Symbol; OK_To_Add : Boolean; begin Alias_Check (Type_Sym => Type_Sym, Alias_Node_Pos => Alias_Node_Pos, Is_Aliased => Is_Aliased); It := Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Child_Node (Current_Node => Node), In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.identifier and --# Next_Node = Get_Node (It); Ident_Str := Node_Lex_String (Node => Next_Node); Variable_Symbol := Dictionary.LookupItem (Name => Ident_Str, Scope => Current_Scope, Context => Dictionary.ProofContext, Full_Package_Name => False); OK_To_Add := True; if Dictionary.IsProtectedTypeMark (Type_Sym) then OK_To_Add := False; -- Illegal declaration of protected object ErrorHandler.Semantic_Error (Err_Num => 868, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Ident_Str); elsif Dictionary.IsPredefinedSuspensionObjectType (Type_Sym) then OK_To_Add := False; -- Illegal declaration of suspension object ErrorHandler.Semantic_Error (Err_Num => 901, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Ident_Str); elsif Dictionary.TypeIsTask (Type_Sym) then OK_To_Add := False; -- illegal declaration of task object. 
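-- Illustrative sketch (hypothetical names): within "protected type PT is ... private ... end PT;" -- an element such as "Count : Integer := 0;" is acceptable, whereas elements of a protected -- type, a suspension-object type or a task type are rejected above (errors 868, 901 and 926).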
ErrorHandler.Semantic_Error (Err_Num => 926, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Ident_Str); elsif not Dictionary.Is_Null_Symbol (Variable_Symbol) then OK_To_Add := False; -- already exists ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Ident_Str); end if; if OK_To_Add then -- Declaration is legal -- First add variable as a refinement constituent of the implicit own variable associated -- with the protected type Dictionary.AddConstituent (Name => Ident_Str, Subject => Dictionary.GetProtectedTypeOwnVariable (Dictionary.GetRegion (Current_Scope)), Mode => Dictionary.DefaultMode, SubjectReference => Dictionary.Location'(Start_Position => Node_Position (Node => Next_Node), End_Position => Node_Position (Node => Next_Node)), Comp_Unit => ContextManager.Ops.Current_Unit, ConstituentReference => Dictionary.Location'(Start_Position => Node_Position (Node => Next_Node), End_Position => Node_Position (Node => Next_Node))); Variable_Symbol := Dictionary.LookupImmediateScope (Name => Ident_Str, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Privat, The_Unit => Dictionary.GetRegion (Current_Scope)), Context => Dictionary.ProofContext); SystemErrors.RT_Assert (C => not Dictionary.Is_Null_Symbol (Variable_Symbol), Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "in Wf_Protected_Element"); -- then add the variable itself Dictionary.Add_Variable_Declaration (Variable_Sym => Variable_Symbol, The_Type => Type_Sym, Initialized => Var_Is_Init, Is_Aliased => False, Exp_Node => STree.NodeToRef (Exp_Node), Type_Reference => Dictionary.Location'(Start_Position => Type_Node_Pos, End_Position => Type_Node_Pos), Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Next_Node), End_Position => Node_Position (Node => Next_Node)), Scope => Current_Scope, Declaration_Symbol => Declaration_Symbol); SystemErrors.RT_Assert (C => Dictionary.IsVariable (Variable_Symbol), Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "in Wf_Protected_Element"); STree.Add_Node_Symbol (Node => Next_Node, Sym => Variable_Symbol); if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Symbol (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Next_Node, Symbol => Declaration_Symbol, Is_Declaration => True); end if; -- Checking that protected elements are initialized is now done -- in wf_protected_type_declaration to allow for justification of -- these errors. 
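-- Note (summary of the calls above): each legal protected element is recorded both as a -- refinement constituent of the implicit own variable of the enclosing protected type and as -- an ordinary variable declaration in its own right.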
end if; It := STree.NextNode (It); end loop; end Wf_Protected_Element; begin -- Wf_Variable_Declaration Heap.Reset (The_Heap); Alias_Node := STree.NullNode; Declaration_Sort := Get_Declaration_Sort (Scope => Declaration_Scope); Type_Node := Next_Sibling (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME Type_Node = RWaliased OR type_mark if Syntax_Node_Type (Node => Type_Node) = SP_Symbols.RWaliased then -- ASSUME Type_Node = RWaliased Is_Aliased := True; Alias_Node := Type_Node; Type_Node := Next_Sibling (Current_Node => Type_Node); elsif Syntax_Node_Type (Node => Type_Node) /= SP_Symbols.type_mark then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Type_Node = RWaliased OR type_mark in Wf_Variable_Declaration"); end if; -- ASSUME Type_Node = type_mark SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Type_Node) = SP_Symbols.type_mark, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Type_Node = type_mark in Wf_Variable_Declaration"); -- ASSUME Alias_Node = RWaliased OR NULL SystemErrors.RT_Assert (C => Alias_Node = STree.NullNode or else Syntax_Node_Type (Node => Alias_Node) = SP_Symbols.RWaliased, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Alias_Node = RWaliased OR NULL in Wf_Variable_Declaration"); Wf_Type_Mark (Node => Type_Node, Current_Scope => Enclosing_Unit_Scope, Context => Dictionary.ProgramContext, Type_Sym => Type_Sym); -- variable initialization -- Protected and suspension objects are implicitly initialised on declaration. Var_Is_Init := Dictionary.IsPredefinedSuspensionObjectType (Type_Sym) or else Dictionary.IsProtectedTypeMark (Type_Sym); Exp_Node := Next_Sibling (Current_Node => Type_Node); -- ASSUME Exp_Node = expression OR NULL if Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.expression then -- ASSUME Exp_Node = expression if Dictionary.TypeIsGeneric (Type_Sym) then -- Initialization of such variables not allowed in generic bodies because we rely on VCs -- to do range checks that in non-generic situations can be done statically and -- no VCs are generated for variable initializations. 
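-- Illustrative sketch (hypothetical names): inside a generic body with a formal -- "type Index_Type is range <>;", a declaration such as "Current : Index_Type := 1;" is flagged -- here (error 651); typically the value is assigned in the statement part instead.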
ErrorHandler.Semantic_Error (Err_Num => 651, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); else -- Non-generic case, initialization is ok Var_Is_Init := True; SeqAlgebra.CreateSeq (The_Heap, Unwanted_Seq); ComponentManager.Initialise (Unused_Component_Data); --# accept Flow, 10, Unused_Component_Data, "Unused_Component_Data is discarded"; Walk_Expression_P.Walk_Expression (Exp_Node => Exp_Node, Scope => Declaration_Scope, Type_Context => Type_Sym, Context_Requires_Static => False, Ref_Var => Unwanted_Seq, Result => Exp_Type, Component_Data => Unused_Component_Data, The_Heap => The_Heap); --# end accept; SeqAlgebra.DisposeOfSeq (The_Heap, Unwanted_Seq); Assignment_Check (Position => Node_Position (Node => Exp_Node), Scope => Enclosing_Unit_Scope, Target_Type => Type_Sym, Exp_Result => Exp_Type); if not Exp_Type.Is_Constant then ErrorHandler.Semantic_Error (Err_Num => 50, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); end if; end if; elsif Exp_Node /= STree.NullNode then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Exp_Node = expression OR NULL in Wf_Variable_Declaration"); end if; case Declaration_Sort is when In_Package => Wf_Package_Variable (Node => Node, Exp_Node => Exp_Node, Alias_Node_Pos => Node_Position (Node => Alias_Node), Type_Node_Pos => Node_Position (Node => Type_Node), Type_Sym => Type_Sym, Current_Scope => Declaration_Scope, Is_Aliased => Is_Aliased, Var_Is_Init => Var_Is_Init); when In_Subprogram => Wf_Procedure_Variable (Node => Node, Exp_Node => Exp_Node, Alias_Node_Pos => Node_Position (Node => Alias_Node), Type_Node_Pos => Node_Position (Node => Type_Node), Type_Sym => Type_Sym, Current_Scope => Declaration_Scope, Is_Aliased => Is_Aliased, Var_Is_Init => Var_Is_Init); when In_Protected_Type => Wf_Protected_Element (Node => Node, Exp_Node => Exp_Node, Alias_Node_Pos => Node_Position (Node => Alias_Node), Type_Node_Pos => Node_Position (Node => Type_Node), Type_Sym => Type_Sym, Current_Scope => Declaration_Scope, Is_Aliased => Is_Aliased, Var_Is_Init => Var_Is_Init); end case; if Dictionary.Is_Unconstrained_Array_Type_Mark (Type_Sym, Enclosing_Unit_Scope) or else Dictionary.IsUnconstrainedTaskType (Type_Sym) or else Dictionary.IsUnconstrainedProtectedType (Type_Sym) then ErrorHandler.Semantic_Error (Err_Num => 39, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Type_Node), Id_Str => LexTokenManager.Null_String); end if; Heap.ReportUsage (The_Heap); end Wf_Variable_Declaration; spark-2012.0.deb/examiner/sem-check_priority_range.adb0000644000175000017500000001031211753202336021672 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with E_Strings; separate (Sem) procedure Check_Priority_Range (Error_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Pragma_Kind : in Dictionary.RavenscarPragmas; Err_Pos : in LexTokenManager.Token_Position; Value : in Maths.Value; Value_Rep : out LexTokenManager.Lex_String) is type Lookup_Token_Table is array (Dictionary.RavenscarPragmas) of LexTokenManager.Lex_String; Lookup_Token : constant Lookup_Token_Table := Lookup_Token_Table' (Dictionary.Priority => LexTokenManager.Priority_Token, Dictionary.InterruptPriority => LexTokenManager.Any_Priority_Token, Dictionary.AttachHandler => LexTokenManager.Interrupt_Priority_Token); System_Sym : Dictionary.Symbol; Priority_Sym : Dictionary.Symbol; Result : Maths.Value; Unused : Maths.ErrorCode; Lower_OK, Upper_OK : Boolean; begin Value_Rep := LexTokenManager.Null_String; -- do we have a value to check? if Maths.IsIntegerValue (Value) then -- is System shadowed or declared in config file? System_Sym := Dictionary.LookupItem (Name => LexTokenManager.System_Token, Scope => Dictionary.GlobalScope, Context => Dictionary.ProgramContext, Full_Package_Name => False); if not Dictionary.Is_Null_Symbol (System_Sym) then -- System exists, what about the subtype we need? Priority_Sym := Dictionary.LookupSelectedItem (Prefix => System_Sym, Selector => Lookup_Token (Pragma_Kind), Scope => Dictionary.GetScope (System_Sym), Context => Dictionary.ProgramContext); if not Dictionary.Is_Null_Symbol (Priority_Sym) then -- we can do a range check --# accept Flow, 10, Unused, "Expected ineffective assignment"; Maths.GreaterOrEqual (Value, Maths.ValueRep (Dictionary.GetScalarAttributeValue (False, LexTokenManager.First_Token, Priority_Sym)), Result, Unused); Maths.ValueToBool (Result, Lower_OK, Unused); Maths.LesserOrEqual (Value, Maths.ValueRep (Dictionary.GetScalarAttributeValue (False, LexTokenManager.Last_Token, Priority_Sym)), Result, Unused); Maths.ValueToBool (Result, Upper_OK, Unused); --# end accept; if not (Upper_OK and Lower_OK) then ErrorHandler.Semantic_Error_Sym2 (Err_Num => 881, Reference => ErrorHandler.No_Reference, Position => Err_Pos, Sym => Error_Sym, Sym2 => Priority_Sym, Scope => Scope); else -- valid value Maths.StorageRep (Value, Value_Rep); end if; end if; -- no Priority subtype else -- no System so we can't check the range but we can still add it to the dictionary Maths.StorageRep (Value, Value_Rep); end if; end if; --# accept Flow, 33, Unused, "Expected to be neither referenced nor exported"; end Check_Priority_Range; spark-2012.0.deb/examiner/errorhandler-conversions-tostring-uncondflowerr.adb0000644000175000017500000001463411753202336026522 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (ErrorHandler.Conversions.ToString) procedure UncondFlowErr (Err_Num : in Error_Types.NumericError; With_Explanation : in Boolean; E_Str : in out E_Strings.T) is Err_Type : ErrorHandler.Data_Flow_Err_Type; procedure UncondFlowErrExpl (E_Str : in out E_Strings.T) --# global in Err_Type; --# derives E_Str from *, --# Err_Type; is separate; -- Note that the parameter names for this subunit are chosen to make it as easy as -- possible to auto-generate the subunit from this, its parent, file. The -- generation requires copying the case statement below, stripping out the -- current Append'Thing' statements and adding an Append_String for the -- explanatory text that is delineated by --! comments. procedure Append_Explanation --# global in Err_Type; --# in With_Explanation; --# in out E_Str; --# derives E_Str from *, --# Err_Type, --# With_Explanation; is Explanation_String : E_Strings.T := E_Strings.Empty_String; begin if With_Explanation then -- we need to at least look for an explanation UncondFlowErrExpl (E_Str => Explanation_String); if E_Strings.Get_Length (E_Str => Explanation_String) > 0 then -- there actually is one E_Strings.Append_String (E_Str => E_Str, Str => ErrorHandler.Explanation_Prefix); E_Strings.Append_Examiner_String (E_Str1 => E_Str, E_Str2 => Explanation_String); E_Strings.Append_String (E_Str => E_Str, Str => ErrorHandler.Explanation_Postfix); end if; end if; end Append_Explanation; begin Err_Type := ErrorHandler.Data_Flow_Err_Type'Val (Err_Num.ErrorNum - Error_Types.UncondFlowErrorOffset); case Err_Type is -- HTML Directives --! <"flow-"> --! <"!!! Flow Error : "><" : "> when ErrorHandler.Expn_Undefined => --! 20 E_Strings.Append_String (E_Str => E_Str, Str => "Expression contains reference(s) to variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " which has an undefined value"); --! The expression may be that in an assignment or return statement, --! an actual parameter, --! or a condition occurring in an if or case statement, an iteration --! scheme or exit statement. NOTE: the presence of random and possibly invalid values --! introduced by data flow errors invalidates proof of exception freedom for the --! subprogram body which contains them. All unconditional data flow errors must be --! eliminated before attempting exception freedom proofs. See the manual --! "SPARK Proof Manual" for full details. when ErrorHandler.Stmt_Undefined => --! 23 E_Strings.Append_String (E_Str => E_Str, Str => "Statement contains reference(s) to variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " which has an undefined value"); --! The statement here is a procedure call or an assignment to an array element, and the variable XXX may --! appear in an actual parameter, whose value is imported when the --! procedure is executed. If the variable XXX --! 
does not occur in the actual parameter list, it is an imported --! global variable of the procedure (named in its global definition). --! NOTE: the presence of random and possibly invalid values --! introduced by data flow errors invalidates proof of exception freedom for the --! subprogram body which contains them. All unconditional data flow errors must be --! eliminated before attempting exception freedom proofs. See the manual --! "SPARK Proof Manual" for full details. when ErrorHandler.Invariant_Exp => --! 22 E_Strings.Append_String (E_Str => E_Str, Str => "Value of expression is invariant"); --! The expression is either a case expression or a condition --! (Boolean-valued expression) associated with an if-statement, not --! contained in a loop statement. The message indicates --! that the expression takes the same value whenever it is evaluated, --! in all program executions. Note that if the expression depends on values obtained --! by a call to another other subprogram then a possible source for its invariance --! might be an incorrect annotation on the called subprogram. when others => E_Strings.Append_String (E_Str => E_Str, Str => "UNKNOWN ERROR NUMBER PASSED TO Dep_Semantic_Err"); end case; Append_Explanation; E_Strings.Append_String (E_Str => E_Str, Str => "."); end UncondFlowErr; spark-2012.0.deb/examiner/sem-compunit-checksuspendslistaccountedfor.adb0000644000175000017500000000373511753202336025502 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.CompUnit) procedure CheckSuspendsListAccountedFor (Proc_Or_Task : in Dictionary.Symbol; Node_Pos : in LexTokenManager.Token_Position) is It : Dictionary.Iterator; The_PO_Or_SO : Dictionary.Symbol; begin if not Dictionary.BodyIsHidden (Proc_Or_Task) then It := Dictionary.FirstSuspendsListItem (Proc_Or_Task); while not Dictionary.IsNullIterator (It) loop The_PO_Or_SO := Dictionary.CurrentSymbol (It); if not Dictionary.SuspendsListItemIsAccountedFor (TheTaskOrProc => Proc_Or_Task, ThePOorSO => The_PO_Or_SO) then ErrorHandler.Semantic_Error (Err_Num => 914, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (The_PO_Or_SO)); end if; It := Dictionary.NextSymbol (It); end loop; end if; end CheckSuspendsListAccountedFor; spark-2012.0.deb/examiner/declarations.idx0000644000175000017500000000064711753202337017437 0ustar eugeneugendeclarations specification is in declarations.ads declarations body is in declarations.adb declarations.outputdeclarations subunit is in declarations-outputdeclarations.adb declarations.outputdeclarations.printdeclarations subunit is in declarations-outputdeclarations-printdeclarations.adb declarations.outputdeclarations.generatedeclarations subunit is in declarations-outputdeclarations-generatedeclarations.adb spark-2012.0.deb/examiner/date_time.ads0000644000175000017500000000514711753202336016704 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with E_Strings; --# inherit E_Strings; package Date_Time is -- This is the return-type for Compare_Timestamps. type Timestamp_Comparison_Result_T is (Malformed_Timestamps, A_Less_Than_B, A_Equals_B, A_Greater_Than_B); -- We use short month names in a few places (including in POGS), -- so it makes sense to define them in a central place to avoid -- duplication. subtype Seconds_T is Natural range 0 .. 59; subtype Minutes_T is Natural range 0 .. 59; subtype Hours_T is Natural range 0 .. 23; subtype Days_T is Positive range 1 .. 31; subtype Months_T is Positive range 1 .. 12; subtype Years_T is Natural range 1000 .. 9999; -- TODO: Raise ticket in 9999BC and extend range subtype String_3_Positions is Positive range 1 .. 3; subtype String_3_T is String (String_3_Positions); type Month_Names_T is array (Months_T) of String_3_T; Month_Names : constant Month_Names_T := Month_Names_T'("JAN", "FEB", "MAR", "APR", "MAY", "JUN", "JUL", "AUG", "SEP", "OCT", "NOV", "DEC"); -- This function compares two timestamps and returns -- A_Less_Than_B, A_Equals_B or A_Greater_Than_B. 
-- The timestamps must be in the following format: -- -- 30-SEP-2010, 10:15:24 -- -- The comma after the date is optional. -- -- Any characters afterwards are ignored. In case of any parsing -- error in either timestamps the special Malformed_Timestamps -- value is returned. This function assumes the month abbreviation -- is in English. function Compare_Timestamps (Timestamp_A : E_Strings.T; Timestamp_B : E_Strings.T) return Timestamp_Comparison_Result_T; end Date_Time; spark-2012.0.deb/examiner/errorhandler-conversions-tostring-condlflowerr-condlflowerrexpl.adb0000644000175000017500000000505511753202337031720 0ustar eugeneugenseparate (ErrorHandler.Conversions.ToString.CondlFlowErr) procedure CondlFlowErrExpl (E_Str : in out E_Strings.T) is begin case Err_Type is when ErrorHandler.Expn_May_Be_Undefined => E_Strings.Append_String (E_Str => E_Str, Str => "The expression may be that in an assignment or return statement," & " an actual parameter," & " or a condition occurring in an if or case statement, an iteration" & " scheme or exit statement. The Examiner has identified at least one syntactic path" & " to this point where the variable has NOT been given a value. Conditional" & " data flow errors are extremely serious and must be carefully investigated." & " NOTE: the presence of random and possibly invalid values" & " introduced by data flow errors invalidates proof of exception freedom for the" & " subprogram body which contains them. All reports of data flow errors must be" & " eliminated or shown to be associated with semantically infeasible paths before" & " attempting exception freedom proofs. See the manual ""SPARK Proof Manual" & " "" for full details."); when ErrorHandler.Stmt_May_Be_Undefined => E_Strings.Append_String (E_Str => E_Str, Str => "The statement here is a procedure call, and the variable XXX may" & " appear in an actual parameter, whose value is imported when the" & " procedure is executed. If the variable XXX" & " does not occur in the actual parameter list, it is an imported" & " global variable of the procedure (named in its global definition)." & " The Examiner has identified at least one syntactic path" & " to this point where the variable has NOT been given a value. Conditional" & " data flow errors are extremely serious and must be carefully investigated." & " NOTE: the presence of random and possibly invalid values" & " introduced by data flow errors invalidates proof of exception freedom for the" & " subprogram body which contains them. All reports of data flow errors must be" & " eliminated or shown to be associated with semantically infeasible paths before" & " attempting exception freedom proofs. 
See the manual ""SPARK Proof Manual" & " "" for full details."); when others => null; end case; end CondlFlowErrExpl; ././@LongLink0000000000000000000000000000016100000000000011563 Lustar rootrootspark-2012.0.deb/examiner/sem-wf_package_declaration-wf_package_specification-check_state_can_be_initialized.adbspark-2012.0.deb/examiner/sem-wf_package_declaration-wf_package_specification-check_state_can_be_ini0000644000175000017500000001760611753202336033012 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Wf_Package_Declaration.Wf_Package_Specification) procedure Check_State_Can_Be_Initialized (Pack_Sym : in Dictionary.Symbol; Anno_Node : in STree.SyntaxNode) is Own_Var_It : Dictionary.Iterator; Own_Var_Sym : Dictionary.Symbol; function Pure_Export_Procedure_Exists (Pack_Sym, Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is Result : Boolean; function Pure_Export_Procedure_Exists_Local (Sym : Dictionary.Symbol; It : Dictionary.Iterator) return Boolean --# global in Dictionary.Dict; is Result : Boolean := False; Subprog_Sym : Dictionary.Symbol; Local_It : Dictionary.Iterator; function Is_Procedure_Or_Task (Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return Dictionary.IsProcedure (Sym) or else (Dictionary.IsType (Sym) and then Dictionary.TypeIsTask (Sym)); end Is_Procedure_Or_Task; begin -- Pure_Export_Procedure_Exists_Local Local_It := It; while not Dictionary.IsNullIterator (Local_It) loop Subprog_Sym := Dictionary.CurrentSymbol (Local_It); Result := Is_Procedure_Or_Task (Sym => Subprog_Sym) and then Dictionary.IsExport (Dictionary.IsAbstract, Subprog_Sym, Sym) and then not Dictionary.IsImport (Dictionary.IsAbstract, Subprog_Sym, Sym); exit when Result; Local_It := Dictionary.NextSymbol (Local_It); end loop; return Result; end Pure_Export_Procedure_Exists_Local; begin -- Pure_Export_Procedure_Exists Result := Pure_Export_Procedure_Exists_Local (Sym => Sym, It => Dictionary.First_Visible_Subprogram (The_Package_Or_Type => Pack_Sym)) or else Pure_Export_Procedure_Exists_Local (Sym => Sym, It => Dictionary.First_Private_Subprogram (The_Package => Pack_Sym)); if not Result then Result := Pure_Export_Procedure_Exists_Local (Sym => Sym, It => Dictionary.First_Visible_Task_Type (The_Package => Pack_Sym)); end if; return Result; end Pure_Export_Procedure_Exists; ----------------------------------------------------- procedure Issue_Warning (Sym : in Dictionary.Symbol; Anno_Node : in STree.SyntaxNode) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; 
--# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Anno_Node, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Sym; --# pre Syntax_Node_Type (Anno_Node, STree.Table) = SP_Symbols.package_annotation; is function Place_To_Report_Error (Anno_Node : STree.SyntaxNode; Own_Var_Name : LexTokenManager.Lex_String) return LexTokenManager.Token_Position --# global in LexTokenManager.State; --# in STree.Table; --# pre Syntax_Node_Type (Anno_Node, STree.Table) = SP_Symbols.package_annotation; is Result : LexTokenManager.Token_Position; Search_Node, Own_Var_Node : STree.SyntaxNode; It : STree.Iterator; begin -- set up default answer. In practice the loop below should always find the -- actual location of an own variable. If something goes wrong the default -- result will point at the end of the package specification Result := Node_Position (Node => Last_Sibling_Of (Start_Node => Anno_Node)); Search_Node := Child_Node (Current_Node => Anno_Node); -- point to own_var_clause It := Find_First_Node (Node_Kind => SP_Symbols.own_variable, From_Root => Search_Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Own_Var_Node := Get_Node (It => It); --# assert Syntax_Node_Type (Own_Var_Node, STree.Table) = SP_Symbols.own_variable and --# Own_Var_Node = Get_Node (It); -- check whether the own variable at this point is the one we are looking for if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Node => Last_Child_Of (Start_Node => Own_Var_Node)), Lex_Str2 => Own_Var_Name) = LexTokenManager.Str_Eq then Result := Node_Position (Node => Own_Var_Node); exit; -- normal exit condition end if; It := STree.NextNode (It); end loop; return Result; end Place_To_Report_Error; begin -- Issue_Warning ErrorHandler.Semantic_Warning (Err_Num => 398, Position => Place_To_Report_Error (Anno_Node => Anno_Node, Own_Var_Name => Dictionary.GetSimpleName (Sym)), Id_Str => Dictionary.GetSimpleName (Sym)); end Issue_Warning; begin -- Check_State_Can_Be_Initialized Own_Var_It := Dictionary.FirstOwnVariable (Pack_Sym); while not Dictionary.IsNullIterator (Own_Var_It) -- exit when no more owns loop Own_Var_Sym := Dictionary.CurrentSymbol (Own_Var_It); -- if own variable initialized at declaration we don't have to consider -- it any further if not Dictionary.OwnVariableIsInitialized (Own_Var_Sym) then -- if own variable is moded then it does not need initializing if Dictionary.GetOwnVariableMode (Own_Var_Sym) = Dictionary.DefaultMode then -- if it is declared in the visible part we don't have to worry about it if not Dictionary.Is_Declared (Item => Own_Var_Sym) then -- if we get here we are interested in whether there is a subprogram which -- exports Own_Var_Sym without also importing it if not Pure_Export_Procedure_Exists (Pack_Sym => Pack_Sym, Sym => Own_Var_Sym) then Issue_Warning (Sym => Own_Var_Sym, Anno_Node => Anno_Node); end if; end if; end if; end if; Own_Var_It := Dictionary.NextSymbol (Own_Var_It); end loop; end Check_State_Can_Be_Initialized; spark-2012.0.deb/examiner/flowanalyser.idx0000644000175000017500000000040511753202337017465 0ustar eugeneugenflowanalyser specification is in flowanalyser.ads flowanalyser body is in flowanalyser.adb flowanalyser.flowanalyse subunit is in flowanalyser-flowanalyse.adb flowanalyser.flowanalyse.analyserelations subunit is in 
flowanalyser-flowanalyse-analyserelations.adb spark-2012.0.deb/examiner/spark.sw0000644000175000017500000000016511753202337015747 0ustar eugeneugen-sparklib -output_directory=vcg -config_file=../common/gnat.cfg -listing_extension=ls_ -casing -index_file=spark.idx spark-2012.0.deb/examiner/e_strings.ads0000644000175000017500000002726311753202336016751 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- E_Strings -- -- Purpose: -- -- In addition to the basic constructors and operations, simple I/O -- procedure are prodived to Put and Read such strings to/from a -- SPARK_IO file. -- -- Clients: -- Used throughout the Examiner and other tools. -- -- Use: -- Declare an E_Strings.T -- -- Initialization via the constant Empty_String or via constructors -- Copy_String -- -- Extension: -- None planned. -------------------------------------------------------------------------------- with SPARK.Ada.Strings.Unbounded; with SPARK_IO; --# inherit Ada.Characters.Handling, --# Ada.Characters.Latin_1, --# SPARK.Ada.Strings.Maps, --# SPARK.Ada.Strings.Unbounded, --# SPARK_IO; package E_Strings is subtype Lengths is Natural; subtype Positions is Positive; subtype Valid_Base is Positive range 2 .. 16; type T is private; -- First_One_First : first parameter is lexigraphicaly less -- Second_One_First : second parameter is lexigraphicaly less -- Neither_First : 2 parameters are equal type Order_Types is (First_One_First, Second_One_First, Neither_First); ----------------------------------------------------------------------------- -- Constants ----------------------------------------------------------------------------- Empty_String : constant T; ----------------------------------------------------------------------------- -- Operations ----------------------------------------------------------------------------- -- Conversion function To_Unbounded_String (E_Str : T) return SPARK.Ada.Strings.Unbounded.Unbounded_String; -- Equality operators -- Case INsensitive comparison. function Eq_String (E_Str1, E_Str2 : T) return Boolean; -- Case INsensitive comparison. function Eq1_String (E_Str : T; Str : String) return Boolean; -- Case sensitive comparison. 
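-- For example (illustrative only): -- Eq_String (Copy_String ("Foo"), Copy_String ("FOO")) returns True, whereas -- Eq_CS_String (Copy_String ("Foo"), Copy_String ("FOO")) returns False.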
function Eq_CS_String (E_Str1, E_Str2 : T) return Boolean; function Is_Empty (E_Str : T) return Boolean; function Get_Length (E_Str : T) return Lengths; function Get_Empty_String return T; --# return S => Get_Length (S) = 0 and S = Empty_String; function Get_Element (E_Str : T; Pos : Positions) return Character; function Copy_String (Str : String) return T; -- Append Str to E_Str procedure Append_String (E_Str : in out T; Str : in String); --# derives E_Str from *, --# Str; -- Append E_Str2 to E_Str1 procedure Append_Examiner_String (E_Str1 : in out T; E_Str2 : in T); --# derives E_Str1 from *, --# E_Str2; function Lower_Case (E_Str : T) return T; function Upper_Case (E_Str : T) return T; function Lower_Case_Char (E_Str : T; Pos : Positions) return T; function Upper_Case_Char (E_Str : T; Pos : Positions) return T; -- Replace all the From_Char by To_Char function Translate (E_Str : T; From_Char : Character; To_Char : Character) return T; procedure Append_Char (E_Str : in out T; Ch : in Character); --# derives E_Str from *, --# Ch; --# post Get_Length (E_Str) = Get_Length (E_Str~) + 1; -- Find_Sub_String_After : find the specified Search_String, -- starting at the specified position in the given T procedure Find_Sub_String_After (E_Str : in T; Search_Start : in Positions; Search_String : in String; String_Found : out Boolean; String_Start : out Positions); --# derives String_Found, --# String_Start from E_Str, --# Search_Start, --# Search_String; -- Find_Sub_String : find specified Search_String in the given T procedure Find_Sub_String (E_Str : in T; Search_String : in String; String_Found : out Boolean; String_Start : out Positions); --# derives String_Found, --# String_Start from E_Str, --# Search_String; procedure Find_Examiner_Sub_String (E_Str : in T; Search_String : in T; String_Found : out Boolean; String_Start : out Positions); --# derives String_Found, --# String_Start from E_Str, --# Search_String; -- Pop_Char takes as input a T. It removes the first character -- from the string and returns it in the Char output parameter. 
   -- If the empty string is passed in then the outputs are:
   --    E_Str = Empty_String
   --    Char  = ' '
   procedure Pop_Char (E_Str : in out T;
                       Char  :    out Character);
   --# derives Char,
   --#         E_Str from E_Str;

   -- Find_Char_After : find specified character in E_Str, starting
   -- at specified position
   procedure Find_Char_After
     (E_Str        : in     T;
      Search_Start : in     Positions;
      Search_Char  : in     Character;
      Char_Found   :    out Boolean;
      Char_Pos     :    out Positions);
   --# derives Char_Found,
   --#         Char_Pos from E_Str,
   --#                       Search_Char,
   --#                       Search_Start;

   -- Find_Char : find first occurrence of specified character in E_Str
   procedure Find_Char
     (E_Str       : in     T;
      Search_Char : in     Character;
      Char_Found  :    out Boolean;
      Char_Pos    :    out Positions);
   --# derives Char_Found,
   --#         Char_Pos from E_Str,
   --#                       Search_Char;

   -- See Order_Types above
   function Lex_Order (First_Name, Second_Name : T) return Order_Types;

   -- Section returns the specified subsection of the string; if the
   -- subsection lies outside the string, the empty string is returned
   function Section (E_Str     : T;
                     Start_Pos : Positions;
                     Length    : Lengths) return T;

   -- Trim removes ' ', Latin_1.HT, Latin_1.LF, Latin_1.CR from both
   -- ends of the string
   function Trim (E_Str : T) return T;

   procedure Get_Int_From_String
     (Source   : in     T;
      Item     :    out Integer;
      Start_Pt : in     Positions;
      Stop     :    out Natural);
   --# derives Item,
   --#         Stop from Source,
   --#                   Start_Pt;

   -- This procedure generates a string, optionally space-padded with
   -- Start_Pt - 1 spaces, representing the given integer Item. You
   -- can specify a Base other than ten.
   --
   -- The precondition requires some explanation: The maximum string
   -- length cannot exceed Lengths'Last. To show that the generated
   -- string will fit, the padding plus the maximum string length may
   -- not exceed Lengths'Last. The longest string that can be generated
   -- is that of either Integer'First or Integer'Last in base 2. This
   -- means you will at most need 4 + 1 + 31 extra characters: 4 for
   -- the base prefix (bb##), 1 for the sign, and up to 31 digits
   -- (i.e. max(ln2(Integer'First), ln2(Integer'Last))). The latter is
   -- enforced by comparing 2 ** 31 to Integer'First and Integer'Last.
   Put_Int_To_String_Max_Int_Size : constant Integer := 31;

   procedure Put_Int_To_String
     (Dest     :    out T;
      Item     : in     Integer;
      Start_Pt : in     Positions;
      Base     : in     Valid_Base);
   --# derives Dest from Base,
   --#                   Item,
   --#                   Start_Pt;
   --# pre Start_Pt < Lengths'Last - (4 + 1 + Put_Int_To_String_Max_Int_Size) and
   --#     -(2 ** Put_Int_To_String_Max_Int_Size) = Integer'First and
   --#     (2 ** Put_Int_To_String_Max_Int_Size) - 1 = Integer'Last;

   -- Given two strings, A and B, return the length of their
   -- common prefix (if any), limited to '.' and string
   -- boundaries. So, for example:
   --
   --    E_Str_A   E_Str_B   Prefix
   --    ---------------------------
   --    Foo.B1  | Foo.B2  | 3 (Foo)
   --    Foo     | Foo.B2  | 3 (Foo)
   --    Foo     | Foobar  | 0 ()
   --    A.B.C   | A.B.X   | 3 (A.B)
   function Get_Dotted_Common_Prefix_Length (E_Str_A, E_Str_B : T) return Lengths;

   -- This function returns true if the given E_Str starts with Str.
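   -- For example (illustrative only):
   --    Starts_With (Copy_String ("Foo.Bar"), "Foo")    -- True
   --    Starts_With (Copy_String ("Foo"),     "Foobar") -- False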
   function Starts_With (E_Str : T; Str : String) return Boolean;
   --# return X => X -> Get_Length (E_Str) >= Str'Length;

   -----------------------------------------------------------------------------
   -- I/O procedures
   -----------------------------------------------------------------------------

   -- See Ada.Text_IO for the Form_Of_File parameters
   procedure Create
     (File         : in out SPARK_IO.File_Type;
      Name_Of_File : in     T;
      Form_Of_File : in     String;
      Status       :    out SPARK_IO.File_Status);
   --# global in out SPARK_IO.File_Sys;
   --# derives File,
   --#         SPARK_IO.File_Sys,
   --#         Status            from File,
   --#                                Form_Of_File,
   --#                                Name_Of_File,
   --#                                SPARK_IO.File_Sys;

   procedure Open
     (File         : in out SPARK_IO.File_Type;
      Mode_Of_File : in     SPARK_IO.File_Mode;
      Name_Of_File : in     T;
      Form_Of_File : in     String;
      Status       :    out SPARK_IO.File_Status);
   --# global in out SPARK_IO.File_Sys;
   --# derives File,
   --#         SPARK_IO.File_Sys,
   --#         Status            from File,
   --#                                Form_Of_File,
   --#                                Mode_Of_File,
   --#                                Name_Of_File,
   --#                                SPARK_IO.File_Sys;

   procedure Put_String (File : in SPARK_IO.File_Type; E_Str : in T);
   --# global in out SPARK_IO.File_Sys;
   --# derives SPARK_IO.File_Sys from *,
   --#                                E_Str,
   --#                                File;

   procedure Put_Line (File : in SPARK_IO.File_Type; E_Str : in T);
   --# global in out SPARK_IO.File_Sys;
   --# derives SPARK_IO.File_Sys from *,
   --#                                E_Str,
   --#                                File;

   procedure Get_Line (File : in SPARK_IO.File_Type; E_Str : out T);
   --# global in out SPARK_IO.File_Sys;
   --# derives E_Str,
   --#         SPARK_IO.File_Sys from File,
   --#                                SPARK_IO.File_Sys;

private

   type T is record
      Content : SPARK.Ada.Strings.Unbounded.Unbounded_String;
   end record;

   Empty_String : constant T := T'(Content => SPARK.Ada.Strings.Unbounded.Null_Unbounded_String);

end E_Strings;
spark-2012.0.deb/examiner/dictionary-operator_is_visible.adb0000644000175000017500000007331311753202336023136 0ustar eugeneugen-------------------------------------------------------------------------------
-- (C) Altran Praxis Limited
-------------------------------------------------------------------------------
--
-- The SPARK toolset is free software; you can redistribute it and/or modify it
-- under terms of the GNU General Public License as published by the Free
-- Software Foundation; either version 3, or (at your option) any later
-- version. The SPARK toolset is distributed in the hope that it will be
-- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
-- Public License for more details. You should have received a copy of the GNU
-- General Public License distributed with the SPARK toolset; see file
-- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of
-- the license.
-- --============================================================================= separate (Dictionary) package body Operator_Is_Visible is function Check_Declarations_Are_Visible (Name : SP_Symbols.SP_Symbol; Scope : Dictionary.Scopes; Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in CommandLineData.Content; --# in Dictionary.Dict; is Current_Scope : Dictionary.Scopes; The_Package : RawDict.Package_Info_Ref; Is_Visible : Boolean; ------------------------------------------------------------------------------ function In_Same_Immediate_Package (Inner, Outer : Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; is Current_Scope : Dictionary.Scopes; Current_Region, Outer_Region : Dictionary.Symbol; begin Current_Scope := Inner; Outer_Region := Dictionary.GetRegion (Outer); loop Current_Region := Dictionary.GetRegion (Current_Scope); exit when Current_Region = Outer_Region or else RawDict.GetSymbolDiscriminant (Current_Region) = Dictionary.Package_Symbol; Current_Scope := Dictionary.GetEnclosingScope (Current_Scope); end loop; return Current_Region = Outer_Region; end In_Same_Immediate_Package; begin -- Check_Declarations_Are_Visible case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => Is_Visible := In_Same_Immediate_Package (Inner => Scope, Outer => Dictionary.Get_Type_Scope (Type_Mark => Type_Mark)); when CommandLineData.SPARK95_Onwards => Current_Scope := Scope; loop Is_Visible := In_Same_Immediate_Package (Inner => Current_Scope, Outer => Dictionary.Get_Type_Scope (Type_Mark => Type_Mark)); exit when Is_Visible; Current_Scope := Dictionary.GetEnclosingScope (Current_Scope); exit when Current_Scope = Dictionary.GlobalScope; end loop; if not Is_Visible then The_Package := Dictionary.Get_Library_Package (Scope => Scope); if The_Package /= Dictionary.Get_Predefined_Package_Standard then loop The_Package := RawDict.Get_Package_Parent (The_Package => The_Package); exit when The_Package = RawDict.Null_Package_Info_Ref; Is_Visible := In_Same_Immediate_Package (Inner => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => RawDict.Get_Package_Symbol (The_Package)), Outer => Dictionary.Get_Type_Scope (Type_Mark => Type_Mark)); exit when Is_Visible; end loop; end if; end if; end case; return Is_Visible and then (((Name = SP_Symbols.equals or else Name = SP_Symbols.not_equal) and then not Dictionary.Type_Is_Limited (Type_Mark => Type_Mark, Scope => Scope)) or else ((Name /= SP_Symbols.equals and then Name /= SP_Symbols.not_equal) and then not Dictionary.Type_Is_Private_Here (Type_Mark => Type_Mark, Scope => Scope))); end Check_Declarations_Are_Visible; -------------------------------------------------------------------------------- function Symmetric_Operators_Are_Visible (Name : SP_Symbols.SP_Symbol; Type_Mark : RawDict.Type_Info_Ref; Scope : Dictionary.Scopes) return Boolean --# global in CommandLineData.Content; --# in Dictionary.Dict; is begin return Type_Mark = Dictionary.Get_Unknown_Type_Mark or else Dictionary.Defined_In_Package_Standard (Type_Mark => Type_Mark) or else Check_Declarations_Are_Visible (Name => Name, Scope => Scope, Type_Mark => Type_Mark); end Symmetric_Operators_Are_Visible; -------------------------------------------------------------------------------- function Type_Is_Used (Name : SP_Symbols.SP_Symbol; Type_Mark : RawDict.Type_Info_Ref; Scope : Dictionary.Scopes) return Boolean --# global in CommandLineData.Content; --# in Dictionary.Dict; is Current_Scope : Dictionary.Scopes; The_Package : 
RawDict.Package_Info_Ref; Region : Dictionary.Symbol; Found : Boolean; -------------------------------------------------------------------------------- function Is_Used_In_This_Scope (Type_Mark : RawDict.Type_Info_Ref; Scope : Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; is Region : Dictionary.Symbol; Found : Boolean; -------------------------------------------------------------------------------- function Is_Used (Type_Mark : RawDict.Type_Info_Ref; Scope : Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; is Region : Dictionary.Symbol; The_Use_Type_Clause : RawDict.Use_Type_Clause_Info_Ref; Result : Boolean; begin Region := Dictionary.GetRegion (Scope); Result := Dictionary.Is_Used_Locally (Type_Mark => Type_Mark, Scope => Scope); if not Result and then RawDict.GetSymbolDiscriminant (Region) = Dictionary.Package_Symbol and then Dictionary.Get_Visibility (Scope => Scope) = Dictionary.Local then The_Use_Type_Clause := RawDict.Get_Package_Visible_Use_Type_Clauses (The_Package => RawDict.Get_Package_Info_Ref (Item => Region)); while The_Use_Type_Clause /= RawDict.Null_Use_Type_Clause_Info_Ref and then RawDict.Get_Use_Type_Clause_Type (The_Use_Type_Clause => The_Use_Type_Clause) /= Type_Mark loop The_Use_Type_Clause := RawDict.Get_Next_Use_Type_Clause (The_Use_Type_Clause => The_Use_Type_Clause); end loop; Result := The_Use_Type_Clause /= RawDict.Null_Use_Type_Clause_Info_Ref; end if; return Result; end Is_Used; begin -- Is_Used_In_This_Scope Region := Dictionary.GetRegion (Scope); case RawDict.GetSymbolDiscriminant (Region) is when Dictionary.Package_Symbol => Found := Is_Used (Type_Mark => Type_Mark, Scope => Scope); when Dictionary.Subprogram_Symbol => Found := Is_Used (Type_Mark => Type_Mark, Scope => Scope); when Dictionary.Type_Symbol => -- Task/Protected type SystemErrors.RT_Assert (C => Dictionary.Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Region)) or else Dictionary.Is_Protected_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Region)), Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Is_Used_In_This_Scope"); Found := Is_Used (Type_Mark => Type_Mark, Scope => Scope); when others => Found := False; end case; return Found; end Is_Used_In_This_Scope; -------------------------------------------------------------------------------- function Type_Exports_Operators (Type_Mark : RawDict.Type_Info_Ref; Scope : Dictionary.Scopes) return Boolean --# global in CommandLineData.Content; --# in Dictionary.Dict; is begin return not Dictionary.Type_Is_Private_Here (Type_Mark => Type_Mark, Scope => Scope) or else (CommandLineData.Ravenscar_Selected and then Dictionary.Is_Predefined_Time_Type (Type_Mark => Type_Mark)); end Type_Exports_Operators; begin -- Type_Is_Used case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => Found := False; when CommandLineData.SPARK95_Onwards => if Dictionary.Defined_In_Package_Standard (Type_Mark => Type_Mark) then -- can't possible be in a use type clause Found := False; else Current_Scope := Scope; loop Found := Is_Used_In_This_Scope (Type_Mark => Type_Mark, Scope => Current_Scope); exit when Found; Region := Dictionary.GetRegion (Current_Scope); exit when RawDict.GetSymbolDiscriminant (Region) = Dictionary.Subprogram_Symbol and then Dictionary.Is_Main_Program (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Region)); Current_Scope := Dictionary.GetEnclosingScope (Current_Scope); exit when Current_Scope = Dictionary.GlobalScope; end loop; if Current_Scope 
= Dictionary.GlobalScope then The_Package := Dictionary.Get_Library_Package (Scope => Scope); if The_Package /= Dictionary.Get_Predefined_Package_Standard then loop The_Package := RawDict.Get_Package_Parent (The_Package => The_Package); exit when The_Package = RawDict.Null_Package_Info_Ref; Found := Is_Used_In_This_Scope (Type_Mark => Type_Mark, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => RawDict.Get_Package_Symbol (The_Package))); exit when Found; end loop; end if; end if; end if; end case; return Found and then (Name = SP_Symbols.equals or else Name = SP_Symbols.not_equal or else Type_Exports_Operators (Type_Mark => Type_Mark, Scope => Scope)); end Type_Is_Used; -------------------------------------------------------------------------------- function Operator_Is_Renamed (Name : SP_Symbols.SP_Symbol; Is_Binary_Operator : Boolean; The_Left_Type : RawDict.Type_Info_Ref; The_Right_Type : RawDict.Type_Info_Ref; Scope : Dictionary.Scopes) return Boolean --# global in CommandLineData.Content; --# in Dictionary.Dict; is Current_Scope : Dictionary.Scopes; The_Package : RawDict.Package_Info_Ref; Region : Dictionary.Symbol; Found : Boolean; -------------------------------------------------------------------------------- function Is_Renamed_In_This_Scope (Name : SP_Symbols.SP_Symbol; Is_Binary_Operator : Boolean; The_Left_Type : RawDict.Type_Info_Ref; The_Right_Type : RawDict.Type_Info_Ref; Scope : Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; is Region : Dictionary.Symbol; Found : Boolean; -------------------------------------------------------------------------------- function Search_Renaming_Declarations (Name : SP_Symbols.SP_Symbol; Is_Binary_Operator : Boolean; The_Left_Type : RawDict.Type_Info_Ref; The_Right_Type : RawDict.Type_Info_Ref; Declarations : RawDict.Declaration_Info_Ref) return Boolean --# global in Dictionary.Dict; is Current : RawDict.Declaration_Info_Ref; Item : Dictionary.Symbol; begin Current := Declarations; loop exit when Current = RawDict.Null_Declaration_Info_Ref; Item := RawDict.Get_Declaration_Item (The_Declaration => Current); exit when RawDict.GetSymbolDiscriminant (Item) = Dictionary.Operator_Symbol and then RawDict.Get_Operator_Name (The_Operator => RawDict.Get_Operator_Info_Ref (Item => Item)) = Name and then RawDict.Get_Operator_Is_Binary (The_Operator => RawDict.Get_Operator_Info_Ref (Item => Item)) = Is_Binary_Operator and then ((Is_Binary_Operator and then RawDict.Get_Operator_Left_Operand (The_Operator => RawDict.Get_Operator_Info_Ref (Item => Item)) = The_Left_Type and then RawDict.Get_Operator_Right_Operand (The_Operator => RawDict.Get_Operator_Info_Ref (Item => Item)) = The_Right_Type) or else (not Is_Binary_Operator and then RawDict.Get_Operator_Operand (The_Operator => RawDict.Get_Operator_Info_Ref (Item => Item)) = The_Left_Type)); Current := RawDict.Get_Next_Declaration (The_Declaration => Current); end loop; return Current /= RawDict.Null_Declaration_Info_Ref; end Search_Renaming_Declarations; -------------------------------------------------------------------------------- function Is_Renamed_In_This_Package (Name : SP_Symbols.SP_Symbol; Is_Binary_Operator : Boolean; The_Left_Type : RawDict.Type_Info_Ref; The_Right_Type : RawDict.Type_Info_Ref; The_Package : RawDict.Package_Info_Ref; The_Visibility : Dictionary.Visibility) return Boolean --# global in Dictionary.Dict; is Found : Boolean; -------------------------------------------------------------------------------- function 
Is_Renamed_In_This_Package_Specification (Name : SP_Symbols.SP_Symbol; Is_Binary_Operator : Boolean; The_Left_Type : RawDict.Type_Info_Ref; The_Right_Type : RawDict.Type_Info_Ref; The_Package : RawDict.Package_Info_Ref) return Boolean --# global in Dictionary.Dict; is begin return Search_Renaming_Declarations (Name => Name, Is_Binary_Operator => Is_Binary_Operator, The_Left_Type => The_Left_Type, The_Right_Type => The_Right_Type, Declarations => RawDict.Get_Package_Visible_Renaming_Declarations (The_Package => The_Package)); end Is_Renamed_In_This_Package_Specification; -------------------------------------------------------------------------------- function Is_Renamed_In_This_Package_Body (Name : SP_Symbols.SP_Symbol; Is_Binary_Operator : Boolean; The_Left_Type : RawDict.Type_Info_Ref; The_Right_Type : RawDict.Type_Info_Ref; The_Package : RawDict.Package_Info_Ref) return Boolean --# global in Dictionary.Dict; is begin return Search_Renaming_Declarations (Name => Name, Is_Binary_Operator => Is_Binary_Operator, The_Left_Type => The_Left_Type, The_Right_Type => The_Right_Type, Declarations => RawDict.Get_Package_Local_Renaming_Declarations (The_Package => The_Package)); end Is_Renamed_In_This_Package_Body; begin -- Is_Renamed_In_This_Package case The_Visibility is when Dictionary.Visible | Dictionary.Privat => Found := Is_Renamed_In_This_Package_Specification (Name => Name, Is_Binary_Operator => Is_Binary_Operator, The_Left_Type => The_Left_Type, The_Right_Type => The_Right_Type, The_Package => The_Package); when Dictionary.Local => Found := Is_Renamed_In_This_Package_Body (Name => Name, Is_Binary_Operator => Is_Binary_Operator, The_Left_Type => The_Left_Type, The_Right_Type => The_Right_Type, The_Package => The_Package) or else Is_Renamed_In_This_Package_Specification (Name => Name, Is_Binary_Operator => Is_Binary_Operator, The_Left_Type => The_Left_Type, The_Right_Type => The_Right_Type, The_Package => The_Package); end case; return Found; end Is_Renamed_In_This_Package; -------------------------------------------------------------------------------- function Is_Renamed_In_This_Subprogram (Name : SP_Symbols.SP_Symbol; Is_Binary_Operator : Boolean; The_Left_Type : RawDict.Type_Info_Ref; The_Right_Type : RawDict.Type_Info_Ref; The_Subprogram : RawDict.Subprogram_Info_Ref) return Boolean --# global in Dictionary.Dict; is begin return Search_Renaming_Declarations (Name => Name, Is_Binary_Operator => Is_Binary_Operator, The_Left_Type => The_Left_Type, The_Right_Type => The_Right_Type, Declarations => RawDict.Get_Subprogram_Renaming_Declarations (The_Subprogram => The_Subprogram)); end Is_Renamed_In_This_Subprogram; begin -- Is_Renamed_In_This_Scope Region := Dictionary.GetRegion (Scope); case RawDict.GetSymbolDiscriminant (Region) is when Dictionary.Package_Symbol => Found := Is_Renamed_In_This_Package (Name => Name, Is_Binary_Operator => Is_Binary_Operator, The_Left_Type => The_Left_Type, The_Right_Type => The_Right_Type, The_Package => RawDict.Get_Package_Info_Ref (Item => Region), The_Visibility => Dictionary.Get_Visibility (Scope => Scope)); when Dictionary.Subprogram_Symbol => Found := Is_Renamed_In_This_Subprogram (Name => Name, Is_Binary_Operator => Is_Binary_Operator, The_Left_Type => The_Left_Type, The_Right_Type => The_Right_Type, The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Region)); when others => Found := False; end case; return Found; end Is_Renamed_In_This_Scope; begin -- Operator_Is_Renamed Current_Scope := Scope; loop Found := Is_Renamed_In_This_Scope (Name => 
Name, Is_Binary_Operator => Is_Binary_Operator, The_Left_Type => The_Left_Type, The_Right_Type => The_Right_Type, Scope => Current_Scope); exit when Found; Region := Dictionary.GetRegion (Current_Scope); exit when (RawDict.GetSymbolDiscriminant (Region) = Dictionary.Package_Symbol and then CommandLineData.Content.Language_Profile = CommandLineData.SPARK83) or else (RawDict.GetSymbolDiscriminant (Region) = Dictionary.Subprogram_Symbol and then Dictionary.Is_Main_Program (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Region))); Current_Scope := Dictionary.GetEnclosingScope (Current_Scope); exit when Current_Scope = Dictionary.GlobalScope; end loop; if Current_Scope = Dictionary.GlobalScope then The_Package := Dictionary.Get_Library_Package (Scope => Scope); if The_Package /= Dictionary.Get_Predefined_Package_Standard then loop The_Package := RawDict.Get_Package_Parent (The_Package => The_Package); exit when The_Package = RawDict.Null_Package_Info_Ref; Found := Is_Renamed_In_This_Scope (Name => Name, Is_Binary_Operator => Is_Binary_Operator, The_Left_Type => The_Left_Type, The_Right_Type => The_Right_Type, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => RawDict.Get_Package_Symbol (The_Package))); exit when Found; end loop; end if; end if; return Found; end Operator_Is_Renamed; -------------------------------------------------------------------------------- function Unary_Operator_Is_Visible (Name : SP_Symbols.SP_Symbol; Type_Mark : RawDict.Type_Info_Ref; Scope : Dictionary.Scopes) return Boolean is begin return Symmetric_Operators_Are_Visible (Name => Name, Type_Mark => Type_Mark, Scope => Scope) or else Type_Is_Used (Name => Name, Type_Mark => Type_Mark, Scope => Scope) or else Operator_Is_Renamed (Name => Name, Is_Binary_Operator => False, The_Left_Type => Type_Mark, The_Right_Type => RawDict.Null_Type_Info_Ref, Scope => Scope); end Unary_Operator_Is_Visible; -------------------------------------------------------------------------------- function Binary_Operator_Is_Visible (Name : SP_Symbols.SP_Symbol; The_Left_Type : RawDict.Type_Info_Ref; The_Right_Type : RawDict.Type_Info_Ref; Scope : Dictionary.Scopes) return Boolean is function Is_Directly_Visible (Name : SP_Symbols.SP_Symbol; The_Left_Type : RawDict.Type_Info_Ref; The_Right_Type : RawDict.Type_Info_Ref; Scope : Dictionary.Scopes) return Boolean --# global in CommandLineData.Content; --# in Dictionary.Dict; is Is_Visible : Boolean; -------------------------------------------------------------------------------- function Asymmetric_Operators_Are_Visible (Name : SP_Symbols.SP_Symbol; The_Left_Type : RawDict.Type_Info_Ref; The_Right_Type : RawDict.Type_Info_Ref; Scope : Dictionary.Scopes) return Boolean --# global in CommandLineData.Content; --# in Dictionary.Dict; is Is_Visible : Boolean; begin if The_Left_Type = Dictionary.Get_Unknown_Type_Mark or else Dictionary.Defined_In_Package_Standard (Type_Mark => The_Left_Type) then Is_Visible := The_Right_Type = Dictionary.Get_Unknown_Type_Mark or else Dictionary.Defined_In_Package_Standard (Type_Mark => The_Right_Type) or else Check_Declarations_Are_Visible (Name => Name, Scope => Scope, Type_Mark => The_Right_Type); elsif The_Right_Type = Dictionary.Get_Unknown_Type_Mark or else Dictionary.Defined_In_Package_Standard (Type_Mark => The_Right_Type) then Is_Visible := Check_Declarations_Are_Visible (Name => Name, Scope => Scope, Type_Mark => The_Left_Type); else Is_Visible := Check_Declarations_Are_Visible (Name => Name, Scope => Scope, 
Type_Mark => The_Left_Type) and then Check_Declarations_Are_Visible (Name => Name, Scope => Scope, Type_Mark => The_Right_Type); end if; return Is_Visible; end Asymmetric_Operators_Are_Visible; begin -- Is_Directly_Visible if Name = SP_Symbols.multiply and then (Dictionary.Type_Is_Fixed_Point (Type_Mark => The_Left_Type) or else The_Left_Type = Dictionary.Get_Unknown_Type_Mark) and then (Dictionary.Type_Is_Fixed_Point (Type_Mark => The_Right_Type) or else The_Right_Type = Dictionary.Get_Unknown_Type_Mark) then Is_Visible := True; elsif Name = SP_Symbols.divide and then (Dictionary.Type_Is_Fixed_Point (Type_Mark => The_Left_Type) or else The_Left_Type = Dictionary.Get_Unknown_Type_Mark) and then (Dictionary.Type_Is_Fixed_Point (Type_Mark => The_Right_Type) or else The_Right_Type = Dictionary.Get_Unknown_Type_Mark) then Is_Visible := True; elsif The_Left_Type = The_Right_Type then Is_Visible := Symmetric_Operators_Are_Visible (Name => Name, Type_Mark => The_Left_Type, Scope => Scope); else Is_Visible := Asymmetric_Operators_Are_Visible (Name => Name, The_Left_Type => The_Left_Type, The_Right_Type => The_Right_Type, Scope => Scope); end if; return Is_Visible; end Is_Directly_Visible; -------------------------------------------------------------------------------- function Binary_Operator_Is_Renamed (Name : SP_Symbols.SP_Symbol; The_Left_Type : RawDict.Type_Info_Ref; The_Right_Type : RawDict.Type_Info_Ref; Scope : Dictionary.Scopes) return Boolean --# global in CommandLineData.Content; --# in Dictionary.Dict; is Name_To_Look_For : SP_Symbols.SP_Symbol; begin if Name = SP_Symbols.not_equal then Name_To_Look_For := SP_Symbols.equals; else Name_To_Look_For := Name; end if; return Operator_Is_Renamed (Name => Name_To_Look_For, Is_Binary_Operator => True, The_Left_Type => The_Left_Type, The_Right_Type => The_Right_Type, Scope => Scope); end Binary_Operator_Is_Renamed; begin -- Binary_Operator_Is_Visible return Is_Directly_Visible (Name => Name, The_Left_Type => The_Left_Type, The_Right_Type => The_Right_Type, Scope => Scope) or else Type_Is_Used (Name => Name, Type_Mark => The_Left_Type, Scope => Scope) or else Type_Is_Used (Name => Name, Type_Mark => The_Right_Type, Scope => Scope) or else Binary_Operator_Is_Renamed (Name => Name, The_Left_Type => The_Left_Type, The_Right_Type => The_Right_Type, Scope => Scope); end Binary_Operator_Is_Visible; end Operator_Is_Visible; spark-2012.0.deb/examiner/errorhandler-warningstatus-readwarningfile.adb0000644000175000017500000007126111753202336025465 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Ada.Characters.Latin_1; with CommandLineHandler; separate (ErrorHandler.WarningStatus) procedure ReadWarningFile is Option : E_Strings.T; File_OK : Boolean; Warning_File : SPARK_IO.File_Type; procedure Open_File --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# out File_OK; --# out Warning_File; --# derives File_OK, --# SPARK_IO.File_Sys, --# Warning_File from CommandLineData.Content, --# SPARK_IO.File_Sys; is File_Name : E_Strings.T; File_Spec_Status : FileSystem.Typ_File_Spec_Status; File_Status : SPARK_IO.File_Status; begin --# accept Flow, 10, File_Spec_Status, "Expected ineffective assignment to File_Spec_Status"; FileSystem.Find_Full_File_Name (File_Spec => CommandLineData.Content.Warning_File_Name, File_Status => File_Spec_Status, Full_File_Name => File_Name); --# end accept; Warning_File := SPARK_IO.Null_File; -- to avoid error on opening E_Strings.Open (File => Warning_File, Mode_Of_File => SPARK_IO.In_File, Name_Of_File => File_Name, Form_Of_File => "", Status => File_Status); if File_Status = SPARK_IO.Ok then File_OK := True; else File_OK := False; ScreenEcho.Put_String ("Cannot open file "); if CommandLineData.Content.Plain_Output then ScreenEcho.Put_ExaminerLine (FileSystem.Just_File (Fn => File_Name, Ext => True)); else ScreenEcho.Put_ExaminerLine (File_Name); end if; end if; --# accept Flow, 33, File_Spec_Status, "Expected File_Spec_Status to be neither referenced nor exported"; end Open_File; ------------------------------------------------ procedure Close_File --# global in Warning_File; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Warning_File; is File_Status : SPARK_IO.File_Status; begin --# accept Flow, 10, File_Status, "Expected ineffective assignment to File_Status" & --# Flow, 10, Warning_File, "Not assigned to. Due to Text_IO mode in out"; SPARK_IO.Close (Warning_File, File_Status); --# end accept; --# accept Flow, 33, File_Status, "Expected File_Status to be neither referenced nor exported" & --# Flow, 34, Warning_File, "Not assigned to. 
Due to Text_IO mode in out"; end Close_File; ------------------------------------------------ procedure Get_String (File : in SPARK_IO.File_Type; Str : out E_Strings.T) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys, --# Str from File, --# SPARK_IO.File_Sys; is Char_OK : Boolean; Ch : Character; procedure Get_Char (File : in SPARK_IO.File_Type; Ch : out Character; OK : out Boolean) --# global in out SPARK_IO.File_Sys; --# derives Ch, --# OK, --# SPARK_IO.File_Sys from File, --# SPARK_IO.File_Sys; is Ch_Local : Character; begin if SPARK_IO.End_Of_File (File) then OK := False; Ch := ' '; elsif SPARK_IO.End_Of_Line (File) then SPARK_IO.Skip_Line (File, 1); OK := True; Ch := ' '; else SPARK_IO.Get_Char (File, Ch_Local); if (Ch_Local = Ada.Characters.Latin_1.HT) or (Ch_Local = Ada.Characters.Latin_1.CR) then Ch_Local := ' '; end if; if Ch_Local = '-' then --must be comment start SPARK_IO.Skip_Line (File, 1); OK := True; Ch := ' '; else --valid character to return OK := True; Ch := Ch_Local; end if; end if; end Get_Char; begin --Get_String Str := E_Strings.Empty_String; --skip leading white space loop Get_Char (File => File, Ch => Ch, OK => Char_OK); exit when Ch /= ' '; exit when not Char_OK; end loop; if Char_OK then loop E_Strings.Append_Char (E_Str => Str, Ch => Ch); Get_Char (File => File, Ch => Ch, OK => Char_OK); exit when Ch = ' '; exit when not Char_OK; end loop; end if; end Get_String; ----------------------------------- procedure Invalid_Option (Opt : E_Strings.T) --# global in CommandLineData.Content; --# in Warning_File; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Opt, --# Warning_File; is begin if CommandLineData.Content.Brief then if CommandLineData.Content.Plain_Output then ScreenEcho.Put_ExaminerString (FileSystem.Just_File (Fn => CommandLineData.Content.Warning_File_Name, Ext => True)); else ScreenEcho.Put_ExaminerString (CommandLineData.Content.Warning_File_Name); end if; ScreenEcho.Put_Char (':'); ScreenEcho.Put_Integer (SPARK_IO.Line (Warning_File), 0, 10); ScreenEcho.Put_Char (':'); ScreenEcho.Put_Integer (1, 0, 10); ScreenEcho.Put_Char (':'); ScreenEcho.Put_String ("Invalid warning option: "); ScreenEcho.Put_ExaminerLine (Opt); else ScreenEcho.Put_String ("Invalid warning option: "); ScreenEcho.Put_ExaminerLine (Opt); end if; end Invalid_Option; ------------------------------- procedure Process_Option (Opt : in E_Strings.T) --# global in CommandLineData.Content; --# in Warning_File; --# in out LexTokenManager.State; --# in out Pragma_List; --# in out SPARK_IO.File_Sys; --# in out Suppressed_Element; --# in out Suppress_All_Pragmas; --# derives LexTokenManager.State, --# Pragma_List from LexTokenManager.State, --# Opt, --# Pragma_List, --# SPARK_IO.File_Sys, --# Warning_File & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Opt, --# Pragma_List, --# Warning_File & --# Suppressed_Element from *, --# Opt & --# Suppress_All_Pragmas from *, --# Opt, --# SPARK_IO.File_Sys, --# Warning_File; is Option_Match : Boolean; procedure Process_Pragma --# global in Warning_File; --# in out LexTokenManager.State; --# in out Pragma_List; --# in out SPARK_IO.File_Sys; --# in out Suppress_All_Pragmas; --# derives LexTokenManager.State, --# SPARK_IO.File_Sys from *, --# Pragma_List, --# SPARK_IO.File_Sys, --# Warning_File & --# Pragma_List from *, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Warning_File & --# Suppress_All_Pragmas from *, --# SPARK_IO.File_Sys, --# Warning_File; is Pragma_Name 
: E_Strings.T; procedure Add_Pragma_Name (Prag : in E_Strings.T) --# global in out LexTokenManager.State; --# in out Pragma_List; --# in out SPARK_IO.File_Sys; --# derives LexTokenManager.State, --# SPARK_IO.File_Sys from *, --# Prag, --# Pragma_List & --# Pragma_List from *, --# LexTokenManager.State, --# Prag; is Lex_Name : LexTokenManager.Lex_String; begin if Pragma_List.Pragma_Count < (ExaminerConstants.MaxPragmasInWarningFile - 1) then LexTokenManager.Insert_Examiner_String (Str => Prag, Lex_Str => Lex_Name); Pragma_List.Pragma_Count := Pragma_List.Pragma_Count + 1; Pragma_List.Pragma_Array (Pragma_List.Pragma_Count) := Lex_Name; else -- too many ScreenEcho.Put_String ("Too many pragmas, ignoring: "); ScreenEcho.Put_ExaminerLine (Prag); end if; end Add_Pragma_Name; begin -- Process_Pragma Get_String (File => Warning_File, Str => Pragma_Name); if E_Strings.Get_Length (E_Str => Pragma_Name) /= 0 then if CommandLineHandler.Check_Option_Name (Opt_Name => Pragma_Name, Str => "all") then Suppress_All_Pragmas := True; else Add_Pragma_Name (Prag => Pragma_Name); end if; else ScreenEcho.Put_Line ("Pragma name missing"); end if; end Process_Pragma; begin -- Process_Option Option_Match := False; case E_Strings.Get_Element (E_Str => Opt, Pos => 1) is when 'a' | 'A' => case E_Strings.Get_Element (E_Str => Opt, Pos => 3) is when 'a' | 'A' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "ada2005_reserved_words") then Suppressed_Element (ErrorHandler.Ada2005_Reserved_Words) := True; Option_Match := True; end if; when 'd' | 'D' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "address_clauses") then Suppressed_Element (ErrorHandler.Unexpected_Address_Clauses) := True; Option_Match := True; end if; when others => null; end case; when 'c' | 'C' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "constant_variables") then Suppressed_Element (ErrorHandler.Constant_Variables) := True; Option_Match := True; end if; when 'd' | 'D' => case E_Strings.Get_Element (E_Str => Opt, Pos => 2) is when 'e' | 'E' => case E_Strings.Get_Element (E_Str => Opt, Pos => 3) is when 'c' | 'C' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "declare_annotations") then Suppressed_Element (ErrorHandler.Declare_Annotations) := True; Option_Match := True; end if; when 'f' | 'F' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "default_loop_assertions") then Suppressed_Element (ErrorHandler.Default_Loop_Assertions) := True; Option_Match := True; end if; when others => null; -- falls through with Option_Match false and generates error end case; when 'i' | 'I' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "direct_updates") then Suppressed_Element (ErrorHandler.Direct_Updates) := True; Option_Match := True; end if; when others => null; end case; when 'e' | 'E' => case E_Strings.Get_Element (E_Str => Opt, Pos => 3) is when 'p' | 'P' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "expression_reordering") then Suppressed_Element (ErrorHandler.Expression_Reordering) := True; Option_Match := True; end if; when 't' | 'T' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "external_assignment") then Suppressed_Element (ErrorHandler.External_Variable_Assignment) := True; Option_Match := True; end if; when others => null; end case; when 'h' | 'H' => case E_Strings.Get_Element (E_Str => Opt, Pos => 2) is when 'a' | 'A' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => 
"handler_parts") then Suppressed_Element (ErrorHandler.Handler_Parts) := True; Option_Match := True; end if; when 'i' | 'I' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "hidden_parts") then Suppressed_Element (ErrorHandler.Hidden_Parts) := True; Option_Match := True; end if; when others => null; end case; when 'i' | 'I' => case E_Strings.Get_Element (E_Str => Opt, Pos => 2) is when 'm' | 'M' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "imported_objects") then Suppressed_Element (ErrorHandler.Imported_Objects) := True; Option_Match := True; end if; when 'n' | 'N' => case E_Strings.Get_Element (E_Str => Opt, Pos => 3) is when 'd' | 'D' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "index_manager_duplicates") then Suppressed_Element (ErrorHandler.Index_Manager_Duplicates) := True; Option_Match := True; end if; when 't' | 'T' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "interrupt_handlers") then Suppressed_Element (ErrorHandler.Interrupt_Handlers) := True; Option_Match := True; end if; when others => null; end case; when others => null; end case; when 'm' | 'M' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "main_program_precondition") then Suppressed_Element (ErrorHandler.Main_Program_Precondition) := True; Option_Match := True; end if; when 'n' | 'N' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "notes") then Suppressed_Element (ErrorHandler.Notes) := True; Option_Match := True; end if; when 'o' | 'O' => case E_Strings.Get_Element (E_Str => Opt, Pos => 2) is when 'b' | 'B' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "obsolescent_features") then Suppressed_Element (ErrorHandler.Obsolescent_Features) := True; Option_Match := True; end if; when 't' | 'T' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "others_clauses") then Suppressed_Element (ErrorHandler.Others_Clauses) := True; Option_Match := True; end if; when others => null; end case; when 'p' | 'P' => case E_Strings.Get_Element (E_Str => Opt, Pos => 3) is when 'a' | 'A' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "pragma") then Suppressed_Element (ErrorHandler.Pragmas) := True; Process_Pragma; Option_Match := True; end if; when 'i' | 'I' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "private_types") then Suppressed_Element (ErrorHandler.Unuseable_Private_Types) := True; Option_Match := True; end if; when 'o' | 'O' => -- We need to check that at least "proof_function_" -- (15 characters) and then one more letter is -- written down, otherwise these all look the same -- and the annoying short-hand features always pick -- the first warning keyword we check for. 
if E_Strings.Get_Length (Opt) >= 16 then if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "proof_function_non_boolean") then Suppressed_Element (ErrorHandler.Proof_Function_Non_Boolean) := True; Option_Match := True; elsif CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "proof_function_implicit") then Suppressed_Element (ErrorHandler.Proof_Function_Implicit) := True; Option_Match := True; elsif CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "proof_function_refinement") then Suppressed_Element (ErrorHandler.Proof_Function_Refinement) := True; Option_Match := True; end if; end if; when others => null; end case; when 'r' | 'R' => case E_Strings.Get_Element (E_Str => Opt, Pos => 3) is when 'a' | 'A' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "real_rtcs") then Suppressed_Element (ErrorHandler.Real_RTCs) := True; Option_Match := True; end if; when 'p' | 'P' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "representation_clauses") then Suppressed_Element (ErrorHandler.Representation_Clauses) := True; Option_Match := True; end if; when others => null; end case; when 's' | 'S' => case E_Strings.Get_Element (E_Str => Opt, Pos => 2) is when 'l' | 'L' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "sli_generation") then Suppressed_Element (ErrorHandler.SLI_Generation) := True; Option_Match := True; end if; when 't' | 'T' => case E_Strings.Get_Element (E_Str => Opt, Pos => 3) is when 'a' | 'A' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "static_expressions") then Suppressed_Element (ErrorHandler.Static_Expressions) := True; Option_Match := True; end if; when 'y' | 'Y' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "style_check_casing") then Suppressed_Element (ErrorHandler.Style_Check_Casing) := True; Option_Match := True; end if; when others => null; end case; when others => null; end case; when 't' | 'T' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "type_conversions") then Suppressed_Element (ErrorHandler.Type_Conversions) := True; Option_Match := True; end if; when 'u' | 'U' => case E_Strings.Get_Element (E_Str => Opt, Pos => 3) is when 'c' | 'C' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "unchecked_conversion") then Suppressed_Element (ErrorHandler.Unchecked_Conversion) := True; Option_Match := True; end if; when 'u' | 'U' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "unused_variables") then Suppressed_Element (ErrorHandler.Unused_Variables) := True; Option_Match := True; end if; when others => null; end case; when 'w' | 'W' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "with_clauses") then Suppressed_Element (ErrorHandler.With_Clauses) := True; Option_Match := True; end if; when others => null; end case; if not Option_Match then Invalid_Option (Opt => Opt); end if; end Process_Option; ------------------------------- procedure Sort_Pragmas --# global in LexTokenManager.State; --# in Suppress_All_Pragmas; --# in out Pragma_List; --# derives Pragma_List from *, --# LexTokenManager.State, --# Suppress_All_Pragmas; is J : Integer; Val : LexTokenManager.Lex_String; begin if not Suppress_All_Pragmas and then Pragma_List.Pragma_Count > 1 then for I in reverse Integer range 1 .. 
Pragma_List.Pragma_Count - 1 loop J := I; Val := Pragma_List.Pragma_Array (J); while J < Pragma_List.Pragma_Count and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_List.Pragma_Array (J + 1), Lex_Str2 => Val) = LexTokenManager.Str_First loop Pragma_List.Pragma_Array (J) := Pragma_List.Pragma_Array (J + 1); J := J + 1; end loop; Pragma_List.Pragma_Array (J) := Val; end loop; end if; end Sort_Pragmas; begin --ReadWarningFile if CommandLineData.Content.Warning then Open_File; if File_OK then if CommandLineData.Content.Echo and not CommandLineData.Content.Brief then ScreenEcho.New_Line (1); ScreenEcho.Put_Line (" Reading warning control file ..."); end if; loop Get_String (File => Warning_File, Str => Option); exit when E_Strings.Get_Length (E_Str => Option) = 0; Process_Option (Opt => Option); end loop; Close_File; Sort_Pragmas; for I in ErrorHandler.Warning_Elements loop Something_Suppressed := Something_Suppressed or Suppressed_Element (I); end loop; else ErrorHandler.File_Open_Error := True; end if; end if; end ReadWarningFile; spark-2012.0.deb/examiner/sem-wf_formal_part.adb0000644000175000017500000001423311753202336020510 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem) procedure Wf_Formal_Part (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Subprog_Sym : in Dictionary.Symbol; First_Occurrence : in Boolean; Context : in Dictionary.Contexts) is -- look up table: if First_Occurrence then we are dealing with Abstract spec else REfined type Which_Abstractions is array (Boolean) of Dictionary.Abstractions; Which_Abstraction : constant Which_Abstractions := Which_Abstractions'(False => Dictionary.IsRefined, True => Dictionary.IsAbstract); It : STree.Iterator; Parameter_Count : Natural; Parameter_List_Has_Errors : Boolean := False; Next_Node : STree.SyntaxNode; -------------------------------------------------------------------------- procedure Wf_Param (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Subprog_Sym : in Dictionary.Symbol; First_Occurrence : in Boolean; Context : in Dictionary.Contexts; Param_Count : in out Natural; Errors_Found : in out Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# Errors_Found from *, --# CommandLineData.Content, --# Context, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# First_Occurrence, --# LexTokenManager.State, --# Node, --# Param_Count, --# STree.Table, --# Subprog_Sym & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Context, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# First_Occurrence, --# LexTokenManager.State, --# Node, --# Param_Count, --# SPARK_IO.File_Sys, --# STree.Table, --# Subprog_Sym & --# Param_Count, --# STree.Table from *, --# CommandLineData.Content, --# Context, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.parameter_specification; --# post STree.Table = STree.Table~; is separate; begin -- Wf_Formal_Part Parameter_Count := 0; It := Find_First_Node (Node_Kind => SP_Symbols.parameter_specification, From_Root => Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.parameter_specification and --# Next_Node = Get_Node (It); Wf_Param (Node => Next_Node, Current_Scope => Current_Scope, Subprog_Sym => Subprog_Sym, First_Occurrence => First_Occurrence, Context => Context, Param_Count => Parameter_Count, Errors_Found => Parameter_List_Has_Errors); It := STree.NextNode (It); end loop; if Parameter_List_Has_Errors then Dictionary.SetSubprogramSignatureNotWellformed (Which_Abstraction (First_Occurrence), Subprog_Sym); end if; if not First_Occurrence and then Parameter_Count /= Dictionary.GetNumberOfSubprogramParameters (Subprog_Sym) then Dictionary.SetSubprogramSignatureNotWellformed (Dictionary.IsRefined, Subprog_Sym); ErrorHandler.Semantic_Error (Err_Num => 152, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Dictionary.GetSimpleName (Subprog_Sym)); end if; end Wf_Formal_Part; spark-2012.0.deb/examiner/dictionary-targetdata.adb0000644000175000017500000006620311753202336021213 0ustar 
eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with CommandLineData; with CommandLineHandler; with E_Strings; with FileSystem; with ScreenEcho; with SystemErrors; with XMLReport; separate (Dictionary) package body TargetData is -- Types------------------------------------------------------------------------------- type Val_Status is (OK_Val, Missing_Val, Illegal_Val); type Val_Sort is (Integer_Val, Real_Val); -- Local Subprograms------------------------------------------------------------------- procedure Open_File (Data_File : out SPARK_IO.File_Type; File_OK : out Boolean) --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives Data_File, --# File_OK, --# SPARK_IO.File_Sys from CommandLineData.Content, --# SPARK_IO.File_Sys; is File_Name : E_Strings.T; File_Spec_Status : FileSystem.Typ_File_Spec_Status; File_Status : SPARK_IO.File_Status; Local_File : SPARK_IO.File_Type; begin Local_File := SPARK_IO.Null_File; --# accept Flow, 10, File_Spec_Status, "Expected ineffective assignment"; FileSystem.Find_Full_File_Name -- 782 expect flow error File_Spec_Status not used. 
(File_Spec => CommandLineData.Content.Target_Data_File, File_Status => File_Spec_Status, Full_File_Name => File_Name); --# end accept; E_Strings.Open (File => Local_File, Mode_Of_File => SPARK_IO.In_File, Name_Of_File => File_Name, Form_Of_File => "", Status => File_Status); if File_Status = SPARK_IO.Ok then File_OK := True; else File_OK := False; ScreenEcho.Put_String ("Cannot open file "); if CommandLineData.Content.Plain_Output then ScreenEcho.Put_ExaminerLine (FileSystem.Just_File (Fn => File_Name, Ext => True)); else ScreenEcho.Put_ExaminerLine (File_Name); end if; end if; Data_File := Local_File; --# accept Flow, 33, File_Spec_Status, "Expected to be neither referenced nor exported"; end Open_File; ------------------------------------------------ procedure Close_File (Data_File : in out SPARK_IO.File_Type) --# global in out SPARK_IO.File_Sys; --# derives Data_File, --# SPARK_IO.File_Sys from *, --# Data_File; is File_Status : SPARK_IO.File_Status; begin --# accept Flow, 10, File_Status, "Expected ineffective assignment"; SPARK_IO.Close (Data_File, File_Status); --# accept Flow, 33, File_Status, "Expected to be neither referenced nor exported"; end Close_File; ------------------------------------------------ procedure Get_String (File : in SPARK_IO.File_Type; Str : out E_Strings.T) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys, --# Str from File, --# SPARK_IO.File_Sys; is Char_OK : Boolean; Ch : Character; procedure Get_Char (File : in SPARK_IO.File_Type; Ch : out Character; OK : out Boolean) --# global in out SPARK_IO.File_Sys; --# derives Ch, --# OK, --# SPARK_IO.File_Sys from File, --# SPARK_IO.File_Sys; is begin if SPARK_IO.End_Of_File (File) then OK := False; Ch := ' '; elsif SPARK_IO.End_Of_Line (File) then SPARK_IO.Skip_Line (File, 1); OK := True; Ch := ' '; else SPARK_IO.Get_Char (File, Ch); OK := True; end if; end Get_Char; begin --Get_String Str := E_Strings.Empty_String; --skip leading white space loop Get_Char (File => File, Ch => Ch, OK => Char_OK); exit when Ch /= ' '; exit when not Char_OK; end loop; if Char_OK then loop E_Strings.Append_Char (E_Str => Str, Ch => Ch); Get_Char (File => File, Ch => Ch, OK => Char_OK); exit when Ch = ' '; exit when not Char_OK; end loop; end if; end Get_String; ------------------------------- procedure Skip_Equals (Data_File : SPARK_IO.File_Type) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Data_File; is Unused_String : E_Strings.T; pragma Unreferenced (Unused_String); begin --# accept Flow, 10, Unused_String, "Expected ineffective assignment" & --# Flow, 33, Unused_String, "Expected to be neither referenced nor exported"; Get_String (File => Data_File, Str => Unused_String); end Skip_Equals; -------------------------------- procedure Get_Data_Value (Data_File : in SPARK_IO.File_Type; Sort : in Val_Sort; Val : out LexTokenManager.Lex_String; Status : out Val_Status) --# global in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives LexTokenManager.State, --# Status, --# Val from Data_File, --# LexTokenManager.State, --# Sort, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# Data_File; is Value_String : E_Strings.T; Lex_Val : LexTokenManager.Lex_String; Conv_OK : Maths.ErrorCode; Num : Maths.Value; Is_Negative : Boolean; Dummy_Char : Character; begin --Get_Data_Value Skip_Equals (Data_File => Data_File); Get_String (File => Data_File, Str => Value_String); if E_Strings.Get_Length (E_Str => Value_String) /= 0 then if E_Strings.Get_Element (E_Str => Value_String, 
Pos => 1) = '-' then --# accept F, 10, Dummy_Char, "Ineffective assignment here OK"; E_Strings.Pop_Char (E_Str => Value_String, Char => Dummy_Char); --# end accept; Is_Negative := True; else Is_Negative := False; end if; LexTokenManager.Insert_Examiner_String (Str => Value_String, Lex_Str => Lex_Val); Maths.LiteralToValue (Lex_Val, -- to get Num, Conv_OK); if Conv_OK = Maths.NoError then if Sort = Integer_Val and not Maths.IsIntegerValue (Num) then Val := LexTokenManager.Null_String; Status := Illegal_Val; elsif Sort = Real_Val and not Maths.IsRealValue (Num) then Val := LexTokenManager.Null_String; Status := Illegal_Val; else if Is_Negative then Maths.Negate (Num); end if; Maths.StorageRep (Num, Val); Status := OK_Val; end if; else Val := LexTokenManager.Null_String; Status := Illegal_Val; end if; else Val := LexTokenManager.Null_String; Status := Missing_Val; end if; --# accept F, 33, Dummy_Char, "Dummy_Char not referenced here"; end Get_Data_Value; ----------------------------------------------------------- procedure Echo_Error (Status : in Val_Status) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Status; is begin case Status is when OK_Val => null; when Illegal_Val => ScreenEcho.Put_Line ("Illegal value"); when Missing_Val => ScreenEcho.Put_Line ("Value missing"); end case; end Echo_Error; --Exported Subprograms----------------------------------------------------------------- procedure Read_Target_Data_File is Option : E_Strings.T; File_OK : Boolean; Data_File : SPARK_IO.File_Type; procedure Invalid_Option (Opt : E_Strings.T) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Opt; is begin ScreenEcho.Put_String ("Invalid target compiler data item: "); ScreenEcho.Put_ExaminerLine (Opt); end Invalid_Option; ------------------------------- procedure Process_Option (Opt : E_Strings.T) --# global in Data_File; --# in out Dictionary.Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict, --# LexTokenManager.State, --# SPARK_IO.File_Sys from *, --# Data_File, --# LexTokenManager.State, --# Opt, --# SPARK_IO.File_Sys; is Option_Match : Boolean; Val : LexTokenManager.Lex_String; Status : Val_Status; begin Option_Match := False; case E_Strings.Get_Element (E_Str => Opt, Pos => 1) is when 'i' | 'I' => case E_Strings.Get_Element (E_Str => Opt, Pos => 9) is when 'f' | 'F' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "integer'first") then Get_Data_Value (Data_File => Data_File, Sort => Integer_Val, Val => Val, Status => Status); Echo_Error (Status => Status); RawDict.Set_Type_Lower (Type_Mark => Dictionary.Get_Predefined_Integer_Type, Lower => Val); Option_Match := True; end if; when 'l' | 'L' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "integer'last") then Get_Data_Value (Data_File => Data_File, Sort => Integer_Val, Val => Val, Status => Status); Echo_Error (Status => Status); RawDict.Set_Type_Upper (Type_Mark => Dictionary.Get_Predefined_Integer_Type, Upper => Val); RawDict.Set_Type_Upper (Type_Mark => Dictionary.Get_Predefined_Positive_Subtype, Upper => Val); RawDict.Set_Type_Upper (Type_Mark => Dictionary.Get_Predefined_Natural_Subtype, Upper => Val); Option_Match := True; end if; when others => null; end case; when 'l' | 'L' => case E_Strings.Get_Element (E_Str => Opt, Pos => 14) is when 'f' | 'F' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "long_integer'first") then Get_Data_Value (Data_File => Data_File, Sort => 
Integer_Val, Val => Val, Status => Status); Echo_Error (Status => Status); RawDict.Set_Type_Lower (Type_Mark => Dictionary.Get_Predefined_Long_Integer_Type, Lower => Val); Option_Match := True; end if; when 'l' | 'L' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "long_integer'last") then Get_Data_Value (Data_File => Data_File, Sort => Integer_Val, Val => Val, Status => Status); Echo_Error (Status => Status); RawDict.Set_Type_Upper (Type_Mark => Dictionary.Get_Predefined_Long_Integer_Type, Upper => Val); Option_Match := True; end if; when others => null; end case; when others => null; end case; if not Option_Match then Invalid_Option (Opt => Opt); end if; end Process_Option; ------------------------------- begin --Read_Target_Data_File if CommandLineData.Content.Target_Data then Open_File (Data_File => Data_File, File_OK => File_OK); if File_OK then if CommandLineData.Content.Echo and not CommandLineData.Content.Brief then ScreenEcho.New_Line (1); ScreenEcho.Put_Line (" Reading target compiler data ..."); end if; loop Get_String (File => Data_File, Str => Option); exit when E_Strings.Get_Length (E_Str => Option) = 0; Process_Option (Opt => Option); end loop; --# accept Flow, 10, Data_File, "Expected ineffective assignment"; Close_File (Data_File => Data_File); --# end accept; end if; end if; end Read_Target_Data_File; ------------------------------------------------------------------------------ procedure Output_Target_Data_File (To_File : in SPARK_IO.File_Type) is Option : E_Strings.T; File_OK : Boolean; Data_File : SPARK_IO.File_Type; -------------------------------- procedure Invalid_Option (To_File : SPARK_IO.File_Type; Opt : E_Strings.T) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Opt, --# To_File; is begin SPARK_IO.Put_String (To_File, "Invalid target compiler data item: ", 0); E_Strings.Put_String (File => To_File, E_Str => Opt); SPARK_IO.Put_Line (To_File, " has been ignored", 0); end Invalid_Option; ------------------------------- procedure Process_Option (Opt : E_Strings.T; To_File : SPARK_IO.File_Type) --# global in CommandLineData.Content; --# in Data_File; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives LexTokenManager.State from *, --# CommandLineData.Content, --# Data_File, --# Opt, --# SPARK_IO.File_Sys, --# To_File & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Data_File, --# LexTokenManager.State, --# Opt, --# To_File, --# XMLReport.State & --# XMLReport.State from *, --# CommandLineData.Content, --# Opt; is Option_Match : Boolean; --Val : LexTokenManager.Lex_String; --Status : Val_Status; procedure Margin --# global in To_File; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# To_File; is begin SPARK_IO.Put_String (To_File, " ", 0); end Margin; ---------------------------------------- procedure Separator --# global in To_File; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# To_File; is begin SPARK_IO.Put_String (To_File, " = ", 0); end Separator; ---------------------------------------- procedure Put_Val (To_File : in SPARK_IO.File_Type; Val : in LexTokenManager.Lex_String; Status : in Val_Status) --# global in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# LexTokenManager.State, --# Status, --# To_File, --# Val; is begin case Status is when OK_Val => E_Strings.Put_Line (File => To_File, E_Str => Maths.ValueToString (Maths.ValueRep (Val))); when 
Illegal_Val => SPARK_IO.Put_Line (To_File, "Illegal value - ignored", 0); when Missing_Val => SPARK_IO.Put_Line (To_File, "Value missing - ignored", 0); end case; end Put_Val; function Get_Val (Val : in LexTokenManager.Lex_String; Status : in Val_Status) return E_Strings.T --# global in LexTokenManager.State; is Tmp_String : E_Strings.T; begin case Status is when OK_Val => Tmp_String := Maths.ValueToString (Maths.ValueRep (Val)); when Illegal_Val => Tmp_String := E_Strings.Copy_String (Str => "Illegal value - ignored"); when Missing_Val => Tmp_String := E_Strings.Copy_String (Str => "Value missing - ignored"); end case; return Tmp_String; end Get_Val; --------------------------------------- procedure Output_Value (Data_File : in SPARK_IO.File_Type; Opt : in E_Strings.T; Valid : in Boolean) --# global in CommandLineData.Content; --# in To_File; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives LexTokenManager.State from *, --# CommandLineData.Content, --# Data_File, --# Opt, --# SPARK_IO.File_Sys, --# To_File, --# Valid & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Data_File, --# LexTokenManager.State, --# Opt, --# To_File, --# Valid, --# XMLReport.State & --# XMLReport.State from *, --# CommandLineData.Content, --# Opt, --# Valid; is Val : LexTokenManager.Lex_String; Status : Val_Status; begin if Valid then if CommandLineData.Content.XML then Get_Data_Value (Data_File => Data_File, Sort => Integer_Val, Val => Val, Status => Status); XMLReport.Compiler_Item (Item => Opt, Val => Get_Val (Val => Val, Status => Status), Report => To_File); else Margin; E_Strings.Put_String (File => To_File, E_Str => Opt); Separator; Get_Data_Value (Data_File => Data_File, Sort => Integer_Val, Val => Val, Status => Status); Put_Val (To_File => To_File, Val => Val, Status => Status); end if; end if; end Output_Value; begin -- Process_Option Option_Match := False; case E_Strings.Get_Element (E_Str => Opt, Pos => 1) is when 'i' | 'I' => case E_Strings.Get_Element (E_Str => Opt, Pos => 9) is when 'f' | 'F' => Option_Match := CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "integer'first"); Output_Value (Data_File => Data_File, Opt => Opt, Valid => Option_Match); when 'l' | 'L' => Option_Match := CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "integer'last"); Output_Value (Data_File => Data_File, Opt => Opt, Valid => Option_Match); when others => null; end case; when 'l' | 'L' => case E_Strings.Get_Element (E_Str => Opt, Pos => 14) is when 'f' | 'F' => Option_Match := CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "long_integer'first"); Output_Value (Data_File => Data_File, Opt => Opt, Valid => Option_Match); when 'l' | 'L' => Option_Match := CommandLineHandler.Check_Option_Name (Opt_Name => Opt, Str => "long_integer'last"); Output_Value (Data_File => Data_File, Opt => Opt, Valid => Option_Match); when others => null; end case; when others => null; end case; if not Option_Match then Margin; Invalid_Option (To_File => To_File, Opt => Opt); end if; end Process_Option; begin --Output_Target_Data_File if CommandLineData.Content.Target_Data then Open_File (Data_File => Data_File, File_OK => File_OK); if File_OK then if CommandLineData.Content.XML then XMLReport.Start_Section (Section => XMLReport.S_Compiler_Data, Report => To_File); loop Get_String (File => Data_File, Str => Option); exit when E_Strings.Get_Length (E_Str => Option) = 0; Process_Option (Opt => Option, To_File => To_File); end loop; 
XMLReport.End_Section (Section => XMLReport.S_Compiler_Data, Report => To_File); --# accept Flow, 10, Data_File, "Expected ineffective assignment"; Close_File (Data_File => Data_File); --# end accept; else SPARK_IO.New_Line (To_File, 2); SPARK_IO.Put_Line (To_File, "Target compiler data:", 0); loop Get_String (File => Data_File, Str => Option); exit when E_Strings.Get_Length (E_Str => Option) = 0; Process_Option (Opt => Option, To_File => To_File); end loop; --# accept Flow, 10, Data_File, "Expected ineffective assignment"; Close_File (Data_File => Data_File); --# end accept; end if; -- XML end if; end if; end Output_Target_Data_File; end TargetData; spark-2012.0.deb/examiner/sem-compunit-walkstatements-up_case.adb0000644000175000017500000001127511753202336024030 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.CompUnit.WalkStatements) procedure Up_Case (Node : in STree.SyntaxNode) is Unused1 : Dictionary.Symbol; Case_Flags : Typ_Case_Flags; Unused2 : Typ_Type_Bound; Unused3 : Typ_Type_Bound; Complete_ADT : CompleteCheck.T; Others_Found : Boolean; Current_Node : STree.SyntaxNode; begin --# accept Flow, 10, Unused1, "Expected ineffective assignment" & --# Flow, 10, Unused2, "Expected ineffective assignment" & --# Flow, 10, Unused3, "Expected ineffective assignment"; Case_Stack.Pop (Case_Flags => Case_Flags, Complete_ADT => Complete_ADT, Sym => Unused1, Lower_Bound => Unused2, Upper_Bound => Unused3); --# end accept; -- if there is an others clause record the fact and inform ADT Current_Node := Child_Node (Current_Node => Next_Sibling (Current_Node => Next_Sibling (Current_Node => Child_Node (Current_Node => Node)))); -- ASSUME Current_Node = sequence_of_statements OR NULL if Current_Node = STree.NullNode then -- ASSUME Current_Node = NULL Others_Found := False; elsif Syntax_Node_Type (Node => Current_Node) = SP_Symbols.sequence_of_statements then -- ASSUME Current_Node = sequence_of_statements Others_Found := True; -- check to see if case was already complete before others found if Case_Flags.Check_Completeness and then CompleteCheck.IsComplete (Complete_ADT) = CompleteCheck.Complete then ErrorHandler.Semantic_Warning (Err_Num => 11, Position => Node_Position (Node => Current_Node), Id_Str => LexTokenManager.Null_String); end if; -- now signal "others found" to completeness checker CompleteCheck.SeenOthers (Complete_ADT); else Others_Found := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = sequence_of_statements OR NULL in Up_Case"); end if; if Case_Flags.Check_Completeness then if 
Complete_ADT.Undeterminable and then not Others_Found then ErrorHandler.Semantic_Warning (Err_Num => 304, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); elsif CompleteCheck.IsComplete (Complete_ADT) = CompleteCheck.Incomplete then ErrorHandler.Semantic_Error (Err_Num => 408, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; end if; if Case_Flags.Signal_Out_Of_Range and then Case_Flags.Out_Of_Range_Seen then ErrorHandler.Semantic_Warning (Err_Num => 303, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; if Case_Flags.Warn_No_Others and then not Others_Found and then not (Case_Flags.Check_Completeness -- don't output and then Complete_ADT.Undeterminable) then -- 304 twice. ErrorHandler.Semantic_Warning (Err_Num => 304, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; if Case_Flags.Others_Mandatory and then not Others_Found then ErrorHandler.Semantic_Error (Err_Num => 411, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; --# accept Flow, 33, Unused1, "Expected to be neither referenced nor exported" & --# Flow, 33, Unused2, "Expected to be neither referenced nor exported" & --# Flow, 33, Unused3, "Expected to be neither referenced nor exported"; end Up_Case; spark-2012.0.deb/examiner/heap_storage.adb0000644000175000017500000000547311753202336017373 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- Heap_Storage -- -- Implementation -- -- Uses Ada.Containers.Vectors to offer an automatically-extending -- Vector. Implemented in Ada, not SPARK, so hidden. 
-------------------------------------------------------------------------------- package body Heap_Storage is --# hide Heap_Storage; ----------------- -- Initialize -- ----------------- procedure Initialize (Initial_Length : in SPARK.Ada.Containers.Count_Type; V : out Vector) is begin V := Vector'(Vec => Vectors.To_Vector (Ada.Containers.Count_Type (Initial_Length))); end Initialize; ------------------- -- Last_Index -- ------------------- function Last_Index (V : Vector) return Atom is begin return Atom (Vectors.Last_Index (V.Vec)); end Last_Index; ----------------- -- Get_Element -- ----------------- function Get_Element (V : in Vector; Index : in Atom) return Atom_Descriptor is begin return Vectors.Element (Container => V.Vec, Index => Index); end Get_Element; ----------------- -- Set_Element -- ----------------- procedure Set_Element (V : in out Vector; Index : in Atom; Value : in Atom_Descriptor) is begin Vectors.Replace_Element (Container => V.Vec, Index => Index, New_Item => Value); end Set_Element; ----------------- -- Append -- ----------------- procedure Append (V : in out Vector; Value : in Atom_Descriptor) is begin Vectors.Append (Container => V.Vec, New_Item => Value); end Append; end Heap_Storage; ././@LongLink0000000000000000000000000000015100000000000011562 Lustar rootrootspark-2012.0.deb/examiner/sem-wf_package_declaration-wf_package_specification-wf_visible-wf_deferred.adbspark-2012.0.deb/examiner/sem-wf_package_declaration-wf_package_specification-wf_visible-wf_deferred0000644000175000017500000002076411753202336033011 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Wf_Package_Declaration.Wf_Package_Specification.Wf_Visible) procedure Wf_Deferred (Node : in STree.SyntaxNode; Pack_Sym : in Dictionary.Symbol; Current_Scope : in Dictionary.Scopes) is Ident_List_Node, Type_Node, Next_Node : STree.SyntaxNode; It : STree.Iterator; Ident_Str : LexTokenManager.Lex_String; Type_Sym, The_Constant : Dictionary.Symbol; --------------------------------------------------------------------- procedure Wf_Local_Private_Type (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Type_Sym : out Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table & --# STree.Table, --# Type_Sym from CommandLineData.Content, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.type_mark; --# post STree.Table = STree.Table~; is Sym : Dictionary.Symbol; Ident_Node : STree.SyntaxNode; Ident_Str : LexTokenManager.Lex_String; begin Ident_Node := Last_Child_Of (Start_Node => Node); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Wf_Local_Private_Type"); Ident_Str := Node_Lex_String (Node => Ident_Node); Sym := Dictionary.LookupItem (Name => Ident_Str, Scope => Current_Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); if Dictionary.Is_Null_Symbol (Sym) then -- not declared or visible Type_Sym := Dictionary.GetUnknownTypeMark; ErrorHandler.Semantic_Error (Err_Num => 141, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); elsif Dictionary.IsType (Sym) and then Dictionary.TypeIsPrivate (TheType => Sym) then -- ok if not dotted if Next_Sibling (Child_Node (Child_Node (Node))) = STree.NullNode then -- no selector on type mark STree.Set_Node_Lex_String (Sym => Sym, Node => Ident_Node); Type_Sym := Sym; else -- shouldn't have selector on private type! 
Type_Sym := Dictionary.GetUnknownTypeMark; ErrorHandler.Semantic_Error (Err_Num => 9, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); end if; else -- not a local private type Type_Sym := Dictionary.GetUnknownTypeMark; ErrorHandler.Semantic_Error (Err_Num => 89, Reference => 12, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); end if; end Wf_Local_Private_Type; begin -- Wf_Deferred Ident_List_Node := Child_Node (Current_Node => Node); -- ASSUME Ident_List_Node = identifier_list SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_List_Node) = SP_Symbols.identifier_list, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_List_Node = identifier_list in Wf_Deferred"); Type_Node := Next_Sibling (Current_Node => Ident_List_Node); -- ASSUME Type_Node = type_mark SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Type_Node) = SP_Symbols.type_mark, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Type_Node = type_mark in Wf_Deferred"); case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => -- Can only be a private type in SPARK83 Wf_Local_Private_Type (Node => Type_Node, Current_Scope => Current_Scope, Type_Sym => Type_Sym); when CommandLineData.SPARK95_Onwards => -- Any type allowed in 95 onwards Wf_Type_Mark (Node => Type_Node, Current_Scope => Current_Scope, Context => Dictionary.ProgramContext, Type_Sym => Type_Sym); end case; It := Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Ident_List_Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.identifier and --# Next_Node = Get_Node (It); Ident_Str := Node_Lex_String (Node => Next_Node); if Dictionary.IsDefined (Name => Ident_Str, Scope => Current_Scope, Context => Dictionary.ProofContext, Full_Package_Name => False) then ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Ident_Str); elsif Dictionary.IsPredefinedSuspensionObjectType (Type_Sym) or Dictionary.IsProtectedTypeMark (Type_Sym) then ErrorHandler.Semantic_Error (Err_Num => 903, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Type_Node), Id_Str => LexTokenManager.Null_String); else Dictionary.Add_Deferred_Constant (Name => Ident_Str, Type_Mark => Type_Sym, Type_Reference => Dictionary.Location'(Start_Position => Node_Position (Node => Type_Node), End_Position => Node_Position (Node => Type_Node)), Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Next_Node), End_Position => Node_Position (Node => Next_Node)), The_Package => Pack_Sym, TheConstant => The_Constant); STree.Add_Node_Symbol (Node => Next_Node, Sym => The_Constant); end if; It := STree.NextNode (It); end loop; end Wf_Deferred; spark-2012.0.deb/examiner/errors.htm0000644000175000017500000064263411753202337016317 0ustar eugeneugen SPARK Errors

*** Syntax Error : ";" expected.

If this is reported at the end of the input file it may well be caused by the misspelling of an identifier in a hide directive. The parser then skips all the following text looking for the misspelled identifier but reaches the end of the file first, where it reports a syntax error.

*** Syntax Error : No APRAGMA can be start with reserved word "IS".

This can occur when a stub for an embedded subprogram is wrongly terminated by a semicolon.

*** Syntax Error : No complete PROCEDURE_SPECIFICATION can be followed by ANNOTATION_START here.

This can occur when the reserved word body has been omitted from the declaration of a package body. This error will occur at the annotation placed between the specification and the reserved word is of the first subprogram.

*** Syntax Error : No complete PROCEDURE_SPECIFICATION can be followed by reserved word "IS" here.

This can occur when the reserved word body has been omitted from the declaration of a package body. This error will occur at the reserved word is which introduces the body of the first subprogram.

*** Syntax Error : reserved word "INHERIT" expected.

This occurs where the annotation on a subprogram body is placed after the reserved word is instead of before it.
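
As an illustration (Increment is a hypothetical procedure), the annotation belongs immediately before the reserved word is:

   procedure Increment (X : in out Integer)
   --# derives X from X;   -- annotation precedes "is"
   is
   begin
      X := X + 1;
   end Increment;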

*** Syntax Error : No complete SIMPLE_EXPRESSION can be followed by ")" here.

This can occur in an aggregate expression when a mixture of named and positional association is used.

*** Syntax Error : No complete SIMPLE_EXPRESSION can be followed by "," here.

This can occur in an aggregate expression when a mixture of named and positional association is used.
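
For example (the type Triple and its constants are hypothetical), an aggregate must use either all positional or all named association:

   subtype Index is Integer range 1 .. 3;
   type Triple is array (Index) of Integer;
   A : constant Triple := Triple'(1, 2, 3);                   -- all positional
   B : constant Triple := Triple'(1 => 1, 2 => 2, 3 => 3);    -- all named
   -- C : constant Triple := Triple'(1, 2, 3 => 3);           -- mixed: provokes this error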

*** Semantic Error : 1 : The identifier YYY.XXX is either undeclared or not visible at this point.

If the identifier is declared in a separate (or parent) package, the package must be included in an inherit clause and the identifier prefixed with the package name. Ensure that there are no errors in the declaration of the identifier.

*** Semantic Error : 2 : XXX does not denote a formal parameter for YYY.

*** Semantic Error : 3 : Incorrect number of actual parameters for call of subprogram XXX.

*** Semantic Error : 4 : More than one parameter association is given for formal parameter XXX.

*** Semantic Error : 5 : Illegal use of identifier XXX.

Usually associated with the use of an identifier other than a package name as a prefix in a selected component.

*** Semantic Error : 6 : Identifier XXX is not the name of a variable.

*** Semantic Error : 7 : Identifier XXX is not the name of a procedure.

*** Semantic Error : 8 : There is no field named XXX in this entity.

Issued when the selector in a selected component of a record references a non-existent field.

*** Semantic Error : 9 : Selected components are not allowed for XXX.

Occurs if the prefix of a selected component representing either a procedure in a procedure call statement or a type mark is not a package. Also occurs if a selector is applied in an expression to an object which is not a record variable.

*** Semantic Error : 10 : Illegal redeclaration of identifier XXX.

*** Semantic Error : 11 : There is no package declaration for XXX.

Issued if a package body is encountered for which there is no package specification.

*** Semantic Error : 12 : Own variable XXX can only be completed by a variable declaration, not a constant.

If the object in question is really a constant, then remove it from the enclosing package's own variable annotation.

*** Semantic Error : 13 : A body for subprogram XXX has already been declared.

*** Semantic Error : 14 : Illegal parent unit name.

Issued if the name in a "separate" clause of a subunit does not correctly identify a compilation unit. Common causes of this error are a syntax error in the parent unit or omitting the parent unit specification and/or parent unit body entries from the index file.

*** Semantic Error : 15 : The stub for XXX is either undeclared or cannot be located.

Common causes of this error are an error in the declaration of the stub or the omission of the parent unit body from the index file.

*** Semantic Error : 16 : A body for package XXX has already been declared.

*** Semantic Error : 17 : A body stub for package XXX has already been declared.

*** Semantic Error : 18 : Identifier XXX is not the name of a package.

*** Semantic Error : 19 : Identifier XXX is not the name of a procedure.

*** Semantic Error : 20 : Illegal operator symbol.

Issued if a renaming declaration contains a non-existent operator.

*** Semantic Error : 21 : This entity is not an array.

Issued if an attempt is made to index into a name which does not represent an array.

*** Semantic Error : 22 : The type in this declaration is not consistent with the previous declaration of XXX.

Occurs when the type given in the Ada declaration of an own variable differs from that "announced" in the package's own variable clause.

*** Semantic Error : 23 : No parameter association is given for formal parameter XXX.

*** Semantic Error : 24 : The identifier XXX (exported by called subprogram) is not visible at this point.

When a procedure is called any global variables exported by that procedure must be visible at the point of call. This error message indicates that the global variable concerned is not visible. It may be that it needs to be added to the global annotation of the procedure containing the call (or some further enclosing subprogram) or it may be that an inherit clause is missing from the package containing the call.
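
A hedged sketch, assuming a hypothetical package P whose procedure Do_Update is annotated with --# global out State; the calling package must inherit P and the caller must make P.State visible in its own annotation:

   procedure Refresh
   --# global out P.State;
   --# derives P.State from ;
   is
   begin
      P.Do_Update;
   end Refresh;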

*** Semantic Error : 25 : The identifier XXX (imported by called subprogram) is not visible at this point.

When a procedure is called any global variables imported by that procedure must be visible at the point of call. This error message indicates that the global variable concerned is not visible. It may be that it needs to be added to the global annotation of the subprogram containing the call (or some further enclosing subprogram) or it may be that an inherit clause is missing from the package containing the call.

*** Semantic Error : 26 : The deferred constant XXX does not have an associated full definition.

Issued at the end of a package specification if no full declaration has been supplied for a deferred constant declared in the package specification.

*** Semantic Error : 27 : The private type XXX does not have an associated full definition.

Issued at the end of a package specification if no full declaration has been supplied for a private type declared in the package specification.

*** Semantic Error : 28 : The own variable XXX does not have a definition.

Issued at the end of a package body if an own variable announced in the package specification has neither been given an Ada declaration nor refined.

*** Semantic Error : 29 : The subprogram XXX, declared in the package specification, does not have an associated body.

*** Semantic Error : 30 : Attribute XXX is not yet implemented in the Examiner.

The attribute is identified in Annex K of the SPARK 95 report as a valid SPARK 95 attribute but the Examiner does not currently support it. It is possible to work round the omission by putting the use of the attribute inside a suitable function which is hidden from the Examiner.

*** Semantic Error : 31 : The prefix of this attribute is not an object or type.

*** Semantic Error : 32 : Illegal type conversion.

Likely causes are type conversions involving record types or non-convertible arrays.

*** Semantic Error : 33 : Illegal aggregate.

Issued if the prefix of an aggregate is not a composite type.

*** Semantic Error : 34 : Illegal procedure call.

Issued if a call is made to a user-defined subprogram in a package initialization part.

*** Semantic Error : 35 : Binary operator is not declared for types XXX and YYY.

Indicates use of an undeclared binary operator; this message means that the operator used is not defined for the combination of types on its two sides, e.g. attempting to add an integer to an enumeration literal.

*** Semantic Error : 36 : Expression is not static.

*** Semantic Error : 37 : Expression is not constant.

*** Semantic Error : 38 : Expression is not of the expected type.

*** Semantic Error : 39 : Illegal use of unconstrained type.

An unconstrained array type or variable of such a type is illegally used. Use of unconstrained arrays in SPARK is limited to passing them as parameters, indexing into them and taking attributes of them. This message also arises if a string literal is used as an actual parameter where the formal parameter is a string subtype. In this case, the error can be removed by qualifying the string literal with the subtype name.
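
For instance (the subtype and function names below are hypothetical), qualifying the string literal removes the error:

   subtype Name_Range is Positive range 1 .. 5;
   subtype Name_5 is String (Name_Range);
   function Length_Of (N : Name_5) return Natural;
   ...
   L := Length_Of (Name_5'("Hello"));   -- qualified literal: accepted
   -- L := Length_Of ("Hello");         -- unqualified literal: rejected under this rule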

*** Semantic Error : 40 : Numeric or Time_Span type required.

This operator is only defined for numeric types and, if the Ravenscar Profile is selected, for type Ada.Real_Time.Time_Span.

*** Semantic Error : 41 : Array type required.

Issued if a subtype declaration taking the form of a constrained subtype of an unconstrained array type is encountered but with a type mark which does not represent an array.

*** Semantic Error : 42 : Incompatible types.

Issued when a name represents an object which is not of the required type.

*** Semantic Error : 43 : Range is not constant.

*** Semantic Error : 44 : Scalar type required.

The bounds of an explicit range must be scalar types.

*** Semantic Error : 45 : Range is not static.

*** Semantic Error : 46 : Discrete type required.

*** Semantic Error : 47 : The definition of this type contains errors which may make this array definition invalid.

Issued if an array type definition is encountered where one or more of the index types used in the definition contained errors in its original declaration. For example, SPARK requires array index bounds to be constant (known at compile time) so an attempt to use an illegal subtype with variable bounds as an array index will generate this message.

*** Semantic Error : 49 : Attribute XXX takes only one argument.

Only SPARK 95 attributes 'Min and 'Max require two arguments.

*** Semantic Error : 50 : Initializing expression must be constant.

To assign a non-constant expression to a variable, an assignment statement in the body of the program unit (following the 'begin') must be used.

*** Semantic Error : 51 : Arrays may not be ordered.

Issued if an ordering operator such as "<" is encountered between objects of an array type other than string or a constrained subtype of string.

*** Semantic Error : 52 : Only Scalar, String and Time types may be ordered.

Ordering operators are only defined for scalar types and type String plus, if the Ravenscar Profile is selected, types Time and Time_Span in package Ada.Real_Time.

*** Semantic Error : 53 : Illegal others clause.

In SPARK record aggregates may not contain an others clause.

*** Semantic Error : 54 : Illegal attribute: XXX.

Issued when an attribute not supported by SPARK is used.

*** Semantic Error : 55 : Attribute XXX takes no argument.

*** Semantic Error : 56 : Argument expected.

*** Semantic Error : 57 : Fixed type definition must have associated range constraint.

*** Semantic Error : 58 : XXX expected, to repeat initial identifier.

Occurs at the end of a package, subprogram, protected type, task type or loop if the terminal identifier does not match the name or label originally given.

*** Semantic Error : 59 : Composite subtype definition may not have associated range constraint.

A subtype of the form applicable to a subrange of a scalar type has been encountered but the type provided is not a scalar type.

*** Semantic Error : 60 : Illegal choice in record aggregate.

In SPARK record aggregates may not contain multiple choices; each field must be assigned a value individually.
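
For example (Pair is a hypothetical record type):

   type Pair is record
      X : Integer;
      Y : Integer;
   end record;

   P : constant Pair := Pair'(X => 1, Y => 2);    -- each field is given its own value
   -- Pair'(X | Y => 0) and Pair'(others => 0) are both rejected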

*** Semantic Error : 61 : Illegal occurrence of body stub - a body stub may only occur in a compilation unit.

*** Semantic Error : 62 : A body for the embedded package XXX is required.

Issued if an embedded package declares subprograms or own variables and no body is provided.

*** Semantic Error : 63 : XXX is not a type mark.

*** Semantic Error : 64 : Parameters of function subprograms must be of mode in.

*** Semantic Error : 65 : Formal parameters of renamed operators may not be renamed.

The names of the parameters used in renaming declarations may not be altered from Left, Right for binary operators and Right for unary operators. These are the names given for the parameters in the ARM, and the SPARK Definition requires that parameter names are not changed.

*** Semantic Error : 66 : Unexpected package initialization - no own variables of package XXX require initialization.

Either the package does not have an initializes annotation or all the own variables requiring initialization were given values at the point of declaration.

*** Semantic Error : 67 : Illegal machine code insertion. Machine code functions are not permitted in SPARK 83.

This is an Ada 83 rule. Machine code can only be used in procedures.

*** Semantic Error : 68 : Illegal operator renaming - operators are defined on types not subtypes.

Issued if an attempt is made to rename an operator using a subtype of the type for which it was originally implicitly declared.

*** Semantic Error : 69 : pragma XXX has two parameters.

*** Semantic Error : 70 : pragma Import expected.

*** Semantic Error : 70 : pragma Interface expected.

*** Semantic Error : 71 : This expression does not represent the expected subprogram or variable name XXX.

Issued if the name supplied in a pragma interface, import or attach_handler does not match the name of the associated subprogram or variable.

*** Semantic Error : 72 : Unexpected pragma Import.

Pragma import may only occur in a body stub, or immediately after a subprogram declaration in the visible part of a package, or immediately after a variable declaration.

*** Semantic Error : 72 : Unexpected pragma Interface.

Pragma interface may only occur in a body stub or immediately after a subprogram declaration in the visible part of a package.

*** Semantic Error : 73 : XXX has already been declared or refined.

Issued if an Ada declaration is given for an own variable which has been refined, or in a refinement clause if an own variable is refined more than once.

*** Semantic Error : 74 : XXX does not occur in the package own variable list.

A subject of a refinement definition of a package must be an own variable of that package.
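
A minimal sketch of legal refinement, using a hypothetical package Stack whose abstract own variable State is refined onto a single concrete constituent:

   package Stack
   --# own State;
   --# initializes State;
   is
      procedure Clear;
      --# global out State;
      --# derives State from ;
   end Stack;

   package body Stack
   --# own State is Ptr;          -- the subject, State, is an own variable of Stack
   is
      Ptr : Natural := 0;

      procedure Clear
      --# global out Ptr;         -- refined annotation uses the constituent
      --# derives Ptr from ;
      is
      begin
         Ptr := 0;
      end Clear;
   end Stack;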

*** Semantic Error : 75 : Illegal use of inherited package.

Issued if an attempt is made to refine an own variable onto an own variable of a non-embedded package.

*** Semantic Error : 76 : Identifier XXX is already declared and cannot be the name of an embedded package.

Issued when a refinement clause in a package body attempts to name an embedded package own variable as a refinement constituent and the name given for the embedded package is already in use.

*** Semantic Error : 77 : Variable XXX should occur in this own variable clause.

Occurs in the own variable clause of a package embedded in another package if an own variable which is a refinement constituent of an own variable of the enclosing package is omitted.

*** Semantic Error : 78 : Initialization of own variable XXX is ineffective.

Issued if an own variable occurs in the initialization clause of an embedded package and the own variable concerned is a refinement constituent of another own variable which is not listed in the initialization specification of its package.

*** Semantic Error : 79 : Variable XXX should occur in this initialization specification.

Occurs in the initialization clause of a package embedded in another package if an own variable which is a refinement constituent of an initialized own variable of the enclosing package is omitted.

*** Semantic Error : 80 : Unexpected own variable clause - no variable in this clause is a refinement constituent.

*** Semantic Error : 81 : Own variable clause expected - own variables of this package occur as refinement constituents.

*** Semantic Error : 82 : Unexpected initialization specification - no own variables of this package require initialization.

An own variable initialization clause and that of its refinement constituents must be consistent.

*** Semantic Error : 83 : Initialization specification expected - own variables of this package require initialization.

Issued if an own variable does not occur in the initialization clause of an embedded package and the own variable concerned is a refinement constituent of another own variable which is listed in the initialization clause of its package.

*** Semantic Error : 84 : The refinement constituent XXX does not have a declaration.

Issued at the end of a package if a refinement constituent of a refined own variable has not been given an Ada declaration or further refined.

*** Semantic Error : 85 : XXX is not a constituent of any abstract own variable appearing in the earlier global definition for this subprogram.

A variable XXX which has occurred in a refined global annotation is neither a variable that occurred in the earlier global definition nor a refinement constituent of any such variable.

*** Semantic Error : 86 : At least one constituent of XXX was expected in this refined global definition.

If the global annotation of a procedure specification contains an own variable and that own variable is later refined then at least one refinement constituent of the own variable shall appear in the second global annotation supplied for the procedure body.

*** Semantic Error : 87 : Refined global definition expected for subprogram XXX.

A global definition containing abstract own variables was given in the definition for subprogram XXX, in a package specification. A refined global definition is required in the package body.

*** Semantic Error : 88 : Variable XXX is not a refinement constituent.

*** Semantic Error : 89 : XXX is not a private type declared in this package.

*** Semantic Error : 90 : This operator may not be applied to ranges.

*** Semantic Error : 91 : Ranges may not be assigned.

*** Semantic Error : 92 : Named association may not be used here.

*** Semantic Error : 93 : Number of index expressions differs from number of dimensions of array XXX.

*** Semantic Error : 94 : Condition is not boolean.

Issued anywhere a boolean expression is required (e.g. in if, exit and while statements) and the expression provided is not of type boolean.

*** Semantic Error : 95 : Type mark expected.

*** Semantic Error : 96 : Attribute XXX is not valid with this prefix.

*** Semantic Error : 97 : Attribute BASE may only appear as a prefix.

'BASE may only be used as a prefix to another attribute.

*** Semantic Error : 98 : This expression is not a range.

*** Semantic Error : 99 : Unconstrained array expected.

Occurs if a subtype is declared of an array which is already constrained.

*** Semantic Error : 100 : Floating point type mark expected.

*** Semantic Error : 101 : Fixed point type mark expected.

*** Semantic Error : 102 : This is not the name of a field of record XXX.

*** Semantic Error : 103 : A value has already been supplied for field XXX.

*** Semantic Error : 104 : No value has been supplied for field XXX.

*** Semantic Error : 105 : More values have been supplied than number of fields in record XXX.

*** Semantic Error : 106 : Range is not of the expected type.

*** Semantic Error : 107 : Expression is not of the expected type. Actual type is XXX. Expected type is YYY.

*** Semantic Error : 108 : Expression is not of the expected type. Expected any Integer type.

*** Semantic Error : 109 : Expression is not of the expected type. Expected any Real type.

*** Semantic Error : 110 : Use type clauses following an embedded package are not currently supported by the Examiner.

*** Semantic Error : 111 : Package renaming is not currently supported by the Examiner.

*** Semantic Error : 112 : A use type clause may not appear here. They are only permitted as part of a context clause or directly following an embedded package specification.

*** Semantic Error : 113 : Private subprogram declarations are not permitted in SPARK 83.

Private subprograms would not be callable in SPARK 83 and are therefore not permitted; they may be declared and called in SPARK 95.

*** Semantic Error : 114 : Subtype mark or Range may not be used in an expression in this context.

A subtype mark or an explicit Range attribute may not be used in a context where a simple expression is expected.

*** Semantic Error : 115 : In a package body, an own variable annotation must include one or more refinement constituents.

Annotation should be of the form 'own S is A, B, C;'.

*** Semantic Error : 116 : View conversion to own type is not permitted in target of an assignment.

*** Semantic Error : 117 : Aggregate must be qualified with subtype mark.

Aggregates are qualified expressions so they must be prefixed with a subtype mark. An exception is made in the case of aggregate assignments to unconstrained arrays as the rules of Ada do not permit unconstrained array aggregates to be qualified.
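
For example (Vec is a hypothetical constrained array type):

   subtype Index is Integer range 1 .. 3;
   type Vec is array (Index) of Integer;
   V : Vec;
   ...
   V := Vec'(others => 0);   -- qualified with the subtype mark
   -- V := (others => 0);    -- rejected unless the target is a one-dimensional unconstrained array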

*** Semantic Error : 118 : Aggregate assignment to unconstrained multi-dimensional array not permitted.

Unqualified aggregates may only be used in assignments to one-dimensional unconstrained arrays. SPARK does not permit aggregate assignment to multi-dimensional unconstrained arrays.

*** Semantic Error : 119 : Unary operator is not declared for type XXX.

Indicates use of an undeclared unary operator; this message means that the operator used is not defined for the type on its right hand side, e.g. attempting to negate an enumeration literal.

*** Semantic Error : 120 : Pragma import not allowed here because variable XXX is already initialized. See ALRM B.1(24).

*** Semantic Error : 121 : 'Flow_Message' or 'Warning_Message' expected.

The identifier indicating what kind of message to justify must be either 'Flow_Message' or 'Warning_Message' or some unique abbreviation of them such as 'Fl' or even 'F'. Case is ignored.

*** Semantic Error : 122 : Error or warning number expected.

This item should be an integer literal representing the error or warning message that is being marked as expected.

*** Semantic Error : 123 : This warning number may not appear in an accept annotation.

It does not make sense to allow certain warnings to be justified with the accept annotation. In particular, attempting to justify warnings raised by the justification system itself could lead to some special kind of recursive hell that we would not wish to enter.

*** Semantic Error : 124 : Incorrect number of names in accept annotation: should be 0.

This class of error does not reference any variables, and therefore requires no names.

*** Semantic Error : 125 : Incorrect number of names in accept annotation: should be 1.

This class of error references one variable, and therefore requires one name.

*** Semantic Error : 126 : Incorrect number of names in accept annotation: should be 2.

This class of error references two variables, and therefore requires two names. Two names are need to justify expected information flow messages such as "X is not derived from Y". Note that for messages of this kind the accept annotation should list the names in the order "export, import".

*** Semantic Error : 127 : Incorrect number of names in accept annotation: should be 0 or 1.

This class of error references either zero or one variable, and therefore requires either zero or one name. An ineffective assignment error requires the name of variable being assigned to. An ineffective statement error has no name associated with it.
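
For instance, to justify an expected ineffective assignment to a variable (Read_Sensor, Reading and Status are hypothetical), the accept annotation names that variable:

   --# accept Flow, 10, Status, "Status is not used on this path";
   Read_Sensor (Reading, Status);
   --# end accept;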

*** Semantic Error : 128 : Incorrect number of names in accept annotation: should be 1 or 2.

This class of error references either one or two variables, and therefore requires either one or two names. One name is required when the export is a function return value.

*** Semantic Error : 129 : Assignment to view conversion is not currently implemented.

*** Semantic Error : 130 : A type from the current package should not appear in a use type clause.

*** Semantic Error : 131 : The package name XXX should appear in a with clause preceding the use type clause.

*** Semantic Error : 132 : The unit name or the name of an enclosing package of the unit should not appear in its with clause.

A package should not 'with' itself and a subunit should not 'with' the package (or main program) which declares its stub.

*** Semantic Error : 133 : Name in with clause is locally redeclared.

*** Semantic Error : 134 : A package name should not appear in its own inherit clause.

*** Semantic Error : 135 : The package XXX is undeclared or not visible, or there is a circularity in the list of inherited packages.

Possible causes of this error are an error in the inherited package specification or omitting an entry for the package specification from the index file or circular inheritance.

*** Semantic Error : 136 : The own variable XXX is not declared in the own variable clause of the corresponding package declaration.

A refinement clause of a package body defines the constituent parts of own variables given in the own variable clause of the corresponding package declaration.

*** Semantic Error : 137 : The child package XXX is either undeclared or not visible at this point.

Possible causes of this error are an error in the child package specification or omitting the child from the parent's component list in the index file or omitting the child specification entry from the index file.

*** Semantic Error : 138 : Child package own variable XXX does not appear in the own variable clause of the child package.

A constituent of a refinement clause which is defined in a child package must be an own variable of the child package.

*** Semantic Error : 139 : The variable XXX is not declared in the own variable clause of this package.

A package can only initialize variables declared in its own variable clause.

*** Semantic Error : 140 : The predecessor package XXX is either undeclared or not visible at this point.

The parent of a child package must be a library package and must be declared prior to a child package. If using an index file the parent must have an entry in the index file and the child package must be listed as a component of the parent package.

*** Semantic Error : 141 : The private type XXX is either undeclared or not visible at this point.

*** Semantic Error : 142 : The subprogram prefix XXX is either undeclared or not visible at this point.

The prefix should appear in the inherit clause of the current package.

*** Semantic Error : 143 : The subprogram YYY.XXX is either undeclared or not visible at this point.

*** Semantic Error : 144 : The dotted name YYY.XXX is either undeclared or not visible at this point.

The name must denote an entire variable or an own variable of a package. If the variable or own variable is declared in a separate (or parent) package, the package must be included in an inherit clause and the identifier prefixed with the package name.

*** Semantic Error : 145 : The identifier YYY.XXX is either undeclared or not visible at this point.

The identifier should be a typemark. If the typemark is declared in a separate (or parent) package, the package must be included in an inherit clause and the identifier prefixed with the package name. Ensure that there are no errors in the declaration of the typemark.

*** Semantic Error : 148 : The abstract proof type XXX may not be used to define an own variable in another package.

Own variables may be "type announced" as being of an abstract proof type only where that type is declared later in the same package. Thus --# own State : T; is legal if --# type T is abstract; appears later in the package; however, --# own State : P.T; is illegal if T is an abstract proof type declared in remote package P.

*** Semantic Error : 149 : More than one own variable has been announced as being of type XXX which may not therefore be declared as an abstract proof type.

Occurs when an own variable clause announces more than one own variable as being of a type XXX and XXX is later declared as being of an abstract proof type. Each abstract own variable must be of a unique type.

*** Semantic Error : 150 : Entire variable expected. The names of constants never appear in mandatory annotations.

Issued when the name of a constant is found in a mandatory annotation such as a global or derives annotation. Constants should not appear in such annotations.

*** Semantic Error : 151 : The variable XXX does not occur either in the package own variable list or as a refinement constituent.

A variable declared in a package must have been previously announced as either an own variable or as a concrete refinement constituent of an own variable.

*** Semantic Error : 152 : The number of formal parameters is not consistent with the previous declaration of XXX.

*** Semantic Error : 153 : The declaration of formal parameter XXX is not consistent with the subprogram's previous declaration.

Issued if the name, type or parameter mode of a parameter is different in the subprogram body declaration from that declared originally.

*** Semantic Error : 154 : The subprogram or task body XXX does not have an annotation.

A subprogram or task body must have a global annotation if it references global variables; a procedure or task body must have a dependency relation to perform information flow analysis.
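
For example, a procedure body that both reads and writes its parameters carries a dependency relation such as (Swap is hypothetical):

   procedure Swap (X, Y : in out Integer)
   --# derives X from Y &
   --#         Y from X;
   is
      T : Integer;
   begin
      T := X;
      X := Y;
      Y := T;
   end Swap;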

*** Semantic Error : 155 : Unexpected annotation - all annotations required for procedure or task body XXX have already occurred.

Do not repeat global or derives annotations in the body (or body stub) of a subprogram, entry or task except for state (own variable) refinement.

*** Semantic Error : 156 : Entire variable expected.

Issued when an identifier which SPARK requires to be an entire variable represents something other than this. Most commonly this message occurs when a component of a structured variable appears in a core annotation.

*** Semantic Error : 157 : The name XXX already appears in the global variable list.

*** Semantic Error : 158 : XXX is a formal parameter of this subprogram.

Issued in a global annotation if it names a formal parameter of the subprogram.

*** Semantic Error : 159 : The name XXX has already appeared as an exported variable.

*** Semantic Error : 160 : The name XXX already appears in the list of imported variables.

*** Semantic Error : 161 : Exportation of XXX is incompatible with its parameter mode.

Issued if a parameter appears as an export to a procedure when it is of parameter mode in.

*** Semantic Error : 162 : Importation of XXX is incompatible with its parameter mode.

Issued if a parameter appears as an import to a procedure when it is of parameter mode out.

*** Semantic Error : 163 : Subprogram XXX cannot be called from here.

SPARK contains rules to prevent construction of programs containing recursive subprogram calls; this error message occurs if a procedure or function is called before its body has been declared. Re-ordering of subprogram bodies in the package concerned will be required.

*** Semantic Error : 165 : This parameter is overlapped by another one, which is exported.

Violation of the anti-aliasing rule.
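
A sketch of the kind of call that is rejected (Copy, A and B are hypothetical):

   procedure Copy (Source : in Integer; Target : out Integer);
   --# derives Target from Source;
   ...
   Copy (Source => A, Target => A);   -- rejected: A also overlaps the exported parameter Target
   Copy (Source => B, Target => A);   -- accepted: no overlap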

*** Semantic Error : 166 : This parameter is overlapped by an exported global variable.

Violation of the anti-aliasing rule.

*** Semantic Error : 167 : Imported variable XXX is not named in the initialization specification of its package.

Issued when an own variable which is imported into the main program procedure (or a task when the Ravenscar profile is enabled) has not been declared as being initialized by its package. At the main program level the only imports that are permitted are initialized own variables of inherited packages. There are two possible cases to consider: (1) the main program should be importing the variable in which case it should be annotated in its package with --# initializes (and, of course, actually initialized in some way) or be an external variable or protected variable which is implicitly initialized; or (2) the own variable concerned is not initialized at elaboration, should not therefore be considered an import to the main program and should be removed from the main program's import list.

*** Semantic Error : 168 : XXX is a loop parameter, whose updating is not allowed.

*** Semantic Error : 169 : Global variables of function subprograms must be of mode in.

It is an important property of SPARK that functions cannot have side-effects; therefore only the reading of global variables is permitted. It is usually convenient to omit modes from function global annotations, but use of mode 'in' is permitted.
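
For example, inside a package body (Total is a hypothetical package-body variable):

   Total : Natural := 0;

   function Current_Total return Natural
   --# global in Total;    -- functions may only read globals; mode "in" (or no mode) is allowed
   is
   begin
      return Total;
   end Current_Total;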

*** Semantic Error : 170 : XXX is a formal parameter of mode in, whose updating is not allowed.

*** Semantic Error : 171 : XXX is a formal parameter of mode out, whose value cannot be read.

*** Semantic Error : 172 : The actual parameter associated with an exported formal parameter must be an entire variable.

Issued if an actual parameter which is an array element is associated with an exported formal parameter in a procedure call. Exported actual parameters must be either entire variables or record fields.

*** Semantic Error : 173 : This exported parameter is named in the global definition of the procedure.

Violation of the anti-aliasing rule.

*** Semantic Error : 174 : XXX is not an own variable.

Occurs in initialization specifications if something other than a variable is listed as being initialized.

A justification of an error requires the actual variables named in the error message to be referenced. The keyword "all" can only be used with language profiles for auto-code generators such as SCADE KCG. Such profiles are only available with the SPARK Pro Toolset.

*** Semantic Error : 176 : XXX does not have a derives annotation so it may not be called from YYY which does have a derives annotation.

When analysing with flow=auto, a procedure or entry without a derives annotation may not be called by a procedure, task or entry with a derives annotation. This is because the body of the caller must be checked against its derives annotation. In order to calculate the correct dependency relation for the body of the caller there must be derives annotations present on all called procedures or entries.

*** Semantic Error : 180 : Entire composite constant expected.

Issued when an identifier which SPARK requires to be an entire composite constant represents something other than this.

*** Semantic Error : 181 : Invalid policy for constant proof rule generation.

*** Semantic Error : 182 : Rule Policy for YYY.XXX already declared in current scope.

Issued when a rule policy has already been declared for this constant within this declarative region. This rule policy will be ineffective.

*** Semantic Error : 190 : The name XXX already appears in the inherit clause.

*** Semantic Error : 191 : The name XXX already appears in the with clause.

*** Semantic Error : 200 : The parameter XXX is neither imported nor exported.

Each formal parameter of a subprogram shall be imported or exported or both.

*** Semantic Error : 201 : The global variable XXX is neither imported nor exported.

Every variable in a global definition must also appear in the associated derives annotation where it will be either imported or exported or both.

*** Semantic Error : 250 : The 'Size value for type XXX has already been set.

*** Semantic Error : 251 : The attribute value for XXX'Size must be of an integer type.

*** Semantic Error : 252 : The attribute value for XXX'Size must be a static simple expression.

The value of 'Size must be static and must be of an integer type.

*** Semantic Error : 253 : The attribute value for XXX'Size must not be negative.

The value of 'Size must be a positive integer or zero.

*** Semantic Error : 254 : The Size attribute can only be specified for a first subtype.

Setting 'Size for a user-defined non-first subtype is not permitted. See Ada95 LRM 13.3(48).

*** Semantic Error : 255 : The Address attribute can only be specified for a variable, a constant, or a program unit.

Ada95 LRM Annex N.31 defines a program unit to be either a package, a task unit, a protected unit, a protected entry, a generic unit, or an explicitly declared subprogram other than an enumeration literal.

*** Semantic Error : 273 : Own variable XXX may not be refined because it was declared with a type mark which has not subsequently been declared as an abstract proof type.

Where a type mark is included in an own variable declaration it indicates that the own variable will either be of a concrete type of that name (which may be either already declared or be declared later in the package) or of an abstract proof type declared in the package specification. In the former case the refinement is illegal because own variables of concrete Ada types may not be refined. In the latter case it is legal; however, no suitable proof type declaration has been found in this case.

*** Semantic Error : 300 : Renaming declarations are not allowed here.

A renaming declaration must be the first declarative item of a package body or main program or it must be placed immediately after the declaration of an embedded package.

*** Semantic Error : 301 : Renaming or use type declarations here can only rename subprograms in package XXX.

A renaming declaration may be placed immediately after the declaration of an embedded package; in this case it may only rename subprograms declared in that package.

*** Semantic Error : 302 : The subprogram specification in this renaming declaration is not consistent with the declaration of subprogram XXX.

Issued in a subprogram renaming declaration if it contains parameter names, numbers or types which differ from those originally declared.

*** Semantic Error : 303 : An operator can only be renamed by the same operator.

Issued if a renaming declaration has a different operator on each side of the reserved word RENAMES.

*** Semantic Error : 304 : A renaming declaration for operator XXX is not allowed.

*** Semantic Error : 305 : The specification in this renaming declaration is not consistent with the implicit declaration of operator XXX.

Issued in an operator renaming declaration if it contains types which differ from those applicable to the operator being renamed.

*** Semantic Error : 306 : Operator XXX is already visible.

Occurs in an operator renaming declaration if an attempt is made to rename an operator which is already visible. (The message will also appear as a secondary consequence of trying to rename an operator between undeclared types.)

*** Semantic Error : 307 : The implicit declaration of this operator does not occur in package XXX.

*** Semantic Error : 308 : Type is limited.

Issued if an attempt is made to assign a variable of a type which is limited or which contains a limited type.

*** Semantic Error : 309 : Operator not visible for these types.

This message means that the operator exists between the types on each side of it but that it is not visible. The most likely cause is that the types concerned are defined in another package and that renaming is required to make the operator visible.
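
For example, assuming a package P that declares a type Metres together with its arithmetic operators, a renaming declaration such as the following (placed where the renaming rules permit) makes the operator directly visible:

  function "+" (Left, Right : P.Metres) return P.Metres renames P."+";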

*** Semantic Error : 310 : The % operator may only appear in an assert or check statement in a for loop.

The % operator is used to indicate the value of a variable on entry to a for loop. This is because the variable may be used in the exit expression of the loop and may also be modified in the body of the loop. Since the semantics of Ada require the exit expression to be fixed after evaluation we require a way of reasoning about the original value of a variable prior to any alteration in the loop body. No other situation requires this value so % may not be used anywhere else.
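
A minimal sketch (N is an invented Integer variable) showing the only context in which % may appear:

  for I in Integer range 1 .. N loop
     --# assert N% >= I;   -- N% denotes the value N had on entry to the loop
     N := N - 1;           -- N may change here, but the loop bound is already fixed
  end loop;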

*** Semantic Error : 311 : Announced own variable types may not be implemented as unconstrained arrays.

Where an own variable is announced as being of some type, SPARK requires that type to be declared; the declaration cannot be in the form of an unconstrained array because SPARK prohibits unconstrained variables.

*** Semantic Error : 312 : A subprogram can only be renamed to the same name with the package prefix removed.

*** Semantic Error : 313 : Only one main program is permitted.

*** Semantic Error : 314 : Own variable XXX has been refined and may not appear here.

Issued if an attempt is made to use, in a second annotation, an own variable which has been refined. Second annotations should use the appropriate refinement constituents of the own variable.

*** Semantic Error : 315 : Unsupported proof context.

Certain proof contexts have been included in the syntax of SPARK but are not yet supported; this error message results if one is found.

*** Semantic Error : 316 : Selected components are not allowed for XXX since type YYY is private here.

If a type is private, then record field selectors may not be used. In pre- and post-conditions, a proof function can be declared to yield the required attribute of a private type.

*** Semantic Error : 317 : Tilde, in a function return annotation, may only be applied to an external variable of mode IN.

The tilde decoration indicates the initial value of a variable or parameter which is both imported and exported. A function may not have an explicit side effect on a program variable and so cannot be regarded as exporting such a variable. For modelling purposes a read of an external (stream) variable is regarded as having a side effect (outside the SPARK boundary). Since it may be necessary to refer to the initial value of the external variable, before this implicit side effect occurs, the use of tilde is allowed only for external variables of mode IN which are globally referenced by the function.

*** Semantic Error : 318 : Tilde or Percent may only be applied to variables.

The tilde decoration indicates the initial value of a variable or parameter which is both imported and exported. Percent indicates the value of a variable on entry to a for loop; this message occurs if either operator is applied to any other object.

*** Semantic Error : 319 : Tilde may only be applied to a variable which is both imported and exported.

The tilde decoration indicates the initial value of a variable or parameter which is both imported and exported; this message occurs if the variable concerned is either exported only or imported only in which case no distinction between its initial and final value is required.

*** Semantic Error : 320 : Tilde or Percent may only be applied to an entire variable.

Tilde (and %) may not be applied to an element of an array or a field of a record. For example, to indicate the initial value of the Ith element of array V, use V~(I), not V(I)~.

*** Semantic Error : 321 : Tilde may not appear in pre-conditions.

Since it does not make sense to refer to anything other than the initial value of a variable in a pre-condition there is no need to use tilde to distinguish initial from final values.

*** Semantic Error : 322 : Only imports may be referenced in pre-conditions or return expressions.

Pre-conditions are concerned with the initial values of information carried into a subprogram. Since only imports can do this only imports can appear in pre-condition expressions.

*** Semantic Error : 323 : Updates may only be applied to records or arrays.

The extended SPARK update syntax is only used to express changes to components of a structured variable.

*** Semantic Error : 324 : Only one field name may appear here.

When using the extended SPARK update syntax for a record, you cannot update more than one element in each clause of the update. For example, you cannot use [x,y => z]; you must instead use [x => z; y => z].

*** Semantic Error : 325 : Type XXX has not been declared.

Occurs if a type is "announced" as part of an own variable clause and the end of the package is reached without an Ada declaration for a type of this name being found.

*** Semantic Error : 326 : Predicate is not boolean.

Occurs anywhere where a proof context is found not to be a boolean expression.

*** Semantic Error : 327 : XXX is a global variable which may not be updated in a function subprogram.

*** Semantic Error : 328 : The identifier XXX (exported by called subprogram) may not be updated in a function subprogram.

Occurs if a function calls a procedure which exports a global variable; this would create an illegal side-effect of the function.

*** Semantic Error : 329 : Illegal function call.

Issued if a call is made to a user-defined subprogram in a package initialization part.

*** Semantic Error : 330 : Illegal use of an own variable not of this package.

Issued if an attempt is made, in a package initialization part, to update an own variable of a non-enclosing package.

*** Semantic Error : 331 : Private types may not be unconstrained arrays.

*** Semantic Error : 332 : This private type was not declared as limited.

Issued where the type contains a component which is a limited private type, but where the declaration of this type in the visible part of the package does not specify that the type is limited.

*** Semantic Error : 333 : Initialization of XXX is not announced in the initialization clause of this package.

Issued when an own variable is initialized either by assignment or by having a pragma Import attached to it when initialization of the variable is not announced in its package's own variable initialization specification.
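
A minimal sketch (package Counter and its own variable Count are invented names) showing the announcement that must accompany initialization at declaration:

  package Counter
  --# own Count;
  --# initializes Count;        -- required because Count is initialized below
  is
     procedure Increment;
     --# global in out Count;
     --# derives Count from Count;
  end Counter;

  package body Counter is
     Count : Integer := 0;      -- initialization at declaration, as announced above
     procedure Increment is
     begin
        Count := Count + 1;
     end Increment;
  end Counter;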

*** Semantic Error : 334 : Identifier XXX is not the name of a function.

*** Semantic Error : 335 : This annotation should be placed with the declaration of function XXX.

Issued if a function is declared in a package specification without an annotation but one is then supplied on the function body.

*** Semantic Error : 336 : Unexpected annotation - all annotations required for function XXX have already occurred.

*** Semantic Error : 337 : Package XXX may not be used as a prefix here.

Selected component notation may not be used in places where an item is directly visible.

*** Semantic Error : 338 : Scalar parameter XXX is of mode in out and must appear as an import.

Parameters passed as mode in out must be listed as imports in the subprogram's dependency relation if they are of scalar types. The rule also applies to a parameter of a private type if its full declaration is scalar.
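
A minimal sketch (Inc and X are invented names) of a scalar in out parameter correctly listed as an import in the dependency relation:

  procedure Inc (X : in out Integer);
  --# derives X from X;         -- X is scalar and of mode in out, so it must appear as an import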

*** Semantic Error : 339 : Subprogram XXX was not declared in package YYY.

*** Semantic Error : 340 : Only operators may be renamed in package specifications.

User-declared subprograms may not be renamed in package specifications although the implicitly declared function subprograms associated with operators may be.

*** Semantic Error : 341 : A range may not appear here.

Issued if a range is found where a single value is expected, for example, if an array slice is constructed.

*** Semantic Error : 342 : This proof annotation should be placed with the declaration of subprogram XXX.

Like global and derives annotations, proof annotations should be placed on the first appearance of a subprogram. There may also be a requirement for a second proof annotation on a subprogram body where it references an abstract own variable.

*** Semantic Error : 343 : Unexpected proof annotation - all annotations required for subprogram XXX have already occurred.

Issued if a second proof annotation for a subprogram is found but the subprogram does not reference any abstract own variables. A second annotation is only required where it is necessary to express both an abstract (external) and a refined (internal) view of an operation.

*** Semantic Error : 399 : Range error in annotation expression.

Issued if a proof annotation contains an expression that would cause a constraint error if it were in an executable Ada statement. For example: "--# post X = T'Succ(T'Last);" VCs generated from such malformed predicates would always be unprovable.

*** Semantic Error : 400 : Expression contains division by zero.

Issued when a static expression, evaluated using perfect arithmetic, is found to contain a division by zero.

*** Semantic Error : 401 : Illegal numeric literal.

Issued when a numeric literal is illegal because it contains, for example, digits not compatible with its number base.

*** Semantic Error : 402 : Constraint_Error will be raised here.

Issued whenever a static expression would cause a constraint error, e.g. assigning a value to a constant outside the constant's type range. In SPARK a static expression may not yield a value which violates a range constraint.
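
For example (Small and C are invented names), the following declaration is rejected statically rather than being left to raise Constraint_Error at run time:

  subtype Small is Integer range 1 .. 10;
  C : constant Small := 11;     -- static value lies outside the range of Small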

*** Semantic Error : 403 : Argument value is inconsistent with the number of dimensions of array type XXX.

Issued when an array attribute containing an argument is found and the value of the argument is inconsistent with the number of dimensions of the array type to which it is being applied.

*** Semantic Error : 407 : This choice overlaps a previous one.

Choices in case statements and array aggregates may not overlap.

*** Semantic Error : 408 : Case statement is incomplete.

A case statement must either explicitly supply choices to cover the whole range of the (sub)type of the controlling expression, or it must supply an others choice.
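
An illustrative sketch, assuming an enumeration type Day with literals Mon .. Sun, a variable Today of that type and procedures Work and Rest; removing the Sat | Sun alternative without adding an others choice would raise this error:

  case Today is
     when Mon .. Fri => Work;
     when Sat | Sun  => Rest;
  end case;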

*** Semantic Error : 409 : Empty range specified.

In SPARK, no static range is permitted to be null.

*** Semantic Error : 410 : Choice out of range.

The choices in case statements and array aggregates must be within the constraints of the appropriate (sub)type.

*** Semantic Error : 411 : Others clause required.

Issued where an others clause is required to satisfy the Ada language rules.

*** Semantic Error : 412 : Explicit boolean range not permitted.

*** Semantic Error : 413 : Invalid range constraint.

Issued where a range constraint is outside the range of the (sub)type to which the constraint applies.

*** Semantic Error : 414 : Array aggregate is incomplete.

An array aggregate must either explicitly supply values for all array elements or provide an others clause.

*** Semantic Error : 415 : Too many entries in array aggregate.

Issued where an array aggregate using positional association contains more entries than required by the array index type.

*** Semantic Error : 416 : Type may not have an empty range.

*** Semantic Error : 417 : String subtypes must have a lower index bound of 1.

*** Semantic Error : 418 : Index upper and/or lower bounds do not match those expected.

Issued where assignment, association or type conversion is attempted between two different constrained subtypes of the same unconstrained array type, and where the index bounds do not match.
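
An illustrative sketch (Vector, V5, V6, A and B are invented names); the assignment is rejected because the two constrained subtypes have different index bounds:

  type Vector is array (Positive range <>) of Integer;
  subtype V5 is Vector (1 .. 5);
  subtype V6 is Vector (1 .. 6);
  A : V5;
  B : V6;
  -- later, in a sequence of statements:
  A := B;    -- rejected: the bounds of V6 do not match those of V5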

*** Semantic Error : 419 : YYY.XXX has been renamed locally, so the prefix YYY must not be used.

When an entity is renamed, the fully qualified name is no longer visible, and so must not be used.

*** Semantic Error : 420 : Array index(es) not convertible.

Issued when an attempt is made to convert between two arrays whose indexes are neither of the same type nor numeric.

*** Semantic Error : 421 : Array components are not of the expected type.

Issued when a type conversion attempts to convert between two array types whose components are of different types.

*** Semantic Error : 422 : Array component constraints do not match those expected.

Issued when a type conversion attempts to convert between two array types whose components are of the same type but do not have constraints which can be statically determined to be identical.

*** Semantic Error : 423 : Array has different number of dimensions from that expected.

Issued when attempting to convert between two array types which have different numbers of dimensions.

*** Semantic Error : 424 : Attributes are not permitted in a String concatenation expression.

Character attributes such as 'Val, 'Pos, 'Succ and 'Pred are not permitted below a concatenation operator in a String expression.

*** Semantic Error : 425 : String literals may not be converted.

Issued if the argument of a type conversion is a string literal. A common cause is an attempt to type qualify a string and accidentally omitting the tick character.

*** Semantic Error : 500 : Mode expected.

Issued when performing data flow analysis only where a subprogram has no dependency clause and its global variables have not been given modes in the global annotation.

*** Semantic Error : 501 : Dependency relation expected.

A dependency relation is required for each procedure if information flow analysis is to be performed.

*** Semantic Error : 502 : Exportation of XXX is incompatible with its global mode.

Issued when a procedure has both a global annotation with modes and a dependency relation, and a global of mode in is listed as an export in the dependency relation.

*** Semantic Error : 503 : Importation of XXX is incompatible with its global mode.

Issued when a procedure has both a global annotation with modes and a dependency relation, and a global of mode out is listed as an import in the dependency relation.
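
A minimal sketch (Transfer, Sensor and Actuator are invented names) of a moded global annotation and a dependency relation that are consistent with one another:

  procedure Transfer;
  --# global in  Sensor;
  --#        out Actuator;
  --# derives Actuator from Sensor;
  -- Sensor (mode in) appears only as an import; Actuator (mode out) only as an export.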

*** Semantic Error : 504 : Parameter XXX is of mode in out and must appear as an import.

*** Semantic Error : 505 : Global variable XXX is of mode in out and must appear as an import.

Issued where a procedure has both a global annotation with modes and a dependency relation, and a global variable of mode in out is not listed as an import in the dependency relation.

*** Semantic Error : 506 : Parameter XXX is of mode in out and must appear as an export.

*** Semantic Error : 507 : Global variable XXX is of mode in out and must appear as an export.

Issued where a procedure has both a global annotation with modes and a dependency relation, and a global variable of mode in out is not listed as an export in the dependency relation.

*** Semantic Error : 508 : This global variable is a parameter of mode in and can only have the global mode in.

*** Semantic Error : 509 : Unexpected refined dependency relation.

When using refinement in automatic flow analysis mode, if there is a dependency relation on the subprogram specification then there must also be one on the body. Similarly, if there is no dependency relation on the specification then the body is not permitted to have one.

*** Semantic Error : 550 : use type clauses may only be used in SPARK95: clause ignored.

*** Semantic Error : 551 : All operators for type XXX are already visible.

*** Semantic Error : 552 : The type XXX already appears in the use type clause.

*** Semantic Error : 554 : XXX is a limited private type for which no operators can be made visible.

*** Semantic Error : 555 : XXX is not mentioned in an earlier with clause of this compilation unit.

*** Semantic Error : 600 : pragma Import has a minimum of 2 and a maximum of 4 parameters.

*** Semantic Error : 601 : Convention, Entity, External_Name or Link_Name expected.

*** Semantic Error : 602 : An association for XXX has already been given.

*** Semantic Error : 603 : No association for XXX was given.

*** Semantic Error : 604 : This package may not have a body - consider use of pragma Elaborate_Body.

In Ada 95, a package body is illegal unless it is required for the purpose of providing a subprogram body, or unless this pragma is used. This error is issued where a package body is found for a package whose specification does not require a body.
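
An illustrative sketch (Registry and State are invented names) of a package whose body exists only to declare and initialize its own variable, so the pragma is needed to make that body legal:

  package Registry
  --# own State;
  --# initializes State;
  is
     pragma Elaborate_Body;   -- the specification declares no subprograms,
                              -- so a body is only permitted because of this pragma
  end Registry;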

*** Semantic Error : 605 : pragma Elaborate_Body has one parameter.

*** Semantic Error : 606 : This expression does not represent the expected package name XXX.

Issued when the parameter to a pragma Elaborate_Body is invalid.

*** Semantic Error : 607 : This package requires a body and must therefore include either pragma Elaborate_Body or a subprogram declaration.

Issued where a package specification contains no subprogram declarations, but whose own variables (as specified in the package annotation) are not all declared (and initialized where appropriate) in the package specification. This is because such a package is not allowed a body in Ada 95 unless either the pragma is given or a subprogram declared.

*** Semantic Error : 608 : Reduced accuracy subtypes of real numbers are considered obsolescent and are not supported by SPARK.

*** Semantic Error : 609 : This entity cannot be assigned to.

*** Semantic Error : 610 : Child packages may not be used in SPARK83.

*** Semantic Error : 611 : Illegal use of deferred constant prior to its full declaration.

*** Semantic Error : 613 : Illegal name for body stub.

Issued if a dotted name appears in a body stub as in "package body P.Q is separate". No legal stub could ever have such a name.

*** Semantic Error : 614 : Child packages may be declared only at library level.

Issued if an attempt is made to declare a child package which is embedded in a package or subprogram.

*** Semantic Error : 615 : Name does not match name of package.

Issued if the closing identifier of a package has a different number of identifiers from the name originally given for the package. For example "package P.Q is ... end P.Q.R;".

*** Semantic Error : 616 : The private package XXX is not visible at this point.

Issued if an attempt is made to with or inherit a private package from the visible part of a public package.

*** Semantic Error : 617 : Public sibling XXX is not visible at this point.

Arises from attempting to inherit a public sibling child package from a private child package.

*** Semantic Error : 618 : The owner of the current package does not inherit the package XXX.

A private descendant (although it may be a public package) can only inherit a remote package if its parent also inherits it; this is analogous to the behaviour of embedded packages, which may also only inherit a remote package if their enclosing package also does so.

*** Semantic Error : 619 : The package XXX is not owned by the current package.

This message indicates an attempt to claim that own variables of a package other than a private child package of the current package are refinement constituents of an abstract own variable of the current package.

*** Semantic Error : 620 : Own variables here must be refinement constituents in package owner XXX.

Own variables of private child packages must appear as refinement constituents of the package which owns the child. If the Examiner has seen the owner package body before processing the child and has not found the required refinement constituent then this message results on processing the child.

*** Semantic Error : 621 : Own variable XXX expected as a refinement constituent in this package.

Own variables of private child packages must appear as refinement constituents of the package which owns the child. If the Examiner has seen a child package which declares an own variable before examining its owner’s body then this message is issued if the owner lacks the required refinement constituent declaration.

*** Semantic Error : 622 : Own variable XXX did not occur in an initialization specification.

Issued if an own variable appears in an initialization clause and is also a refinement constituent of an own variable which is not marked as initialized.

*** Semantic Error : 623 : Own variable XXX occurred in an initialization specification.

Issued if an own variable does not appear in an initialization clause and is also a refinement constituent of an own variable that is marked as initialized.

*** Semantic Error : 624 : All operators from ancestor package XXX are already visible.

A package must appear in a with clause before types declared in it can be specified in a use type clause.

*** Semantic Error : 626 : Global/derives/declare on generic subprogram instantiation is not allowed.

*** Semantic Error : 628 : Formal parameter of the instantiation of subprogram XXX is not allowed.

*** Semantic Error : 629 : The generic subprogram XXX has no generic formal parameters.

*** Semantic Error : 630 : XXX is not the name of generic subprogram.

Only generic subprograms can be instantiated.

*** Semantic Error : 631 : Generic function found where a generic procedure was expected.

The subprogram kind of the generic and of its instantiation must match.

*** Semantic Error : 632 : Generic procedure found where a generic function was expected.

The subprogram kind of the generic and of its instantiation must match.

*** Semantic Error : 635 : Incorrect number of generic actual parameters for instantiation of generic unit XXX.

The number of generic formal and actual parameters must match exactly.

*** Semantic Error : 636 : Type XXX is not compatible with generic formal parameter YYY.

See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type which is of a compatible class. Note that SPARK does not have default values for such associations.

*** Semantic Error : 637 : User-defined generic units are not permitted in SPARK 83.

There are weaknesses in the generic type model of Ada 83 that prevent the implementation of a safe subset of generics in SPARK 83. These deficiencies are overcome in Ada 95. SPARK 83 users may employ the predefined unit Unchecked_Conversion only.

*** Semantic Error : 638 : Unexpected global annotation. A generic subprogram may not reference or update global variables.

A standalone generic subprogram may not have a global annotation. Note that a subprogram in a generic package may have a global annotation as long as it only refers to own variables that are local to the package.

*** Semantic Error : 639 : A generic formal object may only have default mode or mode in.

SPARK restricts formal objects to being constants in order to avoid concealed information flows.

*** Semantic Error : 640 : A generic formal object may only be instantiated with a constant expression.

SPARK restricts formal objects to being constants in order to avoid concealed information flows.

*** Semantic Error : 641 : There is no generic subprogram declaration named XXX so a generic body of that name cannot be declared here.

A generic body must be preceded by a generic declaration of the same name.

*** Semantic Error : 645 : Actual array element XXX is not compatible with the element type YYY of the generic formal parameter.

See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type which is of a compatible class. Note that SPARK does not have default values for such associations.

*** Semantic Error : 646 : Actual array index XXX is not compatible with the index type YYY of the generic formal parameter.

See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type which is of a compatible class. Note that SPARK does not have default values for such associations.

*** Semantic Error : 647 : Actual array XXX has more dimensions than formal array YYY.

See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type which is of a compatible class. Note that SPARK does not have default values for such associations.

*** Semantic Error : 648 : Actual array XXX has fewer dimensions than formal array YYY.

See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type which is of a compatible class. Note that SPARK does not have default values for such associations.

*** Semantic Error : 649 : Actual array XXX is constrained but the associated formal YYY is unconstrained.

See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type which is of a compatible class. Note that SPARK does not have default values for such associations.

*** Semantic Error : 650 : Actual array XXX is unconstrained but the associated formal YYY is constrained.

See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type which is of a compatible class. Note that SPARK does not have default values for such associations.

*** Semantic Error : 651 : Variables of generic types may not be initialized at declaration.

In non-generic code we statically know the value being assigned to the variable and can check that it is in range. In the case of a generic we cannot do this because we do not know the bounds of the variable's type. The variable may, however, be assigned to in the sequence of statements in the generic body because generation of run-time checks will provide suitable protection from out-of-range values.

*** Semantic Error : 652 : Subtypes of generic types are not permitted.

In non-generic code we statically know the values being used as the range bounds for a subtype and can check that they are in range. In the case of a generic we cannot do this because we do not know the bounds of the variable's type.

*** Semantic Error : 653 : Constants of generic types are not permitted.

In non-generic code we statically know the value being assigned to the constant and can check that it is in range. In the case of a generic we cannot do this because we do not know the bounds of the constant's type. A variable, assigned to in the sequence of statements in the generic body, may be a suitable substitute for such a constant.

*** Semantic Error : 654 : XXX is a generic subprogram which must be instantiated before it can be called.

Generic units provide a template for creating callable units and are not directly callable.

*** Semantic Error : 655 : Invalid prefix, XXX is a generic package.

Components of generic packages cannot be accessed directly. First instantiate the package and then access components of the instantiation.

*** Semantic Error : 656 : The only currently supported attribute in this context is 'Always_Valid.

*** Semantic Error : 657 : A 'Always_Valid assertion requires a variable here.

The 'Always_Valid assertion can only be applied to variables or to components of record variables.

*** Semantic Error : 658 : The object in this assertion must be scalar or a non-tagged aggregation of scalar components.

The 'Always_Valid assertion can only be applied to objects which are: (1) of a scalar type, (2) a one dimensional array of scalar components, (3) an entire record variable of a non-tagged type with all components that are either scalar or an array of scalar components, (4) an array variable whose components are records satisfying (3). Additionally a field of a record satisfying these constraints may be marked individually as always valid.

*** Semantic Error : 659 : A 'Always_Valid assertion must be in the same declarative region as contains the declaration of the variable to which it refers.

*** Semantic Error : 660 : A 'Always_Valid assertion must not be applied to an object already marked as always valid.

*** Semantic Error : 662 : Only Mode in own variables and constituents can be marked using 'Always_Valid.

The 'Always_Valid assertion can only be applied to variables which are own variables with the mode in, or to subcomponents of records which are mode in own variables.

*** Semantic Error : 700 : Mode 'in out' may not be applied to own variables or their refinement constituents.

Own variables may be given a mode to indicate that they are system level inputs or outputs (i.e. they obtain values from or pass values to the external environment). Since effective SPARK design strictly separates inputs from outputs the mode 'in out' is not permitted.
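
A minimal sketch (Input_Port and Sensor_Port are invented names) of a moded own variable; an output stream would use mode 'out' instead, and 'in out' is never legal here:

  package Input_Port
  --# own in Sensor_Port;       -- a system-level input stream
  is
     procedure Read (Value : out Integer);
     --# global in Sensor_Port;
     --# derives Value from Sensor_Port;
  end Input_Port;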

*** Semantic Error : 701 : The mode of this refinement constituent is not consistent with its subject: XXX.

If an abstract own variable is given a mode then its refinement constituents must all be of the same mode.

*** Semantic Error : 702 : Own variable XXX must be given the mode 'in' to match its earlier announcement .

Issued if an own variable of an embedded package is not given the same mode as the earlier refinement constituent that announced it would exist.

*** Semantic Error : 703 : Own variable XXX must be given the mode 'out' to match its earlier announcement .

Issued if an own variable of an embedded package is not given the same mode as the earlier refinement constituent that announced it would exist.

*** Semantic Error : 704 : Own variable XXX may not have a mode because one was not present in its earlier announcement .

Issued if an own variable of an embedded package is given a mode when the earlier refinement constituent that announced it would exist did not have one.

*** Semantic Error : 705 : Refinement constituent XXX must be given the mode 'in' to match the child package own variable with which it is being associated.

If a refinement constituent is an own variable of a private package then the constituent must have the same mode as the own variable to which it refers.

*** Semantic Error : 706 : Refinement constituent XXX must be given the mode 'out' to match the child package own variable with which it is being associated.

If a refinement constituent is an own variable of a private package then the constituent must have the same mode as the own variable to which it refers.

*** Semantic Error : 707 : Refinement constituent XXX may not have a mode because one was not present on the child package own variable with which it is being associated.

If a refinement constituent is an own variable of a private package then the constituent can only be given a mode if the own variable to which it refers has one.

*** Semantic Error : 708 : Own variable XXX has a mode and may not appear in an initializes clause.

Mode own variables (stream variables) are implicitly initialized by the environment to which they are connected and may not appear in initializes clauses since this would require their explicit initialization.

*** Semantic Error : 709 : Own variable or constituent XXX has mode 'out' and may not be referenced by a function.

Functions are permitted to reference own variables that are either unmoded or of mode 'in'. Since mode 'out' own variables represent outputs to the environment, reading them in a function does not make sense and is not allowed.

*** Semantic Error : 710 : The own variable or constituent XXX is of mode 'in' and can only have global mode 'in'.

Global modes, if given, must be consistent with the modes of own variables that appear in the global list.

*** Semantic Error : 711 : The own variable or constituent XXX is of mode 'out' and can only have global mode 'out'.

Global modes, if given, must be consistent with the modes of own variables that appear in the global list.

*** Semantic Error : 712 : The own variable or constituent XXX is of either mode 'in' or mode 'out' and may not have global mode 'in out'.

Global modes, if given, must be consistent with the modes of own variables that appear in the global list.

*** Semantic Error : 713 : The own variable or constituent XXX is of mode 'in' and may not appear in a dependency clause as an export.

Own variables with mode 'in' denote system-level inputs; their exportation is not allowed.

*** Semantic Error : 714 : The own variable or constituent XXX is of mode 'out' and may not appear in a dependency clause as an import.

Own variables with mode 'out' denote system-level outputs; their importation is not allowed.

*** Semantic Error : 715 : Function XXX references external (stream) variables and may only appear directly in an assignment or return statement.

To avoid ordering effects, functions which globally access own variables which have modes (indicating that they are connected to the external environment) may only appear directly in assignment or return statements. They may not appear as actual parameters or in any other form of expression.

*** Semantic Error : 716 : External (stream) variable XXX may only appear directly in an assignment or return statement; or as an actual parameter to an unchecked conversion.

To avoid ordering effects, own variables which have modes (indicating that they are connected to the external environment) may only appear directly in assignment or return statements. They may not appear as actual parameters (other than to instantiations of Unchecked_Conversion) or in any other form of expression.
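
A minimal sketch (Sensor_Port, Value, Limit and Limit_Exceeded are invented names) contrasting a permitted read with a rejected one:

  Value := Sensor_Port;                    -- permitted: the stream appears directly in an assignment
  Limit_Exceeded := Sensor_Port > Limit;   -- rejected: the stream appears inside a larger expression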

*** Semantic Error : 717 : External (stream) variable XXX is of mode 'in' and may not be assigned to.

Own variables with mode 'in' represent inputs to the system from the external environment. As such, assigning to them does not make sense and is not permitted.

*** Semantic Error : 718 : External (stream) variable XXX is of mode 'out' and may not be referenced.

Own variables with mode 'out' represent outputs to the external environment from the system. As such, referencing them does not make sense and is not permitted.

*** Semantic Error : 719 : External (stream) variables may not be referenced or updated during package elaboration.

Own variables with modes represent inputs and outputs between the external environment and the system. Referencing or updating them during package elaboration would introduce ordering effects and is not permitted.

*** Semantic Error : 720 : Variable XXX is an external (stream) variable and may not be initialized at declaration.

Own variables with modes represent inputs and outputs between the external environment and the system. Referencing or updating them during package elaboration would introduce ordering effects and is not permitted.

*** Semantic Error : 721 : This refined function global annotation may not reference XXX because it is an external (stream) variable whose abstract subject YYY does not have a mode.

Functions may be used to reference external (stream) variables and the Examiner generates the appropriate information flow to show that the value returned by the function is 'volatile'. If the abstract view of the same function shows it referencing an own variable which is not an external stream then the volatility of the function is concealed. The error can be removed either by making the abstract own variable a mode 'in' stream or by using a procedure instead of a function to read the refined stream variable.

*** Semantic Error : 722 : The mode on abstract global variable YYY must be made 'in out' to make it consistent with the referencing of mode 'in' external (stream) constituent XXX in the refined global annotation.

Where a procedure references an external (stream) variable of mode 'in' the Examiner constructs appropriate information flow to show that the input stream is 'volatile'. If the abstract view shows that the procedure obtains its result by simply reading an own variable which is not an external stream then the volatility is concealed. The error can be removed either by making the global mode of XXX 'in out' or making XXX an external (stream) variable of mode 'in'.

*** Semantic Error : 723 : Variable XXX must appear in this refined global annotation.

Issued when a global variable which is present in the first (abstract) global annotation is omitted from the second (refined) one.

*** Semantic Error : 724 : Exit label must match the label of the most closely enclosing loop statement.

If an exit statement names a loop label, then the most closely enclosing loop statement must have a matching label.
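
For example (Outer, Process and Finished are invented names):

  Outer : loop
     Process;
     exit Outer when Finished;   -- Outer must label the most closely enclosing loop
  end loop Outer;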

*** Semantic Error : 725 : Protected function or variable XXX may only appear directly in an assignment or return statement.

To avoid ordering effects, protected functions may only appear directly in assignment or return statements. They may not appear as actual parameters or in any other form of expression. Ordering effects occur because the global state referenced by the protected function may be updated by another process during expression evaluation.

*** Semantic Error : 730 : A loop with no iteration scheme or exit statements may only appear as the last statement in the outermost scope of the main subprogram (or a task body when using the Ravenscar profile).

If a loop has neither an iteration scheme nor any exit statements then it will run forever. Any statements following it will be unreachable. SPARK only allows one such loop which must be the last statement of the main program.
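
A minimal sketch of the end of a main subprogram (Main, Initialize and Poll are invented names; annotations are omitted for brevity):

  procedure Main is
  begin
     Initialize;
     loop              -- no iteration scheme and no exit statements
        Poll;
     end loop;         -- must be the last statement of Main
  end Main;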

*** Semantic Error : 750 : The identifier YYY.XXX is either undeclared or not visible at this point. An array type may not be used as its own index type.

The type mark used for the index of an array type declaration must not be the same as the name of the array type being declared.

*** Semantic Error : 751 : The identifier YYY.XXX is either undeclared or not visible at this point. A record type may not include fields of its own type.

The type mark given for a field in a record type declaration must not be the same as the name of the record type being declared.

*** Semantic Error : 752 : The identifier YYY.XXX is either undeclared or not visible at this point. This identifier must appear in a preceding legal global annotation or formal parameter list.

For an identifier to appear legally as an import in a derives annotation, it must be a formal parameter or must appear legally in a preceding global annotation and must be of mode 'in' or mode 'in out'.

*** Semantic Error : 753 : The identifier YYY.XXX is either undeclared or not visible at this point. This identifier must appear in a preceding legal global annotation or formal parameter list.

For an identifier to appear legally as an export in a derives annotation, it must be a formal parameter or must appear legally in a preceding global annotation and must be of mode 'out' or mode 'in out'.

*** Semantic Error : 754 : The identifier YYY.XXX is either undeclared or not visible at this point. This package must be both inherited and withed to be visible here.

For a package name to be visible in Ada context, it must appear in both the inherit clause and the with clause of the enclosing package.

*** Semantic Error : 755 : The identifier YYY.XXX is either undeclared or not visible at this point. A parent of a child package must be inherited to be visible here.

A parent of a child package must be inherited (but not withed) to be visible in that child.

*** Semantic Error : 756 : The identifier YYY.XXX is either undeclared or not visible at this point. The grandparent of a child package should not appear in this prefix.

A grandparent of a child package should not be included in prefixes referencing a declaration of the child package.

*** Semantic Error : 757 : The identifier XXX is either undeclared or not visible at this point. A record field name cannot be the same as its indicated type.

*** Semantic Error : 770 : If Any_Priority is defined, Priority and Interrupt_Priority must also be defined.

If the type Any_Priority is defined in package System, then the subtypes Priority and Interrupt_Priority must also be defined; if support for tasking is not required, then the definition of Any_Priority may be removed.

*** Semantic Error : 771 : The parent type of this subtype must be Any_Priority.

Ada 95 requires that both Priority and Interrupt_Priority be immediate subtypes of Any_Priority.

*** Semantic Error : 772 : The range of Priority must contain at least 30 values; LRM D.1(26).

Ada 95 requires that the range of the subtype Priority include at least 30 values; this requirement is stated in the Ada 95 Language Reference Manual at D.1(26).

*** Semantic Error : 773 : Priority'First must equal Any_Priority'First; LRM D.1(10).

Ada 95 requires that task priority types meet the following criteria, the second of which is relevant to this error:

  • subtype Any_Priority is Integer range implementation-defined;
  • subtype Priority is Any_Priority range Any_Priority'First .. implementation-defined;
  • subtype Interrupt_Priority is Any_Priority range Priority'Last+1 .. Any_Priority'Last.

*** Semantic Error : 774 : Interrupt_Priority'First must equal Priority'Last + 1; LRM D.1(10).

Ada 95 requires that task priority types meet the following criteria, the third of which is relevant to this error:

  • subtype Any_Priority is Integer range implementation-defined;
  • subtype Priority is Any_Priority range Any_Priority'First .. implementation-defined;
  • subtype Interrupt_Priority is Any_Priority range Priority'Last+1 .. Any_Priority'Last.

*** Semantic Error : 775 : Interrupt_Priority'Last must equal Any_Priority'Last; LRM D.1(10).

Ada 95 requires that task priority types meet the following criteria, the third of which is relevant to this error:

  • subtype Any_Priority is Integer range implementation-defined;
  • subtype Priority is Any_Priority range Any_Priority'First .. implementation-defined;
  • subtype Interrupt_Priority is Any_Priority range Priority'Last+1 .. Any_Priority'Last.

*** Semantic Error : 961 : This property value is of an incorrect format.

Please check the user manual for valid property value formats.

*** Semantic Error : 962 : Error(s) detected by VC Generator. See the .vcg file for more information.

This message is echoed to the screen if an unrecoverable error occurs which makes the generation of VCs for the current subprogram impossible. Another message more precisely identifying the problem will be placed in the .vcg file.

*** Semantic Error : 986 : A protected function may not call a locally-declared protected procedure.

See LRM 9.5.1 (2). A protected function has read access to the protected elements of the type whereas the called procedure has read-write access. There is no way in which an Ada compiler can determine whether the procedure will illegally update the protected state or not so the call is prohibited by the rules of Ada. (Of course, in SPARK, we know there is no function side effect but the rules of Ada must prevail nonetheless).

*** Semantic Error : 987 : Task types and protected types may only be declared in package specifications.

The Examiner performs certain important checks at the whole program level such as detection of illegal sharing of unprotected state and partition-level information flow analysis. These checks require visibility of task types and protected types (especially those containing interrupt handlers). SPARK therefore requires these types to be declared in package specifications. Subtypes and objects of task types, protected types and their subtypes may be declared in package bodies.

*** Semantic Error : 988 : Illegal re-use of identifier XXX; this identifier is used in a directly visible protected type.

SPARK does not allow the re-use of operation names which are already in use in a directly visible protected type. The restriction is necessary to avoid overload resolution issues in the protected body. For example, type PT in package P declares operation K. Package P also declares an operation K. From inside the body of PT, a call to K could refer to either of the two Ks since both are directly visible.

*** Semantic Error : 989 : The last statement of a task body must be a plain loop with no exits.

To prevent any possibility of a task terminating (which can lead to a bounded error), SPARK requires each task to end with a non-terminating loop. The environment task (or "main program") does not need to end in a plain loop provided the program closure includes at least one other task. If there are no other tasks, then the environment task must be made non-terminating with a plain loop.

*** Semantic Error : 990 : Unexpected annotation, a task body may have only global and derives annotations.

Issued if a pre, post or declare annotation is attached to a task body.

*** Semantic Error : 991 : Unexpected task body, XXX is not the name of a task declared in this package specification.

Issued if a task body is encountered for which there is no preceding declaration.

*** Semantic Error : 992 : A body for task type XXX has already been declared.

Issued if a duplicate body or body stub is encountered for a task.

*** Semantic Error : 993 : There is no protected type declaration for XXX.

Issued if a body is found for a protected type for which there is no preceding declaration.

*** Semantic Error : 994 : Invalid guard, XXX is not a Boolean protected element of this protected type.

The SPARK Ravenscar rules require a simple Boolean guard which must be one of the protected elements of the type declaring the entry.
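
An illustrative sketch (Signal, Wait, Send and Flag are invented names) in which the guard is a Boolean protected element of the type:

  protected body Signal is
     entry Wait when Flag is     -- Flag is declared as a Boolean element of Signal
     begin
        Flag := False;
     end Wait;

     procedure Send is
     begin
        Flag := True;
     end Send;
  end Signal;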

*** Semantic Error : 995 : Unexpected entry body, XXX is not the name of an entry declared in this protected type.

Local entries are not permitted so a protected body can declare at most one entry body and that must have been declared in the protected type specification.

*** Semantic Error : 996 : The protected operation XXX, declared in this type, does not have an associated body.

Each exported protected operation must have a matching implementation in the associated protected body.

*** Semantic Error : 997 : A body for protected type XXX has already been declared.

Each protected type declaration must have exactly one matching protected body or body stub.

*** Semantic Error : 998 : There is no protected type declaration for XXX.

Issued if a protected body or body stub is found and there is no matching declaration for it.

*** Semantic Error : 999 : This feature of Generics is not yet implemented.

Generics are currently limited to instantiation of Unchecked_Conversion.

Warning : No semantic checks carried out, text may not be legal SPARK.

Issued when the Examiner is used solely to check the syntax of a SPARK text: this does not check the semantics of a program (e.g. the correctness of the annotations) and therefore does not guarantee that a program is legal SPARK.

Note: Information flow analysis not carried out.

This is issued as a reminder that information flow analysis has not been carried out in this run of the Examiner: information flow errors may be present undetected in the text analysed.

Note: Flow analysis mode is automatic.

This is issued as a reminder that the Examiner will perform information flow analysis if it encounters full derives annotations and will perform data flow analysis if only moded global annotations are present. Information flow errors may be present undetected in the text analysed.

Note: Ada 83 language rules selected.

Issued when the Examiner is used in SPARK 83 mode.

--- Warning : 1 : The identifier XXX is either undeclared or not visible at this point.

This warning will appear against an identifier in a with clause if it is not also present in an inherit clause. Such an identifier cannot be used in any non-hidden part of a SPARK program. The use of with without inherit is permitted to allow reference in hidden parts of the text to imported packages which are not legal SPARK. For example, the body of SPARK_IO is hidden and implements the exported operations of the package by use of package TEXT_IO. For this reason TEXT_IO must appear in the with clause of SPARK_IO. (warning control file keyword: with_clauses).

--- Warning : 2 : Representation clause - ignored by the Examiner.

The significance of representation clauses cannot be assessed by the Examiner because it depends on the specific memory architecture of the target system. Like pragmas, representation clauses can change the meaning of a SPARK program and the warning highlights the need to ensure their correctness by other means. (warning control file keyword: representation_clauses).

--- Warning : 3 : Pragma - ignored by the Examiner.

All pragmas encountered by the Examiner generate this warning. While many pragmas (e.g. pragma page) are harmless, others can change a program's meaning, for example by causing two variables to share a single memory location. (warning control file keyword: pragma pragma_identifier or pragma all).

--- Warning : 4 : declare annotation - ignored by the Examiner.

The declare annotation is ignored by the Examiner if the profile is not Ravenscar. (warning control file keyword: declare_annotations).

--- Warning : 5 : XXX contains interrupt handlers; it is important that an interrupt identifier is not used by more than one handler.

Interrupt identifiers are implementation defined and the Examiner cannot check that values are used only once. Duplication can occur by declaring more than one object of a single (sub)type where that type defines handlers. It may also occur if interrupt identifiers are set via discriminants and two or more actual discriminants generate the same value. (warning control file keyword: interrupt_handlers).

--- Warning : 6 : Machine code insertion. Code insertions are ignored by the Examiner.

Machine code is inherently implementation dependent and cannot be analysed by the Examiner. Users are responsible for ensuring that the behaviour of the inserted machine code matches the annotation of the subprogram containing it.

--- Warning : 7 : This identifier is an Ada2005 reserved word.

Such identifiers will be rejected by an Ada2005 compiler and by the SPARK Examiner for SPARK2005. It is recommended to rename such identifiers for future upward compatibility. (warning control file keyword: ada2005_reserved_words).

--- Warning : 11 : Unnecessary others clause - case statement is already complete.

The others clause is non-executable because all case choices have already been covered explicitly. If the range of the case choice is altered later then the others clause may be executed with unexpected results. It is better to omit the others clause, in which case any extension of the case range will result in a compilation error.

--- Warning : 12 : Function XXX is an instantiation of Unchecked_Conversion.

See ALRM 13.9. The use of Unchecked_Conversion can result in implementation-defined values being returned. The function should be used with great care. The principal use of Unchecked_Conversion in SPARK programs is for the reading of external ports prior to performing a validity check; here the suppression of constraint checking prior to validation is useful. The Examiner does not assume that the value returned by an unchecked conversion is valid and so unprovable run-time check VCs will result if a suitable validity check is not carried out before the value is used. (warning control file keyword: unchecked_conversion).
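
A typical instantiation looks like the following sketch (To_Status, Port_Bits and Status are invented names, and the generic Unchecked_Conversion is assumed to have been withed); the validity of the returned value must still be checked before it is relied upon:

  function To_Status is new Unchecked_Conversion (Source => Port_Bits, Target => Status);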

    --- Warning : 13 : Function XXX is an instantiation of Unchecked_Conversion returning a type for which run-time checks are not generated. Users must take steps to ensure the validity of the returned value.

    See ALRM 13.9. The use of Unchecked_Conversion can result in invalid values being returned. The function should be used with great care especially, as in this case, where the type returned does not generate Ada run-time checks nor SPARK run-time verification conditions. For such types, this warning is the ONLY reminder the Examiner generates that the generated value may have an invalid representation. For this reason the warning is NOT suppressed by the warning control file keyword unchecked_conversion. The principal use of Unchecked_Conversion is SPARK programs is the for the reading of external ports prior to performing a validity check; here the suppression of constraint checking prior to validation is useful.

    --- Warning : 120 : Unexpected unmatched 'end accept' annotation ignored.

    This end accept annotation does not match any preceding start accept in this unit.

    --- Warning : 121 : No warning message matches this accept annotation.

    The accept annotation is used to indicate that a particular flow error or semantic warning message is expected and can be justified. This error indicates that the expected message did not actually occur. Note that when matching any information flow error messages containing two variable names, the export should be placed first and the import second (the order in the error message may differ from this depending on the style of information flow error reporting selected). For example: --# accept Flow, 601, X, Y, "..."; justifies the message: "X may be derived from the imported value(s) of Y" or the alternative form: "Y may be used in the derivation of X".

    --- Warning : 122 : Maximum number of error or warning justifications reached, subsequent accept annotations will be ignored.

    The number of justifications per source file is limited. If you reach this limit it is worth considering carefully why the code generates so many warnings.

    --- Warning : 169 : Direct update of own variable XXX, which is an own variable of a non-enclosing package.

    With the publication of Edition 3.1 of the SPARK Definition the previous restriction prohibiting the direct updating of own variables of non-enclosing packages was removed; however, the preferred use of packages as abstract state machines is compromised by such action which is therefore discouraged. (warning control file keyword: direct_updates).

    --- Warning : 200 : This static expression cannot be evaluated by the Examiner.

    Issued if a static expression exceeds the internal limits of the Examiner because its value is, for example, too large to be evaluated using infinite precision arithmetic. No value will be recorded for the expression and this may limit the Examiner's ability to detect certain sorts of errors such as numeric constraints. (warning control file keyword: static_expressions).

    --- Warning : 201 : This expression cannot be evaluated statically because its value may be implementation-defined.

    Raised, for example, when evaluating 'Size of a type that does not have an explicit Size representation clause. Attributes of implementation-defined types, such as Integer'Last, may also be unknown to the Examiner if they are not specified in the configuration file. (warning control file keyword: static_expressions).

    --- Warning : 202 : An arithmetic overflow has occurred. Constraint checks have not been performed.

    Raised when comparing two real numbers. The Examiner cannot deal with real numbers specified to such a high degree of precision. Consider reducing the precision of these numbers.

    --- Warning : 300 : VCs cannot be built for multi-dimensional array aggregates.

    Issued when an aggregate of a multi-dimensional array is found. Suppresses generation of VCs for that subprogram. Can be worked round by using arrays of arrays.

    --- Warning : 301 : Called subprogram exports abstract types for which RTCs are not possible.

    --- Warning : 302 : This expression may be re-ordered by a compiler. Add parentheses to remove ambiguity.

    Issued when a potentially re-orderable expression is encountered, for example x := a + b + c;. Whether intermediate sub-expression values overflow may depend on the order of evaluation, which is compiler-dependent. Therefore, code generating this warning should be parenthesized to remove the ambiguity, e.g. x := (a + b) + c;.

    --- Warning : 303 : Overlapping choices may not be detected.

    Issued where choices in an array aggregate or case statement are outside the range which can be detected because of limits on the size of a table internal to the Examiner.

    --- Warning : 304 : Case statement may be incomplete.

    Issued when the Examiner cannot determine the completeness of a case statement because the bounds of the type of the controlling expression exceed the size of the internal table used to perform the checks.

    --- Warning : 305 : Value too big for internal representation.

    Issued when the Examiner cannot determine the completeness of an array aggregate or case statement because the number used in a choice exceeds the size allowed in the internal table used to perform the checks.

    --- Warning : 306 : Aggregate may be incomplete.

    Issued when the Examiner cannot determine the completeness of an array aggregate because its bounds exceed the size of the internal table used to perform the checks.

    --- Warning : 307 : Completeness checking incomplete: index type(s) undefined or not discrete.

    Issued where the array index (sub)type is inappropriate: this is probably because there is an error in its definition, which will have been indicated by a previous error message.

    --- Warning : 308 : Use of equality operator with floating point type.

    The use of this operator is discouraged in SPARK because of the difficulty in determining exactly what it means to say that two instances of a floating point number are equal.
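
    As a sketch of the usual alternative (the tolerance value and names are hypothetical), compare within an explicit tolerance rather than writing X = Y:

        Eps : constant Float := 1.0E-6;    -- hypothetical tolerance

        function Nearly_Equal (X, Y : Float) return Boolean is
        begin
           return abs (X - Y) <= Eps;      -- preferred to "X = Y" on floating point values
        end Nearly_Equal;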

    --- Warning : 309 : Type conversion to own type, consider using type qualification instead.

    Issued where a type conversion is either converting from a (sub)type to the same (sub)type or is converting between two subtypes of the same type. In the former case the type conversion may be safely removed because no constraint check is required; in the latter case the type conversion may be safely replaced by a type qualification which preserves the constraint check. (warning control file keyword: type_conversions).
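
    A small sketch (the subtype and procedure names are hypothetical) showing the conversion that attracts the warning:

        subtype Small is Integer range 1 .. 10;

        procedure Store (Y : in Integer; X : out Small)
        --# derives X from Y;
        is
        begin
           X := Small (Y);     -- Warning 309: conversion between subtypes of the same type
        end Store;

    Writing X := Small'(Y); instead preserves the constraint check while avoiding the conversion.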

    --- Warning : 310 : Use of obsolescent Ada 83 language feature.

    Issued when a language feature defined by Ada 95 to be obsolescent is used. Use of such features is not recommended because compiler support for them cannot be guaranteed. (warning control file keyword: obsolescent_features).

    --- Warning : 311 : Priority pragma for XXX is unavailable and has not been considered in the ceiling priority check.

    --- Warning : 312 : Replacement rules cannot be built for multi-dimensional array constant XXX.

    Issued when a VC or PF references a multi-dimensional array constant. Can be worked round by using arrays of arrays.

    --- Warning : 313 : The constant XXX has semantic errors in its initializing expression or has a hidden completion which prevent generation of a replacement rule.

    Issued when replacement rules are requested for a composite constant which had semantic errors in its initializing expression, or is a deferred constant whose completion is hidden from the Examiner. Semantic errors must be eliminated before replacement rules can be generated.

    --- Warning : 314 : The constant XXX has semantic errors in its type which prevent generation of rules.

    Issued when an attempt is made to generate type deduction rules for a constant which has semantic errors in its type. These semantic errors must be eliminated before type deduction rules can be generated.

    --- Warning : 315 : The procedure XXX does not have a derives annotation. The analysis of this call assumes that each of its exports is derived from all of its imports.

    Issued in flow=auto mode when a function calls a procedure that does not have a derives annotation. In most cases this assumption will not affect the validity of the analysis, but if the called procedure derives null from an import this can have an impact. Note that functions are considered to have implicit derives annotations so this warning is not issued for calls to functions.
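
    For comparison, a sketch (hypothetical names) of a procedure specification with an explicit derives annotation; without it the Examiner would assume that Total and Count are each derived from all of Item, Total and Count:

        procedure Accumulate (Item : in Integer; Total : in out Integer; Count : in out Natural);
        --# derives Total from Total, Item &
        --#         Count from Count;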

    --- Warning : 320 : The proof function XXX has a non-boolean return and a return annotation. Please make sure that the return is always in-type.

    Any proof function with a non-Boolean return can introduce unsoundness if the result could overflow. For example, a return of (x + 1) is not acceptable if x can take the value Integer'Last. (warning control file keyword: proof_function_non_boolean).

    --- Warning : 321 : The proof function XXX has an implicit return annotation. Please be careful not to introduce unsoundness.

    Any proof function with an implicit return can easily introduce unsoundness as it does not have a body which we can check to expose any contradictions. For example: return B => False. (warning control file keyword: proof_function_implicit).

    --- Warning : 322 : The return refinement for proof function XXX is assumed to hold as it is axiomatic and thus cannot be checked.

    (warning control file keyword: proof_function_refinement).

    --- Warning : 323 : The precondition refinement for proof function XXX is assumed to hold as it is axiomatic and thus cannot be checked.

    (warning control file keyword: proof_function_refinement).

    --- Warning : 350 : Unexpected pragma Import. Variable XXX is not identified as an external (stream) variable.

    The presence of a pragma Import makes it possible that the variable is connected to some external device. The behaviour of such variables is best captured by making them moded own variables (or "stream" variables). If variables connected to the external environment are treated as if they are normal program variables then misleading analysis results are inevitable. The use of pragma Import on local variables of subprograms is particularly deprecated. The warning may safely be disregarded if the variable is not associated with memory-mapped input/output or if the variable concerned is an own variable and the operations on it are suitably annotated to indicate volatile, stream-like behaviour. Where pragma Import is used, it is essential that the variable is properly initialized at the point from which it is imported. (warning control file keyword: imported_objects).
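
    A minimal sketch of the preferred treatment (the package name, subprogram name and port address are hypothetical): the external object is declared as a moded own variable so that the Examiner models its volatile, stream-like behaviour:

        package Sensor
        --# own in Port;
        is
           procedure Read (Value : out Integer);
           --# global  in Port;
           --# derives Value from Port;
        end Sensor;

        with System.Storage_Elements;
        package body Sensor is
           Port : Integer;
           for Port'Address use
             System.Storage_Elements.To_Address (16#FFFF_0000#);   -- hypothetical memory-mapped address

           procedure Read (Value : out Integer) is
           begin
              Value := Port;
           end Read;
        end Sensor;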

    --- Warning : 351 : Unexpected address clause. XXX is a constant.

    Great care is needed when attaching an address clause to a constant. The use of such a clause is safe if, and only if, the address supplied provides a valid value for the constant which does not vary during the execution life of the program, for example, mapping the constant to PROM data. If the address clause causes the constant to have a value which may alter, or worse, change dynamically under the influence of some device external to the program, then misleading or incorrect analysis is certain to result. If the intention is to create an input port of some kind, then a constant should not be used. Instead a moded own variable (or "stream" variables) should be used. (warning control file keyword: address_clauses).

    --- Warning : 360 : This pragma must have zero or one arguments.

    --- Warning : 361 : This pragma must have exactly one argument.

    --- Warning : 362 : This pragma must have exactly two arguments.

    --- Warning : 363 : This pragma must have at least one argument.

    --- Warning : 364 : This pragma must have between two and four arguments.

    --- Warning : 365 : This pragma must have exactly zero arguments.

    --- Warning : 366 : This pragma must have one or two arguments.

    --- Warning : 380 : Casing inconsistent with declaration. Expected casing is XXX.

    The Examiner checks the case used for an identifier against the declaration of that identifier and warns if they do not match. (warning control file keyword: style_check_casing).

    --- Warning : 389 : Generation of VCs for consistency of generic and instantiated subprogram constraints is not yet supported. It will be supported in a future release of the Examiner.

    --- Warning : 390 : This generic subprogram has semantic errors in its declaration which prevent instantiations of it.

    Issued to inform the user that a generic subprogram instantiation cannot be completed because of earlier errors in the generic declaration.

    --- Warning : 391 : If the identifier XXX represents a package which contains a task or an interrupt handler then the partition-level analysis performed by the Examiner will be incomplete. Such packages must be inherited as well as withed.

    --- Warning : 392 : External variable XXX may have an invalid representation and its assignment may cause a run-time exception which is outside the scope of the absence of RTE proof.

    Where values are read from external variables (i.e. variables connected to the external environment) there is no guarantee that the bit pattern read will be a valid representation for the type of the external variable. Unexpected behaviour may result if invalid values are used in expressions. If the code is compiled with Ada run-time checks enabled the assignment of an invalid value may (but need not) raise a run-time exception dependent on the compiler. A compiler may provide facilities to apply extended checking which may also raise a run-time exception if an invalid value is used. The SPARK Toolset does not check the validity of the external variable and therefore any possible exception arising from its assignment is outside the scope of proof of absence of RTE. To ensure that a run-time exception cannot occur make the type of the external variable such that any possible bit pattern that may be read from the external source is a valid value. If the desired type is such a type then the always_valid assertion may be applied to the external variable; otherwise use explicit tests to ensure it has a valid value for the desired type before converting to an object of the desired type. In SPARK 95 the 'Valid attribute (see ALRM 13.9.2) may be used to determine the validity of a value if it can be guaranteed that the assignment of an invalid value read from an external variable will not raise a run time exception, either by compiling the code with checks off or by ensuring the compiler does not apply constraint checks when assigning same subtype objects. Note that when the Examiner is used to generate run-time checks, it will not be possible to discharge those involving external variables unless one of the above steps is taken. More information on interfacing can be found in the INFORMED manual and the SPARK Proof Manual. (warning control file keyword: external_assignment).

    --- Warning : 393 : External variable XXX may have an invalid representation and is of a type for which run-time checks are not generated but its assignment may cause a run-time exception. Users must take steps to ensure the validity of the assigned or returned value.

    Where values are read from external variables (i.e. variables connected to the external environment) there is no guarantee that the bit pattern read will be a valid representation for the type of the external variable. Unexpected behaviour may result if invalid values are used in expressions. If the code is compiled with Ada run-time checks enabled the assignment of an invalid value may (but need not) raise a run-time exception dependent on the compiler. A compiler may provide facilities to apply extended checking which may also raise a run-time exception if an invalid value is used. The SPARK Toolset does not check the validity of the external variable and therefore any possible exception arising from its assignment is outside the scope of proof of absence of RTE. Where, as in this case, the type is one for which Ada run-time checks need not be generated and SPARK run-time verification conditions are not generated, extra care is required. For such types, this warning is the ONLY reminder the Examiner generates that the external value may have an invalid representation. For this reason the warning is NOT suppressed by the warning control file keyword external_assignment. To ensure that a run-time exception cannot occur make the type of the external variable such that any possible bit pattern that may be read from the external source is a valid value. Explicit tests of the value may then be used to determine the value of an object of the desired type. In SPARK 95 the 'Valid attribute (see ALRM 13.9.2) may be used to determine the validity of a value if it can be guaranteed that the assignment of an invalid value read from an external variable will not raise a run time exception, either by compiling the code with checks off or by ensuring the compiler does not apply constraint checks when assigning same subtype objects. Boolean external variables require special care since the Examiner does not generate run-time checks for Boolean variables; use of 'Valid is essential when reading Boolean external variables. More information on interfacing can be found in the INFORMED manual and the SPARK Proof Manual.

    --- Warning : 394 : Variables of type XXX cannot be initialized using the facilities of this package.

    A variable of a private type can only be used (without generating a data flow error) if there is some way of giving it an initial value. For a limited private type only a procedure that has an export of that type and no imports of that type is suitable. For a private type either a procedure, function or (deferred) constant is required. The required facility may be placed in, or already available in, a public child package. (warning control file keyword: private_types).

    --- Warning : 395 : Variable XXX is an external (stream) variable but does not have an address clause or a pragma import.

    When own variables are given modes they are considered to be inputs from or outputs to the external environment. The Examiner regards them as being volatile (i.e. their values can change in ways not visible from an inspection of the source code). If a variable is declared in that way but it is actually an ordinary variable which is NOT connected to the environment then misleading analysis is inevitable. The Examiner expects to find an address clause or pragma import for variables of this kind to indicate that they are indeed memory-mapped input/output ports. This warning is issued if an address clause or pragma import is not found.

    --- Warning : 396 : Unexpected address clause. Variable XXX is not identified as an external (stream) variable.

    The presence of an address clause makes it possible that the variable is connected to some external device. The behaviour of such variables is best captured by making them moded own variables (or "stream" variables). If variables connected to the external environment are treated as if they are normal program variables then misleading analysis results are inevitable. The use of address clauses on local variables of subprograms is particularly deprecated. The warning may safely be disregarded if the variable is not associated with memory-mapped input/output or if the variable concerned is an own variable and the operations on it are suitably annotated to indicate volatile, stream-like behaviour. (warning control file keyword: address_clauses).

    --- Warning : 397 : Variables of type XXX can never be initialized before use.

    A variable of a private type can only be used (without generating a data flow error) if there is some way of giving it an initial value. For a limited private type only a procedure that has an export of that type and no imports of that type is suitable. For a private type either a procedure, function or (deferred) constant is required.

    --- Warning : 398 : The own variable XXX can never be initialized before use.

    The own variable can only be used (without generating a data flow error) if there is some way of giving it an initial value. If it is initialized during package elaboration (or implicitly by the environment because it represents an input port) it should be placed in an "initializes" annotation. Otherwise there needs to be some way of assigning an initial value during program execution. Either the own variable needs to be declared in the visible part of the package so that a direct assignment can be made to it or, more usually, the package must declare at least one procedure for which the own variable is an export but not an import. Note that if the own variable is an abstract own variable with some constituents initialized during elaboration and some during program execution then it will never be possible correctly to initialize it; such abstract own variables must be divided into separate initialized and uninitialized components.
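
    A sketch of one way to satisfy this (the package and variable names are hypothetical): initialize the own variable at elaboration and name it in an initializes annotation:

        package Counter
        --# own Count;
        --# initializes Count;
        is
           procedure Increment;
           --# global  in out Count;
           --# derives Count from Count;
        end Counter;

        package body Counter is
           Count : Natural := 0;           -- initialized during elaboration, as the annotation states

           procedure Increment is
           begin
              Count := Count + 1;
           end Increment;
        end Counter;

    The alternative is to omit the initializes annotation and instead provide a procedure (a Reset, for example) for which Count is an export but not an import.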

    --- Warning : 399 : The called subprogram has semantic errors in its interface (parameters and/or annotations) which prevent flow analysis of this call.

    Issued to inform the user that flow analysis has been suppressed because of the error in the called subprogram's interface.

    --- Warning : 9 : The body of XXX has a hidden exception handler - analysis and verification of contracts for this handler have not been performed.

    Issued when a --# hide XXX annotation is used to hide a user-defined exception handler. (warning control file keyword: handler_parts).

    --- Warning : 10 : XXX is hidden - hidden text is ignored by the Examiner.

    Issued when a --# hide XXX annotation is used. (warning control file keyword: hidden_parts).

    --- Warning : 400 : Variable XXX is declared but not used.

    Issued when a variable declared in a subprogram is neither referenced, nor updated. (warning control file keyword: unused_variables).

    --- Warning : 402 : Default assertion planted to cut loop.

    In order to prove properties of code containing loops, the loop must be "cut" with a suitable assertion statement. When generating run-time checks, the Examiner inserts a simple assertion to cut any loops which do not have one supplied by the user. The assertion is placed at the point where this warning appears in the listing file. The default assertion asserts that the subprogram's precondition (if any) is satisfied, that all imports to it are in their subtypes and that any for loop counter is in its subtype. In many cases this provides sufficient information to complete a proof of absence of run-time errors. If more information is required, then the user can supply an assertion and the Examiner will append the above information to it. (warning control file keyword: default_loop_assertions).
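
    A sketch (hypothetical names) of a user-supplied cut-point assertion; if the assertion were omitted, the Examiner would plant a default one at this point and issue this warning:

        subtype Index  is Integer range 1 .. 10;
        type    Vector is array (Index) of Integer;

        procedure Sum (A : in Vector; Total : out Integer)
        --# derives Total from A;
        is
        begin
           Total := 0;
           for I in Index loop
              --# assert I in Index;       -- user-supplied assertion cutting the loop
              Total := Total + A (I);
           end loop;
        end Sum;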

    --- Warning : 403 : XXX is declared as a variable but used as a constant.

    XXX is a variable which was initialized at declaration but whose value is only ever read not updated; it could therefore have been declared as a constant. (warning control file keyword: constant_variables).

    --- Warning : 404 : Subprogram imports variables of abstract types for which run-time checks cannot be generated.

    --- Warning : 405 : VCs for statements including real numbers are approximate.

    The Examiner generates VCs associated with real numbers using perfect arithmetic rather than the machine approximations used on the target platform. It is possible that rounding errors might cause a Constraint_Error even if these run-time check proofs are completed satisfactorily. (warning control file keyword: real_rtcs).

    --- Warning : 406 : VC Generator unable to create output files. Permission is required to create directories and files in the output directory.

    This message is echoed to the screen if the Examiner is unable to create output files for the VCs being generated (for instance, if the user does not have write permission for the output directory).

    --- Warning : 407 : This package requires a body. Care should be taken to provide one because an Ada compiler will not detect its omission.

    Issued where SPARK own variable and initialization annotations make it clear that a package requires a body but where no Ada requirement for a body exists.

    --- Warning : 408 : VCs could not be generated for this subprogram owing to semantic errors in its specification or body. Unprovable (False) VC generated.

    Semantic errors prevent VC Generation, so a single False VC is produced. This will be detected and reported by POGS.

    --- Warning : 409 : VCs could not be generated for this subprogram due to its size and/or complexity exceeding the capacity of the VC Generator. Unprovable (False) VC generated.

    A subprogram which has excessive complexity of data structure or number of paths may cause the VC Generator to exceed its capacity. A single False VC is generated in this case to make sure this error is detected in subsequent proof and analysis with POGS.

    --- Warning : 410 : Task or interrupt handler XXX is either unavailable (hidden) or has semantic errors in its specification which prevent partition-wide flow analysis being carried out.

    Partition-wide flow analysis is performed by checking all packages withed by the main program for tasks and interrupt handlers and constructing an overall flow relation that captures their cumulative effect. It is for this reason that SPARK requires task and protected types to be declared in package specifications. If a task type, or a protected type which contains an interrupt handler, is hidden from the Examiner (in a hidden package private part) or contains errors in its specification, the partition-wide flow analysis cannot be constructed correctly and is therefore suppressed. Correct the specification of the affected tasks and (temporarily if desired) make them visible to the Examiner.

    --- Warning : 411 : Task type XXX is unavailable and has not been considered in the shared variable check.

    The Examiner checks that there is no potential sharing of unprotected data between tasks. If a task type is hidden from the Examiner in a hidden package private part, then it is not possible to check whether that task may share unprotected data.

    --- Warning : 412 : Task type XXX is unavailable and has not been considered in the max-one-in-a-queue check.

    The Examiner checks that no more than one task can suspend on a single object. If a task is hidden from the Examiner in a hidden package private part, then it is not possible to check whether that task may suspend on the same object as another task.

    --- Warning : 413 : Task or main program XXX has errors in its annotations. The shared variable and max-one-in-a-queue checks may be incomplete.

    The Examiner checks that no more than one task can suspend on a single object and that there is no potential sharing of unprotected data between tasks. These checks depend on the accuracy of the annotations on the task types withed by the main program. If these annotations contain errors, then any reported violations of the shared variable and max-one-in-a-queue checks will be correct; however, the check may be incomplete. The errors in the task annotations should be corrected.

    --- Warning : 414 : Long output file name has been truncated.

    Raised if an output file name is longer than the limit imposed by the operating system and has been truncated. Section 4.7 of the Examiner User Manual describes how the output file names are constructed. If this message is seen there is a possibility that the output from two or more subprograms will be written to the same file name, if they have a sufficiently large number of characters in common.

    --- Warning : 415 : The analysis of generic packages is not yet supported. It will be supported in a future release of the Examiner.

    --- Warning : 420 : Instance of SEPR 2124 found. An extra VC will be generated here and must be discharged to ensure absence of run-time errors. Please seek advice for assistance with this issue.

    In release 7.5 of the Examiner, a flaw in the VC generation was fixed such that subcomponents of records and elements of arrays when used as "out" or "in out" parameters will now generate an additional VC to verify absence of run-time errors. This warning flags an instance of this occurrence. Please read the release note and/or seek advice for assistance with this issue.

    --- Warning : 425 : The -vcg switch should be used with the selected language profile.

    A code generator language profile such as KCG is in use and so conditional flow errors may be present in the subprogram. Therefore the -vcg switch must be used to generate VCs and the VCs related to definedness discharged using the proof tools.

    --- Warning : 426 : The with_clause contains a reference to a public child of the package. The Examiner will not detect mutual recursion between subprograms of the two packages.

    A code generator language profile such as KCG allows a package body to with its own public child which is not normally permitted in SPARK. The removal of this restriction means that the Examiner will not detect mutual recursion between subprograms declared in the visible parts of the package and its child. The code generator is expected to guarantee the absence of recursion.

    --- Warning : 430 : SLI generation abandoned owing to syntax or semantic errors or multiple units in a single source file.

    --- Warning : 431 : Preconditions on the main program are assumed to be true and not checked by the VC generation system.

    --- Warning : 444 : Assumptions cannot be checked and must be justified with an accept annotation.

    --- Warning : 495 : The VC file XXX has a pathname longer than 255 characters which can produce unexpected problems on Windows with respect to the SPARK tools (undischarged VCs) and other tools.

    There is little that can be done to work around this as this is a fundamental limitation of Windows. You could try one of the following: Perform analysis higher up in the directory tree (i.e. in C:\a instead of C:\project_name\spark\analysis). You could try remapping a directory to a new drive to do the same (google for subst). You could try renaming or restructuring your program to flatten the structure a bit. And finally you can perform analysis on a UNIX system such as Mac OSX or GNU/Linux as they do not suffer from this problem.

    ??? Flow Error : 501 : Expression contains reference(s) to variable XXX, which may have an undefined value.

    The expression may be that in an assignment or return statement, an actual parameter, or a condition occurring in an if or case statement, an iteration scheme or exit statement. The Examiner has identified at least one syntactic path to this point where the variable has NOT been given a value. Conditional data flow errors are extremely serious and must be carefully investigated. NOTE: the presence of random and possibly invalid values introduced by data flow errors invalidates proof of exception freedom for the subprogram body which contains them. All reports of data flow errors must be eliminated or shown to be associated with semantically infeasible paths before attempting exception freedom proofs. See the manual "SPARK Proof Manual " for full details.

    ??? Flow Error : 504 : Statement contains reference(s) to variable XXX, which may have an undefined value.

    The statement here is a procedure call, and the variable XXX may appear in an actual parameter, whose value is imported when the procedure is executed. If the variable XXX does not occur in the actual parameter list, it is an imported global variable of the procedure (named in its global definition). The Examiner has identified at least one syntactic path to this point where the variable has NOT been given a value. Conditional data flow errors are extremely serious and must be carefully investigated. NOTE: the presence of random and possibly invalid values introduced by data flow errors invalidates proof of exception freedom for the subprogram body which contains them. All reports of data flow errors must be eliminated or shown to be associated with semantically infeasible paths before attempting exception freedom proofs. See the manual "SPARK Proof Manual " for full details.

    ??? Flow Error : 601 : YYY may be derived from the imported value(s) of XXX.

    Here the item on the left of "may be derived from ..." is an exported variable and the item(s) on the right are imports of a procedure subprogram. The message reports a possible dependency, found in the code, which does not appear in the specified dependency relation (derives annotation). The discrepancy could be caused by an error in the subprogram code which implements an unintended dependency. It could also be an error in the subprogram's derives annotation which omits a necessary and intended dependency. Finally, the Examiner may be reporting a false coupling between two items resulting from a non-executable code path or the sharing of disjoint parts of structured or abstract data (e.g. one variable writing to one element of an array and another variable reading back a different element). Unexpected dependencies should be investigated carefully and only accepted without modification of either code or annotation if it is certain they are of the "false coupling" kind.
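
    A minimal sketch (hypothetical names) of code that would provoke this message, because the code implements a dependency the annotation does not state:

        procedure Split (X, Y : in Integer; A, B : out Integer)
        --# derives A from X &
        --#         B from Y;
        is
        begin
           A := X + Y;       -- the code also derives A from Y, which the annotation omits
           B := Y;
        end Split;

    For this sketch the Examiner would report "A may be derived from the imported value(s) of Y".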

    ??? Flow Error : 601 : The imported value of XXX may be used in the derivation of YYY.

    Here the first item is an import and the second is an export of a procedure subprogram. The message reports a possible dependency, found in the code, which does not appear in the specified dependency relation. This version of the message has been retained for backward compatibility.

    ??? Flow Error : 602 : The undefined initial value of XXX may be used in the derivation of YYY.

    Here XXX is a non-imported variable, and YYY is an export, of a procedure subprogram.

    ??? Flow Error : 605 : Information flow from XXX to YYY violates information flow policy.

    This message indicates a violation of security or safety policy, such as information flow from a Secret input to an Unclassified output.

    ??? Flow Error : 606 : The imported value of XXX may be used in the derivation of YYY. Furthermore, this information flow violates information flow policy.

    Here XXX is an import and YYY is an export of a procedure subprogram. The message reports a possible dependency, found in the code, which does not appear in the specified dependency relation. If this dependency did appear in the dependency relation, then it would also constitute an integrity violation.

    !!! Flow Error : 20 : Expression contains reference(s) to variable XXX which has an undefined value.

    The expression may be that in an assignment or return statement, an actual parameter, or a condition occurring in an if or case statement, an iteration scheme or exit statement. NOTE: the presence of random and possibly invalid values introduced by data flow errors invalidates proof of exception freedom for the subprogram body which contains them. All unconditional data flow errors must be eliminated before attempting exception freedom proofs. See the manual "SPARK Proof Manual" for full details.

    !!! Flow Error : 23 : Statement contains reference(s) to variable XXX which has an undefined value.

    The statement here is a procedure call or an assignment to an array element, and the variable XXX may appear in an actual parameter, whose value is imported when the procedure is executed. If the variable XXX does not occur in the actual parameter list, it is an imported global variable of the procedure (named in its global definition). NOTE: the presence of random and possibly invalid values introduced by data flow errors invalidates proof of exception freedom for the subprogram body which contains them. All unconditional data flow errors must be eliminated before attempting exception freedom proofs. See the manual "SPARK Proof Manual" for full details.

    !!! Flow Error : 22 : Value of expression is invariant.

    The expression is either a case expression or a condition (Boolean-valued expression) associated with an if-statement, not contained in a loop statement. The message indicates that the expression takes the same value whenever it is evaluated, in all program executions. Note that if the expression depends on values obtained by a call to another subprogram then a possible source for its invariance might be an incorrect annotation on the called subprogram.

    !!! Flow Error : 30 : The variable XXX is imported but neither referenced nor exported.

    !!! Flow Error : 31 : The variable XXX is exported but not (internally) defined.

    !!! Flow Error : 32 : The variable XXX is neither imported nor defined.

    !!! Flow Error : 33 : The variable XXX is neither referenced nor exported.

    !!! Flow Error : 34 : The imported, non-exported variable XXX may be redefined.

    The updating of imported-only variables is forbidden under all circumstances.

    !!! Flow Error : 35 : Importation of the initial value of variable XXX is ineffective.

    The meaning of this message is explained in Section 4.2 of Appendix A.

    !!! Flow Error : 36 : The referencing of variable XXX by a task or interrupt handler has been omitted from the partition annotation.

    This message is only issued when processing the partition annotation. The partition annotation must describe all the actions of the tasks and interrupt handlers making up the program. Therefore, if a variable is imported somewhere in the program by a task or interrupt handler, then it must also be an import at the partition level. As well as the omission of explicit imports, this message is also generated if the implicit imports of tasks and interrupt handlers are omitted. For tasks this means any variable the task suspends on and for interrupt handlers it means the name of the protected object containing the handler or, if given, the name of the interrupt stream associated with the handler.

    !!! Flow Error : 37 : The updating of variable XXX by a task or interrupt handler has been omitted from the partition annotation.

    This message is only issued when processing the partition annotation. The partition annotation must describe all the actions of the tasks and interrupt handlers making up the program. Therefore, if a variable is exported somewhere in the program by a task or interrupt handler, then it must also be an export at the partition level.

    !!! Flow Error : 38 : The protected element XXX must be initialized at its point of declaration.

    To avoid potential race conditions during program startup, all elements of a protected type must be initialized with a constant value at the point of declaration.

    !!! Flow Error : 50 : YYY is not derived from the imported value(s) of XXX.

    The item before "is not derived ..." is an export or function return value and the item(s) after are imports of the subprogram. The message indicates that a dependency, stated in the dependency relation (derives annotation) or implied by the function signature is not present in the code. The absence of a stated dependency is always an error in either code or annotation.

    !!! Flow Error : 50 : The imported value of XXX is not used in the derivation of YYY.

    The variable XXX, which appears in the dependency relation of a procedure subprogram, as an import from which the export YYY is derived, is not used in the code for that purpose. YYY may be a function return value. This version of the message has been retained for backward compatibility.

    !!! Flow Error : 53 : The package initialization of XXX is ineffective.

    Here XXX is an own variable of a package, initialized in the package initialization. The message states that XXX is updated elsewhere, before being read.

    !!! Flow Error : 54 : The initialization at declaration of XXX is ineffective.

    Issued if the value assigned to a variable at declaration cannot affect the final value of any exported variable of the subprogram in which it occurs because, for example, it is overwritten before it is used.

    !!! Flow Error : 57 : Information flow from XXX to YYY violates the selected information flow policy.

    Issued if safety or security policy checking is enabled and the specified dependency relation contains a relationship in which the flow of information from state or input to state or output violates the selected policy.

    !!! Flow Error : 1 : The previously stated updating of XXX has been omitted.

    XXX occurred as an export in the earlier dependency relation but neither XXX nor any refinement constituent of it occurs in the refined dependency relation.

    !!! Flow Error : 2 : The updating of XXX has not been previously stated.

    A refinement constituent of XXX occurs as an export in the refined dependency relation but XXX does not occur as an export in the earlier dependency relation.

    !!! Flow Error : 3 : The previously stated dependency of the exported value of XXX on the imported value of YYY has been omitted.

    The dependency of the exported value of XXX on the imported value of YYY occurs in the earlier dependency relation but in the refined dependency relation, no constituents of XXX depend on any constituents of YYY.

    !!! Flow Error : 4 : The dependency of the exported value of XXX on the imported value of YYY has not been previously stated.

    A refined dependency relation states a dependency of XXX or a constituent of XXX on YYY or a constituent of YYY, but in the earlier relation, no dependency of XXX on YYY is stated.

    !!! Flow Error : 5 : The (possibly implicit) dependency of the exported value of XXX on its imported value has not been previously stated.

    Either a dependency of a constituent of XXX on at least one constituent of XXX occurs in the refined dependency relation, or not all the constituents of XXX occur as exports in the refined dependency relation. However, the dependency of XXX on itself does not occur in the earlier dependency relation.

    !!! Flow Error : 40 : Exit condition is stable, of index 0.

    !!! Flow Error : 40 : Exit condition is stable, of index 1.

    !!! Flow Error : 40 : Exit condition is stable, of index greater than 1.

    In these cases the (loop) exit condition occurs in an iteration scheme, an exit statement, or an if-statement whose (unique) sequence of statements ends with an unconditional exit statement - see the SPARK Definition. The concept of loop stability is explained in Section 4.4 of Appendix A. A loop exit condition which is stable of index 0 takes the same value at every iteration around the loop, and with a stability index of 1, it always takes the same value after the first iteration. Stability with indices greater than 0 does not necessarily indicate a program error, but the conditions for loop termination require careful consideration.

    !!! Flow Error : 41 : Expression is stable, of index 0.

    !!! Flow Error : 41 : Expression is stable, of index 1.

    !!! Flow Error : 41 : Expression is stable, of index greater than 1.

    The expression, occurring within a loop, is either a case expression or a condition (Boolean-valued expression) associated with an if-statement, whose value determines the path taken through the body of the loop, but does not (directly) cause loop termination. Information flow analysis shows that the expression does not vary as the loop is executed, so the same branch of the case or if statement will be taken on every loop iteration. An Index of 0 means that the expression is immediately stable, 1 means it becomes stable after the first pass through the loop and so on. The stability index is given with reference to the loop most closely-containing the expression. Stable conditionals are not necessarily an error but do require careful evaluation; they can often be removed by lifting them outside the loop.
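
    A sketch (hypothetical names) of a stable conditional: Flag cannot change inside the loop, so the same branch is taken on every iteration and the test could be lifted outside the loop:

        subtype Slot   is Integer range 1 .. 8;
        type    Buffer is array (Slot) of Integer;

        procedure Fill (Flag : in Boolean; B : in out Buffer)
        --# derives B from B, Flag;
        is
        begin
           for I in Slot loop
              if Flag then                 -- Flow Error 41: expression is stable, of index 0
                 B (I) := 1;
              else
                 B (I) := 0;
              end if;
           end loop;
        end Fill;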

    !!! Flow Error : 10 : Ineffective statement.

    Execution of this statement cannot affect the final value of any exported variable of the subprogram in which it occurs. The cause may be a data-flow anomaly (i.e. the statement could be an assignment to a variable, which is always updated again before it is read). However, statements may be ineffective for other reasons - see Section 4.1 of Appendix A.

    !!! Flow Error : 10 : Assignment to XXX is ineffective.

    This message always relates to a procedure call or an assignment to a record. The variable XXX may be an actual parameter corresponding to a formal one that is exported; otherwise XXX is an exported global variable of the procedure. The message indicates that the updating of XXX, as a result of the procedure call, has no effect on any final values of exported variables of the calling subprogram. Where the ineffective assignment is expected (e.g. calling a supplied procedure that returns more parameters than are needed for the immediate purpose), it can be a useful convention to choose a distinctive name, such as "Unused" for the actual parameter concerned. The message "Assignment to Unused is ineffective" is then self-documenting.

    *** Illegal Structure : 1 : An exit statement may not occur here.

    Exit statements must be of the form "exit when c;" where the closest enclosing statement is a loop or "if c then S; exit;" where the if statement has no else part and its closest enclosing statement is a loop. See the SPARK Definition for details.
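
    Sketches of the two permitted forms (Fetch, Item and Close_Channel are hypothetical); any other placement of an exit statement is rejected:

        loop
           Fetch (Item);
           exit when Item = 0;              -- permitted: "exit when c;" directly inside the loop
        end loop;

        loop
           Fetch (Item);
           if Item = 0 then                 -- permitted: "if c then ... exit;" with no else part,
              Close_Channel;                -- whose closest enclosing statement is the loop
              exit;
           end if;
        end loop;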

    *** Illegal Structure : 2 : A return statement may not occur here.

    A return statement may only occur as the last statement of a function.

    *** Illegal Structure : 3 : The last statement of this function is not a return statement.

    SPARK requires that the last statement of a function be a return statement.

    *** Illegal Structure : 4 : Return statements may not occur in procedure subprograms.

    --- note : 1 : This dependency relation was not used for this analysis and has not been checked for accuracy.

    Issued when information flow analysis is not performed and when modes were specified in the global annotation. It is a reminder that the dependencies specified in this annotation (including whether each variable is an import or an export) have not been checked against the code, and may therefore be incorrect. (warning control file keyword: notes).

    --- note : 2 : This dependency relation has been used only to identify imports and exports, dependencies have been ignored.

    Issued as a reminder when information flow analysis is not performed in SPARK 83. The dependencies specified in this annotation have not been checked against the code, and may therefore be incorrect. (warning control file keyword: notes).

    --- note : 3 : The deferred constant Null_Address has been implicitly defined here.

    Issued as a reminder that the declaration of the type Address within the target configuration file implicitly defines a deferred constant Null_Address of type Address. (warning control file keyword: notes).

    --- note : 4 : The constant Default_Priority, of type Priority, has been implicitly defined here.

    Issued as a reminder that the declaration of the subtype Priority within the target configuration file implicitly defines a constant Default_Priority, of type Priority, with the value (Priority'First + Priority'Last) / 2. (warning control file keyword: notes).

    !!! Program has a cyclic path without an assertion.

    SPARK generates VCs for paths between cutpoints in the code; these must be chosen by the developer in such a way that every loop traverses at least one cutpoint. If the SPARK Examiner detects a loop which is not broken by a cutpoint, it cannot generate verification conditions for the subprogram in which the loop is located, and instead, issues this warning. This can only be corrected by formulating a suitable loop-invariant assertion for the loop and including it as an assertion in the SPARK text at the appropriate point.
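
    A sketch (hypothetical names) of a while loop cut by a user-supplied assertion; without the assertion the VC Generator would issue this message for the enclosing subprogram:

        procedure Halve_To_Zero (X : in out Natural)
        --# derives X from X;
        --# post    X = 0;
        is
        begin
           while X > 0 loop
              --# assert X in Natural;      -- loop-invariant assertion breaking the cyclic path
              X := X / 2;
           end loop;
        end Halve_To_Zero;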

    !!! Unexpected node kind in main tree.

    This message indicates corruption of the syntax tree being processed by the VC Generator. It should not be seen in normal operation.

    spark-2012.0.deb/examiner/dictionary-initialize.adb0000644000175000017500000034417711753202336021245 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Maths, CommandLineData; separate (Dictionary) procedure Initialize (Write_To_File : in Boolean) is File : SPARK_IO.File_Type := SPARK_IO.Null_File; Status : SPARK_IO.File_Status; procedure Assign_Initial_Value --# global out Dict; --# derives Dict from ; is begin Dynamic_Symbol_Table.Initialize (Dict.Symbols); Dict.File_Name := E_Strings.Empty_String; Dict.TemporaryFile := SPARK_IO.Null_File; Dict.Main := Main_Program_Set' (Subprogram => RawDict.Null_Subprogram_Info_Ref, Priority_Given => False, The_Priority => LexTokenManager.Null_String); Dict.Packages := Package_Set' (Package_Standard => RawDict.Null_Package_Info_Ref, Package_ASCII => RawDict.Null_Package_Info_Ref, Package_Ada => RawDict.Null_Package_Info_Ref, Package_Ada_Characters => RawDict.Null_Package_Info_Ref, Package_Ada_Characters_Latin1 => RawDict.Null_Package_Info_Ref, Package_Real_Time => RawDict.Null_Package_Info_Ref, Package_Synchronous_Task_Control => RawDict.Null_Package_Info_Ref, Package_Interrupts => RawDict.Null_Package_Info_Ref); --# accept Flow, 32, Dict.Types, "Flow errors due to undefined components of Dict OK" & --# Flow, 31, Dict.Types, "Flow errors due to undefined components of Dict OK" & --# Flow, 602, Dict, Dict.Types, "Flow errors due to undefined components of Dict OK" & --# Flow, 32, Dict.Subprograms, "Flow errors due to undefined components of Dict OK" & --# Flow, 31, Dict.Subprograms, "Flow errors due to undefined components of Dict OK" & --# Flow, 602, Dict, Dict.Subprograms, "Flow errors due to undefined components of Dict OK" & --# Flow, 32, Dict.Null_Variable, "Flow errors due to undefined components of Dict OK" & --# Flow, 31, Dict.Null_Variable, "Flow errors due to undefined components of Dict OK" & --# Flow, 602, Dict, Dict.Null_Variable, "Flow errors due to undefined components of Dict OK" & --# Flow, 32, Dict.The_Partition, "Flow errors due to undefined components of Dict OK" & --# Flow, 31, Dict.The_Partition, "Flow errors due to undefined components of Dict OK" & --# Flow, 602, Dict, Dict.The_Partition, "Flow errors due to undefined components of Dict OK"; end Assign_Initial_Value; -------------------------------------------------------------------------------- procedure Insert_Lex_String (Name : in String; Token : out LexTokenManager.Lex_String) --# global in out LexTokenManager.State; --# derives LexTokenManager.State, --# Token from LexTokenManager.State, --# Name; is begin LexTokenManager.Insert_Examiner_String (Str 
=> E_Strings.Copy_String (Str => Name), Lex_Str => Token); end Insert_Lex_String; -------------------------------------------------------------------------------- procedure Add_Unknown_Type --# global in out Dict; --# in out LexTokenManager.State; --# derives Dict, --# LexTokenManager.State from *, --# LexTokenManager.State; is Token : LexTokenManager.Lex_String; Type_Mark : RawDict.Type_Info_Ref; begin Insert_Lex_String (Name => "unknown", Token => Token); RawDict.Create_Type (Name => Token, The_Declaration => RawDict.Null_Declaration_Info_Ref, Is_Private => False, Is_Announcement => False, Comp_Unit => ContextManager.NullUnit, Loc => LexTokenManager.Null_Token_Position, Type_Mark => Type_Mark); RawDict.Set_Type_Discriminant (Type_Mark => Type_Mark, Discriminant => Unknown_Type_Item); RawDict.Set_Type_Static (Type_Mark => Type_Mark, Static => True); Dict.Types.Unknown_Type_Mark := Type_Mark; end Add_Unknown_Type; -------------------------------------------------------------------------------- procedure Add_Predefined_Package_Standard --# global in out Dict; --# derives Dict from *; is The_Specification : RawDict.Declaration_Info_Ref; The_Package : RawDict.Package_Info_Ref; begin RawDict.Create_Declaration (Context => ProgramContext, Scope => Set_Visibility (The_Visibility => Visible, The_Unit => NullSymbol), Comp_Unit => ContextManager.NullUnit, Loc => LexTokenManager.Null_Token_Position, The_Declaration => The_Specification); RawDict.Create_Package (Name => LexTokenManager.Standard_Token, The_Declaration => The_Specification, Comp_Unit => ContextManager.NullUnit, Loc => LexTokenManager.Null_Token_Position, The_Package => The_Package); Dict.Packages.Package_Standard := The_Package; end Add_Predefined_Package_Standard; -------------------------------------------------------------------------------- procedure Add_Package_Standard --# global in CommandLineData.Content; --# in out Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Dict, --# LexTokenManager.State from *, --# CommandLineData.Content, --# LexTokenManager.State & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dict, --# LexTokenManager.State; is procedure Add_Predefined_Type (Token : in LexTokenManager.Lex_String; Context : in Contexts; Type_Mark : out RawDict.Type_Info_Ref) --# global in out Dict; --# derives Dict, --# Type_Mark from Context, --# Dict, --# Token; is The_Declaration : RawDict.Declaration_Info_Ref; begin Add_Declaration (Comp_Unit => ContextManager.NullUnit, Loc => Null_Location, Scope => Predefined_Scope, Context => Context, The_Declaration => The_Declaration); RawDict.Create_Type (Name => Token, The_Declaration => The_Declaration, Is_Private => False, Is_Announcement => False, Comp_Unit => ContextManager.NullUnit, Loc => LexTokenManager.Null_Token_Position, Type_Mark => Type_Mark); end Add_Predefined_Type; -------------------------------------------------------------------------------- procedure Add_Universal_Types --# global in out Dict; --# derives Dict from *; is procedure Add_Universal_Integer_Type --# global in out Dict; --# derives Dict from *; is Type_Mark : RawDict.Type_Info_Ref; begin Add_Predefined_Type (Token => LexTokenManager.Universal_Integer_Token, Context => ProofContext, Type_Mark => Type_Mark); Add_Integer_Type_Mark (Type_Mark => Type_Mark, Static => True, Lower => LexTokenManager.Null_String, Upper => LexTokenManager.Null_String); Dict.Types.The_Universal_Types (Universal_Integer_Type) := Type_Mark; end Add_Universal_Integer_Type; 
-------------------------------------------------------------------------------- procedure Add_Universal_Real_Type --# global in out Dict; --# derives Dict from *; is Type_Mark : RawDict.Type_Info_Ref; begin Add_Predefined_Type (Token => LexTokenManager.Universal_Real_Token, Context => ProofContext, Type_Mark => Type_Mark); Add_Floating_Point_Type_Mark (Type_Mark => Type_Mark, Static => True); Dict.Types.The_Universal_Types (Universal_Real_Type) := Type_Mark; end Add_Universal_Real_Type; -------------------------------------------------------------------------------- procedure Add_Universal_Fixed_Type --# global in out Dict; --# derives Dict from *; is Type_Mark : RawDict.Type_Info_Ref; begin Add_Predefined_Type (Token => LexTokenManager.Universal_Fixed_Token, Context => ProofContext, Type_Mark => Type_Mark); Add_Fixed_Point_Type_Mark (Type_Mark => Type_Mark, Static => True); Dict.Types.The_Universal_Types (Universal_Fixed_Type) := Type_Mark; end Add_Universal_Fixed_Type; begin -- Add_Universal_Types Add_Universal_Integer_Type; Add_Universal_Real_Type; Add_Universal_Fixed_Type; end Add_Universal_Types; -------------------------------------------------------------------------------- procedure Add_Predefined_Type_Marks --# global in CommandLineData.Content; --# in out Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Dict, --# LexTokenManager.State from *, --# CommandLineData.Content, --# LexTokenManager.State & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dict, --# LexTokenManager.State; is procedure Set_Type_Is_Predefined (Type_Mark : in RawDict.Type_Info_Ref) --# global in out Dict; --# derives Dict from *, --# Type_Mark; is begin RawDict.Set_Type_Derived (Type_Mark => Type_Mark, Is_Derived => False); end Set_Type_Is_Predefined; -------------------------------------------------------------------------------- procedure Add_Predefined_Character_Type --# global in CommandLineData.Content; --# in out Dict; --# in out LexTokenManager.State; --# derives Dict, --# LexTokenManager.State from *, --# CommandLineData.Content, --# LexTokenManager.State; is Type_Mark : RawDict.Type_Info_Ref; Store_Rep : LexTokenManager.Lex_String; begin Add_Predefined_Type (Token => LexTokenManager.Character_Token, Context => ProgramContext, Type_Mark => Type_Mark); Add_Enumeration_Type_Mark (Type_Mark => Type_Mark, Static => True); -- Set bounds for character type for use with 'first, 'last RawDict.Set_Type_Lower (Type_Mark => Type_Mark, Lower => LexTokenManager.Zero_Value); case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => Maths.StorageRep (Maths.IntegerToValue (127), Store_Rep); RawDict.Set_Type_Upper (Type_Mark => Type_Mark, Upper => Store_Rep); when CommandLineData.SPARK95_Onwards => Maths.StorageRep (Maths.IntegerToValue (255), Store_Rep); RawDict.Set_Type_Upper (Type_Mark => Type_Mark, Upper => Store_Rep); end case; Dict.Types.The_Predefined_Types (Predefined_Character_Type) := Type_Mark; end Add_Predefined_Character_Type; -------------------------------------------------------------------------------- procedure Add_Predefined_Boolean_Type --# global in out Dict; --# in out LexTokenManager.State; --# derives Dict, --# LexTokenManager.State from *, --# LexTokenManager.State; is Type_Mark : RawDict.Type_Info_Ref; procedure Add_Predefined_Enumeration_Literal (Name : in String; Value : in LexTokenManager.Lex_String; The_Type : in RawDict.Type_Info_Ref) --# global in out Dict; --# in out LexTokenManager.State; --# derives Dict from *, 
--# LexTokenManager.State, --# Name, --# The_Type, --# Value & --# LexTokenManager.State from *, --# Name; is Token : LexTokenManager.Lex_String; The_Enumeration_Literal : RawDict.Enumeration_Literal_Info_Ref; pragma Unreferenced (The_Enumeration_Literal); begin Insert_Lex_String (Name => Name, Token => Token); --# accept Flow, 10, The_Enumeration_Literal, "Expected ineffective assignment to OK"; Add_Enumeration_Literal (Name => Token, Comp_Unit => ContextManager.NullUnit, Declaration => Null_Location, Position => Value, Type_Mark => The_Type, The_Enumeration_Literal => The_Enumeration_Literal); --# end accept; --# accept Flow, 33, The_Enumeration_Literal, "Expected to be neither referenced nor exported"; end Add_Predefined_Enumeration_Literal; begin -- Add_Predefined_Boolean_Type Add_Predefined_Type (Token => LexTokenManager.Boolean_Token, Context => ProgramContext, Type_Mark => Type_Mark); Add_Enumeration_Type_Mark (Type_Mark => Type_Mark, Static => True); Add_Predefined_Enumeration_Literal (Name => "False", Value => LexTokenManager.False_Token, The_Type => Type_Mark); Add_Predefined_Enumeration_Literal (Name => "True", Value => LexTokenManager.True_Token, The_Type => Type_Mark); Dict.Types.The_Predefined_Types (Predefined_Boolean_Type) := Type_Mark; Dict.Types.The_Predefined_Literals (Predefined_False) := RawDict.Get_Type_First_Enumeration_Literal (Type_Mark => Type_Mark); Dict.Types.The_Predefined_Literals (Predefined_True) := RawDict.Get_Type_Last_Enumeration_Literal (Type_Mark => Type_Mark); end Add_Predefined_Boolean_Type; -------------------------------------------------------------------------------- procedure Add_Predefined_Integer_Type --# global in CommandLineData.Content; --# in out Dict; --# in out LexTokenManager.State; --# derives Dict, --# LexTokenManager.State from *, --# CommandLineData.Content, --# LexTokenManager.State; is Token : LexTokenManager.Lex_String; Type_Mark : RawDict.Type_Info_Ref; begin Add_Predefined_Type (Token => LexTokenManager.Integer_Token, Context => ProgramContext, Type_Mark => Type_Mark); Add_Integer_Type_Mark (Type_Mark => Type_Mark, Static => True, Lower => LexTokenManager.Null_String, Upper => LexTokenManager.Null_String); Set_Type_Is_Predefined (Type_Mark => Type_Mark); Dict.Types.The_Predefined_Types (Predefined_Integer_Type) := Type_Mark; if CommandLineData.Content.Target_Data then Insert_Lex_String (Name => "Long_Integer", Token => Token); Add_Predefined_Type (Token => Token, Context => ProgramContext, Type_Mark => Type_Mark); Add_Integer_Type_Mark (Type_Mark => Type_Mark, Static => True, Lower => LexTokenManager.Null_String, Upper => LexTokenManager.Null_String); Set_Type_Is_Predefined (Type_Mark => Type_Mark); Dict.Types.The_Predefined_Types (Predefined_Long_Integer_Type) := Type_Mark; else Dict.Types.The_Predefined_Types (Predefined_Long_Integer_Type) := RawDict.Null_Type_Info_Ref; end if; end Add_Predefined_Integer_Type; -------------------------------------------------------------------------------- procedure Add_Predefined_Float_Type --# global in CommandLineData.Content; --# in out Dict; --# in out LexTokenManager.State; --# derives Dict, --# LexTokenManager.State from *, --# CommandLineData.Content, --# LexTokenManager.State; is Token : LexTokenManager.Lex_String; Type_Mark : RawDict.Type_Info_Ref; begin Add_Predefined_Type (Token => LexTokenManager.Float_Token, Context => ProgramContext, Type_Mark => Type_Mark); Add_Floating_Point_Type_Mark (Type_Mark => Type_Mark, Static => True); Set_Type_Is_Predefined (Type_Mark => Type_Mark); 
Dict.Types.The_Predefined_Types (Predefined_Float_Type) := Type_Mark; if CommandLineData.Content.Target_Data then Insert_Lex_String (Name => "Long_Float", Token => Token); Add_Predefined_Type (Token => Token, Context => ProgramContext, Type_Mark => Type_Mark); Add_Floating_Point_Type_Mark (Type_Mark => Type_Mark, Static => True); Set_Type_Is_Predefined (Type_Mark => Type_Mark); Dict.Types.The_Predefined_Types (Predefined_Long_Float_Type) := Type_Mark; else Dict.Types.The_Predefined_Types (Predefined_Long_Float_Type) := RawDict.Null_Type_Info_Ref; end if; end Add_Predefined_Float_Type; -------------------------------------------------------------------------------- procedure Add_Duration --# global in out Dict; --# derives Dict from *; is Type_Mark : RawDict.Type_Info_Ref; begin Add_Predefined_Type (Token => LexTokenManager.Duration_Token, Context => ProgramContext, Type_Mark => Type_Mark); Add_Fixed_Point_Type_Mark (Type_Mark => Type_Mark, Static => True); Dict.Types.The_Predefined_Types (Predefined_Duration_Type) := Type_Mark; end Add_Duration; -------------------------------------------------------------------------------- procedure Add_Predefined_String_Type --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from * & --# SPARK_IO.File_Sys from *, --# Dict, --# LexTokenManager.State; is Type_Mark : RawDict.Type_Info_Ref; The_Array_Index : RawDict.Array_Index_Info_Ref; pragma Unreferenced (The_Array_Index); begin Add_Predefined_Type (Token => LexTokenManager.String_Token, Context => ProgramContext, Type_Mark => Type_Mark); Add_Array_Type_Mark (Type_Mark => Type_Mark, Component_Type => Get_Predefined_Character_Type, Static => False); --# accept Flow, 10, The_Array_Index, "Expected ineffective assignment to OK"; Add_Array_Index (The_Array_Type => Type_Mark, Index_Type => Get_Predefined_Positive_Subtype, Comp_Unit => ContextManager.NullUnit, Declaration => Null_Location, The_Array_Index => The_Array_Index); --# end accept; Dict.Types.The_Predefined_Types (Predefined_String_Type) := Type_Mark; --# accept Flow, 33, The_Array_Index, "Expected to be neither referenced nor exported"; end Add_Predefined_String_Type; -------------------------------------------------------------------------------- procedure Add_Predefined_Natural_Subtype --# global in out Dict; --# derives Dict from *; is Type_Mark : RawDict.Type_Info_Ref; begin Add_Predefined_Type (Token => LexTokenManager.Natural_Token, Context => ProgramContext, Type_Mark => Type_Mark); RawDict.Set_Type_Parent (Type_Mark => Type_Mark, Parent => Get_Predefined_Integer_Type); Add_Integer_Type_Mark (Type_Mark => Type_Mark, Static => True, Lower => LexTokenManager.Zero_Value, Upper => LexTokenManager.Null_String); Dict.Types.The_Predefined_Types (Predefined_Natural_Subtype) := Type_Mark; end Add_Predefined_Natural_Subtype; -------------------------------------------------------------------------------- procedure Add_Predefined_Positive_Subtype --# global in out Dict; --# derives Dict from *; is Type_Mark : RawDict.Type_Info_Ref; begin Add_Predefined_Type (Token => LexTokenManager.Positive_Token, Context => ProgramContext, Type_Mark => Type_Mark); RawDict.Set_Type_Parent (Type_Mark => Type_Mark, Parent => Get_Predefined_Integer_Type); Add_Integer_Type_Mark (Type_Mark => Type_Mark, Static => True, Lower => LexTokenManager.One_Value, Upper => LexTokenManager.Null_String); Dict.Types.The_Predefined_Types (Predefined_Positive_Subtype) := Type_Mark; end Add_Predefined_Positive_Subtype; begin -- 
Add_Predefined_Type_Marks Add_Predefined_Boolean_Type; Add_Predefined_Integer_Type; Add_Predefined_Float_Type; Add_Predefined_Character_Type; Add_Predefined_Natural_Subtype; Add_Predefined_Positive_Subtype; Add_Predefined_String_Type; if not CommandLineData.Content.No_Duration then Add_Duration; end if; end Add_Predefined_Type_Marks; -------------------------------------------------------------------------------- procedure Add_Null_Own_Variable --# global in out Dict; --# in out LexTokenManager.State; --# derives Dict, --# LexTokenManager.State from *, --# LexTokenManager.State; is The_Variable : RawDict.Variable_Info_Ref; The_Own_Variable : RawDict.Own_Variable_Info_Ref; Null_Str : LexTokenManager.Lex_String; begin Insert_Lex_String (Name => "null", Token => Null_Str); RawDict.Create_Variable (Name => Null_Str, Comp_Unit => ContextManager.NullUnit, Loc => LexTokenManager.Null_Token_Position, The_Variable => The_Variable); RawDict.Create_Own_Variable (Variable => The_Variable, Owner => RawDict.Get_Package_Symbol (Get_Predefined_Package_Standard), Comp_Unit => ContextManager.NullUnit, Loc => LexTokenManager.Null_Token_Position, The_Own_Variable => The_Own_Variable); RawDict.Set_Variable_Own_Variable (The_Variable => The_Variable, Own_Variable => The_Own_Variable); RawDict.Set_Package_Own_Variables (The_Package => Get_Predefined_Package_Standard, Own_Variables => The_Own_Variable); RawDict.Set_Own_Variable_Mode (The_Own_Variable => The_Own_Variable, Mode => OutMode); Dict.Null_Variable := The_Variable; end Add_Null_Own_Variable; begin -- Add_Package_Standard Add_Predefined_Package_Standard; Add_Universal_Types; Add_Predefined_Type_Marks; -- package standard has a mode out own variable called "Null" which is used -- as a "data sink" for operations that affect only things outside the SPARK -- boundary such as delay statements or calls to procedures which derive null -- from something. Add_Null_Own_Variable; end Add_Package_Standard; -------------------------------------------------------------------------------- procedure Add_Predefined_Constant (Name : in String; Asc_Code : in Natural; The_Package : in RawDict.Package_Info_Ref) --# global in out Dict; --# in out LexTokenManager.State; --# derives Dict from *, --# Asc_Code, --# LexTokenManager.State, --# Name, --# The_Package & --# LexTokenManager.State from *, --# Asc_Code, --# Name; is Token, Store_Rep : LexTokenManager.Lex_String; The_Constant : RawDict.Constant_Info_Ref; The_Declaration : RawDict.Declaration_Info_Ref; begin Add_Declaration (Comp_Unit => ContextManager.NullUnit, Loc => Null_Location, Scope => Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (The_Package)), Context => ProgramContext, The_Declaration => The_Declaration); Insert_Lex_String (Name => Name, Token => Token); RawDict.Create_Constant (Name => Token, Type_Mark => Get_Predefined_Character_Type, Static => True, The_Declaration => The_Declaration, Is_Deferred => False, Comp_Unit => ContextManager.NullUnit, Loc => LexTokenManager.Null_Token_Position, The_Constant => The_Constant); -- Add values for predefined constants Maths.StorageRep (Maths.IntegerToValue (Asc_Code), Store_Rep); RawDict.Set_Constant_Value (The_Constant => The_Constant, Value => Store_Rep); end Add_Predefined_Constant; ------------------------------------- -- Adds Character constants which are common to both -- SPARK83 and SPARK95 _AND_ have the same identifier - this -- is a subset of the first 127 Characters. 
procedure Add_Common_Characters (To_The_Package : in RawDict.Package_Info_Ref) --# global in out Dict; --# in out LexTokenManager.State; --# derives Dict from *, --# LexTokenManager.State, --# To_The_Package & --# LexTokenManager.State from *; is begin Add_Predefined_Constant (Name => "NUL", Asc_Code => 0, The_Package => To_The_Package); Add_Predefined_Constant (Name => "SOH", Asc_Code => 1, The_Package => To_The_Package); Add_Predefined_Constant (Name => "STX", Asc_Code => 2, The_Package => To_The_Package); Add_Predefined_Constant (Name => "ETX", Asc_Code => 3, The_Package => To_The_Package); Add_Predefined_Constant (Name => "EOT", Asc_Code => 4, The_Package => To_The_Package); Add_Predefined_Constant (Name => "ENQ", Asc_Code => 5, The_Package => To_The_Package); Add_Predefined_Constant (Name => "ACK", Asc_Code => 6, The_Package => To_The_Package); Add_Predefined_Constant (Name => "BEL", Asc_Code => 7, The_Package => To_The_Package); Add_Predefined_Constant (Name => "BS", Asc_Code => 8, The_Package => To_The_Package); Add_Predefined_Constant (Name => "HT", Asc_Code => 9, The_Package => To_The_Package); Add_Predefined_Constant (Name => "LF", Asc_Code => 10, The_Package => To_The_Package); Add_Predefined_Constant (Name => "VT", Asc_Code => 11, The_Package => To_The_Package); Add_Predefined_Constant (Name => "FF", Asc_Code => 12, The_Package => To_The_Package); Add_Predefined_Constant (Name => "CR", Asc_Code => 13, The_Package => To_The_Package); Add_Predefined_Constant (Name => "SO", Asc_Code => 14, The_Package => To_The_Package); Add_Predefined_Constant (Name => "SI", Asc_Code => 15, The_Package => To_The_Package); Add_Predefined_Constant (Name => "DLE", Asc_Code => 16, The_Package => To_The_Package); Add_Predefined_Constant (Name => "DC1", Asc_Code => 17, The_Package => To_The_Package); Add_Predefined_Constant (Name => "DC2", Asc_Code => 18, The_Package => To_The_Package); Add_Predefined_Constant (Name => "DC3", Asc_Code => 19, The_Package => To_The_Package); Add_Predefined_Constant (Name => "DC4", Asc_Code => 20, The_Package => To_The_Package); Add_Predefined_Constant (Name => "NAK", Asc_Code => 21, The_Package => To_The_Package); Add_Predefined_Constant (Name => "SYN", Asc_Code => 22, The_Package => To_The_Package); Add_Predefined_Constant (Name => "ETB", Asc_Code => 23, The_Package => To_The_Package); Add_Predefined_Constant (Name => "CAN", Asc_Code => 24, The_Package => To_The_Package); Add_Predefined_Constant (Name => "EM", Asc_Code => 25, The_Package => To_The_Package); Add_Predefined_Constant (Name => "SUB", Asc_Code => 26, The_Package => To_The_Package); Add_Predefined_Constant (Name => "ESC", Asc_Code => 27, The_Package => To_The_Package); Add_Predefined_Constant (Name => "FS", Asc_Code => 28, The_Package => To_The_Package); Add_Predefined_Constant (Name => "GS", Asc_Code => 29, The_Package => To_The_Package); Add_Predefined_Constant (Name => "RS", Asc_Code => 30, The_Package => To_The_Package); Add_Predefined_Constant (Name => "US", Asc_Code => 31, The_Package => To_The_Package); --# assert True; Add_Predefined_Constant (Name => "LC_A", Asc_Code => 97, The_Package => To_The_Package); Add_Predefined_Constant (Name => "LC_B", Asc_Code => 98, The_Package => To_The_Package); Add_Predefined_Constant (Name => "LC_C", Asc_Code => 99, The_Package => To_The_Package); Add_Predefined_Constant (Name => "LC_D", Asc_Code => 100, The_Package => To_The_Package); Add_Predefined_Constant (Name => "LC_E", Asc_Code => 101, The_Package => To_The_Package); Add_Predefined_Constant (Name => "LC_F", 
Asc_Code => 102, The_Package => To_The_Package); Add_Predefined_Constant (Name => "LC_G", Asc_Code => 103, The_Package => To_The_Package); Add_Predefined_Constant (Name => "LC_H", Asc_Code => 104, The_Package => To_The_Package); Add_Predefined_Constant (Name => "LC_I", Asc_Code => 105, The_Package => To_The_Package); Add_Predefined_Constant (Name => "LC_J", Asc_Code => 106, The_Package => To_The_Package); Add_Predefined_Constant (Name => "LC_K", Asc_Code => 107, The_Package => To_The_Package); Add_Predefined_Constant (Name => "LC_L", Asc_Code => 108, The_Package => To_The_Package); Add_Predefined_Constant (Name => "LC_M", Asc_Code => 109, The_Package => To_The_Package); Add_Predefined_Constant (Name => "LC_N", Asc_Code => 110, The_Package => To_The_Package); Add_Predefined_Constant (Name => "LC_O", Asc_Code => 111, The_Package => To_The_Package); Add_Predefined_Constant (Name => "LC_P", Asc_Code => 112, The_Package => To_The_Package); Add_Predefined_Constant (Name => "LC_Q", Asc_Code => 113, The_Package => To_The_Package); Add_Predefined_Constant (Name => "LC_R", Asc_Code => 114, The_Package => To_The_Package); Add_Predefined_Constant (Name => "LC_S", Asc_Code => 115, The_Package => To_The_Package); Add_Predefined_Constant (Name => "LC_T", Asc_Code => 116, The_Package => To_The_Package); Add_Predefined_Constant (Name => "LC_U", Asc_Code => 117, The_Package => To_The_Package); Add_Predefined_Constant (Name => "LC_V", Asc_Code => 118, The_Package => To_The_Package); Add_Predefined_Constant (Name => "LC_W", Asc_Code => 119, The_Package => To_The_Package); Add_Predefined_Constant (Name => "LC_X", Asc_Code => 120, The_Package => To_The_Package); Add_Predefined_Constant (Name => "LC_Y", Asc_Code => 121, The_Package => To_The_Package); Add_Predefined_Constant (Name => "LC_Z", Asc_Code => 122, The_Package => To_The_Package); Add_Predefined_Constant (Name => "DEL", Asc_Code => 127, The_Package => To_The_Package); end Add_Common_Characters; -------------------------------------------------------------------------------- procedure Add_Package_ASCII --# global in out Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Dict, --# LexTokenManager.State from *, --# LexTokenManager.State & --# SPARK_IO.File_Sys from *, --# Dict, --# LexTokenManager.State; is Token : LexTokenManager.Lex_String; Package_ASCII : RawDict.Package_Info_Ref; begin Insert_Lex_String (Name => "ASCII", Token => Token); Add_New_Package (Name => Token, Comp_Unit => ContextManager.NullUnit, Declaration => Null_Location, Scope => Predefined_Scope, The_Parent => RawDict.Null_Package_Info_Ref, Child_Sort => Public, The_Package => Package_ASCII); Dict.Packages.Package_ASCII := Package_ASCII; Add_Common_Characters (To_The_Package => Package_ASCII); -- Ada83 Characters whose identifiers differ from those used in Ada95. 
Add_Predefined_Constant (Name => "Exclam", Asc_Code => 33, The_Package => Package_ASCII); Add_Predefined_Constant (Name => "Quotation", Asc_Code => 34, The_Package => Package_ASCII); Add_Predefined_Constant (Name => "Sharp", Asc_Code => 35, The_Package => Package_ASCII); Add_Predefined_Constant (Name => "Dollar", Asc_Code => 36, The_Package => Package_ASCII); Add_Predefined_Constant (Name => "Percent", Asc_Code => 37, The_Package => Package_ASCII); Add_Predefined_Constant (Name => "Ampersand", Asc_Code => 38, The_Package => Package_ASCII); Add_Predefined_Constant (Name => "Colon", Asc_Code => 58, The_Package => Package_ASCII); Add_Predefined_Constant (Name => "Semicolon", Asc_Code => 59, The_Package => Package_ASCII); Add_Predefined_Constant (Name => "Query", Asc_Code => 63, The_Package => Package_ASCII); Add_Predefined_Constant (Name => "At_Sign", Asc_Code => 64, The_Package => Package_ASCII); Add_Predefined_Constant (Name => "L_Bracket", Asc_Code => 91, The_Package => Package_ASCII); Add_Predefined_Constant (Name => "Back_Slash", Asc_Code => 92, The_Package => Package_ASCII); Add_Predefined_Constant (Name => "R_Bracket", Asc_Code => 93, The_Package => Package_ASCII); Add_Predefined_Constant (Name => "Circumflex", Asc_Code => 94, The_Package => Package_ASCII); Add_Predefined_Constant (Name => "Underline", Asc_Code => 95, The_Package => Package_ASCII); Add_Predefined_Constant (Name => "Grave", Asc_Code => 96, The_Package => Package_ASCII); Add_Predefined_Constant (Name => "L_Brace", Asc_Code => 123, The_Package => Package_ASCII); Add_Predefined_Constant (Name => "Bar", Asc_Code => 124, The_Package => Package_ASCII); Add_Predefined_Constant (Name => "R_Brace", Asc_Code => 125, The_Package => Package_ASCII); Add_Predefined_Constant (Name => "Tilde", Asc_Code => 126, The_Package => Package_ASCII); end Add_Package_ASCII; -------------------------------------------------------------------------------- procedure Add_Package_Ada --# global in out Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Dict, --# LexTokenManager.State from *, --# LexTokenManager.State & --# SPARK_IO.File_Sys from *, --# Dict, --# LexTokenManager.State; is Token : LexTokenManager.Lex_String; Package_Ada, Package_Characters, Package_Latin1 : RawDict.Package_Info_Ref; procedure Add_Lower_Half_Control_And_Graphic_Characters (Package_Latin1 : in RawDict.Package_Info_Ref) --# global in out Dict; --# in out LexTokenManager.State; --# derives Dict from *, --# LexTokenManager.State, --# Package_Latin1 & --# LexTokenManager.State from *; is begin --ISO 6429 control characters Add_Predefined_Constant (Name => "IS4", Asc_Code => 28, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "IS3", Asc_Code => 29, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "IS2", Asc_Code => 30, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "IS1", Asc_Code => 31, The_Package => Package_Latin1); --ISO 646 graphic characters Add_Predefined_Constant (Name => "Space", Asc_Code => 32, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Exclamation", Asc_Code => 33, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Quotation", Asc_Code => 34, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Number_Sign", Asc_Code => 35, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Dollar_Sign", Asc_Code => 36, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Percent_Sign", Asc_Code => 37, The_Package => 
Package_Latin1); Add_Predefined_Constant (Name => "Ampersand", Asc_Code => 38, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Apostrophe", Asc_Code => 39, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Left_Parenthesis", Asc_Code => 40, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Right_Parenthesis", Asc_Code => 41, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Asterisk", Asc_Code => 42, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Plus_Sign", Asc_Code => 43, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Comma", Asc_Code => 44, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Hyphen", Asc_Code => 45, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Minus_Sign", Asc_Code => 45, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Full_Stop", Asc_Code => 46, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Solidus", Asc_Code => 47, The_Package => Package_Latin1); --Decimal digits '0' through '9' are at positions 48 through 57 Add_Predefined_Constant (Name => "Colon", Asc_Code => 58, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Semicolon", Asc_Code => 59, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Less_Than_Sign", Asc_Code => 60, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Equals_Sign", Asc_Code => 61, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Greater_Than_Sign", Asc_Code => 62, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Question", Asc_Code => 63, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Commercial_At", Asc_Code => 64, The_Package => Package_Latin1); --Letters 'A' through 'Z' are at positions 65 through 90 Add_Predefined_Constant (Name => "Left_Square_Bracket", Asc_Code => 91, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Reverse_Solidus", Asc_Code => 92, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Right_Square_Bracket", Asc_Code => 93, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Circumflex", Asc_Code => 94, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Low_Line", Asc_Code => 95, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Grave", Asc_Code => 96, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Left_Curly_Bracket", Asc_Code => 123, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Vertical_Line", Asc_Code => 124, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Right_Curly_Bracket", Asc_Code => 125, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Tilde", Asc_Code => 126, The_Package => Package_Latin1); end Add_Lower_Half_Control_And_Graphic_Characters; -------------------------------------------------------------------------------- procedure Add_Upper_Half_Ada95_Characters (Package_Latin1 : in RawDict.Package_Info_Ref) --# global in out Dict; --# in out LexTokenManager.State; --# derives Dict from *, --# LexTokenManager.State, --# Package_Latin1 & --# LexTokenManager.State from *; is begin Add_Predefined_Constant (Name => "Reserved_128", Asc_Code => 128, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Reserved_129", Asc_Code => 129, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "BPH", Asc_Code => 130, The_Package => Package_Latin1); Add_Predefined_Constant (Name => 
"NBH", Asc_Code => 131, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Reserved_132", Asc_Code => 132, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "NEL", Asc_Code => 133, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "SSA", Asc_Code => 134, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "ESA", Asc_Code => 135, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "HTS", Asc_Code => 136, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "HTJ", Asc_Code => 137, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "VTS", Asc_Code => 138, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "PLD", Asc_Code => 139, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "PLU", Asc_Code => 140, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "RI", Asc_Code => 141, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "SS2", Asc_Code => 142, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "SS3", Asc_Code => 143, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "DCS", Asc_Code => 144, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "PU1", Asc_Code => 145, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "PU2", Asc_Code => 146, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "STS", Asc_Code => 147, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "CCH", Asc_Code => 148, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "MW", Asc_Code => 149, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "SPA", Asc_Code => 150, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "EPA", Asc_Code => 151, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "SOS", Asc_Code => 152, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Reserved_153", Asc_Code => 153, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "SCI", Asc_Code => 154, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "CSI", Asc_Code => 155, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "ST", Asc_Code => 156, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "OSC", Asc_Code => 157, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "PM", Asc_Code => 158, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "APC", Asc_Code => 159, The_Package => Package_Latin1); --# assert True; --other graphic characters --character positions 160 .. 
175 Add_Predefined_Constant (Name => "No_Break_Space", Asc_Code => 160, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "NBSP", Asc_Code => 160, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Inverted_Exclamation", Asc_Code => 161, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Cent_Sign", Asc_Code => 162, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Pound_Sign", Asc_Code => 163, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Currency_Sign", Asc_Code => 164, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Yen_Sign", Asc_Code => 165, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Broken_Bar", Asc_Code => 166, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Section_Sign", Asc_Code => 167, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Diaeresis", Asc_Code => 168, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Copyright_Sign", Asc_Code => 169, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Feminine_Ordinal_Indicator", Asc_Code => 170, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Left_Angle_Quotation", Asc_Code => 171, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Not_Sign", Asc_Code => 172, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Soft_Hyphen", Asc_Code => 173, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Registered_Trade_Mark_Sign", Asc_Code => 174, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Macron", Asc_Code => 175, The_Package => Package_Latin1); --# assert True; --character positions 176 .. 191 Add_Predefined_Constant (Name => "Degree_Sign", Asc_Code => 176, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Ring_Above", Asc_Code => 177, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Plus_Minus_Sign", Asc_Code => 177, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Superscript_Two", Asc_Code => 178, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Superscript_Three", Asc_Code => 179, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Acute", Asc_Code => 180, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Micro_Sign", Asc_Code => 181, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Pilcrow_Sign", Asc_Code => 182, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Paragraph_Sign", Asc_Code => 182, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Middle_Dot", Asc_Code => 183, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Cedilla", Asc_Code => 184, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Superscript_One", Asc_Code => 185, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Masculine_Ordinal_Indicator", Asc_Code => 186, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Right_Angle_Quotation", Asc_Code => 187, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Fraction_One_Quarter", Asc_Code => 188, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Fraction_One_Half", Asc_Code => 189, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Fraction_Three_Quarters", Asc_Code => 190, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Inverted_Question", Asc_Code => 191, The_Package => Package_Latin1); --# 
assert True; --character positions 192 .. 207 Add_Predefined_Constant (Name => "UC_A_Grave", Asc_Code => 192, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_A_Acute", Asc_Code => 193, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_A_Circumflex", Asc_Code => 194, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_A_Tilde", Asc_Code => 195, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_A_Diaeresis", Asc_Code => 196, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_A_Ring", Asc_Code => 197, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_AE_Diphthong", Asc_Code => 198, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_C_Cedilla", Asc_Code => 199, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_E_Grave", Asc_Code => 200, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_E_Acute", Asc_Code => 201, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_E_Circumflex", Asc_Code => 202, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_E_Diaeresis", Asc_Code => 203, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_I_Grave", Asc_Code => 204, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_I_Acute", Asc_Code => 205, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_I_Circumflex", Asc_Code => 206, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_I_Diaeresis", Asc_Code => 207, The_Package => Package_Latin1); --# assert True; --character positions 208 .. 223 Add_Predefined_Constant (Name => "UC_Icelandic_Eth", Asc_Code => 208, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_N_Tilde", Asc_Code => 209, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_O_Grave", Asc_Code => 210, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_O_Acute", Asc_Code => 211, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_O_Circumflex", Asc_Code => 212, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_O_Tilde", Asc_Code => 213, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_O_Diaeresis", Asc_Code => 214, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Multiplication_Sign", Asc_Code => 215, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_O_Oblique_Stroke", Asc_Code => 216, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_U_Grave", Asc_Code => 217, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_U_Acute", Asc_Code => 218, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_U_Circumflex", Asc_Code => 219, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_U_Diaeresis", Asc_Code => 220, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_Y_Acute", Asc_Code => 221, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "UC_Icelandic_Thorn", Asc_Code => 222, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_German_Sharp_S", Asc_Code => 223, The_Package => Package_Latin1); --# assert True; --character positions 224 .. 
239 Add_Predefined_Constant (Name => "LC_A_Grave", Asc_Code => 224, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_A_Acute", Asc_Code => 225, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_A_Circumflex", Asc_Code => 226, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_A_Tilde", Asc_Code => 227, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_A_Diaeresis", Asc_Code => 228, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_A_Ring", Asc_Code => 229, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_AE_Diphthong", Asc_Code => 230, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_C_Cedilla", Asc_Code => 231, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_E_Grave", Asc_Code => 232, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_E_Acute", Asc_Code => 233, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_E_Circumflex", Asc_Code => 234, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_E_Diaeresis", Asc_Code => 235, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_I_Grave", Asc_Code => 236, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_I_Acute", Asc_Code => 237, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_I_Circumflex", Asc_Code => 238, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_I_Diaeresis", Asc_Code => 239, The_Package => Package_Latin1); --# assert True; --character positions 240 .. 255 Add_Predefined_Constant (Name => "LC_Icelandic_Eth", Asc_Code => 240, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_N_Tilde", Asc_Code => 241, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_O_Grave", Asc_Code => 242, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_O_Acute", Asc_Code => 243, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_O_Circumflex", Asc_Code => 244, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_O_Tilde", Asc_Code => 245, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_O_Diaeresis", Asc_Code => 246, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "Division_Sign", Asc_Code => 247, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_O_Oblique_Stroke", Asc_Code => 248, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_U_Grave", Asc_Code => 249, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_U_Acute", Asc_Code => 250, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_U_Circumflex", Asc_Code => 251, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_U_Diaeresis", Asc_Code => 252, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_Y_Acute", Asc_Code => 253, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_Icelandic_Thorn", Asc_Code => 254, The_Package => Package_Latin1); Add_Predefined_Constant (Name => "LC_Y_Diaeresis", Asc_Code => 255, The_Package => Package_Latin1); end Add_Upper_Half_Ada95_Characters; begin -- Add_Package_Ada Insert_Lex_String (Name => "Ada", Token => Token); Add_New_Package (Name => Token, Comp_Unit => ContextManager.NullUnit, Declaration => Null_Location, Scope => GlobalScope, The_Parent => RawDict.Null_Package_Info_Ref, Child_Sort => Public, The_Package => Package_Ada); 
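-- Note: together with the two Add_New_Package calls that follow, this builds
-- the predefined hierarchy Ada -> Ada.Characters -> Ada.Characters.Latin_1,
-- i.e. roughly (sketch only, not the actual package texts):
--    package Ada is ... end Ada;
--    package Ada.Characters is ... end Ada.Characters;
--    package Ada.Characters.Latin_1 is ... end Ada.Characters.Latin_1;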
Dict.Packages.Package_Ada := Package_Ada; Insert_Lex_String (Name => "Characters", Token => Token); Add_New_Package (Name => Token, Comp_Unit => ContextManager.NullUnit, Declaration => Null_Location, Scope => GlobalScope, The_Parent => Package_Ada, Child_Sort => Public, The_Package => Package_Characters); Dict.Packages.Package_Ada_Characters := Package_Characters; Insert_Lex_String (Name => "Latin_1", Token => Token); Add_New_Package (Name => Token, Comp_Unit => ContextManager.NullUnit, Declaration => Null_Location, Scope => GlobalScope, The_Parent => Package_Characters, Child_Sort => Public, The_Package => Package_Latin1); Dict.Packages.Package_Ada_Characters_Latin1 := Package_Latin1; --control characters Add_Common_Characters (To_The_Package => Package_Latin1); Add_Lower_Half_Control_And_Graphic_Characters (Package_Latin1 => Package_Latin1); Add_Upper_Half_Ada95_Characters (Package_Latin1 => Package_Latin1); end Add_Package_Ada; -------------------------------------------------------------------------------- procedure Add_Package_Real_Time --# global in CommandLineData.Content; --# in out Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Dict, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dict, --# LexTokenManager.State & --# LexTokenManager.State from *, --# CommandLineData.Content; is Token, SC_Token, TS_Token : LexTokenManager.Lex_String; T_Param, SC_Param, TS_Param : Symbol; Time_Type, Time_Span_Type, Seconds_Count : RawDict.Type_Info_Ref; Clock_Time : RawDict.Variable_Info_Ref; The_Subprogram : RawDict.Subprogram_Info_Ref; Package_Real_Time : RawDict.Package_Info_Ref; The_Constant : RawDict.Constant_Info_Ref; The_Global_Variable : RawDict.Global_Variable_Info_Ref; Real_Time_Scope : Scopes; Arg : Iterator; -- Adds a function with pattern -- function XXX (YYY : in Integer) return Time_Span -- where XXX and YYY are the two formal parameters given procedure Add_Time_Span_Constructor_Function (Function_Name : in String; Parameter_Name : in String) --# global in Real_Time_Scope; --# in Time_Span_Type; --# in out LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Function_Name, --# Parameter_Name, --# Real_Time_Scope, --# Time_Span_Type, --# LexTokenManager.State & --# LexTokenManager.State from *, --# Function_Name, --# Parameter_Name & --# SPARK_IO.File_Sys from *, --# Dict, --# Function_Name, --# Parameter_Name, --# Real_Time_Scope, --# Time_Span_Type, --# LexTokenManager.State; is The_Function : RawDict.Subprogram_Info_Ref; The_Parameter_Token : LexTokenManager.Lex_String; The_Function_Token : LexTokenManager.Lex_String; begin Insert_Lex_String (Name => Function_Name, Token => The_Function_Token); Add_Subprogram (Name => The_Function_Token, Comp_Unit => ContextManager.NullUnit, Specification => Null_Location, Scope => Real_Time_Scope, Context => ProgramContext, The_Subprogram => The_Function); Insert_Lex_String (Name => Parameter_Name, Token => The_Parameter_Token); Add_Subprogram_Parameter (Name => The_Parameter_Token, The_Subprogram => The_Function, Type_Mark => Get_Predefined_Integer_Type, Type_Reference => Null_Location, Mode => InMode, Comp_Unit => ContextManager.NullUnit, Specification => Null_Location); Add_Return_Type (The_Function => The_Function, Type_Mark => Time_Span_Type, Comp_Unit => ContextManager.NullUnit, Type_Reference => Null_Location); end Add_Time_Span_Constructor_Function; begin -- Add_Package_Real_Time -- Create the package as a child of package Ada Add_New_Package 
(Name => LexTokenManager.Real_Time_Token, Comp_Unit => ContextManager.NullUnit, Declaration => Null_Location, Scope => GlobalScope, The_Parent => Get_Predefined_Package_Ada, Child_Sort => Public, The_Package => Package_Real_Time); Dict.Packages.Package_Real_Time := Package_Real_Time; Real_Time_Scope := Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (Package_Real_Time)); -- Add type definitions for private types Time and Time_Span Insert_Lex_String (Name => "Time", Token => Token); Add_Private_Type_Local (Name => Token, Comp_Unit => ContextManager.NullUnit, Declaration => Null_Location, The_Package => Package_Real_Time, Is_Limited => False, Is_Tagged_Type => False, Extends => RawDict.Null_Type_Info_Ref, Type_Mark => Time_Type); Dict.Types.The_Predefined_Types (Predefined_Time_Type) := Time_Type; Insert_Lex_String (Name => "Time_Span", Token => Token); Add_Private_Type_Local (Name => Token, Comp_Unit => ContextManager.NullUnit, Declaration => Null_Location, The_Package => Package_Real_Time, Is_Limited => False, Is_Tagged_Type => False, Extends => RawDict.Null_Type_Info_Ref, Type_Mark => Time_Span_Type); Dict.Types.The_Predefined_Types (Predefined_Time_Span_Type) := Time_Span_Type; -- Create annotation "own protected in ClockTime : Time" for external time returned by Clock. -- -- Note that the type-announcement is needed here so that the signature -- of the proof function associated with function Clock is well-formed -- in FDL. Insert_Lex_String (Name => "ClockTime", Token => Token); Add_Own_Variable_Local (Name => Token, The_Package => Package_Real_Time, Mode => InMode, Is_Protected => True, Is_Interrupt_Stream => False, Comp_Unit => ContextManager.NullUnit, Declaration => Null_Location, The_Variable => Clock_Time); Add_Own_Variable_Type (Own_Variable => Clock_Time, Type_Mark => Time_Type, Type_Reference => Null_Location); -- Add the seven deferred constants. Note that Time_Unit is not supported. 
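-- For orientation, the Ada.Real_Time declarations being modelled below are
-- roughly (shape assumed from the language standard, not taken from this
-- source):
--    Time_First, Time_Last           : constant Time;
--    Time_Span_First, Time_Span_Last : constant Time_Span;
--    Time_Span_Zero, Time_Span_Unit  : constant Time_Span;
--    Tick                            : constant Time_Span;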
Insert_Lex_String (Name => "Time_First", Token => Token); --# accept Flow, 10, The_Constant, "Expected ineffective assignment to OK"; Add_Deferred_Constant_Local (Name => Token, Type_Mark => Time_Type, Type_Reference => Null_Location, Comp_Unit => ContextManager.NullUnit, Declaration => Null_Location, The_Package => Package_Real_Time, The_Constant => The_Constant); --# end accept; Insert_Lex_String (Name => "Time_Last", Token => Token); --# accept Flow, 10, The_Constant, "Expected ineffective assignment to OK"; Add_Deferred_Constant_Local (Name => Token, Type_Mark => Time_Type, Type_Reference => Null_Location, Comp_Unit => ContextManager.NullUnit, Declaration => Null_Location, The_Package => Package_Real_Time, The_Constant => The_Constant); --# end accept; Insert_Lex_String (Name => "Time_Span_First", Token => Token); --# accept Flow, 10, The_Constant, "Expected ineffective assignment to OK"; Add_Deferred_Constant_Local (Name => Token, Type_Mark => Time_Span_Type, Type_Reference => Null_Location, Comp_Unit => ContextManager.NullUnit, Declaration => Null_Location, The_Package => Package_Real_Time, The_Constant => The_Constant); --# end accept; Insert_Lex_String (Name => "Time_Span_Last", Token => Token); --# accept Flow, 10, The_Constant, "Expected ineffective assignment to OK"; Add_Deferred_Constant_Local (Name => Token, Type_Mark => Time_Span_Type, Type_Reference => Null_Location, Comp_Unit => ContextManager.NullUnit, Declaration => Null_Location, The_Package => Package_Real_Time, The_Constant => The_Constant); --# end accept; Insert_Lex_String (Name => "Time_Span_Zero", Token => Token); --# accept Flow, 10, The_Constant, "Expected ineffective assignment to OK"; Add_Deferred_Constant_Local (Name => Token, Type_Mark => Time_Span_Type, Type_Reference => Null_Location, Comp_Unit => ContextManager.NullUnit, Declaration => Null_Location, The_Package => Package_Real_Time, The_Constant => The_Constant); --# end accept; Insert_Lex_String (Name => "Time_Span_Unit", Token => Token); --# accept Flow, 10, The_Constant, "Expected ineffective assignment to OK"; Add_Deferred_Constant_Local (Name => Token, Type_Mark => Time_Span_Type, Type_Reference => Null_Location, Comp_Unit => ContextManager.NullUnit, Declaration => Null_Location, The_Package => Package_Real_Time, The_Constant => The_Constant); --# end accept; Insert_Lex_String (Name => "Tick", Token => Token); --# accept Flow, 10, The_Constant, "Expected ineffective assignment to OK"; Add_Deferred_Constant_Local (Name => Token, Type_Mark => Time_Span_Type, Type_Reference => Null_Location, Comp_Unit => ContextManager.NullUnit, Declaration => Null_Location, The_Package => Package_Real_Time, The_Constant => The_Constant); --# end accept; -- Add function Clock return Time. 
Insert_Lex_String (Name => "Clock", Token => Token); Add_Subprogram (Name => Token, Comp_Unit => ContextManager.NullUnit, Specification => Null_Location, Scope => Real_Time_Scope, Context => ProgramContext, The_Subprogram => The_Subprogram); Add_Return_Type (The_Function => The_Subprogram, Type_Mark => Time_Type, Comp_Unit => ContextManager.NullUnit, Type_Reference => Null_Location); Dict.Subprograms.Ada_Real_Time_Clock := The_Subprogram; -- Clock requires annotation "global in ClockTime" --# accept Flow, 10, The_Global_Variable, "Expected ineffective assignment to OK"; Add_Subprogram_Global_Variable (Abstraction => IsAbstract, The_Subprogram => The_Subprogram, The_Variable => Clock_Time, Mode => InMode, Prefix_Needed => False, Comp_Unit => ContextManager.NullUnit, Variable_Reference => Null_Location, The_Global_Variable => The_Global_Variable); --# end accept; -- The arithmetic and relational operators are not in the dictionary. -- Functions To_Duration and To_Time_Span are not supported. -- -- Add functions Nanoseconds / Microseconds / Milliseconds. These take a -- parameter of type Integer and return Time_Span. Add_Time_Span_Constructor_Function (Function_Name => "Nanoseconds", Parameter_Name => "NS"); Add_Time_Span_Constructor_Function (Function_Name => "Microseconds", Parameter_Name => "US"); Add_Time_Span_Constructor_Function (Function_Name => "Milliseconds", Parameter_Name => "MS"); -- Ada2005 additionally defines functions "Seconds" and "Minutes", so if CommandLineData.Content.Language_Profile in CommandLineData.SPARK2005_Profiles then Add_Time_Span_Constructor_Function (Function_Name => "Seconds", Parameter_Name => "S"); Add_Time_Span_Constructor_Function (Function_Name => "Minutes", Parameter_Name => "M"); end if; -- Add type Seconds_Count. This is an integer type with implementation- -- defined range. The range is left blank at this stage, and may be -- overridden by values supplied in the configuration file. Add_Integer_Type_Local (Name => LexTokenManager.Seconds_Count_Token, Comp_Unit => ContextManager.NullUnit, Declaration => Null_Location, Lower => LexTokenManager.Null_String, Upper => LexTokenManager.Null_String, Scope => Real_Time_Scope, Context => ProgramContext, Type_Mark => Seconds_Count); Insert_Lex_String (Name => "Split", Token => Token); Add_Subprogram (Name => Token, Comp_Unit => ContextManager.NullUnit, Specification => Null_Location, Scope => Real_Time_Scope, Context => ProgramContext, The_Subprogram => The_Subprogram); Insert_Lex_String (Name => "T", Token => Token); Add_Subprogram_Parameter (Name => Token, The_Subprogram => The_Subprogram, Type_Mark => Time_Type, Type_Reference => Null_Location, Mode => InMode, Comp_Unit => ContextManager.NullUnit, Specification => Null_Location); Insert_Lex_String (Name => "SC", Token => SC_Token); -- Remember the token for further use of "SC" Add_Subprogram_Parameter (Name => SC_Token, The_Subprogram => The_Subprogram, Type_Mark => Seconds_Count, Type_Reference => Null_Location, Mode => OutMode, Comp_Unit => ContextManager.NullUnit, Specification => Null_Location); Insert_Lex_String (Name => "TS", Token => TS_Token); -- Remember the token for further use of "TS" Add_Subprogram_Parameter (Name => TS_Token, The_Subprogram => The_Subprogram, Type_Mark => Time_Span_Type, Type_Reference => Null_Location, Mode => OutMode, Comp_Unit => ContextManager.NullUnit, Specification => Null_Location); -- Split requires annotation "derives SC, TS from T", so we need the -- symbols for the three parameters. 
Arg := First_Ada_Subprogram_Parameter (The_Subprogram => The_Subprogram); T_Param := CurrentSymbol (Arg); Arg := NextSymbol (Arg); SC_Param := CurrentSymbol (Arg); Arg := NextSymbol (Arg); TS_Param := CurrentSymbol (Arg); Add_Subprogram_Export_Parameter (The_Subprogram => The_Subprogram, Abstraction => IsAbstract, The_Export => RawDict.Get_Subprogram_Parameter_Info_Ref (SC_Param), Export_Reference => Null_Location, Annotation => Null_Location); Add_Subprogram_Export_Parameter (The_Subprogram => The_Subprogram, Abstraction => IsAbstract, The_Export => RawDict.Get_Subprogram_Parameter_Info_Ref (TS_Param), Export_Reference => Null_Location, Annotation => Null_Location); Add_Subprogram_Dependency (Abstraction => IsAbstract, Comp_Unit => ContextManager.NullUnit, The_Subprogram => The_Subprogram, The_Export => SC_Param, The_Import => T_Param, Import_Reference => Null_Location); Add_Subprogram_Dependency (Abstraction => IsAbstract, Comp_Unit => ContextManager.NullUnit, The_Subprogram => The_Subprogram, The_Export => TS_Param, The_Import => T_Param, Import_Reference => Null_Location); Insert_Lex_String (Name => "Time_Of", Token => Token); -- Mark the subprogram as having a derives annotation so that calls to -- it from other subprograms with derives annotations are legal with flow=auto RawDict.Set_Subprogram_Has_Derives_Annotation (The_Subprogram => The_Subprogram); Add_Subprogram (Name => Token, Comp_Unit => ContextManager.NullUnit, Specification => Null_Location, Scope => Real_Time_Scope, Context => ProgramContext, The_Subprogram => The_Subprogram); Add_Subprogram_Parameter (Name => SC_Token, The_Subprogram => The_Subprogram, Type_Mark => Seconds_Count, Type_Reference => Null_Location, Mode => InMode, Comp_Unit => ContextManager.NullUnit, Specification => Null_Location); Add_Subprogram_Parameter (Name => TS_Token, The_Subprogram => The_Subprogram, Type_Mark => Time_Span_Type, Type_Reference => Null_Location, Mode => InMode, Comp_Unit => ContextManager.NullUnit, Specification => Null_Location); Add_Return_Type (The_Function => The_Subprogram, Type_Mark => Time_Type, Comp_Unit => ContextManager.NullUnit, Type_Reference => Null_Location); --# accept Flow, 33, The_Constant, "Expected to be neither referenced nor exported" & --# Flow, 33, The_Global_Variable, "Expected to be neither referenced nor exported"; end Add_Package_Real_Time; -------------------------------------------------------------------------------- procedure Add_Package_Synchronous_Task_Control --# global in out Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Dict, --# SPARK_IO.File_Sys from *, --# Dict, --# LexTokenManager.State & --# LexTokenManager.State from *; is Token, S_Token : LexTokenManager.Lex_String; S_Param : Symbol; The_Subprogram : RawDict.Subprogram_Info_Ref; Package_Synchronous_Task_Control : RawDict.Package_Info_Ref; Suspension_Object_Type : RawDict.Type_Info_Ref; Synchronous_Task_Control_Scope : Scopes; Arg : Iterator; begin -- Create the package as a child of package Ada Add_New_Package (Name => LexTokenManager.Synchronous_Task_Control_Token, Comp_Unit => ContextManager.NullUnit, Declaration => Null_Location, Scope => GlobalScope, The_Parent => Get_Predefined_Package_Ada, Child_Sort => Public, The_Package => Package_Synchronous_Task_Control); Dict.Packages.Package_Synchronous_Task_Control := Package_Synchronous_Task_Control; Synchronous_Task_Control_Scope := Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (Package_Synchronous_Task_Control)); -- Add 
type definitions for limited private type Suspension_Object Insert_Lex_String (Name => "Suspension_Object", Token => Token); Add_Private_Type_Local (Name => Token, Comp_Unit => ContextManager.NullUnit, Declaration => Null_Location, The_Package => Package_Synchronous_Task_Control, Is_Limited => True, Is_Tagged_Type => False, Extends => RawDict.Null_Type_Info_Ref, Type_Mark => Suspension_Object_Type); Dict.Types.The_Predefined_Types (Predefined_Suspension_Object_Type) := Suspension_Object_Type; -- Mark the type as Atomic RawDict.Set_Type_Atomic (Type_Mark => Suspension_Object_Type); Insert_Lex_String (Name => "Set_True", Token => Token); Add_Subprogram (Name => Token, Comp_Unit => ContextManager.NullUnit, Specification => Null_Location, Scope => Synchronous_Task_Control_Scope, Context => ProgramContext, The_Subprogram => The_Subprogram); Insert_Lex_String (Name => "S", Token => S_Token); -- used in all subroutine definitions for this package. Add_Subprogram_Parameter (Name => S_Token, The_Subprogram => The_Subprogram, Type_Mark => Suspension_Object_Type, Type_Reference => Null_Location, Mode => InOutMode, Comp_Unit => ContextManager.NullUnit, Specification => Null_Location); -- The annotation Arg := First_Ada_Subprogram_Parameter (The_Subprogram => The_Subprogram); S_Param := CurrentSymbol (Arg); Add_Subprogram_Export_Parameter (The_Subprogram => The_Subprogram, Abstraction => IsAbstract, The_Export => RawDict.Get_Subprogram_Parameter_Info_Ref (S_Param), Export_Reference => Null_Location, Annotation => Null_Location); -- Mark the subprogram as having a derives annotation so that calls to -- it from other subprograms with derives annotations are legal with flow=auto RawDict.Set_Subprogram_Has_Derives_Annotation (The_Subprogram => The_Subprogram); Insert_Lex_String (Name => "Set_False", Token => Token); Add_Subprogram (Name => Token, Comp_Unit => ContextManager.NullUnit, Specification => Null_Location, Scope => Synchronous_Task_Control_Scope, Context => ProgramContext, The_Subprogram => The_Subprogram); Add_Subprogram_Parameter (Name => S_Token, The_Subprogram => The_Subprogram, Type_Mark => Suspension_Object_Type, Type_Reference => Null_Location, Mode => InOutMode, Comp_Unit => ContextManager.NullUnit, Specification => Null_Location); -- The annotation Arg := First_Ada_Subprogram_Parameter (The_Subprogram => The_Subprogram); S_Param := CurrentSymbol (Arg); Add_Subprogram_Export_Parameter (The_Subprogram => The_Subprogram, Abstraction => IsAbstract, The_Export => RawDict.Get_Subprogram_Parameter_Info_Ref (S_Param), Export_Reference => Null_Location, Annotation => Null_Location); -- Mark the subprogram as having a derives annotation so that calls to -- it from other subprograms with derives annotations are legal with flow=auto RawDict.Set_Subprogram_Has_Derives_Annotation (The_Subprogram => The_Subprogram); Insert_Lex_String (Name => "Suspend_Until_True", Token => Token); Add_Subprogram (Name => Token, Comp_Unit => ContextManager.NullUnit, Specification => Null_Location, Scope => Synchronous_Task_Control_Scope, Context => ProgramContext, The_Subprogram => The_Subprogram); -- Record the symbol for later retrieval by wellformedness checker. 
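-- (Presumably so that a call such as the hypothetical
--    Ada.Synchronous_Task_Control.Suspend_Until_True (Start_Signal);
-- can be subjected to the Ravenscar checks on potentially blocking
-- operations.)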
Dict.Subprograms.STC_Suspend_Until_True := The_Subprogram; Add_Subprogram_Parameter (Name => S_Token, The_Subprogram => The_Subprogram, Type_Mark => Suspension_Object_Type, Type_Reference => Null_Location, Mode => InOutMode, Comp_Unit => ContextManager.NullUnit, Specification => Null_Location); -- The annotations Arg := First_Ada_Subprogram_Parameter (The_Subprogram => The_Subprogram); S_Param := CurrentSymbol (Arg); Add_Subprogram_Export_Parameter (The_Subprogram => The_Subprogram, Abstraction => IsAbstract, The_Export => RawDict.Get_Subprogram_Parameter_Info_Ref (S_Param), Export_Reference => Null_Location, Annotation => Null_Location); -- Mark the subprogram as having a derives annotation so that calls to -- it from other subprograms with derives annotations are legal with flow=auto RawDict.Set_Subprogram_Has_Derives_Annotation (The_Subprogram => The_Subprogram); end Add_Package_Synchronous_Task_Control; -------------------------------------------------------------------------------- procedure Add_Package_Interrupts --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# SPARK_IO.File_Sys from *, --# Dict, --# LexTokenManager.State; is Type_Mark : RawDict.Type_Info_Ref; Package_Interrupts : RawDict.Package_Info_Ref; Interrupts_Scope : Scopes; pragma Unreferenced (Type_Mark); begin -- Create the package as a child of package Ada Add_New_Package (Name => LexTokenManager.Interrupts_Token, Comp_Unit => ContextManager.NullUnit, Declaration => Null_Location, Scope => GlobalScope, The_Parent => Get_Predefined_Package_Ada, Child_Sort => Public, The_Package => Package_Interrupts); Dict.Packages.Package_Interrupts := Package_Interrupts; Interrupts_Scope := Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (Package_Interrupts)); -- Add type Interrupt_ID. This is an integer type with implementation- -- defined range. The range is left blank at this stage, and may be -- overridden by values supplied in the configuration file. 
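-- Sketch of a target configuration file entry that could supply the range
-- later (hypothetical syntax and values, not taken from this source):
--    package Ada.Interrupts is
--       type Interrupt_ID is range 0 .. 1023;
--    end Ada.Interrupts;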
--# accept Flow, 10, Type_Mark, "Expected ineffective assignment to Type_Mark"; Add_Integer_Type_Local (Name => LexTokenManager.Interrupt_ID_Token, Comp_Unit => ContextManager.NullUnit, Declaration => Null_Location, Lower => LexTokenManager.Null_String, Upper => LexTokenManager.Null_String, Scope => Interrupts_Scope, Context => ProgramContext, Type_Mark => Type_Mark); --# end accept; --# accept Flow, 33, Type_Mark, "Expected to be neither referenced nor exported"; end Add_Package_Interrupts; -------------------------------------------------------------------------------- -- This procedure creates a subprogram record with which we can associate globals -- and dependencies from the partition annotation procedure Add_The_Partition --# global in out Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Dict, --# SPARK_IO.File_Sys from *, --# Dict, --# LexTokenManager.State & --# LexTokenManager.State from *; is Token : LexTokenManager.Lex_String; The_Subprogram : RawDict.Subprogram_Info_Ref; begin Insert_Lex_String (Name => "main_program", Token => Token); -- suitable reserved name for the subprog Add_Subprogram (Name => Token, Comp_Unit => ContextManager.NullUnit, Specification => Null_Location, Scope => GlobalScope, Context => ProgramContext, The_Subprogram => The_Subprogram); Dict.The_Partition := The_Subprogram; end Add_The_Partition; -------------------------------------------------------------------------------- procedure Add_Unchecked_Conversion --# global in CommandLineData.Content; --# in out Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Dict, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dict, --# LexTokenManager.State & --# LexTokenManager.State from *; is Subprogram_Token, Source_Token, Target_Token, S_Token : LexTokenManager.Lex_String; The_Generic_Unit : RawDict.Generic_Unit_Info_Ref; The_Subprogram : RawDict.Subprogram_Info_Ref; The_Generic_Parameter : RawDict.Generic_Parameter_Info_Ref; The_Declaration : RawDict.Declaration_Info_Ref; Type_Mark : RawDict.Type_Info_Ref; begin -- add the function itself Insert_Lex_String (Name => "Unchecked_Conversion", Token => Subprogram_Token); Add_Subprogram (Name => Subprogram_Token, Comp_Unit => ContextManager.NullUnit, Specification => Null_Location, Scope => GlobalScope, Context => ProgramContext, The_Subprogram => The_Subprogram); -- mark predefined generic unit as having a body Add_Subprogram_Body (The_Subprogram => The_Subprogram, Comp_Unit => ContextManager.NullUnit, The_Body => Null_Location, Hidden => True); -- also add a declaration so that Ada.Unchecked_Conversion can be looked up case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => null; when CommandLineData.SPARK95_Onwards => Add_Declaration (Comp_Unit => ContextManager.NullUnit, Loc => Null_Location, Scope => Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (Get_Predefined_Package_Ada)), Context => ProgramContext, The_Declaration => The_Declaration); RawDict.Set_Declaration_Item (The_Declaration => The_Declaration, Item => RawDict.Get_Subprogram_Symbol (The_Subprogram)); end case; RawDict.Create_Generic_Unit (Kind => Generic_Of_Subprogram, Scope => GlobalScope, Comp_Unit => ContextManager.NullUnit, Loc => LexTokenManager.Null_Token_Position, The_Generic_Unit => The_Generic_Unit); -- make it generic RawDict.Set_Subprogram_Generic_Unit (The_Subprogram => The_Subprogram, The_Generic_Unit => The_Generic_Unit); 
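-- ... and link the generic unit back to its owning subprogram, so the
-- association between the two can be followed in either direction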
RawDict.Set_Generic_Unit_Owning_Subprogram (The_Generic_Unit => The_Generic_Unit, The_Subprogram => The_Subprogram); -- create generic parameters Insert_Lex_String (Name => "Source", Token => Source_Token); Add_Generic_Type_Local (Name => Source_Token, Comp_Unit => ContextManager.NullUnit, Declaration => Null_Location, Scope => Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Generic_Unit_Symbol (The_Generic_Unit)), Type_Mark => Type_Mark); Set_Generic_Private_Type_Local (Type_Mark => Type_Mark, Is_Limited => False); Add_Generic_Formal_Parameter_Local (Comp_Unit => ContextManager.NullUnit, Declaration => Null_Location, The_Generic_Unit => The_Generic_Unit, Type_Mark => Type_Mark, The_Object => RawDict.Null_Constant_Info_Ref, The_Generic_Parameter => The_Generic_Parameter); -- add subprogram parameter Insert_Lex_String (Name => "S", Token => S_Token); Add_Subprogram_Parameter (Name => S_Token, The_Subprogram => The_Subprogram, Type_Mark => RawDict.Get_Generic_Parameter_Type (The_Generic_Parameter => The_Generic_Parameter), Type_Reference => Null_Location, Mode => InMode, Comp_Unit => ContextManager.NullUnit, Specification => Null_Location); -- create generic parameters Insert_Lex_String (Name => "Target", Token => Target_Token); Add_Generic_Type_Local (Name => Target_Token, Comp_Unit => ContextManager.NullUnit, Declaration => Null_Location, Scope => Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Generic_Unit_Symbol (The_Generic_Unit)), Type_Mark => Type_Mark); Set_Generic_Private_Type_Local (Type_Mark => Type_Mark, Is_Limited => False); Add_Generic_Formal_Parameter_Local (Comp_Unit => ContextManager.NullUnit, Declaration => Null_Location, The_Generic_Unit => The_Generic_Unit, Type_Mark => Type_Mark, The_Object => RawDict.Null_Constant_Info_Ref, The_Generic_Parameter => The_Generic_Parameter); -- add return type Add_Return_Type (The_Function => The_Subprogram, Type_Mark => RawDict.Get_Generic_Parameter_Type (The_Generic_Parameter => The_Generic_Parameter), Comp_Unit => ContextManager.NullUnit, Type_Reference => Null_Location); Dict.Subprograms.Unchecked_Conversion := The_Subprogram; end Add_Unchecked_Conversion; begin -- Initialize Assign_Initial_Value; Add_Unknown_Type; Add_Package_Standard; Add_Package_ASCII; case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => Dict.Packages.Package_Ada := RawDict.Null_Package_Info_Ref; when CommandLineData.SPARK95_Onwards => Add_Package_Ada; end case; Add_Unchecked_Conversion; if CommandLineData.Ravenscar_Selected then Add_Package_Real_Time; Add_Package_Synchronous_Task_Control; Add_Package_Interrupts; Add_The_Partition; else Dict.Packages.Package_Real_Time := RawDict.Null_Package_Info_Ref; Dict.Packages.Package_Synchronous_Task_Control := RawDict.Null_Package_Info_Ref; Dict.Packages.Package_Interrupts := RawDict.Null_Package_Info_Ref; Dict.The_Partition := RawDict.Null_Subprogram_Info_Ref; end if; if Write_To_File then SPARK_IO.Create (File, 0, "", "", Status); -- Expected flow err if Status = SPARK_IO.Ok then Dict.TemporaryFile := File; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Init, Msg => "in Dictionary.Initialize"); Dict.TemporaryFile := SPARK_IO.Null_File; end if; else Dict.TemporaryFile := SPARK_IO.Null_File; end if; LexTokenManager.Set_Last_Token; end Initialize; spark-2012.0.deb/examiner/sli-io.adb0000644000175000017500000000707211753202336016123 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran 
Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Text_IO; with Ada.Integer_Text_IO; with E_Strings.Not_SPARK; package body SLI.IO is --# hide SLI.IO; SLI_File_Descriptor : Ada.Text_IO.File_Type; procedure Create_File (Name_Of_File : in E_Strings.T; Status : out File_Status) is begin Ada.Text_IO.Create (File => SLI_File_Descriptor, Mode => Ada.Text_IO.Out_File, Name => E_Strings.Not_SPARK.Get_String (E_Str => Name_Of_File), Form => ""); Status := Ok; exception when Ada.Text_IO.Status_Error => Status := Status_Error; when Ada.Text_IO.Name_Error => Status := Name_Error; when Ada.Text_IO.Use_Error => Status := Use_Error; when Ada.Text_IO.Device_Error => Status := Device_Error; end Create_File; procedure Close is begin if Ada.Text_IO.Is_Open (File => SLI_File_Descriptor) then Ada.Text_IO.Close (File => SLI_File_Descriptor); end if; end Close; procedure Put_Char (Item : in Character) is begin if Ada.Text_IO.Is_Open (File => SLI_File_Descriptor) then Ada.Text_IO.Put (File => SLI_File_Descriptor, Item => Item); end if; end Put_Char; procedure Put_String (Item : in String) is begin if Ada.Text_IO.Is_Open (File => SLI_File_Descriptor) then Ada.Text_IO.Put (File => SLI_File_Descriptor, Item => Item); end if; end Put_String; procedure Put_Integer (Item : in Integer) is begin if Ada.Text_IO.Is_Open (File => SLI_File_Descriptor) then Ada.Integer_Text_IO.Put (File => SLI_File_Descriptor, Item => Item, Width => 0); end if; end Put_Integer; procedure New_Line is begin if Ada.Text_IO.Is_Open (File => SLI_File_Descriptor) then Ada.Text_IO.New_Line (File => SLI_File_Descriptor); end if; end New_Line; procedure Put_Line (Item : in String) is begin Put_String (Item => Item); New_Line; end Put_Line; procedure E_Strings_Put_String (E_Str : in E_Strings.T) is begin if E_Strings.Get_Length (E_Str => E_Str) /= 0 and then Ada.Text_IO.Is_Open (File => SLI_File_Descriptor) then Ada.Text_IO.Put (File => SLI_File_Descriptor, Item => E_Strings.Not_SPARK.Get_String (E_Str => E_Str)); end if; end E_Strings_Put_String; procedure E_Strings_Put_Line (E_Str : in E_Strings.T) is begin E_Strings_Put_String (E_Str => E_Str); New_Line; end E_Strings_Put_Line; end SLI.IO; spark-2012.0.deb/examiner/errorhandler-appenderrors.adb0000644000175000017500000003700411753202336022116 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (ErrorHandler) procedure AppendErrors (Report : in SPARK_IO.File_Type; Purpose : in Error_Types.ConversionRequestSource) is Err_Count : Natural; OK : Boolean; Next_Error : Error_Types.StringError; Num_Err : Error_Types.NumericError; Success : SPARK_IO.File_Status; Temp_Error_File : Error_IO.File_Type; Accumulator : ErrorAccumulator.T := ErrorAccumulator.Clear; procedure Put_Source_Line (To_File : in SPARK_IO.File_Type; Line_No : in LexTokenManager.Line_Numbers) --# global in CommandLineData.Content; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# derives Error_Context_Rec from *, --# Line_No, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Error_Context_Rec, --# Line_No, --# To_File; -- pre Error_Context_Rec.LineNo /= Line_No; -- post Error_Context_Rec.LineNo = Line_No; is begin loop GetFileLine; exit when Error_Context_Rec.Line_No >= Line_No; end loop; if not CommandLineData.Content.XML then Print_Source_Line (To_File => To_File); end if; end Put_Source_Line; procedure Get_Error_Set (Error_Set : out Error_Sets; Next_Error : in out Error_Types.StringError; OK : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in Error_Context_Rec; --# in LexTokenManager.State; --# in Purpose; --# in out Conversions.State; --# in out Err_Count; --# in out SPARK_IO.File_Sys; --# derives Conversions.State, --# Next_Error, --# OK, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Error_Context_Rec, --# LexTokenManager.State, --# Purpose, --# SPARK_IO.File_Sys & --# Error_Set, --# Err_Count from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Error_Context_Rec, --# Err_Count, --# LexTokenManager.State, --# Next_Error, --# Purpose, --# SPARK_IO.File_Sys; -- post not OK or -- Error_Context_Rec.LineNo /= Next_Error.Position.StartLineNo or -- Errors.Length = ExaminerConstants.MaxErrorSetSize; is L_OK : Boolean; Errors : Error_Sets; Num_Err : Error_Types.NumericError; begin Errors := Empty_Error_Set; loop Errors.Length := Errors.Length + 1; Errors.Content (Errors.Length).Error := Next_Error; if ErrorAccumulator.Is_Error_Continuation (The_Error => Next_Error) then Errors.Content (Errors.Length).Err_Num := 0; else Err_Count := Err_Count + 1; Errors.Content (Errors.Length).Err_Num := Err_Count; end if; loop Error_IO.Get_Numeric_Error (Error_Context_Rec.Errs, Num_Err); Conversions.ToString (Num_Err, Purpose, Next_Error); L_OK := (Next_Error /= Error_Types.Empty_StringError); exit when not L_OK; exit when Next_Error.ErrorType /= Error_Types.NoErr; end loop; exit when not L_OK; exit when Error_Context_Rec.Line_No /= Next_Error.Position.Start_Line_No; exit when Errors.Length = ExaminerConstants.MaxErrorSetSize; end loop; OK := L_OK; Error_Set := Errors; end Get_Error_Set; procedure Process_Error_Set (Listing : in SPARK_IO.File_Type; Next_Error : in out Error_Types.StringError; OK : out Boolean) --# global in 
CommandLineData.Content; --# in Dictionary.Dict; --# in Error_Context_Rec; --# in LexTokenManager.State; --# in Purpose; --# in out Accumulator; --# in out Conversions.State; --# in out Err_Count; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives Accumulator, --# XMLReport.State from *, --# Accumulator, --# CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Error_Context_Rec, --# Err_Count, --# LexTokenManager.State, --# Next_Error, --# Purpose, --# SPARK_IO.File_Sys & --# Conversions.State, --# Next_Error, --# OK from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Error_Context_Rec, --# LexTokenManager.State, --# Purpose, --# SPARK_IO.File_Sys & --# Err_Count from *, --# CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Error_Context_Rec, --# LexTokenManager.State, --# Next_Error, --# Purpose, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# Accumulator, --# CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Error_Context_Rec, --# Err_Count, --# LexTokenManager.State, --# Listing, --# Next_Error, --# Purpose, --# XMLReport.State; is Error_Set : Error_Sets; begin Get_Error_Set (Error_Set => Error_Set, Next_Error => Next_Error, OK => OK); if not CommandLineData.Content.XML then Put_Error_Pointers (Listing => Listing, Errors => Error_Set); Put_Error_Messages (Listing => Listing, Errors => Error_Set, Start_Pos => 29, Accumulator => Accumulator); else Put_Error_Messages_XML (Listing => Listing, Errors => Error_Set, Start_Pos => 29, Accumulator => Accumulator); end if; end Process_Error_Set; procedure Process_Errors_On_Line (Listing : in SPARK_IO.File_Type; Next_Error : in out Error_Types.StringError; OK : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in Error_Context_Rec; --# in LexTokenManager.State; --# in Purpose; --# in out Accumulator; --# in out Conversions.State; --# in out Err_Count; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives Accumulator, --# Conversions.State, --# Err_Count, --# Next_Error, --# OK, --# SPARK_IO.File_Sys, --# XMLReport.State from Accumulator, --# CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Error_Context_Rec, --# Err_Count, --# LexTokenManager.State, --# Listing, --# Next_Error, --# Purpose, --# SPARK_IO.File_Sys, --# XMLReport.State; is L_OK : Boolean; Accumulator_Was_Active : Boolean; begin if not Error_Has_Position_Inline (Err_Type => Next_Error.ErrorType) and then not CommandLineData.Content.XML then SPARK_IO.New_Line (Listing, 1); end if; loop Process_Error_Set (Listing => Listing, Next_Error => Next_Error, OK => L_OK); exit when not L_OK; exit when Error_Context_Rec.Line_No /= Next_Error.Position.Start_Line_No; end loop; OK := L_OK; Accumulator_Was_Active := ErrorAccumulator.Is_Active (This => Accumulator); ErrorAccumulator.Flush (Accumulator, Listing); if Accumulator_Was_Active then if CommandLineData.Content.XML then XMLReport.End_Message (Report => Listing); else New_Line (Listing, 1); end if; end if; end Process_Errors_On_Line; procedure Set_Up_Files (OK : out Boolean) --# global in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# out Err_Count; --# derives Error_Context_Rec, --# SPARK_IO.File_Sys from *, --# Error_Context_Rec & --# Err_Count from & --# OK from Error_Context_Rec, --# SPARK_IO.File_Sys; is L_OK : Boolean; Success : SPARK_IO.File_Status; Source_File : SPARK_IO.File_Type; Error_File : Error_IO.File_Type; begin Source_File := 
Error_Context_Rec.Source; SPARK_IO.Reset (Source_File, SPARK_IO.In_File, Success); Error_Context_Rec.Source := Source_File; L_OK := Success = SPARK_IO.Ok; Error_Context_Rec.Line_No := 0; Err_Count := 0; Error_File := Error_Context_Rec.Errs; Error_IO.Reset (Error_File, SPARK_IO.In_File, Success); Error_Context_Rec.Errs := Error_File; OK := L_OK and Success = SPARK_IO.Ok; end Set_Up_Files; procedure Put_Error_Count (File : in SPARK_IO.File_Type; Cnt : in Natural) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Cnt, --# File; is begin SPARK_IO.Put_Integer (File, Cnt, 0, 10); SPARK_IO.Put_Line (File, " error(s) or warning(s)", 0); SPARK_IO.New_Line (File, 1); end Put_Error_Count; begin if Error_Context_Rec.Num_Errs = 0 then if not CommandLineData.Content.XML then SPARK_IO.Put_Line (Report, "No errors found", 0); SPARK_IO.New_Line (Report, 1); end if; else Error_Context_Rec.Current_Line := E_Strings.Empty_String; if not CommandLineData.Content.XML then Put_Error_Count (File => Report, Cnt => Integer (Error_Context_Rec.Num_Errs)); end if; Set_Up_Files (OK => OK); if OK then if not CommandLineData.Content.XML then SPARK_IO.Put_Line (Report, "Line", 0); end if; loop Error_IO.Get_Numeric_Error (Error_Context_Rec.Errs, Num_Err); Conversions.ToString (Num_Err, Purpose, Next_Error); OK := (Next_Error /= Error_Types.Empty_StringError); exit when not OK; exit when Next_Error.ErrorType /= Error_Types.NoErr; end loop; -- assert Error_Context_Rec.Line_No = 0 and -- OK -> Next_Error.Position.StartLineNo >= 1; loop exit when not OK; -- assert Error_Context_Rec.Line_No /= Next_Error.Position.StartLineNo; Put_Source_Line (To_File => Report, Line_No => Next_Error.Position.Start_Line_No); -- assert Error_Context_Rec.Line_No = Next_Error.Position.StartLineNo; Process_Errors_On_Line (Listing => Report, Next_Error => Next_Error, OK => OK); -- assert OK -> Error_Context_Rec.Line_No /= Next_Error.Position.StartLineNo; --# accept Flow, 41, "Expected stable expression"; if not CommandLineData.Content.XML then --# end accept; SPARK_IO.New_Line (Report, 1); end if; end loop; else SPARK_IO.Put_Line (Report, "*** Bad error list, unable to report errors", 0); SPARK_IO.New_Line (Report, 1); end if; end if; if CommandLineData.Content.XML then Justifications.Print_Justifications_XML (Which_Table => Error_Context_Rec.Justifications_Data_Table, File => Report); else Justifications.Print_Justifications (Which_Table => Error_Context_Rec.Justifications_Data_Table, File => Report); end if; WarningStatus.Report_Suppressed_Warnings (To_File => Report, Counter => Error_Context_Rec.Counter); Temp_Error_File := Error_Context_Rec.Errs; --# accept Flow, 10, Success, "Expected ineffective assignment to Success"; Error_IO.Close (Temp_Error_File, Success); --# end accept; Error_Context_Rec.Errs := Temp_Error_File; --# accept Flow, 33, Success, "Expected Success to be neither referenced nor exported"; end AppendErrors; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_qualified_expression.adb0000644000175000017500000001662311753202336026163 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Walk_Expression_P) procedure Wf_Qualified_Expression (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type) is Name_Exp, Exp_Result : Sem.Exp_Record; Exp_Value : Maths.Value; Store_Rep : LexTokenManager.Lex_String; Exp_Node : STree.SyntaxNode; begin Exp_Node := STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node)); -- ASSUME Exp_Node = aggregate OR extension_aggregate OR expression OR -- annotation_aggregate OR annotation_extension_aggregate OR annotation_expression if STree.Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.expression or else STree.Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.annotation_expression then -- ASSUME Exp_Node = expression OR annotation_expression -- we are dealing with a simple qualified expression Exp_Stack.Pop (Item => Exp_Result, Stack => E_Stack); Exp_Stack.Pop (Item => Name_Exp, Stack => E_Stack); case Name_Exp.Sort is when Sem.Is_Type_Mark => -- check that constraining type mark is not unconstrained if Dictionary.Is_Unconstrained_Array_Type_Mark (Name_Exp.Type_Symbol, Scope) then ErrorHandler.Semantic_Error (Err_Num => 39, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); Exp_Stack.Push (X => Sem.Unknown_Type_Record, Stack => E_Stack); elsif Dictionary.CompatibleTypes (Scope, Name_Exp.Type_Symbol, Exp_Result.Type_Symbol) then Maths.StorageRep (Exp_Result.Range_RHS, Store_Rep); if Dictionary.IsPredefinedStringType (Exp_Result.Type_Symbol) then if Exp_Result.Range_RHS = Maths.NoValue then -- must be a parameter ErrorHandler.Semantic_Error (Err_Num => 39, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Dictionary.GetScalarAttributeValue (False, LexTokenManager.Last_Token, Dictionary.CurrentSymbol (Dictionary.FirstArrayIndex (Name_Exp.Type_Symbol))), Lex_Str2 => Store_Rep) /= LexTokenManager.Str_Eq then if STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_qualified_expression then -- in annotation ErrorHandler.Semantic_Error (Err_Num => 399, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); else ErrorHandler.Semantic_Error (Err_Num => 402, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; end if; elsif Sem.Illegal_Unconstrained (Left_Type => Name_Exp.Type_Symbol, Right_Type => Exp_Result.Type_Symbol) then ErrorHandler.Semantic_Error (Err_Num => 418, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); end if; Sem.Constraint_Check (Val => Exp_Result.Value, New_Val => Exp_Value, Is_Annotation => STree.Syntax_Node_Type (Node => Node) = 
SP_Symbols.annotation_qualified_expression, Typ => Name_Exp.Type_Symbol, Position => STree.Node_Position (Node => Exp_Node)); Exp_Result.Value := Exp_Value; Exp_Result.Type_Symbol := Name_Exp.Type_Symbol; Exp_Result.Is_AVariable := False; Exp_Result.Is_An_Entire_Variable := False; Exp_Stack.Push (X => Exp_Result, Stack => E_Stack); else ErrorHandler.Semantic_Error (Err_Num => 38, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); Exp_Stack.Push (X => Sem.Unknown_Type_Record, Stack => E_Stack); end if; when Sem.Is_Unknown => -- any error from unknown prefix will already have been raised Exp_Stack.Push (X => Sem.Unknown_Type_Record, Stack => E_Stack); when others => Exp_Stack.Push (X => Sem.Unknown_Type_Record, Stack => E_Stack); ErrorHandler.Semantic_Error (Err_Num => 95, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end case; elsif STree.Syntax_Node_Type (Node => Exp_Node) /= SP_Symbols.aggregate and then STree.Syntax_Node_Type (Node => Exp_Node) /= SP_Symbols.annotation_aggregate and then STree.Syntax_Node_Type (Node => Exp_Node) /= SP_Symbols.extension_aggregate and then STree.Syntax_Node_Type (Node => Exp_Node) /= SP_Symbols.annotation_extension_aggregate then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Exp_Node = aggregate OR extension_aggregate OR expression OR annotation_aggregate OR " & "annotation_extension_aggregate OR annotation_expression in Wf_Qualified_Expression"); end if; end Wf_Qualified_Expression; spark-2012.0.deb/examiner/errorhandler-conversions-tostring-condlflowerr.adb0000644000175000017500000001442311753202336026327 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (ErrorHandler.Conversions.ToString) procedure CondlFlowErr (Err_Num : in Error_Types.NumericError; With_Explanation : in Boolean; E_Str : in out E_Strings.T) is Err_Type : ErrorHandler.Data_Flow_Err_Type; procedure CondlFlowErrExpl (E_Str : in out E_Strings.T) --# global in Err_Type; --# derives E_Str from *, --# Err_Type; is separate; -- Note that the parameter names for this subunit are chosen to make it as easy as -- possible to auto-generate the subunit from this, its parent, file. The -- generation requires copying the case statement below, stripping out the -- current Append'Thing' statements and adding an Append_String for the -- explanatory text that is delineated by --! comments. 
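-- For illustration, a generated alternative in the CondlFlowErrExpl subunit
-- would then be expected to look roughly like this (a sketch only; the real
-- subunit is generated, and its exact text may differ):
--
--    when ErrorHandler.Expn_May_Be_Undefined =>
--       E_Strings.Append_String
--         (E_Str => E_Str,
--          Str   => "The expression may be that in an assignment or return statement, ...");
--
-- i.e. the copied case structure is kept, the Append_Name and position
-- handling is stripped out, and only the --! explanatory text survives as
-- Append_String calls.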
procedure Append_Explanation --# global in Err_Type; --# in With_Explanation; --# in out E_Str; --# derives E_Str from *, --# Err_Type, --# With_Explanation; is Explanation_String : E_Strings.T := E_Strings.Empty_String; begin if With_Explanation then -- we need to at least look for an explanation CondlFlowErrExpl (E_Str => Explanation_String); if E_Strings.Get_Length (E_Str => Explanation_String) > 0 then -- there actually is one E_Strings.Append_String (E_Str => E_Str, Str => ErrorHandler.Explanation_Prefix); E_Strings.Append_Examiner_String (E_Str1 => E_Str, E_Str2 => Explanation_String); E_Strings.Append_String (E_Str => E_Str, Str => ErrorHandler.Explanation_Postfix); end if; end if; end Append_Explanation; begin Err_Type := ErrorHandler.Data_Flow_Err_Type'Val (Err_Num.ErrorNum - Error_Types.CondFlowErrorOffset); case Err_Type is -- HTML Directives --! <"flow-"> --! <"??? Flow Error : "><" : "> when ErrorHandler.Expn_May_Be_Undefined => --! 501 E_Strings.Append_String (E_Str => E_Str, Str => "Expression contains reference(s) to variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => ", which may have an undefined value"); --! The expression may be that in an assignment or return statement, --! an actual parameter, --! or a condition occurring in an if or case statement, an iteration --! scheme or exit statement. The Examiner has identified at least one syntactic path --! to this point where the variable has NOT been given a value. Conditional --! data flow errors are extremely serious and must be carefully investigated. --! NOTE: the presence of random and possibly invalid values --! introduced by data flow errors invalidates proof of exception freedom for the --! subprogram body which contains them. All reports of data flow errors must be --! eliminated or shown to be associated with semantically infeasible paths before --! attempting exception freedom proofs. See the manual "SPARK Proof Manual --! " for full details. when ErrorHandler.Stmt_May_Be_Undefined => --! 504 E_Strings.Append_String (E_Str => E_Str, Str => "Statement contains reference(s) to variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => ", which may have an undefined value"); --! The statement here is a procedure call, and the variable XXX may --! appear in an actual parameter, whose value is imported when the --! procedure is executed. If the variable XXX --! does not occur in the actual parameter list, it is an imported --! global variable of the procedure (named in its global definition). --! The Examiner has identified at least one syntactic path --! to this point where the variable has NOT been given a value. Conditional --! data flow errors are extremely serious and must be carefully investigated. --! NOTE: the presence of random and possibly invalid values --! introduced by data flow errors invalidates proof of exception freedom for the --! subprogram body which contains them. All reports of data flow errors must be --! eliminated or shown to be associated with semantically infeasible paths before --! attempting exception freedom proofs. See the manual "SPARK Proof Manual --! " for full details. 
when others => E_Strings.Append_String (E_Str => E_Str, Str => "UNKNOWN ERROR NUMBER PASSED TO Condl_Flow_Error"); end case; Append_Explanation; E_Strings.Append_String (E_Str => E_Str, Str => "."); end CondlFlowErr; ././@LongLink0000000000000000000000000000017500000000000011570 Lustar rootrootspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_subtype_declaration-wf_ravenscar_subtype.adbspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_subtype_declaration-0000644000175000017500000005135511753202336033054 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Subtype_Declaration) procedure Wf_Ravenscar_Subtype (Id_Str : in LexTokenManager.Lex_String; Type_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Ident_Node : in STree.SyntaxNode; Constraint_Node : in STree.SyntaxNode; The_Heap : in out Heap.HeapRecord) is The_Subtype : Dictionary.Symbol; Assoc_Node : STree.SyntaxNode; procedure Process_Expression (Exp_Node : in STree.SyntaxNode; Formal_Sym : in Dictionary.Symbol; Type_Sym : in Dictionary.Symbol; Subtype_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# out Aggregate_Stack.State; --# derives Aggregate_Stack.State, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Exp_Node, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Heap & --# Dictionary.Dict, --# LexTokenManager.State from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Exp_Node, --# Formal_Sym, --# LexTokenManager.State, --# Scope, --# STree.Table, --# Subtype_Sym, --# The_Heap, --# Type_Sym & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Exp_Node, --# Formal_Sym, --# LexTokenManager.State, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Subtype_Sym, --# The_Heap, --# Type_Sym & --# SLI.State from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, 
--# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Exp_Node, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Exp_Node, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.expression; --# post STree.Table = STree.Table~; is Result : Exp_Record; Unwanted_Seq : SeqAlgebra.Seq; Unused_Component_Data : ComponentManager.ComponentData; Static_Value : LexTokenManager.Lex_String; Pragma_Kind : Dictionary.RavenscarPragmasWithValue; Value_Rep : LexTokenManager.Lex_String; begin Heap.Reset (The_Heap); ComponentManager.Initialise (Unused_Component_Data); SeqAlgebra.CreateSeq (The_Heap, Unwanted_Seq); --# accept Flow, 10, Unused_Component_Data, "Expected ineffective assignment"; Walk_Expression_P.Walk_Expression (Exp_Node => Exp_Node, Scope => Scope, Type_Context => Dictionary.GetUnknownTypeMark, Context_Requires_Static => False, Ref_Var => Unwanted_Seq, Result => Result, Component_Data => Unused_Component_Data, The_Heap => The_Heap); --# end accept; SeqAlgebra.DisposeOfSeq (The_Heap, Unwanted_Seq); Assignment_Check (Position => Node_Position (Node => Exp_Node), Scope => Scope, Target_Type => Dictionary.GetType (Formal_Sym), Exp_Result => Result); if Result.Is_Static then Maths.StorageRep (Result.Value, Static_Value); Dictionary.AddDiscriminantConstraintStaticValue (ProtectedOrTaskSubtype => Subtype_Sym, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Exp_Node), End_Position => Node_Position (Node => Exp_Node)), TheValue => Static_Value); if Dictionary.SetsPriority (Formal_Sym) then if Dictionary.GetTypeHasPragma (Type_Sym, Dictionary.Priority) then Pragma_Kind := Dictionary.Priority; else -- must be Pragma_Kind := Dictionary.InterruptPriority; end if; Check_Priority_Range (Error_Sym => Subtype_Sym, Scope => Scope, Pragma_Kind => Pragma_Kind, Err_Pos => Node_Position (Node => Exp_Node), Value => Result.Value, Value_Rep => Value_Rep); -- Value_Rep is either a storage rep of a valid value or a null string; we can always add it to dict Dictionary.SetSubtypePriority (Subtype_Sym, Value_Rep); end if; elsif Dictionary.TypeIsAccess (Dictionary.GetType (Formal_Sym)) then Dictionary.AddDiscriminantConstraintAccessedObject (ProtectedOrTaskSubtype => Subtype_Sym, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Exp_Node), End_Position => Node_Position (Node => Exp_Node)), TheObject => Result.Variable_Symbol); -- N.B. 
VariableSymbol is the accessed _variable_ name, put there by wf_attribute_designator else -- not static and not a protected types so must be wrong ErrorHandler.Semantic_Error (Err_Num => 36, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); end if; end Process_Expression; ------------------------------------------------------------------------ procedure Handle_Named_Association (Node : in STree.SyntaxNode; Type_Sym : in Dictionary.Symbol; Subtype_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# Subtype_Sym, --# The_Heap, --# Type_Sym & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Subtype_Sym, --# The_Heap, --# Type_Sym; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.named_argument_association; --# post STree.Table = STree.Table~; is It : Dictionary.Iterator; Expression_Node : STree.SyntaxNode; begin Check_Named_Association (The_Formals => Type_Sym, Scope => Scope, Named_Argument_Assoc_Node => Node); -- Loop through all the formals It := Dictionary.FirstKnownDiscriminant (Type_Sym); while not Dictionary.IsNullIterator (It) loop --# assert Syntax_Node_Type (Node, STree.Table) = SP_Symbols.named_argument_association and --# STree.Table = STree.Table~; Find_Actual_Node (For_Formal => Dictionary.CurrentSymbol (It), Named_Argument_Assoc_Node => Node, Expression_Node => Expression_Node); -- ASSUME Expression_Node = expression OR NULL --# check Syntax_Node_Type (Expression_Node, STree.Table) = SP_Symbols.expression or --# Expression_Node = STree.NullNode; if Syntax_Node_Type (Node => Expression_Node) = SP_Symbols.expression then -- ASSUME Expression_Node = expression Process_Expression (Exp_Node => Expression_Node, Formal_Sym => Dictionary.CurrentSymbol (It), Type_Sym => Type_Sym, Subtype_Sym => Subtype_Sym, Scope => Scope, The_Heap => The_Heap); end if; It := Dictionary.NextSymbol (It); end loop; end Handle_Named_Association; ------------------------------------------------------------------------ procedure Handle_Positional_Association (Node : in STree.SyntaxNode; Type_Sym : in Dictionary.Symbol; Subtype_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out 
LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# Subtype_Sym, --# The_Heap, --# Type_Sym & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Subtype_Sym, --# The_Heap, --# Type_Sym; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.positional_argument_association; --# post STree.Table = STree.Table~; is Expression_Node : STree.SyntaxNode; Formal_It : Dictionary.Iterator; Actual_It : STree.Iterator; Formal_Parameter : Dictionary.Symbol; begin Formal_It := Dictionary.FirstKnownDiscriminant (Type_Sym); Actual_It := Find_First_Node (Node_Kind => SP_Symbols.expression, From_Root => Node, In_Direction => STree.Down); while not Dictionary.IsNullIterator (Formal_It) and then not STree.IsNull (Actual_It) loop Formal_Parameter := Dictionary.CurrentSymbol (Formal_It); Expression_Node := Get_Node (It => Actual_It); --# assert Syntax_Node_Type (Expression_Node, STree.Table) = SP_Symbols.expression and --# Expression_Node = Get_Node (Actual_It) and --# STree.Table = STree.Table~; Process_Expression (Exp_Node => Expression_Node, Formal_Sym => Formal_Parameter, Type_Sym => Type_Sym, Subtype_Sym => Subtype_Sym, Scope => Scope, The_Heap => The_Heap); Formal_It := Dictionary.NextSymbol (Formal_It); Actual_It := STree.NextNode (Actual_It); end loop; if not Dictionary.IsNullIterator (Formal_It) or else not STree.IsNull (Actual_It) then ErrorHandler.Semantic_Error_Sym (Err_Num => 893, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Sym => Type_Sym, Scope => Scope); end if; end Handle_Positional_Association; begin -- Wf_Ravenscar_Subtype Dictionary.Add_Task_Or_Protected_Subtype (Name => Id_Str, Parent => Type_Sym, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Ident_Node), End_Position => Node_Position (Node => Ident_Node)), Scope => Scope, Context => Dictionary.ProgramContext, The_Subtype => The_Subtype); STree.Add_Node_Symbol (Node => Ident_Node, Sym => The_Subtype); if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Symbol (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Ident_Node, Symbol => The_Subtype, Is_Declaration => True); end if; Assoc_Node := Child_Node (Current_Node => Child_Node (Current_Node => Constraint_Node)); -- ASSUME Assoc_Node = named_argument_association OR positional_argument_association if Syntax_Node_Type (Node => Assoc_Node) = SP_Symbols.named_argument_association then -- ASSUME Assoc_Node = named_argument_association Handle_Named_Association (Node => Assoc_Node, Type_Sym => Type_Sym, Subtype_Sym => The_Subtype, Scope => Scope, The_Heap => The_Heap); elsif Syntax_Node_Type (Node => Assoc_Node) = SP_Symbols.positional_argument_association then -- ASSUME Assoc_Node = positional_argument_association Handle_Positional_Association (Node => Assoc_Node, Type_Sym => Type_Sym, 
Subtype_Sym => The_Subtype, Scope => Scope, The_Heap => The_Heap); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Assoc_Node = named_argument_association OR positional_argument_association in Wf_Ravenscar_Subtype"); end if; -- Check that subtype has a priority, if one has not been set then inherit parent's if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Dictionary.GetTypePriority (The_Subtype), Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq then Dictionary.SetSubtypePriority (The_Subtype, Dictionary.GetTypePriority (Type_Sym)); end if; end Wf_Ravenscar_Subtype; spark-2012.0.deb/examiner/sp_parser_actions-spa.adb0000644000175000017500000000570711753202336021231 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (SP_Parser_Actions) function SPA (CST : SP_Productions.Valid_States; CSY : SP_Symbols.SP_Terminal) return SP_Parse_Act is Index_Pair : Packed_PAT_Index_Pair; Index, Max_Index : PAT_Index; CSY_POS, Next_Symbol, Packed_Result : Packed_Sym_Action_Pair; Result : SP_Parse_Act; begin -- SPA CSY_POS := SP_Symbols.SP_Terminal'Pos (CSY); Index_Pair := State_Table (CST); Index := PAT_Index (Index_Pair mod PAT_Index_Size); Max_Index := PAT_Index (Index_Pair / PAT_Index_Size); Next_Symbol := Parse_Action_Table (Index) mod Term_Sym_Lim; while Next_Symbol /= CSY_POS and then Index < Max_Index loop Index := Index + 1; Next_Symbol := Parse_Action_Table (Index) mod Term_Sym_Lim; end loop; if Next_Symbol /= CSY_POS and then Next_Symbol /= Default then Result := Error_Action; else Packed_Result := Parse_Action_Table (Index); case SP_Action_Kind'Val ((Packed_Result / Act) mod Act_Lim) is when Shift => Result := SP_Parse_Act' (Act => Shift, State => SP_Productions.SP_State ((Packed_Result / State) mod State_Lim), Symbol => No_Sym, Red_By => No_Red, Prod_No => No_Prod); when Reduce => Result := SP_Parse_Act' (Reduce, SP_Productions.No_State, SP_Symbols.SP_Symbol'Val ((Packed_Result / Symbol) mod Symbol_Lim + First_Non_Terminal), SP_Productions.SP_Right ((Packed_Result / Red_By) mod Red_By_Lim), 0); when Accpt => if Next_Symbol = CSY_POS then Result := Accept_Action; else Result := Error_Action; end if; when Error => -- can never have this option Result := Error_Action; end case; end if; return Result; end SPA; spark-2012.0.deb/examiner/sem-plant_constraining_type.adb0000644000175000017500000000570411753202336022446 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited 
------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) procedure Plant_Constraining_Type (Expression_Type : in Dictionary.Symbol; String_Length : in Maths.Value; Actual_Node : in STree.SyntaxNode) is -- This procedure is used to plant a type symbol in the syntax tree, at the location of -- an actual parameter, for use by the VCG. IFF the Expression_Type is String -- (indicating that the actual parameter is a string literal) then the parameter String_Length -- is used to create (or obtain if it's already declared) a subtype of Positive called Positive__n -- where n is the string length; this implicitly-declared subtype is then used as the constraint. -- For anything other than a string literal actual parameter we -- plant the symbol of the constraining array subtype. The array subtype effectively passes -- the symbols of /all/ the constraining indexes however many dimensions the array has. Type_To_Plant : Dictionary.Symbol := Dictionary.NullSymbol; begin if Dictionary.IsUnconstrainedArrayType (Dictionary.GetRootType (Expression_Type)) and then not Dictionary.IsUnconstrainedArrayType (Expression_Type) then -- Not a string, so plant the array type Type_To_Plant := Expression_Type; elsif Dictionary.IsPredefinedStringType (Expression_Type) and then String_Length /= Maths.NoValue then -- If the actual is a String Literal like "Hello World", then the Expression_Type -- will be Predefined String and String_Length will have a well-defined value. -- In this case, we create an implicit constraining subtype. Create_Implicit_Positive_Subtype (String_Length => String_Length, Location => Dictionary.Location'(Start_Position => Node_Position (Node => Actual_Node), End_Position => Node_Position (Node => Actual_Node)), Index_Constraint => Type_To_Plant); end if; STree.Add_Node_Symbol (Node => Actual_Node, Sym => Type_To_Plant); end Plant_Constraining_Type; spark-2012.0.deb/examiner/g-tabsor.adb0000644000175000017500000000550511753202336016444 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with GNAT.Heap_Sort_G; package body GNAT.Table.Sort is ---------------- -- Sort_Table -- ---------------- procedure Sort_Table is Temp : Table_Component_Type; -- A temporary position to simulate index 0 -- Local subprograms function Index_Of (Idx : Natural) return Table_Index_Type; -- Return index of Idx'th element of table function Lower_Than (Op1, Op2 : Natural) return Boolean; -- Compare two components procedure Move (From : Natural; To : Natural); -- Move one component package Heap_Sort is new GNAT.Heap_Sort_G (Move, Lower_Than); -------------- -- Index_Of -- -------------- function Index_Of (Idx : Natural) return Table_Index_Type is J : constant Integer'Base := Table_Index_Type'Pos (First) + Idx - 1; begin return Table_Index_Type'Val (J); end Index_Of; ---------- -- Move -- ---------- procedure Move (From : Natural; To : Natural) is begin if From = 0 then Table (Index_Of (To)) := Temp; elsif To = 0 then Temp := Table (Index_Of (From)); else Table (Index_Of (To)) := Table (Index_Of (From)); end if; end Move; ---------------- -- Lower_Than -- ---------------- function Lower_Than (Op1, Op2 : Natural) return Boolean is begin if Op1 = 0 then return Lt (Temp, Table (Index_Of (Op2))); elsif Op2 = 0 then return Lt (Table (Index_Of (Op1)), Temp); else return Lt (Table (Index_Of (Op1)), Table (Index_Of (Op2))); end if; end Lower_Than; -- Start of processing for Sort_Table begin Heap_Sort.Sort (Natural (Last - First) + 1); end Sort_Table; end GNAT.Table.Sort; spark-2012.0.deb/examiner/sem.adb0000644000175000017500000047565611753202336015534 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- For sem*.adb, the source code uses the following patterns: -- -- 1. Subprograms declarations. For each node given as formal -- parameter or return in the subprogram declaration, we must write -- the type of the node in the pre/post/return annotation. 
--
--    procedure Proc (Node1, Node2 : in     STree.SyntaxNode;
--                    Node3        : in out STree.SyntaxNode;
--                    Node4        :    out STree.SyntaxNode)
--    --# pre (Syntax_Node_Type (Node => Node1) = SP_Symbols.n1 or
--    --#      Node1 = STree.NullNode) and
--    --#     Syntax_Node_Type (Node => Node2) = SP_Symbols.n2 and
--    --#     (Syntax_Node_Type (Node => Node3) = SP_Symbols.n31 or
--    --#      Syntax_Node_Type (Node => Node3) = SP_Symbols.n32 or
--    --#      Syntax_Node_Type (Node => Node3) = SP_Symbols.n33);
--    --# post (Syntax_Node_Type (Node => Node3) = SP_Symbols.n31 or
--    --#       Node3 = STree.NullNode) and
--    --#      Syntax_Node_Type (Node => Node4) = SP_Symbols.n4;
--
--    function Func (Node1, Node2 : STree.SyntaxNode) return STree.SyntaxNode;
--    --# pre (Syntax_Node_Type (Node => Node1) = SP_Symbols.n1 or
--    --#      Node1 = STree.NullNode) and
--    --#     Syntax_Node_Type (Node => Node2) = SP_Symbols.n2;
--    --# return Node => Syntax_Node_Type (Node => Node) = SP_Symbols.n;
--
-- 2. Node navigation. This occurs in the body of a subprogram. "Node
--    navigation" refers to all operations on a node that return
--    another node (sometimes the same node). These operations include
--    STree.Child_Node, STree.Next_Sibling, STree.Last_Sibling_Of,
--    STree.Last_Child_Of, STree.Parent, ...
--
--    There are 2 patterns: the optimized one and the normal one. For
--    efficiency reasons it is always better to use the optimized one,
--    but sometimes that is not possible.
--
-- 2.1 Optimized version. It is called "optimized" because the checks
--     of the node type are performed almost for free (there is no real
--     extra cost; there is just one more condition to evaluate,
--     else -> elsif).
--
--    is
--       Node1, Node2 : STree.SyntaxNode;
--    begin
--       Node2 := STree.Child_Node (Node => Node1);
--       -- ASSUME Node2 = n1 OR n2 OR NULL
--       if Node2 = STree.NullNode
--         or else Syntax_Node_Type (Node => Node2) = SP_Symbols.n1 then
--          -- ASSUME Node2 = n1 OR NULL
--          Do_Something_With_Node2;
--       elsif Syntax_Node_Type (Node => Node2) = SP_Symbols.n2 then
--          -- ASSUME Node2 = n2 OR NULL
--          Do_Something_Else_With_Node2;
--       else
--          SystemErrors.Fatal_Error
--            (Sys_Err => SystemErrors.Invalid_Syntax_Tree,
--             Msg     => "Expect Node2 = n1 OR n2 OR NULL in Func");
--       end if;
--    end Func;
--
--    Note that "or else" is used instead of "or": this is more
--    efficient and prevents calling Syntax_Node_Type (Node => Node2)
--    on a null node.
--
-- 2.2 Normal version. This version is ONLY used if the optimized
--     version cannot be used.
--
--    is
--       Node1, Node2 : STree.SyntaxNode;
--    begin
--       Node2 := STree.Child_Node (Node => Node1);
--       -- ASSUME Node2 = n1 OR n2 OR NULL
--       SystemErrors.RT_Assert
--         (C => Node2 = STree.NullNode
--            or else Syntax_Node_Type (Node => Node2) = SP_Symbols.n1
--            or else Syntax_Node_Type (Node => Node2) = SP_Symbols.n2,
--          Sys_Err => SystemErrors.Invalid_Syntax_Tree,
--          Msg     => "Expect Node2 = n1 OR n2 OR NULL in Func");
--       Do_Something_With_Node2;
--    end Func;
--
--    Again "or else" is used instead of "or": this is more efficient
--    and prevents calling Syntax_Node_Type (Node => Node2) on a null
--    node.
--
-- Optimisation is quite important for these kinds of checks because
-- they should NEVER EVER FAIL!!!
with CompleteCheck; with ComponentManager; with ContextManager.Ops; with Debug; with Dictionary; with ErrorHandler; with ExaminerConstants; with E_Strings; with FlowAnalyser; with Heap; with LexTokenManager; with Lists; with Maths; with RefList; with RelationAlgebra; with SeqAlgebra; with SimpleLists; with SPARK_IO; with SP_Symbols; with SystemErrors; use type CompleteCheck.TypCompleteState; use type CompleteCheck.TypRangeState; use type CompleteCheck.TypOverlapState; use type Dictionary.Abstractions; use type Dictionary.Contexts; use type Dictionary.Iterator; use type Dictionary.Modes; use type Dictionary.PrefixSort; use type Dictionary.Scopes; use type Dictionary.Symbol; use type Dictionary.Visibility; use type ErrorHandler.Error_Level; use type ErrorHandler.Justification_Identifier; use type ErrorHandler.Justification_Kinds; use type ExaminerConstants.RefType; use type LexTokenManager.Lex_String; use type LexTokenManager.Str_Comp_Result; use type Maths.ErrorCode; use type Maths.Value; use type SP_Symbols.SP_Symbol; package body Sem --# own State is Aggregate_Stack.State; is -- Operator renames -- Long subprogram prefix renames function Child_Node (Current_Node : STree.SyntaxNode) return STree.SyntaxNode renames STree.Child_Node; function Next_Sibling (Current_Node : STree.SyntaxNode) return STree.SyntaxNode renames STree.Next_Sibling; function Parent_Node (Current_Node : STree.SyntaxNode) return STree.SyntaxNode renames STree.Parent_Node; function Find_First_Node (Node_Kind : SP_Symbols.SP_Symbol; From_Root : STree.SyntaxNode; In_Direction : STree.TraverseDirection) return STree.Iterator renames STree.Find_First_Node; function Find_First_Branch_Node (From_Root : STree.SyntaxNode; In_Direction : STree.TraverseDirection) return STree.Iterator renames STree.Find_First_Branch_Node; function Get_Node (It : STree.Iterator) return STree.SyntaxNode renames STree.Get_Node; function Syntax_Node_Type (Node : STree.SyntaxNode) return SP_Symbols.SP_Symbol renames STree.Syntax_Node_Type; function Node_Position (Node : STree.SyntaxNode) return LexTokenManager.Token_Position renames STree.Node_Position; function Node_Lex_String (Node : STree.SyntaxNode) return LexTokenManager.Lex_String renames STree.Node_Lex_String; -- Function returns the left most leaf node of the tree. function Last_Child_Of (Start_Node : STree.SyntaxNode) return STree.SyntaxNode renames STree.Last_Child_Of; function Last_Sibling_Of (Start_Node : STree.SyntaxNode) return STree.SyntaxNode renames STree.Last_Sibling_Of; ----------------------------------------------- type Exp_Record_Sort is ( Type_Result, -- should be this anywhere above primary Is_Unknown, Is_Parameter_Name, -- used in named association checks Is_Package, Is_Object, Is_Function, Is_Type_Mark); type Exp_Record is record Type_Symbol : Dictionary.Symbol; Other_Symbol : Dictionary.Symbol; Stream_Symbol : Dictionary.Symbol; Tagged_Parameter_Symbol : Dictionary.Symbol; Variable_Symbol : Dictionary.Symbol; Param_Count : Natural; Param_List : Lists.List; Sort : Exp_Record_Sort; Arg_List_Found : Boolean; Is_AVariable : Boolean; Is_An_Entire_Variable : Boolean; Errors_In_Expression : Boolean; Has_Operators : Boolean; Is_Static, -- 3 flags meaningless unless Sort=Type_Result Is_Constant, Is_ARange : Boolean; String_Value : LexTokenManager.Lex_String; -- if a String literal or constant Value : Maths.Value; -- value of scalar, or value of L if Is_ARange Range_RHS : Maths.Value; -- if Is_ARange (e.g. L .. 
R) then this is the value of R end record; Null_Exp_Record : constant Exp_Record := Exp_Record' (Type_Symbol => Dictionary.NullSymbol, Other_Symbol => Dictionary.NullSymbol, Stream_Symbol => Dictionary.NullSymbol, Tagged_Parameter_Symbol => Dictionary.NullSymbol, Variable_Symbol => Dictionary.NullSymbol, Param_Count => 0, Param_List => Lists.Null_List, Sort => Is_Unknown, Arg_List_Found => False, Is_AVariable => False, Is_An_Entire_Variable => False, Errors_In_Expression => False, Has_Operators => False, Is_Static => False, Is_Constant => False, Is_ARange => False, String_Value => LexTokenManager.Null_String, Value => Maths.NoValue, Range_RHS => Maths.NoValue); type Typ_Case_Flags is record Check_Completeness : Boolean; Signal_Out_Of_Range : Boolean; Out_Of_Range_Seen : Boolean; Check_Overlap : Boolean; Warn_No_Others : Boolean; Others_Mandatory : Boolean; end record; Null_Case_Flags : constant Typ_Case_Flags := Typ_Case_Flags' (Check_Completeness => False, Signal_Out_Of_Range => False, Out_Of_Range_Seen => False, Check_Overlap => False, Warn_No_Others => False, Others_Mandatory => False); type Typ_Type_Bound is record Is_Defined : Boolean; Value : Integer; end record; Unknown_Type_Bound : constant Typ_Type_Bound := Typ_Type_Bound'(Is_Defined => False, Value => 0); type Typ_Agg_Association_Type is (Aggregate_Is_Positional, Aggregate_Is_Named, Aggregate_Is_Lone_Others); type Typ_Agg_Flags is record Check_Completeness : Boolean; Warn_No_Others : Boolean; Check_Overlap : Boolean; Signal_Out_Of_Range : Boolean; Out_Of_Range_Seen : Boolean; More_Entries_Than_Natural : Boolean; Has_Others_Part : Boolean; Association_Type : Typ_Agg_Association_Type; end record; Null_Typ_Agg_Flags : constant Typ_Agg_Flags := Typ_Agg_Flags' (Check_Completeness => False, Warn_No_Others => False, Check_Overlap => False, Signal_Out_Of_Range => False, Out_Of_Range_Seen => False, More_Entries_Than_Natural => False, Has_Others_Part => False, Association_Type => Typ_Agg_Association_Type'First); type Visibility_Error_Hint is (No_Hint, In_Global_List, In_Derives_Import_List, In_Derives_Export_List, In_Suspends_List); type Tilde_Context is (Code, Precondition, Postcondition, Function_Return); subtype Anno_Tilde_Context is Tilde_Context range Precondition .. 
Function_Return; ------------------------------------------------------- --# inherit CompleteCheck, --# Dictionary, --# ExaminerConstants, --# Sem, --# SPARK_IO, --# SystemErrors; package Aggregate_Stack --# own State : Stack_T; is --# type Stack_T is abstract; --# function Stack_Is_Valid (The_State : Stack_T) return Boolean; procedure Init; --# global in Dictionary.Dict; --# out State; --# derives State from & --# null from Dictionary.Dict; --# post Stack_Is_Valid (State); procedure Push (Type_Sym : in Dictionary.Symbol; Lower_Bound : in Sem.Typ_Type_Bound; Upper_Bound : in Sem.Typ_Type_Bound; Agg_Flags : in Sem.Typ_Agg_Flags; Counter : in Natural; Complete_Rec : in CompleteCheck.T); --# global in Dictionary.Dict; --# in out State; --# derives State from *, --# Agg_Flags, --# Complete_Rec, --# Counter, --# Lower_Bound, --# Type_Sym, --# Upper_Bound & --# null from Dictionary.Dict; --# pre Stack_Is_Valid (State) and --# (Dictionary.Is_Null_Symbol (Type_Sym) or Dictionary.IsTypeMark (Type_Sym, Dictionary.Dict)) and --# ((Lower_Bound.Is_Defined and Upper_Bound.Is_Defined) -> (Lower_Bound.Value <= Upper_Bound.Value)) and --# (Complete_Rec.ActualUpperBound - Complete_Rec.LowerBound < ExaminerConstants.CompleteCheckSize); --# post Stack_Is_Valid (State); procedure Pop (Type_Sym : out Dictionary.Symbol; Lower_Bound : out Sem.Typ_Type_Bound; Upper_Bound : out Sem.Typ_Type_Bound; Agg_Flags : out Sem.Typ_Agg_Flags; Counter : out Natural; Complete_Rec : out CompleteCheck.T); --# global in Dictionary.Dict; --# in out State; --# derives Agg_Flags, --# Complete_Rec, --# Counter, --# Lower_Bound, --# State, --# Type_Sym, --# Upper_Bound from State & --# null from Dictionary.Dict; --# pre Stack_Is_Valid (State); --# post Stack_Is_Valid (State) and --# (Dictionary.Is_Null_Symbol (Type_Sym) or Dictionary.IsTypeMark (Type_Sym, Dictionary.Dict)) and --# ((Lower_Bound.Is_Defined and Upper_Bound.Is_Defined) -> (Lower_Bound.Value <= Upper_Bound.Value)) and --# (Complete_Rec.ActualUpperBound - Complete_Rec.LowerBound < ExaminerConstants.CompleteCheckSize); -- In the analysis of array aggregates, the expression walker -- needs to know the expected type of the array index for the -- current aggregate. To make this work for multi-dimensional -- aggregates, this has to be stored in the Aggregate Stack, and -- is accessed with the following function. 
function Top_Type_Sym return Dictionary.Symbol; --# global in Dictionary.Dict; --# in State; --# pre Stack_Is_Valid (State); --# return Type_Sym => (Dictionary.Is_Null_Symbol (Type_Sym) or Dictionary.IsTypeMark (Type_Sym, Dictionary.Dict)); end Aggregate_Stack; --------------------------------------------------------------------- --# inherit CommandLineData, --# ContextManager.Ops, --# Dictionary, --# ErrorHandler, --# ExaminerConstants, --# Heap, --# LexTokenManager, --# RelationAlgebra, --# Sem, --# SeqAlgebra, --# SLI, --# SPARK_IO, --# SP_Symbols, --# Statistics, --# STree, --# SystemErrors; package Dependency_Relation is procedure Create_Full_Subprog_Dependency (Node_Pos : in LexTokenManager.Token_Position; Subprog_Sym : in Dictionary.Symbol; Abstraction : in Dictionary.Abstractions; The_Heap : in out Heap.HeapRecord); --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Dictionary.Dict, --# Statistics.TableUsage, --# The_Heap from *, --# Abstraction, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Node_Pos, --# Subprog_Sym, --# The_Heap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# SPARK_IO.File_Sys, --# Subprog_Sym, --# The_Heap; --------------------------------------------------------------------- procedure Wf_Dependency_Relation (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Subprog_Sym : in Dictionary.Symbol; First_Seen : in Boolean; Glob_Def_Err : in Boolean; The_Heap : in out Heap.HeapRecord); --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Dictionary.Dict, --# Statistics.TableUsage, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# First_Seen, --# Glob_Def_Err, --# LexTokenManager.State, --# Node, --# STree.Table, --# Subprog_Sym, --# The_Heap & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# First_Seen, --# Glob_Def_Err, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Subprog_Sym, --# The_Heap & --# SLI.State from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# First_Seen, --# Glob_Def_Err, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Subprog_Sym, --# The_Heap & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# First_Seen, --# Glob_Def_Err, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# STree.Table, --# Subprog_Sym, --# The_Heap & --# STree.Table from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, 
--# First_Seen, --# LexTokenManager.State, --# Node, --# Subprog_Sym, --# The_Heap; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.dependency_relation; --# post STree.Table = STree.Table~; end Dependency_Relation; --------------------------------------------------------------------- --# inherit Aggregate_Stack, --# CommandLineData, --# CompleteCheck, --# ComponentManager, --# ContextManager.Ops, --# Debug, --# Dictionary, --# ErrorHandler, --# ExaminerConstants, --# E_Strings, --# Heap, --# LexTokenManager, --# Lists, --# Maths, --# Sem, --# SeqAlgebra, --# SLI, --# SPARK_IO, --# SP_Symbols, --# Statistics, --# STree, --# SystemErrors; package Walk_Expression_P is procedure Walk_Expression (Exp_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Type_Context : in Dictionary.Symbol; Context_Requires_Static : in Boolean; Ref_Var : in SeqAlgebra.Seq; Result : out Sem.Exp_Record; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord); --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# out Aggregate_Stack.State; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# Result, --# STree.Table, --# The_Heap from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Exp_Node, --# LexTokenManager.State, --# Ref_Var, --# Scope, --# STree.Table, --# The_Heap, --# Type_Context & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Context_Requires_Static, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Exp_Node, --# LexTokenManager.State, --# Ref_Var, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap, --# Type_Context & --# Statistics.TableUsage from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Exp_Node, --# LexTokenManager.State, --# Ref_Var, --# Scope, --# STree.Table, --# The_Heap, --# Type_Context; --# pre (STree.Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.name or --# STree.Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.expression or --# STree.Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.simple_expression or --# STree.Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.range_constraint or --# STree.Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.arange or --# STree.Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.attribute) and --# (Dictionary.Is_Null_Symbol (Type_Context) or Dictionary.IsTypeMark (Type_Context, Dictionary.Dict)); --# post (Dictionary.Is_Null_Symbol (Result.Type_Symbol) or Dictionary.IsTypeMark (Result.Type_Symbol, Dictionary.Dict)) and --# STree.Table = STree.Table~; -------------------------------------------------------------------------------------- procedure Walk_Annotation_Expression (Exp_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Type_Context : in Dictionary.Symbol; Context : in Sem.Anno_Tilde_Context; Result : out Sem.Exp_Record; Component_Data : in out 
ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord); --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# out Aggregate_Stack.State; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# Result, --# STree.Table, --# The_Heap from CommandLineData.Content, --# Component_Data, --# Context, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Exp_Node, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Heap, --# Type_Context & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# Context, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Exp_Node, --# LexTokenManager.State, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap, --# Type_Context & --# Statistics.TableUsage from *, --# CommandLineData.Content, --# Component_Data, --# Context, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Exp_Node, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Heap, --# Type_Context; --# pre STree.Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.annotation_expression and --# (Dictionary.Is_Null_Symbol (Type_Context) or Dictionary.IsTypeMark (Type_Context, Dictionary.Dict)); --# post (Dictionary.Is_Null_Symbol (Result.Type_Symbol) or Dictionary.IsTypeMark (Result.Type_Symbol, Dictionary.Dict)) and --# STree.Table = STree.Table~; end Walk_Expression_P; ------------------------------------------------------------------------ --# inherit Aggregate_Stack, --# CommandLineData, --# ComponentManager, --# ContextManager.Ops, --# Dictionary, --# ErrorHandler, --# Heap, --# LexTokenManager, --# Sem, --# SLI, --# SPARK_IO, --# SP_Symbols, --# Statistics, --# STree, --# SystemErrors; package Subprogram_Specification is procedure Wf_Subprogram_Specification_From_Body (Node : in STree.SyntaxNode; Hidden : in Boolean; Current_Scope : in out Dictionary.Scopes; Subprog_Sym : out Dictionary.Symbol; First_Seen : out Boolean); --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Current_Scope, --# Subprog_Sym from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# STree.Table & --# Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# Hidden, --# LexTokenManager.State, --# Node, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Hidden, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table & --# First_Seen from CommandLineData.Content, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# STree.Table; --# pre STree.Syntax_Node_Type (Node, 
STree.Table) = SP_Symbols.function_specification or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.procedure_specification; --# post STree.Table = STree.Table~; procedure Wf_Subprogram_Specification (Spec_Node : in STree.SyntaxNode; Anno_Node : in STree.SyntaxNode; Constraint_Node : in STree.SyntaxNode; Inherit_Node : in STree.SyntaxNode; Context_Node : in STree.SyntaxNode; Generic_Formal_Part_Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Current_Context : in Dictionary.Contexts; Generic_Unit : in Dictionary.Symbol; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord; Subprog_Sym : out Dictionary.Symbol); --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# Anno_Node, --# CommandLineData.Content, --# Component_Data, --# Constraint_Node, --# ContextManager.Ops.Unit_Stack, --# Context_Node, --# Current_Context, --# Current_Scope, --# Dictionary.Dict, --# Generic_Formal_Part_Node, --# Generic_Unit, --# Inherit_Node, --# LexTokenManager.State, --# Spec_Node, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from Anno_Node, --# CommandLineData.Content, --# Component_Data, --# Constraint_Node, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Context_Node, --# Current_Context, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Generic_Formal_Part_Node, --# Generic_Unit, --# Inherit_Node, --# LexTokenManager.State, --# SLI.State, --# SPARK_IO.File_Sys, --# Spec_Node, --# STree.Table, --# The_Heap & --# Subprog_Sym from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Context, --# Current_Scope, --# Dictionary.Dict, --# Generic_Unit, --# LexTokenManager.State, --# Spec_Node, --# STree.Table; --# pre (STree.Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.procedure_specification or --# STree.Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.function_specification or --# STree.Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.entry_specification) and --# (STree.Syntax_Node_Type (Anno_Node, STree.Table) = SP_Symbols.procedure_annotation or --# STree.Syntax_Node_Type (Anno_Node, STree.Table) = SP_Symbols.function_annotation or --# Anno_Node = STree.NullNode) and --# (STree.Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.function_constraint or --# STree.Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.procedure_constraint) and --# (STree.Syntax_Node_Type (Context_Node, STree.Table) = SP_Symbols.context_clause or Context_Node = STree.NullNode) and --# (STree.Syntax_Node_Type (Inherit_Node, STree.Table) = SP_Symbols.inherit_clause or Inherit_Node = STree.NullNode) and --# (STree.Syntax_Node_Type (Generic_Formal_Part_Node, STree.Table) = SP_Symbols.generic_formal_part or --# Generic_Formal_Part_Node = STree.NullNode); --# post STree.Table = STree.Table~; end Subprogram_Specification; -------------- Package bodies 
------------------------------ package body Aggregate_Stack is separate; ----------------------- Subprograms --------------------------- function In_Package_Body (Current_Scope : Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; is begin return Dictionary.Get_Visibility (Scope => Current_Scope) = Dictionary.Local and then Dictionary.IsPackage (Dictionary.GetRegion (Current_Scope)); end In_Package_Body; ----------------------------------------------- function In_Protected_Body (Current_Scope : Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; is begin return Dictionary.Get_Visibility (Scope => Current_Scope) = Dictionary.Local and then Dictionary.IsType (Dictionary.GetRegion (Current_Scope)) and then Dictionary.IsProtectedTypeMark (Dictionary.GetRegion (Current_Scope)); end In_Protected_Body; ------------------------------------------------------- -- Function to determine whether a procedure, task or entry needs a synthesised -- 'each export from all imports' dependency. -- This is done when data flow analysis is to be used, ie in data flow mode, or in -- automatic flow analysis mode when there is no explicit derives annotation. -- This is not compatible with SPARK83. If data-flow mode is used in SPARK83 then -- the derives annotations are used to determine the modes of the global imports and -- exports, but that is done elsewhere. function Needs_Synthetic_Dependency (Proc_Task_Or_Entry : Dictionary.Symbol) return Boolean --# global in CommandLineData.Content; --# in Dictionary.Dict; is separate; ------------------------------------------------------- function Unknown_Type_Record return Exp_Record -- returns a "safe" exp result designed to minimise consequential errs --# global in Dictionary.Dict; --# return R => ((Dictionary.Is_Null_Symbol (R.Type_Symbol) or Dictionary.IsTypeMark (R.Type_Symbol, Dictionary.Dict)) and --# Dictionary.Is_Null_Symbol (R.Stream_Symbol)); is separate; ------------------------------------------------------------------------- function Unexpected_Initialization (Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is separate; ------------------------------------------------------------------------- procedure Get_Type_Bounds (Type_Symbol : in Dictionary.Symbol; Lower_Bound, Upper_Bound : out Typ_Type_Bound) --# global in Dictionary.Dict; --# in LexTokenManager.State; --# derives Lower_Bound, --# Upper_Bound from Dictionary.Dict, --# LexTokenManager.State, --# Type_Symbol; --# post (Lower_Bound.Is_Defined and Upper_Bound.Is_Defined) -> (Lower_Bound.Value <= Upper_Bound.Value); is separate; --------------------------------------------------------------------- procedure Check_Package_Prefix (Node_Pos : in LexTokenManager.Token_Position; Pack_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; OK : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# Pack_Sym, --# Scope, --# SPARK_IO.File_Sys & --# OK from Dictionary.Dict, --# Pack_Sym, --# Scope; is separate; ------------------------------------------------------------------ procedure Check_Valid_Ident (Ident_Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Ok : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# 
in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table & --# Ok from CommandLineData.Content, --# Current_Scope, --# Dictionary.Dict, --# Ident_Node, --# LexTokenManager.State, --# STree.Table; --# pre Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; is separate; ------------------------------------------------------------------ function In_Package_Initialization (Scope : Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; is separate; ---------------------------------------------------------------- function Indexes_Match (Target, Source : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; --# in LexTokenManager.State; is separate; ------------------------------------------------------------------- function Is_External_Interface (Pragma_Node : STree.SyntaxNode) return Boolean --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in STree.Table; --# pre Syntax_Node_Type (Pragma_Node, STree.Table) = SP_Symbols.apragma; is separate; ------------------------------------------------------------------- function Is_Enclosing_Package (Outer_Pack : Dictionary.Symbol; Scope : Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; is separate; ------------------------------------------------------------------------ procedure Get_Literal_Value (Node : in STree.SyntaxNode; Val : out Maths.Value) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table & --# Val from LexTokenManager.State, --# Node, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.integer_number or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.real_number or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.based_integer or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.based_real; is separate; -------------------------------------------- function Substitute_Protected_Type_Self_Reference (Sym, Prefix_Symbol : Dictionary.Symbol) return Dictionary.Symbol --# global in Dictionary.Dict; is separate; ----------------------------------------------------------------- -- Checks Val against Typ'First .. Typ'Last. 
Typ'Last. -- NewVal = Val if all OK or if arithmetic overflow raised a warning -- NewVal = Maths.NoValue if check performed and failed procedure Constraint_Check (Val : in Maths.Value; New_Val : out Maths.Value; Is_Annotation : in Boolean; Typ : in Dictionary.Symbol; Position : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Is_Annotation, --# LexTokenManager.State, --# Position, --# SPARK_IO.File_Sys, --# Typ, --# Val & --# New_Val from Dictionary.Dict, --# LexTokenManager.State, --# Typ, --# Val; is separate; -------------------------------------------------------------------------- function Illegal_Unconstrained (Left_Type, Right_Type : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; --# in LexTokenManager.State; is separate; -------------------------------------------------------------------------- procedure Assignment_Check (Position : in LexTokenManager.Token_Position; Scope : in Dictionary.Scopes; Target_Type : in Dictionary.Symbol; Exp_Result : in out Exp_Record) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Exp_Result, --# LexTokenManager.State, --# Position, --# Scope, --# SPARK_IO.File_Sys, --# Target_Type & --# Exp_Result from *, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# Target_Type & --# LexTokenManager.State from *, --# Dictionary.Dict, --# Exp_Result, --# Target_Type; --# pre Dictionary.Is_Null_Symbol (Exp_Result.Type_Symbol) or Dictionary.IsTypeMark (Exp_Result.Type_Symbol, Dictionary.Dict); --# post Dictionary.Is_Null_Symbol (Exp_Result.Type_Symbol) or Dictionary.IsTypeMark (Exp_Result.Type_Symbol, Dictionary.Dict); is separate; -------------------------------------------------------------------------- function Convert_Tagged_Actual (Actual, Tagged_Parameter_Sym : Dictionary.Symbol) return Dictionary.Symbol --# global in Dictionary.Dict; -- Function is only called if we know substitution is required is separate; ----------------------------------------------------------------------------------- -- Create_Implicit_Positive_Subtype extracted from Plant_Constraining_Type so that it can be -- called directly from wf_constant_declaration where it is used to create an implicit -- subtype for string constants.
procedure Create_Implicit_Positive_Subtype (String_Length : in Maths.Value; Location : in Dictionary.Location; Index_Constraint : out Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Location, --# String_Length & --# Index_Constraint from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Location, --# String_Length & --# LexTokenManager.State from *, --# CommandLineData.Content, --# Dictionary.Dict, --# String_Length; --# post Dictionary.Is_Null_Symbol (Index_Constraint) or Dictionary.IsTypeMark (Index_Constraint, Dictionary.Dict); is separate; --------------------------------------------------------------- procedure Create_Interrupt_Stream_Variable (For_PO : in Dictionary.Symbol; The_Handler : in LexTokenManager.Lex_String; The_Stream_Variable : in LexTokenManager.Lex_String; Error_Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# CommandLineData.Content, --# For_PO, --# LexTokenManager.State, --# The_Handler, --# The_Stream_Variable & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Node_Pos, --# For_PO, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# The_Handler, --# The_Stream_Variable; is separate; --------------------------------------------------------------------- procedure Check_Interrupt_Property_Consistency (Has_Interrupt_Property : in Boolean; Sym : in Dictionary.Symbol; Type_Sym : in Dictionary.Symbol; Error_Node_Pos : in LexTokenManager.Token_Position; Consistent : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Consistent from CommandLineData.Content, --# Dictionary.Dict, --# Has_Interrupt_Property, --# Type_Sym & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Node_Pos, --# Has_Interrupt_Property, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Sym, --# Type_Sym; is separate; --------------------------------------------------------------------- procedure Check_Suspendable_Property_Consistency (Sym : in Dictionary.Symbol; Type_Sym : in Dictionary.Symbol; Is_In_Suspends_List : in Boolean; Error_Node_Pos : in LexTokenManager.Token_Position; Consistent : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Consistent from CommandLineData.Content, --# Dictionary.Dict, --# Is_In_Suspends_List, --# Sym, --# Type_Sym & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Node_Pos, --# Is_In_Suspends_List, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Sym, --# Type_Sym; is separate; 
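--
-- Note (illustrative sketch, not taken from the distributed sources): every
-- subprogram specification above that is completed with "is separate" has
-- its body compiled as an Ada subunit of Sem in its own file.  Assuming the
-- declaration of Check_Suspendable_Property_Consistency above, such a
-- subunit looks roughly like this:
--
--    separate (Sem)
--    procedure Check_Suspendable_Property_Consistency
--      (Sym                 : in     Dictionary.Symbol;
--       Type_Sym            : in     Dictionary.Symbol;
--       Is_In_Suspends_List : in     Boolean;
--       Error_Node_Pos      : in     LexTokenManager.Token_Position;
--       Consistent          :    out Boolean)
--    is
--    begin
--       null;  -- the actual consistency check and error reporting go here
--    end Check_Suspendable_Property_Consistency;
--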
--------------------------------------------------------------------- procedure Check_Priority_Property_Consistency (Sym : in Dictionary.Symbol; Type_Sym : in Dictionary.Symbol; Priority_Property_Value : in LexTokenManager.Lex_String; Error_Node_Pos : in LexTokenManager.Token_Position; Consistent : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Consistent from CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Priority_Property_Value, --# Sym, --# Type_Sym & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Node_Pos, --# LexTokenManager.State, --# Priority_Property_Value, --# SPARK_IO.File_Sys, --# Sym, --# Type_Sym; is separate; --------------------------------------------------------------------- procedure Check_Task_Modifier_Consistency (The_Own_Var_Type : in Dictionary.Symbol; The_Var_Type : in Dictionary.Symbol; Modifier_Is_Task : in Boolean; Error_Node : in STree.SyntaxNode; Consistent : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Consistent from CommandLineData.Content, --# Dictionary.Dict, --# Modifier_Is_Task, --# The_Own_Var_Type, --# The_Var_Type & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Node, --# LexTokenManager.State, --# Modifier_Is_Task, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Own_Var_Type, --# The_Var_Type; --# pre Syntax_Node_Type (Error_Node, STree.Table) = SP_Symbols.identifier; is separate; -------------------------------------------------------------------- procedure Check_Protected_Modifier_Consistency (The_Type : in Dictionary.Symbol; Modifier_Is_Protected : in Boolean; Error_Node : in STree.SyntaxNode; Consistent : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Consistent from CommandLineData.Content, --# Dictionary.Dict, --# Modifier_Is_Protected, --# The_Type & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Node, --# LexTokenManager.State, --# Modifier_Is_Protected, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Type; --# pre Syntax_Node_Type (Error_Node, STree.Table) = SP_Symbols.identifier; is separate; ----------------------------------------------------------------------------- procedure Check_Ceiling_Priority (Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Check_List : in Dictionary.Iterator; Priority_Lex_Value : in LexTokenManager.Lex_String; Error_Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Check_List, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Node_Pos, --# LexTokenManager.State, --# Priority_Lex_Value, --# Scope, --# SPARK_IO.File_Sys, --# Sym; is separate; 
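--
-- Note (hypothetical calling sketch, not taken from this file): the
-- consistency-check procedures above all follow the same shape: they report
-- any problem themselves through ErrorHandler (hence the derives onto
-- ErrorHandler.Error_Context and SPARK_IO.File_Sys) and hand back a single
-- Consistent flag.  Own_Var_Type, Ident_Node and OK are invented
-- caller-side names.
--
--    Check_Protected_Modifier_Consistency
--      (The_Type              => Own_Var_Type,
--       Modifier_Is_Protected => True,
--       Error_Node            => Ident_Node,
--       Consistent            => OK);
--    if not OK then
--       null;  -- error already reported; the caller just records the failure
--    end if;
--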
--------------------------------------------------------------------- -- procedure to check whether a properly-defined subprogram or stub creates -- overloading by re-using the name of a potentially inheritable root -- operation procedure Check_No_Overloading_From_Tagged_Ops (Ident_Node : in STree.SyntaxNode; Subprog_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Abstraction : in Dictionary.Abstractions; Is_Overriding : in Boolean) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in STree.Table; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# Abstraction, --# CommandLineData.Content, --# Ident_Node, --# LexTokenManager.State, --# Scope, --# STree.Table, --# Subprog_Sym & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node, --# Is_Overriding, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Subprog_Sym; --# pre Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; is separate; ---------------------------------------------------------------------------- procedure Check_Priority_Range (Error_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Pragma_Kind : in Dictionary.RavenscarPragmas; Err_Pos : in LexTokenManager.Token_Position; Value : in Maths.Value; Value_Rep : out LexTokenManager.Lex_String) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Sym, --# Err_Pos, --# LexTokenManager.State, --# Pragma_Kind, --# Scope, --# SPARK_IO.File_Sys, --# Value & --# LexTokenManager.State, --# Value_Rep from CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Pragma_Kind, --# Value; is separate; --------------------------------------------------------------------- procedure Check_Named_Association (The_Formals : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Named_Argument_Assoc_Node : in STree.SyntaxNode) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Named_Argument_Assoc_Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Formals; --# pre Syntax_Node_Type (Named_Argument_Assoc_Node, STree.Table) = SP_Symbols.named_argument_association; is separate; ------------------------------------------------------------------------ procedure Check_Announced_Types_Declared (Pack_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# Pack_Sym, --# Scope, --# SPARK_IO.File_Sys; is separate; 
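--
-- Note (hypothetical calling sketch, not taken from this file): a typical
-- use of Check_Priority_Range above is to range-check the static value of a
-- Ravenscar priority pragma and obtain its lexical representation.
-- Task_Sym, The_Pragma_Kind, Value_Node and Priority_Value are invented
-- caller-side names.
--
--    Check_Priority_Range
--      (Error_Sym   => Task_Sym,
--       Scope       => Scope,
--       Pragma_Kind => The_Pragma_Kind,
--       Err_Pos     => Node_Position (Node => Value_Node),
--       Value       => Priority_Value,
--       Value_Rep   => Value_Rep);
--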
-------------------------------------------------------------------- procedure Check_Closing_Identifier (End_Name_Node, Ident_Node : in STree.SyntaxNode) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# End_Name_Node, --# ErrorHandler.Error_Context, --# Ident_Node, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (End_Name_Node, STree.Table) = SP_Symbols.dotted_simple_name and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; is separate; -------------------------------------------------------------------- procedure Add_Derives_Stream_Effects (Node_Pos : in LexTokenManager.Token_Position; Subprog_Sym : in Dictionary.Symbol; Abstraction : in Dictionary.Abstractions) --# global in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# Abstraction, --# ContextManager.Ops.Unit_Stack, --# Node_Pos, --# Subprog_Sym & --# SPARK_IO.File_Sys from *, --# Abstraction, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node_Pos, --# Subprog_Sym; is separate; --------------------------------------------------------------------- procedure Plant_Constraining_Type (Expression_Type : in Dictionary.Symbol; String_Length : in Maths.Value; Actual_Node : in STree.SyntaxNode) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# SPARK_IO.File_Sys, --# STree.Table from *, --# Actual_Node, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Expression_Type, --# LexTokenManager.State, --# STree.Table, --# String_Length & --# LexTokenManager.State from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Expression_Type, --# String_Length; --# pre (Dictionary.Is_Null_Symbol (Expression_Type) or Dictionary.IsTypeMark (Expression_Type, Dictionary.Dict)) and --# Syntax_Node_Type (Actual_Node, STree.Table) = SP_Symbols.expression; --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------------------- procedure Add_Record_Sub_Components (Record_Var_Sym : in Dictionary.Symbol; Record_Type_Sym : in Dictionary.Symbol; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out Statistics.TableUsage; --# derives Component_Data, --# Dictionary.Dict, --# Statistics.TableUsage, --# The_Heap from *, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Record_Type_Sym, --# Record_Var_Sym, --# The_Heap & --# null from CommandLineData.Content; is separate; --------------------------------------------------------------- procedure Get_Subprogram_Anno_Key_Nodes (Node : in STree.SyntaxNode; Global_Node : out STree.SyntaxNode; Dependency_Node : out STree.SyntaxNode; Declare_Node : out STree.SyntaxNode; Constraint_Node : out STree.SyntaxNode) --# global in STree.Table; --# derives Constraint_Node, --# Declare_Node, --# Dependency_Node, --# 
Global_Node from Node, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.procedure_annotation or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.function_annotation; --# post (Syntax_Node_Type (Global_Node, STree.Table) = SP_Symbols.moded_global_definition or --# Global_Node = STree.NullNode) and --# (Syntax_Node_Type (Dependency_Node, STree.Table) = SP_Symbols.dependency_relation or --# Dependency_Node = STree.NullNode) and --# (Syntax_Node_Type (Declare_Node, STree.Table) = SP_Symbols.declare_annotation or --# Declare_Node = STree.NullNode) and --# (Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.function_constraint or --# Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.procedure_constraint); is separate; --------------------------------------------------------------- procedure Find_Actual_Node (For_Formal : in Dictionary.Symbol; Named_Argument_Assoc_Node : in STree.SyntaxNode; Expression_Node : out STree.SyntaxNode) --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in out STree.Table; --# derives Expression_Node, --# STree.Table from Dictionary.Dict, --# For_Formal, --# LexTokenManager.State, --# Named_Argument_Assoc_Node, --# STree.Table; --# pre Syntax_Node_Type (Named_Argument_Assoc_Node, STree.Table) = SP_Symbols.named_argument_association; --# post (Syntax_Node_Type (Expression_Node, STree.Table) = SP_Symbols.expression or Expression_Node = STree.NullNode) and --# STree.Table = STree.Table~; is separate; ------------------------------------------------------------------------ function Find_Previous_Package (Node : STree.SyntaxNode) return LexTokenManager.Lex_String --# global in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.initial_declarative_item_rep; is separate; ------------------------------------------------------------------------ procedure Walk_Name (Exp_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord; Result : out Exp_Record; Is_A_Name : out Boolean; Ref_Var_Param : in SeqAlgebra.Seq) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Exp_Node, --# LexTokenManager.State, --# Ref_Var_Param, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Exp_Node, --# LexTokenManager.State, --# Ref_Var_Param, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# Is_A_Name from Exp_Node, --# STree.Table & --# Result from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Exp_Node, --# LexTokenManager.State, --# Ref_Var_Param, --# Scope, 
--# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.expression; --# post (Dictionary.Is_Null_Symbol (Result.Type_Symbol) or Dictionary.IsTypeMark (Result.Type_Symbol, Dictionary.Dict)) and --# STree.Table = STree.Table~; is separate; ------------------------------------------------------------------------ procedure Wf_External_Interface (Pragma_Node : in STree.SyntaxNode; Entity_Sym : in Dictionary.Symbol; Error_Found : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Entity_Sym, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Pragma_Node, --# SPARK_IO.File_Sys, --# STree.Table & --# Error_Found, --# STree.Table from CommandLineData.Content, --# Dictionary.Dict, --# Entity_Sym, --# LexTokenManager.State, --# Pragma_Node, --# STree.Table; --# pre Syntax_Node_Type (Pragma_Node, STree.Table) = SP_Symbols.apragma; --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------------- procedure Wf_Pragma (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# LexTokenManager.State, --# STree.Table from CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.apragma; --# post STree.Table = STree.Table~; is separate; --------------------------------------------------------------------- procedure Wf_Type_Mark (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Context : in Dictionary.Contexts; Type_Sym : out Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Context, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table & --# STree.Table, --# Type_Sym from CommandLineData.Content, --# Context, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.type_mark; --# post (Dictionary.Is_Null_Symbol (Type_Sym) or Dictionary.IsTypeMark (Type_Sym, Dictionary.Dict)) and STree.Table = STree.Table~; is separate; ----------------------------------------------------------------- procedure 
Wf_Array_Type_Definition (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Ident_Node : in STree.SyntaxNode; Dec_Loc : in LexTokenManager.Token_Position; The_Array : out Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table, --# The_Array from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# Ident_Node, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table & --# SLI.State from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.array_type_definition and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------- procedure Wf_Formal_Part (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Subprog_Sym : in Dictionary.Symbol; First_Occurrence : in Boolean; Context : in Dictionary.Contexts) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# Context, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# First_Occurrence, --# LexTokenManager.State, --# Node, --# STree.Table, --# Subprog_Sym & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Context, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# First_Occurrence, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table, --# Subprog_Sym; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.formal_part; --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------------------------- procedure Wf_Generic_Formal_Part (Node : in STree.SyntaxNode; Generic_Ident_Node_Pos : in LexTokenManager.Token_Position; Generic_Unit : in Dictionary.Symbol; Package_Or_Subprogram_Symbol : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, 
--# Dictionary.Dict, --# Generic_Unit, --# LexTokenManager.State, --# Node, --# Package_Or_Subprogram_Symbol, --# STree.Table & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Generic_Ident_Node_Pos, --# Generic_Unit, --# LexTokenManager.State, --# Node, --# Package_Or_Subprogram_Symbol, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.generic_formal_part; --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------------------------- procedure Wf_Renaming_Declaration (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.renaming_declaration; --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------- procedure Wf_Justification_Statement (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Component_Data, --# Dictionary.Dict, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# SLI.State from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# 
SLI.State, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.justification_statement; --# post STree.Table = STree.Table~; is separate; ------------------------------------------------------------------------ procedure Wf_Subprogram_Declaration (Node : in STree.SyntaxNode; Inherit_Node : in STree.SyntaxNode; Context_Node : in STree.SyntaxNode; Generic_Formal_Part_Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Generic_Unit : in Dictionary.Symbol; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord; Subprog_Sym : out Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Context_Node, --# Current_Scope, --# Dictionary.Dict, --# Generic_Formal_Part_Node, --# Generic_Unit, --# Inherit_Node, --# LexTokenManager.State, --# Node, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Context_Node, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Generic_Formal_Part_Node, --# Generic_Unit, --# Inherit_Node, --# LexTokenManager.State, --# Node, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# Subprog_Sym from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# Generic_Unit, --# LexTokenManager.State, --# Node, --# STree.Table; --# pre (Syntax_Node_Type (Node, STree.Table) = SP_Symbols.subprogram_declaration or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.not_overriding_subprogram_declaration or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.entry_declaration) and --# (Syntax_Node_Type (Context_Node, STree.Table) = SP_Symbols.context_clause or Context_Node = STree.NullNode) and --# (Syntax_Node_Type (Inherit_Node, STree.Table) = SP_Symbols.inherit_clause or Inherit_Node = STree.NullNode) and --# (Syntax_Node_Type (Generic_Formal_Part_Node, STree.Table) = SP_Symbols.generic_formal_part or --# Generic_Formal_Part_Node = STree.NullNode); --# post STree.Table = STree.Table~; is separate; --------------------------------------------------------------------- procedure Wf_Entire_Variable (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Error_Hint : in Visibility_Error_Hint; Var_Sym : out Dictionary.Symbol; Dotted : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dotted, --# STree.Table, --# Var_Sym from CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# 
ErrorHandler.Error_Context, --# Error_Hint, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.entire_variable or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_primary; --# post STree.Table = STree.Table~; is separate; ---------------------------------------------------------------------------- procedure Wf_Global_Definition (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Subprog_Sym : in Dictionary.Symbol; First_Seen : in Boolean; Sem_Err_Found : out Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# Sem_Err_Found, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# First_Seen, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# Subprog_Sym & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# First_Seen, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Subprog_Sym & --# SLI.State from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# First_Seen, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Subprog_Sym & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# First_Seen, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# STree.Table, --# Subprog_Sym; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.moded_global_definition; --# post STree.Table = STree.Table~; is separate; ---------------------------------------------------------------------- procedure Wf_Priority_Value (Node : in STree.SyntaxNode; Pragma_Kind : in Dictionary.RavenscarPragmas; Error_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; The_Heap : in out Heap.HeapRecord; Value_Rep : out LexTokenManager.Lex_String; Compatible : out Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# out Aggregate_Stack.State; --# derives Aggregate_Stack.State, --# Compatible, --# Dictionary.Dict, --# STree.Table, --# The_Heap from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Sym, --# LexTokenManager.State, --# Node, --# Pragma_Kind, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# 
The_Heap & --# LexTokenManager.State, --# Value_Rep from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Pragma_Kind, --# Scope, --# STree.Table, --# The_Heap & --# SLI.State from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# Statistics.TableUsage from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.expression or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_expression; --# post STree.Table = STree.Table~; is separate; --------------------------------------------------------------------- procedure Wf_Property_List (Node : in STree.SyntaxNode; Type_Node_Pos : in LexTokenManager.Token_Position; Scope : in Dictionary.Scopes; The_Owner : in Dictionary.Symbol; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap, --# The_Owner & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap, --# The_Owner, --# Type_Node_Pos; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.property_list; --# post STree.Table = STree.Table~; is separate; ---------------------------------------------------------------------- procedure Wf_Declare_Annotation (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Task_Or_Proc : in Dictionary.Symbol; First_Seen : in Boolean; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# First_Seen, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# Task_Or_Proc, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from 
CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# First_Seen, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Task_Or_Proc, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.declare_annotation; --# post STree.Table = STree.Table~; is separate; -------------------------------------------------------------------------------- package body Dependency_Relation is separate; ---------------------------------------------------------------------- procedure Wf_Context_Clause_Package_Body (Node : in STree.SyntaxNode; Comp_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; With_Public_Child : out Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table, --# With_Public_Child from CommandLineData.Content, --# Comp_Sym, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Comp_Sym, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.context_clause; --# post STree.Table = STree.Table~; is separate; --------------------------------------------------------------------- procedure Wf_Context_Clause (Node : in STree.SyntaxNode; Comp_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# Comp_Sym, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Comp_Sym, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.context_clause; --# post STree.Table = STree.Table~; is separate; ---------------------------------------------------------------------- procedure Wf_Inherit_Clause (Node : in STree.SyntaxNode; Comp_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# Comp_Sym, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Comp_Sym, --# ContextManager.Ops.Unit_Stack, 
--# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# SLI.State from *, --# CommandLineData.Content, --# Comp_Sym, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Sym, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.inherit_clause; --# post STree.Table = STree.Table~; is separate; ------------------------------------------------------------------------ procedure Wf_Subprogram_Annotation (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Subprog_Sym : in Dictionary.Symbol; First_Seen : in Boolean; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# First_Seen, --# LexTokenManager.State, --# Node, --# STree.Table, --# Subprog_Sym, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# First_Seen, --# LexTokenManager.State, --# Node, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Subprog_Sym, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.procedure_annotation or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.function_annotation; --# post STree.Table = STree.Table~; is separate; ---------------------------------------------------------------------- procedure Range_Check (A_Range : in Boolean; Position : in LexTokenManager.Token_Position; Error_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from A_Range, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Position, --# SPARK_IO.File_Sys & --# Error_Found from *, --# A_Range; is separate; ----------------------------------------------------------------------------- procedure Wf_Argument_Association (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Param_Type : in Dictionary.Symbol; Position : in LexTokenManager.Token_Position; Exp_Result : in Exp_Record; Fun_Info : in out Exp_Record; Error_Found : in out Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out 
ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Exp_Result, --# Fun_Info, --# LexTokenManager.State, --# Node, --# Param_Type, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Exp_Result, --# Fun_Info, --# LexTokenManager.State, --# Node, --# Param_Type, --# Position, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# Error_Found from *, --# Dictionary.Dict, --# Exp_Result, --# Fun_Info, --# Param_Type, --# Scope & --# Fun_Info from *, --# Exp_Result & --# LexTokenManager.State from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Exp_Result, --# Fun_Info, --# Node, --# Param_Type, --# Scope, --# STree.Table & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Exp_Result, --# Fun_Info, --# LexTokenManager.State, --# Node, --# Param_Type, --# Position, --# Scope, --# STree.Table; --# pre (Syntax_Node_Type (Node, STree.Table) = SP_Symbols.named_argument_association or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.positional_argument_association or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_named_argument_association or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_positional_argument_association or --# Node = STree.NullNode) and --# (Dictionary.Is_Null_Symbol (Exp_Result.Type_Symbol) or Dictionary.IsTypeMark (Exp_Result.Type_Symbol, Dictionary.Dict)); --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------------- package body Walk_Expression_P is separate; --------------------------------------------------------------------- procedure Wf_Basic_Declarative_Item (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.basic_declarative_item; --# post STree.Table = STree.Table~; is separate; -------------------------------------------------------------------- procedure Wf_Predicate (Node : in STree.SyntaxNode; 
Scope : in Dictionary.Scopes; Context : in Anno_Tilde_Context; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord; Errors_Found : out Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# out Aggregate_Stack.State; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# Errors_Found, --# LexTokenManager.State, --# STree.Table, --# The_Heap from CommandLineData.Content, --# Component_Data, --# Context, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# Context, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# Statistics.TableUsage from *, --# CommandLineData.Content, --# Component_Data, --# Context, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.predicate; --# post STree.Table = STree.Table~; is separate; -------------------------------------------------------------------- procedure Wf_Subprogram_Constraint (Node : in STree.SyntaxNode; Subprogram_Sym : in Dictionary.Symbol; First_Seen : in Boolean; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# First_Seen, --# LexTokenManager.State, --# Node, --# STree.Table, --# Subprogram_Sym, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# First_Seen, --# LexTokenManager.State, --# Node, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Subprogram_Sym, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.procedure_constraint or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.function_constraint; --# post STree.Table = STree.Table~; is separate; ------------------------------------------------------------------------- procedure Wf_Generic_Subprogram_Instantiation (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; 
Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.generic_subprogram_instantiation; --# post STree.Table = STree.Table~; is separate; ---------------------------------------------------------------------- procedure Wf_Package_Declaration (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.package_declaration or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.private_package_declaration or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.generic_package_declaration; --# post STree.Table = STree.Table~; is separate; ---------------------------------------------------------------------- package body Subprogram_Specification is separate; ------------------------------------------------------------------------- -- Exported subprogram procedure CompUnit (Top_Node : in STree.SyntaxNode; Do_VCG : in Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out 
Declarations.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out STree.Table; --# in out VCG.Invoked; --# out Aggregate_Stack.State; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Do_VCG, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Top_Node & --# Declarations.State, --# Graph.Table, --# Statistics.TableUsage, --# StmtStack.S, --# VCG.Invoked from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Do_VCG, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Top_Node; --# post STree.Table = STree.Table~; is separate; end Sem; ././@LongLink0000000000000000000000000000014600000000000011566 Lustar rootrootspark-2012.0.deb/examiner/errorhandler-conversions-tostring-unconddependency-unconddependencyexpl.adbspark-2012.0.deb/examiner/errorhandler-conversions-tostring-unconddependency-unconddependencyexpl.ad0000644000175000017500000000446511753202337033234 0ustar eugeneugenseparate (ErrorHandler.Conversions.ToString.UncondDependency) procedure UncondDependencyExpl (E_Str : in out E_Strings.T) is begin case Err_Type is when ErrorHandler.Not_Used_New => E_Strings.Append_String (E_Str => E_Str, Str => "The item before ""is not derived ..."" is an export or function return value and the item(s)" & " after are imports of the subprogram. The message indicates that a dependency, stated in the dependency" & " relation (derives annotation) or implied by the function signature is not present in the code." & " The absence of a stated dependency" & " is always an error in either code or annotation."); when ErrorHandler.Not_Used => E_Strings.Append_String (E_Str => E_Str, Str => "The variable XXX, which appears in the dependency relation of a" & " procedure subprogram, as an import from which the export YYY is derived," & " is not used in the code for that purpose. YYY may be a function return value." & " This version of the message has been retained for backward compatibility."); when ErrorHandler.Ineff_Init => E_Strings.Append_String (E_Str => E_Str, Str => "Here XXX is an own variable of a package, initialized in the package" & " initialization." 
& " The message states that XXX is updated elsewhere, before being read."); when ErrorHandler.Ineff_Local_Init => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if the value assigned to a variable at declaration cannot" & " affect the final value of any exported variable of the subprogram in" & " which it occurs because, for example, it is overwritten before it is used."); when ErrorHandler.Policy_Violation => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if safety or security policy checking is enabled and the specified" & " dependency relation contains a relationship in which the flow of information" & " from state or input to state or output violates the selected policy."); when others => null; end case; end UncondDependencyExpl; ././@LongLink0000000000000000000000000000015600000000000011567 Lustar rootrootspark-2012.0.deb/examiner/errorhandler-conversions-tostring-ineffectivestatement-ineffectivestatementexpl.adbspark-2012.0.deb/examiner/errorhandler-conversions-tostring-ineffectivestatement-ineffectivestatemen0000644000175000017500000000333611753202337033346 0ustar eugeneugenseparate (ErrorHandler.Conversions.ToString.IneffectiveStatement) procedure IneffectiveStatementExpl (E_Str : in out E_Strings.T) is begin if Err_Num.Name1 = Error_Types.NoName then E_Strings.Append_String (E_Str => E_Str, Str => "Execution of this statement cannot affect the final value of any exported" & " variable of the subprogram in which it occurs. The cause may be a" & " data-flow anomaly (i.e. the statement could be an assignment to a" & " variable, which is always updated again before it is read. However," & " statements may be ineffective for other reasons - see Section 4.1 of" & " Appendix A."); else E_Strings.Append_String (E_Str => E_Str, Str => "This message always relates to a procedure call or an assignment to a" & " record. The variable XXX may be an actual parameter corresponding to a" & " formal one that is exported;" & " otherwise XXX is an exported global variable of the procedure." & " The message indicates that" & " the updating of XXX, as a result of the procedure call, has no effect on" & " any final values of exported variables of the calling subprogram." & " Where the ineffective assignment is expected (e.g. calling a supplied" & " procedure that returns more parameters than are needed for the immediate purpose)," & " it can be a useful convention to choose a distinctive name, such as ""Unused"" for" & " the actual parameter concerned. The message ""Assignment to Unused is ineffective""" & " is then self-documenting."); end if; end IneffectiveStatementExpl; spark-2012.0.deb/examiner/sem-wf_justification_statement.adb0000644000175000017500000015502411753202336023145 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SLI; separate (Sem) procedure Wf_Justification_Statement (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) is Next_Node : STree.SyntaxNode; -------------------------------------------------------------------------------- procedure Wf_Start_Justification (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Component_Data, --# Dictionary.Dict, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.justification_statement; --# post STree.Table = STree.Table~; is It : STree.Iterator; Next_Node : STree.SyntaxNode; -------------------------------------------------------------------------------- procedure Wf_Justification_Clause (Start_Line : in LexTokenManager.Line_Numbers; Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Component_Data, --# Dictionary.Dict, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# Start_Line, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.justification_clause; --# post STree.Table = STree.Table~; is Error_Found : Boolean; Current_Node : STree.SyntaxNode; Err_Num_Node : STree.SyntaxNode; Valid : Boolean; Kind : ErrorHandler.Justification_Kinds; Val : Maths.Value; Maths_Valid : Maths.ErrorCode; Err_Num : Natural; Err_Num_Int : Integer; Explanation : E_Strings.T; Identifiers : ErrorHandler.Justification_Identifiers; Maximum_Justifications_Reached : Boolean; Applies_To_All : Boolean; 
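-- A justification clause, as analysed below, consists of a message-kind identifier
-- (matched against "Flow_Message" or "Warning_Message", so a leading part such as
-- "Flow" or "Warning" is accepted), a non-negative error number, an optional list of
-- (possibly dotted) names -- or, for auto-code generator profiles, an "all" form --
-- and an explanation string.  For illustration only (variable name and reason text
-- invented here; the definitive syntax is the Examiner's accept-annotation grammar),
-- such a clause might be written in source as:
--    --# accept Flow, 10, X, "X is intentionally unused here";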
--------------------------------------------------------------------------------

procedure Check_Kind (Lex_String : in     LexTokenManager.Lex_String;
                      Kind       :    out ErrorHandler.Justification_Kinds;
                      Valid      :    out Boolean)
--# global in LexTokenManager.State;
--# derives Kind,
--#         Valid from LexTokenManager.State,
--#                    Lex_String;
is
   Flow : constant String := "FLOW_MESSAGE";
   Warn : constant String := "WARNING_MESSAGE";

   Ex_String : E_Strings.T;
   Found     : Boolean;
   Start_Pos : E_Strings.Positions;
begin
   -- The kind of message (Flow or Warning) is in the form of an identifier and therefore is extracted
   -- from the syntax tree as a Lex_String.  We first convert it to an Examiner_String.
   -- Then we see if it is a unique subset of either "Flow_Message" or "Warning_Message".

   -- Ignore case
   Ex_String := E_Strings.Upper_Case (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Lex_String));

   -- Try "flow" first
   E_Strings.Find_Examiner_Sub_String
     (E_Str         => E_Strings.Copy_String (Str => Flow),
      Search_String => Ex_String,
      String_Found  => Found,
      String_Start  => Start_Pos);

   -- To get a match we need Found and Start_Pos = 1
   if Found and then Start_Pos = 1 then
      Kind  := ErrorHandler.Flow_Message;
      Valid := True;
   else
      -- Try "warn"
      E_Strings.Find_Examiner_Sub_String
        (E_Str         => E_Strings.Copy_String (Str => Warn),
         Search_String => Ex_String,
         String_Found  => Found,
         String_Start  => Start_Pos);

      -- To get a match we need Found and Start_Pos = 1
      if Found and then Start_Pos = 1 then
         Kind  := ErrorHandler.Warning_Message;
         Valid := True;
      else
         Kind  := ErrorHandler.Flow_Message; -- not used, for DF purposes only
         Valid := False;
      end if;
   end if;
end Check_Kind;

--------------------------------------------------------------------------------

function Is_Disallowed_Warning (Kind : ErrorHandler.Justification_Kinds; Err_Num : Natural) return Boolean is
begin
   -- Initially only prohibit warnings generated by the justification system itself.
   -- Extend here as necessary.
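   -- At present the disallowed set is warnings 120, 121 and 122 (see the return
   -- expression below); Wf_Justification_Clause reports semantic error 123 when a
   -- clause attempts to justify one of these.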
return Kind = ErrorHandler.Warning_Message and then (Err_Num = 120 or else Err_Num = 121 or else Err_Num = 122); end Is_Disallowed_Warning; -------------------------------------------------------------------------------- procedure Check_Identifiers (Opt_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Kind : in ErrorHandler.Justification_Kinds; Err_Num : in Natural; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord; Identifiers : out ErrorHandler.Justification_Identifiers; Valid : out Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Component_Data, --# Dictionary.Dict, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Opt_Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Err_Num, --# Kind, --# LexTokenManager.State, --# Opt_Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# Identifiers from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Opt_Node, --# Scope, --# STree.Table, --# The_Heap & --# Valid from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Err_Num, --# Kind, --# LexTokenManager.State, --# Opt_Node, --# Scope, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Opt_Node, STree.Table) = SP_Symbols.justification_opt; --# post STree.Table = STree.Table~; is It : STree.Iterator; Next_Node : STree.SyntaxNode; Identifier_Count : Natural := 0; Name_Error : Natural; Current_Name_Valid : Boolean; -------------------------------------------------------------------------------- procedure Process_Dotted_Simple_Name_Or_Null (DSNON_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord; Valid : out Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Identifiers; --# in out Identifier_Count; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Component_Data, --# Dictionary.Dict, --# STree.Table, --# The_Heap, --# Valid from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# DSNON_Node, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# DSNON_Node, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# Identifiers from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# DSNON_Node, --# Identifier_Count, --# LexTokenManager.State, --# Scope, --# STree.Table, --# 
The_Heap & --# Identifier_Count from * & --# Statistics.TableUsage from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# DSNON_Node, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (DSNON_Node, STree.Table) = SP_Symbols.dotted_simple_name_or_null; --# post STree.Table = STree.Table~; is Name_Node : STree.SyntaxNode; Valid_Simple_Name : Boolean; Id_Str : LexTokenManager.Lex_String; Sym : Dictionary.Symbol; -------------------------------------------------------------------------------- procedure Process_Dotted_Simple_Name (DSN_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord; Str : out LexTokenManager.Lex_String; Sym : out Dictionary.Symbol; Valid : out Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Component_Data, --# Dictionary.Dict, --# Str, --# STree.Table, --# Sym, --# The_Heap, --# Valid from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# DSN_Node, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# DSN_Node, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# Statistics.TableUsage from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# DSN_Node, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (DSN_Node, STree.Table) = SP_Symbols.dotted_simple_name; --# post STree.Table = STree.Table~; is It : STree.Iterator; Dotted : Boolean; Id_Node : STree.SyntaxNode; P_Id_Str, Id_Str : LexTokenManager.Lex_String; Local_Sym, Sym_So_Far : Dictionary.Symbol; -------------------------------------------------------------------------------- function Selector_Allowed_For (Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return Dictionary.IsPackage (Sym) or else (Dictionary.IsTypeMark (Sym) and then (Dictionary.TypeIsRecord (Sym) or else Dictionary.IsProtectedTypeMark (Sym))) or else Dictionary.IsRecordComponent (Sym) or else (Dictionary.IsObject (Sym) and then Dictionary.TypeIsRecord (Dictionary.GetType (Sym))) or else (Dictionary.IsFunction (Sym) and then Dictionary.TypeIsRecord (Dictionary.GetType (Sym))) or else (Dictionary.IsObject (Sym) and then Dictionary.IsProtectedType (Dictionary.GetType (Sym))); end Selector_Allowed_For; begin -- Process_Dotted_Simple_Name Valid := True; -- default -- See whether it is a simple identifier or not. If it is we return a Lex_String and a Symbol -- otherwise just a Symbol. Dotted gets set True if we loop through >1 identifiers Dotted := False; -- Loop through identifiers. 
Loop exits prematurely for simple identifier case It := Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => DSN_Node, In_Direction => STree.Down); Id_Node := Get_Node (It); -- ASSUME Id_Node = identifier if Syntax_Node_Type (Node => Id_Node) = SP_Symbols.identifier then -- ASSUME Id_Node = identifier Id_Str := Node_Lex_String (Node => Id_Node); -- Note that the lookup uses Proof_Context because we may be trying to justify a flow error -- or warning involving an identifier that is not visible in Program_Context (eg an abstract -- own variable). Local_Sym := Dictionary.LookupItem (Name => Id_Str, Scope => Scope, Context => Dictionary.ProofContext, Full_Package_Name => False); else Id_Str := LexTokenManager.Null_String; Local_Sym := Dictionary.NullSymbol; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Id_Node = identifier in Process_Dotted_Simple_Name"); end if; P_Id_Str := LexTokenManager.Null_String; loop --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Id_Node, STree.Table) = SP_Symbols.identifier and --# Id_Node = Get_Node (It); -- any time we fail to find something it is an error failure if Dictionary.Is_Null_Symbol (Local_Sym) then ErrorHandler.Semantic_Error2 (Err_Num => 1, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Id_Node), Id_Str1 => Id_Str, Id_Str2 => P_Id_Str); Valid := False; exit; end if; STree.Set_Node_Lex_String (Sym => Local_Sym, Node => Id_Node); -- set up next iteration It := STree.NextNode (It); exit when STree.IsNull (It); -- If we get to here then there is more than one identifier -- If there is more than one identifier and we are processing a record object or -- record subcomponent, then there is -- an extra step required: we do not add symbols for all the components of records to the -- dictionary all the time, but only where they are needed. Therefore if we try and look -- up R.F here (where R is record object) then the look up will fail because there is no -- subcomponent symbol for R.F. Therefore we must add the symbols now so that the -- LookUpSelectedItem below will succeed. if Dictionary.IsVariableOrSubcomponent (Local_Sym) and then Dictionary.TypeIsRecord (Dictionary.GetType (Local_Sym)) then Add_Record_Sub_Components (Record_Var_Sym => Local_Sym, Record_Type_Sym => Dictionary.GetType (Local_Sym), Component_Data => Component_Data, The_Heap => The_Heap); end if; -- end of sub component addition -- Because there is more than identifier we save some context for next time round the loop Dotted := True; P_Id_Str := Id_Str; Id_Node := Get_Node (It); Id_Str := Node_Lex_String (Node => Id_Node); Sym_So_Far := Local_Sym; -- needed for trapping P.P.P.P.X case later on -- At this point we have a prefix in Local_Sym and we are about to process -- a selector. Local_Sym had better be the kind of thing that can have a -- selector. if not Selector_Allowed_For (Sym => Local_Sym) then ErrorHandler.Semantic_Error_Sym (9, ErrorHandler.No_Reference, Node_Position (Id_Node), Local_Sym, Scope); Local_Sym := Dictionary.NullSymbol; exit; end if; -- Note that the lookup uses Proof_Context because we may be trying to justify a flow error -- or warning involving an identifier that is not visible in Program_Context (eg an abstract -- own variable). 
Local_Sym := Dictionary.LookupSelectedItem (Prefix => Local_Sym, Selector => Id_Str, Scope => Scope, Context => Dictionary.ProofContext); -- check to see if we are getting the same symbol over and again if Local_Sym = Sym_So_Far then -- P.P.P.P.X case Local_Sym := Dictionary.NullSymbol; -- to cause "Not visible" error at top of loop end if; end loop; -- return results if Dotted then Str := LexTokenManager.Null_String; else Str := Id_Str; end if; Sym := Local_Sym; end Process_Dotted_Simple_Name; begin -- Process_Dotted_Simple_Name_Or_Null if Identifier_Count <= ErrorHandler.Max_Justification_Identifier_Length then Identifier_Count := Identifier_Count + 1; end if; Name_Node := Child_Node (Current_Node => DSNON_Node); -- ASSUME Name_Node = dotted_simple_name OR null_name case Syntax_Node_Type (Node => Name_Node) is when SP_Symbols.dotted_simple_name => -- ASSUME Name_Node = dotted_simple_name -- We have a single dotted simple name to process. -- If it is OK then it will go in the -- identifier list at index position Identifier_Count Process_Dotted_Simple_Name (DSN_Node => Name_Node, Scope => Scope, Component_Data => Component_Data, The_Heap => The_Heap, Str => Id_Str, Sym => Sym, Valid => Valid_Simple_Name); when SP_Symbols.null_name => -- ASSUME Name_Node = null_name Id_Str := LexTokenManager.Null_String; Sym := Dictionary.GetNullVariable; Valid_Simple_Name := True; when others => -- Assign well-defined valued here to keep IFA happy. Id_Str := LexTokenManager.Null_String; Sym := Dictionary.NullSymbol; Valid_Simple_Name := True; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Name_Node = dotted_simple_name OR null_name in Wf_Justification_Clause"); end case; if Valid_Simple_Name then Valid := True; if Identifier_Count in ErrorHandler.Justification_Identifier_Index then Identifiers (Identifier_Count) := ErrorHandler.Justification_Identifier'(String_Form => Id_Str, Symbol_Form => Sym); end if; else Valid := False; -- don't add clause at all if any part is malformed end if; end Process_Dotted_Simple_Name_Or_Null; -------------------------------------------------------------------------------- -- This function checks the number of names in the accept -- annotation against the error number. It returns the error message -- to report, or 0 if the clause is semantically correct. -- -- Unfortunately Flow Error 10 can have either 0 or 1 identifiers. -- -- Flow Errors 50 and 602 can have 1 or 2 identifiers, depending -- on whether the enclosing program unit is a function (1 identifier needed) -- or not (2 identifiers needed for procedures or task bodies). 
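-- Illustrative examples only (names and reason strings invented; the authoritative
-- rules are the case statement in the function below).  Flow error 10 may be
-- justified with zero names or with one name:
--    --# accept Flow, 10, "expression is intentionally ineffective";
--    --# accept Flow, 10, X, "X is intentionally ineffective";
-- whereas flow errors 50 and 602 name the export and then the import inside a
-- procedure or task body, but only the import inside a function (the function
-- return value being the implicit export):
--    --# accept Flow, 50, Y, X, "Y is not derived from X by design";
--    --# accept Flow, 50, X, "the return value is not derived from X by design";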
function Justification_Name_Length_Error (Enclosing_Region_Is_A_Function : Boolean; Err_Num : Natural) return Natural --# global in Identifier_Count; --# in Kind; is Ret_Val : Natural := 0; begin case Kind is when ErrorHandler.Flow_Message => case Err_Num is -- These flow errors require exactly zero names to be justified when 22 | 40 | 41 => if Identifier_Count /= 0 then Ret_Val := 124; end if; -- These flow errors require exactly two names to be justified when 3 | 4 | 57 | 601 | 605 | 606 => if Identifier_Count /= 2 then Ret_Val := 126; end if; -- Flow Error 10 (ineffective expression or statement) can require -- either zero or one name when 10 => if Identifier_Count = 0 or else Identifier_Count = 1 then Ret_Val := 0; else Ret_Val := 127; end if; -- Flow errors 50 and 602 can require one or two names, -- depending on Enclosing_Region_Is_A_Function when 50 | 602 => if Enclosing_Region_Is_A_Function then -- function - 1 identifier needed if Identifier_Count = 1 then Ret_Val := 0; else Ret_Val := 125; end if; else -- procedure or task body - 2 identifiers needed if Identifier_Count = 2 then Ret_Val := 0; else Ret_Val := 126; end if; end if; -- All other flow errors require exactly one name when others => if Identifier_Count /= 1 then Ret_Val := 125; end if; end case; when ErrorHandler.Warning_Message => case Err_Num is -- The following warnings require exactly 1 name to be justified when 1 | 5 | 9 | 10 | 12 | 13 | 169 | 311 | 312 | 313 | 314 | 350 | 351 | 391 | 392 | 393 | 394 | 395 | 396 | 397 | 398 | 400 | 403 | 410 | 411 | 412 | 413 => if Identifier_Count /= 1 then Ret_Val := 125; end if; -- All other warnings require exactly zero names when others => if Identifier_Count /= 0 then Ret_Val := 124; end if; end case; end case; return Ret_Val; end Justification_Name_Length_Error; begin -- Check_Identifiers -- Rules: -- (1) Between 0 and ErrorHandler.Max_Justification_Identifier_Length identifiers found -- (2) Each identifier must be declared and visible in Scope -- (3) Identifiers (I) is populated with each legal identifier thus: -- (a) If the identifier has no dots in it, then we store the Lex_String AND the looked-up symbol -- (b) If it has dots then we store a null lex string and the looked-up symbol -- (this complexity is because we don't know whether warnings will be passed to the errohandler -- using, for example, Semantic_Warning or Semantic_Warning_Sym so we need to match either) -- Establish default result Identifiers := ErrorHandler.Null_Justification_Identifiers; Valid := True; -- Iterate through dotted_simple_name_or_null nodes It := Find_First_Node (Node_Kind => SP_Symbols.dotted_simple_name_or_null, From_Root => Opt_Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.dotted_simple_name_or_null and --# Next_Node = Get_Node (It); Process_Dotted_Simple_Name_Or_Null (DSNON_Node => Next_Node, Scope => Scope, Component_Data => Component_Data, The_Heap => The_Heap, Valid => Current_Name_Valid); Valid := Valid and then Current_Name_Valid; It := STree.NextNode (It); end loop; Name_Error := Justification_Name_Length_Error (Enclosing_Region_Is_A_Function => Dictionary.IsFunction (Dictionary.GetRegion (Scope)), Err_Num => Err_Num); if Name_Error /= 0 then ErrorHandler.Semantic_Error (Name_Error, ErrorHandler.No_Reference, Node_Position (Next_Sibling (Child_Node (Parent_Node (Opt_Node)))), LexTokenManager.Null_String); Valid := False; -- don't 
-- add clause at all if any part is malformed
   end if;
end Check_Identifiers;

--------------------------------------------------------------------------------

procedure Handle_Function_Return (Kind        : in     ErrorHandler.Justification_Kinds;
                                  Err_Num     : in     Natural;
                                  Identifiers : in out ErrorHandler.Justification_Identifiers)
--# derives Identifiers from *,
--#                          Err_Num,
--#                          Kind;
is
   function Message_Is_IFA (Err_Num : Natural) return Boolean is
   begin
      return Err_Num = 50 or else Err_Num = 602;
   end Message_Is_IFA;

begin -- Handle_Function_Return

   -- If the user has tried to justify an information flow error where the "export" is the function
   -- return result, then there will only be one variable name in the message (which will say, e.g.,
   -- "The function value is not derived from the imported value(s) of Y.") but the pattern matching
   -- in ErrorHandler.Justification.Check_Whether_Justified will still be expecting two symbols, an
   -- export followed by an import.  The Examiner's flow analyser uses Null_Symbol to represent the
   -- function return value.  In this procedure we:
   -- (1) Detect cases where only one argument has been supplied for an IFA msg that needs two
   -- (2) Assume in that case that a function return is implicitly intended
   -- (3) Move the given variable to the second, import, slot.
   -- (4) Insert a null identifier in the first slot (this will match Null_Symbol).
   -- Note that this transformation is "safe" even if the user has simply forgotten a variable name
   -- because the transformed annotation will not pattern match any more than it would have before.
   -- e.g. User intends "F, 50, X, Y" but types "F, 50, X" by mistake.  Transformation gives
   -- "F, 50, Null_Sym, X".  Neither the original incorrect form nor the transformed form will
   -- pattern match, so behaviour is unaltered.
   if Kind = ErrorHandler.Flow_Message and then Message_Is_IFA (Err_Num => Err_Num) then
      -- possibly something to do
      if Identifiers (2) = ErrorHandler.Null_Justification_Identifier then
         -- only one identifier was supplied so transformation is needed
         Identifiers (2) := Identifiers (1); -- move given variable to second place
         Identifiers (1) := ErrorHandler.Null_Justification_Identifier; -- to match Null_Sym
      end if;
   end if;
end Handle_Function_Return;

begin -- Wf_Justification_Clause
   Error_Found    := False;
   Applies_To_All := False;
   Identifiers    := ErrorHandler.Null_Justification_Identifiers;

   -- Check whether we are dealing with Flow_Message or Warning_Message --------
   Current_Node := Child_Node (Current_Node => Node);
   -- ASSUME Current_Node = identifier
   SystemErrors.RT_Assert
     (C       => Syntax_Node_Type (Node => Current_Node) = SP_Symbols.identifier,
      Sys_Err => SystemErrors.Invalid_Syntax_Tree,
      Msg     => "Expect Current_Node = identifier in Wf_Justification_Clause");
   Check_Kind (Lex_String => Node_Lex_String (Node => Current_Node),
               Kind       => Kind,
               Valid      => Valid);
   if not Valid then
      Error_Found := True;
      ErrorHandler.Semantic_Error (121, ErrorHandler.No_Reference, Node_Position (Current_Node), LexTokenManager.Null_String);
   end if;

   --# assert STree.Table = STree.Table~;

   -- Check error number ---------------------------------------------------------
   Current_Node := Next_Sibling (Current_Node => Current_Node);
   -- ASSUME Current_Node = numeric_literal
   SystemErrors.RT_Assert
     (C       => Syntax_Node_Type (Node => Current_Node) = SP_Symbols.numeric_literal,
      Sys_Err => SystemErrors.Invalid_Syntax_Tree,
      Msg     => "Expect Current_Node = numeric_literal in Wf_Justification_Clause");
   Err_Num_Node := Child_Node (Current_Node => Child_Node (Current_Node => Current_Node)); --
ASSUME Err_Num_Node = integer_number OR real_number OR based_integer OR based_real if Syntax_Node_Type (Node => Err_Num_Node) = SP_Symbols.integer_number then -- ASSUME Err_Num_Node = integer_number Get_Literal_Value (Node => Err_Num_Node, Val => Val); Maths.ValueToInteger (Val, Err_Num_Int, Maths_Valid); Valid := Maths_Valid = Maths.NoError and then Err_Num_Int >= 0; if not Valid then Error_Found := True; Err_Num := 0; else Err_Num := Err_Num_Int; end if; elsif Syntax_Node_Type (Node => Err_Num_Node) = SP_Symbols.real_number or else Syntax_Node_Type (Node => Err_Num_Node) = SP_Symbols.based_integer or else Syntax_Node_Type (Node => Err_Num_Node) = SP_Symbols.based_real then -- ASSUME Err_Num_Node = real_number OR based_integer OR based_real -- wrong kind of number Error_Found := True; Err_Num := 0; else Error_Found := True; Err_Num := 0; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Err_Num_Node = integer_number OR real_number OR " & "based_integer OR based_real in Wf_Justification_Clause"); end if; -- We should have a valid positive integer value for Err_Num by here. If not, raise error if Err_Num = 0 then ErrorHandler.Semantic_Error (122, ErrorHandler.No_Reference, Node_Position (Err_Num_Node), LexTokenManager.Null_String); elsif Is_Disallowed_Warning (Kind => Kind, Err_Num => Err_Num) then -- we have a wellformed warning number but we may want to disallow certain warning numbers ErrorHandler.Semantic_Error (123, ErrorHandler.No_Reference, Node_Position (Err_Num_Node), LexTokenManager.Null_String); Error_Found := True; end if; --# assert STree.Table = STree.Table~; -- Check identifiers ------------------------------------------------------------------ Current_Node := Next_Sibling (Current_Node => Current_Node); -- ASSUME Current_Node = justification_opt OR justification_all case Syntax_Node_Type (Current_Node) is when SP_Symbols.justification_opt => Check_Identifiers (Opt_Node => Current_Node, Scope => Scope, Kind => Kind, Err_Num => Err_Num, Component_Data => Component_Data, The_Heap => The_Heap, Identifiers => Identifiers, Valid => Valid); if not Valid then -- I think this is clearer that Error_FOund := Error_Found or not Valid; Error_Found := True; end if; when SP_Symbols.justification_all => if CommandLineData.Content.Language_Profile in CommandLineData.Auto_Code_Generators then Applies_To_All := True; else ErrorHandler.Semantic_Error (Err_Num => 175, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node), Id_Str => LexTokenManager.Null_String); end if; when others => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = justification_opt OR justification_all in Wf_Justification_Clause"); end case; --# assert STree.Table = STree.Table~; -- Check explanation ------------------------------------------------------------------ Current_Node := Next_Sibling (Current_Node => Current_Node); -- ASSUME Current_Node = justification_string SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Current_Node) = SP_Symbols.justification_string, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = justification_string in Wf_Justification_Clause"); Explanation := E_Strings.Empty_String; while Syntax_Node_Type (Node => Current_Node) = SP_Symbols.justification_string loop --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Current_Node, STree.Table) = SP_Symbols.justification_string; Current_Node := Child_Node (Current_Node => Current_Node); -- ASSUME Current_Node 
= string_literal SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Current_Node) = SP_Symbols.string_literal, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = string_literal in Wf_Justification_Clause"); if not E_Strings.Is_Empty (E_Str => Explanation) then E_Strings.Append_Char (E_Str => Explanation, Ch => ' '); end if; E_Strings.Append_Examiner_String (E_Str1 => Explanation, E_Str2 => LexTokenManager.Lex_String_To_String (Node_Lex_String (Node => Current_Node))); Current_Node := Next_Sibling (Current_Node => Current_Node); -- ASSUME Current_Node = justification_string OR NULL SystemErrors.RT_Assert (C => Current_Node = STree.NullNode or else Syntax_Node_Type (Node => Current_Node) = SP_Symbols.justification_string, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = justification_string OR NULL in Wf_Justification_Clause"); end loop; --# assert STree.Table = STree.Table~; -- Insert justification data in error handler data table if not Error_Found then if not Applies_To_All then -- See whether Identifiers needs transforming to handle IFA errors on function return Handle_Function_Return (Kind => Kind, Err_Num => Err_Num, Identifiers => Identifiers); end if; -- Finally, add it to table of justification ErrorHandler.Start_Justification (Position => Node_Position (Node => Node), Line => Start_Line, Kind => Kind, Err_Num => Err_Num, Identifiers => Identifiers, Applies_To_All => Applies_To_All, Explanation => Explanation, Maximum_Justifications_Reached => Maximum_Justifications_Reached); if Maximum_Justifications_Reached then ErrorHandler.Semantic_Warning (122, Node_Position (Node), LexTokenManager.Null_String); end if; end if; end Wf_Justification_Clause; begin -- Wf_Start_Justification It := Find_First_Node (Node_Kind => SP_Symbols.justification_clause, From_Root => Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.justification_clause and --# Next_Node = Get_Node (It); Wf_Justification_Clause (Start_Line => Node_Position (Node).Start_Line_No, Node => Next_Node, Scope => Scope, Component_Data => Component_Data, The_Heap => The_Heap); It := STree.NextNode (It); end loop; end Wf_Start_Justification; -------------------------------------------------------------------------------- procedure Wf_End_Justification (Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# SPARK_IO.File_Sys; is Unmatched_End : Boolean; begin ErrorHandler.End_Justification (Node_Pos.Start_Line_No, -- to get Unmatched_End); if Unmatched_End then ErrorHandler.Semantic_Warning (120, Node_Pos, LexTokenManager.Null_String); end if; end Wf_End_Justification; begin -- Wf_Justification_Statement Next_Node := Child_Node (Current_Node => Node); -- ASSUME Next_Node = start_justification OR end_justification if Syntax_Node_Type (Node => Next_Node) = SP_Symbols.start_justification then -- ASSUME Next_Node = start_justification Wf_Start_Justification (Node => Node, Scope => Scope, Component_Data => Component_Data, The_Heap => The_Heap); elsif Syntax_Node_Type (Node => Next_Node) = 
SP_Symbols.end_justification then -- ASSUME Next_Node = end_justification Wf_End_Justification (Node_Pos => Node_Position (Node => Node)); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = start_justification OR end_justification in Wf_Justification_Statement"); end if; if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Justification (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Node, Scope => Scope); end if; end Wf_Justification_Statement; spark-2012.0.deb/examiner/sem-compunit-wf_proof_renaming_declaration.adb0000644000175000017500000000324511753202336025411 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.CompUnit) procedure Wf_Proof_Renaming_Declaration (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) is pragma Unreferenced (Scope); begin ErrorHandler.Semantic_Error (Err_Num => 315, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); --# accept Flow, 30, Scope, "Left unused for future expansion" & --# Flow, 50, SPARK_IO.File_Sys, Scope, "Left unused for future expansion" & --# Flow, 50, ErrorHandler.Error_Context, Scope, "Left unused for future expansion"; end Wf_Proof_Renaming_Declaration; -- unused scope left for future expansion spark-2012.0.deb/examiner/sem-compunit-walkstatements-wf_assign.adb0000644000175000017500000005143211753202336024370 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.CompUnit.WalkStatements) procedure Wf_Assign (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Table : in out RefList.HashTable; Component_Data : in out ComponentManager.ComponentData) is Name_Result, Exp_Result : Exp_Record; Expected_Type, Name_Sym : Dictionary.Symbol; Most_Enclosing_Name_Sym : Dictionary.Symbol; Name_Node, -- the name on the LHS Exp_Node : STree.SyntaxNode; -- the expression on the RHS Ref_Var : SeqAlgebra.Seq; Others_Aggregate : Boolean; -- is this an unconstrained_array_assignment --------------------------------------------------------------------- procedure Check_Write_To_Structured_Var (Name_Sym, Expected_Type : in Dictionary.Symbol; Ref_Var : in SeqAlgebra.Seq; The_Heap : in out Heap.HeapRecord) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# The_Heap from *, --# Dictionary.Dict, --# Expected_Type, --# Name_Sym, --# Ref_Var, --# The_Heap; is begin if not Dictionary.Types_Are_Equal (Left_Symbol => Expected_Type, Right_Symbol => Dictionary.GetType (Name_Sym), Full_Range_Subtype => False) then -- we must be writing to a component of a structured variable -- so must add structure variable to list of referenced variables SeqAlgebra.AddMember (The_Heap, Ref_Var, Natural (Dictionary.SymbolRef (Name_Sym))); end if; end Check_Write_To_Structured_Var; begin -- Wf_Assign -- This procedure checks the following: -- (0) if the child node is an unconstrained_array_assignment (if it is then step down -- a level in the tree before continuing with the other checks), -- (1) the assigned identifier is declared and visible, and -- (2) it is a variable, and -- (3) it is not an unconstrained array (unless this is an unconstrained_array_assignment) -- (4) for unconstrained array assignments the array must be one-dimensional -- (5) this variable is not a loop parameter, and -- (6) it may be a package own var declared in a non-enclosing scope but -- a warning is given. -- (7) it is not a formal parameter of mode in, and -- additions for streams -- (8) check that the assigned variable is not of mode in -- (9) check that assigning expression is not a mode out variable -- (0) Check if the child node is an unconstrained_array_assignment (if it is then step down -- a level in the tree before continuing with the other checks). 
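   -- The examples below are illustrative sketches only; they are not taken from
   -- this source and the names are invented:
   --   check (3) rejects   S := T;                when S is of an unconstrained array type;
   --   check (4), together with the unconstrained_array_assignment form, accepts
   --                       S := (others => ' ');  for a one-dimensional unconstrained formal S,
   --                       but rejects the same aggregate when S has more than one dimension;
   --   check (5) rejects   I := 0;                when I is a loop parameter.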
Others_Aggregate := Syntax_Node_Type (Node => Child_Node (Current_Node => Node)) = SP_Symbols.unconstrained_array_assignment; if Others_Aggregate then Name_Node := Child_Node (Current_Node => Child_Node (Current_Node => Node)); else Name_Node := Child_Node (Current_Node => Node); end if; -- ASSUME Name_Node = name SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Name_Node) = SP_Symbols.name, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Name_Node = name in Wf_Assign"); --# assert Syntax_Node_Type (Node, STree.Table) = SP_Symbols.assignment_statement and --# Syntax_Node_Type (Name_Node, STree.Table) = SP_Symbols.name and --# STree.Table = STree.Table~; Exp_Node := Next_Sibling (Current_Node => Name_Node); -- ASSUME Exp_Node = expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Exp_Node = expression in Wf_Assign"); SeqAlgebra.CreateSeq (TheHeap, Ref_Var); -- Call WalkExpression to check the LHS of the assignment statement --# accept Flow, 10, Aggregate_Stack.State, "Expected ineffective assignment"; Walk_Expression_P.Walk_Expression (Exp_Node => Name_Node, Scope => Scope, Type_Context => Dictionary.GetUnknownTypeMark, Context_Requires_Static => False, Ref_Var => Ref_Var, Result => Name_Result, Component_Data => Component_Data, The_Heap => TheHeap); --# end accept; Name_Sym := Name_Result.Other_Symbol; Most_Enclosing_Name_Sym := Dictionary.GetMostEnclosingObject (Name_Sym); -- Check that LHS is something that can be assigned to if not Name_Result.Is_AVariable and then not Dictionary.IsUnknownTypeMark (Name_Result.Type_Symbol) then Expected_Type := Dictionary.GetUnknownTypeMark; ErrorHandler.Semantic_Error (Err_Num => 609, Reference => 14, Position => Node_Position (Node => Name_Node), Id_Str => LexTokenManager.Null_String); elsif Dictionary.Is_Null_Symbol (Name_Sym) then Expected_Type := Dictionary.GetUnknownTypeMark; -- Check for attempts to assign to tagged type conversions: if Name_Result.Sort = Type_Result and then Dictionary.TypeIsTagged (Name_Result.Type_Symbol) then if Dictionary.IsSubcomponent (Name_Result.Variable_Symbol) then -- Assignment to view conversion is not implemented yet. ErrorHandler.Semantic_Error (Err_Num => 129, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Name_Node), Id_Str => LexTokenManager.Null_String); else -- View conversion to own type is not permitted in target of -- assignment. ErrorHandler.Semantic_Error (Err_Num => 116, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Name_Node), Id_Str => LexTokenManager.Null_String); end if; end if; else Expected_Type := Name_Result.Type_Symbol; -- For an unconstrained_array_assignment the Expected_Type of the LHS will be the -- unconstrained array type, but the type of the RHS will be the type -- of the components of that array. if Others_Aggregate then Expected_Type := Dictionary.GetArrayComponent (Expected_Type); end if; -- Seed syntax tree with expected type for run-time check. STree.Add_Node_Symbol (Node => Node, Sym => Expected_Type); -- (2) Check that LHS is a variable if Dictionary.IsVariableOrSubcomponent (Name_Sym) then -- If this is an unconstrained_array_assignment then it is an aggregate assignment so -- there is never any self-reference. 
if not Others_Aggregate then -- Check for assignment to structured variables so that they generate -- a reference to the variable as well; A (I) = 3; is a reference of A -- as well as write to it. Call moved here because if A is not a variable -- in the first place then the check is meaningless. Check_Write_To_Structured_Var (Name_Sym => Name_Sym, Expected_Type => Expected_Type, Ref_Var => Ref_Var, The_Heap => TheHeap); end if; -- (3) Check that Expected_Type is not unconstrained array. -- (For unconstrained_array_assignments the LHS *must* be an unconstrained array -- but don't need to add guard here because if this is an unconstrained_array_assignment -- then Expected_Type will represent the component type, not the array type.) if Dictionary.Is_Unconstrained_Array_Type_Mark (Expected_Type, Scope) then ErrorHandler.Semantic_Error (Err_Num => 39, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Name_Node), Id_Str => LexTokenManager.Null_String); -- (4) If this is an unconstrained array assignment then the target type must be a -- one dimensional array. Although the grammar of unconstrained_array_assignment will -- not permit: -- X := (others => (others => 0)); -- we still need to make sure that we trap the case where: -- X := (others => 0); -- when X is a multidimensional array. elsif Others_Aggregate and then Dictionary.IsArrayTypeMark (Name_Result.Type_Symbol, Scope) and then Dictionary.GetNumberOfDimensions (Name_Result.Type_Symbol) /= 1 then ErrorHandler.Semantic_Error (Err_Num => 118, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Name_Node), Id_Str => LexTokenManager.Null_String); -- (5) Check that LHS is not a loop parameter elsif Dictionary.IsLoopParameter (Name_Sym) then ErrorHandler.Semantic_Error (Err_Num => 168, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Name_Node), Id_Str => Dictionary.GetSimpleName (Name_Sym)); -- (6) LHS may be a package own var declared in a non-enclosing scope but -- a warning is given. elsif not In_Package_Initialization (Scope => Scope) and then Dictionary.IsOwnVariable (Most_Enclosing_Name_Sym) and then not Is_Enclosing_Package (Outer_Pack => Dictionary.GetOwner (Most_Enclosing_Name_Sym), Scope => Scope) then ErrorHandler.Semantic_Warning_Sym (Err_Num => 169, Position => Node_Position (Node => Name_Node), Sym => Name_Sym, Scope => Scope); -- If we are initializing a package own variable, check that the initialization -- was announced in the package specification. 
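   -- (Illustrative sketch, assuming the usual SPARK 2005 annotation forms; the
   --  name State is invented: a package whose elaboration part assigns to its
   --  own variable State is expected to have announced this in its spec, e.g.
   --      --# own State;
   --      --# initializes State;
   --  When the "initializes" announcement is missing, Unexpected_Initialization
   --  holds and error 333 is reported below.)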
elsif In_Package_Initialization (Scope => Scope) and then Unexpected_Initialization (Sym => Most_Enclosing_Name_Sym) then ErrorHandler.Semantic_Error (Err_Num => 333, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Name_Node), Id_Str => Dictionary.GetSimpleName (Most_Enclosing_Name_Sym)); -- Protected state must be initialized at declaration elsif In_Package_Initialization (Scope => Scope) and then Dictionary.IsOwnVariable (Name_Sym) and then Dictionary.GetOwnVariableMode (Name_Sym) = Dictionary.DefaultMode and then (Dictionary.GetOwnVariableProtected (Name_Sym) or else Dictionary.IsVirtualElement (Name_Sym)) then ErrorHandler.Semantic_Error (Err_Num => 874, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Name_Node), Id_Str => Dictionary.GetSimpleName (Most_Enclosing_Name_Sym)); elsif Others_Aggregate and then (not Dictionary.IsSubprogramParameter (Most_Enclosing_Name_Sym) or else not Dictionary.Is_Unconstrained_Array_Type_Mark (Name_Result.Type_Symbol, Scope)) then -- If LHS is not a subprogram parameter then it can't be an aggregate assignment -- to an unconstrained array. -- If LHS is not unconstrained then this syntax is not permitted in SPARK. -- This error will be raised if there is an attempt to use the syntax for an -- unconstrained_array_assignment where the LHS is not an unconstrained array type at all. (Most -- likely the LHS is a normal array.) It should not be possible to get here if the -- LHS is an unconstrained array type that is not a parameter because SPARK does not -- permit objects of unconstrained array types to be declared. ErrorHandler.Semantic_Error (Err_Num => 117, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); -- (7) Check LHS is not a formal parameter of mode in. elsif Dictionary.IsSubprogramParameter (Most_Enclosing_Name_Sym) then if Dictionary.GetSubprogramParameterMode (Most_Enclosing_Name_Sym) = Dictionary.InMode or else Dictionary.GetSubprogramParameterMode (Most_Enclosing_Name_Sym) = Dictionary.DefaultMode then ErrorHandler.Semantic_Error (Err_Num => 170, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Name_Node), Id_Str => Dictionary.GetSimpleName (Most_Enclosing_Name_Sym)); end if; -- Check for direct update of global by function. elsif Dictionary.IsFunction (Dictionary.GetEnclosingCompilationUnit (Scope)) and then Dictionary.Is_Global_Variable (Dictionary.GetAbstraction (Dictionary.GetEnclosingCompilationUnit (Scope), Scope), Dictionary.GetEnclosingCompilationUnit (Scope), Most_Enclosing_Name_Sym) then ErrorHandler.Semantic_Error (Err_Num => 327, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Name_Node), Id_Str => Dictionary.GetSimpleName (Most_Enclosing_Name_Sym)); -- (8) Check LHS is not stream variable of mode in. 
elsif Dictionary.GetOwnVariableOrConstituentMode (Most_Enclosing_Name_Sym) = Dictionary.InMode then ErrorHandler.Semantic_Error (Err_Num => 717, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Name_Node), Id_Str => Dictionary.GetSimpleName (Most_Enclosing_Name_Sym)); end if; else Expected_Type := Dictionary.GetUnknownTypeMark; ErrorHandler.Semantic_Error (Err_Num => 6, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Name_Node), Id_Str => Dictionary.GetSimpleName (Name_Sym)); end if; end if; --# assert Syntax_Node_Type (Node, STree.Table) = SP_Symbols.assignment_statement and --# Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.expression and --# (Dictionary.Is_Null_Symbol (Expected_Type) or Dictionary.IsTypeMark (Expected_Type, Dictionary.Dict)) and --# STree.Table = STree.Table~; -- Call WalkExpression to check the RHS of the assignment statement. Walk_Expression_P.Walk_Expression (Exp_Node => Exp_Node, Scope => Scope, Type_Context => Expected_Type, Context_Requires_Static => False, Ref_Var => Ref_Var, Result => Exp_Result, Component_Data => Component_Data, The_Heap => TheHeap); Assignment_Check (Position => Node_Position (Node => Exp_Node), Scope => Scope, Target_Type => Expected_Type, Exp_Result => Exp_Result); -- (9) Check that we are not trying to read an out stream. if Exp_Result.Is_AVariable and then Dictionary.GetOwnVariableOrConstituentMode (Exp_Result.Variable_Symbol) = Dictionary.OutMode then ErrorHandler.Semantic_Error_Sym (Err_Num => 718, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Exp_Node), Sym => Exp_Result.Variable_Symbol, Scope => Scope); end if; --# assert Syntax_Node_Type (Node, STree.Table) = SP_Symbols.assignment_statement and --# Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.expression and --# STree.Table = STree.Table~ and --# (Dictionary.Is_Null_Symbol (Exp_Result.Type_Symbol) or Dictionary.IsTypeMark (Exp_Result.Type_Symbol, Dictionary.Dict)); -- If expression represents an IN stream variable then put type of expression -- into syntax tree for the benefit of the RTC procedure ModelAssignmentStatement. if Exp_Result.Is_AVariable and then Dictionary.GetOwnVariableOrConstituentMode (Exp_Result.Variable_Symbol) = Dictionary.InMode then -- Mark the enclosing compilation unit as assigning an external variable -- This may be too coarse; may be we should just mark enclosing subprog? Dictionary.AddAssignsFromExternal (Dictionary.GetEnclosingCompilationUnit (Scope)); SystemErrors.RT_Assert (C => Dictionary.Is_Null_Symbol (Exp_Result.Type_Symbol) or else Dictionary.IsTypeMark (Exp_Result.Type_Symbol), Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Exp_Result.Type_Symbol to be a type in Wf_Assign"); STree.Add_Node_Symbol (Node => Exp_Node, Sym => Exp_Result.Type_Symbol); -- Check to see if the variable has been marked as always valid. -- Note that the OtherSymbol is checked,not the variableSymbol, -- since this will be the Subcomponent symbol if we are referring to -- a record component. if Dictionary.VariableOrSubcomponentIsMarkedValid (Exp_Result.Other_Symbol) then -- MCA & TJJ: do we also need to add a use of 'Always_Valid to the summary? -- Debug.PrintSym ("Access is Always_Valid =", Exp_Result.OtherSymbol); null; else -- and issue warning about possible validity problems. -- The warning is stronger when the external variable is a type that doesn't -- generate run-time checks. 
if Dictionary.TypeIsScalar (Exp_Result.Type_Symbol) and then not Dictionary.TypeIsBoolean (Exp_Result.Type_Symbol) then -- weaker warning ErrorHandler.Semantic_Warning_Sym (Err_Num => 392, Position => Node_Position (Node => Exp_Node), Sym => Exp_Result.Other_Symbol, Scope => Scope); else -- stronger warning ErrorHandler.Semantic_Warning_Sym (Err_Num => 393, Position => Node_Position (Node => Exp_Node), Sym => Exp_Result.Other_Symbol, Scope => Scope); end if; end if; end if; --# assert Syntax_Node_Type (Node, STree.Table) = SP_Symbols.assignment_statement and --# Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.expression and --# STree.Table = STree.Table~; -- If the expression represents a use of unchecked conversion then plant the return -- type in the syntax tree for the benefit of the RTC procedure ModelAssignmentStatement -- TJJ: Note a more explicit way of designating and checking for this would -- be better so that it is easier to determine the extent of use of this idiom. if Dictionary.IsAnUncheckedConversion (Exp_Result.Other_Symbol) then STree.Add_Node_Symbol (Node => Exp_Node, Sym => Dictionary.GetType (Exp_Result.Other_Symbol)); end if; if Dictionary.TypeIsLimited (Exp_Result.Type_Symbol, Scope) then ErrorHandler.Semantic_Error (Err_Num => 308, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); end if; -- Patch flow relations to take into account stream volatility. Add_Stream_Effects (Table => Table, The_Heap => TheHeap, Node => Node, Export => Name_Sym, Imports => Ref_Var); -- Add export and list of imports to RefList hash table. RefList.AddRelation (Table, TheHeap, Node, Name_Sym, Ref_Var); end Wf_Assign; spark-2012.0.deb/examiner/pile.adb0000644000175000017500000002737611753202336015671 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Debug; with ExaminerConstants; package body Pile is ------------------------------------------------------------------------ -- A "Pile" is basically a list of Cells, each of which has -- a Parent, and a Symbol and (possibly) a DAG associated with it. -- The Cell fields are used as follows: -- -- A_Ptr - Sibling Node - next Node in this Pile -- B_Ptr - Parent Node -- C_Ptr - DAG Node, or Null for nodes for which no DAG is associated. 
-- Val - The Symbol associated with a Node ------------------------------------------------------------------------ procedure Obtain (Heap : in out Cells.Heap_Record; Node : out Cells.Cell) is begin Cells.Create_Cell (Heap, Node); end Obtain; ------------------------------------------------------------------------- function IsNull (Node : Cells.Cell) return Boolean is begin return Cells.Is_Null_Cell (Node); end IsNull; ------------------------------------------------------------------------- procedure Free (Heap : in out Cells.Heap_Record; Node : in Cells.Cell) is begin if not IsNull (Node) then Cells.Dispose_Of_Cell (Heap, Node); end if; end Free; ------------------------------------------------------------------------- procedure SetParent (Heap : in out Cells.Heap_Record; Node : in Cells.Cell; ParentNode : in Cells.Cell) is begin if not IsNull (Node) then Cells.Set_B_Ptr (Heap, Node, ParentNode); end if; end SetParent; ------------------------------------------------------------------------- procedure SetSibling (Heap : in out Cells.Heap_Record; Node : in Cells.Cell; SiblingNode : in Cells.Cell) is begin if not IsNull (Node) then Cells.Set_A_Ptr (Heap, Node, SiblingNode); end if; end SetSibling; ------------------------------------------------------------------------- procedure SetDAG (Heap : in out Cells.Heap_Record; Node : in Cells.Cell; DAGNode : in Cells.Cell) is begin if not IsNull (Node) then Cells.Set_C_Ptr (Heap, Node, DAGNode); end if; end SetDAG; ------------------------------------------------------------------------- procedure SetNodeSymbol (Heap : in out Cells.Heap_Record; Node : in Cells.Cell; Symbol : in Dictionary.Symbol) is begin if not IsNull (Node) then Cells.Set_Symbol_Value (Heap, Node, Symbol); end if; end SetNodeSymbol; ------------------------------------------------------------------------- function Parent (Heap : Cells.Heap_Record; Node : Cells.Cell) return Cells.Cell is ParentNode : Cells.Cell; begin if IsNull (Node) then ParentNode := Cells.Null_Cell; else ParentNode := Cells.Get_B_Ptr (Heap, Node); end if; return ParentNode; end Parent; ------------------------------------------------------------------------- function Sibling (Heap : Cells.Heap_Record; Node : Cells.Cell) return Cells.Cell is SiblingNode : Cells.Cell; begin if IsNull (Node) then SiblingNode := Cells.Null_Cell; else SiblingNode := Cells.Get_A_Ptr (Heap, Node); end if; return SiblingNode; end Sibling; ------------------------------------------------------------------------- function DAG (Heap : Cells.Heap_Record; Node : Cells.Cell) return Cells.Cell is DAGNode : Cells.Cell; begin if IsNull (Node) then DAGNode := Cells.Null_Cell; else DAGNode := Cells.Get_C_Ptr (Heap, Node); end if; return DAGNode; end DAG; ------------------------------------------------------------------------- function NodeSymbol (Heap : Cells.Heap_Record; Node : Cells.Cell) return Dictionary.Symbol is Symbol : Dictionary.Symbol; begin if IsNull (Node) then Symbol := Dictionary.NullSymbol; else Symbol := Cells.Get_Symbol_Value (Heap, Node); end if; return Symbol; end NodeSymbol; ------------------------------------------------------------------------- procedure Insert (Heap : in out Cells.Heap_Record; Symbol : in Dictionary.Symbol; DAG : in Cells.Cell; Node : in out Cells.Cell) is LastNode, NextNode, NewNode : Cells.Cell; begin if IsNull (Node) then -- Pile~ is empty, so obtain, fill and return a single new node. 
Obtain (Heap, NewNode); SetNodeSymbol (Heap, NewNode, Symbol); SetDAG (Heap, NewNode, DAG); Node := NewNode; elsif Dictionary.Declared_Before (Symbol, NodeSymbol (Heap, Node)) then -- Pile~ is not empty, and Symbol should be inserted _before_ the -- first entry and is not a duplicate, so we need to modify and return Node Obtain (Heap, NewNode); SetNodeSymbol (Heap, NewNode, Symbol); SetDAG (Heap, NewNode, DAG); SetSibling (Heap, NewNode, Node); Node := NewNode; else -- Pile~ is not empty, so search for duplicate (in which -- case terminate), or correct place to Insert, respecting -- Dictionary.Declared_Before order NextNode := Node; loop -- If we find that the Pile already contains Symbol, -- then no further action is required. exit when Symbol = NodeSymbol (Heap, NextNode); LastNode := NextNode; NextNode := Sibling (Heap, LastNode); -- If the NextNode is Null, then we've reached the end of the Pile if IsNull (NextNode) then Obtain (Heap, NewNode); SetNodeSymbol (Heap, NewNode, Symbol); SetDAG (Heap, NewNode, DAG); SetSibling (Heap, LastNode, NewNode); exit; end if; -- If Symbol is "between" the Symbols at LastNode and NextNode -- then we insert it there, otherwise keep searching if Dictionary.Declared_Before (NodeSymbol (Heap, LastNode), Symbol) and Dictionary.Declared_Before (Symbol, NodeSymbol (Heap, NextNode)) then Obtain (Heap, NewNode); SetNodeSymbol (Heap, NewNode, Symbol); SetDAG (Heap, NewNode, DAG); SetSibling (Heap, LastNode, NewNode); SetSibling (Heap, NewNode, NextNode); exit; end if; end loop; end if; end Insert; function Contains (Heap : in Cells.Heap_Record; Symbol : in Dictionary.Symbol; Node : in Cells.Cell) return Boolean is LastNode, NextNode : Cells.Cell; Result : Boolean; begin Result := True; if IsNull (Node) then -- Pile~ is empty Result := False; elsif Dictionary.Declared_Before (Symbol, NodeSymbol (Heap, Node)) then -- Pile~ is not empty, and Symbol would be inserted _before_ the -- first entry if it was present. Result := False; else -- Pile~ is not empty, so search for duplicate (in which case -- terminate). NextNode := Node; loop -- If we find that the Pile already contains Symbol, then no -- further action is required. exit when Symbol = NodeSymbol (Heap, NextNode); LastNode := NextNode; NextNode := Sibling (Heap, LastNode); -- If the NextNode is Null, then we've reached the end of the Pile if IsNull (NextNode) then Result := False; exit; end if; -- If Symbol is "between" the Symbols at LastNode and NextNode -- then is is not present, otherwise keep searching. 
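   -- (Illustrative note: the Pile is kept in Dictionary.Declared_Before order,
   --  so once LastNode's symbol is declared before Symbol and Symbol is
   --  declared before NextNode's symbol, Symbol cannot appear later in the
   --  list and the search can stop with Result = False.)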
if Dictionary.Declared_Before (NodeSymbol (Heap, LastNode), Symbol) and Dictionary.Declared_Before (Symbol, NodeSymbol (Heap, NextNode)) then Result := False; exit; end if; end loop; end if; return Result; end Contains; procedure PrintPile (Heap : Cells.Heap_Record; Node : Cells.Cell) is --# hide PrintPile; CurrentNode : Cells.Cell; NextNode : Cells.Cell; procedure PrintCell (Node : Cells.Cell) is begin if IsNull (Node) then Debug.PrintMsg ("", True); else Debug.PrintMsg ("Node" & Cells.Cell'Image (Node) & ",Sib" & Cells.Cell'Image (Sibling (Heap, Node)) & ",Parent" & Cells.Cell'Image (Parent (Heap, Node)) & ",DAG" & Cells.Cell'Image (DAG (Heap, Node)) & ",Rank" & Cells.Cell_Rank'Image (Cells.Get_Rank (Heap, Node)) & ",Sym" & ExaminerConstants.RefType'Image (Dictionary.SymbolRef (NodeSymbol (Heap, Node))), False); Debug.Print_Sym (Msg => " ", Sym => NodeSymbol (Heap, Node)); end if; end PrintCell; begin Debug.PrintMsg ("Printout of Pile beginning at Cell " & Cells.Cell'Image (Node), True); CurrentNode := Node; if IsNull (CurrentNode) then PrintCell (CurrentNode); else loop NextNode := Sibling (Heap, CurrentNode); PrintCell (CurrentNode); exit when IsNull (NextNode); CurrentNode := NextNode; NextNode := Sibling (Heap, CurrentNode); end loop; end if; end PrintPile; function OrderOK (Heap : Cells.Heap_Record; Node : Cells.Cell) return Boolean is Result : Boolean; CurrentNode : Cells.Cell; NextNode : Cells.Cell; begin if IsNull (Node) then -- A completely empty Pile is alway OK Result := True; else CurrentNode := Node; NextNode := Sibling (Heap, CurrentNode); if IsNull (NextNode) then -- Pile with exactly ONE node must be OK Result := True; else loop Result := Dictionary.Declared_Before (NodeSymbol (Heap, CurrentNode), NodeSymbol (Heap, NextNode)); exit when not Result; CurrentNode := NextNode; NextNode := Sibling (Heap, CurrentNode); exit when IsNull (NextNode); end loop; end if; end if; return Result; end OrderOK; end Pile; spark-2012.0.deb/examiner/sem-walk_expression_p-type_context_stack.adb0000644000175000017500000001321411753202336025150 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SPARK_IO; separate (Sem.Walk_Expression_P) package body Type_Context_Stack is procedure Init (Stack : out T_Stack_Type) --# post (for all I in Index_Range range Index_Range'First .. 
Stack.Top_Ptr => --# (Dictionary.Is_Null_Symbol (Stack.S(I)) or Dictionary.IsTypeMark (Stack.S(I), Dictionary.Dict))); is begin Stack.Top_Ptr := 0; --# accept Flow, 32, Stack.S, "Safe partial initialisation" & --# Flow, 31, Stack.S, "Safe partial initialisation" & --# Flow, 602, Stack, Stack.S, "Safe partial initialisation"; end Init; procedure Push (X : in Dictionary.Symbol; Stack : in out T_Stack_Type) --# pre (for all I in Index_Range range Index_Range'First .. Stack.Top_Ptr => --# (Dictionary.Is_Null_Symbol (Stack.S(I)) or Dictionary.IsTypeMark (Stack.S(I), Dictionary.Dict))) and --# (Dictionary.Is_Null_Symbol (X) or Dictionary.IsTypeMark (X, Dictionary.Dict)); --# post (for all I in Index_Range range Index_Range'First .. Stack.Top_Ptr => --# (Dictionary.Is_Null_Symbol (Stack.S(I)) or Dictionary.IsTypeMark (Stack.S(I), Dictionary.Dict))); is procedure Debug --# derives ; is --# hide Debug; begin if CommandLineData.Content.Debug.Expressions then SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Type_Context_Stack PUSH : ", 0); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => Dictionary.GenerateSimpleName (Item => X, Separator => ".")); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); end if; end Debug; begin if Stack.Top_Ptr < ExaminerConstants.WalkExpStackMax then Stack.Top_Ptr := Stack.Top_Ptr + 1; Stack.S (Stack.Top_Ptr) := X; Debug; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Type_Context_Stack_Overflow, Msg => "in Type_Context_Stack.Push"); end if; --# accept Flow, 30, Dictionary.Dict, "Variable not referenced nor exported OK"; end Push; procedure Pop (Stack : in out T_Stack_Type) --# pre (for all I in Index_Range range Index_Range'First .. Stack.Top_Ptr => --# (Dictionary.Is_Null_Symbol (Stack.S(I)) or Dictionary.IsTypeMark (Stack.S(I), Dictionary.Dict))); --# post (for all I in Index_Range range Index_Range'First .. Stack.Top_Ptr => --# (Dictionary.Is_Null_Symbol (Stack.S(I)) or Dictionary.IsTypeMark (Stack.S(I), Dictionary.Dict))); is procedure Debug --# derives ; is --# hide Debug; begin if CommandLineData.Content.Debug.Expressions then SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Type_Context_Stack POP : ", 0); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => Dictionary.GenerateSimpleName (Item => Stack.S (Stack.Top_Ptr), Separator => ".")); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); end if; end Debug; begin if Stack.Top_Ptr > 0 then Debug; -- Call Debug _before_ we move the stack pointer! Stack.Top_Ptr := Stack.Top_Ptr - 1; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Type_Context_Stack_Underflow, Msg => "in Type_Context_Stack.Pop"); end if; --# accept Flow, 30, Dictionary.Dict, "Variable not referenced nor exported OK"; end Pop; function Top (Stack : T_Stack_Type) return Dictionary.Symbol --# pre (for all I in Index_Range range Index_Range'First .. Stack.Top_Ptr => --# (Dictionary.Is_Null_Symbol (Stack.S(I)) or Dictionary.IsTypeMark (Stack.S(I), Dictionary.Dict))); --# return S => (Dictionary.Is_Null_Symbol (S) or Dictionary.IsTypeMark (S, Dictionary.Dict)); is Result : Dictionary.Symbol; begin if Stack.Top_Ptr > 0 then Result := Stack.S (Stack.Top_Ptr); else Result := Dictionary.NullSymbol; end if; --# accept Flow, 30, Dictionary.Dict, "Variable not referenced nor exported OK" & --# Flow, 50, Dictionary.Dict, "Value is not derived from the imported value OK"; return Result; end Top; function Has_One_Entry (Stack : T_Stack_Type) return Boolean --# pre (for all I in Index_Range range Index_Range'First .. 
Stack.Top_Ptr => --# (Dictionary.Is_Null_Symbol (Stack.S(I)) or Dictionary.IsTypeMark (Stack.S(I), Dictionary.Dict))); is begin return Stack.Top_Ptr = 1; end Has_One_Entry; end Type_Context_Stack; spark-2012.0.deb/examiner/errorhandler-printerrors.adb0000644000175000017500000003126011753202336022001 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (ErrorHandler) procedure PrintErrors (Listing : in SPARK_IO.File_Type; Purpose : in Error_Types.ConversionRequestSource) is Err_Count : Natural; OK : Boolean; Next_Error : Error_Types.StringError; Num_Err : Error_Types.NumericError; Accumulator : ErrorAccumulator.T := ErrorAccumulator.Clear; procedure Copy_Source_Lines (To_File : in SPARK_IO.File_Type; Line_No : in LexTokenManager.Line_Numbers) --# global in CommandLineData.Content; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# derives Error_Context_Rec, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Error_Context_Rec, --# Line_No, --# SPARK_IO.File_Sys, --# To_File; is begin if CommandLineData.Content.XML then loop exit when Error_Context_Rec.Line_No >= Line_No; GetFileLine; end loop; else loop exit when Error_Context_Rec.Line_No >= Line_No; GetFileLine; Print_Source_Line (To_File => To_File); end loop; end if; end Copy_Source_Lines; procedure Copy_Source_Line_To_EOF (To_File : in SPARK_IO.File_Type) --# global in CommandLineData.Content; --# in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# derives Error_Context_Rec, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Error_Context_Rec, --# SPARK_IO.File_Sys, --# To_File; is begin if not CommandLineData.Content.XML then loop exit when SPARK_IO.End_Of_File (Error_Context_Rec.Source); GetFileLine; Print_Source_Line (To_File => To_File); end loop; end if; end Copy_Source_Line_To_EOF; procedure Get_Error_Set (Error_Set : out Error_Sets; Next_Error : in out Error_Types.StringError; OK : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in Error_Context_Rec; --# in LexTokenManager.State; --# in Purpose; --# in out Conversions.State; --# in out Err_Count; --# in out SPARK_IO.File_Sys; --# derives Conversions.State, --# Next_Error, --# OK, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Error_Context_Rec, --# LexTokenManager.State, --# Purpose, --# SPARK_IO.File_Sys & --# Error_Set, --# Err_Count from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Error_Context_Rec, --# Err_Count, --# LexTokenManager.State, --# Next_Error, --# Purpose, --# SPARK_IO.File_Sys; is L_OK : Boolean; 
Errors : Error_Sets; Num_Err : Error_Types.NumericError; begin Errors := Empty_Error_Set; loop Errors.Length := Errors.Length + 1; Errors.Content (Errors.Length).Error := Next_Error; if Next_Error.ErrorType = Error_Types.NoErr or else ErrorAccumulator.Is_Error_Continuation (The_Error => Next_Error) then Errors.Content (Errors.Length).Err_Num := 0; else Err_Count := Err_Count + 1; Errors.Content (Errors.Length).Err_Num := Err_Count; end if; Error_IO.Get_Numeric_Error (Error_Context_Rec.Errs, Num_Err); Conversions.ToString (Num_Err, Purpose, Next_Error); L_OK := (Next_Error /= Error_Types.Empty_StringError); exit when not L_OK; exit when Error_Context_Rec.Line_No < Next_Error.Position.Start_Line_No; exit when Errors.Length = ExaminerConstants.MaxErrorSetSize; end loop; OK := L_OK; Error_Set := Errors; end Get_Error_Set; procedure Process_Error_Set (Listing : in SPARK_IO.File_Type; Next_Error : in out Error_Types.StringError; OK : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in Error_Context_Rec; --# in LexTokenManager.State; --# in Purpose; --# in out Accumulator; --# in out Conversions.State; --# in out Err_Count; --# in out SPARK_IO.File_Sys; --# derives Accumulator, --# Err_Count from *, --# CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Error_Context_Rec, --# Err_Count, --# LexTokenManager.State, --# Next_Error, --# Purpose, --# SPARK_IO.File_Sys & --# Conversions.State, --# Next_Error, --# OK from CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Error_Context_Rec, --# LexTokenManager.State, --# Purpose, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# Accumulator, --# CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Error_Context_Rec, --# Err_Count, --# LexTokenManager.State, --# Listing, --# Next_Error, --# Purpose; is Error_Set : Error_Sets; begin Get_Error_Set (Error_Set => Error_Set, Next_Error => Next_Error, OK => OK); if not CommandLineData.Content.XML then Put_Error_Pointers (Listing => Listing, Errors => Error_Set); end if; Put_Error_Messages (Listing => Listing, Errors => Error_Set, Start_Pos => 31, Accumulator => Accumulator); end Process_Error_Set; procedure Process_Errors_On_Line (Listing : in SPARK_IO.File_Type; Next_Error : in out Error_Types.StringError; OK : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in Error_Context_Rec; --# in LexTokenManager.State; --# in Purpose; --# in out Accumulator; --# in out Conversions.State; --# in out Err_Count; --# in out SPARK_IO.File_Sys; --# derives Accumulator, --# Conversions.State, --# Err_Count, --# Next_Error, --# OK, --# SPARK_IO.File_Sys from Accumulator, --# CommandLineData.Content, --# Conversions.State, --# Dictionary.Dict, --# Error_Context_Rec, --# Err_Count, --# LexTokenManager.State, --# Listing, --# Next_Error, --# Purpose, --# SPARK_IO.File_Sys; is L_OK : Boolean; Accumulator_Was_Active : Boolean; begin if not Error_Has_Position_Inline (Err_Type => Next_Error.ErrorType) and then not CommandLineData.Content.XML then New_Line (Listing, 1); end if; loop Process_Error_Set (Listing => Listing, Next_Error => Next_Error, OK => L_OK); exit when not L_OK; exit when Error_Context_Rec.Line_No < Next_Error.Position.Start_Line_No; end loop; OK := L_OK; Accumulator_Was_Active := ErrorAccumulator.Is_Active (This => Accumulator); ErrorAccumulator.Flush (Accumulator, Listing); if Accumulator_Was_Active then New_Line (Listing, 1); end if; end Process_Errors_On_Line; procedure 
Set_Up_Files (OK : out Boolean) --# global in out Error_Context_Rec; --# in out SPARK_IO.File_Sys; --# out Err_Count; --# derives Error_Context_Rec, --# SPARK_IO.File_Sys from *, --# Error_Context_Rec & --# Err_Count from & --# OK from Error_Context_Rec, --# SPARK_IO.File_Sys; is L_OK : Boolean; Success : SPARK_IO.File_Status; Source_File : SPARK_IO.File_Type; Error_File : Error_IO.File_Type; begin Source_File := Error_Context_Rec.Source; SPARK_IO.Reset (Source_File, SPARK_IO.In_File, Success); Error_Context_Rec.Source := Source_File; L_OK := Success = SPARK_IO.Ok; Error_Context_Rec.Line_No := 0; Err_Count := 0; Error_File := Error_Context_Rec.Errs; Error_IO.Reset (Error_File, SPARK_IO.In_File, Success); Error_Context_Rec.Errs := Error_File; OK := L_OK and Success = SPARK_IO.Ok; Error_Context_Rec.Current_Line := E_Strings.Empty_String; end Set_Up_Files; begin Flush_Buffer; Set_Up_Files (OK => OK); if OK then if not CommandLineData.Content.XML then Put_Line (Listing, "Line", 0); end if; Error_IO.Get_Numeric_Error (Error_Context_Rec.Errs, Num_Err); Conversions.ToString (Num_Err, Purpose, Next_Error); OK := (Next_Error /= Error_Types.Empty_StringError); loop exit when not OK; Copy_Source_Lines (To_File => Listing, Line_No => Next_Error.Position.Start_Line_No); Process_Errors_On_Line (Listing => Listing, Next_Error => Next_Error, OK => OK); --# accept Flow, 41, "Expected stable expression"; if not CommandLineData.Content.XML then --# end accept; New_Line (Listing, 1); end if; end loop; Copy_Source_Line_To_EOF (To_File => Listing); end if; Justifications.Print_Justifications (Which_Table => Error_Context_Rec.Justifications_Data_Table, File => Listing); WarningStatus.Report_Suppressed_Warnings (To_File => Listing, Counter => Error_Context_Rec.Counter); end PrintErrors; spark-2012.0.deb/examiner/sem-wf_package_declaration-wf_package_specification-wf_private.adb0000644000175000017500000001200211753202336031263 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- Overview: -- Loops through all basic_declaritive_item_rep below passed in as Node. -- Calls appropriate wf for NextDerivative node of each. Cannot directly -- raise any errors but called procedures can. 
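-- The body below follows the usual Examiner pattern for walking such a list;
-- it is sketched here for orientation only (the real loop also carries the
-- syntax-tree asserts and the dispatch to the individual wf_ procedures):
--    Next_Node := Last_Child_Of (Start_Node => Node);
--    while Next_Node /= Node loop
--       -- examine Next_Sibling (Next_Node): basic_declarative_item,
--       -- subprogram_declaration, generic_subprogram_instantiation or apragma
--       Next_Node := Parent_Node (Current_Node => Next_Node);
--    end loop;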
---------------------------------------------------------------------------- separate (Sem.Wf_Package_Declaration.Wf_Package_Specification) procedure Wf_Private (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) is Next_Node, Node_To_Check : STree.SyntaxNode; Unused : Dictionary.Symbol; begin Next_Node := Last_Child_Of (Start_Node => Node); while Next_Node /= Node loop --# assert STree.Table = STree.Table~; -- ASSUME Next_Node = basic_declarative_item_rep SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Next_Node) = SP_Symbols.basic_declarative_item_rep, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = basic_declarative_item_rep in Wf_Private"); Node_To_Check := Next_Sibling (Current_Node => Next_Node); -- ASSUME Node_To_Check = subprogram_declaration OR generic_subprogram_instantiation OR basic_declarative_item OR apragma case Syntax_Node_Type (Node => Node_To_Check) is when SP_Symbols.basic_declarative_item => -- ASSUME Node_To_Check = basic_declarative_item Wf_Basic_Declarative_Item (Node => Node_To_Check, Current_Scope => Current_Scope, Component_Data => Component_Data, The_Heap => The_Heap); when SP_Symbols.subprogram_declaration => -- ASSUME Node_To_Check = subprogram_declaration --# accept Flow, 41, "Expected stable expression"; case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => ErrorHandler.Semantic_Error (Err_Num => 113, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node_To_Check), Id_Str => LexTokenManager.Null_String); when CommandLineData.SPARK95_Onwards => --# accept Flow, 10 , Unused, "Expected ineffective assignment"; Wf_Subprogram_Declaration (Node => Node_To_Check, Inherit_Node => STree.NullNode, Context_Node => STree.NullNode, Generic_Formal_Part_Node => STree.NullNode, Current_Scope => Current_Scope, Generic_Unit => Dictionary.NullSymbol, Component_Data => Component_Data, The_Heap => The_Heap, Subprog_Sym => Unused); --# end accept; end case; --# end accept; when SP_Symbols.generic_subprogram_instantiation => -- ASSUME Node_To_Check = generic_subprogram_instantiation Wf_Generic_Subprogram_Instantiation (Node => Node_To_Check, Scope => Current_Scope, Component_Data => Component_Data, The_Heap => The_Heap); when SP_Symbols.apragma => -- ASSUME Node_To_Check = apragma Wf_Pragma (Node => Node_To_Check, Scope => Current_Scope); when others => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node_To_Check = subprogram_declaration OR generic_subprogram_instantiation OR " & "basic_declarative_item OR apragma in WF_Private"); end case; Next_Node := Parent_Node (Current_Node => Next_Node); end loop; --# accept Flow, 33, Unused, "Expected to be neither referenced nor exported"; end Wf_Private; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_attribute_designator-calc_attribute.adb0000644000175000017500000003130511753202336031140 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Walk_Expression_P.Wf_Attribute_Designator) procedure Calc_Attribute (Node : in STree.SyntaxNode; Attrib_Name : in LexTokenManager.Lex_String; Prefix : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Base_Found : in Boolean; Argument : in out Maths.Value; RHS_Of_Range : out Maths.Value) is type Err_Lookup is array (Boolean) of Natural; Which_Err : constant Err_Lookup := Err_Lookup'(False => 402, True => 399); Err : Maths.ErrorCode; Local_Base_Type : Dictionary.Symbol; Argument_Local : Maths.Value; procedure Calc_Array_Attribute (Attrib_Name : in LexTokenManager.Lex_String; Prefix : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Base_Found : in Boolean; Argument : in out Maths.Value; RHS_Of_Range : in out Maths.Value; Err : in out Maths.ErrorCode) --# global in Dictionary.Dict; --# in LexTokenManager.State; --# derives Argument, --# Err, --# RHS_Of_Range from *, --# Argument, --# Attrib_Name, --# Base_Found, --# Dictionary.Dict, --# LexTokenManager.State, --# Prefix, --# Scope; is Dimension : Positive; V_Low, V_High : Maths.Value; function Get_Dimension (Argument : Maths.Value) return Positive is Unused_Err : Maths.ErrorCode; Dimension : Integer; begin if Maths.HasNoValue (Argument) then Dimension := 1; else --# accept Flow, 10, Unused_Err, "Expected ineffective assignment"; Maths.ValueToInteger (Argument, -- expect ineffective assign to Unused_Err -- to get Dimension, Unused_Err); --# end accept; if Dimension <= 0 then Dimension := 1; end if; end if; --# accept Flow, 33, Unused_Err, "Expected to be neither referenced nor exported"; return Dimension; end Get_Dimension; begin -- Calc_Array_Attribute if Base_Found then -- the only valid attribute would be size and we never know what -- the size of things is so we can only return the null value Argument := Maths.NoValue; elsif Dictionary.Is_Unconstrained_Array_Type_Mark (Prefix, Scope) then Argument := Maths.NoValue; else -- a constrained array type or subtype Dimension := Get_Dimension (Argument); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Range_Token) = LexTokenManager.Str_Eq then Argument := Maths.ValueRep (Dictionary.GetArrayAttributeValue (LexTokenManager.First_Token, Prefix, Dimension)); RHS_Of_Range := Maths.ValueRep (Dictionary.GetArrayAttributeValue (LexTokenManager.Last_Token, Prefix, Dimension)); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Length_Token) = LexTokenManager.Str_Eq then V_Low := Maths.ValueRep (Dictionary.GetArrayAttributeValue (LexTokenManager.First_Token, Prefix, Dimension)); V_High := Maths.ValueRep (Dictionary.GetArrayAttributeValue (LexTokenManager.Last_Token, Prefix, Dimension)); --# accept Flow, 10, Err, "Expected ineffective assignment"; Maths.Subtract (V_High, -- flow error expected V_Low, Argument, Err); --# end accept; V_High := Argument; Maths.Add (V_High, Maths.OneInteger, Argument, Err); else -- first/last Argument := 
Maths.ValueRep (Dictionary.GetArrayAttributeValue (Attrib_Name, Prefix, Dimension)); end if; end if; end Calc_Array_Attribute; begin -- Calc_Attribute -- this procedure is only called wf_attribute_designator if the attribute -- is well-formed. RHS_Of_Range := Maths.NoValue; -- default value unless 'RANGE processed if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Succ_Token) = LexTokenManager.Str_Eq then Maths.SuccOp (Argument, Err); Local_Base_Type := Dictionary.GetRootType (Prefix); if Dictionary.TypeIsModular (Local_Base_Type) then Maths.Modulus (FirstNum => Argument, SecondNum => Maths.ValueRep (Dictionary.GetScalarAttributeValue (Base => False, Name => LexTokenManager.Modulus_Token, TypeMark => Local_Base_Type)), Result => Argument_Local, Ok => Err); else Sem.Constraint_Check (Val => Argument, New_Val => Argument_Local, Is_Annotation => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_attribute_designator, Typ => Local_Base_Type, Position => STree.Node_Position (Node => Node)); end if; Argument := Argument_Local; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Pred_Token) = LexTokenManager.Str_Eq then Maths.PredOp (Argument, Err); Local_Base_Type := Dictionary.GetRootType (Prefix); if Dictionary.TypeIsModular (Local_Base_Type) then Maths.Modulus (FirstNum => Argument, SecondNum => Maths.ValueRep (Dictionary.GetScalarAttributeValue (Base => False, Name => LexTokenManager.Modulus_Token, TypeMark => Local_Base_Type)), Result => Argument_Local, Ok => Err); else Sem.Constraint_Check (Val => Argument, New_Val => Argument_Local, Is_Annotation => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_attribute_designator, Typ => Local_Base_Type, Position => STree.Node_Position (Node => Node)); end if; Argument := Argument_Local; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Floor_Token) = LexTokenManager.Str_Eq then Maths.Floor (Argument, Argument_Local, Err); Argument := Argument_Local; Local_Base_Type := Dictionary.GetRootType (Prefix); Sem.Constraint_Check (Val => Argument, New_Val => Argument_Local, Is_Annotation => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_attribute_designator, Typ => Local_Base_Type, Position => STree.Node_Position (Node => Node)); Argument := Argument_Local; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Ceiling_Token) = LexTokenManager.Str_Eq then Maths.Ceiling (Argument, Argument_Local, Err); Argument := Argument_Local; Local_Base_Type := Dictionary.GetRootType (Prefix); Sem.Constraint_Check (Val => Argument, New_Val => Argument_Local, Is_Annotation => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_attribute_designator, Typ => Local_Base_Type, Position => STree.Node_Position (Node => Node)); Argument := Argument_Local; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Mod_Token) = LexTokenManager.Str_Eq then Local_Base_Type := Dictionary.GetRootType (Prefix); Maths.Modulus (FirstNum => Argument, SecondNum => Maths.ValueRep (Dictionary.GetScalarAttributeValue (Base => False, Name => LexTokenManager.Modulus_Token, TypeMark => Local_Base_Type)), Result => Argument_Local, Ok => Err); Argument := Argument_Local; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => 
LexTokenManager.Val_Token) = LexTokenManager.Str_Eq then Err := Maths.NoError; -- upper and lower bounds check required elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Pos_Token) = LexTokenManager.Str_Eq then Err := Maths.NoError; -- no action required, no error can occur elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Valid_Token) = LexTokenManager.Str_Eq then Err := Maths.NoError; -- no action required, no error can occur elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Size_Token) = LexTokenManager.Str_Eq then Err := Maths.NoError; Argument := Maths.ValueRep (Dictionary.TypeSizeAttribute (Prefix)); elsif Dictionary.TypeIsScalar (Prefix) then Err := Maths.NoError; Argument := Maths.ValueRep (Dictionary.GetScalarAttributeValue (Base_Found, Attrib_Name, Prefix)); elsif Dictionary.TypeIsArray (Prefix) then Err := Maths.NoError; Calc_Array_Attribute (Attrib_Name => Attrib_Name, Prefix => Prefix, Scope => Scope, Base_Found => Base_Found, Argument => Argument, RHS_Of_Range => RHS_Of_Range, Err => Err); else -- non-implemented attribute - should never occur Argument := Maths.NoValue; Err := Maths.NoError; end if; case Err is when Maths.NoError => null; when Maths.DivideByZero => Argument := Maths.NoValue; ErrorHandler.Semantic_Error (Err_Num => 400, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); when Maths.ConstraintError => Argument := Maths.NoValue; ErrorHandler.Semantic_Error (Err_Num => Which_Err (STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_attribute_designator), Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); when Maths.OverFlow => Argument := Maths.NoValue; ErrorHandler.Semantic_Warning (Err_Num => 200, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); when others => -- indicates internal error in maths package Argument := Maths.NoValue; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Math_Error, Msg => "in Calc_Attribute"); end case; end Calc_Attribute; spark-2012.0.deb/examiner/maths.adb0000644000175000017500000020407111753202336016041 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
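--  ---------------------------------------------------------------------
--  Editor's note (not part of the original source): the package body
--  below implements arbitrary-precision rational arithmetic.  Judging by
--  its use in NaturalToPart, AddPart and ShiftUpPart, a Part holds a
--  fixed-length array of decimal digits with the least significant digit
--  in element 1, plus a Length and an Overflowed flag; the actual
--  declarations live in the package spec, which is not reproduced here.
--  The sketch that follows is a simplified, self-contained stand-in
--  (Mini_Part, To_Part, Add and Max_Len are hypothetical names) showing
--  the digit layout and the schoolbook carry addition that AddPart uses.
--  Multiplying a Part by ten is then just a one-place shift of the digit
--  array, which is what ShiftUpPart does.

with Ada.Text_IO;
procedure Part_Sketch is
   Max_Len : constant := 20;
   type Digit is range 0 .. 15;
   type Digit_Array is array (1 .. Max_Len) of Digit;
   type Mini_Part is record
      Numerals : Digit_Array := (others => 0);  -- element 1 is the units digit
      Length   : Natural     := 1;
   end record;

   -- Mirrors NaturalToPart: peel decimal digits off the low end.
   function To_Part (N : Natural) return Mini_Part is
      P : Mini_Part;
      V : Natural := N;
   begin
      P.Length := 0;
      while V > 0 loop
         P.Length := P.Length + 1;
         P.Numerals (P.Length) := Digit (V mod 10);
         V := V / 10;
      end loop;
      if P.Length = 0 then
         P.Length := 1;
      end if;
      return P;
   end To_Part;

   -- Mirrors AddPart: schoolbook addition, digit by digit with a carry.
   function Add (A, B : Mini_Part) return Mini_Part is
      R     : Mini_Part;
      Carry : Natural := 0;
      Len   : Natural := Natural'Max (A.Length, B.Length);
   begin
      for I in 1 .. Len loop
         declare
            S : constant Natural :=
              Natural (A.Numerals (I)) + Natural (B.Numerals (I)) + Carry;
         begin
            R.Numerals (I) := Digit (S mod 10);
            Carry          := S / 10;
         end;
      end loop;
      if Carry /= 0 then
         Len := Len + 1;
         R.Numerals (Len) := Digit (Carry);
      end if;
      R.Length := Len;
      return R;
   end Add;

   P : constant Mini_Part := Add (To_Part (478), To_Part (953));
begin
   -- 478 + 953 = 1431, stored least-significant-digit first as (1, 3, 4, 1)
   for I in reverse 1 .. P.Length loop
      Ada.Text_IO.Put (Digit'Image (P.Numerals (I)));
   end loop;
   Ada.Text_IO.New_Line;
end Part_Sketch;
--  ---------------------------------------------------------------------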
-- --============================================================================= package body Maths is type Division_Result_Type is record Quotient, Remnant : Part; end record; -------------------------------------------------------------------------- -- Local Procedures -------------------------------------------------------------------------- -------------------------------------------------------------------------- -- Low-level conversions and utilities -------------------------------------------------------------------------- -------------------------------------------------------------------------- -- Converts extended Character to digit -- only works for '0'..'9' and 'A'..'F'. ParseString will ensure that -- no other values will be passed to it. function CharToDigit (Ch : Character) return Digit -- # pre ((Ch >= '0') and (Ch <= '9')) or ((Ch >= 'A') and (Ch <= 'F')); is Valu : Digit; begin if Ch in '0' .. '9' then Valu := (Character'Pos (Ch) - Character'Pos ('0')); else Valu := ((Character'Pos (Ch) - Character'Pos ('A')) + 10); end if; return Valu; end CharToDigit; -------------------------------------------------------------------------- -- Converts digit (Integer subtype 0..15) to extended Character function DigitToChar (Dig : Digit) return Character is Ch : Character; begin if Dig <= 9 then Ch := Character'Val (Dig + Character'Pos ('0')); else Ch := Character'Val ((Dig + Character'Pos ('A')) - 10); end if; return Ch; end DigitToChar; -------------------------------------------------------------------------- -- Returns Larger of 2 Naturals function Max (X, Y : Natural) return Natural is Larger : Natural; begin if X > Y then Larger := X; else Larger := Y; end if; return Larger; end Max; -------------------------------------------------------------------------- -- final check of value overflow or underflow before returning Value -- to caller from one of the exported procedures below. Any existing -- error is preserved ie. overflow will only be reported if an earlier -- error is not detected. function OverflowCheck (ExistingError : ErrorCode; Num : Value) return ErrorCode is Result : ErrorCode; begin if ExistingError = NoError and then (Num.Numerator.Overflowed or Num.Denominator.Overflowed) then Result := OverFlow; else Result := ExistingError; end if; return Result; end OverflowCheck; -------------------------------------------------------------------------- -- Low-level manipulation of part-Value arrays function StripLeadingZeros (P : Part) return Part is PLocal : Part; begin PLocal := P; while (PLocal.Length > 1) and then (PLocal.Numerals (PLocal.Length) = 0) loop PLocal.Length := PLocal.Length - 1; end loop; return PLocal; end StripLeadingZeros; -------------------------------------------------------------------------- -- Multiplies Value part by 10 function ShiftUpPart (P : Part) return Part is PLocal : Part; begin PLocal := P; if PLocal.Length = MaxLength then ----------can't shift it without overflow PLocal.Overflowed := True; else ----------room to shift for i in reverse PosRange range 1 .. 
PLocal.Length loop PLocal.Numerals (i + 1) := PLocal.Numerals (i); end loop; PLocal.Numerals (1) := 0; PLocal.Length := P.Length + 1; PLocal := StripLeadingZeros (PLocal); --in case we have just turned 0 into 00 end if; return PLocal; end ShiftUpPart; -------------------------------------------------------------------------- -- Divides Value Part by 10 function ShiftDownPart (P : Part) return Part is PLocal : Part; begin PLocal := P; if PLocal.Length = 1 then PLocal.Numerals (1) := 0; else for i in PosRange range 2 .. PLocal.Length loop PLocal.Numerals (i - 1) := PLocal.Numerals (i); end loop; PLocal.Numerals (PLocal.Length) := 0; PLocal.Length := PLocal.Length - 1; end if; return PLocal; end ShiftDownPart; -------------------------------------------------------------------------- -- Basic arithmetic on Part Values -------------------------------------------------------------------------- -- Symbolically adds two Part Values together function AddPart (FirstPart, SecondPart : Part) return Part is Length : PosRange; Carry, IntermediateValue : Natural; Result : Part; begin Carry := 0; Result := ZeroPart; Result.Overflowed := FirstPart.Overflowed or SecondPart.Overflowed; -- propagate error Length := Max (FirstPart.Length, SecondPart.Length); for i in PosRange range 1 .. Length loop IntermediateValue := (Natural (FirstPart.Numerals (i)) + Natural (SecondPart.Numerals (i))) + Carry; if IntermediateValue >= 10 then IntermediateValue := IntermediateValue - 10; Carry := 1; else Carry := 0; end if; Result.Numerals (i) := Digit (IntermediateValue); end loop; if Carry /= 0 then if Length = MaxLength then -- can't carry without overflow Result.Overflowed := True; else -- ok to extend Value Length := Length + 1; Result.Numerals (Length) := 1; end if; end if; Result.Length := Length; return Result; end AddPart; -------------------------------------------------------------------------- -- Symbolically subtracts Part Values -- WARNING second parameter must be <= first before the call function SubtractPart (Larger, Smaller : Part) return Part is Length : PosRange; Borrow : Natural; IntermediateValue : Integer; Result : Part; begin Borrow := 0; Result := ZeroPart; Result.Overflowed := Larger.Overflowed or Smaller.Overflowed; -- propagate error Length := Max (Larger.Length, Smaller.Length); Result.Length := Length; for i in PosRange range 1 .. Length loop IntermediateValue := (Natural (Larger.Numerals (i)) - Natural (Smaller.Numerals (i))) - Borrow; if IntermediateValue < 0 then IntermediateValue := IntermediateValue + 10; Borrow := 1; else Borrow := 0; end if; Result.Numerals (i) := Digit (IntermediateValue); end loop; return StripLeadingZeros (Result); end SubtractPart; -------------------------------------------------------------------------- -- NB. These Parts are considered unsigned function GreaterPart (FirstPart, SecondPart : Part) return Boolean is IsGreater : Boolean; i : LengthRange; begin if FirstPart.Length = SecondPart.Length then IsGreater := False; i := FirstPart.Length; loop if FirstPart.Numerals (i) /= SecondPart.Numerals (i) then IsGreater := FirstPart.Numerals (i) > SecondPart.Numerals (i); exit; end if; exit when i = 1; i := i - 1; end loop; else IsGreater := FirstPart.Length > SecondPart.Length; end if; return IsGreater; end GreaterPart; -------------------------------------------------------------------------- -- NB. 
These Parts are considered unsigned
   function LesserPart (FirstPart, SecondPart : Part) return Boolean is
      IsLesser : Boolean;
      i        : LengthRange;
   begin
      if FirstPart.Length = SecondPart.Length then
         IsLesser := False;
         i := FirstPart.Length;
         loop
            if FirstPart.Numerals (i) /= SecondPart.Numerals (i) then
               IsLesser := FirstPart.Numerals (i) < SecondPart.Numerals (i);
               exit;
            end if;
            exit when i = 1;
            i := i - 1;
         end loop;
      else
         IsLesser := FirstPart.Length < SecondPart.Length;
      end if;
      return IsLesser;
   end LesserPart;

   --------------------------------------------------------------------------
   -- Multiplies a Part Value by a single digit (range 0..15).
   -- Used in conversion of based literal to a Value and as Part of the
   -- Value multiply routines
   function SingleDigitMult (P : Part; D : Digit) return Part is
      Carry, IntermediateValue : Natural;
      Result                   : Part;
   begin
      Carry  := 0;
      Result := ZeroPart;
      Result.Overflowed := P.Overflowed; --propagate error
      Result.Length     := P.Length;
      for i in PosRange range 1 .. P.Length loop
         IntermediateValue   := Natural (P.Numerals (i)) * Natural (D) + Carry;
         Result.Numerals (i) := Digit (IntermediateValue mod 10);
         Carry               := IntermediateValue / 10;
      end loop;
      while Carry /= 0 loop
         if Result.Length = MaxLength then -- can't carry without overflow
            Result.Overflowed := True;
            exit;
         end if;
         Result.Length := Result.Length + 1;
         Result.Numerals (Result.Length) := Digit (Carry mod 10);
         Carry := Carry / 10;
      end loop;
      return Result;
   end SingleDigitMult;

   --------------------------------------------------------------------------
   -- Symbolically multiplies 2 Part Values together
   function MultPart (FirstPart, SecondPart : Part) return Part is
      FirstPartLocal, Result : Part;
   begin
      Result := ZeroPart;
      Result.Overflowed := FirstPart.Overflowed or SecondPart.Overflowed; -- propagate error
      FirstPartLocal := FirstPart;
      for i in PosRange range 1 .. SecondPart.Length loop
         Result := AddPart (Result, SingleDigitMult (FirstPartLocal, SecondPart.Numerals (i)));
         FirstPartLocal := ShiftUpPart (FirstPartLocal);
      end loop;
      return Result;
   end MultPart;

   --------------------------------------------------------------------------
   -- Digit by digit long division of one Value Part by another.
   -- Do not call with Bot = 0.
   function DivPart (Top, Bot : Part) return Division_Result_Type is
      subtype GoesIndexRange is Integer range 0 .. 9;
      type GoesArray is array (GoesIndexRange) of Part;

      GoesDigit               : GoesIndexRange;
      Goes                    : GoesArray;
      ResultLocal, CurrentTry : Part;
      Column                  : Natural;

      ---------------------------
      -- builds array of 1*divisor..9*divisor
      procedure BuildGoesTable
      --# global in  Bot;
      --#        out Goes;
      --# derives Goes from Bot;
      is
      begin
         Goes := GoesArray'(GoesIndexRange => ZeroPart);
         Goes (1) := Bot;
         for i in GoesIndexRange range 2 .. 9 loop
            Goes (i) := AddPart (Goes (i - 1), Bot);
         end loop;
      end BuildGoesTable;

      ---------------------------
      function FindGoes (Into : Part) return GoesIndexRange
      --# global in Goes;
      is
         Result : GoesIndexRange;
      begin
         Result := 0;
         for i in reverse Integer range 1 .. 9 loop
            if (LesserPart (Goes (i), Into)) or (Goes (i) = Into) then
               Result := i;
               exit;
            end if;
         end loop;
         return Result;
      end FindGoes;

      ---------------------------
      procedure StoreDigit (Dig : in GoesIndexRange; Dest : in out Part)
      --# derives Dest from *,
      --#                   Dig;
      -- NB. This method of storing digits automatically excludes leading zeros
      -- since Length only increases once a non-zero is present.
is begin Dest := ShiftUpPart (Dest); Dest.Numerals (1) := Digit (Dig); end StoreDigit; --------------------------- begin -- DivPart BuildGoesTable; ResultLocal := ZeroPart; CurrentTry := ZeroPart; Column := Top.Length; -- start at MSD loop exit when Column = 0; StoreDigit (Integer (Top.Numerals (Column)), CurrentTry); Column := Column - 1; GoesDigit := FindGoes (CurrentTry); StoreDigit (GoesDigit, ResultLocal); if GoesDigit /= 0 then CurrentTry := SubtractPart (CurrentTry, Goes (GoesDigit)); end if; end loop; return Division_Result_Type'(Quotient => ResultLocal, Remnant => CurrentTry); end DivPart; -------------------------------------------------------------------------- -- Conversions too and from Part Values -------------------------------------------------------------------------- --Converts an Ada Natural type to a Part Value; function NaturalToPart (Int : Natural) return Part is IntLocal : Natural; Result : Part; begin Result := ZeroPart; IntLocal := Int; Result.Length := 0; while IntLocal > 0 loop Result.Length := Result.Length + 1; Result.Numerals (Result.Length) := Digit (IntLocal mod 10); IntLocal := IntLocal / 10; end loop; return Result; end NaturalToPart; -------------------------------------------------------------------------- -- Convert String to Natural - NB. not done symbolically -- Assumes decimal function String_To_Natural (Str : E_Strings.T) return Natural is Position_Multiplier, Total : Natural; begin Position_Multiplier := 1; Total := 0; for I in reverse E_Strings.Positions range 1 .. E_Strings.Get_Length (E_Str => Str) loop Total := Total + Position_Multiplier * Natural (CharToDigit (E_Strings.Get_Element (E_Str => Str, Pos => I))); Position_Multiplier := Position_Multiplier * 10; end loop; return Total; end String_To_Natural; -------------------------------------------------------------------------- -- Produces Value Part from string interpreting it as being to base function String_To_Part (Base : Natural; Str : E_Strings.T) return Part is Position_Multiplier, Base_Part, Result : Part; begin Result := ZeroPart; Position_Multiplier := OnePart; Base_Part := NaturalToPart (Base); for I in reverse E_Strings.Positions range 1 .. E_Strings.Get_Length (E_Str => Str) loop Result := AddPart (Result, SingleDigitMult (Position_Multiplier, CharToDigit (E_Strings.Get_Element (E_Str => Str, Pos => I)))); Position_Multiplier := MultPart (Position_Multiplier, Base_Part); end loop; return Result; end String_To_Part; -------------------------------------------------------------------------- -- Produces Value Part from decimal string more quickly than String_To_Part function Dec_String_To_Part (Str : E_Strings.T) return Part is Result : Part; Hi : Natural; begin Result := ZeroPart; Hi := E_Strings.Get_Length (E_Str => Str); if Hi <= MaxLength then for I in Natural range 1 .. 
Hi loop Result.Numerals (I) := CharToDigit (E_Strings.Get_Element (E_Str => Str, Pos => (Hi - I) + 1)); end loop; Result.Length := E_Strings.Get_Length (E_Str => Str); else Result.Overflowed := True; end if; return Result; end Dec_String_To_Part; -------------------------------------------------------------------------- -- Normalization routines for rational pairs -------------------------------------------------------------------------- -- removes zeros symmetrically from Numerator and Denominator procedure NormalizeByTen (Num : in out Value) --# derives Num from *; is begin -- strip mutual trailing zeros while (Num.Numerator.Numerals (1) = 0) and (Num.Denominator.Numerals (1) = 0) loop Num.Numerator := ShiftDownPart (Num.Numerator); Num.Denominator := ShiftDownPart (Num.Denominator); end loop; end NormalizeByTen; -------------------------------------------------------------------------- -- Routine to find GCD of 2 Parts function GCD (FirstPart, SecondPart : Part) return Part is FirstPartLocal, SecondPartLocal : Part; DivisionResult : Division_Result_Type; begin FirstPartLocal := FirstPart; SecondPartLocal := SecondPart; loop exit when SecondPartLocal = ZeroPart; DivisionResult := DivPart (FirstPartLocal, SecondPartLocal); FirstPartLocal := SecondPartLocal; SecondPartLocal := DivisionResult.Remnant; end loop; return FirstPartLocal; end GCD; -------------------------------------------------------------------------- procedure Normalize (Num : in out Value) --# derives Num from *; is Divisor : Part; begin NormalizeByTen (Num); --does not reduce no of GCD iterations but shortens each one if not (Num.Numerator.Overflowed or Num.Denominator.Overflowed) then Divisor := GCD (Num.Numerator, Num.Denominator); Num.Numerator := DivPart (Num.Numerator, Divisor).Quotient; Num.Denominator := DivPart (Num.Denominator, Divisor).Quotient; end if; end Normalize; -------------------------------------------------------------------------- -- Higher level arithmetic routines -------------------------------------------------------------------------- -- Correctly handles ordering and sign of subtraction of +ve Numerator Parts -- of Value but leaves Denominator and Value types alone so can be -- use by both Integer and Real routines. Callers should ensure that field -- Sort of Result is set correctly. procedure NumeratorSubtract (FirstNum, SecondNum : in Value; Result : in out Value) --# derives Result from *, --# FirstNum, --# SecondNum; is begin if GreaterPart (FirstNum.Numerator, SecondNum.Numerator) then Result.Numerator := SubtractPart (FirstNum.Numerator, SecondNum.Numerator); elsif GreaterPart (SecondNum.Numerator, FirstNum.Numerator) then Result.Numerator := SubtractPart (SecondNum.Numerator, FirstNum.Numerator); Result.IsPositive := False; else Result.Numerator := ZeroPart; end if; end NumeratorSubtract; -------------------------------------------------------------------------- -- Correctly handles adding (including ordering and signs) of numerator Parts -- of Values but leaves Denominator and Value types alone so can be -- use by both Integer and Real routines. Callers should ensure that field -- Sort of Result is set correctly. 
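   --  Editor's illustrative sketch (not part of the original source): a
   --  Value keeps rationals as a Numerator/Denominator pair of Parts.
   --  Normalize above reduces that pair by their greatest common divisor
   --  (Euclid's algorithm, as in function GCD), and CommonDenominator and
   --  RealAdd below add two rationals by putting them over a common
   --  denominator first.  The sketch repeats those steps on plain
   --  Integers; GCD here mirrors the function above, the other names are
   --  hypothetical.

with Ada.Text_IO;
procedure Rational_Sketch is
   -- Euclid's algorithm, as in function GCD above.
   function GCD (A, B : Natural) return Natural is
      X : Natural := A;
      Y : Natural := B;
      T : Natural;
   begin
      while Y /= 0 loop
         T := X mod Y;
         X := Y;
         Y := T;
      end loop;
      return X;
   end GCD;

   -- 3/4 + 5/6: common denominator 24, numerator 3*6 + 5*4 = 38,
   -- then reduce by GCD (38, 24) = 2 to get 19/12.
   Num   : Integer := 3 * 6 + 5 * 4;   -- 38
   Denom : Integer := 4 * 6;           -- 24
   G     : constant Natural := GCD (Natural (Num), Natural (Denom));
begin
   Num   := Num / G;
   Denom := Denom / G;
   Ada.Text_IO.Put_Line (Integer'Image (Num) & " /" & Integer'Image (Denom));  -- 19 / 12
end Rational_Sketch;

   --------------------------------------------------------------------------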
procedure NumeratorAdd (FirstNum, SecondNum : in Value; Result : in out Value) --# derives Result from *, --# FirstNum, --# SecondNum; is begin if FirstNum.IsPositive and SecondNum.IsPositive then Result.Numerator := AddPart (FirstNum.Numerator, SecondNum.Numerator); elsif not (FirstNum.IsPositive or SecondNum.IsPositive) then Result.Numerator := AddPart (FirstNum.Numerator, SecondNum.Numerator); Result.IsPositive := False; elsif FirstNum.IsPositive and not SecondNum.IsPositive then NumeratorSubtract (FirstNum => FirstNum, SecondNum => SecondNum, Result => Result); elsif not FirstNum.IsPositive and SecondNum.IsPositive then NumeratorSubtract (FirstNum => SecondNum, SecondNum => FirstNum, Result => Result); end if; end NumeratorAdd; -------------------------------------------------------------------------- -- Modifies 2 Values such that their Denominators are the same procedure CommonDenominator (FirstNum, SecondNum : in Value; ModifiedFirst, ModifiedSecond : out Value) --# derives ModifiedFirst, --# ModifiedSecond from FirstNum, --# SecondNum; is CommonDenom : Part; begin ModifiedFirst := FirstNum; ModifiedSecond := SecondNum; if not (FirstNum.Denominator = SecondNum.Denominator) then CommonDenom := MultPart (FirstNum.Denominator, SecondNum.Denominator); ModifiedFirst.Numerator := MultPart (FirstNum.Numerator, SecondNum.Denominator); ModifiedSecond.Numerator := MultPart (SecondNum.Numerator, FirstNum.Denominator); ModifiedFirst.Denominator := CommonDenom; ModifiedSecond.Denominator := CommonDenom; end if; end CommonDenominator; -------------------------------------------------------------------------- procedure RealAdd (FirstNum, SecondNum : in Value; Result : out Value; Ok : out ErrorCode) --# derives Ok, --# Result from FirstNum, --# SecondNum; is FirstNumLocal, SecondNumLocal, Res : Value; begin Res := ZeroReal; CommonDenominator (FirstNum, SecondNum, FirstNumLocal, SecondNumLocal); NumeratorAdd (FirstNumLocal, SecondNumLocal, Res); Res.Denominator := FirstNumLocal.Denominator; Normalize (Res); Result := Res; Ok := OverflowCheck (NoError, Res); end RealAdd; -------------------------------------------------------------------------- procedure IntegerMultiply (FirstNum, SecondNum : in Value; Result : out Value; Ok : out ErrorCode) --# derives Ok, --# Result from FirstNum, --# SecondNum; is Res : Value; begin Res := ZeroInteger; Res.Numerator := StripLeadingZeros (MultPart (FirstNum.Numerator, SecondNum.Numerator)); Res.IsPositive := (FirstNum.IsPositive = SecondNum.IsPositive); Ok := OverflowCheck (NoError, Res); Result := Res; end IntegerMultiply; -------------------------------------------------------------------------- procedure RealMultiply (FirstNum, SecondNum : in Value; Result : out Value; Ok : out ErrorCode) --# derives Ok, --# Result from FirstNum, --# SecondNum; is Res : Value; begin Res := ZeroReal; Res.Numerator := MultPart (FirstNum.Numerator, SecondNum.Numerator); Res.Denominator := MultPart (FirstNum.Denominator, SecondNum.Denominator); Res.IsPositive := (FirstNum.IsPositive = SecondNum.IsPositive); Normalize (Res); Result := Res; Ok := OverflowCheck (NoError, Res); end RealMultiply; -------------------------------------------------------------------------- procedure RealDivide (FirstNum, SecondNum : in Value; Result : out Value; Ok : out ErrorCode) --# derives Ok, --# Result from FirstNum, --# SecondNum; is ---------------------------------------- function Invert (N : Value) return Value is begin return Value'(Numerator => N.Denominator, Denominator => N.Numerator, 
IsPositive => N.IsPositive, Sort => N.Sort); end Invert; ---------------------------------------- begin -- RealDivide RealMultiply (FirstNum, Invert (SecondNum), Result, Ok); end RealDivide; -------------------------------------------------------------------------- procedure GreaterLocal (FirstNum, SecondNum : in Value; Result : out Boolean; Ok : out ErrorCode) --# derives Ok, --# Result from FirstNum, --# SecondNum; is FirstLocal, SecondLocal : Value; LocalOk : ErrorCode; begin LocalOk := NoError; if FirstNum.Sort /= SecondNum.Sort then Result := False; LocalOk := TypeMismatch; else --legal types if FirstNum.Sort = RealValue then --put on common Denominator CommonDenominator (FirstNum, SecondNum, FirstLocal, SecondLocal); else --this covers Integer, enumeration and Boolean FirstLocal := FirstNum; SecondLocal := SecondNum; end if; if FirstLocal.IsPositive then if SecondLocal.IsPositive then -- both positive Result := GreaterPart (FirstLocal.Numerator, SecondLocal.Numerator); else -- first positive and second Value is negative Result := True; end if; else --first Value is negative if SecondLocal.IsPositive then -- first negative, second positive Result := False; else --both Values are negative Result := LesserPart (FirstLocal.Numerator, SecondLocal.Numerator); end if; end if; LocalOk := OverflowCheck (LocalOk, FirstLocal); LocalOk := OverflowCheck (LocalOk, SecondLocal); end if; Ok := LocalOk; end GreaterLocal; -------------------------------------------------------------------------- -- Non-exported procedure used by Remainder and Modulus Only -------------------------------------------------------------------------- procedure ModRemLegalityCheck (FirstNum, SecondNum : in Value; Err : out ErrorCode) --# derives Err from FirstNum, --# SecondNum; is begin if (FirstNum.Sort = IntegerValue) and (SecondNum.Sort = IntegerValue) then if SecondNum.Numerator = ZeroPart then Err := DivideByZero; else --this is the legal case we can do something with Err := NoError; end if; elsif (FirstNum.Sort = RealValue) and (SecondNum.Sort = RealValue) then Err := IllegalOperation; else Err := TypeMismatch; end if; end ModRemLegalityCheck; ------------------------------------------------------------------------- procedure ParseString (S : in E_Strings.T; Decimal_Point_Found, Exponent_Found, Base_Found : out Boolean; Base : out Natural; Core_String, Exp_String : out E_Strings.T; Exp_Sign : out Character; Places_After_Point : out E_Strings.Lengths; Legal_Syntax : out Boolean) --# derives Base, --# Base_Found, --# Core_String, --# Decimal_Point_Found, --# Exponent_Found, --# Exp_Sign, --# Exp_String, --# Legal_Syntax, --# Places_After_Point from S; -- NOTES -- BaseString will be set to "10" if Base_Found = False -- Exp_String is "0" if ExpFound = False -- Exp_Sign is plus if ExpFound = False -- PlacesAferPoint is 0 if Decimal_Point_Found = False -- Legal_Syntax only implies that String looks like an Ada literal is separate; ------------------------------------------------------------------------- -- PartToBits - converts a Part into a Bits array, with LSB in element 0 -- All insignificant MSBs are set to False ------------------------------------------------------------------------- function PartToBits (A : in Part) return Bits is R : Bits; Power : Part; Q : Division_Result_Type; begin R := ZeroBits; Power := OnePart; for J in BinaryLengthRange loop Q := DivPart (DivPart (A, Power).Quotient, TwoPart); R (J) := Q.Remnant /= ZeroPart; Power := SingleDigitMult (Power, 2); exit when Power.Overflowed; end loop; return R; 
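      -- Editor's note: worked example of the loop above.  For A = 6 the
      -- successive powers 1, 2 and 4 give quotients 6, 3 and 1; halving
      -- each leaves remainders 0, 1 and 1, so R (0 .. 2) is
      -- (False, True, True), i.e. binary 110 with the least significant
      -- bit in element 0.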
end PartToBits; ------------------------------------------------------------------------- -- BitsToPart - converts a Bits array into a Part, assuming LSB in -- element 0. ------------------------------------------------------------------------- function BitsToPart (B : in Bits) return Part is P : Part; Power : Part; begin P := ZeroPart; Power := OnePart; for J in BinaryLengthRange loop if B (J) then P := AddPart (P, Power); end if; Power := SingleDigitMult (Power, 2); exit when Power.Overflowed; end loop; return P; end BitsToPart; ------------------------------------------------------------------------- -------------------------------------------------------------------------- -- Exported Procedures -------------------------------------------------------------------------- -------------------------------------------------------------------------- procedure LiteralToValue (Str : in LexTokenManager.Lex_String; Num : out Value; OK : out ErrorCode) is separate; ---------------------------------------------------------------------------- function IntegerToValue (I : Integer) return Value is IsPositive : Boolean; Numerator : Part; ValSoFar : Integer; NextDigit : Digit; Length : LengthRange; begin --IntegerToValue Numerator := ZeroPart; IsPositive := I >= 0; ValSoFar := abs (I); if ValSoFar /= 0 then Length := 1; loop NextDigit := Digit (ValSoFar mod 10); ValSoFar := ValSoFar / 10; Numerator.Numerals (Length) := NextDigit; exit when ValSoFar = 0; Length := Length + 1; end loop; Numerator.Length := Length; end if; return Value'(Numerator => Numerator, Denominator => OnePart, IsPositive => IsPositive, Sort => IntegerValue); end IntegerToValue; --------------------------------------------------------------------------- procedure StorageRep (Num : in Value; Rep : out LexTokenManager.Lex_String) --670 is Str : E_Strings.T := E_Strings.Empty_String; StoreRep : LexTokenManager.Lex_String; procedure BuildString (IsPos : in Boolean; PartVal : in Part) --# global in out Str; --# derives Str from *, --# IsPos, --# PartVal; is begin if not IsPos then E_Strings.Append_Char (E_Str => Str, Ch => '-'); end if; for I in LengthRange range 1 .. PartVal.Length loop E_Strings.Append_Char (E_Str => Str, Ch => DigitToChar (PartVal.Numerals (I))); end loop; end BuildString; ----------------- procedure AppendDenominator (PartVal : in Part) --# global in out Str; --# derives Str from *, --# PartVal; is begin E_Strings.Append_Char (E_Str => Str, Ch => '/'); for I in LengthRange range 1 .. 
PartVal.Length loop E_Strings.Append_Char (E_Str => Str, Ch => DigitToChar (PartVal.Numerals (I))); end loop; end AppendDenominator; begin case Num.Sort is when UnknownValue => StoreRep := LexTokenManager.Null_String; when TruthValue => if Num.IsPositive then StoreRep := LexTokenManager.True_Token; else StoreRep := LexTokenManager.False_Token; end if; when IntegerValue => BuildString (Num.IsPositive, Num.Numerator); LexTokenManager.Insert_Examiner_String (Str => Str, Lex_Str => StoreRep); when RealValue => BuildString (Num.IsPositive, Num.Numerator); AppendDenominator (Num.Denominator); LexTokenManager.Insert_Examiner_String (Str => Str, Lex_Str => StoreRep); end case; Rep := StoreRep; end StorageRep; ---------------------------------------------------------------------------- function ValueRep (StoreRep : LexTokenManager.Lex_String) return Value is Str : E_Strings.T; ValIsPositive : Boolean; PartVal : Part; Ptr : E_Strings.Positions; SlashFound : Boolean; Val : Value; ---------------- procedure GetSign --# global in Str; --# out Ptr; --# out ValIsPositive; --# derives Ptr, --# ValIsPositive from Str; is begin if E_Strings.Get_Element (E_Str => Str, Pos => 1) = '-' then ValIsPositive := False; Ptr := 2; else ValIsPositive := True; Ptr := 1; end if; end GetSign; --------------- procedure GetPart (PartVal : out Part) --# global in Str; --# in out Ptr; --# out SlashFound; --# derives PartVal, --# Ptr, --# SlashFound from Ptr, --# Str; is Len : LengthRange; begin SlashFound := False; PartVal := ZeroPart; Len := 0; loop if E_Strings.Get_Element (E_Str => Str, Pos => Ptr) = '/' then Ptr := Ptr + 1; --skip over '/' SlashFound := True; exit; end if; --here we are neither at the end of the string nor have we reached a / Len := Len + 1; PartVal.Numerals (Len) := CharToDigit (E_Strings.Get_Element (E_Str => Str, Pos => Ptr)); exit when Ptr = E_Strings.Get_Length (E_Str => Str); Ptr := Ptr + 1; end loop; PartVal.Length := Len; end GetPart; begin -- ValueRep if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => StoreRep, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq then Val := NoValue; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => StoreRep, Lex_Str2 => LexTokenManager.True_Token) = LexTokenManager.Str_Eq then Val := TrueValue; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => StoreRep, Lex_Str2 => LexTokenManager.False_Token) = LexTokenManager.Str_Eq then Val := FalseValue; else --some Sort of Number Val := ZeroInteger; --set up all fields with suitable default Values Str := LexTokenManager.Lex_String_To_String (Lex_Str => StoreRep); GetSign; Val.IsPositive := ValIsPositive; GetPart (PartVal); Val.Numerator := PartVal; if SlashFound then Val.Sort := RealValue; --# accept F, 10, SlashFound, "SlashFound not used here" & --# F, 10, Ptr, "Ptr not used here"; GetPart (PartVal); --# end accept; Val.Denominator := PartVal; end if; end if; return Val; end ValueRep; ---------------------------------------------------------------------------- function HasNoValue (Num : Value) return Boolean is begin return Num.Sort = UnknownValue; end HasNoValue; ---------------------------------------------------------------------------- function ValueToString (Num : Value) return E_Strings.T is separate; -------------------------------------------------------------------------- procedure ValueToInteger (Num : in Value; Int : out Integer; Ok : out ErrorCode) is Column, IntLocal, PosMult : Natural; begin if Num.Sort = IntegerValue then if 
GreaterPart (Num.Numerator, NaturalToPart (Integer'Last)) then Int := 0; Ok := OverFlow; else IntLocal := 0; PosMult := 1; Column := 1; loop IntLocal := IntLocal + PosMult * Natural (Num.Numerator.Numerals (Column)); exit when Column = Num.Numerator.Length; Column := Column + 1; PosMult := PosMult * 10; end loop; Ok := NoError; if Num.IsPositive then Int := IntLocal; else Int := -IntLocal; end if; end if; else ------------------------its not an Integer Int := 0; Ok := TypeMismatch; end if; end ValueToInteger; -------------------------------------------------------------------------- procedure Add (FirstNum, SecondNum : in Value; Result : out Value; Ok : out ErrorCode) is Res : Value; begin if HasNoValue (FirstNum) or else HasNoValue (SecondNum) then Result := NoValue; Ok := NoError; elsif (FirstNum.Sort = IntegerValue) and then (SecondNum.Sort = IntegerValue) then Res := ZeroInteger; NumeratorAdd (FirstNum, SecondNum, Res); Result := Res; Ok := OverflowCheck (NoError, Res); elsif (FirstNum.Sort = RealValue) and then (SecondNum.Sort = RealValue) then RealAdd (FirstNum, SecondNum, Result, Ok); else Result := NoValue; Ok := TypeMismatch; end if; end Add; -------------------------------------------------------------------------- procedure Negate (Num : in out Value) is begin if Num.Sort /= UnknownValue and then Num.Numerator /= ZeroPart then Num.IsPositive := not Num.IsPositive; end if; end Negate; -------------------------------------------------------------------------- procedure Absolute (Num : in out Value) is begin Num.IsPositive := True; end Absolute; -------------------------------------------------------------------------- procedure ConvertToInteger (Num : in out Value) is begin Num.Sort := IntegerValue; end ConvertToInteger; ---------------------------------------------------------------------------- procedure ConvertToReal (Num : in out Value) is begin Num.Sort := RealValue; end ConvertToReal; ---------------------------------------------------------------------------- procedure Subtract (FirstNum, SecondNum : in Value; Result : out Value; Ok : out ErrorCode) is deductor : Value; begin if HasNoValue (FirstNum) or else HasNoValue (SecondNum) then Result := NoValue; Ok := NoError; else deductor := SecondNum; Negate (deductor); Add (FirstNum, deductor, Result, Ok); end if; end Subtract; -------------------------------------------------------------------------- procedure Multiply (FirstNum, SecondNum : in Value; Result : out Value; Ok : out ErrorCode) is begin if HasNoValue (FirstNum) or else HasNoValue (SecondNum) then Result := NoValue; Ok := NoError; elsif (FirstNum.Sort = IntegerValue) and then (SecondNum.Sort = IntegerValue) then IntegerMultiply (FirstNum, SecondNum, Result, Ok); else RealMultiply (FirstNum, SecondNum, Result, Ok); end if; end Multiply; -------------------------------------------------------------------------- procedure Divide (FirstNum, SecondNum : in Value; Result : out Value; Ok : out ErrorCode) is begin if HasNoValue (FirstNum) or else HasNoValue (SecondNum) then Result := NoValue; Ok := NoError; elsif (FirstNum.Sort = IntegerValue) and then (SecondNum.Sort = IntegerValue) then if SecondNum = ZeroInteger then Result := NoValue; Ok := DivideByZero; else Result := ZeroInteger; Result.Numerator := DivPart (FirstNum.Numerator, SecondNum.Numerator).Quotient; Result.IsPositive := (FirstNum.IsPositive = SecondNum.IsPositive); Ok := NoError; end if; elsif (FirstNum.Sort = RealValue) then if SecondNum = ZeroReal then Result := NoValue; Ok := DivideByZero; else RealDivide 
(FirstNum, SecondNum, Result, Ok); end if; else Result := NoValue; Ok := TypeMismatch; end if; end Divide; -------------------------------------------------------------------------- procedure Remainder (FirstNum, SecondNum : in Value; Result : out Value; Ok : out ErrorCode) is OkLocal : ErrorCode; ResultLocal : Value; begin if HasNoValue (FirstNum) or else HasNoValue (SecondNum) then Result := NoValue; Ok := NoError; else ModRemLegalityCheck (FirstNum, SecondNum, OkLocal); if OkLocal = NoError then ResultLocal := ZeroInteger; ResultLocal.Numerator := DivPart (FirstNum.Numerator, SecondNum.Numerator).Remnant; if not (ResultLocal = ZeroInteger) then ResultLocal.IsPositive := FirstNum.IsPositive; end if; Result := ResultLocal; else --some Sort of error Result := NoValue; end if; Ok := OkLocal; end if; end Remainder; -------------------------------------------------------------------------- procedure Modulus (FirstNum, SecondNum : in Value; Result : out Value; Ok : out ErrorCode) is OkLocal : ErrorCode; ResultLocal : Value; DivisionResult : Division_Result_Type; begin if HasNoValue (FirstNum) or else HasNoValue (SecondNum) then Result := NoValue; Ok := NoError; elsif FirstNum.IsPositive = SecondNum.IsPositive then Remainder (FirstNum, SecondNum, Result, Ok); else -- special handling for MOD with mixed signs ModRemLegalityCheck (FirstNum, SecondNum, OkLocal); if OkLocal = NoError then ResultLocal := ZeroInteger; DivisionResult := DivPart (FirstNum.Numerator, SecondNum.Numerator); if DivisionResult.Remnant = ZeroPart then -- modulus is zero Result := ZeroInteger; else -- modulus is non zero ResultLocal.Numerator := SubtractPart (MultPart (SecondNum.Numerator, AddPart (DivisionResult.Quotient, OnePart)), FirstNum.Numerator); ResultLocal.IsPositive := SecondNum.IsPositive; Result := ResultLocal; end if; -- of either the zero remainder or actual remainder case Ok := OkLocal; else -----------------------------some Sort of error Result := NoValue; Ok := OkLocal; end if; end if; end Modulus; -------------------------------------------------------------------------- function IsAPositivePowerOf2 (Num : in Value) return Boolean is NormNum : Value; DR : Division_Result_Type; D : Part; IsAPowerOfTwo : Boolean; begin IsAPowerOfTwo := True; if Num.Sort = IntegerValue and then Num.IsPositive then NormNum := Num; Normalize (NormNum); D := NormNum.Numerator; if D /= ZeroPart then -- A positive power of 2 has a single "1" digit in binary, -- so if we shift right (i.e. divide by 2) repeatedly, we should -- never get a remainder until the current value is 1. while D /= ZeroPart loop DR := DivPart (D, TwoPart); if D /= OnePart and DR.Remnant /= ZeroPart then -- We have a non-zero remainder, and D is not 1, so -- the original number could not have been a power of 2. 
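            -- Editor's note: for example, 8 halves to 4, 2 and then 1 with no
            -- remainder at any step, so it is accepted; 12 halves to 6 and
            -- then to 3, which is odd and not 1, so it is rejected here.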
IsAPowerOfTwo := False; exit; end if; D := DR.Quotient; end loop; else IsAPowerOfTwo := False; end if; else IsAPowerOfTwo := False; end if; return IsAPowerOfTwo; end IsAPositivePowerOf2; -------------------------------------------------------------------------- function BoolToValue (B : Boolean) return Value is Result : Value; begin if B then Result := TrueValue; else Result := FalseValue; end if; return Result; end BoolToValue; ---------------------------------------------------------------------------- procedure Greater (FirstNum, SecondNum : in Value; Result : out Value; Ok : out ErrorCode) is BoolResult : Boolean; OkLocal : ErrorCode; begin if HasNoValue (FirstNum) or else HasNoValue (SecondNum) then Result := NoValue; OkLocal := NoError; else GreaterLocal (FirstNum, SecondNum, BoolResult, OkLocal); if OkLocal = NoError then Result := BoolToValue (BoolResult); else Result := NoValue; end if; end if; Ok := OkLocal; end Greater; -------------------------------------------------------------------------- procedure Lesser (FirstNum, SecondNum : in Value; Result : out Value; Ok : out ErrorCode) is BoolResult : Boolean; OkLocal : ErrorCode; begin if HasNoValue (FirstNum) or else HasNoValue (SecondNum) then Result := NoValue; OkLocal := NoError; else GreaterLocal (SecondNum, FirstNum, BoolResult, OkLocal); if OkLocal = NoError then Result := BoolToValue (BoolResult); else Result := NoValue; end if; end if; Ok := OkLocal; end Lesser; ---------------------------------------------------------------------------- procedure FloorCeilInternal (Val : in Value; DoFloor : Boolean; Result : out Value; OK : out ErrorCode) --# derives OK, --# Result from DoFloor, --# Val; is Num : Value; Denom : Value; Temp_Swap : Part; Mod_Val : Value; New_Num : Value; Error : ErrorCode; Ceil_Temp : Value; Final_Result : Value; begin -- floor (Num / Denom) = (Num - Num mod Denom) / Denom -- if_zero (val, result) = if val = 0 then (result) else (0); -- ceiling (Num / Denom) = (Nun - Num mod Denom + if_zero (Num mod Denom, Denom)) / -- Denom if Val.Sort = IntegerValue then Result := Val; OK := NoError; elsif Val.Sort = RealValue then Num := Val; Num.Denominator := OnePart; Num.Sort := IntegerValue; Normalize (Num); Denom := Val; Temp_Swap := Denom.Denominator; Denom.Denominator := OnePart; Denom.Numerator := Temp_Swap; Denom.Sort := IntegerValue; Denom.IsPositive := True; Normalize (Denom); Modulus (Num, Denom, Mod_Val, Error); if not DoFloor then -- doing a ceiling operation, so check if addition req'd Greater (Mod_Val, ZeroInteger, Ceil_Temp, Error); if Ceil_Temp = TrueValue then Add (Num, Denom, Ceil_Temp, Error); Num := Ceil_Temp; end if; end if; if Error = NoError then Subtract (Num, Mod_Val, New_Num, Error); if Error = NoError then Normalize (New_Num); Divide (New_Num, Denom, Final_Result, Error); if Error /= NoError then Result := NoValue; OK := Error; else Final_Result.Sort := RealValue; Normalize (Final_Result); Result := Final_Result; OK := NoError; end if; else Result := NoValue; OK := Error; end if; else Result := NoValue; OK := NoError; end if; elsif HasNoValue (Val) then OK := NoError; Result := NoValue; else OK := IllegalValue; Result := NoValue; end if; end FloorCeilInternal; ---------------------------------------------------------------------------- procedure Floor (Val : in Value; Result : out Value; OK : out ErrorCode) is begin FloorCeilInternal (Val, True, Result, OK); end Floor; ---------------------------------------------------------------------------- procedure Ceiling (Val : in Value; Result : out 
Value; OK : out ErrorCode) is begin FloorCeilInternal (Val, False, Result, OK); end Ceiling; -------------------------------------------------------------------------- procedure LesserOrEqual (FirstNum, SecondNum : in Value; Result : out Value; Ok : out ErrorCode) is Res : Value; begin if HasNoValue (FirstNum) or else HasNoValue (SecondNum) then Result := NoValue; Ok := NoError; else Lesser (FirstNum, SecondNum, Res, Ok); if Res.Sort = TruthValue then Result := BoolToValue ((Res = TrueValue) or (FirstNum = SecondNum)); else Result := NoValue; end if; end if; end LesserOrEqual; -------------------------------------------------------------------------- procedure GreaterOrEqual (FirstNum, SecondNum : in Value; Result : out Value; Ok : out ErrorCode) is Res : Boolean; OkLocal : ErrorCode; begin if HasNoValue (FirstNum) or else HasNoValue (SecondNum) then Result := NoValue; OkLocal := NoError; else GreaterLocal (FirstNum, SecondNum, Res, OkLocal); if OkLocal = NoError then Result := BoolToValue (Res or (FirstNum = SecondNum)); else Result := NoValue; end if; end if; Ok := OkLocal; end GreaterOrEqual; -------------------------------------------------------------------------- procedure RaiseByPower (FirstNum, SecondNum : in Value; Result : out Value; Ok : out ErrorCode) is Swap : Part; N : Part; Q : Division_Result_Type; Y : Value; Z : Value; Temp : Value; Err : ErrorCode; begin if HasNoValue (FirstNum) or else HasNoValue (SecondNum) then Result := NoValue; Ok := NoError; elsif (SecondNum.Sort = RealValue) then Result := NoValue; Ok := IllegalOperation; --608--new elsif to catch INT ** NEGATIVE elsif ((FirstNum.Sort = IntegerValue) and (not SecondNum.IsPositive)) then Result := NoValue; Ok := ConstraintError; --822--new elsif to catch 0.0 ** negative elsif ((FirstNum = ZeroReal) and then (not SecondNum.IsPositive)) then Result := NoValue; Ok := ConstraintError; elsif SecondNum = ZeroInteger then -- we must return 1 if FirstNum.Sort = IntegerValue then Result := ZeroInteger; else Result := ZeroReal; end if; Result.Numerator := OnePart; Ok := NoError; else -- we have legal and meaningful operation -- Bit-wise algorithm. See Knuth Volume 2, section 4.6.3. 
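            -- Editor's note: a worked trace for 3 ** 5.  N starts at 5
            -- (binary 101), Y at 1 and Z at 3.  Pass 1: N is odd, so
            -- Y := 1 * 3 = 3, N becomes 2 and Z := 9.  Pass 2: N is even,
            -- N becomes 1 and Z := 81.  Pass 3: N is odd, so Y := 3 * 81 = 243
            -- and N becomes 0, ending the loop with 3 ** 5 = 243.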
N := SecondNum.Numerator; Y := OneInteger; Z := FirstNum; Err := NoError; loop Q := DivPart (N, TwoPart); if Q.Remnant = OnePart then -- N is odd, so -- Y := Y * Z; Multiply (Y, Z, Temp, Err); Y := Temp; end if; -- N := Floor(N/2); N := Q.Quotient; exit when N = ZeroPart; -- Z := Z * Z; Multiply (Z, Z, Temp, Err); Z := Temp; end loop; -- If exponent was negative then form the -- reciprocal of Y if not SecondNum.IsPositive then --reciprocate Swap := Y.Numerator; Y.Numerator := Y.Denominator; Y.Denominator := Swap; Normalize (Y); end if; Result := Y; Ok := Err; end if; end RaiseByPower; ---------------------------------------------------------------------------- -- Support of non-Numeric types ---------------------------------------------------------------------------- procedure ValueToBool (TheVal : in Value; Result : out Boolean; Ok : out ErrorCode) is begin if TheVal.Sort = TruthValue then Ok := NoError; Result := TheVal.IsPositive; else Result := False; Ok := TypeMismatch; end if; end ValueToBool; ---------------------------------------------------------------------------- function AndOp (LeftVal, RightVal : Value) return Value is Result : Value; begin if LeftVal.Sort = TruthValue and RightVal.Sort = TruthValue then -- Boolean "and" Result := FalseValue; Result.IsPositive := LeftVal.IsPositive and RightVal.IsPositive; elsif LeftVal.Sort = IntegerValue and RightVal.Sort = IntegerValue then -- Must be a modular (bitwise) "and" operator Result := Value' (Numerator => BitsToPart (PartToBits (LeftVal.Numerator) and PartToBits (RightVal.Numerator)), Denominator => OnePart, IsPositive => True, Sort => IntegerValue); else Result := NoValue; end if; return Result; end AndOp; ---------------------------------------------------------------------------- function OrOp (LeftVal, RightVal : Value) return Value is Result : Value; begin if LeftVal.Sort = TruthValue and RightVal.Sort = TruthValue then -- Boolean "or" Result := FalseValue; Result.IsPositive := LeftVal.IsPositive or RightVal.IsPositive; elsif LeftVal.Sort = IntegerValue and RightVal.Sort = IntegerValue then -- Must be a modular (bitwise) "or" operator Result := Value' (Numerator => BitsToPart (PartToBits (LeftVal.Numerator) or PartToBits (RightVal.Numerator)), Denominator => OnePart, IsPositive => True, Sort => IntegerValue); else Result := NoValue; end if; return Result; end OrOp; ---------------------------------------------------------------------------- function XorOp (LeftVal, RightVal : Value) return Value is Result : Value; begin if LeftVal.Sort = TruthValue and RightVal.Sort = TruthValue then -- Boolean "xor" Result := FalseValue; Result.IsPositive := LeftVal.IsPositive xor RightVal.IsPositive; elsif LeftVal.Sort = IntegerValue and RightVal.Sort = IntegerValue then -- Must be a modular (bitwise) "xor" operator Result := Value' (Numerator => BitsToPart (PartToBits (LeftVal.Numerator) xor PartToBits (RightVal.Numerator)), Denominator => OnePart, IsPositive => True, Sort => IntegerValue); else Result := NoValue; end if; return Result; end XorOp; ---------------------------------------------------------------------------- procedure NotOp (TheVal : in out Value) is begin if not HasNoValue (TheVal) then TheVal.IsPositive := not TheVal.IsPositive; end if; end NotOp; ---------------------------------------------------------------------------- -- nb this implementation ONLY WORKS iff TheModulus is a positive power of 2! 
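   --  Editor's illustrative sketch (not part of the original source): the
   --  identity ModularNotOp relies on, not X = (T'Modulus - 1) - X from
   --  LRM 4.5.6, can be checked against Ada's built-in modular "not".
   --  For a modulus of 256 and X = 5, both give 255 - 5 = 250.

with Ada.Text_IO;
procedure Modular_Not_Sketch is
   type Byte is mod 256;        -- modulus is a positive power of two
   X : constant Byte := 5;
begin
   Ada.Text_IO.Put_Line (Byte'Image (not X));          -- 250
   Ada.Text_IO.Put_Line (Byte'Image (Byte'Last - X));  -- (256 - 1) - 5 = 250
end Modular_Not_Sketch;

   ----------------------------------------------------------------------------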
procedure ModularNotOp (TheVal : in out Value; TheModulus : in Value) is TypeLast, NumeratorPart : Part; begin -- for type T is 2**N, then the LRM 4.5.6(5) says that -- not X == (T'Last - X) == ((T'Modulus - 1) - X), so... TypeLast := SubtractPart (TheModulus.Numerator, OnePart); NumeratorPart := SubtractPart (TypeLast, TheVal.Numerator); TheVal := Value'(Numerator => NumeratorPart, Denominator => OnePart, IsPositive => True, Sort => IntegerValue); end ModularNotOp; ---------------------------------------------------------------------------- procedure InsideRange (Val, LowerBound, UpperBound : in Value; Result : out Value; Ok : out ErrorCode) is Result1, Result2 : Value; Tmp_Ok1, Tmp_Ok2 : ErrorCode; begin --# accept F, 10, Tmp_Ok1, "Tmp_Ok1 not used here" & --# F, 33, Tmp_Ok1, "Tmp_Ok1 not used here" & --# F, 10, Tmp_Ok2, "Tmp_Ok2 not used here" & --# F, 33, Tmp_Ok2, "Tmp_Ok2 not used here"; if Val.Sort = LowerBound.Sort and then LowerBound.Sort = UpperBound.Sort then GreaterOrEqual (Val, LowerBound, Result1, Tmp_Ok1); LesserOrEqual (Val, UpperBound, Result2, Tmp_Ok2); if Result1.Sort = UnknownValue or else Result2.Sort = UnknownValue then Result := NoValue; else Result := AndOp (Result1, Result2); end if; Ok := NoError; else Result := NoValue; Ok := TypeMismatch; end if; end InsideRange; ---------------------------------------------------------------------------- procedure OutsideRange (Val, LowerBound, UpperBound : in Value; Result : out Value; Ok : out ErrorCode) is Result1, Result2 : Value; Tmp_Ok1, Tmp_Ok2 : ErrorCode; begin --# accept F, 10, Tmp_Ok1, "Tmp_Ok1 not used here" & --# F, 33, Tmp_Ok1, "Tmp_Ok1 not used here" & --# F, 10, Tmp_Ok2, "Tmp_Ok2 not used here" & --# F, 33, Tmp_Ok2, "Tmp_Ok2 not used here"; if Val.Sort = LowerBound.Sort and then LowerBound.Sort = UpperBound.Sort then Greater (Val, -- CFR 430 Flow error here: internal anomaly to be fixed UpperBound, Result1, Tmp_Ok1); Lesser (Val, LowerBound, Result2, Tmp_Ok2); if Result1.Sort = UnknownValue or else Result2.Sort = UnknownValue then Result := NoValue; else Result := OrOp (Result1, Result2); end if; Ok := NoError; else Result := NoValue; Ok := TypeMismatch; end if; end OutsideRange; ---------------------------------------------------------------------------- --attribute ops ---------------------------------------------------------------------------- procedure PredOp (TheVal : in out Value; Ok : out ErrorCode) is DedVal : Value; begin if HasNoValue (TheVal) then Ok := NoError; elsif TheVal.Sort = RealValue then Ok := TypeMismatch; TheVal := NoValue; else Subtract (TheVal, OneInteger, -- to get DedVal, Ok); TheVal := DedVal; end if; end PredOp; ---------------------------------------------------------------------------- procedure SuccOp (TheVal : in out Value; Ok : out ErrorCode) is AddVal : Value; begin if HasNoValue (TheVal) then Ok := NoError; elsif TheVal.Sort = RealValue then Ok := TypeMismatch; TheVal := NoValue; else Add (TheVal, OneInteger, -- to get AddVal, Ok); TheVal := AddVal; end if; end SuccOp; ---------------------------------------------------------------------------- function MakeEnum (Pos : Natural) return Value is Result : Value; begin Result := ZeroInteger; Result.Numerator := NaturalToPart (Pos); return Result; end MakeEnum; ---------------------------------------------------------------------------- function IsIntegerValue (Val : Value) return Boolean is begin return Val.Sort = IntegerValue; end IsIntegerValue; ------------------------------------ function IsRealValue (Val : Value) return Boolean 
is begin return Val.Sort = RealValue; end IsRealValue; ---------------------------------------------------------------------------- function Ada95RealToInteger (TheReal : Value) return Value is Temp : Value; Result : Value; TheRem : Value; RoundResult : Value; Unused : ErrorCode; DivRes : Division_Result_Type; begin --# accept F, 10, Unused, "Unused unused here" & --# F, 33, Unused, "Unused unused here"; if HasNoValue (TheReal) then Result := NoValue; else --get Quotient and remainder DivRes := DivPart (TheReal.Numerator, TheReal.Denominator); --create Integer truncated Part from Quotient Result := Value'(Numerator => DivRes.Quotient, Denominator => OnePart, IsPositive => True, Sort => IntegerValue); --create fractional remainder Part TheRem := Value'(Numerator => DivRes.Remnant, Denominator => TheReal.Denominator, IsPositive => True, Sort => RealValue); --see if remainder >= 0.5 GreaterOrEqual (TheRem, ExactHalf, --to get RoundResult, Unused); if RoundResult = TrueValue then Temp := Result; --needed because of aliasing in next call Add (Temp, OneInteger, --to get Result, Unused); end if; if Result /= ZeroInteger then Result.IsPositive := TheReal.IsPositive; --restore sign end if; end if; return Result; end Ada95RealToInteger; ---------------------------------------------------------------------------- -- Initialisation -------------------------------------------------------------------------- --none end Maths; spark-2012.0.deb/examiner/sem-wf_pragma.adb0000644000175000017500000030424411753202336017455 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
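--  Editor's note (not part of the original source): the Ada95RealToInteger
--  routine that closes package Maths above rounds to the nearest integer,
--  with halves rounded away from zero (the remainder test against
--  ExactHalf, followed by restoring the sign).  Ada's own real-to-integer
--  conversion behaves the same way, so it gives a quick cross-check.

with Ada.Text_IO;
procedure Rounding_Sketch is
begin
   Ada.Text_IO.Put_Line (Integer'Image (Integer (2.4)));    --   2
   Ada.Text_IO.Put_Line (Integer'Image (Integer (2.5)));    --   3
   Ada.Text_IO.Put_Line (Integer'Image (Integer (-2.5)));   --  -3
end Rounding_Sketch;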
-- --============================================================================= separate (Sem) procedure Wf_Pragma (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) is Pack_Ident_Node, Pack_Spec_Node, Id_Node, Subprog_Or_Variable_Node : STree.SyntaxNode; Pack_Ident_Sym : Dictionary.Symbol; Statement_OK : Boolean; Error_To_Raise : Natural; ------------------------------------------------------------------ procedure Wf_Elaborate_Body (Pragma_Node : in STree.SyntaxNode; Pack_Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict from *, --# Pack_Sym, --# Pragma_Node, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Pack_Sym, --# Pragma_Node, --# SPARK_IO.File_Sys, --# STree.Table & --# STree.Table from *, --# Dictionary.Dict, --# LexTokenManager.State, --# Pack_Sym, --# Pragma_Node; --# pre Syntax_Node_Type (Pragma_Node, STree.Table) = SP_Symbols.apragma; --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------------- procedure Wf_Pragma_Atomic (Pragma_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Pragma_Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Pragma_Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Pragma_Node, STree.Table) = SP_Symbols.apragma; --# post STree.Table = STree.Table~; is Arg_Ass_Node, Atomic_Arg, Parent_Apragma : STree.SyntaxNode; Is_Chain : Boolean; Subject_Sym : Dictionary.Symbol; procedure Check_Arg_Count (Node : in out STree.SyntaxNode; Atomic_Arg : out STree.SyntaxNode) --# global in STree.Table; --# derives Atomic_Arg, --# Node from Node, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.argument_association_rep; --# post (Syntax_Node_Type (Node, STree.Table) = SP_Symbols.argument_association or Node = STree.NullNode) and --# (Syntax_Node_Type (Atomic_Arg, STree.Table) = SP_Symbols.identifier or --# Syntax_Node_Type (Atomic_Arg, STree.Table) = SP_Symbols.ADA_expression); is begin while Syntax_Node_Type (Node => Node) = SP_Symbols.argument_association_rep loop -- ASSUME Node = argument_association_rep Node := Child_Node (Current_Node => Node); end loop; -- ASSUME Node = argument_association SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Node) = SP_Symbols.argument_association, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = argument_association in Check_Arg_Count"); Atomic_Arg := Child_Node (Current_Node => Node); -- ASSUME Atomic_Arg = identifier OR ADA_expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Atomic_Arg) = SP_Symbols.identifier or else Syntax_Node_Type (Node => Atomic_Arg) = SP_Symbols.ADA_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Atomic_Arg = identifier 
OR ADA_expression in Check_Arg_Count"); Node := Next_Sibling (Current_Node => Parent_Node (Current_Node => Node)); -- ASSUME Node = argument_association OR NULL SystemErrors.RT_Assert (C => Node = STree.NullNode or else Syntax_Node_Type (Node => Node) = SP_Symbols.argument_association, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = argument_association OR NULL in Check_Arg_Count"); end Check_Arg_Count; procedure Check_Is_Chain (Node : in out STree.SyntaxNode; Is_Chain : out Boolean) --# global in STree.Table; --# derives Is_Chain, --# Node from Node, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.ADA_expression; --# post Is_Chain and --# (Syntax_Node_Type (Node, STree.Table) = SP_Symbols.identifier or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.character_literal or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.string_literal); is Next_Node : STree.SyntaxNode; begin -- Walk down the syntax tree; if there's ever a Next_Sibling -- on the way down, then this isn't a chain - so we can stop the -- walk. Otherwise, find the node at the end of the chain. loop Is_Chain := Next_Sibling (Current_Node => Node) = STree.NullNode; Next_Node := Child_Node (Current_Node => Node); exit when not Is_Chain or else Next_Node = STree.NullNode; Node := Next_Node; end loop; -- ASSUME Node = identifier OR character_literal OR string_literal SystemErrors.RT_Assert (C => Is_Chain and then (Syntax_Node_Type (Node => Node) = SP_Symbols.identifier or else Syntax_Node_Type (Node => Node) = SP_Symbols.character_literal or else Syntax_Node_Type (Node => Node) = SP_Symbols.string_literal), Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = ADA_expression in Check_Is_Chain"); end Check_Is_Chain; function Check_Location (Node_To_Check : STree.SyntaxNode) return Boolean --# global in STree.Table; --# pre Syntax_Node_Type (Node_To_Check, STree.Table) = SP_Symbols.compilation_unit or --# Syntax_Node_Type (Node_To_Check, STree.Table) = SP_Symbols.pragma_rep or --# Syntax_Node_Type (Node_To_Check, STree.Table) = SP_Symbols.initial_declarative_item_rep or --# Syntax_Node_Type (Node_To_Check, STree.Table) = SP_Symbols.later_declarative_item_rep or --# Syntax_Node_Type (Node_To_Check, STree.Table) = SP_Symbols.statement or --# Syntax_Node_Type (Node_To_Check, STree.Table) = SP_Symbols.code_insertion or --# Syntax_Node_Type (Node_To_Check, STree.Table) = SP_Symbols.visible_part_rep or --# Syntax_Node_Type (Node_To_Check, STree.Table) = SP_Symbols.basic_declarative_item_rep or --# Syntax_Node_Type (Node_To_Check, STree.Table) = SP_Symbols.renaming_declaration_rep or --# Syntax_Node_Type (Node_To_Check, STree.Table) = SP_Symbols.task_pragma or --# Syntax_Node_Type (Node_To_Check, STree.Table) = SP_Symbols.protected_operation_declaration_rep or --# Syntax_Node_Type (Node_To_Check, STree.Table) = SP_Symbols.body_stub; is Node_Type : SP_Symbols.SP_Symbol; begin Node_Type := Syntax_Node_Type (Node => Node_To_Check); -- allows use in declarative parts of subprogram, package bodies, -- package visible parts and package private parts. return Node_Type = SP_Symbols.initial_declarative_item_rep or else Node_Type = SP_Symbols.basic_declarative_item_rep or else Node_Type = SP_Symbols.visible_part_rep; end Check_Location; ------------------------------------------------------------------------- -- SEPR 2253 introduced the need for objects which are atomic but of a -- predefined type like Boolean, Character, or System.Address. 
-- RavenSPARK forbids Atomic on objects, and we can't apply pragma Atomic -- to these types, since they are predefined. -- Therefore, we allow a record type to be Atomic if it has a single -- field which ((is predefined and a scalar basetype) or (is System.Address)) ------------------------------------------------------------------------- function Is_Potentially_Atomic_Record_Type (Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) return Boolean --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; is Result : Boolean; The_Component : Dictionary.Symbol; The_Type : Dictionary.Symbol; Package_System : Dictionary.Symbol; Type_Address : Dictionary.Symbol; begin if Dictionary.IsRecordTypeMark (Sym, Scope) then if Dictionary.GetNumberOfComponents (Sym) = 1 and then not Dictionary.TypeIsTagged (Sym) then The_Component := Dictionary.GetRecordComponent (Sym, 1); The_Type := Dictionary.GetType (The_Component); -- Acceptable if it's predefined, scalar, and not a subtype if Dictionary.IsPredefined (The_Type) and then Dictionary.TypeIsScalar (The_Type) and then not Dictionary.IsSubtype (The_Type) then Result := True; else -- Not predefined and scalar, so check for special -- case of System.Address Package_System := Dictionary.LookupItem (Name => LexTokenManager.System_Token, Scope => Dictionary.GlobalScope, Context => Dictionary.ProgramContext, Full_Package_Name => False); if not Dictionary.Is_Null_Symbol (Package_System) then Type_Address := Dictionary.LookupSelectedItem (Prefix => Package_System, Selector => LexTokenManager.Address_Token, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Package_System), Context => Dictionary.ProgramContext); Result := Dictionary.Types_Are_Equal (Left_Symbol => The_Type, Right_Symbol => Type_Address, Full_Range_Subtype => False); else -- can't find package System, so Result := False; end if; end if; else -- Record with 0 or >= 2 fields, or a tagged record Result := False; end if; else -- Not a record type at all Result := False; end if; return Result; end Is_Potentially_Atomic_Record_Type; begin -- Wf_Pragma_Atomic Arg_Ass_Node := Next_Sibling (Current_Node => Child_Node (Current_Node => Pragma_Node)); -- ASSUME Arg_Ass_Node = argument_association_rep OR NULL if Syntax_Node_Type (Node => Arg_Ass_Node) = SP_Symbols.argument_association_rep then -- ASSUME Arg_Ass_Node = argument_association_rep Check_Arg_Count (Node => Arg_Ass_Node, Atomic_Arg => Atomic_Arg); -- ASSUME Arg_Ass_Node = argument_association OR NULL if Arg_Ass_Node = STree.NullNode then -- ASSUME Arg_Ass_Node = NULL -- ASSUME Atomic_Arg = identifier OR ADA_expression if Syntax_Node_Type (Node => Atomic_Arg) = SP_Symbols.identifier then -- ASSUME Atomic_Arg = identifier -- The parameter to pragma Atomic must be a simple_name, -- not a named association. 
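-- Illustrative example (hypothetical names): the accepted form is
-- "pragma Atomic (T);"; a named-association form such as
-- "pragma Atomic (X => T);" parses to an identifier node here and is
-- rejected with the error below.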
ErrorHandler.Semantic_Error (Err_Num => 851, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Atomic_Arg), Id_Str => LexTokenManager.Null_String); elsif Syntax_Node_Type (Node => Atomic_Arg) = SP_Symbols.ADA_expression then -- ASSUME Atomic_Arg = ADA_expression Check_Is_Chain (Node => Atomic_Arg, Is_Chain => Is_Chain); if Is_Chain and then Syntax_Node_Type (Node => Atomic_Arg) = SP_Symbols.identifier then -- ASSUME Atomic_Arg = identifier -- happy; found a simple_name; have the identifier Subject_Sym := Dictionary.LookupItem (Name => Node_Lex_String (Node => Atomic_Arg), Scope => Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); if Dictionary.Is_Null_Symbol (Subject_Sym) then -- the pragma refers to an identifier which isn't visible -- at this point. ErrorHandler.Semantic_Error (Err_Num => 1, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Atomic_Arg), Id_Str => Node_Lex_String (Node => Atomic_Arg)); else if (Dictionary.IsTypeMark (Subject_Sym) and then not Dictionary.IsSubtype (Subject_Sym) and then (Dictionary.IsScalarTypeMark (Subject_Sym, Scope) or else Is_Potentially_Atomic_Record_Type (Sym => Subject_Sym, Scope => Scope))) then -- OK; the pragma refers to a scalar base type or to -- a record type that may be Atomic Parent_Apragma := Parent_Node (Current_Node => Pragma_Node); -- ASSUME Parent_Apragma = compilation_unit OR -- pragma_rep OR -- initial_declarative_item_rep OR -- later_declarative_item_rep OR -- statement OR -- code_insertion OR -- visible_part_rep OR -- basic_declarative_item_rep OR -- renaming_declaration_rep OR -- task_pragma OR -- protected_operation_declaration_rep OR -- body_stub SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Parent_Apragma) = SP_Symbols.compilation_unit or else Syntax_Node_Type (Node => Parent_Apragma) = SP_Symbols.pragma_rep or else Syntax_Node_Type (Node => Parent_Apragma) = SP_Symbols.initial_declarative_item_rep or else Syntax_Node_Type (Node => Parent_Apragma) = SP_Symbols.later_declarative_item_rep or else Syntax_Node_Type (Node => Parent_Apragma) = SP_Symbols.statement or else Syntax_Node_Type (Node => Parent_Apragma) = SP_Symbols.code_insertion or else Syntax_Node_Type (Node => Parent_Apragma) = SP_Symbols.visible_part_rep or else Syntax_Node_Type (Node => Parent_Apragma) = SP_Symbols.basic_declarative_item_rep or else Syntax_Node_Type (Node => Parent_Apragma) = SP_Symbols.renaming_declaration_rep or else Syntax_Node_Type (Node => Parent_Apragma) = SP_Symbols.task_pragma or else Syntax_Node_Type (Node => Parent_Apragma) = SP_Symbols.protected_operation_declaration_rep or else Syntax_Node_Type (Node => Parent_Apragma) = SP_Symbols.body_stub, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Parent_Apragma = compilation_unit OR pragma_rep OR " & "initial_declarative_item_rep OR later_declarative_item_rep OR statement OR code_insertion OR " & "visible_part_rep OR basic_declarative_item_rep OR renaming_declaration_rep OR task_pragma OR " & "protected_operation_declaration_rep OR body_stub in Wf_Pragma_Atomic"); if Check_Location (Node_To_Check => Parent_Apragma) and then Dictionary.GetScope (Subject_Sym) = Scope then -- it's also in a legitimate part of the syntax tree, -- and in the same scope as the variable or type it identifies. 
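-- Illustrative example (hypothetical names) of a placement that passes
-- both checks above:
--    type Flag is record B : Boolean; end record;
--    pragma Atomic (Flag);
-- i.e. the pragma sits in the same declarative part and the same scope
-- as the (potentially atomic) type it names.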
STree.Set_Node_Lex_String (Sym => Subject_Sym, Node => Atomic_Arg); Dictionary.SetTypeAtomic (Subject_Sym); else -- the pragma isn't legal here ErrorHandler.Semantic_Error (Err_Num => 852, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Atomic_Arg), Id_Str => LexTokenManager.Null_String); end if; else -- the argument to pragma Atomic must be a scalar base type, -- or a record type that is potentially Atomic. -- it's not here, issue an error ErrorHandler.Semantic_Error (Err_Num => 853, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Atomic_Arg), Id_Str => LexTokenManager.Null_String); end if; end if; else -- The parameter to pragma Atomic must be a simple_name ErrorHandler.Semantic_Error (Err_Num => 851, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Atomic_Arg), Id_Str => LexTokenManager.Null_String); end if; end if; elsif Syntax_Node_Type (Node => Arg_Ass_Node) = SP_Symbols.argument_association then -- ASSUME Arg_Ass_Node = argument_association -- pragma atomic takes exactly one argument, more than one was found -- in this case, so issue an error ErrorHandler.Semantic_Error (Err_Num => 854, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Arg_Ass_Node), Id_Str => LexTokenManager.Null_String); end if; elsif Arg_Ass_Node = STree.NullNode then -- ASSUME Arg_Ass_Node = NULL -- no argument association was found; so no parameters were supplied. ErrorHandler.Semantic_Error (Err_Num => 854, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Pragma_Node), Id_Str => LexTokenManager.Null_String); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Arg_Ass_Node = argument_association_rep OR NULL in Wf_Pragma_Atomic"); end if; end Wf_Pragma_Atomic; -------------------------------------------------------------------- function We_Are_Not_In_A_Protected_Type (Scope : Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; is begin return not Dictionary.IsType (Dictionary.GetRegion (Scope)); end We_Are_Not_In_A_Protected_Type; -------------------------------------------------------------------- procedure Wf_Attach_Handler (Pragma_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Pragma_Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Pragma_Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Pragma_Node, STree.Table) = SP_Symbols.apragma; --# post STree.Table = STree.Table~; is separate; -------------------------------------------------------------------- procedure Wf_Main_Program_Priority (Id_Node : in STree.SyntaxNode; Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# LexTokenManager.State, --# STree.Table from CommandLineData.Content, --# Dictionary.Dict, --# Id_Node, --# LexTokenManager.State, 
--# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Id_Node, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Id_Node, STree.Table) = SP_Symbols.identifier and --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.apragma; --# post STree.Table = STree.Table~; is Iterator, Err_Node : STree.SyntaxNode; Sym : Dictionary.Symbol; Error : Boolean; Priority_Val : Maths.Value := Maths.NoValue; Priority_Lex : LexTokenManager.Lex_String; begin -- Reject if pragma Priority has already been given. if not Dictionary.MainProgramPrioritySupplied then -- -- We restrict the argument to pragma Priority to be a single ADA_primary -- over one of the following: -- (a) numeric_literal that resolves to integer_number -- numeric_literal -- | -- decimal_literal -- | -- integer_number -- (b) ADA_Name that resolves to a simple identifier in the current scope -- that is either a named number or a constant of type System.Priority. -- ADA_Name -- | -- identifier -- This is done by walking down the Child_Nodes of the -- Argument_Association_Rep until the ADA_primary is found, ensuring at -- each step of the way that NextDerivative is null (to ensure a single -- argument which has no operators), and then ensuring that the child of -- the ADA_primary is one of the allowed kinds of node. -- Iterator := Next_Sibling (Current_Node => Id_Node); -- Set the error reporting node. -- ASSUME Iterator = argument_association_rep OR NULL if Iterator = STree.NullNode then -- ASSUME Iterator = NULL Err_Node := Id_Node; elsif Syntax_Node_Type (Node => Iterator) = SP_Symbols.argument_association_rep then -- ASSUME Iterator = argument_association_rep Err_Node := Iterator; else Err_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Iterator = argument_association_rep OR NULL in Wf_Main_Program_Priority"); end if; loop --# assert STree.Table = STree.Table~; Error := Iterator = STree.NullNode or else Next_Sibling (Current_Node => Iterator) /= STree.NullNode; exit when Error or else Syntax_Node_Type (Node => Iterator) = SP_Symbols.ADA_primary; Iterator := Child_Node (Current_Node => Iterator); end loop; if not Error then -- ASSUME Iterator = ADA_primary --# check Syntax_Node_Type (Iterator, STree.Table) = SP_Symbols.ADA_primary; -- Check that we have the right kind of Primary. 
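-- Illustrative examples (hypothetical names) of arguments the walk above
-- accepts:
--    pragma Priority (10);              -- integer_number literal
--    pragma Priority (Main_Priority);   -- local constant with a static
--                                       -- integer value
-- Real or based literals, expressions and selected names are rejected as
-- invalid arguments further down.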
Iterator := Child_Node (Current_Node => Iterator); -- ASSUME Iterator = numeric_literal OR ADA_association_list OR ADA_expression OR ADA_name OR NULL if Iterator = STree.NullNode then -- ASSUME Iterator = NULL Error := True; else -- ASSUME Iterator = numeric_literal OR ADA_association_list OR ADA_expression OR ADA_name case Syntax_Node_Type (Node => Iterator) is when SP_Symbols.numeric_literal => -- ASSUME Iterator = numeric_literal Iterator := Child_Node (Current_Node => Child_Node (Current_Node => Iterator)); -- ASSUME Iterator = integer_number OR real_number OR based_integer OR based_real if Syntax_Node_Type (Node => Iterator) = SP_Symbols.integer_number then Get_Literal_Value (Node => Iterator, Val => Priority_Val); elsif Syntax_Node_Type (Node => Iterator) = SP_Symbols.real_number or else Syntax_Node_Type (Node => Iterator) = SP_Symbols.based_integer or else Syntax_Node_Type (Node => Iterator) = SP_Symbols.based_real then Error := True; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Iterator = integer_number OR real_number OR based_integer OR " & "based_real in Wf_Main_Program_Priority"); end if; when SP_Symbols.ADA_name => -- ASSUME Iterator = ADA_name Iterator := Child_Node (Current_Node => Iterator); -- ASSUME Iterator = ADA_name OR identifier OR character_literal OR string_literal if Syntax_Node_Type (Node => Iterator) = SP_Symbols.identifier then -- ASSUME Iterator = identifier Sym := Dictionary.LookupItem (Name => Node_Lex_String (Node => Iterator), Scope => Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); -- Ensure we have a local constant with a static integer value if not Dictionary.Is_Null_Symbol (Sym) and then Dictionary.Is_Constant (Sym) and then Dictionary.IsStatic (Sym, Scope) and then Dictionary.CompatibleTypes (Scope, Dictionary.GetType (Sym), Dictionary.GetPredefinedIntegerType) then STree.Set_Node_Lex_String (Sym => Sym, Node => Iterator); Priority_Val := Maths.ValueRep (Dictionary.Get_Value (The_Constant => Sym)); else Error := True; end if; elsif Syntax_Node_Type (Node => Iterator) = SP_Symbols.ADA_name or else Syntax_Node_Type (Node => Iterator) = SP_Symbols.character_literal or else Syntax_Node_Type (Node => Iterator) = SP_Symbols.string_literal then Error := True; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Iterator = ADA_name OR identifier OR character_literal OR " & "string_literal in Wf_Main_Program_Priority"); end if; when SP_Symbols.ADA_association_list | SP_Symbols.ADA_expression => Error := True; when others => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Iterator = numeric_literal OR ADA_association_list OR " & "ADA_expression OR ADA_name in Wf_Main_Program_Priority"); end case; end if; end if; if not Error then -- We may be able to do a range check on Priority_Val, if package System is provided. -- The value must be in the range of System.Priority (see RM D.1(8)). Check_Priority_Range (Error_Sym => Dictionary.GetMainProgram, Scope => Scope, Pragma_Kind => Dictionary.Priority, Err_Pos => Node_Position (Node => Err_Node), Value => Priority_Val, Value_Rep => Priority_Lex); -- CheckPriorityRange will report any out of range value. The returned Priority_Lex -- will be NullString if known to be out of range, or else the input value, so we -- add this result to the Dictionary. Dictionary.SetMainProgramPriority (Priority_Lex); else -- Invalid argument for pragma Priority. 
Must be integer literal or local constant -- whose value is static integer. ErrorHandler.Semantic_Error (Err_Num => 911, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Err_Node), Id_Str => LexTokenManager.Null_String); end if; else ErrorHandler.Semantic_Error (Err_Num => 879, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Node_Lex_String (Node => Id_Node)); end if; end Wf_Main_Program_Priority; -------------------------------------------------------------------- procedure Handle_Interface_On_Subprogram (Subprog_Declaration_Node : in STree.SyntaxNode; Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# Subprog_Declaration_Node & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Subprog_Declaration_Node & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# Subprog_Declaration_Node & --# STree.Table from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# Subprog_Declaration_Node; --# pre Syntax_Node_Type (Subprog_Declaration_Node, STree.Table) = SP_Symbols.subprogram_declaration and --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.apragma; --# post STree.Table = STree.Table~; is Name_Node : STree.SyntaxNode; Subprog_Sym : Dictionary.Symbol; Name : LexTokenManager.Lex_String; Error_In_Pragma : Boolean; begin -- find name of subprogram starting from subprogram_declaration Name_Node := Child_Node (Current_Node => Subprog_Declaration_Node); -- ASSUME Name_Node = overriding_indicator OR procedure_specification OR function_specification OR -- proof_function_declaration if Syntax_Node_Type (Node => Name_Node) = SP_Symbols.overriding_indicator then -- ASSUME Name_Node = overriding_indicator Name_Node := Next_Sibling (Current_Node => Name_Node); elsif Syntax_Node_Type (Node => Name_Node) /= SP_Symbols.procedure_specification and then Syntax_Node_Type (Node => Name_Node) /= SP_Symbols.function_specification and then Syntax_Node_Type (Node => Name_Node) /= SP_Symbols.proof_function_declaration then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Name_Node = overriding_indicator OR procedure_specification OR function_specification OR " & "proof_function_declaration in Handle_Interface_On_Subprogram"); end if; -- ASSUME Name_Node = procedure_specification OR function_specification OR proof_function_declaration if Syntax_Node_Type (Node => Name_Node) = SP_Symbols.procedure_specification or else Syntax_Node_Type (Node => Name_Node) = SP_Symbols.function_specification then Name_Node := Child_Node (Current_Node => Child_Node (Current_Node => Name_Node)); -- ASSUME Name_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Name_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Name_Node = identifier 
in Handle_Interface_On_Subprogram"); Name := Node_Lex_String (Node => Name_Node); Subprog_Sym := Dictionary.LookupItem (Name => Name, Scope => Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); Wf_External_Interface (Pragma_Node => Node, Entity_Sym => Subprog_Sym, Error_Found => Error_In_Pragma); if not Error_In_Pragma then Dictionary.AddBody (CompilationUnit => Subprog_Sym, Comp_Unit => ContextManager.Ops.Current_Unit, TheBody => Dictionary.Location'(Start_Position => Node_Position (Node => Node), End_Position => Node_Position (Node => Node)), Hidden => True); -- treat interface procs as hidden end if; elsif Syntax_Node_Type (Node => Name_Node) /= SP_Symbols.proof_function_declaration then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Name_Node = procedure_specification OR function_specification OR " & "proof_function_declaration in Handle_Interface_On_Subprogram"); end if; end Handle_Interface_On_Subprogram; -------------------------------------------------------------------- procedure Handle_Import_On_Variable (Basic_Declaration_Node : in STree.SyntaxNode; Id_Node : in STree.SyntaxNode; Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from Basic_Declaration_Node, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Basic_Declaration_Node, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Id_Node, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Basic_Declaration_Node, STree.Table) = SP_Symbols.basic_declarative_item and --# Syntax_Node_Type (Id_Node, STree.Table) = SP_Symbols.identifier and --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.apragma; --# post STree.Table = STree.Table~; is Var_Dec_Node : STree.SyntaxNode; Name_Node : STree.SyntaxNode; Var_Name : LexTokenManager.Lex_String; Var_Sym : Dictionary.Symbol; Error_In_Pragma : Boolean; Error_Found : Boolean; begin -- Handle_Import_On_Variable -- ASSUME Basic_Declaration_Node = basic_declarative_item -- Grammar: -- basic_declarative_item -- | -- basic_declaration -- | -- object_declaration -- | -- variable_declaration -- | -- identifier_list -- | -- identifier (assumes V : T, not V1, V2, V3 ; T, otherwise there are N identifier_lists) -- -- Checks: -- 1. illegal in Ada 83 -- 2. must be a variable declaration -- 3. name consistency -- 4. no explicit initialization allowed -- 5. mode, init rules and warnings -- Check 1 case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => -- will report unexpected pragma INTERFACE. 
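-- Illustrative note (hypothetical names): under the SPARK 83 profile an
-- external-interface pragma attached to an object declaration, e.g.
--    V : Integer;
--    pragma Interface (C, V);
-- is not supported, so it is reported as an unexpected pragma here.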
ErrorHandler.Semantic_Error (Err_Num => 72, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Node_Lex_String (Node => Id_Node)); when CommandLineData.SPARK95_Onwards => -- Check 2 Var_Dec_Node := Child_Node (Current_Node => Basic_Declaration_Node); -- ASSUME Var_Dec_Node = basic_declaration OR justification_statement OR -- representation_clause OR basic_proof_declaration if Syntax_Node_Type (Node => Var_Dec_Node) = SP_Symbols.basic_declaration then -- ASSUME Var_Dec_Node = basic_declaration Var_Dec_Node := Child_Node (Current_Node => Var_Dec_Node); -- ASSUME Var_Dec_Node = object_declaration OR full_type_declaration OR subtype_declaration if Syntax_Node_Type (Node => Var_Dec_Node) = SP_Symbols.object_declaration then -- ASSUME Var_Dec_Node = object_declaration Var_Dec_Node := Child_Node (Current_Node => Var_Dec_Node); -- ASSUME Var_Dec_Node = constant_declaration OR variable_declaration if Syntax_Node_Type (Node => Var_Dec_Node) = SP_Symbols.variable_declaration then -- ASSUME Var_Dec_Node = variable_declaration -- check 3 Name_Node := Last_Child_Of (Start_Node => Var_Dec_Node); -- ASSUME Name_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Name_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Name_Node = identifier in Handle_Import_On_Variable"); Var_Name := Node_Lex_String (Node => Name_Node); -- Potentially OK to add pragma to variable in dictionary, -- for which we will need a symbol Var_Sym := Dictionary.LookupItem (Name => Var_Name, Scope => Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); -- If the pragma Import "looks right" in that it names the -- variable declared immediately above it, LookupItem still might -- return NullSymbol if the variable declaration itself was illegal - -- for example, in the case of a missing own annotation. If LookupItem -- does return NullSymbol, then do nothing since an error message on the -- offending variable declaration will have already been issued. 
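-- Illustrative example (hypothetical names) of the pattern handled here:
--    V : T;
--    pragma Import (C, V);
-- Var_Sym is the symbol for V; it is only null when the declaration of V
-- was itself rejected, in which case the earlier error suffices and the
-- pragma is ignored.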
if not Dictionary.Is_Null_Symbol (Var_Sym) then -- use existing wf to check name consistency Wf_External_Interface (Pragma_Node => Node, Entity_Sym => Var_Sym, Error_Found => Error_In_Pragma); if not Error_In_Pragma then -- since the pragma import has just been checked to ensure it directly -- follows a variable declaration, the above look-up must always succeed SystemErrors.RT_Assert (C => Dictionary.Is_Variable (Var_Sym), Sys_Err => SystemErrors.Assertion_Failure, Msg => "Variable not found in Handle_Import_On_Variable"); -- Check 4 if Dictionary.VariableIsInitialized (Var_Sym) then ErrorHandler.Semantic_Error (Err_Num => 120, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Var_Name); else -- Now we really can add the pragma to the Dictionary -- Following call also marks variable as being initialized "at declaration" Dictionary.AddVariablePragmaImport (Var_Sym); Error_Found := False; -- Check 5 -- First deal with own variables that have a pragma import but aren't in an -- initializes clause and don't have a mode; an existing function gives the desired answer if Unexpected_Initialization (Sym => Var_Sym) then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 333, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Var_Name); end if; -- Now warn in the case where we have ANY variable that lacks a "stream" mode (even if it -- passes the previous test because it is an initialized own variable) if Dictionary.GetOwnVariableOrConstituentMode (Var_Sym) = Dictionary.DefaultMode then Error_Found := True; ErrorHandler.Semantic_Warning (Err_Num => 350, Position => Node_Position (Node => Node), Id_Str => Var_Name); end if; if not Error_Found then STree.Set_Node_Lex_String (Sym => Var_Sym, Node => Name_Node); end if; end if; end if; end if; elsif Syntax_Node_Type (Node => Var_Dec_Node) = SP_Symbols.constant_declaration then -- ASSUME Var_Dec_Node = constant_declaration ErrorHandler.Semantic_Error (Err_Num => 72, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Node_Lex_String (Node => Id_Node)); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Var_Dec_Node = constant_declaration OR " & "variable_declaration in Handle_Import_On_Variable"); end if; elsif Syntax_Node_Type (Node => Var_Dec_Node) = SP_Symbols.full_type_declaration or else Syntax_Node_Type (Node => Var_Dec_Node) = SP_Symbols.subtype_declaration then -- ASSUME Var_Dec_Node = full_type_declaration OR subtype_declaration ErrorHandler.Semantic_Error (Err_Num => 72, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Node_Lex_String (Node => Id_Node)); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Var_Dec_Node = object_declaration OR full_type_declaration OR " & "subtype_declaration in Handle_Import_On_Variable"); end if; elsif Syntax_Node_Type (Node => Var_Dec_Node) = SP_Symbols.justification_statement or else Syntax_Node_Type (Node => Var_Dec_Node) = SP_Symbols.representation_clause or else Syntax_Node_Type (Node => Var_Dec_Node) = SP_Symbols.basic_proof_declaration then -- ASSUME Var_Dec_Node = justification_statement OR representation_clause OR basic_proof_declaration ErrorHandler.Semantic_Error (Err_Num => 72, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Node_Lex_String (Node => Id_Node)); else SystemErrors.Fatal_Error (Sys_Err =>
SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Var_Dec_Node = basic_declaration OR justification_statement OR " & "representation_clause OR basic_proof_declaration in Handle_Import_On_Variable"); end if; end case; end Handle_Import_On_Variable; -------------------------------------------------------------------- ------------------------------------------------- -- This function checks the number of arguments -- present in a pragma statement by counting the -- number of argument_association_rep symbols. -- This is then compared against known acceptable -- numbers of arguments taken from the LRM. -- -- Return: True if number of arguments OK, -- False otherwise ------------------------------------------------- procedure Check_Arg_Count (Node : in STree.SyntaxNode; Statement_OK : out Boolean; Error_To_Raise : out Natural) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in STree.Table; --# derives Error_To_Raise, --# Statement_OK from CommandLineData.Content, --# LexTokenManager.State, --# Node, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.apragma; is function Number_Of_Args (Pragma_Node : STree.SyntaxNode; Max : Natural) return Natural --# global in STree.Table; --# pre Syntax_Node_Type (Pragma_Node, STree.Table) = SP_Symbols.apragma; --# return Num_Args => (Num_Args <= Max); is Arg_Ass_Node : STree.SyntaxNode; Num_Args : Natural := 0; begin Arg_Ass_Node := Child_Node (Current_Node => Pragma_Node); -- ASSUME Arg_Ass_Node = identifier OR assert_pragma if Syntax_Node_Type (Node => Arg_Ass_Node) = SP_Symbols.identifier then -- ASSUME Arg_Ass_Node = identifier Arg_Ass_Node := Next_Sibling (Current_Node => Arg_Ass_Node); -- ASSUME Arg_Ass_Node = argument_association_rep OR NULL if Syntax_Node_Type (Node => Arg_Ass_Node) = SP_Symbols.argument_association_rep then -- ASSUME Arg_Ass_Node = argument_association_rep while Syntax_Node_Type (Node => Arg_Ass_Node) = SP_Symbols.argument_association_rep and then Num_Args < Max loop --# assert Syntax_Node_Type (Arg_Ass_Node, STree.Table) = SP_Symbols.argument_association_rep and --# Num_Args < Max; Num_Args := Num_Args + 1; Arg_Ass_Node := Child_Node (Current_Node => Arg_Ass_Node); -- ASSUME Arg_Ass_Node = argument_association_rep OR argument_association SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Arg_Ass_Node) = SP_Symbols.argument_association_rep or else Syntax_Node_Type (Node => Arg_Ass_Node) = SP_Symbols.argument_association, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Arg_Ass_Node = argument_association_rep OR argument_association in Number_Of_Args"); end loop; elsif Arg_Ass_Node /= STree.NullNode then Num_Args := 0; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Arg_Ass_Node = argument_association_rep OR NULL in Number_Of_Args"); end if; elsif Syntax_Node_Type (Node => Arg_Ass_Node) /= SP_Symbols.assert_pragma then Num_Args := 0; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Arg_Ass_Node = identifier OR assert_pragma in Number_Of_Args"); end if; return Num_Args; end Number_Of_Args; procedure Check_Arg_Count_95 (Node : in STree.SyntaxNode; Statement_OK : out Boolean; Error_To_Raise : out Natural) --# global in LexTokenManager.State; --# in STree.Table; --# derives Error_To_Raise, --# Statement_OK from LexTokenManager.State, --# Node, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.apragma; is Pragma_Type : LexTokenManager.Lex_String; begin Pragma_Type := 
Node_Lex_String (Node => Child_Node (Current_Node => Node)); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Normalize_Scalars_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Page_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Reviewable_Token) = LexTokenManager.Str_Eq then -- Check for 0 args Statement_OK := Number_Of_Args (Pragma_Node => Node, Max => 1) = 0; Error_To_Raise := 365; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.All_Calls_Remote_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Discard_Names_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Elaborate_Body_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Interrupt_Priority_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Preelaborate_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Pure_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Remote_Call_Interface_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Remote_Types_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Shared_Passive_Token) = LexTokenManager.Str_Eq then -- Check for 0/1 arg Statement_OK := Number_Of_Args (Pragma_Node => Node, Max => 1) = 0 or else Number_Of_Args (Pragma_Node => Node, Max => 2) = 1; Error_To_Raise := 360; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Asynchronous_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Atomic_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Atomic_Components_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Controlled_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Interrupt_Handler_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Linker_Options_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.List_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Locking_Policy_Token) = LexTokenManager.Str_Eq or else 
LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Optimize_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Pack_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Priority_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Queueing_Policy_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Storage_Size_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Task_Dispatching_Policy_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Volatile_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Volatile_Components_Token) = LexTokenManager.Str_Eq then -- Check for 1 arg Statement_OK := Number_Of_Args (Pragma_Node => Node, Max => 2) = 1; Error_To_Raise := 361; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Suppress_Token) = LexTokenManager.Str_Eq then -- Check for 1/2 args Statement_OK := Number_Of_Args (Pragma_Node => Node, Max => 2) = 1 or else Number_Of_Args (Pragma_Node => Node, Max => 3) = 2; Error_To_Raise := 366; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Elaborate_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Elaborate_All_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Inline_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Restrictions_Token) = LexTokenManager.Str_Eq then -- Check for >=1 args Statement_OK := Number_Of_Args (Pragma_Node => Node, Max => 1) /= 0; Error_To_Raise := 363; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Attach_Handler_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Convention_Token) = LexTokenManager.Str_Eq then -- Check for 2 args Statement_OK := Number_Of_Args (Pragma_Node => Node, Max => 3) = 2; Error_To_Raise := 362; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Export_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Import_Token) = LexTokenManager.Str_Eq then -- Check for 2-4 args Statement_OK := Number_Of_Args (Pragma_Node => Node, Max => 3) >= 2 and then Number_Of_Args (Pragma_Node => Node, Max => 5) <= 4; Error_To_Raise := 364; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Inspection_Point_Token) = LexTokenManager.Str_Eq then -- Any number of 
arguments Statement_OK := True; Error_To_Raise := 0; else -- Unknown pragma! Ada 2005? Implementation defined? Statement_OK := True; Error_To_Raise := 0; end if; end Check_Arg_Count_95; procedure Check_Arg_Count_83 (Node : in STree.SyntaxNode; Statement_OK : out Boolean; Error_To_Raise : out Natural) --# global in LexTokenManager.State; --# in STree.Table; --# derives Error_To_Raise, --# Statement_OK from LexTokenManager.State, --# Node, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.apragma; is Pragma_Type : LexTokenManager.Lex_String; begin Pragma_Type := Node_Lex_String (Node => Child_Node (Current_Node => Node)); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Page_Token) = LexTokenManager.Str_Eq then -- Check for 0 args Statement_OK := Number_Of_Args (Pragma_Node => Node, Max => 1) = 0; Error_To_Raise := 365; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Controlled_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.List_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Memory_Size_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Optimize_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Pack_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Priority_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Shared_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Storage_Unit_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.System_Name_Token) = LexTokenManager.Str_Eq then -- Check for 1 arg Statement_OK := Number_Of_Args (Pragma_Node => Node, Max => 2) = 1; Error_To_Raise := 361; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Suppress_Token) = LexTokenManager.Str_Eq then -- Check for 1/2 args Statement_OK := Number_Of_Args (Pragma_Node => Node, Max => 2) = 1 or else Number_Of_Args (Pragma_Node => Node, Max => 3) = 2; Error_To_Raise := 366; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Elaborate_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Pragma_Type, Lex_Str2 => LexTokenManager.Inline_Token) = LexTokenManager.Str_Eq then -- Check for >=1 args Statement_OK := Number_Of_Args (Pragma_Node => Node, Max => 1) /= 0; Error_To_Raise := 363; -- InterfaceToken requires 2 arguments, but always checked -- before entry to this subprogram else -- Unknown pragma! Ada 2005? Implementation defined? 
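-- Unrecognised pragmas are accepted as-is: no argument-count rule is
-- known for them, so Statement_OK is set True and no error is raised.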
Statement_OK := True; Error_To_Raise := 0; end Check_Arg_Count_83; begin -- Check_Arg_Count case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => Check_Arg_Count_83 (Node => Node, Statement_OK => Statement_OK, Error_To_Raise => Error_To_Raise); when CommandLineData.SPARK95_Onwards => Check_Arg_Count_95 (Node => Node, Statement_OK => Statement_OK, Error_To_Raise => Error_To_Raise); end case; end Check_Arg_Count; begin -- Wf_Pragma Id_Node := Child_Node (Current_Node => Node); -- ASSUME Id_Node = identifier OR assert_pragma if Syntax_Node_Type (Node => Id_Node) = SP_Symbols.assert_pragma then -- ASSUME Id_Node = assert_pragma ErrorHandler.A_Pragma (LexTokenManager.Assert_Token, Node_Position (Node => Node)); elsif Syntax_Node_Type (Node => Id_Node) = SP_Symbols.identifier then -- ASSUME Id_Node = identifier if Is_External_Interface (Pragma_Node => Node) then if We_Are_Not_In_A_Protected_Type (Scope => Scope) then -- Import/Interface MAY be ok -- Look for the preceding declaration Subprog_Or_Variable_Node := Parent_Node (Current_Node => Node); -- ASSUME Subprog_Or_Variable_Node = compilation_unit OR -- pragma_rep OR -- initial_declarative_item_rep OR -- later_declarative_item_rep OR -- statement OR -- code_insertion OR -- visible_part_rep OR -- basic_declarative_item_rep OR -- renaming_declaration_rep OR -- task_pragma OR -- protected_operation_declaration_rep OR -- body_stub if Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.visible_part_rep or else Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.basic_declarative_item_rep then -- ASSUME Subprog_Or_Variable_Node = visible_part_rep OR basic_declarative_item_rep Subprog_Or_Variable_Node := Child_Node (Current_Node => Subprog_Or_Variable_Node); -- ASSUME Subprog_Or_Variable_Node = visible_part_rep OR basic_declarative_item_rep OR NULL if Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.visible_part_rep or else Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.basic_declarative_item_rep then -- ASSUME Subprog_Or_Variable_Node = visible_part_rep OR basic_declarative_item_rep Subprog_Or_Variable_Node := Child_Node (Current_Node => Subprog_Or_Variable_Node); -- ASSUME Subprog_Or_Variable_Node = visible_part_rep OR basic_declarative_item_rep OR NULL if Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.visible_part_rep or else Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.basic_declarative_item_rep then Subprog_Or_Variable_Node := Next_Sibling (Current_Node => Subprog_Or_Variable_Node); -- ASSUME Subprog_Or_Variable_Node = basic_declarative_item OR -- private_type_declaration OR -- deferred_constant_declaration OR -- subprogram_declaration OR -- generic_subprogram_instantiation OR -- apragma OR -- renaming_declaration if Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.private_type_declaration or else Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.deferred_constant_declaration or else Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.generic_subprogram_instantiation or else Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.apragma or else Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.renaming_declaration then -- ASSUME Subprog_Or_Variable_Node = private_type_declaration OR -- deferred_constant_declaration OR -- generic_subprogram_instantiation OR -- apragma OR -- renaming_declaration Subprog_Or_Variable_Node := STree.NullNode; elsif
Syntax_Node_Type (Node => Subprog_Or_Variable_Node) /= SP_Symbols.basic_declarative_item and then Syntax_Node_Type (Node => Subprog_Or_Variable_Node) /= SP_Symbols.subprogram_declaration then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Subprog_Or_Variable_Node = basic_declarative_item OR " & "private_type_declaration OR deferred_constant_declaration OR subprogram_declaration OR " & "generic_subprogram_instantiation OR apragma OR renaming_declaration in Wf_Pragma"); end if; elsif Subprog_Or_Variable_Node /= STree.NullNode then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Subprog_Or_Variable_Node = visible_part_rep OR " & "basic_declarative_item_rep OR NULL in Wf_Pragma"); end if; elsif Subprog_Or_Variable_Node /= STree.NullNode then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Subprog_Or_Variable_Node = visible_part_rep OR " & "basic_declarative_item_rep OR NULL in Wf_Pragma"); end if; elsif Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.initial_declarative_item_rep then -- ASSUME Subprog_Or_Variable_Node = initial_declarative_item_rep Subprog_Or_Variable_Node := Child_Node (Current_Node => Subprog_Or_Variable_Node); -- ASSUME Subprog_Or_Variable_Node = initial_declarative_item_rep OR basic_declarative_item OR -- package_declaration OR generic_package_instantiation if Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.initial_declarative_item_rep then -- ASSUME Subprog_Or_Variable_Node = initial_declarative_item_rep Subprog_Or_Variable_Node := Child_Node (Current_Node => Subprog_Or_Variable_Node); -- ASSUME Subprog_Or_Variable_Node = initial_declarative_item_rep OR basic_declarative_item OR -- package_declaration OR generic_package_instantiation if Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.initial_declarative_item_rep then -- ASSUME Subprog_Or_Variable_Node = initial_declarative_item_rep Subprog_Or_Variable_Node := Next_Sibling (Current_Node => Subprog_Or_Variable_Node); -- ASSUME Subprog_Or_Variable_Node = basic_declarative_item OR -- package_declaration OR -- renaming_declaration OR -- use_type_clause OR -- proof_renaming_declaration OR -- apragma if Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.package_declaration or else Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.renaming_declaration or else Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.use_type_clause or else Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.proof_renaming_declaration or else Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.apragma then -- ASSUME Subprog_Or_Variable_Node = package_declaration OR -- renaming_declaration OR -- use_type_clause OR -- proof_renaming_declaration OR -- apragma Subprog_Or_Variable_Node := STree.NullNode; elsif Syntax_Node_Type (Node => Subprog_Or_Variable_Node) /= SP_Symbols.basic_declarative_item then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Subprog_Or_Variable_Node = basic_declarative_item OR package_declaration OR " & "renaming_declaration OR use_type_clause OR proof_renaming_declaration OR apragma in Wf_Pragma"); end if; elsif Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.package_declaration or else Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.generic_package_instantiation then -- ASSUME Subprog_Or_Variable_Node = package_declaration OR 
generic_package_instantiation Subprog_Or_Variable_Node := STree.NullNode; elsif Syntax_Node_Type (Node => Subprog_Or_Variable_Node) /= SP_Symbols.basic_declarative_item then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Subprog_Or_Variable_Node = initial_declarative_item_rep OR " & "basic_declarative_item OR package_declaration OR generic_package_instantiation in Wf_Pragma"); end if; elsif Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.basic_declarative_item or else Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.package_declaration or else Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.generic_package_instantiation then -- ASSUME Subprog_Or_Variable_Node = basic_declarative_item OR package_declaration OR -- generic_package_instantiation Subprog_Or_Variable_Node := STree.NullNode; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Subprog_Or_Variable_Node = initial_declarative_item_rep OR " & "basic_declarative_item OR package_declaration OR generic_package_instantiation in Wf_Pragma"); end if; elsif Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.compilation_unit or else Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.pragma_rep or else Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.later_declarative_item_rep or else Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.statement or else Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.code_insertion or else Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.renaming_declaration_rep or else Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.task_pragma or else Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.protected_operation_declaration_rep or else Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.body_stub then -- ASSUME Subprog_Or_Variable_Node = compilation_unit OR -- pragma_rep OR -- later_declarative_item_rep OR -- statement OR -- code_insertion OR -- renaming_declaration_rep OR -- task_pragma OR -- protected_operation_declaration_rep OR -- body_stub Subprog_Or_Variable_Node := STree.NullNode; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Subprog_Or_Variable_Node = compilation_unit OR pragma_rep OR " & "initial_declarative_item_rep OR later_declarative_item_rep OR statement OR code_insertion OR " & "visible_part_rep OR basic_declarative_item_rep OR renaming_declaration_rep OR task_pragma OR " & "protected_operation_declaration_rep OR body_stub in Wf_Pragma"); end if; -- ASSUME Subprog_Or_Variable_Node = subprogram_declaration OR basic_declarative_item OR NULL --# check Syntax_Node_Type (Subprog_Or_Variable_Node, STree.Table) = SP_Symbols.subprogram_declaration or --# Syntax_Node_Type (Subprog_Or_Variable_Node, STree.Table) = SP_Symbols.basic_declarative_item or --# Subprog_Or_Variable_Node = STree.NullNode; if Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.subprogram_declaration then -- ASSUME Subprog_Or_Variable_Node = subprogram_declaration -- It's a subprogram_declaration, note that pragma Import must immediately follow -- the subprogram declaration so going one step up the syntax tree will find the subprogram -- if one is there. 
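-- Illustrative example (hypothetical names) of the layout this relies on:
--    procedure Reset;
--    pragma Import (C, Reset);
-- Only when nothing intervenes between the declaration and the pragma
-- does the parent chain lead straight to the subprogram_declaration.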
Handle_Interface_On_Subprogram (Subprog_Declaration_Node => Subprog_Or_Variable_Node, Node => Node, Scope => Scope); elsif Syntax_Node_Type (Node => Subprog_Or_Variable_Node) = SP_Symbols.basic_declarative_item then -- ASSUME Subprog_Or_Variable_Node = basic_declarative_item -- it might be a variable declaration Handle_Import_On_Variable (Basic_Declaration_Node => Subprog_Or_Variable_Node, Id_Node => Id_Node, Node => Node, Scope => Scope); elsif Subprog_Or_Variable_Node = STree.NullNode then -- ASSUME Subprog_Or_Variable_Node = NULL -- none of the things where pragma import allowed has been found so it's an error ErrorHandler.Semantic_Error (Err_Num => 72, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Node_Lex_String (Node => Id_Node)); end if; else -- we ARE in a protected type, so pragma import is unexpected ErrorHandler.Semantic_Error (Err_Num => 72, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Node_Lex_String (Node => Id_Node)); end if; -- in protected type -- handle pragma elaborate_body which must be the first -- visible_part_rep in a package specification to be acceptable elsif CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83 and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Node => Id_Node), Lex_Str2 => LexTokenManager.Elaborate_Body_Token) = LexTokenManager.Str_Eq then -- A pragma Elaborate_Body has been found and must be processed. -- First check it is correctly positioned Pack_Spec_Node := Parent_Node (Current_Node => Node); -- ASSUME Pack_Spec_Node = compilation_unit OR -- pragma_rep OR -- initial_declarative_item_rep OR -- later_declarative_item_rep OR -- statement OR -- code_insertion OR -- visible_part_rep OR -- basic_declarative_item_rep OR -- renaming_declaration_rep OR -- task_pragma OR -- protected_operation_declaration_rep OR -- body_stub if Syntax_Node_Type (Node => Pack_Spec_Node) = SP_Symbols.visible_part_rep then -- ASSUME Pack_Spec_Node = visible_part_rep if Child_Node (Current_Node => Child_Node (Current_Node => Pack_Spec_Node)) = STree.NullNode then -- Potentially legal. 
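               -- Illustrative placement (assumed example): the pragma is only potentially
               -- legal when it is the very first item of the visible part, as in
               --
               --    package P is
               --       pragma Elaborate_Body;
               --       ...
               --    end P;
               --
               -- The test above has just established that nothing precedes it here.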
-- Work up chain to package specification node while Syntax_Node_Type (Node => Pack_Spec_Node) /= SP_Symbols.package_specification loop --# assert STree.Table = STree.Table~; Pack_Spec_Node := Parent_Node (Current_Node => Pack_Spec_Node); end loop; -- ASSUME Pack_Spec_Node = package_specification -- find identifier of package Pack_Ident_Node := Child_Node (Current_Node => Child_Node (Current_Node => Pack_Spec_Node)); while Syntax_Node_Type (Node => Pack_Ident_Node) /= SP_Symbols.identifier loop --# assert STree.Table = STree.Table~; Pack_Ident_Node := Next_Sibling (Current_Node => Pack_Ident_Node); end loop; -- ASSUME Pack_Ident_Node = identifier Pack_Ident_Sym := Dictionary.LookupItem (Name => Node_Lex_String (Node => Pack_Ident_Node), Scope => Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); Wf_Elaborate_Body (Pragma_Node => Node, Pack_Sym => Pack_Ident_Sym); else -- unexpected in this position ErrorHandler.Semantic_Error (Err_Num => 72, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Node_Lex_String (Node => Id_Node)); end if; elsif Syntax_Node_Type (Node => Pack_Spec_Node) = SP_Symbols.compilation_unit or else Syntax_Node_Type (Node => Pack_Spec_Node) = SP_Symbols.pragma_rep or else Syntax_Node_Type (Node => Pack_Spec_Node) = SP_Symbols.initial_declarative_item_rep or else Syntax_Node_Type (Node => Pack_Spec_Node) = SP_Symbols.later_declarative_item_rep or else Syntax_Node_Type (Node => Pack_Spec_Node) = SP_Symbols.statement or else Syntax_Node_Type (Node => Pack_Spec_Node) = SP_Symbols.code_insertion or else Syntax_Node_Type (Node => Pack_Spec_Node) = SP_Symbols.basic_declarative_item_rep or else Syntax_Node_Type (Node => Pack_Spec_Node) = SP_Symbols.renaming_declaration_rep or else Syntax_Node_Type (Node => Pack_Spec_Node) = SP_Symbols.task_pragma or else Syntax_Node_Type (Node => Pack_Spec_Node) = SP_Symbols.protected_operation_declaration_rep or else Syntax_Node_Type (Node => Pack_Spec_Node) = SP_Symbols.body_stub then -- ASSUME Pack_Spec_Node = compilation_unit OR -- pragma_rep OR -- initial_declarative_item_rep OR -- later_declarative_item_rep OR -- statement OR -- code_insertion OR -- basic_declarative_item_rep OR -- renaming_declaration_rep OR -- task_pragma OR -- protected_operation_declaration_rep OR -- body_stub -- unexpected in this position ErrorHandler.Semantic_Error (Err_Num => 72, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Node_Lex_String (Node => Id_Node)); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Pack_Spec_Node = compilation_unit OR pragma_rep OR initial_declarative_item_rep OR " & "later_declarative_item_rep OR statement OR code_insertion OR visible_part_rep OR " & "basic_declarative_item_rep OR renaming_declaration_rep OR task_pragma OR " & "protected_operation_declaration_rep OR body_stub in Wf_Pragma"); end if; elsif CommandLineData.Ravenscar_Selected then if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Node => Id_Node), Lex_Str2 => LexTokenManager.Atomic_Token) = LexTokenManager.Str_Eq then -- special handling for Pragma Atomic (...) 
in Ravenscar mode Wf_Pragma_Atomic (Pragma_Node => Node, Scope => Scope); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Node => Id_Node), Lex_Str2 => LexTokenManager.Priority_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Node => Id_Node), Lex_Str2 => LexTokenManager.Interrupt_Priority_Token) = LexTokenManager.Str_Eq then -- Other than pragma Priority in the declarative part of the main program, -- we have an out-of-place priority pragma; these are handled by -- wf_priority_pragma which is called from grammar-specific locations -- where a priority pragma is allowed. if Dictionary.MainProgramExists and then Dictionary.IsMainProgram (Dictionary.GetRegion (Scope)) and then Syntax_Node_Type (Node => Parent_Node (Current_Node => Node)) /= SP_Symbols.statement and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Node => Id_Node), Lex_Str2 => LexTokenManager.Priority_Token) = LexTokenManager.Str_Eq then Wf_Main_Program_Priority (Id_Node => Id_Node, Node => Node, Scope => Scope); else ErrorHandler.Semantic_Error (Err_Num => 879, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Node_Lex_String (Node => Id_Node)); end if; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Node => Id_Node), Lex_Str2 => LexTokenManager.Interrupt_Handler_Token) = LexTokenManager.Str_Eq then ErrorHandler.Semantic_Error (Err_Num => 883, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Node => Id_Node), Lex_Str2 => LexTokenManager.Atomic_Components_Token) = LexTokenManager.Str_Eq then ErrorHandler.Semantic_Error (Err_Num => 842, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Node => Id_Node), Lex_Str2 => LexTokenManager.Volatile_Components_Token) = LexTokenManager.Str_Eq then ErrorHandler.Semantic_Error (Err_Num => 843, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Node => Id_Node), Lex_Str2 => LexTokenManager.Attach_Handler_Token) = LexTokenManager.Str_Eq then Wf_Attach_Handler (Pragma_Node => Node, Scope => Scope); else ErrorHandler.A_Pragma (Node_Lex_String (Node => Id_Node), Node_Position (Node => Node)); end if; else -- not a "special" pragma so handle with normal warning -- Check the number of arguments associated with particular -- pragmas. 
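            -- Descriptive note (assumed from the calls below): Check_Arg_Count clears
            -- Statement_OK when the pragma carries an unexpected number of arguments, in
            -- which case the specific warning Error_To_Raise is reported; otherwise the
            -- pragma is simply noted via ErrorHandler.A_Pragma like any other
            -- unrecognised pragma.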
Check_Arg_Count (Node => Node, Statement_OK => Statement_OK, Error_To_Raise => Error_To_Raise); if not Statement_OK then ErrorHandler.Semantic_Warning (Err_Num => Error_To_Raise, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); else ErrorHandler.A_Pragma (Node_Lex_String (Node => Id_Node), Node_Position (Node => Node)); end if; end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Id_Node = identifier OR assert_pragma in Wf_Pragma"); end if; end Wf_Pragma; spark-2012.0.deb/examiner/dictionary-dynamic_symbol_table.adb0000644000175000017500000001410511753202336023245 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Dictionary) package body Dynamic_Symbol_Table is procedure Initialize (The_Table : out T) is --# hide Initialize; begin The_Table := T'(Vec => ST_Vec.Empty_Vector); end Initialize; function Get_Current_Usage (The_Table : in T) return Natural is --# hide Get_Current_Usage; begin return Natural (ST_Vec.Length (The_Table.Vec)); end Get_Current_Usage; function Get_Info (The_Table : in T; Item : in Valid_Symbol) return Symbol_Info is --# hide Get_Info; begin SystemErrors.RT_Assert (C => Natural (Item) <= Get_Current_Usage (The_Table), Sys_Err => SystemErrors.Invalid_Index, Msg => "Out of bounds access in Dictionary.Dynamic_Symbol_Table"); return ST_Vec.Element (Container => The_Table.Vec, Index => Item); end Get_Info; procedure Set_Info (The_Table : in out T; Item : in Valid_Symbol; Info : in Symbol_Info) --# derives The_Table from *, --# Info, --# Item; is --# hide Set_Info; begin SystemErrors.RT_Assert (C => Natural (Item) <= Get_Current_Usage (The_Table), Sys_Err => SystemErrors.Invalid_Index, Msg => "Out of bounds access in Dictionary.Dynamic_Symbol_Table"); ST_Vec.Replace_Element (Container => The_Table.Vec, Index => Item, New_Item => Info); end Set_Info; procedure Add_Symbol (The_Table : in out T; Discriminant : in Dictionary.SymbolDiscriminant; Ref : in Dictionary.Ref_Type; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; Item : out Dictionary.Symbol) is --# hide Add_Symbol; begin SystemErrors.RT_Assert (C => Get_Current_Usage (The_Table) < Natural (Dictionary.Symbol'Last), Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Too many symbols."); ST_Vec.Append (Container => The_Table.Vec, New_Item => Symbol_Info'(Discriminant => Discriminant, Ref => Ref, Comp_Unit => Comp_Unit, Loc => Loc)); Item := Dictionary.Symbol (Get_Current_Usage (The_Table)); end Add_Symbol; function Get_Symbol_Discriminant (The_Table : in T; Item : in 
Dictionary.Symbol) return Dictionary.SymbolDiscriminant is D : Dictionary.SymbolDiscriminant; begin if Item = Dictionary.NullSymbol then D := Dictionary.Null_Symbol; else D := Get_Info (The_Table, Item).Discriminant; end if; return D; end Get_Symbol_Discriminant; function Get_Symbol_Ref (The_Table : in T; Item : in Dictionary.Symbol) return Dictionary.Ref_Type is begin SystemErrors.RT_Assert (C => Item in Valid_Symbol, Sys_Err => SystemErrors.Invalid_Index, Msg => "Out of bounds access in Dictionary.Dynamic_Symbol_Table"); return Get_Info (The_Table, Item).Ref; end Get_Symbol_Ref; function Get_Symbol_Compilation_Unit (The_Table : in T; Item : in Dictionary.Symbol) return ContextManager.UnitDescriptors is C : ContextManager.UnitDescriptors; begin if Item = Dictionary.NullSymbol then C := ContextManager.NullUnit; else C := Get_Info (The_Table, Item).Comp_Unit; end if; return C; end Get_Symbol_Compilation_Unit; function Get_Symbol_Location (The_Table : in T; Item : in Dictionary.Symbol) return LexTokenManager.Token_Position is L : LexTokenManager.Token_Position; begin if Item = Dictionary.NullSymbol then L := LexTokenManager.Null_Token_Position; else L := Get_Info (The_Table, Item).Loc; end if; return L; end Get_Symbol_Location; procedure Set_Symbol_Location (The_Table : in out T; Item : in Dictionary.Symbol; Location : in LexTokenManager.Token_Position) is Info : Symbol_Info; begin if Item /= Dictionary.NullSymbol then Info := Get_Info (The_Table, Item); Info.Loc := Location; Set_Info (The_Table, Item, Info); end if; end Set_Symbol_Location; end Dynamic_Symbol_Table; spark-2012.0.deb/examiner/sem-dependency_relation-create_full_subprog_dependency.adb0000644000175000017500000004153411753202336027747 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Dependency_Relation) procedure Create_Full_Subprog_Dependency (Node_Pos : in LexTokenManager.Token_Position; Subprog_Sym : in Dictionary.Symbol; Abstraction : in Dictionary.Abstractions; The_Heap : in out Heap.HeapRecord) is Imports_In_Relation, Exports_In_Relation : SeqAlgebra.Seq; ------------------------------- procedure Add_To_Full_Dependency (Sym : in Dictionary.Symbol; Mode : in Dictionary.Modes; Imports_In_Relation, Exports_In_Relation : in SeqAlgebra.Seq; The_Heap : in out Heap.HeapRecord) --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# The_Heap from *, --# Exports_In_Relation, --# Imports_In_Relation, --# Mode, --# Sym, --# The_Heap; is procedure Add_Export (Sym : in Dictionary.Symbol; Exports_In_Relation : in SeqAlgebra.Seq; The_Heap : in out Heap.HeapRecord) --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# The_Heap from *, --# Exports_In_Relation, --# Sym, --# The_Heap; is begin SeqAlgebra.AddMember (The_Heap, Exports_In_Relation, Natural (Dictionary.SymbolRef (Sym))); end Add_Export; --------------- procedure Add_Import (Sym : in Dictionary.Symbol; Imports_In_Relation : in SeqAlgebra.Seq; The_Heap : in out Heap.HeapRecord) --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# The_Heap from *, --# Imports_In_Relation, --# Sym, --# The_Heap; is begin SeqAlgebra.AddMember (The_Heap, Imports_In_Relation, Natural (Dictionary.SymbolRef (Sym))); end Add_Import; begin -- Add_To_Full_Dependency case Mode is when Dictionary.InMode => Add_Import (Sym => Sym, Imports_In_Relation => Imports_In_Relation, The_Heap => The_Heap); when Dictionary.OutMode => Add_Export (Sym => Sym, Exports_In_Relation => Exports_In_Relation, The_Heap => The_Heap); when Dictionary.InOutMode => Add_Import (Sym => Sym, Imports_In_Relation => Imports_In_Relation, The_Heap => The_Heap); Add_Export (Sym => Sym, Exports_In_Relation => Exports_In_Relation, The_Heap => The_Heap); when Dictionary.DefaultMode => -- can't occur for global Add_Import (Sym => Sym, Imports_In_Relation => Imports_In_Relation, The_Heap => The_Heap); when Dictionary.InvalidMode => -- can't ocur null; end case; end Add_To_Full_Dependency; -------------------------------------------------- procedure Add_Parameters_To_Import_Export_Lists (Subprog_Sym : in Dictionary.Symbol; Imports_In_Relation, Exports_In_Relation : in SeqAlgebra.Seq; The_Heap : in out Heap.HeapRecord) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# The_Heap from *, --# Dictionary.Dict, --# Exports_In_Relation, --# Imports_In_Relation, --# Subprog_Sym, --# The_Heap; is It : Dictionary.Iterator; Sym : Dictionary.Symbol; begin It := Dictionary.FirstSubprogramParameter (Subprog_Sym); while not Dictionary.IsNullIterator (It) loop Sym := Dictionary.CurrentSymbol (It); Add_To_Full_Dependency (Sym => Sym, Mode => Dictionary.GetSubprogramParameterMode (Sym), Imports_In_Relation => Imports_In_Relation, Exports_In_Relation => Exports_In_Relation, The_Heap => The_Heap); It := Dictionary.NextSymbol (It); end loop; end Add_Parameters_To_Import_Export_Lists; -------------------------------------------------- procedure Add_Globals_To_Import_Export_Lists (Abstraction : in Dictionary.Abstractions; Subprog_Sym : in Dictionary.Symbol; Imports_In_Relation, Exports_In_Relation : in SeqAlgebra.Seq; The_Heap : in out Heap.HeapRecord) --# global in Dictionary.Dict; --# 
in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# The_Heap from *, --# Abstraction, --# Dictionary.Dict, --# Exports_In_Relation, --# Imports_In_Relation, --# Subprog_Sym, --# The_Heap; is It : Dictionary.Iterator; Sym : Dictionary.Symbol; begin It := Dictionary.FirstGlobalVariable (Abstraction, Subprog_Sym); while not Dictionary.IsNullIterator (It) loop Sym := Dictionary.CurrentSymbol (It); Add_To_Full_Dependency (Sym => Sym, Mode => Dictionary.GetGlobalMode (Abstraction, Subprog_Sym, Sym), Imports_In_Relation => Imports_In_Relation, Exports_In_Relation => Exports_In_Relation, The_Heap => The_Heap); It := Dictionary.NextSymbol (It); end loop; end Add_Globals_To_Import_Export_Lists; -------------------------------------------------- procedure Add_Exports_To_Dictionary (The_Heap : in Heap.HeapRecord; Node_Pos : in LexTokenManager.Token_Position; Subprog_Sym : in Dictionary.Symbol; Abstraction : in Dictionary.Abstractions; Exports_In_Relation : in SeqAlgebra.Seq) --# global in LexTokenManager.State; --# in out Dictionary.Dict; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# Abstraction, --# Exports_In_Relation, --# Subprog_Sym, --# The_Heap & --# SPARK_IO.File_Sys from *, --# Abstraction, --# Dictionary.Dict, --# Exports_In_Relation, --# LexTokenManager.State, --# Node_Pos, --# Subprog_Sym, --# The_Heap; is Member : SeqAlgebra.MemberOfSeq; begin Member := SeqAlgebra.FirstMember (The_Heap, Exports_In_Relation); while not SeqAlgebra.IsNullMember (Member) loop Dictionary.AddExport (Abstraction => Abstraction, TheProcedure => Subprog_Sym, TheExport => Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (SeqAlgebra.Value_Of_Member (The_Heap => The_Heap, M => Member))), ExportReference => Dictionary.Location'(Start_Position => Node_Pos, End_Position => Node_Pos), Annotation => Dictionary.Location'(Start_Position => Node_Pos, End_Position => Node_Pos)); Member := SeqAlgebra.NextMember (The_Heap, Member); end loop; end Add_Exports_To_Dictionary; procedure Possibly_Add_Null_Export (Node_Pos : in LexTokenManager.Token_Position; Abstraction : in Dictionary.Abstractions; Imports_In_Relation, Exports_In_Relation : in SeqAlgebra.Seq; Subprog_Sym : in Dictionary.Symbol; The_Heap : in out Heap.HeapRecord) --# global in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Dictionary.Dict from *, --# Abstraction, --# ContextManager.Ops.Unit_Stack, --# Imports_In_Relation, --# Node_Pos, --# Subprog_Sym, --# The_Heap & --# SPARK_IO.File_Sys from *, --# Abstraction, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Imports_In_Relation, --# LexTokenManager.State, --# Node_Pos, --# Subprog_Sym, --# The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# Abstraction, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Exports_In_Relation, --# Imports_In_Relation, --# Node_Pos, --# Subprog_Sym, --# The_Heap; is Import : SeqAlgebra.MemberOfSeq; External_Variable_Found_In_Import_List : Boolean := False; Global_Variable_Sym : Dictionary.Symbol; begin -- check whether any of the imports are external variables Import := SeqAlgebra.FirstMember (The_Heap, Imports_In_Relation); while not SeqAlgebra.IsNullMember (Import) loop if Dictionary.GetOwnVariableOrConstituentMode (Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (SeqAlgebra.Value_Of_Member (The_Heap => The_Heap, M => Import)))) /= Dictionary.DefaultMode then 
External_Variable_Found_In_Import_List := True; exit; end if; Import := SeqAlgebra.NextMember (The_Heap, Import); end loop; -- if NONE of the imports are external variables then we need to create a suitable -- export by adding the NullVariable to the export set. if not External_Variable_Found_In_Import_List then -- We must first make it a global of the subprogram --# accept Flow, 10, Global_Variable_Sym, "Expected ineffective assignment to OK"; Dictionary.AddGlobalVariable (Abstraction => Abstraction, Subprogram => Subprog_Sym, Variable => Dictionary.GetNullVariable, Mode => Dictionary.OutMode, PrefixNeeded => False, Comp_Unit => ContextManager.Ops.Current_Unit, VariableReference => Dictionary.Location'(Start_Position => Node_Pos, End_Position => Node_Pos), Global_Variable_Sym => Global_Variable_Sym); --# end accept; -- and then we can add it to the export set. Call to CreateFullDerives will do the rest SeqAlgebra.AddMember (The_Heap, Exports_In_Relation, Natural (Dictionary.SymbolRef (Dictionary.GetNullVariable))); end if; --# accept Flow, 33, Global_Variable_Sym, "Expected to be neither referenced nor exported"; end Possibly_Add_Null_Export; begin -- Create_Full_Subprog_Dependency -- Use parameter and global modes to create set of imports and set of exports SeqAlgebra.CreateSeq (The_Heap, Imports_In_Relation); SeqAlgebra.CreateSeq (The_Heap, Exports_In_Relation); Add_Parameters_To_Import_Export_Lists (Subprog_Sym => Subprog_Sym, Imports_In_Relation => Imports_In_Relation, Exports_In_Relation => Exports_In_Relation, The_Heap => The_Heap); Add_Globals_To_Import_Export_Lists (Abstraction => Abstraction, Subprog_Sym => Subprog_Sym, Imports_In_Relation => Imports_In_Relation, Exports_In_Relation => Exports_In_Relation, The_Heap => The_Heap); -- If the above calls to populate the set of imports and exports results in an -- empty sequence of exports but a non-empty sequence of imports then we need -- to do a further check. -- Nothing needs to be done if the imports include external (stream) variables -- because AddDerivesStreamEffects below will make any imported streams into -- an export as well (in order to model stream side-effects. -- We only need to do something if all the imports are "ordinary" -- variables. -- In that case only we make the set of exports into {NullVariable}. 
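      -- Illustrative effect (assumed example): for a subprogram whose only global is
      -- an ordinary import, e.g.
      --    --# global in X;
      -- the synthesised relation is the one that would be written explicitly as
      --    --# derives null from X;
      -- with the null variable standing in as the subprogram's single export.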
if SeqAlgebra.IsEmptySeq (The_Heap, Exports_In_Relation) and then not SeqAlgebra.IsEmptySeq (The_Heap, Imports_In_Relation) then Possibly_Add_Null_Export (Node_Pos => Node_Pos, Abstraction => Abstraction, Imports_In_Relation => Imports_In_Relation, Exports_In_Relation => Exports_In_Relation, Subprog_Sym => Subprog_Sym, The_Heap => The_Heap); end if; Add_Exports_To_Dictionary (The_Heap => The_Heap, Node_Pos => Node_Pos, Subprog_Sym => Subprog_Sym, Abstraction => Abstraction, Exports_In_Relation => Exports_In_Relation); Dictionary.AddDependencyRelation (Abstraction, Subprog_Sym, Dictionary.Location'(Start_Position => Node_Pos, End_Position => Node_Pos)); -- create "closure" dependency relation, all exports from all imports Create_Full_Dependency (Node_Pos => Node_Pos, Subprog_Sym => Subprog_Sym, Abstraction => Abstraction, Import_List => Imports_In_Relation, Export_List => Exports_In_Relation, The_Heap => The_Heap); SeqAlgebra.DisposeOfSeq (The_Heap, Imports_In_Relation); SeqAlgebra.DisposeOfSeq (The_Heap, Exports_In_Relation); Sem.Add_Derives_Stream_Effects (Node_Pos => Node_Pos, Subprog_Sym => Subprog_Sym, Abstraction => Abstraction); if Abstraction = Dictionary.IsRefined then Check_Derives_Consistency (Subprog_Sym => Subprog_Sym, Position => Node_Pos, The_Heap => The_Heap); end if; end Create_Full_Subprog_Dependency; spark-2012.0.deb/examiner/heap.ads0000644000175000017500000001513211753202336015661 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- Heap -- -- Purpose: -- The Heap is a low-level package which is used by higher level modules -- of the FlowAnalyser. A HeapRecord is a collection of storage elements -- that can be allocated to and deallocated from users of the package. -- -- A storage element is not directly available to a user, rather, it is -- referenced via an Atom. Logically a user works with Atoms. -- -- The attributes referenced by an Atom are two values AValue and BValue both -- of type Natural, and two references APointer and BPointer which may be -- used to refer to other Atoms. -- -- There are getter and setter methods for each of the attributes. -- The higher-level packages which use the Heap build various data structures -- from the Atoms and use the value attributes to store instances of various -- items. In the Examiner most of these items are represented by numeric -- indices that select a particular item. Hence, in the Examiner, the value -- attributes of an Atom may be used to refer to objects of different types. 
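--
-- Example:
--   A minimal usage sketch (illustrative only; the real clients are the
--   packages listed under "Clients" below):
--
--      Heap.Initialize (TheHeap);
--      Heap.CreateAtom (TheHeap, A);
--      Heap.UpdateAValue (TheHeap, A, Value => 42);
--      Heap.UpdateAPointer (TheHeap, A, Pointer => Heap.NullAtom);
--      --  ... later, read back with Heap.AValue (TheHeap, A) ...
--      Heap.DisposeOfAtom (TheHeap, A);
--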
-- -- Clients: -- Example users of Heap are the packages SeqAlgebra, RelationalAlgebra and -- RefLists and these packages are themselves used by the FlowAnalyser. -- -- Use: -- It is intended that the Heap is not used directly but only through a -- a higher-level package which provides an interface abstraction such as a -- a set or a sequence and gives a better level of protection against -- incorrect use of the Heap. -- -- Examples of the use of the Heap may be found in the packages SeqAlgebra, -- RelationalAlgebra and RefLists. -- -- Important principles are: -- -- 1. only one HeapRecord object should exist at any one time - the object -- is very large - the Examiner uses just one; -- -- 2. a HeapRecord object must be initialized before it is used; -- -- 3. an Atom must be Created before it can be used; -- -- 4. an Atom should be Disposed when its use is complete but not before -- any Atoms it uniquely references have been Disposed. An Atom that -- has been disposed is recycled and available for reuse when a new Atom -- is Created; -- -- 5. an attribute referred to by an Atom must not be read until it has been -- set using the appropriate update procedure; -- -- 6. the heap automatically extends its capacity. See the package -- Heap_Storage for more details. -- -- Extension: -- It is not expected that any extension will be made to this low-level -- package. -- -------------------------------------------------------------------------------- with Heap_Storage; use type Heap_Storage.Atom; --# inherit Heap_Storage, --# Statistics, --# SystemErrors; package Heap is subtype Atom is Heap_Storage.Atom; NullAtom : constant Atom; type HeapRecord is private; function IsNullPointer (A : Atom) return Boolean; -- Initializes a HeapRecord and sets the initial capacity of the Heap. -- This should be called exactly once after a HeapRecord has been -- declared. procedure Initialize (TheHeap : out HeapRecord); --# derives TheHeap from ; -- Resets the length of TheHeap to be empty, but does -- not re-initialize the capacity of it. This should be -- called when TheHeap is reset for analysis each program -- unit in an Examiner run. 
procedure Reset (TheHeap : in out HeapRecord); --# derives TheHeap from *; procedure CreateAtom (TheHeap : in out HeapRecord; NewAtom : out Atom); --# global in out Statistics.TableUsage; --# derives NewAtom, --# TheHeap from TheHeap & --# Statistics.TableUsage from *, --# TheHeap; procedure DisposeOfAtom (TheHeap : in out HeapRecord; OldAtom : in Atom); --# derives TheHeap from *, --# OldAtom; function APointer (TheHeap : HeapRecord; A : Atom) return Atom; function BPointer (TheHeap : HeapRecord; A : Atom) return Atom; function AValue (TheHeap : HeapRecord; A : Atom) return Natural; function BValue (TheHeap : HeapRecord; A : Atom) return Natural; procedure UpdateAPointer (TheHeap : in out HeapRecord; A, Pointer : in Atom); --# derives TheHeap from *, --# A, --# Pointer; procedure UpdateBPointer (TheHeap : in out HeapRecord; A, Pointer : in Atom); --# derives TheHeap from *, --# A, --# Pointer; procedure UpdateAValue (TheHeap : in out HeapRecord; A : in Atom; Value : in Natural); --# derives TheHeap from *, --# A, --# Value; procedure UpdateBValue (TheHeap : in out HeapRecord; A : in Atom; Value : in Natural); --# derives TheHeap from *, --# A, --# Value; procedure ReportUsage (TheHeap : in HeapRecord); --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage from *, --# TheHeap; private NullAtom : constant Atom := Atom'First; type HeapRecord is record ListOfAtoms : Heap_Storage.Vector; HighMark, NextFreeAtom : Atom; end record; end Heap; spark-2012.0.deb/examiner/sem-find_actual_node.adb0000644000175000017500000000467311753202336020773 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem) procedure Find_Actual_Node (For_Formal : in Dictionary.Symbol; Named_Argument_Assoc_Node : in STree.SyntaxNode; Expression_Node : out STree.SyntaxNode) is It : STree.Iterator; Next_Node : STree.SyntaxNode; begin Expression_Node := STree.NullNode; It := STree.Find_First_Formal_Parameter_Node (From_Root => Named_Argument_Assoc_Node); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.identifier and --# Next_Node = Get_Node (It) and --# STree.Table = STree.Table~; if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Dictionary.GetSimpleName (For_Formal), Lex_Str2 => Node_Lex_String (Node => Next_Node)) = LexTokenManager.Str_Eq then STree.Set_Node_Lex_String (Sym => For_Formal, Node => Next_Node); Expression_Node := Next_Sibling (Current_Node => Parent_Node (Current_Node => Next_Node)); exit; end if; It := STree.NextNode (It); end loop; -- ASSUME Expression_Node = expression OR NULL SystemErrors.RT_Assert (C => Expression_Node = STree.NullNode or else Syntax_Node_Type (Node => Expression_Node) = SP_Symbols.expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Expression_Node = expression OR NULL in Find_Actual_Node"); end Find_Actual_Node; spark-2012.0.deb/examiner/declarations-outputdeclarations.adb0000644000175000017500000001665211753202336023332 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with SystemErrors; separate (Declarations) procedure OutputDeclarations (Heap : in out Cells.Heap_Record; File : in SPARK_IO.File_Type; Rule_File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Write_Rules : in Boolean; EndPosition : in LexTokenManager.Token_Position) is NeededSymbols : Cells.Cell; --------------------------------------------------------------------- function IsLocalOwnVariableWithRefinement (Sym : Dictionary.Symbol; Scope : Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; is Result : Boolean; begin if Dictionary.IsOwnVariable (Sym) then if Dictionary.OwnVariableHasType (Sym, Scope) then -- it has a concrete type so it can't be what we are looking for Result := False; else -- it may be what we are looking for; see if it is local to this package Result := Dictionary.Packages_Are_Equal (Left_Symbol => Dictionary.GetOwner (Sym), Right_Symbol => Dictionary.GetEnclosingPackage (Scope)); end if; else Result := False; end if; return Result; end IsLocalOwnVariableWithRefinement; --------------------------------------------------------------------- procedure GenerateDeclarations (Heap : in out Cells.Heap_Record; UsedSymbols : in Cells.Cell; Scope : in Dictionary.Scopes; NeededSymbols : out Cells.Cell) --# global in AttributeList; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# AttributeList, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Scope, --# UsedSymbols & --# NeededSymbols from AttributeList, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Scope, --# UsedSymbols; is separate; --------------------------------------------------------------------- procedure PrintDeclarations (Heap : in out Cells.Heap_Record; File : in SPARK_IO.File_Type; Rule_File : in SPARK_IO.File_Type; Needed_Symbols : in Cells.Cell; Scope : in Dictionary.Scopes; Write_Rules : in Boolean; End_Position : in LexTokenManager.Token_Position) --# global in AttributeList; --# in BitwiseOpList; --# in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in ProcedureExportList; --# in ReturnSymbol; --# in RootIntegerUsed; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Dictionary.Dict, --# End_Position, --# File, --# Heap, --# LexTokenManager.State, --# Needed_Symbols, --# Rule_File, --# Scope, --# SPARK_IO.File_Sys, --# Write_Rules & --# Heap, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Heap, --# Needed_Symbols, --# Scope, --# Write_Rules & --# SPARK_IO.File_Sys from *, --# AttributeList, --# BitwiseOpList, --# CommandLineData.Content, --# Dictionary.Dict, --# End_Position, --# ErrorHandler.Error_Context, --# File, --# Heap, --# LexTokenManager.State, --# Needed_Symbols, --# ProcedureExportList, --# ReturnSymbol, --# RootIntegerUsed, --# Rule_File, --# Scope, --# Write_Rules; is separate; begin -- OutputDeclarations if Pile.OrderOK (Heap, UsedSymbols) then --Debug.PrintMsg ("UsedSymbols order OK", True); null; else --Debug.PrintMsg ("Dump of UsedSymbols before GenerateDeclarations", True); Pile.PrintPile (Heap, UsedSymbols); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Assertion_Failure, Msg => "UsedSymbols before GenerateDeclarations not in order"); end if; GenerateDeclarations (Heap, UsedSymbols, Scope, 
NeededSymbols); if Pile.OrderOK (Heap, NeededSymbols) then --Debug.PrintMsg ("NeededSymbols order OK", True); null; else --Debug.PrintMsg ("Dump of NeededSymbols after GenerateDeclarations", True); Pile.PrintPile (Heap, NeededSymbols); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Assertion_Failure, Msg => "NeededSymbols after GenerateDeclarations not in order"); end if; PrintDeclarations (Heap, File, Rule_File, NeededSymbols, Scope, Write_Rules, EndPosition); end OutputDeclarations; spark-2012.0.deb/examiner/sem-walk_expression_p-up_wf_aggregate_or_expression.adb0000644000175000017500000004040511753202336027345 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Walk_Expression_P) procedure Up_Wf_Aggregate_Or_Expression (Node : in out STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type) is Parent, Child : STree.SyntaxNode; Name_Exp, Exp_Result : Sem.Exp_Record; Expected_Type : Dictionary.Symbol; Index_Type_Symbol : Dictionary.Symbol; Type_Lower_Bound : Sem.Typ_Type_Bound; Type_Upper_Bound : Sem.Typ_Type_Bound; Aggregate_Flags : Sem.Typ_Agg_Flags; Entry_Counter : Natural; Complete_Rec : CompleteCheck.T; -------------------------------------------------------------- procedure Chain_Up_To_Component_Association (Node : in out STree.SyntaxNode) --# global in STree.Table; --# derives Node from *, --# STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.aggregate_or_expression or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_aggregate_or_expression; --# post STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.component_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_component_association; is begin while STree.Syntax_Node_Type (Node => Node) /= SP_Symbols.component_association and then STree.Syntax_Node_Type (Node => Node) /= SP_Symbols.annotation_component_association loop Node := STree.Parent_Node (Current_Node => Node); end loop; end Chain_Up_To_Component_Association; ------------------------------------------------------------------- -- type is needed at aggregate_or_expression node except if it associated -- with an "others" clause in which case it is needed at component_association -- node; this procedure puts it in the right place procedure Plant_Type (Node : in STree.SyntaxNode; Expected_Type : in Dictionary.Symbol; Parent : in STree.SyntaxNode) --# global in Dictionary.Dict; --# in out STree.Table; --# derives STree.Table from *, --# Expected_Type, --# Node, --# Parent & --# null from Dictionary.Dict; --# pre 
(STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.aggregate_or_expression or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_aggregate_or_expression) and --# (STree.Syntax_Node_Type (Parent, STree.Table) = SP_Symbols.positional_association or --# STree.Syntax_Node_Type (Parent, STree.Table) = SP_Symbols.positional_association_rep or --# STree.Syntax_Node_Type (Parent, STree.Table) = SP_Symbols.annotation_positional_association or --# STree.Syntax_Node_Type (Parent, STree.Table) = SP_Symbols.annotation_positional_association_rep) and --# Parent = STree.Parent_Node (Node, STree.Table) and --# (Dictionary.Is_Null_Symbol (Expected_Type) or Dictionary.IsTypeMark (Expected_Type, Dictionary.Dict)); --# post STree.Table = STree.Table~; is Next_Node, Grand_Parent : STree.SyntaxNode; begin if STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.positional_association or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.annotation_positional_association then -- ASSUME Parent = positional_association OR annotation_positional_association Next_Node := STree.Next_Sibling (Current_Node => Node); -- ASSUME Next_Node = aggregate_or_expression OR annotation_aggregate_or_expression OR NULL if Next_Node = STree.NullNode then -- ASSUME Next_Node = NULL -- we are dealing with an others clause Grand_Parent := STree.Parent_Node (Current_Node => Parent); -- ASSUME Grand_Parent = component_association OR positional_association OR positional_association_rep OR -- annotation_component_association OR annotation_positional_association OR annotation_positional_association_rep SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Grand_Parent) = SP_Symbols.component_association or else STree.Syntax_Node_Type (Node => Grand_Parent) = SP_Symbols.positional_association or else STree.Syntax_Node_Type (Node => Grand_Parent) = SP_Symbols.positional_association_rep or else STree.Syntax_Node_Type (Node => Grand_Parent) = SP_Symbols.annotation_component_association or else STree.Syntax_Node_Type (Node => Grand_Parent) = SP_Symbols.annotation_positional_association or else STree.Syntax_Node_Type (Node => Grand_Parent) = SP_Symbols.annotation_positional_association_rep, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Parent = component_association OR positional_association OR positional_association_rep OR " & "annotation_component_association OR annotation_positional_association OR annotation_positional_association_rep " & " in Plant_Type"); STree.Add_Node_Symbol (Node => Grand_Parent, Sym => Expected_Type); elsif STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.aggregate_or_expression or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_aggregate_or_expression then -- ASSUME Next_Node = aggregate_or_expression OR annotation_aggregate_or_expression STree.Add_Node_Symbol (Node => Node, Sym => Expected_Type); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = aggregate_or_expression OR annotation_aggregate_or_expression OR " & "NULL in Plant_Type"); end if; elsif STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.positional_association_rep or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.annotation_positional_association_rep then -- ASSUME Parent = positional_association_rep OR annotation_positional_association_rep STree.Add_Node_Symbol (Node => Node, Sym => Expected_Type); end if; end Plant_Type; begin -- Up_Wf_Aggregate_Or_Expression Parent := STree.Parent_Node (Current_Node => 
Node); -- ASSUME Parent = component_association OR named_association OR named_association_rep OR -- name_value_property OR positional_association OR positional_association_rep OR -- annotation_named_association OR annotation_named_association_rep OR annotation_component_association OR -- annotation_positional_association OR annotation_positional_association_rep SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.component_association or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.named_association or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.named_association_rep or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.name_value_property or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.positional_association or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.positional_association_rep or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.annotation_named_association or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.annotation_named_association_rep or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.annotation_component_association or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.annotation_positional_association or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.annotation_positional_association_rep, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Parent = component_association OR named_association OR named_association_rep OR " & "name_value_property OR positional_association OR positional_association_rep OR " & "annotation_named_association OR annotation_named_association_rep OR annotation_component_association OR " & "annotation_positional_association OR annotation_positional_association_rep in Up_Wf_Aggregate_Or_Expression"); Child := STree.Child_Node (Current_Node => Node); -- ASSUME Child = aggregate OR expression OR -- annotation_aggregate OR annotation_expression if STree.Syntax_Node_Type (Node => Child) = SP_Symbols.expression or else STree.Syntax_Node_Type (Node => Child) = SP_Symbols.annotation_expression then -- ASSUME Child = expression OR annotation_expression if STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.positional_association_rep or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.annotation_positional_association_rep or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.positional_association or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.annotation_positional_association then -- ASSUME Parent = positional_association OR positional_association_rep OR -- annotation_positional_association OR annotation_positional_association_rep Exp_Stack.Pop (Item => Exp_Result, Stack => E_Stack); Exp_Stack.Pop (Item => Name_Exp, Stack => E_Stack); if Dictionary.IsUnknownTypeMark (Name_Exp.Type_Symbol) then Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); elsif Dictionary.TypeIsArray (Name_Exp.Type_Symbol) then Expected_Type := Dictionary.GetArrayComponent (Name_Exp.Type_Symbol); Plant_Type (Node => Node, Expected_Type => Expected_Type, Parent => Parent); Sem.Assignment_Check (Position => STree.Node_Position (Node => Child), Scope => Scope, Target_Type => Expected_Type, Exp_Result => Exp_Result); Name_Exp.Is_Constant := Name_Exp.Is_Constant and then Exp_Result.Is_Constant; -- if this is not the others clause -- increment the entry counter on the aggregate stack -- nb: we already know that it's positional association here if STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.positional_association_rep or 
else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.annotation_positional_association_rep then -- ASSUME Parent = positional_association_rep OR annotation_positional_association_rep Aggregate_Stack.Pop (Type_Sym => Index_Type_Symbol, Lower_Bound => Type_Lower_Bound, Upper_Bound => Type_Upper_Bound, Agg_Flags => Aggregate_Flags, Counter => Entry_Counter, Complete_Rec => Complete_Rec); if Entry_Counter < Natural'Last then Entry_Counter := Entry_Counter + 1; else Aggregate_Flags.More_Entries_Than_Natural := True; end if; Aggregate_Stack.Push (Type_Sym => Index_Type_Symbol, Lower_Bound => Type_Lower_Bound, Upper_Bound => Type_Upper_Bound, Agg_Flags => Aggregate_Flags, Counter => Entry_Counter, Complete_Rec => Complete_Rec); end if; Name_Exp.Errors_In_Expression := Name_Exp.Errors_In_Expression or else Exp_Result.Errors_In_Expression; Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); elsif Dictionary.TypeIsRecord (Name_Exp.Type_Symbol) then if Name_Exp.Param_Count = Dictionary.GetNumberOfComponents (Name_Exp.Type_Symbol) then Exp_Stack.Push (X => Sem.Unknown_Type_Record, Stack => E_Stack); ErrorHandler.Semantic_Error (Err_Num => 105, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Child), Id_Str => Dictionary.GetSimpleName (Name_Exp.Other_Symbol)); Chain_Up_To_Component_Association (Node => Node); elsif Name_Exp.Param_Count < Dictionary.GetNumberOfComponents (Name_Exp.Type_Symbol) then Name_Exp.Param_Count := Name_Exp.Param_Count + 1; Expected_Type := Dictionary.GetType (Dictionary.GetRecordComponent (Name_Exp.Type_Symbol, Name_Exp.Param_Count)); Plant_Type (Node => Node, Expected_Type => Expected_Type, Parent => Parent); Sem.Assignment_Check (Position => STree.Node_Position (Node => Child), Scope => Scope, Target_Type => Expected_Type, Exp_Result => Exp_Result); Name_Exp.Is_Constant := Name_Exp.Is_Constant and then Exp_Result.Is_Constant; Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); end if; end if; end if; elsif STree.Syntax_Node_Type (Node => Child) = SP_Symbols.aggregate or else STree.Syntax_Node_Type (Node => Child) = SP_Symbols.annotation_aggregate then -- ASSUME Child = aggregate OR annotation_aggregate if STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.positional_association_rep or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.annotation_positional_association_rep then -- ASSUME Parent = positional_association_rep OR annotation_positional_association_rep Exp_Stack.Pop (Item => Name_Exp, Stack => E_Stack); if Dictionary.TypeIsArray (Name_Exp.Type_Symbol) then Aggregate_Stack.Pop (Type_Sym => Index_Type_Symbol, Lower_Bound => Type_Lower_Bound, Upper_Bound => Type_Upper_Bound, Agg_Flags => Aggregate_Flags, Counter => Entry_Counter, Complete_Rec => Complete_Rec); if Entry_Counter < Natural'Last then Entry_Counter := Entry_Counter + 1; else Aggregate_Flags.More_Entries_Than_Natural := True; end if; Aggregate_Stack.Push (Type_Sym => Index_Type_Symbol, Lower_Bound => Type_Lower_Bound, Upper_Bound => Type_Upper_Bound, Agg_Flags => Aggregate_Flags, Counter => Entry_Counter, Complete_Rec => Complete_Rec); end if; Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child = aggregate OR expression OR " & "annotation_aggregate OR annotation_expression in Up_Wf_Aggregate_Or_Expression"); end if; end Up_Wf_Aggregate_Or_Expression; spark-2012.0.deb/examiner/sem-illegal_unconstrained.adb0000644000175000017500000000310411753202336022046 0ustar 
eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) function Illegal_Unconstrained (Left_Type, Right_Type : Dictionary.Symbol) return Boolean is Result : Boolean; begin -- at the point where this is called we know that the RHS/LHS base types are the same. -- We are interested in the case where LHS and RHS are differently -- constrained subtypes of a common unconstrained base type. Result := False; if Dictionary.IsUnconstrainedArrayType (Dictionary.GetRootType (Right_Type)) then Result := not Indexes_Match (Target => Left_Type, Source => Right_Type); end if; return Result; end Illegal_Unconstrained; spark-2012.0.deb/examiner/sem-create_implicit_positive_subtype.adb0000644000175000017500000000752211753202336024343 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) procedure Create_Implicit_Positive_Subtype (String_Length : in Maths.Value; Location : in Dictionary.Location; Index_Constraint : out Dictionary.Symbol) is Constraint_Str : E_Strings.T; Constraint_Lex_Str : LexTokenManager.Lex_String; Upper_Bound_Lex_Str : LexTokenManager.Lex_String; begin -- Create an ExaminerString of the form "Positive__n" where n is the string length Constraint_Str := E_Strings.Copy_String (Str => "Positive__"); -- The value of "n" will not exceed a size that can be printed within an ExaminerString -- so the conversion will not truncate here. 
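   -- Illustrative outcome (assumed example): a string literal of length 3 yields
   -- the name "Positive__3", and the implicit subtype added below behaves, for
   -- proof purposes, like
   --    subtype Positive__3 is Integer range 1 .. 3;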
E_Strings.Append_Examiner_String (E_Str1 => Constraint_Str, E_Str2 => Maths.ValueToString (String_Length)); -- Insert this name into the string table; either we add it an get the LexStr back or, -- if it is already there, we get the existing LexStr back LexTokenManager.Insert_Examiner_String (Str => Constraint_Str, Lex_Str => Constraint_Lex_Str); -- Look up type in Dictionary in case it has previously been added -- Note that we put these implicit subtype in Standard (where Positive itself lives) -- and that we declare them in proof rather than Ada context Index_Constraint := Dictionary.LookupItem (Name => Constraint_Lex_Str, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Dictionary.GetPredefinedPackageStandard), Context => Dictionary.ProofContext, Full_Package_Name => False); SystemErrors.RT_Assert (C => Dictionary.Is_Null_Symbol (Index_Constraint) or else Dictionary.IsTypeMark (Index_Constraint), Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Index_Constraint to be a type in Create_Implicit_Positive_Subtype"); if Dictionary.Is_Null_Symbol (Index_Constraint) then -- not previously added, so we add a new subtype here Maths.StorageRep (String_Length, Upper_Bound_Lex_Str); Dictionary.Add_Integer_Subtype (Name => Constraint_Lex_Str, Static => True, Parent => Dictionary.GetPredefinedIntegerType, Parent_Reference => Location, Lower => LexTokenManager.One_Value, Upper => Upper_Bound_Lex_Str, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Location, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Dictionary.GetPredefinedPackageStandard), Context => Dictionary.ProofContext, The_Subtype => Index_Constraint); end if; end Create_Implicit_Positive_Subtype; spark-2012.0.deb/examiner/casing.ads0000644000175000017500000001060211753202335016204 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with E_Strings; with LexTokenManager; with STree; use type STree.Iterator; --# inherit CommandLineData, --# Dictionary, --# ErrorHandler, --# E_Strings, --# LexTokenManager, --# SPARK_IO, --# SP_Symbols, --# STree; package Casing is -- If the relevant casing switch is enable, this procedure compare -- 2 strings (STR and LEX_STR). The procedure raises a warning if -- the 2 strings have different casing. 
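   -- Illustrative behaviour (assumed example): if a name is declared as "Max_Size"
   -- but a later occurrence is written "MAX_SIZE", the comparison finds the same
   -- identifier with different casing and a warning is reported at that occurrence.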
procedure Check_String_Casing (Str : in E_Strings.T; Lex_Str : in LexTokenManager.Lex_String; Position : in LexTokenManager.Token_Position); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lex_Str, --# Position, --# SPARK_IO.File_Sys, --# Str; -- If the relevant casing switch is enable, this procedure compare -- 2 strings (LEX_STR1 and LEX_STR2). The procedure raises a -- warning if the 2 strings have different casing. procedure Check_Casing (Lex_Str1 : in LexTokenManager.Lex_String; Lex_Str2 : in LexTokenManager.Lex_String; Position : in LexTokenManager.Token_Position); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lex_Str1, --# Lex_Str2, --# Position, --# SPARK_IO.File_Sys; -- This procedure runs CHECK_CASING for all the identifiers in the -- subtree with the top node TOP_NODE. procedure Check_Node_Casing (Top_Node : in STree.SyntaxNode); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Top_Node; end Casing; spark-2012.0.deb/examiner/requiredunits.adb0000644000175000017500000007531211753202336017634 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with SPARK_IO, CommandLineData; -- used for trace/debug statements only, not inherited with SP_Symbols; with SystemErrors; use type SP_Symbols.SP_Symbol; package body RequiredUnits is -- Following hidden trace routine enabled by -debug=u ----------------------------------- procedure Trace (Msg : String) --# derives null from Msg; is --# hide Trace; begin if CommandLineData.Content.Debug.Units then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Msg, 0); end if; end Trace; ------------------------------------------------------------------------------------------ procedure Init (Top_Node : in STree.SyntaxNode; Inherit_Ptr : out STree.SyntaxNode; Unit_Type : out ContextManager.UnitTypes; Unit_Name : out LexTokenLists.Lists) is Inherit_Ptr_Local, Name_Ptr : STree.SyntaxNode; Unit_Type_Local : ContextManager.UnitTypes; Unit_Name_Local : LexTokenLists.Lists; begin -- ASSUME Top_Node = compilation_unit SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Top_Node) = SP_Symbols.compilation_unit, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Top_Node = compilation_unit in RequiredUnits.Init"); -- compilation_unit : -- context_clause library_unit -- | context_clause secondary_unit -- | library_unit -- | secondary_unit -- | apragma -- | ; Trace ("Entering RequiredUnits.Init"); Inherit_Ptr_Local := STree.Child_Node (Current_Node => Top_Node); if STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.context_clause then Inherit_Ptr_Local := STree.Next_Sibling (Current_Node => Inherit_Ptr_Local); end if; -- any WITH clause [and/or use type clause] now skipped -- ASSUME Inherit_Ptr_Local = library_unit OR secondary_unit OR apragma OR SPend if STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.library_unit then -- ASSUME Inherit_Ptr_Local = library_unit Trace (" library unit found"); Inherit_Ptr_Local := STree.Child_Node (Current_Node => Inherit_Ptr_Local); -- ASSUME Inherit_Ptr_Local = package_declaration OR private_package_declaration OR main_program_declaration OR -- generic_declaration OR generic_package_instantiation if STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.package_declaration or else STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.private_package_declaration then -- ASSUME Inherit_Ptr_Local = package_declaration OR private_package_declaration Trace (" package declaration found"); Unit_Type_Local := ContextManager.PackageSpecification; Inherit_Ptr_Local := STree.Child_Node (Current_Node => Inherit_Ptr_Local); -- ASSUME Inherit_Ptr_Local = package_specification OR inherit_clause if STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.inherit_clause then Name_Ptr := STree.Next_Sibling (Current_Node => Inherit_Ptr_Local); elsif STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.package_specification then Name_Ptr := Inherit_Ptr_Local; Inherit_Ptr_Local := STree.NullNode; else Name_Ptr := STree.NullNode; Inherit_Ptr_Local := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Inherit_Ptr_Local = package_specification OR inherit_clause in RequiredUnits.Init"); end if; -- ASSUME Name_Ptr = package_specification SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Name_Ptr) = SP_Symbols.package_specification, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Name_Ptr = compilation_unit in RequiredUnits.Init"); Name_Ptr := STree.Child_Node (Current_Node 
=> Name_Ptr); -- ASSUME Inherit_Ptr_Local = inherit_clause OR NULL -- ASSUME Name_Ptr = dotted_simple_name elsif STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.main_program_declaration then -- ASSUME Inherit_Ptr_Local = main_program_declaration Trace (" main program found"); Unit_Type_Local := ContextManager.MainProgram; Inherit_Ptr_Local := STree.Child_Node (Current_Node => Inherit_Ptr_Local); -- ASSUME Inherit_Ptr_Local = inherit_clause OR main_program_annotation if STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.inherit_clause then Name_Ptr := STree.Next_Sibling (Current_Node => Inherit_Ptr_Local); elsif STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.main_program_annotation then Name_Ptr := Inherit_Ptr_Local; Inherit_Ptr_Local := STree.NullNode; else Name_Ptr := STree.NullNode; Inherit_Ptr_Local := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Inherit_Ptr_Local = inherit_clause OR main_program_annotation in RequiredUnits.Init"); end if; -- ASSUME Name_Ptr = main_program_annotation SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Name_Ptr) = SP_Symbols.main_program_annotation, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Name_Ptr = main_program_annotation in RequiredUnits.Init"); Name_Ptr := STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => STree.Last_Sibling_Of (Start_Node => Name_Ptr)))); -- ASSUME Inherit_Ptr_Local = inherit_clause OR NULL -- ASSUME Name_Ptr = identifier elsif STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.generic_declaration then -- ASSUME Inherit_Ptr_Local = generic_declaration Trace (" generic declaration found"); Inherit_Ptr_Local := STree.Child_Node (Current_Node => Inherit_Ptr_Local); -- ASSUME Inherit_Ptr_Local = generic_subprogram_declaration OR generic_package_declaration if STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.generic_subprogram_declaration then -- ASSUME Inherit_Ptr_Local = generic_subprogram_declaration Unit_Type_Local := ContextManager.GenericSubprogramDeclaration; elsif STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.generic_package_declaration then Unit_Type_Local := ContextManager.PackageSpecification; else Unit_Type_Local := ContextManager.InvalidUnit; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Inherit_Ptr_Local = generic_subprogram_declaration OR " & "generic_package_declaration in RequiredUnits.Init"); end if; Inherit_Ptr_Local := STree.Child_Node (Current_Node => Inherit_Ptr_Local); -- ASSUME Inherit_Ptr_Local = inherit_clause OR generic_formal_part if STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.inherit_clause then Name_Ptr := STree.Next_Sibling (Current_Node => Inherit_Ptr_Local); elsif STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.generic_formal_part then Name_Ptr := Inherit_Ptr_Local; Inherit_Ptr_Local := STree.NullNode; else Name_Ptr := STree.NullNode; Inherit_Ptr_Local := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Inherit_Ptr_Local = inherit_clause OR generic_formal_part in RequiredUnits.Init"); end if; -- ASSUME Name_Ptr = generic_formal_part SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Name_Ptr) = SP_Symbols.generic_formal_part, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Name_Ptr = generic_formal_part in RequiredUnits.Init"); 
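-- The sibling following the generic_formal_part is the package_specification
-- or not_overriding_subprogram_declaration that carries the generic unit's name.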
Name_Ptr := STree.Next_Sibling (Current_Node => Name_Ptr); -- ASSUME Name_Ptr = not_overriding_subprogram_declaration OR package_specification if STree.Syntax_Node_Type (Node => Name_Ptr) = SP_Symbols.package_specification then -- ASSUME Name_Ptr = package_specification Name_Ptr := STree.Child_Node (Current_Node => Name_Ptr); elsif STree.Syntax_Node_Type (Node => Name_Ptr) = SP_Symbols.not_overriding_subprogram_declaration then -- ASSUME Name_Ptr = not_overriding_subprogram_declaration Name_Ptr := STree.Child_Node (Current_Node => Name_Ptr); -- ASSUME Name_Ptr = procedure_specification OR function_specification OR proof_function_declaration if STree.Syntax_Node_Type (Node => Name_Ptr) = SP_Symbols.proof_function_declaration then -- ASSUME Name_Ptr = proof_function_declaration Name_Ptr := STree.Child_Node (Current_Node => Name_Ptr); elsif STree.Syntax_Node_Type (Node => Name_Ptr) /= SP_Symbols.procedure_specification and then STree.Syntax_Node_Type (Node => Name_Ptr) /= SP_Symbols.function_specification then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Name_Ptr = procedure_specification OR function_specification OR " & "proof_function_declaration in RequiredUnits.Init"); end if; -- ASSUME Name_Ptr = procedure_specification OR function_specification SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Name_Ptr) = SP_Symbols.procedure_specification or else STree.Syntax_Node_Type (Node => Name_Ptr) = SP_Symbols.function_specification, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Name_Ptr = procedure_specification OR function_specification in RequiredUnits.Init"); Name_Ptr := STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => Name_Ptr)); else Name_Ptr := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Name_Ptr = not_overriding_subprogram_declaration OR " & "package_specification in RequiredUnits.Init"); end if; -- ASSUME Inherit_Ptr_Local = inherit_clause OR NULL -- ASSUME Name_Ptr = dotted_simple_name OR identifier elsif STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.generic_package_instantiation then -- ASSUME Inherit_Ptr_Local = generic_package_instantiation -- generic_package_instantiation : -- dotted_simple_name package_annotation RWis RWnew identifier generic_actual_part semicolon -- | dotted_simple_name package_annotation RWis RWnew identifier semicolon ; Trace (" generic declaration found"); Unit_Type_Local := ContextManager.PackageSpecification; Name_Ptr := STree.Child_Node (Current_Node => Inherit_Ptr_Local); Inherit_Ptr_Local := STree.NullNode; -- ASSUME Inherit_Ptr_Local = NULL -- ASSUME Name_Ptr = dotted_simple_name -- add elsif here for generic subprog bods else Unit_Type_Local := ContextManager.InvalidUnit; Name_Ptr := STree.NullNode; Inherit_Ptr_Local := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Inherit_Ptr_Local = package_declaration OR private_package_declaration OR " & "main_program_declaration OR generic_declaration OR generic_package_instantiation in RequiredUnits.Init"); end if; -- ASSUME Inherit_Ptr_Local = inherit_clause OR NULL if STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.inherit_clause then Inherit_Ptr_Local := STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => Inherit_Ptr_Local)); end if; -- ASSUME Inherit_Ptr_Local = inherit_clause_rep OR dotted_simple_name OR NULL -- ASSUME Name_Ptr = dotted_simple_name OR identifier 
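-- The remaining alternatives of compilation_unit are handled below:
-- secondary units (library unit bodies and subunits), pragmas and SPEND
-- (an otherwise empty compilation unit).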
elsif STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.secondary_unit then -- ASSUME Inherit_Ptr_Local = secondary_unit -- secondary_unit : -- library_unit_body | subunit ; Trace (" secondary unit found"); Inherit_Ptr_Local := STree.Child_Node (Current_Node => Inherit_Ptr_Local); -- ASSUME Inherit_Ptr_Local = library_unit_body OR subunit if STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.library_unit_body then -- ASSUME Inherit_Ptr_Local = library_unit_body -- library_unit_body : -- package_body | generic_subprogram_body ; Inherit_Ptr_Local := STree.Child_Node (Current_Node => Inherit_Ptr_Local); -- ASSUME Inherit_Ptr_Local = package_body OR generic_subprogram_body if STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.package_body then -- ASSUME Inherit_Ptr_Local = package_body -- package_body : -- RWpackage RWbody dotted_simple_name RWis package_implementation semicolon -- | RWpackage RWbody dotted_simple_name refinement_definition RWis package_implementation semicolon ; Trace (" package body found"); Unit_Type_Local := ContextManager.PackageBody; Name_Ptr := STree.Child_Node (Current_Node => Inherit_Ptr_Local); Inherit_Ptr_Local := STree.NullNode; -- ASSUME Inherit_Ptr_Local = NULL -- ASSUME Name_Ptr = dotted_simple_name elsif STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.generic_subprogram_body then -- ASSUME Inherit_Ptr_Local = generic_subprogram_body -- generic_subprogram_body : -- not_overriding_subprogram_body ; -- -- not_overriding_subprogram_body : -- procedure_specification procedure_annotation subprogram_implementation -- | function_specification function_annotation subprogram_implementation ; -- -- procedure_specification : -- RWprocedure designator -- | RWprocedure designator formal_part ; -- -- function_specification : -- RWfunction designator formal_part RWreturn type_mark -- | RWfunction designator RWreturn type_mark ; -- -- designator : -- identifier ; Trace (" generic subprogram body found"); Unit_Type_Local := ContextManager.GenericSubprogramBody; Name_Ptr := STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => Inherit_Ptr_Local)))); Inherit_Ptr_Local := STree.NullNode; -- ASSUME Inherit_Ptr_Local = NULL -- ASSUME Name_Ptr = identifier else Unit_Type_Local := ContextManager.InvalidUnit; Name_Ptr := STree.NullNode; Inherit_Ptr_Local := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Inherit_Ptr_Local = package_body OR generic_subprogram_body in RequiredUnits.Init"); end if; -- ASSUME Inherit_Ptr_Local = NULL -- ASSUME Name_Ptr = dotted_simple_name OR identifier elsif STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.subunit then -- ASSUME Inherit_Ptr_Local = subunit -- subunit : -- RWseparate left_paren parent_unit_name right_paren proper_body ; -- -- proper_body : -- subprogram_body | package_body | task_body | protected_body ; Trace (" subunit found"); Unit_Type_Local := ContextManager.SubUnit; Inherit_Ptr_Local := STree.Child_Node (Current_Node => Inherit_Ptr_Local); -- ASSUME Inherit_Ptr_Local = parent_unit_name SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.parent_unit_name, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Inherit_Ptr_Local = parent_unit_name in RequiredUnits.Init"); Name_Ptr := STree.Child_Node (Current_Node => STree.Next_Sibling (Current_Node => Inherit_Ptr_Local)); Inherit_Ptr_Local 
:= STree.Child_Node (Current_Node => Inherit_Ptr_Local); -- ASSUME Inherit_Ptr_Local = parent_unit_name OR simple_name SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.parent_unit_name or else STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.simple_name, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Inherit_Ptr_Local = parent_unit_name OR simple_name in RequiredUnits.Init"); -- ASSUME Name_Ptr = subprogram_body OR package_body OR task_body OR protected_body if STree.Syntax_Node_Type (Node => Name_Ptr) = SP_Symbols.subprogram_body then -- ASSUME Name_Ptr = subprogram_body -- subprogram_body : -- overriding_indicator procedure_specification procedure_annotation RWis subprogram_implementation -- | overriding_indicator function_specification function_annotation RWis subprogram_implementation -- | procedure_specification procedure_annotation RWis subprogram_implementation -- | function_specification function_annotation RWis subprogram_implementation ; Trace (" subprogram body found"); Name_Ptr := STree.Child_Node (Current_Node => Name_Ptr); -- ASSUME Name_Ptr = procedure_specification OR function_specification OR overriding_indicator if STree.Syntax_Node_Type (Node => Name_Ptr) = SP_Symbols.overriding_indicator then Name_Ptr := STree.Next_Sibling (Current_Node => Name_Ptr); elsif STree.Syntax_Node_Type (Node => Name_Ptr) /= SP_Symbols.procedure_specification and then STree.Syntax_Node_Type (Node => Name_Ptr) /= SP_Symbols.function_specification then Name_Ptr := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Name_Ptr = procedure_specification OR function_specification OR " & "overriding_indicator in RequiredUnits.Init"); end if; -- ASSUME Name_Ptr = procedure_specification OR function_specification SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Name_Ptr) = SP_Symbols.procedure_specification or else STree.Syntax_Node_Type (Node => Name_Ptr) = SP_Symbols.function_specification, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Name_Ptr = procedure_specification OR function_specification in RequiredUnits.Init"); Name_Ptr := STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => Name_Ptr)); -- ASSUME Name_Ptr = identifier elsif STree.Syntax_Node_Type (Node => Name_Ptr) = SP_Symbols.protected_body then -- ASSUME Name_Ptr = protected_body -- protected_body : -- RWprotected RWbody identifier RWis -- protected_operation_item -- RWend identifier semicolon ; Trace (" protected body found"); Name_Ptr := STree.Child_Node (Current_Node => Name_Ptr); -- ASSUME Name_Ptr = identifier elsif STree.Syntax_Node_Type (Node => Name_Ptr) = SP_Symbols.task_body then -- ASSUME Name_Ptr = task_body -- task_body : -- RWtask RWbody identifier procedure_annotation RWis -- subprogram_implementation ; Trace (" task body found"); Name_Ptr := STree.Child_Node (Current_Node => Name_Ptr); -- ASSUME Name_Ptr = identifier elsif STree.Syntax_Node_Type (Node => Name_Ptr) = SP_Symbols.package_body then -- ASSUME Name_Ptr = package_body -- package_body : -- RWpackage RWbody dotted_simple_name RWis package_implementation semicolon -- | RWpackage RWbody dotted_simple_name refinement_definition RWis package_implementation semicolon ; Trace (" separate package body found"); Name_Ptr := STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => Name_Ptr)); -- ASSUME Name_Ptr = dotted_simple_name OR identifier else Name_Ptr := STree.NullNode; SystemErrors.Fatal_Error 
(Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Name_Ptr = subprogram_body OR package_body OR task_body OR " & "protected_body in RequiredUnits.Init"); end if; -- ASSUME Inherit_Ptr_Local = parent_unit_name OR simple_name -- ASSUME Name_Ptr = dotted_simple_name OR identifier else Unit_Type_Local := ContextManager.InvalidUnit; Inherit_Ptr_Local := STree.NullNode; Name_Ptr := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Inherit_Ptr_Local = library_unit_body OR subunit in RequiredUnits.Init"); end if; -- ASSUME Inherit_Ptr_Local = parent_unit_name OR simple_name OR NULL -- ASSUME Name_Ptr = dotted_simple_name OR identifier elsif STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.apragma then -- ASSUME Inherit_Ptr_Local = apragma Trace (" pragma found"); Unit_Type_Local := ContextManager.InterUnitPragma; Inherit_Ptr_Local := STree.NullNode; Name_Ptr := STree.NullNode; -- ASSUME Inherit_Ptr_Local = NULL -- ASSUME Name_Ptr = NULL elsif STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.SPEND then -- ASSUME Inherit_Ptr_Local = SPEND Unit_Type_Local := ContextManager.InvalidUnit; Inherit_Ptr_Local := STree.NullNode; Name_Ptr := STree.NullNode; -- ASSUME Inherit_Ptr_Local = NULL -- ASSUME Name_Ptr = NULL else Unit_Type_Local := ContextManager.InvalidUnit; Inherit_Ptr_Local := STree.NullNode; Name_Ptr := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Inherit_Ptr_Local = library_unit OR secondary_unit OR apragma OR SPEND in RequiredUnits.Init"); end if; -- ASSUME Inherit_Ptr_Local = inherit_clause_rep OR dotted_simple_name OR parent_unit_name OR simple_name OR NULL -- ASSUME Name_Ptr = dotted_simple_name OR identifier OR NULL if Name_Ptr = STree.NullNode then -- ASSUME Name_Ptr = NULL Unit_Name := LexTokenLists.Null_List; elsif STree.Syntax_Node_Type (Node => Name_Ptr) = SP_Symbols.dotted_simple_name then -- ASSUME Name_Ptr = dotted_simple_name Unit_Name_Local := LexTokenLists.Null_List; Name_Ptr := STree.Last_Child_Of (Start_Node => Name_Ptr); loop -- ASSUME Name_Ptr = identifier LexTokenLists.Append (Unit_Name_Local, STree.Node_Lex_String (Node => Name_Ptr)); Name_Ptr := STree.Next_Sibling (Current_Node => STree.Parent_Node (Current_Node => Name_Ptr)); exit when STree.Syntax_Node_Type (Node => Name_Ptr) /= SP_Symbols.identifier; end loop; Unit_Name := Unit_Name_Local; elsif STree.Syntax_Node_Type (Node => Name_Ptr) = SP_Symbols.identifier then -- ASSUME Name_Ptr = identifier Unit_Name_Local := LexTokenLists.Null_List; LexTokenLists.Append (Unit_Name_Local, STree.Node_Lex_String (Node => Name_Ptr)); Unit_Name := Unit_Name_Local; else Unit_Name := LexTokenLists.Null_List; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Name_Ptr = dotted_simple_name OR identifier OR NULL in RequiredUnits.Init"); end if; Unit_Type := Unit_Type_Local; SystemErrors.RT_Assert (C => Inherit_Ptr_Local = STree.NullNode or else STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.inherit_clause_rep or else STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.dotted_simple_name or else STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.parent_unit_name or else STree.Syntax_Node_Type (Node => Inherit_Ptr_Local) = SP_Symbols.simple_name, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Inherit_Ptr_Local = inherit_clause_rep OR dotted_simple_name OR parent_unit_name OR " & "simple_name OR NULL in 
RequiredUnits.Init"); Inherit_Ptr := Inherit_Ptr_Local; end Init; procedure Next (Inherit_Ptr : in out STree.SyntaxNode; Required_Unit : out LexTokenLists.Lists; Found : out Boolean) is procedure CopyInheritedPackageToList (DottedNameNode : STree.SyntaxNode) --# global in STree.Table; --# in out Required_Unit; --# derives Required_Unit from *, --# DottedNameNode, --# STree.Table; is CurrentNode : STree.SyntaxNode; begin Trace ("Entering RequiredUnits.CopyInheritedPackageToList"); CurrentNode := STree.Last_Child_Of (Start_Node => DottedNameNode); loop exit when CurrentNode = STree.NullNode; LexTokenLists.Append (List => Required_Unit, Item => STree.Node_Lex_String (Node => CurrentNode)); CurrentNode := STree.Next_Sibling (Current_Node => STree.Parent_Node (Current_Node => CurrentNode)); end loop; end CopyInheritedPackageToList; ------------------------------- procedure CopyPUNtoList (PUNNode : in STree.SyntaxNode; List : out LexTokenLists.Lists) --# global in STree.Table; --# derives List from PUNNode, --# STree.Table; is NextNode : STree.SyntaxNode; LList : LexTokenLists.Lists; begin NextNode := PUNNode; while STree.Syntax_Node_Type (Node => NextNode) /= SP_Symbols.simple_name loop NextNode := STree.Child_Node (Current_Node => NextNode); end loop; LList := LexTokenLists.Null_List; loop LexTokenLists.Append (LList, STree.Node_Lex_String (Node => STree.Child_Node (Current_Node => NextNode))); NextNode := STree.Next_Sibling (Current_Node => STree.Parent_Node (Current_Node => NextNode)); exit when STree.Syntax_Node_Type (Node => NextNode) /= SP_Symbols.simple_name; end loop; List := LList; end CopyPUNtoList; begin -- Next Trace ("Entering RequiredUnits.Next"); Required_Unit := LexTokenLists.Null_List; if Inherit_Ptr = STree.NullNode then Found := False; else Found := True; if STree.Syntax_Node_Type (Node => Inherit_Ptr) = SP_Symbols.simple_name then LexTokenLists.Append (List => Required_Unit, Item => STree.Node_Lex_String (Node => STree.Child_Node (Current_Node => Inherit_Ptr))); elsif STree.Syntax_Node_Type (Node => Inherit_Ptr) = SP_Symbols.dotted_simple_name then CopyInheritedPackageToList (Inherit_Ptr); Inherit_Ptr := STree.NullNode; elsif STree.Syntax_Node_Type (Node => Inherit_Ptr) = SP_Symbols.inherit_clause_rep then CopyInheritedPackageToList (STree.Next_Sibling (Current_Node => Inherit_Ptr)); Inherit_Ptr := STree.Child_Node (Current_Node => Inherit_Ptr); elsif STree.Syntax_Node_Type (Node => Inherit_Ptr) = SP_Symbols.parent_unit_name then CopyPUNtoList (Inherit_Ptr, Required_Unit); Inherit_Ptr := STree.NullNode; end if; end if; end Next; end RequiredUnits; spark-2012.0.deb/examiner/dictionary.ads0000644000175000017500000055761311753202336017130 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. 
If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with ContextManager; with ExaminerConstants; with E_Strings; with LexTokenManager; with SPARK_IO; with SP_Symbols; use type ContextManager.UnitDescriptors; use type ExaminerConstants.RefType; use type LexTokenManager.Str_Comp_Result; use type SPARK_IO.File_Status; use type SP_Symbols.SP_Symbol; --# inherit CommandLineData, --# CommandLineHandler, --# ContextManager, --# ExaminerConstants, --# E_Strings, --# FileSystem, --# LexTokenManager, --# LexTokenStacks, --# Maths, --# ScreenEcho, --# SPARK_IO, --# SP_Symbols, --# Statistics, --# SystemErrors, --# XMLReport; package Dictionary --# own Dict : Dictionaries; is -------------------------------------------------------------------------------- -- TYPE DEFINITIONS -- -------------------------------------------------------------------------------- type Symbol is private; NullSymbol : constant Symbol; type Iterator is private; NullIterator : constant Iterator; type Location is record Start_Position : LexTokenManager.Token_Position; End_Position : LexTokenManager.Token_Position; end record; Null_Location : constant Location := Location'(Start_Position => LexTokenManager.Null_Token_Position, End_Position => LexTokenManager.Null_Token_Position); type Scopes is private; -- following constant is not valid for use other than as a placeholder for constructing a -- an aggregate that needs a Scope in it (e.g. EmptyNumericError in Error_Types) NullScope : constant Scopes; -- conversion routines to and from scope follow in access function section type Contexts is (ProofContext, ProgramContext); type Modes is (DefaultMode, InMode, OutMode, InOutMode, InvalidMode); type Abstractions is (IsAbstract, IsRefined); type PrefixSort is (AType, ABaseType, AnObject); type PackageSort is (Public, PrivateChild); type RavenscarPragmas is (Priority, InterruptPriority, AttachHandler); subtype RavenscarPragmasWithValue is RavenscarPragmas range Priority .. InterruptPriority; type Generic_Kind is (Generic_Of_Subprogram, Generic_Of_Package); type Generic_Parameter_Kind is (Generic_Type_Parameter, Generic_Object_Parameter); type SLI_Type is ( SLI_Array_Object, SLI_Array_Type, SLI_Boolean_Object, SLI_Boolean_Type, SLI_Enumeration_Object, SLI_Enumeration_Type, SLI_Floating_Point_Object, SLI_Floating_Point_Type, SLI_Abstract_Type, SLI_Signed_Integer_Object, SLI_Signed_Integer_Type, SLI_Generic_Package_Type, SLI_Package_Type, SLI_Label_On_Loop, SLI_Modular_Integer_Object, SLI_Modular_Integer_Type, SLI_Enumeration_Literal, SLI_Named_Number, SLI_Fixed_Point_Object, SLI_Fixed_Point_Type, SLI_Record_Object, SLI_Record_Type, SLI_String_Object, SLI_String_Type, SLI_Task_Object, SLI_Task_Type, SLI_Generic_Procedure_Type, SLI_Procedure_Type, SLI_Generic_Function_Op, SLI_Function_Op, SLI_Protected_Object, SLI_Protected_Type, SLI_Entry_Family, SLI_Generic_Formal_Parameter, SLI_Unknown_Type); -- The generation of replacement rules for composite constants is governed -- by an object_assertion annotation. This can either explicitly request -- that a rule be generated, request that NO rule be generated, or can be -- left unspecified. 
The action of the Examiner for these cases also depends -- on the setting of the /rules=XXX command-line switch declared in -- commandlinedata.ads type Rule_Policies is (Unspecified, Rule_Requested, No_Rule_Requested); type Visibility is (Visible, Privat, Local); function Is_Null_Symbol (TheSymbol : Symbol) return Boolean; --# return TheSymbol = NullSymbol; function IsDeclaration (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsEnumerationLiteral (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsArrayIndex (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsSubcomponent (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsTypeMark (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsConstant (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsVariable (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsGlobalVariable (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsQuantifiedVariable (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsImplicitReturnVariable (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsImplicitInStream (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsRulePolicy (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsParameterConstraint (TheSymbol : Symbol) return Boolean; --# global in Dict; -- A subprgoram parameter constraint is a special symbol associated with unconstrained formal parameters function IsSubprogramParameter (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsSubprogram (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsOperator (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsDependency (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsPackage (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsGenericParameterSymbol (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsKnownDiscriminant (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsLoop (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsImplicitProofFunction (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsLoopParameter (TheSymbol : Symbol) return Boolean; --# global in Dict; -------------------------------------------------------------------------------- -- Null Test and Equality of Symbols -- -- -- -- If you know two Symbols are of the same kind (e.g. both denote a package, -- -- a type, and so on), then equality should be tested using the strongly- -- -- typed functions below. -- -- -- -- General equality between two Symbols is available using the predefined -- -- "=" operator. -- -------------------------------------------------------------------------------- -- This function replaces the "=" function between 2 symbols but -- works only between 2 Enumeration_Literals or Null Symbols. If one of the arguments -- is not an Enumeration_Literal, the function terminates the Examiner. function Enumeration_Literals_Are_Equal (Left_Symbol, Right_Symbol : Symbol) return Boolean; --# global in Dict; --# return Left_Symbol = Right_Symbol; -- This function replaces the "=" function between 2 symbols but -- works only between 2 Record_Components or Null Symbols. If one of the arguments -- is not an Record_Component, the function terminates the Examiner. 
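-- Illustrative use (hypothetical symbols Left and Right): rather than writing
-- "Left = Right" for two record component symbols, a caller would write
--   Dictionary.Record_Components_Are_Equal (Left_Symbol => Left, Right_Symbol => Right)
-- so that an accidental comparison of some other kind of symbol is trapped.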
function Record_Components_Are_Equal (Left_Symbol, Right_Symbol : Symbol) return Boolean; --# global in Dict; --# return Left_Symbol = Right_Symbol; -- This function replaces the "=" function between 2 symbols but -- works only between 2 Types or Null Symbols. If one of the arguments -- is not a Type, the function terminates the Examiner. function Types_Are_Equal (Left_Symbol, Right_Symbol : Symbol; Full_Range_Subtype : Boolean) return Boolean; --# global in Dict; --# return R => (not Full_Range_Subtype -> R = (Left_Symbol = Right_Symbol)); -- This function replaces the "=" function between 2 symbols but -- works only between 2 Variables or Null Symbols. If one of the arguments -- is not a Variable, the function terminates the Examiner. function Variables_Are_Equal (Left_Symbol, Right_Symbol : Symbol) return Boolean; --# global in Dict; --# return Left_Symbol = Right_Symbol; -- This function replaces the "=" function between 2 symbols but -- works only between 2 Implicit_Return_Variables or Null Symbols. If one of the arguments -- is not a Implicit_Return_Variable, the function terminates the Examiner. function Implicit_Return_Variables_Are_Equal (Left_Symbol, Right_Symbol : Symbol) return Boolean; --# global in Dict; --# return Left_Symbol = Right_Symbol; -- This function replaces the "=" function between 2 symbols but -- works only between 2 Subprograms or Null Symbols. If one of the arguments -- is not a Subprogram, the function terminates the Examiner. function Subprograms_Are_Equal (Left_Symbol, Right_Symbol : Symbol) return Boolean; --# global in Dict; --# return Left_Symbol = Right_Symbol; -- This function replaces the "=" function between 2 symbols but -- works only between 2 Packages or Null Symbols. If one of the arguments -- is not a Package, the function terminates the Examiner. 
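-- Illustrative use (hypothetical symbol Pack_Sym): GetLibraryPackage and
-- GetRootPackage return either NullSymbol or a package symbol, so a result
-- such as GetLibraryPackage (Scope) may safely be compared with Pack_Sym via
-- Packages_Are_Equal.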
function Packages_Are_Equal (Left_Symbol, Right_Symbol : Symbol) return Boolean; --# global in Dict; --# return Left_Symbol = Right_Symbol; -------------------------------------------------------------------------------- -- HOUSEKEEPING -- -------------------------------------------------------------------------------- procedure Initialize (Write_To_File : in Boolean); --# global in CommandLineData.Content; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# out Dict; --# derives Dict, --# SPARK_IO.File_Sys from CommandLineData.Content, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Write_To_File & --# LexTokenManager.State from *, --# CommandLineData.Content; procedure Set_Current_File_Name (File_Name : in E_Strings.T); --# global in CommandLineData.Content; --# in out Dict; --# derives Dict from *, --# CommandLineData.Content, --# File_Name; function GetOwnVariableOrConstituentMode (Variable : Symbol) return Modes; --# global in Dict; procedure Write (File_Name : in E_Strings.T; Status : out SPARK_IO.File_Status); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# File_Name, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys, --# Status from Dict, --# File_Name, --# LexTokenManager.State, --# SPARK_IO.File_Sys; procedure Add_Generic_Unit (Kind : in Generic_Kind; Scope : in Scopes; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Generic_Unit : out Symbol); --# global in out Dict; --# derives Dict, --# Generic_Unit from Comp_Unit, --# Declaration, --# Dict, --# Kind, --# Scope; procedure ReportUsage; --# global in Dict; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage from *, --# Dict; procedure Read_Target_Data_File; --# global in CommandLineData.Content; --# in out Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Dict, --# LexTokenManager.State, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# LexTokenManager.State, --# SPARK_IO.File_Sys; procedure Output_Target_Data_File (To_File : in SPARK_IO.File_Type); --# global in CommandLineData.Content; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives LexTokenManager.State, --# SPARK_IO.File_Sys, --# XMLReport.State from CommandLineData.Content, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# To_File, --# XMLReport.State; -------------------------------------------------------------------------------- -- SYMBOLS and ORDERING -- -------------------------------------------------------------------------------- function Declared_Before (Left, Right : in Symbol) return Boolean; -------------------------------------------------------------------------------- -- LOOKUP ROUTINES -- -------------------------------------------------------------------------------- function GlobalScope return Scopes; --# global in Dict; function Set_Visibility (The_Visibility : Visibility; The_Unit : Symbol) return Scopes; function Get_Visibility (Scope : Scopes) return Visibility; function IsLibraryLevel (Scope : Scopes) return Boolean; --# global in Dict; function IsPredefinedScope (Scope : Scopes) return Boolean; --# global in Dict; function IsGlobalScope (Scope : Scopes) return Boolean; --# global in Dict; function GetRegion (Scope : Scopes) return Symbol; function IsLocal (Inner, Outer : Scopes) return Boolean; --# global in Dict; function GetContext (TheSymbol : Symbol) return Contexts; --# global in Dict; function GetRootPackage 
(ThePackage : Symbol) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsPackage (S, Dict)); function GetLibraryPackage (Scope : Scopes) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsPackage (S, Dict)); function IsPrivatePackage (ThePackage : Symbol) return Boolean; --# global in Dict; function GetPackageParent (ThePackage : Symbol) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsPackage (S, Dict)); function PackageDeclaresTaggedType (ThePackage : Symbol) return Boolean; --# global in Dict; function PackageExtendsAnotherPackage (ThePackage : Symbol) return Boolean; --# global in Dict; function GetPackageThatIsExtended (ThePackage : Symbol) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsPackage (S, Dict)); -- Package ownership is a relationship defined as follows: -- The owner of a package (if any) is defined to be the -- the parent of its closest private ancestor, where the term ancestor is -- as defined 10.1.1(11) of the Ada 95 LRM [2], to include the package itself. -- As a consequence of this rule only a private package or its private -- or public descendents can have an owner. -- This definition is given in S.P0468.42.8 section 2.3.3.6 and -- a consequntial rule given in the SPARK LRM section 7.1.1. function GetPackageOwner (ThePackage : Symbol) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsPackage (S, Dict)); function IsProperDescendent (Inner, Outer : Symbol) return Boolean; --# global in Dict; function IsPrivateDescendent (Inner, Outer : Symbol) return Boolean; --# global in Dict; function IsDescendentOfPrivateSibling (Candidate, ThePackage : Symbol) return Boolean; --# global in Dict; function IsDirectlyDefined (Name : LexTokenManager.Lex_String; Scope : Scopes; Context : Contexts) return Boolean; --# global in Dict; --# in LexTokenManager.State; function IsDefined (Name : LexTokenManager.Lex_String; Scope : Scopes; Context : Contexts; Full_Package_Name : Boolean) return Boolean; --# global in CommandLineData.Content; --# in Dict; --# in LexTokenManager.State; function UnaryOperatorIsDefined (Name : SP_Symbols.SP_Symbol; Operand : Symbol) return Boolean; --# global in CommandLineData.Content; --# in Dict; function Get_Binary_Operator_Type (Name : SP_Symbols.SP_Symbol; Left : Symbol; Right : Symbol) return Symbol; --# global in CommandLineData.Content; --# in Dict; --# return S => (Is_Null_Symbol (S) or IsTypeMark (S, Dict)); function UnaryOperatorIsVisible (Name : SP_Symbols.SP_Symbol; Operand : Symbol; Scope : Scopes) return Boolean; --# global in CommandLineData.Content; --# in Dict; function BinaryOperatorIsVisible (Name : SP_Symbols.SP_Symbol; Left : Symbol; Right : Symbol; Scope : Scopes) return Boolean; --# global in CommandLineData.Content; --# in Dict; function AttributeIsVisible (Name : LexTokenManager.Lex_String; Prefix : PrefixSort; TypeMark : Symbol; Scope : Scopes) return Boolean; --# global in CommandLineData.Content; --# in Dict; --# in LexTokenManager.State; function Attribute_Is_Visible_But_Obsolete (Name : LexTokenManager.Lex_String; Prefix : PrefixSort; TypeMark : Symbol; Scope : Scopes) return Boolean; --# global in CommandLineData.Content; --# in Dict; --# in LexTokenManager.State; function PrefixAllowed (Prefix : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function PrefixRequired (Item : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function GetPrefix (Item : Symbol) return 
LexTokenManager.Lex_String; --# global in Dict; function LookupImmediateScope (Name : LexTokenManager.Lex_String; Scope : Scopes; Context : Contexts) return Symbol; --# global in Dict; --# in LexTokenManager.State; function LookupItem (Name : LexTokenManager.Lex_String; Scope : Scopes; Context : Contexts; Full_Package_Name : Boolean) return Symbol; --# global in CommandLineData.Content; --# in Dict; --# in LexTokenManager.State; function LookupSelectedItem (Prefix : Symbol; Selector : LexTokenManager.Lex_String; Scope : Scopes; Context : Contexts) return Symbol; --# global in CommandLineData.Content; --# in Dict; --# in LexTokenManager.State; function Is_Subprogram (TheSymbol : Symbol) return Boolean; --# global in Dict; -- starting in Scope look for operations inherited as a result of use of -- tagged and extended tagged types. Returns NullSymbol if no match found. -- ActualTaggedType is set (on successful return) to the type of the tagged parameter -- required in the calling environment. procedure SearchForInheritedOperations (Name : in LexTokenManager.Lex_String; Scope : in Scopes; Prefix : in Symbol; Context : in Contexts; OpSym : out Symbol; ActualTaggedType : out Symbol); --# global in CommandLineData.Content; --# in Dict; --# in LexTokenManager.State; --# derives ActualTaggedType, --# OpSym from CommandLineData.Content, --# Context, --# Dict, --# LexTokenManager.State, --# Name, --# Prefix, --# Scope; --# post (Is_Null_Symbol (OpSym) or Is_Subprogram (OpSym, Dict)) and --# (Is_Null_Symbol (ActualTaggedType) or IsTypeMark (ActualTaggedType, Dict)); function GetSubprogramControllingType (Subprogram : Symbol) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsTypeMark (S, Dict)); -- returns null symbol if the subprogram does not have a controlling type (i.e. a parameter of a tagged -- type which is declared in the same package as the subprogram) function GetOverriddenSubprogram (Subprogram : Symbol) return Symbol; --# global in Dict; --# in LexTokenManager.State; function IsCallable (Subprogram : Symbol; PrefixNeeded : Boolean; Scope : Scopes) return Boolean; --# global in Dict; function GenerateSimpleName (Item : Symbol; Separator : String) return E_Strings.T; --# global in Dict; --# in LexTokenManager.State; function GetAnyPrefixNeeded (Sym : Symbol; Scope : Scopes; Separator : String) return E_Strings.T; --# global in CommandLineData.Content; --# in Dict; --# in LexTokenManager.State; function IsValidGenericTypeAssociation (Formal, Actual : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function Get_Symbol_Compilation_Unit (Item : Symbol) return ContextManager.UnitDescriptors; --# global in Dict; function Get_Symbol_Location (Item : Symbol) return LexTokenManager.Token_Position; --# global in Dict; -- This procedure returns RESULT containing the SLI type -- associated with the symbol ITEM. 
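-- For example, a symbol denoting a record object is expected to yield
-- SLI_Record_Object and a task type SLI_Task_Type, with SLI_Unknown_Type
-- presumably covering symbols that have no more specific classification.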
procedure Get_SLI_Type (Item : in Symbol; Result : out SLI_Type); --# global in CommandLineData.Content; --# in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Result from Dict, --# Item & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dict, --# Item, --# LexTokenManager.State; -------------------------------------------------------------------------------- -- CONSTRUCTORS -- -------------------------------------------------------------------------------- procedure Add_Deferred_Constant (Name : in LexTokenManager.Lex_String; Type_Mark : in Symbol; Type_Reference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; The_Package : in Symbol; TheConstant : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# TheConstant from Comp_Unit, --# Declaration, --# Dict, --# Name, --# The_Package, --# Type_Mark & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# The_Package, --# Type_Mark, --# Type_Reference; --# post IsConstant (TheConstant, Dict); procedure Promote_Deferred_To_Full_Constant (Constant_Sym : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Value : in LexTokenManager.Lex_String; Exp_Node : in ExaminerConstants.RefType; The_Package : in Symbol); --# global in out Dict; --# derives Dict from *, --# Comp_Unit, --# Constant_Sym, --# Declaration, --# Exp_Node, --# The_Package, --# Value; procedure Add_Constant (Name : in LexTokenManager.Lex_String; The_Type : in Symbol; Static : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Value : in LexTokenManager.Lex_String; Exp_Is_Wellformed : in Boolean; Exp_Node : in ExaminerConstants.RefType; Constant_Sym : in out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Constant_Sym, --# Dict from Comp_Unit, --# Constant_Sym, --# Declaration, --# Dict, --# Exp_Is_Wellformed, --# Exp_Node, --# Name, --# Static, --# The_Type, --# Value & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Constant_Sym, --# Declaration, --# Dict, --# Exp_Is_Wellformed, --# Exp_Node, --# LexTokenManager.State, --# Name, --# Static, --# The_Type, --# Value; --# post IsConstant (Constant_Sym, Dict); procedure Add_Constant_Declaration (Name : in LexTokenManager.Lex_String; Type_Mark : in Symbol; Type_Reference : in Location; Value : in LexTokenManager.Lex_String; Exp_Is_Wellformed : in Boolean; Exp_Node : in ExaminerConstants.RefType; Static : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; TheConstant : out Symbol); --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# TheConstant from CommandLineData.Content, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# Exp_Is_Wellformed, --# Exp_Node, --# LexTokenManager.State, --# Name, --# Scope, --# Static, --# Type_Mark, --# Value & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# Exp_Is_Wellformed, --# Exp_Node, --# LexTokenManager.State, --# Name, --# Scope, --# Static, --# Type_Mark, --# Type_Reference, --# Value; --# post IsConstant (TheConstant, Dict); procedure AddConstantRulePolicy (TheConstant : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; 
Declaration : in Location; TheScope : in Scopes; ThePolicy : in Rule_Policies; TheRulePolicy : out Symbol); --# global in out Dict; --# derives Dict, --# TheRulePolicy from Comp_Unit, --# Declaration, --# Dict, --# TheConstant, --# ThePolicy, --# TheScope; --# post IsRulePolicy (TheRulePolicy, Dict); procedure Add_Variable_Declaration (Variable_Sym : in Symbol; The_Type : in Symbol; Initialized : in Boolean; Is_Aliased : in Boolean; Exp_Node : in ExaminerConstants.RefType; Type_Reference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Declaration_Symbol : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Declaration_Symbol, --# Dict from Comp_Unit, --# Declaration, --# Dict, --# Exp_Node, --# Initialized, --# Is_Aliased, --# Scope, --# The_Type, --# Variable_Sym & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# Exp_Node, --# Initialized, --# Is_Aliased, --# LexTokenManager.State, --# Scope, --# The_Type, --# Type_Reference, --# Variable_Sym; --# pre not Is_Null_Symbol (Variable_Sym); --# post IsDeclaration (Declaration_Symbol, Dict); -- This procedure may also be called to modify an existing -- variable. procedure Add_Variable (Name : in LexTokenManager.Lex_String; The_Type : in Symbol; Initialized : in Boolean; Is_Aliased : in Boolean; Exp_Node : in ExaminerConstants.RefType; Type_Reference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Declaration_Symbol : out Symbol; Variable_Symbol : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Declaration_Symbol, --# Dict, --# Variable_Symbol from Comp_Unit, --# Declaration, --# Dict, --# Exp_Node, --# Initialized, --# Is_Aliased, --# Name, --# Scope, --# The_Type & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# Exp_Node, --# Initialized, --# Is_Aliased, --# LexTokenManager.State, --# Name, --# Scope, --# The_Type, --# Type_Reference; --# post IsDeclaration (Declaration_Symbol, Dict) and IsVariable (Variable_Symbol, Dict); procedure AddVariableAddressClause (Variable : in Symbol); --# global in out Dict; --# derives Dict from *, --# Variable; -- could also be extended to write location to dictionary file as for other reps procedure AddVariablePragmaImport (Variable : in Symbol); --# global in out Dict; --# derives Dict from *, --# Variable; -- could also be extended to write location to dictionary file as for other reps procedure AddTypeSizeAttribute (TypeMark : in Symbol; SizeVal : in LexTokenManager.Lex_String); --# global in out Dict; --# derives Dict from *, --# SizeVal, --# TypeMark; procedure SetTypeAtomic (TypeMark : in Symbol); --# global in out Dict; --# derives Dict from *, --# TypeMark; procedure AddRecordSubcomponent (Prefix : in Symbol; Component : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Subcomponent : out Symbol); --# global in out Dict; --# derives Dict, --# Subcomponent from Component, --# Comp_Unit, --# Dict, --# Prefix; --# post IsSubcomponent (Subcomponent, Dict); procedure AddQuantifiedVariable (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; TypeMark : in Symbol; TheConstraint : in Symbol; Region : in Symbol; Variable : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# Variable from 
Comp_Unit, --# Declaration, --# Dict, --# Name, --# Region, --# TheConstraint, --# TypeMark & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Region, --# TheConstraint, --# TypeMark; --# post IsQuantifiedVariable (Variable, Dict); procedure Add_Type_Announcement (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; The_Package : in Symbol; The_Type : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# The_Type from Comp_Unit, --# Declaration, --# Dict, --# Name, --# The_Package & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# The_Package; --# post IsTypeMark (The_Type, Dict); procedure Add_Private_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; The_Package : in Symbol; Is_Limited : in Boolean; Is_Tagged_Type : in Boolean; Extends : in Symbol; The_Type : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# The_Type from Comp_Unit, --# Declaration, --# Dict, --# Extends, --# Is_Limited, --# Is_Tagged_Type, --# LexTokenManager.State, --# Name, --# The_Package & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# Extends, --# Is_Limited, --# Is_Tagged_Type, --# LexTokenManager.State, --# Name, --# The_Package; --# post IsTypeMark (The_Type, Dict); procedure Add_Abstract_Proof_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; The_Type : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope & --# The_Type from Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post IsTypeMark (The_Type, Dict); procedure Add_Default_Abstract_Proof_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; The_Type : out Symbol); --# global in out Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Dict, --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope & --# LexTokenManager.State from *, --# Name & --# The_Type from Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post IsTypeMark (The_Type, Dict); procedure Add_Enumeration_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; The_Type : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope & --# The_Type from Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post IsTypeMark (The_Type, Dict); procedure AddEnumerationLiteral (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Position : in LexTokenManager.Lex_String; The_Type : in 
Symbol; TheEnumerationLiteral : out Symbol); --# global in out Dict; --# derives Dict, --# TheEnumerationLiteral from Comp_Unit, --# Declaration, --# Dict, --# Name, --# Position, --# The_Type; --# post IsEnumerationLiteral (TheEnumerationLiteral, Dict); procedure Add_Representation_Clause (The_Type : in Symbol; Clause : in Location); --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Clause, --# Dict, --# LexTokenManager.State, --# The_Type; procedure AddEnumerationLiteralRepresentation (Literal : in Symbol; Code : in Integer); --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Code, --# Dict, --# LexTokenManager.State, --# Literal; procedure Add_Integer_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Scope : in Scopes; Context : in Contexts; The_Type : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# The_Type from Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Lower, --# Name, --# Scope, --# Upper & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post IsTypeMark (The_Type, Dict); procedure Add_Predef_Integer_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Scope : in Scopes; Context : in Contexts); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Context, --# Declaration, --# LexTokenManager.State, --# Lower, --# Name, --# Scope, --# Upper & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; procedure Add_Modular_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Modulus : in LexTokenManager.Lex_String; Scope : in Scopes; Context : in Contexts; The_Type : out Symbol); --# global in out Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Dict, --# The_Type from Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Modulus, --# Name, --# Scope & --# LexTokenManager.State from *, --# Modulus & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post IsTypeMark (The_Type, Dict); procedure Add_Floating_Point_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Error_Bound : in LexTokenManager.Lex_String; Scope : in Scopes; Context : in Contexts; The_Type : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# The_Type from Comp_Unit, --# Context, --# Declaration, --# Dict, --# Error_Bound, --# LexTokenManager.State, --# Lower, --# Name, --# Scope, --# Upper & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post 
IsTypeMark (The_Type, Dict); procedure Add_Predef_Floating_Point_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Error_Bound : in LexTokenManager.Lex_String; Scope : in Scopes; Context : in Contexts); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Context, --# Declaration, --# Error_Bound, --# LexTokenManager.State, --# Lower, --# Name, --# Scope, --# Upper & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; procedure Add_Protected_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; Mode : in Modes; Constrained : in Boolean; The_Type : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# The_Type from Comp_Unit, --# Constrained, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Mode, --# Name, --# Scope & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post IsTypeMark (The_Type, Dict); procedure Add_Task_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; Constrained : in Boolean; The_Type : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# The_Type from Comp_Unit, --# Constrained, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post IsTypeMark (The_Type, Dict); procedure Add_Task_Or_Protected_Subtype (Name : in LexTokenManager.Lex_String; Parent : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; The_Subtype : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Parent, --# Scope & --# The_Subtype from Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Parent, --# Scope; --# post IsTypeMark (The_Subtype, Dict); procedure SetTypeIsWellformed (TypeMark : in Symbol; Wellformed : in Boolean); --# global in out Dict; --# derives Dict from *, --# TypeMark, --# Wellformed; procedure SetBaseType (TypeMark, BaseType : in Symbol); --# global in out Dict; --# derives Dict from *, --# BaseType, --# TypeMark; procedure Add_Fixed_Point_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Error_Bound : in LexTokenManager.Lex_String; Scope : in Scopes; Context : in Contexts; The_Type : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# The_Type from Comp_Unit, --# Context, --# Declaration, --# Dict, --# Error_Bound, --# LexTokenManager.State, --# Lower, --# Name, 
--# Scope, --# Upper & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post IsTypeMark (The_Type, Dict); procedure Add_Array_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; Constrained : in Boolean; Component_Type : in Symbol; Component_Type_Reference : in Location; The_Type : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# The_Type from Component_Type, --# Comp_Unit, --# Constrained, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope & --# SPARK_IO.File_Sys from *, --# Component_Type, --# Component_Type_Reference, --# Comp_Unit, --# Constrained, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post IsTypeMark (The_Type, Dict); procedure AddArrayIndex (TheArrayType : in Symbol; IndexType : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; TheArrayIndex : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# TheArrayIndex from Comp_Unit, --# Declaration, --# Dict, --# IndexType, --# TheArrayType & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# IndexType, --# LexTokenManager.State, --# TheArrayType; --# post IsArrayIndex (TheArrayIndex, Dict); procedure Add_Record_Type (Name : in LexTokenManager.Lex_String; Is_Tagged_Type : in Boolean; Extends : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; The_Type : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# The_Type from Comp_Unit, --# Context, --# Declaration, --# Dict, --# Extends, --# Is_Tagged_Type, --# LexTokenManager.State, --# Name, --# Scope & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post IsTypeMark (The_Type, Dict); procedure AddRecordComponent (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; TheRecordType : in Symbol; TheComponentType : in Symbol; InheritedField : in Boolean; ComponentTypeReference : in Location); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# InheritedField, --# Name, --# TheComponentType, --# TheRecordType & --# SPARK_IO.File_Sys from *, --# ComponentTypeReference, --# Comp_Unit, --# Declaration, --# Dict, --# InheritedField, --# LexTokenManager.State, --# Name, --# TheComponentType, --# TheRecordType; -- Generic types procedure Add_Generic_Type (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; The_Type : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope & --# The_Type from Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope; --# post IsTypeMark (The_Type, Dict); procedure Set_Generic_Private_Type (The_Type : in Symbol; Is_Limited : in Boolean); --# global 
in out Dict; --# derives Dict from *, --# Is_Limited, --# The_Type; procedure Set_Generic_Discrete_Type (The_Type : in Symbol); --# global in out Dict; --# derives Dict from *, --# The_Type; procedure Set_Generic_Integer_Type (The_Type : in Symbol); --# global in out Dict; --# derives Dict from *, --# The_Type; procedure Set_Generic_Modular_Type (The_Type : in Symbol); --# global in out Dict; --# derives Dict from *, --# The_Type; procedure Set_Generic_Floating_Point_Type (The_Type : in Symbol); --# global in out Dict; --# derives Dict from *, --# The_Type; procedure Set_Generic_Fixed_Point_Type (The_Type : in Symbol); --# global in out Dict; --# derives Dict from *, --# The_Type; procedure Set_Generic_Array_Type (The_Type : in Symbol); --# global in out Dict; --# derives Dict from *, --# The_Type; procedure Add_Generic_Object (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; The_Type : in Symbol; The_Object_Sym : out Symbol); --# global in out Dict; --# derives Dict, --# The_Object_Sym from Comp_Unit, --# Declaration, --# Dict, --# Name, --# Scope, --# The_Type; --# post IsConstant (The_Object_Sym, Dict); procedure AddRecordComponentRepresentation (Component : in Symbol; Clause : in Location; RelativeAddress : in Natural; FirstBitPosition : in Natural; LastBitPosition : in Natural); --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Clause, --# Component, --# Dict, --# FirstBitPosition, --# LastBitPosition, --# LexTokenManager.State, --# RelativeAddress; procedure AddAlignmentClause (TheType : in Symbol; Clause : in Location); --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Clause, --# Dict, --# LexTokenManager.State, --# TheType; procedure AddLoop (Scope : in Scopes; Comp_Unit : in ContextManager.UnitDescriptors; LoopStatement : in Location; TheLoop : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# LoopStatement, --# Scope & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Dict, --# LexTokenManager.State, --# LoopStatement, --# Scope & --# TheLoop from Comp_Unit, --# Dict, --# LoopStatement; procedure AddLoopName (Name : in LexTokenManager.Lex_String; TheLoop : in Symbol); --# global in out Dict; --# derives Dict from *, --# Name, --# TheLoop; procedure AddLoopParameter (TheLoop : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Name : in LexTokenManager.Lex_String; TypeMark : in Symbol; StaticRange : in Boolean; IsReverse : in Boolean; TypeReference : in Location); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# IsReverse, --# Name, --# StaticRange, --# TheLoop, --# TypeMark & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# IsReverse, --# LexTokenManager.State, --# Name, --# StaticRange, --# TheLoop, --# TypeMark, --# TypeReference; procedure MarkLoopHasExits (TheLoop : in Symbol); --# global in out Dict; --# derives Dict from *, --# TheLoop; -- This is used by the VCG to store a Cells.Cell representing the exit expression of -- a for loop. Used by DAG.BuildGraph to build the default loop invariant. 
procedure SetLoopExitExpn (ForLoop : in Symbol; Expn : in Natural); --# global in out Dict; --# derives Dict from *, --# Expn, --# ForLoop; -- This is used by the VCG to store a Cells.Cell representing the entry expression of -- a for loop. Used by DAG.BuildGraph to build the default loop invariant. procedure SetLoopEntryExpn (ForLoop : in Symbol; Expn : in Natural); --# global in out Dict; --# derives Dict from *, --# Expn, --# ForLoop; -- Following is used by VCG in for loop modelling to create a variable uniquely associated -- with an original variable and a loop. The original variable is used in the exit expression -- of a for loop and the new one is used to store the value on entry to the loop so as to -- freeze the loop bounds as required by Ada semantics. procedure IdempotentCreateLoopOnEntryVariable (OriginalVariable : in Symbol; TheLoop : in Symbol; OnEntryVariable : out Symbol); --# global in out Dict; --# derives Dict, --# OnEntryVariable from Dict, --# OriginalVariable, --# TheLoop; -- Provides access to variable created by previous call. function GetLoopOnEntryVariable (OriginalVariable : Symbol; TheLoop : Symbol) return Symbol; --# global in Dict; procedure Add_Full_Range_Subtype (Name : in LexTokenManager.Lex_String; Parent : in Symbol; Parent_Reference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; The_Subtype : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# The_Subtype from Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Parent, --# Scope & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Parent, --# Parent_Reference, --# Scope; --# post IsTypeMark (The_Subtype, Dict); procedure Add_Enumeration_Subtype (Name : in LexTokenManager.Lex_String; Static : in Boolean; Parent : in Symbol; Parent_Reference : in Location; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; The_Subtype : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# The_Subtype from Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Lower, --# Name, --# Parent, --# Scope, --# Static, --# Upper & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Parent, --# Parent_Reference, --# Scope; --# post IsTypeMark (The_Subtype, Dict); procedure Add_Integer_Subtype (Name : in LexTokenManager.Lex_String; Static : in Boolean; Parent : in Symbol; Parent_Reference : in Location; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; The_Subtype : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# The_Subtype from Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Lower, --# Name, --# Parent, --# Scope, --# Static, --# Upper & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Parent, --# Parent_Reference, --# Scope; --# post IsTypeMark 
(The_Subtype, Dict); procedure Add_Modular_Subtype (Name : in LexTokenManager.Lex_String; Parent : in Symbol; Parent_Reference : in Location; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; The_Subtype : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# The_Subtype from Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Lower, --# Name, --# Parent, --# Scope, --# Upper & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Parent, --# Parent_Reference, --# Scope; --# post IsTypeMark (The_Subtype, Dict); procedure Add_Floating_Point_Subtype (Name : in LexTokenManager.Lex_String; Static : in Boolean; Parent : in Symbol; Parent_Reference : in Location; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Error_Bound : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; The_Subtype : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# The_Subtype from Comp_Unit, --# Context, --# Declaration, --# Dict, --# Error_Bound, --# LexTokenManager.State, --# Lower, --# Name, --# Parent, --# Scope, --# Static, --# Upper & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Parent, --# Parent_Reference, --# Scope; --# post IsTypeMark (The_Subtype, Dict); procedure Add_Fixed_Point_Subtype (Name : in LexTokenManager.Lex_String; Static : in Boolean; Parent : in Symbol; Parent_Reference : in Location; Lower : in LexTokenManager.Lex_String; Upper : in LexTokenManager.Lex_String; Error_Bound : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; The_Subtype : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# The_Subtype from Comp_Unit, --# Context, --# Declaration, --# Dict, --# Error_Bound, --# LexTokenManager.State, --# Lower, --# Name, --# Parent, --# Scope, --# Static, --# Upper & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Parent, --# Parent_Reference, --# Scope; --# post IsTypeMark (The_Subtype, Dict); -- Adds an array subtype that has a (possibly static) constraint. 
For example - -- addition of a constrained subtype of an unconstrained array type procedure Add_Array_Subtype (Name : in LexTokenManager.Lex_String; Parent : in Symbol; Parent_Reference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes; Context : in Contexts; Static : in Boolean; The_Subtype : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# The_Subtype from Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Parent, --# Scope, --# Static & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Parent, --# Parent_Reference, --# Scope; --# post IsTypeMark (The_Subtype, Dict); procedure AddAssertStatement (Statement : in Location); --# global in Dict; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# Statement; procedure AddCheckStatement (Statement : in Location); --# global in Dict; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# Statement; procedure AddPrecondition (Abstraction : in Abstractions; Subprogram : in Symbol; Predicate : in ExaminerConstants.RefType; Precondition : in Location); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Abstraction, --# Predicate, --# Subprogram & --# SPARK_IO.File_Sys from *, --# Abstraction, --# Dict, --# LexTokenManager.State, --# Precondition, --# Predicate, --# Subprogram; procedure AddPostcondition (Abstraction : in Abstractions; Subprogram : in Symbol; Predicate : in ExaminerConstants.RefType; Postcondition : in Location); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Abstraction, --# Predicate, --# Subprogram & --# SPARK_IO.File_Sys from *, --# Abstraction, --# Dict, --# LexTokenManager.State, --# Postcondition, --# Predicate, --# Subprogram; procedure AddSubprogram (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Specification : in Location; Scope : in Scopes; Context : in Contexts; Subprogram : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# Subprogram from Comp_Unit, --# Context, --# Dict, --# Name, --# Scope, --# Specification & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Dict, --# LexTokenManager.State, --# Name, --# Scope, --# Specification; --# post IsSubprogram (Subprogram, Dict); function ActualOfGenericFormalType (TheGenericFormalSym : Symbol; ActualSubprogramSym : Symbol) return Symbol; --# global in Dict; --# return S => IsTypeMark (S, Dict); -- returns the symbol that is the generic actual that matches the given generic formal for a -- given instantiation function ActualOfGenericFormalObject (TheGenericFormalSym : Symbol; ActualSubprogramSym : Symbol) return Symbol; --# global in Dict; --# return S => IsConstant (S, Dict); -- returns the symbol that is the generic actual that matches the given generic formal for a -- given instantiation function ActualOfGenericParameter (TheParameter : Symbol; ActualSubprogramSym : Symbol) return Symbol; --# global in Dict; --# return S => IsSubprogramParameter (S, Dict); -- returns the symbol of the actual parameter that has the same position number in the instantiated -- subprogram as the parameter symbol has in the generic from which it comes
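-- A minimal usage sketch of the formal-to-actual mapping above, assuming
-- hypothetical symbols The_Generic (a generic subprogram) and
-- The_Instantiation (one of its instantiations) obtained from other queries
-- in this package:
--
--    for I in 1 .. GetNumberOfSubprogramParameters (The_Generic) loop
--       declare
--          Formal : constant Symbol :=
--            GetSubprogramParameter (The_Generic, I);
--          Actual : constant Symbol :=
--            ActualOfGenericParameter (Formal, The_Instantiation);
--       begin
--          null; -- e.g. compare GetType (Formal) with GetType (Actual)
--       end;
--    end loop;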
function ActualOfParameterConstraint (TheParameter : Symbol; ActualSubprogramSym : Symbol) return Symbol; --# global in Dict; --# return S => IsParameterConstraint (S, Dict); procedure AddSubprogramInstantiation (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; TheGeneric : in Symbol; Specification : in Location; Scope : in Scopes; Context : in Contexts; Subprogram : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# Subprogram from Comp_Unit, --# Context, --# Declaration, --# Dict, --# Name, --# Scope, --# Specification, --# TheGeneric & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Context, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Scope, --# Specification, --# TheGeneric; --# post IsSubprogram (Subprogram, Dict); function IsInstantiation (PackageOrSubProgram : Symbol) return Boolean; --# global in Dict; function GetGenericOfInstantiation (PackageOrSubProgram : Symbol) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsSubprogram (S, Dict)); -- Copies the parameters and return types from Generic to Instantiation substituting -- actual types as it goes (using generic association linked list of Instantiation) procedure InstantiateSubprogramParameters (ActualSubprogramSym : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# ActualSubprogramSym, --# Comp_Unit, --# Declaration & --# SPARK_IO.File_Sys from *, --# ActualSubprogramSym, --# Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State; procedure SetSubprogramSignatureNotWellformed (Abstraction : in Abstractions; Subprogram : in Symbol); --# global in out Dict; --# derives Dict from *, --# Abstraction, --# Subprogram; procedure SetSubprogramIsEntry (Subprogram : in Symbol); --# global in out Dict; --# derives Dict from *, --# Subprogram; procedure Set_Package_Generic_Unit (Pack_Sym : in Symbol; Generic_Unit : in Symbol); --# global in out Dict; --# derives Dict from *, --# Generic_Unit, --# Pack_Sym; procedure Set_Subprogram_Generic_Unit (Subprogram : in Symbol; Generic_Unit : in Symbol); --# global in out Dict; --# derives Dict from *, --# Generic_Unit, --# Subprogram; procedure Set_Generic_Unit_Owning_Package (Generic_Unit : in Symbol; Pack_Sym : in Symbol); --# global in out Dict; --# derives Dict from *, --# Generic_Unit, --# Pack_Sym; procedure Set_Generic_Unit_Owning_Subprogram (Generic_Unit : in Symbol; Subprogram : in Symbol); --# global in out Dict; --# derives Dict from *, --# Generic_Unit, --# Subprogram; -- Call this to record the fact that a procedure or task has an explicit -- derives annotation. 
procedure SetHasDerivesAnnotation (Task_Or_Proc : in Symbol); --# global in out Dict; --# derives Dict from *, --# Task_Or_Proc; procedure SetSubprogramEntryBarrier (Subprogram, TheBarrier : in Symbol); --# global in out Dict; --# derives Dict from *, --# Subprogram, --# TheBarrier; procedure SetIsInterruptHandler (Subprogram : in Symbol); --# global in out Dict; --# derives Dict from *, --# Subprogram; procedure SetHasDelayProperty (TheProcedure : in Symbol); --# global in out Dict; --# derives Dict from *, --# TheProcedure; procedure SetUsesUnprotectedVariables (Sym : in Symbol); --# global in out Dict; --# derives Dict from *, --# Sym; procedure SetUnprotectedReference (Variable : in Symbol; ByThread : in Symbol); --# global in out Dict; --# derives Dict from *, --# ByThread, --# Variable; procedure SetSuspendsReference (Variable : in Symbol; ByThread : in Symbol); --# global in out Dict; --# derives Dict from *, --# ByThread, --# Variable; procedure SetVirtualElementSeenByOwner (TheVariable : in Symbol); --# global in out Dict; --# derives Dict from *, --# TheVariable; procedure SetPriorityProperty (OwnVariable : in Symbol; TheValue : in LexTokenManager.Lex_String); --# global in out Dict; --# derives Dict from *, --# OwnVariable, --# TheValue; procedure SetIntegrityProperty (OwnVariable : in Symbol; TheValue : in LexTokenManager.Lex_String); --# global in out Dict; --# derives Dict from *, --# OwnVariable, --# TheValue; procedure SetIsSuspendable (Variable : in Symbol); --# global in out Dict; --# derives Dict from *, --# Variable; procedure SetHasInterruptProperty (Variable : in Symbol); --# global in out Dict; --# derives Dict from *, --# Variable; procedure MarkAccountsForDelay (TheProcedure : in Symbol); --# global in out Dict; --# derives Dict from *, --# TheProcedure; procedure MarkAccountsForSuspendsListItem (TheTaskOrProc : in Symbol; ThePOorSO : in Symbol); --# global in out Dict; --# derives Dict from *, --# ThePOorSO, --# TheTaskOrProc; procedure MarkAccountsForSuspendsListItems (TheTaskOrProc : in Symbol; TheItemsInProcedure : in Symbol); --# global in out Dict; --# derives Dict from *, --# TheItemsInProcedure, --# TheTaskOrProc; procedure SetProtectedTypeElementsHidden (TheProtectedType : in Symbol); --# global in out Dict; --# derives Dict from *, --# TheProtectedType; procedure SetProtectedTypeEntry (TheProtectedType, TheEntry : in Symbol); --# global in out Dict; --# derives Dict from *, --# TheEntry, --# TheProtectedType; procedure SetTypeHasPragma (TheProtectedOrTaskType : in Symbol; ThePragma : in RavenscarPragmas); --# global in out Dict; --# derives Dict from *, --# ThePragma, --# TheProtectedOrTaskType; procedure SetTypePragmaValue (TheProtectedOrTaskType : in Symbol; ThePragma : in RavenscarPragmasWithValue; TheValue : in LexTokenManager.Lex_String); --# global in out Dict; --# derives Dict from *, --# ThePragma, --# TheProtectedOrTaskType, --# TheValue; procedure SetMainProgramPriority (TheValue : in LexTokenManager.Lex_String); --# global in out Dict; --# derives Dict from *, --# TheValue; procedure AddReturnType (TheFunction : in Symbol; TypeMark : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; TypeReference : in Location); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# TheFunction, --# TypeMark, --# TypeReference & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Dict, --# LexTokenManager.State, --# TheFunction, --# TypeMark, --# TypeReference; procedure 
AddImplicitReturnVariable (Abstraction : in Abstractions; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Name : in LexTokenManager.Lex_String; TheFunction : in Symbol; Variable : out Symbol); --# global in out Dict; --# derives Dict, --# Variable from Abstraction, --# Comp_Unit, --# Declaration, --# Dict, --# Name, --# TheFunction; --# post IsImplicitReturnVariable (Variable, Dict); procedure AddSubprogramParameter (Name : in LexTokenManager.Lex_String; Subprogram : in Symbol; TypeMark : in Symbol; TypeReference : in Location; Mode : in Modes; Comp_Unit : in ContextManager.UnitDescriptors; Specification : in Location); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Mode, --# Name, --# Specification, --# Subprogram, --# TypeMark & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Dict, --# LexTokenManager.State, --# Mode, --# Name, --# Specification, --# Subprogram, --# TypeMark, --# TypeReference; procedure Add_Generic_Formal_Parameter (Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Generic_Unit : in Symbol; The_Type : in Symbol; The_Object : in Symbol); --# global in out Dict; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# Generic_Unit, --# The_Object, --# The_Type; procedure AddGenericTypeAssociation (SubprogramOrPackage : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; FormalSym : in Symbol; ActualSym : in Symbol); --# global in out Dict; --# derives Dict from *, --# ActualSym, --# Comp_Unit, --# Declaration, --# FormalSym, --# SubprogramOrPackage; procedure AddGenericObjectAssociation (SubprogramOrPackage : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; FormalSym : in Symbol; ActualSym : in Symbol); --# global in out Dict; --# derives Dict from *, --# ActualSym, --# Comp_Unit, --# Declaration, --# FormalSym, --# SubprogramOrPackage; procedure AddKnownDiscriminant (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; ProtectedOrTaskType : in Symbol; TypeMark : in Symbol); --# global in out Dict; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# Name, --# ProtectedOrTaskType, --# TypeMark; procedure SetDiscriminantSetsPriority (TheDiscriminant : in Symbol); --# global in out Dict; --# derives Dict from *, --# TheDiscriminant; procedure AddDiscriminantConstraintStaticValue (ProtectedOrTaskSubtype : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; TheValue : in LexTokenManager.Lex_String); --# global in out Dict; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# ProtectedOrTaskSubtype, --# TheValue; procedure AddDiscriminantConstraintAccessedObject (ProtectedOrTaskSubtype : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; TheObject : in Symbol); --# global in out Dict; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# ProtectedOrTaskSubtype, --# TheObject; procedure SetSubtypePriority (ProtectedOrTaskSubtype : in Symbol; ThePriority : in LexTokenManager.Lex_String); --# global in out Dict; --# derives Dict from *, --# ProtectedOrTaskSubtype, --# ThePriority; procedure AddBody (CompilationUnit : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; TheBody : in Location; Hidden : in Boolean); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# 
CompilationUnit, --# Comp_Unit, --# Hidden, --# TheBody & --# SPARK_IO.File_Sys from *, --# CompilationUnit, --# Comp_Unit, --# Dict, --# Hidden, --# LexTokenManager.State, --# TheBody; procedure AddBodyStub (CompilationUnit : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; BodyStub : in Location); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# BodyStub, --# CompilationUnit, --# Comp_Unit & --# SPARK_IO.File_Sys from *, --# BodyStub, --# CompilationUnit, --# Comp_Unit, --# Dict, --# LexTokenManager.State; procedure AddMainProgram (Subprogram : in Symbol; Annotation : in Location); --# global in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Subprogram & --# SPARK_IO.File_Sys from *, --# Annotation, --# Dict, --# Subprogram; procedure AddDependencyRelation (Abstraction : in Abstractions; TheProcedure : in Symbol; DependencyRelation : in Location); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Abstraction, --# TheProcedure & --# SPARK_IO.File_Sys from *, --# Abstraction, --# DependencyRelation, --# Dict, --# LexTokenManager.State, --# TheProcedure; procedure RenameSubprogram (Subprogram : in Symbol; SubprogramReference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# Scope, --# Subprogram & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# Scope, --# Subprogram, --# SubprogramReference; procedure RenameUnaryOperator (Name : in SP_Symbols.SP_Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Operand : in Symbol; Scope : in Scopes; Op_Sym : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# Op_Sym from Comp_Unit, --# Declaration, --# Dict, --# Name, --# Operand, --# Scope & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# Name, --# Operand, --# Scope; --# post IsOperator (Op_Sym, Dict); procedure RenameBinaryOperator (Name : in SP_Symbols.SP_Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Left : in Symbol; Right : in Symbol; Scope : in Scopes; Op_Sym : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# Op_Sym from Comp_Unit, --# Declaration, --# Dict, --# Left, --# Name, --# Right, --# Scope & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# Left, --# LexTokenManager.State, --# Name, --# Right, --# Scope; --# post IsOperator (Op_Sym, Dict); procedure AddGlobalAnnotation (Abstraction : in Abstractions; Subprogram : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Annotation : in Location); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Abstraction, --# Annotation, --# Comp_Unit, --# Subprogram & --# SPARK_IO.File_Sys from *, --# Abstraction, --# Annotation, --# Comp_Unit, --# Dict, --# LexTokenManager.State, --# Subprogram; procedure AddGlobalVariable (Abstraction : in Abstractions; Subprogram : in Symbol; Variable : in Symbol; Mode : in Modes; PrefixNeeded : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; 
VariableReference : in Location; Global_Variable_Sym : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# Global_Variable_Sym from Abstraction, --# Comp_Unit, --# Dict, --# Mode, --# PrefixNeeded, --# Subprogram, --# Variable, --# VariableReference & --# SPARK_IO.File_Sys from *, --# Abstraction, --# Comp_Unit, --# Dict, --# LexTokenManager.State, --# Mode, --# PrefixNeeded, --# Subprogram, --# Variable, --# VariableReference; --# post IsGlobalVariable (Global_Variable_Sym, Dict); procedure AddExport (Abstraction : in Abstractions; TheProcedure : in Symbol; TheExport : in Symbol; ExportReference : in Location; Annotation : in Location); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Abstraction, --# TheExport, --# TheProcedure & --# SPARK_IO.File_Sys from *, --# Abstraction, --# Annotation, --# Dict, --# ExportReference, --# LexTokenManager.State, --# TheExport, --# TheProcedure; procedure AddDependency (Abstraction : in Abstractions; Comp_Unit : in ContextManager.UnitDescriptors; TheProcedure : in Symbol; TheExport : in Symbol; TheImport : in Symbol; ImportReference : in Location); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Abstraction, --# Comp_Unit, --# ImportReference, --# TheExport, --# TheImport, --# TheProcedure & --# SPARK_IO.File_Sys from *, --# Abstraction, --# Comp_Unit, --# Dict, --# ImportReference, --# LexTokenManager.State, --# TheExport, --# TheImport, --# TheProcedure; procedure AddVirtualElement (ToProtectedType : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; TheVirtualElement : in Symbol; TheOwner : in Symbol); --# global in out Dict; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# TheOwner, --# TheVirtualElement, --# ToProtectedType; procedure AddPOorSOToSuspendsList (TheTaskOrProc : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; ThePOorSO : in Symbol); --# global in out Dict; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# ThePOorSO, --# TheTaskOrProc; procedure AddInterruptStreamMapping (Subject : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; TheInterruptHandler : in LexTokenManager.Lex_String; TheInterruptStream : in LexTokenManager.Lex_String); --# global in out Dict; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# Subject, --# TheInterruptHandler, --# TheInterruptStream; procedure CopyDependencyList (Abstraction : in Abstractions; TheProcedure : in Symbol; FromExport : in Symbol; ToExport : in Symbol); --# global in out Dict; --# derives Dict from *, --# Abstraction, --# FromExport, --# TheProcedure, --# ToExport; procedure ForceImport (Abstraction : in Abstractions; TheProcedure : in Symbol; TheImport : in Symbol; ImportReference : in Location); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Abstraction, --# TheImport, --# TheProcedure & --# SPARK_IO.File_Sys from *, --# Abstraction, --# Dict, --# ImportReference, --# LexTokenManager.State, --# TheImport, --# TheProcedure; procedure Add_Package (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Specification : in Location; Scope : in Scopes; ThePackage : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives 
Dict, --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Dict, --# LexTokenManager.State, --# Name, --# Scope, --# Specification & --# ThePackage from Comp_Unit, --# Dict, --# LexTokenManager.State, --# Name, --# Scope, --# Specification; --# post IsPackage (ThePackage, Dict); procedure AddChildPackage (TheParent : in Symbol; Sort : in PackageSort; Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Specification : in Location; Scope : in Scopes; ThePackage : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Dict, --# LexTokenManager.State, --# Name, --# Scope, --# Sort, --# Specification, --# TheParent & --# ThePackage from Comp_Unit, --# Dict, --# LexTokenManager.State, --# Name, --# Scope, --# Sort, --# Specification, --# TheParent; --# post IsPackage (ThePackage, Dict); procedure AddPrivatePackage (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Specification : in Location; Scope : in Scopes; ThePackage : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Dict, --# LexTokenManager.State, --# Name, --# Scope, --# Specification & --# ThePackage from Comp_Unit, --# Dict, --# LexTokenManager.State, --# Name, --# Scope, --# Specification; --# post IsPackage (ThePackage, Dict); procedure AddPrivatePart (ThePackage : in Symbol; PrivatePart : in Location; Hidden : in Boolean); --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# Hidden, --# LexTokenManager.State, --# PrivatePart, --# ThePackage; procedure AddOwnAnnotation (ThePackage : in Symbol; Annotation : in Location); --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Annotation, --# Dict, --# LexTokenManager.State, --# ThePackage; procedure Add_Own_Variable (Name : in LexTokenManager.Lex_String; The_Package : in Symbol; Mode : in Modes; Is_Protected : in Boolean; Is_Interrupt_Stream : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Var_Symbol : out Symbol); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict, --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# Is_Interrupt_Stream, --# Is_Protected, --# LexTokenManager.State, --# Mode, --# Name, --# The_Package & --# Var_Symbol from Comp_Unit, --# Declaration, --# Dict, --# Is_Interrupt_Stream, --# Is_Protected, --# LexTokenManager.State, --# Mode, --# Name, --# The_Package; --# post IsVariable (Var_Symbol, Dict); procedure AddOwnVariableType (OwnVariable : in Symbol; TypeMark : in Symbol; TypeReference : in Location); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# OwnVariable, --# TypeMark & --# SPARK_IO.File_Sys from *, --# Dict, --# LexTokenManager.State, --# OwnVariable, --# TypeMark, --# TypeReference; procedure AddOwnTask (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; TypeMark : in Symbol; ThePackage : in Symbol; TaskSym : out Symbol); --# global in out Dict; --# derives Dict, --# TaskSym from Comp_Unit, --# Declaration, --# Dict, --# Name, --# ThePackage, --# TypeMark; --# post IsVariable (TaskSym, Dict); 
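-- A minimal calling sketch for the group of operations above, assuming
-- hypothetical values supplied by the caller: P_Name and State_Name
-- (Lex_Strings), Unit (a unit descriptor), Spec_Loc and Decl_Loc (Locations),
-- Lib_Scope (a Scopes value), The_Mode (a Modes value) and State_Type_Sym
-- (a type mark symbol); P_Sym and State_Sym receive the created symbols:
--
--    Add_Package (Name          => P_Name,
--                 Comp_Unit     => Unit,
--                 Specification => Spec_Loc,
--                 Scope         => Lib_Scope,
--                 ThePackage    => P_Sym);
--    Add_Own_Variable (Name                => State_Name,
--                      The_Package         => P_Sym,
--                      Mode                => The_Mode,
--                      Is_Protected        => False,
--                      Is_Interrupt_Stream => False,
--                      Comp_Unit           => Unit,
--                      Declaration         => Decl_Loc,
--                      Var_Symbol          => State_Sym);
--    AddOwnVariableType (OwnVariable   => State_Sym,
--                        TypeMark      => State_Type_Sym,
--                        TypeReference => Decl_Loc);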
procedure AddRefinementDefinition (ThePackage : in Symbol; Annotation : in Location); --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Annotation, --# Dict, --# LexTokenManager.State, --# ThePackage; procedure AddConstituent (Name : in LexTokenManager.Lex_String; Subject : in Symbol; Mode : in Modes; SubjectReference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; ConstituentReference : in Location); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# ConstituentReference, --# Mode, --# Name, --# Subject & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# ConstituentReference, --# Dict, --# LexTokenManager.State, --# Name, --# Subject, --# SubjectReference; procedure AddConstituentSym (ConstituentVariable : in Symbol; Subject : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; ConstituentReference : in Location); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# ConstituentReference, --# ConstituentVariable, --# Subject & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# ConstituentReference, --# ConstituentVariable, --# Dict, --# LexTokenManager.State, --# Subject; procedure AddEmbeddedConstituent (PackageName : in LexTokenManager.Lex_String; VariableName : in LexTokenManager.Lex_String; Subject : in Symbol; Mode : in Modes; SubjectReference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; ConstituentReference : in Location); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# ConstituentReference, --# LexTokenManager.State, --# Mode, --# PackageName, --# Subject, --# VariableName & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# ConstituentReference, --# Dict, --# LexTokenManager.State, --# PackageName, --# Subject, --# SubjectReference, --# VariableName; procedure AddChildConstituent (Variable : in Symbol; Subject : in Symbol; Mode : in Modes; SubjectReference : in Location; Comp_Unit : in ContextManager.UnitDescriptors; ConstituentReference : in Location); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# ConstituentReference, --# Mode, --# Subject, --# Variable & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# ConstituentReference, --# Dict, --# LexTokenManager.State, --# Subject, --# SubjectReference, --# Variable; procedure AddInitializationSpecification (ThePackage : in Symbol; Annotation : in Location); --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Annotation, --# Dict, --# LexTokenManager.State, --# ThePackage; procedure AddInitializedOwnVariable (Variable : in Symbol; VariableReference : in Location); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Variable & --# SPARK_IO.File_Sys from *, --# Dict, --# LexTokenManager.State, --# Variable, --# VariableReference; procedure AddPackageInitialization (ThePackage : in Symbol; Initialization : in Location; Hidden : in Boolean); --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# Hidden, --# Initialization, --# LexTokenManager.State, --# ThePackage; procedure AddWithReference (The_Visibility : in Visibility; 
The_Unit : in Symbol; The_Withed_Symbol : in Symbol; Explicit : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Already_Present : out Boolean); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Already_Present from Dict, --# Explicit, --# The_Unit, --# The_Visibility, --# The_Withed_Symbol & --# Dict from *, --# Comp_Unit, --# Declaration, --# Explicit, --# The_Unit, --# The_Visibility, --# The_Withed_Symbol & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# Explicit, --# LexTokenManager.State, --# The_Unit, --# The_Visibility, --# The_Withed_Symbol; procedure AddUseTypeReference (The_Visibility : in Visibility; The_Unit : in Symbol; TheType : in Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Comp_Unit, --# Declaration, --# TheType, --# The_Unit, --# The_Visibility & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# LexTokenManager.State, --# TheType, --# The_Unit, --# The_Visibility; procedure AddInheritsReference (The_Unit : in Symbol; The_Inherited_Symbol : in Symbol; Explicit : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Already_Present : out Boolean); --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Already_Present from Dict, --# Explicit, --# The_Inherited_Symbol, --# The_Unit & --# Dict from *, --# Comp_Unit, --# Declaration, --# Explicit, --# The_Inherited_Symbol, --# The_Unit & --# SPARK_IO.File_Sys from *, --# Comp_Unit, --# Declaration, --# Dict, --# Explicit, --# LexTokenManager.State, --# The_Inherited_Symbol, --# The_Unit; procedure SetPackageElaborateBodyFound (ThePackage : in Symbol); --# global in out Dict; --# derives Dict from *, --# ThePackage; procedure SetPackageAsExtendingAnother (ThePackage : in Symbol; ThePackageItExtends : in Symbol); --# global in out Dict; --# derives Dict from *, --# ThePackage, --# ThePackageItExtends; procedure AddWriteReference (Variable, CompilationUnit : in Symbol; Reference : in Location); --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CompilationUnit, --# Dict, --# LexTokenManager.State, --# Reference, --# Variable; procedure AddReadReference (Object, CompilationUnit : in Symbol; Reference : in Location); --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CompilationUnit, --# Dict, --# LexTokenManager.State, --# Object, --# Reference; procedure AddSubprogramCall (Subprogram, CompilationUnit : in Symbol; Call : in Location); --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Call, --# CompilationUnit, --# Dict, --# LexTokenManager.State, --# Subprogram; procedure AddOtherReference (Item, CompilationUnit : in Symbol; Reference : in Location); --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CompilationUnit, --# Dict, --# Item, --# LexTokenManager.State, --# Reference; procedure AddUsesUncheckedConversion (TheUnit : in Symbol); --# global in out Dict; --# derives Dict from *, --# TheUnit; procedure AddAssignsFromExternal (TheUnit : in Symbol); --# global in out Dict; --# derives Dict from 
*, --# TheUnit; -------------------------------------------------------------------------------- -- ACCESS FUNCTIONS -- -------------------------------------------------------------------------------- function SymbolRef (Item : Symbol) return ExaminerConstants.RefType; function ConvertSymbolRef (Ref : ExaminerConstants.RefType) return Symbol; function GetSimpleName (Item : Symbol) return LexTokenManager.Lex_String; --# global in Dict; function GetType (TheSymbol : Symbol) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsTypeMark (S, Dict)); function Get_Unconstrained_Array_Index (TheSymbol : Symbol) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsParameterConstraint (S, Dict)); -- There is no need for a GetUnaryOperatorType function since the result type -- of every SPARK unary operator is the same as the operand type function GetAccess (TheProtectedType : Symbol) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsTypeMark (S, Dict)); function GetScalarAttributeType (Name : LexTokenManager.Lex_String; TypeMark : Symbol) return Symbol; --# global in Dict; --# in LexTokenManager.State; --# return S => (Is_Null_Symbol (S) or IsTypeMark (S, Dict)); function GetArrayAttributeType (Name : LexTokenManager.Lex_String; TypeMark : Symbol; Dimension : Positive) return Symbol; --# global in Dict; --# in LexTokenManager.State; --# return S => (Is_Null_Symbol (S) or IsTypeMark (S, Dict)); function GetScalarAttributeValue (Base : Boolean; Name : LexTokenManager.Lex_String; TypeMark : Symbol) return LexTokenManager.Lex_String; --# global in Dict; --# in LexTokenManager.State; function GetArrayAttributeValue (Name : LexTokenManager.Lex_String; TypeMark : Symbol; Dimension : Positive) return LexTokenManager.Lex_String; --# global in Dict; --# in LexTokenManager.State; function GetEnumerationLiteral (EnumerationType : Symbol; Position : LexTokenManager.Lex_String) return Symbol; --# global in Dict; --# in LexTokenManager.State; function GetPositionNumber (Literal : Symbol) return LexTokenManager.Lex_String; --# global in Dict; function GetRecordType (Component : Symbol) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsTypeMark (S, Dict)); -- Predefined types ------------------------------------------------------------- function GetUnknownTypeMark return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsTypeMark (S, Dict)); function GetUniversalIntegerType return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsTypeMark (S, Dict)); function GetUniversalRealType return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsTypeMark (S, Dict)); function GetUniversalFixedType return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsTypeMark (S, Dict)); function IsUniversalFixedType (TheSymbol : Symbol) return Boolean; --# global in Dict; function GetPredefinedBooleanType return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsTypeMark (S, Dict)); function IsPredefinedBooleanType (TheSymbol : Symbol) return Boolean; --# global in Dict; -- The NullVariable is an out stream used as a data sink for null derives etc. 
function GetNullVariable return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsVariable (S, Dict)); function Is_Null_Variable (TheVariable : Symbol) return Boolean; --# global in Dict; -- ThePartition is a subprogram symbol (predefined when Ravenscar profile is selected) -- that is used as a place with which to associate the partition annotation. function GetThePartition return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsSubprogram (S, Dict)); function GetFalse return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsEnumerationLiteral (S, Dict)); function GetTrue return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsEnumerationLiteral (S, Dict)); function GetPredefinedIntegerType return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsTypeMark (S, Dict)); function GetPredefinedLongIntegerType return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsTypeMark (S, Dict)); function GetPredefinedCharacterType return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsTypeMark (S, Dict)); function GetPredefinedStringType return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsTypeMark (S, Dict)); function GetPredefinedNaturalSubtype return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsTypeMark (S, Dict)); function GetPredefinedPositiveSubtype return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsTypeMark (S, Dict)); function GetPredefinedTimeType return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsTypeMark (S, Dict)); function IsPredefinedTimeSpanType (TheSymbol : Symbol) return Boolean; --# global in Dict; -- Note: the following function is to do with getting the true base type -- of a type which is derived (from say root_integer). -- Use GetRootType to get the "first named subtype" of a subtype function GetBaseType (TypeMark : Symbol) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsTypeMark (S, Dict)); function GetRootOfExtendedType (TypeMark : Symbol) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsTypeMark (S, Dict)); -- Applies to types extended from tagged types. Gives the immediately -- Returns the type at the root of the "Parent" relation between -- subtypes and types. -- -- For signed integer and floating point (sub)types, -- the root type is the first subtype of the -- anonymous type chosen by the compiler when a signed integer -- or floating point type is declared. For example: -- type T is range 1 .. 10; -- subtype S is T range 2 .. 8; -- then -- GetRootType (T) = T -- identity -- GetRootType (S) = T -- -- For array and record subtypes: -- type A is array (...) of ... ; -- full type declaration -- subtype A2 is A; -- then -- GetRootType (A) = A -- identity -- GetRootType (A2) = A function GetRootType (TypeMark : Symbol) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsTypeMark (S, Dict)); -- Returns the number of Dimensions of the given array -- type or subtype. GetRootType (see above) is -- used to step over any subtypes until the full -- parent type is found. function GetNumberOfDimensions (TypeMark : Symbol) return Positive; --# global in Dict; -- Returns the index type of the given Dimension for the given array -- type or subtype. In the case of a full-range array subtype, the index -- type is obtained from the first constrained subtype.
function GetArrayIndex (TypeMark : Symbol; Dimension : Positive) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsTypeMark (S, Dict)); function GetArrayComponent (TypeMark : Symbol) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsTypeMark (S, Dict)); function GetNumberOfNonExtendedComponents (TheRecordType : Symbol) return Natural; --# global in Dict; -- Returns the number of record components in a record but does not traverse a group of -- records modelling an extended tagged record. The count includes the Inherit field of -- an extended record. function GetNumberOfActualComponents (TheRecordType : Symbol) return Natural; --# global in Dict; -- As above but ignore Inherit fields of extended tagged records function GetNumberOfComponents (TheRecordType : Symbol) return Natural; --# global in Dict; -- if the type is extended we get all the fields whether locally declared -- or inherited function GetNonExtendedRecordComponent (TheRecordType : Symbol; Number : Positive) return Symbol; --# global in Dict; -- Returns field N of a single record (i.e. ignores structured composition of records used to -- model tagged extended records). If the record is an extension, it can return the "Inherit" field function GetRecordComponent (TheRecordType : Symbol; Number : Positive) return Symbol; --# global in Dict; -- Works for extended records as well as non-extended ones; does not ever return Inherit fields function GetInheritDepth (FieldName : LexTokenManager.Lex_String; RecordType : Symbol) return Natural; --# global in Dict; --# in LexTokenManager.State; -- Assuming we have used LookUpSelectedItem and established that there is a field -- called FieldName in record RecordType we can use this function to find out -- how far FieldName is down a chain of inherit-from-root-type pointers.
So if -- R.F directly exists we return 0, and if R.F actually represents R.Inherit.F then we get -- 1 and so on function RecordComponentIsInherited (TheComponent : Symbol) return Boolean; --# global in Dict; -- returns true for extended types where a field is inherited rather than -- explicitly declared function GetVariableExpNode (Variable : Symbol) return ExaminerConstants.RefType; --# global in Dict; function GetConstantExpNode (TheConstant : Symbol) return ExaminerConstants.RefType; --# global in Dict; function ConstantExpIsWellformed (TheConstant : Symbol) return Boolean; --# global in Dict; function Get_Value (The_Constant : Symbol) return LexTokenManager.Lex_String; --# global in Dict; function GetConstantRulePolicy (TheConstant : Symbol; TheScope : Scopes) return Rule_Policies; --# global in Dict; function IsConstantRulePolicyPresent (TheConstant : Symbol; TheScope : Scopes) return Boolean; --# global in Dict; function TypeIsWellformed (TheType : Symbol) return Boolean; --# global in Dict; function GetPredefinedPackageStandard return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsPackage (S, Dict)); function IsPredefinedPackageStandard (TheSymbol : Symbol) return Boolean; --# global in Dict; function GetPredefinedPackageASCII return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsPackage (S, Dict)); function GetOwner (Variable : Symbol) return Symbol; --# global in Dict; function LastMostEnclosingLoop (CompilationUnit : Symbol) return Symbol; --# global in Dict; function GetLoop (CompilationUnit : Symbol; Number : Positive) return Symbol; --# global in Dict; function GetLoopParameter (TheLoop : Symbol) return Symbol; --# global in Dict; -- Returns a Cells.Cell previously planted by the VCG and representing -- the exit expression of a for loop function GetLoopExitExpn (TheLoop : Symbol) return Natural; --# global in Dict; -- Returns a Cells.Cell previously planted by the VCG and representing -- the entry expression of a for loop function GetLoopEntryExpn (TheLoop : Symbol) return Natural; --# global in Dict; function GetLoopHasExits (TheLoop : Symbol) return Boolean; --# global in Dict; function LoopParameterHasStaticRange (TheLoopParameter : Symbol) return Boolean; --# global in Dict; function LoopParameterMovesInReverse (TheLoopParameter : Symbol) return Boolean; --# global in Dict; function GetScope (Item : Symbol) return Scopes; --# global in Dict; function GetEnclosingScope (Scope : Scopes) return Scopes; --# global in Dict; function GetEnclosingPackage (Scope : Scopes) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsPackage (S, Dict)); function GetEnclosingProtectedRegion (Scope : Scopes) return Symbol; --# global in Dict; function IsOrIsInProtectedScope (Scope : Scopes) return Boolean; --# global in Dict; function SubprogramSignatureIsWellformed (Abstraction : Abstractions; Subprogram : Symbol) return Boolean; --# global in Dict; function HasPrecondition (Abstraction : Abstractions; Subprogram : Symbol) return Boolean; --# global in Dict; -- NOTE: a task type is allowed but always returns False since tasks don't have preconditions function GetPrecondition (Abstraction : Abstractions; Subprogram : Symbol) return ExaminerConstants.RefType; --# global in Dict; -- NOTE: a task type is allowed but always returns 0 since tasks don't have preconditions function HasPostcondition (Abstraction : Abstractions; Subprogram : Symbol) return Boolean; --# global in Dict; function GetPostcondition (Abstraction : 
Abstractions; Subprogram : Symbol) return ExaminerConstants.RefType; --# global in Dict; function HasImplicitReturnVariable (Abstraction : Abstractions; TheFunction : Symbol) return Boolean; --# global in Dict; function GetImplicitReturnVariable (Abstraction : Abstractions; TheFunction : Symbol) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsImplicitReturnVariable (S, Dict)); function GetAdaFunction (ProofFunction : Symbol) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsSubprogram (S, Dict)); function GetImplicitProofFunction (Abstraction : Abstractions; TheFunction : Symbol) return Symbol; --# global in Dict; function GetSubprogramParameterNumber (Parameter : Symbol) return Positive; --# global in Dict; function GetSubprogramParameterMode (Parameter : Symbol) return Modes; --# global in Dict; function GetSubprogramParameterConstraint (Parameter : Symbol; Dimension : Positive) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsParameterConstraint (S, Dict)); -- Returns the symbol for x__index_subtype__n where x is the parameter and n the dimension number. This is -- a symbol with discriminant ParameterConstraintSymbol and which is used to pass information about -- unconstrained parameters between the wffs and the VCG. The pseudo annotation describes normal behaviour; an -- illegal SPARK program might result in calls being made which violate the "precondition", in which case -- the unknown type mark is returned. function GetSubprogramParameterConstraintDimension (TheConstraint : Symbol) return Positive; --# global in Dict; function GetParameterAssociatedWithParameterConstraint (TheConstraint : Symbol) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsSubprogramParameter (S, Dict)); function GetNumberOfSubprogramParameters (Subprogram : Symbol) return Natural; --# global in Dict; function GetSubprogramParameter (Subprogram : Symbol; Number : Positive) return Symbol; --# global in Dict; function GetNumberOfGlobalVariables (Abstraction : Abstractions; Subprogram : Symbol) return Natural; --# global in Dict; function GetGlobalMode (Abstraction : Abstractions; Subprogram : Symbol; Variable : Symbol) return Modes; --# global in Dict; function GetOwnVariableMode (Variable : Symbol) return Modes; --# global in Dict; function GetOwnVariableProtected (Variable : Symbol) return Boolean; --# global in Dict; function GetOwnVariableIsInterruptStream (Variable : Symbol) return Boolean; --# global in Dict; function GetOwnVariableTypeHere (OwnVariable : in Symbol; Scope : in Scopes) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsTypeMark (S, Dict)); function GetConstituentMode (Variable : Symbol) return Modes; --# global in Dict; function IsOwnVariableOrConstituentWithMode (Variable : Symbol) return Boolean; --# global in Dict; function GetProtectedTypeOwnVariable (TheProtectedType : Symbol) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsVariable (S, Dict)); -- return "the implicitly-declared abstract own variable that belongs to the PT" function IsUnmodedProtectedOwnVariable (Sym : Symbol) return Boolean; --# global in Dict; function GetProtectedImplicitInStream (TheProtectedOwnVar : Symbol) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsImplicitInStream (S, Dict)); -- Failure to observe the precondition will result in a null symbol being returned or a Dict crash. 
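   -- A minimal usage sketch, shown here as a comment only: the precondition is presumably
   -- that the argument is an unmoded protected own variable, so a caller would typically
   -- guard the query as below. Own_Var and In_Stream are placeholder names, not part of
   -- this package.
   --
   --    if IsUnmodedProtectedOwnVariable (Own_Var) then
   --       In_Stream := GetProtectedImplicitInStream (Own_Var);
   --       if not Is_Null_Symbol (In_Stream) then
   --          null;  -- use the implicit in-stream symbol here
   --       end if;
   --    end if;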
function GetProtectedTypeHasEntry (TheProtectedType : Symbol) return Boolean; --# global in Dict; function GetTypeHasPragma (TheProtectedOrTaskType : in Symbol; ThePragma : in RavenscarPragmas) return Boolean; --# global in Dict; function GetTypePragmaValue (TheProtectedOrTaskType : in Symbol; ThePragma : in RavenscarPragmasWithValue) return LexTokenManager.Lex_String; --# global in Dict; function GetTypePriority (TheProtectedOrTaskType : in Symbol) return LexTokenManager.Lex_String; --# global in Dict; function GetPriorityProperty (OwnVariable : in Symbol) return LexTokenManager.Lex_String; --# global in Dict; -- If IsOwnVariable (S), then returns the Integrity of that -- own var (which could be of a library package, nested package, -- or private child package.) Otherwise, returns Null_String. function GetIntegrityProperty (S : in Symbol) return LexTokenManager.Lex_String; --# global in Dict; -- Implements information flow policy checking based on the Integrity -- property of the given import and export symbol. CommandLineData is -- used to determine which info flow policy (if any) if being checked. function RelationViolatesInfoFlowPolicy (TheExport : in Symbol; TheImport : in Symbol) return Boolean; --# global in CommandLineData.Content; --# in Dict; --# in LexTokenManager.State; function GetIsSuspendable (Variable : in Symbol) return Boolean; --# global in Dict; function GetHasInterruptProperty (Variable : in Symbol) return Boolean; --# global in Dict; function GetVirtualElementOwner (Variable : in Symbol) return Symbol; --# global in Dict; function IsVirtualElement (Variable : in Symbol) return Boolean; --# global in Dict; function IsVirtualElementForType (TheVariable : in Symbol; TheProtectedType : in Symbol) return Boolean; --# global in Dict; function VirtualElementSeenByOwner (Variable : in Symbol) return Boolean; --# global in Dict; function GetMainProgram return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsSubprogram (S, Dict)); function GetMainProgramPriority return LexTokenManager.Lex_String; --# global in Dict; function GetAbstraction (Subprogram : Symbol; Scope : Scopes) return Abstractions; --# global in Dict; function GetConstraintAbstraction (Subprogram : Symbol; Scope : Scopes) return Abstractions; --# global in Dict; -- Similar above but selects which subprgoram constraint to use. May return different -- result to GetAbstraction because a subprogram may have only one flow annotation -- but have two proof contexts (if private type refinement is involved). procedure AdjustTypeUpperBound (TypeMark : Symbol; NewBound : LexTokenManager.Lex_String); --# global in out Dict; --# derives Dict from *, --# NewBound, --# TypeMark; procedure AdjustTypeLowerBound (TypeMark : Symbol; NewBound : LexTokenManager.Lex_String); --# global in out Dict; --# derives Dict from *, --# NewBound, --# TypeMark; procedure AdjustTypeErrorBound (TypeMark : Symbol; NewBound : LexTokenManager.Lex_String); --# global in out Dict; --# derives Dict from *, --# NewBound, --# TypeMark; -- ********************************************************************** -- NB: the three above operations are not intended for general use, -- and are present for the benefit of the ConfigFile package. 
-- ********************************************************************** -------------------------------------------------------------------------------- -- QUERY FUNCTIONS -- -------------------------------------------------------------------------------- function IsObject (TheSymbol : Symbol) return Boolean; --# global in Dict; function Is_Declared (Item : Symbol) return Boolean; --# global in Dict; function Is_Constant (TheSymbol : Symbol) return Boolean; --# global in Dict; function Is_Variable (TheSymbol : Symbol) return Boolean; --# global in Dict; -- Note, returns false for a record subcomponent function IsVariableOrSubcomponent (TheSymbol : Symbol) return Boolean; --# global in Dict; function GetFirstRecordSubcomponent (TheSymbol : Symbol) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsSubcomponent (S, Dict)); function GetNextRecordSubcomponent (TheSubcomponent : Symbol) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsSubcomponent (S, Dict)); function VariableIsInitialized (Variable : Symbol) return Boolean; --# global in Dict; function VariableHasAddressClause (Variable : Symbol) return Boolean; --# global in Dict; function VariableHasPragmaImport (Variable : Symbol) return Boolean; --# global in Dict; function TypeSizeAttribute (TypeMark : Symbol) return LexTokenManager.Lex_String; --# global in Dict; function VariableIsAliased (Variable : Symbol) return Boolean; --# global in Dict; procedure SetVariableMarkedValid (Variable : in Symbol; Val : in Boolean); --# global in out Dict; --# derives Dict from *, --# Val, --# Variable; function VariableIsMarkedValid (TheVariable : Symbol) return Boolean; --# global in Dict; procedure SetSubcomponentMarkedValid (Subcomponent : in Symbol; Val : in Boolean); --# global in out Dict; --# derives Dict from *, --# Subcomponent, --# Val; function SubcomponentIsMarkedValid (TheSubcomponent : Symbol) return Boolean; --# global in Dict; function VariableOrSubcomponentIsMarkedValid (TheSym : Symbol) return Boolean; --# global in Dict; function IsAtomic (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsPragmaAtomic (TheSymbol : Symbol) return Boolean; --# global in CommandLineData.Content; --# in Dict; function IsDeferredConstant (TheSymbol : Symbol) return Boolean; --# global in Dict; function ConstantIsDeferredHere (TheConstant : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function IsUnknownTypeMark (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsUniversalIntegerType (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsUniversalRealType (TheSymbol : Symbol) return Boolean; --# global in Dict; -- True iff IsTypeMark (TheSymbol) and ISN'T UnknownType and -- ISN'T a subtype function IsType (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsSubtype (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsTaskType (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsProtectedType (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsProtectedTypeMark (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsProtectedFunction (TheSymbol : Symbol) return Boolean; --# global in Dict; function CompatibleTypes (Scope : Scopes; Left : Symbol; Right : Symbol) return Boolean; --# global in Dict; function IsScalarTypeMark (TheSymbol : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function IsDiscreteTypeMark (TheSymbol : Symbol; Scope : Scopes) return Boolean; 
--# global in Dict; function IsIntegerTypeMark (TheSymbol : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function IsModularTypeMark (TheSymbol : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function IsRealTypeMark (TheSymbol : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function IsFixedPointTypeMark (TheSymbol : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function IsFloatingPointTypeMark (TheSymbol : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function IsArrayTypeMark (TheSymbol : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function IsRecordTypeMark (TheSymbol : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function TypeIsBoolean (TypeMark : Symbol) return Boolean; --# global in Dict; function TypeIsCharacter (TypeMark : Symbol) return Boolean; --# global in Dict; function TypeIsEnumeration (TypeMark : Symbol) return Boolean; --# global in Dict; function TypeIsInteger (TypeMark : Symbol) return Boolean; --# global in Dict; function TypeIsModular (TypeMark : Symbol) return Boolean; --# global in Dict; function TypeIsDiscrete (TypeMark : Symbol) return Boolean; --# global in Dict; function TypeIsFixedPoint (TypeMark : Symbol) return Boolean; --# global in Dict; function TypeIsFloatingPoint (TypeMark : Symbol) return Boolean; --# global in Dict; function TypeIsReal (TypeMark : Symbol) return Boolean; --# global in Dict; function TypeIsNumeric (TypeMark : Symbol) return Boolean; --# global in Dict; function TypeIsScalar (TypeMark : Symbol) return Boolean; --# global in Dict; function TypeIsRecord (TypeMark : Symbol) return Boolean; --# global in Dict; function TypeIsAbstractProof (TypeMark : Symbol) return Boolean; --# global in Dict; function TypeIsArray (TypeMark : Symbol) return Boolean; --# global in Dict; function TypeIsAnnounced (TheType : Symbol) return Boolean; --# global in Dict; function TypeIsPrivate (TheType : Symbol) return Boolean; --# global in Dict; function TypeIsTagged (TheType : Symbol) return Boolean; --# global in Dict; function TypeIsExtendedTagged (TheType : Symbol) return Boolean; --# global in Dict; function TypeIsTask (TheType : Symbol) return Boolean; --# global in Dict; function TypeIsAccess (TheType : Symbol) return Boolean; --# global in Dict; function TypeIsGeneric (TheType : Symbol) return Boolean; --# global in Dict; function ExtendedTaggedHasPrivateAncestors (TheType : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function IsAnExtensionOf (RootType, ExtendedType : Symbol) return Boolean; --# global in Dict; function NoFieldsBelowThisRecord (RecordSym : Symbol) return Boolean; --# global in Dict; -- Returns true for an extended record if all its inherited records are null -- extensions or null records function RecordHasSomeFields (RecordSym : Symbol) return Boolean; --# global in Dict; -- return true if a record either has soem fields itself or it inherits some from an -- ancestor type function ContainsFloat (TypeMark : Symbol) return Boolean; --# global in Dict; function GetEnclosingCompilationUnit (Scope : Scopes) return Symbol; --# global in Dict; function LoopHasName (TheLoop : Symbol) return Boolean; --# global in Dict; --# in LexTokenManager.State; function GetLoopNumber (TheLoop : Symbol) return Positive; --# global in Dict; function IsPrivateTypeMark (TheSymbol : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function IsPrivateType (TheSymbol : Symbol; Scope : Scopes) return Boolean; --# global in Dict; -- Returns True 
iff the given Type is INCOMPLETE -- the point of view of the given Scope. function TypeIsIncompleteHere (TypeMark : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function IsLimitedPrivateType (TheSymbol : Symbol) return Boolean; --# global in Dict; function TypeIsLimited (TypeMark : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function TypeIsOwnAbstractHere (TypeMark : in Symbol; Scope : in Scopes) return Boolean; --# global in Dict; function IsStatic (Item : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function IsScalarType (TheSymbol : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function IsNumericType (TheSymbol : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function IsIntegerType (TheSymbol : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function IsModularType (TheSymbol : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function IsFixedPointType (TheSymbol : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function IsFloatingPointType (TheSymbol : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function Is_Constrained_Array_Type_Mark (TheSymbol : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function Is_Unconstrained_Array_Type_Mark (TheSymbol : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function IsUnconstrainedArrayType (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsUnconstrainedTaskType (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsUnconstrainedProtectedType (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsRecordComponent (TheSymbol : Symbol) return Boolean; --# global in Dict; function GetEnclosingObject (Object : Symbol) return Symbol; --# global in Dict; function GetMostEnclosingObject (Object : Symbol) return Symbol; --# global in Dict; function DefinedInPackageStandard (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsPredefined (TheSymbol : Symbol) return Boolean; --# global in CommandLineData.Content; --# in Dict; -- Returns true if the scope of the symbol is the visible scope of any of the -- following predefined packages: -- STANDARD -- ASCII -- Ada -- Ada95 -- Ada.Characters -- Ada95 -- Ada.Characters.Latin_1 -- Ada95 -- Ada.Real_Time -- Ravenscar -- Ada.Synchronous_Task_Control, -- Ravenscar -- Ada.Interrupts -- Ravenscar function IsPredefinedIntegerType (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsPredefinedFloatType (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsPredefinedCharacterType (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsPredefinedStringType (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsPredefinedTimeType (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsPredefinedSuspensionObjectType (TheSymbol : Symbol) return Boolean; --# global in CommandLineData.Content; --# in Dict; function IsPredefinedSuspendUntilTrueOperation (TheProcedure : Symbol) return Boolean; --# global in Dict; function IsPredefinedRealTimeClockOperation (TheProcedure : Symbol) return Boolean; --# global in Dict; function IsCompilationUnit (TheSymbol : Symbol) return Boolean; --# global in Dict; function Is_Withed (The_Withed_Symbol : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function Is_Withed_Locally (The_Withed_Symbol : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function IsUsedLocally (TheType : Symbol; Scope : Scopes) 
return Boolean; --# global in Dict; function IsInherited (ThePackage, CompilationUnit : Symbol) return Boolean; --# global in Dict; function Is_Generic_Subprogram (The_Symbol : Symbol) return Boolean; --# global in Dict; function IsFunction (TheSymbol : Symbol) return Boolean; --# global in Dict; --# return S => (S -> Is_Subprogram (TheSymbol, Dict)); function IsEntry (TheSymbol : Symbol) return Boolean; --# global in Dict; function GetSubprogramEntryBarrier (Subprogram : Symbol) return Symbol; --# global in Dict; -- return Entries Booelan variable barrier symbol if there is one, else null sym function IsInterruptHandler (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsAnUncheckedConversion (TheSymbol : Symbol) return Boolean; --# global in Dict; function UsesUncheckedConversion (TheUnit : Symbol) return Boolean; --# global in Dict; function AssignsFromExternal (TheUnit : Symbol) return Boolean; --# global in Dict; function IsProcedure (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsProofConstant (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsAdaFunction (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsProofFunction (TheSymbol : Symbol) return Boolean; --# global in Dict; function Is_Renamed (Subprogram : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function SetsPriority (TheDiscriminant : Symbol) return Boolean; --# global in Dict; function IsFormalParameter (Subprogram, Parameter : Symbol) return Boolean; --# global in Dict; function IsGenericFormalParameter (TheGeneric, Parameter : Symbol) return Boolean; --# global in Dict; function HasBody (CompilationUnit : Symbol) return Boolean; --# global in Dict; function HasBodyStub (CompilationUnit : Symbol) return Boolean; --# global in Dict; function Is_Global_Variable (Abstraction : Abstractions; Subprogram : Symbol; Variable : Symbol) return Boolean; --# global in Dict; function IsImportExport (Abstraction : Abstractions; TheProcedure : Symbol; Variable : Symbol) return Boolean; --# global in Dict; function IsExport (Abstraction : Abstractions; TheProcedure : Symbol; Variable : Symbol) return Boolean; --# global in Dict; function IsImport (Abstraction : Abstractions; TheProcedure : Symbol; Variable : Symbol) return Boolean; --# global in Dict; function IsArrayAttribute (Name : LexTokenManager.Lex_String; TypeMark : Symbol) return Boolean; --# global in Dict; --# in LexTokenManager.State; function IsEmbeddedPackage (TheSymbol : Symbol) return Boolean; --# global in Dict; function IsOwnVariable (Variable : Symbol) return Boolean; --# global in Dict; function IsOwnTask (Variable : Symbol) return Boolean; --# global in Dict; function IsRefinedOwnVariable (Variable : Symbol) return Boolean; --# global in Dict; function OwnVariableHasType (OwnVariable : Symbol; Scope : Scopes) return Boolean; --# global in Dict; function OwnVariableIsAnnounced (Variable : Symbol) return Boolean; --# global in Dict; function OwnVariableIsInitialized (Variable : Symbol) return Boolean; --# global in Dict; function OwnVariableHasConstituents (Variable : Symbol) return Boolean; --# global in Dict; function IsConcreteOwnVariable (Variable : Symbol) return Boolean; --# global in Dict; function IsRefinement (Subject, Constituent : Symbol) return Boolean; --# global in Dict; function IsRefinementConstituent (ThePackage, Variable : Symbol) return Boolean; --# global in Dict; function IsConstituent (Variable : Symbol) return Boolean; --# global in Dict; function GetSubject 
(Constituent : Symbol) return Symbol; --# global in Dict; --# return S => (Is_Null_Symbol (S) or IsVariable (S, Dict)); function HasDelayProperty (TheProcedure : Symbol) return Boolean; --# global in Dict; function DelayPropertyIsAccountedFor (TheProcedure : Symbol) return Boolean; --# global in Dict; function HasValidPriorityProperty (OwnVariable : Symbol) return Boolean; --# global in Dict; function IsThread (Sym : in Symbol) return Boolean; --# global in Dict; function UsesUnprotectedVariables (Sym : in Symbol) return Boolean; --# global in Dict; function GetUnprotectedReference (Variable : in Symbol) return Symbol; --# global in Dict; function GetSuspendsReference (Variable : in Symbol) return Symbol; --# global in Dict; function SuspendsOn (TheTaskOrProc : Symbol; ThePOorSO : Symbol) return Boolean; --# global in Dict; function SuspendsListItemIsAccountedFor (TheTaskOrProc : Symbol; ThePOorSO : Symbol) return Boolean; --# global in Dict; function SuspendsListIsPropagated (FromProcedure : Symbol; ToTaskOrProc : Symbol) return Boolean; --# global in Dict; function SubprogramMayBlock (Subprogram : Symbol) return Boolean; --# global in Dict; function BodyIsHidden (Sym : Symbol) return Boolean; --# global in Dict; -- Returns true to indicate that a procedure or task has an explicit -- derives annotation (rather than a synthesised one). function GetHasDerivesAnnotation (Task_Or_Proc : Symbol) return Boolean; --# global in Dict; function IsMainProgram (Subprogram : Symbol) return Boolean; --# global in Dict; function IsThePartition (Subprogram : Symbol) return Boolean; --# global in Dict; -- Returns True iff Sym is a program unit as defined by -- Ada95 LRM Annex N.31. Used to check the legality of -- adddress representation clauses. This definition also -- seems to be consistent with that given in Ada83 LRM 13.1(4). function IsProgramUnit (Sym : Symbol) return Boolean; --# global in Dict; function MainProgramExists return Boolean; --# global in Dict; function MainProgramPrioritySupplied return Boolean; --# global in Dict; function PackageRequiresBody (ThePackage : Symbol) return Boolean; --# global in CommandLineData.Content; --# in Dict; function GetInterruptStreamVariable (ProtectedObject : in Symbol; InterruptHandler : in Symbol) return Symbol; --# global in CommandLineData.Content; --# in Dict; --# in LexTokenManager.State; -------------------------------------------------------------------------------- -- ITERATORS -- -------------------------------------------------------------------------------- function CurrentSymbol (CurrentIterator : Iterator) return Symbol; --# global in Dict; function NextSymbol (Previous : Iterator) return Iterator; --# global in Dict; function IsNullIterator (Current : Iterator) return Boolean; function First_Deferred_Constant (The_Package : Symbol) return Iterator; --# global in Dict; function FirstArrayIndex (TypeMark : Symbol) return Iterator; --# global in Dict; function First_Undeclared_Type (The_Package : Symbol) return Iterator; --# global in Dict; function First_Private_Type (The_Package : Symbol) return Iterator; --# global in Dict; function FirstEnumerationLiteral (EnumerationType : Symbol) return Iterator; --# global in Dict; function FirstRecordComponent (TheRecordType : Symbol) return Iterator; --# global in Dict; -- N.B. This iterator works over a single record; if you want to iterate -- over a group of records modelling extension of tagged types then -- use the following function instead. 
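   -- A minimal sketch of the iterator protocol, shown here as a comment only: obtain an
   -- iterator from one of the First* functions (for example the extended-record variant
   -- declared immediately below), test it with IsNullIterator, read it with CurrentSymbol
   -- and advance it with NextSymbol. The_Record and It are placeholder names.
   --
   --    It := FirstExtendedRecordComponent (The_Record);
   --    while not IsNullIterator (It) loop
   --       null;  -- CurrentSymbol (It) is the component symbol for this step
   --       It := NextSymbol (It);
   --    end loop;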
function FirstExtendedRecordComponent (TheRecordType : Symbol) return Iterator; --# global in Dict; function First_Visible_Subprogram (The_Package_Or_Type : Symbol) return Iterator; --# global in Dict; function First_Private_Subprogram (The_Package : Symbol) return Iterator; --# global in Dict; function First_Visible_Task_Type (The_Package : Symbol) return Iterator; --# global in Dict; function First_Private_Task_Type (The_Package : Symbol) return Iterator; --# global in Dict; function FirstOwnTask (ThePackage : Symbol) return Iterator; --# global in Dict; function First_Visible_Protected_Type (The_Package : Symbol) return Iterator; --# global in Dict; function First_Private_Protected_Type (The_Package : Symbol) return Iterator; --# global in Dict; function FirstSubprogramParameter (Subprogram : Symbol) return Iterator; --# global in Dict; function FirstGenericFormalParameter (TheGeneric : Symbol) return Iterator; --# global in Dict; function GetNumberOfGenericFormalParameters (TheGeneric : Symbol) return Natural; --# global in Dict; function GetGenericFormalParameterKind (TheGenericFormalParameter : Symbol) return Generic_Parameter_Kind; --# global in Dict; function FirstKnownDiscriminant (ProtectedOrTaskType : Symbol) return Iterator; --# global in Dict; function FirstGlobalVariable (Abstraction : Abstractions; Subprogram : Symbol) return Iterator; --# global in Dict; function First_Local_Variable (Subprogram : Symbol) return Iterator; --# global in Dict; function First_Initialized_Variable (Subprogram : Symbol) return Iterator; --# global in Dict; function FirstProtectedElement (The_Protected_Type : Symbol) return Iterator; --# global in Dict; function FirstImportExport (Abstraction : Abstractions; TheProcedure : Symbol) return Iterator; --# global in Dict; function FirstExport (Abstraction : Abstractions; TheProcedure : Symbol) return Iterator; --# global in Dict; function FirstImport (Abstraction : Abstractions; TheProcedure : Symbol) return Iterator; --# global in Dict; function FirstDependency (Abstraction : Abstractions; TheProcedure : Symbol; TheExport : Symbol) return Iterator; --# global in Dict; function FirstSuspendsListItem (TheTaskOrProc : Symbol) return Iterator; --# global in Dict; function FirstVirtualElement (TheProtectedType : Symbol) return Iterator; --# global in Dict; function FirstOwnedPackage (ThePackage : Symbol) return Iterator; --# global in Dict; function First_Embedded_Package (Compilation_Unit : Symbol) return Iterator; --# global in Dict; function FirstOwnVariable (ThePackage : Symbol) return Iterator; --# global in Dict; function FirstInitializedOwnVariable (ThePackage : Symbol) return Iterator; --# global in Dict; function FirstConstituent (Subject : Symbol) return Iterator; --# global in Dict; function FirstInheritsClause (Sym : Symbol) return Iterator; --# global in Dict; function FirstInterruptStreamMapping (Sym : Symbol) return Iterator; --# global in Dict; function FirstLoopOnEntryVar (TheLoop : Symbol) return Iterator; --# global in Dict; -- returns first variable used in exit expression of for loop function GetInterruptStreamMappingHandler (TheMapping : in Symbol) return LexTokenManager.Lex_String; function GetInterruptStreamMappingStream (TheMapping : in Symbol) return LexTokenManager.Lex_String; -------------------------------------------------------------------------------- private -- We allocate SymbolTableSize entries for user-defined Symbols, -- plus sentinel value 0 for NullSymbol type Symbol is range 0 .. 
Natural'Last; --# assert Symbol'Base is Integer; NullSymbol : constant Symbol := Symbol'First; type IteratorDiscriminant is ( NullSymIterator, DeclarativeItemIterator, DeferredConstantIterator, ArrayIndexIterator, LoopIterator, UndeclaredTypeIterator, PrivateTypeIterator, EnumerationLiteralIterator, RecordComponentIterator, ExtendedRecordComponentIterator, LibraryUnitIterator, WithedPackageIterator, InheritedPackageIterator, VisibleSubprogramIterator, PrivateSubprogramIterator, TaskTypeIterator, OwnTaskIterator, ProtectedTypeIterator, SubprogramParameterIterator, GenericFormalParameterIterator, KnownDiscriminantIterator, DiscriminantConstraintIterator, ImplicitProofFunctionParameterIterator, ImplicitProofFunctionGlobalIterator, GlobalVariableIterator, LocalVariableIterator, InitializedVariableIterator, ImportExportIterator, ExportIterator, ImportIterator, DependencyIterator, InterruptStreamMappingIterator, SuspendsListItemIterator, VirtualElementIterator, OwnedPackageIterator, ProtectedElementIterator, LoopOnEntryVarIterator, EmbeddedPackageIterator, -- see Dictionary.CurrentSymbol before adding to this list, position matters OwnVariableIterator, InitializedOwnVariableIterator, AbstractOwnVariableIterator, -- see Dictionary.CurrentSymbol before adding to this list, position matters ConstituentIterator); type Iterator is record Discriminant : IteratorDiscriminant; Abstraction : Abstractions; Current : Symbol; Context : Symbol; end record; NullIterator : constant Iterator := Iterator'(NullSymIterator, IsAbstract, NullSymbol, NullSymbol); -- If you change this, please also change -- Cells.Utility.Create_Scope_Cell and -- Cells.Utility.Scope_Cell_Get_Scope. type Scopes is record The_Visibility : Visibility; The_Unit : Symbol; end record; NullScope : constant Scopes := Scopes'(The_Visibility => Visible, The_Unit => NullSymbol); end Dictionary; ././@LongLink0000000000000000000000000000017400000000000011567 Lustar rootrootspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_type_extension.adbspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaratio0000644000175000017500000002343311753202336033125 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Full_Type_Declaration) procedure Wf_Type_Extension (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Ident_Node : in STree.SyntaxNode; Dec_Loc : in LexTokenManager.Token_Position; Private_Type_Being_Resolved : in Dictionary.Symbol) is Root_Type_Node : STree.SyntaxNode; Root_Type_Sym : Dictionary.Symbol; This_Package : Dictionary.Symbol; The_Type : Dictionary.Symbol; begin -- Rules: 1 type_mark must be visible tagged type from another package -- 2 this package must not already contain a type extension -- 3 record components handled as for any other record -- first check that we do not already have a type extension in this package -- since SPARK requires a maximum of one per package to avoid overloading -- introduced by unherited operations. A second declaration is allowed if -- it completing a private extension in which case it must be compatible with -- the first declaration. case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => ErrorHandler.Semantic_Error (Err_Num => 826, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); when CommandLineData.SPARK95_Onwards => -- check that we are in a library package spec if Dictionary.IsPackage (Dictionary.GetRegion (Scope)) and then Dictionary.Packages_Are_Equal (Left_Symbol => Dictionary.GetLibraryPackage (Scope), Right_Symbol => Dictionary.GetRegion (Scope)) and then (Dictionary.Get_Visibility (Scope => Scope) = Dictionary.Visible or else Dictionary.Get_Visibility (Scope => Scope) = Dictionary.Privat) then This_Package := Dictionary.GetRegion (Scope); if Dictionary.PackageDeclaresTaggedType (This_Package) then ErrorHandler.Semantic_Error (Err_Num => 839, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); elsif Dictionary.PackageExtendsAnotherPackage (This_Package) and then not Is_Private_Type_Resolution (Sym => Private_Type_Being_Resolved, Scope => Scope) then ErrorHandler.Semantic_Error (Err_Num => 824, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Dictionary.GetSimpleName (Dictionary.GetPackageThatIsExtended (This_Package))); else -- first extension or resolution of a private extension Root_Type_Node := Child_Node (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME Root_Type_Node = type_mark SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Root_Type_Node) = SP_Symbols.type_mark, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Root_Type_Node = type_mark in Wf_Type_Extension"); Wf_Type_Mark (Node => Root_Type_Node, Current_Scope => Scope, Context => Dictionary.ProgramContext, Type_Sym => Root_Type_Sym); -- if we are resolving a private extension then the Root_Type_Sym must be the same as -- the type we originally extended if Is_Private_Type_Resolution (Sym => Private_Type_Being_Resolved, Scope => Scope) and then not Dictionary.Types_Are_Equal (Left_Symbol => Root_Type_Sym, Right_Symbol => Dictionary.GetRootOfExtendedType (Private_Type_Being_Resolved), Full_Range_Subtype => False) then ErrorHandler.Semantic_Error_Sym (Err_Num => 825, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Root_Type_Node), Sym => Dictionary.GetRootOfExtendedType (Private_Type_Being_Resolved), Scope => Scope); end if; -- wf_type_mark will return the unknown 
type if it found any errors if not Dictionary.IsUnknownTypeMark (Root_Type_Sym) then -- check that Root_Type_Sym represents a tagged if Dictionary.TypeIsTagged (Root_Type_Sym) then -- check that type being extended is not locally declared if Dictionary.GetScope (Root_Type_Sym) /= Scope then -- mark this package as extending the one declaring the root type Dictionary.SetPackageAsExtendingAnother (This_Package, Dictionary.GetRegion (Dictionary.GetScope (Root_Type_Sym))); -- add private type or add record type -- ASSUME Child_Node (Current_Node => Node) = private_type_extension OR record_type_extension if Syntax_Node_Type (Node => Child_Node (Current_Node => Node)) = SP_Symbols.private_type_extension then -- ASSUME Child_Node (Current_Node => Node) = private_type_extension -- process with private Dictionary.Add_Private_Type (Name => Node_Lex_String (Node => Ident_Node), Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Dec_Loc, End_Position => Dec_Loc), The_Package => This_Package, Is_Limited => False, Is_Tagged_Type => False, Extends => Root_Type_Sym, The_Type => The_Type); STree.Add_Node_Symbol (Node => Ident_Node, Sym => The_Type); elsif Syntax_Node_Type (Node => Child_Node (Current_Node => Node)) = SP_Symbols.record_type_extension then -- ASSUME Child_Node (Current_Node => Node) = record_type_extension -- process rest of with record Wf_Record (Node => Child_Node (Current_Node => Node), Scope => Scope, Ident_Node => Ident_Node, Dec_Loc => Dec_Loc, Extends => Root_Type_Sym, Private_Type_Being_Resolved => Private_Type_Being_Resolved); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child_Node (Current_Node => Node) = private_type_extension OR " & "record_type_extension in Wf_Type_Extension"); end if; else -- local type being extended ErrorHandler.Semantic_Error (Err_Num => 823, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Root_Type_Node), Id_Str => LexTokenManager.Null_String); end if; else -- illegal type being extended ErrorHandler.Semantic_Error_Sym (Err_Num => 822, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Root_Type_Node), Sym => Root_Type_Sym, Scope => Scope); end if; end if; end if; else -- not in library spec ErrorHandler.Semantic_Error (Err_Num => 828, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; end case; end Wf_Type_Extension; spark-2012.0.deb/examiner/lextokenmanager-relation_algebra.adb0000644000175000017500000004254211753202336023404 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= package body LexTokenManager.Relation_Algebra is procedure Create_Relation (The_Heap : in out Heap.HeapRecord; R : out Relation) is begin RelationAlgebra.CreateRelation (TheHeap => The_Heap, R => R.The_Relation); end Create_Relation; procedure Dispose_Of_Relation (The_Heap : in out Heap.HeapRecord; R : in Relation) is begin RelationAlgebra.DisposeOfRelation (TheHeap => The_Heap, R => R.The_Relation); end Dispose_Of_Relation; -- Returns the value of the column value of a matrix element (Pair). function Column_Value (The_Heap : Heap.HeapRecord; P : RelationAlgebra.Pair) return LexTokenManager.Lex_String is begin return LexTokenManager.Lex_String (Heap.BValue (TheHeap => The_Heap, A => RelationAlgebra.Pair_To_Atom (P => P))); end Column_Value; -- Returns the column index value of the Col_Leader L. function Col_Ldr_Index (The_Heap : Heap.HeapRecord; L : RelationAlgebra.ColLeader) return LexTokenManager.Lex_String is begin return LexTokenManager.Lex_String (Heap.BValue (TheHeap => The_Heap, A => RelationAlgebra.ColLeader_To_Atom (C => L))); end Col_Ldr_Index; function Convert_To_Relation (R : RelationAlgebra.Relation) return Relation is begin return Relation'(The_Relation => R); end Convert_To_Relation; procedure Insert_Col_Leader (The_Heap : in out Heap.HeapRecord; R : in Relation; J : in LexTokenManager.Lex_String; Cache : in out RelationAlgebra.Caches) is Col_Ldr, Last_Ldr : RelationAlgebra.ColLeader; Ldr_Present : Boolean; Ldr_Index : LexTokenManager.Lex_String; procedure Create_Col_Leader (The_Heap : in out Heap.HeapRecord; P : in RelationAlgebra.ColLeader; J : in LexTokenManager.Lex_String; L : out RelationAlgebra.ColLeader) --# global in out Statistics.TableUsage; --# derives L from The_Heap & --# Statistics.TableUsage from *, --# The_Heap & --# The_Heap from *, --# J, --# P; is New_Atom : Heap.Atom; begin Heap.CreateAtom (TheHeap => The_Heap, NewAtom => New_Atom); Heap.UpdateBValue (TheHeap => The_Heap, A => New_Atom, Value => Natural (J)); Heap.UpdateAPointer (TheHeap => The_Heap, A => New_Atom, Pointer => RelationAlgebra.ColLeader_To_Atom (C => RelationAlgebra.NextColLeader (TheHeap => The_Heap, L => P))); Heap.UpdateAPointer (TheHeap => The_Heap, A => RelationAlgebra.ColLeader_To_Atom (C => P), Pointer => New_Atom); L := RelationAlgebra.Atom_To_ColLeader (A => New_Atom); end Create_Col_Leader; begin Col_Ldr := Cache.ColLdr; Last_Ldr := RelationAlgebra.Atom_To_ColLeader (A => RelationAlgebra.Relation_To_Atom (R => R.The_Relation)); Ldr_Present := False; loop exit when Col_Ldr = RelationAlgebra.NullColLdr; Ldr_Index := Col_Ldr_Index (The_Heap => The_Heap, L => Col_Ldr); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ldr_Index, Lex_Str2 => J) = LexTokenManager.Str_Eq then Ldr_Present := True; exit; end if; exit when LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ldr_Index, Lex_Str2 => J) = LexTokenManager.Str_Second; Last_Ldr := Col_Ldr; Col_Ldr := RelationAlgebra.NextColLeader (TheHeap => The_Heap, L => Col_Ldr); end loop; if not Ldr_Present then Create_Col_Leader (The_Heap => The_Heap, P => Last_Ldr, J => J, L => Col_Ldr); end if; if Col_Ldr /= Cache.ColLdr then Cache.ColLdr := Col_Ldr; Cache.ColPair := RelationAlgebra.FirstInCol (TheHeap => The_Heap, L => Col_Ldr); end if; end Insert_Col_Leader; -- Inserts an element (Pair) specified by I and J into the matrix -- representing relation R. If row I or column J do not exist in the matrix -- they are created. 
The new Pair (I, J) is inserted into the matrix and -- the Cache is updated such that the current row is I and the current -- column is J and the current row and column elements refer to the new -- Pair (I, J). -- If the element (I, J) already exists in the matrix the operation has no -- effect on the matrix but the Cache is updated with the current row set -- to I, the current row and column elements set to the Pair (I, J) but -- the current column value is not changed --- Is this correct?? -- R must be non null. procedure Cached_Insert_Pair (The_Heap : in out Heap.HeapRecord; R : in Relation; I : in Natural; J : in LexTokenManager.Lex_String; Cache : in out RelationAlgebra.Caches) --# global in LexTokenManager.State; --# in out Statistics.TableUsage; --# derives Cache, --# Statistics.TableUsage, --# The_Heap from *, --# Cache, --# I, --# J, --# LexTokenManager.State, --# R, --# The_Heap; is Current_Pair, Last_Pair, New_Pair : RelationAlgebra.Pair; Row_Val : Natural; Col_Val : LexTokenManager.Lex_String; Pair_Present : Boolean; procedure Create_Pair (The_Heap : in out Heap.HeapRecord; New_Pair : out RelationAlgebra.Pair; Row : in Natural; Col : in LexTokenManager.Lex_String) --# global in out Statistics.TableUsage; --# derives New_Pair from The_Heap & --# Statistics.TableUsage from *, --# The_Heap & --# The_Heap from *, --# Col, --# Row; is A : Heap.Atom; begin Heap.CreateAtom (TheHeap => The_Heap, NewAtom => A); Heap.UpdateAValue (TheHeap => The_Heap, A => A, Value => Row); Heap.UpdateBValue (TheHeap => The_Heap, A => A, Value => Natural (Col)); New_Pair := RelationAlgebra.Atom_To_Pair (A => A); end Create_Pair; begin RelationAlgebra.Insert_Row_Leader (The_Heap => The_Heap, R => R.The_Relation, I => I, Cache => Cache); Last_Pair := RelationAlgebra.Atom_To_Pair (A => RelationAlgebra.RowLeader_To_Atom (R => Cache.RowLdr)); Current_Pair := Cache.RowPair; Pair_Present := False; loop exit when RelationAlgebra.IsNullPair (P => Current_Pair); Col_Val := Column_Value (The_Heap => The_Heap, P => Current_Pair); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Col_Val, Lex_Str2 => J) = LexTokenManager.Str_Eq then Pair_Present := True; exit; end if; exit when LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Col_Val, Lex_Str2 => J) = LexTokenManager.Str_Second; Last_Pair := Current_Pair; Current_Pair := RelationAlgebra.RightSuccr (TheHeap => The_Heap, P => Current_Pair); end loop; if Pair_Present then Cache.RowPair := Current_Pair; Cache.ColPair := Current_Pair; else Create_Pair (The_Heap => The_Heap, New_Pair => New_Pair, Row => I, Col => J); RelationAlgebra.UpdateRight (TheHeap => The_Heap, P => New_Pair, R => Current_Pair); RelationAlgebra.UpdateRight (TheHeap => The_Heap, P => Last_Pair, R => New_Pair); Insert_Col_Leader (The_Heap => The_Heap, R => R, J => J, Cache => Cache); Last_Pair := RelationAlgebra.Atom_To_Pair (A => RelationAlgebra.ColLeader_To_Atom (C => Cache.ColLdr)); Current_Pair := Cache.ColPair; loop exit when RelationAlgebra.IsNullPair (Current_Pair); Row_Val := RelationAlgebra.RowValue (TheHeap => The_Heap, P => Current_Pair); exit when Row_Val > I; Last_Pair := Current_Pair; Current_Pair := RelationAlgebra.DownSuccr (TheHeap => The_Heap, P => Current_Pair); end loop; RelationAlgebra.UpdateDown (TheHeap => The_Heap, P => New_Pair, D => Current_Pair); RelationAlgebra.UpdateDown (TheHeap => The_Heap, P => Last_Pair, D => New_Pair); Cache.RowPair := New_Pair; Cache.ColPair := New_Pair; end if; end Cached_Insert_Pair; procedure Insert_Pair (The_Heap : 
in out Heap.HeapRecord; R : in Relation; I : in Natural; J : in LexTokenManager.Lex_String) is Cache : RelationAlgebra.Caches; begin RelationAlgebra.InitialiseCache (TheHeap => The_Heap, R => R.The_Relation, Cache => Cache); -- we do not need the changed value of Cache in this case --# accept F, 10, Cache, "Cache unused here"; Cached_Insert_Pair (The_Heap => The_Heap, R => R, I => I, J => J, Cache => Cache); --# end accept; end Insert_Pair; procedure Row_Extraction (The_Heap : in out Heap.HeapRecord; R : in Relation; Given_Index : in Natural; S : out Seq_Algebra.Seq) is Row_Index : Natural; Row_Ldr : RelationAlgebra.RowLeader; Row_Found : Boolean; Local_S : Seq_Algebra.Seq; Last_S : Seq_Algebra.Member_Of_Seq; P : RelationAlgebra.Pair; begin Seq_Algebra.Create_Seq (The_Heap => The_Heap, S => Local_S); -- The optimisation using sequence operations -- BeforeFirstMember and AppendAfter is only permissible -- because Indices in a relation are ordered identically to the -- set ordering in s SeqAlgebra. This assumption is implementation -- dependent and should be eliminated when a more efficient representation -- of sets and relations is implemented. Last_S := Seq_Algebra.Before_First_Member (S => Local_S); Row_Found := False; Row_Ldr := RelationAlgebra.FirstRowLeader (TheHeap => The_Heap, R => R.The_Relation); loop exit when Row_Ldr = RelationAlgebra.NullRowLdr; Row_Index := RelationAlgebra.RowLdrIndex (TheHeap => The_Heap, L => Row_Ldr); Row_Found := (Row_Index = Given_Index); exit when Row_Index >= Given_Index; Row_Ldr := RelationAlgebra.NextRowLeader (TheHeap => The_Heap, L => Row_Ldr); end loop; if Row_Found then P := RelationAlgebra.FirstInRow (TheHeap => The_Heap, L => Row_Ldr); loop exit when P = RelationAlgebra.NullPair; -- The optimisation using sequence operations -- BeforeFirstMember and AppendAfter is only permissible -- because Indices in a relation are ordered identically to the -- set ordering in s SeqAlgebra. This assumption is implementation -- dependent and should be eliminated when a more efficient representation -- of sets and relations is implemented. 
Seq_Algebra.Append_After (The_Heap => The_Heap, M => Last_S, Given_Value => Column_Value (The_Heap => The_Heap, P => P)); P := RelationAlgebra.RightSuccr (TheHeap => The_Heap, P => P); end loop; end if; S := Local_S; end Row_Extraction; procedure Add_Row (The_Heap : in out Heap.HeapRecord; R : in Relation; I : in Natural; S : in Seq_Algebra.Seq) is M : Seq_Algebra.Member_Of_Seq; Cache : RelationAlgebra.Caches; begin RelationAlgebra.InitialiseCache (TheHeap => The_Heap, R => R.The_Relation, Cache => Cache); M := Seq_Algebra.First_Member (The_Heap => The_Heap, S => S); loop exit when Seq_Algebra.Is_Null_Member (M => M); Cached_Insert_Pair (The_Heap => The_Heap, R => R, I => I, J => Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => M), Cache => Cache); M := Seq_Algebra.Next_Member (The_Heap => The_Heap, M => M); end loop; end Add_Row; procedure Add_Col (The_Heap : in out Heap.HeapRecord; R : in Relation; J : in LexTokenManager.Lex_String; S : in SeqAlgebra.Seq) is M : SeqAlgebra.MemberOfSeq; Cache : RelationAlgebra.Caches; begin RelationAlgebra.InitialiseCache (TheHeap => The_Heap, R => R.The_Relation, Cache => Cache); M := SeqAlgebra.FirstMember (TheHeap => The_Heap, S => S); loop exit when SeqAlgebra.IsNullMember (M => M); Cached_Insert_Pair (The_Heap => The_Heap, R => R, I => SeqAlgebra.Value_Of_Member (The_Heap => The_Heap, M => M), J => J, Cache => Cache); M := SeqAlgebra.NextMember (TheHeap => The_Heap, M => M); end loop; end Add_Col; end LexTokenManager.Relation_Algebra; spark-2012.0.deb/examiner/sem-wf_package_declaration-add_child.adb0000644000175000017500000001466411753202336024043 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Wf_Package_Declaration) procedure Add_Child (Root_Id_Node : in STree.SyntaxNode; Is_Private : in Boolean; Scope : in Dictionary.Scopes; Child_Sym : out Dictionary.Symbol; Child_Str : out LexTokenManager.Lex_String) is Curr_Node : STree.SyntaxNode; Curr_Sym : Dictionary.Symbol; Parent_Sym : Dictionary.Symbol := Dictionary.NullSymbol; Child_Sort : Dictionary.PackageSort; begin Curr_Node := Root_Id_Node; Child_Str := Node_Lex_String (Node => Curr_Node); Curr_Sym := Dictionary.LookupItem (Name => Child_Str, Scope => Scope, Context => Dictionary.ProofContext, Full_Package_Name => False); while Syntax_Node_Type (Node => Next_Sibling (Current_Node => Parent_Node (Current_Node => Curr_Node))) = SP_Symbols.identifier loop -- to handle multiple prefixes if Dictionary.Is_Null_Symbol (Curr_Sym) then -- not declared or visible Parent_Sym := Dictionary.NullSymbol; ErrorHandler.Semantic_Error (Err_Num => 140, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Curr_Node), Id_Str => Child_Str); exit; end if; if not Dictionary.IsPackage (Curr_Sym) then -- can't be a parent Curr_Sym := Dictionary.NullSymbol; Parent_Sym := Dictionary.NullSymbol; ErrorHandler.Semantic_Error (Err_Num => 18, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Curr_Node), Id_Str => Child_Str); exit; end if; -- Child_Str (Curr_Sym) is visible and its a package STree.Set_Node_Lex_String (Sym => Curr_Sym, Node => Curr_Node); Parent_Sym := Curr_Sym; Curr_Node := Next_Sibling (Current_Node => Parent_Node (Current_Node => Curr_Node)); --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Root_Id_Node, STree.Table) = SP_Symbols.identifier and --# Syntax_Node_Type (Curr_Node, STree.Table) = SP_Symbols.identifier; Child_Str := Node_Lex_String (Node => Curr_Node); Curr_Sym := Dictionary.LookupSelectedItem (Prefix => Curr_Sym, Selector => Child_Str, Scope => Scope, Context => Dictionary.ProofContext); end loop; if not Dictionary.Is_Null_Symbol (Curr_Sym) then -- child already declared ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Curr_Node), Id_Str => Child_Str); elsif not Dictionary.Is_Null_Symbol (Parent_Sym) then -- check that Child_Str has not been declared as a body stub Curr_Sym := Dictionary.LookupImmediateScope (Name => Child_Str, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Parent_Sym), Context => Dictionary.ProgramContext); if not Dictionary.Is_Null_Symbol (Curr_Sym) and then Dictionary.HasBodyStub (Curr_Sym) then ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Curr_Node), Id_Str => Child_Str); elsif not Dictionary.Is_Null_Symbol (Curr_Sym) then STree.Set_Node_Lex_String (Sym => Curr_Sym, Node => Curr_Node); end if; -- check that Child_Str not inherited by parent of private child: Curr_Sym := Dictionary.LookupItem (Name => Child_Str, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Parent_Sym), Context => Dictionary.ProofContext, Full_Package_Name => False); if Is_Private and then not Dictionary.Is_Null_Symbol (Curr_Sym) then ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Curr_Node), Id_Str => Child_Str); elsif not Dictionary.Is_Null_Symbol (Curr_Sym) then STree.Set_Node_Lex_String (Sym => 
Curr_Sym, Node => Curr_Node); end if; end if; if not Dictionary.Is_Null_Symbol (Parent_Sym) then if Is_Private then Child_Sort := Dictionary.PrivateChild; else Child_Sort := Dictionary.Public; end if; Dictionary.AddChildPackage (TheParent => Parent_Sym, Sort => Child_Sort, Name => Child_Str, Comp_Unit => ContextManager.Ops.Current_Unit, Specification => Dictionary.Location'(Start_Position => Node_Position (Node => Root_Id_Node), End_Position => Node_Position (Node => Root_Id_Node)), Scope => Scope, ThePackage => Child_Sym); else -- Parent is not valid (i.e. undeclared or not a package) so we can't do any more. -- Signal failure back to caller with null child symbol Child_Sym := Dictionary.NullSymbol; Child_Str := LexTokenManager.Null_String; end if; end Add_Child; spark-2012.0.deb/examiner/commandlinehandler.ads0000644000175000017500000000760711753202335020577 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with E_Strings; --# inherit Ada.Characters.Handling, --# Ada.Characters.Latin_1, --# CommandLineData, --# ExaminerConstants, --# E_Strings, --# FileSystem, --# ScreenEcho, --# SPARK_IO; package CommandLineHandler is type Command_Strings is record Current_Position : E_Strings.Positions; Contents : E_Strings.T; end record; type S_Typs is (S_Empty, S_Equal, S_Switch_Character, S_Comma, S_String, S_Continue); type Symbols is record Typ : S_Typs; The_String : E_Strings.T; end record; function Check_Option_Name (Opt_Name : E_Strings.T; Str : String) return Boolean; procedure Skip_Spaces (Command_String : in out Command_Strings); --# derives Command_String from *; procedure Read_The_String (Command_String : in out Command_Strings; Next_Symbol : in out Symbols); --# derives Command_String, --# Next_Symbol from *, --# Command_String; ---------------------------------------------------------------------------- -- Reads the default switch file "spark.sw", then the user's -- command-line switches. Parses these and sets the content -- of CommandLineData.Content appropriately. Illegal or -- contradictory switches are detected and reported. -- -- As a special case, if -noswitch has been given on the command-line, -- then the default switch file "spark.sw" is completely ignored. 
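   -- A minimal sketch of the intended calling pattern, shown here as a comment only:
   -- a driver parses the switches once via Process (declared below) and later phases
   -- read the resulting settings from CommandLineData.Content.
   --
   --    CommandLineHandler.Process;
   --    -- ... now consult CommandLineData.Content for the parsed switch settings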
---------------------------------------------------------------------------- procedure Process; --# global in out CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from CommandLineData.Content, --# SPARK_IO.File_Sys; ---------------------------------------------------------------------------- -- Reads and parses the default switch file, setting -- CommandLineData.Content as it goes, but doesn't -- check for existence or non-overlapping of -- input files. Used by tools like SPARKFormat -- which want to be able to read the default switch -- file, but don't care if the index file(s) (and so on) -- don't actually exist. -- -- As a special case, if -noswitch has been given on the command-line, -- then the default switch file "spark.sw" is completely ignored. ---------------------------------------------------------------------------- procedure Process_Defaults_From_Switch_File; --# global in out CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from CommandLineData.Content, --# SPARK_IO.File_Sys; end CommandLineHandler; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_simple_expression_opt.adb0000644000175000017500000003021111753202336026360 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ---------------------------------------------------------------------------- -- Overview: Called to check validity of a -- simple_expression_opt node. 
Replaces calls to StaticSimpleExpression, -- BaseTypeSimpleExpression and CheckTypeSimpleExpression ---------------------------------------------------------------------------- separate (Sem.Walk_Expression_P) procedure Wf_Simple_Expression_Opt (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; T_Stack : in Type_Context_Stack.T_Stack_Type) is Result : Sem.Exp_Record; Base_Type : Dictionary.Symbol; Op_Node : STree.SyntaxNode; Operator : SP_Symbols.SP_Symbol; ---------------------------------------------------- procedure Calc_Unary_Plus_Minus (Node_Pos : in LexTokenManager.Token_Position; Is_Annotation : in Boolean; Op : in SP_Symbols.SP_Symbol; Result : in out Sem.Exp_Record) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Is_Annotation, --# LexTokenManager.State, --# Node_Pos, --# Op, --# Result, --# SPARK_IO.File_Sys & --# Result from *, --# Dictionary.Dict, --# LexTokenManager.State, --# Op; is type Err_Lookup is array (Boolean) of Positive; Which_Err : constant Err_Lookup := Err_Lookup'(False => 402, True => 399); The_Modulus_String : LexTokenManager.Lex_String; Temp_Arg : Maths.Value; Err : Maths.ErrorCode; begin if Op = SP_Symbols.minus then Maths.Negate (Result.Value); if Dictionary.TypeIsModular (Result.Type_Symbol) then The_Modulus_String := Dictionary.GetScalarAttributeValue (Base => False, Name => LexTokenManager.Modulus_Token, TypeMark => Result.Type_Symbol); Temp_Arg := Result.Value; Maths.Modulus (FirstNum => Temp_Arg, SecondNum => Maths.ValueRep (The_Modulus_String), Result => Result.Value, Ok => Err); case Err is when Maths.NoError => null; when Maths.DivideByZero => ErrorHandler.Semantic_Error (Err_Num => 400, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => LexTokenManager.Null_String); when Maths.ConstraintError => ErrorHandler.Semantic_Error (Err_Num => Which_Err (Is_Annotation), Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => LexTokenManager.Null_String); when Maths.OverFlow => Result.Value := Maths.NoValue; ErrorHandler.Semantic_Warning (Err_Num => 200, Position => Node_Pos, Id_Str => LexTokenManager.Null_String); when others => -- indicates internal error in maths package SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Math_Error, Msg => "in CalcBinaryPlusMinus"); end case; end if; end if; -- only other possibility is unary plus which has no effect end Calc_Unary_Plus_Minus; ---------------------------------------- procedure Resolve_Universal (T_Stack : in Type_Context_Stack.T_Stack_Type; T : in out Dictionary.Symbol) --# global in Dictionary.Dict; --# derives T from *, --# Dictionary.Dict, --# T_Stack; --# pre (Dictionary.Is_Null_Symbol (T) or Dictionary.IsTypeMark (T, Dictionary.Dict)) and --# Type_Context_Stack.Stack_Is_Valid (T_Stack); --# post Dictionary.Is_Null_Symbol (T) or Dictionary.IsTypeMark (T, Dictionary.Dict); is procedure Debug_Print --# derives ; is --# hide Debug_Print; begin if CommandLineData.Content.Debug.Expressions then SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Wf_Simple_Expression_Opt encounters a universal expression. 
Resolving by context to type ", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Dictionary.GetSimpleName (Type_Context_Stack.Top (Stack => T_Stack)))); end if; end Debug_Print; begin -- Resolve_Universal if not Dictionary.IsUnknownTypeMark (Type_Context_Stack.Top (Stack => T_Stack)) then if Dictionary.IsUniversalRealType (T) then T := Type_Context_Stack.Top (Stack => T_Stack); Debug_Print; elsif Dictionary.IsUniversalIntegerType (T) then T := Type_Context_Stack.Top (Stack => T_Stack); Debug_Print; -- It's tempting to want to do a ConstraintCheck here against -- T'Base. Unfortunately, this can't be done reliably since -- Ada95's "preference rule" _might_ kick in and actualy make -- a static expression legal that would be rejected by a simple -- minded ConstraintCheck. For example, consider: -- -- type T is range -128 .. 127; -- --# assert T'Base is Short_Short_Integer; -- same range! -- -- C : constant T := -128; -- -- Ada95 - legal, owing to preference rule (which the Examiner doesn't implement!) -- SPARK95 - legal, owing to imperfect implementation here -- Ada83 - illegal (rejected by DEC Ada, for instance) -- SPARK83 - accepted (wrongly) owing to imperfect implementation here -- -- So...the only user-visible mistake is an acceptance of illegal Ada -- in SPARK83 mode, which is a long-standing problem and only affects SPARK83 -- projects anyway. The risk of messing with this code and incorrectly -- rejecting _legal_ SPARK83 is so great, that it's best to leave the -- current implementation as is. end if; end if; end Resolve_Universal; begin -- Wf_Simple_Expression_Opt Op_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Op_Node = unary_adding_operator OR term OR annotation_term if STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.unary_adding_operator then -- ASSUME Op_Node = unary_adding_operator Exp_Stack.Pop (Item => Result, Stack => E_Stack); Resolve_Universal (T_Stack => T_Stack, T => Result.Type_Symbol); if STree.Syntax_Node_Type (Node => Node) = SP_Symbols.simple_expression_opt then STree.Add_Node_Symbol (Node => Op_Node, Sym => Result.Type_Symbol); end if; Base_Type := Dictionary.GetRootType (Result.Type_Symbol); Operator := STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => Op_Node)); -- ASSUME Operator = plus OR minus SystemErrors.RT_Assert (C => Operator = SP_Symbols.plus or else Operator = SP_Symbols.minus, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Operator = plus OR minus in Wf_Simple_Expression_Opt"); if not Dictionary.UnaryOperatorIsDefined (Operator, Base_Type) then Result := Sem.Unknown_Type_Record; if Dictionary.IsModularType (Base_Type, Scope) then ErrorHandler.Semantic_Error (Err_Num => 803, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => STree.Next_Sibling (Current_Node => Op_Node)), Id_Str => LexTokenManager.Null_String); else ErrorHandler.Semantic_Error (Err_Num => 40, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => STree.Next_Sibling (Current_Node => Op_Node)), Id_Str => LexTokenManager.Null_String); end if; elsif STree.Syntax_Node_Type (Node => Node) = SP_Symbols.simple_expression_opt and then not Dictionary.UnaryOperatorIsVisible (Operator, Base_Type, Scope) then Result := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 309, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Op_Node), Id_Str => LexTokenManager.Null_String); else 
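-- The unary operator is both defined for and visible to the operand's base
-- type at this point, so fold the unary plus/minus into any static value
-- (Calc_Unary_Plus_Minus below) and type the result with the base type.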
Calc_Unary_Plus_Minus (Node_Pos => STree.Node_Position (Node => Node), Is_Annotation => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_simple_expression_opt, Op => Operator, Result => Result); Result.Type_Symbol := Base_Type; -- (if we decide that unary plus is to be ignored for aliasing purposes -- then lines below will have to change to reflect this) Result.Variable_Symbol := Dictionary.NullSymbol; Result.Is_AVariable := False; Result.Is_An_Entire_Variable := False; Result.Has_Operators := True; end if; -- OtherSymbol may carry a function symbol in the case of uses of unchecked_conversion. -- This symbol is used (by wf_Assign) to convey information to the VCG to suppress -- checks when an unchecked_conversion is assigned to something of the same subtype. -- We do not want this mechanism if the unchecked_conversion is used in any other context -- than a direct assignment. Therefore we clear OtherSymbol here: Result.Other_Symbol := Dictionary.NullSymbol; Exp_Stack.Push (X => Result, Stack => E_Stack); elsif STree.Syntax_Node_Type (Node => Op_Node) /= SP_Symbols.term and then STree.Syntax_Node_Type (Node => Op_Node) /= SP_Symbols.annotation_term then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Op_Node = unary_adding_operator OR term OR annotation_term in Wf_Simple_Expression_Opt"); end if; end Wf_Simple_Expression_Opt; spark-2012.0.deb/examiner/sem-walk_expression_p-exp_stack.adb0000644000175000017500000001650611753202336023226 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Ada.Text_IO; separate (Sem.Walk_Expression_P) package body Exp_Stack is procedure Debug_Item (Op : in String; Item : in Sem.Exp_Record) --# derives null from Item, --# Op; is --# hide Debug_Item; begin Ada.Text_IO.Put_Line (Op); if not Dictionary.Is_Null_Symbol (Item.Type_Symbol) then Ada.Text_IO.Put (" Sem.Exp_Record.TypeSymbol = "); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Dictionary.GenerateSimpleName (Item => Item.Type_Symbol, Separator => ".")); end if; if not Dictionary.Is_Null_Symbol (Item.Other_Symbol) then Ada.Text_IO.Put (" Sem.Exp_Record.OtherSymbol = "); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Dictionary.GenerateSimpleName (Item => Item.Other_Symbol, Separator => ".")); end if; if not Dictionary.Is_Null_Symbol (Item.Stream_Symbol) then Ada.Text_IO.Put (" Sem.Exp_Record.StreamSymbol = "); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Dictionary.GenerateSimpleName (Item => Item.Stream_Symbol, Separator => ".")); end if; if not Dictionary.Is_Null_Symbol (Item.Tagged_Parameter_Symbol) then Ada.Text_IO.Put (" Sem.Exp_Record.TaggedParameterSymbol = "); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Dictionary.GenerateSimpleName (Item => Item.Tagged_Parameter_Symbol, Separator => ".")); end if; if not Dictionary.Is_Null_Symbol (Item.Variable_Symbol) then Ada.Text_IO.Put (" Sem.Exp_Record.VariableSymbol = "); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Dictionary.GenerateSimpleName (Item => Item.Variable_Symbol, Separator => ".")); end if; end Debug_Item; procedure Debug_Stack (Stack : in Exp_Stack_Type) --# derives null from Stack; is --# hide Debug_Stack; begin Ada.Text_IO.Put_Line ("----------------------------------------"); for I in Index_Range'First .. 
Stack.Top_Ptr loop Debug_Item (Op => "Exp_Stack.Debug_Stack", Item => Stack.S (I)); end loop; Ada.Text_IO.Put_Line ("----------------------------------------"); end Debug_Stack; procedure Init (Stack : out Exp_Stack_Type) is begin Stack.Top_Ptr := 0; --# accept Flow, 32, Stack.S, "Safe partial initialisation" & --# Flow, 31, Stack.S, "Safe partial initialisation" & --# Flow, 602, Stack, Stack.S, "Safe partial initialisation"; end Init; procedure Push (X : in Sem.Exp_Record; Stack : in out Exp_Stack_Type) is begin SystemErrors.RT_Assert (C => (Dictionary.Is_Null_Symbol (X.Type_Symbol) or else Dictionary.IsTypeMark (X.Type_Symbol)) and then (Dictionary.Is_Null_Symbol (X.Stream_Symbol) or else Dictionary.IsFunction (X.Stream_Symbol) or else Dictionary.IsOwnVariableOrConstituentWithMode (X.Stream_Symbol)), Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Bad X in Exp_Stack.Push"); if CommandLineData.Content.Debug.Expressions then Debug_Item ("Exp_Stack.Push", X); end if; if Stack.Top_Ptr < ExaminerConstants.WalkExpStackMax then Stack.Top_Ptr := Stack.Top_Ptr + 1; Stack.S (Stack.Top_Ptr) := X; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Expression_Stack_Overflow, Msg => "in Exp_Stack.Push"); end if; if CommandLineData.Content.Debug.Expressions then Debug_Stack (Stack); end if; end Push; procedure Pop (Item : out Sem.Exp_Record; Stack : in out Exp_Stack_Type) is begin if Stack.Top_Ptr > 0 then Item := Stack.S (Stack.Top_Ptr); Stack.Top_Ptr := Stack.Top_Ptr - 1; if CommandLineData.Content.Debug.Expressions then Debug_Item ("Exp_Stack.Pop", Item); Debug_Stack (Stack); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Expression_Stack_Underflow, Msg => "in Exp_Stack.Pop"); Item := Stack.S (Stack.Top_Ptr); end if; SystemErrors.RT_Assert (C => (Dictionary.Is_Null_Symbol (Item.Type_Symbol) or else Dictionary.IsTypeMark (Item.Type_Symbol)) and then (Dictionary.Is_Null_Symbol (Item.Stream_Symbol) or else Dictionary.IsFunction (Item.Stream_Symbol) or else Dictionary.IsOwnVariableOrConstituentWithMode (Item.Stream_Symbol)), Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Bad Item in Exp_Stack.Pop"); end Pop; function Top (Stack : Exp_Stack_Type) return Sem.Exp_Record is Result : Sem.Exp_Record; begin if Stack.Top_Ptr > 0 then Result := Stack.S (Stack.Top_Ptr); else Result := Sem.Null_Exp_Record; end if; SystemErrors.RT_Assert (C => (Dictionary.Is_Null_Symbol (Result.Type_Symbol) or else Dictionary.IsTypeMark (Result.Type_Symbol)) and then (Dictionary.Is_Null_Symbol (Result.Stream_Symbol) or else Dictionary.IsFunction (Result.Stream_Symbol) or else Dictionary.IsOwnVariableOrConstituentWithMode (Result.Stream_Symbol)), Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Bad Result in Exp_Stack.Top"); --# accept Flow, 35, Dictionary.Dict, "Importation of the initial value is ineffective OK" & --# Flow, 50, Dictionary.Dict, "Value is not derived from the imported value OK"; return Result; end Top; function Has_One_Entry (Stack : Exp_Stack_Type) return Boolean is begin return Stack.Top_Ptr = 1; end Has_One_Entry; function Is_Empty (Stack : Exp_Stack_Type) return Boolean is begin return Stack.Top_Ptr = 0; end Is_Empty; end Exp_Stack; spark-2012.0.deb/examiner/sem-wf_inherit_clause.adb0000644000175000017500000004550411753202336021205 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; 
you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- Overview: -- Checks an inherit clause for Sem starting at node inherit_clause. -- Directly capable of rasing errors for: undeclared item in inherit list, -- duplicate item in inherit list or inheriting of something which is not a -- package. -------------------------------------------------------------------------------- with SLI; separate (Sem) procedure Wf_Inherit_Clause (Node : in STree.SyntaxNode; Comp_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) is It : STree.Iterator; Next_Node : STree.SyntaxNode; ----------------------------- procedure Process_Dotted_Simple_Name (Node : in STree.SyntaxNode; Comp_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# Comp_Sym, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Comp_Sym, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.dotted_simple_name; --# post STree.Table = STree.Table~; is Prefix_Sym : Dictionary.Symbol := Dictionary.NullSymbol; Current_Sym : Dictionary.Symbol; Current_Node : STree.SyntaxNode; Explicit_Duplicate : Boolean; Ok : Boolean; ---------------------------- function Dotted_Identifier_Found (Node : STree.SyntaxNode) return Boolean --# global in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.dotted_simple_name; is Current_Node : STree.SyntaxNode; begin Current_Node := Child_Node (Current_Node => Node); -- ASSUME Current_Node = dotted_simple_name OR identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Current_Node) = SP_Symbols.dotted_simple_name or Syntax_Node_Type (Node => Current_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = dotted_simple_name OR identifier in Dotted_Simple_Name"); return Syntax_Node_Type (Node => Current_Node) = SP_Symbols.dotted_simple_name; end Dotted_Identifier_Found; ------------------- function Look_Up (Prefix : in Dictionary.Symbol; Str : in LexTokenManager.Lex_String; Scope : in Dictionary.Scopes) return Dictionary.Symbol --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; is Sym : Dictionary.Symbol; begin if Dictionary.Is_Null_Symbol (Prefix) then Sym := Dictionary.LookupItem (Name 
=> Str, Scope => Scope, Context => Dictionary.ProofContext, Full_Package_Name => False); else Sym := Dictionary.LookupSelectedItem (Prefix => Prefix, Selector => Str, Scope => Scope, Context => Dictionary.ProofContext); end if; return Sym; end Look_Up; -------------------- -- Note: A package owner (if it exists) is the parent of the closest -- private anscestor. procedure Check_Package_Owner (Current_Node : in STree.SyntaxNode; Comp_Sym, Current_Sym : in Dictionary.Symbol; Ok : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Comp_Sym, --# Current_Node, --# Current_Sym, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table & --# Ok from Comp_Sym, --# Current_Sym, --# Dictionary.Dict; --# pre Syntax_Node_Type (Current_Node, STree.Table) = SP_Symbols.identifier; is Owner : Dictionary.Symbol; begin Ok := True; Owner := Dictionary.GetPackageOwner (Comp_Sym); if not Dictionary.Is_Null_Symbol (Owner) and then not Dictionary.Packages_Are_Equal (Left_Symbol => Current_Sym, Right_Symbol => Owner) then if Dictionary.IsProperDescendent (Current_Sym, Owner) then if not Dictionary.IsPrivateDescendent (Current_Sym, Owner) then ErrorHandler.Semantic_Error (Err_Num => 617, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Current_Node), Id_Str => Node_Lex_String (Node => Current_Node)); Ok := False; end if; elsif not Dictionary.IsInherited (Current_Sym, Owner) then ErrorHandler.Semantic_Error (Err_Num => 618, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Current_Node), Id_Str => Node_Lex_String (Node => Current_Node)); Ok := False; end if; end if; end Check_Package_Owner; -------------------- -- Detects eg P.Q inheriting both R and P.R procedure Check_For_Redeclaration (Current_Node : in STree.SyntaxNode; Comp_Sym, Current_Sym : in Dictionary.Symbol; Ok : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Comp_Sym, --# Current_Node, --# Current_Sym, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table & --# Ok, --# STree.Table from CommandLineData.Content, --# Comp_Sym, --# Current_Node, --# Current_Sym, --# Dictionary.Dict, --# LexTokenManager.State, --# STree.Table; --# pre Syntax_Node_Type (Current_Node, STree.Table) = SP_Symbols.identifier; --# post STree.Table = STree.Table~; is Parent_Sym : Dictionary.Symbol; Visible_Sym : Dictionary.Symbol; begin Ok := True; if not Dictionary.IsEmbeddedPackage (Comp_Sym) and then Dictionary.IsPackage (Current_Sym) then -- guard for next line's call Parent_Sym := Dictionary.GetPackageParent (Current_Sym); if Dictionary.Is_Null_Symbol (Parent_Sym) or else Dictionary.IsProperDescendent (Comp_Sym, Parent_Sym) then -- Current_Sym will be directly visible Visible_Sym := Dictionary.LookupItem (Name => Node_Lex_String (Node => Current_Node), Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Comp_Sym), Context => Dictionary.ProofContext, Full_Package_Name => False); if not 
Dictionary.Is_Null_Symbol (Visible_Sym) and then not Dictionary.Packages_Are_Equal (Left_Symbol => Visible_Sym, Right_Symbol => Current_Sym) then -- name is already directly visible (and not duplicate) ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Current_Node), Id_Str => Node_Lex_String (Node => Current_Node)); Ok := False; elsif not Dictionary.Is_Null_Symbol (Visible_Sym) then STree.Set_Node_Lex_String (Sym => Visible_Sym, Node => Current_Node); end if; end if; end if; end Check_For_Redeclaration; begin -- Process_Dotted_Simple_Name if Dotted_Identifier_Found (Node => Node) and then CommandLineData.Content.Language_Profile = CommandLineData.SPARK83 then ErrorHandler.Semantic_Error (Err_Num => 610, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); else Current_Node := Last_Child_Of (Start_Node => Node); loop --# assert STree.Table = STree.Table~; -- ASSUME Current_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Current_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = identifier in Process_Dotted_Simple_Name"); Current_Sym := Look_Up (Prefix => Prefix_Sym, Str => Node_Lex_String (Node => Current_Node), Scope => Scope); if Dictionary.Is_Null_Symbol (Current_Sym) then ErrorHandler.Semantic_Error (Err_Num => 135, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Current_Node), Id_Str => Node_Lex_String (Node => Current_Node)); exit; end if; if not Dictionary.IsPackage (Current_Sym) and then not Dictionary.Is_Generic_Subprogram (The_Symbol => Current_Sym) then -- can't be inherited ErrorHandler.Semantic_Error (Err_Num => 18, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Current_Node), Id_Str => Node_Lex_String (Node => Current_Node)); exit; end if; if Current_Sym = Comp_Sym then -- trying to inherit self ErrorHandler.Semantic_Error (Err_Num => 134, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Current_Node), Id_Str => Node_Lex_String (Node => Current_Node)); exit; end if; -- check for valid inheriting of private packages if Dictionary.IsPackage (Current_Sym) and then -- guard for next call Dictionary.IsPrivatePackage (Current_Sym) and then not Dictionary.Is_Null_Symbol (Dictionary.GetPackageParent (Current_Sym)) and then (Dictionary.IsMainProgram (Comp_Sym) or else not (Dictionary.IsEmbeddedPackage (Comp_Sym) or else Dictionary.IsDescendentOfPrivateSibling (Comp_Sym, Current_Sym))) then ErrorHandler.Semantic_Error (Err_Num => 616, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Current_Node), Id_Str => Node_Lex_String (Node => Current_Node)); exit; end if; -- check rules for what a child package may and may not inherit (i.e. siblings of -- same kind (public/private) etc). 
Note guard so that we don't do this if the -- inherited things is a generic subprogram since these are library-level units that -- aren't covered by the child package hierarchy rules Ok := True; if CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83 and then Dictionary.IsPackage (Comp_Sym) and then not Dictionary.Is_Generic_Subprogram (The_Symbol => Current_Sym) then Check_Package_Owner (Current_Node => Current_Node, Comp_Sym => Comp_Sym, Current_Sym => Current_Sym, Ok => Ok); if Ok then Check_For_Redeclaration (Current_Node => Current_Node, Comp_Sym => Comp_Sym, Current_Sym => Current_Sym, Ok => Ok); end if; end if; exit when not Ok; Dictionary.AddInheritsReference (The_Unit => Comp_Sym, The_Inherited_Symbol => Current_Sym, Explicit => Parent_Node (Current_Node => Current_Node) = Node, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Current_Node), End_Position => Node_Position (Node => Current_Node)), Already_Present => Explicit_Duplicate); if Explicit_Duplicate then ErrorHandler.Semantic_Error_Sym (Err_Num => 190, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Sym => Current_Sym, Scope => Scope); end if; STree.Set_Node_Lex_String (Sym => Current_Sym, Node => Current_Node); Current_Node := Parent_Node (Current_Node => Current_Node); -- ASSUME Current_Node = dotted_simple_name SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Current_Node) = SP_Symbols.dotted_simple_name, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = dotted_simple_name in Process_Dotted_Simple_Name"); exit when Current_Node = Node; Prefix_Sym := Current_Sym; -- ready for next lookup Current_Node := Next_Sibling (Current_Node => Current_Node); end loop; end if; end Process_Dotted_Simple_Name; begin -- Wf_Inherit_Clause It := Find_First_Node (Node_Kind => SP_Symbols.dotted_simple_name, From_Root => Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.dotted_simple_name and --# Next_Node = Get_Node (It); Process_Dotted_Simple_Name (Node => Next_Node, Comp_Sym => Comp_Sym, Scope => Scope); It := STree.NextNode (It); end loop; if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Inherit (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Node, Scope => Scope); end if; end Wf_Inherit_Clause; spark-2012.0.deb/examiner/declarations-outputdeclarations-printdeclarations-printconstantrules.adb0000644000175000017500000010427711753202336033035 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. 
If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SystemErrors; with ErrorHandler; separate (Declarations.OutputDeclarations.PrintDeclarations) procedure PrintConstantRules (Write_Rules : in Boolean; Sym : in Dictionary.Symbol; Rule_File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; End_Position : in LexTokenManager.Token_Position) is -------------------------------------------------------------------------------- procedure PrintScalarConstantRules (Rule_File : in SPARK_IO.File_Type; Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Rule_Family_Name; --# in Scope; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# derives Rule_Counter from *, --# Dictionary.Dict, --# LexTokenManager.State, --# Sym & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# Sym; is StoreVal : LexTokenManager.Lex_String; T : Dictionary.Symbol; begin StoreVal := Dictionary.Get_Value (The_Constant => Sym); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => StoreVal, Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq then -- Sym has a literal value, so print a replacement rule. Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); Print_Replacement_Rule (Rule_File => Rule_File, Store_Val => StoreVal, Type_Mark => Dictionary.GetType (Sym), Scope => Scope); else -- Sym doesn't have a literal value - could be a deferred -- constant with a hidden completion, or a known discriminant. -- -- In the former case, the VCG will produce hypotheses giving the -- subtype membership of the constant, so no action here. -- -- In the case of a known discriminant, we generate a subtype membership -- rule, unless its type is Boolean. 
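-- For a known discriminant D of a non-Boolean scalar type T, the code below
-- emits two membership rules of the form (rule name elided):
--    D >= T__first may_be_deduced.
--    D <= T__last may_be_deduced.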
if Dictionary.IsKnownDiscriminant (Sym) then T := Dictionary.GetType (Sym); if not Dictionary.TypeIsBoolean (T) then Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, " >= ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => T); SPARK_IO.Put_Line (Rule_File, "__first may_be_deduced.", 0); Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, " <= ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => T); SPARK_IO.Put_Line (Rule_File, "__last may_be_deduced.", 0); end if; end if; end if; end PrintScalarConstantRules; -------------------------------------------------------------------------------- procedure PrintStructuredConstantRules (Rule_File : in SPARK_IO.File_Type; Sym : in Dictionary.Symbol; EndPosition : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Rule_Family_Name; --# in Scope; --# in out ErrorHandler.Error_Context; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# EndPosition, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# SPARK_IO.File_Sys, --# Sym & --# Rule_Counter from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# Sym; is Node : Dictionary.Symbol; Name : E_Strings.T; Constraints : E_Strings.T; Constraint_OK : Boolean; ErrorsFound : Boolean; -------------------------------------------------------------------------------- --# inherit SPARK_IO, --# SystemErrors; package Index --# own State : Letter; --# initializes State; is subtype IndexNameRange is Positive range 1 .. 1; subtype IndexNameType is String (IndexNameRange); subtype Letter is Character range 'A' .. 
'Z'; State : Letter; function Value return IndexNameType; --# global in State; procedure Next; --# global in out State; --# derives State from *; procedure Reset (Valu : in Letter); --# global out State; --# derives State from Valu; end Index; -------------------------------------------------------------------------------- --# inherit Dictionary, --# E_Strings, --# Index, --# SPARK_IO, --# SystemErrors; package Stack --# own State : StackType; --# initializes State; is procedure Push (Iterator : in Dictionary.Iterator; Name : in E_Strings.T; Constraints : in E_Strings.T; Constraint_OK : in Boolean; CurrentIndex : in Index.Letter); --# global in out State; --# derives State from *, --# Constraints, --# Constraint_OK, --# CurrentIndex, --# Iterator, --# Name; procedure Pop (Iterator : out Dictionary.Iterator; Name : out E_Strings.T; Constraints : out E_Strings.T; Constraint_OK : out Boolean; CurrentIndex : out Index.Letter); --# global in out State; --# derives Constraints, --# Constraint_OK, --# CurrentIndex, --# Iterator, --# Name, --# State from State; function IsEmpty return Boolean; --# global in State; end Stack; -------------------------------------------------------------------------------- package body Index is function Value return IndexNameType is begin return IndexNameType'(1 => State); end Value; procedure Next is begin if State = Letter'Last then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Too_Many_Nested_Arrays, Msg => "in PrintConstantRules"); end if; State := Letter'Succ (State); end Next; procedure Reset (Valu : in Letter) is begin State := Valu; end Reset; begin State := 'I'; end Index; -------------------------------------------------------------------------------- package body Stack is type StackPointer is range 0 .. 50; --# assert StackPointer'Base is Short_Short_Integer; -- for GNAT subtype StackIndex is StackPointer range 1 .. StackPointer'Last; type StackElement is record Iterator : Dictionary.Iterator; Name : E_Strings.T; Constraints : E_Strings.T; Constraint_OK : Boolean; CurrentIndex : Index.Letter; end record; type StackContents is array (StackIndex) of StackElement; type StackType is record Ptr : StackPointer; Contents : StackContents; end record; State : StackType; procedure Push (Iterator : in Dictionary.Iterator; Name : in E_Strings.T; Constraints : in E_Strings.T; Constraint_OK : in Boolean; CurrentIndex : in Index.Letter) is begin if State.Ptr = StackPointer'Last then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Too_Many_Nested_Records, Msg => "in PrintConstantRules"); end if; State.Ptr := State.Ptr + 1; State.Contents (State.Ptr).Iterator := Iterator; State.Contents (State.Ptr).Name := Name; State.Contents (State.Ptr).Constraints := Constraints; State.Contents (State.Ptr).Constraint_OK := Constraint_OK; State.Contents (State.Ptr).CurrentIndex := CurrentIndex; end Push; procedure Pop (Iterator : out Dictionary.Iterator; Name : out E_Strings.T; Constraints : out E_Strings.T; Constraint_OK : out Boolean; CurrentIndex : out Index.Letter) is begin Iterator := State.Contents (State.Ptr).Iterator; Name := State.Contents (State.Ptr).Name; Constraints := State.Contents (State.Ptr).Constraints; Constraint_OK := State.Contents (State.Ptr).Constraint_OK; CurrentIndex := State.Contents (State.Ptr).CurrentIndex; State.Ptr := State.Ptr - 1; end Pop; function IsEmpty return Boolean is begin return State.Ptr = 0; end IsEmpty; begin State.Ptr := 0; --# accept Flow, 32, State.Contents, "Init is partial but effective." 
& --# Flow, 31, State.Contents, "Init is partial but effective." & --# Flow, 602, State, State.Contents, "Init is partial but effective."; end Stack; -------------------------------------------------------------------------------- procedure PushState (Iterator : in Dictionary.Iterator; Name : in E_Strings.T; Constraints : in E_Strings.T; Constraint_OK : in Boolean) --# global in Dictionary.Dict; --# in Index.State; --# in out Stack.State; --# derives Stack.State from *, --# Constraints, --# Constraint_OK, --# Dictionary.Dict, --# Index.State, --# Iterator, --# Name; is Next : Dictionary.Iterator; begin Next := Dictionary.NextSymbol (Iterator); if not Dictionary.IsNullIterator (Next) then Stack.Push (Next, Name, Constraints, Constraint_OK, Index.State); end if; end PushState; -------------------------------------------------------------------------------- procedure Walk_Record (Sym : out Dictionary.Symbol; Components : in Dictionary.Iterator; Name : in out E_Strings.T; Constraints : in E_Strings.T; Constraint_OK : in Boolean) --# global in Dictionary.Dict; --# in Index.State; --# in LexTokenManager.State; --# in out Stack.State; --# derives Name from *, --# Components, --# Dictionary.Dict, --# LexTokenManager.State & --# Stack.State from *, --# Components, --# Constraints, --# Constraint_OK, --# Dictionary.Dict, --# Index.State, --# Name & --# Sym from Components, --# Dictionary.Dict; is Component : Dictionary.Symbol; Record_Name : E_Strings.T; begin Component := Dictionary.CurrentSymbol (Components); PushState (Components, Name, Constraints, Constraint_OK); Record_Name := Name; Name := E_Strings.Copy_String (Str => "fld_"); E_Strings.Append_Examiner_String (E_Str1 => Name, E_Str2 => E_Strings.Lower_Case (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Dictionary.GetSimpleName (Component)))); E_Strings.Append_String (E_Str => Name, Str => "("); E_Strings.Append_Examiner_String (E_Str1 => Name, E_Str2 => Record_Name); E_Strings.Append_String (E_Str => Name, Str => ")"); Sym := Dictionary.GetType (Component); end Walk_Record; -------------------------------------------------------------------------------- procedure TreeWalk (Sym : in out Dictionary.Symbol; Name : in out E_Strings.T; Constraints : in out E_Strings.T; Constraint_OK : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Scope; --# in out Index.State; --# in out Stack.State; --# derives Constraints from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Index.State, --# LexTokenManager.State, --# Scope, --# Sym & --# Constraint_OK, --# Index.State, --# Sym from *, --# Dictionary.Dict, --# Scope, --# Sym & --# Name from *, --# Dictionary.Dict, --# Index.State, --# LexTokenManager.State, --# Scope, --# Sym & --# Stack.State from *, --# CommandLineData.Content, --# Constraints, --# Constraint_OK, --# Dictionary.Dict, --# Index.State, --# LexTokenManager.State, --# Name, --# Scope, --# Sym; is procedure WalkArray (Sym : in out Dictionary.Symbol; Name : in out E_Strings.T; Constraints : in out E_Strings.T; Constraint_OK : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Scope; --# in out Index.State; --# derives Constraints from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Index.State, --# LexTokenManager.State, --# Scope, --# Sym & --# Constraint_OK, --# Index.State, --# Sym from *, --# Dictionary.Dict, --# Sym & --# Name from *, --# Dictionary.Dict, --# Index.State, --# Sym; is 
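-- WalkArray rewrites Name as element(Name, [I, J, ...]), taking a fresh index
-- letter for each dimension, and for every index type whose bounds are known
-- appends constraints bounding that index letter by the index type's first
-- and last values.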
ArrayName : E_Strings.T; Indices : Dictionary.Iterator; procedure AppendIndexConstraints (Indices : in Dictionary.Iterator; Constraints : in out E_Strings.T) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in Index.State; --# in LexTokenManager.State; --# in Scope; --# in out Constraint_OK; --# derives Constraints from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Index.State, --# Indices, --# LexTokenManager.State, --# Scope & --# Constraint_OK from *, --# Dictionary.Dict, --# Indices; is IndexType : Dictionary.Symbol; Constraint : E_Strings.T; FirstValue : LexTokenManager.Lex_String; LastValue : LexTokenManager.Lex_String; procedure NewConstraint (Constraint : in E_Strings.T; Constraints : in out E_Strings.T) --# derives Constraints from *, --# Constraint; is begin if not E_Strings.Is_Empty (E_Str => Constraints) then E_Strings.Append_String (E_Str => Constraints, Str => ", "); end if; E_Strings.Append_Examiner_String (E_Str1 => Constraints, E_Str2 => Constraint); end NewConstraint; begin IndexType := Dictionary.CurrentSymbol (Indices); if not Dictionary.IsUnknownTypeMark (IndexType) then -- Guard to prevent constraint being generated for boolean index if not Dictionary.TypeIsBoolean (IndexType) then FirstValue := Dictionary.GetScalarAttributeValue (False, LexTokenManager.First_Token, IndexType); Constraint := Get_Value (Store_Val => FirstValue, Type_Mark => IndexType, Scope => Scope); E_Strings.Append_String (E_Str => Constraint, Str => " <= "); E_Strings.Append_String (E_Str => Constraint, Str => Index.Value); NewConstraint (Constraint, Constraints); Constraint := E_Strings.Copy_String (Str => Index.Value); E_Strings.Append_String (E_Str => Constraint, Str => " <= "); LastValue := Dictionary.GetScalarAttributeValue (False, LexTokenManager.Last_Token, IndexType); E_Strings.Append_Examiner_String (E_Str1 => Constraint, E_Str2 => Get_Value (Store_Val => LastValue, Type_Mark => IndexType, Scope => Scope)); NewConstraint (Constraint, Constraints); end if; else Constraint_OK := False; end if; end AppendIndexConstraints; -------------------------------------------------------------------------------- begin ArrayName := Name; Name := E_Strings.Copy_String (Str => "element("); E_Strings.Append_Examiner_String (E_Str1 => Name, E_Str2 => ArrayName); E_Strings.Append_String (E_Str => Name, Str => ", ["); E_Strings.Append_String (E_Str => Name, Str => Index.Value); Indices := Dictionary.FirstArrayIndex (Sym); AppendIndexConstraints (Indices, Constraints); Index.Next; loop Indices := Dictionary.NextSymbol (Indices); exit when Dictionary.IsNullIterator (Indices); E_Strings.Append_String (E_Str => Name, Str => ", "); E_Strings.Append_String (E_Str => Name, Str => Index.Value); AppendIndexConstraints (Indices, Constraints); Index.Next; end loop; E_Strings.Append_String (E_Str => Name, Str => "])"); Sym := Dictionary.GetArrayComponent (Sym); end WalkArray; begin loop exit when Dictionary.IsUnknownTypeMark (Sym); exit when Dictionary.TypeIsScalar (Sym); exit when Dictionary.IsPrivateType (Sym, Scope); exit when Dictionary.TypeIsGeneric (Sym); -- no rules for generic types exit when Dictionary.TypeIsRecord (Sym) and then -- no rules for null records not Dictionary.RecordHasSomeFields (Sym); if Dictionary.TypeIsArray (Sym) then WalkArray (Sym, Name, Constraints, Constraint_OK); elsif Dictionary.TypeIsRecord (Sym) then -- Sym might denote a record subtype here, so... 
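-- ...normalise to the root type before walking its components. Walk_Record
-- then renames the current item as fld_<component>(<name>) and stacks any
-- remaining components so that Backtrack can revisit them later.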
Sym := Dictionary.GetRootType (Sym); Walk_Record (Sym => Sym, Components => Dictionary.FirstRecordComponent (Sym), Name => Name, Constraints => Constraints, Constraint_OK => Constraint_OK); else -- Should never be reached. We have covered scalar, private, generic, -- unknown, array and record above -- and task and protected types can't appear in expressions. SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Unexpected type symbol in PrintConstantRules.TreeWalk"); end if; end loop; end TreeWalk; -------------------------------------------------------------------------------- procedure PrintTypeBounds (Rule_File : in SPARK_IO.File_Type; Sym : in Dictionary.Symbol; Name : in E_Strings.T; Constraints : in E_Strings.T) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Rule_Family_Name; --# in Scope; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# derives Rule_Counter from *, --# Dictionary.Dict, --# Sym & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Constraints, --# Dictionary.Dict, --# LexTokenManager.State, --# Name, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# Sym; is procedure PrintLowerBound (Rule_File : in SPARK_IO.File_Type; Sym : in Dictionary.Symbol; Name : in E_Strings.T; Constraints : in E_Strings.T) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Rule_Family_Name; --# in Scope; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# derives Rule_Counter from * & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Constraints, --# Dictionary.Dict, --# LexTokenManager.State, --# Name, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# Sym; is begin Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__first <= ", 0); E_Strings.Put_String (File => Rule_File, E_Str => Name); SPARK_IO.Put_String (Rule_File, " may_be_deduced", 0); if not E_Strings.Is_Empty (E_Str => Constraints) then SPARK_IO.Put_String (Rule_File, "_from [", 0); E_Strings.Put_String (File => Rule_File, E_Str => Constraints); SPARK_IO.Put_String (Rule_File, "]", 0); end if; End_A_Rule (Rule_File => Rule_File); end PrintLowerBound; procedure PrintUpperBound (Rule_File : in SPARK_IO.File_Type; Sym : in Dictionary.Symbol; Name : in E_Strings.T; Constraints : in E_Strings.T) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Rule_Family_Name; --# in Scope; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# derives Rule_Counter from * & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Constraints, --# Dictionary.Dict, --# LexTokenManager.State, --# Name, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# Sym; is begin Print_Rule_Name (Rule_File => Rule_File); E_Strings.Put_String (File => Rule_File, E_Str => Name); SPARK_IO.Put_String (Rule_File, " <= ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__last may_be_deduced", 0); if not E_Strings.Is_Empty (E_Str => Constraints) then SPARK_IO.Put_String (Rule_File, "_from [", 0); E_Strings.Put_String (File => Rule_File, E_Str => Constraints); SPARK_IO.Put_String (Rule_File, "]", 0); end if; End_A_Rule (Rule_File => Rule_File); end PrintUpperBound; begin -- Boolean types are scalar, but do not have "<=" or ">=" operators -- in FDL, so there's 
no range constraint for them. if not Dictionary.TypeIsBoolean (Sym) then PrintLowerBound (Rule_File, Sym, Name, Constraints); PrintUpperBound (Rule_File, Sym, Name, Constraints); end if; end PrintTypeBounds; procedure Backtrack (Sym : out Dictionary.Symbol; Name : out E_Strings.T; Constraints : out E_Strings.T; Constraint_OK : out Boolean) --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in out Stack.State; --# out Index.State; --# derives Constraints, --# Constraint_OK, --# Index.State from Stack.State & --# Name from Dictionary.Dict, --# LexTokenManager.State, --# Stack.State & --# Stack.State, --# Sym from Dictionary.Dict, --# Stack.State; is CurrentIndex : Index.Letter; TheSym : Dictionary.Symbol; Components : Dictionary.Iterator; TheName : E_Strings.T; TheConstraints : E_Strings.T; The_Constraint_OK : Boolean; begin Stack.Pop (Components, TheName, TheConstraints, The_Constraint_OK, CurrentIndex); Index.Reset (CurrentIndex); Walk_Record (Sym => TheSym, Components => Components, Name => TheName, Constraints => TheConstraints, Constraint_OK => The_Constraint_OK); Sym := TheSym; Name := TheName; Constraints := TheConstraints; Constraint_OK := The_Constraint_OK; end Backtrack; -------------------------------------------------------------------------------- begin Node := Dictionary.GetType (Sym); Name := Get_Name (Sym => Sym, Scope => Scope); Constraint_OK := True; Constraints := E_Strings.Empty_String; ErrorsFound := False; -- Note: This accept annotation should be inside the loop around the TreeWalk call -- but currently this is rejected by the parser. See SEPR 2067 --# accept Flow, 10, Index.State, "Expected ineffective assignment to Index.State"; loop -- Expect ineffective assignment to Index.State, as this -- state is discarded when we leave PrintConstantRules TreeWalk (Node, Name, Constraints, Constraint_OK); -- 782 - Expect 1 ineffective assignment if not Dictionary.TypeIsPrivate (TheType => Node) then -- no bounds available for private types if not Dictionary.IsUnknownTypeMark (Node) then -- nor unknown types if Constraint_OK then PrintTypeBounds (Rule_File, Node, Name, Constraints); else ErrorsFound := True; end if; else ErrorsFound := True; end if; end if; exit when Stack.IsEmpty; Backtrack (Node, Name, Constraints, Constraint_OK); end loop; --# end accept; if ErrorsFound then ErrorHandler.Semantic_Warning_Sym (Err_Num => 314, Position => EndPosition, Sym => Sym, Scope => Dictionary.GetScope (Sym)); end if; end PrintStructuredConstantRules; -------------------------------------------------------------------------------- begin if Write_Rules then if not Dictionary.IsPrivateType (Dictionary.GetType (Sym), Scope) then if Dictionary.TypeIsScalar (Dictionary.GetType (Sym)) then PrintScalarConstantRules (Rule_File, Sym); else PrintStructuredConstantRules (Rule_File, Sym, End_Position); end if; end if; end if; end PrintConstantRules; spark-2012.0.deb/examiner/sem-check_no_overloading_from_tagged_ops.adb0000644000175000017500000000757011753202336025075 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) procedure Check_No_Overloading_From_Tagged_Ops (Ident_Node : in STree.SyntaxNode; Subprog_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Abstraction : in Dictionary.Abstractions; Is_Overriding : in Boolean) is Root_Subprog_Sym : Dictionary.Symbol; Actual_Tagged_Parameter_Type : Dictionary.Symbol; function Successfully_Overrides (Root_Subprog, Second_Subprog, Actual_Tagged_Parameter_Type : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; --# in LexTokenManager.State; is separate; begin -- Check_No_Overloading_From_Tagged_Ops -- if a potentially inheritable subprogram of the same name exists then -- the new declaration is only legal if it successfully overrides it Dictionary.SearchForInheritedOperations (Name => Node_Lex_String (Node => Ident_Node), Scope => Scope, Prefix => Dictionary.NullSymbol, Context => Dictionary.ProofContext, OpSym => Root_Subprog_Sym, ActualTaggedType => Actual_Tagged_Parameter_Type); if not Dictionary.Is_Null_Symbol (Root_Subprog_Sym) then -- An inheritable subprogram has been found. -- This declaration is only legal if it overrides it if not Successfully_Overrides (Root_Subprog => Root_Subprog_Sym, Second_Subprog => Subprog_Sym, Actual_Tagged_Parameter_Type => Actual_Tagged_Parameter_Type) then ErrorHandler.Semantic_Error_Sym (Err_Num => 829, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Sym => Root_Subprog_Sym, Scope => Scope); if not Dictionary.IsProofFunction (Subprog_Sym) then Dictionary.SetSubprogramSignatureNotWellformed (Abstraction, Subprog_Sym); end if; elsif CommandLineData.Content.Language_Profile in CommandLineData.SPARK2005_Profiles and then not Is_Overriding then -- An inherited subprogram has been found, but its declaration contradicts -- its overriding indicator. ErrorHandler.Semantic_Error_Sym (Err_Num => 844, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Sym => Subprog_Sym, Scope => Scope); elsif CommandLineData.Content.Language_Profile in CommandLineData.SPARK2005_Profiles and then Dictionary.Is_Null_Symbol (Root_Subprog_Sym) and then Is_Overriding then ErrorHandler.Semantic_Error_Sym (Err_Num => 845, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Sym => Subprog_Sym, Scope => Scope); end if; end Check_No_Overloading_From_Tagged_Ops; spark-2012.0.deb/examiner/dictionary-nextsymbol.adb0000644000175000017500000022632511753202336021300 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Dictionary) function NextSymbol (Previous : Iterator) return Iterator is Next : Iterator; -------------------------------------------------------------------------------- function Next_Declarative_Item (Previous : Iterator) return Iterator --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol := NullSymbol; Found : Boolean := False; Next : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => RawDict.Get_Declaration_Info_Ref (Previous.Context)); while The_Declaration /= RawDict.Null_Declaration_Info_Ref and then not Found loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); case RawDict.GetSymbolDiscriminant (Item) is when Package_Symbol => Found := The_Declaration /= RawDict.Get_Package_Body (The_Package => RawDict.Get_Package_Info_Ref (Item => Item)); when Subprogram_Symbol => Found := The_Declaration /= RawDict.Get_Subprogram_Body (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Item)); when others => Found := True; end case; if not Found then The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; if The_Declaration /= RawDict.Null_Declaration_Info_Ref then Next := Iterator' (DeclarativeItemIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (The_Declaration => The_Declaration)); end if; return Next; end Next_Declarative_Item; -------------------------------------------------------------------------------- function Next_Deferred_Constant (Previous : Iterator) return Iterator --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol; Next : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => RawDict.Get_Declaration_Info_Ref (Previous.Context)); while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); if RawDict.GetSymbolDiscriminant (Item) = Constant_Symbol and then Constant_Is_Deferred (The_Constant => RawDict.Get_Constant_Info_Ref (Item => Item)) then Next := Iterator' (DeferredConstantIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (The_Declaration => The_Declaration)); The_Declaration := RawDict.Null_Declaration_Info_Ref; else The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; return Next; end Next_Deferred_Constant; -------------------------------------------------------------------------------- function Next_Array_Index (Previous : Iterator) return Iterator --# global in Dict; is The_Array_Index : RawDict.Array_Index_Info_Ref; Result : Iterator := NullIterator; begin The_Array_Index := RawDict.Get_Next_Array_Index (RawDict.Get_Array_Index_Info_Ref (Previous.Context)); if The_Array_Index /= RawDict.Null_Array_Index_Info_Ref then Result := Iterator' (ArrayIndexIterator, IsAbstract, RawDict.Get_Type_Symbol (RawDict.Get_Array_Index_Type (The_Array_Index => 
The_Array_Index)), RawDict.Get_Array_Index_Symbol (The_Array_Index)); end if; return Result; end Next_Array_Index; -------------------------------------------------------------------------------- function Next_Loop (Previous : Iterator) return Iterator --# global in Dict; is The_Loop : Symbol; Next : Iterator; begin The_Loop := RawDict.GetNextLoop (CurrentSymbol (Previous)); if The_Loop = NullSymbol then Next := NullIterator; else Next := Iterator'(LoopIterator, IsAbstract, The_Loop, NullSymbol); end if; return Next; end Next_Loop; -------------------------------------------------------------------------------- function Next_Undeclared_Type (Previous : Iterator) return Iterator --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol; Next : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => RawDict.Get_Declaration_Info_Ref (Previous.Context)); while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); if RawDict.GetSymbolDiscriminant (Item) = Type_Symbol and then Is_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Item)) and then RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Item)) = Unknown_Type_Item then Next := Iterator' (UndeclaredTypeIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (The_Declaration => The_Declaration)); The_Declaration := RawDict.Null_Declaration_Info_Ref; else The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; return Next; end Next_Undeclared_Type; -------------------------------------------------------------------------------- function Next_Private_Type (Previous : Iterator) return Iterator --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol; Next : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => RawDict.Get_Declaration_Info_Ref (Previous.Context)); while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); if RawDict.GetSymbolDiscriminant (Item) = Type_Symbol and then Is_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Item)) and then Type_Is_Private (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Item)) and then RawDict.Get_Declaration_Context (The_Declaration => The_Declaration) = ProgramContext then Next := Iterator' (PrivateTypeIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (The_Declaration => The_Declaration)); The_Declaration := RawDict.Null_Declaration_Info_Ref; else The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; return Next; end Next_Private_Type; -------------------------------------------------------------------------------- function Next_Enumeration_Literal (Previous : Iterator) return Iterator --# global in Dict; is The_Enumeration_Literal : RawDict.Enumeration_Literal_Info_Ref; Next : Iterator := NullIterator; begin The_Enumeration_Literal := RawDict.Get_Next_Enumeration_Literal (The_Enumeration_Literal => RawDict.Get_Enumeration_Literal_Info_Ref (CurrentSymbol (Previous))); if The_Enumeration_Literal /= RawDict.Null_Enumeration_Literal_Info_Ref then Next := Iterator' (EnumerationLiteralIterator, IsAbstract, RawDict.Get_Enumeration_Literal_Symbol (The_Enumeration_Literal), NullSymbol); end if; return Next; end Next_Enumeration_Literal; 
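   -- The Next_* helpers in this function body each advance one discriminant of
   -- the Iterator type.  A typical client traversal, sketched here only as a
   -- comment and following the pattern used in
   -- Dictionary.Search_For_Inherited_Operations (Process is a hypothetical
   -- caller-side routine, not part of this package):
   --
   --    It := FirstSubprogramParameter (Subprog_Sym);
   --    while not IsNullIterator (It) loop
   --       Process (CurrentSymbol (It));
   --       It := NextSymbol (It);
   --    end loop;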
-------------------------------------------------------------------------------- function Next_Record_Component (Previous : Iterator) return Iterator --# global in Dict; is The_Record_Component : RawDict.Record_Component_Info_Ref; Next : Iterator := NullIterator; begin The_Record_Component := RawDict.Get_Next_Record_Component (The_Record_Component => RawDict.Get_Record_Component_Info_Ref (CurrentSymbol (Previous))); if The_Record_Component /= RawDict.Null_Record_Component_Info_Ref then Next := Iterator'(RecordComponentIterator, IsAbstract, RawDict.Get_Record_Component_Symbol (The_Record_Component), NullSymbol); end if; return Next; end Next_Record_Component; -------------------------------------------------------------------------------- function Next_Extended_Record_Component (Previous : Iterator) return Iterator --# global in Dict; is The_Record_Component : RawDict.Record_Component_Info_Ref; Current_Record : RawDict.Type_Info_Ref; Next : Iterator; begin The_Record_Component := RawDict.Get_Next_Record_Component (The_Record_Component => RawDict.Get_Record_Component_Info_Ref (CurrentSymbol (Previous))); if The_Record_Component = RawDict.Null_Record_Component_Info_Ref then -- current record has no more fields so see if earlier extension has Current_Record := Back_Track_Up_Inherit_Chain (Start_Sym => RawDict.Get_Type_Info_Ref (Previous.Context), Stop_Sym => RawDict.Get_Record_Component_Record_Type (The_Record_Component => RawDict.Get_Record_Component_Info_Ref (CurrentSymbol (Previous)))); loop if Current_Record = RawDict.Null_Type_Info_Ref then -- searched entire extended record structure Next := NullIterator; exit; end if; -- We have a new extended record to search, we need the _second_ field (to skip Inherit) The_Record_Component := RawDict.Get_Next_Record_Component (The_Record_Component => RawDict.Get_Type_First_Record_Component (Type_Mark => Current_Record)); if The_Record_Component /= RawDict.Null_Record_Component_Info_Ref then -- found a field Next := Iterator' (ExtendedRecordComponentIterator, IsAbstract, RawDict.Get_Record_Component_Symbol (The_Record_Component), Previous.Context); exit; end if; -- if get here then we must have encountered a null extension - we must backtrack again Current_Record := Back_Track_Up_Inherit_Chain (Start_Sym => RawDict.Get_Type_Info_Ref (Previous.Context), Stop_Sym => Current_Record); end loop; else -- record has further fields Next := Iterator' (ExtendedRecordComponentIterator, IsAbstract, RawDict.Get_Record_Component_Symbol (The_Record_Component), Previous.Context); end if; return Next; end Next_Extended_Record_Component; -------------------------------------------------------------------------------- function Next_Library_Unit (Previous : Iterator) return Iterator --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol := NullSymbol; Found : Boolean := False; Next : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => RawDict.Get_Declaration_Info_Ref (Previous.Context)); while The_Declaration /= RawDict.Null_Declaration_Info_Ref and then not Found loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); case RawDict.GetSymbolDiscriminant (Item) is when Package_Symbol => Found := The_Declaration /= RawDict.Get_Package_Body (The_Package => RawDict.Get_Package_Info_Ref (Item => Item)); when Subprogram_Symbol => Found := The_Declaration /= RawDict.Get_Subprogram_Body (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Item)); when others => Found := 
False; end case; if not Found then The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; if The_Declaration /= RawDict.Null_Declaration_Info_Ref then Next := Iterator'(LibraryUnitIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (The_Declaration => The_Declaration)); end if; return Next; end Next_Library_Unit; -------------------------------------------------------------------------------- function Next_Withed_Package (Previous : Iterator) return Iterator --# global in Dict; is The_Context_Clause : RawDict.Context_Clause_Info_Ref; Next : Iterator := NullIterator; begin The_Context_Clause := RawDict.Get_Next_Context_Clause (The_Context_Clause => RawDict.Get_Context_Clause_Info_Ref (Previous.Context)); if The_Context_Clause /= RawDict.Null_Context_Clause_Info_Ref then case RawDict.Get_Context_Clause_Is_Subprogram (The_Context_Clause => The_Context_Clause) is when False => Next := Iterator' (WithedPackageIterator, IsAbstract, RawDict.Get_Package_Symbol (RawDict.Get_Context_Clause_Package (The_Context_Clause => The_Context_Clause)), RawDict.Get_Context_Clause_Symbol (The_Context_Clause)); when True => Next := Iterator' (WithedPackageIterator, IsAbstract, RawDict.Get_Subprogram_Symbol (RawDict.Get_Context_Clause_Subprogram (The_Context_Clause => The_Context_Clause)), RawDict.Get_Context_Clause_Symbol (The_Context_Clause)); end case; end if; return Next; end Next_Withed_Package; -------------------------------------------------------------------------------- function Next_Inherited_Package (Previous : Iterator) return Iterator --# global in Dict; is The_Context_Clause : RawDict.Context_Clause_Info_Ref; Next : Iterator := NullIterator; begin The_Context_Clause := RawDict.Get_Next_Context_Clause (The_Context_Clause => RawDict.Get_Context_Clause_Info_Ref (Previous.Context)); if The_Context_Clause /= RawDict.Null_Context_Clause_Info_Ref then case RawDict.Get_Context_Clause_Is_Subprogram (The_Context_Clause => The_Context_Clause) is when False => Next := Iterator' (InheritedPackageIterator, IsAbstract, RawDict.Get_Package_Symbol (RawDict.Get_Context_Clause_Package (The_Context_Clause => The_Context_Clause)), RawDict.Get_Context_Clause_Symbol (The_Context_Clause)); when True => Next := Iterator' (InheritedPackageIterator, IsAbstract, RawDict.Get_Subprogram_Symbol (RawDict.Get_Context_Clause_Subprogram (The_Context_Clause => The_Context_Clause)), RawDict.Get_Context_Clause_Symbol (The_Context_Clause)); end case; end if; return Next; end Next_Inherited_Package; -------------------------------------------------------------------------------- function Next_Visible_Or_Private_Subprogram (Previous : Iterator) return Iterator --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol; Next : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => RawDict.Get_Declaration_Info_Ref (Previous.Context)); while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); if RawDict.GetSymbolDiscriminant (Item) = Subprogram_Symbol and then The_Declaration /= RawDict.Get_Subprogram_Body (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Item)) then Next := Iterator' (Previous.Discriminant, IsAbstract, Item, RawDict.Get_Declaration_Symbol (The_Declaration => The_Declaration)); The_Declaration := RawDict.Null_Declaration_Info_Ref; else The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => 
The_Declaration); end if; end loop; return Next; end Next_Visible_Or_Private_Subprogram; -------------------------------------------------------------------------------- function Next_Task_Type (Previous : Iterator) return Iterator --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol; Next : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => RawDict.Get_Declaration_Info_Ref (Previous.Context)); while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); if RawDict.GetSymbolDiscriminant (Item) = Type_Symbol and then Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Item)) then Next := Iterator'(TaskTypeIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (The_Declaration => The_Declaration)); The_Declaration := RawDict.Null_Declaration_Info_Ref; else The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; return Next; end Next_Task_Type; -------------------------------------------------------------------------------- function Next_Own_Task (Previous : Iterator) return Iterator --# global in Dict; is Own_Task : Symbol; Next : Iterator; begin Own_Task := RawDict.GetNextOwnTask (Previous.Context); if Own_Task = NullSymbol then Next := NullIterator; else Next := Iterator'(OwnTaskIterator, IsAbstract, RawDict.Get_Variable_Symbol (RawDict.GetOwnTaskVariable (Own_Task)), Own_Task); end if; return Next; end Next_Own_Task; -------------------------------------------------------------------------------- function Next_Protected_Type (Previous : Iterator) return Iterator --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol; Next : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => RawDict.Get_Declaration_Info_Ref (Previous.Context)); while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); -- filter items to leave just protected types that are actually declared (not just announced) if RawDict.GetSymbolDiscriminant (Item) = Type_Symbol and then Is_Protected_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Item)) and then Get_Visibility (Scope => GetScope (Item)) = Visible then Next := Iterator' (ProtectedTypeIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (The_Declaration => The_Declaration)); The_Declaration := RawDict.Null_Declaration_Info_Ref; else The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; return Next; end Next_Protected_Type; -------------------------------------------------------------------------------- function Next_Subprogram_Parameter (Previous : Iterator) return Iterator --# global in Dict; is Next : Iterator; -------------------------------------------------------------------------------- function Next_Formal_Parameter (Previous : Iterator) return Iterator --# global in Dict; is The_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref; ProofFunction : Symbol; Ada_Function : RawDict.Subprogram_Info_Ref; GlobalVariables : Iterator; Next : Iterator; begin The_Subprogram_Parameter := RawDict.Get_Next_Subprogram_Parameter (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (CurrentSymbol (Previous))); ProofFunction := Previous.Context; if The_Subprogram_Parameter = RawDict.Null_Subprogram_Parameter_Info_Ref then 
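            -- No more formal parameters.  If this iterator was opened on an
            -- implicit proof function (Previous.Context /= NullSymbol) the
            -- traversal continues with the global variables of the
            -- corresponding Ada function; otherwise it ends here.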
if ProofFunction = NullSymbol then Next := NullIterator; else Ada_Function := RawDict.GetImplicitProofFunctionAdaFunction (ProofFunction); GlobalVariables := First_Subprogram_Global_Variable (The_Subprogram => Ada_Function, Abstraction => Previous.Abstraction); if GlobalVariables = NullIterator then Next := NullIterator; else Next := Iterator' (ImplicitProofFunctionGlobalIterator, Previous.Abstraction, CurrentSymbol (GlobalVariables), GlobalVariables.Context); end if; end if; else Next := Iterator' (Previous.Discriminant, Previous.Abstraction, RawDict.Get_Subprogram_Parameter_Symbol (The_Subprogram_Parameter), ProofFunction); end if; return Next; end Next_Formal_Parameter; -------------------------------------------------------------------------------- function Next_Global_Variable (Previous : Iterator) return Iterator --# global in Dict; is The_Global_Variable : RawDict.Global_Variable_Info_Ref; Next : Iterator := NullIterator; begin The_Global_Variable := RawDict.Get_Next_Global_Variable (The_Global_Variable => RawDict.Get_Global_Variable_Info_Ref (Previous.Context)); if The_Global_Variable /= RawDict.Null_Global_Variable_Info_Ref then case RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) is when RawDict.Subprogram_Variable_Item => Next := Iterator' (Previous.Discriminant, Previous.Abstraction, RawDict.Get_Variable_Symbol (RawDict.Get_Global_Variable_Variable (The_Global_Variable => The_Global_Variable)), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); when RawDict.Subprogram_Parameter_Item => Next := Iterator' (Previous.Discriminant, Previous.Abstraction, RawDict.Get_Subprogram_Parameter_Symbol (RawDict.Get_Global_Variable_Parameter (The_Global_Variable => The_Global_Variable)), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); when others => -- non-exec code Next := NullIterator; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Next_Global_Variable"); end case; end if; return Next; end Next_Global_Variable; begin -- Next_Subprogram_Parameter case Previous.Discriminant is when SubprogramParameterIterator | ImplicitProofFunctionParameterIterator => Next := Next_Formal_Parameter (Previous => Previous); when ImplicitProofFunctionGlobalIterator => Next := Next_Global_Variable (Previous => Previous); when others => Next := NullIterator; end case; return Next; end Next_Subprogram_Parameter; -------------------------------------------------------------------------------- function Next_Global_Variable (Previous : Iterator) return Iterator --# global in Dict; is The_Global_Variable : RawDict.Global_Variable_Info_Ref; Next : Iterator := NullIterator; begin The_Global_Variable := RawDict.Get_Next_Global_Variable (The_Global_Variable => RawDict.Get_Global_Variable_Info_Ref (Previous.Context)); if The_Global_Variable /= RawDict.Null_Global_Variable_Info_Ref then case RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) is when RawDict.Subprogram_Variable_Item | RawDict.Task_Type_Variable_Item => Next := Iterator' (GlobalVariableIterator, IsAbstract, RawDict.Get_Variable_Symbol (RawDict.Get_Global_Variable_Variable (The_Global_Variable => The_Global_Variable)), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); when RawDict.Subprogram_Parameter_Item => Next := Iterator' (GlobalVariableIterator, IsAbstract, RawDict.Get_Subprogram_Parameter_Symbol (RawDict.Get_Global_Variable_Parameter (The_Global_Variable => The_Global_Variable)), RawDict.Get_Global_Variable_Symbol 
(The_Global_Variable)); end case; end if; return Next; end Next_Global_Variable; -------------------------------------------------------------------------------- function Next_Local_Variable (Previous : Iterator) return Iterator --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol; Next : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => RawDict.Get_Declaration_Info_Ref (Previous.Context)); while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); if Is_Variable (Item) then Next := Iterator' (LocalVariableIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (The_Declaration => The_Declaration)); The_Declaration := RawDict.Null_Declaration_Info_Ref; else The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; return Next; end Next_Local_Variable; -------------------------------------------------------------------------------- function Next_Protected_Element (Previous : Iterator) return Iterator --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Next : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => RawDict.Get_Declaration_Info_Ref (Previous.Context)); if The_Declaration /= RawDict.Null_Declaration_Info_Ref then Next := Iterator' (LocalVariableIterator, IsAbstract, RawDict.Get_Declaration_Item (The_Declaration => The_Declaration), RawDict.Get_Declaration_Symbol (The_Declaration => The_Declaration)); end if; return Next; end Next_Protected_Element; -------------------------------------------------------------------------------- function Next_Initialized_Variable (Previous : Iterator) return Iterator --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol; Next : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => RawDict.Get_Declaration_Info_Ref (Previous.Context)); while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); if Is_Variable (Item) and then RawDict.Get_Variable_Initialized (The_Variable => RawDict.Get_Variable_Info_Ref (Item)) then Next := Iterator' (InitializedVariableIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (The_Declaration => The_Declaration)); The_Declaration := RawDict.Null_Declaration_Info_Ref; else The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; return Next; end Next_Initialized_Variable; -------------------------------------------------------------------------------- function Next_Import_Export (Previous : Iterator) return Iterator --# global in Dict; is Next : Iterator; -------------------------------------------------------------------------------- function Next_Subprogram_Parameter_Import_Export (Previous : Iterator) return Iterator --# global in Dict; is The_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref; The_Subprogram : RawDict.Subprogram_Info_Ref; The_Global_Variable : RawDict.Global_Variable_Info_Ref; The_Variable : RawDict.Variable_Info_Ref; Next : Iterator := NullIterator; Stop : Boolean := False; begin The_Subprogram_Parameter := RawDict.Get_Next_Subprogram_Parameter (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (CurrentSymbol (Previous))); The_Subprogram := RawDict.Get_Subprogram_Parameter_Subprogram 
(The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (CurrentSymbol (Previous))); while The_Subprogram_Parameter /= RawDict.Null_Subprogram_Parameter_Info_Ref and then not Is_Subprogram_Import_Export_Subprogram_Parameter (The_Subprogram => The_Subprogram, Abstraction => Previous.Abstraction, The_Subprogram_Parameter => The_Subprogram_Parameter, Is_Implicit_Proof_Function => False) loop The_Subprogram_Parameter := RawDict.Get_Next_Subprogram_Parameter (The_Subprogram_Parameter => The_Subprogram_Parameter); end loop; if The_Subprogram_Parameter = RawDict.Null_Subprogram_Parameter_Info_Ref then The_Global_Variable := RawDict.Get_Subprogram_First_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Previous.Abstraction); while not Stop loop if The_Global_Variable = RawDict.Null_Global_Variable_Info_Ref then Next := NullIterator; exit; end if; case RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) is when RawDict.Subprogram_Variable_Item => The_Variable := RawDict.Get_Global_Variable_Variable (The_Global_Variable => The_Global_Variable); if Is_Subprogram_Import_Export_Variable (The_Subprogram => The_Subprogram, Abstraction => Previous.Abstraction, The_Variable => The_Variable) then Next := Iterator' (ImportExportIterator, Previous.Abstraction, RawDict.Get_Variable_Symbol (The_Variable), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); Stop := True; end if; when RawDict.Subprogram_Parameter_Item => The_Subprogram_Parameter := RawDict.Get_Global_Variable_Parameter (The_Global_Variable => The_Global_Variable); if Is_Subprogram_Import_Export_Subprogram_Parameter (The_Subprogram => The_Subprogram, Abstraction => Previous.Abstraction, The_Subprogram_Parameter => The_Subprogram_Parameter, Is_Implicit_Proof_Function => False) then Next := Iterator' (ImportExportIterator, Previous.Abstraction, RawDict.Get_Subprogram_Parameter_Symbol (The_Subprogram_Parameter), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); Stop := True; end if; when others => -- non-exec code Stop := True; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Next_Subprogram_Parameter_Import_Export"); end case; The_Global_Variable := RawDict.Get_Next_Global_Variable (The_Global_Variable => The_Global_Variable); end loop; else Next := Iterator' (ImportExportIterator, Previous.Abstraction, RawDict.Get_Subprogram_Parameter_Symbol (The_Subprogram_Parameter), NullSymbol); end if; return Next; end Next_Subprogram_Parameter_Import_Export; -------------------------------------------------------------------------------- function Next_Global_Variable_Import_Export (Previous : Iterator) return Iterator --# global in Dict; is The_Global_Variable : RawDict.Global_Variable_Info_Ref; The_Subprogram : RawDict.Subprogram_Info_Ref; The_Task_Type : RawDict.Type_Info_Ref; The_Variable : RawDict.Variable_Info_Ref; The_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref; Next : Iterator := NullIterator; Stop : Boolean := False; begin The_Global_Variable := RawDict.Get_Global_Variable_Info_Ref (Previous.Context); case RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) is when RawDict.Subprogram_Parameter_Item | RawDict.Subprogram_Variable_Item => The_Subprogram := RawDict.Get_Global_Variable_Subprogram (The_Global_Variable => The_Global_Variable); while not Stop loop The_Global_Variable := RawDict.Get_Next_Global_Variable (The_Global_Variable => The_Global_Variable); if The_Global_Variable = 
RawDict.Null_Global_Variable_Info_Ref then Next := NullIterator; exit; end if; case RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) is when RawDict.Subprogram_Variable_Item => The_Variable := RawDict.Get_Global_Variable_Variable (The_Global_Variable => The_Global_Variable); if Is_Subprogram_Import_Export_Variable (The_Subprogram => The_Subprogram, Abstraction => Previous.Abstraction, The_Variable => The_Variable) then Next := Iterator' (ImportExportIterator, Previous.Abstraction, RawDict.Get_Variable_Symbol (The_Variable), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); Stop := True; end if; when RawDict.Subprogram_Parameter_Item => The_Subprogram_Parameter := RawDict.Get_Global_Variable_Parameter (The_Global_Variable => The_Global_Variable); if Is_Subprogram_Import_Export_Subprogram_Parameter (The_Subprogram => The_Subprogram, Abstraction => Previous.Abstraction, The_Subprogram_Parameter => The_Subprogram_Parameter, Is_Implicit_Proof_Function => False) then Next := Iterator' (ImportExportIterator, Previous.Abstraction, RawDict.Get_Subprogram_Parameter_Symbol (The_Subprogram_Parameter), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); Stop := True; end if; when others => -- non-exec code Stop := True; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Next_Global_Variable_Import_Export"); end case; end loop; when RawDict.Task_Type_Variable_Item => The_Task_Type := RawDict.Get_Global_Variable_Task_Type (The_Global_Variable => The_Global_Variable); loop The_Global_Variable := RawDict.Get_Next_Global_Variable (The_Global_Variable => The_Global_Variable); if The_Global_Variable = RawDict.Null_Global_Variable_Info_Ref then Next := NullIterator; exit; end if; The_Variable := RawDict.Get_Global_Variable_Variable (The_Global_Variable => The_Global_Variable); if Is_Task_Type_Import_Export (The_Task_Type => The_Task_Type, Abstraction => Previous.Abstraction, The_Variable => The_Variable) then Next := Iterator' (ImportExportIterator, Previous.Abstraction, RawDict.Get_Variable_Symbol (The_Variable), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); exit; end if; end loop; end case; return Next; end Next_Global_Variable_Import_Export; begin -- Next_Import_Export if Previous.Context = NullSymbol then Next := Next_Subprogram_Parameter_Import_Export (Previous => Previous); else Next := Next_Global_Variable_Import_Export (Previous => Previous); end if; return Next; end Next_Import_Export; -------------------------------------------------------------------------------- function Next_Export (Previous : Iterator) return Iterator --# global in Dict; is Next : Iterator; -------------------------------------------------------------------------------- function Next_Subprogram_Parameter_Export (Previous : Iterator) return Iterator --# global in Dict; is The_Subprogram : RawDict.Subprogram_Info_Ref; The_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref; The_Global_Variable : RawDict.Global_Variable_Info_Ref; The_Variable : RawDict.Variable_Info_Ref; Next : Iterator; Stop : Boolean := False; begin The_Subprogram_Parameter := RawDict.Get_Subprogram_Parameter_Info_Ref (CurrentSymbol (Previous)); The_Subprogram := RawDict.Get_Subprogram_Parameter_Subprogram (The_Subprogram_Parameter => The_Subprogram_Parameter); loop The_Subprogram_Parameter := RawDict.Get_Next_Subprogram_Parameter (The_Subprogram_Parameter => The_Subprogram_Parameter); if The_Subprogram_Parameter = 
RawDict.Null_Subprogram_Parameter_Info_Ref then Next := NullIterator; exit; end if; if Is_Exported_Subprogram_Subprogram_Parameter (The_Subprogram => The_Subprogram, Abstraction => Previous.Abstraction, The_Subprogram_Parameter => The_Subprogram_Parameter, Is_Implicit_Proof_Function => False) then Next := Iterator' (ExportIterator, Previous.Abstraction, RawDict.Get_Subprogram_Parameter_Symbol (The_Subprogram_Parameter), NullSymbol); exit; end if; end loop; if Next = NullIterator then The_Global_Variable := RawDict.Get_Subprogram_First_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Previous.Abstraction); while not Stop and then The_Global_Variable /= RawDict.Null_Global_Variable_Info_Ref loop case RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) is when RawDict.Subprogram_Variable_Item => The_Variable := RawDict.Get_Global_Variable_Variable (The_Global_Variable => The_Global_Variable); if Is_Exported_Subprogram_Variable (The_Subprogram => The_Subprogram, Abstraction => Previous.Abstraction, The_Variable => The_Variable) then Next := Iterator' (ExportIterator, Previous.Abstraction, RawDict.Get_Variable_Symbol (The_Variable), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); Stop := True; end if; when RawDict.Subprogram_Parameter_Item => The_Subprogram_Parameter := RawDict.Get_Global_Variable_Parameter (The_Global_Variable => The_Global_Variable); if Is_Exported_Subprogram_Subprogram_Parameter (The_Subprogram => The_Subprogram, Abstraction => Previous.Abstraction, The_Subprogram_Parameter => The_Subprogram_Parameter, Is_Implicit_Proof_Function => False) then Next := Iterator' (ExportIterator, Previous.Abstraction, RawDict.Get_Subprogram_Parameter_Symbol (The_Subprogram_Parameter), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); Stop := True; end if; when others => -- non-exec code Stop := True; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Next_Subprogram_Parameter_Export"); end case; The_Global_Variable := RawDict.Get_Next_Global_Variable (The_Global_Variable => The_Global_Variable); end loop; end if; return Next; end Next_Subprogram_Parameter_Export; -------------------------------------------------------------------------------- function Next_Global_Variable_Export (Previous : Iterator) return Iterator --# global in Dict; is The_Global_Variable : RawDict.Global_Variable_Info_Ref; The_Subprogram : RawDict.Subprogram_Info_Ref; The_Task_Type : RawDict.Type_Info_Ref; The_Variable : RawDict.Variable_Info_Ref; The_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref; Next : Iterator := NullIterator; Stop : Boolean := False; begin The_Global_Variable := RawDict.Get_Global_Variable_Info_Ref (Previous.Context); case RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) is when RawDict.Subprogram_Parameter_Item | RawDict.Subprogram_Variable_Item => The_Subprogram := RawDict.Get_Global_Variable_Subprogram (The_Global_Variable => The_Global_Variable); while not Stop loop The_Global_Variable := RawDict.Get_Next_Global_Variable (The_Global_Variable => The_Global_Variable); if The_Global_Variable = RawDict.Null_Global_Variable_Info_Ref then Next := NullIterator; exit; end if; case RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) is when RawDict.Subprogram_Variable_Item => The_Variable := RawDict.Get_Global_Variable_Variable (The_Global_Variable => The_Global_Variable); if Is_Exported_Subprogram_Variable (The_Subprogram => 
The_Subprogram, Abstraction => Previous.Abstraction, The_Variable => The_Variable) then Next := Iterator' (ExportIterator, Previous.Abstraction, RawDict.Get_Variable_Symbol (The_Variable), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); Stop := True; end if; when RawDict.Subprogram_Parameter_Item => The_Subprogram_Parameter := RawDict.Get_Global_Variable_Parameter (The_Global_Variable => The_Global_Variable); if Is_Exported_Subprogram_Subprogram_Parameter (The_Subprogram => The_Subprogram, Abstraction => Previous.Abstraction, The_Subprogram_Parameter => The_Subprogram_Parameter, Is_Implicit_Proof_Function => False) then Next := Iterator' (ExportIterator, Previous.Abstraction, RawDict.Get_Subprogram_Parameter_Symbol (The_Subprogram_Parameter), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); Stop := True; end if; when others => -- non-exec code Stop := True; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Next_Global_Variable_Export"); end case; end loop; when RawDict.Task_Type_Variable_Item => The_Task_Type := RawDict.Get_Global_Variable_Task_Type (The_Global_Variable => The_Global_Variable); loop The_Global_Variable := RawDict.Get_Next_Global_Variable (The_Global_Variable => The_Global_Variable); if The_Global_Variable = RawDict.Null_Global_Variable_Info_Ref then Next := NullIterator; exit; end if; The_Variable := RawDict.Get_Global_Variable_Variable (The_Global_Variable => The_Global_Variable); if Is_Exported_Task_Type_Variable (The_Task_Type => The_Task_Type, Abstraction => Previous.Abstraction, The_Variable => The_Variable) then Next := Iterator' (ExportIterator, Previous.Abstraction, RawDict.Get_Variable_Symbol (The_Variable), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); exit; end if; end loop; end case; return Next; end Next_Global_Variable_Export; begin -- Next_Export if Previous.Context = NullSymbol then Next := Next_Subprogram_Parameter_Export (Previous => Previous); else Next := Next_Global_Variable_Export (Previous => Previous); end if; return Next; end Next_Export; -------------------------------------------------------------------------------- function Next_Import (Previous : Iterator) return Iterator --# global in Dict; is Next : Iterator; -------------------------------------------------------------------------------- function Next_Subprogram_Parameter_Import (Previous : Iterator) return Iterator --# global in Dict; is The_Subprogram : RawDict.Subprogram_Info_Ref; The_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref; The_Global_Variable : RawDict.Global_Variable_Info_Ref; The_Variable : RawDict.Variable_Info_Ref; Next : Iterator; Stop : Boolean := False; begin The_Subprogram_Parameter := RawDict.Get_Subprogram_Parameter_Info_Ref (CurrentSymbol (Previous)); The_Subprogram := RawDict.Get_Subprogram_Parameter_Subprogram (The_Subprogram_Parameter => The_Subprogram_Parameter); loop The_Subprogram_Parameter := RawDict.Get_Next_Subprogram_Parameter (The_Subprogram_Parameter => The_Subprogram_Parameter); if The_Subprogram_Parameter = RawDict.Null_Subprogram_Parameter_Info_Ref then Next := NullIterator; exit; end if; if Is_Imported_Subprogram_Subprogram_Parameter (The_Subprogram => The_Subprogram, Abstraction => Previous.Abstraction, The_Subprogram_Parameter => The_Subprogram_Parameter, Is_Implicit_Proof_Function => False) then Next := Iterator' (ImportIterator, Previous.Abstraction, RawDict.Get_Subprogram_Parameter_Symbol (The_Subprogram_Parameter), NullSymbol); exit; end if; end loop; if 
Next = NullIterator then The_Global_Variable := RawDict.Get_Subprogram_First_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Previous.Abstraction); while not Stop and then The_Global_Variable /= RawDict.Null_Global_Variable_Info_Ref loop case RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) is when RawDict.Subprogram_Variable_Item => The_Variable := RawDict.Get_Global_Variable_Variable (The_Global_Variable => The_Global_Variable); if Is_Imported_Subprogram_Variable (The_Subprogram => The_Subprogram, Abstraction => Previous.Abstraction, The_Variable => The_Variable) then Next := Iterator' (ImportIterator, Previous.Abstraction, RawDict.Get_Variable_Symbol (The_Variable), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); Stop := True; end if; when RawDict.Subprogram_Parameter_Item => The_Subprogram_Parameter := RawDict.Get_Global_Variable_Parameter (The_Global_Variable => The_Global_Variable); if Is_Imported_Subprogram_Subprogram_Parameter (The_Subprogram => The_Subprogram, Abstraction => Previous.Abstraction, The_Subprogram_Parameter => The_Subprogram_Parameter, Is_Implicit_Proof_Function => False) then Next := Iterator' (ImportIterator, Previous.Abstraction, RawDict.Get_Subprogram_Parameter_Symbol (The_Subprogram_Parameter), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); Stop := True; end if; when others => -- non-exec code Stop := True; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Next_Subprogram_Parameter_Import"); end case; The_Global_Variable := RawDict.Get_Next_Global_Variable (The_Global_Variable => The_Global_Variable); end loop; end if; return Next; end Next_Subprogram_Parameter_Import; -------------------------------------------------------------------------------- function Next_Global_Variable_Import (Previous : Iterator) return Iterator --# global in Dict; is The_Global_Variable : RawDict.Global_Variable_Info_Ref; The_Subprogram : RawDict.Subprogram_Info_Ref; The_Task_Type : RawDict.Type_Info_Ref; The_Variable : RawDict.Variable_Info_Ref; The_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref; Next : Iterator := NullIterator; Stop : Boolean := False; begin The_Global_Variable := RawDict.Get_Global_Variable_Info_Ref (Previous.Context); case RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) is when RawDict.Subprogram_Parameter_Item | RawDict.Subprogram_Variable_Item => The_Subprogram := RawDict.Get_Global_Variable_Subprogram (The_Global_Variable => The_Global_Variable); while not Stop loop The_Global_Variable := RawDict.Get_Next_Global_Variable (The_Global_Variable => The_Global_Variable); if The_Global_Variable = RawDict.Null_Global_Variable_Info_Ref then Next := NullIterator; exit; end if; case RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) is when RawDict.Subprogram_Variable_Item => The_Variable := RawDict.Get_Global_Variable_Variable (The_Global_Variable => The_Global_Variable); if Is_Imported_Subprogram_Variable (The_Subprogram => The_Subprogram, Abstraction => Previous.Abstraction, The_Variable => The_Variable) then Next := Iterator' (ImportIterator, Previous.Abstraction, RawDict.Get_Variable_Symbol (The_Variable), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); Stop := True; end if; when RawDict.Subprogram_Parameter_Item => The_Subprogram_Parameter := RawDict.Get_Global_Variable_Parameter (The_Global_Variable => The_Global_Variable); if 
Is_Imported_Subprogram_Subprogram_Parameter (The_Subprogram => The_Subprogram, Abstraction => Previous.Abstraction, The_Subprogram_Parameter => The_Subprogram_Parameter, Is_Implicit_Proof_Function => False) then Next := Iterator' (ImportIterator, Previous.Abstraction, RawDict.Get_Subprogram_Parameter_Symbol (The_Subprogram_Parameter), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); Stop := True; end if; when others => -- non-exec code Stop := True; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Next_Global_Variable_Import"); end case; end loop; when RawDict.Task_Type_Variable_Item => The_Task_Type := RawDict.Get_Global_Variable_Task_Type (The_Global_Variable => The_Global_Variable); loop The_Global_Variable := RawDict.Get_Next_Global_Variable (The_Global_Variable => The_Global_Variable); if The_Global_Variable = RawDict.Null_Global_Variable_Info_Ref then Next := NullIterator; exit; end if; The_Variable := RawDict.Get_Global_Variable_Variable (The_Global_Variable => The_Global_Variable); if Is_Imported_Task_Type_Variable (The_Task_Type => The_Task_Type, Abstraction => Previous.Abstraction, The_Variable => The_Variable) then Next := Iterator' (ImportIterator, Previous.Abstraction, RawDict.Get_Variable_Symbol (The_Variable), RawDict.Get_Global_Variable_Symbol (The_Global_Variable)); exit; end if; end loop; end case; return Next; end Next_Global_Variable_Import; begin -- Next_Import if Previous.Context = NullSymbol then Next := Next_Subprogram_Parameter_Import (Previous => Previous); else Next := Next_Global_Variable_Import (Previous => Previous); end if; return Next; end Next_Import; -------------------------------------------------------------------------------- function Next_Dependency (Previous : Iterator) return Iterator --# global in Dict; is The_Dependency : RawDict.Dependency_Info_Ref; Next : Iterator; begin The_Dependency := RawDict.Get_Next_Dependency (The_Dependency => RawDict.Get_Dependency_Info_Ref (Previous.Context)); if The_Dependency = RawDict.Null_Dependency_Info_Ref then Next := NullIterator; else case RawDict.Get_Kind_Of_Dependency (The_Dependency => The_Dependency) is when RawDict.Dependency_Parameter_Item => Next := Iterator' (DependencyIterator, IsAbstract, RawDict.Get_Subprogram_Parameter_Symbol (RawDict.Get_Dependency_Import_Parameter (The_Dependency => The_Dependency)), RawDict.Get_Dependency_Symbol (The_Dependency)); when RawDict.Dependency_Variable_Item => Next := Iterator' (DependencyIterator, IsAbstract, RawDict.Get_Variable_Symbol (RawDict.Get_Dependency_Import_Variable (The_Dependency => The_Dependency)), RawDict.Get_Dependency_Symbol (The_Dependency)); end case; end if; return Next; end Next_Dependency; -------------------------------------------------------------------------------- function Next_Interrupt_Stream_Mapping (Previous : Iterator) return Iterator --# global in Dict; is Mapping : Symbol; Next : Iterator; begin Mapping := RawDict.GetNextInterruptStreamMapping (Previous.Context); if Mapping = NullSymbol then Next := NullIterator; else Next := Iterator'(InterruptStreamMappingIterator, IsAbstract, Mapping, Mapping); end if; return Next; end Next_Interrupt_Stream_Mapping; -------------------------------------------------------------------------------- function Next_Suspends_List_Item (Previous : Iterator) return Iterator --# global in Dict; is SuspendsListItem : Symbol; Next : Iterator; begin SuspendsListItem := RawDict.GetNextSuspendsListItem (Previous.Context); if SuspendsListItem = NullSymbol 
then Next := NullIterator; else Next := Iterator'(SuspendsListItemIterator, IsAbstract, RawDict.GetSuspendsListItem (SuspendsListItem), SuspendsListItem); end if; return Next; end Next_Suspends_List_Item; -------------------------------------------------------------------------------- function Next_Virtual_Element (Previous : Iterator) return Iterator --# global in Dict; is Virtual_Element : Symbol; Next : Iterator; begin Virtual_Element := RawDict.GetNextVirtualElement (Previous.Context); if Virtual_Element = NullSymbol then Next := NullIterator; else Next := Iterator' (VirtualElementIterator, IsAbstract, RawDict.Get_Variable_Symbol (RawDict.GetVirtualElementVariable (Virtual_Element)), Virtual_Element); end if; return Next; end Next_Virtual_Element; -------------------------------------------------------------------------------- function Next_Owned_Package (Previous : Iterator) return Iterator --# global in Dict; is Previous_Package, Current_Package, Descendent : RawDict.Package_Info_Ref; Next : Iterator; begin Previous_Package := RawDict.Get_Package_Info_Ref (CurrentSymbol (Previous)); Current_Package := RawDict.Get_Package_Next_Sibling (The_Package => Previous_Package); if Current_Package /= RawDict.Null_Package_Info_Ref then loop Descendent := RawDict.Get_Package_First_Public_Child (The_Package => Current_Package); exit when Descendent = RawDict.Null_Package_Info_Ref; Current_Package := Descendent; end loop; elsif not RawDict.Get_Package_Is_Private (The_Package => Previous_Package) then Current_Package := RawDict.Get_Package_Parent (The_Package => Previous_Package); end if; if Current_Package = RawDict.Null_Package_Info_Ref then Next := NullIterator; else Next := Iterator'(OwnedPackageIterator, IsAbstract, RawDict.Get_Package_Symbol (Current_Package), Previous.Context); end if; return Next; end Next_Owned_Package; -------------------------------------------------------------------------------- function Next_Embedded_Package (Previous : Iterator) return Iterator --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol; Next : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => RawDict.Get_Declaration_Info_Ref (Previous.Context)); while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); if RawDict.GetSymbolDiscriminant (Item) = Package_Symbol and then The_Declaration /= RawDict.Get_Package_Body (The_Package => RawDict.Get_Package_Info_Ref (Item => Item)) then Next := Iterator' (EmbeddedPackageIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (The_Declaration => The_Declaration)); The_Declaration := RawDict.Null_Declaration_Info_Ref; else The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; return Next; end Next_Embedded_Package; -------------------------------------------------------------------------------- function Next_Own_Variable (Previous : Iterator) return Iterator --# global in Dict; is Own_Variable : RawDict.Own_Variable_Info_Ref; Next : Iterator; begin Own_Variable := RawDict.Get_Next_Own_Variable (The_Own_Variable => RawDict.Get_Own_Variable_Info_Ref (Previous.Current)); if Own_Variable = RawDict.Null_Own_Variable_Info_Ref then Next := NullIterator; else Next := Iterator'(OwnVariableIterator, IsAbstract, RawDict.Get_Own_Variable_Symbol (Own_Variable), NullSymbol); end if; return Next; end Next_Own_Variable; 
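   -- The two helpers below walk the same own-variable chain as
   -- Next_Own_Variable but filter it: Next_Initialized_Own_Variable returns
   -- only own variables marked as initialized, while
   -- Next_Abstract_Own_Variable returns only those with constituents, i.e.
   -- abstract own variables that are subject to refinement.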
-------------------------------------------------------------------------------- function Next_Initialized_Own_Variable (Previous : Iterator) return Iterator --# global in Dict; is Own_Variable : RawDict.Own_Variable_Info_Ref; Next : Iterator; begin Own_Variable := RawDict.Get_Next_Own_Variable (The_Own_Variable => RawDict.Get_Own_Variable_Info_Ref (Previous.Current)); loop if Own_Variable = RawDict.Null_Own_Variable_Info_Ref then Next := NullIterator; exit; end if; if RawDict.Get_Own_Variable_Initialized (The_Own_Variable => Own_Variable) then Next := Iterator'(InitializedOwnVariableIterator, IsAbstract, RawDict.Get_Own_Variable_Symbol (Own_Variable), NullSymbol); exit; end if; Own_Variable := RawDict.Get_Next_Own_Variable (The_Own_Variable => Own_Variable); end loop; return Next; end Next_Initialized_Own_Variable; -------------------------------------------------------------------------------- function Next_Abstract_Own_Variable (Previous : Iterator) return Iterator --# global in Dict; is Own_Variable : RawDict.Own_Variable_Info_Ref; Next : Iterator; begin Own_Variable := RawDict.Get_Next_Own_Variable (The_Own_Variable => RawDict.Get_Own_Variable_Info_Ref (Previous.Current)); loop if Own_Variable = RawDict.Null_Own_Variable_Info_Ref then Next := NullIterator; exit; end if; if RawDict.Get_Own_Variable_Constituents (The_Own_Variable => Own_Variable) /= RawDict.Null_Constituent_Info_Ref then Next := Iterator'(AbstractOwnVariableIterator, IsAbstract, RawDict.Get_Own_Variable_Symbol (Own_Variable), NullSymbol); exit; end if; Own_Variable := RawDict.Get_Next_Own_Variable (The_Own_Variable => Own_Variable); end loop; return Next; end Next_Abstract_Own_Variable; -------------------------------------------------------------------------------- function Next_Constituent (Previous : Iterator) return Iterator --# global in Dict; is The_Constituent : RawDict.Constituent_Info_Ref; Next : Iterator := NullIterator; begin The_Constituent := RawDict.Get_Next_Constituent (The_Constituent => RawDict.Get_Constituent_Info_Ref (Previous.Current)); if The_Constituent /= RawDict.Null_Constituent_Info_Ref then Next := Iterator'(ConstituentIterator, IsAbstract, RawDict.Get_Constituent_Symbol (The_Constituent), NullSymbol); end if; return Next; end Next_Constituent; -------------------------------------------------------------------------------- function Next_Known_Discriminant (Previous : Iterator) return Iterator --# global in Dict; is Discriminant : Symbol; Next : Iterator; begin Discriminant := RawDict.GetNextDiscriminant (Previous.Current); if Discriminant = NullSymbol then Next := NullIterator; else Next := Iterator'(KnownDiscriminantIterator, IsAbstract, Discriminant, NullSymbol); end if; return Next; end Next_Known_Discriminant; -------------------------------------------------------------------------------- function Next_Discriminant_Constraint (Previous : Iterator) return Iterator --# global in Dict; is Discriminant : Symbol; Next : Iterator; begin Discriminant := RawDict.GetNextDiscriminantConstraint (Previous.Current); if Discriminant = NullSymbol then Next := NullIterator; else Next := Iterator'(DiscriminantConstraintIterator, IsAbstract, Discriminant, NullSymbol); end if; return Next; end Next_Discriminant_Constraint; -------------------------------------------------------------------------------- function Next_Generic_Formal_Parameter (Previous : Iterator) return Iterator --# global in Dict; is The_Generic_Parameter : RawDict.Generic_Parameter_Info_Ref; Result : Iterator; begin The_Generic_Parameter 
:= RawDict.Get_Next_Generic_Parameter (The_Generic_Parameter => RawDict.Get_Generic_Parameter_Info_Ref (Previous.Current)); if The_Generic_Parameter = RawDict.Null_Generic_Parameter_Info_Ref then Result := NullIterator; else Result := Iterator' (GenericFormalParameterIterator, IsAbstract, RawDict.Get_Generic_Parameter_Symbol (The_Generic_Parameter), NullSymbol); end if; return Result; end Next_Generic_Formal_Parameter; -------------------------------------------------------------------------------- function Next_Loop_On_Entry_Var (Previous : Iterator) return Iterator --# global in Dict; is Next : Symbol; Result : Iterator; begin Next := RawDict.GetLoopEntryVariableNext (Previous.Current); if Next = NullSymbol then Result := NullIterator; else Result := Iterator'(LoopOnEntryVarIterator, IsAbstract, Next, NullSymbol); end if; return Result; end Next_Loop_On_Entry_Var; begin -- NextSymbol case Previous.Discriminant is when NullSymIterator | DeclarativeItemIterator => Next := Next_Declarative_Item (Previous => Previous); when DeferredConstantIterator => Next := Next_Deferred_Constant (Previous => Previous); when ArrayIndexIterator => Next := Next_Array_Index (Previous => Previous); when LoopIterator => Next := Next_Loop (Previous => Previous); when UndeclaredTypeIterator => Next := Next_Undeclared_Type (Previous => Previous); when PrivateTypeIterator => Next := Next_Private_Type (Previous => Previous); when EnumerationLiteralIterator => Next := Next_Enumeration_Literal (Previous => Previous); when RecordComponentIterator => Next := Next_Record_Component (Previous => Previous); when ExtendedRecordComponentIterator => Next := Next_Extended_Record_Component (Previous => Previous); when LibraryUnitIterator => Next := Next_Library_Unit (Previous => Previous); when WithedPackageIterator => Next := Next_Withed_Package (Previous => Previous); when InheritedPackageIterator => Next := Next_Inherited_Package (Previous => Previous); when VisibleSubprogramIterator | PrivateSubprogramIterator => Next := Next_Visible_Or_Private_Subprogram (Previous => Previous); when TaskTypeIterator => Next := Next_Task_Type (Previous => Previous); when OwnTaskIterator => Next := Next_Own_Task (Previous => Previous); when ProtectedTypeIterator => Next := Next_Protected_Type (Previous => Previous); when SubprogramParameterIterator | ImplicitProofFunctionGlobalIterator | ImplicitProofFunctionParameterIterator => Next := Next_Subprogram_Parameter (Previous => Previous); when KnownDiscriminantIterator => Next := Next_Known_Discriminant (Previous => Previous); when DiscriminantConstraintIterator => Next := Next_Discriminant_Constraint (Previous => Previous); when GlobalVariableIterator => Next := Next_Global_Variable (Previous => Previous); when LocalVariableIterator => Next := Next_Local_Variable (Previous => Previous); when ProtectedElementIterator => Next := Next_Protected_Element (Previous => Previous); when InitializedVariableIterator => Next := Next_Initialized_Variable (Previous => Previous); when ImportExportIterator => Next := Next_Import_Export (Previous => Previous); when ExportIterator => Next := Next_Export (Previous => Previous); when ImportIterator => Next := Next_Import (Previous => Previous); when DependencyIterator => Next := Next_Dependency (Previous => Previous); when InterruptStreamMappingIterator => Next := Next_Interrupt_Stream_Mapping (Previous => Previous); when SuspendsListItemIterator => Next := Next_Suspends_List_Item (Previous => Previous); when VirtualElementIterator => Next := Next_Virtual_Element 
(Previous => Previous); when OwnedPackageIterator => Next := Next_Owned_Package (Previous => Previous); when EmbeddedPackageIterator => Next := Next_Embedded_Package (Previous => Previous); when OwnVariableIterator => Next := Next_Own_Variable (Previous => Previous); when InitializedOwnVariableIterator => Next := Next_Initialized_Own_Variable (Previous => Previous); when AbstractOwnVariableIterator => Next := Next_Abstract_Own_Variable (Previous => Previous); when ConstituentIterator => Next := Next_Constituent (Previous => Previous); when GenericFormalParameterIterator => Next := Next_Generic_Formal_Parameter (Previous => Previous); when LoopOnEntryVarIterator => Next := Next_Loop_On_Entry_Var (Previous => Previous); end case; return Next; end NextSymbol; spark-2012.0.deb/examiner/sem-walk_expression_p-unknown_symbol_record.adb0000644000175000017500000000374311753202336025666 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Walk_Expression_P) function Unknown_Symbol_Record return Sem.Exp_Record is begin return Sem.Exp_Record' (Type_Symbol => Dictionary.GetUnknownTypeMark, Other_Symbol => Dictionary.NullSymbol, Stream_Symbol => Dictionary.NullSymbol, Tagged_Parameter_Symbol => Dictionary.NullSymbol, Variable_Symbol => Dictionary.NullSymbol, Param_Count => 0, Param_List => Lists.Null_List, Sort => Sem.Is_Unknown, Arg_List_Found => False, Is_AVariable => False, Is_An_Entire_Variable => False, Errors_In_Expression => True, Has_Operators => False, Is_Static => False, Is_Constant => False, Is_ARange => False, String_Value => LexTokenManager.Null_String, Value => Maths.NoValue, Range_RHS => Maths.NoValue); end Unknown_Symbol_Record; spark-2012.0.deb/examiner/dictionary-search_for_inherited_operations.adb0000644000175000017500000002134511753202336025502 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. 
If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Dictionary) procedure Search_For_Inherited_Operations (Name : in LexTokenManager.Lex_String; Scope : in Scopes; Prefix : in RawDict.Package_Info_Ref; Context : in Contexts; OpSym : out Symbol; Actual_Tagged_Type : out RawDict.Type_Info_Ref) is type KindsOfOp is (AProcedure, AFunction, NotASubprogram); Current_Package : RawDict.Package_Info_Ref; Calling_Package : RawDict.Package_Info_Ref; PossibleOpSym : Symbol := NullSymbol; PossibleKindOfOp : KindsOfOp; -------------------------------------------------------------------------------- function Get_Package_Extended_Type (The_Package : RawDict.Package_Info_Ref) return RawDict.Type_Info_Ref --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol; Result : RawDict.Type_Info_Ref := RawDict.Null_Type_Info_Ref; begin if RawDict.Get_Package_Extends (The_Package => The_Package) /= RawDict.Null_Package_Info_Ref then -- search for a tagged type The_Declaration := RawDict.Get_Package_First_Visible_Declaration (The_Package => The_Package); while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); if RawDict.GetSymbolDiscriminant (Item) = Type_Symbol and then Is_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Item)) and then Type_Is_Tagged (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Item)) then Result := RawDict.Get_Type_Info_Ref (Item => Item); The_Declaration := RawDict.Null_Declaration_Info_Ref; else The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; -- if we haven't succeeded already then check private declarations if Result = RawDict.Null_Type_Info_Ref then The_Declaration := RawDict.Get_Package_First_Private_Declaration (The_Package => The_Package); while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); if RawDict.GetSymbolDiscriminant (Item) = Type_Symbol and then Is_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Item)) and then Type_Is_Tagged (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Item)) then Result := RawDict.Get_Type_Info_Ref (Item => Item); The_Declaration := RawDict.Null_Declaration_Info_Ref; else The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; end if; end if; return Result; end Get_Package_Extended_Type; -------------------------------------------------------------------------------- function OperationCanBeInherited (TheOpSym : Symbol; Calling_Package : RawDict.Package_Info_Ref; Current_Package : RawDict.Package_Info_Ref) return Boolean --# global in Dict; is It : Iterator; Current_Type : RawDict.Type_Info_Ref; Valid : Boolean := False; function Is_Locally_Declared (Type_Mark : RawDict.Type_Info_Ref; Current_Package : RawDict.Package_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.GetSymbolDiscriminant (GetRegion (Get_Type_Scope (Type_Mark => Type_Mark))) = Package_Symbol and then RawDict.Get_Package_Info_Ref (Item => GetRegion (Get_Type_Scope (Type_Mark => Type_Mark))) = Current_Package; end Is_Locally_Declared; begin -- OperationCanBeInherited -- a subprogram is suitable for inheritance if it has a parameter -- of a tagged type declared in the same package and which the caller extends It := FirstSubprogramParameter 
(TheOpSym);
      while not IsNullIterator (It) loop
         Current_Type := Get_Type (The_Symbol => CurrentSymbol (It));
         if Type_Is_Tagged (Type_Mark => Current_Type)
           and then Is_Locally_Declared (Type_Mark => Current_Type, Current_Package => Current_Package)
           and then Is_An_Extension_Of (Root_Type     => Current_Type,
                                        Extended_Type => Get_Package_Extended_Type (The_Package => Calling_Package)) then
            Valid := True;
            exit;
         end if;
         It := NextSymbol (It);
      end loop;
      return Valid;
   end OperationCanBeInherited;

begin -- Search_For_Inherited_Operations
   -- This procedure will only be called when a normal search for an
   -- operation using LookUpItem or LookUpSelectedItem has failed.  We may
   -- be in some local scope, so the first step is to get to the enclosing
   -- library package of the scope we start in if there is no prefix, or
   -- the prefix package if there is one.
   if Prefix = RawDict.Null_Package_Info_Ref then
      Current_Package := Get_Library_Package (Scope => Scope);
   else
      Current_Package := Prefix;
   end if;
   Calling_Package := Current_Package;
   -- Now we can chain up the package "Extends" pointers looking for the
   -- required operation.
   loop
      Current_Package := RawDict.Get_Package_Extends (The_Package => Current_Package);
      if Current_Package = RawDict.Null_Package_Info_Ref then -- no more inherited packs
         PossibleKindOfOp := NotASubprogram;
         exit;
      end if;
      -- Prior to release 7.1, a potentially inheritable operation must have
      -- been declared in the visible part of its package so LookupImmediateScope
      -- was a good choice for seeing if such an operation exists.
      -- After release 7.1 the operation might be in the private part so we use
      -- LookUpSelectedItem instead; this makes operations correctly visible
      -- depending on whether we are looking from a child package or not.
      PossibleOpSym := LookupSelectedItem (RawDict.Get_Package_Symbol (Current_Package), Name, Scope, Context);
      if RawDict.GetSymbolDiscriminant (PossibleOpSym) = Subprogram_Symbol
        and then Is_Procedure (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => PossibleOpSym)) then
         PossibleKindOfOp := AProcedure;
         exit;
      end if;
      if IsFunction (PossibleOpSym) then
         PossibleKindOfOp := AFunction;
         exit;
      end if;
      if PossibleOpSym /= NullSymbol then -- something else found
         PossibleKindOfOp := NotASubprogram;
         exit;
      end if;
   end loop;
   -- At this point we have either found something and PossibleKindOfOp will say
   -- what it is, or we have failed and PossibleOpSym is NullSymbol (and PossibleKindOfOp
   -- is NotASubprogram).  In any case a result of NotASubprogram is a failure and no further
   -- checks are required.
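   -- A minimal sketch of the kind of SPARK source this search supports; the
   -- package, type and subprogram names below are illustrative only and are
   -- not taken from the Examiner sources or test suite.  Parent declares a
   -- tagged type and an operation on it, and Child declares an extension.  A
   -- call naming Op on an object of the extended type that fails normal
   -- lookup is resolved by chaining up the recorded "extends" links until a
   -- package declaring a subprogram with a parameter of the root tagged type
   -- is found.
   --
   --    package Parent is
   --       type T is tagged record
   --          X : Integer;
   --       end record;
   --       procedure Op (V : in out T);
   --    end Parent;
   --
   --    with Parent;
   --    --# inherit Parent;
   --    package Child is
   --       type T2 is new Parent.T with record
   --          Y : Integer;
   --       end record;
   --    end Child;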
if PossibleKindOfOp = NotASubprogram then OpSym := NullSymbol; Actual_Tagged_Type := RawDict.Null_Type_Info_Ref; else -- some kind of subprogram found so we need to check whether it has a parameter of -- a locally-declared tagged type if OperationCanBeInherited (TheOpSym => PossibleOpSym, Calling_Package => Calling_Package, Current_Package => Current_Package) then OpSym := PossibleOpSym; Actual_Tagged_Type := Get_Package_Extended_Type (The_Package => Calling_Package); else -- not a suitable subprog to inherit OpSym := NullSymbol; Actual_Tagged_Type := RawDict.Null_Type_Info_Ref; end if; end if; end Search_For_Inherited_Operations; spark-2012.0.deb/examiner/sem-walk_expression_p-walk_annotation_expression-up_wf_store.adb0000644000175000017500000001640311753202336031242 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Walk_Expression_P.Walk_Annotation_Expression) procedure Up_Wf_Store (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type) is Type_Info, Exp_Result : Sem.Exp_Record; Sym : Dictionary.Symbol; Field_Ident_Node, Store_List_Node : STree.SyntaxNode; Field_Ident : LexTokenManager.Lex_String; Field_Symbol : Dictionary.Symbol; Error_Found : Boolean := False; function Branches_Found (Start_Node, End_Node : STree.SyntaxNode) return Boolean --# global in STree.Table; --# pre STree.Syntax_Node_Type (Start_Node, STree.Table) = SP_Symbols.identifier and --# STree.Syntax_Node_Type (End_Node, STree.Table) = SP_Symbols.store_list; is Next_Node : STree.SyntaxNode; Result : Boolean; begin Result := False; Next_Node := STree.Parent_Node (Current_Node => Start_Node); while Next_Node /= End_Node loop if STree.Next_Sibling (Current_Node => Next_Node) /= STree.NullNode then Result := True; exit; end if; Next_Node := STree.Parent_Node (Current_Node => Next_Node); end loop; return Result; end Branches_Found; begin -- Up_Wf_Store Exp_Stack.Pop (Item => Exp_Result, Stack => E_Stack); Exp_Stack.Pop (Item => Type_Info, Stack => E_Stack); Sym := Type_Info.Other_Symbol; Store_List_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Store_List_Node = store_list SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Store_List_Node) = SP_Symbols.store_list, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Store_List_Node = store_list in Up_Wf_Store"); if Dictionary.IsArrayTypeMark (Type_Info.Type_Symbol, Scope) then if Type_Info.Param_Count = Dictionary.GetNumberOfDimensions (Type_Info.Type_Symbol) then -- right number of index expressions so just check type check needed if not Dictionary.CompatibleTypes 
(Scope, Dictionary.GetArrayComponent (Type_Info.Type_Symbol), Exp_Result.Type_Symbol) then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 38, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => STree.Next_Sibling (Current_Node => Store_List_Node)), Id_Str => LexTokenManager.Null_String); end if; else -- insufficient index expressions Type_Info := Unknown_Symbol_Record; ErrorHandler.Semantic_Error (Err_Num => 93, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => Dictionary.GetSimpleName (Sym)); end if; else -- must be record multiple field name check if STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => Store_List_Node)) = SP_Symbols.store_list then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 324, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); else Field_Ident_Node := STree.Last_Child_Of (Start_Node => Node); if STree.Syntax_Node_Type (Node => Field_Ident_Node) = SP_Symbols.identifier then -- ASSUME Field_Ident_Node = identifier if Branches_Found (Start_Node => Field_Ident_Node, End_Node => Store_List_Node) then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 102, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Field_Ident_Node), Id_Str => Dictionary.GetSimpleName (Type_Info.Other_Symbol)); else Field_Ident := STree.Node_Lex_String (Node => Field_Ident_Node); Field_Symbol := Dictionary.LookupSelectedItem (Prefix => Type_Info.Type_Symbol, Selector => Field_Ident, Scope => Scope, Context => Dictionary.ProofContext); if Dictionary.Is_Null_Symbol (Field_Symbol) or else not Dictionary.IsRecordComponent (Field_Symbol) then -- no such field Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 8, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Field_Ident_Node), Id_Str => Field_Ident); else -- field name exists so type check of result assigned to it required if Dictionary.CompatibleTypes (Scope, Dictionary.GetType (Field_Symbol), Exp_Result.Type_Symbol) then STree.Set_Node_Lex_String (Sym => Field_Symbol, Node => Field_Ident_Node); else Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 38, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => STree.Next_Sibling (Current_Node => Store_List_Node)), Id_Str => LexTokenManager.Null_String); end if; end if; end if; else -- identifier not found Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 102, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Field_Ident_Node), Id_Str => Dictionary.GetSimpleName (Type_Info.Other_Symbol)); end if; end if; end if; Type_Info.Errors_In_Expression := Error_Found or else Type_Info.Errors_In_Expression or else Exp_Result.Errors_In_Expression; Exp_Stack.Push (X => Type_Info, Stack => E_Stack); end Up_Wf_Store; spark-2012.0.deb/examiner/flowanalyser-flowanalyse-analyserelations-checkusages.adb0000644000175000017500000002316011753202336027611 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any 
later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (FlowAnalyser.FlowAnalyse.AnalyseRelations) procedure CheckUsages is AffectedExports, ExpOrRefSeq, ImpOrDefSeq, RhoRow : SeqAlgebra.Seq; M : SeqAlgebra.MemberOfSeq; Variable : Natural; VarSymbol : Dictionary.Symbol; PreservedVars : SeqAlgebra.Seq; TempSeq : SeqAlgebra.Seq; TempMem : SeqAlgebra.MemberOfSeq; TempLeaves : SeqAlgebra.Seq; TempSym : Dictionary.Symbol; ------------------------------ procedure AddError (Err : in ErrorHandler.Usage_Err_Type; Position : in LexTokenManager.Token_Position; Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ComponentData; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out TheErrorHeap; --# in out TheHeap; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Err, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Position, --# Scope, --# SPARK_IO.File_Sys, --# Sym & --# Statistics.TableUsage, --# TheHeap from *, --# ComponentData, --# Dictionary.Dict, --# Err, --# Position, --# Sym, --# TheErrorHeap, --# TheHeap & --# TheErrorHeap from *, --# Dictionary.Dict, --# Err, --# Position, --# Sym, --# TheHeap; --# is NewError : Natural; begin if Dictionary.IsSubcomponent (Sym) then ComponentErrors.CreateError (TheErrorHeap, TheHeap, ComponentErrors.Usage, ErrorHandler.Usage_Err_Type'Pos (Err), Position, Dictionary.NullSymbol, --to get NewError); ComponentManager.AddError (TheHeap, TheErrorHeap, ComponentData, ComponentManager.GetComponentNode (ComponentData, Sym), NewError); else ErrorHandler.Usage_Error (Err_Type => Err, Position => Position, Var_Sym => Sym, Scope => Scope); end if; end AddError; ------------------ begin --CheckUsages SeqAlgebra.Union (TheHeap, SeqOfInitVars, ExpSeqOfImports, ImpOrDefSeq); SeqAlgebra.AugmentSeq (TheHeap, ImpOrDefSeq, IFA_Stack.Top (S).DefinedVars); SeqAlgebra.Union (TheHeap, ExpSeqOfExports, ReferencedVars, ExpOrRefSeq); SeqAlgebra.AugmentSeq (TheHeap, IFA_Stack.Top (S).AllVars, SeqOfInitVars); SeqAlgebra.AugmentSeq (TheHeap, IFA_Stack.Top (S).AllVars, ExpSeqOfImports); SeqAlgebra.AugmentSeq (TheHeap, IFA_Stack.Top (S).AllVars, ExpSeqOfExports); SeqAlgebra.CreateSeq (TheHeap, PreservedVars); SeqAlgebra.Intersection (TheHeap, IFA_Stack.Top (S).DefinedVars, ExpSeqOfImports, TempSeq); TempMem := SeqAlgebra.FirstMember (TheHeap, TempSeq); while not SeqAlgebra.IsNullMember (TempMem) loop TempSym := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => TempMem))); if Dictionary.IsSubcomponent (TempSym) then ComponentManager.GetLeaves (TheHeap, ComponentData, ComponentManager.GetRoot (ComponentData, ComponentManager.GetComponentNode (ComponentData, TempSym)), --to get TempLeaves); SeqAlgebra.AugmentSeq (TheHeap, PreservedVars, TempLeaves); SeqAlgebra.DisposeOfSeq (TheHeap, 
TempLeaves); end if; TempMem := SeqAlgebra.NextMember (TheHeap, TempMem); end loop; SeqAlgebra.DisposeOfSeq (TheHeap, TempSeq); TempSeq := IFA_Stack.Top (S).DefinedVars; SeqAlgebra.Reduction (TheHeap, PreservedVars, TempSeq); M := SeqAlgebra.FirstMember (TheHeap, IFA_Stack.Top (S).AllVars); while not SeqAlgebra.IsNullMember (M) loop Variable := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => M); if Variable /= FnResultRepn then VarSymbol := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (Variable)); if not SeqAlgebra.IsMember (TheHeap, ImpOrDefSeq, Variable) and then not Dictionary.IsImplicitInStream (VarSymbol) then AddError (ErrorHandler.Undefined_Var, EndPosition, VarSymbol, Scope); end if; if SeqAlgebra.IsMember (TheHeap, ExpSeqOfImports, Variable) then if SeqAlgebra.IsMember (TheHeap, IFA_Stack.Top (S).DefinedVars, Variable) and then not SeqAlgebra.IsMember (TheHeap, ExpSeqOfExports, Variable) and then not Dictionary.Is_Null_Variable (VarSymbol) then AddError (ErrorHandler.Redefined_Import, EndPosition, VarSymbol, Scope); end if; if not SeqAlgebra.IsMember (TheHeap, ExpOrRefSeq, Variable) and then not Dictionary.Is_Null_Variable (VarSymbol) then AddError (ErrorHandler.Unused_Import, EndPosition, VarSymbol, Scope); else RelationAlgebra.RowExtraction (TheHeap, IFA_Stack.Top (S).Rho, Variable, RhoRow); SeqAlgebra.Intersection (TheHeap, RhoRow, ExpSeqOfExports, AffectedExports); -- Remove any affected exports that are implicit in streams of protected state since they are -- not significant or genuine exports SeqAlgebra.Reduction (TheHeap, AffectedExports, InStreamsOfShareableProtectedVars); SeqAlgebra.DisposeOfSeq (TheHeap, RhoRow); if SeqAlgebra.IsEmptySeq (TheHeap, AffectedExports) and then (not (Dictionary.IsSubcomponent (VarSymbol) and then SeqAlgebra.IsMember (TheHeap, PreservedVars, Variable))) and then -- following covers implicit importing of out streams that should not be reported (Dictionary.GetOwnVariableOrConstituentMode (Dictionary.GetMostEnclosingObject (VarSymbol)) /= Dictionary.OutMode) then AddError (ErrorHandler.Ineffective_Import, EndPosition, VarSymbol, Scope); end if; SeqAlgebra.DisposeOfSeq (TheHeap, AffectedExports); end if; elsif not SeqAlgebra.IsMember (TheHeap, ExpOrRefSeq, Variable) then AddError (ErrorHandler.Unreferenced_Var, EndPosition, VarSymbol, Scope); end if; if SeqAlgebra.IsMember (TheHeap, ExpSeqOfExports, Variable) and then (not SeqAlgebra.IsMember (TheHeap, IFA_Stack.Top (S).DefinedVars, Variable)) and then (not (Dictionary.IsSubcomponent (VarSymbol) and then SeqAlgebra.IsMember (TheHeap, PreservedVars, Variable))) and then -- following covers implicit updating of in streams that should not be reported (Dictionary.GetOwnVariableOrConstituentMode (Dictionary.GetMostEnclosingObject (VarSymbol)) /= Dictionary.InMode) and then -- neither should undefinedness of null (data sink) variable not Dictionary.Is_Null_Variable (VarSymbol) and then not Dictionary.IsImplicitInStream (VarSymbol) then AddError (ErrorHandler.Undefined_Export, EndPosition, VarSymbol, Scope); end if; end if; M := SeqAlgebra.NextMember (TheHeap, M); end loop; SeqAlgebra.DisposeOfSeq (TheHeap, ExpOrRefSeq); SeqAlgebra.DisposeOfSeq (TheHeap, ImpOrDefSeq); SeqAlgebra.DisposeOfSeq (TheHeap, PreservedVars); end CheckUsages; spark-2012.0.deb/examiner/sli.idx0000644000175000017500000000040411753202337015545 0ustar eugeneugensli components are sli.io, sli.xref sli specification is in sli.ads sli body is in sli.adb sli.io specification is in sli-io.ads sli.io body is in sli-io.adb 
sli.xref specification is in sli-xref.ads sli.xref body is in sli-xref.shb spark-2012.0.deb/examiner/sem-dependency_relation-wf_dependency_relation.adb0000644000175000017500000025630511753202336026236 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SLI; separate (Sem.Dependency_Relation) procedure Wf_Dependency_Relation (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Subprog_Sym : in Dictionary.Symbol; First_Seen : in Boolean; Glob_Def_Err : in Boolean; The_Heap : in out Heap.HeapRecord) is It : STree.Iterator; Next_Node : STree.SyntaxNode; Position : LexTokenManager.Token_Position; Abstraction : Dictionary.Abstractions; Error, Semantic_Error_Found : Boolean; Export_List, Import_List, Exports_In_Relation, Imports_In_Relation : SeqAlgebra.Seq; Null_Import_Node : STree.SyntaxNode; -- location of "derives null..." ------------------------------------------------------------------------ procedure Wf_Dependency_Clause (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Subprog_Sym : in Dictionary.Symbol; Abstraction : in Dictionary.Abstractions; Do_IFA : in Boolean; The_Heap : in out Heap.HeapRecord; Export_List, Import_List : out SeqAlgebra.Seq; Error_Found : out Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Dictionary.Dict, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# Abstraction, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Do_IFA, --# LexTokenManager.State, --# Node, --# STree.Table, --# Subprog_Sym, --# The_Heap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Do_IFA, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Subprog_Sym, --# The_Heap & --# Error_Found from Abstraction, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Do_IFA, --# LexTokenManager.State, --# Node, --# STree.Table, --# Subprog_Sym, --# The_Heap & --# Export_List, --# Import_List from The_Heap; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.dependency_clause or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.null_import_list; --# post STree.Table = STree.Table~; is Star_Node, Import_List_Node : STree.SyntaxNode := 
STree.NullNode; Export_List_Node : STree.SyntaxNode; First_Valid_Export : Dictionary.Symbol; Global_Variable_Sym : Dictionary.Symbol; Null_List : Boolean := False; ------------------------------------------------------------- procedure Valid_Export_List_Add (Sym : in Dictionary.Symbol; Export_List : in SeqAlgebra.Seq; The_Heap : in out Heap.HeapRecord) --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# The_Heap from *, --# Export_List, --# Sym, --# The_Heap; is begin SeqAlgebra.AddMember (The_Heap, Export_List, Natural (Dictionary.SymbolRef (Sym))); end Valid_Export_List_Add; ----------------------------------------------------------------------- -- We check that imports are of the correct mode and (at the -- main progam level) that they are initialized. If the import list -- includes a * self reference we do not need to make these checks where the -- import being checked is also an export since the checks will already have -- been made at the * node. This function is used by Check_Import_Mode and -- Check_Import_Init because the check needed is the same in both cases. function Export_Imported_By_Star (Import_Sym : Dictionary.Symbol; Export_List : SeqAlgebra.Seq; Star_Found : Boolean; The_Heap : Heap.HeapRecord) return Boolean is begin return Star_Found and then SeqAlgebra.IsMember (The_Heap, Export_List, Natural (Dictionary.SymbolRef (Import_Sym))); end Export_Imported_By_Star; ----------------------------------------------------------------------- -- ignore checks if the import is an export which has already been -- imported by use of the * abbreviation; this is because checks will already -- have been carried out at the * node. procedure Check_Import_Mode (Import_Sym : in Dictionary.Symbol; Subprog_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Abstraction : in Dictionary.Abstractions; Export_List : in SeqAlgebra.Seq; Error_Pos : in LexTokenManager.Token_Position; Star_Found : in Boolean; The_Heap : in Heap.HeapRecord; Error_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Pos, --# Export_List, --# Import_Sym, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# Star_Found, --# Subprog_Sym, --# The_Heap & --# Error_Found from *, --# Abstraction, --# Dictionary.Dict, --# Export_List, --# Import_Sym, --# Star_Found, --# Subprog_Sym, --# The_Heap; is begin if not Export_Imported_By_Star (Import_Sym => Import_Sym, Export_List => Export_List, Star_Found => Star_Found, The_Heap => The_Heap) then if Dictionary.IsFormalParameter (Subprog_Sym, Import_Sym) then if Dictionary.GetSubprogramParameterMode (Import_Sym) = Dictionary.OutMode then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 162, Reference => ErrorHandler.No_Reference, Position => Error_Pos, Id_Str => Dictionary.GetSimpleName (Import_Sym)); end if; elsif -- check mode, only "OUT" is unacceptable Dictionary.GetGlobalMode (Abstraction, Subprog_Sym, Import_Sym) = Dictionary.OutMode then Error_Found := True; ErrorHandler.Semantic_Error_Sym (Err_Num => 503, Reference => ErrorHandler.No_Reference, Position => Error_Pos, Sym => Import_Sym, Scope => Scope); end if; -- check here that we are not trying to import a mode out stream variable if 
Dictionary.GetOwnVariableOrConstituentMode (Import_Sym) = Dictionary.OutMode then ErrorHandler.Semantic_Error (Err_Num => 714, Reference => ErrorHandler.No_Reference, Position => Error_Pos, Id_Str => Dictionary.GetSimpleName (Import_Sym)); Error_Found := True; end if; end if; end Check_Import_Mode; ----------------------------------------------------------------------- -- ignore checks if the import is an export which has already been -- imported by use of the * abbreviation; this is because checks will already -- have been carried out at the * node. procedure Check_Import_Init (Import_Node_Pos : in LexTokenManager.Token_Position; Import_Sym, Subprog_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Abstraction : in Dictionary.Abstractions; Export_List : in SeqAlgebra.Seq; Star_Found : in Boolean; The_Heap : in Heap.HeapRecord; Error_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Export_List, --# Import_Node_Pos, --# Import_Sym, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# Star_Found, --# Subprog_Sym, --# The_Heap & --# Error_Found from *, --# Abstraction, --# Dictionary.Dict, --# Export_List, --# Import_Sym, --# Star_Found, --# Subprog_Sym, --# The_Heap; is begin if not Export_Imported_By_Star (Import_Sym => Import_Sym, Export_List => Export_List, Star_Found => Star_Found, The_Heap => The_Heap) then if Dictionary.IsTaskType (Subprog_Sym) and then Dictionary.IsOwnVariable (Import_Sym) and then not Dictionary.GetOwnVariableProtected (Import_Sym) and then Dictionary.GetOwnVariableMode (Import_Sym) /= Dictionary.InMode then -- This is an unprotected import to a task that is not of mode in. -- The elaboration policy is concurrent and hence we cannot -- guarantee the import is initialised before it is used. 
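            -- A minimal sketch of the two initialisation checks made by this
            -- procedure (annotation fragments are illustrative only; the
            -- package and variable names are invented).  A task may not
            -- import an unprotected own variable unless it is of mode in,
            -- and a main program may not import an own variable that its
            -- owning package does not announce as initialised:
            --
            --    task type Worker;
            --    --# global in out Plant.Unprotected_State;  -- flagged: not
            --                                                -- protected, not mode in
            --
            --    procedure Main;
            --    --# main_program;
            --    --# global in Sensor.State;  -- flagged if Sensor has no
            --                                 -- "--# initializes State;"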
Error_Found := True; ErrorHandler.Semantic_Error_Sym (Err_Num => 958, Reference => ErrorHandler.No_Reference, Position => Import_Node_Pos, Sym => Import_Sym, Scope => Scope); elsif (Dictionary.IsMainProgram (Subprog_Sym) or else Dictionary.IsTaskType (Subprog_Sym)) then if Dictionary.Is_Global_Variable (Abstraction, Subprog_Sym, Import_Sym) and then Dictionary.IsOwnVariable (Import_Sym) and then not Dictionary.OwnVariableIsInitialized (Import_Sym) and then Dictionary.GetOwnVariableOrConstituentMode (Import_Sym) = Dictionary.DefaultMode then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 167, Reference => ErrorHandler.No_Reference, Position => Import_Node_Pos, Id_Str => Dictionary.GetSimpleName (Import_Sym)); end if; end if; end if; end Check_Import_Init; ----------------------------------------------------------------------- procedure Do_Export_List (Node : in STree.SyntaxNode; Subprog_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Abstraction : in Dictionary.Abstractions; Export_List : in SeqAlgebra.Seq; The_Heap : in out Heap.HeapRecord; Error_Found : in out Boolean) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Dictionary.Dict, --# Error_Found, --# STree.Table from *, --# Abstraction, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# STree.Table, --# Subprog_Sym & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Subprog_Sym & --# Statistics.TableUsage, --# The_Heap from *, --# Abstraction, --# CommandLineData.Content, --# Dictionary.Dict, --# Export_List, --# LexTokenManager.State, --# Node, --# STree.Table, --# Subprog_Sym, --# The_Heap; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.dependency_clause_optrep; --# post STree.Table = STree.Table~; is Sym : Dictionary.Symbol; It : STree.Iterator; Export_Node : STree.SyntaxNode; Unused : Boolean; ----------------------------------------------------------------------- procedure Check_Export_Mode (Export_Sym : in Dictionary.Symbol; Subprog_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Abstraction : in Dictionary.Abstractions; Error_Pos : in LexTokenManager.Token_Position; Error_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Pos, --# Export_Sym, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# Subprog_Sym & --# Error_Found from *, --# Abstraction, --# Dictionary.Dict, --# Export_Sym, --# Subprog_Sym; --# is Mode : Dictionary.Modes; begin if Dictionary.IsFormalParameter (Subprog_Sym, Export_Sym) then Mode := Dictionary.GetSubprogramParameterMode (Export_Sym); if Mode = Dictionary.InMode or else Mode = Dictionary.DefaultMode then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 161, Reference => ErrorHandler.No_Reference, Position => Error_Pos, Id_Str => Dictionary.GetSimpleName (Export_Sym)); end if; else -- check mode, only "IN" is 
unacceptable Mode := Dictionary.GetGlobalMode (Abstraction, Subprog_Sym, Export_Sym); if Mode = Dictionary.InMode then Error_Found := True; ErrorHandler.Semantic_Error_Sym (Err_Num => 502, Reference => ErrorHandler.No_Reference, Position => Error_Pos, Sym => Export_Sym, Scope => Scope); end if; end if; end Check_Export_Mode; ----------------------------------------------------------------------- procedure Unique_Export (Export_Sym : in out Dictionary.Symbol; Error_Pos : in LexTokenManager.Token_Position; Subprog_Sym : in Dictionary.Symbol; Abstraction : in Dictionary.Abstractions; Error_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Pos, --# Export_Sym, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Subprog_Sym & --# Error_Found, --# Export_Sym from *, --# Abstraction, --# Dictionary.Dict, --# Export_Sym, --# Subprog_Sym; is Next_Export : Dictionary.Iterator; begin Next_Export := Dictionary.FirstExport (Abstraction, Subprog_Sym); while Next_Export /= Dictionary.NullIterator loop if Export_Sym = Dictionary.CurrentSymbol (Next_Export) then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 159, Reference => ErrorHandler.No_Reference, Position => Error_Pos, Id_Str => Dictionary.GetSimpleName (Export_Sym)); Export_Sym := Dictionary.NullSymbol; exit; end if; Next_Export := Dictionary.NextSymbol (Next_Export); end loop; end Unique_Export; begin -- Do_Export_List It := STree.Find_First_Node (Node_Kind => SP_Symbols.entire_variable, From_Root => Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Export_Node := STree.Get_Node (It => It); --# assert STree.Table = STree.Table~ and --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.dependency_clause_optrep and --# STree.Syntax_Node_Type (Export_Node, STree.Table) = SP_Symbols.entire_variable and --# Export_Node = STree.Get_Node (It); --# accept Flow, 10, Unused, "Expected ineffective assignment"; Sem.Wf_Entire_Variable (Node => Export_Node, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Subprog_Sym), Error_Hint => Sem.In_Derives_Export_List, Var_Sym => Sym, Dotted => Unused); --# end accept; Error_Found := Error_Found or else Dictionary.Is_Null_Symbol (Sym); if not Dictionary.Is_Null_Symbol (Sym) then Unique_Export (Export_Sym => Sym, Error_Pos => STree.Node_Position (Node => Export_Node), Subprog_Sym => Subprog_Sym, Abstraction => Abstraction, Error_Found => Error_Found); end if; -- check that the export is not a stream variable of mode in if not Dictionary.Is_Null_Symbol (Sym) then if Dictionary.GetOwnVariableOrConstituentMode (Sym) = Dictionary.InMode then ErrorHandler.Semantic_Error (Err_Num => 713, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Export_Node), Id_Str => Dictionary.GetSimpleName (Sym)); Error_Found := True; Sym := Dictionary.NullSymbol; end if; end if; if Dictionary.IsTaskType (Subprog_Sym) and then Dictionary.IsOwnVariable (Sym) and then not Dictionary.GetOwnVariableProtected (Sym) and then Dictionary.OwnVariableIsInitialized (Sym) then -- This export is defined during elaboration as well by this task. -- The order is non-deterministic. Hence we must prohibit the export -- being initialised during elaboration. 
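               -- A minimal sketch of the clash rejected here (illustrative
               -- names only): the own variable is given a value both at
               -- elaboration and by the task, and the relative order of the
               -- two definitions is not defined.
               --
               --    package Store
               --    --# own S;
               --    --# initializes S;        -- S defined at elaboration
               --    is
               --       ...
               --    end Store;
               --
               --    task type Worker;
               --    --# global out Store.S;   -- S also defined by the task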
ErrorHandler.Semantic_Error_Sym (Err_Num => 959, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Export_Node), Sym => Sym, Scope => Scope); end if; if not Dictionary.Is_Null_Symbol (Sym) then Check_Export_Mode (Export_Sym => Sym, Subprog_Sym => Subprog_Sym, Scope => Scope, Abstraction => Abstraction, Error_Pos => STree.Node_Position (Node => Export_Node), Error_Found => Error_Found); Dictionary.AddExport (Abstraction => Abstraction, TheProcedure => Subprog_Sym, TheExport => Sym, ExportReference => Dictionary.Location'(Start_Position => STree.Node_Position (Node => Export_Node), End_Position => STree.Node_Position (Node => Export_Node)), Annotation => Dictionary.Location'(Start_Position => STree.Node_Position (Node => Node), End_Position => STree.Node_Position (Node => Node))); Valid_Export_List_Add (Sym => Sym, Export_List => Export_List, The_Heap => The_Heap); end if; It := STree.NextNode (It); end loop; --# accept Flow, 33, Unused, "Expected to be neither referenced nor exported"; end Do_Export_List; ----------------------------------------------------------------------- procedure Do_Import_List (Node : in STree.SyntaxNode; Subprog_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Abstraction : in Dictionary.Abstractions; Do_IFA : in Boolean; Export_List, Import_List : in SeqAlgebra.Seq; Null_List : in Boolean; Star_Found : in Boolean; The_Heap : in out Heap.HeapRecord; Error_Found : in out Boolean; First_Valid_Export : out Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Dictionary.Dict, --# Error_Found, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# Abstraction, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Do_IFA, --# Export_List, --# Import_List, --# LexTokenManager.State, --# Node, --# Null_List, --# Star_Found, --# STree.Table, --# Subprog_Sym, --# The_Heap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Do_IFA, --# ErrorHandler.Error_Context, --# Export_List, --# Import_List, --# LexTokenManager.State, --# Node, --# Null_List, --# Scope, --# SPARK_IO.File_Sys, --# Star_Found, --# STree.Table, --# Subprog_Sym, --# The_Heap & --# First_Valid_Export from Export_List, --# The_Heap; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.dependency_clause_opt or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.dependency_clause_optrep; --# post STree.Table = STree.Table~; is Sym : Dictionary.Symbol; It : STree.Iterator; Import_Node : STree.SyntaxNode; Unused : Boolean; ----------------------------------------------------------------------- function Get_First_Valid_Export (Export_List : SeqAlgebra.Seq; The_Heap : Heap.HeapRecord) return Dictionary.Symbol is Member : SeqAlgebra.MemberOfSeq; Sym : Dictionary.Symbol; begin Member := SeqAlgebra.FirstMember (The_Heap, Export_List); if SeqAlgebra.IsNullMember (Member) then Sym := Dictionary.NullSymbol; else Sym := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (SeqAlgebra.Value_Of_Member (The_Heap => The_Heap, M => Member))); end if; return Sym; end Get_First_Valid_Export; -------------------------------------------------------------------- -- Uses an explicit 
import list. The import is passed in, -- if it is already in the import list an error is reported and a null symbol -- returned; else it is added to the list. Imports by virtue of * never -- appear in the list and do not give rise to duplicate errors. procedure Unique_Import (Import_Sym : in out Dictionary.Symbol; Import_List : in SeqAlgebra.Seq; Error_Pos : in LexTokenManager.Token_Position; The_Heap : in out Heap.HeapRecord; Error_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Pos, --# Import_List, --# Import_Sym, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# The_Heap & --# Error_Found, --# Import_Sym, --# Statistics.TableUsage, --# The_Heap from *, --# Import_List, --# Import_Sym, --# The_Heap; is Import_Rep : Natural; begin Import_Rep := Natural (Dictionary.SymbolRef (Import_Sym)); if SeqAlgebra.IsMember (The_Heap, Import_List, Import_Rep) then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 160, Reference => ErrorHandler.No_Reference, Position => Error_Pos, Id_Str => Dictionary.GetSimpleName (Import_Sym)); Import_Sym := Dictionary.NullSymbol; else SeqAlgebra.AddMember (The_Heap, Import_List, Import_Rep); end if; end Unique_Import; ----------------------------------------------------------------------- -- Similar to Unique_Import but this checks that the import has not appeared as -- an import anywhere in the dependency relation whereas Unique_Import only checks -- it is not duplicated in the current clause. Check_Not_Imported_Before is for use -- in handling "derives null from ..." cases where the imports must not have -- been imported before. 
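         -- For example (illustrative annotation fragment only), in
         --
         --    --# derives Out_Port.State from Raw_Data &
         --    --#         null            from Aux_In;
         --
         -- the null import clause for Aux_In is acceptable only because
         -- Aux_In appears as an import in no other clause of the relation;
         -- had it also been an import of Out_Port.State, the occurrence in
         -- the null clause would be reported as a duplicate.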
procedure Check_Not_Imported_Before (Import_Sym : in out Dictionary.Symbol; Subprog_Sym : in Dictionary.Symbol; Abstraction : in Dictionary.Abstractions; Import_List : in SeqAlgebra.Seq; Error_Pos : in LexTokenManager.Token_Position; The_Heap : in out Heap.HeapRecord; Error_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Pos, --# Import_Sym, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Subprog_Sym & --# Error_Found, --# Import_Sym from *, --# Abstraction, --# Dictionary.Dict, --# Import_Sym, --# Subprog_Sym & --# Statistics.TableUsage, --# The_Heap from *, --# Abstraction, --# Dictionary.Dict, --# Import_List, --# Import_Sym, --# Subprog_Sym, --# The_Heap; is It : Dictionary.Iterator; begin It := Dictionary.FirstImport (Abstraction, Subprog_Sym); while not Dictionary.IsNullIterator (It) loop if Dictionary.CurrentSymbol (It) = Import_Sym then -- duplicate Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 160, Reference => ErrorHandler.No_Reference, Position => Error_Pos, Id_Str => Dictionary.GetSimpleName (Import_Sym)); Import_Sym := Dictionary.NullSymbol; else SeqAlgebra.AddMember (The_Heap, Import_List, Natural (Dictionary.SymbolRef (Import_Sym))); end if; It := Dictionary.NextSymbol (It); end loop; end Check_Not_Imported_Before; ----------------------------------------------------------------------- procedure Add_Dependencies (Abstraction : in Dictionary.Abstractions; The_Procedure : in Dictionary.Symbol; Export_List : in SeqAlgebra.Seq; First_Export : in Dictionary.Symbol; The_Import : in Dictionary.Symbol; Import_Node_Pos : in Dictionary.Location; Star_Found : in Boolean; The_Heap : in Heap.HeapRecord) --# global in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# Abstraction, --# ContextManager.Ops.Unit_Stack, --# Export_List, --# First_Export, --# Import_Node_Pos, --# Star_Found, --# The_Heap, --# The_Import, --# The_Procedure & --# SPARK_IO.File_Sys from *, --# Abstraction, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Export_List, --# First_Export, --# Import_Node_Pos, --# LexTokenManager.State, --# Star_Found, --# The_Heap, --# The_Import, --# The_Procedure; is Member : SeqAlgebra.MemberOfSeq; Current_Export : Dictionary.Symbol; begin if Star_Found then Member := SeqAlgebra.FirstMember (The_Heap, Export_List); while not SeqAlgebra.IsNullMember (Member) loop Current_Export := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (SeqAlgebra.Value_Of_Member (The_Heap => The_Heap, M => Member))); if The_Import /= Current_Export then -- don't add if already added by * Dictionary.AddDependency (Abstraction => Abstraction, Comp_Unit => ContextManager.Ops.Current_Unit, TheProcedure => The_Procedure, TheExport => Current_Export, TheImport => The_Import, ImportReference => Import_Node_Pos); end if; Member := SeqAlgebra.NextMember (The_Heap, Member); end loop; else -- only need to copy it to first export (Copy_Imports will do rest) Dictionary.AddDependency (Abstraction => Abstraction, Comp_Unit => ContextManager.Ops.Current_Unit, TheProcedure => The_Procedure, TheExport => First_Export, TheImport => The_Import, 
ImportReference => Import_Node_Pos);
         end if;
      end Add_Dependencies;

   begin -- Do_Import_List
      First_Valid_Export := Get_First_Valid_Export (Export_List => Export_List, The_Heap => The_Heap);
      It := STree.Find_First_Node (Node_Kind    => SP_Symbols.entire_variable,
                                   From_Root    => Node,
                                   In_Direction => STree.Down);
      while not STree.IsNull (It) loop
         Import_Node := STree.Get_Node (It => It);
         --# assert STree.Table = STree.Table~ and
         --#        STree.Syntax_Node_Type (Import_Node, STree.Table) = SP_Symbols.entire_variable and
         --#        Import_Node = STree.Get_Node (It);
         --# accept Flow, 10, Unused, "Expected ineffective assignment";
         Sem.Wf_Entire_Variable (Node       => Import_Node,
                                 Scope      => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local,
                                                                          The_Unit       => Subprog_Sym),
                                 Error_Hint => Sem.In_Derives_Import_List,
                                 Var_Sym    => Sym,
                                 Dotted     => Unused);
         --# end accept;
         Error_Found := Error_Found or else Dictionary.Is_Null_Symbol (Sym);
         if not Dictionary.Is_Null_Symbol (Sym)
           and then not Dictionary.Is_Null_Symbol (First_Valid_Export) then -- also covers Null_List case
            Unique_Import (Import_Sym  => Sym,
                           Import_List => Import_List,
                           Error_Pos   => STree.Node_Position (Node => Import_Node),
                           The_Heap    => The_Heap,
                           Error_Found => Error_Found);
         end if;
         -- New check, similar to Unique_Import except that for a null import clause
         -- we require that the variable has not appeared as an import anywhere else rather
         -- than just in the current clause.
         if Null_List and then not Dictionary.Is_Null_Symbol (Sym) then
            Check_Not_Imported_Before (Import_Sym  => Sym,
                                       Subprog_Sym => Subprog_Sym,
                                       Abstraction => Abstraction,
                                       Import_List => Import_List,
                                       Error_Pos   => STree.Node_Position (Node => Import_Node),
                                       The_Heap    => The_Heap,
                                       Error_Found => Error_Found);
         end if;
         if not Dictionary.Is_Null_Symbol (Sym) then -- we have a valid import so far
            Check_Import_Mode (Import_Sym  => Sym,
                               Subprog_Sym => Subprog_Sym,
                               Scope       => Scope,
                               Abstraction => Abstraction,
                               Export_List => Export_List,
                               Error_Pos   => STree.Node_Position (Node => Import_Node),
                               Star_Found  => Star_Found,
                               The_Heap    => The_Heap,
                               Error_Found => Error_Found);
            Check_Import_Init (Import_Node_Pos => STree.Node_Position (Node => Import_Node),
                               Import_Sym      => Sym,
                               Subprog_Sym     => Subprog_Sym,
                               Scope           => Scope,
                               Abstraction     => Abstraction,
                               Export_List     => Export_List,
                               Star_Found      => Star_Found,
                               The_Heap        => The_Heap,
                               Error_Found     => Error_Found);
            -- One of two things must happen to the valid import we have at this point.
            -- If we are doing IFA and we have a valid export to associate it with, we do so;
            -- however, if we are not doing IFA or there is no valid export we just
            -- mark the import in the dictionary as being an import using the force call.
            --# accept Flow, 41, "Expected stable expression";
            if Do_IFA and then not Dictionary.Is_Null_Symbol (First_Valid_Export) then
               --# end accept;
               Add_Dependencies (Abstraction     => Abstraction,
                                 The_Procedure   => Subprog_Sym,
                                 Export_List     => Export_List,
                                 First_Export    => First_Valid_Export,
                                 The_Import      => Sym,
                                 Import_Node_Pos => Dictionary.Location'(Start_Position => STree.Node_Position (Node => Import_Node),
                                                                         End_Position   => STree.Node_Position (Node => Import_Node)),
                                 Star_Found      => Star_Found,
                                 The_Heap        => The_Heap);
            else
               -- No valid export (including null derives case) to associate with or else we
               -- are just doing DFA.
In either case mark as import to avoid -- knock-on errors Dictionary.ForceImport (Abstraction, Subprog_Sym, Sym, Dictionary.Location'(Start_Position => STree.Node_Position (Node => Import_Node), End_Position => STree.Node_Position (Node => Import_Node))); end if; end if; It := STree.NextNode (It); end loop; --# accept Flow, 33, Unused, "Expected to be neither referenced nor exported"; end Do_Import_List; ----------------------------------------------------------------------- procedure Do_Self_References (Star_Node_Pos : in LexTokenManager.Token_Position; Subprog_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Abstraction : in Dictionary.Abstractions; Export_List : in SeqAlgebra.Seq; The_Heap : in Heap.HeapRecord; Error_Found : in out Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict, --# Error_Found from *, --# Abstraction, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Export_List, --# Star_Node_Pos, --# Subprog_Sym, --# The_Heap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Export_List, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# Star_Node_Pos, --# Subprog_Sym, --# The_Heap; is Member : SeqAlgebra.MemberOfSeq; The_Export : Dictionary.Symbol; begin Member := SeqAlgebra.FirstMember (The_Heap, Export_List); while not SeqAlgebra.IsNullMember (Member) loop The_Export := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (SeqAlgebra.Value_Of_Member (The_Heap => The_Heap, M => Member))); Check_Import_Mode (Import_Sym => The_Export, Subprog_Sym => Subprog_Sym, Scope => Scope, Abstraction => Abstraction, Export_List => Export_List, Error_Pos => Star_Node_Pos, Star_Found => False, The_Heap => The_Heap, Error_Found => Error_Found); Check_Import_Init (Import_Node_Pos => Star_Node_Pos, Import_Sym => The_Export, Subprog_Sym => Subprog_Sym, Scope => Scope, Abstraction => Abstraction, Export_List => Export_List, Star_Found => False, The_Heap => The_Heap, Error_Found => Error_Found); Dictionary.AddDependency (Abstraction => Abstraction, Comp_Unit => ContextManager.Ops.Current_Unit, TheProcedure => Subprog_Sym, TheExport => The_Export, TheImport => The_Export, ImportReference => Dictionary.Location'(Start_Position => Star_Node_Pos, End_Position => Star_Node_Pos)); Member := SeqAlgebra.NextMember (The_Heap, Member); end loop; end Do_Self_References; ----------------------------------------------------------------------- -- This procedure does checks on * node when no IFA selected procedure Append_Self_References (Star_Node_Pos : in LexTokenManager.Token_Position; Subprog_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Abstraction : in Dictionary.Abstractions; Export_List, Import_List : in SeqAlgebra.Seq; The_Heap : in out Heap.HeapRecord; Error_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Export_List, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# Star_Node_Pos, 
--# Subprog_Sym, --# The_Heap & --# Error_Found from *, --# Abstraction, --# Dictionary.Dict, --# Export_List, --# Subprog_Sym, --# The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# Export_List, --# Import_List, --# The_Heap; is Member : SeqAlgebra.MemberOfSeq; The_Export : Dictionary.Symbol; begin Member := SeqAlgebra.FirstMember (The_Heap, Export_List); while not SeqAlgebra.IsNullMember (Member) loop The_Export := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (SeqAlgebra.Value_Of_Member (The_Heap => The_Heap, M => Member))); Check_Import_Mode (Import_Sym => The_Export, Subprog_Sym => Subprog_Sym, Scope => Scope, Abstraction => Abstraction, Export_List => Export_List, Error_Pos => Star_Node_Pos, Star_Found => False, The_Heap => The_Heap, Error_Found => Error_Found); Check_Import_Init (Import_Node_Pos => Star_Node_Pos, Import_Sym => The_Export, Subprog_Sym => Subprog_Sym, Scope => Scope, Abstraction => Abstraction, Export_List => Export_List, Star_Found => False, The_Heap => The_Heap, Error_Found => Error_Found); Member := SeqAlgebra.NextMember (The_Heap, Member); end loop; SeqAlgebra.AugmentSeq (The_Heap, Import_List, Export_List); end Append_Self_References; ----------------------------------------------------------------------- procedure Copy_Imports (Subprog_Sym : in Dictionary.Symbol; Abstraction : in Dictionary.Abstractions; Export_List : in SeqAlgebra.Seq; First_Valid_Export : in Dictionary.Symbol; The_Heap : in Heap.HeapRecord) --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Abstraction, --# Export_List, --# First_Valid_Export, --# Subprog_Sym, --# The_Heap; is Member : SeqAlgebra.MemberOfSeq; begin Member := SeqAlgebra.FirstMember (The_Heap, Export_List); if not SeqAlgebra.IsNullMember (Member) then -- there is at leat one valid export, we want to loop through rest loop Member := SeqAlgebra.NextMember (The_Heap, Member); exit when SeqAlgebra.IsNullMember (Member); Dictionary.CopyDependencyList (Abstraction, Subprog_Sym, First_Valid_Export, Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (SeqAlgebra.Value_Of_Member (The_Heap => The_Heap, M => Member)))); end loop; end if; end Copy_Imports; ----------------------------------------------------------------------- procedure Check_Star (Node : in STree.SyntaxNode; Star_Node, Imp_Node : out STree.SyntaxNode) --# global in STree.Table; --# derives Imp_Node, --# Star_Node from Node, --# STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.dependency_clause; --# post STree.Syntax_Node_Type (Imp_Node, STree.Table) = SP_Symbols.dependency_clause_opt and --# (STree.Syntax_Node_Type (Star_Node, STree.Table) = SP_Symbols.multiply or --# Star_Node = STree.NullNode); is begin Imp_Node := STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node)); -- ASSUME Imp_Node = dependency_clause_opt SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Imp_Node) = SP_Symbols.dependency_clause_opt, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Imp_Node = dependency_clause_opt in Check_Star"); Star_Node := STree.Child_Node (Current_Node => Imp_Node); -- ASSUME Star_Node = multiply OR dependency_clause_optrep OR NULL if Star_Node = STree.NullNode or else STree.Syntax_Node_Type (Node => Star_Node) = SP_Symbols.dependency_clause_optrep then -- ASSUME Star_Node = dependency_clause_optrep OR NULL Star_Node := STree.NullNode; elsif STree.Syntax_Node_Type (Node => Star_Node) /= SP_Symbols.multiply then Star_Node := STree.NullNode; 
SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Star_Node = multiply OR dependency_clause_optrep OR NULL in Check_Star"); end if; end Check_Star; begin -- Wf_Dependency_Clause -- For each exported variable E : E is a visible entire -- variable; occurrence of E as an export is unique (Section -- 6.1.2 Rule 6); mode of E is in out or out (Section 6.1.2 -- Rule 2); for each imported variable I : I is a visible -- entire variable; occurrence of I as an import is unique -- (Section 6.1.2 Rule 6); mode of I is in or in out (Section -- 6.1.2 Rule 1); if dependency clause belongs to main_program, -- then I is initialized by the package of which it is an own -- variable; SeqAlgebra.CreateSeq (The_Heap, Export_List); SeqAlgebra.CreateSeq (The_Heap, Import_List); Error_Found := False; -- ASSUME Node = dependency_clause OR null_import_list if STree.Syntax_Node_Type (Node => Node) = SP_Symbols.null_import_list then -- ASSUME Node = null_import_list -- we are dealing with " derives null from ..." so there are no stars to worry -- about and we don't have to go looking for the node holding the list of imports -- because that is what has been passed in Null_List := True; Star_Node := STree.NullNode; -- not used but removes apparent DFA Import_List_Node := STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node)); -- ASSUME Import_List_Node = dependency_clause_optrep SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Import_List_Node) = SP_Symbols.dependency_clause_optrep, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Import_List_Node = dependency_clause_optrep in Wf_Dependency_Clause"); -- Add NullVariable as an export to provide a data sink for the imported items Dictionary.AddGlobalVariable (Abstraction => Abstraction, Subprogram => Subprog_Sym, Variable => Dictionary.GetNullVariable, Mode => Dictionary.OutMode, PrefixNeeded => False, Comp_Unit => ContextManager.Ops.Current_Unit, VariableReference => Dictionary.Location'(Start_Position => STree.Node_Position (Node => Import_List_Node), End_Position => STree.Node_Position (Node => Import_List_Node)), Global_Variable_Sym => Global_Variable_Sym); STree.Add_Node_Symbol (Node => Node, Sym => Global_Variable_Sym); Dictionary.AddExport (Abstraction => Abstraction, TheProcedure => Subprog_Sym, TheExport => Dictionary.GetNullVariable, ExportReference => Dictionary.Location'(Start_Position => STree.Node_Position (Node => Import_List_Node), End_Position => STree.Node_Position (Node => Import_List_Node)), Annotation => Dictionary.Location'(Start_Position => STree.Node_Position (Node => Import_List_Node), End_Position => STree.Node_Position (Node => Import_List_Node))); Valid_Export_List_Add (Sym => Dictionary.GetNullVariable, Export_List => Export_List, The_Heap => The_Heap); elsif STree.Syntax_Node_Type (Node => Node) = SP_Symbols.dependency_clause then -- ASSUME Node = dependency_clause Null_List := False; -- it's an ordinary dependency clause Export_List_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Export_List_Node = dependency_clause_optrep SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Export_List_Node) = SP_Symbols.dependency_clause_optrep, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Export_List_Node = dependency_clause_optrep in Wf_Dependency_Clause"); Check_Star (Node => Node, Star_Node => Star_Node, Imp_Node => Import_List_Node); Do_Export_List (Node => Export_List_Node, Subprog_Sym => Subprog_Sym, Scope => Scope, Abstraction => 
Abstraction, Export_List => Export_List, The_Heap => The_Heap, Error_Found => Error_Found); end if; if Do_IFA then -- Do_IFA can only be False here in SPARK 83 DFA mode if STree.Syntax_Node_Type (Node => Star_Node) = SP_Symbols.multiply then -- ASSUME Star_Node = multiply Do_Self_References (Star_Node_Pos => STree.Node_Position (Node => Star_Node), Subprog_Sym => Subprog_Sym, Scope => Scope, Abstraction => Abstraction, Export_List => Export_List, The_Heap => The_Heap, Error_Found => Error_Found); --# accept Flow, 10, First_Valid_Export, "Expected ineffective assignment"; Do_Import_List (Node => Import_List_Node, Subprog_Sym => Subprog_Sym, Scope => Scope, Abstraction => Abstraction, Do_IFA => Do_IFA, Export_List => Export_List, Import_List => Import_List, Null_List => Null_List, Star_Found => True, The_Heap => The_Heap, Error_Found => Error_Found, First_Valid_Export => First_Valid_Export); --# end accept; elsif Star_Node = STree.NullNode then -- ASSUME Star_Node = NULL Do_Import_List (Node => Import_List_Node, Subprog_Sym => Subprog_Sym, Scope => Scope, Abstraction => Abstraction, Do_IFA => Do_IFA, Export_List => Export_List, Import_List => Import_List, Null_List => Null_List, Star_Found => False, The_Heap => The_Heap, Error_Found => Error_Found, First_Valid_Export => First_Valid_Export); Copy_Imports (Subprog_Sym => Subprog_Sym, Abstraction => Abstraction, Export_List => Export_List, First_Valid_Export => First_Valid_Export, The_Heap => The_Heap); end if; else --# accept Flow, 10, First_Valid_Export, "Expected ineffective assignment"; Do_Import_List (Node => Import_List_Node, Subprog_Sym => Subprog_Sym, Scope => Scope, Abstraction => Abstraction, Do_IFA => Do_IFA, Export_List => Export_List, Import_List => Import_List, Null_List => Null_List, Star_Found => STree.Syntax_Node_Type (Node => Star_Node) = SP_Symbols.multiply, The_Heap => The_Heap, Error_Found => Error_Found, First_Valid_Export => First_Valid_Export); --# end accept; if STree.Syntax_Node_Type (Node => Star_Node) = SP_Symbols.multiply then -- ASSUME Star_Node = multiply Append_Self_References (Star_Node_Pos => STree.Node_Position (Node => Star_Node), Subprog_Sym => Subprog_Sym, Scope => Scope, Abstraction => Abstraction, Export_List => Export_List, Import_List => Import_List, The_Heap => The_Heap, Error_Found => Error_Found); end if; end if; end Wf_Dependency_Clause; ---------------------------------------------------------------------------- procedure Params_Are_Imports_Or_Exports (Subprog_Sym : in Dictionary.Symbol; Abstraction : in Dictionary.Abstractions; Error_Pos : in LexTokenManager.Token_Position; Semantic_Error_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Pos, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Subprog_Sym & --# Semantic_Error_Found from *, --# Abstraction, --# CommandLineData.Content, --# Dictionary.Dict, --# Subprog_Sym; is Next_Param : Dictionary.Iterator; Param_Sym : Dictionary.Symbol; begin Next_Param := Dictionary.FirstSubprogramParameter (Subprog_Sym); while Next_Param /= Dictionary.NullIterator loop Param_Sym := Dictionary.CurrentSymbol (Next_Param); if not Dictionary.IsImport (Abstraction, Subprog_Sym, Param_Sym) and then not Dictionary.IsExport (Abstraction, Subprog_Sym, Param_Sym) 
then Semantic_Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 200, Reference => ErrorHandler.No_Reference, Position => Error_Pos, Id_Str => Dictionary.GetSimpleName (Param_Sym)); end if; -- test to ensure that parameters of mode IN OUT are imported if Dictionary.GetSubprogramParameterMode (Param_Sym) = Dictionary.InOutMode then if not Dictionary.IsImport (Abstraction, Subprog_Sym, Param_Sym) then --# accept Flow, 41, "Expected stable expression"; if CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83 then --# end accept; Semantic_Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 504, Reference => ErrorHandler.No_Reference, Position => Error_Pos, Id_Str => Dictionary.GetSimpleName (Param_Sym)); elsif Dictionary.TypeIsScalar (Dictionary.GetType (Param_Sym)) then Semantic_Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 338, Reference => ErrorHandler.No_Reference, Position => Error_Pos, Id_Str => Dictionary.GetSimpleName (Param_Sym)); end if; end if; --end of checks that in out is an import -- check that in out is exported if CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83 and then not Dictionary.IsExport (Abstraction, Subprog_Sym, Param_Sym) then Semantic_Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 506, Reference => ErrorHandler.No_Reference, Position => Error_Pos, Id_Str => Dictionary.GetSimpleName (Param_Sym)); end if; end if; Next_Param := Dictionary.NextSymbol (Next_Param); end loop; end Params_Are_Imports_Or_Exports; --------------------------------------------------------------------------- procedure Globals_Are_Imports_Or_Exports (Subprog_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Abstraction : in Dictionary.Abstractions; Error_Pos : in LexTokenManager.Token_Position; Semantic_Error_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Pos, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# Subprog_Sym & --# Semantic_Error_Found from *, --# Abstraction, --# Dictionary.Dict, --# Subprog_Sym; --# is Next_Global : Dictionary.Iterator; Global_Sym : Dictionary.Symbol; begin Next_Global := Dictionary.FirstGlobalVariable (Abstraction, Subprog_Sym); while Next_Global /= Dictionary.NullIterator loop Global_Sym := Dictionary.CurrentSymbol (Next_Global); if not Dictionary.IsImport (Abstraction, Subprog_Sym, Global_Sym) and then not Dictionary.IsExport (Abstraction, Subprog_Sym, Global_Sym) then Semantic_Error_Found := True; ErrorHandler.Semantic_Error_Sym (Err_Num => 201, Reference => ErrorHandler.No_Reference, Position => Error_Pos, Sym => Global_Sym, Scope => Scope); end if; -- check that globals with mode in out are both imported and exported. 
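-- Editorial sketch (not part of the original source): the shape of
-- specification these checks expect.  A global of mode "in out" has to
-- appear as both an import and an export of the subprogram, e.g.
--
--    procedure Increment;
--    --# global in out Counter;
--    --# derives Counter from Counter;
--
-- If Counter were missing from the import side of the dependency relation
-- the check below reports error 505; if it were missing as an export,
-- error 507.  (The names above are invented for illustration only.)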
if Dictionary.GetGlobalMode (Abstraction, Subprog_Sym, Global_Sym) = Dictionary.InOutMode then if not Dictionary.IsImport (Abstraction, Subprog_Sym, Global_Sym) then Semantic_Error_Found := True; ErrorHandler.Semantic_Error_Sym (Err_Num => 505, Reference => ErrorHandler.No_Reference, Position => Error_Pos, Sym => Global_Sym, Scope => Scope); end if; if not Dictionary.IsExport (Abstraction, Subprog_Sym, Global_Sym) then Semantic_Error_Found := True; ErrorHandler.Semantic_Error_Sym (Err_Num => 507, Reference => ErrorHandler.No_Reference, Position => Error_Pos, Sym => Global_Sym, Scope => Scope); end if; end if; Next_Global := Dictionary.NextSymbol (Next_Global); end loop; end Globals_Are_Imports_Or_Exports; --------------------------------------------------------------------------- procedure Check_Information_Flow_Policy (Subprog_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; First_Seen : in Boolean; Position : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# First_Seen, --# LexTokenManager.State, --# Position, --# Scope, --# SPARK_IO.File_Sys, --# Subprog_Sym; is type FS_To_Abs_Table is array (Boolean) of Dictionary.Abstractions; To_Abs : constant FS_To_Abs_Table := FS_To_Abs_Table'(False => Dictionary.IsRefined, True => Dictionary.IsAbstract); Export_It : Dictionary.Iterator; Import_It : Dictionary.Iterator; The_Export : Dictionary.Symbol; The_Import : Dictionary.Symbol; begin -- Iterate over all the Exports of this subprogram... Export_It := Dictionary.FirstExport (To_Abs (First_Seen), Subprog_Sym); while Export_It /= Dictionary.NullIterator loop The_Export := Dictionary.CurrentSymbol (Export_It); Import_It := Dictionary.FirstDependency (To_Abs (First_Seen), Subprog_Sym, The_Export); -- For each export, iterate over all the imports... while Import_It /= Dictionary.NullIterator loop The_Import := Dictionary.CurrentSymbol (Import_It); -- ...and see if each relation violates the current information -- flow policy. 
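-- Editorial sketch (not part of the original source): a dependency such as
--
--    --# derives Low_Output from High_Input;
--
-- is reported as a policy violation here when the information-flow policy
-- in force assigns High_Input a higher integrity/security level than
-- Low_Output.  The variable names and levels are invented for illustration;
-- the actual judgement is delegated to
-- Dictionary.RelationViolatesInfoFlowPolicy below.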
if Dictionary.RelationViolatesInfoFlowPolicy (The_Export, The_Import) then ErrorHandler.Dependency_Error (Err_Type => ErrorHandler.Policy_Violation, Position => Position, Import_Var_Sym => The_Import, Export_Var_Sym => The_Export, Scope => Scope); end if; Import_It := Dictionary.NextSymbol (Import_It); end loop; Export_It := Dictionary.NextSymbol (Export_It); end loop; end Check_Information_Flow_Policy; begin -- Wf_Dependency_Relation Position := STree.FindLastItemInDependencyRelation (Node => Node); if CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83 and then CommandLineData.Content.Flow_Option = CommandLineData.Data_Flow then -- derives not used in this case, don't process, just issue note ErrorHandler.Semantic_Note (Err_Num => 1, Position => Position, Id_Str => LexTokenManager.Null_String); else -- proceed as usual (even if doing DFA in 83 mode) if First_Seen then Abstraction := Dictionary.IsAbstract; else Abstraction := Dictionary.IsRefined; end if; SeqAlgebra.CreateSeq (The_Heap, Exports_In_Relation); SeqAlgebra.CreateSeq (The_Heap, Imports_In_Relation); Semantic_Error_Found := Glob_Def_Err; Dictionary.AddDependencyRelation (Abstraction, Subprog_Sym, Dictionary.Location'(Start_Position => STree.Node_Position (Node => Node), End_Position => STree.Node_Position (Node => Node))); --# assert STree.Table = STree.Table~ and --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.dependency_relation; It := STree.Find_First_Node (Node_Kind => SP_Symbols.dependency_clause, From_Root => Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := STree.Get_Node (It => It); --# assert STree.Table = STree.Table~ and --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.dependency_relation and --# STree.Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.dependency_clause and --# Next_Node = STree.Get_Node (It); -- globals Import_List, Export_List are created in wf_dependency_clause Wf_Dependency_Clause (Node => Next_Node, Scope => Scope, Subprog_Sym => Subprog_Sym, Abstraction => Abstraction, Do_IFA => CommandLineData.Content.Flow_Option /= CommandLineData.Data_Flow, The_Heap => The_Heap, Export_List => Export_List, Import_List => Import_List, Error_Found => Error); Semantic_Error_Found := Semantic_Error_Found or else Error; -- add in the imports and exports from this clause to the total imports/exports --# accept Flow, 41, "Expected stable expression"; if CommandLineData.Content.Flow_Option = CommandLineData.Data_Flow then --# end accept; SeqAlgebra.AugmentSeq (The_Heap, Exports_In_Relation, Export_List); SeqAlgebra.AugmentSeq (The_Heap, Imports_In_Relation, Import_List); end if; SeqAlgebra.DisposeOfSeq (The_Heap, Export_List); SeqAlgebra.DisposeOfSeq (The_Heap, Import_List); It := STree.NextNode (It); end loop; --# assert STree.Table = STree.Table~ and --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.dependency_relation; -- Process any import list of the form "derives null from x, y, z;" Null_Import_Node := STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => Node)); -- ASSUME Null_Import_Node = dependency_relation_rep OR null_import_list OR NULL if STree.Syntax_Node_Type (Node => Null_Import_Node) = SP_Symbols.dependency_relation_rep then -- ASSUME Null_Import_Node = dependency_relation_rep Null_Import_Node := STree.Next_Sibling (Current_Node => Null_Import_Node); -- ASSUME Null_Import_Node = ampersand OR NULL if STree.Syntax_Node_Type (Node => Null_Import_Node) = SP_Symbols.ampersand then -- ASSUME Null_Import_Node 
= ampersand Null_Import_Node := STree.Next_Sibling (Current_Node => Null_Import_Node); elsif Null_Import_Node /= STree.NullNode then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Null_Import_Node = ampersand OR NULL in Wf_Dependency_Relation"); end if; elsif Null_Import_Node /= STree.NullNode and then STree.Syntax_Node_Type (Node => Null_Import_Node) /= SP_Symbols.null_import_list then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Null_Import_Node = dependency_relation_rep OR null_import_list OR NULL in Wf_Dependency_Relation"); end if; -- ASSUME Null_Import_Node = null_import_list OR NULL -- if there is a null import clause then Null_Import_Node is now pointing at it if STree.Syntax_Node_Type (Node => Null_Import_Node) = SP_Symbols.null_import_list then Wf_Dependency_Clause (Node => Null_Import_Node, Scope => Scope, Subprog_Sym => Subprog_Sym, Abstraction => Abstraction, Do_IFA => CommandLineData.Content.Flow_Option /= CommandLineData.Data_Flow, The_Heap => The_Heap, Export_List => Export_List, Import_List => Import_List, Error_Found => Error); Semantic_Error_Found := Semantic_Error_Found or else Error; -- add in the imports and exports from this clause to the total imports/exports if CommandLineData.Content.Flow_Option = CommandLineData.Data_Flow then -- stable index OK SeqAlgebra.AugmentSeq (The_Heap, Exports_In_Relation, Export_List); SeqAlgebra.AugmentSeq (The_Heap, Imports_In_Relation, Import_List); end if; SeqAlgebra.DisposeOfSeq (The_Heap, Export_List); SeqAlgebra.DisposeOfSeq (The_Heap, Import_List); elsif Null_Import_Node /= STree.NullNode then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Null_Import_Node = null_import_list OR NULL in Wf_Dependency_Relation"); end if; --# assert STree.Table = STree.Table~ and --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.dependency_relation; -- make all exports depend on all imports here if CommandLineData.Content.Flow_Option = CommandLineData.Data_Flow then ErrorHandler.Semantic_Note (Err_Num => 2, Position => Position, Id_Str => LexTokenManager.Null_String); Create_Full_Dependency (Node_Pos => STree.Node_Position (Node => Node), Subprog_Sym => Subprog_Sym, Abstraction => Abstraction, Import_List => Imports_In_Relation, Export_List => Exports_In_Relation, The_Heap => The_Heap); end if; SeqAlgebra.DisposeOfSeq (The_Heap, Exports_In_Relation); SeqAlgebra.DisposeOfSeq (The_Heap, Imports_In_Relation); Params_Are_Imports_Or_Exports (Subprog_Sym => Subprog_Sym, Abstraction => Abstraction, Error_Pos => Position, Semantic_Error_Found => Semantic_Error_Found); Globals_Are_Imports_Or_Exports (Subprog_Sym => Subprog_Sym, Scope => Scope, Abstraction => Abstraction, Error_Pos => Position, Semantic_Error_Found => Semantic_Error_Found); -- If this is the first derives anno then record the fact that it was present -- Record in the dictionary that this subprogram has an explicit derives annotation, -- unless in data-flow mode when the derives annotation is ignored. (If it was -- recorded as being present in data-flow mode then the flow analyser would check it -- against the calculated rho relation.) -- -- There is a special case for when we are doing data-flow analysis in 83 mode. In -- 83 there are no moded globals, so the derives anno is used (only) to work out the -- modes for the globals, but the full dependency relation is not stored and is not -- checked against calculated information flow. 
Subsequent analysis proceeds just as -- if there had been a moded global anno and no derives, so we do not record the -- presence of a derives anno in data-flow mode, even if language=83, although the -- flag Dependency_Found is still set to suppress error 501 (see below). if First_Seen and then CommandLineData.Content.Flow_Option /= CommandLineData.Data_Flow then Dictionary.SetHasDerivesAnnotation (Subprog_Sym); end if; -- do consistency checks if second anno and no semantic errors found and -- second global annotation is not missing completely if not (First_Seen or else Semantic_Error_Found or else (STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => STree.Parent_Node (Current_Node => Node))) = SP_Symbols.dependency_relation)) then Check_Derives_Consistency (Subprog_Sym => Subprog_Sym, Position => Position, The_Heap => The_Heap); end if; if not Semantic_Error_Found then Check_Information_Flow_Policy (Subprog_Sym => Subprog_Sym, Scope => Scope, First_Seen => First_Seen, Position => Position); end if; -- mark subprogram as having incorrect signature if necessary if Semantic_Error_Found then Dictionary.SetSubprogramSignatureNotWellformed (Abstraction, Subprog_Sym); end if; end if; --# assert STree.Table = STree.Table~ and --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.dependency_relation; if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Derives (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Node, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Subprog_Sym), Subprog_Sym => Subprog_Sym); end if; end Wf_Dependency_Relation; spark-2012.0.deb/examiner/lextokenmanager-seq_algebra.adb0000644000175000017500000004443711753202336022364 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Ada.Text_IO; with E_Strings.Not_SPARK; package body LexTokenManager.Seq_Algebra is function Is_Null_Seq (S : Seq) return Boolean is begin return SeqAlgebra.Is_Null_Seq (S => S.The_Seq); end Is_Null_Seq; function Is_Null_Member (M : Member_Of_Seq) return Boolean is begin return SeqAlgebra.IsNullMember (M => M.Member); end Is_Null_Member; function First_Member (The_Heap : Heap.HeapRecord; S : Seq) return Member_Of_Seq is begin return Member_Of_Seq'(Member => SeqAlgebra.FirstMember (TheHeap => The_Heap, S => S.The_Seq)); end First_Member; function Next_Member (The_Heap : Heap.HeapRecord; M : Member_Of_Seq) return Member_Of_Seq is begin return Member_Of_Seq'(Member => SeqAlgebra.NextMember (TheHeap => The_Heap, M => M.Member)); end Next_Member; -- Puts a marker atom (index S) onto TheHeap, -- with no members (A pointer is 0). procedure Create_Seq (The_Heap : in out Heap.HeapRecord; S : out Seq) is begin SeqAlgebra.CreateSeq (TheHeap => The_Heap, S => S.The_Seq); end Create_Seq; function Is_Empty_Seq (The_Heap : Heap.HeapRecord; S : Seq) return Boolean is begin return SeqAlgebra.IsEmptySeq (TheHeap => The_Heap, S => S.The_Seq); end Is_Empty_Seq; -- Frees all the atoms on the heap relating to -- sequence S. procedure Dispose_Of_Seq (The_Heap : in out Heap.HeapRecord; S : in Seq) is begin SeqAlgebra.DisposeOfSeq (TheHeap => The_Heap, S => S.The_Seq); end Dispose_Of_Seq; function Before_First_Member (S : Seq) return Member_Of_Seq is begin return Member_Of_Seq'(Member => SeqAlgebra.BeforeFirstMember (S => S.The_Seq)); end Before_First_Member; -- Note if this is used with a Seq representing a set this will -- destroy the numerical ordering of the set. procedure Append_After (The_Heap : in out Heap.HeapRecord; M : in out Member_Of_Seq; Given_Value : in LexTokenManager.Lex_String) is begin SeqAlgebra.AppendAfter (TheHeap => The_Heap, M => M.Member, GivenValue => Natural (Given_Value)); end Append_After; function Value_Of_Member (The_Heap : Heap.HeapRecord; M : Member_Of_Seq) return LexTokenManager.Lex_String is begin return LexTokenManager.Lex_String (SeqAlgebra.Value_Of_Member (The_Heap => The_Heap, M => M.Member)); end Value_Of_Member; -- Preserves the lexical ordering of the set. -- Do not use with a Seq representingg a sequence it may -- destroy the sequence order. procedure Add_Member (The_Heap : in out Heap.HeapRecord; S : in Seq; Given_Value : in LexTokenManager.Lex_String) is Member_Present : Boolean; M, N : Member_Of_Seq; Value_Of_N : LexTokenManager.Lex_String; begin Member_Present := False; M := Before_First_Member (S => S); N := First_Member (The_Heap => The_Heap, S => S); loop exit when SeqAlgebra.IsNullMember (M => N.Member); Value_Of_N := Value_Of_Member (The_Heap => The_Heap, M => N); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Value_Of_N, Lex_Str2 => Given_Value) = LexTokenManager.Str_Eq then Member_Present := True; exit; end if; exit when LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Value_Of_N, Lex_Str2 => Given_Value) = LexTokenManager.Str_Second; M := N; N := Next_Member (The_Heap => The_Heap, M => N); end loop; if not Member_Present then -- we don't need the updated value of M in this case --# accept F, 10, M, "M unused here"; SeqAlgebra.AppendAfter (TheHeap => The_Heap, M => M.Member, GivenValue => Natural (Given_Value)); --# end accept; end if; end Add_Member; -- This operation uses the lexical ordering of a set. 
-- It might not remove an element from a sequence even if the element exists. procedure Remove_Member (The_Heap : in out Heap.HeapRecord; S : in Seq; Given_Value : in LexTokenManager.Lex_String) is Member_Present : Boolean; M, N : Member_Of_Seq; Value_Of_N : LexTokenManager.Lex_String; begin Member_Present := False; M := Before_First_Member (S => S); N := First_Member (The_Heap => The_Heap, S => S); loop exit when SeqAlgebra.IsNullMember (M => N.Member); Value_Of_N := Value_Of_Member (The_Heap => The_Heap, M => N); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Value_Of_N, Lex_Str2 => Given_Value) = LexTokenManager.Str_Eq then Member_Present := True; exit; end if; exit when LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Value_Of_N, Lex_Str2 => Given_Value) = LexTokenManager.Str_Second; M := N; N := Next_Member (The_Heap => The_Heap, M => N); end loop; if Member_Present then SeqAlgebra.EliminateAfter (TheHeap => The_Heap, M => M.Member); end if; end Remove_Member; -- This operation uses the lexical ordering of a set. -- It might not find an element from a sequence even if the element exists. function Is_Member (The_Heap : Heap.HeapRecord; S : Seq; Given_Value : LexTokenManager.Lex_String) return Boolean is Member_Present : Boolean; N : Member_Of_Seq; Value_Of_N : LexTokenManager.Lex_String; begin Member_Present := False; N := First_Member (The_Heap => The_Heap, S => S); loop exit when SeqAlgebra.IsNullMember (M => N.Member); Value_Of_N := Value_Of_Member (The_Heap => The_Heap, M => N); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Value_Of_N, Lex_Str2 => Given_Value) = LexTokenManager.Str_Eq then Member_Present := True; exit; end if; exit when LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Value_Of_N, Lex_Str2 => Given_Value) = LexTokenManager.Str_Second; N := Next_Member (The_Heap => The_Heap, M => N); end loop; return Member_Present; end Is_Member; ----------- Set Operations on Seq representing Sets ----------- -- Assumes A and B are in numerical order, i.e. a set, in which case -- C will be set too. -- The operation is meaningless for a Seq representing a sequence. 
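-- Editorial sketch of the merge performed by Union below (illustrative
-- pseudo-Ada; Compare, Append, Advance and At_End stand in for the
-- LexTokenManager comparison and the SeqAlgebra cursor operations used in
-- the real body):
--
--    while not (At_End (M) or At_End (N)) loop
--       case Compare (Value (M), Value (N)) is
--          when Str_Eq     => Append (C, Value (M)); Advance (M); Advance (N);
--          when Str_First  => Append (C, Value (M)); Advance (M);
--          when Str_Second => Append (C, Value (N)); Advance (N);
--       end case;
--    end loop;
--    --  then append whichever tail (of A or B) has not been exhausted
--
-- so C receives each distinct member once, in the same ordering as A and B.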
procedure Union (The_Heap : in out Heap.HeapRecord; A, B : in Seq; C : out Seq) is Local_C : Seq; M, N : Member_Of_Seq; Value_Of_M, Value_Of_N : LexTokenManager.Lex_String; Last_C : Member_Of_Seq; begin Create_Seq (The_Heap => The_Heap, S => Local_C); Last_C := Before_First_Member (S => Local_C); M := First_Member (The_Heap => The_Heap, S => A); N := First_Member (The_Heap => The_Heap, S => B); loop exit when SeqAlgebra.IsNullMember (M => M.Member) or SeqAlgebra.IsNullMember (M => N.Member); Value_Of_M := Value_Of_Member (The_Heap => The_Heap, M => M); Value_Of_N := Value_Of_Member (The_Heap => The_Heap, M => N); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Value_Of_M, Lex_Str2 => Value_Of_N) = LexTokenManager.Str_Eq then Append_After (The_Heap => The_Heap, M => Last_C, Given_Value => Value_Of_M); M := Next_Member (The_Heap => The_Heap, M => M); N := Next_Member (The_Heap => The_Heap, M => N); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Value_Of_M, Lex_Str2 => Value_Of_N) = LexTokenManager.Str_First then Append_After (The_Heap => The_Heap, M => Last_C, Given_Value => Value_Of_M); M := Next_Member (The_Heap => The_Heap, M => M); else Append_After (The_Heap => The_Heap, M => Last_C, Given_Value => Value_Of_N); N := Next_Member (The_Heap => The_Heap, M => N); end if; end loop; loop exit when SeqAlgebra.IsNullMember (M => M.Member); Append_After (The_Heap => The_Heap, M => Last_C, Given_Value => Value_Of_Member (The_Heap => The_Heap, M => M)); M := Next_Member (The_Heap => The_Heap, M => M); end loop; loop exit when SeqAlgebra.IsNullMember (M => N.Member); Append_After (The_Heap => The_Heap, M => Last_C, Given_Value => Value_Of_Member (The_Heap => The_Heap, M => N)); N := Next_Member (The_Heap => The_Heap, M => N); end loop; C := Local_C; end Union; -- This operation uses the lexical ordering of a set. -- The operation is meaningless for a Seq representing a sequence. procedure Augment_Seq (The_Heap : in out Heap.HeapRecord; A, B : in Seq) is M, N : Member_Of_Seq; Value_Of_M, Value_Of_N : LexTokenManager.Lex_String; Last_M : Member_Of_Seq; begin M := First_Member (The_Heap => The_Heap, S => A); Last_M := Before_First_Member (S => A); N := First_Member (The_Heap => The_Heap, S => B); loop exit when SeqAlgebra.IsNullMember (M => M.Member) or SeqAlgebra.IsNullMember (M => N.Member); Value_Of_M := Value_Of_Member (The_Heap => The_Heap, M => M); Value_Of_N := Value_Of_Member (The_Heap => The_Heap, M => N); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Value_Of_M, Lex_Str2 => Value_Of_N) = LexTokenManager.Str_Eq then Last_M := M; M := Next_Member (The_Heap => The_Heap, M => M); N := Next_Member (The_Heap => The_Heap, M => N); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Value_Of_M, Lex_Str2 => Value_Of_N) = LexTokenManager.Str_First then Last_M := M; M := Next_Member (The_Heap => The_Heap, M => M); else Append_After (The_Heap => The_Heap, M => Last_M, Given_Value => Value_Of_N); N := Next_Member (The_Heap => The_Heap, M => N); end if; end loop; loop exit when SeqAlgebra.IsNullMember (M => N.Member); Append_After (The_Heap => The_Heap, M => Last_M, Given_Value => Value_Of_Member (The_Heap => The_Heap, M => N)); N := Next_Member (The_Heap => The_Heap, M => N); end loop; end Augment_Seq; -- This operation uses the numerical ordering of a set. -- The operation is meaningless for a Seq representing a sequence. 
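-- Editorial note (not part of the original source): Reduction below removes
-- from A every member that also occurs in B, i.e. A becomes A \ B, using the
-- same two-cursor walk as Union and Augment_Seq.  For example (invented
-- values):
--
--    --  A = {ada, c, spark},  B = {c, cobol}
--    Reduction (The_Heap, A, B);
--    --  A = {ada, spark},  B unchanged
--
-- Last_M deliberately trails one retained member behind M so that an equal
-- member can be dropped with SeqAlgebra.EliminateAfter (Last_M.Member).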
procedure Reduction (The_Heap : in out Heap.HeapRecord; A, B : in Seq) is M, N : Member_Of_Seq; Value_Of_M, Value_Of_N : LexTokenManager.Lex_String; Last_M : Member_Of_Seq; begin M := First_Member (The_Heap => The_Heap, S => A); Last_M := Before_First_Member (S => A); N := First_Member (The_Heap => The_Heap, S => B); loop exit when SeqAlgebra.IsNullMember (M => M.Member) or SeqAlgebra.IsNullMember (M => N.Member); Value_Of_M := Value_Of_Member (The_Heap => The_Heap, M => M); Value_Of_N := Value_Of_Member (The_Heap => The_Heap, M => N); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Value_Of_M, Lex_Str2 => Value_Of_N) = LexTokenManager.Str_Eq then M := Next_Member (The_Heap => The_Heap, M => M); N := Next_Member (The_Heap => The_Heap, M => N); SeqAlgebra.EliminateAfter (TheHeap => The_Heap, M => Last_M.Member); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Value_Of_M, Lex_Str2 => Value_Of_N) = LexTokenManager.Str_First then Last_M := M; M := Next_Member (The_Heap => The_Heap, M => M); else N := Next_Member (The_Heap => The_Heap, M => N); end if; end loop; end Reduction; procedure Debug (The_Heap : in Heap.HeapRecord; S : in Seq) is N : Member_Of_Seq; Is_First_Member : Boolean; procedure Print (S : in LexTokenManager.Lex_String; Is_First_Member : in out Boolean) --# derives Is_First_Member from *, --# S; is --# hide Print; begin if Is_First_Member then Ada.Text_IO.New_Line; Is_First_Member := False; end if; Ada.Text_IO.Put (Item => E_Strings.Not_SPARK.Get_String (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => S))); Ada.Text_IO.Put (Item => " "); end Print; begin --# accept F, 10, "Ineffective statement OK"; Is_First_Member := True; N := First_Member (The_Heap => The_Heap, S => S); loop exit when SeqAlgebra.IsNullMember (M => N.Member); --# accept F, 10, Is_First_Member, "Assignment is ineffective OK"; Print (S => Value_Of_Member (The_Heap => The_Heap, M => N), Is_First_Member => Is_First_Member); --# end accept; N := Next_Member (The_Heap => The_Heap, M => N); end loop; --# accept F, 35, The_Heap, "Importation of the initial value is ineffective OK" & --# F, 35, S, "Importation of the initial value is ineffective OK"; end Debug; end LexTokenManager.Seq_Algebra; spark-2012.0.deb/examiner/xmlreport.ads0000644000175000017500000002630111753202337017001 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with E_Strings; with SPARK_IO; with SPARK_XML; --# inherit E_Strings, --# FileSystem, --# SPARK_IO, --# SPARK_XML, --# SystemErrors; package XMLReport --# own State; is type Sections is ( S_Report, S_Prologue, S_Commandline, S_Compiler_Data, S_Cyclic_Requirements, S_Indexes, S_Target_Config, S_Messages, S_Meta_Files, S_Units_Not_Found, S_Warnings_Config, S_Results, S_Listing, S_Units_In_File, S_Justifications, S_Full_Justifications); Max_Meta_File_Depth : constant Integer := 50; subtype Meta_File_Id is SPARK_XML.Tag_Depth; procedure Init; --# global out State; --# derives State from ; -- Producers for simple container tags. -- These have no attributes of their own and only contain other tags. -- The only exception is the Listing tag, which contains large amounts of text, -- and is also included. procedure Start_Section (Section : in Sections; Report : in SPARK_IO.File_Type); --# global in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# Report, --# Section, --# State & --# State from *, --# Section; procedure End_Section (Section : in Sections; Report : in SPARK_IO.File_Type); --# global in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# Report, --# Section, --# State & --# State from *, --# Section; -- Simple tags, ones whose opening and closing tags are generated -- at the same time. E.g. file.txt procedure Filename (Plain_Output : in Boolean; File : in out E_Strings.T); --# global in out State; --# derives File from *, --# Plain_Output, --# State & --# State from *; procedure Index (Plain_Output : in Boolean; Idx : in out E_Strings.T); --# global in out State; --# derives Idx from *, --# Plain_Output, --# State & --# State from *; procedure Compiler_Item (Item : in E_Strings.T; Val : in E_Strings.T; Report : in SPARK_IO.File_Type); --# global in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# Item, --# Report, --# State, --# Val & --# State from *, --# Item; procedure Suppressed (Item : in out E_Strings.T); --# global in out State; --# derives Item, --# State from *, --# State; procedure Suppressed_Pragma (Item : in out E_Strings.T); --# global in out State; --# derives Item, --# State from *, --# State; procedure Option (Opt : in out E_Strings.T); --# global in out State; --# derives Opt, --# State from *, --# State; procedure Unit (Name : in E_Strings.T; Typ : in E_Strings.T; Report : in SPARK_IO.File_Type); --# global in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# Name, --# Report, --# State, --# Typ & --# State from *, --# Name, --# Typ; -- Ada Units in a file. procedure Ada_Unit (Name : in E_Strings.T; Typ : in E_Strings.T; Unit_Status : in E_Strings.T; Report : in SPARK_IO.File_Type); --# global in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# Name, --# Report, --# State, --# Typ, --# Unit_Status & --# State from *, --# Name, --# Typ, --# Unit_Status; -- Message construction. 
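-- Editorial usage sketch (assumed from the procedure set below, not taken
-- from the toolset sources): a message element is built by bracketing any
-- symbol items between a Start_Message/End_Message pair, e.g.
--
--    XMLReport.Start_Message (Class  => Class_Str,
--                             Code   => 10,
--                             Line   => 42,
--                             Offset => 7,
--                             Report => Report_File);
--    XMLReport.Symbol (Sym => Sym_Str, Sym_Num => 1, Report => Report_File);
--    XMLReport.End_Message (Report => Report_File);
--
-- where Class_Str, Sym_Str and Report_File are caller-supplied values of
-- types E_Strings.T and SPARK_IO.File_Type respectively, and the numeric
-- arguments are placeholders.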
procedure Start_Message (Class : in E_Strings.T; Code : in Integer; Line : in Integer; Offset : in Integer; Report : in SPARK_IO.File_Type); --# global in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# Class, --# Code, --# Line, --# Offset, --# Report, --# State & --# State from *, --# Class, --# Code, --# Line, --# Offset; procedure Symbol (Sym : in E_Strings.T; Sym_Num : in Integer; Report : in SPARK_IO.File_Type); --# global in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# Report, --# State, --# Sym, --# Sym_Num & --# State from *, --# Sym_Num; procedure End_Message (Report : in SPARK_IO.File_Type); --# global in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# Report, --# State & --# State from *; -- Metafile contruction procedure Start_Meta_File (Name : in E_Strings.T; Report : in SPARK_IO.File_Type); --# global in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# Name, --# Report, --# State & --# State from *, --# Name; procedure End_Meta_File (Report : in SPARK_IO.File_Type); --# global in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# Report, --# State & --# State from *; -- Results Section contents procedure Start_File (Plain_Output : in Boolean; F_Name : in E_Strings.T; Report : in SPARK_IO.File_Type); --# global in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# F_Name, --# Plain_Output, --# Report, --# State & --# State from *, --# F_Name, --# Plain_Output; procedure End_File (Report : in SPARK_IO.File_Type); --# global in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# Report, --# State & --# State from *; procedure Brief_Justifications (Matched : in Natural; Unmatched : in Natural; Report : in SPARK_IO.File_Type); --# global in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# Matched, --# Report, --# State, --# Unmatched & --# State from *, --# Matched, --# Unmatched; procedure Start_Full_Justification (Class : in E_Strings.T; Code : in Integer; Line_From : in Integer; Line_To : in E_Strings.T; Match_No : in Integer; Match_Line : in Integer; Report : in SPARK_IO.File_Type); --# global in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# Class, --# Code, --# Line_From, --# Line_To, --# Match_Line, --# Match_No, --# Report, --# State & --# State from *, --# Class, --# Code, --# Line_From, --# Line_To, --# Match_Line, --# Match_No; procedure End_Full_Justification (Report : in SPARK_IO.File_Type); --# global in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# Report, --# State & --# State from *; end XMLReport; spark-2012.0.deb/examiner/sem-wf_generic_subprogram_instantiation-wf_generic_actual_part.adb0000644000175000017500000012062711753202336031515 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- Wffs: -- 1. There must be one actual for each formal. -- 2. Each actual must be of the right type and kind (i.e. object or type). -- -- Actions: -- Wellformed formal/actual associations are written to the Dictionary -- and associated with the Instantiation_Sym. separate (Sem.Wf_Generic_Subprogram_Instantiation) procedure Wf_Generic_Actual_Part (Actual_Part_Node : in STree.SyntaxNode; Instantiation_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord; Error_Found : out Boolean) is Current_Node : STree.SyntaxNode; ----------------------------------------------------------------------------- procedure Process_Generic_Type_Parameter (Formal_Sym : in Dictionary.Symbol; Expression_Node : in STree.SyntaxNode; Instantiation_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Error_Found : in out Boolean; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Component_Data, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Expression_Node, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Heap & --# Dictionary.Dict, --# Error_Found from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Error_Found, --# Expression_Node, --# Formal_Sym, --# Instantiation_Sym, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Expression_Node, --# Formal_Sym, --# Instantiation_Sym, --# LexTokenManager.State, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# SLI.State from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Expression_Node, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Expression_Node, STree.Table) = SP_Symbols.expression; --# post STree.Table = STree.Table~; is Expression_Result : Exp_Record; Is_A_Name : Boolean; Unused_Ref_Vars : SeqAlgebra.Seq; 
Actual_Type, Formal_Type : Dictionary.Symbol; -------------------------------------------------------------------------- procedure Check_Array_Type_Compatibility (Actual_Type, Formal_Type : in Dictionary.Symbol; Instantiation_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Position : in LexTokenManager.Token_Position; Error_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Actual_Type, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Formal_Type, --# Instantiation_Sym, --# LexTokenManager.State, --# Position, --# Scope, --# SPARK_IO.File_Sys & --# Error_Found from *, --# Actual_Type, --# Dictionary.Dict, --# Formal_Type, --# Instantiation_Sym, --# Scope; is Actual_Type_Local, Formal_Type_Local : Dictionary.Symbol; Actual_Type_Iterator, Formal_Type_Iterator : Dictionary.Iterator; begin -- Check the kind of array -- Both formal and actual must be same constrained/unconstrained if Dictionary.Is_Constrained_Array_Type_Mark (Actual_Type, Scope) and then Dictionary.Is_Unconstrained_Array_Type_Mark (Formal_Type, Scope) then Error_Found := True; ErrorHandler.Semantic_Error2 (Err_Num => 649, Reference => ErrorHandler.No_Reference, Position => Position, Id_Str1 => Dictionary.GetSimpleName (Actual_Type), Id_Str2 => Dictionary.GetSimpleName (Formal_Type)); end if; -- reverse check if Dictionary.Is_Unconstrained_Array_Type_Mark (Actual_Type, Scope) and then Dictionary.Is_Constrained_Array_Type_Mark (Formal_Type, Scope) then Error_Found := True; ErrorHandler.Semantic_Error2 (Err_Num => 650, Reference => ErrorHandler.No_Reference, Position => Position, Id_Str1 => Dictionary.GetSimpleName (Actual_Type), Id_Str2 => Dictionary.GetSimpleName (Formal_Type)); end if; -- Check the type of the component of the array Actual_Type_Local := Dictionary.GetArrayComponent (Actual_Type); Formal_Type_Local := Dictionary.GetArrayComponent (Formal_Type); if Dictionary.TypeIsGeneric (Formal_Type_Local) then Formal_Type_Local := Dictionary.ActualOfGenericFormalType (Formal_Type_Local, Instantiation_Sym); if Dictionary.Is_Null_Symbol (Formal_Type_Local) then ErrorHandler.Semantic_Error2 (Err_Num => 645, Reference => ErrorHandler.No_Reference, Position => Position, Id_Str1 => Dictionary.GetSimpleName (Actual_Type_Local), Id_Str2 => Dictionary.GetSimpleName (Dictionary.GetArrayComponent (Formal_Type))); Error_Found := True; end if; end if; if not Dictionary.Is_Null_Symbol (Formal_Type_Local) and then not Dictionary.Types_Are_Equal (Left_Symbol => Actual_Type_Local, Right_Symbol => Formal_Type_Local, Full_Range_Subtype => True) then ErrorHandler.Semantic_Error2 (Err_Num => 645, Reference => ErrorHandler.No_Reference, Position => Position, Id_Str1 => Dictionary.GetSimpleName (Actual_Type_Local), Id_Str2 => Dictionary.GetSimpleName (Formal_Type_Local)); Error_Found := True; end if; -- Check the type of the index of the array Actual_Type_Iterator := Dictionary.FirstArrayIndex (Actual_Type); Formal_Type_Iterator := Dictionary.FirstArrayIndex (Formal_Type); loop exit when Actual_Type_Iterator = Dictionary.NullIterator and then Formal_Type_Iterator = Dictionary.NullIterator; if Actual_Type_Iterator = Dictionary.NullIterator then ErrorHandler.Semantic_Error2 (Err_Num => 647, Reference => ErrorHandler.No_Reference, Position => Position, Id_Str1 => Dictionary.GetSimpleName (Actual_Type), 
Id_Str2 => Dictionary.GetSimpleName (Formal_Type)); Error_Found := True; exit; end if; if Formal_Type_Iterator = Dictionary.NullIterator then ErrorHandler.Semantic_Error2 (Err_Num => 648, Reference => ErrorHandler.No_Reference, Position => Position, Id_Str1 => Dictionary.GetSimpleName (Actual_Type), Id_Str2 => Dictionary.GetSimpleName (Formal_Type)); Error_Found := True; exit; end if; Actual_Type_Local := Dictionary.CurrentSymbol (Actual_Type_Iterator); Formal_Type_Local := Dictionary.CurrentSymbol (Formal_Type_Iterator); if Dictionary.TypeIsGeneric (Formal_Type_Local) then Formal_Type_Local := Dictionary.ActualOfGenericFormalType (Formal_Type_Local, Instantiation_Sym); if Dictionary.Is_Null_Symbol (Formal_Type_Local) then ErrorHandler.Semantic_Error2 (Err_Num => 646, Reference => ErrorHandler.No_Reference, Position => Position, Id_Str1 => Dictionary.GetSimpleName (Actual_Type_Local), Id_Str2 => Dictionary.GetSimpleName (Dictionary.CurrentSymbol (Formal_Type_Iterator))); Error_Found := True; end if; end if; if not Dictionary.Is_Null_Symbol (Formal_Type_Local) and then not Dictionary.Types_Are_Equal (Left_Symbol => Actual_Type_Local, Right_Symbol => Formal_Type_Local, Full_Range_Subtype => True) then ErrorHandler.Semantic_Error2 (Err_Num => 646, Reference => ErrorHandler.No_Reference, Position => Position, Id_Str1 => Dictionary.GetSimpleName (Actual_Type_Local), Id_Str2 => Dictionary.GetSimpleName (Formal_Type_Local)); Error_Found := True; end if; Actual_Type_Iterator := Dictionary.NextSymbol (Actual_Type_Iterator); Formal_Type_Iterator := Dictionary.NextSymbol (Formal_Type_Iterator); end loop; end Check_Array_Type_Compatibility; begin -- Process_Generic_Type_Parameter -- The actual parameter takes the form of an expression but must actually be a typemark. -- We can use WalkName to recover the symbol, then check that it is a type and that it -- compatible with generic type. Matching pairs are added to the dictionary and associated -- with the symbol of the instantiated unit. 
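-- Editorial sketch (not part of the original source): the association being
-- checked here is the one created by an instantiation such as
--
--    generic
--       type Element is private;
--    procedure Exchange (X, Y : in out Element);
--
--    procedure Exchange_Integer is new Exchange (Element => Integer);
--
-- where the actual part "Integer" arrives as an expression node, is resolved
-- to a type mark, checked for compatibility with the formal Element, and the
-- pair (Element, Integer) is then recorded against the instantiation symbol.
-- The unit and parameter names above are invented for illustration.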
SeqAlgebra.CreateSeq (The_Heap, Unused_Ref_Vars); Walk_Name (Exp_Node => Expression_Node, Scope => Scope, Component_Data => Component_Data, The_Heap => The_Heap, Result => Expression_Result, Is_A_Name => Is_A_Name, Ref_Var_Param => Unused_Ref_Vars); SeqAlgebra.DisposeOfSeq (The_Heap, Unused_Ref_Vars); if not Is_A_Name or else Expression_Result.Sort /= Is_Type_Mark then -- we have a general expression which cannot possibly be a type mark or -- we have a name but it's not a type mark ErrorHandler.Semantic_Error (Err_Num => 95, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Expression_Node), Id_Str => LexTokenManager.Null_String); Error_Found := True; else -- we have a type mark Actual_Type := Expression_Result.Type_Symbol; Formal_Type := Dictionary.GetType (Formal_Sym); if Dictionary.IsArrayTypeMark (TheSymbol => Formal_Type, Scope => Scope) and then Dictionary.IsArrayTypeMark (TheSymbol => Actual_Type, Scope => Scope) then Check_Array_Type_Compatibility (Actual_Type => Actual_Type, Formal_Type => Formal_Type, Instantiation_Sym => Instantiation_Sym, Scope => Scope, Position => Node_Position (Node => Expression_Node), Error_Found => Error_Found); if Error_Found then Actual_Type := Dictionary.GetUnknownTypeMark; end if; else if not Dictionary.IsValidGenericTypeAssociation (Formal => Formal_Type, Actual => Actual_Type, Scope => Scope) then ErrorHandler.Semantic_Error2 (Err_Num => 636, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Expression_Node), Id_Str1 => Dictionary.GetSimpleName (Actual_Type), Id_Str2 => Dictionary.GetSimpleName (Formal_Type)); Error_Found := True; Actual_Type := Dictionary.GetUnknownTypeMark; end if; end if; -- above call will change Actual_Type to the UnknownType if there is an error Dictionary.AddGenericTypeAssociation (SubprogramOrPackage => Instantiation_Sym, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Expression_Node), End_Position => Node_Position (Node => Expression_Node)), FormalSym => Formal_Type, ActualSym => Actual_Type); end if; end Process_Generic_Type_Parameter; ----------------------------------------------------------------------------- procedure Process_Generic_Object_Parameter (Formal_Sym : in Dictionary.Symbol; Expression_Node : in STree.SyntaxNode; Instantiation_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Error_Found : in out Boolean; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# out Aggregate_Stack.State; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# STree.Table, --# The_Heap from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Expression_Node, --# Formal_Sym, --# Instantiation_Sym, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Expression_Node, --# Formal_Sym, --# Instantiation_Sym, --# 
LexTokenManager.State, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# Error_Found, --# Statistics.TableUsage from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Expression_Node, --# Formal_Sym, --# Instantiation_Sym, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Expression_Node, STree.Table) = SP_Symbols.expression; --# post STree.Table = STree.Table~; is Actual_Type : Dictionary.Symbol; Unused_Component_Data : ComponentManager.ComponentData; Unwanted_Seq : SeqAlgebra.Seq; Exp_Result : Exp_Record; Value, Constant_Name : LexTokenManager.Lex_String; Constant_Name_String : E_Strings.T; Constant_Sym : Dictionary.Symbol := Dictionary.NullSymbol; Constant_Location : Dictionary.Location; Fun_Info : Exp_Record := Null_Exp_Record; begin Actual_Type := Dictionary.GetType (Formal_Sym); -- The Actual_Type we have got here might be a predefined Ada type such as Integer. -- If so, then that is the type we expect the actual expression to be; however, -- if the type is a generic type then we need to look up the actual associated with it: if Dictionary.TypeIsGeneric (Actual_Type) then Actual_Type := Dictionary.ActualOfGenericFormalType (Actual_Type, Instantiation_Sym); end if; -- walk expression SeqAlgebra.CreateSeq (The_Heap, Unwanted_Seq); ComponentManager.Initialise (Unused_Component_Data); --# accept Flow, 10, Unused_Component_Data, "Expected ineffective assignment"; Walk_Expression_P.Walk_Expression (Exp_Node => Expression_Node, Scope => Scope, Type_Context => Actual_Type, Context_Requires_Static => False, Ref_Var => Unwanted_Seq, Result => Exp_Result, Component_Data => Unused_Component_Data, The_Heap => The_Heap); --# end accept; SeqAlgebra.DisposeOfSeq (The_Heap, Unwanted_Seq); Maths.StorageRep (Exp_Result.Value, Value); -- scalar value if needed later -- check constant if Exp_Result.Is_Constant then if Exp_Result = Unknown_Type_Record then Error_Found := True; else -- Fundamentally ok to add -- mark any errors (both to avoid use of invalid instantiation and to supress rule generation) if Exp_Result.Errors_In_Expression then Error_Found := True; end if; --# accept Flow, 10, Fun_Info, "Ineffective assignment here OK"; Wf_Argument_Association (Node => STree.NullNode, Scope => Scope, Param_Type => Actual_Type, Position => Node_Position (Node => Expression_Node), Exp_Result => Exp_Result, Fun_Info => Fun_Info, Error_Found => Error_Found); --# end accept; -- first add a constant declaration to local scope of Instantiation_Sym -- synthetic constant can have same name as formal it is associated with. -- Create a unique name for the actual object parameter: -- ___. 
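-- Editorial note (not part of the original source): the statements below
-- synthesise the constant name as
--
--    <instantiation simple name> & "__" & <formal simple name> & "_"
--
-- so, for example, an instantiation Bounded_Stack of a generic with object
-- formal Max_Size would yield the synthetic constant Bounded_Stack__Max_Size_
-- (example names invented for illustration).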
Constant_Name_String := LexTokenManager.Lex_String_To_String (Dictionary.GetSimpleName (Instantiation_Sym)); E_Strings.Append_String (E_Str => Constant_Name_String, Str => "__"); E_Strings.Append_Examiner_String (E_Str1 => Constant_Name_String, E_Str2 => LexTokenManager.Lex_String_To_String (Dictionary.GetSimpleName (Formal_Sym))); E_Strings.Append_String (E_Str => Constant_Name_String, Str => "_"); LexTokenManager.Insert_Examiner_String (Str => Constant_Name_String, Lex_Str => Constant_Name); -- and can be "located" at expression node Constant_Location := Dictionary.Location' (Start_Position => Node_Position (Node => Expression_Node), End_Position => Node_Position (Node => Expression_Node)); if Dictionary.IsArrayTypeMark (Actual_Type, Scope) then Value := LexTokenManager.Null_String; elsif Dictionary.IsPrivateTypeMark (Actual_Type, Scope) then Exp_Result.Is_Static := False; elsif Dictionary.IsRecordTypeMark (Actual_Type, Scope) or else Dictionary.IsProtectedTypeMark (Actual_Type) then Exp_Result.Is_Static := False; Value := LexTokenManager.Null_String; end if; Dictionary.Add_Constant (Name => Constant_Name, The_Type => Actual_Type, Static => Exp_Result.Is_Static, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Constant_Location, Value => Value, Exp_Is_Wellformed => not Exp_Result.Errors_In_Expression, Exp_Node => STree.NodeToRef (Expression_Node), Constant_Sym => Constant_Sym); STree.Add_Node_Symbol (Node => Expression_Node, Sym => Constant_Sym); Dictionary.AddGenericObjectAssociation (SubprogramOrPackage => Instantiation_Sym, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Expression_Node), End_Position => Node_Position (Node => Expression_Node)), FormalSym => Formal_Sym, ActualSym => Constant_Sym); end if; else Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 640, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Expression_Node), Id_Str => LexTokenManager.Null_String); end if; end Process_Generic_Object_Parameter; ----------------------------------------------------------------------------- procedure Handle_Named_Association (Named_Argument_Assoc_Node : in STree.SyntaxNode; Instantiation_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Error_Found : in out Boolean; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# Error_Found, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Error_Found, --# Instantiation_Sym, --# LexTokenManager.State, --# Named_Argument_Assoc_Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Found, 
--# Instantiation_Sym, --# LexTokenManager.State, --# Named_Argument_Assoc_Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Named_Argument_Assoc_Node, STree.Table) = SP_Symbols.named_argument_association; --# post STree.Table = STree.Table~; is Formal_It : Dictionary.Iterator; Generic_Sym, Formal_Sym : Dictionary.Symbol; Formal_Kind : Dictionary.Generic_Parameter_Kind; Expression_Node : STree.SyntaxNode; begin Generic_Sym := Dictionary.GetGenericOfInstantiation (PackageOrSubProgram => Instantiation_Sym); -- Check that each named assoication appears and appears once only Check_Named_Association (The_Formals => Generic_Sym, Scope => Scope, Named_Argument_Assoc_Node => Named_Argument_Assoc_Node); -- now process each formal in turn Formal_It := Dictionary.FirstGenericFormalParameter (Generic_Sym); while not Dictionary.IsNullIterator (Formal_It) loop --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Named_Argument_Assoc_Node, STree.Table) = SP_Symbols.named_argument_association; Formal_Sym := Dictionary.CurrentSymbol (Formal_It); Find_Actual_Node (For_Formal => Formal_Sym, Named_Argument_Assoc_Node => Named_Argument_Assoc_Node, Expression_Node => Expression_Node); -- ASSUME Expression_Node = expression OR NULL --# check Syntax_Node_Type (Expression_Node, STree.Table) = SP_Symbols.expression or --# Expression_Node = STree.NullNode; if Syntax_Node_Type (Node => Expression_Node) = SP_Symbols.expression then -- ASSUME Expression_Node = expression -- wff them together here Formal_Kind := Dictionary.GetGenericFormalParameterKind (Formal_Sym); case Formal_Kind is when Dictionary.Generic_Type_Parameter => Process_Generic_Type_Parameter (Formal_Sym => Formal_Sym, Expression_Node => Expression_Node, Instantiation_Sym => Instantiation_Sym, Scope => Scope, Error_Found => Error_Found, Component_Data => Component_Data, The_Heap => The_Heap); when Dictionary.Generic_Object_Parameter => Process_Generic_Object_Parameter (Formal_Sym => Formal_Sym, Expression_Node => Expression_Node, Instantiation_Sym => Instantiation_Sym, Scope => Scope, Error_Found => Error_Found, The_Heap => The_Heap); end case; end if; Formal_It := Dictionary.NextSymbol (Formal_It); end loop; end Handle_Named_Association; ----------------------------------------------------------------------------- procedure Handle_Positional_Association (Positional_Argument_Assoc_Node : in STree.SyntaxNode; Instantiation_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Error_Found : in out Boolean; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# Error_Found, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Error_Found, --# Instantiation_Sym, --# LexTokenManager.State, --# Positional_Argument_Assoc_Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys 
from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Found, --# Instantiation_Sym, --# LexTokenManager.State, --# Positional_Argument_Assoc_Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Positional_Argument_Assoc_Node, STree.Table) = SP_Symbols.positional_argument_association; --# post STree.Table = STree.Table~; is Formal_It : Dictionary.Iterator; Actual_It : STree.Iterator; Generic_Sym, Formal_Sym : Dictionary.Symbol; Formal_Kind : Dictionary.Generic_Parameter_Kind; Expression_Node : STree.SyntaxNode; begin Generic_Sym := Dictionary.GetGenericOfInstantiation (PackageOrSubProgram => Instantiation_Sym); Expression_Node := Positional_Argument_Assoc_Node; -- default value in case loop below is skipped Actual_It := Find_First_Node (Node_Kind => SP_Symbols.expression, From_Root => Positional_Argument_Assoc_Node, In_Direction => STree.Down); Formal_It := Dictionary.FirstGenericFormalParameter (Generic_Sym); while not Dictionary.IsNullIterator (Formal_It) and then not STree.IsNull (Actual_It) loop Formal_Sym := Dictionary.CurrentSymbol (Formal_It); Expression_Node := Get_Node (It => Actual_It); --# assert Syntax_Node_Type (Expression_Node, STree.Table) = SP_Symbols.expression and --# Expression_Node = Get_Node (Actual_It) and --# STree.Table = STree.Table~ ; Formal_Kind := Dictionary.GetGenericFormalParameterKind (Formal_Sym); case Formal_Kind is when Dictionary.Generic_Type_Parameter => Process_Generic_Type_Parameter (Formal_Sym => Formal_Sym, Expression_Node => Expression_Node, Instantiation_Sym => Instantiation_Sym, Scope => Scope, Error_Found => Error_Found, Component_Data => Component_Data, The_Heap => The_Heap); when Dictionary.Generic_Object_Parameter => Process_Generic_Object_Parameter (Formal_Sym => Formal_Sym, Expression_Node => Expression_Node, Instantiation_Sym => Instantiation_Sym, Scope => Scope, Error_Found => Error_Found, The_Heap => The_Heap); end case; Formal_It := Dictionary.NextSymbol (Formal_It); Actual_It := STree.NextNode (Actual_It); end loop; -- completeness check, both loops should run out at same time if not Dictionary.IsNullIterator (Formal_It) or else not STree.IsNull (Actual_It) then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 635, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Expression_Node), Id_Str => Dictionary.GetSimpleName (Generic_Sym)); end if; end Handle_Positional_Association; begin -- Wf_Generic_Actual_Part Error_Found := False; Current_Node := Child_Node (Current_Node => Child_Node (Current_Node => Actual_Part_Node)); -- ASSUME Current_Node = named_argument_association OR positional_argument_association if Syntax_Node_Type (Node => Current_Node) = SP_Symbols.named_argument_association then -- ASSUME Current_Node = named_argument_association Handle_Named_Association (Named_Argument_Assoc_Node => Current_Node, Instantiation_Sym => Instantiation_Sym, Scope => Scope, Error_Found => Error_Found, Component_Data => Component_Data, The_Heap => The_Heap); elsif Syntax_Node_Type (Node => Current_Node) = SP_Symbols.positional_argument_association then -- ASSUME Current_Node = positional_argument_association Handle_Positional_Association (Positional_Argument_Assoc_Node => Current_Node, Instantiation_Sym => Instantiation_Sym, Scope => Scope, Error_Found => Error_Found, Component_Data => 
Component_Data, The_Heap => The_Heap); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = named_argument_association OR " & "positional_argument_association in Wf_Generic_Actual_Part"); end if; end Wf_Generic_Actual_Part; spark-2012.0.deb/examiner/flowanalyser-flowanalyse-analyserelations.adb0000644000175000017500000003707511753202336025340 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with ComponentErrors; separate (FlowAnalyser.FlowAnalyse) procedure AnalyseRelations is TheErrorHeap : ComponentErrors.HeapOfErrors; procedure CheckExpressions --# global in CommandLineData.Content; --# in ComponentData; --# in Dictionary.Dict; --# in EndPosition; --# in ExpnLocations; --# in ExpSeqOfExports; --# in ExpSeqOfImports; --# in InnerExpns; --# in KindDictionary; --# in LexTokenManager.State; --# in OneStableExpnSeq; --# in OtherStableExpnSeq; --# in ParamDictionary; --# in S; --# in Scope; --# in SeqOfInitVars; --# in StmtLocations; --# in STree.Table; --# in SubprogSym; --# in ZeroStableExpnSeq; --# in out DataFlowErrorFoundLocal; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out TheErrorHeap; --# in out TheHeap; --# derives DataFlowErrorFoundLocal, --# Statistics.TableUsage, --# TheErrorHeap, --# TheHeap from *, --# ComponentData, --# Dictionary.Dict, --# EndPosition, --# ExpnLocations, --# ExpSeqOfExports, --# ExpSeqOfImports, --# InnerExpns, --# KindDictionary, --# ParamDictionary, --# S, --# SeqOfInitVars, --# StmtLocations, --# STree.Table, --# SubprogSym, --# TheErrorHeap, --# TheHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ComponentData, --# Dictionary.Dict, --# EndPosition, --# ErrorHandler.Error_Context, --# ExpnLocations, --# ExpSeqOfExports, --# ExpSeqOfImports, --# InnerExpns, --# KindDictionary, --# LexTokenManager.State, --# OneStableExpnSeq, --# OtherStableExpnSeq, --# ParamDictionary, --# S, --# Scope, --# SeqOfInitVars, --# SPARK_IO.File_Sys, --# StmtLocations, --# STree.Table, --# SubprogSym, --# TheErrorHeap, --# TheHeap, --# ZeroStableExpnSeq; is separate; procedure CheckUsages --# global in CommandLineData.Content; --# in ComponentData; --# in Dictionary.Dict; --# in EndPosition; --# in ExpSeqOfExports; --# in ExpSeqOfImports; --# in InStreamsOfShareableProtectedVars; --# in LexTokenManager.State; --# in ReferencedVars; --# in S; --# in Scope; --# in SeqOfInitVars; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out 
TheErrorHeap; --# in out TheHeap; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ComponentData, --# Dictionary.Dict, --# EndPosition, --# ErrorHandler.Error_Context, --# ExpSeqOfExports, --# ExpSeqOfImports, --# InStreamsOfShareableProtectedVars, --# LexTokenManager.State, --# ReferencedVars, --# S, --# Scope, --# SeqOfInitVars, --# SPARK_IO.File_Sys, --# TheErrorHeap, --# TheHeap & --# Statistics.TableUsage, --# TheErrorHeap, --# TheHeap from *, --# ComponentData, --# Dictionary.Dict, --# EndPosition, --# ExpSeqOfExports, --# ExpSeqOfImports, --# InStreamsOfShareableProtectedVars, --# ReferencedVars, --# S, --# SeqOfInitVars, --# TheErrorHeap, --# TheHeap; is separate; procedure CheckDependencies --# global in CommandLineData.Content; --# in ComponentData; --# in DependencyRelation; --# in Dictionary.Dict; --# in EndPosition; --# in ExpSeqOfExports; --# in LexTokenManager.State; --# in S; --# in Scope; --# in SeqOfExports; --# in SeqOfImports; --# in SeqOfInitVars; --# in SubprogSym; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out TheErrorHeap; --# in out TheHeap; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ComponentData, --# DependencyRelation, --# Dictionary.Dict, --# EndPosition, --# ErrorHandler.Error_Context, --# ExpSeqOfExports, --# LexTokenManager.State, --# S, --# Scope, --# SeqOfExports, --# SeqOfImports, --# SeqOfInitVars, --# SPARK_IO.File_Sys, --# SubprogSym, --# TheErrorHeap, --# TheHeap & --# Statistics.TableUsage, --# TheErrorHeap, --# TheHeap from *, --# ComponentData, --# DependencyRelation, --# Dictionary.Dict, --# EndPosition, --# ExpSeqOfExports, --# S, --# SeqOfExports, --# SeqOfImports, --# SeqOfInitVars, --# SubprogSym, --# TheErrorHeap, --# TheHeap; is separate; procedure CheckUnused --# global in CommandLineData.Content; --# in ComponentData; --# in Dictionary.Dict; --# in EndPosition; --# in LexTokenManager.State; --# in S; --# in SubprogSym; --# in TheHeap; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ComponentData, --# Dictionary.Dict, --# EndPosition, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# S, --# SPARK_IO.File_Sys, --# SubprogSym, --# TheHeap; is separate; procedure ReportVarsUsedAsConsts --# global in CommandLineData.Content; --# in ComponentData; --# in Dictionary.Dict; --# in EndPosition; --# in LexTokenManager.State; --# in VarsUsedAsConstants; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out TheErrorHeap; --# in out TheHeap; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ComponentData, --# Dictionary.Dict, --# EndPosition, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# TheErrorHeap, --# TheHeap, --# VarsUsedAsConstants & --# Statistics.TableUsage, --# TheErrorHeap, --# TheHeap from *, --# ComponentData, --# Dictionary.Dict, --# EndPosition, --# TheErrorHeap, --# TheHeap, --# VarsUsedAsConstants; is MemberOfConstVars : SeqAlgebra.MemberOfSeq; ConstVarRep : Natural; ConstVar : Dictionary.Symbol; NewError : Natural; begin MemberOfConstVars := SeqAlgebra.FirstMember (TheHeap, VarsUsedAsConstants); while not SeqAlgebra.IsNullMember (MemberOfConstVars) loop ConstVarRep := SeqAlgebra.Value_Of_Member 
(The_Heap => TheHeap, M => MemberOfConstVars); ConstVar := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (ConstVarRep)); if Dictionary.IsSubcomponent (ConstVar) then ComponentErrors.CreateError (TheErrorHeap, TheHeap, ComponentErrors.SemanticWarning, 403, EndPosition, Dictionary.NullSymbol, NewError); ComponentManager.AddError (TheHeap, TheErrorHeap, ComponentData, ComponentManager.GetComponentNode (ComponentData, ConstVar), NewError); else ErrorHandler.Semantic_Warning (Err_Num => 403, Position => EndPosition, Id_Str => Dictionary.GetSimpleName (ConstVar)); end if; MemberOfConstVars := SeqAlgebra.NextMember (TheHeap, MemberOfConstVars); end loop; end ReportVarsUsedAsConsts; procedure MergeAndHandleErrors --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Scope; --# in TheErrorHeap; --# in out ComponentData; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives ComponentData, --# Statistics.TableUsage, --# TheHeap from *, --# ComponentData, --# TheErrorHeap, --# TheHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ComponentData, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# TheErrorHeap, --# TheHeap; is separate; begin -- AnalyseRelations ComponentErrors.Initialise (TheErrorHeap); CheckExpressions; CheckUsages; CheckDependencies; CheckUnused; ReportVarsUsedAsConsts; ComponentErrors.ReportUsage (TheErrorHeap); MergeAndHandleErrors; end AnalyseRelations; spark-2012.0.deb/examiner/sem-wf_package_declaration.adb0000644000175000017500000005114011753202336022140 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- Overview: -- Checks a Package Declaration for Sem on down pass through -- TreeProcessor. Starts at node package_declaration. May directly raise -- errors for: re-declaration of package identifier. Other errors may be raised -- indirectly by wf_package_specification, wf_inherit_clause and -- wf_context_clause which are called from here. 
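-- Editor's illustrative sketch (not part of the original SPARK sources; kept
-- as a comment so the unit itself is unchanged): the kind of re-declaration
-- this wellformation check reports.  The package names below are invented.
--
--    package Stacks is                   -- first declaration: accepted
--       procedure Push (X : in Integer);
--    end Stacks;
--
--    package Stacks is                   -- identifier "Stacks" is already
--    end Stacks;                         -- declared, so this is rejected
--
-- The duplicate identifier is detected via Dictionary.LookupItem in the body
-- below and reported through ErrorHandler.Semantic_Error.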
-------------------------------------------------------------------------------- separate (Sem) procedure Wf_Package_Declaration (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) is type Enclosing_Scope_Types is (In_Library, In_Package, In_Procedure); Enclosing_Scope_Type : Enclosing_Scope_Types; Ident_Str : LexTokenManager.Lex_String; Context_Node, Inherit_Node, Generic_Formal_Part_Node, Spec_Node, Ident_Node : STree.SyntaxNode; Pack_Sym : Dictionary.Symbol := Dictionary.NullSymbol; Generic_Unit : Dictionary.Symbol; Private_Package_Declaration, Child_Package_Declaration : Boolean; Valid_Name : Boolean := True; --------------------------------------------------------------- procedure Get_Package_Declaration_Key_Nodes (Node : in STree.SyntaxNode; Context_Node : out STree.SyntaxNode; Inherit_Node : out STree.SyntaxNode; Generic_Formal_Part_Node : out STree.SyntaxNode; Spec_Node : out STree.SyntaxNode; Ident_Node : out STree.SyntaxNode; Private_Package_Declaration : out Boolean; Child_Package_Declaration : out Boolean) --# global in STree.Table; --# derives Child_Package_Declaration, --# Context_Node, --# Generic_Formal_Part_Node, --# Ident_Node, --# Inherit_Node, --# Private_Package_Declaration, --# Spec_Node from Node, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.package_declaration or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.private_package_declaration or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.generic_package_declaration; --# post (Syntax_Node_Type (Context_Node, STree.Table) = SP_Symbols.context_clause or Context_Node = STree.NullNode) and --# (Syntax_Node_Type (Inherit_Node, STree.Table) = SP_Symbols.inherit_clause or Inherit_Node = STree.NullNode) and --# (Syntax_Node_Type (Generic_Formal_Part_Node, STree.Table) = SP_Symbols.generic_formal_part --# or Generic_Formal_Part_Node = STree.NullNode) and --# Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.package_specification and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; is separate; --------------------------------------------------------------- procedure Find_Enclosing_Scope_Type (Scope : in Dictionary.Scopes; Enclosing_Scope_Type : out Enclosing_Scope_Types) --# global in Dictionary.Dict; --# derives Enclosing_Scope_Type from Dictionary.Dict, --# Scope; is begin if Dictionary.IsGlobalScope (Scope) then Enclosing_Scope_Type := In_Library; elsif Dictionary.IsPackage (Dictionary.GetRegion (Scope)) then Enclosing_Scope_Type := In_Package; else Enclosing_Scope_Type := In_Procedure; end if; end Find_Enclosing_Scope_Type; --------------------------------------------------------------- function Is_Not_Refinement_Announcement (Sym : Dictionary.Symbol; Enclosing_Scope_Type : Enclosing_Scope_Types) return Boolean --# global in Dictionary.Dict; is begin return Enclosing_Scope_Type /= In_Package or else Dictionary.GetContext (Sym) /= Dictionary.ProofContext; end Is_Not_Refinement_Announcement; --------------------------------------------------------------- procedure Add_Child (Root_Id_Node : in STree.SyntaxNode; Is_Private : in Boolean; Scope : in Dictionary.Scopes; Child_Sym : out Dictionary.Symbol; Child_Str : out LexTokenManager.Lex_String) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in 
out STree.Table; --# derives Child_Str from CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Root_Id_Node, --# Scope, --# STree.Table & --# Child_Sym, --# Dictionary.Dict from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Is_Private, --# LexTokenManager.State, --# Root_Id_Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Is_Private, --# LexTokenManager.State, --# Root_Id_Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Is_Private, --# LexTokenManager.State, --# Root_Id_Node, --# Scope, --# STree.Table & --# STree.Table from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Is_Private, --# LexTokenManager.State, --# Root_Id_Node, --# Scope; --# pre Syntax_Node_Type (Root_Id_Node, STree.Table) = SP_Symbols.identifier; --# post STree.Table = STree.Table~; is separate; ---------------------------------------------------------------------- procedure Wf_Package_Specification (Node : in STree.SyntaxNode; Ident_Str : in LexTokenManager.Lex_String; Pack_Sym : in Dictionary.Symbol; Current_Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Pack_Sym, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Str, --# LexTokenManager.State, --# Node, --# Pack_Sym, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.package_specification; --# post STree.Table = STree.Table~; is separate; begin -- Wf_Package_Declaration Get_Package_Declaration_Key_Nodes (Node => Node, Context_Node => Context_Node, Inherit_Node => Inherit_Node, Generic_Formal_Part_Node => Generic_Formal_Part_Node, Spec_Node => Spec_Node, Ident_Node => Ident_Node, Private_Package_Declaration => Private_Package_Declaration, Child_Package_Declaration => Child_Package_Declaration); Ident_Str := Node_Lex_String (Node => Ident_Node); -- tells us where package is being declared Find_Enclosing_Scope_Type (Scope => Current_Scope, Enclosing_Scope_Type => Enclosing_Scope_Type); if CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83 and then Child_Package_Declaration and then Enclosing_Scope_Type = In_Library then if Syntax_Node_Type (Node => Node) = SP_Symbols.generic_package_declaration then -- 
ASSUME Node = generic_package_declaration ErrorHandler.Semantic_Error (Err_Num => 610, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => LexTokenManager.Null_String); elsif Syntax_Node_Type (Node => Node) = SP_Symbols.package_declaration or else Syntax_Node_Type (Node => Node) = SP_Symbols.private_package_declaration then -- ASSUME Node = package_declaration OR private_package_declaration Add_Child (Root_Id_Node => Ident_Node, Is_Private => Private_Package_Declaration, Scope => Current_Scope, Child_Sym => Pack_Sym, Child_Str => Ident_Str); end if; -- if Pack_Sym is null then something went wrong when we added the child so we need to suppress -- any further analysis of the package specification Valid_Name := not Dictionary.Is_Null_Symbol (Pack_Sym); else if CommandLineData.Content.Language_Profile = CommandLineData.SPARK83 then -- check that syntax conforms if Child_Package_Declaration or else Private_Package_Declaration or else Syntax_Node_Type (Node => Node) = SP_Symbols.generic_package_declaration then ErrorHandler.Semantic_Error (Err_Num => 610, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => LexTokenManager.Null_String); Private_Package_Declaration := False; end if; elsif Child_Package_Declaration and then Enclosing_Scope_Type /= In_Library then ErrorHandler.Semantic_Error (Err_Num => 614, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => LexTokenManager.Null_String); end if; Pack_Sym := Dictionary.LookupItem (Name => Ident_Str, Scope => Current_Scope, Context => Dictionary.ProofContext, Full_Package_Name => False); --# assert STree.Table = STree.Table~ and --# (Syntax_Node_Type (Context_Node, STree.Table) = SP_Symbols.context_clause or Context_Node = STree.NullNode) and --# (Syntax_Node_Type (Inherit_Node, STree.Table) = SP_Symbols.inherit_clause or Inherit_Node = STree.NullNode) and --# (Syntax_Node_Type (Generic_Formal_Part_Node, STree.Table) = SP_Symbols.generic_formal_part --# or Generic_Formal_Part_Node = STree.NullNode) and --# Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.package_specification and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; if not Dictionary.Is_Null_Symbol (Pack_Sym) and then Is_Not_Refinement_Announcement (Sym => Pack_Sym, Enclosing_Scope_Type => Enclosing_Scope_Type) then ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); Valid_Name := False; else if not Dictionary.Is_Null_Symbol (Pack_Sym) then STree.Set_Node_Lex_String (Sym => Pack_Sym, Node => Ident_Node); end if; --# assert STree.Table = STree.Table~ and --# (Syntax_Node_Type (Context_Node, STree.Table) = SP_Symbols.context_clause or Context_Node = STree.NullNode) and --# (Syntax_Node_Type (Inherit_Node, STree.Table) = SP_Symbols.inherit_clause or Inherit_Node = STree.NullNode) and --# (Syntax_Node_Type (Generic_Formal_Part_Node, STree.Table) = SP_Symbols.generic_formal_part --# or Generic_Formal_Part_Node = STree.NullNode) and --# Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.package_specification and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; if Private_Package_Declaration then -- root level private package Dictionary.AddPrivatePackage (Name => Ident_Str, Comp_Unit => ContextManager.Ops.Current_Unit, Specification => Dictionary.Location'(Start_Position => Node_Position (Node =>
Ident_Node), End_Position => Node_Position (Node => Ident_Node)), Scope => Current_Scope, ThePackage => Pack_Sym); else Dictionary.Add_Package (Name => Ident_Str, Comp_Unit => ContextManager.Ops.Current_Unit, Specification => Dictionary.Location'(Start_Position => Node_Position (Node => Ident_Node), End_Position => Node_Position (Node => Ident_Node)), Scope => Current_Scope, ThePackage => Pack_Sym); end if; end if; end if; -- wff the package specification iff its declaration is valid if Valid_Name then -- ASSUME Inherit_Node = inherit_clause OR NULL if Syntax_Node_Type (Node => Inherit_Node) = SP_Symbols.inherit_clause then -- ASSUME Inherit_Node = inherit_clause Wf_Inherit_Clause (Node => Inherit_Node, Comp_Sym => Pack_Sym, Scope => Current_Scope); end if; --# assert STree.Table = STree.Table~ and --# (Syntax_Node_Type (Context_Node, STree.Table) = SP_Symbols.context_clause or Context_Node = STree.NullNode) and --# (Syntax_Node_Type (Generic_Formal_Part_Node, STree.Table) = SP_Symbols.generic_formal_part --# or Generic_Formal_Part_Node = STree.NullNode) and --# Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.package_specification; -- ASSUME Context_Node = context_clause OR NULL if Syntax_Node_Type (Node => Context_Node) = SP_Symbols.context_clause then -- ASSUME Context_Node = context_clause Wf_Context_Clause (Node => Context_Node, Comp_Sym => Pack_Sym, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Pack_Sym)); end if; if Syntax_Node_Type (Node => Generic_Formal_Part_Node) = SP_Symbols.generic_formal_part then -- ASSUME Generic_Formal_Part_Node = generic_formal_part Dictionary.Add_Generic_Unit (Kind => Dictionary.Generic_Of_Package, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Ident_Node), End_Position => Node_Position (Node => Ident_Node)), Scope => Current_Scope, Generic_Unit => Generic_Unit); Dictionary.Set_Package_Generic_Unit (Pack_Sym => Pack_Sym, Generic_Unit => Generic_Unit); Dictionary.Set_Generic_Unit_Owning_Package (Generic_Unit => Generic_Unit, Pack_Sym => Pack_Sym); Wf_Generic_Formal_Part (Node => Generic_Formal_Part_Node, Generic_Ident_Node_Pos => Node_Position (Node => Ident_Node), Generic_Unit => Generic_Unit, Package_Or_Subprogram_Symbol => Pack_Sym); end if; --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.package_specification; Wf_Package_Specification (Node => Spec_Node, Ident_Str => Ident_Str, Pack_Sym => Pack_Sym, Current_Scope => Current_Scope, Component_Data => Component_Data, The_Heap => The_Heap); end if; end Wf_Package_Declaration; spark-2012.0.deb/examiner/sparklex.ads0000644000175000017500000001475511753202336016607 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with E_Strings; with LexTokenManager; with SPARK_IO; with SP_Symbols; use type LexTokenManager.Line_Numbers; use type SP_Symbols.SP_Symbol; --# inherit Ada.Characters.Handling, --# Ada.Characters.Latin_1, --# CommandLineData, --# Dictionary, --# ErrorHandler, --# E_Strings, --# LexTokenLists, --# LexTokenManager, --# SPARK_IO, --# SP_Symbols, --# SystemErrors; package SparkLex --# own Curr_Line : Line_Context; is --# function Curr_Line_Invariant (Curr_Line : Line_Context) return Boolean; type Line_Context is private; Null_Line_Context : constant Line_Context; procedure Clear_Line_Context; --# global out Curr_Line; --# derives Curr_Line from ; --# post Curr_Line_Invariant (Curr_Line); procedure Store_Line_Context (File_Line : out Line_Context); --# global in Curr_Line; --# derives File_Line from Curr_Line; --# pre Curr_Line_Invariant (Curr_Line); --# post Curr_Line_Invariant (File_Line); procedure Restore_Line_Context (File_Line : in Line_Context); --# global out Curr_Line; --# derives Curr_Line from File_Line; --# pre Curr_Line_Invariant (File_Line); --# post Curr_Line_Invariant (Curr_Line); -- Returns True iff ExStr is an FDL reserved word function Check_FDL_RW (Ex_Str : E_Strings.T) return Boolean; -- Scan ProgText and return next token found with class Token -- and value LexVal. PunctToken is True iff the scanned token -- is deemed to be a "punctuation token" which can be ignored -- by the parser. -- -- The exact list of "Punctuation Tokens" is defined in -- SPARKLex.Lex.IsPunctToken procedure Examiner_Lex (Prog_Text : in SPARK_IO.File_Type; Token : out SP_Symbols.SP_Terminal; Lex_Val : out LexTokenManager.Lex_Value; Punct_Token : out Boolean); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out Curr_Line; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Curr_Line, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lex_Val, --# Punct_Token, --# SPARK_IO.File_Sys, --# Token from CommandLineData.Content, --# Curr_Line, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Prog_Text, --# SPARK_IO.File_Sys; --# pre Curr_Line_Invariant (Curr_Line); --# post Curr_Line_Invariant (Curr_Line); -- This version behaves as ExaminerLex, but _allows_ identifiers -- beginning with $ to allow for pre-processing and macro-substitution -- of such identifiers by tools such as GNATPREP.
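-- Editor's illustrative sketch (not part of the original SPARK sources; kept
-- as a comment so the unit itself is unchanged): with GNATPREP-style
-- pre-processing, an input line such as
--
--    Max_Table_Size : constant Integer := $MAX_TABLE_SIZE;
--
-- (the names are invented) contains the identifier "$MAX_TABLE_SIZE".
-- SPARK_Format_Lex below accepts it so the text can be tokenised before
-- macro substitution has taken place, whereas Examiner_Lex does not allow
-- identifiers beginning with $.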
procedure SPARK_Format_Lex (Prog_Text : in SPARK_IO.File_Type; Token : out SP_Symbols.SP_Terminal; Lex_Val : out LexTokenManager.Lex_Value; Punct_Token : out Boolean); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out Curr_Line; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Curr_Line, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lex_Val, --# Punct_Token, --# SPARK_IO.File_Sys, --# Token from CommandLineData.Content, --# Curr_Line, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Prog_Text, --# SPARK_IO.File_Sys; --# pre Curr_Line_Invariant (Curr_Line); --# post Curr_Line_Invariant (Curr_Line); -- Returns True if Token1 and Token2 are both -- Delimiters (such as operators, [ ], and %) -- or both Numbers (integers, real, based literals etc.) -- or both Character or String literals -- or both Identifiers -- or both Reserved Words -- or both "Others" (not falling into any of the catogories above) function Similar_Tokens (Token1, Token2 : SP_Symbols.SP_Terminal) return Boolean; private type Program_Context is (In_Ada, In_Annotation); type Annotation_Context is (Start_Annotation, Mid_Annotation, Ended_Annotation); type Line_Context is record Context : Program_Context; Anno_Context : Annotation_Context; Line_No : LexTokenManager.Line_Numbers; Last_Token_Pos, Curr_Pos, Lookahead_Pos : E_Strings.Positions; Conts : E_Strings.T; end record; Null_Line_Context : constant Line_Context := Line_Context' (Context => In_Ada, Anno_Context => Start_Annotation, Line_No => LexTokenManager.Line_Numbers'First, Last_Token_Pos => E_Strings.Positions'First, Curr_Pos => E_Strings.Positions'First, Lookahead_Pos => E_Strings.Positions'First, Conts => E_Strings.Empty_String); end SparkLex; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_simple_expression.adb0000644000175000017500000003743511753202336025515 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ---------------------------------------------------------------------------- -- Overview: Called to check validity of a -- simple_expression node. 
Replaces calls to StaticSimpleExpression, -- BaseTypeSimpleExpression and CheckTypeSimpleExpression ---------------------------------------------------------------------------- separate (Sem.Walk_Expression_P) procedure Wf_Simple_Expression (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; T_Stack : in Type_Context_Stack.T_Stack_Type; Context_Requires_Static : in Boolean) is Left, Right, Result : Sem.Exp_Record; Op_Node : STree.SyntaxNode; Operator : SP_Symbols.SP_Symbol; Left_Base, Right_Base : Dictionary.Symbol; Result_Str, Temp_Str : E_Strings.T; function Length_Sum (Left, Right : Sem.Exp_Record) return Maths.Value --# global in Dictionary.Dict; is Unused : Maths.ErrorCode; Sum : Maths.Value; function Length_Of (S : Sem.Exp_Record) return Maths.Value --# global in Dictionary.Dict; is Result : Maths.Value; begin if Dictionary.IsPredefinedCharacterType (Dictionary.GetRootType (S.Type_Symbol)) then Result := Maths.OneInteger; else Result := S.Range_RHS; end if; return Result; end Length_Of; begin -- Length_Sum --# accept Flow, 10, Unused, "Expected ineffective assignment"; Maths.Add (Length_Of (S => Left), Length_Of (S => Right), Sum, Unused); --# end accept; --# accept Flow, 33, Unused, "Expected to be neither referenced nor exported"; return Sum; end Length_Sum; -- Returns either the Character or String Literal associated with Exp -- as a string literal. A character literal 'a' is returned as the -- string literal "a" for example. The resulting string does NOT -- include opening or closing quotations. procedure Get_String (Exp : in Sem.Exp_Record; Exp_Type : in Dictionary.Symbol; Err_Pos : in LexTokenManager.Token_Position; Str : out E_Strings.T) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Err_Pos, --# Exp, --# Exp_Type, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# Str from Dictionary.Dict, --# Exp, --# Exp_Type, --# LexTokenManager.State; --# pre Dictionary.IsPredefinedCharacterType (Exp_Type, Dictionary.Dict) or --# Dictionary.IsPredefinedStringType (Exp_Type, Dictionary.Dict); is Str_Length : E_Strings.Lengths; Char_Code : Integer; Unused : Maths.ErrorCode; begin if Dictionary.IsPredefinedCharacterType (Exp_Type) then --# accept F, 10, Unused, "Ineffective assignment OK here"; Maths.ValueToInteger (Exp.Value, Char_Code, Unused); --# end accept; Str := E_Strings.Empty_String; if Char_Code >= Character'Pos (Character'First) and then Char_Code <= Character'Pos (Character'Last) then E_Strings.Append_Char (Str, Character'Val (Char_Code)); else ErrorHandler.Semantic_Error (Err_Num => 402, Reference => ErrorHandler.No_Reference, Position => Err_Pos, Id_Str => LexTokenManager.Null_String); end if; else -- Get the String representation of Exp. This will include a preceding -- and a trailing " character at this point Str := LexTokenManager.Lex_String_To_String (Exp.String_Value); Str_Length := E_Strings.Get_Length (Str); -- including two " characters if Str_Length > 2 then -- Must contain at least one character, but we still want -- to get rid of the outer " characters. 
Str := E_Strings.Section (Str, 2, Str_Length - 2); else -- Must be a null String "", so Str := E_Strings.Empty_String; end if; end if; --# accept F, 33, Unused, "Unused not referenced OK"; end Get_String; begin -- Wf_Simple_Expression Op_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Op_Node = simple_expression OR simple_expression_opt OR -- annotation_simple_expression OR annotation_simple_expression_opt if STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.simple_expression or else STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.annotation_simple_expression then -- ASSUME Op_Node = simple_expression OR annotation_simple_expression Op_Node := STree.Child_Node (Current_Node => STree.Next_Sibling (Current_Node => Op_Node)); -- ASSUME Op_Node = plus OR minus OR ampersand SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.plus or else STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.minus or else STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.ampersand, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Op_Node = plus OR minus OR ampersand in Wf_Simple_Expression"); -- binary_adding_operator exists Operator := STree.Syntax_Node_Type (Node => Op_Node); Exp_Stack.Pop (Item => Right, Stack => E_Stack); Exp_Stack.Pop (Item => Left, Stack => E_Stack); Result := Null_Type_Record; -- safety: we may not set all fields below -- do static checks first Result.Is_Constant := Left.Is_Constant and then Right.Is_Constant; -- In Ada95 onwards & can be a static function. -- LRM83(4.9) says that & is never static, so... case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => Result.Is_Static := Left.Is_Static and then Right.Is_Static and then Operator /= SP_Symbols.ampersand; when CommandLineData.SPARK95_Onwards => Result.Is_Static := Left.Is_Static and then Right.Is_Static; end case; Result.Has_Operators := True; if Left.Is_ARange or else Right.Is_ARange then Result := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 90, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Op_Node), Id_Str => LexTokenManager.Null_String); else -- neither are ranges -- now do type compat and operator visibility checks if Operator = SP_Symbols.ampersand then Left_Base := Dictionary.GetRootType (Left.Type_Symbol); Right_Base := Dictionary.GetRootType (Right.Type_Symbol); if (Dictionary.IsPredefinedCharacterType (Left_Base) or else Dictionary.IsPredefinedStringType (Left_Base)) and then (Dictionary.IsPredefinedCharacterType (Right_Base) or else Dictionary.IsPredefinedStringType (Right_Base)) then -- "&" expressions in SPARK are always expected to be constant (SR 4.5.3) -- As such, we should be able to compute the length and value of the result. 
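-- Editor's illustrative sketch (not part of the original SPARK sources; kept
-- as a comment so the unit itself is unchanged): because "&" must be
-- constant here, a concatenation such as
--
--    Greeting : constant String := "Hello, " & "world";
--
-- has both its length (12) and its value ("Hello, world") computed at this
-- point, while a concatenation with a non-constant operand falls into the
-- else-branch below and is reported as a semantic error.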
if Result.Is_Constant then Result.Range_RHS := Length_Sum (Left => Left, Right => Right); Result.Type_Symbol := Dictionary.GetPredefinedStringType; -- Now build the result, from a starting ", then Left, then Right, -- then a closing " Result_Str := E_Strings.Empty_String; E_Strings.Append_Char (Result_Str, '"'); Get_String (Exp => Left, Exp_Type => Left_Base, Err_Pos => STree.Node_Position (Node => Node), Str => Temp_Str); E_Strings.Append_Examiner_String (Result_Str, Temp_Str); Get_String (Exp => Right, Exp_Type => Right_Base, Err_Pos => STree.Node_Position (Node => Node), Str => Temp_Str); E_Strings.Append_Examiner_String (Result_Str, Temp_Str); E_Strings.Append_Char (Result_Str, '"'); LexTokenManager.Insert_Examiner_String (Result_Str, Result.String_Value); else Result := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 37, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; else -- Types of Left and/or Right are wrong... Result := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error_Sym2 (Err_Num => 35, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Op_Node), Sym => Left_Base, Sym2 => Right_Base, Scope => Scope); end if; else -- its plus or minus Check_Binary_Operator (Operator => Operator, Left => Left, Right => Right, Scope => Scope, T_Stack => T_Stack, Op_Pos => STree.Node_Position (Node => Op_Node), Left_Pos => STree.Node_Position (Node => STree.Child_Node (Current_Node => Node)), Right_Pos => STree.Node_Position (Node => STree.Next_Sibling (Current_Node => STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node)))), Convert => True, Is_Annotation => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_simple_expression, Result => Result); -- Seed Op_Node with type to aid selection of operator in VCG STree.Add_Node_Symbol (Node => Op_Node, Sym => Result.Type_Symbol); Calc_Binary_Operator (Node_Pos => STree.Node_Position (Node => Node), Operator => Operator, Left_Val => Left.Value, Right_Val => Right.Value, Is_Annotation => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_simple_expression, Result => Result); if STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => Node)) = SP_Symbols.simple_expression then -- ASSUME STree.Parent_Node (Current_Node => Node) = simple_expression ---------------------------------------------------------------- -- If the parent is also a simple_expression, then we must have -- an unparenthesized expression with two adding operators, such -- as A + B + C -- -- Here, we issue warning 302 to warn of potential evaluation -- order dependency. -- -- We can reduce false-alarm rate here by suppressing the -- warning in two specific cases: -- a) If the sub-expression under consideration is static -- AND the expression as a whole appears in a context -- that requires a static expression. Example: a type -- declaration such as -- type T is range B + 2 - 3 .. 10; -- or -- b) A modular-typed expression where the two operators -- under consideration are both the same and -- commutative. For example: -- A := A + B + C; -- where A, B, and C are all of the same modular -- (sub-)type. -- -- The same logic is used in wf_term for multiplying -- operators. 
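-- Editor's illustrative sketch (not part of the original SPARK sources; kept
-- as a comment so the unit itself is unchanged): given a modular type, e.g.
--
--    type Byte is mod 2**8;
--    A, B, C : Byte;
--
-- the unparenthesized expression A + B + C uses the same commutative
-- operator throughout, so the evaluation-order warning (302) is suppressed
-- by the test below, whereas A + B - C mixes operators and is still flagged.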
---------------------------------------------------------------- if (Context_Requires_Static and then Result.Is_Static) or else (Dictionary.TypeIsModular (Result.Type_Symbol) and then Ops_Are_Same_And_Commutative (Operator, STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => STree.Next_Sibling (Current_Node => Node))))) then null; else ErrorHandler.Semantic_Warning (Err_Num => 302, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; end if; end if; Result.Errors_In_Expression := Result.Errors_In_Expression or else Left.Errors_In_Expression or else Right.Errors_In_Expression; -- OtherSymbol may carry a function symbol in the case of uses of unchecked_conversion. -- This symbol is used (by Wf_Assign) to convey information to the VCG to suppress -- checks when an unchecked_conversion is assigned to something of the same subtype. -- We do not want this mechanism if the unchecked_conversion is used in any other context -- than a direct assignment. Therefore we clear OtherSymbol here: Result.Other_Symbol := Dictionary.NullSymbol; Exp_Stack.Push (X => Result, Stack => E_Stack); elsif STree.Syntax_Node_Type (Node => Op_Node) /= SP_Symbols.simple_expression_opt and then STree.Syntax_Node_Type (Node => Op_Node) /= SP_Symbols.annotation_simple_expression_opt then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Op_Node = simple_expression OR simple_expression_opt OR annotation_simple_expression OR " & "annotation_simple_expression_opt in Wf_Simple_Expression"); end if; end Wf_Simple_Expression; spark-2012.0.deb/examiner/spparser.adb0000644000175000017500000014347211753202336016573 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license.
-- --============================================================================= with ErrorHandler; with ExaminerConstants; with LexTokenManager; with SparkLex; with SP_Expected_Symbols; with SP_Parser_Actions; with SP_Parser_Goto; with SP_Productions; with SP_Relations; with SP_Symbols; with STree; with SystemErrors; use type STree.SyntaxNode; use type SP_Parser_Actions.SP_Parse_Act; use type SP_Parser_Actions.SP_Action_Kind; use type SP_Productions.SP_State; use type SP_Symbols.SP_Symbol; package body SPParser is procedure Put_Symbol (File : in SPARK_IO.File_Type; Item : in SP_Symbols.SP_Symbol) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# Item; is --# hide Put_Symbol; -- hidden since uses 'Image begin if SPARK_IO.Valid_File (File) then SPARK_IO.Put_String (File, SP_Symbols.SP_Symbol'Image (Item), 0); end if; end Put_Symbol; procedure SPParse (ProgText : in SPARK_IO.File_Type; MaxStackSize : out Natural; FileEnd : out Boolean) is --# inherit ErrorHandler, --# ExaminerConstants, --# SP_Productions, --# SP_Symbols, --# STree, --# SystemErrors; package SPStackManager --# own SPStack : SPStackStruct; --# SPStackPtr : SPStackPtrVal; --# initializes SPStack, --# SPStackPtr; is subtype SPStackPtrVal is Natural range 0 .. ExaminerConstants.SPStackSize; type SPStackEntry is record StateNo : SP_Productions.SP_State; SPSym : SP_Symbols.SP_Symbol; Node : STree.SyntaxNode; end record; SPStackPtr : SPStackPtrVal; procedure SPPush (St : in SP_Productions.SP_State; Sym : in SP_Symbols.SP_Symbol; Node : in STree.SyntaxNode); --# global in out SPStack; --# in out SPStackPtr; --# derives SPStack from *, --# Node, --# SPStackPtr, --# St, --# Sym & --# SPStackPtr from *; procedure SPTop (Top : out SPStackEntry); --# global in SPStack; --# in SPStackPtr; --# derives Top from SPStack, --# SPStackPtr; procedure SPPop (Top : out SPStackEntry; PopOff : in SP_Productions.SP_Right); --# global in SPStack; --# in out SPStackPtr; --# derives SPStackPtr from *, --# PopOff & --# Top from PopOff, --# SPStack, --# SPStackPtr; procedure SPLook (StackEntry : out SPStackEntry; Pos : in SPStackPtrVal); --# global in SPStack; --# in SPStackPtr; --# derives StackEntry from Pos, --# SPStack, --# SPStackPtr; procedure SPRemove (NoStates : in SPStackPtrVal); --# global in out SPStackPtr; --# derives SPStackPtr from *, --# NoStates; end SPStackManager; --# inherit ErrorHandler, --# LexTokenManager, --# SPARK_IO, --# SPStackManager, --# SP_Parser_Actions, --# SP_Parser_Goto, --# SP_Productions, --# SP_Symbols, --# STree; package SPActions is procedure ShiftAction (State : in SP_Productions.Valid_States; Sym : in SP_Symbols.SP_Terminal; LexVal : in LexTokenManager.Lex_Value; PuncToken : in Boolean); --# global in out SPStackManager.SPStack; --# in out SPStackManager.SPStackPtr; --# in out STree.Table; --# derives SPStackManager.SPStack from *, --# PuncToken, --# SPStackManager.SPStackPtr, --# State, --# STree.Table, --# Sym & --# SPStackManager.SPStackPtr from * & --# STree.Table from *, --# LexVal, --# PuncToken, --# Sym; procedure ReduceAction (ReduceSymbol : in SP_Symbols.SP_Non_Terminal; ReduceBy : in SP_Productions.SP_Right); --# global in out SPStackManager.SPStack; --# in out SPStackManager.SPStackPtr; --# in out STree.Table; --# derives SPStackManager.SPStack, --# STree.Table from ReduceBy, --# ReduceSymbol, --# SPStackManager.SPStack, --# SPStackManager.SPStackPtr, --# STree.Table & --# SPStackManager.SPStackPtr from *, --# ReduceBy; end SPActions; --# inherit CommandLineData, 
--# Dictionary, --# ErrorHandler, --# ExaminerConstants, --# LexTokenManager, --# SPActions, --# SparkLex, --# SPARK_IO, --# SPStackManager, --# SP_Parser_Actions, --# SP_Parser_Goto, --# SP_Productions, --# SP_Relations, --# SP_Symbols, --# STree; package SPErrorRecovery is procedure SPRecover (ProgText : in SPARK_IO.File_Type; CurrentToken : in SP_Symbols.SP_Symbol; CurrentLexVal : in LexTokenManager.Lex_Value; PuncToken : in Boolean; Halt : out Boolean); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# in out SPStackManager.SPStack; --# in out SPStackManager.SPStackPtr; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys from CommandLineData.Content, --# CurrentLexVal, --# CurrentToken, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# ProgText, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# SPStackManager.SPStack, --# SPStackManager.SPStackPtr, --# STree.Table & --# Halt, --# SPStackManager.SPStack, --# SPStackManager.SPStackPtr, --# STree.Table from CommandLineData.Content, --# CurrentLexVal, --# CurrentToken, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# ProgText, --# PuncToken, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# SPStackManager.SPStack, --# SPStackManager.SPStackPtr, --# STree.Table; end SPErrorRecovery; SPCurrentSym : SP_Symbols.SP_Symbol; SPCurrState : SP_Productions.SP_State; SPHaltCalled : Boolean; LexVal : LexTokenManager.Lex_Value; PuncToken : Boolean; NoOfTerminals, NoOfNonTerminals : SP_Expected_Symbols.SP_Ess_Sym_Range; TerminalList, NonTerminalList : SP_Expected_Symbols.SP_Exp_Sym_List; SPStackTop : SPStackManager.SPStackEntry; SPAct : SP_Parser_Actions.SP_Parse_Act; package body SPStackManager is subtype SPStackIndex is Positive range 1 .. 
ExaminerConstants.SPStackSize; type SPStackStruct is array (SPStackIndex) of SPStackEntry; SPStack : SPStackStruct; procedure SPPush (St : in SP_Productions.SP_State; Sym : in SP_Symbols.SP_Symbol; Node : in STree.SyntaxNode) is begin if SPStackPtr < SPStackIndex'Last then SPStackPtr := SPStackPtr + 1; SPStack (SPStackPtr) := SPStackEntry'(St, Sym, Node); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Parse_Stack_Overflow, Msg => ""); end if; end SPPush; procedure SPTop (Top : out SPStackEntry) is begin Top := SPStack (SPStackPtr); end SPTop; procedure SPPop (Top : out SPStackEntry; PopOff : in SP_Productions.SP_Right) is begin if SPStackPtr > Natural (PopOff) then SPStackPtr := SPStackPtr - Natural (PopOff); Top := SPStack (SPStackPtr); else SPStackPtr := 0; Top := SPStackEntry'(SP_Productions.No_State, SP_Symbols.SPDEFAULT, STree.NullNode); end if; end SPPop; procedure SPLook (StackEntry : out SPStackEntry; Pos : in SPStackPtrVal) is begin if SPStackPtr > Pos then StackEntry := SPStack (SPStackPtr - Pos); else StackEntry := SPStackEntry'(SP_Productions.No_State, SP_Symbols.SPDEFAULT, STree.NullNode); end if; end SPLook; procedure SPRemove (NoStates : in SPStackPtrVal) is begin if SPStackPtr > NoStates then SPStackPtr := SPStackPtr - NoStates; else SPStackPtr := 0; end if; end SPRemove; begin SPStackPtr := 0; --# accept F, 31, SPStack, --# "Only the stack pointer needs to be initialized" & --# F, 32, SPStack, --# "Only the stack pointer needs to be initialized"; end SPStackManager; -- flow error SPStack undefined expected package body SPActions is procedure ShiftAction (State : in SP_Productions.Valid_States; Sym : in SP_Symbols.SP_Terminal; LexVal : in LexTokenManager.Lex_Value; PuncToken : in Boolean) is Node : STree.SyntaxNode; begin if not PuncToken then STree.NewTerminal (Terminal => Sym, TerminalVal => LexVal, Node => Node); else Node := STree.NullNode; end if; SPStackManager.SPPush (State, Sym, Node); end ShiftAction; procedure ReduceAction (ReduceSymbol : in SP_Symbols.SP_Non_Terminal; ReduceBy : in SP_Productions.SP_Right) is Node : STree.SyntaxNode; SPElement : SPStackManager.SPStackEntry; SPCurrState : SP_Productions.SP_State; StackPointer : SPStackManager.SPStackPtrVal; begin STree.NewProduction (ReduceSymbol, Node); StackPointer := SPStackManager.SPStackPtrVal (ReduceBy); loop exit when StackPointer = 0; StackPointer := StackPointer - 1; SPStackManager.SPLook (SPElement, StackPointer); if SPElement.Node /= STree.NullNode then STree.AddDerivative (SPElement.Node); end if; end loop; SPStackManager.SPPop (SPElement, ReduceBy); SPCurrState := SP_Parser_Goto.SP_Goto (SPElement.StateNo, ReduceSymbol); SPStackManager.SPPush (SPCurrState, ReduceSymbol, Node); end ReduceAction; end SPActions; package body SPErrorRecovery is procedure SPRecover (ProgText : in SPARK_IO.File_Type; CurrentToken : in SP_Symbols.SP_Symbol; CurrentLexVal : in LexTokenManager.Lex_Value; PuncToken : in Boolean; Halt : out Boolean) is type BufIndex is range 0 .. 
ExaminerConstants.SPErrLookahead; --# assert BufIndex'Base is Short_Short_Integer; -- for GNAT type TokenBuffer is array (BufIndex) of SP_Symbols.SP_Terminal; type LexBuffer is array (BufIndex) of LexTokenManager.Lex_Value; type TypeBuffer is array (BufIndex) of Boolean; type LocalRecoverySuccess is (NoSuccess, WrongToken, MissingToken, ExtraToken); TokenList : TokenBuffer; LexValList : LexBuffer; TokenTypes : TypeBuffer; HigherEntry, LowerEntry : SPStackManager.SPStackEntry; StackDepth, Pos : SPStackManager.SPStackPtrVal; Node : STree.SyntaxNode; LocalSuccess : LocalRecoverySuccess; Success, Stop, Done : Boolean; SymListSize : Natural; SymList : ErrorHandler.Err_Sym_List; ReplacementSym, RecoverySym : SP_Symbols.SP_Symbol; CurrState : SP_Productions.SP_State; Index, LastBufIndex : BufIndex; SPElement : SPStackManager.SPStackEntry; SPAct : SP_Parser_Actions.SP_Parse_Act; LexToken : SP_Symbols.SP_Terminal; LexTokenValue : LexTokenManager.Lex_Value; LexTokenType : Boolean; procedure CheckFollowingTokens (Tokens : in TokenBuffer; StartIndex, LastIndex : in BufIndex; StackPos : in SPStackManager.SPStackPtrVal; NextState : in SP_Productions.SP_State; RecoveryOK : out Boolean) --# global in SPStackManager.SPStack; --# in SPStackManager.SPStackPtr; --# derives RecoveryOK from LastIndex, --# NextState, --# SPStackManager.SPStack, --# SPStackManager.SPStackPtr, --# StackPos, --# StartIndex, --# Tokens; is type RecoveryStackIndex is range 0 .. ExaminerConstants.SPErrLookahead * 2 + 1; --# assert RecoveryStackIndex'Base is Short_Short_Integer; -- for GNAT -- times two to allow for reduction by 0 type RecoveryStack is array (RecoveryStackIndex) of SP_Productions.SP_State; LocalStack : RecoveryStack; LocalStackPtr : RecoveryStackIndex; ParseStackptr : SPStackManager.SPStackPtrVal; CurrState : SP_Productions.SP_State; SPAct : SP_Parser_Actions.SP_Parse_Act; SPElement : SPStackManager.SPStackEntry; Index : BufIndex; Done : Boolean; begin -- This code could do with refactoring to remove the conditional -- flow errors and to render it free from RTE - TJJ. 
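-- Editor's illustrative note (not part of the original SPARK sources): the
-- "--# accept" annotations that follow justify anticipated flow messages so
-- that the Examiner does not report them as unexpected errors, for example
--
--    --# accept F, 10, X, "expected ineffective assignment";
--
-- (X is an invented name) marks flow message 10 about X as expected; where
-- present, a matching "--# end accept" closes the justified region.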
--# accept F, 23, LocalStack, --# "The stack pointer is all that is need to determine the extent of the stack" & --# F, 501, LocalStack, --# "The stack pointer is all that is need to determine the extent of the stack" & --# F, 504, LocalStack, --# "The stack pointer is all that is need to determine the extent of the stack" & --# F, 602, RecoveryOK, LocalStack, --# "The stack pointer is all that is need to determine the extent of the stack"; -- check further tokens so that we do not recover too soon RecoveryOK := True; ParseStackptr := StackPos; if NextState /= SP_Productions.No_State then LocalStack (1) := NextState; LocalStackPtr := 1; else LocalStackPtr := 0; end if; if LocalStackPtr > 0 then CurrState := LocalStack (LocalStackPtr); else SPStackManager.SPLook (SPElement, ParseStackptr); CurrState := SPElement.StateNo; end if; Index := StartIndex; Done := False; loop --SPARK_IO.Put_String (SPARK_IO.STANDARD_OUTPUT, "state ",0); --SPARK_IO.Put_Integer (SPARK_IO.STANDARD_OUTPUT, Integer (CurrState), 5, 10); SPAct := SP_Parser_Actions.SPA (CurrState, Tokens (Index)); --SPARK_IO.Put_String (SPARK_IO.STANDARD_OUTPUT, "trying ",0); --Put_Symbol (SPARK_IO.STANDARD_OUTPUT, Tokens (Index)); case SPAct.Act is when SP_Parser_Actions.Shift => --SPARK_IO.Put_LINE (SPARK_IO.STANDARD_OUTPUT, " - shift",0); CurrState := SPAct.State; LocalStackPtr := LocalStackPtr + 1; LocalStack (LocalStackPtr) := CurrState; if Index < LastIndex then Index := Index + 1; else Done := True; end if; when SP_Parser_Actions.Reduce => --SPARK_IO.Put_LINE (SPARK_IO.STANDARD_OUTPUT, " - reduce",0); if Integer (LocalStackPtr) > Integer (SPAct.Red_By) then LocalStackPtr := LocalStackPtr - RecoveryStackIndex (SPAct.Red_By); CurrState := LocalStack (LocalStackPtr); else ParseStackptr := ParseStackptr + SPStackManager.SPStackPtrVal'(Integer (SPAct.Red_By) - Integer (LocalStackPtr)); LocalStackPtr := 0; SPStackManager.SPLook (SPElement, ParseStackptr); CurrState := SPElement.StateNo; end if; CurrState := SP_Parser_Goto.SP_Goto (CurrState, SPAct.Symbol); LocalStackPtr := LocalStackPtr + 1; LocalStack (LocalStackPtr) := CurrState; when SP_Parser_Actions.Accpt => --SPARK_IO.Put_LINE (SPARK_IO.STANDARD_OUTPUT, " - accept",0); if Tokens (Index) = SP_Symbols.SPEND then Done := True; else LocalStackPtr := 1; LocalStack (1) := 1; -- First state CurrState := 1; end if; when SP_Parser_Actions.Error => --SPARK_IO.Put_LINE (SPARK_IO.STANDARD_OUTPUT, " - error",0); RecoveryOK := False; Done := True; end case; exit when Done; end loop; end CheckFollowingTokens; procedure FindLocalError (StackTop : in SPStackManager.SPStackEntry; TokenList : in out TokenBuffer; RecoveryPosn : out BufIndex; Success : out LocalRecoverySuccess) --# global in SPStackManager.SPStack; --# in SPStackManager.SPStackPtr; --# derives RecoveryPosn, --# Success, --# TokenList from SPStackManager.SPStack, --# SPStackManager.SPStackPtr, --# StackTop, --# TokenList; is RecoveryToken, RecoverySymbol : SP_Symbols.SP_Symbol; RecoveryPossible, RecoveryOK : Boolean; RecoveryAct : SP_Parser_Actions.SP_Parse_Act; Status : LocalRecoverySuccess; Index : SP_Parser_Actions.Action_Index; FirstToken : SP_Symbols.SP_Terminal; begin FirstToken := TokenList (1); Index := SP_Parser_Actions.First_Action_Index; Status := NoSuccess; RecoveryPossible := True; RecoveryToken := SP_Symbols.SPDEFAULT; -- the initialization of this variable is not strictly -- necessary but it avoids conditional data-flow errors. 
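-- Editor's illustrative sketch (not part of the original SPARK sources, and
-- simplified from the code below): the idiom used just above.  Without the
-- default assignment the flow analyser sees a path on which the variable may
-- be read before it is defined:
--
--    if Status = MissingToken then
--       RecoveryToken := RecoverySymbol;
--    end if;
--    ... RecoveryToken ...   -- conditionally undefined without the default
--
-- so RecoveryToken is given a harmless initial value even though every
-- executable path that reads it also assigns it first.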
loop SP_Parser_Actions.Scan_Action_Table (StackTop.StateNo, Index, RecoveryAct, RecoverySymbol); exit when RecoveryAct = SP_Parser_Actions.Error_Action; --Put_Symbol (SPARK_IO.STANDARD_OUTPUT, RecoverySymbol); if (RecoverySymbol /= SP_Symbols.SPDEFAULT) and (RecoverySymbol /= SP_Symbols.SPEND) then -- check for invalid extra token if RecoverySymbol = TokenList (2) then --SPARK_IO.Put_LINE (SPARK_IO.STANDARD_OUTPUT, "Trying local error - extra token",0); CheckFollowingTokens (TokenList, 2, BufIndex (ExaminerConstants.SPLocalErrLookahead), 0, SP_Productions.No_State, RecoveryOK); if RecoveryOK then if Status = NoSuccess then Status := ExtraToken; else RecoveryPossible := False; end if; end if; end if; -- check for missing token if RecoveryPossible then -- SPARK_IO.Put_LINE (SPARK_IO.STANDARD_OUTPUT, "Trying local error - missing token",0); TokenList (0) := RecoverySymbol; CheckFollowingTokens (TokenList, 0, BufIndex (ExaminerConstants.SPLocalErrLookahead), 0, SP_Productions.No_State, RecoveryOK); if RecoveryOK then if Status = NoSuccess then Status := MissingToken; RecoveryToken := RecoverySymbol; else RecoveryPossible := False; end if; end if; end if; -- wrongly spelt token if RecoveryPossible then if SparkLex.Similar_Tokens (Token1 => RecoverySymbol, Token2 => TokenList (1)) then TokenList (1) := RecoverySymbol; -- SPARK_IO.Put_LINE (SPARK_IO.STANDARD_OUTPUT, "Trying local error - wrongly spelt token",0); CheckFollowingTokens (TokenList, 1, BufIndex (ExaminerConstants.SPLocalErrLookahead), 0, SP_Productions.No_State, RecoveryOK); if RecoveryOK then if Status = NoSuccess then Status := WrongToken; RecoveryToken := RecoverySymbol; else RecoveryPossible := False; end if; end if; TokenList (1) := FirstToken; end if; end if; end if; exit when not RecoveryPossible; end loop; if RecoveryPossible then if Status = MissingToken then TokenList (0) := RecoveryToken; -- flow err from non-exec path RecoveryPosn := 0; elsif Status = WrongToken then TokenList (1) := RecoveryToken; -- flow err from non-exec path RecoveryPosn := 1; else RecoveryPosn := 2; end if; Success := Status; else Success := NoSuccess; RecoveryPosn := 0; end if; end FindLocalError; procedure FindErrorPhrase (HigherEntry : in out SPStackManager.SPStackEntry; LowerEntry : in SPStackManager.SPStackEntry; StackPos : in SPStackManager.SPStackPtrVal; TokenList : in TokenBuffer; Success : out Boolean) --# global in SPStackManager.SPStack; --# in SPStackManager.SPStackPtr; --# derives HigherEntry, --# Success from HigherEntry, --# LowerEntry, --# SPStackManager.SPStack, --# SPStackManager.SPStackPtr, --# StackPos, --# TokenList; is RecoveryOK, RecoveryFound, RecoveryPossible : Boolean; RecoverySymbol : SP_Symbols.SP_Non_Terminal; RecoveryEntry : SPStackManager.SPStackEntry; RecoveryState, NextState : SP_Productions.SP_State; Index : SP_Parser_Goto.Goto_Index; SPAct : SP_Parser_Actions.SP_Parse_Act; begin RecoveryEntry := SPStackManager.SPStackEntry'(SP_Productions.No_State, SP_Symbols.SPDEFAULT, STree.NullNode); Index := SP_Parser_Goto.First_Goto_Index; RecoveryPossible := True; RecoveryFound := False; loop SP_Parser_Goto.Scan_Goto_Table (LowerEntry.StateNo, Index, RecoveryState, RecoverySymbol); exit when RecoveryState = SP_Productions.No_State; SPAct := SP_Parser_Actions.SPA (RecoveryState, TokenList (1)); if SPAct.Act = SP_Parser_Actions.Shift or SPAct.Act = SP_Parser_Actions.Accpt then if HigherEntry.StateNo = SP_Productions.No_State or else RecoverySymbol = HigherEntry.SPSym or else SP_Relations.SP_Left_Corner (RecoverySymbol, 
HigherEntry.SPSym) then CheckFollowingTokens (TokenList, 1, BufIndex'Last, StackPos, RecoveryState, RecoveryOK); if RecoveryOK then --# accept F, 20, NextState, "NextState is guarded by RecoveryFound"; if not RecoveryFound then NextState := SPAct.State; RecoveryFound := True; RecoveryEntry := SPStackManager.SPStackEntry'(RecoveryState, RecoverySymbol, STree.NullNode); elsif SPAct.State /= NextState then -- expected flow error RecoveryPossible := False; end if; end if; end if; end if; exit when not RecoveryPossible; end loop; if RecoveryPossible and RecoveryFound then Success := True; HigherEntry := RecoveryEntry; else Success := False; end if; --# accept F, 602, HigherEntry, NextState, --# "NextState is guarded by RecoveryFound" & --# F, 602, Success, NextState, --# "NextState is guarded by RecoveryFound"; end FindErrorPhrase; begin --SPRecover Stop := False; --# accept f, 23, TokenList, "Whole array is initialized." & --# f, 23, LexValList, "Whole array is initialized." & --# f, 23, TokenTypes, "Whole array is initialized."; TokenList (1) := CurrentToken; LexValList (1) := CurrentLexVal; TokenTypes (1) := PuncToken; LexValList (0) := LexTokenManager.Lex_Value' (Position => LexTokenManager.Token_Position'(Start_Line_No => 0, Start_Pos => 0), Token_Str => LexTokenManager.Null_String); TokenTypes (0) := False; for Ix in BufIndex range 2 .. BufIndex (ExaminerConstants.SPLocalErrLookahead) loop SparkLex.Examiner_Lex (Prog_Text => ProgText, Token => LexToken, Lex_Val => LexTokenValue, Punct_Token => LexTokenType); TokenList (Ix) := LexToken; LexValList (Ix) := LexTokenValue; TokenTypes (Ix) := LexTokenType; end loop; --# end accept; Success := False; SPStackManager.SPTop (HigherEntry); -- try local error recovery FindLocalError (HigherEntry, TokenList, Index, LocalSuccess); --# accept F, 23, SymList, --# "Access to SymList Elements is guarded by SymListSize"; if LocalSuccess /= NoSuccess then -- produce recovery message if LocalSuccess = WrongToken or LocalSuccess = ExtraToken then SymListSize := 1; SymList (1) := CurrentToken; else SymListSize := 0; end if; if LocalSuccess = WrongToken or LocalSuccess = MissingToken then ReplacementSym := TokenList (Index); else ReplacementSym := SP_Symbols.SPDEFAULT; end if; --# accept F, 504, SymList, --# "Access to SymList is guarded by SymListSize"; ErrorHandler.Syntax_Recovery (Recovery_Posn => LexValList (1), Replacement_Sym => ReplacementSym, Next_Sym => SP_Symbols.SPDEFAULT, No_Of_Syms => SymListSize, Sym_List => SymList); --# end accept; -- if LocalSuccess = MissingToken then -- SPARK_IO.Put_LINE (SPARK_IO.STANDARD_OUTPUT, "Local error - missing token",0); -- elsif LocalSuccess = ExtraToken then -- SPARK_IO.Put_LINE (SPARK_IO.STANDARD_OUTPUT, "Local error - extra token",0); -- else -- SPARK_IO.Put_LINE (SPARK_IO.STANDARD_OUTPUT, "Local error - wrongly spelt token",0); -- end if; Success := True; LastBufIndex := BufIndex (ExaminerConstants.SPLocalErrLookahead); else for Ix in BufIndex range BufIndex (ExaminerConstants.SPLocalErrLookahead) + 1 .. 
BufIndex'Last loop SparkLex.Examiner_Lex (Prog_Text => ProgText, Token => LexToken, Lex_Val => LexTokenValue, Punct_Token => LexTokenType); TokenList (Ix) := LexToken; LexValList (Ix) := LexTokenValue; TokenTypes (Ix) := LexTokenType; end loop; RecoverySym := TokenList (1); loop SPStackManager.SPTop (LowerEntry); HigherEntry := SPStackManager.SPStackEntry'(SP_Productions.No_State, SP_Symbols.SPDEFAULT, STree.NullNode); StackDepth := 0; while not Success and StackDepth < SPStackManager.SPStackPtr loop FindErrorPhrase (HigherEntry, LowerEntry, StackDepth, TokenList, Success); if Success then -- produce recovery message -- Put_LINE (STANDARD_OUTPUT, "Non-local error"); Pos := StackDepth; SymListSize := 0; while Pos > 0 and SymListSize < Natural (ErrorHandler.Err_Sym_Range'Last) loop Pos := Pos - 1; SPStackManager.SPLook (SPElement, Pos); SymListSize := SymListSize + 1; SymList (ErrorHandler.Err_Sym_Range (SymListSize)) := SPElement.SPSym; end loop; --# accept F, 41, "Pos is updated in the outer loop."; if Pos /= 0 then -- expected flow error SymListSize := StackDepth; SPStackManager.SPTop (SPElement); --# accept F, 504, SymList, --# "Update of element of SymList - Accesses guarded by SymListSize"; SymList (ErrorHandler.Err_Sym_Range'Last) := SPElement.SPSym; --# end accept; end if; --# end accept; --# accept F, 504, SymList, --# "Access to SymList is guarded by SymListSize"; ErrorHandler.Syntax_Recovery (Recovery_Posn => LexValList (1), Replacement_Sym => HigherEntry.SPSym, Next_Sym => RecoverySym, No_Of_Syms => SymListSize, Sym_List => SymList); --# end accept; -- patch stack SPStackManager.SPRemove (StackDepth); STree.NewProduction (HigherEntry.SPSym, Node); SPStackManager.SPPush (HigherEntry.StateNo, HigherEntry.SPSym, Node); Index := 1; LastBufIndex := BufIndex'Last; exit; end if; HigherEntry := LowerEntry; StackDepth := StackDepth + 1; SPStackManager.SPLook (LowerEntry, StackDepth); end loop; exit when Success or TokenList (1) = SP_Symbols.SPEND; for Ix in BufIndex range 1 .. BufIndex'Last - 1 loop TokenList (Ix) := TokenList (Ix + 1); LexValList (Ix) := LexValList (Ix + 1); TokenTypes (Ix) := TokenTypes (Ix + 1); end loop; SparkLex.Examiner_Lex (Prog_Text => ProgText, Token => LexToken, Lex_Val => LexTokenValue, Punct_Token => LexTokenType); TokenList (BufIndex'Last) := LexToken; LexValList (BufIndex'Last) := LexTokenValue; TokenTypes (BufIndex'Last) := LexTokenType; RecoverySym := SP_Symbols.SPDEFAULT; end loop; end if; -- perform action on following tokens if Success then SPStackManager.SPTop (HigherEntry); CurrState := HigherEntry.StateNo; Done := False; loop SPAct := SP_Parser_Actions.SPA (CurrState, TokenList (Index)); case SPAct.Act is when SP_Parser_Actions.Shift => SPActions.ShiftAction (State => SPAct.State, Sym => TokenList (Index), LexVal => LexValList (Index), PuncToken => TokenTypes (Index)); CurrState := SPAct.State; --# accept F, 501, LastBufIndex, "Access guarded by Success."; if Index < LastBufIndex then -- flow error expected Index := Index + 1; else Done := True; end if; --# end accept; when SP_Parser_Actions.Reduce => SPActions.ReduceAction (SPAct.Symbol, SPAct.Red_By); SPStackManager.SPTop (HigherEntry); CurrState := HigherEntry.StateNo; when SP_Parser_Actions.Accpt => Stop := True; Done := True; when others => -- doesn't arise Done := True; end case; exit when Done; end loop; end if; Halt := Stop; --# accept F, 602, SPARK_IO.File_Sys, TokenList, "Accessed elements are defined." & --# F, 602, SPARK_IO.File_Sys, LexValList, "Accessed elements are defined." 
& --# F, 602, SPARK_IO.File_Sys, SymList, "Accessed elements are defined." & --# F, 602, LexTokenManager.State, TokenList, "Accessed elements are defined." & --# F, 602, LexTokenManager.State, LexValList, "Accessed elements are defined." & --# F, 602, LexTokenManager.State, SymList, "Accessed elements are defined." & --# F, 602, ErrorHandler.Error_Context, TokenList, "Accessed elements are defined." & --# F, 602, ErrorHandler.Error_Context, LexValList, "Accessed elements are defined." & --# F, 602, ErrorHandler.Error_Context, SymList, "Accessed elements are defined." & --# F, 602, SparkLex.Curr_Line, TokenList, "Accessed elements are defined." & --# F, 602, SparkLex.Curr_Line, LexValList, "Accessed elements are defined." & --# F, 602, SparkLex.Curr_Line, SymList, "Accessed elements are defined." & --# F, 602, STree.Table, TokenList, "Accessed elements are defined." & --# F, 602, STree.Table, LexValList, "Accessed elements are defined." & --# F, 602, STree.Table, SymList, "Accessed elements are defined." & --# F, 602, STree.Table, TokenTypes, "Accessed elements are defined." & --# F, 602, STree.Table, LastBufIndex, "Accesses guarded by Success" & --# F, 602, SPStackManager.SPStack, TokenList, "Accessed elements are defined." & --# F, 602, SPStackManager.SPStack, LexValList, "Accessed elements are defined." & --# F, 602, SPStackManager.SPStack, SymList, "Accessed elements are defined." & --# F, 602, SPStackManager.SPStack, TokenTypes, "Accessed elements are defined." & --# F, 602, SPStackManager.SPStack, LastBufIndex, "Accesses guarded by Success" & --# F, 602, SPStackManager.SPStackPtr, TokenList, "Accessed elements are defined." & --# F, 602, SPStackManager.SPStackPtr, LexValList, "Accessed elements are defined." & --# F, 602, SPStackManager.SPStackPtr, SymList, "Accessed elements are defined." & --# F, 602, SPStackManager.SPStackPtr, TokenTypes, "Accessed elements are defined." & --# F, 602, SPStackManager.SPStackPtr, LastBufIndex, "Accesses guarded by Success" & --# F, 602, Halt, TokenList, "Accessed elements are defined." & --# F, 602, Halt, LexValList, "Accessed elements are defined." & --# F, 602, Halt, SymList, "Accessed elements are defined." & --# F, 602, Halt, TokenTypes, "Accessed elements are defined." 
& --# F, 602, Halt, LastBufIndex, "Accesses guarded by Success"; end SPRecover; end SPErrorRecovery; -- Unused procedure, but leave here for debugging procedure SPPrintAction (OutputFile : in SPARK_IO.File_Type; SPAct : in SP_Parser_Actions.SP_Parse_Act; SPCurrState : in SP_Productions.SP_State; SPCurrentSym : in SP_Symbols.SP_Symbol) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# OutputFile, --# SPAct, --# SPCurrentSym, --# SPCurrState; is begin SPARK_IO.Put_String (OutputFile, " STATE: ", 0); SPARK_IO.Put_Integer (OutputFile, Integer (SPCurrState), 5, 10); SPARK_IO.Put_String (OutputFile, " SYMBOL: ", 0); Put_Symbol (OutputFile, SPCurrentSym); SPARK_IO.New_Line (OutputFile, 1); case SPAct.Act is when SP_Parser_Actions.Shift => SPARK_IO.Put_String (OutputFile, " ACTION : SHIFT ", 0); SPARK_IO.Put_Integer (OutputFile, Integer (SPAct.State), 5, 10); SPARK_IO.New_Line (OutputFile, 1); when SP_Parser_Actions.Reduce => SPARK_IO.Put_String (OutputFile, " ACTION : REDUCE ", 0); SPARK_IO.Put_String (OutputFile, " SYMBOL : ", 0); Put_Symbol (OutputFile, SPAct.Symbol); SPARK_IO.Put_String (OutputFile, " REDUCE BY : ", 0); SPARK_IO.Put_Integer (OutputFile, Integer (SPAct.Red_By), 5, 10); SPARK_IO.Put_String (OutputFile, " PROD NO : ", 0); SPARK_IO.Put_Integer (OutputFile, Integer (SPAct.Prod_No), 5, 10); SPARK_IO.New_Line (OutputFile, 1); when SP_Parser_Actions.Accpt => SPARK_IO.Put_String (OutputFile, " ACTION : ACCEPT", 0); SPARK_IO.New_Line (OutputFile, 1); when SP_Parser_Actions.Error => SPARK_IO.Put_String (OutputFile, " ACTION : ERROR", 0); SPARK_IO.New_Line (OutputFile, 1); end case; end SPPrintAction; pragma Unreferenced (SPPrintAction); -- Unused procedure, but leave here for debugging procedure SPPrintStack (OutputFile : in SPARK_IO.File_Type) --# global in SPStackManager.SPStack; --# in SPStackManager.SPStackPtr; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# OutputFile, --# SPStackManager.SPStack, --# SPStackManager.SPStackPtr; is SPElement : SPStackManager.SPStackEntry; begin SPARK_IO.Put_Line (OutputFile, " STACK VALUES :", 0); for Ix in reverse SPStackManager.SPStackPtrVal range 0 .. 
SPStackManager.SPStackPtr loop SPStackManager.SPLook (SPElement, Ix); SPARK_IO.Put_Integer (OutputFile, Integer (SPElement.StateNo), 5, 10); SPARK_IO.Put_String (OutputFile, " SYMBOL: ", 0); Put_Symbol (OutputFile, SPElement.SPSym); SPARK_IO.New_Line (OutputFile, 1); end loop; end SPPrintStack; pragma Unreferenced (SPPrintStack); -- Declarations to write SPPrintStack to a named file -- OutputFile : SPARK_IO.File_Type; -- Status : SPARK_IO.File_Status; begin --SPParse -- SPARK_IO.CREATE (OutputFile, "spark.out", "", Status); SPCurrState := 1; SparkLex.Examiner_Lex (Prog_Text => ProgText, Token => SPCurrentSym, Lex_Val => LexVal, Punct_Token => PuncToken); SPStackManager.SPPush (1, SP_Symbols.SPACCEPT, STree.NullNode); SPHaltCalled := False; while not SPHaltCalled loop SPAct := SP_Parser_Actions.SPA (SPCurrState, SPCurrentSym); -- SPPrintAction (OutputFile, SPAct, SPCurrState, SPCurrentSym); -- to write to named dump file -- SPPrintAction (SPARK_IO.Standard_Output, SPAct, SPCurrState, SPCurrentSym); -- to dump to screen case SPAct.Act is when SP_Parser_Actions.Shift => SPCurrState := SPAct.State; SPActions.ShiftAction (State => SPCurrState, Sym => SPCurrentSym, LexVal => LexVal, PuncToken => PuncToken); SparkLex.Examiner_Lex (Prog_Text => ProgText, Token => SPCurrentSym, Lex_Val => LexVal, Punct_Token => PuncToken); when SP_Parser_Actions.Reduce => SPActions.ReduceAction (SPAct.Symbol, SPAct.Red_By); SPStackManager.SPTop (SPStackTop); SPCurrState := SPStackTop.StateNo; when SP_Parser_Actions.Accpt => SPHaltCalled := True; when SP_Parser_Actions.Error => -- SPPrintStack (OutputFile); SPStackManager.SPTop (SPStackTop); SP_Expected_Symbols.Get_Expected_Symbols (SPStackTop.StateNo, NoOfTerminals, TerminalList, NoOfNonTerminals, NonTerminalList); ErrorHandler.Syntax_Error (Error_Item => LexVal, Current_Sym => SPCurrentSym, Entry_Symbol => SPStackTop.SPSym, No_Of_Terminals => NoOfTerminals, No_Of_Non_Terminals => NoOfNonTerminals, Terminal_List => TerminalList, Non_Terminal_List => NonTerminalList); SPErrorRecovery.SPRecover (ProgText => ProgText, CurrentToken => SPCurrentSym, CurrentLexVal => LexVal, PuncToken => PuncToken, Halt => SPHaltCalled); if not SPHaltCalled then SPStackManager.SPTop (SPStackTop); SPCurrState := SPStackTop.StateNo; SparkLex.Examiner_Lex (Prog_Text => ProgText, Token => SPCurrentSym, Lex_Val => LexVal, Punct_Token => PuncToken); end if; end case; end loop; MaxStackSize := 0; case SPCurrentSym is when SP_Symbols.SPEND => FileEnd := True; when others => FileEnd := False; end case; end SPParse; end SPParser; spark-2012.0.deb/examiner/flowanalyser-ifa_stack.adb0000644000175000017500000001162711753202336021360 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. 
If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SystemErrors; separate (FlowAnalyser) package body IFA_Stack is function IsEmpty (S : Stack) return Boolean is begin return S.StackPointer = 0; end IsEmpty; function Top (S : Stack) return StackMember is Result : StackMember; begin if S.StackPointer > 0 then Result := S.StackVector (S.StackPointer); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Relation_Stack_Underflow, Msg => "in StackManager.Top"); Result := NullMember; -- unreachable, assignment just to avoid DF error end if; return Result; end Top; procedure ClearStack (S : out Stack) is begin S.StackPointer := 0; --# accept Flow, 32, S.StackVector, "Init. is partial but effective" & --# Flow, 31, S.StackVector, "Init. is partial but effective" & --# Flow, 602, S, S.StackVector, "Init. is partial but effective"; end ClearStack; procedure Push (S : in out Stack; M : in StackMember) is begin if S.StackPointer < ExaminerConstants.StackManagerStackSize then S.StackPointer := S.StackPointer + 1; S.StackVector (S.StackPointer) := M; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Relation_Stack_Overflow, Msg => "in StackManager.Push"); end if; end Push; procedure Pop (S : in out Stack; M : out StackMember) is begin if S.StackPointer > 0 then M := S.StackVector (S.StackPointer); S.StackPointer := S.StackPointer - 1; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Relation_Stack_Underflow, Msg => "in StackManager.Pop"); M := NullMember; -- unreachable, just to avoid DF end if; end Pop; procedure EstablishMember (TheHeap : in out Heap.HeapRecord; Kind : in MemberType; M : out StackMember) is DefinedVars, UnPreservedVars, AllVars, SeqOfExpns : SeqAlgebra.Seq; Lambda, Mu, Rho, Theta, ThetaTilde, RhoProd : RelationAlgebra.Relation; begin SeqAlgebra.CreateSeq (TheHeap, DefinedVars); SeqAlgebra.CreateSeq (TheHeap, UnPreservedVars); SeqAlgebra.CreateSeq (TheHeap, AllVars); SeqAlgebra.CreateSeq (TheHeap, SeqOfExpns); RelationAlgebra.CreateRelation (TheHeap, Lambda); RelationAlgebra.CreateRelation (TheHeap, Mu); RelationAlgebra.CreateRelation (TheHeap, Rho); RelationAlgebra.CreateRelation (TheHeap, Theta); RelationAlgebra.CreateRelation (TheHeap, ThetaTilde); RelationAlgebra.CreateRelation (TheHeap, RhoProd); M := StackMember' (MemberKind => Kind, DefinedVars => DefinedVars, UnPreservedVars => UnPreservedVars, AllVars => AllVars, SeqOfExpns => SeqOfExpns, Lambda => Lambda, Mu => Mu, Rho => Rho, Theta => Theta, ThetaTilde => ThetaTilde, RhoProd => RhoProd); end EstablishMember; procedure DisposeOfMember (TheHeap : in out Heap.HeapRecord; M : in StackMember) is begin SeqAlgebra.DisposeOfSeq (TheHeap, M.DefinedVars); SeqAlgebra.DisposeOfSeq (TheHeap, M.UnPreservedVars); SeqAlgebra.DisposeOfSeq (TheHeap, M.AllVars); SeqAlgebra.DisposeOfSeq (TheHeap, M.SeqOfExpns); RelationAlgebra.DisposeOfRelation (TheHeap, M.Lambda); RelationAlgebra.DisposeOfRelation (TheHeap, M.Mu); RelationAlgebra.DisposeOfRelation (TheHeap, M.Rho); RelationAlgebra.DisposeOfRelation (TheHeap, M.Theta); RelationAlgebra.DisposeOfRelation (TheHeap, M.ThetaTilde); RelationAlgebra.DisposeOfRelation (TheHeap, M.RhoProd); end DisposeOfMember; end IFA_Stack; spark-2012.0.deb/examiner/maths-valuetostring.adb0000644000175000017500000001011011753202336020732 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited 
------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Maths) function ValueToString (Num : Value) return E_Strings.T is OutBuffer : E_Strings.T; --------------------------------------------------------- procedure WriteChar (Ch : in Character) --# global in out OutBuffer; --# derives OutBuffer from *, --# Ch; is begin E_Strings.Append_Char (E_Str => OutBuffer, Ch => Ch); end WriteChar; --------------------------------------------------------- procedure DoSign --# global in Num; --# in out OutBuffer; --# derives OutBuffer from *, --# Num; is begin if not Num.IsPositive then WriteChar ('-'); end if; end DoSign; --------------------------------------------------------- procedure PartToString (P : in Part; Str : out E_Strings.T) --# derives Str from P; is Hi : Natural; begin Str := E_Strings.Empty_String; Hi := P.Length; for I in Natural range 1 .. Hi loop E_Strings.Append_Char (E_Str => Str, Ch => DigitToChar (P.Numerals ((Hi - I) + 1))); end loop; end PartToString; ---------------------------------------------- procedure IntegerToString --# global in Num; --# in out OutBuffer; --# derives OutBuffer from *, --# Num; is ItemBuffer : E_Strings.T; begin DoSign; PartToString (Num.Numerator, ItemBuffer); E_Strings.Append_Examiner_String (E_Str1 => OutBuffer, E_Str2 => ItemBuffer); end IntegerToString; ---------------------------------------------- procedure RealToString --# global in Num; --# in out OutBuffer; --# derives OutBuffer from *, --# Num; is ItemBuffer : E_Strings.T; IsRational : Boolean; begin IsRational := not (Num.Denominator = OnePart); if IsRational then WriteChar ('('); end if; DoSign; PartToString (Num.Numerator, ItemBuffer); E_Strings.Append_Examiner_String (E_Str1 => OutBuffer, E_Str2 => ItemBuffer); if IsRational then WriteChar ('/'); PartToString (Num.Denominator, ItemBuffer); E_Strings.Append_Examiner_String (E_Str1 => OutBuffer, E_Str2 => ItemBuffer); WriteChar (')'); end if; end RealToString; ---------------------------------------------- begin --ValueToString if Num.Sort = UnknownValue then OutBuffer := E_Strings.Copy_String (Str => "unknown value"); elsif Num.Sort = TruthValue then if Num = FalseValue then OutBuffer := E_Strings.Copy_String (Str => "false"); else OutBuffer := E_Strings.Copy_String (Str => "true"); end if; else OutBuffer := E_Strings.Empty_String; if Num.Sort = IntegerValue then IntegerToString; else --its a real RealToString; end if; end if; return OutBuffer; end ValueToString; spark-2012.0.deb/examiner/stree.adb0000644000175000017500000010040411753202336016042 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited 
------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Statistics; with SystemErrors; package body STree is type NodeValue is record NodeType : SP_Symbols.SP_Symbol; Position : LexTokenManager.Token_Position; Parent, Next, Child : SyntaxNode; Token_Str : LexTokenManager.Lex_String; -- case ??? is -- when ??? => Ref_Str : LexTokenManager.Lex_String; -- when ??? => Ref_Sym : Dictionary.Symbol; -- end case; end record; type SyntaxTreeContents is array (SyntaxNode) of NodeValue; type TableStructure is record FreeList : SyntaxNode; TopUsed : SyntaxNode; CurrSyntaxNode : SyntaxNode; Contents : SyntaxTreeContents; end record; Table : TableStructure; ------------------------------------------------------------------------ procedure RetrieveCurrentRoot (Root : out SyntaxNode) is begin Root := Table.CurrSyntaxNode; Table.CurrSyntaxNode := NullNode; end RetrieveCurrentRoot; ------------------------------------------------------------------------ function NodeToRef (Node : SyntaxNode) return ExaminerConstants.RefType is begin return ExaminerConstants.RefType (Node); end NodeToRef; ------------------------------------------------------------------------ function RefToNode (Ref : ExaminerConstants.RefType) return SyntaxNode is begin return SyntaxNode (Ref); end RefToNode; ------------------------------------------------------------------------ procedure AllocateNode (Node : out SyntaxNode) --# global in out Table; --# derives Node, --# Table from Table; is NewNode : SyntaxNode; begin if Table.FreeList /= NullNode then NewNode := Table.FreeList; -- Child field used as free list pointer. 
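-- Nodes released by CollectNode are chained through their Child field and are
-- reused here before TopUsed is extended, so TopUsed records peak node usage.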
Table.FreeList := Table.Contents (Table.FreeList).Child; elsif Table.TopUsed /= SyntaxTreeContents'Last then Table.TopUsed := Table.TopUsed + 1; NewNode := Table.TopUsed; else NewNode := NullNode; end if; if NewNode = NullNode then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Syntax_Tree_Overflow, Msg => "in STree.AllocateNode"); else Table.Contents (NewNode) := NodeValue' (NodeType => SP_Symbols.SPEND, Position => LexTokenManager.Null_Token_Position, Parent => NullNode, Next => NullNode, Child => NullNode, Token_Str => LexTokenManager.Null_String, Ref_Str => LexTokenManager.Null_String, Ref_Sym => Dictionary.NullSymbol); end if; Node := NewNode; end AllocateNode; ------------------------------------------------------------------------ procedure NewProduction (Production : in SP_Symbols.SP_Non_Terminal; Node : out SyntaxNode) is CurrNode : SyntaxNode; begin AllocateNode (CurrNode); Table.Contents (CurrNode).NodeType := Production; Table.CurrSyntaxNode := CurrNode; Node := CurrNode; end NewProduction; ------------------------------------------------------------------------ procedure NewTerminal (Terminal : in SP_Symbols.SP_Terminal; TerminalVal : in LexTokenManager.Lex_Value; Node : out SyntaxNode) is CurrNode : SyntaxNode; begin AllocateNode (CurrNode); Table.Contents (CurrNode).NodeType := Terminal; Table.Contents (CurrNode).Position := TerminalVal.Position; Table.Contents (CurrNode).Token_Str := TerminalVal.Token_Str; Table.Contents (CurrNode).Ref_Str := TerminalVal.Token_Str; Node := CurrNode; end NewTerminal; ------------------------------------------------------------------------ procedure AddDerivative (Child_Node : in SyntaxNode) is CurrNode : SyntaxNode; LastChild : SyntaxNode; ThePosition : LexTokenManager.Token_Position; begin CurrNode := Table.CurrSyntaxNode; LastChild := Table.Contents (CurrNode).Child; if LastChild = NullNode then Table.Contents (CurrNode).Child := Child_Node; ThePosition := Table.Contents (Child_Node).Position; Table.Contents (CurrNode).Position := ThePosition; else loop exit when Table.Contents (LastChild).Next = NullNode; LastChild := Table.Contents (LastChild).Next; end loop; if Table.Contents (CurrNode).Position = LexTokenManager.Null_Token_Position then ThePosition := Table.Contents (Child_Node).Position; Table.Contents (CurrNode).Position := ThePosition; end if; Table.Contents (LastChild).Next := Child_Node; end if; Table.Contents (Child_Node).Parent := CurrNode; end AddDerivative; ------------------------------------------------------------------------ procedure AddChildNode (Node : in SyntaxNode; ChildNode : in SyntaxNode; LinkToParent : in Boolean) is LastChild : SyntaxNode; begin LastChild := Table.Contents (Node).Child; if LastChild = NullNode then Table.Contents (Node).Child := ChildNode; else loop exit when Table.Contents (LastChild).Next = NullNode; LastChild := Table.Contents (LastChild).Next; end loop; Table.Contents (LastChild).Next := ChildNode; end if; if LinkToParent then Table.Contents (ChildNode).Parent := Node; end if; end AddChildNode; ------------------------------------------------------------------------ function Child_Node (Current_Node : SyntaxNode) return SyntaxNode is begin return Table.Contents (Current_Node).Child; end Child_Node; ------------------------------------------------------------------------ function Next_Sibling (Current_Node : SyntaxNode) return SyntaxNode is begin SystemErrors.RT_Assert (C => Current_Node /= NullNode, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "STree.Next_Sibling : Program Error"); 
return Table.Contents (Current_Node).Next; end Next_Sibling; ------------------------------------------------------------------------ function Parent_Node (Current_Node : SyntaxNode) return SyntaxNode is begin SystemErrors.RT_Assert (C => Current_Node /= NullNode, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "STree.Parent_Node : Program Error"); return Table.Contents (Current_Node).Parent; end Parent_Node; ------------------------------------------------------------------------ function Syntax_Node_Type (Node : SyntaxNode) return SP_Symbols.SP_Symbol is begin return Table.Contents (Node).NodeType; end Syntax_Node_Type; ------------------------------------------------------------------------ function Node_Position (Node : SyntaxNode) return LexTokenManager.Token_Position is Position : LexTokenManager.Token_Position; begin if Node = NullNode then Position := LexTokenManager.Null_Token_Position; else Position := Table.Contents (Node).Position; end if; return Position; end Node_Position; ------------------------------------------------------------------------ procedure Set_Node_Lex_String (Sym : in Dictionary.Symbol; Node : in SyntaxNode) is Str : LexTokenManager.Lex_String; begin Str := Dictionary.GetSimpleName (Item => Sym); SystemErrors.RT_Assert (C => Node /= NullNode and then Syntax_Node_Type (Node => Node) in SP_Symbols.SP_Terminal and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => Table.Contents (Node).Ref_Str) = LexTokenManager.Str_Eq, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "STree.Set_Node_Lex_String : Program Error"); Table.Contents (Node).Ref_Str := Str; end Set_Node_Lex_String; ------------------------------------------------------------------------ function Node_Lex_String (Node : SyntaxNode) return LexTokenManager.Lex_String is begin SystemErrors.RT_Assert (C => Node /= NullNode and then Syntax_Node_Type (Node => Node) in SP_Symbols.SP_Terminal, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = SP_Symbols.SP_Terminal in STree.Node_Lex_String"); return Table.Contents (Node).Ref_Str; end Node_Lex_String; ------------------------------------------------------------------------ function Node_Token_String (Node : SyntaxNode) return LexTokenManager.Lex_String is begin return Table.Contents (Node).Token_Str; end Node_Token_String; ------------------------------------------------------------------------ procedure CollectNode (Node : in SyntaxNode) --# global in out Table; --# derives Table from *, --# Node; is TheNodeValue : NodeValue; begin TheNodeValue := Table.Contents (NullNode); Table.Contents (Node) := TheNodeValue; Table.Contents (Node).Child := Table.FreeList; Table.FreeList := Node; end CollectNode; ---------------------------------------------------------------------- procedure DeleteSyntaxTree (Root : in SyntaxNode; KeepConstants : in Boolean) is CurrNode, The_Next_Node, TempNode : SyntaxNode; begin CurrNode := Root; loop The_Next_Node := Child_Node (CurrNode); -- Subprogram constraints are needed by the VCG later, so we -- don't return such nodes to the free list. Similarly, -- constant declarations are needed by the Declarations -- package for proof rule generation. Furthermore, we also -- need to protect generic_actual_part to preserve -- expressions used to initialize generic formal objects. We -- need also to keep inherit_clause to calculate the -- dependency closure. 
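-- The loops that follow detach such protected subtrees from the tree,
-- clearing their Parent and Next links, rather than returning their nodes to
-- the free list.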
while The_Next_Node /= NullNode and then ((Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.procedure_constraint) or (Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.function_constraint) or (KeepConstants and Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.constant_declaration) or (Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.generic_actual_part) or (Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.inherit_clause)) loop Table.Contents (The_Next_Node).Parent := NullNode; TempNode := Next_Sibling (The_Next_Node); Table.Contents (The_Next_Node).Next := NullNode; The_Next_Node := TempNode; end loop; if The_Next_Node = NullNode then loop The_Next_Node := Next_Sibling (CurrNode); while The_Next_Node /= NullNode and then ((Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.procedure_constraint) or (Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.function_constraint) or (KeepConstants and Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.constant_declaration) or (Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.generic_actual_part) or (Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.inherit_clause)) loop Table.Contents (The_Next_Node).Parent := NullNode; TempNode := Next_Sibling (The_Next_Node); Table.Contents (The_Next_Node).Next := NullNode; The_Next_Node := TempNode; end loop; if The_Next_Node /= NullNode then CollectNode (CurrNode); exit; end if; The_Next_Node := Parent_Node (Current_Node => CurrNode); CollectNode (CurrNode); exit when The_Next_Node = NullNode; CurrNode := The_Next_Node; end loop; end if; exit when The_Next_Node = NullNode; CurrNode := The_Next_Node; end loop; end DeleteSyntaxTree; ----------------------------------------------------------------------- function TraverseAcross (RootNode : SyntaxNode; CurrNode : SyntaxNode) return SyntaxNode --# global in Table; -- Traverses across the tree in a pre-order fashion. is The_Next_Node : SyntaxNode; begin The_Next_Node := CurrNode; while The_Next_Node /= NullNode loop if The_Next_Node = RootNode then The_Next_Node := NullNode; exit; end if; if Next_Sibling (The_Next_Node) /= NullNode then The_Next_Node := Next_Sibling (The_Next_Node); exit; end if; The_Next_Node := Parent_Node (Current_Node => The_Next_Node); end loop; return The_Next_Node; end TraverseAcross; ----------------------------------------------------------------------- function Traverse (RootNode : SyntaxNode; CurrNode : SyntaxNode) return SyntaxNode --# global in Table; -- Traverses the tree in a pre-order fashion. 
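-- Visits the first child of CurrNode when it has one; otherwise uses
-- TraverseAcross to move to the next sibling of CurrNode or of its nearest
-- ancestor that has one, yielding NullNode once RootNode is reached.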
is The_Next_Node : SyntaxNode; begin if Child_Node (CurrNode) /= NullNode then -- Depth first The_Next_Node := Child_Node (CurrNode); else -- breadth next The_Next_Node := TraverseAcross (RootNode, CurrNode); end if; return The_Next_Node; end Traverse; ----------------------------------------------------------------------- function Last_Child_Of (Start_Node : SyntaxNode) return SyntaxNode is Last_Valid_Node_Found, Current_Node : SyntaxNode; begin SystemErrors.RT_Assert (C => Start_Node /= NullNode, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "STree.Last_Child_Of : Program Error"); Last_Valid_Node_Found := Start_Node; Current_Node := Start_Node; while Current_Node /= NullNode loop Last_Valid_Node_Found := Current_Node; Current_Node := Child_Node (Current_Node => Current_Node); end loop; return Last_Valid_Node_Found; end Last_Child_Of; -------------------------------------------------------------- function Last_Sibling_Of (Start_Node : SyntaxNode) return SyntaxNode is Last_Valid_Node_Found, Current_Node : SyntaxNode; begin SystemErrors.RT_Assert (C => Start_Node /= NullNode, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "STree.Last_Sibling_Of : Program Error"); Last_Valid_Node_Found := Start_Node; Current_Node := Start_Node; while Current_Node /= NullNode loop Last_Valid_Node_Found := Current_Node; Current_Node := Next_Sibling (Current_Node => Current_Node); end loop; return Last_Valid_Node_Found; end Last_Sibling_Of; -------------------------------------------------------------------- procedure Add_Node_Symbol (Node : in SyntaxNode; Sym : in Dictionary.Symbol) is begin if Dictionary.Is_Null_Symbol (Table.Contents (Node).Ref_Sym) then Table.Contents (Node).Ref_Sym := Sym; elsif Table.Contents (Node).Ref_Sym /= Sym then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Syntax_Tree_Walk_Error, Msg => "in STree.Add_Node_Symbol"); end if; --# accept Flow, 30, Dictionary.Dict, "Variable not referenced nor exported OK"; end Add_Node_Symbol; -------------------------------------------------------------------- function NodeSymbol (Node : SyntaxNode) return Dictionary.Symbol is Sym : Dictionary.Symbol; begin Sym := Table.Contents (Node).Ref_Sym; return Sym; end NodeSymbol; ------------------------------------------------------- procedure ReportUsage is begin -- TopUsed is peak usage, because freelist is used up before allocating -- a new cell at the top of the table Statistics.SetTableUsage (Statistics.SyntaxTree, Integer (Table.TopUsed)); end ReportUsage; ------------------------------------------------------- function NextNodeType (It : Iterator) return Iterator --# global in Table; is Node : SyntaxNode; NextIt : Iterator; begin Node := It.Current; while Node /= NullNode loop --# accept Flow, 41, "Expected stable expression"; if It.SearchDirection = Down then -- expect stable expression if Syntax_Node_Type (Node => Node) = It.SearchNodeType then -- This node was returned in the search so ignore all nodes -- below it. 
Node := TraverseAcross (It.Root, Node); else -- Get the next pre-order node Node := Traverse (RootNode => It.Root, CurrNode => Node); end if; else Node := Parent_Node (Current_Node => Node); end if; --# end accept; exit when Syntax_Node_Type (Node => Node) = It.SearchNodeType; end loop; if Node = NullNode then NextIt := NullIterator; else NextIt := Iterator' (TheSearchKind => It.TheSearchKind, SearchNodeType => It.SearchNodeType, SearchDirection => It.SearchDirection, Current => Node, Root => It.Root); end if; return NextIt; end NextNodeType; ------------------------------------------------------- function IsBranchNode (Node : SyntaxNode) return Boolean --# global in Table; is begin return Node /= NullNode and then -- has more than one child Child_Node (Node) /= NullNode and then Next_Sibling (Child_Node (Node)) /= NullNode; end IsBranchNode; ------------------------------------------------------- function NextBranch (It : Iterator) return Iterator --# global in Table; is Node : SyntaxNode; NextIt : Iterator; begin Node := It.Current; while Node /= NullNode loop --# accept Flow, 41, "Expected stable expression"; if It.SearchDirection = Down then -- expect stable expression Node := Traverse (RootNode => It.Root, CurrNode => Node); else Node := Parent_Node (Current_Node => Node); end if; --# end accept; exit when IsBranchNode (Node); end loop; if Node = NullNode then NextIt := NullIterator; else NextIt := Iterator' (TheSearchKind => It.TheSearchKind, SearchNodeType => It.SearchNodeType, SearchDirection => It.SearchDirection, Current => Node, Root => It.Root); end if; return NextIt; end NextBranch; ------------------------------------------------------- function IsFormalParameterNode (Node : SyntaxNode; RootNode : SyntaxNode) return Boolean --# global in Table; is MyNode : SyntaxNode; Result : Boolean; begin if Syntax_Node_Type (Node => Node) /= SP_Symbols.identifier or else Syntax_Node_Type (Node => Parent_Node (Current_Node => Node)) /= SP_Symbols.simple_name then Result := False; else Result := True; MyNode := Node; while MyNode /= RootNode loop if Syntax_Node_Type (Node => MyNode) = SP_Symbols.expression then Result := False; exit; end if; MyNode := Parent_Node (Current_Node => MyNode); end loop; end if; return Result; end IsFormalParameterNode; ------------------------------------------------------- function NextFormalParameter (It : Iterator) return Iterator --# global in Table; is Node : SyntaxNode; NextIt : Iterator; begin Node := It.Current; while Node /= NullNode loop Node := Traverse (RootNode => It.Root, CurrNode => Node); exit when IsFormalParameterNode (Node, It.Root); end loop; if Node = NullNode then NextIt := NullIterator; else NextIt := Iterator' (TheSearchKind => It.TheSearchKind, SearchNodeType => It.SearchNodeType, SearchDirection => It.SearchDirection, Current => Node, Root => It.Root); end if; return NextIt; end NextFormalParameter; ------------------------------------------------------- function NextNode (It : Iterator) return Iterator is NextIt : Iterator; begin case It.TheSearchKind is when Undefined => NextIt := NullIterator; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Syntax_Tree_Walk_Error, Msg => "in STree.NextNode"); when NodeTypeSearch => NextIt := NextNodeType (It); when BranchSearch => NextIt := NextBranch (It); when FormalParameterSearch => NextIt := NextFormalParameter (It); end case; return NextIt; end NextNode; ------------------------------------------------------- function Get_Node (It : Iterator) return SyntaxNode is begin return It.Current; end 
Get_Node; ------------------------------------------------------- function IsNull (It : Iterator) return Boolean is begin return It = NullIterator; end IsNull; ------------------------------------------------------- function Find_First_Node (Node_Kind : SP_Symbols.SP_Symbol; From_Root : SyntaxNode; In_Direction : TraverseDirection) return Iterator is It : Iterator; begin It := Iterator' (TheSearchKind => NodeTypeSearch, SearchNodeType => Node_Kind, SearchDirection => In_Direction, Current => From_Root, Root => From_Root); if Syntax_Node_Type (Node => From_Root) /= It.SearchNodeType then It := NextNode (It); end if; return It; end Find_First_Node; ------------------------------------------------------- function Find_First_Branch_Node (From_Root : SyntaxNode; In_Direction : TraverseDirection) return Iterator is It : Iterator; begin It := Iterator' (TheSearchKind => BranchSearch, SearchNodeType => SP_Symbols.SPEND, SearchDirection => In_Direction, Current => From_Root, Root => From_Root); if not IsBranchNode (From_Root) then It := NextNode (It); end if; return It; end Find_First_Branch_Node; ------------------------------------------------------- function Find_First_Formal_Parameter_Node (From_Root : SyntaxNode) return Iterator is It : Iterator; begin It := Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => From_Root, In_Direction => Down); return Iterator' (TheSearchKind => FormalParameterSearch, SearchNodeType => SP_Symbols.SPEND, SearchDirection => Down, Current => It.Current, Root => From_Root); end Find_First_Formal_Parameter_Node; ------------------------------------------------------- function FindLastItemInDependencyRelation (Node : SyntaxNode) return LexTokenManager.Token_Position is separate; ------------------------------------------------------- function FindLastActualParameterNode (FromRoot : SyntaxNode) return SyntaxNode is LastFormal : SyntaxNode; LastActual : SyntaxNode; It : Iterator; begin SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => FromRoot) = SP_Symbols.named_argument_association, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect FromRoot = named_argument_association in STree.FindLastActualParameterNode"); LastFormal := NullNode; -- Get the first formal It := Find_First_Formal_Parameter_Node (From_Root => FromRoot); -- find the last formal while not IsNull (It) loop LastFormal := Get_Node (It => It); It := NextNode (It); end loop; if LastFormal = NullNode then LastActual := NullNode; else -- work out the last actual -- simple_name => expression LastActual := Next_Sibling (Get_Node (It => Find_First_Node (Node_Kind => SP_Symbols.simple_name, From_Root => LastFormal, In_Direction => Up))); end if; return LastActual; end FindLastActualParameterNode; ------------------------------------------------------- function Expression_From_Positional_Argument_Association (Node : SyntaxNode) return SyntaxNode is Result : SyntaxNode; begin Result := Child_Node (Current_Node => Node); -- ASSUME Result = positional_argument_association OR expression if Syntax_Node_Type (Node => Result) = SP_Symbols.positional_argument_association then -- ASSUME Result = positional_argument_association Result := Next_Sibling (Current_Node => Result); elsif Syntax_Node_Type (Node => Result) /= SP_Symbols.expression then Result := NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = positional_argument_association OR expression " & "in STree.Expression_From_Positional_Argument_Association"); end if; -- ASSUME Result = 
expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Result) = SP_Symbols.expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = expression in STree.Expression_From_Positional_Argument_Association"); return Result; end Expression_From_Positional_Argument_Association; ------------------------------------------------------- function Expression_From_Named_Argument_Association (Node : SyntaxNode) return SyntaxNode is Result : SyntaxNode; begin Result := Child_Node (Current_Node => Node); -- ASSUME Result = named_argument_association OR simple_name if Syntax_Node_Type (Node => Result) = SP_Symbols.named_argument_association then -- ASSUME Result = named_argument_association Result := Next_Sibling (Current_Node => Result); elsif Syntax_Node_Type (Node => Result) /= SP_Symbols.simple_name then Result := NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = named_argument_association OR simple_name " & "in STree.Expression_From_Named_Argument_Association"); end if; -- ASSUME Result = simple_name SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Result) = SP_Symbols.simple_name, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = simple_name in STree.Expression_From_Named_Argument_Association"); -- skip over parameter name to get expression Result := Next_Sibling (Current_Node => Result); -- ASSUME Result = expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Result) = SP_Symbols.expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = expression in STree.Expression_From_Named_Argument_Association"); return Result; end Expression_From_Named_Argument_Association; ------------------------------------------------------- function LoopParameterSpecFromEndOfLoop (Node : SyntaxNode) return SyntaxNode is Result : SyntaxNode; begin SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Node) = SP_Symbols.end_of_loop, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = end_of_loop in STree.LoopParameterSpecFromEndOfLoop"); Result := Child_Node (Parent_Node (Current_Node => Node)); -- simple_name or loop_statement_opt if Syntax_Node_Type (Node => Result) = SP_Symbols.simple_name then Result := Next_Sibling (Result); -- loop_statement_opt end if; Result := Child_Node (Child_Node (Result)); SystemErrors.RT_Assert (C => Result = NullNode or else Syntax_Node_Type (Node => Result) = SP_Symbols.loop_parameter_specification or else Syntax_Node_Type (Node => Result) = SP_Symbols.condition, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = loop_parameter_specification OR condition OR NULL " & "in STree.LoopParameterSpecFromEndOfLoop"); return Result; end LoopParameterSpecFromEndOfLoop; ------------------------------------------------------- function IdentifierHasTildeSuffix (Node : SyntaxNode) return Boolean is begin SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = identifier in STree.IdentifierHasTildeSuffix"); return Syntax_Node_Type (Node => Next_Sibling (Node)) = SP_Symbols.tilde; end IdentifierHasTildeSuffix; ------------------------------------------------------- function IdentifierHasPercentSuffix (Node : SyntaxNode) return Boolean is begin SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = identifier in 
STree.IdentifierHasPercentSuffix"); return Syntax_Node_Type (Node => Next_Sibling (Node)) = SP_Symbols.percent; end IdentifierHasPercentSuffix; ------------------------------------------------------- begin Table.CurrSyntaxNode := NullNode; --# accept Flow, 23, Table.Contents, "Init partial but effective"; Table.Contents (NullNode) := -- Expect flow error on 1st write to array NodeValue' (NodeType => SP_Symbols.SPEND, Position => LexTokenManager.Null_Token_Position, Parent => NullNode, Next => NullNode, Child => NullNode, Token_Str => LexTokenManager.Null_String, Ref_Str => LexTokenManager.Null_String, Ref_Sym => Dictionary.NullSymbol); --# end accept; Table.FreeList := NullNode; Table.TopUsed := NullNode; --# accept Flow, 602, Table, Table.Contents, "Init partial but effective"; end STree; -- Init. is partial but effective. Expect 1 warning. spark-2012.0.deb/examiner/sem-walk_expression_p-calc_binary_operator.adb0000644000175000017500000002125411753202336025422 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- This procedure is to expression evaluation what -- -- CheckBinaryOperator is to type checking. It is called from -- -- wf_term, wf_simple_expression, wf_relation, wf_expression and -- -- wf_factor to calculate effect of binary operators. This -- -- procedure is called immediately after calls to -- -- CheckBinaryOperator so that if the sub-expression is not -- -- wellformed then Result = Unknown_Type_Record on entry to this -- -- procedure. 
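-- For example, for a hypothetical modular type M with M'Modulus = 256, the
-- static addition 200 + 100 is reduced by Apply_Modulus_If_Necessary to
-- (200 + 100) mod 256 = 44, whereas "/", "mod" and "rem" never need such a
-- reduction (see the per-operator comments below).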
-- ---------------------------------------------------------------------------- separate (Sem.Walk_Expression_P) procedure Calc_Binary_Operator (Node_Pos : in LexTokenManager.Token_Position; Operator : in SP_Symbols.SP_Symbol; Left_Val, Right_Val : in Maths.Value; Is_Annotation : in Boolean; Result : in out Sem.Exp_Record) is type Err_Lookup is array (Boolean) of Natural; Which_Err : constant Err_Lookup := Err_Lookup'(False => 402, True => 399); Err : Maths.ErrorCode; Ans : Maths.Value; procedure Apply_Modulus_If_Necessary (Result : in Sem.Exp_Record; Ans : in out Maths.Value; Err : in out Maths.ErrorCode) --# global in Dictionary.Dict; --# in LexTokenManager.State; --# derives Ans, --# Err from Ans, --# Dictionary.Dict, --# Err, --# LexTokenManager.State, --# Result; is Temp_Arg : Maths.Value; begin if Err = Maths.NoError then if Dictionary.TypeIsModular (Result.Type_Symbol) then Temp_Arg := Ans; Maths.Modulus (FirstNum => Temp_Arg, SecondNum => Maths.ValueRep (Dictionary.GetScalarAttributeValue (Base => False, Name => LexTokenManager.Modulus_Token, TypeMark => Result.Type_Symbol)), Result => Ans, Ok => Err); end if; end if; end Apply_Modulus_If_Necessary; begin -- Calc_Binary_Operator Err := Maths.NoError; if Result /= Sem.Unknown_Type_Record then -- expression was wellformed so we must try and calculate result case Operator is when SP_Symbols.multiply => Maths.Multiply (Left_Val, Right_Val, -- to get Ans, Err); -- multiply might need a modulo reduction (See LRM 4.5.5(10)) Apply_Modulus_If_Necessary (Result => Result, Ans => Ans, Err => Err); when SP_Symbols.divide => Maths.Divide (Left_Val, Right_Val, -- to get Ans, Err); -- Divide never needs a modulo reduction (See LRM 4.5.5(10)) when SP_Symbols.RWmod => Maths.Modulus (Left_Val, Right_Val, -- to get Ans, Err); -- mod never needs a modulo reduction (See LRM 4.5.5(10)) when SP_Symbols.RWrem => Maths.Remainder (Left_Val, Right_Val, -- to get Ans, Err); -- rem never needs a modulo reduction (See LRM 4.5.5(10)) when SP_Symbols.plus => Maths.Add (Left_Val, Right_Val, -- to get Ans, Err); -- plus might need a modulo reduction (See LRM 4.5.3(11)) Apply_Modulus_If_Necessary (Result => Result, Ans => Ans, Err => Err); when SP_Symbols.minus => Maths.Subtract (Left_Val, Right_Val, -- to get Ans, Err); -- minus might need a modulo reduction (See LRM 4.5.3(11)) Apply_Modulus_If_Necessary (Result => Result, Ans => Ans, Err => Err); when SP_Symbols.RWand | SP_Symbols.RWandthen => Ans := Maths.AndOp (Left_Val, Right_Val); when SP_Symbols.RWor | SP_Symbols.RWorelse => Ans := Maths.OrOp (Left_Val, Right_Val); when SP_Symbols.RWxor => Ans := Maths.XorOp (Left_Val, Right_Val); when SP_Symbols.double_star => Maths.RaiseByPower (Left_Val, Right_Val, -- to get Ans, Err); -- ** might need a modulo reduction (See LRM 4.5.6(11)) Apply_Modulus_If_Necessary (Result => Result, Ans => Ans, Err => Err); when SP_Symbols.equals => if Left_Val = Maths.NoValue or else Right_Val = Maths.NoValue then Ans := Maths.NoValue; else Ans := Maths.BoolToValue (Left_Val = Right_Val); end if; Err := Maths.NoError; when SP_Symbols.not_equal => if Left_Val = Maths.NoValue or else Right_Val = Maths.NoValue then Ans := Maths.NoValue; else Ans := Maths.BoolToValue (Left_Val /= Right_Val); end if; Err := Maths.NoError; when SP_Symbols.less_than => Maths.Lesser (Left_Val, Right_Val, -- to get Ans, Err); when SP_Symbols.less_or_equal => Maths.LesserOrEqual (Left_Val, Right_Val, -- to get Ans, Err); when SP_Symbols.greater_or_equal => Maths.GreaterOrEqual (Left_Val, Right_Val, -- to get 
Ans, Err); when SP_Symbols.greater_than => Maths.Greater (Left_Val, Right_Val, -- to get Ans, Err); when others => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Math_Error, Msg => "in Calc_Binary_Operator"); Ans := Maths.NoValue; -- define Ans here to avoid subsequent flow errors end case; Result.Value := Ans; case Err is when Maths.NoError => null; when Maths.DivideByZero => ErrorHandler.Semantic_Error (Err_Num => 400, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => LexTokenManager.Null_String); when Maths.ConstraintError => ErrorHandler.Semantic_Error (Err_Num => Which_Err (Is_Annotation), Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => LexTokenManager.Null_String); when Maths.OverFlow => Result.Value := Maths.NoValue; ErrorHandler.Semantic_Warning (Err_Num => 200, Position => Node_Pos, Id_Str => LexTokenManager.Null_String); when others => -- indicates internal error in maths package SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Math_Error, Msg => "in Calc_Binary_Operator (2nd case)"); end case; end if; end Calc_Binary_Operator; spark-2012.0.deb/examiner/dictionary-write.adb0000644000175000017500000036776111753202336020242 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
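-- If not, go to http://www.gnu.org/licenses for a complete copy of the license.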
-- --============================================================================= with Maths; with FileSystem; with E_Strings; separate (Dictionary) procedure Write (File_Name : in E_Strings.T; Status : out SPARK_IO.File_Status) is File : SPARK_IO.File_Type := SPARK_IO.Null_File; Local_File_Name : E_Strings.T; -------------------------------------------------------------------------------- procedure Write_Library_Units (File : in SPARK_IO.File_Type) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State; is Library_Units : Iterator; -------------------------------------------------------------------------------- procedure Write_Generic_Formal_Parameters (File : in SPARK_IO.File_Type; The_Generic : in RawDict.Subprogram_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# The_Generic; is Parameter_It : Iterator; Number : Positive; -------------------------------------------------------------------------------- procedure Write_Generic_Formal_Parameter (File : in SPARK_IO.File_Type; The_Generic : in RawDict.Subprogram_Info_Ref; Number : in Positive; The_Generic_Parameter : in RawDict.Generic_Parameter_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# Number, --# The_Generic, --# The_Generic_Parameter; is procedure Write_Generic_Type (File : in SPARK_IO.File_Type; The_Generic_Parameter : in RawDict.Generic_Parameter_Info_Ref) --# global in Dict; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# The_Generic_Parameter; is Type_Mark : RawDict.Type_Info_Ref; begin Type_Mark := RawDict.Get_Generic_Parameter_Type (The_Generic_Parameter => The_Generic_Parameter); case RawDict.Get_Type_Discriminant (Type_Mark) is when Generic_Type_Item => Write_String (File, "generic "); case RawDict.Get_Type_Kind_Of_Generic (Type_Mark => Type_Mark) is when Invalid_Generic_Type => Write_String (File, "invalid generic type "); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Write_Generic_Type"); when Generic_Private_Type => Write_String (File, "private "); when Generic_Discrete_Type => Write_String (File, "discrete "); when Generic_Integer_Type => Write_String (File, "integer "); when Generic_Modular_Type => Write_String (File, "modular "); when Generic_Floating_Point_Type => Write_String (File, "floating point "); when Generic_Fixed_Point_Type => Write_String (File, "fixed point "); when Generic_Array_Type => Write_String (File, "array "); end case; when others => -- non-exec code Write_String (File, "invalid generic type "); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Write_Generic_Type"); end case; end Write_Generic_Type; begin -- Write_Generic_Formal_Parameter Write_String (File, "generic formal parameter # "); Write_Integer (File, Number); Write_String (File, " of "); Write_Name (File => File, Item => RawDict.Get_Subprogram_Symbol (The_Generic)); Write_String (File, " is "); Write_Simple_Name (File => File, Item => RawDict.Get_Generic_Parameter_Symbol (The_Generic_Parameter)); Write_String (File, " which is "); case RawDict.Get_Generic_Parameter_Kind (The_Generic_Parameter => The_Generic_Parameter) is when 
Generic_Type_Parameter => Write_String (File, "a "); Write_Generic_Type (File => File, The_Generic_Parameter => The_Generic_Parameter); Write_String (File, "type parameter"); when Generic_Object_Parameter => Write_String (File, "a generic object parameter of type "); Write_Name (File => File, Item => RawDict.Get_Type_Symbol (RawDict.Get_Constant_Type (The_Constant => RawDict.Get_Generic_Parameter_Object (The_Generic_Parameter => The_Generic_Parameter)))); end case; Write_Line (File, " ;"); end Write_Generic_Formal_Parameter; begin -- Write_Generic_Formal_Parameters Parameter_It := First_Generic_Formal_Subprogram_Parameter (The_Subprogram => The_Generic); Number := 1; while not IsNullIterator (Parameter_It) and then Number < Positive'Last loop Write_Generic_Formal_Parameter (File => File, The_Generic => The_Generic, Number => Number, The_Generic_Parameter => RawDict.Get_Generic_Parameter_Info_Ref (CurrentSymbol (Parameter_It))); Parameter_It := NextSymbol (Parameter_It); Number := Number + 1; end loop; end Write_Generic_Formal_Parameters; -------------------------------------------------------------------------------- function First_Library_Unit return Iterator --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Item : Symbol := NullSymbol; Found : Boolean := False; Library_Units : Iterator := NullIterator; begin The_Declaration := RawDict.Get_Package_First_Local_Declaration (The_Package => Get_Predefined_Package_Standard); while (The_Declaration /= RawDict.Null_Declaration_Info_Ref and then not Found) loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); case RawDict.GetSymbolDiscriminant (Item) is when Package_Symbol => Found := The_Declaration /= RawDict.Get_Package_Body (The_Package => RawDict.Get_Package_Info_Ref (Item => Item)); when Subprogram_Symbol => Found := The_Declaration /= RawDict.Get_Subprogram_Body (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Item)); when others => Found := False; end case; if not Found then The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; if The_Declaration /= RawDict.Null_Declaration_Info_Ref then Library_Units := Iterator' (LibraryUnitIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (The_Declaration => The_Declaration)); end if; return Library_Units; end First_Library_Unit; -------------------------------------------------------------------------------- -- Subprograms that should be in Write_Library_Unit -------------------------------------------------------------------------------- procedure Write_With_References (File : in SPARK_IO.File_Type; Scope : in Scopes) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# Scope; is With_Reference : Iterator; -------------------------------------------------------------------------------- function First_Withed_Package (Scope : Scopes) return Iterator --# global in Dict; is Region : Symbol; The_Generic_Unit : RawDict.Generic_Unit_Info_Ref; The_Context_Clause : RawDict.Context_Clause_Info_Ref; Withed_Packages : Iterator := NullIterator; begin Region := GetRegion (Scope); case Get_Visibility (Scope => Scope) is when Visible | Privat => case RawDict.GetSymbolDiscriminant (Region) is when Package_Symbol => The_Context_Clause := RawDict.Get_Package_Visible_With_Clauses (The_Package => RawDict.Get_Package_Info_Ref (Item => Region)); when Generic_Unit_Symbol => The_Generic_Unit := 
RawDict.Get_Generic_Unit_Info_Ref (Item => Region); case RawDict.Get_Generic_Unit_Kind (The_Generic_Unit => The_Generic_Unit) is when Generic_Of_Package => The_Context_Clause := RawDict.Get_Package_Visible_With_Clauses (The_Package => RawDict.Get_Generic_Unit_Owning_Package (The_Generic_Unit => The_Generic_Unit)); when Generic_Of_Subprogram => The_Context_Clause := RawDict.Get_Subprogram_With_Clauses (The_Subprogram => RawDict.Get_Generic_Unit_Owning_Subprogram (The_Generic_Unit => The_Generic_Unit)); end case; when others => -- non-exec code The_Context_Clause := RawDict.Null_Context_Clause_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.First_Withed_Package"); end case; when Local => case RawDict.GetSymbolDiscriminant (Region) is when Package_Symbol => The_Context_Clause := RawDict.Get_Package_Local_With_Clauses (The_Package => RawDict.Get_Package_Info_Ref (Item => Region)); when Subprogram_Symbol => The_Context_Clause := RawDict.Get_Subprogram_With_Clauses (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Region)); when others => -- non-exec code The_Context_Clause := RawDict.Null_Context_Clause_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.First_Withed_Package"); end case; end case; if The_Context_Clause /= RawDict.Null_Context_Clause_Info_Ref then case RawDict.Get_Context_Clause_Is_Subprogram (The_Context_Clause => The_Context_Clause) is when False => Withed_Packages := Iterator' (WithedPackageIterator, IsAbstract, RawDict.Get_Package_Symbol (RawDict.Get_Context_Clause_Package (The_Context_Clause => The_Context_Clause)), RawDict.Get_Context_Clause_Symbol (The_Context_Clause)); when True => Withed_Packages := Iterator' (WithedPackageIterator, IsAbstract, RawDict.Get_Subprogram_Symbol (RawDict.Get_Context_Clause_Subprogram (The_Context_Clause => The_Context_Clause)), RawDict.Get_Context_Clause_Symbol (The_Context_Clause)); end case; end if; return Withed_Packages; end First_Withed_Package; -------------------------------------------------------------------------------- procedure Write_With_Reference (File : in SPARK_IO.File_Type; The_Package : in RawDict.Package_Info_Ref; Scope : in Scopes) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# Scope, --# The_Package; is begin Write_String (File, "with reference to "); Write_Simple_Name (File => File, Item => RawDict.Get_Package_Symbol (The_Package)); Write_String (File, " in "); Write_Scope (File, Scope); Write_Line (File, " ;"); end Write_With_Reference; begin -- Write_With_References With_Reference := First_Withed_Package (Scope => Scope); loop exit when IsNullIterator (With_Reference); Write_With_Reference (File => File, The_Package => RawDict.Get_Package_Info_Ref (CurrentSymbol (With_Reference)), Scope => Scope); With_Reference := NextSymbol (With_Reference); end loop; end Write_With_References; -------------------------------------------------------------------------------- procedure Write_Inherits_References (File : in SPARK_IO.File_Type; Compilation_Unit : in Symbol) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Compilation_Unit, --# Dict, --# File, --# LexTokenManager.State; is Inherits_Reference : Iterator; -------------------------------------------------------------------------------- function First_Inherited_Package 
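-- Returns an iterator over the inherit clauses of a compilation unit; only
-- packages and the main program carry inherit clauses, so for any other
-- subprogram the iterator is null.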
(Compilation_Unit : Symbol) return Iterator --# global in Dict; is The_Context_Clause : RawDict.Context_Clause_Info_Ref; Inherited_Packages : Iterator := NullIterator; begin case RawDict.GetSymbolDiscriminant (Compilation_Unit) is when Package_Symbol => The_Context_Clause := RawDict.Get_Package_Inherit_Clauses (The_Package => RawDict.Get_Package_Info_Ref (Item => Compilation_Unit)); when Subprogram_Symbol => if Is_Main_Program (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Compilation_Unit)) then The_Context_Clause := RawDict.Get_Subprogram_Inherit_Clauses (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Compilation_Unit)); else The_Context_Clause := RawDict.Null_Context_Clause_Info_Ref; end if; when others => -- non-exec code The_Context_Clause := RawDict.Null_Context_Clause_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.First_Inherited_Package"); end case; if The_Context_Clause /= RawDict.Null_Context_Clause_Info_Ref then case RawDict.Get_Context_Clause_Is_Subprogram (The_Context_Clause => The_Context_Clause) is when False => Inherited_Packages := Iterator' (InheritedPackageIterator, IsAbstract, RawDict.Get_Package_Symbol (RawDict.Get_Context_Clause_Package (The_Context_Clause => The_Context_Clause)), RawDict.Get_Context_Clause_Symbol (The_Context_Clause)); when True => Inherited_Packages := Iterator' (InheritedPackageIterator, IsAbstract, RawDict.Get_Subprogram_Symbol (RawDict.Get_Context_Clause_Subprogram (The_Context_Clause => The_Context_Clause)), RawDict.Get_Context_Clause_Symbol (The_Context_Clause)); end case; end if; return Inherited_Packages; end First_Inherited_Package; -------------------------------------------------------------------------------- procedure Write_Inherits_Reference (File : in SPARK_IO.File_Type; The_Package : in RawDict.Package_Info_Ref; Compilation_Unit : in Symbol) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Compilation_Unit, --# Dict, --# File, --# LexTokenManager.State, --# The_Package; is begin Write_String (File, "inherits reference to "); Write_Simple_Name (File => File, Item => RawDict.Get_Package_Symbol (The_Package)); Write_String (File, " in "); Write_Name (File => File, Item => Compilation_Unit); Write_Line (File, " ;"); end Write_Inherits_Reference; begin -- Write_Inherits_References Inherits_Reference := First_Inherited_Package (Compilation_Unit => Compilation_Unit); loop exit when IsNullIterator (Inherits_Reference); Write_Inherits_Reference (File => File, The_Package => RawDict.Get_Package_Info_Ref (CurrentSymbol (Inherits_Reference)), Compilation_Unit => Compilation_Unit); Inherits_Reference := NextSymbol (Inherits_Reference); end loop; end Write_Inherits_References; -------------------------------------------------------------------------------- procedure Write_Package_Info (File : in SPARK_IO.File_Type; The_Package : in RawDict.Package_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# The_Package; is Scope : Scopes; begin if RawDict.Get_Package_Parent (The_Package => The_Package) /= RawDict.Null_Package_Info_Ref then Write_String (File, "child "); end if; Write_String (File, "package named "); Write_Simple_Name (File => File, Item => RawDict.Get_Package_Symbol (The_Package)); Scope := Get_Package_Scope (The_Package => The_Package); if 
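-- The "declared in" clause is suppressed for library-level packages, i.e.
-- those whose enclosing region is the predefined package Standard.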
RawDict.GetSymbolDiscriminant (GetRegion (Scope)) /= Package_Symbol or else RawDict.Get_Package_Info_Ref (Item => GetRegion (Scope)) /= Get_Predefined_Package_Standard then Write_String (File, " declared in "); Write_Scope (File, Scope); end if; Write_Line (File, " ;"); end Write_Package_Info; -------------------------------------------------------------------------------- function First_Declarative_Item (Scope : Scopes) return Iterator --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; Region : Symbol; Item : Symbol := NullSymbol; Found : Boolean := False; Declarative_Items : Iterator := NullIterator; begin case Get_Visibility (Scope => Scope) is when Visible => The_Declaration := RawDict.Get_Package_First_Visible_Declaration (The_Package => RawDict.Get_Package_Info_Ref (GetRegion (Scope))); when Local => Region := GetRegion (Scope); case RawDict.GetSymbolDiscriminant (Region) is when Package_Symbol => The_Declaration := RawDict.Get_Package_First_Local_Declaration (The_Package => RawDict.Get_Package_Info_Ref (Item => Region)); when Subprogram_Symbol => The_Declaration := RawDict.Get_Subprogram_First_Declaration (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Region)); when others => The_Declaration := RawDict.Null_Declaration_Info_Ref; end case; when Privat => The_Declaration := RawDict.Get_Package_First_Private_Declaration (The_Package => RawDict.Get_Package_Info_Ref (GetRegion (Scope))); end case; while (The_Declaration /= RawDict.Null_Declaration_Info_Ref and then not Found) loop Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); case RawDict.GetSymbolDiscriminant (Item) is when Package_Symbol => Found := The_Declaration /= RawDict.Get_Package_Body (The_Package => RawDict.Get_Package_Info_Ref (Item => Item)); when Subprogram_Symbol => Found := The_Declaration /= RawDict.Get_Subprogram_Body (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Item)); when others => Found := True; end case; if not Found then The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end if; end loop; if The_Declaration /= RawDict.Null_Declaration_Info_Ref then Declarative_Items := Iterator' (DeclarativeItemIterator, IsAbstract, Item, RawDict.Get_Declaration_Symbol (The_Declaration => The_Declaration)); end if; return Declarative_Items; end First_Declarative_Item; -------------------------------------------------------------------------------- procedure Write_Static_Value (File : in SPARK_IO.File_Type; Value : in LexTokenManager.Lex_String; Type_Mark : in RawDict.Type_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# Type_Mark, --# Value; is Root_Type : RawDict.Type_Info_Ref; -------------------------------------------------------------------------------- procedure Write_Lex_String (File : in SPARK_IO.File_Type; Lex_Str : in LexTokenManager.Lex_String) --# global in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# LexTokenManager.State, --# Lex_Str; is begin E_Strings.Put_String (File => File, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Lex_Str)); end Write_Lex_String; begin -- Write_Static_Value Root_Type := Get_Root_Type (Type_Mark => Type_Mark); if Type_Mark = Get_Unknown_Type_Mark then Write_String (File, " unknown "); elsif Type_Is_Numeric (Type_Mark => Type_Mark) or else Root_Type = Get_Predefined_Character_Type 
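-- Numeric and character values are rendered via the Maths package; Boolean
-- and String values are echoed as their lexical token; enumeration values
-- are written as the literal selected by their position; anything else is
-- reported as unknown.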
then E_Strings.Put_String (File => File, E_Str => Maths.ValueToString (Maths.ValueRep (Value))); elsif Root_Type = Get_Predefined_Boolean_Type or else Root_Type = Get_Predefined_String_Type then Write_Lex_String (File => File, Lex_Str => Value); elsif RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Enumeration_Type_Item then Write_Name (File => File, Item => Get_Enumeration_Literal (Type_Mark => Type_Mark, Position => Value)); else Write_String (File, " unknown "); end if; end Write_Static_Value; -------------------------------------------------------------------------------- procedure Write_Subprogram_Global_Variables (Abstraction : in Abstractions; File : in SPARK_IO.File_Type; The_Subprogram : in RawDict.Subprogram_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Abstraction, --# Dict, --# File, --# LexTokenManager.State, --# The_Subprogram; is Global_Variable : Iterator; -------------------------------------------------------------------------------- procedure Write_Global_Variable (Abstraction : in Abstractions; File : in SPARK_IO.File_Type; The_Subprogram : in RawDict.Subprogram_Info_Ref; Global_Variable : in Symbol) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Abstraction, --# Dict, --# File, --# Global_Variable, --# LexTokenManager.State, --# The_Subprogram; is The_Mode : Modes; begin if Abstraction = IsRefined then Write_String (File, "refined "); end if; Write_String (File, "global variable named "); Write_Name (File => File, Item => Global_Variable); Write_String (File, " of mode "); case RawDict.GetSymbolDiscriminant (Global_Variable) is when Variable_Symbol => The_Mode := Get_Subprogram_Variable_Global_Mode (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Variable => RawDict.Get_Variable_Info_Ref (Item => Global_Variable)); when Subprogram_Parameter_Symbol => The_Mode := Get_Subprogram_Parameter_Global_Mode (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => Global_Variable)); when others => -- non-exec code The_Mode := InvalidMode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Write_Global_Variable"); end case; case The_Mode is when DefaultMode => Write_String (File, "default"); when InMode => Write_String (File, "in"); when OutMode => Write_String (File, "out"); when InOutMode => Write_String (File, "in out"); when InvalidMode => Write_String (File, "invalid"); end case; Write_String (File, " is referenced by "); Write_Name (File => File, Item => RawDict.Get_Subprogram_Symbol (The_Subprogram)); Write_Line (File, " ;"); end Write_Global_Variable; begin -- Write_Subprogram_Global_Variables Global_Variable := First_Subprogram_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Abstraction); loop exit when IsNullIterator (Global_Variable); Write_Global_Variable (Abstraction => Abstraction, File => File, The_Subprogram => The_Subprogram, Global_Variable => CurrentSymbol (Global_Variable)); Global_Variable := NextSymbol (Global_Variable); end loop; end Write_Subprogram_Global_Variables; -------------------------------------------------------------------------------- procedure Write_Task_Type_Global_Variables (Abstraction : in Abstractions; File : in SPARK_IO.File_Type; The_Task_Type : in RawDict.Type_Info_Ref) --# global in Dict; --# in 
LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Abstraction, --# Dict, --# File, --# LexTokenManager.State, --# The_Task_Type; is Global_Variable : Iterator; -------------------------------------------------------------------------------- procedure Write_Global_Variable (Abstraction : in Abstractions; File : in SPARK_IO.File_Type; The_Task_Type : in RawDict.Type_Info_Ref; Global_Variable : in RawDict.Variable_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Abstraction, --# Dict, --# File, --# Global_Variable, --# LexTokenManager.State, --# The_Task_Type; is The_Mode : Modes; begin if Abstraction = IsRefined then Write_String (File, "refined "); end if; Write_String (File, "global variable named "); Write_Name (File => File, Item => RawDict.Get_Variable_Symbol (Global_Variable)); Write_String (File, " of mode "); The_Mode := Get_Task_Type_Variable_Global_Mode (The_Task_Type => The_Task_Type, Abstraction => Abstraction, The_Variable => Global_Variable); case The_Mode is when DefaultMode => Write_String (File, "default"); when InMode => Write_String (File, "in"); when OutMode => Write_String (File, "out"); when InOutMode => Write_String (File, "in out"); when InvalidMode => Write_String (File, "invalid"); end case; Write_String (File, " is referenced by "); Write_Name (File => File, Item => RawDict.Get_Type_Symbol (The_Task_Type)); Write_Line (File, " ;"); end Write_Global_Variable; begin Global_Variable := First_Task_Type_Global_Variable (The_Task_Type => The_Task_Type, Abstraction => Abstraction); loop exit when IsNullIterator (Global_Variable); Write_Global_Variable (Abstraction => Abstraction, File => File, The_Task_Type => The_Task_Type, Global_Variable => RawDict.Get_Variable_Info_Ref (CurrentSymbol (Global_Variable))); Global_Variable := NextSymbol (Global_Variable); end loop; end Write_Task_Type_Global_Variables; -------------------------------------------------------------------------------- procedure Write_Subprogram_Dependency_Clauses (Abstraction : in Abstractions; File : in SPARK_IO.File_Type; The_Subprogram : in RawDict.Subprogram_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Abstraction, --# Dict, --# File, --# LexTokenManager.State, --# The_Subprogram; is Export : Iterator; -------------------------------------------------------------------------------- procedure Write_Dependencies (Abstraction : in Abstractions; File : in SPARK_IO.File_Type; The_Subprogram : in RawDict.Subprogram_Info_Ref; Export : in Symbol) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Abstraction, --# Dict, --# Export, --# File, --# LexTokenManager.State, --# The_Subprogram; is Dependency : Iterator; -------------------------------------------------------------------------------- procedure Write_Dependency (Abstraction : in Abstractions; File : in SPARK_IO.File_Type; The_Subprogram : in RawDict.Subprogram_Info_Ref; Export : in Symbol; Import : in Symbol) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Abstraction, --# Dict, --# Export, --# File, --# Import, --# LexTokenManager.State, --# The_Subprogram; is begin if Abstraction = IsRefined then Write_String (File, "refined "); end if; if RawDict.GetSymbolDiscriminant (Export) = Variable_Symbol and 
then Get_Own_Variable_Or_Constituent_Mode (The_Variable => RawDict.Get_Variable_Info_Ref (Item => Export)) = InMode then Write_String (File, "implicit "); end if; Write_String (File, "export named "); Write_Name (File => File, Item => Export); Write_String (File, " is "); if RawDict.GetSymbolDiscriminant (Import) = Variable_Symbol and then Get_Own_Variable_Or_Constituent_Mode (The_Variable => RawDict.Get_Variable_Info_Ref (Item => Import)) = OutMode then Write_String (File, "implicitly "); end if; Write_String (File, "derived from "); if Import = NullSymbol then Write_String (File, "nothing"); else Write_Name (File => File, Item => Import); end if; Write_String (File, " in "); Write_Name (File => File, Item => RawDict.Get_Subprogram_Symbol (The_Subprogram)); Write_Line (File, " ;"); end Write_Dependency; begin -- Write_Dependencies Dependency := First_Subprogram_Dependency (The_Subprogram => The_Subprogram, Abstraction => Abstraction, The_Export => Export); if IsNullIterator (Dependency) then Write_Dependency (Abstraction => Abstraction, File => File, The_Subprogram => The_Subprogram, Export => Export, Import => NullSymbol); else loop exit when IsNullIterator (Dependency); Write_Dependency (Abstraction => Abstraction, File => File, The_Subprogram => The_Subprogram, Export => Export, Import => CurrentSymbol (Dependency)); Dependency := NextSymbol (Dependency); end loop; end if; end Write_Dependencies; begin -- Write_Subprogram_Dependency_Clauses Export := First_Subprogram_Export (The_Subprogram => The_Subprogram, Abstraction => Abstraction); loop exit when IsNullIterator (Export); Write_Dependencies (Abstraction => Abstraction, File => File, The_Subprogram => The_Subprogram, Export => CurrentSymbol (Export)); Export := NextSymbol (Export); end loop; end Write_Subprogram_Dependency_Clauses; -------------------------------------------------------------------------------- procedure Write_Task_Type_Dependency_Clauses (Abstraction : in Abstractions; File : in SPARK_IO.File_Type; The_Task_Type : in RawDict.Type_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Abstraction, --# Dict, --# File, --# LexTokenManager.State, --# The_Task_Type; is Export : Iterator; -------------------------------------------------------------------------------- procedure Write_Dependencies (Abstraction : in Abstractions; File : in SPARK_IO.File_Type; The_Task_Type : in RawDict.Type_Info_Ref; The_Export : in RawDict.Variable_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Abstraction, --# Dict, --# File, --# LexTokenManager.State, --# The_Export, --# The_Task_Type; is Dependency : Iterator; -------------------------------------------------------------------------------- procedure Write_Dependency (Abstraction : in Abstractions; File : in SPARK_IO.File_Type; The_Task_Type : in RawDict.Type_Info_Ref; The_Export : in RawDict.Variable_Info_Ref; The_Import : in RawDict.Variable_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Abstraction, --# Dict, --# File, --# LexTokenManager.State, --# The_Export, --# The_Import, --# The_Task_Type; is begin if Abstraction = IsRefined then Write_String (File, "refined "); end if; if Get_Own_Variable_Or_Constituent_Mode (The_Variable => The_Export) = InMode then Write_String (File, "implicit "); end if; Write_String (File, "export named "); 
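-- Illustrative dependency line for a task type (hypothetical names):
--   export named State is derived from Inputs in Controller_T ;
-- Own variables of mode in are flagged as implicit exports, mode out imports
-- as implicitly derived, and an export with no imports is derived from
-- "nothing".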
Write_Name (File => File, Item => RawDict.Get_Variable_Symbol (The_Export)); Write_String (File, " is "); if Get_Own_Variable_Or_Constituent_Mode (The_Variable => The_Import) = OutMode then Write_String (File, "implicitly "); end if; Write_String (File, "derived from "); if The_Import = RawDict.Null_Variable_Info_Ref then Write_String (File, "nothing"); else Write_Name (File => File, Item => RawDict.Get_Variable_Symbol (The_Import)); end if; Write_String (File, " in "); Write_Name (File => File, Item => RawDict.Get_Type_Symbol (The_Task_Type)); Write_Line (File, " ;"); end Write_Dependency; begin -- Write_Dependencies Dependency := First_Task_Type_Dependency (The_Task_Type => The_Task_Type, Abstraction => Abstraction, The_Export => The_Export); if IsNullIterator (Dependency) then Write_Dependency (Abstraction => Abstraction, File => File, The_Task_Type => The_Task_Type, The_Export => The_Export, The_Import => RawDict.Null_Variable_Info_Ref); else loop exit when IsNullIterator (Dependency); Write_Dependency (Abstraction => Abstraction, File => File, The_Task_Type => The_Task_Type, The_Export => The_Export, The_Import => RawDict.Get_Variable_Info_Ref (CurrentSymbol (Dependency))); Dependency := NextSymbol (Dependency); end loop; end if; end Write_Dependencies; begin -- Write_Task_Type_Dependency_Clauses Export := First_Task_Type_Export (The_Task_Type => The_Task_Type, Abstraction => Abstraction); loop exit when IsNullIterator (Export); Write_Dependencies (Abstraction => Abstraction, File => File, The_Task_Type => The_Task_Type, The_Export => RawDict.Get_Variable_Info_Ref (CurrentSymbol (Export))); Export := NextSymbol (Export); end loop; end Write_Task_Type_Dependency_Clauses; -------------------------------------------------------------------------------- procedure Write_Type_Info (File : in SPARK_IO.File_Type; Type_Mark : in RawDict.Type_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# Type_Mark; is procedure Write_Discriminant (File : in SPARK_IO.File_Type; Type_Mark : in RawDict.Type_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# Type_Mark; is begin case RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) is when Unknown_Type_Item => Write_String (File, "unknown"); when Enumeration_Type_Item => Write_String (File, "enumeration"); when Integer_Type_Item => Write_String (File, "integer"); when Modular_Type_Item => Write_String (File, "modular"); when Floating_Point_Type_Item => Write_String (File, "floating point"); when Fixed_Point_Type_Item => Write_String (File, "fixed point"); when Array_Type_Item => Write_String (File, "array"); when Record_Type_Item => Write_String (File, "record"); when Abstract_Proof_Type_Item => Write_String (File, "abstract "); when Protected_Type_Item => Write_String (File, "protected"); when Task_Type_Item => Write_String (File, "task"); when Access_Type_Item => Write_String (File, "access "); Write_Name (File => File, Item => RawDict.Get_Type_Symbol (RawDict.Get_Type_Accesses (Type_Mark => Type_Mark))); when Generic_Type_Item => Write_String (File, "generic "); case RawDict.Get_Type_Kind_Of_Generic (Type_Mark => Type_Mark) is when Invalid_Generic_Type => Write_String (File, "invalid generic type "); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in 
Dictionary.Write_Discriminant"); when Generic_Private_Type => Write_String (File, "private "); when Generic_Discrete_Type => Write_String (File, "discrete "); when Generic_Integer_Type => Write_String (File, "integer "); when Generic_Modular_Type => Write_String (File, "modular "); when Generic_Floating_Point_Type => Write_String (File, "floating point "); when Generic_Fixed_Point_Type => Write_String (File, "fixed point "); when Generic_Array_Type => Write_String (File, "array "); end case; end case; end Write_Discriminant; ------------------------------------------------------------------------- procedure Write_Bound (File : in SPARK_IO.File_Type; Bound : in LexTokenManager.Lex_String; Type_Mark : in RawDict.Type_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Bound, --# Dict, --# File, --# LexTokenManager.State, --# Type_Mark; is begin if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Bound, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq then SPARK_IO.Put_String (File, "unknown", 7); else Write_Static_Value (File => File, Value => Bound, Type_Mark => Type_Mark); end if; end Write_Bound; -------------------------------------------------------------------------------- function Type_Is_Limited_Private (Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Type_Limited (Type_Mark => Type_Mark) /= Never; end Type_Is_Limited_Private; -------------------------------------------------------------------------------- procedure Write_Protected_Refinement (File : in SPARK_IO.File_Type; Type_Mark : in RawDict.Type_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# Type_Mark; is It : Iterator; begin Write_String (File, "The implicit state variable of type "); Write_Simple_Name (File => File, Item => RawDict.Get_Type_Symbol (Type_Mark)); Write_Line (File, " has the following refinement constituents:"); It := First_Constituent (The_Variable => Get_Protected_Type_Own_Variable (The_Protected_Type => Type_Mark)); while not IsNullIterator (It) loop Write_String (File, " "); Write_Simple_Name (File => File, Item => CurrentSymbol (It)); Write_Line (File, ","); It := NextSymbol (It); end loop; Write_Line (File, ";"); end Write_Protected_Refinement; -------------------------------------------------------------------------------- procedure Write_Type_Discriminants (File : in SPARK_IO.File_Type; Type_Mark : in RawDict.Type_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# Type_Mark; is It : Iterator; begin Write_String (File, "Type "); Write_Simple_Name (File => File, Item => RawDict.Get_Type_Symbol (Type_Mark)); Write_Line (File, " has the following known discriminants:"); It := First_Known_Discriminant (Protected_Or_Task_Type => Type_Mark); if IsNullIterator (It) then Write_Line (File, "None;"); else while not IsNullIterator (It) loop Write_String (File, " "); Write_Simple_Name (File => File, Item => CurrentSymbol (It)); Write_String (File, " which is of type "); Write_Name (File => File, Item => RawDict.Get_Type_Symbol (Get_Type (The_Symbol => CurrentSymbol (It)))); if RawDict.GetDiscriminantSetsPriority (CurrentSymbol (It)) then Write_String (File, " and is used to set the priority"); end if; 
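-- One line is produced per known discriminant; illustrative output
-- (hypothetical names):
--   Ceiling which is of type Priority_T and is used to set the priority;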
Write_Line (File, ";"); It := NextSymbol (It); end loop; end if; end Write_Type_Discriminants; -------------------------------------------------------------------------------- procedure Write_Discriminant_Constraint (File : in SPARK_IO.File_Type; Type_Mark : in RawDict.Type_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# Type_Mark; is Constraint_It : Iterator; Known_It : Iterator; procedure Write_Constraint_Value (File : in SPARK_IO.File_Type; The_Constraint : in Symbol) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# The_Constraint; is Val : LexTokenManager.Lex_String; begin Val := RawDict.GetDiscriminantConstraintStaticValue (The_Constraint); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Val, Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq then Write_Static_Value (File => File, Value => Val, Type_Mark => Get_Predefined_Integer_Type); elsif RawDict.GetDiscriminantConstraintAccessedObject (The_Constraint) /= NullSymbol then -- if no static value then must be access to PO Write_Name (File => File, Item => RawDict.GetDiscriminantConstraintAccessedObject (The_Constraint)); end if; end Write_Constraint_Value; begin -- Write_Discriminant_Constraint Write_String (File, "Type "); Write_Simple_Name (File => File, Item => RawDict.Get_Type_Symbol (Type_Mark)); Write_Line (File, " has the following discriminant constraints:"); Constraint_It := First_Discriminant_Constraint (Protected_Or_Task_Subtype => Type_Mark); if IsNullIterator (Constraint_It) then Write_Line (File, "None;"); else Known_It := First_Known_Discriminant (Protected_Or_Task_Type => RawDict.Get_Type_Parent (Type_Mark => Type_Mark)); while not IsNullIterator (Constraint_It) and then not IsNullIterator (Known_It) loop Write_String (File, " "); Write_Simple_Name (File => File, Item => CurrentSymbol (Known_It)); Write_String (File, " => "); Write_Constraint_Value (File => File, The_Constraint => CurrentSymbol (Constraint_It)); if RawDict.GetDiscriminantSetsPriority (CurrentSymbol (Known_It)) then Write_String (File, " which is used to set the priority"); end if; Write_Line (File, ";"); Known_It := NextSymbol (Known_It); Constraint_It := NextSymbol (Constraint_It); end loop; end if; end Write_Discriminant_Constraint; -------------------------------------------------------------------------------- procedure Write_Type_Priority (File : in SPARK_IO.File_Type; Type_Mark : in RawDict.Type_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# Type_Mark; is Val : LexTokenManager.Lex_String; begin if Get_Type_Has_Pragma (Protected_Or_Task_Type => Type_Mark, The_Pragma => Priority) then Write_String (File, "Type "); Write_Simple_Name (File => File, Item => RawDict.Get_Type_Symbol (Type_Mark)); Write_String (File, " has pragma Priority"); Val := Get_Type_Pragma_Value (Protected_Or_Task_Type => Type_Mark, The_Pragma => Priority); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Val, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq then Write_Line (File, " of unknown value;"); else Write_String (File, " of value "); Write_Static_Value (File => File, Value => Val, Type_Mark => Get_Predefined_Integer_Type); 
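-- The Interrupt_Priority branch below mirrors this one: the pragma value is
-- written when it is statically known, otherwise it is reported as being of
-- unknown value.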
Write_Line (File, ";"); end if; elsif Get_Type_Has_Pragma (Protected_Or_Task_Type => Type_Mark, The_Pragma => InterruptPriority) then Write_String (File, "Type "); Write_Simple_Name (File => File, Item => RawDict.Get_Type_Symbol (Type_Mark)); Write_String (File, " has pragma Interrupt_Priority"); Val := Get_Type_Pragma_Value (Protected_Or_Task_Type => Type_Mark, The_Pragma => InterruptPriority); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Val, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq then Write_Line (File, " of unknown value;"); else Write_String (File, " of value "); Write_Static_Value (File => File, Value => Val, Type_Mark => Get_Predefined_Integer_Type); Write_Line (File, ";"); end if; end if; end Write_Type_Priority; -------------------------------------------------------------------------------- procedure Write_Priority (File : in SPARK_IO.File_Type; Type_Mark : in RawDict.Type_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# Type_Mark; is begin Write_String (File, "Type "); Write_Name (File => File, Item => RawDict.Get_Type_Symbol (Type_Mark)); Write_String (File, " has a priority of "); Write_Static_Value (File => File, Value => Get_Type_Priority (Protected_Or_Task_Type => Type_Mark), Type_Mark => Get_Predefined_Integer_Type); Write_Line (File, ";"); end Write_Priority; -------------------------------------------------------------------------------- procedure Write_Suspends_List (File : in SPARK_IO.File_Type; The_Task_Type : in RawDict.Type_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# The_Task_Type; is It : Iterator; begin Write_String (File, "Type "); Write_Simple_Name (File => File, Item => RawDict.Get_Type_Symbol (The_Task_Type)); Write_Line (File, " has the following PO or SO names in its Suspend list"); It := First_Task_Type_Suspends_List_Item (The_Task_Type => The_Task_Type); if IsNullIterator (It) then Write_Line (File, "None;"); else while not IsNullIterator (It) loop Write_String (File, " "); Write_Name (File => File, Item => CurrentSymbol (It)); Write_Line (File, ";"); It := NextSymbol (It); end loop; end if; end Write_Suspends_List; begin -- Write_Type_Info Write_String (File, "type named "); Write_Simple_Name (File => File, Item => RawDict.Get_Type_Symbol (Type_Mark)); Write_String (File, " is "); if RawDict.Get_Type_Extends (Type_Mark => Type_Mark) /= RawDict.Null_Type_Info_Ref then Write_String (File, "an extension of type "); Write_Name (File => File, Item => RawDict.Get_Type_Symbol (RawDict.Get_Type_Extends (Type_Mark => Type_Mark))); Write_String (File, " and is "); end if; --# assert True; if Type_Is_Tagged (Type_Mark => Type_Mark) then Write_String (File, "tagged "); end if; --# assert True; if Type_Is_Private (Type_Mark => Type_Mark) then if Type_Is_Limited_Private (Type_Mark => Type_Mark) then Write_String (File, "limited "); end if; Write_String (File, "private "); end if; --# assert True; if Is_Unconstrained_Array_Type (Type_Mark => Type_Mark) then Write_String (File, "unconstrained "); end if; --# assert True; if Is_Static_Type (Type_Mark => Type_Mark, Scope => Get_Type_Scope (Type_Mark => Type_Mark)) then Write_String (File, "static "); end if; Write_Discriminant (File => File, Type_Mark => Type_Mark); --# assert True; if Is_Proof_Type (Type_Mark => 
Type_Mark) then Write_String (File, "proof "); end if; Write_Space (File => File); --# assert True; if Is_Type (Type_Mark => Type_Mark) then Write_String (File, "type"); else Write_String (File, "subtype of "); Write_Name (File => File, Item => RawDict.Get_Type_Symbol (RawDict.Get_Type_Parent (Type_Mark => Type_Mark))); end if; --# assert True; if Type_Is_Scalar (Type_Mark => Type_Mark) then Write_String (File, " range "); Write_Bound (File => File, Bound => RawDict.Get_Type_Lower (Type_Mark => Type_Mark), Type_Mark => Type_Mark); Write_String (File, " .. "); Write_Bound (File => File, Bound => RawDict.Get_Type_Upper (Type_Mark => Type_Mark), Type_Mark => Type_Mark); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => RawDict.Get_Type_Error_Bound (Type_Mark => Type_Mark), Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq then if RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Floating_Point_Type_Item then Write_String (File, " digits "); else Write_String (File, " delta "); end if; Write_Static_Value (File => File, Value => RawDict.Get_Type_Error_Bound (Type_Mark => Type_Mark), Type_Mark => Type_Mark); end if; elsif Type_Is_Array (Type_Mark => Type_Mark) then Write_String (File, " of "); Write_Name (File => File, Item => RawDict.Get_Type_Symbol (Get_Array_Component (Type_Mark => Type_Mark))); elsif Is_Type (Type_Mark => Type_Mark) and then RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Protected_Type_Item and then RawDict.Get_Protected_Type_Elements_Hidden (The_Protected_Type => Type_Mark) then Write_String (File, " with hidden elements "); end if; --# assert True; if RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Access_Type_Item then Write_String (File, " implicitly"); end if; Write_String (File, " declared in "); Write_Scope (File, Get_Type_Scope (Type_Mark => Type_Mark)); Write_Line (File, " ;"); --# assert True; if RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Protected_Type_Item then if Is_Type (Type_Mark => Type_Mark) then Write_Type_Discriminants (File => File, Type_Mark => Type_Mark); Write_Protected_Refinement (File => File, Type_Mark => Type_Mark); Write_Type_Priority (File => File, Type_Mark => Type_Mark); else -- subtype Write_Discriminant_Constraint (File => File, Type_Mark => Type_Mark); end if; Write_Priority (File => File, Type_Mark => Type_Mark); end if; --# assert True; if RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Task_Type_Item then if Is_Type (Type_Mark => Type_Mark) then Write_Type_Discriminants (File => File, Type_Mark => Type_Mark); Write_Task_Type_Global_Variables (Abstraction => IsAbstract, File => File, The_Task_Type => Type_Mark); Write_Task_Type_Dependency_Clauses (Abstraction => IsAbstract, File => File, The_Task_Type => Type_Mark); Write_Suspends_List (File => File, The_Task_Type => Type_Mark); Write_Type_Priority (File => File, Type_Mark => Type_Mark); if not RawDict.Get_Task_Type_Signature_Is_Wellformed (The_Task_Type => Type_Mark, Abstraction => IsAbstract) then Write_Line (File, "Task type signature contains errors"); end if; else -- subtype Write_Discriminant_Constraint (File => File, Type_Mark => Type_Mark); end if; Write_Priority (File => File, Type_Mark => Type_Mark); end if; end Write_Type_Info; -------------------------------------------------------------------------------- procedure Write_Enumeration_Literals (File : in SPARK_IO.File_Type; Type_Mark : in RawDict.Type_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# 
derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# Type_Mark; is Literal : Iterator; Value : Natural; -------------------------------------------------------------------------------- procedure Write_Enumeration_Literal (File : in SPARK_IO.File_Type; Type_Mark : in RawDict.Type_Info_Ref; The_Enumeration_Value : in Natural; The_Enumeration_Literal : in Symbol) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# The_Enumeration_Literal, --# The_Enumeration_Value, --# Type_Mark; is begin Write_String (File, "enumeration literal # "); Write_Integer (File, The_Enumeration_Value); Write_String (File, " of "); Write_Name (File => File, Item => RawDict.Get_Type_Symbol (Type_Mark)); Write_String (File, " is "); Write_Simple_Name (File => File, Item => The_Enumeration_Literal); Write_Line (File, " ;"); end Write_Enumeration_Literal; begin -- Write_Enumeration_Literals Literal := First_Enumeration_Literal (Type_Mark => Type_Mark); Value := 0; while not IsNullIterator (Literal) and then Value < Natural'Last loop Write_Enumeration_Literal (File => File, Type_Mark => Type_Mark, The_Enumeration_Value => Value, The_Enumeration_Literal => CurrentSymbol (Literal)); Literal := NextSymbol (Literal); Value := Value + 1; end loop; end Write_Enumeration_Literals; -------------------------------------------------------------------------------- procedure Write_Record_Components (File : in SPARK_IO.File_Type; Type_Mark : in RawDict.Type_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# Type_Mark; is Component : Iterator; Number : Positive; -------------------------------------------------------------------------------- procedure Write_Record_Component (File : in SPARK_IO.File_Type; Type_Mark : in RawDict.Type_Info_Ref; Number : in Positive; Record_Component : in Symbol) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# Number, --# Record_Component, --# Type_Mark; is begin Write_String (File, "record component # "); Write_Integer (File, Number); Write_String (File, " of "); Write_Name (File => File, Item => RawDict.Get_Type_Symbol (Type_Mark)); Write_String (File, " is "); Write_Simple_Name (File => File, Item => Record_Component); Write_String (File, " of type "); Write_Name (File => File, Item => RawDict.Get_Type_Symbol (Get_Type (The_Symbol => Record_Component))); Write_Line (File, " ;"); end Write_Record_Component; begin -- Write_Record_Components Component := First_Record_Component (Type_Mark => Type_Mark); Number := 1; while not IsNullIterator (Component) and then Number < Positive'Last loop Write_Record_Component (File => File, Type_Mark => Type_Mark, Number => Number, Record_Component => CurrentSymbol (Component)); Component := NextSymbol (Component); Number := Number + 1; end loop; end Write_Record_Components; -------------------------------------------------------------------------------- procedure Write_Array_Indices (File : in SPARK_IO.File_Type; Type_Mark : in RawDict.Type_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# Type_Mark; is Array_Index : Iterator; Dimension : Positive; 
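-- Each dimension of an array type is written on its own line; illustrative
-- output (hypothetical names):
--   index # 1 of Matrix_T is Row_Index ;
--   index # 2 of Matrix_T is Col_Index ;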
-------------------------------------------------------------------------------- procedure Write_Array_Index (File : in SPARK_IO.File_Type; Type_Mark : in RawDict.Type_Info_Ref; Dimension : in Positive; Index_Type : in Symbol) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# Dimension, --# File, --# Index_Type, --# LexTokenManager.State, --# Type_Mark; is begin Write_String (File, "index # "); Write_Integer (File, Dimension); Write_String (File, " of "); Write_Name (File => File, Item => RawDict.Get_Type_Symbol (Type_Mark)); Write_String (File, " is "); Write_Name (File => File, Item => Index_Type); Write_Line (File, " ;"); end Write_Array_Index; begin -- Write_Array_Indices Array_Index := First_Array_Index (Type_Mark => Type_Mark); Dimension := 1; while not IsNullIterator (Array_Index) and then Dimension < Positive'Last loop Write_Array_Index (File => File, Type_Mark => Type_Mark, Dimension => Dimension, Index_Type => CurrentSymbol (Array_Index)); Array_Index := NextSymbol (Array_Index); Dimension := Dimension + 1; end loop; end Write_Array_Indices; -------------------------------------------------------------------------------- procedure Write_Variable (File : in SPARK_IO.File_Type; The_Variable : in RawDict.Variable_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# The_Variable; is begin Write_String (File, "variable named "); Write_Simple_Name (File => File, Item => RawDict.Get_Variable_Symbol (The_Variable)); Write_String (File, " is"); if RawDict.Get_Variable_Initialized (The_Variable => The_Variable) then Write_String (File, " initialized"); end if; if RawDict.Get_Variable_Is_Aliased (The_Variable => The_Variable) then Write_String (File, " aliased"); end if; Write_String (File, " variable of "); Write_Name (File => File, Item => RawDict.Get_Type_Symbol (RawDict.Get_Variable_Type (The_Variable => The_Variable))); Write_String (File, " declared in "); Write_Scope (File, Get_Variable_Scope (The_Variable => The_Variable)); if RawDict.Get_Variable_Has_Pragma_Import (The_Variable => The_Variable) then Write_String (File, " and completed by a pragma Import"); end if; Write_Line (File, " ;"); end Write_Variable; -------------------------------------------------------------------------------- procedure Write_Constant (File : in SPARK_IO.File_Type; The_Constant : in RawDict.Constant_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# The_Constant; is begin Write_String (File, "constant named "); Write_Simple_Name (File => File, Item => RawDict.Get_Constant_Symbol (The_Constant)); Write_String (File, " is "); if Constant_Is_Deferred (The_Constant => The_Constant) then Write_String (File, "deferred "); end if; if Is_Static_Constant (The_Constant => The_Constant, Scope => Get_Constant_Scope (The_Constant => The_Constant)) then Write_String (File, "static "); end if; if Get_Constant_Context (The_Constant => The_Constant) = ProofContext then Write_String (File, "proof "); end if; Write_String (File, "constant of "); Write_Name (File => File, Item => RawDict.Get_Type_Symbol (RawDict.Get_Constant_Type (The_Constant => The_Constant))); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => RawDict.Get_Constant_Value (The_Constant => The_Constant), 
Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq then Write_String (File, " value "); Write_Static_Value (File => File, Value => RawDict.Get_Constant_Value (The_Constant => The_Constant), Type_Mark => RawDict.Get_Constant_Type (The_Constant => The_Constant)); end if; Write_String (File, " declared in "); Write_Scope (File, Get_Constant_Scope (The_Constant => The_Constant)); Write_Line (File, " ;"); end Write_Constant; -------------------------------------------------------------------------------- procedure Write_Loops (File : in SPARK_IO.File_Type; Compilation_Unit : in Symbol) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Compilation_Unit, --# Dict, --# File, --# LexTokenManager.State; is The_Loop : Iterator; -------------------------------------------------------------------------------- procedure Write_Loop (File : in SPARK_IO.File_Type; The_Loop : in Symbol) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# The_Loop; is -------------------------------------------------------------------------------- procedure Write_Loop_Info (File : in SPARK_IO.File_Type; The_Loop : in Symbol) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# The_Loop; is begin Write_String (File, "loop named "); Write_Simple_Name (File => File, Item => The_Loop); Write_String (File, " declared in "); Write_Scope (File, GetScope (The_Loop)); Write_Line (File, " ;"); end Write_Loop_Info; -------------------------------------------------------------------------------- procedure Write_Loop_Parameter (File : in SPARK_IO.File_Type; Loop_Parameter : in Symbol) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# Loop_Parameter; is begin Write_String (File, "loop parameter of "); Write_Name (File => File, Item => GetRegion (GetScope (Loop_Parameter))); Write_String (File, " named "); Write_Simple_Name (File => File, Item => Loop_Parameter); Write_String (File, " is of type "); Write_Name (File => File, Item => RawDict.Get_Type_Symbol (Get_Type (The_Symbol => Loop_Parameter))); Write_Line (File, " ;"); end Write_Loop_Parameter; begin -- Write_Loop Write_Loop_Info (File => File, The_Loop => The_Loop); if Is_For_Loop (TheSymbol => The_Loop) then Write_Loop_Parameter (File => File, Loop_Parameter => RawDict.GetLoopParameter (The_Loop)); end if; end Write_Loop; begin -- Write_Loops The_Loop := First_Loop (CompilationUnit => Compilation_Unit); loop exit when IsNullIterator (The_Loop); Write_Loop (File => File, The_Loop => CurrentSymbol (The_Loop)); The_Loop := NextSymbol (The_Loop); end loop; end Write_Loops; -------------------------------------------------------------------------------- procedure Write_Protected_Element (File : in SPARK_IO.File_Type; The_Protected_Type : in RawDict.Type_Info_Ref; The_Element : in Symbol) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# The_Element, --# The_Protected_Type; is begin Write_String (File, "protected element named "); Write_Simple_Name (File => File, Item => The_Element); Write_String (File, " of "); Write_Name (File => File, 
Item => RawDict.Get_Type_Symbol (Get_Type (The_Symbol => The_Element))); Write_String (File, " declared in "); Write_Name (File => File, Item => RawDict.Get_Type_Symbol (The_Protected_Type)); Write_Line (File, " ;"); end Write_Protected_Element; -------------------------------------------------------------------------------- procedure Write_Own_Variables (File : in SPARK_IO.File_Type; The_Package : in RawDict.Package_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# The_Package; is Own_Variables : Iterator; -------------------------------------------------------------------------------- procedure Write_Own_Variable (File : in SPARK_IO.File_Type; The_Package : in RawDict.Package_Info_Ref; The_Variable : in RawDict.Variable_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# The_Package, --# The_Variable; is The_Own_Variable : RawDict.Own_Variable_Info_Ref; begin The_Own_Variable := RawDict.Get_Variable_Own_Variable (The_Variable => The_Variable); case RawDict.Get_Own_Variable_Mode (The_Own_Variable => The_Own_Variable) is when DefaultMode => Write_String (File, "default"); when InMode => Write_String (File, "in"); when OutMode => Write_String (File, "out"); when InOutMode => Write_String (File, "in out"); when InvalidMode => Write_String (File, "invalid"); end case; Write_String (File, " mode "); if RawDict.Get_Own_Variable_Protected (The_Own_Variable => The_Own_Variable) then Write_String (File, "protected "); end if; Write_String (File, "own variable "); Write_Simple_Name (File => File, Item => RawDict.Get_Variable_Symbol (The_Variable)); if RawDict.Get_Own_Variable_Typed (The_Own_Variable => The_Own_Variable) then Write_String (File, " of type "); Write_Name (File => File, Item => RawDict.Get_Type_Symbol (RawDict.Get_Variable_Type (The_Variable => The_Variable))); end if; Write_String (File, " is owned "); if RawDict.Get_Own_Variable_Initialized (The_Own_Variable => The_Own_Variable) then Write_String (File, "and initialized "); end if; Write_String (File, "by "); Write_Name (File => File, Item => RawDict.Get_Package_Symbol (The_Package)); Write_Line (File, " ;"); end Write_Own_Variable; begin -- Write_Own_Variables Own_Variables := First_Own_Variable (The_Package => The_Package); loop exit when IsNullIterator (Own_Variables); Write_Own_Variable (File => File, The_Package => The_Package, The_Variable => RawDict.Get_Variable_Info_Ref (CurrentSymbol (Own_Variables))); Own_Variables := NextSymbol (Own_Variables); end loop; end Write_Own_Variables; -------------------------------------------------------------------------------- procedure Write_Embedded_Packages (File : in SPARK_IO.File_Type; The_Package : in RawDict.Package_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# The_Package; is Packages : Iterator; begin Packages := First_Embedded_Package_In_Package (The_Package => The_Package); loop exit when IsNullIterator (Packages); Write_Package_Info (File => File, The_Package => RawDict.Get_Package_Info_Ref (CurrentSymbol (Packages))); Packages := NextSymbol (Packages); end loop; end Write_Embedded_Packages; -------------------------------------------------------------------------------- procedure 
Write_Abstract_Own_Variables (File : in SPARK_IO.File_Type; The_Package : in RawDict.Package_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# The_Package; is Abstract_Own_Variables : Iterator; -------------------------------------------------------------------------------- procedure Write_Abstract_Own_Variable (File : in SPARK_IO.File_Type; The_Variable : in RawDict.Variable_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# The_Variable; is Constituents : Iterator; -------------------------------------------------------------------------------- procedure Write_Constituent (File : in SPARK_IO.File_Type; The_Variable : in RawDict.Variable_Info_Ref; Constituent : in RawDict.Variable_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Constituent, --# Dict, --# File, --# LexTokenManager.State, --# The_Variable; is begin case Get_Constituent_Mode (The_Variable => Constituent) is when DefaultMode => Write_String (File, "default"); when InMode => Write_String (File, "in"); when OutMode => Write_String (File, "out"); when InOutMode => Write_String (File, "in out"); when InvalidMode => Write_String (File, "invalid"); end case; Write_String (File, " mode "); Write_String (File, "constituent of "); Write_Name (File => File, Item => RawDict.Get_Variable_Symbol (The_Variable)); Write_String (File, " is "); Write_Simple_Name (File => File, Item => RawDict.Get_Variable_Symbol (Constituent)); if Get_Owner (The_Variable => The_Variable) /= Get_Owner (The_Variable => Constituent) then Write_String (File, " declared in "); Write_Name (File => File, Item => Get_Owner (The_Variable => Constituent)); end if; Write_Line (File, " ;"); end Write_Constituent; begin -- Write_Abstract_Own_Variable Constituents := First_Constituent (The_Variable => The_Variable); loop exit when IsNullIterator (Constituents); Write_Constituent (File => File, The_Variable => The_Variable, Constituent => RawDict.Get_Variable_Info_Ref (CurrentSymbol (Constituents))); Constituents := NextSymbol (Constituents); end loop; end Write_Abstract_Own_Variable; begin -- Write_Abstract_Own_Variables Abstract_Own_Variables := First_Abstract_Own_Variable (The_Package => The_Package); loop exit when IsNullIterator (Abstract_Own_Variables); Write_Abstract_Own_Variable (File => File, The_Variable => RawDict.Get_Variable_Info_Ref (CurrentSymbol (Abstract_Own_Variables))); Abstract_Own_Variables := NextSymbol (Abstract_Own_Variables); end loop; end Write_Abstract_Own_Variables; -------------------------------------------------------------------------------- procedure Write_Subprogram_Bodies (File : in SPARK_IO.File_Type; Scope : in Scopes) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# Scope; is Declarative_Items : Iterator; Declarative_Item : Symbol; begin Declarative_Items := First_Declarative_Item (Scope => Scope); loop exit when IsNullIterator (Declarative_Items); Declarative_Item := CurrentSymbol (Declarative_Items); if Is_Subprogram (Declarative_Item) then Write_Subprogram_Global_Variables (Abstraction => IsRefined, File => File, The_Subprogram => RawDict.Get_Subprogram_Info_Ref 
(Declarative_Item)); if RawDict.GetSymbolDiscriminant (Declarative_Item) = Subprogram_Symbol and then Is_Procedure (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Declarative_Item)) then Write_Subprogram_Dependency_Clauses (Abstraction => IsRefined, File => File, The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Declarative_Item)); end if; end if; Declarative_Items := NextSymbol (Declarative_Items); end loop; end Write_Subprogram_Bodies; -------------------------------------------------------------------------------- procedure Write_Subprogram_Info (File : in SPARK_IO.File_Type; The_Subprogram : in RawDict.Subprogram_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# The_Subprogram; is begin Write_String (File, "subprogram named "); Write_Simple_Name (File => File, Item => RawDict.Get_Subprogram_Symbol (The_Subprogram)); Write_String (File, " is "); if RawDict.Get_Subprogram_Generic_Unit (The_Subprogram => The_Subprogram) /= RawDict.Null_Generic_Unit_Info_Ref then Write_String (File, "generic "); end if; if RawDict.Get_Subprogram_Is_Entry (The_Subprogram => The_Subprogram) then Write_String (File, "entry"); elsif Is_Procedure (The_Subprogram => The_Subprogram) then Write_String (File, "procedure"); else if IsProofFunction (RawDict.Get_Subprogram_Symbol (The_Subprogram)) then Write_String (File, "proof "); end if; Write_String (File, "function of "); Write_Name (File => File, Item => RawDict.Get_Type_Symbol (RawDict.Get_Subprogram_Return_Type (The_Subprogram => The_Subprogram))); end if; -- don't try and print declaration region for library-level units if not (RawDict.GetSymbolDiscriminant (RawDict.Get_Subprogram_Symbol (The_Subprogram)) = Subprogram_Symbol and then Is_Main_Program (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => RawDict.Get_Subprogram_Symbol (The_Subprogram)))) and then The_Subprogram /= Get_The_Partition and then RawDict.Get_Subprogram_Generic_Unit (The_Subprogram => The_Subprogram) = RawDict.Null_Generic_Unit_Info_Ref then Write_String (File, " declared in "); Write_Scope (File, Get_Subprogram_Scope (The_Subprogram => The_Subprogram)); end if; if RawDict.Get_Subprogram_Instantiation_Of (The_Subprogram => The_Subprogram) /= RawDict.Null_Subprogram_Info_Ref then Write_String (File, " and which is an instantiation of "); Write_Name (File => File, Item => RawDict.Get_Subprogram_Symbol (RawDict.Get_Subprogram_Instantiation_Of (The_Subprogram => The_Subprogram))); end if; if RawDict.Get_Subprogram_Is_Interrupt_Handler (The_Subprogram => The_Subprogram) then Write_String (File, " and is an interrupt handler"); end if; if The_Subprogram = Get_The_Partition then Write_String (File, " and is the partition table of the whole program"); end if; Write_Line (File, " ;"); end Write_Subprogram_Info; -------------------------------------------------------------------------------- procedure Write_Subprogram_Parameters (File : in SPARK_IO.File_Type; The_Subprogram : in RawDict.Subprogram_Info_Ref; The_Implicit_Proof_Function : in Symbol) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# The_Implicit_Proof_Function, --# The_Subprogram; is Parameter : Iterator; Number : Positive; -------------------------------------------------------------------------------- procedure Write_Subprogram_Parameter (File : in SPARK_IO.File_Type; 
The_Subprogram : in RawDict.Subprogram_Info_Ref; Number : in Positive; The_Subprogram_Parameter : in RawDict.Subprogram_Parameter_Info_Ref) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# Number, --# The_Subprogram, --# The_Subprogram_Parameter; is begin Write_String (File, "subprogram parameter # "); Write_Integer (File, Number); Write_String (File, " of "); Write_Name (File => File, Item => RawDict.Get_Subprogram_Symbol (The_Subprogram)); Write_String (File, " is "); Write_Simple_Name (File => File, Item => RawDict.Get_Subprogram_Parameter_Symbol (The_Subprogram_Parameter)); Write_String (File, " which is "); case RawDict.Get_Subprogram_Parameter_Mode (The_Subprogram_Parameter => The_Subprogram_Parameter) is when DefaultMode => Write_String (File, "default"); when InMode => Write_String (File, "in"); when OutMode => Write_String (File, "out"); when InOutMode => Write_String (File, "in out"); when InvalidMode => Write_String (File, "invalid"); end case; Write_String (File, " parameter of type "); Write_Name (File => File, Item => RawDict.Get_Type_Symbol (RawDict.Get_Subprogram_Parameter_Type (The_Subprogram_Parameter => The_Subprogram_Parameter))); Write_Line (File, " ;"); end Write_Subprogram_Parameter; begin -- Write_Subprogram_Parameters if The_Implicit_Proof_Function = NullSymbol then Parameter := First_Ada_Subprogram_Parameter (The_Subprogram => The_Subprogram); else Parameter := First_Implicit_Proof_Function_Parameter (ProofFunction => The_Implicit_Proof_Function); end if; Number := 1; while not IsNullIterator (Parameter) and then Number < Positive'Last loop Write_Subprogram_Parameter (File => File, The_Subprogram => The_Subprogram, Number => Number, The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (CurrentSymbol (Parameter))); Parameter := NextSymbol (Parameter); Number := Number + 1; end loop; end Write_Subprogram_Parameters; -------------------------------------------------------------------------------- procedure Write_Library_Unit (File : in SPARK_IO.File_Type; Unit : in Symbol) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# File, --# LexTokenManager.State, --# Unit; is --# hide Write_Library_Unit; -------------------------------------------------------------------------------- procedure Write_Subprogram (File : in SPARK_IO.File_Type; The_Subprogram : in RawDict.Subprogram_Info_Ref; The_Implicit_Proof_Function : in Symbol); -------------------------------------------------------------------------------- procedure Write_Package (File : in SPARK_IO.File_Type; The_Package : in RawDict.Package_Info_Ref); -------------------------------------------------------------------------------- procedure Write_Library_Package (File : in SPARK_IO.File_Type; The_Package : in RawDict.Package_Info_Ref) is begin if Get_Root_Package (The_Package => The_Package) /= Get_Predefined_Package_Ada then Write_Package_Info (File => File, The_Package => The_Package); Write_Package (File => File, The_Package => The_Package); end if; end Write_Library_Package; -------------------------------------------------------------------------------- procedure Write_Declarative_Items (File : in SPARK_IO.File_Type; Scope : in Scopes) is Declarative_Items : Iterator; Declarative_Item : Symbol; -------------------------------------------------------------------------------- procedure 
Write_Declarative_Item (File : in SPARK_IO.File_Type; Item : in Symbol) is procedure Write_Type (File : in SPARK_IO.File_Type; Type_Mark : in RawDict.Type_Info_Ref) is procedure Write_Protected_Declarations (File : in SPARK_IO.File_Type; The_Protected_Type : in RawDict.Type_Info_Ref) is It : Iterator; begin It := First_Protected_Type_Visible_Subprogram (The_Protected_Type => The_Protected_Type); while not IsNullIterator (It) loop Write_Subprogram (File => File, The_Subprogram => RawDict.Get_Subprogram_Info_Ref (CurrentSymbol (It)), The_Implicit_Proof_Function => NullSymbol); It := NextSymbol (It); end loop; It := First_Protected_Element (The_Protected_Type => The_Protected_Type); while not IsNullIterator (It) loop Write_Protected_Element (File => File, The_Protected_Type => The_Protected_Type, The_Element => CurrentSymbol (It)); It := NextSymbol (It); end loop; end Write_Protected_Declarations; begin -- Write_Type Write_Type_Info (File => File, Type_Mark => Type_Mark); case RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) is when Enumeration_Type_Item => if Is_Type (Type_Mark => Type_Mark) then Write_Enumeration_Literals (File => File, Type_Mark => Type_Mark); end if; when Record_Type_Item => Write_Record_Components (File => File, Type_Mark => Type_Mark); when Array_Type_Item => Write_Array_Indices (File => File, Type_Mark => Type_Mark); when Protected_Type_Item => if Is_Type (Type_Mark => Type_Mark) then Write_Protected_Declarations (File => File, The_Protected_Type => Type_Mark); end if; when others => null; end case; end Write_Type; begin -- Write_Declarative_Item case RawDict.GetSymbolDiscriminant (Item) is when Type_Symbol => Write_Type (File => File, Type_Mark => RawDict.Get_Type_Info_Ref (Item => Item)); when Variable_Symbol => Write_Variable (File => File, The_Variable => RawDict.Get_Variable_Info_Ref (Item => Item)); when Constant_Symbol => Write_Constant (File => File, The_Constant => RawDict.Get_Constant_Info_Ref (Item => Item)); when Subprogram_Symbol => Write_Subprogram (File => File, The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Item), The_Implicit_Proof_Function => NullSymbol); when Package_Symbol => Write_Package (File => File, The_Package => RawDict.Get_Package_Info_Ref (Item => Item)); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Write_Declarative_Item"); end case; end Write_Declarative_Item; begin -- Write_Declarative_Items Declarative_Items := First_Declarative_Item (Scope => Scope); loop exit when IsNullIterator (Declarative_Items); Declarative_Item := CurrentSymbol (Declarative_Items); if RawDict.GetSymbolDiscriminant (Declarative_Item) = Type_Symbol and then Is_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Declarative_Item)) and then Type_Is_Private (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Declarative_Item)) and then (Get_Visibility (Scope => Scope) = Local or else Get_Visibility (Scope => Scope) = Privat) then null; elsif (Get_Visibility (Scope => Scope) = Local or else Get_Visibility (Scope => Scope) = Privat) and then RawDict.GetSymbolDiscriminant (Declarative_Item) = Constant_Symbol and then Constant_Is_Deferred (The_Constant => RawDict.Get_Constant_Info_Ref (Item => Declarative_Item)) then null; else Write_Declarative_Item (File => File, Item => Declarative_Item); end if; Declarative_Items := NextSymbol (Declarative_Items); end loop; end Write_Declarative_Items; -------------------------------------------------------------------------------- 
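      -- Illustrative example (an assumption, not taken from the original
      -- source) of the filtering done by Write_Declarative_Items above:
      -- given a unit such as
      --
      --    package P is
      --       type T is private;
      --       C : constant T;
      --    private
      --       type T is range 1 .. 10;
      --       C : constant T := 5;
      --    end P;
      --
      -- the walks of the Privat and Local scopes skip the completions of the
      -- private type T and the deferred constant C, presumably because the
      -- corresponding visible declarations have already been written when
      -- the Visible scope was walked.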
procedure Write_Package (File : in SPARK_IO.File_Type; The_Package : in RawDict.Package_Info_Ref) is procedure Write_Owned_Packages (File : in SPARK_IO.File_Type; The_Package : in RawDict.Package_Info_Ref) is Packages : Iterator; begin Packages := First_Owned_Package (The_Package => The_Package); loop exit when IsNullIterator (Packages); Write_Package_Info (File => File, The_Package => RawDict.Get_Package_Info_Ref (CurrentSymbol (Packages))); Write_Package (File => File, The_Package => RawDict.Get_Package_Info_Ref (CurrentSymbol (Packages))); Packages := NextSymbol (Packages); end loop; end Write_Owned_Packages; begin -- Write_Package Write_With_References (File => File, Scope => Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (The_Package))); Write_With_References (File => File, Scope => Set_Visibility (The_Visibility => Local, The_Unit => RawDict.Get_Package_Symbol (The_Package))); Write_Inherits_References (File => File, Compilation_Unit => RawDict.Get_Package_Symbol (The_Package)); Write_Own_Variables (File => File, The_Package => The_Package); Write_Embedded_Packages (File => File, The_Package => The_Package); Write_Abstract_Own_Variables (File => File, The_Package => The_Package); Write_Declarative_Items (File => File, Scope => Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (The_Package))); Write_Declarative_Items (File => File, Scope => Set_Visibility (The_Visibility => Privat, The_Unit => RawDict.Get_Package_Symbol (The_Package))); Write_Declarative_Items (File => File, Scope => Set_Visibility (The_Visibility => Local, The_Unit => RawDict.Get_Package_Symbol (The_Package))); Write_Subprogram_Bodies (File => File, Scope => Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (The_Package))); Write_Subprogram_Bodies (File => File, Scope => Set_Visibility (The_Visibility => Local, The_Unit => RawDict.Get_Package_Symbol (The_Package))); Write_Loops (File => File, Compilation_Unit => RawDict.Get_Package_Symbol (The_Package)); Write_Owned_Packages (File => File, The_Package => The_Package); end Write_Package; -------------------------------------------------------------------------------- procedure Write_Subprogram (File : in SPARK_IO.File_Type; The_Subprogram : in RawDict.Subprogram_Info_Ref; The_Implicit_Proof_Function : in Symbol) is begin Write_Subprogram_Info (File => File, The_Subprogram => The_Subprogram); Write_With_References (File => File, Scope => Set_Visibility (The_Visibility => Local, The_Unit => RawDict.Get_Subprogram_Symbol (The_Subprogram))); Write_Inherits_References (File => File, Compilation_Unit => RawDict.Get_Subprogram_Symbol (The_Subprogram)); Write_Subprogram_Parameters (File => File, The_Subprogram => The_Subprogram, The_Implicit_Proof_Function => The_Implicit_Proof_Function); Write_Subprogram_Global_Variables (Abstraction => IsAbstract, File => File, The_Subprogram => The_Subprogram); if Is_Procedure (The_Subprogram => The_Subprogram) then Write_Subprogram_Dependency_Clauses (Abstraction => IsAbstract, File => File, The_Subprogram => The_Subprogram); end if; if RawDict.Get_Subprogram_Generic_Unit (The_Subprogram => The_Subprogram) /= RawDict.Null_Generic_Unit_Info_Ref then Write_Generic_Formal_Parameters (File => File, The_Generic => The_Subprogram); end if; Write_Declarative_Items (File => File, Scope => Set_Visibility (The_Visibility => Local, The_Unit => RawDict.Get_Subprogram_Symbol (The_Subprogram))); Write_Loops (File => File, Compilation_Unit => 
RawDict.Get_Subprogram_Symbol (The_Subprogram)); end Write_Subprogram; begin -- Write_Library_Unit case RawDict.GetSymbolDiscriminant (Unit) is when Package_Symbol => Write_Library_Package (File => File, The_Package => RawDict.Get_Package_Info_Ref (Item => Unit)); when Subprogram_Symbol => Write_Subprogram (File => File, The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Unit), The_Implicit_Proof_Function => NullSymbol); when ImplicitProofFunctionSymbol => Write_Subprogram (File => File, The_Subprogram => RawDict.GetImplicitProofFunctionAdaFunction (Unit), The_Implicit_Proof_Function => Unit); when others => null; end case; end Write_Library_Unit; begin -- Write_Library_Units Library_Units := First_Library_Unit; loop exit when IsNullIterator (Library_Units); Write_Library_Unit (File => File, Unit => CurrentSymbol (Library_Units)); Library_Units := NextSymbol (Library_Units); end loop; -- If we are in Ravencar mode, then a pseudo procedure called main_program will have been -- created as a place to store partition-wide flow anno data. We print these details out now. if Get_The_Partition /= RawDict.Null_Subprogram_Info_Ref then Write_Library_Unit (File => File, Unit => RawDict.Get_Subprogram_Symbol (Get_The_Partition)); end if; end Write_Library_Units; -------------------------------------------------------------------------------- procedure Append_Viewer_Specific_Info (File : in SPARK_IO.File_Type) --# global in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from * & --# SPARK_IO.File_Sys from *, --# Dict, --# File; is MaxLineLength : constant Positive := 512; subtype LineIndex is Positive range 1 .. MaxLineLength; subtype Lines is String (LineIndex); Line : Lines; Length : Natural; File_Status : SPARK_IO.File_Status; TemporaryFile : SPARK_IO.File_Type; begin TemporaryFile := Dict.TemporaryFile; SPARK_IO.Reset (TemporaryFile, SPARK_IO.In_File, File_Status); if File_Status = SPARK_IO.Ok then loop exit when SPARK_IO.End_Of_File (TemporaryFile); SPARK_IO.Get_Line (TemporaryFile, Line, Length); SPARK_IO.Put_Line (File, Line, Length); end loop; end if; Dict.TemporaryFile := TemporaryFile; end Append_Viewer_Specific_Info; begin -- Write Local_File_Name := FileSystem.Case_Of_Files_For_Create (E_Str => File_Name); E_Strings.Create (File => File, Name_Of_File => Local_File_Name, Form_Of_File => "", Status => Status); if Status = SPARK_IO.Ok then -- File := SPARK_IO.Standard_Output; -- useful debug statement Write_Library_Units (File => File); Append_Viewer_Specific_Info (File => File); --# accept Flow, 10, File, "Expected ineffective assignment"; SPARK_IO.Close (File, Status); --# end accept; end if; end Write; spark-2012.0.deb/examiner/sem-compunit-walkstatements-checkformutuallyexclusivebranches.adb0000644000175000017500000004260311753202336031427 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------- -- CheckForMutuallyExclusiveBranches -- -- Implementation Notes: -- The details of the algorithm used is described in S.P0468.53.49. -- The set of ancestor conditional branches and the set of -- the closest sequences of statements eminating from a conditional -- branch node are constructed using SeqAlgebra.Seq objects. -------------------------------------------------------------------- separate (Sem.CompUnit.WalkStatements) procedure CheckForMutuallyExclusiveBranches (Given_Node, Preceding_Node : in STree.SyntaxNode; The_Heap : in out Heap.HeapRecord; Are_Mutually_Exclusive : out Boolean) is Ancestor_Cond_Branches : SeqAlgebra.Seq; Set_Of_Seq_Of_Statements : SeqAlgebra.Seq; Branch_Node : STree.SyntaxNode; Cond_Ancestor : STree.SyntaxNode; Common_Ancestor : STree.SyntaxNode; Given_Node_Seq_Stat : STree.SyntaxNode; Preceding_Node_Seq_Stat : STree.SyntaxNode; Iter : STree.Iterator; function Locate_Child_Of_Type (Node : STree.SyntaxNode; Child_Type : SP_Symbols.SP_Symbol) return STree.SyntaxNode --# global in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.if_statement or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.case_statement or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.else_part or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.others_part; --# return Return_Node => (Syntax_Node_Type (Return_Node, STree.Table) = Child_Type or --# Return_Node = STree.NullNode); is Child : STree.SyntaxNode; begin Child := Child_Node (Current_Node => Node); while Child /= STree.NullNode and then Syntax_Node_Type (Node => Child) /= Child_Type loop Child := Next_Sibling (Current_Node => Child); end loop; return Child; end Locate_Child_Of_Type; procedure Find_Recursive_Branches (Node : in STree.SyntaxNode; Find_Type : in SP_Symbols.SP_Symbol; Branch_Set : in SeqAlgebra.Seq; The_Heap : in out Heap.HeapRecord) --# global in STree.Table; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# The_Heap from *, --# Branch_Set, --# Find_Type, --# Node, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.elsif_part or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.alternatives; is Child : STree.SyntaxNode; Next_Instance : STree.SyntaxNode; Recurse_Over : SP_Symbols.SP_Symbol; Iter : STree.Iterator; begin -- Determine the type of node we are recursing over. Recurse_Over := Syntax_Node_Type (Node => Node); -- Find all children from the given node and search for a node -- of the Find_Type only on children which are not recursive. -- For if and case statements a parent has at most 1 recursive -- child node. Only traverse a recursive child node after all -- its siblings have been traversed. The process is then -- repeated until a node without a recursive node is encountered. -- when all the children of the node have been processed the -- loop terminates. -- The traversal of the syntax tree is not pre-order but -- the order in which the nodes are placed into the Branch_Set -- is unimportant. 
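            -- Illustrative sketch (an assumption about the grammar shape, not
            -- taken from the original source): for
            --
            --    if A then S1; elsif B then S2; elsif C then S3; end if;
            --
            -- the elsif_part subtree contains exactly one nested elsif_part
            -- plus non-recursive children holding the branch bodies, so the
            -- loop below adds the sequence_of_statements nodes for S2 and S3
            -- to Branch_Set, visiting the nested elsif_part only after all of
            -- its siblings have been processed.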
Next_Instance := Node; while Next_Instance /= STree.NullNode loop Child := Child_Node (Current_Node => Next_Instance); Next_Instance := STree.NullNode; while Child /= STree.NullNode loop if Syntax_Node_Type (Node => Child) = Recurse_Over then -- There is at most one instance of a recursive child node. Next_Instance := Child; else Iter := Find_First_Node (Node_Kind => Find_Type, From_Root => Child, In_Direction => STree.Down); if not STree.IsNull (Iter) then -- Only add the set of branches if a -- node of the Find_Type is present. SeqAlgebra.AddMember (The_Heap, Branch_Set, Natural (STree.NodeToRef (Get_Node (It => Iter)))); end if; end if; Child := Next_Sibling (Current_Node => Child); end loop; end loop; end Find_Recursive_Branches; procedure Find_If_Branches (If_Node : in STree.SyntaxNode; Branch_Set : in SeqAlgebra.Seq; The_Heap : in out Heap.HeapRecord) --# global in STree.Table; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# The_Heap from *, --# Branch_Set, --# If_Node, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (If_Node, STree.Table) = SP_Symbols.if_statement; is Current_Child : STree.SyntaxNode; begin -- Process "then" part. Current_Child := Locate_Child_Of_Type (Node => If_Node, Child_Type => SP_Symbols.sequence_of_statements); -- ASSUME Current_Child = sequence_of_statements OR NULL if Syntax_Node_Type (Node => Current_Child) = SP_Symbols.sequence_of_statements then -- ASSUME Current_Child = sequence_of_statements -- there should always be a then part otherwise the -- syntax tree is invalid, but no error is raised here -- as it will be reported elsewhere by the Examiner. SeqAlgebra.AddMember (The_Heap, Branch_Set, Natural (STree.NodeToRef (Current_Child))); end if; -- Process the "else" part if one exists Current_Child := Locate_Child_Of_Type (Node => If_Node, Child_Type => SP_Symbols.else_part); -- ASSUME Current_Child = else_part OR NULL if Syntax_Node_Type (Node => Current_Child) = SP_Symbols.else_part then -- ASSUME Current_Child = else_part Current_Child := Locate_Child_Of_Type (Node => Current_Child, Child_Type => SP_Symbols.sequence_of_statements); -- ASSUME Current_Child = sequence_of_statements OR NULL if Syntax_Node_Type (Node => Current_Child) = SP_Symbols.sequence_of_statements then -- ASSUME Current_Child = sequence_of_statements -- Only add the branch if the else sequence of statements exist. SeqAlgebra.AddMember (The_Heap, Branch_Set, Natural (STree.NodeToRef (Current_Child))); end if; end if; -- Process the elsif part if one exists. Current_Child := Locate_Child_Of_Type (Node => If_Node, Child_Type => SP_Symbols.elsif_part); -- ASSUME Current_Child = elsif_part OR NULL if Syntax_Node_Type (Node => Current_Child) = SP_Symbols.elsif_part then -- ASSUME Current_Child = elsif_part Find_Recursive_Branches (Node => Current_Child, Find_Type => SP_Symbols.sequence_of_statements, Branch_Set => Branch_Set, The_Heap => The_Heap); end if; end Find_If_Branches; procedure Find_Case_Branches (Case_Node : in STree.SyntaxNode; Branch_Set : in SeqAlgebra.Seq; The_Heap : in out Heap.HeapRecord) --# global in STree.Table; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# The_Heap from *, --# Branch_Set, --# Case_Node, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Case_Node, STree.Table) = SP_Symbols.case_statement; is Current_Child : STree.SyntaxNode; begin -- Process "others" part if it exists. 
Current_Child := Locate_Child_Of_Type (Node => Case_Node, Child_Type => SP_Symbols.others_part); -- ASSUME Current_Child = others_part OR NULL if Syntax_Node_Type (Node => Current_Child) = SP_Symbols.others_part then -- ASSUME Current_Child = others_part Current_Child := Locate_Child_Of_Type (Node => Current_Child, Child_Type => SP_Symbols.sequence_of_statements); -- ASSUME Current_Child = sequence_of_statements OR NULL if Syntax_Node_Type (Node => Current_Child) = SP_Symbols.sequence_of_statements then -- ASSUME Current_Child = sequence_of_statements SeqAlgebra.AddMember (The_Heap, Branch_Set, Natural (STree.NodeToRef (Current_Child))); end if; end if; -- Process the alternatives part if one exists. Current_Child := Locate_Child_Of_Type (Node => Case_Node, Child_Type => SP_Symbols.alternatives); -- ASSUME Current_Child = alternatives OR NULL if Syntax_Node_Type (Node => Current_Child) = SP_Symbols.alternatives then -- ASSUME Current_Child = alternatives Find_Recursive_Branches (Node => Current_Child, Find_Type => SP_Symbols.sequence_of_statements, Branch_Set => Branch_Set, The_Heap => The_Heap); end if; end Find_Case_Branches; function Find_Conntaining_Sequence_Of_Statements (Node : STree.SyntaxNode; Set_Of_Seq_Of_Statements : SeqAlgebra.Seq; The_Heap : Heap.HeapRecord) return STree.SyntaxNode --# global in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.assignment_statement or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.procedure_call_statement; --# return Return_Node => (Syntax_Node_Type (Return_Node, STree.Table) = SP_Symbols.sequence_of_statements or --# Return_Node = STree.NullNode); is Iter : STree.Iterator; Seq_Statements_Node : STree.SyntaxNode; begin Iter := Find_First_Node (Node_Kind => SP_Symbols.sequence_of_statements, From_Root => Node, In_Direction => STree.Up); if STree.IsNull (Iter) then Seq_Statements_Node := STree.NullNode; else Seq_Statements_Node := Get_Node (It => Iter); end if; while not (STree.IsNull (Iter) or else SeqAlgebra.IsMember (The_Heap, Set_Of_Seq_Of_Statements, Natural (STree.NodeToRef (Seq_Statements_Node)))) loop --# assert Syntax_Node_Type (Seq_Statements_Node, STree.Table) = SP_Symbols.sequence_of_statements and --# Seq_Statements_Node = Get_Node (Iter); Iter := STree.NextNode (Iter); if STree.IsNull (Iter) then Seq_Statements_Node := STree.NullNode; else Seq_Statements_Node := Get_Node (It => Iter); end if; end loop; return Seq_Statements_Node; end Find_Conntaining_Sequence_Of_Statements; begin SeqAlgebra.CreateSeq (The_Heap, Ancestor_Cond_Branches); SeqAlgebra.CreateSeq (The_Heap, Set_Of_Seq_Of_Statements); Iter := Find_First_Branch_Node (From_Root => Preceding_Node, In_Direction => STree.Up); -- Determine the set of Ancestor If and Case branch nodes -- of the Preceding Node. 
while not STree.IsNull (Iter) loop Branch_Node := Get_Node (It => Iter); case Syntax_Node_Type (Node => Branch_Node) is -- Only if and case statement branches create -- mutually exclusive sequences of statements when SP_Symbols.if_statement | SP_Symbols.case_statement => -- ASSUME Branch_Node = if_statement OR case_statement SeqAlgebra.AddMember (The_Heap, Ancestor_Cond_Branches, Natural (STree.NodeToRef (Branch_Node))); when others => null; end case; Iter := STree.NextNode (Iter); end loop; if SeqAlgebra.IsEmptySeq (The_Heap, Ancestor_Cond_Branches) then -- The Preceding_Node has no if or case branches and therefore -- cannot be on a mutually exclusive branch to the Given_Node Are_Mutually_Exclusive := False; else -- Find the closest if or case branch common to the -- Preceding_Node and the Given_Node. -- As we traverse up the tree from the Given_Node this will -- ensure that the closest common conditional branch node -- is located. Iter := Find_First_Branch_Node (From_Root => Given_Node, In_Direction => STree.Up); Common_Ancestor := STree.NullNode; while not STree.IsNull (Iter) and then Common_Ancestor = STree.NullNode loop -- The Ancestor_Cond_Branches set only contains conditional -- branch nodes. No need to check again here for type of branch. Cond_Ancestor := Get_Node (It => Iter); if SeqAlgebra.IsMember (The_Heap, Ancestor_Cond_Branches, Natural (STree.NodeToRef (Cond_Ancestor))) then Common_Ancestor := Cond_Ancestor; else Iter := STree.NextNode (Iter); end if; end loop; -- ASSUME Common_Ancestor = if_statement OR case_statement OR NULL if Common_Ancestor = STree.NullNode then -- ASSUME Common_Ancestor = NULL -- The Given_Node and the Preceding_Node have no conditional -- branches in common and therefore are not mutually exclusive. Are_Mutually_Exclusive := False; else -- ASSUME Common_Ancestor = if_statement OR case_statement -- Determine the set of mutually exclusive branches from the -- closest common if or case statement ancestor. -- Both the Given_Node and the Preceding_Node will be contained -- within a sequence of statements. Only the branches which -- contain sequences of statements are considered and the -- nodes representing the sequence of statements form the set. if Syntax_Node_Type (Node => Common_Ancestor) = SP_Symbols.if_statement then -- ASSUME Common_Ancestor = if_statement Find_If_Branches (If_Node => Common_Ancestor, Branch_Set => Set_Of_Seq_Of_Statements, The_Heap => The_Heap); elsif Syntax_Node_Type (Node => Common_Ancestor) = SP_Symbols.case_statement then -- ASSUME Common_Ancestor = case_statement Find_Case_Branches (Case_Node => Common_Ancestor, Branch_Set => Set_Of_Seq_Of_Statements, The_Heap => The_Heap); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Common_Ancestor = if_statement OR case_statement OR " & "NULL in CheckForMutuallyExclusiveBranches"); end if; -- Find the sequence of statements which contains the Given_Node. -- Such a node must exist. Given_Node_Seq_Stat := Find_Conntaining_Sequence_Of_Statements (Node => Given_Node, Set_Of_Seq_Of_Statements => Set_Of_Seq_Of_Statements, The_Heap => The_Heap); -- Find the sequence of statements which contains the Preceding_Node. -- Such a node must exist. 
Preceding_Node_Seq_Stat := Find_Conntaining_Sequence_Of_Statements (Node => Preceding_Node, Set_Of_Seq_Of_Statements => Set_Of_Seq_Of_Statements, The_Heap => The_Heap); Are_Mutually_Exclusive := Given_Node_Seq_Stat /= Preceding_Node_Seq_Stat; end if; end if; SeqAlgebra.DisposeOfSeq (The_Heap, Ancestor_Cond_Branches); SeqAlgebra.DisposeOfSeq (The_Heap, Set_Of_Seq_Of_Statements); end CheckForMutuallyExclusiveBranches; spark-2012.0.deb/examiner/sem-walk_expression_p-check_binary_operator-homo_impl_type_conv.adb0000644000175000017500000002156111753202336031645 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Walk_Expression_P.Check_Binary_Operator) procedure Homo_Impl_Type_Conv (Operator : in SP_Symbols.SP_Symbol; Left_Type, Right_Type : in out Dictionary.Symbol; Left_Val : in Maths.Value; Right_Val : in Maths.Value; Left_Has_Operators : in Boolean; Right_Has_Operators : in Boolean; Left_Pos : in LexTokenManager.Token_Position; Right_Pos : in LexTokenManager.Token_Position; Is_Annotation : in Boolean; T_Stack : in Type_Context_Stack.T_Stack_Type; Scope : in Dictionary.Scopes) is Type_From_Context : Dictionary.Symbol; Base_Type : Dictionary.Symbol; New_Val : Maths.Value; procedure Debug_1 --# derives ; is --# hide Debug_1; begin if CommandLineData.Content.Debug.Expressions then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Homo_Impl_Type_Conv encounters a universal expression. Context is Unknown so no change.", 0); end if; end Debug_1; procedure Debug_2 --# derives ; is --# hide Debug_2; begin if CommandLineData.Content.Debug.Expressions then SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Homo_Impl_Type_Conv encounters a universal expression. Resolving by context to type ", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Dictionary.GetSimpleName (Type_Context_Stack.Top (Stack => T_Stack)))); end if; end Debug_2; function Is_Relational_Operator (Operator : SP_Symbols.SP_Symbol) return Boolean is begin return Operator = SP_Symbols.equals or else Operator = SP_Symbols.not_equal or else Operator = SP_Symbols.less_than or else Operator = SP_Symbols.less_or_equal or else Operator = SP_Symbols.greater_than or else Operator = SP_Symbols.greater_or_equal; end Is_Relational_Operator; begin if Dictionary.Types_Are_Equal (Left_Symbol => Left_Type, Right_Symbol => Right_Type, Full_Range_Subtype => False) then -- Types are the same. If both are universal integer, then the -- expression may be of a signed integer or modular type, and we -- need to use the context to resolve this. 
if Dictionary.IsUniversalIntegerType (Left_Type) and then Dictionary.IsUniversalIntegerType (Right_Type) then if Dictionary.IsUnknownTypeMark (Type_Context_Stack.Top (Stack => T_Stack)) or else Dictionary.IsPredefinedBooleanType (Type_Context_Stack.Top (Stack => T_Stack)) then -- If the context is unknown or Boolean (as we might have for a subexpression -- below a relational operator for instance), then we can do nothing. -- We leave both operands as UniversalInteger to preserve existing -- Examiner behaviour in that case. null; Debug_1; else -- If we do know a definite type from the context, then we convert -- the Universal operands to that type here. -- -- The visibility of the operator (which will be determined later) is -- dependent on the _base_ type of the type, so... Type_From_Context := Dictionary.GetRootType (Type_Context_Stack.Top (Stack => T_Stack)); Left_Type := Type_From_Context; Right_Type := Type_From_Context; Debug_2; end if; end if; else -- Types are different. if Dictionary.IsUniversalIntegerType (Left_Type) then if Dictionary.IsIntegerTypeMark (Right_Type, Scope) then -- Right is a signed integer type - implicit conversion OK. Left_Type := Right_Type; -- For a signed integer type T, a literal must lie -- in the range of T'Base. If this is known (via -- a type assertion and the config file), then a static -- constraint check can be done here. Base_Type := Dictionary.GetBaseType (Left_Type); if not Dictionary.Is_Null_Symbol (Base_Type) then --# accept F, 10, New_Val, "Final value of New_Val not used"; Sem.Constraint_Check (Val => Left_Val, New_Val => New_Val, Is_Annotation => Is_Annotation, Typ => Base_Type, Position => Left_Pos); --# end accept; end if; elsif Dictionary.IsModularTypeMark (Right_Type, Scope) then -- Right is a Modular type - implicit conversion OK unless -- we're below a relational operator AND the Left subexpression -- contains operators itself. if not (Is_Relational_Operator (Operator => Operator) and then Left_Has_Operators) then Left_Type := Right_Type; -- For a modular type T, a literal must lie in the range -- of T'First .. T'Last, so --# accept F, 10, New_Val, "Final value of New_Val not used"; Sem.Constraint_Check (Val => Left_Val, New_Val => New_Val, Is_Annotation => Is_Annotation, Typ => Left_Type, Position => Left_Pos); --# end accept; end if; end if; elsif Dictionary.IsUniversalIntegerType (Right_Type) then if Dictionary.IsIntegerTypeMark (Left_Type, Scope) then -- Left is a signed integer type - implicit conversion OK. Right_Type := Left_Type; -- For a signed integer type T, a literal must lie -- in the range of T'Base. If this is known (via -- a type assertion and the config file), then a static -- constraint check can be done here. Base_Type := Dictionary.GetBaseType (Right_Type); if not Dictionary.Is_Null_Symbol (Base_Type) then --# accept F, 10, New_Val, "Final value of New_Val not used"; Sem.Constraint_Check (Val => Right_Val, New_Val => New_Val, Is_Annotation => Is_Annotation, Typ => Base_Type, Position => Right_Pos); --# end accept; end if; elsif Dictionary.IsModularTypeMark (Left_Type, Scope) then -- Left is a Modular type - implicit conversion OK unless -- we're below a relational operator AND the Right subexpression -- contains operators itself. if not (Is_Relational_Operator (Operator => Operator) and then Right_Has_Operators) then Right_Type := Left_Type; -- For a modular type T, a literal must lie in the range -- of T'First .. 
T'Last, so --# accept F, 10, New_Val, "Final value of New_Val not used"; Sem.Constraint_Check (Val => Right_Val, New_Val => New_Val, Is_Annotation => Is_Annotation, Typ => Right_Type, Position => Right_Pos); --# end accept; end if; end if; elsif Dictionary.IsUniversalRealType (Left_Type) then if Dictionary.IsRealTypeMark (Right_Type, Scope) then Left_Type := Right_Type; end if; elsif Dictionary.IsUniversalRealType (Right_Type) then if Dictionary.IsRealTypeMark (Left_Type, Scope) then Right_Type := Left_Type; end if; end if; end if; --# accept F, 33, New_Val, "Final value of New_Val not used"; end Homo_Impl_Type_Conv; spark-2012.0.deb/examiner/sp_parser_goto-scan_goto_table.adb0000644000175000017500000000420011753202336023064 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (SP_Parser_Goto) procedure Scan_Goto_Table (State_No : in SP_Productions.Valid_States; Index : in out Goto_Index; Goal_State : out SP_Productions.SP_State; Reduction_Goal : out SP_Symbols.SP_Non_Terminal) is Feasible_State : SP_Productions.SP_State; Table_Index : Goto_Index; begin Feasible_State := SP_Productions.No_State; Table_Index := Index; while Table_Index < Goto_Index'Last and then Feasible_State /= State_No loop Feasible_State := SP_Productions.SP_State (Goto_Table (Table_Index) mod State_Size); if Feasible_State /= State_No then Table_Index := Table_Index + 1; end if; end loop; if Feasible_State = State_No then Goal_State := SP_Productions.SP_State ((Goto_Table (Table_Index) / State_Size) mod State_Size); Reduction_Goal := SP_Symbols.SP_Non_Terminal'Val (((Goto_Table (Table_Index) / Red_Goal) mod Red_Goal_Size) + First_Non_Terminal); Index := Table_Index + 1; else Goal_State := SP_Productions.No_State; Reduction_Goal := SP_Symbols.SP_Non_Terminal'First; Index := Table_Index; end if; end Scan_Goto_Table; spark-2012.0.deb/examiner/errorhandler-conversions.adb0000644000175000017500000001547211753202336021767 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (ErrorHandler) package body Conversions --# own State is Explanation_Table, --# Source_Used; is ------------------------------------------------------------------- type Sources is ( Nul, LRM, SR95, SR83, UM, Proof_UM, JB); type Sources_Used is array (Sources) of Boolean; No_Source_Used : constant Sources_Used := Sources_Used'(Sources => False); Source_Used : Sources_Used := No_Source_Used; -- construct table of flags used to note when an error explanation of a particular -- kind, number and destination has been given. type Purpose_Array is array (Error_Types.ConversionRequestSource) of Boolean; Empty_Purpose_Array : constant Purpose_Array := Purpose_Array'(False, False, False, False); type Error_Number_Array is array (Error_Types.ErrNumRange) of Purpose_Array; Empty_Error_Number_Array : constant Error_Number_Array := Error_Number_Array'(Error_Types.ErrNumRange => Empty_Purpose_Array); type Explanation_Classes is ( Flow_Errors, Dependency_Errs, Semantic_Errs, Dep_Semantic_Errs, Warnings, Notes, Control_Flows, Ineffective_Statements); type Explanation_Tables is array (Explanation_Classes) of Error_Number_Array; Empty_Explanation_Table : constant Explanation_Tables := Explanation_Tables'(Explanation_Classes => Empty_Error_Number_Array); -- giving us the actual table (and refinement constituent) Explanation_Table : Explanation_Tables := Empty_Explanation_Table; -------------------------------------------------------------------------- procedure ToString (Err_Num : in Error_Types.NumericError; Purpose : in Error_Types.ConversionRequestSource; Err_Str : out Error_Types.StringError) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Explanation_Table; --# in out Source_Used; --# derives Err_Str from CommandLineData.Content, --# Dictionary.Dict, --# Err_Num, --# Explanation_Table, --# LexTokenManager.State, --# Purpose & --# Explanation_Table from *, --# CommandLineData.Content, --# Err_Num, --# Purpose & --# Source_Used from *, --# CommandLineData.Content, --# Err_Num; is separate; -------------------------------------------------------------------------- procedure Output_Reference_List (To_File : in SPARK_IO.File_Type) --# global in CommandLineData.Content; --# in Source_Used; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Source_Used, --# To_File; is procedure Output_A_Reference (Source : in Sources; To_File : in SPARK_IO.File_Type) --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Source, --# To_File; is begin case Source is when LRM => case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => SPARK_IO.Put_Line (To_File, "Ada LRM: Ada Reference Manual (ANSI/MIL-STD-1815A-1983)", 0); when CommandLineData.SPARK95 => SPARK_IO.Put_Line (To_File, "Ada LRM: Ada Reference Manual (ISO/IEC 8652:1995)", 0); when CommandLineData.SPARK2005_Profiles => SPARK_IO.Put_Line (To_File, "Ada LRM: Ada 2005 Reference Manual (ISO/IEC 8652:1995/AMD.1:2007)", 0); end case; when SR83 => SPARK_IO.Put_Line (To_File, "SR: SPARK - The SPADE Ada83 Kernel", 0); when SR95 => SPARK_IO.Put_Line (To_File, "SR: SPARK95 
- The SPADE Ada Kernel (including RavenSPARK)", 0); when UM => SPARK_IO.Put_Line (To_File, "User Manual: Examiner User Manual", 0); when Proof_UM => SPARK_IO.Put_Line (To_File, "SPARK Proof Manual", 0); when JB => SPARK_IO.Put_Line (To_File, "Barnes: High Integrity Software - The SPARK Approach", 0); when others => SPARK_IO.Put_Line (To_File, "Unexpected reference table entry", 0); end case; end Output_A_Reference; --------------------- begin --Output_Reference_List for I in Sources range LRM .. Sources'Last loop if Source_Used (I) then SPARK_IO.New_Line (To_File, 2); SPARK_IO.Put_Line (To_File, "References used:", 0); for J in Sources range LRM .. Sources'Last loop if Source_Used (J) then Output_A_Reference (Source => J, To_File => To_File); end if; end loop; exit; end if; end loop; end Output_Reference_List; -- state initialized at declaration end Conversions; ././@LongLink0000000000000000000000000000016500000000000011567 Lustar rootrootspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_integer.adbspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaratio0000644000175000017500000003435211753202336033127 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SLI; separate (Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Full_Type_Declaration) procedure Wf_Integer (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Ident_Node : in STree.SyntaxNode; Dec_Loc : in LexTokenManager.Token_Position; The_Heap : in out Heap.HeapRecord) is Exp_Node : STree.SyntaxNode; Left_Exp_Type, Right_Exp_Type : Exp_Record; Unwanted_Seq : SeqAlgebra.Seq; Lower, Upper : LexTokenManager.Lex_String; -- StoreVals of type's bounds Unused_Component_Data : ComponentManager.ComponentData; Type_Symbol : Dictionary.Symbol; -- Checks that Lower .. 
Upper are legal wrt System.Min_Int and System.Max_Int procedure Check_Against_Root_Integer (Dec_Loc : in LexTokenManager.Token_Position; Lower, Upper : in LexTokenManager.Lex_String) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dec_Loc, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lower, --# SPARK_IO.File_Sys, --# Upper; is System_Sym : Dictionary.Symbol; Min_Int_Sym : Dictionary.Symbol; Min_Int_Val : LexTokenManager.Lex_String; Max_Int_Sym : Dictionary.Symbol; Max_Int_Val : LexTokenManager.Lex_String; Result : Maths.Value; Unused : Maths.ErrorCode; Range_OK : Boolean; begin -- We only check in 95 onwards, since System may not be -- specified in the target configuration file in SPARK83 mode. case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => null; when CommandLineData.SPARK95_Onwards => System_Sym := Dictionary.LookupItem (Name => LexTokenManager.System_Token, Scope => Dictionary.GlobalScope, Context => Dictionary.ProgramContext, Full_Package_Name => False); -- The user may or may not have bothered to supply -- package System, so... if not Dictionary.Is_Null_Symbol (System_Sym) then -- Find System.Min_Int and check Lower against it. Min_Int_Sym := Dictionary.LookupSelectedItem (Prefix => System_Sym, Selector => LexTokenManager.Min_Int_Token, Scope => Dictionary.GetScope (System_Sym), Context => Dictionary.ProgramContext); -- Even if the user has supplied a package System, they might -- not have declared Min_Int, so again we have to guard... if not Dictionary.Is_Null_Symbol (Min_Int_Sym) then Min_Int_Val := Dictionary.Get_Value (The_Constant => Min_Int_Sym); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Lower, Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Min_Int_Val, Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq then --# accept Flow, 10, Unused, "Expected ineffective assignment"; Maths.GreaterOrEqual (Maths.ValueRep (Lower), Maths.ValueRep (Min_Int_Val), Result, Unused); --# end accept; --# accept Flow, 10, Unused, "Expected ineffective assignment"; Maths.ValueToBool (Result, Range_OK, Unused); --# end accept; if not Range_OK then ErrorHandler.Semantic_Error (Err_Num => 781, Reference => ErrorHandler.No_Reference, Position => Dec_Loc, Id_Str => LexTokenManager.Null_String); end if; end if; end if; --# assert True; -- for RTC generation -- Find System.Max_Int and check Upper against it. Max_Int_Sym := Dictionary.LookupSelectedItem (Prefix => System_Sym, Selector => LexTokenManager.Max_Int_Token, Scope => Dictionary.GetScope (System_Sym), Context => Dictionary.ProgramContext); -- Even if the user has supplied a package System, they might -- not have declared Max_Int, so again we have to guard... 
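                  -- Illustrative sketch (an assumption about a typical target
                  -- configuration file, not taken from the original source):
                  --
                  --    package System is
                  --       Min_Int : constant := -2**63;
                  --       Max_Int : constant :=  2**63 - 1;
                  --    end System;
                  --
                  -- When such declarations are available, Lower and Upper are
                  -- checked against Min_Int above and Max_Int below; when they
                  -- are absent, the corresponding check is simply skipped.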
if not Dictionary.Is_Null_Symbol (Max_Int_Sym) then Max_Int_Val := Dictionary.Get_Value (The_Constant => Max_Int_Sym); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Lower, Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Max_Int_Val, Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq then --# accept Flow, 10, Unused, "Expected ineffective assignment"; Maths.LesserOrEqual (Maths.ValueRep (Upper), Maths.ValueRep (Max_Int_Val), Result, Unused); --# end accept; --# accept Flow, 10, Unused, "Expected ineffective assignment"; Maths.ValueToBool (Result, Range_OK, Unused); --# end accept; if not Range_OK then ErrorHandler.Semantic_Error (Err_Num => 782, Reference => ErrorHandler.No_Reference, Position => Dec_Loc, Id_Str => LexTokenManager.Null_String); end if; end if; end if; end if; end case; --# accept Flow, 33, Unused, "Expected to be neither referenced nor exported"; end Check_Against_Root_Integer; begin -- Wf_Integer Exp_Node := Child_Node (Current_Node => Child_Node (Current_Node => Child_Node (Current_Node => Node))); -- ASSUME Exp_Node = attribute OR simple_expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.attribute or else Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.simple_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = attribute OR simple_expression in Wf_Integer"); SeqAlgebra.CreateSeq (The_Heap, Unwanted_Seq); ComponentManager.Initialise (Unused_Component_Data); --# accept Flow, 10, Unused_Component_Data, "Expected ineffective assignment"; Walk_Expression_P.Walk_Expression (Exp_Node => Exp_Node, Scope => Scope, Type_Context => Dictionary.GetUnknownTypeMark, Context_Requires_Static => True, Ref_Var => Unwanted_Seq, Result => Left_Exp_Type, Component_Data => Unused_Component_Data, The_Heap => The_Heap); --# end accept; SeqAlgebra.DisposeOfSeq (The_Heap, Unwanted_Seq); Maths.StorageRep (Left_Exp_Type.Value, Lower); if Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.attribute then -- ASSUME Exp_Node = attribute if Left_Exp_Type.Is_ARange then Maths.StorageRep (Left_Exp_Type.Range_RHS, Upper); ErrorHandler.Semantic_Error (Err_Num => 45, Reference => 1, Position => Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); else Lower := LexTokenManager.Null_String; -- no value in error case Upper := LexTokenManager.Null_String; -- no value in error case ErrorHandler.Semantic_Error (Err_Num => 98, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); end if; elsif Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.simple_expression then -- ASSUME Exp_Node = simple_expression if not (Dictionary.TypeIsInteger (Left_Exp_Type.Type_Symbol) or else Dictionary.TypeIsModular (Left_Exp_Type.Type_Symbol) or else Dictionary.IsUnknownTypeMark (Left_Exp_Type.Type_Symbol)) then Lower := LexTokenManager.Null_String; -- no value in error case ErrorHandler.Semantic_Error (Err_Num => 38, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); elsif Left_Exp_Type.Is_ARange then Lower := LexTokenManager.Null_String; -- no value in error case ErrorHandler.Semantic_Error (Err_Num => 114, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); end if; Exp_Node := Next_Sibling (Current_Node => 
Exp_Node); -- ASSUME Exp_Node = simple_expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.simple_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = simple_expression in Wf_Integer"); SeqAlgebra.CreateSeq (The_Heap, Unwanted_Seq); ComponentManager.Initialise (Unused_Component_Data); --# accept Flow, 10, Unused_Component_Data, "Expected ineffective assignment"; Walk_Expression_P.Walk_Expression (Exp_Node => Exp_Node, Scope => Scope, Type_Context => Dictionary.GetUnknownTypeMark, Context_Requires_Static => True, Ref_Var => Unwanted_Seq, Result => Right_Exp_Type, Component_Data => Unused_Component_Data, The_Heap => The_Heap); --# end accept; SeqAlgebra.DisposeOfSeq (The_Heap, Unwanted_Seq); Maths.StorageRep (Right_Exp_Type.Value, Upper); if not (Dictionary.TypeIsInteger (Right_Exp_Type.Type_Symbol) or else Dictionary.TypeIsModular (Right_Exp_Type.Type_Symbol) or else Dictionary.IsUnknownTypeMark (Right_Exp_Type.Type_Symbol)) then Upper := LexTokenManager.Null_String; -- no value in error case ErrorHandler.Semantic_Error (Err_Num => 38, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); elsif Right_Exp_Type.Is_ARange then Lower := LexTokenManager.Null_String; -- no value in error case ErrorHandler.Semantic_Error (Err_Num => 114, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); end if; if not (Left_Exp_Type.Is_Static and then Right_Exp_Type.Is_Static) then ErrorHandler.Semantic_Error (Err_Num => 45, Reference => 1, Position => Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); end if; else Lower := LexTokenManager.Null_String; -- no value in error case Upper := LexTokenManager.Null_String; -- no value in error case end if; Empty_Type_Check (Dec_Loc => Dec_Loc, Lower => Lower, Upper => Upper); Check_Against_Root_Integer (Dec_Loc => Dec_Loc, Lower => Lower, Upper => Upper); Dictionary.Add_Integer_Type (Name => Node_Lex_String (Node => Ident_Node), Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Dec_Loc, End_Position => Dec_Loc), Lower => Lower, Upper => Upper, Scope => Scope, Context => Dictionary.ProgramContext, The_Type => Type_Symbol); STree.Add_Node_Symbol (Node => Ident_Node, Sym => Type_Symbol); if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Symbol (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Ident_Node, Symbol => Type_Symbol, Is_Declaration => True); end if; Heap.ReportUsage (The_Heap); end Wf_Integer; spark-2012.0.deb/examiner/systemerrors.ads0000644000175000017500000001427011753202336017527 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= --# inherit ScreenEcho, --# SPARK_IO, --# Version; package SystemErrors is type Sys_Err_Type is (String_Table_Overflow, Syntax_Tree_Overflow, Parse_Stack_Overflow, Symbol_Table_Overflow_Dynamic, Invalid_Syntax_Tree, Invalid_Symbol_Table, Empty_Heap, Relation_Stack_Underflow, Relation_Stack_Overflow, Invalid_Init, Error_Position_Wrong, Expression_Stack_Corrupt, Expression_Stack_Underflow, Expression_Stack_Overflow, Type_Context_Stack_Corrupt, Type_Context_Stack_Underflow, Type_Context_Stack_Overflow, List_Overflow_In_Expression, List_Overflow_In_Dependency_Clause, List_Overflow_In_Procedure_Call, Case_Stack_Underflow, Case_Stack_Overflow, VCG_Graph_Size_Exceeded, VCG_Heap_Is_Exhausted, VCG_Heap_Is_Corrupted, Ref_List_Key_Cell_Missing, Flow_Analyser_Expression_Limit, Case_Statement_Nesting_Limit, Error_Handler_Temporary_Files, Error_Handler_Source, Disk_Full_Error, Math_Error, Too_Many_Nested_Arrays, Too_Many_Nested_Records, Context_Unit_Stack_Overflow, Context_Unit_Stack_Underflow, Context_File_Heap_Overflow, Context_Unit_Heap_Overflow, Too_Many_File_Lines, Index_Stack_Full, Index_Component_List_Full, Too_Many_Errors, Warning_Name_Too_Long, Unit_Name_In_Index_Too_Long, File_Name_In_Index_Too_Long, Too_Many_Suppressed_Warnings, Unit_Nesting_Too_Deep, Statement_Stack_Underflow, Statement_Stack_Overflow, Wf_Compilation_Unit_Stack_Overflow, Wf_Compilation_Unit_Stack_Underflow, Too_Many_Flow_Analyser_Expressions, Too_Many_Params_In_Procedure_Call, Statistics_Usage_Greater_Than_Table_Size, Aggregate_Stack_Under_Flow, Aggregate_Stack_Over_Flow, Meta_File_Stack_Overflow, Lex_Stack_Overflow, Lex_Stack_Underflow, Component_Manager_Overflow, Component_Error_Overflow, Syntax_Tree_Walk_Error, Precondition_Failure, Postcondition_Failure, Assertion_Failure, Unimplemented_Feature, XML_Schema_Error, XML_Generation_Error, Illegal_XML_Generation_Attempt, String_Over_Flow, Queue_Overflow, XRef_Table_Full, Invalid_Index, -- Add additional specific entries here... Other_Internal_Error); -- Proof function can be asserted true and, on paths where SystemErrors is called -- this will appear in hypotheses; this helps understand what is going on. --# function Halted return Boolean; -- Raises Sys_Err with Msg procedure Fatal_Error (Sys_Err : in Sys_Err_Type; Msg : in String); --# derives null from Msg, --# Sys_Err; --# post Halted and -- this helps us understand VCs where SystemError called --# False; -- this ensures that such paths are provable by contradiction -- if C if False, then Raises Sys_Err with Msg -- if C is True, then returns procedure RT_Assert (C : in Boolean; Sys_Err : in Sys_Err_Type; Msg : in String); --# derives null from C, --# Msg, --# Sys_Err; --# post C or (not C and Halted and False); -- This routine is intended to signal "interesting" but non-critical -- assertions in the Examiner - things that should be True, but if -- False we should know about but don't prevent the Examiner from -- carrying on or the validity of any subsequent analyses. 
-- -- If C is False, then print Msg to Standard_Output and carry on -- If C is True, then no action and returns procedure RT_Warning (C : in Boolean; Msg : in String); --# derives null from C, --# Msg; end SystemErrors; spark-2012.0.deb/examiner/indexmanager-index_table_p.adb0000644000175000017500000011522511753202336022164 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with CommandLineData; with ErrorHandler; with ExaminerConstants; with Fatal; with FileSystem; with SystemErrors; with XMLReport; package body IndexManager.Index_Table_P is subtype Index_Sizes is Integer range 0 .. ExaminerConstants.MaxIndexNumber; subtype Index_Positions is Integer range 1 .. ExaminerConstants.MaxIndexNumber; -- Filename : contains the filename of the index, super index or -- the auxiliary index. -- -- Position : position of the index filename in the source index -- file. -- -- Done : if the index file has been parsed. -- -- File_Type : kind of index (index, super index, auxiliary index). -- -- Unit : the prefix of the unit name (only relevant for auxiliary -- index). -- -- Parent_Index : a reference to the source index file in the list. type Index_Info is record Filename : LexTokenManager.Lex_String; Position : IndexManager.File_Position; Done : Boolean; File_Type : IndexManager.Entry_Types; -- case File_Type is -- when IndexManager.AuxIndex => Unit : LexTokenLists.Lists; -- when others => -- null -- end case; Parent_Index : Index_Sizes; end record; type Index_Contents is array (Index_Positions) of Index_Info; type Index_Tables is record Size : Index_Sizes; Content : Index_Contents; end record; -- This table represents a list which has a partial ordering which must be maintained. -- -- * A full ordering between index file and super index files -- -- * All auxiliary index files belonging to the same index file or -- super index file must be kept together as a set. -- -- The relationship between an index file and its parent must be -- maintained. 
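-- A purely illustrative example (editorial addition, not part of the original
-- sources; the file names below are invented): a table state that respects the
-- ordering rules described above might look like
--
--    1: main.idx    added by Add_Index_File,  Parent_Index = 0 (no parent)
--    2: aux_a.idx   Aux_Index,                Parent_Index = 1  \  auxiliary set of
--    3: aux_b.idx   Aux_Index,                Parent_Index = 1  /  main.idx, contiguous
--    4: super.idx   Super_Index,              Parent_Index = 1
--
-- Add_Aux_Index_File below inserts each new auxiliary entry immediately after
-- its parent (shifting later entries up) so that every auxiliary set stays
-- contiguous, whereas Add_Super_Index_File simply appends to the end of the table.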
Index_Table : Index_Tables; Fatal_Error : Boolean; procedure Stop_SPARK is procedure Raise_Fatal_Index_Manager --# derives ; is --# hide Raise_Fatal_Index_Manager; begin raise Fatal.Index_Manager; end Raise_Fatal_Index_Manager; begin --# accept F, 10, "Ineffective statement here OK"; if Fatal_Error then Raise_Fatal_Index_Manager; end if; --# end accept; --# accept F, 35, Fatal_Error, "Ineffective initial value of variable Fatal_Error here OK"; end Stop_SPARK; procedure Debug_Put_E_Str (E_Str : in E_Strings.T; New_Line : in Boolean) is String_To_Print : E_Strings.T; begin if FileSystem.Use_Windows_Command_Line and then CommandLineData.Content.Plain_Output then String_To_Print := E_Strings.Lower_Case (E_Str => E_Str); else String_To_Print := E_Str; end if; if New_Line then E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => String_To_Print); else E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => String_To_Print); end if; end Debug_Put_E_Str; procedure Output_Error (E : in IndexManager.Library_Manager_Errors; Source_File : in LexTokenManager.Lex_String; Token_Position : in IndexManager.File_Position; Token_String : in E_Strings.T) is begin case E is when IndexManager.ES_FileLocation => ErrorHandler.Index_Manager_Error (S => "Incorrect syntax in File location", Source_File => Source_File, Line_No => Token_Position.Line, Col_No => Token_Position.Col, Token_String => Token_String, Is_Token_Filename => False, Is_Fatal => True); Fatal_Error := True; when IndexManager.ES_IsIn => ErrorHandler.Index_Manager_Error (S => "Incorrect syntax, ""is in"" expected", Source_File => Source_File, Line_No => Token_Position.Line, Col_No => Token_Position.Col, Token_String => Token_String, Is_Token_Filename => False, Is_Fatal => True); Fatal_Error := True; when IndexManager.ES_UnitEntry => ErrorHandler.Index_Manager_Error (S => "Incorrect syntax in Entry type", Source_File => Source_File, Line_No => Token_Position.Line, Col_No => Token_Position.Col, Token_String => Token_String, Is_Token_Filename => False, Is_Fatal => True); Fatal_Error := True; when IndexManager.ES_Comment => ErrorHandler.Index_Manager_Error (S => "Illegal comment, ignored", Source_File => Source_File, Line_No => Token_Position.Line, Col_No => Token_Position.Col, Token_String => Token_String, Is_Token_Filename => False, Is_Fatal => True); Fatal_Error := True; when IndexManager.EW_UnexpectedSuper => ErrorHandler.Index_Manager_Error (S => "Unexpected superindex", Source_File => Source_File, Line_No => Token_Position.Line, Col_No => Token_Position.Col, Token_String => Token_String, Is_Token_Filename => False, Is_Fatal => True); Fatal_Error := True; when IndexManager.EW_IllegalUnitName => ErrorHandler.Index_Manager_Error (S => "Illegal Unit name", Source_File => Source_File, Line_No => Token_Position.Line, Col_No => Token_Position.Col, Token_String => Token_String, Is_Token_Filename => False, Is_Fatal => True); Fatal_Error := True; when IndexManager.ES_Are => ErrorHandler.Index_Manager_Error (S => "Incorrect syntax ""are"" expected", Source_File => Source_File, Line_No => Token_Position.Line, Col_No => Token_Position.Col, Token_String => Token_String, Is_Token_Filename => False, Is_Fatal => True); Fatal_Error := True; when IndexManager.ES_Components => ErrorHandler.Index_Manager_Error (S => "Illegal syntax in Component entry", Source_File => Source_File, Line_No => Token_Position.Line, Col_No => Token_Position.Col, Token_String => Token_String, Is_Token_Filename => False, Is_Fatal => True); Fatal_Error := True; when 
IndexManager.EW_Aux => ErrorHandler.Index_Manager_Error (S => "Unit name in auxindex file is not a suffix of", Source_File => Source_File, Line_No => Token_Position.Line, Col_No => Token_Position.Col, Token_String => Token_String, Is_Token_Filename => False, Is_Fatal => True); Fatal_Error := True; when IndexManager.EW_Index => ErrorHandler.Index_Manager_Error (S => "Cannot open index file", Source_File => Source_File, Line_No => Token_Position.Line, Col_No => Token_Position.Col, Token_String => Token_String, Is_Token_Filename => True, Is_Fatal => True); Fatal_Error := True; when IndexManager.ES_Recursion => ErrorHandler.Index_Manager_Error (S => "Recursive use of index file", Source_File => Source_File, Line_No => Token_Position.Line, Col_No => Token_Position.Col, Token_String => Token_String, Is_Token_Filename => True, Is_Fatal => True); Fatal_Error := True; when IndexManager.EW_Super => ErrorHandler.Index_Manager_Error (S => "Cannot open superindex file", Source_File => Source_File, Line_No => Token_Position.Line, Col_No => Token_Position.Col, Token_String => Token_String, Is_Token_Filename => True, Is_Fatal => True); Fatal_Error := True; when IndexManager.EF_Contradiction => ErrorHandler.Index_Manager_Error (S => "Contradiction in index files", Source_File => Source_File, Line_No => Token_Position.Line, Col_No => Token_Position.Col, Token_String => Token_String, Is_Token_Filename => False, Is_Fatal => True); Fatal_Error := True; when IndexManager.EW_Duplicate => ErrorHandler.Index_Manager_Error (S => "Duplication in index files", Source_File => Source_File, Line_No => Token_Position.Line, Col_No => Token_Position.Col, Token_String => Token_String, Is_Token_Filename => False, Is_Fatal => False); when IndexManager.EW_DuplicateAux => ErrorHandler.Index_Manager_Error (S => "Duplicated index files", Source_File => Source_File, Line_No => Token_Position.Line, Col_No => Token_Position.Col, Token_String => Token_String, Is_Token_Filename => True, Is_Fatal => False); end case; end Output_Error; procedure Add_Index_File (Filename : in E_Strings.T) is Found : Boolean; Lex_Filename : LexTokenManager.Lex_String; begin LexTokenManager.Insert_Examiner_String (Str => Filename, Lex_Str => Lex_Filename); -- Try to find if the index file is already in the list of -- index files. Found := LexTokenManager.Lex_String_Case_Sensitive_Compare (Lex_Str1 => Index_Table.Content (Index_Positions'First).Filename, Lex_Str2 => Lex_Filename) = LexTokenManager.Str_Eq; if not Found then -- The index file is not yet in the list of index files => -- add it. Index_Table.Size := Index_Table.Size + 1; Index_Table.Content (Index_Table.Size) := Index_Info' (Filename => Lex_Filename, Position => IndexManager.File_Position'(Line => 1, Col => 1), Done => False, File_Type => IndexManager.Invalid_Entry_Type, Unit => LexTokenLists.Null_List, Parent_Index => Index_Sizes'First); end if; end Add_Index_File; procedure Add_Super_Index_File (Filename : in E_Strings.T; Position : in IndexManager.File_Position; Source_File : in LexTokenManager.Lex_String) is Found_Filename : Boolean; Found_Source_File : Boolean; Index : Index_Positions; Lex_Filename : LexTokenManager.Lex_String; begin Found_Filename := False; Found_Source_File := False; Index := Index_Positions'First; LexTokenManager.Insert_Examiner_String (Str => Filename, Lex_Str => Lex_Filename); -- Try to find if the super index file is already in the list -- of index files and the index file from where the super index -- file is called. 
for I in Index_Sizes range Index_Positions'First .. Index_Table.Size loop if not Found_Filename and then LexTokenManager.Lex_String_Case_Sensitive_Compare (Lex_Str1 => Index_Table.Content (I).Filename, Lex_Str2 => Lex_Filename) = LexTokenManager.Str_Eq then Found_Filename := True; end if; if not Found_Source_File and then LexTokenManager.Lex_String_Case_Sensitive_Compare (Lex_Str1 => Index_Table.Content (I).Filename, Lex_Str2 => Source_File) = LexTokenManager.Str_Eq then Found_Source_File := True; Index := I; end if; exit when Found_Filename and Found_Source_File; end loop; if Found_Source_File then if not Found_Filename then if Index_Table.Size < ExaminerConstants.MaxIndexNumber then -- The super index file is not yet in the list of -- index files => add it. Index_Table.Size := Index_Table.Size + 1; Index_Table.Content (Index_Table.Size) := Index_Info' (Filename => Lex_Filename, Position => Position, Done => False, File_Type => IndexManager.Super_Index, Unit => LexTokenLists.Null_List, Parent_Index => Index); else -- The list of index files is full. SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Index_Stack_Full, Msg => "INDEXMANAGER.INDEX_TABLE.ADD_SUPER_INDEX_FILE"); end if; else -- The super index file is already in the list of index -- files => recursion. Output_Error (E => IndexManager.ES_Recursion, Source_File => Source_File, Token_Position => Position, Token_String => Filename); end if; else -- The index file has not been found => not normal, stop -- SPARK. SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Index_Stack_Full, Msg => "INDEXMANAGER.INDEX_TABLE.ADD_SUPER_INDEX_FILE"); end if; end Add_Super_Index_File; procedure Add_Aux_Index_File (Filename : in E_Strings.T; Unit : in LexTokenLists.Lists; Position : in IndexManager.File_Position; Source_File : in LexTokenManager.Lex_String) is Found_Filename : Boolean; Found_Source_File : Boolean; Filename_Index : Index_Positions; Source_File_Index : Index_Positions; Lex_Filename : LexTokenManager.Lex_String; begin Found_Filename := False; Found_Source_File := False; Filename_Index := Index_Positions'First; Source_File_Index := Index_Positions'First; LexTokenManager.Insert_Examiner_String (Str => Filename, Lex_Str => Lex_Filename); -- Try to find if the auxiliary index file is already in the -- list of index files and the index file from where the -- auxiliary index file is called. for I in Index_Sizes range Index_Positions'First .. Index_Table.Size loop if not Found_Filename and then LexTokenManager.Lex_String_Case_Sensitive_Compare (Lex_Str1 => Index_Table.Content (I).Filename, Lex_Str2 => Lex_Filename) = LexTokenManager.Str_Eq then Found_Filename := True; Filename_Index := I; end if; if not Found_Source_File and then LexTokenManager.Lex_String_Case_Sensitive_Compare (Lex_Str1 => Index_Table.Content (I).Filename, Lex_Str2 => Source_File) = LexTokenManager.Str_Eq then Found_Source_File := True; Source_File_Index := I; end if; exit when Found_Filename and then Found_Source_File; end loop; if Found_Source_File then if not Found_Filename then if Index_Table.Size < ExaminerConstants.MaxIndexNumber then -- The auxiliary index file is not yet in the list of -- index files => add it to the set for this source -- file and update the parent index references. for I in reverse Index_Sizes range Source_File_Index + 1 .. 
Index_Table.Size loop if Index_Table.Content (I).Parent_Index > Source_File_Index then Index_Table.Content (I).Parent_Index := Index_Table.Content (I).Parent_Index + 1; end if; Index_Table.Content (I + 1) := Index_Table.Content (I); end loop; Index_Table.Size := Index_Table.Size + 1; Index_Table.Content (Source_File_Index + 1) := Index_Info' (Filename => Lex_Filename, Position => Position, Done => False, File_Type => IndexManager.Aux_Index, Unit => Unit, Parent_Index => Source_File_Index); else -- The list of index files is full. SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Index_Stack_Full, Msg => "INDEXMANAGER.INDEX_TABLE.ADD_AUX_INDEX_FILE"); end if; elsif not (Index_Table.Content (Filename_Index).Position = Position and then Index_Table.Content (Filename_Index).File_Type = IndexManager.Aux_Index and then Index_Table.Content (Filename_Index).Unit = Unit and then Index_Table.Content (Filename_Index).Parent_Index = Source_File_Index) then -- The auxiliary index file is already in the list of -- index files => recursion. Output_Error (E => IndexManager.EW_DuplicateAux, Source_File => Source_File, Token_Position => Position, Token_String => Filename); end if; else -- The index file has not been found => not normal, stop -- SPARK. SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Index_Stack_Full, Msg => "INDEXMANAGER.INDEX_TABLE.ADD_AUX_INDEX_FILE"); end if; end Add_Aux_Index_File; procedure Index_File_Done (Filename : in LexTokenManager.Lex_String) is Found : Boolean; Index : Index_Positions; begin Found := False; Index := 1; -- Try to find the index file in the list of index files for I in Index_Sizes range Index_Positions'First .. Index_Table.Size loop Found := LexTokenManager.Lex_String_Case_Sensitive_Compare (Lex_Str1 => Index_Table.Content (I).Filename, Lex_Str2 => Filename) = LexTokenManager.Str_Eq; if Found then Index := I; end if; exit when Found; end loop; if Found and then not Index_Table.Content (Index).Done then -- The index file has been found and is not already marked -- as done => mark it as done. Index_Table.Content (Index).Done := True; if CommandLineData.Content.Debug.File_Names then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "INDEXMANAGER.INDEX_TABLE.INDEX_FILE_DONE : ", Stop => 0); Debug_Put_E_Str (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Filename), New_Line => True); end if; else -- The index file has not been found or is already marked as -- done => not normal, stop SPARK. 
SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Index_Stack_Full, Msg => "INDEXMANAGER.INDEX_TABLE.INDEX_FILE_DONE"); end if; end Index_File_Done; function Is_File_Ancestor (Parent_Filename : in LexTokenManager.Lex_String; Filename : in LexTokenManager.Lex_String) return Boolean is Found_Parent_Filename : Boolean; Found_Filename : Boolean; Found : Boolean; Index_Parent_Filename : Index_Sizes; Index_Filename : Index_Sizes; Return_Val : Boolean; procedure Trace --# derives ; is --# hide Trace; begin if CommandLineData.Content.Debug.File_Names then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "INDEXMANAGER.INDEX_TABLE_P.IS_FILE_ANCESTOR ", Stop => 0); Debug_Put_E_Str (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Parent_Filename), New_Line => False); SPARK_IO.Put_Char (File => SPARK_IO.Standard_Output, Item => ' '); Debug_Put_E_Str (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Filename), New_Line => True); end if; end Trace; begin Trace; if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Parent_Filename, Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Filename, Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq then -- Try to find the parent index file and the index file in -- the list of index files. Index_Parent_Filename := Index_Sizes'First; Index_Filename := Index_Sizes'First; Found_Parent_Filename := False; Found_Filename := False; for I in Index_Sizes range Index_Positions'First .. Index_Table.Size loop if not Found_Parent_Filename and then LexTokenManager.Lex_String_Case_Sensitive_Compare (Lex_Str1 => Index_Table.Content (I).Filename, Lex_Str2 => Parent_Filename) = LexTokenManager.Str_Eq then Found_Parent_Filename := True; Index_Parent_Filename := I; end if; if not Found_Filename and then LexTokenManager.Lex_String_Case_Sensitive_Compare (Lex_Str1 => Index_Table.Content (I).Filename, Lex_Str2 => Filename) = LexTokenManager.Str_Eq then Found_Filename := True; Index_Filename := I; end if; exit when Found_Parent_Filename and Found_Filename; end loop; if Found_Parent_Filename then -- If the parent index file is an auxiliary index file, -- find the associated index file or super index -- file. Index_Sizes'First represents no parent. while Index_Parent_Filename /= Index_Sizes'First and then Index_Table.Content (Index_Parent_Filename).File_Type = IndexManager.Aux_Index loop Index_Parent_Filename := Index_Table.Content (Index_Parent_Filename).Parent_Index; end loop; else -- The parent index file has not been found => not -- normal, stop SPARK. SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Index_Stack_Full, Msg => "INDEXMANAGER.INDEX_TABLE.IS_FILE_ANCESTOR"); end if; if Found_Filename then -- If the index file is an auxiliary index file, find the -- associated index file or super index -- file. Index_Sizes'First represents no parent. while Index_Filename /= Index_Sizes'First and then Index_Table.Content (Index_Filename).File_Type = IndexManager.Aux_Index loop Index_Filename := Index_Table.Content (Index_Filename).Parent_Index; end loop; else -- The index file has not been found => not normal, stop -- SPARK. SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Index_Stack_Full, Msg => "INDEXMANAGER.INDEX_TABLE.IS_FILE_ANCESTOR"); end if; -- Check if the parent index file is actually an ancestor of -- the index file. The index file is the parent of -- itself. Index_Sizes'First represents no parent. 
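-- Editorial note (illustration only, not in the original sources): with the
-- hypothetical table sketched near the top of this package body, a call such as
-- Is_File_Ancestor (Parent_Filename => "main.idx", Filename => "aux_a.idx")
-- first maps each argument that names an auxiliary index up to its enclosing
-- index or super index entry, then walks Parent_Index links upwards from the
-- Filename entry, returning True as soon as the Parent_Filename entry is reached.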
Found := Index_Parent_Filename = Index_Filename; while not Found and Index_Filename /= Index_Sizes'First loop Index_Filename := Index_Table.Content (Index_Filename).Parent_Index; Found := Index_Parent_Filename = Index_Filename; end loop; Return_Val := Found; else -- The empty index filename is the parent of all the index -- files. Return_Val := LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Parent_Filename, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq; end if; return Return_Val; end Is_File_Ancestor; procedure Get_Next_Index_File (Unit : in LexTokenLists.Lists; Top_Filename : in LexTokenManager.Lex_String; Filename : out LexTokenManager.Lex_String; File_Type : out IndexManager.Entry_Types; Aux_Index_Unit : out LexTokenLists.Lists; Position : out IndexManager.File_Position) is L_Filename : LexTokenManager.Lex_String := LexTokenManager.Null_String; L_File_Type : IndexManager.Entry_Types := IndexManager.Invalid_Entry_Type; L_Aux_Index_Unit : LexTokenLists.Lists := LexTokenLists.Null_List; L_Position : IndexManager.File_Position := IndexManager.File_Position'(Line => 1, Col => 1); Found : Boolean := False; begin if CommandLineData.Content.Debug.File_Names then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "INDEXMANAGER.INDEX_TABLE_P.GET_NEXT_INDEX_FILE ", Stop => 0); LexTokenLists.Print_List (File => SPARK_IO.Standard_Output, List => Unit); SPARK_IO.Put_Char (File => SPARK_IO.Standard_Output, Item => ' '); end if; -- First, try to find the more relevant auxiliary index -- file. This relies on the ordering of the list. More local -- index files will be earlier in the list. for I in Index_Sizes range Index_Positions'First .. Index_Table.Size loop --# accept F, 41, "Expect stable expression"; if CommandLineData.Content.Debug.File_Names then -- Debug Debug_Put_E_Str (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Index_Table.Content (I).Filename), New_Line => False); SPARK_IO.Put_Char (File => SPARK_IO.Standard_Output, Item => ' '); if not Index_Table.Content (I).Done then SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "NOT DONE ", Stop => 0); end if; if Index_Table.Content (I).File_Type = IndexManager.Aux_Index then SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "AUXFILE ", Stop => 0); end if; LexTokenLists.Print_List (File => SPARK_IO.Standard_Output, List => Index_Table.Content (I).Unit); end if; --# end accept; if not Index_Table.Content (I).Done and then Index_Table.Content (I).File_Type = IndexManager.Aux_Index and then LexTokenLists.Prefix_Unit (Poss_Prefix => Index_Table.Content (I).Unit, Prefixed => Unit) and then (LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Top_Filename, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq or else Is_File_Ancestor (Parent_Filename => Index_Table.Content (I).Filename, Filename => Top_Filename)) then L_Filename := Index_Table.Content (I).Filename; L_File_Type := Index_Table.Content (I).File_Type; L_Aux_Index_Unit := Index_Table.Content (I).Unit; L_Position := Index_Table.Content (I).Position; Found := True; end if; exit when Found; end loop; if CommandLineData.Content.Debug.File_Names then -- Debug SPARK_IO.New_Line (File => SPARK_IO.Standard_Output, Spacing => 1); end if; if not Found then -- No relevant auxiliary index file found, try to find an -- index file or a super index file. for I in Index_Sizes range Index_Positions'First .. 
Index_Table.Size loop if not Index_Table.Content (I).Done and then Index_Table.Content (I).File_Type /= IndexManager.Aux_Index and then (LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Top_Filename, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq or else Is_File_Ancestor (Parent_Filename => Index_Table.Content (I).Filename, Filename => Top_Filename)) then L_Filename := Index_Table.Content (I).Filename; L_File_Type := Index_Table.Content (I).File_Type; L_Aux_Index_Unit := LexTokenLists.Null_List; L_Position := Index_Table.Content (I).Position; Found := True; end if; exit when Found; end loop; end if; if Found then Filename := L_Filename; File_Type := L_File_Type; Aux_Index_Unit := L_Aux_Index_Unit; Position := L_Position; else Filename := LexTokenManager.Null_String; File_Type := IndexManager.Invalid_Entry_Type; Aux_Index_Unit := LexTokenLists.Null_List; Position := IndexManager.File_Position'(Line => 1, Col => 1); end if; end Get_Next_Index_File; function Is_Aux_File_Ancestor (Parent_Index_Filename : in LexTokenManager.Lex_String; Index_Filename : in LexTokenManager.Lex_String) return Boolean is Found : Boolean; Index : Index_Sizes; Return_Val : Boolean; begin if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Parent_Index_Filename, Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Index_Filename, Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq then -- Try to find the index file in the list of index files. Found := False; Index := Index_Sizes'First; for I in Index_Sizes range Index_Positions'First .. Index_Table.Size loop Found := LexTokenManager.Lex_String_Case_Sensitive_Compare (Lex_Str1 => Index_Table.Content (I).Filename, Lex_Str2 => Index_Filename) = LexTokenManager.Str_Eq; if Found then Index := I; end if; exit when Found; end loop; if Found then -- The index file has been found => if the index file is -- an auxiliary index file, check if the parent index -- file is between the auxiliary index file and the -- associated index file or super index file. Found := LexTokenManager.Lex_String_Case_Sensitive_Compare (Lex_Str1 => Index_Table.Content (Index).Filename, Lex_Str2 => Parent_Index_Filename) = LexTokenManager.Str_Eq; while not Found and (Index /= Index_Sizes'First and then Index_Table.Content (Index).File_Type = IndexManager.Aux_Index) loop Index := Index_Table.Content (Index).Parent_Index; Found := LexTokenManager.Lex_String_Case_Sensitive_Compare (Lex_Str1 => Index_Table.Content (Index).Filename, Lex_Str2 => Parent_Index_Filename) = LexTokenManager.Str_Eq; end loop; Return_Val := Found; else -- The index file has not been found => not normal, stop -- SPARK. SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Index_Stack_Full, Msg => "INDEXMANAGER.INDEX_TABLE.IS_AUX_FILE_ANCESTOR"); Return_Val := False; end if; else -- The empty index filename can only be the auxiliary -- ancestor of itself. 
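-- (Editorial note: this differs from Is_File_Ancestor above, where an empty
--  Parent_Filename is treated as an ancestor of every index file; here the
--  comparison below only succeeds when both names are empty.)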
Return_Val := LexTokenManager.Lex_String_Case_Sensitive_Compare (Lex_Str1 => Parent_Index_Filename, Lex_Str2 => Index_Filename) = LexTokenManager.Str_Eq; end if; return Return_Val; end Is_Aux_File_Ancestor; procedure List_Index_File (Report_File : in SPARK_IO.File_Type) is Filename_Tmp : E_Strings.T; begin if Index_Table.Size = 0 then if not CommandLineData.Content.XML then SPARK_IO.Put_Line (Report_File, "No Index files were used", 0); end if; else if CommandLineData.Content.XML then XMLReport.Start_Section (Section => XMLReport.S_Indexes, Report => Report_File); else SPARK_IO.Put_Line (Report_File, "Index Filename(s) used were: ", 0); end if; for I in Index_Sizes range Index_Positions'First .. Index_Table.Size loop if Index_Table.Content (I).Done then --# accept F, 41, "Expect stable expression"; if CommandLineData.Content.XML then Filename_Tmp := LexTokenManager.Lex_String_To_String (Lex_Str => Index_Table.Content (I).Filename); XMLReport.Index (Plain_Output => CommandLineData.Content.Plain_Output, Idx => Filename_Tmp); E_Strings.Put_String (File => Report_File, E_Str => Filename_Tmp); elsif CommandLineData.Content.Plain_Output then SPARK_IO.Put_String (File => Report_File, Item => " ", Stop => 0); if FileSystem.Use_Windows_Command_Line then --# end accept; E_Strings.Put_String (File => Report_File, E_Str => E_Strings.Lower_Case (E_Str => FileSystem.Just_File (Fn => LexTokenManager.Lex_String_To_String (Lex_Str => Index_Table.Content (I).Filename), Ext => True))); else E_Strings.Put_String (File => Report_File, E_Str => FileSystem.Just_File (Fn => LexTokenManager.Lex_String_To_String (Lex_Str => Index_Table.Content (I).Filename), Ext => True)); end if; SPARK_IO.New_Line (File => Report_File, Spacing => 1); else SPARK_IO.Put_String (File => Report_File, Item => " ", Stop => 0); E_Strings.Put_String (File => Report_File, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Index_Table.Content (I).Filename)); SPARK_IO.New_Line (File => Report_File, Spacing => 1); end if; end if; end loop; if CommandLineData.Content.XML then XMLReport.End_Section (Section => XMLReport.S_Indexes, Report => Report_File); end if; end if; end List_Index_File; begin Index_Table.Size := 0; Fatal_Error := False; --# accept F, 32, Index_Table.Content, "Initialization is partial but effective" & --# F, 31, Index_Table.Content, "Initialization is partial but effective" & --# F, 602, Index_Table, Index_Table.Content, "Initialization is partial but effective"; end IndexManager.Index_Table_P; spark-2012.0.deb/examiner/contextmanager-ops.ads0000644000175000017500000003763111753202336020572 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with ErrorHandler; with E_Strings; with LexTokenLists; with LexTokenManager; with SparkLex; with SPARK_IO; with STree; use type LexTokenLists.Lists; use type LexTokenManager.Str_Comp_Result; use type SPARK_IO.File_Status; use type STree.Iterator; --# inherit CommandLineData, --# ContextManager, --# Dictionary, --# ErrorHandler, --# ExaminerConstants, --# E_Strings, --# FileSystem, --# IndexManager, --# LexTokenLists, --# LexTokenManager, --# ScreenEcho, --# SparkLex, --# SPARK_IO, --# SP_Symbols, --# STree, --# SystemErrors; package ContextManager.Ops --# own File_Heap : File_Heaps; --# Unit_Heap : Unit_Heaps; --# Unit_Stack : Unit_Stacks; --# initializes File_Heap, --# Unit_Heap, --# Unit_Stack; is procedure Open_File (File_Descriptor : in ContextManager.FileDescriptors); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out File_Heap; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# File_Heap, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# File_Descriptor, --# File_Heap, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# SparkLex.Curr_Line from *, --# File_Descriptor, --# File_Heap, --# LexTokenManager.State, --# SPARK_IO.File_Sys; procedure Close_File (File_Descriptor : in ContextManager.FileDescriptors); --# global in out File_Heap; --# in out SPARK_IO.File_Sys; --# derives File_Heap, --# SPARK_IO.File_Sys from *, --# File_Descriptor, --# File_Heap; function Current_Unit return ContextManager.UnitDescriptors; --# global in Unit_Stack; function Get_Unit_Status (Unit_Descriptor : ContextManager.UnitDescriptors) return ContextManager.UnitStatus; --# global in Unit_Heap; procedure Create_Unit_Descriptor (File_Descriptor : in ContextManager.FileDescriptors; Unit_Descriptor : out ContextManager.UnitDescriptors); --# global in out File_Heap; --# in out Unit_Heap; --# derives File_Heap, --# Unit_Heap from *, --# File_Descriptor, --# File_Heap & --# Unit_Descriptor from Unit_Heap; function Get_File_Descriptor (Unit_Descriptor : ContextManager.UnitDescriptors) return ContextManager.FileDescriptors; --# global in Unit_Heap; procedure SetUnitStatus (Descriptor : in ContextManager.UnitDescriptors; Status : in ContextManager.UnitStatus); --# global in out Unit_Heap; --# derives Unit_Heap from *, --# Descriptor, --# Status; function GetFileStatus (Descriptor : ContextManager.FileDescriptors) return ContextManager.FileStatus; --# global in File_Heap; procedure SetVCG (Descriptor : in ContextManager.UnitDescriptors; VCG : in Boolean); --# global in out Unit_Heap; --# derives Unit_Heap from *, --# Descriptor, --# VCG; procedure GetVCG (Descriptor : in ContextManager.UnitDescriptors; VCG : out Boolean); --# global in Unit_Heap; --# derives VCG from Descriptor, --# Unit_Heap; procedure GetUnitByName (UnitName : in LexTokenLists.Lists; UnitTypeSet : in ContextManager.UnitTypeSets; Descriptor : out ContextManager.UnitDescriptors); --# global in LexTokenManager.State; --# in Unit_Heap; --# derives Descriptor from LexTokenManager.State, --# UnitName, --# UnitTypeSet, --# Unit_Heap; procedure SetUnitName (Descriptor : in ContextManager.UnitDescriptors; UnitName : in LexTokenLists.Lists; UnitType : in ContextManager.UnitTypes); --# global in out Unit_Heap; --# derives Unit_Heap from *, --# Descriptor, --# UnitName, --# 
UnitType; procedure GetUnitName (Descriptor : in ContextManager.UnitDescriptors; UnitName : out LexTokenLists.Lists; UnitType : out ContextManager.UnitTypes); --# global in Unit_Heap; --# derives UnitName, --# UnitType from Descriptor, --# Unit_Heap; procedure SetParseTree (Descriptor : in ContextManager.UnitDescriptors; ParseTree : in STree.SyntaxNode); --# global in STree.Table; --# in out Unit_Heap; --# derives Unit_Heap from *, --# Descriptor, --# ParseTree, --# STree.Table; procedure GetParseTree (Descriptor : in ContextManager.UnitDescriptors; ParseTree : out STree.SyntaxNode); --# global in Unit_Heap; --# derives ParseTree from Descriptor, --# Unit_Heap; procedure MarkUnitInCycle (Descriptor : in ContextManager.UnitDescriptors); --# global in out Unit_Heap; --# derives Unit_Heap from *, --# Descriptor; function UnitInCycle (Descriptor : ContextManager.UnitDescriptors) return Boolean; --# global in Unit_Heap; function FirstUnitDescriptor return ContextManager.UnitDescriptors; --# global in Unit_Heap; function NextUnitDescriptor (Descriptor : ContextManager.UnitDescriptors) return ContextManager.UnitDescriptors; --# global in Unit_Heap; procedure PushUnit (Descriptor : in ContextManager.UnitDescriptors); --# global in out Unit_Stack; --# derives Unit_Stack from *, --# Descriptor; procedure PopUnit (Descriptor : out ContextManager.UnitDescriptors); --# global in out Unit_Stack; --# derives Descriptor, --# Unit_Stack from Unit_Stack; procedure CreateFileDescriptor (Descriptor : out ContextManager.FileDescriptors); --# global in out File_Heap; --# derives Descriptor, --# File_Heap from File_Heap; procedure SetSourceFileName (Descriptor : in ContextManager.FileDescriptors; SourceFileName : in LexTokenManager.Lex_String); --# global in out File_Heap; --# derives File_Heap from *, --# Descriptor, --# SourceFileName; function GetSourceFileName (Descriptor : in ContextManager.FileDescriptors) return LexTokenManager.Lex_String; --# global in File_Heap; procedure GetSourceFile (Descriptor : in ContextManager.FileDescriptors; SourceFile : out SPARK_IO.File_Type); --# global in File_Heap; --# derives SourceFile from Descriptor, --# File_Heap; function ListingReqt (Descriptor : ContextManager.FileDescriptors) return Boolean; --# global in File_Heap; function FirstFileDescriptor return ContextManager.FileDescriptors; --# global in File_Heap; function NextFileDescriptor (Descriptor : ContextManager.FileDescriptors) return ContextManager.FileDescriptors; --# global in File_Heap; function GetFileByName (FileName : in LexTokenManager.Lex_String) return ContextManager.FileDescriptors; --# global in File_Heap; --# in LexTokenManager.State; procedure SetFileStatus (Descriptor : in ContextManager.FileDescriptors; Status : in ContextManager.FileStatus); --# global in out File_Heap; --# derives File_Heap from *, --# Descriptor, --# Status; procedure SetListingReq (Descriptor : in ContextManager.FileDescriptors; Req : in Boolean); --# global in out File_Heap; --# derives File_Heap from *, --# Descriptor, --# Req; procedure SetLineContext (Descriptor : in ContextManager.FileDescriptors; FileContext : in SparkLex.Line_Context); --# global in out File_Heap; --# derives File_Heap from *, --# Descriptor, --# FileContext; procedure GetLineContext (Descriptor : in ContextManager.FileDescriptors; FileContext : out SparkLex.Line_Context); --# global in File_Heap; --# derives FileContext from Descriptor, --# File_Heap; procedure SetErrorContext (Descriptor : in ContextManager.FileDescriptors; Context : in 
ErrorHandler.Error_Contexts); --# global in out File_Heap; --# derives File_Heap from *, --# Context, --# Descriptor; procedure GetErrorContext (Descriptor : in ContextManager.FileDescriptors; Context : out ErrorHandler.Error_Contexts); --# global in File_Heap; --# derives Context from Descriptor, --# File_Heap; procedure SetListingFileName (Descriptor : in ContextManager.FileDescriptors; Listing_File_Name : in E_Strings.T); --# global in out File_Heap; --# derives File_Heap from *, --# Descriptor, --# Listing_File_Name; procedure GetListingFileName (Descriptor : in ContextManager.FileDescriptors; Listing_File_Name : out E_Strings.T); --# global in File_Heap; --# derives Listing_File_Name from Descriptor, --# File_Heap; procedure SetErrorsReported (Descriptor : in ContextManager.FileDescriptors); --# global in out File_Heap; --# derives File_Heap from *, --# Descriptor; function ErrorsReported (Descriptor : ContextManager.FileDescriptors) return Boolean; --# global in File_Heap; -- Find the compilation unit descriptor associated with the file -- descriptor (Descriptor). Each compilation unit has only one -- file but each file can contain many compilation units. This -- function must only be used for the SLI generation because, in -- the case of GNAT source code, each file has only one -- compilation unit. procedure Get_Unit (Descriptor : in ContextManager.FileDescriptors; Unit_Descriptor : out ContextManager.UnitDescriptors); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in Unit_Heap; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Descriptor, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Unit_Heap & --# Unit_Descriptor from Descriptor, --# Unit_Heap; -- Get the parent of an Ada separate unit (Unit_Descriptor). procedure Get_Parent (Unit_Descriptor : in out ContextManager.UnitDescriptors); --# global in LexTokenManager.State; --# in Unit_Heap; --# derives Unit_Descriptor from *, --# LexTokenManager.State, --# Unit_Heap; -- The procedure marks all the compilation units that are in the -- closure of the compilation unit associated with the file -- descriptor (Descriptor). Each compilation unit has only one -- file but each file can contain many compilation units. This -- function must only be used for the SLI generation because, in -- the case of GNAT source code, each file has only one -- compilation unit. This procedure must be called before the -- function In_Closure. procedure Dependency_Closure (Descriptor : in ContextManager.FileDescriptors); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out IndexManager.State; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Unit_Heap; --# derives ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Unit_Heap from CommandLineData.Content, --# Descriptor, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Unit_Heap; -- Return TRUE if the compilation unit descriptor (Descriptor) is -- in the closure of the compilation unit calculated by -- Dependency_Closure. The procedure Dependency_Closure must be -- called before this function. 
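-- A minimal usage sketch (editorial addition; "The_File" and "A_Unit" are
-- invented names, and a real caller must also satisfy the SPARK global
-- annotations on these subprograms):
--
--    ContextManager.Ops.Dependency_Closure (Descriptor => The_File);
--    if ContextManager.Ops.In_Closure (Descriptor => A_Unit) then
--       ...  -- A_Unit is in the closure of the unit held in The_File
--    end if;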
function In_Closure (Descriptor : in ContextManager.UnitDescriptors) return Boolean; --# global in Unit_Heap; -- Set the line number of the "D ..." line (see D section of the -- ALI format). procedure Set_Line_Number (Descriptor : in ContextManager.UnitDescriptors; Line_Number : in Positive); --# global in out Unit_Heap; --# derives Unit_Heap from *, --# Descriptor, --# Line_Number; -- Get the line number of the "D ..." line (see D section of the -- ALI format). function Get_Line_Number (Descriptor : in ContextManager.UnitDescriptors) return Natural; --# global in Unit_Heap; end ContextManager.Ops; ././@LongLink0000000000000000000000000000023400000000000011564 Lustar rootrootspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_protected_type_declaration-wf_protected_op_dec.adbspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaratio0000644000175000017500000004057511753202336033133 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Full_Type_Declaration.Wf_Protected_Type_Declaration) procedure Wf_Protected_Op_Dec (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord; Op_Found : out Boolean) is ------------------------------------------------------------------------------------ -- Overview: -- -- Process intial pragma and subprogram then loops through all the declarations in the visible part of the -- protected type and passes each to an appropriate wellformation check ------------------------------------------------------------------------------------ Pragma_Node, Entry_Or_Subprogram_Node, Declaration_Sequence_Node, Next_Node, Node_To_Check : STree.SyntaxNode; Entry_Found : Boolean; Subprog_Sym : Dictionary.Symbol; Protected_Type_Sym : Dictionary.Symbol; function Valid_Ada_Op (Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return not Dictionary.Is_Null_Symbol (Sym) and then Dictionary.Is_Subprogram (Sym) and then not Dictionary.IsProofFunction (Sym); end Valid_Ada_Op; ----------------------------------------------------------------------- procedure Check_Global_Validity (Subprog_Sym : in Dictionary.Symbol; Protected_Type : in Dictionary.Symbol; Dec_Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Current_Scope, --# Dec_Node, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Protected_Type, --# SPARK_IO.File_Sys, --# STree.Table, --# Subprog_Sym; --# pre Syntax_Node_Type (Dec_Node, STree.Table) = SP_Symbols.not_overriding_subprogram_declaration or --# Syntax_Node_Type (Dec_Node, STree.Table) = SP_Symbols.entry_declaration; is Global_It : Dictionary.Iterator; Global_Sym : Dictionary.Symbol; function Var_Is_Protected (Sym : Dictionary.Symbol; Protected_Type : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return Dictionary.GetOwnVariableProtected (Sym) or else Dictionary.Variables_Are_Equal (Left_Symbol => Sym, Right_Symbol => Dictionary.GetProtectedTypeOwnVariable (Protected_Type)) or else Dictionary.Is_Null_Variable (Sym); -- allow "null" in protected op annos end Var_Is_Protected; function Position_To_Report_Error (Dec_Node : STree.SyntaxNode) return LexTokenManager.Token_Position --# global in STree.Table; --# pre Syntax_Node_Type (Dec_Node, STree.Table) = SP_Symbols.not_overriding_subprogram_declaration or --# Syntax_Node_Type (Dec_Node, STree.Table) = SP_Symbols.entry_declaration; is Result_Node : STree.SyntaxNode; begin -- try and find a global anno to report on, if not use subprogram declaration Result_Node := Child_Node (Current_Node => Dec_Node); -- ASSUME Result_Node = procedure_specification OR function_specification OR -- proof_function_declaration OR entry_specification if Syntax_Node_Type (Node => Result_Node) = SP_Symbols.proof_function_declaration then -- ASSUME Result_Node = proof_function_declaration Result_Node := Last_Sibling_Of (Start_Node => Child_Node (Current_Node => Result_Node)); -- ASSUME Result_Node = function_constraint elsif Syntax_Node_Type (Node => Result_Node) = 
SP_Symbols.procedure_specification or else Syntax_Node_Type (Node => Result_Node) = SP_Symbols.function_specification or else Syntax_Node_Type (Node => Result_Node) = SP_Symbols.entry_specification then -- ASSUME Result_Node = procedure_specification OR function_specification OR entry_specification Result_Node := Child_Node (Current_Node => Last_Sibling_Of (Start_Node => Result_Node)); -- ASSUME Result_Node = moded_global_definition OR dependency_relation OR declare_annotation OR -- procedure_constraint OR function_constraint else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result_Node = procedure_specification OR function_specification OR " & "proof_function_declaration OR entry_specification in Position_To_Report_Error"); end if; -- ASSUME Result_Node = moded_global_definition OR dependency_relation OR declare_annotation OR -- procedure_constraint OR function_constraint if Syntax_Node_Type (Node => Result_Node) = SP_Symbols.dependency_relation or else Syntax_Node_Type (Node => Result_Node) = SP_Symbols.declare_annotation or else Syntax_Node_Type (Node => Result_Node) = SP_Symbols.procedure_constraint or else Syntax_Node_Type (Node => Result_Node) = SP_Symbols.function_constraint then -- ASSUME Result_Node = dependency_relation OR declare_annotation OR procedure_constraint OR function_constraint Result_Node := Dec_Node; elsif Syntax_Node_Type (Node => Result_Node) /= SP_Symbols.moded_global_definition then Result_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result_Node = moded_global_definition OR dependency_relation OR declare_annotation OR " & "procedure_constraint OR function_constraint OR function_specification in Position_To_Report_Error"); end if; -- ASSUME Result_Node = not_overriding_subprogram_declaration OR entry_declaration OR moded_global_definition SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Result_Node) = SP_Symbols.not_overriding_subprogram_declaration or else Syntax_Node_Type (Node => Result_Node) = SP_Symbols.entry_declaration or else Syntax_Node_Type (Node => Result_Node) = SP_Symbols.moded_global_definition, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result_Node = not_overriding_subprogram_declaration OR entry_declaration OR " & "moded_global_definition in Position_To_Report_Error"); return Node_Position (Node => Result_Node); end Position_To_Report_Error; begin -- Check_Global_Validity if Valid_Ada_Op (Sym => Subprog_Sym) then Global_It := Dictionary.FirstGlobalVariable (Dictionary.IsAbstract, Subprog_Sym); while not Dictionary.IsNullIterator (Global_It) loop Global_Sym := Dictionary.CurrentSymbol (Global_It); if not Var_Is_Protected (Sym => Global_Sym, Protected_Type => Protected_Type) then -- Global variables used in protected operations must be protected. 
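-- (Editorial illustration, not part of the original sources: this branch is
--  reached when, for example, a protected operation's global annotation names
--  an ordinary unprotected own variable; Var_Is_Protected above accepts only
--  protected own variables, the protected type's own variable itself, and the
--  null variable.)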
ErrorHandler.Semantic_Error_Sym (Err_Num => 872, Reference => ErrorHandler.No_Reference, Position => Position_To_Report_Error (Dec_Node => Dec_Node), Sym => Global_Sym, Scope => Current_Scope); end if; Global_It := Dictionary.NextSymbol (Global_It); end loop; end if; end Check_Global_Validity; begin -- Wf_Protected_Operation_Declaration Op_Found := False; Protected_Type_Sym := Dictionary.GetRegion (Scope); Pragma_Node := Child_Node (Current_Node => Node); -- ASSUME Pragma_Node = priority_pragma SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Pragma_Node) = SP_Symbols.priority_pragma, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Priority_Pragma = priority_pragma in Wf_Protected_Operation_Declaration"); -- Process priority pragma Wf_Priority_Pragma (Node => Pragma_Node, Scope => Scope, The_Heap => The_Heap); Entry_Or_Subprogram_Node := Next_Sibling (Current_Node => Pragma_Node); -- ASSUME Entry_Or_Subprogram_Node = entry_or_subprogram SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Entry_Or_Subprogram_Node) = SP_Symbols.entry_or_subprogram, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Entry_Or_Subprogram_Node = entry_or_subprogram in Wf_Protected_Operation_Declaration"); Declaration_Sequence_Node := Next_Sibling (Current_Node => Entry_Or_Subprogram_Node); -- ASSUME Declaration_Sequence_Node = protected_operation_declaration_rep SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Declaration_Sequence_Node) = SP_Symbols.protected_operation_declaration_rep, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Declaration_Sequence_Node = protected_operation_declaration_rep in Wf_Protected_Operation_Declaration"); -- Process initial subprogram or entry Entry_Or_Subprogram_Node := Child_Node (Current_Node => Entry_Or_Subprogram_Node); -- ASSUME Entry_Or_Subprogram_Node = not_overriding_subprogram_declaration OR entry_declaration SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Entry_Or_Subprogram_Node) = SP_Symbols.not_overriding_subprogram_declaration or else Syntax_Node_Type (Node => Entry_Or_Subprogram_Node) = SP_Symbols.entry_declaration, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Entry_Or_Subprogram_Node = not_overriding_subprogram_declaration OR " & "entry_declaration in Wf_Protected_Operation_Declaration"); Wf_Subprogram_Declaration (Node => Entry_Or_Subprogram_Node, Inherit_Node => STree.NullNode, Context_Node => STree.NullNode, Generic_Formal_Part_Node => STree.NullNode, Current_Scope => Scope, Generic_Unit => Dictionary.NullSymbol, Component_Data => Component_Data, The_Heap => The_Heap, Subprog_Sym => Subprog_Sym); Entry_Found := Syntax_Node_Type (Node => Entry_Or_Subprogram_Node) = SP_Symbols.entry_declaration; if Entry_Found then Dictionary.SetProtectedTypeEntry (Protected_Type_Sym, Subprog_Sym); end if; Check_Global_Validity (Subprog_Sym => Subprog_Sym, Protected_Type => Protected_Type_Sym, Dec_Node => Entry_Or_Subprogram_Node, Current_Scope => Scope); Op_Found := Op_Found or else Valid_Ada_Op (Sym => Subprog_Sym); -- Process any subsequent sequence of declarations Next_Node := Last_Child_Of (Start_Node => Declaration_Sequence_Node); while Next_Node /= Declaration_Sequence_Node loop --# assert STree.Table = STree.Table~; -- ASSUME Next_Node = protected_operation_declaration_rep SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Next_Node) = SP_Symbols.protected_operation_declaration_rep, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = protected_operation_declaration_rep in 
Wf_Protected_Operation_Declaration"); Node_To_Check := Next_Sibling (Current_Node => Next_Node); -- ASSUME Node_To_Check = apragma OR not_overriding_subprogram_declaration OR entry_declaration case Syntax_Node_Type (Node => Node_To_Check) is when SP_Symbols.apragma => -- ASSUME Node_To_Check = apragma Wf_Pragma (Node => Node_To_Check, Scope => Scope); when SP_Symbols.not_overriding_subprogram_declaration => -- ASSUME Node_To_Check = not_overriding_subprogram_declaration Wf_Subprogram_Declaration (Node => Node_To_Check, Inherit_Node => STree.NullNode, Context_Node => STree.NullNode, Generic_Formal_Part_Node => STree.NullNode, Current_Scope => Scope, Generic_Unit => Dictionary.NullSymbol, Component_Data => Component_Data, The_Heap => The_Heap, Subprog_Sym => Subprog_Sym); Check_Global_Validity (Subprog_Sym => Subprog_Sym, Protected_Type => Protected_Type_Sym, Dec_Node => Node_To_Check, Current_Scope => Scope); Op_Found := Op_Found or else Valid_Ada_Op (Sym => Subprog_Sym); when SP_Symbols.entry_declaration => -- ASSUME Node_To_Check = entry_declaration if Entry_Found then ErrorHandler.Semantic_Error (Err_Num => 869, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node_To_Check), Id_Str => Dictionary.GetSimpleName (Dictionary.GetRegion (Scope))); else Entry_Found := True; -- entry declarations are the same "shape" as subprograms and can be handled -- by wf_subprog Wf_Subprogram_Declaration (Node => Node_To_Check, Inherit_Node => STree.NullNode, Context_Node => STree.NullNode, Generic_Formal_Part_Node => STree.NullNode, Current_Scope => Scope, Generic_Unit => Dictionary.NullSymbol, Component_Data => Component_Data, The_Heap => The_Heap, Subprog_Sym => Subprog_Sym); Dictionary.SetProtectedTypeEntry (Protected_Type_Sym, Subprog_Sym); Check_Global_Validity (Subprog_Sym => Subprog_Sym, Protected_Type => Protected_Type_Sym, Dec_Node => Node_To_Check, Current_Scope => Scope); Op_Found := Op_Found or else Valid_Ada_Op (Sym => Subprog_Sym); end if; when others => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node_To_Check = apragma OR not_overriding_subprogram_declaration OR " & "entry_declaration in Wf_Protected_Op_Dec"); end case; Next_Node := Parent_Node (Current_Node => Next_Node); end loop; end Wf_Protected_Op_Dec; spark-2012.0.deb/examiner/sem-check_task_modifier_consistency.adb0000644000175000017500000000565411753202336024113 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem) procedure Check_Task_Modifier_Consistency (The_Own_Var_Type : in Dictionary.Symbol; The_Var_Type : in Dictionary.Symbol; Modifier_Is_Task : in Boolean; Error_Node : in STree.SyntaxNode; Consistent : out Boolean) is Error_Number : Natural := 0; begin if Modifier_Is_Task then -- Do we know anything about the type announcement? if Dictionary.Is_Declared (Item => The_Own_Var_Type) or else Dictionary.IsPredefined (The_Own_Var_Type) then if not Dictionary.TypeIsTask (The_Own_Var_Type) then Error_Number := 855; elsif Dictionary.IsSubtype (The_Own_Var_Type) then Error_Number := 948; end if; end if; if not Dictionary.Is_Null_Symbol (The_Var_Type) and then not Dictionary.TypeIsTask (The_Var_Type) then -- The modifier is 'task', we're declaring a task object but the -- type is not a task or task subtype. Error_Number := 855; end if; else -- the modifier is not 'task' -- Do we know anything about the type announcement? if Dictionary.Is_Declared (Item => The_Own_Var_Type) or else Dictionary.IsPredefined (The_Own_Var_Type) then if Dictionary.TypeIsTask (The_Own_Var_Type) then -- We know that the announced type is -- not a task type (or subtype) Error_Number := 855; end if; end if; if not Dictionary.Is_Null_Symbol (The_Var_Type) and then Dictionary.TypeIsTask (The_Var_Type) then -- The modifier is not task, we're declaring a task object. Error_Number := 855; end if; end if; if Error_Number /= 0 then ErrorHandler.Semantic_Error (Err_Num => Error_Number, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Error_Node), Id_Str => Node_Lex_String (Node => Error_Node)); end if; Consistent := Error_Number = 0; end Check_Task_Modifier_Consistency; spark-2012.0.deb/examiner/sem-dependency_relation-check_derives_consistency.adb0000644000175000017500000006745211753202336026751 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Dependency_Relation) procedure Check_Derives_Consistency (Subprog_Sym : in Dictionary.Symbol; Position : in LexTokenManager.Token_Position; The_Heap : in out Heap.HeapRecord) is R, RT : RelationAlgebra.Relation; R_Dom, RT_Dom, A_Unchanged : SeqAlgebra.Seq; ---------------------------------------------- function Sym_To_Nat (Sym : in Dictionary.Symbol) return Natural is begin return Natural (Dictionary.SymbolRef (Sym)); end Sym_To_Nat; ---------------------------------------------- function Mem_To_Sym (Nat : SeqAlgebra.MemberOfSeq; The_Heap : Heap.HeapRecord) return Dictionary.Symbol is begin return Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (SeqAlgebra.Value_Of_Member (The_Heap => The_Heap, M => Nat))); end Mem_To_Sym; ---------------------------------------------- procedure Build_RT (Subprog_Sym : in Dictionary.Symbol; The_Heap : in out Heap.HeapRecord; RT : out RelationAlgebra.Relation; RT_Dom : out SeqAlgebra.Seq; A_Unchanged : out SeqAlgebra.Seq) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives A_Unchanged, --# RT_Dom, --# The_Heap from Dictionary.Dict, --# Subprog_Sym, --# The_Heap & --# RT from The_Heap & --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Subprog_Sym, --# The_Heap; is -- GAA duplicated code from SEM-WF_GLOBAL_DEFINITION.ADB function Valid_Refinement (Constituent, Subprogram : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is Owner, Region, Enclosing_Region : Dictionary.Symbol; Result : Boolean := False; begin if Dictionary.IsConstituent (Constituent) then -- We regard Constituent as a Valid_Refinement if: -- It is a refinement constituent of Subject AND -- (Subject is owned by the region in which the Subprogram is declared OR -- Subject is owned by the region in which the protected type in which the -- Subprogram is declared) Owner := Dictionary.GetOwner (Dictionary.GetSubject (Constituent)); Region := Dictionary.GetRegion (Dictionary.GetScope (Subprogram)); Result := Owner = Region; if not Result and then Dictionary.IsProtectedType (Region) then Enclosing_Region := Dictionary.GetRegion (Dictionary.GetScope (Region)); Result := Owner = Enclosing_Region; end if; end if; return Result; end Valid_Refinement; ---------------------------------------------- procedure Step_One (RT : in RelationAlgebra.Relation; Subprog_Sym : in Dictionary.Symbol; The_Heap : in out Heap.HeapRecord; RT_Dom : out SeqAlgebra.Seq; A_Unchanged : out SeqAlgebra.Seq) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives A_Unchanged from Dictionary.Dict, --# Subprog_Sym, --# The_Heap & --# RT_Dom, --# The_Heap from Dictionary.Dict, --# RT, --# Subprog_Sym, --# The_Heap & --# Statistics.TableUsage from *, --# Dictionary.Dict, --# RT, --# Subprog_Sym, --# The_Heap; is CA_Exp, C_Exp, Unchanged, Empty_Seq : SeqAlgebra.Seq; ---------------------------------------------- procedure Build_CA_Exp (CA_Exp : out SeqAlgebra.Seq; Subprog_Sym : in Dictionary.Symbol; The_Heap : in out Heap.HeapRecord) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives CA_Exp from The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# Dictionary.Dict, --# Subprog_Sym, --# The_Heap; is Concrete_Export, Constituent_Sym : Dictionary.Symbol; Concrete_Export_It, Constituent_It : Dictionary.Iterator; begin SeqAlgebra.CreateSeq (The_Heap, CA_Exp); Concrete_Export_It := Dictionary.FirstExport (Dictionary.IsRefined, 
Subprog_Sym); while not Dictionary.IsNullIterator (Concrete_Export_It) loop Concrete_Export := Dictionary.CurrentSymbol (Concrete_Export_It); if Valid_Refinement (Constituent => Concrete_Export, Subprogram => Subprog_Sym) then Constituent_It := Dictionary.FirstConstituent (Dictionary.GetSubject (Concrete_Export)); while not Dictionary.IsNullIterator (Constituent_It) loop Constituent_Sym := Dictionary.CurrentSymbol (Constituent_It); -- Previously we only add constituents to CA_Exp if they are unmoded; this has been shown to -- be incorrect (see SEPR 1844), so now we add all constituents SeqAlgebra.AddMember (The_Heap, CA_Exp, Sym_To_Nat (Sym => Constituent_Sym)); Constituent_It := Dictionary.NextSymbol (Constituent_It); end loop; end if; Concrete_Export_It := Dictionary.NextSymbol (Concrete_Export_It); end loop; end Build_CA_Exp; ---------------------------------------------- procedure Build_C_Exp (C_Exp : out SeqAlgebra.Seq; Subprog_Sym : in Dictionary.Symbol; The_Heap : in out Heap.HeapRecord) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives C_Exp from The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# Dictionary.Dict, --# Subprog_Sym, --# The_Heap; is Concrete_Export_It : Dictionary.Iterator; begin SeqAlgebra.CreateSeq (The_Heap, C_Exp); Concrete_Export_It := Dictionary.FirstExport (Dictionary.IsRefined, Subprog_Sym); while not Dictionary.IsNullIterator (Concrete_Export_It) loop SeqAlgebra.AddMember (The_Heap, C_Exp, Sym_To_Nat (Sym => Dictionary.CurrentSymbol (Concrete_Export_It))); Concrete_Export_It := Dictionary.NextSymbol (Concrete_Export_It); end loop; end Build_C_Exp; ---------------------------------------------- procedure Abstract_Unchanged (U : in SeqAlgebra.Seq; AU : out SeqAlgebra.Seq; The_Heap : in out Heap.HeapRecord) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives AU from The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# Dictionary.Dict, --# The_Heap, --# U; is Mem : SeqAlgebra.MemberOfSeq; begin SeqAlgebra.CreateSeq (The_Heap, AU); Mem := SeqAlgebra.FirstMember (The_Heap, U); while not SeqAlgebra.IsNullMember (Mem) loop SeqAlgebra.AddMember (The_Heap, AU, Sym_To_Nat (Sym => Dictionary.GetSubject (Mem_To_Sym (Nat => Mem, The_Heap => The_Heap)))); Mem := SeqAlgebra.NextMember (The_Heap, Mem); end loop; end Abstract_Unchanged; begin -- Step_One -- set of all constituents of subject of any exported constituent Build_CA_Exp (CA_Exp => CA_Exp, Subprog_Sym => Subprog_Sym, The_Heap => The_Heap); -- set of actual exported constituents Build_C_Exp (C_Exp => C_Exp, Subprog_Sym => Subprog_Sym, The_Heap => The_Heap); -- unwritten constituents SeqAlgebra.Complement (The_Heap, CA_Exp, C_Exp, Unchanged); -- abstract subjects of unwritten constituents Abstract_Unchanged (U => Unchanged, AU => A_Unchanged, The_Heap => The_Heap); -- depend on themselves RelationAlgebra.AddIdentity (The_Heap, RT, A_Unchanged); SeqAlgebra.CreateSeq (The_Heap, Empty_Seq); SeqAlgebra.Union (The_Heap, A_Unchanged, Empty_Seq, RT_Dom); SeqAlgebra.DisposeOfSeq (The_Heap, Empty_Seq); SeqAlgebra.DisposeOfSeq (The_Heap, CA_Exp); SeqAlgebra.DisposeOfSeq (The_Heap, C_Exp); SeqAlgebra.DisposeOfSeq (The_Heap, Unchanged); end Step_One; ---------------------------------------------- procedure Steps_Two_And_Three (RT : in RelationAlgebra.Relation; RT_Dom : in SeqAlgebra.Seq; Subprog_Sym : in Dictionary.Symbol; The_Heap : in out Heap.HeapRecord) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, 
--# The_Heap from *, --# Dictionary.Dict, --# RT, --# RT_Dom, --# Subprog_Sym, --# The_Heap; is Export_Var, Import_Var : Dictionary.Symbol; Export_Num, Import_Num : Natural; Export_It, Import_It : Dictionary.Iterator; begin Export_It := Dictionary.FirstExport (Dictionary.IsRefined, Subprog_Sym); while not Dictionary.IsNullIterator (Export_It) loop Export_Var := Dictionary.CurrentSymbol (Export_It); if Valid_Refinement (Constituent => Export_Var, Subprogram => Subprog_Sym) then Export_Num := Sym_To_Nat (Sym => Dictionary.GetSubject (Export_Var)); else Export_Num := Sym_To_Nat (Sym => Export_Var); end if; SeqAlgebra.AddMember (The_Heap, RT_Dom, Export_Num); -- if it's mode out stream add Export_Num, Export_Num to RT if Dictionary.GetOwnVariableOrConstituentMode (Export_Var) = Dictionary.OutMode then RelationAlgebra.InsertPair (The_Heap, RT, Export_Num, Export_Num); end if; Import_It := Dictionary.FirstDependency (Dictionary.IsRefined, Subprog_Sym, Export_Var); while not Dictionary.IsNullIterator (Import_It) loop Import_Var := Dictionary.CurrentSymbol (Import_It); if Valid_Refinement (Constituent => Import_Var, Subprogram => Subprog_Sym) then Import_Num := Sym_To_Nat (Sym => Dictionary.GetSubject (Import_Var)); else Import_Num := Sym_To_Nat (Sym => Import_Var); end if; RelationAlgebra.InsertPair (The_Heap, RT, Export_Num, Import_Num); -- if it's a mode in stream then add Import_Num, Import_Num to RT -- and add Import_Num to RT_Dom if Dictionary.GetOwnVariableOrConstituentMode (Import_Var) = Dictionary.InMode then RelationAlgebra.InsertPair (The_Heap, RT, Import_Num, Import_Num); SeqAlgebra.AddMember (The_Heap, RT_Dom, Import_Num); end if; Import_It := Dictionary.NextSymbol (Import_It); end loop; Export_It := Dictionary.NextSymbol (Export_It); end loop; end Steps_Two_And_Three; begin -- Build_RT RelationAlgebra.CreateRelation (The_Heap, RT); Step_One (RT => RT, Subprog_Sym => Subprog_Sym, The_Heap => The_Heap, RT_Dom => RT_Dom, A_Unchanged => A_Unchanged); Steps_Two_And_Three (RT => RT, RT_Dom => RT_Dom, Subprog_Sym => Subprog_Sym, The_Heap => The_Heap); end Build_RT; ---------------------------------------------- procedure Build_R (Subprog_Sym : in Dictionary.Symbol; The_Heap : in out Heap.HeapRecord; R : out RelationAlgebra.Relation; R_Dom : out SeqAlgebra.Seq) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives R, --# R_Dom from The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# Dictionary.Dict, --# Subprog_Sym, --# The_Heap; is LR : RelationAlgebra.Relation; Export_Var, Import_Var : Dictionary.Symbol; Export_Num, Import_Num : Natural; Export_It, Import_It : Dictionary.Iterator; begin RelationAlgebra.CreateRelation (The_Heap, LR); SeqAlgebra.CreateSeq (The_Heap, R_Dom); Export_It := Dictionary.FirstExport (Dictionary.IsAbstract, Subprog_Sym); while not Dictionary.IsNullIterator (Export_It) loop Export_Var := Dictionary.CurrentSymbol (Export_It); Export_Num := Sym_To_Nat (Sym => Export_Var); SeqAlgebra.AddMember (The_Heap, R_Dom, Export_Num); -- if it's mode out stream add Export_Num, Export_Num to LR if Dictionary.GetOwnVariableOrConstituentMode (Export_Var) = Dictionary.OutMode then RelationAlgebra.InsertPair (The_Heap, LR, Export_Num, Export_Num); end if; Import_It := Dictionary.FirstDependency (Dictionary.IsAbstract, Subprog_Sym, Export_Var); while not Dictionary.IsNullIterator (Import_It) loop Import_Var := Dictionary.CurrentSymbol (Import_It); Import_Num := Sym_To_Nat (Sym => Import_Var); RelationAlgebra.InsertPair (The_Heap, LR, Export_Num, 
Import_Num); -- if it's a mode in stream then add Import_It, Import_It to LR -- and add Import_It to R_Dom if Dictionary.GetOwnVariableOrConstituentMode (Import_Var) = Dictionary.InMode then RelationAlgebra.InsertPair (The_Heap, LR, Import_Num, Import_Num); SeqAlgebra.AddMember (The_Heap, R_Dom, Import_Num); end if; Import_It := Dictionary.NextSymbol (Import_It); end loop; Export_It := Dictionary.NextSymbol (Export_It); end loop; R := LR; end Build_R; ---------------------------------------------- procedure Type_One_Errors (R : in RelationAlgebra.Relation; R_Dom : in SeqAlgebra.Seq; RT : in RelationAlgebra.Relation; RT_Dom : in SeqAlgebra.Seq; A_Unchanged : in SeqAlgebra.Seq; Subprog_Sym : in Dictionary.Symbol; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Position; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from A_Unchanged, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Position, --# R, --# RT, --# RT_Dom, --# R_Dom, --# SPARK_IO.File_Sys, --# Subprog_Sym, --# The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# R, --# RT, --# RT_Dom, --# R_Dom, --# The_Heap; is R_Row, RT_Row, Diff : SeqAlgebra.Seq; R_Exp, Err_Imp : SeqAlgebra.MemberOfSeq; Subprog_Scope : Dictionary.Scopes; begin Subprog_Scope := Dictionary.GetScope (Subprog_Sym); R_Exp := SeqAlgebra.FirstMember (The_Heap, R_Dom); loop exit when SeqAlgebra.IsNullMember (R_Exp); if not SeqAlgebra.IsMember (The_Heap, RT_Dom, SeqAlgebra.Value_Of_Member (The_Heap => The_Heap, M => R_Exp)) then -- special case: we do not want to issue Error 1 for an abstract own -- variable of mode in since a message saying we have failed to update -- such a variable is bound to misleading (the updating is only implicit anyway) if Dictionary.GetOwnVariableOrConstituentMode (Mem_To_Sym (Nat => R_Exp, The_Heap => The_Heap)) /= Dictionary.InMode then ErrorHandler.Dep_Semantic_Error_Sym (Err_Num => 1, Position => Position, Sym1 => Mem_To_Sym (Nat => R_Exp, The_Heap => The_Heap), Sym2 => Dictionary.NullSymbol, Scope => Subprog_Scope); end if; else RelationAlgebra.RowExtraction (The_Heap, R, SeqAlgebra.Value_Of_Member (The_Heap => The_Heap, M => R_Exp), R_Row); RelationAlgebra.RowExtraction (The_Heap, RT, SeqAlgebra.Value_Of_Member (The_Heap => The_Heap, M => R_Exp), RT_Row); SeqAlgebra.Complement (The_Heap, RT_Row, R_Row, Diff); Err_Imp := SeqAlgebra.FirstMember (The_Heap, Diff); loop exit when SeqAlgebra.IsNullMember (Err_Imp); if Mem_To_Sym (Nat => Err_Imp, The_Heap => The_Heap) = Mem_To_Sym (Nat => R_Exp, The_Heap => The_Heap) and then SeqAlgebra.IsMember (The_Heap, A_Unchanged, SeqAlgebra.Value_Of_Member (The_Heap => The_Heap, M => Err_Imp)) then ErrorHandler.Dep_Semantic_Error_Sym (Err_Num => 5, Position => Position, Sym1 => Mem_To_Sym (Nat => R_Exp, The_Heap => The_Heap), Sym2 => Dictionary.NullSymbol, Scope => Subprog_Scope); else ErrorHandler.Dep_Semantic_Error_Sym (Err_Num => 4, Position => Position, Sym1 => Mem_To_Sym (Nat => R_Exp, The_Heap => The_Heap), Sym2 => Mem_To_Sym (Nat => Err_Imp, The_Heap => The_Heap), Scope => Subprog_Scope); end if; Err_Imp := SeqAlgebra.NextMember (The_Heap, Err_Imp); end loop; end if; R_Exp := SeqAlgebra.NextMember (The_Heap, R_Exp); end loop; end Type_One_Errors; ---------------------------------------------- procedure Type_Two_Errors (R : 
in RelationAlgebra.Relation; R_Dom : in SeqAlgebra.Seq; RT : in RelationAlgebra.Relation; RT_Dom : in SeqAlgebra.Seq; Subprog_Sym : in Dictionary.Symbol; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Position; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Position, --# R, --# RT, --# RT_Dom, --# R_Dom, --# SPARK_IO.File_Sys, --# Subprog_Sym, --# The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# Dictionary.Dict, --# R, --# RT, --# RT_Dom, --# R_Dom, --# The_Heap; is R_Row, RT_Row, Diff : SeqAlgebra.Seq; RT_Exp, Err_Imp : SeqAlgebra.MemberOfSeq; Subprog_Scope : Dictionary.Scopes; begin Subprog_Scope := Dictionary.GetScope (Subprog_Sym); RT_Exp := SeqAlgebra.FirstMember (The_Heap, RT_Dom); loop exit when SeqAlgebra.IsNullMember (RT_Exp); if not SeqAlgebra.IsMember (The_Heap, R_Dom, SeqAlgebra.Value_Of_Member (The_Heap => The_Heap, M => RT_Exp)) and then not Dictionary.Is_Null_Variable (Mem_To_Sym (Nat => RT_Exp, The_Heap => The_Heap)) then -- don't report refinement errors on Null ErrorHandler.Dep_Semantic_Error_Sym (Err_Num => 2, Position => Position, Sym1 => Mem_To_Sym (Nat => RT_Exp, The_Heap => The_Heap), Sym2 => Dictionary.NullSymbol, Scope => Subprog_Scope); else RelationAlgebra.RowExtraction (The_Heap, R, SeqAlgebra.Value_Of_Member (The_Heap => The_Heap, M => RT_Exp), R_Row); RelationAlgebra.RowExtraction (The_Heap, RT, SeqAlgebra.Value_Of_Member (The_Heap => The_Heap, M => RT_Exp), RT_Row); SeqAlgebra.Complement (The_Heap, R_Row, RT_Row, Diff); Err_Imp := SeqAlgebra.FirstMember (The_Heap, Diff); loop exit when SeqAlgebra.IsNullMember (Err_Imp); -- guard to avoid spurious refinement errors in DFA mode if CommandLineData.Content.Flow_Option /= CommandLineData.Data_Flow or else Mem_To_Sym (Nat => RT_Exp, The_Heap => The_Heap) = Mem_To_Sym (Nat => Err_Imp, The_Heap => The_Heap) then ErrorHandler.Dep_Semantic_Error_Sym (Err_Num => 3, Position => Position, Sym1 => Mem_To_Sym (Nat => RT_Exp, The_Heap => The_Heap), Sym2 => Mem_To_Sym (Nat => Err_Imp, The_Heap => The_Heap), Scope => Subprog_Scope); end if; Err_Imp := SeqAlgebra.NextMember (The_Heap, Err_Imp); end loop; end if; RT_Exp := SeqAlgebra.NextMember (The_Heap, RT_Exp); end loop; end Type_Two_Errors; begin -- Check_Derives_Consistency Build_RT (Subprog_Sym => Subprog_Sym, The_Heap => The_Heap, RT => RT, RT_Dom => RT_Dom, A_Unchanged => A_Unchanged); Build_R (Subprog_Sym => Subprog_Sym, The_Heap => The_Heap, R => R, R_Dom => R_Dom); Type_One_Errors (R => R, R_Dom => R_Dom, RT => RT, RT_Dom => RT_Dom, A_Unchanged => A_Unchanged, Subprog_Sym => Subprog_Sym, The_Heap => The_Heap); Type_Two_Errors (R => R, R_Dom => R_Dom, RT => RT, RT_Dom => RT_Dom, Subprog_Sym => Subprog_Sym, The_Heap => The_Heap); RelationAlgebra.DisposeOfRelation (The_Heap, RT); RelationAlgebra.DisposeOfRelation (The_Heap, R); SeqAlgebra.DisposeOfSeq (The_Heap, RT_Dom); SeqAlgebra.DisposeOfSeq (The_Heap, R_Dom); SeqAlgebra.DisposeOfSeq (The_Heap, A_Unchanged); Heap.ReportUsage (The_Heap); end Check_Derives_Consistency; spark-2012.0.deb/examiner/relationalgebra-debug.adb0000644000175000017500000001140611753202336021142 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited 
------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Debug; with Dictionary; with ExaminerConstants; with Ada.Text_IO; package body RelationAlgebra.Debug is --------------- -- Print_Rho -- --------------- procedure Print_Rho (Msg : in String; Rho : in RelationAlgebra.Relation; TheHeap : in Heap.HeapRecord) is --# hide Print_Rho; ColIndex : Natural; ColLdr : RelationAlgebra.ColLeader; P : RelationAlgebra.Pair; LaterItem : Boolean; begin Ada.Text_IO.Put_Line (Msg); ColLdr := RelationAlgebra.FirstColLeader (TheHeap, Rho); loop exit when ColLdr = NullColLdr; ColIndex := ColLdrIndex (TheHeap, ColLdr); Standard.Debug.Print_Sym_Raw (Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (ColIndex))); Ada.Text_IO.Put (" <-- "); LaterItem := False; P := RelationAlgebra.FirstInCol (TheHeap, ColLdr); loop exit when P = NullPair; if LaterItem then Ada.Text_IO.Put (", "); end if; LaterItem := True; Standard.Debug.Print_Sym_Raw (Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (RelationAlgebra.RowValue (TheHeap, P)))); P := RelationAlgebra.DownSuccr (TheHeap, P); end loop; Ada.Text_IO.New_Line; ColLdr := NextColLeader (TheHeap, ColLdr); end loop; end Print_Rho; -------------- -- Print_Mu -- -------------- procedure Print_Mu (Msg : in String; Mu : in RelationAlgebra.Relation; TheHeap : in Heap.HeapRecord) is --# hide Print_Mu; ColIndex : Natural; ColLdr : ColLeader; P : Pair; LaterItem : Boolean; begin Ada.Text_IO.Put_Line (Msg); ColLdr := FirstColLeader (TheHeap, Mu); loop exit when ColLdr = NullColLdr; ColIndex := ColLdrIndex (TheHeap, ColLdr); Standard.Debug.Print_Sym_Raw (Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (ColIndex))); Ada.Text_IO.Put (" <-- "); LaterItem := False; P := FirstInCol (TheHeap, ColLdr); loop exit when P = NullPair; if LaterItem then Ada.Text_IO.Put (", "); end if; LaterItem := True; Ada.Text_IO.Put (Integer'Image (RowValue (TheHeap, P))); P := DownSuccr (TheHeap, P); end loop; Ada.Text_IO.New_Line; ColLdr := NextColLeader (TheHeap, ColLdr); end loop; end Print_Mu; ------------------ -- Print_Lambda -- ------------------ procedure Print_Lambda (Msg : in String; Lambda : in RelationAlgebra.Relation; TheHeap : in Heap.HeapRecord) is --# hide Print_Lambda; ColIndex : Natural; ColLdr : ColLeader; P : Pair; LaterItem : Boolean; begin Ada.Text_IO.Put_Line (Msg); ColLdr := FirstColLeader (TheHeap, Lambda); loop exit when ColLdr = NullColLdr; ColIndex := ColLdrIndex (TheHeap, ColLdr); Ada.Text_IO.Put (Integer'Image (ColIndex)); Ada.Text_IO.Put (" <-- "); LaterItem := False; P := FirstInCol (TheHeap, ColLdr); loop exit when P = NullPair; if LaterItem then Ada.Text_IO.Put (", "); end if; LaterItem := True; Standard.Debug.Print_Sym_Raw (Dictionary.ConvertSymbolRef 
(ExaminerConstants.RefType (RowValue (TheHeap, P)))); P := DownSuccr (TheHeap, P); end loop; Ada.Text_IO.New_Line; ColLdr := NextColLeader (TheHeap, ColLdr); end loop; end Print_Lambda; end RelationAlgebra.Debug; spark-2012.0.deb/examiner/dag-build_annotation_expression.adb0000644000175000017500000074522311753202336023277 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with E_Strings; with LexTokenManager; with SeqAlgebra; with SP_Symbols; with Structures; with ExaminerConstants; with Cells.Utility; with Symbol_Set; -- This procedure traverses a syntax tree of an annotation expression separate (DAG) procedure Build_Annotation_Expression (Exp_Node : in STree.SyntaxNode; Instantiated_Subprogram : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Calling_Scope : in Dictionary.Scopes; Force_Abstract : in Boolean; Loop_Stack : in LoopContext.T; Generate_Function_Instantiations : in Boolean; VC_Failure : in out Boolean; VC_Contains_Reals : in out Boolean; VCG_Heap : in out Cells.Heap_Record; DAG_Root : out Cells.Cell; Function_Defs : in out CStacks.Stack) is type Loop_Direction is (Down_Loop, Up_Loop); Direction : Loop_Direction; -- Records direction of tree traversal Next_Node, Last_Node : STree.SyntaxNode; Node_Type : SP_Symbols.SP_Symbol; DAG_Cell : Cells.Cell; E_Stack : CStacks.Stack; L_Scope : Dictionary.Scopes; Implicit_Var : Dictionary.Symbol; -- Context for parsing return annos Calling_Function : Cells.Cell; -- Used in processing nested calls Function_Definition : Cells.Cell; Precondition : Cells.Cell; Start_Node : STree.SyntaxNode; True_Cell : Cells.Cell; Current_Instantiation : Dictionary.Symbol; Current_Unit : Dictionary.Symbol; -- Context used to obtain current scope Tmp_Cell_1 : Cells.Cell; Tmp_Cell_2 : Cells.Cell; Tmp_Parent : Cells.Cell; -- To avoid the posibility of infinite looping due to recursion a record -- has to be kept of the called functions within an annotation expression. Called_Functions : Symbol_Set.T; Done : Boolean; -- Indicates parsing of the expression is complete --------------------------------------------------------------------- function Function_Has_Unconstrained_Parameter (The_FDL_Function : Dictionary.Symbol) return Boolean --# global Dictionary.Dict; is Number_Of_Parameters : Natural; The_Argument : Dictionary.Symbol; The_Type : Dictionary.Symbol; Result : Boolean; begin Number_Of_Parameters := Dictionary.GetNumberOfSubprogramParameters (The_FDL_Function); Result := False; for I in Natural range 1 .. 
Number_Of_Parameters loop The_Argument := Dictionary.GetSubprogramParameter (The_FDL_Function, I); The_Type := Dictionary.GetType (The_Argument); if Dictionary.IsUnconstrainedArrayType (The_Type) then Result := True; exit; end if; end loop; return Result; end Function_Has_Unconstrained_Parameter; --------------------------------------------------------------------- -- Returns True if Instantiated_Subprogram refers to an instantiation of a -- generic subprogram and Exp_Node refers to the formal pre-condition, -- post-condition, or return predicate attached to that generic declaration. function Is_Generic_Constraint (Exp_Node : STree.SyntaxNode; Instantiated_Subprogram : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; --# in STree.Table; is The_Precondition : STree.SyntaxNode; The_Postcondition : STree.SyntaxNode; Result : Boolean; begin if not Dictionary.Is_Null_Symbol (Instantiated_Subprogram) then The_Precondition := STree.RefToNode (Dictionary.GetPrecondition (Dictionary.IsAbstract, Dictionary.GetGenericOfInstantiation (Instantiated_Subprogram))); The_Postcondition := STree.RefToNode (Dictionary.GetPostcondition (Dictionary.IsAbstract, Dictionary.GetGenericOfInstantiation (Instantiated_Subprogram))); Result := Exp_Node = The_Precondition or else Exp_Node = The_Postcondition or else (The_Postcondition /= STree.NullNode and then Exp_Node = STree.Next_Sibling (Current_Node => The_Postcondition)); else Result := False; end if; return Result; end Is_Generic_Constraint; --------------------------------------------------------------------- --------------------------------------------------------------------- -- Returns the correct Scope needed to generate the DAG for -- a subprogram. For generic units, the Scope needs to be -- adjusted to that of the generic declaration. -- -- Exp_Node refers to either the pre-condition, post-condition or return -- constraint for which a DAG is currently being built. -- -- Instantiated_Subprogram refers to the instantiation of a generic -- subprogram, or NullSymbol in the case of a non-generic subprogram. -- -- Scope is that passed into Build_Annotation_Expression from the calling -- environment. 
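--
-- Illustrative sketch (not part of the build): one way the two helpers
-- above can be combined by a caller.  The exact structure below is an
-- assumption for illustration only; the real call sites are in the
-- traversal code further down this unit.
--
--    L_Scope := Get_Generic_Scope (Exp_Node                => Exp_Node,
--                                  Instantiated_Subprogram => Instantiated_Subprogram,
--                                  Scope                   => Scope);
--    if Is_Generic_Constraint (Exp_Node                => Exp_Node,
--                              Instantiated_Subprogram => Instantiated_Subprogram) then
--       null;  -- the constraint came from the generic declaration, so any
--              -- DAG built from it still names generic formals and needs
--              -- the generic actuals substituted in afterwards
--    end if;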
--------------------------------------------------------------------- function Get_Generic_Scope (Exp_Node : STree.SyntaxNode; Instantiated_Subprogram : Dictionary.Symbol; Scope : Dictionary.Scopes) return Dictionary.Scopes --# global in Dictionary.Dict; is The_Postcondition : STree.SyntaxNode; The_Precondition : STree.SyntaxNode; Return_Scope : Dictionary.Scopes; begin -- Check that we hav e generic constraint if Instantiated_Subprogram /= Dictionary.NullSymbol then The_Precondition := STree.RefToNode (Dictionary.GetPrecondition (Dictionary.IsAbstract, Dictionary.GetGenericOfInstantiation (Instantiated_Subprogram))); The_Postcondition := STree.RefToNode (Dictionary.GetPostcondition (Dictionary.IsAbstract, Dictionary.GetGenericOfInstantiation (Instantiated_Subprogram))); if Exp_Node = The_Precondition or else Exp_Node = The_Postcondition then -- Exp_Node refere to either the pre or the explicit return -- annotation of the generic declaration -- Set the scope to that of the generic declaration Return_Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Get_Visibility (Scope => Scope), The_Unit => Dictionary.GetGenericOfInstantiation (Instantiated_Subprogram)); else -- The constraint syntax node, Exp_Node does not refer to the -- pre of the generic nor an explicit return annotation of the generic -- An implicit return annotation uses the scope of the implicit var -- and the correct scope is used without modification and so is treated -- as not generic. Return_Scope := Scope; end if; else Return_Scope := Scope; end if; return Return_Scope; end Get_Generic_Scope; --------------------------------------------------------------------- -- Constraint appears to be just an input but is actually exported. -- (It is effectively a pointer to a data structure which is updated). procedure Instantiate_Parameters (Constraint : in Cells.Cell; Instantiated_Subprogram : in Dictionary.Symbol; VCG_Heap : in out Cells.Heap_Record) -- replace symbols in DAG which belong to a generic unit with the equivalent -- associated with the instantiated unit. Substitutes generic formals/actuals -- and also parameters --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# VCG_Heap from *, --# Constraint, --# Dictionary.Dict, --# Instantiated_Subprogram, --# VCG_Heap; is P : Cells.Cell; S : CStacks.Stack; Sym_To_Check, Generic_Subprogram : Dictionary.Symbol; begin Generic_Subprogram := Dictionary.GetGenericOfInstantiation (Instantiated_Subprogram); -- DAG traversal algorithm of D.E. 
Knuth, Fundamental -- Algorithms, p.317; CStacks.CreateStack (S); P := Constraint; loop loop exit when Cells.Is_Null_Cell (P); CStacks.Push (VCG_Heap, P, S); if Is_Leaf (Node => P, VCG_Heap => VCG_Heap) then P := Cells.Null_Cell; else P := LeftPtr (VCG_Heap, P); end if; end loop; exit when CStacks.IsEmpty (S); P := CStacks.Top (VCG_Heap, S); CStacks.Pop (VCG_Heap, S); if Is_Leaf (Node => P, VCG_Heap => VCG_Heap) then Sym_To_Check := Cells.Get_Symbol_Value (VCG_Heap, P); if Dictionary.IsFormalParameter (Generic_Subprogram, Sym_To_Check) then Cells.Set_Symbol_Value (VCG_Heap, P, Dictionary.ActualOfGenericParameter (Sym_To_Check, Instantiated_Subprogram)); elsif Dictionary.IsGenericFormalParameter (Generic_Subprogram, Sym_To_Check) then Cells.Set_Symbol_Value (VCG_Heap, P, Dictionary.ActualOfGenericFormalObject (Sym_To_Check, Instantiated_Subprogram)); elsif Dictionary.IsType (Sym_To_Check) and then Dictionary.TypeIsGeneric (Sym_To_Check) then Cells.Set_Symbol_Value (VCG_Heap, P, Dictionary.ActualOfGenericFormalType (Sym_To_Check, Instantiated_Subprogram)); elsif Dictionary.IsParameterConstraint (Sym_To_Check) and then Dictionary.TypeIsGeneric (Dictionary.GetType (Sym_To_Check)) then Cells.Set_Symbol_Value (VCG_Heap, P, Dictionary.ActualOfParameterConstraint (Sym_To_Check, Instantiated_Subprogram)); end if; P := Cells.Null_Cell; else P := RightPtr (VCG_Heap, P); end if; end loop; end Instantiate_Parameters; --------------------------------------------------------------------- -- Given a function call in Function_Call; this procedure will -- return a typecheck expression for all arguments in -- Argument_Check and an assumption that the return of the -- function is in type. procedure Get_Function_Type_Constraints (Function_Call : in Cells.Cell; Calling_Scope : in Dictionary.Scopes; Argument_Check : out Cells.Cell; Return_Assumption : out Cells.Cell; VCG_Heap : in out Cells.Heap_Record) --# global in out Dictionary.Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out VC_Contains_Reals; --# in out VC_Failure; --# derives Argument_Check, --# Dictionary.Dict, --# LexTokenManager.State, --# Return_Assumption, --# VCG_Heap from Calling_Scope, --# Dictionary.Dict, --# Function_Call, --# LexTokenManager.State, --# VCG_Heap & --# SPARK_IO.File_Sys, --# Statistics.TableUsage, --# VC_Contains_Reals, --# VC_Failure from *, --# Calling_Scope, --# Dictionary.Dict, --# Function_Call, --# LexTokenManager.State, --# VCG_Heap; is The_FDL_Function : Dictionary.Symbol; The_Constraint : Cells.Cell; The_Type : Dictionary.Symbol; The_Argument : Dictionary.Symbol; Check_Parameter : Boolean; Number_Of_Parameters : Natural; Arg_List : Cells.Cell; The_Arg : Cells.Cell; Arg_Stack : CStacks.Stack; begin -- Work out some basic facts. The_FDL_Function := Cells.Get_Symbol_Value (VCG_Heap, Function_Call); Number_Of_Parameters := Dictionary.GetNumberOfSubprogramParameters (The_FDL_Function); -- Work out the return assumption. The_Type := Dictionary.GetType (The_FDL_Function); Type_Constraint.Make (The_Expression => Function_Call, The_Type => The_Type, Scope => Calling_Scope, Consider_Always_Valid => False, VCG_Heap => VCG_Heap, The_Constraint => Return_Assumption, VC_Contains_Reals => VC_Contains_Reals, VC_Failure => VC_Failure); -- Work out the check required for each function parameter and -- push them onto Arg_Stack. CStacks.CreateStack (Arg_Stack); -- The arguments of the function are stashed in a somewhat -- annoying way. 
For a single argument we have: -- -- fn_call -- / \ -- / \ -- / \ -- (...) Argument -- -- For 2+ arguments we have something like: -- -- fn_call -- / \ -- / \ -- / \ -- (...) op ',' -- / \ -- / \ -- / \ -- Arg1 op ',' -- / \ -- / \ -- / \ -- Arg2 Arg3 if Number_Of_Parameters = 1 then Arg_List := Function_Call; else Arg_List := Cells.Get_B_Ptr (VCG_Heap, Function_Call); end if; -- Arg_List now points to the top of the argument list where -- A_Ptr is the current argument and B_Ptr is the rest; except -- where: -- - We have a single argument; then B is the current argument -- - We are in the last argument; then B is the current argument for Param_N in Natural range 1 .. Number_Of_Parameters loop -- Work out the type of the current argument. The_Argument := Dictionary.GetSubprogramParameter (The_FDL_Function, Param_N); The_Type := Dictionary.GetType (The_Argument); Check_Parameter := True; -- If we have a parameter which is really an own in variable -- there is no need to typecheck it as externally it is -- always not in type and the implementation of a "reading" -- subprogram must deal with this. if Dictionary.IsOwnVariable (The_Argument) and then Dictionary.GetOwnVariableMode (The_Argument) = Dictionary.InMode then Check_Parameter := False; end if; if Check_Parameter then -- Pick out the current argument. if Param_N < Number_Of_Parameters then The_Arg := Cells.Get_A_Ptr (VCG_Heap, Arg_List); else The_Arg := Cells.Get_B_Ptr (VCG_Heap, Arg_List); end if; Type_Constraint.Make (The_Expression => The_Arg, The_Type => The_Type, Scope => Calling_Scope, Consider_Always_Valid => False, VCG_Heap => VCG_Heap, The_Constraint => The_Constraint, VC_Contains_Reals => VC_Contains_Reals, VC_Failure => VC_Failure); -- Unless the contraint is trivially true, push it onto -- Arg_Stack. if not Cells.Utility.Is_True (VCG_Heap, The_Constraint) then CStacks.Push (VCG_Heap, The_Constraint, Arg_Stack); else Cells.Dispose_Of_Cell (VCG_Heap, The_Constraint); end if; end if; -- Point Arg_List to the tail of the list. If we are in the -- second-to-last or last argument we go no further. if Param_N < Number_Of_Parameters - 1 then Arg_List := Cells.Get_B_Ptr (VCG_Heap, Arg_List); end if; end loop; -- Finally we join together the stack of argument constraints -- with /\ and return that. --# accept F, 10, Arg_Stack, "The stack will be empty and no longer used after this."; Join_And (Stack => Arg_Stack, Conjunct => Argument_Check, VCG_Heap => VCG_Heap); --# end accept; end Get_Function_Type_Constraints; --------------------------------------------------------------------- -- Create_Saved_Context_DAG creates a DAG that may be placed on the -- expression stack which contains the values of the given in mode -- parameters. 
The DAG is returned in the Argument_List parameter -- The Argument_List has the form: -- Op "," -- / \ -- / \ -- / \ -- Scope Op "," -- /\ -- / \ -- Direction \ -- Op "," -- /\ -- / \ -- Instantiated_ \ -- Subprogram \ -- Op "," -- /\ -- / \ -- In_Called_ \ -- Function \ -- Op "," -- /\ -- / \ -- Implicit_Var \ -- Op "," -- /\ -- / \ -- Start_Node \ -- Op "," -- /\ -- / \ -- Last_Node Next_Node procedure Create_Saved_Context_DAG (Scope : in Dictionary.Scopes; Direction : in Loop_Direction; Instantiated_Subprogram : in Dictionary.Symbol; Current_Unit : in Dictionary.Symbol; Implicit_Var : in Dictionary.Symbol; Start_Node : in STree.SyntaxNode; Last_Node : in STree.SyntaxNode; Next_Node : in STree.SyntaxNode; VCG_Heap : in out Cells.Heap_Record; Argument_List : out Cells.Cell) --# global in out Statistics.TableUsage; --# derives Argument_List from Current_Unit, --# Direction, --# Implicit_Var, --# Instantiated_Subprogram, --# Last_Node, --# Next_Node, --# Start_Node, --# VCG_Heap & --# Statistics.TableUsage from *, --# Current_Unit, --# Direction, --# Implicit_Var, --# Instantiated_Subprogram, --# Last_Node, --# Next_Node, --# Start_Node, --# VCG_Heap & --# VCG_Heap from *, --# Current_Unit, --# Direction, --# Implicit_Var, --# Instantiated_Subprogram, --# Last_Node, --# Next_Node, --# Scope, --# Start_Node; --# is Left_Ptr, Right_Ptr, Parent_Ptr : Cells.Cell; begin -- The Argument_List is constructed in reverse. -- Save the Last_Node and Next_Node which are a syntax nodes. -- Op "," -- /\ -- / \ -- Last_Node Next_Node CreateOpCell (Parent_Ptr, VCG_Heap, SP_Symbols.comma); CreateCellKind (CellName => Left_Ptr, VCGHeap => VCG_Heap, KindOfCell => Cell_Storage.Proof_Function_Syntax_Node); Cells.Set_Natural_Value (Heap => VCG_Heap, CellName => Left_Ptr, Value => Natural (STree.NodeToRef (Last_Node))); CreateCellKind (CellName => Right_Ptr, VCGHeap => VCG_Heap, KindOfCell => Cell_Storage.Proof_Function_Syntax_Node); Cells.Set_Natural_Value (Heap => VCG_Heap, CellName => Right_Ptr, Value => Natural (STree.NodeToRef (Next_Node))); SetLeftArgument (OpCell => Parent_Ptr, Argument => Left_Ptr, VCGHeap => VCG_Heap); SetRightArgument (OpCell => Parent_Ptr, Argument => Right_Ptr, VCGHeap => VCG_Heap); -- Op "," -- /\ -- / \ -- Start_Node \ -- Create new comma cell and swap things around. Right_Ptr := Parent_Ptr; CreateOpCell (Parent_Ptr, VCG_Heap, SP_Symbols.comma); SetRightArgument (OpCell => Parent_Ptr, Argument => Right_Ptr, VCGHeap => VCG_Heap); -- Save the Start_Node which is a syntax node. CreateCellKind (CellName => Left_Ptr, VCGHeap => VCG_Heap, KindOfCell => Cell_Storage.Proof_Function_Syntax_Node); Cells.Set_Natural_Value (Heap => VCG_Heap, CellName => Left_Ptr, Value => Natural (STree.NodeToRef (Start_Node))); SetLeftArgument (OpCell => Parent_Ptr, Argument => Left_Ptr, VCGHeap => VCG_Heap); -- Op "," -- /\ -- / \ -- Implicit_Var \ -- Create new comma cell and swap things around. Right_Ptr := Parent_Ptr; CreateOpCell (Parent_Ptr, VCG_Heap, SP_Symbols.comma); SetRightArgument (OpCell => Parent_Ptr, Argument => Right_Ptr, VCGHeap => VCG_Heap); -- Save the Current_Unit which is a dictionary -- symbol. CreateReferenceCell (CellName => Left_Ptr, VCGHeap => VCG_Heap, Sym => Implicit_Var); SetLeftArgument (OpCell => Parent_Ptr, Argument => Left_Ptr, VCGHeap => VCG_Heap); -- Op "," -- /\ -- / \ -- In_Called_ \ -- Function \ -- Create new comma cell and swap things around. 
Right_Ptr := Parent_Ptr; CreateOpCell (Parent_Ptr, VCG_Heap, SP_Symbols.comma); SetRightArgument (OpCell => Parent_Ptr, Argument => Right_Ptr, VCGHeap => VCG_Heap); -- Save the Current_Unit which is a dictionary -- symbol. CreateReferenceCell (CellName => Left_Ptr, VCGHeap => VCG_Heap, Sym => Current_Unit); SetLeftArgument (OpCell => Parent_Ptr, Argument => Left_Ptr, VCGHeap => VCG_Heap); -- Op "," -- /\ -- / \ -- Instantiated_ \ -- Subprogram \ -- Create new comma cell and swap things around. Right_Ptr := Parent_Ptr; CreateOpCell (Parent_Ptr, VCG_Heap, SP_Symbols.comma); SetRightArgument (OpCell => Parent_Ptr, Argument => Right_Ptr, VCGHeap => VCG_Heap); -- Save the Instantiated_Subprogram which is a dictionary -- symbol. CreateReferenceCell (CellName => Left_Ptr, VCGHeap => VCG_Heap, Sym => Instantiated_Subprogram); SetLeftArgument (OpCell => Parent_Ptr, Argument => Left_Ptr, VCGHeap => VCG_Heap); -- Op "," -- /\ -- / \ -- Direction \ -- Create new comma cell and swap things around. Right_Ptr := Parent_Ptr; CreateOpCell (Parent_Ptr, VCG_Heap, SP_Symbols.comma); SetRightArgument (OpCell => Parent_Ptr, Argument => Right_Ptr, VCGHeap => VCG_Heap); -- Save Direction which is a Loop_Direction. Create_Internal_Natural_Cell (Cell_Name => Left_Ptr, VCG_Heap => VCG_Heap, N => Loop_Direction'Pos (Direction)); SetLeftArgument (OpCell => Parent_Ptr, Argument => Left_Ptr, VCGHeap => VCG_Heap); -- Op "," -- / \ -- / \ -- / \ -- Scope -- Create new comma cell and swap things around. Right_Ptr := Parent_Ptr; CreateOpCell (Parent_Ptr, VCG_Heap, SP_Symbols.comma); SetRightArgument (OpCell => Parent_Ptr, Argument => Right_Ptr, VCGHeap => VCG_Heap); -- Save the given scope. Cells.Utility.Create_Scope_Cell (VCG_Heap => VCG_Heap, The_Scope => Scope, The_Cell => Left_Ptr); SetLeftArgument (OpCell => Parent_Ptr, Argument => Left_Ptr, VCGHeap => VCG_Heap); -- The current parent is the argument list. Argument_List := Parent_Ptr; end Create_Saved_Context_DAG; --------------------------------------------------------------------- -- This procedure restores a previously saved context by the above -- procedure, Create_Saved_Context_DAG; -- -- This also disposes of all cells in Argument_List. 
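--
-- Illustrative sketch (not part of the build): the intended save/restore
-- round trip between Create_Saved_Context_DAG and the following procedure,
-- Load_Saved_Context_DAG.  Saved_Context below simply stands for a
-- Cells.Cell holding the saved argument list.
--
--    Create_Saved_Context_DAG (Scope                   => Scope,
--                              Direction               => Direction,
--                              Instantiated_Subprogram => Instantiated_Subprogram,
--                              Current_Unit            => Current_Unit,
--                              Implicit_Var            => Implicit_Var,
--                              Start_Node              => Start_Node,
--                              Last_Node               => Last_Node,
--                              Next_Node               => Next_Node,
--                              VCG_Heap                => VCG_Heap,
--                              Argument_List           => Saved_Context);
--
--    ...  -- traverse the called function's own pre/return annotations here
--
--    Load_Saved_Context_DAG (Scope                   => Scope,
--                            Direction               => Direction,
--                            Instantiated_Subprogram => Instantiated_Subprogram,
--                            Current_Unit            => Current_Unit,
--                            Implicit_Var            => Implicit_Var,
--                            Start_Node              => Start_Node,
--                            Last_Node               => Last_Node,
--                            Next_Node               => Next_Node,
--                            VCG_Heap                => VCG_Heap,
--                            Argument_List           => Saved_Context);
--
-- Note that Load_Saved_Context_DAG disposes of every cell of the saved list
-- as it goes, so the list must not be read again after the call.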
procedure Load_Saved_Context_DAG (Scope : out Dictionary.Scopes; Direction : out Loop_Direction; Instantiated_Subprogram : out Dictionary.Symbol; Current_Unit : out Dictionary.Symbol; Implicit_Var : out Dictionary.Symbol; Start_Node : out STree.SyntaxNode; Last_Node : out STree.SyntaxNode; Next_Node : out STree.SyntaxNode; VCG_Heap : in out Cells.Heap_Record; Argument_List : in Cells.Cell) --# derives Current_Unit, --# Direction, --# Implicit_Var, --# Instantiated_Subprogram, --# Last_Node, --# Next_Node, --# Scope, --# Start_Node, --# VCG_Heap from Argument_List, --# VCG_Heap; is Left_Ptr, Right_Ptr, Parent_Ptr : Cells.Cell; Tmp : Natural; begin Parent_Ptr := Argument_List; -- Op "," -- / \ -- / \ -- / \ -- Scope Left_Ptr := LeftPtr (VCG_Heap, Parent_Ptr); Right_Ptr := RightPtr (VCG_Heap, Parent_Ptr); Cells.Dispose_Of_Cell (VCG_Heap, Parent_Ptr); Scope := Cells.Utility.Scope_Cell_Get_Scope (VCG_Heap, Left_Ptr); Parent_Ptr := Right_Ptr; -- Op "," -- /\ -- / \ -- Direction \ Left_Ptr := LeftPtr (VCG_Heap, Parent_Ptr); Right_Ptr := RightPtr (VCG_Heap, Parent_Ptr); Cells.Dispose_Of_Cell (VCG_Heap, Parent_Ptr); Tmp := Cells.Get_Natural_Value (VCG_Heap, Left_Ptr); Direction := Loop_Direction'Val (Tmp); Cells.Dispose_Of_Cell (VCG_Heap, Left_Ptr); Parent_Ptr := Right_Ptr; -- Op "," -- /\ -- / \ -- Instantiated_ \ -- Subprogram \ Left_Ptr := LeftPtr (VCG_Heap, Parent_Ptr); Right_Ptr := RightPtr (VCG_Heap, Parent_Ptr); Cells.Dispose_Of_Cell (VCG_Heap, Parent_Ptr); Instantiated_Subprogram := Cells.Get_Symbol_Value (VCG_Heap, Left_Ptr); Cells.Dispose_Of_Cell (VCG_Heap, Left_Ptr); Parent_Ptr := Right_Ptr; -- Op "," -- /\ -- / \ -- In_Called_ \ -- Function \ Left_Ptr := LeftPtr (VCG_Heap, Parent_Ptr); Right_Ptr := RightPtr (VCG_Heap, Parent_Ptr); Cells.Dispose_Of_Cell (VCG_Heap, Parent_Ptr); Current_Unit := Cells.Get_Symbol_Value (VCG_Heap, Left_Ptr); Cells.Dispose_Of_Cell (VCG_Heap, Left_Ptr); Parent_Ptr := Right_Ptr; -- Op "," -- /\ -- / \ -- Implicit_Var \ Left_Ptr := LeftPtr (VCG_Heap, Parent_Ptr); Right_Ptr := RightPtr (VCG_Heap, Parent_Ptr); Cells.Dispose_Of_Cell (VCG_Heap, Parent_Ptr); Implicit_Var := Cells.Get_Symbol_Value (VCG_Heap, Left_Ptr); Cells.Dispose_Of_Cell (VCG_Heap, Left_Ptr); Parent_Ptr := Right_Ptr; -- Op "," -- /\ -- / \ -- Start_Node \ Left_Ptr := LeftPtr (VCG_Heap, Parent_Ptr); Right_Ptr := RightPtr (VCG_Heap, Parent_Ptr); Cells.Dispose_Of_Cell (VCG_Heap, Parent_Ptr); Tmp := Cells.Get_Natural_Value (VCG_Heap, Left_Ptr); Start_Node := STree.RefToNode (ExaminerConstants.RefType (Tmp)); Cells.Dispose_Of_Cell (VCG_Heap, Left_Ptr); Parent_Ptr := Right_Ptr; -- Op "," -- /\ -- / \ -- Last_Node Next_Node Left_Ptr := LeftPtr (VCG_Heap, Parent_Ptr); Right_Ptr := RightPtr (VCG_Heap, Parent_Ptr); Cells.Dispose_Of_Cell (VCG_Heap, Parent_Ptr); Tmp := Cells.Get_Natural_Value (VCG_Heap, Left_Ptr); Last_Node := STree.RefToNode (ExaminerConstants.RefType (Tmp)); Cells.Dispose_Of_Cell (VCG_Heap, Left_Ptr); Tmp := Cells.Get_Natural_Value (VCG_Heap, Right_Ptr); Next_Node := STree.RefToNode (ExaminerConstants.RefType (Tmp)); Cells.Dispose_Of_Cell (VCG_Heap, Right_Ptr); end Load_Saved_Context_DAG; --------------------------------------------------------------------- -- Insert_Guarded_Function_Definition saves the call of the Calling_Function -- of this function (this will be null if no function calls have been encountered) -- I then saves the current context of the parse loop: -- Subrogram_Instantiaton, Direction, Current_Unit, Implicit_Var, -- Start_Node, Last_Node and Next_Node. 
-- It then sets up the expression stack, E_Stack,
-- in preparation for traversing the syntax trees of the pre and return
-- annotations of a function called in a proof context.
-- It also establishes the basic function definition on the stack as
--    <precondition> -> <function call> = <return annotation>,
-- where the "=" operator may be an "<->" operator if the result type of the
-- called function is Boolean.
-- If the called function has no precondition then the precondition "True"
-- is assumed.
-- If the called function has no return annotation and is not of a Boolean
-- type then the function definition is considered to be
--    <precondition> -> <function call> = <function call>
-- This is needed so that the call of the function is recorded
-- and so that an in-type assumption for its return value can be
-- conjoined.
-- If the called function has no return annotation and the result type
-- is Boolean, its definition is omitted and Insert_Guarded_Function_Definition
-- leaves E_Stack unchanged.
-- If the call of the function is recursive then the function definition is
-- not further expanded and Insert_Guarded_Function_Definition leaves E_Stack
-- unchanged.
-- Insert_Guarded_Function_Definition also records whether the function call
-- is of an instantiated generic subprogram by setting the value of
-- Instantiated_Subprogram to refer to the generic subprogram.
-- If the called function is not an instantiation then Instantiated_Subprogram
-- is a null symbol.
-- Lastly, if the function call is one to be processed,
-- Insert_Guarded_Function_Definition sets the value of the Start_Node,
-- Last_Node and Next_Node to a null syntax node guaranteeing that the parse
-- loops are immediately exited ready to parse the precondition and return
-- annotation of the called function.
-- Assuming that the called function has a pre and return annotation,
-- Insert_Guarded_Function_Definition places on the top of the E_Stack the
-- following entities:
--
--    |-----------|<- Default return anno
--    |-----------|<- Proof_Function_Syntax_Node
--    |-----------|<- Proof_Function_Obtain_Return
--    |-----------|<- "=" place holder for return anno
--    |-----------|<- Proof_Function_Obtain_Precondition (left -> Function_Call)
--    |-----------|<- precond placeholder "->" function definition placeholder
--    |-----------|<- argument typecheck (this will be anded to the precondition)
--    |-----------|<- Function_Call_In_Proof_Context
--    |-----------|<- return assumption (this will be anded to the entire instantiation)
--    |-----------|<- Calling_Function the function call that applied this function
--    |     /     |
--    |     \     | previous values on stack
--    |     /     |
--
-- The True Call and Proof_Function_Syntax_Node entities are needed to
-- synchronize the stack for the next iteration of the parser.
-- Function_Call_In_Proof_Context does not itself pop off the entities which
-- it stacks; that is done within the outer loop enclosing the parser loops.
-- The unstacking is described here for completeness.
--
-- After exiting the parser loop the top element of the stack is always
-- popped off. This will reveal the Proof_Function_Syntax_Node which indicates
-- the start of processing the definition of a called function and the need
-- to enter the parse loop afresh with a new root syntax node by setting the
-- Direction to Down_Loop.
-- When the parse loop is exited again, a DAG of the return anno is on the
-- top of the stack.
This is popped off to reveal Proof_Function_Obtain_Return -- This indicates that the DAG just popped of the stack is the root of the -- return anno DAG and has to become the RHS of the "=" or "<->" operator. -- This operator is popped off and becomes the RHS of the "->" operator -- The Proof_Function_Obtain_Precondition may have be temporarilly popped off -- but it is the TOS when the parse loop is re-entered. -- The Proof_Function_Obtain_Precondition contains a reference to the -- precondition node of the called function and this is extracted to -- set up the values of Start_Node, Last_Node and Next_Node and the -- Direction is set to Down_Loop to enter the parse loop afresh with the -- new root syntax node of the precondition. -- If the called function has no precondition a True Cell is pushed -- on the E_Stack as the assumed precondition. -- When the parse loop exits again the popped of cell is the root of the -- precondition DAG which becomes the LHS of the "->" operator. -- The guarded function definition is now complete and may be popped of the -- E_Stack and pushed on to the function definition stack. -- The top of E_Stack is now Function_Call_In_Proof_Context -- and contains the saved context. It is popped off and the saved context -- restored but further checks have to be performed on the restored context -- to ascertain whether the parsing sould continue within the down or up loop. procedure Insert_Guarded_Function_Definition (Function_Call : in Cells.Cell; Scope : in Dictionary.Scopes; Calling_Scope : in Dictionary.Scopes; Concrete_Function : in Dictionary.Symbol; Abstraction : in Dictionary.Abstractions; Direction : in Loop_Direction; Called_Functions : in out Symbol_Set.T; Instantiated_Subprogram : in out Dictionary.Symbol; Current_Unit : in out Dictionary.Symbol; Implicit_Var : in out Dictionary.Symbol; Start_Node : in out STree.SyntaxNode; Last_Node : in out STree.SyntaxNode; Next_Node : in out STree.SyntaxNode; Calling_Function : in out Cells.Cell; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in STree.Table; --# in out Dictionary.Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out VC_Contains_Reals; --# in out VC_Failure; --# derives Called_Functions, --# Current_Unit, --# Instantiated_Subprogram, --# Last_Node, --# Next_Node, --# Start_Node from *, --# Abstraction, --# Called_Functions, --# Concrete_Function, --# Dictionary.Dict & --# Calling_Function from *, --# Abstraction, --# Called_Functions, --# Concrete_Function, --# Dictionary.Dict, --# E_Stack, --# Function_Call, --# VCG_Heap & --# Dictionary.Dict, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# VC_Contains_Reals, --# VC_Failure from *, --# Abstraction, --# Called_Functions, --# Calling_Function, --# Calling_Scope, --# Concrete_Function, --# Dictionary.Dict, --# E_Stack, --# Function_Call, --# LexTokenManager.State, --# VCG_Heap & --# E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# Abstraction, --# Called_Functions, --# Calling_Function, --# Calling_Scope, --# Concrete_Function, --# Current_Unit, --# Dictionary.Dict, --# Direction, --# E_Stack, --# Function_Call, --# Implicit_Var, --# Instantiated_Subprogram, --# Last_Node, --# LexTokenManager.State, --# Next_Node, --# Scope, --# Start_Node, --# STree.Table, --# VCG_Heap & --# Implicit_Var from *, --# Abstraction, --# Called_Functions, --# Calling_Function, --# Calling_Scope, --# Concrete_Function, --# Dictionary.Dict, --# E_Stack, --# 
Function_Call, --# LexTokenManager.State, --# STree.Table, --# VCG_Heap; is Precondition_Node : STree.SyntaxNode; Return_Anno_Node : STree.SyntaxNode; Copy_Of_Function_Call : Cells.Cell; Function_Call_Marker : Cells.Cell; Function_Symbol : Cells.Cell; Implicit_Return_Var : Cells.Cell; OP_Cell : Cells.Cell; New_Start_Node : Cells.Cell; Precondition : Cells.Cell; Precondition_Marker : Cells.Cell; Return_Anno : Cells.Cell; Return_Anno_Marker : Cells.Cell; Argument_Check : Cells.Cell; Return_Assumption : Cells.Cell; Return_Type_Is_Boolean : Boolean; Local_Abstraction : Dictionary.Abstractions; Local_Instantiated_Subprogram : Dictionary.Symbol; Saved_Context : Cells.Cell; begin -- Get the details of the called function Return_Type_Is_Boolean := Dictionary.TypeIsBoolean (Dictionary.GetType (Concrete_Function)); -- Determine whether we have a call of an instantiation of a generic if Dictionary.IsInstantiation (Concrete_Function) then Local_Instantiated_Subprogram := Concrete_Function; -- The view must be abstract if is an instantiation -- as the body cannot be visible. Local_Abstraction := Dictionary.IsAbstract; else -- not generic Local_Instantiated_Subprogram := Dictionary.NullSymbol; Local_Abstraction := Abstraction; end if; -- Get the precondition and return anno syntax nodes -- (they may be from the instantiation or the generic declaration -- if the call is of an instantiation of a generic). Precondition_Node := STree.RefToNode (Dictionary.GetPrecondition (Local_Abstraction, Concrete_Function)); Return_Anno_Node := STree.RefToNode (Dictionary.GetPostcondition (Local_Abstraction, Concrete_Function)); -- The function call is only processed if the function is not -- already being processed and it has a return anno. if not Symbol_Set.Contains (The_Set => Called_Functions, Sym => Concrete_Function) and then Return_Anno_Node /= STree.NullNode then Symbol_Set.Add (The_Set => Called_Functions, Sym => Concrete_Function); --------------------------------------------------------------------- -- Save the calling function call on the stack CStacks.Push (Heap => VCG_Heap, CellName => Calling_Function, S => E_Stack); -- Make a copy of the called function and substitute the parameters -- if it is a nested call Structures.CopyStructure (Heap => VCG_Heap, Root => Function_Call, RootCopy => Copy_Of_Function_Call); if not Cells.Is_Null_Cell (Calling_Function) then Substitutions.Substitute_Parameters (Called_Function => Calling_Function, Constraint => Copy_Of_Function_Call, VCG_Heap => VCG_Heap); end if; -- The called function (with any parameter substituitons) becomes -- the calling function for any further nested calls Calling_Function := Copy_Of_Function_Call; --------------------------------------------------------------------- -- Push the function's argument check and return assumption -- onto the stack first. 
Get_Function_Type_Constraints (Function_Call => Copy_Of_Function_Call, Calling_Scope => Calling_Scope, Argument_Check => Argument_Check, Return_Assumption => Return_Assumption, VCG_Heap => VCG_Heap); CStacks.Push (VCG_Heap, Return_Assumption, E_Stack); --------------------------------------------------------------------- -- Create a function call marker and push it on the expression stack CreateCellKind (CellName => Function_Call_Marker, VCGHeap => VCG_Heap, KindOfCell => Cell_Storage.Function_Call_In_Proof_Context); CStacks.Push (Heap => VCG_Heap, CellName => Function_Call_Marker, S => E_Stack); -- TOS is a function call marker -- The right argument of a function call marker is the saved context Create_Saved_Context_DAG (Scope => Scope, Direction => Direction, Instantiated_Subprogram => Instantiated_Subprogram, Current_Unit => Current_Unit, Implicit_Var => Implicit_Var, Start_Node => Start_Node, Last_Node => Last_Node, Next_Node => Next_Node, VCG_Heap => VCG_Heap, Argument_List => Saved_Context); SetRightArgument (OpCell => CStacks.Top (VCG_Heap, E_Stack), Argument => Saved_Context, VCGHeap => VCG_Heap); -- The left argument of a function call marker is the concrete function symbol CreateCellKind (CellName => Function_Symbol, VCGHeap => VCG_Heap, KindOfCell => Cell_Storage.Declared_Function); Cells.Set_Symbol_Value (Heap => VCG_Heap, CellName => Function_Symbol, Sym => Concrete_Function); SetLeftArgument (OpCell => CStacks.Top (VCG_Heap, E_Stack), Argument => Function_Symbol, VCGHeap => VCG_Heap); -- Now we push the argument check. CStacks.Push (VCG_Heap, Argument_Check, E_Stack); ---------------------------------------------------------------------- -- Create a -> operator and push it on the stack CreateOpCell (OP_Cell, VCG_Heap, SP_Symbols.implies); CStacks.Push (Heap => VCG_Heap, CellName => OP_Cell, S => E_Stack); -- TOS is "->" operator ---------------------------------------------------------------------- -- Create a precondition marker and push it on the stack CreateCellKind (CellName => Precondition_Marker, VCGHeap => VCG_Heap, KindOfCell => Cell_Storage.Proof_Function_Obtain_Precondition); -- We remember the function call as otherwise its a bit -- painful to get to it in the main loop below. SetLeftArgument (Precondition_Marker, Copy_Of_Function_Call, VCG_Heap); CStacks.Push (Heap => VCG_Heap, CellName => Precondition_Marker, S => E_Stack); -- TOS is precondition marker -- Set the RHS of the precondition marker to a cell containing -- a reference to the syntax node for the precondition (which will be -- null if the called function does not have one). 
CreateCellKind (CellName => Precondition, VCGHeap => VCG_Heap, KindOfCell => Cell_Storage.Proof_Function_Syntax_Node); Cells.Set_Natural_Value (Heap => VCG_Heap, CellName => Precondition, Value => Natural (STree.NodeToRef (Precondition_Node))); SetRightArgument (OpCell => CStacks.Top (VCG_Heap, E_Stack), Argument => Precondition, VCGHeap => VCG_Heap); ---------------------------------------------------------------------- -- Create a "=" or "<->" operator depending on the function type -- and push it on the stack if Return_Type_Is_Boolean then CreateOpCell (OP_Cell, VCG_Heap, SP_Symbols.is_equivalent_to); else CreateOpCell (OP_Cell, VCG_Heap, SP_Symbols.equals); end if; CStacks.Push (Heap => VCG_Heap, CellName => OP_Cell, S => E_Stack); -- TOS is Op "=" or "<->" -- LHS of the operator to the actual function call SetLeftArgument (OpCell => CStacks.Top (VCG_Heap, E_Stack), Argument => Copy_Of_Function_Call, VCGHeap => VCG_Heap); ---------------------------------------------------------------------- -- Create a return anno marker and push it on the stack CreateCellKind (CellName => Return_Anno_Marker, VCGHeap => VCG_Heap, KindOfCell => Cell_Storage.Proof_Function_Obtain_Return); CStacks.Push (Heap => VCG_Heap, CellName => Return_Anno_Marker, S => E_Stack); -- TOS is return anno marker -- Determine whether the called function has a return anno and, if it has, -- whether it is explicit or implicit. if Return_Anno_Node = STree.NullNode or else STree.Syntax_Node_Type (Return_Anno_Node) = SP_Symbols.annotation_expression then -- The called function has no return anno or it is an explicit -- return anno. In either case it has no implicit variable. Implicit_Var := Dictionary.NullSymbol; else -- It is an implicit return annotation - get the implicit -- variable. Implicit_Var := Dictionary.GetImplicitReturnVariable (Local_Abstraction, Concrete_Function); -- Set the Return_Anno_Node to the start of the -- return expression (involving the implicit variable). Return_Anno_Node := STree.Next_Sibling (Current_Node => Return_Anno_Node); end if; -- Set the RHS of the return anno marker to a cell containing -- a reference to the root syntax tree node for the return anno CreateCellKind (CellName => Return_Anno, VCGHeap => VCG_Heap, KindOfCell => Cell_Storage.Proof_Function_Syntax_Node); Cells.Set_Natural_Value (Heap => VCG_Heap, CellName => Return_Anno, Value => Natural (STree.NodeToRef (Return_Anno_Node))); SetRightArgument (OpCell => CStacks.Top (VCG_Heap, E_Stack), Argument => Return_Anno, VCGHeap => VCG_Heap); -- Set the LHS of the return anno marker to a cell containing -- a reference to the implicit variable CreateReferenceCell (CellName => Implicit_Return_Var, VCGHeap => VCG_Heap, Sym => Implicit_Var); SetLeftArgument (OpCell => CStacks.Top (VCG_Heap, E_Stack), Argument => Implicit_Return_Var, VCGHeap => VCG_Heap); -- Set up the stack to start processing return anno CreateCellKind (CellName => New_Start_Node, VCGHeap => VCG_Heap, KindOfCell => Cell_Storage.Proof_Function_Syntax_Node); CStacks.Push (Heap => VCG_Heap, CellName => New_Start_Node, S => E_Stack); -- Put a default return anno on the stack -- for when no return anno is given CStacks.Push (Heap => VCG_Heap, CellName => Function_Call, S => E_Stack); -- TOS is => Proof_Function_Syntax_Node => return anno marker -- Record whether the call is of an instantiation of a generic function -- This does not change whether the pre or return anno is being processed.
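-- As an illustration (a hypothetical example, not drawn from the code here):
-- given
--    function Inc (X : Integer) return Integer;
--    --# pre    X < Integer'Last;
--    --# return X + 1;
-- a call Inc (A) appearing in an annotation gives rise to the guarded
-- definition
--    A < Integer'Last -> inc (A) = A + 1
-- with "<->" in place of "=" when the called function returns Boolean.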
Instantiated_Subprogram := Local_Instantiated_Subprogram; -- Determine the symbol from which the scope for parsing the pre and -- explicit return annotation is obtained. Current_Unit := Concrete_Function; -- Ensure that the parse loops are immediately exited by setting -- context variables to a null node Start_Node := STree.NullNode; Last_Node := STree.NullNode; Next_Node := STree.NullNode; end if; end Insert_Guarded_Function_Definition; --------------------------------------------------------------------- -- Given a concrete or implicit function symbol and a scope, -- Get_Concrete_Function_And_Abstraction obtains the concrete function -- symbol, the symbol of the FDL version of the function used in the VCs -- and the level of abstraction for the call of the function from the -- given scope. procedure Get_Concrete_Function_And_Abstraction (Function_Symbol : in Dictionary.Symbol; Force_Abstract : in Boolean; Scope : in Dictionary.Scopes; Concrete_Function : out Dictionary.Symbol; FDL_Function : out Dictionary.Symbol; Level_Of_Abstraction : out Dictionary.Abstractions) --# global in Dictionary.Dict; --# derives Concrete_Function from Dictionary.Dict, --# Function_Symbol & --# FDL_Function, --# Level_Of_Abstraction from Dictionary.Dict, --# Force_Abstract, --# Function_Symbol, --# Scope; is begin -- Debug.PrintMsg ("Get_Concrete_Function_And_Abstraction", True); -- Debug.Print_Function_Sym (" Function_Symbol: ", Function_Symbol); -- Debug.PrintBool (" Force_Abstract: ", Force_Abstract); -- Debug.PrintScope (" Scope: ", Scope); -- The function symbol may refer to a concrete function (Ada or -- proof) or an implicit view of an Ada function; we need to be -- sure which one we have. if Dictionary.IsImplicitProofFunction (Function_Symbol) then -- GetAdaFunction gets the concrete view of the Ada function Concrete_Function := Dictionary.GetAdaFunction (Function_Symbol); else Concrete_Function := Function_Symbol; end if; -- The function call may have either an abstract or refined -- signature depending on where it is called. The refinement -- may be due to data refinement of an own variable or a -- private data type refinement, in which case only the pre and -- return annotations are refined. Only the level of -- abstraction relating to the pre and return annotations is -- required in a proof context (an annotation expression). if Force_Abstract then Level_Of_Abstraction := Dictionary.IsAbstract; else Level_Of_Abstraction := Dictionary.GetConstraintAbstraction (Concrete_Function, Scope); end if; -- Finally we get the function symbol to use in FDL. if Dictionary.IsProofFunction (Function_Symbol) and not Dictionary.IsImplicitProofFunction (Function_Symbol) then FDL_Function := Function_Symbol; else FDL_Function := Dictionary.GetImplicitProofFunction (Level_Of_Abstraction, Concrete_Function); -- We may not have a refined function, in which case we -- should fall back to the abstract one.
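-- For example (a sketch): if the function only has abstract pre and return
-- annotations (no refined ones on its body), the refined implicit proof
-- function does not exist and the abstract one is used instead.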
if Level_Of_Abstraction = Dictionary.IsRefined and Dictionary.Is_Null_Symbol (FDL_Function) then FDL_Function := Dictionary.GetImplicitProofFunction (Dictionary.IsAbstract, Concrete_Function); end if; end if; -- Debug.Print_Function_Sym (" Concrete FN: ", Concrete_Function); -- Debug.Print_Function_Sym (" FDL FN: ", FDL_Function); -- case Level_Of_Abstraction is -- when Dictionary.IsAbstract => -- Debug.PrintMsg (" Abstraction: isAbstract", True); -- when Dictionary.IsRefined => -- Debug.PrintMsg (" Abstraction: isRefined", True); -- end case; end Get_Concrete_Function_And_Abstraction; --------------------------------------------------------------------- -- Setup_Function_Call is called during the DAG.Build_Annotation_Expression -- "down-loop" when a function call is encountered in an annotation -- expression and establishes a data structure to represent the actual -- parameters of the function call. -- The actual parameters are entered during the "up-loop" by the procedures -- Process_Positional_Argument_Association or -- Process_Named_Argument_Association, and the processing of the -- function call is completed during the "up-loop" by Process_Name_Argument_List. -- Setup_Function_Call also determines the level of abstraction from the -- given scope and sets the DAG symbol value for the function to the correct -- implicit function. -- If it is a parameterless function (a function in an annotation expression -- does not have globals) there are no actual parameters to be processed -- during the up-loop and so the function call has to be completed by -- this subprogram: the DAG symbol kind is changed to a proof function, -- whether it is an Ada or a proof function, indicating that the processing of -- the function call is complete, and Insert_Guarded_Function_Definition is -- called to initiate processing of the -- pre and return annotations of the function. -- If the function has parameters: the completion of the function call -- is performed by the procedure Process_Name_Argument_List during the -- DAG.Build_Annotation_Expression "up-loop".
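-- For example (hypothetical): given
--    function Limit return Natural;
-- an annotation such as
--    --# pre X < Limit;
-- contains a parameterless function call, so the whole call model,
-- including the call of Insert_Guarded_Function_Definition, is completed
-- here on the down-loop; a call with parameters, such as Max (X, Y) in an
-- annotation, is completed later by Process_Name_Argument_List.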
procedure Setup_Function_Call (Direction : in Loop_Direction; Scope : in Dictionary.Scopes; Calling_Scope : in Dictionary.Scopes; Force_Abstract : in Boolean; Current_Unit : in out Dictionary.Symbol; Implicit_Var : in out Dictionary.Symbol; Current_Instantiation : in out Dictionary.Symbol; Start_Node : in out STree.SyntaxNode; Next_Node : in out STree.SyntaxNode; Last_Node : in out STree.SyntaxNode; Calling_Function : in out Cells.Cell; Called_Functions : in out Symbol_Set.T; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in Generate_Function_Instantiations; --# in STree.Table; --# in out Dictionary.Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out VC_Contains_Reals; --# in out VC_Failure; --# derives Called_Functions, --# Calling_Function, --# Current_Instantiation, --# Current_Unit, --# Last_Node, --# Next_Node, --# Start_Node from *, --# Called_Functions, --# Calling_Scope, --# Dictionary.Dict, --# E_Stack, --# Force_Abstract, --# Generate_Function_Instantiations, --# VCG_Heap & --# Dictionary.Dict, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# VC_Contains_Reals, --# VC_Failure from *, --# Called_Functions, --# Calling_Function, --# Calling_Scope, --# Dictionary.Dict, --# E_Stack, --# Force_Abstract, --# Generate_Function_Instantiations, --# LexTokenManager.State, --# VCG_Heap & --# E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# Called_Functions, --# Calling_Function, --# Calling_Scope, --# Current_Instantiation, --# Current_Unit, --# Dictionary.Dict, --# Direction, --# E_Stack, --# Force_Abstract, --# Generate_Function_Instantiations, --# Implicit_Var, --# Last_Node, --# LexTokenManager.State, --# Next_Node, --# Scope, --# Start_Node, --# STree.Table, --# VCG_Heap & --# Implicit_Var from *, --# Called_Functions, --# Calling_Function, --# Calling_Scope, --# Dictionary.Dict, --# E_Stack, --# Force_Abstract, --# Generate_Function_Instantiations, --# LexTokenManager.State, --# STree.Table, --# VCG_Heap; is Number_Of_Parameters : Natural; Function_Sym : Dictionary.Symbol; Level_Of_Abstraction : Dictionary.Abstractions; Concrete_Function : Dictionary.Symbol; FDL_Function : Dictionary.Symbol; Actual_Function_Call : Cells.Cell; begin -- Get the function symbol from the DAG Function_Sym := Cells.Get_Symbol_Value (VCG_Heap, CStacks.Top (VCG_Heap, E_Stack)); -- Ada functions have a concrete and and one or two -- implicit views. An implicit view may be either abstract or refined -- but it is always an implicit view which appears in VCs. -- Both the concrete and implicit views of the function are -- required for processing the function call because the implicit -- view is used for the FDL and the concrete view contains the -- pre and return annoatations. -- A proof function only has a single concrete view. -- Ensure we have the concrete and FDL function symbols and the correct -- level of abstraction. -- An embedded function call is not forced to be abstract. Get_Concrete_Function_And_Abstraction (Function_Symbol => Function_Sym, Force_Abstract => Force_Abstract, Scope => Calling_Scope, Concrete_Function => Concrete_Function, FDL_Function => FDL_Function, Level_Of_Abstraction => Level_Of_Abstraction); -- Ensure that the function symbol in the DAG has the correct implicit -- view. 
Cells.Set_Symbol_Value (Heap => VCG_Heap, CellName => CStacks.Top (VCG_Heap, E_Stack), Sym => FDL_Function); Number_Of_Parameters := Dictionary.GetNumberOfSubprogramParameters (FDL_Function); -- Establish the data structure to take the actual parameters CreateEmptyList (Number_Of_Parameters, VCG_Heap, E_Stack); -- If the function is parameterless then the function model has -- to be completed here on the down-loop because -- Process_Name_Argument_List will not be called on the up-loop to -- complete the function model. The function call model is -- completed by setting the Cell.Kind as a proof function -- and calling Insert_Guarded_Function_Definition for subsequent -- building of the graphs for the pre and return annotations of the function. if Number_Of_Parameters = 0 then Cells.Set_Kind (Heap => VCG_Heap, CellName => CStacks.Top (VCG_Heap, E_Stack), KindConst => Cell_Storage.Proof_Function); if Generate_Function_Instantiations then Actual_Function_Call := CStacks.Top (VCG_Heap, E_Stack); Insert_Guarded_Function_Definition (Function_Call => Actual_Function_Call, Scope => Scope, Calling_Scope => Calling_Scope, Concrete_Function => Concrete_Function, Abstraction => Level_Of_Abstraction, Direction => Direction, Called_Functions => Called_Functions, Instantiated_Subprogram => Current_Instantiation, Current_Unit => Current_Unit, Implicit_Var => Implicit_Var, Start_Node => Start_Node, Last_Node => Last_Node, Next_Node => Next_Node, E_Stack => E_Stack, Calling_Function => Calling_Function, VCG_Heap => VCG_Heap); end if; end if; end Setup_Function_Call; ----------------------------------------------------------------------- procedure Setup_Array_Access (E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# Dictionary.Dict, --# E_Stack, --# VCG_Heap; is Number_Of_Dimensions : Positive; DAG_Cell : Cells.Cell; begin Number_Of_Dimensions := Dictionary.GetNumberOfDimensions (GetTOStype (VCG_Heap, E_Stack)); CreateCellKind (DAG_Cell, VCG_Heap, Cell_Storage.List_Function); CStacks.Push (VCG_Heap, DAG_Cell, E_Stack); CreateEmptyList (Number_Of_Dimensions, VCG_Heap, E_Stack); end Setup_Array_Access; ----------------------------------------------------------------------- procedure Process_Positional_Argument_Association (Node : in STree.SyntaxNode; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in Dictionary.Dict; --# in STree.Table; --# in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# Dictionary.Dict, --# E_Stack, --# Node, --# STree.Table, --# VCG_Heap; is Expression_Cell : Cells.Cell; Conversion_Target_Type, Conversion_Source_Type : Dictionary.Symbol; begin CStacks.PopOff (VCG_Heap, E_Stack, Expression_Cell); case Cells.Get_Kind (VCG_Heap, CStacks.Top (VCG_Heap, E_Stack)) is when Cell_Storage.Pending_Function => -- We may need to convert the actual parameter by inserting some inherit -- dereferences in front of it; conversion is required if we have called -- an inherited root function. The parameter in this case must be an -- object.
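-- For example (a sketch): if the called function is declared for a root
-- tagged type T and the actual O is of a type extension of T, the actual is
-- rewritten as fld_inherit (o) (one dereference per level of extension) so
-- that the argument matches the type expected by the root function.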
ConvertTaggedActualIfNecessary (Cells.Get_Symbol_Value (VCG_Heap, CStacks.Top (VCG_Heap, E_Stack)), VCG_Heap, Expression_Cell); -- function sym InsertParameterInNextFreeSlot (CStacks.Top (VCG_Heap, E_Stack), Expression_Cell, VCG_Heap); when Cell_Storage.List_Function => InsertParameterInNextFreeSlot (CStacks.Top (VCG_Heap, E_Stack), Expression_Cell, VCG_Heap); when Cell_Storage.Fixed_Var => Conversion_Source_Type := STree.NodeSymbol (Node); Conversion_Target_Type := Cells.Get_Symbol_Value (VCG_Heap, CStacks.Top (VCG_Heap, E_Stack)); -- assume integer numeric conversion for now CStacks.Pop (VCG_Heap, E_Stack); -- get rid of type mark CStacks.Push (VCG_Heap, Expression_Cell, E_Stack); -- restore expression -- insert trunc function if needed if Dictionary.TypeIsReal (Conversion_Source_Type) and then (Dictionary.TypeIsInteger (Conversion_Target_Type) or else IsModularType (Conversion_Target_Type)) then PushFunction (Cell_Storage.Trunc_Function, VCG_Heap, E_Stack); end if; when others => -- must be dealing with first indexed expression of array access Setup_Array_Access (E_Stack => E_Stack, VCG_Heap => VCG_Heap); InsertParameterInNextFreeSlot (CStacks.Top (VCG_Heap, E_Stack), Expression_Cell, VCG_Heap); end case; end Process_Positional_Argument_Association; ----------------------------------------------------------------------- procedure Process_Named_Argument_Association (Node : in STree.SyntaxNode; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out Statistics.TableUsage; --# derives E_Stack from *, --# VCG_Heap & --# Statistics.TableUsage from *, --# Dictionary.Dict, --# E_Stack, --# VCG_Heap & --# VCG_Heap from *, --# Dictionary.Dict, --# E_Stack, --# LexTokenManager.State, --# Node, --# STree.Table; is Insert_Point, Expression_Cell : Cells.Cell; Function_Sym : Dictionary.Symbol; Last_One : Boolean; ---------------------------------------------------------------- function Find_Identifier (Node : STree.SyntaxNode) return STree.SyntaxNode --# global in STree.Table; is Ident_Node : STree.SyntaxNode; begin if STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => Node)) = SP_Symbols.annotation_simple_name then Ident_Node := STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => Node)); else Ident_Node := STree.Child_Node (Current_Node => STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node))); end if; return Ident_Node; end Find_Identifier; -------------------------------------------------------------- -- This function has an implicit precondition, that the subprogram -- does have parameters and that the name passed identifies one of them -- this will be True because when VCs are generated, we know that the code -- is well-formed. Therefore the flow error can be ignored. 
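-- For example (hypothetical): for
--    function F (A, B : Integer) return Integer;
-- and the named call F (B => Y, A => X) in an annotation,
-- Get_Param_Number returns 2 for "B" and 1 for "A", so each actual is
-- inserted into the slot of the formal parameter that it names.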
function Get_Param_Number (Name : in LexTokenManager.Lex_String; Function_Sym : in Dictionary.Symbol) return Positive --# global in Dictionary.Dict; --# in LexTokenManager.State; is It : Dictionary.Iterator; Sym : Dictionary.Symbol; Number : Positive; begin It := Dictionary.FirstSubprogramParameter (Function_Sym); Number := 1; SystemErrors.RT_Assert (C => not Dictionary.IsNullIterator (It), Sys_Err => SystemErrors.Precondition_Failure, Msg => "Can't find first subprogram parameter in Build_Annotation_Expression.Get_Param_Number"); loop Sym := Dictionary.CurrentSymbol (It); exit when LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Dictionary.GetSimpleName (Sym), Lex_Str2 => Name) = LexTokenManager.Str_Eq; It := Dictionary.NextSymbol (It); Number := Number + 1; end loop; return Number; end Get_Param_Number; begin -- Process_Named_Argument_Association -- we must be dealing with a function call CStacks.PopOff (VCG_Heap, E_Stack, Expression_Cell); Function_Sym := Cells.Get_Symbol_Value (VCG_Heap, CStacks.Top (VCG_Heap, E_Stack)); -- We may need to convert the actual parameter by inserting some inherit -- dereferences in front of it; conversion is required if we have called -- an inherited root function. The parameter in this case must be an -- object. ConvertTaggedActualIfNecessary (Function_Sym, VCG_Heap, Expression_Cell); CalculateInsertPoint (VCG_Heap, E_Stack, Get_Param_Number (Name => STree.Node_Lex_String (Node => Find_Identifier (Node => Node)), Function_Sym => Function_Sym), -- to get Insert_Point, Last_One); if Last_One then SetRightArgument (Insert_Point, Expression_Cell, VCG_Heap); else SetLeftArgument (Insert_Point, Expression_Cell, VCG_Heap); end if; end Process_Named_Argument_Association; ----------------------------------------------------------------------- -- This procedure is called during the "up-loop" of -- DAG.Build_Annotation_Expression once all of the arguments -- (with positional or named association) of a function call in an annotation -- have been processed by Process_Positional_Argument_Association or -- Process_Named_Argument_Association respectively. -- A function call is identified by a Cell Kind of Pending_Function on the -- Expression stack and an array access by a Cell Kind of List_Function. -- It completes the model of a function call or an array access started -- on the down loop by Setup_Function_Call or Setup_Array_Access. -- A function call model is completed by: -- changing the DAG symbol kind to a proof function whether it is an Ada -- or a proof function and calling Insert_Guarded_Function_Definition -- to initiate the processing of the pre and return annotations of the -- function. -- An array access model is completed by: -- translating the array access to an FDL element function -- associating the index type with the array access for potential use with -- translating unconstrained array attributes.
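-- For example (a sketch): an indexed component A (I, J) appearing in an
-- annotation is modelled by the FDL term element (a, [i, j]); the array's
-- component type is recorded on the element cell so that further indexing
-- of an array of arrays or an array of records can be modelled in the same
-- way.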
procedure Process_Name_Argument_List (Direction : in Loop_Direction; Scope : in Dictionary.Scopes; Calling_Scope : in Dictionary.Scopes; Force_Abstract : in Boolean; Calling_Function : in out Cells.Cell; Current_Unit : in out Dictionary.Symbol; Implicit_Var : in out Dictionary.Symbol; Current_Instantiation : in out Dictionary.Symbol; Start_Node : in out STree.SyntaxNode; Next_Node : in out STree.SyntaxNode; Last_Node : in out STree.SyntaxNode; Called_Functions : in out Symbol_Set.T; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in Generate_Function_Instantiations; --# in STree.Table; --# in out Dictionary.Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out VC_Contains_Reals; --# in out VC_Failure; --# derives Called_Functions, --# Calling_Function, --# Current_Instantiation, --# Current_Unit, --# Last_Node, --# Next_Node, --# Start_Node from *, --# Called_Functions, --# Calling_Scope, --# Dictionary.Dict, --# E_Stack, --# Force_Abstract, --# Generate_Function_Instantiations, --# VCG_Heap & --# Dictionary.Dict, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# VC_Contains_Reals, --# VC_Failure from *, --# Called_Functions, --# Calling_Function, --# Calling_Scope, --# Dictionary.Dict, --# E_Stack, --# Force_Abstract, --# Generate_Function_Instantiations, --# LexTokenManager.State, --# VCG_Heap & --# E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# Called_Functions, --# Calling_Function, --# Calling_Scope, --# Current_Instantiation, --# Current_Unit, --# Dictionary.Dict, --# Direction, --# E_Stack, --# Force_Abstract, --# Generate_Function_Instantiations, --# Implicit_Var, --# Last_Node, --# LexTokenManager.State, --# Next_Node, --# Scope, --# Start_Node, --# STree.Table, --# VCG_Heap & --# Implicit_Var from *, --# Called_Functions, --# Calling_Function, --# Calling_Scope, --# Dictionary.Dict, --# E_Stack, --# Force_Abstract, --# Generate_Function_Instantiations, --# LexTokenManager.State, --# STree.Table, --# VCG_Heap; --# is Temp : Cells.Cell; Type_Sym : Dictionary.Symbol; Concrete_Function : Dictionary.Symbol; FDL_Function : Dictionary.Symbol; Level_Of_Abstraction : Dictionary.Abstractions; begin case Cells.Get_Kind (VCG_Heap, CStacks.Top (VCG_Heap, E_Stack)) is when Cell_Storage.Pending_Function => -- Set_Up_Function_Call has ensured that the DAG function -- Symbol has the correct implicit view of the function Cells.Set_Kind (Heap => VCG_Heap, CellName => CStacks.Top (VCG_Heap, E_Stack), KindConst => Cell_Storage.Proof_Function); -- We need the concrete function symbol and the -- correct level of abstraction. -- An embedded function call is not forced to be abstract. Get_Concrete_Function_And_Abstraction (Function_Symbol => Cells.Get_Symbol_Value (Heap => VCG_Heap, CellName => CStacks.Top (VCG_Heap, E_Stack)), Force_Abstract => Force_Abstract, Scope => Calling_Scope, Concrete_Function => Concrete_Function, FDL_Function => FDL_Function, Level_Of_Abstraction => Level_Of_Abstraction); -- Note: If a called function has an unconstrained array -- as a parameter then it is not (easily) possible to -- work out what kind of type an argument should be in; -- thus we do not instantiate it (for now). if Generate_Function_Instantiations and not Function_Has_Unconstrained_Parameter (FDL_Function) then -- Ensure that the function symbol in the DAG has the -- correct implicit view. 
Cells.Set_Symbol_Value (Heap => VCG_Heap, CellName => CStacks.Top (VCG_Heap, E_Stack), Sym => FDL_Function); -- Call Insert_Gaurded_Function_Definition for -- subsequent building of the graphs for the pre and -- return annotations of the function. Insert_Guarded_Function_Definition (Function_Call => CStacks.Top (VCG_Heap, E_Stack), Scope => Scope, Calling_Scope => Calling_Scope, Concrete_Function => Concrete_Function, Abstraction => Level_Of_Abstraction, Direction => Direction, Called_Functions => Called_Functions, Instantiated_Subprogram => Current_Instantiation, Current_Unit => Current_Unit, Implicit_Var => Implicit_Var, Start_Node => Start_Node, Last_Node => Last_Node, Next_Node => Next_Node, Calling_Function => Calling_Function, E_Stack => E_Stack, VCG_Heap => VCG_Heap); end if; when Cell_Storage.List_Function => -- complete element model and store type so far in case of further -- indexing (to handle array of arrays or array of records case) CStacks.PopOff (VCG_Heap, E_Stack, Temp); Type_Sym := Dictionary.GetArrayComponent (GetTOStype (VCG_Heap, E_Stack)); CStacks.Push (VCG_Heap, Temp, E_Stack); PushOperator (Binary, SP_Symbols.comma, VCG_Heap, E_Stack); PushFunction (Cell_Storage.Element_Function, VCG_Heap, E_Stack); Cells.Set_Symbol_Value (VCG_Heap, CStacks.Top (VCG_Heap, E_Stack), Type_Sym); when others => null; end case; end Process_Name_Argument_List; ----------------------------------------------------------------------- -- Handling Update Syntax in Annotations ----------------------------------------------------------------------- procedure Down_Process_Store (L_Scope : in Dictionary.Scopes; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# Dictionary.Dict, --# E_Stack, --# L_Scope, --# VCG_Heap; is Type_Sym : Dictionary.Symbol; begin Type_Sym := GetTOStype (VCG_Heap, E_Stack); -- Handle array and record updates differently, arrays need stuff for store-lists if Dictionary.IsArrayTypeMark (Type_Sym, L_Scope) then Setup_Array_Access (E_Stack => E_Stack, VCG_Heap => VCG_Heap); -- this leaves us with update subject on 2nd TOS and empty list on TOS end if; -- no action required for record end Down_Process_Store; ------------------------------------------------------------------------ procedure Down_Process_Store_List (Node : in STree.SyntaxNode; E_Stack : in CStacks.Stack; VCG_Heap : in Cells.Heap_Record; Next_Node : out STree.SyntaxNode) --# global in STree.Table; --# derives Next_Node from E_Stack, --# Node, --# STree.Table, --# VCG_Heap; is begin if Cells.Get_Kind (VCG_Heap, CStacks.Top (VCG_Heap, E_Stack)) = Cell_Storage.List_Function then -- we are doing an array Next_Node := STree.Child_Node (Current_Node => Node); else -- must be record so prune walk here Next_Node := STree.NullNode; end if; end Down_Process_Store_List; ----------------------------------------------------------------------- procedure Up_Process_Store (Node : in STree.SyntaxNode; L_Scope : in Dictionary.Scopes; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# CommandLineData.Content, --# Dictionary.Dict, --# E_Stack, --# LexTokenManager.State, --# L_Scope, --# Node, --# STree.Table, --# VCG_Heap; is Temp, Up_Cell, 
Comma_Cell : Cells.Cell; Original_Object_Being_Updated : Cells.Cell; Local_Copy_Of_Object_Being_Updated : Cells.Cell; Object_Being_Updated : Cells.Cell; Type_Sym, Field_Sym, Field_Sym_For_Inherit_Deref_Loop : Dictionary.Symbol; Field_Name : LexTokenManager.Lex_String; begin -- for an array update we have exp, list, updated_obj on stack -- for a record we have exp, updated_obj on stack CStacks.PopOff (VCG_Heap, E_Stack, Temp); --this is assigned expression if Cells.Get_Kind (VCG_Heap, CStacks.Top (VCG_Heap, E_Stack)) = Cell_Storage.List_Function then -- we are doing an array CStacks.Push (VCG_Heap, Temp, E_Stack); PushOperator (Binary, SP_Symbols.comma, VCG_Heap, E_Stack); -- now obtain type of whole composite object and store in update -- function cell so that updates of updates will work CStacks.PopOff (VCG_Heap, E_Stack, Temp); -- remove to get access to object Type_Sym := GetTOStype (VCG_Heap, E_Stack); CStacks.Push (VCG_Heap, Temp, E_Stack); PushOperator (Binary, SP_Symbols.comma, VCG_Heap, E_Stack); PushFunction (Cell_Storage.Update_Function, VCG_Heap, E_Stack); Cells.Set_Symbol_Value (VCG_Heap, CStacks.Top (VCG_Heap, E_Stack), Type_Sym); else -- we are doing a record ----------------------------------------------------- -- Get the root type here in case the updated object is of a record -- subtype. Type_Sym := Dictionary.GetRootType (GetTOStype (VCG_Heap, E_Stack)); CStacks.PopOff (VCG_Heap, E_Stack, Object_Being_Updated); Original_Object_Being_Updated := Object_Being_Updated; -- because Object_Being_Updated changes later and we need a copy Field_Name := STree.Node_Lex_String (Node => STree.Last_Child_Of (Start_Node => Node)); Field_Sym := Dictionary.LookupSelectedItem (Type_Sym, Field_Name, L_Scope, Dictionary.ProofContext); SystemErrors.RT_Assert (C => not Dictionary.Is_Null_Symbol (Field_Sym), Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "DAG.Build_Annotation_Expression.Up_Process_Store : Program Error"); -- The field we are updating may be inherited from an earlier tagged types. -- So insert as many fld_inherit()s in front as needed ModelInheritedFieldsOfTaggedRecord (Field_Name, Type_Sym, VCG_Heap, Object_Being_Updated); -- assemble upf_field (Object_Being_Updated, OriginalExpression) CStacks.Push (VCG_Heap, Object_Being_Updated, E_Stack); CStacks.Push (VCG_Heap, Temp, E_Stack); PushOperator (Binary, SP_Symbols.comma, VCG_Heap, E_Stack); CreateUpfCell (Up_Cell, VCG_Heap, Field_Sym, Dictionary.GetSimpleName (Field_Sym)); SetRightArgument (Up_Cell, CStacks.Top (VCG_Heap, E_Stack), VCG_Heap); CStacks.Pop (VCG_Heap, E_Stack); CStacks.Push (VCG_Heap, Up_Cell, E_Stack); -- TOS now has an upf_field function that represents the most direct update of the field -- for example O[F=>exp] with no inheritance gives: upf_f (o, exp); -- if F is inherited one level we get: upf_f (fld_inherit (o), exp) -- and two levels gives: upf_f (fld_inherit (fld_inherit (o), exp)) -- -- We now need to prefix this expression with some upf_ functions: -- First case required no prefix. -- Second case wants: "upf_inherit (o, " -- Third wants: "upf_inherit (o, upf_inherit (fld_inherit (o), " etc. -- The number of prefixes required depends on ther inheritance depth at this point. -- Inner loop puts on the fld_inherits needed. Loop not entered in no inheritance. -- After the inner loop we put on the upf_inherit function needed. -- We loop backwards so we can use I to tell us how many inherit derefs we need in an -- embedded loop. for I in reverse Integer range 1 .. 
Dictionary.GetInheritDepth (Field_Name, Type_Sym) loop -- Make copy of Object_Being_Updated because cell it is in gets changed each time we add inherit de-refs Local_Copy_Of_Object_Being_Updated := Original_Object_Being_Updated; CreateOpCell (Comma_Cell, VCG_Heap, SP_Symbols.comma); SetRightArgument (Comma_Cell, CStacks.Top (VCG_Heap, E_Stack), VCG_Heap); -- Insert n-1 inherit dereferences in front of Local_Copy_Of_Object_Being_Updated Field_Sym_For_Inherit_Deref_Loop := Type_Sym; for J in Integer range 1 .. (I - 1) loop Field_Sym_For_Inherit_Deref_Loop := Dictionary.GetType (Dictionary.CurrentSymbol (Dictionary.FirstRecordComponent (Field_Sym_For_Inherit_Deref_Loop))); -- Local_Copy_Of_Object_Being_Updated gets changed by following call InsertInheritDeReference (Field_Sym_For_Inherit_Deref_Loop, VCG_Heap, Local_Copy_Of_Object_Being_Updated); end loop; SetLeftArgument (Comma_Cell, Local_Copy_Of_Object_Being_Updated, VCG_Heap); -- Now put upf_inherit on front CreateUpfCell (Up_Cell, VCG_Heap, Dictionary.LookupSelectedItem (Field_Sym_For_Inherit_Deref_Loop, LexTokenManager.Inherit_Token, L_Scope, Dictionary.ProofContext), LexTokenManager.Inherit_Token); SetRightArgument (Up_Cell, Comma_Cell, VCG_Heap); CStacks.Pop (VCG_Heap, E_Stack); -- old expression CStacks.Push (VCG_Heap, Up_Cell, E_Stack); -- expression with one level of prefix end loop; end if; end Up_Process_Store; ----------------------------------------------------------------------- procedure Up_Process_Store_List (E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# derives E_Stack, --# VCG_Heap from E_Stack, --# VCG_Heap; is Expression_Cell : Cells.Cell; begin --will only be called if array being processed, earlier pruning --will stop us getting here for records CStacks.PopOff (VCG_Heap, E_Stack, Expression_Cell); InsertParameterInNextFreeSlot (CStacks.Top (VCG_Heap, E_Stack), Expression_Cell, VCG_Heap); end Up_Process_Store_List; ----------------------------------------------------------------------- -- only do this if down, right node is expression. 
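-- For example (a sketch): for a qualified expression T'(X + 1) in an
-- annotation the type mark contributes nothing to the proof model, so it is
-- discarded and the DAG for X + 1 is left on the stack.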
procedure Model_Qualified_Expression (Node : in STree.SyntaxNode; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in STree.Table; --# in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# E_Stack, --# Node, --# STree.Table, --# VCG_Heap; is Expression_Cell : Cells.Cell; begin if STree.Syntax_Node_Type (Node => STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node))) = SP_Symbols.annotation_expression then -- discard type indication and return its argument to top of stack; CStacks.PopOff (VCG_Heap, E_Stack, Expression_Cell); -- the topmost stack cell contains the typemark; CStacks.Pop (VCG_Heap, E_Stack); CStacks.Push (VCG_Heap, Expression_Cell, E_Stack); end if; end Model_Qualified_Expression; ---------------------------------------------------------------------- procedure Up_Process_Aggregate_Choice (Node : in STree.SyntaxNode; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in STree.Table; --# in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# E_Stack, --# Node, --# STree.Table, --# VCG_Heap; is Range_Node_Type : SP_Symbols.SP_Symbol; Range_Expression : Cells.Cell; begin Range_Node_Type := STree.Syntax_Node_Type (Node => STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node))); if Range_Node_Type = SP_Symbols.annotation_simple_expression then PushOperator (Binary, SP_Symbols.double_dot, VCG_Heap, E_Stack); elsif Range_Node_Type = SP_Symbols.annotation_range_constraint then TransformRangeConstraint (VCG_Heap, E_Stack); CStacks.PopOff (VCG_Heap, E_Stack, Range_Expression); CStacks.Pop (VCG_Heap, E_Stack); -- discard type mark part of range CStacks.Push (VCG_Heap, Range_Expression, E_Stack); elsif Cells.Get_Kind (VCG_Heap, CStacks.Top (VCG_Heap, E_Stack)) = Cell_Storage.Fixed_Var then -- type mark found TransformTypeName (VCG_Heap, E_Stack); end if; end Up_Process_Aggregate_Choice; --------------------------------------------------------------------- procedure Up_Process_Named_Association_Rep (Node : in STree.SyntaxNode; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in Dictionary.Dict; --# in STree.Table; --# in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# Dictionary.Dict, --# E_Stack, --# Node, --# STree.Table, --# VCG_Heap; is Agg_Exp : Cells.Cell; begin PushOperator (Binary, SP_Symbols.becomes, VCG_Heap, E_Stack); if DoingArrayAggregate (VCG_Heap, E_Stack) then if STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => Node)) = SP_Symbols.annotation_named_association_rep then PushOperator (Binary, SP_Symbols.comma, VCG_Heap, E_Stack); end if; else -- record CStacks.PopOff (VCG_Heap, E_Stack, Agg_Exp); InsertAssociation (CStacks.Top (VCG_Heap, E_Stack), Agg_Exp, VCG_Heap); end if; end Up_Process_Named_Association_Rep; --------------------------------------------------------------------- procedure Up_Process_Named_Record_Component_Association (E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage from *, --# E_Stack, --# VCG_Heap & --# VCG_Heap from *, --# Dictionary.Dict, --# E_Stack; is Agg_Exp : Cells.Cell; begin -- Node is named_record_component_association -- Direction is UP -- TOS is expression to be associated -- 2nd TOS is field name -- 3rd TOS is 
incomplete aggregate being constructed. -- associated field name with expression PushOperator (Binary, SP_Symbols.becomes, VCG_Heap, E_Stack); CStacks.PopOff (VCG_Heap, E_Stack, Agg_Exp); InsertAssociation (CStacks.Top (VCG_Heap, E_Stack), Agg_Exp, VCG_Heap); end Up_Process_Named_Record_Component_Association; --------------------------------------------------------------------- procedure Up_Process_Positional_Record_Component_Association (E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# Dictionary.Dict, --# E_Stack, --# VCG_Heap; is Agg_Exp, Type_Cell : Cells.Cell; begin -- Node is positional_record_component_association -- Direction is UP -- TOS is expression to be associated -- 2nd TOS is incomplete aggregate being constructed. -- 3rd TOS is agggregate counter giving current field number CreateFixedVarCell (Type_Cell, VCG_Heap, Dictionary.GetRecordComponent (AggregateType (VCG_Heap, E_Stack), CurrentFieldOrIndex (VCG_Heap, E_Stack))); CStacks.Push (VCG_Heap, Type_Cell, E_Stack); SwitchAndPush (SP_Symbols.becomes, VCG_Heap, E_Stack); IncCurrentFieldOrIndex (E_Stack, VCG_Heap); CStacks.PopOff (VCG_Heap, E_Stack, Agg_Exp); InsertAssociation (CStacks.Top (VCG_Heap, E_Stack), Agg_Exp, VCG_Heap); end Up_Process_Positional_Record_Component_Association; --------------------------------------------------------------------- procedure Up_Process_Aggregate_Or_Expression (Node : in STree.SyntaxNode; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in Dictionary.Dict; --# in STree.Table; --# in out LexTokenManager.State; --# in out Statistics.TableUsage; --# derives E_Stack, --# LexTokenManager.State, --# Statistics.TableUsage, --# VCG_Heap from *, --# Dictionary.Dict, --# E_Stack, --# LexTokenManager.State, --# Node, --# STree.Table, --# VCG_Heap; is Index_Type : Dictionary.Symbol; Counter_Cell, Attrib_Cell, Type_Cell : Cells.Cell; Counter_String : LexTokenManager.Lex_String; Agg_Exp : Cells.Cell; begin if STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => Node)) = SP_Symbols.annotation_positional_association_rep or else STree.Next_Sibling (Current_Node => Node) /= STree.NullNode then if DoingArrayAggregate (VCG_Heap, E_Stack) then CreateCellKind (Type_Cell, VCG_Heap, Cell_Storage.Fixed_Var); Index_Type := Dictionary.GetArrayIndex (AggregateType (VCG_Heap, E_Stack), 1); Cells.Set_Symbol_Value (VCG_Heap, Type_Cell, Index_Type); CStacks.Push (VCG_Heap, Type_Cell, E_Stack); CreateAttribValueCell (Attrib_Cell, VCG_Heap, LexTokenManager.First_Token); CStacks.Push (VCG_Heap, Attrib_Cell, E_Stack); PushOperator (Binary, SP_Symbols.apostrophe, VCG_Heap, E_Stack); if Dictionary.TypeIsEnumeration (Index_Type) then for I in Integer range 2 .. 
CurrentFieldOrIndex (VCG_Heap, E_Stack) loop --# accept F, 41, "Stable expression here OK"; if Dictionary.TypeIsBoolean (Index_Type) then PushOperator (Unary, SP_Symbols.RWnot, VCG_Heap, E_Stack); else PushFunction (Cell_Storage.Succ_Function, VCG_Heap, E_Stack); end if; --# end accept; end loop; else -- index type is numeric discrete if CurrentFieldOrIndex (VCG_Heap, E_Stack) > 1 then LexTokenManager.Insert_Nat (N => CurrentFieldOrIndex (VCG_Heap, E_Stack) - 1, Lex_Str => Counter_String); CreateManifestConstCell (Counter_Cell, VCG_Heap, Counter_String); CStacks.Push (VCG_Heap, Counter_Cell, E_Stack); PushOperator (Binary, SP_Symbols.plus, VCG_Heap, E_Stack); end if; end if; PushFunction (Cell_Storage.List_Function, VCG_Heap, E_Stack); else -- record aggregate CreateFixedVarCell (Type_Cell, VCG_Heap, Dictionary.GetRecordComponent (AggregateType (VCG_Heap, E_Stack), CurrentFieldOrIndex (VCG_Heap, E_Stack))); CStacks.Push (VCG_Heap, Type_Cell, E_Stack); end if; SwitchAndPush (SP_Symbols.becomes, VCG_Heap, E_Stack); IncCurrentFieldOrIndex (E_Stack, VCG_Heap); if DoingArrayAggregate (VCG_Heap, E_Stack) then if STree.Next_Sibling (Current_Node => Node) = STree.NullNode then PushOperator (Binary, SP_Symbols.comma, VCG_Heap, E_Stack); end if; else -- record CStacks.PopOff (VCG_Heap, E_Stack, Agg_Exp); InsertAssociation (CStacks.Top (VCG_Heap, E_Stack), Agg_Exp, VCG_Heap); end if; end if; end Up_Process_Aggregate_Or_Expression; --------------------------------------------------------------------- procedure Up_Process_Component_Association (Node : in STree.SyntaxNode; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in STree.Table; --# in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# E_Stack, --# Node, --# STree.Table, --# VCG_Heap; is begin if STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => Node))) /= STree.NullNode then SwitchAndPush (SP_Symbols.comma, VCG_Heap, E_Stack); end if; end Up_Process_Component_Association; --------------------------------------------------------------------- procedure Up_Process_Aggregate (E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# E_Stack, --# VCG_Heap; is Agg_Cell : Cells.Cell; begin -- Tidy up expression stack -- At this point the stack is rather confused (even for an ex-FORTH programmer). -- If we are doing a record then TOS is the IncompleteAggregate function and its arguments, -- 2nd TOS is the aggregate counter used for positional association. 
-- -- If we are doing an array then TOS is the comma-delimited list of arguments to the MkAggregate func, -- 2nd TOS is the IncompleteAggregate function itself, -- 3rd TOS is the aggregate counter -- CStacks.PopOff (VCG_Heap, E_Stack, Agg_Cell); -- hold the aggregate expression or list if Cells.Get_Kind (VCG_Heap, CStacks.Top (VCG_Heap, E_Stack)) = Cell_Storage.Aggregate_Counter then -- we are doing a record and just need to get rid of the counter CStacks.Pop (VCG_Heap, E_Stack); -- get rid of counter else -- we are doing an array and TOS is the IncompleteArray function which needs to be connected to -- the comma-delimited list SetRightArgument (CStacks.Top (VCG_Heap, E_Stack), Agg_Cell, VCG_Heap); -- hold the now complete aggregate expression and then get rid of the exposed counter CStacks.PopOff (VCG_Heap, E_Stack, Agg_Cell); CStacks.Pop (VCG_Heap, E_Stack); end if; -- Convert aggregate to a finished MkAggregate function Cells.Set_Kind (VCG_Heap, Agg_Cell, Cell_Storage.Mk_Aggregate); -- Finally, restore aggregate DAG to TOS CStacks.Push (VCG_Heap, Agg_Cell, E_Stack); end Up_Process_Aggregate; --------------------------------------------------------------------- -- Attribute Processing -- --------------------------------------------------------------------- procedure Down_Process_Attribute_Ident (Node : in STree.SyntaxNode; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in STree.Table; --# in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# E_Stack, --# Node, --# STree.Table, --# VCG_Heap; is DAG_Cell : Cells.Cell; begin CreateAttribValueCell (DAG_Cell, VCG_Heap, STree.Node_Lex_String (Node => Node)); CStacks.Push (VCG_Heap, DAG_Cell, E_Stack); PushOperator (Binary, SP_Symbols.apostrophe, VCG_Heap, E_Stack); end Down_Process_Attribute_Ident; --------------------------------------------------------------------- procedure Up_Process_Attribute_Designator (Node : in STree.SyntaxNode; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in Dictionary.Dict; --# in STree.Table; --# in out LexTokenManager.State; --# in out Statistics.TableUsage; --# derives E_Stack, --# LexTokenManager.State, --# Statistics.TableUsage, --# VCG_Heap from *, --# Dictionary.Dict, --# E_Stack, --# LexTokenManager.State, --# Node, --# STree.Table, --# VCG_Heap; is Expression_Found, Base_Found : Boolean; Temp_Cell, Prefix_Cell, Attrib_Cell, Expression_Cell, Second_Expression_Cell : Cells.Cell; Lex_Str, Attrib_Name : LexTokenManager.Lex_String; Prefix_Type : Dictionary.Symbol; Expression_Node : STree.SyntaxNode; ------------------------------------------------------- procedure Eliminate_Base (Prefix_Cell, TOS : in Cells.Cell; VCG_Heap : in out Cells.Heap_Record) --# derives VCG_Heap from *, --# Prefix_Cell, --# TOS; is Base_Cell : Cells.Cell; begin Base_Cell := LeftPtr (VCG_Heap, TOS); if Cells.Get_Kind (VCG_Heap, Base_Cell) = Cell_Storage.Op then -- 'Base exists Cells.Dispose_Of_Cell (VCG_Heap, RightPtr (VCG_Heap, Base_Cell)); Cells.Dispose_Of_Cell (VCG_Heap, Base_Cell); SetLeftArgument (TOS, Prefix_Cell, VCG_Heap); end if; end Eliminate_Base; ------------------------------------------------------- procedure Model_Simple_Function_Attribute (Expression_Cell, Attrib_Cell, Prefix_Cell : in Cells.Cell; Strip_To_Root_Type : in Boolean; E_Stack : in CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in Dictionary.Dict; --# derives VCG_Heap from *, --# Attrib_Cell, --# Dictionary.Dict, --# 
Expression_Cell, --# E_Stack, --# Prefix_Cell, --# Strip_To_Root_Type; is begin Eliminate_Base (Prefix_Cell => Prefix_Cell, TOS => CStacks.Top (VCG_Heap, E_Stack), VCG_Heap => VCG_Heap); -- Most attributes are modelled in FDL by reference to the -- underlying root type. Most notably, 'Valid is always -- in terms of the indicated sub-type (see LRM 13.9.1(2)) so we need -- the option here to use the Root Type or not. if Strip_To_Root_Type then Cells.Set_Symbol_Value (VCG_Heap, Prefix_Cell, Dictionary.GetRootType (Cells.Get_Symbol_Value (VCG_Heap, Prefix_Cell))); end if; Cells.Set_Kind (VCG_Heap, Attrib_Cell, Cell_Storage.Attrib_Function); SetRightArgument (Attrib_Cell, Expression_Cell, VCG_Heap); end Model_Simple_Function_Attribute; ------------------------------------------------------- procedure Model_Min_Max (Expression_Cell, Second_Expression_Cell, Attrib_Cell, Prefix_Cell : in Cells.Cell; E_Stack : in CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage from *, --# VCG_Heap & --# VCG_Heap from *, --# Attrib_Cell, --# Dictionary.Dict, --# Expression_Cell, --# E_Stack, --# Prefix_Cell, --# Second_Expression_Cell; is Comma_Cell : Cells.Cell; begin CreateOpCell (Comma_Cell, VCG_Heap, SP_Symbols.comma); Eliminate_Base (Prefix_Cell => Prefix_Cell, TOS => CStacks.Top (VCG_Heap, E_Stack), VCG_Heap => VCG_Heap); Cells.Set_Symbol_Value (VCG_Heap, Prefix_Cell, Dictionary.GetRootType (Cells.Get_Symbol_Value (VCG_Heap, Prefix_Cell))); Cells.Set_Kind (VCG_Heap, Attrib_Cell, Cell_Storage.Attrib_Function); SetLeftArgument (Comma_Cell, Expression_Cell, VCG_Heap); SetRightArgument (Comma_Cell, Second_Expression_Cell, VCG_Heap); SetRightArgument (Attrib_Cell, Comma_Cell, VCG_Heap); end Model_Min_Max; ------------------------------------------------------- procedure Model_Length_Attribute (E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in Dictionary.Dict; --# in out LexTokenManager.State; --# in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# Dictionary.Dict, --# E_Stack, --# LexTokenManager.State, --# VCG_Heap & --# LexTokenManager.State from *; is One_Cell, High_End_Cell, Low_End_Cell, Pos_Cell : Cells.Cell; Type_Sym : Dictionary.Symbol; Lex_Str : LexTokenManager.Lex_String; begin CStacks.PopOff (VCG_Heap, E_Stack, High_End_Cell); Structures.CopyStructure (VCG_Heap, High_End_Cell, Low_End_Cell); Cells.Set_Lex_Str (VCG_Heap, RightPtr (VCG_Heap, High_End_Cell), LexTokenManager.Last_Token); Cells.Set_Lex_Str (VCG_Heap, RightPtr (VCG_Heap, Low_End_Cell), LexTokenManager.First_Token); Type_Sym := Cells.Get_Symbol_Value (VCG_Heap, LeftPtr (VCG_Heap, High_End_Cell)); if Dictionary.IsTypeMark (Type_Sym) and then Dictionary.TypeIsEnumeration (Type_Sym) then CreateAttribFunctionCell (LexTokenManager.Pos_Token, Type_Sym, VCG_Heap, Pos_Cell); SetRightArgument (RightPtr (VCG_Heap, Pos_Cell), High_End_Cell, VCG_Heap); High_End_Cell := Pos_Cell; CreateAttribFunctionCell (LexTokenManager.Pos_Token, Type_Sym, VCG_Heap, Pos_Cell); SetRightArgument (RightPtr (VCG_Heap, Pos_Cell), Low_End_Cell, VCG_Heap); Low_End_Cell := Pos_Cell; end if; CStacks.Push (VCG_Heap, High_End_Cell, E_Stack); CStacks.Push (VCG_Heap, Low_End_Cell, E_Stack); PushOperator (Binary, SP_Symbols.minus, VCG_Heap, E_Stack); LexTokenManager.Insert_Nat (N => 1, Lex_Str => Lex_Str); CreateManifestConstCell (One_Cell, VCG_Heap, Lex_Str); CStacks.Push (VCG_Heap, One_Cell, 
E_Stack); PushOperator (Binary, SP_Symbols.plus, VCG_Heap, E_Stack); end Model_Length_Attribute; ------------------------------------------------------- procedure Model_Tail_Function_Attribute (Expression_Cell, Attrib_Cell : in Cells.Cell; VCG_Heap : in out Cells.Heap_Record) --# derives VCG_Heap from *, --# Attrib_Cell, --# Expression_Cell; is begin Cells.Set_Kind (VCG_Heap, Attrib_Cell, Cell_Storage.Attrib_Function); SetRightArgument (Attrib_Cell, Expression_Cell, VCG_Heap); end Model_Tail_Function_Attribute; ------------------------------------------------------- procedure Model_Append_Function_Attribute (Expression_Cell, Second_Expression_Cell, Attrib_Cell : in Cells.Cell; VCG_Heap : in out Cells.Heap_Record) --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage from *, --# VCG_Heap & --# VCG_Heap from *, --# Attrib_Cell, --# Expression_Cell, --# Second_Expression_Cell; is Comma_Cell : Cells.Cell; begin CreateOpCell (Comma_Cell, VCG_Heap, SP_Symbols.comma); Cells.Set_Kind (VCG_Heap, Attrib_Cell, Cell_Storage.Attrib_Function); SetLeftArgument (Comma_Cell, Expression_Cell, VCG_Heap); SetRightArgument (Comma_Cell, Second_Expression_Cell, VCG_Heap); SetRightArgument (Attrib_Cell, Comma_Cell, VCG_Heap); end Model_Append_Function_Attribute; ------------------------------------------------------- procedure Model_Mod_Function_Attribute (Expression_Cell, Prefix_Cell : in Cells.Cell; Type_Sym : in Dictionary.Symbol; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# Dictionary.Dict, --# Expression_Cell, --# E_Stack, --# Prefix_Cell, --# Type_Sym, --# VCG_Heap; is Type_Cell, Attrib_Value_Cell, Mod_Op_Cell, Tick_Cell, Temp_Cell : Cells.Cell; begin Eliminate_Base (Prefix_Cell => Prefix_Cell, TOS => CStacks.Top (VCG_Heap, E_Stack), VCG_Heap => VCG_Heap); -- Create the DAG for the the functional attribute. -- The root of the DAG is "Mod", the left child -- is the attribute's argument and the right child is the -- DAG representing T'modulus. -- Root "Mod" cell. CreateOpCell (Mod_Op_Cell, VCG_Heap, SP_Symbols.RWmod); -- Left child SetLeftArgument (Mod_Op_Cell, Expression_Cell, VCG_Heap); -- Right child which represents T'Modulus. CreateOpCell (Tick_Cell, VCG_Heap, SP_Symbols.apostrophe); SetRightArgument (Mod_Op_Cell, Tick_Cell, VCG_Heap); CreateFixedVarCell (Type_Cell, VCG_Heap, Dictionary.GetRootType (Type_Sym)); SetLeftArgument (Tick_Cell, Type_Cell, VCG_Heap); CreateAttribValueCell (Attrib_Value_Cell, VCG_Heap, LexTokenManager.Modulus_Token); SetRightArgument (Tick_Cell, Attrib_Value_Cell, VCG_Heap); -- Update the E_Stack after processing the attribute. 
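-- For example (a sketch): for a modular type M with M'Modulus = 256, the
-- attribute reference M'Mod (X) in an annotation is modelled as the DAG for
-- X mod M'Modulus, so M'Mod (300) evaluates to 300 mod 256 = 44.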
CStacks.PopOff (VCG_Heap, E_Stack, Temp_Cell); Cells.Dispose_Of_Cell (VCG_Heap, LeftPtr (VCG_Heap, Temp_Cell)); Cells.Dispose_Of_Cell (VCG_Heap, RightPtr (VCG_Heap, Temp_Cell)); Cells.Dispose_Of_Cell (VCG_Heap, Temp_Cell); CStacks.Push (VCG_Heap, Mod_Op_Cell, E_Stack); end Model_Mod_Function_Attribute; begin -- Up_Process_Attribute_Designator -- If there is an expression associated with the attribute it will be TOS -- Below it (or TOS if there is no expression) is a DAG representing the attribute -- move to where the first expression would be if there is one Expression_Node := STree.Child_Node (Current_Node => STree.Last_Sibling_Of (Start_Node => STree.Child_Node (Current_Node => Node))); --# assert True; -- check for second expression if Expression_Node /= STree.NullNode and then STree.Next_Sibling (Current_Node => Expression_Node) /= STree.NullNode then -- There is a 2nd expression associated with attribute CStacks.PopOff (VCG_Heap, E_Stack, Second_Expression_Cell); else Second_Expression_Cell := Cells.Null_Cell; end if; --# assert True; -- then check for first expression if Expression_Node /= STree.NullNode then -- There is a 1st expression associated with attribute CStacks.PopOff (VCG_Heap, E_Stack, Expression_Cell); Expression_Found := True; else Expression_Cell := Cells.Null_Cell; Expression_Found := False; end if; --# assert True; Prefix_Cell := LeftPtr (VCG_Heap, CStacks.Top (VCG_Heap, E_Stack)); if Cells.Get_Kind (VCG_Heap, Prefix_Cell) = Cell_Storage.Op then --must be a 'BASE Prefix_Cell := LeftPtr (VCG_Heap, Prefix_Cell); Base_Found := True; else Base_Found := False; end if; -- If no expression forms part of the attribute we -- now need to make a copy of the prefix for possible use in modelling 'valid. -- This is because the fdl model of valid takes an argument which is created from the -- prefix to the attribute. By the time we know we are modelling 'valid this prefix -- subtree may have been patched with type information extracted from the syntax tree --# assert True; if not Expression_Found then Structures.CopyStructure (VCG_Heap, Prefix_Cell, -- to get Expression_Cell); end if; Attrib_Cell := RightPtr (VCG_Heap, CStacks.Top (VCG_Heap, E_Stack)); Attrib_Name := Cells.Get_Lex_Str (VCG_Heap, Attrib_Cell); -- Recover the type planted in the syntax tree by the wellformation checker. -- For all cases except attributes of unconstrained objects, this will be a type mark. -- For attributes of constrained array objects the wffs will have resolved all such -- things as dimension number arguments and will have planted the appropriate type. -- For unconstrained objects only, the wffs will plant a symbol of a special kind -- (ParameterConstraintSymbol) associated with the object. This special symbol kind -- behaves for all practical purposes like a type except that we typically don't -- know its bounds. Prefix_Type := STree.NodeSymbol (Node); Cells.Set_Kind (VCG_Heap, Prefix_Cell, Cell_Storage.Fixed_Var); -- Note that we only do this if the attribute is not a proof attribute (e.g. 'Tail or 'Append) -- because if it is then we want the prefix to be the object not its type.
In this case -- we just want to convert the prefix to a fixed var cell --# assert True; if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Tail_Token) /= LexTokenManager.Str_Eq and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Append_Token) /= LexTokenManager.Str_Eq then -- transform prefix cell to be cell just containing the prefix type Cells.Set_Symbol_Value (VCG_Heap, Prefix_Cell, Prefix_Type); end if; -- If prefix is unconstrained object then make cell an UnconstrainedAttributePrefix to allow special -- formal-to-actual substitution in procedure and function call pre con and proc call post con checks if Dictionary.IsParameterConstraint (Prefix_Type) then Cells.Set_Kind (VCG_Heap, Prefix_Cell, Cell_Storage.Unconstrained_Attribute_Prefix); end if; -- make leaf SetLeftArgument (Prefix_Cell, Cells.Null_Cell, VCG_Heap); SetRightArgument (Prefix_Cell, Cells.Null_Cell, VCG_Heap); SetAuxPtr (Prefix_Cell, Cells.Null_Cell, VCG_Heap); --# assert True; if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Pos_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Val_Token) = LexTokenManager.Str_Eq then if Dictionary.TypeIsEnumeration (Prefix_Type) and then not Dictionary.TypeIsCharacter (Prefix_Type) then -- Enumeration type but NOT character - model as an FDL -- function. Model_Simple_Function_Attribute (Expression_Cell => Expression_Cell, Attrib_Cell => Attrib_Cell, Prefix_Cell => Prefix_Cell, Strip_To_Root_Type => True, E_Stack => E_Stack, VCG_Heap => VCG_Heap); else -- must be discrete numeric type or character so simply discard attribute, -- since for all integer (signed or modular) and Character types X (or subtypes -- thereof...), X'Pos (Y) = X'Val (Y) = Y Eliminate_Base (Prefix_Cell => Prefix_Cell, TOS => CStacks.Top (VCG_Heap, E_Stack), VCG_Heap => VCG_Heap); CStacks.PopOff (VCG_Heap, E_Stack, Temp_Cell); Cells.Dispose_Of_Cell (VCG_Heap, LeftPtr (VCG_Heap, Temp_Cell)); Cells.Dispose_Of_Cell (VCG_Heap, RightPtr (VCG_Heap, Temp_Cell)); Cells.Dispose_Of_Cell (VCG_Heap, Temp_Cell); CStacks.Push (VCG_Heap, Expression_Cell, E_Stack); end if; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Pred_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Succ_Token) = LexTokenManager.Str_Eq then Eliminate_Base (Prefix_Cell => Prefix_Cell, TOS => CStacks.Top (VCG_Heap, E_Stack), VCG_Heap => VCG_Heap); CStacks.PopOff (VCG_Heap, E_Stack, Temp_Cell); Cells.Dispose_Of_Cell (VCG_Heap, LeftPtr (VCG_Heap, Temp_Cell)); Cells.Dispose_Of_Cell (VCG_Heap, RightPtr (VCG_Heap, Temp_Cell)); Cells.Dispose_Of_Cell (VCG_Heap, Temp_Cell); CStacks.Push (VCG_Heap, Expression_Cell, E_Stack); if Dictionary.TypeIsEnumeration (Prefix_Type) then if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Succ_Token) = LexTokenManager.Str_Eq then PushFunction (Cell_Storage.Succ_Function, VCG_Heap, E_Stack); else PushFunction (Cell_Storage.Pred_Function, VCG_Heap, E_Stack); end if; else -- must be discrete numeric type so use + or - instead LexTokenManager.Insert_Nat (N => 1, Lex_Str => Lex_Str); CreateManifestConstCell (Temp_Cell, VCG_Heap, Lex_Str); CStacks.Push 
(VCG_Heap, Temp_Cell, E_Stack); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Succ_Token) = LexTokenManager.Str_Eq then PushOperator (Binary, SP_Symbols.plus, VCG_Heap, E_Stack); else PushOperator (Binary, SP_Symbols.minus, VCG_Heap, E_Stack); end if; ModularizeIfNeeded (Prefix_Type, VCG_Heap, E_Stack); end if; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.First_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Last_Token) = LexTokenManager.Str_Eq then if Base_Found and then Dictionary.TypeIsEnumeration (Prefix_Type) then Cells.Set_Symbol_Value (VCG_Heap, Prefix_Cell, Dictionary.GetRootType (Prefix_Type)); Eliminate_Base (Prefix_Cell => Prefix_Cell, TOS => CStacks.Top (VCG_Heap, E_Stack), VCG_Heap => VCG_Heap); end if; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Range_Token) = LexTokenManager.Str_Eq then TransformRangeConstraint (VCG_Heap, E_Stack); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Length_Token) = LexTokenManager.Str_Eq then Model_Length_Attribute (E_Stack => E_Stack, VCG_Heap => VCG_Heap); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Max_Token) = LexTokenManager.Str_Eq or else -- 830 LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Min_Token) = LexTokenManager.Str_Eq then Model_Min_Max (Expression_Cell => Expression_Cell, Second_Expression_Cell => Second_Expression_Cell, Attrib_Cell => Attrib_Cell, Prefix_Cell => Prefix_Cell, E_Stack => E_Stack, VCG_Heap => VCG_Heap); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Valid_Token) = LexTokenManager.Str_Eq then -- using the Expression_Cell which is a copy of the prefix -- to the attribute made earlier. 
-- -- Data validity is defined in terms of the indicated sub-type -- (LRM 13.9.1(2)), so we don't strip to the root type in this case Model_Simple_Function_Attribute (Expression_Cell => Expression_Cell, Attrib_Cell => Attrib_Cell, Prefix_Cell => Prefix_Cell, Strip_To_Root_Type => False, E_Stack => E_Stack, VCG_Heap => VCG_Heap); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Floor_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Ceiling_Token) = LexTokenManager.Str_Eq then Model_Simple_Function_Attribute (Expression_Cell => Expression_Cell, Attrib_Cell => Attrib_Cell, Prefix_Cell => Prefix_Cell, Strip_To_Root_Type => True, E_Stack => E_Stack, VCG_Heap => VCG_Heap); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Tail_Token) = LexTokenManager.Str_Eq then Model_Tail_Function_Attribute (Expression_Cell => Expression_Cell, Attrib_Cell => Attrib_Cell, VCG_Heap => VCG_Heap); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Append_Token) = LexTokenManager.Str_Eq then Model_Append_Function_Attribute (Expression_Cell => Expression_Cell, Second_Expression_Cell => Second_Expression_Cell, Attrib_Cell => Attrib_Cell, VCG_Heap => VCG_Heap); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attrib_Name, Lex_Str2 => LexTokenManager.Mod_Token) = LexTokenManager.Str_Eq then Model_Mod_Function_Attribute (Expression_Cell => Expression_Cell, Prefix_Cell => Prefix_Cell, Type_Sym => Prefix_Type, E_Stack => E_Stack, VCG_Heap => VCG_Heap); else -- it's a non-function, non-substitutable attribute if Cells.Get_Kind (VCG_Heap, Prefix_Cell) = Cell_Storage.Reference then Cells.Set_Kind (VCG_Heap, Prefix_Cell, Cell_Storage.Fixed_Var); end if; end if; end Up_Process_Attribute_Designator; --------------------------------------------------------------------- -- Identifier and Selected Components -- --------------------------------------------------------------------- procedure Replace_With_On_Entry_Variable (DAG_Cell : in Cells.Cell; Var_Sym : in Dictionary.Symbol; Loop_Stack : in LoopContext.T; VCG_Heap : in out Cells.Heap_Record) --# global in Dictionary.Dict; --# derives VCG_Heap from *, --# DAG_Cell, --# Dictionary.Dict, --# Loop_Stack, --# Var_Sym; is On_Entry_Variable : Dictionary.Symbol; Current_Loop : Dictionary.Symbol; begin -- For a variable which appears in a for loop invariant in the form X%, replace X with the -- variable set up in BuildGraph as X_on_entry_to_the_loop. 
-- The variable we are seeking may appear in the exit condition of an enclosing for loop so we -- need to loop through any enclosing loops Current_Loop := LoopContext.CurrentLoopSym (Loop_Stack, VCG_Heap); loop On_Entry_Variable := Dictionary.GetLoopOnEntryVariable (Var_Sym, Current_Loop); -- success exit condition, sought variable is used in loop exit conditon exit when not Dictionary.Is_Null_Symbol (On_Entry_Variable); -- If we have a null symbol then the variable isn't used in the exit condition of the current loop -- so we need to get the enclosing loop and try again Current_Loop := LoopContext.EnclosingLoopSym (Loop_Stack, VCG_Heap, Current_Loop); -- failure case, we have run out of loops without finding soughtvariable if Dictionary.Is_Null_Symbol (Current_Loop) then On_Entry_Variable := Dictionary.NullSymbol; exit; end if; end loop; -- If % is used on a variable that doesn't appear in any enclosing for loop exit condition then -- On_Entry_Variable will be still be a null symbol here. Ideally we should prevent use of percent in this -- situation but the wffs for that would be very hard to write. As a second best we simply -- don't make the substitution in this case. In effect we say that X% = X is X doesn't appear -- in the for loop exit condition. if not Dictionary.Is_Null_Symbol (On_Entry_Variable) then Cells.Set_Symbol_Value (VCG_Heap, DAG_Cell, On_Entry_Variable); end if; end Replace_With_On_Entry_Variable; --------------------------------------------------------------------- procedure Process_Identifier (L_Scope : in Dictionary.Scopes; Calling_Scope : in Dictionary.Scopes; Force_Abstract : in Boolean; Loop_Stack : in LoopContext.T; Calling_Function : in out Cells.Cell; Current_Unit : in out Dictionary.Symbol; Implicit_Var : in out Dictionary.Symbol; Current_Instantiation : in out Dictionary.Symbol; Called_Functions : in out Symbol_Set.T; Start_Node : in out STree.SyntaxNode; Next_Node : in out STree.SyntaxNode; Last_Node : in out STree.SyntaxNode; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in CommandLineData.Content; --# in Generate_Function_Instantiations; --# in STree.Table; --# in out Dictionary.Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out VC_Contains_Reals; --# in out VC_Failure; --# derives Called_Functions, --# Calling_Function, --# Current_Instantiation, --# Current_Unit, --# Last_Node, --# Next_Node, --# Start_Node from *, --# Called_Functions, --# Calling_Scope, --# CommandLineData.Content, --# Dictionary.Dict, --# E_Stack, --# Force_Abstract, --# Generate_Function_Instantiations, --# LexTokenManager.State, --# L_Scope, --# Next_Node, --# STree.Table, --# VCG_Heap & --# Dictionary.Dict, --# Implicit_Var, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# VC_Contains_Reals, --# VC_Failure from *, --# Called_Functions, --# Calling_Function, --# Calling_Scope, --# CommandLineData.Content, --# Dictionary.Dict, --# E_Stack, --# Force_Abstract, --# Generate_Function_Instantiations, --# LexTokenManager.State, --# L_Scope, --# Next_Node, --# STree.Table, --# VCG_Heap & --# E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# Called_Functions, --# Calling_Function, --# Calling_Scope, --# CommandLineData.Content, --# Current_Instantiation, --# Current_Unit, --# Dictionary.Dict, --# E_Stack, --# Force_Abstract, --# Generate_Function_Instantiations, --# Implicit_Var, --# Last_Node, --# LexTokenManager.State, --# Loop_Stack, --# L_Scope, --# Next_Node, --# Start_Node, 
--# STree.Table, --# VCG_Heap; is Sym : Dictionary.Symbol; Enclosing_Package : Dictionary.Symbol; DAG_Cell : Cells.Cell; --------------------------------------------------------------------- function Get_Enclosing_Package (Scope : Dictionary.Scopes) return Dictionary.Symbol --# global in Dictionary.Dict; is Result : Dictionary.Symbol; Enclosing_Scope : Dictionary.Scopes; begin Enclosing_Scope := Scope; loop Enclosing_Scope := Dictionary.GetEnclosingScope (Enclosing_Scope); Result := Dictionary.GetRegion (Enclosing_Scope); exit when Dictionary.IsPackage (Result); -- fail-safe exit if we hit "standard" if Dictionary.IsPredefinedScope (Enclosing_Scope) then Result := Dictionary.GetRegion (Enclosing_Scope); exit; end if; end loop; return Result; end Get_Enclosing_Package; begin -- Process_Identifier Sym := Dictionary.LookupItem (Name => STree.Node_Lex_String (Node => Next_Node), Scope => L_Scope, Context => Dictionary.ProofContext, Full_Package_Name => False); -- if we are doing an abstract pre/post and we fail to find what we are -- expecting at the first attempt we need to re-search in the visible -- scope of the package where our subprogram is declared; this is to -- pick up abstract own variables that have been refined away if Dictionary.Is_Null_Symbol (Sym) and then Force_Abstract then Enclosing_Package := Get_Enclosing_Package (Scope => L_Scope); if Dictionary.IsPackage (Enclosing_Package) then Sym := Dictionary.LookupItem (Name => STree.Node_Lex_String (Node => Next_Node), Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Enclosing_Package), Context => Dictionary.ProofContext, Full_Package_Name => False); end if; end if; -- If we call an inherited root function then the above call will fail -- to find it and returns a null symbol. In this case we can check the -- syntax tree for the symbol of the root operation that will have been -- planted by StackIdentifier. if Dictionary.Is_Null_Symbol (Sym) then Sym := STree.NodeSymbol (Next_Node); end if; SystemErrors.RT_Assert (C => not Dictionary.Is_Null_Symbol (Sym), Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "DAG.Build_Annotation_Expression.Process_Identifier : Program Error"); Cells.Create_Cell (VCG_Heap, DAG_Cell); if Dictionary.Is_Variable (Sym) then Cells.Set_Kind (VCG_Heap, DAG_Cell, Cell_Storage.Reference); Cells.Set_Symbol_Value (VCG_Heap, DAG_Cell, Sym); if STree.IdentifierHasTildeSuffix (Next_Node) then SetTilde (DAG_Cell, VCG_Heap); elsif STree.IdentifierHasPercentSuffix (Next_Node) then Replace_With_On_Entry_Variable (DAG_Cell => DAG_Cell, Var_Sym => Sym, Loop_Stack => Loop_Stack, VCG_Heap => VCG_Heap); end if; CStacks.Push (VCG_Heap, DAG_Cell, E_Stack); elsif Dictionary.IsFunction (Sym) then -- The down loop is exited following processing of an identifier -- which is aa function call so set the Next_Node to null and -- the direction as Up_Loop. 
Next_Node := STree.NullNode; Cells.Set_Kind (VCG_Heap, DAG_Cell, Cell_Storage.Pending_Function); Cells.Set_Symbol_Value (VCG_Heap, DAG_Cell, Sym); CStacks.Push (VCG_Heap, DAG_Cell, E_Stack); Setup_Function_Call (Direction => Up_Loop, Called_Functions => Called_Functions, Current_Unit => Current_Unit, Implicit_Var => Implicit_Var, Current_Instantiation => Current_Instantiation, Start_Node => Start_Node, Next_Node => Next_Node, Last_Node => Last_Node, Scope => L_Scope, Calling_Scope => Calling_Scope, Calling_Function => Calling_Function, Force_Abstract => Force_Abstract, E_Stack => E_Stack, VCG_Heap => VCG_Heap); elsif Dictionary.IsTypeMark (Sym) then -- If the identifier denotes a record subtype, then push its -- root type for subsequent VCG modelling. if Dictionary.TypeIsRecord (Sym) and then Dictionary.IsSubtype (Sym) then Sym := Dictionary.GetRootType (Sym); end if; Cells.Set_Kind (VCG_Heap, DAG_Cell, Cell_Storage.Fixed_Var); Cells.Set_Symbol_Value (VCG_Heap, DAG_Cell, Sym); CStacks.Push (VCG_Heap, DAG_Cell, E_Stack); else Cells.Set_Kind (VCG_Heap, DAG_Cell, Cell_Storage.Named_Const); Cells.Set_Symbol_Value (VCG_Heap, DAG_Cell, Sym); CStacks.Push (VCG_Heap, DAG_Cell, E_Stack); end if; end Process_Identifier; ------------------------------------------------------------------- procedure Process_Selected_Component (Direction : in Loop_Direction; L_Scope : in Dictionary.Scopes; Calling_Scope : in Dictionary.Scopes; Force_Abstract : in Boolean; Loop_Stack : in LoopContext.T; Calling_Function : in out Cells.Cell; Current_Unit : in out Dictionary.Symbol; Implicit_Var : in out Dictionary.Symbol; Current_Instantiation : in out Dictionary.Symbol; Called_Functions : in out Symbol_Set.T; Start_Node : in out STree.SyntaxNode; Next_Node : in out STree.SyntaxNode; Last_Node : in out STree.SyntaxNode; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in CommandLineData.Content; --# in Generate_Function_Instantiations; --# in STree.Table; --# in out Dictionary.Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out VC_Contains_Reals; --# in out VC_Failure; --# derives Called_Functions, --# Calling_Function, --# Current_Instantiation, --# Current_Unit, --# Last_Node, --# Next_Node, --# Start_Node from *, --# Called_Functions, --# Calling_Scope, --# CommandLineData.Content, --# Dictionary.Dict, --# E_Stack, --# Force_Abstract, --# Generate_Function_Instantiations, --# LexTokenManager.State, --# L_Scope, --# Next_Node, --# STree.Table, --# VCG_Heap & --# Dictionary.Dict, --# Implicit_Var, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# VC_Contains_Reals, --# VC_Failure from *, --# Called_Functions, --# Calling_Function, --# Calling_Scope, --# CommandLineData.Content, --# Dictionary.Dict, --# E_Stack, --# Force_Abstract, --# Generate_Function_Instantiations, --# LexTokenManager.State, --# L_Scope, --# Next_Node, --# STree.Table, --# VCG_Heap & --# E_Stack, --# Statistics.TableUsage from *, --# Called_Functions, --# Calling_Function, --# Calling_Scope, --# CommandLineData.Content, --# Current_Instantiation, --# Current_Unit, --# Dictionary.Dict, --# Direction, --# E_Stack, --# Force_Abstract, --# Generate_Function_Instantiations, --# Implicit_Var, --# Last_Node, --# LexTokenManager.State, --# L_Scope, --# Next_Node, --# Start_Node, --# STree.Table, --# VCG_Heap & --# VCG_Heap from *, --# Called_Functions, --# Calling_Function, --# Calling_Scope, --# CommandLineData.Content, --# Current_Instantiation, --# 
Current_Unit, --# Dictionary.Dict, --# Direction, --# E_Stack, --# Force_Abstract, --# Generate_Function_Instantiations, --# Implicit_Var, --# Last_Node, --# LexTokenManager.State, --# Loop_Stack, --# L_Scope, --# Next_Node, --# Start_Node, --# STree.Table; is DAG_Cell : Cells.Cell; Sym : Dictionary.Symbol; Ident_Node : STree.SyntaxNode; Prefix : Dictionary.Symbol; ------------------------------------------------------------------ procedure Model_Record_Component (Record_Type, Sym : in Dictionary.Symbol; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# Dictionary.Dict, --# E_Stack, --# LexTokenManager.State, --# Record_Type, --# Sym, --# VCG_Heap; is DAG_Cell : Cells.Cell; Expression_Cell : Cells.Cell; begin CStacks.PopOff (VCG_Heap, E_Stack, Expression_Cell); -- Expression_Cell is a DAG representing an expression which is a record field -- Insert one or more "fld_inherit (" before the expression ModelInheritedFieldsOfTaggedRecord (Dictionary.GetSimpleName (Sym), Record_Type, VCG_Heap, Expression_Cell); -- Then prefix it with fld_? ( CreateCellKind (DAG_Cell, VCG_Heap, Cell_Storage.Field_Access_Function); Cells.Set_Symbol_Value (VCG_Heap, DAG_Cell, Sym); Cells.Set_Lex_Str (VCG_Heap, DAG_Cell, Dictionary.GetSimpleName (Sym)); SetRightArgument (DAG_Cell, Expression_Cell, VCG_Heap); CStacks.Push (VCG_Heap, DAG_Cell, E_Stack); end Model_Record_Component; begin -- Process_Selected_Component DAG_Cell := CStacks.Top (VCG_Heap, E_Stack); Ident_Node := STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Next_Node)))); Prefix := GetTOStype (VCG_Heap, E_Stack); Sym := Dictionary.LookupSelectedItem (Prefix, STree.Node_Lex_String (Node => Ident_Node), L_Scope, Dictionary.ProofContext); -- If we call an inherited root function then the above call will fail -- to find it and returns a null symbol. In this case we can check the -- syntax tree for the symbol of the root operation that will have been -- planted by StackIdentifier. 
if Dictionary.Is_Null_Symbol (Sym) then Sym := STree.NodeSymbol (Next_Node); end if; SystemErrors.RT_Assert (C => not Dictionary.Is_Null_Symbol (Sym), Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "DAG.Build_Annotation_Expression.Process_Selected_Component : Program Error"); if Dictionary.IsRecordComponent (Sym) then Model_Record_Component (Record_Type => Prefix, Sym => Sym, E_Stack => E_Stack, VCG_Heap => VCG_Heap); elsif Dictionary.Is_Variable (Sym) then Cells.Set_Kind (VCG_Heap, DAG_Cell, Cell_Storage.Reference); Cells.Set_Symbol_Value (VCG_Heap, DAG_Cell, Sym); if STree.IdentifierHasTildeSuffix (Ident_Node) then SetTilde (DAG_Cell, VCG_Heap); elsif STree.IdentifierHasPercentSuffix (Ident_Node) then Replace_With_On_Entry_Variable (DAG_Cell => DAG_Cell, Var_Sym => Sym, Loop_Stack => Loop_Stack, VCG_Heap => VCG_Heap); end if; elsif Dictionary.IsFunction (Sym) then Cells.Set_Kind (VCG_Heap, DAG_Cell, Cell_Storage.Pending_Function); Cells.Set_Symbol_Value (VCG_Heap, DAG_Cell, Sym); Setup_Function_Call (Direction => Direction, Called_Functions => Called_Functions, Current_Instantiation => Current_Instantiation, Current_Unit => Current_Unit, Implicit_Var => Implicit_Var, Start_Node => Start_Node, Next_Node => Next_Node, Last_Node => Last_Node, Scope => L_Scope, Calling_Scope => Calling_Scope, Calling_Function => Calling_Function, Force_Abstract => Force_Abstract, E_Stack => E_Stack, VCG_Heap => VCG_Heap); elsif Dictionary.IsTypeMark (Sym) then Cells.Set_Kind (VCG_Heap, DAG_Cell, Cell_Storage.Fixed_Var); Cells.Set_Symbol_Value (VCG_Heap, DAG_Cell, Sym); else Cells.Set_Kind (VCG_Heap, DAG_Cell, Cell_Storage.Named_Const); Cells.Set_Symbol_Value (VCG_Heap, DAG_Cell, Sym); end if; end Process_Selected_Component; --------------------------------------------------------------------- -- Expressions -- --------------------------------------------------------------------- procedure Process_Expression (Node : in STree.SyntaxNode; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in Dictionary.Dict; --# in STree.Table; --# in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# Dictionary.Dict, --# E_Stack, --# Node, --# STree.Table, --# VCG_Heap; is Op_Node : STree.SyntaxNode; Operator : SP_Symbols.SP_Symbol; Result_Type : Dictionary.Symbol; ------------------------------------------------------------- procedure Model_Bitwise_Operation (Operator : in SP_Symbols.SP_Symbol; Type_Sym : in Dictionary.Symbol; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# E_Stack, --# Operator, --# Type_Sym, --# VCG_Heap; is Bool_Op_Cell : Cells.Cell; begin CreateBoolOpCell (Bool_Op_Cell, VCG_Heap, Type_Sym, Operator); -- on the stack are the arguments we want for this new function. 
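-- Added illustrative note (editorial commentary, not from the original source):
-- the two operand DAGs currently on the expression stack are first joined by a
-- comma cell and then attached as the right argument of Bool_Op_Cell, so the
-- result pushed back is a single application of the modelling function to the
-- pair (Left, Right); e.g. "A and B" on a Boolean array or modular type becomes
-- one call of the bitwise modelling function rather than a scalar "and" node.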
PushOperator (Binary, SP_Symbols.comma, VCG_Heap, E_Stack); -- tos now has comma cell joining the two arguments SetRightArgument (Bool_Op_Cell, CStacks.Top (VCG_Heap, E_Stack), VCG_Heap); CStacks.Pop (VCG_Heap, E_Stack); CStacks.Push (VCG_Heap, Bool_Op_Cell, E_Stack); -- modelling function is now on TOS end Model_Bitwise_Operation; ------------------------------------------------------------- -- model XOR iaw B manual para 3.1.5 procedure Model_Xor_Operator (E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# E_Stack, --# VCG_Heap; is DAG_Cell, Left, Right, Copy_Of_Left, Copy_Of_Right : Cells.Cell; begin -- Obtain operands and make copies of them so that we can construct a model -- that does not make multiple links to the Left and Right cells. This -- change arises from CFR 1154 and affects only annotation expressions since -- it is to avoid problems when substituting for tilded globals in postconditions. CStacks.PopOff (VCG_Heap, E_Stack, Right); Structures.CopyStructure (VCG_Heap, Right, Copy_Of_Right); CStacks.PopOff (VCG_Heap, E_Stack, Left); Structures.CopyStructure (VCG_Heap, Left, Copy_Of_Left); -- model OR part using original arguments CreateOpCell (DAG_Cell, VCG_Heap, SP_Symbols.RWor); SetRightArgument (DAG_Cell, Right, VCG_Heap); SetLeftArgument (DAG_Cell, Left, VCG_Heap); CStacks.Push (VCG_Heap, DAG_Cell, E_Stack); -- model AND part using copies CreateOpCell (DAG_Cell, VCG_Heap, SP_Symbols.RWand); SetRightArgument (DAG_Cell, Copy_Of_Right, VCG_Heap); SetLeftArgument (DAG_Cell, Copy_Of_Left, VCG_Heap); CStacks.Push (VCG_Heap, DAG_Cell, E_Stack); -- negate AND part PushOperator (Unary, SP_Symbols.RWnot, VCG_Heap, E_Stack); -- complete model by conjoining the OR and NOT AND parts PushOperator (Binary, SP_Symbols.RWand, VCG_Heap, E_Stack); end Model_Xor_Operator; begin -- Process_Expression Op_Node := STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node)); if Op_Node /= STree.NullNode then Operator := STree.Syntax_Node_Type (Node => Op_Node); -- check to see if result type is an array and -- build special model if it is Result_Type := STree.NodeSymbol (Op_Node); if Dictionary.IsTypeMark (Result_Type) and then Dictionary.TypeIsArray (Result_Type) then -- must be a Boolean array operation Model_Bitwise_Operation (Operator => Operator, Type_Sym => Result_Type, E_Stack => E_Stack, VCG_Heap => VCG_Heap); elsif IsModularBitwiseOp (Operator, Result_Type) then Model_Bitwise_Operation (Operator => Operator, Type_Sym => Result_Type, E_Stack => E_Stack, VCG_Heap => VCG_Heap); else -- proceed as before for scalar bool ops if Operator = SP_Symbols.RWxor then Model_Xor_Operator (E_Stack => E_Stack, VCG_Heap => VCG_Heap); else PushOperator (Binary, Operator, VCG_Heap, E_Stack); end if; end if; end if; end Process_Expression; ------------------------------------------------------------------- procedure Process_Simple_Expression (Node : in STree.SyntaxNode; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in Dictionary.Dict; --# in STree.Table; --# in out LexTokenManager.State; --# in out Statistics.TableUsage; --# derives E_Stack, --# LexTokenManager.State, --# Statistics.TableUsage, --# VCG_Heap from *, --# Dictionary.Dict, --# E_Stack, --# LexTokenManager.State, --# Node, --# STree.Table, --# VCG_Heap; is Op_Node : STree.SyntaxNode; Op : SP_Symbols.SP_Symbol; ---------------------------------------------------- 
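-- Added editorial note (not from the original source): the two procedures below
-- give "/" and "rem" their FDL models. Division keeps the ordinary "/" operator
-- when the result type is real and uses the FDL "div" operator otherwise, while
-- "I rem J" is expanded to I - (I div J) * J, which is the Ada definition of rem
-- (LRM 4.5.5). For example, assuming div truncates towards zero as Ada integer
-- division does: 7 rem 2 = 7 - (7 div 2) * 2 = 1, and
-- (-7) rem 2 = -7 - ((-7) div 2) * 2 = -7 - (-6) = -1.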
procedure Model_Divide (E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in Dictionary.Dict; --# in Op_Node; --# in STree.Table; --# in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# Dictionary.Dict, --# E_Stack, --# Op_Node, --# STree.Table, --# VCG_Heap; is Op_Cell : Cells.Cell; begin Cells.Create_Cell (VCG_Heap, Op_Cell); if Dictionary.TypeIsReal (STree.NodeSymbol (Op_Node)) then Cells.Set_Kind (VCG_Heap, Op_Cell, Cell_Storage.Op); Cells.Set_Op_Symbol (VCG_Heap, Op_Cell, SP_Symbols.divide); else Cells.Set_Kind (VCG_Heap, Op_Cell, Cell_Storage.FDL_Div_Op); end if; SetRightArgument (Op_Cell, CStacks.Top (VCG_Heap, E_Stack), VCG_Heap); CStacks.Pop (VCG_Heap, E_Stack); SetLeftArgument (Op_Cell, CStacks.Top (VCG_Heap, E_Stack), VCG_Heap); CStacks.Pop (VCG_Heap, E_Stack); CStacks.Push (VCG_Heap, Op_Cell, E_Stack); end Model_Divide; ---------------------------------------- procedure Model_Rem (E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# E_Stack, --# VCG_Heap; is DAG_Cell, Left, Right, Copy_Of_Left, Copy_Of_Right : Cells.Cell; begin -- correct modelling of I rem J as I - (I div J) * J -- J is top of stack and I is 2nd TOS CStacks.PopOff (VCG_Heap, E_Stack, Right); CStacks.PopOff (VCG_Heap, E_Stack, Left); -- Make deep copies of arguments so that we can construct a model with -- no sharing of the argument cells. This change arises from CFR 1154 -- and affects only annotation expression because of potential problems -- substituting tilded globals in post conditions if a cell is revisited. Structures.CopyStructure (VCG_Heap, Left, Copy_Of_Left); Structures.CopyStructure (VCG_Heap, Right, Copy_Of_Right); -- make core DIV sub-model using original arguments CreateCellKind (DAG_Cell, VCG_Heap, Cell_Storage.FDL_Div_Op); SetRightArgument (DAG_Cell, Right, VCG_Heap); SetLeftArgument (DAG_Cell, Left, VCG_Heap); CStacks.Push (VCG_Heap, DAG_Cell, E_Stack); -- multiply by copy of right rather than re-using rigth CreateOpCell (DAG_Cell, VCG_Heap, SP_Symbols.multiply); SetRightArgument (DAG_Cell, Copy_Of_Right, VCG_Heap); SetLeftArgument (DAG_Cell, CStacks.Top (VCG_Heap, E_Stack), VCG_Heap); CStacks.Pop (VCG_Heap, E_Stack); CStacks.Push (VCG_Heap, DAG_Cell, E_Stack); CreateOpCell (DAG_Cell, VCG_Heap, SP_Symbols.minus); SetRightArgument (DAG_Cell, CStacks.Top (VCG_Heap, E_Stack), VCG_Heap); CStacks.Pop (VCG_Heap, E_Stack); SetLeftArgument (DAG_Cell, Copy_Of_Left, VCG_Heap); -- note use of copy CStacks.Push (VCG_Heap, DAG_Cell, E_Stack); end Model_Rem; begin -- Process_Simple_Expression Op_Node := STree.Child_Node (Current_Node => STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node))); if Op_Node /= STree.NullNode then -- detection of / and REM for special handling Op := STree.Syntax_Node_Type (Node => Op_Node); if Op = SP_Symbols.divide then Model_Divide (E_Stack => E_Stack, VCG_Heap => VCG_Heap); elsif Op = SP_Symbols.RWrem then Model_Rem (E_Stack => E_Stack, VCG_Heap => VCG_Heap); elsif Op = SP_Symbols.ampersand then Model_Catenation (E_Stack, VCG_Heap); else PushOperator (Binary, Op, VCG_Heap, E_Stack); end if; ModularizeIfNeeded (STree.NodeSymbol (Op_Node), VCG_Heap, E_Stack); end if; end Process_Simple_Expression; ------------------------------------------------------------------- procedure Process_Simple_Expression_Opt (Node : in STree.SyntaxNode; E_Stack : in out 
CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in STree.Table; --# in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# E_Stack, --# Node, --# STree.Table, --# VCG_Heap; is Op_Node : STree.SyntaxNode; begin Op_Node := STree.Child_Node (Current_Node => Node); if STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.unary_adding_operator then PushOperator (Unary, STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => Op_Node)), VCG_Heap, E_Stack); end if; end Process_Simple_Expression_Opt; ------------------------------------------------------------------------ procedure Process_Relation (Node : in STree.SyntaxNode; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in Dictionary.Dict; --# in STree.Table; --# in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# Dictionary.Dict, --# E_Stack, --# Node, --# STree.Table, --# VCG_Heap; is Op_Node : STree.SyntaxNode; ------------------------------------------------------------------- procedure Model_In_Clause (Node : in STree.SyntaxNode; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in Dictionary.Dict; --# in STree.Table; --# in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# Dictionary.Dict, --# E_Stack, --# Node, --# STree.Table, --# VCG_Heap; is Left_Side_Of_Range, Right_Side_Of_Range, Type_Mark_Cell, Attrib_Cell : Cells.Cell; Rel_Operation_LHS, Rel_Operation_RHS, Middle_Operator : SP_Symbols.SP_Symbol; In_Operator_Node, Range_Node : STree.SyntaxNode; type Static_Results is (Is_True, Is_False, Is_Unknown); Static_Result : Static_Results; type Membership_Kinds is (Inside, Outside); Membership_Kind : Membership_Kinds; function Check_If_Result_Statically_Known (In_Operator_Node : STree.SyntaxNode) return Static_Results --# global in Dictionary.Dict; --# in STree.Table; is Static_Result : Static_Results := Is_Unknown; begin if Dictionary.IsEnumerationLiteral (STree.NodeSymbol (In_Operator_Node)) then if Dictionary.Enumeration_Literals_Are_Equal (Left_Symbol => STree.NodeSymbol (In_Operator_Node), Right_Symbol => Dictionary.GetTrue) then Static_Result := Is_True; elsif Dictionary.Enumeration_Literals_Are_Equal (Left_Symbol => STree.NodeSymbol (In_Operator_Node), Right_Symbol => Dictionary.GetFalse) then Static_Result := Is_False; end if; end if; return Static_Result; end Check_If_Result_Statically_Known; ---------------------- procedure Model_Statically_Known_Result (Static_Result : in Static_Results; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives E_Stack from Dictionary.Dict, --# Static_Result, --# VCG_Heap & --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Static_Result, --# VCG_Heap & --# VCG_Heap from *, --# Dictionary.Dict, --# E_Stack, --# Static_Result; is Static_Result_Cell : Cells.Cell; begin CreateCellKind (Static_Result_Cell, VCG_Heap, Cell_Storage.Named_Const); if Static_Result = Is_True then Cells.Set_Symbol_Value (VCG_Heap, Static_Result_Cell, Dictionary.GetTrue); else Cells.Set_Symbol_Value (VCG_Heap, Static_Result_Cell, Dictionary.GetFalse); end if; CStacks.Push (VCG_Heap, Static_Result_Cell, E_Stack); end Model_Statically_Known_Result; ---------------------- procedure Complete_Inequality_Model (Left_Side_Of_Range, Right_Side_Of_Range : in Cells.Cell; Rel_Operation_LHS, Rel_Operation_RHS : 
in SP_Symbols.SP_Symbol; Middle_Operator : in SP_Symbols.SP_Symbol; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# E_Stack, --# Left_Side_Of_Range, --# Middle_Operator, --# Rel_Operation_LHS, --# Rel_Operation_RHS, --# Right_Side_Of_Range, --# VCG_Heap; is Left_Operand, Copy_Of_Left_Operand : Cells.Cell; begin CStacks.PopOff (VCG_Heap, E_Stack, Left_Operand); -- Make deep copy of left operand so that we can construct the model -- without making multiple links to Left_Operand cell. This change -- arises from CFR 1154 and is only needed in annotation expressions -- because of potential problems using tilded globals in postconditions -- that use IN operators Structures.CopyStructure (VCG_Heap, Left_Operand, Copy_Of_Left_Operand); -- restore stack, model first inequality CStacks.Push (VCG_Heap, Left_Operand, E_Stack); CStacks.Push (VCG_Heap, Left_Side_Of_Range, E_Stack); PushOperator (Binary, Rel_Operation_LHS, VCG_Heap, E_Stack); -- model second inequality using copy of LHS CStacks.Push (VCG_Heap, Copy_Of_Left_Operand, E_Stack); CStacks.Push (VCG_Heap, Right_Side_Of_Range, E_Stack); PushOperator (Binary, Rel_Operation_RHS, VCG_Heap, E_Stack); -- form conjunction of the two range constraints; PushOperator (Binary, Middle_Operator, VCG_Heap, E_Stack); end Complete_Inequality_Model; ---------------------- function Is_Boolean_Membership (In_Operator_Node : STree.SyntaxNode) return Boolean --# global in Dictionary.Dict; --# in STree.Table; is begin return Dictionary.IsType (STree.NodeSymbol (In_Operator_Node)) and then Dictionary.TypeIsBoolean (STree.NodeSymbol (In_Operator_Node)); end Is_Boolean_Membership; ---------------------- procedure Complete_Boolean_Model (Left_Side_Of_Range, Right_Side_Of_Range : in Cells.Cell; Membership_Kind : in Membership_Kinds; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# E_Stack, --# Left_Side_Of_Range, --# Membership_Kind, --# Right_Side_Of_Range, --# VCG_Heap; is Left_Operand, Copy_Of_Left_Operand : Cells.Cell; begin -- model: for X in L .. R create (X and R) or (not X and not L) -- negate entire model if operator is 'not in' rather than 'in' CStacks.PopOff (VCG_Heap, E_Stack, Left_Operand); -- Make deep copy of left operand so that we can construct the model -- without making multiple links to Left_Operand cell. 
This change -- arises from CFR 1154 and is only needed in annotation expressions -- because of potential problems using tilded globals in postconditions -- that use IN operators Structures.CopyStructure (VCG_Heap, Left_Operand, Copy_Of_Left_Operand); -- create not L CStacks.Push (VCG_Heap, Left_Side_Of_Range, E_Stack); PushOperator (Unary, SP_Symbols.RWnot, VCG_Heap, E_Stack); -- create not X (using copy of X) CStacks.Push (VCG_Heap, Copy_Of_Left_Operand, E_Stack); PushOperator (Unary, SP_Symbols.RWnot, VCG_Heap, E_Stack); -- conjoin PushOperator (Binary, SP_Symbols.RWand, VCG_Heap, E_Stack); -- create X and R CStacks.Push (VCG_Heap, Right_Side_Of_Range, E_Stack); CStacks.Push (VCG_Heap, Left_Operand, E_Stack); PushOperator (Binary, SP_Symbols.RWand, VCG_Heap, E_Stack); -- disjoin above two subexpressions PushOperator (Binary, SP_Symbols.RWor, VCG_Heap, E_Stack); -- finally, if outside rather than inside then invert answer if Membership_Kind = Outside then PushOperator (Unary, SP_Symbols.RWnot, VCG_Heap, E_Stack); end if; end Complete_Boolean_Model; begin -- Model_In_Clause In_Operator_Node := STree.Next_Sibling (Current_Node => Node); if STree.Syntax_Node_Type (Node => In_Operator_Node) = SP_Symbols.inside then Membership_Kind := Inside; Rel_Operation_LHS := SP_Symbols.greater_or_equal; Rel_Operation_RHS := SP_Symbols.less_or_equal; Middle_Operator := SP_Symbols.RWand; else Membership_Kind := Outside; Rel_Operation_LHS := SP_Symbols.less_than; Rel_Operation_RHS := SP_Symbols.greater_than; Middle_Operator := SP_Symbols.RWor; end if; Range_Node := STree.Next_Sibling (Current_Node => In_Operator_Node); if STree.Syntax_Node_Type (Node => Range_Node) = SP_Symbols.annotation_arange then -- set is defined by a range, held in stack; if STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => Range_Node)) = SP_Symbols.annotation_attribute then -- range is defined by a range attribute on top of stack -- this has already been transformed by UpProcessAttribute -- which has left Index'First .. Index'Last on stack Left_Side_Of_Range := LeftPtr (VCG_Heap, CStacks.Top (VCG_Heap, E_Stack)); Right_Side_Of_Range := RightPtr (VCG_Heap, CStacks.Top (VCG_Heap, E_Stack)); CStacks.Pop (VCG_Heap, E_Stack); --discard .. 
else -- range is defined by a pair of simple expressions; CStacks.PopOff (VCG_Heap, E_Stack, Right_Side_Of_Range); CStacks.PopOff (VCG_Heap, E_Stack, Left_Side_Of_Range); end if; if Is_Boolean_Membership (In_Operator_Node => In_Operator_Node) then Complete_Boolean_Model (Left_Side_Of_Range => Left_Side_Of_Range, Right_Side_Of_Range => Right_Side_Of_Range, Membership_Kind => Membership_Kind, E_Stack => E_Stack, VCG_Heap => VCG_Heap); else Complete_Inequality_Model (Left_Side_Of_Range => Left_Side_Of_Range, Right_Side_Of_Range => Right_Side_Of_Range, Rel_Operation_LHS => Rel_Operation_LHS, Rel_Operation_RHS => Rel_Operation_RHS, Middle_Operator => Middle_Operator, E_Stack => E_Stack, VCG_Heap => VCG_Heap); end if; else -- range is defined by a typemark on top of stack; -- form the right operands from this typemark, using FIRST and LAST; Static_Result := Check_If_Result_Statically_Known (In_Operator_Node => In_Operator_Node); -- it will be static if type is non-scalar CStacks.PopOff (VCG_Heap, E_Stack, Type_Mark_Cell); if Static_Result = Is_Unknown then CreateCellKind (Attrib_Cell, VCG_Heap, Cell_Storage.Attrib_Value); CreateOpCell (Left_Side_Of_Range, VCG_Heap, SP_Symbols.apostrophe); SetLeftArgument (Left_Side_Of_Range, Type_Mark_Cell, VCG_Heap); SetRightArgument (Left_Side_Of_Range, Attrib_Cell, VCG_Heap); Structures.CopyStructure (VCG_Heap, Left_Side_Of_Range, Right_Side_Of_Range); Cells.Set_Lex_Str (VCG_Heap, RightPtr (VCG_Heap, Left_Side_Of_Range), LexTokenManager.First_Token); Cells.Set_Lex_Str (VCG_Heap, RightPtr (VCG_Heap, Right_Side_Of_Range), LexTokenManager.Last_Token); Complete_Inequality_Model (Left_Side_Of_Range => Left_Side_Of_Range, Right_Side_Of_Range => Right_Side_Of_Range, Rel_Operation_LHS => Rel_Operation_LHS, Rel_Operation_RHS => Rel_Operation_RHS, Middle_Operator => Middle_Operator, E_Stack => E_Stack, VCG_Heap => VCG_Heap); else CStacks.Pop (VCG_Heap, E_Stack); Model_Statically_Known_Result (Static_Result => Static_Result, E_Stack => E_Stack, VCG_Heap => VCG_Heap); end if; end if; end Model_In_Clause; begin -- Process_Relation Op_Node := STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node)); if Op_Node /= STree.NullNode then if STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.relational_operator then PushOperator (Binary, STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => Op_Node)), VCG_Heap, E_Stack); else Model_In_Clause (Node => STree.Child_Node (Current_Node => Node), E_Stack => E_Stack, VCG_Heap => VCG_Heap); end if; end if; end Process_Relation; ------------------------------------------------------------------- procedure Process_Factor (Node : in STree.SyntaxNode; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in Dictionary.Dict; --# in STree.Table; --# in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# Dictionary.Dict, --# E_Stack, --# Node, --# STree.Table, --# VCG_Heap; is Op_Node : STree.SyntaxNode; Bool_Op_Cell : Cells.Cell; Result_Type : Dictionary.Symbol; -- Note, there is a similar version of this -- subprogram in BuildExprDAG procedure Model_Modular_Not_Operation (Result_Type : in Dictionary.Symbol; E_Stack : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record) --# global in out Statistics.TableUsage; --# derives E_Stack, --# Statistics.TableUsage, --# VCG_Heap from *, --# E_Stack, --# Result_Type, --# VCG_Heap; is Minus_Op_Cell, Tick_Cell, Prefix_Cell, Modulus_Cell : Cells.Cell; begin 
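-- Added illustrative example (editorial commentary, not from the original
-- source): for a hypothetical modular type with modulus 256, "not 16#0F#" is
-- modelled by the construction below as T'Last - 16#0F# = 255 - 15 = 240,
-- i.e. the expected bitwise complement 16#F0#.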
---------------------------------------------------- -- LRM 4.5.6 (5) defines "not X" for a modular -- -- type T to be equivalent to T'Last - X. -- ---------------------------------------------------- -- create ' operator CreateOpCell (Tick_Cell, VCG_Heap, SP_Symbols.apostrophe); -- create Last attribute name CreateAttribValueCell (Modulus_Cell, VCG_Heap, LexTokenManager.Last_Token); -- Create prefix given by Result_Type CreateFixedVarCell (Prefix_Cell, VCG_Heap, Result_Type); -- Assemble T'Last SetLeftArgument (Tick_Cell, Prefix_Cell, VCG_Heap); SetRightArgument (Tick_Cell, Modulus_Cell, VCG_Heap); -- create binary "-" operator CreateOpCell (Minus_Op_Cell, VCG_Heap, SP_Symbols.minus); -- Construct T'Last - X, where X is on the top-of-stack SetRightArgument (Minus_Op_Cell, CStacks.Top (VCG_Heap, E_Stack), VCG_Heap); SetLeftArgument (Minus_Op_Cell, Tick_Cell, VCG_Heap); CStacks.Pop (VCG_Heap, E_Stack); CStacks.Push (VCG_Heap, Minus_Op_Cell, E_Stack); end Model_Modular_Not_Operation; begin -- Process_Factor Op_Node := STree.Child_Node (Current_Node => Node); if STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.RWnot then -- check to see if result type is an array and -- build special model if it is Result_Type := STree.NodeSymbol (Op_Node); if Dictionary.IsTypeMark (Result_Type) then if Dictionary.TypeIsArray (Result_Type) then -- must be a Boolean array "not" operation CreateBoolOpCell (Bool_Op_Cell, VCG_Heap, Result_Type, SP_Symbols.RWnot); SetRightArgument (Bool_Op_Cell, CStacks.Top (VCG_Heap, E_Stack), VCG_Heap); CStacks.Pop (VCG_Heap, E_Stack); CStacks.Push (VCG_Heap, Bool_Op_Cell, E_Stack); elsif Dictionary.TypeIsModular (Result_Type) then -- must be a Modular "not" operation. Model_Modular_Not_Operation (Result_Type => Result_Type, E_Stack => E_Stack, VCG_Heap => VCG_Heap); else -- proceed as before for scalar bool ops PushOperator (Unary, SP_Symbols.RWnot, VCG_Heap, E_Stack); end if; else -- proceed as before for scalar bool ops PushOperator (Unary, SP_Symbols.RWnot, VCG_Heap, E_Stack); end if; -- handle abs elsif STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.RWabs then PushFunction (Cell_Storage.Abs_Function, VCG_Heap, E_Stack); else Op_Node := STree.Next_Sibling (Current_Node => Op_Node); if Op_Node /= STree.NullNode then PushOperator (Binary, SP_Symbols.double_star, VCG_Heap, E_Stack); ModularizeIfNeeded (STree.NodeSymbol (Op_Node), VCG_Heap, E_Stack); end if; end if; end Process_Factor; ------------------------------------------------------------------- procedure Down_Process_Quantifier (Node : in STree.SyntaxNode; Current_Unit : out Dictionary.Symbol; E_Stack : in out CStacks.Stack; Function_Defs : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record; L_Scope : out Dictionary.Scopes; Next_Node : out STree.SyntaxNode) --# global in Dictionary.Dict; --# in STree.Table; --# in out Statistics.TableUsage; --# derives Current_Unit, --# L_Scope, --# Next_Node from Node, --# STree.Table & --# E_Stack from Dictionary.Dict, --# Function_Defs, --# Node, --# STree.Table, --# VCG_Heap & --# Function_Defs from Dictionary.Dict, --# Node, --# STree.Table, --# VCG_Heap & --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Function_Defs, --# Node, --# STree.Table, --# VCG_Heap & --# VCG_Heap from *, --# Dictionary.Dict, --# E_Stack, --# Function_Defs, --# Node, --# STree.Table; is Ident_Node : STree.SyntaxNode; Quantifier_Sym : Dictionary.Symbol; Type_Sym : Dictionary.Symbol; Var_Decl : Cells.Cell; Quantifier_Ident : Cells.Cell; Type_Ident : Cells.Cell; Quantifier_Marker : 
Cells.Cell; begin Ident_Node := STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node)); -- continue tree walk from the range node if present or else the predicate node Next_Node := STree.Next_Sibling (Current_Node => STree.Next_Sibling (Current_Node => Ident_Node)); -- Get the Quantifier symbol planted by wffs and enter local scope of -- quantifier. Set the Current_Unit to the quantifier symbol. Quantifier_Sym := STree.NodeSymbol (Ident_Node); L_Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Quantifier_Sym); Current_Unit := Quantifier_Sym; -- build quantifier and type declaration and stack it. In FDL we want the base type Type_Sym := Dictionary.GetRootType (Dictionary.GetType (Quantifier_Sym)); CreateFixedVarCell (Type_Ident, VCG_Heap, Type_Sym); CreateFixedVarCell (Quantifier_Ident, VCG_Heap, Quantifier_Sym); -- create Var_Decl as Quantifier_Ident : Type_Ident CreateOpCell (Var_Decl, VCG_Heap, SP_Symbols.colon); SetLeftArgument (Var_Decl, Quantifier_Ident, VCG_Heap); SetRightArgument (Var_Decl, Type_Ident, VCG_Heap); -- Put the marker on the function Def stack to indicate -- the end of functions defined within the quantified expression. CreateCellKind (CellName => Quantifier_Marker, VCGHeap => VCG_Heap, KindOfCell => Cell_Storage.Quantifier); CStacks.Push (Heap => VCG_Heap, CellName => Quantifier_Marker, S => Function_Defs); -- stack for use on the way up CStacks.Push (VCG_Heap, Var_Decl, E_Stack); end Down_Process_Quantifier; ------------------------------------------------------------------- procedure Up_Process_Quantifier (Node : in STree.SyntaxNode; Current_Unit : out Dictionary.Symbol; E_Stack : in out CStacks.Stack; Function_Defs : in out CStacks.Stack; VCG_Heap : in out Cells.Heap_Record; L_Scope : in out Dictionary.Scopes) --# global in Dictionary.Dict; --# in STree.Table; --# in out Statistics.TableUsage; --# derives Current_Unit, --# L_Scope from Dictionary.Dict, --# L_Scope & --# E_Stack, --# Function_Defs, --# Statistics.TableUsage, --# VCG_Heap from *, --# Dictionary.Dict, --# E_Stack, --# Function_Defs, --# Node, --# STree.Table, --# VCG_Heap; is Range_Found : Boolean; Range_Node : STree.SyntaxNode; Quantifier_Kind : SP_Symbols.SP_Symbol; Quantifier_Sym : Dictionary.Symbol; Quantifier_Type : Dictionary.Symbol; Predicate : Cells.Cell; Declaration : Cells.Cell; Range_Data : Cells.Cell; Left_Side_Of_Range : Cells.Cell; Right_Side_Of_Range : Cells.Cell; Left_Op : Cells.Cell; Right_Op : Cells.Cell; Quantifier_Ident_Cell : Cells.Cell; Quantified_Expression : Cells.Cell; Quantifier_Function_Defs : Cells.Cell; Function_Defs_Join : Cells.Cell; Comma_Cell : Cells.Cell; Implies_Cell : Cells.Cell; Function_Def_Uses_Quantifier : Boolean; Function_Defs_Present : Boolean; Temp_Function_Stack : CStacks.Stack; begin -- TODO: Init here to shut up the flow analyser. Is this OK? 
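-- Added overview note (editorial commentary, not from the original source):
-- on exit this procedure replaces the predicate, optional range and declaration
-- currently on the expression stack with a single DAG of the shape
--    <quantifier> (<ident : type> , <range constraint> -> <predicate>)
-- where "->" is an implication for for_all and a conjunction for for_some, and
-- any function definitions that mention the quantified variable are interposed
-- between the range constraint and the predicate.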
Quantifier_Ident_Cell := Cells.Null_Cell; Quantifier_Function_Defs := Cells.Null_Cell; Range_Node := STree.Next_Sibling (Current_Node => STree.Next_Sibling (Current_Node => STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node)))); Range_Found := STree.Syntax_Node_Type (Node => Range_Node) = SP_Symbols.annotation_arange; Quantifier_Kind := STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => Node)); -- TOS is the DAG representing the predicate CStacks.PopOff (VCG_Heap, E_Stack, Predicate); -- 2nd TOS is range expression if its there if Range_Found then -- range is either an attribute or explicit range if STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => Range_Node)) = SP_Symbols.annotation_attribute then -- range is defined by a range attribute on top of stack -- this has already been transformed by UpProcessAttribute -- which has left Index'First .. Index'Last on stack Left_Side_Of_Range := LeftPtr (VCG_Heap, CStacks.Top (VCG_Heap, E_Stack)); Right_Side_Of_Range := RightPtr (VCG_Heap, CStacks.Top (VCG_Heap, E_Stack)); CStacks.Pop (VCG_Heap, E_Stack); -- discard .. else -- range is defined by a pair of simple expressions; CStacks.PopOff (VCG_Heap, E_Stack, Right_Side_Of_Range); CStacks.PopOff (VCG_Heap, E_Stack, Left_Side_Of_Range); end if; -- we now have the bounds of the range which we just need to assemble into a pair -- of bounds checks -- first get the Declaration data CStacks.PopOff (VCG_Heap, E_Stack, Declaration); Quantifier_Ident_Cell := LeftPtr (VCG_Heap, Declaration); -- create left sub-tree CreateOpCell (Left_Op, VCG_Heap, SP_Symbols.greater_or_equal); SetLeftArgument (Left_Op, Quantifier_Ident_Cell, VCG_Heap); SetRightArgument (Left_Op, Left_Side_Of_Range, VCG_Heap); -- create right subtree CreateOpCell (Right_Op, VCG_Heap, SP_Symbols.less_or_equal); SetLeftArgument (Right_Op, Quantifier_Ident_Cell, VCG_Heap); SetRightArgument (Right_Op, Right_Side_Of_Range, VCG_Heap); -- and them together to for Range_Data DAG CreateOpCell (Range_Data, VCG_Heap, SP_Symbols.RWand); SetLeftArgument (Range_Data, Left_Op, VCG_Heap); SetRightArgument (Range_Data, Right_Op, VCG_Heap); else -- no explicit range CStacks.PopOff (VCG_Heap, E_Stack, Declaration); -- create a range DAG here -- Declaration is the colon in "ident : type" Quantifier_Sym := Cells.Get_Symbol_Value (VCG_Heap, LeftPtr (VCG_Heap, Declaration)); Quantifier_Type := Dictionary.GetType (Quantifier_Sym); -- For Boolean, we _mustn't_ try to create a range constraint, since Boolean -- isn't ordered in FDL. Sem-compunit-wf_arange forbids the use -- of explicit ranges with Boolean, so the only possibility here is full-range -- Boolean. -- -- We can't emit (Sym >= False and Sym <= True), for the reason stated above. -- We really know that (Sym or not Sym), but that's just "True"! if Dictionary.TypeIsBoolean (Quantifier_Type) then CreateTrueCell (VCG_Heap, Range_Data); else Type_Constraint.Process_Discrete (The_Type => Quantifier_Type, The_Expression => LeftPtr (VCG_Heap, Declaration), The_Constraint => Range_Data, VCG_Heap => VCG_Heap); end if; end if; -- Check whether any functions have been defined from called -- functions within the quantified expression. 
If there are none the -- TOS will be a quantifier marker placed on the stack by -- Down_Process_Quantifier if Cells.Get_Kind (Heap => VCG_Heap, CellName => CStacks.Top (VCG_Heap, Function_Defs)) /= Cell_Storage.Quantifier then CStacks.CreateStack (Temp_Function_Stack); -- Transfer all function defs down to the quantifier marker onto the -- temporary stack loop CStacks.Push (Heap => VCG_Heap, CellName => CStacks.Top (VCG_Heap, Function_Defs), S => Temp_Function_Stack); CStacks.Pop (VCG_Heap, Function_Defs); exit when Cells.Get_Kind (Heap => VCG_Heap, CellName => CStacks.Top (VCG_Heap, Function_Defs)) = Cell_Storage.Quantifier; end loop; -- remove the quantifier marker from the function stack CStacks.Pop (VCG_Heap, Function_Defs); -- The function defs which do not use the current quantified variable -- should be pushed back on to the Function_Defs stack. -- The functions which do use the quantified variable must be placed -- within the quantified expression and conjoined within -- Quantifier_Function_Defs Function_Defs_Present := False; while not (CStacks.IsEmpty (Temp_Function_Stack) or Function_Defs_Present) loop ContainsQuantIdent (DataElem => CStacks.Top (VCG_Heap, Temp_Function_Stack), QuantIdent => Quantifier_Ident_Cell, VCGHeap => VCG_Heap, Result => Function_Def_Uses_Quantifier); if Function_Def_Uses_Quantifier then -- The function def on the top of the stack uses the quantifier Function_Defs_Present := True; else -- The function def does not use the quantifier -- Transfer from temp function stack to function def stack CStacks.Push (Heap => VCG_Heap, CellName => CStacks.Top (VCG_Heap, Temp_Function_Stack), S => Function_Defs); CStacks.Pop (VCG_Heap, Temp_Function_Stack); end if; end loop; if Function_Defs_Present then -- Get the first function def using the quantifier from -- the top of the stack. CStacks.PopOff (Heap => VCG_Heap, S => Temp_Function_Stack, C => Quantifier_Function_Defs); while not CStacks.IsEmpty (Temp_Function_Stack) loop ContainsQuantIdent (DataElem => CStacks.Top (VCG_Heap, Temp_Function_Stack), QuantIdent => Quantifier_Ident_Cell, VCGHeap => VCG_Heap, Result => Function_Def_Uses_Quantifier); if Function_Def_Uses_Quantifier then -- If the current TOS is relevant to us, we join -- it onto the cell containing the other relevant -- instantiations. Cells.Utility.Conjoin (VCG_Heap => VCG_Heap, New_Term => CStacks.Top (VCG_Heap, Temp_Function_Stack), Conjunct => Quantifier_Function_Defs); CStacks.Pop (VCG_Heap, Temp_Function_Stack); else -- The function def does not use the quantifier; -- transfer from temp function stack to function -- def stack. CStacks.Push (Heap => VCG_Heap, CellName => CStacks.Top (VCG_Heap, Temp_Function_Stack), S => Function_Defs); CStacks.Pop (VCG_Heap, Temp_Function_Stack); end if; end loop; -- Quantifier_Function_Defs contains all function defs -- that use the quantifier, conjoined. end if; else Function_Defs_Present := False; -- Pop off the quantifier marker from the function_defs stack, -- as the parsing of the quantified expression is complete.
CStacks.Pop (VCG_Heap, Function_Defs); end if; -- now assemble the quantifier expression CreateOpCell (Quantified_Expression, VCG_Heap, Quantifier_Kind); CreateOpCell (Comma_Cell, VCG_Heap, SP_Symbols.comma); if Quantifier_Kind = SP_Symbols.RWforall then CreateOpCell (Implies_Cell, VCG_Heap, SP_Symbols.implies); else -- must be for_some CreateOpCell (Implies_Cell, VCG_Heap, SP_Symbols.RWand); end if; SetLeftArgument (Implies_Cell, Range_Data, VCG_Heap); -- If there are function defs present that use the quantifier -- then these need to be placed within the quantifier as follows, -- where -> is replaced by and for existential quantification: -- range -> function defs -> predicate if Function_Defs_Present then CreateOpCell (Function_Defs_Join, VCG_Heap, SP_Symbols.implies); SetLeftArgument (Function_Defs_Join, Quantifier_Function_Defs, VCG_Heap); SetRightArgument (Function_Defs_Join, Predicate, VCG_Heap); SetRightArgument (Implies_Cell, Function_Defs_Join, VCG_Heap); else -- Otherwise the format is: -- range -> predicate SetRightArgument (Implies_Cell, Predicate, VCG_Heap); end if; SetLeftArgument (Comma_Cell, Declaration, VCG_Heap); SetRightArgument (Comma_Cell, Implies_Cell, VCG_Heap); SetRightArgument (Quantified_Expression, Comma_Cell, VCG_Heap); CStacks.Push (VCG_Heap, Quantified_Expression, E_Stack); -- leave local scope of quantifier L_Scope := Dictionary.GetEnclosingScope (L_Scope); Current_Unit := Dictionary.GetRegion (L_Scope); end Up_Process_Quantifier; begin -- Build_Annotation_Expression -- The set of called functions is initialized to a null set. Symbol_Set.Initialise (Called_Functions); -- Initially it is not possible to be within a nested function call; -- there is no calling function. Calling_Function := Cells.Null_Cell; Done := False; CStacks.CreateStack (E_Stack); -- scope may change locally in loops but will always be back to the -- original scope on exit from the procedure. In all calls below -- L_Scope replaces Scope. L_Scope := Get_Generic_Scope (Exp_Node => Exp_Node, Instantiated_Subprogram => Instantiated_Subprogram, Scope => Scope); Start_Node := Exp_Node; Next_Node := Start_Node; Last_Node := Next_Node; Current_Instantiation := Instantiated_Subprogram; -- At the outermost level the current unit is not known within -- Build_Annotation_Expression. Only the scope is known. Current_Unit := Dictionary.NullSymbol; Implicit_Var := Dictionary.NullSymbol; Direction := Down_Loop; -- Debug.PrintScope ("L_Scope is ", L_Scope); -- Debug.PrintScope ("Calling_Scope is ", Calling_Scope); -- Debug.PrintBool ("Force_Abstract: ", Force_Abstract); loop -- As SPARK 95 does not support recursion the syntax tree is traversed using -- a down and an up loop. The up-loop is wholly contained within the -- down-loop. The syntax tree nodes maintain a reference to their parent -- and so a stack is not required to traverse the tree using the down and -- up loops. However, the purpose of traversing the tree is to build a DAG -- representation of the syntax tree. The building of the DAG does involve -- a stack, the expression stack (E_Stack). During the down part of the -- traversal structures are pushed on to the stack and if necessary these -- are completed during the up-loop. The up-loop also uses the E_Stack -- directly to build expression DAGs without using recursion.
loop if Direction = Down_Loop then --------------------------------down loop -- Start_Node is the syntax node of the whole expression -- Next_Node is the child node of the current node which is being -- visited in the syntax tree -- Last_Node is the node which is currently being visited. -- -- Initially Next_Node = Start_Node -- -- The down-loop includes the up-loop. -- Starting at the Start_Node the down loop traverses the syntax tree -- by taking successive child nodes. -- At the start of each iteration of the loop Last_Node is set equal -- to Next_Node and its Node_Type determined and used as a case -- selector. The Node_Type gives the grammar production that the syntax -- node represents. The down loop processes certain grammar productions -- but others it may ignore, just moving to the child node of the -- currently visited node in the syntax tree. The processing of a -- production involves building a DAG on the expression stack, E_Stack. -- The down-loop processing is suspended when a null syntax node is -- encountered. The null syntax node may be encountered purely by -- traversing the syntax tree and reaching a leaf or by being forced -- by processing a production of the grammar of annotation expressions. -- In either case when a null syntax node is encountered the up-loop -- is entered provided that the Last_Node is not equal to the Start_Node -- (if it is equal we are back at the beginning of the expression and cannot -- go back up any further). -- -- The up-loop -- At the start of each loop there are loop exit checks: -- a. if there is a sibling of the node which has not been visited -- then the up-loop is exited. -- b. Next_Node is set to the parent of the currently visited node -- and the Last_Node set to the new value of Next_Node. -- If the new value of Last_Node is a null node then the up-loop -- is exited. -- Once the up-loop proper is entered a selection is made on the type -- of the currently visited syntax tree node referenced by Last_Node. -- A number of grammar productions are processed on the up-loop and use -- the E_Stack to build a DAG. Other productions are not processed. -- At the end of the up-loop a check is made to see if the currently -- visited node (here represented by Next_Node) references the -- initial syntax node of the expression, Start_Node. -- If it does we cannot progress up any further and the up-loop is exited; -- otherwise the up-loop just iterates, choosing the next sibling or -- the parent of the currently visited node. -- Ultimately the down-loop and therefore the up-loop will be exited -- at the end of the down-loop when the next node to be processed is -- null or references the initial syntax node of the expression, Start_Node.
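-- Illustrative sketch only (hypothetical annotation expression X + 1, not
-- taken from any particular unit): the down-loop descends through the
-- expression productions to the leaves, pushing a cell for the identifier X
-- and a manifest constant cell for 1 on to E_Stack; when the up-loop later
-- revisits the enclosing annotation_simple_expression node,
-- Process_Simple_Expression combines the two operand cells into a single
-- "+" operator cell, so the DAG for X + 1 is left on the top of E_Stack by
-- the time the traversal returns to Start_Node.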
Last_Node := Next_Node; Node_Type := STree.Syntax_Node_Type (Node => Next_Node); case Node_Type is when SP_Symbols.attribute_ident => Down_Process_Attribute_Ident (Node => Next_Node, E_Stack => E_Stack, VCG_Heap => VCG_Heap); Next_Node := STree.NullNode; when SP_Symbols.character_literal | SP_Symbols.string_literal => -- ASSUME Last_Node = character_literal OR string_literal CreateManifestConstCell (DAG_Cell, VCG_Heap, STree.Node_Lex_String (Node => Next_Node)); CStacks.Push (VCG_Heap, DAG_Cell, E_Stack); Next_Node := STree.NullNode; when SP_Symbols.numeric_literal => -- ASSUME Last_Node = numeric_literal CreateManifestConstCell (DAG_Cell, VCG_Heap, STree.Node_Lex_String (Node => STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => Next_Node)))); CStacks.Push (VCG_Heap, DAG_Cell, E_Stack); Next_Node := STree.NullNode; when SP_Symbols.annotation_selector => -- ASSUME Last_Node = annotation_selector -- prune at selector nodes so that only left most idents found Next_Node := STree.NullNode; when SP_Symbols.annotation_simple_name => -- ASSUME Last_Node = annotation_simple_name if STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => Last_Node)) = SP_Symbols.annotation_named_argument_association then -- do not want look at parameter or field identifier Next_Node := STree.NullNode; else Next_Node := STree.Child_Node (Current_Node => Next_Node); end if; when SP_Symbols.identifier => -- ASSUME Last_Node = identifier --# accept F, 10, Next_Node, "Ineffective assignment OK - Process_Identifier does use", --# "the intial value of Next_Node"; Process_Identifier (L_Scope => L_Scope, Calling_Scope => Calling_Scope, Force_Abstract => Force_Abstract, Loop_Stack => Loop_Stack, Calling_Function => Calling_Function, Current_Unit => Current_Unit, Implicit_Var => Implicit_Var, Current_Instantiation => Current_Instantiation, Called_Functions => Called_Functions, Start_Node => Start_Node, Next_Node => Next_Node, Last_Node => Last_Node, E_Stack => E_Stack, VCG_Heap => VCG_Heap); Next_Node := STree.NullNode; --# end accept; when SP_Symbols.annotation_aggregate => -- ASSUME Last_Node = annotation_aggregate DownProcessAggregate (SP_Symbols.annotation_qualified_expression, VCG_Heap, Next_Node, E_Stack); when SP_Symbols.annotation_aggregate_choice_rep => -- ASSUME Last_Node = annotation_aggregate_choice_rep DownProcessAggregateChoiceRep (Last_Node, L_Scope, VCG_Heap, E_Stack, Next_Node); when SP_Symbols.record_component_selector_name => -- ASSUME Last_Node = record_component_selector_name DownProcessRecordComponentSelectorName (Last_Node, L_Scope, VCG_Heap, E_Stack, Next_Node); when SP_Symbols.store => -- ASSUME Last_Node = store Down_Process_Store (L_Scope => L_Scope, E_Stack => E_Stack, VCG_Heap => VCG_Heap); Next_Node := STree.Child_Node (Current_Node => Next_Node); when SP_Symbols.store_list => -- ASSUME Last_Node = store_list Down_Process_Store_List (Node => Last_Node, E_Stack => E_Stack, VCG_Heap => VCG_Heap, Next_Node => Next_Node); when SP_Symbols.quantified_expression => -- ASSUME Last_Node = quantified_expression Down_Process_Quantifier (Node => Last_Node, Current_Unit => Current_Unit, E_Stack => E_Stack, Function_Defs => Function_Defs, VCG_Heap => VCG_Heap, L_Scope => L_Scope, Next_Node => Next_Node); when others => Next_Node := STree.Child_Node (Current_Node => Next_Node); end case; end if; -- Down_Loop -------------------------------------------------up loop---------- if Direction = Up_Loop or else (Next_Node = STree.NullNode and then Last_Node /= Start_Node) then Direction := 
Up_Loop; loop Next_Node := STree.Next_Sibling (Current_Node => Last_Node); if Next_Node /= STree.NullNode then Direction := Down_Loop; exit; end if; Next_Node := STree.Parent_Node (Current_Node => Last_Node); Last_Node := Next_Node; if Last_Node = STree.NullNode then Direction := Down_Loop; exit; end if; case STree.Syntax_Node_Type (Node => Last_Node) is when SP_Symbols.annotation_expression | SP_Symbols.annotation_expression_rep1 | SP_Symbols.annotation_expression_rep2 | SP_Symbols.annotation_expression_rep3 | SP_Symbols.annotation_expression_rep4 | SP_Symbols.annotation_expression_rep5 | SP_Symbols.annotation_expression_rep6 | SP_Symbols.annotation_expression_rep7 => -- ASSUME Last_Node = annotation_expression_rep1 OR annotation_expression_rep2 OR annotation_expression_rep3 OR -- annotation_expression_rep4 OR annotation_expression_rep5 OR annotation_expression_rep6 OR -- annotation_expression_rep7 OR annotation_expression Process_Expression (Node => Last_Node, E_Stack => E_Stack, VCG_Heap => VCG_Heap); when SP_Symbols.annotation_simple_expression => -- ASSUME Last_Node = annotation_simple_expression Process_Simple_Expression (Node => Last_Node, E_Stack => E_Stack, VCG_Heap => VCG_Heap); when SP_Symbols.annotation_simple_expression_opt => -- ASSUME Last_Node = annotation_simple_expression_opt Process_Simple_Expression_Opt (Node => Last_Node, E_Stack => E_Stack, VCG_Heap => VCG_Heap); when SP_Symbols.annotation_term => -- ASSUME Last_Node = annotation_term Process_Simple_Expression (Node => Last_Node, E_Stack => E_Stack, VCG_Heap => VCG_Heap); when SP_Symbols.annotation_factor => -- ASSUME Last_Node = annotation_factor Process_Factor (Node => Last_Node, E_Stack => E_Stack, VCG_Heap => VCG_Heap); when SP_Symbols.annotation_relation => -- ASSUME Last_Node = annotation_relation Process_Relation (Node => Last_Node, E_Stack => E_Stack, VCG_Heap => VCG_Heap); when SP_Symbols.annotation_selected_component => -- ASSUME Last_Node = annotation_selected_component Process_Selected_Component (Direction => Direction, L_Scope => L_Scope, Calling_Scope => Calling_Scope, Force_Abstract => Force_Abstract, Called_Functions => Called_Functions, Calling_Function => Calling_Function, Current_Instantiation => Current_Instantiation, Current_Unit => Current_Unit, Implicit_Var => Implicit_Var, Start_Node => Start_Node, Next_Node => Next_Node, Last_Node => Last_Node, Loop_Stack => Loop_Stack, E_Stack => E_Stack, VCG_Heap => VCG_Heap); when SP_Symbols.annotation_attribute_designator => -- ASSUME Last_Node = annotation_attribute_designator Up_Process_Attribute_Designator (Node => Last_Node, E_Stack => E_Stack, VCG_Heap => VCG_Heap); when SP_Symbols.annotation_positional_argument_association => -- ASSUME Last_Node = annotation_positional_argument_association Process_Positional_Argument_Association (Node => Last_Node, E_Stack => E_Stack, VCG_Heap => VCG_Heap); when SP_Symbols.annotation_named_argument_association => -- ASSUME Last_Node = annotation_named_argument_association Process_Named_Argument_Association (Node => Last_Node, E_Stack => E_Stack, VCG_Heap => VCG_Heap); when SP_Symbols.annotation_name_argument_list => -- ASSUME Last_Node = annotation_name_argument_list Process_Name_Argument_List (Direction => Direction, Current_Instantiation => Current_Instantiation, Current_Unit => Current_Unit, Implicit_Var => Implicit_Var, Called_Functions => Called_Functions, Calling_Function => Calling_Function, Start_Node => Start_Node, Next_Node => Next_Node, Last_Node => Last_Node, E_Stack => E_Stack, Scope => L_Scope, 
Calling_Scope => Calling_Scope, Force_Abstract => Force_Abstract, VCG_Heap => VCG_Heap); when SP_Symbols.annotation_ancestor_part => -- ASSUME Last_Node = annotation_ancestor_part ProcessAncestorPart (Last_Node, VCG_Heap, E_Stack); when SP_Symbols.annotation_aggregate_choice => -- ASSUME Last_Node = annotation_aggregate_choice Up_Process_Aggregate_Choice (Node => Last_Node, E_Stack => E_Stack, VCG_Heap => VCG_Heap); when SP_Symbols.annotation_aggregate_choice_rep => -- ASSUME Last_Node = annotation_aggregate_choice_rep UpProcessAggregateChoiceRep (Last_Node, VCG_Heap, E_Stack); when SP_Symbols.annotation_named_association_rep => -- ASSUME Last_Node = annotation_named_association_rep Up_Process_Named_Association_Rep (Node => Last_Node, E_Stack => E_Stack, VCG_Heap => VCG_Heap); when SP_Symbols.annotation_named_record_component_association => -- ASSUME Last_Node = annotation_named_record_component_association Up_Process_Named_Record_Component_Association (E_Stack => E_Stack, VCG_Heap => VCG_Heap); when SP_Symbols.annotation_aggregate_or_expression => -- ASSUME Last_Node = annotation_aggregate_or_expression Up_Process_Aggregate_Or_Expression (Node => Last_Node, E_Stack => E_Stack, VCG_Heap => VCG_Heap); when SP_Symbols.annotation_positional_record_component_association => -- ASSUME Last_Node = annotation_positional_record_component_association Up_Process_Positional_Record_Component_Association (E_Stack => E_Stack, VCG_Heap => VCG_Heap); when SP_Symbols.annotation_component_association => -- ASSUME Last_Node = annotation_component_association Up_Process_Component_Association (Node => Last_Node, E_Stack => E_Stack, VCG_Heap => VCG_Heap); when SP_Symbols.annotation_aggregate => -- ASSUME Last_Node = annotation_aggregate Up_Process_Aggregate (E_Stack => E_Stack, VCG_Heap => VCG_Heap); when SP_Symbols.annotation_extension_aggregate => -- ASSUME Last_Node = annotation_extension_aggregate UpProcessExtensionAggregate (VCG_Heap, E_Stack); when SP_Symbols.annotation_qualified_expression => -- ASSUME Last_Node = annotation_qualified_expression Model_Qualified_Expression (Node => Last_Node, E_Stack => E_Stack, VCG_Heap => VCG_Heap); when SP_Symbols.store => -- ASSUME Last_Node = store Up_Process_Store (Node => Last_Node, L_Scope => L_Scope, E_Stack => E_Stack, VCG_Heap => VCG_Heap); when SP_Symbols.store_list => -- ASSUME Last_Node = store_list Up_Process_Store_List (E_Stack => E_Stack, VCG_Heap => VCG_Heap); when SP_Symbols.quantified_expression => -- ASSUME Last_Node = quantified_expression Up_Process_Quantifier (Node => Last_Node, Current_Unit => Current_Unit, E_Stack => E_Stack, Function_Defs => Function_Defs, VCG_Heap => VCG_Heap, L_Scope => L_Scope); when others => null; end case; if Next_Node = Start_Node then Direction := Down_Loop; exit; end if; end loop; -- up end if; exit when Next_Node = STree.NullNode or else Next_Node = Start_Node; end loop; -- down -- DAG Root contains the DAG of the annotation expression CStacks.PopOff (VCG_Heap, E_Stack, DAG_Root); if Is_Generic_Constraint (Exp_Node => Start_Node, Instantiated_Subprogram => Instantiated_Subprogram) then Instantiate_Parameters (Constraint => DAG_Root, Instantiated_Subprogram => Instantiated_Subprogram, VCG_Heap => VCG_Heap); end if; if not CStacks.IsEmpty (E_Stack) then case Cells.Get_Kind (Heap => VCG_Heap, CellName => CStacks.Top (Heap => VCG_Heap, S => E_Stack)) is when Cell_Storage.Proof_Function_Syntax_Node => -- This sort of Cell indicates the start of processing a -- called function with a return anno and is used to synchronize -- 
the stack with the new context variables and initiate the -- parser with a new context. -- DAG_Root contains a default return anno expression -- The Proof_Function_Syntax_Node is no longer required; -- pop it off. CStacks.Pop (VCG_Heap, E_Stack); -- TOS is Proof_Function_Obtain_Return -- Obtain the root syntax tree node for return anno; -- it has been saved in the RHS of the return anno marker -- which is on the top of the stack. Start_Node := STree.RefToNode (ExaminerConstants.RefType (Cells.Get_Natural_Value (Heap => VCG_Heap, CellName => Cells.Get_B_Ptr (VCG_Heap, CStacks.Top (VCG_Heap, E_Stack))))); if Start_Node /= STree.NullNode then -- The called function has a return anno and Start_Node -- references the root node of its syntax tree. -- Set up the scope to use in parsing based on the -- Current_Unit and Implicit_Var symbols if Implicit_Var = Dictionary.NullSymbol then -- An explicit return anno L_Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Current_Unit); -- Adjust for instantiation L_Scope := Get_Generic_Scope (Exp_Node => Exp_Node, Instantiated_Subprogram => Instantiated_Subprogram, Scope => L_Scope); else -- An implicit return anno. The Implicit_Var gives -- the correct scope whether it is from the generic -- declaration or the instantiation. L_Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Implicit_Var); end if; -- Set up the context variables to parse the return anno Last_Node := Start_Node; Next_Node := Start_Node; else -- If the return anno node is null then the return expression -- is taken to be the called function and the definition becomes: -- <precondition> -> <function call> = <function call>. -- The DAG_Root just popped off the stack contains the -- <function call>. -- Push it back on to the stack to become the DAG -- of the return anno for the next cycle CStacks.Push (Heap => VCG_Heap, CellName => DAG_Root, S => E_Stack); -- TOS is <function call> => Proof_Function_Obtain_Return -- Ensure that parse loops are exited immediately -- so as to leave the <function call> as the parsed DAG -- for the next cycle. Last_Node := STree.NullNode; Next_Node := STree.NullNode; end if; -- set Direction to Down_Loop to enter parse loop afresh Direction := Down_Loop; when Cell_Storage.Proof_Function_Obtain_Return => -- DAG_Root contains the return anno expression DAG -- Pop off Proof_Function_Obtain_Return as it is no longer required CStacks.Pop (VCG_Heap, E_Stack); -- TOS is now "=" place holder -- Determine whether the return anno uses an implicit variable. -- It is a null symbol if the function does not have an -- implicit return variable. if Implicit_Var /= Dictionary.NullSymbol then -- Substitute the implicit variable by the function call Substitutions.Substitute_Implicit_Vars (Proof_Function => Cells.Get_A_Ptr (Heap => VCG_Heap, CellName => CStacks.Top (VCG_Heap, E_Stack)), Implicit_Var => Implicit_Var, Implicit_Return_Expr => DAG_Root, VCG_Heap => VCG_Heap); end if; -- Substitute actual parameters from call in return annotation DAG -- requires the function call from the TOS Substitutions.Substitute_Parameters (Called_Function => Cells.Get_A_Ptr (Heap => VCG_Heap, CellName => CStacks.Top (VCG_Heap, E_Stack)), Constraint => DAG_Root, VCG_Heap => VCG_Heap); if Implicit_Var /= Dictionary.NullSymbol then -- For functions with implicit returns we just -- replace the f(...) = foo node with foo itself. CStacks.Pop (VCG_Heap, E_Stack); CStacks.Push (VCG_Heap, DAG_Root, E_Stack); else -- Set RHS of =/<-> op now on top of stack to -- return annotation DAG.
SetRightArgument (OpCell => CStacks.Top (VCG_Heap, E_Stack), Argument => DAG_Root, VCGHeap => VCG_Heap); end if; -- pop off the completed =/<-> op CStacks.PopOff (Heap => VCG_Heap, S => E_Stack, C => Function_Definition); -- TOS is precondition marker -- temporarily pop off precondition marker to expose -> at TOS CStacks.PopOff (Heap => VCG_Heap, S => E_Stack, C => Precondition); -- TOS is the "->" place holder -- set RHS of -> op to completed =/<-> op. SetRightArgument (OpCell => CStacks.Top (VCG_Heap, E_Stack), Argument => Function_Definition, VCGHeap => VCG_Heap); -- push precondition marker back on to stack. CStacks.Push (Heap => VCG_Heap, CellName => Precondition, S => E_Stack); -- TOS is precondition marker -- Obtain the root syntax tree node for the precondition; -- it has been saved in the RHS of the precondition marker Start_Node := STree.RefToNode (ExaminerConstants.RefType (Cells.Get_Natural_Value (Heap => VCG_Heap, CellName => Cells.Get_B_Ptr (VCG_Heap, Precondition)))); -- If the precondition syntax tree root node is null -- then the called function does not have a precondition. -- An assumed precondition of True is pushed on to the stack if Start_Node = STree.NullNode then CreateTrueCell (VCG_Heap, True_Cell); CStacks.Push (Heap => VCG_Heap, CellName => True_Cell, S => E_Stack); -- TOS is True => precondition marker end if; -- Set up the correct scope to parse the precondition based -- on the called function symbol L_Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Current_Unit); -- Adjust for instantiation L_Scope := Get_Generic_Scope (Exp_Node => Exp_Node, Instantiated_Subprogram => Instantiated_Subprogram, Scope => L_Scope); -- The start node contains the root node of the syntax tree of -- the precondition. -- If it is null then the top of the stack is a True cell -- and both the down and the up loop will exit immediately -- when they are entered afresh leaving the True cell on top. -- If the precondition root syntax tree node is not null -- then as the parser loops are entered afresh, because -- the Direction is Down_Loop, the precondition is parsed -- and when complete the precondition DAG is on the top of -- the stack. Last_Node := Start_Node; Next_Node := Start_Node; Direction := Down_Loop; when Cell_Storage.Proof_Function_Obtain_Precondition => -- DAG_Root is precondition DAG -- Pop off the precondition marker as it is no longer required CStacks.PopOff (Heap => VCG_Heap, S => E_Stack, C => Precondition); -- TOS is the "->" place holder, i.e. <precondition> -> <function call> = <return anno> -- We need to get to the argument check, which is -- stashed underneath the current TOS. CStacks.PopOff (VCG_Heap, E_Stack, Tmp_Cell_2); CStacks.PopOff (VCG_Heap, E_Stack, Tmp_Cell_1); -- This is the argument typecheck. CStacks.Push (VCG_Heap, Tmp_Cell_2, E_Stack); -- Substitute actual parameters from call in precondition DAG -- requires the function call from the RHS of the -> op on TOS Substitutions.Substitute_Parameters (Called_Function => LeftPtr (VCG_Heap, Precondition), Constraint => DAG_Root, VCG_Heap => VCG_Heap); -- Tmp_Cell_1 is the argument check. -- DAG_Root is the precondition.
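-- Illustrative sketch only (hypothetical call F (A + 1) whose formal X has
-- precondition X > 0): Tmp_Cell_1 is the check that A + 1 lies in the
-- formal's subtype, and DAG_Root, after the substitution above, is
-- A + 1 > 0, so the conjunction built next is (argument check /\ A + 1 > 0).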
Cells.Utility.Create_And (VCG_Heap => VCG_Heap, Left => Tmp_Cell_1, Right => DAG_Root, Conjunct => Tmp_Parent); Cells.Utility.Simplify (VCG_Heap, Tmp_Parent); -- Tmp_Parent is now (argument_check /\ precondition) CStacks.PopOff (VCG_Heap, E_Stack, Tmp_Cell_1); SetLeftArgument (OpCell => Tmp_Cell_1, Argument => Tmp_Parent, VCGHeap => VCG_Heap); Cells.Utility.Simplify (VCG_Heap, Tmp_Cell_1); CStacks.Push (VCG_Heap, Tmp_Cell_1, E_Stack); -- The "->" op on TOS now has its LHS argument set to the -- (argument_check /\ precondition) DAG -- Ensure that parse loops are exited immediately -- by setting the context variables to a null node -- and the direction to Down_Loop -- so as to leave the completed guarded function definition -- on the top of the stack for the next cycle. Start_Node := STree.NullNode; Last_Node := STree.NullNode; Next_Node := STree.NullNode; Direction := Down_Loop; when Cell_Storage.Function_Call_In_Proof_Context => -- DAG_Root is guarded function definition DAG -- TOS is Function_Call_In_Proof_Context -- We need to get to the return assumption, which is -- stashed underneath the current TOS. CStacks.PopOff (VCG_Heap, E_Stack, Tmp_Cell_2); CStacks.PopOff (VCG_Heap, E_Stack, Tmp_Cell_1); -- This is the return assumption. CStacks.Push (VCG_Heap, Tmp_Cell_2, E_Stack); -- DAG_Root is (typecheck /\ pre) -> return -- Tmp_Cell_1 is the return assumption if Cells.Utility.Is_True (VCG_Heap, Tmp_Cell_1) then -- We have a trivial true return assumption; we can -- skip this as it adds nothing. Tmp_Parent := DAG_Root; Cells.Dispose_Of_Cell (VCG_Heap, Tmp_Cell_1); else -- Otherwise we conjoin the return assumption onto -- DAG_Root. CreateOpCell (Tmp_Parent, VCG_Heap, SP_Symbols.RWand); SetLeftArgument (Tmp_Parent, DAG_Root, VCG_Heap); SetRightArgument (Tmp_Parent, Tmp_Cell_1, VCG_Heap); end if; -- Tmp_Parent is now ((typecheck /\ pre) -> return) /\ return_assumption -- Push the full instantiation on to the Function_Defs -- stack. CStacks.Push (Heap => VCG_Heap, CellName => Tmp_Parent, S => Function_Defs); -- Remove function from set of called functions. The called -- function symbol is in the LHS of the Function_Call_In_Proof_Context -- marker. Symbol_Set.Remove (The_Set => Called_Functions, Sym => Cells.Get_Symbol_Value (Heap => VCG_Heap, CellName => Cells.Get_A_Ptr (VCG_Heap, CStacks.Top (VCG_Heap, E_Stack)))); -- Restore context variables to those before encountering the -- called function. These are saved in the RHS -- of the Function_Call_In_Proof_Context marker Load_Saved_Context_DAG (Scope => L_Scope, Direction => Direction, Instantiated_Subprogram => Current_Instantiation, Current_Unit => Current_Unit, Implicit_Var => Implicit_Var, Start_Node => Start_Node, Last_Node => Last_Node, Next_Node => Next_Node, VCG_Heap => VCG_Heap, Argument_List => RightPtr (VCG_Heap, CStacks.Top (VCG_Heap, E_Stack))); -- Pop off Function_Call_In_Proof_Context CStacks.Pop (VCG_Heap, E_Stack); -- Restore the previous Calling_Function CStacks.PopOff (Heap => VCG_Heap, S => E_Stack, C => Calling_Function); -- Determine whether the parsing is complete based on the exit -- conditions for the parse loops.
Done := Next_Node = Start_Node or else (Direction = Down_Loop and Next_Node = STree.NullNode); when others => Done := True; end case; else Done := True; end if; exit when Done; end loop; end Build_Annotation_Expression; spark-2012.0.deb/examiner/dictionary-add_use_type_reference.adb0000644000175000017500000002416611753202336023560 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Dictionary) procedure Add_Use_Type_Reference (The_Visibility : in Visibility; The_Unit : in Symbol; Type_Mark : in RawDict.Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location) is The_Generic_Unit : RawDict.Generic_Unit_Info_Ref; The_Use_Type_Clause : RawDict.Use_Type_Clause_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Use_Type_Clause (Type_Mark : in RawDict.Type_Info_Ref; Declaration : in Location) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Declaration, --# Dict, --# LexTokenManager.State, --# Type_Mark; is begin if SPARK_IO.Is_Open (Dict.TemporaryFile) then Write_String (Dict.TemporaryFile, "use type clause of "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Type_Symbol (Type_Mark)); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => Declaration); Write_Line (Dict.TemporaryFile, " ;"); end if; end Add_Use_Type_Clause; -------------------------------------------------------------------------------- procedure Add_Package_Visible_Use_Type_Reference (The_Use_Type_Clause : in RawDict.Use_Type_Clause_Info_Ref; The_Package : in RawDict.Package_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Package, --# The_Use_Type_Clause; is begin RawDict.Set_Next_Use_Type_Clause (The_Use_Type_Clause => The_Use_Type_Clause, Next => RawDict.Get_Package_Visible_Use_Type_Clauses (The_Package => The_Package)); RawDict.Set_Package_Visible_Use_Type_Clauses (The_Package => The_Package, The_Use_Type_Clause => The_Use_Type_Clause); end Add_Package_Visible_Use_Type_Reference; -------------------------------------------------------------------------------- procedure Add_Package_Local_Use_Type_Reference (The_Use_Type_Clause : in RawDict.Use_Type_Clause_Info_Ref; The_Package : in RawDict.Package_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Package, --# The_Use_Type_Clause; is begin RawDict.Set_Next_Use_Type_Clause (The_Use_Type_Clause => The_Use_Type_Clause, Next => RawDict.Get_Package_Local_Use_Type_Clauses (The_Package => The_Package)); 
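-- The new clause is prepended to the package's local use type list: its Next
-- pointer now designates the previous head of the list, and the statement
-- below makes the new clause the head.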
RawDict.Set_Package_Local_Use_Type_Clauses (The_Package => The_Package, The_Use_Type_Clause => The_Use_Type_Clause); end Add_Package_Local_Use_Type_Reference; -------------------------------------------------------------------------------- procedure Add_Protected_Type_Use_Type_Reference (The_Use_Type_Clause : in RawDict.Use_Type_Clause_Info_Ref; The_Protected_Type : in RawDict.Type_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Protected_Type, --# The_Use_Type_Clause; is begin RawDict.Set_Next_Use_Type_Clause (The_Use_Type_Clause => The_Use_Type_Clause, Next => RawDict.Get_Protected_Type_Use_Type_Clauses (The_Protected_Type => The_Protected_Type)); RawDict.Set_Protected_Type_Use_Type_Clauses (The_Protected_Type => The_Protected_Type, The_Use_Type_Clause => The_Use_Type_Clause); end Add_Protected_Type_Use_Type_Reference; -------------------------------------------------------------------------------- procedure Add_Task_Type_Use_Type_Reference (The_Use_Type_Clause : in RawDict.Use_Type_Clause_Info_Ref; The_Task_Type : in RawDict.Type_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Task_Type, --# The_Use_Type_Clause; is begin RawDict.Set_Next_Use_Type_Clause (The_Use_Type_Clause => The_Use_Type_Clause, Next => RawDict.Get_Task_Type_Use_Type_Clauses (The_Task_Type => The_Task_Type)); RawDict.Set_Task_Type_Use_Type_Clauses (The_Task_Type => The_Task_Type, The_Use_Type_Clause => The_Use_Type_Clause); end Add_Task_Type_Use_Type_Reference; -------------------------------------------------------------------------------- procedure Add_Subprogram_Use_Type_Reference (The_Use_Type_Clause : in RawDict.Use_Type_Clause_Info_Ref; The_Subprogram : in RawDict.Subprogram_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Subprogram, --# The_Use_Type_Clause; is begin RawDict.Set_Next_Use_Type_Clause (The_Use_Type_Clause => The_Use_Type_Clause, Next => RawDict.Get_Subprogram_Use_Type_Clauses (The_Subprogram => The_Subprogram)); RawDict.Set_Subprogram_Use_Type_Clauses (The_Subprogram => The_Subprogram, The_Use_Type_Clause => The_Use_Type_Clause); end Add_Subprogram_Use_Type_Reference; begin -- Add_Use_Type_Reference Add_Use_Type_Clause (Type_Mark => Type_Mark, Declaration => Declaration); RawDict.Create_Use_Type_Clause (Type_Mark => Type_Mark, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, The_Use_Type_Clause => The_Use_Type_Clause); case The_Visibility is when Visible | Privat => case RawDict.GetSymbolDiscriminant (The_Unit) is when Package_Symbol => Add_Package_Visible_Use_Type_Reference (The_Use_Type_Clause => The_Use_Type_Clause, The_Package => RawDict.Get_Package_Info_Ref (Item => The_Unit)); -- GAA External when Generic_Unit_Symbol => The_Generic_Unit := RawDict.Get_Generic_Unit_Info_Ref (Item => The_Unit); -- GAA External case RawDict.Get_Generic_Unit_Kind (The_Generic_Unit => The_Generic_Unit) is when Generic_Of_Package => Add_Package_Visible_Use_Type_Reference (The_Use_Type_Clause => The_Use_Type_Clause, The_Package => RawDict.Get_Generic_Unit_Owning_Package (The_Generic_Unit => The_Generic_Unit)); when Generic_Of_Subprogram => Add_Subprogram_Use_Type_Reference (The_Use_Type_Clause => The_Use_Type_Clause, The_Subprogram => RawDict.Get_Generic_Unit_Owning_Subprogram (The_Generic_Unit => The_Generic_Unit)); end case; when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Add_Use_Type_Reference"); end case; when Local => case RawDict.GetSymbolDiscriminant (The_Unit) is 
when Package_Symbol => Add_Package_Local_Use_Type_Reference (The_Use_Type_Clause => The_Use_Type_Clause, The_Package => RawDict.Get_Package_Info_Ref (Item => The_Unit)); -- GAA External when Subprogram_Symbol => Add_Subprogram_Use_Type_Reference (The_Use_Type_Clause => The_Use_Type_Clause, The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => The_Unit)); -- GAA External when Type_Symbol => case RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => The_Unit)) is -- GAA External when Protected_Type_Item => Add_Protected_Type_Use_Type_Reference (The_Use_Type_Clause => The_Use_Type_Clause, The_Protected_Type => RawDict.Get_Type_Info_Ref (Item => The_Unit)); -- GAA External when Task_Type_Item => Add_Task_Type_Use_Type_Reference (The_Use_Type_Clause => The_Use_Type_Clause, The_Task_Type => RawDict.Get_Type_Info_Ref (Item => The_Unit)); -- GAA External when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Add_Use_Type_Reference"); end case; when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Add_Use_Type_Reference"); end case; end case; AddOtherReference (RawDict.Get_Type_Symbol (Type_Mark), The_Unit, Declaration); end Add_Use_Type_Reference; spark-2012.0.deb/examiner/dag.adb0000644000175000017500000040011711753202336015457 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Cell_Storage; with Cells.Utility; with Cells.Utility.List; with CStacks; with CommandLineData; with ContextManager; with ContextManager.Ops; with DAG_IO; with Debug; with ErrorHandler; with E_Strings; with FileSystem; with Graph; with Labels; with Maths; with Pairs; with SP_Symbols; with StmtStack; with Structures; with SystemErrors; use type SP_Symbols.SP_Symbol; use type StmtStack.ArcKind; package body DAG is type ArityType is (Unary, Binary); --# inherit Cells, --# CStacks, --# Dictionary, --# Statistics; package LoopContext is type T is limited private; procedure Initialize (S : out T); --# derives S from ; procedure EnterLoop (Scope : in Dictionary.Scopes; S : in out T; VCGHeap : in out Cells.Heap_Record; LoopScope : out Dictionary.Scopes); --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives LoopScope from Dictionary.Dict, --# S, --# Scope & --# S, --# VCGHeap from Dictionary.Dict, --# S, --# Scope, --# VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap; procedure ExitLoop (S : in out T; VCGHeap : in out Cells.Heap_Record; LoopScope : in out Dictionary.Scopes); --# global in Dictionary.Dict; --# derives LoopScope from *, --# Dictionary.Dict & --# S, --# VCGHeap from S, --# VCGHeap; function CurrentLoopSym (S : T; VCGHeap : Cells.Heap_Record) return Dictionary.Symbol; function EnclosingLoopSym (S : T; VCGHeap : Cells.Heap_Record; CurrentLoop : Dictionary.Symbol) return Dictionary.Symbol; -- Following only applicable to FOR loops ---------------------------------------------- function CurrentLoopParameterSym (S : T; VCGHeap : Cells.Heap_Record) return Dictionary.Symbol; --# global in Dictionary.Dict; function CurrentLoopMovesInReverse (S : T; VCGHeap : Cells.Heap_Record) return Boolean; --# global in Dictionary.Dict; private type T is record CurrentLoopNumber : Natural; LoopStack : CStacks.Stack; end record; end LoopContext; --# inherit Cells, --# Cell_Storage, --# CStacks, --# DAG, --# Dictionary, --# SP_Symbols, --# Statistics, --# Structures; package Substitutions is -- Substitute_Parameters replaces all formal parameters by their -- corresponding actual parameter within the Constraint -- expression. If the name is a prefix to an unconstrained array -- attribute then the underlying index subtype of the actual name -- is substituted. -- -- The "Constraint" expression may be a function precondition or a -- function return annotation. -- -- The Called function may be the call of an Ada function from executable -- code or it may be the call of an implicitly defined or explicitly defined -- proof function applied within a proof context. -- -- If the constraint calls a nested function then the parameters of the -- nested function are substituted first, S1, and then the callers parameters -- are substuted within S1. This applies to all levels of nesting. -- -- The expression referenced by the parameter "Constraint" is -- updated in place. procedure Substitute_Parameters (Called_Function : in Cells.Cell; Constraint : in out Cells.Cell; VCG_Heap : in out Cells.Heap_Record); --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives Constraint, --# Statistics.TableUsage, --# VCG_Heap from *, --# Called_Function, --# Constraint, --# Dictionary.Dict, --# VCG_Heap; -- Substitute_Implicit_Vars replaces all occurrences of an -- implicit variablle, denoted by "Implicit_Var", by the -- corresponding implicitly declared function call within the -- return annotation. 
-- -- The "Implicit_Return_Expr" must be a reference to the predicate -- part of an implicit return annotation. -- -- The "Proof_Function" must be a "call" of a proof function -- (implicitly declared for a concrete function). -- -- The expression referenced by the parameter -- "Implicit_Return_Expr" is updated in place. -- -- Note: this subprogram uses the same algorithm as -- Substitute_Parameters but, unfortunately, they cannot be -- combined into a single subprogram because Substitute_Parameters -- relies upon a concrete view of a function whereas -- Substitute_Implicit_Vars requires a proof function view. The -- proof function "call" is substituted for the Implicit_Var. procedure Substitute_Implicit_Vars (Proof_Function : in Cells.Cell; Implicit_Var : in Dictionary.Symbol; Implicit_Return_Expr : in out Cells.Cell; VCG_Heap : in out Cells.Heap_Record); --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives Implicit_Return_Expr, --# Statistics.TableUsage, --# VCG_Heap from *, --# Dictionary.Dict, --# Implicit_Return_Expr, --# Implicit_Var, --# Proof_Function, --# VCG_Heap; end Substitutions; --# inherit Cells, --# Cells.Utility, --# Cells.Utility.List, --# Cell_Storage, --# ContextManager, --# CStacks, --# DAG, --# Debug, --# Dictionary, --# E_Strings, --# LexTokenManager, --# SPARK_IO, --# SP_Symbols, --# Statistics, --# Structures, --# SystemErrors; package Type_Constraint is type Context_T is limited private; -- Do NOT call this procedure from the outside. Instead call -- procedure Make, declared below... procedure Process_Type_Rec (The_Type : in Dictionary.Symbol; The_Expression : in Cells.Cell; Assoc_Var : in Dictionary.Symbol; Constraint_List : out Cells.Utility.List.Linked_List; VCG_Heap : in out Cells.Heap_Record; Context : in out Context_T); --# global in out Dictionary.Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; -- Do NOT call this procedure from the outside either. Instead -- call procedure Make, declared below... procedure Process_Type (The_Type : in Dictionary.Symbol; The_Expression : in Cells.Cell; Assoc_Var : in Dictionary.Symbol; Constraint_List : out Cells.Utility.List.Linked_List; VCG_Heap : in out Cells.Heap_Record; Context : in out Context_T); --# global in out Dictionary.Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; -- Given a discrete type, this produces the DAG for -- The_Expression >= The_Type'First /\ The_Expression <= The_Type'Last. -- For booleans this always returns true. procedure Process_Discrete (The_Type : in Dictionary.Symbol; The_Expression : in Cells.Cell; The_Constraint : out Cells.Cell; VCG_Heap : in out Cells.Heap_Record); --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# VCG_Heap from *, --# Dictionary.Dict, --# The_Expression, --# The_Type, --# VCG_Heap & --# The_Constraint from Dictionary.Dict, --# The_Expression, --# The_Type, --# VCG_Heap; -- Create in-type constraints for the given expression against -- the given type. Scope is important here as it affects the -- treatment of private and own types: if the type is not -- visible at the given scope we simply create a `true' -- constraint.
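-- As an illustrative sketch: for an expression E of a discrete type T that
-- is visible at Scope, the constraint produced is equivalent to
-- E >= T'First and E <= T'Last (see Process_Discrete above); if T is not
-- visible at Scope the constraint is just the single cell true.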
procedure Make (The_Type : in Dictionary.Symbol; The_Expression : in Cells.Cell; Scope : in Dictionary.Scopes; Consider_Always_Valid : in Boolean; The_Constraint : out Cells.Cell; VCG_Heap : in out Cells.Heap_Record; VC_Contains_Reals : in out Boolean; VC_Failure : in out Boolean); --# global in out Dictionary.Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Dictionary.Dict, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Statistics.TableUsage, --# VCG_Heap, --# VC_Contains_Reals, --# VC_Failure from *, --# Consider_Always_Valid, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# The_Expression, --# The_Type, --# VCG_Heap & --# The_Constraint from Consider_Always_Valid, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# The_Expression, --# The_Type, --# VCG_Heap; private type Context_T is record VC_Contains_Reals : Boolean; VC_Failure : Boolean; Quant_Id_Number : Positive; Scope : Dictionary.Scopes; Initial_Var : Dictionary.Symbol; end record; --# accept W, 394, Context_T, "It is intentional that the outside can", --# "never make one of these."; end Type_Constraint; package body LoopContext is separate; ------------------------------------------------------------------------- -- Cell Creation Utilities ------------------------------------------------------------------------- -- Create a cell and set its cell kind at the same time procedure CreateCellKind (CellName : out Cells.Cell; VCGHeap : in out Cells.Heap_Record; KindOfCell : in Cells.Cell_Kind) --# global in out Statistics.TableUsage; --# derives CellName from VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap & --# VCGHeap from *, --# KindOfCell; is LocalCell : Cells.Cell; begin Cells.Create_Cell (VCGHeap, LocalCell); Cells.Set_Kind (VCGHeap, LocalCell, KindOfCell); CellName := LocalCell; end CreateCellKind; ------------------------------------------------------------------------- procedure CreateOpCell (CellName : out Cells.Cell; VCGHeap : in out Cells.Heap_Record; OpSymbol : in SP_Symbols.SP_Symbol) --# global in out Statistics.TableUsage; --# derives CellName from VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap & --# VCGHeap from *, --# OpSymbol; is LocalCell : Cells.Cell; begin CreateCellKind (LocalCell, VCGHeap, Cell_Storage.Op); Cells.Set_Op_Symbol (VCGHeap, LocalCell, OpSymbol); CellName := LocalCell; end CreateOpCell; --------------------------------------------------------------- procedure CreateBoolOpCell (CellName : out Cells.Cell; VCGHeap : in out Cells.Heap_Record; TypeSym : in Dictionary.Symbol; OpSymbol : in SP_Symbols.SP_Symbol) --# global in out Statistics.TableUsage; --# derives CellName from VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap & --# VCGHeap from *, --# OpSymbol, --# TypeSym; is LocalCell : Cells.Cell; begin CreateCellKind (LocalCell, VCGHeap, Cell_Storage.Bitwise_Op); Cells.Set_Symbol_Value (VCGHeap, LocalCell, TypeSym); Cells.Set_Op_Symbol (VCGHeap, LocalCell, OpSymbol); CellName := LocalCell; end CreateBoolOpCell; --------------------------------------------------------------- procedure CreateModifiedCell (CellName : out Cells.Cell; VCGHeap : in out Cells.Heap_Record; Sym : in Dictionary.Symbol) --# global in out Statistics.TableUsage; --# derives CellName from VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap & --# VCGHeap from *, --# Sym; is LocalCell : Cells.Cell; begin CreateCellKind (LocalCell, VCGHeap, Cell_Storage.Modified_Op); Cells.Set_Symbol_Value (VCGHeap, LocalCell, 
Sym); CellName := LocalCell; end CreateModifiedCell; --------------------------------------------------------------- procedure CreateReferenceCell (CellName : out Cells.Cell; VCGHeap : in out Cells.Heap_Record; Sym : in Dictionary.Symbol) --# global in out Statistics.TableUsage; --# derives CellName from VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap & --# VCGHeap from *, --# Sym; is LocalCell : Cells.Cell; begin CreateCellKind (LocalCell, VCGHeap, Cell_Storage.Reference); Cells.Set_Symbol_Value (VCGHeap, LocalCell, Sym); CellName := LocalCell; end CreateReferenceCell; --------------------------------------------------------------- procedure CreateUpfCell (CellName : out Cells.Cell; VCGHeap : in out Cells.Heap_Record; Sym : in Dictionary.Symbol; Str : in LexTokenManager.Lex_String) --# global in out Statistics.TableUsage; --# derives CellName from VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap & --# VCGHeap from *, --# Str, --# Sym; is LocalCell : Cells.Cell; begin CreateCellKind (LocalCell, VCGHeap, Cell_Storage.Field_Update_Function); Cells.Set_Symbol_Value (VCGHeap, LocalCell, Sym); Cells.Set_Lex_Str (VCGHeap, LocalCell, Str); CellName := LocalCell; end CreateUpfCell; --------------------------------------------------------------------- -- Simplified names for common operations --------------------------------------------------------------------- procedure SetRightArgument (OpCell : in Cells.Cell; Argument : in Cells.Cell; VCGHeap : in out Cells.Heap_Record) --# derives VCGHeap from *, --# Argument, --# OpCell; is begin Cells.Set_B_Ptr (VCGHeap, OpCell, Argument); end SetRightArgument; -------------------------------------------------------------- procedure SetLeftArgument (OpCell : in Cells.Cell; Argument : in Cells.Cell; VCGHeap : in out Cells.Heap_Record) --# derives VCGHeap from *, --# Argument, --# OpCell; is begin Cells.Set_A_Ptr (VCGHeap, OpCell, Argument); end SetLeftArgument; -------------------------------------------------------------- procedure SetAuxPtr (OpCell : in Cells.Cell; Argument : in Cells.Cell; VCGHeap : in out Cells.Heap_Record) --# derives VCGHeap from *, --# Argument, --# OpCell; is begin Cells.Set_C_Ptr (VCGHeap, OpCell, Argument); end SetAuxPtr; -------------------------------------------------------------- function Is_Leaf (Node : in Cells.Cell; VCG_Heap : in Cells.Heap_Record) return Boolean is begin return Cells.Is_Null_Cell (Cells.Get_B_Ptr (VCG_Heap, Node)); end Is_Leaf; -------------------------------------------------------------- function RightPtr (VCGHeap : in Cells.Heap_Record; OpCell : in Cells.Cell) return Cells.Cell is begin return Cells.Get_B_Ptr (VCGHeap, OpCell); end RightPtr; -------------------------------------------------------------- function LeftPtr (VCGHeap : in Cells.Heap_Record; OpCell : in Cells.Cell) return Cells.Cell is begin return Cells.Get_A_Ptr (VCGHeap, OpCell); end LeftPtr; -------------------------------------------------------------- function AuxPtr (VCGHeap : in Cells.Heap_Record; OpCell : in Cells.Cell) return Cells.Cell is begin return Cells.Get_C_Ptr (VCGHeap, OpCell); end AuxPtr; package body Substitutions is separate; -------------------------------------------------------------- procedure StackCheckStatement (Check_Cell : in Cells.Cell; VCGHeap : in out Cells.Heap_Record; CheckStack : in out CStacks.Stack) --# global in out Statistics.TableUsage; --# derives CheckStack from VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap & --# VCGHeap from *, --# CheckStack, --# Check_Cell; is begin -- 
StackCheckStatement CStacks.Push (VCGHeap, Check_Cell, CheckStack); end StackCheckStatement; ------------------------------------------------------------------------ procedure SetTilde (CellName : in Cells.Cell; VCGHeap : in out Cells.Heap_Record) --# derives VCGHeap from *, --# CellName; is begin Cells.Set_Op_Symbol (VCGHeap, CellName, SP_Symbols.tilde); end SetTilde; --------------------------------------------------------------------- -- More cell creation utilities --------------------------------------------------------------------- procedure CreateFixedVarCell (CellName : out Cells.Cell; VCGHeap : in out Cells.Heap_Record; VarSymbol : in Dictionary.Symbol) --# global in out Statistics.TableUsage; --# derives CellName from VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap & --# VCGHeap from *, --# VarSymbol; is LocalCell : Cells.Cell; begin CreateCellKind (LocalCell, VCGHeap, Cell_Storage.Fixed_Var); Cells.Set_Symbol_Value (VCGHeap, LocalCell, VarSymbol); CellName := LocalCell; end CreateFixedVarCell; --------------------------------------------------------------------- procedure Create_Internal_Natural_Cell (Cell_Name : out Cells.Cell; VCG_Heap : in out Cells.Heap_Record; N : in Natural) --# global in out Statistics.TableUsage; --# derives Cell_Name from VCG_Heap & --# Statistics.TableUsage from *, --# VCG_Heap & --# VCG_Heap from *, --# N; is begin CreateCellKind (Cell_Name, VCG_Heap, Cell_Storage.Internal_Natural); Cells.Set_Natural_Value (VCG_Heap, Cell_Name, N); end Create_Internal_Natural_Cell; --------------------------------------------------------------------- procedure CreateManifestConstCell (CellName : out Cells.Cell; VCGHeap : in out Cells.Heap_Record; LexStr : in LexTokenManager.Lex_String) --# global in out Statistics.TableUsage; --# derives CellName from VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap & --# VCGHeap from *, --# LexStr; is LocalCell : Cells.Cell; begin CreateCellKind (LocalCell, VCGHeap, Cell_Storage.Manifest_Const); Cells.Set_Lex_Str (VCGHeap, LocalCell, LexStr); CellName := LocalCell; end CreateManifestConstCell; --------------------------------------------------------------------- procedure CreateNamedConstCell (CellName : out Cells.Cell; VCGHeap : in out Cells.Heap_Record; ConstVal : in Dictionary.Symbol) --# global in out Statistics.TableUsage; --# derives CellName from VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap & --# VCGHeap from *, --# ConstVal; is LocalCell : Cells.Cell; begin CreateCellKind (LocalCell, VCGHeap, Cell_Storage.Named_Const); Cells.Set_Symbol_Value (VCGHeap, LocalCell, ConstVal); CellName := LocalCell; end CreateNamedConstCell; --------------------------------------------------------------------- -- PNA new proc to support false VC generation in presence of semantic errs procedure CreateFalseCell (VCGHeap : in out Cells.Heap_Record; CellName : out Cells.Cell) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives CellName from VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap & --# VCGHeap from *, --# Dictionary.Dict; is begin Cells.Utility.Create_Bool (VCGHeap, False, CellName); end CreateFalseCell; --------------------------------------------------------------------- procedure CreateTrueCell (VCGHeap : in out Cells.Heap_Record; CellName : out Cells.Cell) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives CellName from VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap & --# VCGHeap from *, --# Dictionary.Dict; is begin Cells.Utility.Create_Bool 
(VCGHeap, True, CellName); end CreateTrueCell; --------------------------------------------------------------------- procedure Imply (Impl : in Cells.Cell; VCGHeap : in out Cells.Heap_Record; Expr : in out Cells.Cell) --# global in out Statistics.TableUsage; --# derives Expr from VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap & --# VCGHeap from *, --# Expr, --# Impl; is ImpCell : Cells.Cell; begin Cells.Utility.Create_Implies (VCG_Heap => VCGHeap, Antecedent => Impl, Consequent => Expr, Implication => ImpCell); Expr := ImpCell; end Imply; --------------------------------------------------------------------- procedure AddAnyShortCircuitImplications (VCGHeap : in out Cells.Heap_Record; Expr : in out Cells.Cell; ShortCircuitStack : in out CStacks.Stack) --# global in out Statistics.TableUsage; --# derives Expr, --# ShortCircuitStack, --# Statistics.TableUsage, --# VCGHeap from *, --# Expr, --# ShortCircuitStack, --# VCGHeap; is cp_impl, impl : Cells.Cell; TempStack : CStacks.Stack; begin if not CStacks.IsEmpty (ShortCircuitStack) then CStacks.CreateStack (TempStack); while not CStacks.IsEmpty (ShortCircuitStack) loop CStacks.PopOff (VCGHeap, ShortCircuitStack, impl); Structures.CopyStructure (VCGHeap, impl, cp_impl); Imply (cp_impl, VCGHeap, Expr); CStacks.Push (VCGHeap, impl, TempStack); end loop; -- copy all the elements back to the ShortCircuitStack while not CStacks.IsEmpty (TempStack) loop CStacks.PopOff (VCGHeap, TempStack, impl); CStacks.Push (VCGHeap, impl, ShortCircuitStack); end loop; end if; end AddAnyShortCircuitImplications; --------------------------------------------------------------------- function DiscreteTypeWithCheck (Type_Sym : Dictionary.Symbol; Scope : Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; is -- function to deal with optional real rtcs function RealTypeAsWell (TypeSym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return (Dictionary.IsType (TypeSym) or Dictionary.IsSubtype (TypeSym)) and then Dictionary.TypeIsReal (TypeSym); end RealTypeAsWell; begin -- DiscreteTypeWithCheck return ((Dictionary.IsType (Type_Sym) or Dictionary.IsSubtype (Type_Sym)) -- guard against unknown types, viz. 
from multi-dimensional arrays and then (not Dictionary.IsPrivateType (Type_Sym, Scope) or else Dictionary.IsPredefinedTimeType (Type_Sym)) and then not Dictionary.TypeIsBoolean (Type_Sym) and then (Dictionary.TypeIsDiscrete (Type_Sym) or else Dictionary.IsPredefinedTimeType (Type_Sym))) or else RealTypeAsWell (Type_Sym) or else Dictionary.IsParameterConstraint (Type_Sym); -- special "type" for uncon arrays end DiscreteTypeWithCheck; ------------------------------------------------------------------- function ArrayTypeWithCheck (Type_Sym : Dictionary.Symbol; Scope : Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; is begin return ((Dictionary.IsType (Type_Sym) or Dictionary.IsSubtype (Type_Sym)) and then not Dictionary.IsPrivateType (Type_Sym, Scope) and then Dictionary.TypeIsArray (Type_Sym)); end ArrayTypeWithCheck; ------------------------------------------------------------------- function RecordTypeWithCheck (Type_Sym : Dictionary.Symbol; Scope : Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; is begin return ((Dictionary.IsType (Type_Sym) or Dictionary.IsSubtype (Type_Sym)) and then not Dictionary.IsPrivateType (Type_Sym, Scope) and then Dictionary.TypeIsRecord (Type_Sym)); end RecordTypeWithCheck; ------------------------------------------------------------------- function IsRealType (Type_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return ((Dictionary.IsType (Type_Sym) or Dictionary.IsSubtype (Type_Sym)) -- guard against unknown types, viz. from multi-dimensional arrays and then Dictionary.TypeIsReal (Type_Sym)); end IsRealType; ------------------------------------------------------------------- -- Destructively join together a list of cells given in Stack with -- /\; if given an empty list return a single true cell. 
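-- For illustration: a stack holding cells for conjuncts C1, C2 and C3 yields
-- a single cell equivalent to C1 and C2 and C3, whereas an empty stack
-- yields the single cell true.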
procedure Join_And (Stack : in out CStacks.Stack; Conjunct : out Cells.Cell; VCG_Heap : in out Cells.Heap_Record) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives Conjunct, --# Stack, --# VCG_Heap from Dictionary.Dict, --# Stack, --# VCG_Heap & --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Stack, --# VCG_Heap; is Tmp : Cells.Cell; begin if CStacks.IsEmpty (Stack) then Cells.Utility.Create_Bool (VCG_Heap, True, Conjunct); else Conjunct := Cells.Null_Cell; while not CStacks.IsEmpty (Stack) loop CStacks.PopOff (Heap => VCG_Heap, S => Stack, C => Tmp); Cells.Utility.Conjoin (VCG_Heap, Tmp, Conjunct); end loop; end if; end Join_And; ------------------------------------------------------------------- package body Type_Constraint is separate; ------------------------------------------------------------------- procedure CreateAttribValueCell (CellName : out Cells.Cell; VCGHeap : in out Cells.Heap_Record; AttribStr : in LexTokenManager.Lex_String) --# global in out Statistics.TableUsage; --# derives CellName from VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap & --# VCGHeap from *, --# AttribStr; is LocalCell : Cells.Cell; begin CreateCellKind (LocalCell, VCGHeap, Cell_Storage.Attrib_Value); Cells.Set_Lex_Str (VCGHeap, LocalCell, AttribStr); CellName := LocalCell; end CreateAttribValueCell; --------------------------------------------------------------------- procedure CreateAttribFunctionCell (AttribName : in LexTokenManager.Lex_String; TypeSym : in Dictionary.Symbol; VCGHeap : in out Cells.Heap_Record; AttribFuncCell : out Cells.Cell) --# global in out Statistics.TableUsage; --# derives AttribFuncCell, --# VCGHeap from AttribName, --# TypeSym, --# VCGHeap & --# Statistics.TableUsage from *, --# AttribName, --# TypeSym, --# VCGHeap; is TypeCell, TickCell, AttribCell : Cells.Cell; begin CreateFixedVarCell (TypeCell, VCGHeap, TypeSym); CreateCellKind (AttribCell, VCGHeap, Cell_Storage.Attrib_Function); Cells.Set_Lex_Str (VCGHeap, AttribCell, AttribName); CreateOpCell (TickCell, VCGHeap, SP_Symbols.apostrophe); SetLeftArgument (TickCell, TypeCell, VCGHeap); SetRightArgument (TickCell, AttribCell, VCGHeap); AttribFuncCell := TickCell; end CreateAttribFunctionCell; --------------------------------------------------------------------- procedure CreateAggregateCounter (StartPoint : in Positive; VCGHeap : in out Cells.Heap_Record; ExpnStack : in out CStacks.Stack) --# global in out Statistics.TableUsage; --# derives ExpnStack from StartPoint, --# VCGHeap & --# Statistics.TableUsage from *, --# StartPoint, --# VCGHeap & --# VCGHeap from *, --# ExpnStack, --# StartPoint; is CounterCell : Cells.Cell; begin CreateCellKind (CounterCell, VCGHeap, Cell_Storage.Aggregate_Counter); Cells.Set_Natural_Value (VCGHeap, CounterCell, StartPoint); CStacks.Push (VCGHeap, CounterCell, ExpnStack); end CreateAggregateCounter; --------------------------------------------------------------------- -- End of Cell Creation Utilities --------------------------------------------------------------------- -- This function is declared here because it is used by BuildExnDAG and Build_Annotation_Expression function IsModularType (Type_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return ((Dictionary.IsType (Type_Sym) or Dictionary.IsSubtype (Type_Sym)) -- guard against unknown types, viz. 
from multi-dimensional arrays and then Dictionary.TypeIsModular (Type_Sym)); end IsModularType; --------------------------------------------------------------------- function IsIntegerType (Type_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return ((Dictionary.IsType (Type_Sym) or Dictionary.IsSubtype (Type_Sym)) -- guard against unknown types, viz. from multi-dimensional arrays and then Dictionary.TypeIsInteger (Type_Sym)); end IsIntegerType; --------------------------------------------------------------------- function IsScalarType (Type_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return ((Dictionary.IsType (Type_Sym) or Dictionary.IsSubtype (Type_Sym)) -- guard against unknown types, viz. from multi-dimensional arrays and then Dictionary.TypeIsScalar (Type_Sym)); end IsScalarType; --------------------------------------------------------------------- -- This function is declared here because it is used by BuildExnDAG and Build_Annotation_Expression function IsModularBitwiseOp (Operator : in SP_Symbols.SP_Symbol; TypeSym : in Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return IsModularType (TypeSym) and then (Operator = SP_Symbols.RWand or else Operator = SP_Symbols.RWor or else Operator = SP_Symbols.RWxor); end IsModularBitwiseOp; --------------------------------------------------------------------- procedure CalculateInsertPoint (VCGHeap : in Cells.Heap_Record; ExpnStack : in CStacks.Stack; ParameterNumber : in Natural; InsertPoint : out Cells.Cell; LastOne : out Boolean) --# derives InsertPoint, --# LastOne from ExpnStack, --# ParameterNumber, --# VCGHeap; is Ptr : Cells.Cell; begin LastOne := True; Ptr := CStacks.Top (VCGHeap, ExpnStack); for i in Natural range 1 .. ParameterNumber loop if Cells.Is_Null_Cell (RightPtr (VCGHeap, Ptr)) then LastOne := True; else LastOne := False; Ptr := RightPtr (VCGHeap, Ptr); end if; end loop; InsertPoint := Ptr; end CalculateInsertPoint; ---------------------------------------------------------------------------------- procedure PushOperator (Arity : in ArityType; OperatorSymbol : in SP_Symbols.SP_Symbol; VCGHeap : in out Cells.Heap_Record; ExpnStack : in out CStacks.Stack) --# global in out Statistics.TableUsage; --# derives ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# Arity, --# ExpnStack, --# OperatorSymbol, --# VCGHeap; is DAGCell : Cells.Cell; begin -- PushOperator CreateOpCell (DAGCell, VCGHeap, OperatorSymbol); SetRightArgument (DAGCell, CStacks.Top (VCGHeap, ExpnStack), VCGHeap); CStacks.Pop (VCGHeap, ExpnStack); if Arity = Binary then SetLeftArgument (DAGCell, CStacks.Top (VCGHeap, ExpnStack), VCGHeap); CStacks.Pop (VCGHeap, ExpnStack); end if; CStacks.Push (VCGHeap, DAGCell, ExpnStack); end PushOperator; --------------------------------------------------------------------- procedure CreateEmptyList (Size : in Natural; VCGHeap : in out Cells.Heap_Record; ExpnStack : in out CStacks.Stack) --# global in out Statistics.TableUsage; --# derives ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# ExpnStack, --# Size, --# VCGHeap; is CommaCell : Cells.Cell; ArgumentList : Cells.Cell; begin if Size > 1 then CreateOpCell (CommaCell, VCGHeap, SP_Symbols.comma); CStacks.Push (VCGHeap, CommaCell, ExpnStack); for i in Natural range 3 .. 
Size loop CreateOpCell (CommaCell, VCGHeap, SP_Symbols.comma); SetRightArgument (CommaCell, CStacks.Top (VCGHeap, ExpnStack), VCGHeap); CStacks.Pop (VCGHeap, ExpnStack); CStacks.Push (VCGHeap, CommaCell, ExpnStack); end loop; CStacks.PopOff (VCGHeap, ExpnStack, ArgumentList); SetRightArgument (CStacks.Top (VCGHeap, ExpnStack), ArgumentList, VCGHeap); end if; end CreateEmptyList; --------------------------------------------------------------------- procedure Chain (NewArcLabel : in Labels.Label; VCGHeap : in out Cells.Heap_Record) --# global in out Graph.Table; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# derives Graph.Table, --# StmtStack.S from Graph.Table, --# StmtStack.S, --# VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap & --# VCGHeap from *, --# Graph.Table, --# NewArcLabel, --# StmtStack.S; is R : StmtStack.StmtRecord; begin R := StmtStack.Top; StmtStack.Pop; Graph.Inc_Nmbr_Of_Stmts; Graph.Create_Coeff (Heap => VCGHeap, I => R.StmtNmbr, J => Graph.Get_Nmbr_Of_Stmts, K => NewArcLabel); R.StmtNmbr := Graph.Get_Nmbr_Of_Stmts; StmtStack.Push (R); end Chain; -------------------------------------------------------------------------------- procedure CreateUnitLabel (StmtLabel : out Labels.Label; VCGHeap : in out Cells.Heap_Record) --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# VCGHeap from *, --# VCGHeap & --# StmtLabel from VCGHeap; is LocalLabel : Labels.Label; UnitStmtCell : Cells.Cell; begin Labels.CreateLabel (VCGHeap, LocalLabel); Cells.Create_Cell (VCGHeap, UnitStmtCell); Labels.AppendPair (VCGHeap, Pairs.CellToPair (UnitStmtCell), LocalLabel); StmtLabel := LocalLabel; end CreateUnitLabel; ------------------------------------------------------------------------ procedure ModelNullStmt (VCGHeap : in out Cells.Heap_Record) --# global in out Graph.Table; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# derives Graph.Table, --# StmtStack.S, --# VCGHeap from Graph.Table, --# StmtStack.S, --# VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap; is StmtLabel : Labels.Label; begin CreateUnitLabel (StmtLabel, VCGHeap); Chain (StmtLabel, VCGHeap); end ModelNullStmt; ------------------------------------------------------------------------- procedure PrepareLabel (VCGHeap : in out Cells.Heap_Record; StmtLabel : out Labels.Label; StmtCell : out Cells.Cell) --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# VCGHeap from *, --# VCGHeap & --# StmtCell, --# StmtLabel from VCGHeap; is LocalLabel : Labels.Label; LocalCell : Cells.Cell; begin Labels.CreateLabel (VCGHeap, LocalLabel); Cells.Create_Cell (VCGHeap, LocalCell); Labels.AppendPair (VCGHeap, Pairs.CellToPair (LocalCell), LocalLabel); StmtLabel := LocalLabel; StmtCell := LocalCell; end PrepareLabel; ------------------------------------------------------------------------ procedure IncorporateAssumption (VCGHeap : in out Cells.Heap_Record; DAG_Root : in Cells.Cell) --# global in out Graph.Table; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# derives Graph.Table, --# StmtStack.S, --# VCGHeap from DAG_Root, --# Graph.Table, --# StmtStack.S, --# VCGHeap & --# Statistics.TableUsage from *, --# DAG_Root, --# VCGHeap; is Stmt_Cell : Cells.Cell; Stmt_Label : Labels.Label; begin PrepareLabel (VCGHeap, Stmt_Label, Stmt_Cell); SetRightArgument (Stmt_Cell, DAG_Root, VCGHeap); Chain (Stmt_Label, VCGHeap); end IncorporateAssumption; ------------------------------------------------------------------------- procedure PlantStackedChecks 
(LineNmbr : in Integer; VCGHeap : in out Cells.Heap_Record; CheckStack : in out CStacks.Stack; KindOfStackedCheck : in out Graph.Proof_Context_Type) --# global in Dictionary.Dict; --# in out Graph.Table; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# derives CheckStack, --# KindOfStackedCheck from *, --# CheckStack, --# Dictionary.Dict, --# VCGHeap & --# Graph.Table, --# Statistics.TableUsage, --# StmtStack.S, --# VCGHeap from *, --# CheckStack, --# Dictionary.Dict, --# Graph.Table, --# KindOfStackedCheck, --# LineNmbr, --# StmtStack.S, --# VCGHeap; is CheckList, CpCheckList, StmtCell, CheckCell : Cells.Cell; StmtLabel : Labels.Label; begin -- PlantStackedChecks -- conjoin all the checks on CheckStack CheckList := Cells.Null_Cell; while not CStacks.IsEmpty (CheckStack) loop CStacks.PopOff (VCGHeap, CheckStack, CheckCell); Cells.Utility.Conjoin (VCGHeap, CheckCell, CheckList); end loop; if not Cells.Is_Null_Cell (CheckList) then Structures.CopyStructure (VCGHeap, CheckList, CpCheckList); -- plant conjoined check statement ModelNullStmt (VCGHeap); -- distinguish Run_Time_Check from PreConCheck Graph.Set_Proof_Context (X => KindOfStackedCheck); KindOfStackedCheck := Graph.Run_Time_Check; -- re-set default value Graph.Set_Text_Line_Nmbr (X => LineNmbr); Graph.Set_Assertion_Locn (X => CheckList); ModelNullStmt (VCGHeap); PrepareLabel (VCGHeap, StmtLabel, StmtCell); SetRightArgument (StmtCell, CpCheckList, VCGHeap); Chain (StmtLabel, VCGHeap); end if; end PlantStackedChecks; -------------------------------------------------------------------------------- procedure UnStackRtcs (LineNmbr : in Integer; VCGHeap : in out Cells.Heap_Record; CheckStack : in out CStacks.Stack; KindOfStackedCheck : in out Graph.Proof_Context_Type) --# global in Dictionary.Dict; --# in out Graph.Table; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# derives CheckStack, --# KindOfStackedCheck from *, --# CheckStack, --# Dictionary.Dict, --# VCGHeap & --# Graph.Table, --# Statistics.TableUsage, --# StmtStack.S, --# VCGHeap from *, --# CheckStack, --# Dictionary.Dict, --# Graph.Table, --# KindOfStackedCheck, --# LineNmbr, --# StmtStack.S, --# VCGHeap; is begin PlantStackedChecks (LineNmbr, VCGHeap, CheckStack, KindOfStackedCheck); end UnStackRtcs; --------------------------------------------------------------------- procedure PlantCheckStatement (Check_Stmt : in Cells.Cell; VCGHeap : in out Cells.Heap_Record; ShortCircuitStack : in out CStacks.Stack; CheckStack : in out CStacks.Stack) --# global in out Statistics.TableUsage; --# derives CheckStack, --# ShortCircuitStack from Check_Stmt, --# ShortCircuitStack, --# VCGHeap & --# Statistics.TableUsage from *, --# Check_Stmt, --# ShortCircuitStack, --# VCGHeap & --# VCGHeap from *, --# CheckStack, --# Check_Stmt, --# ShortCircuitStack; is Check_Cell1 : Cells.Cell; begin -- PlantCheckStatement Check_Cell1 := Check_Stmt; -- deal with any encompassing short-circuit forms AddAnyShortCircuitImplications (VCGHeap, Check_Cell1, ShortCircuitStack); StackCheckStatement (Check_Cell1, VCGHeap, CheckStack); end PlantCheckStatement; --------------------------------------------------------------------- procedure CheckConstraintRunTimeError (Type_Sym : in Dictionary.Symbol; Expr : in Cells.Cell; Scope : in Dictionary.Scopes; VCGHeap : in out Cells.Heap_Record; ShortCircuitStack : in out CStacks.Stack; CheckStack : in out CStacks.Stack; ContainsReals : in out Boolean) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives CheckStack, --# 
ShortCircuitStack, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Expr, --# Scope, --# ShortCircuitStack, --# Type_Sym, --# VCGHeap & --# ContainsReals from *, --# Dictionary.Dict, --# Type_Sym & --# VCGHeap from *, --# CheckStack, --# Dictionary.Dict, --# Expr, --# Scope, --# ShortCircuitStack, --# Type_Sym; is Check_Cell : Cells.Cell; begin -- CheckConstraintRunTimeError if DiscreteTypeWithCheck (Type_Sym, Scope) then Type_Constraint.Process_Discrete (The_Type => Type_Sym, The_Expression => Expr, The_Constraint => Check_Cell, VCG_Heap => VCGHeap); PlantCheckStatement (Check_Cell, VCGHeap, ShortCircuitStack, CheckStack); end if; -- check for reals separated out in support of optional real RTCs if IsRealType (Type_Sym) then ContainsReals := True; end if; end CheckConstraintRunTimeError; --------------------------------------------------------------------- procedure CreateOverflowConstraint (Expr : in Cells.Cell; Type_Sym : in Dictionary.Symbol; VCGHeap : in out Cells.Heap_Record; RangeDAG : out Cells.Cell) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives RangeDAG, --# VCGHeap from Dictionary.Dict, --# Expr, --# Type_Sym, --# VCGHeap & --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Expr, --# Type_Sym, --# VCGHeap; is RelOperationLHS, RelOperationRHS, MiddleOperator : SP_Symbols.SP_Symbol; BaseTypeCell : Cells.Cell; Exp_Copy : Cells.Cell; Type_Of_LHS : Cells.Cell; AttribCell : Cells.Cell; LeftSideofRange : Cells.Cell; RightSideofRange : Cells.Cell; LeftAnd : Cells.Cell; RightAnd : Cells.Cell; RangeDAG1 : Cells.Cell; begin -- CreateOverflowConstraint -- make a copy of Expr Structures.CopyStructure (VCGHeap, Expr, Exp_Copy); RelOperationLHS := SP_Symbols.greater_or_equal; RelOperationRHS := SP_Symbols.less_or_equal; MiddleOperator := SP_Symbols.RWand; -- if the type is universal integer then build constraint in terms of Min_Int -- and Max_Int if Dictionary.IsUniversalIntegerType (Type_Sym) then -- RootInteger is a special kind of cell used to model Min_Int and Max_Int CreateCellKind (LeftSideofRange, VCGHeap, Cell_Storage.Root_Integer); Cells.Set_Lex_Str (VCGHeap, LeftSideofRange, LexTokenManager.Min_Token); CreateCellKind (RightSideofRange, VCGHeap, Cell_Storage.Root_Integer); Cells.Set_Lex_Str (VCGHeap, RightSideofRange, LexTokenManager.Max_Token); -- otherwise build constraint in terms of t'base'first and t'base'last else -- create cell for Type_Sym CreateFixedVarCell (Type_Of_LHS, VCGHeap, Dictionary.GetRootType (Type_Sym)); -- Create BaseTypeCell as apostrophe (Type_Of_LHS, first) CreateCellKind (AttribCell, VCGHeap, Cell_Storage.Attrib_Value); CreateOpCell (BaseTypeCell, VCGHeap, SP_Symbols.apostrophe); SetLeftArgument (BaseTypeCell, Type_Of_LHS, VCGHeap); SetRightArgument (BaseTypeCell, AttribCell, VCGHeap); Cells.Set_Lex_Str (VCGHeap, RightPtr (VCGHeap, BaseTypeCell), LexTokenManager.Base_Token); -- Create LeftSideofRange as apostrophe (BaseTypeCell, first) CreateCellKind (AttribCell, VCGHeap, Cell_Storage.Attrib_Value); CreateOpCell (LeftSideofRange, VCGHeap, SP_Symbols.apostrophe); SetLeftArgument (LeftSideofRange, BaseTypeCell, VCGHeap); SetRightArgument (LeftSideofRange, AttribCell, VCGHeap); Cells.Set_Lex_Str (VCGHeap, RightPtr (VCGHeap, LeftSideofRange), LexTokenManager.First_Token); -- Create RightSideofRange as apostrophe (BaseTypeCell, last) Structures.CopyStructure (VCGHeap, LeftSideofRange, RightSideofRange); Cells.Set_Lex_Str (VCGHeap, RightPtr (VCGHeap, RightSideofRange), LexTokenManager.Last_Token); end if; -- now 
assemble the whole constraint -- create left-hand of AND CreateOpCell (LeftAnd, VCGHeap, RelOperationLHS); SetRightArgument (LeftAnd, LeftSideofRange, VCGHeap); SetLeftArgument (LeftAnd, Exp_Copy, VCGHeap); -- create right-hand of AND CreateOpCell (RightAnd, VCGHeap, RelOperationRHS); SetRightArgument (RightAnd, RightSideofRange, VCGHeap); SetLeftArgument (RightAnd, Exp_Copy, VCGHeap); -- form conjunction of the two constraints; CreateOpCell (RangeDAG1, VCGHeap, MiddleOperator); SetRightArgument (RangeDAG1, RightAnd, VCGHeap); SetLeftArgument (RangeDAG1, LeftAnd, VCGHeap); RangeDAG := RangeDAG1; end CreateOverflowConstraint; -------------------------------------------------------------------------------- procedure CheckOverflowRunTimeError (Type_Sym : in Dictionary.Symbol; Expr : in Cells.Cell; Scope : in Dictionary.Scopes; VCGHeap : in out Cells.Heap_Record; ShortCircuitStack : in out CStacks.Stack; ContainsReals : in out Boolean; CheckStack : in out CStacks.Stack) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives CheckStack, --# ShortCircuitStack, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Expr, --# Scope, --# ShortCircuitStack, --# Type_Sym, --# VCGHeap & --# ContainsReals from *, --# Dictionary.Dict, --# Type_Sym & --# VCGHeap from *, --# CheckStack, --# Dictionary.Dict, --# Expr, --# Scope, --# ShortCircuitStack, --# Type_Sym; is Check_Cell : Cells.Cell; begin -- CheckOverflowRunTimeError if DiscreteTypeWithCheck (Type_Sym, Scope) and then not IsModularType (Type_Sym) then -- no overflows for modulars CreateOverflowConstraint (Expr, Type_Sym, VCGHeap, Check_Cell); PlantCheckStatement (Check_Cell, VCGHeap, ShortCircuitStack, CheckStack); end if; -- check for reals separated out in support of optional real RTCs if IsRealType (Type_Sym) then ContainsReals := True; end if; end CheckOverflowRunTimeError; -------------------------------------------------------------------------------- -- Given an expression representing f convert it to fld_inherit (f) procedure InsertInheritDeReference (FieldSymbol : in Dictionary.Symbol; VCGHeap : in out Cells.Heap_Record; Expression : in out Cells.Cell) --# global in out Statistics.TableUsage; --# derives Expression from VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap & --# VCGHeap from *, --# Expression, --# FieldSymbol; is DAGCell : Cells.Cell; begin -- InsertInheritDeReference CreateCellKind (DAGCell, VCGHeap, Cell_Storage.Field_Access_Function); Cells.Set_Symbol_Value (VCGHeap, DAGCell, FieldSymbol); Cells.Set_Lex_Str (VCGHeap, DAGCell, LexTokenManager.Inherit_Token); SetRightArgument (DAGCell, Expression, VCGHeap); Expression := DAGCell; end InsertInheritDeReference; --------------------------------------------------------------------- -- Used in ModelProcedureCall, BuildExpnDAG, Build_Annotation_Expression procedure ConvertTaggedActualIfNecessary (SubprogSym : in Dictionary.Symbol; VCGHeap : in out Cells.Heap_Record; Expression : in out Cells.Cell) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives Expression, --# Statistics.TableUsage, --# VCGHeap from *, --# Dictionary.Dict, --# Expression, --# SubprogSym, --# VCGHeap; is function ExpressionType (VCGHeap : Cells.Heap_Record; Expression : Cells.Cell) return Dictionary.Symbol --# global in Dictionary.Dict; is Result : Dictionary.Symbol; begin case Cells.Get_Kind (VCGHeap, Expression) is when Cell_Storage.Reference | Cell_Storage.Named_Const | Cell_Storage.Field_Access_Function => Result := Dictionary.GetType 
(Cells.Get_Symbol_Value (VCGHeap, Expression)); when Cell_Storage.Element_Function => Result := Cells.Get_Symbol_Value (VCGHeap, Expression); when others => Result := Dictionary.NullSymbol; -- consider fatal error trap here end case; return Result; end ExpressionType; ------------------ procedure ConvertTaggedActual (ActualType : in Dictionary.Symbol; ControllingType : in Dictionary.Symbol; VCGHeap : in out Cells.Heap_Record; Expression : in out Cells.Cell) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives Expression, --# Statistics.TableUsage, --# VCGHeap from *, --# ActualType, --# ControllingType, --# Dictionary.Dict, --# Expression, --# VCGHeap; is ActualTypeLocal : Dictionary.Symbol; begin -- We know in this case that Expression represents the -- name of an object so all we need to do is add fld_inherit dereferences in -- front of it until the type conversion is complete. ActualTypeLocal := ActualType; -- ControllingType=NullSym means that the called subprogram doesn't have a controlling -- type and so we should just leave expression alone. Could be considered a special -- case of "not Dictionary.IsAnExtensionOf" below (can't be an extension of null). if not Dictionary.Is_Null_Symbol (ControllingType) then loop -- Actual type is null if nothing planted in wf_proc_call exit when Dictionary.Is_Null_Symbol (ActualTypeLocal); -- Do nothing if we call this with anything other than a tagged type exit when not Dictionary.TypeIsTagged (ActualTypeLocal); -- Do nothing if type is tagged but is unrelated to controlling type exit when not Dictionary.IsAnExtensionOf (ControllingType, ActualTypeLocal); exit when Dictionary.Types_Are_Equal (Left_Symbol => ActualTypeLocal, Right_Symbol => ControllingType, Full_Range_Subtype => False); -- normal exit, conversion complete InsertInheritDeReference (Dictionary.CurrentSymbol (Dictionary.FirstRecordComponent (ActualTypeLocal)), VCGHeap, Expression); ActualTypeLocal := Dictionary.GetRootOfExtendedType (ActualTypeLocal); end loop; end if; end ConvertTaggedActual; ------------------ begin -- ConvertTaggedActualIfNecessary if Cells.Get_Kind (VCGHeap, Expression) = Cell_Storage.Reference or else Cells.Get_Kind (VCGHeap, Expression) = Cell_Storage.Named_Const or else Cells.Get_Kind (VCGHeap, Expression) = Cell_Storage.Field_Access_Function or else Cells.Get_Kind (VCGHeap, Expression) = Cell_Storage.Element_Function then -- it's an object, and may need converting ConvertTaggedActual (ExpressionType (VCGHeap, Expression), -- type of actual Dictionary.GetSubprogramControllingType (SubprogSym), -- type of formal VCGHeap, Expression); -- actual parameter end if; end ConvertTaggedActualIfNecessary; -------------------------------------------------------------- -- When we encounter R.F we may need to convert it to R.{Inherit.}F if F -- is a field inherited from a root type that we have extended. -- FieldExpn is a DAG representing "F".
-- This procedure is used by both BuildExpnDAG and Build_Annotation_Expression procedure ModelInheritedFieldsOfTaggedRecord (FieldName : in LexTokenManager.Lex_String; RecordType : in Dictionary.Symbol; VCGHeap : in out Cells.Heap_Record; FieldExpn : in out Cells.Cell) --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in out Statistics.TableUsage; --# derives FieldExpn, --# Statistics.TableUsage, --# VCGHeap from *, --# Dictionary.Dict, --# FieldExpn, --# FieldName, --# LexTokenManager.State, --# RecordType, --# VCGHeap; is FieldSymbol : Dictionary.Symbol; RootRecordType : Dictionary.Symbol; function InheritedField (OfField : Dictionary.Symbol) return Dictionary.Symbol --# global in Dictionary.Dict; is Result : Dictionary.Symbol; begin if Dictionary.IsRecordComponent (OfField) then Result := Dictionary.CurrentSymbol (Dictionary.FirstRecordComponent (Dictionary.GetType (OfField))); else -- we have a record type Result := Dictionary.CurrentSymbol (Dictionary.FirstRecordComponent (OfField)); end if; return Result; end InheritedField; begin -- ModelInheritedFieldsOfTaggedRecord RootRecordType := Dictionary.GetRootType (RecordType); FieldSymbol := RootRecordType; -- loop executes 0 times if there is no tagged inheritance involved; this leaves -- FieldExpn unchanged for I in Integer range 1 .. Dictionary.GetInheritDepth (FieldName, RootRecordType) loop -- The model of tagged records used means that the Inherit field is always the first -- field so we can get its symbol as follows FieldSymbol := InheritedField (FieldSymbol); -- for each level of inherit depth, we insert one inherit dereference InsertInheritDeReference (FieldSymbol, VCGHeap, FieldExpn); end loop; end ModelInheritedFieldsOfTaggedRecord; --------------------------------------------------------------- procedure InsertParameterInNextFreeSlot (StartPoint : in Cells.Cell; ExpressionCell : in Cells.Cell; VCGHeap : in out Cells.Heap_Record) --# derives VCGHeap from *, --# ExpressionCell, --# StartPoint; is InsertPoint : Cells.Cell; begin InsertPoint := StartPoint; loop if Cells.Is_Null_Cell (RightPtr (VCGHeap, InsertPoint)) then SetRightArgument (InsertPoint, ExpressionCell, VCGHeap); exit; end if; InsertPoint := RightPtr (VCGHeap, InsertPoint); if Cells.Is_Null_Cell (LeftPtr (VCGHeap, InsertPoint)) then SetLeftArgument (InsertPoint, ExpressionCell, VCGHeap); exit; end if; end loop; end InsertParameterInNextFreeSlot; -------------------------------------------------------------------------------- procedure PushFunction (Kind : in Cells.Cell_Kind; VCGHeap : in out Cells.Heap_Record; ExpnStack : in out CStacks.Stack) --# global in out Statistics.TableUsage; --# derives ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# ExpnStack, --# Kind, --# VCGHeap; is DAGCell : Cells.Cell; begin CreateCellKind (DAGCell, VCGHeap, Kind); SetRightArgument (DAGCell, CStacks.Top (VCGHeap, ExpnStack), VCGHeap); CStacks.Pop (VCGHeap, ExpnStack); CStacks.Push (VCGHeap, DAGCell, ExpnStack); end PushFunction; ----------------------------------------------------------------------- procedure TransformRangeConstraint (VCGHeap : in out Cells.Heap_Record; ExpnStack : in out CStacks.Stack) --# global in out Statistics.TableUsage; --# derives ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# ExpnStack, --# VCGHeap; is LeftSideOfRange, RightSideOfRange : Cells.Cell; begin --turns 'RANGE into 'FIRST .. 'LAST or pushes ..
for discrete range if Cells.Get_Op_Symbol (VCGHeap, CStacks.Top (VCGHeap, ExpnStack)) = SP_Symbols.apostrophe then CStacks.PopOff (VCGHeap, ExpnStack, LeftSideOfRange); Structures.CopyStructure (VCGHeap, LeftSideOfRange, RightSideOfRange); Cells.Set_Lex_Str (VCGHeap, RightPtr (VCGHeap, LeftSideOfRange), LexTokenManager.First_Token); Cells.Set_Lex_Str (VCGHeap, RightPtr (VCGHeap, RightSideOfRange), LexTokenManager.Last_Token); CStacks.Push (VCGHeap, LeftSideOfRange, ExpnStack); CStacks.Push (VCGHeap, RightSideOfRange, ExpnStack); end if; PushOperator (Binary, SP_Symbols.double_dot, VCGHeap, ExpnStack); end TransformRangeConstraint; --------------------------------------------------------------------- procedure TransformTypeName (VCGHeap : in out Cells.Heap_Record; ExpnStack : in out CStacks.Stack) --# global in out Statistics.TableUsage; --# derives ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# ExpnStack, --# VCGHeap; is AttribCell, TypeMarkcell, LeftSideOfRange, RightSideOfRange : Cells.Cell; begin --turns TYPE into TYPE'FIRST .. TYPE'LAST CStacks.PopOff (VCGHeap, ExpnStack, TypeMarkcell); CreateCellKind (AttribCell, VCGHeap, Cell_Storage.Attrib_Value); CreateOpCell (LeftSideOfRange, VCGHeap, SP_Symbols.apostrophe); SetLeftArgument (LeftSideOfRange, TypeMarkcell, VCGHeap); SetRightArgument (LeftSideOfRange, AttribCell, VCGHeap); Structures.CopyStructure (VCGHeap, LeftSideOfRange, RightSideOfRange); Cells.Set_Lex_Str (VCGHeap, RightPtr (VCGHeap, LeftSideOfRange), LexTokenManager.First_Token); Cells.Set_Lex_Str (VCGHeap, RightPtr (VCGHeap, RightSideOfRange), LexTokenManager.Last_Token); CStacks.Push (VCGHeap, LeftSideOfRange, ExpnStack); CStacks.Push (VCGHeap, RightSideOfRange, ExpnStack); PushOperator (Binary, SP_Symbols.double_dot, VCGHeap, ExpnStack); end TransformTypeName; ----------------------------------------------------------------------- function DoingArrayAggregate (VCGHeap : Cells.Heap_Record; ExpnStack : CStacks.Stack) return Boolean --# global in Dictionary.Dict; is AggCell : Cells.Cell; begin AggCell := CStacks.FindAggregateCell (VCGHeap, ExpnStack); return Dictionary.TypeIsArray (Cells.Get_Symbol_Value (VCGHeap, AggCell)); end DoingArrayAggregate; --------------------------------------------------------------------- function DoingRecordAggregate (VCGHeap : Cells.Heap_Record; ExpnStack : CStacks.Stack) return Boolean --# global in Dictionary.Dict; is AggCell : Cells.Cell; begin AggCell := CStacks.FindAggregateCell (VCGHeap, ExpnStack); return Dictionary.TypeIsRecord (Cells.Get_Symbol_Value (VCGHeap, AggCell)); end DoingRecordAggregate; --------------------------------------------------------------------- -- Used with record aggregates. These are created in DownProcessAggregate -- with an empty comma-delimited list into which associations are placed. 
-- If we have a model of tagged extended record, the routine first identifies -- which constituent record part of the overall model should have the -- association procedure InsertAssociation (StartPoint : in Cells.Cell; ExpressionCell : in Cells.Cell; VCGHeap : in out Cells.Heap_Record) --# global in Dictionary.Dict; --# derives VCGHeap from *, --# Dictionary.Dict, --# ExpressionCell, --# StartPoint; is InsertPoint : Cells.Cell; SoughtRecordType : Dictionary.Symbol; begin -- expression cell has the form FIELD := EXPRESSION SoughtRecordType := Dictionary.GetRecordType (Cells.Get_Symbol_Value (VCGHeap, LeftPtr (VCGHeap, ExpressionCell))); InsertPoint := StartPoint; -- left-most MkAggregate in the model loop exit when Dictionary.Types_Are_Equal (Left_Symbol => Cells.Get_Symbol_Value (VCGHeap, InsertPoint), Right_Symbol => SoughtRecordType, Full_Range_Subtype => False); -- move to first parameter slot - this is either a comma or a MkAggregate cell InsertPoint := RightPtr (VCGHeap, InsertPoint); if Cells.Get_Op_Symbol (VCGHeap, InsertPoint) = SP_Symbols.comma then InsertPoint := RightPtr (VCGHeap, LeftPtr (VCGHeap, InsertPoint)); end if; end loop; -- InsertPoint now points at the correct MkAggregate cell -- So insert expression InsertParameterInNextFreeSlot (InsertPoint, ExpressionCell, VCGHeap); end InsertAssociation; --------------------------------------------------------------------- function AggregateType (VCGHeap : Cells.Heap_Record; ExpnStack : CStacks.Stack) return Dictionary.Symbol is begin return Cells.Get_Symbol_Value (VCGHeap, CStacks.FindAggregateCell (VCGHeap, ExpnStack)); end AggregateType; --------------------------------------------------------------------- function CurrentFieldOrIndex (VCGHeap : Cells.Heap_Record; ExpnStack : CStacks.Stack) return Positive is begin return Cells.Get_Natural_Value (VCGHeap, CStacks.FindAggregateCounter (VCGHeap, ExpnStack)); end CurrentFieldOrIndex; --------------------------------------------------------------------- procedure IncCurrentFieldOrIndex (ExpnStack : in CStacks.Stack; VCGHeap : in out Cells.Heap_Record) --# derives VCGHeap from *, --# ExpnStack; is AggCell : Cells.Cell; begin AggCell := CStacks.FindAggregateCounter (VCGHeap, ExpnStack); Cells.Set_Natural_Value (VCGHeap, AggCell, Cells.Get_Natural_Value (VCGHeap, AggCell) + 1); end IncCurrentFieldOrIndex; --------------------------------------------------------------------- -- Push binary operator on to top of stack but reversing arguments -- so we end up with: "TOS Op 2ndTOS" procedure SwitchAndPush (OperatorSymbol : in SP_Symbols.SP_Symbol; VCGHeap : in out Cells.Heap_Record; ExpnStack : in out CStacks.Stack) --# global in out Statistics.TableUsage; --# derives ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# ExpnStack, --# OperatorSymbol, --# VCGHeap; is -- pushes binary operator,reversing order of operands; DAGCell : Cells.Cell; begin CreateOpCell (DAGCell, VCGHeap, OperatorSymbol); -- Cells.Set_A_Ptr (VCGHeap , DAGCell, CStacks.Top (VCGHeap , ExpnStack)); SetLeftArgument (DAGCell, CStacks.Top (VCGHeap, ExpnStack), VCGHeap); CStacks.Pop (VCGHeap, ExpnStack); -- Cells.Set_B_Ptr (VCGHeap , DAGCell, CStacks.Top (VCGHeap , ExpnStack)); SetRightArgument (DAGCell, CStacks.Top (VCGHeap, ExpnStack), VCGHeap); CStacks.Pop (VCGHeap, ExpnStack); CStacks.Push (VCGHeap, DAGCell, ExpnStack); end SwitchAndPush; --------------------------------------------------------------------- function GetTOStype (VCGHeap : Cells.Heap_Record; ExpnStack : CStacks.Stack) return Dictionary.Symbol 
--# global in Dictionary.Dict; is Sym : Dictionary.Symbol; begin -- this function is used in various places where a decision on -- what to do next depends on the symbol on TOS. Sym := Cells.Get_Symbol_Value (VCGHeap, CStacks.Top (VCGHeap, ExpnStack)); if Cells.Get_Kind (VCGHeap, CStacks.Top (VCGHeap, ExpnStack)) = Cell_Storage.Field_Update_Function then -- we have a record field but want record type Sym := Dictionary.GetRecordType (Sym); elsif Dictionary.IsPackage (Sym) then null; -- packages returned intact elsif Dictionary.IsTypeMark (Sym) then null; -- types returned intact else Sym := Dictionary.GetType (Sym); -- return type of thing on TOS end if; return Sym; end GetTOStype; --------------------------------------------------------------------- -- This procedure adds "mod T'Modulus" to an expression if T is modular procedure ModularizeIfNeeded (TypeSym : in Dictionary.Symbol; VCGHeap : in out Cells.Heap_Record; ExpnStack : in out CStacks.Stack) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# Dictionary.Dict, --# ExpnStack, --# TypeSym, --# VCGHeap; is ModOpCell, TickCell, PrefixCell, ModulusCell : Cells.Cell; -- Check to avoid getting "mod N mod N" in output -- Suspect this is actually redundant but it a cheap safeguard function AlreadyModularized (VCGHeap : Cells.Heap_Record) return Boolean --# global in Dictionary.Dict; --# in ExpnStack; --# in TypeSym; is begin return Cells.Get_Kind (VCGHeap, CStacks.Top (VCGHeap, ExpnStack)) = Cell_Storage.Op and then Cells.Get_Op_Symbol (VCGHeap, CStacks.Top (VCGHeap, ExpnStack)) = SP_Symbols.RWmod and then Dictionary.Types_Are_Equal (Left_Symbol => Cells.Get_Symbol_Value (VCGHeap, CStacks.Top (VCGHeap, ExpnStack)), Right_Symbol => TypeSym, Full_Range_Subtype => False); -- final conjunction makes use of type symbol placed in the Val field -- of the cell as described in ModularizeIfNeeded main body end AlreadyModularized; begin -- ModularizeIfNeeded if IsModularType (TypeSym) then if not AlreadyModularized (VCGHeap) then -- create ' operator CreateOpCell (TickCell, VCGHeap, SP_Symbols.apostrophe); -- create Modulus attribute name CreateCellKind (ModulusCell, VCGHeap, Cell_Storage.Attrib_Value); Cells.Set_Lex_Str (VCGHeap, ModulusCell, LexTokenManager.Modulus_Token); -- Create prefix CreateFixedVarCell (PrefixCell, VCGHeap, TypeSym); -- Assemble t'modulus SetLeftArgument (TickCell, PrefixCell, VCGHeap); SetRightArgument (TickCell, ModulusCell, VCGHeap); -- create mod operator CreateOpCell (ModOpCell, VCGHeap, SP_Symbols.RWmod); -- insert type in the val field of the cell to simplify checking for -- unnecessary duplicate MODs. For an operator cell the val field is -- not usually used Cells.Set_Symbol_Value (VCGHeap, ModOpCell, TypeSym); -- append mod t'modulus to expression on TOS SetRightArgument (ModOpCell, TickCell, VCGHeap); SetLeftArgument (ModOpCell, CStacks.Top (VCGHeap, ExpnStack), VCGHeap); CStacks.Pop (VCGHeap, ExpnStack); CStacks.Push (VCGHeap, ModOpCell, ExpnStack); end if; -- no action if identical mod already there end if; -- falls through with no action if a valid type is supplied but the type is not modular end ModularizeIfNeeded; --------------------------------------------------------------------- -- Node is in out so that we can prune walk if embedded agg found. 
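-- As a rough sketch of the model built below (using the cell kinds named in
-- the code): a record aggregate becomes a Mk_Aggregate cell over an empty
-- comma list whose slots are later filled with "field := expression"
-- associations; for an extended tagged type, each inherited part is a nested
-- Mk_Aggregate attached to its Inherit field through a "becomes" cell.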
procedure DownProcessAggregate (QualExpNode : in SP_Symbols.SP_Symbol; VCGHeap : in out Cells.Heap_Record; Node : in out STree.SyntaxNode; ExpnStack : in out CStacks.Stack) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# QualExpNode, --# SPARK_IO.File_Sys, --# STree.Table & --# ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# Dictionary.Dict, --# ExpnStack, --# Node, --# QualExpNode, --# STree.Table, --# VCGHeap & --# Node from *, --# QualExpNode, --# STree.Table; is DAGCell : Cells.Cell; AggTemp : Cells.Cell; AggType : Dictionary.Symbol; InsertPointForInheritedFields : Cells.Cell; -- following three cells are used to create a model of inherited tagged records InheritedFieldsModel : Cells.Cell; InheritField : Cells.Cell; MkAggregateCell : Cells.Cell; InheritSym : Dictionary.Symbol; function NumberOfListElementsNeeded (RecordSym : Dictionary.Symbol) return Natural --# global in Dictionary.Dict; is NumberOfFields : Natural; begin NumberOfFields := Dictionary.GetNumberOfNonExtendedComponents (RecordSym); -- If the root type is a null record then we ignore it since it won't have -- any aggregate choice expressions to associate with it. In this case we need -- one fewer fields slots in the empty list of the immediately preceding record -- in the model. if Dictionary.TypeIsExtendedTagged (RecordSym) and then Dictionary.NoFieldsBelowThisRecord (RecordSym) then NumberOfFields := NumberOfFields - 1; end if; return NumberOfFields; end NumberOfListElementsNeeded; begin if STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => Node)) = QualExpNode then --top level aggregate Node := STree.Child_Node (Current_Node => Node); -- ExpnStack will have symbol of aggregate type on top. We turn -- this into an IncompleteAggregate cell (it becomes MkAggregate when -- we get back to the aggregate node on the way up). Cells.Set_Kind (VCGHeap, CStacks.Top (VCGHeap, ExpnStack), Cell_Storage.Incomplete_Aggregate); -- For record aggregates, create a list of commas into which we can later slot -- the aggregate expressions if (Dictionary.TypeIsRecord (Cells.Get_Symbol_Value (VCGHeap, CStacks.Top (VCGHeap, ExpnStack)))) then -- The type indicated by the qualifier might be a record subtype, -- but for the VCG we always use the root type, look it up and -- modify that cells symbol for later use. 
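-- For example (illustrative only): if the qualifier names a record subtype S
-- whose root type is R, the aggregate cell is re-labelled with R so that the
-- slots created below match the components of the full root record.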
AggType := Dictionary.GetRootType (Cells.Get_Symbol_Value (VCGHeap, CStacks.Top (VCGHeap, ExpnStack))); Cells.Set_Symbol_Value (VCGHeap, CStacks.Top (VCGHeap, ExpnStack), AggType); CreateEmptyList (NumberOfListElementsNeeded (AggType), VCGHeap, ExpnStack); -- For extended tagged records we need to extend this "empty record" model to include -- all the Inherit fields InsertPointForInheritedFields := CStacks.Top (VCGHeap, ExpnStack); loop -- immediate exit if we aren't doing tagged record model exit when not Dictionary.TypeIsTagged (AggType); -- Step down to first inherited record InheritSym := Dictionary.CurrentSymbol (Dictionary.FirstRecordComponent (AggType)); AggType := Dictionary.GetType (InheritSym); -- Finally reached field of the root type exit when not Dictionary.TypeIsTagged (AggType); -- if we have zero components then we don't need to include record in model exit when not Dictionary.RecordHasSomeFields (AggType); -- At this point we have an inherited record with >0 components CreateOpCell (InheritedFieldsModel, VCGHeap, SP_Symbols.becomes); -- Create a cell for field name Inherit CreateFixedVarCell (InheritField, VCGHeap, InheritSym); -- connect it to := SetLeftArgument (InheritedFieldsModel, InheritField, VCGHeap); -- Create MkAggregate cell CreateCellKind (MkAggregateCell, VCGHeap, Cell_Storage.Mk_Aggregate); Cells.Set_Symbol_Value (VCGHeap, MkAggregateCell, AggType); -- connect it to := SetRightArgument (InheritedFieldsModel, MkAggregateCell, VCGHeap); -- We need to create an empty comma list and to do that need -- the aggregate model on top of stack, so we push it for now CStacks.Push (VCGHeap, MkAggregateCell, ExpnStack); CreateEmptyList (NumberOfListElementsNeeded (AggType), VCGHeap, ExpnStack); -- Restore stack CStacks.Pop (VCGHeap, ExpnStack); -- We now have a completed model of the inherited and just need to insert it in -- overall structure InsertParameterInNextFreeSlot (InsertPointForInheritedFields, InheritedFieldsModel, VCGHeap); InsertPointForInheritedFields := MkAggregateCell; -- ready for next pass through loop end loop; end if; -- no else part, anything else must be a simple qualified expression -- and we will later throw this cell away in that case. 
CStacks.PopOff (VCGHeap, ExpnStack, AggTemp); -- if we are doing positional association (other than simple -- qualified expression) we need to maintain a counter -- and we use a new cell's value field for this purpose CreateAggregateCounter (1, VCGHeap, ExpnStack); -- Put aggregate back on top of stack CStacks.Push (VCGHeap, AggTemp, ExpnStack); else -- unsupported embedded aggregate ErrorHandler.Semantic_Warning (Err_Num => 300, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); --push a null cell to represent the unresolvable anonymous aggregate Cells.Create_Cell (VCGHeap, DAGCell); CStacks.Push (VCGHeap, DAGCell, ExpnStack); Node := STree.NullNode; --prune walk end if; end DownProcessAggregate; --------------------------------------------------------------------- procedure DownProcessAggregateChoiceRep (Node : in STree.SyntaxNode; ThisScope : in Dictionary.Scopes; VCGHeap : in out Cells.Heap_Record; ExpnStack : in out CStacks.Stack; NextNode : out STree.SyntaxNode) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out Statistics.TableUsage; --# derives ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ExpnStack, --# LexTokenManager.State, --# Node, --# STree.Table, --# ThisScope, --# VCGHeap & --# NextNode from Dictionary.Dict, --# ExpnStack, --# Node, --# STree.Table, --# VCGHeap; is FieldSym : Dictionary.Symbol; TypeCell : Cells.Cell; begin if DoingRecordAggregate (VCGHeap, ExpnStack) then --doing record so need to get field sym FieldSym := Dictionary.LookupSelectedItem (AggregateType (VCGHeap, ExpnStack), STree.Node_Lex_String (Node => STree.Last_Child_Of (Start_Node => Node)), ThisScope, Dictionary.ProgramContext); CreateFixedVarCell (TypeCell, VCGHeap, FieldSym); CStacks.Push (VCGHeap, TypeCell, ExpnStack); NextNode := STree.NullNode; else NextNode := STree.Child_Node (Current_Node => Node); end if; end DownProcessAggregateChoiceRep; ---------------------------------------------------------------------------------- procedure DownProcessRecordComponentSelectorName (Node : in STree.SyntaxNode; ThisScope : in Dictionary.Scopes; VCGHeap : in out Cells.Heap_Record; ExpnStack : in out CStacks.Stack; NextNode : out STree.SyntaxNode) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out Statistics.TableUsage; --# derives ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ExpnStack, --# LexTokenManager.State, --# Node, --# STree.Table, --# ThisScope, --# VCGHeap & --# NextNode from ; is FieldSym : Dictionary.Symbol; TypeCell : Cells.Cell; begin FieldSym := Dictionary.LookupSelectedItem (AggregateType (VCGHeap, ExpnStack), STree.Node_Lex_String (Node => STree.Last_Child_Of (Start_Node => Node)), ThisScope, Dictionary.ProgramContext); CreateFixedVarCell (TypeCell, VCGHeap, FieldSym); CStacks.Push (VCGHeap, TypeCell, ExpnStack); NextNode := STree.NullNode; end DownProcessRecordComponentSelectorName; --------------------------------------------------------------------- procedure UpProcessExtensionAggregate (VCGHeap : in out Cells.Heap_Record; ExpnStack : in out CStacks.Stack) --# global in out Statistics.TableUsage; --# derives ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# ExpnStack, --# VCGHeap; is TempAgg : Cells.Cell; begin CStacks.PopOff (VCGHeap, ExpnStack, TempAgg); -- hold the 
aggregate expression or list if Cells.Get_Kind (VCGHeap, CStacks.Top (VCGHeap, ExpnStack)) = Cell_Storage.Aggregate_Counter then -- we are doing a record and just need to get rid of the counter CStacks.Pop (VCGHeap, ExpnStack); -- get rid of counter end if; -- Convert aggregate to a finished MkAggregate function Cells.Set_Kind (VCGHeap, TempAgg, Cell_Storage.Mk_Aggregate); -- Finally, restore aggregate DAG to TOS CStacks.Push (VCGHeap, TempAgg, ExpnStack); end UpProcessExtensionAggregate; --------------------------------------------------------------------- procedure UpProcessAggregateChoiceRep (Node : in STree.SyntaxNode; VCGHeap : in out Cells.Heap_Record; ExpnStack : in out CStacks.Stack) --# global in Dictionary.Dict; --# in STree.Table; --# in out Statistics.TableUsage; --# derives ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# Dictionary.Dict, --# ExpnStack, --# Node, --# STree.Table, --# VCGHeap; is begin if DoingArrayAggregate (VCGHeap, ExpnStack) then PushFunction (Cell_Storage.List_Function, VCGHeap, ExpnStack); if STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node)) /= STree.NullNode then PushOperator (Binary, SP_Symbols.ampersand, VCGHeap, ExpnStack); end if; end if; end UpProcessAggregateChoiceRep; --------------------------------------------------------------------- procedure ProcessAncestorPart (Node : in STree.SyntaxNode; VCGHeap : in out Cells.Heap_Record; ExpnStack : in out CStacks.Stack) --# global in Dictionary.Dict; --# in STree.Table; --# in out Statistics.TableUsage; --# derives ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# Dictionary.Dict, --# ExpnStack, --# Node, --# STree.Table, --# VCGHeap; is AggTemp : Cells.Cell; AggType : Dictionary.Symbol; InsertPointForInheritedFields : Cells.Cell; -- following three cells are used to create a model of inherited tagged records InheritedFieldsModel, InheritField, MkAggregateCell : Cells.Cell; InheritSym : Dictionary.Symbol; AncestorDAG : Cells.Cell; AncestorType : Dictionary.Symbol; -------------------------------------------------------------------- function IsNullAggregate return Boolean --# global in Node; --# in STree.Table; is begin return STree.Syntax_Node_Type (Node => STree.Next_Sibling (Current_Node => Node)) = SP_Symbols.RWnull; end IsNullAggregate; -------------------------------------------------------------------- function NumberOfListElementsNeeded (RecordSym : Dictionary.Symbol) return Natural --# global in Dictionary.Dict; is NumberOfFields : Natural; begin NumberOfFields := Dictionary.GetNumberOfNonExtendedComponents (RecordSym); -- If the root type is a null record then we ignore it since it won't have -- any aggregate choice expressions to associate with it. In this case we need -- one fewer fields slots in the empty list of the immediately preceding record -- in the model. 
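-- For instance (illustrative only): an extension whose root type is a null
-- record supplies no choice expression for that root, so the slot that would
-- otherwise be reserved for it is dropped below.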
if Dictionary.TypeIsExtendedTagged (RecordSym) and then Dictionary.NoFieldsBelowThisRecord (RecordSym) then NumberOfFields := NumberOfFields - 1; end if; return NumberOfFields; end NumberOfListElementsNeeded; begin -- Node is SP_Symbols.[annotation_]ancestor_part -- Next_Sibling is either RWNull or record_component_association -- Direction is UP -- TOS is ancestor expression -- 2nd TOS is qualified expression prefix giving overall aggregate type if IsNullAggregate then -- Required model is simply an aggregate with a single assignment of the ancestor part -- itself since no other fields are involved -- CStacks.PopOff (VCGHeap, ExpnStack, AncestorDAG); AggType := Cells.Get_Symbol_Value (VCGHeap, CStacks.Top (VCGHeap, ExpnStack)); InheritSym := Dictionary.CurrentSymbol (Dictionary.FirstRecordComponent (AggType)); -- Convert qualifier to aggregate Cells.Set_Kind (VCGHeap, CStacks.Top (VCGHeap, ExpnStack), Cell_Storage.Mk_Aggregate); -- create aggregate association CreateOpCell (InheritedFieldsModel, VCGHeap, SP_Symbols.becomes); -- Create a cell for field name Inherit CreateFixedVarCell (InheritField, VCGHeap, InheritSym); -- connect it to := SetLeftArgument (InheritedFieldsModel, InheritField, VCGHeap); SetRightArgument (InheritedFieldsModel, AncestorDAG, VCGHeap); SetRightArgument (CStacks.Top (VCGHeap, ExpnStack), InheritedFieldsModel, VCGHeap); else -- not a null aggregate AncestorType := STree.NodeSymbol (Node); -- Build an empty structure into which the associations can be plugged and -- associate the ancestor expression with the inherit field of the rightmost -- mk_record in the model CStacks.PopOff (VCGHeap, ExpnStack, AncestorDAG); -- ExpnStack now has symbol of aggregate type on top. We turn -- this into an IncompleteAggregate cell (it becomes MkAggregate when -- we get back to the aggregate node on the way up). 
Cells.Set_Kind (VCGHeap, CStacks.Top (VCGHeap, ExpnStack), Cell_Storage.Incomplete_Aggregate); -- We know we are dealing with a record aggregate so we can build an empty -- comma-list into which to insert later associations CreateEmptyList (NumberOfListElementsNeeded (Cells.Get_Symbol_Value (VCGHeap, CStacks.Top (VCGHeap, ExpnStack))), VCGHeap, ExpnStack); -- and then add in the "inherit" fields needed to make an extended record model InsertPointForInheritedFields := CStacks.Top (VCGHeap, ExpnStack); AggType := Cells.Get_Symbol_Value (VCGHeap, CStacks.Top (VCGHeap, ExpnStack)); loop -- immediate exit if we aren't doing tagged record model exit when not Dictionary.TypeIsTagged (AggType); -- stop building model when we get to ancestor part exit when Dictionary.Types_Are_Equal (Left_Symbol => AggType, Right_Symbol => AncestorType, Full_Range_Subtype => False); -- Step down to first inherited record InheritSym := Dictionary.CurrentSymbol (Dictionary.FirstRecordComponent (AggType)); AggType := Dictionary.GetType (InheritSym); -- Finally reached field of the root type - shouldn't get here exit when not Dictionary.TypeIsTagged (AggType); -- if we have zero components then we don't need to include record in model exit when not Dictionary.RecordHasSomeFields (AggType); -- At this point we have an inherited record with >0 components CreateOpCell (InheritedFieldsModel, VCGHeap, SP_Symbols.becomes); -- Create a cell for field name Inherit CreateFixedVarCell (InheritField, VCGHeap, InheritSym); -- connect it to := SetLeftArgument (InheritedFieldsModel, InheritField, VCGHeap); -- if we have reached the ancestor type then we assign that otherwise we -- create an MkAggregate cell for the next level if Dictionary.Types_Are_Equal (Left_Symbol => AggType, Right_Symbol => AncestorType, Full_Range_Subtype => False) then SetRightArgument (InheritedFieldsModel, AncestorDAG, VCGHeap); InsertParameterInNextFreeSlot (InsertPointForInheritedFields, InheritedFieldsModel, VCGHeap); else -- Create MkAggregate cell CreateCellKind (MkAggregateCell, VCGHeap, Cell_Storage.Mk_Aggregate); Cells.Set_Symbol_Value (VCGHeap, MkAggregateCell, AggType); -- connect it to := SetRightArgument (InheritedFieldsModel, MkAggregateCell, VCGHeap); -- We need to create an empty comma list and to do that need -- the aggregate model on top of stack, so we push it for now CStacks.Push (VCGHeap, MkAggregateCell, ExpnStack); CreateEmptyList (NumberOfListElementsNeeded (AggType), VCGHeap, ExpnStack); -- Restore stack CStacks.Pop (VCGHeap, ExpnStack); -- We now have a completed model of the inherited part and just need to insert it in the -- overall structure InsertParameterInNextFreeSlot (InsertPointForInheritedFields, InheritedFieldsModel, VCGHeap); InsertPointForInheritedFields := MkAggregateCell; -- ready for next pass through loop end if; end loop; -- temporarily remove aggregate from stack CStacks.PopOff (VCGHeap, ExpnStack, AggTemp); -- if we are doing positional association (other than simple -- qualified expression) we need to maintain a counter -- and we use a new cell's value field for this purpose -- Since we will be looking for fields that are not provided by the ancestor part, we -- start with the counter set to the number of fields in the ancestor CreateAggregateCounter (Dictionary.GetNumberOfComponents (AncestorType) + 1, VCGHeap, ExpnStack); -- Put aggregate back on top of stack CStacks.Push (VCGHeap, AggTemp, ExpnStack); end if; end ProcessAncestorPart; -- Construct an attribute "PrefixSymbol'tail (ExpnDAG)" and return it as RHS procedure
BuildStreamRHS (VCGHeap : in out Cells.Heap_Record; PrefixSymbol : in Dictionary.Symbol; ExpnDAG : in Cells.Cell; RHS : out Cells.Cell) --# global in out Statistics.TableUsage; --# derives RHS from VCGHeap & --# Statistics.TableUsage from *, --# PrefixSymbol, --# VCGHeap & --# VCGHeap from *, --# ExpnDAG, --# PrefixSymbol; is StreamFunction, StreamPrefix, StreamIdent : Cells.Cell; begin -- BuildStreamRHS -- create necessary cells CreateOpCell (StreamFunction, VCGHeap, SP_Symbols.apostrophe); CreateFixedVarCell (StreamPrefix, VCGHeap, PrefixSymbol); CreateCellKind (StreamIdent, VCGHeap, Cell_Storage.Attrib_Function); Cells.Set_Lex_Str (VCGHeap, StreamIdent, LexTokenManager.Tail_Token); --assemble into a function attribute SetLeftArgument (StreamFunction, StreamPrefix, VCGHeap); SetRightArgument (StreamFunction, StreamIdent, VCGHeap); SetRightArgument (StreamIdent, ExpnDAG, VCGHeap); RHS := StreamFunction; end BuildStreamRHS; ------------------------------------------------------------------------- -- Changed to use a stack to do a depth-first search, -- and altered from a function to a procedure to allow -- debugging side effects procedure ContainsQuantIdent (DataElem, QuantIdent : in Cells.Cell; VCGHeap : in out Cells.Heap_Record; Result : out Boolean) --# global in out Statistics.TableUsage; --# derives Result, --# VCGHeap from DataElem, --# QuantIdent, --# VCGHeap & --# Statistics.TableUsage from *, --# DataElem, --# QuantIdent, --# VCGHeap; is CurrElem : Cells.Cell; FoundQuantIdent : Boolean; MyTempStack : CStacks.Stack; Ident_To_Find : Dictionary.Symbol; begin Ident_To_Find := Cells.Get_Symbol_Value (VCGHeap, QuantIdent); CurrElem := DataElem; FoundQuantIdent := False; CStacks.CreateStack (MyTempStack); CStacks.Push (VCGHeap, CurrElem, MyTempStack); while not (CStacks.IsEmpty (MyTempStack) or FoundQuantIdent) loop CStacks.PopOff (VCGHeap, MyTempStack, CurrElem); -- Check if we have found what we are looking for. if Cells.Get_Symbol_Value (VCGHeap, CurrElem) = Ident_To_Find then FoundQuantIdent := True; end if; case Cells.Get_Kind (VCGHeap, CurrElem) is when Cell_Storage.Fixed_Var | Cell_Storage.Reference => -- A fixed identifier; covered above. These can -- apparently point to themselves, so we must not push -- the A or B pointer on the stack. null; when others => -- Explore both sides if they are non-null if not Cells.Is_Null_Cell (LeftPtr (VCGHeap, CurrElem)) then CStacks.Push (VCGHeap, LeftPtr (VCGHeap, CurrElem), MyTempStack); end if; if not Cells.Is_Null_Cell (RightPtr (VCGHeap, CurrElem)) then CStacks.Push (VCGHeap, RightPtr (VCGHeap, CurrElem), MyTempStack); end if; end case; end loop; -- Clean up a bit... while not CStacks.IsEmpty (MyTempStack) loop CStacks.Pop (VCGHeap, MyTempStack); end loop; Result := FoundQuantIdent; end ContainsQuantIdent; ----------------------------------------------------------------------------- -- Prints DAG at DAG_Root in GraphViz's "Dot" format. -- -- The output appears in a file called "dagXXX.dot" where -- XXX is the decimal integer value of DAG_Root - this supplies -- a low-tech but sure way to differentiate between the -- many DAGs that might be produced by a single Examiner run. procedure Debug_Print_DAG (Start_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; DAG_Root : in Cells.Cell; VCG_Heap : in out Cells.Heap_Record) --# global in CommandLineData.Content; --# derives VCG_Heap from * & --# null from CommandLineData.Content, --# DAG_Root, --# Scope, --# Start_Node; is -- Uses String slicing and catenation, so not SPARK...
--# hide Debug_Print_DAG; Current_Unit : ContextManager.UnitDescriptors; Current_File_Name : E_Strings.T; DAG_File_Name : E_Strings.T; DAG_File : SPARK_IO.File_Type := SPARK_IO.Null_File; DAG_Posn : LexTokenManager.Token_Position; Status : SPARK_IO.File_Status; DAG_Num : constant String := Cells.Cell'Image (DAG_Root); DAG_ID : constant String := "dag" & DAG_Num (2 .. DAG_Num'Last) & ".dot"; begin -- We want to produce a message that ties the filename containing -- the DAG printout to the file, line-number, and column-number of -- the offending expression. -- -- The source filename can be obtained from ContextManager, but -- This approach DOES NOT WORK for abstract pre-conditions and post-conditions -- since these can come from source files other than that associated with -- ContextManager.CurrentUnit. Currently, this code can produce the -- incorrect source file reference if called from Build_Annotation_Expression. Current_Unit := ContextManager.Ops.Current_Unit; Current_File_Name := LexTokenManager.Lex_String_To_String (Lex_Str => ContextManager.Ops.GetSourceFileName (Descriptor => ContextManager.Ops.Get_File_Descriptor (Unit_Descriptor => Current_Unit))); -- The Source position of the expression itself comes from STree.Table, thus: DAG_Posn := STree.Node_Position (Node => Start_Node); DAG_File_Name := E_Strings.Copy_String (Str => DAG_ID); CommandLineData.Normalize_File_Name_To_Output_Directory (F => DAG_File_Name); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => FileSystem.Just_File (Fn => Current_File_Name, Ext => True)); declare Line_Str : constant String := DAG_Posn.Start_Line_No'Img; Col_Str : constant String := DAG_Posn.Start_Pos'Img; begin Debug.PrintMsg (":" & Line_Str (2 .. Line_Str'Last) & ":" & Col_Str (2 .. Col_Str'Last) & ": DAG printed to file ", False); if CommandLineData.Content.Plain_Output then E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => FileSystem.Just_File (Fn => DAG_File_Name, Ext => True)); else E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => DAG_File_Name); end if; end; E_Strings.Create (File => DAG_File, Name_Of_File => DAG_File_Name, Form_Of_File => "", Status => Status); if Status = SPARK_IO.Ok then DAG_IO.Print_DAG_Dot (Heap => VCG_Heap, Output_File => DAG_File, Root => DAG_Root, Scope => Scope, Wrap_Limit => DAG_IO.No_Wrap); SPARK_IO.Close (DAG_File, Status); else Debug.PrintMsg (" - File create failed", False); end if; SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); end Debug_Print_DAG; -- Model_Catenation - models the semantics of the "&" operator -- between characters and/or strings in constant expressions. 
-- For example, if ExpnStack contains two entries representing -- "ab" and "cd", then this routine leaves ExpnStack containing -- a single top entry representing "abcd" procedure Model_Catenation (ExpnStack : in out CStacks.Stack; VCGHeap : in out Cells.Heap_Record) --# global in Dictionary.Dict; --# in out LexTokenManager.State; --# in out Statistics.TableUsage; --# derives ExpnStack, --# LexTokenManager.State, --# Statistics.TableUsage, --# VCGHeap from *, --# Dictionary.Dict, --# ExpnStack, --# LexTokenManager.State, --# VCGHeap; is Left, Right : Cells.Cell; Left_String, Right_String : E_Strings.T; Left_Length, Right_Length : E_Strings.Lengths; New_String : E_Strings.T; New_Lex_Str : LexTokenManager.Lex_String; OK_Left, OK_Right : Boolean; procedure Get_String (The_Cell : in Cells.Cell; The_String : out E_Strings.T; OK : out Boolean) --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in VCGHeap; --# derives OK, --# The_String from Dictionary.Dict, --# LexTokenManager.State, --# The_Cell, --# VCGHeap; is Char_Code : Integer; Unused : Maths.ErrorCode; Sym : Dictionary.Symbol; Sym_Type : Dictionary.Symbol; begin if Cells.Get_Kind (VCGHeap, The_Cell) = Cell_Storage.Manifest_Const then The_String := LexTokenManager.Lex_String_To_String (Lex_Str => Cells.Get_Lex_Str (VCGHeap, The_Cell)); OK := True; elsif Cells.Get_Kind (VCGHeap, The_Cell) = Cell_Storage.Named_Const then Sym := Cells.Get_Symbol_Value (VCGHeap, The_Cell); Sym_Type := Dictionary.GetRootType (Dictionary.GetType (Sym)); if Dictionary.IsPredefinedStringType (Sym_Type) then -- Grab the value of the String constant from the Dictionary. This will -- have been recorded in the Dictionary by Sem.CompUnit.Wf_Constant_Declaration The_String := LexTokenManager.Lex_String_To_String (Dictionary.Get_Value (The_Constant => Sym)); OK := True; elsif Dictionary.IsPredefinedCharacterType (Sym_Type) then --# accept F, 10, Unused, "Unused here OK"; Maths.ValueToInteger (Maths.ValueRep (Dictionary.Get_Value (The_Constant => Cells.Get_Symbol_Value (VCGHeap, The_Cell))), Char_Code, Unused); --# end accept; if Char_Code = 0 then -- can't model nuls in strings The_String := E_Strings.Empty_String; OK := False; else The_String := E_Strings.Copy_String (Str => """"); E_Strings.Append_Char (E_Str => The_String, Ch => Character'Val (Char_Code)); E_Strings.Append_Char (E_Str => The_String, Ch => '"'); OK := True; end if; else -- Not a String or a Character constant. ????? 
The_String := E_Strings.Empty_String; -- should not occur OK := False; end if; else The_String := E_Strings.Empty_String; -- should not occur OK := False; end if; --# accept F, 33, Unused, "Unused here OK"; end Get_String; begin -- Model_Catenation -- get left and right strings to be concatenated CStacks.PopOff (VCGHeap, ExpnStack, Right); CStacks.PopOff (VCGHeap, ExpnStack, Left); if (Cells.Get_Kind (VCGHeap, Left) = Cell_Storage.Manifest_Const or else Cells.Get_Kind (VCGHeap, Left) = Cell_Storage.Named_Const) and then (Cells.Get_Kind (VCGHeap, Right) = Cell_Storage.Manifest_Const or else Cells.Get_Kind (VCGHeap, Right) = Cell_Storage.Named_Const) then Get_String (The_Cell => Left, The_String => Left_String, OK => OK_Left); Get_String (The_Cell => Right, The_String => Right_String, OK => OK_Right); if OK_Left and OK_Right then Left_Length := E_Strings.Get_Length (E_Str => Left_String); Right_Length := E_Strings.Get_Length (E_Str => Right_String); -- build an examiner line with concatenated strings in it New_String := E_Strings.Copy_String (Str => """"); for I in E_Strings.Lengths range 2 .. Left_Length - 1 loop E_Strings.Append_Char (E_Str => New_String, Ch => E_Strings.Get_Element (E_Str => Left_String, Pos => I)); end loop; -- at this point we have " followed by left string and no terminating " for I in E_Strings.Lengths range 2 .. Right_Length - 1 loop E_Strings.Append_Char (E_Str => New_String, Ch => E_Strings.Get_Element (E_Str => Right_String, Pos => I)); end loop; E_Strings.Append_Char (E_Str => New_String, Ch => '"'); LexTokenManager.Insert_Examiner_String (Str => New_String, Lex_Str => New_Lex_Str); -- Having catenated the two halfs, we change Left to -- be a Manifest_Const cell in all cases Cells.Set_Kind (VCGHeap, Left, Cell_Storage.Manifest_Const); Cells.Set_Lex_Str (VCGHeap, Left, New_Lex_Str); CStacks.Push (VCGHeap, Left, ExpnStack); else -- can't model string with nul in it so push & op instead CStacks.Push (VCGHeap, Left, ExpnStack); CStacks.Push (VCGHeap, Right, ExpnStack); PushOperator (Binary, SP_Symbols.ampersand, VCGHeap, ExpnStack); end if; else -- & operator shouldn't have got here so just push it as binop CStacks.Push (VCGHeap, Left, ExpnStack); CStacks.Push (VCGHeap, Right, ExpnStack); PushOperator (Binary, SP_Symbols.ampersand, VCGHeap, ExpnStack); end if; end Model_Catenation; ---------------------------------------- procedure Build_Annotation_Expression (Exp_Node : in STree.SyntaxNode; Instantiated_Subprogram : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Calling_Scope : in Dictionary.Scopes; Force_Abstract : in Boolean; Loop_Stack : in LoopContext.T; Generate_Function_Instantiations : in Boolean; VC_Failure : in out Boolean; VC_Contains_Reals : in out Boolean; VCG_Heap : in out Cells.Heap_Record; DAG_Root : out Cells.Cell; Function_Defs : in out CStacks.Stack) --# global in CommandLineData.Content; --# in STree.Table; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives DAG_Root, --# Dictionary.Dict, --# Function_Defs, --# LexTokenManager.State, --# VCG_Heap from Calling_Scope, --# CommandLineData.Content, --# Dictionary.Dict, --# Exp_Node, --# Force_Abstract, --# Function_Defs, --# Generate_Function_Instantiations, --# Instantiated_Subprogram, --# LexTokenManager.State, --# Loop_Stack, --# Scope, --# STree.Table, --# VCG_Heap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Calling_Scope, --# CommandLineData.Content, 
--# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Exp_Node, --# Force_Abstract, --# Function_Defs, --# Generate_Function_Instantiations, --# Instantiated_Subprogram, --# LexTokenManager.State, --# Loop_Stack, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# VCG_Heap & --# Statistics.TableUsage, --# VC_Contains_Reals, --# VC_Failure from *, --# Calling_Scope, --# CommandLineData.Content, --# Dictionary.Dict, --# Exp_Node, --# Force_Abstract, --# Function_Defs, --# Generate_Function_Instantiations, --# Instantiated_Subprogram, --# LexTokenManager.State, --# Loop_Stack, --# Scope, --# STree.Table, --# VCG_Heap; is separate; -- This procedure traverses a syntax tree of an expression, which may be -- - an expression of an assignment statement, -- - a condition of an if_statement (or elsif_part), -- - an expression of a case_statement, -- - a condition of an iteration scheme. procedure BuildExpnDAG (StartNode : in STree.SyntaxNode; ExpnScope : in Dictionary.Scopes; Scope : in Dictionary.Scopes; LineNmbr : in Integer; DoRtc : in Boolean; AssumeRvalues : in Boolean; LoopStack : in LoopContext.T; FlowHeap : in out Heap.HeapRecord; VCGHeap : in out Cells.Heap_Record; ContainsReals : in out Boolean; VCGFailure : in out Boolean; ShortCircuitStack : in out CStacks.Stack; CheckStack : in out CStacks.Stack; KindOfStackedCheck : in out Graph.Proof_Context_Type; DAGRoot : out Cells.Cell) --# global in CommandLineData.Content; --# in STree.Table; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# derives CheckStack, --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# AssumeRvalues, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoRtc, --# ExpnScope, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# Scope, --# ShortCircuitStack, --# StartNode, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# DAGRoot from AssumeRvalues, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoRtc, --# ExpnScope, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# Scope, --# ShortCircuitStack, --# StartNode, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from AssumeRvalues, --# CheckStack, --# CommandLineData.Content, --# Dictionary.Dict, --# DoRtc, --# ErrorHandler.Error_Context, --# ExpnScope, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StartNode, --# StmtStack.S, --# STree.Table, --# VCGHeap; is separate; -------------------------------------------------------------------------------- procedure BuildGraph (StartNode : in STree.SyntaxNode; SubprogSym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; OutputFile : in SPARK_IO.File_Type; EndPosition : in LexTokenManager.Token_Position; VCGFailure : in out Boolean; VCGHeap : in out Cells.Heap_Record; FlowHeap : in out Heap.HeapRecord; Semantic_Error_In_Subprogram : in Boolean; DataFlowErrorInSubprogram : in Boolean; Type_Check_Exports : in Boolean) is separate; 
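-- A minimal usage sketch for BuildGraph (illustrative only; the real callers live
-- elsewhere in the Examiner, and the object names below are placeholders rather
-- than declarations from this package):
--
--    BuildGraph
--      (StartNode                    => Body_Node,     -- syntax tree of the subprogram body
--       SubprogSym                   => Subprog_Sym,
--       Scope                        => Local_Scope,
--       OutputFile                   => VC_File,
--       EndPosition                  => End_Pos,
--       VCGFailure                   => VCG_Failure,
--       VCGHeap                      => VCG_Heap,
--       FlowHeap                     => Flow_Heap,
--       Semantic_Error_In_Subprogram => False,
--       DataFlowErrorInSubprogram    => False,
--       Type_Check_Exports           => True);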
-------------------------------------------------------------------------- procedure BuildConstantInitializationDAG (StartNode : in STree.SyntaxNode; Scope : in Dictionary.Scopes; TheHeap : in out Cells.Heap_Record; FlowHeap : in out Heap.HeapRecord; DAGRoot : out Cells.Cell) is ContainsReals : Boolean; VCGFailure : Boolean; ShortCircuitStack : CStacks.Stack; CheckStack : CStacks.Stack; KindOfStackedCheck : Graph.Proof_Context_Type; LoopStack : LoopContext.T; begin LoopContext.Initialize (LoopStack); --# accept F, 10, "ContainsReals, VCGFailure not used here"; ContainsReals := False; -- Not used in this subprogram VCGFailure := False; -- Not used in this subprogram --# end accept; CStacks.CreateStack (ShortCircuitStack); CStacks.CreateStack (CheckStack); KindOfStackedCheck := Graph.Unspecified; --# accept F, 10, KindOfStackedCheck, "KindOfStackedCheck not used here" & --# F, 10, CheckStack, "CheckStack not used here" & --# F, 10, ShortCircuitStack, "ShortCircuitStack not used here" & --# F, 10, VCGFailure, "VCGFailure not used here" & --# F, 10, ContainsReals, "ContainsReals not used here"; BuildExpnDAG (StartNode, Scope, Scope, 0, False, False, LoopStack, FlowHeap, TheHeap, ContainsReals, VCGFailure, ShortCircuitStack, CheckStack, KindOfStackedCheck, DAGRoot); --# end accept; end BuildConstantInitializationDAG; end DAG; spark-2012.0.deb/examiner/dag-loopcontext.adb0000644000175000017500000001223411753202336020032 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (DAG) package body LoopContext is procedure Initialize (S : out T) is begin S.CurrentLoopNumber := 0; CStacks.CreateStack (S.LoopStack); end Initialize; --------------------------------------------------------------------- procedure EnterLoop (Scope : in Dictionary.Scopes; S : in out T; VCGHeap : in out Cells.Heap_Record; LoopScope : out Dictionary.Scopes) is LoopSym : Dictionary.Symbol; LoopCell : Cells.Cell; begin S.CurrentLoopNumber := S.CurrentLoopNumber + 1; LoopSym := Dictionary.GetLoop (Dictionary.GetRegion (Scope), S.CurrentLoopNumber); Cells.Create_Cell (VCGHeap, LoopCell); Cells.Set_Symbol_Value (VCGHeap, LoopCell, LoopSym); CStacks.Push (VCGHeap, LoopCell, S.LoopStack); LoopScope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => LoopSym); --# accept F, 601, Statistics.TableUsage, Dictionary.Dict, "False coupling expected" & --# F, 601, Statistics.TableUsage, Scope, "False coupling expected" & --# F, 601, Statistics.TableUsage, S.CurrentLoopNumber, "False coupling expected"; end EnterLoop; ----------------------------------------------------------------------- procedure ExitLoop (S : in out T; VCGHeap : in out Cells.Heap_Record; LoopScope : in out Dictionary.Scopes) is begin CStacks.Pop (VCGHeap, S.LoopStack); LoopScope := Dictionary.GetEnclosingScope (LoopScope); end ExitLoop; ---------------------------------------------------------------------- function CurrentLoopSym (S : T; VCGHeap : Cells.Heap_Record) return Dictionary.Symbol is begin return Cells.Get_Symbol_Value (VCGHeap, CStacks.Top (VCGHeap, S.LoopStack)); end CurrentLoopSym; ---------------------------------------------------------------------- function EnclosingLoopSym (S : T; VCGHeap : Cells.Heap_Record; CurrentLoop : Dictionary.Symbol) return Dictionary.Symbol is LocalStack : CStacks.Stack; Result : Dictionary.Symbol; begin -- start by looping down stack to find CurrentLoop LocalStack := S.LoopStack; -- start at top of stack loop -- we want to find the current loop symbol on top of our ever-reducing stack exit when Cells.Get_Symbol_Value (VCGHeap, CStacks.Top (VCGHeap, LocalStack)) = CurrentLoop; -- not found, so move down (or shrink) stack LocalStack := CStacks.NonDestructivePop (VCGHeap, LocalStack); -- error trap for case where we search for a non-existing loop symbol exit when CStacks.IsEmpty (LocalStack); -- run out of stack end loop; -- At this point we either have a stack whose top item is the current loop or (gross error -- condition) an empty stack -- We want the next loop entry which we can find by shrinking the stack one more time.
-- We can do this safely, even in the error case, because the A_Ptr of Null is Null LocalStack := CStacks.NonDestructivePop (VCGHeap, LocalStack); if CStacks.IsEmpty (LocalStack) then -- no enclosing loops Result := Dictionary.NullSymbol; else -- there is an enclosing loop Result := Cells.Get_Symbol_Value (VCGHeap, CStacks.Top (VCGHeap, LocalStack)); end if; return Result; end EnclosingLoopSym; ---------------------------------------------------------------------- function CurrentLoopParameterSym (S : T; VCGHeap : Cells.Heap_Record) return Dictionary.Symbol is begin return Dictionary.GetLoopParameter (CurrentLoopSym (S, VCGHeap)); end CurrentLoopParameterSym; ---------------------------------------------------------------------- function CurrentLoopMovesInReverse (S : T; VCGHeap : Cells.Heap_Record) return Boolean is begin return Dictionary.LoopParameterMovesInReverse (CurrentLoopParameterSym (S, VCGHeap)); end CurrentLoopMovesInReverse; end LoopContext; ././@LongLink0000000000000000000000000000020300000000000011560 Lustar rootrootspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_task_type_declaration.adbspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaratio0000644000175000017500000004241411753202336033125 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with SLI; separate (Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Full_Type_Declaration) procedure Wf_Task_Type_Declaration (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; The_Heap : in out Heap.HeapRecord) is Task_Type_Sym : Dictionary.Symbol; Task_Scope : Dictionary.Scopes; Sym : Dictionary.Symbol; Ident_Node : STree.SyntaxNode; Anno_Node : STree.SyntaxNode; Closing_Ident_Node : STree.SyntaxNode; Discriminant_Node : STree.SyntaxNode; Ident_Str : LexTokenManager.Lex_String; Pragma_Node : STree.SyntaxNode; Global_Error : Boolean; Derives_Error : Boolean := False; function Get_Discriminant_Node (Task_Type_Declaration_Node : STree.SyntaxNode) return STree.SyntaxNode --# global in STree.Table; --# pre Syntax_Node_Type (Task_Type_Declaration_Node, STree.Table) = SP_Symbols.task_type_declaration; --# return Node => (Syntax_Node_Type (Node, STree.Table) = SP_Symbols.known_discriminant_part or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.task_type_annotation); is Result : STree.SyntaxNode; begin Result := Next_Sibling (Current_Node => Child_Node (Current_Node => Task_Type_Declaration_Node)); -- ASSUME Result = known_discriminant_part OR task_type_annotation SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Result) = SP_Symbols.known_discriminant_part or else Syntax_Node_Type (Node => Result) = SP_Symbols.task_type_annotation, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = known_discriminant_part OR task_type_annotation in Get_Discriminant_Node"); return Result; end Get_Discriminant_Node; ---------- function Get_Anno_Node (Task_Type_Declaration_Node : STree.SyntaxNode) return STree.SyntaxNode --# global in STree.Table; --# pre Syntax_Node_Type (Task_Type_Declaration_Node, STree.Table) = SP_Symbols.task_type_declaration; --# return Node => Syntax_Node_Type (Node, STree.Table) = SP_Symbols.moded_global_definition; is Result : STree.SyntaxNode; begin Result := Get_Discriminant_Node (Task_Type_Declaration_Node => Task_Type_Declaration_Node); -- ASSUME Result = known_discriminant_part OR task_type_annotation if Syntax_Node_Type (Node => Result) = SP_Symbols.known_discriminant_part then -- ASSUME Result = known_discriminant_part Result := Next_Sibling (Current_Node => Result); end if; -- ASSUME Result = task_type_annotation SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Result) = SP_Symbols.task_type_annotation, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = task_type_annotation in Get_Anno_Node"); Result := Child_Node (Current_Node => Result); -- ASSUME Result = moded_global_definition SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Result) = SP_Symbols.moded_global_definition, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = moded_global_definition in Get_Anno_Node"); return Result; end Get_Anno_Node; ---------- function Get_Task_Definition_Node (Task_Type_Declaration_Node : STree.SyntaxNode) return STree.SyntaxNode --# global in STree.Table; --# pre Syntax_Node_Type (Task_Type_Declaration_Node, STree.Table) = SP_Symbols.task_type_declaration; --# return Node => Syntax_Node_Type (Node, STree.Table) = SP_Symbols.task_definition; is Result : STree.SyntaxNode; begin Result := Last_Sibling_Of (Start_Node => Child_Node (Current_Node => Task_Type_Declaration_Node)); -- ASSUME Result = task_definition SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Result) = SP_Symbols.task_definition, Sys_Err => 
SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = task_definition in Get_Task_Definition_Node"); return Result; end Get_Task_Definition_Node; ---------- function Get_Closing_Ident_Node (Task_Type_Declaration_Node : STree.SyntaxNode) return STree.SyntaxNode --# global in STree.Table; --# pre Syntax_Node_Type (Task_Type_Declaration_Node, STree.Table) = SP_Symbols.task_type_declaration; --# return Node => Syntax_Node_Type (Node, STree.Table) = SP_Symbols.identifier; is Result : STree.SyntaxNode; begin Result := Next_Sibling (Current_Node => Child_Node (Current_Node => Get_Task_Definition_Node (Task_Type_Declaration_Node => Task_Type_Declaration_Node))); -- ASSUME Result = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Result) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = identifier in Get_Closing_Ident_Node"); return Result; end Get_Closing_Ident_Node; ---------- function Get_Priority_Pragma_Node (Task_Type_Declaration_Node : STree.SyntaxNode) return STree.SyntaxNode --# global in STree.Table; --# pre Syntax_Node_Type (Task_Type_Declaration_Node, STree.Table) = SP_Symbols.task_type_declaration; --# return Node => Syntax_Node_Type (Node, STree.Table) = SP_Symbols.priority_pragma; is Result : STree.SyntaxNode; begin Result := Parent_Node (Current_Node => Last_Child_Of (Start_Node => Get_Task_Definition_Node (Task_Type_Declaration_Node => Task_Type_Declaration_Node))); -- ASSUME Result = priority_pragma SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Result) = SP_Symbols.priority_pragma, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = priority_pragma in Get_Priority_Pragma_Node"); return Result; end Get_Priority_Pragma_Node; ---------- procedure Check_Pragma_Validity (End_Node_Position : in LexTokenManager.Token_Position; Task_Type_Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# End_Node_Position, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Task_Type_Sym; is Priority_Found, Interrupt_Priority_Found : Boolean; begin Priority_Found := Dictionary.GetTypeHasPragma (Task_Type_Sym, Dictionary.Priority); Interrupt_Priority_Found := Dictionary.GetTypeHasPragma (Task_Type_Sym, Dictionary.InterruptPriority); -- There must be either Priority or Interrupt_Priority if not (Priority_Found or else Interrupt_Priority_Found) then ErrorHandler.Semantic_Error (Err_Num => 876, Reference => ErrorHandler.No_Reference, Position => End_Node_Position, Id_Str => LexTokenManager.Null_String); end if; end Check_Pragma_Validity; begin -- Wf_Task_Type_Declaration Ident_Node := Child_Node (Current_Node => Node); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Wf_Task_Type_Declaration"); Ident_Str := Node_Lex_String (Node => Ident_Node); Discriminant_Node := Get_Discriminant_Node (Task_Type_Declaration_Node => Node); Sym := Dictionary.LookupItem (Name => Ident_Str, Scope => Scope, Context => Dictionary.ProofContext, Full_Package_Name => False); if Dictionary.Is_Null_Symbol (Sym) or else (Dictionary.IsTypeMark (Sym) and then Dictionary.TypeIsAnnounced (TheType => Sym) 
and then not Dictionary.Is_Declared (Item => Sym)) then if not Dictionary.Is_Null_Symbol (Sym) then STree.Set_Node_Lex_String (Sym => Sym, Node => Ident_Node); end if; Dictionary.Add_Task_Type (Name => Ident_Str, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Node), End_Position => Node_Position (Node => Node)), Scope => Scope, Context => Dictionary.ProgramContext, Constrained => (Syntax_Node_Type (Node => Discriminant_Node) /= SP_Symbols.known_discriminant_part), The_Type => Task_Type_Sym); STree.Add_Node_Symbol (Node => Ident_Node, Sym => Task_Type_Sym); if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Symbol (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Ident_Node, Symbol => Task_Type_Sym, Is_Declaration => True); end if; Task_Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Task_Type_Sym); -- wff discriminants here if Syntax_Node_Type (Node => Discriminant_Node) = SP_Symbols.known_discriminant_part then -- ASSUME Discriminant_Node = known_discriminant_part Wf_Known_Discriminant_Part (Node => Discriminant_Node, Protected_Type_Sym => Task_Type_Sym, Scope => Scope); end if; --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.task_type_declaration; -- handle annotation -- global Anno_Node := Get_Anno_Node (Task_Type_Declaration_Node => Node); Wf_Global_Definition (Node => Anno_Node, Scope => Scope, Subprog_Sym => Task_Type_Sym, First_Seen => True, Sem_Err_Found => Global_Error); -- In data-flow mode the full dependency is always synthesised from the moded globals if CommandLineData.Content.Flow_Option = CommandLineData.Data_Flow then Dependency_Relation.Create_Full_Subprog_Dependency (Node_Pos => Node_Position (Node => Node), Subprog_Sym => Task_Type_Sym, Abstraction => Dictionary.IsAbstract, The_Heap => The_Heap); end if; -- derives Anno_Node := Next_Sibling (Current_Node => Anno_Node); -- ASSUME Anno_Node = dependency_relation OR declare_annotation OR NULL if Syntax_Node_Type (Node => Anno_Node) = SP_Symbols.dependency_relation then -- ASSUME Anno_Node = dependency_relation Dependency_Relation.Wf_Dependency_Relation (Node => Anno_Node, Scope => Task_Scope, Subprog_Sym => Task_Type_Sym, First_Seen => True, Glob_Def_Err => Global_Error, The_Heap => The_Heap); Anno_Node := Next_Sibling (Current_Node => Anno_Node); elsif Anno_Node = STree.NullNode or else Syntax_Node_Type (Node => Anno_Node) = SP_Symbols.declare_annotation then -- ASSUME Anno_Node = declare_annotation OR NULL -- No derives annotation if CommandLineData.Content.Flow_Option = CommandLineData.Info_Flow then -- In information-flow mode this is a semantic error - there must always be a derives annotation Derives_Error := True; ErrorHandler.Semantic_Error (Err_Num => 501, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Get_Anno_Node (Task_Type_Declaration_Node => Node)), Id_Str => LexTokenManager.Null_String); elsif CommandLineData.Content.Flow_Option = CommandLineData.Auto_Flow then -- In auto-flow mode, synthesise the dependency from the moded globals. -- It seems obvious to also check for data-flow mode here and remove -- the earlier call to CreateFullSubProgDependency but that won't work. 
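-- Synthesise the full dependency relation from the moded globals, using the same
-- call that the data-flow branch made earlier in this procedure.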
Dependency_Relation.Create_Full_Subprog_Dependency (Node_Pos => Node_Position (Node => Node), Subprog_Sym => Task_Type_Sym, Abstraction => Dictionary.IsAbstract, The_Heap => The_Heap); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Anno_Node = dependency_relation OR declare_annotation OR NULL in Wf_Task_Type_Declaration"); end if; -- ASSUME Anno_Node = declare_annotation OR NULL if Syntax_Node_Type (Node => Anno_Node) = SP_Symbols.declare_annotation then -- ASSUME Anno_Node = declare_annotation Wf_Declare_Annotation (Node => Anno_Node, Scope => Task_Scope, Task_Or_Proc => Task_Type_Sym, First_Seen => True, The_Heap => The_Heap); elsif Anno_Node /= STree.NullNode then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Anno_Node = declare_annotation OR NULL in Wf_Task_Type_Declaration"); end if; -- if there are errors in the task type signature then mark it as malformed if Global_Error or else Derives_Error then Dictionary.SetSubprogramSignatureNotWellformed (Dictionary.IsAbstract, Task_Type_Sym); end if; --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.task_type_declaration; Pragma_Node := Get_Priority_Pragma_Node (Task_Type_Declaration_Node => Node); -- deal with priority pragma which should be first Wf_Priority_Pragma (Node => Pragma_Node, Scope => Task_Scope, The_Heap => The_Heap); -- check any other pragmas loop Pragma_Node := Next_Sibling (Current_Node => Parent_Node (Current_Node => Pragma_Node)); -- ASSUME Pragma_Node = apragma OR identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Pragma_Node) = SP_Symbols.apragma or else Syntax_Node_Type (Node => Pragma_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Pragma_Node = apragma OR identifier in Wf_Task_Type_Declaration"); exit when Syntax_Node_Type (Node => Pragma_Node) = SP_Symbols.identifier; --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.task_type_declaration and --# Syntax_Node_Type (Pragma_Node, STree.Table) = SP_Symbols.apragma; Wf_Pragma (Node => Pragma_Node, Scope => Task_Scope); end loop; -- closing identifier must match initial Closing_Ident_Node := Get_Closing_Ident_Node (Task_Type_Declaration_Node => Node); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => Node_Lex_String (Node => Closing_Ident_Node)) /= LexTokenManager.Str_Eq then ErrorHandler.Semantic_Error (Err_Num => 58, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Closing_Ident_Node), Id_Str => Ident_Str); end if; Check_Pragma_Validity (End_Node_Position => Node_Position (Node => Closing_Ident_Node), Task_Type_Sym => Task_Type_Sym); else -- illegal redeclaration ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); end if; end Wf_Task_Type_Declaration; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_primary-protected_references_by.adb0000644000175000017500000000753411753202336030307 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; 
either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Walk_Expression_P.Wf_Primary) function Protected_References_By (Sym : Dictionary.Symbol; Scope : Dictionary.Scopes) return Dictionary.Symbol is Result : Dictionary.Symbol := Dictionary.NullSymbol; Current_Var : Dictionary.Symbol; It : Dictionary.Iterator; function Is_Local_Protected_State (Var_Sym : Dictionary.Symbol; Scope : Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; -- pre IsOwnVariable (Var_Sym) and GetOwnVariableProtected (Var_Sym) -- returns True if the owner of Var_Sym is the protected type which also encloses the -- scope we are making the function call in is Compilation_Unit : Dictionary.Symbol; function Get_Enclosing_Package_Or_Protected_Type (Scope : Dictionary.Scopes) return Dictionary.Symbol --# global in Dictionary.Dict; is Current : Dictionary.Scopes; Region : Dictionary.Symbol; begin Current := Scope; loop Region := Dictionary.GetRegion (Current); exit when Dictionary.IsPackage (Region); exit when Dictionary.IsProtectedType (Region); Current := Dictionary.GetEnclosingScope (Current); end loop; return Region; end Get_Enclosing_Package_Or_Protected_Type; begin -- Is_Local_Protected_State Compilation_Unit := Get_Enclosing_Package_Or_Protected_Type (Scope => Scope); return Dictionary.IsProtectedType (Compilation_Unit) and then Dictionary.IsTypeMark (Dictionary.GetOwner (Var_Sym)) and then Dictionary.Types_Are_Equal (Left_Symbol => Dictionary.GetOwner (Var_Sym), Right_Symbol => Compilation_Unit, Full_Range_Subtype => False); end Is_Local_Protected_State; begin -- Protected_References_By if Dictionary.IsAdaFunction (Sym) then -- IsAdaFunction used to block proof functions It := Dictionary.FirstGlobalVariable (Dictionary.IsAbstract, Sym); while not Dictionary.IsNullIterator (It) loop Current_Var := Dictionary.CurrentSymbol (It); if Dictionary.IsOwnVariable (Current_Var) and then Dictionary.GetOwnVariableProtected (Current_Var) and then not Dictionary.IsOwnVariableOrConstituentWithMode (Current_Var) and then not Is_Local_Protected_State (Var_Sym => Current_Var, Scope => Scope) then Result := Sym; exit; end if; It := Dictionary.NextSymbol (It); end loop; else -- check for pragma atomic protected variable case if Dictionary.IsOwnVariable (Sym) and then Dictionary.GetOwnVariableProtected (Sym) and then not Dictionary.IsOwnVariableOrConstituentWithMode (Sym) then Result := Sym; end if; end if; return Result; end Protected_References_By; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_expression.adb0000644000175000017500000003577311753202336024147 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your 
option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- Overview: Called to check validity of an -- expression node. Replaces calls to StaticExpression, -- BaseTypeExpression and CheckTypeExpression ---------------------------------------------------------------------------- separate (Sem.Walk_Expression_P) procedure Wf_Expression (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; T_Stack : in Type_Context_Stack.T_Stack_Type) is Op_Node : STree.SyntaxNode; Operator : SP_Symbols.SP_Symbol; Left, Right, Result : Sem.Exp_Record; Is_Annotation : Boolean := False; --------------------------------------------------------------------- procedure Check_Short_Circuit (Op : in SP_Symbols.SP_Symbol; Node_Pos : in LexTokenManager.Token_Position; Op_Pos : in LexTokenManager.Token_Position; Is_Annotation : in Boolean; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# Is_Annotation, --# LexTokenManager.State, --# Node_Pos, --# Op, --# Op_Pos, --# Scope, --# SPARK_IO.File_Sys & --# E_Stack from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Op; is Left, Right, Result : Sem.Exp_Record; begin Exp_Stack.Pop (Item => Right, Stack => E_Stack); Exp_Stack.Pop (Item => Left, Stack => E_Stack); Result := Null_Type_Record; -- safety: we may not set all fields below Result.Is_Static := CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83 and then Left.Is_Static and then Right.Is_Static; Result.Is_Constant := Left.Is_Constant and then Right.Is_Constant; Result.Has_Operators := True; if Dictionary.TypeIsBoolean (Left.Type_Symbol) and then Dictionary.TypeIsBoolean (Right.Type_Symbol) then Result.Is_ARange := False; Result.Type_Symbol := Left.Type_Symbol; Calc_Binary_Operator (Node_Pos => Node_Pos, Operator => Op, Left_Val => Left.Value, Right_Val => Right.Value, Is_Annotation => Is_Annotation, Result => Result); else Result := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error_Sym2 (Err_Num => 35, Reference => ErrorHandler.No_Reference, Position => Op_Pos, Sym => Left.Type_Symbol, Sym2 => Right.Type_Symbol, Scope => Scope); end if; Result.Errors_In_Expression := Result.Errors_In_Expression or else Left.Errors_In_Expression or else Right.Errors_In_Expression; Exp_Stack.Push (X => Result, Stack => E_Stack); end Check_Short_Circuit; ----------------------------------------------------------------- procedure Check_Implication (Op_Pos : in LexTokenManager.Token_Position; E_Stack : in out Exp_Stack.Exp_Stack_Type; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# 
in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Op_Pos, --# Scope, --# SPARK_IO.File_Sys & --# E_Stack from *, --# Dictionary.Dict; is Left, Right, Result : Sem.Exp_Record; begin Exp_Stack.Pop (Item => Right, Stack => E_Stack); Exp_Stack.Pop (Item => Left, Stack => E_Stack); Result := Null_Type_Record; -- safety: we may not set all fields below Result.Is_Static := Left.Is_Static and then Right.Is_Static; Result.Is_Constant := Left.Is_Constant and then Right.Is_Constant; Result.Has_Operators := True; if Dictionary.TypeIsBoolean (Left.Type_Symbol) and then Dictionary.TypeIsBoolean (Right.Type_Symbol) then Result.Type_Symbol := Left.Type_Symbol; else Result := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error_Sym2 (Err_Num => 35, Reference => ErrorHandler.No_Reference, Position => Op_Pos, Sym => Left.Type_Symbol, Sym2 => Right.Type_Symbol, Scope => Scope); end if; Result.Errors_In_Expression := Result.Errors_In_Expression or else Left.Errors_In_Expression or else Right.Errors_In_Expression; Exp_Stack.Push (X => Result, Stack => E_Stack); end Check_Implication; begin -- Wf_Expression -- ASSUME Node = expression OR expression_rep1 OR expression_rep2 OR expression_rep3 OR expression_rep4 OR expression_rep5 OR -- annotation_expression OR annotation_expression_rep1 OR annotation_expression_rep2 OR -- annotation_expression_rep3 OR annotation_expression_rep4 OR annotation_expression_rep5 OR -- annotation_expression_rep6 OR annotation_expression_rep7 if STree.Syntax_Node_Type (Node => Node) = SP_Symbols.expression or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.expression_rep1 or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.expression_rep2 or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.expression_rep3 or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.expression_rep4 or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.expression_rep5 then -- ASSUME Node = expression OR expression_rep1 OR expression_rep2 OR expression_rep3 OR expression_rep4 OR expression_rep5 Is_Annotation := False; elsif STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_expression or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_expression_rep1 or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_expression_rep2 or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_expression_rep3 or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_expression_rep4 or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_expression_rep5 or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_expression_rep6 or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_expression_rep7 then Is_Annotation := True; -- ASSUME Node = annotation_expression OR annotation_expression_rep1 OR annotation_expression_rep2 OR -- annotation_expression_rep3 OR annotation_expression_rep4 OR annotation_expression_rep5 OR -- annotation_expression_rep6 OR annotation_expression_rep7 end if; Op_Node := STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node)); -- ASSUME Op_Node = RWand OR RWandthen OR RWor OR RWorelse OR RWxor OR implies OR is_equivalent_to OR NULL if STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.RWand or else STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.RWandthen or else 
STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.RWor or else STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.RWorelse or else STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.RWxor or else STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.implies or else STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.is_equivalent_to then -- ASSUME Op_Node = RWand OR RWandthen OR RWor OR RWorelse OR RWxor OR implies OR is_equivalent_to Operator := STree.Syntax_Node_Type (Node => Op_Node); if Operator = SP_Symbols.RWandthen or else Operator = SP_Symbols.RWorelse then Check_Short_Circuit (Op => Operator, Node_Pos => STree.Node_Position (Node => Node), Op_Pos => STree.Node_Position (Node => Op_Node), Is_Annotation => Is_Annotation, Scope => Scope, E_Stack => E_Stack); elsif Operator = SP_Symbols.implies or else Operator = SP_Symbols.is_equivalent_to then Check_Implication (Op_Pos => STree.Node_Position (Node => Op_Node), E_Stack => E_Stack, Scope => Scope); elsif Operator = SP_Symbols.RWand or else Operator = SP_Symbols.RWor or else Operator = SP_Symbols.RWxor then Exp_Stack.Pop (Item => Right, Stack => E_Stack); Exp_Stack.Pop (Item => Left, Stack => E_Stack); Result := Null_Type_Record; -- safety: we may not set all fields below Result.Is_Constant := Left.Is_Constant and then Right.Is_Constant; Result.Is_Static := Left.Is_Static and then Right.Is_Static; Result.Has_Operators := True; Check_Binary_Operator (Operator => Operator, Left => Left, Right => Right, Scope => Scope, T_Stack => T_Stack, Op_Pos => STree.Node_Position (Node => Op_Node), Left_Pos => STree.Node_Position (Node => STree.Child_Node (Node)), Right_Pos => STree.Node_Position (Node => STree.Next_Sibling (Op_Node)), Convert => True, Is_Annotation => Is_Annotation, Result => Result); -- check that array bounds match. if Result /= Sem.Unknown_Type_Record then -- check that whole array operation not being performed on unconstrained array if Dictionary.Is_Unconstrained_Array_Type_Mark (Left.Type_Symbol, Scope) or else Dictionary.Is_Unconstrained_Array_Type_Mark (Right.Type_Symbol, Scope) then Result := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 39, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Op_Node), Id_Str => LexTokenManager.Null_String); elsif Sem.Illegal_Unconstrained (Left_Type => Left.Type_Symbol, Right_Type => Right.Type_Symbol) then Result := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 418, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Op_Node), Id_Str => LexTokenManager.Null_String); end if; if Result /= Sem.Unknown_Type_Record then Calc_Binary_Operator (Node_Pos => STree.Node_Position (Node => Node), Operator => Operator, Left_Val => Left.Value, Right_Val => Right.Value, Is_Annotation => Is_Annotation, Result => Result); end if; end if; -- test to prevent result being considered unconstrained if Dictionary.TypeIsArray (Result.Type_Symbol) then Result.Type_Symbol := Left.Type_Symbol; end if; -- Plant result type for use by VCG -- It will be used to identify cases where a special model is needed for bitwise ops -- between arrays or modular types STree.Add_Node_Symbol (Node => Op_Node, Sym => Result.Type_Symbol); Result.Errors_In_Expression := Result.Errors_In_Expression or else Left.Errors_In_Expression or else Right.Errors_In_Expression; -- OtherSymbol may carry a function symbol in the case of uses of unchecked_conversion. 
-- This symbol is used (by wf_Assign) to convey information to the VCG to suppress -- checks when an unchecked_conversion is assigned to something of the same subtype. -- We do not want this mechanism if the unchecked_conversion is used in any other context -- than a direct assignment. Therefore we clear OtherSymbol here: Result.Other_Symbol := Dictionary.NullSymbol; Exp_Stack.Push (X => Result, Stack => E_Stack); end if; elsif Op_Node /= STree.NullNode then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Op_Node = RWand OR RWandthen OR RWor OR RWorelse OR RWxor OR implies OR " & "is_equivalent_to OR NULL in Wf_Expression"); end if; end Wf_Expression; spark-2012.0.deb/examiner/sem-constraint_check.adb0000644000175000017500000001305411753202336021027 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- This routine checks that a value supplied will fit in a type -- -- supplied. It does nothing if the type is Unknown or non-scalar -- -- or if the value is unknown. If the type is REAL then constraint -- -- violations are reported as warnings (because limitations of our -- -- handling of reals mean that false alarms may occur). For -- -- integers and enumerations constraint violations are errors.
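-- For instance (illustrative values only): a supplied value of 42 checked against
-- a subtype with range 1 .. 10 is reported with semantic error 402, or error 399
-- when the value occurs inside an annotation, and New_Val is reset to
-- Maths.NoValue.  If the bound comparison overflows in the Maths package, the
-- check is abandoned and warning 202 is issued instead.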
-- -------------------------------------------------------------------------------- separate (Sem) procedure Constraint_Check (Val : in Maths.Value; New_Val : out Maths.Value; Is_Annotation : in Boolean; Typ : in Dictionary.Symbol; Position : in LexTokenManager.Token_Position) is Lower_Bound, Upper_Bound, Result : Maths.Value; Error : Maths.ErrorCode; ------------------------------- procedure Raise_Error_Or_Warning (Result : in Maths.Value; Error : in Maths.ErrorCode; Is_Annotation : in Boolean; Position : in LexTokenManager.Token_Position; New_Val : in out Maths.Value) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error, --# ErrorHandler.Error_Context, --# Is_Annotation, --# LexTokenManager.State, --# Position, --# Result, --# SPARK_IO.File_Sys & --# New_Val from *, --# Error, --# Result; is begin if Error = Maths.NoError then if Result = Maths.TrueValue then New_Val := Maths.NoValue; if Is_Annotation then ErrorHandler.Semantic_Error (Err_Num => 399, Reference => ErrorHandler.No_Reference, Position => Position, Id_Str => LexTokenManager.Null_String); else ErrorHandler.Semantic_Error (Err_Num => 402, Reference => ErrorHandler.No_Reference, Position => Position, Id_Str => LexTokenManager.Null_String); end if; end if; elsif Error = Maths.OverFlow then -- Arithmetic overflow. Constraint check has not been peformed. ErrorHandler.Semantic_Warning (Err_Num => 202, Position => Position, Id_Str => LexTokenManager.Null_String); end if; end Raise_Error_Or_Warning; begin -- Constraint_Check New_Val := Val; if Dictionary.TypeIsScalar (Typ) and then not Maths.HasNoValue (Val) and then not Dictionary.IsUnknownTypeMark (Typ) then Lower_Bound := Maths.ValueRep (Dictionary.GetScalarAttributeValue (False, -- without base LexTokenManager.First_Token, Typ)); if not Maths.HasNoValue (Lower_Bound) then Maths.Lesser (Val, Lower_Bound, Result, Error); Raise_Error_Or_Warning (Result => Result, Error => Error, Is_Annotation => Is_Annotation, Position => Position, New_Val => New_Val); if Error = Maths.NoError then Upper_Bound := Maths.ValueRep (Dictionary.GetScalarAttributeValue (False, -- without base LexTokenManager.Last_Token, Typ)); if not Maths.HasNoValue (Upper_Bound) then Maths.Greater (Val, Upper_Bound, Result, Error); Raise_Error_Or_Warning (Result => Result, Error => Error, Is_Annotation => Is_Annotation, Position => Position, New_Val => New_Val); end if; end if; end if; end if; end Constraint_Check; ././@LongLink0000000000000000000000000000015200000000000011563 Lustar rootrootspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration.adbspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaratio0000644000175000017500000016543611753202336033137 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration) procedure Wf_Full_Type_Declaration (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) is Ident_Node : STree.SyntaxNode; Ident_Str : LexTokenManager.Lex_String; Sym : Dictionary.Symbol; Type_Definition_Node : STree.SyntaxNode; Type_Declared_As_Limited : Boolean; Type_Declared_As_Tagged : Boolean; Error_Found : Boolean; ------------------------------- function Is_Private_Type_Resolution (Sym : Dictionary.Symbol; Scope : Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; is begin return not Dictionary.Is_Declared (Item => Sym) and then Dictionary.Get_Visibility (Scope => Scope) /= Dictionary.Visible and then Dictionary.IsType (Sym) and then Dictionary.TypeIsPrivate (TheType => Sym) and then Dictionary.GetRegion (Scope) = Dictionary.GetRegion (Dictionary.GetScope (Sym)); end Is_Private_Type_Resolution; ------------------------------- procedure Empty_Type_Check (Dec_Loc : in LexTokenManager.Token_Position; Lower, Upper : in out LexTokenManager.Lex_String) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dec_Loc, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lower, --# SPARK_IO.File_Sys, --# Upper & --# Lower, --# Upper from LexTokenManager.State, --# Lower, --# Upper; is Unused : Maths.ErrorCode; Maths_Result : Maths.Value; Range_Is_Empty : Boolean; begin if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Lower, Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Upper, Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq then --# accept Flow, 10, Unused, "Expected ineffective assignment"; Maths.Lesser (Maths.ValueRep (Upper), Maths.ValueRep (Lower), --to get Maths_Result, Unused); -- not used because it can only be ok or type mismatch Maths.ValueToBool (Maths_Result, --to get Range_Is_Empty, Unused); --# end accept; if Range_Is_Empty then Lower := LexTokenManager.Null_String; Upper := LexTokenManager.Null_String; ErrorHandler.Semantic_Error (Err_Num => 416, Reference => ErrorHandler.No_Reference, Position => Dec_Loc, Id_Str => LexTokenManager.Null_String); end if; end if; --# accept Flow, 33, Unused, "Expected to be neither referenced nor exported"; end Empty_Type_Check; ------------------------------- procedure Wf_Integer (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Ident_Node : in STree.SyntaxNode; Dec_Loc : in LexTokenManager.Token_Position; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in 
ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# out Aggregate_Stack.State; --# derives Aggregate_Stack.State, --# LexTokenManager.State, --# The_Heap from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# Ident_Node, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# Statistics.TableUsage from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.integer_type_definition and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; --# post STree.Table = STree.Table~; is separate; ------------------------------- procedure Wf_Derived (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Ident_Node : in STree.SyntaxNode; Dec_Loc : in LexTokenManager.Token_Position; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# LexTokenManager.State, --# Statistics.TableUsage, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# Ident_Node, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Ident_Node, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node, 
--# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.derived_type_definition and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; --# post STree.Table = STree.Table~; is separate; ------------------------------- procedure Wf_Modular (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Ident_Node : in STree.SyntaxNode; Dec_Loc : in LexTokenManager.Token_Position; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# LexTokenManager.State, --# Statistics.TableUsage, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# Ident_Node, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.modular_type_definition and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; --# post STree.Table = STree.Table~; is separate; ------------------------------- procedure Wf_Enum (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Ident_Node : in STree.SyntaxNode; Dec_Loc : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# LexTokenManager.State, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# Ident_Node, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table & --# SLI.State from *, --# CommandLineData.Content, --# 
ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node, --# LexTokenManager.State, --# Scope, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.enumeration_type_definition and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; --# post STree.Table = STree.Table~; is separate; ------------------------------- procedure Wf_Record (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Ident_Node : in STree.SyntaxNode; Dec_Loc : in LexTokenManager.Token_Position; Extends : in Dictionary.Symbol; Private_Type_Being_Resolved : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# Extends, --# Ident_Node, --# LexTokenManager.State, --# Node, --# Private_Type_Being_Resolved, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Extends, --# Ident_Node, --# LexTokenManager.State, --# Node, --# Private_Type_Being_Resolved, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table & --# SLI.State from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Extends, --# Ident_Node, --# LexTokenManager.State, --# Node, --# Private_Type_Being_Resolved, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre (Syntax_Node_Type (Node, STree.Table) = SP_Symbols.record_type_definition or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.record_type_extension) and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; --# post STree.Table = STree.Table~; is separate; ------------------------------- procedure Wf_Real (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Ident_Node : in STree.SyntaxNode; Dec_Loc : in LexTokenManager.Token_Position; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# out Aggregate_Stack.State; --# derives Aggregate_Stack.State, --# LexTokenManager.State, --# The_Heap from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# Ident_Node, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, 
--# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# Statistics.TableUsage from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.real_type_definition and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; --# post STree.Table = STree.Table~; is separate; ------------------------------- procedure Wf_Type_Extension (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Ident_Node : in STree.SyntaxNode; Dec_Loc : in LexTokenManager.Token_Position; Private_Type_Being_Resolved : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# Ident_Node, --# LexTokenManager.State, --# Node, --# Private_Type_Being_Resolved, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node, --# LexTokenManager.State, --# Node, --# Private_Type_Being_Resolved, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table & --# SLI.State from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node, --# LexTokenManager.State, --# Node, --# Private_Type_Being_Resolved, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.type_extension and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; --# post STree.Table = STree.Table~; is separate; ------------------------------- procedure Wf_Known_Discriminant_Part (Node : in STree.SyntaxNode; Protected_Type_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Protected_Type_Sym, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# 
Protected_Type_Sym, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.known_discriminant_part; --# post STree.Table = STree.Table~; is separate; ------------------------------------------------------------------- procedure Wf_Priority_Pragma (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.priority_pragma; --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------- procedure Wf_Protected_Type_Declaration (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.protected_type_declaration; --# post STree.Table = STree.Table~; is separate; ------------------------------- procedure Wf_Task_Type_Declaration (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in 
out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.task_type_declaration; --# post STree.Table = STree.Table~; is separate; ------------------------------- procedure Wf_Type_Definition (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Ident_Node : in STree.SyntaxNode; Dec_Loc : in LexTokenManager.Token_Position; Private_Type_Being_Resolved : in Dictionary.Symbol; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Statistics.TableUsage, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# Ident_Node, --# LexTokenManager.State, --# Node, --# Private_Type_Being_Resolved, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node, --# LexTokenManager.State, --# Node, --# Private_Type_Being_Resolved, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# LexTokenManager.State from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# Ident_Node, --# Node, --# Scope, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.type_definition and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; --# post STree.Table = STree.Table~; is Type_Node : STree.SyntaxNode; Unused_Array_Type_Symbol : Dictionary.Symbol; begin Type_Node := Child_Node (Current_Node => Node); -- ASSUME Type_Node = enumeration_type_definition OR integer_type_definition OR real_type_definition OR -- array_type_definition OR record_type_definition OR modular_type_definition OR -- type_extension OR derived_type_definition case Syntax_Node_Type (Node => Type_Node) is when SP_Symbols.enumeration_type_definition => -- ASSUME Type_Node = enumeration_type_definition Wf_Enum (Node => Type_Node, Scope => Scope, Ident_Node => Ident_Node, Dec_Loc 
=> Dec_Loc); when SP_Symbols.real_type_definition => -- ASSUME Type_Node = real_type_definition Wf_Real (Node => Type_Node, Scope => Scope, Ident_Node => Ident_Node, Dec_Loc => Dec_Loc, The_Heap => The_Heap); when SP_Symbols.record_type_definition => -- ASSUME Type_Node = record_type_definition Wf_Record (Node => Type_Node, Scope => Scope, Ident_Node => Ident_Node, Dec_Loc => Dec_Loc, Extends => Dictionary.NullSymbol, Private_Type_Being_Resolved => Private_Type_Being_Resolved); when SP_Symbols.integer_type_definition => -- ASSUME Type_Node = integer_type_definition Wf_Integer (Node => Type_Node, Scope => Scope, Ident_Node => Ident_Node, Dec_Loc => Dec_Loc, The_Heap => The_Heap); when SP_Symbols.derived_type_definition => -- ASSUME Type_Node = derived_type_definition Wf_Derived (Node => Type_Node, Scope => Scope, Ident_Node => Ident_Node, Dec_Loc => Dec_Loc, The_Heap => The_Heap); when SP_Symbols.modular_type_definition => -- ASSUME Type_Node = modular_type_definition Wf_Modular (Node => Type_Node, Scope => Scope, Ident_Node => Ident_Node, Dec_Loc => Dec_Loc, The_Heap => The_Heap); when SP_Symbols.array_type_definition => -- ASSUME Type_Node = array_type_definition --# accept Flow, 10, Unused_Array_Type_Symbol, "Expected ineffective assignment"; Wf_Array_Type_Definition (Node => Type_Node, Scope => Scope, Ident_Node => Ident_Node, Dec_Loc => Dec_Loc, The_Array => Unused_Array_Type_Symbol); --# end accept; when SP_Symbols.type_extension => -- ASSUME Type_Node = type_extension Wf_Type_Extension (Node => Type_Node, Scope => Scope, Ident_Node => Ident_Node, Dec_Loc => Dec_Loc, Private_Type_Being_Resolved => Private_Type_Being_Resolved); when others => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Type_Node = enumeration_type_definition OR integer_type_definition OR " & "real_type_definition OR array_type_definition OR record_type_definition OR modular_type_definition OR " & "type_extension OR derived_type_definition in Wf_Type_Definition"); end case; --# accept Flow, 33, Unused_Array_Type_Symbol, "Expected to be neither referenced nor exported"; end Wf_Type_Definition; ------------------------------- function Valid_Scope_For_Task_Or_Protected_Type_Declaration (Scope : Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; is begin return (Dictionary.Get_Visibility (Scope => Scope) = Dictionary.Visible or else Dictionary.Get_Visibility (Scope => Scope) = Dictionary.Privat) and then Dictionary.IsPackage (Dictionary.GetRegion (Scope)); end Valid_Scope_For_Task_Or_Protected_Type_Declaration; begin -- Wf_Full_Type_Declaration Ident_Node := Child_Node (Current_Node => Node); -- ASSUME Ident_Node = identifier OR task_type_declaration OR protected_type_declaration case Syntax_Node_Type (Node => Ident_Node) is when SP_Symbols.identifier => -- ASSUME Ident_Node = identifier Ident_Str := Node_Lex_String (Node => Ident_Node); Sym := Dictionary.LookupItem (Name => Ident_Str, Scope => Scope, Context => Dictionary.ProofContext, Full_Package_Name => False); if Dictionary.Is_Null_Symbol (Sym) or else (Dictionary.IsTypeMark (Sym) and then Dictionary.TypeIsAnnounced (TheType => Sym) and then not Dictionary.Is_Declared (Item => Sym)) or else Is_Private_Type_Resolution (Sym => Sym, Scope => Scope) then Type_Definition_Node := Next_Sibling (Current_Node => Ident_Node); -- ASSUME Type_Definition_Node = type_definition SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Type_Definition_Node) = SP_Symbols.type_definition, Sys_Err => SystemErrors.Invalid_Syntax_Tree, 
Msg => "Expect Type_Definition_Node = type_definition in Wf_Full_Type_Declaration"); if Dictionary.Is_Null_Symbol (Sym) then -- initial type declaration Wf_Type_Definition (Node => Type_Definition_Node, Scope => Scope, Ident_Node => Ident_Node, Dec_Loc => Node_Position (Node => Node), Private_Type_Being_Resolved => Sym, The_Heap => The_Heap); else Type_Declared_As_Limited := Dictionary.IsLimitedPrivateType (Sym); Type_Declared_As_Tagged := Dictionary.TypeIsTagged (Sym); Error_Found := False; Wf_Type_Definition (Node => Type_Definition_Node, Scope => Scope, Ident_Node => Ident_Node, Dec_Loc => Node_Position (Node => Node), Private_Type_Being_Resolved => Sym, The_Heap => The_Heap); if Dictionary.Is_Unconstrained_Array_Type_Mark (Sym, Scope) then if Dictionary.TypeIsAnnounced (TheType => Sym) then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 311, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); else -- private type Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 331, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); end if; end if; if not Type_Declared_As_Limited and then Dictionary.TypeIsLimited (Sym, Dictionary.GlobalScope) then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 332, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); end if; -- initial declaration tagged but completion is not if Type_Declared_As_Tagged and then not Dictionary.TypeIsTagged (Sym) then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 821, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); end if; -- reverse case; this could be made legal but is not allowed for now if not Type_Declared_As_Tagged and then Dictionary.TypeIsTagged (Sym) then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 830, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); end if; if not Error_Found then STree.Set_Node_Lex_String (Sym => Sym, Node => Ident_Node); end if; end if; else ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); end if; when SP_Symbols.task_type_declaration => -- ASSUME Ident_Node = task_type_declaration if CommandLineData.Ravenscar_Selected then if Valid_Scope_For_Task_Or_Protected_Type_Declaration (Scope => Scope) then Wf_Task_Type_Declaration (Node => Ident_Node, Scope => Scope, The_Heap => The_Heap); else ErrorHandler.Semantic_Error (Err_Num => 987, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => LexTokenManager.Null_String); end if; else -- declaration not allowed ErrorHandler.Semantic_Error (Err_Num => 850, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => LexTokenManager.Null_String); end if; when SP_Symbols.protected_type_declaration => -- ASSUME Ident_Node = protected_type_declaration if CommandLineData.Ravenscar_Selected then if Valid_Scope_For_Task_Or_Protected_Type_Declaration (Scope => Scope) then Wf_Protected_Type_Declaration (Node => Ident_Node, Scope => Scope, Component_Data => Component_Data, The_Heap => The_Heap); else ErrorHandler.Semantic_Error (Err_Num => 987, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), 
Id_Str => LexTokenManager.Null_String); end if; else -- declaration not allowed ErrorHandler.Semantic_Error (Err_Num => 850, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => LexTokenManager.Null_String); end if; when others => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier OR task_type_declaration OR " & "protected_type_declaration in Wf_Full_Type_Declaration"); end case; end Wf_Full_Type_Declaration; spark-2012.0.deb/examiner/errorhandler-conversions-tostring-ineffectivestatement.adb0000644000175000017500000001146511753202336030046 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (ErrorHandler.Conversions.ToString) procedure IneffectiveStatement (Err_Num : in Error_Types.NumericError; With_Explanation : in Boolean; E_Str : in out E_Strings.T) is procedure IneffectiveStatementExpl (E_Str : in out E_Strings.T) --# global in Err_Num; --# derives E_Str from *, --# Err_Num; is separate; -- Note that the parameter names for this subunit are chosen to make it as easy as -- possible to auto-generate the subunit from this, its parent, file. The -- generation requires copying the case statement below, stripping out the -- current Append'Thing' statements and adding an Append_String for the -- explanatory text that is delineated by --! comments. procedure Append_Explanation --# global in Err_Num; --# in With_Explanation; --# in out E_Str; --# derives E_Str from *, --# Err_Num, --# With_Explanation; is Explanation_String : E_Strings.T := E_Strings.Empty_String; begin if With_Explanation then -- we need to at least look for an explanation IneffectiveStatementExpl (E_Str => Explanation_String); if E_Strings.Get_Length (E_Str => Explanation_String) > 0 then -- there actually is one E_Strings.Append_String (E_Str => E_Str, Str => ErrorHandler.Explanation_Prefix); E_Strings.Append_Examiner_String (E_Str1 => E_Str, E_Str2 => Explanation_String); E_Strings.Append_String (E_Str => E_Str, Str => ErrorHandler.Explanation_Postfix); end if; end if; end Append_Explanation; begin -- HTML Directives --! <"flow-"> --! <"!!! Flow Error : "><" : "> if Err_Num.Name1 = Error_Types.NoName then --! 10 E_Strings.Append_String (E_Str => E_Str, Str => "Ineffective statement"); --! Execution of this statement cannot affect the final value of any exported --! variable of the subprogram in which it occurs. The cause may be a --! data-flow anomaly (i.e. the statement could be an assignment to a --! variable, which is always updated again before it is read. However, --! 
statements may be ineffective for other reasons - see Section 4.1 of --! Appendix A. else --! 10 E_Strings.Append_String (E_Str => E_Str, Str => "Assignment to "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is ineffective"); --! This message always relates to a procedure call or an assignment to a --! record. The variable XXX may be an actual parameter corresponding to a --! formal one that is exported; --! otherwise XXX is an exported global variable of the procedure. --! The message indicates that --! the updating of XXX, as a result of the procedure call, has no effect on --! any final values of exported variables of the calling subprogram. --! Where the ineffective assignment is expected (e.g. calling a supplied --! procedure that returns more parameters than are needed for the immediate purpose), --! it can be a useful convention to choose a distinctive name, such as "Unused" for --! the actual parameter concerned. The message "Assignment to Unused is ineffective" --! is then self-documenting. end if; Append_Explanation; E_Strings.Append_String (E_Str => E_Str, Str => "."); end IneffectiveStatement; spark-2012.0.deb/examiner/sem-compunit-walkstatements-up_loop.adb0000644000175000017500000001651011753202336024063 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
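--
-- Summary of this subunit (descriptive note): Up_Loop is the action performed
-- on leaving a loop_statement node during the statement walk.  It checks that
-- the closing identifier of a named loop matches its opening identifier
-- (semantic error 58), and that a loop containing no exit statement appears
-- only as the last statement of the main program or of a task body, not
-- nested inside any compound statement (semantic error 730).  On exit the
-- enclosing scope is restored.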
-- --============================================================================= separate (Sem.CompUnit.WalkStatements) procedure Up_Loop (Node : in STree.SyntaxNode; Scope : in out Dictionary.Scopes) is First_Ident_Node, Second_Ident_Node : STree.SyntaxNode; Endless_Loop_Error : Boolean := False; function Position_To_Report_Error (Node : STree.SyntaxNode) return LexTokenManager.Token_Position --# global in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.loop_statement; is Local_Node : STree.SyntaxNode; begin Local_Node := Child_Node (Current_Node => Node); -- ASSUME Local_Node = simple_name OR loop_statement_opt if Syntax_Node_Type (Node => Local_Node) = SP_Symbols.simple_name then -- ASSUME Local_Node = simple_name -- loop has a name Local_Node := Last_Sibling_Of (Start_Node => Local_Node); -- closing name location -- ASSUME Local_Node = simple_name SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Local_Node) = SP_Symbols.simple_name, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = simple_name in Position_To_Report_Error"); elsif Syntax_Node_Type (Node => Local_Node) = SP_Symbols.loop_statement_opt then -- ASSUME Local_Node = loop_statement_opt -- loop has no name, find last statement in sequence of statements Local_Node := Child_Node (Current_Node => Next_Sibling (Current_Node => Local_Node)); -- ASSUME Local_Node = sequence_of_statements OR statement -- Local_Node is either a Statement (which is the only one in the sequence) -- or it's a sequence_of_statements in which case the last statement is to it's right if Syntax_Node_Type (Node => Local_Node) = SP_Symbols.sequence_of_statements then Local_Node := Next_Sibling (Current_Node => Local_Node); elsif Syntax_Node_Type (Node => Local_Node) /= SP_Symbols.statement then Local_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = sequence_of_statements OR statement in Position_To_Report_Error"); end if; -- ASSUME Local_Node = statement SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Local_Node) = SP_Symbols.statement, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = statement in Position_To_Report_Error"); else Local_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = simple_name OR loop_statement_opt in Position_To_Report_Error"); end if; -- ASSUME Node = simple_name OR statement return Node_Position (Node => Local_Node); end Position_To_Report_Error; begin -- Up_Loop First_Ident_Node := Child_Node (Current_Node => Node); -- ASSUME First_Ident_Node = simple_name OR loop_statement_opt if Syntax_Node_Type (Node => First_Ident_Node) = SP_Symbols.simple_name then -- ASSUME First_Ident_Node = simple_name Second_Ident_Node := Child_Node (Current_Node => Last_Sibling_Of (Start_Node => First_Ident_Node)); -- ASSUME Second_Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Second_Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Second_Ident_Node = identifier in Up_Loop"); First_Ident_Node := Child_Node (Current_Node => First_Ident_Node); -- ASSUME First_Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => First_Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect First_Ident_Node = identifier in Up_Loop"); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => 
Node_Lex_String (Node => First_Ident_Node), Lex_Str2 => Node_Lex_String (Node => Second_Ident_Node)) /= LexTokenManager.Str_Eq then ErrorHandler.Semantic_Error (Err_Num => 58, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Second_Ident_Node), Id_Str => Node_Lex_String (Node => First_Ident_Node)); end if; elsif Syntax_Node_Type (Node => First_Ident_Node) /= SP_Symbols.loop_statement_opt then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect First_Ident_Node = simple_name OR loop_statement_opt in Up_Loop"); end if; -- Make checks that any loops without exits are the last statement of the main prgoram -- or task body. -- We may need to allow a proof statement or pragma to follow and infinite loop (and, perhaps -- a null statement) but for now we allow nothing to follow. if not Dictionary.GetLoopHasExits (Dictionary.GetRegion (Scope)) then -- Loop is infinite, checks are required -- First check that it is main program or in task type if not (Dictionary.IsMainProgram (Dictionary.GetRegion (Dictionary.GetEnclosingScope (Scope))) or else Dictionary.IsTaskType (Dictionary.GetRegion (Dictionary.GetEnclosingScope (Scope)))) then Endless_Loop_Error := True; else case Syntax_Node_Type (Node => Parent_Of_Sequence (Node => Node)) is when SP_Symbols.if_statement | SP_Symbols.elsif_part | SP_Symbols.else_part | SP_Symbols.loop_statement | SP_Symbols.case_statement_alternative | SP_Symbols.others_part => Endless_Loop_Error := True; when others => if not Is_Last_In_Sequence (Node => Node) then Endless_Loop_Error := True; end if; end case; end if; if Endless_Loop_Error then ErrorHandler.Semantic_Error (Err_Num => 730, Reference => ErrorHandler.No_Reference, Position => Position_To_Report_Error (Node => Node), Id_Str => LexTokenManager.Null_String); end if; end if; -- move out of loop scope Scope := Dictionary.GetEnclosingScope (Scope); end Up_Loop; spark-2012.0.deb/examiner/sparklex-lex.adb0000644000175000017500000013355711753202336017356 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
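--
-- Summary of this subunit (descriptive note): Lex delivers the next token of
-- the program text together with its source position and lexical string.
-- Comments, annotation continuations and illegal tokens are absorbed
-- internally, and a hidden part introduced by a hide directive is skipped up
-- to the matching "end" of the named unit.  A caller might drive it along the
-- following lines; this is an illustrative sketch only, the variable names
-- are invented, and SP_Symbols.SPEND is assumed to mark the end of the text:
--
--    loop
--       SparkLex.Lex (Prog_Text    => The_File,
--                     Allow_Dollar => False,
--                     Curr_Line    => The_Line_Context,
--                     Token        => Tok,
--                     Lex_Val      => Val,
--                     Punct_Token  => Is_Punct);
--       exit when Tok = SP_Symbols.SPEND;
--       --  pass Tok and Val on to the parser here
--    end loop;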
-- --============================================================================= with LexTokenLists; separate (SparkLex) procedure Lex (Prog_Text : in SPARK_IO.File_Type; Allow_Dollar : in Boolean; Curr_Line : in out Line_Context; Token : out SP_Symbols.SP_Terminal; Lex_Val : out LexTokenManager.Lex_Value; Punct_Token : out Boolean) is Start_Line : LexTokenManager.Line_Numbers; Start_Posn : E_Strings.Positions; End_Posn : E_Strings.Lengths; Lex_Str : LexTokenManager.Lex_String; Next_Token : SP_Symbols.SP_Terminal; Ch : Character; Hidden_Part, End_Hide_Found : Boolean; Hide_Designator, End_Designator : LexTokenLists.Lists; Previous_Context : Program_Context; procedure GetIdent (Curr_Line : in out Line_Context; Token : out SP_Symbols.SP_Terminal) --# derives Curr_Line, --# Token from Curr_Line; --# pre E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; --# post E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Curr_Pos > Curr_Line~.Curr_Pos and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos = Curr_Line.Curr_Pos and --# Curr_Line.Last_Token_Pos = Curr_Line~.Last_Token_Pos and --# Token /= SP_Symbols.annotation_end and Token /= SP_Symbols.SPEND; is separate; procedure GetNumber (Curr_Line : in out Line_Context; Token : out SP_Symbols.SP_Terminal) --# derives Curr_Line, --# Token from Curr_Line; --# pre E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; --# post E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Curr_Pos > Curr_Line~.Curr_Pos and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos = Curr_Line.Curr_Pos and --# Curr_Line.Last_Token_Pos = Curr_Line~.Last_Token_Pos and --# Token /= SP_Symbols.annotation_end and Token /= SP_Symbols.SPEND; is separate; procedure HyphIntro (Curr_Line : in out Line_Context; Token : out SP_Symbols.SP_Terminal) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Curr_Line, --# Token from CommandLineData.Content, --# Curr_Line & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Curr_Line, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys; --# pre E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) and --# Curr_Line.Lookahead_Pos = Curr_Line.Curr_Pos and --# Curr_Line.Last_Token_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; --# post E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Curr_Pos >= Curr_Line~.Curr_Pos and --# ((Token /= SP_Symbols.annotation_end and Token /= SP_Symbols.SPEND) <-> (Curr_Line.Curr_Pos > Curr_Line~.Curr_Pos)) and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos = Curr_Line.Curr_Pos and --# Curr_Line.Last_Token_Pos = Curr_Line~.Last_Token_Pos; is separate; procedure GetString (Curr_Line : in out Line_Context; Token : out SP_Symbols.SP_Terminal) --# derives 
Curr_Line, --# Token from Curr_Line; --# pre E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; --# post E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Curr_Pos > Curr_Line~.Curr_Pos and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos = Curr_Line.Curr_Pos and --# Curr_Line.Last_Token_Pos = Curr_Line~.Last_Token_Pos and --# Token /= SP_Symbols.annotation_end and Token /= SP_Symbols.SPEND; is separate; procedure ApostIntro (Curr_Line : in out Line_Context; Token : out SP_Symbols.SP_Terminal) --# derives Curr_Line, --# Token from Curr_Line; --# pre E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; --# post E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Curr_Pos > Curr_Line~.Curr_Pos and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos = Curr_Line.Curr_Pos and --# Curr_Line.Last_Token_Pos = Curr_Line~.Last_Token_Pos and --# Token /= SP_Symbols.annotation_end and Token /= SP_Symbols.SPEND; is separate; procedure LTIntro (Curr_Line : in out Line_Context; Token : out SP_Symbols.SP_Terminal) --# derives Curr_Line, --# Token from Curr_Line; --# pre E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; --# post E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Curr_Pos > Curr_Line~.Curr_Pos and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos = Curr_Line.Curr_Pos and --# Curr_Line.Last_Token_Pos = Curr_Line~.Last_Token_Pos and --# Token /= SP_Symbols.annotation_end and Token /= SP_Symbols.SPEND; is separate; procedure NextLex (Curr_Line : in out Line_Context; Token : out SP_Symbols.SP_Terminal) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Curr_Line, --# Token from CommandLineData.Content, --# Curr_Line & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Curr_Line, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys; --# pre E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) and --# Curr_Line.Lookahead_Pos = Curr_Line.Curr_Pos and --# Curr_Line.Last_Token_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; --# post E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Curr_Pos >= Curr_Line~.Curr_Pos and --# ((Token /= SP_Symbols.annotation_end and Token /= SP_Symbols.SPEND) <-> (Curr_Line.Curr_Pos > Curr_Line~.Curr_Pos)) and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos = Curr_Line.Curr_Pos and --# Curr_Line.Last_Token_Pos = Curr_Line~.Curr_Pos; -- -- The Ada Lexis allows the class of a token to be determined by the -- first character in the text string representing it. 
Given the first -- character of the string representing the token is at the current position -- in the line buffer, Curr_Line, Next_Lex determines the class of the token and -- calls a procedure to recognise that class of token. -- On exit Token is set to a value representing the token. is separate; -- Modified list of reserved words which are punctuation tokens -- this is a knock on effect from change in syntax of proof contexts function Is_Punct_Token (Token : SP_Symbols.SP_Terminal) return Boolean is Result : Boolean; begin case Token is when SP_Symbols.left_paren | SP_Symbols.right_paren | SP_Symbols.comma | SP_Symbols.colon | SP_Symbols.semicolon | SP_Symbols.becomes | SP_Symbols.double_dot | SP_Symbols.point | SP_Symbols.apostrophe | SP_Symbols.vertical_bar | SP_Symbols.arrow | SP_Symbols.annotation_start | SP_Symbols.annotation_end | SP_Symbols.proof_context | SP_Symbols.RWabort | SP_Symbols.RWaccess | SP_Symbols.RWall | SP_Symbols.RWarray | SP_Symbols.RWassert | SP_Symbols.RWassume | SP_Symbols.RWat | SP_Symbols.RWbegin | SP_Symbols.RWbody | SP_Symbols.RWcase | SP_Symbols.RWcheck | SP_Symbols.RWconstant | SP_Symbols.RWdeclare | SP_Symbols.RWdelta | SP_Symbols.RWderives | SP_Symbols.RWdigits | SP_Symbols.RWdo | SP_Symbols.RWelse | SP_Symbols.RWelsif | SP_Symbols.RWend | SP_Symbols.RWentry | SP_Symbols.RWexception | SP_Symbols.RWfor | SP_Symbols.RWfrom | SP_Symbols.RWfunction | SP_Symbols.RWgeneric | SP_Symbols.RWglobal | SP_Symbols.RWgoto | SP_Symbols.RWhide | SP_Symbols.RWif | SP_Symbols.RWin | SP_Symbols.RWinherit | SP_Symbols.RWinitializes | SP_Symbols.RWis | SP_Symbols.RWlimited | SP_Symbols.RWloop | SP_Symbols.RWnew | SP_Symbols.RWnotin | SP_Symbols.RWof | SP_Symbols.RWothers | SP_Symbols.RWout | SP_Symbols.RWown | SP_Symbols.RWpackage | SP_Symbols.RWpost | SP_Symbols.RWpragma | SP_Symbols.RWpre | SP_Symbols.RWprivate | SP_Symbols.RWprocedure | SP_Symbols.RWraise | SP_Symbols.RWrange | SP_Symbols.RWrecord | SP_Symbols.RWrenames | SP_Symbols.RWreturn | SP_Symbols.RWreverse | SP_Symbols.RWselect | SP_Symbols.RWseparate | SP_Symbols.RWsubtype | SP_Symbols.RWtask | SP_Symbols.RWterminate | SP_Symbols.RWthen | SP_Symbols.RWtype | SP_Symbols.RWuse | SP_Symbols.RWwhen | SP_Symbols.RWwhile | SP_Symbols.RWwith | -- SPARK95 reserved words SP_Symbols.RWabstract | SP_Symbols.RWrequeue | SP_Symbols.RWprotected | SP_Symbols.RWuntil | -- SPARK2005 reserved words SP_Symbols.RWinterface | SP_Symbols.RWsynchronized => Result := True; when others => Result := False; end case; return Result; end Is_Punct_Token; procedure Check_Following_Token (Prog_Text : in SPARK_IO.File_Type; Possible_Follower : in SP_Symbols.SP_Symbol; Transformed_Token : in SP_Symbols.SP_Terminal; Curr_Line : in out Line_Context; Next_Token : in out SP_Symbols.SP_Terminal; Lex_Str : out LexTokenManager.Lex_String) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Curr_Line from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Possible_Follower, --# Prog_Text, --# SPARK_IO.File_Sys & --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Curr_Line, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Prog_Text, --# SPARK_IO.File_Sys & --# Lex_Str from & --# Next_Token from *, --# CommandLineData.Content, --# Curr_Line, --# Dictionary.Dict, --# 
ErrorHandler.Error_Context, --# LexTokenManager.State, --# Possible_Follower, --# Prog_Text, --# SPARK_IO.File_Sys, --# Transformed_Token; --# pre E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Last_Token_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; --# post E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Last_Token_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; is Following_Token : SP_Symbols.SP_Terminal; begin LineManager.Next_Sig_Char (Prog_Text => Prog_Text, Curr_Line => Curr_Line); LineManager.Record_Curr_Pos (Curr_Line => Curr_Line); GetIdent (Curr_Line => Curr_Line, Token => Following_Token); if Following_Token = SP_Symbols.identifier then Check_Reserved (Curr_Line => Curr_Line, Start_Pos => Curr_Line.Last_Token_Pos, End_Pos => Curr_Line.Curr_Pos - 1, Look_Ahead => True, Token => Following_Token); if Following_Token = Possible_Follower then Next_Token := Transformed_Token; else LineManager.Reset_Curr_Pos (Curr_Line => Curr_Line); end if; else LineManager.Reset_Curr_Pos (Curr_Line => Curr_Line); end if; Lex_Str := LexTokenManager.Null_String; end Check_Following_Token; --------------------- procedure Insert_String_Literal (Line_In : in E_Strings.T; Start_Pos, End_Pos : in E_Strings.Positions; Lex_Str : out LexTokenManager.Lex_String) --# global in out LexTokenManager.State; --# derives LexTokenManager.State, --# Lex_Str from End_Pos, --# LexTokenManager.State, --# Line_In, --# Start_Pos; --# pre Start_Pos <= E_Strings.Get_Length (Line_In) and --# End_Pos <= E_Strings.Get_Length (Line_In); is Modified_Line : E_Strings.T := E_Strings.Empty_String; Ch : Character; Skip_Next : Boolean := False; begin if Start_Pos < End_Pos then -- copy leading quote E_Strings.Append_Char (E_Str => Modified_Line, Ch => E_Strings.Get_Element (E_Str => Line_In, Pos => Start_Pos)); -- copy character up to closing quote eliminating doubled quotes for I in E_Strings.Positions range Start_Pos + 1 .. End_Pos - 1 loop if Skip_Next then Skip_Next := False; else Ch := E_Strings.Get_Element (E_Str => Line_In, Pos => I); E_Strings.Append_Char (E_Str => Modified_Line, Ch => Ch); Skip_Next := Ch = '"'; end if; end loop; -- copy closing quote E_Strings.Append_Char (E_Str => Modified_Line, Ch => E_Strings.Get_Element (E_Str => Line_In, Pos => End_Pos)); -- put in string table LexTokenManager.Insert_Examiner_String (Str => Modified_Line, Lex_Str => Lex_Str); else Lex_Str := LexTokenManager.Null_String; end if; end Insert_String_Literal; --------------------- procedure Insert_Lex_String (The_Line : in E_Strings.T; Start_Pos, End_Pos : in E_Strings.Positions; Lex_Str : out LexTokenManager.Lex_String) --# global in out LexTokenManager.State; --# derives LexTokenManager.State, --# Lex_Str from End_Pos, --# LexTokenManager.State, --# Start_Pos, --# The_Line; is E_Str : E_Strings.T := E_Strings.Empty_String; begin for I in E_Strings.Positions range Start_Pos .. 
End_Pos loop E_Strings.Append_Char (E_Str => E_Str, Ch => E_Strings.Get_Element (E_Str => The_Line, Pos => I)); end loop; LexTokenManager.Insert_Examiner_String (Str => E_Str, Lex_Str => Lex_Str); end Insert_Lex_String; begin -- Lex Hidden_Part := False; Hide_Designator := LexTokenLists.Null_List; loop --# assert E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Last_Token_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; Previous_Context := Curr_Line.Context; LineManager.Next_Sig_Char (Prog_Text => Prog_Text, Curr_Line => Curr_Line); Start_Line := Curr_Line.Line_No; Start_Posn := Curr_Line.Curr_Pos; if Previous_Context = In_Annotation and then Curr_Line.Context = In_Ada then Next_Token := SP_Symbols.annotation_end; else NextLex (Curr_Line => Curr_Line, Token => Next_Token); end if; End_Posn := Curr_Line.Curr_Pos - 1; --# assert E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos = Curr_Line.Curr_Pos and --# Curr_Line.Last_Token_Pos <= Curr_Line.Curr_Pos and --# Start_Posn <= E_Strings.Get_Length (Curr_Line.Conts) and --# End_Posn <= E_Strings.Get_Length (Curr_Line.Conts) and --# ((Next_Token /= SP_Symbols.annotation_end and Next_Token /= SP_Symbols.SPEND) <-> (Start_Posn <= End_Posn)); case Next_Token is when SP_Symbols.identifier => Check_Reserved (Curr_Line => Curr_Line, Start_Pos => Start_Posn, End_Pos => End_Posn, Look_Ahead => False, Token => Next_Token); if Next_Token = SP_Symbols.identifier then if E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Start_Posn) = '$' then --# accept F, 41, "Stable expression here OK"; if Allow_Dollar then null; else -- Identifier starting with a $, which is illegal in -- this case. Issue a lexical error and then treat -- as an identifier starting at the next character. 
ErrorHandler.Lex_Error (Error_Message => "Illegal token", Recovery_Message => "Token ignored", Error_Item => LexTokenManager.Lex_Value'(Position => LexTokenManager.Token_Position'(Start_Line_No => Start_Line, Start_Pos => Start_Posn), Token_Str => LexTokenManager.Null_String)); Start_Posn := Start_Posn + 1; end if; --# end accept; end if; Insert_Lex_String (The_Line => Curr_Line.Conts, Start_Pos => Start_Posn, End_Pos => End_Posn, Lex_Str => Lex_Str); if Curr_Line.Context = In_Annotation then Curr_Line.Anno_Context := Mid_Annotation; end if; elsif Next_Token = SP_Symbols.RWand then Check_Following_Token (Prog_Text => Prog_Text, Possible_Follower => SP_Symbols.RWthen, Transformed_Token => SP_Symbols.RWandthen, Curr_Line => Curr_Line, Next_Token => Next_Token, Lex_Str => Lex_Str); elsif Next_Token = SP_Symbols.RWor then Check_Following_Token (Prog_Text => Prog_Text, Possible_Follower => SP_Symbols.RWelse, Transformed_Token => SP_Symbols.RWorelse, Curr_Line => Curr_Line, Next_Token => Next_Token, Lex_Str => Lex_Str); elsif Next_Token = SP_Symbols.RWnot then Check_Following_Token (Prog_Text => Prog_Text, Possible_Follower => SP_Symbols.RWin, Transformed_Token => SP_Symbols.RWnotin, Curr_Line => Curr_Line, Next_Token => Next_Token, Lex_Str => Lex_Str); elsif Next_Token = SP_Symbols.RWfor then Check_Following_Token (Prog_Text => Prog_Text, Possible_Follower => SP_Symbols.RWall, Transformed_Token => SP_Symbols.RWforall, Curr_Line => Curr_Line, Next_Token => Next_Token, Lex_Str => Lex_Str); if Next_Token = SP_Symbols.RWfor then Check_Following_Token (Prog_Text => Prog_Text, Possible_Follower => SP_Symbols.RWsome, Transformed_Token => SP_Symbols.RWforsome, Curr_Line => Curr_Line, Next_Token => Next_Token, Lex_Str => Lex_Str); end if; else Lex_Str := LexTokenManager.Null_String; end if; when SP_Symbols.string_literal => Insert_String_Literal (Line_In => Curr_Line.Conts, Start_Pos => Start_Posn, End_Pos => End_Posn, Lex_Str => Lex_Str); when SP_Symbols.character_literal | SP_Symbols.integer_number | SP_Symbols.real_number | SP_Symbols.based_integer => Insert_Lex_String (The_Line => Curr_Line.Conts, Start_Pos => Start_Posn, End_Pos => End_Posn, Lex_Str => Lex_Str); when SP_Symbols.annotation_start | SP_Symbols.proof_context => if Curr_Line.Context = In_Annotation then Next_Token := SP_Symbols.annotation_continuation; else LineManager.Set_Context (Curr_Line => Curr_Line, New_Context => In_Annotation); end if; if Curr_Line.Anno_Context = Ended_Annotation then Curr_Line.Anno_Context := Start_Annotation; end if; Lex_Str := LexTokenManager.Null_String; when SP_Symbols.annotation_end => LineManager.Set_Context (Curr_Line => Curr_Line, New_Context => In_Ada); Lex_Str := LexTokenManager.Null_String; when SP_Symbols.hide_directive => -- skip over hide LineManager.Next_Sig_Char (Prog_Text => Prog_Text, Curr_Line => Curr_Line); --# accept F, 10, Next_Token, "Skipping so ineffective assignment"; NextLex (Curr_Line => Curr_Line, Token => Next_Token); --# end accept; -- now get designator LineManager.Next_Sig_Char (Prog_Text => Prog_Text, Curr_Line => Curr_Line); Start_Line := Curr_Line.Line_No; Start_Posn := Curr_Line.Curr_Pos; NextLex (Curr_Line => Curr_Line, Token => Next_Token); End_Posn := Curr_Line.Curr_Pos - 1; if Next_Token = SP_Symbols.identifier then Check_Reserved (Curr_Line => Curr_Line, Start_Pos => Start_Posn, End_Pos => End_Posn, Look_Ahead => False, Token => Next_Token); end if; if Next_Token /= SP_Symbols.identifier then -- ???? Report Error ???? 
-- ErrorHandler.Lex_Error (Error_Message => "Hide designator missing", Recovery_Message => "Hide directive ignored", Error_Item => LexTokenManager.Lex_Value'(Position => LexTokenManager.Token_Position'(Start_Line_No => Start_Line, Start_Pos => Start_Posn), Token_Str => LexTokenManager.Null_String)); LineManager.Reset_Curr_Pos (Curr_Line => Curr_Line); Next_Token := SP_Symbols.comment; else Hidden_Part := True; Hide_Designator := LexTokenLists.Null_List; Insert_Lex_String (The_Line => Curr_Line.Conts, Start_Pos => Start_Posn, End_Pos => End_Posn, Lex_Str => Lex_Str); LexTokenLists.Append (Hide_Designator, Lex_Str); loop --# assert E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Last_Token_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; LineManager.Next_Sig_Char (Prog_Text => Prog_Text, Curr_Line => Curr_Line); exit when E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos) /= '.'; LineManager.Lookahead_Char (Curr_Line => Curr_Line, Ch => Ch); LineManager.Reject_Lookahead (Curr_Line => Curr_Line); exit when Ch = '.'; -- stop if .. LineManager.Accept_Char (Curr_Line => Curr_Line); -- absorb dot LineManager.Next_Sig_Char (Prog_Text => Prog_Text, Curr_Line => Curr_Line); Start_Line := Curr_Line.Line_No; Start_Posn := Curr_Line.Curr_Pos; NextLex (Curr_Line => Curr_Line, Token => Next_Token); End_Posn := Curr_Line.Curr_Pos - 1; if Next_Token = SP_Symbols.identifier then Check_Reserved (Curr_Line => Curr_Line, Start_Pos => Start_Posn, End_Pos => End_Posn, Look_Ahead => False, Token => Next_Token); end if; if Next_Token /= SP_Symbols.identifier then -- ???? Report Error ???? -- ErrorHandler.Lex_Error (Error_Message => "Hide designator incomplete", Recovery_Message => "Hide directive ignored", Error_Item => LexTokenManager.Lex_Value'(Position => LexTokenManager.Token_Position'(Start_Line_No => Start_Line, Start_Pos => Start_Posn), Token_Str => LexTokenManager.Null_String)); LineManager.Reset_Curr_Pos (Curr_Line => Curr_Line); Next_Token := SP_Symbols.comment; Hidden_Part := False; exit; end if; Insert_Lex_String (The_Line => Curr_Line.Conts, Start_Pos => Start_Posn, End_Pos => End_Posn, Lex_Str => Lex_Str); LexTokenLists.Append (Hide_Designator, Lex_Str); end loop; end if; Lex_Str := LexTokenManager.Null_String; when SP_Symbols.apostrophe => -- Check for attribute designator. LineManager.Next_Sig_Char (Prog_Text => Prog_Text, Curr_Line => Curr_Line); if Ada.Characters.Handling.Is_Letter (E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos)) then Start_Line := Curr_Line.Line_No; Start_Posn := Curr_Line.Curr_Pos; GetIdent (Curr_Line => Curr_Line, Token => Next_Token); End_Posn := Curr_Line.Curr_Pos - 1; Insert_Lex_String (The_Line => Curr_Line.Conts, Start_Pos => Start_Posn, End_Pos => End_Posn, Lex_Str => Lex_Str); if Next_Token = SP_Symbols.identifier then Next_Token := SP_Symbols.attribute_ident; else -- ???? Report Error ???? -- ErrorHandler.Lex_Error (Error_Message => "Attribute identifier expected", Recovery_Message => "Attribute identifier assumed", Error_Item => LexTokenManager.Lex_Value'(Position => LexTokenManager.Token_Position'(Start_Line_No => Start_Line, Start_Pos => Start_Posn), Token_Str => Lex_Str)); Next_Token := SP_Symbols.attribute_ident; -- Error recovery. 
end if; else Lex_Str := LexTokenManager.Null_String; end if; when SP_Symbols.illegal_id => Next_Token := SP_Symbols.identifier; -- Error recovery. Insert_Lex_String (The_Line => Curr_Line.Conts, Start_Pos => Start_Posn, End_Pos => End_Posn, Lex_Str => Lex_Str); -- ???? Report Error ???? -- ErrorHandler.Lex_Error (Error_Message => "Illegal identifier", Recovery_Message => "Identifier assumed", Error_Item => LexTokenManager.Lex_Value'(Position => LexTokenManager.Token_Position'(Start_Line_No => Start_Line, Start_Pos => Start_Posn), Token_Str => Lex_Str)); when SP_Symbols.illegal_number => Next_Token := SP_Symbols.integer_number; Insert_Lex_String (The_Line => Curr_Line.Conts, Start_Pos => Start_Posn, End_Pos => End_Posn, Lex_Str => Lex_Str); -- ???? Report Error ???? -- if E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Start_Posn) = '0' then ErrorHandler.Lex_Error (Error_Message => "Illegal number - possible mis-use of '0' instead of 'O' as first letter of identifier", Recovery_Message => "Number assumed", Error_Item => LexTokenManager.Lex_Value'(Position => LexTokenManager.Token_Position'(Start_Line_No => Start_Line, Start_Pos => Start_Posn), Token_Str => Lex_Str)); else ErrorHandler.Lex_Error (Error_Message => "Illegal number", Recovery_Message => "Number assumed", Error_Item => LexTokenManager.Lex_Value'(Position => LexTokenManager.Token_Position'(Start_Line_No => Start_Line, Start_Pos => Start_Posn), Token_Str => Lex_Str)); end if; when SP_Symbols.based_real => Insert_Lex_String (The_Line => Curr_Line.Conts, Start_Pos => Start_Posn, End_Pos => End_Posn, Lex_Str => Lex_Str); -- ???? Report Error ???? -- ErrorHandler.Lex_Error (Error_Message => "Based real numbers are not allowed", Recovery_Message => "Number assumed", Error_Item => LexTokenManager.Lex_Value'(Position => LexTokenManager.Token_Position'(Start_Line_No => Start_Line, Start_Pos => Start_Posn), Token_Str => Lex_Str)); when SP_Symbols.unterminated_string => Next_Token := SP_Symbols.string_literal; -- Error recovery. Insert_Lex_String (The_Line => Curr_Line.Conts, Start_Pos => Start_Posn, End_Pos => End_Posn, Lex_Str => Lex_Str); -- ???? Report Error ???? -- ErrorHandler.Lex_Error (Error_Message => "Unterminated string", Recovery_Message => "String assumed", Error_Item => LexTokenManager.Lex_Value'(Position => LexTokenManager.Token_Position'(Start_Line_No => Start_Line, Start_Pos => Start_Posn), Token_Str => Lex_Str)); when SP_Symbols.illegal_comment => Lex_Str := LexTokenManager.Null_String; Next_Token := SP_Symbols.comment; -- Error recovery. -- ???? Report Error ???? -- ErrorHandler.Lex_Error (Error_Message => "Illegal character in comment", Recovery_Message => "Illegal character ignored", Error_Item => LexTokenManager.Lex_Value'(Position => LexTokenManager.Token_Position'(Start_Line_No => Start_Line, Start_Pos => Start_Posn), Token_Str => LexTokenManager.Null_String)); when SP_Symbols.illegal_token => Lex_Str := LexTokenManager.Null_String; -- ???? Report Error ???? 
-- ErrorHandler.Lex_Error (Error_Message => "Illegal token", Recovery_Message => "Token ignored", Error_Item => LexTokenManager.Lex_Value'(Position => LexTokenManager.Token_Position'(Start_Line_No => Start_Line, Start_Pos => Start_Posn), Token_Str => LexTokenManager.Null_String)); when SP_Symbols.semicolon => Lex_Str := LexTokenManager.Null_String; if Curr_Line.Context = In_Annotation then Curr_Line.Anno_Context := Ended_Annotation; end if; when others => Lex_Str := LexTokenManager.Null_String; end case; exit when Hidden_Part or else (Next_Token /= SP_Symbols.comment and then Next_Token /= SP_Symbols.annotation_continuation and then Next_Token /= SP_Symbols.illegal_token); end loop; --# assert E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Last_Token_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; if Hidden_Part then End_Hide_Found := False; loop --# assert E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Last_Token_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; LineManager.Next_Sig_Char (Prog_Text => Prog_Text, Curr_Line => Curr_Line); Start_Line := Curr_Line.Line_No; Start_Posn := Curr_Line.Curr_Pos; NextLex (Curr_Line => Curr_Line, Token => Next_Token); End_Posn := Curr_Line.Curr_Pos - 1; if Next_Token = SP_Symbols.SPEND then End_Hide_Found := True; elsif Next_Token = SP_Symbols.identifier then Check_Reserved (Curr_Line => Curr_Line, Start_Pos => Start_Posn, End_Pos => End_Posn, Look_Ahead => False, Token => Next_Token); end if; if Next_Token = SP_Symbols.RWend then LineManager.Next_Sig_Char (Prog_Text => Prog_Text, Curr_Line => Curr_Line); Start_Line := Curr_Line.Line_No; Start_Posn := Curr_Line.Curr_Pos; NextLex (Curr_Line => Curr_Line, Token => Next_Token); End_Posn := Curr_Line.Curr_Pos - 1; if Next_Token = SP_Symbols.identifier then End_Designator := LexTokenLists.Null_List; Insert_Lex_String (The_Line => Curr_Line.Conts, Start_Pos => Start_Posn, End_Pos => End_Posn, Lex_Str => Lex_Str); LexTokenLists.Append (End_Designator, Lex_Str); -- Process remainder of dotted name (if any) loop --# assert E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Last_Token_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; LineManager.Next_Sig_Char (Prog_Text => Prog_Text, Curr_Line => Curr_Line); exit when E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos) /= '.'; LineManager.Lookahead_Char (Curr_Line => Curr_Line, Ch => Ch); LineManager.Reject_Lookahead (Curr_Line => Curr_Line); exit when Ch = '.'; -- stop if '..' 
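-- A second dot is the compound delimiter ".." rather than a selected-component dot, so the -- dotted name ends here. Each identifier accepted below is appended to End_Designator, which -- is then compared with Hide_Designator (LexTokenLists.Eq_Unit) to decide whether the hidden -- part has ended.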
LineManager.Accept_Char (Curr_Line => Curr_Line); -- absorb dot LineManager.Next_Sig_Char (Prog_Text => Prog_Text, Curr_Line => Curr_Line); Start_Posn := Curr_Line.Curr_Pos; NextLex (Curr_Line => Curr_Line, Token => Next_Token); -- get expected identifier End_Posn := Curr_Line.Curr_Pos - 1; exit when Next_Token /= SP_Symbols.identifier; Insert_Lex_String (The_Line => Curr_Line.Conts, Start_Pos => Start_Posn, End_Pos => End_Posn, Lex_Str => Lex_Str); LexTokenLists.Append (End_Designator, Lex_Str); end loop; if Next_Token = SP_Symbols.identifier then End_Hide_Found := LexTokenLists.Eq_Unit (First_Item => Hide_Designator, Second => End_Designator); end if; end if; end if; if End_Hide_Found then Hidden_Part := False; Next_Token := SP_Symbols.hide_directive; Lex_Str := LexTokenManager.Null_String; end if; exit when not Hidden_Part; end loop; end if; --# assert E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Last_Token_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; Token := Next_Token; Lex_Val := LexTokenManager.Lex_Value' (Position => LexTokenManager.Token_Position'(Start_Line_No => Start_Line, Start_Pos => Start_Posn), Token_Str => Lex_Str); Punct_Token := Is_Punct_Token (Token => Next_Token); end Lex; spark-2012.0.deb/examiner/sem-is_external_interface.adb0000644000175000017500000000526311753202336022046 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= -- Synopsis -- This function identifies whether a pragma is Interface or Import and whether -- it is appropriate for the language variant in use -------------------------------------------------------------------------------- separate (Sem) function Is_External_Interface (Pragma_Node : STree.SyntaxNode) return Boolean is Child_Pragma_Node : STree.SyntaxNode; Result : Boolean; begin Child_Pragma_Node := Child_Node (Current_Node => Pragma_Node); -- ASSUME Child_Pragma_Node = identifier OR assert_pragma if Syntax_Node_Type (Node => Child_Pragma_Node) = SP_Symbols.assert_pragma then -- ASSUME Child_Pragma_Node = assert_pragma Result := False; elsif Syntax_Node_Type (Node => Child_Pragma_Node) = SP_Symbols.identifier then -- ASSUME Child_Pragma_Node = identifier case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => Result := LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Node => Child_Pragma_Node), Lex_Str2 => LexTokenManager.Interface_Token) = LexTokenManager.Str_Eq; when CommandLineData.SPARK95_Onwards => Result := LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Node => Child_Pragma_Node), Lex_Str2 => LexTokenManager.Import_Token) = LexTokenManager.Str_Eq; end case; else Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child_Pragma_Node = identifier OR assert_pragma in Is_External_Interface"); end if; return Result; end Is_External_Interface; spark-2012.0.deb/examiner/sem-walk_expression_p-down_wf_aggregate_or_expression.adb0000644000175000017500000001705311753202336027673 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license.
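-- Overview: Down_Wf_Aggregate_Or_Expression pops the expected type of the aggregate item from -- the expression stack and decides whether to walk into the child node. For arrays a nested -- aggregate is expected for every dimension except the last; a mismatch either way is reported -- (error 38) and the walk is pruned, with an unknown-type record pushed for named associations -- so that the later 'up' processing does not underflow the stack. For records a nested -- aggregate at this point is always an error; for unknown types the walk simply continues.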
-- --============================================================================= separate (Sem.Walk_Expression_P) procedure Down_Wf_Aggregate_Or_Expression (Node : in STree.SyntaxNode; E_Stack : in out Exp_Stack.Exp_Stack_Type; Next_Node : out STree.SyntaxNode) is Name_Exp, Field_Name : Sem.Exp_Record; Record_Field_Found, Reached_Last_Dimension, Doing_Embedded_Aggregate : Boolean; Parent, Child : STree.SyntaxNode; begin Exp_Stack.Pop (Item => Name_Exp, Stack => E_Stack); if Name_Exp.Sort = Sem.Is_Parameter_Name then Record_Field_Found := True; Field_Name := Name_Exp; Exp_Stack.Pop (Item => Name_Exp, Stack => E_Stack); else Record_Field_Found := False; end if; Child := STree.Child_Node (Current_Node => Node); -- ASSUME Child = aggregate OR expression OR -- annotation_aggregate OR annotation_expression SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Child) = SP_Symbols.aggregate or else STree.Syntax_Node_Type (Node => Child) = SP_Symbols.expression or else STree.Syntax_Node_Type (Node => Child) = SP_Symbols.annotation_aggregate or else STree.Syntax_Node_Type (Node => Child) = SP_Symbols.annotation_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child = aggregate OR expression OR " & "annotation_aggregate OR annotation_expression in Down_Wf_Aggregate_Or_Expression"); Doing_Embedded_Aggregate := STree.Syntax_Node_Type (Node => Child) = SP_Symbols.aggregate or else STree.Syntax_Node_Type (Node => Child) = SP_Symbols.annotation_aggregate; if Dictionary.IsUnknownTypeMark (Name_Exp.Type_Symbol) then Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); if Doing_Embedded_Aggregate then Next_Node := STree.NullNode; else Next_Node := Child; end if; elsif Dictionary.TypeIsArray (Name_Exp.Type_Symbol) then Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); Reached_Last_Dimension := Name_Exp.Param_Count = Dictionary.GetNumberOfDimensions (Name_Exp.Type_Symbol); if Reached_Last_Dimension = Doing_Embedded_Aggregate then Next_Node := STree.NullNode; ErrorHandler.Semantic_Error (Err_Num => 38, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Child), Id_Str => LexTokenManager.Null_String); -- Clause below covers the case where not enough dimensions have been supplied. -- Because the tree walk has been pruned as a result of this error the expression -- is not traversed and not stacked; this is fine for positional association but -- in named association gives a stack underflow later on in either -- up_wf_named_association_rep or up_wf_component_association. To cover these cases -- we must put something on the stack. An Sem.Unknown_Type_Record seems appropriate. 
-- We only do this if the association is not positional if not Reached_Last_Dimension then Parent := STree.Parent_Node (Current_Node => Node); -- ASSUME Parent = component_association OR -- annotation_component_association OR name_value_property OR -- named_association OR named_association_rep OR -- annotation_named_association OR annotation_named_association_rep OR -- positional_association OR positional_association_rep OR -- annotation_positional_association OR annotation_positional_association_rep if STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.component_association or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.named_association or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.named_association_rep or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.annotation_component_association or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.annotation_named_association or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.annotation_positional_association_rep or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.name_value_property then Exp_Stack.Push (X => Sem.Unknown_Type_Record, Stack => E_Stack); elsif STree.Syntax_Node_Type (Node => Parent) /= SP_Symbols.positional_association_rep and then STree.Syntax_Node_Type (Node => Parent) /= SP_Symbols.annotation_positional_association_rep and then STree.Syntax_Node_Type (Node => Parent) /= SP_Symbols.positional_association and then STree.Syntax_Node_Type (Node => Parent) /= SP_Symbols.annotation_positional_association then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Parent = component_association OR " & "annotation_component_association OR name_value_property OR " & "named_association OR named_association_rep OR " & "annotation_named_association OR annotation_named_association_rep OR " & "positional_association OR positional_association_rep OR " & "annotation_positional_association OR annotation_positional_association_rep " & "in Down_Wf_Aggregate_Or_Expression"); end if; end if; else Next_Node := Child; end if; else -- must be record Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); if Doing_Embedded_Aggregate then Next_Node := STree.NullNode; ErrorHandler.Semantic_Error (Err_Num => 38, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Child), Id_Str => LexTokenManager.Null_String); else if Record_Field_Found then --# accept Flow, 504, Field_Name, "Expected Fieldname to have undefined value"; Exp_Stack.Push (X => Field_Name, Stack => E_Stack); --# end accept; end if; Next_Node := Child; end if; end if; --# accept Flow, 602, E_Stack, Field_Name, "Fieldname always defined if needed"; end Down_Wf_Aggregate_Or_Expression; spark-2012.0.deb/examiner/sparklex-lex-getstring.adb0000644000175000017500000000526011753202336021347 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (SparkLex.Lex) procedure GetString (Curr_Line : in out Line_Context; Token : out SP_Symbols.SP_Terminal) is String_Not_Ended : Boolean; Ch : Character; begin LineManager.Accept_Char (Curr_Line => Curr_Line); -- Quotation already recognised String_Not_Ended := True; while String_Not_Ended loop --# assert E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Curr_Pos > Curr_Line~.Curr_Pos and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos = Curr_Line.Curr_Pos and --# Curr_Line.Last_Token_Pos = Curr_Line~.Last_Token_Pos; Ch := E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos); if Ch = '"' then LineManager.Accept_Char (Curr_Line => Curr_Line); -- Check for adjacent quotations. (See LRM 2.6). if E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos) = '"' then LineManager.Accept_Char (Curr_Line => Curr_Line); -- Two adjacent quotations - String not ended. else String_Not_Ended := False; end if; elsif Extended_ASCII (Ch => Ch) then LineManager.Accept_Char (Curr_Line => Curr_Line); else String_Not_Ended := False; end if; end loop; --# accept F, 501, Ch, "Ch always defined on this path"; if Ch = '"' then Token := SP_Symbols.string_literal; else Token := SP_Symbols.unterminated_string; end if; --# end accept; --# accept F, 602, Token, Ch, "Ch always defined here"; end GetString; spark-2012.0.deb/examiner/sem-compunit-wf_package_body-wf_refine-wf_clause.adb0000644000175000017500000014154411753202336026364 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
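-- Overview: Wf_Clause checks a single clause of a refinement definition (subject 'is' -- constituent list). Do_Subject checks that the subject is an abstract own variable of the -- package being refined that has not already been declared or refined (protected own variables -- and virtual elements cannot be refined). Do_Constituent_List then checks each constituent and -- its mode against the subject: a constituent may be a variable of this package, of an embedded -- package, or (other than in SPARK83) of a child package owned by this package, and valid -- constituents are entered in the Dictionary. A refinement written without any constituent list -- is reported separately with a dedicated message.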
-- --============================================================================= separate (Sem.CompUnit.Wf_Package_Body.Wf_Refine) procedure Wf_Clause (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) is Subject_Sym : Dictionary.Symbol; Subject_Node : STree.SyntaxNode; Subject_Loc : LexTokenManager.Token_Position; Constituent_Node : STree.SyntaxNode; ----------------------------------------------------------------- procedure Do_Subject (Subject_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Subject_Sym : out Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Subject_Node & --# STree.Table, --# Subject_Sym from CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# STree.Table, --# Subject_Node; --# pre Syntax_Node_Type (Subject_Node, STree.Table) = SP_Symbols.identifier; --# post STree.Table = STree.Table~; is Var_Sym : Dictionary.Symbol; Var_Pos : LexTokenManager.Token_Position; Var_Str : LexTokenManager.Lex_String; begin Var_Str := Node_Lex_String (Node => Subject_Node); Var_Pos := Node_Position (Node => Subject_Node); Var_Sym := Dictionary.LookupItem (Name => Var_Str, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Dictionary.GetRegion (Scope)), Context => Dictionary.ProofContext, Full_Package_Name => False); if Dictionary.Is_Null_Symbol (Var_Sym) then -- not declared or visible ErrorHandler.Semantic_Error (Err_Num => 136, Reference => ErrorHandler.No_Reference, Position => Var_Pos, Id_Str => Var_Str); Subject_Sym := Dictionary.NullSymbol; elsif Dictionary.IsOwnVariable (Var_Sym) then -- possibly ok, it's an own var if not Dictionary.GetOwnVariableProtected (Var_Sym) then -- cannot refine protected state if not Dictionary.IsVirtualElement (Var_Sym) then -- cannot refine virtual elements if Dictionary.Is_Declared (Item => Var_Sym) -- declared as concrete or else Dictionary.OwnVariableHasConstituents (Var_Sym) then -- or already refined away so raise error 73 ErrorHandler.Semantic_Error (Err_Num => 73, Reference => ErrorHandler.No_Reference, Position => Var_Pos, Id_Str => Var_Str); Subject_Sym := Dictionary.NullSymbol; elsif Dictionary.OwnVariableHasType (Var_Sym, Scope) then -- type announced so refinement valid only if type is abstract proof type if Dictionary.Is_Declared (Item => Dictionary.GetType (Var_Sym)) then -- type is concrete type so refinement definitely illegal ErrorHandler.Semantic_Error (Err_Num => 73, Reference => ErrorHandler.No_Reference, Position => Var_Pos, Id_Str => Var_Str); Subject_Sym := Dictionary.NullSymbol; else -- type not declared so we don't know whether problem is illegal refinement -- or missing abstract type declaration. 
Need special error message here ErrorHandler.Semantic_Error (Err_Num => 273, Reference => ErrorHandler.No_Reference, Position => Var_Pos, Id_Str => Var_Str); Subject_Sym := Dictionary.NullSymbol; end if; else -- this is the valid case STree.Set_Node_Lex_String (Sym => Var_Sym, Node => Subject_Node); Subject_Sym := Var_Sym; end if; else -- it's a virtual element which cannot be refined ErrorHandler.Semantic_Error (Err_Num => 858, Reference => ErrorHandler.No_Reference, Position => Var_Pos, Id_Str => Var_Str); Subject_Sym := Dictionary.NullSymbol; end if; else -- it's a protected own variable which cannot be refined ErrorHandler.Semantic_Error (Err_Num => 859, Reference => ErrorHandler.No_Reference, Position => Var_Pos, Id_Str => Var_Str); Subject_Sym := Dictionary.NullSymbol; end if; else -- it's not an own variable so it can't be refined ErrorHandler.Semantic_Error (Err_Num => 74, Reference => ErrorHandler.No_Reference, Position => Var_Pos, Id_Str => Var_Str); Subject_Sym := Dictionary.NullSymbol; end if; end Do_Subject; ----------------------------------------------------------------- procedure Do_Constituent_List (Constituent_List_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Subject_Sym : in Dictionary.Symbol; Subject_Loc : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# Constituent_List_Node, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# STree.Table, --# Subject_Sym & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Constituent_List_Node, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Subject_Loc, --# Subject_Sym; --# pre Syntax_Node_Type (Constituent_List_Node, STree.Table) = SP_Symbols.constituent_list; --# post STree.Table = STree.Table~; is type Modes is record Value : Dictionary.Modes; Is_Valid : Boolean; end record; It : STree.Iterator; Constituent_Node : STree.SyntaxNode; Mode : Modes; ----------------------------------------------------------------- procedure Do_Constituent (Constituent_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Subject_Sym : in Dictionary.Symbol; Mode : in Modes; Subject_Loc : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict from *, --# CommandLineData.Content, --# Constituent_Node, --# ContextManager.Ops.Unit_Stack, --# LexTokenManager.State, --# Mode, --# Scope, --# STree.Table, --# Subject_Sym & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Constituent_Node, --# Dictionary.Dict, --# LexTokenManager.State, --# Mode, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Subject_Sym & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Constituent_Node, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Mode, --# Scope, --# STree.Table, --# Subject_Loc, --# Subject_Sym & --# STree.Table from *, 
--# CommandLineData.Content, --# Constituent_Node, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# Subject_Sym; --# pre Syntax_Node_Type (Constituent_Node, STree.Table) = SP_Symbols.entire_variable; --# post STree.Table = STree.Table~; is First_Node : STree.SyntaxNode; Second_Node : STree.SyntaxNode; Child_First_Node : STree.SyntaxNode; ---------------------------------------------------------------- procedure Do_Concrete_Constituent (Var_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Subject_Sym : in Dictionary.Symbol; Mode : in Modes; Subject_Loc : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in STree.Table; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# LexTokenManager.State, --# Mode, --# Scope, --# STree.Table, --# Subject_Sym, --# Var_Node & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Var_Node & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Mode, --# Scope, --# STree.Table, --# Subject_Loc, --# Subject_Sym, --# Var_Node; --# pre Syntax_Node_Type (Var_Node, STree.Table) = SP_Symbols.identifier; is Var_Pos : LexTokenManager.Token_Position; Var_Str : LexTokenManager.Lex_String; begin Var_Str := Node_Lex_String (Node => Var_Node); Var_Pos := Node_Position (Node => Var_Node); if Dictionary.IsDefined (Name => Var_Str, Scope => Scope, Context => Dictionary.ProofContext, Full_Package_Name => False) then ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Var_Pos, Id_Str => Var_Str); elsif Mode.Is_Valid then Dictionary.AddConstituent (Name => Var_Str, Subject => Subject_Sym, Mode => Mode.Value, SubjectReference => Dictionary.Location'(Start_Position => Subject_Loc, End_Position => Subject_Loc), Comp_Unit => ContextManager.Ops.Current_Unit, ConstituentReference => Dictionary.Location'(Start_Position => Var_Pos, End_Position => Var_Pos)); end if; end Do_Concrete_Constituent; ----------------------------------------------------------------- procedure Do_Embedded_Constituent (Pack_Node : in STree.SyntaxNode; Var_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Subject_Sym : in Dictionary.Symbol; Mode : in Modes; Subject_Loc : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# LexTokenManager.State, --# Mode, --# Pack_Node, --# Scope, --# STree.Table, --# Subject_Sym, --# Var_Node & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Pack_Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Var_Node & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Mode, --# Pack_Node, --# Scope, --# STree.Table, --# 
Subject_Loc, --# Subject_Sym, --# Var_Node & --# STree.Table from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Pack_Node, --# Scope; --# pre Syntax_Node_Type (Pack_Node, STree.Table) = SP_Symbols.identifier and --# Syntax_Node_Type (Var_Node, STree.Table) = SP_Symbols.identifier; --# post STree.Table = STree.Table~; is Pack_Sym : Dictionary.Symbol; Pack_Pos : LexTokenManager.Token_Position; Pack_Str : LexTokenManager.Lex_String; Var_Sym : Dictionary.Symbol; Var_Pos : LexTokenManager.Token_Position; Var_Str : LexTokenManager.Lex_String; Ok_To_Add : Boolean; begin Ok_To_Add := False; Pack_Str := Node_Lex_String (Node => Pack_Node); Pack_Pos := Node_Position (Node => Pack_Node); Var_Str := Node_Lex_String (Node => Var_Node); Var_Pos := Node_Position (Node => Var_Node); Pack_Sym := Dictionary.LookupItem (Name => Pack_Str, Scope => Scope, Context => Dictionary.ProofContext, Full_Package_Name => False); if Dictionary.Is_Null_Symbol (Pack_Sym) then Ok_To_Add := True; elsif Dictionary.IsPackage (Pack_Sym) then if Dictionary.GetContext (Pack_Sym) = Dictionary.ProgramContext then ErrorHandler.Semantic_Error (Err_Num => 75, Reference => ErrorHandler.No_Reference, Position => Pack_Pos, Id_Str => Pack_Str); else STree.Set_Node_Lex_String (Sym => Pack_Sym, Node => Pack_Node); Var_Sym := Dictionary.LookupSelectedItem (Prefix => Pack_Sym, Selector => Var_Str, Scope => Scope, Context => Dictionary.ProofContext); if Dictionary.Is_Null_Symbol (Var_Sym) then Ok_To_Add := True; else ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Var_Pos, Id_Str => Var_Str); end if; end if; else ErrorHandler.Semantic_Error (Err_Num => 76, Reference => ErrorHandler.No_Reference, Position => Pack_Pos, Id_Str => Pack_Str); end if; if Ok_To_Add and then Mode.Is_Valid then Dictionary.AddEmbeddedConstituent (PackageName => Pack_Str, VariableName => Var_Str, Subject => Subject_Sym, Mode => Mode.Value, SubjectReference => Dictionary.Location'(Start_Position => Subject_Loc, End_Position => Subject_Loc), Comp_Unit => ContextManager.Ops.Current_Unit, ConstituentReference => Dictionary.Location'(Start_Position => Var_Pos, End_Position => Var_Pos)); end if; end Do_Embedded_Constituent; ---------------------------------------------------------------- procedure Do_Child_Constituent (Pack_D_Simple_Name_Node : in STree.SyntaxNode; Var_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Subject_Sym : in Dictionary.Symbol; Mode : in Modes; Subject_Loc : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# LexTokenManager.State, --# Mode, --# Pack_D_Simple_Name_Node, --# Scope, --# STree.Table, --# Subject_Sym, --# Var_Node & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Mode, --# Pack_D_Simple_Name_Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Subject_Sym, --# Var_Node & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Mode, --# Pack_D_Simple_Name_Node, --# Scope, --# STree.Table, --# Subject_Loc, --# Subject_Sym, --# Var_Node & --# 
STree.Table from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Pack_D_Simple_Name_Node, --# Scope, --# Subject_Sym, --# Var_Node; --# pre Syntax_Node_Type (Pack_D_Simple_Name_Node, STree.Table) = SP_Symbols.dotted_simple_name and --# Syntax_Node_Type (Var_Node, STree.Table) = SP_Symbols.identifier; --# post STree.Table = STree.Table~; is Pack_Sym : Dictionary.Symbol; Pack_Pos : LexTokenManager.Token_Position; Pack_Str : LexTokenManager.Lex_String; Pack_Node : STree.SyntaxNode; Var_Sym : Dictionary.Symbol; Var_Pos : LexTokenManager.Token_Position; Var_Str : LexTokenManager.Lex_String; Owner : Dictionary.Symbol; Err_Num : Natural; begin Pack_Node := Last_Child_Of (Start_Node => Pack_D_Simple_Name_Node); -- ASSUME Pack_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Pack_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Pack_Node = identifier in Do_Child_Constituent"); Pack_Str := Node_Lex_String (Node => Pack_Node); Pack_Pos := Node_Position (Node => Pack_Node); Pack_Sym := Dictionary.LookupItem (Name => Pack_Str, Scope => Dictionary.GlobalScope, Context => Dictionary.ProofContext, Full_Package_Name => False); loop --# assert Syntax_Node_Type (Var_Node, STree.Table) = SP_Symbols.identifier and --# Syntax_Node_Type (Pack_Node, STree.Table) = SP_Symbols.identifier and --# STree.Table = STree.Table~; if Dictionary.Is_Null_Symbol (Pack_Sym) then ErrorHandler.Semantic_Error (Err_Num => 137, Reference => ErrorHandler.No_Reference, Position => Pack_Pos, Id_Str => Pack_Str); exit; end if; if not Dictionary.IsPackage (Pack_Sym) then ErrorHandler.Semantic_Error (Err_Num => 18, Reference => ErrorHandler.No_Reference, Position => Pack_Pos, Id_Str => Pack_Str); Pack_Sym := Dictionary.NullSymbol; exit; end if; STree.Set_Node_Lex_String (Sym => Pack_Sym, Node => Pack_Node); Pack_Node := Next_Sibling (Current_Node => Parent_Node (Current_Node => Pack_Node)); -- ASSUME Pack_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Pack_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Pack_Node = identifier in Do_Child_Constituent"); exit when Pack_Node = Var_Node; Pack_Str := Node_Lex_String (Node => Pack_Node); Pack_Pos := Node_Position (Node => Pack_Node); Pack_Sym := Dictionary.LookupSelectedItem (Prefix => Pack_Sym, Selector => Pack_Str, Scope => Dictionary.GlobalScope, Context => Dictionary.ProofContext); end loop; if not Dictionary.Is_Null_Symbol (Pack_Sym) then Owner := Dictionary.GetRegion (Scope); if not Dictionary.Packages_Are_Equal (Left_Symbol => Dictionary.GetPackageOwner (Pack_Sym), Right_Symbol => Owner) then -- package not owned by subject package ErrorHandler.Semantic_Error (Err_Num => 619, Reference => ErrorHandler.No_Reference, Position => Pack_Pos, Id_Str => Pack_Str); else Var_Str := Node_Lex_String (Node => Var_Node); Var_Pos := Node_Position (Node => Var_Node); Var_Sym := Dictionary.LookupSelectedItem (Prefix => Pack_Sym, Selector => Var_Str, Scope => Scope, Context => Dictionary.ProofContext); if Dictionary.Is_Null_Symbol (Var_Sym) then -- constituent own variable not found ErrorHandler.Semantic_Error (Err_Num => 138, Reference => ErrorHandler.No_Reference, Position => Var_Pos, Id_Str => Var_Str); elsif not Dictionary.IsOwnVariable (Var_Sym) then ErrorHandler.Semantic_Error (Err_Num => 74, Reference => ErrorHandler.No_Reference, Position => Var_Pos, Id_Str => Var_Str); elsif Dictionary.IsRefinementConstituent (Owner, Var_Sym) 
then -- already declared as constituent ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Var_Pos, Id_Str => Var_Str); else -- valid constituent if Dictionary.OwnVariableIsInitialized (Subject_Sym) /= Dictionary.OwnVariableIsInitialized (Var_Sym) then -- subject and constituent have different initialization if Dictionary.OwnVariableIsInitialized (Subject_Sym) then -- if subject is initialized and constituent is not then -- it is only an error if the constituent is not a stream var if Dictionary.GetOwnVariableOrConstituentMode (Var_Sym) = Dictionary.DefaultMode then ErrorHandler.Semantic_Error (Err_Num => 622, Reference => ErrorHandler.No_Reference, Position => Var_Pos, Id_Str => Var_Str); else STree.Set_Node_Lex_String (Sym => Var_Sym, Node => Var_Node); end if; else ErrorHandler.Semantic_Error (Err_Num => 623, Reference => ErrorHandler.No_Reference, Position => Var_Pos, Id_Str => Var_Str); end if; else STree.Set_Node_Lex_String (Sym => Var_Sym, Node => Var_Node); end if; -- constituent must have same mode as own variable it being -- associated with if Mode.Value /= Dictionary.GetOwnVariableMode (Var_Sym) then if Dictionary.GetOwnVariableMode (Var_Sym) = Dictionary.InMode then Err_Num := 705; -- must be IN elsif Dictionary.GetOwnVariableMode (Var_Sym) = Dictionary.OutMode then Err_Num := 706; -- must be OUT else Err_Num := 707; -- no mode permitted end if; ErrorHandler.Semantic_Error (Err_Num => Err_Num, Reference => ErrorHandler.No_Reference, Position => Var_Pos, Id_Str => Var_Str); end if; -- end of mode checks if Mode.Is_Valid then Dictionary.AddChildConstituent (Variable => Var_Sym, Subject => Subject_Sym, Mode => Mode.Value, SubjectReference => Dictionary.Location'(Start_Position => Subject_Loc, End_Position => Subject_Loc), Comp_Unit => ContextManager.Ops.Current_Unit, ConstituentReference => Dictionary.Location'(Start_Position => Var_Pos, End_Position => Var_Pos)); end if; end if; end if; end if; end Do_Child_Constituent; begin -- Do_Constituent First_Node := Child_Node (Current_Node => Child_Node (Current_Node => Constituent_Node)); -- ASSUME First_Node = dotted_simple_name OR identifier if Syntax_Node_Type (Node => First_Node) = SP_Symbols.identifier then -- ASSUME First_Node = identifier Do_Concrete_Constituent (Var_Node => First_Node, Scope => Scope, Subject_Sym => Subject_Sym, Mode => Mode, Subject_Loc => Subject_Loc); elsif Syntax_Node_Type (Node => First_Node) = SP_Symbols.dotted_simple_name then -- ASSUME First_Node = dotted_simple_name Second_Node := Next_Sibling (Current_Node => First_Node); -- ASSUME Second_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Second_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Second_Node = identifier in Do_Constituent"); Child_First_Node := Child_Node (Current_Node => First_Node); -- ASSUME Child_First_Node = dotted_simple_name OR identifier if Syntax_Node_Type (Node => Child_First_Node) = SP_Symbols.identifier then -- ASSUME Child_First_Node = identifier Do_Embedded_Constituent (Pack_Node => Child_First_Node, Var_Node => Second_Node, Scope => Scope, Subject_Sym => Subject_Sym, Mode => Mode, Subject_Loc => Subject_Loc); elsif Syntax_Node_Type (Node => Child_First_Node) = SP_Symbols.dotted_simple_name then -- ASSUME Child_First_Node = dotted_simple_name if CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83 then Do_Child_Constituent (Pack_D_Simple_Name_Node => Child_First_Node, Var_Node => Second_Node, Scope => 
Scope, Subject_Sym => Subject_Sym, Mode => Mode, Subject_Loc => Subject_Loc); else ErrorHandler.Semantic_Error (Err_Num => 156, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Constituent_Node), Id_Str => LexTokenManager.Null_String); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child_First_Node = dotted_simple_name OR identifier in Do_Constituent"); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect First_Node = dotted_simple_name OR identifier in Do_Constituent"); end if; end Do_Constituent; ----------------------------------------------------------------- procedure Check_Modifier (Node : in STree.SyntaxNode; Subject_Sym : in Dictionary.Symbol; Mode : out Modes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table, --# Subject_Sym & --# Mode from Dictionary.Dict, --# Node, --# STree.Table, --# Subject_Sym; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.own_variable_modifier; is Modifier_Node, Err_Node, Mode_Node : STree.SyntaxNode; Subject_Mode : Dictionary.Modes; begin Subject_Mode := Dictionary.GetOwnVariableMode (Subject_Sym); Mode := Modes'(Value => Dictionary.DefaultMode, Is_Valid => False); Err_Node := Next_Sibling (Current_Node => Node); -- ASSUME Err_Node = entire_variable SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Err_Node) = SP_Symbols.entire_variable, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Err_Node = entire_variable in Check_Modifier"); Modifier_Node := Child_Node (Current_Node => Node); -- ASSUME Modifier_Node = mode OR protected_modifier OR protected_moded_modifier OR task_modifier case Syntax_Node_Type (Node => Modifier_Node) is when SP_Symbols.mode => -- ASSUME Modifier_Node = mode Mode_Node := Child_Node (Current_Node => Modifier_Node); -- ASSUME Mode_Node = in_mode OR inout_mode OR out_mode OR NULL if Mode_Node = STree.NullNode then -- ASSUME Mode_Node = NULL -- Default is legal only if subject is Default if Subject_Mode = Dictionary.DefaultMode then Mode := Modes'(Value => Dictionary.DefaultMode, Is_Valid => True); else ErrorHandler.Semantic_Error (Err_Num => 701, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Err_Node), Id_Str => Dictionary.GetSimpleName (Subject_Sym)); end if; else -- ASSUME Mode_Node = in_mode OR inout_mode OR out_mode case Syntax_Node_Type (Node => Mode_Node) is when SP_Symbols.in_mode => -- ASSUME Mode_Node = in_mode -- IN is legal if subject is IN or subject if Default if Subject_Mode = Dictionary.DefaultMode or else Subject_Mode = Dictionary.InMode then Mode := Modes'(Value => Dictionary.InMode, Is_Valid => True); else ErrorHandler.Semantic_Error (Err_Num => 701, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Err_Node), Id_Str => Dictionary.GetSimpleName (Subject_Sym)); end if; when SP_Symbols.out_mode => -- ASSUME Mode_Node = out_mode -- OUT is legal if subject is OUT or subject if Default if Subject_Mode = Dictionary.DefaultMode or else Subject_Mode = Dictionary.OutMode then Mode := Modes'(Value => Dictionary.OutMode, Is_Valid => True); else ErrorHandler.Semantic_Error 
(Err_Num => 701, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Err_Node), Id_Str => Dictionary.GetSimpleName (Subject_Sym)); end if; when SP_Symbols.inout_mode => -- ASSUME Mode_Node = inout_mode ErrorHandler.Semantic_Error (Err_Num => 700, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Err_Node), Id_Str => LexTokenManager.Null_String); when others => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Mode_Node = in_mode OR inout_mode OR out_mode OR NULL in Check_Modifier"); end case; end if; when SP_Symbols.protected_modifier => -- ASSUME Modifier_Node = protected_modifier -- Refinement constituents cannot be protected. ErrorHandler.Semantic_Error (Err_Num => 859, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Err_Node), Id_Str => Dictionary.GetSimpleName (Subject_Sym)); when SP_Symbols.protected_moded_modifier | SP_Symbols.task_modifier => -- ASSUME Modifier_Node = protected_moded_modifier OR task_modifier null; when others => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Modifier_Node = mode OR protected_modifier OR " & "protected_moded_modifier OR task_modifier in Check_Modifier"); end case; end Check_Modifier; begin -- Do_Constituent_List It := Find_First_Node (Node_Kind => SP_Symbols.own_variable_modifier, From_Root => Constituent_List_Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Constituent_Node := Get_Node (It => It); --# assert Syntax_Node_Type (Constituent_Node, STree.Table) = SP_Symbols.own_variable_modifier and --# Constituent_Node = Get_Node (It) and --# STree.Table = STree.Table~; Check_Modifier (Node => Constituent_Node, Subject_Sym => Subject_Sym, Mode => Mode); Constituent_Node := Next_Sibling (Current_Node => Constituent_Node); -- ASSUME Constituent_Node = entire_variable SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Constituent_Node) = SP_Symbols.entire_variable, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Constituent_Node = entire_variable in Do_Constituent_List"); Do_Constituent (Constituent_Node => Constituent_Node, Scope => Scope, Subject_Sym => Subject_Sym, Mode => Mode, Subject_Loc => Subject_Loc); It := STree.NextNode (It); end loop; end Do_Constituent_List; begin -- Wf_Clause Subject_Node := Child_Node (Current_Node => Node); -- ASSUME Subject_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Subject_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Subject_Node = identifier in Wf_Clause"); Subject_Loc := Node_Position (Node => Subject_Node); Constituent_Node := Next_Sibling (Current_Node => Subject_Node); -- ASSUME Constituent_Node = constituent_list OR NULL if Constituent_Node = STree.NullNode then -- ASSUME Constituent_Node = NULL -- There is _no_ constituent node at all. This used to be -- rejected by the grammar alone, but this resulted in a poor -- error message, and it's a common "beginner" mistake. -- The grammar now accepts this, so we can detect and reject it -- here with a much better message.
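-- (For example, a body refinement written as just '--# own State;' where something like -- '--# own State is State_A, State_B;' was intended; the names here are purely illustrative.)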
ErrorHandler.Semantic_Error (Err_Num => 115, Reference => ErrorHandler.No_Reference, Position => Subject_Loc, Id_Str => LexTokenManager.Null_String); elsif Syntax_Node_Type (Node => Constituent_Node) = SP_Symbols.constituent_list then -- ASSUME Constituent_Node = constituent_list Do_Subject (Subject_Node => Subject_Node, Scope => Scope, Subject_Sym => Subject_Sym); if not Dictionary.Is_Null_Symbol (Subject_Sym) then Do_Constituent_List (Constituent_List_Node => Constituent_Node, Scope => Scope, Subject_Sym => Subject_Sym, Subject_Loc => Subject_Loc); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Constituent_Node = constituent_list OR NULL in Wf_Clause"); end if; end Wf_Clause; spark-2012.0.deb/examiner/sem-wf_package_declaration-wf_package_specification-wf_visible.adb0000644000175000017500000002332711753202336031262 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- Overview: -- Loops through all visible_part_rep below visible_part (passed in as Node) and -- calls the appropriate wf for the NextDerivative node of each. Cannot directly -- raise any errors but called procedures can.
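-- Each visible item is dispatched on its node kind: basic_declarative_item, -- subprogram_declaration, generic_subprogram_instantiation, apragma, private_type_declaration, -- deferred_constant_declaration and renaming_declaration; private types and deferred constants -- are checked by the local procedures Wf_Private_Type_Declaration and Wf_Deferred declared below.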
---------------------------------------------------------------------------- separate (Sem.Wf_Package_Declaration.Wf_Package_Specification) procedure Wf_Visible (Node : in STree.SyntaxNode; Pack_Sym : in Dictionary.Symbol; Current_Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) is Next_Node, Node_To_Check : STree.SyntaxNode; Unused : Dictionary.Symbol; ------------------------------------------------------------------------ procedure Wf_Private_Type_Declaration (Node : in STree.SyntaxNode; Pack_Sym : in Dictionary.Symbol; Current_Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Pack_Sym, --# STree.Table & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Pack_Sym, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.private_type_declaration; --# post STree.Table = STree.Table~; is separate; ------------------------------------------------------------------------ procedure Wf_Deferred (Node : in STree.SyntaxNode; Pack_Sym : in Dictionary.Symbol; Current_Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Pack_Sym, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Pack_Sym, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.deferred_constant_declaration; --# post STree.Table = STree.Table~; is separate; begin -- Wf_Visible Next_Node := Last_Child_Of (Start_Node => Node); while Next_Node /= Node loop --# assert STree.Table = STree.Table~; -- ASSUME Next_Node = visible_part_rep SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Next_Node) = SP_Symbols.visible_part_rep, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = visible_part_rep in Wf_Visible"); Node_To_Check := Next_Sibling (Next_Node); -- ASSUME Node_To_Check = basic_declarative_item OR private_type_declaration OR -- deferred_constant_declaration OR subprogram_declaration OR -- generic_subprogram_instantiation OR apragma OR renaming_declaration case Syntax_Node_Type (Node => Node_To_Check) is when SP_Symbols.basic_declarative_item => -- ASSUME Node_To_Check = basic_declarative_item Wf_Basic_Declarative_Item (Node => Node_To_Check, Current_Scope => 
Current_Scope, Component_Data => Component_Data, The_Heap => The_Heap); when SP_Symbols.subprogram_declaration => -- ASSUME Node_To_Check = subprogram_declaration --# accept Flow, 10, Unused, "Expected ineffective assignment"; Wf_Subprogram_Declaration (Node => Node_To_Check, Inherit_Node => STree.NullNode, Context_Node => STree.NullNode, Generic_Formal_Part_Node => STree.NullNode, Current_Scope => Current_Scope, Generic_Unit => Dictionary.NullSymbol, Component_Data => Component_Data, The_Heap => The_Heap, Subprog_Sym => Unused); --# end accept; when SP_Symbols.generic_subprogram_instantiation => -- ASSUME Node_To_Check = generic_subprogram_instantiation Wf_Generic_Subprogram_Instantiation (Node => Node_To_Check, Scope => Current_Scope, Component_Data => Component_Data, The_Heap => The_Heap); when SP_Symbols.apragma => -- ASSUME Node_To_Check = apragma Wf_Pragma (Node => Node_To_Check, Scope => Current_Scope); when SP_Symbols.private_type_declaration => -- ASSUME Node_To_Check = private_type_declaration Wf_Private_Type_Declaration (Node => Node_To_Check, Pack_Sym => Pack_Sym, Current_Scope => Current_Scope); when SP_Symbols.deferred_constant_declaration => -- ASSUME Node_To_Check = deferred_constant_declaration Wf_Deferred (Node => Node_To_Check, Pack_Sym => Pack_Sym, Current_Scope => Current_Scope); when SP_Symbols.renaming_declaration => -- ASSUME Node_To_Check = renaming_declaration Wf_Renaming_Declaration (Node => Node_To_Check, Scope => Current_Scope); when others => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node_To_Check = basic_declarative_item OR private_type_declaration OR " & "deferred_constant_declaration OR subprogram_declaration OR generic_subprogram_instantiation OR " & "apragma OR renaming_declaration in Wf_Visible"); end case; Next_Node := Parent_Node (Current_Node => Next_Node); end loop; --# accept Flow, 33, Unused, "Expected to be neither referenced nor exported"; end Wf_Visible; spark-2012.0.deb/examiner/sem-walk_expression_p-walk_annotation_expression.adb0000644000175000017500000013756711753202336026727 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
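-- Overview: Walk_Annotation_Expression walks the syntax tree of an annotation expression in the -- given scope and type context, using the expression stack E_Stack, a type-context stack and the -- aggregate stack; a sequence (Ref_Var) for the variables referenced by the expression is created -- on The_Heap, and the resulting Sem.Exp_Record is returned in Result. Nested subprograms handle -- store / store_list nodes and quantified expressions (which change and then restore the current -- scope); the Dump_* procedures are debugging aids only, hidden from flow analysis.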
-- --============================================================================= with Debug; with E_Strings; with SPrint; separate (Sem.Walk_Expression_P) procedure Walk_Annotation_Expression (Exp_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Type_Context : in Dictionary.Symbol; Context : in Sem.Anno_Tilde_Context; Result : out Sem.Exp_Record; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) is Next_Node, Last_Node, Local_Node : STree.SyntaxNode; Node_Type : SP_Symbols.SP_Symbol; L_Heap : Lists.List_Heap; Val : Maths.Value; E_Stack : Exp_Stack.Exp_Stack_Type; T_Stack : Type_Context_Stack.T_Stack_Type; Sym : Dictionary.Symbol; Ref_Var : SeqAlgebra.Seq; L_Scope : Dictionary.Scopes; String_Value : LexTokenManager.Lex_String; -------------------------------------------------------------- -- Procedures for debugging Expression syntax and tree walking -- These are hidden with "derives ;" so as not to pollute the -- annotations -------------------------------------------------------------- procedure Dump_Syntax_Tree --# derives ; is --# hide Dump_Syntax_Tree; begin if CommandLineData.Content.Debug.Expressions then SPrint.Dump_Syntax_Tree (Exp_Node, 0); end if; end Dump_Syntax_Tree; procedure Dump_Down_Node --# derives ; is --# hide Dump_Down_Node; begin if CommandLineData.Content.Debug.Expressions then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Walk_Annotation_Expression DOWN encounters node" & ExaminerConstants.RefType'Image (STree.NodeToRef (Last_Node)) & ' ' & SP_Symbols.SP_Symbol'Image (STree.Syntax_Node_Type (Node => Last_Node)), 0); end if; end Dump_Down_Node; procedure Dump_Up_Node --# derives ; is --# hide Dump_Up_Node; begin if CommandLineData.Content.Debug.Expressions then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Walk_Annotation_Expression UP encounters node" & ExaminerConstants.RefType'Image (STree.NodeToRef (Last_Node)) & ' ' & SP_Symbols.SP_Symbol'Image (STree.Syntax_Node_Type (Node => Last_Node)), 0); end if; end Dump_Up_Node; procedure Dump_Result --# derives ; is --# hide Dump_Result; begin if CommandLineData.Content.Debug.Expressions then if Result.Is_Constant then SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Walk_Annotation_Expression constant result is ", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Maths.ValueToString (Result.Value)); else SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Walk_Annotation_Expression result is not constant", 0); end if; SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "WalkExpression Result is: ", 0); Put_Exp_Record (Result); end if; end Dump_Result; ------------------------------------------------------------------------ procedure Down_Wf_Store (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; Next_Node : out STree.SyntaxNode) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# E_Stack from *, --# Dictionary.Dict, --# Scope & --# Next_Node from Dictionary.Dict, --# E_Stack, --# Node, --# Scope, --# STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.store; --# post STree.Syntax_Node_Type (Next_Node, STree.Table) = 
SP_Symbols.store_list or --# Next_Node = STree.NullNode; is separate; -------------------------------------------- procedure Up_Wf_Store (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# E_Stack, --# STree.Table from CommandLineData.Content, --# Dictionary.Dict, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.store; --# post STree.Table = STree.Table~; is separate; ------------------------------------------------------------------------ procedure Down_Wf_Store_List (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; Next_Node : out STree.SyntaxNode) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in STree.Table; --# derives E_Stack from * & --# Next_Node from Dictionary.Dict, --# E_Stack, --# Node, --# Scope, --# STree.Table & --# null from CommandLineData.Content; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.store_list; --# post STree.Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.store_list or --# STree.Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.annotation_expression or --# Next_Node = STree.NullNode; is separate; -------------------------------------------- procedure Up_Wf_Store_List (Node : in out STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# E_Stack from *, --# Dictionary.Dict, --# Scope & --# Node from *, --# Dictionary.Dict, --# E_Stack, --# STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.store_list; --# post STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.store_list or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.store; is separate; --------------------------------------------------------- procedure Down_Wf_Quantifier (Node : in STree.SyntaxNode; E_Stack : in out Exp_Stack.Exp_Stack_Type; Scope : in out Dictionary.Scopes; Next_Node : out STree.SyntaxNode) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# Scope, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# E_Stack from *, --# 
CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# Next_Node from CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.quantified_expression; --# post STree.Table = STree.Table~ and --# (STree.Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.annotation_arange or --# STree.Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.predicate or --# Next_Node = STree.NullNode); is separate; ------------------------------------------------------------ procedure Up_Wf_Quantifier (Node : in STree.SyntaxNode; E_Stack : in out Exp_Stack.Exp_Stack_Type; Scope : in out Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# E_Stack from *, --# Dictionary.Dict, --# Node, --# Scope, --# STree.Table & --# Scope from *, --# Dictionary.Dict; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.quantified_expression; is separate; begin -- Walk_Annotation_Expression Dump_Syntax_Tree; Exp_Stack.Init (Stack => E_Stack); Aggregate_Stack.Init; Lists.Init (L_Heap); Type_Context_Stack.Init (Stack => T_Stack); Type_Context_Stack.Push (X => Type_Context, Stack => T_Stack); SeqAlgebra.CreateSeq (TheHeap => The_Heap, S => Ref_Var); L_Scope := Scope; -- scope may change locally in loops but will always be back to -- original scope on exit from procedure. In all calls below -- L_Scope replaces Scope. 
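-- Added commentary (not part of the original source): the loop below is an
-- iterative down/up walk of the annotation expression subtree.  On the way
-- DOWN, literal and context-setting nodes push entries onto E_Stack and
-- T_Stack; on the way UP, the Wf_* checkers reduce those entries to a
-- single Exp_Record per completed sub-expression.  In outline (names as in
-- the code below):
--
--    Next_Node := Exp_Node;
--    loop
--       Last_Node := Next_Node;                    -- DOWN actions
--       if Next_Node = STree.NullNode then
--          loop                                    -- climb until a sibling exists
--             Next_Node := STree.Next_Sibling (Current_Node => Last_Node);
--             exit when Next_Node /= STree.NullNode;
--             Next_Node := STree.Parent_Node (Current_Node => Last_Node);
--             Last_Node := Next_Node;              -- UP actions
--             exit when Next_Node = Exp_Node;
--          end loop;
--       end if;
--       exit when Next_Node = Exp_Node;
--    end loop;
--
-- On exit, E_Stack should hold exactly one entry (the result) and T_Stack
-- should again hold only the original Type_Context; both conditions are
-- checked after the loop.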
Next_Node := Exp_Node; loop --# assert STree.Table = STree.Table~ and --# Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State) and --# Type_Context_Stack.Stack_Is_Valid (T_Stack); Last_Node := Next_Node; Dump_Down_Node; Node_Type := STree.Syntax_Node_Type (Node => Last_Node); case Node_Type is when SP_Symbols.character_literal => -- ASSUME Last_Node = character_literal Exp_Stack.Push (X => Sem.Exp_Record'(Type_Symbol => Dictionary.GetPredefinedCharacterType, Other_Symbol => Dictionary.NullSymbol, Stream_Symbol => Dictionary.NullSymbol, Tagged_Parameter_Symbol => Dictionary.NullSymbol, Variable_Symbol => Dictionary.NullSymbol, Param_Count => 0, Param_List => Lists.Null_List, Sort => Sem.Type_Result, Arg_List_Found => False, Is_AVariable => False, Is_An_Entire_Variable => False, Errors_In_Expression => False, Has_Operators => False, Is_Constant => True, Is_Static => True, Is_ARange => False, String_Value => LexTokenManager.Null_String, Value => Get_Character_Literal (Node => Last_Node), Range_RHS => Maths.NoValue), Stack => E_Stack); Next_Node := STree.NullNode; when SP_Symbols.string_literal => -- ASSUME Last_Node = string_literal String_Value := STree.Node_Lex_String (Node => Last_Node); --# accept F, 41, "Stable expression here OK"; if CommandLineData.Content.Debug.Expressions then Debug.Print_Lex_Str ("Walk_Annotation_Expression pushing string literal with value: ", String_Value); end if; --# end accept; Exp_Stack.Push (X => Sem.Exp_Record'(Type_Symbol => Dictionary.GetPredefinedStringType, Other_Symbol => Dictionary.NullSymbol, Stream_Symbol => Dictionary.NullSymbol, Tagged_Parameter_Symbol => Dictionary.NullSymbol, Variable_Symbol => Dictionary.NullSymbol, Param_Count => 0, Param_List => Lists.Null_List, Sort => Sem.Type_Result, Arg_List_Found => False, Is_AVariable => False, Is_An_Entire_Variable => False, Errors_In_Expression => False, Has_Operators => False, Is_Constant => True, Is_Static => (CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83), Is_ARange => False, String_Value => String_Value, Value => Maths.NoValue, Range_RHS => Get_String_Literal_Length (Str => String_Value)), Stack => E_Stack); Next_Node := STree.NullNode; when SP_Symbols.numeric_literal => -- ASSUME Last_Node = numeric_literal Local_Node := STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => Last_Node)); -- ASSUME Local_Node = integer_number OR real_number OR based_integer OR based_real if STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.real_number then -- ASSUME Local_Node = real_number Sem.Get_Literal_Value (Node => Local_Node, Val => Val); Exp_Stack.Push (X => Sem.Exp_Record'(Type_Symbol => Dictionary.GetUniversalRealType, Other_Symbol => Dictionary.NullSymbol, Stream_Symbol => Dictionary.NullSymbol, Tagged_Parameter_Symbol => Dictionary.NullSymbol, Variable_Symbol => Dictionary.NullSymbol, Param_Count => 0, Param_List => Lists.Null_List, Sort => Sem.Type_Result, Arg_List_Found => False, Is_AVariable => False, Is_An_Entire_Variable => False, Errors_In_Expression => False, Has_Operators => False, Is_Constant => True, Is_Static => True, Is_ARange => False, String_Value => LexTokenManager.Null_String, Value => Val, Range_RHS => Maths.NoValue), Stack => E_Stack); elsif STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.integer_number or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.based_integer or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.based_real then -- ASSUME Local_Node = integer_number OR based_integer OR based_real 
Sem.Get_Literal_Value (Node => Local_Node, Val => Val); Exp_Stack.Push (X => Sem.Exp_Record'(Type_Symbol => Dictionary.GetUniversalIntegerType, Other_Symbol => Dictionary.NullSymbol, Stream_Symbol => Dictionary.NullSymbol, Tagged_Parameter_Symbol => Dictionary.NullSymbol, Variable_Symbol => Dictionary.NullSymbol, Param_Count => 0, Param_List => Lists.Null_List, Sort => Sem.Type_Result, Arg_List_Found => False, Is_AVariable => False, Is_An_Entire_Variable => False, Errors_In_Expression => False, Has_Operators => False, Is_Constant => True, Is_Static => True, Is_ARange => False, String_Value => LexTokenManager.Null_String, Value => Val, Range_RHS => Maths.NoValue), Stack => E_Stack); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = integer_number OR real_number OR " & "based_integer OR based_real in Walk_Annotation_Expression"); end if; Next_Node := STree.NullNode; when SP_Symbols.annotation_selector => -- ASSUME Last_Node = annotation_selector Next_Node := STree.NullNode; when SP_Symbols.annotation_simple_name => -- ASSUME Last_Node = annotation_simple_name Local_Node := STree.Parent_Node (Current_Node => Last_Node); -- ASSUME Local_Node = annotation_name OR annotation_named_argument_association OR annotation_selector if STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.annotation_named_argument_association then -- ASSUME Local_Node = annotation_named_argument_association -- do not look at identifier in this case Next_Node := STree.NullNode; elsif STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.annotation_name or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.annotation_selector then -- ASSUME Local_Node = annotation_name OR annotation_selector Next_Node := STree.Child_Node (Current_Node => Last_Node); -- ASSUME Next_Node = identifier SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = identifier in Walk_Annotation_Expression"); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = annotation_name OR annotation_named_argument_association OR " & "annotation_selector in Walk_Annotation_Expression"); end if; when SP_Symbols.identifier => -- ASSUME Last_Node = identifier Wf_Identifier (Node => Last_Node, Scope => L_Scope, E_Stack => E_Stack, The_Heap => The_Heap, Ref_Var => Ref_Var, Context => Context); Next_Node := STree.NullNode; when SP_Symbols.annotation_name_argument_list => -- ASSUME Last_Node = annotation_name_argument_list Down_Wf_Name_Argument_List (Node => Last_Node, Scope => L_Scope, E_Stack => E_Stack, Heap_Param => L_Heap, Next_Node => Next_Node); when SP_Symbols.annotation_aggregate => -- ASSUME Last_Node = annotation_aggregate Down_Wf_Aggregate (Node => Last_Node, Scope => L_Scope, Next_Node => Next_Node, E_Stack => E_Stack, Heap_Param => L_Heap); when SP_Symbols.annotation_aggregate_choice_rep => -- ASSUME Last_Node = annotation_aggregate_choice_rep Wf_Aggregate_Choice_Rep (Node => Last_Node, Scope => L_Scope, E_Stack => E_Stack, Heap_Param => L_Heap, Next_Node => Next_Node); when SP_Symbols.record_component_selector_name => -- ASSUME Last_Node = record_component_selector_name Wf_Record_Component_Selector_Name (Node => Last_Node, Scope => Scope, E_Stack => E_Stack, Heap_Param => L_Heap); Next_Node := STree.NullNode; when SP_Symbols.annotation_aggregate_or_expression => -- ASSUME Last_Node = annotation_aggregate_or_expression 
Down_Wf_Aggregate_Or_Expression (Node => Last_Node, E_Stack => E_Stack, Next_Node => Next_Node); when SP_Symbols.annotation_attribute_designator => -- ASSUME Last_Node = annotation_attribute_designator Type_Context_Stack.Push (X => Attribute_Designator_Type_From_Context (Exp_Node => Last_Node, E_Stack => E_Stack, T_Stack => T_Stack), Stack => T_Stack); Next_Node := STree.Child_Node (Current_Node => Last_Node); -- ASSUME Next_Node = annotation_attribute_designator OR attribute_ident SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_attribute_designator or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.attribute_ident, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = annotation_attribute_designator OR attribute_ident in Walk_Annotation_Expression"); when SP_Symbols.annotation_range_constraint => -- ASSUME Last_Node = annotation_range_constraint Type_Context_Stack.Push (X => Range_Constraint_Type_From_Context (Exp_Node => Last_Node, E_Stack => E_Stack, T_Stack => T_Stack), Stack => T_Stack); Next_Node := STree.Child_Node (Current_Node => Last_Node); -- ASSUME Next_Node = annotation_arange SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_arange, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = annotation_arange in Walk_Annotation_Expression"); when SP_Symbols.annotation_simple_expression => -- ASSUME Last_Node = annotation_simple_expression Type_Context_Stack.Push (X => Simple_Expression_Type_From_Context (Exp_Node => Last_Node, T_Stack => T_Stack), Stack => T_Stack); Next_Node := STree.Child_Node (Current_Node => Last_Node); -- ASSUME Next_Node = annotation_simple_expression OR annotation_simple_expression_opt SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_simple_expression or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_simple_expression_opt, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = annotation_simple_expression OR annotation_simple_expression_opt " & "in Walk_Annotation_Expression"); when SP_Symbols.annotation_expression => -- ASSUME Last_Node = annotation_expression Expression_Type_From_Context (Exp_Node => Last_Node, E_Stack => E_Stack, T_Stack => T_Stack, New_Context_Type => Sym); Type_Context_Stack.Push (X => Sym, Stack => T_Stack); Next_Node := STree.Child_Node (Current_Node => Last_Node); -- ASSUME Next_Node = annotation_relation OR quantified_expression SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_relation or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.quantified_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = annotation_relation OR quantified_expression in Walk_Annotation_Expression"); when SP_Symbols.annotation_primary => -- ASSUME Last_Node = annotation_primary Type_Context_Stack.Push (X => Primary_Type_From_Context (Node => Last_Node, T_Stack => T_Stack), Stack => T_Stack); Next_Node := STree.Child_Node (Current_Node => Last_Node); -- ASSUME Next_Node = numeric_literal OR character_literal OR string_literal OR annotation_name OR -- annotation_qualified_expression OR annotation_expression OR annotation_attribute SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.numeric_literal or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.character_literal or else STree.Syntax_Node_Type 
(Node => Next_Node) = SP_Symbols.string_literal or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_name or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_qualified_expression or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_expression or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_attribute, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = numeric_literal OR character_literal OR string_literal OR annotation_name OR " & "annotation_qualified_expression OR annotation_expression OR annotation_attribute in Walk_Annotation_Expression"); when SP_Symbols.store => -- ASSUME Last_Node = store Down_Wf_Store (Node => Last_Node, Scope => L_Scope, E_Stack => E_Stack, Next_Node => Next_Node); when SP_Symbols.store_list => -- ASSUME Last_Node = store_list Down_Wf_Store_List (Node => Last_Node, Scope => L_Scope, E_Stack => E_Stack, Next_Node => Next_Node); when SP_Symbols.quantified_expression => -- ASSUME Last_Node = quantified_expression Down_Wf_Quantifier (Node => Last_Node, E_Stack => E_Stack, Scope => L_Scope, Next_Node => Next_Node); when others => if Node_Type in SP_Symbols.SP_Non_Terminal then Next_Node := STree.Child_Node (Current_Node => Last_Node); else Next_Node := STree.NullNode; end if; end case; -------------------------------------------------up loop---------- if Next_Node = STree.NullNode then loop --# assert STree.Table = STree.Table~ and --# Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State) and --# Type_Context_Stack.Stack_Is_Valid (T_Stack); Next_Node := STree.Next_Sibling (Current_Node => Last_Node); exit when Next_Node /= STree.NullNode; -- new branch to right Next_Node := STree.Parent_Node (Current_Node => Last_Node); Last_Node := Next_Node; Dump_Up_Node; case STree.Syntax_Node_Type (Node => Last_Node) is when SP_Symbols.annotation_expression => -- ASSUME Last_Node = annotation_expression Wf_Expression (Node => Last_Node, Scope => L_Scope, E_Stack => E_Stack, T_Stack => T_Stack); Type_Context_Stack.Pop (Stack => T_Stack); when SP_Symbols.annotation_expression_rep1 | SP_Symbols.annotation_expression_rep2 | SP_Symbols.annotation_expression_rep3 | SP_Symbols.annotation_expression_rep4 | SP_Symbols.annotation_expression_rep5 | SP_Symbols.annotation_expression_rep6 | SP_Symbols.annotation_expression_rep7 => -- ASSUME Last_Node = annotation_expression_rep1 OR annotation_expression_rep2 OR annotation_expression_rep3 OR -- annotation_expression_rep4 OR annotation_expression_rep5 OR annotation_expression_rep6 OR -- annotation_expression_rep7 Wf_Expression (Node => Last_Node, Scope => L_Scope, E_Stack => E_Stack, T_Stack => T_Stack); when SP_Symbols.annotation_simple_expression => -- ASSUME Last_Node = annotation_simple_expression Wf_Simple_Expression (Node => Last_Node, Scope => L_Scope, E_Stack => E_Stack, T_Stack => T_Stack, Context_Requires_Static => False); Type_Context_Stack.Pop (Stack => T_Stack); when SP_Symbols.annotation_simple_expression_opt => -- ASSUME Last_Node = annotation_simple_expression_opt Wf_Simple_Expression_Opt (Node => Last_Node, Scope => L_Scope, E_Stack => E_Stack, T_Stack => T_Stack); when SP_Symbols.annotation_term => -- ASSUME Last_Node = annotation_term Wf_Term (Node => Last_Node, Scope => L_Scope, E_Stack => E_Stack, T_Stack => T_Stack, Context_Requires_Static => False); when SP_Symbols.annotation_factor => -- ASSUME Last_Node = annotation_factor Wf_Factor (Node => Last_Node, Scope => L_Scope, E_Stack => E_Stack, T_Stack => T_Stack); 
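-- Note (added commentary, stated in outline only): each Wf_* call in this
-- UP-pass dispatch consumes the Exp_Records of the node's already-walked
-- operands from E_Stack and pushes a single record for the composite
-- expression, so the depth of E_Stack mirrors the number of
-- sub-expressions still to be reduced.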
when SP_Symbols.annotation_relation => -- ASSUME Last_Node = annotation_relation Wf_Relation (Node => Last_Node, Scope => L_Scope, E_Stack => E_Stack, T_Stack => T_Stack); when SP_Symbols.annotation_range_constraint => -- ASSUME Last_Node = annotation_range_constraint Type_Context_Stack.Pop (Stack => T_Stack); when SP_Symbols.annotation_arange => -- ASSUME Last_Node = annotation_arange Wf_Arange (Node => Last_Node, Scope => L_Scope, E_Stack => E_Stack); when SP_Symbols.annotation_selected_component => -- ASSUME Last_Node = annotation_selected_component Wf_Selected_Component (Node => Last_Node, Scope => L_Scope, Ref_Var => Ref_Var, E_Stack => E_Stack, Component_Data => Component_Data, The_Heap => The_Heap, Context => Context); when SP_Symbols.annotation_attribute => -- ASSUME Last_Node = annotation_attribute Wf_Attribute (E_Stack => E_Stack); when SP_Symbols.annotation_attribute_designator => -- ASSUME Last_Node = annotation_attribute_designator Wf_Attribute_Designator (Node => Last_Node, Scope => L_Scope, E_Stack => E_Stack, The_Heap => The_Heap, Ref_Var => Ref_Var); Type_Context_Stack.Pop (Stack => T_Stack); when SP_Symbols.annotation_primary => -- ASSUME Last_Node = annotation_primary Wf_Primary (Node => Last_Node, Scope => L_Scope, Ref_Var => Ref_Var, E_Stack => E_Stack, Component_Data => Component_Data, The_Heap => The_Heap); Type_Context_Stack.Pop (Stack => T_Stack); when SP_Symbols.annotation_positional_argument_association => -- ASSUME Last_Node = annotation_positional_argument_association Wf_Positional_Argument_Association (Node => Last_Node, Scope => L_Scope, Ref_Var => Ref_Var, E_Stack => E_Stack, Component_Data => Component_Data, The_Heap => The_Heap); when SP_Symbols.annotation_named_argument_association => -- ASSUME Last_Node = annotation_named_argument_association Wf_Named_Argument_Association (Node => Last_Node, Scope => L_Scope, E_Stack => E_Stack, Heap_Param => L_Heap); when SP_Symbols.annotation_name_argument_list => -- ASSUME Last_Node = annotation_name_argument_list Up_Wf_Name_Argument_List (Node => Last_Node, E_Stack => E_Stack, Heap_Param => L_Heap); when SP_Symbols.annotation_ancestor_part => -- ASSUME Last_Node = annotation_ancestor_part Wf_Ancestor_Part (Node => Last_Node, Scope => Scope, E_Stack => E_Stack, Heap_Param => L_Heap); when SP_Symbols.annotation_aggregate_choice => -- ASSUME Last_Node = annotation_aggregate_choice Wf_Aggregate_Choice (Node => Last_Node, Scope => L_Scope, E_Stack => E_Stack); when SP_Symbols.annotation_named_association_rep => -- ASSUME Last_Node = annotation_named_association_rep Wf_Named_Association_Rep (Node => Last_Node, Scope => L_Scope, E_Stack => E_Stack, Heap_Param => L_Heap); when SP_Symbols.annotation_named_record_component_association => -- ASSUME Last_Node = annotation_named_record_component_association Wf_Named_Record_Component_Association (Node => Last_Node, Scope => Scope, E_Stack => E_Stack, Heap_Param => L_Heap); when SP_Symbols.annotation_positional_association | SP_Symbols.annotation_record_component_association => -- ASSUME Last_Node = annotation_positional_association OR annotation_record_component_association Wf_Positional_Association (Node => Last_Node, E_Stack => E_Stack); when SP_Symbols.annotation_aggregate_or_expression => -- ASSUME Last_Node = annotation_aggregate_or_expression Up_Wf_Aggregate_Or_Expression (Node => Last_Node, Scope => L_Scope, E_Stack => E_Stack); when SP_Symbols.annotation_positional_record_component_association => -- ASSUME Last_Node = annotation_positional_record_component_association 
Wf_Positional_Record_Component_Association (Node => Last_Node, Scope => Scope, E_Stack => E_Stack); when SP_Symbols.annotation_component_association => -- ASSUME Last_Node = annotation_component_association Wf_Component_Association (Node => Last_Node, Scope => L_Scope, E_Stack => E_Stack); when SP_Symbols.annotation_aggregate | SP_Symbols.annotation_extension_aggregate => -- ASSUME Last_Node = annotation_aggregate OR annotation_extension_aggregate Up_Wf_Aggregate (Node => Last_Node, Scope => L_Scope, E_Stack => E_Stack); when SP_Symbols.annotation_qualified_expression => -- ASSUME Last_Node = annotation_qualified_expression Wf_Qualified_Expression (Node => Last_Node, Scope => L_Scope, E_Stack => E_Stack); when SP_Symbols.store => -- ASSUME Last_Node = store Up_Wf_Store (Node => Last_Node, Scope => L_Scope, E_Stack => E_Stack); when SP_Symbols.store_list => -- ASSUME Last_Node = store_list Up_Wf_Store_List (Node => Last_Node, Scope => L_Scope, E_Stack => E_Stack); when SP_Symbols.quantified_expression => -- ASSUME Last_Node = quantified_expression Up_Wf_Quantifier (Node => Last_Node, E_Stack => E_Stack, Scope => L_Scope); when others => null; end case; exit when Next_Node = Exp_Node; -- got back to top end loop; -- up end if; exit when Next_Node = Exp_Node; -- met start point on way up end loop; -- down if not Exp_Stack.Has_One_Entry (Stack => E_Stack) then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Expression_Stack_Corrupt, Msg => "in Walk_Annotation_Expression"); end if; if not (Type_Context_Stack.Has_One_Entry (Stack => T_Stack) and then Dictionary.Types_Are_Equal (Left_Symbol => Type_Context_Stack.Top (Stack => T_Stack), Right_Symbol => Type_Context, Full_Range_Subtype => False)) then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Type_Context_Stack_Corrupt, Msg => "in Walk_Annotation_Expression"); end if; --# accept Flow, 10, E_Stack, "Expected ineffective assignment"; Exp_Stack.Pop (Item => Result, Stack => E_Stack); --# end accept; Dump_Result; SeqAlgebra.DisposeOfSeq (TheHeap => The_Heap, S => Ref_Var); end Walk_Annotation_Expression; spark-2012.0.deb/examiner/debug.ads0000644000175000017500000001267011753202336016036 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= --------------------------------------------------------------------------------------- -- -- -- Useful debug routines. They all "derive null ..." to minimize impact on -- -- self-analysis. 
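--
-- Added note (not part of the original banner): a dependency clause of the
-- form "derives null from X;" records that a subprogram's imports flow to
-- no exports, so calling a debug routine leaves the caller's own flow
-- relation untouched.  For example, PrintMsg below is annotated, in
-- outline, as
--
--    procedure PrintMsg (Msg : in String; NewLine : in Boolean);
--       derives null from Msg, NewLine;    (written as a SPARK annotation)
--
-- which is why these routines can be sprinkled through the Examiner's own
-- annotated sources without disturbing its self-analysis.
--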
-- -- -- -------------------------------------------------------------------------------- with Cells; with CStacks; with Dictionary; with Heap; with LexTokenManager; with SeqAlgebra; with STree; use type Dictionary.Visibility; --# inherit Cells, --# CStacks, --# Dictionary, --# E_Strings, --# Heap, --# LexTokenManager, --# SeqAlgebra, --# SPARK_IO, --# STree; package Debug is procedure PrintMsg (Msg : in String; NewLine : in Boolean); --# derives null from Msg, --# NewLine; -- Prints Sym to Standard_Output procedure Print_Sym_Raw (Sym : in Dictionary.Symbol); --# derives null from Sym; -- Prints Msg, Sym, and a New_Line to Standard_Output procedure Print_Sym (Msg : in String; Sym : in Dictionary.Symbol); --# derives null from Msg, --# Sym; -- Like Print_Sym, but produces more information (such as if the -- function is a proof or ada function) procedure Print_Function_Sym (Msg : in String; Sym : in Dictionary.Symbol); --# derives null from Msg, --# Sym; procedure PrintScope (Msg : in String; Scope : in Dictionary.Scopes); --# derives null from Msg, --# Scope; procedure PrintInt (Msg : in String; I : in Integer); --# derives null from I, --# Msg; procedure PrintBool (Msg : in String; B : in Boolean); --# derives null from B, --# Msg; procedure Print_Lex_Str (Msg : in String; L : in LexTokenManager.Lex_String); --# derives null from L, --# Msg; -- The_Heap needs to be "in out" here for DAG_IO.PrintDAG... procedure PrintDAG (Msg : in String; DAG : in Cells.Cell; The_Heap : in out Cells.Heap_Record; Scope : in Dictionary.Scopes); --# derives The_Heap from * & --# null from DAG, --# Msg, --# Scope; procedure Write_DAG_To_File (Filename : in String; DAG : in Cells.Cell; The_Heap : in out Cells.Heap_Record; Scope : in Dictionary.Scopes); --# derives The_Heap from * & --# null from DAG, --# Filename, --# Scope; procedure Write_Heap_To_File (Filename : in String; The_Heap : in out Cells.Heap_Record); --# derives The_Heap from * & --# null from Filename; procedure Print_Cell (Msg : in String; The_Heap : in out Cells.Heap_Record; The_Cell : in Cells.Cell); --# derives The_Heap from * & --# null from Msg, --# The_Cell; procedure Print_Sym_Seq (Msg : in String; Seq : in SeqAlgebra.Seq; The_Heap : in Heap.HeapRecord); --# derives null from Msg, --# Seq, --# The_Heap; procedure PrintSeq (Msg : in String; Seq : in SeqAlgebra.Seq; The_Heap : in Heap.HeapRecord); --# derives null from Msg, --# Seq, --# The_Heap; procedure PrintNode (Msg : in String; N : in STree.SyntaxNode); --# derives null from Msg, --# N; procedure PrintTraceback (Msg : in String; Depth : in Natural); --# derives null from Depth, --# Msg; procedure Dump_Stack (Msg : in String; Scope : in Dictionary.Scopes; VCG_Heap : in out Cells.Heap_Record; Stack : in CStacks.Stack); --# derives VCG_Heap from * & --# null from Msg, --# Scope, --# Stack; end Debug; spark-2012.0.deb/examiner/flowanalyser-flowanalysepartition.adb0000644000175000017500000015201111753202336023703 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- Overview: -- Perform partition flow analysis as follows: -- 1. Calculate claimed Rho by examining the partition annotation -- 2. Calculate actual Rho by: -- a. Examining each withed task (adding as imports anything the task suspends on) -- b. Examining each with interrupt routine (adding as import the PO that encloses -- the interrupt handler or the user-chosen InterruptStream name if one was given. -- 3. Compare the claimed and actual Rho relations and report differences. -- The following are NOT reported: -- a. Dependencies on OUT streams or updates of IN streams (must be implicit) -- 4. Check the actual Rho and claimed Rho for usage errors: -- a. All claimed imports get used somewhere -- b. All claimed exports get written somewhere -- c. All actual exports are mentioned as exports somewhere in claimed Rho -- d. All actual imports are mentioned as imports somewhere in claimed Rho -- 5. Check that the last statement of the main program is a plain loop if no -- tasks have been found during the above analysis -------------------------------------------------------------------------------- separate (FlowAnalyser) procedure FlowAnalysePartition (Node : in STree.SyntaxNode; TheHeap : in out Heap.HeapRecord) is FlowErrorsFound : Boolean := False; SemErrorsFound : Boolean; EndPosition : LexTokenManager.Token_Position; Dependency_Node : STree.SyntaxNode; PartitionRho : RelationAlgebra.Relation; PartitionImports, PartitionExports, ActualImports, ActualExports : SeqAlgebra.Seq; ActualRho : RelationAlgebra.Relation; TaskFound : Boolean := False; ------------------------------------------------------------------- procedure CheckLastStatementOfEnvironmentTaskIsPlainLoop --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Node; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table; is EndDesigNode, CurrentNode : STree.SyntaxNode; LastLoop : Dictionary.Symbol; function LastStatementFromEndDesignator (Node : STree.SyntaxNode) return STree.SyntaxNode --# global in STree.Table; is Result : STree.SyntaxNode; begin Result := STree.Child_Node (Current_Node => STree.Parent_Node (Current_Node => Node)); -- pragma_rep Result := STree.Next_Sibling (Current_Node => Result); -- declarative_part or seq_of_stat if STree.Syntax_Node_Type (Node => Result) /= SP_Symbols.sequence_of_statements then Result := STree.Next_Sibling (Current_Node => Result); -- seq_of_stat end if; Result := STree.Child_Node (Current_Node => Result); -- statement or seq_of_stat if STree.Syntax_Node_Type (Node => Result) /= SP_Symbols.statement then Result := STree.Next_Sibling (Current_Node => Result); -- statement end if; return Result; end LastStatementFromEndDesignator; begin -- 
CheckLastStatementOfEnvironmentTaskIsPlainLoop; -- In order to report last statements that not plain loops, we need to find the -- end designator of the main subprogram. Node is main_program_declaration. -- Grammar: -- main_program_declaration -- | -- {xxx} --- not_overriding_subprogram_body -- | -- {xxx} ---- subprogram_implementation -- | -- {xxx} --- designator | hidden_part EndDesigNode := STree.Last_Sibling_Of -- designator or hidden part (Start_Node => STree.Child_Node -- statements etc (STree.Last_Sibling_Of -- subprogram implementation (Start_Node => STree.Child_Node -- some subprog anno item (STree.Last_Sibling_Of -- not_overriding_subprogram_body (Start_Node => STree.Child_Node (Current_Node => Node)))))); -- some main prog anno item -- The above is vulnerable to grammar changes so we perform a run time check here: -- ASSUME EndDesigNode = designator OR hidden_part SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => EndDesigNode) = SP_Symbols.designator or else STree.Syntax_Node_Type (Node => EndDesigNode) = SP_Symbols.hidden_part, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Failed to find end designator node in CheckLastStatementOfEnvironmentTaskIsPlainLoop"); -- We can't do anything about hidden main prgoram, but if there is an end designator then we can check for -- the required loop if STree.Syntax_Node_Type (Node => EndDesigNode) = SP_Symbols.designator then CurrentNode := LastStatementFromEndDesignator (EndDesigNode); if STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => CurrentNode))) = SP_Symbols.loop_statement then -- check loop has no exits LastLoop := Dictionary.LastMostEnclosingLoop (Dictionary.GetMainProgram); if not Dictionary.Is_Null_Symbol (LastLoop) and then Dictionary.GetLoopHasExits (LastLoop) then ErrorHandler.Semantic_Error (Err_Num => 989, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => EndDesigNode), Id_Str => LexTokenManager.Null_String); end if; else -- last statement not a loop ErrorHandler.Semantic_Error (Err_Num => 989, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => EndDesigNode), Id_Str => LexTokenManager.Null_String); end if; end if; end CheckLastStatementOfEnvironmentTaskIsPlainLoop; ------------------------------------------------------------------- function Find_Derives_Node (Node : STree.SyntaxNode) return STree.SyntaxNode --# global in STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.main_program_declaration; --# return Return_Node => STree.Syntax_Node_Type (Return_Node, STree.Table) = SP_Symbols.dependency_relation or --# Return_Node = STree.NullNode; is Result : STree.SyntaxNode; begin Result := STree.Child_Node (Current_Node => Node); -- ASSUME Result = inherit_clause OR main_program_annotation if STree.Syntax_Node_Type (Node => Result) = SP_Symbols.inherit_clause then -- ASSUME Result = inherit_clause Result := STree.Next_Sibling (Current_Node => Result); -- ASSUME Result = main_program_annotation SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Result) = SP_Symbols.main_program_annotation, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = main_program_annotation in Find_Derives_Node"); elsif STree.Syntax_Node_Type (Node => Result) /= SP_Symbols.main_program_annotation then Result := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = inherit_clause OR main_program_annotation in 
Find_Derives_Node"); end if; --# check STree.Syntax_Node_Type (Result, STree.Table) = SP_Symbols.main_program_annotation; Result := STree.Next_Sibling (Current_Node => Result); -- ASSUME Result = moded_global_definition OR not_overriding_subprogram_body if STree.Syntax_Node_Type (Node => Result) = SP_Symbols.moded_global_definition then -- ASSUME Result = moded_global_definition Result := STree.Next_Sibling (Current_Node => Result); -- ASSUME Result = dependency_relation OR not_overriding_subprogram_body if STree.Syntax_Node_Type (Node => Result) = SP_Symbols.not_overriding_subprogram_body then -- ASSUME Result = not_overriding_subprogram_body Result := STree.NullNode; elsif STree.Syntax_Node_Type (Node => Result) /= SP_Symbols.dependency_relation then Result := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = dependency_relation OR not_overriding_subprogram_body in Find_Derives_Node"); end if; elsif STree.Syntax_Node_Type (Node => Result) = SP_Symbols.not_overriding_subprogram_body then Result := STree.NullNode; else Result := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = moded_global_definition OR not_overriding_subprogram_body in Find_Derives_Node"); end if; return Result; end Find_Derives_Node; ------------------------------------------------------------------ -- utilities to simplify adding symbols to sequences and relations ------------------------------------------------------------------ procedure InsertSymbolPair (TheHeap : in out Heap.HeapRecord; Rel : in RelationAlgebra.Relation; Sym1, Sym2 : in Dictionary.Symbol) --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# TheHeap from *, --# Rel, --# Sym1, --# Sym2, --# TheHeap; is begin RelationAlgebra.InsertPair (TheHeap, Rel, Natural (Dictionary.SymbolRef (Sym1)), Natural (Dictionary.SymbolRef (Sym2))); end InsertSymbolPair; ------------------------------------------------------------- procedure InsertSymbol (TheHeap : in out Heap.HeapRecord; Seq : in SeqAlgebra.Seq; Sym : in Dictionary.Symbol) --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# TheHeap from *, --# Seq, --# Sym, --# TheHeap; is begin SeqAlgebra.AddMember (TheHeap, Seq, Natural (Dictionary.SymbolRef (Sym))); end InsertSymbol; ------------------------------------------------------------- -- Constructors for main components of partition check ------------------------------------------------------------- -- Calculates the claimed Rho using the partition level derives annotation procedure BuildPartitionRho --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# in out TheHeap; --# out PartitionExports; --# out PartitionImports; --# out PartitionRho; --# derives PartitionExports, --# PartitionImports, --# PartitionRho from TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# Dictionary.Dict, --# TheHeap; is ExportIt, ImportIt : Dictionary.Iterator; ExportVar, ImportVar : Dictionary.Symbol; begin -- create the sequences and relation we are calculating SeqAlgebra.CreateSeq (TheHeap, PartitionExports); SeqAlgebra.CreateSeq (TheHeap, PartitionImports); RelationAlgebra.CreateRelation (TheHeap, PartitionRho); -- loop through exports and add export/import pairs to Rho ExportIt := Dictionary.FirstExport (Dictionary.IsAbstract, Dictionary.GetThePartition); while not Dictionary.IsNullIterator (ExportIt) loop ExportVar := Dictionary.CurrentSymbol (ExportIt); InsertSymbol (TheHeap, 
PartitionExports, ExportVar); ImportIt := Dictionary.FirstDependency (Dictionary.IsAbstract, Dictionary.GetThePartition, ExportVar); while not Dictionary.IsNullIterator (ImportIt) loop ImportVar := Dictionary.CurrentSymbol (ImportIt); InsertSymbol (TheHeap, PartitionImports, ImportVar); InsertSymbolPair (TheHeap, PartitionRho, ImportVar, ExportVar); ImportIt := Dictionary.NextSymbol (ImportIt); end loop; ExportIt := Dictionary.NextSymbol (ExportIt); end loop; -- now loop through all the imports in case any are associated with "null" ImportIt := Dictionary.FirstImport (Dictionary.IsAbstract, Dictionary.GetThePartition); while not Dictionary.IsNullIterator (ImportIt) loop ImportVar := Dictionary.CurrentSymbol (ImportIt); InsertSymbol (TheHeap, PartitionImports, ImportVar); ImportIt := Dictionary.NextSymbol (ImportIt); end loop; -- add the data sink NullVariable as an import and an export InsertSymbol (TheHeap, PartitionImports, Dictionary.GetNullVariable); InsertSymbol (TheHeap, PartitionExports, Dictionary.GetNullVariable); end BuildPartitionRho; ------------------------------------------------------------- -- Routines to calculate the actual Rho based on all visible tasks -- and interrupt handlers ------------------------------------------------------------- procedure CalculateActualRho --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in EndPosition; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out TaskFound; --# in out TheHeap; --# out ActualExports; --# out ActualImports; --# out ActualRho; --# out SemErrorsFound; --# derives ActualExports, --# ActualImports, --# ActualRho from TheHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# EndPosition, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# SemErrorsFound from Dictionary.Dict & --# Statistics.TableUsage, --# TheHeap from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# TheHeap & --# TaskFound from *, --# Dictionary.Dict; is WithedPackages : Dictionary.Iterator; CurrentPackage : Dictionary.Symbol; CurrentRho : RelationAlgebra.Relation; CurrentImports, CurrentExports : SeqAlgebra.Seq; MainProgram : Dictionary.Symbol; MainProgramOk : Boolean; -- Two sets to collect all the actual exports and imports of protected state -- at the partition level. 
ProtectedExports, ProtectedImports : SeqAlgebra.Seq; ---------------------------------- procedure GetTaskRho (TaskSym : in Dictionary.Symbol; Rho : out RelationAlgebra.Relation; Imports, Exports : out SeqAlgebra.Seq; RhoOk : out Boolean) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives Exports, --# Imports, --# Rho from TheHeap & --# RhoOk from Dictionary.Dict, --# TaskSym & --# Statistics.TableUsage, --# TheHeap from *, --# Dictionary.Dict, --# TaskSym, --# TheHeap; is SuspensionIt, ExportIt, ImportIt : Dictionary.Iterator; ExportVar, ImportVar : Dictionary.Symbol; RhoLocal : RelationAlgebra.Relation; TheTask : Dictionary.Symbol; ImportsLocal, ExportsLocal : SeqAlgebra.Seq; begin -- GetTaskRho SeqAlgebra.CreateSeq (TheHeap, ImportsLocal); SeqAlgebra.CreateSeq (TheHeap, ExportsLocal); RelationAlgebra.CreateRelation (TheHeap, RhoLocal); RhoOk := True; -- We need to use TaskSym directly if it is the main program (a subprogram), -- or, if it is a task variable, we need to use its type because that is what -- the dependency relation of a task is tied to in the dictionary. if Dictionary.IsMainProgram (TaskSym) then TheTask := TaskSym; else -- a task object TheTask := Dictionary.GetRootType (Dictionary.GetType (TaskSym)); end if; if Dictionary.Is_Declared (Item => TheTask) and then -- trap tasks types in hidden pack priv parts Dictionary.SubprogramSignatureIsWellformed (Dictionary.IsAbstract, TheTask) then ExportIt := Dictionary.FirstExport (Dictionary.IsAbstract, TheTask); while not Dictionary.IsNullIterator (ExportIt) loop ExportVar := Dictionary.CurrentSymbol (ExportIt); InsertSymbol (TheHeap, ExportsLocal, ExportVar); -- We add in each thing the task suspends on as an import here because at the -- partition level, suspension affects information flow by affecting which -- tasks run SuspensionIt := Dictionary.FirstSuspendsListItem (TheTask); while not Dictionary.IsNullIterator (SuspensionIt) loop InsertSymbolPair (TheHeap, RhoLocal, Dictionary.CurrentSymbol (SuspensionIt), ExportVar); InsertSymbol (TheHeap, ImportsLocal, Dictionary.CurrentSymbol (SuspensionIt)); SuspensionIt := Dictionary.NextSymbol (SuspensionIt); end loop; -- add any explicit imports ImportIt := Dictionary.FirstDependency (Dictionary.IsAbstract, TheTask, ExportVar); while not Dictionary.IsNullIterator (ImportIt) loop ImportVar := Dictionary.CurrentSymbol (ImportIt); InsertSymbolPair (TheHeap, RhoLocal, ImportVar, ExportVar); InsertSymbol (TheHeap, ImportsLocal, ImportVar); ImportIt := Dictionary.NextSymbol (ImportIt); end loop; ExportIt := Dictionary.NextSymbol (ExportIt); end loop; else RhoOk := False; end if; Rho := RhoLocal; Exports := ExportsLocal; Imports := ImportsLocal; end GetTaskRho; ---------------------------------- procedure GetInterruptRho (SubprogSym : in Dictionary.Symbol; OwnVar : in Dictionary.Symbol; Rho : out RelationAlgebra.Relation; Imports, Exports : out SeqAlgebra.Seq; RhoOk : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives Exports, --# Imports, --# Rho from TheHeap & --# RhoOk from Dictionary.Dict, --# SubprogSym & --# Statistics.TableUsage, --# TheHeap from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# OwnVar, --# SubprogSym, --# TheHeap; is ExportIt, ImportIt : Dictionary.Iterator; ExportVar, ImportVar : Dictionary.Symbol; RhoLocal : RelationAlgebra.Relation; ImportsLocal, ExportsLocal : 
SeqAlgebra.Seq; -- substitutes the own variable name for the type name in a protected proc. function SubstituteProtectedTypeSelfReference (Sym, OwnVar : Dictionary.Symbol) return Dictionary.Symbol --# global in Dictionary.Dict; is Result : Dictionary.Symbol; begin Result := Sym; if Dictionary.IsOwnVariable (Sym) and then Dictionary.IsProtectedType (Dictionary.GetOwner (Sym)) then Result := OwnVar; end if; return Result; end SubstituteProtectedTypeSelfReference; --------------------------------------------------------------- begin --GetInterruptRho SeqAlgebra.CreateSeq (TheHeap, ImportsLocal); SeqAlgebra.CreateSeq (TheHeap, ExportsLocal); RelationAlgebra.CreateRelation (TheHeap, RhoLocal); RhoOk := True; if Dictionary.SubprogramSignatureIsWellformed (Dictionary.IsAbstract, SubprogSym) then ExportIt := Dictionary.FirstExport (Dictionary.IsAbstract, SubprogSym); while not Dictionary.IsNullIterator (ExportIt) loop ExportVar := Dictionary.CurrentSymbol (ExportIt); InsertSymbol (TheHeap, ExportsLocal, SubstituteProtectedTypeSelfReference (ExportVar, OwnVar)); -- For interrupt handlers, we add the name of the protected object (or the interrupt stream -- associated with it as an import; -- in effect, we either regard the PO as protecting a notional stream which -- represents the "stream" of interrupts being generated by the environment or we use the -- user-named interrupt stream as the source of interrupts. InsertSymbolPair (TheHeap, RhoLocal, Dictionary.GetInterruptStreamVariable (ProtectedObject => OwnVar, InterruptHandler => SubprogSym), -- the import SubstituteProtectedTypeSelfReference (ExportVar, OwnVar)); -- the export InsertSymbol (TheHeap, ImportsLocal, Dictionary.GetInterruptStreamVariable (ProtectedObject => OwnVar, InterruptHandler => SubprogSym)); -- now add all the actualimports associated with ExportVar ImportIt := Dictionary.FirstDependency (Dictionary.IsAbstract, SubprogSym, ExportVar); while not Dictionary.IsNullIterator (ImportIt) loop ImportVar := Dictionary.CurrentSymbol (ImportIt); InsertSymbolPair (TheHeap, RhoLocal, SubstituteProtectedTypeSelfReference (ImportVar, OwnVar), SubstituteProtectedTypeSelfReference (ExportVar, OwnVar)); InsertSymbol (TheHeap, ImportsLocal, SubstituteProtectedTypeSelfReference (ImportVar, OwnVar)); ImportIt := Dictionary.NextSymbol (ImportIt); end loop; ExportIt := Dictionary.NextSymbol (ExportIt); end loop; else RhoOk := False; end if; -- assign exports Rho := RhoLocal; Exports := ExportsLocal; Imports := ImportsLocal; end GetInterruptRho; ---------------------------------- procedure ProcessCurrentPackage (PackSym : in Dictionary.Symbol; Rho : out RelationAlgebra.Relation; PackageExports, PackageImports : out SeqAlgebra.Seq) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in EndPosition; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SemErrorsFound; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out TaskFound; --# in out TheHeap; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# EndPosition, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# PackSym, --# SPARK_IO.File_Sys & --# PackageExports, --# PackageImports, --# Rho from TheHeap & --# SemErrorsFound, --# TaskFound from *, --# Dictionary.Dict, --# PackSym & --# Statistics.TableUsage, --# TheHeap from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# PackSym, --# TheHeap; is PackageRho, RhoLocal : 
RelationAlgebra.Relation; PackageImportsLocal, PackageExportsLocal, ImportsLocal, ExportsLocal : SeqAlgebra.Seq; RhoLocalOk : Boolean; It, SubprogramIt : Dictionary.Iterator; begin SeqAlgebra.CreateSeq (TheHeap, PackageExportsLocal); SeqAlgebra.CreateSeq (TheHeap, PackageImportsLocal); RelationAlgebra.CreateRelation (TheHeap, PackageRho); -- process all the tasks in the package It := Dictionary.FirstOwnTask (PackSym); while not Dictionary.IsNullIterator (It) loop TaskFound := True; GetTaskRho (TaskSym => Dictionary.CurrentSymbol (It), Rho => RhoLocal, Imports => ImportsLocal, Exports => ExportsLocal, RhoOk => RhoLocalOk); if RhoLocalOk then RelationAlgebra.AugmentRelation (TheHeap, PackageRho, RhoLocal); SeqAlgebra.AugmentSeq (TheHeap, PackageExportsLocal, ExportsLocal); SeqAlgebra.AugmentSeq (TheHeap, PackageImportsLocal, ImportsLocal); else SemErrorsFound := True; ErrorHandler.Semantic_Warning_Sym (Err_Num => 410, Position => EndPosition, Sym => Dictionary.CurrentSymbol (It), Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Dictionary.GetMainProgram)); end if; SeqAlgebra.DisposeOfSeq (TheHeap, ImportsLocal); SeqAlgebra.DisposeOfSeq (TheHeap, ExportsLocal); RelationAlgebra.DisposeOfRelation (TheHeap, RhoLocal); It := Dictionary.NextSymbol (It); end loop; -- process all the interrupts in the package It := Dictionary.FirstOwnVariable (PackSym); while not Dictionary.IsNullIterator (It) loop if Dictionary.GetHasInterruptProperty (Dictionary.CurrentSymbol (It)) then -- a protected own variable with an interrupt handler has been found if Dictionary.Is_Declared (Item => Dictionary.GetRootType (Dictionary.GetType (Dictionary.CurrentSymbol (It)))) then -- the protected type is not hidden in a hidden package private parts so we can process it SubprogramIt := Dictionary.First_Visible_Subprogram (The_Package_Or_Type => Dictionary.GetRootType (Dictionary.GetType (Dictionary.CurrentSymbol (It)))); while not Dictionary.IsNullIterator (SubprogramIt) loop if Dictionary.IsInterruptHandler (Dictionary.CurrentSymbol (SubprogramIt)) then -- found symbol of an interrupt handling procedure GetInterruptRho (SubprogSym => Dictionary.CurrentSymbol (SubprogramIt), OwnVar => Dictionary.CurrentSymbol (It), Rho => RhoLocal, Imports => ImportsLocal, Exports => ExportsLocal, RhoOk => RhoLocalOk); if RhoLocalOk then RelationAlgebra.AugmentRelation (TheHeap, PackageRho, RhoLocal); SeqAlgebra.AugmentSeq (TheHeap, PackageExportsLocal, ExportsLocal); SeqAlgebra.AugmentSeq (TheHeap, PackageImportsLocal, ImportsLocal); else SemErrorsFound := True; ErrorHandler.Semantic_Warning_Sym (Err_Num => 410, Position => EndPosition, Sym => Dictionary.CurrentSymbol (It), Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Dictionary.GetMainProgram)); end if; SeqAlgebra.DisposeOfSeq (TheHeap, ImportsLocal); SeqAlgebra.DisposeOfSeq (TheHeap, ExportsLocal); RelationAlgebra.DisposeOfRelation (TheHeap, RhoLocal); end if; SubprogramIt := Dictionary.NextSymbol (SubprogramIt); end loop; else -- the announced protected type is hidden and cannot be processed SemErrorsFound := True; ErrorHandler.Semantic_Warning_Sym (Err_Num => 410, Position => EndPosition, Sym => Dictionary.CurrentSymbol (It), Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Dictionary.GetMainProgram)); end if; end if; It := Dictionary.NextSymbol (It); end loop; -- assign exports Rho := PackageRho; PackageExports := PackageExportsLocal; PackageImports := PackageImportsLocal; end 
ProcessCurrentPackage; ---------------------------------- begin -- CalculateActualRho SemErrorsFound := False; SeqAlgebra.CreateSeq (TheHeap, ActualExports); SeqAlgebra.CreateSeq (TheHeap, ActualImports); SeqAlgebra.CreateSeq (TheHeap, ProtectedExports); SeqAlgebra.CreateSeq (TheHeap, ProtectedImports); RelationAlgebra.CreateRelation (TheHeap, ActualRho); -- We need to search all the packages withed by the main program and process -- every task therein. -- We also need to include the flow relation of the main -- program itself which is also a task. MainProgram := Dictionary.GetMainProgram; GetTaskRho (MainProgram, CurrentRho, CurrentImports, CurrentExports, MainProgramOk); if MainProgramOk then SeqAlgebra.AugmentSeq (TheHeap, ActualImports, CurrentImports); SeqAlgebra.AugmentSeq (TheHeap, ActualExports, CurrentExports); RelationAlgebra.AugmentRelation (TheHeap, ActualRho, CurrentRho); else ErrorHandler.Semantic_Warning_Sym (Err_Num => 410, Position => EndPosition, Sym => MainProgram, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => MainProgram)); SemErrorsFound := True; end if; RelationAlgebra.DisposeOfRelation (TheHeap, CurrentRho); SeqAlgebra.DisposeOfSeq (TheHeap, CurrentImports); SeqAlgebra.DisposeOfSeq (TheHeap, CurrentExports); WithedPackages := Dictionary.FirstInheritsClause (MainProgram); while not Dictionary.IsNullIterator (WithedPackages) loop CurrentPackage := Dictionary.CurrentSymbol (WithedPackages); if Dictionary.Is_Withed (The_Withed_Symbol => CurrentPackage, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => MainProgram)) then ProcessCurrentPackage (CurrentPackage, -- to get CurrentRho, CurrentExports, CurrentImports); RelationAlgebra.AugmentRelation (TheHeap, ActualRho, CurrentRho); RelationAlgebra.DisposeOfRelation (TheHeap, CurrentRho); SeqAlgebra.AugmentSeq (TheHeap, ActualExports, CurrentExports); SeqAlgebra.AugmentSeq (TheHeap, ActualImports, CurrentImports); SeqAlgebra.DisposeOfSeq (TheHeap, CurrentImports); SeqAlgebra.DisposeOfSeq (TheHeap, CurrentExports); end if; WithedPackages := Dictionary.NextSymbol (WithedPackages); end loop; SeqAlgebra.DisposeOfSeq (TheHeap, ProtectedExports); SeqAlgebra.DisposeOfSeq (TheHeap, ProtectedImports); end CalculateActualRho; ------------------------------------------------------------- -- Routines to compare the claimed and actual partition flow -- relations and issue suitable error messages ------------------------------------------------------------- procedure CompareRelations --# global in ActualRho; --# in CommandLineData.Content; --# in Dictionary.Dict; --# in EndPosition; --# in LexTokenManager.State; --# in PartitionExports; --# in PartitionImports; --# in PartitionRho; --# in out ErrorHandler.Error_Context; --# in out FlowErrorsFound; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from ActualRho, --# CommandLineData.Content, --# Dictionary.Dict, --# EndPosition, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# PartitionExports, --# PartitionImports, --# PartitionRho, --# SPARK_IO.File_Sys, --# TheHeap & --# FlowErrorsFound from *, --# ActualRho, --# CommandLineData.Content, --# Dictionary.Dict, --# PartitionExports, --# PartitionImports, --# PartitionRho, --# TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# ActualRho, --# Dictionary.Dict, --# PartitionExports, --# PartitionRho, --# TheHeap; is DependencyCol, RhoCol : SeqAlgebra.Seq; 
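-- Added commentary (not part of the original source): the comparison below
-- works column-by-column.  For each export E claimed by the partition
-- annotation (excluding in-mode streams and the null variable):
--    DependencyCol := imports claimed for E in PartitionRho;
--    RhoCol        := imports actually feeding E in ActualRho.
-- Claimed imports missing from RhoCol give "not used" dependency errors
-- (suppressed in data-flow mode); actual imports missing from
-- DependencyCol give "may be used" errors if they were declared as
-- partition imports, and "uninitialised" errors otherwise.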
MemberOfExports : SeqAlgebra.MemberOfSeq; ExpVarRep : Natural; ExportVar : Dictionary.Symbol; MemberOfDependencyCol : SeqAlgebra.MemberOfSeq; DepRep : Natural; DepSym : Dictionary.Symbol; MemberOfRhoCol : SeqAlgebra.MemberOfSeq; RhoSym : Dictionary.Symbol; RhoRep : Natural; Scope : Dictionary.Scopes; -- Function introduced to make the body of CompareRelations cleaner function IsAnOutStream (Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return Dictionary.GetOwnVariableOrConstituentMode (Dictionary.GetMostEnclosingObject (Sym)) = Dictionary.OutMode; end IsAnOutStream; begin --CompareRelations Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Dictionary.GetMainProgram); MemberOfExports := SeqAlgebra.FirstMember (TheHeap, PartitionExports); while not SeqAlgebra.IsNullMember (MemberOfExports) loop ExpVarRep := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => MemberOfExports); ExportVar := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (ExpVarRep)); -- Do not process any dependency stuff associated with implicit exports of mode in if Dictionary.GetOwnVariableOrConstituentMode (ExportVar) /= Dictionary.InMode and then -- nor anything to do with exporting of the "data sink" null variable not Dictionary.Is_Null_Variable (ExportVar) then RelationAlgebra.ColExtraction (TheHeap, PartitionRho, SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => MemberOfExports), -- to get DependencyCol); RelationAlgebra.ColExtraction (TheHeap, ActualRho, ExpVarRep, -- to get RhoCol); MemberOfDependencyCol := SeqAlgebra.FirstMember (TheHeap, DependencyCol); -- If the spec says "derives Y from X" then there should be an entry in -- the Rho relation for this dependency. If not then flow error 50 is raised. -- This check is not performed in data-flow analysis mode. 
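-- Illustrative example (added, hypothetical names): if the partition
-- annotation claims "derives State from Sensor", the pair (Sensor, State)
-- must appear somewhere in ActualRho, i.e. some task or interrupt handler
-- must actually derive State from Sensor; otherwise the Not_Used
-- dependency error below is reported at the main program's end designator
-- (EndPosition).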
while CommandLineData.Content.Flow_Option /= CommandLineData.Data_Flow and then not SeqAlgebra.IsNullMember (MemberOfDependencyCol) loop DepRep := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => MemberOfDependencyCol); DepSym := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (DepRep)); if not SeqAlgebra.IsMember (TheHeap, RhoCol, DepRep) then FlowErrorsFound := True; ErrorHandler.Dependency_Error (Err_Type => ErrorHandler.Not_Used, Position => EndPosition, Import_Var_Sym => DepSym, Export_Var_Sym => ExportVar, Scope => Scope); end if; MemberOfDependencyCol := SeqAlgebra.NextMember (TheHeap, MemberOfDependencyCol); end loop; MemberOfRhoCol := SeqAlgebra.FirstMember (TheHeap, RhoCol); while not SeqAlgebra.IsNullMember (MemberOfRhoCol) loop RhoRep := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => MemberOfRhoCol); RhoSym := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (RhoRep)); --add guard to prevent dependencies on OUT streams being reported if not IsAnOutStream (RhoSym) then if not SeqAlgebra.IsMember (TheHeap, DependencyCol, RhoRep) then -- select which error message to use if SeqAlgebra.IsMember (TheHeap, PartitionImports, RhoRep) then FlowErrorsFound := True; ErrorHandler.Dependency_Error (Err_Type => ErrorHandler.May_Be_Used, Position => EndPosition, Import_Var_Sym => RhoSym, Export_Var_Sym => ExportVar, Scope => Scope); else -- not imported, so undefined FlowErrorsFound := True; ErrorHandler.Dependency_Error (Err_Type => ErrorHandler.Uninitialised, Position => EndPosition, Import_Var_Sym => RhoSym, Export_Var_Sym => ExportVar, Scope => Scope); end if; -- cfr1203 -- end if; end if; end if; MemberOfRhoCol := SeqAlgebra.NextMember (TheHeap, MemberOfRhoCol); end loop; SeqAlgebra.DisposeOfSeq (TheHeap, DependencyCol); SeqAlgebra.DisposeOfSeq (TheHeap, RhoCol); -- cfr1203 -- end if; end if; MemberOfExports := SeqAlgebra.NextMember (TheHeap, MemberOfExports); end loop; end CompareRelations; ---------------------------------- -- check that all actual imports/exports appear in aprtition somewhere procedure CheckUsages --# global in ActualExports; --# in ActualImports; --# in CommandLineData.Content; --# in Dictionary.Dict; --# in EndPosition; --# in LexTokenManager.State; --# in PartitionExports; --# in PartitionImports; --# in out ErrorHandler.Error_Context; --# in out FlowErrorsFound; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from ActualExports, --# ActualImports, --# CommandLineData.Content, --# Dictionary.Dict, --# EndPosition, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# PartitionExports, --# PartitionImports, --# SPARK_IO.File_Sys, --# TheHeap & --# FlowErrorsFound from *, --# ActualExports, --# ActualImports, --# Dictionary.Dict, --# PartitionExports, --# PartitionImports, --# TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# ActualExports, --# ActualImports, --# PartitionExports, --# PartitionImports, --# TheHeap; is Mem : SeqAlgebra.MemberOfSeq; Sym : Dictionary.Symbol; TempSeq : SeqAlgebra.Seq; begin -- CheckUsages -- Check that things claimed as Imports in partition Rho are actually imported somewhere SeqAlgebra.Complement (TheHeap, PartitionImports, ActualImports, -- to get TempSeq); Mem := SeqAlgebra.FirstMember (TheHeap, TempSeq); while not SeqAlgebra.IsNullMember (Mem) loop Sym := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => Mem))); if not 
Dictionary.Is_Null_Variable (Sym) then FlowErrorsFound := True; ErrorHandler.Usage_Error (Err_Type => ErrorHandler.Ineffective_Import, Position => EndPosition, Var_Sym => Sym, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Dictionary.GetMainProgram)); end if; Mem := SeqAlgebra.NextMember (TheHeap, Mem); end loop; SeqAlgebra.DisposeOfSeq (TheHeap, TempSeq); -- Check that things claimed as Exports in partition Rho are actually exported somewhere SeqAlgebra.Complement (TheHeap, PartitionExports, ActualExports, -- to get TempSeq); Mem := SeqAlgebra.FirstMember (TheHeap, TempSeq); while not SeqAlgebra.IsNullMember (Mem) loop Sym := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => Mem))); if not Dictionary.Is_Null_Variable (Sym) -- don't report updating of null -- following covers implicit updating of in streams that should not be reported and then (Dictionary.GetOwnVariableOrConstituentMode (Dictionary.GetMostEnclosingObject (Sym)) /= Dictionary.InMode) then FlowErrorsFound := True; ErrorHandler.Usage_Error (Err_Type => ErrorHandler.Undefined_Export, Position => EndPosition, Var_Sym => Sym, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Dictionary.GetMainProgram)); end if; Mem := SeqAlgebra.NextMember (TheHeap, Mem); end loop; SeqAlgebra.DisposeOfSeq (TheHeap, TempSeq); -- Check that things Actually Exported are also Exports of the partition Rho SeqAlgebra.Reduction (TheHeap, ActualExports, PartitionExports); -- anything left in ActualExports is exported but undefined (as far as partition anno is concerned) Mem := SeqAlgebra.FirstMember (TheHeap, ActualExports); while not SeqAlgebra.IsNullMember (Mem) loop Sym := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => Mem))); if Dictionary.GetOwnVariableOrConstituentMode (Dictionary.GetMostEnclosingObject (Sym)) /= Dictionary.InMode then FlowErrorsFound := True; ErrorHandler.Usage_Error (Err_Type => ErrorHandler.Updated_But_Not_In_Partition, Position => EndPosition, Var_Sym => Sym, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Dictionary.GetMainProgram)); end if; -- cfr1203 -- end if; Mem := SeqAlgebra.NextMember (TheHeap, Mem); end loop; -- Check that things Actually Imported are also Imports of the partition Rho SeqAlgebra.Reduction (TheHeap, ActualImports, PartitionImports); -- anything left in ActualImports is an ineffective import (as far as partition anno is concerned) Mem := SeqAlgebra.FirstMember (TheHeap, ActualImports); while not SeqAlgebra.IsNullMember (Mem) loop Sym := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => Mem))); if Dictionary.GetOwnVariableOrConstituentMode (Dictionary.GetMostEnclosingObject (Sym)) /= Dictionary.OutMode then FlowErrorsFound := True; ErrorHandler.Usage_Error (Err_Type => ErrorHandler.Referenced_But_Not_In_Partition, Position => EndPosition, Var_Sym => Sym, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Dictionary.GetMainProgram)); end if; -- cfr1203 -- end if; Mem := SeqAlgebra.NextMember (TheHeap, Mem); end loop; end CheckUsages; begin -- FlowAnalysePartition Dependency_Node := Find_Derives_Node (Node => Node); -- ASSUME Dependency_Node = dependency_relation OR NULL --# check STree.Syntax_Node_Type (Dependency_Node, STree.Table) = SP_Symbols.dependency_relation or --# Dependency_Node 
= STree.NullNode; if Dependency_Node = STree.NullNode then -- ASSUME Dependency_Node = NULL EndPosition := LexTokenManager.Null_Token_Position; else -- ASSUME Dependency_Node = dependency_relation EndPosition := STree.FindLastItemInDependencyRelation (Node => Dependency_Node); end if; BuildPartitionRho; -- this is the partition-wide dependency asserted by the partition annotation CalculateActualRho; -- sets SemErrorsFound if not SemErrorsFound then RelationAlgebra.CloseRelation (TheHeap, ActualRho); -- this is the closure of the union of all tasks and interrupts CompareRelations; CheckUsages; end if; RelationAlgebra.DisposeOfRelation (TheHeap, PartitionRho); RelationAlgebra.DisposeOfRelation (TheHeap, ActualRho); SeqAlgebra.DisposeOfSeq (TheHeap, PartitionExports); SeqAlgebra.DisposeOfSeq (TheHeap, PartitionImports); SeqAlgebra.DisposeOfSeq (TheHeap, ActualExports); SeqAlgebra.DisposeOfSeq (TheHeap, ActualImports); if not FlowErrorsFound and then not SemErrorsFound and then CommandLineData.Content.Flow_Option /= CommandLineData.Data_Flow then ErrorHandler.Report_Success (Position => EndPosition, Subprog_Str => LexTokenManager.Main_Program_Token, Err_Num => ErrorHandler.No_Error_Default); end if; -- If the main program does not WITH any tasks, then the only task is the environment task itself. In -- this case, the task must end with a plain loop. It does not need to do so if there is at least one withed -- task since the endless loop in that task will be enough to prevent program termination. if not TaskFound then CheckLastStatementOfEnvironmentTaskIsPlainLoop; end if; end FlowAnalysePartition; spark-2012.0.deb/examiner/errorhandler-appendsym.adb0000644000175000017500000001237211753202336021413 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (ErrorHandler) procedure AppendSym (Error : in out Error_Types.StringError; Sym : in SP_Symbols.SP_Symbol) is --# hide AppendSym; procedure Append_Reserved_Word (S : String) --# global in out Error; --# derives Error from *, --# S; is begin Append_String (E_Str => Error, Str => S (S'First + 2 .. S'Last)); end Append_Reserved_Word; ------------------------------------------ begin --AppendSym case Sym is when SP_Symbols.RWabort ..
SP_Symbols.RWxor => Append_String (E_Str => Error, Str => "reserved word """); Append_Reserved_Word (S => SP_Symbols.SP_Symbol'Image (Sym)); Append_String (E_Str => Error, Str => """"); when SP_Symbols.ampersand => Append_String (E_Str => Error, Str => """&"""); when SP_Symbols.apostrophe => Append_String (E_Str => Error, Str => """'"""); when SP_Symbols.left_paren => Append_String (E_Str => Error, Str => """("""); when SP_Symbols.right_paren => Append_String (E_Str => Error, Str => """)"""); when SP_Symbols.multiply => Append_String (E_Str => Error, Str => """*"""); when SP_Symbols.plus => Append_String (E_Str => Error, Str => """+"""); when SP_Symbols.comma => Append_String (E_Str => Error, Str => ""","""); when SP_Symbols.minus => Append_String (E_Str => Error, Str => """-"""); when SP_Symbols.point => Append_String (E_Str => Error, Str => """."""); when SP_Symbols.divide => Append_String (E_Str => Error, Str => """/"""); when SP_Symbols.colon => Append_String (E_Str => Error, Str => """:"""); when SP_Symbols.semicolon => Append_String (E_Str => Error, Str => """;"""); when SP_Symbols.less_than => Append_String (E_Str => Error, Str => """<"""); when SP_Symbols.equals => Append_String (E_Str => Error, Str => """="""); when SP_Symbols.greater_than => Append_String (E_Str => Error, Str => """>"""); when SP_Symbols.vertical_bar => Append_String (E_Str => Error, Str => """|"""); when SP_Symbols.tilde => Append_String (E_Str => Error, Str => """~"""); when SP_Symbols.arrow => Append_String (E_Str => Error, Str => """=>"""); when SP_Symbols.double_dot => Append_String (E_Str => Error, Str => """.."""); when SP_Symbols.double_star => Append_String (E_Str => Error, Str => """**"""); when SP_Symbols.becomes => Append_String (E_Str => Error, Str => """:="""); when SP_Symbols.not_equal => Append_String (E_Str => Error, Str => """/="""); when SP_Symbols.greater_or_equal => Append_String (E_Str => Error, Str => """>="""); when SP_Symbols.less_or_equal => Append_String (E_Str => Error, Str => """<="""); when SP_Symbols.box => Append_String (E_Str => Error, Str => """<>"""); when SP_Symbols.implies => Append_String (E_Str => Error, Str => """->"""); when SP_Symbols.is_equivalent_to => Append_String (E_Str => Error, Str => """<->"""); when SP_Symbols.square_open => Append_String (E_Str => Error, Str => """["""); when SP_Symbols.square_close => Append_String (E_Str => Error, Str => """]"""); when others => Append_String (E_Str => Error, Str => SP_Symbols.SP_Symbol'Image (Sym)); end case; end AppendSym; spark-2012.0.deb/examiner/maths-literaltovalue.adb0000644000175000017500000001335311753202336021074 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Maths) procedure LiteralToValue (Str : in LexTokenManager.Lex_String; Num : out Value; OK : out ErrorCode) is Decimal_Point_Found, Exponent_Found, Base_Found : Boolean; Base : Natural; Core_String, Exp_String : E_Strings.T; Exp_Sign : Character; Places_After_Point : E_Strings.Lengths; Legal_Syntax : Boolean; Exponent : Integer; Num_Local : Value; OK_Local : ErrorCode; --------------------------------------------------------- procedure Make_Integer --# global in Base; --# in Base_Found; --# in Core_String; --# in Exponent; --# in Exponent_Found; --# in out Num_Local; --# derives Num_Local from *, --# Base, --# Base_Found, --# Core_String, --# Exponent, --# Exponent_Found; is Core_Local : E_Strings.T; ------------------------------- procedure Pad_String_With_Zeros (Str : in out E_Strings.T; By : in Natural) --# derives Str from *, --# By; is begin for I in Integer range 1 .. By loop E_Strings.Append_Char (E_Str => Str, Ch => '0'); end loop; end Pad_String_With_Zeros; ------------------------------- begin -- Make_Integer Num_Local.Sort := IntegerValue; Core_Local := Core_String; if Exponent_Found then Pad_String_With_Zeros (Str => Core_Local, By => Exponent); end if; if Base_Found then Num_Local.Numerator := StripLeadingZeros (String_To_Part (Base => Base, Str => Core_Local)); else Num_Local.Numerator := StripLeadingZeros (Dec_String_To_Part (Str => Core_Local)); end if; end Make_Integer; --------------------------------------------------------- procedure Make_Real --# global in Core_String; --# in Exponent; --# in Places_After_Point; --# in out Num_Local; --# derives Num_Local from *, --# Core_String, --# Exponent, --# Places_After_Point; is Exp_Local : Integer; Den_Part, Num_Part : Part; begin Exp_Local := Exponent; Num_Local.Sort := RealValue; Num_Part := Dec_String_To_Part (Str => Core_String); Den_Part := OnePart; Exp_Local := Exp_Local - Places_After_Point; if Exp_Local > 0 then for i in Integer range 1 .. Exp_Local loop Num_Part := ShiftUpPart (Num_Part); end loop; elsif Exp_Local < 0 then Exp_Local := -Exp_Local; for i in Integer range 1 .. 
Exp_Local loop Den_Part := ShiftUpPart (Den_Part); end loop; --else do nothing about exponent end if; Num_Local.Numerator := Num_Part; Num_Local.Denominator := Den_Part; Normalize (Num_Local); end Make_Real; --------------------------------------------------------- begin -- LiteralToValue if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq then Num := NoValue; OK_Local := NoError; else Num_Local := NoValue; --must return something if error ParseString (LexTokenManager.Lex_String_To_String (Lex_Str => Str), Decimal_Point_Found, Exponent_Found, Base_Found, Base, Core_String, Exp_String, Exp_Sign, Places_After_Point, Legal_Syntax); if Legal_Syntax then OK_Local := NoError; Exponent := String_To_Natural (Str => Exp_String); if Exp_Sign = '-' then Exponent := -Exponent; end if; if Decimal_Point_Found then Make_Real; else Make_Integer; end if; else OK_Local := IllegalValue; end if; OK_Local := OverflowCheck (OK_Local, Num_Local); if OK_Local = NoError then Num := Num_Local; else Num := NoValue; end if; end if; OK := OK_Local; end LiteralToValue; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_ancestor_part.adb0000644000175000017500000004056611753202336024610 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Walk_Expression_P) procedure Wf_Ancestor_Part (Node : in out STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; Heap_Param : in out Lists.List_Heap) is Qualifying_Expression, Ancestor_Part : Sem.Exp_Record; Ptr : Lists.List; -------------------------------------------------------------------- function Has_Intervening_Private_Extensions (Root_Type, Extended_Type : Dictionary.Symbol; Scope : Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; -- pre Dictionary.IsAnExtensionOf (Root_Type, Extended_Type); is Result : Boolean := False; Current_Record : Dictionary.Symbol; begin Current_Record := Extended_Type; loop -- follow chain of Inherit field pointers Current_Record := Dictionary.CurrentSymbol (Dictionary.FirstRecordComponent (Current_Record)); exit when Dictionary.Is_Null_Symbol (Current_Record); -- root record is a null record Current_Record := Dictionary.GetType (Current_Record); exit when Dictionary.Types_Are_Equal (Left_Symbol => Current_Record, Right_Symbol => Root_Type, Full_Range_Subtype => False); exit when not Dictionary.TypeIsTagged (Current_Record); -- all fields checked - false result exit if Dictionary.IsPrivateType (Current_Record, Scope) then Result := True; exit; -- true result exit end if; end loop; return Result; end Has_Intervening_Private_Extensions; -------------------------------------------------------------------- function Is_Null_Agregate (Node : STree.SyntaxNode) return Boolean --# global in STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.ancestor_part or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_ancestor_part; is Next_Node : STree.SyntaxNode; begin Next_Node := STree.Next_Sibling (Current_Node => Node); -- ASSUME Next_Node = record_component_association OR annotation_record_component_association OR RWnull SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.record_component_association or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_record_component_association or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.RWnull, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = record_component_association OR annotation_record_component_association OR " & "RWnull in Is_Null_Agregate"); return STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.RWnull; end Is_Null_Agregate; -------------------------------------------------------------------- function Fields_Need_Defining (Root_Type, Extended_Type : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return Dictionary.GetNumberOfComponents (Extended_Type) > Dictionary.GetNumberOfComponents (Root_Type); end Fields_Need_Defining; -------------------------------------------------------------------- function Association_Is_Named (Node : STree.SyntaxNode) return Boolean --# global in STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.ancestor_part or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_ancestor_part; is Next_Node : STree.SyntaxNode; Result : Boolean; begin Next_Node := STree.Next_Sibling (Current_Node => Node); -- ASSUME Next_Node = record_component_association OR annotation_record_component_association OR RWnull if STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.record_component_association or else STree.Syntax_Node_Type (Node => 
Next_Node) = SP_Symbols.annotation_record_component_association then -- ASSUME Next_Node = record_component_association OR annotation_record_component_association Next_Node := STree.Child_Node (Current_Node => Next_Node); -- ASSUME Next_Node = named_record_component_association OR positional_record_component_association OR -- annotation_named_record_component_association OR annotation_positional_record_component_association if STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.named_record_component_association or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_named_record_component_association then Result := True; elsif STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.positional_record_component_association or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_positional_record_component_association then Result := False; else Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = named_record_component_association OR positional_record_component_association OR " & "annotation_named_record_component_association OR annotation_positional_record_component_association " & "in Association_Is_Named"); end if; elsif STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.RWnull then -- ASSUME Next_Node = RWnull Result := False; else Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = record_component_association OR annotation_record_component_association OR " & "RWnull in Association_Is_Named"); end if; return Result; end Association_Is_Named; -------------------------------------------------------------------- -- Assume: -- 1. Node is [annotation_]ancestor_part going up -- 2. TOS is result of walking ancestor part expression -- 3. 2nd TOS is result of walking qualifier -- -- Check: -- 1. Qualifier is an extended tagged type -- 2. Ancestor part represents an ancestor of the extended type -- 3. There are no private extensions between the ancestor and the qualifier -- 4. If the aggregate part is null record then there are no new components -- between ancestor part and the qualifier. 
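-- Illustrative sketch (added for clarity; the types Root and Child are
-- hypothetical and not taken from this source):
--   type Root  is tagged record A : Integer; end record;
--   type Child is new Root with record B : Integer; end record;
-- In the extension aggregate Child'(Root'(A => 1) with B => 2) the ancestor
-- part is Root'(A => 1) and the qualifying type mark is Child. Check 3
-- above rejects the aggregate if a private extension lies between Root and
-- Child, and check 4 rejects Child'(Root'(A => 1) with null record) because
-- Child declares a new component (B) beyond those of Root.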
-------------------------------------------------------------------- begin -- Wf_Ancestor_Part Exp_Stack.Pop (Item => Ancestor_Part, Stack => E_Stack); Exp_Stack.Pop (Item => Qualifying_Expression, Stack => E_Stack); -- seed syntax tree with type of ancestor for use in VCG STree.Add_Node_Symbol (Node => Node, Sym => Ancestor_Part.Type_Symbol); if Qualifying_Expression.Sort = Sem.Is_Type_Mark then -- Correctly formed qualifying expression so carry on with rest of checks Qualifying_Expression.Is_Constant := True; if not Dictionary.TypeIsExtendedTagged (Qualifying_Expression.Type_Symbol) then Exp_Stack.Push (X => Sem.Unknown_Type_Record, Stack => E_Stack); ErrorHandler.Semantic_Error_Sym (Err_Num => 835, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => STree.Parent_Node (Current_Node => STree.Parent_Node (Current_Node => Node))), Sym => Qualifying_Expression.Type_Symbol, Scope => Scope); -- move up so as to prevent walk of rest of illegal aggregate Node := STree.Parent_Node (Current_Node => Node); -- ASSUME Node = extension_aggregate OR annotation_extension_aggregate SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.extension_aggregate or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_extension_aggregate, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = extension_aggregate OR annotation_extension_aggregate in Wf_Ancestor_Part"); elsif not Dictionary.IsAnExtensionOf (Ancestor_Part.Type_Symbol, Qualifying_Expression.Type_Symbol) then Exp_Stack.Push (X => Sem.Unknown_Type_Record, Stack => E_Stack); ErrorHandler.Semantic_Error_Sym (Err_Num => 836, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Sym => Qualifying_Expression.Type_Symbol, Scope => Scope); -- move up so as to prevent walk of rest of illegal aggregate Node := STree.Parent_Node (Current_Node => Node); -- ASSUME Node = extension_aggregate OR annotation_extension_aggregate SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.extension_aggregate or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_extension_aggregate, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = extension_aggregate OR annotation_extension_aggregate in Wf_Ancestor_Part"); elsif Has_Intervening_Private_Extensions (Root_Type => Ancestor_Part.Type_Symbol, Extended_Type => Qualifying_Expression.Type_Symbol, Scope => Scope) then Exp_Stack.Push (X => Sem.Unknown_Type_Record, Stack => E_Stack); ErrorHandler.Semantic_Error_Sym (Err_Num => 837, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Sym => Qualifying_Expression.Type_Symbol, Scope => Scope); -- move up so as to prevent walk of rest of illegal aggregate Node := STree.Parent_Node (Current_Node => Node); -- ASSUME Node = extension_aggregate OR annotation_extension_aggregate SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.extension_aggregate or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_extension_aggregate, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = extension_aggregate OR annotation_extension_aggregate in Wf_Ancestor_Part"); elsif Is_Null_Agregate (Node => Node) then if Fields_Need_Defining (Root_Type => Ancestor_Part.Type_Symbol, Extended_Type => Qualifying_Expression.Type_Symbol) then Exp_Stack.Push (X => Sem.Unknown_Type_Record, Stack => E_Stack); ErrorHandler.Semantic_Error_Sym (Err_Num => 838, Reference 
=> ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Sym => Qualifying_Expression.Type_Symbol, Scope => Scope); -- move up so as to prevent walk of rest of illegal aggregate Node := STree.Parent_Node (Current_Node => Node); -- ASSUME Node = extension_aggregate OR annotation_extension_aggregate SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.extension_aggregate or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_extension_aggregate, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = extension_aggregate OR annotation_extension_aggregate in Wf_Ancestor_Part"); else -- valid null record case, this means aggregate is complete Qualifying_Expression.Sort := Sem.Type_Result; Qualifying_Expression.Is_Constant := True; Qualifying_Expression.Param_Count := 0; Qualifying_Expression.Param_List := Lists.Null_List; Qualifying_Expression.Other_Symbol := Dictionary.NullSymbol; Qualifying_Expression.Is_ARange := False; Qualifying_Expression.Is_Static := False; Exp_Stack.Push (X => Qualifying_Expression, Stack => E_Stack); -- move up to qualified expression node since aggregate is complete Node := STree.Parent_Node (Current_Node => STree.Parent_Node (Current_Node => Node)); -- ASSUME Node = qualified_expression OR annotation_qualified_expression SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.qualified_expression or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_qualified_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = qualified_expression OR annotation_qualified_expression in Wf_Ancestor_Part"); end if; else -- Valid non-null case -- Set up either counters, for positional, or check lists, for named association if Association_Is_Named (Node => Node) then Create_Name_List (List => Ptr, Heap_Param => Heap_Param); Qualifying_Expression.Param_List := Ptr; -- to check whether a field is correctly part of the record but NOT already in -- the ancestor part we need access to the ancestor symbol when processing -- the associations. 
For this reason we put the ancestor symbol into the -- OtherSymbol field of the record representing the aggregate Qualifying_Expression.Other_Symbol := Ancestor_Part.Type_Symbol; Exp_Stack.Push (X => Qualifying_Expression, Stack => E_Stack); -- walk continues at STree.Next_Sibling (Node) - record_component_association else -- positional association -- when we check the expressions we want to match those fields NOT included -- in the ancestor part so we set the field counter thus: Qualifying_Expression.Param_Count := Dictionary.GetNumberOfComponents (Ancestor_Part.Type_Symbol); Exp_Stack.Push (X => Qualifying_Expression, Stack => E_Stack); -- walk continues at STree.Next_Sibling (Node) - record_component_association end if; end if; else -- qualifying prefix is not a type mark Exp_Stack.Push (X => Sem.Unknown_Type_Record, Stack => E_Stack); ErrorHandler.Semantic_Error (Err_Num => 95, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => STree.Parent_Node (Current_Node => STree.Parent_Node (Current_Node => Node))), Id_Str => LexTokenManager.Null_String); -- move up so as to prevent walk of rest of illegal aggregate Node := STree.Parent_Node (Current_Node => Node); -- ASSUME Node = extension_aggregate OR annotation_extension_aggregate SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.extension_aggregate or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_extension_aggregate, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = extension_aggregate OR annotation_extension_aggregate in Wf_Ancestor_Part"); end if; end Wf_Ancestor_Part; spark-2012.0.deb/examiner/maths-parsestring.adb0000644000175000017500000005235311753202336020404 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= -- This procedure breaks a SPARK literal string down into its constituent parts for -- further processing separate (Maths) procedure ParseString (S : in E_Strings.T; Decimal_Point_Found, Exponent_Found, Base_Found : out Boolean; Base : out Natural; Core_String, Exp_String : out E_Strings.T; Exp_Sign : out Character; Places_After_Point : out E_Strings.Lengths; Legal_Syntax : out Boolean) -- # derives Decimal_Point_Found from S & -- # Exponent_Found from S & -- # Base_Found from S & -- # Base from S & -- # Core_String from S & -- # Exp_String from S & -- # Exp_Sign from S & -- # Places_After_Point from S & -- # Legal_Syntax from S; -- NOTES -- Base will be set to 10 if Base_Found = FALSE -- Exp_String is "0" if Exponent_Found = FALSE -- Exp_Sign is plus if Exponent_Found = FALSE -- Places_After_Point is 0 if Decimal_Point_Found = FALSE -- Legal_Syntax only implies that string looks like an Ada literal is End_Indicator : constant Character := Character'Val (0); type Char_Set is array (Character) of Boolean; type Parser_State is ( Initial, Leading_Zero, Leading_Underline, Later_Digits, Base_Start, Based_Part, End_Base, Decimal_Start, Decimal_Part, Exp_Start, Exp_Part, Finished); Syntax_OK : Boolean; State : Parser_State; Acceptable, Legal_Digit : Char_Set; Digits_Read, Places_Count, In_Ptr : E_Strings.Lengths; Ch : Character; Buffer : E_Strings.T; ----------------------------------------------- procedure Caps (Ch : in out Character) --converts Characters a through f to upper case --# derives Ch from *; is begin if (Ch >= 'a') and (Ch <= 'f') then Ch := Character'Val ((Character'Pos (Ch) - Character'Pos ('a')) + Character'Pos ('A')); end if; end Caps; ----------------------------------------------- procedure Store (Ch : in Character) --# global in out Buffer; --# derives Buffer from *, --# Ch; is begin E_Strings.Append_Char (E_Str => Buffer, Ch => Ch); end Store; ----------------------------------------------- function Legal_Underline (OK : Char_Set) return Boolean --# global in In_Ptr; --# in S; is Ch : Character; OK_So_Far : Boolean; begin OK_So_Far := In_Ptr < E_Strings.Get_Length (E_Str => S); if OK_So_Far then Ch := E_Strings.Get_Element (E_Str => S, Pos => In_Ptr + 1); Caps (Ch => Ch); OK_So_Far := OK (Ch); end if; return OK_So_Far; end Legal_Underline; ----------------------------------------------- procedure Calc_Base --# global in Buffer; --# in out Syntax_OK; --# out Base; --# out Legal_Digit; --# derives Base, --# Legal_Digit from Buffer & --# Syntax_OK from *, --# Buffer; is Local_Base, I : Natural; begin if E_Strings.Get_Length (E_Str => Buffer) = 2 then Local_Base := 10 * Natural (CharToDigit (E_Strings.Get_Element (E_Str => Buffer, Pos => 1))) + Natural (CharToDigit (E_Strings.Get_Element (E_Str => Buffer, Pos => 2))); else -- must be 1 Local_Base := Natural (CharToDigit (E_Strings.Get_Element (E_Str => Buffer, Pos => 1))); end if; Base := Local_Base; Legal_Digit := Char_Set'(Character => False); I := 0; while I < Local_Base loop if I <= 9 then Legal_Digit (Character'Val (I + Character'Pos ('0'))) := True; else Legal_Digit (Character'Val ((I + Character'Pos ('A')) - 10)) := True; end if; I := I + 1; end loop; if (Local_Base < 2) or (Local_Base > 16) then Syntax_OK := False; end if; end Calc_Base; ----------------------------------------------- procedure Do_Initial --# global in Ch; --# in out Buffer; --# in out Digits_Read; --# out Acceptable; --# out State; --# derives Acceptable, --# State from Ch
& --# Buffer, --# Digits_Read from *, --# Ch; is begin if Ch = '0' then Acceptable := Char_Set'(End_Indicator => True, '0' .. '9' => True, '_' => True, '.' => True, others => False); State := Leading_Zero; else --must be '1'..'9' Store (Ch => Ch); Digits_Read := Digits_Read + 1; Acceptable := Char_Set' (End_Indicator => True, '0' .. '9' => True, '_' => True, '#' => True, '.' => True, 'E' => True, others => False); State := Later_Digits; end if; end Do_Initial; ----------------------------------------------- procedure Do_Leading_Zero --# global in Ch; --# in out Acceptable; --# in out Buffer; --# in out Core_String; --# in out Digits_Read; --# in out State; --# derives Acceptable, --# Buffer, --# Core_String, --# Digits_Read, --# State from *, --# Ch; is begin if Ch = '_' then Acceptable := Char_Set'('0' .. '9' => True, others => False); State := Leading_Underline; elsif Ch = '.' then Acceptable := Char_Set'('0' .. '9' => True, others => False); State := Decimal_Start; elsif (Ch >= '1') and (Ch <= '9') then Store (Ch => Ch); Digits_Read := Digits_Read + 1; Acceptable := Char_Set' (End_Indicator => True, '0' .. '9' => True, '_' => True, '#' => True, '.' => True, 'E' => True, others => False); State := Later_Digits; elsif Ch = End_Indicator then Core_String := E_Strings.Empty_String; E_Strings.Append_Char (E_Str => Core_String, Ch => '0'); State := Finished; --else its another leading zero and state remains unchanged end if; end Do_Leading_Zero; ----------------------------------------------- procedure Do_Leading_Underline --# global in Ch; --# in out Buffer; --# in out Digits_Read; --# out Acceptable; --# out State; --# derives Acceptable, --# State from Ch & --# Buffer, --# Digits_Read from *, --# Ch; is begin if (Ch >= '1') and (Ch <= '9') then Store (Ch => Ch); Digits_Read := Digits_Read + 1; Acceptable := Char_Set' (End_Indicator => True, '0' .. '9' => True, '_' => True, '#' => True, '.' => True, 'E' => True, others => False); State := Later_Digits; else -- must be '0' Acceptable := Char_Set'(End_Indicator => True, '0' .. '9' => True, '_' => True, '.' => True, others => False); State := Leading_Zero; end if; end Do_Leading_Underline; ----------------------------------------------- procedure Do_Later_Digits --# global in Ch; --# in In_Ptr; --# in S; --# in out Acceptable; --# in out Base; --# in out Base_Found; --# in out Buffer; --# in out Core_String; --# in out Digits_Read; --# in out Legal_Digit; --# in out State; --# in out Syntax_OK; --# derives Acceptable, --# Buffer from *, --# Buffer, --# Ch, --# Digits_Read & --# Base, --# Core_String, --# Legal_Digit from *, --# Buffer, --# Ch & --# Base_Found, --# Digits_Read, --# State from *, --# Ch & --# Syntax_OK from *, --# Buffer, --# Ch, --# In_Ptr, --# S; is begin case Ch is when End_Indicator => Core_String := Buffer; State := Finished; when '0' .. '9' => --------------------------------------------------- if Digits_Read < 3 then Digits_Read := Digits_Read + 1; Store (Ch => Ch); else Acceptable ('#') := False; Store (Ch => Ch); end if; when '.' => --------------------------------------------------- Acceptable := Char_Set'('0' .. '9' => True, others => False); State := Decimal_Start; when 'E' => --------------------------------------------------- Core_String := Buffer; Buffer := E_Strings.Empty_String; Acceptable := Char_Set'('+' => True, '0' .. '9' => True, others => False); State := Exp_Start; when '_' => --------------------------------------------------- Syntax_OK := Legal_Underline (OK => Char_Set'('0' .. 
'9' => True, others => False)); when '#' => --------------------------------------------------- Base_Found := True; Calc_Base; -- also calcs Legal_Digit set Buffer := E_Strings.Empty_String; Acceptable := Legal_Digit; State := Base_Start; when others => --------------------------------------------------- null; -- can't occur end case; end Do_Later_Digits; ----------------------------------------------- procedure Do_Base_Start --# global in Ch; --# in out Acceptable; --# in out Buffer; --# out State; --# derives Acceptable from * & --# Buffer from *, --# Ch & --# State from ; is begin Store (Ch => Ch); --which must be an acceptable digit Acceptable ('#') := True; Acceptable ('_') := True; State := Based_Part; end Do_Base_Start; ----------------------------------------------- procedure Do_Based_Part --# global in Ch; --# in In_Ptr; --# in Legal_Digit; --# in S; --# in out Acceptable; --# in out Buffer; --# in out Core_String; --# in out State; --# in out Syntax_OK; --# derives Acceptable, --# Buffer, --# State from *, --# Ch & --# Core_String from *, --# Buffer, --# Ch & --# Syntax_OK from *, --# Ch, --# In_Ptr, --# Legal_Digit, --# S; is begin case Ch is when '#' => Core_String := Buffer; Buffer := E_Strings.Empty_String; Acceptable := Char_Set'(End_Indicator => True, 'E' => True, others => False); State := End_Base; when '_' => Syntax_OK := Legal_Underline (OK => Legal_Digit); when others => --must be a legal digit Store (Ch => Ch); end case; end Do_Based_Part; ----------------------------------------------- procedure Do_End_Base --# global in Ch; --# in out Acceptable; --# out State; --# derives Acceptable from *, --# Ch & --# State from Ch; is begin if Ch = End_Indicator then State := Finished; else -- must be 'E' Acceptable := Char_Set'('+' => True, '0' .. '9' => True, others => False); State := Exp_Start; end if; end Do_End_Base; ----------------------------------------------- procedure Do_Decimal_Start --# global in Ch; --# in out Buffer; --# in out Places_Count; --# out Acceptable; --# out Decimal_Point_Found; --# out State; --# derives Acceptable, --# Decimal_Point_Found, --# State from & --# Buffer from *, --# Ch & --# Places_Count from *; is begin Decimal_Point_Found := True; Store (Ch => Ch); Places_Count := Places_Count + 1; Acceptable := Char_Set'(End_Indicator => True, '0' .. '9' => True, '_' => True, 'E' => True, others => False); State := Decimal_Part; end Do_Decimal_Start; ----------------------------------------------- procedure Do_Decimal_Part --# global in Ch; --# in In_Ptr; --# in S; --# in out Acceptable; --# in out Buffer; --# in out Core_String; --# in out Places_Count; --# in out State; --# in out Syntax_OK; --# derives Acceptable, --# Buffer, --# Places_Count, --# State from *, --# Ch & --# Core_String from *, --# Buffer, --# Ch & --# Syntax_OK from *, --# Ch, --# In_Ptr, --# S; is begin case Ch is when End_Indicator => Core_String := Buffer; State := Finished; when 'E' => Core_String := Buffer; Buffer := E_Strings.Empty_String; Acceptable := Char_Set'('+' => True, '-' => True, '0' .. '9' => True, others => False); State := Exp_Start; when '_' => Syntax_OK := Legal_Underline (OK => Char_Set'('0' .. 
'9' => True, others => False)); when others => Store (Ch => Ch); Places_Count := Places_Count + 1; end case; end Do_Decimal_Part; ----------------------------------------------- procedure Do_Exp_Start --# global in Ch; --# in out Buffer; --# out Acceptable; --# out Exponent_Found; --# out Exp_Sign; --# out State; --# derives Acceptable, --# Exp_Sign, --# State from Ch & --# Buffer from *, --# Ch & --# Exponent_Found from ; is begin Exponent_Found := True; case Ch is when '-' | '+' => Exp_Sign := Ch; Acceptable := Char_Set'('0' .. '9' => True, others => False); State := Exp_Part; when others => Exp_Sign := '+'; Store (Ch => Ch); Acceptable := Char_Set'(End_Indicator => True, '0' .. '9' => True, '_' => True, others => False); State := Exp_Part; end case; end Do_Exp_Start; ----------------------------------------------- procedure Do_Exp_Part --# global in Ch; --# in In_Ptr; --# in S; --# in out Acceptable; --# in out Buffer; --# in out Exp_String; --# in out State; --# in out Syntax_OK; --# derives Acceptable, --# Buffer, --# State from *, --# Ch & --# Exp_String from *, --# Buffer, --# Ch & --# Syntax_OK from *, --# Ch, --# In_Ptr, --# S; is begin case Ch is when End_Indicator => Exp_String := Buffer; State := Finished; when '_' => Syntax_OK := Legal_Underline (OK => Char_Set'('0' .. '9' => True, others => False)); when others => -- '0'..'9' Store (Ch => Ch); Acceptable := Char_Set'(End_Indicator => True, '0' .. '9' => True, '_' => True, others => False); end case; end Do_Exp_Part; begin -- Parse_String Acceptable := Char_Set'('0' .. '9' => True, others => False); State := Initial; Syntax_OK := True; Decimal_Point_Found := False; Exponent_Found := False; Exp_Sign := '+'; Base_Found := False; Digits_Read := 0; Places_Count := 0; Base := 10; Core_String := E_Strings.Empty_String; Exp_String := E_Strings.Copy_String (Str => "0"); Buffer := E_Strings.Empty_String; Legal_Digit := Char_Set'(Character => False); In_Ptr := 1; loop exit when not Syntax_OK; -- don't look beyond first error if In_Ptr > E_Strings.Get_Length (E_Str => S) then -- end of string Ch := End_Indicator; else Ch := E_Strings.Get_Element (E_Str => S, Pos => In_Ptr); -- get Character Caps (Ch => Ch); end if; -- check legality of Character against acceptable set if not Acceptable (Ch) then Syntax_OK := False; exit; end if; -- if we get here we have legal Character to deal with; case State is when Initial => Do_Initial; when Leading_Zero => Do_Leading_Zero; when Leading_Underline => Do_Leading_Underline; when Later_Digits => Do_Later_Digits; when Base_Start => Do_Base_Start; when Based_Part => Do_Based_Part; when End_Base => Do_End_Base; when Decimal_Start => Do_Decimal_Start; when Decimal_Part => Do_Decimal_Part; when Exp_Start => Do_Exp_Start; when Exp_Part => Do_Exp_Part; when Finished => null; -- can't be reached because of exit below end case; exit when State = Finished; In_Ptr := In_Ptr + 1; end loop; Places_After_Point := Places_Count; Legal_Syntax := Syntax_OK; end ParseString; spark-2012.0.deb/examiner/metafile.adb0000644000175000017500000007573511753202336016530 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- Provides routines to support analysis of "files of files" -- -------------------------------------------------------------------------------- with Ada.Characters.Latin_1; with CommandLineData; with CommandLineHandler; with ErrorHandler; with FileSystem; with ScreenEcho; with SystemErrors; with XMLReport; package body MetaFile is Empty_Meta_File_Element : constant Meta_File_Element := Meta_File_Element'(SPARK_IO.Null_File, E_Strings.Empty_String); Null_Meta_File : constant Meta_Files := Meta_Files'(Vector => Vectors'(Indexes => Empty_Meta_File_Element), Ptr => 0); function Is_Empty (The_Meta_File : Meta_Files) return Boolean is begin return The_Meta_File.Ptr = 0; end Is_Empty; function Is_White_Space (Space_Char : Character) return Boolean is begin return (Space_Char = ' ') or (Space_Char = Ada.Characters.Latin_1.HT) or (Space_Char = Ada.Characters.Latin_1.CR); end Is_White_Space; procedure Push (The_File : in SPARK_IO.File_Type; The_Path : in E_Strings.T; The_Meta_File : in out Meta_Files) --# derives The_Meta_File from *, --# The_File, --# The_Path; is begin if The_Meta_File.Ptr = Ptrs'Last then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Meta_File_Stack_Overflow, Msg => ""); end if; The_Meta_File.Ptr := The_Meta_File.Ptr + 1; The_Meta_File.Vector (The_Meta_File.Ptr) := Meta_File_Element'(The_File, The_Path); end Push; procedure Pop (The_Meta_File : in out Meta_Files; The_File : out SPARK_IO.File_Type; The_Path : out E_Strings.T) --# derives The_File, --# The_Meta_File, --# The_Path from The_Meta_File; --# pre not Is_Empty (The_Meta_File); is begin The_File := The_Meta_File.Vector (The_Meta_File.Ptr).File_Handle; The_Path := The_Meta_File.Vector (The_Meta_File.Ptr).Path_Name; The_Meta_File.Ptr := The_Meta_File.Ptr - 1; end Pop; function Strip_At (S : E_Strings.T) return E_Strings.T is begin return E_Strings.Section (E_Str => S, Start_Pos => 2, Length => E_Strings.Get_Length (E_Str => S) - 1); end Strip_At; function Strip_Leading_Spaces (S : E_Strings.T) return E_Strings.T is Ptr : E_Strings.Positions := 1; begin loop exit when Ptr > E_Strings.Get_Length (E_Str => S); exit when not Is_White_Space (Space_Char => E_Strings.Get_Element (E_Str => S, Pos => Ptr)); Ptr := Ptr + 1; end loop; return E_Strings.Section (E_Str => S, Start_Pos => Ptr, Length => (E_Strings.Get_Length (E_Str => S) - Ptr) + 1); end Strip_Leading_Spaces; function Strip_Comments (S : E_Strings.T) return E_Strings.T is Res : E_Strings.T; begin if E_Strings.Get_Element (E_Str => S, Pos => 1) = '-' and then E_Strings.Get_Element (E_Str => S, Pos => 2) = '-' then Res := E_Strings.Empty_String; else Res := S; end if; return Res; end Strip_Comments; function Line_Is_Empty (S : E_Strings.T) return Boolean is begin return E_Strings.Get_Length (E_Str => S) = 0; end Line_Is_Empty; function Get_Metafile_Name (S : E_Strings.T) return E_Strings.T is Res : E_Strings.T := E_Strings.Empty_String; In_Quoted_String : Boolean; begin In_Quoted_String := False; for I in E_Strings.Positions range 1 .. 
E_Strings.Get_Length (E_Str => S) loop exit when Is_White_Space (Space_Char => E_Strings.Get_Element (E_Str => S, Pos => I)) and then not In_Quoted_String; -- allow for quoted strings containing spaces if E_Strings.Get_Element (E_Str => S, Pos => I) = Ada.Characters.Latin_1.Quotation then In_Quoted_String := not In_Quoted_String; else if E_Strings.Get_Element (E_Str => S, Pos => I) /= '@' then E_Strings.Append_Char (E_Str => Res, Ch => E_Strings.Get_Element (E_Str => S, Pos => I)); end if; end if; end loop; FileSystem.Check_Extension (Fn => Res, Ext => E_Strings.Copy_String (Str => CommandLineData.Meta_File_Extension)); return Res; end Get_Metafile_Name; -- Exported Meta File Operations procedure Create (File_Name : in E_Strings.T; The_Meta_File : out Meta_Files) is The_File : SPARK_IO.File_Type := SPARK_IO.Null_File; Status : SPARK_IO.File_Status; Meta_File_Local : Meta_Files := Null_Meta_File; Filename_Local : E_Strings.T; Filename_Full : E_Strings.T; Find_Status : FileSystem.Typ_File_Spec_Status; begin Filename_Local := Strip_At (S => File_Name); E_Strings.Open (File => The_File, Mode_Of_File => SPARK_IO.In_File, Name_Of_File => Filename_Local, Form_Of_File => "", Status => Status); if Status = SPARK_IO.Ok then --# accept F, 10, Find_Status, "Known to be ineffective, must be true at this point"; FileSystem.Find_Full_File_Name (File_Spec => Filename_Local, File_Status => Find_Status, Full_File_Name => Filename_Full); --# end accept; Push (The_File => The_File, The_Path => Filename_Full, The_Meta_File => Meta_File_Local); else ScreenEcho.New_Line (1); ScreenEcho.Put_String ("Cannot open file "); if CommandLineData.Content.Plain_Output then ScreenEcho.Put_ExaminerLine (FileSystem.Just_File (Fn => Filename_Local, Ext => True)); else ScreenEcho.Put_ExaminerLine (Filename_Local); end if; ScreenEcho.New_Line (1); end if; The_Meta_File := Meta_File_Local; --# accept F, 33, Find_Status, "Known to be ineffective, must be true at this point"; end Create; procedure Next_Name (The_Meta_File : in out Meta_Files; The_Filename : out E_Strings.T; Do_Listing : out Boolean; The_Listing_Name : out E_Strings.T; Do_VCG : out Boolean; File_Found : out Boolean) is The_File : SPARK_IO.File_Type; Tmp_Filename : E_Strings.T; The_Path : E_Strings.T; Current_Line : E_Strings.T; Data_Line_Found : Boolean; Unused, Status : SPARK_IO.File_Status; procedure Parse (Current_Line : in E_Strings.T; The_Filename : out E_Strings.T; Do_Listing : out Boolean; The_Listing_Name : out E_Strings.T; Do_VCG : out Boolean) --# global in CommandLineData.Content; --# derives Do_Listing, --# Do_VCG from Current_Line & --# The_Filename, --# The_Listing_Name from CommandLineData.Content, --# Current_Line; is Switch : E_Strings.T := E_Strings.Empty_String; Ptr : E_Strings.Positions := 1; In_Quoted_String : Boolean; begin The_Filename := E_Strings.Empty_String; Do_Listing := True; The_Listing_Name := E_Strings.Empty_String; Do_VCG := False; In_Quoted_String := False; loop exit when Is_White_Space (Space_Char => E_Strings.Get_Element (E_Str => Current_Line, Pos => Ptr)) and then not In_Quoted_String; -- Allow for quoted strings containing spaces if E_Strings.Get_Element (E_Str => Current_Line, Pos => Ptr) = Ada.Characters.Latin_1.Quotation then In_Quoted_String := not In_Quoted_String; else E_Strings.Append_Char (E_Str => The_Filename, Ch => E_Strings.Get_Element (E_Str => Current_Line, Pos => Ptr)); end if; exit when Ptr = E_Strings.Get_Length (E_Str => Current_Line); Ptr := Ptr + 1; end loop; FileSystem.Check_Extension (Fn => The_Filename, 
Ext => CommandLineData.Content.Source_Extension); -- at this point The_Filename has the whole of any file name -- and Ptr either points at a switch character or at the end -- of Current_Line or at a space -- skip leading spaces before possible switch character while Ptr < E_Strings.Get_Length (E_Str => Current_Line) loop -- Find the switch character or the end of Current_Line if Is_White_Space (Space_Char => E_Strings.Get_Element (E_Str => Current_Line, Pos => Ptr)) then Ptr := Ptr + 1; loop exit when E_Strings.Get_Element (E_Str => Current_Line, Pos => Ptr) = '-' or else Ptr = E_Strings.Get_Length (E_Str => Current_Line); Ptr := Ptr + 1; end loop; end if; -- At this point Ptr either points at a switch character or at the end of Current_Line if E_Strings.Get_Element (E_Str => Current_Line, Pos => Ptr) = '-' then Ptr := Ptr + 1; loop exit when Is_White_Space (Space_Char => E_Strings.Get_Element (E_Str => Current_Line, Pos => Ptr)) or else E_Strings.Get_Element (E_Str => Current_Line, Pos => Ptr) = '='; E_Strings.Append_Char (E_Str => Switch, Ch => E_Strings.Get_Element (E_Str => Current_Line, Pos => Ptr)); exit when Ptr = E_Strings.Get_Length (E_Str => Current_Line); Ptr := Ptr + 1; end loop; -- At this point we have any command line argument in variable 'switch' if CommandLineHandler.Check_Option_Name (Opt_Name => Switch, Str => CommandLineData.Option_No_Listing_File) then Do_Listing := False; end if; if CommandLineHandler.Check_Option_Name (Opt_Name => Switch, Str => CommandLineData.Option_Listing_File) then -- the user has given a specific name to the listing file -- first skip the '=' and any leading spaces loop exit when (not Is_White_Space (Space_Char => E_Strings.Get_Element (E_Str => Current_Line, Pos => Ptr))) and then E_Strings.Get_Element (E_Str => Current_Line, Pos => Ptr) /= '='; exit when Ptr = E_Strings.Get_Length (E_Str => Current_Line); Ptr := Ptr + 1; end loop; -- we are either at the end of the line (error no file name provided) -- at the start of a comment (error no file name provided) -- at the start of the listing file name if Ptr < E_Strings.Get_Length (E_Str => Current_Line) and then E_Strings.Get_Element (E_Str => Current_Line, Pos => Ptr) /= '-' then In_Quoted_String := False; loop exit when Is_White_Space (Space_Char => E_Strings.Get_Element (E_Str => Current_Line, Pos => Ptr)) and then not In_Quoted_String; -- allow for quoted strings containing spaces if E_Strings.Get_Element (E_Str => Current_Line, Pos => Ptr) = Ada.Characters.Latin_1.Quotation then In_Quoted_String := not In_Quoted_String; else E_Strings.Append_Char (E_Str => The_Listing_Name, Ch => E_Strings.Get_Element (E_Str => Current_Line, Pos => Ptr)); end if; exit when Ptr = E_Strings.Get_Length (E_Str => Current_Line); Ptr := Ptr + 1; end loop; FileSystem.Check_Listing_Extension (Source_Name => The_Filename, Fn => The_Listing_Name, Ext => CommandLineData.Content.Listing_Extension); end if; end if; if CommandLineHandler.Check_Option_Name (Opt_Name => Switch, Str => CommandLineData.Option_Vcg) then Do_VCG := True; end if; end if; end loop; if E_Strings.Is_Empty (E_Str => The_Listing_Name) then -- no switch found so listing is on and listing name is default listing name The_Listing_Name := FileSystem.Just_File (Fn => The_Filename, Ext => False); FileSystem.Check_Listing_Extension (Source_Name => The_Filename, Fn => The_Listing_Name, Ext => CommandLineData.Content.Listing_Extension); end if; end Parse; begin -- Next_Name The_Filename := E_Strings.Empty_String; Do_Listing := False; The_Listing_Name := 
E_Strings.Empty_String; Do_VCG := False; File_Found := False; loop exit when Is_Empty (The_Meta_File => The_Meta_File); -- fail exit Pop (The_Meta_File => The_Meta_File, The_File => The_File, The_Path => The_Path); loop -- look for non-empty line in current file Data_Line_Found := False; if SPARK_IO.End_Of_File (The_File) then --# accept Flow, 10, Unused, "Expected ineffective assignment to unused" & --# Flow, 10, The_File, "Expected ineffective assignment to The_File"; SPARK_IO.Close (The_File, Unused); --# end accept; exit; end if; E_Strings.Get_Line (File => The_File, E_Str => Current_Line); -- to get Current_Line := Strip_Comments (S => Strip_Leading_Spaces (S => Current_Line)); if not Line_Is_Empty (S => Current_Line) then Data_Line_Found := True; Push (The_File => The_File, The_Path => The_Path, The_Meta_File => The_Meta_File); -- put file back ready for next --call if E_Strings.Get_Element (E_Str => Current_Line, Pos => 1) = '@' then Current_Line := Get_Metafile_Name (S => Current_Line); -- Interpret this FileSpec relative to the current -- metafile's location Current_Line := FileSystem.Interpret_Relative (File_Name => Current_Line, Relative_To_Directory => The_Path); E_Strings.Open (File => The_File, Mode_Of_File => SPARK_IO.In_File, Name_Of_File => Current_Line, Form_Of_File => "", Status => Status); if Status = SPARK_IO.Ok then Push (The_File => The_File, The_Path => Current_Line, The_Meta_File => The_Meta_File); elsif Status = SPARK_IO.Use_Error then -- for GNAT we get Use_Error if the file is already open; this means recursive -- meta files have been detected. ScreenEcho.New_Line (1); ScreenEcho.Put_String ("Circular reference found to file "); --# accept Flow, 41, "Expect stable expression"; if CommandLineData.Content.Plain_Output then --# end accept; ScreenEcho.Put_ExaminerLine (FileSystem.Just_File (Fn => Current_Line, Ext => True)); else ScreenEcho.Put_ExaminerLine (Current_Line); end if; ScreenEcho.New_Line (1); else ScreenEcho.New_Line (1); ScreenEcho.Put_String ("Cannot open file "); ErrorHandler.Set_File_Open_Error; --# accept Flow, 41, "Expect stable expression"; if CommandLineData.Content.Plain_Output then --# end accept; ScreenEcho.Put_ExaminerLine (FileSystem.Just_File (Fn => Current_Line, Ext => True)); else ScreenEcho.Put_ExaminerLine (Current_Line); end if; ScreenEcho.New_Line (1); end if; else -- ordinary file found File_Found := True; Parse (Current_Line => Current_Line, The_Filename => Tmp_Filename, Do_Listing => Do_Listing, The_Listing_Name => The_Listing_Name, Do_VCG => Do_VCG); -- Interpret this FileSpec relative to the current -- metafile's location The_Filename := FileSystem.Interpret_Relative (File_Name => Tmp_Filename, Relative_To_Directory => The_Path); end if; end if; exit when Data_Line_Found; end loop; -- looping through file lines for a non-blank and valid one exit when File_Found; -- success exit end loop; -- processing stacked metafile entries --# accept Flow, 33, Unused, "Expected unused to be neither referenced or exported"; end Next_Name; procedure Report_File_Content (To_File : in SPARK_IO.File_Type; Filename : in E_Strings.T; Meta_File_Used : in out Boolean) is The_File : SPARK_IO.File_Type := SPARK_IO.Null_File; Status, Unused : SPARK_IO.File_Status; Data_Line_Found : Boolean; The_Meta_File : Meta_Files := Null_Meta_File; Current_Line, Filename_Local, The_Path : E_Strings.T; Filename_Full : E_Strings.T; Ptr : E_Strings.Positions; In_Quoted_String : Boolean; Find_Status : FileSystem.Typ_File_Spec_Status; Margin : Natural := 3; Offset : 
constant Natural := 3; procedure Print_Filename (To_File : in SPARK_IO.File_Type; Filename : in E_Strings.T; Margin : in Natural) --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Filename, --# Margin, --# To_File; is begin for I in Natural range 1 .. Margin loop SPARK_IO.Put_Char (To_File, ' '); end loop; if CommandLineData.Content.Plain_Output then E_Strings.Put_Line (File => To_File, E_Str => FileSystem.Just_File (Fn => Filename, Ext => True)); else E_Strings.Put_Line (File => To_File, E_Str => Filename); end if; end Print_Filename; procedure Inc_Margin --# global in out Margin; --# derives Margin from *; is begin Margin := Margin + Offset; end Inc_Margin; procedure Dec_Margin --# global in out Margin; --# derives Margin from *; is begin Margin := Margin - Offset; end Dec_Margin; begin -- Report_File_Content Filename_Local := Strip_At (S => Filename); E_Strings.Open (File => The_File, Mode_Of_File => SPARK_IO.In_File, Name_Of_File => Filename_Local, Form_Of_File => "", Status => Status); if Status = SPARK_IO.Ok then if not Meta_File_Used then Meta_File_Used := True; if not CommandLineData.Content.XML then -- Expect stable expression SPARK_IO.New_Line (To_File, 1); SPARK_IO.Put_Line (To_File, "Meta File(s) used were:", 0); end if; end if; if CommandLineData.Content.XML then -- Expect stable expression XMLReport.Start_Meta_File (Name => Filename_Local, Report => To_File); else Print_Filename (To_File => To_File, Filename => Filename_Local, Margin => Margin); Inc_Margin; end if; --# accept F, 10, Find_Status, "Known to be ineffective, must be true at this point"; FileSystem.Find_Full_File_Name (File_Spec => Filename_Local, File_Status => Find_Status, Full_File_Name => Filename_Full); --# end accept; Push (The_File => The_File, The_Path => Filename_Full, The_Meta_File => The_Meta_File); end if; loop exit when Is_Empty (The_Meta_File => The_Meta_File); Pop (The_Meta_File => The_Meta_File, The_File => The_File, The_Path => The_Path); loop -- look for non-empty line in current file Data_Line_Found := False; if SPARK_IO.End_Of_File (The_File) then --# accept Flow, 10, The_File, "Expected ineffective assignment to The_File" & --# Flow, 10, Unused, "Expected ineffective assignment to unused"; SPARK_IO.Close (The_File, Unused); --# end accept; --# accept Flow, 41, "Expect stable expression"; if CommandLineData.Content.XML then --# end accept; XMLReport.End_Meta_File (Report => To_File); else Dec_Margin; end if; exit; end if; E_Strings.Get_Line (File => The_File, E_Str => Current_Line); -- to get Current_Line := Strip_Comments (S => Strip_Leading_Spaces (S => Current_Line)); if not Line_Is_Empty (S => Current_Line) then Data_Line_Found := True; Push (The_File => The_File, The_Path => The_Path, The_Meta_File => The_Meta_File); -- put file back ready for next --call if E_Strings.Get_Element (E_Str => Current_Line, Pos => 1) = '@' then Current_Line := Get_Metafile_Name (S => Current_Line); Current_Line := FileSystem.Interpret_Relative (File_Name => Current_Line, Relative_To_Directory => The_Path); E_Strings.Open (File => The_File, Mode_Of_File => SPARK_IO.In_File, Name_Of_File => Current_Line, Form_Of_File => "", Status => Status); if Status = SPARK_IO.Ok then Push (The_File => The_File, The_Path => Current_Line, The_Meta_File => The_Meta_File); --# accept Flow, 41, "Expect stable expression"; if CommandLineData.Content.Plain_Output then --# end accept; Current_Line := FileSystem.Just_File (Fn => Current_Line, 
Ext => True); end if; --# accept Flow, 41, "Expect stable expression"; if CommandLineData.Content.XML then --# end accept; XMLReport.Start_Meta_File (Name => Current_Line, Report => To_File); else Print_Filename (To_File => To_File, Filename => Current_Line, Margin => Margin); Inc_Margin; end if; end if; else -- ordinary file found Filename_Local := E_Strings.Empty_String; Ptr := 1; In_Quoted_String := False; loop exit when Is_White_Space (Space_Char => E_Strings.Get_Element (E_Str => Current_Line, Pos => Ptr)) and then not In_Quoted_String; -- allow for quoted strings containing spaces if E_Strings.Get_Element (E_Str => Current_Line, Pos => Ptr) = Ada.Characters.Latin_1.Quotation then In_Quoted_String := not In_Quoted_String; else E_Strings.Append_Char (E_Str => Filename_Local, Ch => E_Strings.Get_Element (E_Str => Current_Line, Pos => Ptr)); end if; exit when Ptr = E_Strings.Get_Length (E_Str => Current_Line); Ptr := Ptr + 1; end loop; --# accept Flow, 41, "Expect stable expression"; if CommandLineData.Content.Plain_Output then --# end accept; Filename_Local := FileSystem.Just_File (Fn => Filename_Local, Ext => True); else Filename_Local := FileSystem.Interpret_Relative (File_Name => Filename_Local, Relative_To_Directory => The_Path); end if; --# accept Flow, 41, "Expect stable expression"; if CommandLineData.Content.XML then --# end accept; XMLReport.Filename (Plain_Output => CommandLineData.Content.Plain_Output, File => Filename_Local); E_Strings.Put_String (File => To_File, E_Str => Filename_Local); else Print_Filename (To_File => To_File, Filename => Filename_Local, Margin => Margin); end if; end if; end if; exit when Data_Line_Found; end loop; -- looping through file lines for a non-blank and valid one end loop; --# accept Flow, 33, Unused, "Expected unused to be neither referenced or exported" & --# Flow, 33, Find_Status, "Expected Find_Status to be neither referenced or exported"; end Report_File_Content; end MetaFile; spark-2012.0.deb/examiner/systemerrors.adb0000644000175000017500000004364411753202336017515 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with CommandLineData; with Fatal; with ScreenEcho; with Version; package body SystemErrors is type Error_Kinds is (Static_Limit, Operating_System_Limit, Internal_Error); type Sys_Err_To_Err_Kind_Table is array (Sys_Err_Type) of Error_Kinds; SETEK : constant Sys_Err_To_Err_Kind_Table := Sys_Err_To_Err_Kind_Table' (String_Table_Overflow => Static_Limit, Syntax_Tree_Overflow => Static_Limit, Parse_Stack_Overflow => Static_Limit, Symbol_Table_Overflow_Dynamic => Operating_System_Limit, Invalid_Syntax_Tree => Internal_Error, Invalid_Symbol_Table => Internal_Error, Empty_Heap => Static_Limit, Relation_Stack_Overflow => Static_Limit, Relation_Stack_Underflow => Internal_Error, Invalid_Init => Internal_Error, Error_Position_Wrong => Internal_Error, Expression_Stack_Corrupt => Internal_Error, Expression_Stack_Underflow => Internal_Error, Expression_Stack_Overflow => Static_Limit, Type_Context_Stack_Corrupt => Internal_Error, Type_Context_Stack_Underflow => Internal_Error, Type_Context_Stack_Overflow => Static_Limit, List_Overflow_In_Expression => Static_Limit, List_Overflow_In_Dependency_Clause => Static_Limit, List_Overflow_In_Procedure_Call => Static_Limit, Case_Stack_Underflow => Internal_Error, Case_Stack_Overflow => Static_Limit, VCG_Graph_Size_Exceeded => Static_Limit, VCG_Heap_Is_Exhausted => Static_Limit, VCG_Heap_Is_Corrupted => Internal_Error, Ref_List_Key_Cell_Missing => Internal_Error, Flow_Analyser_Expression_Limit => Static_Limit, Case_Statement_Nesting_Limit => Static_Limit, Error_Handler_Temporary_Files => Operating_System_Limit, Error_Handler_Source => Operating_System_Limit, Disk_Full_Error => Operating_System_Limit, Math_Error => Internal_Error, Too_Many_Nested_Arrays => Static_Limit, Too_Many_Nested_Records => Static_Limit, Context_Unit_Stack_Overflow => Static_Limit, Context_Unit_Stack_Underflow => Internal_Error, Context_File_Heap_Overflow => Static_Limit, Context_Unit_Heap_Overflow => Static_Limit, Too_Many_File_Lines => Static_Limit, Index_Stack_Full => Static_Limit, Index_Component_List_Full => Static_Limit, Too_Many_Errors => Static_Limit, Warning_Name_Too_Long => Static_Limit, Unit_Name_In_Index_Too_Long => Static_Limit, File_Name_In_Index_Too_Long => Static_Limit, Too_Many_Suppressed_Warnings => Static_Limit, Unit_Nesting_Too_Deep => Static_Limit, Statement_Stack_Underflow => Internal_Error, Statement_Stack_Overflow => Static_Limit, Wf_Compilation_Unit_Stack_Overflow => Static_Limit, Wf_Compilation_Unit_Stack_Underflow => Internal_Error, Too_Many_Flow_Analyser_Expressions => Static_Limit, Too_Many_Params_In_Procedure_Call => Static_Limit, Statistics_Usage_Greater_Than_Table_Size => Static_Limit, Aggregate_Stack_Under_Flow => Internal_Error, Aggregate_Stack_Over_Flow => Static_Limit, Meta_File_Stack_Overflow => Static_Limit, Lex_Stack_Overflow => Static_Limit, Lex_Stack_Underflow => Internal_Error, Component_Manager_Overflow => Static_Limit, Component_Error_Overflow => Static_Limit, Syntax_Tree_Walk_Error => Internal_Error, Precondition_Failure => Internal_Error, Postcondition_Failure => Internal_Error, Assertion_Failure => Internal_Error, Unimplemented_Feature => Internal_Error, XML_Schema_Error => Internal_Error, XML_Generation_Error => Internal_Error, Illegal_XML_Generation_Attempt => Internal_Error, String_Over_Flow => Internal_Error, Queue_Overflow => Static_Limit, XRef_Table_Full => Operating_System_Limit, Invalid_Index => Internal_Error, Other_Internal_Error => Internal_Error); 
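   -- Illustrative note (not part of the original Examiner sources): SETEK
   -- classifies each Sys_Err_Type as a static tool limit, an operating-system
   -- limit, or an internal error, and Fatal_Error below uses that
   -- classification to choose both the banner text and the Fatal exception
   -- raised. A hypothetical caller that hits a table limit might report it as
   --
   --    SystemErrors.Fatal_Error
   --      (Sys_Err => SystemErrors.Syntax_Tree_Overflow,
   --       Msg     => "while parsing the current unit");   -- Msg text is made up here
   --
   -- which SETEK maps to Static_Limit, so Display_Box and Display_Cause print
   -- the error box and Stop_Program then raises Fatal.Static_Limit.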
procedure Stop_Program (Error_Kind : in Error_Kinds) --# derives null from Error_Kind; --# post False; -- does not terminate normally is --# hide Stop_Program; begin case Error_Kind is when Static_Limit => raise Fatal.Static_Limit; when Operating_System_Limit => raise Fatal.Operating_System_Limit; when Internal_Error => raise Fatal.Internal_Error; end case; end Stop_Program; procedure Display_Cause (Error_Kind : in Error_Kinds) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Error_Kind; is begin case Error_Kind is when Static_Limit => ScreenEcho.Put_Line ("* Internal static tool limit reached"); when Operating_System_Limit => ScreenEcho.Put_Line ("* Operating system limit reached"); when Internal_Error => ScreenEcho.Put_Line ("* Unexpected internal error"); end case; end Display_Cause; procedure Display_Box (Sys_Err : in Sys_Err_Type; Msg : in String) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Msg, --# Sys_Err; is begin -- print 'big box' on screen to draw attention to error ScreenEcho.Put_Line ("*****************************************************************************"); ScreenEcho.Put_Line ("* A fatal error has occurred"); ScreenEcho.Put_String ("* "); case Sys_Err is when String_Table_Overflow => ScreenEcho.Put_Line ("String table overflow"); when Syntax_Tree_Overflow => ScreenEcho.Put_Line ("Syntax tree overflow"); when Parse_Stack_Overflow => ScreenEcho.Put_Line ("Parse stack overflow"); when Symbol_Table_Overflow_Dynamic => ScreenEcho.Put_Line ("Symbol table allocation overflow"); when Invalid_Syntax_Tree => ScreenEcho.Put_Line ("Invalid syntax tree"); when Invalid_Symbol_Table => ScreenEcho.Put_Line ("Internal Symbol Table Error"); when Empty_Heap => ScreenEcho.Put_Line ("Empty heap"); when Relation_Stack_Overflow => ScreenEcho.Put_Line ("Relation stack overflow"); when Relation_Stack_Underflow => ScreenEcho.Put_Line ("Relation stack underflow"); when Invalid_Init => ScreenEcho.Put_Line ("Failure in initialisation"); when Error_Position_Wrong => ScreenEcho.Put_Line ("An error is incorrectly positioned"); when Expression_Stack_Corrupt => ScreenEcho.Put_Line ("The expression stack is corrupt"); when Expression_Stack_Underflow => ScreenEcho.Put_Line ("Expression stack underflow"); when Expression_Stack_Overflow => ScreenEcho.Put_Line ("Expression stack overflow"); when Type_Context_Stack_Corrupt => ScreenEcho.Put_Line ("The type context stack is corrupt"); when Type_Context_Stack_Underflow => ScreenEcho.Put_Line ("Type context stack underflow"); when Type_Context_Stack_Overflow => ScreenEcho.Put_Line ("Type context stack overflow"); when List_Overflow_In_Expression => ScreenEcho.Put_Line ("List overflow in expression"); when List_Overflow_In_Dependency_Clause => ScreenEcho.Put_Line ("List overflow in dependency clause"); when List_Overflow_In_Procedure_Call => ScreenEcho.Put_Line ("List overflow in procedure call"); when Case_Stack_Underflow => ScreenEcho.Put_Line ("Case statement stack underflow"); when Case_Stack_Overflow => ScreenEcho.Put_Line ("Case statement stack overflow"); when VCG_Graph_Size_Exceeded => ScreenEcho.Put_Line ("Maximum graph size in VC Generator exceeded"); when VCG_Heap_Is_Exhausted => ScreenEcho.Put_Line ("VC Generator Heap is Exhausted"); when VCG_Heap_Is_Corrupted => ScreenEcho.Put_Line ("VC Generator Heap is Corrupted"); when Ref_List_Key_Cell_Missing => ScreenEcho.Put_Line ("Referenced Variable List Error"); when Flow_Analyser_Expression_Limit => ScreenEcho.Put_Line ("Flow analyser 
expression limit reached"); when Case_Statement_Nesting_Limit => ScreenEcho.Put_Line ("Case statement nesting limit reached"); when Error_Handler_Temporary_Files => ScreenEcho.Put_Line ("Unable to open temporary file in ErrorHandler"); when Error_Handler_Source => ScreenEcho.Put_Line ("Unable to open source file in ErrorHandler"); when Disk_Full_Error => ScreenEcho.Put_Line ("File write operation failed, disk is full"); when Math_Error => ScreenEcho.Put_Line ("Internal error in static expression evaluator"); when Too_Many_Nested_Arrays => ScreenEcho.Put_Line ("Array constant nested too deeply"); when Too_Many_Nested_Records => ScreenEcho.Put_Line ("Record constant nested too deeply"); when Context_Unit_Stack_Overflow => ScreenEcho.Put_Line ("Too many pending units in context manager"); when Context_Unit_Stack_Underflow => ScreenEcho.Put_Line ("Internal error in context manager: stack underflow"); when Context_File_Heap_Overflow => ScreenEcho.Put_Line ("Too many files in examination"); when Context_Unit_Heap_Overflow => ScreenEcho.Put_Line ("Too many units in examination"); when Too_Many_File_Lines => ScreenEcho.Put_Line ("Too many lines in source file"); when Index_Stack_Full => ScreenEcho.Put_Line ("Index files too deeply nested"); when Index_Component_List_Full => ScreenEcho.Put_Line ("Too many components in index file entry"); when Too_Many_Errors => ScreenEcho.Put_Line ("Too many errors in a single file"); when Warning_Name_Too_Long => ScreenEcho.Put_Line ("Line too long in warning control file"); when Unit_Name_In_Index_Too_Long => ScreenEcho.Put_Line ("Unit name too long in index file"); when File_Name_In_Index_Too_Long => ScreenEcho.Put_Line ("File name too long in index file"); when Too_Many_Suppressed_Warnings => ScreenEcho.Put_Line ("Too many suppressed warnings for a single file"); when Unit_Nesting_Too_Deep => ScreenEcho.Put_Line ("Units too deeply nested"); when Statement_Stack_Underflow => ScreenEcho.Put_Line ("VCG statement stack underflow"); when Statement_Stack_Overflow => ScreenEcho.Put_Line ("VCG statement stack overflow"); when Wf_Compilation_Unit_Stack_Overflow => ScreenEcho.Put_Line ("Well-formation checker error: compilation unit stack overflow"); when Wf_Compilation_Unit_Stack_Underflow => ScreenEcho.Put_Line ("Internal error in well-formation checker: compilation unit stack underflow"); when Too_Many_Flow_Analyser_Expressions => ScreenEcho.Put_Line ("Too many expressions in flow analyser"); when Too_Many_Params_In_Procedure_Call => ScreenEcho.Put_Line ("Too many parameters in procedure call"); when Statistics_Usage_Greater_Than_Table_Size => ScreenEcho.Put_Line ("Reported table usage larger than table size"); when Aggregate_Stack_Under_Flow => ScreenEcho.Put_Line ("Aggregate stack underflow"); when Aggregate_Stack_Over_Flow => ScreenEcho.Put_Line ("Aggregate stack overflow"); when Meta_File_Stack_Overflow => ScreenEcho.Put_Line ("Stack overflow while processing meta file"); when Lex_Stack_Overflow => ScreenEcho.Put_Line ("Stack overflow in LexTokenStacks"); when Lex_Stack_Underflow => ScreenEcho.Put_Line ("Stack under flow in LexTokenStacks"); when Component_Manager_Overflow => ScreenEcho.Put_Line ("Record component manager overflow"); when Component_Error_Overflow => ScreenEcho.Put_Line ("Record component error-manager overflow"); when Syntax_Tree_Walk_Error => ScreenEcho.Put_Line ("Syntax tree walk error"); when Precondition_Failure => ScreenEcho.Put_Line ("Precondition failure"); when Postcondition_Failure => ScreenEcho.Put_Line ("Postcondition failure"); when 
Assertion_Failure => ScreenEcho.Put_Line ("Run-time assertion failure"); when Unimplemented_Feature => ScreenEcho.Put_Line ("Use of an unimplemented SPARK language construct or Examiner feature"); when XML_Schema_Error => ScreenEcho.Put_Line ("Error initialising schema"); when XML_Generation_Error => ScreenEcho.Put_Line ("Internal failure of the XML report generator"); when Illegal_XML_Generation_Attempt => ScreenEcho.Put_Line ("The Examiner attempted to generate invalid XML"); when String_Over_Flow => ScreenEcho.Put_Line ("String operation overflowed"); when Queue_Overflow => ScreenEcho.Put_Line ("Queue operation overflowed"); when XRef_Table_Full => ScreenEcho.Put_Line ("Cross-references table full"); when Invalid_Index => ScreenEcho.Put_Line ("Invalid index into container"); -- Add additional errors here... when Other_Internal_Error => ScreenEcho.Put_Line ("Other internal error"); end case; if Msg /= "" then ScreenEcho.Put_String ("* "); ScreenEcho.Put_Line (Msg); end if; end Display_Box; procedure Fatal_Error (Sys_Err : in Sys_Err_Type; Msg : in String) is --# hide Fatal_Error; Error_Kind : Error_Kinds; begin Error_Kind := SETEK (Sys_Err); case Sys_Err is when VCG_Graph_Size_Exceeded | VCG_Heap_Is_Exhausted => -- Following SEPR 2272, these are both caught and handled using -- semantic warning 409 in VCG.Generate_VCs_Local -- and no longer terminate the Examiner. -- Therefore, no need for a display box here unless -debug -- is active. if CommandLineData.Content.Debug.Enabled then Display_Box (Sys_Err, Msg); Display_Cause (Error_Kind); end if; when others => Display_Box (Sys_Err, Msg); Display_Cause (Error_Kind); end case; Stop_Program (Error_Kind); end Fatal_Error; procedure RT_Assert (C : in Boolean; Sys_Err : in Sys_Err_Type; Msg : in String) is --# hide RT_Assert; begin if not C then Fatal_Error (Sys_Err, Msg); end if; end RT_Assert; procedure RT_Warning (C : in Boolean; Msg : in String) is --# hide RT_Warning; begin if not C then -- It would be great to put out a proper source file/position here, -- but that would need a circular inherit with LexTokenManager, so not -- possible. We might also be in a position where there is no -- source position to report. -- -- BUT..report "examiner.adb:1:1: " here to make sure that the GPS -- or GNATBench pick it up. ScreenEcho.Put_String ("examiner.adb:1:1: Internal warning. "); ScreenEcho.Put_String (Msg); ScreenEcho.Put_String (". Validity of analysis is not affected, but please report this "); ScreenEcho.Put_String ("matter via "); ScreenEcho.Put_Line (Version.Toolset_Support_Line2); end if; end RT_Warning; end SystemErrors; spark-2012.0.deb/examiner/sem-needs_synthetic_dependency.adb0000644000175000017500000000267011753202336023076 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3.
If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) function Needs_Synthetic_Dependency (Proc_Task_Or_Entry : Dictionary.Symbol) return Boolean is begin return CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83 and then (CommandLineData.Content.Flow_Option = CommandLineData.Data_Flow or else (CommandLineData.Content.Flow_Option = CommandLineData.Auto_Flow and then not Dictionary.GetHasDerivesAnnotation (Proc_Task_Or_Entry))); end Needs_Synthetic_Dependency; spark-2012.0.deb/examiner/errorhandler-conversions-tostring-unconddependency.adb0000644000175000017500000001717011753202336027156 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (ErrorHandler.Conversions.ToString) procedure UncondDependency (Err_Num : in Error_Types.NumericError; With_Explanation : in Boolean; E_Str : in out E_Strings.T) is Err_Type : ErrorHandler.Full_Depend_Err_Type; procedure UncondDependencyExpl (E_Str : in out E_Strings.T) --# global in Err_Type; --# derives E_Str from *, --# Err_Type; is separate; -- Note that the parameter names for this subunit are chosen to make it as easy as -- possible to auto-generate the subunit from this, its parent, file. The -- generation requires copying the case statement below, stripping out the -- current Append'Thing' statements and adding an Append_String for the -- explanatory text that is delineated by --! comments. procedure Append_Explanation --# global in Err_Type; --# in With_Explanation; --# in out E_Str; --# derives E_Str from *, --# Err_Type, --# With_Explanation; is Explanation_String : E_Strings.T := E_Strings.Empty_String; begin if With_Explanation then -- we need to at least look for an explanation UncondDependencyExpl (E_Str => Explanation_String); if E_Strings.Get_Length (E_Str => Explanation_String) > 0 then -- there actually is one E_Strings.Append_String (E_Str => E_Str, Str => ErrorHandler.Explanation_Prefix); E_Strings.Append_Examiner_String (E_Str1 => E_Str, E_Str2 => Explanation_String); E_Strings.Append_String (E_Str => E_Str, Str => ErrorHandler.Explanation_Postfix); end if; end if; end Append_Explanation; begin Err_Type := ErrorHandler.Dependency_Err_Type'Val (Err_Num.ErrorNum - Error_Types.UncondDependencyErrorOffset); case Err_Type is -- HTML Directives --! <"flow-"> --! <"!!! Flow Error : "><" : "> when ErrorHandler.Not_Used_New => --! 
50 Append_Export_Var (E_Str, Err_Num.Name2, Err_Num.Scope, True); E_Strings.Append_String (E_Str => E_Str, Str => " is not derived from the imported value(s) of "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! The item before "is not derived ..." is an export or function return value and the item(s) --! after are imports of the subprogram. The message indicates that a dependency, stated in the dependency --! relation (derives annotation) or implied by the function signature is not present in the code. --! The absence of a stated dependency --! is always an error in either code or annotation. when ErrorHandler.Not_Used => --! 50 E_Strings.Append_String (E_Str => E_Str, Str => "The imported value of "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not used in the derivation of "); Append_Export_Var (E_Str, Err_Num.Name2, Err_Num.Scope, False); --! The variable XXX, which appears in the dependency relation of a --! procedure subprogram, as an import from which the export YYY is derived, --! is not used in the code for that purpose. YYY may be a function return value. --! This version of the message has been retained for backward compatibility. when ErrorHandler.Not_Used_Continue => E_Strings.Append_String (E_Str => E_Str, Str => ", "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); when ErrorHandler.Ineff_Init => --! 53 E_Strings.Append_String (E_Str => E_Str, Str => "The package initialization of "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is ineffective"); --! Here XXX is an own variable of a package, initialized in the package --! initialization. --! The message states that XXX is updated elsewhere, before being read. when ErrorHandler.Ineff_Local_Init => --! 54 E_Strings.Append_String (E_Str => E_Str, Str => "The initialization at declaration of "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is ineffective"); --! Issued if the value assigned to a variable at declaration cannot --! affect the final value of any exported variable of the subprogram in --! which it occurs because, for example, it is overwritten before it is used. when ErrorHandler.Policy_Violation => --! 57 E_Strings.Append_String (E_Str => E_Str, Str => "Information flow from "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " to "); Append_Export_Var (E_Str, Err_Num.Name2, Err_Num.Scope, True); E_Strings.Append_String (E_Str => E_Str, Str => " violates the selected information flow policy"); --! Issued if safety or security policy checking is enabled and the specified --! dependency relation contains a relationship in which the flow of information --! from state or input to state or output violates the selected policy. 
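            -- Illustrative note (not part of the original sources): for the
            -- Policy_Violation case above, the assembled message takes the form
            --
            --    "Information flow from X to Y violates the selected
            --     information flow policy."
            --
            -- where X and Y stand for the names appended from Err_Num.Name1 and
            -- Err_Num.Name2 (the names used here are placeholders); the closing
            -- full stop is added after the case statement, once any requested
            -- explanation text has been appended.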
when others => E_Strings.Append_String (E_Str => E_Str, Str => "UNKNOWN ERROR NUMBER PASSED TO Uncond_Dependency"); end case; Append_Explanation; if Err_Type /= ErrorHandler.Not_Used_Continue and Err_Type /= ErrorHandler.Not_Used_New then E_Strings.Append_String (E_Str => E_Str, Str => "."); end if; end UncondDependency; spark-2012.0.deb/examiner/configfile.adb0000644000175000017500000100762311753202335017036 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Characters.Handling; with Casing; with CommandLineData; with ContextManager; with Dictionary; with ErrorHandler; with Error_Types; with ExaminerConstants; with E_Strings; with FileSystem; with LexTokenLists; with LexTokenManager; with Maths; with ScreenEcho; with SparkLex; with SP_Expected_Symbols; with SP_Symbols; with XMLReport; use type CommandLineData.Language_Profiles; use type ErrorHandler.Error_Level; use type LexTokenManager.Str_Comp_Result; use type Maths.ErrorCode; use type Maths.Value; use type SP_Symbols.SP_Symbol; package body ConfigFile --# own State is The_Error_Context; is The_Error_Context : ErrorHandler.Error_Contexts := ErrorHandler.Null_Error_Context; procedure Read_Config_File (Opened_OK : out Boolean; No_Errors : out Boolean) --# global in CommandLineData.Content; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# out The_Error_Context; --# derives Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# No_Errors, --# SPARK_IO.File_Sys, --# The_Error_Context from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# Opened_OK from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys & --# SparkLex.Curr_Line from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys; is File_Opened_OK : Boolean; File_Read_OK : Boolean; Local_Config_File : SPARK_IO.File_Type; Saved_Error_Context : ErrorHandler.Error_Contexts; procedure Open_File (The_File : out SPARK_IO.File_Type; File_Ok : out Boolean) --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives File_Ok, --# SPARK_IO.File_Sys, --# The_File from CommandLineData.Content, --# SPARK_IO.File_Sys; is File_Name : E_Strings.T; File_Spec_Status : FileSystem.Typ_File_Spec_Status; File_Status : SPARK_IO.File_Status; Local_File : SPARK_IO.File_Type := SPARK_IO.Null_File; begin --# accept F, 
10, File_Spec_Status, "File_Spec_Status unused here"; FileSystem.Find_Full_File_Name (File_Spec => CommandLineData.Content.Target_Config_File, File_Status => File_Spec_Status, Full_File_Name => File_Name); --# end accept; E_Strings.Open (File => Local_File, Mode_Of_File => SPARK_IO.In_File, Name_Of_File => File_Name, Form_Of_File => "", Status => File_Status); if File_Status = SPARK_IO.Ok then File_Ok := True; else File_Ok := False; ScreenEcho.Put_String ("Cannot open file "); if CommandLineData.Content.Plain_Output then ScreenEcho.Put_ExaminerLine (FileSystem.Just_File (Fn => File_Name, Ext => True)); else ScreenEcho.Put_ExaminerLine (File_Name); end if; end if; The_File := Local_File; --# accept F, 33, File_Spec_Status, "File_Spec_Status unused here"; end Open_File; procedure Close_File (The_File : in out SPARK_IO.File_Type) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys, --# The_File from *, --# The_File; is File_Status : SPARK_IO.File_Status; begin --# accept F, 10, File_Status, "File_Status unused here"; SPARK_IO.Close (The_File, File_Status); --# accept F, 33, File_Status, "File_Status unused here"; end Close_File; procedure Process_Config_File (The_Config_File : in SPARK_IO.File_Type; Overall_Status : out Boolean) --# global in CommandLineData.Content; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# out SparkLex.Curr_Line; --# derives Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Overall_Status, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# The_Config_File; is type Declaration_Type is ( Dec_Named_Integer, Dec_Named_Real, Dec_Integer_Subtype, Dec_Integer_Type, Dec_Floating_Point_Type, Dec_Private, Dec_Typed_Constant); -- Depth of child package hierarchy that we support type Child_Package_Depth is range 1 .. 2; --# assert Child_Package_Depth'Base is Short_Short_Integer; -- for GNAT type Package_Hierarchy is array (Child_Package_Depth) of LexTokenManager.Lex_Value; type Declaration is record My_Type : Declaration_Type; Enc_Package : Package_Hierarchy; Name : LexTokenManager.Lex_Value; Base_Type : LexTokenManager.Lex_Value; Low_Bound : Maths.Value; High_Bound : Maths.Value; Num_Digits : Maths.Value; -- The value of a declartion can be numeric or an identifier. -- One of the following two fields is well-defined, depending on the -- value of My_Type Value : Maths.Value; Value_Str : LexTokenManager.Lex_Value; end record; type Possible_Identifier_Range is range 1 .. 19; --# assert Possible_Identifier_Range'Base is Short_Short_Integer; -- for GNAT type Identifier_Strings is array (Possible_Identifier_Range) of LexTokenLists.Lists; -- acceptable packages type Library_Package_Range is range 1 .. 3; --# assert Library_Package_Range'Base is Short_Short_Integer; -- for GNAT type Child_Package_Range is range 1 .. 
2; --# assert Child_Package_Range'Base is Short_Short_Integer; -- for GNAT type Library_Package_Strings is array (Library_Package_Range) of LexTokenManager.Lex_String; type Child_Package_Strings is array (Child_Package_Range) of LexTokenManager.Lex_String; type Syntax_Error_Types is ( Exp_Symbol, Exp_Defn, Exp_Digits_Range, Exp_Minus_Real, Exp_Minus_Integer, Exp_Integer_Real); type Syntax_Errors is record The_Type : Syntax_Error_Types; The_Symbol : SP_Symbols.SP_Terminal; Expected_Symbol : SP_Symbols.SP_Terminal; The_Lex_Val : LexTokenManager.Lex_Value; end record; Package_System : constant Library_Package_Range := 2; -- ===================================================================== -- = LOCAL STATE -- ===================================================================== Possible_Identifiers : Identifier_Strings := Identifier_Strings'(others => LexTokenLists.Null_List); Possible_Library_Packages : Library_Package_Strings; Possible_Child_Packages : Child_Package_Strings; ------------------- -- Pseudo-constants ------------------- Predefined_Scope : Dictionary.Scopes; ------------------------ -- Important state items ------------------------ Current_Scope : Dictionary.Scopes; Valid_Package : Boolean; Current_Declaration : Declaration; The_Token : SP_Symbols.SP_Terminal; The_Lex_Val : LexTokenManager.Lex_Value; Errors_Occurred : Boolean; -- Flag to reparse current lexeme (after parser state change) Lookahead : Boolean; -------------------------------------------- -- Integer expression parsing and evaluation -------------------------------------------- Unary_Minus : Boolean; Int_Subtract : Boolean; Int_Add : Boolean; Int_Exponent : Boolean; Int_Val : Maths.Value; Int_Exponent_Val : Maths.Value; Int_Add_Sub_Val : Maths.Value; Expr_Int_Value : Maths.Value; --------------------------------------- -- Parsing and basic semantics checking --------------------------------------- type Parser_States is ( Config_Defn_Start, Next_Config_Defn_Start, Defn, Type_Unknown, Type_Private, Type_FP, Type_Int, Subtype_Int, Expr_Int, Const_Unknown, Config_Defn_End, Found_Syntax_Error); subtype Parser_Stages is Positive range 1 .. 
10; Parser_State : Parser_States; Parser_Stage : Parser_Stages; Saved_Parser_State : Parser_States; Saved_Parser_Stage : Parser_Stages; Next_Expected_State : Parser_States; Parsing_Ends : Boolean; Package_Full_Name, Package_End_Name : E_Strings.T := E_Strings.Empty_String; Package_Name_Depth : Child_Package_Depth := 1; Any_Priority_Sym : Dictionary.Symbol := Dictionary.NullSymbol; Priority_Sym : Dictionary.Symbol := Dictionary.NullSymbol; Interrupt_Priority_Sym : Dictionary.Symbol := Dictionary.NullSymbol; Lib_Package_Symbol : Dictionary.Symbol := Dictionary.NullSymbol; ---------------------------------------------------------- -- Semantic check on validity of 'new' package declaration ---------------------------------------------------------- procedure State_Machine_Initialise --# global in Dictionary.Dict; --# in out Possible_Identifiers; --# out Current_Declaration; --# out Current_Scope; --# out Errors_Occurred; --# out Expr_Int_Value; --# out Int_Add; --# out Int_Add_Sub_Val; --# out Int_Exponent; --# out Int_Exponent_Val; --# out Int_Subtract; --# out Int_Val; --# out Lookahead; --# out Next_Expected_State; --# out Parser_Stage; --# out Parser_State; --# out Parsing_Ends; --# out Possible_Child_Packages; --# out Possible_Library_Packages; --# out Predefined_Scope; --# out Saved_Parser_Stage; --# out Saved_Parser_State; --# out The_Lex_Val; --# out The_Token; --# out Unary_Minus; --# out Valid_Package; --# derives Current_Declaration, --# Errors_Occurred, --# Expr_Int_Value, --# Int_Add, --# Int_Add_Sub_Val, --# Int_Exponent, --# Int_Exponent_Val, --# Int_Subtract, --# Int_Val, --# Lookahead, --# Next_Expected_State, --# Parser_Stage, --# Parser_State, --# Parsing_Ends, --# Possible_Child_Packages, --# Possible_Library_Packages, --# Saved_Parser_Stage, --# Saved_Parser_State, --# The_Lex_Val, --# The_Token, --# Unary_Minus, --# Valid_Package from & --# Current_Scope, --# Predefined_Scope from Dictionary.Dict & --# Possible_Identifiers from *; is begin -- integers LexTokenLists.Append (List => Possible_Identifiers (1), Item => LexTokenManager.System_Token); LexTokenLists.Append (List => Possible_Identifiers (1), Item => LexTokenManager.Min_Int_Token); LexTokenLists.Append (List => Possible_Identifiers (2), Item => LexTokenManager.System_Token); LexTokenLists.Append (List => Possible_Identifiers (2), Item => LexTokenManager.Max_Int_Token); LexTokenLists.Append (List => Possible_Identifiers (3), Item => LexTokenManager.System_Token); LexTokenLists.Append (List => Possible_Identifiers (3), Item => LexTokenManager.Max_Binary_Modulus_Token); LexTokenLists.Append (List => Possible_Identifiers (4), Item => LexTokenManager.System_Token); LexTokenLists.Append (List => Possible_Identifiers (4), Item => LexTokenManager.Max_Digits_Token); LexTokenLists.Append (List => Possible_Identifiers (5), Item => LexTokenManager.System_Token); LexTokenLists.Append (List => Possible_Identifiers (5), Item => LexTokenManager.Max_Base_Digits_Token); LexTokenLists.Append (List => Possible_Identifiers (6), Item => LexTokenManager.System_Token); LexTokenLists.Append (List => Possible_Identifiers (6), Item => LexTokenManager.Max_Mantissa_Token); LexTokenLists.Append (List => Possible_Identifiers (7), Item => LexTokenManager.System_Token); LexTokenLists.Append (List => Possible_Identifiers (7), Item => LexTokenManager.Priority_Last_Token); LexTokenLists.Append (List => Possible_Identifiers (8), Item => LexTokenManager.System_Token); LexTokenLists.Append (List => Possible_Identifiers (8), Item => 
LexTokenManager.Storage_Unit_Token); LexTokenLists.Append (List => Possible_Identifiers (9), Item => LexTokenManager.System_Token); LexTokenLists.Append (List => Possible_Identifiers (9), Item => LexTokenManager.Word_Size_Token); -- reals LexTokenLists.Append (List => Possible_Identifiers (10), Item => LexTokenManager.System_Token); LexTokenLists.Append (List => Possible_Identifiers (10), Item => LexTokenManager.Fine_Delta_Token); -- subtypes LexTokenLists.Append (List => Possible_Identifiers (11), Item => LexTokenManager.System_Token); LexTokenLists.Append (List => Possible_Identifiers (11), Item => LexTokenManager.Any_Priority_Token); LexTokenLists.Append (List => Possible_Identifiers (12), Item => LexTokenManager.System_Token); LexTokenLists.Append (List => Possible_Identifiers (12), Item => LexTokenManager.Priority_Token); LexTokenLists.Append (List => Possible_Identifiers (13), Item => LexTokenManager.System_Token); LexTokenLists.Append (List => Possible_Identifiers (13), Item => LexTokenManager.Interrupt_Priority_Token); -- types LexTokenLists.Append (List => Possible_Identifiers (14), Item => LexTokenManager.Standard_Token); LexTokenLists.Append (List => Possible_Identifiers (14), Item => LexTokenManager.Integer_Token); LexTokenLists.Append (List => Possible_Identifiers (15), Item => LexTokenManager.Standard_Token); LexTokenLists.Append (List => Possible_Identifiers (15), Item => LexTokenManager.Float_Token); -- private type LexTokenLists.Append (List => Possible_Identifiers (16), Item => LexTokenManager.System_Token); LexTokenLists.Append (List => Possible_Identifiers (16), Item => LexTokenManager.Address_Token); -- Ravenscar type LexTokenLists.Append (List => Possible_Identifiers (17), Item => LexTokenManager.Ada_Token); LexTokenLists.Append (List => Possible_Identifiers (17), Item => LexTokenManager.Real_Time_Token); LexTokenLists.Append (List => Possible_Identifiers (17), Item => LexTokenManager.Seconds_Count_Token); LexTokenLists.Append (List => Possible_Identifiers (18), Item => LexTokenManager.Ada_Token); LexTokenLists.Append (List => Possible_Identifiers (18), Item => LexTokenManager.Interrupts_Token); LexTokenLists.Append (List => Possible_Identifiers (18), Item => LexTokenManager.Interrupt_ID_Token); -- typed constants LexTokenLists.Append (List => Possible_Identifiers (19), Item => LexTokenManager.System_Token); LexTokenLists.Append (List => Possible_Identifiers (19), Item => LexTokenManager.Default_Bit_Order_Token); Possible_Library_Packages := Library_Package_Strings' (1 => LexTokenManager.Standard_Token, 2 => LexTokenManager.System_Token, 3 => LexTokenManager.Ada_Token); Possible_Child_Packages := Child_Package_Strings'(1 => LexTokenManager.Real_Time_Token, 2 => LexTokenManager.Interrupts_Token); Predefined_Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Dictionary.GetPredefinedPackageStandard); Current_Scope := Predefined_Scope; The_Lex_Val := LexTokenManager.Lex_Value' (Position => LexTokenManager.Null_Token_Position, Token_Str => LexTokenManager.Null_String); Current_Declaration := Declaration' (Name => The_Lex_Val, My_Type => Declaration_Type'First, Enc_Package => Package_Hierarchy'(Child_Package_Depth => The_Lex_Val), Base_Type => The_Lex_Val, Low_Bound => Maths.NoValue, High_Bound => Maths.NoValue, Num_Digits => Maths.NoValue, Value => Maths.NoValue, Value_Str => The_Lex_Val); The_Token := SP_Symbols.SPDEFAULT; Unary_Minus := False; Int_Subtract := False; Int_Add := False; Int_Exponent := False; Int_Val := Maths.NoValue; 
Int_Exponent_Val := Maths.NoValue; Int_Add_Sub_Val := Maths.NoValue; Expr_Int_Value := Maths.NoValue; Lookahead := False; Parsing_Ends := False; Valid_Package := False; Saved_Parser_State := Parser_States'First; Saved_Parser_Stage := Parser_Stages'First; Next_Expected_State := Parser_States'First; Parser_State := Parser_States'First; Parser_Stage := Parser_Stages'First; Errors_Occurred := False; end State_Machine_Initialise; procedure State_Machine_Iterate --# global in CommandLineData.Content; --# in Possible_Child_Packages; --# in Possible_Identifiers; --# in Possible_Library_Packages; --# in Predefined_Scope; --# in The_Config_File; --# in out Any_Priority_Sym; --# in out Current_Declaration; --# in out Current_Scope; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Errors_Occurred; --# in out Expr_Int_Value; --# in out Interrupt_Priority_Sym; --# in out Int_Add; --# in out Int_Add_Sub_Val; --# in out Int_Exponent; --# in out Int_Exponent_Val; --# in out Int_Subtract; --# in out Int_Val; --# in out LexTokenManager.State; --# in out Lib_Package_Symbol; --# in out Lookahead; --# in out Next_Expected_State; --# in out Package_End_Name; --# in out Package_Full_Name; --# in out Package_Name_Depth; --# in out Parser_Stage; --# in out Parser_State; --# in out Priority_Sym; --# in out Saved_Parser_Stage; --# in out Saved_Parser_State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# in out The_Lex_Val; --# in out The_Token; --# in out Unary_Minus; --# in out Valid_Package; --# derives Any_Priority_Sym, --# Interrupt_Priority_Sym, --# Priority_Sym from Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Expr_Int_Value, --# Interrupt_Priority_Sym, --# LexTokenManager.State, --# Lookahead, --# Package_Full_Name, --# Parser_Stage, --# Parser_State, --# Possible_Identifiers, --# Priority_Sym, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Config_File, --# Valid_Package & --# Current_Declaration from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Expr_Int_Value, --# LexTokenManager.State, --# Lookahead, --# Package_Name_Depth, --# Parser_Stage, --# Parser_State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Config_File, --# The_Lex_Val, --# The_Token, --# Unary_Minus & --# Current_Scope from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lib_Package_Symbol, --# Lookahead, --# Package_Name_Depth, --# Parser_Stage, --# Parser_State, --# Possible_Child_Packages, --# Possible_Library_Packages, --# Predefined_Scope, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Config_File, --# The_Lex_Val, --# The_Token & --# Dictionary.Dict from *, --# Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Current_Scope, --# ErrorHandler.Error_Context, --# Expr_Int_Value, --# Interrupt_Priority_Sym, --# LexTokenManager.State, --# Lookahead, --# Package_Full_Name, --# Package_Name_Depth, --# Parser_Stage, --# Parser_State, --# Possible_Identifiers, --# Possible_Library_Packages, --# Priority_Sym, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Config_File, --# The_Lex_Val, --# The_Token, --# Valid_Package & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# 
Expr_Int_Value, --# Interrupt_Priority_Sym, --# Int_Add, --# Int_Add_Sub_Val, --# Int_Exponent, --# Int_Exponent_Val, --# Int_Subtract, --# Int_Val, --# LexTokenManager.State, --# Lib_Package_Symbol, --# Lookahead, --# Package_End_Name, --# Package_Full_Name, --# Package_Name_Depth, --# Parser_Stage, --# Parser_State, --# Possible_Child_Packages, --# Possible_Identifiers, --# Possible_Library_Packages, --# Priority_Sym, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Config_File, --# The_Lex_Val, --# The_Token, --# Unary_Minus, --# Valid_Package & --# Errors_Occurred from *, --# Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Expr_Int_Value, --# Interrupt_Priority_Sym, --# LexTokenManager.State, --# Lib_Package_Symbol, --# Lookahead, --# Package_End_Name, --# Package_Full_Name, --# Package_Name_Depth, --# Parser_Stage, --# Parser_State, --# Possible_Child_Packages, --# Possible_Identifiers, --# Possible_Library_Packages, --# Priority_Sym, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Config_File, --# The_Lex_Val, --# The_Token, --# Unary_Minus, --# Valid_Package & --# Expr_Int_Value from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Int_Add, --# Int_Add_Sub_Val, --# Int_Exponent, --# Int_Exponent_Val, --# Int_Subtract, --# Int_Val, --# LexTokenManager.State, --# Lookahead, --# Parser_Stage, --# Parser_State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Config_File, --# The_Token, --# Unary_Minus & --# Int_Add, --# Int_Exponent, --# Int_Subtract, --# Lookahead, --# Package_Name_Depth, --# Saved_Parser_Stage, --# Saved_Parser_State, --# Unary_Minus from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lookahead, --# Parser_Stage, --# Parser_State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Config_File, --# The_Token & --# Int_Add_Sub_Val, --# Int_Val, --# Package_End_Name from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lookahead, --# Parser_Stage, --# Parser_State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Config_File, --# The_Lex_Val, --# The_Token & --# Int_Exponent_Val from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lookahead, --# Parser_Stage, --# Parser_State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Config_File, --# The_Lex_Val & --# LexTokenManager.State from *, --# Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Expr_Int_Value, --# Interrupt_Priority_Sym, --# Lookahead, --# Package_Full_Name, --# Package_Name_Depth, --# Parser_Stage, --# Parser_State, --# Possible_Identifiers, --# Priority_Sym, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Config_File, --# The_Lex_Val, --# The_Token, --# Unary_Minus, --# Valid_Package & --# Lib_Package_Symbol from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lookahead, --# Package_Name_Depth, --# Parser_Stage, --# Parser_State, --# Possible_Library_Packages, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Config_File, --# The_Lex_Val, --# The_Token & --# Next_Expected_State from *, --# CommandLineData.Content, --# Dictionary.Dict, --# 
ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lookahead, --# Package_Name_Depth, --# Parser_Stage, --# Parser_State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Config_File, --# The_Token, --# Unary_Minus & --# Package_Full_Name from *, --# CommandLineData.Content, --# Current_Declaration, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lookahead, --# Package_Name_Depth, --# Parser_Stage, --# Parser_State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Config_File, --# The_Token & --# Parser_Stage from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lookahead, --# Package_Name_Depth, --# Parser_State, --# Saved_Parser_Stage, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Config_File, --# The_Token, --# Unary_Minus & --# Parser_State from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lookahead, --# Next_Expected_State, --# Package_Name_Depth, --# Parser_Stage, --# Saved_Parser_State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Config_File, --# The_Token, --# Unary_Minus & --# SparkLex.Curr_Line, --# The_Lex_Val, --# The_Token from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lookahead, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Config_File & --# Valid_Package from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lib_Package_Symbol, --# Lookahead, --# Package_Name_Depth, --# Parser_Stage, --# Parser_State, --# Possible_Child_Packages, --# Possible_Library_Packages, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Config_File, --# The_Lex_Val, --# The_Token; is Unwanted_Punct_Token : Boolean; procedure Next_Stage --# global in out Parser_Stage; --# derives Parser_Stage from *; is begin Parser_Stage := Parser_Stage + 1; end Next_Stage; procedure New_State (The_Stage : in Parser_States) --# global out Parser_Stage; --# out Parser_State; --# derives Parser_Stage from & --# Parser_State from The_Stage; is begin Parser_Stage := 1; Parser_State := The_Stage; end New_State; procedure Guarded_Literal_To_Value (Lex : in LexTokenManager.Lex_Value; Val : out Maths.Value) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Lex, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# Val from Lex, --# LexTokenManager.State; is Local_Error_Code : Maths.ErrorCode; begin Maths.LiteralToValue (Lex.Token_Str, Val, Local_Error_Code); if Local_Error_Code /= Maths.NoError then ErrorHandler.Semantic_Warning (Err_Num => 200, Position => Lex.Position, Id_Str => Lex.Token_Str); end if; end Guarded_Literal_To_Value; procedure Raise_Syntax_Error (The_Error : in Syntax_Errors) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out Parser_State; --# in out SPARK_IO.File_Sys; --# out Errors_Occurred; --# out Next_Expected_State; --# out Parser_Stage; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# 
LexTokenManager.State, --# SPARK_IO.File_Sys, --# The_Error & --# Errors_Occurred, --# Parser_Stage, --# Parser_State from & --# LexTokenManager.State from *, --# The_Error & --# Next_Expected_State from Parser_State; is No_Of_Terminals : SP_Expected_Symbols.SP_Ess_Sym_Range; Terminal_List : SP_Expected_Symbols.SP_Exp_Sym_List := SP_Expected_Symbols.SP_Exp_Sym_List'(others => SP_Symbols.SPDEFAULT); begin case The_Error.The_Type is when Exp_Symbol => No_Of_Terminals := 1; Terminal_List (1) := The_Error.Expected_Symbol; when Exp_Defn => No_Of_Terminals := 3; Terminal_List (1) := SP_Symbols.identifier; Terminal_List (2) := SP_Symbols.RWtype; Terminal_List (3) := SP_Symbols.RWsubtype; when Exp_Digits_Range => No_Of_Terminals := 3; Terminal_List (1) := SP_Symbols.RWdigits; Terminal_List (2) := SP_Symbols.RWrange; Terminal_List (3) := SP_Symbols.RWprivate; when Exp_Minus_Real => No_Of_Terminals := 2; Terminal_List (1) := SP_Symbols.minus; Terminal_List (2) := SP_Symbols.real_number; when Exp_Minus_Integer => No_Of_Terminals := 2; Terminal_List (1) := SP_Symbols.minus; Terminal_List (2) := SP_Symbols.integer_number; when Exp_Integer_Real => No_Of_Terminals := 2; Terminal_List (1) := SP_Symbols.integer_number; Terminal_List (2) := SP_Symbols.real_number; end case; --# assert True; case Parser_State is when Config_Defn_Start | Next_Config_Defn_Start => Next_Expected_State := Parser_State; when others => Next_Expected_State := Defn; end case; New_State (The_Stage => Found_Syntax_Error); ErrorHandler.Syntax_Error (Error_Item => The_Error.The_Lex_Val, Current_Sym => The_Error.The_Symbol, Entry_Symbol => The_Error.The_Symbol, No_Of_Terminals => No_Of_Terminals, No_Of_Non_Terminals => 0, Terminal_List => Terminal_List, Non_Terminal_List => Terminal_List); Errors_Occurred := True; end Raise_Syntax_Error; procedure Raise_Semantic_Error (Err_Num : in Natural; Lex_Val : in LexTokenManager.Lex_Value) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# out Errors_Occurred; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Err_Num, --# LexTokenManager.State, --# Lex_Val, --# SPARK_IO.File_Sys & --# Errors_Occurred from ; is begin ErrorHandler.Semantic_Error (Err_Num => Err_Num, Reference => ErrorHandler.No_Reference, Position => Lex_Val.Position, Id_Str => Lex_Val.Token_Str); Errors_Occurred := True; end Raise_Semantic_Error; ---------------------------------------------- -- Check semantic validity of new declaration, -- and update dictionary as appropriate ---------------------------------------------- procedure Check_And_Process_Declaration (The_Decl : in Declaration) --# global in CommandLineData.Content; --# in Current_Scope; --# in Package_Full_Name; --# in Possible_Identifiers; --# in Valid_Package; --# in out Any_Priority_Sym; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Errors_Occurred; --# in out Interrupt_Priority_Sym; --# in out LexTokenManager.State; --# in out Priority_Sym; --# in out SPARK_IO.File_Sys; --# derives Any_Priority_Sym, --# Dictionary.Dict, --# Errors_Occurred, --# Interrupt_Priority_Sym, --# LexTokenManager.State, --# Priority_Sym from *, --# Any_Priority_Sym, --# CommandLineData.Content, --# Current_Scope, --# Dictionary.Dict, --# Interrupt_Priority_Sym, --# LexTokenManager.State, --# Package_Full_Name, --# Possible_Identifiers, --# 
Priority_Sym, --# The_Decl, --# Valid_Package & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Any_Priority_Sym, --# CommandLineData.Content, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Interrupt_Priority_Sym, --# LexTokenManager.State, --# Package_Full_Name, --# Possible_Identifiers, --# Priority_Sym, --# SPARK_IO.File_Sys, --# The_Decl, --# Valid_Package; is type Failures is ( Not_Valid_Name, Wrong_Type_Found, Empty_Range, No_Failure, Not_Within_Valid_Package, Invalid_Redeclaration, Parent_Type_Does_Not_Exist, Subtype_Range_Mismatch, Parent_Type_Not_Integer, Parent_Type_Not_AP, Priority_Range_Insufficient, Any_Prio_First, Any_Prio_Last, Prio_Mid_Point, Must_Be_Private, Not_Positive_Power_Of_2, Bit_Order_Wrong_Type, Bit_Order_Wrong_Value); type Identifier_Type_Array is array (Possible_Identifier_Range) of Declaration_Type; Identifier_Types : constant Identifier_Type_Array := Identifier_Type_Array' (1 => Dec_Named_Integer, 2 => Dec_Named_Integer, 3 => Dec_Named_Integer, 4 => Dec_Named_Integer, 5 => Dec_Named_Integer, 6 => Dec_Named_Integer, 7 => Dec_Named_Integer, 8 => Dec_Named_Integer, 9 => Dec_Named_Integer, 10 => Dec_Named_Real, 11 => Dec_Integer_Subtype, 12 => Dec_Integer_Subtype, 13 => Dec_Integer_Subtype, 14 => Dec_Integer_Type, 15 => Dec_Floating_Point_Type, 16 => Dec_Private, 17 => Dec_Integer_Type, 18 => Dec_Integer_Type, 19 => Dec_Typed_Constant); Failure_Reason : Failures := No_Failure; Dotted_Name : E_Strings.T; Name_Match : Boolean := False; Match_Type : Declaration_Type; Parent_Type : Dictionary.Symbol; Is_Any_Priority : Boolean := False; Is_Priority : Boolean := False; Is_Interrupt_Priority : Boolean := False; Is_Address : Boolean := False; Is_Max_Binary_Modulus : Boolean := False; Is_Predef_Integer : Boolean := False; Is_Predef_Float : Boolean := False; Is_Seconds_Count : Boolean := False; Is_Interrupt_ID : Boolean := False; Is_Default_Bit_Order : Boolean := False; procedure Match_Identifier_List (ID : in LexTokenLists.Lists; To_Match : in E_Strings.T; Position : in LexTokenManager.Token_Position; Matched : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# ID, --# LexTokenManager.State, --# Position, --# SPARK_IO.File_Sys, --# To_Match & --# Matched from ID, --# LexTokenManager.State, --# To_Match; is Start_Pos : E_Strings.Positions := E_Strings.Positions'First; Str_Length : E_Strings.Lengths := E_Strings.Lengths'First; Str : LexTokenManager.Lex_String := LexTokenManager.Null_String; begin Matched := True; for I in LexTokenLists.Positions range LexTokenLists.Positions'First .. 
LexTokenLists.Get_Length (List => ID) loop Str := LexTokenLists.Get_Element (List => ID, Pos => I); Str_Length := E_Strings.Get_Length (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Str)); Matched := LexTokenManager.Comp_Str_Case_Insensitive (Str => E_Strings.Section (E_Str => To_Match, Start_Pos => Start_Pos, Length => Str_Length), Lex_Str => Str); exit when not Matched; Start_Pos := (Start_Pos + Str_Length) + 1; end loop; Matched := Matched and then Start_Pos - 2 = E_Strings.Get_Length (E_Str => To_Match); if Matched then Start_Pos := (Start_Pos - Str_Length) - 1; Casing.Check_String_Casing (Str => E_Strings.Section (E_Str => To_Match, Start_Pos => Start_Pos, Length => Str_Length), Lex_Str => Str, Position => Position); end if; end Match_Identifier_List; function Ck_Failed return Boolean --# global in Failure_Reason; is begin return Failure_Reason /= No_Failure; end Ck_Failed; procedure Do_Ck_Valid_Package --# global in Valid_Package; --# in out Failure_Reason; --# derives Failure_Reason from *, --# Valid_Package; is begin if not Valid_Package then Failure_Reason := Not_Within_Valid_Package; end if; end Do_Ck_Valid_Package; procedure Do_Ck_Suffix_Name --# global in Dotted_Name; --# in Failure_Reason; --# in out Match_Type; --# in out Name_Match; --# derives Match_Type, --# Name_Match from *, --# Dotted_Name, --# Failure_Reason; is Standard_Prefix : E_Strings.T; Float_Suffix : E_Strings.T; Integer_Suffix : E_Strings.T; function Match_Prefix (Prefix : E_Strings.T; To_Match : E_Strings.T) return Boolean is Matched : Boolean; begin if E_Strings.Get_Length (E_Str => Prefix) > E_Strings.Get_Length (E_Str => To_Match) then Matched := False; else Matched := E_Strings.Eq_String (E_Str1 => Prefix, E_Str2 => E_Strings.Section (E_Str => To_Match, Start_Pos => 1, Length => E_Strings.Get_Length (E_Str => Prefix))); end if; return Matched; end Match_Prefix; function Match_Suffix (Suffix : E_Strings.T; To_Match : E_Strings.T) return Boolean is Matched : Boolean := True; begin if E_Strings.Get_Length (E_Str => Suffix) > E_Strings.Get_Length (E_Str => To_Match) then Matched := False; else for I in Natural range 1 .. 
E_Strings.Get_Length (E_Str => Suffix) loop if Ada.Characters.Handling.To_Lower (Item => E_Strings.Get_Element (E_Str => To_Match, Pos => (E_Strings.Get_Length (E_Str => To_Match) - I) + 1)) /= Ada.Characters.Handling.To_Lower (Item => E_Strings.Get_Element (E_Str => Suffix, Pos => (E_Strings.Get_Length (E_Str => Suffix) - I) + 1)) then Matched := False; end if; exit when not Matched; end loop; end if; return Matched; end Match_Suffix; begin Standard_Prefix := E_Strings.Copy_String (Str => "Standard."); Float_Suffix := E_Strings.Copy_String (Str => "_Float"); Integer_Suffix := E_Strings.Copy_String (Str => "_Integer"); if not Ck_Failed then if Match_Prefix (Prefix => Standard_Prefix, To_Match => Dotted_Name) then if Match_Suffix (Suffix => Integer_Suffix, To_Match => Dotted_Name) then Name_Match := True; Match_Type := Dec_Integer_Type; elsif Match_Suffix (Suffix => Float_Suffix, To_Match => Dotted_Name) then Name_Match := True; Match_Type := Dec_Floating_Point_Type; end if; end if; end if; end Do_Ck_Suffix_Name; procedure Do_Ck_Def_Name_With_Priority_Ck --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in Dotted_Name; --# in LexTokenManager.State; --# in Name_Match; --# in Possible_Identifiers; --# in The_Decl; --# in out ErrorHandler.Error_Context; --# in out Failure_Reason; --# in out Is_Any_Priority; --# in out Is_Interrupt_Priority; --# in out Is_Priority; --# in out Match_Type; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Dotted_Name, --# ErrorHandler.Error_Context, --# Failure_Reason, --# LexTokenManager.State, --# Name_Match, --# Possible_Identifiers, --# SPARK_IO.File_Sys, --# The_Decl & --# Failure_Reason, --# Is_Any_Priority, --# Is_Interrupt_Priority, --# Is_Priority, --# Match_Type from *, --# Dotted_Name, --# Failure_Reason, --# LexTokenManager.State, --# Name_Match, --# Possible_Identifiers; is Any_Priority_Subtype : constant Possible_Identifier_Range := 11; Priority_Subtype : constant Possible_Identifier_Range := 12; Interrupt_Priority_Subtype : constant Possible_Identifier_Range := 13; Local_Name_Match : Boolean := False; Dotted_Name_Match : Boolean; begin if not (Name_Match or Ck_Failed) then Failure_Reason := Not_Valid_Name; -- initially for I in Possible_Identifier_Range loop Match_Identifier_List (ID => Possible_Identifiers (I), To_Match => Dotted_Name, Position => The_Decl.Name.Position, Matched => Dotted_Name_Match); if Dotted_Name_Match then Failure_Reason := No_Failure; Local_Name_Match := False; case I is when Any_Priority_Subtype => Is_Any_Priority := True; when Priority_Subtype => Is_Priority := True; when Interrupt_Priority_Subtype => Is_Interrupt_Priority := True; when others => null; end case; Match_Type := Identifier_Types (I); end if; exit when Local_Name_Match; end loop; end if; end Do_Ck_Def_Name_With_Priority_Ck; procedure Do_Ck_Def_Name --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in Dotted_Name; --# in LexTokenManager.State; --# in Name_Match; --# in Possible_Identifiers; --# in The_Decl; --# in out ErrorHandler.Error_Context; --# in out Failure_Reason; --# in out Is_Address; --# in out Is_Default_Bit_Order; --# in out Is_Interrupt_ID; --# in out Is_Max_Binary_Modulus; --# in out Is_Predef_Float; --# in out Is_Predef_Integer; --# in out Is_Seconds_Count; --# in out Match_Type; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# 
Dictionary.Dict, --# Dotted_Name, --# ErrorHandler.Error_Context, --# Failure_Reason, --# LexTokenManager.State, --# Name_Match, --# Possible_Identifiers, --# SPARK_IO.File_Sys, --# The_Decl & --# Failure_Reason, --# Is_Address, --# Is_Default_Bit_Order, --# Is_Interrupt_ID, --# Is_Max_Binary_Modulus, --# Is_Predef_Float, --# Is_Predef_Integer, --# Is_Seconds_Count, --# Match_Type from *, --# Dotted_Name, --# Failure_Reason, --# LexTokenManager.State, --# Name_Match, --# Possible_Identifiers; is Max_Binary_Modulus : constant Possible_Identifier_Range := 3; Predef_Integer : constant Possible_Identifier_Range := 14; Predef_Float : constant Possible_Identifier_Range := 15; System_Address : constant Possible_Identifier_Range := 16; Seconds_Count : constant Possible_Identifier_Range := 17; Interrupt_ID : constant Possible_Identifier_Range := 18; Default_Bit_Order : constant Possible_Identifier_Range := 19; Dotted_Name_Match : Boolean; begin if not (Name_Match or Ck_Failed) then Failure_Reason := Not_Valid_Name; -- initially for I in Possible_Identifier_Range loop Match_Identifier_List (ID => Possible_Identifiers (I), To_Match => Dotted_Name, Position => The_Decl.Name.Position, Matched => Dotted_Name_Match); if Dotted_Name_Match then Is_Address := I = System_Address; Is_Max_Binary_Modulus := I = Max_Binary_Modulus; Is_Predef_Integer := I = Predef_Integer; Is_Predef_Float := I = Predef_Float; Is_Seconds_Count := I = Seconds_Count; Is_Interrupt_ID := I = Interrupt_ID; Is_Default_Bit_Order := I = Default_Bit_Order; Failure_Reason := No_Failure; Match_Type := Identifier_Types (I); exit; end if; end loop; end if; end Do_Ck_Def_Name; procedure Do_Ck_Existing_Decl --# global in CommandLineData.Content; --# in Current_Scope; --# in Dictionary.Dict; --# in Is_Default_Bit_Order; --# in Is_Interrupt_ID; --# in Is_Seconds_Count; --# in LexTokenManager.State; --# in The_Decl; --# in out Failure_Reason; --# derives Failure_Reason from *, --# CommandLineData.Content, --# Current_Scope, --# Dictionary.Dict, --# Is_Default_Bit_Order, --# Is_Interrupt_ID, --# Is_Seconds_Count, --# LexTokenManager.State, --# The_Decl; is Check_Symbol : Dictionary.Symbol; begin if not Ck_Failed then Check_Symbol := Dictionary.LookupItem (Name => The_Decl.Name.Token_Str, Scope => Current_Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); if not Dictionary.Is_Null_Symbol (Check_Symbol) then -- redeclaration of existing object ... 
but if (Dictionary.IsTypeMark (Check_Symbol) and then (Dictionary.IsPredefinedIntegerType (Check_Symbol) or else Dictionary.IsPredefinedFloatType (Check_Symbol))) or else Is_Default_Bit_Order or else Is_Seconds_Count or else Is_Interrupt_ID then if Is_Default_Bit_Order then if not Dictionary.IsDeferredConstant (Check_Symbol) then Failure_Reason := Invalid_Redeclaration; end if; else -- Must be a numeric constant if not (Maths.HasNoValue (Maths.ValueRep (Dictionary.GetScalarAttributeValue (False, LexTokenManager.First_Token, Check_Symbol)))) then Failure_Reason := Invalid_Redeclaration; end if; end if; else Failure_Reason := Invalid_Redeclaration; end if; end if; end if; end Do_Ck_Existing_Decl; procedure Do_Ck_Type --# global in Match_Type; --# in The_Decl; --# in out Failure_Reason; --# derives Failure_Reason from *, --# Match_Type, --# The_Decl; is begin if not Ck_Failed then if Match_Type /= The_Decl.My_Type then if Match_Type = Dec_Private then Failure_Reason := Must_Be_Private; else Failure_Reason := Wrong_Type_Found; end if; end if; end if; end Do_Ck_Type; procedure Do_Ck_Bounds --# global in The_Decl; --# in out Failure_Reason; --# derives Failure_Reason from *, --# The_Decl; is Temp_Value : Maths.Value; Local_ME : Maths.ErrorCode; begin if not Ck_Failed then --# accept F, 10, Local_ME, "Local_ME unused here"; Maths.Greater (The_Decl.Low_Bound, The_Decl.High_Bound, Temp_Value, Local_ME); --# end accept; -- ineffective assignment to Local_ME expected here -- no possible error; lexical analysis confirms correct typing if Temp_Value = Maths.BoolToValue (True) then Failure_Reason := Empty_Range; end if; end if; --# accept F, 33, Local_ME, "Local_ME unused here"; end Do_Ck_Bounds; procedure Do_Ck_Base_Type --# global in CommandLineData.Content; --# in Current_Scope; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in The_Decl; --# in out ErrorHandler.Error_Context; --# in out Failure_Reason; --# in out Parent_Type; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Failure_Reason, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# The_Decl & --# Failure_Reason, --# Parent_Type from *, --# CommandLineData.Content, --# Current_Scope, --# Dictionary.Dict, --# Failure_Reason, --# LexTokenManager.State, --# The_Decl; is Check_Symbol : Dictionary.Symbol; Temp_Value : Maths.Value; Local_ME : Maths.ErrorCode; begin if not Ck_Failed then Check_Symbol := Dictionary.LookupItem (Name => The_Decl.Base_Type.Token_Str, Scope => Current_Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); if Dictionary.Is_Null_Symbol (Check_Symbol) then -- parent type does not exist Failure_Reason := Parent_Type_Does_Not_Exist; else --# accept F, 10, Local_ME, "Local_ME unused here"; Maths.Lesser (The_Decl.Low_Bound, Maths.ValueRep (Dictionary.GetScalarAttributeValue (False, LexTokenManager.First_Token, Check_Symbol)), Temp_Value, Local_ME); --# end accept; -- ineffective assignment to Local_ME expected here -- no possible error; lexical analysis confirms correct typing if Temp_Value = Maths.BoolToValue (True) then -- type'first less than type'base'first Failure_Reason := Subtype_Range_Mismatch; else --# accept F, 10, Local_ME, "Local_ME unused here"; Maths.Greater (The_Decl.High_Bound, Maths.ValueRep (Dictionary.GetScalarAttributeValue (False, LexTokenManager.Last_Token, Check_Symbol)), Temp_Value, Local_ME); --# end accept; -- ineffective 
assignment to Local_ME expected here -- no possible error; lexical analysis confirms correct typing if Temp_Value = Maths.BoolToValue (True) then -- type'last greater than type'base'last Failure_Reason := Subtype_Range_Mismatch; else Casing.Check_Casing (Lex_Str1 => Dictionary.GetSimpleName (Item => Check_Symbol), Lex_Str2 => The_Decl.Base_Type.Token_Str, Position => The_Decl.Base_Type.Position); Parent_Type := Check_Symbol; end if; end if; end if; end if; --# accept F, 33, Local_ME, "Local_ME unused here"; end Do_Ck_Base_Type; procedure Do_Ck_System_Priority --# global in Any_Priority_Sym; --# in Dictionary.Dict; --# in Interrupt_Priority_Sym; --# in Is_Any_Priority; --# in Is_Interrupt_Priority; --# in Is_Priority; --# in LexTokenManager.State; --# in Parent_Type; --# in Priority_Sym; --# in The_Decl; --# in out Failure_Reason; --# derives Failure_Reason from *, --# Any_Priority_Sym, --# Dictionary.Dict, --# Interrupt_Priority_Sym, --# Is_Any_Priority, --# Is_Interrupt_Priority, --# Is_Priority, --# LexTokenManager.State, --# Parent_Type, --# Priority_Sym, --# The_Decl; is procedure Check_Priority --# global in Any_Priority_Sym; --# in Dictionary.Dict; --# in Interrupt_Priority_Sym; --# in LexTokenManager.State; --# in Parent_Type; --# in The_Decl; --# in out Failure_Reason; --# derives Failure_Reason from *, --# Any_Priority_Sym, --# Dictionary.Dict, --# Interrupt_Priority_Sym, --# LexTokenManager.State, --# Parent_Type, --# The_Decl; is High_Minus_Low : Maths.Value; Last_Plus_One : Maths.Value; Boolean_Condition : Maths.Value; Unwanted_ME : Maths.ErrorCode; begin -- check parent type is Any_Priority if Dictionary.Types_Are_Equal (Left_Symbol => Parent_Type, Right_Symbol => Any_Priority_Sym, Full_Range_Subtype => False) then -- check range of at least 30 values - LRM D.1(26) --# accept F, 10, Unwanted_ME, "Unwanted_ME unused here"; Maths.Subtract (The_Decl.High_Bound, The_Decl.Low_Bound, High_Minus_Low, Unwanted_ME); Maths.GreaterOrEqual (High_Minus_Low, Maths.IntegerToValue (29), Boolean_Condition, Unwanted_ME); --# end accept; -- ineffective assignment to Unwanted_ME expected here -- no possible error; lexical analysis confirms correct typing if Boolean_Condition = Maths.BoolToValue (False) then Failure_Reason := Priority_Range_Insufficient; else -- check Priority'First = Any_Priority'First if Maths.ValueRep (Dictionary.GetScalarAttributeValue (False, LexTokenManager.First_Token, Any_Priority_Sym)) /= The_Decl.Low_Bound then Failure_Reason := Any_Prio_First; else if not Dictionary.Is_Null_Symbol (Interrupt_Priority_Sym) then -- check that Priority'Last + 1 = Interrupt_Priority'First; Last_Plus_One := The_Decl.High_Bound; --# accept F, 10, Unwanted_ME, "Unwanted_ME unused here"; Maths.SuccOp (Last_Plus_One, Unwanted_ME); --# end accept; -- ineffective assignment to Unwanted_ME expected here -- no possible error; lexical analysis confirms correct typing if Maths.ValueRep (Dictionary.GetScalarAttributeValue (False, LexTokenManager.First_Token, Interrupt_Priority_Sym)) /= Last_Plus_One then Failure_Reason := Prio_Mid_Point; end if; end if; end if; end if; else Failure_Reason := Parent_Type_Not_AP; end if; --# accept F, 33, Unwanted_ME, "Unwanted_ME unused here"; end Check_Priority; procedure Check_Interrupt_Priority --# global in Any_Priority_Sym; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Parent_Type; --# in Priority_Sym; --# in The_Decl; --# in out Failure_Reason; --# derives Failure_Reason from *, --# Any_Priority_Sym, --# Dictionary.Dict, --# LexTokenManager.State, 
--# Parent_Type, --# Priority_Sym, --# The_Decl; is Last_Plus_One : Maths.Value; Unwanted_ME : Maths.ErrorCode; begin -- check parent type is Any_Priority if Dictionary.Types_Are_Equal (Left_Symbol => Parent_Type, Right_Symbol => Any_Priority_Sym, Full_Range_Subtype => False) then -- there must be at least one value (LRM D.1(25)), but this is -- checked by the bounds check in the general semantic checking -- for a subtype... -- check Interrupt_Priority'Last = Any_Priority'Last if The_Decl.High_Bound /= Maths.ValueRep (Dictionary.GetScalarAttributeValue (False, LexTokenManager.Last_Token, Any_Priority_Sym)) then -- some other semantic error Failure_Reason := Any_Prio_Last; else if not Dictionary.Is_Null_Symbol (Priority_Sym) then -- check that Priority'Last + 1 = Interrupt_Priority'First; Last_Plus_One := Maths.ValueRep (Dictionary.GetScalarAttributeValue (False, LexTokenManager.Last_Token, Priority_Sym)); --# accept F, 10, Unwanted_ME, "Unwanted_ME unused here"; Maths.SuccOp (Last_Plus_One, Unwanted_ME); --# end accept; -- ineffective assignment to Unwanted_ME expected here -- no possible error; lexical analysis confirms correct typing if The_Decl.Low_Bound /= Last_Plus_One then -- some other semantic error Failure_Reason := Prio_Mid_Point; end if; end if; end if; else Failure_Reason := Parent_Type_Not_AP; end if; --# accept F, 33, Unwanted_ME, "Unwanted_ME unused here"; end Check_Interrupt_Priority; begin if not Ck_Failed then if Is_Any_Priority then -- check parent type is Integer if not Dictionary.IsPredefinedIntegerType (Parent_Type) then Failure_Reason := Parent_Type_Not_Integer; end if; elsif Is_Priority then Check_Priority; elsif Is_Interrupt_Priority then Check_Interrupt_Priority; end if; end if; end Do_Ck_System_Priority; procedure Do_Ck_Special_Cases --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in Is_Default_Bit_Order; --# in Is_Max_Binary_Modulus; --# in LexTokenManager.State; --# in The_Decl; --# in out ErrorHandler.Error_Context; --# in out Failure_Reason; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Failure_Reason, --# Is_Default_Bit_Order, --# Is_Max_Binary_Modulus, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# The_Decl & --# Failure_Reason from *, --# Is_Default_Bit_Order, --# Is_Max_Binary_Modulus, --# LexTokenManager.State, --# The_Decl; is begin if not Ck_Failed then -- Max_Binary_Modulus must be a positive power of 2 if Is_Max_Binary_Modulus then if not Maths.IsAPositivePowerOf2 (The_Decl.Value) then Failure_Reason := Not_Positive_Power_Of_2; end if; elsif Is_Default_Bit_Order then if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => The_Decl.Base_Type.Token_Str, Lex_Str2 => LexTokenManager.Bit_Order_Token) /= LexTokenManager.Str_Eq then Failure_Reason := Bit_Order_Wrong_Type; else Casing.Check_Casing (Lex_Str1 => LexTokenManager.Bit_Order_Token, Lex_Str2 => The_Decl.Base_Type.Token_Str, Position => The_Decl.Base_Type.Position); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => The_Decl.Value_Str.Token_Str, Lex_Str2 => LexTokenManager.Low_Order_First_Token) /= LexTokenManager.Str_Eq and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => The_Decl.Value_Str.Token_Str, Lex_Str2 => LexTokenManager.High_Order_First_Token) /= LexTokenManager.Str_Eq then Failure_Reason := Bit_Order_Wrong_Value; else if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 
=> The_Decl.Value_Str.Token_Str, Lex_Str2 => LexTokenManager.Low_Order_First_Token) = LexTokenManager.Str_Eq then Casing.Check_Casing (Lex_Str1 => LexTokenManager.Low_Order_First_Token, Lex_Str2 => The_Decl.Value_Str.Token_Str, Position => The_Decl.Value_Str.Position); else Casing.Check_Casing (Lex_Str1 => LexTokenManager.High_Order_First_Token, Lex_Str2 => The_Decl.Value_Str.Token_Str, Position => The_Decl.Value_Str.Position); end if; end if; end if; end if; end if; end Do_Ck_Special_Cases; procedure Raise_Errors_And_Update_Dictionary --# global in CommandLineData.Content; --# in Current_Scope; --# in Failure_Reason; --# in Is_Address; --# in Is_Default_Bit_Order; --# in Is_Interrupt_ID; --# in Is_Predef_Float; --# in Is_Predef_Integer; --# in Is_Priority; --# in Is_Seconds_Count; --# in Parent_Type; --# in The_Decl; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Errors_Occurred; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# CommandLineData.Content, --# Current_Scope, --# Failure_Reason, --# Is_Address, --# Is_Default_Bit_Order, --# Is_Interrupt_ID, --# Is_Predef_Float, --# Is_Predef_Integer, --# Is_Priority, --# Is_Seconds_Count, --# LexTokenManager.State, --# Parent_Type, --# The_Decl & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Current_Scope, --# Dictionary.Dict, --# Failure_Reason, --# Is_Address, --# Is_Priority, --# LexTokenManager.State, --# Parent_Type, --# SPARK_IO.File_Sys, --# The_Decl & --# Errors_Occurred from *, --# Failure_Reason & --# LexTokenManager.State from *, --# Failure_Reason, --# Is_Interrupt_ID, --# Is_Predef_Integer, --# Is_Priority, --# Is_Seconds_Count, --# The_Decl & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Failure_Reason, --# Is_Address, --# Is_Interrupt_ID, --# Is_Predef_Float, --# Is_Predef_Integer, --# Is_Priority, --# Is_Seconds_Count, --# LexTokenManager.State, --# Parent_Type, --# The_Decl; is Storage_Rep_Value : LexTokenManager.Lex_String; Storage_Rep_Low_Bound : LexTokenManager.Lex_String; Storage_Rep_High_Bound : LexTokenManager.Lex_String; Storage_Rep_Digits : LexTokenManager.Lex_String; Integer_Type : Dictionary.Symbol; Float_Type : Dictionary.Symbol; Private_Type : Dictionary.Symbol; Lib_Package_Sym : Dictionary.Symbol; Constant_Sym : Dictionary.Symbol; procedure Add_Null_Address --# global in CommandLineData.Content; --# in Current_Scope; --# in LexTokenManager.State; --# in The_Decl; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# CommandLineData.Content, --# Current_Scope, --# LexTokenManager.State, --# The_Decl & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# The_Decl; is The_Constant : Dictionary.Symbol; pragma Unreferenced (The_Constant); begin --# accept Flow, 10, The_Constant, "Expected ineffective assignment to OK"; Dictionary.Add_Deferred_Constant (Name => LexTokenManager.Null_Address_Token, Type_Mark => Dictionary.LookupItem (Name => The_Decl.Name.Token_Str, Scope => Current_Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False), Type_Reference => Dictionary.Location'(Start_Position => The_Decl.Name.Position, End_Position => The_Decl.Name.Position), Comp_Unit => 
ContextManager.NullUnit, Declaration => Dictionary.Location'(Start_Position => The_Decl.Name.Position, End_Position => The_Decl.Name.Position), The_Package => Dictionary.GetRegion (Current_Scope), TheConstant => The_Constant); --# end accept; -- note to indicate implicit definition ErrorHandler.Semantic_Note (Err_Num => 3, Position => The_Decl.Name.Position, Id_Str => The_Decl.Name.Token_Str); --# accept Flow, 33, The_Constant, "Expected to be neither referenced nor exported"; end Add_Null_Address; procedure Add_Default_Priority --# global in CommandLineData.Content; --# in Current_Scope; --# in The_Decl; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# CommandLineData.Content, --# Current_Scope, --# LexTokenManager.State, --# The_Decl & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# The_Decl & --# LexTokenManager.State from *, --# The_Decl; is Temp_Val1, Temp_Val2 : Maths.Value; Unwanted_ME : Maths.ErrorCode; Storage_Rep_Ave : LexTokenManager.Lex_String; The_Constant : Dictionary.Symbol; pragma Unreferenced (The_Constant); begin --# accept Flow, 10, Unwanted_ME, "Unwanted_ME unused here"; Maths.Add (The_Decl.Low_Bound, The_Decl.High_Bound, Temp_Val1, Unwanted_ME); Maths.Divide (Temp_Val1, Maths.IntegerToValue (2), Temp_Val2, Unwanted_ME); --# end accept; Maths.StorageRep (Temp_Val2, Storage_Rep_Ave); --# accept Flow, 10, The_Constant, "Expected ineffective assignment to OK"; Dictionary.Add_Constant_Declaration (Name => LexTokenManager.Default_Priority_Token, Type_Mark => Dictionary.LookupItem (Name => The_Decl.Name.Token_Str, Scope => Current_Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False), Type_Reference => Dictionary.Location'(Start_Position => The_Decl.Name.Position, End_Position => The_Decl.Name.Position), Value => Storage_Rep_Ave, Exp_Is_Wellformed => True, Exp_Node => ExaminerConstants.RefType (0), Static => True, Comp_Unit => ContextManager.NullUnit, Declaration => Dictionary.Location'(Start_Position => The_Decl.Name.Position, End_Position => The_Decl.Name.Position), Scope => Current_Scope, Context => Dictionary.ProgramContext, TheConstant => The_Constant); --# end accept; -- note to indicate implicit definition ErrorHandler.Semantic_Note (Err_Num => 4, Position => The_Decl.Name.Position, Id_Str => The_Decl.Name.Token_Str); --# accept Flow, 33, Unwanted_ME, "Unwanted_ME unused here" & --# Flow, 33, The_Constant, "Expected to be neither referenced nor exported"; end Add_Default_Priority; begin -- Raise_Errors_And_Update_Dictionary case Failure_Reason is when Invalid_Redeclaration => Raise_Semantic_Error (Err_Num => 10, Lex_Val => The_Decl.Name); when Wrong_Type_Found => Raise_Semantic_Error (Err_Num => 42, Lex_Val => The_Decl.Name); when Empty_Range => Raise_Semantic_Error (Err_Num => 409, Lex_Val => The_Decl.Name); when Subtype_Range_Mismatch => Raise_Semantic_Error (Err_Num => 413, Lex_Val => The_Decl.Name); when Not_Valid_Name | Not_Within_Valid_Package => Raise_Semantic_Error (Err_Num => 778, Lex_Val => The_Decl.Name); when Parent_Type_Does_Not_Exist => Raise_Semantic_Error (Err_Num => 63, Lex_Val => The_Decl.Base_Type); when Parent_Type_Not_AP => Raise_Semantic_Error (Err_Num => 771, Lex_Val => The_Decl.Base_Type); when Priority_Range_Insufficient => Raise_Semantic_Error 
(Err_Num => 772, Lex_Val => The_Decl.Base_Type); when Any_Prio_First => Raise_Semantic_Error (Err_Num => 773, Lex_Val => The_Decl.Base_Type); when Any_Prio_Last => Raise_Semantic_Error (Err_Num => 775, Lex_Val => The_Decl.Base_Type); when Prio_Mid_Point => Raise_Semantic_Error (Err_Num => 774, Lex_Val => The_Decl.Base_Type); when Parent_Type_Not_Integer => Raise_Semantic_Error (Err_Num => 777, Lex_Val => The_Decl.Base_Type); when Must_Be_Private => Raise_Semantic_Error (Err_Num => 780, Lex_Val => The_Decl.Name); when Not_Positive_Power_Of_2 => Raise_Semantic_Error (Err_Num => 784, Lex_Val => The_Decl.Name); when Bit_Order_Wrong_Type => Raise_Semantic_Error (Err_Num => 814, Lex_Val => The_Decl.Base_Type); when Bit_Order_Wrong_Value => Raise_Semantic_Error (Err_Num => 815, Lex_Val => The_Decl.Value_Str); when No_Failure => case The_Decl.My_Type is when Dec_Named_Integer => Maths.StorageRep (The_Decl.Value, Storage_Rep_Value); --# accept Flow, 10, Constant_Sym, "Expected ineffective assignment to OK"; Dictionary.Add_Constant_Declaration (Name => The_Decl.Name.Token_Str, Type_Mark => Dictionary.GetUniversalIntegerType, Type_Reference => Dictionary.Null_Location, Value => Storage_Rep_Value, Exp_Is_Wellformed => True, Exp_Node => ExaminerConstants.RefType (0), Static => True, Comp_Unit => ContextManager.NullUnit, Declaration => Dictionary.Location'(Start_Position => The_Decl.Name.Position, End_Position => The_Decl.Name.Position), Scope => Current_Scope, Context => Dictionary.ProgramContext, TheConstant => Constant_Sym); --# end accept; when Dec_Named_Real => Maths.StorageRep (The_Decl.Value, Storage_Rep_Value); --# accept Flow, 10, Constant_Sym, "Expected ineffective assignment to OK"; Dictionary.Add_Constant_Declaration (Name => The_Decl.Name.Token_Str, Type_Mark => Dictionary.GetUniversalRealType, Type_Reference => Dictionary.Null_Location, Value => Storage_Rep_Value, Exp_Is_Wellformed => True, Exp_Node => ExaminerConstants.RefType (0), Static => True, Comp_Unit => ContextManager.NullUnit, Declaration => Dictionary.Location'(Start_Position => The_Decl.Name.Position, End_Position => The_Decl.Name.Position), Scope => Current_Scope, Context => Dictionary.ProgramContext, TheConstant => Constant_Sym); --# end accept; when Dec_Integer_Subtype => Maths.StorageRep (The_Decl.Low_Bound, Storage_Rep_Low_Bound); Maths.StorageRep (The_Decl.High_Bound, Storage_Rep_High_Bound); --# accept F, 10, Integer_Type, "Integer_Type unused here"; Dictionary.Add_Integer_Subtype (Name => The_Decl.Name.Token_Str, Static => True, Parent => Parent_Type, Parent_Reference => Dictionary.Null_Location, Lower => Storage_Rep_Low_Bound, Upper => Storage_Rep_High_Bound, Comp_Unit => ContextManager.NullUnit, Declaration => Dictionary.Location'(Start_Position => The_Decl.Name.Position, End_Position => The_Decl.Name.Position), Scope => Current_Scope, Context => Dictionary.ProgramContext, The_Subtype => Integer_Type); --# end accept; if Is_Priority then Add_Default_Priority; end if; when Dec_Integer_Type => Maths.StorageRep (The_Decl.Low_Bound, Storage_Rep_Low_Bound); Maths.StorageRep (The_Decl.High_Bound, Storage_Rep_High_Bound); if Is_Interrupt_ID or else Is_Seconds_Count then -- An integer type which is pre-declared by -- Dictionary.Initialize, and has no dependent -- subtypes, so simply override the lower and upper bounds Integer_Type := Dictionary.LookupItem (Name => The_Decl.Name.Token_Str, Scope => Current_Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); Dictionary.AdjustTypeLowerBound (TypeMark => 
Integer_Type, NewBound => Storage_Rep_Low_Bound); Dictionary.AdjustTypeUpperBound (TypeMark => Integer_Type, NewBound => Storage_Rep_High_Bound); elsif Is_Predef_Integer then -- Update the bounds for types Standard.Integer, -- Standard.Natural and Standard.Positive in the dictionary Integer_Type := Dictionary.LookupItem (Name => The_Decl.Name.Token_Str, Scope => Current_Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); Dictionary.AdjustTypeLowerBound (TypeMark => Integer_Type, NewBound => Storage_Rep_Low_Bound); Dictionary.AdjustTypeUpperBound (TypeMark => Integer_Type, NewBound => Storage_Rep_High_Bound); Maths.StorageRep (Maths.ZeroInteger, Storage_Rep_Low_Bound); Dictionary.AdjustTypeLowerBound (Dictionary.GetPredefinedNaturalSubtype, Storage_Rep_Low_Bound); Dictionary.AdjustTypeUpperBound (Dictionary.GetPredefinedNaturalSubtype, Storage_Rep_High_Bound); Maths.StorageRep (Maths.OneInteger, Storage_Rep_Low_Bound); Dictionary.AdjustTypeLowerBound (Dictionary.GetPredefinedPositiveSubtype, Storage_Rep_Low_Bound); Dictionary.AdjustTypeUpperBound (Dictionary.GetPredefinedPositiveSubtype, Storage_Rep_High_Bound); else -- A new predefined integer type which _isn't_ -- pre-declared by Dictionary.Initialize. -- For example: Short_Integer or Long_Long_Integer Dictionary.Add_Predef_Integer_Type (Name => The_Decl.Name.Token_Str, Comp_Unit => ContextManager.NullUnit, Declaration => Dictionary.Location'(Start_Position => The_Decl.Name.Position, End_Position => The_Decl.Name.Position), Lower => Storage_Rep_Low_Bound, Upper => Storage_Rep_High_Bound, Scope => Current_Scope, Context => Dictionary.ProgramContext); end if; when Dec_Floating_Point_Type => Maths.StorageRep (The_Decl.Low_Bound, Storage_Rep_Low_Bound); Maths.StorageRep (The_Decl.High_Bound, Storage_Rep_High_Bound); Maths.StorageRep (The_Decl.Num_Digits, Storage_Rep_Digits); if Is_Predef_Float then -- A Floating-point type which is pre-declared by -- Dictionary.Initialize, and has no dependent -- subtypes, so simply override the accuracy, -- lower and upper bounds Float_Type := Dictionary.LookupItem (Name => The_Decl.Name.Token_Str, Scope => Current_Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); Dictionary.AdjustTypeLowerBound (TypeMark => Float_Type, NewBound => Storage_Rep_Low_Bound); Dictionary.AdjustTypeUpperBound (TypeMark => Float_Type, NewBound => Storage_Rep_High_Bound); Dictionary.AdjustTypeErrorBound (TypeMark => Float_Type, NewBound => Storage_Rep_Digits); else -- A Floating-point type, which is _not_ pre-declared -- by Dictionary.Initialize - e.g. 
Short_Float or -- Long_Long_Float Dictionary.Add_Predef_Floating_Point_Type (Name => The_Decl.Name.Token_Str, Comp_Unit => ContextManager.NullUnit, Declaration => Dictionary.Location'(Start_Position => The_Decl.Name.Position, End_Position => The_Decl.Name.Position), Lower => Storage_Rep_Low_Bound, Upper => Storage_Rep_High_Bound, Error_Bound => Storage_Rep_Digits, Scope => Current_Scope, Context => Dictionary.ProgramContext); end if; when Dec_Private => --# accept Flow, 10, Private_Type, "Expected ineffective assignment to OK"; Dictionary.Add_Private_Type (Name => The_Decl.Name.Token_Str, Comp_Unit => ContextManager.NullUnit, Declaration => Dictionary.Location'(Start_Position => The_Decl.Name.Position, End_Position => The_Decl.Name.Position), The_Package => Dictionary.GetRegion (Current_Scope), Is_Limited => False, Is_Tagged_Type => False, Extends => Dictionary.NullSymbol, The_Type => Private_Type); --# end accept; if Is_Address then Add_Null_Address; end if; when Dec_Typed_Constant => if Is_Default_Bit_Order then Lib_Package_Sym := Dictionary.LookupItem (Name => LexTokenManager.System_Token, Scope => Dictionary.GlobalScope, Context => Dictionary.ProgramContext, Full_Package_Name => False); Constant_Sym := Dictionary.LookupItem (Name => The_Decl.Name.Token_Str, Scope => Current_Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); -- In the Dictionary, we need to store the 'Pos of the -- value, so... if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => The_Decl.Value_Str.Token_Str, Lex_Str2 => LexTokenManager.High_Order_First_Token) = LexTokenManager.Str_Eq then Storage_Rep_Value := LexTokenManager.Zero_Value; else Storage_Rep_Value := LexTokenManager.One_Value; end if; Dictionary.Promote_Deferred_To_Full_Constant (Constant_Sym => Constant_Sym, Comp_Unit => ContextManager.NullUnit, Declaration => Dictionary.Location'(Start_Position => The_Decl.Name.Position, End_Position => The_Decl.Name.Position), Value => Storage_Rep_Value, Exp_Node => ExaminerConstants.RefType (0), The_Package => Lib_Package_Sym); end if; end case; end case; --# accept Flow, 33, Private_Type, "Expected Success to be neither referenced nor exported"; end Raise_Errors_And_Update_Dictionary; procedure Update_Priority_Symbols --# global in CommandLineData.Content; --# in Current_Scope; --# in Dictionary.Dict; --# in Failure_Reason; --# in Is_Any_Priority; --# in Is_Interrupt_Priority; --# in Is_Priority; --# in LexTokenManager.State; --# in The_Decl; --# in out Any_Priority_Sym; --# in out Interrupt_Priority_Sym; --# in out Priority_Sym; --# derives Any_Priority_Sym from *, --# CommandLineData.Content, --# Current_Scope, --# Dictionary.Dict, --# Failure_Reason, --# Is_Any_Priority, --# LexTokenManager.State, --# The_Decl & --# Interrupt_Priority_Sym from *, --# CommandLineData.Content, --# Current_Scope, --# Dictionary.Dict, --# Failure_Reason, --# Is_Any_Priority, --# Is_Interrupt_Priority, --# Is_Priority, --# LexTokenManager.State, --# The_Decl & --# Priority_Sym from *, --# CommandLineData.Content, --# Current_Scope, --# Dictionary.Dict, --# Failure_Reason, --# Is_Any_Priority, --# Is_Priority, --# LexTokenManager.State, --# The_Decl; is Temp_Sym : Dictionary.Symbol; begin if not Ck_Failed then Temp_Sym := Dictionary.LookupItem (Name => The_Decl.Name.Token_Str, Scope => Current_Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); if Is_Any_Priority then Any_Priority_Sym := Temp_Sym; elsif Is_Priority then Priority_Sym := Temp_Sym; elsif Is_Interrupt_Priority then 
Interrupt_Priority_Sym := Temp_Sym; end if; end if; end Update_Priority_Symbols; begin -- Check_And_Process_Declaration -- get full name of declared item Dotted_Name := Package_Full_Name; E_Strings.Append_String (E_Str => Dotted_Name, Str => "."); E_Strings.Append_Examiner_String (E_Str1 => Dotted_Name, E_Str2 => LexTokenManager.Lex_String_To_String (Lex_Str => The_Decl.Name.Token_Str)); Parent_Type := Dictionary.NullSymbol; Match_Type := Declaration_Type'Last; -- perform whichever checks are appropriate case The_Decl.My_Type is when Dec_Named_Integer => Do_Ck_Valid_Package; Do_Ck_Def_Name; Do_Ck_Existing_Decl; Do_Ck_Type; Do_Ck_Special_Cases; Raise_Errors_And_Update_Dictionary; when Dec_Named_Real => Do_Ck_Valid_Package; Do_Ck_Def_Name; Do_Ck_Existing_Decl; Do_Ck_Type; Do_Ck_Special_Cases; Raise_Errors_And_Update_Dictionary; when Dec_Integer_Subtype => Do_Ck_Valid_Package; Do_Ck_Def_Name_With_Priority_Ck; Do_Ck_Existing_Decl; Do_Ck_Type; Do_Ck_Bounds; Do_Ck_Base_Type; Do_Ck_System_Priority; Do_Ck_Special_Cases; Raise_Errors_And_Update_Dictionary; Update_Priority_Symbols; when Dec_Integer_Type => Do_Ck_Valid_Package; Do_Ck_Suffix_Name; Do_Ck_Def_Name; Do_Ck_Existing_Decl; Do_Ck_Type; Do_Ck_Bounds; Do_Ck_Special_Cases; Raise_Errors_And_Update_Dictionary; when Dec_Floating_Point_Type => Do_Ck_Valid_Package; Do_Ck_Suffix_Name; Do_Ck_Def_Name; Do_Ck_Existing_Decl; Do_Ck_Type; Do_Ck_Bounds; Do_Ck_Special_Cases; Raise_Errors_And_Update_Dictionary; when Dec_Private => Do_Ck_Valid_Package; Do_Ck_Def_Name; Do_Ck_Existing_Decl; Do_Ck_Type; Do_Ck_Special_Cases; Raise_Errors_And_Update_Dictionary; when Dec_Typed_Constant => Do_Ck_Valid_Package; Do_Ck_Def_Name; Do_Ck_Existing_Decl; Do_Ck_Type; Do_Ck_Special_Cases; Raise_Errors_And_Update_Dictionary; end case; end Check_And_Process_Declaration; procedure Assert_Token (The_Token : in SP_Symbols.SP_Terminal; Should_Be : in SP_Symbols.SP_Terminal) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in The_Lex_Val; --# in out ErrorHandler.Error_Context; --# in out Errors_Occurred; --# in out LexTokenManager.State; --# in out Next_Expected_State; --# in out Parser_Stage; --# in out Parser_State; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Should_Be, --# SPARK_IO.File_Sys, --# The_Lex_Val, --# The_Token & --# Errors_Occurred, --# Parser_Stage, --# Parser_State from *, --# Should_Be, --# The_Token & --# LexTokenManager.State from *, --# Should_Be, --# The_Lex_Val, --# The_Token & --# Next_Expected_State from *, --# Parser_State, --# Should_Be, --# The_Token; is begin if The_Token /= Should_Be then Raise_Syntax_Error (The_Error => Syntax_Errors'(The_Type => Exp_Symbol, The_Symbol => The_Token, Expected_Symbol => Should_Be, The_Lex_Val => The_Lex_Val)); end if; end Assert_Token; procedure Begin_Int_Expr_Parse --# global in Unary_Minus; --# in out Parser_Stage; --# in out Parser_State; --# out Int_Add; --# out Int_Exponent; --# out Int_Subtract; --# out Saved_Parser_Stage; --# out Saved_Parser_State; --# derives Int_Add, --# Int_Exponent, --# Int_Subtract, --# Parser_State from & --# Parser_Stage from Unary_Minus & --# Saved_Parser_Stage from Parser_Stage & --# Saved_Parser_State from Parser_State; is begin Saved_Parser_State := Parser_State; Saved_Parser_Stage := Parser_Stage; New_State (The_Stage => Expr_Int); if Unary_Minus then -- cannot accept initial '-' if already had one 
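-- Skipping a stage here presumably moves the integer-expression sub-parser
-- past its leading-sign stage, since the caller has already consumed the '-'.
-- The enclosing state and stage were saved above so that, once the expression
-- (for example a range bound such as 2**31 - 1) has been evaluated, the outer
-- parse can resume where it left off.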
Next_Stage; end if; Int_Subtract := False; Int_Add := False; Int_Exponent := False; end Begin_Int_Expr_Parse; procedure Do_Config_Defn_Start --# global in CommandLineData.Content; --# in Possible_Child_Packages; --# in Possible_Library_Packages; --# in Predefined_Scope; --# in The_Lex_Val; --# in The_Token; --# in out Current_Declaration; --# in out Current_Scope; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Errors_Occurred; --# in out LexTokenManager.State; --# in out Lib_Package_Symbol; --# in out Lookahead; --# in out Next_Expected_State; --# in out Package_Full_Name; --# in out Package_Name_Depth; --# in out Parser_Stage; --# in out Parser_State; --# in out SPARK_IO.File_Sys; --# in out Valid_Package; --# derives Current_Declaration from *, --# Package_Name_Depth, --# Parser_Stage, --# The_Lex_Val & --# Current_Scope from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Lib_Package_Symbol, --# Package_Name_Depth, --# Parser_Stage, --# Possible_Child_Packages, --# Possible_Library_Packages, --# Predefined_Scope, --# The_Lex_Val, --# The_Token & --# Dictionary.Dict, --# Lib_Package_Symbol from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Package_Name_Depth, --# Parser_Stage, --# Possible_Library_Packages, --# The_Lex_Val, --# The_Token & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Current_Declaration, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lib_Package_Symbol, --# Package_Name_Depth, --# Parser_Stage, --# Possible_Child_Packages, --# Possible_Library_Packages, --# SPARK_IO.File_Sys, --# The_Lex_Val, --# The_Token, --# Valid_Package & --# Errors_Occurred, --# Valid_Package from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Lib_Package_Symbol, --# Package_Name_Depth, --# Parser_Stage, --# Possible_Child_Packages, --# Possible_Library_Packages, --# The_Lex_Val, --# The_Token, --# Valid_Package & --# LexTokenManager.State from *, --# Package_Name_Depth, --# Parser_Stage, --# The_Lex_Val, --# The_Token & --# Lookahead from *, --# Parser_Stage & --# Next_Expected_State, --# Parser_State from *, --# Package_Name_Depth, --# Parser_Stage, --# Parser_State, --# The_Token & --# Package_Full_Name from *, --# Current_Declaration, --# LexTokenManager.State, --# Package_Name_Depth, --# Parser_Stage, --# The_Token & --# Package_Name_Depth, --# Parser_Stage from Package_Name_Depth, --# Parser_Stage, --# The_Token; is procedure Change_To_New_Package (Package_Name : in LexTokenManager.Lex_Value) --# global in CommandLineData.Content; --# in Current_Declaration; --# in LexTokenManager.State; --# in Package_Name_Depth; --# in Possible_Child_Packages; --# in Possible_Library_Packages; --# in Predefined_Scope; --# in out Current_Scope; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Errors_Occurred; --# in out Lib_Package_Symbol; --# in out SPARK_IO.File_Sys; --# in out Valid_Package; --# derives Current_Scope from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Lib_Package_Symbol, --# Package_Name, --# Package_Name_Depth, --# Possible_Child_Packages, --# Possible_Library_Packages, --# Predefined_Scope & --# Dictionary.Dict, --# Lib_Package_Symbol from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Package_Name, --# Package_Name_Depth, --# Possible_Library_Packages & --# 
ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Current_Declaration, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lib_Package_Symbol, --# Package_Name, --# Package_Name_Depth, --# Possible_Child_Packages, --# Possible_Library_Packages, --# SPARK_IO.File_Sys, --# Valid_Package & --# Errors_Occurred, --# Valid_Package from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Lib_Package_Symbol, --# Package_Name, --# Package_Name_Depth, --# Possible_Child_Packages, --# Possible_Library_Packages, --# Valid_Package; is Package_Standard : constant Library_Package_Range := 1; Package_Ada : constant Library_Package_Range := 3; Package_Real_Time : constant Child_Package_Range := 1; Package_Interrupts : constant Child_Package_Range := 2; Name_Match : Boolean := False; Local_Package_Symbol : Dictionary.Symbol; Ada_Package_Symbol : Dictionary.Symbol; Bit_Order_Type_Symbol : Dictionary.Symbol; Unused_Symbol : Dictionary.Symbol; Package_Name_Match : Boolean; procedure Match_Identifier (ID : in LexTokenManager.Lex_String; To_Match : in LexTokenManager.Lex_String; Position : in LexTokenManager.Token_Position; Matched : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# ID, --# LexTokenManager.State, --# Position, --# SPARK_IO.File_Sys, --# To_Match & --# Matched from ID, --# LexTokenManager.State, --# To_Match; is begin Matched := LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => ID, Lex_Str2 => To_Match) = LexTokenManager.Str_Eq; if Matched then Casing.Check_Casing (Lex_Str1 => ID, Lex_Str2 => To_Match, Position => Position); end if; end Match_Identifier; begin -- Change_To_New_Package -- Get the symbol for package Ada now since we need it more than once later. -- For SPARK83 mode, this will return NullSymbol, but that doesn't matter -- since we only use the symbol in code specific to SPARK95 onwards. Ada_Package_Symbol := Dictionary.LookupItem (Name => LexTokenManager.Ada_Token, Scope => Dictionary.GlobalScope, Context => Dictionary.ProofContext, Full_Package_Name => False); case Package_Name_Depth is when 1 => -- Checking Library packages for I in Library_Package_Range loop Match_Identifier (ID => Possible_Library_Packages (I), To_Match => Package_Name.Token_Str, Position => Package_Name.Position, Matched => Package_Name_Match); if Package_Name_Match then Name_Match := True; case I is when Package_Standard => Current_Scope := Predefined_Scope; Lib_Package_Symbol := Dictionary.GetPredefinedPackageStandard; Valid_Package := True; when Package_System => --# accept F, 41, "Stable expression expected here"; if CommandLineData.Content.Language_Profile = CommandLineData.SPARK83 then -- package System disallowed in Spark83 mode -- don't complain about the other declarations here; -- wood for trees etc. 
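-- Whatever the profile, the code below then looks up package System (an
-- existing declaration is reported as error 10) and, if it is new, creates it
-- in the Dictionary together with type Bit_Order, its literals
-- High_Order_First and Low_Order_First, and a deferred constant
-- Default_Bit_Order.  Declaring Default_Bit_Order as deferred presumably
-- allows a Default_Bit_Order declaration later in the configuration file to
-- complete it (see the Dec_Typed_Constant case in
-- Raise_Errors_And_Update_Dictionary above).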
Raise_Semantic_Error (Err_Num => 779, Lex_Val => Package_Name); end if; --# end accept; Lib_Package_Symbol := Dictionary.LookupItem (Name => Package_Name.Token_Str, Scope => Dictionary.GlobalScope, Context => Dictionary.ProofContext, Full_Package_Name => False); if not Dictionary.Is_Null_Symbol (Lib_Package_Symbol) then -- redeclaration of existing package Valid_Package := False; Raise_Semantic_Error (Err_Num => 10, Lex_Val => Package_Name); else -- create package Dictionary.Add_Package (Name => Package_Name.Token_Str, Comp_Unit => ContextManager.NullUnit, Specification => Dictionary.Location'(Start_Position => Package_Name.Position, End_Position => Package_Name.Position), Scope => Dictionary.GlobalScope, ThePackage => Lib_Package_Symbol); Current_Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Lib_Package_Symbol); -- Add Bit_Order, its enumeration literals, and -- System.Default_Bit_Order as a deferred constant -- (for now...the actual value might be supplied and -- overridden in the config file). Dictionary.Add_Enumeration_Type (Name => LexTokenManager.Bit_Order_Token, Comp_Unit => ContextManager.NullUnit, Declaration => Dictionary.Location'(Start_Position => Package_Name.Position, End_Position => Package_Name.Position), Scope => Current_Scope, Context => Dictionary.ProgramContext, The_Type => Bit_Order_Type_Symbol); --# accept Flow, 10, Unused_Symbol, "Expected ineffective assignment to OK"; Dictionary.AddEnumerationLiteral (Name => LexTokenManager.High_Order_First_Token, Comp_Unit => ContextManager.NullUnit, Declaration => Dictionary.Location'(Start_Position => Package_Name.Position, End_Position => Package_Name.Position), Position => LexTokenManager.Zero_Value, The_Type => Bit_Order_Type_Symbol, TheEnumerationLiteral => Unused_Symbol); Dictionary.AddEnumerationLiteral (Name => LexTokenManager.Low_Order_First_Token, Comp_Unit => ContextManager.NullUnit, Declaration => Dictionary.Location'(Start_Position => Package_Name.Position, End_Position => Package_Name.Position), Position => LexTokenManager.One_Value, The_Type => Bit_Order_Type_Symbol, TheEnumerationLiteral => Unused_Symbol); Dictionary.Add_Deferred_Constant (Name => LexTokenManager.Default_Bit_Order_Token, Type_Mark => Bit_Order_Type_Symbol, Type_Reference => Dictionary.Location'(Start_Position => Package_Name.Position, End_Position => Package_Name.Position), Comp_Unit => ContextManager.NullUnit, Declaration => Dictionary.Location'(Start_Position => Package_Name.Position, End_Position => Package_Name.Position), The_Package => Lib_Package_Symbol, TheConstant => Unused_Symbol); --# end accept; Valid_Package := True; end if; when Package_Ada => --# accept F, 41, "Stable expression expected here"; case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => -- package Ada disallowed in Spark83 mode Raise_Semantic_Error (Err_Num => 779, Lex_Val => Package_Name); Valid_Package := False; when CommandLineData.SPARK95_Onwards => Lib_Package_Symbol := Ada_Package_Symbol; Current_Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Lib_Package_Symbol); Valid_Package := True; end case; --# end accept; end case; end if; exit when Name_Match; end loop; when 2 => -- Checking Child packages case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => -- Child packages not supported in Ada83 mode. Flag the error at the -- library package name token, unless we have done so already. 
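-- For SPARK95 onwards (further below) only the child packages listed in
-- Possible_Child_Packages are accepted, namely Ada.Real_Time and
-- Ada.Interrupts; these are looked up in the Dictionary with
-- LookupSelectedItem rather than created here.  A child of any library
-- package other than Ada, or an unrecognised child name, falls through to
-- error 776 at the end of Change_To_New_Package.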
if Valid_Package then -- No error flagged yet, so do it now. Raise_Semantic_Error (Err_Num => 779, Lex_Val => Current_Declaration.Enc_Package (1)); Valid_Package := False; end if; -- Don't rub it in by giving a second error (776) for an unmatched name. This -- might be a bogus error message anyway since the name may be OK in Ada95. Name_Match := True; when CommandLineData.SPARK95_Onwards => for I in Child_Package_Range loop Match_Identifier (ID => Possible_Child_Packages (I), To_Match => Package_Name.Token_Str, Position => Package_Name.Position, Matched => Package_Name_Match); if Package_Name_Match then Name_Match := True; case I is when Package_Interrupts => Local_Package_Symbol := Dictionary.LookupSelectedItem (Prefix => Lib_Package_Symbol, Selector => LexTokenManager.Interrupts_Token, Scope => Dictionary.GlobalScope, Context => Dictionary.ProofContext); when Package_Real_Time => Local_Package_Symbol := Dictionary.LookupSelectedItem (Prefix => Lib_Package_Symbol, Selector => LexTokenManager.Real_Time_Token, Scope => Dictionary.GlobalScope, Context => Dictionary.ProofContext); end case; if Dictionary.Is_Null_Symbol (Local_Package_Symbol) then Valid_Package := False; --# accept F, 41, "Stable expression expected here"; if not Dictionary.Packages_Are_Equal (Left_Symbol => Lib_Package_Symbol, Right_Symbol => Ada_Package_Symbol) then -- The child package is a child of a package other than Ada, so -- name is not recognised. Name_Match := False; end if; --# end accept; else Current_Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Local_Package_Symbol); Valid_Package := True; end if; end if; exit when Name_Match; end loop; end case; end case; --# assert True; if not Name_Match then Raise_Semantic_Error (Err_Num => 776, Lex_Val => Package_Name); Valid_Package := False; end if; --# accept Flow, 33, Unused_Symbol, "Expected to be neither referenced nor exported"; end Change_To_New_Package; begin case Parser_Stage is when 1 => Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.RWpackage); Package_Name_Depth := 1; Next_Stage; when 2 => Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.identifier); Change_To_New_Package (Package_Name => The_Lex_Val); Current_Declaration.Enc_Package (Package_Name_Depth) := The_Lex_Val; Next_Stage; when 3 => case The_Token is when SP_Symbols.RWis => -- Evaluate and store the full name of the package Package_Full_Name := LexTokenManager.Lex_String_To_String (Lex_Str => Current_Declaration.Enc_Package (1).Token_Str); for I in Child_Package_Depth range 2 .. 
Package_Name_Depth loop E_Strings.Append_String (E_Str => Package_Full_Name, Str => "."); E_Strings.Append_Examiner_String (E_Str1 => Package_Full_Name, E_Str2 => LexTokenManager.Lex_String_To_String (Lex_Str => Current_Declaration.Enc_Package (I).Token_Str)); --# assert True; end loop; Next_Stage; when SP_Symbols.point => -- Child unit if Package_Name_Depth = Child_Package_Depth'Last then -- Too many child levels, or a spurious dot Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.RWis); else Package_Name_Depth := Package_Name_Depth + 1; Parser_Stage := 2; -- Go back to process the child name end if; when others => -- expected a 'package is' Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.RWis); end case; when 4 => Lookahead := True; New_State (The_Stage => Defn); when others => null; end case; end Do_Config_Defn_Start; procedure Do_Next_Config_Defn_Start --# global in CommandLineData.Content; --# in Possible_Child_Packages; --# in Possible_Library_Packages; --# in Predefined_Scope; --# in The_Lex_Val; --# in The_Token; --# in out Current_Declaration; --# in out Current_Scope; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Errors_Occurred; --# in out LexTokenManager.State; --# in out Lib_Package_Symbol; --# in out Lookahead; --# in out Next_Expected_State; --# in out Package_Full_Name; --# in out Package_Name_Depth; --# in out Parser_Stage; --# in out Parser_State; --# in out SPARK_IO.File_Sys; --# in out Valid_Package; --# derives Current_Declaration from *, --# Package_Name_Depth, --# Parser_Stage, --# The_Lex_Val & --# Current_Scope from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Lib_Package_Symbol, --# Package_Name_Depth, --# Parser_Stage, --# Possible_Child_Packages, --# Possible_Library_Packages, --# Predefined_Scope, --# The_Lex_Val, --# The_Token & --# Dictionary.Dict, --# Lib_Package_Symbol from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Package_Name_Depth, --# Parser_Stage, --# Possible_Library_Packages, --# The_Lex_Val, --# The_Token & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Current_Declaration, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lib_Package_Symbol, --# Package_Name_Depth, --# Parser_Stage, --# Possible_Child_Packages, --# Possible_Library_Packages, --# SPARK_IO.File_Sys, --# The_Lex_Val, --# The_Token, --# Valid_Package & --# Errors_Occurred, --# Valid_Package from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Lib_Package_Symbol, --# Package_Name_Depth, --# Parser_Stage, --# Possible_Child_Packages, --# Possible_Library_Packages, --# The_Lex_Val, --# The_Token, --# Valid_Package & --# LexTokenManager.State from *, --# Package_Name_Depth, --# Parser_Stage, --# The_Lex_Val, --# The_Token & --# Lookahead from *, --# Parser_Stage & --# Next_Expected_State, --# Parser_State from *, --# Package_Name_Depth, --# Parser_Stage, --# Parser_State, --# The_Token & --# Package_Full_Name from *, --# Current_Declaration, --# LexTokenManager.State, --# Package_Name_Depth, --# Parser_Stage, --# The_Token & --# Package_Name_Depth, --# Parser_Stage from Package_Name_Depth, --# Parser_Stage, --# The_Token; is begin case Parser_Stage is when 1 => case The_Token is when SP_Symbols.RWpackage => Package_Name_Depth := 1; Next_Stage; when SP_Symbols.SPEND => null; when others => -- expected a 'package ... 
is' Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.RWpackage); end case; when others => Do_Config_Defn_Start; end case; end Do_Next_Config_Defn_Start; procedure Do_Defn --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in The_Lex_Val; --# in The_Token; --# in out Current_Declaration; --# in out ErrorHandler.Error_Context; --# in out Errors_Occurred; --# in out LexTokenManager.State; --# in out Next_Expected_State; --# in out Parser_State; --# in out SPARK_IO.File_Sys; --# out Parser_Stage; --# derives Current_Declaration, --# LexTokenManager.State from *, --# The_Lex_Val, --# The_Token & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# The_Lex_Val, --# The_Token & --# Errors_Occurred from *, --# The_Token & --# Next_Expected_State from *, --# Parser_State, --# The_Token & --# Parser_Stage, --# Parser_State from The_Token; is begin case The_Token is when SP_Symbols.RWtype => New_State (The_Stage => Type_Unknown); when SP_Symbols.RWsubtype => Current_Declaration.My_Type := Dec_Integer_Subtype; New_State (The_Stage => Subtype_Int); when SP_Symbols.identifier => Current_Declaration.Name := The_Lex_Val; New_State (The_Stage => Const_Unknown); when SP_Symbols.RWend => New_State (The_Stage => Config_Defn_End); when others => -- expected a configuration definition Raise_Syntax_Error (The_Error => Syntax_Errors'(The_Type => Exp_Defn, The_Symbol => The_Token, Expected_Symbol => SP_Symbols.SPDEFAULT, The_Lex_Val => The_Lex_Val)); end case; end Do_Defn; procedure Do_Type_Unknown --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in The_Lex_Val; --# in The_Token; --# in out Current_Declaration; --# in out ErrorHandler.Error_Context; --# in out Errors_Occurred; --# in out LexTokenManager.State; --# in out Next_Expected_State; --# in out Parser_Stage; --# in out Parser_State; --# in out SPARK_IO.File_Sys; --# derives Current_Declaration, --# LexTokenManager.State from *, --# Parser_Stage, --# The_Lex_Val, --# The_Token & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Parser_Stage, --# SPARK_IO.File_Sys, --# The_Lex_Val, --# The_Token & --# Errors_Occurred, --# Parser_Stage, --# Parser_State from *, --# Parser_Stage, --# The_Token & --# Next_Expected_State from *, --# Parser_Stage, --# Parser_State, --# The_Token; is begin case Parser_Stage is when 1 => Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.identifier); Current_Declaration.Name := The_Lex_Val; Next_Stage; when 2 => Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.RWis); Next_Stage; when 3 => case The_Token is when SP_Symbols.RWprivate => Current_Declaration.My_Type := Dec_Private; New_State (The_Stage => Type_Private); when SP_Symbols.RWdigits => Current_Declaration.My_Type := Dec_Floating_Point_Type; New_State (The_Stage => Type_FP); when SP_Symbols.RWrange => Current_Declaration.My_Type := Dec_Integer_Type; New_State (The_Stage => Type_Int); when others => -- expected either 'digits', 'range' or 'private' Raise_Syntax_Error (The_Error => Syntax_Errors'(The_Type => Exp_Digits_Range, The_Symbol => The_Token, Expected_Symbol => SP_Symbols.SPDEFAULT, The_Lex_Val => The_Lex_Val)); end case; when others => null; end case; end Do_Type_Unknown; procedure Do_Type_Private --# global in CommandLineData.Content; --# in 
Current_Declaration; --# in Current_Scope; --# in Package_Full_Name; --# in Possible_Identifiers; --# in The_Lex_Val; --# in The_Token; --# in Valid_Package; --# in out Any_Priority_Sym; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Errors_Occurred; --# in out Interrupt_Priority_Sym; --# in out LexTokenManager.State; --# in out Lookahead; --# in out Next_Expected_State; --# in out Parser_Stage; --# in out Parser_State; --# in out Priority_Sym; --# in out SPARK_IO.File_Sys; --# derives Any_Priority_Sym, --# Dictionary.Dict, --# Interrupt_Priority_Sym, --# Priority_Sym from Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Current_Scope, --# Dictionary.Dict, --# Interrupt_Priority_Sym, --# LexTokenManager.State, --# Package_Full_Name, --# Parser_Stage, --# Possible_Identifiers, --# Priority_Sym, --# Valid_Package & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Interrupt_Priority_Sym, --# LexTokenManager.State, --# Package_Full_Name, --# Parser_Stage, --# Possible_Identifiers, --# Priority_Sym, --# SPARK_IO.File_Sys, --# The_Lex_Val, --# The_Token, --# Valid_Package & --# Errors_Occurred from *, --# Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Current_Scope, --# Dictionary.Dict, --# Interrupt_Priority_Sym, --# LexTokenManager.State, --# Package_Full_Name, --# Parser_Stage, --# Possible_Identifiers, --# Priority_Sym, --# The_Token, --# Valid_Package & --# LexTokenManager.State from *, --# Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Current_Scope, --# Dictionary.Dict, --# Interrupt_Priority_Sym, --# Package_Full_Name, --# Parser_Stage, --# Possible_Identifiers, --# Priority_Sym, --# The_Lex_Val, --# The_Token, --# Valid_Package & --# Lookahead from *, --# Parser_Stage & --# Next_Expected_State, --# Parser_State from *, --# Parser_Stage, --# Parser_State, --# The_Token & --# Parser_Stage from *, --# The_Token; is begin case Parser_Stage is when 1 => Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.semicolon); Next_Stage; when 2 => Check_And_Process_Declaration (The_Decl => Current_Declaration); Lookahead := True; New_State (The_Stage => Defn); when others => null; end case; end Do_Type_Private; procedure Do_Type_FP --# global in CommandLineData.Content; --# in Current_Scope; --# in Package_Full_Name; --# in Possible_Identifiers; --# in The_Lex_Val; --# in The_Token; --# in Valid_Package; --# in out Any_Priority_Sym; --# in out Current_Declaration; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Errors_Occurred; --# in out Interrupt_Priority_Sym; --# in out LexTokenManager.State; --# in out Lookahead; --# in out Next_Expected_State; --# in out Parser_Stage; --# in out Parser_State; --# in out Priority_Sym; --# in out SPARK_IO.File_Sys; --# in out Unary_Minus; --# derives Any_Priority_Sym, --# Dictionary.Dict, --# Interrupt_Priority_Sym, --# Priority_Sym from Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Current_Scope, --# Dictionary.Dict, --# Interrupt_Priority_Sym, --# LexTokenManager.State, --# Package_Full_Name, --# Parser_Stage, --# Possible_Identifiers, --# Priority_Sym, --# Valid_Package & --# Current_Declaration from *, --# LexTokenManager.State, --# Parser_Stage, --# The_Lex_Val, --# The_Token, --# Unary_Minus & --# 
ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Interrupt_Priority_Sym, --# LexTokenManager.State, --# Package_Full_Name, --# Parser_Stage, --# Possible_Identifiers, --# Priority_Sym, --# SPARK_IO.File_Sys, --# The_Lex_Val, --# The_Token, --# Unary_Minus, --# Valid_Package & --# Errors_Occurred from *, --# Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Current_Scope, --# Dictionary.Dict, --# Interrupt_Priority_Sym, --# LexTokenManager.State, --# Package_Full_Name, --# Parser_Stage, --# Possible_Identifiers, --# Priority_Sym, --# The_Token, --# Unary_Minus, --# Valid_Package & --# LexTokenManager.State from *, --# Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Current_Scope, --# Dictionary.Dict, --# Interrupt_Priority_Sym, --# Package_Full_Name, --# Parser_Stage, --# Possible_Identifiers, --# Priority_Sym, --# The_Lex_Val, --# The_Token, --# Unary_Minus, --# Valid_Package & --# Lookahead from *, --# Parser_Stage & --# Next_Expected_State, --# Parser_State from *, --# Parser_Stage, --# Parser_State, --# The_Token, --# Unary_Minus & --# Parser_Stage, --# Unary_Minus from Parser_Stage, --# The_Token, --# Unary_Minus; is begin case Parser_Stage is when 1 => Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.integer_number); Guarded_Literal_To_Value (Lex => The_Lex_Val, Val => Current_Declaration.Num_Digits); Next_Stage; when 2 => Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.RWrange); Unary_Minus := False; Next_Stage; when 3 => case The_Token is when SP_Symbols.real_number => Guarded_Literal_To_Value (Lex => The_Lex_Val, Val => Current_Declaration.Low_Bound); if Unary_Minus then Maths.Negate (Current_Declaration.Low_Bound); end if; Next_Stage; when SP_Symbols.minus => if Unary_Minus then -- expected a number Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.real_number); else Unary_Minus := True; end if; when others => -- expected either either '-' or a number Raise_Syntax_Error (The_Error => Syntax_Errors'(The_Type => Exp_Minus_Real, The_Symbol => The_Token, Expected_Symbol => SP_Symbols.SPDEFAULT, The_Lex_Val => The_Lex_Val)); end case; when 4 => Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.double_dot); Unary_Minus := False; Next_Stage; when 5 => case The_Token is when SP_Symbols.real_number => Guarded_Literal_To_Value (Lex => The_Lex_Val, Val => Current_Declaration.High_Bound); if Unary_Minus then Maths.Negate (Current_Declaration.High_Bound); end if; Next_Stage; when SP_Symbols.minus => if Unary_Minus then -- expected a number Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.real_number); else Unary_Minus := True; end if; when others => -- expected either '-' or a number Raise_Syntax_Error (The_Error => Syntax_Errors'(The_Type => Exp_Minus_Real, The_Symbol => The_Token, Expected_Symbol => SP_Symbols.SPDEFAULT, The_Lex_Val => The_Lex_Val)); end case; when 6 => Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.semicolon); Next_Stage; when 7 => Check_And_Process_Declaration (The_Decl => Current_Declaration); Lookahead := True; New_State (The_Stage => Defn); when others => null; end case; end Do_Type_FP; procedure Do_Type_Int --# global in CommandLineData.Content; --# in Current_Scope; --# in Expr_Int_Value; --# in Package_Full_Name; --# in Possible_Identifiers; --# in The_Lex_Val; --# in The_Token; --# in 
Valid_Package; --# in out Any_Priority_Sym; --# in out Current_Declaration; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Errors_Occurred; --# in out Interrupt_Priority_Sym; --# in out Int_Add; --# in out Int_Exponent; --# in out Int_Subtract; --# in out LexTokenManager.State; --# in out Lookahead; --# in out Next_Expected_State; --# in out Parser_Stage; --# in out Parser_State; --# in out Priority_Sym; --# in out Saved_Parser_Stage; --# in out Saved_Parser_State; --# in out SPARK_IO.File_Sys; --# in out Unary_Minus; --# derives Any_Priority_Sym, --# Dictionary.Dict, --# Interrupt_Priority_Sym, --# Priority_Sym from Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Current_Scope, --# Dictionary.Dict, --# Interrupt_Priority_Sym, --# LexTokenManager.State, --# Package_Full_Name, --# Parser_Stage, --# Possible_Identifiers, --# Priority_Sym, --# Valid_Package & --# Current_Declaration from *, --# Expr_Int_Value, --# Parser_Stage & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Interrupt_Priority_Sym, --# LexTokenManager.State, --# Package_Full_Name, --# Parser_Stage, --# Possible_Identifiers, --# Priority_Sym, --# SPARK_IO.File_Sys, --# The_Lex_Val, --# The_Token, --# Valid_Package & --# Errors_Occurred from *, --# Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Current_Scope, --# Dictionary.Dict, --# Interrupt_Priority_Sym, --# LexTokenManager.State, --# Package_Full_Name, --# Parser_Stage, --# Possible_Identifiers, --# Priority_Sym, --# The_Token, --# Valid_Package & --# Int_Add, --# Int_Exponent, --# Int_Subtract, --# Lookahead, --# Saved_Parser_Stage, --# Unary_Minus from *, --# Parser_Stage & --# LexTokenManager.State from *, --# Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Current_Scope, --# Dictionary.Dict, --# Interrupt_Priority_Sym, --# Package_Full_Name, --# Parser_Stage, --# Possible_Identifiers, --# Priority_Sym, --# The_Lex_Val, --# The_Token, --# Valid_Package & --# Next_Expected_State, --# Parser_State from *, --# Parser_Stage, --# Parser_State, --# The_Token & --# Parser_Stage from *, --# The_Token & --# Saved_Parser_State from *, --# Parser_Stage, --# Parser_State; is begin case Parser_Stage is when 1 => Lookahead := True; Next_Stage; Unary_Minus := False; Begin_Int_Expr_Parse; when 2 => -- got the low bound, last time Current_Declaration.Low_Bound := Expr_Int_Value; Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.double_dot); Next_Stage; when 3 => Lookahead := True; Next_Stage; Unary_Minus := False; Begin_Int_Expr_Parse; when 4 => -- got the high bound, last time Current_Declaration.High_Bound := Expr_Int_Value; Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.semicolon); Next_Stage; when 5 => Check_And_Process_Declaration (The_Decl => Current_Declaration); Lookahead := True; New_State (The_Stage => Defn); when others => null; end case; end Do_Type_Int; procedure Do_Subtype_Int --# global in CommandLineData.Content; --# in Current_Scope; --# in Expr_Int_Value; --# in Package_Full_Name; --# in Possible_Identifiers; --# in The_Lex_Val; --# in The_Token; --# in Valid_Package; --# in out Any_Priority_Sym; --# in out Current_Declaration; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Errors_Occurred; --# in out Interrupt_Priority_Sym; --# in out 
Int_Add; --# in out Int_Exponent; --# in out Int_Subtract; --# in out LexTokenManager.State; --# in out Lookahead; --# in out Next_Expected_State; --# in out Parser_Stage; --# in out Parser_State; --# in out Priority_Sym; --# in out Saved_Parser_Stage; --# in out Saved_Parser_State; --# in out SPARK_IO.File_Sys; --# in out Unary_Minus; --# derives Any_Priority_Sym, --# Dictionary.Dict, --# Interrupt_Priority_Sym, --# Priority_Sym from Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Current_Scope, --# Dictionary.Dict, --# Interrupt_Priority_Sym, --# LexTokenManager.State, --# Package_Full_Name, --# Parser_Stage, --# Possible_Identifiers, --# Priority_Sym, --# Valid_Package & --# Current_Declaration from *, --# Expr_Int_Value, --# Parser_Stage, --# The_Lex_Val & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Interrupt_Priority_Sym, --# LexTokenManager.State, --# Package_Full_Name, --# Parser_Stage, --# Possible_Identifiers, --# Priority_Sym, --# SPARK_IO.File_Sys, --# The_Lex_Val, --# The_Token, --# Valid_Package & --# Errors_Occurred from *, --# Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Current_Scope, --# Dictionary.Dict, --# Interrupt_Priority_Sym, --# LexTokenManager.State, --# Package_Full_Name, --# Parser_Stage, --# Possible_Identifiers, --# Priority_Sym, --# The_Token, --# Valid_Package & --# Int_Add, --# Int_Exponent, --# Int_Subtract, --# Lookahead, --# Saved_Parser_Stage, --# Unary_Minus from *, --# Parser_Stage & --# LexTokenManager.State from *, --# Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Current_Scope, --# Dictionary.Dict, --# Interrupt_Priority_Sym, --# Package_Full_Name, --# Parser_Stage, --# Possible_Identifiers, --# Priority_Sym, --# The_Lex_Val, --# The_Token, --# Valid_Package & --# Next_Expected_State, --# Parser_State from *, --# Parser_Stage, --# Parser_State, --# The_Token & --# Parser_Stage from *, --# The_Token & --# Saved_Parser_State from *, --# Parser_Stage, --# Parser_State; is begin case Parser_Stage is when 1 => Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.identifier); Current_Declaration.Name := The_Lex_Val; Next_Stage; when 2 => Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.RWis); Next_Stage; when 3 => Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.identifier); Current_Declaration.Base_Type := The_Lex_Val; Next_Stage; when 4 => Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.RWrange); Next_Stage; when 5 => Lookahead := True; Next_Stage; Unary_Minus := False; Begin_Int_Expr_Parse; when 6 => -- got the low bound, last time Current_Declaration.Low_Bound := Expr_Int_Value; Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.double_dot); Next_Stage; when 7 => Lookahead := True; Next_Stage; Unary_Minus := False; Begin_Int_Expr_Parse; when 8 => -- got the high bound, last time Current_Declaration.High_Bound := Expr_Int_Value; Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.semicolon); Next_Stage; when 9 => Check_And_Process_Declaration (The_Decl => Current_Declaration); Lookahead := True; New_State (The_Stage => Defn); when others => null; end case; end Do_Subtype_Int; procedure Do_Const_Unknown --# global in CommandLineData.Content; --# in Current_Scope; --# in Expr_Int_Value; --# in Package_Full_Name; --# in 
Possible_Identifiers; --# in The_Lex_Val; --# in The_Token; --# in Valid_Package; --# in out Any_Priority_Sym; --# in out Current_Declaration; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Errors_Occurred; --# in out Interrupt_Priority_Sym; --# in out Int_Add; --# in out Int_Exponent; --# in out Int_Subtract; --# in out LexTokenManager.State; --# in out Lookahead; --# in out Next_Expected_State; --# in out Parser_Stage; --# in out Parser_State; --# in out Priority_Sym; --# in out Saved_Parser_Stage; --# in out Saved_Parser_State; --# in out SPARK_IO.File_Sys; --# in out Unary_Minus; --# derives Any_Priority_Sym, --# Dictionary.Dict, --# Interrupt_Priority_Sym, --# Priority_Sym from Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Current_Scope, --# Dictionary.Dict, --# Expr_Int_Value, --# Interrupt_Priority_Sym, --# LexTokenManager.State, --# Package_Full_Name, --# Parser_Stage, --# Possible_Identifiers, --# Priority_Sym, --# Valid_Package & --# Current_Declaration from *, --# Expr_Int_Value, --# LexTokenManager.State, --# Parser_Stage, --# The_Lex_Val, --# The_Token, --# Unary_Minus & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Expr_Int_Value, --# Interrupt_Priority_Sym, --# LexTokenManager.State, --# Package_Full_Name, --# Parser_Stage, --# Possible_Identifiers, --# Priority_Sym, --# SPARK_IO.File_Sys, --# The_Lex_Val, --# The_Token, --# Unary_Minus, --# Valid_Package & --# Errors_Occurred from *, --# Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Current_Scope, --# Dictionary.Dict, --# Expr_Int_Value, --# Interrupt_Priority_Sym, --# LexTokenManager.State, --# Package_Full_Name, --# Parser_Stage, --# Possible_Identifiers, --# Priority_Sym, --# The_Token, --# Unary_Minus, --# Valid_Package & --# Int_Add, --# Int_Exponent, --# Int_Subtract, --# Lookahead, --# Saved_Parser_Stage, --# Unary_Minus from *, --# Parser_Stage, --# The_Token & --# LexTokenManager.State from *, --# Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Current_Scope, --# Dictionary.Dict, --# Expr_Int_Value, --# Interrupt_Priority_Sym, --# Package_Full_Name, --# Parser_Stage, --# Possible_Identifiers, --# Priority_Sym, --# The_Lex_Val, --# The_Token, --# Unary_Minus, --# Valid_Package & --# Next_Expected_State, --# Parser_State from *, --# Parser_Stage, --# Parser_State, --# The_Token, --# Unary_Minus & --# Parser_Stage from *, --# The_Token, --# Unary_Minus & --# Saved_Parser_State from *, --# Parser_Stage, --# Parser_State, --# The_Token; is begin case Parser_Stage is when 1 => Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.colon); Next_Stage; when 2 => Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.RWconstant); Next_Stage; when 3 => if The_Token = SP_Symbols.identifier then Current_Declaration.My_Type := Dec_Typed_Constant; Current_Declaration.Base_Type := The_Lex_Val; -- Stay in stage 3, since we now expect := else Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.becomes); Unary_Minus := False; Next_Stage; end if; when 4 => case The_Token is when SP_Symbols.real_number => Current_Declaration.My_Type := Dec_Named_Real; Guarded_Literal_To_Value (Lex => The_Lex_Val, Val => Current_Declaration.Value); if Unary_Minus then Maths.Negate (Current_Declaration.Value); end if; Next_Stage; when SP_Symbols.minus => 
if Unary_Minus then -- expected a number Raise_Syntax_Error (The_Error => Syntax_Errors'(The_Type => Exp_Integer_Real, The_Symbol => The_Token, Expected_Symbol => SP_Symbols.SPDEFAULT, The_Lex_Val => The_Lex_Val)); else Unary_Minus := True; end if; when SP_Symbols.identifier => Current_Declaration.Value_Str := The_Lex_Val; Next_Stage; when others => Current_Declaration.My_Type := Dec_Named_Integer; Lookahead := True; Next_Stage; Begin_Int_Expr_Parse; end case; when 5 => Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.semicolon); Next_Stage; when 6 => if Current_Declaration.My_Type = Dec_Named_Integer then Current_Declaration.Value := Expr_Int_Value; end if; Check_And_Process_Declaration (The_Decl => Current_Declaration); Lookahead := True; New_State (The_Stage => Defn); when others => null; end case; end Do_Const_Unknown; procedure Do_Config_Defn_End --# global in Any_Priority_Sym; --# in CommandLineData.Content; --# in Dictionary.Dict; --# in Interrupt_Priority_Sym; --# in Package_Full_Name; --# in Package_Name_Depth; --# in Possible_Library_Packages; --# in Priority_Sym; --# in The_Lex_Val; --# in The_Token; --# in out Current_Declaration; --# in out ErrorHandler.Error_Context; --# in out Errors_Occurred; --# in out LexTokenManager.State; --# in out Lookahead; --# in out Next_Expected_State; --# in out Package_End_Name; --# in out Parser_Stage; --# in out Parser_State; --# in out SPARK_IO.File_Sys; --# derives Current_Declaration from *, --# Parser_Stage, --# The_Lex_Val & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Interrupt_Priority_Sym, --# LexTokenManager.State, --# Package_End_Name, --# Package_Full_Name, --# Package_Name_Depth, --# Parser_Stage, --# Possible_Library_Packages, --# Priority_Sym, --# SPARK_IO.File_Sys, --# The_Lex_Val, --# The_Token & --# Errors_Occurred from *, --# Any_Priority_Sym, --# Interrupt_Priority_Sym, --# LexTokenManager.State, --# Package_End_Name, --# Package_Full_Name, --# Package_Name_Depth, --# Parser_Stage, --# Possible_Library_Packages, --# Priority_Sym, --# The_Token & --# LexTokenManager.State from *, --# Package_Full_Name, --# Package_Name_Depth, --# Parser_Stage, --# The_Lex_Val, --# The_Token & --# Lookahead from *, --# Parser_Stage & --# Next_Expected_State, --# Parser_State from *, --# Parser_Stage, --# Parser_State, --# The_Token & --# Package_End_Name from *, --# LexTokenManager.State, --# Parser_Stage, --# The_Lex_Val, --# The_Token & --# Parser_Stage from *, --# The_Token; is procedure Check_Package_End --# global in Any_Priority_Sym; --# in CommandLineData.Content; --# in Current_Declaration; --# in Dictionary.Dict; --# in Interrupt_Priority_Sym; --# in Package_End_Name; --# in Package_Full_Name; --# in Package_Name_Depth; --# in Possible_Library_Packages; --# in Priority_Sym; --# in out ErrorHandler.Error_Context; --# in out Errors_Occurred; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Any_Priority_Sym, --# CommandLineData.Content, --# Current_Declaration, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Interrupt_Priority_Sym, --# LexTokenManager.State, --# Package_End_Name, --# Package_Full_Name, --# Package_Name_Depth, --# Possible_Library_Packages, --# Priority_Sym, --# SPARK_IO.File_Sys & --# Errors_Occurred from *, --# Any_Priority_Sym, --# Interrupt_Priority_Sym, --# 
LexTokenManager.State, --# Package_End_Name, --# Package_Full_Name, --# Package_Name_Depth, --# Possible_Library_Packages, --# Priority_Sym & --# LexTokenManager.State from *, --# Package_Full_Name, --# Package_Name_Depth; is Full_Name_Token_Str : LexTokenManager.Lex_String; begin if Package_Name_Depth = 1 then Full_Name_Token_Str := Current_Declaration.Enc_Package (1).Token_Str; else -- We must construct a new lex string from the library and child package names LexTokenManager.Insert_Examiner_String (Str => Package_Full_Name, Lex_Str => Full_Name_Token_Str); end if; -- Check that the (dotted) name after "end" matches the package name. if not E_Strings.Eq_String (E_Str1 => Package_End_Name, E_Str2 => Package_Full_Name) then -- don't use normal error handler here; need to pass different string from -- position ErrorHandler.Semantic_Error (Err_Num => 58, Reference => ErrorHandler.No_Reference, Position => Current_Declaration.Name.Position, Id_Str => Full_Name_Token_Str); Errors_Occurred := True; else Casing.Check_String_Casing (Str => Package_End_Name, Lex_Str => Full_Name_Token_Str, Position => Current_Declaration.Name.Position); end if; -- Check that the full set of priority decls are given if we have Any_Priority if LexTokenManager.Comp_Str_Case_Insensitive (Str => Package_Full_Name, Lex_Str => Possible_Library_Packages (Package_System)) then if not Dictionary.Is_Null_Symbol (Any_Priority_Sym) and then (Dictionary.Is_Null_Symbol (Priority_Sym) or else Dictionary.Is_Null_Symbol (Interrupt_Priority_Sym)) then Raise_Semantic_Error (Err_Num => 770, Lex_Val => Current_Declaration.Name); end if; end if; end Check_Package_End; begin case Parser_Stage is when 1 => Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.identifier); -- Build up the name after "end" in Package_End_Name Package_End_Name := LexTokenManager.Lex_String_To_String (Lex_Str => The_Lex_Val.Token_Str); Current_Declaration.Name := The_Lex_Val; Next_Stage; when 2 => case The_Token is when SP_Symbols.semicolon => Next_Stage; -- Note that we don't need to parse the name too strictly here -- e.g. we won't (yet) fail . The string that is -- built won't match the package name unless it is well-formed, -- and so a badly-formed name will fail later in Check_Package_End. when SP_Symbols.point => E_Strings.Append_String (E_Str => Package_End_Name, Str => "."); when SP_Symbols.identifier => E_Strings.Append_Examiner_String (E_Str1 => Package_End_Name, E_Str2 => LexTokenManager.Lex_String_To_String (Lex_Str => The_Lex_Val.Token_Str)); when others => -- expected a 'end [.] 
;' Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.semicolon); end case; when 3 => Lookahead := True; Check_Package_End; New_State (The_Stage => Next_Config_Defn_Start); when others => null; end case; end Do_Config_Defn_End; procedure Do_Expr_Int --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in Saved_Parser_Stage; --# in Saved_Parser_State; --# in The_Lex_Val; --# in The_Token; --# in out ErrorHandler.Error_Context; --# in out Errors_Occurred; --# in out Expr_Int_Value; --# in out Int_Add; --# in out Int_Add_Sub_Val; --# in out Int_Exponent; --# in out Int_Exponent_Val; --# in out Int_Subtract; --# in out Int_Val; --# in out LexTokenManager.State; --# in out Lookahead; --# in out Next_Expected_State; --# in out Parser_Stage; --# in out Parser_State; --# in out SPARK_IO.File_Sys; --# in out Unary_Minus; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Int_Add, --# Int_Add_Sub_Val, --# Int_Exponent, --# Int_Exponent_Val, --# Int_Subtract, --# Int_Val, --# LexTokenManager.State, --# Parser_Stage, --# SPARK_IO.File_Sys, --# The_Lex_Val, --# The_Token, --# Unary_Minus & --# Errors_Occurred, --# Int_Add, --# Int_Exponent, --# Int_Subtract, --# Lookahead, --# Unary_Minus from *, --# Parser_Stage, --# The_Token & --# Expr_Int_Value from *, --# Int_Add, --# Int_Add_Sub_Val, --# Int_Exponent, --# Int_Exponent_Val, --# Int_Subtract, --# Int_Val, --# Parser_Stage, --# The_Token, --# Unary_Minus & --# Int_Add_Sub_Val, --# Int_Val, --# LexTokenManager.State from *, --# LexTokenManager.State, --# Parser_Stage, --# The_Lex_Val, --# The_Token & --# Int_Exponent_Val from *, --# LexTokenManager.State, --# Parser_Stage, --# The_Lex_Val & --# Next_Expected_State from *, --# Parser_Stage, --# Parser_State, --# The_Token & --# Parser_Stage from *, --# Saved_Parser_Stage, --# The_Token & --# Parser_State from *, --# Parser_Stage, --# Saved_Parser_State, --# The_Token; is procedure End_Int_Expr_Parse --# global in Saved_Parser_Stage; --# in Saved_Parser_State; --# out Parser_Stage; --# out Parser_State; --# derives Parser_Stage from Saved_Parser_Stage & --# Parser_State from Saved_Parser_State; is begin Parser_State := Saved_Parser_State; Parser_Stage := Saved_Parser_Stage; end End_Int_Expr_Parse; procedure Calc_Int_Value (Result : out Maths.Value) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in Int_Add; --# in Int_Add_Sub_Val; --# in Int_Exponent; --# in Int_Exponent_Val; --# in Int_Subtract; --# in Int_Val; --# in LexTokenManager.State; --# in The_Lex_Val; --# in Unary_Minus; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Int_Add, --# Int_Add_Sub_Val, --# Int_Exponent, --# Int_Exponent_Val, --# Int_Subtract, --# Int_Val, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# The_Lex_Val, --# Unary_Minus & --# Result from Int_Add, --# Int_Add_Sub_Val, --# Int_Exponent, --# Int_Exponent_Val, --# Int_Subtract, --# Int_Val, --# Unary_Minus; is So_Far, Temp_Val : Maths.Value; Local_ME : Maths.ErrorCode := Maths.NoError; Had_An_Error : Boolean := False; begin -- valid typing of the values is confirmed by correct lexical analysis, -- and therefore the only Maths.ErrorCode that can arise from the calculations -- here is Maths.Overflow; resulting in a semantic warning. 
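-------------------------------------------------------------------------------
-- Illustrative sketch, not part of the original sources: Do_Expr_Int
-- recognises constant expressions of the restricted shape
--    [-] N [** E] [+ M | - M]
-- and Calc_Int_Value folds them as the statements below do.  The standalone
-- sketch uses Long_Long_Integer in place of the toolset's unbounded
-- Maths.Value, so it only approximates the Maths.Overflow handling performed
-- here.
function Fold_Config_Expression
  (N            : Long_Long_Integer;
   Unary_Minus  : Boolean;              -- a leading '-' was seen
   Has_Exponent : Boolean;              -- "** E" was seen
   E            : Natural;
   Add          : Boolean;              -- a trailing "+ M" was seen
   Subtract     : Boolean;              -- a trailing "- M" was seen
   M            : Long_Long_Integer) return Long_Long_Integer
is
   So_Far : Long_Long_Integer := N;
begin
   if Has_Exponent then
      So_Far := N ** E;                 -- mirrors Maths.RaiseByPower
   end if;
   if Unary_Minus then
      So_Far := -So_Far;                -- mirrors Maths.Negate
   end if;
   if Subtract then
      So_Far := So_Far - M;             -- mirrors Maths.Subtract
   elsif Add then
      So_Far := So_Far + M;             -- mirrors Maths.Add
   end if;
   return So_Far;
end Fold_Config_Expression;
-------------------------------------------------------------------------------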
if Int_Exponent then Maths.RaiseByPower (Int_Val, Int_Exponent_Val, So_Far, Local_ME); if Local_ME /= Maths.NoError then ErrorHandler.Semantic_Warning (Err_Num => 200, Position => The_Lex_Val.Position, Id_Str => The_Lex_Val.Token_Str); Had_An_Error := True; end if; else So_Far := Int_Val; end if; if Unary_Minus then Maths.Negate (So_Far); end if; --# assert True; if Int_Subtract then Maths.Subtract (So_Far, Int_Add_Sub_Val, Temp_Val, Local_ME); So_Far := Temp_Val; elsif Int_Add then Maths.Add (So_Far, Int_Add_Sub_Val, Temp_Val, Local_ME); So_Far := Temp_Val; end if; if Local_ME /= Maths.NoError and then not Had_An_Error then ErrorHandler.Semantic_Warning (Err_Num => 200, Position => The_Lex_Val.Position, Id_Str => The_Lex_Val.Token_Str); end if; Result := So_Far; end Calc_Int_Value; begin case Parser_Stage is when 1 => case The_Token is when SP_Symbols.minus => Unary_Minus := True; Parser_Stage := 2; when SP_Symbols.integer_number => Guarded_Literal_To_Value (Lex => The_Lex_Val, Val => Int_Val); Parser_Stage := 3; when others => -- expected either '-' or an integer literal Raise_Syntax_Error (The_Error => Syntax_Errors'(The_Type => Exp_Minus_Integer, The_Symbol => The_Token, Expected_Symbol => SP_Symbols.SPDEFAULT, The_Lex_Val => The_Lex_Val)); end case; when 2 => Guarded_Literal_To_Value (Lex => The_Lex_Val, Val => Int_Val); Parser_Stage := 3; Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.integer_number); when 3 => case The_Token is when SP_Symbols.minus => Int_Subtract := True; Parser_Stage := 6; when SP_Symbols.plus => Int_Add := True; Parser_Stage := 6; when SP_Symbols.double_star => Int_Exponent := True; Parser_Stage := 4; when others => Lookahead := True; Calc_Int_Value (Result => Expr_Int_Value); End_Int_Expr_Parse; end case; when 4 => Guarded_Literal_To_Value (Lex => The_Lex_Val, Val => Int_Exponent_Val); Parser_Stage := 5; Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.integer_number); when 5 => case The_Token is when SP_Symbols.minus => Int_Subtract := True; Parser_Stage := 6; when SP_Symbols.plus => Int_Add := True; Parser_Stage := 6; when others => Lookahead := True; Calc_Int_Value (Result => Expr_Int_Value); End_Int_Expr_Parse; end case; when 6 => Assert_Token (The_Token => The_Token, Should_Be => SP_Symbols.integer_number); Guarded_Literal_To_Value (Lex => The_Lex_Val, Val => Int_Add_Sub_Val); Next_Stage; when 7 => Calc_Int_Value (Result => Expr_Int_Value); Lookahead := True; End_Int_Expr_Parse; when others => null; end case; end Do_Expr_Int; begin if not Lookahead then --# accept F, 10, Unwanted_Punct_Token, "Unwanted_Punct_Token unused here"; SparkLex.Examiner_Lex (Prog_Text => The_Config_File, Token => The_Token, Lex_Val => The_Lex_Val, Punct_Token => Unwanted_Punct_Token); --# end accept; else Lookahead := False; end if; case Parser_State is when Config_Defn_Start => Do_Config_Defn_Start; when Next_Config_Defn_Start => Do_Next_Config_Defn_Start; when Defn => Do_Defn; when Type_Unknown => Do_Type_Unknown; when Type_Private => Do_Type_Private; when Type_FP => Do_Type_FP; when Type_Int => Do_Type_Int; when Subtype_Int => Do_Subtype_Int; when Const_Unknown => Do_Const_Unknown; when Config_Defn_End => Do_Config_Defn_End; when Expr_Int => Do_Expr_Int; when Found_Syntax_Error => if The_Token = SP_Symbols.semicolon then New_State (The_Stage => Next_Expected_State); end if; end case; --# accept F, 33, Unwanted_Punct_Token, "Unwanted_Punct_Token unused here"; end State_Machine_Iterate; begin SparkLex.Clear_Line_Context; State_Machine_Initialise; loop 
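-------------------------------------------------------------------------------
-- Illustrative sketch, not part of the original sources: the Lookahead flag
-- tested at the start of State_Machine_Iterate implements a one-token
-- pushback scheme; a state procedure that wants the current token to be
-- re-examined by the next state sets Lookahead instead of letting the lexer
-- be called again, and the loop here drives the machine until the end of the
-- text.  The same shape, with a hypothetical Get_Token standing in for
-- SparkLex.Examiner_Lex, is:
procedure Lookahead_Driver is
   type Token_Kind is (T_Package, T_Identifier, T_Semicolon, T_End_Of_Text);

   The_Token : Token_Kind := T_End_Of_Text;
   Lookahead : Boolean    := False;

   procedure Get_Token (Token : out Token_Kind) is
   begin
      Token := T_End_Of_Text;  -- placeholder for a real lexer call
   end Get_Token;

   procedure Dispatch_On_State is
   begin
      -- A state procedure examines The_Token here; setting
      -- Lookahead := True hands the same token to the next state.
      null;
   end Dispatch_On_State;
begin
   loop
      if not Lookahead then
         Get_Token (Token => The_Token);  -- fetch a fresh token
      else
         Lookahead := False;              -- reuse the pushed-back token
      end if;
      Dispatch_On_State;
      exit when The_Token = T_End_Of_Text;
   end loop;
end Lookahead_Driver;
-------------------------------------------------------------------------------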
State_Machine_Iterate; exit when The_Token = SP_Symbols.SPEND or else Parsing_Ends; end loop; Overall_Status := not Errors_Occurred; end Process_Config_File; begin File_Opened_OK := False; ErrorHandler.Get_Error_Context (Context => The_Error_Context); if CommandLineData.Content.Target_Config then Open_File (The_File => Local_Config_File, File_Ok => File_Opened_OK); if File_Opened_OK then if CommandLineData.Content.Echo and not CommandLineData.Content.Brief then ScreenEcho.New_Line (1); ScreenEcho.Put_Line (" Reading target configuration file ..."); end if; ErrorHandler.Get_Error_Context (Context => Saved_Error_Context); ErrorHandler.Error_Init (Source_File_Name => CommandLineData.Content.Target_Config_File, Echo => CommandLineData.Content.Echo); Process_Config_File (The_Config_File => Local_Config_File, Overall_Status => File_Read_OK); ErrorHandler.Get_Error_Context (Context => The_Error_Context); ErrorHandler.Set_Error_Context (Context => Saved_Error_Context); --# accept F, 10, Local_Config_File, "Local_Config_File unused here"; Close_File (The_File => Local_Config_File); --# end accept; else File_Read_OK := False; end if; else File_Read_OK := True; end if; No_Errors := File_Read_OK; Opened_OK := File_Opened_OK; end Read_Config_File; procedure Output_Config_File (To_File : in SPARK_IO.File_Type) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in The_Error_Context; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys, --# XMLReport.State from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# The_Error_Context, --# To_File, --# XMLReport.State; is Saved_Error_Context : ErrorHandler.Error_Contexts; Error_Severity : ErrorHandler.Error_Level; Target_Config_File : E_Strings.T; procedure Check_And_Report_Errors --# global in Error_Severity; --# in To_File; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context from *, --# Error_Severity & --# SPARK_IO.File_Sys from *, --# Error_Severity, --# To_File; is begin if Error_Severity /= ErrorHandler.No_Error then SPARK_IO.New_Line (To_File, 1); SPARK_IO.Put_Line (To_File, "Warning: analysis aborted due to errors in target configuration file", 0); ErrorHandler.Set_File_Open_Error; end if; end Check_And_Report_Errors; begin if CommandLineData.Content.Target_Config then if CommandLineData.Content.XML then XMLReport.Start_Section (Section => XMLReport.S_Target_Config, Report => To_File); Target_Config_File := CommandLineData.Content.Target_Config_File; XMLReport.Filename (Plain_Output => CommandLineData.Content.Plain_Output, File => Target_Config_File); E_Strings.Put_String (File => To_File, E_Str => Target_Config_File); ErrorHandler.Get_Error_Context (Context => Saved_Error_Context); ErrorHandler.Set_Error_Context (Context => The_Error_Context); ErrorHandler.Get_Error_Severity (Severity => Error_Severity); XMLReport.Start_Section (Section => XMLReport.S_Messages, Report => To_File); ErrorHandler.AppendErrors (To_File, Error_Types.ForReportIndexedFiles); XMLReport.End_Section (Section => XMLReport.S_Messages, Report => To_File); Check_And_Report_Errors; ErrorHandler.Set_Error_Context (Context => Saved_Error_Context); XMLReport.End_Section (Section => XMLReport.S_Target_Config, Report => To_File); else SPARK_IO.New_Line (To_File, 2); SPARK_IO.Put_Line (To_File, "Target 
configuration file:", 0); ErrorHandler.Get_Error_Context (Context => Saved_Error_Context); ErrorHandler.Set_Error_Context (Context => The_Error_Context); ErrorHandler.Get_Error_Severity (Severity => Error_Severity); ErrorHandler.PrintErrors (To_File, Error_Types.ForReportIndexedFiles); Check_And_Report_Errors; ErrorHandler.Set_Error_Context (Context => Saved_Error_Context); end if; end if; end Output_Config_File; end ConfigFile; spark-2012.0.deb/examiner/declarations.ads0000644000175000017500000002235211753202336017416 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Cells; with Dictionary; with LexTokenManager; with Pairs; with SPARK_IO; use type Cells.Cell; use type Cells.Cell_Kind; use type Cells.Cell_Rank; use type Dictionary.Abstractions; use type Dictionary.Symbol; use type LexTokenManager.Str_Comp_Result; --# inherit AdjustFDL_RWs, --# Cells, --# Cell_Storage, --# Clists, --# CommandLineData, --# CStacks, --# DAG_IO, --# Debug, --# Dictionary, --# ErrorHandler, --# ExaminerConstants, --# E_Strings, --# LexTokenManager, --# Lists, --# Maths, --# Pairs, --# Pile, --# SPARK_IO, --# SP_Symbols, --# Statistics, --# Symbol_Set, --# SystemErrors; package Declarations --# own State; --# initializes State; is type UsedSymbolIterator is private; NullIterator : constant UsedSymbolIterator; procedure Initialize (It : out UsedSymbolIterator); --# global in State; --# derives It from State; function CurrentNode (It : in UsedSymbolIterator) return Cells.Cell; function NextNode (Heap : in Cells.Heap_Record; It : in UsedSymbolIterator) return UsedSymbolIterator; function IsNullIterator (It : in UsedSymbolIterator) return Boolean; procedure StartProcessing (Heap : in out Cells.Heap_Record); --# global in out Statistics.TableUsage; --# out State; --# derives Heap, --# Statistics.TableUsage from *, --# Heap & --# State from Heap; procedure Add (Heap : in out Cells.Heap_Record; Symbol : in Dictionary.Symbol); --# global in Dictionary.Dict; --# in out State; --# in out Statistics.TableUsage; --# derives Heap, --# State, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Heap, --# State, --# Symbol; procedure AddAttribute (Heap : in out Cells.Heap_Record; TickCell : in Cells.Cell); --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in State; --# in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# State, --# TickCell; procedure AddBitwiseOp (Heap : in out Cells.Heap_Record; OpCell : in Cells.Cell); --# global in Dictionary.Dict; --# in out State; --# in out 
Statistics.TableUsage; --# derives Heap, --# State, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Heap, --# OpCell, --# State; procedure AddProcedureExport (Heap : in out Cells.Heap_Record; ExportCell : in Cells.Cell); --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in out State; --# in out Statistics.TableUsage; --# derives Heap, --# State, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# ExportCell, --# Heap, --# LexTokenManager.State, --# State; procedure AddReturnVar (Heap : in out Cells.Heap_Record; ReturnVarCell : in Cells.Cell); --# global in Dictionary.Dict; --# in out State; --# in out Statistics.TableUsage; --# derives Heap, --# State, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Heap, --# ReturnVarCell, --# State; procedure AddUseOfRootInteger; --# global in out State; --# derives State from *; -- Traverses the DAG denoted by Root and adds all declarations -- needed by it. procedure Find_DAG_Declarations (Heap : in out Cells.Heap_Record; Root : in Cells.Cell); --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in out State; --# in out Statistics.TableUsage; --# derives Heap, --# State, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Root, --# State; -- Traverses the VC DAGs denoted by PredicatePair and adds all declarations -- needed by them. If IgnoreTriviallyTrueVCs is True, then VCs with all -- conclusions "True" are ignored, regardless of the hypotheses. procedure FindVCFormulaDeclarations (Heap : in out Cells.Heap_Record; PredicatePair : in Pairs.Pair; IgnoreTriviallyTrueVCs : in Boolean); --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in out State; --# in out Statistics.TableUsage; --# derives Heap, --# State, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Heap, --# IgnoreTriviallyTrueVCs, --# LexTokenManager.State, --# PredicatePair, --# State; procedure PrintDeclarationTail (File : in SPARK_IO.File_Type); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File; procedure OutputDeclarations (Heap : in out Cells.Heap_Record; File : in SPARK_IO.File_Type; Rule_File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Write_Rules : in Boolean; EndPosition : in LexTokenManager.Token_Position); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# EndPosition, --# ErrorHandler.Error_Context, --# File, --# Heap, --# LexTokenManager.State, --# Rule_File, --# Scope, --# SPARK_IO.File_Sys, --# State, --# Write_Rules & --# Heap, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Scope, --# State, --# Write_Rules; private type UsedSymbolIterator is record It : Cells.Cell; end record; NullIterator : constant UsedSymbolIterator := UsedSymbolIterator'(It => Cells.Null_Cell); end Declarations; spark-2012.0.deb/examiner/sem-walk_expression_p-walk_annotation_expression-up_wf_quantifier.adb0000644000175000017500000001315611753202336032257 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it 
and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Walk_Expression_P.Walk_Annotation_Expression) procedure Up_Wf_Quantifier (Node : in STree.SyntaxNode; E_Stack : in out Exp_Stack.Exp_Stack_Type; Scope : in out Dictionary.Scopes) is Errors_Found : Boolean; Range_Node : STree.SyntaxNode; Predicate_Result, Range_Result : Sem.Exp_Record; Quantifier_Type : Dictionary.Symbol; begin -- top of E_Stack is predicate data -- 2nd tos E_Stack is range info iff a range node exists Exp_Stack.Pop (Item => Predicate_Result, Stack => E_Stack); -- result of predicate node Errors_Found := Predicate_Result.Errors_In_Expression; if not Dictionary.TypeIsBoolean (Predicate_Result.Type_Symbol) then -- error case Errors_Found := True; ErrorHandler.Semantic_Error (Err_Num => 326, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => STree.Last_Sibling_Of (Start_Node => STree.Child_Node (Current_Node => Node))), Id_Str => LexTokenManager.Null_String); end if; Range_Node := STree.Next_Sibling (Current_Node => STree.Next_Sibling (Current_Node => STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node)))); -- ASSUME Range_Node = annotation_arange OR predicate if STree.Syntax_Node_Type (Node => Range_Node) = SP_Symbols.annotation_arange then -- ASSUME Range_Node = annotation_arange -- if the arange node exists there will be other stuff on the stack here Exp_Stack.Pop (Item => Range_Result, Stack => E_Stack); Errors_Found := Errors_Found or else Range_Result.Errors_In_Expression; Quantifier_Type := Dictionary.GetType (Dictionary.GetRegion (Scope)); -- If the quantifier is over Boolean or a subtype of Boolean, then -- an explicit range is illegal. 
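-------------------------------------------------------------------------------
-- Illustrative sketch, not part of the original sources: the checks in this
-- procedure govern quantified annotation expressions, for example (assuming
-- an integer subtype Index and an array Table of Boolean indexed by Index):
--
--    --# assert for all I in Index => (Table (I));
--    --# assert for all I in Index range 1 .. 10 => (Table (I));
--
-- The first form carries no explicit range; the second is accepted only
-- because the range 1 .. 10 is compatible with Index.  An explicit range
-- over a Boolean quantifier variable, a "range" that is not actually a
-- range, or a predicate that is not Boolean-valued is rejected here.
-------------------------------------------------------------------------------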
if Dictionary.TypeIsBoolean (Quantifier_Type) then ErrorHandler.Semantic_Error (Err_Num => 412, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Range_Node), Id_Str => LexTokenManager.Null_String); elsif not Range_Result.Is_ARange then Errors_Found := True; ErrorHandler.Semantic_Error (Err_Num => 98, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Range_Node), Id_Str => LexTokenManager.Null_String); elsif not Dictionary.CompatibleTypes (Scope, Quantifier_Type, Range_Result.Type_Symbol) then Errors_Found := True; ErrorHandler.Semantic_Error (Err_Num => 106, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Range_Node), Id_Str => LexTokenManager.Null_String); end if; elsif STree.Syntax_Node_Type (Node => Range_Node) /= SP_Symbols.predicate then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Range_Node = annotation_arange OR predicate in Up_Wf_Quantifier"); end if; -- turn result into a Boolean type result Exp_Stack.Push (X => Sem.Exp_Record'(Type_Symbol => Dictionary.GetPredefinedBooleanType, Other_Symbol => Dictionary.NullSymbol, Stream_Symbol => Dictionary.NullSymbol, Tagged_Parameter_Symbol => Dictionary.NullSymbol, Variable_Symbol => Dictionary.NullSymbol, Param_Count => 0, Param_List => Lists.Null_List, Sort => Sem.Type_Result, Arg_List_Found => False, Is_AVariable => False, Is_An_Entire_Variable => False, Errors_In_Expression => Errors_Found, Has_Operators => False, Is_Constant => False, Is_Static => False, Is_ARange => False, String_Value => LexTokenManager.Null_String, Value => Maths.NoValue, Range_RHS => Maths.NoValue), Stack => E_Stack); -- leave local scope of quantifier Scope := Dictionary.GetEnclosingScope (Scope); end Up_Wf_Quantifier; spark-2012.0.deb/examiner/sem-walk_expression_p-stack_identifier.adb0000644000175000017500000010565611753202336024561 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
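-------------------------------------------------------------------------------
-- Illustrative sketch, not part of the original sources: Up_Wf_Quantifier
-- above and Stack_Identifier below both follow the same expression-walker
-- discipline: pop the records describing the sub-expressions from the
-- expression stack, perform the semantic checks, then push one record that
-- summarises the whole expression.  In outline, with a hypothetical Result
-- record standing in for Sem.Exp_Record:
procedure Combine_Top_Two is
   type Result is record
      Is_Boolean : Boolean;
      Has_Errors : Boolean;
   end record;

   Stack : array (1 .. 100) of Result :=
     (others => Result'(Is_Boolean => False, Has_Errors => False));
   Top   : Natural := 2;          -- assume two operand records are on top

   Left, Right : Result;
begin
   Right := Stack (Top);  Top := Top - 1;   -- pop right operand
   Left  := Stack (Top);  Top := Top - 1;   -- pop left operand
   Top := Top + 1;                          -- push the combined summary
   Stack (Top) :=
     Result'(Is_Boolean => True,
             Has_Errors => Left.Has_Errors or else Right.Has_Errors);
end Combine_Top_Two;
-------------------------------------------------------------------------------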
-- --============================================================================= separate (Sem.Walk_Expression_P) procedure Stack_Identifier (Sym : in Dictionary.Symbol; Id_Str : in LexTokenManager.Lex_String; Node : in STree.SyntaxNode; Prefix : in Dictionary.Symbol; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; The_Heap : in out Heap.HeapRecord; Ref_Var : in SeqAlgebra.Seq; Dotted : in Boolean; Context : in Sem.Tilde_Context; Is_Annotation : in Boolean) is Result : Sem.Exp_Record; Sym_Local : Dictionary.Symbol; Tagged_Parameter_Type : Dictionary.Symbol; Loc : LexTokenManager.Token_Position; procedure Check_Globals_Are_Visible (Proc_Sym : in Dictionary.Symbol; Loc : in LexTokenManager.Token_Position; Prefix : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Ref_Var : in SeqAlgebra.Seq; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Loc, --# Prefix, --# Proc_Sym, --# Scope, --# SPARK_IO.File_Sys & --# Statistics.TableUsage, --# The_Heap from *, --# Dictionary.Dict, --# Prefix, --# Proc_Sym, --# Ref_Var, --# Scope, --# The_Heap; is It : Dictionary.Iterator; Glob_Sym, Enclosing_Unit : Dictionary.Symbol; Calling_Abstraction : Dictionary.Abstractions; ---------------------------------------------------- function Is_Local_Variable (Calling_Sym, Glob_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return Dictionary.GetRegion (Dictionary.GetScope (Glob_Sym)) = Calling_Sym; end Is_Local_Variable; ------------------------------------------------------ function Is_Own_Var_Of_Embedded_Package (Calling_Sym, Glob_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return (Dictionary.IsOwnVariable (Glob_Sym)) and then (Dictionary.GetScope (Dictionary.GetOwner (Glob_Sym)) = Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Calling_Sym)); end Is_Own_Var_Of_Embedded_Package; begin -- Check_Globals_Are_Visible -- first check if we are in package initialization. If so function -- call is illegal and we do not proceed with global checks Enclosing_Unit := Dictionary.GetEnclosingCompilationUnit (Scope); if Sem.In_Package_Initialization (Scope => Scope) then if not Dictionary.IsPredefined (Proc_Sym) then -- only predefined function calls are allowed. 
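-------------------------------------------------------------------------------
-- Illustrative sketch, not part of the original sources: the restriction
-- checked here rejects calls to user-defined functions from a package
-- initialization part.  A hypothetical package of the following shape would
-- therefore be flagged at the call to F in its body statements:
--
--    package P
--    --# own State;
--    --# initializes State;
--    is
--       State : Integer;
--       function F return Integer;
--    end P;
--
--    package body P is
--       function F return Integer is
--       begin
--          return 1;
--       end F;
--    begin
--       State := F;    -- user-defined function call during initialization
--    end P;
-------------------------------------------------------------------------------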
ErrorHandler.Semantic_Error (Err_Num => 329, Reference => ErrorHandler.No_Reference, Position => Loc, Id_Str => LexTokenManager.Null_String); end if; elsif Dictionary.Is_Subprogram (Enclosing_Unit) or else Dictionary.IsTaskType (Enclosing_Unit) then -- we need to check that the function globals are local vars, parameters or -- globals of enclosing unit Calling_Abstraction := Dictionary.GetAbstraction (Enclosing_Unit, Scope); It := Dictionary.FirstGlobalVariable (Dictionary.GetAbstraction (Proc_Sym, Scope), Proc_Sym); while not Dictionary.IsNullIterator (It) loop Glob_Sym := Sem.Substitute_Protected_Type_Self_Reference (Sym => Dictionary.CurrentSymbol (It), Prefix_Symbol => Prefix); SeqAlgebra.AddMember (The_Heap, Ref_Var, Natural (Dictionary.SymbolRef (Glob_Sym))); if (not Dictionary.Is_Global_Variable (Calling_Abstraction, Enclosing_Unit, Glob_Sym)) and then (not Dictionary.IsFormalParameter (Enclosing_Unit, Glob_Sym)) and then (not Is_Local_Variable (Calling_Sym => Enclosing_Unit, Glob_Sym => Glob_Sym)) and then (not Is_Own_Var_Of_Embedded_Package (Calling_Sym => Enclosing_Unit, Glob_Sym => Glob_Sym)) then ErrorHandler.Semantic_Error_Sym (Err_Num => 25, Reference => ErrorHandler.No_Reference, Position => Loc, Sym => Glob_Sym, Scope => Scope); end if; It := Dictionary.NextSymbol (It); end loop; elsif Dictionary.IsPackage (Enclosing_Unit) then -- no check required, there is no equivalent of checking the globals or parameters for a package -- and simple visibility of local package variables will have been checked when the function's -- own global anno was checked -- -- NOTE. There may be need of check if we relax declarative order rules since an own var -- might then be declared after the function that uses it. null; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Checking call to function in Check_Globals_Are_Visible where calling scope is " & "none of package, subprogram or task body"); end if; end Check_Globals_Are_Visible; --------------------------------------- procedure Check_Called_Function_Is_Wellformed (Sym : in Dictionary.Symbol; Loc : in LexTokenManager.Token_Position; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Loc, --# Scope, --# SPARK_IO.File_Sys, --# Sym; is begin if not Dictionary.SubprogramSignatureIsWellformed (Dictionary.GetAbstraction (Sym, Scope), Sym) then ErrorHandler.Semantic_Warning (Err_Num => 399, Position => Loc, Id_Str => LexTokenManager.Null_String); end if; end Check_Called_Function_Is_Wellformed; --------------------------------------- procedure Check_Package_Init_Rules (Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Loc : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Loc, --# Scope, --# SPARK_IO.File_Sys, --# Sym; is begin -- two checks required: (1) illegal tampering with remote own variables -- (2) illegal referencing of moded own variables if 
Sem.In_Package_Initialization (Scope => Scope) then --(1) if Dictionary.IsOwnVariable (Sym) and then Dictionary.GetOwner (Sym) /= Dictionary.GetEnclosingCompilationUnit (Scope) then ErrorHandler.Semantic_Error (Err_Num => 330, Reference => ErrorHandler.No_Reference, Position => Loc, Id_Str => LexTokenManager.Null_String); end if; --(2) if Dictionary.GetOwnVariableOrConstituentMode (Sym) /= Dictionary.DefaultMode then ErrorHandler.Semantic_Error (Err_Num => 719, Reference => ErrorHandler.No_Reference, Position => Loc, Id_Str => LexTokenManager.Null_String); end if; end if; end Check_Package_Init_Rules; --------------------------------------- function Is_Unresolved_Deferred_Constant (Sym : Dictionary.Symbol; Scope : Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; is begin return Dictionary.Is_Constant (Sym) and then not Dictionary.ConstantIsDeferredHere (Sym, Scope) and then not Dictionary.Is_Declared (Item => Sym) and then not (Dictionary.Get_Visibility (Scope => Scope) = Dictionary.Local); end Is_Unresolved_Deferred_Constant; ------------------------------------------- procedure Check_Reference_Ability (Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Loc : in LexTokenManager.Token_Position; Context : in Sem.Tilde_Context) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Context, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Loc, --# Scope, --# SPARK_IO.File_Sys, --# Sym; is Subprog_Sym : Dictionary.Symbol; function Is_Protected_Element_Of (Sym, Subprog_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is Result : Boolean := False; TheUnit : Dictionary.Symbol; It : Dictionary.Iterator; begin TheUnit := Dictionary.GetRegion (Dictionary.GetScope (Subprog_Sym)); if Dictionary.IsProtectedType (TheUnit) then It := Dictionary.FirstProtectedElement (The_Protected_Type => TheUnit); while It /= Dictionary.NullIterator loop Result := Dictionary.CurrentSymbol (It) = Sym; exit when Result; It := Dictionary.NextSymbol (It); end loop; end if; return Result; end Is_Protected_Element_Of; begin -- Check_Reference_Ability case Context is when Sem.Precondition | Sem.Function_Return => Subprog_Sym := Dictionary.GetEnclosingCompilationUnit (Scope); if not Dictionary.IsFunction (Subprog_Sym) and then Dictionary.Is_Variable (Sym) and then not Dictionary.IsQuantifiedVariable (Sym) and then not Dictionary.IsImport (Dictionary.GetAbstraction (Subprog_Sym, Scope), Subprog_Sym, Sym) and then -- Unconstrained formal parameters can appear in preconditions not (Dictionary.IsFormalParameter (Subprog_Sym, Sym) and then Dictionary.IsUnconstrainedArrayType (Dictionary.GetType (Sym))) and then -- in protected bodies, allow freeer access to protected elements so that assertions -- can be made about relationship between entry barriers and PO state. 
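-------------------------------------------------------------------------------
-- Illustrative sketch, not part of the original sources: Check_Reference_
-- Ability only lets a variable appear in the precondition of a procedure (or
-- similar non-function unit) if it is an import of that unit, a quantified
-- variable, an unconstrained array formal, or a protected element as just
-- described.  For a hypothetical procedure
--
--    procedure Reset (Count : out Integer)
--    --# global in Limit;
--    --# derives Count from Limit;
--    --# pre Limit > 0;
--
-- the precondition is accepted because Limit is an import, whereas a
-- precondition naming Count would be rejected, since Count is only an
-- export of Reset.
-------------------------------------------------------------------------------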
This is a temp -- solution to the problem described in SEPR 1542 not (Is_Protected_Element_Of (Sym => Sym, Subprog_Sym => Subprog_Sym)) then ErrorHandler.Semantic_Error (Err_Num => 322, Reference => ErrorHandler.No_Reference, Position => Loc, Id_Str => LexTokenManager.Null_String); end if; when Sem.Postcondition => null; when Sem.Code => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Precondition_Failure, Msg => "Stack_Identifier.Check_Reference_Ability called with Context=Code"); end case; end Check_Reference_Ability; ----------------------- function Context_To_Use (Is_Annotation : Boolean) return Dictionary.Contexts is Result : Dictionary.Contexts; begin if Is_Annotation then Result := Dictionary.ProofContext; else Result := Dictionary.ProgramContext; end if; return Result; end Context_To_Use; ----------------------- procedure Check_Use_Of_Unchecked_Conversion (Sym_Local : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Loc : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# Scope, --# Sym_Local & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Loc, --# Scope, --# SPARK_IO.File_Sys, --# Sym_Local; is function Returns_Type_With_Check (Func_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is Return_Type : Dictionary.Symbol; begin -- Returns true if Func_Sym returns a scalar type that is not Boolean. -- These are types for which range check VCs are generated; other types -- do not have checks. We want to generate different warnings for use of -- unchecked conversions depending on the type returned. Return_Type := Dictionary.GetType (Func_Sym); return Dictionary.TypeIsScalar (Return_Type) and then not Dictionary.TypeIsBoolean (Return_Type); end Returns_Type_With_Check; begin -- Check_Use_Of_Unchecked_Conversion if Dictionary.IsAnUncheckedConversion (Sym_Local) then -- mark unit as a user of U_C for benefit of VCG BuildGraph Dictionary.AddUsesUncheckedConversion (Dictionary.GetEnclosingCompilationUnit (Scope)); -- warn user -- For each warning, use the symbol for the implicit proof function. This is done to -- make sure that the symbols will match if there is a justification for the warning. if Returns_Type_With_Check (Func_Sym => Sym_Local) then -- weaker warning where RTC VCs provide some further protection ErrorHandler.Semantic_Warning_Sym (Err_Num => 12, Position => Loc, Sym => Dictionary.GetImplicitProofFunction (Dictionary.IsAbstract, Sym_Local), Scope => Scope); else -- stronger warning because no VCs to help us ErrorHandler.Semantic_Warning_Sym (Err_Num => 13, Position => Loc, Sym => Dictionary.GetImplicitProofFunction (Dictionary.IsAbstract, Sym_Local), Scope => Scope); end if; end if; end Check_Use_Of_Unchecked_Conversion; -------------------------------------------------------------------- -- this function checks if the symbol passed is a stream variable or -- a function which globally accesses a stream variable. 
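-------------------------------------------------------------------------------
-- Illustrative sketch, not part of the original sources: the "streams" that
-- Stream_References_By looks for below are own variables (or constituents)
-- declared with a mode, that is, external variables, together with Ada
-- functions that globally reference one.  A hypothetical boundary package
-- such as
--
--    package Sensor
--    --# own in Reading;
--    is
--       function Current return Integer;
--       --# global in Reading;
--    end Sensor;
--
-- makes both Reading and the function Current count as stream references
-- for the purposes of that test.
-------------------------------------------------------------------------------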
If it is -- it returns the symbol otherwise it returns NullSymbol function Stream_References_By (Sym : Dictionary.Symbol; Scope : Dictionary.Scopes) return Dictionary.Symbol --# global in Dictionary.Dict; is Result : Dictionary.Symbol := Dictionary.NullSymbol; It : Dictionary.Iterator; begin if Dictionary.IsAdaFunction (Sym) then -- IsAdaFunction used to block proof functions It := Dictionary.FirstGlobalVariable (Dictionary.GetAbstraction (Sym, Scope), Sym); while not Dictionary.IsNullIterator (It) loop if Dictionary.IsOwnVariableOrConstituentWithMode (Dictionary.CurrentSymbol (It)) then Result := Sym; exit; end if; It := Dictionary.NextSymbol (It); end loop; else -- check for stream variable case if Dictionary.IsOwnVariableOrConstituentWithMode (Sym) then Result := Sym; end if; end if; return Result; end Stream_References_By; -------------------------------------------- function Get_Enum_Lit_Value (Sym : Dictionary.Symbol) return Maths.Value --# global in Dictionary.Dict; --# in LexTokenManager.State; is Value : Maths.Value; begin if Dictionary.Enumeration_Literals_Are_Equal (Left_Symbol => Sym, Right_Symbol => Dictionary.GetTrue) then Value := Maths.TrueValue; elsif Dictionary.Enumeration_Literals_Are_Equal (Left_Symbol => Sym, Right_Symbol => Dictionary.GetFalse) then Value := Maths.FalseValue; else Value := Maths.ValueRep (Dictionary.GetPositionNumber (Sym)); end if; return Value; end Get_Enum_Lit_Value; -------------------------------------------- function Get_Object_Value (Sym : Dictionary.Symbol) return Maths.Value --# global in Dictionary.Dict; --# in LexTokenManager.State; is Value : Maths.Value; begin if Dictionary.Is_Constant (Sym) then Value := Maths.ValueRep (Dictionary.Get_Value (The_Constant => Sym)); else Value := Maths.NoValue; end if; return Value; end Get_Object_Value; begin -- Stack_Identifier Loc := STree.Node_Position (Node => Node); Tagged_Parameter_Type := Dictionary.NullSymbol; -- Tagged_Parameter_Type has a value if an inherited operation gets called; in -- this case it holds the value of the formal parameter of that operation -- that makes it inheritable. We need this when processing parameters in -- wf_positional_ and wf_named_association so that we know when to convert -- actual parameters that are of an extended, tagged type. Sym_Local := Sym; -- if we arrive here with a null symbol then a look up in wf_identifier -- or wf_selected_component has failed. Before we can simply report this -- we need to see if there are any inherited ops associated with tagged -- types that might be visible. -- -- We search only if the prefix we are using is null (i.e. we are stacking a -- simple identifier) or if it is a package. (The alternative, that it is a protected -- object means that we do not search for inherited (tagged) ops). if Dictionary.Is_Null_Symbol (Sym_Local) and then (Dictionary.Is_Null_Symbol (Prefix) or else Dictionary.IsPackage (Prefix)) then Dictionary.SearchForInheritedOperations (Name => Id_Str, Scope => Scope, Prefix => Prefix, Context => Context_To_Use (Is_Annotation => Is_Annotation), OpSym => Sym_Local, ActualTaggedType => Tagged_Parameter_Type); if Dictionary.Is_Null_Symbol (Sym_Local) or else Dictionary.IsProcedure (Sym_Local) then -- procedure is no good Sym_Local := Dictionary.NullSymbol; -- now check that any function found is an Ada function (or an implicit proof -- function associated with an Ada function. 
A pure proof function is not -- acceptable because proof functions are not primitives in SPARK elsif Dictionary.IsProofFunction (Sym_Local) and then not Dictionary.IsImplicitProofFunction (Sym_Local) then Sym_Local := Dictionary.NullSymbol; end if; end if; -- After looking at inherited ops we carry on as normal. Either processing -- the inherited op found or reporting failure to find anything. if Dictionary.Is_Null_Symbol (Sym_Local) then Result := Unknown_Symbol_Record; ErrorHandler.Semantic_Error2 (Err_Num => 1, Reference => ErrorHandler.No_Reference, Position => Loc, Id_Str1 => Id_Str, Id_Str2 => Dictionary.GetSimpleName (Prefix)); elsif not Is_Annotation and then Is_Unresolved_Deferred_Constant (Sym => Sym_Local, Scope => Scope) then Result := Unknown_Symbol_Record; ErrorHandler.Semantic_Error (Err_Num => 611, Reference => ErrorHandler.No_Reference, Position => Loc, Id_Str => LexTokenManager.Null_String); elsif Dictionary.IsFunction (Sym_Local) then -- If we have ended calling an inherited root operation then we need to tell -- the VCG which subprogram actually got called. We know that a root op has -- been called if TaggedParameterSym is not null. In that case we seed the -- syntax node with the symbol of the root function called. if not Dictionary.Is_Null_Symbol (Tagged_Parameter_Type) then STree.Add_Node_Symbol (Node => Node, Sym => Sym_Local); end if; if Is_Annotation then -- provisonal callability checkwhich excludes proof functions, will -- need to be sorted out once dict includes implicit proof funcs if not (Dictionary.IsProofFunction (Sym_Local) or else Dictionary.IsCallable (Sym_Local, Dotted, Scope)) then ErrorHandler.Semantic_Error (Err_Num => 163, Reference => ErrorHandler.No_Reference, Position => Loc, Id_Str => Dictionary.GetSimpleName (Sym_Local)); end if; else -- not Is_Annotation if Dictionary.Is_Generic_Subprogram (The_Symbol => Sym_Local) then ErrorHandler.Semantic_Error (Err_Num => 654, Reference => ErrorHandler.No_Reference, Position => Loc, Id_Str => Dictionary.GetSimpleName (Sym_Local)); else if Dictionary.IsCallable (Sym_Local, Dotted, Scope) then Check_Called_Function_Is_Wellformed (Sym => Sym_Local, Loc => Loc, Scope => Scope); Check_Globals_Are_Visible (Proc_Sym => Sym_Local, Loc => Loc, Prefix => Prefix, Scope => Scope, Ref_Var => Ref_Var, The_Heap => The_Heap); Check_Use_Of_Unchecked_Conversion (Sym_Local => Sym_Local, Scope => Scope, Loc => Loc); else ErrorHandler.Semantic_Error (Err_Num => 163, Reference => ErrorHandler.No_Reference, Position => Loc, Id_Str => Dictionary.GetSimpleName (Sym_Local)); end if; -- Dictionary.IsCallable end if; -- IsGenericSubprogram end if; -- Is_Annotation Result := Sem.Exp_Record' (Type_Symbol => Dictionary.GetType (Sym_Local), Other_Symbol => Sym_Local, Stream_Symbol => Stream_References_By (Sym => Sym_Local, Scope => Scope), Tagged_Parameter_Symbol => Tagged_Parameter_Type, Variable_Symbol => Dictionary.NullSymbol, Param_Count => 0, Param_List => Lists.Null_List, Sort => Sem.Is_Function, Arg_List_Found => False, Is_Static => False, Is_An_Entire_Variable => False, Errors_In_Expression => False, Has_Operators => False, Is_Constant => Dictionary.IsPredefined (Sym_Local), Is_ARange => False, Is_AVariable => False, String_Value => LexTokenManager.Null_String, Value => Maths.NoValue, Range_RHS => Maths.NoValue); if Dictionary.GetNumberOfSubprogramParameters (Sym_Local) = 0 then Result.Sort := Sem.Is_Object; end if; elsif Dictionary.IsObject (Sym_Local) then if Is_Annotation then Check_Reference_Ability (Sym => Sym_Local, Scope 
=> Scope, Loc => Loc, Context => Context); else Check_Package_Init_Rules (Sym => Sym_Local, Scope => Scope, Loc => Loc); end if; Result := Sem.Exp_Record' (Type_Symbol => Dictionary.GetType (Sym_Local), Other_Symbol => Sym_Local, Stream_Symbol => Stream_References_By (Sym => Sym_Local, Scope => Scope), Tagged_Parameter_Symbol => Dictionary.NullSymbol, Variable_Symbol => Dictionary.NullSymbol, Param_Count => 0, Param_List => Lists.Null_List, Sort => Sem.Is_Object, Arg_List_Found => False, Is_AVariable => False, Is_An_Entire_Variable => False, Errors_In_Expression => False, Has_Operators => False, Is_Static => Dictionary.IsStatic (Sym_Local, Scope), Is_Constant => Dictionary.Is_Constant (Sym_Local), Is_ARange => False, String_Value => LexTokenManager.Null_String, Value => Maths.NoValue, Range_RHS => Maths.NoValue); -- If the constant is of a string subtype then we need to recover the string's length -- and place it in RangeRHS because this is what we would get if a string literal was -- substituted for the constant and we should get the same behaviour for both cases if Dictionary.Is_Constant (Sym_Local) and then Dictionary.Types_Are_Equal (Left_Symbol => Dictionary.GetRootType (Result.Type_Symbol), Right_Symbol => Dictionary.GetPredefinedStringType, Full_Range_Subtype => False) then Result.String_Value := Dictionary.Get_Value (The_Constant => Sym_Local); Result.Range_RHS := Maths.ValueRep (Dictionary.GetArrayAttributeValue (Name => LexTokenManager.Last_Token, TypeMark => Result.Type_Symbol, Dimension => 1)); else Result.Value := Get_Object_Value (Sym => Sym_Local); end if; if Dictionary.Is_Variable (Sym_Local) then Result.Variable_Symbol := Sym_Local; Result.Is_AVariable := True; Result.Is_An_Entire_Variable := True; -- Do not allow protected variables (except streams) to appear in annotation expressions if Is_Annotation and then Dictionary.IsOwnVariable (Sym_Local) and then not Dictionary.IsOwnVariableOrConstituentWithMode (Sym_Local) and then Dictionary.GetOwnVariableProtected (Sym_Local) then Result.Errors_In_Expression := True; ErrorHandler.Semantic_Error_Sym (Err_Num => 940, Reference => ErrorHandler.No_Reference, Position => Loc, Sym => Sym_Local, Scope => Scope); end if; end if; elsif Dictionary.IsEnumerationLiteral (Sym_Local) then Result := Sem.Exp_Record' (Type_Symbol => Dictionary.GetType (Sym_Local), Other_Symbol => Sym_Local, Stream_Symbol => Dictionary.NullSymbol, Tagged_Parameter_Symbol => Dictionary.NullSymbol, Variable_Symbol => Dictionary.NullSymbol, Param_Count => 0, Param_List => Lists.Null_List, Sort => Sem.Type_Result, Arg_List_Found => False, Is_AVariable => False, Is_An_Entire_Variable => False, Errors_In_Expression => False, Has_Operators => False, Is_Static => True, Is_Constant => True, Is_ARange => False, String_Value => LexTokenManager.Null_String, Value => Get_Enum_Lit_Value (Sym => Sym_Local), Range_RHS => Maths.NoValue); elsif Dictionary.IsTypeMark (Sym_Local) then Result := Sem.Exp_Record' (Type_Symbol => Sym_Local, Other_Symbol => Sym_Local, Stream_Symbol => Dictionary.NullSymbol, Tagged_Parameter_Symbol => Dictionary.NullSymbol, Variable_Symbol => Dictionary.NullSymbol, Param_Count => 0, Param_List => Lists.Null_List, Sort => Sem.Is_Type_Mark, Arg_List_Found => False, Is_AVariable => False, Is_An_Entire_Variable => False, Errors_In_Expression => False, Has_Operators => False, Is_Static => False, Is_Constant => False, Is_ARange => False, String_Value => LexTokenManager.Null_String, Value => Maths.NoValue, Range_RHS => Maths.NoValue); elsif 
Dictionary.IsPackage (Sym_Local) then Result := Sem.Exp_Record' (Type_Symbol => Dictionary.GetUnknownTypeMark, Other_Symbol => Sym_Local, Stream_Symbol => Dictionary.NullSymbol, Tagged_Parameter_Symbol => Dictionary.NullSymbol, Variable_Symbol => Dictionary.NullSymbol, Param_Count => 0, Param_List => Lists.Null_List, Sort => Sem.Is_Package, Arg_List_Found => False, Is_AVariable => False, Is_An_Entire_Variable => False, Errors_In_Expression => False, Has_Operators => False, Is_Static => False, Is_Constant => False, Is_ARange => False, String_Value => LexTokenManager.Null_String, Value => Maths.NoValue, Range_RHS => Maths.NoValue); elsif Dictionary.IsKnownDiscriminant (Sym_Local) then Result := Sem.Exp_Record' (Type_Symbol => Dictionary.GetType (Sym_Local), Other_Symbol => Sym_Local, Stream_Symbol => Dictionary.NullSymbol, Tagged_Parameter_Symbol => Dictionary.NullSymbol, Variable_Symbol => Sym_Local, Param_Count => 0, Param_List => Lists.Null_List, Sort => Sem.Is_Object, Arg_List_Found => False, Is_AVariable => False, Is_An_Entire_Variable => False, Errors_In_Expression => False, Has_Operators => False, Is_Static => True, Is_Constant => True, Is_ARange => False, String_Value => LexTokenManager.Null_String, Value => Maths.NoValue, Range_RHS => Maths.NoValue); else Result := Unknown_Symbol_Record; ErrorHandler.Semantic_Error (Err_Num => 5, Reference => ErrorHandler.No_Reference, Position => Loc, Id_Str => Id_Str); end if; Exp_Stack.Push (X => Result, Stack => E_Stack); end Stack_Identifier; spark-2012.0.deb/examiner/indexmanager-cache.ads0000644000175000017500000001420611753202336020450 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with ContextManager; with E_Strings; with LexTokenLists; with LexTokenManager; --# inherit CommandLineData, --# ContextManager, --# ErrorHandler, --# E_Strings, --# IndexManager, --# IndexManager.Index_Table_P, --# LexTokenLists, --# LexTokenManager, --# SPARK_IO; private package IndexManager.Cache --# own The_Unit_Hash; --# initializes The_Unit_Hash; is procedure Context_Manager_Unit_Types_Image (Unit_Type : in ContextManager.UnitTypes); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Unit_Type; procedure Add_Unit (Unit : in LexTokenLists.Lists; Unit_Types : in ContextManager.UnitTypes; Source_Filename : in E_Strings.T; Index_Filename : in LexTokenManager.Lex_String; Index_Position : in IndexManager.File_Position); --# global in CommandLineData.Content; --# in Index_Table_P.Index_Table; --# in out ErrorHandler.Error_Context; --# in out Index_Table_P.Fatal_Error; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out The_Unit_Hash; --# derives ErrorHandler.Error_Context, --# Index_Table_P.Fatal_Error, --# The_Unit_Hash from *, --# Index_Filename, --# Index_Table_P.Index_Table, --# LexTokenManager.State, --# Source_Filename, --# The_Unit_Hash, --# Unit, --# Unit_Types & --# LexTokenManager.State from *, --# Source_Filename & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ErrorHandler.Error_Context, --# Index_Filename, --# Index_Position, --# Index_Table_P.Index_Table, --# LexTokenManager.State, --# Source_Filename, --# The_Unit_Hash, --# Unit, --# Unit_Types; procedure Get_Unit (Required_Unit : in LexTokenLists.Lists; Unit_Types : in ContextManager.UnitTypes; Source_Filename : out LexTokenManager.Lex_String; Index_Filename : out LexTokenManager.Lex_String; Found : out Boolean); --# global in The_Unit_Hash; --# derives Found, --# Index_Filename, --# Source_Filename from Required_Unit, --# The_Unit_Hash, --# Unit_Types; procedure Add_Components (Unit : in LexTokenLists.Lists; Components : in IndexManager.Component_Lists; Index_Filename : in LexTokenManager.Lex_String; Index_Position : in IndexManager.File_Position); --# global in CommandLineData.Content; --# in Index_Table_P.Index_Table; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out Index_Table_P.Fatal_Error; --# in out SPARK_IO.File_Sys; --# in out The_Unit_Hash; --# derives ErrorHandler.Error_Context, --# Index_Table_P.Fatal_Error, --# The_Unit_Hash from *, --# Components, --# Index_Filename, --# Index_Table_P.Index_Table, --# LexTokenManager.State, --# The_Unit_Hash, --# Unit & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Components, --# ErrorHandler.Error_Context, --# Index_Filename, --# Index_Position, --# Index_Table_P.Index_Table, --# LexTokenManager.State, --# The_Unit_Hash, --# Unit; procedure Get_Components (Required_Unit : in LexTokenLists.Lists; Components : out IndexManager.Component_Lists; Index_Filename : out LexTokenManager.Lex_String; Found : out Boolean); --# global in The_Unit_Hash; --# derives Components, --# Found, --# Index_Filename from Required_Unit, --# The_Unit_Hash; end IndexManager.Cache; spark-2012.0.deb/examiner/sem.ads0000644000175000017500000001137011753202336015530 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you 
can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with CommandLineData; with STree; use type CommandLineData.Flow_Analysis_Options; use type CommandLineData.Language_Profiles; use type STree.Iterator; use type STree.SyntaxNode; --# inherit Casing, --# CommandLineData, --# CompleteCheck, --# ComponentManager, --# ContextManager.Ops, --# Debug, --# Declarations, --# Dictionary, --# ErrorHandler, --# Error_Types, --# ExaminerConstants, --# E_Strings, --# FlowAnalyser, --# Graph, --# Heap, --# LexTokenManager, --# LexTokenManager.Relation_Algebra, --# LexTokenManager.Relation_Algebra.String, --# LexTokenManager.Seq_Algebra, --# Lists, --# Maths, --# RefList, --# RelationAlgebra, --# SeqAlgebra, --# SimpleLists, --# SLI, --# SPARK_IO, --# SPrint, --# SP_Symbols, --# Statistics, --# StmtStack, --# STree, --# SystemErrors, --# VCG; package Sem --# own State; is procedure CompUnit (Top_Node : in STree.SyntaxNode; Do_VCG : in Boolean); --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Declarations.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out STree.Table; --# in out VCG.Invoked; --# out State; --# derives Declarations.State, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Graph.Table, --# LexTokenManager.State, --# SLI.State, --# SPARK_IO.File_Sys, --# Statistics.TableUsage, --# StmtStack.S, --# STree.Table, --# VCG.Invoked from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Do_VCG, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Top_Node & --# State from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Do_VCG, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Top_Node; --# post STree.Table = STree.Table~; end Sem; spark-2012.0.deb/examiner/vcg.idx0000644000175000017500000000061311753202337015537 0ustar eugeneugenvcg specification is in vcg.ads vcg body is in vcg.adb vcg.producevcs subunit is in vcg-producevcs.adb vcg.producevcs.buildgraph subunit is in vcg-producevcs-buildgraph.adb vcg.producevcs.buildgraph.buildannotationexpndag subunit is in vcg-producevcs-buildgraph-buildannotationexpndag.adb vcg.producevcs.buildgraph.buildexpndag subunit is in vcg-producevcs-buildgraph-buildexpndag.adb 
spark-2012.0.deb/examiner/sem-walk_expression_p-walk_annotation_expression-down_wf_quantifier.adb0000644000175000017500000001145211753202336032577 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Walk_Expression_P.Walk_Annotation_Expression) procedure Down_Wf_Quantifier (Node : in STree.SyntaxNode; E_Stack : in out Exp_Stack.Exp_Stack_Type; Scope : in out Dictionary.Scopes; Next_Node : out STree.SyntaxNode) is Ident_Node, Type_Node : STree.SyntaxNode; Quantifier_Sym, Type_Sym : Dictionary.Symbol; Ident_Str : LexTokenManager.Lex_String; begin Ident_Node := STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node)); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Down_Wf_Quantifier"); Ident_Str := STree.Node_Lex_String (Node => Ident_Node); Type_Node := STree.Next_Sibling (Current_Node => Ident_Node); -- ASSUME Type_Node = type_mark SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Type_Node) = SP_Symbols.type_mark, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Type_Node = type_mark in Down_Wf_Quantifier"); -- continue tree walk from next node after the type mark; this is either the range -- or the predicate Next_Node := STree.Next_Sibling (Current_Node => Type_Node); -- ASSUME Next_Node = annotation_arange OR predicate SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_arange or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.predicate, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = annotation_arange OR predicate in Down_Wf_Quantifier"); -- check type declared and scalar Sem.Wf_Type_Mark (Node => Type_Node, Current_Scope => Scope, Context => Dictionary.ProofContext, Type_Sym => Type_Sym); if not Dictionary.TypeIsScalar (Type_Sym) then ErrorHandler.Semantic_Error (Err_Num => 44, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Type_Node), Id_Str => LexTokenManager.Null_String); Type_Sym := Dictionary.GetUnknownTypeMark; end if; -- check quantified variable is not already declared if Dictionary.IsDefined (Name => Ident_Str, Scope => Scope, Context => Dictionary.ProofContext, Full_Package_Name => False) then Next_Node := STree.NullNode; -- prune walk to prevent error knock on Exp_Stack.Push (X => Sem.Unknown_Type_Record, Stack => E_Stack); -- result of whole Q.E. 
is unknown ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Ident_Node), Id_Str => Ident_Str); else Dictionary.AddQuantifiedVariable (Name => Ident_Str, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => STree.Node_Position (Node => Ident_Node), End_Position => STree.Node_Position (Node => Ident_Node)), TypeMark => Type_Sym, TheConstraint => Dictionary.NullSymbol, Region => Dictionary.GetRegion (Scope), Variable => Quantifier_Sym); -- plant quantifier symbol for recovery and use by VCG STree.Add_Node_Symbol (Node => Ident_Node, Sym => Quantifier_Sym); -- enter local scope of quantifier Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Quantifier_Sym); end if; end Down_Wf_Quantifier; spark-2012.0.deb/examiner/reflist.ads0000644000175000017500000003340711753202336016421 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- RefList -- -- Purpose: -- RefList is a utility package which is used to associate a node on the -- syntax tree with a dependency relation or a list of referenced variables. -- It is a higher-level wrapper around the Heap package. -- -- The Examiner, particularly the flow analyser, needs to know what -- information flows are associated with particular syntax nodes. -- For example, the node of an assignment statement will have an export -- (the assigned var) and 0 or more imports (the referenced variables in the -- assigned expression). Some nodes, notably Conditions, will have a list -- of referenced variables but no exports. -- -- Rather than store the dependency information in the syntax tree where it -- is only required for a small percentage of the nodes it is placed in a -- RefList. -- -- The association between a symbol table node and its dependencies in a -- Reflist object is via a hash table with the look up based on the syntax -- tree node number. -- -- It is the HashTable abstract data type which is visible in the -- specification and to which all the operations (methods) apply. -- -- The actual RefList information is stored in the Heap and is not directly -- accessible. The methods defined in this package must be used to manage -- the information. 
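-- (Illustrative aside, not part of the original source.)  As a concrete
-- instance of the association described above, consider the assignment
--
--    X := Y + Z;
--
-- The syntax node for this statement would be associated, via the hash
-- table, with one export, X, whose imports are the referenced variables
-- {Y, Z}; a condition node, by contrast, would carry only a referenced
-- variable list and no exports.
--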
-- -- The RefList package contains two procedures which are not directly -- related to the main purpose of the package described above: -- ExpandSeq and ExpandToComponentEntities, -- these procedures are concerned with expanding a list of objects to include -- all components of records appearing in the list. -- -- ExpandToComponentEntities uses the HashTable object for a compilation -- unit to expand every RefList to include component names of records -- referenced in it. -- -- Clients: -- RefList is used by Sem to build the dependency lists and by the -- FlowAnalyser to inspect the dependencies. -- -- ExpandSeq and ExpandToComponentEntities are only used by the FlowAnalyser. -- -- Use: -- An example of use the main use of RefLists: -- MainLoop.adb calls RefList.Init to initialize a HashTable object; -- Sem-CompUnit-WalkStatements-WF_Assign.adb adds a relation to a RefList -- by calling RefList.AddRelation with the initialized HashTable object. -- FlowAnalyser.FlowAnalyse.adb calls RefList.FirstExport and -- RefList.NextExport to iterate through the exports and calls -- RefList.DependencyRelation to obtain the dependencies associated with -- each export. -- -- For an example of the use of ExpandSeq and ExpandToComponentEntities see -- FlowAnalyser-FlowAnalyse as this is the only module which calls these -- procedures. -- -- Important principles are: -- -- 1. a HashTable object must be Initialized before it used; -- -- 2. RefList is a wrapper for Heap and if the Heap becomes -- exhausted of free storage elements an attempt to perform any of the -- operations of this package will cause termination with a fatal -- error. As this may occur the methods of RefList are not -- guaranteed to terminate. -- -- Extension: -- It is not expected that any extension will be made to this package. -- -------------------------------------------------------------------------------- with ComponentManager; with Dictionary; with Heap; with SeqAlgebra; with STree; use type Heap.Atom; --# inherit ComponentManager, --# Dictionary, --# ExaminerConstants, --# Heap, --# SeqAlgebra, --# SP_Symbols, --# Statistics, --# STree, --# SystemErrors; package RefList is -- HashTable is the ADT which maintains the association between syntax -- tree nodes and the sets of variables stored within The_Heap. type HashTable is private; -- Initializes the mapping between syntax nodes and sets to an empty -- relation. This must be called before using the HashTable, Table. procedure Init (Table : out HashTable); --# derives Table from ; -- Procedure to add Export and associated list of imports to hash table. -- Create a set from the Imports and TheExport and associate the given -- syntax tree node, Node, with the new set. -- Node must be a non null Syntax Tree node, Imports must be a non null -- Seq (although the sequence may be empty) and TheExport may be -- any dictionary symbol including a NullSymbol -- (representing derives null from Imports). -- BEWARE: AddRelation only makes a shallow copy of Imports; -- a reference (an alias) to Imports is created and therefore the -- corresponding actual parameter must not be disposed using -- SeqAlgebra.DisposeSeq. 
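   -- Illustrative sketch (not part of the original source): assuming a table
   -- T initialised with Init, a heap TheHeap, and an import set Import_Seq
   -- built with SeqAlgebra, a call registering the dependencies of an
   -- assignment node might look roughly like:
   --
   --    RefList.AddRelation (Table     => T,
   --                         TheHeap   => TheHeap,
   --                         Node      => Assign_Node,
   --                         TheExport => Exported_Var_Sym,
   --                         Imports   => Import_Seq);
   --
   -- Assign_Node, Exported_Var_Sym and Import_Seq are placeholder names;
   -- the real call sites are in Sem (see the Use section above).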
procedure AddRelation (Table : in out HashTable; TheHeap : in out Heap.HeapRecord; Node : in STree.SyntaxNode; TheExport : in Dictionary.Symbol; Imports : in SeqAlgebra.Seq); --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage from *, --# Node, --# Table, --# TheExport, --# TheHeap & --# Table from *, --# Node, --# TheHeap & --# TheHeap from *, --# Imports, --# Node, --# Table, --# TheExport; -- Associates an empty set with the given syntax tree node, Node. -- Node must be non null. procedure AddNullRelation (Table : in out HashTable; TheHeap : in out Heap.HeapRecord; Node : in STree.SyntaxNode); --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# Table, --# TheHeap from *, --# Node, --# Table, --# TheHeap; -------------extractor functions and procedures--------------------------- -- Returns true only if the given Node in the syntax tree has one or more -- exported variables. Node must be non null. function NodeHasExportList (Table : in HashTable; TheHeap : in Heap.HeapRecord; Node : in STree.SyntaxNode) return Boolean; -- True only if the given Export is dependent on one or more variables. -- The Atom representing the exported variable, The_Export, must be non null. function ExportHasDependencies (TheExport : in Heap.Atom; TheHeap : in Heap.HeapRecord) return Boolean; -- Provides a starting point for iterating through the set of Exported -- variables associated with a node (in an unspecified but repeatable order). -- It obtains the first Exported variable in the sequence. -- The Node must be non null and either represent a node with the dependency -- relation derives ; or a node which has at least one exported variable. procedure FirstExport (Table : in HashTable; TheHeap : in Heap.HeapRecord; Node : in STree.SyntaxNode; TheExport : out Heap.Atom); --# derives TheExport from Node, --# Table, --# TheHeap; -- pre "node represents 'derives ;'" or else NodeHasExportList (Table, TheHeap, Node); -- Provides the means to successively iterate over the set of exported -- variables. It returns the next variable in the sequence from TheExport -- (the order is unspecified but repeatable). -- TheExport must refer to an Exported variable. -- If there are no more Exported variables in the set a Heap.NullPointer -- is returned. function NextExport (TheHeap : Heap.HeapRecord; TheExport : Heap.Atom) return Heap.Atom; -- Returns a set of Imported variables on which The_Export depends. -- The_Export must refer to an Exported variable which has imported -- variables on which it is dependent. -- BEWARE: Only a shallow copy of the set of imported variables is returned. -- The returned value should be regarded as strictly read only unless it is -- intended to update the set associated with the syntax tree node. function DependencyList (TheHeap : Heap.HeapRecord; TheExport : Heap.Atom) return SeqAlgebra.Seq; -- pre ExportHasDependencies (TheExport, TheHeap); -- Some nodes of the syntax tree have no exported variables associated with -- them, for instance, conditional expressions in an if statement. -- In such cases there will usually be variables which are referenced in the -- expression and these are recorded in a set associated with the node. -- This subprogram provides a reference to the set of variables referenced -- by the syntax tree node, Node. -- Node must be non null and be a node which has no associated Exported -- variables. -- On successful completion Seq is guaranteed to be non null although -- it may be empty. 
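   -- Illustrative sketch (not part of the original source): putting the
   -- iterator subprograms above together, a flow-analysis client might walk
   -- the exports of a node roughly as follows (TheExport being a Heap.Atom
   -- and Import_Seq a placeholder for a SeqAlgebra.Seq):
   --
   --    RefList.FirstExport (Table, TheHeap, Node, TheExport);
   --    while TheExport /= Heap.NullPointer loop
   --       if RefList.ExportHasDependencies (TheExport, TheHeap) then
   --          Import_Seq := RefList.DependencyList (TheHeap, TheExport);
   --          -- ... examine Import_Seq ...
   --       end if;
   --       TheExport := RefList.NextExport (TheHeap, TheExport);
   --    end loop;
   --
   -- The loop-exit test against Heap.NullPointer follows the description of
   -- NextExport above.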
procedure ReferencedVarList (Table : in HashTable; TheHeap : in Heap.HeapRecord; Node : in STree.SyntaxNode; Seq : out SeqAlgebra.Seq); --# derives Seq from Node, --# Table, --# TheHeap; -- pre not NodeHasExportList (Table, TheHeap, Node); -- On completion the set Seq is one of the following depending on -- the sort of syntax tree node represented by Node: -- (1) For a node with a null derives list: empty set -- (2) For a node with no exports but a referenced variable list: -- the referenced variables -- (3) For a node with one or more exports: the union of the imports which -- affect each export -- Node must be non null. -- On completion Seq is guaranteed to be non null but it may be empty. procedure AllReferencedVariables (Table : in HashTable; TheHeap : in out Heap.HeapRecord; Node : in STree.SyntaxNode; Seq : out SeqAlgebra.Seq); --# global in out Statistics.TableUsage; --# derives Seq, --# TheHeap from Node, --# Table, --# TheHeap & --# Statistics.TableUsage from *, --# Node, --# Table, --# TheHeap; ------------- Record Component Expansion procedures--------------------------- -- ExpandSeq extends a list of variables given by TheSeq to include the -- components of all record variables named in the list. procedure ExpandSeq (ComponentData : in ComponentManager.ComponentData; TheSeq : in SeqAlgebra.Seq; TheHeap : in out Heap.HeapRecord); --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# TheHeap from *, --# ComponentData, --# Dictionary.Dict, --# TheHeap, --# TheSeq; -- This procedure analyses the hash table, Table, (associated with a -- compilation unit) and iterates over each entry in the Table. -- For each entry representing a syntax node with dependencies: -- determine if the node has a dependency relation or a list of imports, -- if it has a dependency relation, then for each export expand the RefList -- of imports, -- if it only has a list of variables then expand the RefList of variables. -- Expansion extends the RefList to include the components of all record -- variables named in the list. procedure ExpandToComponentEntities (ComponentData : in ComponentManager.ComponentData; Table : in HashTable; TheHeap : in out Heap.HeapRecord); --# global in Dictionary.Dict; --# in STree.Table; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# TheHeap from *, --# ComponentData, --# Dictionary.Dict, --# STree.Table, --# Table, --# TheHeap; private -- The hash function is a modulus of HashMax. HashMax : constant Integer := 25500; subtype HashIndex is Integer range 0 .. HashMax; type HashTable is array (HashIndex) of Natural; end RefList; spark-2012.0.deb/examiner/error_types.ads0000644000175000017500000001315311753202336017322 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with LexTokenManager; with E_Strings; with Dictionary; --# inherit Dictionary, --# E_Strings, --# LexTokenManager; package Error_Types is -------------------------------------------------------- -- Types common to ErrorHandler and Error_IO packages -- -------------------------------------------------------- type Error_Class is ( LexErr, SyntaxErr, SyntaxRec, UncondFlowErr, CondlFlowErr, UncondDependencyErr, CondlDependencyErr, SemanticErr, DepSemanticErr, WarningWithPosition, WarningWithoutPosition, Note, ControlFlowErr, IneffectiveStat, StabilityErr, UsageErr, NoErr); type NameSorts is ( None, Symbol, Entity, LexString, ParserSymbol, StabilityIndex, ThePartition); -- a "Name" is a kind of variant record, storing one of various NameSorts is numerical form type Names is record Name_Sort : NameSorts; -- case Name_Sort is -- when ??? => Name_Sym : Dictionary.Symbol; -- when ??? => Name_Str : LexTokenManager.Lex_String; -- when others => Pos : Natural; -- end case; end record; NoName : constant Names := Names'(Name_Sort => None, Name_Sym => Dictionary.NullSymbol, Name_Str => LexTokenManager.Null_String, Pos => 0); ThePartitionName : constant Names := Names'(Name_Sort => ThePartition, Name_Sym => Dictionary.NullSymbol, Name_Str => LexTokenManager.Null_String, Pos => 0); -- error messages have a number, curently in the range 0 .. 999; subtype ErrNumRange is Integer range 0 .. 999; -- Error messages are handled in 2 forms. The NumericError form is space efficient -- and is used for adding and sorting messages into line number order; that is the work -- of ErrorBuffer type NumericError is record ErrorType : Error_Class; Position : LexTokenManager.Token_Position; Scope : Dictionary.Scopes; ErrorNum : ErrNumRange; Reference : Natural; Name1, Name2, Name3 : Names; end record; Empty_NumericError : constant NumericError := NumericError' (NoErr, LexTokenManager.Token_Position'(Start_Line_No => 0, Start_Pos => 0), Dictionary.NullScope, 0, 0, NoName, NoName, NoName); -- The StringError form includes a conversion of the message into string form and is therefore not -- space efficient but is suitable for printing; that is the work of ConvertToString as called -- by PrintErrors and AppendErrors (and also as a sid effect of Add). 
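   -- Illustrative sketch (not part of the original source): a buffered
   -- message might first exist in the compact numeric form, for example
   --
   --    NumericError'(ErrorType => SemanticErr,
   --                  Position  => Some_Position,
   --                  Scope     => Some_Scope,
   --                  ErrorNum  => 1,
   --                  Reference => 0,
   --                  Name1     => Some_Name,
   --                  Name2     => NoName,
   --                  Name3     => NoName)
   --
   -- where Some_Position, Some_Scope and Some_Name are placeholders, and is
   -- only expanded into the StringError form below when output is produced.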
type StringError is record MessageId : ErrNumRange; ErrorType : Error_Class; Position : LexTokenManager.Token_Position; Message : E_Strings.T; end record; Empty_StringError : constant StringError := StringError'(0, NoErr, LexTokenManager.Token_Position'(Start_Line_No => 0, Start_Pos => 0), E_Strings.Empty_String); -- constants used as offsets to 'val/'pos of various error kind enumerations -- so as to ensure that they all have unique error numbers UncondFlowErrorOffset : constant ErrNumRange := 20; UncondDependencyErrorOffset : constant ErrNumRange := 50; StabilityErrOffset : constant ErrNumRange := 40; UsageErrOffset : constant ErrNumRange := 30; ControlFlowErrOffset : constant ErrNumRange := 1; CondFlowErrorOffset : constant ErrNumRange := 500; CondDependencyErrorOffset : constant ErrNumRange := 600; -- To track when explanations have been given, so that we can restrict such explanations -- to the first occurrence of each error number, we need to know whether the conversion -- of a numeric error to a string error is being made for one of the three following -- purposes. type ConversionRequestSource is (ForScreen, ForReportSelectedFiles, ForReportIndexedFiles, ForListing); subtype ForReport is ConversionRequestSource range ForReportSelectedFiles .. ForReportIndexedFiles; end Error_Types; spark-2012.0.deb/examiner/cell_storage.ads0000644000175000017500000001606711753202335017416 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- Cell_Storage -- -- Purpose: -- This pakcage supports the Cells package, proving an abstraction -- of an automatically-extending array of Cell_Content indexed -- by the type Cell. -- -- In the past, the Examiner used a fixed-size array to -- offer this abstraction, but this had the drawback of a fixed -- capacity. This package, introduced in Release 10.1 solves -- that by offering an implementation where the Vector automatically -- extends its capacity as required. -- -- Implementation -- Uses Ada.Containers.Vectors in the (hidden) private part and -- body. 
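-- Illustrative sketch (not part of the original source): assuming a Vector V
-- created with Initialize and a Cell_Content value Content, the Cells
-- package can grow the storage simply by appending:
--
--    Cell_Storage.Append (V, Content);          -- capacity extends as needed
--    C := Cell_Storage.Last_Index (V);          -- Cell index of new element
--    Content := Cell_Storage.Get_Element (V, C);
--
-- C and Content are placeholder names; only the subprograms declared below
-- are assumed.
--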
-------------------------------------------------------------------------------- with Ada.Containers; with Ada.Containers.Vectors; with Dictionary; with LexTokenManager; with SP_Symbols; with SPARK.Ada.Containers; --# inherit Dictionary, --# ExaminerConstants, --# LexTokenManager, --# SPARK.Ada.Containers, --# SP_Symbols; package Cell_Storage is ------------------------------------------------------------- -- Cell Kinds and Attributes -- -- At this level, the Cell Heap is essentially polymorphic and -- weakly-typed - each Cell has various attributes whose -- usage varies depending on the "Kind" of a Cell. -- -- Cells have a "Kind" (corresponding to the Discriminant -- or Tag if we were able to use discriminated records or -- tagged types here). The Kind of a Cell determines which -- operations are available and the meaning of other -- attributes. ------------------------------------------------------------- type Cell_Kind is ( Named_Const, Manifest_Const, Attrib_Value, Attrib_Function, Pending_Function, Declared_Function, Aggregate_Counter, Proof_Function, Field_Access_Function, Field_Update_Function, Element_Function, Update_Function, Succ_Function, Pred_Function, Incomplete_Aggregate, Mk_Aggregate, List_Function, Abs_Function, Trunc_Function, FDL_Div_Op, Procedure_Export, Procedure_Name, Call_Counter, Bitwise_Op, Modified_Op, -- Assignment Op, Reference, -- R-Value of variable Fixed_Var, Return_Var, Root_Integer, Unconstrained_Attribute_Prefix, -- usedfor U'xxx where U is an unconstrained object Constraining_Index, -- gives the index for a constrained subtype of uncon obj Function_Call_In_Proof_Context, Proof_Function_Obtain_Precondition, Proof_Function_Obtain_Return, Proof_Function_Syntax_Node, Quantifier, -- Used to stash naturals in a cell, only used for stacks, -- etc. For example see DAG.Build_Annotation_Expression, -- Create_Saved_Context_DAG where we use this to stash the 'val -- of an enum (the loop direction). Internal_Natural, -- Used to stash Dictionary.Scopes in a cell. The scope is -- broken down in to the unit (which is stored in the -- Dictionary.Symbol field) and the visibility (which is stored -- via 'pos in the natural field). Internal_Scope, Unknown_Kind); type Cell is range 0 .. SPARK.Ada.Containers.Count_Type'Last; --# assert Cell'Base is Integer; ----------------------------------------------------------- -- Cells have a "rank" attribute, which may be used to -- establish a partial ordering of cells. This is used, -- for example, in Declarations, where the rank is used to -- produce declaration-before-use order in the FDL -- declarations ----------------------------------------------------------- type Cell_Rank is new Short_Integer range 0 .. 
32767; type Cell_Content is record A_Ptr, B_Ptr, C_Ptr, Copy : Cell; Lex_Str : LexTokenManager.Lex_String; Val : Natural; Assoc_Var : Dictionary.Symbol; Op_Symbol : SP_Symbols.SP_Symbol; Rank : Cell_Rank; Kind : Cell_Kind; Free : Boolean; Marked : Boolean; end record; pragma Pack (Cell_Content); -- Vector is an automatcially-extending array of Cell_Content type Vector is private; procedure Initialize (Initial_Length : in SPARK.Ada.Containers.Count_Type; V : out Vector); --# derives V from Initial_Length; function Last_Index (V : Vector) return Cell; function Get_Element (V : in Vector; Index : in Cell) return Cell_Content; procedure Set_Element (V : in out Vector; Index : in Cell; Value : in Cell_Content); --# derives V from *, --# Index, --# Value; -- Appends Value to V, extending the capacity of V if necessary procedure Append (V : in out Vector; Value : in Cell_Content); --# derives V from *, --# Value; private --# hide Cell_Storage; package Vectors is new Ada.Containers.Vectors (Index_Type => Cell, Element_Type => Cell_Content); type Vector is record Vec : Vectors.Vector; end record; end Cell_Storage; spark-2012.0.deb/examiner/labels.ads0000644000175000017500000000661011753202336016207 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Cell_Storage; with Cells; with Pairs; --# inherit Cells, --# Cell_Storage, --# Clists, --# Pairs, --# Statistics, --# Structures; package Labels is type Label is private; function LabelHead (L : Label) return Cells.Cell; function FirstPair (Heap : Cells.Heap_Record; L : Label) return Pairs.Pair; function NextPair (Heap : Cells.Heap_Record; P : Pairs.Pair) return Pairs.Pair; function IsNull (L : Label) return Boolean; function CellToLabel (C : Cells.Cell) return Label; procedure AppendPair (Heap : in out Cells.Heap_Record; NewPair : in Pairs.Pair; LabelName : in Label); --# derives Heap from *, --# LabelName, --# NewPair; procedure CreateLabel (Heap : in out Cells.Heap_Record; NewLabel : out Label); --# global in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Heap & --# NewLabel from Heap; procedure CopyLabel (Heap : in out Cells.Heap_Record; Original : in Label; Copy : out Label); --# global in out Statistics.TableUsage; --# derives Copy, --# Heap from Heap, --# Original & --# Statistics.TableUsage from *, --# Heap, --# Original; procedure AddLabels (Heap : in out Cells.Heap_Record; Label_1 : in Label; Label_2 : in Label); --# derives Heap from *, --# Label_1, --# Label_2; procedure MultiplyLabels (Heap : in out Cells.Heap_Record; Label_1 : in Label; Label_2 : in Label; Product : out Label); --# global in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Heap, --# Label_1, --# Label_2 & --# Product from Heap; private type Label is range 0 .. Cell_Storage.Cell'Last; --# assert Label'Base is Integer; end Labels; spark-2012.0.deb/examiner/sem-walk_expression_p-add_name.adb0000644000175000017500000000312411753202336022765 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Walk_Expression_P) procedure Add_Name (Name : in LexTokenManager.Lex_String; List : in Lists.List; Heap_Param : in out Lists.List_Heap; Present : out Boolean) is Ok : Boolean; begin Lists.Add_Name (Heap => Heap_Param, The_List => List, Name => Name, Already_Present => Present, Ok => Ok); if not Ok then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.List_Overflow_In_Expression, Msg => "in Add_Name"); end if; end Add_Name; ././@LongLink0000000000000000000000000000014700000000000011567 Lustar rootrootspark-2012.0.deb/examiner/sem-wf_package_declaration-wf_package_specification-wf_anno-wf_init_spec.adbspark-2012.0.deb/examiner/sem-wf_package_declaration-wf_package_specification-wf_anno-wf_init_spec.a0000644000175000017500000003140111753202336032711 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SLI; separate (Sem.Wf_Package_Declaration.Wf_Package_Specification.Wf_Anno) procedure Wf_Init_Spec (Node : in STree.SyntaxNode; Pack_Sym : in Dictionary.Symbol; Scope_Type : in Enclosing_Scope_Types; Scope : in Dictionary.Scopes) is ------------------------------------------------------------------------- procedure Do_Library_Pack (Init_Spec_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from Dictionary.Dict, --# Init_Spec_Node, --# LexTokenManager.State, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Init_Spec_Node, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Init_Spec_Node, STree.Table) = SP_Symbols.initialization_specification; --# post STree.Table = STree.Table~; is Ident_List_Node, Next_Node : STree.SyntaxNode; It : STree.Iterator; Var_Sym : Dictionary.Symbol; begin Ident_List_Node := Child_Node (Current_Node => Init_Spec_Node); -- ASSUME Ident_List_Node = package_variable_list SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_List_Node) = SP_Symbols.package_variable_list, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_List_Node = package_variable_list in Do_Library_Pack"); It := Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Ident_List_Node, In_Direction => STree.Down); while not STree.IsNull (It) loop 
Next_Node := Get_Node (It => It); --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.identifier and --# Next_Node = Get_Node (It); Var_Sym := Dictionary.LookupImmediateScope (Name => Node_Lex_String (Node => Next_Node), Scope => Scope, Context => Dictionary.ProofContext); if Dictionary.Is_Null_Symbol (Var_Sym) then -- undeclared ErrorHandler.Semantic_Error (Err_Num => 139, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Node_Lex_String (Node => Next_Node)); elsif Dictionary.IsOwnVariable (Var_Sym) then -- ok as long as not moded if Dictionary.GetOwnVariableMode (Var_Sym) = Dictionary.DefaultMode then if not Dictionary.GetOwnVariableProtected (Var_Sym) then -- ok as long as not protected STree.Set_Node_Lex_String (Sym => Var_Sym, Node => Next_Node); Dictionary.AddInitializedOwnVariable (Var_Sym, Dictionary.Location'(Start_Position => Node_Position (Node => Next_Node), End_Position => Node_Position (Node => Next_Node))); else -- own protected are implicitly initialized ErrorHandler.Semantic_Error (Err_Num => 863, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Node_Lex_String (Node => Next_Node)); end if; else -- has a mode ErrorHandler.Semantic_Error (Err_Num => 708, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Node_Lex_String (Node => Next_Node)); end if; else -- not an own variable ErrorHandler.Semantic_Error (Err_Num => 174, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Node_Lex_String (Node => Next_Node)); end if; It := STree.NextNode (It); end loop; end Do_Library_Pack; ------------------------------------------------------------------------- procedure Do_Embedded_Pack (Init_Spec_Node : in STree.SyntaxNode; Pack_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from Dictionary.Dict, --# Init_Spec_Node, --# LexTokenManager.State, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Init_Spec_Node, --# LexTokenManager.State, --# Pack_Sym, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Init_Spec_Node, STree.Table) = SP_Symbols.initialization_specification; --# post STree.Table = STree.Table~; is Ident_List_Node, Last_Node, Next_Node : STree.SyntaxNode; Node_It : STree.Iterator; Var_Sym : Dictionary.Symbol; It : Dictionary.Iterator; begin Ident_List_Node := Child_Node (Current_Node => Init_Spec_Node); -- ASSUME Ident_List_Node = package_variable_list SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_List_Node) = SP_Symbols.package_variable_list, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_List_Node = package_variable_list in Do_Embedded_Pack"); Last_Node := Ident_List_Node; Node_It := Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Ident_List_Node, In_Direction => STree.Down); while not STree.IsNull (Node_It) loop Next_Node := Get_Node (It => Node_It); --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.identifier and --# Next_Node = Get_Node (Node_It); Last_Node 
:= Next_Node; Var_Sym := Dictionary.LookupImmediateScope (Name => Node_Lex_String (Node => Next_Node), Scope => Scope, Context => Dictionary.ProofContext); if Dictionary.Is_Null_Symbol (Var_Sym) then -- undeclared ErrorHandler.Semantic_Error (Err_Num => 139, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Node_Lex_String (Node => Next_Node)); elsif Dictionary.IsOwnVariable (Var_Sym) and then Dictionary.OwnVariableIsAnnounced (Var_Sym) then -- potentially ok if Dictionary.GetOwnVariableMode (Var_Sym) /= Dictionary.DefaultMode then -- it does have a mode which is not allowed ErrorHandler.Semantic_Error (Err_Num => 708, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Node_Lex_String (Node => Next_Node)); elsif not Dictionary.OwnVariableIsInitialized (Dictionary.GetSubject (Var_Sym)) then -- initialization not allowed because subject not initialized ErrorHandler.Semantic_Error (Err_Num => 78, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Node_Lex_String (Node => Next_Node)); elsif Dictionary.GetOwnVariableProtected (Var_Sym) then -- ok as long as not protected own protected are -- implicitly initialized ErrorHandler.Semantic_Error (Err_Num => 863, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Node_Lex_String (Node => Next_Node)); else STree.Set_Node_Lex_String (Sym => Var_Sym, Node => Next_Node); Dictionary.AddInitializedOwnVariable (Var_Sym, Dictionary.Location'(Start_Position => Node_Position (Node => Next_Node), End_Position => Node_Position (Node => Next_Node))); end if; else -- not an announced own variable ErrorHandler.Semantic_Error (Err_Num => 174, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Node_Lex_String (Node => Next_Node)); end if; Node_It := STree.NextNode (Node_It); end loop; It := Dictionary.FirstOwnVariable (Pack_Sym); while not Dictionary.IsNullIterator (It) loop --# assert STree.Table = STree.Table~; Var_Sym := Dictionary.CurrentSymbol (It); if Dictionary.OwnVariableIsInitialized (Dictionary.GetSubject (Var_Sym)) and then not Dictionary.OwnVariableIsInitialized (Var_Sym) and then Dictionary.GetOwnVariableMode (Var_Sym) = Dictionary.DefaultMode then ErrorHandler.Semantic_Error (Err_Num => 79, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Last_Node), Id_Str => Dictionary.GetSimpleName (Var_Sym)); end if; It := Dictionary.NextSymbol (It); end loop; end Do_Embedded_Pack; begin -- Wf_Init_Spec if Scope_Type = In_Package then Do_Embedded_Pack (Init_Spec_Node => Node, Pack_Sym => Pack_Sym, Scope => Scope); else Do_Library_Pack (Init_Spec_Node => Node, Scope => Scope); end if; if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Initializes (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Node, Scope => Scope); end if; end Wf_Init_Spec; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_aggregate_choice_rep.adb0000644000175000017500000002727111753202336026050 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- 
version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Walk_Expression_P) procedure Wf_Aggregate_Choice_Rep (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; Heap_Param : in out Lists.List_Heap; Next_Node : out STree.SyntaxNode) is Name_Exp, Field_Info : Sem.Exp_Record; Field_Ident_Node : STree.SyntaxNode; Field_Ident : LexTokenManager.Lex_String; Field_Symbol : Dictionary.Symbol; Already_Present : Boolean; ---------------------------------------------------- function Range_Found (Node : STree.SyntaxNode) return Boolean --# global in STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.aggregate_choice_rep or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_aggregate_choice_rep; is Next_Node : STree.SyntaxNode; Result : Boolean; begin Next_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Next_Node = aggregate_choice_rep OR aggregate_choice OR -- annotation_aggregate_choice_rep OR annotation_aggregate_choice if STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.aggregate_choice_rep or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_aggregate_choice_rep then -- ASSUME Next_Node = aggregate_choice_rep OR annotation_aggregate_choice_rep Result := True; elsif STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.aggregate_choice or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_aggregate_choice then -- ASSUME Next_Node = aggregate_choice OR annotation_aggregate_choice Next_Node := STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Next_Node)); -- ASSUME Next_Node = range_constraint OR simple_expression OR NULL OR -- annotation_range_constraint OR annotation_simple_expression if Next_Node = STree.NullNode then -- ASSUME Next_Node = NULL Result := False; elsif STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.range_constraint or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.simple_expression or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_range_constraint or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_simple_expression then -- ASSUME Next_Node = range_constraint OR simple_expression OR -- annotation_range_constraint OR annotation_simple_expression Result := True; else Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = range_constraint OR simple_expression " & "annotation_range_constraint OR annotation_simple_expression in Range_Found"); end if; else Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = aggregate_choice_rep OR aggregate_choice " & "annotation_aggregate_choice_rep OR annotation_aggregate_choice in Range_Found"); end if; return Result; end Range_Found; ---------------------------------------------------- procedure Check_For_Branches (Start_Node, End_Node : in STree.SyntaxNode) --# global in 
CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# End_Node, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Start_Node, --# STree.Table; --# pre (STree.Syntax_Node_Type (End_Node, STree.Table) = SP_Symbols.aggregate_choice_rep or --# STree.Syntax_Node_Type (End_Node, STree.Table) = SP_Symbols.annotation_aggregate_choice_rep) and --# Start_Node = STree.Last_Child_Of (End_Node, STree.Table); is Next_Node : STree.SyntaxNode; begin Next_Node := Start_Node; while Next_Node /= End_Node loop if STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.aggregate_choice_rep or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_aggregate_choice_rep then Next_Node := STree.Next_Sibling (Current_Node => Next_Node); -- ASSUME Next_Node = aggregate_choice OR annotation_aggregate_choice SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.aggregate_choice or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_aggregate_choice, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = aggregate_choice OR annotation_aggregate_choice in Check_For_Branches"); ErrorHandler.Semantic_Error (Err_Num => 60, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Next_Node), Id_Str => LexTokenManager.Null_String); end if; Next_Node := STree.Parent_Node (Current_Node => Next_Node); end loop; end Check_For_Branches; begin -- Wf_Aggregate_Choice_Rep Exp_Stack.Pop (Item => Name_Exp, Stack => E_Stack); if Dictionary.IsUnknownTypeMark (Name_Exp.Type_Symbol) then Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); Next_Node := STree.NullNode; elsif Dictionary.TypeIsArray (Name_Exp.Type_Symbol) then Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); Next_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Next_Node = aggregate_choice_rep OR aggregate_choice OR -- annotation_aggregate_choice_rep OR annotation_aggregate_choice SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.aggregate_choice_rep or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.aggregate_choice or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_aggregate_choice_rep or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_aggregate_choice, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = aggregate_choice_rep OR aggregate_choice " & "annotation_aggregate_choice_rep OR annotation_aggregate_choice in Wf_Aggregate_Choice_Rep"); else -- must be a record if Range_Found (Node => Node) then Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); Exp_Stack.Push (X => Null_Parameter_Record, Stack => E_Stack); ErrorHandler.Semantic_Error (Err_Num => 60, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); else Field_Ident_Node := STree.Last_Child_Of (Start_Node => Node); if STree.Syntax_Node_Type (Node => Field_Ident_Node) = SP_Symbols.identifier then -- ASSUME Field_Ident_Node = identifier Field_Ident := STree.Node_Lex_String (Node => Field_Ident_Node); Field_Symbol := Dictionary.LookupSelectedItem (Prefix => Name_Exp.Type_Symbol, Selector => Field_Ident, Scope => Scope, Context => 
Dictionary.ProgramContext); if Dictionary.Is_Null_Symbol (Field_Symbol) then -- no such field Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); Exp_Stack.Push (X => Null_Parameter_Record, Stack => E_Stack); ErrorHandler.Semantic_Error (Err_Num => 8, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Field_Ident_Node), Id_Str => Field_Ident); else -- field name exists Add_Name (Name => Field_Ident, List => Name_Exp.Param_List, Heap_Param => Heap_Param, Present => Already_Present); if Already_Present then Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); Exp_Stack.Push (X => Null_Parameter_Record, Stack => E_Stack); ErrorHandler.Semantic_Error (Err_Num => 103, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Field_Ident_Node), Id_Str => Field_Ident); else -- no value thus far assigned STree.Set_Node_Lex_String (Sym => Field_Symbol, Node => Field_Ident_Node); Field_Info := Null_Parameter_Record; Field_Info.Other_Symbol := Field_Symbol; Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); Exp_Stack.Push (X => Field_Info, Stack => E_Stack); end if; end if; else -- identifier not found Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); Exp_Stack.Push (X => Null_Parameter_Record, Stack => E_Stack); ErrorHandler.Semantic_Error (Err_Num => 102, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Field_Ident_Node), Id_Str => Dictionary.GetSimpleName (Name_Exp.Other_Symbol)); end if; Check_For_Branches (Start_Node => Field_Ident_Node, End_Node => Node); end if; Next_Node := STree.NullNode; end if; end Wf_Aggregate_Choice_Rep; spark-2012.0.deb/examiner/errorhandler-justifications.adb0000644000175000017500000011535311753202336022454 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with CommandLineData; with E_Strings; with LexTokenManager; with SystemErrors; with XMLReport; use type CommandLineData.Flow_Analysis_Options; separate (ErrorHandler) package body Justifications is -- The Data Table, stored in Error_Context_Rec, has to serve two separate purposes: -- (1) a list of justifications in lexical order for listing at end of rep and lst files -- (2) a list structured by unit within the file that identifies which justifications are -- "in scope" at any particular time and which deal with nested units. -- For the first role, we just use the array of records in order. Index 1 is the first justification -- and Current_Slot the last one. We can find all entries bt looping over 1 .. Current_Slot. -- For the second role we use a stack and a linkages in the table. 
Each table entry has a previous pointer -- that points to the next lexically earlier entry of that unit. The constant End_Of_List means there -- are no more entries. For any particular unit, the starting point for the search will be the TOS of the -- unit stack. -- When we start processing a unit we push End_Of_List to start with but always keep TOS pointing at the most recent -- table entry that has been added. -- When we finish processing the errors for a unit, the stack is popped thus removing all -- justifications for that unit from consideration but leaving them unchanged in the table for the first -- purpose described earlier. -- Last_Line------------------------------------------------------------------- -- The default scope of a justification is from the start justify to the end of the unit -- in which it appears. An end justify can restrict this scope. We don't actually need -- to know the last line number as we can use the following constant to represent it. -- In effect End_Of_Line_Unit has the value "infinity". End_Line_Of_Unit_Marker : constant LexTokenManager.Line_Numbers := 0; -- constant of private type declared in package spec Null_Unmatched_Justification_Iterator : constant Unmatched_Justification_Iterator := Unmatched_Justification_Iterator' (Current_Table_Entry => ErrorHandler.End_Of_List, Current_Position => LexTokenManager.Token_Position'(Start_Line_No => 0, Start_Pos => 0)); No_Match_Message : constant String := "!!! No match found."; -- local stack operations ------------------------------- function Current_Unit_List_Head (Unit_Stack : ErrorHandler.Unit_Stacks) return ErrorHandler.Data_Table_Ptr is begin SystemErrors.RT_Assert (C => Unit_Stack.Ptr > 0, Sys_Err => SystemErrors.Precondition_Failure, Msg => "Stack underflow in error justification handler"); return Unit_Stack.Vector (Unit_Stack.Ptr).List_Items; end Current_Unit_List_Head; function Stack_Is_Empty (Unit_Stack : ErrorHandler.Unit_Stacks) return Boolean is begin return Unit_Stack.Ptr = 0; end Stack_Is_Empty; -- exported operations ------------------------------------------------------- procedure Start_Unit (Which_Table : in out ErrorHandler.Justifications_Data_Tables) is procedure Stack_New_Unit (Unit_Stack : in out ErrorHandler.Unit_Stacks) --# derives Unit_Stack from *; is begin SystemErrors.RT_Assert (C => Unit_Stack.Ptr < ErrorHandler.Max_Stack_Size, Sys_Err => SystemErrors.Precondition_Failure, Msg => "Stack overflow in error justification handler"); Unit_Stack.Ptr := Unit_Stack.Ptr + 1; Unit_Stack.Vector (Unit_Stack.Ptr) := ErrorHandler.Stack_Record'(List_Items => ErrorHandler.End_Of_List, Semantic_Error_In_Unit => False); end Stack_New_Unit; begin Stack_New_Unit (Unit_Stack => Which_Table.Unit_Stack); end Start_Unit; procedure Set_Current_Unit_Has_Semantic_Errors (Which_Table : in out ErrorHandler.Justifications_Data_Tables) is begin -- If a semantic error occurs before we get into the declarative part, or statements -- of a unit then the stack will be empty; however, there are no justifiable warnings -- for these regions so we can simply ignore the call if not Stack_Is_Empty (Unit_Stack => Which_Table.Unit_Stack) then Which_Table.Unit_Stack.Vector (Which_Table.Unit_Stack.Ptr).Semantic_Error_In_Unit := True; end if; end Set_Current_Unit_Has_Semantic_Errors; -- Operations concerned with reaching the end of a subprogram or other unit. 
We provide an -- iterator for finding all the unmatched justifications so that Errorhandler.End_Unit can report -- them and also a stack Pop operation to clear the now completed unit from scope. -- local functions shared by First_Unmatched_Justification and Next_Unmatched_Justification -- Don't report unmatched flow messages if a semantic error has occurred function Ignore_Flow_When_Semantic_Errors (Which_Table : ErrorHandler.Justifications_Data_Tables; Current_Item : ErrorHandler.Data_Table_Ptr) return Boolean is function Current_Unit_Has_Semantic_Errors (Unit_Stack : ErrorHandler.Unit_Stacks) return Boolean is begin SystemErrors.RT_Assert (C => Unit_Stack.Ptr > 0, Sys_Err => SystemErrors.Precondition_Failure, Msg => "Stack underflow in error justification handler"); return Unit_Stack.Vector (Unit_Stack.Ptr).Semantic_Error_In_Unit; end Current_Unit_Has_Semantic_Errors; begin return Current_Unit_Has_Semantic_Errors (Unit_Stack => Which_Table.Unit_Stack) and then Which_Table.Data_Table (Current_Item).Kind = ErrorHandler.Flow_Message; end Ignore_Flow_When_Semantic_Errors; -- Don't report unmatched information-flow messages in data-flow mode. It may be that lower -- levels of the calling hierarchy have full derives annotations, but higher levels are -- analysed in data-flow mode. The information-flow relations are not calculated in data- -- flow mode so the justifications cannot be checked, and are therefore ignored. -- If flow=auto and derives are present then the justification can be checked. -- If flow=auto and derives not present then the justification should not be there. function Ignore_Information_Flow_Errors (Which_Table : ErrorHandler.Justifications_Data_Tables; Current_Item : ErrorHandler.Data_Table_Ptr) return Boolean --# global in CommandLineData.Content; is function Is_IFA (Num : Natural) return Boolean is begin return Num = 50 or else Num = 57 or else Num = 601 or else Num = 602; end Is_IFA; begin -- Ignore_Information_Flow_Errors return CommandLineData.Content.Flow_Option = CommandLineData.Data_Flow and then Which_Table.Data_Table (Current_Item).Kind = ErrorHandler.Flow_Message and then Is_IFA (Num => Which_Table.Data_Table (Current_Item).Err_Num); end Ignore_Information_Flow_Errors; procedure First_Unmatched_Justification (It : out Unmatched_Justification_Iterator; Which_Table : in ErrorHandler.Justifications_Data_Tables) is Current_Item : ErrorHandler.Data_Table_Ptr; begin -- establish default answer It := Null_Unmatched_Justification_Iterator; -- seek unmatched items Current_Item := Current_Unit_List_Head (Unit_Stack => Which_Table.Unit_Stack); while Current_Item /= ErrorHandler.End_Of_List loop -- Only justifications with specific identifiers are checked for matches. 
-- A justification which applies to all does not have to have a match if not Which_Table.Data_Table (Current_Item).Applies_To_All and then Which_Table.Data_Table (Current_Item).Match_Count = 0 and then -- Unmatched item found, -- but we ignore it if it is a flow error justification and the flow analyser hasn't run (not Ignore_Flow_When_Semantic_Errors (Which_Table => Which_Table, Current_Item => Current_Item)) and then (not Ignore_Information_Flow_Errors (Which_Table => Which_Table, Current_Item => Current_Item)) then It := Unmatched_Justification_Iterator' (Current_Table_Entry => Which_Table.Data_Table (Current_Item).Previous, Current_Position => Which_Table.Data_Table (Current_Item).Position); exit; end if; Current_Item := Which_Table.Data_Table (Current_Item).Previous; end loop; end First_Unmatched_Justification; procedure Next_Unmatched_Justification (It : in out Unmatched_Justification_Iterator; Which_Table : in ErrorHandler.Justifications_Data_Tables) is Current_Item : ErrorHandler.Data_Table_Ptr; begin Current_Item := It.Current_Table_Entry; -- establish default answer It := Null_Unmatched_Justification_Iterator; -- seek unmatched items while Current_Item /= ErrorHandler.End_Of_List loop -- Only justifications with specific identifiers are checked for matches. -- A justification which applies to all does not have to have a match if not Which_Table.Data_Table (Current_Item).Applies_To_All and then Which_Table.Data_Table (Current_Item).Match_Count = 0 and then -- Unmatched item found, -- but we ignore it if it is a flow error justification and the flow analyser hasn't run (not Ignore_Flow_When_Semantic_Errors (Which_Table => Which_Table, Current_Item => Current_Item)) and then (not Ignore_Information_Flow_Errors (Which_Table => Which_Table, Current_Item => Current_Item)) then It := Unmatched_Justification_Iterator' (Current_Table_Entry => Which_Table.Data_Table (Current_Item).Previous, Current_Position => Which_Table.Data_Table (Current_Item).Position); exit; end if; Current_Item := Which_Table.Data_Table (Current_Item).Previous; end loop; end Next_Unmatched_Justification; function Error_Position (It : Unmatched_Justification_Iterator) return LexTokenManager.Token_Position is begin return It.Current_Position; end Error_Position; function Is_Null_Iterator (It : Unmatched_Justification_Iterator) return Boolean is begin return It = Null_Unmatched_Justification_Iterator; end Is_Null_Iterator; procedure End_Unit (Which_Table : in out ErrorHandler.Justifications_Data_Tables) is procedure Stack_Pop_Unit (Unit_Stack : in out ErrorHandler.Unit_Stacks) --# derives Unit_Stack from *; is begin SystemErrors.RT_Assert (C => Unit_Stack.Ptr > 0, Sys_Err => SystemErrors.Precondition_Failure, Msg => "Stack underflow in error justification handler"); Unit_Stack.Ptr := Unit_Stack.Ptr - 1; end Stack_Pop_Unit; begin -- Discard all justifications belonging to this now finished unit Stack_Pop_Unit (Unit_Stack => Which_Table.Unit_Stack); end End_Unit; -- end of operations associated with reaching the end of a unit procedure Start_Justification (Which_Table : in out ErrorHandler.Justifications_Data_Tables; Position : in LexTokenManager.Token_Position; Line : in LexTokenManager.Line_Numbers; Kind : in ErrorHandler.Justification_Kinds; Err_Num : in Natural; Identifiers : in ErrorHandler.Justification_Identifiers; Applies_To_All : in Boolean; Explanation : in E_Strings.T; Maximum_Justifications_Reached : out Boolean) is New_Entry : ErrorHandler.Data_Table_Entry; procedure Update_Current_Unit_List_Head 
(Which_Table : in out ErrorHandler.Justifications_Data_Tables) --# derives Which_Table from *; is begin SystemErrors.RT_Assert (C => Which_Table.Unit_Stack.Ptr > 0, Sys_Err => SystemErrors.Precondition_Failure, Msg => "Stack underflow in error justification handler"); -- Set top of the stack that is associated with Which_Table to most recently added table entry index Which_Table.Unit_Stack.Vector (Which_Table.Unit_Stack.Ptr).List_Items := Which_Table.Current_Slot; end Update_Current_Unit_List_Head; begin -- The return parameter below is only ever set True once, when the table first fills up. If the -- table is already full then we return False because we only want to generate one warning -- at the point of call where the table first fills, not at every call thereafter. Maximum_Justifications_Reached := False; if Which_Table.Accepting_More_Entries then Which_Table.Current_Slot := Which_Table.Current_Slot + 1; if Which_Table.Current_Slot = ErrorHandler.Max_Table_Entries then Maximum_Justifications_Reached := True; -- signal to caller that table has just become full Which_Table.Accepting_More_Entries := False; -- remember that table is full for future calls end if; New_Entry := ErrorHandler.Data_Table_Entry' (Kind => Kind, Err_Num => Err_Num, Identifiers => Identifiers, Applies_To_All => Applies_To_All, Explanation => Explanation, Position => Position, Start_Line => Line, End_Line => End_Line_Of_Unit_Marker, End_Found => False, Match_Count => 0, Match_Line => Which_Table.Data_Table (Which_Table.Current_Slot).Match_Line, Previous => Current_Unit_List_Head (Unit_Stack => Which_Table.Unit_Stack)); Which_Table.Data_Table (Which_Table.Current_Slot) := New_Entry; Update_Current_Unit_List_Head (Which_Table => Which_Table); end if; end Start_Justification; procedure End_Justification (Which_Table : in out ErrorHandler.Justifications_Data_Tables; Line : in LexTokenManager.Line_Numbers; Unmatched_End : out Boolean) is Entry_To_Check : ErrorHandler.Data_Table_Ptr; Match_Found : Boolean := False; Starting_Line : LexTokenManager.Line_Numbers; begin SystemErrors.RT_Assert (C => not Stack_Is_Empty (Unit_Stack => Which_Table.Unit_Stack), Sys_Err => SystemErrors.Precondition_Failure, Msg => "Stack underflow in End_Justification"); Entry_To_Check := Current_Unit_List_Head (Unit_Stack => Which_Table.Unit_Stack); while Entry_To_Check /= ErrorHandler.End_Of_List loop if not Which_Table.Data_Table (Entry_To_Check).End_Found then -- a start justify with no matching end has been found Match_Found := True; Which_Table.Data_Table (Entry_To_Check).End_Found := True; Which_Table.Data_Table (Entry_To_Check).End_Line := Line; -- end justify restricts line range over which it is valid -- At this point we have matched one begin with one end; however, there is one further check to do to -- deal with the form of justify statement that has several clauses separated by '&'. In this case we will -- have several entries all with the same start line. We want to set all of these to be closed by the -- end we have just found. 
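-- For illustration (hypothetical identifier X, not taken from this unit): a single
-- annotation of the form
--    --# accept F, 10, X, "reason" &
--    --#        F, 33, X, "reason";
-- creates two table entries sharing one Start_Line, and the single
--    --# end accept;
-- that follows must close both of them; the loop below achieves this by walking
-- back through the entries that carry the same Start_Line.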
Starting_Line := Which_Table.Data_Table (Entry_To_Check).Start_Line; Entry_To_Check := Which_Table.Data_Table (Entry_To_Check).Previous; while Entry_To_Check /= ErrorHandler.End_Of_List --# assert Match_Found; loop -- we process further linked table entries until we find one that has a different start line number -- and therefore is not part of the same multiple entry clause exit when Which_Table.Data_Table (Entry_To_Check).Start_Line /= Starting_Line; -- if we get to here, the line number is the same and it is part of the same clause Which_Table.Data_Table (Entry_To_Check).End_Found := True; Which_Table.Data_Table (Entry_To_Check).End_Line := Line; -- restricts line range over which justify valid Entry_To_Check := Which_Table.Data_Table (Entry_To_Check).Previous; end loop; end if; exit when Match_Found; -- each end justify should only match one start Entry_To_Check := Which_Table.Data_Table (Entry_To_Check).Previous; end loop; -- Tell caller that end didn't match a start so that warning can be raised; however, don't return True -- if the table has filled up otherwise we will get lots of unmatched end warnings for the justifications -- that never got added because the table was full Unmatched_End := not Match_Found and Which_Table.Accepting_More_Entries; end End_Justification; procedure Check_Whether_Justified (Which_Table : in out ErrorHandler.Justifications_Data_Tables; Line : in LexTokenManager.Token_Position; Kind : in ErrorHandler.Justification_Kinds; Err_Num : in Natural; Identifiers : in ErrorHandler.Justification_Identifiers; Match_Found : out Boolean) is type Search_Sort is (Search_Identifiers, Search_Applies_To_All); Current_Stack_Ptr : ErrorHandler.Stack_Index; function Matching_Entry_Found (The_Table_Entry : ErrorHandler.Data_Table_Entry; Line : LexTokenManager.Line_Numbers; Kind : ErrorHandler.Justification_Kinds; Err_Num : Natural; Identifiers : ErrorHandler.Justification_Identifiers; Search_For : Search_Sort) return Boolean --# global in LexTokenManager.State; is function Below_End_Line (Line, End_Line : LexTokenManager.Line_Numbers) return Boolean is Result : Boolean; begin if End_Line = End_Line_Of_Unit_Marker then Result := True; else Result := Line <= End_Line; end if; return Result; end Below_End_Line; function Identifiers_Match (The_Table_Entry : ErrorHandler.Data_Table_Entry; Identifiers : ErrorHandler.Justification_Identifiers) return Boolean --# global in LexTokenManager.State; is Result : Boolean := True; function Identifier_Matches (From_The_Table, From_The_Call : ErrorHandler.Justification_Identifier) return Boolean --# global in LexTokenManager.State; is Result : Boolean; begin -- Tricky comparison. From_The_Call will contain: a null string and a valid symbol; -- or a valid string and a null symbol; or both will be null. -- From_The_Table will contain either both null or both valid. 
-- We need to match as follows: if From_The_Call = ErrorHandler.Null_Justification_Identifier then -- both null, so we require From_The_Table to be exactly the same Result := From_The_Table = ErrorHandler.Null_Justification_Identifier; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => From_The_Call.String_Form, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq then Result := From_The_Call.Symbol_Form = From_The_Table.Symbol_Form; else -- Strings aren't null so compare them Result := LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => From_The_Call.String_Form, Lex_Str2 => From_The_Table.String_Form) = LexTokenManager.Str_Eq; end if; return Result; end Identifier_Matches; begin -- Identifiers_Match for I in Integer range 1 .. ErrorHandler.Max_Justification_Identifier_Length loop if not Identifier_Matches (From_The_Table => The_Table_Entry.Identifiers (I), From_The_Call => Identifiers (I)) then Result := False; exit; end if; end loop; return Result; end Identifiers_Match; begin -- Matching_Entry_Found return Line >= The_Table_Entry.Start_Line and then Below_End_Line (Line => Line, End_Line => The_Table_Entry.End_Line) and then Err_Num = The_Table_Entry.Err_Num and then Kind = The_Table_Entry.Kind -- Last because it is much the most expensive test and then ((Search_For = Search_Identifiers and then Identifiers_Match (The_Table_Entry => The_Table_Entry, Identifiers => Identifiers)) or else (Search_For = Search_Applies_To_All and then The_Table_Entry.Applies_To_All)); end Matching_Entry_Found; procedure Check_Justified_In_Unit (Search_For : in Search_Sort; Which_Table : in out ErrorHandler.Justifications_Data_Tables; Line : in LexTokenManager.Token_Position; Kind : in ErrorHandler.Justification_Kinds; Err_Num : in Natural; Identifiers : in ErrorHandler.Justification_Identifiers; Match_Found : out Boolean) --# global in LexTokenManager.State; --# derives Match_Found, --# Which_Table from Err_Num, --# Identifiers, --# Kind, --# LexTokenManager.State, --# Line, --# Search_For, --# Which_Table; is Entry_To_Check : ErrorHandler.Data_Table_Ptr; begin Match_Found := False; if not Stack_Is_Empty (Unit_Stack => Which_Table.Unit_Stack) then -- can't have a match if nothing is even in stack yet Entry_To_Check := Current_Unit_List_Head (Unit_Stack => Which_Table.Unit_Stack); while Entry_To_Check /= ErrorHandler.End_Of_List loop if Matching_Entry_Found (The_Table_Entry => Which_Table.Data_Table (Entry_To_Check), Line => Line.Start_Line_No, Kind => Kind, Err_Num => Err_Num, Identifiers => Identifiers, Search_For => Search_For) then -- note how many times we got a match Which_Table.Data_Table (Entry_To_Check).Match_Count := Which_Table.Data_Table (Entry_To_Check).Match_Count + 1; -- and retain the most recent line number where it happened Which_Table.Data_Table (Entry_To_Check).Match_Line := Line.Start_Line_No; -- finally, return result to caller Match_Found := True; exit; end if; Entry_To_Check := Which_Table.Data_Table (Entry_To_Check).Previous; end loop; end if; end Check_Justified_In_Unit; begin -- Check_Whether_Justified if CommandLineData.Content.Justification_Option = CommandLineData.Ignore then Match_Found := False; else -- First check against justification statements with specific identifiers. -- These miust be in the current unit. 
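-- For illustration (hypothetical identifier X, not taken from this unit): a flow
-- message 10 reported against X at line L is treated as justified if the current
-- unit holds an entry created from an annotation such as
--    --# accept F, 10, X, "X is unused here";
-- whose line range covers L (running to the end of the unit if no matching
--    --# end accept;
-- was found) and whose kind, error number and identifiers all match.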
Check_Justified_In_Unit (Search_For => Search_Identifiers, Which_Table => Which_Table, Line => Line, Kind => Kind, Err_Num => Err_Num, Identifiers => Identifiers, Match_Found => Match_Found); -- If a a specific identifier match has not been found and we are -- analysing auto-generated code, look for a match with an Applies_To_All -- justification defined in successively enclosing units starting with -- the current unit. if not Match_Found and then CommandLineData.Content.Language_Profile in CommandLineData.Auto_Code_Generators and then not Stack_Is_Empty (Which_Table.Unit_Stack) then Current_Stack_Ptr := Which_Table.Unit_Stack.Ptr; for Previous_Unit in reverse ErrorHandler.Stack_Index range ErrorHandler.Stack_Index'First .. Current_Stack_Ptr loop Which_Table.Unit_Stack.Ptr := Previous_Unit; Check_Justified_In_Unit (Search_For => Search_Applies_To_All, Which_Table => Which_Table, Line => Line, Kind => Kind, Err_Num => Err_Num, Identifiers => Identifiers, Match_Found => Match_Found); exit when Match_Found; end loop; Which_Table.Unit_Stack.Ptr := Current_Stack_Ptr; end if; end if; end Check_Whether_Justified; function Table_Contains_Entries (Which_Table : in ErrorHandler.Justifications_Data_Tables) return Boolean is begin return Which_Table.Current_Slot > 0; end Table_Contains_Entries; procedure Print_Justifications (Which_Table : in ErrorHandler.Justifications_Data_Tables; File : in SPARK_IO.File_Type) is procedure Print_Common_Header --# global in File; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File; is begin -- This string is common to both "Full" and "Brief" justifications modes, -- so that the justifications summary table (in either mode) can be -- recognized by the HTML report file generator. If this string changes, -- then Process_Report_Line in sparkhtml.adb will also need to be updated. SPARK_IO.Put_Line (File, "Expected messages marked with the accept annotation", 0); end Print_Common_Header; procedure Print_Full_Listing --# global in CommandLineData.Content; --# in File; --# in Which_Table; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# File, --# Which_Table; is procedure Print_Headers --# global in File; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File; is begin Print_Common_Header; SPARK_IO.Put_Line (File, "Type Msg Lines Reason Match", 0); SPARK_IO.Put_Line (File, " No. From To No. 
Line", 0); end Print_Headers; procedure Print_Kind (The_Type : in ErrorHandler.Justification_Kinds) --# global in File; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# The_Type; is begin case The_Type is when ErrorHandler.Flow_Message => SPARK_IO.Put_String (File, "Flow ", 0); when ErrorHandler.Warning_Message => SPARK_IO.Put_String (File, "Warn ", 0); end case; end Print_Kind; procedure Print_Line_No (The_Line : in LexTokenManager.Line_Numbers) --# global in CommandLineData.Content; --# in File; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# File, --# The_Line; is begin if The_Line = 0 then SPARK_IO.Put_String (File, " end", 0); elsif CommandLineData.Content.Plain_Output then SPARK_IO.Put_String (File, " ", 0); else SPARK_IO.Put_Integer (File, Integer (The_Line), 6, 10); end if; end Print_Line_No; procedure Print_Explanation (Ex_Explanation : in E_Strings.T) --# global in File; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Ex_Explanation, --# File; is Ch_Idx, Printed_Chars : Natural; Ch : Character; Field_Width : constant := 32; begin SPARK_IO.Put_String (File, " ", 0); Ch_Idx := 1; Printed_Chars := 0; loop exit when Ch_Idx > E_Strings.Get_Length (E_Str => Ex_Explanation); exit when Printed_Chars >= Field_Width; Ch := E_Strings.Get_Element (E_Str => Ex_Explanation, Pos => Ch_Idx); if Ch /= '"' then -- strip quotes SPARK_IO.Put_Char (File, E_Strings.Get_Element (E_Str => Ex_Explanation, Pos => Ch_Idx)); Printed_Chars := Printed_Chars + 1; end if; Ch_Idx := Ch_Idx + 1; end loop; -- if we haven't reached Field_Width then pad out with spaces for I in Natural range Printed_Chars .. Field_Width loop SPARK_IO.Put_Char (File, ' '); end loop; end Print_Explanation; begin -- Print_Full_Listing Print_Headers; for I in ErrorHandler.Data_Table_Index range 1 .. Which_Table.Current_Slot loop Print_Kind (The_Type => Which_Table.Data_Table (I).Kind); SPARK_IO.Put_Integer (File, Which_Table.Data_Table (I).Err_Num, 3, 10); Print_Line_No (The_Line => Which_Table.Data_Table (I).Start_Line); Print_Line_No (The_Line => Which_Table.Data_Table (I).End_Line); Print_Explanation (Which_Table.Data_Table (I).Explanation); SPARK_IO.Put_Integer (File, Which_Table.Data_Table (I).Match_Count, 4, 10); if Which_Table.Data_Table (I).Match_Count = 0 then SPARK_IO.Put_String (File, " " & No_Match_Message, 0); else Print_Line_No (The_Line => Which_Table.Data_Table (I).Match_Line); end if; SPARK_IO.New_Line (File, 1); end loop; SPARK_IO.New_Line (File, 2); end Print_Full_Listing; procedure Print_Brief_Listing --# global in File; --# in Which_Table; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# Which_Table; is Failed_Matches : Natural := 0; begin Print_Common_Header; for I in ErrorHandler.Data_Table_Index range 1 .. Which_Table.Current_Slot loop if Which_Table.Data_Table (I).Match_Count = 0 then Failed_Matches := Failed_Matches + 1; end if; end loop; SPARK_IO.Put_Integer (File, Integer (Which_Table.Current_Slot), 0, 10); SPARK_IO.Put_String (File, " message(s) marked as expected", 0); if Failed_Matches > 0 then SPARK_IO.Put_String (File, ", !!! 
Warning, ", 0); SPARK_IO.Put_Integer (File, Failed_Matches, 0, 10); SPARK_IO.Put_String (File, " message(s) did not occur", 0); end if; SPARK_IO.Put_Char (File, '.'); SPARK_IO.New_Line (File, 2); end Print_Brief_Listing; begin -- Print_Justifications if Table_Contains_Entries (Which_Table => Which_Table) then SPARK_IO.New_Line (File, 1); case CommandLineData.Content.Justification_Option is when CommandLineData.Full => Print_Full_Listing; when CommandLineData.Brief => Print_Brief_Listing; when CommandLineData.Ignore => null; end case; end if; end Print_Justifications; -- Precondition: Must be called on a report file, at the correct location in the schema procedure Print_Justifications_XML (Which_Table : in ErrorHandler.Justifications_Data_Tables; File : in SPARK_IO.File_Type) is procedure Print_Full_Listing --# global in File; --# in Which_Table; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives SPARK_IO.File_Sys from *, --# File, --# Which_Table, --# XMLReport.State & --# XMLReport.State from *, --# Which_Table; is No_Match_Explanation : E_Strings.T; function Print_Kind_To_String (The_Type : in ErrorHandler.Justification_Kinds) return E_Strings.T is Kind_String : E_Strings.T; begin case The_Type is when ErrorHandler.Flow_Message => Kind_String := E_Strings.Copy_String (Str => "Flow"); when ErrorHandler.Warning_Message => Kind_String := E_Strings.Copy_String (Str => "Warning"); end case; return Kind_String; end Print_Kind_To_String; function Print_Line_No_To_String (The_Line : in LexTokenManager.Line_Numbers) return E_Strings.T is Line_No_Str : E_Strings.T; begin if The_Line = 0 then Line_No_Str := E_Strings.Copy_String (Str => "end"); else E_Strings.Put_Int_To_String (Dest => Line_No_Str, Item => Integer (The_Line), Start_Pt => 1, Base => 10); end if; return Line_No_Str; end Print_Line_No_To_String; begin -- Print_Full_Listing No_Match_Explanation := E_Strings.Copy_String (Str => No_Match_Message); XMLReport.Start_Section (Section => XMLReport.S_Full_Justifications, Report => File); for I in ErrorHandler.Data_Table_Index range 1 .. Which_Table.Current_Slot loop XMLReport.Start_Full_Justification (Print_Kind_To_String (The_Type => Which_Table.Data_Table (I).Kind), Which_Table.Data_Table (I).Err_Num, Integer (Which_Table.Data_Table (I).Start_Line), Print_Line_No_To_String (The_Line => Which_Table.Data_Table (I).End_Line), Which_Table.Data_Table (I).Match_Count, Integer (Which_Table.Data_Table (I).Match_Line), File); if Which_Table.Data_Table (I).Match_Count = 0 then E_Strings.Put_String (File => File, E_Str => No_Match_Explanation); else E_Strings.Put_String (File => File, E_Str => SPARK_XML.Filter_String (Which_Table.Data_Table (I).Explanation)); end if; XMLReport.End_Full_Justification (File); end loop; XMLReport.End_Section (Section => XMLReport.S_Full_Justifications, Report => File); end Print_Full_Listing; procedure Print_Brief_Listing --# global in File; --# in Which_Table; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives SPARK_IO.File_Sys from *, --# File, --# Which_Table, --# XMLReport.State & --# XMLReport.State from *, --# Which_Table; is Failed_Matches : Natural := 0; begin for I in ErrorHandler.Data_Table_Index range 1 .. 
Which_Table.Current_Slot loop if Which_Table.Data_Table (I).Match_Count = 0 then Failed_Matches := Failed_Matches + 1; end if; end loop; XMLReport.Brief_Justifications (Natural (Which_Table.Current_Slot), Failed_Matches, File); end Print_Brief_Listing; begin -- Print_Justifications_XML if Table_Contains_Entries (Which_Table => Which_Table) then XMLReport.Start_Section (Section => XMLReport.S_Justifications, Report => File); case CommandLineData.Content.Justification_Option is when CommandLineData.Full => Print_Full_Listing; when CommandLineData.Brief => Print_Brief_Listing; when CommandLineData.Ignore => null; end case; XMLReport.End_Section (Section => XMLReport.S_Justifications, Report => File); end if; end Print_Justifications_XML; end Justifications; spark-2012.0.deb/examiner/sem-wf_context_clause.adb0000644000175000017500000000324011753202336021216 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) procedure Wf_Context_Clause (Node : in STree.SyntaxNode; Comp_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) is Always_False : Boolean; pragma Unreferenced (Always_False); begin --# accept F, 10, Always_False, "A specification can never with its own child" & --# F, 33, Always_False, "A specification can never with its own child"; Wf_Context_Clause_Package_Body (Node => Node, Comp_Sym => Comp_Sym, Scope => Scope, With_Public_Child => Always_False); end Wf_Context_Clause; spark-2012.0.deb/examiner/sem-compunit-walkstatements-wf_delay_until.adb0000644000175000017500000001025511753202336025413 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.CompUnit.WalkStatements) procedure Wf_Delay_Until (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Table : in out RefList.HashTable; Component_Data : in out ComponentManager.ComponentData) is Enc_Unit_Sym : Dictionary.Symbol; Result_Type : Exp_Record; Ref_Var : SeqAlgebra.Seq; Exp_Node : STree.SyntaxNode; begin SeqAlgebra.CreateSeq (TheHeap, Ref_Var); Exp_Node := Next_Sibling (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME Exp_Node = expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Exp_Node = expression in Wf_Delay_Until"); -- Process the expression of the delay_statement. Must be of type Ada.Real_Time. Walk_Expression_P.Walk_Expression (Exp_Node => Exp_Node, Scope => Scope, Type_Context => Dictionary.GetPredefinedTimeType, Context_Requires_Static => False, Ref_Var => Ref_Var, Result => Result_Type, Component_Data => Component_Data, The_Heap => TheHeap); -- Verify that Result_Type is compatible with Ada.Real_Time if not Dictionary.CompatibleTypes (Scope => Scope, Left => Result_Type.Type_Symbol, Right => Dictionary.GetPredefinedTimeType) then ErrorHandler.Semantic_Error (Err_Num => 866, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); end if; -- Verify that if the enclosing unit is a procedure then -- it has a delay annotation. Other enclosing units are invalid. -- Also mark the enclosing unit as having seen a delay statement. Enc_Unit_Sym := Dictionary.GetEnclosingCompilationUnit (Scope); if Dictionary.IsProcedure (Enc_Unit_Sym) and then not Dictionary.IsOrIsInProtectedScope (Scope) then if Dictionary.HasDelayProperty (Enc_Unit_Sym) then Dictionary.MarkAccountsForDelay (Enc_Unit_Sym); else ErrorHandler.Semantic_Error (Err_Num => 867, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Dictionary.GetSimpleName (Enc_Unit_Sym)); end if; elsif not Dictionary.IsTaskType (Enc_Unit_Sym) then -- Cannot call delay from the enclosing construct. ErrorHandler.Semantic_Error (Err_Num => 907, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; -- NullVariable is an out stream and so need to be considered an import Add_Stream_Effects (Table => Table, The_Heap => TheHeap, Node => Node, Export => Dictionary.GetNullVariable, Imports => Ref_Var); -- add reference variable list to RefList hash table for the statement node RefList.AddRelation (Table, TheHeap, Node, Dictionary.GetNullVariable, -- ref vars dumped in data sink Ref_Var); end Wf_Delay_Until; spark-2012.0.deb/examiner/sli-xref.ads0000644000175000017500000001134311753202336016475 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- Manage the cross-references. --# inherit CommandLineData, --# ContextManager, --# ContextManager.Ops, --# Dictionary, --# ExaminerConstants, --# E_Strings, --# FileSystem, --# LexTokenLists, --# LexTokenManager, --# SLI.IO, --# SPARK_IO, --# SystemErrors; private package SLI.Xref --# own State; --# initializes State; is -- Increment by 1 the number of separates found in the -- compilation unit (Comp_Unit). procedure Increment_Nb_Separates (Comp_Unit : in ContextManager.UnitDescriptors); --# global in out State; --# derives State from *, --# Comp_Unit; -- Add a cross-reference for the symbol (Sym). This symbol is -- declared in the compilation unit (Decl_Comp_Unit). The -- symbol is used in the compilation unit (Usage_Comp_Unit) at -- position (Pos). The type of reference is Ref_Type. procedure Add_Usage (Decl_Comp_Unit : in ContextManager.UnitDescriptors; Sym : in Dictionary.Symbol; Usage_Comp_Unit : in ContextManager.UnitDescriptors; Pos : in LexTokenManager.Token_Position; Ref_Type : in Character); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Decl_Comp_Unit, --# Dictionary.Dict, --# LexTokenManager.State, --# State, --# Sym, --# Usage_Comp_Unit & --# State from *, --# Decl_Comp_Unit, --# Dictionary.Dict, --# Pos, --# Ref_Type, --# Sym, --# Usage_Comp_Unit; -- Cleanup all the cross-references tables. procedure Cleanup_Decl_Comp_Unit; --# global in out State; --# derives State from *; -- Write all the cross-references for the compilation unit -- (Comp_Unit) and the closure in the SLI file. procedure Dump (Comp_Unit : in ContextManager.UnitDescriptors); --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out State; --# out IO.Stream_Buffer; --# derives IO.Stream_Buffer from Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# State & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# State & --# State from *, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State; end SLI.Xref; spark-2012.0.deb/examiner/pairs.adb0000644000175000017500000005420211753202336016042 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Clists; with CStacks; with Structures; with SP_Symbols; use type SP_Symbols.SP_Symbol; package body Pairs is -------------------------------------------------------------------------- -- Local subprograms supporting CombinePredicateWithAction and -- ComposeActions -------------------------------------------------------------------------- procedure Substitute (Heap : in out Cells.Heap_Record; Structure_1, Structure_2 : in Cells.Cell) --# global in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Heap, --# Structure_1, --# Structure_2; is ModCell, P : Cells.Cell; S : CStacks.Stack; DagFound : Boolean; DagRoot : Cells.Cell; function IsLeaf (Node : Cells.Cell) return Boolean --# global in Heap; is begin return Cells.Is_Null_Cell (Cells.Get_B_Ptr (Heap, Node)); end IsLeaf; ----------------------- procedure FindDagRep (RefCell : in Cells.Cell; DagFound : out Boolean; DagRoot : out Cells.Cell) --# global in Heap; --# in Structure_1; --# derives DagFound, --# DagRoot from Heap, --# RefCell, --# Structure_1; is RefVarName : Natural; RefVarOp : SP_Symbols.SP_Symbol; ModVarCell : Cells.Cell; begin RefVarName := Cells.Get_Natural_Value (Heap, RefCell); RefVarOp := Cells.Get_Op_Symbol (Heap, RefCell); ModVarCell := Clists.FirstCell (Heap, Structure_1); DagRoot := Cells.Null_Cell; loop if Cells.Is_Null_Cell (ModVarCell) then DagFound := False; exit; end if; if Cells.Get_Natural_Value (Heap, ModVarCell) = RefVarName and then Cells.Get_Op_Symbol (Heap, ModVarCell) = RefVarOp then DagFound := True; DagRoot := Cells.Get_B_Ptr (Heap, ModVarCell); exit; end if; ModVarCell := Clists.NextCell (Heap, ModVarCell); end loop; end FindDagRep; begin -- Substitute if Cells.Is_Reference_Cell (Heap, Structure_2) then FindDagRep (Structure_2, DagFound, DagRoot); if DagFound then Cells.Copy_Contents (Heap, DagRoot, Structure_2); end if; else -- Traverse Structure_2, using variant of tree-traversal -- algorithm of D.E. Knuth, Fundamental Algorithms, p.317. -- First mark roots of dags of Sructure_1 to prevent repeated copying. ModCell := Clists.FirstCell (Heap, Structure_1); loop exit when Cells.Is_Null_Cell (ModCell); Cells.Mark_Cell (Heap, Cells.Get_B_Ptr (Heap, ModCell)); ModCell := Clists.NextCell (Heap, ModCell); end loop; -- Traverse dag: CStacks.CreateStack (S); P := Structure_2; loop loop exit when Cells.Is_Null_Cell (P); CStacks.Push (Heap, P, S); if IsLeaf (P) or Cells.Is_Marked (Heap, P) then P := Cells.Null_Cell; else P := Cells.Get_A_Ptr (Heap, P); end if; end loop; exit when CStacks.IsEmpty (S); P := CStacks.Top (Heap, S); CStacks.Pop (Heap, S); if IsLeaf (P) or Cells.Is_Marked (Heap, P) then P := Cells.Null_Cell; else if Cells.Is_Reference_Cell (Heap, Cells.Get_A_Ptr (Heap, P)) and not Cells.Is_Marked (Heap, Cells.Get_A_Ptr (Heap, P)) then FindDagRep (Cells.Get_A_Ptr (Heap, P), DagFound, DagRoot); if DagFound then Cells.Set_A_Ptr (Heap, P, DagRoot); end if; -- Garbage collection ? 
end if; if Cells.Is_Reference_Cell (Heap, Cells.Get_B_Ptr (Heap, P)) and not Cells.Is_Marked (Heap, Cells.Get_B_Ptr (Heap, P)) then FindDagRep (Cells.Get_B_Ptr (Heap, P), DagFound, DagRoot); if DagFound then Cells.Set_B_Ptr (Heap, P, DagRoot); end if; P := Cells.Null_Cell; else P := Cells.Get_B_Ptr (Heap, P); end if; end if; end loop; -- unmark roots of Structure_1; ModCell := Clists.FirstCell (Heap, Structure_1); loop exit when Cells.Is_Null_Cell (ModCell); Cells.UnMark_Cell (Heap, Cells.Get_B_Ptr (Heap, ModCell)); ModCell := Clists.NextCell (Heap, ModCell); end loop; end if; end Substitute; -- Merges two lists of assigned variables. List1 becomes the merged list, -- of valid assignments. Defunct assignments are transferred to OldRootList. -- The procedure disposes of the head of List2. procedure MergeLists (Heap : in out Cells.Heap_Record; List1, List2, OldRootList : in Cells.Cell) --# global in out Statistics.TableUsage; --# derives Heap from *, --# List1, --# List2, --# OldRootList & --# Statistics.TableUsage from *, --# Heap; is List1_Cell, List2_Cell, NewList : Cells.Cell; begin -- MergeLists Clists.CreateList (Heap, NewList); loop List1_Cell := Clists.FirstCell (Heap, List1); List2_Cell := Clists.FirstCell (Heap, List2); if Cells.Is_Null_Cell (List1_Cell) then Clists.TransferCells (Heap, List2, NewList); exit; end if; if Cells.Is_Null_Cell (List2_Cell) then Clists.TransferCells (Heap, List1, NewList); exit; end if; if Cells.Get_Natural_Value (Heap, List1_Cell) < Cells.Get_Natural_Value (Heap, List2_Cell) then Clists.RemoveLeader (Heap, List1); Clists.AppendCell (Heap, List1_Cell, NewList); elsif Cells.Get_Natural_Value (Heap, List1_Cell) = Cells.Get_Natural_Value (Heap, List2_Cell) then -- cells are only same if OpKinds are also same - new IF if Cells.Get_Op_Symbol (Heap, List1_Cell) = Cells.Get_Op_Symbol (Heap, List2_Cell) then -- next 4 statements were entire original contents of this IF Clists.RemoveLeader (Heap, List1); Clists.RemoveLeader (Heap, List2); Clists.AppendCell (Heap, List1_Cell, OldRootList); Clists.AppendCell (Heap, List2_Cell, NewList); else -- OpKinds are different so we can make an arbitrary decision -- as to which to place first in NewList. 
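-- (Taking List1's entry first is safe here: List2's entry stays at the head of
-- its list and is appended to NewList on a later pass of the loop, so both
-- entries survive in the merged result.)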
Clists.RemoveLeader (Heap, List1); Clists.AppendCell (Heap, List1_Cell, NewList); end if; else -- if Cells.Get_Natural_Value (List1_Cell) > Cells.Get_Natural_Value (List2_Cell) then Clists.RemoveLeader (Heap, List2); Clists.AppendCell (Heap, List2_Cell, NewList); end if; end loop; Cells.Set_A_Ptr (Heap, List1, Cells.Get_A_Ptr (Heap, NewList)); Cells.Set_B_Ptr (Heap, List1, Cells.Get_B_Ptr (Heap, NewList)); Cells.Dispose_Of_Cell (Heap, List2); Cells.Dispose_Of_Cell (Heap, NewList); end MergeLists; ----------------------- procedure MarkAccessibleCells (Heap : in out Cells.Heap_Record; Root : in Cells.Cell; MarkedCellStack : out CStacks.Stack) --# global in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Heap, --# Root & --# MarkedCellStack from Heap, --# Root; is TopCell : Cells.Cell; UnexploredCellStack : CStacks.Stack; procedure Mark (C : in Cells.Cell) --# global in out Heap; --# in out MarkedCellStack; --# in out Statistics.TableUsage; --# in out UnexploredCellStack; --# derives Heap, --# UnexploredCellStack from C, --# Heap, --# MarkedCellStack, --# UnexploredCellStack & --# MarkedCellStack, --# Statistics.TableUsage from *, --# C, --# Heap, --# MarkedCellStack; is begin if not Cells.Is_Null_Cell (C) then if not Cells.Is_Marked (Heap, C) then Cells.Mark_Cell (Heap, C); CStacks.Push (Heap, C, MarkedCellStack); if not Cells.Is_Reference_Cell (Heap, C) then CStacks.Push (Heap, C, UnexploredCellStack); end if; end if; end if; end Mark; begin -- MarkAccessibleCells; CStacks.CreateStack (MarkedCellStack); CStacks.CreateStack (UnexploredCellStack); Mark (Root); loop exit when CStacks.IsEmpty (UnexploredCellStack); TopCell := CStacks.Top (Heap, UnexploredCellStack); CStacks.Pop (Heap, UnexploredCellStack); Mark (Cells.Get_A_Ptr (Heap, TopCell)); Mark (Cells.Get_B_Ptr (Heap, TopCell)); end loop; end MarkAccessibleCells; ------------------------- procedure CleanUpDags (Heap : in out Cells.Heap_Record; OldRootList : in Cells.Cell) --# global in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Heap, --# OldRootList; is DefunctCellStack, UnexploredCellStack : CStacks.Stack; TopCell : Cells.Cell; ----------------------- procedure MarkAndPush (C : in Cells.Cell) --# global in out DefunctCellStack; --# in out Heap; --# in out Statistics.TableUsage; --# in out UnexploredCellStack; --# derives DefunctCellStack, --# Statistics.TableUsage, --# UnexploredCellStack from *, --# C, --# DefunctCellStack, --# Heap & --# Heap from *, --# C, --# DefunctCellStack, --# UnexploredCellStack; is begin if not Cells.Is_Null_Cell (C) then if not Cells.Is_Marked (Heap, C) then Cells.Mark_Cell (Heap, C); CStacks.Push (Heap, C, DefunctCellStack); CStacks.Push (Heap, C, UnexploredCellStack); end if; end if; end MarkAndPush; begin -- CleanUpDags CStacks.CreateStack (UnexploredCellStack); CStacks.CreateStack (DefunctCellStack); MarkAndPush (OldRootList); loop exit when CStacks.IsEmpty (UnexploredCellStack); TopCell := CStacks.Top (Heap, UnexploredCellStack); CStacks.Pop (Heap, UnexploredCellStack); MarkAndPush (Cells.Get_A_Ptr (Heap, TopCell)); MarkAndPush (Cells.Get_B_Ptr (Heap, TopCell)); end loop; loop exit when CStacks.IsEmpty (DefunctCellStack); Cells.Dispose_Of_Cell (Heap, CStacks.Top (Heap, DefunctCellStack)); CStacks.Pop (Heap, DefunctCellStack); end loop; end CleanUpDags; ------------------------- procedure UnMarkCells (Heap : in out Cells.Heap_Record; MarkedCellStack : in out CStacks.Stack) --# derives Heap, --# MarkedCellStack from Heap, --# 
MarkedCellStack; is begin loop exit when CStacks.IsEmpty (MarkedCellStack); Cells.UnMark_Cell (Heap, CStacks.Top (Heap, MarkedCellStack)); CStacks.Pop (Heap, MarkedCellStack); end loop; -- CStacks.DisposeOfStack (Heap, MarkedCellStack); end UnMarkCells; ------------------------- function IsNullPair (P : Pair) return Boolean is begin return Cells.Is_Null_Cell (Cells.Cell (P)); end IsNullPair; -------------------------------------------------------------------------- function IsTrue (Heap : Cells.Heap_Record; P : Pair) return Boolean is begin return Cells.Is_Null_Cell (Cells.Get_B_Ptr (Heap, Cells.Cell (P))); end IsTrue; -------------------------------------------------------------------------- function IsUnitAction (Heap : Cells.Heap_Record; P : Pair) return Boolean is begin return Cells.Is_Null_Cell (Cells.Get_C_Ptr (Heap, Cells.Cell (P))); end IsUnitAction; -------------------------------------------------------------------------- function CellToPair (C : Cells.Cell) return Pair is begin return Pair (C); end CellToPair; -------------------------------------------------------------------------- function PairHead (P : Pair) return Cells.Cell is begin return Cells.Cell (P); end PairHead; -------------------------------------------------------------------------- -- Exported subprograms -------------------------------------------------------------------------- procedure CopyPair (Heap : in out Cells.Heap_Record; Original : in Pair; Copy : out Pair) is CopyName, RootOfNextPair : Cells.Cell; begin -- Deep copy, but ignoring A_Ptr field, so take a copy of it, -- and set A_Ptr to NullCell for now. RootOfNextPair := Cells.Get_A_Ptr (Heap, Cells.Cell (Original)); Cells.Set_A_Ptr (Heap, Cells.Cell (Original), Cells.Null_Cell); -- Deep copy Structures.CopyStructure (Heap, Cells.Cell (Original), CopyName); Copy := Pair (CopyName); -- Put back the original value of A_Ptr field. Cells.Set_A_Ptr (Heap, Cells.Cell (Original), RootOfNextPair); end CopyPair; -------------------------------------------------------------------------- -- Action_R is the structure of an action R -- Predicate_q is the structure of a predicate q procedure CombinePredicateWithAction (Heap : in out Cells.Heap_Record; Action_R, Predicate_q : in Cells.Cell; Result : out Cells.Cell) is MarkedCellStack : CStacks.Stack; OldRootList : Cells.Cell; begin Substitute (Heap, Action_R, Predicate_q); -- prepare Structure_1 for garbage collection; OldRootList := Action_R; -- perform step a7 of JFB Fig.6.13: remove redundant elements; if Clists.IsEmptyList (Heap, OldRootList) then Clists.DisposeOfList (Heap, OldRootList); else MarkAccessibleCells (Heap, Predicate_q, MarkedCellStack); CleanUpDags (Heap, OldRootList); --# accept F, 10, MarkedCellStack, "MarkedCellStack unused here"; UnMarkCells (Heap, MarkedCellStack); --# end accept; end if; -- get rid of other structure in each case ? Simplify to structure 2 always ? 
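      -- Substitute has rewritten Predicate_q in place in terms of Action_R,
      -- so the structure returned below already denotes q!R.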
Result := Predicate_q; end CombinePredicateWithAction; -------------------------------------------------------------------------- -- Action_R is the structure of an action R -- Action_S is the structure of an action S procedure ComposeActions (Heap : in out Cells.Heap_Record; Action_R, Action_S : in Cells.Cell; Result : out Cells.Cell) is MarkedCellStack : CStacks.Stack; OldRootList : Cells.Cell; begin Substitute (Heap, Action_R, Action_S); -- merge lists of modified variables and prepare redundant members -- for garbage collection; Clists.CreateList (Heap, OldRootList); MergeLists (Heap, Action_R, Action_S, OldRootList); -- perform step a7 of JFB Fig.6.13: remove redundant elements; if Clists.IsEmptyList (Heap, OldRootList) then Clists.DisposeOfList (Heap, OldRootList); else MarkAccessibleCells (Heap, Clists.FirstCell (Heap, Action_R), MarkedCellStack); CleanUpDags (Heap, OldRootList); --# accept F, 10, MarkedCellStack, "MarkedCellStack unused here"; UnMarkCells (Heap, MarkedCellStack); --# end accept; end if; Result := Action_R; -- get rid of other structure in each case ? Simplify to structure 2 always ? end ComposeActions; -------------------------------------------------------------------------- procedure FormConjunction (Heap : in out Cells.Heap_Record; Predicate_1, Predicate_2 : in Cells.Cell; Result : out Cells.Cell) is ConjunctionCell : Cells.Cell; begin Cells.Create_Cell (Heap, ConjunctionCell); Cells.Set_Kind (Heap, ConjunctionCell, Cell_Storage.Op); Cells.Set_Op_Symbol (Heap, ConjunctionCell, SP_Symbols.RWand); Cells.Set_A_Ptr (Heap, ConjunctionCell, Predicate_1); Cells.Set_B_Ptr (Heap, ConjunctionCell, Predicate_2); Result := ConjunctionCell; end FormConjunction; -------------------------------------------------------------------------- procedure MultiplyPairs (Heap : in out Cells.Heap_Record; Multiplicand, Multiplier : in Pair; Product : out Pair) is p, R, q, S, Conjunction, CopyOfR, ProductCell, RS_Composition, Transformed_q : Cells.Cell; begin -- MultiplyPairs p := Cells.Get_B_Ptr (Heap, Cells.Cell (Multiplicand)); R := Cells.Get_C_Ptr (Heap, Cells.Cell (Multiplicand)); q := Cells.Get_B_Ptr (Heap, Cells.Cell (Multiplier)); S := Cells.Get_C_Ptr (Heap, Cells.Cell (Multiplier)); Cells.Create_Cell (Heap, ProductCell); -- form p /\ q!R ; -- if q is just True if IsTrue (Heap, Multiplier) then -- new predicate is p; Cells.Set_B_Ptr (Heap, ProductCell, p); else -- if p is just True if IsTrue (Heap, Multiplicand) then -- new predicate is q!R; -- if R is null, then... if IsUnitAction (Heap, Multiplicand) then -- new predicate Product_Cell is q; Cells.Set_B_Ptr (Heap, ProductCell, q); else -- p is True, q is not null, so new -- predicate Product_Cell is q!R; Structures.CopyStructure (Heap, R, CopyOfR); CombinePredicateWithAction (Heap => Heap, Action_R => CopyOfR, Predicate_q => q, Result => Transformed_q); Cells.Set_B_Ptr (Heap, ProductCell, Transformed_q); end if; else -- form q!R and perform its conjunction with p; -- if R is null... if IsUnitAction (Heap, Multiplicand) then -- ...then q!R is q Transformed_q := q; else -- form q!R and store in Transformed_q; Structures.CopyStructure (Heap, R, CopyOfR); CombinePredicateWithAction (Heap => Heap, Action_R => CopyOfR, Predicate_q => q, Result => Transformed_q); end if; -- Product_Cell := p and Transformed_q FormConjunction (Heap, p, Transformed_q, Conjunction); Cells.Set_B_Ptr (Heap, ProductCell, Conjunction); end if; end if; -- Product_Cell's B_Ptr field now contains the result of forming -- p and q!R for all cases. 
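      -- Taken together, the product being built is
      --    (p, R) * (q, S) = (p and q!R, R.S)
      -- The B_Ptr assignments above supply the predicate part p and q!R;
      -- the C_Ptr assignments below supply the composed action R.S.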
-- Now form R.S -- If either R or S are null, then it's easy if IsUnitAction (Heap, Multiplicand) then Cells.Set_C_Ptr (Heap, ProductCell, S); elsif IsUnitAction (Heap, Multiplier) then Cells.Set_C_Ptr (Heap, ProductCell, R); else -- R and S both non-null, so -- construct and store R.S composition; ComposeActions (Heap => Heap, Action_R => R, Action_S => S, Result => RS_Composition); Cells.Set_C_Ptr (Heap, ProductCell, RS_Composition); end if; Cells.Dispose_Of_Cell (Heap, Cells.Cell (Multiplicand)); Cells.Dispose_Of_Cell (Heap, Cells.Cell (Multiplier)); Product := Pair (ProductCell); end MultiplyPairs; end Pairs; spark-2012.0.deb/examiner/completecheck.adb0000644000175000017500000002114511753202335017531 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Maths; use type Maths.Value; use type Maths.ErrorCode; package body CompleteCheck is type TypTriStateRangeStatus is (NoneSet, SomeSet, AllSet); ------------------------------------------------------------------------------ -- set all the elements in the specified range to true procedure SetRange (Data : in out T; RangeFrom : in Integer; RangeTo : in Integer) --# derives Data from *, --# RangeFrom, --# RangeTo; --# pre (RangeFrom - Data.LowerBound >= 0) --# and (RangeTo - Data.LowerBound < ExaminerConstants.CompleteCheckSize); is begin for I in Integer range (RangeFrom - Data.LowerBound) .. (RangeTo - Data.LowerBound) loop --# assert (I in (RangeFrom - Data.LowerBound) .. --# (RangeTo - Data.LowerBound)) --# and (Data.LowerBound = Data~.LowerBound); Data.Elements (I) := True; end loop; end SetRange; ------------------------------------------------------------------------------ -- check whether none, some or all the elements in the specified range are true function CheckRange (Data : T; RangeFrom : Integer; RangeTo : Integer) return TypTriStateRangeStatus --# pre (RangeFrom - Data.LowerBound >= 0) and --# (RangeTo - Data.LowerBound < ExaminerConstants.CompleteCheckSize); is FalseSeen : Boolean := False; TrueSeen : Boolean := False; Result : TypTriStateRangeStatus; begin for I in Integer range (RangeFrom - Data.LowerBound) .. (RangeTo - Data.LowerBound) loop --# assert I in (RangeFrom - Data.LowerBound) .. 
--# (RangeTo - Data.LowerBound); if Data.Elements (I) then TrueSeen := True; else FalseSeen := True; end if; end loop; if FalseSeen and not TrueSeen then Result := NoneSet; elsif TrueSeen and not FalseSeen then Result := AllSet; else Result := SomeSet; end if; return Result; end CheckRange; ------------------------------------------------------------------------------ procedure Init (Data : out T; RangeFrom : in Integer; RangeTo : in Integer; RangeState : out TypRangeState) is ActualUpperBound : Integer; -- vars that follow are so we can use Maths package to determine size of range MathsTo, MathsFrom, MathsLimit, MathsCalc1, MathsCalc2, MathsCalc3 : Maths.Value; CalcErr1, CalcErr2, CalcErr3 : Maths.ErrorCode; MathsError : Boolean := False; begin -- Check whether the entire type fits within the internal data type -- do calculation using Maths package to prevent overflows MathsTo := Maths.IntegerToValue (RangeTo); MathsFrom := Maths.IntegerToValue (RangeFrom); MathsLimit := Maths.IntegerToValue (ExaminerConstants.CompleteCheckSize); Maths.Subtract (MathsTo, MathsFrom, -- to get MathsCalc1, CalcErr1); Maths.Add (MathsCalc1, Maths.OneInteger, -- to get MathsCalc2, CalcErr2); Maths.LesserOrEqual (MathsCalc2, MathsLimit, --to get MathsCalc3, CalcErr3); if (CalcErr1 = Maths.NoError) and then (CalcErr2 = Maths.NoError) and then (CalcErr3 = Maths.NoError) then if MathsCalc3 = Maths.TrueValue then -- RangeTo - RangeFrom + 1 <= ExaminerConstants.CompleteCheckSize ActualUpperBound := RangeTo; RangeState := RangeDoesFit; else --range to big or some error in calculating range ActualUpperBound := (RangeFrom + ExaminerConstants.CompleteCheckSize) - 1; RangeState := RangeTooBig; end if; else ActualUpperBound := (RangeFrom + ExaminerConstants.CompleteCheckSize) - 1; RangeState := RangeTooBig; MathsError := True; end if; Data := T' (LowerBound => RangeFrom, ActualUpperBound => ActualUpperBound, OthersClause => NotSeen, Elements => ElementArray'(others => False), Undeterminable => MathsError); end Init; ------------------------------------------------------------------------------ procedure SeenElement (Data : in out T; ElementNum : in Integer; OutOfRangeSeen : out Boolean; OverlapState : out TypOverlapState) is begin -- check whether the number specified is within the range of the type if Data.LowerBound <= ElementNum and ElementNum <= Data.ActualUpperBound then -- if in range then check in array for whether the element has been -- seen before if Data.Elements (ElementNum - Data.LowerBound) then OverlapState := Overlap; else OverlapState := NoOverlap; end if; Data.Elements (ElementNum - Data.LowerBound) := True; OutOfRangeSeen := False; else -- if out of range then element has not been seen before OverlapState := NoOverlap; OutOfRangeSeen := True; end if; end SeenElement; ------------------------------------------------------------------------------ procedure SeenRange (Data : in out T; RangeFrom : in Integer; RangeTo : in Integer; OutOfRangeSeen : out Boolean; OverlapState : out TypOverlapState) is UpperLimitOfCheck : Integer; LowerLimitOfCheck : Integer; RangeStatus : TypTriStateRangeStatus; begin -- if range specified is entirely outside range of type then -- return NoOverlap if RangeFrom > Data.ActualUpperBound or RangeTo < Data.LowerBound then OverlapState := NoOverlap; OutOfRangeSeen := True; else -- otherwise process the range: which may be partially outside the -- range of the type -- initially assume in range OutOfRangeSeen := False; if RangeTo > Data.ActualUpperBound then UpperLimitOfCheck := 
Data.ActualUpperBound; OutOfRangeSeen := True; else UpperLimitOfCheck := RangeTo; end if; if RangeFrom < Data.LowerBound then LowerLimitOfCheck := Data.LowerBound; OutOfRangeSeen := True; else LowerLimitOfCheck := RangeFrom; end if; RangeStatus := CheckRange (Data, LowerLimitOfCheck, UpperLimitOfCheck); if RangeStatus = NoneSet then OverlapState := NoOverlap; else OverlapState := Overlap; end if; SetRange (Data, LowerLimitOfCheck, UpperLimitOfCheck); end if; end SeenRange; ------------------------------------------------------------------------------ procedure SeenOthers (Data : in out T) is begin Data.OthersClause := Seen; end SeenOthers; ------------------------------------------------------------------------------ function IsComplete (Data : T) return TypCompleteState is Result : TypCompleteState; begin if Data.OthersClause = Seen then Result := Complete; else if CheckRange (Data, Data.LowerBound, Data.ActualUpperBound) = AllSet then Result := Complete; else Result := Incomplete; end if; end if; return Result; end IsComplete; end CompleteCheck; spark-2012.0.deb/examiner/stree.ads0000644000175000017500000005741311753202336016076 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- STree -- -- Description: -- The package STree implements an ASM which maitains a syntax -- tree derived from the analysed source text. The structure of the -- tree strictly follows the LLR1 grammar described in spark.lla. -- The syntax tree is logically a multi-child tree but is physically -- a binary tree using a "left-son-right-sibling" representation. -- That is the left branch of a node, N, represents the leftmost child, L, -- of N and the right branch of L is a list of the siblings of L (i.e., the -- remaining children of N). 
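-- For example, if a production node N derives the children C1, C2 and C3,
-- then in the underlying binary tree the left branch of N holds C1, the
-- right branch of C1 holds C2, the right branch of C2 holds C3, and C3 has
-- a null right branch; the right branch of N itself holds N's own siblings.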
-------------------------------------------------------------------------------- with Dictionary; with ExaminerConstants; with LexTokenManager; with SP_Symbols; use type Dictionary.Symbol; use type LexTokenManager.Str_Comp_Result; use type LexTokenManager.Token_Position; use type SP_Symbols.SP_Symbol; --# inherit Dictionary, --# ExaminerConstants, --# LexTokenManager, --# SPARK_IO, --# SP_Symbols, --# Statistics, --# SystemErrors; package STree --# own Table : TableStructure; --# initializes Table; is type SyntaxNode is private; NullNode : constant SyntaxNode; procedure RetrieveCurrentRoot (Root : out SyntaxNode); --# global in out Table; --# derives Root, --# Table from Table; function NodeToRef (Node : SyntaxNode) return ExaminerConstants.RefType; function RefToNode (Ref : ExaminerConstants.RefType) return SyntaxNode; procedure NewProduction (Production : in SP_Symbols.SP_Non_Terminal; Node : out SyntaxNode); --# global in out Table; --# derives Node from Table & --# Table from *, --# Production; procedure NewTerminal (Terminal : in SP_Symbols.SP_Terminal; TerminalVal : in LexTokenManager.Lex_Value; Node : out SyntaxNode); --# global in out Table; --# derives Node from Table & --# Table from *, --# Terminal, --# TerminalVal; procedure AddDerivative (Child_Node : in SyntaxNode); --# global in out Table; --# derives Table from *, --# Child_Node; procedure AddChildNode (Node : in SyntaxNode; ChildNode : in SyntaxNode; LinkToParent : in Boolean); --# global in out Table; --# derives Table from *, --# ChildNode, --# LinkToParent, --# Node; ----------------------------------------------------------------------------- -- Child_Node -- -- Description: -- Child_Node gets the first (left) child of a node (if one exists). The -- remaining children are obtained using calls to -- Next_Sibling initially applied to the first node returned by -- Child_Node and then applied to the successive nodes returned by -- the calls to Next_Sibling. -- Child_Node returns a NullNode if there are no child nodes. ----------------------------------------------------------------------------- function Child_Node (Current_Node : SyntaxNode) return SyntaxNode; --# global in Table; ----------------------------------------------------------------------------- -- Next_Sibling -- -- Description: -- Next_Sibling gets sibling nodes by applying each -- call to the preceding sibling. If there are no further siblings -- Next_Sibling returns a NullNode. ----------------------------------------------------------------------------- function Next_Sibling (Current_Node : SyntaxNode) return SyntaxNode; --# global in Table; -- pre Current_Node /= NullNode; function Parent_Node (Current_Node : SyntaxNode) return SyntaxNode; --# global in Table; -- pre Current_Node /= NullNode; ----------------------------------------------------------------------------- -- Syntax_Node_Type -- -- Description: -- Returns the grammar symbol associated with the Node from the grammar -- symbols defined in SP_Symbols.SP_Symbol.
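-- A typical use combines Syntax_Node_Type with Child_Node and Next_Sibling
-- to examine each child of a node in turn.  A minimal sketch (illustrative
-- only; Process_Identifier is a hypothetical action, not part of this
-- package):
--    Next := Child_Node (Current_Node => N);
--    while Next /= NullNode loop
--       if Syntax_Node_Type (Node => Next) = SP_Symbols.identifier then
--          Process_Identifier (Next);
--       end if;
--       Next := Next_Sibling (Current_Node => Next);
--    end loop;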
----------------------------------------------------------------------------- function Syntax_Node_Type (Node : SyntaxNode) return SP_Symbols.SP_Symbol; --# global in Table; function Node_Position (Node : SyntaxNode) return LexTokenManager.Token_Position; --# global in Table; procedure Set_Node_Lex_String (Sym : in Dictionary.Symbol; Node : in SyntaxNode); --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in out Table; --# derives Table from *, --# Dictionary.Dict, --# Node, --# Sym & --# null from LexTokenManager.State; --# post Table = Table~; -- pre Node /= NullNode and Syntax_Node_Type (Node, Table) in SP_Symbols.SP_Terminal; function Node_Lex_String (Node : SyntaxNode) return LexTokenManager.Lex_String; --# global in Table; -- pre Node /= NullNode and Syntax_Node_Type (Node, Table) in SP_Symbols.SP_Terminal; function Node_Token_String (Node : SyntaxNode) return LexTokenManager.Lex_String; --# global in Table; --# pre Syntax_Node_Type (Node, Table) = SP_Symbols.identifier; ---------------------------------------------------------------------- -- DeleteSyntaxTree -- -- Description: -- Returns the tree rooted at Root to the Free List, _except_ subtrees -- rooted at the following node types, which are needed by the VCG: -- procedure_constraint -- function_constraint -- generic_actual_part -- Subtrees rooted at constant_declaration nodes are not deleted iff -- KeepConstants is True ---------------------------------------------------------------------- procedure DeleteSyntaxTree (Root : in SyntaxNode; KeepConstants : in Boolean); --# global in out Table; --# derives Table from *, --# KeepConstants, --# Root; -- Function returns the left most leaf node of the tree. function Last_Child_Of (Start_Node : SyntaxNode) return SyntaxNode; --# global in Table; -- pre Start_Node /= NullNode; function Last_Sibling_Of (Start_Node : SyntaxNode) return SyntaxNode; --# global in Table; -- pre Start_Node /= NullNode; -- procedure to poke symbol into tree for retrieval by VCG procedure Add_Node_Symbol (Node : in SyntaxNode; Sym : in Dictionary.Symbol); --# global in Dictionary.Dict; --# in out Table; --# derives Table from *, --# Node, --# Sym & --# null from Dictionary.Dict; --# pre ((Syntax_Node_Type (Node, Table) = SP_Symbols.attribute_designator or --# Syntax_Node_Type (Node, Table) = SP_Symbols.annotation_attribute_designator or --# Syntax_Node_Type (Node, Table) = SP_Symbols.positional_argument_association or --# Syntax_Node_Type (Node, Table) = SP_Symbols.annotation_positional_argument_association) -> --# (Dictionary.Is_Null_Symbol (Sym) or --# Dictionary.IsTypeMark (Sym, Dictionary.Dict) or --# Dictionary.IsParameterConstraint (Sym, Dictionary.Dict))) and --# ((Syntax_Node_Type (Node, Table) = SP_Symbols.inside or Syntax_Node_Type (Node, Table) = SP_Symbols.outside) -> --# (Dictionary.Is_Null_Symbol (Sym) or --# Dictionary.IsTypeMark (Sym, Dictionary.Dict) or --# Dictionary.IsEnumerationLiteral (Sym, Dictionary.Dict))) and --# ((Syntax_Node_Type (Node, Table) = SP_Symbols.type_mark) -> --# (Dictionary.Is_Null_Symbol (Sym) or --# Dictionary.IsTypeMark (Sym, Dictionary.Dict) or --# Dictionary.IsArrayIndex (Sym, Dictionary.Dict))) and --# ((Syntax_Node_Type (Node, Table) = SP_Symbols.expression) -> --# (Dictionary.Is_Null_Symbol (Sym) or --# Dictionary.IsTypeMark (Sym, Dictionary.Dict) or --# Dictionary.IsArrayIndex (Sym, Dictionary.Dict) or --# Dictionary.IsConstant (Sym, Dictionary.Dict))) and --# ((Syntax_Node_Type (Node, Table) = SP_Symbols.procedure_call_statement) -> --# 
(Dictionary.Is_Null_Symbol (Sym) or --# Dictionary.IsProcedure (Sym, Dictionary.Dict) or --# Dictionary.IsOwnVariable (Sym, Dictionary.Dict))) and --# ((Syntax_Node_Type (Node, Table) = SP_Symbols.selected_component or --# Syntax_Node_Type (Node, Table) = SP_Symbols.annotation_selected_component or --# Syntax_Node_Type (Node, Table) = SP_Symbols.identifier) -> --# Syntax_Node_Type (Node, Table) /= SP_Symbols.identifier -> --# Dictionary.IsFunction (Sym, Dictionary.Dict)) and --# ((Syntax_Node_Type (Node, Table) = SP_Symbols.primary) -> --# (Dictionary.Is_Null_Symbol (Sym) or --# Dictionary.IsTypeMark (Sym, Dictionary.Dict) or --# Dictionary.IsFunction (Sym, Dictionary.Dict) or --# Dictionary.IsOwnVariableOrConstituentWithMode (Sym, Dictionary.Dict))) and --# (Syntax_Node_Type (Node, Table) = SP_Symbols.identifier -> --# (Dictionary.IsEnumerationLiteral (Sym, Dictionary.Dict) or --# Dictionary.IsConstant (Sym, Dictionary.Dict) or --# Dictionary.IsRulePolicy (Sym, Dictionary.Dict) or --# Dictionary.IsVariable (Sym, Dictionary.Dict) or --# Dictionary.IsQuantifiedVariable (Sym, Dictionary.Dict) or --# Dictionary.IsTypeMark (Sym, Dictionary.Dict) or --# Dictionary.IsSubprogram (Sym, Dictionary.Dict) or --# Dictionary.IsFunction (Sym, Dictionary.Dict) or --# Dictionary.IsImplicitReturnVariable (Sym, Dictionary.Dict))) and --# (Syntax_Node_Type (Node, Table) = SP_Symbols.operator_symbol -> --# Dictionary.IsOperator (Sym, Dictionary.Dict)) and --# ((Syntax_Node_Type (Node, Table) = SP_Symbols.global_variable or --# Syntax_Node_Type (Node, Table) = SP_Symbols.null_import_list) -> --# Dictionary.IsGlobalVariable (Sym, Dictionary.Dict)) and --# ((Syntax_Node_Type (Node, Table) /= SP_Symbols.attribute_designator and --# Syntax_Node_Type (Node, Table) /= SP_Symbols.annotation_attribute_designator and --# Syntax_Node_Type (Node, Table) /= SP_Symbols.positional_argument_association and --# Syntax_Node_Type (Node, Table) /= SP_Symbols.annotation_positional_argument_association and --# Syntax_Node_Type (Node, Table) /= SP_Symbols.inside and --# Syntax_Node_Type (Node, Table) /= SP_Symbols.outside and --# Syntax_Node_Type (Node, Table) /= SP_Symbols.type_mark and --# Syntax_Node_Type (Node, Table) /= SP_Symbols.expression and --# Syntax_Node_Type (Node, Table) /= SP_Symbols.procedure_call_statement and --# Syntax_Node_Type (Node, Table) /= SP_Symbols.selected_component and --# Syntax_Node_Type (Node, Table) /= SP_Symbols.annotation_selected_component and --# Syntax_Node_Type (Node, Table) /= SP_Symbols.primary and --# Syntax_Node_Type (Node, Table) /= SP_Symbols.operator_symbol and --# Syntax_Node_Type (Node, Table) /= SP_Symbols.global_variable and --# Syntax_Node_Type (Node, Table) /= SP_Symbols.null_import_list and --# Syntax_Node_Type (Node, Table) /= SP_Symbols.identifier) -> --# (Dictionary.Is_Null_Symbol (Sym) or Dictionary.IsTypeMark (Sym, Dictionary.Dict))); --# post Table = Table~; function NodeSymbol (Node : SyntaxNode) return Dictionary.Symbol; --# global in Table; procedure ReportUsage; --# global in Table; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage from *, --# Table; ------------------------------------------------------------------------ -- Iterators -- -- Description: -- An Iterator is an ADT which embodies the information traverse -- successive nodes of the syntax tree based on given criteria: -- Search Kind - Undefined, Node, Branch, or Formal Parameter search. -- Node Kind - the sort of node to be located on each -- iteration (as defined by SP_Symbols.SP_Symbol). 
-- Direction - Whether the traversal is up through its predecessors -- or down the tree through its children and siblings. ------------------------------------------------------------------------ type Iterator is private; -- -- This iterator can be used to traverse the syntax tree. NullIterator : constant Iterator; type TraverseDirection is (Up, Down); -- -- Up: Looks for relevant nodes in the parent chain above (and including) -- the specified root. -- -- Down: Looks for relevant nodes in a pre-order (or depth first) fashion -- below (and including) the root. ----------------------------------------------------------------------------- -- GetNode -- -- Description: -- Gets the currently located node from an Iterator. -- If there is no located node (IsNull is True), then a NullNode -- is returned. ----------------------------------------------------------------------------- function Get_Node (It : Iterator) return SyntaxNode; --# return S => ((It = NullIterator) -> (S = NullNode)); ----------------------------------------------------------------------------- -- IsNull -- -- Description: -- Returns True if the Iterator, It, does not contain a located node, -- otherwise returns false. ----------------------------------------------------------------------------- function IsNull (It : Iterator) return Boolean; --# return It = NullIterator; ----------------------------------------------------------------------------- -- Find_First_Node -- -- Description: -- Initialises the returned Iterator with the following criteria: -- Search Kind = Node Search, -- Node Kind = NodeKind, and -- Direction = InDirection. -- The details of the first node found starting from, and including, the -- FromNode and meeting the criteria are recorded. -- If the Direction is Down the node is located using a pre-order search -- of the tree. If the Direction is Up the tree is traversed through the -- ancestors of the FromNode. -- If no node of NodeKind is found then a call to IsNull will return True, -- otherwise a call to GetNode (with no intervening -- calls to NextNode) returns the located node. -- Once the iterator is initialised it will retain the same criteria in -- successive calls of NextNode. -- NOTE: once a relevant node is found successive calls to NextNode -- will traverse its ancestors (up) or its siblings (down); it will -- not traverse the children of the located node. ----------------------------------------------------------------------------- function Find_First_Node (Node_Kind : SP_Symbols.SP_Symbol; From_Root : SyntaxNode; In_Direction : TraverseDirection) return Iterator; --# global in Table; --# return It => ((Syntax_Node_Type (Get_Node (It), Table) = Node_Kind) or IsNull (It)); ----------------------------------------------------------------------------- -- Find_First_Branch_Node -- -- Description: -- Sets up an iterator to find nodes which have more than one child. -- Initialises the returned Iterator with the following criteria: -- Search Kind = Branch Search, -- Node Kind = n/a, and -- Direction = InDirection. -- The details of the first node starting from FromNode and meeting -- the criteria are recorded. -- If no branch nodes are found then a call to IsNull will return True, -- otherwise a call to GetNode (with no intervening -- calls to NextNode) returns the located node. -- Once the iterator is initialised it will retain the same criteria in -- successive calls of NextNode.
-- NOTE: once a relevant node is found successive calls to NextNode -- will traverse its ancestors (up) or its siblings (down); it will -- not traverse the children of the located node. ----------------------------------------------------------------------------- function Find_First_Branch_Node (From_Root : SyntaxNode; In_Direction : TraverseDirection) return Iterator; --# global in Table; ----------------------------------------------------------------------------- -- Find_First_Formal_Parameter_Node -- -- Description: -- This function is used when processing subprogram calls with named -- parameter association. -- Initialises the returned Iterator with the following criteria: -- Search Kind = Formal Parameter Search, -- Node Kind = n/a, and -- Direction = Down. -- IMPORTANT: the FromRoot parameter must have a node kind of -- SP_Symbols.named_argument_association -- otherwise an Examiner Fatal Error will occur. -- The details of the first node starting from FromNode and meeting -- the criteria are recorded. -- If no Formal Parameter Nodes are found then a call to IsNull will -- return True, otherwise a call to GetNode (with no intervening -- calls to NextNode) returns the located node. -- Once the iterator is initialised it will retain the same criteria in -- successive calls of NextNode. -- Once a formal parameter node of a subprogram is found, successive -- calls to NextNode will return each of the formal parameters of the -- subprogram. ----------------------------------------------------------------------------- function Find_First_Formal_Parameter_Node (From_Root : SyntaxNode) return Iterator; --# global in Table; --# pre Syntax_Node_Type (From_Root, Table) = SP_Symbols.named_argument_association; --# return It => (Syntax_Node_Type (Get_Node (It), Table) = SP_Symbols.identifier); ----------------------------------------------------------------------------- -- FindLastActualParameterNode -- -- Description: -- This function is used when processing subprogram calls with named -- parameter association. -- Initialises the returned Iterator with the following criteria: -- Search Kind = Node Search, -- Node Kind = SP_Symbols.simple_name, and -- Direction = Up. -- IMPORTANT: the FromRoot parameter must have a node kind of -- SP_Symbols.named_argument_association -- otherwise an Examiner Fatal Error will occur. -- The details of the first node starting from FromNode and meeting -- the criteria are recorded. -- If no Actual Parameter Nodes are found then a call to IsNull will -- return True, otherwise a call to GetNode (with no intervening -- calls to NextNode) returns the located node. -- Once the iterator is initialised it will retain the same criteria in -- successive calls of NextNode. -- NOTE: it does not appear that preceding actual parameters will be -- found by successive calls to NextNode. ----------------------------------------------------------------------------- function FindLastActualParameterNode (FromRoot : SyntaxNode) return SyntaxNode; --# global in Table; -- pre Syntax_Node_Type (FromRoot, Table) = SP_Symbols.named_argument_association; ----------------------------------------------------------------------------- -- NextNode -- -- Description: -- Traverses the syntax tree via successive iterative calls. -- The traversal depends on the traversal criteria set by -- Find_First_Node, Find_First_Branch_Node, Find_First_Formal_Parameter_Node, -- or FindLastActualParameterNode. -- See the above descriptions of these subprograms for details.
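-- A minimal usage sketch (illustrative only; Process_Node is a hypothetical
-- action, not part of this package):
--    It := Find_First_Node (Node_Kind    => SP_Symbols.identifier,
--                           From_Root    => Root,
--                           In_Direction => Down);
--    while not IsNull (It) loop
--       Process_Node (Get_Node (It));
--       It := NextNode (It);
--    end loop;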
----------------------------------------------------------------------------- function NextNode (It : Iterator) return Iterator; --# global in Table; --# return Next_It => ((Syntax_Node_Type (Get_Node (It), Table) = Syntax_Node_Type (Get_Node (Next_It), Table)) or --# IsNull (Next_It)); ------------------------------------------------------------------------ -- Larger-scale accessor functions to aid SyntaxTree navigation ------------------------------------------------------------------------ function FindLastItemInDependencyRelation (Node : SyntaxNode) return LexTokenManager.Token_Position; --# global in Table; --# pre Syntax_Node_Type (Node, Table) = SP_Symbols.dependency_relation; -- return "The right-most, bottom-most item in the relation" -- Useful for reporting items missing from a derives annotation -- type Traversal is (Up, Down, Across); function Expression_From_Positional_Argument_Association (Node : SyntaxNode) return SyntaxNode; --# global in Table; --# pre Syntax_Node_Type (Node, Table) = SP_Symbols.positional_argument_association; --# return Result => Syntax_Node_Type (Result, Table) = SP_Symbols.expression; function Expression_From_Named_Argument_Association (Node : SyntaxNode) return SyntaxNode; --# global in Table; --# pre Syntax_Node_Type (Node, Table) = SP_Symbols.named_argument_association; --# return Result => Syntax_Node_Type (Result, Table) = SP_Symbols.expression; function LoopParameterSpecFromEndOfLoop (Node : SyntaxNode) return SyntaxNode; --# global in Table; --# return Result => (Result = NullNode or --# Syntax_Node_Type (Result, Table) = SP_Symbols.loop_parameter_specification or --# Syntax_Node_Type (Result, Table) = SP_Symbols.condition); -- pre Syntax_Node_Type (Node, Table) = SP_Symbols.end_of_loop; function IdentifierHasTildeSuffix (Node : SyntaxNode) return Boolean; --# global in Table; -- pre Syntax_Node_Type (Node, Table) = SP_Symbols.identifier; function IdentifierHasPercentSuffix (Node : SyntaxNode) return Boolean; --# global in Table; -- pre Syntax_Node_Type (Node, Table) = SP_Symbols.identifier; private type SyntaxNode is range 0 .. ExaminerConstants.SyntaxTreeSize; --# assert SyntaxNode'Base is Integer; -- for the "Large" Examiner NullNode : constant SyntaxNode := 0; type SearchKind is (Undefined, NodeTypeSearch, BranchSearch, FormalParameterSearch); type Iterator is record TheSearchKind : SearchKind; SearchNodeType : SP_Symbols.SP_Symbol; SearchDirection : TraverseDirection; Current : SyntaxNode; Root : SyntaxNode; end record; NullIterator : constant Iterator := Iterator' (TheSearchKind => Undefined, SearchNodeType => SP_Symbols.SP_Symbol'First, SearchDirection => TraverseDirection'First, Current => NullNode, Root => NullNode); end STree; spark-2012.0.deb/examiner/sem-compunit-walkstatements-wf_proc_call.adb0000644000175000017500000044506711753202336025055 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- This procedure checks the following : -- -- 1. The parameter associations defined by a procedure call form a bijective -- function from formal to actual parameters. -- 2. Every actual parameter corresponding to an exported formal parameter is -- an entire variable (Section 6.4 Rule 1). -- 3. Every actual parameter of mode in out or out is the name of a variable. -- (LRM 6.4.1). -- 4. Every actual parameter of mode in out or out is not -- - a formal parameter of mode in (LRM 6.2), or -- - a loop parameter (LRM 5.5), or -- 5. If a variable V named in the global definition of a procedure P -- is exported, then neither V nor any of its subcomponents can occur -- as an actual parameter of P (LRM 6.4 Rule 1), or -- 6. If a variable V occurs in the global definition of a procedure -- P, then neither V nor any of its subcomponents can occur as an -- actual parameter of P where the corresponding formal parameter is -- an exported variable (LRM 6.4 Rule 2), or -- 7. If an entire variable V or a subcomponent of V occurs as an -- actual parameter in a procedure call statement, and the -- corresponding formal parameter is an exported variable, then -- neither V or an overlapping subcomponent of V can occur as another -- actual parameter in that statement. Two components are considered -- to be overlapping if they are elements of the same array or are -- the same component of a record (for example V.F and V.F) including -- subcomponents of the component (for example V.F and V.F.P). Note -- array elements are always considered to be overlapping and so, for -- example, V.A(I).P and V.A(J).Q are considered as overlapping -- (LRM 6.4 Rule 3). -- 8. The types of Formal and Actual parameters are compatible. -- 9. The procedure is callable at the point of call. --10. Globals imported or exported by a procedure are visible at the point of -- call. --11. Procedure calls resulting in direct update of non-enclosing package -- own variable generate a warning. --12. After substitution of actuals-for-formals, the resulting information -- flow relation is checked against the selected information flow policy. -------------------------------------------------------------------------------- separate (Sem.CompUnit.WalkStatements) procedure Wf_Proc_Call (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Table : in out RefList.HashTable; Component_Data : in out ComponentManager.ComponentData) is Proc_Sym : Dictionary.Symbol; Arg_List_Node, Name_Node : STree.SyntaxNode; Dotted, Valid : Boolean; Actual_Parameter_List, Formal_Position_List : SimpleLists.SimpleList; Abstraction : Dictionary.Abstractions := Dictionary.IsAbstract; -- ineffective, but removes spurious flowerrs subtype Param_Index is Integer range 1 .. ExaminerConstants.MaxProcCallParams; -- A Param_Info stores the sequence of items referenced in an actual parameter. For an IN -- parameter it will be just the variables for that appear in the general expression -- that makes up the actual. For OUT and IN OUT cases, we normally would expect a singleton -- element in the sequence because the actual must be a variable (note in this context that a -- record field is still a single variable because each field has its own symbol). 
For all the -- above cases, the Seq field contains the only useful data and the Entire_Array_Sym is null. -- The special case occurs whn the actual parameter is an array element such as A(I). Here the -- Seq field will contain both A and I (in a non-deterministic order); however, we need to know that -- the parameter is an array element so we can correct the flow analysis in BuilIORelation. In this -- case, the Entire_Array_Sym will be set to A. type Param_Info is record Seq : Natural; Entire_Array_Sym : Dictionary.Symbol; end record; type Param_Tables is array (Param_Index) of Param_Info; Param_Table : Param_Tables; -- The following variable gets set to the type that an actual parameter -- must be in a call to an inherited operation Tagged_Parameter_Sym : Dictionary.Symbol; -- and this is the type of the formal of the inherited operation Root_Parameter_Sym : Dictionary.Symbol; -- we need global access to the Prefix_Symbol (which gets set by Do_Dotted_Name) -- so that we can deal with "self substitutions" in PO calls. Prefix_Symbol : Dictionary.Symbol; --------------------------------------------------------------- function Tagged_Substitution_Required (Formal, Actual_Type, Root_Parameter_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return Dictionary.Types_Are_Equal (Left_Symbol => Dictionary.GetType (Formal), Right_Symbol => Root_Parameter_Sym, Full_Range_Subtype => False) and then Dictionary.IsAnExtensionOf (Root_Parameter_Sym, Actual_Type); end Tagged_Substitution_Required; --------------------------------------------------------------- procedure Add_Seq (Param_No : in Positive; Refs : in SeqAlgebra.Seq; Entire_Array_Sym : in Dictionary.Symbol; Param_Table : in out Param_Tables) -- see comment on type Param_Info for explanation of parameter Entire_Array_Sym --# derives Param_Table from *, --# Entire_Array_Sym, --# Param_No, --# Refs; is begin if Param_No > Param_Index'Last then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Too_Many_Params_In_Procedure_Call, Msg => ""); end if; Param_Table (Param_No).Seq := SeqAlgebra.SeqToNatural (Refs); Param_Table (Param_No).Entire_Array_Sym := Entire_Array_Sym; end Add_Seq; --------------------------------------------------------------- procedure Build_IO_Relation (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Root_Parameter_Sym, Prefix_Symbol, Proc_Sym : in Dictionary.Symbol; Param_Table : in Param_Tables; Abstraction : in Dictionary.Abstractions; Component_Data : in out ComponentManager.ComponentData; Table : in out RefList.HashTable) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in STree.Table; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives Component_Data, --# Dictionary.Dict, --# Statistics.TableUsage, --# Table, --# TheHeap from *, --# Abstraction, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Node, --# Param_Table, --# Prefix_Symbol, --# Proc_Sym, --# Root_Parameter_Sym, --# Table, --# TheHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Param_Table, --# Prefix_Symbol, --# Proc_Sym, --# Root_Parameter_Sym, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Table, --# TheHeap; 
--# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.procedure_call_statement; is Export_It, Import_It : Dictionary.Iterator; Import_Seq : SeqAlgebra.Seq; Export_Sym, Import_Sym : Dictionary.Symbol; Param_No : Positive; Actual_Sym : Dictionary.Symbol; --------------------------------------------------------------- procedure Check_Relation_Info_Flow_Policy (Node_Pos : in LexTokenManager.Token_Position; Scope : in Dictionary.Scopes; The_Export : in Dictionary.Symbol; The_Imports : in SeqAlgebra.Seq; The_Heap : in Heap.HeapRecord) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# Scope, --# SPARK_IO.File_Sys, --# The_Export, --# The_Heap, --# The_Imports; is Current_Member : SeqAlgebra.MemberOfSeq; The_Import_Sym : Dictionary.Symbol; begin Current_Member := SeqAlgebra.FirstMember (The_Heap, The_Imports); while not SeqAlgebra.IsNullMember (Current_Member) loop The_Import_Sym := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (SeqAlgebra.Value_Of_Member (The_Heap => The_Heap, M => Current_Member))); if Dictionary.RelationViolatesInfoFlowPolicy (The_Export, The_Import_Sym) then ErrorHandler.Dependency_Error (Err_Type => ErrorHandler.Policy_Violation, Position => Node_Pos, Import_Var_Sym => The_Import_Sym, Export_Var_Sym => The_Export, Scope => Scope); end if; Current_Member := SeqAlgebra.NextMember (The_Heap, Current_Member); end loop; end Check_Relation_Info_Flow_Policy; begin -- Build_IO_Relation Export_It := Dictionary.FirstExport (Abstraction, Proc_Sym); if Dictionary.IsNullIterator (Export_It) then RefList.AddNullRelation (Table, TheHeap, Node); else while not Dictionary.IsNullIterator (Export_It) loop --# assert Syntax_Node_Type (Node, STree.Table) = SP_Symbols.procedure_call_statement; Export_Sym := Dictionary.CurrentSymbol (Export_It); SeqAlgebra.CreateSeq (TheHeap, Import_Seq); Import_It := Dictionary.FirstDependency (Abstraction, Proc_Sym, Export_Sym); while not Dictionary.IsNullIterator (Import_It) loop --# assert Syntax_Node_Type (Node, STree.Table) = SP_Symbols.procedure_call_statement; Import_Sym := Dictionary.CurrentSymbol (Import_It); if Dictionary.Is_Global_Variable (Abstraction, Proc_Sym, Import_Sym) then SeqAlgebra.AddMember (TheHeap, Import_Seq, Natural (Dictionary.SymbolRef (Substitute_Protected_Type_Self_Reference (Sym => Import_Sym, Prefix_Symbol => Prefix_Symbol)))); else Param_No := Dictionary.GetSubprogramParameterNumber (Import_Sym); if Param_No <= Param_Index'Last and then Param_Table (Param_No).Seq /= 0 then SeqAlgebra.AugmentSeq (TheHeap, Import_Seq, SeqAlgebra.NaturalToSeq (Param_Table (Param_No).Seq)); end if; end if; Import_It := Dictionary.NextSymbol (Import_It); end loop; -- now substitue export if necessary if Dictionary.Is_Global_Variable (Abstraction, Proc_Sym, Export_Sym) then Export_Sym := Substitute_Protected_Type_Self_Reference (Sym => Export_Sym, Prefix_Symbol => Prefix_Symbol); else -- parameter Param_No := Dictionary.GetSubprogramParameterNumber (Export_Sym); if Param_No <= Param_Index'Last and then Param_Table (Param_No).Seq /= 0 and then not SeqAlgebra.IsEmptySeq (TheHeap, SeqAlgebra.NaturalToSeq (Param_Table (Param_No).Seq)) then -- we have an export to process. -- There are two cases: -- (1) It's NOT an array element. 
In which case we know that the export is a single -- variable symbol and must be the sole member of the set of vars associated with the -- current parameter; or -- (2) It is an array element in which case the set of vars includes both the array and the -- indexing expressions. -- We can distinguish these case because the field Entire_Array_Sym -- associated with the current parameter will be null in case 1 and non null in case 2 . if Dictionary.Is_Null_Symbol (Param_Table (Param_No).Entire_Array_Sym) then -- case 1 -- extract singleton from set Actual_Sym := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => SeqAlgebra.FirstMember (TheHeap, SeqAlgebra.NaturalToSeq (Param_Table (Param_No).Seq))))); else -- case 2 -- use array symbol stored especially for this case Actual_Sym := Param_Table (Param_No).Entire_Array_Sym; -- Add a self reference here because the actual parameter is an array element. -- Necessary because, for example, SetToZero (A (1));, must derives A from A. -- Secondly, as well as adding self, add all things the selected array -- element depends on (e.g. variables controlling the selected element). -- The array and all the things it depends on are in the Param_Table, thus: SeqAlgebra.AugmentSeq (TheHeap, Import_Seq, SeqAlgebra.NaturalToSeq (Param_Table (Param_No).Seq)); end if; if Tagged_Substitution_Required (Formal => Export_Sym, Actual_Type => Dictionary.GetType (Actual_Sym), Root_Parameter_Sym => Root_Parameter_Sym) then -- before we can convert the actual parameter to the appropriate root type -- we must add all its subcomponents to the Dictionary and ComponentManager Add_Record_Sub_Components (Record_Var_Sym => Actual_Sym, Record_Type_Sym => Dictionary.GetType (Actual_Sym), Component_Data => Component_Data, The_Heap => TheHeap); Export_Sym := Convert_Tagged_Actual (Actual => Actual_Sym, Tagged_Parameter_Sym => Root_Parameter_Sym); else Export_Sym := Actual_Sym; end if; end if; end if; RefList.AddRelation (Table, TheHeap, Node, Export_Sym, Import_Seq); Check_Relation_Info_Flow_Policy (Node_Pos => Node_Position (Node => Node), Scope => Scope, The_Export => Export_Sym, The_Imports => Import_Seq, The_Heap => TheHeap); Export_It := Dictionary.NextSymbol (Export_It); end loop; end if; end Build_IO_Relation; ----------------------------------------------------- function Actual_Parameter_At (Position : Positive; Actual_Parameter_List : SimpleLists.SimpleList) return Dictionary.Symbol is Ok : Boolean; Val : Natural; Sym : Dictionary.Symbol; begin SimpleLists.GetItem (Actual_Parameter_List, Position, --to get Val, Ok); if Ok then Sym := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (Val)); else Sym := Dictionary.NullSymbol; end if; return Sym; end Actual_Parameter_At; --------------------------------------------------------------- procedure Check_Procedure_Name (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Root_Parameter_Sym, Prefix_Symbol, Proc_Sym, Tagged_Parameter_Sym : out Dictionary.Symbol; Name_Node : out STree.SyntaxNode; Dotted, Valid : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dotted, --# Name_Node, --# Prefix_Symbol, --# Proc_Sym, --# Root_Parameter_Sym, --# STree.Table, --# Tagged_Parameter_Sym, --# Valid from CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# 
ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.procedure_call_statement; --# post Syntax_Node_Type (Name_Node, STree.Table) = SP_Symbols.name and --# (Dictionary.Is_Null_Symbol (Proc_Sym) or Dictionary.IsProcedure (Proc_Sym, Dictionary.Dict)) and --# STree.Table = STree.Table~; is Ident_Node, Prefix_Node : STree.SyntaxNode; ---------------------------------------------------------------- procedure Check_Symbol (Node : in STree.SyntaxNode; Sym : in Dictionary.Symbol; Prefix : in LexTokenManager.Lex_String; Name : in LexTokenManager.Lex_String; Prefix_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Root_Parameter_Sym, Proc_Sym, Tagged_Parameter_Sym : out Dictionary.Symbol; Valid : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Name, --# Node, --# Prefix, --# Prefix_Sym, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Sym & --# Proc_Sym, --# Root_Parameter_Sym, --# Tagged_Parameter_Sym, --# Valid from CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Name, --# Prefix_Sym, --# Scope, --# Sym & --# STree.Table from *, --# Dictionary.Dict, --# Node, --# Sym; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.identifier; --# post (Dictionary.Is_Null_Symbol (Proc_Sym) or --# Dictionary.IsProcedure (Proc_Sym, Dictionary.Dict)) and --# STree.Table = STree.Table~; is Proc_Sym_Local : Dictionary.Symbol; Sym2 : Dictionary.Symbol; begin Tagged_Parameter_Sym := Dictionary.NullSymbol; Root_Parameter_Sym := Dictionary.NullSymbol; Proc_Sym_Local := Sym; if Dictionary.Is_Null_Symbol (Proc_Sym_Local) then -- not declared -- If we have failed to find an operation by here then we -- need to look for an inherited one of the same name in -- some root package from which tagged types may have been -- extended. -- -- This search is only made if the prefix is a package. If it is a protected -- object then there is no question of there being any inherited operations to find. -- We only need to search if there is no prefix or the prefix is a package if Dictionary.Is_Null_Symbol (Prefix_Sym) or else Dictionary.IsPackage (Prefix_Sym) then Dictionary.SearchForInheritedOperations (Name => Name, Scope => Scope, Prefix => Prefix_Sym, Context => Dictionary.ProgramContext, OpSym => Proc_Sym_Local, ActualTaggedType => Tagged_Parameter_Sym); end if; if Dictionary.Is_Null_Symbol (Proc_Sym_Local) then -- we still didn't find anything Valid := False; Proc_Sym := Dictionary.NullSymbol; -- if "Prefix.Name" is NOT visible, but "Name" is visible AND it has been renamed, -- then issue the more helpful semantic error 419, otherwise error 143. 
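                  -- Look up the unqualified Name on its own: if it resolves to a
                  -- subprogram that has been renamed, error 419 below can refer the
                  -- user to that renaming.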
Sym2 := Dictionary.LookupItem (Name => Name, Scope => Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); if not Dictionary.Is_Null_Symbol (Sym2) and then Dictionary.Is_Renamed (Subprogram => Sym2, Scope => Scope) then ErrorHandler.Semantic_Error2 (Err_Num => 419, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str1 => Name, Id_Str2 => Prefix); else ErrorHandler.Semantic_Error2 (Err_Num => 143, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str1 => Node_Lex_String (Node => Node), Id_Str2 => Prefix); end if; else -- the inherit search did find something if Dictionary.IsProcedure (Proc_Sym_Local) then Valid := True; Proc_Sym := Proc_Sym_Local; Root_Parameter_Sym := Dictionary.GetSubprogramControllingType (Proc_Sym_Local); else -- found but not a procedure Valid := False; Proc_Sym := Dictionary.NullSymbol; end if; end if; elsif Dictionary.IsProcedure (Proc_Sym_Local) then -- ok if Dictionary.Is_Generic_Subprogram (The_Symbol => Proc_Sym_Local) then Valid := False; Proc_Sym := Dictionary.NullSymbol; ErrorHandler.Semantic_Error (Err_Num => 654, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Node_Lex_String (Node => Node)); else Valid := True; STree.Set_Node_Lex_String (Sym => Sym, Node => Node); Proc_Sym := Proc_Sym_Local; Root_Parameter_Sym := Dictionary.GetSubprogramControllingType (Proc_Sym_Local); end if; else -- there but not a procedure Valid := False; Proc_Sym := Dictionary.NullSymbol; ErrorHandler.Semantic_Error (Err_Num => 7, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Node_Lex_String (Node => Node)); end if; end Check_Symbol; ---------------------------------------------------------------- procedure Do_Simple_Name (Ident_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Root_Parameter_Sym, Proc_Sym, Tagged_Parameter_Sym : out Dictionary.Symbol; Name_Node : out STree.SyntaxNode; Valid : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# Name_Node, --# Proc_Sym, --# Root_Parameter_Sym, --# STree.Table, --# Tagged_Parameter_Sym, --# Valid from CommandLineData.Content, --# Dictionary.Dict, --# Ident_Node, --# LexTokenManager.State, --# Scope, --# STree.Table; --# pre Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; --# post Syntax_Node_Type (Name_Node, STree.Table) = SP_Symbols.name and --# (Dictionary.Is_Null_Symbol (Proc_Sym) or Dictionary.IsProcedure (Proc_Sym, Dictionary.Dict)) and --# STree.Table = STree.Table~; is begin Check_Symbol (Node => Ident_Node, Sym => Dictionary.LookupItem (Name => Node_Lex_String (Node => Ident_Node), Scope => Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False), Prefix => LexTokenManager.Null_String, Name => Node_Lex_String (Node => Ident_Node), Prefix_Sym => Dictionary.NullSymbol, Scope => Scope, Root_Parameter_Sym => Root_Parameter_Sym, Proc_Sym => Proc_Sym, Tagged_Parameter_Sym => Tagged_Parameter_Sym, Valid => Valid); Name_Node := Parent_Node (Current_Node => Parent_Node (Current_Node => Ident_Node)); -- ASSUME Name_Node = name 
SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Name_Node) = SP_Symbols.name, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Name_Node = name in Do_Simple_Name"); end Do_Simple_Name; ---------------------------------------------------------------- procedure Do_Dotted_Name (Ident_Node : in STree.SyntaxNode; Prefix : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Root_Parameter_Sym, Prefix_Symbol, Proc_Sym, Tagged_Parameter_Sym : out Dictionary.Symbol; Name_Node : out STree.SyntaxNode; Valid : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node, --# LexTokenManager.State, --# Prefix, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# Name_Node, --# Prefix_Symbol, --# Proc_Sym, --# Root_Parameter_Sym, --# STree.Table, --# Tagged_Parameter_Sym, --# Valid from CommandLineData.Content, --# Dictionary.Dict, --# Ident_Node, --# LexTokenManager.State, --# Prefix, --# Scope, --# STree.Table; --# pre Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier and --# Syntax_Node_Type (Prefix, STree.Table) = SP_Symbols.prefix; --# post Syntax_Node_Type (Name_Node, STree.Table) = SP_Symbols.name and --# (Dictionary.Is_Null_Symbol (Proc_Sym) or Dictionary.IsProcedure (Proc_Sym, Dictionary.Dict)) and --# STree.Table = STree.Table~; is Sym : Dictionary.Symbol; Ident_Node_Local : STree.SyntaxNode; Ident_Node_Prev : STree.SyntaxNode; Prefix_Local : STree.SyntaxNode; Prefix_OK : Boolean; begin -- on entry, Ident_Node points at the first identifier in the prefix so if the proc -- called is P.C.K then Ident_Node points at P Root_Parameter_Sym := Dictionary.NullSymbol; -- in case path taken misses Check_Symbol Prefix_Symbol := Dictionary.NullSymbol; Proc_Sym := Dictionary.NullSymbol; Tagged_Parameter_Sym := Dictionary.NullSymbol; -- in case path taken misses Check_Symbol Ident_Node_Local := Ident_Node; -- ASSUME Ident_Node_Local = identifier Prefix_Local := Prefix; -- ASSUME Prefix_Local = prefix -- look up start of prefix Sym := Dictionary.LookupItem (Name => Node_Lex_String (Node => Ident_Node_Local), Scope => Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); loop -- loop over multiple prefixes --# assert Syntax_Node_Type (Ident_Node_Local, STree.Table) = SP_Symbols.identifier and --# Syntax_Node_Type (Prefix_Local, STree.Table) = SP_Symbols.prefix and --# (Dictionary.Is_Null_Symbol (Proc_Sym) or Dictionary.IsProcedure (Proc_Sym, Dictionary.Dict)) and --# STree.Table = STree.Table~; if Dictionary.Is_Null_Symbol (Sym) then -- not found Valid := False; ErrorHandler.Semantic_Error (Err_Num => 142, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node_Local), Id_Str => Node_Lex_String (Node => Ident_Node_Local)); exit; end if; if not (Dictionary.IsPackage (Sym) or else (Dictionary.IsObject (Sym) and then Dictionary.IsProtectedTypeMark (Dictionary.GetType (Sym)))) then -- not a valid prefix Valid := False; ErrorHandler.Semantic_Error (Err_Num => 9, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node_Local), Id_Str => Node_Lex_String (Node => Ident_Node_Local)); exit; end if; Check_Package_Prefix (Node_Pos => Node_Position (Node => Ident_Node_Local), Pack_Sym => Sym, Scope => Scope, OK => 
Prefix_OK); if not Prefix_OK then Valid := False; exit; end if; STree.Set_Node_Lex_String (Sym => Sym, Node => Ident_Node_Local); Ident_Node_Prev := Ident_Node_Local; -- ASSUME Ident_Node_Prev = identifier -- move on to next identifier in prefix chain Ident_Node_Local := Child_Node (Current_Node => Child_Node (Current_Node => Next_Sibling (Current_Node => Prefix_Local))); -- ASSUME Ident_Node_Local = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node_Local) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node_Local = identifier in Do_Dotted_Name"); Prefix_Local := Parent_Node (Current_Node => Parent_Node (Current_Node => Parent_Node (Current_Node => Prefix_Local))); while Syntax_Node_Type (Node => Prefix_Local) = SP_Symbols.name loop --# assert Syntax_Node_Type (Ident_Node_Local, STree.Table) = SP_Symbols.identifier and --# Syntax_Node_Type (Prefix_Local, STree.Table) = SP_Symbols.name and --# Syntax_Node_Type (Ident_Node_Prev, STree.Table) = SP_Symbols.identifier and --# (Dictionary.Is_Null_Symbol (Proc_Sym) or Dictionary.IsProcedure (Proc_Sym, Dictionary.Dict)) and --# STree.Table = STree.Table~; Prefix_Local := Parent_Node (Current_Node => Prefix_Local); end loop; -- ASSUME Prefix_Local = prefix OR procedure_call_statement if Syntax_Node_Type (Node => Prefix_Local) = SP_Symbols.procedure_call_statement then -- ASSUME Prefix_Local = procedure_call_statement -- store package prefix in case we have to search for inherited ops in it Prefix_Symbol := Sym; -- then check procedure name Check_Symbol (Node => Ident_Node_Local, Sym => Dictionary.LookupSelectedItem (Prefix => Sym, Selector => Node_Lex_String (Node => Ident_Node_Local), Scope => Scope, Context => Dictionary.ProgramContext), Prefix => Node_Lex_String (Node => Ident_Node_Prev), Name => Node_Lex_String (Node => Ident_Node_Local), Prefix_Sym => Prefix_Symbol, Scope => Scope, Root_Parameter_Sym => Root_Parameter_Sym, Proc_Sym => Proc_Sym, Tagged_Parameter_Sym => Tagged_Parameter_Sym, Valid => Valid); exit; end if; -- elsif Syntax_Node_Type (Node => Prefix_Local) /= SP_Symbols.prefix then SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Prefix_Local) = SP_Symbols.prefix, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Prefix_Local = prefix OR procedure_call_statement in Do_Dotted_Name"); -- end if; -- otherwise we have another prefix to deal with Sym := Dictionary.LookupSelectedItem (Prefix => Sym, Selector => Node_Lex_String (Node => Ident_Node_Local), Scope => Scope, Context => Dictionary.ProgramContext); end loop; Name_Node := Parent_Node (Current_Node => Parent_Node (Current_Node => Ident_Node_Local)); -- ASSUME Name_Node = name OR selector if Syntax_Node_Type (Node => Name_Node) = SP_Symbols.selector then Name_Node := Parent_Node (Current_Node => Parent_Node (Current_Node => Name_Node)); elsif Syntax_Node_Type (Node => Name_Node) /= SP_Symbols.name then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Name_Node = name OR selector in Do_Dotted_Name"); end if; -- ASSUME Name_Node = name SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Name_Node) = SP_Symbols.name, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Name_Node = name in Do_Dotted_Name"); end Do_Dotted_Name; ---------------------------------------------------------------- function Really_Has_Package_Prefix (Proc_Sym : Dictionary.Symbol; Scope : Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; is Declared_Scope : 
Dictionary.Scopes; Declared_Region : Dictionary.Symbol; Calling_Region : Dictionary.Symbol; Result : Boolean := False; begin -- Before protected operations, we knew syntactically whether a subprogram had a -- package prefix on it. Now we don't because even a local protected operation call -- is prefixed with the protected object name. We need to distinguish between -- PO.Op which is "not dotted" and P.PO.Op which is. if not Dictionary.Is_Null_Symbol (Proc_Sym) then -- Get Region in which caller is declared Calling_Region := Dictionary.GetEnclosingPackage (Scope); -- get scope where called subprogram is declared Declared_Scope := Dictionary.GetScope (Proc_Sym); Declared_Region := Dictionary.GetRegion (Declared_Scope); if Dictionary.IsProtectedType (Declared_Region) then -- we want the scope where the protected type is declared Declared_Scope := Dictionary.GetScope (Declared_Region); end if; -- function is true if declared region and calling region are different Result := not Dictionary.Packages_Are_Equal (Left_Symbol => Dictionary.GetRegion (Declared_Scope), Right_Symbol => Calling_Region); end if; return Result; end Really_Has_Package_Prefix; begin -- Check_Procedure_Name Ident_Node := Last_Child_Of (Start_Node => Node); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Check_Procedure_Name"); Prefix_Symbol := Dictionary.NullSymbol; Prefix_Node := Parent_Node (Current_Node => Parent_Node (Current_Node => Parent_Node (Current_Node => Ident_Node))); -- ASSUME Prefix_Node = name OR prefix OR procedure_call_statement SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Prefix_Node) = SP_Symbols.name or else Syntax_Node_Type (Node => Prefix_Node) = SP_Symbols.prefix or else Syntax_Node_Type (Node => Prefix_Node) = SP_Symbols.procedure_call_statement, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Prefix_Node = name OR prefix OR procedure_call_statement in Check_Procedure_Name"); while Syntax_Node_Type (Node => Prefix_Node) = SP_Symbols.name loop --# assert Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier and --# Syntax_Node_Type (Prefix_Node, STree.Table) = SP_Symbols.name and --# STree.Table = STree.Table~; Prefix_Node := Parent_Node (Current_Node => Prefix_Node); -- ASSUME Prefix_Node = name OR prefix OR procedure_call_statement SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Prefix_Node) = SP_Symbols.name or else Syntax_Node_Type (Node => Prefix_Node) = SP_Symbols.prefix or else Syntax_Node_Type (Node => Prefix_Node) = SP_Symbols.procedure_call_statement, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Prefix_Node = name OR prefix OR procedure_call_statement in Check_Procedure_Name"); end loop; -- ASSUME Prefix_Node = prefix OR procedure_call_statement if Syntax_Node_Type (Node => Prefix_Node) = SP_Symbols.prefix then -- ASSUME Prefix_Node = prefix Do_Dotted_Name (Ident_Node => Ident_Node, Prefix => Prefix_Node, Scope => Scope, Root_Parameter_Sym => Root_Parameter_Sym, Prefix_Symbol => Prefix_Symbol, Proc_Sym => Proc_Sym, Tagged_Parameter_Sym => Tagged_Parameter_Sym, Name_Node => Name_Node, Valid => Valid); Dotted := Really_Has_Package_Prefix (Proc_Sym => Proc_Sym, Scope => Scope); elsif Syntax_Node_Type (Node => Prefix_Node) = SP_Symbols.procedure_call_statement then -- ASSUME Prefix_Node = procedure_call_statement Do_Simple_Name (Ident_Node => Ident_Node, Scope => Scope, 
Root_Parameter_Sym => Root_Parameter_Sym, Proc_Sym => Proc_Sym, Tagged_Parameter_Sym => Tagged_Parameter_Sym, Name_Node => Name_Node, Valid => Valid); Dotted := False; else Root_Parameter_Sym := Dictionary.NullSymbol; Proc_Sym := Dictionary.NullSymbol; Tagged_Parameter_Sym := Dictionary.NullSymbol; Name_Node := STree.NullNode; Dotted := False; Valid := False; end if; end Check_Procedure_Name; --------------------------------------------------------------- procedure Check_Globals_Are_Visible (Node_Pos : in LexTokenManager.Token_Position; Prefix_Symbol, Proc_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Abstraction : in Dictionary.Abstractions) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# Prefix_Symbol, --# Proc_Sym, --# Scope, --# SPARK_IO.File_Sys; is It : Dictionary.Iterator; Calling_Sym, Glob_Sym : Dictionary.Symbol; Calling_Abstraction : Dictionary.Abstractions; function Is_Local_Variable (Calling_Sym, Glob_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return Dictionary.GetRegion (Dictionary.GetScope (Glob_Sym)) = Calling_Sym; end Is_Local_Variable; function Is_Own_Var_Of_Embedded_Package (Calling_Sym, Glob_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return Dictionary.IsOwnVariable (Glob_Sym) and then Dictionary.GetScope (Dictionary.GetOwner (Glob_Sym)) = Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Calling_Sym); end Is_Own_Var_Of_Embedded_Package; -- this procedure can be extended to deal with error case of vars which -- are exported by a called procedure but not exported by calling procedure procedure Check_Illegal_Update (Node_Pos : in LexTokenManager.Token_Position; Calling_Abstraction, Abstraction : in Dictionary.Abstractions; Calling_Sym, Proc_Sym, Glob_Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# Calling_Abstraction, --# Calling_Sym, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Glob_Sym, --# LexTokenManager.State, --# Node_Pos, --# Proc_Sym, --# SPARK_IO.File_Sys; is begin if Dictionary.IsFunction (Calling_Sym) and then Dictionary.Is_Global_Variable (Calling_Abstraction, Calling_Sym, Glob_Sym) and then Dictionary.IsExport (Abstraction, Proc_Sym, Glob_Sym) and then not -- if the global is a mode in own variable then the "exporting" is only -- a modelling artefact that we can ignore (Dictionary.GetOwnVariableOrConstituentMode (Glob_Sym) = Dictionary.InMode) then ErrorHandler.Semantic_Error (Err_Num => 328, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (Glob_Sym)); end if; end Check_Illegal_Update; begin -- Check_Globals_Are_Visible Calling_Sym := Dictionary.GetEnclosingCompilationUnit (Scope); Calling_Abstraction := Dictionary.GetAbstraction (Calling_Sym, Scope); It := Dictionary.FirstGlobalVariable (Abstraction, Proc_Sym); while not Dictionary.IsNullIterator (It) loop Glob_Sym := Substitute_Protected_Type_Self_Reference (Sym => 
Dictionary.CurrentSymbol (It), Prefix_Symbol => Prefix_Symbol); if not Dictionary.Is_Global_Variable (Calling_Abstraction, Calling_Sym, Glob_Sym) and then not Dictionary.IsFormalParameter (Calling_Sym, Glob_Sym) and then not Is_Local_Variable (Calling_Sym => Calling_Sym, Glob_Sym => Glob_Sym) and then not Is_Own_Var_Of_Embedded_Package (Calling_Sym => Calling_Sym, Glob_Sym => Glob_Sym) and then not Dictionary.Is_Null_Variable (Glob_Sym) then -- null variable always deemed visible if Dictionary.IsImport (Abstraction, Proc_Sym, Glob_Sym) and then Dictionary.GetOwnVariableOrConstituentMode (Glob_Sym) /= Dictionary.OutMode then ErrorHandler.Semantic_Error_Sym (Err_Num => 25, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Sym => Glob_Sym, Scope => Scope); end if; if Dictionary.IsExport (Abstraction, Proc_Sym, Glob_Sym) and then Dictionary.GetOwnVariableOrConstituentMode (Glob_Sym) /= Dictionary.InMode then ErrorHandler.Semantic_Error_Sym (Err_Num => 24, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Sym => Glob_Sym, Scope => Scope); end if; end if; Check_Illegal_Update (Node_Pos => Node_Pos, Calling_Abstraction => Calling_Abstraction, Abstraction => Abstraction, Calling_Sym => Calling_Sym, Proc_Sym => Proc_Sym, Glob_Sym => Glob_Sym); It := Dictionary.NextSymbol (It); end loop; end Check_Globals_Are_Visible; ------------------------------------------------------------------------- procedure Check_Actual (Root_Parameter_Sym, Proc_Sym, Formal_Sym, Tagged_Parameter_Sym : in Dictionary.Symbol; Formal_Pos : in Positive; Actual_Node : in STree.SyntaxNode; Ref_Var_Param : in SeqAlgebra.Seq; Abstraction : in Dictionary.Abstractions; Scope : in Dictionary.Scopes; Actual_Parameter_List, Formal_Position_List : in out SimpleLists.SimpleList; Component_Data : in out ComponentManager.ComponentData; Entire_Array_Sym : out Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# in out TheHeap; --# derives Actual_Parameter_List, --# Aggregate_Stack.State, --# LexTokenManager.State, --# STree.Table from *, --# Abstraction, --# Actual_Node, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Formal_Sym, --# LexTokenManager.State, --# Proc_Sym, --# Ref_Var_Param, --# Scope, --# STree.Table, --# TheHeap & --# Component_Data, --# Dictionary.Dict, --# Statistics.TableUsage, --# TheHeap from *, --# Abstraction, --# Actual_Node, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Formal_Sym, --# LexTokenManager.State, --# Proc_Sym, --# Ref_Var_Param, --# Root_Parameter_Sym, --# Scope, --# STree.Table, --# TheHeap & --# Entire_Array_Sym from Abstraction, --# Actual_Node, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Formal_Sym, --# LexTokenManager.State, --# Proc_Sym, --# Ref_Var_Param, --# Scope, --# STree.Table, --# TheHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# Actual_Node, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# 
ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Formal_Sym, --# LexTokenManager.State, --# Proc_Sym, --# Ref_Var_Param, --# Root_Parameter_Sym, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Tagged_Parameter_Sym, --# TheHeap & --# Formal_Position_List from *, --# Abstraction, --# Actual_Node, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Formal_Pos, --# Formal_Sym, --# LexTokenManager.State, --# Proc_Sym, --# Ref_Var_Param, --# Scope, --# STree.Table, --# TheHeap & --# SLI.State from *, --# Abstraction, --# Actual_Node, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Formal_Sym, --# LexTokenManager.State, --# Proc_Sym, --# Ref_Var_Param, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# TheHeap; --# pre Syntax_Node_Type (Actual_Node, STree.Table) = SP_Symbols.expression; --# post STree.Table = STree.Table~; is Entire_Var_Sym, Var_Type : Dictionary.Symbol; Mode_Of_Formal : Dictionary.Modes; Exp_Result : Exp_Record; Is_A_Name : Boolean; Actual_Type : Dictionary.Symbol; Actual_Parameter_Is_A_Variable : Boolean := False; Actual_Parameter_Is_A_Constant : Boolean := False; -------------------------------------------------------------------- procedure Check_Valid_Mode (Actual_Node_Pos : in LexTokenManager.Token_Position; Scope : in Dictionary.Scopes; Actual_Sym : in Dictionary.Symbol; Mode_Of_Formal : in Dictionary.Modes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Actual_Node_Pos, --# Actual_Sym, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Mode_Of_Formal, --# Scope, --# SPARK_IO.File_Sys; -- Actual_Sym represents an actual parameter, whose corresponding formal -- parameter is of mode in out or out. It is the symbol of the associated -- entire variable (if the parameter is an entire variable or a -- subcomponent of a variable). -- -- The procedure checks that every actual parameter of mode in out or out -- is not -- - a formal parameter of mode in, or -- - a loop parameter, -- and that an actual parameter of mode in out is not itself a formal -- parameter of mode out, or a subcomponent thereof. 
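-- As a hypothetical illustration of the first of these checks: inside
--    procedure Caller (A : in Integer)
-- passing A as the actual for an "in out" or "out" formal of the called
-- procedure is rejected (error 170 below), because A is not updatable here.
-- The names Caller and A are illustrative only.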
-- Also warns of direct update of non-enclosing package own variable -- is Actual_Mode : Dictionary.Modes; begin if Dictionary.IsLoopParameter (Actual_Sym) then ErrorHandler.Semantic_Error (Err_Num => 168, Reference => ErrorHandler.No_Reference, Position => Actual_Node_Pos, Id_Str => Dictionary.GetSimpleName (Actual_Sym)); elsif Dictionary.IsOwnVariable (Actual_Sym) and then not Is_Enclosing_Package (Outer_Pack => Dictionary.GetOwner (Actual_Sym), Scope => Scope) then ErrorHandler.Semantic_Warning_Sym (Err_Num => 169, Position => Actual_Node_Pos, Sym => Actual_Sym, Scope => Scope); end if; if Dictionary.IsSubprogramParameter (Actual_Sym) then Actual_Mode := Dictionary.GetSubprogramParameterMode (Actual_Sym); if Actual_Mode = Dictionary.InMode or else Actual_Mode = Dictionary.DefaultMode then ErrorHandler.Semantic_Error (Err_Num => 170, Reference => ErrorHandler.No_Reference, Position => Actual_Node_Pos, Id_Str => Dictionary.GetSimpleName (Actual_Sym)); elsif CommandLineData.Content.Language_Profile = CommandLineData.SPARK83 and then Actual_Mode = Dictionary.OutMode and then Mode_Of_Formal = Dictionary.InOutMode then ErrorHandler.Semantic_Error (Err_Num => 171, Reference => ErrorHandler.No_Reference, Position => Actual_Node_Pos, Id_Str => Dictionary.GetSimpleName (Actual_Sym)); end if; end if; end Check_Valid_Mode; ------------------------------------------------------------------- procedure Check_Type (Actual_Type : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Actual_Range_RHS : in Maths.Value; Formal_Sym : in Dictionary.Symbol; Tagged_Parameter_Sym : in Dictionary.Symbol; Actual_Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Actual_Node_Pos, --# Actual_Range_RHS, --# Actual_Type, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Formal_Sym, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# Tagged_Parameter_Sym; is Expected_Type : Dictionary.Symbol; begin Expected_Type := Dictionary.GetType (Formal_Sym); if not (Dictionary.Types_Are_Equal (Left_Symbol => Tagged_Parameter_Sym, Right_Symbol => Actual_Type, Full_Range_Subtype => False) or else -- inherited call, right type (Dictionary.Is_Null_Symbol (Tagged_Parameter_Sym) and then -- no inheritance, normal rules Dictionary.CompatibleTypes (Scope, Actual_Type, Expected_Type)) or else (not Dictionary.IsAnExtensionOf (Actual_Type, Tagged_Parameter_Sym) and then -- trap use of root type Dictionary.CompatibleTypes (Scope, Actual_Type, Expected_Type))) then -- we have a type mismatch but there are two cases to consider: (1) the failure is in an inherited call -- indicated by Tagged_Parameter_Sym /= NullSymbol or (2) in a normal proc call. 
To provide the right -- information in the error message we need a condition thus: if Dictionary.Is_Null_Symbol (Tagged_Parameter_Sym) then -- normal case ErrorHandler.Semantic_Error_Sym2 (Err_Num => 107, Reference => ErrorHandler.No_Reference, Position => Actual_Node_Pos, Sym => Actual_Type, Sym2 => Expected_Type, Scope => Scope); else -- inherited call ErrorHandler.Semantic_Error_Sym2 (Err_Num => 107, Reference => ErrorHandler.No_Reference, Position => Actual_Node_Pos, Sym => Actual_Type, Sym2 => Tagged_Parameter_Sym, Scope => Scope); end if; -- if types match ok, do checks for array bounds, strings etc elsif Dictionary.Is_Constrained_Array_Type_Mark (Expected_Type, Scope) then -- Formal parameter type is Constrained. if Dictionary.Is_Unconstrained_Array_Type_Mark (Actual_Type, Scope) then -- Actual is unconstrained. In SPARK95 or 2005, this is OK if -- the actual is a static String expression, but illegal -- otherwise. if CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83 and then Dictionary.IsPredefinedStringType (Actual_Type) then -- Formal must be a constrained String subtype, so we need -- to check the upper bound of the actual (if it has one) against -- the expected upper bound of the formal. if Actual_Range_RHS = Maths.NoValue then -- Actual is not static, so must be illegal ErrorHandler.Semantic_Error (Err_Num => 39, Reference => ErrorHandler.No_Reference, Position => Actual_Node_Pos, Id_Str => LexTokenManager.Null_String); else -- Actual is static, so check upper-bound against that expected if Actual_Range_RHS /= Maths.ValueRep (Dictionary.GetScalarAttributeValue (False, LexTokenManager.Last_Token, Expected_Type)) then ErrorHandler.Semantic_Error (Err_Num => 418, Reference => ErrorHandler.No_Reference, Position => Actual_Node_Pos, Id_Str => LexTokenManager.Null_String); end if; end if; else ErrorHandler.Semantic_Error (Err_Num => 39, Reference => ErrorHandler.No_Reference, Position => Actual_Node_Pos, Id_Str => LexTokenManager.Null_String); end if; elsif Illegal_Unconstrained (Left_Type => Expected_Type, Right_Type => Actual_Type) then ErrorHandler.Semantic_Error (Err_Num => 418, Reference => ErrorHandler.No_Reference, Position => Actual_Node_Pos, Id_Str => LexTokenManager.Null_String); end if; end if; end Check_Type; ------------------------------------------------------------------- function Is_Global_Of (Var_Sym, Proc_Sym : Dictionary.Symbol; Abstraction : Dictionary.Abstractions) return Boolean --# global in Dictionary.Dict; is It : Dictionary.Iterator; Found : Boolean; begin Found := False; It := Dictionary.FirstGlobalVariable (Abstraction, Proc_Sym); while not Dictionary.IsNullIterator (It) loop if Dictionary.CurrentSymbol (It) = Var_Sym then Found := True; exit; end if; It := Dictionary.NextSymbol (It); end loop; return Found; end Is_Global_Of; ------------------------------------------------------------------- function Is_Exported_Global_Of (Var_Sym, Proc_Sym : Dictionary.Symbol; Abstraction : Dictionary.Abstractions) return Boolean --# global in Dictionary.Dict; is It : Dictionary.Iterator; Found : Boolean; begin Found := False; It := Dictionary.FirstGlobalVariable (Abstraction, Proc_Sym); while not Dictionary.IsNullIterator (It) loop if Dictionary.CurrentSymbol (It) = Var_Sym then Found := Dictionary.IsExport (Abstraction, Proc_Sym, Dictionary.CurrentSymbol (It)); exit; end if; It := Dictionary.NextSymbol (It); end loop; return Found; end Is_Exported_Global_Of; ---------------------------------------------------------- procedure 
Check_For_Use_Of_Protected_Or_Stream_Vars (Var_Sym, Proc_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Actual_Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Actual_Node_Pos, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Proc_Sym, --# Scope, --# SPARK_IO.File_Sys, --# Var_Sym; is begin -- Stream variables cannot be passed as parameters if Dictionary.IsOwnVariableOrConstituentWithMode (Var_Sym) then ErrorHandler.Semantic_Error_Sym (Err_Num => 716, Reference => ErrorHandler.No_Reference, Position => Actual_Node_Pos, Sym => Var_Sym, Scope => Scope); -- Protected variables cannot be passed as parameters elsif Dictionary.IsOwnVariable (Var_Sym) and then Dictionary.GetOwnVariableProtected (Var_Sym) then -- unless it is a suspension object to a predefined operation. if not (Dictionary.IsPredefinedSuspensionObjectType (Dictionary.GetType (Var_Sym)) and then Dictionary.IsPredefined (Proc_Sym)) then ErrorHandler.Semantic_Error_Sym (Err_Num => 725, Reference => ErrorHandler.No_Reference, Position => Actual_Node_Pos, Sym => Var_Sym, Scope => Scope); end if; end if; end Check_For_Use_Of_Protected_Or_Stream_Vars; ---------------------------------------------------------- -- procedure maintains two parallel lists, one of actual symbols -- and the other of the formal parameter position number -- applicable to each. procedure Add_Sym (Sym : in Dictionary.Symbol; Formal_Pos : in Positive; Actual_Parameter_List, Formal_Position_List : in out SimpleLists.SimpleList) --# derives Actual_Parameter_List from *, --# Sym & --# Formal_Position_List from *, --# Formal_Pos; is Ok : Boolean; begin SimpleLists.AddItem (Natural (Dictionary.SymbolRef (Sym)), Actual_Parameter_List, Ok); if not Ok then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.List_Overflow_In_Procedure_Call, Msg => ""); end if; SimpleLists.AddItem (Formal_Pos, Formal_Position_List, Ok); if not Ok then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.List_Overflow_In_Procedure_Call, Msg => ""); end if; end Add_Sym; -------------------------------------------- function Is_Global_Of_An_Enclosing_Function (Calling_Scope : Dictionary.Scopes; Param_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is Calling_Sym : Dictionary.Symbol; Calling_Abstraction : Dictionary.Abstractions; Result : Boolean := False; begin Calling_Sym := Dictionary.GetEnclosingCompilationUnit (Calling_Scope); if Dictionary.IsFunction (Calling_Sym) then -- Procedure that exports Param_Sym is being called from inside a function body -- so there is a possibility that an indirect function side effect is occurring Calling_Abstraction := Dictionary.GetAbstraction (Calling_Sym, Calling_Scope); Result := Dictionary.Is_Global_Variable (Calling_Abstraction, Calling_Sym, Param_Sym); end if; return Result; end Is_Global_Of_An_Enclosing_Function; -------------------------------------------- procedure Substitute_Root_Type_For_Extended_Type_In_Sequence (The_Seq : in SeqAlgebra.Seq; Tagged_Parameter_Sym : in Dictionary.Symbol; Component_Data : in out ComponentManager.ComponentData) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives Component_Data, --#
Dictionary.Dict, --# Statistics.TableUsage, --# TheHeap from *, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Tagged_Parameter_Sym, --# TheHeap, --# The_Seq & --# null from CommandLineData.Content; is Actual_Sym : Dictionary.Symbol; Member_Of_Seq : SeqAlgebra.MemberOfSeq; begin Member_Of_Seq := SeqAlgebra.FirstMember (TheHeap, The_Seq); while not SeqAlgebra.IsNullMember (Member_Of_Seq) loop -- process each member of sequence in turn Actual_Sym := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => Member_Of_Seq))); -- we only need to do things to tagged types that extend Tagged_Parameter_Sym if Dictionary.IsAnExtensionOf (Tagged_Parameter_Sym, Dictionary.GetType (Actual_Sym)) then -- We can't replace X with X.Inherit unless we add X's subcomponents first Add_Record_Sub_Components (Record_Var_Sym => Actual_Sym, Record_Type_Sym => Dictionary.GetType (Actual_Sym), Component_Data => Component_Data, The_Heap => TheHeap); -- now add as may .Inherits as needed to get type match Actual_Sym := Convert_Tagged_Actual (Actual => Actual_Sym, Tagged_Parameter_Sym => Tagged_Parameter_Sym); -- replace existing sequence member with converted one SeqAlgebra.RemoveMember (TheHeap, The_Seq, SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => Member_Of_Seq)); SeqAlgebra.AddMember (TheHeap, The_Seq, Natural (Dictionary.SymbolRef (Actual_Sym))); end if; Member_Of_Seq := SeqAlgebra.NextMember (TheHeap, Member_Of_Seq); end loop; end Substitute_Root_Type_For_Extended_Type_In_Sequence; begin -- Check_Actual Actual_Type := Dictionary.GetUnknownTypeMark; Entire_Array_Sym := Dictionary.NullSymbol; -- default value for all except actual param which is array element Mode_Of_Formal := Dictionary.GetSubprogramParameterMode (Formal_Sym); if Dictionary.IsExport (Abstraction, Proc_Sym, Formal_Sym) or else Mode_Of_Formal = Dictionary.InOutMode or else Mode_Of_Formal = Dictionary.OutMode then -- no array elements or indexing permitted Walk_Name (Exp_Node => Actual_Node, Scope => Scope, Component_Data => Component_Data, The_Heap => TheHeap, Result => Exp_Result, Is_A_Name => Is_A_Name, Ref_Var_Param => Ref_Var_Param); if Is_A_Name then if Exp_Result.Is_AVariable then -- valid variable found, do further checks Actual_Parameter_Is_A_Variable := True; Entire_Var_Sym := Exp_Result.Variable_Symbol; if Exp_Result.Arg_List_Found then -- actual parameter is an array reference so we need to return the entire array symbol -- so we can correctly handle the flow analysis of calls such as SetToZero (A (I)); -- -- If it's an entire variable, then the symbol we need is in Exp_Result.Variable_Symbol -- -- If it's NOT an entire var, then it must be record field such as R.F (I). 
-- In that case, the flow analyser needs the symbol of the field itself, -- which is in Exp_Result.Other_Symbol if Exp_Result.Is_An_Entire_Variable then Entire_Array_Sym := Exp_Result.Variable_Symbol; else Entire_Array_Sym := Exp_Result.Other_Symbol; end if; end if; Var_Type := Exp_Result.Type_Symbol; Actual_Type := Exp_Result.Type_Symbol; Plant_Constraining_Type (Expression_Type => Actual_Type, String_Length => Maths.NoValue, Actual_Node => Actual_Node); if Dictionary.Is_Null_Symbol (Exp_Result.Other_Symbol) then Add_Sym (Sym => Exp_Result.Variable_Symbol, Formal_Pos => Formal_Pos, Actual_Parameter_List => Actual_Parameter_List, Formal_Position_List => Formal_Position_List); else Add_Sym (Sym => Exp_Result.Other_Symbol, Formal_Pos => Formal_Pos, Actual_Parameter_List => Actual_Parameter_List, Formal_Position_List => Formal_Position_List); end if; Check_Type (Actual_Type => Var_Type, Scope => Scope, Actual_Range_RHS => Maths.NoValue, Formal_Sym => Formal_Sym, Tagged_Parameter_Sym => Tagged_Parameter_Sym, Actual_Node_Pos => Node_Position (Node => Actual_Node)); Check_Valid_Mode (Actual_Node_Pos => Node_Position (Node => Actual_Node), Scope => Scope, Actual_Sym => Entire_Var_Sym, Mode_Of_Formal => Mode_Of_Formal); Check_For_Use_Of_Protected_Or_Stream_Vars (Var_Sym => Entire_Var_Sym, Proc_Sym => Proc_Sym, Scope => Scope, Actual_Node_Pos => Node_Position (Node => Actual_Node)); if Is_Global_Of_An_Enclosing_Function (Calling_Scope => Scope, Param_Sym => Entire_Var_Sym) then -- We are attempting to change a function's global via its use as an actual parameter -- to a procedure within the function ErrorHandler.Semantic_Error_Sym (Err_Num => 328, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Actual_Node), Sym => Entire_Var_Sym, Scope => Scope); end if; if Is_Global_Of (Var_Sym => Entire_Var_Sym, Proc_Sym => Proc_Sym, Abstraction => Abstraction) then -- export overlapping global ErrorHandler.Semantic_Error (Err_Num => 173, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Actual_Node), Id_Str => LexTokenManager.Null_String); end if; else -- expression is not an entire variable Add_Sym (Sym => Dictionary.NullSymbol, Formal_Pos => Formal_Pos, Actual_Parameter_List => Actual_Parameter_List, Formal_Position_List => Formal_Position_List); -- put place holder in parameter list ErrorHandler.Semantic_Error (Err_Num => 172, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Actual_Node), Id_Str => LexTokenManager.Null_String); end if; else -- it's not even a name Add_Sym (Sym => Dictionary.NullSymbol, Formal_Pos => Formal_Pos, Actual_Parameter_List => Actual_Parameter_List, Formal_Position_List => Formal_Position_List); -- put place holder in parameter list ErrorHandler.Semantic_Error (Err_Num => 172, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Actual_Node), Id_Str => LexTokenManager.Null_String); end if; else -- Mode is "in", so any expression will do but vars need special alias checks Walk_Expression_P.Walk_Expression (Exp_Node => Actual_Node, Scope => Scope, Type_Context => Dictionary.GetType (Formal_Sym), Context_Requires_Static => False, Ref_Var => Ref_Var_Param, Result => Exp_Result, Component_Data => Component_Data, The_Heap => TheHeap); if Exp_Result.Is_AVariable then --# accept F, 10, Is_A_Name, "We already know."; -- We need to use walk_name in order to get a more useful -- name for the variable.
We only do this if the previous -- call to WalkExpression says that this refers to a -- variable. Walk_Name (Exp_Node => Actual_Node, Scope => Scope, Component_Data => Component_Data, The_Heap => TheHeap, Result => Exp_Result, Is_A_Name => Is_A_Name, Ref_Var_Param => Ref_Var_Param); --# end accept; end if; Actual_Type := Exp_Result.Type_Symbol; Plant_Constraining_Type (Expression_Type => Actual_Type, String_Length => Exp_Result.Range_RHS, Actual_Node => Actual_Node); if not Dictionary.Is_Null_Symbol (Exp_Result.Variable_Symbol) then -- we have a variable or 'disguised' variable Actual_Parameter_Is_A_Variable := True; Entire_Var_Sym := Exp_Result.Variable_Symbol; Var_Type := Exp_Result.Type_Symbol; if Dictionary.Is_Null_Symbol (Exp_Result.Other_Symbol) then Add_Sym (Sym => Exp_Result.Variable_Symbol, Formal_Pos => Formal_Pos, Actual_Parameter_List => Actual_Parameter_List, Formal_Position_List => Formal_Position_List); else Add_Sym (Sym => Exp_Result.Other_Symbol, Formal_Pos => Formal_Pos, Actual_Parameter_List => Actual_Parameter_List, Formal_Position_List => Formal_Position_List); end if; Check_Type (Actual_Type => Var_Type, Scope => Scope, Actual_Range_RHS => Maths.NoValue, Formal_Sym => Formal_Sym, Tagged_Parameter_Sym => Tagged_Parameter_Sym, Actual_Node_Pos => Node_Position (Node => Actual_Node)); if CommandLineData.Content.Language_Profile = CommandLineData.SPARK83 and then Dictionary.IsSubprogramParameter (Entire_Var_Sym) and then Dictionary.GetSubprogramParameterMode (Entire_Var_Sym) = Dictionary.OutMode then ErrorHandler.Semantic_Error (Err_Num => 171, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Actual_Node), Id_Str => Dictionary.GetSimpleName (Entire_Var_Sym)); end if; if Is_Exported_Global_Of (Var_Sym => Entire_Var_Sym, Proc_Sym => Proc_Sym, Abstraction => Abstraction) then ErrorHandler.Semantic_Error (Err_Num => 166, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Actual_Node), Id_Str => LexTokenManager.Null_String); end if; elsif Exp_Result.Is_ARange then -- check that a type mark isn't being used as an actual parameter ErrorHandler.Semantic_Error (Err_Num => 5, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Actual_Node), Id_Str => Dictionary.GetSimpleName (Exp_Result.Type_Symbol)); else -- True expression, does not represent a variable and isn't a typemark Actual_Parameter_Is_A_Constant := Exp_Result.Is_Constant; Add_Sym (Sym => Dictionary.NullSymbol, Formal_Pos => Formal_Pos, Actual_Parameter_List => Actual_Parameter_List, Formal_Position_List => Formal_Position_List); -- put place holder in parameter list Check_Type (Actual_Type => Exp_Result.Type_Symbol, Scope => Scope, Actual_Range_RHS => Exp_Result.Range_RHS, Formal_Sym => Formal_Sym, Tagged_Parameter_Sym => Tagged_Parameter_Sym, Actual_Node_Pos => Node_Position (Node => Actual_Node)); end if; end if; -- We might need to substitute a subtype of the actual paramter -- if an inherited root op is being called if Tagged_Substitution_Required (Formal => Formal_Sym, Actual_Type => Actual_Type, Root_Parameter_Sym => Root_Parameter_Sym) then if Actual_Parameter_Is_A_Variable then Substitute_Root_Type_For_Extended_Type_In_Sequence (The_Seq => Ref_Var_Param, Tagged_Parameter_Sym => Root_Parameter_Sym, Component_Data => Component_Data); elsif not Actual_Parameter_Is_A_Constant then ErrorHandler.Semantic_Error (Err_Num => 827, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Actual_Node), Id_Str => 
LexTokenManager.Null_String); end if; end if; end Check_Actual; --------------------------------------------------------------- procedure Handle_Named_Association (Named_Argument_Assoc_Node : in STree.SyntaxNode; Root_Parameter_Sym, Proc_Sym, Tagged_Parameter_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Abstraction : in Dictionary.Abstractions; Actual_Parameter_List, Formal_Position_List : in out SimpleLists.SimpleList; Param_Table : in out Param_Tables; Component_Data : in out ComponentManager.ComponentData) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# in out TheHeap; --# derives Actual_Parameter_List, --# Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# Formal_Position_List, --# LexTokenManager.State, --# Param_Table, --# Statistics.TableUsage, --# STree.Table, --# TheHeap from *, --# Abstraction, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Named_Argument_Assoc_Node, --# Proc_Sym, --# Root_Parameter_Sym, --# Scope, --# STree.Table, --# TheHeap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from Abstraction, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Named_Argument_Assoc_Node, --# Proc_Sym, --# Root_Parameter_Sym, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Tagged_Parameter_Sym, --# TheHeap; --# pre Syntax_Node_Type (Named_Argument_Assoc_Node, STree.Table) = SP_Symbols.named_argument_association; --# post STree.Table = STree.Table~; is Check_Named_Association_Ref_Var : SeqAlgebra.Seq; Expression_Node : STree.SyntaxNode; Formal_It : Dictionary.Iterator; Formal_Sym : Dictionary.Symbol; Entire_Array_Sym : Dictionary.Symbol; begin -- Check that each named assoication appears and appears once only Check_Named_Association (The_Formals => Proc_Sym, Scope => Scope, Named_Argument_Assoc_Node => Named_Argument_Assoc_Node); -- now process each formal in turn Formal_It := Dictionary.FirstSubprogramParameter (Proc_Sym); while not Dictionary.IsNullIterator (Formal_It) loop --# assert Syntax_Node_Type (Named_Argument_Assoc_Node, STree.Table) = SP_Symbols.named_argument_association and --# STree.Table = STree.Table~; Formal_Sym := Dictionary.CurrentSymbol (Formal_It); Find_Actual_Node (For_Formal => Formal_Sym, Named_Argument_Assoc_Node => Named_Argument_Assoc_Node, Expression_Node => Expression_Node); -- ASSUME Expression_Node = expression OR NULL --# check Syntax_Node_Type (Expression_Node, STree.Table) = SP_Symbols.expression or --# Expression_Node = STree.NullNode; if Syntax_Node_Type (Node => Expression_Node) = SP_Symbols.expression then -- ASSUME Expression_Node = expression SeqAlgebra.CreateSeq (TheHeap, Check_Named_Association_Ref_Var); Check_Actual (Root_Parameter_Sym => Root_Parameter_Sym, Proc_Sym => Proc_Sym, Formal_Sym => Formal_Sym, Tagged_Parameter_Sym => Tagged_Parameter_Sym, Formal_Pos => Dictionary.GetSubprogramParameterNumber (Formal_Sym), Actual_Node => Expression_Node, 
Ref_Var_Param => Check_Named_Association_Ref_Var, Abstraction => Abstraction, Scope => Scope, Actual_Parameter_List => Actual_Parameter_List, Formal_Position_List => Formal_Position_List, Component_Data => Component_Data, Entire_Array_Sym => Entire_Array_Sym); Add_Seq (Param_No => Dictionary.GetSubprogramParameterNumber (Formal_Sym), Refs => Check_Named_Association_Ref_Var, Entire_Array_Sym => Entire_Array_Sym, Param_Table => Param_Table); end if; Formal_It := Dictionary.NextSymbol (Formal_It); end loop; end Handle_Named_Association; --------------------------------------------------------------- procedure Handle_Positional_Association (Positional_Argument_Assoc_Node : in STree.SyntaxNode; Root_Parameter_Sym, Proc_Sym, Tagged_Parameter_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Abstraction : in Dictionary.Abstractions; Actual_Parameter_List, Formal_Position_List : in out SimpleLists.SimpleList; Param_Table : in out Param_Tables; Component_Data : in out ComponentManager.ComponentData) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# in out TheHeap; --# derives Actual_Parameter_List, --# Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# Formal_Position_List, --# LexTokenManager.State, --# Param_Table, --# Statistics.TableUsage, --# STree.Table, --# TheHeap from *, --# Abstraction, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Positional_Argument_Assoc_Node, --# Proc_Sym, --# Root_Parameter_Sym, --# Scope, --# STree.Table, --# TheHeap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from Abstraction, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Positional_Argument_Assoc_Node, --# Proc_Sym, --# Root_Parameter_Sym, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Tagged_Parameter_Sym, --# TheHeap; --# pre Syntax_Node_Type (Positional_Argument_Assoc_Node, STree.Table) = SP_Symbols.positional_argument_association; --# post STree.Table = STree.Table~; is Check_Positional_Association_Ref_Var : SeqAlgebra.Seq; Expression_Node : STree.SyntaxNode; Formal_It : Dictionary.Iterator; Actual_It : STree.Iterator; Formal_Sym : Dictionary.Symbol; Parameter_Number : Positive; Entire_Array_Sym : Dictionary.Symbol; begin Expression_Node := Positional_Argument_Assoc_Node; Actual_It := Find_First_Node (Node_Kind => SP_Symbols.expression, From_Root => Positional_Argument_Assoc_Node, In_Direction => STree.Down); Formal_It := Dictionary.FirstSubprogramParameter (Proc_Sym); while not Dictionary.IsNullIterator (Formal_It) and then not STree.IsNull (Actual_It) loop Formal_Sym := Dictionary.CurrentSymbol (Formal_It); Expression_Node := Get_Node (It => Actual_It); --# assert Syntax_Node_Type (Expression_Node, STree.Table) = SP_Symbols.expression and --# Expression_Node = Get_Node (Actual_It) and --# STree.Table = STree.Table~; Parameter_Number := Dictionary.GetSubprogramParameterNumber (Formal_Sym); SeqAlgebra.CreateSeq 
(TheHeap, Check_Positional_Association_Ref_Var); Check_Actual (Root_Parameter_Sym => Root_Parameter_Sym, Proc_Sym => Proc_Sym, Formal_Sym => Formal_Sym, Tagged_Parameter_Sym => Tagged_Parameter_Sym, Formal_Pos => Parameter_Number, Actual_Node => Expression_Node, Ref_Var_Param => Check_Positional_Association_Ref_Var, Abstraction => Abstraction, Scope => Scope, Actual_Parameter_List => Actual_Parameter_List, Formal_Position_List => Formal_Position_List, Component_Data => Component_Data, Entire_Array_Sym => Entire_Array_Sym); Add_Seq (Param_No => Parameter_Number, Refs => Check_Positional_Association_Ref_Var, Entire_Array_Sym => Entire_Array_Sym, Param_Table => Param_Table); Formal_It := Dictionary.NextSymbol (Formal_It); Actual_It := STree.NextNode (Actual_It); end loop; -- completeness check, both loops should run out at same time if not Dictionary.IsNullIterator (Formal_It) or else not STree.IsNull (Actual_It) then ErrorHandler.Semantic_Error (Err_Num => 3, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Expression_Node), Id_Str => Dictionary.GetSimpleName (Proc_Sym)); end if; end Handle_Positional_Association; --------------------------------------------------------------- procedure Check_Parameter_Overlap (Arg_List_Node : in STree.SyntaxNode; Proc_Sym : in Dictionary.Symbol; Actual_Parameter_List, Formal_Position_List : in SimpleLists.SimpleList; Abstraction : in Dictionary.Abstractions) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# Actual_Parameter_List, --# Arg_List_Node, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Formal_Position_List, --# LexTokenManager.State, --# Proc_Sym, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Arg_List_Node, STree.Table) = SP_Symbols.named_argument_association or --# Syntax_Node_Type (Arg_List_Node, STree.Table) = SP_Symbols.positional_argument_association; is Actual_Node : STree.SyntaxNode; Err_Pos : LexTokenManager.Token_Position; ----------------------------------------------------- procedure Next_Err_Pos (Actual_Node : in out STree.SyntaxNode; Err_Pos : out LexTokenManager.Token_Position) --# global in STree.Table; --# derives Actual_Node, --# Err_Pos from Actual_Node, --# STree.Table; --# pre Syntax_Node_Type (Actual_Node, STree.Table) = SP_Symbols.expression or --# Syntax_Node_Type (Actual_Node, STree.Table) = SP_Symbols.named_argument_association or --# Syntax_Node_Type (Actual_Node, STree.Table) = SP_Symbols.positional_argument_association; --# post Syntax_Node_Type (Actual_Node, STree.Table) = SP_Symbols.expression or --# Syntax_Node_Type (Actual_Node, STree.Table) = SP_Symbols.named_argument_association or --# Syntax_Node_Type (Actual_Node, STree.Table) = SP_Symbols.positional_argument_association; is begin -- ASSUME Actual_Node = expression OR named_argument_association OR positional_argument_association if Syntax_Node_Type (Node => Actual_Node) = SP_Symbols.expression then -- ASSUME Actual_Node = expression Actual_Node := Last_Sibling_Of (Start_Node => Parent_Node (Current_Node => Actual_Node)); end if; Err_Pos := Node_Position (Node => Actual_Node); -- ASSUME Actual_Node = expression OR named_argument_association OR positional_argument_association SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Actual_Node) = SP_Symbols.expression or 
else Syntax_Node_Type (Node => Actual_Node) = SP_Symbols.named_argument_association or else Syntax_Node_Type (Node => Actual_Node) = SP_Symbols.positional_argument_association, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Actual_Node = expression OR named_argument_association OR " & "positional_argument_association in Next_Err_Pos"); end Next_Err_Pos; ----------------------------------------------------- -- This will check a given actual parameter (Sym at Position) -- against all other actual parameters. Returns true if -- the given symbol is a subcomponent of another or if another -- parameter is a subcomponent of it. -- -- The check works by looking at the string representation -- of any two actual parameters and identifying the common -- prefix. We then check if this common prefix is identical -- to one or both of the given parameters, if it is they -- necessarily overlap. This is best shown with a few -- examples: -- -- Parameters Common Prefix Overlap -- R.A, R.B | R. | No -- R, R.B | R | Yes, because R = R -- R, R | R | Yes, because R = R and R = R -- A, B | - | No -- A.B.C, A.D | A. | No -- -- The symbol used to represent the parameters is Other_Symbol -- in the record returned by WalkExpression. procedure Check_Overlap_Of_Exported_Parameter (Position : Positive; Proc_Sym : Dictionary.Symbol; Err_Pos : LexTokenManager.Token_Position; Actual_Parameter_List, Formal_Position_List : SimpleLists.SimpleList; Abstraction : Dictionary.Abstractions) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# Actual_Parameter_List, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Err_Pos, --# Formal_Position_List, --# LexTokenManager.State, --# Position, --# Proc_Sym, --# SPARK_IO.File_Sys; is Current_Actual, Actual_Sym : Dictionary.Symbol; Sym_Str_A, Sym_Str_B : E_Strings.T; Prefix_Str_A, Prefix_Str_B : E_Strings.T; Len_A, Len_B : E_Strings.Lengths; Common_Prefix_Length : E_Strings.Lengths; Overlaps : Boolean; Current_Formal : Natural; ----------------------------------------------------- function Formal_Number_Of_Actual_Parameter_At (Position : Positive; Formal_Position_List : SimpleLists.SimpleList) return Natural is OK : Boolean; Pos_No : Natural; begin SimpleLists.GetItem (Formal_Position_List, Position, --to get Pos_No, OK); if not OK then Pos_No := 0; end if; return Pos_No; end Formal_Number_Of_Actual_Parameter_At; begin -- Check_Overlap_Of_Exported_Parameter Actual_Sym := Actual_Parameter_At (Position => Position, Actual_Parameter_List => Actual_Parameter_List); if not Dictionary.Is_Null_Symbol (Actual_Sym) then -- Cache the name and length of then given parameter Prefix_Str_A := Dictionary.GetAnyPrefixNeeded (Sym => Actual_Sym, Scope => Dictionary.GlobalScope, Separator => "."); Sym_Str_A := Dictionary.GenerateSimpleName (Item => Actual_Sym, Separator => "."); Len_A := E_Strings.Get_Length (Sym_Str_A); -- No overlap until shown otherwise Overlaps := False; for Current_Pos in Positive range 1 .. 
SimpleLists.NumberOfItems (Actual_Parameter_List) --# assert SimpleLists.NumberOfItems (Actual_Parameter_List) in Natural and --# Actual_Parameter_List = Actual_Parameter_List%; loop Current_Actual := Actual_Parameter_At (Position => Current_Pos, Actual_Parameter_List => Actual_Parameter_List); Current_Formal := Formal_Number_Of_Actual_Parameter_At (Position => Current_Pos, Formal_Position_List => Formal_Position_List); if not Dictionary.Is_Null_Symbol (Current_Actual) and then Position /= Current_Pos and then Current_Formal /= 0 and then Dictionary.IsExport (Abstraction, Proc_Sym, Dictionary.GetSubprogramParameter (Proc_Sym, Current_Formal)) then Prefix_Str_B := Dictionary.GetAnyPrefixNeeded (Sym => Current_Actual, Scope => Dictionary.GlobalScope, Separator => "."); Sym_Str_B := Dictionary.GenerateSimpleName (Item => Current_Actual, Separator => "."); Len_B := E_Strings.Get_Length (Sym_Str_B); Common_Prefix_Length := E_Strings.Get_Dotted_Common_Prefix_Length (Sym_Str_A, Sym_Str_B); -- If the lengths of a string are equal to the length of the -- common prefix, they must be equal. Overlaps := E_Strings.Eq_String (E_Str1 => Prefix_Str_A, E_Str2 => Prefix_Str_B) and then (Len_A = Common_Prefix_Length or else Len_B = Common_Prefix_Length); end if; -- It's enough to find a single instance exit when Overlaps; end loop; if Overlaps then ErrorHandler.Semantic_Error (Err_Num => 165, Reference => ErrorHandler.No_Reference, Position => Err_Pos, Id_Str => LexTokenManager.Null_String); end if; end if; end Check_Overlap_Of_Exported_Parameter; begin -- Check_Parameter_Overlap -- Work out a position to include in any error messages. The -- first proper expression or simple_name will do. Actual_Node := Child_Node (Current_Node => Arg_List_Node); while Syntax_Node_Type (Node => Actual_Node) /= SP_Symbols.simple_name and then Syntax_Node_Type (Node => Actual_Node) /= SP_Symbols.expression loop -- ASSUME Actual_Node = named_argument_association OR positional_argument_association SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Actual_Node) = SP_Symbols.named_argument_association or else Syntax_Node_Type (Node => Actual_Node) = SP_Symbols.positional_argument_association, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Actual_Node = named_argument_association OR positional_argument_association OR " & "in Check_Parameter_Overlap"); --# assert Syntax_Node_Type (Actual_Node, STree.Table) = SP_Symbols.named_argument_association or --# Syntax_Node_Type (Actual_Node, STree.Table) = SP_Symbols.positional_argument_association; Actual_Node := Child_Node (Current_Node => Actual_Node); end loop; -- ASSUME Actual_Node = simple_name OR expression if Syntax_Node_Type (Node => Actual_Node) = SP_Symbols.simple_name then -- ASSUME Actual_Node = simple_name -- named association Actual_Node := Next_Sibling (Current_Node => Actual_Node); elsif Syntax_Node_Type (Node => Actual_Node) /= SP_Symbols.expression then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Actual_Node = simple_name OR expression in Check_Parameter_Overlap"); end if; -- ASSUME Actual_Node = expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Actual_Node) = SP_Symbols.expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Actual_Node = expression in Check_Parameter_Overlap"); Err_Pos := Node_Position (Node => Actual_Node); -- Search for parameter overlap. for I in Positive range 1 .. 
SimpleLists.NumberOfItems (Actual_Parameter_List) loop --# assert SimpleLists.NumberOfItems (Actual_Parameter_List) in Natural and --# Actual_Parameter_List = Actual_Parameter_List% and --# (Syntax_Node_Type (Actual_Node, STree.Table) = SP_Symbols.expression or --# Syntax_Node_Type (Actual_Node, STree.Table) = SP_Symbols.named_argument_association or --# Syntax_Node_Type (Actual_Node, STree.Table) = SP_Symbols.positional_argument_association); Check_Overlap_Of_Exported_Parameter (Position => I, Proc_Sym => Proc_Sym, Err_Pos => Err_Pos, Actual_Parameter_List => Actual_Parameter_List, Formal_Position_List => Formal_Position_List, Abstraction => Abstraction); Next_Err_Pos (Actual_Node => Actual_Node, Err_Pos => Err_Pos); end loop; end Check_Parameter_Overlap; --------------------------------------------------------------- procedure Check_Callable (Node_Pos : in LexTokenManager.Token_Position; Proc_Sym : in Dictionary.Symbol; Dotted : in Boolean; Scope : in Dictionary.Scopes; OK : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Dotted, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# Proc_Sym, --# Scope, --# SPARK_IO.File_Sys & --# OK from Dictionary.Dict, --# Dotted, --# Proc_Sym, --# Scope; is begin if Dictionary.IsCallable (Proc_Sym, Dotted, Scope) then OK := True; else OK := False; ErrorHandler.Semantic_Error (Err_Num => 163, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (Proc_Sym)); end if; end Check_Callable; --------------------------------------------------------------- procedure Check_Not_Local_Procedure_Call_From_Protected_Function (Node_Pos : in LexTokenManager.Token_Position; Proc_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; OK : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# Proc_Sym, --# Scope, --# SPARK_IO.File_Sys & --# OK from Dictionary.Dict, --# Proc_Sym, --# Scope; is Calling_Sym, Calling_Region : Dictionary.Symbol; Calling_Scope : Dictionary.Scopes; begin OK := True; -- get symbol of caller Calling_Sym := Dictionary.GetEnclosingCompilationUnit (Scope); -- get package or protected type where caller is declared Calling_Scope := Dictionary.GetScope (Calling_Sym); loop Calling_Region := Dictionary.GetRegion (Calling_Scope); exit when Dictionary.IsProtectedType (TheSymbol => Calling_Region) or else Dictionary.IsPackage (Calling_Region); Calling_Scope := Dictionary.GetEnclosingScope (Calling_Scope); end loop; if Dictionary.IsFunction (Calling_Sym) and then Dictionary.IsProtectedType (TheSymbol => Calling_Region) and then Dictionary.IsTypeMark (Dictionary.GetRegion (Dictionary.GetScope (Proc_Sym))) and then Dictionary.Types_Are_Equal (Left_Symbol => Dictionary.GetRegion (Dictionary.GetScope (Proc_Sym)), Right_Symbol => Calling_Region, Full_Range_Subtype => False) then OK := False; ErrorHandler.Semantic_Error (Err_Num => 986, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (Proc_Sym)); end 
if; end Check_Not_Local_Procedure_Call_From_Protected_Function; --------------------------------------------------------------- procedure Check_Property_Consistency (Node_Pos : in LexTokenManager.Token_Position; Prefix_Symbol, Proc_Sym : in Dictionary.Symbol; Actual_Parameter_List : in SimpleLists.SimpleList; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# Actual_Parameter_List, --# CommandLineData.Content, --# Prefix_Symbol, --# Proc_Sym, --# Scope & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Actual_Parameter_List, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# Prefix_Symbol, --# Proc_Sym, --# Scope, --# SPARK_IO.File_Sys; is The_PO_Or_SO_Sym : Dictionary.Symbol; Enc_Unit_Sym : Dictionary.Symbol; begin Enc_Unit_Sym := Dictionary.GetEnclosingCompilationUnit (Scope); -- Check that if the procedure has a delay property then there is a -- delay property for the enclosing unit (if it is a procedure). -- Also mark the enclosing procedure as having come across a -- potentially delaying operation. if Dictionary.HasDelayProperty (Proc_Sym) and then Dictionary.Is_Subprogram (Enc_Unit_Sym) then if Dictionary.HasDelayProperty (Enc_Unit_Sym) then Dictionary.MarkAccountsForDelay (Enc_Unit_Sym); else ErrorHandler.Semantic_Error (Err_Num => 912, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (Proc_Sym)); end if; end if; -- Check that if this is a call to Suspend_Until_True then the actual parameter -- appears in the suspends list for the enclosing procedure. Also mark the -- enclosing unit as having seen the suspend until true call. if CommandLineData.Ravenscar_Selected and then Dictionary.IsPredefinedSuspendUntilTrueOperation (Proc_Sym) then The_PO_Or_SO_Sym := Actual_Parameter_At (Position => 1, Actual_Parameter_List => Actual_Parameter_List); if Dictionary.SuspendsOn (TheTaskOrProc => Enc_Unit_Sym, ThePOorSO => The_PO_Or_SO_Sym) then Dictionary.MarkAccountsForSuspendsListItem (TheTaskOrProc => Enc_Unit_Sym, ThePOorSO => The_PO_Or_SO_Sym); else -- The suspension object does not appear in the suspends list for the -- enclosing unit. ErrorHandler.Semantic_Error_Sym (Err_Num => 910, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Sym => The_PO_Or_SO_Sym, Scope => Scope); end if; end if; -- Check that if this is an entry then the protected object appears in the -- suspends list for the enclosing procedure. Also mark the enclosing unit -- as having seen the entry. if Dictionary.IsEntry (Proc_Sym) then if Dictionary.SuspendsOn (TheTaskOrProc => Enc_Unit_Sym, ThePOorSO => Prefix_Symbol) then Dictionary.MarkAccountsForSuspendsListItem (TheTaskOrProc => Enc_Unit_Sym, ThePOorSO => Prefix_Symbol); else -- The protected object does not appear in the entry annotation for the enclosing -- unit. ErrorHandler.Semantic_Error_Sym (Err_Num => 910, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Sym => Prefix_Symbol, Scope => Scope); end if; end if; -- Check that all the items in the suspends list for the procedure appear -- in the suspends list for the enclosing unit. 
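-- Illustrative example (hypothetical names): if the called procedure P
-- lists suspension or protected objects S1 and S2 in its own suspends
-- list, then the enclosing task or procedure must list S1 and S2 as
-- well; if any item fails to propagate, error 913 is reported against
-- the call below.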
if not Dictionary.SuspendsListIsPropagated (FromProcedure => Proc_Sym, ToTaskOrProc => Enc_Unit_Sym) then -- The operation being called contains entries that are not present in the -- entry list of the calling procedure. ErrorHandler.Semantic_Error (Err_Num => 913, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (Proc_Sym)); end if; -- Mark the entries declared in the called procedure as having been seen by the -- enclosing procedure. Any entries not propagated are ignored. Dictionary.MarkAccountsForSuspendsListItems (TheTaskOrProc => Enc_Unit_Sym, TheItemsInProcedure => Proc_Sym); end Check_Property_Consistency; --------------------------------------------------------------- -- In flow=auto we need to guard against a subprogram with a derives annotation calling -- a procedure without a derives annotation. In such cases, the body of the caller is -- subject to full information flow analysis, but using the synthesised dependency of -- the callee. This can give rise to 'false alarm' flow errors or, worse, absence of -- flow errors which could only be detected if there was a derives anno for the callee. procedure Check_For_Subprog_With_Derives_Calling_Proc_Without_Derives (Proc_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Node : in STree.SyntaxNode; OK : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Proc_Sym, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# OK from CommandLineData.Content, --# Dictionary.Dict, --# Proc_Sym, --# Scope; is Calling_Sym : Dictionary.Symbol; begin OK := True; -- Get the symbol for the caller (i.e. the current subprogram) Calling_Sym := Dictionary.GetEnclosingCompilationUnit (Scope); -- This check is only relevant for flow=auto. In other modes it is always OK. -- Is the current subprogram a procedure, task or entry with a derives, or a function? If not then we are OK. -- If caller is a function or has a derives, does callee have a derives? If it does then we are OK, -- but if it doesn't then raise an error. if CommandLineData.Content.Flow_Option = CommandLineData.Auto_Flow and then Dictionary.GetHasDerivesAnnotation (Calling_Sym) and then not Dictionary.GetHasDerivesAnnotation (Proc_Sym) then -- If a function is calling a procedure without a derives anno the analysis will generally be -- correct but if, for example, the procedure derives null from one of its exports the analysis -- can be misleading. So we raise a warning if the caller is a function. If the caller is a -- procedure then the analysis cannot proceed - raise an error.
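-- Illustrative example (hypothetical units): given a caller with a
-- dependency relation, e.g.
--    procedure Caller (X : out Integer; Y : in Integer);
--    --# derives X from Y;
-- a call from Caller to a procedure that carries only a global/moded
-- annotation (no derives) is rejected below with error 176 under
-- flow=auto, whereas a calling function only attracts warning 315 and
-- analysis continues.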
if Dictionary.IsFunction (Calling_Sym) then ErrorHandler.Semantic_Warning_Sym (Err_Num => 315, Position => Node_Position (Node => Node), Sym => Proc_Sym, Scope => Scope); else OK := False; ErrorHandler.Semantic_Error_Sym2 (Err_Num => 176, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Sym => Proc_Sym, Sym2 => Calling_Sym, Scope => Scope); end if; end if; end Check_For_Subprog_With_Derives_Calling_Proc_Without_Derives; begin -- Wf_Proc_Call if In_Package_Initialization (Scope => Scope) then ErrorHandler.Semantic_Error (Err_Num => 34, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); else -- not in package initialization Param_Table := Param_Tables'(Param_Index => Param_Info'(0, Dictionary.NullSymbol)); Check_Procedure_Name (Node => Node, Scope => Scope, Root_Parameter_Sym => Root_Parameter_Sym, Prefix_Symbol => Prefix_Symbol, Proc_Sym => Proc_Sym, Tagged_Parameter_Sym => Tagged_Parameter_Sym, Name_Node => Name_Node, Dotted => Dotted, Valid => Valid); -- above call will also have set Tagged_Parameter_Sym which if not null is -- the type of the formal parameter to which an actual parameter of some -- extended type will have to be converted if Valid then -- determine which set of annotation to use Abstraction := Dictionary.GetAbstraction (Proc_Sym, Scope); if not Dictionary.SubprogramSignatureIsWellformed (Abstraction, Proc_Sym) then ErrorHandler.Semantic_Warning (Err_Num => 399, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; if Child_Node (Current_Node => Node) /= Name_Node and then Child_Node (Current_Node => Child_Node (Current_Node => Node)) /= Name_Node then Valid := False; ErrorHandler.Semantic_Error (Err_Num => 34, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; end if; --# assert Syntax_Node_Type (Node, STree.Table) = SP_Symbols.procedure_call_statement and --# Syntax_Node_Type (Name_Node, STree.Table) = SP_Symbols.name and --# (Dictionary.Is_Null_Symbol (Proc_Sym) or Dictionary.IsProcedure (Proc_Sym, Dictionary.Dict)) and --# STree.Table = STree.Table~; if Valid then Check_Callable (Node_Pos => Node_Position (Node => Node), Proc_Sym => Proc_Sym, Dotted => Dotted, Scope => Scope, OK => Valid); end if; --# assert Syntax_Node_Type (Node, STree.Table) = SP_Symbols.procedure_call_statement and --# Syntax_Node_Type (Name_Node, STree.Table) = SP_Symbols.name and --# (Dictionary.Is_Null_Symbol (Proc_Sym) or Dictionary.IsProcedure (Proc_Sym, Dictionary.Dict)) and --# STree.Table = STree.Table~; if Valid then Check_Not_Local_Procedure_Call_From_Protected_Function (Node_Pos => Node_Position (Node => Node), Proc_Sym => Proc_Sym, Scope => Scope, OK => Valid); end if; --# assert Syntax_Node_Type (Node, STree.Table) = SP_Symbols.procedure_call_statement and --# Syntax_Node_Type (Name_Node, STree.Table) = SP_Symbols.name and --# (Dictionary.Is_Null_Symbol (Proc_Sym) or Dictionary.IsProcedure (Proc_Sym, Dictionary.Dict)) and --# STree.Table = STree.Table~; if Valid and then CommandLineData.Ravenscar_Selected and then Dictionary.IsOrIsInProtectedScope (Scope) and then Dictionary.SubprogramMayBlock (Proc_Sym) then Valid := False; ErrorHandler.Semantic_Error (Err_Num => 917, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Dictionary.GetSimpleName (Proc_Sym)); end if; --# assert Syntax_Node_Type (Node, STree.Table) = 
SP_Symbols.procedure_call_statement and --# Syntax_Node_Type (Name_Node, STree.Table) = SP_Symbols.name and --# (Dictionary.Is_Null_Symbol (Proc_Sym) or Dictionary.IsProcedure (Proc_Sym, Dictionary.Dict)) and --# STree.Table = STree.Table~; if Valid and then CommandLineData.Ravenscar_Selected and then Dictionary.IsFunction (Dictionary.GetRegion (Scope)) and then Dictionary.SubprogramMayBlock (Proc_Sym) then Valid := False; ErrorHandler.Semantic_Error (Err_Num => 923, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Dictionary.GetSimpleName (Proc_Sym)); end if; --# assert Syntax_Node_Type (Node, STree.Table) = SP_Symbols.procedure_call_statement and --# Syntax_Node_Type (Name_Node, STree.Table) = SP_Symbols.name and --# (Dictionary.Is_Null_Symbol (Proc_Sym) or Dictionary.IsProcedure (Proc_Sym, Dictionary.Dict)) and --# STree.Table = STree.Table~; if Valid and then Dictionary.IsInterruptHandler (Proc_Sym) then Valid := False; ErrorHandler.Semantic_Error_Sym (Err_Num => 952, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Sym => Proc_Sym, Scope => Scope); end if; --# assert Syntax_Node_Type (Node, STree.Table) = SP_Symbols.procedure_call_statement and --# Syntax_Node_Type (Name_Node, STree.Table) = SP_Symbols.name and --# (Dictionary.Is_Null_Symbol (Proc_Sym) or Dictionary.IsProcedure (Proc_Sym, Dictionary.Dict)) and --# STree.Table = STree.Table~; if Valid then Check_For_Subprog_With_Derives_Calling_Proc_Without_Derives (Proc_Sym => Proc_Sym, Scope => Scope, Node => Node, OK => Valid); end if; --# assert Syntax_Node_Type (Node, STree.Table) = SP_Symbols.procedure_call_statement and --# Syntax_Node_Type (Name_Node, STree.Table) = SP_Symbols.name and --# (Dictionary.Is_Null_Symbol (Proc_Sym) or Dictionary.IsProcedure (Proc_Sym, Dictionary.Dict)) and --# STree.Table = STree.Table~; if Valid then SimpleLists.Init (Actual_Parameter_List); SimpleLists.Init (Formal_Position_List); Check_Globals_Are_Visible (Node_Pos => Node_Position (Node => Node), Prefix_Symbol => Prefix_Symbol, Proc_Sym => Proc_Sym, Scope => Scope, Abstraction => Abstraction); Arg_List_Node := Next_Sibling (Current_Node => Name_Node); -- ASSUME Arg_List_Node = name_argument_list OR NULL if Arg_List_Node = STree.NullNode then -- ASSUME Arg_List_Node = NULL -- parameterless procedure call if Dictionary.GetNumberOfSubprogramParameters (Proc_Sym) /= 0 then ErrorHandler.Semantic_Error (Err_Num => 3, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Dictionary.GetSimpleName (Proc_Sym)); end if; elsif Syntax_Node_Type (Node => Arg_List_Node) = SP_Symbols.name_argument_list then -- ASSUME Arg_List_Node = name_argument_list -- parameter node found if Dictionary.GetNumberOfSubprogramParameters (Proc_Sym) = 0 then ErrorHandler.Semantic_Error (Err_Num => 3, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Arg_List_Node), Id_Str => Dictionary.GetSimpleName (Proc_Sym)); else Arg_List_Node := Child_Node (Current_Node => Arg_List_Node); -- ASSUME Arg_List_Node = named_argument_association OR positional_argument_association if Syntax_Node_Type (Node => Arg_List_Node) = SP_Symbols.named_argument_association then -- ASSUME Arg_List_Node = named_argument_association Handle_Named_Association (Named_Argument_Assoc_Node => Arg_List_Node, Root_Parameter_Sym => Root_Parameter_Sym, Proc_Sym => Proc_Sym, Tagged_Parameter_Sym => Tagged_Parameter_Sym, Scope => Scope, Abstraction => Abstraction, Actual_Parameter_List 
=> Actual_Parameter_List, Formal_Position_List => Formal_Position_List, Param_Table => Param_Table, Component_Data => Component_Data); -- Abstraction defined since Valid elsif Syntax_Node_Type (Node => Arg_List_Node) = SP_Symbols.positional_argument_association then -- ASSUME Arg_List_Node = positional_argument_association Handle_Positional_Association (Positional_Argument_Assoc_Node => Arg_List_Node, Root_Parameter_Sym => Root_Parameter_Sym, Proc_Sym => Proc_Sym, Tagged_Parameter_Sym => Tagged_Parameter_Sym, Scope => Scope, Abstraction => Abstraction, Actual_Parameter_List => Actual_Parameter_List, Formal_Position_List => Formal_Position_List, Param_Table => Param_Table, Component_Data => Component_Data); -- Abstraction defined since Valid else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Arg_List_Node = named_argument_association OR " & "positional_argument_association in Wf_Proc_Call"); end if; Check_Parameter_Overlap (Arg_List_Node => Arg_List_Node, Proc_Sym => Proc_Sym, Actual_Parameter_List => Actual_Parameter_List, Formal_Position_List => Formal_Position_List, Abstraction => Abstraction); -- Abstraction defined since Valid end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Arg_List_Node = name_argument_list OR NULL in Wf_Proc_Call"); end if; Build_IO_Relation (Node => Node, Scope => Scope, Root_Parameter_Sym => Root_Parameter_Sym, Prefix_Symbol => Prefix_Symbol, Proc_Sym => Proc_Sym, Param_Table => Param_Table, Abstraction => Abstraction, Component_Data => Component_Data, Table => Table); Check_Property_Consistency (Node_Pos => Node_Position (Node => Node), Prefix_Symbol => Prefix_Symbol, Proc_Sym => Proc_Sym, Actual_Parameter_List => Actual_Parameter_List, Scope => Scope); end if; SystemErrors.RT_Assert (C => Dictionary.Is_Null_Symbol (Proc_Sym) or else Dictionary.IsProcedure (Proc_Sym), Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Proc_Sym to be a procedure in Wf_Proc_Call"); --# assert Syntax_Node_Type (Node, STree.Table) = SP_Symbols.procedure_call_statement and --# (Dictionary.Is_Null_Symbol (Proc_Sym) or Dictionary.IsProcedure (Proc_Sym, Dictionary.Dict)) and --# STree.Table = STree.Table~; -- If we have ended calling an inherited root operation then we need to tell -- the VCG which subprogram actually got called. We know that a root op has -- been called if Tagged_Parameter_Sym is not null. In that case we seed the -- procedure_call_statement syntax node with the symbol of the root procedure -- called. if not Dictionary.Is_Null_Symbol (Tagged_Parameter_Sym) then STree.Add_Node_Symbol (Node => Node, Sym => Proc_Sym); -- If we are not dealing with an inherited root operation then we MAY be dealing with -- a protected operation call. Such calls are annotated in terms of their TYPE but we -- will want to substitute the particular PO instance of the type. We can pass this -- prefix (PO in this case) to the VCG via the syntax tree. Note that previous and -- current if clauses are mutually exclusive - there are no protected tagged ops. 
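-- Illustrative example (hypothetical names): for a call PO.Push (X),
-- where PO is a protected own variable of protected type PT, the
-- callee's annotations are written in terms of PT; attaching PO's
-- symbol to the call node lets the VCG substitute the particular
-- instance PO.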
elsif not Dictionary.Is_Null_Symbol (Prefix_Symbol) and then Dictionary.IsOwnVariable (Prefix_Symbol) and then Dictionary.GetOwnVariableProtected (Prefix_Symbol) then STree.Add_Node_Symbol (Node => Node, Sym => Prefix_Symbol); end if; end if; -- in package initialization end Wf_Proc_Call; -- Abstraction always defined if used spark-2012.0.deb/examiner/cells-utility.adb0000644000175000017500000003053311753202335017527 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Dictionary; with Cell_Storage; with SP_Symbols; with SystemErrors; use type Dictionary.Symbol; package body Cells.Utility is ------------------------------------------------------------------------------ -- General utility ------------------------------------------------------------------------------ procedure Create_Binary_Op_Cell (VCG_Heap : in out Cells.Heap_Record; Left : in Cells.Cell; Op : in SP_Symbols.SP_Symbol; Right : in Cells.Cell; Result : out Cells.Cell) is begin Cells.Create_Cell (VCG_Heap, Result); Cells.Set_Kind (VCG_Heap, Result, Cell_Storage.Op); Cells.Set_Op_Symbol (VCG_Heap, Result, Op); Cells.Set_A_Ptr (VCG_Heap, Result, Left); Cells.Set_B_Ptr (VCG_Heap, Result, Right); end Create_Binary_Op_Cell; ------------------------------------------------------------------------------ -- Booleans ------------------------------------------------------------------------------ function Is_True (VCG_Heap : in Cells.Heap_Record; C : in Cells.Cell) return Boolean is begin return Cells.Get_Kind (VCG_Heap, C) = Cell_Storage.Named_Const and then Cells.Get_Symbol_Value (VCG_Heap, C) = Dictionary.GetTrue; end Is_True; function Is_False (VCG_Heap : in Cells.Heap_Record; C : in Cells.Cell) return Boolean is begin return Cells.Get_Kind (VCG_Heap, C) = Cell_Storage.Named_Const and then Cells.Get_Symbol_Value (VCG_Heap, C) = Dictionary.GetFalse; end Is_False; procedure Create_Bool (VCG_Heap : in out Cells.Heap_Record; Value : in Boolean; C : out Cells.Cell) is begin Cells.Create_Cell (VCG_Heap, C); Cells.Set_Kind (VCG_Heap, C, Cell_Storage.Named_Const); if Value then Cells.Set_Symbol_Value (VCG_Heap, C, Dictionary.GetTrue); else Cells.Set_Symbol_Value (VCG_Heap, C, Dictionary.GetFalse); end if; end Create_Bool; procedure Create_Not (VCG_Heap : in out Cells.Heap_Record; Predicate : in Cells.Cell; Negation : out Cells.Cell) is begin Cells.Create_Cell (VCG_Heap, Negation); Cells.Set_Kind (VCG_Heap, Negation, Cell_Storage.Op); Cells.Set_Op_Symbol (VCG_Heap, Negation, SP_Symbols.RWnot); Cells.Set_B_Ptr (VCG_Heap, Negation, Predicate); end Create_Not; procedure Create_And (VCG_Heap : in out Cells.Heap_Record; Left : 
in Cells.Cell; Right : in Cells.Cell; Conjunct : out Cells.Cell) is begin Create_Binary_Op_Cell (VCG_Heap, Left, SP_Symbols.RWand, Right, Conjunct); end Create_And; procedure Create_Implies (VCG_Heap : in out Cells.Heap_Record; Antecedent : in Cells.Cell; Consequent : in Cells.Cell; Implication : out Cells.Cell) is begin Create_Binary_Op_Cell (VCG_Heap, Antecedent, SP_Symbols.implies, Consequent, Implication); end Create_Implies; procedure Simplify (VCG_Heap : in out Cells.Heap_Record; C : in out Cells.Cell) is Left, Right : Cells.Cell; begin -- In general, we don't clean up here as we have no idea what -- other things could be pointing to sub-parts of this DAG. if Cells.Get_Kind (VCG_Heap, C) = Cell_Storage.Op then Left := Cells.Get_A_Ptr (VCG_Heap, C); Right := Cells.Get_B_Ptr (VCG_Heap, C); case Cells.Get_Op_Symbol (VCG_Heap, C) is when SP_Symbols.RWand => -- True /\ X ==> X if Is_True (VCG_Heap, Left) then C := Right; elsif Is_True (VCG_Heap, Right) then C := Left; -- False /\ X ==> False elsif Is_False (VCG_Heap, Left) then C := Left; elsif Is_False (VCG_Heap, Right) then C := Right; end if; when SP_Symbols.implies => -- True -> X ==> X if Is_True (VCG_Heap, Left) then C := Right; -- False -> X ==> True elsif Is_False (VCG_Heap, Left) then Create_Bool (VCG_Heap, True, C); -- X -> True ==> True elsif Is_True (VCG_Heap, Right) then C := Right; -- X -> False ==> not X elsif Is_False (VCG_Heap, Right) then Create_Not (VCG_Heap, Left, C); end if; when SP_Symbols.is_equivalent_to => -- True <-> X ==> X if Is_True (VCG_Heap, Left) then C := Right; elsif Is_True (VCG_Heap, Right) then C := Left; -- False <-> X ==> not X elsif Is_False (VCG_Heap, Left) then Create_Not (VCG_Heap, Right, C); elsif Is_False (VCG_Heap, Right) then Create_Not (VCG_Heap, Left, C); end if; when others => null; end case; end if; end Simplify; ------------------------------------------------------------------------------ -- Utility ------------------------------------------------------------------------------ procedure Conjoin (VCG_Heap : in out Cells.Heap_Record; New_Term : in Cells.Cell; Conjunct : in out Cells.Cell) is New_Conjunct : Cells.Cell; begin if Cells.Is_Null_Cell (Conjunct) then Conjunct := New_Term; elsif Is_True (VCG_Heap, New_Term) then null; elsif Is_True (VCG_Heap, Conjunct) then Cells.Dispose_Of_Cell (VCG_Heap, Conjunct); Conjunct := New_Term; else Create_And (VCG_Heap, Conjunct, New_Term, New_Conjunct); Conjunct := New_Conjunct; end if; end Conjoin; ------------------------------------------------------------------------------ -- Types (general) ------------------------------------------------------------------------------ procedure Create_Type_Attribute (VCG_Heap : in out Cells.Heap_Record; The_Type : in Dictionary.Symbol; The_Attribute : in Type_Attribute; Result : out Cells.Cell) is Attribute_Cell : Cells.Cell; Type_Cell : Cells.Cell; begin -- We need a cell for the actual attribute. Cells.Create_Cell (VCG_Heap, Attribute_Cell); Cells.Set_Kind (VCG_Heap, Attribute_Cell, Cell_Storage.Attrib_Value); case The_Attribute is when Tick_First => Cells.Set_Lex_Str (VCG_Heap, Attribute_Cell, LexTokenManager.First_Token); when Tick_Last => Cells.Set_Lex_Str (VCG_Heap, Attribute_Cell, LexTokenManager.Last_Token); end case; -- We need a cell for the type symbol. 
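-- Sketch of the DAG built here (illustrative): for The_Type = Integer
-- and The_Attribute = Tick_First the result is an Op cell for the
-- apostrophe whose A_Ptr names the type (a Fixed_Var cell, or an
-- Unconstrained_Attribute_Prefix cell when the "type" is the implicit
-- constraint of an unconstrained array formal) and whose B_Ptr is the
-- Attrib_Value cell holding "first", i.e. the expression Integer'First.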
Cells.Create_Cell (VCG_Heap, Type_Cell); if Dictionary.IsParameterConstraint (The_Type) then Cells.Set_Kind (VCG_Heap, Type_Cell, Cell_Storage.Unconstrained_Attribute_Prefix); else Cells.Set_Kind (VCG_Heap, Type_Cell, Cell_Storage.Fixed_Var); end if; Cells.Set_Symbol_Value (VCG_Heap, Type_Cell, The_Type); -- Assemble the result cell. Cells.Create_Cell (VCG_Heap, Result); Cells.Set_Kind (VCG_Heap, Result, Cell_Storage.Op); Cells.Set_Op_Symbol (VCG_Heap, Result, SP_Symbols.apostrophe); Cells.Set_A_Ptr (VCG_Heap, Result, Type_Cell); Cells.Set_B_Ptr (VCG_Heap, Result, Attribute_Cell); end Create_Type_Attribute; ------------------------------------------------------------------------------ -- Records ------------------------------------------------------------------------------ procedure Create_Record_Access (VCG_Heap : in out Cells.Heap_Record; The_Record : in Cells.Cell; The_Component : in Dictionary.Symbol; The_Field : out Cells.Cell) is begin Cells.Create_Cell (VCG_Heap, The_Field); Cells.Set_Kind (VCG_Heap, The_Field, Cell_Storage.Field_Access_Function); Cells.Set_Symbol_Value (VCG_Heap, The_Field, The_Component); Cells.Set_Lex_Str (VCG_Heap, The_Field, Dictionary.GetSimpleName (The_Component)); Cells.Set_B_Ptr (VCG_Heap, The_Field, The_Record); end Create_Record_Access; ------------------------------------------------------------------------------ -- Arrays ------------------------------------------------------------------------------ procedure Create_Array_Access (VCG_Heap : in out Cells.Heap_Record; The_Array : in Cells.Cell; The_Index : in Cells.Cell; The_Element : out Cells.Cell) is List_Cell : Cells.Cell; Comma_Cell : Cells.Cell; begin -- Create "[" The_Index "]" Cells.Create_Cell (VCG_Heap, List_Cell); Cells.Set_Kind (VCG_Heap, List_Cell, Cell_Storage.List_Function); Cells.Set_B_Ptr (VCG_Heap, List_Cell, The_Index); -- Create The_Array "," List_Cell Create_Binary_Op_Cell (VCG_Heap, The_Array, SP_Symbols.comma, List_Cell, Comma_Cell); -- Create the element function call Cells.Create_Cell (VCG_Heap, The_Element); Cells.Set_Kind (VCG_Heap, The_Element, Cell_Storage.Element_Function); Cells.Set_B_Ptr (VCG_Heap, The_Element, Comma_Cell); end Create_Array_Access; ------------------------------------------------------------------------------ -- Stashing things in cells that we probably shouldn't. ------------------------------------------------------------------------------ procedure Create_Scope_Cell (VCG_Heap : in out Cells.Heap_Record; The_Scope : in Dictionary.Scopes; The_Cell : out Cells.Cell) is begin Cells.Create_Cell (VCG_Heap, The_Cell); Cells.Set_Kind (VCG_Heap, The_Cell, Cell_Storage.Internal_Scope); -- It may be tempting to store this using set_symbol, but this -- shares the field for the natural... 
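-- Sketch of the encoding (for clarity): the region symbol goes in the
-- associated-variable field and Visibility'Pos of the visibility in the
-- natural field; Scope_Cell_Get_Scope below inverts this pair using
-- Dictionary.Set_Visibility and Visibility'Val.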
Cells.Set_Assoc_Var (VCG_Heap, The_Cell, Dictionary.GetRegion (The_Scope)); Cells.Set_Natural_Value (VCG_Heap, The_Cell, Dictionary.Visibility'Pos (Dictionary.Get_Visibility (The_Scope))); end Create_Scope_Cell; function Scope_Cell_Get_Scope (VCG_Heap : in Cells.Heap_Record; Scope_Cell : in Cells.Cell) return Dictionary.Scopes is N : Natural; begin SystemErrors.RT_Assert (C => Cells.Get_Kind (VCG_Heap, Scope_Cell) = Cell_Storage.Internal_Scope, Sys_Err => SystemErrors.VCG_Heap_Is_Corrupted, Msg => "Cells.Utility.Scope_Cell_Get_Scope: Expected an `Internal_Scope' cell."); pragma Warnings (Off); N := Cells.Get_Natural_Value (VCG_Heap, Scope_Cell); SystemErrors.RT_Assert (C => (N >= Dictionary.Visibility'Pos (Dictionary.Visibility'First) and N <= Dictionary.Visibility'Pos (Dictionary.Visibility'Last)), Sys_Err => SystemErrors.VCG_Heap_Is_Corrupted, Msg => "Cells.Utility.Scope_Cell_Get_Scope: Expected a sane value for visibility."); pragma Warnings (On); return Dictionary.Set_Visibility (The_Visibility => Dictionary.Visibility'Val (N), The_Unit => Cells.Get_Assoc_Var (VCG_Heap, Scope_Cell)); end Scope_Cell_Get_Scope; end Cells.Utility; spark-2012.0.deb/examiner/dictionary-add_subprogram_parameter.adb0000644000175000017500000001574711753202336024133 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Dictionary) procedure Add_Subprogram_Parameter (Name : in LexTokenManager.Lex_String; The_Subprogram : in RawDict.Subprogram_Info_Ref; Type_Mark : in RawDict.Type_Info_Ref; Type_Reference : in Location; Mode : in Modes; Comp_Unit : in ContextManager.UnitDescriptors; Specification : in Location) is The_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref; The_Previous_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref; -------------------------------------------------------------------------------- procedure WriteSubprogramParameterSpecification (The_Subprogram_Parameter : in RawDict.Subprogram_Parameter_Info_Ref; Specification : in Location) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dict, --# LexTokenManager.State, --# Specification, --# The_Subprogram_Parameter; is begin if SPARK_IO.Is_Open (Dict.TemporaryFile) then Write_String (Dict.TemporaryFile, "specification of "); Write_Name (File => Dict.TemporaryFile, Item => RawDict.Get_Subprogram_Parameter_Symbol (The_Subprogram_Parameter)); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => Specification); Write_Line (Dict.TemporaryFile, " ;"); end if; end WriteSubprogramParameterSpecification; -------------------------------------------------------------------------------- procedure AddConstraintSymbolsIfNeeded (Type_Mark : in RawDict.Type_Info_Ref; The_Subprogram_Parameter : in RawDict.Subprogram_Parameter_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Specification : in Location) --# global in out Dict; --# derives Dict from *, --# Comp_Unit, --# Specification, --# The_Subprogram_Parameter, --# Type_Mark; is Number_Of_Dimensions : Positive; procedure AddConstraintSymbol (The_Subprogram_Parameter : in RawDict.Subprogram_Parameter_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Specification : in Location; Dimension : in Positive) --# global in out Dict; --# derives Dict from *, --# Comp_Unit, --# Dimension, --# Specification, --# The_Subprogram_Parameter; is The_Parameter_Constraint : RawDict.Parameter_Constraint_Info_Ref; begin RawDict.Create_Parameter_Constraint (The_Subprogram_Parameter => The_Subprogram_Parameter, Dimension => Dimension, Comp_Unit => Comp_Unit, Loc => Specification.Start_Position, The_Parameter_Constraint => The_Parameter_Constraint); -- Now link new constraint to subprogram parameter - list ends up in dimension order RawDict.Set_Next_Parameter_Constraint (The_Parameter_Constraint => The_Parameter_Constraint, Next => RawDict.Get_Subprogram_Parameter_Index_Constraints (The_Subprogram_Parameter => The_Subprogram_Parameter)); RawDict.Set_Subprogram_Parameter_Index_Constraints (The_Subprogram_Parameter => The_Subprogram_Parameter, The_Index_Constraints => The_Parameter_Constraint); end AddConstraintSymbol; begin -- AddConstraintSymbolsIfNeeded if Is_Unconstrained_Array_Type (Type_Mark => Type_Mark) then Number_Of_Dimensions := Get_Number_Of_Dimensions (Type_Mark => Type_Mark); for I in reverse Positive range 1 .. 
Number_Of_Dimensions loop AddConstraintSymbol (The_Subprogram_Parameter => The_Subprogram_Parameter, Comp_Unit => Comp_Unit, Specification => Specification, Dimension => I); end loop; end if; end AddConstraintSymbolsIfNeeded; begin -- Add_Subprogram_Parameter RawDict.Create_Subprogram_Parameter (Name => Name, The_Subprogram => The_Subprogram, Type_Mark => Type_Mark, Mode => Mode, Comp_Unit => Comp_Unit, Loc => Specification.Start_Position, The_Subprogram_Parameter => The_Subprogram_Parameter); The_Previous_Subprogram_Parameter := RawDict.Get_Subprogram_Last_Parameter (The_Subprogram => The_Subprogram); if The_Previous_Subprogram_Parameter = RawDict.Null_Subprogram_Parameter_Info_Ref then RawDict.Set_Subprogram_First_Parameter (The_Subprogram => The_Subprogram, The_Subprogram_Parameter => The_Subprogram_Parameter); else RawDict.Set_Next_Subprogram_Parameter (The_Subprogram_Parameter => The_Previous_Subprogram_Parameter, Next => The_Subprogram_Parameter); end if; RawDict.Set_Subprogram_Last_Parameter (The_Subprogram => The_Subprogram, The_Subprogram_Parameter => The_Subprogram_Parameter); AddConstraintSymbolsIfNeeded (Type_Mark => Type_Mark, The_Subprogram_Parameter => The_Subprogram_Parameter, Comp_Unit => Comp_Unit, Specification => Specification); if Type_Mark /= Get_Unknown_Type_Mark then AddOtherReference (RawDict.Get_Type_Symbol (Type_Mark), RawDict.Get_Subprogram_Symbol (The_Subprogram), Type_Reference); end if; WriteSubprogramParameterSpecification (The_Subprogram_Parameter, Specification); end Add_Subprogram_Parameter; spark-2012.0.deb/examiner/sparklex.adb0000644000175000017500000021572611753202336016567 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Ada.Characters.Handling; with Ada.Characters.Latin_1; with CommandLineData; with ErrorHandler; with SystemErrors; use type CommandLineData.Language_Profiles; package body SparkLex is Curr_Line : Line_Context; End_Of_Text : constant Character := Ada.Characters.Latin_1.ETX; --# inherit Ada.Characters.Latin_1, --# CommandLineData, --# Dictionary, --# ErrorHandler, --# E_Strings, --# LexTokenManager, --# SparkLex, --# SPARK_IO, --# SystemErrors; package LineManager is procedure Clear_Line (Curr_Line : out SparkLex.Line_Context); --# derives Curr_Line from ; --# post E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Last_Token_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; procedure Copy_In_Line (Line : in SparkLex.Line_Context; Curr_Line : out SparkLex.Line_Context); --# derives Curr_Line from Line; --# post Line = Curr_Line; procedure Record_Curr_Pos (Curr_Line : in out SparkLex.Line_Context); --# derives Curr_Line from *; --# post Curr_Line.Last_Token_Pos = Curr_Line.Curr_Pos and --# E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Curr_Pos = Curr_Line~.Curr_Pos and --# Curr_Line.Lookahead_Pos = Curr_Line~.Lookahead_Pos; procedure Reset_Curr_Pos (Curr_Line : in out SparkLex.Line_Context); --# derives Curr_Line from *; --# post Curr_Line.Curr_Pos = Curr_Line.Last_Token_Pos and --# Curr_Line.Lookahead_Pos = Curr_Line.Last_Token_Pos and --# E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Last_Token_Pos = Curr_Line~.Last_Token_Pos; procedure Accept_Char (Curr_Line : in out SparkLex.Line_Context); --# derives Curr_Line from *; --# pre E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; --# post Curr_Line.Lookahead_Pos = Curr_Line.Curr_Pos and --# Curr_Line.Curr_Pos >= Curr_Line~.Curr_Pos and --# (Curr_Line~.Curr_Pos <= E_Strings.Get_Length (Curr_Line~.Conts) -> --# (Curr_Line.Curr_Pos > Curr_Line~.Curr_Pos)) and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Last_Token_Pos = Curr_Line~.Last_Token_Pos; procedure Lookahead_Char (Curr_Line : in out SparkLex.Line_Context; Ch : out Character); --# derives Ch, --# Curr_Line from Curr_Line; --# pre E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; --# post Curr_Line.Lookahead_Pos >= Curr_Line~.Lookahead_Pos and --# (Curr_Line~.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line~.Conts) -> --# (Curr_Line.Lookahead_Pos > Curr_Line~.Lookahead_Pos)) and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Curr_Pos = Curr_Line~.Curr_Pos and --# Curr_Line.Last_Token_Pos = Curr_Line~.Last_Token_Pos; procedure Accept_Lookahead (Curr_Line : in out SparkLex.Line_Context); --# derives Curr_Line from *; --# pre E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; --# post Curr_Line.Curr_Pos = Curr_Line.Lookahead_Pos and 
--# Curr_Line.Lookahead_Pos >= Curr_Line~.Lookahead_Pos and --# (Curr_Line~.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line~.Conts) -> --# (Curr_Line.Lookahead_Pos > Curr_Line~.Lookahead_Pos)) and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Last_Token_Pos = Curr_Line~.Last_Token_Pos; procedure Reject_Lookahead (Curr_Line : in out SparkLex.Line_Context); --# derives Curr_Line from *; --# post Curr_Line.Lookahead_Pos = Curr_Line.Curr_Pos and --# E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Curr_Pos = Curr_Line~.Curr_Pos and --# Curr_Line.Last_Token_Pos = Curr_Line~.Last_Token_Pos; function Separator (Ch : Character) return Boolean; procedure Next_Sig_Char (Prog_Text : in SPARK_IO.File_Type; Curr_Line : in out SparkLex.Line_Context); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Curr_Line, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Curr_Line, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Prog_Text, --# SPARK_IO.File_Sys; --# pre E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Last_Token_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; --# post E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) and --# Curr_Line.Last_Token_Pos <= E_Strings.Get_Length (Curr_Line.Conts) and --# Curr_Line.Last_Token_Pos = Curr_Line.Curr_Pos and Curr_Line.Lookahead_Pos = Curr_Line.Curr_Pos; procedure Set_Context (Curr_Line : in out SparkLex.Line_Context; New_Context : in SparkLex.Program_Context); --# derives Curr_Line from *, --# New_Context; --# post E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Curr_Pos = Curr_Line~.Curr_Pos and --# Curr_Line.Lookahead_Pos = Curr_Line~.Lookahead_Pos and --# Curr_Line.Last_Token_Pos = Curr_Line~.Last_Token_Pos; procedure Set_Anno_Context (Curr_Line : in out SparkLex.Line_Context; New_Context : in SparkLex.Annotation_Context); --# derives Curr_Line from *, --# New_Context; pragma Unreferenced (Set_Anno_Context); -- not used at present end LineManager; No_Of_RW : constant Positive := 120; Min_RW_Length : constant Positive := 1; Max_RW_Length : constant Positive := 20; type Anno_Type is (Start_Anno, Proof_Anno, Hide_Anno, Other_Anno, No_Anno); subtype RW_Length is Positive range 1 .. Max_RW_Length; subtype RW_Index is Positive range 1 .. No_Of_RW; subtype Res_Word is String (RW_Length); type RWPair is record Word : Res_Word; Token : SP_Symbols.SP_Terminal; end record; type RWList is array (RW_Index) of RWPair; -- The table of reserved words MUST be listed in alphabetical order. 
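-- (Each entry is blank-padded to Max_RW_Length, and Comp_RW treats the
-- first trailing blank as the end of the word, so, for example, the
-- token "begin" compares equal to the padded entry for "begin".
-- Check_Reserved narrows its search of this table between the bounds
-- IL and IU, which is why an out-of-order entry could make a reserved
-- word lex as an ordinary identifier.)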
RW : constant RWList := RWList' (RWPair'(Res_Word'("abort "), SP_Symbols.RWabort), RWPair'(Res_Word'("abs "), SP_Symbols.RWabs), RWPair'(Res_Word'("abstract "), SP_Symbols.RWabstract), RWPair'(Res_Word'("accept "), SP_Symbols.RWaccept), RWPair'(Res_Word'("access "), SP_Symbols.RWaccess), RWPair'(Res_Word'("aliased "), SP_Symbols.RWaliased), RWPair'(Res_Word'("all "), SP_Symbols.RWall), RWPair'(Res_Word'("and "), SP_Symbols.RWand), RWPair'(Res_Word'("are_interchangeable "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("array "), SP_Symbols.RWarray), RWPair'(Res_Word'("as "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("assert "), SP_Symbols.RWassert), RWPair'(Res_Word'("assume "), SP_Symbols.RWassume), RWPair'(Res_Word'("at "), SP_Symbols.RWat), RWPair'(Res_Word'("begin "), SP_Symbols.RWbegin), RWPair'(Res_Word'("body "), SP_Symbols.RWbody), RWPair'(Res_Word'("case "), SP_Symbols.RWcase), RWPair'(Res_Word'("check "), SP_Symbols.RWcheck), RWPair'(Res_Word'("const "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("constant "), SP_Symbols.RWconstant), RWPair'(Res_Word'("declare "), SP_Symbols.RWdeclare), RWPair'(Res_Word'("delay "), SP_Symbols.RWdelay), RWPair'(Res_Word'("delta "), SP_Symbols.RWdelta), RWPair'(Res_Word'("derives "), SP_Symbols.RWderives), RWPair'(Res_Word'("digits "), SP_Symbols.RWdigits), RWPair'(Res_Word'("div "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("do "), SP_Symbols.RWdo), RWPair'(Res_Word'("element "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("else "), SP_Symbols.RWelse), RWPair'(Res_Word'("elsif "), SP_Symbols.RWelsif), RWPair'(Res_Word'("end "), SP_Symbols.RWend), RWPair'(Res_Word'("entry "), SP_Symbols.RWentry), RWPair'(Res_Word'("exception "), SP_Symbols.RWexception), RWPair'(Res_Word'("exit "), SP_Symbols.RWexit), RWPair'(Res_Word'("finish "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("first "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("for "), SP_Symbols.RWfor), RWPair'(Res_Word'("for_all "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("for_some "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("from "), SP_Symbols.RWfrom), RWPair'(Res_Word'("function "), SP_Symbols.RWfunction), RWPair'(Res_Word'("generic "), SP_Symbols.RWgeneric), RWPair'(Res_Word'("global "), SP_Symbols.RWglobal), RWPair'(Res_Word'("goal "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("goto "), SP_Symbols.RWgoto), RWPair'(Res_Word'("hide "), SP_Symbols.RWhide), RWPair'(Res_Word'("if "), SP_Symbols.RWif), RWPair'(Res_Word'("in "), SP_Symbols.RWin), RWPair'(Res_Word'("inherit "), SP_Symbols.RWinherit), RWPair'(Res_Word'("initializes "), SP_Symbols.RWinitializes), RWPair'(Res_Word'("interface "), SP_Symbols.RWinterface), RWPair'(Res_Word'("is "), SP_Symbols.RWis), RWPair'(Res_Word'("last "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("limited "), SP_Symbols.RWlimited), RWPair'(Res_Word'("loop "), SP_Symbols.RWloop), RWPair'(Res_Word'("main_program "), SP_Symbols.RWmain_program), RWPair'(Res_Word'("may_be_deduced "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("may_be_deduced_from "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("may_be_replaced_by "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("mod "), SP_Symbols.RWmod), RWPair'(Res_Word'("new "), SP_Symbols.RWnew), RWPair'(Res_Word'("nonfirst "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("nonlast "), SP_Symbols.predefined_FDL_identifier), 
RWPair'(Res_Word'("not "), SP_Symbols.RWnot), RWPair'(Res_Word'("not_in "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("null "), SP_Symbols.RWnull), RWPair'(Res_Word'("odd "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("of "), SP_Symbols.RWof), RWPair'(Res_Word'("or "), SP_Symbols.RWor), RWPair'(Res_Word'("others "), SP_Symbols.RWothers), RWPair'(Res_Word'("out "), SP_Symbols.RWout), RWPair'(Res_Word'("overriding "), SP_Symbols.RWoverriding), RWPair'(Res_Word'("own "), SP_Symbols.RWown), RWPair'(Res_Word'("package "), SP_Symbols.RWpackage), RWPair'(Res_Word'("pending "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("post "), SP_Symbols.RWpost), RWPair'(Res_Word'("pragma "), SP_Symbols.RWpragma), RWPair'(Res_Word'("pre "), SP_Symbols.RWpre), RWPair'(Res_Word'("pred "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("private "), SP_Symbols.RWprivate), RWPair'(Res_Word'("procedure "), SP_Symbols.RWprocedure), RWPair'(Res_Word'("proof "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("protected "), SP_Symbols.RWprotected), RWPair'(Res_Word'("raise "), SP_Symbols.RWraise), RWPair'(Res_Word'("range "), SP_Symbols.RWrange), RWPair'(Res_Word'("real "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("record "), SP_Symbols.RWrecord), RWPair'(Res_Word'("rem "), SP_Symbols.RWrem), RWPair'(Res_Word'("renames "), SP_Symbols.RWrenames), RWPair'(Res_Word'("requeue "), SP_Symbols.RWrequeue), RWPair'(Res_Word'("requires "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("return "), SP_Symbols.RWreturn), RWPair'(Res_Word'("reverse "), SP_Symbols.RWreverse), RWPair'(Res_Word'("save "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("select "), SP_Symbols.RWselect), RWPair'(Res_Word'("separate "), SP_Symbols.RWseparate), RWPair'(Res_Word'("sequence "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("set "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("some "), SP_Symbols.RWsome), RWPair'(Res_Word'("sqr "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("start "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("strict_subset_of "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("subset_of "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("subtype "), SP_Symbols.RWsubtype), RWPair'(Res_Word'("succ "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("synchronized "), SP_Symbols.RWsynchronized), RWPair'(Res_Word'("tagged "), SP_Symbols.RWtagged), RWPair'(Res_Word'("task "), SP_Symbols.RWtask), RWPair'(Res_Word'("terminate "), SP_Symbols.RWterminate), RWPair'(Res_Word'("then "), SP_Symbols.RWthen), RWPair'(Res_Word'("type "), SP_Symbols.RWtype), RWPair'(Res_Word'("until "), SP_Symbols.RWuntil), RWPair'(Res_Word'("update "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("use "), SP_Symbols.RWuse), RWPair'(Res_Word'("var "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("when "), SP_Symbols.RWwhen), RWPair'(Res_Word'("where "), SP_Symbols.predefined_FDL_identifier), RWPair'(Res_Word'("while "), SP_Symbols.RWwhile), RWPair'(Res_Word'("with "), SP_Symbols.RWwith), RWPair'(Res_Word'("xor "), SP_Symbols.RWxor)); No_Of_FDL_RW : constant Positive := 34; subtype FDL_RW_Index is Positive range 1 .. 
No_Of_FDL_RW; type FDL_RWList is array (FDL_RW_Index) of Res_Word; FDL_RW : constant FDL_RWList := FDL_RWList' (Res_Word'("are_interchangeable "), Res_Word'("as "), Res_Word'("const "), Res_Word'("div "), Res_Word'("element "), Res_Word'("finish "), Res_Word'("first "), Res_Word'("for_all "), Res_Word'("for_some "), Res_Word'("goal "), Res_Word'("last "), Res_Word'("may_be_deduced "), Res_Word'("may_be_deduced_from "), Res_Word'("may_be_replaced_by "), Res_Word'("nonfirst "), Res_Word'("nonlast "), Res_Word'("not_in "), Res_Word'("odd "), Res_Word'("pending "), Res_Word'("pred "), Res_Word'("proof "), Res_Word'("real "), Res_Word'("requires "), Res_Word'("save "), Res_Word'("sequence "), Res_Word'("set "), Res_Word'("sqr "), Res_Word'("start "), Res_Word'("strict_subset_of "), Res_Word'("subset_of "), Res_Word'("succ "), Res_Word'("update "), Res_Word'("var "), Res_Word'("where ")); subtype Offset is Integer range 1 .. 4; type Possible_Prefixes is (Field_Prefix, Update_Prefix); type Prefix_Strings is array (Offset) of Character; type Prefix_Tables is array (Possible_Prefixes) of Prefix_Strings; Prefix_Table : constant Prefix_Tables := Prefix_Tables'(Field_Prefix => Prefix_Strings'('f', 'l', 'd', '_'), Update_Prefix => Prefix_Strings'('u', 'p', 'f', '_')); package body LineManager is separate; --++++ The following functions are defined as per Ada LRM Chapter 2. ++++-- ------------------------------------------------------------------------------- -- Note a "Special Character" here to the SPARK Lexer is NOT the -- same as Ada95 LRM's Ada.Characters.Handling.Is_Special function Special_Character (Ch : Character) return Boolean is Result : Boolean; begin case Ch is when '"' | '#' | '&' | ''' | '(' | ')' | '*' | '+' | ',' | '-' | '.' | '/' | ':' | ';' | '<' | '=' | '>' | '_' | '|' => Result := True; when others => Result := False; end case; return Result; end Special_Character; function Format_Effector (Ch : Character) return Boolean is Result : Boolean; begin case Ch is when Ada.Characters.Latin_1.HT | Ada.Characters.Latin_1.VT | Ada.Characters.Latin_1.CR | Ada.Characters.Latin_1.LF | Ada.Characters.Latin_1.FF => Result := True; when others => Result := False; end case; return Result; end Format_Effector; function Other_Special_Character (Ch : Character) return Boolean is Result : Boolean; begin case Ch is when '!' | '$' | '%' | '?' | '@' | '[' | '\' | ']' | '^' | '`' | '{' | '}' | '~' => Result := True; when others => Result := False; end case; return Result; end Other_Special_Character; -- Note a "Basic Character" here to the SPARK Lexer is NOT the -- same as Ada95 LRM's Ada.Characters.Handling.Is_Basic function Basic_Graphic_Character (Ch : Character) return Boolean is begin return Ada.Characters.Handling.Is_Upper (Ch) or else Ada.Characters.Handling.Is_Digit (Ch) or else Special_Character (Ch => Ch) or else Ch = ' '; end Basic_Graphic_Character; -- Note a "Graphic Character" here to the SPARK Lexer is NOT the -- same as Ada95 LRM's Ada.Characters.Handling.Is_Graphic function Graphic_Character (Ch : Character) return Boolean is begin return Basic_Graphic_Character (Ch => Ch) or else Ada.Characters.Handling.Is_Lower (Ch) or else Other_Special_Character (Ch => Ch); end Graphic_Character; -- Same as Graphic_Character but also allows all extended -- ASCII characters (useful for string constants). 
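-- Illustrative values: Extended_ASCII (Ada.Characters.Latin_1.Copyright_Sign)
-- is True, since Character'Pos of that character is 169 (>= 128), even
-- though Graphic_Character rejects it; both functions accept 'A'.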
function Extended_ASCII (Ch : Character) return Boolean is begin return Graphic_Character (Ch) or Character'Pos (Ch) >= 128; end Extended_ASCII; function Simple_Delimiter (Ch : Character) return Boolean is Result : Boolean; begin case Ch is when '&' | ''' | '(' | ')' | '*' | '+' | ',' | '-' | '.' | '/' | ':' | ';' | '<' | '=' | '>' | '|' | '[' | ']' | '@' | '~' | '%' => Result := True; when others => Result := False; end case; return Result; end Simple_Delimiter; function Letter_Or_Digit (Ch : Character) return Boolean --# return Ada.Characters.Handling.Is_Letter (Ch) or Ada.Characters.Handling.Is_Digit (Ch); is begin return Ada.Characters.Handling.Is_Letter (Item => Ch) or else Ada.Characters.Handling.Is_Digit (Item => Ch); end Letter_Or_Digit; ------------------------------------------------------------------------------- procedure Clear_Line_Context --# post E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Last_Token_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; is begin LineManager.Clear_Line (Curr_Line => Curr_Line); end Clear_Line_Context; procedure Store_Line_Context (File_Line : out Line_Context) --# pre E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Last_Token_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; --# post E_Strings.Get_Length (File_Line.Conts) < Natural'Last and --# File_Line.Curr_Pos <= E_Strings.Get_Length (File_Line.Conts) + 1 and --# File_Line.Lookahead_Pos <= E_Strings.Get_Length (File_Line.Conts) + 1 and --# File_Line.Last_Token_Pos <= E_Strings.Get_Length (File_Line.Conts) + 1; is begin File_Line := Line_Context' (Context => In_Ada, Anno_Context => Start_Annotation, Line_No => Curr_Line.Line_No, Last_Token_Pos => Curr_Line.Last_Token_Pos, Curr_Pos => Curr_Line.Last_Token_Pos, Lookahead_Pos => Curr_Line.Lookahead_Pos, Conts => Curr_Line.Conts); end Store_Line_Context; procedure Restore_Line_Context (File_Line : in Line_Context) --# pre E_Strings.Get_Length (File_Line.Conts) < Natural'Last and --# File_Line.Curr_Pos <= E_Strings.Get_Length (File_Line.Conts) + 1 and --# File_Line.Lookahead_Pos <= E_Strings.Get_Length (File_Line.Conts) + 1 and --# File_Line.Last_Token_Pos <= E_Strings.Get_Length (File_Line.Conts) + 1; --# post E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Last_Token_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; is begin LineManager.Copy_In_Line (Line => File_Line, Curr_Line => Curr_Line); end Restore_Line_Context; --------------------------------------------------------------------------- procedure Check_Reserved (Curr_Line : in Line_Context; Start_Pos, End_Pos : in E_Strings.Positions; Look_Ahead : in Boolean; Token : out SP_Symbols.SP_Terminal) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Curr_Line, --# Dictionary.Dict, --# End_Pos, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Look_Ahead, --# 
SPARK_IO.File_Sys, --# Start_Pos & --# Token from CommandLineData.Content, --# Curr_Line, --# End_Pos, --# Start_Pos; --# pre E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Start_Pos <= E_Strings.Get_Length (Curr_Line.Conts) and --# End_Pos <= E_Strings.Get_Length (Curr_Line.Conts) and --# Start_Pos <= End_Pos; is type Cmp_Res is (CEQ, CLT, CGT); Leng : Positive; Ix : RW_Index; IL, IU : Positive; Result : Cmp_Res; L_Token : SP_Symbols.SP_Terminal; ----------------------------------------- function Comp_RW (Curr_Line : Line_Context; R_Word : Res_Word; Start_Pos, End_Pos : E_Strings.Positions) return Cmp_Res --# pre Start_Pos <= End_Pos; is Ch1, Ch2 : Character; RW_X : RW_Length; Comp_Result : Cmp_Res; begin RW_X := 1; loop --# assert RW_X + Start_Pos - 1 <= End_Pos and --# RW_X <= Max_RW_Length; Ch1 := R_Word (RW_X); -- Reserved words in lower case. Ch2 := Ada.Characters.Handling.To_Lower (Item => E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => RW_X + (Start_Pos - 1))); if Ch1 < Ch2 then Comp_Result := CLT; elsif Ch1 > Ch2 then Comp_Result := CGT; elsif RW_X + (Start_Pos - 1) = End_Pos then if RW_X = Max_RW_Length or else R_Word (RW_X + 1) = ' ' then Comp_Result := CEQ; else Comp_Result := CGT; end if; else Comp_Result := CEQ; end if; exit when Comp_Result /= CEQ or else RW_X + (Start_Pos - 1) >= End_Pos or else RW_X = Max_RW_Length; RW_X := RW_X + 1; end loop; return Comp_Result; end Comp_RW; function Check_FLD_Or_UPF (Curr_Line : Line_Context; Leng : Positive; Start_Pos : E_Strings.Positions) return SP_Symbols.SP_Terminal --# pre E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Start_Pos <= E_Strings.Get_Length (Curr_Line.Conts); is Prefix_Sort : Possible_Prefixes; Result : SP_Symbols.SP_Terminal; ---------------------------------------------- function Check_Rest_Of_Prefix (Curr_Line : Line_Context; Prefix_Sort : Possible_Prefixes; Start_Pos : E_Strings.Positions) return SP_Symbols.SP_Terminal --# pre E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Start_Pos <= E_Strings.Get_Length (Curr_Line.Conts); is Ptr : Offset; Result : SP_Symbols.SP_Terminal; begin Ptr := 1; loop --# assert E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Start_Pos + Ptr <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Ptr in Offset; if Start_Pos + Ptr > E_Strings.Get_Length (E_Str => Curr_Line.Conts) or else Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Start_Pos + (Ptr - 1))) /= Prefix_Table (Prefix_Sort) (Ptr) then Result := SP_Symbols.identifier; exit; end if; if Ptr = Offset'Last then Result := SP_Symbols.predefined_FDL_identifier; exit; end if; Ptr := Ptr + 1; end loop; return Result; end Check_Rest_Of_Prefix; begin -- Check_FLD_Or_UPF Result := SP_Symbols.identifier; if Leng >= 5 then -- minimum length a valid fld_ or upf_ could be if Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Start_Pos)) = 'f' then Prefix_Sort := Field_Prefix; Result := Check_Rest_Of_Prefix (Curr_Line => Curr_Line, Prefix_Sort => Prefix_Sort, Start_Pos => Start_Pos); elsif Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Start_Pos)) = 'u' then Prefix_Sort := Update_Prefix; Result := Check_Rest_Of_Prefix (Curr_Line => Curr_Line, Prefix_Sort => Prefix_Sort, Start_Pos => Start_Pos); else Result := SP_Symbols.identifier; end if; end if; return Result; end Check_FLD_Or_UPF; ----------------------------------------------------------- function 
Convert_FDL (Token : SP_Symbols.SP_Terminal) return SP_Symbols.SP_Terminal --# global in CommandLineData.Content; is Result : SP_Symbols.SP_Terminal; begin if Token = SP_Symbols.predefined_FDL_identifier and then not CommandLineData.Content.FDL_Reserved then Result := SP_Symbols.identifier; else Result := Token; end if; return Result; end Convert_FDL; ----------------------------------------------------------- function Convert_Reserved (Token : SP_Symbols.SP_Terminal) return SP_Symbols.SP_Terminal --# global in CommandLineData.Content; is Result : SP_Symbols.SP_Terminal; begin if Token = SP_Symbols.RWsome and then not CommandLineData.Content.FDL_Reserved then Result := SP_Symbols.identifier; else Result := Token; end if; return Result; end Convert_Reserved; ----------------------------------------------------------- procedure Convert95_And_2005_Reserved (Curr_Line : in Line_Context; Token : in SP_Symbols.SP_Terminal; Start_Pos : in E_Strings.Positions; Look_Ahead : in Boolean; Converted_Token : out SP_Symbols.SP_Terminal) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Converted_Token from CommandLineData.Content, --# Token & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Curr_Line, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Look_Ahead, --# SPARK_IO.File_Sys, --# Start_Pos, --# Token; is Result : SP_Symbols.SP_Terminal; -------------------------------------------------------- -- Returns true if Token is a new 95 reserved word. function SPARK95_Reserved (Token : SP_Symbols.SP_Terminal) return Boolean --# global in CommandLineData.Content; is begin -- In SPARK83 mode, "aliased", "protected", "requeue", "tagged", -- "until" are all identifiers, not reserved words. Additionally, -- "abstract" is an identifier UNLESS -fdl is active, in which case -- it remains a reserved word return Token = SP_Symbols.RWaliased or else Token = SP_Symbols.RWprotected or else Token = SP_Symbols.RWrequeue or else Token = SP_Symbols.RWtagged or else Token = SP_Symbols.RWuntil or else (Token = SP_Symbols.RWabstract and then not CommandLineData.Content.FDL_Reserved); end SPARK95_Reserved; -------------------------------------------------------- -- Returns true if Token is a new 2005 reserved word. function SPARK2005_Reserved (Token : SP_Symbols.SP_Terminal) return Boolean is begin return Token = SP_Symbols.RWinterface or else Token = SP_Symbols.RWoverriding or else Token = SP_Symbols.RWsynchronized; end SPARK2005_Reserved; begin -- Convert95_And_2005_Reserved case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => if SPARK95_Reserved (Token => Token) or else SPARK2005_Reserved (Token => Token) then Result := SP_Symbols.identifier; else Result := Token; end if; when CommandLineData.SPARK95 => if SPARK2005_Reserved (Token => Token) then Result := SP_Symbols.identifier; if not Look_Ahead then -- Raise a warning if the identifier is an Ada2005 -- reserved word. To remove duplicate warning messages, -- the lexical analyser only raises the warning if it is -- not looking ahead.
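-- (In SPARK83 mode, handled in the case arm above, the new reserved words are converted to identifiers silently, with no warning.)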
ErrorHandler.Semantic_Warning (Err_Num => 7, Position => LexTokenManager.Token_Position'(Start_Line_No => Curr_Line.Line_No, Start_Pos => Start_Pos), Id_Str => LexTokenManager.Null_String); end if; else Result := Token; end if; when CommandLineData.SPARK2005_Profiles => Result := Token; end case; Converted_Token := Result; end Convert95_And_2005_Reserved; begin -- Check_Reserved Leng := (End_Pos - Start_Pos) + 1; if Leng <= Max_RW_Length then IL := RW_Index'First; IU := RW_Index'Last; loop Ix := IL; Result := Comp_RW (Curr_Line => Curr_Line, R_Word => RW (Ix).Word, Start_Pos => Start_Pos, End_Pos => End_Pos); exit when Result = CGT or else Result = CEQ; Ix := IU; Result := Comp_RW (Curr_Line => Curr_Line, R_Word => RW (Ix).Word, Start_Pos => Start_Pos, End_Pos => End_Pos); exit when Result = CLT or else Result = CEQ; Ix := (IL + IU) / 2; Result := Comp_RW (Curr_Line => Curr_Line, R_Word => RW (Ix).Word, Start_Pos => Start_Pos, End_Pos => End_Pos); case Result is when CEQ => null; when CGT => if IL < RW_Index'Last then IL := IL + 1; end if; IU := Ix - 1; when CLT => IL := Ix + 1; if IU > RW_Index'First then IU := IU - 1; end if; end case; exit when Result = CEQ or else IL > IU; --# assert IL <= IU and IL in RW_Index and IU in RW_Index and Leng >= Min_RW_Length and --# Start_Pos <= End_Pos and Ix in RW_Index and Leng <= End_Pos; end loop; if Result = CEQ then L_Token := RW (Ix).Token; else L_Token := Check_FLD_Or_UPF (Curr_Line => Curr_Line, Leng => Leng, Start_Pos => Start_Pos); end if; else L_Token := Check_FLD_Or_UPF (Curr_Line => Curr_Line, Leng => Leng, Start_Pos => Start_Pos); end if; Convert95_And_2005_Reserved (Curr_Line => Curr_Line, Token => Convert_Reserved (Token => Convert_FDL (Token => L_Token)), Start_Pos => Start_Pos, Look_Ahead => Look_Ahead, Converted_Token => Token); end Check_Reserved; function Check_FDL_RW (Ex_Str : E_Strings.T) return Boolean is type Cmp_Res is (CEQ, CLT, CGT); Is_FDL_RW : Boolean := True; Ix : FDL_RW_Index; IL, IU : Positive; Result : Cmp_Res; function Comp_FDL_RW (R_Word : Res_Word; Ex_Str : E_Strings.T) return Cmp_Res --# pre E_Strings.Get_Length (Ex_Str) >= 1; is Ch1, Ch2 : Character; RW_X : RW_Length; Comp_Result : Cmp_Res; begin RW_X := 1; loop --# assert RW_X <= E_Strings.Get_Length (Ex_Str) and --# RW_X <= Max_RW_Length; Ch1 := R_Word (RW_X); -- Reserved words in lower case. 
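-- A table entry shorter than Max_RW_Length is terminated by a blank, hence the trailing-space test in the equality check below.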
Ch2 := Ada.Characters.Handling.To_Lower (Item => E_Strings.Get_Element (E_Str => Ex_Str, Pos => RW_X)); if Ch1 < Ch2 then Comp_Result := CLT; elsif Ch1 > Ch2 then Comp_Result := CGT; elsif RW_X = E_Strings.Get_Length (E_Str => Ex_Str) then if RW_X = Max_RW_Length or else R_Word (RW_X + 1) = ' ' then Comp_Result := CEQ; else Comp_Result := CGT; end if; else Comp_Result := CEQ; end if; exit when Comp_Result /= CEQ or else RW_X = E_Strings.Get_Length (E_Str => Ex_Str) or else RW_X = Max_RW_Length; RW_X := RW_X + 1; end loop; return Comp_Result; end Comp_FDL_RW; begin -- Check_FDL_RW if E_Strings.Get_Length (E_Str => Ex_Str) >= Min_RW_Length and then E_Strings.Get_Length (E_Str => Ex_Str) <= Max_RW_Length then IL := FDL_RW_Index'First; IU := FDL_RW_Index'Last; loop Result := Comp_FDL_RW (R_Word => FDL_RW (IL), Ex_Str => Ex_Str); exit when Result = CGT or else Result = CEQ; Result := Comp_FDL_RW (R_Word => FDL_RW (IU), Ex_Str => Ex_Str); exit when Result = CLT or else Result = CEQ; Ix := (IL + IU) / 2; Result := Comp_FDL_RW (R_Word => FDL_RW (Ix), Ex_Str => Ex_Str); case Result is when CEQ => null; when CGT => if IL < FDL_RW_Index'Last then IL := IL + 1; end if; IU := Ix - 1; when CLT => IL := Ix + 1; if IU > FDL_RW_Index'First then IU := IU - 1; end if; end case; exit when Result = CEQ or else IL > IU; --# assert IL <= IU and --# IL in FDL_RW_Index and --# IU in FDL_RW_Index and --# E_Strings.Get_Length (Ex_Str) >= Min_RW_Length; end loop; if Result /= CEQ then Is_FDL_RW := False; end if; else Is_FDL_RW := False; end if; return Is_FDL_RW; end Check_FDL_RW; --------------------------------------------------------------------------- -- Returns the type of the annotation starting at the lookahead position in -- the line buffer. procedure Check_Anno_Type (Curr_Line : in Line_Context; Unfinished_Anno : in Boolean; Anno_Kind : out Anno_Type) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Anno_Kind from CommandLineData.Content, --# Curr_Line, --# Unfinished_Anno & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Curr_Line, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys; --# pre E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Last_Token_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; is type Element_Type is (End_Of_Line_Or_Text, Non_Character, RW_Or_Ident); Unused, Start_Posn, End_Posn : E_Strings.Positions; Anno_Token : SP_Symbols.SP_Terminal; Next_Element : Element_Type; procedure Check_Next_Element (Curr_Line : in Line_Context; Start_Posn : in E_Strings.Positions; End_Posn : out E_Strings.Positions; Next_Element : out Element_Type; Symbol : out SP_Symbols.SP_Terminal) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives End_Posn, --# Next_Element from Curr_Line, --# Start_Posn & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Curr_Line, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Start_Posn & --# Symbol from CommandLineData.Content, --# Curr_Line, --# Start_Posn; --# pre 
E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Start_Posn <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Last_Token_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; --# post End_Posn <= E_Strings.Get_Length (Curr_Line.Conts) + 1; is Ch : Character := ' '; Local_End_Posn, Local_Start_Posn : E_Strings.Positions; begin Local_Start_Posn := Start_Posn; loop --# assert Local_Start_Posn <= E_Strings.Get_Length (Curr_Line.Conts) + 1; exit when Local_Start_Posn > E_Strings.Get_Length (E_Str => Curr_Line.Conts); Ch := E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Local_Start_Posn); exit when Ch = End_Of_Text or else not LineManager.Separator (Ch => Ch); Local_Start_Posn := Local_Start_Posn + 1; end loop; Local_End_Posn := Local_Start_Posn; if Local_Start_Posn <= E_Strings.Get_Length (E_Str => Curr_Line.Conts) then if Ch = End_Of_Text then Next_Element := End_Of_Line_Or_Text; Symbol := SP_Symbols.illegal_id; -- not used elsif Letter_Or_Digit (Ch => Ch) then if Ada.Characters.Handling.Is_Letter (Item => Ch) then Next_Element := RW_Or_Ident; -- Scan the next identifier, but then see if it's a reserved word. while Local_End_Posn <= E_Strings.Get_Length (E_Str => Curr_Line.Conts) and then (Letter_Or_Digit (Ch => E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Local_End_Posn)) or else E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Local_End_Posn) = '_') loop --# assert Local_End_Posn <= E_Strings.Get_Length (Curr_Line.Conts) and --# Local_Start_Posn <= Local_End_Posn; Local_End_Posn := Local_End_Posn + 1; end loop; Local_End_Posn := Local_End_Posn - 1; Check_Reserved (Curr_Line => Curr_Line, Start_Pos => Local_Start_Posn, End_Pos => Local_End_Posn, Look_Ahead => True, Token => Symbol); else Next_Element := Non_Character; Symbol := SP_Symbols.illegal_id; -- not used end if; else Next_Element := Non_Character; Symbol := SP_Symbols.illegal_id; -- not used end if; else Next_Element := End_Of_Line_Or_Text; Symbol := SP_Symbols.illegal_id; -- not used end if; End_Posn := Local_End_Posn; end Check_Next_Element; begin -- Check_Anno_Type if Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (E_Str => Curr_Line.Conts) then Start_Posn := Curr_Line.Lookahead_Pos + 1; Check_Next_Element (Curr_Line => Curr_Line, Start_Posn => Start_Posn, End_Posn => End_Posn, Next_Element => Next_Element, Symbol => Anno_Token); if Next_Element = End_Of_Line_Or_Text then Anno_Kind := No_Anno; elsif Next_Element = Non_Character then Anno_Kind := Other_Anno; else case Anno_Token is when SP_Symbols.RWmain_program | SP_Symbols.RWinherit | SP_Symbols.RWown | SP_Symbols.RWinitializes | SP_Symbols.RWglobal | SP_Symbols.RWderives | SP_Symbols.RWdeclare | SP_Symbols.RWpre | SP_Symbols.RWpost => Anno_Kind := Start_Anno; when SP_Symbols.RWreturn => if (Unfinished_Anno) then -- Still in an annotation so it's not the start of a new one Anno_Kind := Other_Anno; else -- New annotation Anno_Kind := Start_Anno; end if; when SP_Symbols.RWassert | SP_Symbols.RWassume | SP_Symbols.RWcheck | SP_Symbols.RWtype | SP_Symbols.RWsubtype | SP_Symbols.RWfunction | SP_Symbols.RWaccept | SP_Symbols.RWend => Anno_Kind := Proof_Anno; when SP_Symbols.RWfor => -- do a second look ahead to check for "some" or "all" if End_Posn <= E_Strings.Get_Length (E_Str => Curr_Line.Conts) then Start_Posn := End_Posn + 1; --# accept F, 10, Unused, "Unused not referenced here"; 
Check_Next_Element (Curr_Line => Curr_Line, Start_Posn => Start_Posn, End_Posn => Unused, Next_Element => Next_Element, Symbol => Anno_Token); --# end accept; if Next_Element = RW_Or_Ident and then (Anno_Token = SP_Symbols.RWsome or else Anno_Token = SP_Symbols.RWall) then Anno_Kind := Other_Anno; else Anno_Kind := Proof_Anno; end if; else Anno_Kind := Proof_Anno; end if; when SP_Symbols.RWhide => Anno_Kind := Hide_Anno; when others => -- When a proof constant declaration occurs -- interpreting --# as proof context is -- handled by Hyph_Intro. Anno_Kind := Other_Anno; end case; end if; else Anno_Kind := No_Anno; end if; --# accept F, 33, Unused, "Unused not referenced here"; end Check_Anno_Type; -- Main implementation of the lexical analyser, common to both -- the Examiner and SPARKFormat. procedure Lex (Prog_Text : in SPARK_IO.File_Type; Allow_Dollar : in Boolean; Curr_Line : in out Line_Context; Token : out SP_Symbols.SP_Terminal; Lex_Val : out LexTokenManager.Lex_Value; Punct_Token : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Curr_Line, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lex_Val, --# Punct_Token, --# SPARK_IO.File_Sys, --# Token from Allow_Dollar, --# CommandLineData.Content, --# Curr_Line, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Prog_Text, --# SPARK_IO.File_Sys; --# pre E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Last_Token_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; --# post E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Last_Token_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; is separate; procedure Examiner_Lex (Prog_Text : in SPARK_IO.File_Type; Token : out SP_Symbols.SP_Terminal; Lex_Val : out LexTokenManager.Lex_Value; Punct_Token : out Boolean) --# pre E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Last_Token_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; --# post E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Last_Token_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; is begin Lex (Prog_Text => Prog_Text, Allow_Dollar => False, Curr_Line => Curr_Line, Token => Token, Lex_Val => Lex_Val, Punct_Token => Punct_Token); end Examiner_Lex; procedure SPARK_Format_Lex (Prog_Text : in SPARK_IO.File_Type; Token : out SP_Symbols.SP_Terminal; Lex_Val : out LexTokenManager.Lex_Value; Punct_Token : out Boolean) --# pre E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Last_Token_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; --# post E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and 
--# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Last_Token_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; is begin -- For SPARKFormat, we allow identifiers to begin with a '$' -- character so that SPARKFormat doesn't mess up annotations -- containing GNATPREP symbols Lex (Prog_Text => Prog_Text, Allow_Dollar => True, Curr_Line => Curr_Line, Token => Token, Lex_Val => Lex_Val, Punct_Token => Punct_Token); end SPARK_Format_Lex; function Similar_Tokens (Token1, Token2 : SP_Symbols.SP_Terminal) return Boolean is type Token_Type is ( Delimiter, Reserved_Word, Id, Number, Chars, Other_Token); Token_Type1, Token_Type2 : Token_Type; Result : Boolean; function Type_Of_Token (Token : SP_Symbols.SP_Terminal) return Token_Type is Token_Kind : Token_Type; begin case Token is when SP_Symbols.ampersand | SP_Symbols.apostrophe | SP_Symbols.left_paren | SP_Symbols.right_paren | SP_Symbols.multiply | SP_Symbols.plus | SP_Symbols.comma | SP_Symbols.minus | SP_Symbols.point | SP_Symbols.divide | SP_Symbols.colon | SP_Symbols.semicolon | SP_Symbols.less_than | SP_Symbols.equals | SP_Symbols.greater_than | SP_Symbols.vertical_bar | SP_Symbols.arrow | SP_Symbols.double_dot | SP_Symbols.double_star | SP_Symbols.becomes | SP_Symbols.not_equal | SP_Symbols.greater_or_equal | SP_Symbols.less_or_equal | SP_Symbols.left_label_paren | SP_Symbols.right_label_paren | SP_Symbols.box | SP_Symbols.implies | SP_Symbols.is_equivalent_to | SP_Symbols.tilde | SP_Symbols.square_open | SP_Symbols.square_close | SP_Symbols.percent => Token_Kind := Delimiter; when SP_Symbols.integer_number | SP_Symbols.real_number | SP_Symbols.based_integer | SP_Symbols.based_real => Token_Kind := Number; when SP_Symbols.character_literal | SP_Symbols.string_literal => Token_Kind := Chars; when SP_Symbols.identifier => Token_Kind := Id; when SP_Symbols.RWabort | SP_Symbols.RWabs | SP_Symbols.RWabstract | SP_Symbols.RWaccept | SP_Symbols.RWaccess | SP_Symbols.RWaliased | SP_Symbols.RWall | SP_Symbols.RWand | SP_Symbols.RWandthen | SP_Symbols.RWany | SP_Symbols.RWarray | SP_Symbols.RWassert | SP_Symbols.RWassume | SP_Symbols.RWat | SP_Symbols.RWbegin | SP_Symbols.RWbody | SP_Symbols.RWcase | SP_Symbols.RWcheck | SP_Symbols.RWconstant | SP_Symbols.RWdeclare | SP_Symbols.RWdelay | SP_Symbols.RWdelta | SP_Symbols.RWderives | SP_Symbols.RWdigits | SP_Symbols.RWdo | SP_Symbols.RWelse | SP_Symbols.RWelsif | SP_Symbols.RWend | SP_Symbols.RWentry | SP_Symbols.RWexception | SP_Symbols.RWexit | SP_Symbols.RWfor | SP_Symbols.RWforall | SP_Symbols.RWforsome | SP_Symbols.RWfrom | SP_Symbols.RWfunction | SP_Symbols.RWgeneric | SP_Symbols.RWglobal | SP_Symbols.RWgoto | SP_Symbols.RWhide | SP_Symbols.RWif | SP_Symbols.RWin | SP_Symbols.RWinherit | SP_Symbols.RWinitializes | SP_Symbols.RWis | SP_Symbols.RWlimited | SP_Symbols.RWloop | SP_Symbols.RWmain_program | SP_Symbols.RWmod | SP_Symbols.RWnew | SP_Symbols.RWnot | SP_Symbols.RWnotin | SP_Symbols.RWnull | SP_Symbols.RWof | SP_Symbols.RWor | SP_Symbols.RWorelse | SP_Symbols.RWothers | SP_Symbols.RWout | SP_Symbols.RWown | SP_Symbols.RWpackage | SP_Symbols.RWpost | SP_Symbols.RWpragma | SP_Symbols.RWpre | SP_Symbols.RWprivate | SP_Symbols.RWprotected | SP_Symbols.RWprocedure | SP_Symbols.RWraise | SP_Symbols.RWrange | SP_Symbols.RWrecord | SP_Symbols.RWrequeue | SP_Symbols.RWrem | SP_Symbols.RWrenames | SP_Symbols.RWreturn | SP_Symbols.RWreverse | SP_Symbols.RWselect | SP_Symbols.RWseparate | 
SP_Symbols.RWsome | SP_Symbols.RWsubtype | SP_Symbols.RWtagged | SP_Symbols.RWtask | SP_Symbols.RWterminate | SP_Symbols.RWthen | SP_Symbols.RWtype | SP_Symbols.RWuntil | SP_Symbols.RWuse | SP_Symbols.RWwhen | SP_Symbols.RWwhile | SP_Symbols.RWwith | SP_Symbols.RWxor | -- Ada2005 reserved words SP_Symbols.RWinterface | SP_Symbols.RWoverriding | SP_Symbols.RWsynchronized => Token_Kind := Reserved_Word; when others => Token_Kind := Other_Token; end case; return Token_Kind; end Type_Of_Token; begin Token_Type1 := Type_Of_Token (Token => Token1); Token_Type2 := Type_Of_Token (Token => Token2); case Token_Type1 is when Delimiter => case Token_Type2 is when Delimiter => Result := True; when others => Result := False; end case; when Number => case Token_Type2 is when Number => Result := True; when others => Result := False; end case; when Chars => case Token_Type2 is when Chars => Result := True; when others => Result := False; end case; when Id => case Token_Type2 is when Id => Result := True; when Reserved_Word => Result := True; when others => Result := False; end case; when Reserved_Word => case Token_Type2 is when Id => Result := True; when Reserved_Word => Result := True; when others => Result := False; end case; when others => Result := False; end case; return Result; end Similar_Tokens; end SparkLex; spark-2012.0.deb/examiner/sem-unknown_type_record.adb0000644000175000017500000000367711753202336021616 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem) function Unknown_Type_Record return Exp_Record is begin return Exp_Record' (Type_Symbol => Dictionary.GetUnknownTypeMark, Other_Symbol => Dictionary.NullSymbol, Stream_Symbol => Dictionary.NullSymbol, Tagged_Parameter_Symbol => Dictionary.NullSymbol, Variable_Symbol => Dictionary.NullSymbol, Param_Count => 0, Param_List => Lists.Null_List, Sort => Type_Result, Arg_List_Found => False, Is_AVariable => False, Is_An_Entire_Variable => False, Errors_In_Expression => True, Has_Operators => False, Is_Static => True, Is_Constant => True, Is_ARange => False, String_Value => LexTokenManager.Null_String, Value => Maths.NoValue, Range_RHS => Maths.NoValue); end Unknown_Type_Record; spark-2012.0.deb/examiner/errorhandler-conversions-tostring-warningwithoutposition.adb0000644000175000017500000004747211753202336030517 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (ErrorHandler.Conversions.ToString) procedure WarningWithoutPosition (Err_Num : in Error_Types.NumericError; With_Explanation : in Boolean; E_Str : in out E_Strings.T) is Unit_Typ : SP_Symbols.SP_Symbol; procedure WarningWithoutPositionExpl (E_Str : in out E_Strings.T) --# global in Err_Num; --# derives E_Str from *, --# Err_Num; is separate; -- Note that the parameter names for this subunit are chosen to make it as easy as -- possible to auto-generate the subunit from this, its parent, file. The -- generation requires copying the case statement below, stripping out the -- current Append'Thing' statements and adding an AppendString for the -- explanatory text that is delineated by --! comments. procedure Append_Explanation --# global in Err_Num; --# in With_Explanation; --# in out E_Str; --# derives E_Str from *, --# Err_Num, --# With_Explanation; is Explanation_String : E_Strings.T := E_Strings.Empty_String; begin if With_Explanation then -- we need to at least look for an explanation WarningWithoutPositionExpl (Explanation_String); if E_Strings.Get_Length (E_Str => Explanation_String) > 0 then -- there actually is one E_Strings.Append_String (E_Str => E_Str, Str => ErrorHandler.Explanation_Prefix); E_Strings.Append_Examiner_String (E_Str1 => E_Str, E_Str2 => Explanation_String); E_Strings.Append_String (E_Str => E_Str, Str => ErrorHandler.Explanation_Postfix); end if; end if; end Append_Explanation; begin -- HTML Directives --! <"warning-"> --! <"--- Warning : "><" : "> case Err_Num.ErrorNum is when 9 => Unit_Typ := SP_Symbols.SP_Symbol'Val (Err_Num.Name2.Pos); --! 
The body of XXX has a hidden exception handler - analysis and verification of contracts for this handler have not been performed. --! Issued when a --# hide XXX annotation is used to hide a user-defined exception handler. (warning control file --! keyword: handler_parts) E_Strings.Append_String (E_Str => E_Str, Str => "The body of "); case Unit_Typ is when SP_Symbols.entry_body => E_Strings.Append_String (E_Str => E_Str, Str => "entry "); when SP_Symbols.subprogram_implementation => E_Strings.Append_String (E_Str => E_Str, Str => "subprogram "); when SP_Symbols.task_body => E_Strings.Append_String (E_Str => E_Str, Str => "task "); when others => E_Strings.Append_String (E_Str => E_Str, Str => "unknown_node_type "); end case; Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has a hidden exception handler - analysis and verification of contracts " & "for this handler have not been performed"); when 10 => Unit_Typ := SP_Symbols.SP_Symbol'Val (Err_Num.Name2.Pos); --! XXX is hidden - hidden text is ignored by the Examiner --! Issued when a --# hide XXX annotation is used. (warning control file keyword: hidden_parts) case Unit_Typ is when SP_Symbols.subprogram_implementation => E_Strings.Append_String (E_Str => E_Str, Str => "The body of subprogram "); when SP_Symbols.private_part => E_Strings.Append_String (E_Str => E_Str, Str => "The private part of package "); when SP_Symbols.package_implementation => E_Strings.Append_String (E_Str => E_Str, Str => "The body of package "); when SP_Symbols.package_initialization => E_Strings.Append_String (E_Str => E_Str, Str => "The initialization of package "); when SP_Symbols.protected_type_declaration => E_Strings.Append_String (E_Str => E_Str, Str => "The private part of protected type "); when others => null; -- never happens end case; Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is hidden - hidden text is ignored by the Examiner"); when 400 => E_Strings.Append_String (E_Str => E_Str, Str => "Variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is declared but not used"); --! Issued when a variable declared in a subprogram is neither --! referenced, nor updated. --! (warning control file keyword: unused_variables) when 402 => E_Strings.Append_String (E_Str => E_Str, Str => "Default assertion planted to cut loop"); --! In order to prove properties of code containing loops, the --! loop must be "cut" with --! a suitable assertion statement. When generating run-time checks, --! the Examiner --! inserts a simple assertion to cut any loops which do not have one --! supplied --! by the user. The assertion is placed at the point where this --! warning appears in --! the listing file. The default assertion asserts that the --! subprogram's precondition --! (if any) is satisfied, that all imports to it are in their --! subtypes and that any for --! loop counter is in its subtype. In many cases this provides --! sufficient information --! to complete a proof of absence of run-time errors. If more --! information is required, --! then the user can supply an assertion and the Examiner will --! append the above information --! to it. 
(warning control file keyword: default_loop_assertions) when 403 => Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is declared as a variable but used as a constant"); --! XXX is a variable which was initialized at declaration but --! whose value is only ever --! read not updated; it could therefore have been declared as --! a constant. (warning control --! file keyword: constant_variables) when 404 => E_Strings.Append_String (E_Str => E_Str, Str => "Subprogram imports variables of abstract types for which run-time checks cannot be generated"); when 405 => E_Strings.Append_String (E_Str => E_Str, Str => "VCs for statements including real numbers are approximate"); --! The Examiner generates VCs associated with --! real numbers using perfect arithmetic rather than the machine --! approximations used on the --! target platform. It is possible that rounding errors might --! cause a Constraint_Error even --! if these run-time check proofs are completed satisfactorily. --! (warning control file keyword: real_rtcs) when 406 => E_Strings.Append_String (E_Str => E_Str, Str => "VC Generator unable to create output files. Permission is required to " & "create directories and files in the output directory"); --! This message is echoed to the screen if the Examiner is unable --! to create output files for the VCs being generated --! (for instance, if the user does not have write --! permission for the output directory). when 407 => E_Strings.Append_String (E_Str => E_Str, Str => "This package requires a body. Care should be taken to " & "provide one " & "because an Ada compiler will not detect its omission"); --! Issued where SPARK own variable and initialization annotations --! make it clear that a --! package requires a body but where no Ada requirement for a body --! exists. when 408 => E_Strings.Append_String (E_Str => E_Str, Str => "VCs could not be generated for this subprogram owing to " & "semantic errors in its " & "specification or body. Unprovable (False) VC generated"); --! Semantic errors prevent VC Generation, so a single False VC --! is produced. This will be detected and reported by POGS. when 409 => E_Strings.Append_String (E_Str => E_Str, Str => "VCs could not be generated for this subprogram due to " & "its size and/or complexity " & "exceeding the capacity of the VC Generator. Unprovable (False) VC generated"); --! A subprogram which has excessive complexity of data structure --! or number of paths may cause the VC Generator to exceed its capacity. --! A single False VC is generated in this case to make sure this --! error is detected in subsequent proof and analysis with POGS when 410 => E_Strings.Append_String (E_Str => E_Str, Str => "Task or interrupt handler "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is either unavailable (hidden) or has semantic errors in " & "its specification which prevent " & "partition-wide flow analysis being carried out"); --! Partition-wide flow analysis is performed by checking all --! packages withed by the main program for --! tasks and interrupt handlers and constructing an overall flow --! relation that captures their cumulative --! effect. It is for this reason that SPARK requires task and --! protected types to be declared in package --! specifications. If a task or protected type which contains --! an interrupt handler, is hidden from the --! 
Examiner (in a hidden package private part) or contains errors --! in its specification, the partition-wide --! flow analysis cannot be --! constructed correctly and is therefore suppressed. Correct the --! specification of the affected tasks --! and (temporarily if desired) make them visible to the Examiner. when 411 => E_Strings.Append_String (E_Str => E_Str, Str => "Task type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is unavailable and has not been considered in the shared variable check"); --! The Examiner checks that there is no potential sharing of --! unprotected data between tasks. If a task type --! is hidden from the Examiner in a hidden package private --! part, then it is not possible to check whether that --! task may share unprotected data. when 412 => E_Strings.Append_String (E_Str => E_Str, Str => "Task type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is unavailable and has not been considered in the max-one-in-a-queue check"); --! The Examiner checks that no more than one task can suspend on --! a single object. If a task --! is hidden from the Examiner in a hidden package private part, --! then it is not possible to check whether that --! task may suspend on the same object as another task. when 413 => E_Strings.Append_String (E_Str => E_Str, Str => "Task or main program "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has errors in its annotations. The shared variable and max-one-in-a-queue checks may be incomplete"); --! The Examiner checks that no more than one task can suspend on a --! single object and that there is no --! potential sharing of unprotected data between tasks. These checks --! depend on the accuracy of the annotations --! on the task types withed by the main program. If these annotations --! contain errors, then any reported --! violations of the shared variable and max-one-in-a-queue checks will --! be correct; however, the check --! may be incomplete. The errors in the task annotations should be corrected. when 414 => E_Strings.Append_String (E_Str => E_Str, Str => "Long output file name has been truncated"); --! Raised if an output file name is longer than the --! limit imposed by the operating system and has been truncated. --! Section 4.7 of the Examiner User Manual describes how the output file names --! are constructed. If this message is seen there is a possibility --! that the output from two --! or more subprograms will be written to the same file name, --! if they have a sufficiently large number of characters in common. when 415 => E_Strings.Append_String (E_Str => E_Str, Str => "The analysis of generic packages is not yet supported. " & "It will be supported in a future release of the Examiner"); when 420 => E_Strings.Append_String (E_Str => E_Str, Str => "Instance of SEPR 2124 found. An extra VC will " & "be generated here and must be discharged to " & "ensure absence of run-time errors. Please seek advice " & "for assistance with this issue"); --! In release 7.5 of the Examiner, a flaw in the VC generation --! was fixed such that subcomponents of records and elements of --! arrays when used as "out" or "in out" --! parameters will now generate an --! additional VC to verify absence of run-time errors. This warning --! flags an instance of this occurrence. Please read the release --!
note and/or seek advice for assistance with this issue. when 425 => E_Strings.Append_String (E_Str => E_Str, Str => "The -vcg switch should be used with the selected language profile"); --! A code generator language profile such as KCG is in use --! and so conditional flow errors may be present in the subprogram. --! Therefore the -vcg switch must be used to generate VCs and the VCs --! related to definedness discharged using the proof tools. when 426 => E_Strings.Append_String (E_Str => E_Str, Str => "The with_clause contains a reference to a public child of the package. " & "The Examiner will not detect mutual recursion between subprograms of the two packages"); --! A code generator language profile such as KCG allows a package body to --! with its own public child which is not normally permitted in SPARK. --! The removal of this restriction means that the Examiner will not --! detect mutual recursion between subprograms declared in the visible --! parts of the package and its child. The code generator is expected --! to guarantee the absence of recursion. when 430 => E_Strings.Append_String (E_Str => E_Str, Str => "SLI generation abandoned owing to syntax or semantic errors or multiple units in a single source file"); when 431 => E_Strings.Append_String (E_Str => E_Str, Str => "Preconditions on the main program are assumed to be true and not checked by the VC generation system"); when 444 => E_Strings.Append_String (E_Str => E_Str, Str => "Assumptions cannot be checked and must be justified with an accept annotation"); when 495 => E_Strings.Append_String (E_Str => E_Str, Str => "The VC file "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Dictionary.NullScope); E_Strings.Append_String (E_Str => E_Str, Str => " has a pathname longer than 255 characters which can produce unexpected problems " & "on Windows with respect to the SPARK tools (undischarged VCs) and other tools"); --! There is little that can be done to work around this as this --! is a fundamental limitation of Windows. You could try one of the --! following: Perform analysis higher up in the directory tree (i.e. --! in C:\a instead of C:\project_name\spark\analysis). You could try --! remapping a directory to a new drive to do the same (google for subst). --! You could try renaming or restructuring your program to flatten the --! structure a bit. And finally you can perform analysis on a UNIX system --! such as Mac OSX or GNU/Linux as they do not suffer from this problem. when others => E_Strings.Append_String (E_Str => E_Str, Str => "UNKNOWN ERROR NUMBER PASSED TO WarningWithoutPosition"); end case; Append_Explanation; E_Strings.Append_String (E_Str => E_Str, Str => "."); end WarningWithoutPosition; spark-2012.0.deb/examiner/sem-compunit-wf_package_body.adb0000644000175000017500000005731411753202336022455 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.CompUnit) procedure Wf_Package_Body (Node : in STree.SyntaxNode; Scope : in out Dictionary.Scopes; With_Public_Child : out Boolean) is Ident_Str : LexTokenManager.Lex_String; Sym : Dictionary.Symbol; Ident_Node, With_Node, Ref_Node, Next_Node, Grand_Parent : STree.SyntaxNode; Spec_Found, Ok_To_Add_Body, Body_Is_Hidden : Boolean; Pack_Scope : Dictionary.Scopes; ---------------------------------------------- procedure Find_Package (Ident_Node : in out STree.SyntaxNode; Ident_Str : in out LexTokenManager.Lex_String; Scope : in Dictionary.Scopes; Found : out Boolean; The_Package : out Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# Found, --# Ident_Node, --# Ident_Str, --# STree.Table, --# The_Package from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Ident_Node, --# Ident_Str, --# LexTokenManager.State, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node, --# Ident_Str, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; --# post Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier and --# STree.Table = STree.Table~; is Sym, Parent_Sym : Dictionary.Symbol; Ok : Boolean; begin Sym := Dictionary.LookupImmediateScope (Name => Ident_Str, Scope => Scope, Context => Dictionary.ProgramContext); Ok := not Dictionary.Is_Null_Symbol (Sym) and then Dictionary.IsPackage (Sym); if not Ok then ErrorHandler.Semantic_Error (Err_Num => 11, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); -- there is no spec, must create one for Dict to add body to later Dictionary.Add_Package (Name => Ident_Str, Comp_Unit => ContextManager.Ops.Current_Unit, Specification => Dictionary.Location'(Start_Position => Node_Position (Node => Ident_Node), End_Position => Node_Position (Node => Ident_Node)), Scope => Scope, ThePackage => Sym); else STree.Set_Node_Lex_String (Sym => Sym, Node => Ident_Node); end if; if Syntax_Node_Type (Node => Next_Sibling (Current_Node => Parent_Node (Current_Node => Ident_Node))) = SP_Symbols.identifier then -- child package form if CommandLineData.Content.Language_Profile = CommandLineData.SPARK83 then ErrorHandler.Semantic_Error (Err_Num => 610, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Sibling (Current_Node => Parent_Node (Current_Node => Ident_Node))), Id_Str => LexTokenManager.Null_String); elsif Ok then loop -- to handle multiple prefixes Ident_Node := Next_Sibling (Current_Node => Parent_Node (Current_Node => Ident_Node)); --# assert Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier and --# STree.Table = STree.Table~; Ident_Str := Node_Lex_String (Node => Ident_Node); Parent_Sym := Sym; Sym := Dictionary.LookupSelectedItem (Prefix => 
Parent_Sym, Selector => Ident_Str, Scope => Scope, Context => Dictionary.ProofContext); if Dictionary.Is_Null_Symbol (Sym) or else not Dictionary.IsPackage (Sym) then ErrorHandler.Semantic_Error (Err_Num => 11, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); -- there is no spec, must create one for Dict to add body to later Dictionary.AddChildPackage (TheParent => Parent_Sym, Sort => Dictionary.Public, Name => Ident_Str, Comp_Unit => ContextManager.Ops.Current_Unit, Specification => Dictionary.Location'(Start_Position => Node_Position (Node => Ident_Node), End_Position => Node_Position (Node => Ident_Node)), Scope => Scope, ThePackage => Sym); Ok := False; exit; end if; STree.Set_Node_Lex_String (Sym => Sym, Node => Ident_Node); exit when Syntax_Node_Type (Node => Next_Sibling (Current_Node => Parent_Node (Current_Node => Ident_Node))) /= SP_Symbols.identifier; -- when no more identifier (s) to right end loop; end if; end if; Found := Ok; The_Package := Sym; end Find_Package; ---------------------------------------------- -- check that all own variables of private children (and their public -- descendents) have appeared as refinement constituents procedure Check_Owned_Packages (Owner : in Dictionary.Symbol; Node : in STree.SyntaxNode) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Owner, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.package_body; is Owned_Packages : Dictionary.Iterator; Own_Vars : Dictionary.Iterator; Pack_Sym : Dictionary.Symbol; Var_Sym : Dictionary.Symbol; function Get_Error_Pos (Node : STree.SyntaxNode) return LexTokenManager.Token_Position --# global in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.package_body; is Err_Node : STree.SyntaxNode; begin Err_Node := Next_Sibling (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME Err_Node = refinement_definition OR package_implementation if Syntax_Node_Type (Node => Err_Node) = SP_Symbols.refinement_definition then -- ASSUME Err_Node = refinement_definition Err_Node := Child_Node (Current_Node => Child_Node (Current_Node => Err_Node)); -- ASSUME Err_Node = refinement_clause_rep OR refinement_clause if Syntax_Node_Type (Node => Err_Node) = SP_Symbols.refinement_clause_rep then -- ASSUME Err_Node = refinement_clause_rep Err_Node := Next_Sibling (Current_Node => Next_Sibling (Current_Node => Err_Node)); end if; -- ASSUME Err_Node = refinement_clause SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Err_Node) = SP_Symbols.refinement_clause, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Err_Node = refinement_clause in Get_Error_Pos"); Err_Node := Next_Sibling (Current_Node => Child_Node (Current_Node => Err_Node)); -- ASSUME Err_Node = constituent_list if Syntax_Node_Type (Node => Err_Node) = SP_Symbols.constituent_list then -- ASSUME Err_Node = constituent_list Err_Node := Last_Sibling_Of (Start_Node => Child_Node (Current_Node => Err_Node)); -- ASSUME Err_Node = entire_variable SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Err_Node) = SP_Symbols.entire_variable, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Err_Node = 
entire_variable in Get_Error_Pos"); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Err_Node = constituent_list in Get_Error_Pos"); end if; elsif Syntax_Node_Type (Node => Err_Node) = SP_Symbols.package_implementation then -- ASSUME Err_Node = package_implementation -- no refinement definition - report at package name Err_Node := Last_Child_Of (Start_Node => Node); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Err_Node = refinement_definition OR package_implementation in Get_Error_Pos"); end if; return Node_Position (Node => Err_Node); end Get_Error_Pos; begin -- Check_Owned_Packages Owned_Packages := Dictionary.FirstOwnedPackage (Owner); while not Dictionary.IsNullIterator (Owned_Packages) loop Pack_Sym := Dictionary.CurrentSymbol (Owned_Packages); Own_Vars := Dictionary.FirstOwnVariable (Pack_Sym); while not Dictionary.IsNullIterator (Own_Vars) loop Var_Sym := Dictionary.CurrentSymbol (Own_Vars); if not Dictionary.IsRefinementConstituent (Owner, Var_Sym) then -- missing own variable ErrorHandler.Semantic_Error_Sym (Err_Num => 621, Reference => ErrorHandler.No_Reference, Position => Get_Error_Pos (Node => Node), Sym => Var_Sym, Scope => Dictionary.GlobalScope); end if; Own_Vars := Dictionary.NextSymbol (Own_Vars); end loop; Owned_Packages := Dictionary.NextSymbol (Owned_Packages); end loop; end Check_Owned_Packages; ---------------------------------------------- procedure Wf_Refine (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.refinement_definition; --# post STree.Table = STree.Table~; is separate; begin -- Wf_Package_Body With_Public_Child := False; Ident_Node := Last_Child_Of (Start_Node => Node); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Wf_Package_Body"); Ident_Str := Node_Lex_String (Node => Ident_Node); Grand_Parent := Parent_Node (Current_Node => Parent_Node (Current_Node => Node)); -- ASSUME Grand_Parent = abody OR subunit OR secondary_unit if Syntax_Node_Type (Node => Grand_Parent) = SP_Symbols.abody then -- ASSUME Grand_Parent = abody With_Node := STree.NullNode; elsif Syntax_Node_Type (Node => Grand_Parent) = SP_Symbols.subunit or else Syntax_Node_Type (Node => Grand_Parent) = SP_Symbols.secondary_unit then -- ASSUME Grand_Parent = subunit OR secondary_unit if Syntax_Node_Type (Node => Grand_Parent) = SP_Symbols.subunit then With_Node := Parent_Node (Current_Node => Grand_Parent); else With_Node := Grand_Parent; end if; -- ASSUME With_Node = secondary_unit SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => With_Node) = SP_Symbols.secondary_unit, Sys_Err => 
SystemErrors.Invalid_Syntax_Tree, Msg => "Expect with_Node = secondary_unit in Wf_Package_Body"); With_Node := Child_Node (Current_Node => Parent_Node (Current_Node => With_Node)); -- ASSUME With_Node = secondary_unit OR context_clause if Syntax_Node_Type (Node => With_Node) = SP_Symbols.secondary_unit then -- ASSUME With_Node = secondary_unit With_Node := STree.NullNode; elsif Syntax_Node_Type (Node => With_Node) /= SP_Symbols.context_clause then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect With_Node = secondary_unit OR context_clause in Wf_Package_Body"); end if; else With_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Grand_Parent = abody OR subunit OR secondary_unit in Wf_Package_Body"); end if; -- ASSUME With_Node = context_clause OR NULL Ok_To_Add_Body := False; Find_Package (Ident_Node => Ident_Node, Ident_Str => Ident_Str, Scope => Scope, Found => Spec_Found, The_Package => Sym); --# assert Syntax_Node_Type (Node, STree.Table) = SP_Symbols.package_body and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier and --# (Syntax_Node_Type (Grand_Parent, STree.Table) = SP_Symbols.abody or --# Syntax_Node_Type (Grand_Parent, STree.Table) = SP_Symbols.subunit or --# Syntax_Node_Type (Grand_Parent, STree.Table) = SP_Symbols.secondary_unit) and --# (Syntax_Node_Type (With_Node, STree.Table) = SP_Symbols.context_clause or With_Node = STree.NullNode) and --# STree.Table = STree.Table~; if not Spec_Found then Ok_To_Add_Body := True; elsif Syntax_Node_Type (Node => Grand_Parent) = SP_Symbols.abody and then Dictionary.HasBodyStub (Sym) then -- ASSUME Grand_Parent = abody ErrorHandler.Semantic_Error (Err_Num => 17, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); elsif Syntax_Node_Type (Node => Grand_Parent) = SP_Symbols.subunit then -- ASSUME Grand_Parent = subunit -- additional if clause to ensure extra package body subunits reported if not Dictionary.HasBodyStub (Sym) then ErrorHandler.Semantic_Error (Err_Num => 15, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); elsif Dictionary.HasBody (Sym) then ErrorHandler.Semantic_Error (Err_Num => 16, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); else Ok_To_Add_Body := True; end if; elsif Dictionary.HasBody (Sym) then ErrorHandler.Semantic_Error (Err_Num => 16, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); else -- no errors found CheckPackageNeedsBody (Node_Pos => Node_Position (Node => Ident_Node), Pack_Sym => Sym); Ok_To_Add_Body := True; end if; Next_Node := Child_Node (Current_Node => Last_Sibling_Of (Start_Node => Child_Node (Current_Node => Node))); -- ASSUME Next_Node = pragma_rep OR hidden_part SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Next_Node) = SP_Symbols.pragma_rep or else Syntax_Node_Type (Node => Next_Node) = SP_Symbols.hidden_part, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = pragma_rep OR hidden_part in Wf_Package_Body"); Body_Is_Hidden := Syntax_Node_Type (Node => Next_Node) = SP_Symbols.hidden_part; --# assert Syntax_Node_Type (Node, STree.Table) = SP_Symbols.package_body and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier and --# (Syntax_Node_Type (With_Node, STree.Table) = SP_Symbols.context_clause or 
With_Node = STree.NullNode) and --# (Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.pragma_rep or --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.hidden_part) and --# STree.Table = STree.Table~; if Ok_To_Add_Body then Dictionary.AddBody (CompilationUnit => Sym, Comp_Unit => ContextManager.Ops.Current_Unit, TheBody => Dictionary.Location'(Start_Position => Node_Position (Node => Ident_Node), End_Position => Node_Position (Node => Ident_Node)), Hidden => Body_Is_Hidden); end if; Pack_Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Sym); --# assert Syntax_Node_Type (Node, STree.Table) = SP_Symbols.package_body and --# (Syntax_Node_Type (With_Node, STree.Table) = SP_Symbols.context_clause or With_Node = STree.NullNode) and --# (Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.pragma_rep or --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.hidden_part) and --# STree.Table = STree.Table~; if Syntax_Node_Type (Node => With_Node) = SP_Symbols.context_clause then -- ASSUME With_Node = context_clause Wf_Context_Clause_Package_Body (Node => With_Node, Comp_Sym => Sym, Scope => Pack_Scope, With_Public_Child => With_Public_Child); end if; --# assert Syntax_Node_Type (Node, STree.Table) = SP_Symbols.package_body and --# (Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.pragma_rep or --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.hidden_part) and --# STree.Table = STree.Table~; Ref_Node := Next_Sibling (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME Ref_Node = refinement_definition OR package_implementation if Syntax_Node_Type (Node => Ref_Node) = SP_Symbols.package_implementation then -- ASSUME Ref_Node = package_implementation Ref_Node := STree.NullNode; elsif Syntax_Node_Type (Node => Ref_Node) /= SP_Symbols.refinement_definition then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ref_Node = refinement_definition OR package_implementation in Wf_Package_Body"); end if; --# check Syntax_Node_Type (Ref_Node, STree.Table) = SP_Symbols.refinement_definition or Ref_Node = STree.NullNode; if Syntax_Node_Type (Node => Ref_Node) = SP_Symbols.refinement_definition then -- ASSUME Ref_Node = refinement_definition Dictionary.AddRefinementDefinition (Sym, Dictionary.Location'(Start_Position => Node_Position (Node => Ref_Node), End_Position => Node_Position (Node => Ref_Node))); Wf_Refine (Node => Ref_Node, Scope => Pack_Scope); end if; --# assert Syntax_Node_Type (Node, STree.Table) = SP_Symbols.package_body and --# (Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.pragma_rep or --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.hidden_part) and --# STree.Table = STree.Table~; if CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83 then Check_Owned_Packages (Owner => Sym, Node => Node); end if; --# assert (Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.pragma_rep or --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.hidden_part) and --# STree.Table = STree.Table~; if Body_Is_Hidden then ErrorHandler.Hidden_Text (Position => Node_Position (Node => Next_Node), Unit_Str => Ident_Str, Unit_Typ => SP_Symbols.package_implementation); end if; Scope := Pack_Scope; end Wf_Package_Body; spark-2012.0.deb/examiner/errorhandler-conversions-tostring-uncondflowerr-uncondflowerrexpl.adb0000644000175000017500000000514311753202337032274 0ustar eugeneugenseparate (ErrorHandler.Conversions.ToString.UncondFlowErr) procedure UncondFlowErrExpl (E_Str : 
in out E_Strings.T) is begin case Err_Type is when ErrorHandler.Expn_Undefined => E_Strings.Append_String (E_Str => E_Str, Str => "The expression may be that in an assignment or return statement," & " an actual parameter," & " or a condition occurring in an if or case statement, an iteration" & " scheme or exit statement. NOTE: the presence of random and possibly invalid values" & " introduced by data flow errors invalidates proof of exception freedom for the" & " subprogram body which contains them. All unconditional data flow errors must be" & " eliminated before attempting exception freedom proofs. See the manual" & " ""SPARK Proof Manual"" for full details."); when ErrorHandler.Stmt_Undefined => E_Strings.Append_String (E_Str => E_Str, Str => "The statement here is a procedure call or an assignment to an array element, and the variable XXX may" & " appear in an actual parameter, whose value is imported when the" & " procedure is executed. If the variable XXX" & " does not occur in the actual parameter list, it is an imported" & " global variable of the procedure (named in its global definition)." & " NOTE: the presence of random and possibly invalid values" & " introduced by data flow errors invalidates proof of exception freedom for the" & " subprogram body which contains them. All unconditional data flow errors must be" & " eliminated before attempting exception freedom proofs. See the manual" & " ""SPARK Proof Manual"" for full details."); when ErrorHandler.Invariant_Exp => E_Strings.Append_String (E_Str => E_Str, Str => "The expression is either a case expression or a condition" & " (Boolean-valued expression) associated with an if-statement, not" & " contained in a loop statement. The message indicates" & " that the expression takes the same value whenever it is evaluated," & " in all program executions. Note that if the expression depends on values obtained" & " by a call to another other subprogram then a possible source for its invariance" & " might be an incorrect annotation on the called subprogram."); when others => null; end case; end UncondFlowErrExpl; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_named_record_component_association.adb0000644000175000017500000002241711753202336031037 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Walk_Expression_P) procedure Wf_Named_Record_Component_Association (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; Heap_Param : in out Lists.List_Heap) is Name_Exp, Field_Name, Exp_Result : Sem.Exp_Record; Expected_Type : Dictionary.Symbol; Error_Found : Boolean := False; Next_Node : STree.SyntaxNode; -------------------------------------------- function Expression_Location (Node : STree.SyntaxNode) return LexTokenManager.Token_Position --# global in STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.named_record_component_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_named_record_component_association; is Local_Node : STree.SyntaxNode; begin Local_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Local_Node = named_record_component_association OR record_component_selector_name OR -- annotation_named_record_component_association if STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.named_record_component_association or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.annotation_named_record_component_association then -- ASSUME Local_Node = named_record_component_association OR annotation_named_record_component_association Local_Node := STree.Next_Sibling (Current_Node => Local_Node); elsif STree.Syntax_Node_Type (Node => Local_Node) /= SP_Symbols.record_component_selector_name then Local_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = named_record_component_association OR record_component_selector_name OR " & "annotation_named_record_component_association in Expression_Location"); end if; -- ASSUME Local_Node = record_component_selector_name SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.record_component_selector_name, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = record_component_selector_name in Expression_Location"); Local_Node := STree.Next_Sibling (Current_Node => Local_Node); -- ASSUME Local_Node = expression OR annotation_expression SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.expression or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.annotation_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = expression OR annotation_expression in Expression_Location"); return STree.Node_Position (Node => Local_Node); end Expression_Location; -------------------------------------------- procedure Check_Record_Completeness (Node : in STree.SyntaxNode; Name_Exp : in out Sem.Exp_Record; Heap_Param : in out Lists.List_Heap; Error_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Heap_Param, --# LexTokenManager.State, --# Name_Exp, --# Node, --# SPARK_IO.File_Sys, --# STree.Table & --# Error_Found from *, --# Dictionary.Dict, --# Heap_Param, --# LexTokenManager.State, --# Name_Exp & --# Heap_Param from *, --# LexTokenManager.State, --# Name_Exp & --# Name_Exp from *; --# pre STree.Syntax_Node_Type (Node, STree.Table) = 
SP_Symbols.named_record_component_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_named_record_component_association; is Field_Str : LexTokenManager.Lex_String; Error_Pos : LexTokenManager.Token_Position; Ptr : Lists.List; Other_Symbol, Type_Symbol : Natural; begin Error_Pos := Expression_Location (Node => Node); Other_Symbol := Dictionary.GetNumberOfComponents (Name_Exp.Other_Symbol); Type_Symbol := Dictionary.GetNumberOfComponents (Name_Exp.Type_Symbol); if Other_Symbol < Natural'Last and then Type_Symbol > Natural'First then for I in Positive range Other_Symbol + 1 .. -- ancestor field count Type_Symbol loop -- total field count Field_Str := Dictionary.GetSimpleName (Dictionary.GetRecordComponent (Name_Exp.Type_Symbol, I)); if not Lists.Is_Member (Heap => Heap_Param, The_List => Name_Exp.Param_List, Str => Field_Str) then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 104, Reference => ErrorHandler.No_Reference, Position => Error_Pos, Id_Str => Field_Str); end if; end loop; end if; Ptr := Name_Exp.Param_List; Dispose_Of_Name_List (List => Ptr, Heap_Param => Heap_Param); Name_Exp.Param_List := Ptr; end Check_Record_Completeness; begin -- Wf_Named_Record_Component_Association -- TOS is the result of walking an expression to be associated with a record field name -- 2nd TOS is the field name in a parameter record -- 3rd TOS is the aggregate type with the ancestor type in its OtherSymbol field Exp_Stack.Pop (Item => Exp_Result, Stack => E_Stack); Exp_Stack.Pop (Item => Field_Name, Stack => E_Stack); Exp_Stack.Pop (Item => Name_Exp, Stack => E_Stack); if not Dictionary.Is_Null_Symbol (Field_Name.Other_Symbol) then Expected_Type := Dictionary.GetType (Field_Name.Other_Symbol); STree.Add_Node_Symbol (Node => Node, Sym => Expected_Type); Sem.Assignment_Check (Position => Expression_Location (Node => Node), Scope => Scope, Target_Type => Expected_Type, Exp_Result => Exp_Result); Name_Exp.Is_Constant := Name_Exp.Is_Constant and then Exp_Result.Is_Constant; Next_Node := STree.Next_Sibling (Current_Node => Node); -- ASSUME Next_Node = record_component_selector_name OR NULL if Next_Node = STree.NullNode then -- ASSUME Next_Node = NULL -- this is the last named association so we need to check that -- all fields have been given a value Check_Record_Completeness (Node => Node, Name_Exp => Name_Exp, Heap_Param => Heap_Param, Error_Found => Error_Found); elsif STree.Syntax_Node_Type (Node => Next_Node) /= SP_Symbols.record_component_selector_name then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = record_component_selector_name OR NULL in Wf_Named_Record_Component_Association"); end if; end if; Name_Exp.Errors_In_Expression := Error_Found or else Name_Exp.Errors_In_Expression or else Exp_Result.Errors_In_Expression; Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); end Wf_Named_Record_Component_Association; spark-2012.0.deb/examiner/dag-buildexpndag-upattributedesignator.adb0000644000175000017500000007614511753202336024563 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (DAG.BuildExpnDAG) procedure UpAttributeDesignator (Node : in STree.SyntaxNode) is ExpnFound, BaseFound : Boolean; TempCell, PrefixCell, AttribCell, ExpnCell, SecondExpnCell : Cells.Cell; LexStr, AttribName : LexTokenManager.Lex_String; PrefixType : Dictionary.Symbol; ExpnNode : STree.SyntaxNode; ------------------------------------------------------- procedure EliminateBase (TOS : in Cells.Cell) --# global in PrefixCell; --# in out VCGHeap; --# derives VCGHeap from *, --# PrefixCell, --# TOS; is BaseCell : Cells.Cell; begin BaseCell := LeftPtr (VCGHeap, TOS); if Cells.Get_Kind (VCGHeap, BaseCell) = Cell_Storage.Op then -- 'Base exists Cells.Dispose_Of_Cell (VCGHeap, RightPtr (VCGHeap, BaseCell)); Cells.Dispose_Of_Cell (VCGHeap, BaseCell); SetLeftArgument (TOS, PrefixCell, VCGHeap); end if; end EliminateBase; ------------------------------------------------------- procedure ModelSimpleFunctionAttribute (StripToRootType : in Boolean) --# global in AttribCell; --# in Dictionary.Dict; --# in ExpnCell; --# in ExpnStack; --# in PrefixCell; --# in out VCGHeap; --# derives VCGHeap from *, --# AttribCell, --# Dictionary.Dict, --# ExpnCell, --# ExpnStack, --# PrefixCell, --# StripToRootType; is begin EliminateBase (CStacks.Top (VCGHeap, ExpnStack)); -- Most attributes are modelled in FDL by reference to the -- underlying root type. Most notably, 'Valid is always -- in terms of the indicated sub-type (see LRM 13.9.1(2)) so we need -- the option here to use the Root Type or not. 
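-- Illustration (hypothetical subtype, not taken from the toolset): given
-- "subtype S is Integer range 1 .. 10", an attribute modelled with
-- StripToRootType => True is expressed in terms of Integer, the root type of S,
-- whereas X'Valid for an X of subtype S keeps S itself, per LRM 13.9.1(2).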
if StripToRootType then Cells.Set_Symbol_Value (VCGHeap, PrefixCell, Dictionary.GetRootType (Cells.Get_Symbol_Value (VCGHeap, PrefixCell))); end if; Cells.Set_Kind (VCGHeap, AttribCell, Cell_Storage.Attrib_Function); SetRightArgument (AttribCell, ExpnCell, VCGHeap); end ModelSimpleFunctionAttribute; ------------------------------------------------------- procedure ModelMinMax --# global in AttribCell; --# in Dictionary.Dict; --# in ExpnCell; --# in ExpnStack; --# in PrefixCell; --# in SecondExpnCell; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives Statistics.TableUsage from *, --# VCGHeap & --# VCGHeap from *, --# AttribCell, --# Dictionary.Dict, --# ExpnCell, --# ExpnStack, --# PrefixCell, --# SecondExpnCell; is CommaCell : Cells.Cell; begin CreateOpCell (CommaCell, VCGHeap, SP_Symbols.comma); EliminateBase (CStacks.Top (VCGHeap, ExpnStack)); Cells.Set_Symbol_Value (VCGHeap, PrefixCell, Dictionary.GetRootType (Cells.Get_Symbol_Value (VCGHeap, PrefixCell))); Cells.Set_Kind (VCGHeap, AttribCell, Cell_Storage.Attrib_Function); SetLeftArgument (CommaCell, ExpnCell, VCGHeap); SetRightArgument (CommaCell, SecondExpnCell, VCGHeap); SetRightArgument (AttribCell, CommaCell, VCGHeap); end ModelMinMax; ------------------------------------------------------- procedure ModelLengthAttribute --# global in Dictionary.Dict; --# in out ExpnStack; --# in out LexTokenManager.State; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# Dictionary.Dict, --# ExpnStack, --# LexTokenManager.State, --# VCGHeap & --# LexTokenManager.State from *; is OneCell, HighEndCell, LowEndCell : Cells.Cell; TypeSym : Dictionary.Symbol; LexStr : LexTokenManager.Lex_String; ---------------------------------------------- procedure InsertPos --# global in TypeSym; --# in out HighEndCell; --# in out LowEndCell; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives HighEndCell from TypeSym, --# VCGHeap & --# LowEndCell from HighEndCell, --# TypeSym, --# VCGHeap & --# Statistics.TableUsage from *, --# HighEndCell, --# TypeSym, --# VCGHeap & --# VCGHeap from *, --# HighEndCell, --# LowEndCell, --# TypeSym; is PosCell : Cells.Cell; -------------------------------------- begin CreateAttribFunctionCell (LexTokenManager.Pos_Token, TypeSym, VCGHeap, -- to get PosCell); SetRightArgument (RightPtr (VCGHeap, PosCell), HighEndCell, VCGHeap); HighEndCell := PosCell; CreateAttribFunctionCell (LexTokenManager.Pos_Token, TypeSym, VCGHeap, --to get PosCell); SetRightArgument (RightPtr (VCGHeap, PosCell), LowEndCell, VCGHeap); LowEndCell := PosCell; end InsertPos; ---------------------------------------------- begin --ModelLengthAttribute CStacks.PopOff (VCGHeap, ExpnStack, HighEndCell); Structures.CopyStructure (VCGHeap, HighEndCell, LowEndCell); Cells.Set_Lex_Str (VCGHeap, RightPtr (VCGHeap, HighEndCell), LexTokenManager.Last_Token); Cells.Set_Lex_Str (VCGHeap, RightPtr (VCGHeap, LowEndCell), LexTokenManager.First_Token); TypeSym := Cells.Get_Symbol_Value (VCGHeap, LeftPtr (VCGHeap, HighEndCell)); if Dictionary.IsTypeMark (TypeSym) and then Dictionary.TypeIsEnumeration (TypeSym) then InsertPos; end if; CStacks.Push (VCGHeap, HighEndCell, ExpnStack); CStacks.Push (VCGHeap, LowEndCell, ExpnStack); PushOperator (Binary, SP_Symbols.minus, VCGHeap, ExpnStack); LexTokenManager.Insert_Nat (N => 1, Lex_Str => LexStr); CreateManifestConstCell (OneCell, VCGHeap, LexStr); CStacks.Push (VCGHeap, OneCell, ExpnStack); PushOperator (Binary, SP_Symbols.plus, 
VCGHeap, ExpnStack); end ModelLengthAttribute; ---------------------------------------------- -- Ada2005 introduces the functional attribute T'mod(). -- The Examiner transforms expressions with this attribute to -- mod T'modulus. procedure ModelModFunctionAttribute (Type_Sym : in Dictionary.Symbol) --# global in Dictionary.Dict; --# in ExpnCell; --# in PrefixCell; --# in out ExpnStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives ExpnStack, --# Statistics.TableUsage, --# VCGHeap from *, --# Dictionary.Dict, --# ExpnCell, --# ExpnStack, --# PrefixCell, --# Type_Sym, --# VCGHeap; is TypeCell, AttribValueCell, ModOpCell, TickCell, TempCell : Cells.Cell; begin EliminateBase (CStacks.Top (VCGHeap, ExpnStack)); -- Create the DAG for the the functional attribute. -- The root of the DAG is "Mod", the left child -- is the attribute's argument and the right child is the -- DAG representing T'modulus. -- Root "Mod" cell. CreateOpCell (ModOpCell, VCGHeap, SP_Symbols.RWmod); -- Left child SetLeftArgument (ModOpCell, ExpnCell, VCGHeap); -- Right child which represents T'Modulus. CreateOpCell (TickCell, VCGHeap, SP_Symbols.apostrophe); SetRightArgument (ModOpCell, TickCell, VCGHeap); CreateFixedVarCell (TypeCell, VCGHeap, Dictionary.GetRootType (Type_Sym)); SetLeftArgument (TickCell, TypeCell, VCGHeap); CreateAttribValueCell (AttribValueCell, VCGHeap, LexTokenManager.Modulus_Token); SetRightArgument (TickCell, AttribValueCell, VCGHeap); -- Update the ExpnStack after processing the attribute. CStacks.PopOff (VCGHeap, ExpnStack, TempCell); Cells.Dispose_Of_Cell (VCGHeap, LeftPtr (VCGHeap, TempCell)); Cells.Dispose_Of_Cell (VCGHeap, RightPtr (VCGHeap, TempCell)); Cells.Dispose_Of_Cell (VCGHeap, TempCell); CStacks.Push (VCGHeap, ModOpCell, ExpnStack); end ModelModFunctionAttribute; ---------------------------------------------- procedure CreatePredSuccConstraint (Expr : in Cells.Cell; Type_Sym : in Dictionary.Symbol; AttribName : in LexTokenManager.Lex_String; Check_Cell : out Cells.Cell) --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives Check_Cell from AttribName, --# Dictionary.Dict, --# LexTokenManager.State, --# Type_Sym, --# VCGHeap & --# Statistics.TableUsage from *, --# AttribName, --# Dictionary.Dict, --# LexTokenManager.State, --# Type_Sym, --# VCGHeap & --# VCGHeap from *, --# AttribName, --# Dictionary.Dict, --# Expr, --# LexTokenManager.State, --# Type_Sym; is TypeLimit, NotEqualsCell, BaseCell, AttribCell : Cells.Cell; begin -- create BaseCell for Type_Sym CreateFixedVarCell (BaseCell, VCGHeap, Dictionary.GetRootType (Type_Sym)); -- Create TypeLimit as apostrophe (BaseCell, first) or -- apostrophe (BaseCell, last) depending on AttribName CreateCellKind (AttribCell, VCGHeap, Cell_Storage.Attrib_Value); CreateOpCell (TypeLimit, VCGHeap, SP_Symbols.apostrophe); SetLeftArgument (TypeLimit, BaseCell, VCGHeap); SetRightArgument (TypeLimit, AttribCell, VCGHeap); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => AttribName, Lex_Str2 => LexTokenManager.Pred_Token) = LexTokenManager.Str_Eq then Cells.Set_Lex_Str (VCGHeap, RightPtr (VCGHeap, TypeLimit), LexTokenManager.First_Token); else Cells.Set_Lex_Str (VCGHeap, RightPtr (VCGHeap, TypeLimit), LexTokenManager.Last_Token); end if; -- create inequality CreateOpCell (NotEqualsCell, VCGHeap, SP_Symbols.not_equal); SetRightArgument (NotEqualsCell, TypeLimit, VCGHeap); SetLeftArgument (NotEqualsCell, Expr, VCGHeap); Check_Cell := NotEqualsCell; 
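-- Illustration (hypothetical type T, not taken from the toolset): for
-- T'Pred (X) the constraint built above is "X /= T'First", and for
-- T'Succ (X) it is "X /= T'Last", with T taken as the root type of the prefix.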
end CreatePredSuccConstraint; procedure CheckPredSuccConstraint (Type_Sym : in Dictionary.Symbol; Expr : in Cells.Cell; AttribName : in LexTokenManager.Lex_String) --# global in Dictionary.Dict; --# in DoRtc; --# in LexTokenManager.State; --# in out CheckStack; --# in out ShortCircuitStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives CheckStack, --# ShortCircuitStack, --# Statistics.TableUsage from *, --# AttribName, --# Dictionary.Dict, --# DoRtc, --# Expr, --# LexTokenManager.State, --# ShortCircuitStack, --# Type_Sym, --# VCGHeap & --# VCGHeap from *, --# AttribName, --# CheckStack, --# Dictionary.Dict, --# DoRtc, --# Expr, --# LexTokenManager.State, --# ShortCircuitStack, --# Type_Sym; is Check_Cell, Exp_Copy : Cells.Cell; begin if DoRtc and then not IsModularType (Type_Sym) then -- no need to check Type_Sym as SPARK's static-semantics -- allows only discrete non-Boolean types here and then -- DiscreteTypeWithCheck (Type_Sym) -- make a copy of Expr Structures.CopyStructure (VCGHeap, Expr, Exp_Copy); CreatePredSuccConstraint (Exp_Copy, Type_Sym, AttribName, Check_Cell); PlantCheckStatement (Check_Cell, VCGHeap, ShortCircuitStack, CheckStack); end if; end CheckPredSuccConstraint; procedure CreateValConstraint (Expr : in Cells.Cell; Type_Sym : in Dictionary.Symbol; Check_Cell : out Cells.Cell) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives Check_Cell, --# VCGHeap from Dictionary.Dict, --# Expr, --# Type_Sym, --# VCGHeap & --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Expr, --# Type_Sym, --# VCGHeap; is RelOperationLHS, RelOperationRHS, MiddleOperator : SP_Symbols.SP_Symbol; TypeFiLimit, TypeLaLimit, PosFiCell, PosLaCell, BaseCell, AttribCell, LeftAnd, RightAnd, rangeDAG : Cells.Cell; Root_Type_Sym : Dictionary.Symbol; begin -- create BaseCell for Type_Sym Root_Type_Sym := Dictionary.GetRootType (Type_Sym); CreateFixedVarCell (BaseCell, VCGHeap, Root_Type_Sym); RelOperationLHS := SP_Symbols.greater_or_equal; RelOperationRHS := SP_Symbols.less_or_equal; MiddleOperator := SP_Symbols.RWand; -- Create TypeFiLimit as apostrophe (BaseCell, first) CreateCellKind (AttribCell, VCGHeap, Cell_Storage.Attrib_Value); CreateOpCell (TypeFiLimit, VCGHeap, SP_Symbols.apostrophe); SetLeftArgument (TypeFiLimit, BaseCell, VCGHeap); SetRightArgument (TypeFiLimit, AttribCell, VCGHeap); Cells.Set_Lex_Str (VCGHeap, RightPtr (VCGHeap, TypeFiLimit), LexTokenManager.First_Token); -- Create TypeLaLimit as apostrophe (BaseCell, last) Structures.CopyStructure (VCGHeap, TypeFiLimit, TypeLaLimit); Cells.Set_Lex_Str (VCGHeap, RightPtr (VCGHeap, TypeLaLimit), LexTokenManager.Last_Token); if Dictionary.TypeIsEnumeration (Root_Type_Sym) and then not Dictionary.TypeIsCharacter (Root_Type_Sym) then --------------------------------------------------------- -- For enumeration types that AREN'T Character, we -- build -- X >= T'Pos (T'First) and X <= T'Pos (T'Last) --------------------------------------------------------- -- Create PosFiCell from TypeFiLimit CreateAttribFunctionCell (LexTokenManager.Pos_Token, Root_Type_Sym, VCGHeap, PosFiCell); SetRightArgument (RightPtr (VCGHeap, PosFiCell), TypeFiLimit, VCGHeap); -- Create PosLaCell from TypeLaLimit CreateAttribFunctionCell (LexTokenManager.Pos_Token, Root_Type_Sym, VCGHeap, PosLaCell); SetRightArgument (RightPtr (VCGHeap, PosLaCell), TypeLaLimit, VCGHeap); -- create left-hand of AND CreateOpCell (LeftAnd, VCGHeap, RelOperationLHS); SetRightArgument (LeftAnd, PosFiCell, VCGHeap); 
SetLeftArgument (LeftAnd, Expr, VCGHeap); -- create right-hand of AND CreateOpCell (RightAnd, VCGHeap, RelOperationRHS); SetRightArgument (RightAnd, PosLaCell, VCGHeap); SetLeftArgument (RightAnd, Expr, VCGHeap); else --------------------------------------------------------- -- For integer (signed or modular) and Character types, -- we know that -- T'Pos (X) = T'Val (X) = X -- so we simply build -- X >= T'First and X <= T'Last --------------------------------------------------------- -- create left-hand of AND CreateOpCell (LeftAnd, VCGHeap, RelOperationLHS); SetRightArgument (LeftAnd, TypeFiLimit, VCGHeap); SetLeftArgument (LeftAnd, Expr, VCGHeap); -- create right-hand of AND CreateOpCell (RightAnd, VCGHeap, RelOperationRHS); SetRightArgument (RightAnd, TypeLaLimit, VCGHeap); SetLeftArgument (RightAnd, Expr, VCGHeap); end if; -- form conjunction of the two constraints; CreateOpCell (rangeDAG, VCGHeap, MiddleOperator); SetRightArgument (rangeDAG, RightAnd, VCGHeap); SetLeftArgument (rangeDAG, LeftAnd, VCGHeap); Check_Cell := rangeDAG; end CreateValConstraint; procedure CheckValConstraint (Type_Sym : in Dictionary.Symbol; Expr : in Cells.Cell) --# global in Dictionary.Dict; --# in DoRtc; --# in out CheckStack; --# in out ShortCircuitStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives CheckStack, --# ShortCircuitStack, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# DoRtc, --# Expr, --# ShortCircuitStack, --# Type_Sym, --# VCGHeap & --# VCGHeap from *, --# CheckStack, --# Dictionary.Dict, --# DoRtc, --# Expr, --# ShortCircuitStack, --# Type_Sym; is Check_Cell, Exp_Copy : Cells.Cell; begin if DoRtc then -- No need to check Type_Sym as SPARK's static-semantics -- allows only discrete non-Boolean types here -- and then DiscreteTypeWithCheck (Type_Sym) -- make a copy of Expr Structures.CopyStructure (VCGHeap, Expr, Exp_Copy); CreateValConstraint (Exp_Copy, Type_Sym, Check_Cell); PlantCheckStatement (Check_Cell, VCGHeap, ShortCircuitStack, CheckStack); end if; end CheckValConstraint; begin -- UpProcessAttributeDesignator -- If there are any expression associated with the attribute they will be TOS -- Below it (or TOS if there is no expression) is a DAG representing the attribute -- move to where first expression would be if there is one ExpnNode := STree.Child_Node (Current_Node => STree.Last_Sibling_Of (Start_Node => STree.Child_Node (Current_Node => Node))); --# assert True; -- Check for second expression if ExpnNode /= STree.NullNode and then STree.Next_Sibling (Current_Node => ExpnNode) /= STree.NullNode then -- There is a 2nd expression associated with attribute CStacks.PopOff (VCGHeap, ExpnStack, SecondExpnCell); else SecondExpnCell := Cells.Null_Cell; end if; --# assert True; -- then check for first expression if ExpnNode /= STree.NullNode then -- There is a 1st expression associated with attribute CStacks.PopOff (VCGHeap, ExpnStack, ExpnCell); ExpnFound := True; else ExpnFound := False; ExpnCell := Cells.Null_Cell; end if; --# assert True; PrefixCell := LeftPtr (VCGHeap, CStacks.Top (VCGHeap, ExpnStack)); if Cells.Get_Kind (VCGHeap, PrefixCell) = Cell_Storage.Op then --must be a 'BASE PrefixCell := LeftPtr (VCGHeap, PrefixCell); BaseFound := True; else BaseFound := False; end if; -- If no expression forms part of the attribute we -- now need to make a copy of the prefix for possible use in modelling 'valid. -- This is because fdl model of valid takes an argument which is created from the -- prefix to the attribute. 
By the time we know we are modelling 'valid this prefix -- subtree may have been patched with type information extracted from the syntax tree --# assert True; if not ExpnFound then Structures.CopyStructure (VCGHeap, PrefixCell, -- to get ExpnCell); end if; -- Recover type planted in syntax tree by wellformation checker. -- For all cases except attributes of unconstrained objects, this will be a type mark. -- For attributes of constrained array objects the wffs will have resolved all such -- things as dimension number arguments and will have planted the appropriate type. -- For unconstrained objects only, the wffs will plant a symbol of a special kind -- (ParameterConstraintSymbol) associated with the object. This special symbol kind -- behaves for all practical purposes like a type except that we typically don't -- know its bounds. PrefixType := STree.NodeSymbol (Node); Cells.Set_Kind (VCGHeap, PrefixCell, Cell_Storage.Fixed_Var); Cells.Set_Symbol_Value (VCGHeap, PrefixCell, PrefixType); if Dictionary.IsParameterConstraint (PrefixType) then -- If prefix is unconstrained object then make cell an UnconstrainedAttributePrefix to allow special -- formal-to-actual substitution in procedure and function call pre con and proc call post con checks Cells.Set_Kind (VCGHeap, PrefixCell, Cell_Storage.Unconstrained_Attribute_Prefix); end if; -- make leaf SetLeftArgument (PrefixCell, Cells.Null_Cell, VCGHeap); SetRightArgument (PrefixCell, Cells.Null_Cell, VCGHeap); SetAuxPtr (PrefixCell, Cells.Null_Cell, VCGHeap); AttribCell := RightPtr (VCGHeap, CStacks.Top (VCGHeap, ExpnStack)); AttribName := Cells.Get_Lex_Str (VCGHeap, AttribCell); --# assert True; if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => AttribName, Lex_Str2 => LexTokenManager.Pos_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => AttribName, Lex_Str2 => LexTokenManager.Val_Token) = LexTokenManager.Str_Eq then if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => AttribName, Lex_Str2 => LexTokenManager.Val_Token) = LexTokenManager.Str_Eq then CheckValConstraint (PrefixType, ExpnCell); end if; if Dictionary.TypeIsEnumeration (PrefixType) and then not Dictionary.TypeIsCharacter (PrefixType) then -- Enumeration type but NOT character - model as an FDL -- function. 
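-- Illustration (hypothetical enumeration, not taken from the toolset): for
-- "type Colour is (Red, Green, Blue)", Colour'Pos (C) is retained as an FDL
-- attribute function of the root type, whereas for integer, modular and
-- Character prefixes the attribute is simply discarded below, since there
-- X'Pos (Y) = X'Val (Y) = Y.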
ModelSimpleFunctionAttribute (StripToRootType => True); else -- must be discrete numeric type or character so simply discard attribute, -- since for all integer (signed or modular) and Character types X (or subtypes -- thereof...), X'Pos (Y) = X'Val (Y) = Y EliminateBase (CStacks.Top (VCGHeap, ExpnStack)); CStacks.PopOff (VCGHeap, ExpnStack, TempCell); Cells.Dispose_Of_Cell (VCGHeap, LeftPtr (VCGHeap, TempCell)); Cells.Dispose_Of_Cell (VCGHeap, RightPtr (VCGHeap, TempCell)); Cells.Dispose_Of_Cell (VCGHeap, TempCell); CStacks.Push (VCGHeap, ExpnCell, ExpnStack); end if; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => AttribName, Lex_Str2 => LexTokenManager.Floor_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => AttribName, Lex_Str2 => LexTokenManager.Ceiling_Token) = LexTokenManager.Str_Eq then ModelSimpleFunctionAttribute (StripToRootType => True); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => AttribName, Lex_Str2 => LexTokenManager.Pred_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => AttribName, Lex_Str2 => LexTokenManager.Succ_Token) = LexTokenManager.Str_Eq then EliminateBase (CStacks.Top (VCGHeap, ExpnStack)); CStacks.PopOff (VCGHeap, ExpnStack, TempCell); Cells.Dispose_Of_Cell (VCGHeap, LeftPtr (VCGHeap, TempCell)); Cells.Dispose_Of_Cell (VCGHeap, RightPtr (VCGHeap, TempCell)); Cells.Dispose_Of_Cell (VCGHeap, TempCell); CStacks.Push (VCGHeap, ExpnCell, ExpnStack); CheckPredSuccConstraint (PrefixType, ExpnCell, AttribName); --# assert True; if Dictionary.TypeIsEnumeration (PrefixType) then if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => AttribName, Lex_Str2 => LexTokenManager.Succ_Token) = LexTokenManager.Str_Eq then PushFunction (Cell_Storage.Succ_Function, VCGHeap, ExpnStack); else PushFunction (Cell_Storage.Pred_Function, VCGHeap, ExpnStack); end if; else -- must be discrete numeric type so use + or - instead LexTokenManager.Insert_Nat (N => 1, Lex_Str => LexStr); CreateManifestConstCell (TempCell, VCGHeap, LexStr); CStacks.Push (VCGHeap, TempCell, ExpnStack); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => AttribName, Lex_Str2 => LexTokenManager.Succ_Token) = LexTokenManager.Str_Eq then PushOperator (Binary, SP_Symbols.plus, VCGHeap, ExpnStack); else PushOperator (Binary, SP_Symbols.minus, VCGHeap, ExpnStack); end if; ModularizeIfNeeded (PrefixType, VCGHeap, ExpnStack); end if; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => AttribName, Lex_Str2 => LexTokenManager.First_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => AttribName, Lex_Str2 => LexTokenManager.Last_Token) = LexTokenManager.Str_Eq then if BaseFound and then Dictionary.TypeIsEnumeration (PrefixType) then Cells.Set_Symbol_Value (VCGHeap, PrefixCell, Dictionary.GetRootType (PrefixType)); EliminateBase (CStacks.Top (VCGHeap, ExpnStack)); end if; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => AttribName, Lex_Str2 => LexTokenManager.Range_Token) = LexTokenManager.Str_Eq then TransformRangeConstraint (VCGHeap, ExpnStack); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => AttribName, Lex_Str2 => LexTokenManager.Length_Token) = LexTokenManager.Str_Eq then ModelLengthAttribute; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => AttribName, Lex_Str2 => LexTokenManager.Max_Token) = 
LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => AttribName, Lex_Str2 => LexTokenManager.Min_Token) = LexTokenManager.Str_Eq then ModelMinMax; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => AttribName, Lex_Str2 => LexTokenManager.Valid_Token) = LexTokenManager.Str_Eq then -- support for 'Valid in SPARK95 -- using the ExpnCell which is a copy of the prefix -- to the attribute made earlier -- -- Data validity is defined in terms of the indicated sub-type -- (LRM 13.9.1(2)), so we don't strip to the root type in this case ModelSimpleFunctionAttribute (StripToRootType => False); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => AttribName, Lex_Str2 => LexTokenManager.Mod_Token) = LexTokenManager.Str_Eq then ModelModFunctionAttribute (PrefixType); else -- its a non-function, non-substitutable attribute if Cells.Get_Kind (VCGHeap, PrefixCell) = Cell_Storage.Reference then Cells.Set_Kind (VCGHeap, PrefixCell, Cell_Storage.Fixed_Var); end if; end if; end UpAttributeDesignator; spark-2012.0.deb/examiner/dag-substitutions.adb0000644000175000017500000003151111753202336020412 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (DAG) package body Substitutions is procedure Substitute_Parameters (Called_Function : in Cells.Cell; Constraint : in out Cells.Cell; VCG_Heap : in out Cells.Heap_Record) is Index_Subtype, P, Persistent_Actual : Cells.Cell; S : CStacks.Stack; Function_Sym, Var_Sym : Dictionary.Symbol; Function_Dec : Dictionary.Symbol; Change : Boolean; -- Walks the data structure produced by Setup_Function_Call to -- find the actual parameter DAG associated with a particular -- function parameter number of the called function. function Get_Actual (Called_Function : in Cells.Cell; Arg_No : in Positive) return Cells.Cell --# global in VCG_Heap; is DAGCell : Cells.Cell; begin DAGCell := Called_Function; for I in Positive range 1 .. 
Arg_No loop DAGCell := DAG.RightPtr (VCG_Heap, DAGCell); end loop; if Cells.Get_Kind (VCG_Heap, DAGCell) = Cell_Storage.Op and then Cells.Get_Op_Symbol (VCG_Heap, DAGCell) = SP_Symbols.comma then DAGCell := DAG.LeftPtr (VCG_Heap, DAGCell); end if; return DAGCell; end Get_Actual; -- Similar to Get_Actual above but returns the constraining -- index type associated with a constrained actual parameter -- associated with an unconstrained formal parameter procedure Get_Actual_Constraint (Called_Function : in Cells.Cell; Sym : in Dictionary.Symbol; Change : out Boolean; Result : out Cells.Cell) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# in out VCG_Heap; --# derives Change, --# Result, --# VCG_Heap from Called_Function, --# Dictionary.Dict, --# Sym, --# VCG_Heap & --# Statistics.TableUsage from *, --# Called_Function, --# Dictionary.Dict, --# Sym, --# VCG_Heap; is Arg_No : Positive; LResult : Cells.Cell; ActualCell : Cells.Cell; ConstraintCell : Cells.Cell; Constraint_Sym : Dictionary.Symbol; Function_Sym : Dictionary.Symbol; Object_Sym : Dictionary.Symbol; ArrayDimension : Positive; begin -- The Sym passed to this routine will be a -- Dictionary.ParameterConstraintSymbol. From this we can -- obtain the object itself and the dimension of that object -- that appears in the expression we may be making -- substitutions to. Function_Sym := Cells.Get_Symbol_Value (VCG_Heap, Called_Function); Object_Sym := Dictionary.GetParameterAssociatedWithParameterConstraint (Sym); if Dictionary.IsFormalParameter (Function_Sym, Object_Sym) then -- There may be something to do. Only in the case of -- formal/actual parameter matching can constraints be -- introduced and constraint substitution requires. If -- Object_Sym is global to Function_Sym (as it may be -- with nested subprogram calls) then the constraint will -- left unchanged Change := True; ArrayDimension := Dictionary.GetSubprogramParameterConstraintDimension (Sym); Arg_No := Dictionary.GetSubprogramParameterNumber (Object_Sym); ActualCell := Get_Actual (Called_Function => Called_Function, Arg_No => Arg_No); ConstraintCell := DAG.AuxPtr (VCG_Heap, ActualCell); Cells.Create_Cell (VCG_Heap, LResult); if Cells.Is_Null_Cell (ConstraintCell) then Cells.Copy_Contents (VCG_Heap, ActualCell, -- no constraint present LResult); else Cells.Copy_Contents (VCG_Heap, ConstraintCell, LResult); end if; -- LResult contains either: -- -- (1) an array subtype symbol in the case where the -- actual paramater is of a constrained array subtype -- -- (2) a scalar index type symbol in the case of a string -- literal being passed to string -- -- (3) a symbol of a subprogram parameter in the case -- where the actual parameter is also an unconstrained -- array and no constraint has been planted (this final -- behaviour occurs because GetConstraintCell returns the -- actual parameter DAG if no constraint is present) Constraint_Sym := Cells.Get_Symbol_Value (VCG_Heap, LResult); if Dictionary.IsSubprogramParameter (Constraint_Sym) then -- Case 3. We substitute "actual__index__subtype__n" for "formal__index__subtype__n" Cells.Set_Symbol_Value (VCG_Heap, LResult, Dictionary.GetSubprogramParameterConstraint (Constraint_Sym, ArrayDimension)); elsif Dictionary.TypeIsArray (Constraint_Sym) then -- Case 2. We substitute array index n of constraining subtype for "formal__index__subtype__n" Cells.Set_Symbol_Value (VCG_Heap, LResult, Dictionary.GetArrayIndex (Constraint_Sym, ArrayDimension)); else -- Case 1. 
we already have the constraining index directly null; end if; else -- Not a formal parameter so leave constraint unchanged. LResult := Cells.Null_Cell; Change := False; end if; Result := LResult; end Get_Actual_Constraint; begin -- Substitute_Parameters -- DAG traversal algorithm of D.E. Knuth, Fundamental Algorithms, p.317; Function_Sym := Cells.Get_Symbol_Value (VCG_Heap, Called_Function); if Dictionary.IsImplicitProofFunction (Function_Sym) then Function_Dec := Dictionary.GetAdaFunction (Function_Sym); else Function_Dec := Function_Sym; end if; CStacks.CreateStack (S); P := Constraint; loop loop exit when Cells.Is_Null_Cell (P); CStacks.Push (VCG_Heap, P, S); if DAG.Is_Leaf (Node => P, VCG_Heap => VCG_Heap) then P := Cells.Null_Cell; else P := DAG.LeftPtr (VCG_Heap, P); end if; end loop; exit when CStacks.IsEmpty (S); P := CStacks.Top (VCG_Heap, S); CStacks.Pop (VCG_Heap, S); if DAG.Is_Leaf (Node => P, VCG_Heap => VCG_Heap) then Var_Sym := Cells.Get_Symbol_Value (VCG_Heap, P); if Cells.Get_Kind (VCG_Heap, P) = Cell_Storage.Reference then if Dictionary.IsFormalParameter (Function_Dec, Var_Sym) then -- A persistent copy of the actual parameter has to -- be made from the temporary version which exists -- on the ExpnStack. Structures.CopyStructure (VCG_Heap, Get_Actual (Called_Function => Called_Function, Arg_No => Dictionary.GetSubprogramParameterNumber (Var_Sym)), Persistent_Actual); Cells.Copy_Contents (VCG_Heap, Persistent_Actual, P); end if; elsif Cells.Get_Kind (VCG_Heap, P) = Cell_Storage.Unconstrained_Attribute_Prefix then Get_Actual_Constraint (Called_Function, Var_Sym, -- to get Change, Index_Subtype); if Change then Cells.Copy_Contents (VCG_Heap, Index_Subtype, P); end if; end if; P := Cells.Null_Cell; else P := DAG.RightPtr (VCG_Heap, P); end if; end loop; --# accept F, 31, Constraint, "Constraint is updated indirectly via local pointer P" & --# F, 50, Constraint, Dictionary.Dict, "Indirectly used via local pointer P" & --# F, 50, Constraint, VCG_Heap, "Indirectly used via local pointer P" & --# F, 50, Constraint, Called_Function, "Indirectly used via local pointer P" & --# W, 3, "Suppress warnings on Constraint"; -- Constraint appears to be just an input but is actually exported. -- (It is effectively a pointer to a data structure which is updated). pragma Warnings (Off, Constraint); end Substitute_Parameters; procedure Substitute_Implicit_Vars (Proof_Function : in Cells.Cell; Implicit_Var : in Dictionary.Symbol; Implicit_Return_Expr : in out Cells.Cell; VCG_Heap : in out Cells.Heap_Record) is P, Persistent_Call : Cells.Cell; S : CStacks.Stack; Var_Sym : Dictionary.Symbol; begin -- DAG traversal algorithm of D.E. Knuth, Fundamental Algorithms, p.317; CStacks.CreateStack (S); P := Implicit_Return_Expr; loop loop exit when Cells.Is_Null_Cell (P); CStacks.Push (VCG_Heap, P, S); if DAG.Is_Leaf (Node => P, VCG_Heap => VCG_Heap) then P := Cells.Null_Cell; else P := DAG.LeftPtr (VCG_Heap, P); end if; end loop; exit when CStacks.IsEmpty (S); P := CStacks.Top (VCG_Heap, S); CStacks.Pop (VCG_Heap, S); if DAG.Is_Leaf (Node => P, VCG_Heap => VCG_Heap) then Var_Sym := Cells.Get_Symbol_Value (VCG_Heap, P); if not Dictionary.Is_Null_Symbol (Implicit_Var) and then Dictionary.IsImplicitReturnVariable (Var_Sym) and then Dictionary.Implicit_Return_Variables_Are_Equal (Left_Symbol => Var_Sym, Right_Symbol => Implicit_Var) then -- A persistent copy of the function call has to be -- made from the temporary version which exists on the -- ExpnStack. 
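-- Illustration (hypothetical annotation, not taken from the toolset): given an
-- implicit function return annotation of the form "return M => M >= X", each
-- occurrence of M in the return expression is replaced here by a persistent
-- copy of the DAG of the proof-function call itself.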
Structures.CopyStructure (Heap => VCG_Heap, Root => Proof_Function, RootCopy => Persistent_Call); Cells.Copy_Contents (VCG_Heap, Persistent_Call, P); end if; P := Cells.Null_Cell; else P := DAG.RightPtr (VCG_Heap, P); end if; end loop; --# accept F, 31, Implicit_Return_Expr, "Constraint is updated indirectly via local pointer P" & --# F, 50, Implicit_Return_Expr, VCG_Heap, "Indirectly used via local pointer P" & --# F, 50, Implicit_Return_Expr, Proof_Function, "Indirectly used via local pointer P" & --# F, 50, Implicit_Return_Expr, Implicit_Var, "Indirectly used via local pointer P" & --# F, 50, Implicit_Return_Expr, Dictionary.Dict, "Indirectly used via local pointer P" & --# W, 3, "Suppress warnings on Implicit_Return_Expr"; -- Implicit_Return_Expr appears to be just an input but is actually -- exported. -- (It is effectively a pointer to a data structure which is updated). pragma Warnings (Off, Implicit_Return_Expr); end Substitute_Implicit_Vars; end Substitutions; spark-2012.0.deb/examiner/sem-wf_subprogram_constraint.adb0000644000175000017500000003623511753202336022635 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem) procedure Wf_Subprogram_Constraint (Node : in STree.SyntaxNode; Subprogram_Sym : in Dictionary.Symbol; First_Seen : in Boolean; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) is -- look up table: if First_Seen then we are dealing with Abstract spec else Refined type Which_Abstractions is array (Boolean) of Dictionary.Abstractions; Which_Abstraction : constant Which_Abstractions := Which_Abstractions'(False => Dictionary.IsRefined, True => Dictionary.IsAbstract); Con_Node, Child_Con_Node : STree.SyntaxNode; Scope : Dictionary.Scopes; Generic_Subprogram_Sym : Dictionary.Symbol; Errors_Found : Boolean := False; Errors_Found_In_Predicate : Boolean := False; --------------------------------------------- procedure Wf_Return_Expression (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; First_Seen : in Boolean; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord; Errors_Found : out Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# First_Seen, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# First_Seen, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# Errors_Found from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# First_Seen, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_expression or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.simple_name; --# post STree.Table = STree.Table~; is Ret_Exp : Exp_Record; Implicit_Node : STree.SyntaxNode; Implicit_Str : LexTokenManager.Lex_String; Implicit_Var : Dictionary.Symbol; Return_Type : Dictionary.Symbol; Con_Node : STree.SyntaxNode; begin if Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_expression then -- ASSUME Node = annotation_expression Return_Type := Dictionary.GetType (Dictionary.GetRegion (Scope)); Walk_Expression_P.Walk_Annotation_Expression (Exp_Node => Node, Scope => Scope, Type_Context => Return_Type, Context => Function_Return, Result => Ret_Exp, Component_Data => Component_Data, The_Heap => The_Heap); Errors_Found := Ret_Exp.Errors_In_Expression; Assignment_Check (Position => Node_Position (Node => Node), Scope => Scope, Target_Type => Return_Type, Exp_Result => Ret_Exp); Errors_Found := Errors_Found or else Ret_Exp.Errors_In_Expression; elsif Syntax_Node_Type (Node => Node) = 
SP_Symbols.simple_name then -- ASSUME Node = simple_name Implicit_Node := Child_Node (Current_Node => Node); -- ASSUME Implicit_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Implicit_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Implicit_Node = identifier in Wf_Return_Expression"); Implicit_Str := Node_Lex_String (Node => Implicit_Node); if Dictionary.IsDefined (Name => Implicit_Str, Scope => Scope, Context => Dictionary.ProofContext, Full_Package_Name => False) then Errors_Found := True; ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Implicit_Str); else -- valid implicit return variable so add it and then wf predicate Dictionary.AddImplicitReturnVariable (Abstraction => Which_Abstraction (First_Seen), Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Implicit_Node), End_Position => Node_Position (Node => Implicit_Node)), Name => Implicit_Str, TheFunction => Dictionary.GetRegion (Scope), Variable => Implicit_Var); STree.Add_Node_Symbol (Node => Implicit_Node, Sym => Implicit_Var); Con_Node := Next_Sibling (Current_Node => Node); -- ASSUME Con_Node = predicate SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Con_Node) = SP_Symbols.predicate, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Con_Node = predicate in Wf_Return_Expression"); Wf_Predicate (Node => Con_Node, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Implicit_Var), Context => Function_Return, Component_Data => Component_Data, The_Heap => The_Heap, Errors_Found => Errors_Found); end if; else Errors_Found := False; end if; end Wf_Return_Expression; begin -- Wf_Subprogram_Constraint Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Subprogram_Sym); Generic_Subprogram_Sym := Dictionary.GetGenericOfInstantiation (Subprogram_Sym); Con_Node := Child_Node (Current_Node => Node); -- ASSUME Con_Node = precondition OR postcondition OR return_expression OR NULL SystemErrors.RT_Assert (C => Con_Node = STree.NullNode or else Syntax_Node_Type (Node => Con_Node) = SP_Symbols.precondition or else Syntax_Node_Type (Node => Con_Node) = SP_Symbols.postcondition or else Syntax_Node_Type (Node => Con_Node) = SP_Symbols.return_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Con_Node = precondition OR postcondition OR return_expression OR NULL in Wf_Subprogram_Constraint"); if Syntax_Node_Type (Node => Con_Node) = SP_Symbols.precondition then -- ASSUME Con_Node = precondition if not Dictionary.Is_Null_Symbol (Generic_Subprogram_Sym) and then STree.RefToNode (Dictionary.GetPrecondition (Dictionary.IsAbstract, Generic_Subprogram_Sym)) /= STree.NullNode then -- A precondition on the generic instantiation and on the generic -- Instantiation => Generic -- True => True : OK -- Pre => True : OK -- True => Pre : OK because the precondition of the generic will be used by the caller -- Pre => Pre : raise a warning because no VCs will be generated for this ErrorHandler.Semantic_Warning (Err_Num => 389, Position => Node_Position (Node => Con_Node), Id_Str => LexTokenManager.Null_String); end if; Child_Con_Node := Child_Node (Current_Node => Con_Node); -- ASSUME Child_Con_Node = predicate SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Child_Con_Node) = SP_Symbols.predicate, Sys_Err => 
SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child_Con_Node = predicate in Wf_Subprogram_Constraint"); Dictionary.AddPrecondition (Which_Abstraction (First_Seen), Subprogram_Sym, STree.NodeToRef (Child_Con_Node), Dictionary.Location'(Start_Position => Node_Position (Node => Con_Node), End_Position => Node_Position (Node => Con_Node))); Wf_Predicate (Node => Child_Con_Node, Scope => Scope, Context => Precondition, Component_Data => Component_Data, The_Heap => The_Heap, Errors_Found => Errors_Found); Con_Node := Next_Sibling (Current_Node => Con_Node); end if; -- ASSUME Con_Node = postcondition OR return_expression OR NULL if Syntax_Node_Type (Node => Con_Node) = SP_Symbols.postcondition or else Syntax_Node_Type (Node => Con_Node) = SP_Symbols.return_expression then -- ASSUME Con_Node = postcondition OR return_expression if not Dictionary.Is_Null_Symbol (Generic_Subprogram_Sym) then -- A postcondition on the generic instantiation -- Generic => Instantiation -- True => True : OK -- Post => True : OK -- True => Post : raise a warning because the postcondition of the generic instantiation will be used by the caller -- Post => Post : raise a warning because no VCs will be generated for this ErrorHandler.Semantic_Warning (Err_Num => 389, Position => Node_Position (Node => Con_Node), Id_Str => LexTokenManager.Null_String); end if; Child_Con_Node := Child_Node (Current_Node => Con_Node); -- ASSUME Child_Con_Node = predicate OR annotation_expression OR simple_name SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Child_Con_Node) = SP_Symbols.predicate or else Syntax_Node_Type (Node => Child_Con_Node) = SP_Symbols.annotation_expression or else Syntax_Node_Type (Node => Child_Con_Node) = SP_Symbols.simple_name, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child_Con_Node = predicate OR annotation_expression OR simple_name in Wf_Subprogram_Constraint"); Dictionary.AddPostcondition (Which_Abstraction (First_Seen), Subprogram_Sym, STree.NodeToRef (Child_Con_Node), Dictionary.Location'(Start_Position => Node_Position (Node => Con_Node), End_Position => Node_Position (Node => Con_Node))); if Syntax_Node_Type (Node => Child_Con_Node) = SP_Symbols.predicate then -- ASSUME Child_Con_Node = predicate Wf_Predicate (Node => Child_Con_Node, Scope => Scope, Context => Postcondition, Component_Data => Component_Data, The_Heap => The_Heap, Errors_Found => Errors_Found_In_Predicate); elsif Syntax_Node_Type (Node => Child_Con_Node) = SP_Symbols.annotation_expression or else Syntax_Node_Type (Node => Child_Con_Node) = SP_Symbols.simple_name then -- ASSUME Child_Con_Node = annotation_expression OR simple_name Wf_Return_Expression (Node => Child_Con_Node, Scope => Scope, First_Seen => First_Seen, Component_Data => Component_Data, The_Heap => The_Heap, Errors_Found => Errors_Found_In_Predicate); end if; Errors_Found := Errors_Found or else Errors_Found_In_Predicate; elsif Con_Node /= STree.NullNode then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Con_Node = postcondition OR return_expression OR NULL in Wf_Subprogram_Constraint"); end if; if Errors_Found then Dictionary.SetSubprogramSignatureNotWellformed (Which_Abstraction (First_Seen), Subprogram_Sym); end if; end Wf_Subprogram_Constraint; spark-2012.0.deb/examiner/error_io.adb0000644000175000017500000001142611753202336016545 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited 
------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- This package implements files of Error Entries either using the standard -- -- mechanisms of SPARK_IO (as in Examiners up to and including release 5) -- -- or building them entirely in-memory as an option for future Examiners on -- -- hosts where sufficient memory (or insufficient file handles) are available -- -- -- -- The "In_Memory" implementation assumes the host compiler is GNAT, and uses -- -- the GNAT.Dynamic_Tables package -- -- -- -- Following Examiner 7.6, we assume the host compiler is always GNAT and -- -- that error files will always be built in memory, so the "On_Disk" -- -- implementation has been removed. As such, this file no longer needs to -- -- be passed through GNATPREP -- -------------------------------------------------------------------------------- with GNAT.Dynamic_Tables; package body Error_IO is --# hide Error_IO; package NumericError_Tables is new GNAT.Dynamic_Tables ( Error_Types.NumericError, Positive, Positive'First, 100, -- elements 100); -- % increase when reallocated type File_Descriptor is record T : NumericError_Tables.Instance; Top : Natural; -- Length of file in records Ptr : Positive; -- Current file pointer end record; procedure Close (File : in out File_Type; Status : out SPARK_IO.File_Status) is begin Status := SPARK_IO.Ok; NumericError_Tables.Set_Last (File.T, Positive'First); NumericError_Tables.Release (File.T); File.Top := 0; File.Ptr := 1; exception when others => Status := SPARK_IO.Device_Error; end Close; ------------ -- Create -- ------------ procedure Create (File : in out File_Type; Status : out SPARK_IO.File_Status) is begin Status := SPARK_IO.Ok; File := new File_Descriptor; NumericError_Tables.Init (File.T); File.Top := 0; File.Ptr := 1; exception when others => Status := SPARK_IO.Device_Error; end Create; --------------------- -- Get_Numeric_Error -- --------------------- procedure Get_Numeric_Error (File : in File_Type; Item : out Error_Types.NumericError) is begin if File.Ptr <= File.Top then Item := File.T.Table (File.Ptr); File.Ptr := File.Ptr + 1; else Item := Error_Types.Empty_NumericError; end if; end Get_Numeric_Error; --------------------- -- Put_Numeric_Error -- --------------------- procedure Put_Numeric_Error (File : in File_Type; Item : in Error_Types.NumericError) is begin NumericError_Tables.Allocate (File.T, 1); File.T.Table (File.Ptr) := Item; File.Ptr := File.Ptr + 1; File.Top := File.Top + 1; end Put_Numeric_Error; ----------- -- Reset -- ----------- procedure Reset (File : in out File_Type; Mode_Of_File : in SPARK_IO.File_Mode; Status : out SPARK_IO.File_Status) is pragma Unreferenced 
(Mode_Of_File); begin Status := SPARK_IO.Ok; if File /= Null_File then File.Ptr := 1; end if; end Reset; end Error_IO; ././@LongLink0000000000000000000000000000017500000000000011570 Lustar rootrootspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_priority_pragma.adbspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaratio0000644000175000017500000002160411753202336033123 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with E_Strings; separate (Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Full_Type_Declaration) procedure Wf_Priority_Pragma (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; The_Heap : in out Heap.HeapRecord) is -- Rules: -- 1. Priority may appear in Task, PT or main_program -- 2. Interrupt_Priority may only appear in PT or task -- 3. Only one may appear -- 4. Only priority or interrupt_priority is valid here -- -- Grammar rules ensure that we only call this check from locations where -- some form of priority pragma is expected. 
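-- For illustration only, a minimal sketch (not taken from the original
-- sources) of the kind of SPARK text these rules govern:
--
--    protected type PT is
--       pragma Priority (10);                   -- rule 1: allowed in a PT
--       procedure Op;
--    private
--       Done : Boolean := False;
--    end PT;
--
--    task type TT is
--       pragma Interrupt_Priority
--         (System.Interrupt_Priority'First);    -- rule 2: allowed in a task
--    end TT;
--
-- A second priority pragma in either declaration would violate rule 3, and
-- pragma Interrupt_Priority on a main program would violate rule 2.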
The_Region : Dictionary.Symbol; Is_Protected_Type : Boolean; Is_Task_Type : Boolean; Pragma_Kind : Dictionary.RavenscarPragmasWithValue; Id_Node : STree.SyntaxNode; Argument_Expression_Node : STree.SyntaxNode; Value_Rep : LexTokenManager.Lex_String; -- storage rep of value supplied for pragma Compatible : Boolean; function Valid_Location (Is_Protected_Type : Boolean; Is_Task_Type : Boolean) return Boolean is begin -- Location must be SYNTACTICALLY correct: we need only worry about things like -- Interrupt_Priority in main_program -- FOR NOW ALLOW PROTECTED TYPES ONLY - NEEDS EXTENDING FOR TASKS & MAIN PROGRAMS return Is_Protected_Type or else Is_Task_Type; end Valid_Location; ---- procedure Check_Discriminant (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.expression; --# post STree.Table = STree.Table~; is Id_Node : STree.SyntaxNode; Sym : Dictionary.Symbol; function Is_Chain (Node : STree.SyntaxNode) return Boolean --# global in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.identifier; is Current_Node : STree.SyntaxNode; Result : Boolean := True; begin Current_Node := Node; while Syntax_Node_Type (Node => Current_Node) /= SP_Symbols.expression loop --# assert Syntax_Node_Type (Current_Node, STree.Table) /= SP_Symbols.expression; Result := Next_Sibling (Current_Node => Current_Node) = STree.NullNode; exit when not Result; -- fail Current_Node := Parent_Node (Current_Node => Current_Node); end loop; return Result; end Is_Chain; begin -- Check_Discriminant -- Check that if a discriminant is used, it is not in an expression. 
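-- For example (an assumed sketch, not from the original sources), given
--
--    protected type PT (Pri : System.Priority) is
--       pragma Priority (Pri);        -- accepted: discriminant on its own
--       procedure Op;
--    private
--       Done : Boolean := False;
--    end PT;
--
-- the pragma argument is accepted, whereas "pragma Priority (Pri + 1)"
-- uses the discriminant inside a larger expression and is rejected with
-- semantic error 887.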
-- If it is valid mark it as being used to set priority Id_Node := Last_Child_Of (Start_Node => Node); if Syntax_Node_Type (Node => Id_Node) = SP_Symbols.identifier then -- ASSUME Id_Node = identifier -- may be a discriminant Sym := Dictionary.LookupItem (Name => Node_Lex_String (Id_Node), Scope => Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); if Dictionary.IsKnownDiscriminant (Sym) then if Is_Chain (Node => Id_Node) then STree.Set_Node_Lex_String (Sym => Sym, Node => Id_Node); Dictionary.SetDiscriminantSetsPriority (Sym); else ErrorHandler.Semantic_Error (887, ErrorHandler.No_Reference, Node_Position (Id_Node), LexTokenManager.Null_String); end if; end if; end if; end Check_Discriminant; begin -- Wf_Priority_Pragma Id_Node := Child_Node (Current_Node => Node); -- ASSUME Id_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Id_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Id_Node = identifier in Wf_Priority_Pragma"); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Id_Node), Lex_Str2 => LexTokenManager.Priority_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Id_Node), Lex_Str2 => LexTokenManager.Interrupt_Priority_Token) = LexTokenManager.Str_Eq then -- right sort of pragma Argument_Expression_Node := Next_Sibling (Current_Node => Id_Node); -- ASSUME Argument_Expression_Node = expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Argument_Expression_Node) = SP_Symbols.expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Argument_Expression_Node = expression in Wf_Priority_Pragma"); The_Region := Dictionary.GetRegion (Scope); Is_Protected_Type := Dictionary.IsType (The_Region) and then Dictionary.IsProtectedTypeMark (The_Region); Is_Task_Type := Dictionary.IsType (The_Region) and then Dictionary.TypeIsTask (The_Region); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Id_Node), Lex_Str2 => LexTokenManager.Priority_Token) = LexTokenManager.Str_Eq then Pragma_Kind := Dictionary.Priority; else Pragma_Kind := Dictionary.InterruptPriority; end if; if Valid_Location (Is_Protected_Type => Is_Protected_Type, Is_Task_Type => Is_Task_Type) then Dictionary.SetTypeHasPragma (The_Region, Pragma_Kind); Wf_Priority_Value (Node => Argument_Expression_Node, Pragma_Kind => Pragma_Kind, Error_Sym => The_Region, Scope => Scope, The_Heap => The_Heap, Value_Rep => Value_Rep, Compatible => Compatible); if Compatible then -- return Value_Rep will either be a valid static value or NullString so we can add it safely Dictionary.SetTypePragmaValue (The_Region, Pragma_Kind, Value_Rep); -- see if argument is a discriminant and, if it is, mark it in the dicitonary as being -- used to set priority (so that we can do checks on actuals supplied in subtypes) Check_Discriminant (Node => Argument_Expression_Node, Scope => Scope); end if; else -- Invalid location ErrorHandler.Semantic_Error (879, ErrorHandler.No_Reference, Node_Position (Node), Node_Lex_String (Id_Node)); end if; else -- not pragma [Interrupt_]Priority ErrorHandler.Semantic_Error (880, ErrorHandler.No_Reference, Node_Position (Node), LexTokenManager.Null_String); end if; end Wf_Priority_Pragma; spark-2012.0.deb/examiner/dag.smf0000644000175000017500000000064011753202337015514 0ustar eugeneugencells.adb cells-utility.adb -vcg cells-utility-list.adb -vcg cell_storage.adb -vcg dag.adb 
dag-build_annotation_expression.adb dag-buildexpndag.adb dag-buildexpndag-upattributedesignator.adb dag-buildgraph.adb dag-buildgraph-modelassignmentstmt.adb -vcg dag-buildgraph-modelprocedurecall.adb dag-buildgraph-incorporateconstraints.adb dag-loopcontext.adb dag-substitutions.adb -vcg dag-type_constraint.adb dag_io.adb spark-2012.0.deb/examiner/sem-check_announced_types_declared.adb0000644000175000017500000001204611753202336023664 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) procedure Check_Announced_Types_Declared (Pack_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Node_Pos : in LexTokenManager.Token_Position) is Type_List : Dictionary.Iterator; The_Type : Dictionary.Symbol; function Type_Is_Protected (The_Type : Dictionary.Symbol; Pack_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; --# in Scope; is -- This function is necessary because at this point the type symbol is -- simply a placeholder generated from the type announcement and contains -- no information as to what the type is. So we must iterate over the own -- variables looking for this type and then we can deduce whether the -- type is protected. Own_List : Dictionary.Iterator; The_Own_Var : Dictionary.Symbol; Result : Boolean := False; begin Own_List := Dictionary.FirstOwnVariable (Pack_Sym); while not Dictionary.IsNullIterator (Own_List) loop The_Own_Var := Dictionary.CurrentSymbol (Own_List); if Dictionary.OwnVariableHasType (The_Own_Var, Scope) and then Dictionary.Types_Are_Equal (Left_Symbol => Dictionary.GetType (The_Own_Var), Right_Symbol => The_Type, Full_Range_Subtype => False) and then Dictionary.HasValidPriorityProperty (The_Own_Var) then Result := True; exit; end if; Own_List := Dictionary.NextSymbol (Own_List); end loop; return Result; end Type_Is_Protected; function Type_Is_Task (The_Type : Dictionary.Symbol; Pack_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is -- As above but looking through the own tasks. 
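-- A sketch of the annotations involved (illustrative only):
--
--    package P
--    --# own task Worker : Worker_Type;
--    is
--       -- Worker_Type is announced above but not declared here
--    end P;
--
-- Type_Is_Task returns True for Worker_Type because it is named as the
-- type of one of P's own tasks, so error 325 is reported when the package
-- specification is checked rather than being deferred to the body.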
Own_List : Dictionary.Iterator; The_Own_Task : Dictionary.Symbol; Result : Boolean := False; begin Own_List := Dictionary.FirstOwnTask (Pack_Sym); while not Dictionary.IsNullIterator (Own_List) loop The_Own_Task := Dictionary.CurrentSymbol (Own_List); if Dictionary.Types_Are_Equal (Left_Symbol => Dictionary.GetType (The_Own_Task), Right_Symbol => The_Type, Full_Range_Subtype => False) then Result := True; exit; end if; Own_List := Dictionary.NextSymbol (Own_List); end loop; return Result; end Type_Is_Task; begin -- Check_Announced_Types_Declared Type_List := Dictionary.First_Undeclared_Type (The_Package => Pack_Sym); while not Dictionary.IsNullIterator (Type_List) loop The_Type := Dictionary.CurrentSymbol (Type_List); -- Are we in the package specification? --# accept Flow, 41, "Expected stable expression"; if Dictionary.Get_Visibility (Scope => Scope) = Dictionary.Visible then --# end accept; if Type_Is_Protected (The_Type => The_Type, Pack_Sym => Pack_Sym) or else Type_Is_Task (The_Type => The_Type, Pack_Sym => Pack_Sym) then ErrorHandler.Semantic_Error (Err_Num => 325, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (The_Type)); end if; else -- We're in the package body.. if not Dictionary.TypeIsPrivate (TheType => Dictionary.CurrentSymbol (Type_List)) and then not Type_Is_Protected (The_Type => The_Type, Pack_Sym => Pack_Sym) and then not Type_Is_Task (The_Type => The_Type, Pack_Sym => Pack_Sym) then ErrorHandler.Semantic_Error (Err_Num => 325, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (The_Type)); end if; end if; Type_List := Dictionary.NextSymbol (Type_List); end loop; end Check_Announced_Types_Declared; spark-2012.0.deb/examiner/cell_storage.adb0000644000175000017500000000646611753202335017377 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- Cell_Storage -- -- Implementation -- Uses Ada.Containers.Vectors to implement a simple self-extending -- vector. Implemented in Ada, not SPARK, so hidden. 
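-- The instantiation underneath is essentially of the form (a minimal
-- sketch, assumed from the description above; the actual declaration
-- lives elsewhere):
--
--    package Vectors is new Ada.Containers.Vectors
--      (Index_Type   => Cell,
--       Element_Type => Cell_Content);
--
-- so Append, Get_Element and Set_Element below map directly onto
-- Vectors.Append, Vectors.Element and Vectors.Replace_Element.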
-------------------------------------------------------------------------------- package body Cell_Storage is --# hide Cell_Storage; procedure Initialize (Initial_Length : in SPARK.Ada.Containers.Count_Type; V : out Vector) is begin V := Vector' (Vec => Vectors.To_Vector (New_Item => Cell_Content'(A_Ptr => Cell'First, B_Ptr => Cell'First, C_Ptr => Cell'First, Copy => Cell'First, Lex_Str => LexTokenManager.Null_String, Val => Natural'First, Assoc_Var => Dictionary.NullSymbol, Op_Symbol => SP_Symbols.SP_Symbol'First, Rank => Cell_Rank'First, Kind => Unknown_Kind, Free => True, Marked => False), Length => Ada.Containers.Count_Type (Initial_Length))); end Initialize; function Last_Index (V : Vector) return Cell is begin return Cell (Vectors.Last_Index (V.Vec)); end Last_Index; function Get_Element (V : in Vector; Index : in Cell) return Cell_Content is begin return Vectors.Element (Container => V.Vec, Index => Index); end Get_Element; procedure Set_Element (V : in out Vector; Index : in Cell; Value : in Cell_Content) is begin Vectors.Replace_Element (Container => V.Vec, Index => Index, New_Item => Value); end Set_Element; procedure Append (V : in out Vector; Value : in Cell_Content) is begin Vectors.Append (Container => V.Vec, New_Item => Value); end Append; end Cell_Storage; spark-2012.0.deb/examiner/structures.ads0000644000175000017500000000356011753202336017171 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Cells; --# inherit Cells, --# CStacks, --# Statistics; package Structures is procedure CopyStructure (Heap : in out Cells.Heap_Record; Root : in Cells.Cell; RootCopy : out Cells.Cell); --# global in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Heap, --# Root & --# RootCopy from Heap, --# Root; procedure DisposeOfStructure (Heap : in out Cells.Heap_Record; Root : in Cells.Cell); --# global in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Heap, --# Root; end Structures; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_percent.adb0000644000175000017500000001030411753202336023367 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Walk_Expression_P) procedure Wf_Percent (Node_Pos : in LexTokenManager.Token_Position; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type) is Stack_Top : Sem.Exp_Record; Top_Sym : Dictionary.Symbol; function In_A_For_Loop (Scope : Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; is -- Search outwards in scope as long as we remain in a loop. -- Success is when a for loop is found, -- Failure is when anything other than a loop is found. Current_Scope : Dictionary.Scopes; Result : Boolean := False; begin Current_Scope := Scope; while Dictionary.IsLoop (Dictionary.GetRegion (Current_Scope)) loop if not Dictionary.Is_Null_Symbol (Dictionary.GetLoopParameter (Dictionary.GetRegion (Current_Scope))) then -- for loop found Result := True; exit; end if; Current_Scope := Dictionary.GetEnclosingScope (Current_Scope); end loop; return Result; end In_A_For_Loop; begin -- Wf_Percent -- Percent operator is only allowed in loop invariants of for loops. if In_A_For_Loop (Scope => Scope) then -- Percent may be allowed Exp_Stack.Pop (Item => Stack_Top, Stack => E_Stack); if Stack_Top.Sort = Sem.Is_Object then -- May be ok, further checks required Top_Sym := Stack_Top.Other_Symbol; if not Dictionary.Is_Variable (Top_Sym) then Stack_Top.Errors_In_Expression := True; ErrorHandler.Semantic_Error (Err_Num => 318, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => LexTokenManager.Null_String); elsif not Dictionary.Types_Are_Equal (Left_Symbol => Stack_Top.Type_Symbol, Right_Symbol => Dictionary.GetType (Top_Sym), Full_Range_Subtype => False) then -- New check that variable is entire Stack_Top.Errors_In_Expression := True; ErrorHandler.Semantic_Error (Err_Num => 320, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => LexTokenManager.Null_String); end if; else -- not an object so error message needed Stack_Top.Errors_In_Expression := True; if not (Stack_Top.Sort = Sem.Is_Unknown) then -- Supress error for unknown things ErrorHandler.Semantic_Error (Err_Num => 318, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => LexTokenManager.Null_String); end if; end if; Exp_Stack.Push (X => Stack_Top, Stack => E_Stack); else Exp_Stack.Pop (Item => Stack_Top, Stack => E_Stack); Stack_Top.Errors_In_Expression := True; Exp_Stack.Push (X => Stack_Top, Stack => E_Stack); ErrorHandler.Semantic_Error (Err_Num => 310, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => LexTokenManager.Null_String); end if; end Wf_Percent; spark-2012.0.deb/examiner/componenterrors.ads0000644000175000017500000001234211753202335020202 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- 
Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Dictionary; with ExaminerConstants; with Heap; with LexTokenManager; with SeqAlgebra; use type Dictionary.Symbol; use type LexTokenManager.Token_Position; --# inherit Dictionary, --# ExaminerConstants, --# Heap, --# LexTokenManager, --# SeqAlgebra, --# Statistics, --# SystemErrors; package ComponentErrors is MaxNumComponentErrors : constant Natural := ExaminerConstants.MaxRecordErrors; subtype ComponentError is Natural range 0 .. MaxNumComponentErrors; NullComponentError : constant ComponentError := 0; type HeapOfErrors is private; type ErrorClass is ( DataFlow, IneffectiveStmt, IneffectiveFieldAssignment, Dependency, Usage, SemanticWarning); procedure Initialise (TheErrorHeap : out HeapOfErrors); --# derives TheErrorHeap from ; -- Creates a new component error on TheErrorheap from the given -- error class, error value, position and sym. procedure CreateError (TheErrorHeap : in out HeapOfErrors; HeapSeq : in out Heap.HeapRecord; ErrClass : in ErrorClass; ErrVal : in Natural; Position : in LexTokenManager.Token_Position; Sym : in Dictionary.Symbol; NewError : out ComponentError); --# global in out Statistics.TableUsage; --# derives HeapSeq from * & --# NewError from TheErrorHeap & --# Statistics.TableUsage from *, --# HeapSeq, --# TheErrorHeap & --# TheErrorHeap from *, --# ErrClass, --# ErrVal, --# HeapSeq, --# Position, --# Sym; -- Disposes of component error on TheErrorHeap. procedure DisposeOfError (TheErrorHeap : in out HeapOfErrors; HeapSeq : in out Heap.HeapRecord; OldError : in ComponentError); --# derives HeapSeq, --# TheErrorHeap from *, --# OldError, --# TheErrorHeap; -- Returns true if ErrClass, ErrVal, Position and Sym are the same -- in the error descriptors referred to by Error1 and Error2. -- Otherwise it returns false. function IsSameError (TheErrorHeap : HeapOfErrors; Error1 : ComponentError; Error2 : ComponentError) return Boolean; function ClassOfError (TheErrorHeap : HeapOfErrors; Error : ComponentError) return ErrorClass; function ValueOfError (TheErrorHeap : HeapOfErrors; Error : ComponentError) return Natural; function PositionOfError (TheErrorHeap : HeapOfErrors; Error : ComponentError) return LexTokenManager.Token_Position; function SymOfError (TheErrorHeap : HeapOfErrors; Error : ComponentError) return Dictionary.Symbol; function AssociatedComponentNodesOfError (TheErrorHeap : HeapOfErrors; Error : ComponentError) return SeqAlgebra.Seq; procedure ReportUsage (TheErrorHeap : in HeapOfErrors); --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage from *, --# TheErrorHeap; private type ErrorDescriptor is record ErrClass : ErrorClass; ErrVal : Natural; Position : LexTokenManager.Token_Position; Sym : Dictionary.Symbol; AssociatedComponentNodes : SeqAlgebra.Seq; NextError : ComponentError; end record; subtype ComponentIndex is Natural range 1 .. 
MaxNumComponentErrors; type ArrayOfErrorDescriptors is array (ComponentIndex) of ErrorDescriptor; type HeapOfErrors is record ListOfComponentErrors : ArrayOfErrorDescriptors; HighMark, NextFreeComponent : ComponentError; end record; end ComponentErrors; spark-2012.0.deb/examiner/debug.adb0000644000175000017500000003316111753202336016013 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with DAG_IO; with ExaminerConstants; with E_Strings; with GNAT.Traceback.Symbolic; with SPARK_IO; with SP_Symbols; with Text_IO; with SystemErrors; use type SPARK_IO.File_Status; package body Debug is --# hide Debug; procedure PrintMsg (Msg : in String; NewLine : in Boolean) is begin Text_IO.Put (Msg); if NewLine then Text_IO.New_Line (1); end if; end PrintMsg; procedure Print_Sym_Raw (Sym : in Dictionary.Symbol) is Str : E_Strings.T; begin if Dictionary.Is_Null_Symbol (Sym) then SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Null Symbol", 0); else Str := Dictionary.GetAnyPrefixNeeded (Sym => Sym, Scope => Dictionary.GlobalScope, Separator => "."); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => Str); if E_Strings.Get_Length (E_Str => Str) > 0 then SPARK_IO.Put_Char (SPARK_IO.Standard_Output, '.'); end if; E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => Dictionary.GenerateSimpleName (Item => Sym, Separator => ".")); end if; end Print_Sym_Raw; procedure Print_Sym (Msg : in String; Sym : in Dictionary.Symbol) is begin SPARK_IO.Put_String (SPARK_IO.Standard_Output, Msg, 0); -- These statements put out the raw symbol number before its name -- SPARK_IO.Put_Char (SPARK_IO.Standard_Output, '('); -- Text_IO.Put (Integer'Image (Integer (Dictionary.SymbolRef (Sym)))); -- SPARK_IO.Put_Char (SPARK_IO.Standard_Output, ')'); -- end of numeric output lines Print_Sym_Raw (Sym => Sym); -- These statements put out the raw symbol Discriminant after its name -- Start of printing " (SymbolDiscriminant)" -- SPARK_IO.Put_Char (SPARK_IO.Standard_Output, ' '); -- SPARK_IO.Put_Char (SPARK_IO.Standard_Output, '('); -- E_Strings.PutString (SPARK_IO.Standard_Output, -- Dictionary.GetSymbolDiscriminant (Sym)); -- SPARK_IO.Put_Char (SPARK_IO.Standard_Output, ')'); -- End of printing " (SymbolDiscriminant)" SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); end Print_Sym; procedure Print_Function_Sym (Msg : in String; Sym : in Dictionary.Symbol) is --# hide Print_Function_Sym; begin SPARK_IO.Put_String (SPARK_IO.Standard_Output, Msg, 0); SPARK_IO.Put_String (SPARK_IO.Standard_Output, " [", 0); SPARK_IO.Put_Integer (File => SPARK_IO.Standard_Output, Item => Integer (Dictionary.SymbolRef 
(Sym)), Base => 10, Width => 4); SPARK_IO.Put_String (SPARK_IO.Standard_Output, "]: ", 0); Print_Sym_Raw (Sym); if Dictionary.IsProofFunction (Sym) then SPARK_IO.Put_String (SPARK_IO.Standard_Output, " (Proof)", 0); if Dictionary.IsImplicitProofFunction (Sym) then SPARK_IO.Put_String (SPARK_IO.Standard_Output, " (Implicit)", 0); end if; elsif Dictionary.IsAdaFunction (Sym) then SPARK_IO.Put_String (SPARK_IO.Standard_Output, " (Ada)", 0); end if; SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); end Print_Function_Sym; procedure PrintScope (Msg : in String; Scope : in Dictionary.Scopes) is begin SPARK_IO.Put_String (SPARK_IO.Standard_Output, Msg, 0); if Dictionary.Get_Visibility (Scope => Scope) = Dictionary.Visible then SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Visible scope of ", 0); elsif Dictionary.Get_Visibility (Scope => Scope) = Dictionary.Local then SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Local scope of ", 0); else SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Private scope of ", 0); end if; Print_Sym ("", Dictionary.GetRegion (Scope)); end PrintScope; procedure PrintInt (Msg : in String; I : in Integer) is begin Text_IO.Put (Msg); Text_IO.Put_Line (Integer'Image (I)); end PrintInt; procedure PrintBool (Msg : in String; B : in Boolean) is begin Text_IO.Put (Msg); Text_IO.Put_Line (Boolean'Image (B)); end PrintBool; procedure Print_Lex_Str (Msg : in String; L : in LexTokenManager.Lex_String) is begin Text_IO.Put (Msg); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => L)); end Print_Lex_Str; -- needs DAG_IO.PrintDAG to be made visible procedure PrintDAG (Msg : in String; DAG : in Cells.Cell; The_Heap : in out Cells.Heap_Record; Scope : in Dictionary.Scopes) is begin Text_IO.Put (Msg); DAG_IO.PrintDag (The_Heap, SPARK_IO.Standard_Output, DAG, Scope, DAG_IO.Default_Wrap_Limit); Text_IO.New_Line; end PrintDAG; procedure Write_DAG_To_File (Filename : in String; DAG : in Cells.Cell; The_Heap : in out Cells.Heap_Record; Scope : in Dictionary.Scopes) is Fd : SPARK_IO.File_Type := SPARK_IO.Null_File; Status : SPARK_IO.File_Status; begin SPARK_IO.Create (File => Fd, Name_Length => Filename'Length, Name_Of_File => Filename, Form_Of_File => "", Status => Status); SystemErrors.RT_Assert (C => Status = SPARK_IO.Ok, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Could not open file: " & Filename); DAG_IO.Print_DAG_Dot (Heap => The_Heap, Output_File => Fd, Root => DAG, Scope => Scope, Wrap_Limit => DAG_IO.No_Wrap); SPARK_IO.Close (Fd, Status); SystemErrors.RT_Assert (C => Status = SPARK_IO.Ok, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Could not close file: " & Filename); end Write_DAG_To_File; procedure Write_Heap_To_File (Filename : in String; The_Heap : in out Cells.Heap_Record) is Fd : SPARK_IO.File_Type := SPARK_IO.Null_File; Status : SPARK_IO.File_Status; begin SPARK_IO.Create (File => Fd, Name_Length => Filename'Length, Name_Of_File => Filename, Form_Of_File => "", Status => Status); SystemErrors.RT_Assert (C => Status = SPARK_IO.Ok, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Could not open file: " & Filename); DAG_IO.Print_Heap_Dot (Heap => The_Heap, Output_File => Fd); SPARK_IO.Close (Fd, Status); SystemErrors.RT_Assert (C => Status = SPARK_IO.Ok, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Could not close file: " & Filename); end Write_Heap_To_File; procedure Print_Cell (Msg : in String; The_Heap : in out Cells.Heap_Record; The_Cell : in Cells.Cell) is begin Text_IO.Put_Line 
(Msg & " ("); Text_IO.Put_Line (" Kind : " & Cells.Cell_Kind'Image (Cells.Get_Kind (The_Heap, The_Cell))); Text_IO.Put_Line (" Op : " & SP_Symbols.SP_Symbol'Image (Cells.Get_Op_Symbol (The_Heap, The_Cell))); Text_IO.Put_Line (" A : " & Cells.Cell_Kind'Image (Cells.Get_Kind (The_Heap, Cells.Get_A_Ptr (The_Heap, The_Cell)))); Text_IO.Put (" "); DAG_IO.PrintDag (The_Heap, SPARK_IO.Standard_Output, Cells.Get_A_Ptr (The_Heap, The_Cell), Dictionary.GlobalScope, 120); Text_IO.New_Line; Text_IO.Put_Line (" B : " & Cells.Cell_Kind'Image (Cells.Get_Kind (The_Heap, Cells.Get_B_Ptr (The_Heap, The_Cell)))); Text_IO.Put (" "); DAG_IO.PrintDag (The_Heap, SPARK_IO.Standard_Output, Cells.Get_B_Ptr (The_Heap, The_Cell), Dictionary.GlobalScope, 120); Text_IO.New_Line; Text_IO.Put_Line (" C : " & Cells.Cell_Kind'Image (Cells.Get_Kind (The_Heap, Cells.Get_C_Ptr (The_Heap, The_Cell)))); Text_IO.Put (" "); DAG_IO.PrintDag (The_Heap, SPARK_IO.Standard_Output, Cells.Get_C_Ptr (The_Heap, The_Cell), Dictionary.GlobalScope, 120); Text_IO.New_Line; Text_IO.Put_Line (" Nat :" & Natural'Image (Cells.Get_Natural_Value (The_Heap, The_Cell))); Text_IO.Put (" Sym : "); Print_Sym_Raw (Cells.Get_Symbol_Value (The_Heap, The_Cell)); Text_IO.New_Line; Text_IO.Put_Line (")"); end Print_Cell; procedure Print_Sym_Seq (Msg : in String; Seq : in SeqAlgebra.Seq; The_Heap : in Heap.HeapRecord) is X : SeqAlgebra.MemberOfSeq; Str : E_Strings.T; Sym : Dictionary.Symbol; Later_Item : Boolean := False; begin Text_IO.Put (Msg); Text_IO.Put ("{ "); X := SeqAlgebra.FirstMember (The_Heap, Seq); while not SeqAlgebra.IsNullMember (X) loop if Later_Item then Text_IO.Put (", "); end if; Later_Item := True; Sym := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (SeqAlgebra.Value_Of_Member (The_Heap => The_Heap, M => X))); Str := Dictionary.GetAnyPrefixNeeded (Sym => Sym, Scope => Dictionary.GlobalScope, Separator => "."); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => Str); if E_Strings.Get_Length (E_Str => Str) > 0 then SPARK_IO.Put_Char (SPARK_IO.Standard_Output, '.'); end if; E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => Dictionary.GenerateSimpleName (Item => Sym, Separator => ".")); X := SeqAlgebra.NextMember (The_Heap, X); end loop; Text_IO.Put_Line (" }"); end Print_Sym_Seq; procedure PrintSeq (Msg : in String; Seq : in SeqAlgebra.Seq; The_Heap : in Heap.HeapRecord) is X : SeqAlgebra.MemberOfSeq; Later_Item : Boolean := False; begin Text_IO.Put (Msg); Text_IO.Put ("{ "); X := SeqAlgebra.FirstMember (The_Heap, Seq); while not SeqAlgebra.IsNullMember (X) loop if Later_Item then Text_IO.Put (", "); end if; Later_Item := True; Text_IO.Put (Integer'Image (SeqAlgebra.Value_Of_Member (The_Heap => The_Heap, M => X))); X := SeqAlgebra.NextMember (The_Heap, X); end loop; Text_IO.Put_Line (" }"); end PrintSeq; procedure PrintNode (Msg : in String; N : in STree.SyntaxNode) is begin Text_IO.Put (Msg); Text_IO.Put_Line (SP_Symbols.SP_Symbol'Image (STree.Syntax_Node_Type (Node => N))); end PrintNode; procedure PrintTraceback (Msg : in String; Depth : in Natural) is Traceback : GNAT.Traceback.Tracebacks_Array (1 .. 
Depth); Unused : Natural; begin Text_IO.Put_Line (Msg); GNAT.Traceback.Call_Chain (Traceback, Unused); SPARK_IO.Put_String (SPARK_IO.Standard_Output, GNAT.Traceback.Symbolic.Symbolic_Traceback (Traceback), 0); end PrintTraceback; procedure Dump_Stack (Msg : in String; Scope : in Dictionary.Scopes; VCG_Heap : in out Cells.Heap_Record; Stack : in CStacks.Stack) is Top_Ptr : CStacks.Stack := Stack; begin Debug.PrintMsg (Msg, True); Debug.PrintMsg ("Stack contents:", True); while not CStacks.IsEmpty (Top_Ptr) loop Debug.PrintDAG (Msg => "--", DAG => CStacks.Top (VCG_Heap, Top_Ptr), The_Heap => VCG_Heap, Scope => Scope); Debug.PrintMsg ("------------------", True); Top_Ptr := CStacks.NonDestructivePop (VCG_Heap, Top_Ptr); end loop; end Dump_Stack; end Debug; spark-2012.0.deb/examiner/g-tabsor.ads0000644000175000017500000000377611753202336016475 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- The generic procedure Sort_Table is missing in the generic package -- GNAT.Table (it exists in the generic package -- GNAT.Dynamic_Tables). To prevent to change the generic package -- GNAT.Table, a public child package has been written to add this -- functionality. This generic child package contains a generic -- function to sort the table. generic package GNAT.Table.Sort is generic with function Lt (Comp1, Comp2 : Table_Component_Type) return Boolean; procedure Sort_Table; -- This procedure sorts the components of the table into ascending -- order making calls to Lt to do required comparisons, and using -- assignments to move components around. The Lt function returns True -- if Comp1 is less than Comp2 (in the sense of the desired sort), and -- False if Comp1 is greater than Comp2. For equal objects it does not -- matter if True or False is returned (it is slightly more efficient -- to return False). The sort is not stable (the order of equal items -- in the table is not preserved). end GNAT.Table.Sort; spark-2012.0.deb/examiner/commandlinedata.adb0000644000175000017500000013124511753202335020046 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ---------------------------------------------------------------------------- -- WARNING: if you add or modify a command line switch be sure to update -- -- Print_Help in examiner.adb -- ---------------------------------------------------------------------------- with FileSystem; with SPARK_IO; with Version; with XMLReport; use type Version.Distribution_Sort; package body CommandLineData is Ada_Extension : constant String := "ada"; Lst_Extension : constant String := "lst"; Default_Report_File : constant String := "spark.rep"; Default_Dict_File : constant String := "spark.dic"; Default_HTML_Directory : constant String := "HTML"; procedure Initialize is begin Content := Command_Line_Contents' (Valid => True, Index => False, Warning => False, Target_Data => False, Target_Config => False, Source_Extension => E_Strings.Copy_String (Str => Ada_Extension), Listing_Extension => E_Strings.Copy_String (Str => Lst_Extension), Number_Source => 0, Echo => True, Makefile_Mode => False, Report => True, Write_Dict => False, Report_File_Name => E_Strings.Copy_String (Str => Default_Report_File), Dict_File_Name => E_Strings.Copy_String (Str => Default_Dict_File), VCG => False, DPC => False, Write_Statistics => False, Flow_Option => Auto_Flow, Default_Switch_File => False, HTML => False, HTML_Directory => E_Strings.Copy_String (Str => Default_HTML_Directory), Output_Directory => False, Output_Directory_Name => E_Strings.Empty_String, Index_File_Name => E_Strings.Empty_String, Warning_File_Name => E_Strings.Empty_String, Target_Data_File => E_Strings.Empty_String, Target_Config_File => E_Strings.Empty_String, Source_File_List => Source_File_Lists'(others => Source_File_Entry'(Source_File_Name => E_Strings.Empty_String, Listing => False, Listing_File_Name => E_Strings.Empty_String)), Anno_Char => '#', Syntax_Only => False, FDL_Reserved => True, FDL_Mangle => E_Strings.Empty_String, Plain_Output => False, Version_Requested => False, Help_Requested => False, VC_Finger_Prints => False, Concurrency_Profile => Sequential, Language_Profile => SPARK95, No_Duration => False, Brief => False, Brief_Option => No_Path, XML => False, Info_Flow_Policy => None, Constant_Rules => Lazy, Legacy_Errors => False, Error_Explanation => Off, Justification_Option => Full, No_Listings => False, Generate_SLI => True, Casing_Standard => False, Casing_Identifier => False, SPARK_Lib => False, Debug => Debug_State'(Enabled => False, Expressions => False, HTML => False, Lookup_Trace => False, File_Names => False, Units => False, Invariants => False, Components => False, Rho => False, Parser => False, FDL_Ranking => False, VCG => False, SLI => False, VCG_All => False, DAG => False, Extra_Stats => False), Distribution_Is_Pro => Version.Toolset_Distribution_Sort = Version.Pro, GPL_Switch => False); end Initialize; procedure Normalize_File_Name_To_Output_Directory (F : in out E_Strings.T) is Original : E_Strings.T; procedure Debug --# derives ; is --# hide Debug; begin if Content.Debug.File_Names then 
SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Normalizing ", 0); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => Original); SPARK_IO.Put_String (SPARK_IO.Standard_Output, " with respect to ", 0); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => Content.Output_Directory_Name); SPARK_IO.Put_String (SPARK_IO.Standard_Output, " results in ", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => F); end if; end Debug; begin Original := F; if Content.Output_Directory then F := FileSystem.Interpret_Relative (File_Name => Original, Relative_To_Directory => Content.Output_Directory_Name); Debug; end if; end Normalize_File_Name_To_Output_Directory; procedure Dump_File_Names is --# hide Dump_File_Names; begin if Content.Debug.File_Names then SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "------------Dump of CommandLineData.Content File Names------------", 0); if Content.Index then SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Index file is:", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Content.Index_File_Name); else SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "No Index File", 0); end if; if Content.Warning then SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Warning file is:", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Content.Warning_File_Name); else SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "No Warning File", 0); end if; if Content.Target_Data then SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Target Data File is:", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Content.Target_Data_File); else SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "No Target Data File", 0); end if; if Content.Target_Config then SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Target Config File is:", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Content.Target_Config_File); else SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "No Target Config File", 0); end if; SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Source extension is:", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Content.Source_Extension); SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Listing extension is:", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Content.Listing_Extension); SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Report File Name is:", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Content.Report_File_Name); SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Dictionary File Name is:", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Content.Dict_File_Name); if Content.Output_Directory then SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Output Directory is:", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Content.Output_Directory_Name); else SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Output directory is CWD", 0); end if; SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Number of Source Files is:", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Integer'Image (Content.Number_Source), 0); for I in Source_File_Positions range 1 .. 
Content.Number_Source loop E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => Content.Source_File_List (I).Source_File_Name); if Content.No_Listings then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, ", All listing files suppressed", 0); elsif Content.Source_File_List (I).Listing then SPARK_IO.Put_String (SPARK_IO.Standard_Output, ", ", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Content.Source_File_List (I).Listing_File_Name); else SPARK_IO.Put_Line (SPARK_IO.Standard_Output, ", No listing file", 0); end if; end loop; SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Examiner Lib Directory is: ", 0); if FileSystem.Use_Windows_Command_Line and then Content.Plain_Output then E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => E_Strings.Lower_Case (E_Str => FileSystem.Examiner_Lib_Directory)); else E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => FileSystem.Examiner_Lib_Directory); end if; SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "------------End of CommandLineData.Content File Names-------------", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); end if; end Dump_File_Names; procedure Output_Command_Line (Prefix : in String; XML : in Boolean; Option_Str : out E_Strings.T) is Dir_Actual_C : constant String := "output_directory (actual)="; Tmp_String : E_Strings.T; Out_Dir : E_Strings.T; CWD : E_Strings.T; Is_First : Boolean; procedure Output_Option (Prefix : in String; Option : in E_Strings.T; Acc : in out E_Strings.T; XML : in Boolean; For_XML : in Boolean) --# global in out XMLReport.State; --# derives Acc from *, --# For_XML, --# Option, --# Prefix, --# XML, --# XMLReport.State & --# XMLReport.State from *, --# For_XML, --# XML; is Acc_Option : E_Strings.T; begin if XML then if For_XML then Acc_Option := Option; XMLReport.Option (Opt => Acc_Option); else Acc_Option := E_Strings.Empty_String; end if; else Acc_Option := E_Strings.Copy_String (Str => Prefix); E_Strings.Append_Examiner_String (E_Str1 => Acc_Option, E_Str2 => Option); E_Strings.Append_Examiner_String (E_Str1 => Acc_Option, E_Str2 => FileSystem.End_Of_Line); end if; E_Strings.Append_Examiner_String (E_Str1 => Acc, E_Str2 => Acc_Option); end Output_Option; function Plain_Output (E_Str : E_Strings.T; XML : Boolean) return E_Strings.T --# global in Content; is Result : E_Strings.T; begin if Content.Plain_Output and then not XML then Result := FileSystem.Just_File (Fn => E_Str, Ext => True); else Result := E_Str; end if; return Result; end Plain_Output; begin Option_Str := E_Strings.Empty_String; if not Content.Default_Switch_File then Tmp_String := E_Strings.Copy_String (Str => Option_No_Switch); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); end if; --# assert True; if Content.Index then Tmp_String := E_Strings.Copy_String (Str => Option_Index_File & "="); E_Strings.Append_Examiner_String (E_Str1 => Tmp_String, E_Str2 => Plain_Output (E_Str => Content.Index_File_Name, XML => XML)); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); else Tmp_String := E_Strings.Copy_String (Str => "no" & Option_Index_File); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => False); end if; if Content.Warning then Tmp_String := E_Strings.Copy_String (Str => Option_Warning_File & "="); E_Strings.Append_Examiner_String (E_Str1 => Tmp_String, E_Str2 => Plain_Output (E_Str => Content.Warning_File_Name, XML => XML)); Output_Option 
(Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); else Tmp_String := E_Strings.Copy_String (Str => "no" & Option_Warning_File); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => False); end if; --# assert True; if Content.Target_Data then Tmp_String := E_Strings.Copy_String (Str => Option_Target_Compiler_Data & "="); E_Strings.Append_Examiner_String (E_Str1 => Tmp_String, E_Str2 => Plain_Output (E_Str => Content.Target_Data_File, XML => XML)); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); else Tmp_String := E_Strings.Copy_String (Str => "no" & Option_Target_Compiler_Data); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => False); end if; if Content.Target_Config then Tmp_String := E_Strings.Copy_String (Str => Option_Config_File & "="); E_Strings.Append_Examiner_String (E_Str1 => Tmp_String, E_Str2 => Plain_Output (E_Str => Content.Target_Config_File, XML => XML)); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); else Tmp_String := E_Strings.Copy_String (Str => "no" & Option_Config_File); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => False); end if; --# assert True; Tmp_String := E_Strings.Copy_String (Str => Option_Source_Extension & "="); E_Strings.Append_Examiner_String (E_Str1 => Tmp_String, E_Str2 => Plain_Output (E_Str => Content.Source_Extension, XML => XML)); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); Tmp_String := E_Strings.Copy_String (Str => Option_Listing_Extension & "="); E_Strings.Append_Examiner_String (E_Str1 => Tmp_String, E_Str2 => Plain_Output (E_Str => Content.Listing_Extension, XML => XML)); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); if Content.No_Listings then Tmp_String := E_Strings.Copy_String (Str => Option_No_Listings); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => False); end if; if not Content.Echo then Tmp_String := E_Strings.Copy_String (Str => Option_No_Echo); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); end if; if Content.Makefile_Mode then Tmp_String := E_Strings.Copy_String (Str => Option_Makefile_Mode); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); end if; if Content.Write_Dict then Tmp_String := E_Strings.Copy_String (Str => Option_Dictionary_File & "="); E_Strings.Append_Examiner_String (E_Str1 => Tmp_String, E_Str2 => Plain_Output (E_Str => Content.Dict_File_Name, XML => XML)); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); else Tmp_String := E_Strings.Copy_String (Str => "no" & Option_Dictionary_File); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => False); end if; --# assert True; if Content.Report then Tmp_String := E_Strings.Copy_String (Str => Option_Report_File & "="); E_Strings.Append_Examiner_String (E_Str1 => Tmp_String, E_Str2 => Plain_Output (E_Str => Content.Report_File_Name, XML => XML)); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); else Tmp_String := E_Strings.Copy_String (Str => "no" & 
Option_Report_File); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => False); end if; if Content.HTML then Tmp_String := E_Strings.Copy_String (Str => Option_Html & "="); E_Strings.Append_Examiner_String (E_Str1 => Tmp_String, E_Str2 => Plain_Output (E_Str => Content.HTML_Directory, XML => XML)); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); else Tmp_String := E_Strings.Copy_String (Str => "no" & Option_Html); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => False); end if; --# assert True; if Content.VCG then Tmp_String := E_Strings.Copy_String (Str => Option_Vcg); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); end if; if Content.DPC then Tmp_String := E_Strings.Copy_String (Str => Option_Dpc); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); end if; --# assert True; if Content.Syntax_Only then Tmp_String := E_Strings.Copy_String (Str => Option_Syntax_Check); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); end if; if Content.GPL_Switch then Tmp_String := E_Strings.Copy_String (Str => Option_GPL); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); end if; --# assert True; if Content.Plain_Output then Tmp_String := E_Strings.Copy_String (Str => Option_Plain_Output); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); end if; if Content.No_Duration then Tmp_String := E_Strings.Copy_String (Str => Option_No_Duration); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); end if; if Content.Brief then Tmp_String := E_Strings.Copy_String (Str => Option_Brief & "="); case Content.Brief_Option is when No_Path => E_Strings.Append_String (E_Str => Tmp_String, Str => Option_Brief_No_Path); when Full_Path => E_Strings.Append_String (E_Str => Tmp_String, Str => Option_Brief_Full_Path); end case; Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); end if; if Content.SPARK_Lib then Tmp_String := E_Strings.Copy_String (Str => Option_SPARK_Lib); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); end if; if Content.Write_Statistics then Tmp_String := E_Strings.Copy_String (Str => Option_Statistics); else Tmp_String := E_Strings.Copy_String (Str => "no" & Option_Statistics); end if; Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); --# assert True; Tmp_String := E_Strings.Copy_String (Str => Option_Fdl_Identifiers & "="); if Content.FDL_Reserved then E_Strings.Append_String (E_Str => Tmp_String, Str => Option_Fdl_Identifiers_Accept); else if E_Strings.Is_Empty (E_Str => Content.FDL_Mangle) then E_Strings.Append_String (E_Str => Tmp_String, Str => Option_Fdl_Identifiers_Reject); else E_Strings.Append_Examiner_String (E_Str1 => Tmp_String, E_Str2 => Content.FDL_Mangle); end if; end if; Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); Tmp_String := E_Strings.Copy_String (Str => Option_Flow_Analysis & "="); case Content.Flow_Option is when Data_Flow => E_Strings.Append_String (E_Str => Tmp_String, Str => Option_Flow_Analysis_Data); 
when Info_Flow => E_Strings.Append_String (E_Str => Tmp_String, Str => Option_Flow_Analysis_Information); when Auto_Flow => E_Strings.Append_String (E_Str => Tmp_String, Str => Option_Flow_Analysis_Auto); end case; Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); --# assert True; Tmp_String := E_Strings.Copy_String (Str => Option_Policy & "="); case Content.Info_Flow_Policy is when None => null; when Safety => E_Strings.Append_String (E_Str => Tmp_String, Str => Option_Policy_Safety); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); when Security => E_Strings.Append_String (E_Str => Tmp_String, Str => Option_Policy_Security); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); end case; --# assert True; Tmp_String := E_Strings.Copy_String (Str => Option_Language & "="); case Content.Language_Profile is when SPARK83 => E_Strings.Append_String (E_Str => Tmp_String, Str => Option_Language_83); when SPARK95 => E_Strings.Append_String (E_Str => Tmp_String, Str => Option_Language_95); when SPARK2005 => E_Strings.Append_String (E_Str => Tmp_String, Str => Option_Language_2005); when KCG => E_Strings.Append_String (E_Str => Tmp_String, Str => Option_Language_KCG); end case; Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); Tmp_String := E_Strings.Copy_String (Str => Option_Profile & "="); case Content.Concurrency_Profile is when Ravenscar => E_Strings.Append_String (E_Str => Tmp_String, Str => Option_Profile_Ravenscar); when Sequential => E_Strings.Append_String (E_Str => Tmp_String, Str => Option_Profile_Sequential); end case; Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); Tmp_String := E_Strings.Copy_String (Str => Option_Annotation_Character & "="); E_Strings.Append_Char (E_Str => Tmp_String, Ch => Content.Anno_Char); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); --# assert True; Tmp_String := E_Strings.Copy_String (Str => Option_Rules & "="); case Content.Constant_Rules is when No_Rules => E_Strings.Append_String (E_Str => Tmp_String, Str => Option_Rules_None); when Lazy => E_Strings.Append_String (E_Str => Tmp_String, Str => Option_Rules_Lazy); when Keen => E_Strings.Append_String (E_Str => Tmp_String, Str => Option_Rules_Keen); when All_Rules => E_Strings.Append_String (E_Str => Tmp_String, Str => Option_Rules_All); end case; Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); --# assert True; if Content.Legacy_Errors then Tmp_String := E_Strings.Copy_String (Str => Option_Original_Flow_Errors); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); end if; if not Content.Generate_SLI then Tmp_String := E_Strings.Copy_String (Str => Option_No_Sli); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); end if; --# assert True; Tmp_String := E_Strings.Copy_String (Str => Option_Error_Explanations & "="); case Content.Error_Explanation is when Off => E_Strings.Append_String (E_Str => Tmp_String, Str => Option_Error_Explanations_Off); when First_Occurrence => E_Strings.Append_String (E_Str => Tmp_String, Str => Option_Error_Explanations_First_Occurrence); when Every_Occurrence => E_Strings.Append_String (E_Str => 
Tmp_String, Str => Option_Error_Explanations_Every_Occurrence); end case; Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); Tmp_String := E_Strings.Copy_String (Str => Option_Justification_Option & "="); case Content.Justification_Option is when Ignore => E_Strings.Append_String (E_Str => Tmp_String, Str => Option_Justification_Option_Ignore); when Full => E_Strings.Append_String (E_Str => Tmp_String, Str => Option_Justification_Option_Full); when Brief => E_Strings.Append_String (E_Str => Tmp_String, Str => Option_Justification_Option_Brief); end case; Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); --# assert True; if Content.Casing_Standard or else Content.Casing_Identifier then Tmp_String := E_Strings.Copy_String (Str => Option_Casing & "="); if Content.Casing_Standard then E_Strings.Append_Char (E_Str => Tmp_String, Ch => Option_Casing_Standard); end if; if Content.Casing_Identifier then E_Strings.Append_Char (E_Str => Tmp_String, Ch => Option_Casing_Identifier); end if; Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); end if; if Content.Debug.Enabled then Tmp_String := E_Strings.Copy_String (Str => Option_Debug); Is_First := True; if Content.Debug.Expressions then E_Strings.Append_Char (E_Str => Tmp_String, Ch => '='); Is_First := False; E_Strings.Append_Char (E_Str => Tmp_String, Ch => Option_Debug_E); end if; --# assert True; if Content.Debug.HTML then if Is_First then E_Strings.Append_Char (E_Str => Tmp_String, Ch => '='); Is_First := False; end if; E_Strings.Append_Char (E_Str => Tmp_String, Ch => Option_Debug_H); end if; --# assert True; if Content.Debug.Lookup_Trace then if Is_First then E_Strings.Append_Char (E_Str => Tmp_String, Ch => '='); Is_First := False; end if; E_Strings.Append_Char (E_Str => Tmp_String, Ch => Option_Debug_L); end if; --# assert True; if Content.Debug.File_Names then if Is_First then E_Strings.Append_Char (E_Str => Tmp_String, Ch => '='); Is_First := False; end if; E_Strings.Append_Char (E_Str => Tmp_String, Ch => Option_Debug_F); end if; --# assert True; if Content.Debug.Units then if Is_First then E_Strings.Append_Char (E_Str => Tmp_String, Ch => '='); Is_First := False; end if; E_Strings.Append_Char (E_Str => Tmp_String, Ch => Option_Debug_U); end if; --# assert True; if Content.Debug.Invariants then if Is_First then E_Strings.Append_Char (E_Str => Tmp_String, Ch => '='); Is_First := False; end if; E_Strings.Append_Char (E_Str => Tmp_String, Ch => Option_Debug_I); end if; --# assert True; if Content.Debug.Components then if Is_First then E_Strings.Append_Char (E_Str => Tmp_String, Ch => '='); Is_First := False; end if; E_Strings.Append_Char (E_Str => Tmp_String, Ch => Option_Debug_C); end if; --# assert True; if Content.Debug.Rho then if Is_First then E_Strings.Append_Char (E_Str => Tmp_String, Ch => '='); Is_First := False; end if; E_Strings.Append_Char (E_Str => Tmp_String, Ch => Option_Debug_R); end if; --# assert True; if Content.Debug.Parser then if Is_First then E_Strings.Append_Char (E_Str => Tmp_String, Ch => '='); Is_First := False; end if; E_Strings.Append_Char (E_Str => Tmp_String, Ch => Option_Debug_P); end if; if Content.Debug.FDL_Ranking then if Is_First then E_Strings.Append_Char (E_Str => Tmp_String, Ch => '='); Is_First := False; end if; E_Strings.Append_Char (E_Str => Tmp_String, Ch => Option_Debug_K); end if; --# assert True; if Content.Debug.VCG then if Is_First then 
E_Strings.Append_Char (E_Str => Tmp_String, Ch => '='); Is_First := False; end if; E_Strings.Append_Char (E_Str => Tmp_String, Ch => Option_Debug_V); end if; --# assert True; if Content.Debug.VCG_All then if Is_First then E_Strings.Append_Char (E_Str => Tmp_String, Ch => '='); Is_First := False; end if; E_Strings.Append_Char (E_Str => Tmp_String, Ch => Option_Debug_V_Upper); end if; --# assert True; if Content.Debug.DAG then if Is_First then E_Strings.Append_Char (E_Str => Tmp_String, Ch => '='); Is_First := False; end if; E_Strings.Append_Char (E_Str => Tmp_String, Ch => Option_Debug_D); end if; --# assert True; if Content.Debug.SLI then if Is_First then E_Strings.Append_Char (E_Str => Tmp_String, Ch => '='); end if; E_Strings.Append_Char (E_Str => Tmp_String, Ch => Option_Debug_X); end if; Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => False); end if; --# assert True; if Content.Output_Directory then -- output_directory _has_ been specified, so firstly -- report exactly what the user gave on the command-line -- as the requested output directory Tmp_String := E_Strings.Copy_String (Str => Option_Output_Directory & "="); Out_Dir := Content.Output_Directory_Name; -- Out_Dir might have an EndOfPath (normally '/' or '\') on -- the end, so remove it here. FileSystem.Remove_End_Of_Path_If_Present (D => Out_Dir); E_Strings.Append_Examiner_String (E_Str1 => Tmp_String, E_Str2 => Out_Dir); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); -- Now report the actual output directory. if Content.Plain_Output then -- Plain output mode, so just repeat the requested dir. Tmp_String := E_Strings.Copy_String (Str => Dir_Actual_C); E_Strings.Append_Examiner_String (E_Str1 => Tmp_String, E_Str2 => Out_Dir); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); else -- Verbose output mode...imagine we're creating a dummy -- file called "a" in the requested output directory. -- Normalize this filename for the output dir, then -- find its full pathname relative to CWD, then report -- that, minus the "a" on the end... Tmp_String := E_Strings.Copy_String (Str => Dir_Actual_C); Out_Dir := E_Strings.Copy_String (Str => "a"); Normalize_File_Name_To_Output_Directory (Out_Dir); CWD := FileSystem.Working_Directory; FileSystem.Append_End_Of_Path_If_Needed (D => CWD); Out_Dir := FileSystem.Interpret_Relative (File_Name => Out_Dir, Relative_To_Directory => CWD); -- Drop the "a" off the end... Out_Dir := E_Strings.Section (E_Str => Out_Dir, Start_Pos => 1, Length => E_Strings.Get_Length (E_Str => Out_Dir) - 1); E_Strings.Append_Examiner_String (E_Str1 => Tmp_String, E_Str2 => Out_Dir); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); end if; else -- output_directory NOT specified, so report "." Tmp_String := E_Strings.Copy_String (Str => Option_Output_Directory & "="); E_Strings.Append_String (E_Str => Tmp_String, Str => "."); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); if Content.Plain_Output then -- Plain output, so report "." 
as actual output directory -- to avoid diff between runs and platforms Tmp_String := E_Strings.Copy_String (Str => Dir_Actual_C); E_Strings.Append_String (E_Str => Tmp_String, Str => "."); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); else -- Verbose output, so report full pathname of -- current working directory Tmp_String := E_Strings.Copy_String (Str => Dir_Actual_C); CWD := FileSystem.Working_Directory; FileSystem.Append_End_Of_Path_If_Needed (D => CWD); E_Strings.Append_Examiner_String (E_Str1 => Tmp_String, E_Str2 => CWD); Output_Option (Prefix => Prefix, Option => Tmp_String, Acc => Option_Str, XML => XML, For_XML => True); end if; end if; end Output_Command_Line; function Ravenscar_Selected return Boolean is begin return Content.Concurrency_Profile = Ravenscar; end Ravenscar_Selected; end CommandLineData; spark-2012.0.deb/examiner/examiner.adb0000644000175000017500000011765311753202336016546 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Ada.Command_Line; with Ada.Exceptions; with CommandLineData; with CommandLineHandler; with Dictionary; with ErrorHandler; with Fatal; with File_Utils; with GNAT.Traceback.Symbolic; with LexTokenManager; with MainLoop; with ScreenEcho; with SPARK_IO; with Statistics; with STree; with Version; --# inherit CommandLineData, --# CommandLineHandler, --# ConfigFile, --# ContextManager, --# ContextManager.Ops, --# Declarations, --# Dictionary, --# ErrorHandler, --# File_Utils, --# Graph, --# IndexManager, --# LexTokenManager, --# MainLoop, --# ScreenEcho, --# Sem, --# SLI, --# SparkHTML, --# SparkLex, --# SPARK_IO, --# Statistics, --# StmtStack, --# STree, --# VCG, --# Version, --# XMLReport; --# main_program procedure Examiner --# global in out ConfigFile.State; --# in out ContextManager.Ops.File_Heap; --# in out ContextManager.Ops.Unit_Heap; --# in out ContextManager.Ops.Unit_Stack; --# in out Declarations.State; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out IndexManager.State; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SparkHTML.Generate_HTML; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out STree.Table; --# in out VCG.Invoked; --# out CommandLineData.Content; --# out Dictionary.Dict; --# out Sem.State; --# out SparkHTML.HTML_Work_Dir; --# out SparkHTML.SPARK_Work_Dir; --# out SparkLex.Curr_Line; --# out XMLReport.State; --# derives CommandLineData.Content, --# Sem.State, --# SparkLex.Curr_Line, --# XMLReport.State from ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SLI.State, --# SparkHTML.Generate_HTML, --# SPARK_IO.File_Sys, --# STree.Table & --# ConfigFile.State from *, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Declarations.State, --# ErrorHandler.Error_Context, --# Graph.Table, --# IndexManager.State, --# LexTokenManager.State, --# SLI.State, --# Statistics.TableUsage, --# StmtStack.S, --# STree.Table, --# VCG.Invoked from *, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SLI.State, --# SparkHTML.Generate_HTML, --# SPARK_IO.File_Sys, --# STree.Table & --# Dictionary.Dict, --# SparkHTML.Generate_HTML from ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SLI.State, --# SparkHTML.Generate_HTML, --# SPARK_IO.File_Sys, --# Statistics.TableUsage, --# STree.Table & --# SparkHTML.HTML_Work_Dir, --# SparkHTML.SPARK_Work_Dir from LexTokenManager.State, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SLI.State, --# SparkHTML.Generate_HTML, --# Statistics.TableUsage, --# STree.Table, --# VCG.Invoked; is procedure Set_Exit_Status (Code : in ErrorHandler.Exit_Code) --# derives null from Code; is --# hide Set_Exit_Status; begin Ada.Command_Line.Set_Exit_Status 
(Ada.Command_Line.Exit_Status (Code)); end Set_Exit_Status; procedure Execute --# global in out CommandLineData.Content; --# in out ConfigFile.State; --# in out ContextManager.Ops.File_Heap; --# in out ContextManager.Ops.Unit_Heap; --# in out ContextManager.Ops.Unit_Stack; --# in out Declarations.State; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out IndexManager.State; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SparkHTML.Generate_HTML; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out STree.Table; --# in out VCG.Invoked; --# out Dictionary.Dict; --# out Sem.State; --# out SparkHTML.HTML_Work_Dir; --# out SparkHTML.SPARK_Work_Dir; --# out SparkLex.Curr_Line; --# out XMLReport.State; --# derives CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Declarations.State, --# ErrorHandler.Error_Context, --# Graph.Table, --# IndexManager.State, --# LexTokenManager.State, --# SLI.State, --# Statistics.TableUsage, --# StmtStack.S, --# STree.Table, --# VCG.Invoked from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SLI.State, --# SparkHTML.Generate_HTML, --# SPARK_IO.File_Sys, --# STree.Table & --# ConfigFile.State from *, --# CommandLineData.Content, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# Dictionary.Dict, --# SparkHTML.Generate_HTML from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SLI.State, --# SparkHTML.Generate_HTML, --# SPARK_IO.File_Sys, --# Statistics.TableUsage, --# STree.Table & --# Sem.State, --# SparkLex.Curr_Line, --# XMLReport.State from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SLI.State, --# SparkHTML.Generate_HTML, --# SPARK_IO.File_Sys, --# STree.Table & --# SparkHTML.HTML_Work_Dir, --# SparkHTML.SPARK_Work_Dir from CommandLineData.Content, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SLI.State, --# SparkHTML.Generate_HTML, --# Statistics.TableUsage, --# STree.Table, --# VCG.Invoked; is procedure Star_Line --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *; is begin ScreenEcho.Put_Line ("*****************************************************************************"); end Star_Line; procedure Support_Details --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content; is begin -- Suppress support details when in plain mode to avoid -- spurious diff when testing non SPARK Pro builds if CommandLineData.Content.Plain_Output then ScreenEcho.Put_Line ("* Support details - Suppressed in plain mode"); else ScreenEcho.Put_Line ("*"); ScreenEcho.Put_Line ("* " & Version.Toolset_Support_Line1); ScreenEcho.Put_Line 
("* " & Version.Toolset_Support_Line2); ScreenEcho.Put_Line ("* " & Version.Toolset_Support_Line3); ScreenEcho.Put_Line ("* " & Version.Toolset_Support_Line4); end if; end Support_Details; begin -- Execute if CommandLineData.Content.Valid then MainLoop.Process_Files; end if; --# accept Flow, 602, Dictionary.Dict, Dictionary.Dict, "Defined before use in MainLoop" & --# Flow, 602, Sem.State, Sem.State, "Defined before use in MainLoop" & --# Flow, 602, SparkHTML.HTML_Work_Dir, SparkHTML.HTML_Work_Dir, "Defined before use in MainLoop" & --# Flow, 602, SparkHTML.SPARK_Work_Dir, SparkHTML.SPARK_Work_Dir, "Defined before use in MainLoop" & --# Flow, 602, SparkLex.Curr_Line, SparkLex.Curr_Line, "Defined before use in MainLoop" & --# Flow, 602, XMLReport.State, XMLReport.State, "Defined before use in MainLoop"; exception --# hide Execute; when Fatal.Static_Limit => STree.ReportUsage; LexTokenManager.Report_Usage; Dictionary.ReportUsage; Statistics.WriteOutput (SPARK_IO.Standard_Output); Support_Details; Star_Line; raise; when Fatal.Operating_System_Limit | Fatal.Internal_Error => Support_Details; Star_Line; raise; when Fatal.Index_Manager => Star_Line; raise; when Storage_Error => Star_Line; ScreenEcho.Put_Line ("* A storage_error has occurred. You may have too small a system stack limit,"); ScreenEcho.Put_Line ("* insufficient memory or too many other processes running."); Support_Details; Star_Line; raise; when others => Star_Line; ScreenEcho.Put_Line ("* An unexpected internal error has occurred."); Support_Details; Star_Line; raise; end Execute; procedure Print_Help --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content; is subtype Option_String_Range is Positive range 1 .. 1; subtype Option_String_T is String (Option_String_Range); Option_String_Casing_Standard : constant Option_String_T := Option_String_T'(others => CommandLineData.Option_Casing_Standard); Option_String_Casing_Identifier : constant Option_String_T := Option_String_T'(others => CommandLineData.Option_Casing_Identifier); Option_String_Debug_C : constant Option_String_T := Option_String_T'(others => CommandLineData.Option_Debug_C); Option_String_Debug_D : constant Option_String_T := Option_String_T'(others => CommandLineData.Option_Debug_D); Option_String_Debug_E : constant Option_String_T := Option_String_T'(others => CommandLineData.Option_Debug_E); Option_String_Debug_F : constant Option_String_T := Option_String_T'(others => CommandLineData.Option_Debug_F); Option_String_Debug_H : constant Option_String_T := Option_String_T'(others => CommandLineData.Option_Debug_H); Option_String_Debug_I : constant Option_String_T := Option_String_T'(others => CommandLineData.Option_Debug_I); Option_String_Debug_K : constant Option_String_T := Option_String_T'(others => CommandLineData.Option_Debug_K); Option_String_Debug_L : constant Option_String_T := Option_String_T'(others => CommandLineData.Option_Debug_L); Option_String_Debug_P : constant Option_String_T := Option_String_T'(others => CommandLineData.Option_Debug_P); Option_String_Debug_R : constant Option_String_T := Option_String_T'(others => CommandLineData.Option_Debug_R); Option_String_Debug_T : constant Option_String_T := Option_String_T'(others => CommandLineData.Option_Debug_T); Option_String_Debug_U : constant Option_String_T := Option_String_T'(others => CommandLineData.Option_Debug_U); Option_String_Debug_V : constant Option_String_T := Option_String_T'(others => CommandLineData.Option_Debug_V); 
Option_String_Debug_V_Upper : constant Option_String_T := Option_String_T'(others => CommandLineData.Option_Debug_V_Upper); Option_String_Debug_X : constant Option_String_T := Option_String_T'(others => CommandLineData.Option_Debug_X); begin ScreenEcho.Put_Line ("Usage: spark {options} Argument-list"); ScreenEcho.Put_Line ("Argument-list = Argument {"" "" Argument }"); ScreenEcho.Put_Line ("Argument = ( File-spec [Argument-option] ) | Meta-file-spec"); ScreenEcho.Put_Line ("Argument-option = ( ""-" & CommandLineData.Option_Listing_File & "="" file-spec | ""-" & CommandLineData.Option_No_Listing_File & """ )"); ScreenEcho.Put_Line ("Meta-file-spec = ""@""file-spec"); ScreenEcho.New_Line (1); ScreenEcho.Put_Line ("Options - all may be abbreviated to the shortest unique prefix"); ScreenEcho.New_Line (1); ScreenEcho.Put_Line ("Input File Options"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Source_Extension & "=file-type - specifies source file extension (Default .ada)"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Index_File & "=file-spec - specifies index file"); ScreenEcho.Put_Line ("-no" & CommandLineData.Option_Index_File & " - suppress index file (Default)"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Warning_File & "=file-spec - specifies warning control file"); ScreenEcho.Put_Line ("-no" & CommandLineData.Option_Warning_File & " - all warnings reported (Default)"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Target_Compiler_Data & "=file-spec - specifies target compiler data"); ScreenEcho.Put_Line (" This option is now deprecated by -config_file"); ScreenEcho.Put_Line ("-no" & CommandLineData.Option_Target_Compiler_Data & " - suppress target compiler data (Default)"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Config_File & "=file-spec - specifies Examiner configuration file"); ScreenEcho.Put_Line ("-no" & CommandLineData.Option_Config_File & " - suppress configuration file (Default)"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_No_Switch & " - ignore spark.sw file"); ScreenEcho.New_Line (1); ScreenEcho.Put_Line ("Output File Options"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Listing_Extension & "=file-type - specifies listing file extension (Default .lst)"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Report_File & "=file-spec - specifies report file name (default SPARK.REP)"); ScreenEcho.Put_Line ("-no" & CommandLineData.Option_Report_File & " - suppress report file"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Html & "[=dir_spec] - Generate HTML listings and report file"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Output_Directory & "=dir_spec - Generate report, listing, and proof files"); ScreenEcho.Put_Line (" in specified directory"); ScreenEcho.Put_Line (" Default is in and below current working directory"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_No_Listings & " - suppress all listing files"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Plain_Output & " - No dates, line, or error numbers in output files"); ScreenEcho.New_Line (1); ScreenEcho.Put_Line ("Language and Analysis Options"); ScreenEcho.Put_String ("-" & CommandLineData.Option_Language & "=choice - select " & CommandLineData.Option_Language_83 & ", " & CommandLineData.Option_Language_95); if CommandLineData.Content.Distribution_Is_Pro then ScreenEcho.Put_Line (", " & CommandLineData.Option_Language_2005 & ", or " & CommandLineData.Option_Language_KCG & " language rules."); ScreenEcho.Put_Line (" " & 
CommandLineData.Option_Language_KCG & " rules are only used with SCADE KCG."); else ScreenEcho.Put_Line (", or " & CommandLineData.Option_Language_2005 & " language rules."); end if; ScreenEcho.Put_Line (" Default is " & CommandLineData.Option_Language_95); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Profile & "=language - select " & CommandLineData.Option_Profile_Sequential & " or " & CommandLineData.Option_Profile_Ravenscar & " language profile."); ScreenEcho.Put_Line (" Default is " & CommandLineData.Option_Profile_Sequential); ScreenEcho.Put_Line ("-" & CommandLineData.Option_No_Duration & " - do not predefine Standard.Duration"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Syntax_Check & " - syntax check only. No semantic checks"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Flow_Analysis & "=type - select """ & CommandLineData.Option_Flow_Analysis_Information & """-, """ & CommandLineData.Option_Flow_Analysis_Data & """- or """ & CommandLineData.Option_Flow_Analysis_Auto & """-flow analysis"); ScreenEcho.Put_Line (" Default is """ & CommandLineData.Option_Flow_Analysis_Information & """"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Policy & "=type - select """ & CommandLineData.Option_Policy_Security & """ or """ & CommandLineData.Option_Policy_Safety & """ policy for flow analysis"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Vcg & " - Generate VCs"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Dpc & " - Generate DPCs"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Rules & "=type - Select policy for generation of composite constant proof rules"); ScreenEcho.Put_Line (" - valid values: " & CommandLineData.Option_Rules_None & ", " & CommandLineData.Option_Rules_Lazy & ", " & CommandLineData.Option_Rules_Keen & ", " & CommandLineData.Option_Rules_All); ScreenEcho.Put_Line (" - default: " & CommandLineData.Option_Rules_Lazy); ScreenEcho.New_Line (1); ScreenEcho.Put_Line ("Debugging and Tracing Options"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Debug & "[=choices] - choices is a sequence of letters, where:"); ScreenEcho.Put_Line (" " & Option_String_Debug_C & " - trace component manager state"); ScreenEcho.Put_Line (" " & Option_String_Debug_D & " - print FDL DAG after BuildExpnDAG"); ScreenEcho.Put_Line (" " & Option_String_Debug_E & " - trace expression walking"); ScreenEcho.Put_Line (" " & Option_String_Debug_F & " - trace file handling"); ScreenEcho.Put_Line (" " & Option_String_Debug_H & " - trace HTML generation"); ScreenEcho.Put_Line (" " & Option_String_Debug_I & " - print default loop invariants in FDL"); ScreenEcho.Put_Line (" " & Option_String_Debug_K & " - trace ranking and printing of FDL declarations"); ScreenEcho.Put_Line (" " & Option_String_Debug_L & " - trace entity lookup in dictionary"); ScreenEcho.Put_Line (" " & Option_String_Debug_P & " - print parser state on syntax error"); ScreenEcho.Put_Line (" " & Option_String_Debug_R & " - print required and computed flow relations"); ScreenEcho.Put_Line (" for each subprogram"); ScreenEcho.Put_Line (" " & Option_String_Debug_T & " - print extra information when -statistics is on"); ScreenEcho.Put_Line (" " & Option_String_Debug_U & " - trace required unit and index file lookups"); ScreenEcho.Put_Line (" " & Option_String_Debug_V & " - print VCG State and BPG after DAG.BuildGraph"); ScreenEcho.Put_Line (" " & Option_String_Debug_V_Upper & " - as v, but also print BPG during each iteration"); ScreenEcho.Put_Line (" of VC generation"); ScreenEcho.Put_Line (" " & 
Option_String_Debug_X & " - print cross-reference"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Dictionary_File & "=file-spec - dump Dictionary into given file"); ScreenEcho.New_Line (1); ScreenEcho.Put_Line ("Other Options"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Annotation_Character & "=char - select alternate annotation character (Default #)"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_No_Echo & " - suppress screen output"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_No_Sli & " - don't generate SLI files"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_SPARK_Lib & " - use the standard SPARK library"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Statistics & " - append Examiner table usage statistics to report file"); ScreenEcho.Put_Line ("-no" & CommandLineData.Option_Statistics & " - no statistics reported (Default)"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Fdl_Identifiers & "=option - Control treatment of FDL identifiers"); ScreenEcho.Put_Line (" settings are """ & CommandLineData.Option_Fdl_Identifiers_Reject & """ (Default),"); ScreenEcho.Put_Line (" """ & CommandLineData.Option_Fdl_Identifiers_Accept & """ or"); ScreenEcho.Put_Line (" """""); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Version & " - print Examiner banner, statistics, then exit"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Brief & "[=choices] - errors and warnings in brief (gcc-style) format, where:"); ScreenEcho.Put_Line (" " & CommandLineData.Option_Brief_No_Path & " - omit path (Default)"); ScreenEcho.Put_Line (" " & CommandLineData.Option_Brief_Full_Path & " - report full path"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Makefile_Mode & " - only absolutely essential output, useful when using"); ScreenEcho.Put_Line (" the Examiner in a makefile (requires -" & CommandLineData.Option_Brief & ")"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Help & " - print command line summary and options"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Original_Flow_Errors & " - print information flow errors in original,"); ScreenEcho.Put_Line (" less compact, format"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Error_Explanations & "=setting - print explanations after error messages"); ScreenEcho.Put_Line (" settings are """ & CommandLineData.Option_Error_Explanations_Off & """, """ & CommandLineData.Option_Error_Explanations_First_Occurrence & """"); ScreenEcho.Put_Line (" or """ & CommandLineData.Option_Error_Explanations_Every_Occurrence & """"); ScreenEcho.Put_Line (" default is """ & CommandLineData.Option_Error_Explanations_Off & """"); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Justification_Option & "=type - select policy for justification of errors"); ScreenEcho.Put_Line (" - values: " & CommandLineData.Option_Justification_Option_Full & ", " & CommandLineData.Option_Justification_Option_Brief & ", " & CommandLineData.Option_Justification_Option_Ignore); ScreenEcho.Put_Line (" - default: " & CommandLineData.Option_Justification_Option_Full); ScreenEcho.Put_Line ("-" & CommandLineData.Option_Casing & "[=choices] - check casing for identifier references and"); ScreenEcho.Put_Line (" check casing of package Standard identifiers"); ScreenEcho.Put_Line (" - " & Option_String_Casing_Standard & " - check casing of package Standard identifiers"); ScreenEcho.Put_Line (" - " & Option_String_Casing_Identifier & " - check casing for identifier references"); ScreenEcho.New_Line (1); ScreenEcho.Put_Line (Version.Toolset_Support_Line1); 
ScreenEcho.Put_Line (Version.Toolset_Support_Line2); ScreenEcho.Put_Line (Version.Toolset_Support_Line3); ScreenEcho.Put_Line (Version.Toolset_Support_Line4); end Print_Help; procedure MainProcedure --# global in out ConfigFile.State; --# in out ContextManager.Ops.File_Heap; --# in out ContextManager.Ops.Unit_Heap; --# in out ContextManager.Ops.Unit_Stack; --# in out Declarations.State; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out IndexManager.State; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SparkHTML.Generate_HTML; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out STree.Table; --# in out VCG.Invoked; --# out CommandLineData.Content; --# out Dictionary.Dict; --# out Sem.State; --# out SparkHTML.HTML_Work_Dir; --# out SparkHTML.SPARK_Work_Dir; --# out SparkLex.Curr_Line; --# out XMLReport.State; --# derives CommandLineData.Content, --# Sem.State, --# SparkLex.Curr_Line, --# XMLReport.State from ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SLI.State, --# SparkHTML.Generate_HTML, --# SPARK_IO.File_Sys, --# STree.Table & --# ConfigFile.State from *, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Declarations.State, --# ErrorHandler.Error_Context, --# Graph.Table, --# IndexManager.State, --# LexTokenManager.State, --# SLI.State, --# Statistics.TableUsage, --# StmtStack.S, --# STree.Table, --# VCG.Invoked from *, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SLI.State, --# SparkHTML.Generate_HTML, --# SPARK_IO.File_Sys, --# STree.Table & --# Dictionary.Dict, --# SparkHTML.Generate_HTML from ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SLI.State, --# SparkHTML.Generate_HTML, --# SPARK_IO.File_Sys, --# Statistics.TableUsage, --# STree.Table & --# SparkHTML.HTML_Work_Dir, --# SparkHTML.SPARK_Work_Dir from LexTokenManager.State, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SLI.State, --# SparkHTML.Generate_HTML, --# Statistics.TableUsage, --# STree.Table, --# VCG.Invoked; is begin CommandLineData.Initialize; -- Process the command line before printing the banner, since -- the banner depends on the -xxxplain option CommandLineHandler.Process; if CommandLineData.Content.Version_Requested then -- User has requested the Examiner Version banner - we print -- this, clean up, and exit with no further action - source -- files are not examined. File_Utils.Print_A_Header (File => SPARK_IO.Standard_Output, Header_Line => "", File_Type => File_Utils.Other_File); Statistics.WriteOutput (SPARK_IO.Standard_Output); Set_Exit_Status (0); elsif CommandLineData.Content.Help_Requested then -- User has requested help - we print then banner, -- a summary of command line options, clean up, and exit -- with no further action - source files are not examined. 
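            -- Exit status summary: 0 after -version or -help, 8 when the
            -- command line is invalid, otherwise the value reported by
            -- ErrorHandler.Get_Errors_Type once Execute has completed.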
File_Utils.Print_A_Header (File => SPARK_IO.Standard_Output, Header_Line => "", File_Type => File_Utils.Other_File); Print_Help; Set_Exit_Status (0); elsif not CommandLineData.Content.Valid then Set_Exit_Status (8); else CommandLineData.Dump_File_Names; if CommandLineData.Content.Valid and then CommandLineData.Content.Echo and then not CommandLineData.Content.Brief then File_Utils.Print_A_Header (File => SPARK_IO.Standard_Output, Header_Line => "", File_Type => File_Utils.Other_File); -- The Examiner used to print the banner first, -- then process the command line. To make the -plain option work, -- though, we have to process the command line first (since the content -- of the banner now depends on the command line), THEN print the banner. -- The printing of the following message, therefore, has been moved here -- to preserve the old look-and-feel of the Examiner. if CommandLineData.Content.Default_Switch_File then ScreenEcho.New_Line (1); ScreenEcho.Put_Line (" Reading default switch file ..."); end if; end if; Execute; Set_Exit_Status (ErrorHandler.Get_Errors_Type); end if; --# accept Flow, 602, Dictionary.Dict, Dictionary.Dict, "Defined before use in MainLoop" & --# Flow, 602, Sem.State, Sem.State, "Defined before use in MainLoop" & --# Flow, 602, SparkHTML.HTML_Work_Dir, SparkHTML.HTML_Work_Dir, "Defined before use in MainLoop" & --# Flow, 602, SparkHTML.SPARK_Work_Dir, SparkHTML.SPARK_Work_Dir, "Defined before use in MainLoop" & --# Flow, 602, SparkLex.Curr_Line, SparkLex.Curr_Line, "Defined before use in MainLoop" & --# Flow, 602, XMLReport.State, XMLReport.State, "Defined before use in MainLoop"; end MainProcedure; begin MainProcedure; exception --# hide Examiner; when Fatal.Index_Manager => ScreenEcho.New_Line (1); ScreenEcho.Put_Line ("The Examiner has encountered an error in an index file and is unable to continue."); when E : others => -- Traceback information will vary between different platforms. Thus it -- is suppressed where operating in plain mode. if CommandLineData.Content.Plain_Output then ScreenEcho.New_Line (1); ScreenEcho.Put_Line ("Unhandled Exception in Spark."); ScreenEcho.Put_Line ("Operating in plain mode: Exception and traceback information suppressed."); else ScreenEcho.New_Line (1); ScreenEcho.Put_Line ("Unhandled Exception in Spark. Exception information:"); ScreenEcho.Put_Line (Ada.Exceptions.Exception_Information (E)); ScreenEcho.Put_Line ("Traceback:"); ScreenEcho.Put_Line (GNAT.Traceback.Symbolic.Symbolic_Traceback (E)); end if; Ada.Command_Line.Set_Exit_Status (9); end Examiner; spark-2012.0.deb/examiner/sem-get_literal_value.adb0000644000175000017500000000325711753202336021201 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. 
If not, go to http://www.gnu.org/licenses for a complete copy of
-- the license.
--
--=============================================================================

separate (Sem)
procedure Get_Literal_Value (Node : in STree.SyntaxNode;
                             Val  : out Maths.Value) is
   Error_Val : Maths.ErrorCode;
begin
   Maths.LiteralToValue (Node_Lex_String (Node => Node), Val, Error_Val);
   if Error_Val = Maths.IllegalValue then
      ErrorHandler.Semantic_Error
        (Err_Num   => 401,
         Reference => ErrorHandler.No_Reference,
         Position  => Node_Position (Node => Node),
         Id_Str    => LexTokenManager.Null_String);
   elsif Error_Val = Maths.OverFlow then
      ErrorHandler.Semantic_Warning
        (Err_Num  => 200,
         Position => Node_Position (Node => Node),
         Id_Str   => LexTokenManager.Null_String);
   end if;
end Get_Literal_Value;
spark-2012.0.deb/examiner/adjustfdl_rws.ads0000644000175000017500000000235411753202335017620 0ustar eugeneugen
-------------------------------------------------------------------------------
-- (C) Altran Praxis Limited
-------------------------------------------------------------------------------
--
-- The SPARK toolset is free software; you can redistribute it and/or modify it
-- under terms of the GNU General Public License as published by the Free
-- Software Foundation; either version 3, or (at your option) any later
-- version. The SPARK toolset is distributed in the hope that it will be
-- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
-- Public License for more details. You should have received a copy of the GNU
-- General Public License distributed with the SPARK toolset; see file
-- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of
-- the license.
--
--=============================================================================
with E_Strings;
--# inherit E_Strings,
--#         SparkLex;

package AdjustFDL_RWs is

   procedure Possibly_Adjust (E_Str  : in out E_Strings.T;
                              Prefix : in     E_Strings.T);
   --# derives E_Str from *,
   --#                    Prefix;

end AdjustFDL_RWs;
spark-2012.0.deb/examiner/sem-compunit-stack.adb0000644000175000017500000000565311753202336020455 0ustar eugeneugen
-------------------------------------------------------------------------------
-- (C) Altran Praxis Limited
-------------------------------------------------------------------------------
--
-- The SPARK toolset is free software; you can redistribute it and/or modify it
-- under terms of the GNU General Public License as published by the Free
-- Software Foundation; either version 3, or (at your option) any later
-- version. The SPARK toolset is distributed in the hope that it will be
-- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
-- Public License for more details. You should have received a copy of the GNU
-- General Public License distributed with the SPARK toolset; see file
-- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of
-- the license.
--
--=============================================================================
separate (Sem.CompUnit)
package body Stack
--# own State is S,
--#              Top_Ptr;
is
   subtype Index_Range is Integer range 1 .. ExaminerConstants.WfCompilationUnitStackMax;
   type Stack_Array is array (Index_Range) of Boolean;
   subtype Top_Range is Integer range 0 ..
ExaminerConstants.WfCompilationUnitStackMax; S : Stack_Array; Top_Ptr : Top_Range; procedure Init --# global out S; --# out Top_Ptr; --# derives S, --# Top_Ptr from ; is begin Top_Ptr := 0; S := Stack_Array'(others => False); end Init; procedure Push (X : in Boolean) --# global in out S; --# in out Top_Ptr; --# derives S from *, --# Top_Ptr, --# X & --# Top_Ptr from *; is begin if Top_Ptr < ExaminerConstants.WfCompilationUnitStackMax then Top_Ptr := Top_Ptr + 1; S (Top_Ptr) := X; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Wf_Compilation_Unit_Stack_Overflow, Msg => "in Stack.Push"); end if; end Push; -- return of Item removed, it is never used, pop just clears a stack item procedure Pop --# global in out Top_Ptr; --# derives Top_Ptr from *; is begin if Top_Ptr > 0 then Top_Ptr := Top_Ptr - 1; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Wf_Compilation_Unit_Stack_Underflow, Msg => "in Stack.Pop"); end if; end Pop; function Top return Boolean --# global in S; --# in Top_Ptr; is begin --# accept Flow, 10, "Expected ineffective assignment to Top_Ptr"; if Top_Ptr = 0 then --# end accept; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Wf_Compilation_Unit_Stack_Underflow, Msg => "in Stack.Top"); end if; return S (Top_Ptr); end Top; end Stack; spark-2012.0.deb/examiner/dictionary-is_callable.adb0000644000175000017500000004005011753202336021315 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= ---------------------------------------------------------------------------------------- -- Checks the following: -- if not Dotted then SubProgSym has body and is not being called from within -- itself -- or, if not dotted but subprogram is declared remotely then it must be an -- an inherited op associated with a tagged type and that is ok -- if dotted then if proc in enclosing package then proc has body -- else if -- package embedded in something then package has body -- or, if package does not have body, does -- subprogram have body (might have pragma -- import/interface) -- else if called from [descendent of] private child of proc's package -- then error -- else its Ok -------------------------------------------------------------------------------- separate (Dictionary) function Is_Callable (The_Subprogram : RawDict.Subprogram_Info_Ref; Prefix_Needed : Boolean; Scope : Scopes) return Boolean is The_Region : Symbol; The_Package : RawDict.Package_Info_Ref; Result : Boolean; -------------------------------------------------------------------------------- function Get_Body (Compilation_Unit : Symbol) return RawDict.Declaration_Info_Ref --# global in Dict; is The_Body : RawDict.Declaration_Info_Ref := RawDict.Null_Declaration_Info_Ref; begin case RawDict.GetSymbolDiscriminant (Compilation_Unit) is when Package_Symbol => The_Body := RawDict.Get_Package_Body (The_Package => RawDict.Get_Package_Info_Ref (Item => Compilation_Unit)); when Type_Symbol => case RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Compilation_Unit)) is when Protected_Type_Item => The_Body := RawDict.Get_Protected_Type_Body (The_Protected_Type => RawDict.Get_Type_Info_Ref (Item => Compilation_Unit)); when Task_Type_Item => The_Body := RawDict.Get_Task_Type_Body (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => Compilation_Unit)); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Get_Body"); end case; when Subprogram_Symbol => The_Body := RawDict.Get_Subprogram_Body (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Compilation_Unit)); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Get_Body"); end case; return The_Body; end Get_Body; -------------------------------------------------------------------------------- function Body_Is_Visible (The_Body : RawDict.Declaration_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is Region : Symbol; Stop_At : RawDict.Declaration_Info_Ref; Found : Boolean := False; Current_Scope : Scopes; Enclosing_Scope : Scopes; -------------------------------------------------------------------------------- function Body_Is_Defined (The_Body : RawDict.Declaration_Info_Ref; Scope : Scopes; Stop_At : RawDict.Declaration_Info_Ref) return Boolean --# global in Dict; is Found : Boolean; -------------------------------------------------------------------------------- function Lookup_Local_Declarations (The_Body : RawDict.Declaration_Info_Ref; Region : Symbol; Stop_At : RawDict.Declaration_Info_Ref) return Boolean --# global in Dict; is Found : Boolean := False; ------------------------------------------------------------------------------ function Lookup_Declarations (The_Body, Head, Stop_At : RawDict.Declaration_Info_Ref) return Boolean --# global in Dict; is The_Declaration : RawDict.Declaration_Info_Ref; begin 
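            -- Walks the declaration chain starting at Head, stopping at
            -- Stop_At, at the end of the chain, or at The_Body itself;
            -- returns True exactly when The_Body is the declaration reached.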
The_Declaration := Head; while The_Declaration /= RawDict.Null_Declaration_Info_Ref and then The_Declaration /= Stop_At and then The_Declaration /= The_Body loop The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end loop; return The_Declaration = The_Body; end Lookup_Declarations; begin -- Lookup_Local_Declarations case RawDict.GetSymbolDiscriminant (Region) is when Package_Symbol => Found := Lookup_Declarations (The_Body => The_Body, Head => RawDict.Get_Package_First_Local_Declaration (The_Package => RawDict.Get_Package_Info_Ref (Item => Region)), Stop_At => Stop_At); when Subprogram_Symbol => Found := Lookup_Declarations (The_Body => The_Body, Head => RawDict.Get_Subprogram_First_Declaration (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Region)), Stop_At => Stop_At); when Type_Symbol => -- must be protected or task type since these are the only types that could contain -- a subprogram call. case RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Region)) is when Protected_Type_Item => Found := Lookup_Declarations (The_Body => The_Body, Head => RawDict.Get_Protected_Type_First_Local_Declaration (The_Protected_Type => RawDict.Get_Type_Info_Ref (Item => Region)), Stop_At => Stop_At); when Task_Type_Item => Found := Lookup_Declarations (The_Body => The_Body, Head => RawDict.Get_Task_Type_First_Local_Declaration (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => Region)), Stop_At => Stop_At); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Lookup_Local_Declarations"); end case; when others => null; end case; return Found; end Lookup_Local_Declarations; begin -- Body_Is_Defined case Get_Visibility (Scope => Scope) is when Visible | Privat => Found := False; when Local => Found := Lookup_Local_Declarations (The_Body => The_Body, Region => GetRegion (Scope), Stop_At => Stop_At); end case; return Found; end Body_Is_Defined; begin -- Body_Is_Visible if The_Body /= RawDict.Null_Declaration_Info_Ref then Found := Body_Is_Defined (The_Body => The_Body, Scope => Scope, Stop_At => RawDict.Null_Declaration_Info_Ref); if not Found then Current_Scope := Scope; loop Region := GetRegion (Current_Scope); exit when RawDict.GetSymbolDiscriminant (Region) = Package_Symbol and then RawDict.Get_Package_Info_Ref (Item => Region) = Get_Predefined_Package_Standard; Enclosing_Scope := GetEnclosingScope (Current_Scope); if IsCompilationUnit (Region) then Stop_At := Get_Body (Compilation_Unit => Region); elsif RawDict.GetSymbolDiscriminant (Region) = Type_Symbol and then Is_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Region)) then -- Task or Protected body subunit Stop_At := Get_Body (Compilation_Unit => Region); else Stop_At := RawDict.Null_Declaration_Info_Ref; end if; Found := Body_Is_Defined (The_Body => The_Body, Scope => Enclosing_Scope, Stop_At => Stop_At); exit when Found; Current_Scope := Enclosing_Scope; end loop; end if; end if; return Found; end Body_Is_Visible; -------------------------------------------------------------------------------- function Direct_Recursion (The_Subprogram : RawDict.Subprogram_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is Current_Scope : Scopes; Current_Region : Symbol; begin Current_Scope := Scope; loop Current_Region := GetRegion (Current_Scope); exit when (RawDict.GetSymbolDiscriminant (Current_Region) = Subprogram_Symbol and then RawDict.Get_Subprogram_Info_Ref (Item => Current_Region) = 
The_Subprogram) or else (RawDict.GetSymbolDiscriminant (Current_Region) = Package_Symbol and then RawDict.Get_Package_Info_Ref (Item => Current_Region) = Get_Predefined_Package_Standard); Current_Scope := GetEnclosingScope (Current_Scope); end loop; return RawDict.GetSymbolDiscriminant (Current_Region) = Subprogram_Symbol and then RawDict.Get_Subprogram_Info_Ref (Item => Current_Region) = The_Subprogram; end Direct_Recursion; -------------------------------------------------------------------------------- function Is_Inherited_Operation (The_Subprogram : RawDict.Subprogram_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is begin -- a subprogram denoted by a simple name must be inherited if the -- library package in which it is declared is not the same as the -- package associated with the scope from which we are looking return Get_Library_Package (Scope => Get_Subprogram_Scope (The_Subprogram => The_Subprogram)) /= Get_Library_Package (Scope => Scope); end Is_Inherited_Operation; -------------------------------------------------------------------------------- function Select_Protected_Body (The_Subprogram : RawDict.Subprogram_Info_Ref; Scope : Scopes) return RawDict.Declaration_Info_Ref --# global in Dict; is Result : Symbol; Declared_Region : Symbol; begin -- If the subprogram is declared inside a protected type and we are not calling from inside the protected -- body itself then it is the body of the type we need to find not the body of the subprgoram -- itself. Otherwise we return the Subprogram unchanged. Result := RawDict.Get_Subprogram_Symbol (The_Subprogram); Declared_Region := GetRegion (Get_Subprogram_Scope (The_Subprogram => The_Subprogram)); if RawDict.GetSymbolDiscriminant (Declared_Region) = Type_Symbol and then Is_Protected_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Declared_Region)) and then not IsLocal (Scope, Set_Visibility (The_Visibility => Local, The_Unit => Declared_Region)) then Result := Declared_Region; end if; return Get_Body (Compilation_Unit => Result); end Select_Protected_Body; begin -- Is_Callable if Prefix_Needed or else Is_Renamed_Local (The_Subprogram => The_Subprogram, Scope => Scope) then The_Region := GetRegion (Get_Subprogram_Scope (The_Subprogram => The_Subprogram)); case RawDict.GetSymbolDiscriminant (The_Region) is when Type_Symbol => SystemErrors.RT_Assert (C => Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => The_Region)) or else Is_Protected_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => The_Region)), Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Is_Callable"); if IsLocal (Scope, Set_Visibility (The_Visibility => Local, The_Unit => The_Region)) then Result := Body_Is_Visible (The_Body => RawDict.Get_Subprogram_Body (The_Subprogram => The_Subprogram), Scope => Scope); else Result := True; end if; when Package_Symbol => The_Package := RawDict.Get_Package_Info_Ref (Item => The_Region); if IsLocal (Scope, Set_Visibility (The_Visibility => Local, The_Unit => RawDict.Get_Package_Symbol (The_Package))) then Result := Body_Is_Visible (The_Body => RawDict.Get_Subprogram_Body (The_Subprogram => The_Subprogram), Scope => Scope); elsif Is_Embedded_Package (The_Package => The_Package) then Result := Body_Is_Visible (The_Body => RawDict.Get_Package_Body (The_Package => The_Package), Scope => Scope) or else Body_Is_Visible (The_Body => RawDict.Get_Subprogram_Body (The_Subprogram => The_Subprogram), Scope => Set_Visibility (The_Visibility => Local, The_Unit => RawDict.Get_Subprogram_Symbol 
                 (The_Subprogram)));
            elsif Is_Descendent_Of_Private_Child
              (Candidate   => Get_Library_Package (Scope => Scope),
               The_Package => The_Package) then
               Result := False;
            else
               Result := True;
            end if;
         when others =>
            Result := False;
            SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table,
                                      Msg     => "in Dictionary.Is_Callable");
      end case;
   else
      Result :=
        (Body_Is_Visible (The_Body => Select_Protected_Body (The_Subprogram => The_Subprogram,
                                                             Scope          => Scope),
                          Scope    => Scope)
         and then not Direct_Recursion (The_Subprogram => The_Subprogram,
                                        Scope          => Scope))
        or else Is_Inherited_Operation (The_Subprogram => The_Subprogram,
                                        Scope          => Scope);
   end if;
   return Result;
end Is_Callable;
spark-2012.0.deb/examiner/sem-walk_expression_p-get_character_literal.adb0000644000175000017500000000245611753202336025553 0ustar eugeneugen
-------------------------------------------------------------------------------
-- (C) Altran Praxis Limited
-------------------------------------------------------------------------------
--
-- The SPARK toolset is free software; you can redistribute it and/or modify it
-- under terms of the GNU General Public License as published by the Free
-- Software Foundation; either version 3, or (at your option) any later
-- version. The SPARK toolset is distributed in the hope that it will be
-- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
-- Public License for more details. You should have received a copy of the GNU
-- General Public License distributed with the SPARK toolset; see file
-- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of
-- the license.
--
--=============================================================================
separate (Sem.Walk_Expression_P)
function Get_Character_Literal (Node : STree.SyntaxNode) return Maths.Value is
begin
   return Maths.MakeEnum
     (Character'Pos
        (E_Strings.Get_Element
           (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => STree.Node_Lex_String (Node => Node)),
            Pos   => 2)));
end Get_Character_Literal;
spark-2012.0.deb/examiner/sparklex.smf0000644000175000017500000000044211753202337016612 0ustar eugeneugen
sparklex.adb -vcg
sparklex-lex.adb -vcg
sparklex-lex-apostintro.adb -vcg
sparklex-lex-getident.adb -vcg
sparklex-lex-getnumber.adb -vcg
sparklex-lex-getstring.adb -vcg
sparklex-lex-hyphintro.adb -vcg
sparklex-lex-ltintro.adb -vcg
sparklex-lex-nextlex.adb -vcg
sparklex-linemanager.adb -vcg
spark-2012.0.deb/examiner/dag-buildgraph-modelprocedurecall.adb0000644000175000017500000031036711753202336023450 0ustar eugeneugen
-------------------------------------------------------------------------------
-- (C) Altran Praxis Limited
-------------------------------------------------------------------------------
--
-- The SPARK toolset is free software; you can redistribute it and/or modify it
-- under terms of the GNU General Public License as published by the Free
-- Software Foundation; either version 3, or (at your option) any later
-- version. The SPARK toolset is distributed in the hope that it will be
-- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
-- Public License for more details. You should have received a copy of the GNU
-- General Public License distributed with the SPARK toolset; see file
-- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of
-- the license.
-- --============================================================================= with E_Strings; separate (DAG.BuildGraph) procedure ModelProcedureCall is Procedure_Name_Cell : Cells.Cell; Concrete_Procedure : Dictionary.Symbol; PrefixSym : Dictionary.Symbol := Dictionary.NullSymbol; SubstitutionTable : Cells.Cell; -- The interface of a procedure can have two views. An abstract view and a -- refined view which are denoted by the type Dictionary.Abstractions. -- A refined view can be defined by data refinement or proof refinement. -- Data refinement occurs when an own variable is refined into constituent -- parts and may also involve proof refinement where the pre and post -- conditions are in terms of the constituents. -- However, proof refinement may occur without data refinement to facilitate -- defining pre and post conditions in terms of the complete view of a -- private type. -- Hence the introduction of these two variables. -- Abstraction gives the view to be used for the pre and post -- conditions, -- Data_View gives the view to be used for the parameters, globals and -- derives annotations and from this the view of the imports and exports of -- the subprogram. Abstraction, Data_View : Dictionary.Abstractions; ---------------------------------------------------------- procedure Build_Substitution_Table (Node : in STree.SyntaxNode; Concrete_Procedure : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in DoAssumeLocalRvalues; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in Scope; --# in STree.Table; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# out SubstitutionTable; --# derives CheckStack, --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# CheckStack, --# CommandLineData.Content, --# Concrete_Procedure, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Node, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CheckStack, --# CommandLineData.Content, --# Concrete_Procedure, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# Node, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# VCGHeap & --# SubstitutionTable from VCGHeap; is NameArgListNode : STree.SyntaxNode; function Not_Mode_Out (Param_Number : Positive; Concrete_Procedure : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return Dictionary.GetSubprogramParameterMode (Dictionary.GetSubprogramParameter (Concrete_Procedure, Param_Number)) /= Dictionary.OutMode; end Not_Mode_Out; ----------------- procedure Do_Positional_Association (Concrete_Procedure : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in DoAssumeLocalRvalues; --# in LineNmbr; --# 
in LoopStack; --# in LScope; --# in NameArgListNode; --# in Scope; --# in STree.Table; --# in SubstitutionTable; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# CheckStack, --# CommandLineData.Content, --# Concrete_Procedure, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# NameArgListNode, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# SubstitutionTable, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CheckStack, --# CommandLineData.Content, --# Concrete_Procedure, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# NameArgListNode, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# SubstitutionTable, --# VCGHeap; is -- Builds a table using the facilities of CLists. Each element of the list is a cell with -- a parameter number as its contents. The C pointer of each list element points to a -- A DAG representing the actual parameter. If there is a constraining index involved -- (i.e. the actual parameter is a constrained subtype of an unconstrained array) then -- a cell of kind ConstrainingIndex is placed between the parameter list cell and the -- expression DAG itself. The structure is a bit like this: -- PNA experiment. In the case of an actual parameter that is a named subtype of an -- unconstrained array type then the constraint node will hold the -- array subtype symbol rather than the first index subtype as originally -- implemented. Note that in the special case of a string literal actual parameter -- then the constraint will still have an index type (a subtype of positive). -- -- SubstitutionTable -> 1 -> 2 -> 3 -> etc -- | | | -- v v v -- dag dag constraint -- | -- v -- dag ParamCounter : Positive; TryNode : STree.SyntaxNode; ListElement, Expression : Cells.Cell; ConstraintCell : Cells.Cell; ConstraintIndex : Dictionary.Symbol; begin ParamCounter := 1; TryNode := NameArgListNode; while STree.Syntax_Node_Type (Node => TryNode) /= SP_Symbols.expression loop TryNode := STree.Child_Node (Current_Node => TryNode); end loop; -- TryNode is now bottommost expression ie. 
first parameter while TryNode /= STree.NullNode loop ConstraintCell := Cells.Null_Cell; Cells.Create_Cell (VCGHeap, ListElement); Cells.Set_Natural_Value (VCGHeap, ListElement, ParamCounter); BuildExpnDAG (TryNode, LScope, Scope, LineNmbr, True, DoAssumeLocalRvalues and then Not_Mode_Out (Param_Number => ParamCounter, Concrete_Procedure => Concrete_Procedure), LoopStack, FlowHeap, VCGHeap, ContainsReals, VCGFailure, ShortCircuitStack, CheckStack, KindOfStackedCheck, -- to get Expression); -- Constraining index (if there is one) will have been planted at expression node by wffs ConstraintIndex := STree.NodeSymbol (TryNode); if not Dictionary.Is_Null_Symbol (ConstraintIndex) then CreateCellKind (ConstraintCell, VCGHeap, Cell_Storage.Constraining_Index); Cells.Set_Symbol_Value (VCGHeap, ConstraintCell, ConstraintIndex); -- PNA unchnaged name at present, but "Index" is misleading now end if; -- We may need to convert the actual parameter by inserting some inherit -- derefences in front of it; conversion is required if we have called -- an inherited root function. The parameter in this case must be an -- object. ConvertTaggedActualIfNecessary (Concrete_Procedure, VCGHeap, Expression); -- Link constraint (if any) and DAG into linked list of parameters if Cells.Is_Null_Cell (ConstraintCell) then SetAuxPtr (ListElement, Expression, VCGHeap); else SetAuxPtr (ListElement, ConstraintCell, VCGHeap); SetAuxPtr (ConstraintCell, Expression, VCGHeap); end if; Clists.InsertCell (VCGHeap, ListElement, SubstitutionTable); ParamCounter := ParamCounter + 1; TryNode := STree.Next_Sibling (Current_Node => STree.Parent_Node (Current_Node => TryNode)); end loop; end Do_Positional_Association; ------------------------------------------------------------- procedure Do_Named_Association (Concrete_Procedure : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in DoAssumeLocalRvalues; --# in LineNmbr; --# in LoopStack; --# in LScope; --# in NameArgListNode; --# in Scope; --# in STree.Table; --# in SubstitutionTable; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out FlowHeap; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# ContainsReals, --# Dictionary.Dict, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# ShortCircuitStack, --# Statistics.TableUsage, --# StmtStack.S, --# VCGFailure, --# VCGHeap from *, --# CheckStack, --# CommandLineData.Content, --# Concrete_Procedure, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# NameArgListNode, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# SubstitutionTable, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CheckStack, --# CommandLineData.Content, --# Concrete_Procedure, --# Dictionary.Dict, --# DoAssumeLocalRvalues, --# ErrorHandler.Error_Context, --# FlowHeap, --# Graph.Table, --# KindOfStackedCheck, --# LexTokenManager.State, --# LineNmbr, --# LoopStack, --# LScope, --# NameArgListNode, --# Scope, --# ShortCircuitStack, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# SubstitutionTable, --# VCGHeap; is -- See comment in 
Do_Positional_Association above for the purpose of this procedure. ParamCounter : Positive; TryNode : STree.SyntaxNode; ListElement, Expression : Cells.Cell; ConstraintCell : Cells.Cell; ConstraintIndex : Dictionary.Symbol; ------------------------------------------------------- procedure GetParamNumber (Name : in LexTokenManager.Lex_String; ProcSym : in Dictionary.Symbol; ParamNo : out Positive) --# global in Dictionary.Dict; --# in LexTokenManager.State; --# derives ParamNo from Dictionary.Dict, --# LexTokenManager.State, --# Name, --# ProcSym; is It : Dictionary.Iterator; Sym : Dictionary.Symbol; begin It := Dictionary.FirstSubprogramParameter (ProcSym); SystemErrors.RT_Assert (C => not Dictionary.IsNullIterator (It), Sys_Err => SystemErrors.Precondition_Failure, Msg => "Can't find first parameter in BuildGraph.ModelProcedureCall.GetParamNumber"); loop Sym := Dictionary.CurrentSymbol (It); exit when LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Dictionary.GetSimpleName (Sym), Lex_Str2 => Name) = LexTokenManager.Str_Eq; It := Dictionary.NextSymbol (It); exit when Dictionary.IsNullIterator (It); end loop; ParamNo := Dictionary.GetSubprogramParameterNumber (Sym); end GetParamNumber; begin -- Do_Named_Association TryNode := NameArgListNode; while STree.Syntax_Node_Type (Node => TryNode) /= SP_Symbols.simple_name loop TryNode := STree.Child_Node (Current_Node => TryNode); end loop; -- TryNode is now simple_name of first parameter while TryNode /= STree.NullNode loop ConstraintCell := Cells.Null_Cell; Cells.Create_Cell (VCGHeap, ListElement); GetParamNumber (STree.Node_Lex_String (Node => STree.Child_Node (Current_Node => TryNode)), Concrete_Procedure, -- to get ParamCounter); Cells.Set_Natural_Value (VCGHeap, ListElement, ParamCounter); BuildExpnDAG (STree.Next_Sibling (Current_Node => TryNode), LScope, Scope, LineNmbr, True, DoAssumeLocalRvalues and then Not_Mode_Out (Param_Number => ParamCounter, Concrete_Procedure => Concrete_Procedure), -- assume rvalues only for in/inout params LoopStack, FlowHeap, VCGHeap, ContainsReals, VCGFailure, ShortCircuitStack, CheckStack, KindOfStackedCheck, -- to get Expression); -- Constraining index (if there is one) will have been planted at expression node by wffs ConstraintIndex := STree.NodeSymbol (STree.Next_Sibling (Current_Node => TryNode)); if not Dictionary.Is_Null_Symbol (ConstraintIndex) then CreateCellKind (ConstraintCell, VCGHeap, Cell_Storage.Constraining_Index); Cells.Set_Symbol_Value (VCGHeap, ConstraintCell, ConstraintIndex); end if; -- We may need to convert the actual parameter by inserting some inherit -- dereferences in front of it; conversion is required if we have called -- an inherited root function. The parameter in this case must be an -- object.
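-- Illustrative sketch only, not part of the Examiner sources: the shape of
-- user code that requires the tagged-actual conversion performed just below.
-- All identifiers (Inherited_Call_Sketch, Root, Child, Op) are hypothetical
-- and the subprogram bodies are elided.
package Inherited_Call_Sketch is

   type Root is tagged record
      A : Integer;
   end record;

   procedure Op (R : in out Root);

   type Child is new Root with record
      B : Integer;
   end record;

   -- Op is inherited by Child, so a call such as
   --    C : Child; ... Op (C);
   -- invokes the root operation.  The actual C must then be viewed through
   -- its Root part, which is the extra "inherit dereference" that
   -- ConvertTaggedActualIfNecessary plants in front of the actual-parameter
   -- DAG.

end Inherited_Call_Sketch;
-- End of illustrative sketch.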
ConvertTaggedActualIfNecessary (Concrete_Procedure, VCGHeap, Expression); -- Link constraint (if any) and DAG into linked list of parameters if Cells.Is_Null_Cell (ConstraintCell) then SetAuxPtr (ListElement, Expression, VCGHeap); else SetAuxPtr (ListElement, ConstraintCell, VCGHeap); SetAuxPtr (ConstraintCell, Expression, VCGHeap); end if; Clists.InsertCell (VCGHeap, ListElement, SubstitutionTable); TryNode := STree.Next_Sibling (Current_Node => STree.Parent_Node (Current_Node => TryNode)); end loop; end Do_Named_Association; begin -- Build_Substitution_Table -- Node is procedure_call_statement Clists.CreateList (VCGHeap, SubstitutionTable); NameArgListNode := STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => Node))); if NameArgListNode /= STree.NullNode then if STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => NameArgListNode)) = SP_Symbols.positional_argument_association then Do_Positional_Association (Concrete_Procedure => Concrete_Procedure); else Do_Named_Association (Concrete_Procedure => Concrete_Procedure); end if; end if; end Build_Substitution_Table; ---------------------------------------------------------- function GetExpressionCell (Parameter : Positive) return Cells.Cell --# global in SubstitutionTable; --# in VCGHeap; is -- For a particular parameter number, find the DAG associated with the actual parameter -- expression. This will be found in the substitution table built by procedure Build_Substitution_Table Try : Cells.Cell; begin Try := Clists.FirstCell (VCGHeap, SubstitutionTable); while Cells.Get_Natural_Value (VCGHeap, Try) /= Parameter loop Try := Clists.NextCell (VCGHeap, Try); end loop; Try := AuxPtr (VCGHeap, Try); if Cells.Get_Kind (VCGHeap, Try) = Cell_Storage.Constraining_Index then -- expression is one lower in the data strucure - see comment in Do_Positional_Association above Try := AuxPtr (VCGHeap, Try); end if; return Try; end GetExpressionCell; ---------------------------------------------------------- function GetConstraintCell (Parameter : Positive) return Cells.Cell --# global in SubstitutionTable; --# in VCGHeap; is -- This is similar to GetExpressionCell but returns the constraining index cell if there is one otherwise -- it returns the actual parameter expression's DAG. Try : Cells.Cell; begin Try := Clists.FirstCell (VCGHeap, SubstitutionTable); while Cells.Get_Natural_Value (VCGHeap, Try) /= Parameter loop Try := Clists.NextCell (VCGHeap, Try); end loop; return AuxPtr (VCGHeap, Try); end GetConstraintCell; ---------------------------------------------------------- -- There are several places below where we need to traverse a DAG that contains -- record field selections and/or array element references. 
The following two functions -- simplify this search process function IsSelector (The_Cell : Cells.Cell) return Boolean --# global in VCGHeap; is begin return Cells.Get_Kind (VCGHeap, The_Cell) = Cell_Storage.Field_Access_Function or else Cells.Get_Kind (VCGHeap, The_Cell) = Cell_Storage.Element_Function; end IsSelector; -- a field selector function has the form: -- fld_name --- expression -- -- an array element function has the form -- element --- , --- index -- | -- expression function ArgumentOfSelector (The_Cell : Cells.Cell) return Cells.Cell --# global in VCGHeap; is Result : Cells.Cell; begin if Cells.Get_Kind (VCGHeap, The_Cell) = Cell_Storage.Field_Access_Function then -- for a record field access, the expression is to the right Result := RightPtr (VCGHeap, The_Cell); else -- must be element function because of precondition -- for an array access, we have a comma to the right and the expression to the left of this Result := LeftPtr (VCGHeap, RightPtr (VCGHeap, The_Cell)); end if; return Result; end ArgumentOfSelector; ---------------------------------------------------------- -- Given a symbol for an export which is either the symbol of a global -- variable or the symbol of a formal parameter, this procedure returns -- the symbol of the entire variable being exported and the DAG that -- represents the actual parameter expression. -- If the export is a global then Entire_Actual_Sym = Formal_Or_Global_Sym and -- Actual_DAG = Cells.Null_Cell. For parameters the substitution table is used -- to obtain the returned results. procedure Get_Export_Details (Formal_Or_Global_Sym : in Dictionary.Symbol; Concrete_Procedure : in Dictionary.Symbol; Entire_Actual_Sym : out Dictionary.Symbol; Actual_DAG : out Cells.Cell) --# global in Dictionary.Dict; --# in PrefixSym; --# in SubstitutionTable; --# in VCGHeap; --# derives Actual_DAG from Concrete_Procedure, --# Dictionary.Dict, --# Formal_Or_Global_Sym, --# SubstitutionTable, --# VCGHeap & --# Entire_Actual_Sym from Concrete_Procedure, --# Dictionary.Dict, --# Formal_Or_Global_Sym, --# PrefixSym, --# SubstitutionTable, --# VCGHeap; is Actual_DAG_Local : Cells.Cell; Entire_Actual_Sym_Local : Dictionary.Symbol; ---------------------------------------------------------- function Substitute_Protected_Type_Self_Reference (Sym, Prefix_Symbol : Dictionary.Symbol) return Dictionary.Symbol --# global in Dictionary.Dict; is Result : Dictionary.Symbol; begin -- if Sym is the implicitly-declared own variable of a protected type -- then we must replace it with the "current instance of the protected object" -- before checking whether it is visible. -- Background: given protected type PT its operations will globally reference and -- derive PT meaning, in this case, "myself". -- If an object PO of type PT (or a subtype of PT) is declared then calls to its -- operations will take the form PO.Op and the calling environment will be annotated -- in terms of PO. Therefore, when checking that the globals necessary for the call -- PO.Op are visible (for example), we need to replace all references to PT into -- references to PO before making the check. The Prefix Symbol of the call is the -- symbol we need to substitute in. 
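-- Illustrative sketch only, not part of the Examiner sources; PT, PO,
-- Increment and Caller are hypothetical, and the protected body together
-- with the package-level own-variable annotations SPARK would also require
-- are elided.  It shows why the substitution above is needed: inside the
-- protected type the annotations name PT itself ("this instance"), while
-- the caller of PO.Increment is annotated in terms of the object PO.
protected type PT is
   procedure Increment;
   --# global in out PT;
   --# derives PT from PT;
private
   Count : Integer := 0;
end PT;

PO : PT;

procedure Caller
--# global in out PO;
--# derives PO from PO;
is
begin
   PO.Increment;  -- references to PT in Increment's annotations map to PO here
end Caller;
-- End of illustrative sketch.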
Result := Sym; if not Dictionary.Is_Null_Symbol (Prefix_Symbol) and then Dictionary.IsOwnVariable (Sym) and then Dictionary.IsProtectedType (Dictionary.GetOwner (Sym)) then Result := Prefix_Symbol; end if; return Result; end Substitute_Protected_Type_Self_Reference; begin -- Get_Export_Details if Dictionary.IsFormalParameter (Concrete_Procedure, Formal_Or_Global_Sym) then Actual_DAG_Local := GetExpressionCell (Dictionary.GetSubprogramParameterNumber (Formal_Or_Global_Sym)); Actual_DAG := Actual_DAG_Local; while IsSelector (Actual_DAG_Local) loop Actual_DAG_Local := ArgumentOfSelector (Actual_DAG_Local); end loop; Entire_Actual_Sym_Local := Cells.Get_Symbol_Value (VCGHeap, Actual_DAG_Local); else Entire_Actual_Sym_Local := Formal_Or_Global_Sym; Actual_DAG := Cells.Null_Cell; end if; -- if the export is from a protected procedure it may be a type name (representing "this") rather -- than the protected object itself; the following substitutes the PO name. Entire_Actual_Sym := Substitute_Protected_Type_Self_Reference (Sym => Entire_Actual_Sym_Local, Prefix_Symbol => PrefixSym); end Get_Export_Details; ---------------------------------------------------------- procedure Substitute_Import (Sym : in Dictionary.Symbol; Concrete_Procedure : in Dictionary.Symbol; Change : out Boolean; Result : out Cells.Cell) --# global in Dictionary.Dict; --# in SubstitutionTable; --# in VCGHeap; --# derives Change from Concrete_Procedure, --# Dictionary.Dict, --# Sym & --# Result from Concrete_Procedure, --# Dictionary.Dict, --# SubstitutionTable, --# Sym, --# VCGHeap; is -- Given the Symbol of an import, replaces it with the matching actual parameter expression -- (if a formal parameter) otherwise does nothing. Change is set to True if a substitution has been made begin if Dictionary.IsFormalParameter (Concrete_Procedure, Sym) then Change := True; Result := GetExpressionCell (Dictionary.GetSubprogramParameterNumber (Sym)); else Result := Cells.Null_Cell; Change := False; end if; end Substitute_Import; --------------------------------------------------------------------- procedure Substitute_Import_Constraint (Sym : in Dictionary.Symbol; Concrete_Procedure : in Dictionary.Symbol; Change : out Boolean; Result : out Cells.Cell) --# global in Dictionary.Dict; --# in SubstitutionTable; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives Change from Concrete_Procedure, --# Dictionary.Dict, --# Sym & --# Result from Concrete_Procedure, --# Dictionary.Dict, --# Sym, --# VCGHeap & --# Statistics.TableUsage from *, --# Concrete_Procedure, --# Dictionary.Dict, --# Sym, --# VCGHeap & --# VCGHeap from *, --# Concrete_Procedure, --# Dictionary.Dict, --# SubstitutionTable, --# Sym; is -- Similar to Substitute_Import above but returns the constraining index type associated with -- a constrained actual parameter associated with an unconstrained formal parameter ConstraintSym : Dictionary.Symbol; ObjectSym : Dictionary.Symbol; ArrayDimension : Positive; begin -- The Sym passed to this routine will be a Dictionary.ParameterConstraintSymbol. -- From this we can obtain the object itself and the dimension of that object that appears -- in the expression we may be making substitutions to.
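-- Illustrative sketch only, not part of the Examiner sources; Vector,
-- Index_5, Vector_5, Clear, Demo_Call and W are hypothetical and Clear's
-- body is elided.  It shows the unconstrained-formal situation this routine
-- deals with: at the call, the formal's implicit index constraint (the
-- "formal__index__subtype__n" mentioned below) must be re-expressed in
-- terms of the actual parameter's constraint.
type Vector is array (Positive range <>) of Integer;
subtype Index_5  is Positive range 1 .. 5;
subtype Vector_5 is Vector (Index_5);

procedure Clear (V : out Vector);

procedure Demo_Call is
   W : Vector_5;
begin
   Clear (W);
   -- Here the constraint recorded for the actual is the subtype Vector_5,
   -- giving case (1) in the comment that follows; a string-literal actual
   -- gives case (2), and an actual that is itself an unconstrained formal
   -- of the caller gives case (3).
end Demo_Call;
-- End of illustrative sketch.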
ObjectSym := Dictionary.GetParameterAssociatedWithParameterConstraint (Sym); ArrayDimension := Dictionary.GetSubprogramParameterConstraintDimension (Sym); if Dictionary.IsFormalParameter (Concrete_Procedure, ObjectSym) then Change := True; Cells.Create_Cell (VCGHeap, Result); Cells.Copy_Contents (VCGHeap, GetConstraintCell (Dictionary.GetSubprogramParameterNumber (ObjectSym)), Result); -- Result contains either: -- (1) an array subtype symbol in the case where the actual paramater is of a constrained -- array subtype -- (2) a scalar index type symbol in the case of a string literal being passed to string -- (3) a symbol of a subprogram parameter in the case where the actual parameter is also -- an unconstrained array and no constraint has been planted (this final behaviour occurs -- because GetConstraintCell returns the actual parameter DAG if no constraint is present) ConstraintSym := Cells.Get_Symbol_Value (VCGHeap, Result); if Dictionary.IsSubprogramParameter (ConstraintSym) then -- Case 3. We substitute "actual__index__subtype__n" for "formal__index__subtype__n" Cells.Set_Symbol_Value (VCGHeap, Result, Dictionary.GetSubprogramParameterConstraint (ConstraintSym, ArrayDimension)); elsif Dictionary.TypeIsArray (ConstraintSym) then -- Case 2. We substitute array index n of constraining subtype for "formal__index__subtype__n" Cells.Set_Symbol_Value (VCGHeap, Result, Dictionary.GetArrayIndex (ConstraintSym, ArrayDimension)); else -- Case 1. we already have the constraining index directly null; end if; else Result := Cells.Null_Cell; Change := False; end if; end Substitute_Import_Constraint; --------------------------------------------------------------------- -- procedure to create modelling vars for procedure export procedure Convert_Cell_To_Export_Cell (Cell_Name : in Cells.Cell; Concrete_Procedure : in Dictionary.Symbol) --# global in Dictionary.Dict; --# in PrefixSym; --# in SubprogramCalls; --# in SubstitutionTable; --# in out LexTokenManager.State; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives LexTokenManager.State from *, --# SubprogramCalls & --# Statistics.TableUsage from *, --# Cell_Name, --# Concrete_Procedure, --# Dictionary.Dict, --# SubstitutionTable, --# VCGHeap & --# VCGHeap from *, --# Cell_Name, --# Concrete_Procedure, --# Dictionary.Dict, --# LexTokenManager.State, --# PrefixSym, --# SubprogramCalls, --# SubstitutionTable; is Count_Str : LexTokenManager.Lex_String; Cell_Name_Local, Temp_Cell, Export_DAG : Cells.Cell; ExportSym : Dictionary.Symbol; begin -- if the cell supplied is simply a variable we convert the cell sort to -- a procedure export variable adding in the necessary call counter. If -- it is the top of a DAG containing record field references we convert the -- cell which represents the entire record variable after making a copy -- of the actual DAG. LexTokenManager.Insert_Nat (N => SubprogramCalls, Lex_Str => Count_Str); Get_Export_Details (Formal_Or_Global_Sym => Cells.Get_Symbol_Value (VCGHeap, Cell_Name), Concrete_Procedure => Concrete_Procedure, Entire_Actual_Sym => ExportSym, Actual_DAG => Export_DAG); -- see if there is an Export DAG representing a fld access of a record -- variable or an array element function. 
if not Cells.Is_Null_Cell (Export_DAG) and then (Cells.Get_Kind (VCGHeap, Export_DAG) = Cell_Storage.Field_Access_Function or else Cells.Get_Kind (VCGHeap, Export_DAG) = Cell_Storage.Element_Function) then -- field access or array element access found we must substitute input node with -- the export DAG and converting the entire variable concerned to an export -- variable. Structures.CopyStructure (VCGHeap, Export_DAG, Cell_Name_Local); Cells.Copy_Contents (VCGHeap, Cell_Name_Local, Cell_Name); Temp_Cell := Cell_Name; while IsSelector (Temp_Cell) loop Temp_Cell := ArgumentOfSelector (Temp_Cell); end loop; -- Temp_Cell now points at entire variable cell Cells.Set_Kind (VCGHeap, Temp_Cell, Cell_Storage.Procedure_Export); Cells.Set_Lex_Str (VCGHeap, Temp_Cell, Count_Str); else -- no FieldAccess found so export is an entire variable. -- Just convert cell directly to an export cell as before -- and put in the symbol of the actual export Cells.Set_Kind (VCGHeap, Cell_Name, Cell_Storage.Procedure_Export); Cells.Set_Symbol_Value (VCGHeap, Cell_Name, ExportSym); Cells.Set_Lex_Str (VCGHeap, Cell_Name, Count_Str); end if; end Convert_Cell_To_Export_Cell; --------------------------------------------------------------------- -- The view of the precondition, abstract or refined is determined by the parameter Abstraction procedure Model_Precondition (Abstraction : in Dictionary.Abstractions; Concrete_Procedure : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in LoopStack; --# in Scope; --# in STree.Table; --# in SubstitutionTable; --# in out CheckStack; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out KindOfStackedCheck; --# in out LexTokenManager.State; --# in out ShortCircuitStack; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives CheckStack, --# ContainsReals, --# ShortCircuitStack, --# Statistics.TableUsage, --# VCGHeap from *, --# Abstraction, --# CheckStack, --# CommandLineData.Content, --# Concrete_Procedure, --# Dictionary.Dict, --# Graph.Table, --# LexTokenManager.State, --# LoopStack, --# Scope, --# ShortCircuitStack, --# StmtStack.S, --# STree.Table, --# SubstitutionTable, --# VCGHeap & --# Dictionary.Dict, --# LexTokenManager.State, --# VCGFailure from *, --# Abstraction, --# CommandLineData.Content, --# Concrete_Procedure, --# Dictionary.Dict, --# LexTokenManager.State, --# LoopStack, --# Scope, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# CommandLineData.Content, --# Concrete_Procedure, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# LoopStack, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# VCGHeap & --# Graph.Table, --# StmtStack.S from Abstraction, --# CommandLineData.Content, --# Concrete_Procedure, --# Dictionary.Dict, --# Graph.Table, --# LexTokenManager.State, --# LoopStack, --# Scope, --# StmtStack.S, --# STree.Table, --# SubstitutionTable, --# VCGHeap & --# KindOfStackedCheck from *, --# Abstraction, --# Concrete_Procedure, --# Dictionary.Dict; is Instantiated_Subprogram : Dictionary.Symbol; Local_Abstraction : Dictionary.Abstractions; Constraint : STree.SyntaxNode; DAG_Cell : Cells.Cell; Conjoined_Function_Defs : Cells.Cell; Function_Defs : CStacks.Stack; -------------------------------------------- procedure Substitute_Parameters (Constraint_Root : in Cells.Cell; Concrete_Procedure : in 
Dictionary.Symbol) -- replace formal parameters by actual ones in a subprogram constraint; --# global in Dictionary.Dict; --# in SubstitutionTable; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives Statistics.TableUsage, --# VCGHeap from *, --# Concrete_Procedure, --# Constraint_Root, --# Dictionary.Dict, --# SubstitutionTable, --# VCGHeap; is Subs, P : Cells.Cell; S : CStacks.Stack; VarSym : Dictionary.Symbol; Change : Boolean; begin -- DAG traversal algorithm of D.E. Knuth, Fundamental Algorithms, p.317; CStacks.CreateStack (S); Subs := Cells.Null_Cell; -- to avoid conditional DFA later P := Constraint_Root; loop loop exit when Cells.Is_Null_Cell (P); CStacks.Push (VCGHeap, P, S); if Is_Leaf (Node => P, VCG_Heap => VCGHeap) then P := Cells.Null_Cell; else P := LeftPtr (VCGHeap, P); end if; end loop; exit when CStacks.IsEmpty (S); P := CStacks.Top (VCGHeap, S); CStacks.Pop (VCGHeap, S); if Is_Leaf (Node => P, VCG_Heap => VCGHeap) then Change := False; VarSym := Cells.Get_Symbol_Value (VCGHeap, P); if Cells.Get_Kind (VCGHeap, P) = Cell_Storage.Reference then Substitute_Import (Sym => VarSym, Concrete_Procedure => Concrete_Procedure, Change => Change, Result => Subs); elsif Cells.Get_Kind (VCGHeap, P) = Cell_Storage.Unconstrained_Attribute_Prefix then Substitute_Import_Constraint (Sym => VarSym, Concrete_Procedure => Concrete_Procedure, Change => Change, Result => Subs); end if; if Change then Cells.Copy_Contents (VCGHeap, Subs, P); end if; P := Cells.Null_Cell; else P := RightPtr (VCGHeap, P); end if; end loop; end Substitute_Parameters; procedure Check_Type_Of_Actual_Import_Params (Concrete_Procedure : in Dictionary.Symbol) --# global in Dictionary.Dict; --# in Scope; --# in SubstitutionTable; --# in out CheckStack; --# in out ContainsReals; --# in out ShortCircuitStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives CheckStack, --# ContainsReals, --# ShortCircuitStack, --# Statistics.TableUsage, --# VCGHeap from *, --# CheckStack, --# Concrete_Procedure, --# Dictionary.Dict, --# Scope, --# ShortCircuitStack, --# SubstitutionTable, --# VCGHeap; is ParamElement : Cells.Cell; ParamNum : Natural; ActualParam : Cells.Cell; FormalParam : Dictionary.Symbol; begin ParamNum := 1; ParamElement := Clists.FirstCell (VCGHeap, SubstitutionTable); while not Cells.Is_Null_Cell (ParamElement) loop ActualParam := AuxPtr (VCGHeap, ParamElement); FormalParam := Dictionary.GetSubprogramParameter (Concrete_Procedure, ParamNum); -- Formal parameters are always defined in the abstract view and cannot be refined -- and so we should always consider the abstract view. 
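-- Illustrative sketch only, not part of the Examiner sources; Small, P,
-- Caller and Y are hypothetical and P's body is elided.  It shows the kind
-- of call for which the check below is stacked: each imported actual must
-- satisfy the constraint of its formal's type.
subtype Small is Integer range 1 .. 10;

procedure P (X : in Small);

procedure Caller (Y : in Integer) is
begin
   P (Y);
   -- Y is an imported actual whose formal is of subtype Small, so a
   -- run-time check of roughly the form  y >= 1 and y <= 10  is generated
   -- against this statement via CheckConstraintRunTimeError.
end Caller;
-- End of illustrative sketch.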
if Dictionary.IsImport (Dictionary.IsAbstract, Concrete_Procedure, FormalParam) then CheckConstraintRunTimeError (Dictionary.GetType (FormalParam), ActualParam, Scope, VCGHeap, ShortCircuitStack, CheckStack, ContainsReals); end if; ParamElement := Clists.NextCell (VCGHeap, ParamElement); ParamNum := ParamNum + 1; end loop; end Check_Type_Of_Actual_Import_Params; begin -- Model_Precondition if Dictionary.IsInstantiation (Concrete_Procedure) then Instantiated_Subprogram := Concrete_Procedure; Local_Abstraction := Dictionary.IsAbstract; else -- not generic Instantiated_Subprogram := Dictionary.NullSymbol; Local_Abstraction := Abstraction; end if; Constraint := STree.RefToNode (Dictionary.GetPrecondition (Local_Abstraction, Concrete_Procedure)); if Constraint /= STree.NullNode then -- Initialize the function definition stack CStacks.CreateStack (Function_Defs); Build_Annotation_Expression (Exp_Node => Constraint, Instantiated_Subprogram => Instantiated_Subprogram, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Concrete_Procedure), Calling_Scope => Scope, Force_Abstract => Abstraction = Dictionary.IsAbstract, Loop_Stack => LoopStack, Generate_Function_Instantiations => True, VC_Failure => VCGFailure, VC_Contains_Reals => ContainsReals, VCG_Heap => VCGHeap, DAG_Root => DAG_Cell, Function_Defs => Function_Defs); if not CStacks.IsEmpty (Function_Defs) then -- Functions are called within the procedure's pre condition -- Use null statement as place holder for the function definitions ModelNullStmt (VCGHeap); -- Conjoin all the function definitions on the stack --# accept F, 10, Function_Defs, "The stack has been emptied"; Join_And (Stack => Function_Defs, Conjunct => Conjoined_Function_Defs, VCG_Heap => VCGHeap); --# end accept; -- Use the actual parameters of the procedure call in the function -- definitions. 
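-- Illustrative sketch only, not part of the Examiner sources; Max_Size,
-- Stack_Count, Is_Full, Push and My_Ptr are hypothetical and all bodies are
-- elided.  It shows the situation handled at this point: a proof function
-- appears in the called procedure's precondition, so its return definition
-- is conjoined and instantiated with the actuals before being assumed at
-- the placeholder null statement.
Max_Size : constant := 100;
subtype Stack_Count is Natural range 0 .. Max_Size;

function Is_Full (Ptr : Stack_Count) return Boolean;
--# return Ptr = Max_Size;

procedure Push (Ptr : in out Stack_Count; X : in Integer);
--# pre not Is_Full (Ptr);

-- At a call  Push (My_Ptr, 42)  the stacked precondition check becomes
-- roughly  not is_full(my_ptr), and the return annotation of Is_Full is
-- assumed with My_Ptr substituted for Ptr, which is what the call to
-- Substitute_Parameters immediately below performs.
-- End of illustrative sketch.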
Substitute_Parameters (Constraint_Root => Conjoined_Function_Defs, Concrete_Procedure => Concrete_Procedure); -- Assume the function definitions from the point of the null statement IncorporateAssumption (VCGHeap, Conjoined_Function_Defs); end if; -- Substitute the actual parameters for the formal nes in the -- procedure's precondition Substitute_Parameters (Constraint_Root => DAG_Cell, Concrete_Procedure => Concrete_Procedure); StackCheckStatement (DAG_Cell, VCGHeap, CheckStack); KindOfStackedCheck := Graph.Precon_Check; end if; Check_Type_Of_Actual_Import_Params (Concrete_Procedure => Concrete_Procedure); end Model_Precondition; -------------------------------------------------------------------------- -- Abstraction gives the view of the postcondition to be used and Data_View gives the view of -- the Imports and Exports to be used procedure Model_Postcondition (Abstraction, Data_View : in Dictionary.Abstractions; Concrete_Procedure : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in LoopStack; --# in PrefixSym; --# in STree.Table; --# in SubprogramCalls; --# in SubstitutionTable; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGFailure; --# in out VCGHeap; --# derives ContainsReals, --# Dictionary.Dict, --# VCGFailure from *, --# Abstraction, --# CommandLineData.Content, --# Concrete_Procedure, --# Data_View, --# Dictionary.Dict, --# LexTokenManager.State, --# LoopStack, --# Scope, --# STree.Table, --# VCGHeap & --# ErrorHandler.Error_Context from *, --# Abstraction, --# CommandLineData.Content, --# Concrete_Procedure, --# Dictionary.Dict, --# LexTokenManager.State, --# LoopStack, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# VCGHeap & --# Graph.Table, --# StmtStack.S, --# VCGHeap from Abstraction, --# CommandLineData.Content, --# Concrete_Procedure, --# Data_View, --# Dictionary.Dict, --# Graph.Table, --# LexTokenManager.State, --# LoopStack, --# PrefixSym, --# Scope, --# StmtStack.S, --# STree.Table, --# SubprogramCalls, --# SubstitutionTable, --# VCGHeap & --# LexTokenManager.State, --# Statistics.TableUsage from *, --# Abstraction, --# CommandLineData.Content, --# Concrete_Procedure, --# Data_View, --# Dictionary.Dict, --# LexTokenManager.State, --# LoopStack, --# PrefixSym, --# Scope, --# STree.Table, --# SubprogramCalls, --# SubstitutionTable, --# VCGHeap & --# SPARK_IO.File_Sys from *, --# Abstraction, --# CommandLineData.Content, --# Concrete_Procedure, --# Data_View, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# LoopStack, --# Scope, --# STree.Table, --# VCGHeap; is Instantiated_Subprogram : Dictionary.Symbol; Local_Abstraction : Dictionary.Abstractions; Constraint : STree.SyntaxNode; DAG_Root, Stmt_Cell : Cells.Cell; Stmt_Label : Labels.Label; Function_Defs : CStacks.Stack; Conjoined_Function_Defs : Cells.Cell; -------------------------------------------- -- Constraint_Root gives the root node of the postcondition. -- Data_View gives the view to be used for imports and exports of the procedure. 
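-- Illustrative sketch only, not part of the Examiner sources; Counter,
-- State, Count and Reset are hypothetical and elaboration-time
-- initialization annotations are elided.  Roughly speaking, a caller
-- outside the package sees the abstract own variable State
-- (Data_View = IsAbstract), whereas a caller inside the package body sees
-- the refinement constituents (Data_View = IsRefined).
package Counter
--# own State;
is
   procedure Reset;
   --# global out State;             -- abstract data view
   --# derives State from ;
end Counter;

package body Counter
--# own State is Count;              -- data refinement is present
is
   Count : Integer;

   procedure Reset
   --# global out Count;             -- refined data view
   --# derives Count from ;
   --# post Count = 0;               -- refined proof view
   is
   begin
      Count := 0;
   end Reset;
end Counter;
-- End of illustrative sketch.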
procedure Substitute_Parameters (Constraint_Root : in out Cells.Cell; Data_View : in Dictionary.Abstractions; Concrete_Procedure : in Dictionary.Symbol; Scope : in Dictionary.Scopes) -- replace formal parameters by actual ones in a subprogram constraint; --# global in Dictionary.Dict; --# in PrefixSym; --# in SubprogramCalls; --# in SubstitutionTable; --# in out LexTokenManager.State; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives Constraint_Root, --# LexTokenManager.State, --# Statistics.TableUsage, --# VCGHeap from *, --# Concrete_Procedure, --# Constraint_Root, --# Data_View, --# Dictionary.Dict, --# LexTokenManager.State, --# PrefixSym, --# Scope, --# SubprogramCalls, --# SubstitutionTable, --# VCGHeap; is Subs, P : Cells.Cell; S : CStacks.Stack; VarSym : Dictionary.Symbol; Tilded : Boolean; Change : Boolean; HypStack : CStacks.Stack; -- procedure, generates hypothesis that all export record -- fields are unchanged other than that appearing as exported actual -- parameter. -- Data_View determines the view of the imports and exports of the procedure. procedure Add_In_Record_Field_And_Array_Element_Preservation (Data_View : in Dictionary.Abstractions; Concrete_Procedure : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in Dictionary.Dict; --# in PrefixSym; --# in SubprogramCalls; --# in SubstitutionTable; --# in out HypStack; --# in out LexTokenManager.State; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives HypStack, --# LexTokenManager.State, --# Statistics.TableUsage, --# VCGHeap from *, --# Concrete_Procedure, --# Data_View, --# Dictionary.Dict, --# HypStack, --# LexTokenManager.State, --# Scope, --# SubprogramCalls, --# SubstitutionTable, --# VCGHeap & --# null from PrefixSym; is It : Dictionary.Iterator; ExportSym, UnusedSym : Dictionary.Symbol; Temp_Cell : Cells.Cell; OpCell : Cells.Cell; Converted_Export_DAG : Cells.Cell; Export_DAG : Cells.Cell; Ptr_To_Export_DAG : Cells.Cell; Ptr : Cells.Cell; EntireExport : Cells.Cell; UpdateCell : Cells.Cell; S : CStacks.Stack; procedure Convert_Export_DAG (Export_DAG : in Cells.Cell; Converted_Export_DAG : out Cells.Cell; Entire_Export : out Cells.Cell) --# global in SubprogramCalls; --# in out LexTokenManager.State; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives Converted_Export_DAG, --# Entire_Export from Export_DAG, --# VCGHeap & --# LexTokenManager.State from *, --# SubprogramCalls & --# Statistics.TableUsage from *, --# Export_DAG, --# VCGHeap & --# VCGHeap from *, --# Export_DAG, --# LexTokenManager.State, --# SubprogramCalls; is Count_Str : LexTokenManager.Lex_String; begin Structures.CopyStructure (VCGHeap, Export_DAG, Converted_Export_DAG); Entire_Export := Converted_Export_DAG; while IsSelector (Entire_Export) loop Entire_Export := ArgumentOfSelector (Entire_Export); end loop; -- Entire_Export now points at entire variable cell Cells.Set_Kind (VCGHeap, Entire_Export, Cell_Storage.Procedure_Export); LexTokenManager.Insert_Nat (N => SubprogramCalls, Lex_Str => Count_Str); Cells.Set_Lex_Str (VCGHeap, Entire_Export, Count_Str); end Convert_Export_DAG; ------------------- procedure AddInSiblingHypotheses (Export_DAG, Converted_Export_DAG : in Cells.Cell; Scope : in Dictionary.Scopes) --# global in Dictionary.Dict; --# in out HypStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives HypStack, --# Statistics.TableUsage, --# VCGHeap from *, --# Converted_Export_DAG, --# Dictionary.Dict, --# Export_DAG, --# HypStack, --# Scope, --# VCGHeap; is 
Export_DAG_Local, Converted_Export_DAG_Local : Cells.Cell; procedure DoOneLevelOfSiblings (Export_DAG, Converted_Export_DAG : in Cells.Cell; Scope : in Dictionary.Scopes) --# global in Dictionary.Dict; --# in out HypStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives HypStack, --# Statistics.TableUsage, --# VCGHeap from *, --# Converted_Export_DAG, --# Dictionary.Dict, --# Export_DAG, --# HypStack, --# Scope, --# VCGHeap; is It : Dictionary.Iterator; Sym, SymOfExportedField : Dictionary.Symbol; AndCell : Cells.Cell; OpCell : Cells.Cell; Temp_Cell : Cells.Cell; Export_DAG_Copy : Cells.Cell; Local_Export_DAG : Cells.Cell; Converted_Export_DAG_Copy : Cells.Cell; CurrentRecord : Dictionary.Symbol; begin -- DoOneLevelOfSiblings CurrentRecord := Dictionary.GetType (Cells.Get_Symbol_Value (VCGHeap, RightPtr (VCGHeap, Export_DAG))); -- In the array of record case, once we have passed the indexing expression, we -- no longer have symbols for the record fields. This is indicated at this point -- by CurrentRecord being a NullSymbol. In that case we can't produce any useful -- hypotheses and we skip the whole remaining body if not Dictionary.Is_Null_Symbol (CurrentRecord) then -- suppress unchnaged field hypotheses for record fields that are -- private here if not Dictionary.IsPrivateType (CurrentRecord, Scope) then -- or else IsPredefinedTimeType not needed here as -- definitely a record -- CFR 1743 -- the symbol of the field being exported can be found at the top -- of the export DAG SymOfExportedField := Cells.Get_Symbol_Value (VCGHeap, Export_DAG); -- make local copy of Export_DAG and convert its rightmostr cell to a RefCell Structures.CopyStructure (VCGHeap, Export_DAG, Local_Export_DAG); Temp_Cell := Local_Export_DAG; while IsSelector (Temp_Cell) loop Temp_Cell := ArgumentOfSelector (Temp_Cell); end loop; -- Temp_Cell now points at entire variable cell Cells.Set_Kind (VCGHeap, Temp_Cell, Cell_Storage.Reference); -- iterate through all sibling field of that being exported, type -- of siblings is found one cell down from top of export dag It := Dictionary.FirstRecordComponent (CurrentRecord); while not Dictionary.IsNullIterator (It) loop Sym := Dictionary.CurrentSymbol (It); if not Dictionary.Record_Components_Are_Equal (Left_Symbol => Sym, Right_Symbol => SymOfExportedField) then -- need to build an "unchanged" hypothesis for this case Structures.CopyStructure (VCGHeap, Local_Export_DAG, Export_DAG_Copy); Structures.CopyStructure (VCGHeap, Converted_Export_DAG, Converted_Export_DAG_Copy); -- substitute current field into top of each DAG Cells.Set_Symbol_Value (VCGHeap, Export_DAG_Copy, Sym); Cells.Set_Lex_Str (VCGHeap, Export_DAG_Copy, Dictionary.GetSimpleName (Sym)); Cells.Set_Symbol_Value (VCGHeap, Converted_Export_DAG_Copy, Sym); Cells.Set_Lex_Str (VCGHeap, Converted_Export_DAG_Copy, Dictionary.GetSimpleName (Sym)); -- create an equals operator to asert them equal CreateOpCell (OpCell, VCGHeap, SP_Symbols.equals); -- assert equality SetRightArgument (OpCell, Export_DAG_Copy, VCGHeap); SetLeftArgument (OpCell, Converted_Export_DAG_Copy, VCGHeap); -- CFR 1744: The HypStack may only contain a null -- cell if this is the first term of the hypothesis. 
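-- Illustrative sketch only, not part of the Examiner sources; Pair, Demo, R
-- and Update_A are hypothetical, bodies are elided, and the FDL spellings
-- below are approximate.  It shows the sibling-preservation hypotheses built
-- here when a record component is the actual for an exported formal: every
-- sibling component is asserted to be unchanged by the call.
type Pair is record
   A, B : Integer;
end record;

procedure Update_A (X : out Integer);

procedure Demo (R : in out Pair) is
begin
   Update_A (R.A);
   -- Hypotheses generated for this call, roughly:
   --    fld_b(r__1) = fld_b(r)            -- sibling B is preserved
   --    r__1 = upf_a(r, <exported value>) -- catch-all update of the record
end Demo;
-- End of illustrative sketch.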
if Cells.Is_Null_Cell (CStacks.Top (VCGHeap, HypStack)) then -- Start of a new hypothesis CStacks.Push (VCGHeap, OpCell, HypStack); else -- Not the start of the hypothesis: -- 'and' result on to hypothesis stack CStacks.PopOff (VCGHeap, HypStack, AndCell); Cells.Utility.Conjoin (VCGHeap, OpCell, AndCell); CStacks.Push (VCGHeap, AndCell, HypStack); end if; end if; It := Dictionary.NextSymbol (It); end loop; end if; end if; end DoOneLevelOfSiblings; begin -- AddInSiblingHypotheses Export_DAG_Local := Export_DAG; Converted_Export_DAG_Local := Converted_Export_DAG; while IsSelector (Export_DAG_Local) loop if Cells.Get_Kind (VCGHeap, Export_DAG_Local) = Cell_Storage.Field_Access_Function then DoOneLevelOfSiblings (Export_DAG_Local, Converted_Export_DAG_Local, Scope); end if; Export_DAG_Local := ArgumentOfSelector (Export_DAG_Local); Converted_Export_DAG_Local := ArgumentOfSelector (Converted_Export_DAG_Local); end loop; end AddInSiblingHypotheses; begin -- Add_In_Record_Field_And_Array_Element_Preservation CStacks.CreateStack (S); It := Dictionary.FirstExport (Data_View, Concrete_Procedure); while not Dictionary.IsNullIterator (It) loop ExportSym := Dictionary.CurrentSymbol (It); --# accept F, 10, UnusedSym, "UnusedSym unused here"; Get_Export_Details (Formal_Or_Global_Sym => ExportSym, Concrete_Procedure => Concrete_Procedure, Entire_Actual_Sym => UnusedSym, Actual_DAG => Ptr_To_Export_DAG); --# end accept; if IsSelector (Ptr_To_Export_DAG) then -- a record field is exported so extra hypothesis is needed Structures.CopyStructure (VCGHeap, Ptr_To_Export_DAG, Export_DAG); Convert_Export_DAG (Ptr_To_Export_DAG, -- to get Converted_Export_DAG, EntireExport); -- create hypotheses that each sibling of exported field is -- unchanged AddInSiblingHypotheses (Export_DAG, Converted_Export_DAG, Scope); -- now continue to produce "catch-all" hypothesis that all -- unchanged record fields (not just sibling fields) are -- unchanged. -- we need to find bottom right of export DAG and convert cell -- there to a RefCell so that it is preserved as initial value Temp_Cell := Export_DAG; while IsSelector (Temp_Cell) loop Temp_Cell := ArgumentOfSelector (Temp_Cell); end loop; -- Temp_Cell now points at entire variable cell Cells.Set_Kind (VCGHeap, Temp_Cell, Cell_Storage.Reference); -- using ingredients Export_DAG, Converted_Export_DAG and EntireExport -- we can now build the hypothesis described in S.P0468.53.27 CStacks.Push (VCGHeap, Converted_Export_DAG, S); Ptr := Export_DAG; loop -- Need decision here on record or array handling. 
-- We know we are looping over either FldFunctions or ElementFunctions if Cells.Get_Kind (VCGHeap, Ptr) = Cell_Storage.Field_Access_Function then -- handling record field -- create upf_ function cell CreateUpfCell (UpdateCell, VCGHeap, Cells.Get_Symbol_Value (VCGHeap, Ptr), Cells.Get_Lex_Str (VCGHeap, Ptr)); else -- handling an array element -- create update function cell CreateCellKind (UpdateCell, VCGHeap, Cell_Storage.Update_Function); end if; -- create comma cell CreateOpCell (OpCell, VCGHeap, SP_Symbols.comma); -- and link it to arguments SetRightArgument (OpCell, CStacks.Top (VCGHeap, S), VCGHeap); SetLeftArgument (OpCell, RightPtr (VCGHeap, Ptr), VCGHeap); -- now link update cell to arguments SetRightArgument (UpdateCell, OpCell, VCGHeap); -- remove old expression from stack and replace by new one CStacks.Pop (VCGHeap, S); CStacks.Push (VCGHeap, UpdateCell, S); -- move down unconverted export_DAG Ptr := ArgumentOfSelector (Ptr); exit when not IsSelector (Ptr); end loop; -- TOS is the RHS of the desired hypothesis, we must set it equal -- to converted entire export CreateOpCell (OpCell, VCGHeap, SP_Symbols.equals); SetRightArgument (OpCell, CStacks.Top (VCGHeap, S), VCGHeap); SetLeftArgument (OpCell, EntireExport, VCGHeap); -- remove old expression from stack and replace by new one CStacks.Pop (VCGHeap, S); CStacks.Push (VCGHeap, OpCell, S); -- TOS is the desired new hypothesis, needs anding into current one CStacks.PopOff (VCGHeap, HypStack, OpCell); Cells.Utility.Conjoin (VCGHeap, CStacks.Top (VCGHeap, S), OpCell); CStacks.Push (VCGHeap, OpCell, HypStack); -- clean up local stack CStacks.Pop (VCGHeap, S); end if; It := Dictionary.NextSymbol (It); end loop; --# accept F, 35, PrefixSym, "Coupled to UnusedSym only" & --# F, 33, UnusedSym, "UnusedSym unused here"; end Add_In_Record_Field_And_Array_Element_Preservation; ----------------- procedure Substitute_Tilded_Parameter (The_Cell : in Cells.Cell; Concrete_Procedure : in Dictionary.Symbol) --# global in Dictionary.Dict; --# in PrefixSym; --# in SubstitutionTable; --# in out VCGHeap; --# derives VCGHeap from *, --# Concrete_Procedure, --# Dictionary.Dict, --# PrefixSym, --# SubstitutionTable, --# The_Cell; is Export_DAG : Cells.Cell; ExportSym : Dictionary.Symbol; begin Get_Export_Details (Formal_Or_Global_Sym => Cells.Get_Symbol_Value (VCGHeap, The_Cell), Concrete_Procedure => Concrete_Procedure, Entire_Actual_Sym => ExportSym, Actual_DAG => Export_DAG); -- see if the DAG represents a field access or array element if not Cells.Is_Null_Cell (Export_DAG) and then (Cells.Get_Kind (VCGHeap, Export_DAG) = Cell_Storage.Field_Access_Function or else Cells.Get_Kind (VCGHeap, Export_DAG) = Cell_Storage.Element_Function) then -- there is a field access, substitute DAG for cell contents Cells.Copy_Contents (VCGHeap, Export_DAG, The_Cell); else -- no field access, just substitute symbol of actual Cells.Set_Symbol_Value (VCGHeap, The_Cell, ExportSym); end if; end Substitute_Tilded_Parameter; begin -- Substitute_Parameters -- HypStack is used to ease joining of extra hypotheses concerning -- exportation of record fields to the main hypothesis where the called -- procedure's post-condition is assumed. CStacks.CreateStack (HypStack); CStacks.Push (VCGHeap, Constraint_Root, HypStack); -- stack S is used for the -- DAG traversal algorithm of D.E. 
Knuth, Fundamental Algorithms, p.317; CStacks.CreateStack (S); P := Constraint_Root; loop loop exit when Cells.Is_Null_Cell (P); CStacks.Push (VCGHeap, P, S); if Is_Leaf (Node => P, VCG_Heap => VCGHeap) then P := Cells.Null_Cell; else P := LeftPtr (VCGHeap, P); end if; end loop; exit when CStacks.IsEmpty (S); P := CStacks.Top (VCGHeap, S); CStacks.Pop (VCGHeap, S); if Is_Leaf (Node => P, VCG_Heap => VCGHeap) then if Cells.Get_Kind (VCGHeap, P) = Cell_Storage.Reference or else Cells.Get_Kind (VCGHeap, P) = Cell_Storage.Unconstrained_Attribute_Prefix then VarSym := Cells.Get_Symbol_Value (VCGHeap, P); Tilded := Cells.Get_Op_Symbol (VCGHeap, P) = SP_Symbols.tilde; if Dictionary.IsExport (Data_View, Concrete_Procedure, VarSym) then if Cells.Get_Kind (VCGHeap, P) = Cell_Storage.Unconstrained_Attribute_Prefix then -- Although VarSym is an export, it is used here as an attribute prefix -- and we don't want to convert it to an export cell or do anything to -- the tilde, rather we want to replace the variable name with a -- constraining type mark Substitute_Import_Constraint (Sym => VarSym, Concrete_Procedure => Concrete_Procedure, Change => Change, Result => Subs); if Change then Cells.Copy_Contents (VCGHeap, Subs, P); end if; else -- not an unconstrained attribute prefix - handle as before if Tilded then if Dictionary.Is_Global_Variable (Data_View, Concrete_Procedure, VarSym) then Cells.Set_Op_Symbol (VCGHeap, P, SP_Symbols.RWnull); else -- must be a parameter Cells.Set_Op_Symbol (VCGHeap, P, SP_Symbols.RWnull); Substitute_Tilded_Parameter (The_Cell => P, Concrete_Procedure => Concrete_Procedure); end if; else -- not Tilded, but still an export Convert_Cell_To_Export_Cell (Cell_Name => P, Concrete_Procedure => Concrete_Procedure); end if; end if; else -- process imports -- In a post-condition, a reference to an -- attribute of an unconstrained array parameter -- appears as am imported -- SubprogramParameterConstraint here here, so if Dictionary.IsSubprogramParameter (VarSym) or Dictionary.IsParameterConstraint (VarSym) then if Cells.Get_Kind (VCGHeap, P) = Cell_Storage.Reference then Substitute_Import (Sym => VarSym, Concrete_Procedure => Concrete_Procedure, Change => Change, Result => Subs); if Change then Cells.Copy_Contents (VCGHeap, Subs, P); end if; elsif Cells.Get_Kind (VCGHeap, P) = Cell_Storage.Unconstrained_Attribute_Prefix then Substitute_Import_Constraint (Sym => VarSym, Concrete_Procedure => Concrete_Procedure, Change => Change, Result => Subs); if Change then Cells.Copy_Contents (VCGHeap, Subs, P); end if; end if; end if; end if; end if; P := Cells.Null_Cell; else P := RightPtr (VCGHeap, P); end if; end loop; -- where record fields or array elements are exported we need to create a hypothis -- in terms of the entire export and, for records, assert that all -- unaffected fields are unchanged. Add_In_Record_Field_And_Array_Element_Preservation (Data_View => Data_View, Concrete_Procedure => Concrete_Procedure, Scope => Scope); --# accept F, 10, HypStack, "HypStack unused here"; CStacks.PopOff (VCGHeap, HypStack, Constraint_Root); --# end accept; end Substitute_Parameters; -- Data_View gives the view of the imports and exports of the procedure to be used. 
procedure Assume_Type_Of_Actual_Exports (DAG_Root : in out Cells.Cell; Data_View : in Dictionary.Abstractions; Concrete_Procedure : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in out ContainsReals; --# in out Dictionary.Dict; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out VCGFailure; --# in out VCGHeap; --# derives ContainsReals, --# DAG_Root, --# Dictionary.Dict, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Statistics.TableUsage, --# VCGFailure, --# VCGHeap from *, --# CommandLineData.Content, --# Concrete_Procedure, --# DAG_Root, --# Data_View, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# VCGHeap; is It : Dictionary.Iterator; ExportVar : Dictionary.Symbol; Type_Sym : Dictionary.Symbol; begin It := Dictionary.FirstExport (Data_View, Concrete_Procedure); while not Dictionary.IsNullIterator (It) loop ExportVar := Dictionary.CurrentSymbol (It); if IsDirectlyVisible (ExportVar, Scope) and then not Dictionary.IsOwnVariableOrConstituentWithMode (ExportVar) then Type_Sym := Dictionary.GetType (ExportVar); if not Dictionary.IsPrivateType (Type_Sym, Scope) or else Dictionary.IsPredefinedTimeType (Type_Sym) then ConjoinParamConstraint (Type_Sym, ExportVar, Scope, False, DAG_Root); end if; end if; It := Dictionary.NextSymbol (It); end loop; end Assume_Type_Of_Actual_Exports; begin -- Model_Postcondition if Dictionary.IsInstantiation (Concrete_Procedure) then -- for instantiations we go and get the constraint of the original generic Instantiated_Subprogram := Concrete_Procedure; Local_Abstraction := Dictionary.IsAbstract; else -- not generic Instantiated_Subprogram := Dictionary.NullSymbol; Local_Abstraction := Abstraction; end if; Constraint := STree.RefToNode (Dictionary.GetPostcondition (Local_Abstraction, Concrete_Procedure)); if Constraint /= STree.NullNode then -- Initialize the function definition stack CStacks.CreateStack (Function_Defs); Build_Annotation_Expression (Exp_Node => Constraint, Instantiated_Subprogram => Instantiated_Subprogram, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Concrete_Procedure), Calling_Scope => Scope, Force_Abstract => Abstraction = Dictionary.IsAbstract, Loop_Stack => LoopStack, Generate_Function_Instantiations => True, VC_Failure => VCGFailure, VC_Contains_Reals => ContainsReals, VCG_Heap => VCGHeap, DAG_Root => DAG_Root, Function_Defs => Function_Defs); if not CStacks.IsEmpty (Function_Defs) then -- Functions are called within the procedure's post condition -- Conjoin all the function definitions on the stack --# accept F, 10, Function_Defs, "The stack has been emptied"; Join_And (Stack => Function_Defs, Conjunct => Conjoined_Function_Defs, VCG_Heap => VCGHeap); --# end accept; -- conjoin the function definitions wit the procedure's post condition Cells.Utility.Conjoin (VCGHeap, Conjoined_Function_Defs, DAG_Root); end if; else DAG_Root := Cells.Null_Cell; end if; -- Ensure the correct view of the exports is used determined by the value of Data_View Assume_Type_Of_Actual_Exports (DAG_Root => DAG_Root, Data_View => Data_View, Concrete_Procedure => Concrete_Procedure, Scope => Scope); -- CFR 1744: Call Substitute_Parameters even if DAG_Root is a null cell -- in order to generate hypothesis for invariant fields of composite -- objects where an exported variable of a private type is a component -- of the composite object. 
-- Ensure the correct view of imports and exports is used determined by the value of Data_View Substitute_Parameters (Constraint_Root => DAG_Root, Data_View => Data_View, Concrete_Procedure => Concrete_Procedure, Scope => Scope); PrepareLabel (VCGHeap, Stmt_Label, Stmt_Cell); SetRightArgument (Stmt_Cell, DAG_Root, VCGHeap); Chain (Stmt_Label, VCGHeap); end Model_Postcondition; procedure CheckTypeOfActualExportParams --# global in Concrete_Procedure; --# in Dictionary.Dict; --# in Scope; --# in SubstitutionTable; --# in out CheckStack; --# in out ContainsReals; --# in out ShortCircuitStack; --# in out Statistics.TableUsage; --# in out VCGHeap; --# derives CheckStack, --# ContainsReals, --# ShortCircuitStack, --# Statistics.TableUsage, --# VCGHeap from *, --# CheckStack, --# Concrete_Procedure, --# Dictionary.Dict, --# Scope, --# ShortCircuitStack, --# SubstitutionTable, --# VCGHeap; is ParamElement : Cells.Cell; ParamNum : Natural; ActualParamCell : Cells.Cell; ActualParamSym : Dictionary.Symbol; ActualParamType : Dictionary.Symbol; FormalParamSym : Dictionary.Symbol; -- This subprogram is added merely as a temporary measure -- to flag up presence of additional VCs being generated -- as a result of the bug under SEPR 2124. -- To be removed in a future release of the Examiner. procedure SEPR2124Warning --# derives ; is --# hide SEPR2124Warning; begin -- Only output warning in case of different types in use if not Dictionary.Types_Are_Equal (Left_Symbol => Dictionary.GetType (FormalParamSym), Right_Symbol => ActualParamType, Full_Range_Subtype => False) then ErrorHandler.Semantic_Warning (Err_Num => 420, Position => LexTokenManager.Token_Position'(Start_Line_No => LexTokenManager.Line_Numbers (LineNmbr), Start_Pos => 0), Id_Str => LexTokenManager.Null_String); end if; end SEPR2124Warning; begin ParamNum := 1; ParamElement := Clists.FirstCell (VCGHeap, SubstitutionTable); while not Cells.Is_Null_Cell (ParamElement) loop FormalParamSym := Dictionary.GetSubprogramParameter (Concrete_Procedure, ParamNum); ActualParamCell := AuxPtr (VCGHeap, ParamElement); -- Subprogram parameters cannot be refined they are always abstract if Dictionary.IsExport (Dictionary.IsAbstract, Concrete_Procedure, FormalParamSym) then -- We need to work out the subtype of the actual parameter -- here for the case where the actual is a subtype of the -- formal parameter, and so requires a range check. 
if Cells.Get_Kind (VCGHeap, ActualParamCell) = Cell_Storage.Field_Access_Function then ActualParamSym := Cells.Get_Symbol_Value (VCGHeap, ActualParamCell); ActualParamType := Dictionary.GetType (ActualParamSym); SEPR2124Warning; elsif Cells.Get_Kind (VCGHeap, ActualParamCell) = Cell_Storage.Element_Function then -- BuildExpnDAG.ProcessNameArgumentList will have planted -- the type of the array component here as the SymValue of -- the ActualParamCell, so ActualParamType := Cells.Get_Symbol_Value (VCGHeap, ActualParamCell); SEPR2124Warning; else -- not an array element or record field selector, so must be a simple variable ActualParamSym := Cells.Get_Symbol_Value (VCGHeap, ActualParamCell); ActualParamType := Dictionary.GetType (ActualParamSym); end if; CheckConstraintRunTimeError (ActualParamType, ActualParamCell, Scope, VCGHeap, ShortCircuitStack, CheckStack, ContainsReals); end if; ParamElement := Clists.NextCell (VCGHeap, ParamElement); ParamNum := ParamNum + 1; end loop; end CheckTypeOfActualExportParams; ---------------------------------------------------------------- -- Data_View gives the view of the imports and exports to be used. procedure BuildVCSAssignments (Data_View : in Dictionary.Abstractions) --# global in Concrete_Procedure; --# in Dictionary.Dict; --# in PrefixSym; --# in SubprogramCalls; --# in SubstitutionTable; --# in out Graph.Table; --# in out LexTokenManager.State; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGHeap; --# derives Graph.Table, --# StmtStack.S, --# VCGHeap from Concrete_Procedure, --# Data_View, --# Dictionary.Dict, --# Graph.Table, --# LexTokenManager.State, --# PrefixSym, --# StmtStack.S, --# SubprogramCalls, --# SubstitutionTable, --# VCGHeap & --# LexTokenManager.State from *, --# Concrete_Procedure, --# Data_View, --# Dictionary.Dict, --# SubprogramCalls & --# Statistics.TableUsage from *, --# Concrete_Procedure, --# Data_View, --# Dictionary.Dict, --# LexTokenManager.State, --# PrefixSym, --# SubprogramCalls, --# SubstitutionTable, --# VCGHeap; is It : Dictionary.Iterator; ExportSym : Dictionary.Symbol; ExportCell, AssignedCell : Cells.Cell; ExportsFound : Boolean; ModList : Cells.Cell; StmtPair : Cells.Cell; Stmt_Label : Labels.Label; EntireExportSym : Dictionary.Symbol; Unused_Export_DAG : Cells.Cell; begin ExportsFound := False; Clists.CreateList (VCGHeap, ModList); It := Dictionary.FirstExport (Data_View, Concrete_Procedure); while not Dictionary.IsNullIterator (It) loop ExportSym := Dictionary.CurrentSymbol (It); if not Dictionary.Is_Null_Variable (ExportSym) then --don't model data sinks ExportsFound := True; --# accept F, 10, Unused_Export_DAG, "Unused_Export_DAG unused here"; Get_Export_Details (Formal_Or_Global_Sym => ExportSym, Concrete_Procedure => Concrete_Procedure, Entire_Actual_Sym => EntireExportSym, Actual_DAG => Unused_Export_DAG); --# end accept; CreateModifiedCell (ExportCell, VCGHeap, EntireExportSym); Clists.InsertCell (VCGHeap, ExportCell, ModList); Cells.Create_Cell (VCGHeap, AssignedCell); Cells.Set_Symbol_Value (VCGHeap, AssignedCell, EntireExportSym); Convert_Cell_To_Export_Cell (Cell_Name => AssignedCell, Concrete_Procedure => Concrete_Procedure); SetRightArgument (ExportCell, AssignedCell, VCGHeap); end if; It := Dictionary.NextSymbol (It); end loop; if ExportsFound then PrepareLabel (VCGHeap, Stmt_Label, StmtPair); SetAuxPtr (StmtPair, ModList, VCGHeap); else CreateUnitLabel (Stmt_Label, VCGHeap); end if; Chain (Stmt_Label, VCGHeap); --# accept F, 33, Unused_Export_DAG, "Unused_Export_DAG 
unused here"; end BuildVCSAssignments; begin -- ModelProcedureCall SubprogramCalls := SubprogramCalls + 1; -- Node represents procedure_call_statement; -- If we are calling an inherited root operation then the proc_call node will -- have been seeded with the actual procedure that got called. Concrete_Procedure := STree.NodeSymbol (Node); if not Dictionary.Is_Null_Symbol (Concrete_Procedure) and then Dictionary.Is_Variable (Concrete_Procedure) then -- We have found something in the syntax tree but it is not a subprogram. In that -- case it must be a protected object prefix to a protected op call. We need this so -- we can subtitute instances of PT in annotations with PO. PO is what we have just found. PrefixSym := Concrete_Procedure; Concrete_Procedure := Dictionary.NullSymbol; end if; if Dictionary.Is_Null_Symbol (Concrete_Procedure) then -- no root proc symbol found so carry on and find a procedure name locally BuildExpnDAG (STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => Node)), LScope, Scope, LineNmbr, True, --PNA observation, 8/3/4, why is RTC turned on when all we are doing is identifying proc name? False, LoopStack, FlowHeap, VCGHeap, ContainsReals, VCGFailure, ShortCircuitStack, CheckStack, KindOfStackedCheck, -- to get Procedure_Name_Cell); Concrete_Procedure := Cells.Get_Symbol_Value (VCGHeap, Procedure_Name_Cell); end if; -- First obtain what the view of the pre and post conditions is - -- abstract or refined? Abstraction := Dictionary.GetConstraintAbstraction (Concrete_Procedure, LScope); if Abstraction = Dictionary.IsRefined then -- The view of the pre and post condition is refined. -- Determine whether the view of the data should be -- Abstract (Proof Refinement only) or -- Refined (Data Refinement is present). if Dictionary.IsNullIterator (Dictionary.FirstGlobalVariable (Dictionary.IsRefined, Concrete_Procedure)) then -- No Data Refinement. Data_View := Dictionary.IsAbstract; else -- Data Refinement is present. Data_View := Dictionary.IsRefined; end if; else -- Only proof refinement is present. 
Data_View := Dictionary.IsAbstract; end if; Build_Substitution_Table (Node => Node, Concrete_Procedure => Concrete_Procedure); Model_Precondition (Abstraction => Abstraction, Concrete_Procedure => Concrete_Procedure); UnStackRtcs (LineNmbr, VCGHeap, CheckStack, KindOfStackedCheck); Model_Postcondition (Abstraction => Abstraction, Data_View => Data_View, Concrete_Procedure => Concrete_Procedure, Scope => Scope); BuildVCSAssignments (Data_View => Data_View); CheckTypeOfActualExportParams; UnStackRtcs (LineNmbr, VCGHeap, CheckStack, KindOfStackedCheck); Clists.DisposeOfList (VCGHeap, SubstitutionTable); end ModelProcedureCall; spark-2012.0.deb/examiner/spparser.smf0000644000175000017500000000041211753202337016615 0ustar eugeneugenspparser.adb sp_parser_actions.adb -vcg sp_parser_actions-scan_action_table.adb sp_parser_actions-spa.adb sp_parser_goto.adb -vcg sp_parser_goto-scan_goto_table.adb sp_parser_goto-sp_goto.adb sp_expected_symbols.adb -vcg sp_expected_symbols-get_expected_symbols.adb spark-2012.0.deb/examiner/e_strings.adb0000644000175000017500000006376711753202336016741 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Characters.Handling; with Ada.Characters.Latin_1; with SPARK.Ada.Strings.Maps; with SPARK.Ada.Strings.Unbounded.Not_SPARK; package body E_Strings is function To_Unbounded_String (E_Str : T) return SPARK.Ada.Strings.Unbounded.Unbounded_String is begin return E_Str.Content; end To_Unbounded_String; -- Case (in)sensitive comparison. Returns true if and only if the strings -- are equal, given the specified lengths. function Eq_String_Local (Str1 : SPARK.Ada.Strings.Unbounded.Unbounded_String; Str2 : SPARK.Ada.Strings.Unbounded.Unbounded_String) return Boolean is Result : Boolean; begin if SPARK.Ada.Strings.Unbounded.Get_Length (Source => Str1) = SPARK.Ada.Strings.Unbounded.Get_Length (Source => Str2) then Result := True; for I in Positions range 1 .. SPARK.Ada.Strings.Unbounded.Get_Length (Source => Str1) loop --# assert SPARK.Ada.Strings.Unbounded.Get_Length (Str1) = --# SPARK.Ada.Strings.Unbounded.Get_Length (Str1%) and --# SPARK.Ada.Strings.Unbounded.Get_Length (Str1) in Lengths; Result := Ada.Characters.Handling.To_Lower (SPARK.Ada.Strings.Unbounded.Get_Element (Source => Str1, Index => I)) = Ada.Characters.Handling.To_Lower (SPARK.Ada.Strings.Unbounded.Get_Element (Source => Str2, Index => I)); exit when not Result; end loop; else Result := False; end if; return Result; end Eq_String_Local; -- Case INsensitive comparison. 
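   -- For example (illustrative only),
   --    Eq_String (Copy_String ("spark"), Copy_String ("SPARK"))
   -- returns True, whereas Eq_CS_String below is case sensitive.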
function Eq_String (E_Str1, E_Str2 : T) return Boolean is begin return Eq_String_Local (Str1 => E_Str1.Content, Str2 => E_Str2.Content); end Eq_String; -- Case INsensitive comparison. function Eq1_String (E_Str : T; Str : String) return Boolean is begin return Eq_String_Local (Str1 => E_Str.Content, Str2 => SPARK.Ada.Strings.Unbounded.String_To_Unbounded_String (Source => Str)); end Eq1_String; -- Case sensitive comparison. function Eq_CS_String (E_Str1, E_Str2 : T) return Boolean is begin return SPARK.Ada.Strings.Unbounded.Equal_Unbounded_String_Unbounded_String (Left => E_Str1.Content, Right => E_Str2.Content); end Eq_CS_String; function Is_Empty (E_Str : T) return Boolean is begin return E_Str = Empty_String; end Is_Empty; function Get_Length (E_Str : T) return Lengths is begin return SPARK.Ada.Strings.Unbounded.Get_Length (Source => E_Str.Content); end Get_Length; function Get_Empty_String return T is begin return Empty_String; end Get_Empty_String; function Get_Element (E_Str : T; Pos : Positions) return Character is Return_Value : Character; begin if Pos <= SPARK.Ada.Strings.Unbounded.Get_Length (Source => E_Str.Content) then Return_Value := SPARK.Ada.Strings.Unbounded.Get_Element (Source => E_Str.Content, Index => Pos); else Return_Value := ' '; end if; return Return_Value; end Get_Element; function Copy_String (Str : String) return T is begin return T'(Content => SPARK.Ada.Strings.Unbounded.String_To_Unbounded_String (Source => Str)); end Copy_String; procedure Append_String (E_Str : in out T; Str : in String) is begin SPARK.Ada.Strings.Unbounded.Append_String (Source => E_Str.Content, New_Item => Str); end Append_String; procedure Append_Examiner_String (E_Str1 : in out T; E_Str2 : in T) is begin SPARK.Ada.Strings.Unbounded.Append_Unbounded_String (Source => E_Str1.Content, New_Item => E_Str2.Content); end Append_Examiner_String; function Lower_Case (E_Str : T) return T is Temp : Character; Result : T; begin Result := E_Str; for I in Natural range 1 .. SPARK.Ada.Strings.Unbounded.Get_Length (Source => E_Str.Content) loop --# assert SPARK.Ada.Strings.Unbounded.Get_Length (E_Str.Content) = --# SPARK.Ada.Strings.Unbounded.Get_Length (E_Str%.Content) and --# SPARK.Ada.Strings.Unbounded.Get_Length (E_Str.Content) in Lengths; Temp := SPARK.Ada.Strings.Unbounded.Get_Element (Source => E_Str.Content, Index => I); if Temp in 'A' .. 'Z' then SPARK.Ada.Strings.Unbounded.Replace_Element (Source => Result.Content, Index => I, By => Ada.Characters.Handling.To_Lower (Temp)); end if; end loop; return Result; end Lower_Case; function Upper_Case (E_Str : T) return T is Temp : Character; Result : T; begin Result := E_Str; for I in Natural range 1 .. SPARK.Ada.Strings.Unbounded.Get_Length (Source => E_Str.Content) loop --# assert SPARK.Ada.Strings.Unbounded.Get_Length (E_Str.Content) = --# SPARK.Ada.Strings.Unbounded.Get_Length (E_Str%.Content) and --# SPARK.Ada.Strings.Unbounded.Get_Length (E_Str.Content) in Lengths; Temp := SPARK.Ada.Strings.Unbounded.Get_Element (Source => E_Str.Content, Index => I); if Temp in 'a' .. 
'z' then SPARK.Ada.Strings.Unbounded.Replace_Element (Source => Result.Content, Index => I, By => Ada.Characters.Handling.To_Upper (Temp)); end if; end loop; return Result; end Upper_Case; function Lower_Case_Char (E_Str : T; Pos : Positions) return T is Result : T; begin Result := E_Str; SPARK.Ada.Strings.Unbounded.Replace_Element (Source => Result.Content, Index => Pos, By => Ada.Characters.Handling.To_Lower (SPARK.Ada.Strings.Unbounded.Get_Element (Source => Result.Content, Index => Pos))); return Result; end Lower_Case_Char; function Upper_Case_Char (E_Str : T; Pos : Positions) return T is Result : T; begin Result := E_Str; SPARK.Ada.Strings.Unbounded.Replace_Element (Source => Result.Content, Index => Pos, By => Ada.Characters.Handling.To_Upper (SPARK.Ada.Strings.Unbounded.Get_Element (Source => Result.Content, Index => Pos))); return Result; end Upper_Case_Char; function Translate (E_Str : T; From_Char : Character; To_Char : Character) return T is Result : T; begin Result := E_Str; for I in Positions range 1 .. SPARK.Ada.Strings.Unbounded.Get_Length (Source => E_Str.Content) loop --# assert SPARK.Ada.Strings.Unbounded.Get_Length (E_Str.Content) = --# SPARK.Ada.Strings.Unbounded.Get_Length (E_Str%.Content) and --# SPARK.Ada.Strings.Unbounded.Get_Length (E_Str.Content) in Lengths; if SPARK.Ada.Strings.Unbounded.Get_Element (Source => E_Str.Content, Index => I) = From_Char then SPARK.Ada.Strings.Unbounded.Replace_Element (Source => Result.Content, Index => I, By => To_Char); end if; end loop; return Result; end Translate; procedure Append_Char (E_Str : in out T; Ch : in Character) is begin SPARK.Ada.Strings.Unbounded.Append_Char (Source => E_Str.Content, New_Item => Ch); end Append_Char; ------------------------------------------------------------------------- -- Find_Sub_String_After: for use in summary tool -- find the specified SearchString, starting at the specified position in -- the given T ------------------------------------------------------------------------- procedure Find_Sub_String_After (E_Str : in T; Search_Start : in Positions; Search_String : in String; String_Found : out Boolean; String_Start : out Positions) is Result : Natural; begin Result := SPARK.Ada.Strings.Unbounded.Index_Pattern_From (Source => E_Str.Content, Pattern => Search_String, Arg_From => Search_Start, Going => SPARK.Ada.Strings.Direction_Forward, Mapping => SPARK.Ada.Strings.Maps.Identity); if Result = 0 then String_Found := False; String_Start := 1; else String_Found := True; String_Start := Result; end if; end Find_Sub_String_After; -------------------------------------------------------------------------- -- Find_Sub_String: for use in summary tool -- find specified SearchString in the given T -------------------------------------------------------------------------- procedure Find_Sub_String (E_Str : in T; Search_String : in String; String_Found : out Boolean; String_Start : out Positions) is Result : Natural; begin Result := SPARK.Ada.Strings.Unbounded.Index_Pattern (Source => E_Str.Content, Pattern => Search_String, Going => SPARK.Ada.Strings.Direction_Forward, Mapping => SPARK.Ada.Strings.Maps.Identity); if Result = 0 then String_Found := False; String_Start := 1; else String_Found := True; String_Start := Result; end if; end Find_Sub_String; procedure Find_Examiner_Sub_String (E_Str : in T; Search_String : in T; String_Found : out Boolean; String_Start : out Positions) is --# hide Find_Examiner_Sub_String; Result : Natural; begin Result := SPARK.Ada.Strings.Unbounded.Index_Pattern (Source => 
E_Str.Content, Pattern => SPARK.Ada.Strings.Unbounded.Not_SPARK.To_String (Source => Search_String.Content), Going => SPARK.Ada.Strings.Direction_Forward, Mapping => SPARK.Ada.Strings.Maps.Identity); if Result = 0 then String_Found := False; String_Start := 1; else String_Found := True; String_Start := Result; end if; end Find_Examiner_Sub_String; procedure Pop_Char (E_Str : in out T; Char : out Character) is begin if SPARK.Ada.Strings.Unbounded.Get_Length (Source => E_Str.Content) = 0 then Char := ' '; else Char := SPARK.Ada.Strings.Unbounded.Get_Element (Source => E_Str.Content, Index => Positions'First); SPARK.Ada.Strings.Unbounded.Procedure_Delete (Source => E_Str.Content, Arg_From => Positions'First, Through => 1); end if; end Pop_Char; procedure Find_Char_After (E_Str : in T; Search_Start : in Positions; Search_Char : in Character; Char_Found : out Boolean; Char_Pos : out Positions) is Result : Natural; begin Result := SPARK.Ada.Strings.Unbounded.Index_Set_From (Source => E_Str.Content, Arg_Set => SPARK.Ada.Strings.Maps.Singleton_To_Set (Singleton => Search_Char), Arg_From => Search_Start, Test => SPARK.Ada.Strings.Membership_Inside, Going => SPARK.Ada.Strings.Direction_Forward); if Result = 0 then Char_Found := False; Char_Pos := 1; else Char_Found := True; Char_Pos := Result; end if; end Find_Char_After; procedure Find_Char (E_Str : in T; Search_Char : in Character; Char_Found : out Boolean; Char_Pos : out Positions) is Result : Natural; begin Result := SPARK.Ada.Strings.Unbounded.Index_Set (Source => E_Str.Content, Arg_Set => SPARK.Ada.Strings.Maps.Singleton_To_Set (Singleton => Search_Char), Test => SPARK.Ada.Strings.Membership_Inside, Going => SPARK.Ada.Strings.Direction_Forward); if Result = 0 then Char_Found := False; Char_Pos := 1; else Char_Found := True; Char_Pos := Result; end if; end Find_Char; function Lex_Order (First_Name, Second_Name : T) return Order_Types is Result : Order_Types; begin if SPARK.Ada.Strings.Unbounded.Equal_Unbounded_String_Unbounded_String (Left => First_Name.Content, Right => Second_Name.Content) then Result := Neither_First; elsif SPARK.Ada.Strings.Unbounded.Less_Unbounded_String_Unbounded_String (Left => First_Name.Content, Right => Second_Name.Content) then Result := First_One_First; else Result := Second_One_First; end if; return Result; end Lex_Order; function Section (E_Str : T; Start_Pos : Positions; Length : Lengths) return T is Result : T := Empty_String; begin -- Using an 'and then' here will confuse the simplifier, so we -- stick to a simple nested if. if Start_Pos - 1 <= Lengths'Last - Length then if Start_Pos + (Length - 1) <= SPARK.Ada.Strings.Unbounded.Get_Length (Source => E_Str.Content) then Result.Content := SPARK.Ada.Strings.Unbounded.Function_Unbounded_Slice (Source => E_Str.Content, Low => Start_Pos, High => Start_Pos + (Length - 1)); end if; end if; return Result; end Section; function Trim (E_Str : T) return T is subtype String_Length is Integer range 1 .. 
4; subtype String_4 is String (String_Length); White_Space : String_4 := " "; begin White_Space (1) := ' '; White_Space (2) := Ada.Characters.Latin_1.HT; White_Space (3) := Ada.Characters.Latin_1.LF; White_Space (4) := Ada.Characters.Latin_1.CR; return T' (Content => SPARK.Ada.Strings.Unbounded.Function_Trim_Character_Set (Source => E_Str.Content, Left => SPARK.Ada.Strings.Maps.Character_Sequence_To_Set (Arg_Sequence => White_Space), Right => SPARK.Ada.Strings.Maps.Character_Sequence_To_Set (Arg_Sequence => White_Space))); end Trim; procedure Get_Int_From_String (Source : in T; Item : out Integer; Start_Pt : in Positions; Stop : out Natural) is --# hide Get_Int_From_String; begin SPARK_IO.Get_Int_From_String (Source => SPARK.Ada.Strings.Unbounded.Not_SPARK.Slice (Source => Source.Content, Low => Start_Pt, High => SPARK.Ada.Strings.Unbounded.Get_Length (Source => Source.Content)), Item => Item, Start_Pos => Start_Pt, Stop => Stop); end Get_Int_From_String; procedure Put_Int_To_String (Dest : out T; Item : in Integer; Start_Pt : in Positions; Base : in Valid_Base) is type Unsigned_Integer is range 0 .. 2 * (Integer'Last + 1); subtype Hex_Size is Unsigned_Integer range 0 .. 15; type Hex_T is array (Hex_Size) of Character; subtype String_Range is Positive range 1 .. 1; subtype String_1_T is String (String_Range); Hex : constant Hex_T := Hex_T' (0 => '0', 1 => '1', 2 => '2', 3 => '3', 4 => '4', 5 => '5', 6 => '6', 7 => '7', 8 => '8', 9 => '9', 10 => 'A', 11 => 'B', 12 => 'C', 13 => 'D', 14 => 'E', 15 => 'F'); Local_Item : Unsigned_Integer; Local_Base : Unsigned_Integer; String_1 : String_1_T := String_1_T'(" "); Current_Pos : Positive; begin Dest := Empty_String; Local_Base := Unsigned_Integer (Base); --# check Local_Base in Unsigned_Integer(Valid_Base'First) .. Unsigned_Integer(Valid_Base'Last); -- Pad with spaces for I in Positive range 2 .. Start_Pt loop --# assert Local_Base in Unsigned_Integer(Valid_Base'First) .. Unsigned_Integer(Valid_Base'Last) and --# Start_Pt < Lengths'Last - (3 + 1 + 31); SPARK.Ada.Strings.Unbounded.Append_Char (Source => Dest.Content, New_Item => ' '); end loop; --# assert Local_Base in Unsigned_Integer(Valid_Base'First) .. Unsigned_Integer(Valid_Base'Last) and --# Start_Pt < Lengths'Last - (3 + 1 + 31); -- Indicate the base if it is not 10 Current_Pos := Start_Pt; if Local_Base /= 10 then if Local_Base > 10 then SPARK.Ada.Strings.Unbounded.Append_Char (Source => Dest.Content, New_Item => '1'); Current_Pos := Current_Pos + 1; end if; SPARK.Ada.Strings.Unbounded.Append_Char (Source => Dest.Content, New_Item => Hex (Local_Base mod 10)); SPARK.Ada.Strings.Unbounded.Append_Char (Source => Dest.Content, New_Item => '#'); Current_Pos := Current_Pos + 2; end if; --# assert Local_Base in Unsigned_Integer(Valid_Base'First) .. Unsigned_Integer(Valid_Base'Last) and --# Start_Pt < Lengths'Last - (3 + 1 + 31) and --# Current_Pos >= Start_Pt and Current_Pos <= Start_Pt + 3; -- Obtain the absolute value of Item and indicate the sign in -- the generated string. if Item < 0 then Local_Item := Unsigned_Integer (-Long_Long_Integer (Item)); SPARK.Ada.Strings.Unbounded.Append_Char (Source => Dest.Content, New_Item => '-'); Current_Pos := Current_Pos + 1; else Local_Item := Unsigned_Integer (Item); end if; --# assert Local_Base in Unsigned_Integer(Valid_Base'First) .. Unsigned_Integer(Valid_Base'Last) and --# Start_Pt < Lengths'Last - (3 + 1 + 31) and --# Current_Pos >= Start_Pt and Current_Pos <= Start_Pt + 3 + 1 and --# Local_Item mod Local_Base in 0 .. 
15; -- Convert Local_Item and output to the string. We insert into -- the generated string at Current_Pos. while Local_Item >= Local_Base loop --# assert Local_Base in Unsigned_Integer(Valid_Base'First) .. Unsigned_Integer(Valid_Base'Last) and --# Start_Pt < Lengths'Last - (3 + 1 + 31) and --# Current_Pos >= Start_Pt and Current_Pos <= Start_Pt + 3 + 1 and --# Local_Item mod Local_Base in 0 .. 15; String_1 (1) := Hex (Local_Item mod Local_Base); SPARK.Ada.Strings.Unbounded.Procedure_Insert (Source => Dest.Content, Before => Current_Pos, New_Item => String_1); Local_Item := Local_Item / Local_Base; end loop; String_1 (1) := Hex (Local_Item); SPARK.Ada.Strings.Unbounded.Procedure_Insert (Source => Dest.Content, Before => Current_Pos, New_Item => String_1); -- Insert the final base marking if necessary if Local_Base /= 10 then SPARK.Ada.Strings.Unbounded.Append_Char (Source => Dest.Content, New_Item => '#'); end if; end Put_Int_To_String; function Get_Dotted_Common_Prefix_Length (E_Str_A, E_Str_B : T) return Lengths is Equal : Boolean := False; Min_Length : Lengths; Common_Prefix : Lengths := 0; begin Min_Length := Lengths'Min (Get_Length (E_Str_A), Get_Length (E_Str_B)); --# assert Min_Length <= Get_Length (E_Str_A) and --# Min_Length <= Get_Length (E_Str_B); if Min_Length >= 1 then for I in Positions range 1 .. Min_Length loop Equal := Get_Element (E_Str_A, I) = Get_Element (E_Str_B, I); exit when not Equal; if Get_Element (E_Str_A, I) = '.' then Common_Prefix := I - 1; end if; end loop; if Equal then if Get_Length (E_Str_A) = Get_Length (E_Str_B) then Common_Prefix := Get_Length (E_Str_A); elsif Get_Length (E_Str_A) > Get_Length (E_Str_B) then if Get_Element (E_Str_A, Get_Length (E_Str_B) + 1) = '.' then Common_Prefix := Get_Length (E_Str_B); end if; else if Get_Element (E_Str_B, Get_Length (E_Str_A) + 1) = '.' 
then Common_Prefix := Get_Length (E_Str_A); end if; end if; end if; end if; return Common_Prefix; end Get_Dotted_Common_Prefix_Length; function Starts_With (E_Str : T; Str : String) return Boolean is Result : Boolean; begin if Get_Length (E_Str) >= Str'Length then Result := Eq1_String (Section (E_Str => E_Str, Start_Pos => Positions'First, Length => Str'Length), Str); else Result := False; end if; return Result; end Starts_With; procedure Create (File : in out SPARK_IO.File_Type; Name_Of_File : in T; Form_Of_File : in String; Status : out SPARK_IO.File_Status) is --# hide Create; begin SPARK_IO.Create (File => File, Name_Length => SPARK.Ada.Strings.Unbounded.Get_Length (Source => Name_Of_File.Content), Name_Of_File => SPARK.Ada.Strings.Unbounded.Not_SPARK.To_String (Source => Name_Of_File.Content), Form_Of_File => Form_Of_File, Status => Status); end Create; procedure Open (File : in out SPARK_IO.File_Type; Mode_Of_File : in SPARK_IO.File_Mode; Name_Of_File : in T; Form_Of_File : in String; Status : out SPARK_IO.File_Status) is --# hide Open; begin SPARK_IO.Open (File => File, Mode_Of_File => Mode_Of_File, Name_Length => SPARK.Ada.Strings.Unbounded.Get_Length (Source => Name_Of_File.Content), Name_Of_File => SPARK.Ada.Strings.Unbounded.Not_SPARK.To_String (Source => Name_Of_File.Content), Form_Of_File => Form_Of_File, Status => Status); end Open; procedure Put_String (File : in SPARK_IO.File_Type; E_Str : in T) is --# hide Put_String; begin if SPARK.Ada.Strings.Unbounded.Get_Length (Source => E_Str.Content) /= 0 then SPARK_IO.Put_String (File => File, Item => SPARK.Ada.Strings.Unbounded.Not_SPARK.To_String (Source => E_Str.Content), Stop => SPARK.Ada.Strings.Unbounded.Get_Length (Source => E_Str.Content)); end if; end Put_String; procedure Put_Line (File : in SPARK_IO.File_Type; E_Str : in T) is --# hide Put_Line; begin if SPARK.Ada.Strings.Unbounded.Get_Length (Source => E_Str.Content) = 0 then SPARK_IO.New_Line (File => File, Spacing => 1); else SPARK_IO.Put_Line (File => File, Item => SPARK.Ada.Strings.Unbounded.Not_SPARK.To_String (Source => E_Str.Content), Stop => SPARK.Ada.Strings.Unbounded.Get_Length (Source => E_Str.Content)); end if; end Put_Line; procedure Get_Line (File : in SPARK_IO.File_Type; E_Str : out T) is subtype String_Length is Integer range 1 .. 4096; subtype Long_String is String (String_Length); Item : Long_String; Stop : Natural; begin SPARK_IO.Get_Line (File => File, Item => Item, Stop => Stop); E_Str.Content := SPARK.Ada.Strings.Unbounded.String_To_Unbounded_String (Source => Item); E_Str.Content := SPARK.Ada.Strings.Unbounded.Function_Unbounded_Slice (Source => E_Str.Content, Low => Positions'First, High => Stop); end Get_Line; end E_Strings; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_positional_association.adb0000644000175000017500000003225011753202336026510 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Walk_Expression_P) procedure Wf_Positional_Association (Node : in STree.SyntaxNode; E_Stack : in out Exp_Stack.Exp_Stack_Type) is Name_Exp : Sem.Exp_Record; Error_Found : Boolean := False; -------------------------------------------------------------- procedure Check_Record_Completeness (Name_Exp : in Sem.Exp_Record; Node : in STree.SyntaxNode; Error_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Name_Exp, --# Node, --# SPARK_IO.File_Sys, --# STree.Table & --# Error_Found from *, --# Dictionary.Dict, --# Name_Exp; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.positional_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.record_component_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_positional_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_record_component_association; is Highest_Field_Number, Number_Of_Fields : Positive; function Error_Pos (Node : STree.SyntaxNode) return LexTokenManager.Token_Position --# global in STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.positional_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.record_component_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_positional_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_record_component_association; -- Finds the position of the right most expression in an association list is Result : LexTokenManager.Token_Position := LexTokenManager.Null_Token_Position; Local_Node : STree.SyntaxNode; begin if STree.Syntax_Node_Type (Node => Node) = SP_Symbols.positional_association or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_positional_association then -- ASSUME Node = positional_association OR annotation_positional_association Local_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Local_Node = aggregate_or_expression OR positional_association_rep OR -- annotation_aggregate_or_expression OR annotation_positional_association_rep if STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.positional_association_rep or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.annotation_positional_association_rep then -- ASSUME Local_Node = positional_association_rep OR annotation_positional_association_rep Local_Node := STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Local_Node)); elsif STree.Syntax_Node_Type (Node => Local_Node) /= SP_Symbols.aggregate_or_expression and then STree.Syntax_Node_Type (Node => Local_Node) /= SP_Symbols.annotation_aggregate_or_expression then Local_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = aggregate_or_expression OR positional_association_rep OR " & "annotation_aggregate_or_expression OR 
annotation_positional_association_rep in Error_Pos"); end if; -- ASSUME Local_Node = aggregate_or_expression OR annotation_aggregate_or_expression SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.aggregate_or_expression or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.annotation_aggregate_or_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = aggregate_or_expression OR annotation_aggregate_or_expression in Error_Pos"); Result := STree.Node_Position (Node => Local_Node); elsif STree.Syntax_Node_Type (Node => Node) = SP_Symbols.record_component_association or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_record_component_association then -- ASSUME Node = record_component_association OR annotation_record_component_association Local_Node := STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => Node)); -- ASSUME Local_Node = positional_record_component_association OR expression OR -- named_record_component_association OR record_component_selector_name OR -- annotation_positional_record_component_association OR annotation_expression OR -- annotation_named_record_component_association if STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.positional_record_component_association or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.named_record_component_association or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.record_component_selector_name or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.annotation_positional_record_component_association or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.annotation_named_record_component_association then -- ASSUME Local_Node = positional_record_component_association OR -- named_record_component_association OR record_component_selector_name OR -- annotation_positional_record_component_association OR -- annotation_named_record_component_association Local_Node := STree.Last_Sibling_Of (Start_Node => Local_Node); elsif STree.Syntax_Node_Type (Node => Local_Node) /= SP_Symbols.expression and then STree.Syntax_Node_Type (Node => Local_Node) /= SP_Symbols.annotation_expression then Local_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = positional_record_component_association OR expression OR " & "named_record_component_association OR record_component_selector_name OR " & "annotation_positional_record_component_association OR annotation_expression OR " & "annotation_named_record_component_association in Error_Pos"); end if; -- ASSUME Local_Node = expression OR annotation_expression SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.expression or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.annotation_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = expression OR annotation_expression in Error_Pos"); Result := STree.Node_Position (Node => Local_Node); end if; return Result; end Error_Pos; begin -- Check_Record_Completeness if Name_Exp.Param_Count < Natural'Last and then Dictionary.GetNumberOfComponents (Name_Exp.Type_Symbol) > Natural'First then Highest_Field_Number := Name_Exp.Param_Count + 1; Number_Of_Fields := Dictionary.GetNumberOfComponents (Name_Exp.Type_Symbol); for I in Positive range Highest_Field_Number .. 
Number_Of_Fields loop Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 104, Reference => ErrorHandler.No_Reference, Position => Error_Pos (Node => Node), Id_Str => Dictionary.GetSimpleName (Dictionary.GetRecordComponent (Name_Exp.Type_Symbol, I))); end loop; end if; end Check_Record_Completeness; -------------------------------------------------------------- function Named_Record_Extension_Aggregate (Node : STree.SyntaxNode) return Boolean --# global in STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.positional_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.record_component_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_positional_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_record_component_association; is Local_Node : STree.SyntaxNode; Result : Boolean := False; begin if STree.Syntax_Node_Type (Node => Node) = SP_Symbols.positional_association or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_positional_association then -- ASSUME Node = positional_association OR annotation_positional_association Result := False; elsif STree.Syntax_Node_Type (Node => Node) = SP_Symbols.record_component_association or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_record_component_association then -- ASSUME Node = record_component_association OR annotation_record_component_association Local_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Local_Node = named_record_component_association OR positional_record_component_association OR -- annotation_named_record_component_association OR annotation_positional_record_component_association if STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.named_record_component_association or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.annotation_named_record_component_association then -- ASSUME Local_Node = named_record_component_association OR annotation_named_record_component_association Result := True; elsif STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.positional_record_component_association or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.annotation_positional_record_component_association then Result := False; else Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = positional_record_component_association OR named_record_component_association OR " & "annotation_positional_record_component_association OR " & "annotation_named_record_component_association in Named_Record_Component_Association"); end if; end if; return Result; end Named_Record_Extension_Aggregate; begin -- Wf_Positional_Association if not Named_Record_Extension_Aggregate (Node => Node) then Exp_Stack.Pop (Item => Name_Exp, Stack => E_Stack); if Dictionary.TypeIsRecord (Name_Exp.Type_Symbol) then Check_Record_Completeness (Name_Exp => Name_Exp, Node => Node, Error_Found => Error_Found); end if; Name_Exp.Errors_In_Expression := Name_Exp.Errors_In_Expression or else Error_Found; Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); end if; end Wf_Positional_Association; spark-2012.0.deb/examiner/sparklex-lex-apostintro.adb0000644000175000017500000000615511753202336021547 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free 
software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (SparkLex.Lex) procedure ApostIntro (Curr_Line : in out Line_Context; Token : out SP_Symbols.SP_Terminal) is Ch1, Ch2, Ch3, Ch4 : Character; begin LineManager.Accept_Char (Curr_Line => Curr_Line); -- Apostrophe already recognised Ch1 := E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos); if Graphic_Character (Ch => Ch1) then LineManager.Lookahead_Char (Curr_Line => Curr_Line, Ch => Ch2); if Ch2 = ''' then if Ch1 /= '(' then LineManager.Accept_Lookahead (Curr_Line => Curr_Line); -- Any character literal except for '('. Token := SP_Symbols.character_literal; else LineManager.Lookahead_Char (Curr_Line => Curr_Line, Ch => Ch3); if Graphic_Character (Ch => Ch3) then LineManager.Lookahead_Char (Curr_Line => Curr_Line, Ch => Ch4); LineManager.Reject_Lookahead (Curr_Line => Curr_Line); if Ch4 = ''' then -- Qualified expression - type_mark'('X'), -- where X is any graphic character. Token := SP_Symbols.apostrophe; else LineManager.Accept_Char (Curr_Line => Curr_Line); LineManager.Accept_Char (Curr_Line => Curr_Line); -- The character literal '('. Token := SP_Symbols.character_literal; end if; else LineManager.Reject_Lookahead (Curr_Line => Curr_Line); LineManager.Accept_Char (Curr_Line => Curr_Line); LineManager.Accept_Char (Curr_Line => Curr_Line); -- The character literal '('. Token := SP_Symbols.character_literal; end if; end if; else LineManager.Reject_Lookahead (Curr_Line => Curr_Line); Token := SP_Symbols.apostrophe; end if; else LineManager.Reject_Lookahead (Curr_Line => Curr_Line); Token := SP_Symbols.apostrophe; end if; end ApostIntro; spark-2012.0.deb/examiner/lextokenstacks.adb0000644000175000017500000000377311753202336017775 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with SystemErrors; package body LexTokenStacks is function IsEmpty (S : Stacks) return Boolean is begin return S.Ptr = 0; end IsEmpty; procedure Clear (S : out Stacks) is begin S := Stacks'(Vector => Vectors'(others => LexTokenManager.Null_String), Ptr => 0); end Clear; procedure Push (S : in out Stacks; Item : in LexTokenManager.Lex_String) is begin if S.Ptr = MaxDepth then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Lex_Stack_Overflow, Msg => ""); end if; S.Ptr := S.Ptr + 1; S.Vector (S.Ptr) := Item; end Push; procedure Pop (S : in out Stacks; Item : out LexTokenManager.Lex_String) is begin if IsEmpty (S) then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Lex_Stack_Underflow, Msg => ""); end if; Item := S.Vector (S.Ptr); S.Ptr := S.Ptr - 1; end Pop; end LexTokenStacks; spark-2012.0.deb/examiner/cells-utility.ads0000644000175000017500000002231711753202335017551 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Cells; --# inherit Cells, --# Cell_Storage, --# Dictionary, --# LexTokenManager, --# SP_Symbols, --# Statistics, --# SystemErrors; package Cells.Utility is type Type_Attribute is (Tick_First, Tick_Last); ------------------------------------------------------------------------------ -- General utility ------------------------------------------------------------------------------ procedure Create_Binary_Op_Cell (VCG_Heap : in out Cells.Heap_Record; Left : in Cells.Cell; Op : in SP_Symbols.SP_Symbol; Right : in Cells.Cell; Result : out Cells.Cell); --# global in out Statistics.TableUsage; --# derives Result from VCG_Heap & --# Statistics.TableUsage from *, --# VCG_Heap & --# VCG_Heap from *, --# Left, --# Op, --# Right; ------------------------------------------------------------------------------ -- Booleans ------------------------------------------------------------------------------ function Is_True (VCG_Heap : in Cells.Heap_Record; C : in Cells.Cell) return Boolean; --# global in Dictionary.Dict; function Is_False (VCG_Heap : in Cells.Heap_Record; C : in Cells.Cell) return Boolean; --# global in Dictionary.Dict; procedure Create_Bool (VCG_Heap : in out Cells.Heap_Record; Value : in Boolean; C : out Cells.Cell); --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives C from VCG_Heap & --# Statistics.TableUsage from *, --# VCG_Heap & --# VCG_Heap from *, --# Dictionary.Dict, --# Value; procedure Create_Not (VCG_Heap : in out Cells.Heap_Record; Predicate : in Cells.Cell; Negation : out Cells.Cell); --# global in out Statistics.TableUsage; --# derives Negation from VCG_Heap & --# Statistics.TableUsage from *, --# VCG_Heap & --# VCG_Heap from *, --# Predicate; procedure Create_And (VCG_Heap : in out Cells.Heap_Record; Left : in Cells.Cell; Right : in Cells.Cell; Conjunct : out Cells.Cell); --# global in out Statistics.TableUsage; --# derives Conjunct from VCG_Heap & --# Statistics.TableUsage from *, --# VCG_Heap & --# VCG_Heap from *, --# Left, --# Right; procedure Create_Implies (VCG_Heap : in out Cells.Heap_Record; Antecedent : in Cells.Cell; Consequent : in Cells.Cell; Implication : out Cells.Cell); --# global in out Statistics.TableUsage; --# derives Implication from VCG_Heap & --# Statistics.TableUsage from *, --# VCG_Heap & --# VCG_Heap from *, --# Antecedent, --# Consequent; procedure Simplify (VCG_Heap : in out Cells.Heap_Record; C : in out Cells.Cell); --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives C, --# Statistics.TableUsage, --# VCG_Heap from *, --# C, --# Dictionary.Dict, --# VCG_Heap; ------------------------------------------------------------------------------ -- Utility ------------------------------------------------------------------------------ -- Join New_Term into the Conjuct. procedure Conjoin (VCG_Heap : in out Cells.Heap_Record; New_Term : in Cells.Cell; Conjunct : in out Cells.Cell); --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives Conjunct, --# Statistics.TableUsage, --# VCG_Heap from *, --# Conjunct, --# Dictionary.Dict, --# New_Term, --# VCG_Heap; ------------------------------------------------------------------------------ -- Types (general) ------------------------------------------------------------------------------ -- This produces the DAG for The_Type'First, etc... 
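   -- An illustrative call (The_Index_Type and Upper_Bound_Cell are hypothetical names):
   --    Create_Type_Attribute (VCG_Heap      => VCG_Heap,
   --                           The_Type      => The_Index_Type,
   --                           The_Attribute => Tick_Last,
   --                           Result        => Upper_Bound_Cell);
   -- yields a DAG rooted at Upper_Bound_Cell modelling The_Index_Type'Last.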
procedure Create_Type_Attribute (VCG_Heap : in out Cells.Heap_Record; The_Type : in Dictionary.Symbol; The_Attribute : in Type_Attribute; Result : out Cells.Cell); --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives Result, --# VCG_Heap from Dictionary.Dict, --# The_Attribute, --# The_Type, --# VCG_Heap & --# Statistics.TableUsage from *, --# Dictionary.Dict, --# The_Attribute, --# The_Type, --# VCG_Heap; ------------------------------------------------------------------------------ -- Records ------------------------------------------------------------------------------ procedure Create_Record_Access (VCG_Heap : in out Cells.Heap_Record; The_Record : in Cells.Cell; The_Component : in Dictionary.Symbol; The_Field : out Cells.Cell); --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage from *, --# VCG_Heap & --# The_Field from VCG_Heap & --# VCG_Heap from *, --# Dictionary.Dict, --# The_Component, --# The_Record; ------------------------------------------------------------------------------ -- Arrays ------------------------------------------------------------------------------ procedure Create_Array_Access (VCG_Heap : in out Cells.Heap_Record; The_Array : in Cells.Cell; The_Index : in Cells.Cell; The_Element : out Cells.Cell); --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# VCG_Heap from *, --# The_Array, --# The_Index, --# VCG_Heap & --# The_Element from The_Array, --# The_Index, --# VCG_Heap; ------------------------------------------------------------------------------ -- Stashing things in cells that we probably shouldn't. ------------------------------------------------------------------------------ procedure Create_Scope_Cell (VCG_Heap : in out Cells.Heap_Record; The_Scope : in Dictionary.Scopes; The_Cell : out Cells.Cell); --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage from *, --# VCG_Heap & --# The_Cell from VCG_Heap & --# VCG_Heap from *, --# The_Scope; function Scope_Cell_Get_Scope (VCG_Heap : in Cells.Heap_Record; Scope_Cell : in Cells.Cell) return Dictionary.Scopes; end Cells.Utility; spark-2012.0.deb/examiner/sem-wf_subprogram_annotation.adb0000644000175000017500000001751611753202336022624 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem) procedure Wf_Subprogram_Annotation (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Subprog_Sym : in Dictionary.Symbol; First_Seen : in Boolean; The_Heap : in out Heap.HeapRecord) is -- look up table: if First_Seen then we are dealing with Abstract spec else Refined type Which_Abstractions is array (Boolean) of Dictionary.Abstractions; Which_Abstraction : constant Which_Abstractions := Which_Abstractions'(False => Dictionary.IsRefined, True => Dictionary.IsAbstract); Glob_Def_Err : Boolean := False; Global_Has_Errors : Boolean := False; Report_Node_Pos : LexTokenManager.Token_Position; Global_Node : STree.SyntaxNode; Dependency_Node : STree.SyntaxNode; Declare_Node : STree.SyntaxNode; Constraint_Node : STree.SyntaxNode; begin --# accept Flow, 10, Constraint_Node, "Expected ineffective assignment"; Get_Subprogram_Anno_Key_Nodes (Node => Node, Global_Node => Global_Node, Dependency_Node => Dependency_Node, Declare_Node => Declare_Node, Constraint_Node => Constraint_Node); --# end accept; Report_Node_Pos := Node_Position (Node => Parent_Node (Current_Node => Node)); if Syntax_Node_Type (Node => Node) = SP_Symbols.procedure_annotation and then not First_Seen and then Global_Node = STree.NullNode then Global_Has_Errors := True; ErrorHandler.Semantic_Error (Err_Num => 87, Reference => ErrorHandler.No_Reference, Position => Report_Node_Pos, Id_Str => Dictionary.GetSimpleName (Subprog_Sym)); end if; if Syntax_Node_Type (Node => Global_Node) = SP_Symbols.moded_global_definition then -- ASSUME Global_Node = moded_global_definition Report_Node_Pos := Node_Position (Node => Global_Node); if Dictionary.Is_Generic_Subprogram (The_Symbol => Subprog_Sym) then ErrorHandler.Semantic_Error (Err_Num => 638, Reference => ErrorHandler.No_Reference, Position => Report_Node_Pos, Id_Str => LexTokenManager.Null_String); Global_Has_Errors := True; else Wf_Global_Definition (Node => Global_Node, Scope => Current_Scope, Subprog_Sym => Subprog_Sym, First_Seen => First_Seen, Sem_Err_Found => Glob_Def_Err); Global_Has_Errors := Global_Has_Errors or else Glob_Def_Err; end if; end if; if Syntax_Node_Type (Node => Dependency_Node) = SP_Symbols.dependency_relation then -- ASSUME Dependency_Node = dependency_relation Report_Node_Pos := Node_Position (Node => Dependency_Node); Dependency_Relation.Wf_Dependency_Relation (Node => Dependency_Node, Scope => Current_Scope, Subprog_Sym => Subprog_Sym, First_Seen => First_Seen, Glob_Def_Err => Glob_Def_Err, The_Heap => The_Heap); end if; if Syntax_Node_Type (Node => Declare_Node) = SP_Symbols.declare_annotation then -- ASSUME Declare_Node = declare_annotation Wf_Declare_Annotation (Node => Declare_Node, Scope => Current_Scope, Task_Or_Proc => Subprog_Sym, First_Seen => First_Seen, The_Heap => The_Heap); end if; if Syntax_Node_Type (Node => Node) = SP_Symbols.procedure_annotation then Add_Derives_Stream_Effects (Node_Pos => Node_Position (Node => Node), Subprog_Sym => Subprog_Sym, Abstraction => Which_Abstraction (First_Seen)); end if; -- mark subprogram as having incorrect signature if necessary if Global_Has_Errors then Dictionary.SetSubprogramSignatureNotWellformed (Which_Abstraction (First_Seen), Subprog_Sym); end if; -- Raise error 501 (dependency relation expected) if: -- 1. flow=info and there was no derives, or -- 2. flow=auto, global is refined and there was a derives on the spec, -- 3. flow=data, language=83 and there was no derives. 
-- Generally, if flow=data (and lang /= 83) or flow=auto a derives is not required because -- there will be a moded global annotation if one is necessary. However, if flow=auto and there -- are refined own variables in the annotation, and there was a dependency annotation on the -- spec then one is expected on the body (case 2 in the list above). if Syntax_Node_Type (Node => Node) = SP_Symbols.procedure_annotation then if Dependency_Node = STree.NullNode then -- no derives on body if CommandLineData.Content.Flow_Option = CommandLineData.Info_Flow -- 1 or else (CommandLineData.Content.Flow_Option = CommandLineData.Auto_Flow and then not First_Seen and then Dictionary.GetHasDerivesAnnotation (Subprog_Sym)) --2 or else (CommandLineData.Content.Flow_Option = CommandLineData.Data_Flow and then CommandLineData.Content.Language_Profile = CommandLineData.SPARK83) then --3 -- Here we definitely know that the derives annotation is missing if Global_Node = STree.NullNode and then Declare_Node = STree.NullNode then Dictionary.SetSubprogramSignatureNotWellformed (Dictionary.IsAbstract, Subprog_Sym); ErrorHandler.Semantic_Error (Err_Num => 154, Reference => ErrorHandler.No_Reference, Position => Report_Node_Pos, Id_Str => Dictionary.GetSimpleName (Subprog_Sym)); else Dictionary.SetSubprogramSignatureNotWellformed (Which_Abstraction (First_Seen), Subprog_Sym); ErrorHandler.Semantic_Error (Err_Num => 501, Reference => ErrorHandler.No_Reference, Position => Report_Node_Pos, Id_Str => LexTokenManager.Null_String); end if; end if; else -- there was a derives on the body -- Similarly, if there was a dependency found, and flow=auto, and there was no dependency on the spec, -- and the annotation is in terms of refined variables, then raise semantic error 509. if CommandLineData.Content.Flow_Option = CommandLineData.Auto_Flow and then not First_Seen and then not Dictionary.GetHasDerivesAnnotation (Subprog_Sym) then Dictionary.SetSubprogramSignatureNotWellformed (Which_Abstraction (First_Seen), Subprog_Sym); ErrorHandler.Semantic_Error (Err_Num => 509, Reference => ErrorHandler.No_Reference, Position => Report_Node_Pos, Id_Str => LexTokenManager.Null_String); end if; end if; end if; --# accept Flow, 33, Constraint_Node, "Expected to be neither referenced nor exported"; end Wf_Subprogram_Annotation; spark-2012.0.deb/examiner/sem-compunit-walkstatements-wf_case.adb0000644000175000017500000002111611753202336024013 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.CompUnit.WalkStatements) procedure Wf_Case (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Table : in out RefList.HashTable; Component_Data : in out ComponentManager.ComponentData) is Case_Exp : Exp_Record; Ref_Var : SeqAlgebra.Seq; Case_Flags : Typ_Case_Flags; Upper_Bound : Typ_Type_Bound; Lower_Bound : Typ_Type_Bound; Complete_ADT : CompleteCheck.T; Complete_Check_Range_From : Integer; Complete_Check_Range_To : Integer; Complete_Check_Range_State : CompleteCheck.TypRangeState; Child : STree.SyntaxNode; begin SeqAlgebra.CreateSeq (TheHeap, Ref_Var); Child := Child_Node (Current_Node => Node); -- ASSUME Child = expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Child) = SP_Symbols.expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child = expression in Wf_Case"); Walk_Expression_P.Walk_Expression (Exp_Node => Child, Scope => Scope, Type_Context => Dictionary.GetUnknownTypeMark, Context_Requires_Static => False, Ref_Var => Ref_Var, Result => Case_Exp, Component_Data => Component_Data, The_Heap => TheHeap); -- distinguish between the different possible situations, and -- set up the case checking accordingly if Dictionary.IsUnknownTypeMark (Case_Exp.Type_Symbol) or else not Dictionary.IsDiscreteTypeMark (Case_Exp.Type_Symbol, Scope) then Upper_Bound := Unknown_Type_Bound; Lower_Bound := Unknown_Type_Bound; -- for unknown or non-discrete types -- for unknown types still attempt overlap checking ErrorHandler.Semantic_Error (Err_Num => 46, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); Case_Flags := Typ_Case_Flags' (Check_Completeness => False, Signal_Out_Of_Range => False, Out_Of_Range_Seen => False, Check_Overlap => Dictionary.IsUnknownTypeMark (Case_Exp.Type_Symbol), Warn_No_Others => False, Others_Mandatory => False); -- the completeness checker object will not be used if the type mark -- is not discrete Complete_Check_Range_From := -(ExaminerConstants.CompleteCheckSize / 2); Complete_Check_Range_To := (Complete_Check_Range_From + ExaminerConstants.CompleteCheckSize) - 1; -- NB we 'know' that Complete_Check_Range_State will return RangeDoesFit, -- so the value is ignored, giving a flow error --# accept Flow, 10, Complete_Check_Range_State, "Expected ineffective assignment"; CompleteCheck.Init (Complete_ADT, Complete_Check_Range_From, Complete_Check_Range_To, Complete_Check_Range_State); --# end accept; elsif Dictionary.IsUniversalIntegerType (Case_Exp.Type_Symbol) then Upper_Bound := Unknown_Type_Bound; Lower_Bound := Unknown_Type_Bound; -- for universal Integer: others is mandatory Case_Flags := Typ_Case_Flags' (Check_Completeness => False, Signal_Out_Of_Range => True, Out_Of_Range_Seen => False, Check_Overlap => True, Warn_No_Others => False, Others_Mandatory => True); Complete_Check_Range_From := -(ExaminerConstants.CompleteCheckSize / 2); Complete_Check_Range_To := (Complete_Check_Range_From + ExaminerConstants.CompleteCheckSize) - 1; -- NB we 'know' that Complete_Check_Range_State will return RangeDoesFit, -- so the value is ignored, giving a flow error --# accept Flow, 10, Complete_Check_Range_State, "Expected ineffective assignment"; CompleteCheck.Init (Complete_ADT, Complete_Check_Range_From, Complete_Check_Range_To, Complete_Check_Range_State); --# end accept; else -- get bounds from dictionary Get_Type_Bounds (Type_Symbol => Case_Exp.Type_Symbol, Lower_Bound => 
Lower_Bound, Upper_Bound => Upper_Bound); if not (Lower_Bound.Is_Defined and then Upper_Bound.Is_Defined) then -- one or other bound is unknown to the dictionary Case_Flags := Typ_Case_Flags' (Check_Completeness => False, Signal_Out_Of_Range => True, Out_Of_Range_Seen => False, Check_Overlap => True, Warn_No_Others => True, Others_Mandatory => False); -- if both bounds unknown use symmetric range if (not Lower_Bound.Is_Defined) and then (not Upper_Bound.Is_Defined) then Complete_Check_Range_From := -(ExaminerConstants.CompleteCheckSize / 2); Complete_Check_Range_To := (Complete_Check_Range_From + ExaminerConstants.CompleteCheckSize) - 1; -- otherwise use range extending from known bound elsif Lower_Bound.Is_Defined then Complete_Check_Range_From := Lower_Bound.Value; if Complete_Check_Range_From <= (Integer'Last - ExaminerConstants.CompleteCheckSize) then Complete_Check_Range_To := (Complete_Check_Range_From + ExaminerConstants.CompleteCheckSize) - 1; else Complete_Check_Range_To := Integer'Last; end if; else -- Upper_Bound.IsDefined Complete_Check_Range_To := Upper_Bound.Value; if Complete_Check_Range_To >= (Integer'First + ExaminerConstants.CompleteCheckSize) then Complete_Check_Range_From := (Complete_Check_Range_To - ExaminerConstants.CompleteCheckSize) + 1; else Complete_Check_Range_From := Integer'First; end if; end if; -- NB we 'know' that Complete_Check_Range_State will return RangeDoesFit, -- so the value is ignored, giving a flow error --# accept Flow, 10, Complete_Check_Range_State, "Expected ineffective assignment"; CompleteCheck.Init (Complete_ADT, Complete_Check_Range_From, Complete_Check_Range_To, Complete_Check_Range_State); --# end accept; else -- both bounds known to dictionary: set up completeness checker CompleteCheck.Init (Complete_ADT, Lower_Bound.Value, Upper_Bound.Value, Complete_Check_Range_State); if Complete_Check_Range_State = CompleteCheck.RangeDoesFit then -- range fits in completeness checker Case_Flags := Typ_Case_Flags' (Check_Completeness => True, Signal_Out_Of_Range => False, Out_Of_Range_Seen => False, Check_Overlap => True, Warn_No_Others => False, Others_Mandatory => False); else -- range does not fit in completeness checker Case_Flags := Typ_Case_Flags' (Check_Completeness => False, Signal_Out_Of_Range => True, Out_Of_Range_Seen => False, Check_Overlap => True, Warn_No_Others => True, Others_Mandatory => False); end if; end if; end if; Case_Stack.Push (Case_Flags => Case_Flags, Complete_ADT => Complete_ADT, Sym => Case_Exp.Type_Symbol, Lower_Bound => Lower_Bound, Upper_Bound => Upper_Bound); -- add reference variable list to RefList hash table RefList.AddRelation (Table, TheHeap, Child, Dictionary.NullSymbol, Ref_Var); end Wf_Case; spark-2012.0.deb/examiner/errorhandler.ads0000644000175000017500000024665611753202336017454 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- ErrorHandler -- -- Purpose: -- The package ErrorHandler is used to report almost all warnings and errors -- detected by the Examiner during analysis and produce summaries of the -- number of warnings and errors encountered. -- -- ErrorHandler is also responsible for the definition and suppression -- of warnings specified in the warning control file (see Examiner User -- Manual section 4.6) -- -- The processing of justifications using the accept annotation -- (see Examiner User Manual section 9.1) is also managed by ErrorHandler. -- -- The Examiner will terminate with a status code dependent on whether an -- error has occurred and if applicable the class of error. The ErrorHandler -- package provides a function for determining the status code. The exit -- codes are declared below. -- -- Errors are accumulated as they are encountered in ErrorContexts. -- The ErrorHandler only manages a single ErrorContext (represented -- by the own variable ErrorContext) but an ErrorContext for each source -- file read is maintained by the ContextManager. -- -- The main ErrorHandler interface provides subprograms to manage -- ErrorContexts, add various classes of errors and warnings to the -- accumulated errors and others to initialize, reset and list or print the -- accumulated warnings and errors or summaries thereof. -- -- Another section of the ErrorHandler interface is concerned with the -- Warning Control file and listing the warning messages that have been -- suppressed. -- -- A further part of the ErrorHandler interface manages Justifications. -- -- Clients: -- The ErrorHandler module is used for reporting almost all errors except -- errors detected within the command line (See examiner.adb) and fatal -- errors which immediately terminate the Examiner by calling -- SystemErrors.FatalError. -- Consequently ErrorHandler is used by many parts of the Examiner and has -- many clients, for instance, the FlowAnalyser and Sem. -- ErrorHandler is also used by SPARKmake. -- -- Use: -- Reporting Errors and ErrorContexts: -- -- (1) When a source file (or configuration file) is opened initialize the -- ErrorContext by calling ErrorHandler.ErrorInit -- (or errorHandler.SparkMakeInit from SPARKmake). -- This ErrorContext is retrieved by calling GetErrorContext and is -- associated with the source file and saved by calling -- ContextManager.SetContext (See mainloop.adb). -- -- (2) For each compilation unit (or a configuration file) to be analysed -- retrieve the ErrorContext associated with the file containing the -- compilation unit by calling ContextManager.GetErrorContext and set -- the current ErrorContext maintained by the ErrorHandler to the -- retrieved ErrorContext by calling ErrorHandler.SetErrorContext -- (See mainloop.adb) -- -- (3) Prior to analysis a body or package initialization call -- ErrorHandler.ErrorReset to reset the severity of the ErrorContext as -- this property only applies to a single body or package -- initialization (See sem-compunit.adb). -- -- (4) To add a warning or error to the accumulated messages of the -- ErrorContext call an appropriate interface subprogram from those -- declared below. 
-- -- (5) On completing analysis of each subprogram body -- (subprogram_implementation) call ErrorHandler.FlushEchoMessages to -- complete any messages relating to the subprogram that are sent to -- the screen (standard output) (See sem-compunit.adb). -- -- (6) After the analysis of each unit ErrorHandler.GetErrorSeverity may -- be called to determine the worst severity of any errors encountered. -- The severity levels are declared below (See sem-compunit.adb). -- -- (7) Call ErrorHander.PrintErrors to generate a listing file annotated -- with the accumulated error messages for the corresponding source -- or configuration file (See mainloop.adb). -- -- Call ErrorHandler.AppendErrors to add errors related to a source -- or configuration file to the report file (see mainloop.adb). -- -- (8) Call ErrorHandler.EchoTotalErrorCount after all files have been -- analysed to display the total error count on the screen -- (standard_output) (See mainloop.adb). -- -- (9) Call ErrorHandler.GetErrorsType to obtain the exit code which -- defines (the worst) type of error encountered during analysis. -- See below for definition of values. -- -- (10) If references to Ada LRM 83 or 95, SPARK 83 or 95, -- Examiner User Manual, VCG or RTC Manuals or -- Barnes "High Integrity Software" are associated with any warning or -- error messages issued may be listed to a file by calling -- ErrorHandler.OutputReferenceList. It is normally called to add -- these to the report file unless the -brief switch is specified -- (See mainloop.adb). -- -- (11) If it is not possible to open any input file (configuration file, -- meta file or source file), ErrorHandler.SetFileOpenError should -- be called to register this failure. The failure is not recorded as -- part of an ErrorContext and related to a single file but as a -- pervasive event (See mainloop.adb). -- -- Warning File: -- -- (1) Read in the warning control file by calling -- ErrorHandler.ReadWarningFile (See mainloop.adb). -- -- Warnings which are correctly designated in the warning control file -- (see Examiner User Manual 9.2.1) will now be suppressed. -- -- (2) A list of suppressed may be written to a file by calling -- ErrorHandler.OutputWarningList. It is called in mainloop.adb -- to write the list to the report file. -- -- Justifications using Accept Annotations: -- -- (1) Call ErrorHandler.StartUnit when a new unit is to be analysed -- (see sem-compunit.adb). -- -- (2) Call ErrorHandler.StartJustification when an accept annoation is -- encountered (See sem-compunit-wf_justification_statement.adb). -- -- The errors justified by the accept annotation will no longer be -- reported except in the justification summary. -- -- (3) Call ErrorHandler.EndJustification when an end accept annotation is -- encountered (See sem-compunit-wf_justification_statement.adb). -- -- (4) Call ErrorHandler.EndUnit when the analysis of a unit is complete -- (See sem-compunit.adb). -- -- Extension: -- Warnings: -- -- To add a new warning -- -- (1) Choose a unique warning number within the range defined by -- Error_Types.ErrNumRange - distinct from any number listed -- in the Examiner User Manual section 6.3 "Warning Messages". -- -- (2) Consider whether the warning is to be suppressible and if suppressible -- whether it is a severe warning. -- -- (3) If it is to be suppressible a new keyword must be introduced. -- -- If the warning is to be suppressible... 
-- -- (4) Add a suitable enumeration element to the type Warning_Elements putting -- it into the correct part of the list depending on whether it is a -- severe (starred) warning or not. -- Keep Unexpected_Address_Clauses as the last non-severe element of -- the enumeration or it corrupts the subtype definition of -- "Severe_Warnings". -- -- (5) In errorhandler-warningstatus.adb, procedure Get_Description, -- add an extra case alternative to describe the new warning. -- -- (6) In errorhandler-warningstatus-readwarningfile.adb extend procedure -- Process_Option to recognise new keyword. -- -- (7) In the body of ErrorHandler, in procedures Semantic_Warning_With_Position -- or Semantic_Warning_Without_Position, add if/elsif clauses to increment -- counters, rather than display warning, when suppression has been -- selected. -- For all warnings independent of suppression -- -- (8) If the chosen error number is less than 400 add an extra case -- alternative to procedure WarningWithPosition define the new warning -- text and its explanation in file -- errorhandler-conversions-tostring-warningwithposition.adb. -- -- If the chosen error number is greater or equal to 400 add an extra -- case alternative to procedure WarningWithoutPosition define the new -- warning text and its explanation in file -- errorhandler-conversions-tostring-warningwithoutposition.adb. -- -- In either of the above two cases the format of the warning text to be -- added is a catenation of strings to an E_String.T for the standard -- warning text followed by a number of comment lines starting with --!. -- The text in the comments starting with --! constitute the error -- explanation which is given when the Examiner -error_explanation -- command line switch is set to first or every_occurrence. -- -- Two new files are automatically generated from the above two files -- during the build process to provide SPARK procedures which incorporate -- the error explanations see -- errorhandler-conversions-tostring-warningwithposition-warningwithpositionexpl.adb and -- errorhandler-conversions-tostring-warningwithoutposition-warningwithoutpositionexpl.adb. -- -- To add a new note -- -- (1) Choose a unique note number within the range defined by -- Error_Types.ErrNumRange - distinct from any number listed -- in the Examiner User Manual section 6.4 "Notes". -- -- (2) Add an extra case alternative to procedure Note in -- errorhandler-conversions-tostring-note.adb defining the text and -- explanation of the note as described for warnings (8). -- -- To add a new semantic error -- (excluding those related to inconsistencies between abstract and refined -- dependency relations) -- -- (1) Choose a unique semantic error number within the range defined by -- Error_Types.ErrNumRange - distinct from any number listed -- in the Examiner User Manual section 6.2 "Semantic Errors". -- -- (2) Add an extra case alternative to procedure SemanticErr in -- errorhandler-conversions-tostring-semanticerr.adb defining the text -- and explanation of the semantic error as described for warnings (8). -- -- (3) If the error message contains a reference to a document which is -- already in the list of document references maintained in -- errorhandler-conversions-tostring-appendreference.adb, then when a -- semantic error reporting subprogram is called the document reference -- number may be given. 
-- -- (4) If the message contains a reference to a document which is not in -- the list of documents in -- errorhandler-conversions-tostring-appendreference.adb then in the file -- increment the value of the constant MaxReferences and add a new -- document reference as the last element of the constant array -- ReferenceTable. -- -- To add a new error related to inconsistencies between abstract and -- refined dependency relations -- -- (1) Choose a unique semantic error number within the range defined by -- Error_Types.ErrNumRange - distinct from any number listed -- in the Examiner User Manual section 8.3.7 -- "Inconsistencies between abstract and refined dependency relations". -- -- (2) Add an extra case alternative to procedure DepSemanticErr in -- errorhandler-conversions-tostring-depsemanticerr.adb defining the text -- and explanation of the semantic error as described for warnings (8). -- -- To add a new control-flow error -- -- (1) Choose a unique control-flow error number within the range defined by -- Error_Types.ErrNumRange - distinct from any number listed -- in the Examiner User Manual section 7.2 "Control-flow Errors". -- -- (2) Add a new enumeration literal to the type ControlFlowErrType declared -- below to represent the new error. -- -- (3) Add an extra case alternative to procedure ControlFlowError in -- errorhandler-conversions-tostring-controlflowerror.adb defining the -- text and explanation of the control-flow error as described for -- warnings (8). Additionally associate the chosen error number -- with the text using a comment with the syntax --! ErrorNumber. -- -- To add a new data-flow error -- -- (1) Choose a unique data-flow error number within the range defined by -- Error_Types.ErrNumRange - distinct from any number listed -- in the Examiner User Manual section 8.3 -- "Data and information-flow Errors". -- -- (2) Add a new enumeration literal to the type DataFlowErrType declared -- below to represent the new error. -- -- (3) In errorhandler.adb, procedure DataFlowError, add the new data-flow -- enumeration to the appropriate alternative of the case statement -- dependent on whether the new error is a conditional or unconditional -- flow-error. -- -- (4) Add an extra case alternative to either procedure CondlFlowErr or -- procedure UncondFlowErr in -- errorhandler-conversions-tostring-condlflowerr.adb or -- errorhandler-conversions-tostring-uncondflowerr.adb respectively -- defining the text and explanation of the data-flow error as described -- for warnings (8). Additionally associate the chosen error number -- with the text using a comment with the syntax --! ErrorNumber. -- -- To add a new information-flow error -- -- (1) Choose a unique information-flow error number within the range defined -- by Error_Types.ErrNumRange - distinct from any number listed -- in the Examiner User Manual section 8.3 -- "Data and information-flow Errors". -- -- (2) Add a new enumeration literal to one of the types FullDependErrType, -- StabilityErrType, IndexType or UsageErrType dependent on the category -- of the error to be reported. If a new litereral is to be added to -- FullDependencyErrType it must appear immediately before NotUsedNew so -- that it is in the subtype DependencyErrType. -- -- (3) If the error is of the subtype DependencyErrType determine whether -- the error is a conditional or unconditional flow error and -- Add the new enumeration literal to the appropriate case alternative -- of the procedure DependencyError in errorhandler.adb. 
-- -- (4) Add an extra case alternative to the appropriate procedure -- CondlDependency, IneffectiveStatement, StabilityError, -- UncondDependency or UsageError in -- errorhandler-conversions-tostring-condldependency.adb -- errorhandler-conversions-tostring-ineffectivestatement.adb -- errorhandler-conversions-tostring-stabilityerror.adb -- errorhandler-conversions-tostring-unconddependency.adb -- errorhandler-conversions-tostring-usageerror.adb -- respectively defining the text and explanation of the information-flow -- error as described for warnings (8). -- Additionally associate the chosen error number with the text using a -- comment with the syntax --! ErrorNumber. -- -- Lexical and syntax errors are not extended in this package but are -- extended within the SPARKLex and SPParser subsystems. -- -- It is not expected that the Justification interface will be extended. -- -------------------------------------------------------------------------------- with Dictionary; with Error_IO; with Error_Types; with ExaminerConstants; with E_Strings; with LexTokenManager; with SPARK_IO; with SP_Expected_Symbols; with SP_Symbols; use type Dictionary.Scopes; use type Dictionary.Symbol; use type Error_Types.Error_Class; use type Error_Types.Names; use type Error_Types.NumericError; use type Error_Types.StringError; use type LexTokenManager.Line_Numbers; use type LexTokenManager.Str_Comp_Result; use type LexTokenManager.Token_Position; use type SPARK_IO.File_Status; use type SPARK_IO.File_Type; use type SP_Expected_Symbols.SP_Ess_Sym_Range; use type SP_Symbols.SP_Symbol; pragma Elaborate_All (SPARK_IO); --# inherit Ada.Characters.Handling, --# Ada.Characters.Latin_1, --# CommandLineData, --# CommandLineHandler, --# Dictionary, --# Error_IO, --# Error_Types, --# ExaminerConstants, --# E_Strings, --# FileSystem, --# LexTokenManager, --# ScreenEcho, --# SPARK_IO, --# SPARK_XML, --# SP_Expected_Symbols, --# SP_Relations, --# SP_Symbols, --# SystemErrors, --# XMLReport; package ErrorHandler --# own Error_Context; --# initializes Error_Context; is -- Used to define the most serious sort of error encountered in an analysis -- Fatal is most serious and No_Error the least. type Error_Level is (Fatal, Semantic_Errs, Flow_Errs, Flow_Warning, No_Error); -- Enumeration literals representing the different sorts of control-flow -- error that may be reported. type Control_Flow_Err_Type is (Misplaced_Exit, Misplaced_Return, Missing_Return, Return_In_Proc); -- Enumeration literals representing the different sorts of data-flow -- error that may be reported. type Data_Flow_Err_Type is (Expn_Undefined, Expn_May_Be_Undefined, Invariant_Exp, Stmt_Undefined, Stmt_May_Be_Undefined); -- Enumeration literals representing the different sorts of information-flow -- errors related to dependencies that may be reported. -- Only the enumeration literals contained within the subtype -- Dependency_Err_Type may be used externally. type Full_Depend_Err_Type is ( Not_Used, May_Be_Used, Uninitialised, Ineff_Init, -- this is own var of embedded pack Ineff_Local_Init, Integrity_Violation, May_Be_Integrity_Violation, Policy_Violation, Not_Used_New, May_Be_Used_New, Not_Used_Continue, May_Be_Used_Continue); subtype Dependency_Err_Type is Full_Depend_Err_Type range Not_Used .. Policy_Violation; -- Enumeration literals representing the different sorts of information-flow -- errors related to stable conditional expressions that may be reported. 
type Stability_Err_Type is (Stable_Exit_Cond, Stable_Fork_Cond); -- Enumeration literals representing the different sorts of information-flow -- errors related to stable expressions in loops that may be reported. type Index_Type is (Index_Zero, Index_One, Larger_Index); -- Enumeration literals representing the different sorts of information-flow -- errors related miscellaneous errors that may be reported. type Usage_Err_Type is ( Unused_Import, Undefined_Export, Undefined_Var, Unreferenced_Var, Redefined_Import, Ineffective_Import, Referenced_But_Not_In_Partition, Updated_But_Not_In_Partition, Uninitialized_Protected_Element); type Err_Sym_Range is range 1 .. 5; --# assert Err_Sym_Range'Base is Short_Short_Integer; -- For GNAT -- An array of parser symbols used in reporting and recovering from -- syntax errors (see spparser.adb). type Err_Sym_List is array (Err_Sym_Range) of SP_Symbols.SP_Symbol; -- A list of enumeration literals representing each of the suppressible -- warnings (and a general literal for notes). -- The literals up to and including Main_Program_Precondition are considered -- to be severe warnings in that they detect uses that may affect the -- semantics of the program. -- When severe warnings are suppressed in the list of suppressed warnings -- issued by the Examiner they are suffixed with a '*' character. -- Non-severe warnings do not have a suffix character. type Warning_Elements is (Pragmas, Hidden_Parts, Handler_Parts, Representation_Clauses, Interrupt_Handlers, Static_Expressions, Style_Check_Casing, External_Variable_Assignment, Unchecked_Conversion, Imported_Objects, Real_RTCs, Default_Loop_Assertions, Expression_Reordering, Unexpected_Address_Clauses, Main_Program_Precondition, -- those below are not "severe" Proof_Function_Non_Boolean, Proof_Function_Implicit, Proof_Function_Refinement, Direct_Updates, With_Clauses, Unused_Variables, Unuseable_Private_Types, Constant_Variables, Type_Conversions, SLI_Generation, Index_Manager_Duplicates, Declare_Annotations, Others_Clauses, Ada2005_Reserved_Words, Obsolescent_Features, Notes); -- "Severe" warnings are for language elements that potentially change the -- semantics of a program. These are indicated with an additional note -- in the report file, even if they are suppressed. subtype Severe_Warnings is Warning_Elements range Pragmas .. Main_Program_Precondition; type Count is range 0 .. ExaminerConstants.MaxErrorsPerFile; --# assert Count'Base is Integer; -- for the Large Examiner -- An array of counters, one for eaach type of suppressible warning -- This forms part of the error context and is not expexted to be -- accessed externally. type Counters is array (Warning_Elements) of Count; ------- Types used for justification of expected errors and warnings -------- -- A justification may be applied to a warning or a flow-error. type Justification_Kinds is (Flow_Message, Warning_Message); -- When a message is marked as expected it is unknown whether it will -- eventually have to match it using a LexString or Symbol (because the error -- may be added using, say, Semantic_Warning or Semantic_Warning_Sym). -- Since both forms available in wf_justification_statement -- both are stored in the justification table and the right one is picked -- when comparisons are needed. -- Note that we only have both values for simple identifier like X, if the -- item being added is of the form P.Q.X then we can only have, and only -- need, the symbol because warnings for objects of this form are always -- added with a symbol. 
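   -- As an illustrative sketch only (X_Str, X_Sym and PQX_Sym are
   -- hypothetical values belonging to a caller, not part of this package):
   --
   --    --  simple identifier X: both forms are available
   --    Justification_Identifier'(String_Form => X_Str, Symbol_Form => X_Sym)
   --
   --    --  expanded name P.Q.X: only the symbol form is available
   --    Justification_Identifier'(String_Form => LexTokenManager.Null_String,
   --                              Symbol_Form => PQX_Sym)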
type Justification_Identifier is record String_Form : LexTokenManager.Lex_String; Symbol_Form : Dictionary.Symbol; end record; Null_Justification_Identifier : constant Justification_Identifier := Justification_Identifier'(String_Form => LexTokenManager.Null_String, Symbol_Form => Dictionary.NullSymbol); -- A message that we may want to suppress may have more than one idenfiier -- that needs to match (e.g. information flow dependency errors). -- There are not any errors that refer to more than two but, in any case -- the behaviour is made generic by using an array of identifiers. Max_Justification_Identifier_Length : constant := 2; subtype Justification_Identifier_Index is Integer range 1 .. Max_Justification_Identifier_Length; type Justification_Identifiers is array (Justification_Identifier_Index) of Justification_Identifier; Null_Justification_Identifiers : constant Justification_Identifiers := Justification_Identifiers'(others => Null_Justification_Identifier); ---------------- End of Justification type declarations --------------------- -- An Error_Context maintains all the warning and error information related -- to a single source or configuration file. type Error_Contexts is private; Null_Error_Context : constant Error_Contexts; -- This constant is used to indicate that the warning or error message -- does not refer to any document. -- At the moment references are represented as simple Natural numbers -- and the list of references is held as a array, Reference_Table declared in -- errorhandler-conversions-tostring-appendreference.adb. -- Currently there are 22 reference entries. It would be better if this -- constant and the reference table were made more controlled and abstract. No_Reference : constant Natural := 0; -- Total_Error_Counts used to create summary counts of errors for -- screen echo at end of Examination. -- Note the special handling of the count of suppressed warnings -- which does not form part of the grand total. This has been done for -- consistency with the way error counts in report files are generated. -- A total error count is maintained by ErrorHandler and it is not expected -- that this declaration will be used externally. type Counted_Error_Kinds is (Syntax_Or_Semantic, Flow, Warning); type Explicit_Error_Counts is array (Counted_Error_Kinds) of Count; type Total_Error_Counts is record Grand_Total : Count; Explicit_Error_Count : Explicit_Error_Counts; Suppressed_Warnings : Count; Justified_Warnings : Count; end record; Null_Total_Error_Counts : constant Total_Error_Counts := Total_Error_Counts' (Grand_Total => Count'First, Explicit_Error_Count => Explicit_Error_Counts'(Counted_Error_Kinds => Count'First), Suppressed_Warnings => Count'First, Justified_Warnings => Count'First); -- Exit codes for the Examiner -- 0 = Success with no unjustified errors or warnings -- 1 = Unjustified Warnings -- 2 = Unjustified Flow Errors -- 3 = Syntax/Semantic Errors -- 4-7 = Reserved -- 8 = Invocation Error e.g. contradictory command-line switches -- 9 = Internal Error e.g. table overflow or internal exception subtype Exit_Code is Integer range 0 .. 9; -- Error numbers used when reporting success in automatic flow analysis mode No_Error_Default : constant Error_Types.ErrNumRange := 0; No_Error_Info_Flow : constant Error_Types.ErrNumRange := 1; No_Error_Data_Flow : constant Error_Types.ErrNumRange := 2; -- Flushes any pending message text in the Echo_Accumulator, which is part of -- the Error_Context maintained by the Error_Handler, to the standard output. 
-- It should be called on the completion of analysis of each subprogram -- implementation. procedure Flush_Echo_Messages; --# global in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from *, --# Error_Context; -- This subporgram initializes the current Error_Context and is called when -- a new source file or configuration file is opened for reading. procedure Error_Init (Source_File_Name : in E_Strings.T; Echo : in Boolean); --# global in Dictionary.Dict; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context from *, --# Dictionary.Dict, --# Echo, --# Source_File_Name, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# Dictionary.Dict, --# Error_Context, --# Source_File_Name; -- This subprogram is similar to Error_Init but is for use by SPARKmake -- rather than the Examiner. procedure Spark_Make_Init; --# global in Dictionary.Dict; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context from *, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# Dictionary.Dict, --# Error_Context; -- Obtains the current Error_Context from the ErrorHandler. procedure Get_Error_Context (Context : out Error_Contexts); --# global in Dictionary.Dict; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Context, --# Error_Context from Error_Context, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# Dictionary.Dict, --# Error_Context; -- Sets the current Error_Context of the ErrorHandler to the one supplied -- by the parameter Context. procedure Set_Error_Context (Context : in Error_Contexts); --# global in Dictionary.Dict; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from Context, --# Dictionary.Dict, --# Error_Context, --# SPARK_IO.File_Sys; -- Called prior to analysis of a body or package initialization -- to reset the severity property of the Error_Context and as -- this property only applies to a single body or package initialization. -- Similarly, the invalid value detection only applies to a single body -- and so this is also reset. procedure Error_Reset; --# global in out Error_Context; --# derives Error_Context from *; -- Returns the severity property from the current Error_Context. procedure Get_Error_Severity (Severity : out Error_Level); --# global in Error_Context; --# derives Severity from Error_Context; -- This function returns TRUE if a syntax error or a semantic -- error has already been met. function Syntax_Or_Semantic_Error return Boolean; --# global in Error_Context; -- True when the analysed body potentially contains invalid values -- from reading an external variable or using an unchecked conversion. function Possibly_Invalid_Values return Boolean; --# global in Error_Context; -- The function returns TRUE if we are still generating SLI. function Generate_SLI return Boolean; --# global in CommandLineData.Content; --# in Error_Context; -- Called to report successful analysis of a unit. A successful analysis -- may be determined by calling the procedure Get_Error_Severity. 
procedure Report_Success (Position : in LexTokenManager.Token_Position; Subprog_Str : in LexTokenManager.Lex_String; Err_Num : in Error_Types.ErrNumRange); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# Err_Num, --# LexTokenManager.State, --# Position, --# SPARK_IO.File_Sys, --# Subprog_Str; -- Reports a warning that a hidden body has been encountered. procedure Hidden_Text (Position : in LexTokenManager.Token_Position; Unit_Str : in LexTokenManager.Lex_String; Unit_Typ : in SP_Symbols.SP_Symbol); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# LexTokenManager.State, --# Position, --# SPARK_IO.File_Sys, --# Unit_Str, --# Unit_Typ; -- Reports a warning that a hidden exception handler has been encountered. procedure Hidden_Handler (Position : in LexTokenManager.Token_Position; Unit_Str : in LexTokenManager.Lex_String; Unit_Typ : in SP_Symbols.SP_Symbol); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# LexTokenManager.State, --# Position, --# SPARK_IO.File_Sys, --# Unit_Str, --# Unit_Typ; -- Reports a warning that an unrecognised representation clause has been -- encountered. procedure Representation_Clause (Position : in LexTokenManager.Token_Position); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# LexTokenManager.State, --# Position, --# SPARK_IO.File_Sys; -- Reports a warning that an unrecognised pragma has been encountered. procedure A_Pragma (Pragma_Name : in LexTokenManager.Lex_String; Position : in LexTokenManager.Token_Position); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# LexTokenManager.State, --# Position, --# Pragma_Name, --# SPARK_IO.File_Sys; -- Warns that a cut point has been introduced into a loop by inserting a -- default assertion. procedure Add_Cut_Point (At_Line : LexTokenManager.Line_Numbers); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from At_Line, --# CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys; -- This subprogram is called to report a semantic warning involving a single -- named entity represented by a LexString. -- See the package header documentation for a description of warnings and -- their definition. 
procedure Semantic_Warning (Err_Num : in Natural; Position : in LexTokenManager.Token_Position; Id_Str : in LexTokenManager.Lex_String); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# Err_Num, --# Id_Str, --# LexTokenManager.State, --# Position, --# SPARK_IO.File_Sys; -- This subprogram is called to report that we stop the SLI -- generation. procedure SLI_Generation_Warning (Position : in LexTokenManager.Token_Position; Id_Str : in LexTokenManager.Lex_String); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# Id_Str, --# LexTokenManager.State, --# Position, --# SPARK_IO.File_Sys; -- This subprogram is called to report a semantic warning involving a single -- named entity represented by a Dictionary.Symbol. -- See the package header documentation for a description of warnings and -- their definition. procedure Semantic_Warning_Sym (Err_Num : in Natural; Position : in LexTokenManager.Token_Position; Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# Err_Num, --# LexTokenManager.State, --# Position, --# Scope, --# SPARK_IO.File_Sys, --# Sym; -- This subprogram is called to report a note involving a single -- Reports a lexical error detected by SPARKlex. procedure Lex_Error (Error_Message, Recovery_Message : in String; Error_Item : in LexTokenManager.Lex_Value); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# Error_Item, --# Error_Message, --# LexTokenManager.State, --# Recovery_Message, --# SPARK_IO.File_Sys & --# LexTokenManager.State from *, --# Error_Item, --# Error_Message, --# Recovery_Message; -- Reports an error detected by SPParser. procedure Syntax_Error (Error_Item : in LexTokenManager.Lex_Value; Current_Sym, Entry_Symbol : in SP_Symbols.SP_Symbol; No_Of_Terminals, No_Of_Non_Terminals : in SP_Expected_Symbols.SP_Ess_Sym_Range; Terminal_List, Non_Terminal_List : in SP_Expected_Symbols.SP_Exp_Sym_List); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Current_Sym, --# Dictionary.Dict, --# Entry_Symbol, --# Error_Context, --# Error_Item, --# LexTokenManager.State, --# Non_Terminal_List, --# No_Of_Non_Terminals, --# No_Of_Terminals, --# SPARK_IO.File_Sys, --# Terminal_List & --# LexTokenManager.State from *, --# Current_Sym, --# Entry_Symbol, --# Error_Item, --# Non_Terminal_List, --# No_Of_Non_Terminals, --# No_Of_Terminals, --# Terminal_List; -- Reports on a syntax error recovery action taken by SPParser. 
procedure Syntax_Recovery (Recovery_Posn : in LexTokenManager.Lex_Value; Replacement_Sym : in SP_Symbols.SP_Symbol; Next_Sym : in SP_Symbols.SP_Terminal; No_Of_Syms : in Natural; Sym_List : in Err_Sym_List); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# LexTokenManager.State, --# Next_Sym, --# No_Of_Syms, --# Recovery_Posn, --# Replacement_Sym, --# SPARK_IO.File_Sys, --# Sym_List & --# LexTokenManager.State from *, --# Error_Context, --# Next_Sym, --# No_Of_Syms, --# Recovery_Posn, --# Replacement_Sym, --# Sym_List; -- named entity represented by a LexString. -- See the package header documentation for a description of notes and -- their definition. procedure Semantic_Note (Err_Num : in Natural; Position : in LexTokenManager.Token_Position; Id_Str : in LexTokenManager.Lex_String); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# Err_Num, --# Id_Str, --# LexTokenManager.State, --# Position, --# SPARK_IO.File_Sys; -- This subprogram is called to report inconsistencies between abstract and -- refined dependency relations involving two named entities represented by -- LexStrings. -- See the package header documentation for a description of these errors and -- their definition. procedure Dep_Semantic_Error (Err_Num : in Natural; Position : in LexTokenManager.Token_Position; Id_Str1 : in LexTokenManager.Lex_String; Id_Str2 : in LexTokenManager.Lex_String); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# Err_Num, --# Id_Str1, --# Id_Str2, --# LexTokenManager.State, --# Position, --# SPARK_IO.File_Sys; -- This subprogram is called to report inconsistencies between abstract and -- refined dependency relations involving two named entities represented by -- Dictionary.Symbols. -- See the package header documentation for a description of these errors and -- their definition. procedure Dep_Semantic_Error_Sym (Err_Num : in Natural; Position : in LexTokenManager.Token_Position; Sym1 : in Dictionary.Symbol; Sym2 : in Dictionary.Symbol; Scope : in Dictionary.Scopes); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# Err_Num, --# LexTokenManager.State, --# Position, --# Scope, --# SPARK_IO.File_Sys, --# Sym1, --# Sym2; -- This subprogram is called to report a semantic error involving a single -- named entity represented by a LexString. -- The parameter "Reference" facilitates referencing a document within -- the warning. A list of document references is maintained in -- errorhandler-conversions-tostring-appendreference.adb. If a document is -- not referenced then the constant ErrorHandler.No_Reference should be used -- to signify this. 
-- See the package header documentation for a description of semantic errors -- and their definition. procedure Semantic_Error (Err_Num : in Natural; Reference : in Natural; Position : in LexTokenManager.Token_Position; Id_Str : in LexTokenManager.Lex_String); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# Err_Num, --# Id_Str, --# LexTokenManager.State, --# Position, --# Reference, --# SPARK_IO.File_Sys; -- This subprogram is called to report a semantic error involving two -- named entitities represented by LexStrings. -- The parameter "Reference" facilitates referencing a document within -- the warning. A list of document references is maintained in -- errorhandler-conversions-tostring-appendreference.adb. If a document is -- not referenced then the constant ErrorHandler.No_Reference should be used -- to signify this. -- See the package header documentation for a description of semantic errors -- and their definition. procedure Semantic_Error2 (Err_Num : in Natural; Reference : in Natural; Position : in LexTokenManager.Token_Position; Id_Str1 : in LexTokenManager.Lex_String; Id_Str2 : in LexTokenManager.Lex_String); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# Err_Num, --# Id_Str1, --# Id_Str2, --# LexTokenManager.State, --# Position, --# Reference, --# SPARK_IO.File_Sys; -- This subprogram is called to report a semantic error involving a single -- named entity represented by a Dictionary.Symbol. -- The parameter "Reference" facilitates referencing a document within -- the warning. A list of document references is maintained in -- errorhandler-conversions-tostring-appendreference.adb. If a document is -- not referenced then the constant ErrorHandler.No_Reference should be used -- to signify this. -- See the package header documentation for a description of semantic errors -- and their definition. procedure Semantic_Error_Sym (Err_Num : in Natural; Reference : in Natural; Position : in LexTokenManager.Token_Position; Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# Err_Num, --# LexTokenManager.State, --# Position, --# Reference, --# Scope, --# SPARK_IO.File_Sys, --# Sym; -- This subprogram is called to report a semantic error involving two -- named entitities represented by Dictionary.Symbols. -- The parameter "Reference" facilitates referencing a document within -- the warning. A list of document references is maintained in -- errorhandler-conversions-tostring-appendreference.adb. If a document is -- not referenced then the constant ErrorHandler.No_Reference should be used -- to signify this. -- See the package header documentation for a description of semantic errors -- and their definition. 
procedure Semantic_Error_Sym2 (Err_Num : in Natural; Reference : in Natural; Position : in LexTokenManager.Token_Position; Sym : in Dictionary.Symbol; Sym2 : in Dictionary.Symbol; Scope : in Dictionary.Scopes); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# Err_Num, --# LexTokenManager.State, --# Position, --# Reference, --# Scope, --# SPARK_IO.File_Sys, --# Sym, --# Sym2; -- This subprogram is called to report a semantic error involving two -- named entitities represented one by a LexString and one by a -- Dictionary.Symbol. -- The parameter "Reference" facilitates referencing a document within -- the warning. A list of document references is maintained in -- errorhandler-conversions-tostring-appendreference.adb. If a document is -- not referenced then the constant ErrorHandler.No_Reference should be used -- to signify this. -- See the package header documentation for a description of semantic errors -- and their definition. procedure Semantic_Error_Lex1_Sym1 (Err_Num : in Natural; Reference : in Natural; Position : in LexTokenManager.Token_Position; Id_Str : in LexTokenManager.Lex_String; Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# Err_Num, --# Id_Str, --# LexTokenManager.State, --# Position, --# Reference, --# Scope, --# SPARK_IO.File_Sys, --# Sym; -- This subprogram is called to report a semantic error involving three -- named entitities represented one by a LexString and two by -- Dictionary.Symbols. -- The parameter "Reference" facilitates referencing a document within -- the warning. A list of document references is maintained in -- errorhandler-conversions-tostring-appendreference.adb. If a document is -- not referenced then the constant ErrorHandler.No_Reference should be used -- to signify this. -- See the package header documentation for a description of semantic errors -- and their definition. procedure Semantic_Error_Lex1_Sym2 (Err_Num : in Natural; Reference : in Natural; Position : in LexTokenManager.Token_Position; Id_Str : in LexTokenManager.Lex_String; Sym : in Dictionary.Symbol; Sym2 : in Dictionary.Symbol; Scope : in Dictionary.Scopes); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# Err_Num, --# Id_Str, --# LexTokenManager.State, --# Position, --# Reference, --# Scope, --# SPARK_IO.File_Sys, --# Sym, --# Sym2; -- This subprogram is called to report a semantic error involving three -- named entitities represented by Dictionary.Symbols. -- The parameter "Reference" facilitates referencing a document within -- the warning. A list of document references is maintained in -- errorhandler-conversions-tostring-appendreference.adb. If a document is -- not referenced then the constant ErrorHandler.No_Reference should be used -- to signify this. -- See the package header documentation for a description of semantic errors -- and their definition. 
procedure Semantic_Error_Sym3 (Err_Num : in Natural; Reference : in Natural; Position : in LexTokenManager.Token_Position; Sym : in Dictionary.Symbol; Sym2 : in Dictionary.Symbol; Sym3 : in Dictionary.Symbol; Scope : in Dictionary.Scopes); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# Err_Num, --# LexTokenManager.State, --# Position, --# Reference, --# Scope, --# SPARK_IO.File_Sys, --# Sym, --# Sym2, --# Sym3; -- This subprogram is called to report a control-flow error. -- See the package header documentation for a description of control-flow -- errors and their definition. procedure Control_Flow_Error (Err_Type : in Control_Flow_Err_Type; Position : in LexTokenManager.Token_Position); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# Err_Type, --# LexTokenManager.State, --# Position, --# SPARK_IO.File_Sys; -- This subprogram is called to report a data-flow error involving a single -- named entity represented by a Dictionary.Symbol. -- See the package header documentation for a description of data-flow errors -- and their definition. procedure Data_Flow_Error (Err_Type : in Data_Flow_Err_Type; Position : in LexTokenManager.Token_Position; Var_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# Err_Type, --# LexTokenManager.State, --# Position, --# Scope, --# SPARK_IO.File_Sys, --# Var_Sym; -- This subprogram is called to report an ineffective statement involving a -- single named entity represented by a Dictionary.Symbol. procedure Ineffective_Stmt (Position : in LexTokenManager.Token_Position; Var_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# LexTokenManager.State, --# Position, --# Scope, --# SPARK_IO.File_Sys, --# Var_Sym; -- This subprogram is called to report a stable conditional or a stable -- expression in a loop. -- See the package header documentation for a description of information-flow -- errors and their definition. procedure Stability_Error (Err_Type : in Stability_Err_Type; Position : in LexTokenManager.Token_Position; Stability_Index : in Index_Type); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# Err_Type, --# LexTokenManager.State, --# Position, --# SPARK_IO.File_Sys, --# Stability_Index; -- This subprogram is called to report a dependency error involving two -- named entitities, the imported and exported variables, represented by -- Dictionary.Symbols. 
-- See the package header documentation for a description of information-flow -- errors and their definition. procedure Dependency_Error (Err_Type : in Dependency_Err_Type; Position : in LexTokenManager.Token_Position; Import_Var_Sym : in Dictionary.Symbol; Export_Var_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# Err_Type, --# Export_Var_Sym, --# Import_Var_Sym, --# LexTokenManager.State, --# Position, --# Scope, --# SPARK_IO.File_Sys; -- This subprogram is called to report a usage error involving a single -- named entitity represented by a Dictionary.Symbol. -- See the package header documentation for a description of information-flow -- errors and their definition. procedure Usage_Error (Err_Type : in Usage_Err_Type; Position : in LexTokenManager.Token_Position; Var_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# Err_Type, --# LexTokenManager.State, --# Position, --# Scope, --# SPARK_IO.File_Sys, --# Var_Sym; -- Read_Warning_File reads in and parses the warning control file specified on -- the command line. It sets up an internal store (part of the state -- represented by the own variable Error_Context) which records which warnings -- have been suppressed by the inclusion of their keyword in the warning file. -- See the package header documentation for a description of the use of the -- warning file and the definition of suppressible warnings. procedure Read_Warning_File; --# global in CommandLineData.Content; --# in out Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys; -- Output_Warning_List writes out a list of the warnings to the file parameter -- "To_File" that are currently suppressed based on the internal store of -- warnings suppressed by reading the warning control file (part of the state -- represented by the own variable Error_Context). -- See the package header documentation for a description of the use of the -- warning file and the definition of suppressible warnings. procedure Output_Warning_List (To_File : in SPARK_IO.File_Type); --# global in CommandLineData.Content; --# in Error_Context; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Error_Context, --# LexTokenManager.State, --# To_File, --# XMLReport.State & --# XMLReport.State from *, --# CommandLineData.Content, --# Error_Context; -- Output_Reference_List writes out a list of the document references that are -- associated with reported semantic erros to the file parameter "To_File" -- See the package header documentation for a description of semantic errors -- and document references. 
procedure Output_Reference_List (To_File : in SPARK_IO.File_Type); --# global in CommandLineData.Content; --# in Error_Context; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Error_Context, --# To_File; -- This subprogram writes a source listing to the file parameter "Listing" -- in which the accumulated reported warnings and errors are interspersed -- with program text to indicate the location of the warning or error. -- A table showing the successful justifications encountered in the source -- file and a summary of the suppressed warnings are appended to the end of -- the listing file. -- The parameter "Purpose" identifies the type of listing that is being -- generated and is used in conjunction with the -error_explanations command -- line switch to control the generation of extended messages. -- For this subprogram, given its use, it is likely that Purpose -- will either be For_Listing - indicating this is a sorce listing or -- For_Report_Indexed_Files used when listing the configuration file. procedure PrintErrors (Listing : in SPARK_IO.File_Type; Purpose : in Error_Types.ConversionRequestSource); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# LexTokenManager.State, --# Listing, --# Purpose, --# SPARK_IO.File_Sys; -- Append_Errors extends the report file passed as the parameter "Report" -- adding the accumulated errors from the current Error_Context. -- A table showing the successful justifications encountered within the -- current Error_Context and a summary of the suppressed warnings are -- added after the reported warnings and errors. -- The parameter "Purpose" identifies the type of listing that is being -- generated and is used in conjunction with the -error_explanations command -- line switch to control the generation of extended messages. -- For this subprogram, given its use, it is likely that Purpose -- will either be For_Report_Selected_Files - indicating this is a file that -- is explicitly given on the command line or in a meta file, or -- For_Report_Indexed_Files - indicating that this is a file which was accessed -- indirectly via an index file.. procedure AppendErrors (Report : in SPARK_IO.File_Type; Purpose : in Error_Types.ConversionRequestSource); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives Error_Context, --# SPARK_IO.File_Sys, --# XMLReport.State from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# LexTokenManager.State, --# Purpose, --# Report, --# SPARK_IO.File_Sys, --# XMLReport.State; -- This procedure write at the standard output an error in an -- index file. 
procedure Index_Manager_Error (S : in String; Source_File : in LexTokenManager.Lex_String; Line_No : in Positive; Col_No : in Positive; Token_String : in E_Strings.T; Is_Token_Filename : in Boolean; Is_Fatal : in Boolean); --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context from *, --# Is_Fatal & --# SPARK_IO.File_Sys from *, --# Col_No, --# CommandLineData.Content, --# Error_Context, --# Is_Fatal, --# Is_Token_Filename, --# LexTokenManager.State, --# Line_No, --# S, --# Source_File, --# Token_String; -- When called, writes the total warnings and errors encountered during the -- Examiner run to the standard output. procedure Echo_Total_Error_Count; --# global in CommandLineData.Content; --# in Error_Context; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Error_Context; -- Call this subprogram if it is not possible to open a required input file -- during analysis. It will record the fact that this has happened and -- it will be reflected in the exit code returned by a call to Get_Errors_Type. procedure Set_File_Open_Error; --# global in out Error_Context; --# derives Error_Context from *; -- At the end of an Examiner run call this subprogram to obtain an exit code -- reflecting the most severe type of error encountered during the run. -- The exit code so obtained can then be used in a call to Set_Exit_Status. function Get_Errors_Type return Exit_Code; --# global in Error_Context; ------ Exported calls concerned with the error justification mechanism ------ -- Called at the start of analysis of each unit to initialize the -- justification state for the unit. -- The justification state is part of the state represented by the own -- variable Error_Context. procedure Start_Unit; --# global in out Error_Context; --# derives Error_Context from *; -- Called at the end of analysis of each unit to close the -- justification state for the unit. -- The justification state is part of the state represented by the own -- variable Error_Context. procedure End_Unit; --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Error_Context; --# in out SPARK_IO.File_Sys; --# derives Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys; -- Called when an accept annotation is encountered in the source text to -- register the justification. -- If the maximum number of justifications allowed within a unit is reached when -- registering the justification, the parameter Maximum_Justifications_Reached -- is set to True but the justification is still registered. -- Maximum_Justifications_Reached is only ever set to True once, at the point of -- call where the table first fills up; if the table is already full then False -- is returned, so that the condition is reported only when the table first fills -- and not at every call thereafter.
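-- An illustrative call sequence for a single unit is sketched below; the
-- parameter values (error number 10 and the objects Pos, Line_1, Line_2, Ids,
-- Expl, Table_Full and Unmatched) are invented for the example and are not
-- part of this interface:
--
--    Start_Unit;
--    Start_Justification
--      (Position                       => Pos,
--       Line                           => Line_1,
--       Kind                           => Flow_Message,
--       Err_Num                        => 10,
--       Identifiers                    => Ids,
--       Applies_To_All                 => False,
--       Explanation                    => Expl,
--       Maximum_Justifications_Reached => Table_Full);
--    --  ... analysis of the unit; reported errors are matched against
--    --  the registered justifications ...
--    End_Justification (Line => Line_2, Unmatched_End => Unmatched);
--    End_Unit;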
procedure Start_Justification (Position : in LexTokenManager.Token_Position; Line : in LexTokenManager.Line_Numbers; Kind : in Justification_Kinds; Err_Num : in Natural; Identifiers : in Justification_Identifiers; Applies_To_All : in Boolean; Explanation : in E_Strings.T; Maximum_Justifications_Reached : out Boolean); --# global in out Error_Context; --# derives Error_Context from *, --# Applies_To_All, --# Err_Num, --# Explanation, --# Identifiers, --# Kind, --# Line, --# Position & --# Maximum_Justifications_Reached from Error_Context; -- Called when an end accept annotation is encountered in the source text to -- end the scope of a justification. -- The parameter Unmatched_End returns True if there is no accept annotation -- matching the end accept annotation provided the Justification table -- is not full in which case it will always return False to avoid false -- alarms. procedure End_Justification (Line : in LexTokenManager.Line_Numbers; Unmatched_End : out Boolean); --# global in out Error_Context; --# derives Error_Context, --# Unmatched_End from Error_Context, --# Line; private -------------------------------------------------------------- -- The following declarations define a Data Table which is a core component -- of the Justification structure. -- A Data_Table_Table_Entry defines a single element of the table and contains -- all the information associated with a single justification. -- Data_Tables defines the array of Data_Table_Entries which holds all the -- justifications for a unit. Max_Table_Entries : constant := ExaminerConstants.MaxJustificationsPerFile; type Data_Table_Ptr is range 0 .. Max_Table_Entries; type Data_Table_Entry is record Kind : Justification_Kinds; Err_Num : Natural; Identifiers : Justification_Identifiers; Applies_To_All : Boolean; Explanation : E_Strings.T; -- location of the justification clause for error reporting purposes Position : LexTokenManager.Token_Position; -- location of start justify (for multiple clauses this is the line of the justify statement as a whole) Start_Line : LexTokenManager.Line_Numbers; -- location of end justify or end of unit (const End_Line_Of_Unit_Marker) if there is no matching end justify End_Line : LexTokenManager.Line_Numbers; End_Found : Boolean; -- explicit end justify found or not Match_Count : Natural; -- number of times this justification matched last line where match occurred linkage Match_Line : LexTokenManager.Line_Numbers; Previous : Data_Table_Ptr; end record; -- Initializing constants for Data Tables End_Of_List : constant Data_Table_Ptr := 0; Empty_Data_Table_Entry : constant Data_Table_Entry := Data_Table_Entry' (Kind => Flow_Message, Err_Num => 0, Identifiers => Null_Justification_Identifiers, Applies_To_All => False, Explanation => E_Strings.Empty_String, Position => LexTokenManager.Token_Position'(Start_Line_No => 0, Start_Pos => 0), Start_Line => 0, End_Line => 0, End_Found => False, Match_Count => 0, Match_Line => 0, Previous => End_Of_List); -- Data_Tables defines the array of Data_Table_Entries representing all the -- justifications for a unit. subtype Data_Table_Index is Data_Table_Ptr range 1 .. 
Max_Table_Entries; type Data_Tables is array (Data_Table_Index) of Data_Table_Entry; -- Initializing constant Empty_Data_Table : constant Data_Tables := Data_Tables'(Data_Table_Index => Empty_Data_Table_Entry); -- The Stack is the second major component of the justification structure -- A Stack_Record is a single element of the stack, Unit_Stack_Array is the -- stack storage and Unit_Stacks is the complete stack including a stack -- pointer. Max_Stack_Size : constant := 10; -- Perhaps move this to ExaminerConstants later type Stack_Ptrs is range 0 .. Max_Stack_Size; subtype Stack_Index is Stack_Ptrs range 1 .. Max_Stack_Size; type Stack_Record is record List_Items : Data_Table_Ptr; Semantic_Error_In_Unit : Boolean; end record; type Unit_Stack_Array is array (Stack_Index) of Stack_Record; type Unit_Stacks is record Vector : Unit_Stack_Array; Ptr : Stack_Ptrs; end record; -- initializing constant Empty_Stack : constant Unit_Stacks := Unit_Stacks' (Ptr => 0, Vector => Unit_Stack_Array'(Stack_Index => Stack_Record'(List_Items => End_Of_List, Semantic_Error_In_Unit => False))); -- The justification structure including a Data_Table, a Unit_Stack, a pointer -- for the current entry in the Data_Table and a Boolean indicating whether -- the justification table is accepting more entries (is not full). type Justifications_Data_Tables is record Data_Table : Data_Tables; Current_Slot : Data_Table_Ptr; Accepting_More_Entries : Boolean; Unit_Stack : Unit_Stacks; end record; -- initializing constant Empty_Justifications_Data_Table : constant Justifications_Data_Tables := Justifications_Data_Tables' (Data_Table => Empty_Data_Table, Current_Slot => End_Of_List, Accepting_More_Entries => True, Unit_Stack => Empty_Stack); --------- End of justification table data structure definitions ------------- -- We can now declare the actual announced private type -- The private type Error_Contexts contains a Justification_Table which is -- itself a complex data structure used to track the justification of -- expected flow errors and warnings. -- It is part of Error_Contexts because we need a different copy of the data -- structure for each file we are examining. -- The data structure types can be declared here because they are only used -- in the body of ErrorHandler and in the embedded package Justifications. 
-- -- More details of the rationale for the Justifications Table data structure -- can be found in ErrorHandler.Justifications.adb -- type Error_Contexts is record Recovery_Messages : Boolean; Echo : Boolean; Severity : Error_Level; Num_Errs : Count; Num_Message : Count; Line_No : LexTokenManager.Line_Numbers; Current_Line : E_Strings.T; Errs : Error_IO.File_Type; Source : SPARK_IO.File_Type; Source_File_Name : E_Strings.T; Counter : Counters; Justifications_Data_Table : Justifications_Data_Tables; end record; Null_Error_Context : constant Error_Contexts := Error_Contexts' (Recovery_Messages => False, Echo => False, Severity => Error_Level'First, Num_Errs => Count'First, Num_Message => Count'First, Line_No => LexTokenManager.Line_Numbers'First, Current_Line => E_Strings.Empty_String, Errs => Error_IO.Null_File, Source => SPARK_IO.Null_File, Source_File_Name => E_Strings.Empty_String, Counter => Counters'(others => Count'First), Justifications_Data_Table => Empty_Justifications_Data_Table); end ErrorHandler; spark-2012.0.deb/examiner/sem-compunit-walkstatements-wf_condition.adb0000644000175000017500000000476211753202336025076 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
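--
-- Note on the check performed below: the condition expression is walked and
-- its result type must be Boolean; otherwise semantic error 94 is reported at
-- the position of the condition node. For example (the user source lines are
-- illustrative only, and the exact Examiner message text is not shown here):
--
--    if Counter + 1 then    -- condition is not Boolean: semantic error 94
--    if Counter > 0 then    -- well-formed condition
--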
-- --============================================================================= separate (Sem.CompUnit.WalkStatements) procedure Wf_Condition (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Table : in out RefList.HashTable; Component_Data : in out ComponentManager.ComponentData) is Condition_Result : Exp_Record; Ref_Var : SeqAlgebra.Seq; Child : STree.SyntaxNode; begin SeqAlgebra.CreateSeq (TheHeap, Ref_Var); Child := Child_Node (Current_Node => Node); -- ASSUME Child = expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Child) = SP_Symbols.expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child = expression in Wf_Condition"); Walk_Expression_P.Walk_Expression (Exp_Node => Child, Scope => Scope, Type_Context => Dictionary.GetPredefinedBooleanType, Context_Requires_Static => False, Ref_Var => Ref_Var, Result => Condition_Result, Component_Data => Component_Data, The_Heap => TheHeap); if not Dictionary.TypeIsBoolean (Condition_Result.Type_Symbol) then ErrorHandler.Semantic_Error (Err_Num => 94, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; -- add reference variable list to RefList hash table RefList.AddRelation (Table, TheHeap, Node, Dictionary.NullSymbol, Ref_Var); end Wf_Condition; spark-2012.0.deb/examiner/sp_parser_goto-sp_goto.adb0000644000175000017500000000365011753202336021423 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
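--
-- Implementation note: the goto tables are packed. Using hypothetical packing
-- constants GOT_Index_Size = 2**12 and State_Size = 2**10 (the real values are
-- defined elsewhere), an Index_Pair of 5 * 2**12 + 3 unpacks to Index = 3 and
-- Max_Index = 5, and a Goto_Table entry of 7 * 2**10 + 42 unpacks to
-- Feasible_State = 42 with, on a match, Return_State = 7. SP_Goto scans the
-- entries from Index up to Max_Index for a Feasible_State equal to CST and
-- returns 0 if no matching entry is found.
--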
-- --============================================================================= separate (SP_Parser_Goto) function SP_Goto (CST : SP_Productions.Valid_States; CSY : SP_Symbols.SP_Non_Terminal) return SP_Productions.SP_State is Index_Pair : Packed_GOT_Index_Pair; Index, Max_Index : GOT_Index; Feasible_State, Return_State : SP_Productions.SP_State; begin Index_Pair := Non_Term_Table (CSY); Index := GOT_Index (Index_Pair mod GOT_Index_Size); Max_Index := GOT_Index (Index_Pair / GOT_Index_Size); Feasible_State := SP_Productions.SP_State (Goto_Table (Index) mod State_Size); while Feasible_State /= CST and then Index < Max_Index loop Index := Index + 1; Feasible_State := SP_Productions.SP_State (Goto_Table (Index) mod State_Size); end loop; if Feasible_State /= CST then Return_State := 0; else Return_State := SP_Productions.SP_State ((Goto_Table (Index) / State_Size) mod State_Size); end if; return Return_State; end SP_Goto; spark-2012.0.deb/examiner/dictionary-add_renaming_declaration.adb0000644000175000017500000001521611753202336024046 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
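--
-- Usage note: exactly one of The_Subprogram and The_Operator must be non-null.
-- An illustrative call (Sub, Unit, Decl_Loc and Current_Scope are invented
-- names standing for values obtained by the caller):
--
--    Add_Renaming_Declaration
--      (The_Subprogram => Sub,
--       The_Operator   => RawDict.Null_Operator_Info_Ref,
--       Comp_Unit      => Unit,
--       Declaration    => Decl_Loc,
--       Scope          => Current_Scope);
--
-- Passing both or neither of the two as non-null results in a call to
-- SystemErrors.Fatal_Error.
--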
-- --============================================================================= separate (Dictionary) procedure Add_Renaming_Declaration (The_Subprogram : in RawDict.Subprogram_Info_Ref; The_Operator : in RawDict.Operator_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Scope : in Scopes) is Region : Symbol; The_Declaration : RawDict.Declaration_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Renaming_Declaration_To_Subprogram (The_Declaration : in RawDict.Declaration_Info_Ref; The_Subprogram : in RawDict.Subprogram_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Declaration, --# The_Subprogram; is begin RawDict.Set_Next_Declaration (The_Declaration => The_Declaration, Next => RawDict.Get_Subprogram_Renaming_Declarations (The_Subprogram => The_Subprogram)); RawDict.Set_Subprogram_Renaming_Declarations (The_Subprogram => The_Subprogram, The_Declaration => The_Declaration); end Add_Renaming_Declaration_To_Subprogram; -------------------------------------------------------------------------------- procedure Add_Renaming_Declaration_To_Package (The_Declaration : in RawDict.Declaration_Info_Ref; Scope : in Scopes) --# global in out Dict; --# derives Dict from *, --# Scope, --# The_Declaration; is The_Package : RawDict.Package_Info_Ref; -------------------------------------------------------------------------------- procedure Add_Renaming_Declaration_To_Package_Specification (The_Declaration : in RawDict.Declaration_Info_Ref; The_Package : in RawDict.Package_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Declaration, --# The_Package; is begin RawDict.Set_Next_Declaration (The_Declaration => The_Declaration, Next => RawDict.Get_Package_Visible_Renaming_Declarations (The_Package => The_Package)); RawDict.Set_Package_Visible_Renaming_Declarations (The_Package => The_Package, The_Declaration => The_Declaration); end Add_Renaming_Declaration_To_Package_Specification; -------------------------------------------------------------------------------- procedure Add_Renaming_Declaration_To_Package_Body (The_Declaration : in RawDict.Declaration_Info_Ref; The_Package : in RawDict.Package_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Declaration, --# The_Package; is begin RawDict.Set_Next_Declaration (The_Declaration => The_Declaration, Next => RawDict.Get_Package_Local_Renaming_Declarations (The_Package => The_Package)); RawDict.Set_Package_Local_Renaming_Declarations (The_Package => The_Package, The_Declaration => The_Declaration); end Add_Renaming_Declaration_To_Package_Body; begin -- Add_Renaming_Declaration_To_Package The_Package := RawDict.Get_Package_Info_Ref (GetRegion (Scope)); case Get_Visibility (Scope => Scope) is when Visible => Add_Renaming_Declaration_To_Package_Specification (The_Declaration => The_Declaration, The_Package => The_Package); when Local => Add_Renaming_Declaration_To_Package_Body (The_Declaration => The_Declaration, The_Package => The_Package); when Privat => null; end case; end Add_Renaming_Declaration_To_Package; begin -- Add_Renaming_Declaration RawDict.Create_Declaration (Context => ProgramContext, Scope => Scope, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, The_Declaration => The_Declaration); if The_Subprogram /= RawDict.Null_Subprogram_Info_Ref and then The_Operator = RawDict.Null_Operator_Info_Ref then RawDict.Set_Declaration_Item (The_Declaration => The_Declaration, Item => RawDict.Get_Subprogram_Symbol 
(The_Subprogram)); elsif The_Subprogram = RawDict.Null_Subprogram_Info_Ref and then The_Operator /= RawDict.Null_Operator_Info_Ref then RawDict.Set_Declaration_Item (The_Declaration => The_Declaration, Item => RawDict.Get_Operator_Symbol (The_Operator)); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Add_Renaming_Declaration"); end if; Region := GetRegion (Scope); case RawDict.GetSymbolDiscriminant (Region) is when Package_Symbol => Add_Renaming_Declaration_To_Package (The_Declaration => The_Declaration, Scope => Scope); when Subprogram_Symbol => Add_Renaming_Declaration_To_Subprogram (The_Declaration => The_Declaration, The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Region)); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Add_Renaming_Declaration"); end case; end Add_Renaming_Declaration; spark-2012.0.deb/examiner/sp_parser_actions-scan_action_table.adb0000644000175000017500000000547311753202336024076 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
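--
-- Implementation note: Scan_Action_Table is intended to be called repeatedly
-- with an increasing Index to enumerate the packed parse actions of State_No;
-- on each successful call it decodes one entry and advances Index. Once Index
-- passes the last packed entry for the state it returns
-- Action_Symbol = SP_Symbols.SPEND together with Error_Action, which can be
-- taken as the end of the enumeration.
--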
-- --============================================================================= separate (SP_Parser_Actions) procedure Scan_Action_Table (State_No : in SP_Productions.Valid_States; Index : in out Action_Index; Parse_Act : out SP_Parse_Act; Action_Symbol : out SP_Symbols.SP_Terminal) is Index_Pair : Packed_PAT_Index_Pair; Table_Index, Max_Index : PAT_Index; Packed_Result : Packed_Sym_Action_Pair; begin Index_Pair := State_Table (State_No); Table_Index := PAT_Index (Index_Pair mod PAT_Index_Size); Max_Index := PAT_Index (Index_Pair / PAT_Index_Size); if Action_Index (Max_Index - Table_Index) < Index then Action_Symbol := SP_Symbols.SPEND; Parse_Act := Error_Action; else Table_Index := PAT_Index (Natural (Table_Index) + Natural (Index)); Index := Index + 1; Action_Symbol := SP_Symbols.SP_Terminal'Val ((Parse_Action_Table (Table_Index) / Term_Sym) mod Term_Sym_Lim); Packed_Result := Parse_Action_Table (Table_Index); case SP_Action_Kind'Val ((Packed_Result / Act) mod Act_Lim) is when Shift => Parse_Act := SP_Parse_Act' (Act => Shift, State => SP_Productions.SP_State ((Packed_Result / State) mod State_Lim), Symbol => No_Sym, Red_By => No_Red, Prod_No => No_Prod); when Reduce => Parse_Act := SP_Parse_Act' (Reduce, SP_Productions.No_State, SP_Symbols.SP_Symbol'Val ((Packed_Result / Symbol) mod Symbol_Lim + First_Non_Terminal), SP_Productions.SP_Right ((Packed_Result / Red_By) mod Red_By_Lim), 0); when Accpt => Parse_Act := Accept_Action; when Error => -- can never have this option Parse_Act := Error_Action; end case; end if; end Scan_Action_Table; spark-2012.0.deb/examiner/metafile.ads0000644000175000017500000001030011753202336016522 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
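--
-- A typical (illustrative) use of this package by a caller; Meta_Name, MF, FN
-- and the other locals are invented names for the example:
--
--    Create (File_Name => Meta_Name, The_Meta_File => MF);
--    loop
--       Next_Name (The_Meta_File    => MF,
--                  The_Filename     => FN,
--                  Do_Listing       => Listing_Wanted,
--                  The_Listing_Name => Listing_Name,
--                  Do_VCG           => VCG_Wanted,
--                  File_Found       => Found);
--       exit when not Found;
--       --  analyse FN here, honouring Listing_Wanted and VCG_Wanted
--    end loop;
--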
-- --============================================================================= --Provides routines to support analysis of "files of files" -------------------------------------------------------------------------------- with E_Strings; with SPARK_IO; use type SPARK_IO.File_Status; --# inherit Ada.Characters.Latin_1, --# CommandLineData, --# CommandLineHandler, --# ErrorHandler, --# E_Strings, --# FileSystem, --# ScreenEcho, --# SPARK_IO, --# SystemErrors, --# XMLReport; package MetaFile is type Meta_Files is private; --Meta File Operations procedure Create (File_Name : in E_Strings.T; The_Meta_File : out Meta_Files); --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# File_Name & --# The_Meta_File from File_Name, --# SPARK_IO.File_Sys; procedure Next_Name (The_Meta_File : in out Meta_Files; The_Filename : out E_Strings.T; Do_Listing : out Boolean; The_Listing_Name : out E_Strings.T; Do_VCG : out Boolean; File_Found : out Boolean); --# global in CommandLineData.Content; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Do_Listing, --# Do_VCG, --# File_Found, --# SPARK_IO.File_Sys, --# The_Filename, --# The_Listing_Name, --# The_Meta_File from CommandLineData.Content, --# SPARK_IO.File_Sys, --# The_Meta_File & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# SPARK_IO.File_Sys, --# The_Meta_File; procedure Report_File_Content (To_File : in SPARK_IO.File_Type; Filename : in E_Strings.T; Meta_File_Used : in out Boolean); --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives Meta_File_Used from *, --# Filename, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys, --# XMLReport.State from CommandLineData.Content, --# Filename, --# Meta_File_Used, --# SPARK_IO.File_Sys, --# To_File, --# XMLReport.State; private Stack_Size : constant := 100; type Ptrs is range 0 .. Stack_Size; subtype Indexes is Ptrs range 1 .. Stack_Size; type Meta_File_Element is record File_Handle : SPARK_IO.File_Type; Path_Name : E_Strings.T; end record; type Vectors is array (Indexes) of Meta_File_Element; type Meta_Files is record Vector : Vectors; Ptr : Ptrs; end record; end MetaFile; spark-2012.0.deb/examiner/filesystem.apb0000644000175000017500000006417711753202337017141 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
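--
-- Note: this body is preprocessed before compilation; the
-- #if Target = "..." / #else / #end if; blocks below select the
-- platform-specific code (Intel_WinNT versus SPARC_Solaris, Intel_Linux and
-- Darwin), presumably via a gnatprep-style step driven by the build system.
--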
-- --============================================================================= --------------------------------------------------------------------------------- -- This package body provides the filesystem interface which allows the -- Examiner to determine the current drive, the full pathname of a given file, -- and various properties of files. It also reads the command line and -- provides the facility to create a directory. --------------------------------------------------------------------------------- with Ada.Characters.Latin_1; with Ada.Command_Line; with SPARK.Ada.Strings.Unbounded.Not_SPARK; with E_Strings.Not_SPARK; with GNAT.Directory_Operations; with GNAT.IO_Aux; with GNAT.OS_Lib; #if Target = "Intel_WinNT" then with Ada.Characters.Handling; with Interfaces.C_Streams; #end if; use type GNAT.OS_Lib.String_Access; package body FileSystem is -- NOTE - this package body is Ada95, not SPARK --========================================================================= -- Filesystem package body -- -- This body is split into section according to the various combinations -- of Host Compiler and Target Platform. Effort is made to make as much -- code common to as many platforms as possible. For example, many -- procedures are common to all platforms where the host compiler is -- GNAT, regardless of whether the target is Windows, Solaris, Linux, -- OS X or whatever. -- -- Contents -- 1 Linker Options (GNAT/Intel Windows only) -- 2 Command-Line Arguments and Identification (All platforms/hosts) -- 3 Casing of filenames (All platforms/hosts) -- 4 Exported Operations - All hosts and targets -- 5 Exported Operations - All GNAT hosts --========================================================================= -------------------------------------------------------------- -- Section 1 -- -- Linker Options (CFR 786) -- On NT we currently require the system stack reserve and -- commit setting to be increased. -------------------------------------------------------------- -- This section is no longer needed. 
Stack size setting in the linker -- is still required on Windows, but this is done in the project file and/or -- makefile ----------------------------------------------------------------- -- Section 2 -- -- Functions relating to reading command-line arguments ----------------------------------------------------------------- function Argument_Separator return Character is begin #if Target = "Intel_WinNT" then -- We use Nul to separate arguments on NT, since we want spaces to -- be allowed in filenames (CFR 783) return Ada.Characters.Latin_1.NUL; #else -- Solaris, Linux, Mac OS X return ' '; #end if; end Argument_Separator; function Is_An_Argument_Terminator (Ch : Character) return Boolean is begin #if Target = "Intel_WinNT" then return Ch = Ada.Characters.Latin_1.NUL or Ch = ',' or Ch = '='; #else -- Solaris, Linux, OS X return Ch = ' ' or Ch = '='; #end if; end Is_An_Argument_Terminator; function Use_Windows_Command_Line return Boolean is begin #if Target = "Intel_WinNT" then return True; #else -- Solaris, Linux, Mac OS X return False; #end if; end Use_Windows_Command_Line; function Use_Unix_Command_Line return Boolean is begin #if Target = "SPARC_Solaris" or Target = "Intel_Linux" or Target = "Darwin" then return True; #else -- Solaris, Linux, Mac OS X return False; #end if; end Use_Unix_Command_Line; ----------------------------------------------------------------- -- Section 3 -- -- Functions relating to the casing of filenames ----------------------------------------------------------------- function Case_Of_Files_For_Open (E_Str : E_Strings.T) return E_Strings.T is begin return E_Str; -- unchanged end Case_Of_Files_For_Open; function Case_Of_Files_For_Create (E_Str : E_Strings.T) return E_Strings.T is begin return E_Str; -- unchanged end Case_Of_Files_For_Create; ----------------------------------------------------------------- -- Section 4 -- -- Exported operations - All hosts and targets ----------------------------------------------------------------- procedure Check_Listing_Extension (Source_Name : in E_Strings.T; Fn : in out E_Strings.T; Ext : in E_Strings.T) is Source_Extension : E_Strings.T; function Get_File_Ext (File_Name : E_Strings.T) return E_Strings.T is File_Ext : E_Strings.T; First : E_Strings.Lengths; begin --Get_File_Ext File_Ext := E_Strings.Empty_String; if E_Strings.Get_Length (E_Str => File_Name) > 0 then First := 0; for I in reverse E_Strings.Positions range 1 .. E_Strings.Get_Length (E_Str => File_Name) loop if E_Strings.Get_Element (E_Str => File_Name, Pos => I) = '.' then -- Start of extension found First := I + 1; exit; end if; end loop; -- first now point to the first character after the '.' or it's 0 because -- no extension (i.e. no ',') was found if First > 0 then for I in E_Strings.Positions range First .. 
E_Strings.Get_Length (E_Str => File_Name) loop exit when E_Strings.Get_Element (E_Str => File_Name, Pos => I) = ' '; --fallen of the end of the extension -- If we are here we have a valid character to insert E_Strings.Append_Char (E_Str => File_Ext, Ch => E_Strings.Get_Element (E_Str => File_Name, Pos => I)); end loop; end if; end if; return File_Ext; end Get_File_Ext; --------------------- function Create_New_Extension (Source_Ext, Ext : E_Strings.T) return E_Strings.T is Result : E_Strings.T; function Substitute_Character (Source_Char, Ext_Char, Default_Char : Character) return Character is Wild_Card : constant Character := '_'; Result : Character; begin --Substitute_Character if Ext_Char = Wild_Card then -- We need to look at source and possible default extensions if Source_Char = ' ' then -- no matching character from source so return default Result := Default_Char; else Result := Source_Char; end if; else -- non wild-card so just return it Result := Ext_Char; end if; return Result; end Substitute_Character; begin -- Create_New_Extension; Result := E_Strings.Empty_String; E_Strings.Append_Char (E_Str => Result, Ch => Substitute_Character (Source_Char => E_Strings.Get_Element (E_Str => Source_Ext, Pos => 1), Ext_Char => E_Strings.Get_Element (E_Str => Ext, Pos => 1), Default_Char => 'l')); E_Strings.Append_Char (E_Str => Result, Ch => Substitute_Character (Source_Char => E_Strings.Get_Element (E_Str => Source_Ext, Pos => 2), Ext_Char => E_Strings.Get_Element (E_Str => Ext, Pos => 2), Default_Char => 's')); E_Strings.Append_Char (E_Str => Result, Ch => Substitute_Character (Source_Char => E_Strings.Get_Element (E_Str => Source_Ext, Pos => 3), Ext_Char => E_Strings.Get_Element (E_Str => Ext, Pos => 3), Default_Char => 't')); --safety check: if Result = Source_Ext then force to default .lst if E_Strings.Eq_String (E_Str1 => Result, E_Str2 => Source_Ext) then Result := E_Strings.Copy_String ("lst"); end if; return Result; end Create_New_Extension; begin -- Check_Listing_Extension if E_Strings.Get_Length (E_Str => Get_File_Ext (File_Name => Fn)) = 0 then --no existing extension if E_Strings.Get_Length (E_Str => Fn) /= 0 and then -- and then required here. E_Strings.Get_Element (E_Str => Fn, Pos => E_Strings.Get_Length (E_Str => Fn)) /= '.' then -- prevent double dot E_Strings.Append_Char (E_Str => Fn, Ch => '.'); end if; Source_Extension := Get_File_Ext (File_Name => Source_Name); --The source extension may be less than 3 characters long. 
We force --it to 3 characters here (it is padded with spaces anyway) for use --by Create_New_Extension E_Strings.Append_Examiner_String (E_Str1 => Fn, E_Str2 => Create_New_Extension (Source_Ext => Source_Extension, Ext => Ext)); end if; end Check_Listing_Extension; ----------------------------------------------------------------- -- Section 5 -- -- Exported operations - All GNAT hosted platforms ----------------------------------------------------------------- function Get_Environment_Variable (Env_Var_Name : String) return E_Strings.T is begin return E_Strings.Copy_String (Str => GNAT.OS_Lib.Getenv (Env_Var_Name).all); end Get_Environment_Variable; procedure Open_Source_File (File : in out SPARK_IO.File_Type; Name : in E_Strings.T; Status : out SPARK_IO.File_Status) is begin E_Strings.Open (File => File, Mode_Of_File => SPARK_IO.In_File, Name_Of_File => Name, Form_Of_File => "shared=no", Status => Status); end Open_Source_File; procedure Read_Cmd_Line (Cmd_Line_Found : out Boolean; Cmd_Line : out E_Strings.T) is Loc_Ptr : E_Strings.Lengths := 0; Loc_Line : E_Strings.T := E_Strings.Empty_String; Arg_Count : constant Natural := Ada.Command_Line.Argument_Count; begin for I in Positive range 1 .. Arg_Count loop declare Arg : constant E_Strings.T := E_Strings.Copy_String (Ada.Command_Line.Argument (I)); begin Loc_Ptr := Loc_Ptr + E_Strings.Get_Length (Arg); E_Strings.Append_Examiner_String (Loc_Line, Arg); end; Loc_Ptr := Loc_Ptr + 1; E_Strings.Append_Char (Loc_Line, Argument_Separator); end loop; Cmd_Line_Found := (Loc_Ptr /= 0); Cmd_Line := Loc_Line; end Read_Cmd_Line; function Working_Directory return E_Strings.T is Result : E_Strings.T; CWD : constant String := GNAT.Directory_Operations.Get_Current_Dir; begin Result := E_Strings.Copy_String (Str => CWD); -- GNAT.Directory_Operations.Get_Current_Dir can return -- a trailing separator, which we don't want, so... if E_Strings.Get_Length (E_Str => Result) /= 0 and then E_Strings.Get_Element (E_Str => Result, Pos => E_Strings.Get_Length (E_Str => Result)) = GNAT.OS_Lib.Directory_Separator then Result := E_Strings.Section (E_Str => Result, Start_Pos => 1, Length => E_Strings.Get_Length (E_Str => Result) - 1); end if; return Result; end Working_Directory; #if Target = "Intel_WinNT" then --------------------------------------------------------------------- -- We require the pathnames returned from this -- function to match those returned by Interfaces.C_Streams.Full_Name -- which is used by System.File_IO to detect shared files and -- attempts to open the same file twice. We need this to work -- so that circular meta-files can be detected, for example. -- -- On Windows Vista, _fullname can return a _lower_ case -- drive letter, while GNAT.OS_Lib.Normalize_Pathname always -- returns upper-case, which causes a regression. -- -- This local implementation is therefore needed as a temporary -- measure --------------------------------------------------------------------- function Normalize_Pathname (Name : in String; Directory : in String) return String is -- Step 1 - normalize with GNAT.OS_Lib C1 : String := GNAT.OS_Lib.Normalize_Pathname (Name => Name, Directory => Directory, Resolve_Links => True, Case_Sensitive => True); C_Name : constant String := Name & Ada.Characters.Latin_1.NUL; subtype P_String is String (1 .. 
Interfaces.C_Streams.max_path_len + 1); P_Name : P_String := P_String'(others => Ada.Characters.Latin_1.NUL); use Ada.Characters.Handling; begin -- If C1 looks like it has a drive letter on the front if C1'Length >= 2 and then C1 (2) = ':' and then Is_Letter (C1 (1)) then -- then Normalize again using Interfaces.C_Streams, relative -- to CWD Interfaces.C_Streams.full_name (C_Name'Address, P_Name'Address); -- if THAT also looks like a drive letter on the front which -- is case-insensitively the same as the drive letter returned by -- GNAT.OS_Lib - for example if P_Name start with "d:" and C1 starts -- with "D:" ... if P_Name'Length >= 2 and then P_Name (2) = ':' and then Is_Letter (P_Name (1)) and then To_Lower (P_Name (1)) = To_Lower (C1 (1)) then -- ...THEN favour the one returned by C_Streams C1 (1) := P_Name (1); end if; end if; return C1; end Normalize_Pathname; #else -- Solaris, Linux, Mac OS X -- On all other platforms, just call-through to GNAT.OS_Lib function Normalize_Pathname (Name : in String; Directory : in String) return String is begin return GNAT.OS_Lib.Normalize_Pathname (Name => Name, Directory => Directory, Resolve_Links => True, Case_Sensitive => True); end Normalize_Pathname; #end if; function Examiner_Lib_Directory return E_Strings.T is DS : constant Character := GNAT.OS_Lib.Directory_Separator; Lib_Rel_Path : constant String := ".." & DS & "lib" & DS & "spark"; CN : constant String := Ada.Command_Line.Command_Name; EN : GNAT.OS_Lib.String_Access; Result : E_Strings.T; begin EN := GNAT.OS_Lib.Locate_Exec_On_Path (CN); if EN = null then Result := E_Strings.Copy_String (Str => Lib_Rel_Path); else declare PN : constant String := Normalize_Pathname (Name => GNAT.Directory_Operations.Dir_Name (EN.all) & DS & Lib_Rel_Path, Directory => ""); begin Result := E_Strings.Copy_String (Str => PN); end; end if; GNAT.OS_Lib.Free (EN); return Result; end Examiner_Lib_Directory; function Examiner_SPARK_Lib_Directory return E_Strings.T is DS : constant Character := GNAT.OS_Lib.Directory_Separator; Result : E_Strings.T; begin Result := Examiner_Lib_Directory; E_Strings.Append_String (E_Str => Result, Str => DS & "current"); return Result; end Examiner_SPARK_Lib_Directory; procedure Idempotent_Create_Subdirectory (Path : in E_Strings.T; Ok : out Boolean) is Name : constant String := E_Strings.Not_SPARK.Get_String (E_Str => Path); use GNAT.Directory_Operations; use GNAT.OS_Lib; use GNAT.IO_Aux; begin if File_Exists (Name) then if Is_Directory (Name) then -- A directory called Name already exists - do nothing, and indicate success Ok := True; else -- Something called Name exists, but it's not a directory - error Ok := False; end if; else Make_Dir (Name); Ok := File_Exists (Name) and then Is_Directory (Name); end if; exception when Directory_Error => Ok := False; end Idempotent_Create_Subdirectory; function Start_Of_Directory return E_Strings.T is begin return E_Strings.Empty_String; end Start_Of_Directory; function Directory_Separator return E_Strings.T is Return_Value : E_Strings.T := E_Strings.Empty_String; begin E_Strings.Append_Char (E_Str => Return_Value, Ch => GNAT.OS_Lib.Directory_Separator); return Return_Value; end Directory_Separator; procedure Append_End_Of_Path_If_Needed (D : in out E_Strings.T) is begin if E_Strings.Get_Element (E_Str => D, Pos => E_Strings.Get_Length (E_Str => D)) /= GNAT.OS_Lib.Directory_Separator then E_Strings.Append_Char (E_Str => D, Ch => GNAT.OS_Lib.Directory_Separator); end if; end Append_End_Of_Path_If_Needed; procedure Remove_End_Of_Path_If_Present (D 
: in out E_Strings.T) is begin if E_Strings.Get_Length (E_Str => D) >= 1 and then E_Strings.Get_Element (E_Str => D, Pos => E_Strings.Get_Length (E_Str => D)) = GNAT.OS_Lib.Directory_Separator then D := E_Strings.Section (E_Str => D, Start_Pos => 1, Length => E_Strings.Get_Length (E_Str => D) - 1); end if; end Remove_End_Of_Path_If_Present; function Is_Readable_File (F : E_Strings.T) return Boolean is Name : constant String := E_Strings.Not_SPARK.Get_String (E_Str => F); begin return Name'Length > 0 and then GNAT.OS_Lib.Is_Regular_File (Name) and then GNAT.OS_Lib.Is_Readable_File (Name); end Is_Readable_File; function Is_Directory (F : E_Strings.T) return Boolean is Name : constant String := E_Strings.Not_SPARK.Get_String (E_Str => F); begin return GNAT.IO_Aux.File_Exists (Name) and then GNAT.OS_Lib.Is_Directory (Name); end Is_Directory; function Base_Name (Path : E_Strings.T; Suffix : String) return E_Strings.T is Str : constant String := GNAT.Directory_Operations.Base_Name (SPARK.Ada.Strings.Unbounded.Not_SPARK.To_String (Source => E_Strings.To_Unbounded_String (E_Str => Path)), Suffix); begin return E_Strings.Copy_String (Str => Str); end Base_Name; procedure Find_Full_File_Name (File_Spec : in E_Strings.T; File_Status : out Typ_File_Spec_Status; Full_File_Name : out E_Strings.T) is -- FUNCTION : -- -- Given a file specification File_Spec, this procedure searches for the file -- and, if present, returns the full file name in Full_File_Name. The File_Status -- parameter returns one of the following values : -- -- File_Found - The file denoted by File_Spec was found amd the full filename -- returned in parameter Full_File_Name. -- File_Missing - The file denoted by File_Spec could not be found. -- -- NOTE: -- File_Invalid is NEVER returned. NFN : constant String := Normalize_Pathname (Name => E_Strings.Not_SPARK.Get_String (E_Str => File_Spec), Directory => ""); begin Full_File_Name := E_Strings.Copy_String (Str => NFN); if GNAT.OS_Lib.Is_Regular_File (NFN) then File_Status := File_Found; else File_Status := File_Missing; end if; end Find_Full_File_Name; function Just_File (Fn : E_Strings.T; Ext : Boolean) return E_Strings.T is RS : constant String := E_Strings.Not_SPARK.Get_String (E_Str => Fn); ES : constant String := GNAT.Directory_Operations.File_Extension (RS); -- Get the Base_Name, stripping any extension that may be there BS : constant String := GNAT.Directory_Operations.Base_Name (RS, ES); R : E_Strings.T := E_Strings.Copy_String (Str => BS); begin -- If required, restore the extension if Ext then E_Strings.Append_String (E_Str => R, Str => ES); end if; return R; end Just_File; procedure Check_Extension (Fn : in out E_Strings.T; Ext : in E_Strings.T) is S : constant String := E_Strings.Not_SPARK.Get_String (E_Str => Fn); E : constant String := GNAT.Directory_Operations.File_Extension (S); begin if E = "" then if S (S'Last) /= '.' 
then E_Strings.Append_String (E_Str => Fn, Str => "."); end if; E_Strings.Append_Examiner_String (E_Str1 => Fn, E_Str2 => Ext); end if; end Check_Extension; function Interpret_Relative (File_Name, Relative_To_Directory : E_Strings.T) return E_Strings.T is XS : constant String := E_Strings.Not_SPARK.Get_String (E_Str => File_Name); To_YS : constant String := E_Strings.Not_SPARK.Get_String (E_Str => Relative_To_Directory); RS : constant String := Normalize_Pathname (Name => XS, Directory => GNAT.Directory_Operations.Dir_Name (To_YS)); begin return E_Strings.Copy_String (Str => RS); end Interpret_Relative; function Same_File (F1, F2 : E_Strings.T) return Boolean is S1, S2 : E_Strings.T; Stat1, Stat2 : Typ_File_Spec_Status; begin Find_Full_File_Name (File_Spec => F1, File_Status => Stat1, Full_File_Name => S1); Find_Full_File_Name (File_Spec => F2, File_Status => Stat2, Full_File_Name => S2); return (Stat1 /= File_Invalid and Stat2 /= File_Invalid) and then #if Target = "Intel_WinNT" then -- Filenames are case-INSENSITIVE on Windows E_Strings.Eq_String (E_Str1 => S1, E_Str2 => S2); #else -- Solaris, Linux, Mac OS X -- Filenames are case-sensitive on Solaris E_Strings. Eq_CS_String (E_Str1 => S1, E_Str2 => S2); #end if; end Same_File; function Get_Relative_Path (Full_Path : E_Strings.T; Prefix : E_Strings.T) return E_Strings.T is Result : String := E_Strings.Not_SPARK.Get_String (Full_Path); function Compare_Char (C1 : Character; C2 : Character) return Boolean is C1_Tmp : Character := C1; C2_Tmp : Character := C2; begin if C1 = '\' then C1_Tmp := '/'; end if; if C2 = '\' then C2_Tmp := '/'; end if; #if Target = "Intel_WinNT" then return Ada.Characters.Handling.To_Lower (C1_Tmp) = Ada.Characters.Handling.To_Lower (C2_Tmp); #else -- Solaris, Linux, Mac OS X return C1_Tmp = C2_Tmp; #end if; end Compare_Char; begin for I in E_Strings.Lengths range 1 .. E_Strings.Get_Length (E_Str => Prefix) loop if Compare_Char (C1 => E_Strings.Get_Element (E_Str => Prefix, Pos => I), C2 => E_Strings.Get_Element (E_Str => Full_Path, Pos => I)) then Result (I) := ' '; else exit; end if; end loop; return E_Strings.Trim (E_Str => E_Strings.Copy_String (Str => Result)); end Get_Relative_Path; function End_Of_Line return E_Strings.T is S : String (1 .. 1); begin S (1) := Ada.Characters.Latin_1.LF; return E_Strings.Copy_String (Str => S); end End_Of_Line; begin -- initialization part null; end FileSystem; spark-2012.0.deb/examiner/lextokenmanager.ads0000644000175000017500000005757611753202336020152 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
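--
-- Note: Lex_String values are indices into the Examiner's string table (the
-- own variable State); the constants declared below fix the table positions
-- of predefined identifiers, attributes and pragma names.
--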
-- --============================================================================= with CommandLineData; with ExaminerConstants; with E_Strings; --# inherit Ada.Characters.Handling, --# Ada.Characters.Latin_1, --# CommandLineData, --# ExaminerConstants, --# E_Strings, --# SPARK_IO, --# Statistics, --# SystemErrors; package LexTokenManager --# own State; --# initializes State; is type Lex_String is private; Null_String : constant Lex_String; The_First_Token : constant Lex_String; -- Common Attributes Aft_Token : constant Lex_String; Base_Token : constant Lex_String; Delta_Token : constant Lex_String; Digits_Token : constant Lex_String; Emax_Token : constant Lex_String; Epsilon_Token : constant Lex_String; First_Token : constant Lex_String; Fore_Token : constant Lex_String; Large_Token : constant Lex_String; Last_Token : constant Lex_String; Length_Token : constant Lex_String; Machine_Emax_Token : constant Lex_String; Machine_Emin_Token : constant Lex_String; Machine_Mantissa_Token : constant Lex_String; Machine_Overflows_Token : constant Lex_String; Machine_Radix_Token : constant Lex_String; Machine_Rounds_Token : constant Lex_String; Mantissa_Token : constant Lex_String; Pos_Token : constant Lex_String; Pred_Token : constant Lex_String; Range_Token : constant Lex_String; Safe_Emax_Token : constant Lex_String; Safe_Large_Token : constant Lex_String; Safe_Small_Token : constant Lex_String; Size_Token : constant Lex_String; Small_Token : constant Lex_String; Succ_Token : constant Lex_String; Val_Token : constant Lex_String; -- Identifiers in Standard Left_Token : constant Lex_String; Right_Token : constant Lex_String; True_Token : constant Lex_String; False_Token : constant Lex_String; -- Numeric literals Zero_Value : constant Lex_String; One_Value : constant Lex_String; -- Index file Super_Index_Token : constant Lex_String; -- Pragmas Interface_Token : constant Lex_String; Import_Token : constant Lex_String; Link_Name_Token : constant Lex_String; External_Name_Token : constant Lex_String; Entity_Token : constant Lex_String; Convention_Token : constant Lex_String; Elaborate_Body_Token : constant Lex_String; -- SPARK95 Predefined packages Ada_Token : constant Lex_String; SPARK_Token : constant Lex_String; -- SPARK95 Attributes Denorm_Token : constant Lex_String; Model_Emin_Token : constant Lex_String; Model_Epsilon_Token : constant Lex_String; Model_Mantissa_Token : constant Lex_String; Model_Small_Token : constant Lex_String; Safe_First_Token : constant Lex_String; Safe_Last_Token : constant Lex_String; Component_Size_Token : constant Lex_String; Min_Token : constant Lex_String; Max_Token : constant Lex_String; Signed_Zeros_Token : constant Lex_String; Valid_Token : constant Lex_String; -- More SPARK95 Predefined packages Characters_Token : constant Lex_String; Latin_1_Token : constant Lex_String; -- More SPARK95 Attributes Adjacent_Token : constant Lex_String; Compose_Token : constant Lex_String; Copy_Sign_Token : constant Lex_String; Leading_Part_Token : constant Lex_String; Remainder_Token : constant Lex_String; Scaling_Token : constant Lex_String; Ceiling_Token : constant Lex_String; Exponent_Token : constant Lex_String; Floor_Token : constant Lex_String; Fraction_Token : constant Lex_String; Machine_Token : constant Lex_String; Model_Token : constant Lex_String; Rounding_Token : constant Lex_String; Truncation_Token : constant Lex_String; Unbiased_Rounding_Token : constant Lex_String; Address_Token : constant Lex_String; Modulus_Token : constant Lex_String; -- SPARK95 Proof Attributes 
Tail_Token : constant Lex_String; Append_Token : constant Lex_String; -- Package System and its constants and types. "Address" is already -- defined above. System_Token : constant Lex_String; Min_Int_Token : constant Lex_String; Max_Int_Token : constant Lex_String; Max_Binary_Modulus_Token : constant Lex_String; Max_Base_Digits_Token : constant Lex_String; Max_Digits_Token : constant Lex_String; Max_Mantissa_Token : constant Lex_String; Fine_Delta_Token : constant Lex_String; Null_Address_Token : constant Lex_String; Storage_Unit_Token : constant Lex_String; Word_Size_Token : constant Lex_String; Any_Priority_Token : constant Lex_String; Priority_Token : constant Lex_String; Interrupt_Priority_Token : constant Lex_String; Default_Priority_Token : constant Lex_String; -- RavenSPARK Pragmas, attributes and packages Atomic_Token : constant Lex_String; Real_Time_Token : constant Lex_String; Inherit_Token : constant Lex_String; Synchronous_Task_Control_Token : constant Lex_String; Attach_Handler_Token : constant Lex_String; Interrupt_Handler_Token : constant Lex_String; Interrupts_Token : constant Lex_String; Access_Token : constant Lex_String; Atomic_Components_Token : constant Lex_String; Volatile_Components_Token : constant Lex_String; Main_Program_Token : constant Lex_String; -- predefined generic units Unchecked_Conversion_Token : constant Lex_String; -- composite constant rule generation Rule_Token : constant Lex_String; No_Rule_Token : constant Lex_String; -- the 'Always_Valid token Always_Valid_Token : constant Lex_String; -- Ada0Y identifiers. -- -- "Assert" is a predefined pragma in Ada0Y. It's already -- a reserved word in SPARK, but we still -- need a token for it so the corresponding warning can -- be suppressed in the warning control file. -- -- There is a special production in the grammar to -- allow "pragma Assert ..." 
Assert_Token : constant Lex_String; -- "overriding" is a reserved word in -- Ada2005, but let's have a token for it so we -- can at least issue a warning Overriding_Token : constant Lex_String; -- Package System - more predefined identifiers Bit_Order_Token : constant Lex_String; High_Order_First_Token : constant Lex_String; Low_Order_First_Token : constant Lex_String; Default_Bit_Order_Token : constant Lex_String; -- More Pragmas All_Calls_Remote_Token : constant Lex_String; Asynchronous_Token : constant Lex_String; Controlled_Token : constant Lex_String; Discard_Names_Token : constant Lex_String; Elaborate_Token : constant Lex_String; Elaborate_All_Token : constant Lex_String; Export_Token : constant Lex_String; Inline_Token : constant Lex_String; Inspection_Point_Token : constant Lex_String; Linker_Options_Token : constant Lex_String; List_Token : constant Lex_String; Locking_Policy_Token : constant Lex_String; Normalize_Scalars_Token : constant Lex_String; Optimize_Token : constant Lex_String; Pack_Token : constant Lex_String; Page_Token : constant Lex_String; Preelaborate_Token : constant Lex_String; Pure_Token : constant Lex_String; Queueing_Policy_Token : constant Lex_String; Remote_Call_Interface_Token : constant Lex_String; Remote_Types_Token : constant Lex_String; Restrictions_Token : constant Lex_String; Reviewable_Token : constant Lex_String; Shared_Passive_Token : constant Lex_String; Storage_Size_Token : constant Lex_String; Suppress_Token : constant Lex_String; Task_Dispatching_Policy_Token : constant Lex_String; Volatile_Token : constant Lex_String; -- More Ada '83 Pragmas Memory_Size_Token : constant Lex_String; Shared_Token : constant Lex_String; System_Name_Token : constant Lex_String; -- Ada 2005 Mod_Token : constant Lex_String; Machine_Rounding_Token : constant Lex_String; Priority_Last_Token : constant Lex_String; Standard_Token : constant Lex_String; Integer_Token : constant Lex_String; Float_Token : constant Lex_String; Seconds_Count_Token : constant Lex_String; Interrupt_ID_Token : constant Lex_String; -- Dictionary types Universal_Integer_Token : constant Lex_String; Universal_Real_Token : constant Lex_String; Universal_Fixed_Token : constant Lex_String; Character_Token : constant Lex_String; Boolean_Token : constant Lex_String; Duration_Token : constant Lex_String; String_Token : constant Lex_String; Natural_Token : constant Lex_String; Positive_Token : constant Lex_String; -- More Ada95 predefined library packages Interfaces_Token : constant Lex_String; type Line_Numbers is new Integer range 0 .. Natural'Last; type Token_Position is record Start_Line_No : Line_Numbers; Start_Pos : E_Strings.Lengths; end record; Null_Token_Position : constant Token_Position := Token_Position'(Start_Line_No => 0, Start_Pos => 0); type Lex_Value is record Position : Token_Position; Token_Str : Lex_String; end record; -- Type to identify result of string comparisons type Str_Comp_Result is (Str_Eq, Str_First, Str_Second); -- Performs case insensitive comparison of two Lex_Strings and -- returns a value of type StrCompResult (see above) to indicate -- which string comes first when ordered alphabetically. -- If both strings are null it returns StrEqual. -- If just one string is null then the null string is considered -- to come first. -- If one string is of length n chars and the other string is -- longer but identical for the first n chars then the shorter -- string is considered to come first. 
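-- For example (illustrative, reading Str_First/Str_Second as naming which of
-- the two arguments orders first): comparing the Lex_Strings for "Max" and
-- "MAX" gives Str_Eq; comparing those for "Max" and "Max_Int" gives Str_First
-- (the shorter first argument orders first); comparing those for "Pred" and
-- "Pos" gives Str_Second.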
-- (See also CompStr and CompStrCaseSensitive in package body) function Lex_String_Case_Insensitive_Compare (Lex_Str1 : Lex_String; Lex_Str2 : Lex_String) return Str_Comp_Result; --# global in State; function Lex_String_Case_Sensitive_Compare (Lex_Str1 : Lex_String; Lex_Str2 : Lex_String) return Str_Comp_Result; --# global in State; function Comp_Str_Case_Insensitive (Str : E_Strings.T; Lex_Str : Lex_String) return Boolean; --# global in State; function Comp_Str_Case_Sensitive (Str : E_Strings.T; Lex_Str : Lex_String) return Boolean; --# global in State; procedure Insert_Examiner_String (Str : in E_Strings.T; Lex_Str : out Lex_String); --# global in out State; --# derives Lex_Str, --# State from State, --# Str; function Lex_String_To_String (Lex_Str : Lex_String) return E_Strings.T; --# global in State; function Is_Attribute_Token (Tok : Lex_String; Language : CommandLineData.Language_Profiles) return Boolean; --# global in State; procedure Initialise_String_Table; --# global in out State; --# derives State from *; procedure Report_Usage; --# global in State; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage from *, --# State; procedure Insert_Nat (N : in Natural; Lex_Str : out Lex_String); --# global in out State; --# derives Lex_Str, --# State from N, --# State; function Is_Standard_Token (Lex_Str : Lex_String) return Boolean; --# global in State; procedure Set_Last_Token; --# global in out State; --# derives State from *; private type Lex_String is range 0 .. ExaminerConstants.String_Table_Size; --# assert Lex_String'Base is Integer; Null_String : constant Lex_String := 0; The_First_Token : constant Lex_String := 1; Aft_Token : constant Lex_String := 1; Base_Token : constant Lex_String := 8; Delta_Token : constant Lex_String := 16; Digits_Token : constant Lex_String := 25; Emax_Token : constant Lex_String := 35; Epsilon_Token : constant Lex_String := 43; First_Token : constant Lex_String := 54; Fore_Token : constant Lex_String := 63; Large_Token : constant Lex_String := 71; Last_Token : constant Lex_String := 80; Length_Token : constant Lex_String := 88; Machine_Emax_Token : constant Lex_String := 98; Machine_Emin_Token : constant Lex_String := 114; Machine_Mantissa_Token : constant Lex_String := 130; Machine_Overflows_Token : constant Lex_String := 150; Machine_Radix_Token : constant Lex_String := 171; Machine_Rounds_Token : constant Lex_String := 188; Mantissa_Token : constant Lex_String := 206; Pos_Token : constant Lex_String := 218; Pred_Token : constant Lex_String := 225; Range_Token : constant Lex_String := 233; Safe_Emax_Token : constant Lex_String := 242; Safe_Large_Token : constant Lex_String := 255; Safe_Small_Token : constant Lex_String := 269; Size_Token : constant Lex_String := 283; Small_Token : constant Lex_String := 291; Succ_Token : constant Lex_String := 300; Val_Token : constant Lex_String := 308; Left_Token : constant Lex_String := 315; Right_Token : constant Lex_String := 323; True_Token : constant Lex_String := 332; False_Token : constant Lex_String := 340; Zero_Value : constant Lex_String := 349; One_Value : constant Lex_String := 354; Super_Index_Token : constant Lex_String := 359; Interface_Token : constant Lex_String := 373; Import_Token : constant Lex_String := 386; Link_Name_Token : constant Lex_String := 396; External_Name_Token : constant Lex_String := 409; Entity_Token : constant Lex_String := 426; Convention_Token : constant Lex_String := 436; Elaborate_Body_Token : constant Lex_String := 450; Ada_Token : constant Lex_String := 468; 
SPARK_Token : constant Lex_String := 475; Denorm_Token : constant Lex_String := 484; Model_Emin_Token : constant Lex_String := 494; Model_Epsilon_Token : constant Lex_String := 508; Model_Mantissa_Token : constant Lex_String := 525; Model_Small_Token : constant Lex_String := 543; Safe_First_Token : constant Lex_String := 558; Safe_Last_Token : constant Lex_String := 572; Component_Size_Token : constant Lex_String := 585; Min_Token : constant Lex_String := 603; Max_Token : constant Lex_String := 610; Signed_Zeros_Token : constant Lex_String := 617; Valid_Token : constant Lex_String := 633; Characters_Token : constant Lex_String := 642; Latin_1_Token : constant Lex_String := 656; Adjacent_Token : constant Lex_String := 667; Compose_Token : constant Lex_String := 679; Copy_Sign_Token : constant Lex_String := 690; Leading_Part_Token : constant Lex_String := 703; Remainder_Token : constant Lex_String := 719; Scaling_Token : constant Lex_String := 732; Ceiling_Token : constant Lex_String := 743; Exponent_Token : constant Lex_String := 754; Floor_Token : constant Lex_String := 766; Fraction_Token : constant Lex_String := 775; Machine_Token : constant Lex_String := 787; Model_Token : constant Lex_String := 798; Rounding_Token : constant Lex_String := 807; Truncation_Token : constant Lex_String := 819; Unbiased_Rounding_Token : constant Lex_String := 833; Address_Token : constant Lex_String := 854; Modulus_Token : constant Lex_String := 865; Tail_Token : constant Lex_String := 876; Append_Token : constant Lex_String := 884; -- System and its constants and types. "Address" is already -- defined above. System_Token : constant Lex_String := 894; Min_Int_Token : constant Lex_String := 904; Max_Int_Token : constant Lex_String := 915; Max_Binary_Modulus_Token : constant Lex_String := 926; Max_Base_Digits_Token : constant Lex_String := 948; Max_Digits_Token : constant Lex_String := 967; Max_Mantissa_Token : constant Lex_String := 981; Fine_Delta_Token : constant Lex_String := 997; Null_Address_Token : constant Lex_String := 1011; Storage_Unit_Token : constant Lex_String := 1027; Word_Size_Token : constant Lex_String := 1043; Any_Priority_Token : constant Lex_String := 1056; Priority_Token : constant Lex_String := 1072; Interrupt_Priority_Token : constant Lex_String := 1084; Default_Priority_Token : constant Lex_String := 1106; -- RavenSPARK Pragmas, attributes, and identifiers Atomic_Token : constant Lex_String := 1126; Real_Time_Token : constant Lex_String := 1136; Inherit_Token : constant Lex_String := 1149; Synchronous_Task_Control_Token : constant Lex_String := 1160; Attach_Handler_Token : constant Lex_String := 1188; Interrupt_Handler_Token : constant Lex_String := 1206; Interrupts_Token : constant Lex_String := 1227; Access_Token : constant Lex_String := 1241; Atomic_Components_Token : constant Lex_String := 1251; Volatile_Components_Token : constant Lex_String := 1272; Main_Program_Token : constant Lex_String := 1295; -- Ada0Y identifiers. 
Assert_Token : constant Lex_String := 1311; Overriding_Token : constant Lex_String := 1321; -- Predefined generics Unchecked_Conversion_Token : constant Lex_String := 1335; -- Composite constant rule generation Rule_Token : constant Lex_String := 1359; No_Rule_Token : constant Lex_String := 1367; -- the 'Always_Valid token Always_Valid_Token : constant Lex_String := 1377; Bit_Order_Token : constant Lex_String := 1393; High_Order_First_Token : constant Lex_String := 1406; Low_Order_First_Token : constant Lex_String := 1426; Default_Bit_Order_Token : constant Lex_String := 1445; -- More Pragmas All_Calls_Remote_Token : constant Lex_String := 1466; Asynchronous_Token : constant Lex_String := 1486; Controlled_Token : constant Lex_String := 1502; Discard_Names_Token : constant Lex_String := 1516; Elaborate_Token : constant Lex_String := 1533; Elaborate_All_Token : constant Lex_String := 1546; Export_Token : constant Lex_String := 1563; Inline_Token : constant Lex_String := 1573; Inspection_Point_Token : constant Lex_String := 1583; Linker_Options_Token : constant Lex_String := 1603; List_Token : constant Lex_String := 1621; Locking_Policy_Token : constant Lex_String := 1629; Normalize_Scalars_Token : constant Lex_String := 1647; Optimize_Token : constant Lex_String := 1668; Pack_Token : constant Lex_String := 1680; Page_Token : constant Lex_String := 1688; Preelaborate_Token : constant Lex_String := 1696; Pure_Token : constant Lex_String := 1712; Queueing_Policy_Token : constant Lex_String := 1720; Remote_Call_Interface_Token : constant Lex_String := 1739; Remote_Types_Token : constant Lex_String := 1764; Restrictions_Token : constant Lex_String := 1780; Reviewable_Token : constant Lex_String := 1796; Shared_Passive_Token : constant Lex_String := 1810; Storage_Size_Token : constant Lex_String := 1828; Suppress_Token : constant Lex_String := 1844; Task_Dispatching_Policy_Token : constant Lex_String := 1856; Volatile_Token : constant Lex_String := 1883; -- More Ada '83 Pragmas Memory_Size_Token : constant Lex_String := 1895; Shared_Token : constant Lex_String := 1910; System_Name_Token : constant Lex_String := 1920; -- Ada 2005 Mod attribute Mod_Token : constant Lex_String := 1935; Machine_Rounding_Token : constant Lex_String := 1942; Priority_Last_Token : constant Lex_String := 1962; Standard_Token : constant Lex_String := 1979; Integer_Token : constant Lex_String := 1991; Float_Token : constant Lex_String := 2002; Seconds_Count_Token : constant Lex_String := 2011; Interrupt_ID_Token : constant Lex_String := 2028; -- Dictionary types Universal_Integer_Token : constant Lex_String := 2044; Universal_Real_Token : constant Lex_String := 2065; Universal_Fixed_Token : constant Lex_String := 2083; Character_Token : constant Lex_String := 2102; Boolean_Token : constant Lex_String := 2115; Duration_Token : constant Lex_String := 2126; String_Token : constant Lex_String := 2138; Natural_Token : constant Lex_String := 2148; Positive_Token : constant Lex_String := 2159; -- More Ada95 predefined library packages Interfaces_Token : constant Lex_String := 2171; -- The next vacant token value is equal to the final token value above, -- plus the length of that token in characters, plus 4, so -- Interfaces_Token + Length("Interfaces") + 4 = 2171 + 10 + 4 = 2185 Next_Vacant_Token : constant Lex_String := 2185; -- Use this value for the new Token that you want to add, then -- update these comments for next time and the declaration of -- Next_Vacant_Token for next time. 
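-- Worked example (hypothetical token, for illustration only): if a further
-- token "Example" were added at value 2185, then the following vacant value
-- would be 2185 + Length("Example") + 4 = 2185 + 7 + 4 = 2196.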
-- See S.P0468.53.1 "Implementation and Testing of the Lexical -- Token Manager" end LexTokenManager; ././@LongLink0000000000000000000000000000016500000000000011567 Lustar rootrootspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_modular.adbspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaratio0000644000175000017500000001777011753202336033134 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SLI; separate (Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Full_Type_Declaration) procedure Wf_Modular (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Ident_Node : in STree.SyntaxNode; Dec_Loc : in LexTokenManager.Token_Position; The_Heap : in out Heap.HeapRecord) is Exp_Node : STree.SyntaxNode; Exp_Type : Exp_Record; Unwanted_Seq : SeqAlgebra.Seq; Modulus : LexTokenManager.Lex_String; Unused_Component_Data : ComponentManager.ComponentData; Type_Symbol : Dictionary.Symbol; System_Sym : Dictionary.Symbol; Max_Binary_Modulus_Sym : Dictionary.Symbol; Max_Binary_Modulus_Val : LexTokenManager.Lex_String; Result : Maths.Value; Unused : Maths.ErrorCode; Modulus_OK : Boolean; begin case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => ErrorHandler.Semantic_Error (Err_Num => 801, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); when CommandLineData.SPARK95_Onwards => -- Fetch Modulus, which is a simple_expression Exp_Node := Next_Sibling (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME Exp_Node = simple_expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.simple_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = simple_expression in Wf_Modular"); SeqAlgebra.CreateSeq (The_Heap, Unwanted_Seq); ComponentManager.Initialise (Unused_Component_Data); --# accept Flow, 10, Unused_Component_Data, "Expected ineffective assignment"; Walk_Expression_P.Walk_Expression (Exp_Node => Exp_Node, Scope => Scope, Type_Context => Dictionary.GetUnknownTypeMark, Context_Requires_Static => True, Ref_Var => Unwanted_Seq, Result => Exp_Type, Component_Data => Unused_Component_Data, The_Heap => The_Heap); --# end accept; SeqAlgebra.DisposeOfSeq (The_Heap, Unwanted_Seq); Maths.StorageRep (Exp_Type.Value, Modulus); if not (Dictionary.TypeIsInteger (Exp_Type.Type_Symbol) or else Dictionary.TypeIsModular (Exp_Type.Type_Symbol) or else Dictionary.IsUnknownTypeMark (Exp_Type.Type_Symbol)) then Modulus := 
LexTokenManager.Null_String; ErrorHandler.Semantic_Error (Err_Num => 38, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); end if; if not Exp_Type.Is_Static then Modulus := LexTokenManager.Null_String; ErrorHandler.Semantic_Error (Err_Num => 36, Reference => 1, Position => Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); elsif Exp_Type.Is_ARange then ErrorHandler.Semantic_Error (Err_Num => 114, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); else if Maths.IsAPositivePowerOf2 (Exp_Type.Value) then -- All is OK so far, so finally check the modulus against -- System.Max_Binary_Modulus System_Sym := Dictionary.LookupItem (Name => LexTokenManager.System_Token, Scope => Dictionary.GlobalScope, Context => Dictionary.ProgramContext, Full_Package_Name => False); -- The user may or may not have bothered to supply -- package System, so... if not Dictionary.Is_Null_Symbol (System_Sym) then -- Find System.Max_Binary_Modulus Max_Binary_Modulus_Sym := Dictionary.LookupSelectedItem (Prefix => System_Sym, Selector => LexTokenManager.Max_Binary_Modulus_Token, Scope => Dictionary.GetScope (System_Sym), Context => Dictionary.ProgramContext); -- Even if the user has supplied a package System, they might -- not have declared Max_Binary_Modulus, so again we have to guard... if not Dictionary.Is_Null_Symbol (Max_Binary_Modulus_Sym) then Max_Binary_Modulus_Val := Dictionary.Get_Value (The_Constant => Max_Binary_Modulus_Sym); --# accept Flow, 10, Unused, "Expected ineffective assignment"; Maths.LesserOrEqual (Exp_Type.Value, Maths.ValueRep (Max_Binary_Modulus_Val), Result, Unused); Maths.ValueToBool (Result, Modulus_OK, Unused); --# end accept; if not Modulus_OK then ErrorHandler.Semantic_Error (Err_Num => 783, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); end if; end if; end if; else Modulus := LexTokenManager.Null_String; ErrorHandler.Semantic_Error (Err_Num => 800, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); end if; end if; Dictionary.Add_Modular_Type (Name => Node_Lex_String (Node => Ident_Node), Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Dec_Loc, End_Position => Dec_Loc), Modulus => Modulus, Scope => Scope, Context => Dictionary.ProgramContext, The_Type => Type_Symbol); STree.Add_Node_Symbol (Node => Ident_Node, Sym => Type_Symbol); if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Symbol (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Ident_Node, Symbol => Type_Symbol, Is_Declaration => True); end if; end case; --# accept Flow, 33, Unused, "Expected to be neither referenced nor exported"; end Wf_Modular; spark-2012.0.deb/examiner/sem-wf_package_declaration-wf_package_specification-check_modes.adb0000644000175000017500000002601611753202336031373 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with LexTokenManager.Relation_Algebra.String; with LexTokenManager.Seq_Algebra; separate (Sem.Wf_Package_Declaration.Wf_Package_Specification) procedure Check_Modes (Node : in STree.SyntaxNode; Pack_Sym : in Dictionary.Symbol; The_Heap : in out Heap.HeapRecord) is Priv_Type_It, Subprog_It, Param_It : Dictionary.Iterator; Type_Sym, Subprog_Sym, Param_Sym : Dictionary.Symbol; Vis_Part_Rep_Node, Proc_Spec_Node : STree.SyntaxNode; Subprograms_To_Mark : Boolean; Current_Param_List : LexTokenManager.Seq_Algebra.Seq; The_Relation : LexTokenManager.Relation_Algebra.String.Relation; procedure Process_Procedure (Node : in STree.SyntaxNode; Param_List : in LexTokenManager.Seq_Algebra.Seq; The_Heap : in Heap.HeapRecord) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Param_List, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.formal_part; is It : STree.Iterator; Id_Node : STree.SyntaxNode; begin It := Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Id_Node := Get_Node (It => It); --# assert Syntax_Node_Type (Id_Node, STree.Table) = SP_Symbols.identifier and --# Id_Node = Get_Node (It); if LexTokenManager.Seq_Algebra.Is_Member (The_Heap => The_Heap, S => Param_List, Given_Value => Node_Lex_String (Node => Id_Node)) then ErrorHandler.Semantic_Error (Err_Num => 338, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Id_Node), Id_Str => Node_Lex_String (Node => Id_Node)); end if; It := STree.NextNode (It); end loop; end Process_Procedure; begin -- Check_Modes LexTokenManager.Relation_Algebra.String.Create_Relation (The_Heap => The_Heap, R => The_Relation); Subprograms_To_Mark := False; Priv_Type_It := Dictionary.First_Private_Type (The_Package => Pack_Sym); while not Dictionary.IsNullIterator (Priv_Type_It) loop Type_Sym := Dictionary.CurrentSymbol (Priv_Type_It); if Dictionary.Is_Declared (Item => Type_Sym) and then Dictionary.TypeIsScalar (Type_Sym) then -- we have a scalar private type which may affect subprog params Subprog_It := Dictionary.First_Visible_Subprogram (The_Package_Or_Type => Pack_Sym); while not Dictionary.IsNullIterator (Subprog_It) loop Subprog_Sym := Dictionary.CurrentSymbol (Subprog_It); Param_It := Dictionary.FirstSubprogramParameter (Subprog_Sym); while not Dictionary.IsNullIterator (Param_It) loop Param_Sym := Dictionary.CurrentSymbol (Param_It); if Dictionary.Types_Are_Equal (Left_Symbol => Dictionary.GetType (Param_Sym), Right_Symbol => Type_Sym, Full_Range_Subtype => False) and then Dictionary.GetSubprogramParameterMode (Param_Sym) = Dictionary.InOutMode and then 
not Dictionary.IsImport (Dictionary.IsAbstract, Subprog_Sym, Param_Sym) then Subprograms_To_Mark := True; LexTokenManager.Relation_Algebra.String.Insert_Pair (The_Heap => The_Heap, R => The_Relation, I => Dictionary.GetSimpleName (Subprog_Sym), J => Dictionary.GetSimpleName (Param_Sym)); end if; Param_It := Dictionary.NextSymbol (Param_It); end loop; Subprog_It := Dictionary.NextSymbol (Subprog_It); end loop; end if; Priv_Type_It := Dictionary.NextSymbol (Priv_Type_It); end loop; -- At this point we have created in SubprogList a data structure listing -- all the procedures made illegal by the private types' full declarations -- and for each of them a list of affected parameters. We now walk the -- syntax tree marking each parameter occurrence found. if Subprograms_To_Mark then Vis_Part_Rep_Node := Child_Node (Current_Node => Node); -- ASSUME Vis_Part_Rep_Node = visible_part_rep OR NULL while Syntax_Node_Type (Node => Vis_Part_Rep_Node) = SP_Symbols.visible_part_rep loop -- ASSUME Vis_Part_Rep_Node = visible_part_rep Proc_Spec_Node := Next_Sibling (Current_Node => Vis_Part_Rep_Node); -- ASSUME Proc_Spec_Node = basic_declarative_item OR private_type_declaration OR deferred_constant_declaration OR -- subprogram_declaration OR generic_subprogram_instantiation OR apragma OR -- renaming_declaration if Syntax_Node_Type (Node => Proc_Spec_Node) = SP_Symbols.subprogram_declaration then -- ASSUME Proc_Spec_Node = subprogram_declaration Proc_Spec_Node := Child_Node (Current_Node => Proc_Spec_Node); -- ASSUME Proc_Spec_Node = overriding_indicator OR procedure_specification OR function_specification OR -- proof_function_declaration if Syntax_Node_Type (Node => Proc_Spec_Node) = SP_Symbols.overriding_indicator then -- ASSUME Proc_Spec_Node = overriding_indicator Proc_Spec_Node := Next_Sibling (Current_Node => Proc_Spec_Node); elsif Syntax_Node_Type (Node => Proc_Spec_Node) /= SP_Symbols.function_specification and then Syntax_Node_Type (Node => Proc_Spec_Node) /= SP_Symbols.procedure_specification and then Syntax_Node_Type (Node => Proc_Spec_Node) /= SP_Symbols.proof_function_declaration then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Proc_Spec_Node = overriding_indicator OR procedure_specification OR " & "function_specification OR proof_function_declaration in Check_Modes"); end if; -- ASSUME Proc_Spec_Node = procedure_specification OR function_specification OR proof_function_declaration if Syntax_Node_Type (Node => Proc_Spec_Node) = SP_Symbols.procedure_specification then -- ASSUME Proc_Spec_Node = procedure_specification LexTokenManager.Relation_Algebra.String.Row_Extraction (The_Heap => The_Heap, R => The_Relation, Given_Index => Node_Lex_String (Node => Child_Node (Current_Node => Child_Node (Current_Node => Proc_Spec_Node))), S => Current_Param_List); if not LexTokenManager.Seq_Algebra.Is_Null_Seq (S => Current_Param_List) then Proc_Spec_Node := Next_Sibling (Current_Node => Child_Node (Current_Node => Proc_Spec_Node)); -- ASSUME Proc_Spec_Node = formal_part OR NULL if Syntax_Node_Type (Node => Proc_Spec_Node) = SP_Symbols.formal_part then Process_Procedure (Node => Proc_Spec_Node, Param_List => Current_Param_List, The_Heap => The_Heap); elsif Proc_Spec_Node /= STree.NullNode then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Proc_Spec_Node = formal_part OR NULL in Check_Modes"); end if; end if; elsif Syntax_Node_Type (Node => Proc_Spec_Node) /= SP_Symbols.function_specification and then Syntax_Node_Type (Node => 
Proc_Spec_Node) /= SP_Symbols.proof_function_declaration then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Proc_Spec_Node = procedure_specification OR " & "function_specification OR proof_function_declaration in Check_Modes"); end if; elsif Syntax_Node_Type (Node => Proc_Spec_Node) /= SP_Symbols.basic_declarative_item and then Syntax_Node_Type (Node => Proc_Spec_Node) /= SP_Symbols.private_type_declaration and then Syntax_Node_Type (Node => Proc_Spec_Node) /= SP_Symbols.deferred_constant_declaration and then Syntax_Node_Type (Node => Proc_Spec_Node) /= SP_Symbols.generic_subprogram_instantiation and then Syntax_Node_Type (Node => Proc_Spec_Node) /= SP_Symbols.apragma and then Syntax_Node_Type (Node => Proc_Spec_Node) /= SP_Symbols.renaming_declaration then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Proc_Spec_Node = basic_declarative_item OR private_type_declaration OR " & "deferred_constant_declaration OR subprogram_declaration OR generic_subprogram_instantiation OR " & "apragma OR renaming_declaration in Check_Modes"); end if; Vis_Part_Rep_Node := Child_Node (Current_Node => Vis_Part_Rep_Node); end loop; end if; end Check_Modes; spark-2012.0.deb/examiner/sem-in_package_initialization.adb0000644000175000017500000000235311753202336022676 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) function In_Package_Initialization (Scope : Dictionary.Scopes) return Boolean is begin return Dictionary.Get_Visibility (Scope => Scope) = Dictionary.Local and then Dictionary.IsPackage (Dictionary.GetEnclosingCompilationUnit (Scope)); end In_Package_Initialization; spark-2012.0.deb/examiner/relationalgebra.adb0000644000175000017500000010464511753202336020066 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. 
If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- The matrices required to represent binary relations -- associated with flow analysis are, in general, sparse. The implementation of -- the matrix uses a space-efficient but not necessarily computationally -- efficient representation. To improve computational efficiency the -- implementation provides a caching mechanism. -- Each element of the matrix is a pair of Natural numbers representing the -- row and column values. The values also act as the row and column indices. -- Each row of the matrix may be accessed by a RowLeader value and similarly -- each column via a ColLeader value. -- A matrix has the following general structure: -- -- Relation-------| |----| |----| |----| -- | \|/ | \|/ | \|/ | \|/ -- | ColLeader ColLeader ColLeader .... -- \|/ \|/ \|/ \|/ \|/ -- --RowLeader---->Pair------>Pair------>Pair------>... -- | \|/ \|/ \|/ \|/ -- ->RowLeader---->Pair------>Pair------>Pair------>... -- | \|/ \|/ \|/ \|/ -- ->RowLeader---->Pair------>Pair------>Pair------>... -- | \|/ \|/ \|/ \|/ -- ->RowLeader---->Pair------>Pair------>Pair------>... -- | \|/ \|/ \|/ \|/ -- ->RowLeader---->Pair------>Pair------>Pair------>... -- | \|/ \|/ \|/ \|/ -- -> ...--------->...-------->...------>...------->... -- -- From a Relation object the first row and column leaders are directly -- accessible and from each RowLeader and ColumnLeader the successive row or -- column leader respectively is directly accessible. -- From each row and column leader the first pair in the row or column is -- directly accessible. -- From a pair P it is possible to directly access the immediately adjacent -- pair of the row and column. The next pair in the row is notionally to the -- right of P and will have a higher column index than P, and the next pair in -- the column is notionally down from P and will have a higher row index than P. -- A pair is only included in the representation if there exists an element -- in the matrix. This allows the representation of a sparse matrix with -- minimal storage. As a consequence of this, row and column indices are not -- successive values of the natural numbers, so a given row or column may not -- exist. However, it is guaranteed that the rows and columns of the matrix -- representation are strictly increasing (there are no duplicates). package body RelationAlgebra is ------ Functions and operations for implementation of RelationAlgebra ------ function IsNullPair (P : Pair) return Boolean is begin return P = NullPair; end IsNullPair; -- Returns the row value of a matrix element (Pair). function RowValue (TheHeap : Heap.HeapRecord; P : Pair) return Natural is begin return Heap.AValue (TheHeap, Heap.Atom (P)); end RowValue; -- Returns the column value of a matrix element (Pair). function ColumnValue (TheHeap : Heap.HeapRecord; P : Pair) return Natural is begin return Heap.BValue (TheHeap, Heap.Atom (P)); end ColumnValue; -- Gets the next matrix element (Pair) in the row adjacent to Pair P. function RightSuccr (TheHeap : Heap.HeapRecord; P : Pair) return Pair is begin return Pair (Heap.APointer (TheHeap, Heap.Atom (P))); end RightSuccr; -- Gets the next matrix element (Pair) in the column adjacent to Pair P.
function DownSuccr (TheHeap : Heap.HeapRecord; P : Pair) return Pair is begin return Pair (Heap.BPointer (TheHeap, Heap.Atom (P))); end DownSuccr; -- Obtains the first row (RowLeader) of the relation R. function FirstRowLeader (TheHeap : Heap.HeapRecord; R : Relation) return RowLeader is begin return RowLeader (Heap.BPointer (TheHeap, Heap.Atom (R))); end FirstRowLeader; -- Obtains the succeeding row (RowLeader) from the given RowLeader L. function NextRowLeader (TheHeap : Heap.HeapRecord; L : RowLeader) return RowLeader is begin return RowLeader (Heap.BPointer (TheHeap, Heap.Atom (L))); end NextRowLeader; -- Obtains the first column (ColLeader) of the relation R. function FirstColLeader (TheHeap : Heap.HeapRecord; R : Relation) return ColLeader is begin return ColLeader (Heap.APointer (TheHeap, Heap.Atom (R))); end FirstColLeader; -- Obtains the succeeding column (ColLeader) from the given ColLeader L. function NextColLeader (TheHeap : Heap.HeapRecord; L : ColLeader) return ColLeader is begin return ColLeader (Heap.APointer (TheHeap, Heap.Atom (L))); end NextColLeader; -- Obtains the first matrix element (Pair) in the row specified by -- RowLeader L. function FirstInRow (TheHeap : Heap.HeapRecord; L : RowLeader) return Pair is begin return Pair (Heap.APointer (TheHeap, Heap.Atom (L))); end FirstInRow; -- Obtains the first matrix element (Pair) in the column specified by -- ColLeader L. function FirstInCol (TheHeap : Heap.HeapRecord; L : ColLeader) return Pair is begin return Pair (Heap.BPointer (TheHeap, Heap.Atom (L))); end FirstInCol; procedure CreateRelation (TheHeap : in out Heap.HeapRecord; R : out Relation) is A : Heap.Atom; begin Heap.CreateAtom (TheHeap, A); R := Relation (A); end CreateRelation; procedure DisposeOfRelation (TheHeap : in out Heap.HeapRecord; R : in Relation) is K, L : RowLeader; M, N : ColLeader; P, Q : Pair; begin K := FirstRowLeader (TheHeap, R); while K /= NullRowLdr loop P := FirstInRow (TheHeap, K); while P /= NullPair loop Q := RightSuccr (TheHeap, P); Heap.DisposeOfAtom (TheHeap, Heap.Atom (P)); P := Q; end loop; L := NextRowLeader (TheHeap, K); Heap.DisposeOfAtom (TheHeap, Heap.Atom (K)); K := L; end loop; M := FirstColLeader (TheHeap, R); while M /= NullColLdr loop N := NextColLeader (TheHeap, M); Heap.DisposeOfAtom (TheHeap, Heap.Atom (M)); M := N; end loop; Heap.DisposeOfAtom (TheHeap, Heap.Atom (R)); end DisposeOfRelation; procedure UpdateRight (TheHeap : in out Heap.HeapRecord; P, R : in Pair) is begin Heap.UpdateAPointer (TheHeap, Heap.Atom (P), Heap.Atom (R)); end UpdateRight; procedure UpdateDown (TheHeap : in out Heap.HeapRecord; P, D : in Pair) is begin Heap.UpdateBPointer (TheHeap, Heap.Atom (P), Heap.Atom (D)); end UpdateDown; function Relation_To_Atom (R : Relation) return Heap.Atom is begin return Heap.Atom (R); end Relation_To_Atom; function Pair_To_Atom (P : Pair) return Heap.Atom is begin return Heap.Atom (P); end Pair_To_Atom; function Atom_To_Pair (A : Heap.Atom) return Pair is begin return Pair (A); end Atom_To_Pair; function RowLeader_To_Atom (R : RowLeader) return Heap.Atom is begin return Heap.Atom (R); end RowLeader_To_Atom; function Atom_To_RowLeader (A : Heap.Atom) return RowLeader is begin return RowLeader (A); end Atom_To_RowLeader; function ColLeader_To_Atom (C : ColLeader) return Heap.Atom is begin return Heap.Atom (C); end ColLeader_To_Atom; function Atom_To_ColLeader (A : Heap.Atom) return ColLeader is begin return ColLeader (A); end Atom_To_ColLeader; -- Initialises the Cache from relation R and must be called prior
to its use. -- Once initialized a cache is associated with R and should not be used to -- access any other relation. procedure InitialiseCache (TheHeap : in Heap.HeapRecord; R : in Relation; Cache : out Caches) is RL : RowLeader; CL : ColLeader; begin RL := FirstRowLeader (TheHeap, R); CL := FirstColLeader (TheHeap, R); Cache := Caches'(Rtion => R, RowLdr => RL, ColLdr => CL, RowPair => FirstInRow (TheHeap, RL), ColPair => FirstInCol (TheHeap, CL)); end InitialiseCache; -- Returns the row index value of the Row_Leader L. function RowLdrIndex (TheHeap : Heap.HeapRecord; L : RowLeader) return Natural is begin return Heap.AValue (TheHeap, Heap.Atom (L)); end RowLdrIndex; -- Returns the column index value of the Col_Leader L. function ColLdrIndex (TheHeap : Heap.HeapRecord; L : ColLeader) return Natural is begin return Heap.BValue (TheHeap, Heap.Atom (L)); end ColLdrIndex; procedure Insert_Row_Leader (The_Heap : in out Heap.HeapRecord; R : in Relation; I : in Natural; Cache : in out Caches) is Row_Ldr, Last_Ldr : RowLeader; Ldr_Present : Boolean; Ldr_Index : Natural; procedure Create_Row_Leader (The_Heap : in out Heap.HeapRecord; P : in RowLeader; I : in Natural; L : out RowLeader) --# global in out Statistics.TableUsage; --# derives L from The_Heap & --# Statistics.TableUsage from *, --# The_Heap & --# The_Heap from *, --# I, --# P; is New_Atom : Heap.Atom; begin Heap.CreateAtom (TheHeap => The_Heap, NewAtom => New_Atom); Heap.UpdateAValue (TheHeap => The_Heap, A => New_Atom, Value => I); Heap.UpdateBPointer (TheHeap => The_Heap, A => New_Atom, Pointer => Heap.Atom (NextRowLeader (TheHeap => The_Heap, L => P))); Heap.UpdateBPointer (TheHeap => The_Heap, A => Heap.Atom (P), Pointer => New_Atom); L := RowLeader (New_Atom); end Create_Row_Leader; begin Row_Ldr := Cache.RowLdr; Last_Ldr := RowLeader (R); Ldr_Present := False; loop exit when Row_Ldr = NullRowLdr; Ldr_Index := RowLdrIndex (TheHeap => The_Heap, L => Row_Ldr); Ldr_Present := Ldr_Index = I; exit when Ldr_Index >= I; Last_Ldr := Row_Ldr; Row_Ldr := NextRowLeader (TheHeap => The_Heap, L => Row_Ldr); end loop; if not Ldr_Present then Create_Row_Leader (The_Heap => The_Heap, P => Last_Ldr, I => I, L => Row_Ldr); end if; if Row_Ldr /= Cache.RowLdr then Cache.RowLdr := Row_Ldr; Cache.RowPair := FirstInRow (TheHeap => The_Heap, L => Row_Ldr); end if; end Insert_Row_Leader; --------------------------exported procedure----------------------------- -- Inserts an element (Pair) specified by I and J into the matrix -- representing relation R. If row I or column J do not exist in the matrix -- they are created. The new Pair (I, J) is inserted into the matrix and -- the Cache is updated such that the current row is I and the current -- column is J and the current row and column elements refer to the new -- Pair (I, J). -- If the element (I, J) already exists in the matrix the operation has no -- effect on the matrix but the Cache is updated with the current row set -- to I, the current row and column elements set to the Pair (I, J) but -- the current column value is not changed --- Is this correct?? -- R must be non null. 
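-- A minimal usage sketch (for illustration; it mirrors InsertPair and AddRow
-- below, and assumes TheHeap has already been initialised by the caller):
--   CreateRelation (TheHeap, R);
--   InitialiseCache (TheHeap, R, Cache);
--   CachedInsertPair (TheHeap, R, 1, 2, Cache);  -- insert element (1, 2)
--   CachedInsertPair (TheHeap, R, 1, 3, Cache);  -- same row, so the cache
--                                                -- avoids rescanning the row
--                                                -- leaders from the start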
procedure CachedInsertPair (TheHeap : in out Heap.HeapRecord; R : in Relation; I, J : in Natural; Cache : in out Caches) --# global in out Statistics.TableUsage; --# derives Cache, --# Statistics.TableUsage, --# TheHeap from *, --# Cache, --# I, --# J, --# R, --# TheHeap; is CurrentPair, LastPair, NewPair : Pair; RowVal, ColVal : Natural; PairPresent : Boolean; procedure InsertColLeader (TheHeap : in out Heap.HeapRecord; R : in Relation; J : in Natural; Cache : in out Caches) --# global in out Statistics.TableUsage; --# derives Cache, --# TheHeap from Cache, --# J, --# R, --# TheHeap & --# Statistics.TableUsage from *, --# Cache, --# J, --# TheHeap; is ColLdr, LastLdr : ColLeader; LdrPresent : Boolean; LdrIndex : Natural; procedure CreateColLeader (TheHeap : in out Heap.HeapRecord; P : in ColLeader; J : in Natural; L : out ColLeader) --# global in out Statistics.TableUsage; --# derives L from TheHeap & --# Statistics.TableUsage from *, --# TheHeap & --# TheHeap from *, --# J, --# P; is NewAtom : Heap.Atom; begin Heap.CreateAtom (TheHeap, NewAtom); Heap.UpdateBValue (TheHeap, NewAtom, J); Heap.UpdateAPointer (TheHeap, NewAtom, Heap.Atom (NextColLeader (TheHeap, P))); Heap.UpdateAPointer (TheHeap, Heap.Atom (P), NewAtom); L := ColLeader (NewAtom); end CreateColLeader; begin ColLdr := Cache.ColLdr; LastLdr := ColLeader (R); LdrPresent := False; loop exit when ColLdr = NullColLdr; LdrIndex := ColLdrIndex (TheHeap, ColLdr); LdrPresent := LdrIndex = J; exit when LdrIndex >= J; LastLdr := ColLdr; ColLdr := NextColLeader (TheHeap, ColLdr); end loop; if not LdrPresent then CreateColLeader (TheHeap, LastLdr, J, ColLdr); end if; if ColLdr /= Cache.ColLdr then Cache.ColLdr := ColLdr; Cache.ColPair := FirstInCol (TheHeap, ColLdr); end if; end InsertColLeader; procedure CreatePair (TheHeap : in out Heap.HeapRecord; NewPair : out Pair; Row, Col : in Natural) --# global in out Statistics.TableUsage; --# derives NewPair from TheHeap & --# Statistics.TableUsage from *, --# TheHeap & --# TheHeap from *, --# Col, --# Row; is A : Heap.Atom; begin Heap.CreateAtom (TheHeap, A); Heap.UpdateAValue (TheHeap, A, Row); Heap.UpdateBValue (TheHeap, A, Col); NewPair := Pair (A); end CreatePair; begin Insert_Row_Leader (The_Heap => TheHeap, R => R, I => I, Cache => Cache); LastPair := Pair (Cache.RowLdr); CurrentPair := Cache.RowPair; PairPresent := False; loop exit when IsNullPair (CurrentPair); ColVal := ColumnValue (TheHeap, CurrentPair); PairPresent := ColVal = J; exit when ColVal >= J; LastPair := CurrentPair; CurrentPair := RightSuccr (TheHeap, CurrentPair); end loop; if PairPresent then Cache.RowPair := CurrentPair; Cache.ColPair := CurrentPair; else CreatePair (TheHeap, NewPair, I, J); UpdateRight (TheHeap, NewPair, CurrentPair); UpdateRight (TheHeap, LastPair, NewPair); InsertColLeader (TheHeap, R, J, Cache); LastPair := Pair (Cache.ColLdr); CurrentPair := Cache.ColPair; loop exit when IsNullPair (CurrentPair); RowVal := RowValue (TheHeap, CurrentPair); exit when RowVal > I; LastPair := CurrentPair; CurrentPair := DownSuccr (TheHeap, CurrentPair); end loop; UpdateDown (TheHeap, NewPair, CurrentPair); UpdateDown (TheHeap, LastPair, NewPair); Cache.RowPair := NewPair; Cache.ColPair := NewPair; end if; end CachedInsertPair; procedure InsertPair (TheHeap : in out Heap.HeapRecord; R : in Relation; I, J : in Natural) is Cache : Caches; begin InitialiseCache (TheHeap, R, Cache); -- we do not need the changed value of Cache in this case --# accept F, 10, Cache, "Cache unused here"; CachedInsertPair (TheHeap, R, I, J, 
Cache); --# end accept; end InsertPair; procedure AddRow (TheHeap : in out Heap.HeapRecord; R : in Relation; I : in Natural; S : in SeqAlgebra.Seq) is M : SeqAlgebra.MemberOfSeq; Cache : Caches; begin InitialiseCache (TheHeap, R, Cache); M := SeqAlgebra.FirstMember (TheHeap, S); loop exit when SeqAlgebra.IsNullMember (M); CachedInsertPair (TheHeap, R, I, SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => M), Cache); M := SeqAlgebra.NextMember (TheHeap, M); end loop; end AddRow; procedure AddCol (TheHeap : in out Heap.HeapRecord; R : in Relation; J : in Natural; S : in SeqAlgebra.Seq) is M : SeqAlgebra.MemberOfSeq; Cache : Caches; begin InitialiseCache (TheHeap, R, Cache); M := SeqAlgebra.FirstMember (TheHeap, S); loop exit when SeqAlgebra.IsNullMember (M); CachedInsertPair (TheHeap, R, SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => M), J, Cache); M := SeqAlgebra.NextMember (TheHeap, M); end loop; end AddCol; --------- Fundamental functions and operations of RelationAlgebra --------- function IsEmptyRow (TheHeap : Heap.HeapRecord; R : Relation; I : Natural) return Boolean is RowLdr : RowLeader; begin RowLdr := FirstRowLeader (TheHeap, R); while (RowLdr /= NullRowLdr) and then (RowLdrIndex (TheHeap, RowLdr) < I) loop RowLdr := NextRowLeader (TheHeap, RowLdr); end loop; return (RowLdr = NullRowLdr) or else (RowLdrIndex (TheHeap, RowLdr) /= I) or else (FirstInRow (TheHeap, RowLdr) = NullPair); end IsEmptyRow; function ColumnCount (TheHeap : Heap.HeapRecord; R : Relation; J : Natural) return Natural is Counter : Natural; ColLdr : ColLeader; Q : Pair; begin Counter := 0; ColLdr := FirstColLeader (TheHeap, R); while ColLdr /= NullColLdr loop exit when ColLdrIndex (TheHeap, ColLdr) >= J; ColLdr := NextColLeader (TheHeap, ColLdr); end loop; if ColLdrIndex (TheHeap, ColLdr) = J then Q := FirstInCol (TheHeap, ColLdr); while Q /= NullPair loop Counter := Counter + 1; Q := DownSuccr (TheHeap, Q); end loop; end if; return Counter; end ColumnCount; procedure ResetColumnCache (TheHeap : in Heap.HeapRecord; Cache : in out Caches) is begin Cache.ColLdr := FirstColLeader (TheHeap, Cache.Rtion); Cache.ColPair := FirstInCol (TheHeap, Cache.ColLdr); end ResetColumnCache; procedure RowRemoval (TheHeap : in out Heap.HeapRecord; R : in Relation; S : in SeqAlgebra.Seq; T : out Relation) is LocalT : Relation; P : Pair; RowIndex : Natural; RowLdr : RowLeader; Cache : Caches; begin CreateRelation (TheHeap, LocalT); InitialiseCache (TheHeap, LocalT, Cache); RowLdr := FirstRowLeader (TheHeap, R); while RowLdr /= NullRowLdr loop RowIndex := RowLdrIndex (TheHeap, RowLdr); if not SeqAlgebra.IsMember (TheHeap, S, RowIndex) then P := FirstInRow (TheHeap, RowLdr); while P /= NullPair loop CachedInsertPair (TheHeap, LocalT, RowIndex, ColumnValue (TheHeap, P), Cache); P := RightSuccr (TheHeap, P); end loop; end if; RowLdr := NextRowLeader (TheHeap, RowLdr); ResetColumnCache (TheHeap, Cache); end loop; T := LocalT; end RowRemoval; procedure RowExtraction (TheHeap : in out Heap.HeapRecord; R : in Relation; GivenIndex : in Natural; S : out SeqAlgebra.Seq) is RowIndex : Natural; RowLdr : RowLeader; RowFound : Boolean; LocalS : SeqAlgebra.Seq; LastS : SeqAlgebra.MemberOfSeq; P : Pair; begin SeqAlgebra.CreateSeq (TheHeap, LocalS); -- The optimisation using sequence operations -- BeforeFirstMember and AppendAfter is only permissible -- because Indices in a relation are ordered identically to the -- set ordering in s SeqAlgebra. 
This assumption is implementation -- dependent and should be eliminated when a more efficient representation -- of sets and relations is implemented. LastS := SeqAlgebra.BeforeFirstMember (LocalS); RowFound := False; RowLdr := FirstRowLeader (TheHeap, R); loop exit when RowLdr = NullRowLdr; RowIndex := RowLdrIndex (TheHeap, RowLdr); RowFound := (RowIndex = GivenIndex); exit when RowIndex >= GivenIndex; RowLdr := NextRowLeader (TheHeap, RowLdr); end loop; if RowFound then P := FirstInRow (TheHeap, RowLdr); loop exit when P = NullPair; -- The optimisation using sequence operations -- BeforeFirstMember and AppendAfter is only permissible -- because Indices in a relation are ordered identically to the -- set ordering in s SeqAlgebra. This assumption is implementation -- dependent and should be eliminated when a more efficient representation -- of sets and relations is implemented. SeqAlgebra.AppendAfter (TheHeap, LastS, ColumnValue (TheHeap, P)); P := RightSuccr (TheHeap, P); end loop; end if; S := LocalS; end RowExtraction; procedure ColExtraction (TheHeap : in out Heap.HeapRecord; R : in Relation; GivenIndex : in Natural; S : out SeqAlgebra.Seq) is ColIndex : Natural; ColLdr : ColLeader; ColFound : Boolean; LocalS : SeqAlgebra.Seq; LastS : SeqAlgebra.MemberOfSeq; P : Pair; begin SeqAlgebra.CreateSeq (TheHeap, LocalS); -- The optimisation using sequence operations -- BeforeFirstMember and AppendAfter is only permissible -- because Indices in a relation are ordered identically to the -- set ordering in s SeqAlgebra. This assumption is implementation -- dependent and should be eliminated when a more efficient representation -- of sets and relations is implemented. LastS := SeqAlgebra.BeforeFirstMember (LocalS); ColFound := False; ColLdr := FirstColLeader (TheHeap, R); loop exit when ColLdr = NullColLdr; ColIndex := ColLdrIndex (TheHeap, ColLdr); ColFound := (ColIndex = GivenIndex); exit when ColIndex >= GivenIndex; ColLdr := NextColLeader (TheHeap, ColLdr); end loop; if ColFound then P := FirstInCol (TheHeap, ColLdr); loop exit when P = NullPair; -- The optimisation using sequence operations -- BeforeFirstMember and AppendAfter is only permissible -- because Indices in a relation are ordered identically to the -- set ordering in s SeqAlgebra. This assumption is implementation -- dependent and should be eliminated when a more efficient representation -- of sets and relations is implemented. 
SeqAlgebra.AppendAfter (TheHeap, LastS, RowValue (TheHeap, P)); P := DownSuccr (TheHeap, P); end loop; end if; S := LocalS; end ColExtraction; procedure ExtractSubRelation (TheHeap : in out Heap.HeapRecord; R : in out Relation; S : in SeqAlgebra.Seq) is LocalR : Relation; P : Pair; ColIndex, RowIndex : Natural; RowLdr : RowLeader; Cache : Caches; begin CreateRelation (TheHeap, LocalR); InitialiseCache (TheHeap, LocalR, Cache); RowLdr := FirstRowLeader (TheHeap, R); loop exit when RowLdr = NullRowLdr; RowIndex := RowLdrIndex (TheHeap, RowLdr); if SeqAlgebra.IsMember (TheHeap, S, RowIndex) then P := FirstInRow (TheHeap, RowLdr); loop exit when P = NullPair; ColIndex := ColumnValue (TheHeap, P); if SeqAlgebra.IsMember (TheHeap, S, ColIndex) then CachedInsertPair (TheHeap, LocalR, RowIndex, ColIndex, Cache); end if; P := RightSuccr (TheHeap, P); end loop; end if; RowLdr := NextRowLeader (TheHeap, RowLdr); ResetColumnCache (TheHeap, Cache); end loop; DisposeOfRelation (TheHeap, R); R := LocalR; end ExtractSubRelation; procedure AddIdentity (TheHeap : in out Heap.HeapRecord; R : in Relation; S : in SeqAlgebra.Seq) is M : SeqAlgebra.MemberOfSeq; Cache : Caches; begin InitialiseCache (TheHeap, R, Cache); M := SeqAlgebra.FirstMember (TheHeap, S); loop exit when SeqAlgebra.IsNullMember (M); CachedInsertPair (TheHeap, R, SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => M), SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => M), Cache); M := SeqAlgebra.NextMember (TheHeap, M); end loop; end AddIdentity; procedure AugmentRelation (TheHeap : in out Heap.HeapRecord; A, B : in Relation) -- This procedure augments relation A by adding to it relation B. is P : Pair; RowIndex : Natural; RowLdr : RowLeader; Cache : Caches; begin InitialiseCache (TheHeap, A, Cache); RowLdr := FirstRowLeader (TheHeap, B); loop exit when RowLdr = NullRowLdr; RowIndex := RowLdrIndex (TheHeap, RowLdr); P := FirstInRow (TheHeap, RowLdr); loop exit when P = NullPair; CachedInsertPair (TheHeap, A, RowIndex, ColumnValue (TheHeap, P), Cache); P := RightSuccr (TheHeap, P); end loop; RowLdr := NextRowLeader (TheHeap, RowLdr); ResetColumnCache (TheHeap, Cache); end loop; end AugmentRelation; procedure Sum (TheHeap : in out Heap.HeapRecord; A, B : in Relation; C : out Relation) is LocalC : Relation; begin CreateRelation (TheHeap, LocalC); AugmentRelation (TheHeap, LocalC, A); AugmentRelation (TheHeap, LocalC, B); C := LocalC; end Sum; procedure Composition (TheHeap : in out Heap.HeapRecord; A, B : in Relation; C : out Relation) is LocalC : Relation; RowLdr : RowLeader; ColLdr : ColLeader; RowIndex : Natural; MatchFound : Boolean; P, Q : Pair; PColValue, QRowValue : Natural; Cache : Caches; begin CreateRelation (TheHeap, LocalC); InitialiseCache (TheHeap, LocalC, Cache); RowLdr := FirstRowLeader (TheHeap, A); while RowLdr /= NullRowLdr loop RowIndex := RowLdrIndex (TheHeap, RowLdr); ColLdr := FirstColLeader (TheHeap, B); loop exit when ColLdr = NullColLdr; P := FirstInRow (TheHeap, RowLdr); Q := FirstInCol (TheHeap, ColLdr); MatchFound := False; loop exit when (P = NullPair) or (Q = NullPair) or MatchFound; PColValue := ColumnValue (TheHeap, P); QRowValue := RowValue (TheHeap, Q); if PColValue < QRowValue then P := RightSuccr (TheHeap, P); elsif PColValue > QRowValue then Q := DownSuccr (TheHeap, Q); else MatchFound := True; end if; end loop; if MatchFound then CachedInsertPair (TheHeap, LocalC, RowIndex, ColLdrIndex (TheHeap, ColLdr), Cache); end if; ColLdr := NextColLeader (TheHeap, ColLdr); end loop; RowLdr := NextRowLeader 
(TheHeap, RowLdr); ResetColumnCache (TheHeap, Cache); end loop; C := LocalC; end Composition; procedure CloseRelation (TheHeap : in out Heap.HeapRecord; R : in Relation) is RowLdr : RowLeader; ColLdr : ColLeader; RowIndex, ColIndex : Natural; P, Q : Pair; PColValue, QRowValue : Natural; begin RowLdr := FirstRowLeader (TheHeap, R); ColLdr := FirstColLeader (TheHeap, R); loop exit when (RowLdr = NullRowLdr) or (ColLdr = NullColLdr); RowIndex := RowLdrIndex (TheHeap, RowLdr); ColIndex := ColLdrIndex (TheHeap, ColLdr); if RowIndex < ColIndex then RowLdr := NextRowLeader (TheHeap, RowLdr); elsif RowIndex > ColIndex then ColLdr := NextColLeader (TheHeap, ColLdr); else P := FirstInRow (TheHeap, RowLdr); loop exit when P = NullPair; PColValue := ColumnValue (TheHeap, P); if PColValue /= RowIndex then Q := FirstInCol (TheHeap, ColLdr); loop exit when Q = NullPair; QRowValue := RowValue (TheHeap, Q); if QRowValue /= ColIndex then InsertPair (TheHeap, R, QRowValue, PColValue); end if; Q := DownSuccr (TheHeap, Q); end loop; end if; P := RightSuccr (TheHeap, P); end loop; RowLdr := NextRowLeader (TheHeap, RowLdr); ColLdr := NextColLeader (TheHeap, ColLdr); end if; end loop; end CloseRelation; procedure CartesianProduct (TheHeap : in out Heap.HeapRecord; A, B : in SeqAlgebra.Seq; C : out Relation) is LocalC : Relation; M, N : SeqAlgebra.MemberOfSeq; ValueOfM : Natural; Cache : Caches; begin CreateRelation (TheHeap, LocalC); InitialiseCache (TheHeap, LocalC, Cache); M := SeqAlgebra.FirstMember (TheHeap, A); loop exit when SeqAlgebra.IsNullMember (M); ValueOfM := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => M); N := SeqAlgebra.FirstMember (TheHeap, B); loop exit when SeqAlgebra.IsNullMember (N); CachedInsertPair (TheHeap, LocalC, ValueOfM, SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => N), Cache); N := SeqAlgebra.NextMember (TheHeap, N); end loop; M := SeqAlgebra.NextMember (TheHeap, M); ResetColumnCache (TheHeap, Cache); end loop; C := LocalC; end CartesianProduct; end RelationAlgebra; spark-2012.0.deb/examiner/sparklex-lex-nextlex.adb0000644000175000017500000001650511753202336021034 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (SparkLex.Lex) procedure NextLex (Curr_Line : in out Line_Context; Token : out SP_Symbols.SP_Terminal) -- The Ada Lexis allows the class of a token to be determined by the -- first character in the text string representing it. 
Given that the first -- character of the string representing the token is at the current position -- in the line buffer, Curr_Line, NextLex determines the class of the token and -- directly recognises the token or calls a procedure to recognise that class -- of token. -- On exit Token is set to a value representing the token and the line buffer -- position is updated. is begin LineManager.Record_Curr_Pos (Curr_Line => Curr_Line); case E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos) is when 'a' .. 'z' | 'A' .. 'Z' | '$' => -- letter or '$' -- In Ada and SPARK, identifiers may not start with a '$' -- character, but these are used by GNATPREP to introduce -- symbols that might be replaced, and we do want -- SPARKFormat to be able to process these successfully in -- annotations. We therefore allow '$' to introduce an -- identifier here. Rejecting the illegal case (in the -- Examiner) is performed in SPARKLex.Lex GetIdent (Curr_Line => Curr_Line, Token => Token); when '0' .. '9' => -- digit GetNumber (Curr_Line => Curr_Line, Token => Token); when '-' => -- minus, comment or annotation_start HyphIntro (Curr_Line => Curr_Line, Token => Token); when '"' => -- string GetString (Curr_Line => Curr_Line, Token => Token); when ''' => -- apostrophe or character literal ApostIntro (Curr_Line => Curr_Line, Token => Token); when '<' => -- < << <-> LTIntro (Curr_Line => Curr_Line, Token => Token); when '=' => -- check for compound delimiters LineManager.Accept_Char (Curr_Line => Curr_Line); case E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos) is when '>' => Token := SP_Symbols.arrow; LineManager.Accept_Char (Curr_Line => Curr_Line); when others => Token := SP_Symbols.equals; end case; when '.' => -- point or double_dot LineManager.Accept_Char (Curr_Line => Curr_Line); case E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos) is when '.'
=> Token := SP_Symbols.double_dot; LineManager.Accept_Char (Curr_Line => Curr_Line); when others => Token := SP_Symbols.point; end case; when '*' => LineManager.Accept_Char (Curr_Line => Curr_Line); case E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos) is when '*' => Token := SP_Symbols.double_star; LineManager.Accept_Char (Curr_Line => Curr_Line); when others => Token := SP_Symbols.multiply; end case; when ':' => LineManager.Accept_Char (Curr_Line => Curr_Line); case E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos) is when '=' => Token := SP_Symbols.becomes; LineManager.Accept_Char (Curr_Line => Curr_Line); when others => Token := SP_Symbols.colon; end case; when '/' => LineManager.Accept_Char (Curr_Line => Curr_Line); case E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos) is when '=' => Token := SP_Symbols.not_equal; LineManager.Accept_Char (Curr_Line => Curr_Line); when others => Token := SP_Symbols.divide; end case; when '>' => LineManager.Accept_Char (Curr_Line => Curr_Line); case E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos) is when '=' => Token := SP_Symbols.greater_or_equal; LineManager.Accept_Char (Curr_Line => Curr_Line); when '>' => Token := SP_Symbols.right_label_paren; LineManager.Accept_Char (Curr_Line => Curr_Line); when others => Token := SP_Symbols.greater_than; end case; when '&' => -- simple delimiters Token := SP_Symbols.ampersand; LineManager.Accept_Char (Curr_Line => Curr_Line); when '(' => Token := SP_Symbols.left_paren; LineManager.Accept_Char (Curr_Line => Curr_Line); when ')' => Token := SP_Symbols.right_paren; LineManager.Accept_Char (Curr_Line => Curr_Line); when '+' => Token := SP_Symbols.plus; LineManager.Accept_Char (Curr_Line => Curr_Line); when ',' => Token := SP_Symbols.comma; LineManager.Accept_Char (Curr_Line => Curr_Line); when ';' => Token := SP_Symbols.semicolon; LineManager.Accept_Char (Curr_Line => Curr_Line); when '|' => Token := SP_Symbols.vertical_bar; LineManager.Accept_Char (Curr_Line => Curr_Line); when '[' => Token := SP_Symbols.square_open; LineManager.Accept_Char (Curr_Line => Curr_Line); when ']' => Token := SP_Symbols.square_close; LineManager.Accept_Char (Curr_Line => Curr_Line); when '~' => Token := SP_Symbols.tilde; LineManager.Accept_Char (Curr_Line => Curr_Line); when '%' => Token := SP_Symbols.percent; LineManager.Accept_Char (Curr_Line => Curr_Line); when End_Of_Text => Token := SP_Symbols.SPEND; when others => Token := SP_Symbols.illegal_token; LineManager.Accept_Char (Curr_Line => Curr_Line); end case; end NextLex; spark-2012.0.deb/examiner/dictionary-instantiate_subprogram_parameters.adb0000644000175000017500000003117211753202336026077 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Dictionary) procedure Instantiate_Subprogram_Parameters (Actual_Subprogram : in RawDict.Subprogram_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location) is function Substitute_Type (Possibly_Generic_Type : RawDict.Type_Info_Ref; The_Subprogram : RawDict.Subprogram_Info_Ref) return RawDict.Type_Info_Ref --# global in Dict; is Result : RawDict.Type_Info_Ref; begin if RawDict.Get_Type_Discriminant (Type_Mark => Possibly_Generic_Type) = Generic_Type_Item then Result := Actual_Of_Generic_Formal_Type (The_Generic_Formal_Type => Possibly_Generic_Type, Actual_Subprogram => The_Subprogram, Not_Found_Expected => True); else Result := Possibly_Generic_Type; end if; return Result; end Substitute_Type; -------------------------------------------------------------------------------- procedure Substitute_Parameters (Actual_Subprogram : in RawDict.Subprogram_Info_Ref) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Actual_Subprogram & --# SPARK_IO.File_Sys from *, --# Actual_Subprogram, --# Dict, --# LexTokenManager.State; is Generic_Parameter : RawDict.Subprogram_Parameter_Info_Ref; Type_Reference, Specification : LexTokenManager.Token_Position; begin Generic_Parameter := RawDict.Get_Subprogram_First_Parameter (The_Subprogram => RawDict.Get_Subprogram_Instantiation_Of (The_Subprogram => Actual_Subprogram)); while Generic_Parameter /= RawDict.Null_Subprogram_Parameter_Info_Ref loop Type_Reference := RawDict.Get_Symbol_Location (RawDict.Get_Type_Symbol (RawDict.Get_Subprogram_Parameter_Type (The_Subprogram_Parameter => Generic_Parameter))); Specification := RawDict.Get_Symbol_Location (RawDict.Get_Subprogram_Parameter_Symbol (Generic_Parameter)); -- create new parameter for instantiation Add_Subprogram_Parameter (Name => RawDict.Get_Subprogram_Parameter_Name (The_Subprogram_Parameter => Generic_Parameter), The_Subprogram => Actual_Subprogram, Type_Mark => Substitute_Type (Possibly_Generic_Type => RawDict.Get_Subprogram_Parameter_Type (The_Subprogram_Parameter => Generic_Parameter), The_Subprogram => Actual_Subprogram), Type_Reference => Location'(Start_Position => Type_Reference, End_Position => Type_Reference), Mode => RawDict.Get_Subprogram_Parameter_Mode (The_Subprogram_Parameter => Generic_Parameter), Comp_Unit => RawDict.Get_Symbol_Compilation_Unit (RawDict.Get_Subprogram_Parameter_Symbol (Generic_Parameter)), Specification => Location'(Start_Position => Specification, End_Position => Specification)); -- move on to next parameter Generic_Parameter := RawDict.Get_Next_Subprogram_Parameter (The_Subprogram_Parameter => Generic_Parameter); end loop; end Substitute_Parameters; -------------------------------------------------------------------------------- procedure Substitute_Globals (Actual_Subprogram : in RawDict.Subprogram_Info_Ref; Abstraction : in Abstractions) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Abstraction, --# Actual_Subprogram & --# SPARK_IO.File_Sys from *, --# Abstraction, --# Actual_Subprogram, --# Dict, --# LexTokenManager.State; is Generic_Global_Variable : RawDict.Global_Variable_Info_Ref; Actual_Global_Variable : 
RawDict.Global_Variable_Info_Ref; Specification : LexTokenManager.Token_Position; begin Generic_Global_Variable := RawDict.Get_Subprogram_First_Global_Variable (The_Subprogram => RawDict.Get_Subprogram_Instantiation_Of (The_Subprogram => Actual_Subprogram), Abstraction => Abstraction); while Generic_Global_Variable /= RawDict.Null_Global_Variable_Info_Ref loop Specification := RawDict.Get_Symbol_Location (RawDict.Get_Global_Variable_Symbol (Generic_Global_Variable)); --# accept Flow, 10, Actual_Global_Variable, "Expected ineffective assignment to OK"; Add_Subprogram_Global_Variable (Abstraction => Abstraction, The_Subprogram => Actual_Subprogram, The_Variable => RawDict.Get_Global_Variable_Variable (The_Global_Variable => Generic_Global_Variable), Mode => RawDict.Get_Global_Variable_Mode (The_Global_Variable => Generic_Global_Variable), Prefix_Needed => RawDict.Get_Global_Variable_Prefix_Needed (The_Global_Variable => Generic_Global_Variable), Comp_Unit => RawDict.Get_Symbol_Compilation_Unit (RawDict.Get_Global_Variable_Symbol (Generic_Global_Variable)), Variable_Reference => Location'(Start_Position => Specification, End_Position => Specification), The_Global_Variable => Actual_Global_Variable); --# end accept; Generic_Global_Variable := RawDict.Get_Next_Global_Variable (The_Global_Variable => Generic_Global_Variable); end loop; --# accept Flow, 33, Actual_Global_Variable, "Expected to be neither referenced nor exported"; end Substitute_Globals; -------------------------------------------------------------------------------- procedure Substitute_Derives (Actual_Subprogram : in RawDict.Subprogram_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors) --# global in LexTokenManager.State; --# in out Dict; --# in out SPARK_IO.File_Sys; --# derives Dict from *, --# Actual_Subprogram, --# Comp_Unit & --# SPARK_IO.File_Sys from *, --# Actual_Subprogram, --# Comp_Unit, --# Dict, --# LexTokenManager.State; is Export_It : Iterator; Dependency_It : Iterator; The_Generic_Export_Sym : Symbol; The_Actual_Export_Sym : Symbol; The_Import_Sym : Symbol; Generic_Subprogram : RawDict.Subprogram_Info_Ref; begin Generic_Subprogram := RawDict.Get_Subprogram_Instantiation_Of (The_Subprogram => Actual_Subprogram); Export_It := First_Subprogram_Export (The_Subprogram => Generic_Subprogram, Abstraction => IsAbstract); while not IsNullIterator (Export_It) loop The_Generic_Export_Sym := CurrentSymbol (Export_It); case RawDict.GetSymbolDiscriminant (The_Generic_Export_Sym) is when Variable_Symbol => Add_Subprogram_Export_Variable (The_Subprogram => Actual_Subprogram, Abstraction => IsAbstract, The_Export => RawDict.Get_Variable_Info_Ref (Item => The_Generic_Export_Sym), Export_Reference => Null_Location, Annotation => Null_Location); The_Actual_Export_Sym := The_Generic_Export_Sym; when Subprogram_Parameter_Symbol => Add_Subprogram_Export_Parameter (The_Subprogram => Actual_Subprogram, Abstraction => IsAbstract, The_Export => Actual_Of_Generic_Parameter (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => The_Generic_Export_Sym), Actual_Subprogram => Actual_Subprogram), Export_Reference => Null_Location, Annotation => Null_Location); The_Actual_Export_Sym := RawDict.Get_Subprogram_Parameter_Symbol (Actual_Of_Generic_Parameter (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => The_Generic_Export_Sym), Actual_Subprogram => Actual_Subprogram)); when others => The_Actual_Export_Sym := NullSymbol; SystemErrors.Fatal_Error (Sys_Err => 
SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Instantiate_Subprogram_Parameters.Substitute_Derives"); end case; Dependency_It := First_Subprogram_Dependency (The_Subprogram => Generic_Subprogram, Abstraction => IsAbstract, The_Export => The_Generic_Export_Sym); while not IsNullIterator (Dependency_It) loop The_Import_Sym := CurrentSymbol (Dependency_It); case RawDict.GetSymbolDiscriminant (The_Import_Sym) is when Variable_Symbol => null; when Subprogram_Parameter_Symbol => The_Import_Sym := RawDict.Get_Subprogram_Parameter_Symbol (Actual_Of_Generic_Parameter (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (The_Import_Sym), Actual_Subprogram => Actual_Subprogram)); when others => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Instantiate_Subprogram_Parameters.Substitute_Derives"); end case; Add_Subprogram_Dependency (Abstraction => IsAbstract, Comp_Unit => Comp_Unit, The_Subprogram => Actual_Subprogram, The_Export => The_Actual_Export_Sym, The_Import => The_Import_Sym, Import_Reference => Null_Location); Dependency_It := NextSymbol (Dependency_It); end loop; Export_It := NextSymbol (Export_It); end loop; end Substitute_Derives; begin -- Instantiate_Subprogram_Parameters if RawDict.Get_Subprogram_Return_Type (The_Subprogram => RawDict.Get_Subprogram_Instantiation_Of (The_Subprogram => Actual_Subprogram)) /= RawDict.Null_Type_Info_Ref then Add_Return_Type (The_Function => Actual_Subprogram, Type_Mark => Substitute_Type (Possibly_Generic_Type => RawDict.Get_Subprogram_Return_Type (The_Subprogram => RawDict.Get_Subprogram_Instantiation_Of (The_Subprogram => Actual_Subprogram)), The_Subprogram => Actual_Subprogram), Comp_Unit => Comp_Unit, Type_Reference => Declaration); end if; Substitute_Parameters (Actual_Subprogram => Actual_Subprogram); Substitute_Globals (Actual_Subprogram => Actual_Subprogram, Abstraction => IsAbstract); Substitute_Derives (Actual_Subprogram => Actual_Subprogram, Comp_Unit => Comp_Unit); end Instantiate_Subprogram_Parameters; ././@LongLink0000000000000000000000000000016400000000000011566 Lustar rootrootspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_record.adbspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaratio0000644000175000017500000004705711753202336033135 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
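--
-- Illustrative sketch of the kinds of record declaration checked by Wf_Record
-- below (type names are hypothetical, not taken from this source):
--
--    type R1 is record
--       F : Integer;              -- ordinary component: accepted
--    end record;
--
--    type R2 is record
--       null;                     -- untagged null record: rejected (error 834)
--    end record;
--
--    type R3 is tagged record     -- tagged null record: accepted, but only
--       null;                     -- when declared in a library package spec
--    end record;                  -- (error 828 otherwise); abstract tagged
--                                 -- records are currently rejected (error 820)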
-- --============================================================================= with SLI; separate (Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Full_Type_Declaration) procedure Wf_Record (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Ident_Node : in STree.SyntaxNode; Dec_Loc : in LexTokenManager.Token_Position; Extends : in Dictionary.Symbol; Private_Type_Being_Resolved : in Dictionary.Symbol) is Next_Node : STree.SyntaxNode; It : STree.Iterator; Record_Sym : Dictionary.Symbol; Is_Tagged, Is_Abstract : Boolean; Has_Fields : Boolean := False; ----------------------------------------------------------------- procedure Set_Tag_Status (Tag_Option_Node : in STree.SyntaxNode; Is_Tagged, Is_Abstract : out Boolean) --# global in STree.Table; --# derives Is_Abstract, --# Is_Tagged from STree.Table, --# Tag_Option_Node; --# pre Syntax_Node_Type (Tag_Option_Node, STree.Table) = SP_Symbols.non_abstract_tagged or --# Syntax_Node_Type (Tag_Option_Node, STree.Table) = SP_Symbols.abstract_tagged or --# Syntax_Node_Type (Tag_Option_Node, STree.Table) = SP_Symbols.non_tagged; is begin Is_Abstract := Syntax_Node_Type (Node => Tag_Option_Node) = SP_Symbols.abstract_tagged; Is_Tagged := Is_Abstract or else Syntax_Node_Type (Node => Tag_Option_Node) = SP_Symbols.non_abstract_tagged; end Set_Tag_Status; ----------------------------------------------------------------- procedure Wf_Component_Declaration (Node : in STree.SyntaxNode; Rec_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# LexTokenManager.State, --# Node, --# Rec_Sym, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Rec_Sym, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# STree.Table from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.component_declaration; --# post STree.Table = STree.Table~; is Type_Node, Ident_List_Node, Next_Node : STree.SyntaxNode; It : STree.Iterator; Type_Sym : Dictionary.Symbol; Component_Ident : LexTokenManager.Lex_String; Type_Pos : LexTokenManager.Token_Position; Type_Mark_Is_Not_Dotted : Boolean; Type_Mark_Simple_Name : LexTokenManager.Lex_String; ----------------------------------------------------------------- -- this function finds all the fields in a record including any -- non-private ones obtained by inheritance function Is_Existing_Field (Fieldname : LexTokenManager.Lex_String; The_Record : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; --# in LexTokenManager.State; is It : Dictionary.Iterator; Result : Boolean := False; Current_Record : Dictionary.Symbol; This_Package : Dictionary.Symbol; Current_Package : Dictionary.Symbol; function Is_Public_Descendant (Root_Package, The_Package : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is Current_Package : Dictionary.Symbol; Result : Boolean := False; begin Current_Package := The_Package; loop -- success case, we have got back to root all via public children if 
Dictionary.Packages_Are_Equal (Left_Symbol => Current_Package, Right_Symbol => Root_Package) then Result := True; exit; end if; -- fail case, private child found exit when Dictionary.IsPrivatePackage (Current_Package); Current_Package := Dictionary.GetPackageParent (Current_Package); exit when Dictionary.Is_Null_Symbol (Current_Package); end loop; return Result; end Is_Public_Descendant; begin -- Is_Existing_Field This_Package := Dictionary.GetLibraryPackage (Dictionary.GetScope (The_Record)); Current_Record := The_Record; loop Current_Package := Dictionary.GetLibraryPackage (Dictionary.GetScope (Current_Record)); if not Dictionary.TypeIsPrivate (TheType => Current_Record) or else Is_Public_Descendant (Root_Package => Current_Package, The_Package => This_Package) then -- not private so search for all fields It := Dictionary.FirstRecordComponent (Current_Record); while not Dictionary.IsNullIterator (It) loop if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Dictionary.GetSimpleName (Dictionary.CurrentSymbol (It)), Lex_Str2 => Fieldname) = LexTokenManager.Str_Eq then Result := True; exit; end if; It := Dictionary.NextSymbol (It); end loop; end if; exit when Result; Current_Record := Dictionary.GetRootOfExtendedType (Current_Record); exit when Dictionary.Is_Null_Symbol (Current_Record); end loop; return Result; end Is_Existing_Field; begin -- Wf_Component_Declaration Type_Node := Child_Node (Current_Node => Next_Sibling (Current_Node => Child_Node (Current_Node => Node))); -- ASSUME Type_Node = type_mark SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Type_Node) = SP_Symbols.type_mark, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Type_Node = type_mark in Wf_Component_Declaration"); -- If the indicated typemark is not dotted (e.g. just "T" but not "P.T") -- then an additional check is required. -- Two nodes below type_mark, there will either be a -- dotted_simple_name node (dotted case) or an identifier node (not dotted). Type_Mark_Is_Not_Dotted := Syntax_Node_Type (Node => Child_Node (Current_Node => Child_Node (Current_Node => Type_Node))) = SP_Symbols.identifier; if Type_Mark_Is_Not_Dotted then Type_Mark_Simple_Name := Node_Lex_String (Node => Child_Node (Current_Node => Child_Node (Current_Node => Type_Node))); else Type_Mark_Simple_Name := LexTokenManager.Null_String; end if; Type_Pos := Node_Position (Node => Type_Node); Wf_Type_Mark (Node => Type_Node, Current_Scope => Scope, Context => Dictionary.ProgramContext, Type_Sym => Type_Sym); if not Dictionary.IsUnknownTypeMark (Type_Sym) then if Dictionary.Types_Are_Equal (Left_Symbol => Type_Sym, Right_Symbol => Rec_Sym, Full_Range_Subtype => False) then -- Type of field is same type as the record type being declared. 
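-- An illustrative (hypothetical) declaration that is rejected at this point,
-- because the component's type is the record type being declared:
--    type R is record
--       Next : R;   -- component of the enclosing record type: error 751
--    end record;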
ErrorHandler.Semantic_Error (Err_Num => 751, Reference => ErrorHandler.No_Reference, Position => Type_Pos, Id_Str => Dictionary.GetSimpleName (Type_Sym)); elsif Dictionary.Is_Unconstrained_Array_Type_Mark (Type_Sym, Scope) then ErrorHandler.Semantic_Error (Err_Num => 39, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Type_Node), Id_Str => LexTokenManager.Null_String); end if; end if; Ident_List_Node := Child_Node (Current_Node => Node); -- ASSUME Ident_List_Node = identifier_list SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_List_Node) = SP_Symbols.identifier_list, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_List_Node = identifier_list in Wf_Component_Declaration"); It := Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Ident_List_Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.identifier and --# Next_Node = Get_Node (It); Component_Ident := Node_Lex_String (Node => Next_Node); -- if the TypeMark is not dotted, then we need to check for the -- illegal case of a record field name which attempts to override -- the name of an existing directly visible TypeMark, such as -- type R is record -- T : T; -- illegal -- end record; if Type_Mark_Is_Not_Dotted and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Component_Ident, Lex_Str2 => Type_Mark_Simple_Name) = LexTokenManager.Str_Eq then ErrorHandler.Semantic_Error (Err_Num => 757, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Type_Node), Id_Str => Component_Ident); elsif Is_Existing_Field (Fieldname => Component_Ident, The_Record => Rec_Sym) then -- catches a repeat within the declaration, which is an existing Examiner -- bug not to do with tagged types ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Component_Ident); elsif Dictionary.IsPredefinedSuspensionObjectType (Type_Sym) or else Dictionary.IsProtectedTypeMark (Type_Sym) then ErrorHandler.Semantic_Error (Err_Num => 906, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Type_Node), Id_Str => LexTokenManager.Null_String); else Dictionary.AddRecordComponent (Name => Component_Ident, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Next_Node), End_Position => Node_Position (Node => Next_Node)), TheRecordType => Rec_Sym, TheComponentType => Type_Sym, InheritedField => False, ComponentTypeReference => Dictionary.Location'(Start_Position => Type_Pos, End_Position => Type_Pos)); end if; It := STree.NextNode (It); end loop; end Wf_Component_Declaration; begin -- Wf_Record if Syntax_Node_Type (Node => Node) = SP_Symbols.record_type_definition then -- ASSUME Node = record_type_definition Next_Node := Child_Node (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME Next_Node = non_abstract_tagged OR abstract_tagged OR non_tagged SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Next_Node) = SP_Symbols.non_abstract_tagged or else Syntax_Node_Type (Node => Next_Node) = SP_Symbols.abstract_tagged or else Syntax_Node_Type (Node => Next_Node) = SP_Symbols.non_tagged, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = non_abstract_tagged OR abstract_tagged OR non_tagged in Wf_Record"); Set_Tag_Status (Tag_Option_Node
=> Next_Node, Is_Tagged => Is_Tagged, Is_Abstract => Is_Abstract); elsif Syntax_Node_Type (Node => Node) = SP_Symbols.record_type_extension then -- ASSUME Node = record_type_extension Is_Abstract := False; Is_Tagged := False; else Is_Abstract := False; Is_Tagged := False; end if; -- temporary prevention of use of abstract types if Is_Abstract then ErrorHandler.Semantic_Error (Err_Num => 820, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; -- tagged types can only be declared in library package specs if (Is_Tagged or else not Dictionary.Is_Null_Symbol (Extends)) and then (not Dictionary.Packages_Are_Equal (Left_Symbol => Dictionary.GetLibraryPackage (Scope), Right_Symbol => Dictionary.GetRegion (Scope)) or else not (Dictionary.Get_Visibility (Scope => Scope) = Dictionary.Visible or else Dictionary.Get_Visibility (Scope => Scope) = Dictionary.Privat)) then ErrorHandler.Semantic_Error (Err_Num => 828, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); elsif Is_Tagged and then (not Is_Private_Type_Resolution (Sym => Private_Type_Being_Resolved, Scope => Scope)) and then (Dictionary.PackageDeclaresTaggedType (Dictionary.GetRegion (Scope)) or else Dictionary.PackageExtendsAnotherPackage (Dictionary.GetRegion (Scope))) then -- illegal second root tagged type declaration ErrorHandler.Semantic_Error (Err_Num => 839, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); else -- either not tagged type or correctly declared tagged type Dictionary.Add_Record_Type (Name => Node_Lex_String (Node => Ident_Node), Is_Tagged_Type => Is_Tagged, Extends => Extends, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Dec_Loc, End_Position => Dec_Loc), Scope => Scope, Context => Dictionary.ProgramContext, The_Type => Record_Sym); STree.Add_Node_Symbol (Node => Ident_Node, Sym => Record_Sym); if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Symbol (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Ident_Node, Symbol => Record_Sym, Is_Declaration => True); end if; -- if Extends is not null then we need to add in the fields inherited -- from the root type if not Dictionary.Is_Null_Symbol (Extends) then Dictionary.AddRecordComponent (Name => LexTokenManager.Inherit_Token, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Dec_Loc, End_Position => Dec_Loc), TheRecordType => Record_Sym, TheComponentType => Extends, InheritedField => True, ComponentTypeReference => Dictionary.Location'(Start_Position => Dec_Loc, End_Position => Dec_Loc)); end if; -- search for components unaffected by addition of tag info. 
If the grammar -- is of the form "null record" then no components get found which is correct It := Find_First_Node (Node_Kind => SP_Symbols.component_declaration, From_Root => Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.component_declaration and --# Next_Node = Get_Node (It); Has_Fields := True; Wf_Component_Declaration (Node => Next_Node, Rec_Sym => Record_Sym, Scope => Scope); It := STree.NextNode (It); end loop; -- SPARK disallows null records unless they are tagged (and maybe abstract as well TBD) if not (Is_Tagged or else Has_Fields or else not Dictionary.Is_Null_Symbol (Extends)) then ErrorHandler.Semantic_Error (Err_Num => 834, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; end if; end Wf_Record; spark-2012.0.deb/examiner/sem-compunit-checkembedbodies.adb0000644000175000017500000001525711753202336022611 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
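--
-- Illustrative (hypothetical) case of the check performed by CheckEmbedBodies
-- below:
--
--    package body P is
--       package Q is          -- embedded package with no body or body stub
--          procedure Op;      -- visible subprogram that needs a body
--       end Q;                -- => semantic error 62 is reported
--    end P;
--
-- The same error is reported if such a package announces an initialized own
-- variable that is not in fact initialized at its declaration.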
-- --============================================================================= separate (Sem.CompUnit) procedure CheckEmbedBodies (Comp_Sym : in Dictionary.Symbol; Node_Pos : in LexTokenManager.Token_Position) is Pack_List : Dictionary.Iterator; Embedded_Pack_Sym : Dictionary.Symbol; Error : Boolean; -------------------------------------------------------------- procedure Check_For_Any_Declared_Subprograms (Embedded_Pack_Sym : in Dictionary.Symbol; Node_Pos : in LexTokenManager.Token_Position; Error_Found : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Embedded_Pack_Sym, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# SPARK_IO.File_Sys & --# Error_Found from Dictionary.Dict, --# Embedded_Pack_Sym; is function Subprog_Without_Body_Found (Subprogram_Iterator : Dictionary.Iterator) return Boolean --# global in Dictionary.Dict; is It : Dictionary.Iterator; Result : Boolean := False; begin It := Subprogram_Iterator; while not Dictionary.IsNullIterator (It) loop if not (Dictionary.HasBody (Dictionary.CurrentSymbol (It)) or else Dictionary.IsProofFunction (Dictionary.CurrentSymbol (It))) then Result := True; exit; end if; It := Dictionary.NextSymbol (It); end loop; return Result; end Subprog_Without_Body_Found; begin -- Check_For_Any_Declared_Subprograms Error_Found := False; if Subprog_Without_Body_Found (Subprogram_Iterator => Dictionary.First_Visible_Subprogram (The_Package_Or_Type => Embedded_Pack_Sym)) or else Subprog_Without_Body_Found (Subprogram_Iterator => Dictionary.First_Private_Subprogram (The_Package => Embedded_Pack_Sym)) then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 62, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (Embedded_Pack_Sym)); end if; end Check_For_Any_Declared_Subprograms; ------------------------------------------------------- procedure Check_Any_Own_Variables (Embedded_Pack_Sym : in Dictionary.Symbol; Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Embedded_Pack_Sym, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# SPARK_IO.File_Sys; is Own_Var_It : Dictionary.Iterator; Own_Var_Sym : Dictionary.Symbol; Found_Own_Variable_Not_Initialized_At_Declaration : Boolean := False; begin Own_Var_It := Dictionary.FirstInitializedOwnVariable (Embedded_Pack_Sym); while Own_Var_It /= Dictionary.NullIterator loop Own_Var_Sym := Dictionary.CurrentSymbol (Own_Var_It); Found_Own_Variable_Not_Initialized_At_Declaration := not Dictionary.Is_Declared (Item => Own_Var_Sym) -- not declared in spec at all or else not Dictionary.VariableIsInitialized (Own_Var_Sym); -- declared but not initialized exit when Found_Own_Variable_Not_Initialized_At_Declaration; Own_Var_It := Dictionary.NextSymbol (Own_Var_It); end loop; if Found_Own_Variable_Not_Initialized_At_Declaration then ErrorHandler.Semantic_Error (Err_Num => 62, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (Embedded_Pack_Sym)); end if; end 
Check_Any_Own_Variables; begin -- CheckEmbedBodies Pack_List := Dictionary.First_Embedded_Package (Compilation_Unit => Comp_Sym); while not Dictionary.IsNullIterator (Pack_List) loop Embedded_Pack_Sym := Dictionary.CurrentSymbol (Pack_List); if not (Dictionary.HasBody (Embedded_Pack_Sym) or else Dictionary.HasBodyStub (Embedded_Pack_Sym)) then Check_For_Any_Declared_Subprograms (Embedded_Pack_Sym => Embedded_Pack_Sym, Node_Pos => Node_Pos, Error_Found => Error); if not Error then Check_Any_Own_Variables (Embedded_Pack_Sym => Embedded_Pack_Sym, Node_Pos => Node_Pos); end if; end if; Pack_List := Dictionary.NextSymbol (Pack_List); end loop; end CheckEmbedBodies; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_term.adb0000644000175000017500000002016011753202336022677 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ---------------------------------------------------------------------------- -- Overview: Called to check validity of a -- term node. 
Replaces calls to StaticTerm, BaseTypeTerm and CheckTypeTerm ---------------------------------------------------------------------------- separate (Sem.Walk_Expression_P) procedure Wf_Term (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; T_Stack : in Type_Context_Stack.T_Stack_Type; Context_Requires_Static : in Boolean) is Left, Right, Result : Sem.Exp_Record; Op_Node : STree.SyntaxNode; Operator : SP_Symbols.SP_Symbol; begin Op_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Op_Node = term OR factor OR annotation_term OR annotation_factor if STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.term or else STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.annotation_term then -- ASSUME Op_Node = term OR annotation_term Op_Node := STree.Child_Node (Current_Node => STree.Next_Sibling (Current_Node => Op_Node)); -- ASSUME Op_Node = multiply OR divide OR RWmod OR RWrem SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.multiply or else STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.divide or else STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.RWmod or else STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.RWrem, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Op_Node = multiply OR divide OR RWmod OR RWrem in Wf_Term"); -- multiplying_operator exists Operator := STree.Syntax_Node_Type (Node => Op_Node); Exp_Stack.Pop (Item => Right, Stack => E_Stack); Exp_Stack.Pop (Item => Left, Stack => E_Stack); Result := Null_Type_Record; -- safety: we may not set all fields below -- do static checks first Result.Is_Constant := Left.Is_Constant and then Right.Is_Constant; Result.Is_Static := Left.Is_Static and then Right.Is_Static; Result.Has_Operators := True; if Left.Is_ARange or else Right.Is_ARange then Result := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 90, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Op_Node), Id_Str => LexTokenManager.Null_String); else -- neither are ranges -- now do type compatibility and operator visibility checks Check_Binary_Operator (Operator => Operator, Left => Left, Right => Right, Scope => Scope, T_Stack => T_Stack, Op_Pos => STree.Node_Position (Node => Op_Node), Left_Pos => STree.Node_Position (Node => STree.Child_Node (Current_Node => Node)), Right_Pos => STree.Node_Position (Node => STree.Next_Sibling (Current_Node => STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node)))), Convert => True, Is_Annotation => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_term, Result => Result); -- Seed Op_Node with type to aid selection of operator in VCG STree.Add_Node_Symbol (Node => Op_Node, Sym => Result.Type_Symbol); Calc_Binary_Operator (Node_Pos => STree.Node_Position (Node => Node), Operator => Operator, Left_Val => Left.Value, Right_Val => Right.Value, Is_Annotation => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_term, Result => Result); if STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => Node)) = SP_Symbols.term then -- ASSUME STree.Parent_Node (Current_Node => Node) = term ---------------------------------------------------------------- -- If the parent is also a term, then we must have -- an unparenthesized expression with two multiplying operators, -- such as A * B * C -- -- Here, we issue warning 302 to warn of potential evaluation -- order dependency. 
-- -- We can reduce false-alarm rate here by suppressing the -- warning in two specific cases: -- a) If the sub-expression under consideration is static -- AND the expression as a whole appears in a context -- that requires a static expression. Example: a type -- declaration such as -- type T is range B * 2 / 3 .. 10; -- or -- b) A modular-typed expression where the two operators -- under consideration are both the same and -- commutative. For example: -- A := A * B * C; -- where A, B, and C are all of the same modular -- (sub-)type. -- -- The same logic is used in wf_simple_expression for -- binary adding operators. ---------------------------------------------------------------- if (Context_Requires_Static and then Result.Is_Static) or else (Dictionary.TypeIsModular (Result.Type_Symbol) and then Ops_Are_Same_And_Commutative (Operator, STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => STree.Next_Sibling (Current_Node => Node))))) then null; else ErrorHandler.Semantic_Warning (Err_Num => 302, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; end if; end if; Result.Errors_In_Expression := Result.Errors_In_Expression or else Left.Errors_In_Expression or else Right.Errors_In_Expression; -- OtherSymbol may carry a function symbol in the case of uses of unchecked_conversion. -- This symbol is used (by Wf_Assign) to convey information to the VCG to suppress -- checks when an unchecked_conversion is assigned to something of the same subtype. -- We do not want this mechanism if the unchecked_conversion is used in any other context -- than a direct assignment. Therefore we clear OtherSymbol here: Result.Other_Symbol := Dictionary.NullSymbol; Exp_Stack.Push (X => Result, Stack => E_Stack); elsif STree.Syntax_Node_Type (Node => Op_Node) /= SP_Symbols.factor and then STree.Syntax_Node_Type (Node => Op_Node) /= SP_Symbols.annotation_factor then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Op_Node = term OR factor OR annotation_term OR annotation_factor in Wf_Term"); end if; end Wf_Term; spark-2012.0.deb/examiner/sem-walk_expression_p-check_binary_operator.adb0000644000175000017500000002465511753202336025601 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license.
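--
-- Illustrative (hypothetical) operand combinations and the diagnostics
-- produced by the checks in Check_Binary_Operator below:
--    Int_Var + Float_Var   -- no "+" defined between the two root types: error 35
--    Float_A = Float_B     -- equality between real operands: warning 308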
-- --============================================================================= separate (Sem.Walk_Expression_P) procedure Check_Binary_Operator (Operator : in SP_Symbols.SP_Symbol; Left : in Sem.Exp_Record; Right : in Sem.Exp_Record; Scope : in Dictionary.Scopes; T_Stack : in Type_Context_Stack.T_Stack_Type; Op_Pos : in LexTokenManager.Token_Position; Left_Pos : in LexTokenManager.Token_Position; Right_Pos : in LexTokenManager.Token_Position; Convert : in Boolean; Is_Annotation : in Boolean; Result : in out Sem.Exp_Record) is Left_Type, Right_Type, Return_Type : Dictionary.Symbol; ------------------------------------------------------------- function Mixed_Type_Mult_Or_Div (Op : SP_Symbols.SP_Symbol; Left_Type, Right_Type : Dictionary.Symbol; Scope : Dictionary.Scopes) return Boolean --# global in CommandLineData.Content; --# in Dictionary.Dict; is begin return (Op = SP_Symbols.multiply or else Op = SP_Symbols.divide) and then (Dictionary.IsFixedPointTypeMark (Right_Type, Scope) or else Dictionary.IsFixedPointTypeMark (Left_Type, Scope) or else (CommandLineData.Ravenscar_Selected and then (Dictionary.IsPredefinedTimeSpanType (Left_Type) or else Dictionary.IsPredefinedTimeSpanType (Right_Type)))); end Mixed_Type_Mult_Or_Div; ------------------------------------------------------------- procedure Hetero_Impl_Type_Conv (Left_Type, Right_Type : in out Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# derives Left_Type, --# Right_Type from CommandLineData.Content, --# Dictionary.Dict, --# Left_Type, --# Right_Type, --# Scope; is begin if Dictionary.IsUniversalIntegerType (Left_Type) and then (Dictionary.IsFixedPointTypeMark (Right_Type, Scope) or else (CommandLineData.Ravenscar_Selected and then Dictionary.IsPredefinedTimeType (Right_Type))) then Left_Type := Dictionary.GetPredefinedIntegerType; elsif Dictionary.IsUniversalIntegerType (Right_Type) and then (Dictionary.IsFixedPointTypeMark (Left_Type, Scope) or else (CommandLineData.Ravenscar_Selected and then Dictionary.IsPredefinedTimeType (Left_Type))) then Right_Type := Dictionary.GetPredefinedIntegerType; elsif CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83 and then Dictionary.IsUniversalRealType (Right_Type) and then Dictionary.IsFixedPointTypeMark (Left_Type, Scope) then Right_Type := Dictionary.GetUniversalFixedType; elsif CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83 and then Dictionary.IsUniversalRealType (Left_Type) and then Dictionary.IsFixedPointTypeMark (Right_Type, Scope) then Left_Type := Dictionary.GetUniversalFixedType; end if; end Hetero_Impl_Type_Conv; ----------------------------------------------------------------- procedure Homo_Impl_Type_Conv (Operator : in SP_Symbols.SP_Symbol; Left_Type, Right_Type : in out Dictionary.Symbol; Left_Val : in Maths.Value; Right_Val : in Maths.Value; Left_Has_Operators : in Boolean; Right_Has_Operators : in Boolean; Left_Pos : in LexTokenManager.Token_Position; Right_Pos : in LexTokenManager.Token_Position; Is_Annotation : in Boolean; T_Stack : in Type_Context_Stack.T_Stack_Type; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Is_Annotation, --# Left_Has_Operators, --# Left_Pos, --# 
Left_Type, --# Left_Val, --# LexTokenManager.State, --# Operator, --# Right_Has_Operators, --# Right_Pos, --# Right_Type, --# Right_Val, --# Scope, --# SPARK_IO.File_Sys & --# Left_Type from *, --# Dictionary.Dict, --# Left_Has_Operators, --# Operator, --# Right_Type, --# Scope, --# T_Stack & --# Right_Type from *, --# Dictionary.Dict, --# Left_Type, --# Operator, --# Right_Has_Operators, --# Scope, --# T_Stack; --# pre Type_Context_Stack.Stack_Is_Valid (T_Stack); is separate; begin -- Check_Binary_Operator Left_Type := Dictionary.GetRootType (Left.Type_Symbol); Right_Type := Dictionary.GetRootType (Right.Type_Symbol); -- suppress type conversion in case of fixed point * or / if Convert then if Mixed_Type_Mult_Or_Div (Op => Operator, Left_Type => Left_Type, Right_Type => Right_Type, Scope => Scope) then Hetero_Impl_Type_Conv (Left_Type => Left_Type, Right_Type => Right_Type, Scope => Scope); else Homo_Impl_Type_Conv (Operator => Operator, Left_Type => Left_Type, Right_Type => Right_Type, Left_Val => Left.Value, Right_Val => Right.Value, Left_Has_Operators => Left.Has_Operators, Right_Has_Operators => Right.Has_Operators, Left_Pos => Left_Pos, Right_Pos => Right_Pos, Is_Annotation => Is_Annotation, T_Stack => T_Stack, Scope => Scope); end if; end if; Return_Type := Dictionary.Get_Binary_Operator_Type (Name => Operator, Left => Left_Type, Right => Right_Type); if Dictionary.Is_Null_Symbol (Return_Type) then Result := Sem.Unknown_Type_Record; if Dictionary.IsUniversalIntegerType (Left_Type) and then Dictionary.IsModularTypeMark (Right_Type, Scope) then ErrorHandler.Semantic_Error_Sym (Err_Num => 804, Reference => ErrorHandler.No_Reference, Position => Op_Pos, Sym => Right_Type, Scope => Scope); elsif Dictionary.IsUniversalIntegerType (Right_Type) and then Dictionary.IsModularTypeMark (Left_Type, Scope) then ErrorHandler.Semantic_Error_Sym (Err_Num => 805, Reference => ErrorHandler.No_Reference, Position => Op_Pos, Sym => Left_Type, Scope => Scope); else ErrorHandler.Semantic_Error_Sym2 (Err_Num => 35, Reference => ErrorHandler.No_Reference, Position => Op_Pos, Sym => Left_Type, Sym2 => Right_Type, Scope => Scope); end if; elsif not Is_Annotation and then not Dictionary.BinaryOperatorIsVisible (Operator, Left_Type, Right_Type, Scope) then Result := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 309, Reference => ErrorHandler.No_Reference, Position => Op_Pos, Id_Str => LexTokenManager.Null_String); else -- check whether equality of floats is being used if not Is_Annotation and then (Operator = SP_Symbols.equals or else Operator = SP_Symbols.not_equal) and then (Dictionary.ContainsFloat (Left_Type) or else Dictionary.ContainsFloat (Right_Type)) then ErrorHandler.Semantic_Warning (Err_Num => 308, Position => Op_Pos, Id_Str => LexTokenManager.Null_String); end if; Result.Type_Symbol := Return_Type; end if; end Check_Binary_Operator; spark-2012.0.deb/examiner/sem-assignment_check.adb0000644000175000017500000001510411753202336021011 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- This procedure checks assignment compatibility given a target type and -- a record from the expression stack. It checks type mismatches, use of -- unqualified string literals, attempts to assign unconstrained objects -- and assignement of arrays where the bounds do not match. If any of these -- checks fail the stack record is changed to the Unknown_Type_Record. -- Scalars are also checked for Constraint_Error. If this check fails, the -- value field of the stack record is changed to Maths.NoValue. separate (Sem) procedure Assignment_Check (Position : in LexTokenManager.Token_Position; Scope : in Dictionary.Scopes; Target_Type : in Dictionary.Symbol; Exp_Result : in out Exp_Record) is Exp_Value : Maths.Value; Store_Rep : LexTokenManager.Lex_String; procedure Raise_Error (Err_Num : in Natural; Ref : in Natural; Position : in LexTokenManager.Token_Position; Exp_Result : out Exp_Record) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Err_Num, --# LexTokenManager.State, --# Position, --# Ref, --# SPARK_IO.File_Sys & --# Exp_Result from Dictionary.Dict; --# post Dictionary.Is_Null_Symbol (Exp_Result.Type_Symbol) or Dictionary.IsTypeMark (Exp_Result.Type_Symbol, Dictionary.Dict); is begin Exp_Result := Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => Err_Num, Reference => Ref, Position => Position, Id_Str => LexTokenManager.Null_String); end Raise_Error; begin -- Assignment_Check if not (Dictionary.IsUnknownTypeMark (Target_Type) or else Dictionary.IsUnknownTypeMark (Exp_Result.Type_Symbol)) then if Dictionary.IsPredefinedStringType (Exp_Result.Type_Symbol) then -- string literal or parameter -- check for type compatibility before other checks if not Dictionary.IsPredefinedStringType (Dictionary.GetRootType (Target_Type)) then ErrorHandler.Semantic_Error_Sym2 (Err_Num => 107, Reference => 8, Position => Position, Sym => Exp_Result.Type_Symbol, Sym2 => Target_Type, Scope => Scope); else if Exp_Result.Range_RHS = Maths.NoValue then -- parameter -- can't assign a string parameter Raise_Error (Err_Num => 39, Ref => 7, Position => Position, Exp_Result => Exp_Result); -- if its a string literal its ok if the length is right elsif not Dictionary.IsPredefinedStringType (Target_Type) then Maths.StorageRep (Exp_Result.Range_RHS, Store_Rep); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Dictionary.GetScalarAttributeValue (False, LexTokenManager.Last_Token, Dictionary.CurrentSymbol (Dictionary.FirstArrayIndex (Target_Type))), Lex_Str2 => Store_Rep) /= LexTokenManager.Str_Eq then Raise_Error (Err_Num => 402, Ref => ErrorHandler.No_Reference, Position => Position, Exp_Result => Exp_Result); end if; end if; end if; elsif Dictionary.Is_Unconstrained_Array_Type_Mark (Exp_Result.Type_Symbol, 
Scope) then Raise_Error (Err_Num => 39, Ref => 7, Position => Position, Exp_Result => Exp_Result); elsif not Dictionary.CompatibleTypes (Scope, Target_Type, Exp_Result.Type_Symbol) then ErrorHandler.Semantic_Error_Sym2 (Err_Num => 107, Reference => 8, Position => Position, Sym => Exp_Result.Type_Symbol, Sym2 => Target_Type, Scope => Scope); Exp_Result := Unknown_Type_Record; elsif Exp_Result.Is_ARange then Raise_Error (Err_Num => 91, Ref => ErrorHandler.No_Reference, Position => Position, Exp_Result => Exp_Result); elsif Illegal_Unconstrained (Left_Type => Target_Type, Right_Type => Exp_Result.Type_Symbol) then Raise_Error (Err_Num => 418, Ref => 5, Position => Position, Exp_Result => Exp_Result); else -- if there is no error we can check for constraint_error Constraint_Check (Val => Exp_Result.Value, New_Val => Exp_Value, Is_Annotation => False, Typ => Target_Type, Position => Position); Exp_Result.Value := Exp_Value; end if; end if; end Assignment_Check; spark-2012.0.deb/examiner/sem-is_enclosing_package.adb0000644000175000017500000000344511753202336021640 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) function Is_Enclosing_Package (Outer_Pack : Dictionary.Symbol; Scope : Dictionary.Scopes) return Boolean is Scope_Chain, Outer_Scope : Dictionary.Scopes; Result : Boolean; begin if Outer_Pack = Dictionary.GetRegion (Scope) then Result := True; else Result := False; Outer_Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Outer_Pack); Scope_Chain := Scope; while not Dictionary.IsGlobalScope (Scope_Chain) loop Scope_Chain := Dictionary.GetEnclosingScope (Scope_Chain); if Scope_Chain = Outer_Scope then Result := True; exit; end if; end loop; end if; return Result; end Is_Enclosing_Package; spark-2012.0.deb/examiner/dictionary-get_binary_operator_type_local.adb0000644000175000017500000011023311753202336025335 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Dictionary) function Get_Binary_Operator_Type_Local (Name : SP_Symbols.SP_Symbol; The_Left_Type : RawDict.Type_Info_Ref; The_Right_Type : RawDict.Type_Info_Ref) return RawDict.Type_Info_Ref is type Kind_Of_Result is (Left_Defined, Right_Defined, Never_Defined, Match); Result : RawDict.Type_Info_Ref; -------------------------------------------------------------------------------- -- Logical operators (and, or, xor) are _defined_ (but not necessarily -- visible) according to the following table. -------------------------------------------------------------------------------- function Get_Logical_Operators_Type (The_Left_Type, The_Right_Type : RawDict.Type_Info_Ref) return RawDict.Type_Info_Ref --# global in Dict; is type Kind_Of_Type is (BO_Boolean_Type, BO_Boolean_Array_Type, BO_Modular_Type, BO_Universal_Integer_Type, BO_Unknown_Type, BO_Other_Type); type Array_Right_Type is array (Kind_Of_Type) of Kind_Of_Result; type Array_Left_Type is array (Kind_Of_Type) of Array_Right_Type; pragma Style_Checks (Off); The_Table : constant Array_Left_Type := Array_Left_Type' -- Right operand Boolean, Boolean Array, Modular, Universal Int, Unknown, Other -- Left operand (Array_Right_Type'( Match, Never_Defined, Never_Defined, Never_Defined, Left_Defined, Never_Defined), -- Boolean Array_Right_Type'(Never_Defined, Match, Never_Defined, Never_Defined, Left_Defined, Never_Defined), -- Boolean Array Array_Right_Type'(Never_Defined, Never_Defined, Match, Left_Defined, Left_Defined, Never_Defined), -- Modular Array_Right_Type'(Never_Defined, Never_Defined, Right_Defined, Never_Defined, Left_Defined, Never_Defined), -- Universal Int Array_Right_Type'(Right_Defined, Right_Defined, Right_Defined, Right_Defined, Left_Defined, Never_Defined), -- Unknown Array_Right_Type'(Never_Defined, Never_Defined, Never_Defined, Never_Defined, Never_Defined, Never_Defined)); -- Other pragma Style_Checks (On); Result : RawDict.Type_Info_Ref; -------------------------------------------------------------------------------- function Get_Kind_Of_Type (Type_Mark : RawDict.Type_Info_Ref) return Kind_Of_Type --# global in Dict; is Result : Kind_Of_Type; begin if Type_Is_Boolean (Type_Mark => Type_Mark) then Result := BO_Boolean_Type; elsif Type_Is_Boolean_Array (Type_Mark => Type_Mark) then Result := BO_Boolean_Array_Type; elsif Type_Is_Modular (Type_Mark => Type_Mark) then Result := BO_Modular_Type; elsif Type_Mark = Get_Universal_Integer_Type then Result := BO_Universal_Integer_Type; elsif Type_Mark = Get_Unknown_Type_Mark then Result := BO_Unknown_Type; else Result := BO_Other_Type; end if; return Result; end Get_Kind_Of_Type; begin -- Get_Logical_Operators_Type case The_Table (Get_Kind_Of_Type (Type_Mark => The_Left_Type)) (Get_Kind_Of_Type (Type_Mark => The_Right_Type)) is when Left_Defined => Result := The_Left_Type; when Right_Defined => Result := The_Right_Type; when Never_Defined => Result := RawDict.Null_Type_Info_Ref; when Match => if The_Left_Type = The_Right_Type then Result := The_Left_Type; else Result := RawDict.Null_Type_Info_Ref; end if; end case; return Result; end Get_Logical_Operators_Type; -------------------------------------------------------------------------------- -- Covers = and /= -- -- The EqualityDefined 
attribute is set when the type is added to the -- dictionary. -------------------------------------------------------------------------------- function Get_Equality_Operators_Type (The_Left_Type, The_Right_Type : RawDict.Type_Info_Ref) return RawDict.Type_Info_Ref --# global in Dict; is Result : RawDict.Type_Info_Ref; begin if The_Left_Type = The_Right_Type or else The_Right_Type = Get_Unknown_Type_Mark then if RawDict.Get_Type_Equality_Defined (Type_Mark => The_Left_Type) then Result := Get_Predefined_Boolean_Type; else Result := RawDict.Null_Type_Info_Ref; end if; elsif The_Left_Type = Get_Unknown_Type_Mark then if RawDict.Get_Type_Equality_Defined (Type_Mark => The_Right_Type) then Result := Get_Predefined_Boolean_Type; else Result := RawDict.Null_Type_Info_Ref; end if; else Result := RawDict.Null_Type_Info_Ref; end if; return Result; end Get_Equality_Operators_Type; -------------------------------------------------------------------------------- -- Relational ordering operators (<= >= < >) are _defined_ (but not necessarily -- visible) according to the following table. -- Scalar = Integer, Modular, Enumeration, Real (but not Boolean) -------------------------------------------------------------------------------- function Get_Relational_Operators_Type (The_Left_Type, The_Right_Type : RawDict.Type_Info_Ref) return RawDict.Type_Info_Ref --# global in CommandLineData.Content; --# in Dict; is type Kind_Of_Type is (BO_Scalar_Type, BO_Boolean_Type, BO_Predefined_String, BO_Constrained_String, BO_Time_Type, BO_Unknown_Type, BO_Other_Type); type Array_Right_Type is array (Kind_Of_Type) of Kind_Of_Result; type Array_Left_Type is array (Kind_Of_Type) of Array_Right_Type; pragma Style_Checks (Off); The_Table : constant Array_Left_Type := Array_Left_Type' -- Right operand Scalar, Boolean, Predefined Str, Constrained Str, Time, Unknown, Other -- Left operand (Array_Right_Type'( Match, Never_Defined, Never_Defined, Never_Defined, Never_Defined, Left_Defined, Never_Defined), -- Scalar Array_Right_Type'(Never_Defined, Never_Defined, Never_Defined, Never_Defined, Never_Defined, Never_Defined, Never_Defined), -- Boolean Array_Right_Type'(Never_Defined, Never_Defined, Match, Never_Defined, Never_Defined, Left_Defined, Never_Defined), -- Predefined Str Array_Right_Type'(Never_Defined, Never_Defined, Never_Defined, Match, Never_Defined, Left_Defined, Never_Defined), -- Constrained Str Array_Right_Type'(Never_Defined, Never_Defined, Never_Defined, Never_Defined, Match, Left_Defined, Never_Defined), -- Time Array_Right_Type'(Right_Defined, Never_Defined, Right_Defined, Right_Defined, Right_Defined, Left_Defined, Never_Defined), -- Unknown Array_Right_Type'(Never_Defined, Never_Defined, Never_Defined, Never_Defined, Never_Defined, Never_Defined, Never_Defined)); -- Other pragma Style_Checks (On); Result : RawDict.Type_Info_Ref; -------------------------------------------------------------------------------- function Get_Kind_Of_Type (Type_Mark : RawDict.Type_Info_Ref) return Kind_Of_Type --# global in CommandLineData.Content; --# in Dict; is Result : Kind_Of_Type; begin if Type_Is_Boolean (Type_Mark => Type_Mark) then Result := BO_Boolean_Type; elsif Type_Is_Scalar (Type_Mark => Type_Mark) then Result := BO_Scalar_Type; elsif Type_Is_Array (Type_Mark => Type_Mark) then if Is_Type (Type_Mark => Type_Mark) then if Type_Mark = Get_Predefined_String_Type then Result := BO_Predefined_String; else Result := BO_Constrained_String; end if; else if Get_Root_Type (Type_Mark => Type_Mark) = Get_Predefined_String_Type 
then Result := BO_Predefined_String; else Result := BO_Constrained_String; end if; end if; elsif CommandLineData.Ravenscar_Selected and then Is_Predefined_Time_Type (Type_Mark => Type_Mark) then Result := BO_Time_Type; elsif Type_Mark = Get_Unknown_Type_Mark then Result := BO_Unknown_Type; else Result := BO_Other_Type; end if; return Result; end Get_Kind_Of_Type; begin -- Get_Relational_Operators_Type case The_Table (Get_Kind_Of_Type (Type_Mark => The_Left_Type)) (Get_Kind_Of_Type (Type_Mark => The_Right_Type)) is when Left_Defined => Result := Get_Predefined_Boolean_Type; when Right_Defined => Result := Get_Predefined_Boolean_Type; when Never_Defined => Result := RawDict.Null_Type_Info_Ref; when Match => if The_Left_Type = The_Right_Type then Result := Get_Predefined_Boolean_Type; else Result := RawDict.Null_Type_Info_Ref; end if; end case; return Result; end Get_Relational_Operators_Type; -------------------------------------------------------------------------------- -- Binary Adding operators + and - (but not &) are _defined_ -- (but not necessarily visible) according -- to the following table. -- -- Result codes are -- Y = Always defined -- N = Never defined -- -- For the types Time and Time_Span in package Ada.Real_Time, the adding -- operators are defined as follows, with T = Time and TS = Time_Span: -- -- Op "+" Right T TS U O Op "-" Right T TS U O -- Left Left -- T N Y Y N T Y Y Y N -- TS Y Y Y N TS N Y Y N -- U Y Y Y N U Y Y Y N -- O N N N N O N N N N -------------------------------------------------------------------------------- function Get_Adding_Operators_Type (Name : SP_Symbols.SP_Symbol; The_Left_Type, The_Right_Type : RawDict.Type_Info_Ref) return RawDict.Type_Info_Ref --# global in CommandLineData.Content; --# in Dict; is type Kind_Of_Type is (BO_Integer_Type, BO_Real_Type, BO_Universal_Integer_Type, BO_Universal_Real_Type, BO_Time_Type, BO_Time_Span_Type, BO_Unknown_Type, BO_Other_Type); type Array_Right_Type is array (Kind_Of_Type) of Kind_Of_Result; type Array_Left_Type is array (Kind_Of_Type) of Array_Right_Type; pragma Style_Checks (Off); The_Table : constant Array_Left_Type := Array_Left_Type' -- Right operand Integer, Real, Universal Int, Universal Real, Time, Time Span, Unknown, Other -- Left operand (Array_Right_Type'( Match, Never_Defined, Left_Defined, Never_Defined, Never_Defined, Never_Defined, Left_Defined, Never_Defined), -- Integer Array_Right_Type'(Never_Defined, Match, Never_Defined, Left_Defined, Never_Defined, Never_Defined, Left_Defined, Never_Defined), -- Real Array_Right_Type'(Right_Defined, Never_Defined, Match, Never_Defined, Never_Defined, Never_Defined, Left_Defined, Never_Defined), -- Universal Int Array_Right_Type'(Never_Defined, Right_Defined, Never_Defined, Match, Never_Defined, Never_Defined, Left_Defined, Never_Defined), -- Universal Real Array_Right_Type'(Never_Defined, Never_Defined, Never_Defined, Never_Defined, Match, Left_Defined, Left_Defined, Never_Defined), -- Time Array_Right_Type'(Never_Defined, Never_Defined, Never_Defined, Never_Defined, Match, Left_Defined, Left_Defined, Never_Defined), -- Time Span Array_Right_Type'(Right_Defined, Right_Defined, Right_Defined, Right_Defined, Right_Defined, Right_Defined, Left_Defined, Never_Defined), -- Unknown Array_Right_Type'(Never_Defined, Never_Defined, Never_Defined, Never_Defined, Never_Defined, Never_Defined, Never_Defined, Never_Defined)); -- Other pragma Style_Checks (On); Result : RawDict.Type_Info_Ref; 
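-- Reading the table above (illustrative): universal integer + Integer gives
-- Right_Defined, so the result takes the type of the Integer operand;
-- Time_Span + Time gives Match, which the special case in the body below
-- resolves to Time for "+", while Time - Time resolves to Time_Span.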
   --------------------------------------------------------------------------------
   function Get_Kind_Of_Type (Type_Mark : RawDict.Type_Info_Ref) return Kind_Of_Type
   --# global in CommandLineData.Content;
   --#        in Dict;
   is
      Result : Kind_Of_Type;
   begin
      if Type_Mark = Get_Universal_Integer_Type then
         Result := BO_Universal_Integer_Type;
      elsif Type_Mark = Get_Universal_Real_Type or else Type_Mark = Get_Universal_Fixed_Type then
         Result := BO_Universal_Real_Type;
      elsif Type_Is_Integer (Type_Mark => Type_Mark) or else Type_Is_Modular (Type_Mark => Type_Mark) then
         Result := BO_Integer_Type;
      elsif Type_Is_Real (Type_Mark => Type_Mark) then
         Result := BO_Real_Type;
      elsif CommandLineData.Ravenscar_Selected and then Is_Predefined_Time_Type (Type_Mark => Type_Mark) then
         if Type_Mark = Get_Predefined_Time_Type then
            Result := BO_Time_Type;
         else
            Result := BO_Time_Span_Type;
         end if;
      elsif Type_Mark = Get_Unknown_Type_Mark then
         Result := BO_Unknown_Type;
      else
         Result := BO_Other_Type;
      end if;
      return Result;
   end Get_Kind_Of_Type;

begin -- Get_Adding_Operators_Type
   case The_Table (Get_Kind_Of_Type (Type_Mark => The_Left_Type)) (Get_Kind_Of_Type (Type_Mark => The_Right_Type)) is
      when Left_Defined =>
         Result := The_Left_Type;
      when Right_Defined =>
         Result := The_Right_Type;
      when Never_Defined =>
         Result := RawDict.Null_Type_Info_Ref;
      when Match =>
         if CommandLineData.Ravenscar_Selected
           and then Get_Kind_Of_Type (Type_Mark => The_Right_Type) = BO_Time_Type then
            if Name = SP_Symbols.plus and then The_Left_Type = Get_Predefined_Time_Span_Type then
               Result := Get_Predefined_Time_Type;
            elsif Name = SP_Symbols.minus and then The_Left_Type = Get_Predefined_Time_Type then
               Result := Get_Predefined_Time_Span_Type;
            else
               Result := RawDict.Null_Type_Info_Ref;
            end if;
         elsif The_Left_Type = The_Right_Type then
            Result := The_Left_Type;
         else
            Result := RawDict.Null_Type_Info_Ref;
         end if;
   end case;
   return Result;
end Get_Adding_Operators_Type;

--------------------------------------------------------------------------------
-- Modular types are Numeric types, so no change here to accommodate modular
-- types.
-- For the Time_Span type, multiplication is defined between TS and Integer.
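-- For illustration (reading the table below): My_Int * <integer literal>
-- hits (Integer, Universal Int) = Left_Defined and yields My_Int;
-- My_Int * Time_Span hits (Integer, Time Span) = Right_Defined and yields
-- Time_Span (only when the Ravenscar profile is selected, since Time_Span
-- is otherwise not classified as a time type); and the product of two
-- fixed point operands is a Match that resolves to universal fixed.
-- (My_Int is a hypothetical signed integer type used for illustration.)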
-------------------------------------------------------------------------------- function Get_Multiplication_Operator_Type (The_Left_Type, The_Right_Type : RawDict.Type_Info_Ref) return RawDict.Type_Info_Ref --# global in CommandLineData.Content; --# in Dict; is type Kind_Of_Type is (BO_Integer_Type, BO_Fixed_Point_Type, BO_Floating_Point_Type, BO_Universal_Integer_Type, BO_Universal_Fixed_Point_Type, BO_Universal_Floating_Point_Type, BO_Time_Span_Type, BO_Unknown_Type, BO_Other_Type); type Array_Right_Type is array (Kind_Of_Type) of Kind_Of_Result; type Array_Left_Type is array (Kind_Of_Type) of Array_Right_Type; pragma Style_Checks (Off); The_Table : constant Array_Left_Type := Array_Left_Type' -- Right operand Integer, Fixed, Float, Universal Int, Universal Fixed, Universal Float, Time Span, Unknown, Other -- Left operand (Array_Right_Type'( Match, Right_Defined, Never_Defined, Left_Defined, Right_Defined, Never_Defined, Right_Defined, Left_Defined, Never_Defined), -- Integer Array_Right_Type'( Left_Defined, Match, Never_Defined, Left_Defined, Right_Defined, Never_Defined, Never_Defined, Left_Defined, Never_Defined), -- Fixed Array_Right_Type'(Never_Defined, Never_Defined, Match, Never_Defined, Never_Defined, Left_Defined, Never_Defined, Left_Defined, Never_Defined), -- Float Array_Right_Type'(Right_Defined, Right_Defined, Never_Defined, Left_Defined, Right_Defined, Right_Defined, Right_Defined, Left_Defined, Never_Defined), -- Universal Int Array_Right_Type'( Left_Defined, Left_Defined, Never_Defined, Left_Defined, Left_Defined, Never_Defined, Never_Defined, Left_Defined, Never_Defined), -- Universal Fixed Array_Right_Type'(Never_Defined, Never_Defined, Right_Defined, Left_Defined, Never_Defined, Left_Defined, Never_Defined, Left_Defined, Never_Defined), -- Universal Float Array_Right_Type'( Left_Defined, Never_Defined, Never_Defined, Left_Defined, Never_Defined, Never_Defined, Never_Defined, Left_Defined, Never_Defined), -- Time Span Array_Right_Type'(Right_Defined, Right_Defined, Right_Defined, Right_Defined, Right_Defined, Right_Defined, Right_Defined, Left_Defined, Never_Defined), -- Unknown Array_Right_Type'(Never_Defined, Never_Defined, Never_Defined, Never_Defined, Never_Defined, Never_Defined, Never_Defined, Never_Defined, Never_Defined)); -- Other pragma Style_Checks (On); Result : RawDict.Type_Info_Ref; -------------------------------------------------------------------------------- function Get_Kind_Of_Type (Type_Mark : RawDict.Type_Info_Ref) return Kind_Of_Type --# global in CommandLineData.Content; --# in Dict; is Result : Kind_Of_Type; begin if Type_Mark = Get_Universal_Integer_Type then Result := BO_Universal_Integer_Type; elsif Type_Mark = Get_Universal_Fixed_Type then Result := BO_Universal_Fixed_Point_Type; elsif Type_Mark = Get_Universal_Real_Type then Result := BO_Universal_Floating_Point_Type; elsif Type_Is_Integer (Type_Mark => Type_Mark) or else Type_Is_Modular (Type_Mark => Type_Mark) then Result := BO_Integer_Type; elsif Type_Is_Fixed_Point (Type_Mark => Type_Mark) then Result := BO_Fixed_Point_Type; elsif Type_Is_Floating_Point (Type_Mark => Type_Mark) then Result := BO_Floating_Point_Type; elsif CommandLineData.Ravenscar_Selected and then Is_Predefined_Time_Type (Type_Mark => Type_Mark) then if Type_Mark = Get_Predefined_Time_Type then Result := BO_Unknown_Type; else Result := BO_Time_Span_Type; end if; elsif Type_Mark = Get_Unknown_Type_Mark then Result := BO_Unknown_Type; else Result := BO_Other_Type; end if; return Result; end Get_Kind_Of_Type; begin -- 
Get_Multiplication_Operator_Type case The_Table (Get_Kind_Of_Type (Type_Mark => The_Left_Type)) (Get_Kind_Of_Type (Type_Mark => The_Right_Type)) is when Left_Defined => Result := The_Left_Type; when Right_Defined => Result := The_Right_Type; when Never_Defined => Result := RawDict.Null_Type_Info_Ref; when Match => if Get_Kind_Of_Type (Type_Mark => The_Left_Type) = BO_Fixed_Point_Type and then Get_Kind_Of_Type (Type_Mark => The_Left_Type) = BO_Fixed_Point_Type then Result := Get_Universal_Fixed_Type; elsif The_Left_Type = The_Right_Type then Result := The_Left_Type; else Result := RawDict.Null_Type_Info_Ref; end if; end case; return Result; end Get_Multiplication_Operator_Type; -------------------------------------------------------------------------------- -- / for signed integer, real, and modular types are defined for -- 1) Any matching pair of numeric types -- 2) Any pair of fixed point types -- 3) Any fixed point type on the left with Standard.Integer on the right -- 4) Universal real on the left, and Universal integer on the right -- 5) Any type with an unknown type (to prevent needless propagation of errors) -- For type Time_Span, "/" is defined for TS/TS and TS/Integer. -------------------------------------------------------------------------------- function Get_Division_Operator_Type (The_Left_Type, The_Right_Type : RawDict.Type_Info_Ref) return RawDict.Type_Info_Ref --# global in CommandLineData.Content; --# in Dict; is type Kind_Of_Type is (BO_Integer_Type, BO_Fixed_Point_Type, BO_Floating_Point_Type, BO_Universal_Integer_Type, BO_Universal_Fixed_Point_Type, BO_Universal_Floating_Point_Type, BO_Time_Span_Type, BO_Unknown_Type, BO_Other_Type); type Array_Right_Type is array (Kind_Of_Type) of Kind_Of_Result; type Array_Left_Type is array (Kind_Of_Type) of Array_Right_Type; pragma Style_Checks (Off); The_Table : constant Array_Left_Type := Array_Left_Type' -- Right operand Integer, Fixed, Float, Universal Int, Universal Fixed, Universal Float, Time Span, Unknown, Other -- Left operand (Array_Right_Type'( Match, Never_Defined, Never_Defined, Left_Defined, Never_Defined, Never_Defined, Never_Defined, Left_Defined, Never_Defined), -- Integer Array_Right_Type'( Left_Defined, Match, Never_Defined, Left_Defined, Right_Defined, Never_Defined, Never_Defined, Left_Defined, Never_Defined), -- Fixed Array_Right_Type'(Never_Defined, Never_Defined, Match, Never_Defined, Never_Defined, Left_Defined, Never_Defined, Left_Defined, Never_Defined), -- Float Array_Right_Type'(Right_Defined, Never_Defined, Never_Defined, Left_Defined, Never_Defined, Never_Defined, Never_Defined, Left_Defined, Never_Defined), -- Universal Int Array_Right_Type'( Left_Defined, Left_Defined, Never_Defined, Left_Defined, Left_Defined, Never_Defined, Never_Defined, Left_Defined, Never_Defined), -- Universal Fixed Array_Right_Type'(Never_Defined, Never_Defined, Right_Defined, Left_Defined, Never_Defined, Left_Defined, Never_Defined, Left_Defined, Never_Defined), -- Universal Float Array_Right_Type'( Left_Defined, Never_Defined, Never_Defined, Left_Defined, Never_Defined, Never_Defined, Match, Left_Defined, Never_Defined), -- Time Span Array_Right_Type'(Right_Defined, Right_Defined, Right_Defined, Right_Defined, Right_Defined, Right_Defined, Right_Defined, Left_Defined, Never_Defined), -- Unknown Array_Right_Type'(Never_Defined, Never_Defined, Never_Defined, Never_Defined, Never_Defined, Never_Defined, Never_Defined, Never_Defined, Never_Defined)); -- Other pragma Style_Checks (On); Result : RawDict.Type_Info_Ref; 
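   -- Worked example, for illustration only: TS / My_Int (Time Span, Integer)
   -- is Left_Defined and yields Time_Span, whereas TS / TS is a Match and is
   -- resolved to Standard.Integer in the Match branch below (Ravenscar only);
   -- F / F for a fixed point type F is also a Match and resolves to universal
   -- fixed.  (TS abbreviates Ada.Real_Time.Time_Span; My_Int and F are
   -- hypothetical integer and fixed point types.)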
-------------------------------------------------------------------------------- function Get_Kind_Of_Type (Type_Mark : RawDict.Type_Info_Ref) return Kind_Of_Type --# global in CommandLineData.Content; --# in Dict; is Result : Kind_Of_Type; begin if Type_Mark = Get_Universal_Integer_Type then Result := BO_Universal_Integer_Type; elsif Type_Mark = Get_Universal_Fixed_Type then Result := BO_Universal_Fixed_Point_Type; elsif Type_Mark = Get_Universal_Real_Type then Result := BO_Universal_Floating_Point_Type; elsif Type_Is_Integer (Type_Mark => Type_Mark) or else Type_Is_Modular (Type_Mark => Type_Mark) then Result := BO_Integer_Type; elsif Type_Is_Fixed_Point (Type_Mark => Type_Mark) then Result := BO_Fixed_Point_Type; elsif Type_Is_Floating_Point (Type_Mark => Type_Mark) then Result := BO_Floating_Point_Type; elsif CommandLineData.Ravenscar_Selected and then Is_Predefined_Time_Type (Type_Mark => Type_Mark) then if Type_Mark = Get_Predefined_Time_Type then Result := BO_Other_Type; else Result := BO_Time_Span_Type; end if; elsif Type_Mark = Get_Unknown_Type_Mark then Result := BO_Unknown_Type; else Result := BO_Other_Type; end if; return Result; end Get_Kind_Of_Type; begin -- Get_Division_Operator_Type case The_Table (Get_Kind_Of_Type (Type_Mark => The_Left_Type)) (Get_Kind_Of_Type (Type_Mark => The_Right_Type)) is when Left_Defined => Result := The_Left_Type; when Right_Defined => Result := The_Right_Type; when Never_Defined => Result := RawDict.Null_Type_Info_Ref; when Match => if Get_Kind_Of_Type (Type_Mark => The_Left_Type) = BO_Fixed_Point_Type and then Get_Kind_Of_Type (Type_Mark => The_Left_Type) = BO_Fixed_Point_Type then Result := Get_Universal_Fixed_Type; elsif CommandLineData.Ravenscar_Selected and then Get_Kind_Of_Type (Type_Mark => The_Left_Type) = BO_Time_Span_Type and then Get_Kind_Of_Type (Type_Mark => The_Right_Type) = BO_Time_Span_Type then Result := Get_Predefined_Integer_Type; elsif The_Left_Type = The_Right_Type then Result := The_Left_Type; else Result := RawDict.Null_Type_Info_Ref; end if; end case; return Result; end Get_Division_Operator_Type; -------------------------------------------------------------------------------- -- mod and rem for signed integer and modular types is defined for -- 1) Matching pairs of integer or modular types -- 2) Any integer or modular type and an unknown type -------------------------------------------------------------------------------- function Get_Integer_Division_Operators_Type (The_Left_Type, The_Right_Type : RawDict.Type_Info_Ref) return RawDict.Type_Info_Ref --# global in Dict; is type Kind_Of_Type is (BO_Integer_Type, BO_Unknown_Type, BO_Other_Type); type Array_Right_Type is array (Kind_Of_Type) of Kind_Of_Result; type Array_Left_Type is array (Kind_Of_Type) of Array_Right_Type; pragma Style_Checks (Off); The_Table : constant Array_Left_Type := Array_Left_Type' -- Right operand Integer, Unknown, Other -- Left operand (Array_Right_Type'( Match, Left_Defined, Never_Defined), -- Integer Array_Right_Type'(Right_Defined, Left_Defined, Never_Defined), -- Unknown Array_Right_Type'(Never_Defined, Never_Defined, Never_Defined)); -- Other pragma Style_Checks (On); Result : RawDict.Type_Info_Ref; -------------------------------------------------------------------------------- function Get_Kind_Of_Type (Type_Mark : RawDict.Type_Info_Ref) return Kind_Of_Type --# global in Dict; is Result : Kind_Of_Type; begin if Type_Is_Integer (Type_Mark => Type_Mark) or else Type_Is_Modular (Type_Mark => Type_Mark) then Result := BO_Integer_Type; elsif 
Type_Mark = Get_Unknown_Type_Mark then Result := BO_Unknown_Type; else Result := BO_Other_Type; end if; return Result; end Get_Kind_Of_Type; begin -- Get_Integer_Division_Operators_Type case The_Table (Get_Kind_Of_Type (Type_Mark => The_Left_Type)) (Get_Kind_Of_Type (Type_Mark => The_Right_Type)) is when Left_Defined => Result := The_Left_Type; when Right_Defined => Result := The_Right_Type; when Never_Defined => Result := RawDict.Null_Type_Info_Ref; when Match => if The_Left_Type = The_Right_Type then Result := The_Left_Type; else Result := RawDict.Null_Type_Info_Ref; end if; end case; return Result; end Get_Integer_Division_Operators_Type; -------------------------------------------------------------------------------- -- ** is defined (but not necessairily visible for) -- 1) Any Integer type ** Standard.Integer -- 2) Any Modular type ** Standard.Integer -- 3) Any Floating point type ** Standard.Integer -- 4) Any Unknown type ** Standard.Integer -- 5) as 1) thru 4) with Unknown on the RHS -------------------------------------------------------------------------------- function Get_Exponentiation_Operator_Type (The_Left_Type, The_Right_Type : RawDict.Type_Info_Ref) return RawDict.Type_Info_Ref --# global in Dict; is type Kind_Of_Type is (BO_Integer_Type, BO_Floating_Point_Type, BO_Predefined_Integer_Type, BO_Universal_Integer_Type, BO_Universal_Floating_Point_Type, BO_Unknown_Type, BO_Other_Type); type Array_Right_Type is array (Kind_Of_Type) of Kind_Of_Result; type Array_Left_Type is array (Kind_Of_Type) of Array_Right_Type; pragma Style_Checks (Off); The_Table : constant Array_Left_Type := Array_Left_Type' -- Right operand Integer, Float, Predefined Int, Universal Int, Universal Float, Unknown, Other -- Left operand (Array_Right_Type'(Never_Defined, Never_Defined, Left_Defined, Left_Defined, Never_Defined, Left_Defined, Never_Defined), -- Integer Array_Right_Type'(Never_Defined, Never_Defined, Left_Defined, Left_Defined, Never_Defined, Left_Defined, Never_Defined), -- Float Array_Right_Type'(Never_Defined, Never_Defined, Left_Defined, Left_Defined, Never_Defined, Left_Defined, Never_Defined), -- Predefined Int Array_Right_Type'(Never_Defined, Never_Defined, Left_Defined, Left_Defined, Never_Defined, Left_Defined, Never_Defined), -- Universal Int Array_Right_Type'(Never_Defined, Never_Defined, Left_Defined, Left_Defined, Never_Defined, Left_Defined, Never_Defined), -- Universal Float Array_Right_Type'(Never_Defined, Never_Defined, Right_Defined, Right_Defined, Never_Defined, Left_Defined, Never_Defined), -- Unknown Array_Right_Type'(Never_Defined, Never_Defined, Never_Defined, Never_Defined, Never_Defined, Never_Defined, Never_Defined)); -- Other pragma Style_Checks (On); Result : RawDict.Type_Info_Ref; -------------------------------------------------------------------------------- function Get_Kind_Of_Type (Type_Mark : RawDict.Type_Info_Ref) return Kind_Of_Type --# global in Dict; is Result : Kind_Of_Type; begin if Type_Mark = Get_Universal_Integer_Type then Result := BO_Universal_Integer_Type; elsif Type_Mark = Get_Universal_Real_Type then Result := BO_Universal_Floating_Point_Type; elsif Type_Mark = Get_Predefined_Integer_Type then Result := BO_Predefined_Integer_Type; elsif Type_Is_Integer (Type_Mark => Type_Mark) or else Type_Is_Modular (Type_Mark => Type_Mark) then Result := BO_Integer_Type; elsif Type_Is_Floating_Point (Type_Mark => Type_Mark) then Result := BO_Floating_Point_Type; elsif Type_Mark = Get_Unknown_Type_Mark then Result := BO_Unknown_Type; else Result := 
BO_Other_Type; end if; return Result; end Get_Kind_Of_Type; begin -- Get_Exponentiation_Operator_Type case The_Table (Get_Kind_Of_Type (Type_Mark => The_Left_Type)) (Get_Kind_Of_Type (Type_Mark => The_Right_Type)) is when Left_Defined => Result := The_Left_Type; when Right_Defined => Result := The_Right_Type; when Never_Defined => Result := RawDict.Null_Type_Info_Ref; when Match => if The_Left_Type = The_Right_Type then Result := The_Left_Type; else Result := RawDict.Null_Type_Info_Ref; end if; end case; return Result; end Get_Exponentiation_Operator_Type; begin -- Get_Binary_Operator_Type_Local case Name is when SP_Symbols.RWand | SP_Symbols.RWor | SP_Symbols.RWxor => Result := Get_Logical_Operators_Type (The_Left_Type => The_Left_Type, The_Right_Type => The_Right_Type); when SP_Symbols.equals | SP_Symbols.not_equal => Result := Get_Equality_Operators_Type (The_Left_Type => The_Left_Type, The_Right_Type => The_Right_Type); when SP_Symbols.less_than | SP_Symbols.less_or_equal | SP_Symbols.greater_than | SP_Symbols.greater_or_equal => Result := Get_Relational_Operators_Type (The_Left_Type => The_Left_Type, The_Right_Type => The_Right_Type); when SP_Symbols.plus | SP_Symbols.minus => Result := Get_Adding_Operators_Type (Name => Name, The_Left_Type => The_Left_Type, The_Right_Type => The_Right_Type); when SP_Symbols.multiply => Result := Get_Multiplication_Operator_Type (The_Left_Type => The_Left_Type, The_Right_Type => The_Right_Type); when SP_Symbols.divide => Result := Get_Division_Operator_Type (The_Left_Type => The_Left_Type, The_Right_Type => The_Right_Type); when SP_Symbols.RWmod | SP_Symbols.RWrem => Result := Get_Integer_Division_Operators_Type (The_Left_Type => The_Left_Type, The_Right_Type => The_Right_Type); when SP_Symbols.double_star => Result := Get_Exponentiation_Operator_Type (The_Left_Type => The_Left_Type, The_Right_Type => The_Right_Type); when others => Result := RawDict.Null_Type_Info_Ref; end case; return Result; end Get_Binary_Operator_Type_Local; spark-2012.0.deb/examiner/declarations.smf0000644000175000017500000000072311753202337017433 0ustar eugeneugendeclarations.adb declarations-outputdeclarations.adb declarations-outputdeclarations-generatedeclarations.adb -vcg declarations-outputdeclarations-generatedeclarations-generatesuccessors.adb -vcg declarations-outputdeclarations-printdeclarations.adb declarations-outputdeclarations-printdeclarations-printconstantrules.adb declarations-outputdeclarations-printdeclarations-printruleheader.adb -vcg declarations-outputdeclarations-printdeclarations-printtyperules.adb spark-2012.0.deb/examiner/errorhandler-conversions-tostring-appendreference.adb0000644000175000017500000002345311753202336026760 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. 
If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (ErrorHandler.Conversions.ToString) procedure AppendReference (E_Str : in out E_Strings.T; Reference : in Natural) is subtype Index12 is Positive range 1 .. 12; subtype Paras is String (Index12); type References is record Source : Sources; Para : Paras; end record; --e.g. ALRM 12.3.4(6) == References'(LRM, Paras'("12.3.4(6) ")); Empty_Reference : constant References := References'(Nul, Paras'(" ")); Max_References : constant Positive := 22; --INCREMENT WHEN ADDING REFS subtype Reference_Indexes is Positive range 1 .. Max_References; type Reference_Tables is array (Reference_Indexes, CommandLineData. Language_Profiles) of References; -- References arranged thus: -- -- Ref_No => (Spark83-ref, -- Spark95-ref, -- Spark2005_Profiles-ref) Reference_Table : constant Reference_Tables := Reference_Tables' (1 => (CommandLineData.SPARK83 => References'(LRM, Paras'("4.9 ")), CommandLineData.SPARK95 => Empty_Reference, CommandLineData.SPARK2005_Profiles => Empty_Reference), 2 => (CommandLineData.SPARK83 => References'(LRM, Paras'("3.3.3(9) ")), CommandLineData.SPARK95 => Empty_Reference, CommandLineData.SPARK2005_Profiles => Empty_Reference), 3 => (CommandLineData.SPARK83 => References'(LRM, Paras'("3.6.1(3) ")), CommandLineData.SPARK95 => Empty_Reference, CommandLineData.SPARK2005_Profiles => Empty_Reference), 4 => (CommandLineData.SPARK83 => Empty_Reference, CommandLineData.SPARK95 => References'(LRM, Paras'("7.2(4) ")), CommandLineData.SPARK2005_Profiles => Empty_Reference), 5 => (CommandLineData.SPARK83 => References'(SR83, Paras'("5.2.1 ")), CommandLineData.SPARK95 => References'(SR95, Paras'("5.2.1 ")), CommandLineData.SPARK2005_Profiles => Empty_Reference), 6 => (CommandLineData.SPARK83 => References'(SR83, Paras'("3.6.3 ")), CommandLineData.SPARK95 => References'(SR95, Paras'("3.6.3 ")), CommandLineData.SPARK2005_Profiles => Empty_Reference), 7 => (CommandLineData.SPARK83 => References'(SR83, Paras'("4.1 ")), CommandLineData.SPARK95 => References'(SR95, Paras'("4.1 ")), CommandLineData.SPARK2005_Profiles => Empty_Reference), 8 => (CommandLineData.SPARK83 => References'(LRM, Paras'("5.2(3) ")), CommandLineData.SPARK95 => References'(LRM, Paras'("5.2(4) ")), CommandLineData.SPARK2005_Profiles => Empty_Reference), 9 => (CommandLineData.SPARK83 => Empty_Reference, CommandLineData.SPARK95 => References'(LRM, Paras'("J.3 ")), CommandLineData.SPARK2005_Profiles => Empty_Reference), 10 => (CommandLineData.SPARK83 => References'(LRM, Paras'("3.2.2(1) ")), CommandLineData.SPARK95 => References'(LRM, Paras'("3.3.2(3) ")), CommandLineData.SPARK2005_Profiles => Empty_Reference), 11 => (CommandLineData.SPARK83 => References'(LRM, Paras'("6.3.1(5) ")), CommandLineData.SPARK95 => References'(LRM, Paras'("6.3.1(18) ")), CommandLineData.SPARK2005_Profiles => Empty_Reference), 12 => (CommandLineData.SPARK83 => References'(LRM, Paras'("7.4(4) ")), CommandLineData.SPARK95 => Empty_Reference, CommandLineData.SPARK2005_Profiles => Empty_Reference), 13 => (CommandLineData.SPARK83 => References'(SR83, Paras'("3.2(Note2) ")), CommandLineData.SPARK95 => References'(SR95, Paras'("3.2(Note2) ")), CommandLineData.SPARK2005_Profiles => Empty_Reference), 14 => (CommandLineData.SPARK83 => References'(LRM, Paras'("5.2(2) ")), CommandLineData.SPARK95 => References'(LRM, Paras'("5.2(2) ")), CommandLineData.SPARK2005_Profiles => Empty_Reference), 15 => 
(CommandLineData.SPARK83 => References'(SR83, Paras'("3.3.2 ")), CommandLineData.SPARK95 => References'(SR95, Paras'("3.3.2 ")), CommandLineData.SPARK2005_Profiles => Empty_Reference), 16 => (CommandLineData.SPARK83 => References'(SR83, Paras'("7.3.1 ")), CommandLineData.SPARK95 => References'(SR95, Paras'("7.3.1 ")), CommandLineData.SPARK2005_Profiles => Empty_Reference), 17 => (CommandLineData.SPARK83 => References'(LRM, Paras'("10.2(3) ")), CommandLineData.SPARK95 => References'(LRM, Paras'("10.1.3(13) ")), CommandLineData.SPARK2005_Profiles => Empty_Reference), 18 => (CommandLineData.SPARK83 => References'(SR83, Paras'("3.9(1) ")), CommandLineData.SPARK95 => References'(SR95, Paras'("3.9(1) ")), CommandLineData.SPARK2005_Profiles => Empty_Reference), 19 => (CommandLineData.SPARK83 => References'(SR83, Paras'("6.3 ")), CommandLineData.SPARK95 => References'(SR95, Paras'("6.3 ")), CommandLineData.SPARK2005_Profiles => Empty_Reference), 20 => (CommandLineData.SPARK83 => References'(SR83, Paras'("7.2.3 ")), CommandLineData.SPARK95 => References'(SR95, Paras'("7.2.3 ")), CommandLineData.SPARK2005_Profiles => Empty_Reference), 21 => (CommandLineData.SPARK83 => References'(LRM, Paras'("6.3(3) ")), CommandLineData.SPARK95 => References'(LRM, Paras'("6.1(20) ")), CommandLineData.SPARK2005_Profiles => Empty_Reference), 22 => (CommandLineData.SPARK83 => References'(LRM, Paras'("4.6(3) ")), CommandLineData.SPARK95 => References'(LRM, Paras'("4.6(61) ")), CommandLineData.SPARK2005_Profiles => Empty_Reference)); Current_Reference : References; -------------------------------------- procedure Append_Source (E_Str : in out E_Strings.T; Source : in Sources) --# derives E_Str from *, --# Source; is begin case Source is when Nul => null; when LRM => E_Strings.Append_String (E_Str => E_Str, Str => "Ada LRM "); when SR83 => E_Strings.Append_String (E_Str => E_Str, Str => "SR83 "); when SR95 => E_Strings.Append_String (E_Str => E_Str, Str => "SR95 "); when UM => E_Strings.Append_String (E_Str => E_Str, Str => "User Manual "); when Proof_UM => E_Strings.Append_String (E_Str => E_Str, Str => "Proof Manual "); when JB => E_Strings.Append_String (E_Str => E_Str, Str => "Barnes "); end case; end Append_Source; -------------------------------------- procedure Append_Para (E_Str : in out E_Strings.T; Para : in Paras) --# derives E_Str from *, --# Para; is begin for I in Index12 loop exit when Para (I) = ' '; E_Strings.Append_Char (E_Str => E_Str, Ch => Para (I)); end loop; end Append_Para; -------------------------------------- begin --AppendReference if Reference /= ErrorHandler.No_Reference and not CommandLineData.Content.Plain_Output then if Reference > Max_References then E_Strings.Append_String (E_Str => E_Str, Str => " [Ref No Too High]"); else Current_Reference := Reference_Table (Reference, CommandLineData.Content.Language_Profile); if Current_Reference /= Empty_Reference then Source_Used (Current_Reference.Source) := True; E_Strings.Append_String (E_Str => E_Str, Str => " ["); Append_Source (E_Str => E_Str, Source => Current_Reference.Source); Append_Para (E_Str => E_Str, Para => Current_Reference.Para); E_Strings.Append_String (E_Str => E_Str, Str => "]"); end if; end if; end if; end AppendReference; spark-2012.0.deb/examiner/mainloop.adb0000644000175000017500000051544611753202336016556 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK 
toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Casing; with CommandLineData; with ConfigFile; with ContextManager; with ContextManager.Ops; with Dictionary; with Error_Types; with ErrorHandler; with E_Strings; with File_Utils; with FileSystem; with IndexManager; with LexTokenLists; with LexTokenManager; with MetaFile; with RequiredUnits; with ScreenEcho; with Sem; with SLI; with SPARK_IO; with SparkHTML; with SparkLex; with SPParser; with Statistics; with STree; with VCG; with XMLReport; use type CommandLineData.Flow_Analysis_Options; use type CommandLineData.Rule_Generation_Policies; use type ContextManager.FileDescriptors; use type ContextManager.FileStatus; use type ContextManager.UnitDescriptors; use type ContextManager.UnitStatus; use type ContextManager.UnitTypes; use type ErrorHandler.Error_Level; use type LexTokenManager.Str_Comp_Result; use type SPARK_IO.File_Status; use type SPARK_IO.File_Type; package body MainLoop is procedure Print_Filename (File : in SPARK_IO.File_Type; Name : in E_Strings.T; Plain : in Boolean) -- all-from-all derives for data flow analysis only --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# Name, --# Plain; is Stat : FileSystem.Typ_File_Spec_Status; Full_Name : E_Strings.T; begin if Plain then Full_Name := FileSystem.Just_File (Fn => Name, Ext => True); else --# accept F, 10, Stat, "Stat not used"; FileSystem.Find_Full_File_Name (File_Spec => Name, File_Status => Stat, Full_File_Name => Full_Name); --# end accept; end if; if File = SPARK_IO.Standard_Output then ScreenEcho.Put_ExaminerString (Full_Name); else E_Strings.Put_String (File => File, E_Str => Full_Name); end if; --# accept F, 33, Stat, "Stat not used"; end Print_Filename; procedure Initialise_Processing --# global in CommandLineData.Content; --# in out LexTokenManager.State; --# in out SparkHTML.Generate_HTML; --# in out SPARK_IO.File_Sys; --# out Dictionary.Dict; --# out SparkHTML.HTML_Work_Dir; --# out SparkHTML.SPARK_Work_Dir; --# out SparkLex.Curr_Line; --# out XMLReport.State; --# derives Dictionary.Dict, --# SparkHTML.HTML_Work_Dir, --# SparkHTML.SPARK_Work_Dir, --# SPARK_IO.File_Sys from CommandLineData.Content, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# LexTokenManager.State from *, --# CommandLineData.Content & --# SparkHTML.Generate_HTML from *, --# CommandLineData.Content, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# SparkLex.Curr_Line, --# XMLReport.State from ; is begin SparkLex.Clear_Line_Context; LexTokenManager.Initialise_String_Table; Dictionary.Initialize (CommandLineData.Content.Write_Dict); --# accept F, 602, SparkHTML.HTML_Work_Dir, SparkHTML.HTML_Work_Dir, "Only used in HTML selected" & --# F, 602, SparkHTML.SPARK_Work_Dir, SparkHTML.SPARK_Work_Dir, "Only used in HTML selected"; if 
CommandLineData.Content.HTML then SparkHTML.Init_SPARK_HTML; end if; XMLReport.Init; end Initialise_Processing; procedure Create_File_Context (Source_Filename : in E_Strings.T; File_Descriptor : out ContextManager.FileDescriptors) --# global in out ContextManager.Ops.File_Heap; --# in out LexTokenManager.State; --# derives ContextManager.Ops.File_Heap, --# LexTokenManager.State from *, --# LexTokenManager.State, --# Source_Filename & --# File_Descriptor from ContextManager.Ops.File_Heap; is FD2 : ContextManager.FileDescriptors; Lex_String_Source_Filename : LexTokenManager.Lex_String; begin ContextManager.Ops.CreateFileDescriptor (FD2); LexTokenManager.Insert_Examiner_String (Str => Source_Filename, Lex_Str => Lex_String_Source_Filename); ContextManager.Ops.SetSourceFileName (FD2, Lex_String_Source_Filename); ContextManager.Ops.SetFileStatus (FD2, ContextManager.FileCreated); File_Descriptor := FD2; end Create_File_Context; procedure Prepare_Next_Argument_File (The_Filename : in E_Strings.T; Do_Listing : in Boolean; The_Listing_Name : in E_Strings.T; Do_VCG : in out Boolean; File_Descriptor : out ContextManager.FileDescriptors) --# global in out CommandLineData.Content; --# in out ContextManager.Ops.File_Heap; --# in out LexTokenManager.State; --# derives CommandLineData.Content from *, --# Do_VCG & --# ContextManager.Ops.File_Heap from *, --# Do_Listing, --# LexTokenManager.State, --# The_Filename, --# The_Listing_Name & --# Do_VCG from CommandLineData.Content & --# File_Descriptor from ContextManager.Ops.File_Heap & --# LexTokenManager.State from *, --# The_Filename; is FD2 : ContextManager.FileDescriptors; Global_Do_VCG : Boolean; begin Create_File_Context (Source_Filename => The_Filename, File_Descriptor => FD2); if Do_Listing then ContextManager.Ops.SetListingReq (FD2, True); ContextManager.Ops.SetListingFileName (FD2, The_Listing_Name); end if; File_Descriptor := FD2; Global_Do_VCG := CommandLineData.Content.VCG; if Do_VCG then --# accept W, 169, CommandLineData.Content.VCG, "Direct updates OK here"; CommandLineData.Content.VCG := True; --# end accept; end if; Do_VCG := Global_Do_VCG; end Prepare_Next_Argument_File; procedure Show_End_Of_File (To_File : in SPARK_IO.File_Type) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# To_File; is begin SPARK_IO.New_Line (To_File, 2); SPARK_IO.Put_Line (To_File, "--End of file--------------------------------------------------", 0); end Show_End_Of_File; -- Important warnings or notes appear in makefile mode. procedure Echo_Warning_Or_Note (About_File : in E_Strings.T; Msg : in String; Important : in Boolean) --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# About_File, --# CommandLineData.Content, --# Important, --# Msg; is begin if CommandLineData.Content.Echo and (Important or not CommandLineData.Content.Makefile_Mode) then if CommandLineData.Content.Brief then -- Echo the first filename from the command-line. If it starts with an '@' -- then chop it off, so the file is recognized by the IDE. 
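 -- For example (illustrative file name): a message about the metafile
 -- "@spark.smf" would be echoed in brief mode as "spark.smf:1:1: <Msg>",
 -- i.e. in the compiler-style file:line:column form that an IDE can parse.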
if E_Strings.Get_Length (E_Str => About_File) >= 2 and then E_Strings.Get_Element (E_Str => About_File, Pos => 1) = '@' then ScreenEcho.Put_ExaminerString (E_Strings.Section (About_File, 2, E_Strings.Get_Length (E_Str => About_File) - 1)); else ScreenEcho.Put_ExaminerString (About_File); end if; ScreenEcho.Put_String (":1:1: "); ScreenEcho.Put_Line (Msg); else ScreenEcho.New_Line (1); ScreenEcho.Put_Line (Msg); end if; end if; end Echo_Warning_Or_Note; procedure Output_Dictionary_File --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# CommandLineData.Content, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State; is Dict_Message : constant String := "Generating dictionary file"; Status : SPARK_IO.File_Status; Dict_File_Name : E_Strings.T; begin if CommandLineData.Content.Write_Dict then if CommandLineData.Content.Echo and not CommandLineData.Content.Brief then ScreenEcho.Echo (E_Strings.Copy_String (Str => Dict_Message)); end if; if CommandLineData.Content.Output_Directory then Dict_File_Name := CommandLineData.Content.Output_Directory_Name; E_Strings.Append_Examiner_String (E_Str1 => Dict_File_Name, E_Str2 => FileSystem.Directory_Separator); E_Strings.Append_Examiner_String (E_Str1 => Dict_File_Name, E_Str2 => CommandLineData.Content.Dict_File_Name); else Dict_File_Name := CommandLineData.Content.Dict_File_Name; end if; Dictionary.Write (Dict_File_Name, Status); if Status /= SPARK_IO.Ok then ScreenEcho.Put_Line ("Error writing dictionary file: "); Print_Filename (File => SPARK_IO.Standard_Output, Name => Dict_File_Name, Plain => False); ScreenEcho.New_Line (1); end if; end if; end Output_Dictionary_File; procedure Print_Lex_Token_List (File : in SPARK_IO.File_Type; List : in LexTokenLists.Lists) --# global in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# LexTokenManager.State, --# List; is begin if LexTokenLists.Get_Length (List => List) = 0 then SPARK_IO.Put_String (File, "Unexpected Empty Lextoken List", 0); else LexTokenLists.Print_List (File => File, List => List); end if; end Print_Lex_Token_List; -- Following hidden trace routines enabled by -debug=u ----------------------------------- procedure Trace (Msg : String) --# derives null from Msg; is --# hide Trace; begin if CommandLineData.Content.Debug.Units then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Msg, 0); end if; end Trace; procedure Trace_Unit (List : in LexTokenLists.Lists; Types : in ContextManager.UnitTypeSets) --# derives null from List, --# Types; is --# hide Trace_Unit; FirstType : Boolean := True; begin if CommandLineData.Content.Debug.Units then SPARK_IO.Put_String (SPARK_IO.Standard_Output, " unit name: ", 0); Print_Lex_Token_List (File => SPARK_IO.Standard_Output, List => List); SPARK_IO.Put_String (SPARK_IO.Standard_Output, " which is {", 0); for I in ContextManager.UnitTypes loop if Types (I) then if not FirstType then SPARK_IO.Put_String (SPARK_IO.Standard_Output, ", ", 0); end if; SPARK_IO.Put_String (SPARK_IO.Standard_Output, ContextManager.UnitTypes'Image (I), 0); FirstType := False; end if; end loop; SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "}", 0); end if; end Trace_Unit; ----------------------------------------------------------------------------------- function Get_Unit_Name (List : in LexTokenLists.Lists) return E_Strings.T --# global in 
LexTokenManager.State; is Return_String : E_Strings.T; begin if LexTokenLists.Get_Length (List => List) = 0 then Return_String := E_Strings.Copy_String (Str => "Unexpected Empty LexToken List"); else Return_String := LexTokenLists.Token_List_To_String (Token_List => List); end if; return Return_String; end Get_Unit_Name; procedure Print_Unit_Type (File : in SPARK_IO.File_Type; Unit_Type : in ContextManager.UnitTypes) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# Unit_Type; is begin case Unit_Type is when ContextManager.MainProgram => SPARK_IO.Put_String (File, "main program", 0); when ContextManager.PackageSpecification => SPARK_IO.Put_String (File, "package specification", 0); when ContextManager.PackageBody => SPARK_IO.Put_String (File, "package body", 0); when ContextManager.SubUnit => SPARK_IO.Put_String (File, "subunit", 0); when ContextManager.GenericSubprogramDeclaration => SPARK_IO.Put_String (File, "generic subprogram declaration", 0); when ContextManager.GenericSubprogramBody => SPARK_IO.Put_String (File, "generic subprogram body", 0); when ContextManager.InterUnitPragma => null; when ContextManager.InvalidUnit => null; end case; end Print_Unit_Type; function Get_Unit_Type (Unit_Type : in ContextManager.UnitTypes) return E_Strings.T is Return_Str : E_Strings.T; begin case Unit_Type is when ContextManager.MainProgram => Return_Str := E_Strings.Copy_String (Str => "main program"); when ContextManager.PackageSpecification => Return_Str := E_Strings.Copy_String (Str => "package specification"); when ContextManager.PackageBody => Return_Str := E_Strings.Copy_String (Str => "package body"); when ContextManager.SubUnit => Return_Str := E_Strings.Copy_String (Str => "subunit"); when ContextManager.GenericSubprogramDeclaration => Return_Str := E_Strings.Copy_String (Str => "generic subprogram declaration"); when ContextManager.GenericSubprogramBody => Return_Str := E_Strings.Copy_String (Str => "generic subprogram body"); when ContextManager.InterUnitPragma => Return_Str := E_Strings.Copy_String (Str => "InterUnitPragma"); when ContextManager.InvalidUnit => Return_Str := E_Strings.Copy_String (Str => "InvalidUnit"); end case; return Return_Str; end Get_Unit_Type; procedure Print_Reason (File : in SPARK_IO.File_Type; Unit_Status : in ContextManager.UnitStatus) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# Unit_Status; is begin case Unit_Status is when ContextManager.NoIndexFile => SPARK_IO.Put_String (File, "No index file specified.", 0); when ContextManager.NotInIndexFile => SPARK_IO.Put_String (File, "Omitted from index file.", 0); when ContextManager.CannotOpenFile => SPARK_IO.Put_String (File, "Cannot open source file specified in index file.", 0); when ContextManager.UnableToLocate => SPARK_IO.Put_String (File, "Declaration not found in source code.", 0); when others => null; -- should not happen because only called when unit is not found end case; end Print_Reason; -- This function is not currently used but will be required when -- XMLReport is updated to contain the reason function Get_Reason (Unit_Status : in ContextManager.UnitStatus) return E_Strings.T is Return_Str : E_Strings.T; begin case Unit_Status is when ContextManager.NoIndexFile => Return_Str := E_Strings.Copy_String (Str => "No index file specified."); when ContextManager.NotInIndexFile => Return_Str := E_Strings.Copy_String (Str => "Omitted from index file."); when ContextManager.CannotOpenFile => Return_Str := E_Strings.Copy_String 
(Str => "Cannot open source file specified in index file."); when ContextManager.UnableToLocate => Return_Str := E_Strings.Copy_String (Str => "Declaration not found in source code."); when others => Return_Str := E_Strings.Empty_String; -- only called when unit is not found end case; return Return_Str; end Get_Reason; procedure Append_To_Report_File (Report_File : in SPARK_IO.File_Type; Purpose : in Error_Types.ConversionRequestSource; File_Descriptor : in ContextManager.FileDescriptors) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ContextManager.Ops.File_Heap; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives ContextManager.Ops.File_Heap, --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys, --# XMLReport.State from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# File_Descriptor, --# LexTokenManager.State, --# Purpose, --# Report_File, --# SPARK_IO.File_Sys, --# XMLReport.State; is Error_Context : ErrorHandler.Error_Contexts; function Get_Source_Filename (File_Descriptor : in ContextManager.FileDescriptors) return E_Strings.T --# global in ContextManager.Ops.File_Heap; --# in LexTokenManager.State; is begin return LexTokenManager.Lex_String_To_String (Lex_Str => ContextManager.Ops.GetSourceFileName (File_Descriptor)); end Get_Source_Filename; procedure Print_Source_Filename (Report_File : in SPARK_IO.File_Type; File_Descriptor : in ContextManager.FileDescriptors) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# File_Descriptor, --# LexTokenManager.State, --# Report_File; is begin SPARK_IO.Put_String (Report_File, "Source Filename: ", 0); Print_Filename (File => Report_File, Name => LexTokenManager.Lex_String_To_String (Lex_Str => ContextManager.Ops.GetSourceFileName (File_Descriptor)), Plain => CommandLineData.Content.Plain_Output); SPARK_IO.New_Line (Report_File, 1); end Print_Source_Filename; procedure Print_Listing_Filename (Report_File : in SPARK_IO.File_Type; File_Descriptor : in ContextManager.FileDescriptors) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# File_Descriptor, --# Report_File; is Filename : E_Strings.T; begin if ContextManager.Ops.ListingReqt (File_Descriptor) then SPARK_IO.Put_String (Report_File, "Listing Filename: ", 0); ContextManager.Ops.GetListingFileName (File_Descriptor, Filename); CommandLineData.Normalize_File_Name_To_Output_Directory (F => Filename); Print_Filename (File => Report_File, Name => Filename, Plain => CommandLineData.Content.Plain_Output); SPARK_IO.New_Line (Report_File, 1); else SPARK_IO.Put_Line (Report_File, "No Listing File", 0); end if; end Print_Listing_Filename; procedure Print_Unit_Status (File : in SPARK_IO.File_Type; Unit_Descriptor : in ContextManager.UnitDescriptors) --# global in ContextManager.Ops.Unit_Heap; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# ContextManager.Ops.Unit_Heap, --# File, --# Unit_Descriptor; is begin case ContextManager.Ops.Get_Unit_Status (Unit_Descriptor => Unit_Descriptor) is when 
ContextManager.NoUnitEntry => null; when ContextManager.UnitCreated => null; when ContextManager.UnitParsed | ContextManager.UnitDeferred => SPARK_IO.Put_String (File, "Unit has been parsed only, any errors are listed below.", 0); when ContextManager.UnitAnalysed => SPARK_IO.Put_String (File, "Unit has been analysed, any errors are listed below.", 0); when ContextManager.NoIndexFile => SPARK_IO.Put_String (File, "Unit required but not found in source file and no index file has been specified.", 0); when ContextManager.NotInIndexFile => SPARK_IO.Put_String (File, "Unit required but not found in source file or in index file.", 0); when ContextManager.CannotOpenFile => SPARK_IO.Put_String (File, "Source file for required Unit given in index file cannot be opened.", 0); when ContextManager.UnableToLocate => SPARK_IO.Put_String (File, "Unit required but not found.", 0); end case; end Print_Unit_Status; function Get_Unit_Status (Unit_Descriptor : in ContextManager.UnitDescriptors) return E_Strings.T --# global in ContextManager.Ops.Unit_Heap; is Return_String : E_Strings.T; begin Return_String := E_Strings.Empty_String; case ContextManager.Ops.Get_Unit_Status (Unit_Descriptor => Unit_Descriptor) is when ContextManager.NoUnitEntry => null; when ContextManager.UnitCreated => null; when ContextManager.UnitParsed | ContextManager.UnitDeferred => Return_String := E_Strings.Copy_String (Str => "parsed"); when ContextManager.UnitAnalysed => Return_String := E_Strings.Copy_String (Str => "analysed"); when ContextManager.NoIndexFile => Return_String := E_Strings.Copy_String (Str => "not found and no index file specified"); when ContextManager.NotInIndexFile => Return_String := E_Strings.Copy_String (Str => "not found in source file or in index file."); when ContextManager.CannotOpenFile => Return_String := E_Strings.Copy_String (Str => "unit source file name given in index file cannot be opened."); when ContextManager.UnableToLocate => Return_String := E_Strings.Copy_String (Str => "not found"); end case; return Return_String; end Get_Unit_Status; procedure Output_Unit_List (Report_File : in SPARK_IO.File_Type; File_Descriptor : in ContextManager.FileDescriptors) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Heap; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# File_Descriptor, --# LexTokenManager.State, --# Report_File, --# XMLReport.State & --# XMLReport.State from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# File_Descriptor, --# LexTokenManager.State; is Unit_Descriptor : ContextManager.UnitDescriptors; Unit_Name : LexTokenLists.Lists; Unit_Type : ContextManager.UnitTypes; Unit_Output : Boolean; begin Unit_Output := False; Unit_Descriptor := ContextManager.Ops.FirstUnitDescriptor; if CommandLineData.Content.XML then XMLReport.Start_Section (Section => XMLReport.S_Units_In_File, Report => Report_File); end if; loop exit when Unit_Descriptor = ContextManager.NullUnit; if (ContextManager.Ops.Get_Unit_Status (Unit_Descriptor => Unit_Descriptor) /= ContextManager.NoUnitEntry) and then File_Descriptor = ContextManager.Ops.Get_File_Descriptor (Unit_Descriptor => Unit_Descriptor) then ContextManager.Ops.GetUnitName (Unit_Descriptor, Unit_Name, Unit_Type); if LexTokenLists.Get_Length (List => Unit_Name) /= 0 then --# accept F, 41, "Stable expression expected"; if CommandLineData.Content.XML then XMLReport.Ada_Unit (Name 
=> Get_Unit_Name (List => Unit_Name), Typ => Get_Unit_Type (Unit_Type => Unit_Type), Unit_Status => Get_Unit_Status (Unit_Descriptor => Unit_Descriptor), Report => Report_File); Unit_Output := True; else Unit_Output := True; SPARK_IO.Put_String (Report_File, " Unit name: ", 0); Print_Lex_Token_List (File => Report_File, List => Unit_Name); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, " Unit type: ", 0); Print_Unit_Type (File => Report_File, Unit_Type => Unit_Type); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, " ", 0); Print_Unit_Status (File => Report_File, Unit_Descriptor => Unit_Descriptor); SPARK_IO.New_Line (Report_File, 2); end if; --# end accept; end if; end if; Unit_Descriptor := ContextManager.Ops.NextUnitDescriptor (Unit_Descriptor); end loop; if CommandLineData.Content.XML then XMLReport.End_Section (Section => XMLReport.S_Units_In_File, Report => Report_File); end if; if not Unit_Output then SPARK_IO.Put_Line (Report_File, "*** No units in file", 0); SPARK_IO.New_Line (Report_File, 2); end if; end Output_Unit_List; begin -- Append_To_Report_File SPARK_IO.New_Line (Report_File, 1); if not CommandLineData.Content.XML then Print_Source_Filename (Report_File => Report_File, File_Descriptor => File_Descriptor); end if; if ContextManager.Ops.GetFileStatus (File_Descriptor) = ContextManager.UnableToOpen then if CommandLineData.Content.XML then XMLReport.Start_File (Plain_Output => CommandLineData.Content.Plain_Output, F_Name => Get_Source_Filename (File_Descriptor => File_Descriptor), Report => Report_File); Output_Unit_List (Report_File => Report_File, File_Descriptor => File_Descriptor); XMLReport.End_File (Report => Report_File); else SPARK_IO.New_Line (Report_File, 1); Output_Unit_List (Report_File => Report_File, File_Descriptor => File_Descriptor); SPARK_IO.Put_Line (Report_File, "*** Unable to open source file", 0); end if; else if CommandLineData.Content.XML then XMLReport.Start_File (Plain_Output => CommandLineData.Content.Plain_Output, F_Name => Get_Source_Filename (File_Descriptor => File_Descriptor), Report => Report_File); Output_Unit_List (Report_File => Report_File, File_Descriptor => File_Descriptor); ContextManager.Ops.GetErrorContext (File_Descriptor, Error_Context); ErrorHandler.Set_Error_Context (Context => Error_Context); ErrorHandler.AppendErrors (Report_File, Purpose); ErrorHandler.Get_Error_Context (Context => Error_Context); ContextManager.Ops.SetErrorContext (File_Descriptor, Error_Context); XMLReport.End_File (Report => Report_File); else Print_Listing_Filename (Report_File => Report_File, File_Descriptor => File_Descriptor); SPARK_IO.New_Line (Report_File, 1); Output_Unit_List (Report_File => Report_File, File_Descriptor => File_Descriptor); ContextManager.Ops.GetErrorContext (File_Descriptor, Error_Context); ErrorHandler.Set_Error_Context (Context => Error_Context); ErrorHandler.AppendErrors (Report_File, Purpose); ErrorHandler.Get_Error_Context (Context => Error_Context); ContextManager.Ops.SetErrorContext (File_Descriptor, Error_Context); end if; end if; end Append_To_Report_File; procedure Output_Report_File (Report_File_Error_List : in out SPARK_IO.File_Type) --# global in CommandLineData.Content; --# in ConfigFile.State; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in IndexManager.State; --# in SparkHTML.HTML_Work_Dir; --# in SparkHTML.SPARK_Work_Dir; --# in Statistics.TableUsage; --# in out ContextManager.Ops.File_Heap; --# in out ErrorHandler.Error_Context; --# in out 
LexTokenManager.State; --# in out SparkHTML.Generate_HTML; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives ContextManager.Ops.File_Heap, --# ErrorHandler.Error_Context, --# XMLReport.State from CommandLineData.Content, --# ConfigFile.State, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# XMLReport.State & --# LexTokenManager.State from *, --# CommandLineData.Content, --# ErrorHandler.Error_Context, --# IndexManager.State, --# SPARK_IO.File_Sys, --# XMLReport.State & --# Report_File_Error_List from *, --# CommandLineData.Content, --# SPARK_IO.File_Sys & --# SparkHTML.Generate_HTML from *, --# CommandLineData.Content, --# ConfigFile.State, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# Report_File_Error_List, --# SPARK_IO.File_Sys, --# Statistics.TableUsage, --# XMLReport.State & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ConfigFile.State, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# Report_File_Error_List, --# SparkHTML.Generate_HTML, --# SparkHTML.HTML_Work_Dir, --# SparkHTML.SPARK_Work_Dir, --# Statistics.TableUsage, --# XMLReport.State; is Report_File : SPARK_IO.File_Type; OK : Boolean; Report_Message : constant String := "Generating report file"; procedure Create_Report_File (Report_File : out SPARK_IO.File_Type; OK : out Boolean) --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives OK, --# Report_File from CommandLineData.Content, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys, --# XMLReport.State from CommandLineData.Content, --# SPARK_IO.File_Sys, --# XMLReport.State; is Local_File : SPARK_IO.File_Type; Status : SPARK_IO.File_Status; Filename : E_Strings.T; begin OK := True; Local_File := SPARK_IO.Null_File; Filename := CommandLineData.Content.Report_File_Name; CommandLineData.Normalize_File_Name_To_Output_Directory (F => Filename); Filename := FileSystem.Case_Of_Files_For_Create (E_Str => Filename); E_Strings.Create (File => Local_File, Name_Of_File => Filename, Form_Of_File => "", Status => Status); if Status = SPARK_IO.Ok then if CommandLineData.Content.XML then XMLReport.Start_Section (Section => XMLReport.S_Report, Report => Local_File); XMLReport.Start_Section (Section => XMLReport.S_Prologue, Report => Local_File); else File_Utils.Print_A_Header (File => Local_File, Header_Line => "Report of SPARK Examination", File_Type => File_Utils.Other_File); end if; else OK := False; ScreenEcho.Put_Line ("Unable to open report file: "); Print_Filename (File => SPARK_IO.Standard_Output, Name => Filename, Plain => CommandLineData.Content.Plain_Output); ScreenEcho.New_Line (1); end if; Report_File := Local_File; end Create_Report_File; procedure Close_Report_File (Report_File : in out SPARK_IO.File_Type) --# global in out SPARK_IO.File_Sys; --# derives Report_File, --# SPARK_IO.File_Sys from *, --# Report_File; is Status : SPARK_IO.File_Status; begin --# accept F, 10, Status, "Status not used here" & --# F, 33, Status, "Status not used here"; SPARK_IO.Close (Report_File, Status); end Close_Report_File; procedure Output_File_List (Report_File : in SPARK_IO.File_Type) --# global in 
CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# LexTokenManager.State, --# Report_File; is File_Descriptor : ContextManager.FileDescriptors; begin if not CommandLineData.Content.XML then File_Descriptor := ContextManager.Ops.FirstFileDescriptor; SPARK_IO.New_Line (Report_File, 2); if File_Descriptor = ContextManager.NullFile then SPARK_IO.Put_Line (Report_File, "No source files used", 0); else if not CommandLineData.Content.XML then SPARK_IO.Put_Line (Report_File, "Source Filename(s) used were:", 0); end if; loop exit when File_Descriptor = ContextManager.NullFile; SPARK_IO.Put_String (Report_File, " ", 0); Print_Filename (File => Report_File, Name => LexTokenManager.Lex_String_To_String (Lex_Str => ContextManager.Ops.GetSourceFileName (File_Descriptor)), Plain => CommandLineData.Content.Plain_Output); SPARK_IO.New_Line (Report_File, 1); File_Descriptor := ContextManager.Ops.NextFileDescriptor (File_Descriptor); end loop; end if; end if; end Output_File_List; procedure Output_Error_List (Report_File : in SPARK_IO.File_Type; Report_File_Error_List : in out SPARK_IO.File_Type) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ContextManager.Ops.File_Heap; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives ContextManager.Ops.File_Heap, --# ErrorHandler.Error_Context, --# XMLReport.State from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Report_File, --# SPARK_IO.File_Sys, --# XMLReport.State & --# Report_File_Error_List from * & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Report_File, --# Report_File_Error_List, --# XMLReport.State; is subtype Line_Index is Positive range 1 .. 
512;
      subtype Lines is String (Line_Index);
      Current_Line     : Lines;
      Stop             : Natural;
      Unused1, Unused2 : SPARK_IO.File_Status;
      File_Descriptor  : ContextManager.FileDescriptors;
   begin
      --# accept F, 10, Unused1, "Not required" &
      --#        F, 33, Unused1, "Not required" &
      --#        F, 10, Unused2, "Not required" &
      --#        F, 33, Unused2, "Not required";
      if CommandLineData.Content.XML then
         XMLReport.Start_Section (Section => XMLReport.S_Results, Report => Report_File);
      end if;
      --first output errors associated with files picked up from index mechanism
      File_Descriptor := ContextManager.Ops.FirstFileDescriptor;
      loop
         exit when File_Descriptor = ContextManager.NullFile;
         if not ContextManager.Ops.ErrorsReported (File_Descriptor) then
            Append_To_Report_File (Report_File     => Report_File,
                                   Purpose         => Error_Types.ForReportIndexedFiles,
                                   File_Descriptor => File_Descriptor);
         end if;
         File_Descriptor := ContextManager.Ops.NextFileDescriptor (File_Descriptor);
      end loop;
      --then append errors from files on command line/meta files
      SPARK_IO.Reset (Report_File_Error_List, SPARK_IO.In_File, Unused1);
      while not SPARK_IO.End_Of_File (Report_File_Error_List) loop
         SPARK_IO.Get_Line (Report_File_Error_List, --to get
                            Current_Line,
                            Stop);
         if Stop = 0 then
            SPARK_IO.New_Line (Report_File, 1);
         else
            SPARK_IO.Put_Line (Report_File, Current_Line, Stop);
         end if;
      end loop;
      SPARK_IO.Close (Report_File_Error_List, Unused2);
   end Output_Error_List;

   procedure Output_Units_Required_But_Not_Found (Report_File : in SPARK_IO.File_Type)
   --# global in     CommandLineData.Content;
   --#        in     ContextManager.Ops.Unit_Heap;
   --#        in     LexTokenManager.State;
   --#        in out SPARK_IO.File_Sys;
   --#        in out XMLReport.State;
   --# derives SPARK_IO.File_Sys from *,
   --#                                 CommandLineData.Content,
   --#                                 ContextManager.Ops.Unit_Heap,
   --#                                 LexTokenManager.State,
   --#                                 Report_File,
   --#                                 XMLReport.State &
   --#         XMLReport.State   from *,
   --#                                 CommandLineData.Content,
   --#                                 ContextManager.Ops.Unit_Heap,
   --#                                 LexTokenManager.State;
   is
      Status_Col : constant Positive := 50;
      Reason_Col : constant Positive := 72;
      Unit_Descriptor : ContextManager.UnitDescriptors;
      Unit_Name       : LexTokenLists.Lists;
      Unit_Type       : ContextManager.UnitTypes;
      First_Time      : Boolean;
   begin
      if CommandLineData.Content.XML then
         XMLReport.Start_Section (Section => XMLReport.S_Units_Not_Found, Report => Report_File);
         Unit_Descriptor := ContextManager.Ops.FirstUnitDescriptor;
         loop
            exit when Unit_Descriptor = ContextManager.NullUnit;
            if ContextManager.Ops.Get_Unit_Status (Unit_Descriptor => Unit_Descriptor) in ContextManager.UnitNotFound then
               ContextManager.Ops.GetUnitName (Unit_Descriptor, Unit_Name, Unit_Type);
               -- XMLReport needs to be updated to include the reason
               XMLReport.Ada_Unit
                 (Name        => LexTokenLists.Token_List_To_String (Token_List => Unit_Name),
                  Typ         => Get_Unit_Type (Unit_Type => Unit_Type),
                  Unit_Status => Get_Reason (Unit_Status => ContextManager.Ops.Get_Unit_Status (Unit_Descriptor => Unit_Descriptor)),
                  Report      => Report_File);
            end if;
            Unit_Descriptor := ContextManager.Ops.NextUnitDescriptor (Unit_Descriptor);
         end loop;
         XMLReport.End_Section (Section => XMLReport.S_Units_Not_Found, Report => Report_File);
      else
         First_Time := True;
         Unit_Descriptor := ContextManager.Ops.FirstUnitDescriptor;
         loop
            exit when Unit_Descriptor = ContextManager.NullUnit;
            if ContextManager.Ops.Get_Unit_Status (Unit_Descriptor => Unit_Descriptor) in ContextManager.UnitNotFound then
               SPARK_IO.New_Line (Report_File, 1);
               if First_Time then
                  SPARK_IO.New_Line (Report_File, 1);
                  SPARK_IO.Put_Line (Report_File, "The following were required during analysis but could not be located:",
0); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, " Unit name", 0); SPARK_IO.Set_Col (Report_File, Status_Col); SPARK_IO.Put_String (Report_File, "Unit type", 0); SPARK_IO.Set_Col (Report_File, Reason_Col); SPARK_IO.Put_Line (Report_File, "Reason", 0); SPARK_IO.Put_String (Report_File, " ---------", 0); SPARK_IO.Set_Col (Report_File, Status_Col); SPARK_IO.Put_String (Report_File, "---------", 0); SPARK_IO.Set_Col (Report_File, Reason_Col); SPARK_IO.Put_Line (Report_File, "------", 0); First_Time := False; end if; ContextManager.Ops.GetUnitName (Unit_Descriptor, Unit_Name, Unit_Type); SPARK_IO.Put_String (Report_File, " ", 0); Print_Lex_Token_List (File => Report_File, List => Unit_Name); SPARK_IO.Set_Col (Report_File, Status_Col); Print_Unit_Type (File => Report_File, Unit_Type => Unit_Type); SPARK_IO.Set_Col (Report_File, Reason_Col); Print_Reason (File => Report_File, Unit_Status => ContextManager.Ops.Get_Unit_Status (Unit_Descriptor => Unit_Descriptor)); end if; Unit_Descriptor := ContextManager.Ops.NextUnitDescriptor (Unit_Descriptor); end loop; SPARK_IO.New_Line (Report_File, 2); end if; end Output_Units_Required_But_Not_Found; procedure Output_Units_With_Cyclic_Requirements (Report_File : in SPARK_IO.File_Type) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Heap; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# LexTokenManager.State, --# Report_File, --# XMLReport.State & --# XMLReport.State from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# LexTokenManager.State; is Status_Col : constant Positive := 50; Unit_Descriptor : ContextManager.UnitDescriptors; Unit_Name : LexTokenLists.Lists; Unit_Type : ContextManager.UnitTypes; First_Time : Boolean; begin if CommandLineData.Content.XML then XMLReport.Start_Section (Section => XMLReport.S_Cyclic_Requirements, Report => Report_File); Unit_Descriptor := ContextManager.Ops.FirstUnitDescriptor; loop exit when Unit_Descriptor = ContextManager.NullUnit; if ContextManager.Ops.UnitInCycle (Unit_Descriptor) then ContextManager.Ops.GetUnitName (Unit_Descriptor, Unit_Name, Unit_Type); XMLReport.Unit (LexTokenLists.Token_List_To_String (Token_List => Unit_Name), Get_Unit_Type (Unit_Type => Unit_Type), Report_File); end if; Unit_Descriptor := ContextManager.Ops.NextUnitDescriptor (Unit_Descriptor); end loop; XMLReport.End_Section (Section => XMLReport.S_Cyclic_Requirements, Report => Report_File); else First_Time := True; Unit_Descriptor := ContextManager.Ops.FirstUnitDescriptor; loop exit when Unit_Descriptor = ContextManager.NullUnit; if ContextManager.Ops.UnitInCycle (Unit_Descriptor) then SPARK_IO.New_Line (Report_File, 1); if First_Time then SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_Line (Report_File, "A cycle was detected when determining the required units of the following:", 0); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, " Unit name", 0); SPARK_IO.Set_Col (Report_File, Status_Col); SPARK_IO.Put_Line (Report_File, "Unit type", 0); SPARK_IO.Put_String (Report_File, " ---------", 0); SPARK_IO.Set_Col (Report_File, Status_Col); SPARK_IO.Put_Line (Report_File, "---------", 0); First_Time := False; end if; ContextManager.Ops.GetUnitName (Unit_Descriptor, Unit_Name, Unit_Type); SPARK_IO.Put_String (Report_File, " ", 0); Print_Lex_Token_List (File => Report_File, List => Unit_Name); SPARK_IO.Set_Col 
(Report_File, Status_Col); Print_Unit_Type (File => Report_File, Unit_Type => Unit_Type); end if; Unit_Descriptor := ContextManager.Ops.NextUnitDescriptor (Unit_Descriptor); end loop; if not First_Time then SPARK_IO.New_Line (Report_File, 2); end if; end if; end Output_Units_With_Cyclic_Requirements; procedure Output_Command_Line (Report_File : in SPARK_IO.File_Type) --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Report_File, --# XMLReport.State & --# XMLReport.State from *, --# CommandLineData.Content; is Tmp_String : E_Strings.T; Option_Str : E_Strings.T; function Plain_Output (E_Str : E_Strings.T) return E_Strings.T --# global in CommandLineData.Content; is Result : E_Strings.T; begin if CommandLineData.Content.Plain_Output and then not CommandLineData.Content.XML then Result := FileSystem.Just_File (Fn => E_Str, Ext => True); else Result := E_Str; end if; return Result; end Plain_Output; begin if CommandLineData.Content.XML then XMLReport.Start_Section (Section => XMLReport.S_Commandline, Report => Report_File); else SPARK_IO.Put_Line (Report_File, "Options:", 0); end if; CommandLineData.Output_Command_Line (Prefix => " ", XML => CommandLineData.Content.XML, Option_Str => Option_Str); E_Strings.Put_String (File => Report_File, E_Str => Option_Str); --# assert True; if not CommandLineData.Content.XML then SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_Line (Report_File, "Selected files:", 0); end if; --# assert True; for I in CommandLineData.Source_File_Counts range 1 .. CommandLineData.Content.Number_Source loop Tmp_String := Plain_Output (E_Str => CommandLineData.Content.Source_File_List (I).Source_File_Name); --# accept F, 41, "Stable expression OK here"; if CommandLineData.Content.XML then XMLReport.Filename (Plain_Output => CommandLineData.Content.Plain_Output, File => Tmp_String); E_Strings.Put_String (File => Report_File, E_Str => Tmp_String); else SPARK_IO.Put_String (Report_File, " ", 0); E_Strings.Put_Line (File => Report_File, E_Str => Tmp_String); end if; --# end accept; end loop; --# assert True; if CommandLineData.Content.XML then XMLReport.End_Section (Section => XMLReport.S_Commandline, Report => Report_File); else SPARK_IO.New_Line (Report_File, 2); end if; end Output_Command_Line; procedure Output_Index_List (Report_File : in SPARK_IO.File_Type) --# global in CommandLineData.Content; --# in IndexManager.State; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# IndexManager.State, --# LexTokenManager.State, --# Report_File, --# XMLReport.State & --# XMLReport.State from *, --# CommandLineData.Content, --# IndexManager.State; is begin IndexManager.List_Index_File (Report_File => Report_File); end Output_Index_List; procedure Output_Meta_File_List (Report_File : in SPARK_IO.File_Type) --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives SPARK_IO.File_Sys, --# XMLReport.State from CommandLineData.Content, --# Report_File, --# SPARK_IO.File_Sys, --# XMLReport.State; is Meta_File_Used : Boolean := False; Filename : E_Strings.T; begin if CommandLineData.Content.XML then XMLReport.Start_Section (Section => XMLReport.S_Meta_Files, Report => Report_File); else SPARK_IO.New_Line (Report_File, 1); end if; for CurrentSource in CommandLineData.Source_File_Positions range 1 .. 
CommandLineData.Content.Number_Source loop
         Filename := CommandLineData.Content.Source_File_List (CurrentSource).Source_File_Name;
         if E_Strings.Get_Element (E_Str => Filename, Pos => 1) = '@' then
            MetaFile.Report_File_Content (To_File => Report_File, Filename => Filename, Meta_File_Used => Meta_File_Used);
         end if;
      end loop;
      if not Meta_File_Used and not CommandLineData.Content.XML then
         SPARK_IO.New_Line (Report_File, 1);
         SPARK_IO.Put_Line (Report_File, "No Meta Files used", 0);
      end if;
      if CommandLineData.Content.XML then
         XMLReport.End_Section (Section => XMLReport.S_Meta_Files, Report => Report_File);
      end if;
   end Output_Meta_File_List;

begin --Output_Report_File
   if CommandLineData.Content.Report then
      Create_Report_File (Report_File => Report_File, OK => OK);
      if OK then
         if CommandLineData.Content.Echo and not CommandLineData.Content.Brief then
            ScreenEcho.Echo (E_Strings.Copy_String (Str => Report_Message));
         end if;
         Output_Command_Line (Report_File => Report_File);
         Output_Index_List (Report_File => Report_File);
         Output_Meta_File_List (Report_File => Report_File);
         ErrorHandler.Output_Warning_List (To_File => Report_File);
         Dictionary.Output_Target_Data_File (To_File => Report_File);
         ConfigFile.Output_Config_File (To_File => Report_File);
         Output_File_List (Report_File => Report_File);
         Output_Units_Required_But_Not_Found (Report_File => Report_File);
         Output_Units_With_Cyclic_Requirements (Report_File => Report_File);
         if CommandLineData.Content.XML then
            XMLReport.End_Section (Section => XMLReport.S_Prologue, Report => Report_File);
         end if;
         Output_Error_List (Report_File => Report_File, Report_File_Error_List => Report_File_Error_List);
         if CommandLineData.Content.Syntax_Only then
            SPARK_IO.New_Line (Report_File, 1);
            SPARK_IO.Put_Line (Report_File, "Warning: No semantic checks carried out, text may not be legal SPARK", 0);
         end if;
         if CommandLineData.Content.Flow_Option = CommandLineData.Data_Flow then
            SPARK_IO.New_Line (Report_File, 1);
            SPARK_IO.Put_Line (Report_File, "Note: Data flow analysis mode selected", 0);
         elsif CommandLineData.Content.Flow_Option = CommandLineData.Auto_Flow then
            SPARK_IO.New_Line (Report_File, 1);
            SPARK_IO.Put_Line (Report_File, "Note: Automatic flow analysis mode selected", 0);
         end if;
         if CommandLineData.Content.Write_Statistics then
            Statistics.WriteOutput (Report_File);
         end if;
         if not CommandLineData.Content.Plain_Output then
            ErrorHandler.Output_Reference_List (To_File => Report_File);
         end if;
         if CommandLineData.Content.XML then
            XMLReport.End_Section (Section => XMLReport.S_Report, Report => Report_File);
         else
            Show_End_Of_File (To_File => Report_File);
         end if;
         --# accept F, 10, Report_File, "Final assignment after close";
         Close_Report_File (Report_File => Report_File);
         --# end accept;
         if CommandLineData.Content.HTML then
            SparkHTML.Gen_Report_HTML;
         end if;
      end if;
   end if;
end Output_Report_File;

procedure Echo_Listing_File (Filename : in E_Strings.T)
--# global in     CommandLineData.Content;
--#        in out SPARK_IO.File_Sys;
--# derives SPARK_IO.File_Sys from *,
--#                                 CommandLineData.Content,
--#                                 Filename;
is
   Message : E_Strings.T;
begin
   if CommandLineData.Content.Echo and not CommandLineData.Content.Brief then
      Message := E_Strings.Copy_String (Str => "Generating listing file ");
      if CommandLineData.Content.Plain_Output then
         E_Strings.Append_Examiner_String (E_Str1 => Message, E_Str2 => FileSystem.Just_File (Fn => Filename, Ext => True));
      else
         E_Strings.Append_Examiner_String (E_Str1 => Message, E_Str2 => Filename);
      end if;
      ScreenEcho.Echo (Message);
   end if;
end Echo_Listing_File;
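-- Editorial summary of the procedure that follows: Output_Listing_File creates
-- the listing file named in the file descriptor, prints the "Listing of SPARK
-- Text" header, restores the stored error context and replays it through
-- ErrorHandler.PrintErrors, appends notes about the analysis mode (syntax-only,
-- data/automatic flow analysis, Ada 83 language rules) and then closes the
-- file, saving the updated error context back into the file descriptor.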
procedure Output_Listing_File (File_Descriptor : in ContextManager.FileDescriptors) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ContextManager.Ops.File_Heap; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ContextManager.Ops.File_Heap, --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# File_Descriptor, --# LexTokenManager.State, --# SPARK_IO.File_Sys; is Listing_File : SPARK_IO.File_Type := SPARK_IO.Null_File; Plain_Listing_Filename : E_Strings.T; Listing_Filename : E_Strings.T; Status : SPARK_IO.File_Status; Error_Context : ErrorHandler.Error_Contexts; begin ContextManager.Ops.GetListingFileName (File_Descriptor, Listing_Filename); Plain_Listing_Filename := Listing_Filename; CommandLineData.Normalize_File_Name_To_Output_Directory (F => Listing_Filename); Listing_Filename := FileSystem.Case_Of_Files_For_Create (E_Str => Listing_Filename); E_Strings.Create (File => Listing_File, Name_Of_File => Listing_Filename, Form_Of_File => "", Status => Status); if Status /= SPARK_IO.Ok then ScreenEcho.Put_String ("Listing file, "); Print_Filename (File => SPARK_IO.Standard_Output, Name => Listing_Filename, Plain => False); ScreenEcho.Put_Line (", could not be created"); else Echo_Listing_File (Filename => Plain_Listing_Filename); File_Utils.Print_A_Header (File => Listing_File, Header_Line => "Listing of SPARK Text", File_Type => File_Utils.Other_File); ContextManager.Ops.GetErrorContext (File_Descriptor, Error_Context); ErrorHandler.Set_Error_Context (Context => Error_Context); ErrorHandler.PrintErrors (Listing_File, Error_Types.ForListing); if CommandLineData.Content.Syntax_Only then -- HTML directives: --! --! -- HTML output --! warning-no-semantic-checks --! Warning : No semantic checks carried out, text may not be legal SPARK. --! Issued when the Examiner is used solely to check the syntax of a --! SPARK text: this does not check the semantics of a program --! (e.g. the correctness of the annotations) and --! therefore does not guarantee that a program is legal SPARK. SPARK_IO.New_Line (Listing_File, 1); SPARK_IO.Put_Line (Listing_File, "Warning: No semantic checks carried out, text may not be legal SPARK", 0); end if; if CommandLineData.Content.Flow_Option = CommandLineData.Data_Flow then --! note-information-flow --! Note: Information flow analysis not carried out --! This is issued as a reminder that information flow analysis has --! not been carried out in this run of the Examiner: information --! flow errors may be present undetected in the text analysed SPARK_IO.New_Line (Listing_File, 1); SPARK_IO.Put_Line (Listing_File, "Note: Information flow analysis not carried out", 0); elsif CommandLineData.Content.Flow_Option = CommandLineData.Auto_Flow then --! note-automatic-flow --! Note: Flow analysis mode is automatic --! This is issued as a reminder that the Examiner will perform --! information flow analysis if it encounters full derives annotations --! and will perform data flow analysis if only moded global annotations --! are present. Information flow errors may be present undetected in the --! text analysed SPARK_IO.New_Line (Listing_File, 1); SPARK_IO.Put_Line (Listing_File, "Note: Flow analysis mode is automatic", 0); end if; case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => --! note-ada-83 --! Note: Ada 83 language rules selected --! 
Issued when the Examiner is used in SPARK 83 mode SPARK_IO.New_Line (Listing_File, 1); SPARK_IO.Put_Line (Listing_File, "Note: Ada83 language rules selected", 0); when CommandLineData.SPARK95_Onwards => null; end case; ErrorHandler.Get_Error_Context (Context => Error_Context); ContextManager.Ops.SetErrorContext (File_Descriptor, Error_Context); Show_End_Of_File (To_File => Listing_File); --# accept F, 10, Listing_File, "Final assignment after close" & --# F, 10, Status, "Final assignment after close"; SPARK_IO.Close (Listing_File, Status); --# end accept; end if; end Output_Listing_File; procedure Parse_Current_Unit --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in Dictionary.Dict; --# in out ContextManager.Ops.File_Heap; --# in out ContextManager.Ops.Unit_Heap; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# out SparkLex.Curr_Line; --# derives ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table; is File_Descriptor : ContextManager.FileDescriptors; Source_File : SPARK_IO.File_Type; Max_Stack_Size : Natural; File_End : Boolean; Severity : ErrorHandler.Error_Level; Parse_Tree : STree.SyntaxNode; File_Context : SparkLex.Line_Context; Error_Context : ErrorHandler.Error_Contexts; begin File_Descriptor := ContextManager.Ops.Get_File_Descriptor (Unit_Descriptor => ContextManager.Ops.Current_Unit); ContextManager.Ops.GetSourceFile (File_Descriptor, Source_File); ContextManager.Ops.GetLineContext (File_Descriptor, File_Context); SparkLex.Restore_Line_Context (File_Line => File_Context); ContextManager.Ops.GetErrorContext (File_Descriptor, Error_Context); ErrorHandler.Set_Error_Context (Context => Error_Context); --# accept F, 10, Max_Stack_Size, "Not required here"; SPParser.SPParse (Source_File, Max_Stack_Size, File_End); --# end accept; STree.RetrieveCurrentRoot (Parse_Tree); ContextManager.Ops.SetParseTree (ContextManager.Ops.Current_Unit, Parse_Tree); ContextManager.Ops.SetUnitStatus (ContextManager.Ops.Current_Unit, ContextManager.UnitParsed); if File_End then ContextManager.Ops.Close_File (File_Descriptor => File_Descriptor); end if; ErrorHandler.Get_Error_Severity (Severity => Severity); if Severity = ErrorHandler.Fatal then ContextManager.Ops.SetUnitStatus (ContextManager.Ops.Current_Unit, ContextManager.UnitAnalysed); ContextManager.Ops.Close_File (File_Descriptor => File_Descriptor); end if; SparkLex.Store_Line_Context (File_Line => File_Context); ContextManager.Ops.SetLineContext (File_Descriptor, File_Context); ErrorHandler.Get_Error_Context (Context => Error_Context); ContextManager.Ops.SetErrorContext (File_Descriptor, Error_Context); --# accept F, 33, Max_Stack_Size, "Not required here"; end Parse_Current_Unit; procedure Echo_Compilation_Unit (Unit_Name : in LexTokenLists.Lists; Unit_Type : in ContextManager.UnitTypes) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# LexTokenManager.State, --# Unit_Name, --# Unit_Type; is Message : E_Strings.T; begin if 
CommandLineData.Content.Echo and then not CommandLineData.Content.Brief and then Unit_Type /= ContextManager.InterUnitPragma then Message := E_Strings.Copy_String (Str => "Examining "); case Unit_Type is when ContextManager.MainProgram => E_Strings.Append_String (E_Str => Message, Str => "main program "); when ContextManager.PackageSpecification => E_Strings.Append_String (E_Str => Message, Str => "the specification of package "); when ContextManager.PackageBody => E_Strings.Append_String (E_Str => Message, Str => "the body of package "); when ContextManager.SubUnit => E_Strings.Append_String (E_Str => Message, Str => "subunit "); when ContextManager.GenericSubprogramDeclaration => E_Strings.Append_String (E_Str => Message, Str => "generic subprogram declaration "); when ContextManager.GenericSubprogramBody => E_Strings.Append_String (E_Str => Message, Str => "generic subprogram body "); -- following don't get examined so don't need suitable message when ContextManager.InterUnitPragma | ContextManager.InvalidUnit => null; end case; E_Strings.Append_Examiner_String (E_Str1 => Message, E_Str2 => LexTokenManager.Lex_String_To_String (Lex_Str => LexTokenLists.Get_Element (List => Unit_Name, Pos => LexTokenLists.Get_Length (List => Unit_Name)))); ScreenEcho.Echo (Message); end if; end Echo_Compilation_Unit; function Get_Likely_Unit_Type (Possible_Unit_Types : ContextManager.UnitTypeSets) return ContextManager.UnitTypes is Actual_Unit_Type : ContextManager.UnitTypes; begin -- since we didn't find the unit we need to guess what kind of thing we were -- looking for so that we can make a sensible report about missing units in the -- report file. We use the "sets" of possible units to decide. if Possible_Unit_Types (ContextManager.PackageSpecification) then Actual_Unit_Type := ContextManager.PackageSpecification; elsif Possible_Unit_Types (ContextManager.GenericSubprogramDeclaration) then Actual_Unit_Type := ContextManager.GenericSubprogramDeclaration; elsif Possible_Unit_Types (ContextManager.SubUnit) then Actual_Unit_Type := ContextManager.SubUnit; else Actual_Unit_Type := ContextManager.PackageBody; end if; return Actual_Unit_Type; end Get_Likely_Unit_Type; procedure Print_Lookup_Fail_Message (Curr_File : in E_Strings.T; Required_Unit : in E_Strings.T; Expected_Sort : in E_Strings.T; Message : in String) --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Curr_File, --# Expected_Sort, --# Message, --# Required_Unit; is Curr_Str : E_Strings.T; begin if E_Strings.Get_Length (E_Str => Curr_File) > 0 then if CommandLineData.Content.Plain_Output or else CommandLineData.Content.Brief then Curr_Str := FileSystem.Just_File (Fn => Curr_File, Ext => True); else Curr_Str := Curr_File; end if; else Curr_Str := E_Strings.Empty_String; end if; if CommandLineData.Content.Brief then ScreenEcho.Put_ExaminerString (Curr_Str); ScreenEcho.Put_Char (':'); ScreenEcho.Put_Integer (1, 0, 10); ScreenEcho.Put_Char (':'); ScreenEcho.Put_Integer (1, 0, 10); ScreenEcho.Put_String (": "); else ScreenEcho.New_Line (1); ScreenEcho.Put_String ("In "); ScreenEcho.Put_ExaminerString (Curr_Str); ScreenEcho.Put_String (": "); end if; ScreenEcho.Put_ExaminerString (Expected_Sort); ScreenEcho.Put_Char (' '); ScreenEcho.Put_ExaminerString (Required_Unit); ScreenEcho.Put_String (" is not declared in this file "); ScreenEcho.Put_String ("and "); ScreenEcho.Put_ExaminerString (Expected_Sort); ScreenEcho.Put_Char (' '); ScreenEcho.Put_ExaminerString 
(Required_Unit); ScreenEcho.Put_Char (' '); ScreenEcho.Put_Line (Message); end Print_Lookup_Fail_Message; procedure Add_Required_Unit (Required_Unit : in LexTokenLists.Lists; Possible_Unit_Types : in ContextManager.UnitTypeSets) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ContextManager.Ops.File_Heap; --# in out ContextManager.Ops.Unit_Heap; --# in out ContextManager.Ops.Unit_Stack; --# in out ErrorHandler.Error_Context; --# in out IndexManager.State; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# derives ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# Possible_Unit_Types, --# Required_Unit, --# SPARK_IO.File_Sys & --# IndexManager.State from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Possible_Unit_Types, --# Required_Unit, --# SPARK_IO.File_Sys & --# LexTokenManager.State, --# SparkLex.Curr_Line from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# Possible_Unit_Types, --# Required_Unit, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# Possible_Unit_Types, --# Required_Unit; is Unit_Descriptor : ContextManager.UnitDescriptors; Source_Filename : LexTokenManager.Lex_String; Actual_Unit_Type : ContextManager.UnitTypes; File_Descriptor : ContextManager.FileDescriptors; File_Str : E_Strings.T; Found : Boolean; begin Trace (Msg => "In MainLoop.Add_Required_Unit"); Trace_Unit (List => Required_Unit, Types => Possible_Unit_Types); ContextManager.Ops.GetUnitByName (Required_Unit, Possible_Unit_Types, Unit_Descriptor); if Unit_Descriptor = ContextManager.NullUnit then File_Descriptor := ContextManager.Ops.Get_File_Descriptor (Unit_Descriptor => ContextManager.Ops.Current_Unit); -- A unit which was previously referenced, but could not be located -- can have a NullFile entry, and therefore won't have an entry -- in the Source_Filename table either. We need to report something, so... 
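-- Summary of the lookup that follows: choose a printable source-file name
-- (falling back to "UNKNOWN" when the current unit has no associated file),
-- then, if an index file or the SPARK library is in use, ask
-- IndexManager.Look_Up for the required unit.  On success a unit descriptor is
-- created for the indicated file (opening it first if it is not already known)
-- and pushed onto the unit stack; otherwise the unit is recorded as
-- CannotOpenFile, NotInIndexFile or NoIndexFile so the report can explain why
-- it was not analysed.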
if File_Descriptor = ContextManager.NullFile then File_Str := E_Strings.Copy_String (Str => "UNKNOWN"); else File_Str := LexTokenManager.Lex_String_To_String (Lex_Str => ContextManager.Ops.GetSourceFileName (File_Descriptor)); end if; if CommandLineData.Content.Index or else CommandLineData.Content.SPARK_Lib then Trace (Msg => " unit not known, calling IndexManager.LookUp"); IndexManager.Look_Up (Required_Unit => Required_Unit, Possible_Unit_Types => Possible_Unit_Types, Source_Filename => Source_Filename, Actual_Unit_Type => Actual_Unit_Type, Found => Found); if Found then Trace (Msg => " Found"); File_Descriptor := ContextManager.Ops.GetFileByName (Source_Filename); if File_Descriptor = ContextManager.NullFile then Create_File_Context (Source_Filename => LexTokenManager.Lex_String_To_String (Lex_Str => Source_Filename), File_Descriptor => File_Descriptor); ContextManager.Ops.Open_File (File_Descriptor => File_Descriptor); end if; ContextManager.Ops.Create_Unit_Descriptor (File_Descriptor => File_Descriptor, Unit_Descriptor => Unit_Descriptor); ContextManager.Ops.SetUnitName (Unit_Descriptor, Required_Unit, Actual_Unit_Type); if ContextManager.Ops.GetFileStatus (File_Descriptor) = ContextManager.UnableToOpen then Trace (Msg => " unit name found in index file but cannot open specified file"); Actual_Unit_Type := Get_Likely_Unit_Type (Possible_Unit_Types => Possible_Unit_Types); Print_Lookup_Fail_Message (Curr_File => File_Str, Required_Unit => Get_Unit_Name (List => Required_Unit), Expected_Sort => Get_Unit_Type (Unit_Type => Actual_Unit_Type), Message => "source file entry in index file cannot be opened."); ContextManager.Ops.SetUnitStatus (Unit_Descriptor, ContextManager.CannotOpenFile); else ContextManager.Ops.PushUnit (Unit_Descriptor); end if; else Trace (Msg => " not found in Index File"); Actual_Unit_Type := Get_Likely_Unit_Type (Possible_Unit_Types => Possible_Unit_Types); Print_Lookup_Fail_Message (Curr_File => File_Str, Required_Unit => Get_Unit_Name (List => Required_Unit), Expected_Sort => Get_Unit_Type (Unit_Type => Actual_Unit_Type), Message => "is not referenced in an index file."); ContextManager.Ops.Create_Unit_Descriptor (File_Descriptor => ContextManager.NullFile, Unit_Descriptor => Unit_Descriptor); ContextManager.Ops.SetUnitName (Unit_Descriptor, Required_Unit, Actual_Unit_Type); ContextManager.Ops.SetUnitStatus (Unit_Descriptor, ContextManager.NotInIndexFile); end if; else Trace (Msg => " unit not known, and no index file specified"); Actual_Unit_Type := Get_Likely_Unit_Type (Possible_Unit_Types => Possible_Unit_Types); Print_Lookup_Fail_Message (Curr_File => File_Str, Required_Unit => Get_Unit_Name (List => Required_Unit), Expected_Sort => Get_Unit_Type (Unit_Type => Actual_Unit_Type), Message => "cannot be located as no index file has been specified."); ContextManager.Ops.Create_Unit_Descriptor (File_Descriptor => ContextManager.NullFile, Unit_Descriptor => Unit_Descriptor); ContextManager.Ops.SetUnitName (Unit_Descriptor, Required_Unit, Actual_Unit_Type); ContextManager.Ops.SetUnitStatus (Unit_Descriptor, ContextManager.NoIndexFile); end if; else Trace (Msg => " unit already seen"); if ContextManager.Ops.Get_Unit_Status (Unit_Descriptor => Unit_Descriptor) = ContextManager.UnitParsed then ContextManager.Ops.MarkUnitInCycle (Unit_Descriptor); end if; ContextManager.Ops.PushUnit (Unit_Descriptor); end if; end Add_Required_Unit; procedure Set_Up_Required_Units --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in STree.Table; --# in out 
ContextManager.Ops.File_Heap; --# in out ContextManager.Ops.Unit_Heap; --# in out ContextManager.Ops.Unit_Stack; --# in out ErrorHandler.Error_Context; --# in out IndexManager.State; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# derives ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table; is Inherit_Ptr : STree.SyntaxNode; Unit_Type : ContextManager.UnitTypes; Unit_Name : LexTokenLists.Lists; Current_Unit_Name : LexTokenLists.Lists; Current_Unit_Type : ContextManager.UnitTypes; function Found_Top_Unit (Wanted_Name : LexTokenLists.Lists; Wanted_Type : ContextManager.UnitTypes; Found_Name : LexTokenLists.Lists; Found_Type : ContextManager.UnitTypes) return Boolean --# global in LexTokenManager.State; is begin Trace (Msg => "In MainLoop.Found_Top_Unit"); return LexTokenLists.Get_Length (List => Wanted_Name) = 0 or else (LexTokenLists.Eq_Unit (First_Item => Wanted_Name, Second => Found_Name) and Wanted_Type = Found_Type); end Found_Top_Unit; procedure Replace_Top_Unit (Current_Unit_Name : in LexTokenLists.Lists; Current_Unit_Type : in ContextManager.UnitTypes) --# global in LexTokenManager.State; --# in STree.Table; --# in out ContextManager.Ops.File_Heap; --# in out ContextManager.Ops.Unit_Heap; --# in out ContextManager.Ops.Unit_Stack; --# derives ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Stack from ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Unit_Name, --# LexTokenManager.State & --# ContextManager.Ops.Unit_Heap from *, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Unit_Name, --# Current_Unit_Type, --# LexTokenManager.State, --# STree.Table; is New_Unit_Descriptor, Old_Unit_Descriptor : ContextManager.UnitDescriptors; File_Descriptor : ContextManager.FileDescriptors; Found_Unit : LexTokenLists.Lists; Found_Unit_Type : ContextManager.UnitTypes; Found_Unit_Type_Set : ContextManager.UnitTypeSets; Found_Unit_Descriptor : ContextManager.UnitDescriptors; Found_File_Descriptor : ContextManager.FileDescriptors; Parse_Tree : STree.SyntaxNode; begin --# accept F, 10, Old_Unit_Descriptor, "Not required here" & --# F, 33, Old_Unit_Descriptor, "Not required here"; Trace (Msg => "In MainLoop.Replace_Top_Unit"); if LexTokenLists.Get_Length (List => Current_Unit_Name) = 0 then ContextManager.Ops.PopUnit (Old_Unit_Descriptor); else File_Descriptor := ContextManager.Ops.Get_File_Descriptor (Unit_Descriptor => ContextManager.Ops.Current_Unit); Found_File_Descriptor := File_Descriptor; ContextManager.Ops.GetUnitName (ContextManager.Ops.Current_Unit, Found_Unit, Found_Unit_Type); Found_Unit_Type_Set := ContextManager.UnitTypeSets'(others => False); Found_Unit_Type_Set (Found_Unit_Type) := True; ContextManager.Ops.GetUnitByName (Found_Unit, Found_Unit_Type_Set, Found_Unit_Descriptor); if ContextManager.Ops.Get_Unit_Status (Unit_Descriptor => Found_Unit_Descriptor) = ContextManager.UnitCreated then Found_File_Descriptor := ContextManager.Ops.Get_File_Descriptor (Unit_Descriptor => 
Found_Unit_Descriptor); end if; if Found_Unit_Descriptor /= ContextManager.Ops.Current_Unit and then Found_File_Descriptor = File_Descriptor then -- special case of earlier pending unit found ContextManager.Ops.SetUnitStatus (Found_Unit_Descriptor, ContextManager.UnitParsed); ContextManager.Ops.GetParseTree (ContextManager.Ops.Current_Unit, Parse_Tree); ContextManager.Ops.SetParseTree (Found_Unit_Descriptor, Parse_Tree); ContextManager.Ops.SetParseTree (ContextManager.Ops.Current_Unit, STree.NullNode); ContextManager.Ops.SetUnitName (ContextManager.Ops.Current_Unit, Current_Unit_Name, Current_Unit_Type); case ContextManager.Ops.GetFileStatus (File_Descriptor) is when ContextManager.FileEnd => ContextManager.Ops.SetUnitStatus (ContextManager.Ops.Current_Unit, ContextManager.UnableToLocate); ContextManager.Ops.PopUnit (Old_Unit_Descriptor); when ContextManager.UnableToOpen => ContextManager.Ops.SetUnitStatus (ContextManager.Ops.Current_Unit, ContextManager.CannotOpenFile); ContextManager.Ops.PopUnit (Old_Unit_Descriptor); when others => ContextManager.Ops.SetUnitStatus (ContextManager.Ops.Current_Unit, ContextManager.UnitCreated); end case; else -- 'normal' case ContextManager.Ops.Create_Unit_Descriptor (File_Descriptor => File_Descriptor, Unit_Descriptor => New_Unit_Descriptor); ContextManager.Ops.SetUnitStatus (ContextManager.Ops.Current_Unit, ContextManager.UnitDeferred); ContextManager.Ops.PopUnit (Old_Unit_Descriptor); ContextManager.Ops.SetUnitName (New_Unit_Descriptor, Current_Unit_Name, Current_Unit_Type); case ContextManager.Ops.GetFileStatus (File_Descriptor) is when ContextManager.FileEnd => ContextManager.Ops.SetUnitStatus (New_Unit_Descriptor, ContextManager.UnableToLocate); when ContextManager.UnableToOpen => ContextManager.Ops.SetUnitStatus (New_Unit_Descriptor, ContextManager.CannotOpenFile); when others => ContextManager.Ops.PushUnit (New_Unit_Descriptor); end case; end if; end if; end Replace_Top_Unit; procedure Add_Inherited_Packages --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in STree.Table; --# in out ContextManager.Ops.File_Heap; --# in out ContextManager.Ops.Unit_Heap; --# in out ContextManager.Ops.Unit_Stack; --# in out ErrorHandler.Error_Context; --# in out IndexManager.State; --# in out Inherit_Ptr; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# derives ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# IndexManager.State, --# Inherit_Ptr, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table & --# Inherit_Ptr from *, --# STree.Table; is Required_Unit : LexTokenLists.Lists; Found : Boolean; -- If a package is inherited and therefore required we do not want to add it -- to the required unit list if it is predefined. Currently only -- Ada.Characters.Latin_1 is predefined. If Ravenscar is selected, package -- Ada.Real_Time is also predefined. 
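-- For illustration, the function below returns True for the names Ada,
-- Ada.Characters and Ada.Characters.Latin_1 and, when the Ravenscar profile is
-- selected, also for Ada.Real_Time, Ada.Interrupts and
-- Ada.Synchronous_Task_Control; this is a slightly wider set than the comment
-- above describes.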
function Is_Predefined_Package return Boolean --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in Required_Unit; is Result : Boolean; begin case LexTokenLists.Get_Length (List => Required_Unit) is when 1 => -- Ada Result := LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => LexTokenLists.Get_Element (List => Required_Unit, Pos => 1), Lex_Str2 => LexTokenManager.Ada_Token) = LexTokenManager.Str_Eq; when 2 => -- Ada.Real_Time, Ada.Characters or Ada.Interrupts Result := LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => LexTokenLists.Get_Element (List => Required_Unit, Pos => 1), Lex_Str2 => LexTokenManager.Ada_Token) = LexTokenManager.Str_Eq and then (LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => LexTokenLists.Get_Element (List => Required_Unit, Pos => 2), Lex_Str2 => LexTokenManager.Characters_Token) = LexTokenManager.Str_Eq or else (CommandLineData.Ravenscar_Selected and then (LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => LexTokenLists.Get_Element (List => Required_Unit, Pos => 2), Lex_Str2 => LexTokenManager.Real_Time_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => LexTokenLists.Get_Element (List => Required_Unit, Pos => 2), Lex_Str2 => LexTokenManager.Interrupts_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => LexTokenLists.Get_Element (List => Required_Unit, Pos => 2), Lex_Str2 => LexTokenManager.Synchronous_Task_Control_Token) = LexTokenManager.Str_Eq))); when 3 => -- Ada.Characters.Latin_1 Result := LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => LexTokenLists.Get_Element (List => Required_Unit, Pos => 1), Lex_Str2 => LexTokenManager.Ada_Token) = LexTokenManager.Str_Eq and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => LexTokenLists.Get_Element (List => Required_Unit, Pos => 2), Lex_Str2 => LexTokenManager.Characters_Token) = LexTokenManager.Str_Eq and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => LexTokenLists.Get_Element (List => Required_Unit, Pos => 3), Lex_Str2 => LexTokenManager.Latin_1_Token) = LexTokenManager.Str_Eq; when others => Result := False; end case; return Result; end Is_Predefined_Package; ------------------------ function System_Needed_And_Provided_By_Config_File return Boolean --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Required_Unit; is begin return LexTokenLists.Get_Length (List => Required_Unit) = 1 and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => LexTokenLists.Get_Element (List => Required_Unit, Pos => 1), Lex_Str2 => LexTokenManager.System_Token) = LexTokenManager.Str_Eq and then Dictionary.IsDefined (Name => LexTokenManager.System_Token, Scope => Dictionary.GlobalScope, Context => Dictionary.ProgramContext, Full_Package_Name => False); end System_Needed_And_Provided_By_Config_File; ------------------------ function Is_Predefined_Generic return Boolean --# global in LexTokenManager.State; --# in Required_Unit; is Result : Boolean; begin case LexTokenLists.Get_Length (List => Required_Unit) is when 1 => -- Unchecked_Conversion Result := LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => LexTokenLists.Get_Element (List => Required_Unit, Pos => 1), Lex_Str2 => LexTokenManager.Unchecked_Conversion_Token) = LexTokenManager.Str_Eq; when 2 => -- Ada.Unchecked_Conversion Result := 
LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => LexTokenLists.Get_Element (List => Required_Unit, Pos => 1), Lex_Str2 => LexTokenManager.Ada_Token) = LexTokenManager.Str_Eq and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => LexTokenLists.Get_Element (List => Required_Unit, Pos => 2), Lex_Str2 => LexTokenManager.Unchecked_Conversion_Token) = LexTokenManager.Str_Eq; when others => Result := False; end case; return Result; end Is_Predefined_Generic; begin -- Add_Inherited_Packages Trace (Msg => "In MainLoop.Add_Inherited_Packages"); loop RequiredUnits.Next (Inherit_Ptr => Inherit_Ptr, Required_Unit => Required_Unit, Found => Found); exit when not Found; -- Ignore dotted package names in 83 mode and ignore predefined packages -- in 95 mode. Ignore System if provided by config file. Ignore -- predefined generic units. --# accept F, 41, "Expect stable expression here"; case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => if LexTokenLists.Get_Length (List => Required_Unit) <= 1 and then not System_Needed_And_Provided_By_Config_File and then not Is_Predefined_Generic then Add_Required_Unit (Required_Unit => Required_Unit, Possible_Unit_Types => ContextManager.InheritableItem); end if; when CommandLineData.SPARK95_Onwards => if not Is_Predefined_Package and then not System_Needed_And_Provided_By_Config_File and then not Is_Predefined_Generic then Add_Required_Unit (Required_Unit => Required_Unit, Possible_Unit_Types => ContextManager.InheritableItem); end if; end case; --# end accept; end loop; end Add_Inherited_Packages; procedure Handle_Main_Program --# global in CommandLineData.Content; --# in Current_Unit_Name; --# in Current_Unit_Type; --# in Dictionary.Dict; --# in STree.Table; --# in Unit_Name; --# in Unit_Type; --# in out ContextManager.Ops.File_Heap; --# in out ContextManager.Ops.Unit_Heap; --# in out ContextManager.Ops.Unit_Stack; --# in out ErrorHandler.Error_Context; --# in out IndexManager.State; --# in out Inherit_Ptr; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# derives ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Unit_Name, --# Current_Unit_Type, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# IndexManager.State, --# Inherit_Ptr, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Unit_Name, --# Unit_Type & --# Inherit_Ptr from *, --# Current_Unit_Name, --# Current_Unit_Type, --# LexTokenManager.State, --# STree.Table, --# Unit_Name, --# Unit_Type; is begin Trace (Msg => "In MainLoop.Handle_Main_Program"); ContextManager.Ops.SetUnitName (ContextManager.Ops.Current_Unit, Unit_Name, Unit_Type); if Found_Top_Unit (Wanted_Name => Current_Unit_Name, Wanted_Type => Current_Unit_Type, Found_Name => Unit_Name, Found_Type => Unit_Type) then Add_Inherited_Packages; else Replace_Top_Unit (Current_Unit_Name => Current_Unit_Name, Current_Unit_Type => Current_Unit_Type); end if; end Handle_Main_Program; procedure Handle_Inter_Unit_Pragma --# global in Current_Unit_Name; --# in Current_Unit_Type; --# in LexTokenManager.State; --# in STree.Table; --# in Unit_Name; --# in Unit_Type; --# in out ContextManager.Ops.File_Heap; 
--# in out ContextManager.Ops.Unit_Heap; --# in out ContextManager.Ops.Unit_Stack; --# derives ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Stack from ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Unit_Name, --# Current_Unit_Type, --# LexTokenManager.State, --# Unit_Name, --# Unit_Type & --# ContextManager.Ops.Unit_Heap from *, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Unit_Name, --# Current_Unit_Type, --# LexTokenManager.State, --# STree.Table, --# Unit_Name, --# Unit_Type; is New_Unit_Name : LexTokenLists.Lists; begin Trace (Msg => "In MainLoop.Handle_Inter_Unit_Pragma"); New_Unit_Name := LexTokenLists.Null_List; ContextManager.Ops.SetUnitName (ContextManager.Ops.Current_Unit, New_Unit_Name, Unit_Type); if not Found_Top_Unit (Wanted_Name => Current_Unit_Name, Wanted_Type => Current_Unit_Type, Found_Name => Unit_Name, Found_Type => Unit_Type) then Replace_Top_Unit (Current_Unit_Name => Current_Unit_Name, Current_Unit_Type => Current_Unit_Type); end if; end Handle_Inter_Unit_Pragma; procedure Handle_Package_Specification --# global in CommandLineData.Content; --# in Current_Unit_Name; --# in Current_Unit_Type; --# in Dictionary.Dict; --# in STree.Table; --# in Unit_Name; --# in Unit_Type; --# in out ContextManager.Ops.File_Heap; --# in out ContextManager.Ops.Unit_Heap; --# in out ContextManager.Ops.Unit_Stack; --# in out ErrorHandler.Error_Context; --# in out IndexManager.State; --# in out Inherit_Ptr; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# derives ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Unit_Name, --# Current_Unit_Type, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# IndexManager.State, --# Inherit_Ptr, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Unit_Name, --# Unit_Type & --# Inherit_Ptr from *, --# Current_Unit_Name, --# Current_Unit_Type, --# LexTokenManager.State, --# STree.Table, --# Unit_Name, --# Unit_Type; is procedure Add_Parent (Unit_Name : in LexTokenLists.Lists) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ContextManager.Ops.File_Heap; --# in out ContextManager.Ops.Unit_Heap; --# in out ContextManager.Ops.Unit_Stack; --# in out ErrorHandler.Error_Context; --# in out IndexManager.State; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# derives ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Unit_Name & --# IndexManager.State from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Unit_Name & --# LexTokenManager.State, --# SparkLex.Curr_Line from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# 
ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Unit_Name & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# Unit_Name; is Required_Unit : LexTokenLists.Lists; Dummy_Item : LexTokenManager.Lex_String; begin Trace (Msg => "In MainLoop.Handle_Package_Specification.Add_Parent"); Required_Unit := Unit_Name; --# accept F, 10, Dummy_Item, "Ineffective assignment here OK"; LexTokenLists.Pop (List => Required_Unit, Item => Dummy_Item); --# end accept; Add_Required_Unit (Required_Unit => Required_Unit, Possible_Unit_Types => ContextManager.PackageSpecificationSet); --# accept F, 33, Dummy_Item, "Expect Dummy_Item unused"; end Add_Parent; begin -- Handle_Package_Specification Trace (Msg => "In MainLoop.Handle_Package_Specification"); ContextManager.Ops.SetUnitName (ContextManager.Ops.Current_Unit, Unit_Name, Unit_Type); if Found_Top_Unit (Wanted_Name => Current_Unit_Name, Wanted_Type => Current_Unit_Type, Found_Name => Unit_Name, Found_Type => Unit_Type) then Add_Inherited_Packages; case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => null; when CommandLineData.SPARK95_Onwards => if LexTokenLists.Get_Length (List => Unit_Name) > 1 and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => LexTokenLists.Get_Element (List => Unit_Name, Pos => 1), Lex_Str2 => LexTokenManager.Ada_Token) /= LexTokenManager.Str_Eq and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => LexTokenLists.Get_Element (List => Unit_Name, Pos => 1), Lex_Str2 => LexTokenManager.System_Token) /= LexTokenManager.Str_Eq then Add_Parent (Unit_Name => Unit_Name); end if; end case; else Replace_Top_Unit (Current_Unit_Name => Current_Unit_Name, Current_Unit_Type => Current_Unit_Type); end if; end Handle_Package_Specification; procedure Handle_Package_Body --# global in CommandLineData.Content; --# in Current_Unit_Name; --# in Current_Unit_Type; --# in Dictionary.Dict; --# in STree.Table; --# in Unit_Name; --# in out ContextManager.Ops.File_Heap; --# in out ContextManager.Ops.Unit_Heap; --# in out ContextManager.Ops.Unit_Stack; --# in out ErrorHandler.Error_Context; --# in out IndexManager.State; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# derives ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Unit_Name, --# Current_Unit_Type, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Unit_Name & --# ContextManager.Ops.Unit_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Unit_Name, --# Current_Unit_Type, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Unit_Name; is Components : IndexManager.Component_Lists; Index : IndexManager.Component_Index; begin Trace (Msg => "In MainLoop.Handle_Package_Body"); 
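-- What follows: the body's unit name is recorded, then its prerequisites are
-- queued.  In SPARK 83 mode only a library-level package requires its own
-- specification; from SPARK 95 onwards any child components listed in the
-- index file are required first, and the package's own specification is
-- required unless the unit is an Ada.* package.  If this body is not the unit
-- currently being sought, Replace_Top_Unit puts the wanted unit back at the
-- top of the stack instead.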
ContextManager.Ops.SetUnitName (ContextManager.Ops.Current_Unit, Unit_Name, ContextManager.PackageBody); if Found_Top_Unit (Wanted_Name => Current_Unit_Name, Wanted_Type => Current_Unit_Type, Found_Name => Unit_Name, Found_Type => ContextManager.PackageBody) then case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => if LexTokenLists.Get_Length (List => Unit_Name) <= 1 then Add_Required_Unit (Required_Unit => Unit_Name, Possible_Unit_Types => ContextManager.PackageSpecificationSet); end if; when CommandLineData.SPARK95_Onwards => -- look for components in index file IndexManager.Look_Up_Components (Required_Unit => Unit_Name, Components => Components); Index := IndexManager.Component_Index'First; loop exit when LexTokenLists.Eq_Unit (First_Item => Components (Index), Second => LexTokenLists.Null_List); Add_Required_Unit (Required_Unit => Components (Index), Possible_Unit_Types => ContextManager.PackageSpecificationSet); Index := IndexManager.Component_Index'Succ (Index); end loop; if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => LexTokenLists.Get_Element (List => Unit_Name, Pos => 1), Lex_Str2 => LexTokenManager.Ada_Token) /= LexTokenManager.Str_Eq then Add_Required_Unit (Required_Unit => Unit_Name, Possible_Unit_Types => ContextManager.PackageSpecificationSet); end if; end case; else Replace_Top_Unit (Current_Unit_Name => Current_Unit_Name, Current_Unit_Type => Current_Unit_Type); end if; end Handle_Package_Body; procedure Handle_Generic_Subprogram_Declaration --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in STree.Table; --# in Unit_Name; --# in out ContextManager.Ops.File_Heap; --# in out ContextManager.Ops.Unit_Heap; --# in out ContextManager.Ops.Unit_Stack; --# in out ErrorHandler.Error_Context; --# in out IndexManager.State; --# in out Inherit_Ptr; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# derives ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# IndexManager.State, --# Inherit_Ptr, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Unit_Name & --# Inherit_Ptr from *, --# STree.Table; is begin Trace (Msg => "In MainLoop.Handle_Generic_Subprogram_Declaration"); ContextManager.Ops.SetUnitName (ContextManager.Ops.Current_Unit, Unit_Name, ContextManager.GenericSubprogramDeclaration); Add_Inherited_Packages; end Handle_Generic_Subprogram_Declaration; procedure Handle_Generic_Subprogram_Body --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in Unit_Name; --# in out ContextManager.Ops.File_Heap; --# in out ContextManager.Ops.Unit_Heap; --# in out ContextManager.Ops.Unit_Stack; --# in out ErrorHandler.Error_Context; --# in out IndexManager.State; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# derives ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# 
ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Unit_Name & --# IndexManager.State from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Unit_Name & --# LexTokenManager.State, --# SparkLex.Curr_Line from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Unit_Name; is begin Trace (Msg => "In MainLoop.Handle_Generic_Subprogram_Body"); ContextManager.Ops.SetUnitName (ContextManager.Ops.Current_Unit, Unit_Name, ContextManager.GenericSubprogramBody); -- generic body found so we need to see its spec Add_Required_Unit (Required_Unit => Unit_Name, Possible_Unit_Types => ContextManager.GenericDeclarationSet); end Handle_Generic_Subprogram_Body; procedure Handle_Sub_Unit --# global in CommandLineData.Content; --# in Current_Unit_Name; --# in Current_Unit_Type; --# in Dictionary.Dict; --# in STree.Table; --# in Unit_Name; --# in out ContextManager.Ops.File_Heap; --# in out ContextManager.Ops.Unit_Heap; --# in out ContextManager.Ops.Unit_Stack; --# in out ErrorHandler.Error_Context; --# in out IndexManager.State; --# in out Inherit_Ptr; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# derives ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Unit_Name, --# Current_Unit_Type, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# IndexManager.State, --# Inherit_Ptr, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Unit_Name & --# IndexManager.State from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Unit_Name, --# Current_Unit_Type, --# ErrorHandler.Error_Context, --# Inherit_Ptr, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Unit_Name & --# Inherit_Ptr from *, --# STree.Table & --# LexTokenManager.State, --# SparkLex.Curr_Line from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Unit_Name, --# Current_Unit_Type, --# ErrorHandler.Error_Context, --# IndexManager.State, --# Inherit_Ptr, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Unit_Name; is Required_Unit : LexTokenLists.Lists; New_Unit_Name : LexTokenLists.Lists; Found : Boolean; begin Trace (Msg => "In MainLoop.Handle_Sub_Unit"); RequiredUnits.Next (Inherit_Ptr => Inherit_Ptr, Required_Unit => Required_Unit, Found => Found); if Found then New_Unit_Name := Required_Unit; LexTokenLists.Append (List => New_Unit_Name, Item => LexTokenLists.Get_Element (List => Unit_Name, Pos => 1)); ContextManager.Ops.SetUnitName (ContextManager.Ops.Current_Unit, New_Unit_Name, ContextManager.SubUnit); if Found_Top_Unit (Wanted_Name => Current_Unit_Name, Wanted_Type => Current_Unit_Type, Found_Name => New_Unit_Name, Found_Type => ContextManager.SubUnit) then if LexTokenLists.Get_Length (List => Required_Unit) = 1 then Add_Required_Unit (Required_Unit => 
Required_Unit, Possible_Unit_Types => ContextManager.BodySet); else Add_Required_Unit (Required_Unit => Required_Unit, Possible_Unit_Types => ContextManager.SubUnitSet); end if; else Replace_Top_Unit (Current_Unit_Name => Current_Unit_Name, Current_Unit_Type => Current_Unit_Type); end if; end if; end Handle_Sub_Unit; procedure Look_At_Parse_Tree --# global in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in STree.Table; --# out Inherit_Ptr; --# out Unit_Name; --# out Unit_Type; --# derives Inherit_Ptr, --# Unit_Name, --# Unit_Type from ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# STree.Table; is Parse_Tree : STree.SyntaxNode; begin ContextManager.Ops.GetParseTree (ContextManager.Ops.Current_Unit, Parse_Tree); RequiredUnits.Init (Top_Node => Parse_Tree, Inherit_Ptr => Inherit_Ptr, Unit_Type => Unit_Type, Unit_Name => Unit_Name); end Look_At_Parse_Tree; begin -- Set_Up_Required_Units Trace (Msg => "In MainLoop.Set_Up_Required_Units"); Look_At_Parse_Tree; ContextManager.Ops.GetUnitName (ContextManager.Ops.Current_Unit, Current_Unit_Name, Current_Unit_Type); if LexTokenLists.Eq_Unit (First_Item => Unit_Name, Second => LexTokenLists.Null_List) and then Unit_Type /= ContextManager.InterUnitPragma then ContextManager.Ops.SetUnitStatus (ContextManager.Ops.Current_Unit, ContextManager.NoUnitEntry); Replace_Top_Unit (Current_Unit_Name => Current_Unit_Name, Current_Unit_Type => Current_Unit_Type); else --# accept F, 10, Inherit_Ptr, "Not required here"; case Unit_Type is when ContextManager.MainProgram => Handle_Main_Program; when ContextManager.PackageSpecification => Handle_Package_Specification; when ContextManager.PackageBody => Handle_Package_Body; when ContextManager.SubUnit => Handle_Sub_Unit; when ContextManager.GenericSubprogramDeclaration => Handle_Generic_Subprogram_Declaration; when ContextManager.GenericSubprogramBody => Handle_Generic_Subprogram_Body; when ContextManager.InterUnitPragma => Handle_Inter_Unit_Pragma; when ContextManager.InvalidUnit => null; end case; --# end accept; end if; end Set_Up_Required_Units; procedure Process_Units (File_Descriptor : in ContextManager.FileDescriptors) --# global in CommandLineData.Content; --# in out ContextManager.Ops.File_Heap; --# in out ContextManager.Ops.Unit_Heap; --# in out ContextManager.Ops.Unit_Stack; --# in out Declarations.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out IndexManager.State; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out STree.Table; --# in out VCG.Invoked; --# in out XMLReport.State; --# out Sem.State; --# derives ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Declarations.State, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Graph.Table, --# IndexManager.State, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# Statistics.TableUsage, --# StmtStack.S, --# STree.Table, --# VCG.Invoked, --# XMLReport.State from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# File_Descriptor, --# IndexManager.State, --# LexTokenManager.State, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table & --# Sem.State from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# 
ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# File_Descriptor, --# IndexManager.State, --# LexTokenManager.State, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table & --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# File_Descriptor, --# IndexManager.State, --# LexTokenManager.State, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# XMLReport.State; is Unit_Descriptor : ContextManager.UnitDescriptors; SLI_Unit_Descriptor : ContextManager.UnitDescriptors := ContextManager.NullUnit; Severity : ErrorHandler.Error_Level; Unit_Name : LexTokenLists.Lists; Unit_Type : ContextManager.UnitTypes; Scope : Dictionary.Scopes; Prefix_Sym : Dictionary.Symbol; Source_Filename : E_Strings.T; L_Source_Filename : E_Strings.T; File_Status : FileSystem.Typ_File_Spec_Status; procedure Analyse_Current_Unit --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in out ContextManager.Ops.File_Heap; --# in out ContextManager.Ops.Unit_Heap; --# in out Declarations.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SLI_Unit_Descriptor; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out STree.Table; --# in out VCG.Invoked; --# out Sem.State; --# out SparkLex.Curr_Line; --# derives ContextManager.Ops.File_Heap, --# Declarations.State, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Graph.Table, --# LexTokenManager.State, --# SLI.State, --# SPARK_IO.File_Sys, --# Statistics.TableUsage, --# StmtStack.S, --# STree.Table, --# VCG.Invoked from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table & --# ContextManager.Ops.Unit_Heap from *, --# ContextManager.Ops.Unit_Stack & --# Sem.State from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table & --# SLI_Unit_Descriptor from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys & --# SparkLex.Curr_Line from ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack; is File_Descriptor : ContextManager.FileDescriptors; Unit_Name : LexTokenLists.Lists; Unit_Type : ContextManager.UnitTypes; Severity : ErrorHandler.Error_Level; File_Context : SparkLex.Line_Context; Parse_Tree : STree.SyntaxNode; Error_Context : ErrorHandler.Error_Contexts; Do_VCG : Boolean; begin ContextManager.Ops.GetParseTree (ContextManager.Ops.Current_Unit, Parse_Tree); File_Descriptor := ContextManager.Ops.Get_File_Descriptor (Unit_Descriptor => ContextManager.Ops.Current_Unit); ContextManager.Ops.GetLineContext (File_Descriptor, File_Context); ContextManager.Ops.GetVCG (ContextManager.Ops.Current_Unit, Do_VCG); ContextManager.Ops.GetUnitName 
(ContextManager.Ops.Current_Unit, Unit_Name, Unit_Type); SparkLex.Restore_Line_Context (File_Line => File_Context); ContextManager.Ops.GetErrorContext (File_Descriptor, Error_Context); ErrorHandler.Set_Error_Context (Context => Error_Context); Echo_Compilation_Unit (Unit_Name => Unit_Name, Unit_Type => Unit_Type); Dictionary.Set_Current_File_Name (File_Name => LexTokenManager.Lex_String_To_String (Lex_Str => ContextManager.Ops.GetSourceFileName (File_Descriptor))); if ErrorHandler.Generate_SLI then ContextManager.Ops.Get_Unit (Descriptor => File_Descriptor, Unit_Descriptor => SLI_Unit_Descriptor); end if; Sem.CompUnit (Top_Node => Parse_Tree, Do_VCG => Do_VCG); Casing.Check_Node_Casing (Top_Node => Parse_Tree); ErrorHandler.Get_Error_Severity (Severity => Severity); if Severity = ErrorHandler.Fatal then ContextManager.Ops.Close_File (File_Descriptor => File_Descriptor); end if; SparkLex.Store_Line_Context (File_Line => File_Context); ContextManager.Ops.SetLineContext (File_Descriptor, File_Context); ErrorHandler.Get_Error_Context (Context => Error_Context); ContextManager.Ops.SetErrorContext (File_Descriptor, Error_Context); -- return the Parse_Tree to the syntax tree free list, except sub-trees -- that are later needed by the VCG. If the user has selected /rules=none, -- then we _can_ return constant declarations as well. STree.DeleteSyntaxTree (Root => Parse_Tree, KeepConstants => CommandLineData.Content.Constant_Rules /= CommandLineData.No_Rules); ContextManager.Ops.SetUnitStatus (ContextManager.Ops.Current_Unit, ContextManager.UnitAnalysed); end Analyse_Current_Unit; begin -- Process_Units loop exit when ContextManager.Ops.GetFileStatus (File_Descriptor) = ContextManager.FileEnd; ContextManager.Ops.Create_Unit_Descriptor (File_Descriptor => File_Descriptor, Unit_Descriptor => Unit_Descriptor); ContextManager.Ops.SetVCG (Unit_Descriptor, CommandLineData.Content.VCG or CommandLineData.Content.DPC); ContextManager.Ops.PushUnit (Unit_Descriptor); loop exit when ContextManager.Ops.Current_Unit = ContextManager.NullUnit; case ContextManager.Ops.Get_Unit_Status (Unit_Descriptor => ContextManager.Ops.Current_Unit) is when ContextManager.NoUnitEntry => null; when ContextManager.UnitCreated => Parse_Current_Unit; ErrorHandler.Get_Error_Severity (Severity => Severity); if Severity /= ErrorHandler.Fatal then Set_Up_Required_Units; end if; when ContextManager.UnitDeferred => ContextManager.Ops.SetUnitStatus (ContextManager.Ops.Current_Unit, ContextManager.UnitParsed); Set_Up_Required_Units; when ContextManager.UnitParsed => --# accept F, 41, "Stable expression OK here"; if CommandLineData.Content.Syntax_Only then ContextManager.Ops.PopUnit (Unit_Descriptor); else Analyse_Current_Unit; end if; --# end accept; when ContextManager.UnitAnalysed | ContextManager.NoIndexFile .. 
ContextManager.UnableToLocate => ContextManager.Ops.PopUnit (Unit_Descriptor); end case; end loop; if not ErrorHandler.Syntax_Or_Semantic_Error then L_Source_Filename := LexTokenManager.Lex_String_To_String (Lex_Str => ContextManager.Ops.GetSourceFileName (Descriptor => File_Descriptor)); ContextManager.Ops.GetUnitName (Descriptor => Unit_Descriptor, UnitName => Unit_Name, UnitType => Unit_Type); --# accept F, 41, "Stable expression expected here"; if CommandLineData.Content.Debug.File_Names then SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "MAINLOOP ", Stop => 0); end if; --# end accept; --# accept F, 10, File_Status, "Expect File_Status Unused"; FileSystem.Find_Full_File_Name (File_Spec => L_Source_Filename, File_Status => File_Status, Full_File_Name => Source_Filename); --# end accept; if Unit_Type /= ContextManager.InterUnitPragma then IndexManager.Add_Unit (Unit => Unit_Name, Unit_Types => Unit_Type, Source_Filename => Source_Filename); end if; end if; end loop; if ErrorHandler.Generate_SLI and then SLI_Unit_Descriptor /= ContextManager.NullUnit then ContextManager.Ops.GetUnitName (Descriptor => SLI_Unit_Descriptor, UnitName => Unit_Name, UnitType => Unit_Type); Scope := Dictionary.GlobalScope; Prefix_Sym := Dictionary.NullSymbol; for I in LexTokenLists.Positions range LexTokenLists.Positions'First .. LexTokenLists.Get_Length (List => Unit_Name) loop SLI.Look_Up (Prefix => Prefix_Sym, Scope => Scope, Subprog_Sym => Dictionary.NullSymbol, Lex_Str => LexTokenLists.Get_Element (List => Unit_Name, Pos => I), Pos => LexTokenManager.Null_Token_Position, Full_Package_Name => False); exit when Dictionary.Is_Null_Symbol (Prefix_Sym); end loop; if Unit_Type = ContextManager.MainProgram or else Unit_Type = ContextManager.GenericSubprogramBody or else Unit_Type = ContextManager.PackageBody or else Unit_Type = ContextManager.SubUnit or else (Unit_Type = ContextManager.PackageSpecification and then not Dictionary.Is_Null_Symbol (Prefix_Sym) and then not Dictionary.PackageRequiresBody (ThePackage => Prefix_Sym)) then ContextManager.Ops.Dependency_Closure (Descriptor => File_Descriptor); SLI.Create_File (File_Descriptor => File_Descriptor); SLI.Header (File_Descriptor => File_Descriptor); SLI.Dump_Xref (File_Descriptor => File_Descriptor); SLI.Close_File; end if; end if; --# accept Flow, 33, File_Status, "Expect File_Status unused" & --# Flow, 602, Sem.State, Sem.State, "Expect undefined initial value"; end Process_Units; procedure Close_File (File_Descriptor : in ContextManager.FileDescriptors; Report_File_Error_List : in SPARK_IO.File_Type; Report_Needed : in Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in SparkHTML.Generate_HTML; --# in out ContextManager.Ops.File_Heap; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives ContextManager.Ops.File_Heap, --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys, --# XMLReport.State from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# File_Descriptor, --# LexTokenManager.State, --# Report_File_Error_List, --# Report_Needed, --# SparkHTML.Generate_HTML, --# SPARK_IO.File_Sys, --# XMLReport.State; is begin if ContextManager.Ops.ListingReqt (File_Descriptor) and not CommandLineData.Content.No_Listings then Output_Listing_File (File_Descriptor => File_Descriptor); if 
CommandLineData.Content.HTML then SparkHTML.Gen_Listing_HTML (File_Descriptor => File_Descriptor); end if; end if; if Report_Needed then Append_To_Report_File (Report_File => Report_File_Error_List, Purpose => Error_Types.ForReportSelectedFiles, File_Descriptor => File_Descriptor); ContextManager.Ops.SetErrorsReported (File_Descriptor); end if; end Close_File; procedure Process_Files is File_Descriptor : ContextManager.FileDescriptors; File_Found, Config_Success, Config_Read, Do_Listing, Do_VCG : Boolean; Filename, The_Filename, The_Listing_Name : E_Strings.T; The_Meta_File : MetaFile.Meta_Files; Report_File_Error_List : SPARK_IO.File_Type := SPARK_IO.Null_File; Error_File_Status : SPARK_IO.File_Status; File_Status : FileSystem.Typ_File_Spec_Status; procedure Open_Error_List --# global in out Report_File_Error_List; --# in out SPARK_IO.File_Sys; --# out Error_File_Status; --# derives Error_File_Status, --# Report_File_Error_List, --# SPARK_IO.File_Sys from Report_File_Error_List, --# SPARK_IO.File_Sys; --# is begin SPARK_IO.Create (Report_File_Error_List, 0, "", "", -- Temporary file --to get Error_File_Status); end Open_Error_List; procedure Trace_Meta_Filenames (The_Filename : in E_Strings.T; Do_Listing : in Boolean; The_Listing_Name : in E_Strings.T; Do_VCG : in Boolean; File_Found : in Boolean) --# global in CommandLineData.Content; --# derives null from CommandLineData.Content, --# Do_Listing, --# Do_VCG, --# File_Found, --# The_Filename, --# The_Listing_Name; is --# hide Trace_Meta_Filenames; begin if File_Found and CommandLineData.Content.Debug.File_Names then SPARK_IO.Put_String (SPARK_IO.Standard_Output, "MetaFile.NextName returns:", 0); Print_Filename (File => SPARK_IO.Standard_Output, Name => The_Filename, Plain => CommandLineData.Content.Plain_Output); if Do_Listing then SPARK_IO.Put_String (SPARK_IO.Standard_Output, ", ", 0); Print_Filename (File => SPARK_IO.Standard_Output, Name => The_Listing_Name, Plain => CommandLineData.Content.Plain_Output); else SPARK_IO.Put_Line (SPARK_IO.Standard_Output, ", No listing file", 0); end if; if Do_VCG then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, ", VCG", 0); end if; ScreenEcho.New_Line (1); end if; end Trace_Meta_Filenames; begin -- Process_Files Initialise_Processing; Open_Error_List; if Error_File_Status = SPARK_IO.Ok then ErrorHandler.Read_Warning_File; Dictionary.Read_Target_Data_File; --# accept F, 10, Config_Read, "Not required here" & --# F, 33, Config_Read, "Not required here"; ConfigFile.Read_Config_File (Opened_OK => Config_Read, No_Errors => Config_Success); if Config_Success then for CurrentSource in CommandLineData.Source_File_Positions range 1 .. 
CommandLineData.Content.Number_Source loop Filename := CommandLineData.Content.Source_File_List (CurrentSource).Source_File_Name; if E_Strings.Get_Element (E_Str => Filename, Pos => 1) = '@' then -- process a meta file MetaFile.Create (Filename, The_Meta_File); loop MetaFile.Next_Name (The_Meta_File => The_Meta_File, The_Filename => The_Filename, Do_Listing => Do_Listing, The_Listing_Name => The_Listing_Name, Do_VCG => Do_VCG, File_Found => File_Found); exit when not File_Found; Trace_Meta_Filenames (The_Filename => The_Filename, Do_Listing => Do_Listing, The_Listing_Name => The_Listing_Name, Do_VCG => Do_VCG, File_Found => File_Found); Prepare_Next_Argument_File (The_Filename => The_Filename, Do_Listing => Do_Listing, The_Listing_Name => The_Listing_Name, Do_VCG => Do_VCG, File_Descriptor => File_Descriptor); ContextManager.Ops.Open_File (File_Descriptor => File_Descriptor); if ContextManager.Ops.GetFileStatus (File_Descriptor) /= ContextManager.UnableToOpen then Process_Units (File_Descriptor => File_Descriptor); Close_File (File_Descriptor => File_Descriptor, Report_File_Error_List => Report_File_Error_List, Report_Needed => CommandLineData.Content.Report); end if; --# accept W, 169, CommandLineData.Content.VCG, "Direct updates OK here"; CommandLineData.Content.VCG := Do_VCG; --# end accept; end loop; else -- its a single source file --# accept F, 10, File_Status, "Expect File_Status Unused"; FileSystem.Find_Full_File_Name (File_Spec => Filename, File_Status => File_Status, Full_File_Name => The_Filename); --# end accept; The_Listing_Name := CommandLineData.Content.Source_File_List (CurrentSource).Listing_File_Name; Do_Listing := CommandLineData.Content.Source_File_List (CurrentSource).Listing; Do_VCG := CommandLineData.Content.VCG; Prepare_Next_Argument_File (The_Filename => The_Filename, Do_Listing => Do_Listing, The_Listing_Name => The_Listing_Name, Do_VCG => Do_VCG, File_Descriptor => File_Descriptor); ContextManager.Ops.Open_File (File_Descriptor => File_Descriptor); if ContextManager.Ops.GetFileStatus (File_Descriptor) /= ContextManager.UnableToOpen then Process_Units (File_Descriptor => File_Descriptor); Close_File (File_Descriptor => File_Descriptor, Report_File_Error_List => Report_File_Error_List, Report_Needed => CommandLineData.Content.Report); end if; --# accept W, 169, CommandLineData.Content.VCG, "Direct updates OK here"; CommandLineData.Content.VCG := Do_VCG; --# end accept; end if; end loop; --each file on command line else Echo_Warning_Or_Note (About_File => CommandLineData.Content.Target_Config_File, Msg => "Warning - analysis aborted due to errors in target configuration file", Important => True); end if; LexTokenManager.Report_Usage; Dictionary.ReportUsage; STree.ReportUsage; SLI.Cleanup; --# accept F, 10, Report_File_Error_List, "Final assignment on Close"; Output_Report_File (Report_File_Error_List => Report_File_Error_List); --# end accept; Output_Dictionary_File; if CommandLineData.Content.Syntax_Only then Echo_Warning_Or_Note (About_File => CommandLineData.Content.Source_File_List (1).Source_File_Name, Msg => "Warning - No semantic checks carried out, text may not be legal SPARK", Important => True); end if; if CommandLineData.Content.VCG and then -- Any sort of VC generation requested not VCG.Invoked and then -- and VCG not actually invoked on any body ErrorHandler.Get_Errors_Type < 3 then -- and no syntax or semantic errors Echo_Warning_Or_Note (About_File => CommandLineData.Content.Source_File_List (1).Source_File_Name, Msg => "Warning - VC generation 
requested but no bodies presented. No VCs generated.", Important => False); end if; case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => Echo_Warning_Or_Note (About_File => CommandLineData.Content.Source_File_List (1).Source_File_Name, Msg => "Note - Ada83 language rules selected", Important => False); when CommandLineData.SPARK95_Onwards => null; end case; ErrorHandler.Echo_Total_Error_Count; if CommandLineData.Content.Echo and not CommandLineData.Content.Brief then ScreenEcho.New_Line (2); ScreenEcho.Put_Line ("-----------End of SPARK Examination--------------------------------"); end if; else -- Error_File_Status test Echo_Warning_Or_Note (About_File => CommandLineData.Content.Source_File_List (1).Source_File_Name, Msg => "Creating temp error file failed. Check existence and ACL of temp directory", Important => True); end if; --# accept Flow, 33, File_Status, "Expect File_Status unused" & --# Flow, 602, Sem.State, Sem.State, "Expect undefined initial value"; end Process_Files; end MainLoop; ././@LongLink0000000000000000000000000000015000000000000011561 Lustar rootrootspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_subtype_declaration.adbspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_subtype_declaration.0000644000175000017500000022773411753202336033063 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with SLI; separate (Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration) procedure Wf_Subtype_Declaration (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; The_Heap : in out Heap.HeapRecord) is type Real_Type is (Is_Floating, Is_Fixed); Type_Node, Ident_Node, Constraint_Node : STree.SyntaxNode; Id_Str : LexTokenManager.Lex_String; Constraint_Found, Ok_To_Add : Boolean; All_Indexes_OK : Boolean; Subtype_Sym, Type_Sym : Dictionary.Symbol; Subtype_Is_Static : Boolean; Lower, Upper, Accuracy : LexTokenManager.Lex_String; Subtype_Symbol : Dictionary.Symbol; ------------------------------------------------------------------- function Is_Named_Association (Node : STree.SyntaxNode) return Boolean --# global in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.index_or_discriminant_constraint; is Current_Node : STree.SyntaxNode; begin Current_Node := Child_Node (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME Current_Node = named_argument_association OR positional_argument_association SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Current_Node) = SP_Symbols.named_argument_association or else Syntax_Node_Type (Node => Current_Node) = SP_Symbols.positional_argument_association, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = named_argument_association OR " & "positional_argument_association in Is_Named_Association"); return Syntax_Node_Type (Node => Current_Node) = SP_Symbols.named_argument_association; end Is_Named_Association; ------------------------------------------------------------------- procedure Check_Index_Constraints (Constraint_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Subtype_Sym : in Dictionary.Symbol; Type_Sym : in Dictionary.Symbol; All_Indexes_OK : out Boolean; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# out Aggregate_Stack.State; --# derives Aggregate_Stack.State, --# All_Indexes_OK, --# Dictionary.Dict, --# LexTokenManager.State, --# STree.Table from CommandLineData.Content, --# Constraint_Node, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# STree.Table, --# Subtype_Sym, --# The_Heap, --# Type_Sym & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Constraint_Node, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Subtype_Sym, --# The_Heap, --# Type_Sym & --# Statistics.TableUsage, --# The_Heap from *, --# CommandLineData.Content, --# Constraint_Node, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# STree.Table, --# Subtype_Sym, --# The_Heap, --# Type_Sym; --# pre Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.index_or_discriminant_constraint; --# post STree.Table = STree.Table~; is Index_Iterator : Dictionary.Iterator; Current_Constraint_Node : STree.SyntaxNode; Index_Sym, Constraint_Sym, 
The_Array_Index : Dictionary.Symbol; Result_Of_First_Check, Result_Of_Last_Check : Maths.Value; Constraint_First, Constraint_Last : LexTokenManager.Lex_String; procedure Get_First_Index (Type_Sym : in Dictionary.Symbol; Index_Sym : out Dictionary.Symbol; Index_Iterator : out Dictionary.Iterator) --# global in Dictionary.Dict; --# derives Index_Iterator, --# Index_Sym from Dictionary.Dict, --# Type_Sym; is begin Index_Iterator := Dictionary.FirstArrayIndex (Type_Sym); if Dictionary.IsNullIterator (Index_Iterator) then Index_Sym := Dictionary.NullSymbol; else Index_Sym := Dictionary.CurrentSymbol (Index_Iterator); end if; end Get_First_Index; procedure Get_Next_Index (Index_Sym : out Dictionary.Symbol; Index_Iterator : in out Dictionary.Iterator) --# global in Dictionary.Dict; --# derives Index_Iterator, --# Index_Sym from Dictionary.Dict, --# Index_Iterator; is begin Index_Iterator := Dictionary.NextSymbol (Index_Iterator); if Dictionary.IsNullIterator (Index_Iterator) then Index_Sym := Dictionary.NullSymbol; else Index_Sym := Dictionary.CurrentSymbol (Index_Iterator); end if; end Get_Next_Index; procedure Walk_Expression_As_Type_Mark (Exp_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Constraint_Sym : out Dictionary.Symbol; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# out Aggregate_Stack.State; --# derives Aggregate_Stack.State, --# Constraint_Sym, --# Dictionary.Dict, --# LexTokenManager.State, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Exp_Node, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Exp_Node, --# LexTokenManager.State, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Exp_Node, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.expression; --# post STree.Table = STree.Table~; is Unwanted_Seq : SeqAlgebra.Seq; Unused_Component_Data : ComponentManager.ComponentData; Constraint_Record : Exp_Record; begin Heap.Reset (The_Heap); SeqAlgebra.CreateSeq (The_Heap, Unwanted_Seq); ComponentManager.Initialise (Unused_Component_Data); --# accept Flow, 10, Unused_Component_Data, "Expected ineffective assignment"; Walk_Expression_P.Walk_Expression (Exp_Node => Exp_Node, Scope => Scope, Type_Context => Dictionary.GetUnknownTypeMark, Context_Requires_Static => False, Ref_Var => Unwanted_Seq, Result => Constraint_Record, Component_Data => Unused_Component_Data, The_Heap => The_Heap); --# end accept; SeqAlgebra.DisposeOfSeq (The_Heap, Unwanted_Seq); if Constraint_Record.Is_ARange then Constraint_Sym := Constraint_Record.Type_Symbol; else ErrorHandler.Semantic_Error (Err_Num => 95, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => 
Exp_Node), Id_Str => LexTokenManager.Null_String); Constraint_Sym := Dictionary.GetUnknownTypeMark; end if; Heap.ReportUsage (The_Heap); end Walk_Expression_As_Type_Mark; procedure Get_First_Constraint (Constraint_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Current_Constraint_Node : out STree.SyntaxNode; Constraint_Sym : out Dictionary.Symbol; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# out Aggregate_Stack.State; --# derives Aggregate_Stack.State, --# Constraint_Sym, --# Dictionary.Dict, --# LexTokenManager.State, --# STree.Table from CommandLineData.Content, --# Constraint_Node, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Heap & --# Current_Constraint_Node from Constraint_Node, --# STree.Table & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Constraint_Node, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# CommandLineData.Content, --# Constraint_Node, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.index_or_discriminant_constraint; --# post Syntax_Node_Type (Current_Constraint_Node, STree.Table) = SP_Symbols.expression and --# STree.Table = STree.Table~; is begin Current_Constraint_Node := Child_Node (Current_Node => Child_Node (Current_Node => Constraint_Node)); while Syntax_Node_Type (Node => Current_Constraint_Node) /= SP_Symbols.expression loop --# assert STree.Table = STree.Table~; -- ASSUME Current_Constraint_Node = positional_argument_association SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Current_Constraint_Node) = SP_Symbols.positional_argument_association, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Constraint_Node = positional_argument_association in Get_First_Constraint"); Current_Constraint_Node := Child_Node (Current_Node => Current_Constraint_Node); end loop; -- ASSUME Current_Constraint_Node = expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Current_Constraint_Node) = SP_Symbols.expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Constraint_Node = expression in Get_First_Constraint"); Walk_Expression_As_Type_Mark (Exp_Node => Current_Constraint_Node, Scope => Scope, Constraint_Sym => Constraint_Sym, The_Heap => The_Heap); end Get_First_Constraint; procedure Get_Next_Constraint (Scope : in Dictionary.Scopes; Current_Constraint_Node : in out STree.SyntaxNode; Constraint_Sym : out Dictionary.Symbol; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out 
LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Constraint_Node, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Heap & --# Constraint_Sym from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Constraint_Node, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Heap & --# Current_Constraint_Node from *, --# STree.Table & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Constraint_Node, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Current_Constraint_Node, STree.Table) = SP_Symbols.expression; --# post (Syntax_Node_Type (Current_Constraint_Node, STree.Table) = SP_Symbols.expression or --# Current_Constraint_Node = STree.NullNode) and --# STree.Table = STree.Table~; is begin Current_Constraint_Node := Next_Sibling (Current_Node => Parent_Node (Current_Node => Current_Constraint_Node)); -- ASSUME Current_Constraint_Node = expression OR NULL if Current_Constraint_Node = STree.NullNode then -- ASSUME Current_Constraint_Node = NULL Constraint_Sym := Dictionary.NullSymbol; elsif Syntax_Node_Type (Node => Current_Constraint_Node) = SP_Symbols.expression then -- ASSUME Current_Constraint_Node = expression Walk_Expression_As_Type_Mark (Exp_Node => Current_Constraint_Node, Scope => Scope, Constraint_Sym => Constraint_Sym, The_Heap => The_Heap); else Constraint_Sym := Dictionary.NullSymbol; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Constraint_Node = expression OR NULL in Get_Next_Constraint"); end if; end Get_Next_Constraint; begin -- Check_Index_Constraints All_Indexes_OK := True; Get_First_Index (Type_Sym => Type_Sym, Index_Sym => Index_Sym, Index_Iterator => Index_Iterator); Get_First_Constraint (Constraint_Node => Constraint_Node, Scope => Scope, Current_Constraint_Node => Current_Constraint_Node, Constraint_Sym => Constraint_Sym, The_Heap => The_Heap); while not Dictionary.Is_Null_Symbol (Index_Sym) and then Current_Constraint_Node /= STree.NullNode loop --# assert Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.index_or_discriminant_constraint and --# Syntax_Node_Type (Current_Constraint_Node, STree.Table) = SP_Symbols.expression and --# STree.Table = STree.Table~; if not Dictionary.CompatibleTypes (Scope, Index_Sym, Constraint_Sym) then ErrorHandler.Semantic_Error_Sym2 (Err_Num => 107, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Current_Constraint_Node), Sym => Constraint_Sym, Sym2 => Index_Sym, Scope => Scope); All_Indexes_OK := False; end if; Constraint_First := Dictionary.GetScalarAttributeValue (False, LexTokenManager.First_Token, Constraint_Sym); Constraint_Last := Dictionary.GetScalarAttributeValue (False, LexTokenManager.Last_Token, Constraint_Sym); -- Check that Constraint_Sym'First is OK wrt Index_Sym'First if Dictionary.IsPredefinedStringType (Type_Sym) then if 
LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Constraint_First, Lex_Str2 => LexTokenManager.One_Value) /= LexTokenManager.Str_Eq then ErrorHandler.Semantic_Error (Err_Num => 417, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Current_Constraint_Node), Id_Str => LexTokenManager.Null_String); end if; else Constraint_Check (Val => Maths.ValueRep (Constraint_First), New_Val => Result_Of_First_Check, Is_Annotation => False, Typ => Index_Sym, Position => Node_Position (Node => Current_Constraint_Node)); if Result_Of_First_Check = Maths.NoValue then All_Indexes_OK := False; end if; end if; -- Check that Constraint_Sym'Last is OK wrt Index_Sym'Last Constraint_Check (Val => Maths.ValueRep (Constraint_Last), New_Val => Result_Of_Last_Check, Is_Annotation => False, Typ => Index_Sym, Position => Node_Position (Node => Current_Constraint_Node)); if Result_Of_Last_Check = Maths.NoValue then All_Indexes_OK := False; end if; Dictionary.AddArrayIndex (TheArrayType => Subtype_Sym, IndexType => Constraint_Sym, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Current_Constraint_Node), End_Position => Node_Position (Node => Current_Constraint_Node)), TheArrayIndex => The_Array_Index); STree.Add_Node_Symbol (Node => Current_Constraint_Node, Sym => The_Array_Index); Get_Next_Index (Index_Sym => Index_Sym, Index_Iterator => Index_Iterator); Get_Next_Constraint (Scope => Scope, Current_Constraint_Node => Current_Constraint_Node, Constraint_Sym => Constraint_Sym, The_Heap => The_Heap); end loop; if not Dictionary.Is_Null_Symbol (Index_Sym) or else Current_Constraint_Node /= STree.NullNode then ErrorHandler.Semantic_Error (Err_Num => 93, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Constraint_Node), Id_Str => LexTokenManager.Null_String); All_Indexes_OK := False; end if; end Check_Index_Constraints; ----------------------------------------------------------------------- procedure Check_Real_Accuracy (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Sort : in Real_Type; Accuracy : out LexTokenManager.Lex_String; Static : out Boolean; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Accuracy from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# Sort, --# STree.Table, --# The_Heap & --# Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# Sort, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# SLI.State from *, --# CommandLineData.Content, --# 
ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# Static, --# The_Heap from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.floating_point_constraint or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.fixed_point_constraint; --# post STree.Table = STree.Table~; is Exp_Node : STree.SyntaxNode; Accuracy_Type : Exp_Record; Unwanted_Seq : SeqAlgebra.Seq; Unused_Component_Data : ComponentManager.ComponentData; ------------------------------- function Type_Correct (Type_Sym : Dictionary.Symbol; Scope : Dictionary.Scopes; Sort : Real_Type) return Boolean --# global in Dictionary.Dict; is Result : Boolean; begin if Sort = Is_Floating then Result := Dictionary.IsIntegerTypeMark (Type_Sym, Scope); else Result := Dictionary.IsRealTypeMark (Type_Sym, Scope); end if; return Result or else Dictionary.IsUnknownTypeMark (Type_Sym); end Type_Correct; begin -- Check_Real_Accuracy Heap.Reset (The_Heap); ComponentManager.Initialise (Unused_Component_Data); case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => Exp_Node := Child_Node (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME Exp_Node = simple_expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.simple_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Exp_Node = simple_expression in Check_Real_Accuracy"); SeqAlgebra.CreateSeq (The_Heap, Unwanted_Seq); --# accept Flow, 10, Unused_Component_Data, "Expected ineffective assignment"; Walk_Expression_P.Walk_Expression (Exp_Node => Exp_Node, Scope => Scope, Type_Context => Dictionary.GetUnknownTypeMark, Context_Requires_Static => True, Ref_Var => Unwanted_Seq, Result => Accuracy_Type, Component_Data => Unused_Component_Data, The_Heap => The_Heap); --# end accept; SeqAlgebra.DisposeOfSeq (The_Heap, Unwanted_Seq); Maths.StorageRep (Accuracy_Type.Value, Accuracy); if not Type_Correct (Type_Sym => Accuracy_Type.Type_Symbol, Scope => Scope, Sort => Sort) then Accuracy := LexTokenManager.Null_String; ErrorHandler.Semantic_Error (Err_Num => 38, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; Static := Accuracy_Type.Is_Static; when CommandLineData.SPARK95_Onwards => -- reduced accuracy subtypes of reals are not allowed in 95 onwards Static := True; -- to reduce knock-on errors Accuracy := LexTokenManager.Null_String; ErrorHandler.Semantic_Error (Err_Num => 608, Reference => 9, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end case; Heap.ReportUsage (The_Heap); end Check_Real_Accuracy; ------------------------------------------------------------------------ procedure Check_Range (Node : in STree.SyntaxNode; Type_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Static : in out Boolean; Lower, Upper : out LexTokenManager.Lex_String; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in 
out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# out Aggregate_Stack.State; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# Lower, --# STree.Table, --# Upper from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap, --# Type_Sym & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap, --# Type_Sym & --# Static, --# Statistics.TableUsage, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap, --# Type_Sym; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.arange and --# (Dictionary.Is_Null_Symbol (Type_Sym) or Dictionary.IsTypeMark (Type_Sym, Dictionary.Dict)); --# post STree.Table = STree.Table~; is Range_Result : Exp_Record; Unwanted_Seq : SeqAlgebra.Seq; Unused_Component_Data : ComponentManager.ComponentData; Upper_After_Constraint_Check, Lower_After_Constraint_Check : Maths.Value; RHS_Node : STree.SyntaxNode; begin Heap.Reset (The_Heap); SeqAlgebra.CreateSeq (The_Heap, Unwanted_Seq); ComponentManager.Initialise (Unused_Component_Data); --# accept Flow, 10, Unused_Component_Data, "Expected ineffective assignment"; Walk_Expression_P.Walk_Expression (Exp_Node => Node, Scope => Scope, Type_Context => Type_Sym, Context_Requires_Static => False, Ref_Var => Unwanted_Seq, Result => Range_Result, Component_Data => Unused_Component_Data, The_Heap => The_Heap); --# end accept; SeqAlgebra.DisposeOfSeq (The_Heap, Unwanted_Seq); Static := Static and then Range_Result.Is_Static; Maths.StorageRep (Range_Result.Value, Lower); Maths.StorageRep (Range_Result.Range_RHS, Upper); -- check that range is constant if not Range_Result.Is_Constant then Lower := LexTokenManager.Null_String; Upper := LexTokenManager.Null_String; ErrorHandler.Semantic_Error (Err_Num => 43, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; if not Dictionary.CompatibleTypes (Scope, Range_Result.Type_Symbol, Type_Sym) then Lower := LexTokenManager.Null_String; Upper := LexTokenManager.Null_String; ErrorHandler.Semantic_Error_Sym2 (Err_Num => 107, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Sym => Range_Result.Type_Symbol, Sym2 => Type_Sym, Scope => Scope); end if; -- checks for bounds outside type being constrained -- see whether node is attribute or X..Y form and select suitable place -- to report errors on upper range bound RHS_Node := Child_Node (Current_Node => Node); -- ASSUME RHS_Node = attribute OR simple_expression if Syntax_Node_Type (Node => RHS_Node) = SP_Symbols.attribute then -- ASSUME RHS_Node = attribute RHS_Node := Node; elsif Syntax_Node_Type (Node => RHS_Node) = SP_Symbols.simple_expression then -- ASSUME RHS_Node = simple_expression -- must be of form X..Y RHS_Node := Next_Sibling (Current_Node => RHS_Node); else SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => RHS_Node) = SP_Symbols.attribute or else Syntax_Node_Type (Node => RHS_Node) = 
SP_Symbols.simple_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect RHS_Node = attribute OR simple_expression in Check_Range"); end if; -- ASSUME RHS_Node = arange OR simple_expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => RHS_Node) = SP_Symbols.arange or else Syntax_Node_Type (Node => RHS_Node) = SP_Symbols.simple_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect RHS_Node = arange OR simple_expression in Check_Range"); Constraint_Check (Val => Maths.ValueRep (Lower), New_Val => Lower_After_Constraint_Check, Is_Annotation => False, Typ => Type_Sym, Position => Node_Position (Node => Node)); Maths.StorageRep (Lower_After_Constraint_Check, Lower); Constraint_Check (Val => Maths.ValueRep (Upper), New_Val => Upper_After_Constraint_Check, Is_Annotation => False, Typ => Type_Sym, Position => Node_Position (Node => RHS_Node)); Maths.StorageRep (Upper_After_Constraint_Check, Upper); Heap.ReportUsage (The_Heap); end Check_Range; ----------------------------------------------------------------------- procedure Check_Real_Range (Node : in STree.SyntaxNode; Type_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Static : in out Boolean; Lower, Upper : out LexTokenManager.Lex_String; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# Static, --# Statistics.TableUsage, --# STree.Table from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap, --# Type_Sym & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap, --# Type_Sym & --# Lower, --# The_Heap, --# Upper from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap, --# Type_Sym; --# pre (Syntax_Node_Type (Node, STree.Table) = SP_Symbols.floating_point_constraint or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.fixed_point_constraint) and --# (Dictionary.Is_Null_Symbol (Type_Sym) or Dictionary.IsTypeMark (Type_Sym, Dictionary.Dict)); --# post STree.Table = STree.Table~; is Range_Node : STree.SyntaxNode; begin Range_Node := Next_Sibling (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME Range_Node = range_constraint OR NULL if Range_Node = STree.NullNode then -- ASSUME Range_Node = NULL -- no range supplied so range is unchanged from parent type Upper := Dictionary.GetScalarAttributeValue (False, --no 'base LexTokenManager.Last_Token, Type_Sym); Lower := Dictionary.GetScalarAttributeValue (False, --no 'base LexTokenManager.First_Token, Type_Sym); elsif Syntax_Node_Type (Node => Range_Node) = SP_Symbols.range_constraint then -- ASSUME Range_Node = range_constraint -- a range is 
supplied Range_Node := Child_Node (Current_Node => Range_Node); -- ASSUME Range_Node = arange SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Range_Node) = SP_Symbols.arange, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Range_Node = arange in Check_Real_Range"); Check_Range (Node => Range_Node, Type_Sym => Type_Sym, Scope => Scope, Static => Static, Lower => Lower, Upper => Upper, The_Heap => The_Heap); else Lower := LexTokenManager.Null_String; Upper := LexTokenManager.Null_String; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Range_Node = range_constraint OR NULL in Check_Real_Range"); end if; end Check_Real_Range; ----------------------------------------------------------------------- procedure Add_Scalar_Subtype (Accuracy : in LexTokenManager.Lex_String; Id_Str : in LexTokenManager.Lex_String; Ident_Node : in STree.SyntaxNode; Type_Node_Pos : in LexTokenManager.Token_Position; Type_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Subtype_Is_Static : in Boolean; Lower, Upper : in LexTokenManager.Lex_String) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in ErrorHandler.Error_Context; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from Accuracy, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Ident_Node, --# Id_Str, --# LexTokenManager.State, --# Lower, --# Scope, --# STree.Table, --# Subtype_Is_Static, --# Type_Sym, --# Upper & --# SLI.State from *, --# Accuracy, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node, --# Id_Str, --# LexTokenManager.State, --# Lower, --# Scope, --# STree.Table, --# Subtype_Is_Static, --# Type_Sym, --# Upper & --# SPARK_IO.File_Sys from *, --# Accuracy, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node, --# Id_Str, --# LexTokenManager.State, --# Lower, --# Scope, --# SLI.State, --# STree.Table, --# Subtype_Is_Static, --# Type_Node_Pos, --# Type_Sym, --# Upper; --# pre Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; --# post STree.Table = STree.Table~; is Subtype_Symbol : Dictionary.Symbol; begin if Dictionary.TypeIsInteger (Type_Sym) then Dictionary.Add_Integer_Subtype (Name => Id_Str, Static => Subtype_Is_Static, Parent => Type_Sym, Parent_Reference => Dictionary.Location'(Start_Position => Type_Node_Pos, End_Position => Type_Node_Pos), Lower => Lower, Upper => Upper, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Ident_Node), End_Position => Node_Position (Node => Ident_Node)), Scope => Scope, Context => Dictionary.ProgramContext, The_Subtype => Subtype_Symbol); elsif Dictionary.TypeIsModular (Type_Sym) then Dictionary.Add_Modular_Subtype (Name => Id_Str, Parent => Type_Sym, Parent_Reference => Dictionary.Location'(Start_Position => Type_Node_Pos, End_Position => Type_Node_Pos), Lower => Lower, Upper => Upper, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Ident_Node), End_Position => Node_Position (Node => 
Ident_Node)), Scope => Scope, Context => Dictionary.ProgramContext, The_Subtype => Subtype_Symbol); elsif Dictionary.TypeIsEnumeration (Type_Sym) then Dictionary.Add_Enumeration_Subtype (Name => Id_Str, Static => Subtype_Is_Static, Parent => Type_Sym, Parent_Reference => Dictionary.Location'(Start_Position => Type_Node_Pos, End_Position => Type_Node_Pos), Lower => Lower, Upper => Upper, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Ident_Node), End_Position => Node_Position (Node => Ident_Node)), Scope => Scope, Context => Dictionary.ProgramContext, The_Subtype => Subtype_Symbol); elsif Dictionary.TypeIsFloatingPoint (Type_Sym) then Dictionary.Add_Floating_Point_Subtype (Name => Id_Str, Static => Subtype_Is_Static, Parent => Type_Sym, Parent_Reference => Dictionary.Location'(Start_Position => Type_Node_Pos, End_Position => Type_Node_Pos), Lower => Lower, Upper => Upper, Error_Bound => Accuracy, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Ident_Node), End_Position => Node_Position (Node => Ident_Node)), Scope => Scope, Context => Dictionary.ProgramContext, The_Subtype => Subtype_Symbol); elsif Dictionary.TypeIsFixedPoint (Type_Sym) then Dictionary.Add_Fixed_Point_Subtype (Name => Id_Str, Static => Subtype_Is_Static, Parent => Type_Sym, Parent_Reference => Dictionary.Location'(Start_Position => Type_Node_Pos, End_Position => Type_Node_Pos), Lower => Lower, Upper => Upper, Error_Bound => Accuracy, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Ident_Node), End_Position => Node_Position (Node => Ident_Node)), Scope => Scope, Context => Dictionary.ProgramContext, The_Subtype => Subtype_Symbol); else Subtype_Symbol := Dictionary.NullSymbol; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "in Add_Scalar_Subtype"); end if; STree.Add_Node_Symbol (Node => Ident_Node, Sym => Subtype_Symbol); if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Symbol (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Ident_Node, Symbol => Subtype_Symbol, Is_Declaration => True); end if; end Add_Scalar_Subtype; ----------------------------------------------------------------------- function Already_Defined (Ident_Str : LexTokenManager.Lex_String; Scope : Dictionary.Scopes) return Boolean --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; is Sym : Dictionary.Symbol; begin Sym := Dictionary.LookupItem (Name => Ident_Str, Scope => Scope, Context => Dictionary.ProofContext, Full_Package_Name => False); return not (Dictionary.Is_Null_Symbol (Sym) or else (Dictionary.IsTypeMark (Sym) and then Dictionary.TypeIsAnnounced (TheType => Sym) and then not Dictionary.Is_Declared (Item => Sym))); end Already_Defined; ----------------------------------------------------------------------- function Select_Index_Or_Discriminant_Error (Constraint_Node : STree.SyntaxNode) return Natural --# global in CommandLineData.Content; --# in STree.Table; --# pre Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.index_or_discriminant_constraint; is Result : Natural; begin -- if an index_or_discriminant_constraint is applied to an inapplicable type we need to report and -- error. This function tries to narrow the scope of the error message returned. 
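      -- For illustration, with hypothetical declarations (the names PT1, AT1 and
      -- the discriminant Ceiling are invented, not taken from any real test case):
      --
      --    subtype PT2 is PT1 (Ceiling => 10);   -- named association: under the
      --                                          -- Ravenscar profile only a task or
      --                                          -- protected subtype can use this
      --                                          -- form, so error 891 is chosen
      --    subtype AT2 is AT1 (1 .. 10);         -- positional association: an array,
      --                                          -- task or protected subtype could be
      --                                          -- intended, so the broader 892 is used
      --
      -- When the Ravenscar profile is not selected, only an array subtype may carry
      -- such a constraint, so error 41 is reported.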
if CommandLineData.Ravenscar_Selected then -- we could be expecting an array, task or protected type if Is_Named_Association (Node => Constraint_Node) then -- must be Task or protected Result := 891; else -- could be any of Task, Protected, Array Result := 892; end if; else -- can only be an array Result := 41; end if; return Result; end Select_Index_Or_Discriminant_Error; ----------------------------------------------------------------------- procedure Wf_Ravenscar_Subtype (Id_Str : in LexTokenManager.Lex_String; Type_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Ident_Node : in STree.SyntaxNode; Constraint_Node : in STree.SyntaxNode; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# Constraint_Node, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Ident_Node, --# Id_Str, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Heap, --# Type_Sym & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Constraint_Node, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node, --# Id_Str, --# LexTokenManager.State, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap, --# Type_Sym; --# pre Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier and --# Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.index_or_discriminant_constraint; --# post STree.Table = STree.Table~; is separate; begin -- Wf_Subtype_Declaration Lower := LexTokenManager.Null_String; Upper := LexTokenManager.Null_String; Accuracy := LexTokenManager.Null_String; Ident_Node := Child_Node (Current_Node => Node); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Wf_Subtype_Declaration"); Id_Str := Node_Lex_String (Node => Ident_Node); if Already_Defined (Ident_Str => Id_Str, Scope => Scope) then Ok_To_Add := False; ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Id_Str); else Ok_To_Add := True; end if; Type_Node := Child_Node (Current_Node => Next_Sibling (Current_Node => Ident_Node)); -- ASSUME Type_Node = type_mark SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Type_Node) = SP_Symbols.type_mark, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Type_Node = type_mark in Wf_Subtype_Declaration"); Wf_Type_Mark (Node => Type_Node, Current_Scope => Scope, Context => Dictionary.ProgramContext, Type_Sym => Type_Sym); -- Subtypes of generic types are not allowed because we can't check whether the bounds will be valid -- when they are instantiated if Dictionary.TypeIsGeneric (Type_Sym) then Ok_To_Add := False; ErrorHandler.Semantic_Error (Err_Num => 652, Reference => 
ErrorHandler.No_Reference, Position => Node_Position (Node => Type_Node), Id_Str => LexTokenManager.Null_String); end if; Constraint_Node := Next_Sibling (Current_Node => Type_Node); -- ASSUME Constraint_Node = constraint OR NULL if Syntax_Node_Type (Node => Constraint_Node) = SP_Symbols.constraint then Constraint_Found := True; elsif Constraint_Node = STree.NullNode then Constraint_Found := False; else Constraint_Found := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Constraint_Node = constraint OR NULL in Wf_Subtype_Declaration"); end if; -- test to prevent Boolean subtype unless full-range if Constraint_Found and then Dictionary.TypeIsBoolean (Type_Sym) then Ok_To_Add := False; ErrorHandler.Semantic_Error (Err_Num => 412, Reference => 15, Position => Node_Position (Node => Constraint_Node), Id_Str => LexTokenManager.Null_String); end if; --# assert Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier and --# Syntax_Node_Type (Type_Node, STree.Table) = SP_Symbols.type_mark and --# (Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.constraint or --# Constraint_Node = STree.NullNode) and --# (Dictionary.Is_Null_Symbol (Type_Sym) or Dictionary.IsTypeMark (Type_Sym, Dictionary.Dict)) and --# STree.Table = STree.Table~; if Ok_To_Add and then not Dictionary.IsUnknownTypeMark (Type_Sym) then if Constraint_Found then Constraint_Node := Child_Node (Current_Node => Constraint_Node); -- ASSUME Constraint_Node = range_constraint OR floating_point_constraint OR fixed_point_constraint OR -- index_or_discriminant_constraint -- there is a constraint node so proceed as before if Syntax_Node_Type (Node => Constraint_Node) = SP_Symbols.index_or_discriminant_constraint then -- ASSUME Constraint_Node = index_or_discriminant_constraint if Dictionary.IsArrayTypeMark (Type_Sym, Scope) then if Is_Named_Association (Node => Constraint_Node) then ErrorHandler.Semantic_Error (Err_Num => 92, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Constraint_Node), Id_Str => LexTokenManager.Null_String); else -- positional association is ok if Dictionary.Is_Unconstrained_Array_Type_Mark (Type_Sym, Scope) then Dictionary.Add_Array_Subtype (Name => Id_Str, Parent => Type_Sym, Parent_Reference => Dictionary.Location'(Start_Position => Node_Position (Node => Type_Node), End_Position => Node_Position (Node => Type_Node)), Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Ident_Node), End_Position => Node_Position (Node => Ident_Node)), Scope => Scope, Context => Dictionary.ProgramContext, Static => False, The_Subtype => Subtype_Sym); STree.Add_Node_Symbol (Node => Ident_Node, Sym => Subtype_Sym); if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Symbol (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Ident_Node, Symbol => Subtype_Sym, Is_Declaration => True); end if; Check_Index_Constraints (Constraint_Node => Constraint_Node, Scope => Scope, Subtype_Sym => Subtype_Sym, Type_Sym => Type_Sym, All_Indexes_OK => All_Indexes_OK, The_Heap => The_Heap); Dictionary.SetTypeIsWellformed (Subtype_Sym, All_Indexes_OK); else -- array already constrained ErrorHandler.Semantic_Error (Err_Num => 99, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Type_Node), Id_Str => LexTokenManager.Null_String); end if; end if; -- illegal named association fo index_constraint elsif Dictionary.IsProtectedType (Type_Sym) or else Dictionary.IsTaskType 
(Type_Sym) then Wf_Ravenscar_Subtype (Id_Str => Id_Str, Type_Sym => Type_Sym, Scope => Scope, Ident_Node => Ident_Node, Constraint_Node => Constraint_Node, The_Heap => The_Heap); else -- a type has been supplied for which index_or_discriminant_constraint is -- not appropriate ErrorHandler.Semantic_Error (Err_Num => Select_Index_Or_Discriminant_Error (Constraint_Node => Constraint_Node), Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Type_Node), Id_Str => LexTokenManager.Null_String); end if; else -- ASSUME Constraint_Node = range_constraint OR floating_point_constraint OR fixed_point_constraint -- some scalar subtype expected if not Dictionary.TypeIsScalar (Type_Sym) then ErrorHandler.Semantic_Error (Err_Num => 59, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Type_Node), Id_Str => LexTokenManager.Null_String); else Subtype_Is_Static := True; -- default value if Syntax_Node_Type (Node => Constraint_Node) = SP_Symbols.range_constraint then -- ASSUME Constraint_Node = range_constraint Constraint_Node := Child_Node (Current_Node => Constraint_Node); -- ASSUME Constraint_Node = arange SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Constraint_Node) = SP_Symbols.arange, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Constraint_Node = arange in Wf_Subtype_Declaration"); Check_Range (Node => Constraint_Node, Type_Sym => Type_Sym, Scope => Scope, Static => Subtype_Is_Static, Lower => Lower, Upper => Upper, The_Heap => The_Heap); -- if constraint is a range but type is real then no accuracy -- has been supplied so we need to get it from parent if Dictionary.TypeIsFloatingPoint (Type_Sym) then Accuracy := Dictionary.GetScalarAttributeValue (False, LexTokenManager.Digits_Token, Type_Sym); elsif Dictionary.TypeIsFixedPoint (Type_Sym) then Accuracy := Dictionary.GetScalarAttributeValue (False, LexTokenManager.Delta_Token, Type_Sym); end if; Subtype_Is_Static := Subtype_Is_Static and then Dictionary.IsStatic (Type_Sym, Scope); elsif Syntax_Node_Type (Node => Constraint_Node) = SP_Symbols.floating_point_constraint then -- ASSUME Constraint_Node = floating_point_constraint if not Dictionary.TypeIsFloatingPoint (Type_Sym) then ErrorHandler.Semantic_Error (Err_Num => 100, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Type_Node), Id_Str => LexTokenManager.Null_String); else Check_Real_Accuracy (Node => Constraint_Node, Scope => Scope, Sort => Is_Floating, Accuracy => Accuracy, Static => Subtype_Is_Static, The_Heap => The_Heap); SystemErrors.RT_Assert (C => Dictionary.Is_Null_Symbol (Type_Sym) or else Dictionary.IsTypeMark (Type_Sym), Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Type_Sym to be a type in Wf_Subtype_Declaration"); Check_Real_Range (Node => Constraint_Node, Type_Sym => Type_Sym, Scope => Scope, Static => Subtype_Is_Static, Lower => Lower, Upper => Upper, The_Heap => The_Heap); end if; elsif Syntax_Node_Type (Node => Constraint_Node) = SP_Symbols.fixed_point_constraint then -- ASSUME Constraint_Node = fixed_point_constraint if not Dictionary.TypeIsFixedPoint (Type_Sym) then ErrorHandler.Semantic_Error (Err_Num => 101, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Type_Node), Id_Str => LexTokenManager.Null_String); else Check_Real_Accuracy (Node => Constraint_Node, Scope => Scope, Sort => Is_Fixed, Accuracy => Accuracy, Static => Subtype_Is_Static, The_Heap => The_Heap); SystemErrors.RT_Assert (C => Dictionary.Is_Null_Symbol (Type_Sym) or else 
Dictionary.IsTypeMark (Type_Sym), Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Type_Sym to be a type in Wf_Subtype_Declaration"); Check_Real_Range (Node => Constraint_Node, Type_Sym => Type_Sym, Scope => Scope, Static => Subtype_Is_Static, Lower => Lower, Upper => Upper, The_Heap => The_Heap); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Constraint_Node = range_constraint OR floating_point_constraint OR " & "fixed_point_constraint OR index_or_discriminant_constraint in Wf_Subtype_Declaration"); end if; Add_Scalar_Subtype (Accuracy => Accuracy, Id_Str => Id_Str, Ident_Node => Ident_Node, Type_Node_Pos => Node_Position (Node => Type_Node), Type_Sym => Type_Sym, Scope => Scope, Subtype_Is_Static => Subtype_Is_Static, Lower => Lower, Upper => Upper); end if; end if; else -- no constraint node present Dictionary.Add_Full_Range_Subtype (Name => Id_Str, Parent => Type_Sym, Parent_Reference => Dictionary.Location'(Start_Position => Node_Position (Node => Type_Node), End_Position => Node_Position (Node => Type_Node)), Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Ident_Node), End_Position => Node_Position (Node => Ident_Node)), Scope => Scope, Context => Dictionary.ProgramContext, The_Subtype => Subtype_Symbol); STree.Add_Node_Symbol (Node => Ident_Node, Sym => Subtype_Symbol); if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Symbol (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Ident_Node, Symbol => Subtype_Symbol, Is_Declaration => True); end if; end if; end if; end Wf_Subtype_Declaration; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_aggregate_choice.adb0000644000175000017500000006603111753202336025177 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Walk_Expression_P) procedure Wf_Aggregate_Choice (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type) is type Case_Choice_Sorts is (Single_Expression, Explicit_Range, Range_Constraint); Case_Choice_Sort : Case_Choice_Sorts; First_Node, Second_Node : STree.SyntaxNode; Name_Exp, First_Result, Second_Result : Sem.Exp_Record; Semantic_Errors_Found : Boolean := False; Choice_Lower_Maths_Value : Maths.Value; Choice_Upper_Maths_Value : Maths.Value := Maths.NoValue; Is_A_Range : Boolean; Index_Type_Symbol : Dictionary.Symbol; Index_Type_Lower_Bound, Index_Type_Upper_Bound : Sem.Typ_Type_Bound; Aggregate_Flags : Sem.Typ_Agg_Flags; Entry_Counter : Natural; Complete_Rec : CompleteCheck.T; Choice_Lower_Bound, Choice_Upper_Bound : Sem.Typ_Type_Bound := Sem.Unknown_Type_Bound; Lower_Bound_Unknown : Boolean; Upper_Bound_Unknown : Boolean := True; Lower_Bound_Out_Of_Range : Boolean; Upper_Bound_Out_Of_Range : Boolean := True; Out_Of_Range_Seen : Boolean; Overlap_Seen : CompleteCheck.TypOverlapState; Both_Choice_Bounds_Known : Boolean := False; Range_Constraint_Lower_Bound, Range_Constraint_Upper_Bound : Sem.Typ_Type_Bound; ------------------------------------------------------------------------ procedure Convert_Choice_Bound (Maths_Value : in Maths.Value; Bound : out Sem.Typ_Type_Bound; Unknown_Bound : out Boolean; Bound_Out_Of_Range : out Boolean) --# derives Bound, --# Bound_Out_Of_Range, --# Unknown_Bound from Maths_Value; --# post Bound.Is_Defined <-> (not Unknown_Bound and not Bound_Out_Of_Range); is Int : Integer; Maths_Error : Maths.ErrorCode; begin if Maths.HasNoValue (Maths_Value) then Bound := Sem.Typ_Type_Bound'(Value => 0, Is_Defined => False); Unknown_Bound := True; Bound_Out_Of_Range := False; else Maths.ValueToInteger (Maths_Value, Int, Maths_Error); if Maths_Error = Maths.NoError then Bound := Sem.Typ_Type_Bound'(Value => Int, Is_Defined => True); Unknown_Bound := False; Bound_Out_Of_Range := False; else Bound := Sem.Typ_Type_Bound'(Value => 0, Is_Defined => False); Unknown_Bound := False; Bound_Out_Of_Range := True; end if; end if; end Convert_Choice_Bound; ------------------------------------------------------------------------ -- note: returns True if any of the bounds is undefined, unless the -- choice is not a range, in which case, Choice_Upper is unused function Is_Choice_In_Range (Choice_Lower : Sem.Typ_Type_Bound; Choice_Upper : Sem.Typ_Type_Bound; Choice_Is_Range : Boolean; Range_Lower : Sem.Typ_Type_Bound; Range_Upper : Sem.Typ_Type_Bound) return Boolean is Result : Boolean; begin if (Choice_Lower.Is_Defined and then Range_Lower.Is_Defined and then Choice_Lower.Value < Range_Lower.Value) or else (Choice_Lower.Is_Defined and then Range_Upper.Is_Defined and then Choice_Lower.Value > Range_Upper.Value) or else (Choice_Is_Range and then Choice_Upper.Is_Defined and then Range_Upper.Is_Defined and then Choice_Upper.Value > Range_Upper.Value) then Result := False; else Result := True; end if; return Result; end Is_Choice_In_Range; ------------------------------------------------------------------------ function Is_Range_Empty (Range_Lower : Sem.Typ_Type_Bound; Range_Upper : Sem.Typ_Type_Bound) return Boolean --# pre Range_Lower.Is_Defined and Range_Upper.Is_Defined; --# return not (Range_Lower.Value <= Range_Upper.Value); is begin return not (Range_Lower.Value <= Range_Upper.Value); end Is_Range_Empty; 
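-- Worked example for the two range helpers above, with all bounds defined
-- (values invented for illustration): a choice of 5 .. 7 checked against an
-- index range of 1 .. 10 is in range; 5 .. 12 is not, because its upper bound
-- exceeds 10; a choice with an undefined bound is treated as in range.
-- Is_Range_Empty reports 7 .. 5 as empty and 5 .. 7 as non-empty.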
----------------------------------------------------------------------- procedure Convert_Boolean_Maths_Value (Value : in out Maths.Value) --# derives Value from *; is begin if Value = Maths.FalseValue then Value := Maths.ZeroInteger; elsif Value = Maths.TrueValue then Value := Maths.OneInteger; end if; end Convert_Boolean_Maths_Value; begin -- Wf_Aggregate_Choice -- Assume aggregate is array aggregate with named association Aggregate_Stack.Pop (Type_Sym => Index_Type_Symbol, Lower_Bound => Index_Type_Lower_Bound, Upper_Bound => Index_Type_Upper_Bound, Agg_Flags => Aggregate_Flags, Counter => Entry_Counter, Complete_Rec => Complete_Rec); First_Node := STree.Child_Node (Current_Node => Node); -- ASSUME First_Node = simple_expression OR annotation_simple_expression SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => First_Node) = SP_Symbols.simple_expression or else STree.Syntax_Node_Type (Node => First_Node) = SP_Symbols.annotation_simple_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect First_Node = simple_expression OR annotation_simple_expression in Wf_Aggregate_Choice"); Second_Node := STree.Next_Sibling (Current_Node => First_Node); -- ASSUME Second_Node = range_constraint OR simple_expression OR -- annotation_range_constraint OR annotation_simple_expression OR NULL if Second_Node = STree.NullNode then -- ASSUME Second_Node = NULL Case_Choice_Sort := Single_Expression; elsif STree.Syntax_Node_Type (Node => Second_Node) = SP_Symbols.simple_expression or else STree.Syntax_Node_Type (Node => Second_Node) = SP_Symbols.annotation_simple_expression then -- ASSUME Second_Node = simple_expression OR annotation_simple_expression Case_Choice_Sort := Explicit_Range; elsif STree.Syntax_Node_Type (Node => Second_Node) = SP_Symbols.range_constraint or else STree.Syntax_Node_Type (Node => Second_Node) = SP_Symbols.annotation_range_constraint then -- ASSUME Second_Node = range_constraint OR annotation_range_constraint Case_Choice_Sort := Range_Constraint; else Case_Choice_Sort := Single_Expression; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Second_Node = range_constraint OR simple_expression OR annotation_range_constraint OR" & " annotation_simple_expression OR NULL in Wf_Aggregate_Choice"); end if; --# assert (STree.Syntax_Node_Type (First_Node, STree.Table) = SP_Symbols.simple_expression or --# STree.Syntax_Node_Type (First_Node, STree.Table) = SP_Symbols.annotation_simple_expression) and --# (STree.Syntax_Node_Type (Second_Node, STree.Table) = SP_Symbols.range_constraint or --# STree.Syntax_Node_Type (Second_Node, STree.Table) = SP_Symbols.simple_expression or --# STree.Syntax_Node_Type (Second_Node, STree.Table) = SP_Symbols.annotation_range_constraint or --# STree.Syntax_Node_Type (Second_Node, STree.Table) = SP_Symbols.annotation_simple_expression or --# Second_Node = STree.NullNode) and --# Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State) and --# ((Index_Type_Lower_Bound.Is_Defined and Index_Type_Upper_Bound.Is_Defined) -> --# (Index_Type_Lower_Bound.Value <= Index_Type_Upper_Bound.Value)) and --# (Complete_Rec.ActualUpperBound - Complete_Rec.LowerBound < ExaminerConstants.CompleteCheckSize) and --# (Dictionary.Is_Null_Symbol (Index_Type_Symbol) or Dictionary.IsTypeMark (Index_Type_Symbol, Dictionary.Dict)); case Case_Choice_Sort is when Single_Expression => Exp_Stack.Pop (Item => First_Result, Stack => E_Stack); Exp_Stack.Pop (Item => Name_Exp, Stack => E_Stack); if Dictionary.IsUnknownTypeMark 
(First_Result.Type_Symbol) then null; elsif Name_Exp.Param_Count > 0 and then Dictionary.CompatibleTypes (Scope, First_Result.Type_Symbol, Dictionary.GetArrayIndex (Name_Exp.Type_Symbol, Name_Exp.Param_Count)) then if not First_Result.Is_Static then ErrorHandler.Semantic_Error (Err_Num => 36, Reference => 1, Position => STree.Node_Position (Node => First_Node), Id_Str => LexTokenManager.Null_String); Semantic_Errors_Found := True; end if; else ErrorHandler.Semantic_Error (Err_Num => 38, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => First_Node), Id_Str => LexTokenManager.Null_String); Semantic_Errors_Found := True; end if; -- code to work out whether we have a single choice or a -- range and to collect the appropriate values -- note that these will be nonsense if semantic errors have been found Choice_Lower_Maths_Value := First_Result.Value; if First_Result.Is_ARange then Is_A_Range := True; Choice_Upper_Maths_Value := First_Result.Range_RHS; else Is_A_Range := False; end if; Name_Exp.Errors_In_Expression := Semantic_Errors_Found or else Name_Exp.Errors_In_Expression or else First_Result.Errors_In_Expression; when Explicit_Range => Exp_Stack.Pop (Item => Second_Result, Stack => E_Stack); Exp_Stack.Pop (Item => First_Result, Stack => E_Stack); Exp_Stack.Pop (Item => Name_Exp, Stack => E_Stack); if not Dictionary.CompatibleTypes (Scope, First_Result.Type_Symbol, Second_Result.Type_Symbol) then ErrorHandler.Semantic_Error (Err_Num => 42, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Second_Node), Id_Str => LexTokenManager.Null_String); Semantic_Errors_Found := True; elsif Name_Exp.Param_Count > 0 and then not Dictionary.CompatibleTypes (Scope, First_Result.Type_Symbol, Dictionary.GetArrayIndex (Name_Exp.Type_Symbol, Name_Exp.Param_Count)) then ErrorHandler.Semantic_Error (Err_Num => 106, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => First_Node), Id_Str => LexTokenManager.Null_String); Semantic_Errors_Found := True; end if; if not (First_Result.Is_Static and then Second_Result.Is_Static) then ErrorHandler.Semantic_Error (Err_Num => 45, Reference => 1, Position => STree.Node_Position (Node => First_Node), Id_Str => LexTokenManager.Null_String); Semantic_Errors_Found := True; end if; -- code to collect the appropriate values for the extent of the range -- note that these will be nonsense if semantic errors have been found# Choice_Lower_Maths_Value := First_Result.Value; Choice_Upper_Maths_Value := Second_Result.Value; Is_A_Range := True; Name_Exp.Errors_In_Expression := Semantic_Errors_Found or else Name_Exp.Errors_In_Expression or else First_Result.Errors_In_Expression or else Second_Result.Errors_In_Expression; when Range_Constraint => Exp_Stack.Pop (Item => Second_Result, Stack => E_Stack); Exp_Stack.Pop (Item => First_Result, Stack => E_Stack); Exp_Stack.Pop (Item => Name_Exp, Stack => E_Stack); if not Dictionary.CompatibleTypes (Scope, First_Result.Type_Symbol, Second_Result.Type_Symbol) then ErrorHandler.Semantic_Error (Err_Num => 106, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Second_Node), Id_Str => LexTokenManager.Null_String); Semantic_Errors_Found := True; elsif Name_Exp.Param_Count > 0 and then not Dictionary.CompatibleTypes (Scope, First_Result.Type_Symbol, Dictionary.GetArrayIndex (Name_Exp.Type_Symbol, Name_Exp.Param_Count)) then ErrorHandler.Semantic_Error (Err_Num => 38, Reference => ErrorHandler.No_Reference, Position => 
STree.Node_Position (Node => First_Node), Id_Str => LexTokenManager.Null_String); Semantic_Errors_Found := True; end if; if not (First_Result.Is_Constant and then First_Result.Is_ARange) then ErrorHandler.Semantic_Error (Err_Num => 95, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => First_Node), Id_Str => LexTokenManager.Null_String); Semantic_Errors_Found := True; end if; if not Second_Result.Is_Static then ErrorHandler.Semantic_Error (Err_Num => 45, Reference => 1, Position => STree.Node_Position (Node => Second_Node), Id_Str => LexTokenManager.Null_String); Semantic_Errors_Found := True; end if; -- code to collect the appropriate values for the extent of the range -- note that these will be nonsense if semantic errors have been found Choice_Lower_Maths_Value := Second_Result.Value; Choice_Upper_Maths_Value := Second_Result.Range_RHS; Is_A_Range := True; Name_Exp.Errors_In_Expression := Semantic_Errors_Found or else Name_Exp.Errors_In_Expression or else First_Result.Errors_In_Expression or else Second_Result.Errors_In_Expression; -- somewhere need to check that Second_Result range is within the type -- given by First_Result end case; Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); --# assert (STree.Syntax_Node_Type (First_Node, STree.Table) = SP_Symbols.simple_expression or --# STree.Syntax_Node_Type (First_Node, STree.Table) = SP_Symbols.annotation_simple_expression) and --# (STree.Syntax_Node_Type (Second_Node, STree.Table) = SP_Symbols.range_constraint or --# STree.Syntax_Node_Type (Second_Node, STree.Table) = SP_Symbols.simple_expression or --# STree.Syntax_Node_Type (Second_Node, STree.Table) = SP_Symbols.annotation_range_constraint or --# STree.Syntax_Node_Type (Second_Node, STree.Table) = SP_Symbols.annotation_simple_expression or --# Second_Node = STree.NullNode) and --# Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State) and --# ((Index_Type_Lower_Bound.Is_Defined and Index_Type_Upper_Bound.Is_Defined) -> --# (Index_Type_Lower_Bound.Value <= Index_Type_Upper_Bound.Value)) and --# (Complete_Rec.ActualUpperBound - Complete_Rec.LowerBound < ExaminerConstants.CompleteCheckSize) and --# (Dictionary.Is_Null_Symbol (Index_Type_Symbol) or Dictionary.IsTypeMark (Index_Type_Symbol, Dictionary.Dict)); if not Semantic_Errors_Found then Convert_Boolean_Maths_Value (Value => Choice_Lower_Maths_Value); Convert_Choice_Bound (Maths_Value => Choice_Lower_Maths_Value, Bound => Choice_Lower_Bound, Unknown_Bound => Lower_Bound_Unknown, Bound_Out_Of_Range => Lower_Bound_Out_Of_Range); if Is_A_Range then Convert_Boolean_Maths_Value (Value => Choice_Upper_Maths_Value); -- CUMV always defined here Convert_Choice_Bound (Maths_Value => Choice_Upper_Maths_Value, Bound => Choice_Upper_Bound, Unknown_Bound => Upper_Bound_Unknown, Bound_Out_Of_Range => Upper_Bound_Out_Of_Range); else Choice_Upper_Bound := Sem.Unknown_Type_Bound; end if; if Lower_Bound_Out_Of_Range or else (Is_A_Range and then Upper_Bound_Out_Of_Range) then Both_Choice_Bounds_Known := False; ErrorHandler.Semantic_Warning (Err_Num => 305, Position => STree.Node_Position (Node => First_Node), Id_Str => LexTokenManager.Null_String); elsif Lower_Bound_Unknown or else (Is_A_Range and then Upper_Bound_Unknown) then Both_Choice_Bounds_Known := False; Complete_Rec.Undeterminable := True; ErrorHandler.Semantic_Warning (Err_Num => 200, Position => STree.Node_Position (Node => First_Node), Id_Str => LexTokenManager.Null_String); else Both_Choice_Bounds_Known := True; end if; --# assert (STree.Syntax_Node_Type 
(First_Node, STree.Table) = SP_Symbols.simple_expression or --# STree.Syntax_Node_Type (First_Node, STree.Table) = SP_Symbols.annotation_simple_expression) and --# (STree.Syntax_Node_Type (Second_Node, STree.Table) = SP_Symbols.range_constraint or --# STree.Syntax_Node_Type (Second_Node, STree.Table) = SP_Symbols.simple_expression or --# STree.Syntax_Node_Type (Second_Node, STree.Table) = SP_Symbols.annotation_range_constraint or --# STree.Syntax_Node_Type (Second_Node, STree.Table) = SP_Symbols.annotation_simple_expression or --# Second_Node = STree.NullNode) and --# not Semantic_Errors_Found and --# ((Both_Choice_Bounds_Known and Is_A_Range) -> (Choice_Lower_Bound.Is_Defined and Choice_Upper_Bound.Is_Defined)) and --# Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State) and --# ((Index_Type_Lower_Bound.Is_Defined and Index_Type_Upper_Bound.Is_Defined) -> --# (Index_Type_Lower_Bound.Value <= Index_Type_Upper_Bound.Value)) and --# (Complete_Rec.ActualUpperBound - Complete_Rec.LowerBound < ExaminerConstants.CompleteCheckSize) and --# (Dictionary.Is_Null_Symbol (Index_Type_Symbol) or Dictionary.IsTypeMark (Index_Type_Symbol, Dictionary.Dict)); if Both_Choice_Bounds_Known then -- check the case choice lies within controlling type if not Is_Choice_In_Range (Choice_Lower => Choice_Lower_Bound, Choice_Upper => Choice_Upper_Bound, Choice_Is_Range => Is_A_Range, Range_Lower => Index_Type_Lower_Bound, Range_Upper => Index_Type_Upper_Bound) then if Case_Choice_Sort = Range_Constraint then ErrorHandler.Semantic_Error (Err_Num => 410, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Second_Node), Id_Str => LexTokenManager.Null_String); else ErrorHandler.Semantic_Error (Err_Num => 410, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => First_Node), Id_Str => LexTokenManager.Null_String); end if; Semantic_Errors_Found := True; elsif Is_A_Range and then Is_Range_Empty (Range_Lower => Choice_Lower_Bound, Range_Upper => Choice_Upper_Bound) then if Case_Choice_Sort = Range_Constraint then ErrorHandler.Semantic_Error (Err_Num => 409, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Second_Node), Id_Str => LexTokenManager.Null_String); else ErrorHandler.Semantic_Error (Err_Num => 409, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => First_Node), Id_Str => LexTokenManager.Null_String); end if; Semantic_Errors_Found := True; end if; --# assert (STree.Syntax_Node_Type (First_Node, STree.Table) = SP_Symbols.simple_expression or --# STree.Syntax_Node_Type (First_Node, STree.Table) = SP_Symbols.annotation_simple_expression) and --# (STree.Syntax_Node_Type (Second_Node, STree.Table) = SP_Symbols.range_constraint or --# STree.Syntax_Node_Type (Second_Node, STree.Table) = SP_Symbols.simple_expression or --# STree.Syntax_Node_Type (Second_Node, STree.Table) = SP_Symbols.annotation_range_constraint or --# STree.Syntax_Node_Type (Second_Node, STree.Table) = SP_Symbols.annotation_simple_expression or --# Second_Node = STree.NullNode) and --# Both_Choice_Bounds_Known and --# ((not Semantic_Errors_Found and Is_A_Range) -> (Choice_Lower_Bound.Value <= Choice_Upper_Bound.Value)) and --# Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State) and --# ((Index_Type_Lower_Bound.Is_Defined and Index_Type_Upper_Bound.Is_Defined) -> --# (Index_Type_Lower_Bound.Value <= Index_Type_Upper_Bound.Value)) and --# (Complete_Rec.ActualUpperBound - Complete_Rec.LowerBound < ExaminerConstants.CompleteCheckSize) and 
--# (Dictionary.Is_Null_Symbol (Index_Type_Symbol) or Dictionary.IsTypeMark (Index_Type_Symbol, Dictionary.Dict)); -- check the case choice lies within Range_Constraint type if Case_Choice_Sort = Range_Constraint then Sem.Get_Type_Bounds (Type_Symbol => First_Result.Type_Symbol, Lower_Bound => Range_Constraint_Lower_Bound, Upper_Bound => Range_Constraint_Upper_Bound); if not Is_Choice_In_Range (Choice_Lower => Choice_Lower_Bound, Choice_Upper => Choice_Upper_Bound, Choice_Is_Range => Is_A_Range, Range_Lower => Range_Constraint_Lower_Bound, Range_Upper => Range_Constraint_Upper_Bound) then ErrorHandler.Semantic_Error (Err_Num => 413, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Second_Node), Id_Str => LexTokenManager.Null_String); Semantic_Errors_Found := True; end if; end if; end if; end if; --# assert (STree.Syntax_Node_Type (First_Node, STree.Table) = SP_Symbols.simple_expression or --# STree.Syntax_Node_Type (First_Node, STree.Table) = SP_Symbols.annotation_simple_expression) and --# ((not Semantic_Errors_Found and Both_Choice_Bounds_Known and Is_A_Range) -> --# (Choice_Lower_Bound.Value <= Choice_Upper_Bound.Value)) and --# Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State) and --# ((Index_Type_Lower_Bound.Is_Defined and Index_Type_Upper_Bound.Is_Defined) -> --# (Index_Type_Lower_Bound.Value <= Index_Type_Upper_Bound.Value)) and --# (Complete_Rec.ActualUpperBound - Complete_Rec.LowerBound < ExaminerConstants.CompleteCheckSize) and --# (Dictionary.Is_Null_Symbol (Index_Type_Symbol) or Dictionary.IsTypeMark (Index_Type_Symbol, Dictionary.Dict)); if (not Semantic_Errors_Found) and then Both_Choice_Bounds_Known and then (Aggregate_Flags.Check_Completeness or else Aggregate_Flags.Check_Overlap) then if Is_A_Range then CompleteCheck.SeenRange (Complete_Rec, Choice_Lower_Bound.Value, Choice_Upper_Bound.Value, Out_Of_Range_Seen, Overlap_Seen); else CompleteCheck.SeenElement (Complete_Rec, Choice_Lower_Bound.Value, Out_Of_Range_Seen, Overlap_Seen); end if; if Out_Of_Range_Seen then Aggregate_Flags.Out_Of_Range_Seen := True; end if; if Aggregate_Flags.Check_Overlap and then Overlap_Seen = CompleteCheck.Overlap then ErrorHandler.Semantic_Error (Err_Num => 407, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => First_Node), Id_Str => LexTokenManager.Null_String); Semantic_Errors_Found := True; end if; end if; --# assert Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State) and --# ((Index_Type_Lower_Bound.Is_Defined and Index_Type_Upper_Bound.Is_Defined) -> --# (Index_Type_Lower_Bound.Value <= Index_Type_Upper_Bound.Value)) and --# (Complete_Rec.ActualUpperBound - Complete_Rec.LowerBound < ExaminerConstants.CompleteCheckSize) and --# (Dictionary.Is_Null_Symbol (Index_Type_Symbol) or Dictionary.IsTypeMark (Index_Type_Symbol, Dictionary.Dict)); if Semantic_Errors_Found then Aggregate_Flags.Check_Completeness := False; end if; Aggregate_Stack.Push (Type_Sym => Index_Type_Symbol, Lower_Bound => Index_Type_Lower_Bound, Upper_Bound => Index_Type_Upper_Bound, Agg_Flags => Aggregate_Flags, Counter => Entry_Counter, Complete_Rec => Complete_Rec); end Wf_Aggregate_Choice; spark-2012.0.deb/examiner/sem-check_priority_property_consistency.adb0000644000175000017500000001053511753202336025112 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you 
can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) procedure Check_Priority_Property_Consistency (Sym : in Dictionary.Symbol; Type_Sym : in Dictionary.Symbol; Priority_Property_Value : in LexTokenManager.Lex_String; Error_Node_Pos : in LexTokenManager.Token_Position; Consistent : out Boolean) is The_Error : Natural := 0; function Object_Can_Have_Priority (Sym : Dictionary.Symbol; Type_Sym : Dictionary.Symbol) return Boolean --# global in CommandLineData.Content; --# in Dictionary.Dict; is Result : Boolean; begin if Dictionary.IsOwnVariable (Sym) and then Dictionary.GetOwnVariableProtected (Sym) then if not Dictionary.IsUnknownTypeMark (Type_Sym) then if (Dictionary.Is_Declared (Item => Type_Sym) or else Dictionary.IsPredefined (Type_Sym)) and then not Dictionary.IsProtectedTypeMark (Type_Sym) then -- Type in the announcement is not a protected type. Result := False; else -- Type announced protected own variable. -- Type is either not declared (ok) or is declared and -- is a protected type (ok). Result := True; end if; else -- Protected own variable is not type announced Result := False; end if; else -- Not a protected own variable. Result := False; end if; return Result; end Object_Can_Have_Priority; begin -- Check_Priority_Property_Consistency if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Priority_Property_Value, Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq then if not Object_Can_Have_Priority (Sym => Sym, Type_Sym => Type_Sym) then -- Priority property not allowed. The_Error := 919; elsif Dictionary.IsProtectedTypeMark (Type_Sym) and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Dictionary.GetTypePriority (Type_Sym), Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Priority_Property_Value, Lex_Str2 => Dictionary.GetTypePriority (Type_Sym)) /= LexTokenManager.Str_Eq then -- Priority property value does not match that defined in the protected type. The_Error := 932; end if; elsif Object_Can_Have_Priority (Sym => Sym, Type_Sym => Type_Sym) and then Dictionary.Is_Declared (Item => Type_Sym) and then Dictionary.IsProtectedTypeMark (Type_Sym) then -- The own variable should have a priority. The_Error := 922; end if; if The_Error /= 0 then ErrorHandler.Semantic_Error (Err_Num => The_Error, Reference => ErrorHandler.No_Reference, Position => Error_Node_Pos, Id_Str => Dictionary.GetSimpleName (Sym)); end if; -- Mark consistent if no errors. -- Also mark consistent if a priority is given in the correct place but -- doesn't match that in the protected type definition. -- This stops secondary errors. 
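-- For instance (values invented for illustration): a protected own variable
-- whose priority property is 10 while its protected type declares priority 5
-- gets error 932 here but is still reported as Consistent, whereas errors
-- 919 and 922 leave Consistent set to False.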
Consistent := The_Error = 0 or else The_Error = 932; end Check_Priority_Property_Consistency; spark-2012.0.deb/examiner/sem-walk_expression_p-expression_type_from_context.adb0000644000175000017500000005444311753202336027276 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Walk_Expression_P) procedure Expression_Type_From_Context (Exp_Node : in STree.SyntaxNode; E_Stack : in Exp_Stack.Exp_Stack_Type; T_Stack : in Type_Context_Stack.T_Stack_Type; New_Context_Type : out Dictionary.Symbol) is Top_Of_Exp_Stack : Sem.Exp_Record; Parameter_Name_OK : Boolean; Param_Symbol : Dictionary.Symbol; Parent, T_Node_2 : STree.SyntaxNode; begin ------------------------------------------------------------------------ -- Possible parent nodes are: -- -- Group 1 - Expressions appearing in statements or declarations -- -- constant_declaration -- variable_declaration -- assignment_statement -- condition -- case_statement -- return_statement -- delay_statement -- -- Group 2 - Expressions appearing in expressions, common to both code -- and annotations -- -- annotation_named_argument_association -- named_argument_association -- annotation_positional_argument_association -- positional_argument_association -- annotation_attribute_designator_opt -- attribute_designator_opt -- annotation_aggregate_or_expression -- aggregate_or_expression -- annotation_primary -- primary -- annotation_qualified_expression -- qualified_expression -- -- Group 3 - Expressions appearing in annotations only -- -- store -- store_list -- return_expression -- predicate ------------------------------------------------------------------------ Parent := STree.Parent_Node (Current_Node => Exp_Node); -- ASSUME Parent = constant_declaration OR named_argument_association OR return_statement OR -- proof_constant_declaration OR annotation_named_argument_association OR return_expression OR -- positional_argument_association OR attribute_designator_opt OR primary OR -- annotation_positional_argument_association OR annotation_attribute_designator_opt OR annotation_primary OR -- aggregate_or_expression OR qualified_expression OR ancestor_part OR -- annotation_aggregate_or_expression OR annotation_qualified_expression OR annotation_ancestor_part OR -- positional_record_component_association OR named_record_component_association OR -- annotation_positional_record_component_association OR annotation_named_record_component_association OR -- variable_declaration OR unconstrained_array_assignment OR assignment_statement OR condition OR -- case_statement OR priority_pragma OR delay_statement OR store OR store_list OR predicate case 
STree.Syntax_Node_Type (Node => Parent) is when SP_Symbols.constant_declaration | SP_Symbols.variable_declaration | SP_Symbols.assignment_statement | SP_Symbols.condition | SP_Symbols.case_statement | SP_Symbols.return_statement | SP_Symbols.delay_statement => -- ASSUME Parent = constant_declaration OR variable_declaration OR assignment_statement OR condition OR -- case_statement OR return_statement OR delay_statement -- Context here is passed in from calling environment, and -- no change required. New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); when SP_Symbols.named_argument_association | SP_Symbols.annotation_named_argument_association => -- ASSUME Parent = named_argument_association OR annotation_named_argument_association if Exp_Stack.Is_Empty (Stack => E_Stack) then -- Must be a named argument association in a procedure -- call. Wf_Proc_Call processes the parameter name, and -- does NOT put it on the expression stack, but it does pass -- the expected type into WalkExpression, so this is unchanged -- in this case. New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); else -- Must be a named argument association which is the parameter -- list of a function call. Top_Of_Exp_Stack := Exp_Stack.Top (Stack => E_Stack); -- We need to find the parameter name, and determine if it is legal: Find_Named_Argument_Association_Parameter (Node => Parent, Subprog_Sym => Top_Of_Exp_Stack.Other_Symbol, Name_Is_Parameter_Name => Parameter_Name_OK, Param_Sym => Param_Symbol); if Parameter_Name_OK then -- The parameter name denotes a legal parameter of this subprogram, -- so look up its type. New_Context_Type := Dictionary.GetType (Param_Symbol); else -- The parameter name is illegal. This will be picked up again later -- on in wf_named_argument_association. The type context simply -- becomes unknown. New_Context_Type := Dictionary.GetUnknownTypeMark; end if; end if; when SP_Symbols.qualified_expression | SP_Symbols.annotation_qualified_expression => -- ASSUME Parent = qualified_expression OR annotation_qualified_expression -- Context changes to the type denoted by the Name preceeding the -- expression. The result of evaluating this Name should be on the -- top of the Expression stack. Top_Of_Exp_Stack := Exp_Stack.Top (Stack => E_Stack); New_Context_Type := Top_Of_Exp_Stack.Type_Symbol; when SP_Symbols.positional_argument_association | SP_Symbols.annotation_positional_argument_association => -- ASSUME Parent = positional_argument_association OR annotation_positional_argument_association -- Parent could be name_argument_list or another -- positional_argument_association. -- This could be part of a -- Type conversion -- Array index -- Function call if Exp_Stack.Is_Empty (Stack => E_Stack) then -- EStack might be empty here => we must be processing an -- actual param of a procedure call statement. Context -- will have been passed in from wf_proc_call, so no change here. New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); else Top_Of_Exp_Stack := Exp_Stack.Top (Stack => E_Stack); case Top_Of_Exp_Stack.Sort is when Sem.Is_Object => -- Must be an array indexing expression. The context is therefore -- the subtype corresponding to the N'th dimension of that array type. if Top_Of_Exp_Stack.Param_Count < Natural'Last then New_Context_Type := Dictionary.GetArrayIndex (Top_Of_Exp_Stack.Type_Symbol, Top_Of_Exp_Stack.Param_Count + 1); else New_Context_Type := Dictionary.GetUnknownTypeMark; end if; when Sem.Is_Function => -- Must be an actual parameter of a function call. 
The context -- is the subtype indicated by the corresponding formal parameter, -- if there is any such parameter. If the wrong number of actual -- parameters has been given, then return UnknownTypeMark - this -- error will be picked up later in the UP pass. if Top_Of_Exp_Stack.Param_Count < Dictionary.GetNumberOfSubprogramParameters (Top_Of_Exp_Stack.Other_Symbol) then New_Context_Type := Dictionary.GetType (Dictionary.GetSubprogramParameter (Top_Of_Exp_Stack.Other_Symbol, Top_Of_Exp_Stack.Param_Count + 1)); else New_Context_Type := Dictionary.GetUnknownTypeMark; end if; when Sem.Is_Type_Mark => -- Must be a type conversion. The argument of the type conversion -- could be pretty much anything - legal or illegal, so the -- context is unknown. New_Context_Type := Dictionary.GetUnknownTypeMark; when others => -- Other cases - all errors which will be caught later on... -- We still need to push something onto the TStack to make -- it balance, so simply copy the existing top entry. New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); end case; end if; when SP_Symbols.aggregate_or_expression | SP_Symbols.annotation_aggregate_or_expression => -- ASSUME Parent = aggregate_or_expression OR annotation_aggregate_or_expression Parent := STree.Parent_Node (Current_Node => Parent); -- ASSUME Parent = component_association OR named_association OR named_association_rep OR -- annotation_component_association OR annotation_named_association OR annotation_named_association_rep OR -- positional_association OR positional_association_rep OR -- annotation_positional_association OR annotation_positional_association_rep OR name_value_property case STree.Syntax_Node_Type (Node => Parent) is when SP_Symbols.component_association | SP_Symbols.annotation_component_association => -- ASSUME Parent = component_association OR annotation_component_association -- Must be an array aggregate with a single others clause, so the -- new context type is the type of the array element Top_Of_Exp_Stack := Exp_Stack.Top (Stack => E_Stack); New_Context_Type := Dictionary.GetArrayComponent (Top_Of_Exp_Stack.Type_Symbol); when SP_Symbols.named_association | SP_Symbols.named_association_rep | SP_Symbols.annotation_named_association | SP_Symbols.annotation_named_association_rep => -- ASSUME Parent = named_association OR named_association_rep OR -- annotation_named_association OR annotation_named_association_rep Top_Of_Exp_Stack := Exp_Stack.Top (Stack => E_Stack); case Top_Of_Exp_Stack.Sort is when Sem.Is_Parameter_Name => -- Must be a named field of a record aggregate. New context is -- the type of that field, unless the field was itself illegal, -- in which case the context is unknown. if Top_Of_Exp_Stack = Null_Parameter_Record then New_Context_Type := Dictionary.GetUnknownTypeMark; else New_Context_Type := Dictionary.GetType (Top_Of_Exp_Stack.Other_Symbol); end if; when Sem.Is_Type_Mark => -- Must be a named element of an array aggregate. New context is -- the type of the array element. 
New_Context_Type := Dictionary.GetArrayComponent (Top_Of_Exp_Stack.Type_Symbol); when others => New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); end case; when SP_Symbols.positional_association | SP_Symbols.positional_association_rep | SP_Symbols.annotation_positional_association | SP_Symbols.annotation_positional_association_rep => -- ASSUME Parent = positional_association OR positional_association_rep OR -- annotation_positional_association OR annotation_positional_association_rep Top_Of_Exp_Stack := Exp_Stack.Top (Stack => E_Stack); case Top_Of_Exp_Stack.Sort is when Sem.Is_Type_Mark => if Dictionary.TypeIsRecord (Top_Of_Exp_Stack.Type_Symbol) then -- New context is the type of the N'th field of the record, assuming -- there is such a field. if Top_Of_Exp_Stack.Param_Count < Dictionary.GetNumberOfComponents (Top_Of_Exp_Stack.Type_Symbol) then New_Context_Type := Dictionary.GetType (Dictionary.GetRecordComponent (Top_Of_Exp_Stack.Type_Symbol, Top_Of_Exp_Stack.Param_Count + 1)); else New_Context_Type := Dictionary.GetUnknownTypeMark; end if; elsif Dictionary.TypeIsArray (Top_Of_Exp_Stack.Type_Symbol) then -- New context is the element type of the array New_Context_Type := Dictionary.GetArrayComponent (Top_Of_Exp_Stack.Type_Symbol); else -- Must be an error - this will be caught later on in the UP -- pass, but we need to push something so... New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); end if; when others => -- Must be an error - this will be caught later on in the UP -- pass, but we need to push something so... New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); end case; when SP_Symbols.name_value_property => -- ASSUME Parent = name_value_property New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); when others => New_Context_Type := Dictionary.NullSymbol; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Parent = component_association OR named_association OR named_association_rep OR " & "annotation_component_association OR annotation_named_association OR annotation_named_association_rep OR " & "positional_association OR positional_association_rep OR annotation_positional_association OR " & "annotation_positional_association_rep OR name_value_property in Expression_Type_From_Context"); end case; when SP_Symbols.attribute_designator_opt | SP_Symbols.annotation_attribute_designator_opt => -- ASSUME Parent = attribute_designator_opt OR annotation_attribute_designator_opt -- Context change for attribute arguments is handled in -- AttributeDesignatorTypeFromContext, so no change here. New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); when SP_Symbols.primary | SP_Symbols.annotation_primary => -- ASSUME Parent = primary OR annotation_primary -- Must be a parenthesized expression - context does not change. 
New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); when SP_Symbols.store => -- ASSUME Parent = store Top_Of_Exp_Stack := Exp_Stack.Top (Stack => E_Stack); case Top_Of_Exp_Stack.Sort is when Sem.Is_Object => if Dictionary.TypeIsArray (Top_Of_Exp_Stack.Type_Symbol) then New_Context_Type := Dictionary.GetArrayComponent (Top_Of_Exp_Stack.Type_Symbol); elsif Dictionary.TypeIsRecord (Top_Of_Exp_Stack.Type_Symbol) then -- The record field being updated should be an identifier -- node directly below the store node, so T_Node_2 := STree.Last_Child_Of (Start_Node => Parent); if STree.Syntax_Node_Type (Node => T_Node_2) = SP_Symbols.identifier then -- ASSUME T_Node_2 = identifier Param_Symbol := Dictionary.LookupSelectedItem (Prefix => Top_Of_Exp_Stack.Type_Symbol, Selector => STree.Node_Lex_String (Node => T_Node_2), Scope => Dictionary.GetScope (Top_Of_Exp_Stack.Type_Symbol), Context => Dictionary.ProofContext); if not Dictionary.Is_Null_Symbol (Param_Symbol) and then Dictionary.IsRecordComponent (Param_Symbol) then STree.Set_Node_Lex_String (Sym => Param_Symbol, Node => T_Node_2); New_Context_Type := Dictionary.GetType (Param_Symbol); else -- error - will be caught in up_wf_store New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); end if; else -- error - will be caught in up_wf_store New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); end if; else -- error - will be caught in up_wf_store New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); end if; when others => -- Must be an error - this will be caught later on in the UP -- pass, but we need to push something so... New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); end case; when SP_Symbols.store_list => -- ASSUME Parent = store_list Top_Of_Exp_Stack := Exp_Stack.Top (Stack => E_Stack); case Top_Of_Exp_Stack.Sort is when Sem.Is_Object => if Dictionary.TypeIsArray (Top_Of_Exp_Stack.Type_Symbol) then if Top_Of_Exp_Stack.Param_Count < Natural'Last then New_Context_Type := Dictionary.GetArrayIndex (Top_Of_Exp_Stack.Type_Symbol, Top_Of_Exp_Stack.Param_Count + 1); else New_Context_Type := Dictionary.GetUnknownTypeMark; end if; else -- error - will be caught in up_wf_store New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); end if; when others => -- Must be an error - this will be caught later on in the UP -- pass, but we need to push something so... New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); end case; when SP_Symbols.return_expression | SP_Symbols.predicate => -- ASSUME return_expression OR predicate -- Context for predicate and return_expression is always passed -- in from wf_predicate, or wf_function_constraint, so no change needed. 
New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); when SP_Symbols.proof_constant_declaration | SP_Symbols.ancestor_part | SP_Symbols.annotation_ancestor_part | SP_Symbols.positional_record_component_association | SP_Symbols.annotation_positional_record_component_association | SP_Symbols.named_record_component_association | SP_Symbols.annotation_named_record_component_association | SP_Symbols.unconstrained_array_assignment | SP_Symbols.priority_pragma => -- ASSUME Parent = proof_constant_declaration OR ancestor_part OR annotation_ancestor_part OR -- positional_record_component_association OR named_record_component_association OR -- annotation_positional_record_component_association OR annotation_named_record_component_association OR -- unconstrained_array_assignment OR priority_pragma -- In all other cases, the context is unchanged, but we push a copy -- of the current context type to keep the stack balanced. New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); when others => New_Context_Type := Dictionary.NullSymbol; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Parent = constant_declaration OR named_argument_association OR return_statement OR " & "proof_constant_declaration OR annotation_named_argument_association OR return_expression OR " & "positional_argument_association OR attribute_designator_opt OR primary OR " & "annotation_positional_argument_association OR annotation_attribute_designator_opt OR annotation_primary OR " & "aggregate_or_expression OR qualified_expression OR ancestor_part OR " & "annotation_aggregate_or_expression OR annotation_qualified_expression OR annotation_ancestor_part OR " & "positional_record_component_association OR named_record_component_association OR " & "annotation_positional_record_component_association OR annotation_named_record_component_association OR " & "variable_declaration OR unconstrained_array_assignment OR assignment_statement OR condition OR case_statement OR " & "priority_pragma OR delay_statement OR store OR store_list OR predicate in Expression_Type_From_Context"); end case; end Expression_Type_From_Context; spark-2012.0.deb/examiner/dict.smf0000644000175000017500000000254511753202337015712 0ustar eugeneugendictionary.adb -vcg dictionary-add_declaration.adb -vcg dictionary-add_generic_formal_parameter_local.adb -vcg dictionary-add_record_component.adb -vcg dictionary-add_record_subcomponent.adb -vcg dictionary-add_renaming_declaration.adb -vcg dictionary-add_subprogram_parameter.adb -vcg dictionary-add_use_type_reference.adb -vcg dictionary-addinheritsreference.adb -vcg dictionary-addloop.adb -vcg dictionary-addwithreference.adb -vcg dictionary-attribute_is_visible.adb -vcg dictionary-attribute_is_visible_but_obsolete_local.adb -vcg dictionary-dynamic_symbol_table.adb -vcg dictionary-generatesimplename.adb -vcg dictionary-get_binary_operator_type_local.adb -vcg dictionary-get_record_component.adb dictionary-get_scalar_attribute_type.adb -vcg dictionary-get_scalar_attribute_value.adb -vcg dictionary-getanyprefixneeded.adb -vcg dictionary-getscope.adb -vcg dictionary-initialize.adb -vcg dictionary-instantiate_subprogram_parameters.adb -vcg dictionary-is_callable.adb -vcg dictionary-is_renamed_local.adb -vcg dictionary-lookupitem.adb -vcg dictionary-lookupscope.adb -vcg dictionary-lookupselecteditem.adb -vcg dictionary-nextsymbol.adb -vcg dictionary-operator_is_visible.adb -vcg dictionary-rawdict.adb -vcg dictionary-search_for_inherited_operations.adb -vcg 
dictionary-targetdata.adb -vcg dictionary-write.adb -vcg dictionary-writeoperatorrenamingdeclaration.adb -vcg spark-2012.0.deb/examiner/spark_xml.adb0000644000175000017500000013450211753202336016726 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SPARK_IO, Ada.Characters.Latin_1; package body SPARK_XML is ------------------------------- -- General Utility functions -- ------------------------------- -- Simple constructor function to build an E_Strings.T (presently an ExaminerString) -- This is public because it makes defining schemas easier, without having to keep -- track of lots of temporary variables for all the strings. function X_Str (Str : in String) return E_Strings.T is begin return E_Strings.Copy_String (Str => Str); end X_Str; -- Cleans up a string, performing the following replacements -- Character Replacement -- ========= =========== -- < &lt; -- > &gt; -- & &amp; -- ' &apos; -- " &quot; -- These are fundamental characters in XML and cannot occur in -- character data (tag attributes, or character data).
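   -- For example (illustrative only), filtering the value
   --    size="x<y & z"
   -- would produce
   --    size=&quot;x&lt;y &amp; z&quot;
   -- so that it can be embedded safely in an attribute value or in character data.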
function Filter_String (Str : in E_Strings.T) return E_Strings.T is Old_String : E_Strings.T; New_String : E_Strings.T := E_Strings.Empty_String; Ch : Character; begin Old_String := Str; while (E_Strings.Get_Length (E_Str => Old_String) > 0) loop E_Strings.Pop_Char (E_Str => Old_String, Char => Ch); case Ch is when '<' => E_Strings.Append_String (E_Str => New_String, Str => "<"); when '>' => E_Strings.Append_String (E_Str => New_String, Str => ">"); when '&' => E_Strings.Append_String (E_Str => New_String, Str => "&"); when ''' => E_Strings.Append_String (E_Str => New_String, Str => "'"); when '"' => E_Strings.Append_String (E_Str => New_String, Str => """); when others => E_Strings.Append_Char (E_Str => New_String, Ch => Ch); end case; end loop; return New_String; end Filter_String; ------------------- -- Schema Access -- ------------------- function Get_Tag_Name (Schema : in Schema_Record; TID : in Tag_ID) return E_Strings.T is begin return Schema.Tags.Tag_Array (TID).Name; end Get_Tag_Name; function Get_Attribute_Name (Schema : in Schema_Record; AID : in Attribute_ID) return E_Strings.T is begin return (Schema.Attributes.Attribute_Array (AID).Name); end Get_Attribute_Name; function Get_Tag_Attributes (Schema : in Schema_Record; TID : in Tag_ID) return Tag_Attribute_Array is begin return Schema.Tags.Tag_Array (TID).Tag_Attributes; end Get_Tag_Attributes; function Get_Tag_Attribute (Schema : in Schema_Record; TID : in Tag_ID; TAID : in Tag_Attribute_Array_Index) return Attribute_ID is begin return Schema.Tags.Tag_Array (TID).Tag_Attributes (TAID); end Get_Tag_Attribute; function Find_Tag (Schema : in Schema_Record; Name : in E_Strings.T) return Tag_ID is Found : Tag_ID := Null_Tag; begin for I in Tag_ID loop if E_Strings.Eq_String (E_Str1 => Name, E_Str2 => Get_Tag_Name (Schema => Schema, TID => I)) then Found := I; exit; end if; end loop; return Found; end Find_Tag; function Get_Attribute_Type (Schema : in Schema_Record; AID : in Attribute_ID) return Attribute_Type is begin return Schema.Attributes.Attribute_Array (AID).Content_Type; end Get_Attribute_Type; function Attribute_Is_Required (Schema : in Schema_Record; AID : in Attribute_ID) return Boolean is begin return Schema.Attributes.Attribute_Array (AID).Required; end Attribute_Is_Required; function Is_Null_Attribute (AID : in Attribute_ID) return Boolean is begin return (AID = Null_Attribute); end Is_Null_Attribute; function Is_Null_Tag (TID : in Tag_ID) return Boolean is begin return (TID = Null_Tag); end Is_Null_Tag; function Get_Last_Child_Tag (Schema : in Schema_Record; TID : in Tag_ID) return Child_Tag_Array_Index is begin return Schema.Tags.Tag_Array (TID).Last_Child; end Get_Last_Child_Tag; function Get_Child_Tags (Schema : in Schema_Record; TID : in Tag_ID) return Child_Tag_Array is begin return Schema.Tags.Tag_Array (TID).Child_Tags; end Get_Child_Tags; function Get_Child_Tag (Schema : in Schema_Record; TID : in Tag_ID; CTID : in Child_Tag_Array_Index) return Child_Tag is begin return Schema.Tags.Tag_Array (TID).Child_Tags (CTID); end Get_Child_Tag; function Is_Legal_Child (Schema : in Schema_Record; Parent : in Tag_ID; Child : in Tag_ID) return Boolean is Found : Boolean := False; Children : Child_Tag_Array; Upper : Child_Tag_Array_Index; begin if not Is_Null_Tag (TID => Child) then -- The Null tag is never valid. Children := Get_Child_Tags (Schema => Schema, TID => Parent); Upper := Get_Last_Child_Tag (Schema => Schema, TID => Parent); for I in Child_Tag_Array_Index range Child_Tag_Array_Index'First .. 
Upper loop --# assert Upper = Upper% and Upper in Child_Tag_Array_Index; if Children (I).Child = Child then Found := True; exit; end if; end loop; end if; return Found; end Is_Legal_Child; --------------------- -- Schema creation -- --------------------- -- Initialise the schema variables. procedure Init_Schema (Schema : out Schema_Record) is begin Schema := Empty_Schema_Record; end Init_Schema; -- Add a tag to the schema procedure Add_Tag (Schema : in out Schema_Record; Name : in E_Strings.T; ID : out Tag_ID) is begin if (Schema.Tags.Last_Tag < Tag_ID'Last) then Schema.Tags.Last_Tag := Tag_ID'Succ (Schema.Tags.Last_Tag); Schema.Tags.Tag_Array (Schema.Tags.Last_Tag).Name := Name; ID := Schema.Tags.Last_Tag; else ID := Null_Tag; end if; end Add_Tag; procedure Add_Attribute_To_Tag (Schema : in out Schema_Record; TID : in Tag_ID; Name : in E_Strings.T; Content_Type : in Attribute_Type; Required : in Boolean; ID : out Attribute_ID; Success : out Boolean) is Temp_AID : Attribute_ID; -- Add an attribute to a tag procedure Add_Attribute (Schema : in out Schema_Record; Name : in E_Strings.T; Content_Type : in Attribute_Type; Required : in Boolean; ID : out Attribute_ID) --# derives ID from Schema & --# Schema from *, --# Content_Type, --# Name, --# Required; is begin if (Schema.Attributes.Last_Attribute < Attribute_ID'Last) then Schema.Attributes.Last_Attribute := Attribute_ID'Succ (Schema.Attributes.Last_Attribute); Schema.Attributes.Attribute_Array (Schema.Attributes.Last_Attribute) := Attribute'(Name => Name, Content_Type => Content_Type, Required => Required); ID := Schema.Attributes.Last_Attribute; else ID := Null_Attribute; -- Return the null attribute to indicate failure end if; end Add_Attribute; procedure Attach_Attribute (Schema : in out Schema_Record; AID : in Attribute_ID; TID : in Tag_ID; Success : out Boolean) --# derives Schema from *, --# AID, --# TID & --# Success from Schema, --# TID; is Tmp_Tag : Tag; begin Tmp_Tag := Schema.Tags.Tag_Array (TID); if (Tmp_Tag.Last_Tag_Attribute < Max_Attributes_Per_Tag) then Tmp_Tag.Tag_Attributes (Tmp_Tag.Last_Tag_Attribute) := AID; Tmp_Tag.Last_Tag_Attribute := Tag_Attribute_Array_Index'Succ (Tmp_Tag.Last_Tag_Attribute); Schema.Tags.Tag_Array (TID) := Tmp_Tag; Success := True; else Success := False; end if; end Attach_Attribute; begin Add_Attribute (Schema => Schema, Name => Name, Content_Type => Content_Type, Required => Required, ID => Temp_AID); if not Is_Null_Attribute (AID => Temp_AID) then Attach_Attribute (Schema => Schema, AID => Temp_AID, TID => TID, Success => Success); else -- Failed to add the attribute to the attribute database Success := False; end if; ID := Temp_AID; end Add_Attribute_To_Tag; procedure Add_Child_Tag (Schema : in out Schema_Record; TID : in Tag_ID; Child : in Tag_ID; Required : in Boolean; Success : out Boolean) is begin if (Schema.Tags.Tag_Array (TID).Last_Child < Child_Tag_Array_Index'Last) then Schema.Tags.Tag_Array (TID).Child_Tags (Schema.Tags.Tag_Array (TID).Last_Child) := Child_Tag'(Child => Child, Required => Required); Schema.Tags.Tag_Array (TID).Last_Child := Child_Tag_Array_Index'Succ (Schema.Tags.Tag_Array (TID).Last_Child); Success := True; else Success := False; end if; end Add_Child_Tag; procedure Add_CDATA (Schema : in out Schema_Record; TID : in Tag_ID) is begin Schema.Tags.Tag_Array (TID).Allow_CDATA := True; end Add_CDATA; function CDATA (Schema : in Schema_Record; TID : in Tag_ID) return Boolean is begin return Schema.Tags.Tag_Array (TID).Allow_CDATA; end CDATA; 
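   -- A possible build sequence for a small schema, shown only as an illustrative
   -- sketch: the names S, Root_ID, File_ID, Attr_ID and OK, and the tag and
   -- attribute names "report", "file" and "name", are placeholders and are not
   -- part of this package.
   --
   --    Init_Schema (Schema => S);
   --    Add_Tag (Schema => S, Name => X_Str ("report"), ID => Root_ID);
   --    Add_Tag (Schema => S, Name => X_Str ("file"), ID => File_ID);
   --    Add_Attribute_To_Tag (Schema => S, TID => File_ID, Name => X_Str ("name"),
   --                          Content_Type => At_String, Required => True,
   --                          ID => Attr_ID, Success => OK);
   --    Add_Child_Tag (Schema => S, TID => Root_ID, Child => File_ID,
   --                   Required => False, Success => OK);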
----------------------------------- -- ScheamState access and update -- ----------------------------------- procedure Init_Schema_State (Schema_State : out Schema_State_Record) is begin Schema_State := Empty_Schema_State_Record; end Init_Schema_State; function Tag_Stack_Peek (Schema_State : in Schema_State_Record) return Tag_ID is begin return Schema_State.Tag_Stack.Stack (Schema_State.Tag_Stack.Current); end Tag_Stack_Peek; function Tag_Stack_Peek_N (Schema_State : in Schema_State_Record; N : in Tag_Depth) return Tag_ID is begin return Schema_State.Tag_Stack.Stack (N); end Tag_Stack_Peek_N; function Tag_Stack_Depth (Schema_State : in Schema_State_Record) return Tag_Depth is begin return Schema_State.Tag_Stack.Current; end Tag_Stack_Depth; function Tag_Stack_Empty (Schema_State : in Schema_State_Record) return Boolean is begin return (Tag_Stack_Depth (Schema_State => Schema_State) = Tag_Depth'First); end Tag_Stack_Empty; function Tag_Stack_Full (Schema_State : in Schema_State_Record) return Boolean is begin return (Tag_Stack_Depth (Schema_State => Schema_State) = Tag_Depth'Last); end Tag_Stack_Full; procedure Tag_Stack_Push (Schema_State : in out Schema_State_Record; Depth : out Tag_Depth; Status : out Schema_Status) --# derives Depth, --# Schema_State, --# Status from Schema_State; is Return_Depth : Tag_Depth := Tag_Depth'First; begin if Schema_State.Tag_Stack.Current = Tag_Depth'Last then -- Full Status := SS_Stack_Full; else Schema_State.Tag_Stack.Current := Tag_Depth'Succ (Schema_State.Tag_Stack.Current); Schema_State.Tag_Stack.Stack (Schema_State.Tag_Stack.Current) := Schema_State.Working_Tag.TID; Return_Depth := Schema_State.Tag_Stack.Current; Status := SS_OK; end if; Depth := Return_Depth; end Tag_Stack_Push; procedure Tag_Stack_Pop (Schema_State : in out Schema_State_Record; TID : out Tag_ID; Status : out Schema_Status) --# derives Schema_State, --# Status, --# TID from Schema_State; is begin if Schema_State.Tag_Stack.Current = Tag_Depth'First then -- Empty Status := SS_Stack_Empty; TID := Null_Tag; else TID := Tag_Stack_Peek (Schema_State => Schema_State); Schema_State.Tag_Stack.Current := Tag_Depth'Pred (Schema_State.Tag_Stack.Current); Status := SS_OK; end if; end Tag_Stack_Pop; function Get_Working_Attribute_Val (Schema_State : in Schema_State_Record; TAID : in Tag_Attribute_Array_Index) return E_Strings.T is begin return E_Strings.Trim (Schema_State.Working_Tag.Attribs (TAID).Val); end Get_Working_Attribute_Val; function Get_Working_Attribute_ID (Schema_State : in Schema_State_Record; TAID : in Tag_Attribute_Array_Index) return Attribute_ID is begin return Schema_State.Working_Tag.Attribs (TAID).AID; end Get_Working_Attribute_ID; procedure Set_Working_Attribute (Schema_State : in out Schema_State_Record; TAID : in Tag_Attribute_Array_Index; AID : in Attribute_ID; Val : in E_Strings.T) --# derives Schema_State from *, --# AID, --# TAID, --# Val; is begin Schema_State.Working_Tag.Attribs (TAID).AID := AID; Schema_State.Working_Tag.Attribs (TAID).Val := Val; end Set_Working_Attribute; function Tag_Stack_Hunt_Up (Schema_State : in Schema_State_Record; TID : in Tag_ID) return Tag_Depth is Location : Tag_Depth := Tag_Depth'First; Upper : Tag_Depth; begin Upper := Tag_Stack_Depth (Schema_State => Schema_State); for I in Tag_Depth range Tag_Depth'First .. 
Upper loop --# assert Upper = Upper% and Upper in Tag_Depth; if TID = Tag_Stack_Peek_N (Schema_State => Schema_State, N => I) then Location := I; exit; end if; end loop; return Location; end Tag_Stack_Hunt_Up; function Tag_Stack_Hunt_Down (Schema_State : in Schema_State_Record; TID : in Tag_ID) return Tag_Depth is Location : Tag_Depth := Tag_Depth'First; Upper : Tag_Depth; begin Upper := Tag_Stack_Depth (Schema_State => Schema_State); for I in reverse Tag_Depth range Tag_Depth'First .. Upper loop --# assert Upper = Upper% and Upper in Tag_Depth; if TID = Tag_Stack_Peek_N (Schema_State => Schema_State, N => I) then Location := I; exit; end if; end loop; return Location; end Tag_Stack_Hunt_Down; function Get_Req_Attributes (Schema : in Schema_Record; Schema_State : in Schema_State_Record) return Tag_Attribute_Array is Tag_Attribute_List : Tag_Attribute_Array; -- Tag Attributes that we have found. RAID : Tag_Attribute_Array := Tag_Attribute_Array'(others => Null_Attribute); Next_Slot : Tag_Attribute_Array_Index := Tag_Attribute_Array_Index'First; -- Pointer to end of the list. begin Tag_Attribute_List := Get_Tag_Attributes (Schema => Schema, TID => Schema_State.Working_Tag.TID); for Current_Attribute in Tag_Attribute_Array_Index loop --# assert Next_Slot >= Tag_Attribute_Array_Index'First and --# Next_Slot <= Current_Attribute; if Attribute_Is_Required (Schema => Schema, AID => Tag_Attribute_List (Current_Attribute)) then RAID (Next_Slot) := Tag_Attribute_List (Current_Attribute); -- If there is room for another, increment the Next_Slot. -- As both arrays are the same size, if this condition fails then the loop -- will exit. if Next_Slot < Tag_Attribute_Array_Index'Last then Next_Slot := Next_Slot + 1; end if; end if; end loop; return RAID; end Get_Req_Attributes; function Attribute_Is_Set (Schema_State : in Schema_State_Record; AID : in Attribute_ID) return Boolean is Success : Boolean := False; begin for I in Tag_Attribute_Array_Index loop if Schema_State.Working_Tag.Attribs (I).AID = AID then Success := True; exit; end if; end loop; return Success; end Attribute_Is_Set; function All_Required_Attributes (Schema : in Schema_Record; Schema_State : in Schema_State_Record) return Boolean is Required : Tag_Attribute_Array; Success : Boolean := True; begin Required := Get_Req_Attributes (Schema => Schema, Schema_State => Schema_State); for I in Tag_Attribute_Array_Index loop if not Attribute_Is_Set (Schema_State => Schema_State, AID => Required (I)) then Success := False; exit; end if; end loop; return Success; end All_Required_Attributes; function Valid_Working_Tag (Schema : in Schema_Record; Schema_State : in Schema_State_Record) return Boolean is begin return All_Required_Attributes (Schema => Schema, Schema_State => Schema_State) and (not (Schema_State.Working_Tag = Empty_Working_Tag)); end Valid_Working_Tag; ----------- -- Debug -- ----------- function Is_Error (Error : in Schema_Status) return Boolean is begin return not (Error = SS_OK); end Is_Error; procedure Print_Schema_Error (Error : in Schema_Status) is --# hide Print_Schema_Error; type Message_Array is array (Schema_Status) of String (1 .. 
55); Messages : constant Message_Array := Message_Array' (SS_OK => "Schema state OK ", SS_Invalid_Attribute => "Invalid attribute for working tag ", SS_Invalid_Tag => "Invalid tag at this point ", SS_To_Many_Attributes => "Reached attribute limit ", SS_Wrong_Content_Type => "Attempt to assign value of incorrect type to attribute ", SS_Stack_Full => "The Schema Stack is full ", SS_Stack_Empty => "The Schema Stack is empty ", SS_Tag_Incomplete => "One or more required attribute is missing ", SS_Invalid_Depth => "There are no tags at this depth ", SS_No_Such_Tag => "The specified tag is not in the schema ", SS_Tag_Not_Found => "Could not find an instance of that tag in the hierarchy"); begin SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Messages (Error), 55); end Print_Schema_Error; procedure Print_Working_State (Schema : in Schema_Record; Schema_State : in Schema_State_Record) is --# hide Print_Working_State; procedure Print_Tag_Schema (Tag : in Tag_ID) is begin E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => X_Str (Str => "<<<<<<<<<< Tag Schema >>>>>>>>>>")); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => X_Str (Str => "Tag : ")); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Get_Tag_Name (Schema => Schema, TID => Tag)); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => X_Str (Str => " ")); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => X_Str (Str => "Attributes")); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => X_Str (Str => "----------")); for I in Tag_Attribute_Array_Index loop declare Tmp_Attribute_ID : constant Attribute_ID := Get_Tag_Attribute (Schema => Schema, TID => Tag, TAID => I); begin if Attribute_Is_Required (Schema => Schema, AID => Tmp_Attribute_ID) then E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => X_Str (Str => " * ")); else E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => X_Str (Str => " ")); end if; E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => Get_Attribute_Name (Schema => Schema, AID => Tmp_Attribute_ID)); case Get_Attribute_Type (Schema => Schema, AID => Tmp_Attribute_ID) is when At_String => E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => X_Str (Str => " STRING")); when At_Integer => E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => X_Str (Str => " INTEGER")); when At_Float => E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => X_Str (Str => " FLOAT")); when At_Null => E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => X_Str (Str => "")); end case; end; end loop; E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => X_Str (Str => "Child Tags")); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => X_Str (Str => "----------")); for I in Child_Tag_Array_Index loop declare Tmp_Child : constant Child_Tag := Get_Child_Tag (Schema => Schema, TID => Tag, CTID => I); begin if Tmp_Child.Required then E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => X_Str (Str => " * ")); else E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => X_Str (Str => " ")); end if; E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Get_Tag_Name (Schema => Schema, TID => Tmp_Child.Child)); end; end loop; end Print_Tag_Schema; procedure Print_Working_Tag_State is procedure Print_Working_Attribute (Attrib : in Tag_Attribute_Array_Index) is begin E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => Get_Attribute_Name (Schema => Schema, AID 
=> Schema_State.Working_Tag.Attribs (Attrib).AID)); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => X_Str (Str => " = ")); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Schema_State.Working_Tag.Attribs (Attrib).Val); end Print_Working_Attribute; begin E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => X_Str (Str => "<<<<<<< Working_Tag State >>>>>>>")); for I in Tag_Attribute_Array_Index loop Print_Working_Attribute (Attrib => I); end loop; end Print_Working_Tag_State; procedure Print_Stack is begin E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => X_Str (Str => "<<<<<<<<<< Tag Stack >>>>>>>>>>")); for I in Tag_Depth loop E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => X_Str (Str => Tag_Depth'Image (I))); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => X_Str (Str => " ")); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Get_Tag_Name (Schema => Schema, TID => Tag_Stack_Peek_N (Schema_State => Schema_State, N => I))); exit when I = Schema_State.Tag_Stack.Current; end loop; end Print_Stack; begin E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => X_Str (Str => "================")); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => X_Str (Str => "START DEBUG DUMP ")); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => X_Str (Str => "================")); Print_Tag_Schema (Tag => Schema_State.Working_Tag.TID); Print_Tag_Schema (Tag => Tag_Stack_Peek (Schema_State => Schema_State)); Print_Working_Tag_State; Print_Stack; E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => X_Str (Str => "================")); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => X_Str (Str => " END DEBUG DUMP ")); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => X_Str (Str => "================")); end Print_Working_State; ------------------ -- Tag Creation -- ------------------ -- This should only be used carefully. It opens a tag without first checking that -- it is a legal child of the presently open tag. This is for use when tags have to -- be generated out of order. procedure Init_Opening_Tag_No_Check (Schema_State : in out Schema_State_Record; TID : in Tag_ID; Status : out Schema_Status) is begin if Tag_Stack_Full (Schema_State => Schema_State) then -- Check that we can actually generate another tag Status := SS_Stack_Full; else if Is_Null_Tag (TID => TID) then Status := SS_No_Such_Tag; -- Null tag. else Schema_State.Working_Tag := Working_Tag_Type' (TID => TID, Attribs => Working_Attribute_Array'(others => Working_Attribute'(AID => Null_Attribute, Val => E_Strings.Empty_String))); Status := SS_OK; end if; end if; end Init_Opening_Tag_No_Check; -- Opening tags -- Initialise the opening tag, then add attributes to it. -- Then call Output_Opening_Tag to return the string. 
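   -- A possible calling sequence for emitting one complete tag, shown only as an
   -- illustrative sketch: S, SS, Line, D and St, and the tag and attribute names
   -- "file" and "name", are placeholders and are not part of this package.
   --
   --    Init_Opening_Tag (Schema => S, Schema_State => SS, Name => X_Str ("file"), Status => St);
   --    Add_Attribute_Str (Schema => S, Schema_State => SS, Name => X_Str ("name"),
   --                       Value => X_Str ("clists.ads"), Status => St);
   --    Output_Opening_Tag (Schema => S, Schema_State => SS, XML => Line, Depth => D, Status => St);
   --    -- ... emit any child tags here ...
   --    Close_Tag_By_Name (Schema => S, Schema_State => SS, Name => X_Str ("file"), XML => Line, Status => St);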
procedure Init_Opening_Tag_By_ID (Schema : in Schema_Record; Schema_State : in out Schema_State_Record; TID : in Tag_ID; Status : out Schema_Status) is begin if Tag_Stack_Full (Schema_State => Schema_State) then -- Check that we can actually generate another tag Status := SS_Stack_Full; else if Is_Legal_Child (Schema => Schema, Parent => Tag_Stack_Peek (Schema_State => Schema_State), Child => TID) then Schema_State.Working_Tag := Working_Tag_Type' (TID => TID, Attribs => Working_Attribute_Array'(others => Working_Attribute'(AID => Null_Attribute, Val => E_Strings.Empty_String))); Status := SS_OK; else Schema_State.Working_Tag := Empty_Working_Tag; Status := SS_Invalid_Tag; end if; end if; end Init_Opening_Tag_By_ID; procedure Init_Opening_Tag (Schema : in Schema_Record; Schema_State : in out Schema_State_Record; Name : in E_Strings.T; Status : out Schema_Status) is begin Init_Opening_Tag_By_ID (Schema => Schema, Schema_State => Schema_State, TID => Find_Tag (Schema => Schema, Name => Name), Status => Status); end Init_Opening_Tag; procedure Find_Attribute (Schema : in Schema_Record; Tag_Ident : in Tag_ID; Name : in E_Strings.T; CType : in Attribute_Type; AID : out Attribute_ID; Status : out Schema_Status) --# derives AID, --# Status from CType, --# Name, --# Schema, --# Tag_Ident; is A_Array : Tag_Attribute_Array; Found : Attribute_ID := Null_Attribute; begin A_Array := Get_Tag_Attributes (Schema => Schema, TID => Tag_Ident); for I in Tag_Attribute_Array_Index loop if E_Strings.Eq_String (E_Str1 => Name, E_Str2 => Get_Attribute_Name (Schema => Schema, AID => A_Array (I))) then Found := A_Array (I); exit; end if; end loop; if Is_Null_Attribute (AID => Found) then Status := SS_Invalid_Attribute; AID := Null_Attribute; elsif not (Get_Attribute_Type (Schema => Schema, AID => Found) = CType) then Status := SS_Wrong_Content_Type; AID := Null_Attribute; else Status := SS_OK; AID := Found; end if; end Find_Attribute; procedure Add_Working_Attribute (Schema_State : in out Schema_State_Record; AID : in Attribute_ID; Value : in E_Strings.T; Status : out Schema_Status) --# derives Schema_State from *, --# AID, --# Value & --# Status from Schema_State; is Found : Tag_Attribute_Array_Index := Tag_Attribute_Array_Index'First; begin -- Find the next free slot for I in Tag_Attribute_Array_Index loop if Is_Null_Attribute (AID => Get_Working_Attribute_ID (Schema_State => Schema_State, TAID => I)) then Found := I; exit; end if; end loop; -- Add the attribute to the working tag if Is_Null_Attribute (AID => Get_Working_Attribute_ID (Schema_State => Schema_State, TAID => Found)) then Set_Working_Attribute (Schema_State, Found, AID, Filter_String (Str => Value)); Status := SS_OK; else -- We didn't find a free spot Status := SS_To_Many_Attributes; end if; end Add_Working_Attribute; procedure Add_Attribute_Str (Schema : in Schema_Record; Schema_State : in out Schema_State_Record; Name : in E_Strings.T; Value : in E_Strings.T; Status : out Schema_Status) is Tmp_Attribute : Attribute_ID; Tmp_Status : Schema_Status; begin -- Find the attribute in the schema Find_Attribute (Schema => Schema, Tag_Ident => Schema_State.Working_Tag.TID, Name => Name, CType => At_String, AID => Tmp_Attribute, Status => Tmp_Status); if not (Tmp_Status = SS_OK) then Status := Tmp_Status; else Add_Working_Attribute (Schema_State => Schema_State, AID => Tmp_Attribute, Value => Value, Status => Status); end if; end Add_Attribute_Str; -- Removes all spaces from an Examiner_String -- In XML, a non-string attribute cannot contain spaces. 
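   -- For example (illustrative), a value image such as " 42" is reduced to "42"
   -- before it is written out as an attribute value (see Add_Attribute_Int below).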
function Strip_String (Str : in E_Strings.T) return E_Strings.T is Ch : Character; Old_String : E_Strings.T; New_String : E_Strings.T := E_Strings.Empty_String; begin Old_String := Str; while (E_Strings.Get_Length (E_Str => Old_String) > 0) loop E_Strings.Pop_Char (E_Str => Old_String, Char => Ch); if not (Ch = ' ') then E_Strings.Append_Char (E_Str => New_String, Ch => Ch); end if; end loop; return New_String; end Strip_String; procedure Add_Attribute_Int (Schema : in Schema_Record; Schema_State : in out Schema_State_Record; Name : in E_Strings.T; Value : in Integer; Status : out Schema_Status) is Tmp_Attribute : Attribute_ID; Tmp_Status : Schema_Status; Tmp_String : E_Strings.T; begin -- Find the attribute in the schema Find_Attribute (Schema => Schema, Tag_Ident => Schema_State.Working_Tag.TID, Name => Name, CType => At_Integer, AID => Tmp_Attribute, Status => Tmp_Status); if not (Tmp_Status = SS_OK) then Status := Tmp_Status; else E_Strings.Put_Int_To_String (Dest => Tmp_String, Item => Value, Start_Pt => 1, Base => 10); Add_Working_Attribute (Schema_State => Schema_State, AID => Tmp_Attribute, Value => Strip_String (Str => Tmp_String), Status => Status); end if; end Add_Attribute_Int; function Output_Attributes (Schema : in Schema_Record; Schema_State : in Schema_State_Record) return E_Strings.T is Temp_String : E_Strings.T := E_Strings.Empty_String; begin for I in Tag_Attribute_Array_Index loop if not Is_Null_Attribute (AID => Get_Working_Attribute_ID (Schema_State => Schema_State, TAID => I)) then E_Strings.Append_String (E_Str => Temp_String, Str => " "); E_Strings.Append_Examiner_String (E_Str1 => Temp_String, E_Str2 => Get_Attribute_Name (Schema => Schema, AID => Schema_State.Working_Tag.Attribs (I).AID)); E_Strings.Append_String (E_Str => Temp_String, Str => "="""); E_Strings.Append_Examiner_String (E_Str1 => Temp_String, E_Str2 => Get_Working_Attribute_Val (Schema_State => Schema_State, TAID => I)); E_Strings.Append_String (E_Str => Temp_String, Str => """"); end if; exit when Is_Null_Attribute (AID => Get_Working_Attribute_ID (Schema_State => Schema_State, TAID => I)); end loop; return Temp_String; end Output_Attributes; procedure Output_Opening_Tag (Schema : in Schema_Record; Schema_State : in out Schema_State_Record; XML : out E_Strings.T; Depth : out Tag_Depth; Status : out Schema_Status) is Temp_String : E_Strings.T := E_Strings.Empty_String; Indent_Temp_String : E_Strings.T := E_Strings.Empty_String; Tmp_Depth : Tag_Depth := Tag_Depth'First; Tmp_Status : Schema_Status; begin if Valid_Working_Tag (Schema => Schema, Schema_State => Schema_State) then -- Start with " Temp_String, Str => "<"); E_Strings.Append_Examiner_String (E_Str1 => Temp_String, E_Str2 => Get_Tag_Name (Schema => Schema, TID => Schema_State.Working_Tag.TID)); -- Add the attributes E_Strings.Append_Examiner_String (E_Str1 => Temp_String, E_Str2 => Output_Attributes (Schema => Schema, Schema_State => Schema_State)); -- End the opening Tag E_Strings.Append_String (E_Str => Temp_String, Str => ">"); -- Push the tag onto the top of the tagstack; Tag_Stack_Push (Schema_State => Schema_State, Depth => Tmp_Depth, Status => Tmp_Status); XML := E_Strings.Empty_String; if (Tmp_Status = SS_Stack_Full) then -- The stack is full Depth := Tmp_Depth; Status := Tmp_Status; else Status := SS_OK; Depth := Tmp_Depth; E_Strings.Append_Char (E_Str => Indent_Temp_String, Ch => Ada.Characters.Latin_1.LF); for I in Tag_Depth range 2 .. 
Tmp_Depth loop E_Strings.Append_Char (E_Str => Indent_Temp_String, Ch => ' '); end loop; E_Strings.Append_Examiner_String (E_Str1 => Indent_Temp_String, E_Str2 => Temp_String); XML := Indent_Temp_String; end if; else Status := SS_Tag_Incomplete; Depth := Tmp_Depth; XML := E_Strings.Empty_String; end if; end Output_Opening_Tag; function Closing_Tag_String (Schema : in Schema_Record; Schema_State : in Schema_State_Record; TID : in Tag_ID) return E_Strings.T is Tmp_String : E_Strings.T := E_Strings.Empty_String; begin E_Strings.Append_Char (E_Str => Tmp_String, Ch => Ada.Characters.Latin_1.LF); for I in Tag_Depth range 2 .. Tag_Stack_Depth (Schema_State => Schema_State) + 1 loop -- Put in some indentation E_Strings.Append_Char (E_Str => Tmp_String, Ch => ' '); end loop; E_Strings.Append_String (E_Str => Tmp_String, Str => " Tmp_String, E_Str2 => Get_Tag_Name (Schema => Schema, TID => TID)); E_Strings.Append_String (E_Str => Tmp_String, Str => ">"); return Tmp_String; end Closing_Tag_String; -- Closing tags procedure Close_Tag (Schema : in Schema_Record; Schema_State : in out Schema_State_Record; Depth : in Tag_Depth; XML : out E_Strings.T; Status : out Schema_Status) is Tmp_String : E_Strings.T := E_Strings.Empty_String; Closing_Tag : Tag_ID; Tmp_Status : Schema_Status := SS_OK; Closing_Tag_Str : E_Strings.T; begin if Tag_Stack_Empty (Schema_State => Schema_State) then -- Everything is already closed Status := SS_Stack_Empty; XML := Tmp_String; elsif (Depth > Tag_Stack_Depth (Schema_State => Schema_State)) -- Whoops, trying to close an unopened tag or (Depth = Tag_Depth'First) then -- or the empty tag. Status := SS_Invalid_Depth; XML := Tmp_String; else while (Depth <= Tag_Stack_Depth (Schema_State => Schema_State)) loop Tag_Stack_Pop (Schema_State => Schema_State, TID => Closing_Tag, Status => Tmp_Status); exit when not (Tmp_Status = SS_OK); Closing_Tag_Str := Closing_Tag_String (Schema => Schema, Schema_State => Schema_State, TID => Closing_Tag); E_Strings.Append_Examiner_String (E_Str1 => Tmp_String, E_Str2 => Closing_Tag_Str); end loop; Status := Tmp_Status; XML := Tmp_String; end if; end Close_Tag; -- Close the lowest tag in the stack that matches TID procedure Close_Tag_By_ID (Schema : in Schema_Record; Schema_State : in out Schema_State_Record; TID : in Tag_ID; XML : out E_Strings.T; Status : out Schema_Status) is Depth : Tag_Depth; begin Depth := Tag_Stack_Hunt_Up (Schema_State => Schema_State, TID => TID); if Depth = Tag_Depth'First then -- Not found Status := SS_Tag_Not_Found; XML := E_Strings.Empty_String; else Close_Tag (Schema => Schema, Schema_State => Schema_State, Depth => Depth, XML => XML, Status => Status); end if; end Close_Tag_By_ID; -- Close the lowest tag in the stack that matches TID procedure Close_Top_Tag_By_ID (Schema : in Schema_Record; Schema_State : in out Schema_State_Record; TID : in Tag_ID; XML : out E_Strings.T; Status : out Schema_Status) is Depth : Tag_Depth; begin Depth := Tag_Stack_Hunt_Down (Schema_State => Schema_State, TID => TID); if Depth = Tag_Depth'First then -- Not found Status := SS_Tag_Not_Found; XML := E_Strings.Empty_String; else Close_Tag (Schema => Schema, Schema_State => Schema_State, Depth => Depth, XML => XML, Status => Status); end if; end Close_Top_Tag_By_ID; -- Close the lowest tag in the stack that matches TID procedure Close_Tag_By_Name (Schema : in Schema_Record; Schema_State : in out Schema_State_Record; Name : in E_Strings.T; XML : out E_Strings.T; Status : out Schema_Status) is TID : Tag_ID; begin TID := Find_Tag (Schema => 
Schema, Name => Name); if Is_Null_Tag (TID => TID) then Status := SS_No_Such_Tag; XML := E_Strings.Empty_String; else Close_Tag_By_ID (Schema => Schema, Schema_State => Schema_State, TID => TID, XML => XML, Status => Status); end if; end Close_Tag_By_Name; end SPARK_XML; spark-2012.0.deb/examiner/sem-walk_expression_p-find_named_argument_association_parameter.adb0000644000175000017500000001073511753202336031665 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Walk_Expression_P) procedure Find_Named_Argument_Association_Parameter (Node : in STree.SyntaxNode; Subprog_Sym : in Dictionary.Symbol; Name_Is_Parameter_Name : out Boolean; Param_Sym : out Dictionary.Symbol) is Ident_Node : STree.SyntaxNode; Ident_Str : LexTokenManager.Lex_String; It : Dictionary.Iterator; Sym : Dictionary.Symbol; function Find_Identifier (Node : STree.SyntaxNode) return STree.SyntaxNode --# global in STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.named_argument_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_named_argument_association; --# return Return_Node => STree.Syntax_Node_Type (Return_Node, STree.Table) = SP_Symbols.identifier; is Ident_Node : STree.SyntaxNode; begin Ident_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Ident_Node = annotation_named_argument_association OR annotation_simple_name OR -- named_argument_association OR simple_name if STree.Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.simple_name or else STree.Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.annotation_simple_name then -- ASSUME Ident_Node = annotation_simple_name OR simple_name Ident_Node := STree.Child_Node (Current_Node => Ident_Node); elsif STree.Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.named_argument_association or else STree.Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.annotation_named_argument_association then -- ASSUME Ident_Node = named_argument_association OR annotation_named_argument_association Ident_Node := STree.Child_Node (Current_Node => STree.Next_Sibling (Current_Node => Ident_Node)); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = annotation_named_argument_association OR annotation_simple_name OR " & "named_argument_association OR simple_name in Find_Identifier"); end if; -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Find_Identifier"); return Ident_Node; 
end Find_Identifier; begin -- Find_Named_Argument_Association_Parameter Ident_Node := Find_Identifier (Node => Node); Ident_Str := STree.Node_Lex_String (Node => Ident_Node); Name_Is_Parameter_Name := False; Param_Sym := Dictionary.NullSymbol; It := Dictionary.FirstSubprogramParameter (Subprog_Sym); while not Dictionary.IsNullIterator (It) loop --# assert STree.Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier and --# STree.Table = STree.Table~; Sym := Dictionary.CurrentSymbol (It); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Dictionary.GetSimpleName (Sym), Lex_Str2 => Ident_Str) = LexTokenManager.Str_Eq then Name_Is_Parameter_Name := True; Param_Sym := Sym; STree.Set_Node_Lex_String (Sym => Sym, Node => Ident_Node); exit; end if; It := Dictionary.NextSymbol (It); end loop; end Find_Named_Argument_Association_Parameter; spark-2012.0.deb/examiner/statistics.adb0000644000175000017500000003211511753202336017115 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with CommandLineData; with ExaminerConstants; with SystemErrors; package body Statistics is subtype Percentage is Integer range 0 .. 100; subtype Index_19 is Integer range 1 .. 19; subtype String_19 is String (Index_19); subtype Bucket_Index is Integer range 0 .. 
10; type Bucket_Array is array (Bucket_Index) of Natural; type Table_Data_Record is record Label : String_19; No_Of_Reports : Natural; Max_Size : Natural; -- Signifies a dynamic table when equal to 0 Min_Units_Used : Natural; Max_Units_Used : Natural; Buckets : Bucket_Array; Percent_Used : Percentage; end record; type Table_Usage_Array is array (Table_Type) of Table_Data_Record; -- Initialize data for each of the tables TableUsage : Table_Usage_Array := Table_Usage_Array' (RelationTable => Table_Data_Record'(Label => String_19'("Relation Table "), No_Of_Reports => 0, Max_Size => 0, Min_Units_Used => 0, Max_Units_Used => 0, Buckets => Bucket_Array'(others => 0), Percent_Used => 0), VCGHeap => Table_Data_Record'(Label => String_19'("VCG Heap "), No_Of_Reports => 0, Max_Size => 0, Min_Units_Used => 0, Max_Units_Used => 0, Buckets => Bucket_Array'(others => 0), Percent_Used => 0), StringTable => Table_Data_Record'(Label => String_19'("String Table "), No_Of_Reports => 0, Max_Size => ExaminerConstants.String_Table_Size, Min_Units_Used => 0, Max_Units_Used => 0, Buckets => Bucket_Array'(others => 0), Percent_Used => 0), SymbolTable => Table_Data_Record'(Label => String_19'("Symbol Table "), No_Of_Reports => 0, Max_Size => 0, Min_Units_Used => 0, Max_Units_Used => 0, Buckets => Bucket_Array'(others => 0), Percent_Used => 0), SyntaxTree => Table_Data_Record'(Label => String_19'("Syntax Tree "), No_Of_Reports => 0, Max_Size => ExaminerConstants.SyntaxTreeSize, Min_Units_Used => 0, Max_Units_Used => 0, Buckets => Bucket_Array'(others => 0), Percent_Used => 0), RecordFields => Table_Data_Record'(Label => String_19'("Record components "), No_Of_Reports => 0, Max_Size => ExaminerConstants.MaxRecordComponents, Min_Units_Used => 0, Max_Units_Used => 0, Buckets => Bucket_Array'(others => 0), Percent_Used => 0), RecordErrors => Table_Data_Record'(Label => String_19'("Record errors "), No_Of_Reports => 0, Max_Size => ExaminerConstants.MaxRecordErrors, Min_Units_Used => 0, Max_Units_Used => 0, Buckets => Bucket_Array'(others => 0), Percent_Used => 0)); function Calc_Percent (Size : in Natural; Max_Size : in Natural) return Percentage --# pre Max_Size >= Size and --# Max_Size /= 0; is T : Long_Long_Integer; R : Percentage; begin T := (Long_Long_Integer (Size) * 100) / Long_Long_Integer (Max_Size); -- Clip just to be on the safe side if T < 0 then R := 0; elsif T > 100 then R := 100; else R := Percentage (T); end if; return R; end Calc_Percent; procedure SetTableUsage (Table : in Table_Type; Size : in Natural) is Max_Size : Integer; begin Max_Size := TableUsage (Table).Max_Size; -- Is the value of "Size" silly (more than maximum)? if Max_Size > 0 and then Size > Max_Size then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Statistics_Usage_Greater_Than_Table_Size, Msg => "in Statistics.SetTableUsage"); -- Is more of the table being used than previously recorded? else if Size > TableUsage (Table).Max_Units_Used then -- Update number of units being used in table, and calculate -- percentage of table in use. TableUsage (Table).Max_Units_Used := Size; if Max_Size /= 0 then TableUsage (Table).Percent_Used := Calc_Percent (Size, Max_Size); end if; end if; if TableUsage (Table).Min_Units_Used = 0 or else Size < TableUsage (Table).Min_Units_Used then TableUsage (Table).Min_Units_Used := Size; end if; -- Add one to the appropriate size bucket -- In the highly unlikely event that a bucket becomes fulll -- no more is added. 
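   -- For example: a Size of 5000 is not less than 2**12 but is less than 2**13,
   -- so it is counted in bucket 3; a Size of 600_000 is at least 2**19, so it is
   -- counted in the final bucket.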
if Size >= 2 ** (10 + (Bucket_Index'Last - 1)) and then TableUsage (Table).Buckets (Bucket_Index'Last) < Integer'Last then TableUsage (Table).Buckets (Bucket_Index'Last) := TableUsage (Table).Buckets (Bucket_Index'Last) + 1; else for I in Bucket_Index range Bucket_Index'First .. Bucket_Index'Last - 1 loop if Size < 2 ** (10 + I) and then TableUsage (Table).Buckets (I) < Integer'Last then TableUsage (Table).Buckets (I) := TableUsage (Table).Buckets (I) + 1; exit; end if; end loop; end if; end if; end SetTableUsage; procedure WriteOutput (File : in SPARK_IO.File_Type) is Column2Posn : constant Integer := 20; Column3Posn : constant Integer := 30; Column4Posn : constant Integer := 40; Column5Posn : constant Integer := 50; LastColumn : constant Integer := 60; begin SPARK_IO.New_Line (File => File, Spacing => 2); SPARK_IO.Put_Line (File => File, Item => "Resource statistics", Stop => 0); SPARK_IO.New_Line (File => File, Spacing => 1); -- Write header lines SPARK_IO.Set_Col (File => File, Posn => Column3Posn); SPARK_IO.Put_Line (File => File, Item => "Units Used", Stop => 0); SPARK_IO.Put_String (File => File, Item => "Table", Stop => 0); SPARK_IO.Set_Col (File => File, Posn => Column3Posn - 5); SPARK_IO.Put_String (File => File, Item => "Min", Stop => 0); SPARK_IO.Set_Col (File => File, Posn => Column4Posn - 5); SPARK_IO.Put_String (File => File, Item => "Max", Stop => 0); SPARK_IO.Set_Col (File => File, Posn => Column5Posn - 10); SPARK_IO.Put_String (File => File, Item => " Max Size", Stop => 0); SPARK_IO.Set_Col (File => File, Posn => Column5Posn); SPARK_IO.Put_String (File => File, Item => " % used", Stop => 0); -- Write a line for each of the tables for Table in Table_Type loop SPARK_IO.New_Line (File => File, Spacing => 1); SPARK_IO.Put_String (File => File, Item => TableUsage (Table).Label, Stop => 0); SPARK_IO.Set_Col (File => File, Posn => Column2Posn); SPARK_IO.Put_Integer (File => File, Item => TableUsage (Table).Min_Units_Used, Width => Column3Posn - Column2Posn, Base => 10); SPARK_IO.Set_Col (File => File, Posn => Column3Posn); SPARK_IO.Put_Integer (File => File, Item => TableUsage (Table).Max_Units_Used, Width => Column4Posn - Column3Posn, Base => 10); SPARK_IO.Set_Col (File => File, Posn => Column4Posn); if TableUsage (Table).Max_Size /= 0 then SPARK_IO.Put_Integer (File => File, Item => TableUsage (Table).Max_Size, Width => Column5Posn - Column4Posn, Base => 10); SPARK_IO.Set_Col (File => File, Posn => Column5Posn); SPARK_IO.Put_Integer (File => File, Item => TableUsage (Table).Percent_Used, Width => LastColumn - Column5Posn, Base => 10); else SPARK_IO.Put_String (File => File, Item => " Dynamic", Stop => 0); SPARK_IO.Set_Col (File => File, Posn => Column5Posn); SPARK_IO.Put_String (File => File, Item => " Dynamic", Stop => 0); end if; end loop; SPARK_IO.New_Line (File => File, Spacing => 1); if CommandLineData.Content.Debug.Extra_Stats then -- Write out histogram SPARK_IO.New_Line (File => File, Spacing => 1); SPARK_IO.Put_Line (File => File, Item => "Histogram", Stop => 0); SPARK_IO.Put_String (File => File, Item => "Table ", Stop => 0); -- Note use '^' here rather than "**" to save horizontal space so the -- whole histogram fits in 80 characters. 
SPARK_IO.Put_String (File => File, Item => "2^10 2^11 2^12 2^13 2^14 2^15 2^16 2^17 2^18 2^19 2^20", Stop => 0); SPARK_IO.New_Line (File => File, Spacing => 1); for Table in Table_Type loop SPARK_IO.Put_String (File => File, Item => TableUsage (Table).Label, Stop => 0); for I in Bucket_Index loop SPARK_IO.Set_Col (File => File, Posn => (Index_19'Last + 3) + (5 * I)); SPARK_IO.Put_Integer (File => File, Item => TableUsage (Table).Buckets (I), Width => 0, Base => 10); end loop; SPARK_IO.New_Line (File => File, Spacing => 1); end loop; end if; end WriteOutput; end Statistics; spark-2012.0.deb/examiner/sem-walk_expression_p-range_constraint_type_from_context.adb0000644000175000017500000000720011753202336030424 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Walk_Expression_P) function Range_Constraint_Type_From_Context (Exp_Node : STree.SyntaxNode; E_Stack : Exp_Stack.Exp_Stack_Type; T_Stack : Type_Context_Stack.T_Stack_Type) return Dictionary.Symbol is New_Context_Type : Dictionary.Symbol; Parent : STree.SyntaxNode; begin Parent := STree.Parent_Node (Current_Node => Exp_Node); -- ASSUME Parent = integer_type_definition OR floating_point_constraint OR -- fixed_point_constraint OR derived_type_definition OR -- constraint OR case_choice OR -- aggregate_choice OR annotation_aggregate_choice case STree.Syntax_Node_Type (Node => Parent) is when SP_Symbols.case_choice => -- ASSUME Parent = case_choice -- In case_choice, the expected subtype is passed in from -- wf_case_choice, so no change here. New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); when SP_Symbols.aggregate_choice | SP_Symbols.annotation_aggregate_choice => -- ASSUME Parent = aggregate_choice OR annotation_aggregate_choice -- In *_aggregate_choice, the range constraint is always preceeded by -- a simple_expression which denotes the subtype be to constrained, which -- must be on the top of the Exp_Stack. That's the type needed for the new context. New_Context_Type := Exp_Stack.Top (Stack => E_Stack).Type_Symbol; when SP_Symbols.constraint | SP_Symbols.integer_type_definition | SP_Symbols.derived_type_definition | SP_Symbols.floating_point_constraint | SP_Symbols.fixed_point_constraint => -- ASSUME Parent = constraint OR integer_type_definition OR floating_point_constraint OR -- fixed_point_constraint OR derived_type_definition -- In these cases, the context has been supplied by whoever called WalkExpression, -- so is simply preserved. 
New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); when others => -- Must be an error - this will be caught later on in the UP -- pass, but we need to push something so... New_Context_Type := Dictionary.NullSymbol; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Parent = integer_type_definition OR floating_point_constraint OR fixed_point_constraint OR " & "derived_type_definition OR constraint OR case_choice OR " & "aggregate_choice OR annotation_aggregate_choice in Range_Constraint_Type_From_Context"); end case; return New_Context_Type; end Range_Constraint_Type_From_Context; spark-2012.0.deb/examiner/vcg-producevcs.adb0000644000175000017500000004076311753202337017666 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with DAG; with Debug; with Pile; separate (VCG) procedure ProduceVCs (VCG_Heap : in out Cells.Heap_Record; Start_Node : in STree.SyntaxNode; Subprog_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; VCG_Output_File : in SPARK_IO.File_Type; DPC_Output_File : in SPARK_IO.File_Type; Output_Filename : in E_Strings.T; End_Position : in LexTokenManager.Token_Position; Flow_Heap : in out Heap.HeapRecord; Semantic_Error_In_Subprogram : in Boolean; Data_Flow_Error_In_Subprogram : in Boolean; Type_Check_Exports : in Boolean) is VCG_Failure : Boolean; Verbose_Echo : Boolean; procedure Put_Line (S : in String) --# global in Verbose_Echo; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# S, --# Verbose_Echo; is begin if Verbose_Echo then ScreenEcho.Put_Line (S); end if; end Put_Line; procedure New_Line --# global in Verbose_Echo; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Verbose_Echo; is begin if Verbose_Echo then ScreenEcho.New_Line (1); end if; end New_Line; procedure Dump_Used_Symbols (S : in String) --# derives null from S; is --# hide Dump_Used_Symbols; Iterator : Declarations.UsedSymbolIterator; CN : Cells.Cell; CS : Dictionary.Symbol; CDAG : Cells.Cell; begin Debug.PrintMsg (S, True); Declarations.Initialize (Iterator); while not Declarations.IsNullIterator (Iterator) loop CN := Declarations.CurrentNode (Iterator); CS := Cells.Get_Symbol_Value (VCG_Heap, CN); Debug.Print_Sym (Msg => "Symbol: ", Sym => CS); if Dictionary.Is_Constant (CS) then Debug.PrintMsg ("is a constant and ", False); if Dictionary.IsPrivateType (Dictionary.GetType (CS), Scope) then Debug.PrintMsg ("is a private type in this scope", True); else if Dictionary.TypeIsScalar (Dictionary.GetType (CS)) then Debug.PrintMsg ("is a scalar in this scope", True); elsif Dictionary.TypeIsArray 
(Dictionary.GetType (CS)) then Debug.PrintMsg ("is an array in this scope", True); CDAG := Pile.DAG (VCG_Heap, CN); if Cells.Is_Null_Cell (CDAG) then Debug.PrintMsg ("and its DAG is Null", True); else Debug.PrintDAG ("and its DAG is ", CDAG, VCG_Heap, Scope); end if; elsif Dictionary.TypeIsRecord (Dictionary.GetType (CS)) then Debug.PrintMsg ("is an array in this scope", True); CDAG := Pile.DAG (VCG_Heap, CN); if Cells.Is_Null_Cell (CDAG) then Debug.PrintMsg ("and its DAG is Null", True); else Debug.PrintDAG ("and its DAG is ", CDAG, VCG_Heap, Scope); end if; else Debug.PrintMsg ("is OTHER in this scope", True); end if; end if; else Debug.PrintMsg ("is not a constant", True); end if; Iterator := Declarations.NextNode (VCG_Heap, Iterator); end loop; Debug.PrintMsg ("---End---", True); end Dump_Used_Symbols; pragma Unreferenced (Dump_Used_Symbols); procedure Process_Composite_Constants --# global in CommandLineData.Content; --# in End_Position; --# in Scope; --# in STree.Table; --# in out Declarations.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Flow_Heap; --# in out Graph.Table; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCG_Heap; --# derives Declarations.State, --# Dictionary.Dict, --# Flow_Heap, --# Graph.Table, --# LexTokenManager.State, --# Statistics.TableUsage, --# StmtStack.S, --# VCG_Heap from *, --# CommandLineData.Content, --# Declarations.State, --# Dictionary.Dict, --# Flow_Heap, --# Graph.Table, --# LexTokenManager.State, --# Scope, --# StmtStack.S, --# STree.Table, --# VCG_Heap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Declarations.State, --# Dictionary.Dict, --# End_Position, --# ErrorHandler.Error_Context, --# Flow_Heap, --# Graph.Table, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# StmtStack.S, --# STree.Table, --# VCG_Heap; is Iterator : Declarations.UsedSymbolIterator; CN : Cells.Cell; CS : Dictionary.Symbol; CT : Dictionary.Symbol; CDAG : Cells.Cell; Change_Made : Boolean; Exp_Node : STree.SyntaxNode; function Rule_Is_Required (CS : Dictionary.Symbol) return Boolean --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in Scope; is begin return CommandLineData.Content.Constant_Rules = CommandLineData.All_Rules or else (CommandLineData.Content.Constant_Rules = CommandLineData.Keen and then not (Dictionary.GetConstantRulePolicy (CS, Scope) = Dictionary.No_Rule_Requested)) or else (CommandLineData.Content.Constant_Rules = CommandLineData.Lazy and then Dictionary.GetConstantRulePolicy (CS, Scope) = Dictionary.Rule_Requested); end Rule_Is_Required; procedure Raise_Warnings --# global in CommandLineData.Content; --# in Declarations.State; --# in Dictionary.Dict; --# in End_Position; --# in LexTokenManager.State; --# in Scope; --# in VCG_Heap; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Declarations.State, --# Dictionary.Dict, --# End_Position, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# VCG_Heap; is Iterator : Declarations.UsedSymbolIterator; CN : Cells.Cell; CS : Dictionary.Symbol; CT : Dictionary.Symbol; CDAG : Cells.Cell; begin Declarations.Initialize (Iterator); while not Declarations.IsNullIterator (Iterator) loop CN := Declarations.CurrentNode (Iterator); CS := Cells.Get_Symbol_Value (VCG_Heap, 
CN); CT := Dictionary.GetType (CS); -- We're interested in visible (not private), composite constants... if Dictionary.Is_Constant (CS) and then not Dictionary.IsPrivateType (CT, Scope) and then not Dictionary.IsGenericParameterSymbol (CS) and then (Dictionary.TypeIsArray (CT) or else Dictionary.TypeIsRecord (CT)) and then Rule_Is_Required (CS => CS) then CDAG := Pile.DAG (VCG_Heap, CN); if Cells.Is_Null_Cell (CDAG) then if Dictionary.ConstantExpIsWellformed (CS) then if Dictionary.TypeIsArray (CT) and then Dictionary.GetNumberOfDimensions (CT) > 1 then -- At present, FDL cannot represent multi-dimensional array aggregates, -- so we simply issue a warning and continue. The warning -- appears at the end of the listing for the subprogram that -- we are generating VCs for. ErrorHandler.Semantic_Warning_Sym (Err_Num => 312, Position => End_Position, Sym => CS, Scope => Dictionary.GetScope (CS)); end if; else -- Cannot produce rule due to semantic errors in Expression -- we simply issue a warning and continue. The warning -- appears at the end of the listing for the subprogram that -- we are generating VCs for. ErrorHandler.Semantic_Warning_Sym (Err_Num => 313, Position => End_Position, Sym => CS, Scope => Dictionary.GetScope (CS)); end if; end if; end if; Iterator := Declarations.NextNode (VCG_Heap, Iterator); end loop; end Raise_Warnings; begin loop Change_Made := False; Declarations.Initialize (Iterator); while not Declarations.IsNullIterator (Iterator) loop CN := Declarations.CurrentNode (Iterator); CS := Cells.Get_Symbol_Value (VCG_Heap, CN); CT := Dictionary.GetType (CS); -- We're interested in visible (not private), composite constants... if Dictionary.Is_Constant (CS) and then not Dictionary.IsPrivateType (CT, Scope) and then not Dictionary.IsGenericParameterSymbol (CS) and then (Dictionary.TypeIsArray (CT) or else Dictionary.TypeIsRecord (CT)) and then Rule_Is_Required (CS => CS) then CDAG := Pile.DAG (VCG_Heap, CN); if Cells.Is_Null_Cell (CDAG) then if Dictionary.ConstantExpIsWellformed (CS) then Exp_Node := STree.RefToNode (Dictionary.GetConstantExpNode (CS)); if (not Dictionary.TypeIsArray (CT)) or else Dictionary.GetNumberOfDimensions (CT) = 1 then -- Build the DAG for this initializing expression, and store the -- resulting root Cell in the Pile Node for this constant. -- The initializing expression must be evaluation in the Scope where -- it is declared. DAG.BuildConstantInitializationDAG (Exp_Node, Dictionary.GetScope (CS), VCG_Heap, Flow_Heap, CDAG); Pile.SetDAG (VCG_Heap, CN, CDAG); -- This newly generated DAG might contain references to other -- constants and so on which need FDL declarations and Rules, so... Declarations.Find_DAG_Declarations (Heap => VCG_Heap, Root => CDAG); -- ...that might have changed the state of the Declarations package, -- which we are currrently iterating over. This means our Iterator -- is no longer valid, so we have to give up here and start a new -- pass. Change_Made := True; end if; end if; end if; end if; exit when Change_Made; Iterator := Declarations.NextNode (VCG_Heap, Iterator); end loop; -- No changes at all made - that means we must have processed all the constants, -- so we can terminate. 
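   -- (A further pass is needed when, for example, the DAG just built for one
   -- composite constant refers to another composite constant: Find_DAG_Declarations
   -- then adds that constant to Declarations.State and the iterator in use here
   -- is no longer valid.)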
exit when not Change_Made; end loop; Raise_Warnings; end Process_Composite_Constants; begin -- ProduceVCs; Verbose_Echo := CommandLineData.Content.Echo and not CommandLineData.Content.Brief; New_Line; Put_Line (S => " Building model of subprogram ..."); Graph.Reinitialize_Graph; VCG_Failure := False; DAG.BuildGraph (Start_Node, Subprog_Sym, Scope, VCG_Output_File, End_Position, VCG_Failure, VCG_Heap, Flow_Heap, Semantic_Error_In_Subprogram, Data_Flow_Error_In_Subprogram, Type_Check_Exports); if VCG_Failure then ErrorHandler.Semantic_Error (Err_Num => 962, Reference => 0, Position => End_Position, Id_Str => LexTokenManager.Null_String); else if CommandLineData.Content.Debug.VCG then Debug.PrintMsg ("----------- Dump of VCG State after DAG.BuildGraph ---------------", True); Graph.Dump_Graph_Table (VCG_Heap, Scope, Graph.PFs); Graph.Dump_Graph_Dot (VCG_Heap, Output_Filename, 0, Scope, Graph.PFs); Debug.PrintMsg ("------------------------------------------------------------------", True); end if; New_Line; Put_Line (S => " Generating VCs ..."); Graph.Gen_VCs (Heap => VCG_Heap, Output_File => VCG_Output_File, Output_File_Name => Output_Filename, Scope => Scope, Gen_VC_Failure => VCG_Failure); if VCG_Failure then New_Line; ErrorHandler.Semantic_Error (Err_Num => 962, Reference => 0, Position => End_Position, Id_Str => LexTokenManager.Null_String); else if CommandLineData.Content.VCG then New_Line; Put_Line (S => " Writing VCs ..."); Graph.Print_VCs_Or_DPCs (VCG_Heap, VCG_Output_File, Scope, Graph.VCs); end if; if CommandLineData.Content.DPC then New_Line; Put_Line (S => " Writing DPCs ..."); Graph.Print_VCs_Or_DPCs (VCG_Heap, DPC_Output_File, Scope, Graph.DPCs); end if; -- We also need to generate DAGs and Declarations for the -- initializing expressions of any composite constants that -- have been referenced in the VCs printed above. if CommandLineData.Content.Constant_Rules /= CommandLineData.No_Rules then Process_Composite_Constants; end if; end if; end if; end ProduceVCs; spark-2012.0.deb/examiner/clists.ads0000644000175000017500000000643211753202335016247 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
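--
-- Illustrative usage sketch (added note placed ahead of the package
-- declaration below; not part of the original source). It shows a typical
-- traversal of a cell list built with the Clists operations, assuming a
-- heap H : Cells.Heap_Record, a list header L, a data cell C already
-- obtained via the Cells package, and a cursor P : Cells.Cell. The use of
-- Cells.Is_Null_Cell as the termination test is an assumption made for the
-- sake of the sketch.
--
--    Clists.CreateList (H, L);
--    Clists.AppendCell (H, C, L);
--    P := Clists.FirstCell (H, L);
--    while not Cells.Is_Null_Cell (P) loop
--       --  ... use cell P ...
--       P := Clists.NextCell (H, P);
--    end loop;
--    Clists.DisposeOfList (H, L);
--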
-- --============================================================================= with Cells; --# inherit Cells, --# Statistics; package Clists is function FirstCell (Heap : Cells.Heap_Record; ListName : Cells.Cell) return Cells.Cell; function LastCell (Heap : Cells.Heap_Record; ListName : Cells.Cell) return Cells.Cell; function NextCell (Heap : Cells.Heap_Record; CellName : Cells.Cell) return Cells.Cell; function IsEmptyList (Heap : Cells.Heap_Record; ListName : Cells.Cell) return Boolean; procedure AppendCell (Heap : in out Cells.Heap_Record; CellName : in Cells.Cell; ListName : in Cells.Cell); --# derives Heap from *, --# CellName, --# ListName; procedure InsertCell (Heap : in out Cells.Heap_Record; CellName : in Cells.Cell; ListName : in Cells.Cell); --# derives Heap from *, --# CellName, --# ListName; procedure Concatenate (Heap : in out Cells.Heap_Record; List_1 : in Cells.Cell; List_2 : in Cells.Cell); --# derives Heap from *, --# List_1, --# List_2; -- transfer cells from List_1 to List_2; procedure TransferCells (Heap : in out Cells.Heap_Record; List_1 : in Cells.Cell; List_2 : in Cells.Cell); --# derives Heap from *, --# List_1, --# List_2; procedure CreateList (Heap : in out Cells.Heap_Record; ListName : out Cells.Cell); --# global in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Heap & --# ListName from Heap; procedure DisposeOfList (Heap : in out Cells.Heap_Record; ListName : in Cells.Cell); --# derives Heap from *, --# ListName; procedure RemoveLeader (Heap : in out Cells.Heap_Record; ListName : in Cells.Cell); --# derives Heap from *, --# ListName; end Clists; spark-2012.0.deb/examiner/sem-add_derives_stream_effects.adb0000644000175000017500000000675611753202336023044 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem) procedure Add_Derives_Stream_Effects (Node_Pos : in LexTokenManager.Token_Position; Subprog_Sym : in Dictionary.Symbol; Abstraction : in Dictionary.Abstractions) is Export_It, Import_It : Dictionary.Iterator; Export_Sym, Import_Sym : Dictionary.Symbol; begin -- first scan exports for reference to out streams Export_It := Dictionary.FirstExport (Abstraction, Subprog_Sym); while not Dictionary.IsNullIterator (Export_It) loop Export_Sym := Dictionary.CurrentSymbol (Export_It); -- writes to an out streams are also implicit imports if Dictionary.GetOwnVariableOrConstituentMode (Export_Sym) = Dictionary.OutMode then Dictionary.AddDependency (Abstraction => Abstraction, Comp_Unit => ContextManager.Ops.Current_Unit, TheProcedure => Subprog_Sym, TheExport => Export_Sym, TheImport => Export_Sym, ImportReference => Dictionary.Location'(Start_Position => Node_Pos, End_Position => Node_Pos)); end if; Export_It := Dictionary.NextSymbol (Export_It); end loop; -- now scan imports for reads of in streams Import_It := Dictionary.FirstImport (Abstraction, Subprog_Sym); while not Dictionary.IsNullIterator (Import_It) loop Import_Sym := Dictionary.CurrentSymbol (Import_It); -- reads of in streams imply an update as well if Dictionary.GetOwnVariableOrConstituentMode (Import_Sym) = Dictionary.InMode then Dictionary.AddExport (Abstraction => Abstraction, TheProcedure => Subprog_Sym, TheExport => Import_Sym, ExportReference => Dictionary.Location'(Start_Position => Node_Pos, End_Position => Node_Pos), Annotation => Dictionary.Location'(Start_Position => Node_Pos, End_Position => Node_Pos)); Dictionary.AddDependency (Abstraction => Abstraction, Comp_Unit => ContextManager.Ops.Current_Unit, TheProcedure => Subprog_Sym, TheExport => Import_Sym, TheImport => Import_Sym, ImportReference => Dictionary.Location'(Start_Position => Node_Pos, End_Position => Node_Pos)); end if; Import_It := Dictionary.NextSymbol (Import_It); end loop; end Add_Derives_Stream_Effects; spark-2012.0.deb/examiner/sem-wf_argument_association-tagged_actual_must_be_object_check.adb0000644000175000017500000000415211753202336031402 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Wf_Argument_Association) procedure Tagged_Actual_Must_Be_Object_Check (Node_Pos : in LexTokenManager.Token_Position; Formal_Type : in Dictionary.Symbol; Actual_Type : in Dictionary.Symbol; Controlling_Type : in Dictionary.Symbol; Is_A_Variable : in Boolean; Is_A_Constant : in Boolean; Error_Found : in out Boolean) is begin if not Dictionary.Is_Null_Symbol (Controlling_Type) and then Dictionary.Types_Are_Equal (Left_Symbol => Formal_Type, Right_Symbol => Controlling_Type, Full_Range_Subtype => False) and then not Dictionary.Types_Are_Equal (Left_Symbol => Actual_Type, Right_Symbol => Controlling_Type, Full_Range_Subtype => False) and then -- but must be an extension of it (earlier check) not (Is_A_Variable or else Is_A_Constant) then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 827, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => LexTokenManager.Null_String); end if; end Tagged_Actual_Must_Be_Object_Check; spark-2012.0.deb/examiner/sem-dependency_relation-create_full_dependency.adb0000644000175000017500000001250111753202336026176 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Dependency_Relation) procedure Create_Full_Dependency (Node_Pos : in LexTokenManager.Token_Position; Subprog_Sym : in Dictionary.Symbol; Abstraction : in Dictionary.Abstractions; Import_List, Export_List : in SeqAlgebra.Seq; The_Heap : in Heap.HeapRecord) is First_Valid_Export : Dictionary.Symbol; Member : SeqAlgebra.MemberOfSeq; Import : Dictionary.Symbol; ----------------------- function Get_First_Valid_Export (Export_List : SeqAlgebra.Seq; The_Heap : Heap.HeapRecord) return Dictionary.Symbol is Member : SeqAlgebra.MemberOfSeq; Sym : Dictionary.Symbol; begin Member := SeqAlgebra.FirstMember (The_Heap, Export_List); if SeqAlgebra.IsNullMember (Member) then Sym := Dictionary.NullSymbol; else Sym := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (SeqAlgebra.Value_Of_Member (The_Heap => The_Heap, M => Member))); end if; return Sym; end Get_First_Valid_Export; ----------------------------------------------------------------------- procedure Copy_Imports (Abstraction : in Dictionary.Abstractions; Export_List : in SeqAlgebra.Seq; First_Valid_Export : in Dictionary.Symbol; Subprog_Sym : in Dictionary.Symbol; The_Heap : in Heap.HeapRecord) --# global in out Dictionary.Dict; --# derives Dictionary.Dict from *, --# Abstraction, --# Export_List, --# First_Valid_Export, --# Subprog_Sym, --# The_Heap; is Member : SeqAlgebra.MemberOfSeq; begin Member := SeqAlgebra.FirstMember (The_Heap, Export_List); if not SeqAlgebra.IsNullMember (Member) then -- there is at least one valid export, we want to loop through the rest loop Member := SeqAlgebra.NextMember (The_Heap, Member); exit when SeqAlgebra.IsNullMember (Member); Dictionary.CopyDependencyList (Abstraction, Subprog_Sym, First_Valid_Export, Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (SeqAlgebra.Value_Of_Member (The_Heap => The_Heap, M => Member)))); end loop; end if; end Copy_Imports; begin -- Create_Full_Dependency First_Valid_Export := Get_First_Valid_Export (Export_List => Export_List, The_Heap => The_Heap); Member := SeqAlgebra.FirstMember (The_Heap, Import_List); while not SeqAlgebra.IsNullMember (Member) loop Import := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (SeqAlgebra.Value_Of_Member (The_Heap => The_Heap, M => Member))); --# accept Flow, 41, "Expected stable expression"; if not Dictionary.Is_Null_Symbol (First_Valid_Export) then --# end accept; Dictionary.AddDependency (Abstraction => Abstraction, Comp_Unit => ContextManager.Ops.Current_Unit, TheProcedure => Subprog_Sym, TheExport => First_Valid_Export, TheImport => Import, ImportReference => Dictionary.Location'(Start_Position => Node_Pos, End_Position => Node_Pos)); else -- no valid export to associate with but mark to avoid knock-on errors Dictionary.ForceImport (Abstraction, Subprog_Sym, Import, Dictionary.Location'(Start_Position => Node_Pos, End_Position => Node_Pos)); end if; Member := SeqAlgebra.NextMember (The_Heap, Member); end loop; Copy_Imports (Abstraction => Abstraction, Export_List => Export_List, First_Valid_Export => First_Valid_Export, Subprog_Sym => Subprog_Sym, The_Heap => The_Heap); end Create_Full_Dependency; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_selected_component.adb0000644000175000017500000003553111753202336025612 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK 
toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SLI; separate (Sem.Walk_Expression_P) procedure Wf_Selected_Component (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Ref_Var : in SeqAlgebra.Seq; E_Stack : in out Exp_Stack.Exp_Stack_Type; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord; Context : in Sem.Tilde_Context) is Sym : Dictionary.Symbol; Sym2 : Dictionary.Symbol; Selector_Node : STree.SyntaxNode; Selector : LexTokenManager.Lex_String; Node_Pos, Select_Pos : LexTokenManager.Token_Position; Type_Info : Sem.Exp_Record; Prefix_OK : Boolean; Ident_Context : Dictionary.Contexts; begin Node_Pos := STree.Node_Position (Node => Node); if STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_selected_component then -- in annotation Ident_Context := Dictionary.ProofContext; else Ident_Context := Dictionary.ProgramContext; end if; Selector_Node := STree.Last_Child_Of (Start_Node => STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node))); -- ASSUME Selector_Node = identifier SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Selector_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Selector_Node = identifier in Wf_Selected_Component"); Selector := STree.Node_Lex_String (Node => Selector_Node); Select_Pos := STree.Node_Position (Node => Selector_Node); Exp_Stack.Pop (Item => Type_Info, Stack => E_Stack); case Type_Info.Sort is when Sem.Is_Unknown => Type_Info.Errors_In_Expression := True; Exp_Stack.Push (X => Type_Info, Stack => E_Stack); when Sem.Is_Package => Sem.Check_Package_Prefix (Node_Pos => Node_Pos, Pack_Sym => Type_Info.Other_Symbol, Scope => Scope, OK => Prefix_OK); if Prefix_OK then Sym := Dictionary.LookupSelectedItem (Prefix => Type_Info.Other_Symbol, Selector => Selector, Scope => Scope, Context => Ident_Context); -- Here, we do a special check to spot a common error in order to give a more -- informative error message. -- -- If the user has referred to an entity "P.F" which has already been -- renamed, then they should just refer to "F" and "P.F" is illegal. -- -- To spot this, if P.F is not visible, we try again to look up "F" alone, -- and if it's visible AND is renamed, then we issue semantic error 419, -- which is much more helpful than the previously-issued semantic error 1. 
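-- Illustrative sketch of the situation (added note, not from the original
-- source; the names are invented for the example). Given a renaming such as
--
--    function F (X : Integer) return Integer renames P.F;
--
-- a later reference written as "P.F (X)" is not legal SPARK and would
-- otherwise attract semantic error 1; the check below reports error 419
-- instead, pointing the user at the unprefixed form "F (X)".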
if Dictionary.Is_Null_Symbol (Sym) then Sym2 := Dictionary.LookupItem (Name => Selector, Scope => Scope, Context => Ident_Context, Full_Package_Name => False); if not Dictionary.Is_Null_Symbol (Sym2) and then Dictionary.Is_Subprogram (Sym2) and then not Dictionary.IsImplicitProofFunction (Sym2) and then Dictionary.Is_Renamed (Subprogram => Sym2, Scope => Scope) then ErrorHandler.Semantic_Error2 (Err_Num => 419, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str1 => Selector, Id_Str2 => Dictionary.GetSimpleName (Type_Info.Other_Symbol)); -- Define Sym as Sym2 here to prevent a second knock-on error in StackIdentifier below Sym := Sym2; end if; end if; if Sym = Type_Info.Other_Symbol then -- found P in P such as P.P.P.X when P.X is intended Sym := Dictionary.NullSymbol; end if; if not Dictionary.Is_Null_Symbol (Sym) then STree.Set_Node_Lex_String (Sym => Sym, Node => Selector_Node); if STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_selected_component and then ErrorHandler.Generate_SLI then -- in annotation SLI.Generate_Xref_Symbol (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Selector_Node, Symbol => Sym, Is_Declaration => False); end if; end if; Stack_Identifier (Sym => Sym, Id_Str => Selector, Node => Node, Prefix => Type_Info.Other_Symbol, Scope => Scope, E_Stack => E_Stack, The_Heap => The_Heap, Ref_Var => Ref_Var, Dotted => True, Context => Context, Is_Annotation => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_selected_component); else Exp_Stack.Push (X => Unknown_Symbol_Record, Stack => E_Stack); end if; when Sem.Is_Object => if Dictionary.IsRecordTypeMark (Type_Info.Type_Symbol, Scope) then -- Type_Info.TypeSymbol here might denote a record subtype, -- so find the root type before looking for the selector. Type_Info.Type_Symbol := Dictionary.GetRootType (Type_Info.Type_Symbol); if STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_selected_component then -- in annotation Sym := Dictionary.LookupSelectedItem (Prefix => Type_Info.Type_Symbol, Selector => Selector, Scope => Scope, Context => Dictionary.ProofContext); else -- not in annotation if Type_Info.Arg_List_Found or else Dictionary.Is_Constant (Type_Info.Other_Symbol) or else Dictionary.IsFunction (Type_Info.Other_Symbol) then -- do not collect any component entities Sym := Dictionary.LookupSelectedItem (Prefix => Type_Info.Type_Symbol, Selector => Selector, Scope => Scope, Context => Dictionary.ProgramContext); else -- do collect component entities if not ComponentManager.HasChildren (Component_Data, ComponentManager.GetComponentNode (Component_Data, Type_Info.Other_Symbol)) then -- add all children of the prefix to the component manager -- and declare subcomponents for each in the dictionary Sem.Add_Record_Sub_Components (Record_Var_Sym => Type_Info.Other_Symbol, Record_Type_Sym => Type_Info.Type_Symbol, Component_Data => Component_Data, The_Heap => The_Heap); end if; -- subcomponent symbol must be in Dictionary here Sym := Dictionary.LookupSelectedItem (Prefix => Type_Info.Other_Symbol, Selector => Selector, Scope => Scope, Context => Dictionary.ProgramContext); Type_Info.Other_Symbol := Sym; end if; end if; -- If Sym is found, but it's NOT a record component (e.g. it -- denotes the name of a type or something), then something -- is very wrong. 
if not Dictionary.Is_Null_Symbol (Sym) and then Dictionary.IsRecordComponent (Sym) then STree.Set_Node_Lex_String (Sym => Sym, Node => Selector_Node); Type_Info.Type_Symbol := Dictionary.GetType (Sym); if STree.Syntax_Node_Type (Node => Node) = SP_Symbols.selected_component then -- not in annotation Type_Info.Is_An_Entire_Variable := False; elsif ErrorHandler.Generate_SLI then SLI.Generate_Xref_Symbol (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Selector_Node, Symbol => Sym, Is_Declaration => False); end if; Type_Info.Is_Constant := False; Exp_Stack.Push (X => Type_Info, Stack => E_Stack); else Exp_Stack.Push (X => Unknown_Symbol_Record, Stack => E_Stack); ErrorHandler.Semantic_Error (Err_Num => 8, Reference => ErrorHandler.No_Reference, Position => Select_Pos, Id_Str => Selector); end if; elsif Dictionary.IsProtectedType (Dictionary.GetRootType (Type_Info.Type_Symbol)) then -- handle protected function call Sym := Dictionary.LookupSelectedItem (Prefix => Type_Info.Other_Symbol, Selector => Selector, Scope => Scope, Context => Ident_Context); if not Dictionary.Is_Null_Symbol (Sym) then STree.Set_Node_Lex_String (Sym => Sym, Node => Selector_Node); if STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_selected_component and then ErrorHandler.Generate_SLI then -- in annotation SLI.Generate_Xref_Symbol (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Selector_Node, Symbol => Sym, Is_Declaration => False); end if; end if; Stack_Identifier (Sym => Sym, Id_Str => Selector, Node => Node, Prefix => Type_Info.Other_Symbol, Scope => Scope, E_Stack => E_Stack, The_Heap => The_Heap, Ref_Var => Ref_Var, Dotted => False, Context => Context, Is_Annotation => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_selected_component); else Exp_Stack.Push (X => Unknown_Symbol_Record, Stack => E_Stack); if Dictionary.IsPrivateType (Type_Info.Type_Symbol, Scope) then ErrorHandler.Semantic_Error_Sym2 (Err_Num => 316, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Sym => Type_Info.Other_Symbol, Sym2 => Type_Info.Type_Symbol, Scope => Scope); else ErrorHandler.Semantic_Error (Err_Num => 9, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (Type_Info.Other_Symbol)); end if; end if; when Sem.Is_Function => Exp_Stack.Push (X => Unknown_Symbol_Record, Stack => E_Stack); ErrorHandler.Semantic_Error (Err_Num => 3, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (Type_Info.Other_Symbol)); when others => Exp_Stack.Push (X => Unknown_Symbol_Record, Stack => E_Stack); ErrorHandler.Semantic_Error (Err_Num => 5, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (Type_Info.Other_Symbol)); end case; if STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_selected_component then -- in annotation Selector_Node := STree.Next_Sibling (Current_Node => Selector_Node); -- ASSUME Selector_Node = tilde OR percent OR NULL if Selector_Node /= STree.NullNode then -- ASSUME Selector_Node = tilde OR percent -- handle ~ or % operator case STree.Syntax_Node_Type (Node => Selector_Node) is when SP_Symbols.tilde => -- ASSUME Selector_Node = tilde Wf_Tilde (Node_Pos => STree.Node_Position (Node => Selector_Node), Scope => Scope, E_Stack => E_Stack, Context => Context); when SP_Symbols.percent => -- ASSUME Selector_Node = percent Wf_Percent (Node_Pos => STree.Node_Position (Node => Selector_Node), Scope => Scope, E_Stack => E_Stack); 
when others => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Selector_Node = tilde OR percent OR NULL in Wf_Selected_Component"); end case; end if; end if; end Wf_Selected_Component; spark-2012.0.deb/examiner/sem-walk_expression_p-walk_annotation_expression-up_wf_store_list.adb0000644000175000017500000001174611753202336032302 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Walk_Expression_P.Walk_Annotation_Expression) procedure Up_Wf_Store_List (Node : in out STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type) is Exp_Result, Type_Info : Sem.Exp_Record; Sym : Dictionary.Symbol; Error_Found : Boolean := False; -------------------------------------------------------------- procedure Chain_Up_To_Store (Node : in out STree.SyntaxNode) --# global in STree.Table; --# derives Node from *, --# STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.store_list; --# post STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.store; is begin while STree.Syntax_Node_Type (Node => Node) = SP_Symbols.store_list loop Node := STree.Parent_Node (Current_Node => Node); end loop; -- ASSUME Node = store SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.store, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = store in Chain_Up_To_Store"); end Chain_Up_To_Store; -------------------------------------------------------------- function Expression_Location (Node : STree.SyntaxNode) return LexTokenManager.Token_Position --# global in STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.store_list; is Exp_Loc : LexTokenManager.Token_Position; begin if STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => Node)) = SP_Symbols.annotation_expression then -- ASSUME STree.Child_Node (Current_Node => Node) = annotation_expression Exp_Loc := STree.Node_Position (Node => Node); elsif STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => Node)) = SP_Symbols.store_list then -- ASSUME STree.Child_Node (Current_Node => Node) = store_list Exp_Loc := STree.Node_Position (Node => STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node))); else Exp_Loc := LexTokenManager.Null_Token_Position; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect STree.Child_Node (Current_Node => Node) = store_list OR annotation_expression " & "in Expression_Location"); end if; return Exp_Loc; end Expression_Location; begin -- Up_Wf_Store_List Exp_Stack.Pop (Item => Exp_Result, 
Stack => E_Stack); Exp_Stack.Pop (Item => Type_Info, Stack => E_Stack); Sym := Type_Info.Other_Symbol; -- we must be dealing with an array update because Down_Wf_Store_List -- prunes at store_list node for records and so this procedure will -- never be called with a record (or any other illegal type) if Type_Info.Param_Count >= Dictionary.GetNumberOfDimensions (Type_Info.Type_Symbol) then -- too many index expressions found Type_Info := Unknown_Symbol_Record; ErrorHandler.Semantic_Error (Err_Num => 93, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => Dictionary.GetSimpleName (Sym)); Chain_Up_To_Store (Node => Node); else -- still counting index expressions Type_Info.Param_Count := Type_Info.Param_Count + 1; if not Dictionary.CompatibleTypes (Scope, Dictionary.GetArrayIndex (Type_Info.Type_Symbol, Type_Info.Param_Count), Exp_Result.Type_Symbol) then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 38, Reference => ErrorHandler.No_Reference, Position => Expression_Location (Node => Node), Id_Str => LexTokenManager.Null_String); end if; end if; Type_Info.Errors_In_Expression := Error_Found or else Type_Info.Errors_In_Expression or else Exp_Result.Errors_In_Expression; Exp_Stack.Push (X => Type_Info, Stack => E_Stack); end Up_Wf_Store_List; spark-2012.0.deb/examiner/cells-utility-list.ads0000644000175000017500000001340411753202335020517 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Cells; --# inherit Cells, --# Cells.Utility, --# Cell_Storage, --# Dictionary, --# Statistics, --# SystemErrors; package Cells.Utility.List is -- A singly linked list built on top of Cells. Note that this is -- different to Clists as you can put arbitrary DAGs into this -- list. type Linked_List is private; type Iterator is private; ------------------------------------------------------------------------------ -- Queries ------------------------------------------------------------------------------ -- Returns the current length of the list. function Get_Length (VCG_Heap : in Cells.Heap_Record; The_List : in Linked_List) return Natural; ------------------------------------------------------------------------------ -- List manipulation ------------------------------------------------------------------------------ -- Creates an empty list. Complexity is O(1). procedure Create (VCG_Heap : in out Cells.Heap_Record; The_List : out Linked_List); --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# VCG_Heap from *, --# VCG_Heap & --# The_List from VCG_Heap; -- Appends the given cell to the end of the list. 
Complexity is -- O(1). procedure Append (VCG_Heap : in out Cells.Heap_Record; The_List : in Linked_List; The_Cell : in Cells.Cell); --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage from *, --# VCG_Heap & --# VCG_Heap from *, --# The_Cell, --# The_List; -- Appends the given list to the end of the list. Complexity is -- O(1). The appended list becomes the empty list afterwards. procedure Append_List (VCG_Heap : in out Cells.Heap_Record; The_List : in Linked_List; The_List_To_Append : in Linked_List); --# derives VCG_Heap from *, --# The_List, --# The_List_To_Append; -- This kills the list structure, but not the individual -- cells. Complexity is O(n). procedure Empty (VCG_Heap : in out Cells.Heap_Record; The_List : in Linked_List); --# derives VCG_Heap from *, --# The_List; ------------------------------------------------------------------------------ -- Iterators ------------------------------------------------------------------------------ -- -- Note that if you change the list in any way while iterating, -- you will get amusing and undefined behaviour. -- ------------------------------------------------------------------------------ -- For all of these complexity is O(1). Note that if you go off -- the end of the list you will keep getting Null_Cells, but the -- Examiner will not abort. function First_Cell (VCG_Heap : in Cells.Heap_Record; The_List : in Linked_List) return Iterator; function Next_Cell (VCG_Heap : in Cells.Heap_Record; Previous : in Iterator) return Iterator; function Current_Cell (VCG_Heap : in Cells.Heap_Record; Current : in Iterator) return Cells.Cell; function Is_Null_Iterator (Current : in Iterator) return Boolean; ------------------------------------------------------------------------------ -- Utility ------------------------------------------------------------------------------ -- Joins the contents of the list together with /\. If the empty -- list is given, this will return true. procedure Join_And (VCG_Heap : in out Cells.Heap_Record; The_List : in Linked_List; The_Conjunct : out Cells.Cell); --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# VCG_Heap from *, --# Dictionary.Dict, --# The_List, --# VCG_Heap & --# The_Conjunct from Dictionary.Dict, --# The_List, --# VCG_Heap; -- Reverses the list in-place. Complexity is O(n). procedure Invert (VCG_Heap : in out Cells.Heap_Record; The_List : in Linked_List); --# derives VCG_Heap from *, --# The_List; private type Linked_List is new Integer range 0 .. Natural (Cell_Storage.Cell'Last); type Iterator is new Integer range 0 .. Natural (Cell_Storage.Cell'Last); end Cells.Utility.List; spark-2012.0.deb/examiner/sem-check_named_association.adb0000644000175000017500000004073111753202336022325 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) procedure Check_Named_Association (The_Formals : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Named_Argument_Assoc_Node : in STree.SyntaxNode) is type Iterator is record Base_It : STree.Iterator; Search_It : STree.Iterator; Dict_It : Dictionary.Iterator; end record; Null_Iterator : constant Iterator := Iterator'(Base_It => STree.NullIterator, Search_It => STree.NullIterator, Dict_It => Dictionary.NullIterator); It : Iterator; function Is_Null (It : Iterator) return Boolean --# return It = Null_Iterator; is begin return It = Null_Iterator; end Is_Null; --------------------------------------------------------------- -- Gets the first formal parameter for this dictionary entity --------------------------------------------------------------- function First_Formal (Sym : Dictionary.Symbol) return Dictionary.Iterator --# global in Dictionary.Dict; is Result : Dictionary.Iterator; begin if Dictionary.Is_Generic_Subprogram (The_Symbol => Sym) then -- It's a generic unit. Result := Dictionary.FirstGenericFormalParameter (Sym); elsif Dictionary.Is_Subprogram (Sym) then -- It's a subprogram. Result := Dictionary.FirstSubprogramParameter (Sym); else -- It's a task or protected type. Result := Dictionary.FirstKnownDiscriminant (Sym); end if; return Result; end First_Formal; --------------------------------------------------------------- -- Find duplicate formal parameters --------------------------------------------------------------- function Next_Duplicate_Formal (It : Iterator) return Iterator --# global in LexTokenManager.State; --# in STree.Table; --# pre Syntax_Node_Type (Get_Node (It.Base_It), STree.Table) = SP_Symbols.identifier and --# (Syntax_Node_Type (Get_Node (It.Search_It), STree.Table) = SP_Symbols.identifier or --# It.Search_It = STree.NullIterator); --# return Return_It => ((Syntax_Node_Type (Get_Node (Return_It.Base_It), STree.Table) = SP_Symbols.identifier and --# Syntax_Node_Type (Get_Node (Return_It.Search_It), STree.Table) = SP_Symbols.identifier) or --# Return_It = Null_Iterator); is My_Base_It : STree.Iterator; My_Search_It : STree.Iterator; My_Base_Node : STree.SyntaxNode; My_Search_Node : STree.SyntaxNode; Result : Iterator; begin if STree.IsNull (It.Search_It) then -- We've not found a duplicate yet -- So our base is the first formal parameter My_Base_It := It.Base_It; else -- We've found one duplicate and are looking for another. 
-- So our base is the next formal parameter My_Base_It := STree.NextNode (It.Base_It); end if; My_Search_It := STree.NullIterator; while not STree.IsNull (My_Base_It) loop My_Base_Node := Get_Node (It => My_Base_It); --# assert Syntax_Node_Type (My_Base_Node, STree.Table) = SP_Symbols.identifier and --# My_Base_Node = Get_Node (My_Base_It) and --# (Syntax_Node_Type (Get_Node (My_Search_It), STree.Table) = SP_Symbols.identifier or --# My_Search_It = STree.NullIterator); My_Search_It := STree.NextNode (My_Base_It); while not STree.IsNull (My_Search_It) loop My_Search_Node := Get_Node (It => My_Search_It); --# assert Syntax_Node_Type (My_Base_Node, STree.Table) = SP_Symbols.identifier and --# My_Base_Node = Get_Node (My_Base_It) and --# Syntax_Node_Type (My_Search_Node, STree.Table) = SP_Symbols.identifier and --# My_Search_Node = Get_Node (My_Search_It); -- exit if the identifiers hanging off the base and dup nodes -- are the same. i.e. we've found a duplicate. exit when LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Node => My_Base_Node), Lex_Str2 => Node_Lex_String (Node => My_Search_Node)) = LexTokenManager.Str_Eq; My_Search_It := STree.NextNode (My_Search_It); end loop; -- We found a duplicate exit when not STree.IsNull (My_Search_It); My_Base_It := STree.NextNode (My_Base_It); end loop; if STree.IsNull (My_Search_It) then -- We didn't find a duplicate Result := Null_Iterator; else Result := Iterator'(Base_It => My_Base_It, Search_It => My_Search_It, Dict_It => Dictionary.NullIterator); end if; return Result; end Next_Duplicate_Formal; function First_Duplicate_Formal (Node : STree.SyntaxNode) return Iterator --# global in LexTokenManager.State; --# in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.named_argument_association; --# return Return_It => ((Syntax_Node_Type (Get_Node (Return_It.Base_It), STree.Table) = SP_Symbols.identifier and --# Syntax_Node_Type (Get_Node (Return_It.Search_It), STree.Table) = SP_Symbols.identifier) or --# Return_It = Null_Iterator); is First_It : STree.Iterator; begin First_It := STree.Find_First_Formal_Parameter_Node (From_Root => Node); return Next_Duplicate_Formal (It => Iterator'(Base_It => First_It, Search_It => STree.NullIterator, Dict_It => Dictionary.NullIterator)); end First_Duplicate_Formal; --------------------------------------------------------------- -- Find illegal formal parameters --------------------------------------------------------------- function Next_Illegal_Formal (It : Iterator; The_Formals : Dictionary.Symbol) return Iterator --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# pre Syntax_Node_Type (Get_Node (It.Base_It), STree.Table) = SP_Symbols.identifier and --# (Syntax_Node_Type (Get_Node (It.Search_It), STree.Table) = SP_Symbols.identifier or --# It.Search_It = STree.NullIterator); --# return Return_It => ((Syntax_Node_Type (Get_Node (Return_It.Base_It), STree.Table) = SP_Symbols.identifier and --# Syntax_Node_Type (Get_Node (Return_It.Search_It), STree.Table) = SP_Symbols.identifier) or --# Return_It = Null_Iterator); is My_Base_It : STree.Iterator; My_Base_Node : STree.SyntaxNode; My_Dict_It : Dictionary.Iterator; Result : Iterator; begin if STree.IsNull (It.Search_It) then -- We've not found an illegal name -- So our base is the first formal parameter My_Base_It := It.Base_It; else -- We've found one illegal and are looking for another. -- So our base is the next formal parameter. 
My_Base_It := STree.NextNode (It.Base_It); end if; while not STree.IsNull (My_Base_It) loop My_Base_Node := Get_Node (It => My_Base_It); --# assert Syntax_Node_Type (My_Base_Node, STree.Table) = SP_Symbols.identifier and --# My_Base_Node = Get_Node (My_Base_It); My_Dict_It := First_Formal (Sym => The_Formals); -- Loop through all the formals declared in the type while not Dictionary.IsNullIterator (My_Dict_It) loop --# assert Syntax_Node_Type (My_Base_Node, STree.Table) = SP_Symbols.identifier and --# My_Base_Node = Get_Node (My_Base_It); -- Looking for a formal to match the one in the tree exit when LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Dictionary.GetSimpleName (Dictionary.CurrentSymbol (My_Dict_It)), Lex_Str2 => Node_Lex_String (Node => My_Base_Node)) = LexTokenManager.Str_Eq; My_Dict_It := Dictionary.NextSymbol (My_Dict_It); end loop; -- My_Dict_It is null if we didn't find it. exit when Dictionary.IsNullIterator (My_Dict_It); My_Base_It := STree.NextNode (My_Base_It); end loop; if STree.IsNull (My_Base_It) then -- We didn't find any more illegal formals Result := Null_Iterator; else Result := Iterator'(Base_It => My_Base_It, Search_It => My_Base_It, Dict_It => Dictionary.NullIterator); end if; return Result; end Next_Illegal_Formal; function First_Illegal_Formal (Node : STree.SyntaxNode; The_Formals : Dictionary.Symbol) return Iterator --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.named_argument_association; --# return Return_It => ((Syntax_Node_Type (Get_Node (Return_It.Base_It), STree.Table) = SP_Symbols.identifier and --# Syntax_Node_Type (Get_Node (Return_It.Search_It), STree.Table) = SP_Symbols.identifier) or --# Return_It = Null_Iterator); is First_It : STree.Iterator; begin First_It := STree.Find_First_Formal_Parameter_Node (From_Root => Node); return Next_Illegal_Formal (It => Iterator'(Base_It => First_It, Search_It => STree.NullIterator, Dict_It => Dictionary.NullIterator), The_Formals => The_Formals); end First_Illegal_Formal; --------------------------------------------------------------- -- Find missing formal parameters --------------------------------------------------------------- function Next_Missing_Formal (It : Iterator; The_Formals : Dictionary.Symbol; Named_Argument_Assoc_Node : STree.SyntaxNode) return Iterator --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# pre Syntax_Node_Type (Named_Argument_Assoc_Node, STree.Table) = SP_Symbols.named_argument_association; is My_Base_It : STree.Iterator; My_Base_Node : STree.SyntaxNode; My_Dict_It : Dictionary.Iterator; Result : Iterator; begin if Dictionary.IsNullIterator (It.Dict_It) then -- We've not found a missing formal yet -- So our base is the first formal My_Dict_It := First_Formal (Sym => The_Formals); else -- We've found one missing and are looking for another. 
-- So our base is the next formal parameter My_Dict_It := Dictionary.NextSymbol (It.Dict_It); end if; while not Dictionary.IsNullIterator (My_Dict_It) loop My_Base_It := STree.Find_First_Formal_Parameter_Node (From_Root => Named_Argument_Assoc_Node); -- Loop through all the formals while not STree.IsNull (My_Base_It) loop My_Base_Node := Get_Node (It => My_Base_It); --# assert Syntax_Node_Type (Get_Node (My_Base_It), STree.Table) = SP_Symbols.identifier and --# My_Base_Node = Get_Node (My_Base_It); -- Looking for a formal to match the one in the tree exit when LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Dictionary.GetSimpleName (Dictionary.CurrentSymbol (My_Dict_It)), Lex_Str2 => Node_Lex_String (Node => My_Base_Node)) = LexTokenManager.Str_Eq; My_Base_It := STree.NextNode (My_Base_It); end loop; -- My_Base_It is null if we didn't find it. exit when STree.IsNull (My_Base_It); My_Dict_It := Dictionary.NextSymbol (My_Dict_It); end loop; if Dictionary.IsNullIterator (My_Dict_It) then Result := Null_Iterator; else Result := Iterator'(Base_It => STree.NullIterator, Search_It => STree.NullIterator, Dict_It => My_Dict_It); end if; return Result; end Next_Missing_Formal; function First_Missing_Formal (The_Formals : Dictionary.Symbol; Named_Argument_Assoc_Node : STree.SyntaxNode) return Iterator --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# pre Syntax_Node_Type (Named_Argument_Assoc_Node, STree.Table) = SP_Symbols.named_argument_association; is begin return Next_Missing_Formal (It => Iterator'(Base_It => STree.NullIterator, Search_It => STree.NullIterator, Dict_It => Dictionary.NullIterator), The_Formals => The_Formals, Named_Argument_Assoc_Node => Named_Argument_Assoc_Node); end First_Missing_Formal; begin -- Check_Named_Association ------------------------------------------ -- Report all duplicated formal parameters ------------------------------------------ It := First_Duplicate_Formal (Node => Named_Argument_Assoc_Node); while not Is_Null (It => It) loop --# assert Syntax_Node_Type (Get_Node (It.Base_It), STree.Table) = SP_Symbols.identifier and --# Syntax_Node_Type (Get_Node (It.Search_It), STree.Table) = SP_Symbols.identifier; ErrorHandler.Semantic_Error (Err_Num => 4, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Get_Node (It => It.Search_It)), Id_Str => Node_Lex_String (Node => Get_Node (It => It.Search_It))); It := Next_Duplicate_Formal (It => It); end loop; ------------------------------------------ -- Report all illegal formal parameters ------------------------------------------ It := First_Illegal_Formal (Node => Named_Argument_Assoc_Node, The_Formals => The_Formals); while not Is_Null (It => It) loop --# assert Syntax_Node_Type (Get_Node (It.Base_It), STree.Table) = SP_Symbols.identifier and --# Syntax_Node_Type (Get_Node (It.Search_It), STree.Table) = SP_Symbols.identifier; ErrorHandler.Semantic_Error_Lex1_Sym1 (Err_Num => 2, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Get_Node (It => It.Search_It)), Id_Str => Node_Lex_String (Node => Get_Node (It => It.Search_It)), Sym => The_Formals, Scope => Scope); It := Next_Illegal_Formal (It => It, The_Formals => The_Formals); end loop; ------------------------------------------ -- Report all missing formal parameters ------------------------------------------ It := First_Missing_Formal (The_Formals => The_Formals, Named_Argument_Assoc_Node => Named_Argument_Assoc_Node); while not Is_Null (It => It) loop 
ErrorHandler.Semantic_Error (Err_Num => 23, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => STree.FindLastActualParameterNode (Named_Argument_Assoc_Node)), Id_Str => Dictionary.GetSimpleName (Dictionary.CurrentSymbol (It.Dict_It))); It := Next_Missing_Formal (It => It, The_Formals => The_Formals, Named_Argument_Assoc_Node => Named_Argument_Assoc_Node); end loop; end Check_Named_Association; spark-2012.0.deb/examiner/errorhandler-conversions-tostring-semanticerr.adb0000644000175000017500000100447711753202336026154 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (ErrorHandler.Conversions.ToString) procedure SemanticErr (Err_Num : in Error_Types.NumericError; With_Explanation : in Boolean; E_Str : in out E_Strings.T) is procedure SemanticErrExpl (E_Str : in out E_Strings.T) --# global in Err_Num; --# derives E_Str from *, --# Err_Num; is separate; -- Note that the parameter names for this subunit are chosen to make it as easy as -- possible to auto-generate the subunit from this, its parent, file. The -- generation requires copying the case statement below, stripping out the -- current Append'Thing' statements and adding an AppendString for the -- explanatory text that is delineated by --! comments. procedure Append_Explanation --# global in Err_Num; --# in With_Explanation; --# in out E_Str; --# derives E_Str from *, --# Err_Num, --# With_Explanation; is Explanation_String : E_Strings.T := E_Strings.Empty_String; begin if With_Explanation then -- we need to at least look for an explanation SemanticErrExpl (Explanation_String); if E_Strings.Get_Length (E_Str => Explanation_String) > 0 then -- there actually is one E_Strings.Append_String (E_Str => E_Str, Str => ErrorHandler.Explanation_Prefix); E_Strings.Append_Examiner_String (E_Str1 => E_Str, E_Str2 => Explanation_String); E_Strings.Append_String (E_Str => E_Str, Str => ErrorHandler.Explanation_Postfix); end if; end if; end Append_Explanation; begin -- Directives for HTML output: --! <"semantic-"> --! <"*** Semantic Error : "><" : "> -- To find a spare error number to add a new error, see the -- final case at the bottom of this case statement. 
case Err_Num.ErrorNum is when 1 => E_Strings.Append_String (E_Str => E_Str, Str => "The identifier "); if Err_Num.Name2 /= Error_Types.NoName then Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => "."); end if; Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is either undeclared or not visible at this point"); --! If the identifier is declared in a --! separate (or parent) package, the package must be included in an inherit clause --! and the identifier prefixed with the package name. --! Ensure that there are no errors in the declaration of the identifier when 2 => Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " does not denote a formal parameter for "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); when 3 => E_Strings.Append_String (E_Str => E_Str, Str => "Incorrect number of actual parameters for call of subprogram "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); when 4 => E_Strings.Append_String (E_Str => E_Str, Str => "More than one parameter association is given for formal parameter "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); when 5 => E_Strings.Append_String (E_Str => E_Str, Str => "Illegal use of identifier "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! Usually associated with the use of an identifier other than a package --! name as a prefix in a selected component. when 6 => E_Strings.Append_String (E_Str => E_Str, Str => "Identifier "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not the name of a variable"); when 7 => E_Strings.Append_String (E_Str => E_Str, Str => "Identifier "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not the name of a procedure"); when 8 => E_Strings.Append_String (E_Str => E_Str, Str => "There is no field named "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " in this entity"); --! Issued when the selector in a selected component of a record --! references a non-existent field. when 9 => E_Strings.Append_String (E_Str => E_Str, Str => "Selected components are not allowed for "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! Occurs if the prefix to a selected component representing a procedure --! in a procedure call statement or a type mark is not a package. Also --! occurs if a selector is applied in an expression to an object which --! is not a record variable. when 10 => E_Strings.Append_String (E_Str => E_Str, Str => "Illegal redeclaration of identifier "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); when 11 => E_Strings.Append_String (E_Str => E_Str, Str => "There is no package declaration for "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! Issued if a package body is encountered for which there is no --! package specification. 
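-- Illustrative sketch for the advice given under error 1 above (added
-- note, deliberately using plain "--" rather than "--!" so that it is not
-- picked up as generated explanation text; the package names are
-- invented). To reference an entity declared in another package, inherit
-- that package and use the prefixed name:
--
--    --# inherit Other_Pack;
--    package User_Pack is
--       Copy_Of_Limit : constant Integer := Other_Pack.Limit;
--    end User_Pack;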
when 12 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " can only be completed by a variable declaration, not a constant"); --! If the object in question is really a constant, then remove it from --! the enclosing package's own variable annotation. when 13 => E_Strings.Append_String (E_Str => E_Str, Str => "A body for subprogram "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has already been declared"); when 14 => E_Strings.Append_String (E_Str => E_Str, Str => "Illegal parent unit name"); --! Issued if the name in a "separate" clause of a subunit does not --! correctly identify a compilation unit. Common causes of this error --! are a syntax error in the parent unit or omitting the --! parent unit specification and/or parent unit body entries from the index file when 15 => E_Strings.Append_String (E_Str => E_Str, Str => "The stub for "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is either undeclared or cannot be located"); --! Common causes of this error are an error in the declaration --! of the stub or the omission of the parent unit body from the index file when 16 => E_Strings.Append_String (E_Str => E_Str, Str => "A body for package "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has already been declared"); when 17 => E_Strings.Append_String (E_Str => E_Str, Str => "A body stub for package "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has already been declared"); when 18 => E_Strings.Append_String (E_Str => E_Str, Str => "Identifier "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not the name of a package"); when 19 => E_Strings.Append_String (E_Str => E_Str, Str => "Identifier "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not the name of a procedure"); when 20 => E_Strings.Append_String (E_Str => E_Str, Str => "Illegal operator symbol"); --! Issued if a renaming declaration contains a non-existent operator. when 21 => E_Strings.Append_String (E_Str => E_Str, Str => "This entity is not an array"); --! Issued if an attempt is made to index into a name which does not --! represent an array. when 22 => E_Strings.Append_String (E_Str => E_Str, Str => "The type in this declaration is not consistent with the previous declaration of "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! Occurs when the type given in the Ada declaration of an own variable --! differs from that --! "announced" in the package's own variable clause. when 23 => E_Strings.Append_String (E_Str => E_Str, Str => "No parameter association is given for formal parameter "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); when 24 => E_Strings.Append_String (E_Str => E_Str, Str => "The identifier "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " (exported by called subprogram) is not visible at this point"); --! 
When a procedure is called any global variables exported by that --! procedure must be visible at the point of call. This error message --! indicates that the global variable concerned is not visible. --! It may be that it needs to be added to the global annotation of --! the procedure containing the call (or some further enclosing subprogram) --! or it may be that an inherit clause is missing from the package containing --! the call. when 25 => E_Strings.Append_String (E_Str => E_Str, Str => "The identifier "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " (imported by called subprogram) is not visible at this point"); --! When a procedure is called any global variables imported by that --! procedure must be visible at the point of call. This error message --! indicates that the global variable concerned is not visible. --! It may be that it needs to be added to the global annotation of --! the subprogram containing the call (or some further enclosing subprogram) --! or it may be that an inherit clause is missing from the package containing --! the call. when 26 => E_Strings.Append_String (E_Str => E_Str, Str => "The deferred constant "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " does not have an associated full definition"); --! Issued at the end of a package specification if no full declaration --! has been supplied for a deferred constant declared in the package --! specification. when 27 => E_Strings.Append_String (E_Str => E_Str, Str => "The private type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " does not have an associated full definition"); --! Issued at the end of a package specification if no full declaration --! has been supplied for a private type declared in the package specification. when 28 => E_Strings.Append_String (E_Str => E_Str, Str => "The own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " does not have a definition"); --! Issued at the end of a package body if an own variable announced in the --! package specification has neither been given an Ada declaration nor refined. when 29 => E_Strings.Append_String (E_Str => E_Str, Str => "The subprogram "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => ", declared in the package specification, does not have an associated body"); when 30 => E_Strings.Append_String (E_Str => E_Str, Str => "Attribute "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not yet implemented in the Examiner"); --! The attribute is identified in Annex K of the SPARK 95 report as a --! valid SPARK 95 --! attribute but the Examiner does not currently support it. --! It is --! possible to work round the omission by putting the use of the attribute --! inside a --! suitable function which is hidden from the Examiner. when 31 => E_Strings.Append_String (E_Str => E_Str, Str => "The prefix of this attribute is not an object or type"); when 32 => E_Strings.Append_String (E_Str => E_Str, Str => "Illegal type conversion"); --! Likely causes are type conversions involving record types or --! non-convertible arrays. 
when 33 => E_Strings.Append_String (E_Str => E_Str, Str => "Illegal aggregate"); --! Issued if the prefix of an aggregate is not a composite type. when 34 => E_Strings.Append_String (E_Str => E_Str, Str => "Illegal procedure call"); --! Issued if a call is made to a user-defined subprogram in a --! package initialization part. when 35 => E_Strings.Append_String (E_Str => E_Str, Str => "Binary operator is not declared for types "); if Err_Num.Name1 /= Error_Types.NoName then Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); if Err_Num.Name2 /= Error_Types.NoName then E_Strings.Append_String (E_Str => E_Str, Str => " and "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); end if; end if; --! Indicates use of an undeclared binary operator; this message means that --! the type on each side --! of the operator cannot appear with the operator used. e.g. --! attempting to add an integer to --! an enumeration literal. when 36 => E_Strings.Append_String (E_Str => E_Str, Str => "Expression is not static"); when 37 => E_Strings.Append_String (E_Str => E_Str, Str => "Expression is not constant"); when 38 => E_Strings.Append_String (E_Str => E_Str, Str => "Expression is not of the expected type"); when 39 => E_Strings.Append_String (E_Str => E_Str, Str => "Illegal use of unconstrained type"); --! An unconstrained array type or variable of such a type is --! illegally used. Use of --! unconstrained arrays in SPARK is limited to passing them as --! parameters, indexing into them --! and taking attributes of them. This message also arises if a string --! literal is used as an actual parameter where the formal parameter is a string subtype. --! In this case, the error can be removed by qualifying the string literal with the subtype --! name. when 40 => E_Strings.Append_String (E_Str => E_Str, Str => "Numeric or Time_Span type required"); --! This operator is only defined for numeric types and, if the --! Ravenscar Profile is --! selected, for type Ada.Real_Time.Time_Span. when 41 => E_Strings.Append_String (E_Str => E_Str, Str => "Array type required"); --! Issued if a subtype declaration taking the form of a constrained --! subtype of an --! unconstrained array type is encountered but with a type mark which --! does not represent an --! array. when 42 => E_Strings.Append_String (E_Str => E_Str, Str => "Incompatible types"); --! Issued when a name represents an object which is not of the required type. when 43 => E_Strings.Append_String (E_Str => E_Str, Str => "Range is not constant"); when 44 => E_Strings.Append_String (E_Str => E_Str, Str => "Scalar type required"); --! The bounds of an explicit range must be scalar types. when 45 => E_Strings.Append_String (E_Str => E_Str, Str => "Range is not static"); when 46 => E_Strings.Append_String (E_Str => E_Str, Str => "Discrete type required"); when 47 => E_Strings.Append_String (E_Str => E_Str, Str => "The definition of this type contains errors which may make this array definition invalid"); --! Issued if an array type definition is encountered where one or --! more of the index types used in the definition contained errors in its original declaration. For --! example, SPARK requires array index bounds to be constant (known at compile time) so an attempt --! to use an illegal subtype with variable bounds as an array index will generate this message. 
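--! For example (an illustrative sketch using hypothetical names, not taken from
--! the Examiner sources), an index subtype whose bounds are not constant is
--! itself rejected, and any array type declared over it then attracts this
--! message:
--!
--!    N : Integer;                               -- a variable, not usable as a bound
--!    subtype Index is Integer range 1 .. N;     -- rejected: range is not constant
--!    type Table is array (Index) of Integer;    -- message 47 reported here
--!
--! Making the upper bound a constant (or other static expression) removes both
--! messages.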
when 49 => E_Strings.Append_String (E_Str => E_Str, Str => "Attribute "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " takes only one argument"); --! Only SPARK 95 attributes 'Min and 'Max require two arguments. when 50 => E_Strings.Append_String (E_Str => E_Str, Str => "Initializing expression must be constant"); --! To assign a non-constant expression to a variable, an assignment statement --! in the body of the program unit (following the 'begin') must be used. when 51 => E_Strings.Append_String (E_Str => E_Str, Str => "Arrays may not be ordered"); --! Issued if an ordering operator such as "<" is encountered between --! objects of an array --! type other than string or a constrained subtype of string. when 52 => E_Strings.Append_String (E_Str => E_Str, Str => "Only Scalar, String and Time types may be ordered"); --! Ordering operators are only defined for scalar types and type --! String plus, if the --! Ravenscar Profile is selected, types Time and Time_Span in package --! Ada.Real_Time. when 53 => E_Strings.Append_String (E_Str => E_Str, Str => "Illegal others clause"); --! In SPARK record aggregates may not contain an others clause. when 54 => E_Strings.Append_String (E_Str => E_Str, Str => "Illegal attribute: "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! Issued when an attribute not supported by SPARK is used. when 55 => E_Strings.Append_String (E_Str => E_Str, Str => "Attribute "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " takes no argument"); when 56 => E_Strings.Append_String (E_Str => E_Str, Str => "Argument expected"); when 57 => E_Strings.Append_String (E_Str => E_Str, Str => "Fixed type definition must have associated range constraint"); when 58 => Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " expected, to repeat initial identifier"); --! Occurs at the end of a package, subprogram, protected type, task type --! or loop if the terminal identifier does not --! match the name or label originally given. when 59 => E_Strings.Append_String (E_Str => E_Str, Str => "Composite subtype definition may not have associated range constraint"); --! A subtype of the form applicable to a subrange of a scalar type has --! been encountered --! but the type provided is not a scalar type. when 60 => E_Strings.Append_String (E_Str => E_Str, Str => "Illegal choice in record aggregate"); --! In SPARK record aggregates may not contain multiple choices, each --! field must be --! assigned a value individually. when 61 => E_Strings.Append_String (E_Str => E_Str, Str => "Illegal occurrence of body stub - a body stub may only occur in a compilation unit"); when 62 => E_Strings.Append_String (E_Str => E_Str, Str => "A body for the embedded package "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is required"); --! Issued if an embedded package declares subprograms or own variables --! and no body is --! provided. 
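--! For example (an illustrative sketch using hypothetical names, not taken from
--! the Examiner sources), an embedded package that declares a subprogram must be
--! completed by a body in the enclosing declarative part:
--!
--!    package Inner is
--!       procedure Op;
--!    end Inner;          -- message 62 if no "package body Inner is ... end Inner;"
--!                        -- appears later in the enclosing body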
when 63 => Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not a type mark"); when 64 => E_Strings.Append_String (E_Str => E_Str, Str => "Parameters of function subprograms must be of mode in"); when 65 => E_Strings.Append_String (E_Str => E_Str, Str => "Formal parameters of renamed operators may not be renamed"); --! The names of the parameters used in renaming declarations may not --! be altered from Left, --! Right for binary operators and Right for unary operators. These are --! the names given for --! the parameters in the ARM and the SPARK Definition requires that --! parameter names are not --! changed. when 66 => E_Strings.Append_String (E_Str => E_Str, Str => "Unexpected package initialization - no own variables of package "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " require initialization"); --! Either the package does not have an initializes annotation or all --! the own variables --! requiring initialization were given values at the point of declaration. when 67 => E_Strings.Append_String (E_Str => E_Str, Str => "Illegal machine code insertion. Machine code functions are not permitted in SPARK 83."); --! This is an Ada 83 rule. Machine code can only be used in procedures. when 68 => E_Strings.Append_String (E_Str => E_Str, Str => "Illegal operator renaming - operators are defined on types not subtypes"); --! Issued if an attempt is made to rename an operator using a subtype --! of the type for --! which it was originally implicitly declared. when 69 => E_Strings.Append_String (E_Str => E_Str, Str => "pragma "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has two parameters"); when 70 => E_Strings.Append_String (E_Str => E_Str, Str => "pragma "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " expected"); --! pragma Import expected. --! pragma Interface expected. when 71 => E_Strings.Append_String (E_Str => E_Str, Str => "This expression does not represent the expected subprogram or variable name "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! Issued if the name supplied in a pragma interface, import or attach_handler --! does not match the name of the associated subprogram or variable. when 72 => E_Strings.Append_String (E_Str => E_Str, Str => "Unexpected pragma "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! Unexpected pragma Import. --! Pragma import may only occur in a body stub, or immediately --! after a subprogram declaration in the visible part of a package, --! or immediately after a variable declaration. --! 72 --! Unexpected pragma Interface. --! Pragma interface may only occur in a body stub or immediately --! after a subprogram declaration in the visible part of a package. when 73 => Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has already been declared or refined"); --! Issued if an Ada declaration is given for an own variable which --! has been refined, or in --! a refinement clause if an own variable is refined more than once. 
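--! For example (an illustrative sketch using hypothetical names, not taken from
--! the Examiner sources), giving an Ada declaration for an own variable that has
--! already been refined raises this message:
--!
--!    --# own State;              -- in the package specification
--!    ...
--!    --# own State is A, B;      -- refinement in the package body
--!    State : Integer;            -- message 73: State has been refined, so only
--!                                -- the constituents A and B may be declared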
when 74 => Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " does not occur in the package own variable list"); --! A subject of a refinement definition of a package must be an own --! variable of that --! package. when 75 => E_Strings.Append_String (E_Str => E_Str, Str => "Illegal use of inherited package"); --! Issued if an attempt is made to refine an own variable onto an own --! variable of a --! non-embedded package. when 76 => E_Strings.Append_String (E_Str => E_Str, Str => "Identifier "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is already declared and cannot be"); E_Strings.Append_String (E_Str => E_Str, Str => " the name of an embedded package"); --! Issued when a refinement clause in a package body attempts to name --! an embedded package --! own variable as a refinement constituent and the name given for the --! embedded package is --! already in use. when 77 => E_Strings.Append_String (E_Str => E_Str, Str => "Variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " should occur in this own variable clause"); --! Occurs in the own variable clause of a package embedded in another --! package if an own --! variable which is a refinement constituent of an own variable of the --! enclosing package is --! omitted. when 78 => E_Strings.Append_String (E_Str => E_Str, Str => "Initialization of own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is ineffective"); --! Issued if an own variable occurs in the initialization clause of --! an embedded package --! and the own variable concerned is a refinement constituent of another --! own variable which --! is not listed in the initialization specification of its package. when 79 => E_Strings.Append_String (E_Str => E_Str, Str => "Variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " should occur in this initialization specification"); --! Occurs in the initialization clause of a package embedded in another --! package if an own --! variable which is a refinement constituent of an initialized own variable --! of the enclosing --! package is omitted. when 80 => E_Strings.Append_String (E_Str => E_Str, Str => "Unexpected own variable clause - "); E_Strings.Append_String (E_Str => E_Str, Str => "no variable in this clause "); E_Strings.Append_String (E_Str => E_Str, Str => "is a refinement constituent"); when 81 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variable clause expected - "); E_Strings.Append_String (E_Str => E_Str, Str => "own variables of this package "); E_Strings.Append_String (E_Str => E_Str, Str => "occur as refinement constituents"); when 82 => E_Strings.Append_String (E_Str => E_Str, Str => "Unexpected initialization specification - "); E_Strings.Append_String (E_Str => E_Str, Str => "no own variables of this package "); E_Strings.Append_String (E_Str => E_Str, Str => "require initialization"); --! An own variable initialization clause and that of its refinement --! constituents must be --! consistent. 
when 83 => E_Strings.Append_String (E_Str => E_Str, Str => "Initialization specification expected - "); E_Strings.Append_String (E_Str => E_Str, Str => "own variables of this package require "); E_Strings.Append_String (E_Str => E_Str, Str => "initialization"); --! Issued if an own variable does not occur in the initialization --! clause of an embedded --! package and the own variable concerned is a refinement constituent --! of another own variable --! which is listed in the initialization clause of its package. when 84 => E_Strings.Append_String (E_Str => E_Str, Str => "The refinement constituent "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " does not have a declaration"); --! Issued at the end of a package if a refinement constituent of a --! refined own variable --! has not been given an Ada declaration or further refined. when 85 => Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not a constituent of any abstract own variable appearing" & " in the earlier global definition for this subprogram"); --! A variable XXX which has occurred in a refined global annotation --! is neither a variable --! that occurred in the earlier global definition nor a refinement --! constituent of any such --! variable. when 86 => E_Strings.Append_String (E_Str => E_Str, Str => "At least one constituent of "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " was expected in this refined global definition"); --! If the global annotation of a procedure specification contains --! an own variable and that --! own variable is later refined then at least one refinement constituent --! of the own variable --! shall appear in the second global annotation supplied for the procedure --! body. when 87 => E_Strings.Append_String (E_Str => E_Str, Str => "Refined global definition expected for subprogram "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! A global definition containing abstract own variables was given in --! the definition for --! subprogram XXX, in a package specification. A refined global definition --! is required in the --! package body. when 88 => E_Strings.Append_String (E_Str => E_Str, Str => "Variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not a refinement constituent"); when 89 => Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not a private type declared in this package"); when 90 => E_Strings.Append_String (E_Str => E_Str, Str => "This operator may not be applied to ranges"); when 91 => E_Strings.Append_String (E_Str => E_Str, Str => "Ranges may not be assigned"); when 92 => E_Strings.Append_String (E_Str => E_Str, Str => "Named association may not be used here"); when 93 => E_Strings.Append_String (E_Str => E_Str, Str => "Number of index expressions differs from number of dimensions of array "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); when 94 => E_Strings.Append_String (E_Str => E_Str, Str => "Condition is not boolean"); --! Issued anywhere a boolean expression is required (e.g. in if, --! exit and while statements) and the expression provided --! is not of type boolean. 
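--! For example (an illustrative sketch using hypothetical names, not taken from
--! the Examiner sources):
--!
--!    Count : Integer;
--!    ...
--!    if Count then        -- message 94: Count is of type Integer, not Boolean
--!       null;
--!    end if;
--!
--! The condition must be a Boolean expression, for example "if Count /= 0 then".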
when 95 => E_Strings.Append_String (E_Str => E_Str, Str => "Type mark expected"); when 96 => E_Strings.Append_String (E_Str => E_Str, Str => "Attribute "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not valid with this prefix"); when 97 => E_Strings.Append_String (E_Str => E_Str, Str => "Attribute BASE may only appear as a prefix"); --! 'BASE may only be used as a prefix to another attribute. when 98 => E_Strings.Append_String (E_Str => E_Str, Str => "This expression is not a range"); when 99 => E_Strings.Append_String (E_Str => E_Str, Str => "Unconstrained array expected"); --! Occurs if a subtype is declared of an array which is already constrained. when 100 => E_Strings.Append_String (E_Str => E_Str, Str => "Floating point type mark expected"); when 101 => E_Strings.Append_String (E_Str => E_Str, Str => "Fixed point type mark expected"); when 102 => E_Strings.Append_String (E_Str => E_Str, Str => "This is not the name of a field of record "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); when 103 => E_Strings.Append_String (E_Str => E_Str, Str => "A value has already been supplied for field "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); when 104 => E_Strings.Append_String (E_Str => E_Str, Str => "No value has been supplied for field "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); when 105 => E_Strings.Append_String (E_Str => E_Str, Str => "More values have been supplied than number of fields in record "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); when 106 => E_Strings.Append_String (E_Str => E_Str, Str => "Range is not of the expected type"); when 107 => E_Strings.Append_String (E_Str => E_Str, Str => "Expression is not of the expected type. Actual type is "); if Err_Num.Name1 /= Error_Types.NoName then Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); if Err_Num.Name2 /= Error_Types.NoName then E_Strings.Append_String (E_Str => E_Str, Str => ". Expected type is "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); end if; end if; when 108 => E_Strings.Append_String (E_Str => E_Str, Str => "Expression is not of the expected type. Expected any Integer type"); when 109 => E_Strings.Append_String (E_Str => E_Str, Str => "Expression is not of the expected type. Expected any Real type"); when 110 => E_Strings.Append_String (E_Str => E_Str, Str => "Use type clauses following an embedded package are not currently supported by the Examiner"); when 111 => E_Strings.Append_String (E_Str => E_Str, Str => "Package renaming is not currently supported by the Examiner"); when 112 => E_Strings.Append_String (E_Str => E_Str, Str => "A use type clause may not appear here. They are only permitted " & "as part of a context clause or directly following an embedded " & "package specification"); when 113 => E_Strings.Append_String (E_Str => E_Str, Str => "Private subprogram declarations are not permitted in SPARK 83"); --! Private subprograms would not be callable in SPARK 83 and are therefore not --! permitted; they may be declared and called in SPARK 95. when 114 => E_Strings.Append_String (E_Str => E_Str, Str => "Subtype mark or Range may not be used in an expression in this context"); --! A subtype mark or an explicit Range attribute may not be used in a context --! where a simple expression is expected. 
when 115 => E_Strings.Append_String (E_Str => E_Str, Str => "In a package body, an own variable annotation must include one or more refinement constituents"); --! Annotation should be of the form 'own S is A, B, C;' when 116 => E_Strings.Append_String (E_Str => E_Str, Str => "View conversion to own type is not permitted in target of an assignment"); when 117 => E_Strings.Append_String (E_Str => E_Str, Str => "Aggregate must be qualified with subtype mark"); --! Aggregates are qualified expressions so they must be prefixed with a --! subtype mark. An exception is made in the case of aggregate assignments to --! unconstrained arrays as the rules of Ada do not permit unconstrained array --! aggregates to be qualified when 118 => E_Strings.Append_String (E_Str => E_Str, Str => "Aggregate assignment to unconstrained multi-dimensional array not permitted"); --! Unqualified aggregates may only be used in assignments to one-dimensional unconstrained --! arrays. SPARK does not permit aggregate assignment to multi-dimensional unconstrained --! arrays when 119 => E_Strings.Append_String (E_Str => E_Str, Str => "Unary operator is not declared for type "); if Err_Num.Name1 /= Error_Types.NoName then Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); end if; --! Indicates use of an undeclared unary operator; this message means that --! the type on the right hand side --! of the operator cannot appear with the operator used. e.g. --! attempting to negate an enumeration literal. when 120 => E_Strings.Append_String (E_Str => E_Str, Str => "Pragma import not allowed here because variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is already initialized. See ALRM B.1(24)"); when 121 => E_Strings.Append_String (E_Str => E_Str, Str => "'Flow_Message' or 'Warning_Message' expected"); --! The identifier indicating what kind of message to justify must be either --! 'Flow_Message' or 'Warning_Message' or some unique abbreviation of them such as --! 'Fl' or even 'F'. Case is ignored. when 122 => E_Strings.Append_String (E_Str => E_Str, Str => "Error or warning number expected"); --! This item should be an integer literal representing the error or warning message that --! is being marked as expected. when 123 => E_Strings.Append_String (E_Str => E_Str, Str => "This warning number may not appear in an accept annotation"); --! It does not make sense to allow certain warnings to be justified with the accept annotation. --! In particular, attempting to justify warnings raised by the justification system itself --! could lead to some special kind of recursive hell that we would not wish to enter. when 124 => E_Strings.Append_String (E_Str => E_Str, Str => "Incorrect number of names in accept annotation: should be 0"); --! This class of error does not reference any variables, and therefore requires --! no names. when 125 => E_Strings.Append_String (E_Str => E_Str, Str => "Incorrect number of names in accept annotation: should be 1"); --! This class of error references one variable, and therefore requires --! one name. when 126 => E_Strings.Append_String (E_Str => E_Str, Str => "Incorrect number of names in accept annotation: should be 2"); --! This class of error references two variables, and therefore requires --! two names. Two names are needed to justify expected information --! flow messages such as "X is not derived from Y". --!
Note that for messages of this kind the accept annotation should list the names in the order --! "export, import" when 127 => E_Strings.Append_String (E_Str => E_Str, Str => "Incorrect number of names in accept annotation: should be 0 or 1"); --! This class of error references either zero or one variable, and therefore --! requires either zero or one name. An ineffective assignment error requires --! the name of variable being assigned to. An ineffective statement error --! has no name associated with it. when 128 => E_Strings.Append_String (E_Str => E_Str, Str => "Incorrect number of names in accept annotation: should be 1 or 2"); --! This class of error references either one or two variables, and therefore --! requires either one or two names. One name is required when the export --! is a function return value. when 129 => E_Strings.Append_String (E_Str => E_Str, Str => "Assignment to view conversion is not currently implemented"); when 130 => E_Strings.Append_String (E_Str => E_Str, Str => "A type from the current package should not appear in a use type clause"); when 131 => E_Strings.Append_String (E_Str => E_Str, Str => "The package name "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " should appear in a with clause preceding the use type clause"); when 132 => E_Strings.Append_String (E_Str => E_Str, Str => "The unit name or the name of an enclosing package of the unit should not appear in its with clause"); --! A package should not 'with' itself and a subunit --! should not 'with' the package (or main program) which declares its stub when 133 => E_Strings.Append_String (E_Str => E_Str, Str => "Name in with clause is locally redeclared"); when 134 => E_Strings.Append_String (E_Str => E_Str, Str => "A package name should not appear in its own inherit clause"); when 135 => E_Strings.Append_String (E_Str => E_Str, Str => "The package "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is undeclared or not visible, or there is a circularity in the list of inherited packages"); --! Possible causes of this error are --! an error in the inherited package specification or --! omitting an entry for the package specification from the index file or --! circular inheritance when 136 => E_Strings.Append_String (E_Str => E_Str, Str => "The own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not declared in the own variable clause of the corresponding package declaration"); --! A refinement clause of a package body defines the constituent --! parts of own variables given in the own variable clause of the --! corresponding package declaration when 137 => E_Strings.Append_String (E_Str => E_Str, Str => "The child package "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is either undeclared or not visible at this point"); --! Possible causes of this error are --! an error in the child package specification or --! omitting the child from the parent's component list in the index file --! 
or omitting the child specification entry from the index file when 138 => E_Strings.Append_String (E_Str => E_Str, Str => "Child package own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " does not appear in the own variable clause of the child package"); --! A constituent of a refinement clause which is defined in a --! child package must be an own variable of the child package when 139 => E_Strings.Append_String (E_Str => E_Str, Str => "The variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not declared in the own variable clause of this package"); --! A package can only initialize variables --! declared in its own variable clause when 140 => E_Strings.Append_String (E_Str => E_Str, Str => "The predecessor package "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is either undeclared or not visible at this point"); --! The parent of a child package must be a library package and --! must be declared prior to a child package. If using an index file --! the parent must have an entry in the index file and the child package --! must be listed as a component of the parent package when 141 => E_Strings.Append_String (E_Str => E_Str, Str => "The private type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is either undeclared or not visible at this point"); when 142 => E_Strings.Append_String (E_Str => E_Str, Str => "The subprogram prefix "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is either undeclared or not visible at this point"); --! The prefix should appear in the inherit clause of the current package when 143 => E_Strings.Append_String (E_Str => E_Str, Str => "The subprogram "); if Err_Num.Name2 /= Error_Types.NoName then Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => "."); end if; Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is either undeclared or not visible at this point"); when 144 => E_Strings.Append_String (E_Str => E_Str, Str => "The dotted name "); if Err_Num.Name2 /= Error_Types.NoName then Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => "."); end if; Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is either undeclared or not visible at this point"); --! The name must denote an entire variable or an own variable --! of a package. If the variable or own variable is declared in a --! separate (or parent) package, the package must be included in an inherit clause --!
and the identifier prefixed with the package name when 145 => E_Strings.Append_String (E_Str => E_Str, Str => "The identifier "); if Err_Num.Name2 /= Error_Types.NoName then Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => "."); end if; Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is either undeclared or not visible at this point"); --! The identifier should be a typemark. If the typemark is declared in a --! separate (or parent) package, the package must be included in an inherit clause --! and the identifier prefixed with the package name. --! Ensure that there are no errors in the declaration of the typemark when 148 => E_Strings.Append_String (E_Str => E_Str, Str => "The abstract proof type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " may not be used to define an own variable in another package"); --! Own variables may be "type announced" as being of an abstract proof --! type only where --! that type is declared later in the same package. Thus --# own State : --! T; is legal if --! --# type T is abstract; appears later in the package; however, --# own --! State : P.T; is --! illegal if T is an abstract proof type declared in remote package P. when 149 => E_Strings.Append_String (E_Str => E_Str, Str => "More than one own variable has been announced as being of type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " which may not therefore be declared as an abstract proof type"); --! Occurs when an own variable clause announces more than one own variable --! as being of --! a type XXX and XXX is later declared as being of an abstract proof type. --! Each abstract --! own variable must be of a unique type. when 150 => E_Strings.Append_String (E_Str => E_Str, Str => "Entire variable expected. The names of constants never appear in mandatory annotations"); --! Issued when the name of a constant is found in a mandatory annotation such as --! a global or derives annotation. Constants should not appear in such annotations. when 151 => E_Strings.Append_String (E_Str => E_Str, Str => "The variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " does not occur either in the package own variable list or as a refinement constituent"); --! A variable declared in a package must have been previously announced --! as either an own --! variable or as a concrete refinement constituent of an own variable. when 152 => E_Strings.Append_String (E_Str => E_Str, Str => "The number of formal parameters is not consistent with the previous declaration of "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); when 153 => E_Strings.Append_String (E_Str => E_Str, Str => "The declaration of formal parameter "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not consistent with the subprogram's previous declaration"); --! Issued if the name, type or parameter mode of a parameter is different --! in the --! subprogram body declaration from that declared originally.
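--! For example (an illustrative sketch using hypothetical names, not taken from
--! the Examiner sources), the parameter names, types and modes in a subprogram
--! body must repeat those of the earlier declaration exactly:
--!
--!    procedure Update (Value : in Integer);       -- declaration in the specification
--!    ...
--!    procedure Update (Item : in Integer)         -- message 153: the parameter is
--!    is begin null; end Update;                   -- named Item here but Value above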
when 154 => E_Strings.Append_String (E_Str => E_Str, Str => "The subprogram or task body "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " does not have an annotation"); --! A subprogram or task body must have a global annotation if it references --! global variables; a --! procedure or task body must have a dependency relation to perform --! information flow analysis. when 155 => E_Strings.Append_String (E_Str => E_Str, Str => "Unexpected annotation - all annotations required for procedure or task body "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " have already occurred"); --! Do not repeat global or derives annotations in the body --! (or body stub) of a subprogram, entry or task except for --! state (own variable) refinement. when 156 => E_Strings.Append_String (E_Str => E_Str, Str => "Entire variable expected"); --! Issued when an identifier which SPARK requires to be an entire --! variable represents --! something other than this. Most commonly this message occurs when --! a component of a --! structured variable appears in a core annotation. when 157 => E_Strings.Append_String (E_Str => E_Str, Str => "The name "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " already appears in the global variable list"); when 158 => Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is a formal parameter of this subprogram"); --! Issued in a global annotation if it names a formal parameter of the --! subprogram. when 159 => E_Strings.Append_String (E_Str => E_Str, Str => "The name "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has already appeared as an exported variable"); when 160 => E_Strings.Append_String (E_Str => E_Str, Str => "The name "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " already appears in the list of imported variables"); when 161 => E_Strings.Append_String (E_Str => E_Str, Str => "Exportation of "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is incompatible with its parameter mode"); --! Issued if a parameter appears as an export to a procedure when it is of --! parameter mode --! in. when 162 => E_Strings.Append_String (E_Str => E_Str, Str => "Importation of "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is incompatible with its parameter mode"); --! Issued if a parameter appears as an import to a procedure when it is of --! parameter mode --! out. when 163 => E_Strings.Append_String (E_Str => E_Str, Str => "Subprogram "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " cannot be called from here"); --! SPARK contains rules to prevent construction of programs containing --! recursive --! subprogram calls; this error message occurs if a procedure or function --! is called before --! its body has been declared. Re-ordering of subprogram bodies in the --! package concerned will --! be required. 
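--! For example (an illustrative sketch using hypothetical names, not taken from
--! the Examiner sources), within a package body the body of a called subprogram
--! must precede the call:
--!
--!    procedure First is
--!    begin
--!       Second;           -- message 163: the body of Second has not yet appeared
--!    end First;
--!
--!    procedure Second is
--!    begin
--!       null;
--!    end Second;
--!
--! Placing the body of Second before that of First removes the message.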
when 165 => E_Strings.Append_String (E_Str => E_Str, Str => "This parameter is overlapped by another one, which is exported"); --! Violation of the anti-aliasing rule. when 166 => E_Strings.Append_String (E_Str => E_Str, Str => "This parameter is overlapped by an exported global variable"); --! Violation of the anti-aliasing rule. when 167 => E_Strings.Append_String (E_Str => E_Str, Str => "Imported variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not named in the initialization specification of its package"); --! Issued when an own variable which is imported into the main program --! procedure (or a task when the Ravenscar profile is enabled) has not --! been declared as being initialized by its package. At the main program --! level the only --! imports that are permitted are initialized own variables of inherited --! packages. There are --! two possible cases to consider: (1) the main program should be importing --! the variable in --! which case it should be annotated in its package with --# initializes --! (and, of course, actually --! initialized in some way) or be an external variable or protected variable --! which is implicitly --! initialized; or (2) the own variable concerned is not initialized at --! elaboration, --! should not therefore be considered an import to the main program and --! should be removed from the --! main program's import list. when 168 => Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is a loop parameter, whose updating is not allowed"); when 169 => E_Strings.Append_String (E_Str => E_Str, Str => "Global variables of function subprograms must be of mode in"); --! It is an important property of SPARK that functions cannot have --! side-effects, therefore --! only the reading of global variable is permitted. It is usually --! convenient to omit --! modes from function global annotations but use of mode 'in' is --! permitted. when 170 => Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is a formal parameter of mode in, whose updating is not allowed"); when 171 => Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is a formal parameter of mode out, whose value cannot be read"); when 172 => E_Strings.Append_String (E_Str => E_Str, Str => "The actual parameter associated with an exported formal parameter must be an entire variable"); --! Issued if an actual parameter which is an array element is associated --! with an exported --! formal parameter in a procedure call. Exported parameters must be either --! entire variables --! or a record field. when 173 => E_Strings.Append_String (E_Str => E_Str, Str => "This exported parameter is named in the global definition of the procedure"); --! Violation of the anti-aliasing rule. when 174 => Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not an own variable"); --! Occurs in initialization specifications if something other than a --! variable is listed as --! being initialized. when 175 => E_Strings.Append_String (E_Str => E_Str, Str => """all"" can only be used in a justification when using a code generator profile."); --! A justification of an error requires the actual variables named in --! 
the error message to be referenced. The keyword "all" can only --! be used with language profiles for auto-code generators such as SCADE KCG. --! Such profiles are only available with the SPARK Pro Toolset. when 176 => Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " does not have a derives annotation so it may not be called from "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " which does have a derives annotation"); --! When analysing with flow=auto, a procedure or entry without a derives annotation --! may not be called by a procedure, task or entry with a derives annotation. This is because --! the body of the caller must be checked against its derives annotation. --! In order to calculate the correct dependency relation for the body of the caller --! there must be derives annotations present on all called procedures or entries. when 180 => E_Strings.Append_String (E_Str => E_Str, Str => "Entire composite constant expected"); --! Issued when an identifier which SPARK requires to be an entire --! composite constant represents something other than this. when 181 => E_Strings.Append_String (E_Str => E_Str, Str => "Invalid policy for constant proof rule generation"); when 182 => E_Strings.Append_String (E_Str => E_Str, Str => "Rule Policy for "); if Err_Num.Name2 /= Error_Types.NoName then Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => "."); end if; Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " already declared in current scope"); --! Issued when a rule policy has already been declared for this --! constant within this declarative region. This rule policy will --! be ineffective. when 190 => E_Strings.Append_String (E_Str => E_Str, Str => "The name "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " already appears in the inherit clause"); when 191 => E_Strings.Append_String (E_Str => E_Str, Str => "The name "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " already appears in the with clause"); when 200 => E_Strings.Append_String (E_Str => E_Str, Str => "The parameter "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is neither imported nor exported"); --! Each formal parameter of a subprogram shall be imported or exported or both. when 201 => E_Strings.Append_String (E_Str => E_Str, Str => "The global variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is neither imported nor exported"); --! Every variable in a global definition must also appear in the associated derives annotation --! where it will be either imported or exported or both. 
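--! For example (an illustrative sketch using hypothetical names, not taken from
--! the Examiner sources), every variable named in the global definition must
--! also appear in the derives annotation:
--!
--!    procedure Step (X : in Integer)
--!    --# global in out Total;
--!    --#        in     Limit;
--!    --# derives Total from Total, X;    -- message 201 for Limit: it is listed as
--!                                        -- a global but is neither imported nor
--!                                        -- exported by the dependency relation
--!
--! Either add Limit as an import of Total or remove it from the global definition.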
when 250 => E_Strings.Append_String (E_Str => E_Str, Str => "The 'Size value for type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has already been set"); when 251 => E_Strings.Append_String (E_Str => E_Str, Str => "The attribute value for "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => "'Size must be of an integer type"); when 252 => E_Strings.Append_String (E_Str => E_Str, Str => "The attribute value for "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => "'Size must be a static simple expression"); --! The value of 'Size must be static and must be of --! an integer type when 253 => E_Strings.Append_String (E_Str => E_Str, Str => "The attribute value for "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => "'Size must not be negative"); --! The value of 'Size must be a positive integer or zero when 254 => E_Strings.Append_String (E_Str => E_Str, Str => "The Size attribute can only be specified for a first subtype"); --! Setting 'Size for a user-defined non-first subtype is not permitted. --! See Ada95 LRM 13.3(48) when 255 => E_Strings.Append_String (E_Str => E_Str, Str => "The Address attribute can only be specified for a variable, a constant, or a program unit"); --! Ada95 LRM Annex N.31 defines a program unit to be either a package, a task unit, --! a protected unit, a protected entry, a generic unit, or an explicitly --! declared subprogram other than an enumeration literal. when 273 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " may not be refined because it was declared with a " & "type mark which has not subsequently been declared as " & "an abstract proof type"); --! Where a type mark is included in an own variable declaration it --! indicates that the own --! variable will either be of a concrete type of that name (which may --! be either already --! declared or be declared later in the package) or of an abstract proof --! type declared in --! the package specification. In the former case the refinement is --! illegal because own --! variables of concrete Ada types may not be refined. In the latter case --! it is legal; --! however, no suitable proof type declaration has been found in this case. when 300 => E_Strings.Append_String (E_Str => E_Str, Str => "Renaming declarations are not allowed here"); --! A renaming declaration must be the first declarative item of a package --! body or main --! program or it must be placed immediately after the declaration of --! an embedded package. when 301 => E_Strings.Append_String (E_Str => E_Str, Str => "Renaming or use type declarations here can only rename subprograms in package "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! A renaming declaration may be placed immediately after the declaration --! of an embedded --! package; in this case it may only rename subprograms declared in that --! package. 
when 302 => E_Strings.Append_String (E_Str => E_Str, Str => "The subprogram specification in this renaming declaration " & "is not consistent with the declaration of subprogram "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! Issued in a subprogram renaming declaration if it contains parameter --! names, numbers or --! types which differ from those originally declared. when 303 => E_Strings.Append_String (E_Str => E_Str, Str => "An operator can only be renamed by the same operator"); --! Issued if a renaming declaration has a different operator on each --! side of the reserved --! word RENAMES. when 304 => E_Strings.Append_String (E_Str => E_Str, Str => "A renaming declaration for operator "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not allowed"); when 305 => E_Strings.Append_String (E_Str => E_Str, Str => "The specification in this renaming declaration is not " & "consistent with the implicit declaration of operator "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! Issued in an operator renaming declaration if it contains types --! which differ from those --! applicable to the operator being renamed. when 306 => E_Strings.Append_String (E_Str => E_Str, Str => "Operator "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is already visible"); --! Occurs in an operator renaming declaration if an attempt is made --! to rename an operator --! which is already visible. (The message will also appear as a secondary --! consequence of --! trying to rename an operator between undeclared types.) when 307 => E_Strings.Append_String (E_Str => E_Str, Str => "The implicit declaration of this operator does not occur in package "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); when 308 => E_Strings.Append_String (E_Str => E_Str, Str => "Type is limited"); --! Issued if an attempt is made to assign a variable of a type which is --! limited or which --! contains a limited type. when 309 => E_Strings.Append_String (E_Str => E_Str, Str => "Operator not visible for these types"); --! This message means that the operator exists between the types on each --! side of it but --! that it is not visible. The most likely cause is that the types --! concerned are defined in --! another package and that renaming is required to make the operator visible. when 310 => E_Strings.Append_String (E_Str => E_Str, Str => "The % operator may only appear in an assert or check statement in a for loop"); --! The % operator is used to indicate the value of a variable on entry to a for loop. This is because --! the variable may be used in the exit expression of the loop and may also be modified in the body --! of the loop. Since the semantics of Ada require the exit expression to be fixed after evaluation --! we require a way of reasoning about the original value of a variable prior to any alteration in --! the loop body. No other situation requires this value so % may not be used anywhere else. when 311 => E_Strings.Append_String (E_Str => E_Str, Str => "Announced own variable types may not be implemented as unconstrained arrays"); --! Where an own variable is announced as being of some type, --! SPARK requires that type --! to be declared; the declaration cannot be in the form of an --! unconstrained array because --! SPARK prohibits unconstrained variables. 
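--! For example (an illustrative sketch using hypothetical names, not taken from
--! the Examiner sources):
--!
--!    --# own Buffer : Buffer_Type;       -- type announcement in the own clause
--!    ...
--!    type Buffer_Type is array (Positive range <>) of Character;
--!                                        -- message 311: the announced type is
--!                                        -- implemented as an unconstrained array
--!
--! Declaring Buffer_Type with constant bounds, for example
--!    type Buffer_Type is array (1 .. 80) of Character;
--! removes the message.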
when 312 => E_Strings.Append_String (E_Str => E_Str, Str => "A subprogram can only be renamed to the same name with the package prefix removed"); when 313 => E_Strings.Append_String (E_Str => E_Str, Str => "Only one main program is permitted"); when 314 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has been refined and may not appear here"); --! Issued if an attempt is made to use, in a second annotation, an own --! variable which has --! been refined. Second annotations should use the appropriate refinement --! constituents of the --! own variable. when 315 => E_Strings.Append_String (E_Str => E_Str, Str => "Unsupported proof context"); --! Certain proof contexts have been included in the syntax of SPARK but --! are not yet --! supported; this error message results if one is found. when 316 => E_Strings.Append_String (E_Str => E_Str, Str => "Selected components are not allowed for "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " since type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is private here"); --! If a type is private, then record field selectors may not be used. --! In pre- and post-conditions, a proof function can be declared to --! yield the required attribute of a private type. when 317 => E_Strings.Append_String (E_Str => E_Str, Str => "Tilde, in a function return annotation, may only be applied to an external variable of mode IN"); --! The tilde decoration indicates the initial value of a variable or --! parameter which is both imported and exported. A function may not have an explicit side effect --! on a program variable and so cannot be regarded as exporting such a variable. For modelling purposes --! a read of an external (stream) variable is regarded as having a side effect (outside the SPARK --! boundary). Since it may be necessary to refer to the initial value of the external variable, before --! this implicit side effect occurs, the use of tilde is allowed only for external variables of mode IN which --! are globally referenced by function. when 318 => E_Strings.Append_String (E_Str => E_Str, Str => "Tilde or Percent may only be applied to variables"); --! The tilde decoration indicates the initial value of a variable or --! parameter which is --! both imported and exported. Percent indicates the value of a variable --! on entry to a for loop; this message occurs if either operator is applied --! to any other object. when 319 => E_Strings.Append_String (E_Str => E_Str, Str => "Tilde may only be applied to a variable which is both imported and exported"); --! The tilde decoration indicates the initial value of a variable or --! parameter which is --! both imported and exported; this message occurs if the variable --! concerned is either --! exported only or imported only in which case no distinction between --! its initial and final --! value is required. when 320 => E_Strings.Append_String (E_Str => E_Str, Str => "Tilde or Percent may only be applied to an entire variable"); --! Tilde (and %) may not be applied to an element of an array or field of a record. --! e.g. to --! indicate the initial value of the Ith element of array V use V~(I) --! not V(I)~. 
when 321 => E_Strings.Append_String (E_Str => E_Str, Str => "Tilde may not appear in pre-conditions"); --! Since it does not make sense to refer to anything other than the --! initial value of a --! variable in a pre-condition there is no need to use tilde to distinguish --! initial from --! final values. when 322 => E_Strings.Append_String (E_Str => E_Str, Str => "Only imports may be referenced in pre-conditions or return expressions"); --! Pre-conditions are concerned with the initial values of information --! carried into a --! subprogram. Since only imports can do this only imports can appear in --! pre-condition --! expressions. when 323 => E_Strings.Append_String (E_Str => E_Str, Str => "Updates may only be applied to records or arrays"); --! The extended SPARK update syntax is only used to express changes to --! components of a --! structured variable. when 324 => E_Strings.Append_String (E_Str => E_Str, Str => "Only one field name may appear here"); --! When using the extended SPARK update syntax for a record, you can not --! update more than --! one element in each clause of the update. For example, you cannot use --! [x,y => z], you --! must instead use [x => z; y => z]. when 325 => E_Strings.Append_String (E_Str => E_Str, Str => "Type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has not been declared"); --! Occurs if a type is "announced" as part of an own variable --! clause and the end --! of the package is reached without an Ada declaration for a type of --! this name being found. when 326 => E_Strings.Append_String (E_Str => E_Str, Str => "Predicate is not boolean"); --! Occurs anywhere where a proof context is found not to be a boolean --! expression. when 327 => Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is a global variable which may not be updated in a function subprogram"); when 328 => E_Strings.Append_String (E_Str => E_Str, Str => "The identifier "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " (exported by called subprogram) may not be updated in a function subprogram"); --! Occurs if a function calls a procedure which exports a global variable; --! this would --! create an illegal side-effect of the function. when 329 => E_Strings.Append_String (E_Str => E_Str, Str => "Illegal function call"); --! Issued if a call is made to a user-defined subprogram in a package --! initialization part. when 330 => E_Strings.Append_String (E_Str => E_Str, Str => "Illegal use of an own variable not of this package"); --! Issued if an attempt is made, in a package initialization part, to --! update an own --! variable of a non-enclosing package. when 331 => E_Strings.Append_String (E_Str => E_Str, Str => "Private types may not be unconstrained arrays"); when 332 => E_Strings.Append_String (E_Str => E_Str, Str => "This private type was not declared as limited"); --! Issued where the type contains a component which is a limited private --! type, but where --! the declaration of this type in the visible part of the package does --! not specify that the --! type is limited. 
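--! For example (an illustrative sketch; Keys.Key_Type is an invented limited private type from another package), the visible declaration below is rejected because it is not marked limited:
--!    type Token is private;            -- reported here; "limited private" is required
--! private
--!    type Token is record
--!       K : Keys.Key_Type;             -- component of a limited private type
--!    end record;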
when 333 => E_Strings.Append_String (E_Str => E_Str, Str => "Initialization of "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not announced in the initialization clause of this package"); --! Issued when an own variable is initialized either by assignment or --! by having a pragma Import attached to it when initialization of the variable --! is not announced in its package's own variable initialization specification. when 334 => E_Strings.Append_String (E_Str => E_Str, Str => "Identifier "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not the name of a function"); when 335 => E_Strings.Append_String (E_Str => E_Str, Str => "This annotation should be placed with the declaration of function "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! Issued if a function is declared in a package specification without an --! annotation but --! one is then supplied on the function body. when 336 => E_Strings.Append_String (E_Str => E_Str, Str => "Unexpected annotation - all annotations required for function "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " have already occurred"); when 337 => E_Strings.Append_String (E_Str => E_Str, Str => "Package "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " may not be used as a prefix here"); --! Selected component notation may not be used in places where an item is --! directly --! visible. when 338 => E_Strings.Append_String (E_Str => E_Str, Str => "Scalar parameter "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is of mode in out and must appear as an import"); --! Parameters passed as mode in out must be listed as imports in the --! subprogram's --! dependency relation if they are of scalar types. The rule also applies --! to a parameter of a --! private type if its full declaration is scalar. when 339 => E_Strings.Append_String (E_Str => E_Str, Str => "Subprogram "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " was not declared in package "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); when 340 => E_Strings.Append_String (E_Str => E_Str, Str => "Only operators may be renamed in package specifications"); --! User-declared subprograms may not be renamed in package specifications --! although the --! implicitly declared function subprograms associated with operators may be. when 341 => E_Strings.Append_String (E_Str => E_Str, Str => "A range may not appear here"); --! Issued if a range is found where a single value is expected, for example, --! if an array --! slice is constructed. when 342 => E_Strings.Append_String (E_Str => E_Str, Str => "This proof annotation should be placed with the declaration of subprogram "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! Like global and derives annotations, proof annotations should be placed --! on the first --! appearance of a subprogram. There may also be a requirement for a --! second proof annotation --! on a subprogram body where it references an abstract own variable. 
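--! For example (an illustrative sketch with invented names), the proof annotation belongs with the first declaration of the subprogram:
--!    procedure Push (S : in out Stack; X : in Integer);
--!    --# pre not Is_Full (S);
--! Supplying the precondition only on the body of Push is reported as an error.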
when 343 => E_Strings.Append_String (E_Str => E_Str, Str => "Unexpected proof annotation - all annotations required for subprogram "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " have already occurred"); --! Issued if a second proof annotation for a subprogram is found but --! the subprogram does --! not reference any abstract own variables. A second annotation is --! only required where it --! is necessary to express both an abstract (external) and a refined --! (internal) view of an --! operation. when 399 => E_Strings.Append_String (E_Str => E_Str, Str => "Range error in annotation expression"); --! Issued if a proof annotation contains an expression that would cause --! a constraint error --! if it were in an executable Ada statement. For example: "--# post X = --! T'Succ(T'Last);" --! VCs generated from such malformed predicates would always be unprovable. when 400 => E_Strings.Append_String (E_Str => E_Str, Str => "Expression contains division by zero"); --! Issued when a static expression, evaluated using perfect arithmetic, --! is found to --! contain a division by zero. when 401 => E_Strings.Append_String (E_Str => E_Str, Str => "Illegal numeric literal"); --! Issued when a numeric literal is illegal because it contains, for example, --! digits not --! compatible with its number base. when 402 => E_Strings.Append_String (E_Str => E_Str, Str => "Constraint_Error will be raised here"); --! Issued whenever a static expression would cause a constraint error. --! e.g. assigning a --! value to a constant outside the constant's type range. In SPARK a static --! expression may --! not yield a value which violates a range constraint. when 403 => E_Strings.Append_String (E_Str => E_Str, Str => "Argument value is inconsistent with the number of dimensions of array type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! Issued when an array attribute containing an argument is found and the --! value of the --! argument is inconsistent with the number of dimensions of the array --! type to which it is --! being applied. when 407 => E_Strings.Append_String (E_Str => E_Str, Str => "This choice overlaps a previous one"); --! Choices in case statements and array aggregates may not overlap. when 408 => E_Strings.Append_String (E_Str => E_Str, Str => "Case statement is incomplete"); --! A case statement must either explicitly supply choices to cover the --! whole range of the --! (sub)type of the controlling expression, or it must supply an others choice. when 409 => E_Strings.Append_String (E_Str => E_Str, Str => "Empty range specified"); --! In SPARK, no static range is permitted to be null. when 410 => E_Strings.Append_String (E_Str => E_Str, Str => "Choice out of range"); --! The choices in case statements and array aggregates must be within --! the constraints of --! the appropriate (sub)type. when 411 => E_Strings.Append_String (E_Str => E_Str, Str => "Others clause required"); --! Issued where an others clause is required to satisfy the Ada language rules. when 412 => E_Strings.Append_String (E_Str => E_Str, Str => "Explicit boolean range not permitted"); when 413 => E_Strings.Append_String (E_Str => E_Str, Str => "Invalid range constraint"); --! Issued where a range constraint is outside the range of the (sub)type to --! which the --! constraint applies. when 414 => E_Strings.Append_String (E_Str => E_Str, Str => "Array aggregate is incomplete"); --! 
An array aggregate must either explicitly supply values for all array --! elements or --! provide an others clause. when 415 => E_Strings.Append_String (E_Str => E_Str, Str => "Too many entries in array aggregate"); --! Issued where an array aggregate using positional association contains --! more entries than --! required by the array index type. when 416 => E_Strings.Append_String (E_Str => E_Str, Str => "Type may not have an empty range"); when 417 => E_Strings.Append_String (E_Str => E_Str, Str => "String subtypes must have a lower index bound of 1"); when 418 => E_Strings.Append_String (E_Str => E_Str, Str => "Index upper and/or lower bounds do not match those expected"); --! Issued where assignment, association or type conversion is attempted --! between two --! different constrained subtypes of the same unconstrained array type, and --! where the index --! bounds do not match. when 419 => Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => "."); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has been renamed locally, so the prefix "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " must not be used"); --! When an entity is renamed, the fully qualified name is no longer visible, --! and so must not be used. when 420 => E_Strings.Append_String (E_Str => E_Str, Str => "Array index(es) not convertible"); --! Issued when an attempt is made to convert between two arrays whose indexes --! are neither --! of the same type nor numeric. when 421 => E_Strings.Append_String (E_Str => E_Str, Str => "Array components are not of the expected type"); --! Issued when a type conversion attempts to convert between two array types --! whose --! components are of different types. when 422 => E_Strings.Append_String (E_Str => E_Str, Str => "Array component constraints do not match those expected"); --! Issued when a type conversion attempts to convert between two array types --! whose --! components are of the same type but do not have constraints which can be --! statically --! determined to be identical. when 423 => E_Strings.Append_String (E_Str => E_Str, Str => "Array has different number of dimensions from that expected"); --! Issued when attempting to convert between two array types which have --! different numbers --! of dimensions. when 424 => E_Strings.Append_String (E_Str => E_Str, Str => "Attributes are not permitted in a String concatenation expression"); --! Character attributes such as 'Val, 'Pos, 'Succ and 'Pred are not --! permitted below a concatenation operator in a String expression. when 425 => E_Strings.Append_String (E_Str => E_Str, Str => "String literals may not be converted"); --! Issued if the argument of a type conversion is a string literal. A common --! cause is an --! attempt to type qualify a string and accidentally omitting the tick character. when 500 => E_Strings.Append_String (E_Str => E_Str, Str => "Mode expected"); --! Issued when performing data flow analysis only where a subprogram has no --! dependency --! clause and its global variables have not been given modes in the global --! annotation. when 501 => E_Strings.Append_String (E_Str => E_Str, Str => "Dependency relation expected"); --! A dependency relation is required for each procedure if information flow --! analysis is to --! be performed.
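--! For example (an illustrative sketch), a minimal dependency relation:
--!    procedure Swap (X, Y : in out Integer);
--!    --# derives X from Y &
--!    --#         Y from X;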
when 502 => E_Strings.Append_String (E_Str => E_Str, Str => "Exportation of "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is incompatible with its global mode"); --! Issued when a procedure has both a global annotation with modes and a --! dependency --! relation, and a global of mode in is listed as an export in the dependency --! relation. when 503 => E_Strings.Append_String (E_Str => E_Str, Str => "Importation of "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is incompatible with its global mode"); --! Issued when a procedure has both a global annotation with modes and a --! dependency --! relation, and a global of mode out is listed as an import in the --! dependency relation. when 504 => E_Strings.Append_String (E_Str => E_Str, Str => "Parameter "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is of mode in out and must appear as an import"); when 505 => E_Strings.Append_String (E_Str => E_Str, Str => "Global variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is of mode in out and must appear as an import"); --! Issued where a procedure has both a global annotation with modes and --! a dependency --! relation, and a global variable of mode in out is not listed as an --! import in the --! dependency relation. when 506 => E_Strings.Append_String (E_Str => E_Str, Str => "Parameter "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is of mode in out and must appear as an export"); when 507 => E_Strings.Append_String (E_Str => E_Str, Str => "Global variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is of mode in out and must appear as an export"); --! Issued where a procedure has both a global annotation with modes --! and a dependency --! relation, and a global variable of mode in out is not listed as an --! export in the --! dependency relation. when 508 => E_Strings.Append_String (E_Str => E_Str, Str => "This global variable is a parameter of mode in and can only have the global mode in"); when 509 => E_Strings.Append_String (E_Str => E_Str, Str => "Unexpected refined dependency relation"); --! When using refinement in automatic flow analysis mode, if there is a --! dependency relation on the subprogram specification then there must --! also be one on the body. Similarly, if there is no dependency relation --! on the specification then the body is not permitted to have one. 
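--! For example (an illustrative sketch with invented names), a global of mode in out must appear as both an import and an export of the dependency relation:
--!    procedure Add (X : in Integer);
--!    --# global in out Total;
--!    --# derives Total from Total, X;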
when 550 => E_Strings.Append_String (E_Str => E_Str, Str => "use type clauses may only be used in SPARK95: clause ignored"); when 551 => E_Strings.Append_String (E_Str => E_Str, Str => "All operators for type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " are already visible"); when 552 => E_Strings.Append_String (E_Str => E_Str, Str => "The type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " already appears in the use type clause"); when 554 => Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is a limited private type for which no operators can be made visible"); when 555 => Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not mentioned in an earlier with clause of this compilation unit"); when 600 => E_Strings.Append_String (E_Str => E_Str, Str => "pragma Import has a minimum of 2 and a maximum of 4 parameters"); when 601 => E_Strings.Append_String (E_Str => E_Str, Str => "Convention, Entity, External_Name or Link_Name expected"); when 602 => E_Strings.Append_String (E_Str => E_Str, Str => "An association for "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has already been given"); when 603 => E_Strings.Append_String (E_Str => E_Str, Str => "No association for "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " was given"); when 604 => E_Strings.Append_String (E_Str => E_Str, Str => "This package may not have a body - consider use of pragma Elaborate_Body"); --! In Ada 95, a package body is illegal unless it is required for the --! purpose of providing --! a subprogram body, or unless this pragma is used. This error is --! issued where a package --! body is found for a package whose specification does not require a body. when 605 => E_Strings.Append_String (E_Str => E_Str, Str => "pragma Elaborate_Body has one parameter"); when 606 => E_Strings.Append_String (E_Str => E_Str, Str => "This expression does not represent the expected package name "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! Issued when the parameter to a pragma Elaborate_Body is invalid. when 607 => E_Strings.Append_String (E_Str => E_Str, Str => "This package requires a body and must therefore" & " include either pragma Elaborate_Body or a subprogram " & "declaration"); --! Issued where a package specification contains no subprogram declarations, --! but whose own --! variables (as specified in the package annotation) are not all declared --! (and initialized --! where appropriate) in the package specification. This is because such a --! package is not --! allowed a body in Ada 95 unless either the pragma is given or a --! subprogram declared. 
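--! For example (an illustrative sketch with invented names), a package whose own variable is initialized only in the body includes the pragma so that a body is legal in Ada 95:
--!    package Store
--!    --# own Table;
--!    --# initializes Table;
--!    is
--!       pragma Elaborate_Body (Store);
--!    end Store;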
when 608 => E_Strings.Append_String (E_Str => E_Str, Str => "Reduced accuracy subtypes of real numbers are considered obsolescent and are not supported by SPARK"); when 609 => E_Strings.Append_String (E_Str => E_Str, Str => "This entity cannot be assigned to"); when 610 => E_Strings.Append_String (E_Str => E_Str, Str => "Child packages may not be used in SPARK83"); when 611 => E_Strings.Append_String (E_Str => E_Str, Str => "Illegal use of deferred constant prior to its full declaration"); when 613 => E_Strings.Append_String (E_Str => E_Str, Str => "Illegal name for body stub"); --! Issued if a dotted name appears in a body stub as in "package body --! P.Q is --! separate". No legal stub could ever have such a name. when 614 => E_Strings.Append_String (E_Str => E_Str, Str => "Child packages may be declared only at library level"); --! Issued if an attempt is made to declare a child package which is --! embedded in a package --! or subprogram. when 615 => E_Strings.Append_String (E_Str => E_Str, Str => "Name does not match name of package"); --! Issued if the closing identifier of a package has a different number of --! identifiers --! from the name originally given for the package. For example "package --! P.Q is ... end --! P.Q.R;" when 616 => E_Strings.Append_String (E_Str => E_Str, Str => "The private package "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not visible at this point"); --! Issued if an attempt is made to with or inherit a private package --! from the visible part --! of a public package. when 617 => E_Strings.Append_String (E_Str => E_Str, Str => "Public sibling "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not visible at this point"); --! Arises from attempting to inherit a public sibling child package --! from a private child --! package. when 618 => E_Strings.Append_String (E_Str => E_Str, Str => "The owner of the current package does not inherit the package "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! A private descendant (although it may be a public package) can only --! inherit a remote package if its parent also inherits --! it; this is analogous to the behaviour of embedded packages which --! may also only inherit --! a remote package if their enclosing package also does so. when 619 => E_Strings.Append_String (E_Str => E_Str, Str => "The package "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not owned by the current package"); --! This message indicates an attempt to claim that own variables of a --! package other than a --! private child package of the current package are refinement constituents --! of an abstract --! own variable of the current package. when 620 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variables here must be refinement constituents in package owner "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! Own variables of private child packages must appear as refinement --! constituents of the --! package which owns the child. If the Examiner has seen the owner --! package body before --! processing the child and has not found the required refinement --! constituent then this --! message results on processing the child.
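--! For example (an illustrative sketch with invented names, where Child is a private child of Owner declaring own variable State), the owner's body lists the child's own variable as a refinement constituent:
--!    package body Owner
--!    --# own Total is Child.State;
--!    is ...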
when 621 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " expected as a refinement constituent in this package"); --! Own variables of private child packages must appear as refinement --! constituents of the --! package which owns the child. If the Examiner has seen a child package --! which declares an --! own variable before examining its owner's body then this message --! is issued if the --! owner lacks the required refinement constituent declaration. when 622 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " did not occur in an initialization specification"); --! Issued if an own variable appears in an initialization clause and is --! also a refinement --! constituent of an own variable which is not marked as initialized. when 623 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " occurred in an initialization specification"); --! Issued if an own variable does not appear in an initialization clause --! and is also a --! refinement constituent of an own variable that is marked as initialized. when 624 => E_Strings.Append_String (E_Str => E_Str, Str => "All operators from ancestor package "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " are already visible"); --! A package must appear in a with clause before types declared in --! it can be specified in a use type clause. -- nice gap here for generics when 626 => E_Strings.Append_String (E_Str => E_Str, Str => "Global/derives/declare on generic subprogram instantiation is not allowed"); when 628 => E_Strings.Append_String (E_Str => E_Str, Str => "Formal parameter of the instantiation of subprogram "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not allowed"); when 629 => E_Strings.Append_String (E_Str => E_Str, Str => "The generic subprogram "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has no generic formal parameters"); when 630 => Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not the name of a generic subprogram"); --! Only a generic subprogram can be instantiated. when 631 => E_Strings.Append_String (E_Str => E_Str, Str => "Generic function found where a generic procedure was expected"); --! The subprogram kind of the generic and its instantiation must match when 632 => E_Strings.Append_String (E_Str => E_Str, Str => "Generic procedure found where a generic function was expected"); --! The subprogram kind of the generic and its instantiation must match when 635 => E_Strings.Append_String (E_Str => E_Str, Str => "Incorrect number of generic actual parameters for instantiation of generic unit "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --!
The number of generic formal and actual parameters must match exactly when 636 => E_Strings.Append_String (E_Str => E_Str, Str => "Type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not compatible with generic formal parameter "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); --! See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type --! which is of a compatible class. Note that SPARK does not have default values for such --! associations when 637 => E_Strings.Append_String (E_Str => E_Str, Str => "User-defined generic units are not permitted in SPARK 83"); --! There are weaknesses in the generic type model of Ada 83 that prevent the implementation --! of a safe subset of generics in SPARK 83. These deficiencies are overcome in Ada 95. --! SPARK 83 users may employ the predefined unit Unchecked_Conversion only. when 638 => E_Strings.Append_String (E_Str => E_Str, Str => "Unexpected global annotation. A generic subprogram may not reference or update global variables"); --! A standalone generic subprogram may not have a global annotation. Note that a subprogram in a --! generic package may have a global annotation as long as it only refers to own variables that --! are local to the package. when 639 => E_Strings.Append_String (E_Str => E_Str, Str => "A generic formal object may only have default mode or mode in"); --! SPARK restricts formal objects to being constants in order to avoid concealed information --! flows. when 640 => E_Strings.Append_String (E_Str => E_Str, Str => "A generic formal object may only be instantiated with a constant expression"); --! SPARK restricts formal objects to being constants in order to avoid concealed information --! flows. when 641 => E_Strings.Append_String (E_Str => E_Str, Str => "There is no generic subprogram declaration named "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " so a generic body of that name cannot be declared here"); --! A generic body must be preceded by a generic declaration of the same name. when 645 => E_Strings.Append_String (E_Str => E_Str, Str => "Actual array element "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not compatible with the element type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " of the generic formal parameter"); --! See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type --! which is of a compatible class. Note that SPARK does not have default values for such --! associations when 646 => E_Strings.Append_String (E_Str => E_Str, Str => "Actual array index "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not compatible with the index type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " of the generic formal parameter"); --! See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type --! which is of a compatible class. Note that SPARK does not have default values for such --! 
associations when 647 => E_Strings.Append_String (E_Str => E_Str, Str => "Actual array "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has more dimensions than formal array "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); --! See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type --! which is of a compatible class. Note that SPARK does not have default values for such --! associations when 648 => E_Strings.Append_String (E_Str => E_Str, Str => "Actual array "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has fewer dimensions than formal array "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); --! See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type --! which is of a compatible class. Note that SPARK does not have default values for such --! associations when 649 => E_Strings.Append_String (E_Str => E_Str, Str => "Actual array "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is constrained but the associated formal "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is unconstrained"); --! See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type --! which is of a compatible class. Note that SPARK does not have default values for such --! associations when 650 => E_Strings.Append_String (E_Str => E_Str, Str => "Actual array "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is unconstrained but the associated formal "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is constrained"); --! See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type --! which is of a compatible class. Note that SPARK does not have default values for such --! associations when 651 => E_Strings.Append_String (E_Str => E_Str, Str => "Variables of generic types may not be initialized at declaration"); --! In non-generic code we statically know the value being assigned to the variable and can --! check that it is in range. In the case of a generic we cannot do this because we do not --! know the bounds of the variable's type. The variable may, however, be assigned to in the sequence --! of statements in the generic body because generation of run-time checks will provide --! suitable protection from out-of-range values. when 652 => E_Strings.Append_String (E_Str => E_Str, Str => "Subtypes of generic types are not permitted"); --! In non-generic code we statically know the values being used as the range bounds for --! a subtype and can check that they are in range. In the case of a generic we cannot --! do this because we do not know the bounds of the variable's type. when 653 => E_Strings.Append_String (E_Str => E_Str, Str => "Constants of generic types are not permitted"); --! In non-generic code we statically know the value being assigned to the constant and can --! check that it is in range. In the case of a generic we cannot do this because we do not --! know the bounds of the constant's type. A variable, assigned to in the sequence --! 
of statements in the generic body, may be a suitable substitute for such a constant. when 654 => Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is a generic subprogram which must be instantiated before it can be called"); --! Generic units provide a template for creating callable units and are not directly --! callable. when 655 => E_Strings.Append_String (E_Str => E_Str, Str => "Invalid prefix, "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is a generic package"); --! Components of generic packages cannot be accessed directly. First instantiate the package --! and then access components of the instantiation. when 656 => E_Strings.Append_String (E_Str => E_Str, Str => "The only currently supported attribute in this context is 'Always_Valid"); when 657 => E_Strings.Append_String (E_Str => E_Str, Str => "A 'Always_Valid assertion requires a variable here"); --! The 'Always_Valid assertion can only be applied to variables or --! to components of record variables. when 658 => E_Strings.Append_String (E_Str => E_Str, Str => "The object in this assertion must be scalar or a non-tagged aggregation of scalar components"); --! The 'Always_Valid assertion can only be applied to objects which are: --! (1) of a scalar type, --! (2) a one dimensional array of scalar components, --! (3) an entire record variable of a non-tagged type with all --! components that are either scalar or an array of scalar components, --! (4) an array variable whose components are records satisfying (3). --! Additionally a field of a record satisfying these constraints may --! be marked individually as always valid. when 659 => E_Strings.Append_String (E_Str => E_Str, Str => "A 'Always_Valid assertion must be in the same declarative " & "region as contains the declaration of the variable to " & "which it refers"); when 660 => E_Strings.Append_String (E_Str => E_Str, Str => "A 'Always_Valid assertion must not be applied to an object already marked as always valid"); when 662 => E_Strings.Append_String (E_Str => E_Str, Str => "Only Mode in own variables and constituents can be marked using 'Always_Valid"); --! The 'Always_Valid assertion can only be applied to variables which are --! own variables with the mode in, or to subcomponents of records --! which are mode in own variables. when 700 => E_Strings.Append_String (E_Str => E_Str, Str => "Mode 'in out' may not be applied to own variables or their refinement constituents"); --! Own variables may be given a mode to indicate that they are system level --! inputs --! or outputs (i.e. they obtain values from or pass values to the external --! environment). Since effective SPARK design strictly separates inputs from --! outputs the mode 'in out' is not permitted. when 701 => E_Strings.Append_String (E_Str => E_Str, Str => "The mode of this refinement constituent is not consistent with its subject: "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! If an abstract own variable is given a mode then its refinement --! constituents must --! all be of the same mode. when 702 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " must be given the mode 'in' to match its earlier announcement "); --! 
Issued if an own variable of an embedded package is not given the --! same mode as --! the earlier refinement constituent that announced it would exist. when 703 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " must be given the mode 'out' to match its earlier announcement "); --! Issued if an own variable of an embedded package is not given the same --! mode as --! the earlier refinement constituent that announced it would exist. when 704 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " may not have a mode because one was not present in its earlier announcement "); --! Issued if an own variable of an embedded package is given a mode when --! the earlier refinement constituent that announced it would exist did not --! have one. when 705 => E_Strings.Append_String (E_Str => E_Str, Str => "Refinement constituent "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " must be given the mode 'in' to match the child package own variable with which it is being associated"); --! If a refinement constituent is an own variable of a private package then the --! constituent must have the same mode as the own variable to which it refers. when 706 => E_Strings.Append_String (E_Str => E_Str, Str => "Refinement constituent "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " must be given the mode 'out' to match the child package own variable with which it is being associated"); --! If a refinement constituent is an own variable of a private package then the --! constituent must have the same mode as the own variable to which it refers. when 707 => E_Strings.Append_String (E_Str => E_Str, Str => "Refinement constituent "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " may not have a mode because one was not present on the child package " & "own variable with which it is being associated"); --! If a refinement constituent is an own variable of a private package then the --! constituent can only be given a mode if the own variable to which it --! refers has one. when 708 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has a mode and may not appear in an initializes clause"); --! Mode own variables (stream variables) are implicitly initialized by the --! environment --! to which they are connected and may not appear in initializes clauses --! since this --! would require their explicit initialization. when 709 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variable or constituent "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has mode 'out' and may not be referenced by a function"); --! Functions are permitted to reference own variables that are either unmoded --! or of --! mode 'in'. Since mode 'out' own variables represent outputs to the --! environment, --! reading them in a function does not make sense and is not allowed. 
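--! For example (an illustrative sketch with invented names), a mode 'in' own variable models an external input stream and is read through a procedure rather than a function:
--!    package Sensor
--!    --# own in Reading;
--!    is
--!       procedure Get (Value : out Integer);
--!       --# global in Reading;
--!       --# derives Value from Reading;
--!    end Sensor;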
when 710 => E_Strings.Append_String (E_Str => E_Str, Str => "The own variable or constituent "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is of mode 'in' and can only have global mode 'in'"); --! Global modes, if given, must be consistent with the modes of own variables --! that appear --! in the global list. when 711 => E_Strings.Append_String (E_Str => E_Str, Str => "The own variable or constituent "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is of mode 'out' and can only have global mode 'out'"); --! Global modes, if given, must be consistent with the modes of own --! variables that appear --! in the global list. when 712 => E_Strings.Append_String (E_Str => E_Str, Str => "The own variable or constituent "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is of either mode 'in' or mode 'out' and may not have global mode 'in out'"); --! Global modes, if given, must be consistent with the modes of own variables --! that appear --! in the global list. when 713 => E_Strings.Append_String (E_Str => E_Str, Str => "The own variable or constituent "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is of mode 'in' and may not appear in a dependency clause as an export"); --! Own variables with mode 'in' denote system-level inputs; their exportation --! is --! not allowed. when 714 => E_Strings.Append_String (E_Str => E_Str, Str => "The own variable or constituent "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is of mode 'out' and may not appear in a dependency clause as an import"); --! Own variables with mode 'out' denote system-level outputs; their --! importation is --! not allowed. when 715 => E_Strings.Append_String (E_Str => E_Str, Str => "Function "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " references external (stream) variables and may only appear directly" & " in an assignment or return statement"); --! To avoid ordering effects, functions which globally access own --! variables which --! have modes (indicating that they are connected to the external --! environment) may --! only appear directly in assignment or return statements. --! They may not appear as --! actual parameters or in any other form of expression. when 716 => E_Strings.Append_String (E_Str => E_Str, Str => "External (stream) variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " may only appear directly" & " in an assignment or return statement; or as an actual parameter to an unchecked conversion"); --! To avoid ordering effects, own variables which --! have modes (indicating that they are connected to the external --! environment) may --! only appear directly in assignment or return statements. They may not --! appear as --! actual parameters (other than to instantiations of Unchecked_Conversion) or in any other form of expression. 
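--! For example (an illustrative sketch, where Port is a mode 'in' external variable and X is an ordinary variable):
--!    X := Port;        -- legal: the external variable appears directly in an assignment statement
--!    X := Port + 1;    -- illegal: the external variable appears within a larger expression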
when 717 => E_Strings.Append_String (E_Str => E_Str, Str => "External (stream) variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is of mode 'in' and may not be assigned to"); --! Own variables with mode 'in' represent inputs to the system from the --! external --! environment. As such, assigning to them does not make sense and is not --! permitted. when 718 => E_Strings.Append_String (E_Str => E_Str, Str => "External (stream) variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is of mode 'out' and may not be referenced"); --! Own variables with mode 'out' represent outputs to the external --! environment from the system. As such, referencing them does not make sense --! and is not permitted. when 719 => E_Strings.Append_String (E_Str => E_Str, Str => "External (stream) variables may not be referenced or updated during package elaboration"); --! Own variables with modes represent inputs and outputs between the external --! environment and the system. Referencing or updating them during package --! elaboration would introduce ordering effects and is not permitted. when 720 => E_Strings.Append_String (E_Str => E_Str, Str => "Variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is an external (stream) variable and may not be initialized at declaration"); --! Own variables with modes represent inputs and outputs between the external --! environment and the system. Referencing or updating them during package --! elaboration would introduce ordering effects and is not permitted. when 721 => E_Strings.Append_String (E_Str => E_Str, Str => "This refined function global annotation may not reference "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " because it is an external (stream) variable whose abstract subject "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " does not have a mode"); --! Functions may be used to reference external (stream) variables and the --! Examiner --! generates the appropriate information flow to show that the value returned --! by --! the function is 'volatile'. If the abstract view of the same function --! shows it --! referencing an own variable which is not an external stream then the --! volatility --! of the function is concealed. The error can be removed either by making the --! abstract own variable a mode 'in' stream or by using a procedure instead --! of a --! function to read the refined stream variable. when 722 => E_Strings.Append_String (E_Str => E_Str, Str => "The mode on abstract global variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " must be made 'in out' to make it consistent with the referencing of" & " mode 'in' external (stream) constituent "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " in the refined global annotation"); --! Where a procedure references an external (stream) variable of mode 'in' the --! Examiner constructs appropriate information flow to show that the input --! stream --! is 'volatile'. 
If the abstract view shows that the procedure obtains its --! result --! by simply reading an own variable which is not an external stream then the --! volatility is concealed. The error can be removed either by making the --! global --! mode of XXX 'in out' or making XXX an external (stream) variable of mode --! 'in'. when 723 => E_Strings.Append_String (E_Str => E_Str, Str => "Variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " must appear in this refined global annotation"); --! Issued when a global variable which is present in the first (abstract) --! global annotation --! is omitted from the second (refined) one. when 724 => E_Strings.Append_String (E_Str => E_Str, Str => "Exit label must match the label of the most closely enclosing loop statement"); --! If an exit statement names a loop label, then the most closely enclosing --! loop statement must have a matching label when 725 => E_Strings.Append_String (E_Str => E_Str, Str => "Protected function or variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " may only appear directly in an assignment or return statement"); --! To avoid ordering effects, protected functions may --! only appear directly in assignment or return statements. They may not --! appear as --! actual parameters or in any other form of expression. Ordering effects --! occur --! because the global state referenced by the protected function may be updated --! by another process during expression evaluation. when 730 => E_Strings.Append_String (E_Str => E_Str, Str => "A loop with no iteration scheme or exit statements may only " & "appear as the last " & "statement in the outermost scope of the main subprogram (or a task " & "body when using the Ravenscar profile)"); --! If a loop has neither an iteration scheme nor any exit statements then it --! will --! run forever. Any statements following it will be unreachable. SPARK only --! allows --! one such loop which must be the last statement of the main program when 750 => E_Strings.Append_String (E_Str => E_Str, Str => "The identifier "); if Err_Num.Name2 /= Error_Types.NoName then Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => "."); end if; Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is either undeclared or not visible at this point"); E_Strings.Append_String (E_Str => E_Str, Str => ". An array type may not be used as its own index type"); --! The type mark used for the index of an array type declaration must not --! be the same as the name of the array type being declared when 751 => E_Strings.Append_String (E_Str => E_Str, Str => "The identifier "); if Err_Num.Name2 /= Error_Types.NoName then Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => "."); end if; Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is either undeclared or not visible at this point"); E_Strings.Append_String (E_Str => E_Str, Str => ". A record type may not include fields of its own type"); --! The type mark given for a field in a record type declaration must --! 
not be the same as the name of the record type being declared when 752 => E_Strings.Append_String (E_Str => E_Str, Str => "The identifier "); if Err_Num.Name2 /= Error_Types.NoName then Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => "."); end if; Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is either undeclared or not visible at this point"); E_Strings.Append_String (E_Str => E_Str, Str => ". This identifier must appear in a preceding legal global annotation or formal parameter list"); --! For an identifier to appear legally as an import in a derives annotation, --! it must be a formal parameter or must appear legally in a --! preceding global annotation and must be of mode 'in' or mode 'in out' when 753 => E_Strings.Append_String (E_Str => E_Str, Str => "The identifier "); if Err_Num.Name2 /= Error_Types.NoName then Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => "."); end if; Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is either undeclared or not visible at this point"); E_Strings.Append_String (E_Str => E_Str, Str => ". This identifier must appear in a preceding legal global annotation or formal parameter list"); --! For an identifier to appear legally as an export in a derives annotation, --! it must be a formal parameter or must appear legally in a --! preceding global annotation and must be of mode 'out' or mode 'in out' when 754 => E_Strings.Append_String (E_Str => E_Str, Str => "The identifier "); if Err_Num.Name2 /= Error_Types.NoName then Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => "."); end if; Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is either undeclared or not visible at this point"); E_Strings.Append_String (E_Str => E_Str, Str => ". This package must be both inherited and withed to be visible here"); --! For a package name to be visible in Ada context, it must appear in --! both the inherit clause and the with clause of the enclosing package when 755 => E_Strings.Append_String (E_Str => E_Str, Str => "The identifier "); if Err_Num.Name2 /= Error_Types.NoName then Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => "."); end if; Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is either undeclared or not visible at this point"); E_Strings.Append_String (E_Str => E_Str, Str => ". A parent of a child package must be inherited to be visible here"); --! A parent of a child package must be inherited (but not withed) --! to be visible in that child. when 756 => E_Strings.Append_String (E_Str => E_Str, Str => "The identifier "); if Err_Num.Name2 /= Error_Types.NoName then Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => "."); end if; Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is either undeclared or not visible at this point"); E_Strings.Append_String (E_Str => E_Str, Str => ". 
The grandparent of a child package should not appear in this prefix."); --! A grandparent of a child package should not be included in prefixes --! referencing a declaration of the child package when 757 => E_Strings.Append_String (E_Str => E_Str, Str => "The identifier "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is either undeclared or not visible at this point"); E_Strings.Append_String (E_Str => E_Str, Str => ". A record field name cannot be the same as its indicated type"); when 770 => E_Strings.Append_String (E_Str => E_Str, Str => "If Any_Priority is defined, Priority and Interrupt_Priority must also be defined"); --! If the type Any_Priority is defined in package System, then the subtypes --! Priority and --! Interrupt_Priority must also be defined; if support for tasking is not --! required, then --! the definition of Any_Priority may be removed when 771 => E_Strings.Append_String (E_Str => E_Str, Str => "The parent type of this subtype must be Any_Priority"); --! Ada 95 requires that both Priority and Interrupt_Priority be immediate --! subtypes --! of Any_Priority. when 772 => E_Strings.Append_String (E_Str => E_Str, Str => "The range of Priority must contain at least 30 values; LRM D.1(26)"); --! Ada 95 requires that the range of the subtype Priority include at least --! 30 values; --! this requirement is stated in the Ada 95 Language Reference Manual at --! D.1(26) when 773 => E_Strings.Append_String (E_Str => E_Str, Str => "Priority'First must equal Any_Priority'First; LRM D.1(10)"); --! Ada 95 requires that task priority types meet the following criteria, --! the second of which is relevant to this error: --!
  • subtype Any_Priority is Integer range implementation-defined; --!
  • subtype Priority is Any_Priority range Any_Priority'First .. implementation-defined; --!
  • subtype Interrupt_Priority is Any_Priority range Priority'Last+1 .. Any_Priority'Last when 774 => E_Strings.Append_String (E_Str => E_Str, Str => "Interrupt_Priority'First must equal Priority'Last + 1; LRM D.1(10)"); --! Ada 95 requires that task priority types meet the following criteria, --! the third of which is relevant to this error: --!
  • subtype Any_Priority is Integer range implementation-defined; --!
  • subtype Priority is Any_Priority range Any_Priority'First .. implementation-defined; --!
  • subtype Interrupt_Priority is Any_Priority range Priority'Last+1 .. Any_Priority'Last when 775 => E_Strings.Append_String (E_Str => E_Str, Str => "Interrupt_Priority'Last must equal Any_Priority'Last; LRM D.1(10)"); --! Ada 95 requires that task priority types meet the following criteria, --! the third of which is relevant to this error: --!
  • subtype Any_Priority is Integer range implementation-defined; --!
  • subtype Priority is Any_Priority range Any_Priority'First .. implementation-defined; --!
  • subtype Interrupt_Priority is Any_Priority range Priority'Last+1 .. Any_Priority'Last when 776 => E_Strings.Append_String (E_Str => E_Str, Str => "In SPARK95 mode, only packages Standard, System, Ada.Real_Time " & "and Ada.Interrupts may be specified in the config file"); --! In SPARK95 mode, the packages that may be specified in the target --! configuration file are: Standard, System, Ada.Real_Time and Ada.Interrupts. --! The latter two are ignored unless the Ravenscar profile is selected. when 777 => E_Strings.Append_String (E_Str => E_Str, Str => "In package System, Priority must be an immediate subtype of Integer"); --! Ada 95, and hence SPARK95, defines Priority as being an immediate --! subtype of Integer when 778 => E_Strings.Append_String (E_Str => E_Str, Str => "This identifier is not valid at this point in the target configuration file"); --! The specified identifier cannot be used here; it is most probably --! either not valid in the target configuration file at all, or might --! be valid in a different package, but not here. when 779 => E_Strings.Append_String (E_Str => E_Str, Str => "Definition of this package in the target configuration file is not allowed in SPARK83 mode"); --! In SPARK83 mode, only package Standard may be specified in the --! target configuration file. when 780 => E_Strings.Append_String (E_Str => E_Str, Str => "Type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " must be private"); --! This type may only be declared as private in the target configuration --! file. when 781 => E_Strings.Append_String (E_Str => E_Str, Str => "The lower bound of a signed integer type declaration must be greater than or equal to System.Min_Int"); --! This error can only be generated in SPARK95 mode when the configuration --! file specifies --! a value for System.Min_Int when 782 => E_Strings.Append_String (E_Str => E_Str, Str => "The upper bound of a signed integer type declaration must be less than or equal to System.Max_Int"); --! This error can only be generated in SPARK95 mode when the configuration --! file specifies --! a value for System.Max_Int when 783 => E_Strings.Append_String (E_Str => E_Str, Str => "Modulus must be less than or equal to System.Max_Binary_Modulus"); --! This error can only be generated in SPARK95 mode when the configuration --! file specifies --! a value for System.Max_Binary_Modulus when 784 => E_Strings.Append_String (E_Str => E_Str, Str => "System.Max_Binary_Modulus must be a positive power of 2"); when 785 => E_Strings.Append_String (E_Str => E_Str, Str => "The number of digits specified exceeds the value defined for System.Max_Digits"); --! The maximum decimal precision for a floating point type, where --! a range specification has not been included, is defined --! by System.Max_Digits when 786 => E_Strings.Append_String (E_Str => E_Str, Str => "The number of digits specified exceeds the value defined for System.Max_Base_Digits"); --! The maximum decimal precision for a floating point type, where --! a range specification has been included, is defined --! 
by System.Max_Base_Digits when 787 => E_Strings.Append_String (E_Str => E_Str, Str => "Digits value must be positive"); when 788 => E_Strings.Append_String (E_Str => E_Str, Str => "Delta value must be positive"); when 789 => E_Strings.Append_String (E_Str => E_Str, Str => "The only currently supported type attribute in this context is 'Base"); when 790 => E_Strings.Append_String (E_Str => E_Str, Str => "A base type assertion requires a type here"); when 791 => E_Strings.Append_String (E_Str => E_Str, Str => "The base type in this assertion must be a predefined type"); --! Predefined types are those defined either by the language, or in package --! Standard, using the configuration file mechanism when 792 => E_Strings.Append_String (E_Str => E_Str, Str => "The types in this assertion must both be either floating point or signed integer"); when 793 => E_Strings.Append_String (E_Str => E_Str, Str => "This base type must have a defined range in the configuration file"); --! If a predefined type is to be used in a base type assertion or in --! a derived type declaration, then it must appear in the configuration --! file and have a well-defined range. when 794 => E_Strings.Append_String (E_Str => E_Str, Str => "Range of subtype exceeds range of base type"); when 795 => E_Strings.Append_String (E_Str => E_Str, Str => "A base type assertion must be in the same declarative region as that of the full type definition"); when 796 => E_Strings.Append_String (E_Str => E_Str, Str => "This type already has a base type: either it already has a " & "base type assertion, or is explicitly derived, or is a predefined type"); --! A base type assertion can only be given exactly once. Explicitly derived --! scalar types and predefined types never need a base type assertion. when 797 => E_Strings.Append_String (E_Str => E_Str, Str => "The base type in a floating point base type assertion must have a defined accuracy"); when 798 => E_Strings.Append_String (E_Str => E_Str, Str => "The accuracy of the base type in a base type assertion must be at " & "least that of the type which is the subject of the assertion"); when 799 => E_Strings.Append_String (E_Str => E_Str, Str => "Only a simple type can be the subject of a base type assertion "); when 800 => E_Strings.Append_String (E_Str => E_Str, Str => "Modulus must be a positive power of 2"); --! In SPARK, modular types must have a modulus which is a positive --! power of 2 when 801 => E_Strings.Append_String (E_Str => E_Str, Str => "Modular types may only be used in SPARK95"); --! Ada83 (and hence SPARK83) does not include modular types when 803 => E_Strings.Append_String (E_Str => E_Str, Str => "Unary arithmetic operators are not permitted for modular types"); --! Unary arithmetic operators are of little value. The "abs" and "+" operators --! have no effect for modular types, and so are not required. The unary minus --! operator is a source of potential confusion, and so is not permitted in --! SPARK when 804 => E_Strings.Append_String (E_Str => E_Str, Str => "Universal expression may not be implicitly converted to a modular " & "type here. " & "Left hand operand requires qualification to type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! A universal expression cannot be used as the left hand operand of a binary --! operator if the right hand operand is of a modular type. Qualification of --! the --! left hand expression is required in this case. 
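-- Editor's note: a minimal illustrative sketch for error 804 above. The
-- declarations below (Byte, B) are hypothetical and not taken from the SPARK
-- sources; they merely show the qualification the Examiner expects when a
-- universal expression meets a modular operand.
--    type Byte is mod 2**8;
--    B : Byte;
--    ...
--    B := Byte'(16#0F#) and B;   -- accepted: left operand is qualified
--    B := 16#0F# and B;          -- would be rejected with error 804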
when 805 => E_Strings.Append_String (E_Str => E_Str, Str => "Universal expression may not be implicitly converted to a modular " & "type here. " & "Right hand operand requires qualification to type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! A universal expression cannot be used as the right hand operand of a binary --! operator if the left hand operand is of a modular type. Qualification of --! the --! right hand expression is required in this case. when 806 => E_Strings.Append_String (E_Str => E_Str, Str => "Universal expression may not be implicitly converted to a modular " & "type here. " & "Right hand operand requires qualification"); --! A universal expression cannot be used as the operand of a unary "not" --! operator if no type can be determined from the context of the expression. --! Qualification of the operand is required in this case. when 814 => E_Strings.Append_String (E_Str => E_Str, Str => "Default_Bit_Order must be of type Bit_Order"); --! The only possible type for the constant System.Default_Bit_Order --! is System.Bit_Order when it appears in the configuration file when 815 => E_Strings.Append_String (E_Str => E_Str, Str => "The only allowed values of Default_Bit_Order are Low_Order_First and High_Order_First"); --! System.Bit_Order is implicitly declared in package System when a configuration --! file is given. This is an enumeration type with only two literals --! Low_Order_First and High_Order_First when 820 => E_Strings.Append_String (E_Str => E_Str, Str => "Abstract types are not currently permitted in SPARK"); --! Only non-abstract tagged types are currently supported. It is hoped to --! lift this --! restriction in a future Examiner release. when 821 => E_Strings.Append_String (E_Str => E_Str, Str => "This type declaration must be a tagged record because its private type is tagged"); --! If a type is declared as "tagged private" then its full declaration must --! be a tagged --! record. when 822 => Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not a tagged type; only tagged types may be extended"); --! In SPARK, "new" can only be used to declare a type extension; other --! derived types are not permitted. when 823 => E_Strings.Append_String (E_Str => E_Str, Str => "This type may not be extended in the same package in which it is declared"); --! SPARK only permits types from another library package to be extended. --! This rule prevents --! overloading of inherited operations. when 824 => E_Strings.Append_String (E_Str => E_Str, Str => "This package already extends a type from package "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => ". Only one type extension per package is permitted"); --! SPARK only permits one type extension per package. This rule prevents --! overloading of inherited operations. when 825 => E_Strings.Append_String (E_Str => E_Str, Str => "Type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " expected in order to complete earlier private extension"); --! Since SPARK only permits one type extension per package it follows that --! the declaration --! "new XXX with private" in a package visible part must be paired with "new --! XXX with record..." --! in its private part. The ancestor type XXX must be the same in both --! declarations.
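-- Editor's note: a minimal illustrative sketch for error 825 above. Package
-- and type names (P, Q, Root, T) are hypothetical; the point is that the full
-- view must extend the same ancestor named in the earlier private extension.
--    package Q is
--       type T is new P.Root with private;
--    private
--       type T is new P.Root with record   -- same ancestor P.Root required
--          Extra : Integer;
--       end record;
--    end Q;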
when 826 => E_Strings.Append_String (E_Str => E_Str, Str => "Type extension is not permitted in SPARK 83"); --! Type extension is an Ada 95 feature not included in Ada or SPARK 83. when 827 => E_Strings.Append_String (E_Str => E_Str, Str => "The actual parameter associated with a tagged formal parameter in an " & "inherited operation must be an object not an expression"); --! There are several reasons for this SPARK rule. Firstly, Ada requires --! tagged parameters to be --! passed by reference and so an object must exist at least implicitly. --! Secondly, in order to --! perform flow analysis of inherited subprogram calls, the Examiner needs --! to identify what subset of the --! information available at the point of call --! is passed to and from the called subprogram. Since information can only --! flow through objects it --! follows that the actual parameter must be an object. when 828 => E_Strings.Append_String (E_Str => E_Str, Str => "Tagged types and tagged type extensions may only be declared in library-level package specifications"); --! This SPARK rule facilitates the main uses of tagged types while greatly --! simplifying visibility rules. when 829 => E_Strings.Append_String (E_Str => E_Str, Str => "Illegal re-declaration: this subprogram shares the same name as the inheritable root operation "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " but does not override it"); --! To avoid overloading, SPARK prohibits more than one potentially visible --! subprogram having the --! same name. when 830 => E_Strings.Append_String (E_Str => E_Str, Str => "A private type may not be implemented as a tagged type or an extension of a tagged type"); --! This rule means that a private type can only be implemented as a tagged --! type if the private --! type itself is tagged. when 831 => E_Strings.Append_String (E_Str => E_Str, Str => "Extended tagged types may only be converted in the direction of their root type"); --! This is an Ada rule: type conversions simply omit unused fields of the --! extended type. It follows --! that conversions must be in the direction of the root type. when 832 => E_Strings.Append_String (E_Str => E_Str, Str => "Only tagged objects, not expressions, may be converted"); --! For flow analysis purposes the Examiner needs to know what subset of the --! information in --! the unconverted view is available in the converted view. Since --! information can only flow --! through objects it follows that only objects can be converted. when 833 => E_Strings.Append_String (E_Str => E_Str, Str => "Invalid record aggregate: type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has a private ancestor"); --! If an extended type has a private ancestor then an extension aggregate --! must be used rather --! than a normal aggregate. when 834 => E_Strings.Append_String (E_Str => E_Str, Str => "Null records are only permitted if they are tagged"); --! An empty record can have no use in a SPARK program other than as a --! root type from which --! other types can be derived and extended. For this reason, null records --! are only allowed --! if they are tagged. when 835 => Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not an extended tagged record type"); --! An extension aggregate is only appropriate if the record type it is --! defining is --!
an extended record. A normal aggregate should be used for other record --! (and array) --! types. when 836 => E_Strings.Append_String (E_Str => E_Str, Str => "This expression does not represent a valid ancestor type of the aggregate "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! The expression before the reserved word "with" must be of an ancestor type --! of the overall aggregate --! type. In SPARK, the ancestor expression may not be a subtype mark. when 837 => E_Strings.Append_String (E_Str => E_Str, Str => "Invalid record aggregate: there is a private ancestor between the " & "type of this expression " & "and the type of the aggregate "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! The ancestor type can be a tagged type with a private extension; --! however, there must be no private --! extensions between the ancestor type and the type of the aggregate. when 838 => E_Strings.Append_String (E_Str => E_Str, Str => "Incomplete aggregate: null record cannot be used here because fields in "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " require values"); --! The aggregate form "with null record" can only be used if the type of --! the aggregate is a null record --! extension of the ancestor type. If any fields are added between the --! ancestor type and the aggregate type then --! values need to be supplied for them so "null record" is inappropriate. when 839 => E_Strings.Append_String (E_Str => E_Str, Str => "This package already contains a root tagged type or tagged" & " type extension. " & "Only one such declaration per package is permitted"); --! SPARK permits one root tagged type or one tagged type extension per --! package, but not both. This rule prevents --! the declaration of illegal operations with more than one controlling --! parameter. when 840 => E_Strings.Append_String (E_Str => E_Str, Str => "A tagged or extended type may not appear here. " & "SPARK does not permit the declaration of primitive functions " & "with controlling results"); --! A primitive function controlled by its return result would be almost --! unusable in SPARK because a data --! flow error would occur wherever it was used. when 841 => E_Strings.Append_String (E_Str => E_Str, Str => "The return type in the declaration of this function contained an error. " & "It is not possible to check the validity of this return type"); --! Issued when there is an error in the return type on a function's --! initial declaration. In this situation --! we cannot be sure what return type is expected in the function's body. --! It would be misleading to simply --! report a type mismatch since the types might match perfectly and both --! be wrong. Instead, the Examiner reports --! the above error and refuses to analyse the function body until its --! specification is corrected. when 842 => E_Strings.Append_String (E_Str => E_Str, Str => "Pragma Atomic_Components is not permitted in SPARK when the Ravenscar profile is selected"); when 843 => E_Strings.Append_String (E_Str => E_Str, Str => "Pragma Volatile_Components is not permitted in SPARK when the Ravenscar profile is selected"); when 844 => E_Strings.Append_String (E_Str => E_Str, Str => "Missing or contradictory overriding_indicator for operation "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => ". 
This operation successfully overrides its parent operation"); --! In SPARK2005, an operation which successfully overrides a parent operation --! must be specified as Overriding. when 845 => E_Strings.Append_String (E_Str => E_Str, Str => "Subprogram "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " does not successfully override a parent operation"); --! In SPARK2005, an overriding operation must successfully override an --! operation inherited from the parent. when 850 => E_Strings.Append_String (E_Str => E_Str, Str => "This construct may only be used when the Ravenscar profile is selected"); --! Concurrent features of the SPARK language, including --! protected objects, --! tasking, etc., are only supported when the Ravenscar profile is selected. when 851 => E_Strings.Append_String (E_Str => E_Str, Str => "The parameter to pragma Atomic must be a simple_name"); --! The parameter to pragma Atomic must be a simple_name and may not be --! passed using --! a named association when 852 => E_Strings.Append_String (E_Str => E_Str, Str => "pragma Atomic may only appear in the same immediate scope as the type to which it applies"); --! This is an Ada rule (pragma Atomic takes a local name; see LRM 13.1(1)). --! Note that this precludes the use of pragma Atomic on a predefined type when 853 => E_Strings.Append_String (E_Str => E_Str, Str => "pragma Atomic may only apply to a scalar base type, or to a non-tagged " & "record type with exactly 1 field that is a predefined scalar type"); --! pragma Atomic may only be applied to base types that are scalar (i.e. --! enumeration types, integer types, real types or modular types) or to a non-tagged --! record type with a single field which is a predefined scalar type, such --! as Integer, Character or Boolean. As an additional special case, a --! record type with a single field of type System.Address is also allowed. when 854 => E_Strings.Append_String (E_Str => E_Str, Str => "pragma Atomic takes exactly one parameter"); when 855 => E_Strings.Append_String (E_Str => E_Str, Str => "The type of own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not consistent with its modifier"); --! An own variable with a task modifier must be of a task type. A task own --! variable must have the task modifier. An own variable with a protected --! modifier must be a protected object, suspension object or pragma --! atomic type. A protected or suspension object own variable must --! have the protected modifier. when 858 => E_Strings.Append_String (E_Str => E_Str, Str => "A variable that appears in a protects property list may not appear in a refinement clause"); --! A variable in a protects list is effectively protected and hence --! cannot be refined. when 859 => E_Strings.Append_String (E_Str => E_Str, Str => "A protected own variable may not appear in a refinement clause"); --! Protected state cannot be refined or be used as refinement constituents when 860 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " appears in a protects list and hence must appear in the initializes clause"); --! Protected state (including all refinement constituents) must be initialized. 
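-- Editor's note: a minimal illustrative sketch for error 853 above. The type
-- and field names are hypothetical; the shape is a non-tagged record with
-- exactly one field of a predefined scalar type.
--    type Status_Register is record
--       Value : Integer;                   -- single predefined scalar field
--    end record;
--    pragma Atomic (Status_Register);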
when 861 => E_Strings.Append_String (E_Str => E_Str, Str => "Both abstract own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " and refinement constituent "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " must have an Integrity property"); --! If an abstract own variable has an Integrity property, then so must --! all its refinement constituents, and vice-versa. when 862 => E_Strings.Append_String (E_Str => E_Str, Str => "Both abstract own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " and refinement constituent "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " must have the same Integrity value"); --! If both an abstract own variable and a refinement constituent have --! Integrity properties specified, then the value of the Integrity --! must be the same. when 863 => E_Strings.Append_String (E_Str => E_Str, Str => "Own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is protected and may not appear in an initializes clause"); --! Protected own variables must always be initialized, and should not appear in --! initializes annotations. when 864 => E_Strings.Append_String (E_Str => E_Str, Str => "Unexpected initialization specification - "); E_Strings.Append_String (E_Str => E_Str, Str => "all own variables of this package "); E_Strings.Append_String (E_Str => E_Str, Str => "are either implicitly initialized, or do not require initialization"); --! An own variable initialization clause and that of its refinement --! constituents must be --! consistent. when 865 => E_Strings.Append_String (E_Str => E_Str, Str => "Field "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is part of the ancestor part of this aggregate and does not require a value here"); --! An extension aggregate must supply values for all fields that are part of --! the overall aggregate --! type but not those which are part of the ancestor part. when 866 => E_Strings.Append_String (E_Str => E_Str, Str => "The expression in a delay_until statement must be of type Ada.Real_Time.Time"); --! When the Ravenscar Profile is selected, the delay until statement may be --! used. --! The argument of this statement must be of type Ada.Real_Time.Time. when 867 => E_Strings.Append_String (E_Str => E_Str, Str => "Subprogram "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " contains a delay statement but does not have a delay property"); --! Any subprogram that may call delay until must have a delay property in a --! declare annotation. Your subprogram is directly or indirectly making a --! call to delay until when 868 => E_Strings.Append_String (E_Str => E_Str, Str => "Protected object "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " may only be declared immediately within a library package"); --! This error message is issued if a type mark representing a protected type --! appears anywhere other than in --!
a library level variable declaration or library-level own variable type --! announcement. when 869 => E_Strings.Append_String (E_Str => E_Str, Str => "Protected type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " already contains an Entry declaration; only one Entry is permitted"); --! The Ravenscar profile prohibits a protected type from declaring more than --! one entry. when 870 => E_Strings.Append_String (E_Str => E_Str, Str => "Protected type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " does not have any operations; at least one operation must be declared"); --! A protected type which provides no operations can never be used so SPARK --! requires the --! declaration of at least one. when 871 => E_Strings.Append_String (E_Str => E_Str, Str => "A type can only be explicitly derived from a predefined Integer or Floating Point type or" & " from a tagged record type"); when 872 => E_Strings.Append_String (E_Str => E_Str, Str => "Variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not protected; only protected items may be globally accessed by protected operations"); --! In order to avoid the possibility of shared data corruption, --! SPARK prohibits protected operations --! from accessing unprotected data items. when 873 => E_Strings.Append_String (E_Str => E_Str, Str => "This subprogram requires a global annotation which references the protected type name "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! In order to statically detect certain bounded errors defined by the --! Ravenscar profile, SPARK --! requires every visible operation of a protected type to globally --! reference the abstract state of --! the type. when 874 => E_Strings.Append_String (E_Str => E_Str, Str => "Protected state "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " must be initialized at declaration"); --! Because there is no guarantee that a concurrent thread that initializes a --! protected object will be executed before one that reads it, the only way --! we can be sure that a protected object is properly initialized is to do --! so at the point of declaration. You have either declared some protected --! state and not included an initialization or you have tried to initialize --! some protected state in package body elaboration. when 875 => E_Strings.Append_String (E_Str => E_Str, Str => "Protected type expected; access discriminants may only refer to protected types in SPARK"); --! Access discriminants have been allowed in SPARK solely to allow devices --! made up of co-operating --! Ravenscar-compliant units to be constructed. For this reason only --! protected types may appear in --! access discriminants. when 876 => E_Strings.Append_String (E_Str => E_Str, Str => "This protected type or task declaration must include " & "either a pragma Priority or " & "pragma Interrupt_Priority"); --! To allow the static detection of certain bounded errors defined by the --! Ravenscar profile, SPARK requires --! an explicitly-set priority for each protected type, task type or object --! of those types. The System.Default_Priority --! may be used explicitly provided package System has been defined in the --! configuration file. 
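-- Editor's note: a minimal illustrative sketch for error 876 above. Names are
-- hypothetical and the SPARK annotations normally required on the operations
-- are omitted; the point is the priority pragma as the first item of the
-- protected definition.
--    protected type Store is
--       pragma Priority (10);
--       procedure Put (X : in Integer);
--       function Get return Integer;
--    private
--       Value : Integer := 0;
--    end Store;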
when 877 => E_Strings.Append_String (E_Str => E_Str, Str => "Priority values require an argument which is an expression of type Integer"); when 878 => E_Strings.Append_String (E_Str => E_Str, Str => "This protected type declaration contains a pragma Attach_Handler " & "and must therefore also " & "include a pragma Interrupt_Priority"); --! To allow the static detection of certain bounded errors defined by the --! Ravenscar profile, SPARK requires --! an explicitly-set priority for each protected type or object. --! The System.Default_Priority may be used explicitly --! provided package System has been defined in the configuration file. when 879 => E_Strings.Append_String (E_Str => E_Str, Str => "Unexpected pragma "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => ": this pragma may not appear here"); --! pragma Interrupt_Priority must be the first item in --! a protected type declaration or task type declaration; pragma Priority --! must be the first item in --! a protected type declaration, task type declaration or the main program. when 880 => E_Strings.Append_String (E_Str => E_Str, Str => "Pragma Priority or Interrupt_Priority expected here"); --! Issued when a pragma other than Priority or Interrupt_Priority appears as --! the first item in --! a protected type or task type declaration. when 881 => E_Strings.Append_String (E_Str => E_Str, Str => "The priority of "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " must be in the range "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); --! See LRM D.1(17). when 882 => E_Strings.Append_String (E_Str => E_Str, Str => "Integrity property requires an argument which is an expression of type Natural"); when 883 => E_Strings.Append_String (E_Str => E_Str, Str => "Pragma Interrupt_Handler may not be used; SPARK does not support" & " the dynamic attachment of interrupt handlers [LRM C3.1(9)]"); --! Interrupt_Handler is of no use unless dynamic attachment of interrupt --! handlers is to be used. when 884 => E_Strings.Append_String (E_Str => E_Str, Str => "Pragma Attach_Handler is only permitted immediately after the " & "corresponding protected procedure declaration in a protected type " & "declaration"); --! Pragma Attach_Handler may only be used within a protected type --! declaration. Furthermore, it must --! immediately follow a protected procedure declaration with the same name --! as the first argument to the pragma. when 885 => E_Strings.Append_String (E_Str => E_Str, Str => "Pragma Attach_Handler may only be applied to a procedure with no parameters"); --! See LRM C.3.1(5). when 887 => E_Strings.Append_String (E_Str => E_Str, Str => "A discriminant may only appear alone, not in an expression"); --! Issued when a task or protected type priority is set using an expression --! involving a discriminant. The use --! of such an expression greatly complicates the static evaluation of the --! priority of task or protected subtypes, --! thus preventing the static elimination of certain Ravenscar bounded errors. when 888 => E_Strings.Append_String (E_Str => E_Str, Str => "Unexpected Delay, "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " already has a Delay property"); --! A procedure may only have a maximum of one delay annotation. 
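-- Editor's note: a minimal illustrative sketch for error 887 above. Names are
-- hypothetical and SPARK annotations are omitted; a discriminant may set the
-- priority only when it appears on its own, not inside an expression.
--    task type Worker (Pri : System.Priority) is
--       pragma Priority (Pri);        -- accepted: discriminant used alone
--       -- pragma Priority (Pri + 1);    rejected with error 887
--    end Worker;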
when 889 => E_Strings.Append_String (E_Str => E_Str, Str => "The own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " must have the suspendable property"); --! The type used to declare this object must be a protected type with an --! entry or a suspension object type when 890 => E_Strings.Append_String (E_Str => E_Str, Str => "The name "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " already appears in the suspends list"); --! Items may not appear more than once in a suspends list. when 891 => E_Strings.Append_String (E_Str => E_Str, Str => "Task type or protected type required"); --! Issued in a subtype declaration where the constraint is a discriminant --! constraint. Only task and protected types --! may take a discriminant constraint as part of a subtype declaration. when 892 => E_Strings.Append_String (E_Str => E_Str, Str => "Array type, task type or protected type required"); --! Issued in a subtype declaration where the constraint is either a --! discriminant constraint or an index --! constraint (these two forms cannot always be distinguished --! syntactically). Only task and protected types --! may take a discriminant constraint and only array types may --! take an index constraint as part of a --! subtype declaration. when 893 => E_Strings.Append_String (E_Str => E_Str, Str => "Number of discriminant constraints differs from number of known discriminants of type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! Issued in a subtype declaration if too many or too few discriminant --! constraints are supplied. when 894 => E_Strings.Append_String (E_Str => E_Str, Str => "Only variables of a protected type may be aliased"); --! SPARK supports the keyword aliased in variable declarations only so --! that protected and task types can support --! access discriminants. Since it has no other purpose it may not be --! used except in a protected object declaration. when 895 => E_Strings.Append_String (E_Str => E_Str, Str => "Attribute Access may only be applied to variables which are declared as aliased, variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not aliased"); --! This is a slightly annoying Ada issue. Marking a variable as aliased --! prevents it being placed in a register --! which would make pointing at it hazardous; however, SPARK only permits --! 'Access on protected types which are --! limited and therefore always passed by reference anyway and immune from --! register optimization. Requiring --! aliased on protected objects that will appear in discriminant --! constraints is therefore unwanted syntactic sugar --! only. when 896 => E_Strings.Append_String (E_Str => E_Str, Str => "The task type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " does not have an associated body"); --! Issued at the end of a package body if a task type declared in its --! specification has neither a body --! nor a body stub. when 897 => E_Strings.Append_String (E_Str => E_Str, Str => "The protected type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " does not have an associated body"); --!
Issued at the end of a package body if a protected type declared in --! its specification has neither a body --! nor a body stub. when 898 => Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not a protected or task type which requires a body"); --! Issued if a body or body stub for a task or protected type is --! encountered and there is no matching specification. when 899 => E_Strings.Append_String (E_Str => E_Str, Str => "A body for type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has already been declared"); --! Issued if a body or body stub for a task or protected type is --! encountered and an earlier body --! has already been encountered. when 901 => E_Strings.Append_String (E_Str => E_Str, Str => "Suspension object "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " may only be declared immediately within a library package specification or body"); --! Suspension objects must be declared at library level. They cannot --! be used in protected type state or as local variables in subprograms. when 902 => E_Strings.Append_String (E_Str => E_Str, Str => "Recursive use of typemark "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " in known discriminant"); when 903 => E_Strings.Append_String (E_Str => E_Str, Str => "Protected or suspension object types cannot be used to declare constants"); --! Protected and suspension objects are used to ensure integrity of shared --! objects. If it is necessary to share constant data then these constructs --! should not be used. when 904 => E_Strings.Append_String (E_Str => E_Str, Str => "Protected or suspension objects cannot be used as subprogram parameters"); --! SPARK does not currently support this feature. when 905 => E_Strings.Append_String (E_Str => E_Str, Str => "Protected or suspension objects cannot be returned from functions"); --! SPARK does not currently support this feature. when 906 => E_Strings.Append_String (E_Str => E_Str, Str => "Protected or suspension objects cannot be used in composite types"); --! Protected and suspension objects cannot be used in record or array --! structures. when 907 => E_Strings.Append_String (E_Str => E_Str, Str => "Delay until must be called from a task or unprotected procedure body"); --! You are calling delay until from an invalid construct. --! Any construct that calls delay until must have a delay property in the --! declare annotation. This construct must be either a task body or a procedure body --! when 908 => E_Strings.Append_String (E_Str => E_Str, Str => "Blocking properties are not allowed in protected scope"); --! Procedures in protected scope must not block and therefore blocking --! properties are prohibited when 909 => E_Strings.Append_String (E_Str => E_Str, Str => "Object "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " cannot suspend"); --! You are either applying the suspendable property to an own variable --! that cannot suspend or you have declared a variable (whose own variable --! has the suspendable property) which cannot suspend. Or you have used an --! item in a suspends list that does not have the suspendable property. --!
An object can only suspend if it is a suspension object or a protected --! type with an entry. when 910 => E_Strings.Append_String (E_Str => E_Str, Str => "Name "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " must appear in the suspends list property for the enclosing unit"); --! Protected entry calls and calls to --! Ada.Synchronous_Task_Control.Suspend_Until_True may block the currently --! executing task. SPARK requires you to announce this fact by placing the --! actual callee name in the suspends list for the enclosing unit. when 911 => E_Strings.Append_String (E_Str => E_Str, Str => "The argument in pragma Priority for the main program must be an " & "integer literal " & "or a local constant of static integer value"); --! If the main program priority is not an integer literal then you --! should declare a constant that has the required value in the --! declarative part of the main program prior to the position of the pragma. when 912 => E_Strings.Append_String (E_Str => E_Str, Str => "This call contains a delay property that is not propagated to the enclosing unit"); --! The call being made has a declare annotation that contains a delay --! property. SPARK requires that this property is propagated up --! the call chain and hence must appear in a declare annotation --! for the enclosing unit. when 913 => E_Strings.Append_String (E_Str => E_Str, Str => "This call has a name in its suspends list which is not propagated to the enclosing unit"); --! The call being made has a declare annotation that contains a suspends --! list. SPARK requires that the entire list is propagated up the call --! chain and hence must appear in a declare annotation for the enclosing unit. when 914 => E_Strings.Append_String (E_Str => E_Str, Str => "The name "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " specified in the suspends list can never be called"); --! You have specified the name of a protected or suspension object in --! the suspends list that can never be called by this procedure or task. when 915 => E_Strings.Append_String (E_Str => E_Str, Str => "Procedure "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has a delay property but cannot delay"); --! You have specified a delay property for this procedure but delay --! until can never be called from it. when 916 => E_Strings.Append_String (E_Str => E_Str, Str => " Protected object "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has a circular dependency in subprogram "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); --! The type of the protected object mentions the protected object name in --! the derives list for the given subprogram when 917 => E_Strings.Append_String (E_Str => E_Str, Str => "Procedure "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " cannot be called from a protected action"); --! The procedure being called may block and hence cannot be called from --! a protected action. when 918 => E_Strings.Append_String (E_Str => E_Str, Str => "The delay property is not allowed for "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --!
The delay property may only be applied to a procedure when 919 => E_Strings.Append_String (E_Str => E_Str, Str => "The priority property is not allowed for "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! The priority property can only be applied to protected own variables --! which are type announced. If the type has been declared it must be a --! protected type when 920 => E_Strings.Append_String (E_Str => E_Str, Str => "The suspends property is not allowed for "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! The suspends property may only be applied to task type specifications --! and procedures when 921 => E_Strings.Append_String (E_Str => E_Str, Str => "The identifier "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not recognised as a component of a property list"); --! The property list can only specify the reserved word delay, suspends or --! priority. when 922 => E_Strings.Append_String (E_Str => E_Str, Str => "The own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " must have the priority property"); --! In order to perform the ceiling priority checks the priority property must --! be given to all own variables of protected type. when 923 => E_Strings.Append_String (E_Str => E_Str, Str => "The procedure "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " cannot be called from a function as it has a blocking side effect"); --! Blocking is seen as a side effect and hence procedures that potentially --! block cannot be called from functions. when 924 => E_Strings.Append_String (E_Str => E_Str, Str => "The suspendable property is not allowed for "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! Objects that suspend must be declared as own protected variables when 925 => E_Strings.Append_String (E_Str => E_Str, Str => "The own variable or task "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " must have a type announcement"); --! Own variables of protected type and own tasks must have a type announcement when 926 => E_Strings.Append_String (E_Str => E_Str, Str => "Illegal declaration of task "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => ". Task objects must be declared at library level"); --! Task objects must be declared in library level package specifications or --! bodies. when 927 => E_Strings.Append_String (E_Str => E_Str, Str => "The own task annotation for this task is missing the name "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " in its suspends list"); --! The task type declaration has name XXX in its list and this must appear --! in the own task annotation when 928 => E_Strings.Append_String (E_Str => E_Str, Str => "Private elements are not allowed for protected type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! Protected type XXX has been used to declare a protected, moded own variable. --! Protected, moded own variables are refined onto a set of virtual elements with --! the same mode. As such private elements are not allowed. 
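-- Editor's note: a minimal illustrative sketch for error 926 above. Names are
-- hypothetical and the own-task annotation required by SPARK is omitted; the
-- point is that the task object is declared at library level, in a package
-- specification, not inside a subprogram.
--    package Timers is
--       task type Ticker;
--       T : Ticker;                   -- accepted: library-level declaration
--    end Timers;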
when 929 => E_Strings.Append_String (E_Str => E_Str, Str => "Unexpected declare annotation. Procedure "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " should have the declare annotation on the specification"); --! Declare annotations cannot appear on the procedure body if it appears --! on the procedure specification when 930 => E_Strings.Append_String (E_Str => E_Str, Str => "Task "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " does not appear in the own task annotation for this package"); --! A task has been declared that is not specified as an own task of the --! package. when 931 => E_Strings.Append_String (E_Str => E_Str, Str => "Task "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " does not have a definition"); --! A task name appears in the own task annotation for this package but --! is never declared when 932 => E_Strings.Append_String (E_Str => E_Str, Str => "The priority for protected object "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " does not match that given in the own variable declaration"); --! The priority given in the priority property must match that given in --! the protected type. when 933 => E_Strings.Append_String (E_Str => E_Str, Str => "A pragma Priority is required for the main program when Ravenscar Profile is enabled"); --! When SPARK profile Ravenscar is selected, all tasks, protected objects --! and the main program must explicitly be assigned a priority. when 934 => E_Strings.Append_String (E_Str => E_Str, Str => "Priority ceiling check failure: the priority of "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is less than that of "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! The active priority of a task is the higher of its base priority --! and the ceiling priorities of all protected objects that it is --! executing. The active priority at the point of a call to a --! protected operation must not exceed the ceiling priority of the --! callee. when 935 => E_Strings.Append_String (E_Str => E_Str, Str => "The own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " must have the interrupt property"); --! An own variable has been declared using a protected type with a pragma --! attach handler. Such objects are used in interrupt processing and must --! have the interrupt property specified in their own variable declaration when 936 => E_Strings.Append_String (E_Str => E_Str, Str => "The interrupt property is not allowed for "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! The interrupt property can only be applied to protected own variables --! that are type announced. If the type is declared then it must be a --! protected type that contains an attach handler when 937 => E_Strings.Append_String (E_Str => E_Str, Str => "The protects property is not allowed for "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! The protects property can only be applied to protected own variables --! that are type announced. If the type is declared then it must be a --! 
protected type. when 938 => E_Strings.Append_String (E_Str => E_Str, Str => "The unprotected variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is shared by "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " and "); Append_Name (E_Str => E_Str, Name => Err_Num.Name3, Scope => Err_Num.Scope); --! XXX is an unprotected variable that appears in the global list of the --! threads YYY and ZZZ. Unprotected variables cannot be shared between --! threads in SPARK. A thread is one of: the main program, a task, an --! interrupt handler. when 939 => E_Strings.Append_String (E_Str => E_Str, Str => "The suspendable item "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is referenced by "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " and "); Append_Name (E_Str => E_Str, Name => Err_Num.Name3, Scope => Err_Num.Scope); --! XXX is an own variable with the suspends property that appears in the --! suspends list of the threads YYY and ZZZ. SPARK prohibits this to --! prevent more than one thread being suspended on the same item at any --! one time. A thread is one of: the main program, a task, an interrupt --! handler. when 940 => Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is a protected own variable. Protected variables may not be used in proof contexts"); --! The use of protected variables in pre and postconditions or other proof annotations is not (currently) --! supported. Protected variables are volatile because they can be changed at any time by another program --! thread and this may invalidate some common proof techniques. The prohibition of protected variables --! does not prevent proof of absence of run-time errors nor proof of protected operation bodies. See the --! manual "SPARK Proof Manual" for more details. when 941 => E_Strings.Append_String (E_Str => E_Str, Str => "The type of own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " must be local to this package"); --! The type used to announce an own variable with a protects property --! must be declared in the same package. when 942 => E_Strings.Append_String (E_Str => E_Str, Str => "Only one instance of the type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is allowed"); --! Type XXX has a protects property. This means there can be only one object --! in the package that has this type or any subtype of this type. when 943 => E_Strings.Append_String (E_Str => E_Str, Str => "The name "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " cannot appear in a protects list"); --! All items in a protects list must be unprotected own variables owned by --! this package when 944 => E_Strings.Append_String (E_Str => E_Str, Str => "The name "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is already protected by "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); --!
The name XXX appears in more than one protects list. The first time --! it appeared was for own variable YYY. XXX should appear in at most --! one protects list. when 945 => E_Strings.Append_String (E_Str => E_Str, Str => "The property "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " must be given a static expression for its value"); --! This property can only accept a static expression. when 946 => E_Strings.Append_String (E_Str => E_Str, Str => "The own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " must only ever be accessed from operations in protected type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); --! The own variable XXX is protected by the protected type YYY and hence --! must never be accessed from anywhere else. when 947 => E_Strings.Append_String (E_Str => E_Str, Str => "The own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " appears in a protects list for type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " but is not used in the body"); --! The protected type YYY claims to protect XXX via a protects property. --! However, the variable XXX is not used by any operation in YYY. when 948 => E_Strings.Append_String (E_Str => E_Str, Str => "The type of own variable or task "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " must be a base type"); --! Own tasks and protected own variables of a protected type must be --! announced using the base type. The --! subsequent variable declaration may be a subtype of the base type when 949 => E_Strings.Append_String (E_Str => E_Str, Str => "Unexpected partition annotation: a global annotation may only " & "appear here when " & "the Ravenscar profile is selected"); --! When the sequential SPARK profile is selected, the global and derives --! annotation on the main program describes --! the entire program's behaviour. No additional, partition annotation --! is required or permitted. Note that an --! annotation must appear here if the Ravenscar profile is selected. when 950 => E_Strings.Append_String (E_Str => E_Str, Str => "Partition annotation expected: a global and, optionally, a " & "derives annotation must " & "appear after 'main_program' when the Ravenscar profile is selected"); --! When the Ravenscar profile is selected the global and derives annotation --! on the main program describes the --! behaviour of the environment task only, not the entire program. --! An additional annotation, called the --! partition annotation, is required to describe the entire program's --! behaviour; this annotation follows --! immediately after 'main_program;' when 951 => E_Strings.Append_String (E_Str => E_Str, Str => "Inherited package "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " contains tasks and/or interrupt handlers and must therefore appear in the preceding WITH clause"); --! In order to ensure that a Ravenscar program is complete, SPARK requires --! that all 'active' packages --! inherited by the environment task also appear in a corresponding --! with clause. This check ensures that --! 
any program entities described in the partition annotation are also --! linked into the program itself. when 952 => E_Strings.Append_String (E_Str => E_Str, Str => "Subprogram "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is an interrupt handler and cannot be called"); --! Interrupt handler operations cannot be called. --cfr1203 --error message only needed if we eliminate suspension objects from partition-level annos --cfr1203 when 985 => --cfr1203 AppendName --cfr1203 (E_Str, Err_Num.Name1, Err_Num.Scope); --cfr1203 ELStrings.Append_String --cfr1203 (E_Str, " is a predefined suspension object which must not --cfr1203 appear in the partition global annotation"); when 953 => E_Strings.Append_String (E_Str => E_Str, Str => "Interrupt property error for own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => ". "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not an interrupt handler in type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name3, Scope => Err_Num.Scope); --! The handler names in an interrupt property must match one in the --! protected type of the own variable. when 954 => E_Strings.Append_String (E_Str => E_Str, Str => "Interrupt property error for own variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => ". Interrupt stream name "); Append_Name (E_Str => E_Str, Name => Err_Num.Name2, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is illegal"); --! The stream name must be unprefixed and not already in use within the --! scope of the package. when 955 => Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " can only appear in the partition wide flow annotation"); --! Interrupt stream variables are used only to enhance the partition --! wide flow annotation and must not be used elsewhere. when 956 => Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " already appears as an interrupt handler in the interrupt mappings"); --! An interrupt handler can be mapped onto exactly one interrupt stream --! variable. An interrupt stream variable may be mapped onto many interrupt --! handlers. when 957 => E_Strings.Append_String (E_Str => E_Str, Str => "Consecutive updates of protected variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " are disallowed when they do not depend directly on its preceding value"); --! A protected variable cannot be updated without direct reference to its --! preceding value more than once within a subprogram or task. --! Each update of a protected variable may have a wider effect than --! just the change of value of the protected variable. The overall --! change is considered to be the accumulation of all updates and --! reads of the protected variable and to preserve this information flow, --! successive updates must directly depend on the preceding value of --! the variable when 958 => E_Strings.Append_String (E_Str => E_Str, Str => "A task may not import the unprotected state "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --!
A task may not import unprotected state unless it is mode in. --! This is because under the concurrent elaboration policy, the task cannot --! rely on the state being initialized before it is run. when 959 => E_Strings.Append_String (E_Str => E_Str, Str => "Unprotected state "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is exported by a task and hence must not appear in an initializes clause"); --! Own variable XXX is being accessed by a task. The order in which the task --! is run and the own variable initialized is non-deterministic under a --! concurrent elaboration policy. In this case SPARK forces the task to --! perform the initialization and as such the own variable must not appear --! in an initializes clause. when 960 => E_Strings.Append_String (E_Str => E_Str, Str => "The function Ada.Real_Time.Clock " & "can only be used directly (1) in an assignment or return statement or (2) " & "to initialize a library-level constant"); --!
  • To avoid ordering effects, functions which globally access own --! variables which have modes (indicating that they are connected to --! the external environment) may only appear directly in assignment or --! return statements. They may not appear as actual parameters or in any --! other form of expression. --!
  • SPARK relaxes the rule against the use of function calls in elaboration code in --! the case of the function Ada.Real_Time.Clock. However, the function can only --! be used to directly initialize a constant value. when 961 => E_Strings.Append_String (E_Str => E_Str, Str => "This property value is of an incorrect format"); --! Please check the user manual for valid property value formats. when 962 => E_Strings.Append_String (E_Str => E_Str, Str => "Error(s) detected by VC Generator. See the .vcg file for more information"); --! This message is echoed to the screen if an unrecoverable --! error occurs which --! makes the generation of VCs for the current subprogram impossible. --! Another message more precisely identifying the problem will be placed in --! the .vcg file. when 986 => E_Strings.Append_String (E_Str => E_Str, Str => "A protected function may not call a locally-declared protected procedure"); --! See LRM 9.5.1 (2). A protected function has read access to the --! protected elements of the type whereas --! the called procedure has read-write access. There is no way in which --! an Ada compiler can determine whether --! the procedure will illegally update the protected state or not so the --! call is prohibited by the rules of Ada. --! (Of course, in SPARK, we know there is no function side effect but the --! rules of Ada must prevail nonetheless). when 987 => E_Strings.Append_String (E_Str => E_Str, Str => "Task types and protected types may only be declared in package specifications"); --! The Examiner performs certain important checks at the whole program level --! such as detection of illegal sharing of --! unprotected state and partition-level information flow analysis. --! These checks require visibility of task --! types and protected types (especially those containing interrupt --! handlers). SPARK therefore requires these --! types to be declared in package specifications. Subtypes and objects --! of task types, protected types and their --! subtypes may be declared in package bodies. when 988 => E_Strings.Append_String (E_Str => E_Str, Str => "Illegal re-use of identifier "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => "; this identifier is used in a directly visible protected type"); --! SPARK does not allow the re-use of operation names which are already --! in use in a directly visible --! protected type. The restriction is necessary to avoid overload resolution --! issues in the protected --! body. For example, type PT in package P declares operation K. --! Package P also declares an operation K. --! From inside the body of PT, a call to K could refer to either of the --! two Ks since both are directly visible. when 989 => E_Strings.Append_String (E_Str => E_Str, Str => "The last statement of a task body must be a plain loop with no exits"); --! To prevent any possibility of a task terminating (which can lead to a --! bounded error), SPARK requires --! each task to end with a non-terminating loop. The environment task (or --! "main program") does not need --! to end in a plain loop provided the program closure includes at least --! one other task. If there are --! no other tasks, then the environment task must be made non-terminating --! with a plain loop. when 990 => E_Strings.Append_String (E_Str => E_Str, Str => "Unexpected annotation, a task body may have only global and derives annotations"); --! Issued if a pre, post or declare annotation is attached to a task body.
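-- Illustrative sketch of the shape of task body that message 989 above is
-- enforcing; this fragment is not part of the Examiner source and the names
-- (Controller, Initialise, Do_Cyclic_Work) are hypothetical:
--
--    task body Controller is
--    begin
--       Initialise;          -- arbitrary start-up code may precede the loop
--       loop                 -- last statement: a plain loop ...
--          Do_Cyclic_Work;
--       end loop;            -- ... with no exit statements
--    end Controller;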
when 991 => E_Strings.Append_String (E_Str => E_Str, Str => "Unexpected task body, "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not the name of a task declared in this package specification"); --! Issued if a task body is encountered for which there is no preceding --! declaration. when 992 => E_Strings.Append_String (E_Str => E_Str, Str => "A body for task type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has already been declared"); --! Issued if a duplicate body or body stub is encountered for a task. when 993 => E_Strings.Append_String (E_Str => E_Str, Str => "There is no protected type declaration for "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! Issued if a body is found for a protected type for which there is --! no preceding declaration. when 994 => E_Strings.Append_String (E_Str => E_Str, Str => "Invalid guard, "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not a Boolean protected element of this protected type"); --! The SPARK Ravenscar rules require a simple Boolean guard which must --! be one of the protected elements --! of the type declaring the entry. when 995 => E_Strings.Append_String (E_Str => E_Str, Str => "Unexpected entry body, "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is not the name of an entry declared in this protected type"); --! Local entries are not permitted so a protected body can declare at --! most one entry body and that must have been --! declared in the protected type specification. when 996 => E_Strings.Append_String (E_Str => E_Str, Str => "The protected operation "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => ", declared in this type, does not have an associated body"); --! Each exported protected operation must have a matching implementation --! in the associated protected body. when 997 => E_Strings.Append_String (E_Str => E_Str, Str => "A body for protected type "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " has already been declared"); --! Each protected type declaration must have exactly one matching --! protected body or body stub. when 998 => E_Strings.Append_String (E_Str => E_Str, Str => "There is no protected type declaration for "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); --! Issued if a protected body or body stub is found and there is no --! matching declaration for it. when 999 => E_Strings.Append_String (E_Str => E_Str, Str => "This feature of Generics is not yet implemented"); --! Generics are currently limited to instantiation of Unchecked_Conversion. -- The following semantic error numbers are unused. When adding a new -- semantic error, pick one of these, add a case for it above and remove -- it from this case. when 0 | 48 | 146 .. 147 | 164 | 177 .. 179 | 183 .. 189 | 192 .. 199 | 202 .. 249 | 256 .. 272 | 274 .. 299 | 344 .. 398 | 404 .. 406 | 426 .. 499 | 510 .. 549 | 553 | 556 .. 599 | 612 | 625 | 627 | 633 .. 634 | 642 .. 644 | 661 | 663 .. 699 | 726 .. 729 | 731 .. 749 | 758 .. 769 | 802 | 807 .. 813 | 816 .. 819 | 846 .. 849 | 856 ..
857 | 886 | 900 | 963 .. 985 => E_Strings.Append_String (E_Str => E_Str, Str => "UNKNOWN ERROR NUMBER PASSED TO SemanticErr"); end case; AppendReference (E_Str, Err_Num.Reference); Append_Explanation; E_Strings.Append_String (E_Str => E_Str, Str => "."); end SemanticErr; spark-2012.0.deb/examiner/errorhandler-erroraccumulator.adb0000644000175000017500000001300711753202336023000 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (ErrorHandler) package body ErrorAccumulator is ----------------------------------------------------------------------------- function Is_Active (This : T) return Boolean is begin return This.Active; end Is_Active; function Is_Error_Continuation (The_Error : Error_Types.StringError) return Boolean is begin return ((The_Error.ErrorType = Error_Types.CondlDependencyErr and then The_Error.MessageId = ErrorHandler.Dependency_Err_Number (ErrorHandler.May_Be_Used_Continue)) or else (The_Error.ErrorType = Error_Types.UncondDependencyErr and then The_Error.MessageId = ErrorHandler.Dependency_Err_Number (ErrorHandler.Not_Used_Continue))); end Is_Error_Continuation; function Is_Error_Start (The_Error : Error_Types.StringError) return Boolean is begin return ((The_Error.ErrorType = Error_Types.CondlDependencyErr and then The_Error.MessageId = ErrorHandler.Dependency_Err_Number (ErrorHandler.May_Be_Used_New)) or else (The_Error.ErrorType = Error_Types.UncondDependencyErr and then The_Error.MessageId = ErrorHandler.Dependency_Err_Number (ErrorHandler.Not_Used_New))); end Is_Error_Start; procedure Start_Msg (This : out T; Start_Error : in ErrorHandler.Error_Struct; Start_Indent : in Natural; Explanation : in E_Strings.T; Line_Length : in Natural; Indent : in Natural) is begin This := T' (Active => True, Start_Error => Start_Error, Start_Indent => Start_Indent, Explanation => Explanation, Line_Length => Line_Length, Indent => Indent); end Start_Msg; procedure Flush (This : in out T; Listing : in SPARK_IO.File_Type) is Unused_New_Start : Natural; begin if This.Active then if not E_Strings.Is_Empty (E_Str => This.Explanation) then --# accept F, 10, Unused_New_Start, "Expected ineffective assignment"; ErrorHandler.PrintLine (Listing => Listing, Start_Pos => This.Start_Indent, End_Pos => This.Line_Length, Indent => This.Indent, Line => This.Explanation, Add_New_Line => False, New_Start => Unused_New_Start); --# end accept; end if; SPARK_IO.Put_String (File => Listing, Item => ".", Stop => 1); This.Active := False; end if; --# accept Flow, 33, Unused_New_Start, "Expected to be neither referenced nor exported"; end Flush; procedure Add (This : in out T; Error : 
in ErrorHandler.Error_Struct; End_Pos, Indent : in Natural; Listing : in SPARK_IO.File_Type) is New_Start : Natural; function Need_To_Add return Boolean --# global in Error; --# in This; is begin return (This.Start_Error.Error.MessageId = ErrorHandler.Dependency_Err_Number (ErrorHandler.May_Be_Used_New) and Error.Error.ErrorType = Error_Types.CondlDependencyErr and Error.Error.MessageId = ErrorHandler.Dependency_Err_Number (ErrorHandler.May_Be_Used_Continue)) or else (This.Start_Error.Error.MessageId = ErrorHandler.Dependency_Err_Number (ErrorHandler.Not_Used_New) and Error.Error.ErrorType = Error_Types.UncondDependencyErr and Error.Error.MessageId = ErrorHandler.Dependency_Err_Number (ErrorHandler.Not_Used_Continue)); end Need_To_Add; begin if This.Active then if Need_To_Add then ErrorHandler.PrintLine (Listing => Listing, Start_Pos => This.Start_Indent, End_Pos => End_Pos, Indent => Indent, Line => Error.Error.Message, Add_New_Line => False, New_Start => New_Start); This.Start_Indent := New_Start; elsif Is_Error_Continuation (The_Error => Error.Error) then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "in ErrorHandler.ErrorAccumulator.Add"); else Flush (This => This, Listing => Listing); end if; end if; end Add; end ErrorAccumulator; spark-2012.0.deb/examiner/sem-wf_package_declaration-wf_package_specification-wf_anno.adb0000644000175000017500000006570711753202336030570 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Wf_Package_Declaration.Wf_Package_Specification) procedure Wf_Anno (Node : in STree.SyntaxNode; Pack_Sym : in Dictionary.Symbol; Scope_Type : in Enclosing_Scope_Types; Scope : in Dictionary.Scopes; The_Heap : in out Heap.HeapRecord) is -- ASSUME Scope is VisibleScope of Pack_Sym Own_Var_Clause_Node, Init_Spec_Node : STree.SyntaxNode; ---------------------------------------------------------- procedure Wf_Own (Node : in STree.SyntaxNode; Pack_Sym : in Dictionary.Symbol; Scope_Type : in Enclosing_Scope_Types; Scope : in Dictionary.Scopes; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Pack_Sym, --# Scope, --# Scope_Type, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Pack_Sym, --# Scope, --# Scope_Type, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.own_variable_clause; --# post STree.Table = STree.Table~; is separate; ---------------------------------------------------------- procedure Wf_Init_Spec (Node : in STree.SyntaxNode; Pack_Sym : in Dictionary.Symbol; Scope_Type : in Enclosing_Scope_Types; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# Scope_Type, --# STree.Table & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Pack_Sym, --# Scope, --# Scope_Type, --# SPARK_IO.File_Sys, --# STree.Table & --# SLI.State from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Pack_Sym, --# Scope, --# Scope_Type, --# SPARK_IO.File_Sys, --# STree.Table & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Pack_Sym, --# Scope, --# Scope_Type, --# SLI.State, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.initialization_specification; --# post 
STree.Table = STree.Table~; is separate; ---------------------------------------------------------- procedure Do_Embedded_Package (Anno_Node, Own_Var_Clause_Node, Init_Spec_Node : in STree.SyntaxNode; Pack_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# LexTokenManager.State, --# Statistics.TableUsage, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Own_Var_Clause_Node, --# Pack_Sym, --# Scope, --# STree.Table, --# The_Heap & --# Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Init_Spec_Node, --# LexTokenManager.State, --# Own_Var_Clause_Node, --# Pack_Sym, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Anno_Node, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Init_Spec_Node, --# LexTokenManager.State, --# Own_Var_Clause_Node, --# Pack_Sym, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# SLI.State from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Init_Spec_Node, --# LexTokenManager.State, --# Own_Var_Clause_Node, --# Pack_Sym, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Anno_Node, STree.Table) = SP_Symbols.package_annotation and --# (Syntax_Node_Type (Own_Var_Clause_Node, STree.Table) = SP_Symbols.own_variable_clause or --# Own_Var_Clause_Node = STree.NullNode) and --# (Syntax_Node_Type (Init_Spec_Node, STree.Table) = SP_Symbols.initialization_specification or --# Init_Spec_Node = STree.NullNode); --# post STree.Table = STree.Table~; is type Init_Expected is (Yes, No, No_Protected); function Initialization_Expected (Pack_Sym : Dictionary.Symbol) return Init_Expected --# global in Dictionary.Dict; is Own_Var_List : Dictionary.Iterator; Expected : Init_Expected; begin Own_Var_List := Dictionary.FirstOwnVariable (Pack_Sym); Expected := No; while not Dictionary.IsNullIterator (Own_Var_List) loop if not Dictionary.IsOwnVariableOrConstituentWithMode (Dictionary.CurrentSymbol (Own_Var_List)) and then Dictionary.OwnVariableIsInitialized (Dictionary.GetSubject (Dictionary.CurrentSymbol (Own_Var_List))) and then not Dictionary.GetOwnVariableProtected (Dictionary.CurrentSymbol (Own_Var_List)) then Expected := Yes; exit; end if; --# assert True; if Dictionary.GetOwnVariableProtected (Dictionary.CurrentSymbol (Own_Var_List)) then if Expected = No then Expected := No_Protected; end if; end if; Own_Var_List := Dictionary.NextSymbol (Own_Var_List); end loop; return Expected; end Initialization_Expected; begin -- Do_Embedded_Package if Dictionary.FirstOwnVariable (Pack_Sym) = Dictionary.NullIterator then -- no refines announcements found -- ASSUME 
Own_Var_Clause_Node = own_variable_clause OR NULL if Syntax_Node_Type (Node => Own_Var_Clause_Node) = SP_Symbols.own_variable_clause then -- ASSUME Own_Var_Clause_Node = own_variable_clause -- unexpected own var clause ErrorHandler.Semantic_Error (Err_Num => 80, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Own_Var_Clause_Node), Id_Str => LexTokenManager.Null_String); end if; else -- at least one announcement found so there must be an own var clause -- ASSUME Own_Var_Clause_Node = own_variable_clause OR NULL if Own_Var_Clause_Node = STree.NullNode then -- required own var clause is missing -- ASSUME Own_Var_Clause_Node = NULL ErrorHandler.Semantic_Error (Err_Num => 81, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Parent_Node (Current_Node => Anno_Node)), Id_Str => LexTokenManager.Null_String); elsif Syntax_Node_Type (Node => Own_Var_Clause_Node) = SP_Symbols.own_variable_clause then -- ASSUME Own_Var_Clause_Node = own_variable_clause -- own var clause both needed and present Wf_Own (Node => Own_Var_Clause_Node, Pack_Sym => Pack_Sym, Scope_Type => In_Package, Scope => Scope, The_Heap => The_Heap); -- now check for required or superfluous initializations case Initialization_Expected (Pack_Sym => Pack_Sym) is when Yes => -- ASSUME Init_Spec_Node = initialization_specification OR NULL if Init_Spec_Node = STree.NullNode then -- ASSUME Init_Spec_Node = NULL -- initialization missing from inner package ErrorHandler.Semantic_Error (Err_Num => 83, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Own_Var_Clause_Node), Id_Str => LexTokenManager.Null_String); elsif Syntax_Node_Type (Node => Init_Spec_Node) = SP_Symbols.initialization_specification then -- ASSUME Init_Spec_Node = initialization_specification -- initialization both required and present Wf_Init_Spec (Node => Init_Spec_Node, Pack_Sym => Pack_Sym, Scope_Type => In_Package, Scope => Scope); end if; when No => -- ASSUME Init_Spec_Node = initialization_specification OR NULL if Syntax_Node_Type (Node => Init_Spec_Node) = SP_Symbols.initialization_specification then -- ASSUME Init_Spec_Node = initialization_specification -- unexpected init ErrorHandler.Semantic_Error (Err_Num => 82, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Init_Spec_Node), Id_Str => LexTokenManager.Null_String); end if; when No_Protected => -- ASSUME Init_Spec_Node = initialization_specification OR NULL if Syntax_Node_Type (Node => Init_Spec_Node) = SP_Symbols.initialization_specification then -- ASSUME Init_Spec_Node = initialization_specification -- unexpected init on protected state ErrorHandler.Semantic_Error (Err_Num => 864, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Init_Spec_Node), Id_Str => LexTokenManager.Null_String); end if; end case; end if; end if; end Do_Embedded_Package; ---------------------------------------------------------- procedure Do_Library_Package (Anno_Node, Own_Var_Clause_Node, Init_Spec_Node : in STree.SyntaxNode; Pack_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# 
derives Aggregate_Stack.State, --# LexTokenManager.State, --# Statistics.TableUsage, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Own_Var_Clause_Node, --# Pack_Sym, --# Scope, --# STree.Table, --# The_Heap & --# Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Init_Spec_Node, --# LexTokenManager.State, --# Own_Var_Clause_Node, --# Pack_Sym, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Anno_Node, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Init_Spec_Node, --# LexTokenManager.State, --# Own_Var_Clause_Node, --# Pack_Sym, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# SLI.State from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Init_Spec_Node, --# LexTokenManager.State, --# Own_Var_Clause_Node, --# Pack_Sym, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Anno_Node, STree.Table) = SP_Symbols.package_annotation and --# (Syntax_Node_Type (Own_Var_Clause_Node, STree.Table) = SP_Symbols.own_variable_clause or --# Own_Var_Clause_Node = STree.NullNode) and --# (Syntax_Node_Type (Init_Spec_Node, STree.Table) = SP_Symbols.initialization_specification or --# Init_Spec_Node = STree.NullNode); --# post STree.Table = STree.Table~; is It : Dictionary.Iterator; Var_Sym : Dictionary.Symbol; Error_Node_Pos : LexTokenManager.Token_Position; begin Error_Node_Pos := Node_Position (Node => Anno_Node); -- ASSUME Own_Var_Clause_Node = own_variable_clause OR NULL if Syntax_Node_Type (Node => Own_Var_Clause_Node) = SP_Symbols.own_variable_clause then -- ASSUME Own_Var_Clause_Node = own_variable_clause Error_Node_Pos := Node_Position (Node => Own_Var_Clause_Node); Wf_Own (Node => Own_Var_Clause_Node, Pack_Sym => Pack_Sym, Scope_Type => In_Library, Scope => Scope, The_Heap => The_Heap); end if; -- ASSUME Init_Spec_Node = initialization_specification OR NULL if Syntax_Node_Type (Node => Init_Spec_Node) = SP_Symbols.initialization_specification then -- ASSUME Init_Spec_Node = initialization_specification Error_Node_Pos := Node_Position (Node => Init_Spec_Node); Dictionary.AddInitializationSpecification (Pack_Sym, Dictionary.Location'(Start_Position => Node_Position (Node => Init_Spec_Node), End_Position => Node_Position (Node => Init_Spec_Node))); Wf_Init_Spec (Node => Init_Spec_Node, Pack_Sym => Pack_Sym, Scope_Type => In_Library, Scope => Scope); end if; It := Dictionary.FirstOwnVariable (Pack_Sym); while not Dictionary.IsNullIterator (It) loop --# assert STree.Table = STree.Table~; Var_Sym := Dictionary.CurrentSymbol (It); if Dictionary.IsVirtualElement (Var_Sym) and then not Dictionary.IsOwnVariableOrConstituentWithMode (Var_Sym) and then not Dictionary.OwnVariableIsInitialized (Var_Sym) then -- Unmoded own variables that appear in a protects list MUST appear in -- an intializes clause. 
ErrorHandler.Semantic_Error (Err_Num => 860, Reference => ErrorHandler.No_Reference, Position => Error_Node_Pos, Id_Str => Dictionary.GetSimpleName (Var_Sym)); end if; It := Dictionary.NextSymbol (It); end loop; end Do_Library_Package; begin -- Wf_Anno Own_Var_Clause_Node := Child_Node (Current_Node => Node); -- ASSUME Own_Var_Clause_Node = own_variable_clause OR NULL if Own_Var_Clause_Node = STree.NullNode then Init_Spec_Node := STree.NullNode; elsif Syntax_Node_Type (Node => Own_Var_Clause_Node) = SP_Symbols.own_variable_clause then Init_Spec_Node := Next_Sibling (Current_Node => Own_Var_Clause_Node); Dictionary.AddOwnAnnotation (Pack_Sym, Dictionary.Location'(Start_Position => Node_Position (Node => Node), End_Position => Node_Position (Node => Node))); else Init_Spec_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Own_Var_Clause_Node = own_variable_clause OR NULL in Wf_Anno"); end if; -- ASSUME Init_Spec_Node = initialization_specification OR NULL SystemErrors.RT_Assert (C => Init_Spec_Node = STree.NullNode or else Syntax_Node_Type (Node => Init_Spec_Node) = SP_Symbols.initialization_specification, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Init_Spec_Node = initialization_specification OR NULL in Wf_Anno"); if Scope_Type = In_Package then -- package embedded in package Do_Embedded_Package (Anno_Node => Node, Own_Var_Clause_Node => Own_Var_Clause_Node, Init_Spec_Node => Init_Spec_Node, Pack_Sym => Pack_Sym, Scope => Scope, The_Heap => The_Heap); else -- library unit or package embedded in procedure. Do_Library_Package (Anno_Node => Node, Own_Var_Clause_Node => Own_Var_Clause_Node, Init_Spec_Node => Init_Spec_Node, Pack_Sym => Pack_Sym, Scope => Scope, The_Heap => The_Heap); end if; end Wf_Anno; spark-2012.0.deb/examiner/dag_io.ads0000644000175000017500000002641611753202336016175 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Cells; with Dictionary; with Labels; with Pairs; with SPARK_IO; use type Cells.Cell; use type Cells.Cell_Kind; --# inherit AdjustFDL_RWs, --# Cells, --# Cell_Storage, --# Clists, --# CommandLineData, --# CStacks, --# Dictionary, --# E_Strings, --# Labels, --# LexTokenManager, --# Maths, --# Pairs, --# SPARK_IO, --# SP_Symbols, --# Statistics, --# SystemErrors; package DAG_IO is ---------------------------------------------------------------- -- The null string literal "" is declared as a special -- deferred constant in FDL using this name. This name -- is also used in Declarations to produce the declaration -- of this constant, so it is declared here once. 
-- The name is chosen so that it cannot clash with any -- legal user-defined identifer. ---------------------------------------------------------------- Null_String_Literal_Name : constant String := "null__string"; -- When generating VCG files, we normally wrap -- near 72 columns for human-readable output. Default_Wrap_Limit : constant Positive := 72; -- The maximum value for Wrap_Limit - indicates basically -- that no wrapping is required. This is used to generate -- GraphViz DOT format, where line-breaks are not permitted -- in a node label. No_Wrap : constant Positive := Positive'Last - 3; procedure Print_Cell_Contents (Heap : in Cells.Heap_Record; Output_File : in SPARK_IO.File_Type; Cell_Name : in Cells.Cell; Suppress_Wrap : in out Boolean; Scope : in Dictionary.Scopes; Wrap_Limit : in Positive; Escape_DOT : in Boolean); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Cell_Name, --# CommandLineData.Content, --# Dictionary.Dict, --# Escape_DOT, --# Heap, --# LexTokenManager.State, --# Output_File, --# Scope, --# Suppress_Wrap, --# Wrap_Limit & --# Suppress_Wrap from *, --# Cell_Name, --# Heap, --# LexTokenManager.State; procedure PrintLabel (Heap : in out Cells.Heap_Record; Output_File : in SPARK_IO.File_Type; LabelName : in Labels.Label; Scope : in Dictionary.Scopes; Wrap_Limit : in Positive); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Heap, --# LabelName & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Heap, --# LabelName, --# LexTokenManager.State, --# Output_File, --# Scope, --# Wrap_Limit; procedure PrintVCFormula (Heap : in out Cells.Heap_Record; Output_File : in SPARK_IO.File_Type; PredicatePair : in Pairs.Pair; Scope : in Dictionary.Scopes; Wrap_Limit : in Positive); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Heap, --# PredicatePair & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Output_File, --# PredicatePair, --# Scope, --# Wrap_Limit; -- Prints a VC as a dead-path conjecture (DPC) procedure PrintDPC (Heap : in out Cells.Heap_Record; Output_File : in SPARK_IO.File_Type; PredicatePair : in Pairs.Pair; Scope : in Dictionary.Scopes; Wrap_Limit : in Positive); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Heap, --# PredicatePair & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Output_File, --# PredicatePair, --# Scope, --# Wrap_Limit; --------------------------------------------------------------------- -- Partition takes an FDL predicate at Root which is assumed to be -- a conjunction (i.e A and B and C and ...) and splits it into -- a list of the component terms A, B, C, ... 
-- -- The initial value of SubExpnList must be the result of a -- CLists.CreateList operation -- -- The returned list is denoted by the final value of SubExpnList -- -- Partition is used when printing predicates which need to be -- split into conjuncts, such as VC hypotheses, conclusions, -- and when producing DOT format so we can line-break after each -- conjunct. --------------------------------------------------------------------- procedure Partition (Root : in Cells.Cell; SubExpnList : in Cells.Cell; Heap : in out Cells.Heap_Record); --# global in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Heap, --# Root, --# SubExpnList; -- Prints the DAG denoted by Root to Output_File in FDL procedure PrintDag (Heap : in out Cells.Heap_Record; Output_File : in SPARK_IO.File_Type; Root : in Cells.Cell; Scope : in Dictionary.Scopes; Wrap_Limit : in Positive); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Heap, --# Root & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Output_File, --# Root, --# Scope, --# Wrap_Limit; -- Prints the DAG denoted by Root to Output_File in Dot format for the -- GraphViz tool procedure Print_DAG_Dot (Heap : in out Cells.Heap_Record; Output_File : in SPARK_IO.File_Type; Root : in Cells.Cell; Scope : in Dictionary.Scopes; Wrap_Limit : in Positive); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Heap, --# Root & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Output_File, --# Root, --# Scope, --# Wrap_Limit; procedure Print_Heap_Dot (Heap : in out Cells.Heap_Record; Output_File : in SPARK_IO.File_Type); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Heap & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Output_File; end DAG_IO; spark-2012.0.deb/examiner/sem-walk_expression_p.adb0000644000175000017500000032564111753202336021252 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem) package body Walk_Expression_P is --# inherit CommandLineData, --# Dictionary, --# ExaminerConstants, --# Sem, --# SystemErrors; package Exp_Stack is type Exp_Stack_Type is private; procedure Init (Stack : out Exp_Stack_Type); --# derives Stack from ; procedure Push (X : in Sem.Exp_Record; Stack : in out Exp_Stack_Type); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# derives Stack from *, --# X & --# null from CommandLineData.Content, --# Dictionary.Dict; procedure Pop (Item : out Sem.Exp_Record; Stack : in out Exp_Stack_Type); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# derives Item, --# Stack from Stack & --# null from CommandLineData.Content, --# Dictionary.Dict; --# post (Dictionary.Is_Null_Symbol (Item.Type_Symbol) or Dictionary.IsTypeMark (Item.Type_Symbol, Dictionary.Dict)) and --# (Dictionary.Is_Null_Symbol (Item.Stream_Symbol) or --# Dictionary.IsFunction (Item.Stream_Symbol, Dictionary.Dict) or --# Dictionary.IsOwnVariableOrConstituentWithMode (Item.Stream_Symbol, Dictionary.Dict)); function Top (Stack : Exp_Stack_Type) return Sem.Exp_Record; --# global in Dictionary.Dict; --# return Item => ((Dictionary.Is_Null_Symbol (Item.Type_Symbol) or Dictionary.IsTypeMark (Item.Type_Symbol, Dictionary.Dict)) and --# (Dictionary.Is_Null_Symbol (Item.Stream_Symbol) or --# Dictionary.IsFunction (Item.Stream_Symbol, Dictionary.Dict) or --# Dictionary.IsOwnVariableOrConstituentWithMode (Item.Stream_Symbol, Dictionary.Dict))); function Has_One_Entry (Stack : Exp_Stack_Type) return Boolean; function Is_Empty (Stack : Exp_Stack_Type) return Boolean; private subtype Index_Range is Integer range 1 .. ExaminerConstants.WalkExpStackMax; type Stack_Array is array (Index_Range) of Sem.Exp_Record; subtype Top_Range is Integer range 0 .. ExaminerConstants.WalkExpStackMax; type Exp_Stack_Type is record S : Stack_Array; Top_Ptr : Top_Range; end record; end Exp_Stack; ----------------------------------------------------------------------------- --# inherit Dictionary, --# ExaminerConstants, --# SPARK_IO, --# SystemErrors; package Type_Context_Stack is type T_Stack_Type is private; --# function Stack_Is_Valid (Stack : T_Stack_Type) return Boolean; procedure Init (Stack : out T_Stack_Type); --# derives Stack from ; --# post Stack_Is_Valid (Stack); procedure Push (X : in Dictionary.Symbol; Stack : in out T_Stack_Type); --# global in Dictionary.Dict; --# derives Stack from *, --# X & --# null from Dictionary.Dict; --# pre Stack_Is_Valid (Stack) and (Dictionary.Is_Null_Symbol (X) or Dictionary.IsTypeMark (X, Dictionary.Dict)); --# post Stack_Is_Valid (Stack); procedure Pop (Stack : in out T_Stack_Type); --# global in Dictionary.Dict; --# derives Stack from * & --# null from Dictionary.Dict; --# pre Stack_Is_Valid (Stack); --# post Stack_Is_Valid (Stack); function Top (Stack : T_Stack_Type) return Dictionary.Symbol; --# global in Dictionary.Dict; --# pre Stack_Is_Valid (Stack); --# return S => (Dictionary.Is_Null_Symbol (S) or Dictionary.IsTypeMark (S, Dictionary.Dict)); function Has_One_Entry (Stack : T_Stack_Type) return Boolean; --# pre Stack_Is_Valid (Stack); private subtype Index_Range is Integer range 1 .. ExaminerConstants.WalkExpStackMax; type Stack_Array is array (Index_Range) of Dictionary.Symbol; subtype Top_Range is Integer range 0 .. 
ExaminerConstants.WalkExpStackMax; type T_Stack_Type is record S : Stack_Array; Top_Ptr : Top_Range; end record; end Type_Context_Stack; -------------- Package bodies ------------------------------ package body Exp_Stack is separate; package body Type_Context_Stack is separate; ----------------------------------------------------------------------------- procedure Check_Binary_Operator (Operator : in SP_Symbols.SP_Symbol; Left : in Sem.Exp_Record; Right : in Sem.Exp_Record; Scope : in Dictionary.Scopes; T_Stack : in Type_Context_Stack.T_Stack_Type; Op_Pos : in LexTokenManager.Token_Position; Left_Pos : in LexTokenManager.Token_Position; Right_Pos : in LexTokenManager.Token_Position; Convert : in Boolean; Is_Annotation : in Boolean; Result : in out Sem.Exp_Record) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Convert, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Is_Annotation, --# Left, --# Left_Pos, --# LexTokenManager.State, --# Operator, --# Op_Pos, --# Right, --# Right_Pos, --# Scope, --# SPARK_IO.File_Sys, --# T_Stack & --# Result from *, --# CommandLineData.Content, --# Convert, --# Dictionary.Dict, --# Is_Annotation, --# Left, --# Operator, --# Right, --# Scope, --# T_Stack; --# pre Type_Context_Stack.Stack_Is_Valid (T_Stack) and --# (Dictionary.Is_Null_Symbol (Result.Type_Symbol) or Dictionary.IsTypeMark (Result.Type_Symbol, Dictionary.Dict)); --# post Dictionary.Is_Null_Symbol (Result.Type_Symbol) or Dictionary.IsTypeMark (Result.Type_Symbol, Dictionary.Dict); is separate; ----------------------------------------------------------------------------- procedure Calc_Binary_Operator (Node_Pos : in LexTokenManager.Token_Position; Operator : in SP_Symbols.SP_Symbol; Left_Val, Right_Val : in Maths.Value; Is_Annotation : in Boolean; Result : in out Sem.Exp_Record) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Is_Annotation, --# Left_Val, --# LexTokenManager.State, --# Node_Pos, --# Operator, --# Result, --# Right_Val, --# SPARK_IO.File_Sys & --# Result from *, --# Dictionary.Dict, --# Left_Val, --# LexTokenManager.State, --# Operator, --# Right_Val; --# pre Dictionary.Is_Null_Symbol (Result.Type_Symbol) or Dictionary.IsTypeMark (Result.Type_Symbol, Dictionary.Dict); --# post Dictionary.Is_Null_Symbol (Result.Type_Symbol) or Dictionary.IsTypeMark (Result.Type_Symbol, Dictionary.Dict); is separate; ----------------------------------------- -- Routines for handling list of names -- ----------------------------------------- procedure Add_Name (Name : in LexTokenManager.Lex_String; List : in Lists.List; Heap_Param : in out Lists.List_Heap; Present : out Boolean) --# global in LexTokenManager.State; --# derives Heap_Param, --# Present from Heap_Param, --# LexTokenManager.State, --# List, --# Name; is separate; ----------------------------------------------------------------------------- procedure Dispose_Of_Name_List (List : in out Lists.List; Heap_Param : in out Lists.List_Heap) --# global in LexTokenManager.State; --# derives Heap_Param from *, --# LexTokenManager.State, --# List & --# List from ; is separate; 
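-- Illustrative sketch of how the bounded stacks declared above (Exp_Stack and
-- Type_Context_Stack) are assumed to behave. Their bodies are separate
-- subunits and are not reproduced here; this is only a rough guide over the
-- fixed-size array and Top_Ptr index, with overflow and underflow handling
-- omitted:
--
--    procedure Push (X : in Sem.Exp_Record; Stack : in out Exp_Stack_Type) is
--    begin
--       Stack.Top_Ptr := Stack.Top_Ptr + 1;   -- next free slot in the array
--       Stack.S (Stack.Top_Ptr) := X;
--    end Push;
--
--    procedure Pop (Item : out Sem.Exp_Record; Stack : in out Exp_Stack_Type) is
--    begin
--       Item := Stack.S (Stack.Top_Ptr);      -- value at the top of the stack
--       Stack.Top_Ptr := Stack.Top_Ptr - 1;   -- shrink the stack
--    end Pop;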
----------------------------------------------------------------------------- -- This procedure is used in wf_named_argument_association and -- expression_type_from_context to find a named argument association -- parameter. If Name_Is_Parameter_Name returns True, then the -- identifier simple_name (below Node) is a legal parameter name -- for the given subprogram, and Param_Sym denotes that parameter name. -- If Name_Is_Parameter_Name return False, then the identifier is not -- a legal parameter names, and Param_Sym is NullSymbol. procedure Find_Named_Argument_Association_Parameter (Node : in STree.SyntaxNode; Subprog_Sym : in Dictionary.Symbol; Name_Is_Parameter_Name : out Boolean; Param_Sym : out Dictionary.Symbol) --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in out STree.Table; --# derives Name_Is_Parameter_Name, --# Param_Sym, --# STree.Table from Dictionary.Dict, --# LexTokenManager.State, --# Node, --# STree.Table, --# Subprog_Sym; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.named_argument_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_named_argument_association; --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------------- procedure Wf_Tilde (Node_Pos : in LexTokenManager.Token_Position; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; Context : in Sem.Anno_Tilde_Context) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Context, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node_Pos, --# Scope, --# SPARK_IO.File_Sys & --# E_Stack from *, --# Context, --# Dictionary.Dict, --# Scope; is separate; ----------------------------------------------------------------------------- procedure Wf_Percent (Node_Pos : in LexTokenManager.Token_Position; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node_Pos, --# Scope, --# SPARK_IO.File_Sys & --# E_Stack from *, --# Dictionary.Dict, --# Scope; is separate; ----------------------------------------------------------------------------- procedure Create_Name_List (List : out Lists.List; Heap_Param : in out Lists.List_Heap) --# derives Heap_Param, --# List from Heap_Param; is separate; ----------------------------------------------------------------------------- function Unknown_Symbol_Record return Sem.Exp_Record --# global in Dictionary.Dict; is separate; ----------------------------------------------------------------------------- procedure Stack_Identifier (Sym : in Dictionary.Symbol; Id_Str : in LexTokenManager.Lex_String; Node : in STree.SyntaxNode; Prefix : in Dictionary.Symbol; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; The_Heap : in out Heap.HeapRecord; Ref_Var : in SeqAlgebra.Seq; Dotted : in Boolean; Context : in Sem.Tilde_Context; Is_Annotation : in Boolean) --# global in CommandLineData.Content; --# in 
LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Dictionary.Dict, --# E_Stack from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Dotted, --# Id_Str, --# Is_Annotation, --# LexTokenManager.State, --# Prefix, --# Scope, --# Sym & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Context, --# Dictionary.Dict, --# Dotted, --# ErrorHandler.Error_Context, --# Id_Str, --# Is_Annotation, --# LexTokenManager.State, --# Node, --# Prefix, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Sym & --# Statistics.TableUsage, --# The_Heap from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Dotted, --# Id_Str, --# Is_Annotation, --# LexTokenManager.State, --# Prefix, --# Ref_Var, --# Scope, --# Sym, --# The_Heap & --# STree.Table from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Id_Str, --# Is_Annotation, --# LexTokenManager.State, --# Node, --# Prefix, --# Scope, --# Sym; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.identifier or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.selected_component or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_selected_component; --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------------- function Ops_Are_Same_And_Commutative (Op1, Op2 : SP_Symbols.SP_Symbol) return Boolean is separate; ----------------------------------------------------------------------------- function Null_Type_Record return Sem.Exp_Record --# global in Dictionary.Dict; --# return R => (Dictionary.Is_Null_Symbol (R.Type_Symbol) or Dictionary.IsTypeMark (R.Type_Symbol, Dictionary.Dict)); is separate; ----------------------------------------------------------------------------- function Null_Parameter_Record return Sem.Exp_Record --# global in Dictionary.Dict; is separate; ------------------------------------------------------------------------ -- Subprograms used by Walk_Expression and Walk_Annotation_Expression -- ------------------------------------------------------------------------ -- Put_Exp_Record is handy for debugging expression walking, but -- is uncalled in production builds. 
procedure Put_Exp_Record (R : in Sem.Exp_Record) --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dictionary.Dict, --# LexTokenManager.State, --# R; is separate; ----------------------------------------------------------------------------- function Get_Character_Literal (Node : STree.SyntaxNode) return Maths.Value --# global in LexTokenManager.State; --# in STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.character_literal; is separate; ----------------------------------------------------------------------------- function Get_String_Literal_Length (Str : LexTokenManager.Lex_String) return Maths.Value --# global in LexTokenManager.State; is separate; ----------------------------------------------------------------------------- procedure Wf_Identifier (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; The_Heap : in out Heap.HeapRecord; Ref_Var : in SeqAlgebra.Seq; Context : in Sem.Tilde_Context) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Dictionary.Dict, --# E_Stack, --# STree.Table from *, --# CommandLineData.Content, --# Context, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Context, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table & --# SLI.State from *, --# CommandLineData.Content, --# Context, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# Statistics.TableUsage, --# The_Heap from *, --# CommandLineData.Content, --# Context, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Ref_Var, --# Scope, --# STree.Table, --# The_Heap; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.identifier; --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------------- procedure Down_Wf_Name_Argument_List (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; Heap_Param : in out Lists.List_Heap; Next_Node : out STree.SyntaxNode) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# E_Stack from *, --# Dictionary.Dict, --# Heap_Param, --# Node, --# Scope, --# STree.Table & --# Heap_Param from *, --# E_Stack, --# Node, --# STree.Table & --# Next_Node from Dictionary.Dict, --# E_Stack, --# Node, --# Scope, --# STree.Table; --# pre 
STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.name_argument_list or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_name_argument_list; --# post STree.Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.named_argument_association or --# STree.Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.positional_argument_association or --# STree.Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.annotation_named_argument_association or --# STree.Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.annotation_positional_argument_association or --# Next_Node = STree.NullNode; is separate; ----------------------------------------------------------------------------- procedure Up_Wf_Name_Argument_List (Node : in STree.SyntaxNode; E_Stack : in out Exp_Stack.Exp_Stack_Type; Heap_Param : in out Lists.List_Heap) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# Heap_Param, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table & --# E_Stack from *, --# Dictionary.Dict, --# Heap_Param, --# LexTokenManager.State, --# Node, --# STree.Table & --# Heap_Param from *, --# E_Stack, --# LexTokenManager.State, --# Node, --# STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.name_argument_list or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_name_argument_list; is separate; ----------------------------------------------------------------------------- procedure Down_Wf_Aggregate (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Next_Node : out STree.SyntaxNode; E_Stack : in out Exp_Stack.Exp_Stack_Type; Heap_Param : in out Lists.List_Heap) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out Aggregate_Stack.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Aggregate_Stack.State from *, --# Dictionary.Dict, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# E_Stack, --# Heap_Param from Dictionary.Dict, --# E_Stack, --# Heap_Param, --# Node, --# Scope, --# STree.Table & --# Next_Node from Dictionary.Dict, --# E_Stack, --# Node, --# Scope, --# STree.Table; --# pre (STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.aggregate or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_aggregate) and --# Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State); --# post (STree.Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.component_association or --# STree.Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.annotation_component_association or --# Next_Node = STree.NullNode) and --# Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State); is separate; ----------------------------------------------------------------------------- procedure Up_Wf_Aggregate (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type) --# global in CommandLineData.Content; --# in 
Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out Aggregate_Stack.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Aggregate_Stack.State, --# E_Stack from Aggregate_Stack.State, --# Dictionary.Dict, --# E_Stack, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Aggregate_Stack.State, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre (STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.aggregate or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.extension_aggregate or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_aggregate or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_extension_aggregate) and --# Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State); --# post Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State); is separate; ----------------------------------------------------------------------------- procedure Wf_Aggregate_Choice_Rep (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; Heap_Param : in out Lists.List_Heap; Next_Node : out STree.SyntaxNode) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# Heap_Param, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# E_Stack, --# Heap_Param, --# STree.Table from CommandLineData.Content, --# Dictionary.Dict, --# E_Stack, --# Heap_Param, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# Next_Node from Dictionary.Dict, --# E_Stack, --# Node, --# STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.aggregate_choice_rep or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_aggregate_choice_rep; --# post (STree.Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.aggregate_choice_rep or --# STree.Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.aggregate_choice or --# STree.Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.annotation_aggregate_choice_rep or --# STree.Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.annotation_aggregate_choice or --# Next_Node = STree.NullNode) and --# STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------------- procedure Wf_Record_Component_Selector_Name (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; Heap_Param : in out Lists.List_Heap) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# Heap_Param, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# E_Stack, --# Heap_Param from CommandLineData.Content, --# Dictionary.Dict, --# E_Stack, --# Heap_Param, --# LexTokenManager.State, --# 
Node, --# Scope, --# STree.Table & --# STree.Table from *, --# CommandLineData.Content, --# Dictionary.Dict, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.record_component_selector_name; --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------------- procedure Down_Wf_Aggregate_Or_Expression (Node : in STree.SyntaxNode; E_Stack : in out Exp_Stack.Exp_Stack_Type; Next_Node : out STree.SyntaxNode) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table & --# E_Stack, --# Next_Node from Dictionary.Dict, --# E_Stack, --# Node, --# STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.aggregate_or_expression or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_aggregate_or_expression; --# post STree.Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.aggregate or --# STree.Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.expression or --# STree.Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.annotation_aggregate or --# STree.Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.annotation_expression or --# Next_Node = STree.NullNode; is separate; ----------------------------------------------------------------------------- procedure Up_Wf_Aggregate_Or_Expression (Node : in out STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out Aggregate_Stack.State; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Aggregate_Stack.State, --# LexTokenManager.State, --# Node, --# STree.Table from *, --# Dictionary.Dict, --# E_Stack, --# Node, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# E_Stack from *, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table; --# pre (STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.aggregate_or_expression or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_aggregate_or_expression) and --# Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State); --# post STree.Table = STree.Table~ and --# Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State); is separate; --------------------------------------------------------------------------------- -- The type context needed for an attribute with arguments (i.e. 'Val, 'Max etc) -- depends on the Prefix and the attribute identifier. Some attributes have two -- arguments, so we need a special handler here, since an attribute_designator -- node is the closest common parent node of one or both arguments.
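--
-- Editorial illustration (not part of the original source): a minimal sketch of
-- why the argument context depends on the attribute, assuming standard Ada
-- attribute semantics and a hypothetical scalar type T with objects A and N.
--
--    A : T;
--    N : Integer;
--    ...
--    A := T'Val (N);            -- the argument is resolved in an integer
--                               -- context, regardless of the prefix type T
--    A := T'Max (A, T'First);   -- both arguments take the prefix type T
--                               -- as their type context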
--------------------------------------------------------------------------------- function Attribute_Designator_Type_From_Context (Exp_Node : STree.SyntaxNode; E_Stack : Exp_Stack.Exp_Stack_Type; T_Stack : Type_Context_Stack.T_Stack_Type) return Dictionary.Symbol --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# pre (STree.Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.attribute_designator or --# STree.Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.annotation_attribute_designator) and --# Type_Context_Stack.Stack_Is_Valid (T_Stack); --# return S => (Dictionary.Is_Null_Symbol (S) or Dictionary.IsTypeMark (S, Dictionary.Dict)); is separate; --------------------------------------------------------------------------------- -- Range constraints appear below aggregate_choice, case_choice, and below -- relational operators, and so can have a change of type context. --------------------------------------------------------------------------------- function Range_Constraint_Type_From_Context (Exp_Node : STree.SyntaxNode; E_Stack : Exp_Stack.Exp_Stack_Type; T_Stack : Type_Context_Stack.T_Stack_Type) return Dictionary.Symbol --# global in Dictionary.Dict; --# in STree.Table; --# pre (STree.Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.range_constraint or --# STree.Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.annotation_range_constraint) and --# Type_Context_Stack.Stack_Is_Valid (T_Stack); --# return S => (Dictionary.Is_Null_Symbol (S) or Dictionary.IsTypeMark (S, Dictionary.Dict)); is separate; --------------------------------------------------------------------------------- -- Simple expressions appear below aggregate_choice, case_choice, and below -- relational operators, and so can have a change of type context.
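--
-- Editorial illustration (not part of the original source): a sketch of such a
-- context change, using hypothetical declarations I and B. The operands of the
-- relational operator and the choice range are resolved against their own
-- types, not against the context of the enclosing construct.
--
--    I : Integer;
--    B : Boolean;
--    ...
--    B := I < 10;            -- the "<" operands use an Integer context even
--                            -- though the expression context is Boolean
--    case I is
--       when 1 .. 5 => ...   -- the choice range 1 .. 5 is resolved in the
--       when others => ...   -- context of the case expression's type
--    end case;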
--------------------------------------------------------------------------------- function Simple_Expression_Type_From_Context (Exp_Node : STree.SyntaxNode; T_Stack : Type_Context_Stack.T_Stack_Type) return Dictionary.Symbol --# global in Aggregate_Stack.State; --# in Dictionary.Dict; --# in STree.Table; --# pre (STree.Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.simple_expression or --# STree.Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.annotation_simple_expression) and --# Type_Context_Stack.Stack_Is_Valid (T_Stack) and --# Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State); --# return S => (Dictionary.Is_Null_Symbol (S) or Dictionary.IsTypeMark (S, Dictionary.Dict)); is separate; --------------------------------------------------------------------------------- -- Returns the type needed for an expression from its context --------------------------------------------------------------------------------- procedure Expression_Type_From_Context (Exp_Node : in STree.SyntaxNode; E_Stack : in Exp_Stack.Exp_Stack_Type; T_Stack : in Type_Context_Stack.T_Stack_Type; New_Context_Type : out Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out STree.Table; --# derives New_Context_Type from CommandLineData.Content, --# Dictionary.Dict, --# Exp_Node, --# E_Stack, --# LexTokenManager.State, --# STree.Table, --# T_Stack & --# STree.Table from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Exp_Node, --# E_Stack, --# LexTokenManager.State; --# pre (STree.Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.expression or --# STree.Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.annotation_expression) and --# Type_Context_Stack.Stack_Is_Valid (T_Stack); --# post (Dictionary.Is_Null_Symbol (New_Context_Type) or Dictionary.IsTypeMark (New_Context_Type, Dictionary.Dict)) --# and STree.Table = STree.Table~; is separate; --------------------------------------------------------------------------------- -- Primary grammar productions nearly always have the same type context -- as their parent nodes, but there is a single exception - when a primary -- node is the right-hand operand of an exponentiation operator, where the -- context is always Integer.
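--
-- Editorial illustration (not part of the original source), assuming the
-- predefined "**" operators and hypothetical declarations X and N:
--
--    X : Float;
--    N : Integer;
--    ...
--    X := X ** N;   -- the left operand is resolved in a Float context, but
--                   -- the right operand of "**" always has type Integer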
--------------------------------------------------------------------------------- function Primary_Type_From_Context (Node : STree.SyntaxNode; T_Stack : Type_Context_Stack.T_Stack_Type) return Dictionary.Symbol --# global in Dictionary.Dict; --# in STree.Table; --# pre (STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.primary or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_primary) and --# Type_Context_Stack.Stack_Is_Valid (T_Stack); --# return S => (Dictionary.Is_Null_Symbol (S) or Dictionary.IsTypeMark (S, Dictionary.Dict)); is separate; ----------------------------------------------------------------------------- procedure Wf_Expression (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; T_Stack : in Type_Context_Stack.T_Stack_Type) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# T_Stack & --# E_Stack, --# STree.Table from CommandLineData.Content, --# Dictionary.Dict, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# T_Stack; --# pre (STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.expression or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.expression_rep1 or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.expression_rep2 or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.expression_rep3 or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.expression_rep4 or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.expression_rep5 or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_expression or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_expression_rep1 or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_expression_rep2 or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_expression_rep3 or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_expression_rep4 or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_expression_rep5 or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_expression_rep6 or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_expression_rep7) and --# Type_Context_Stack.Stack_Is_Valid (T_Stack); --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------------- procedure Wf_Simple_Expression (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; T_Stack : in Type_Context_Stack.T_Stack_Type; Context_Requires_Static : in Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Context_Requires_Static, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# T_Stack & --# E_Stack from *, --# 
CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# T_Stack & --# LexTokenManager.State from *, --# Dictionary.Dict, --# E_Stack, --# Node, --# STree.Table & --# STree.Table from *, --# CommandLineData.Content, --# Dictionary.Dict, --# E_Stack, --# Node, --# Scope, --# T_Stack; --# pre (STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.simple_expression or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_simple_expression) and --# Type_Context_Stack.Stack_Is_Valid (T_Stack); --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------------- procedure Wf_Simple_Expression_Opt (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; T_Stack : in Type_Context_Stack.T_Stack_Type) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# T_Stack & --# E_Stack from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# T_Stack & --# STree.Table from *, --# Dictionary.Dict, --# E_Stack, --# Node, --# T_Stack; --# pre (STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.simple_expression_opt or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_simple_expression_opt) and --# Type_Context_Stack.Stack_Is_Valid (T_Stack); --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------------- procedure Wf_Term (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; T_Stack : in Type_Context_Stack.T_Stack_Type; Context_Requires_Static : in Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Context_Requires_Static, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# T_Stack & --# E_Stack from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# T_Stack & --# STree.Table from *, --# CommandLineData.Content, --# Dictionary.Dict, --# E_Stack, --# Node, --# Scope, --# T_Stack; --# pre (STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.term or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_term) and --# Type_Context_Stack.Stack_Is_Valid (T_Stack); --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------------- procedure Wf_Factor (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; T_Stack : in Type_Context_Stack.T_Stack_Type) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out 
STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# T_Stack & --# E_Stack from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# T_Stack & --# STree.Table from *, --# CommandLineData.Content, --# Dictionary.Dict, --# E_Stack, --# Node, --# Scope, --# T_Stack; --# pre (STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.factor or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_factor) and --# Type_Context_Stack.Stack_Is_Valid (T_Stack); --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------------- procedure Wf_Relation (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; T_Stack : in Type_Context_Stack.T_Stack_Type) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# T_Stack & --# E_Stack from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# T_Stack & --# STree.Table from *, --# Dictionary.Dict, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope; --# pre (STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.relation or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_relation) and --# Type_Context_Stack.Stack_Is_Valid (T_Stack); --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------------- procedure Wf_Arange (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# E_Stack from *, --# Dictionary.Dict, --# Node, --# Scope, --# STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.arange or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_arange; is separate; ----------------------------------------------------------------------------- procedure Wf_Selected_Component (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Ref_Var : in SeqAlgebra.Seq; E_Stack : in out Exp_Stack.Exp_Stack_Type; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord; Context : in Sem.Tilde_Context) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in 
out Statistics.TableUsage; --# in out STree.Table; --# derives Component_Data from *, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# E_Stack, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# Context, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# E_Stack from *, --# CommandLineData.Content, --# Component_Data, --# Context, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# SLI.State from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# E_Stack, --# LexTokenManager.State, --# Node, --# Ref_Var, --# Scope, --# STree.Table, --# The_Heap; --# pre (STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.selected_component or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_selected_component) and --# ((STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_selected_component) -> --# (Context in Sem.Anno_Tilde_Context)); --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------------- procedure Wf_Attribute (E_Stack : in out Exp_Stack.Exp_Stack_Type) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# derives E_Stack from *, --# Dictionary.Dict & --# null from CommandLineData.Content; is separate; ----------------------------------------------------------------------------- procedure Wf_Attribute_Designator (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; The_Heap : in out Heap.HeapRecord; Ref_Var : in SeqAlgebra.Seq) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# E_Stack, --# STree.Table from CommandLineData.Content, --# Dictionary.Dict, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# Statistics.TableUsage, --# The_Heap from *, --# CommandLineData.Content, --# Dictionary.Dict, --# E_Stack, --# LexTokenManager.State, --# Node, --# Ref_Var, --# Scope, --# STree.Table, --# The_Heap; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.attribute_designator or --# STree.Syntax_Node_Type (Node, STree.Table) 
= SP_Symbols.annotation_attribute_designator; --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------------- procedure Wf_Primary (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Ref_Var : in SeqAlgebra.Seq; E_Stack : in out Exp_Stack.Exp_Stack_Type; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Component_Data, --# Dictionary.Dict from Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# E_Stack, --# Node, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# E_Stack from *, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# E_Stack, --# Node, --# Ref_Var, --# STree.Table, --# The_Heap & --# STree.Table from *, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# E_Stack, --# Node, --# Scope, --# The_Heap; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.primary or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_primary; --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------------- procedure Wf_Positional_Argument_Association (Node : in out STree.SyntaxNode; Scope : in Dictionary.Scopes; Ref_Var : in SeqAlgebra.Seq; E_Stack : in out Exp_Stack.Exp_Stack_Type; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Component_Data from *, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# E_Stack, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# Dictionary.Dict, --# E_Stack from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Dictionary.Dict, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# LexTokenManager.State from *, --# CommandLineData.Content, --# Dictionary.Dict, --# E_Stack, --# Node, --# Scope, --# STree.Table & --# Node from *, --# Dictionary.Dict, --# E_Stack, --# STree.Table & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# 
Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# Statistics.TableUsage, --# The_Heap from *, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# E_Stack, --# Node, --# Ref_Var, --# Scope, --# STree.Table, --# The_Heap & --# STree.Table from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.positional_argument_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_positional_argument_association; --# post (STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.positional_argument_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_positional_argument_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.name_argument_list or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_name_argument_list) and --# STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------------- procedure Wf_Named_Argument_Association (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; Heap_Param : in out Lists.List_Heap) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# E_Stack, --# Heap_Param, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Dictionary.Dict, --# E_Stack, --# Heap_Param, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# E_Stack from *, --# Dictionary.Dict, --# Heap_Param, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# Heap_Param from *, --# Dictionary.Dict, --# E_Stack, --# LexTokenManager.State, --# Node, --# STree.Table & --# LexTokenManager.State from *, --# CommandLineData.Content, --# Dictionary.Dict, --# E_Stack, --# Heap_Param, --# Node, --# Scope, --# STree.Table & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# Heap_Param, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.named_argument_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_named_argument_association; --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------------- procedure Wf_Ancestor_Part (Node : in out STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; Heap_Param : in out Lists.List_Heap) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, 
--# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# E_Stack, --# Heap_Param from Dictionary.Dict, --# E_Stack, --# Heap_Param, --# Node, --# Scope, --# STree.Table & --# Node from *, --# Dictionary.Dict, --# E_Stack, --# Scope, --# STree.Table & --# STree.Table from *, --# E_Stack, --# Node; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.ancestor_part or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_ancestor_part; --# post STree.Table = STree.Table~ and --# (STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.ancestor_part or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_ancestor_part or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.extension_aggregate or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_extension_aggregate or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.qualified_expression or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_qualified_expression); is separate; ----------------------------------------------------------------------------- procedure Wf_Aggregate_Choice (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out Aggregate_Stack.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Aggregate_Stack.State from *, --# Dictionary.Dict, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Aggregate_Stack.State, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# E_Stack from *, --# Dictionary.Dict, --# Node, --# Scope, --# STree.Table; --# pre (STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.aggregate_choice or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_aggregate_choice) and --# Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State); --# post Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State); is separate; ----------------------------------------------------------------------------- procedure Wf_Named_Association_Rep (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; Heap_Param : in out Lists.List_Heap) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# Heap_Param, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# E_Stack, --# Heap_Param from Dictionary.Dict, --# E_Stack, --# Heap_Param, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# LexTokenManager.State, --# STree.Table from *, --# Dictionary.Dict, --# E_Stack, --# Node, --# STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.named_association_rep or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_named_association_rep; --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------------- procedure 
Wf_Named_Record_Component_Association (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; Heap_Param : in out Lists.List_Heap) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# Heap_Param, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# E_Stack, --# Heap_Param from Dictionary.Dict, --# E_Stack, --# Heap_Param, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# LexTokenManager.State from *, --# Dictionary.Dict, --# E_Stack & --# STree.Table from *, --# Dictionary.Dict, --# E_Stack, --# Node; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.named_record_component_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_named_record_component_association; --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------------- procedure Wf_Positional_Association (Node : in STree.SyntaxNode; E_Stack : in out Exp_Stack.Exp_Stack_Type) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table & --# E_Stack from *, --# Dictionary.Dict, --# Node, --# STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.positional_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.record_component_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_positional_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_record_component_association; is separate; ----------------------------------------------------------------------------- procedure Wf_Positional_Record_Component_Association (Node : in out STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# E_Stack from *, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope & --# LexTokenManager.State from *, --# Dictionary.Dict, --# E_Stack & --# Node, --# STree.Table from Dictionary.Dict, --# E_Stack, --# Node, --# STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.positional_record_component_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_positional_record_component_association; --# post (STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.positional_record_component_association or --# STree.Syntax_Node_Type (Node, STree.Table) = 
SP_Symbols.annotation_positional_record_component_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_record_component_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.record_component_association) and --# STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------------- procedure Wf_Component_Association (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# E_Stack from *, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# LexTokenManager.State, --# STree.Table from *, --# Dictionary.Dict, --# E_Stack, --# Node, --# STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.component_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_component_association; --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------------- procedure Wf_Qualified_Expression (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# E_Stack, --# LexTokenManager.State from Dictionary.Dict, --# E_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.qualified_expression or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_qualified_expression; is separate; ----------------------------------------------------------------------------- procedure Walk_Expression (Exp_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Type_Context : in Dictionary.Symbol; Context_Requires_Static : in Boolean; Ref_Var : in SeqAlgebra.Seq; Result : out Sem.Exp_Record; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) is separate; ----------------------------------------------------------------------------- procedure Walk_Annotation_Expression (Exp_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Type_Context : in Dictionary.Symbol; Context : in Sem.Anno_Tilde_Context; Result : out Sem.Exp_Record; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) is separate; end Walk_Expression_P; spark-2012.0.deb/examiner/sem-compunit-walkstatements.adb0000644000175000017500000027334011753202336022416 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free 
software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.CompUnit) procedure WalkStatements (Seq_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Table : in out RefList.HashTable; Component_Data : out ComponentManager.ComponentData) is --# inherit CompleteCheck, --# Dictionary, --# ExaminerConstants, --# Sem, --# SPARK_IO, --# SystemErrors; package Case_Stack --# own State : Stack_T; is --# type Stack_T is abstract; --# function Stack_Is_Valid (The_State : Stack_T) return Boolean; procedure Init; --# global in Dictionary.Dict; --# out State; --# derives State from & --# null from Dictionary.Dict; --# post Stack_Is_Valid (State); procedure Push (Case_Flags : in Sem.Typ_Case_Flags; Complete_ADT : in CompleteCheck.T; Sym : in Dictionary.Symbol; Lower_Bound : in Sem.Typ_Type_Bound; Upper_Bound : in Sem.Typ_Type_Bound); --# global in Dictionary.Dict; --# in out State; --# derives State from *, --# Case_Flags, --# Complete_ADT, --# Lower_Bound, --# Sym, --# Upper_Bound & --# null from Dictionary.Dict; --# pre Stack_Is_Valid (State) and --# (Complete_ADT.ActualUpperBound - Complete_ADT.LowerBound < ExaminerConstants.CompleteCheckSize) and --# (Dictionary.Is_Null_Symbol (Sym) or Dictionary.IsTypeMark (Sym, Dictionary.Dict)); --# post Stack_Is_Valid (State); procedure Pop (Case_Flags : out Sem.Typ_Case_Flags; Complete_ADT : out CompleteCheck.T; Sym : out Dictionary.Symbol; Lower_Bound : out Sem.Typ_Type_Bound; Upper_Bound : out Sem.Typ_Type_Bound); --# global in Dictionary.Dict; --# in out State; --# derives Case_Flags, --# Complete_ADT, --# Lower_Bound, --# State, --# Sym, --# Upper_Bound from State & --# null from Dictionary.Dict; --# pre Stack_Is_Valid (State); --# post Stack_Is_Valid (State) and --# (Complete_ADT.ActualUpperBound - Complete_ADT.LowerBound < ExaminerConstants.CompleteCheckSize) and --# (Dictionary.Is_Null_Symbol (Sym) or Dictionary.IsTypeMark (Sym, Dictionary.Dict)); end Case_Stack; ----------------------------------------------------------------- -- VariableUpdateHistory -- -- Description: -- An object of the ADT, VariableUpdateHistory.History_T, is used to -- maintain a list of variables (represented by natural numbers) and -- each variable has a single associated STree.SyntaxNode. -- The node is used to record the last occurrence of an update -- of each variable as the syntax tree is traversed. -- To use an object of History_T it must first be -- created using Create_History and must be disposed of when it is no -- longer required (and certainly before leaving the scope in -- which the History_T object is declared) using Dispose_Of_History. -- If the specified Heap.HeapRecord object becomes exhausted, -- the Examiner will fail with a fatal error.
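--
-- Editorial sketch (not part of the original source) of the calling protocol
-- described above. The_Heap, History, Var and Upd_Node are hypothetical
-- objects of the appropriate types; Upd_Node is assumed to be an
-- assignment_statement or procedure_call_statement node, as required by
-- Add_Update's precondition.
--
--    Create_History (The_Heap => The_Heap, History => History);
--    ...
--    Add_Update (The_Heap => The_Heap, History => History,
--                Variable => Var, Node => Upd_Node);
--    ...
--    Get_Last_Update (The_Heap => The_Heap, History => History,
--                     Variable => Var, Node => Upd_Node);
--    ...
--    Dispose_Of_History (The_Heap => The_Heap, History => History);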
------------------------------------------------------------------ --# inherit ExaminerConstants, --# Heap, --# Sem, --# SP_Symbols, --# Statistics, --# STree, --# SystemErrors; package VariableUpdateHistory is type History_T is private; ----------------------------------------------------------------- -- Create_History -- -- Description: -- Initialises an object of type History_T. This subprogram -- must be called prior to using the object. -- The_Heap object must be an initialised Heap.HeapRecord Object. ------------------------------------------------------------------ procedure Create_History (The_Heap : in out Heap.HeapRecord; History : out History_T); --# global in out Statistics.TableUsage; --# derives History, --# The_Heap from The_Heap & --# Statistics.TableUsage from *, --# The_Heap; ----------------------------------------------------------------- -- Dispose_Of_History -- -- Description: -- Disposes of an object of type History_T. This subprogram -- must be called when object is no longer required and -- certainly before leaving the scope in which the History_T -- object is declared. The_Heap object must be the same object as -- was used in the call to Create_History for the History_T object. ------------------------------------------------------------------ procedure Dispose_Of_History (The_Heap : in out Heap.HeapRecord; History : in History_T); --# derives The_Heap from *, --# History; ----------------------------------------------------------------- -- Add_Update -- -- Description: -- Adds a variable - node pair to the History_T object (History). -- If the Variable is not present in History, it is added to -- History along with its associated node (Node). -- Otherwise, if the Variable is present in the History, the -- node associated with the Variable is updated to the value -- of the given Node. -- The_Heap must be the same object as used in the preceding -- call to Create_History. ------------------------------------------------------------------ procedure Add_Update (The_Heap : in out Heap.HeapRecord; History : in out History_T; Variable : in Natural; Node : in STree.SyntaxNode); --# global in STree.Table; --# in out Statistics.TableUsage; --# derives History, --# The_Heap from History, --# Node, --# The_Heap, --# Variable & --# Statistics.TableUsage from *, --# History, --# The_Heap, --# Variable & --# null from STree.Table; --# pre Sem.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.assignment_statement or --# Sem.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.procedure_call_statement; ----------------------------------------------------------------- -- Get_Last_Update -- -- Description: -- Obtains the value of the node currently associated with the -- given variable in the History. If the given Variable does -- not exist in the History a STree.NullNode will be -- returned. -- The_Heap must be the same object as used in the preceding -- call to Create_History. ------------------------------------------------------------------ procedure Get_Last_Update (The_Heap : in Heap.HeapRecord; History : in History_T; Variable : in Natural; Node : out STree.SyntaxNode); --# global in STree.Table; --# derives Node from History, --# The_Heap, --# Variable & --# null from STree.Table; --# post Sem.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.assignment_statement or --# Sem.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.procedure_call_statement or --# Node = STree.NullNode; private type History_T is range 0 .. 
Heap.Atom'Last; end VariableUpdateHistory; -------------------------------------------- L_Scope : Dictionary.Scopes; Next_Node, Last_Node : STree.SyntaxNode; Node_Type : SP_Symbols.SP_Symbol; Pure_Protected_Export_List : VariableUpdateHistory.History_T; -------------------------------------------- package body Case_Stack --# own State is S, --# Top_Ptr; is type Stack_Record is record Case_Flags : Sem.Typ_Case_Flags; Complete_ADT : CompleteCheck.T; Sym : Dictionary.Symbol; Upper_Bound : Sem.Typ_Type_Bound; Lower_Bound : Sem.Typ_Type_Bound; end record; Null_Record : constant Stack_Record := Stack_Record' (Case_Flags => Sem.Null_Case_Flags, Complete_ADT => CompleteCheck.NullT, Sym => Dictionary.NullSymbol, Upper_Bound => Sem.Unknown_Type_Bound, Lower_Bound => Sem.Unknown_Type_Bound); subtype Index_Range is Integer range 1 .. ExaminerConstants.WalkStmtStackMax; type Stack_Array is array (Index_Range) of Stack_Record; subtype Top_Range is Integer range 0 .. ExaminerConstants.WalkStmtStackMax; S : Stack_Array; Top_Ptr : Top_Range; procedure Init --# global in Dictionary.Dict; --# out S; --# out Top_Ptr; --# derives S, --# Top_Ptr from & --# null from Dictionary.Dict; --# post (for all I in Index_Range range Index_Range'First .. Top_Ptr => --# ((S(I).Complete_ADT.ActualUpperBound - S(I).Complete_ADT.LowerBound < ExaminerConstants.CompleteCheckSize) and --# (Dictionary.Is_Null_Symbol (S(I).Sym) or Dictionary.IsTypeMark (S(I).Sym, Dictionary.Dict)))); is begin Top_Ptr := 0; -- We allow a partial initialization of S here, mainly -- to avoid massive usage of stack, which is incompatible -- with the default stack limit on OS X. It's also much -- faster. --# accept Flow, 23, S, "Partial initialization here OK" & --# Flow, 30, Dictionary.Dict, "Variable not referenced nor exported OK" & --# Flow, 602, S, S, "Partial initialization here OK"; S (Index_Range'First) := Null_Record; end Init; procedure Push (Case_Flags : in Sem.Typ_Case_Flags; Complete_ADT : in CompleteCheck.T; Sym : in Dictionary.Symbol; Lower_Bound : in Sem.Typ_Type_Bound; Upper_Bound : in Sem.Typ_Type_Bound) --# global in Dictionary.Dict; --# in out S; --# in out Top_Ptr; --# derives S from *, --# Case_Flags, --# Complete_ADT, --# Lower_Bound, --# Sym, --# Top_Ptr, --# Upper_Bound & --# Top_Ptr from * & --# null from Dictionary.Dict; --# pre (for all I in Index_Range range Index_Range'First .. Top_Ptr => --# ((S(I).Complete_ADT.ActualUpperBound - S(I).Complete_ADT.LowerBound < ExaminerConstants.CompleteCheckSize) and --# (Dictionary.Is_Null_Symbol (S(I).Sym) or Dictionary.IsTypeMark (S(I).Sym, Dictionary.Dict)))) and --# (Complete_ADT.ActualUpperBound - Complete_ADT.LowerBound < ExaminerConstants.CompleteCheckSize) and --# (Dictionary.Is_Null_Symbol (Sym) or Dictionary.IsTypeMark (Sym, Dictionary.Dict)); --# post (for all I in Index_Range range Index_Range'First .. 
Top_Ptr => --# ((S(I).Complete_ADT.ActualUpperBound - S(I).Complete_ADT.LowerBound < ExaminerConstants.CompleteCheckSize) and --# (Dictionary.Is_Null_Symbol (S(I).Sym) or Dictionary.IsTypeMark (S(I).Sym, Dictionary.Dict)))); is begin if Top_Ptr = ExaminerConstants.WalkStmtStackMax then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Case_Stack_Overflow, Msg => "in WalkStatements.Case_Stack.Push"); end if; --# check Top_Ptr < ExaminerConstants.WalkStmtStackMax; Top_Ptr := Top_Ptr + 1; S (Top_Ptr) := Stack_Record' (Case_Flags => Case_Flags, Complete_ADT => Complete_ADT, Sym => Sym, Lower_Bound => Lower_Bound, Upper_Bound => Upper_Bound); --# accept Flow, 30, Dictionary.Dict, "Variable not referenced nor exported OK"; end Push; procedure Pop (Case_Flags : out Sem.Typ_Case_Flags; Complete_ADT : out CompleteCheck.T; Sym : out Dictionary.Symbol; Lower_Bound : out Sem.Typ_Type_Bound; Upper_Bound : out Sem.Typ_Type_Bound) --# global in Dictionary.Dict; --# in S; --# in out Top_Ptr; --# derives Case_Flags, --# Complete_ADT, --# Lower_Bound, --# Sym, --# Upper_Bound from S, --# Top_Ptr & --# Top_Ptr from * & --# null from Dictionary.Dict; --# pre (for all I in Index_Range range Index_Range'First .. Top_Ptr => --# ((S(I).Complete_ADT.ActualUpperBound - S(I).Complete_ADT.LowerBound < ExaminerConstants.CompleteCheckSize) and --# (Dictionary.Is_Null_Symbol (S(I).Sym) or Dictionary.IsTypeMark (S(I).Sym, Dictionary.Dict)))); --# post (for all I in Index_Range range Index_Range'First .. Top_Ptr => --# ((S(I).Complete_ADT.ActualUpperBound - S(I).Complete_ADT.LowerBound < ExaminerConstants.CompleteCheckSize) and --# (Dictionary.Is_Null_Symbol (S(I).Sym) or Dictionary.IsTypeMark (S(I).Sym, Dictionary.Dict)))) and --# (Complete_ADT.ActualUpperBound - Complete_ADT.LowerBound < ExaminerConstants.CompleteCheckSize) and --# (Dictionary.Is_Null_Symbol (Sym) or Dictionary.IsTypeMark (Sym, Dictionary.Dict)); is begin if Top_Ptr = 0 then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Case_Stack_Underflow, Msg => "in WalkStatements.Case_Stack.Pop"); end if; --# check Top_Ptr > 0; Case_Flags := S (Top_Ptr).Case_Flags; Complete_ADT := S (Top_Ptr).Complete_ADT; Sym := S (Top_Ptr).Sym; Lower_Bound := S (Top_Ptr).Lower_Bound; Upper_Bound := S (Top_Ptr).Upper_Bound; Top_Ptr := Top_Ptr - 1; --# accept Flow, 30, Dictionary.Dict, "Variable not referenced nor exported OK"; end Pop; end Case_Stack; -------------------------------------------------------------------- package body VariableUpdateHistory is separate; -------------------------------------------------------------------- function Is_Last_In_Sequence (Node : STree.SyntaxNode) return Boolean --# global in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.exit_statement or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.loop_statement or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.return_statement; is Local_Node : STree.SyntaxNode; Result : Boolean; function No_Node_Or_Only_Permitted_Nodes_Present (Seq_Node : STree.SyntaxNode) return Boolean --# global in STree.Table; --# pre Syntax_Node_Type (Seq_Node, STree.Table) = SP_Symbols.sequence_of_statements; is Result : Boolean; Seq_Node_To_Check : STree.SyntaxNode; function Permitted_Statement (Seq_Node : STree.SyntaxNode) return Boolean --# global in STree.Table; --# pre Syntax_Node_Type (Seq_Node, STree.Table) = SP_Symbols.sequence_of_statements; is begin -- First implementation of this function only allows justification_statements. 
-- It could be extended to allow pragmas and proof statements if desired. return Syntax_Node_Type (Node => Child_Node -- simple, compound, justification, proof statement or apragma (Next_Sibling (Current_Node => Seq_Node))) = -- statement SP_Symbols.justification_statement; end Permitted_Statement; begin -- No_Node_Or_Only_Permitted_Nodes_Present Seq_Node_To_Check := Seq_Node; loop --# assert Syntax_Node_Type (Seq_Node_To_Check, STree.Table) = SP_Symbols.sequence_of_statements; -- exit when we get to the top of the sequence of statements and there are -- no more statements to check; this happens immediately on the first pass through the -- loop if there are no statements of any kind after the one we are checking on entry -- to Is_Last_In_Sequence if Syntax_Node_Type (Node => Parent_Node (Current_Node => Seq_Node_To_Check)) /= SP_Symbols.sequence_of_statements then -- it must be subprogram_implementation or something else, but there are definitely no -- more statements Result := True; exit; end if; -- failure case, a non-permitted statement if not Permitted_Statement (Seq_Node => Seq_Node_To_Check) then Result := False; exit; end if; -- move up chain of sequence_of_statements Seq_Node_To_Check := Parent_Node (Current_Node => Seq_Node_To_Check); end loop; return Result; end No_Node_Or_Only_Permitted_Nodes_Present; begin -- Is_Last_In_Sequence -- On entry, node is one of: exit_statement, loop_statement or return_statement. -- These nodes, under certain circumstances, are required to be the last executable -- statement in a sequence of statements. -- -- Grammar: -- -- e.g. subprogram_implementation -- | -- sequence_of_statements --- (designator, hidden part etc.) -- | -- sequence_of_statements --- statement (last one in seq) -- | -- sequence_of_statements --- statement -- | -- statement (first one in seq) -- Local_Node := Parent_Node (Current_Node => Node); -- ASSUME Local_Node = simple_statement OR compound_statement SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Local_Node) = SP_Symbols.simple_statement or else Syntax_Node_Type (Node => Local_Node) = SP_Symbols.compound_statement, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = simple_statement OR compound_statement in Is_Last_In_Sequence"); Local_Node := Parent_Node (Current_Node => Local_Node); -- ASSUME Local_Node = statement SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Local_Node) = SP_Symbols.statement, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = statement in Is_Last_In_Sequence"); Local_Node := Parent_Node (Current_Node => Local_Node); -- ASSUME Local_Node = sequence_of_statements SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Local_Node) = SP_Symbols.sequence_of_statements, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = sequence_of_statements in Is_Last_In_Sequence"); -- If the Seq we have reached has no Next_Sibling then there are no statements of any kind -- after the node we are checking and which was passed in as the parameter Node. -- If there is a Next_Sibling we need to check that the associated statements are -- permitted ones, i.e. they are not unreachable executable statements.
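--
-- Editorial sketch (not part of the original source) of the shape being
-- checked here: where a final loop must be the last executable statement in
-- its sequence, only a "permitted" statement such as a flow justification may
-- still follow it. The message number, variable name and text below are
-- purely illustrative.
--
--    loop
--       Poll;   -- hypothetical body
--    end loop;
--    --# accept Flow, 30, X, "X is not referenced after the final loop";
--
-- Any other executable statement in that position would cause
-- No_Node_Or_Only_Permitted_Nodes_Present to return False.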
-- -- For now, we only do the permitted node check for the loop case; this is where we have a customer report. -- The other two cases have separate problems: allowing something after an exit confuses the flow analyser, -- and a separate, redundant check in wf_subprogram_body still traps a return that is not the last -- statement even if we were to allow it here. if Syntax_Node_Type (Node => Node) = SP_Symbols.loop_statement then -- Node is the node we entered with; allow "permitted statements" after a loop Result := No_Node_Or_Only_Permitted_Nodes_Present (Seq_Node => Local_Node); else -- don't allow statements after exit or return Result := Syntax_Node_Type (Node => Parent_Node (Current_Node => Local_Node)) /= SP_Symbols.sequence_of_statements; end if; return Result; end Is_Last_In_Sequence; -------------------------------------------------------------------- function Parent_Of_Sequence (Node : STree.SyntaxNode) return STree.SyntaxNode --# global in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.exit_statement or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.if_statement or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.loop_statement or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.return_statement; --# return Return_Node => (Return_Node /= STree.NullNode and --# Syntax_Node_Type (Return_Node, STree.Table) /= SP_Symbols.sequence_of_statements); is Local_Node : STree.SyntaxNode; begin Local_Node := Parent_Node (Current_Node => Node); -- ASSUME Local_Node = simple_statement OR compound_statement SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Local_Node) = SP_Symbols.simple_statement or else Syntax_Node_Type (Node => Local_Node) = SP_Symbols.compound_statement, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = simple_statement OR compound_statement in Parent_Of_Sequence"); Local_Node := Parent_Node (Current_Node => Local_Node); -- ASSUME Local_Node = statement SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Local_Node) = SP_Symbols.statement, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = statement in Parent_Of_Sequence"); Local_Node := Parent_Node (Current_Node => Local_Node); -- ASSUME Local_Node = sequence_of_statements SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Local_Node) = SP_Symbols.sequence_of_statements, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = sequence_of_statements in Parent_Of_Sequence"); while Syntax_Node_Type (Node => Local_Node) = SP_Symbols.sequence_of_statements loop --# assert Syntax_Node_Type (Local_Node, STree.Table) = SP_Symbols.sequence_of_statements; Local_Node := Parent_Node (Current_Node => Local_Node); end loop; -- ASSUME Local_Node /= NULL SystemErrors.RT_Assert (C => Local_Node /= STree.NullNode, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node /= NULL in Parent_Of_Sequence"); return Local_Node; end Parent_Of_Sequence; -------------------------------------------------------------------- -- patch a relation so that implicit side-effects are included.
For -- stream exports this means adding a self-reference to the import list -- for stream imports it means adding a complete new relation deriving -- it from itself procedure Add_Stream_Effects (Table : in out RefList.HashTable; The_Heap : in out Heap.HeapRecord; Node : in STree.SyntaxNode; Export : in Dictionary.Symbol; Imports : in SeqAlgebra.Seq) --# global in Dictionary.Dict; --# in STree.Table; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# The_Heap from *, --# Dictionary.Dict, --# Export, --# Imports, --# Node, --# Table, --# The_Heap & --# Table from *, --# Dictionary.Dict, --# Imports, --# Node, --# The_Heap & --# null from STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.assignment_statement or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.delay_statement; is Current_Member : SeqAlgebra.MemberOfSeq; Current_Import : Dictionary.Symbol; -- add relation of the form Import <- {Import} to Node procedure Add_Self_Dependency (Table : in out RefList.HashTable; The_Heap : in out Heap.HeapRecord; Node : in STree.SyntaxNode; Import : in Dictionary.Symbol) --# global in STree.Table; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# Table, --# The_Heap from *, --# Import, --# Node, --# Table, --# The_Heap & --# null from STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.assignment_statement or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.delay_statement; is Import_Seq : SeqAlgebra.Seq; begin SeqAlgebra.CreateSeq (The_Heap, Import_Seq); SeqAlgebra.AddMember (The_Heap, Import_Seq, Natural (Dictionary.SymbolRef (Import))); RefList.AddRelation (Table, The_Heap, Node, Import, Import_Seq); --# accept F, 30, STree.Table, "Used for precondition only"; end Add_Self_Dependency; begin -- Add_Stream_Effects -- traverse Import list adding self references for any IN streams found Current_Member := SeqAlgebra.FirstMember (The_Heap, Imports); while not SeqAlgebra.IsNullMember (Current_Member) loop --# assert Syntax_Node_Type (Node, STree.Table) = SP_Symbols.assignment_statement or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.delay_statement; Current_Import := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (SeqAlgebra.Value_Of_Member (The_Heap => The_Heap, M => Current_Member))); -- if the Current_Import is a stream then add a new identity relation if Dictionary.GetOwnVariableOrConstituentMode (Dictionary.GetMostEnclosingObject (Current_Import)) /= Dictionary.DefaultMode then -- we know it is mode in because wffs prevent reading of mode outs Add_Self_Dependency (Table => Table, The_Heap => The_Heap, Node => Node, Import => Current_Import); end if; Current_Member := SeqAlgebra.NextMember (The_Heap, Current_Member); end loop; -- finally, see if the Export is an OutStream and if so add it to the Import list if Dictionary.GetOwnVariableOrConstituentMode (Dictionary.GetMostEnclosingObject (Export)) /= Dictionary.DefaultMode then -- we know it is mode out because wffs prevent writing of mode ins SeqAlgebra.AddMember (The_Heap, Imports, Natural (Dictionary.SymbolRef (Export))); end if; end Add_Stream_Effects; -------------------------------------------------------------------- procedure Wf_Assign (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Table : in out RefList.HashTable; Component_Data : in out ComponentManager.ComponentData) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; 
--# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# in out TheHeap; --# out Aggregate_Stack.State; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# STree.Table from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# TheHeap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# TheHeap & --# Statistics.TableUsage, --# Table, --# TheHeap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# Table, --# TheHeap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.assignment_statement; --# post STree.Table = STree.Table~; is separate; -------------------------------------------------------------------- procedure Wf_Condition (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Table : in out RefList.HashTable; Component_Data : in out ComponentManager.ComponentData) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# in out TheHeap; --# out Aggregate_Stack.State; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# STree.Table from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# TheHeap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# TheHeap & --# Statistics.TableUsage, --# Table, --# TheHeap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# Table, --# TheHeap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.condition; --# post STree.Table = STree.Table~; is separate; -------------------------------------------------------------------- -- This procedure prepares the flow analyser to deal with ininite loops. If the loop -- has some means of exiting (iteration scheme or exit statements) then nothing is -- done. Otherwise an empty referenced variable list is associated with the -- end_of_loop node to act as a stable exit expression in the manner of "exit when false". 
-- A Boolean type symbol is planted in the syntax tree at this point as a signal to the -- flow analyser that it should model the default exit as a way of providing a syntactic -- exit from the loop procedure Setup_Default_Loop_Exit (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Table : in out RefList.HashTable; The_Heap : in out Heap.HeapRecord) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Statistics.TableUsage, --# Table, --# The_Heap from *, --# Dictionary.Dict, --# Node, --# Scope, --# Table, --# The_Heap & --# STree.Table from *, --# Dictionary.Dict, --# Node, --# Scope; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.end_of_loop; --# post STree.Table = STree.Table~; is Ref_Var : SeqAlgebra.Seq; begin -- and end_of_loop node is placed in the syntax tree after the sequence of statements -- it controls. If the loop has no exits we attach and empty referenced variable -- list to this node so that that the flow analyser can pretend that there is an -- "exit when false" at this point. We plant type Boolean in the syntax tree to signal --to the flow analyser that this default exit point is active. If the loop has an -- iteration scheme or already has exits then we do nothing here. if not Dictionary.GetLoopHasExits (Dictionary.GetRegion (Scope)) then SeqAlgebra.CreateSeq (The_Heap, Ref_Var); RefList.AddRelation (Table, The_Heap, Node, Dictionary.NullSymbol, Ref_Var); STree.Add_Node_Symbol (Node => Node, Sym => Dictionary.GetPredefinedBooleanType); end if; end Setup_Default_Loop_Exit; -------------------------------------------------------------------- procedure Wf_Return (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Table : in out RefList.HashTable; Component_Data : in out ComponentManager.ComponentData) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# in out TheHeap; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# STree.Table, --# Table from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# TheHeap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# Table, --# TheHeap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.return_statement; --# post STree.Table = STree.Table~; is separate; -------------------------------------------------------------------- procedure Wf_Case (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Table : in out RefList.HashTable; Component_Data : in out 
ComponentManager.ComponentData) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Case_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# in out TheHeap; --# out Aggregate_Stack.State; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# STree.Table from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# TheHeap & --# Case_Stack.State, --# Table from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# TheHeap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# Table, --# TheHeap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.case_statement and --# Case_Stack.Stack_Is_Valid (Case_Stack.State); --# post STree.Table = STree.Table~ and --# Case_Stack.Stack_Is_Valid (Case_Stack.State); is separate; -------------------------------------------------------------------- procedure Wf_Case_Choice (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Table : in out RefList.HashTable; Component_Data : in out ComponentManager.ComponentData) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Case_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# in out TheHeap; --# out Aggregate_Stack.State; --# derives Aggregate_Stack.State, --# Case_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# STree.Table from Case_Stack.State, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# TheHeap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from Case_Stack.State, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# TheHeap & --# Statistics.TableUsage, --# Table, --# TheHeap from *, --# Case_Stack.State, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# 
STree.Table, --# Table, --# TheHeap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.case_choice and --# Case_Stack.Stack_Is_Valid (Case_Stack.State); --# post STree.Table = STree.Table~ and --# Case_Stack.Stack_Is_Valid (Case_Stack.State); is separate; -------------------------------------------------------------------- procedure Wf_Exit (Node : in STree.SyntaxNode; The_Loop : in Dictionary.Symbol; Condition_Node : out STree.SyntaxNode) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Condition_Node from Node, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Loop; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.exit_statement; --# post Syntax_Node_Type (Condition_Node, STree.Table) = SP_Symbols.condition or --# Condition_Node = STree.NullNode; is separate; -------------------------------------------------------------------- procedure Wf_Delay_Until (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Table : in out RefList.HashTable; Component_Data : in out ComponentManager.ComponentData) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# in out TheHeap; --# out Aggregate_Stack.State; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# STree.Table from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# TheHeap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# TheHeap & --# Statistics.TableUsage, --# Table, --# TheHeap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# Table, --# TheHeap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.delay_statement; --# post STree.Table = STree.Table~; is separate; -------------------------------------------------------------------- procedure Down_Loop (Node : in STree.SyntaxNode; Scope : in out Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in STree.Table; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# 
ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# Scope from ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Node, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.loop_statement; is separate; -------------------------------------------------------------------- procedure Up_Loop (Node : in STree.SyntaxNode; Scope : in out Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# Scope from *, --# Dictionary.Dict; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.loop_statement; is separate; -------------------------------------------------------------------- procedure Wf_Loop_Param (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Table : in out RefList.HashTable; Component_Data : in out ComponentManager.ComponentData) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# in out TheHeap; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# STree.Table, --# Table from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# TheHeap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# Table, --# TheHeap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.loop_parameter_specification; --# post STree.Table = STree.Table~; is separate; -------------------------------------------------------------------- procedure Wf_Proc_Call (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Table : in out RefList.HashTable; Component_Data : in out ComponentManager.ComponentData) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# in out TheHeap; --# derives Aggregate_Stack.State, --# LexTokenManager.State, --# Table from *, --# CommandLineData.Content, --# Component_Data, --# 
ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# TheHeap & --# Component_Data, --# Dictionary.Dict, --# Statistics.TableUsage, --# STree.Table, --# TheHeap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# Table, --# TheHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Table, --# TheHeap & --# SLI.State from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# TheHeap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.procedure_call_statement; --# post STree.Table = STree.Table~; is separate; -------------------------------------------------------------------- procedure Up_Case (Node : in STree.SyntaxNode) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out Case_Stack.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Case_Stack.State from * & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Case_Stack.State, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.case_statement and --# Case_Stack.Stack_Is_Valid (Case_Stack.State); --# post Case_Stack.Stack_Is_Valid (Case_Stack.State); is separate; -------------------------------------------------------------------- procedure Wf_Proof_Statement_Or_Loop_Invariant (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# in out TheHeap; --# out Aggregate_Stack.State; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# STree.Table, --# TheHeap from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# TheHeap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# TheHeap & --# Statistics.TableUsage from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# 
Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# TheHeap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.proof_statement or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.loop_invariant; --# post STree.Table = STree.Table~; is Child_Child_Node : STree.SyntaxNode; Unused : Boolean; pragma Unreferenced (Unused); begin -- Proof_Statement and Loop_Invariant have the same shape in the -- grammar, so this procedure WFFs both. Child_Child_Node := Child_Node (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME Child_Child_Node = predicate SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Child_Child_Node) = SP_Symbols.predicate, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child_Child_Node = predicate in Wf_Proof_Statement_Or_Loop_Invariant"); if Syntax_Node_Type (Node) = SP_Symbols.proof_statement and then Syntax_Node_Type (Child_Node (Node)) = SP_Symbols.assume_statement then -- Assumptions are magically assumed to be true, hence they -- must be justified. We raise a warning here to prompt the -- user to do so. ErrorHandler.Semantic_Warning (Err_Num => 444, Position => Node_Position (Node), Id_Str => LexTokenManager.Null_String); end if; --# accept Flow, 10, Unused, "Expected ineffective assignment to Unused"; Wf_Predicate (Node => Child_Child_Node, Scope => Scope, Context => Postcondition, Component_Data => Component_Data, The_Heap => TheHeap, Errors_Found => Unused); --# end accept; --# accept Flow, 33, Unused, "Expected to be neither referenced nor exported"; end Wf_Proof_Statement_Or_Loop_Invariant; -------------------------------------------------------------------- procedure Init_Component_Data (Scope : in Dictionary.Scopes; Component_Data : out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives Component_Data, --# The_Heap from Dictionary.Dict, --# Scope, --# The_Heap & --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Scope, --# The_Heap; is Subprog_Sym, Own_Var_Sym, Constituent_Sym : Dictionary.Symbol; It, Constituent_It : Dictionary.Iterator; ----------------- procedure Maybe_Add (Sym : in Dictionary.Symbol; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# derives Component_Data, --# Statistics.TableUsage, --# The_Heap from *, --# Component_Data, --# Dictionary.Dict, --# Sym, --# The_Heap; is begin if Dictionary.TypeIsRecord (Dictionary.GetType (Sym)) then ComponentManager.AddRoot (Component_Data, The_Heap, Sym); end if; end Maybe_Add; begin -- Init_Component_Data -- ensure we start with a completely fresh copy of Component_Data ComponentManager.Initialise (Component_Data); -- add roots of all record types as specified in S.P0468.53.9 para 6.1 Subprog_Sym := Dictionary.GetRegion (Scope); if Dictionary.Is_Subprogram (Subprog_Sym) or else (Dictionary.IsType (Subprog_Sym) and then Dictionary.TypeIsTask (Subprog_Sym)) then -- initialize using parameters, globals and local variables -- first do local variables It := Dictionary.First_Local_Variable (Subprogram => Subprog_Sym); while not Dictionary.IsNullIterator (It) loop Maybe_Add (Sym => Dictionary.CurrentSymbol (It), Component_Data => Component_Data, The_Heap => The_Heap); It := Dictionary.NextSymbol (It); end loop; -- then parameters It := Dictionary.FirstGlobalVariable (Dictionary.GetAbstraction (Subprog_Sym, Scope), Subprog_Sym); while not 
Dictionary.IsNullIterator (It) loop Maybe_Add (Sym => Dictionary.CurrentSymbol (It), Component_Data => Component_Data, The_Heap => The_Heap); It := Dictionary.NextSymbol (It); end loop; -- then globals It := Dictionary.FirstSubprogramParameter (Subprog_Sym); while not Dictionary.IsNullIterator (It) loop Maybe_Add (Sym => Dictionary.CurrentSymbol (It), Component_Data => Component_Data, The_Heap => The_Heap); It := Dictionary.NextSymbol (It); end loop; else -- package init part, initialize using own variable list, -- looking at constituents which are not own variables of -- embedded packages where they are found. It := Dictionary.FirstOwnVariable (Subprog_Sym); while not Dictionary.IsNullIterator (It) loop Own_Var_Sym := Dictionary.CurrentSymbol (It); if Dictionary.IsConcreteOwnVariable (Own_Var_Sym) then Maybe_Add (Sym => Own_Var_Sym, Component_Data => Component_Data, The_Heap => The_Heap); else -- must be abstract Constituent_It := Dictionary.FirstConstituent (Own_Var_Sym); while not Dictionary.IsNullIterator (Constituent_It) loop Constituent_Sym := Dictionary.CurrentSymbol (Constituent_It); if not Dictionary.IsOwnVariable (Constituent_Sym) then Maybe_Add (Sym => Constituent_Sym, Component_Data => Component_Data, The_Heap => The_Heap); end if; Constituent_It := Dictionary.NextSymbol (Constituent_It); end loop; end if; It := Dictionary.NextSymbol (It); end loop; end if; end Init_Component_Data; ----------------------------------------------------------------- -- CheckForMutuallyExclusiveBranches -- -- Description: -- Given two STree.SyntaxNodes, Given_Node and -- Preceding_Node, this subprogram checks that the Given_Node -- and the Preceding_Node are on mutually exclusive branches. -- For checking the updates of pure exported protected variables -- the Preceding_Node is the last update node as the syntax tree is -- traversed and the Given_Node is the update node just encountered. -- If the subprogram is repeatedly applied for each update node -- encountered (with its previous node) then this is sufficient -- to ensure that all updates are on mutually exclusive paths -- provided that Are_Mutually_Exclusive always has a return value -- of True. See S.P0468.53.49. 
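--
-- Illustrative sketch (not part of the original description; P, A, B, X,
-- Y and Z are hypothetical): if a pure protected export P is updated on
-- the branches of a single if-statement,
--
--    if A then
--       P := X;   -- first update of P: recorded in the history
--    elsif B then
--       P := Y;   -- compared with the update at "P := X" only
--    else
--       P := Z;   -- compared with the update at "P := Y" only
--    end if;
--
-- then comparing each update with just the immediately preceding one is
-- enough: if every pairwise check reports mutual exclusion, all of the
-- updates lie on mutually exclusive paths.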
------------------------------------------------------------------ procedure CheckForMutuallyExclusiveBranches (Given_Node, Preceding_Node : in STree.SyntaxNode; The_Heap : in out Heap.HeapRecord; Are_Mutually_Exclusive : out Boolean) --# global in STree.Table; --# in out Statistics.TableUsage; --# derives Are_Mutually_Exclusive, --# The_Heap from Given_Node, --# Preceding_Node, --# STree.Table, --# The_Heap & --# Statistics.TableUsage from *, --# Given_Node, --# Preceding_Node, --# STree.Table, --# The_Heap; --# pre (Syntax_Node_Type (Given_Node, STree.Table) = SP_Symbols.assignment_statement or --# Syntax_Node_Type (Given_Node, STree.Table) = SP_Symbols.procedure_call_statement) and --# (Syntax_Node_Type (Preceding_Node, STree.Table) = SP_Symbols.assignment_statement or --# Syntax_Node_Type (Preceding_Node, STree.Table) = SP_Symbols.procedure_call_statement); is separate; -------------------------------------------------------------------- procedure Check_Pure_Protected_Exports_Updated_Once_Only (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Pure_Protected_Export_List : in out VariableUpdateHistory.History_T) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Pure_Protected_Export_List, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Table, --# TheHeap & --# Pure_Protected_Export_List, --# Statistics.TableUsage, --# TheHeap from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Node, --# Pure_Protected_Export_List, --# STree.Table, --# Table, --# TheHeap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.assignment_statement or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.procedure_call_statement; is The_Export : Natural; The_Export_Atom : Heap.Atom; The_Export_Sym : Dictionary.Symbol; The_Imports : SeqAlgebra.Seq; Previous_Node : STree.SyntaxNode; On_Mutually_Exclusive_Branches : Boolean; begin -- does this node has exports if RefList.NodeHasExportList (Table => Table, TheHeap => TheHeap, Node => Node) then -- Get the first export for this node. RefList.FirstExport (Table => Table, TheHeap => TheHeap, Node => Node, TheExport => The_Export_Atom); while not Heap.IsNullPointer (The_Export_Atom) loop -- Get the symbol for this export. The_Export_Sym := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (Heap.AValue (TheHeap => TheHeap, A => The_Export_Atom))); -- Is it protected? if Dictionary.IsOwnVariable (The_Export_Sym) and then -- do not report error if the thing being updates is a suspension object not Dictionary.IsPredefinedSuspensionObjectType (Dictionary.GetType (The_Export_Sym)) and then Dictionary.GetOwnVariableProtected (The_Export_Sym) and then -- does it have a wellformed set of imports? -- Note: the imports list will be empty if there are none. RefList.ExportHasDependencies (TheExport => The_Export_Atom, TheHeap => TheHeap) then -- Get the imports for this export. The_Imports := RefList.DependencyList (TheHeap => TheHeap, TheExport => The_Export_Atom); The_Export := Heap.AValue (TheHeap => TheHeap, A => The_Export_Atom); -- Is it a pure export? 
(pure exports don't depend on themselves) if not SeqAlgebra.IsMember (TheHeap => TheHeap, S => The_Imports, GivenValue => The_Export) then -- It's a pure export. -- Has this export already been updated? VariableUpdateHistory.Get_Last_Update (The_Heap => TheHeap, History => Pure_Protected_Export_List, Variable => The_Export, Node => Previous_Node); -- ASSUME Previous_Node = assignment_statement OR procedure_call_statement OR NULL if Previous_Node = STree.NullNode then -- ASSUME Previous_Node = NULL -- The export has not been previously encountered -- Add the pure export to the update history VariableUpdateHistory.Add_Update (The_Heap => TheHeap, History => Pure_Protected_Export_List, Variable => The_Export, Node => Node); elsif Syntax_Node_Type (Node => Previous_Node) = SP_Symbols.assignment_statement or else Syntax_Node_Type (Node => Previous_Node) = SP_Symbols.procedure_call_statement then -- ASSUME Previous_Node = assignment_statement OR procedure_call_statement -- The export has previously been updated, check -- that the previous update was on a mutually -- exlusive path CheckForMutuallyExclusiveBranches (Given_Node => Node, Preceding_Node => Previous_Node, The_Heap => TheHeap, Are_Mutually_Exclusive => On_Mutually_Exclusive_Branches); if On_Mutually_Exclusive_Branches then -- The update is valid, add the new pair to the export list -- replacing the current pair involving The_Export. VariableUpdateHistory.Add_Update (The_Heap => TheHeap, History => Pure_Protected_Export_List, Variable => The_Export, Node => Node); else -- semantic error. ErrorHandler.Semantic_Error_Sym (Err_Num => 957, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Sym => The_Export_Sym, Scope => Scope); end if; end if; end if; end if; The_Export_Atom := RefList.NextExport (TheHeap => TheHeap, TheExport => The_Export_Atom); end loop; end if; end Check_Pure_Protected_Exports_Updated_Once_Only; begin -- WalkStatements Case_Stack.Init; Init_Component_Data (Scope => Scope, Component_Data => Component_Data, The_Heap => TheHeap); -- Create a VariablUpdateHistory to retain the immediately previous -- node at which the update of each pure export protected varible occurred. 
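-- The history is disposed of again once the walk completes.
--
-- The statement walk below is an iterative traversal of the syntax tree
-- rooted at Seq_Node: each pass processes Last_Node, sets Next_Node either
-- to a child that still needs visiting or to STree.NullNode when the node
-- has been fully handled, and the inner loop then moves to the next
-- sibling or climbs back up via Parent_Node, calling Up_Loop and Up_Case
-- as those constructs are left, until control returns to Seq_Node.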
VariableUpdateHistory.Create_History (The_Heap => TheHeap, History => Pure_Protected_Export_List); -- scope may change locally in loops but will always -- be back to original scope on exit from procedure L_Scope := Scope; Next_Node := Seq_Node; loop --# assert STree.Table = STree.Table~ and --# Case_Stack.Stack_Is_Valid (Case_Stack.State); Last_Node := Next_Node; Node_Type := Syntax_Node_Type (Node => Last_Node); case Node_Type is when SP_Symbols.assignment_statement => -- ASSUME Last_Node = assignment_statement Wf_Assign (Node => Last_Node, Scope => L_Scope, Table => Table, Component_Data => Component_Data); Check_Pure_Protected_Exports_Updated_Once_Only (Node => Last_Node, Scope => L_Scope, Pure_Protected_Export_List => Pure_Protected_Export_List); Next_Node := STree.NullNode; when SP_Symbols.condition => -- ASSUME Last_Node = condition Wf_Condition (Node => Last_Node, Scope => L_Scope, Table => Table, Component_Data => Component_Data); Next_Node := STree.NullNode; when SP_Symbols.exit_statement => -- ASSUME Last_Node = exit_statement Dictionary.MarkLoopHasExits (Dictionary.GetRegion (L_Scope)); Wf_Exit (Node => Last_Node, The_Loop => Dictionary.GetRegion (L_Scope), Condition_Node => Next_Node); when SP_Symbols.end_of_loop => -- ASSUME Last_Node = Last_Node Setup_Default_Loop_Exit (Node => Last_Node, Scope => L_Scope, Table => Table, The_Heap => TheHeap); Next_Node := Child_Node (Last_Node); when SP_Symbols.case_statement => -- ASSUME Last_Node = case_statement Wf_Case (Node => Last_Node, Scope => L_Scope, Table => Table, Component_Data => Component_Data); Next_Node := Child_Node (Last_Node); when SP_Symbols.case_choice => -- ASSUME Last_Node = case_choice Wf_Case_Choice (Node => Last_Node, Scope => L_Scope, Table => Table, Component_Data => Component_Data); Next_Node := STree.NullNode; when SP_Symbols.return_statement => -- ASSUME Last_Node = return_statement Wf_Return (Node => Last_Node, Scope => L_Scope, Table => Table, Component_Data => Component_Data); Next_Node := STree.NullNode; when SP_Symbols.loop_statement => -- ASSUME Last_Node = loop_statement Down_Loop (Node => Last_Node, Scope => L_Scope); Next_Node := Child_Node (Last_Node); when SP_Symbols.loop_invariant => -- ASSUME Last_Node = loop_invariant -- Loop invariant which preceeds the "loop" keyword is part of the -- loop iteration scheme, so doesn't appear as a statement, so we -- must WFF is here. 
Wf_Proof_Statement_Or_Loop_Invariant (Node => Last_Node, Scope => L_Scope, Component_Data => Component_Data); Next_Node := STree.NullNode; when SP_Symbols.loop_parameter_specification => -- ASSUME Last_Node = loop_parameter_specification Wf_Loop_Param (Node => Last_Node, Scope => L_Scope, Table => Table, Component_Data => Component_Data); Next_Node := Child_Node (Last_Node); when SP_Symbols.apragma => -- ASSUME Last_Node = apragma Wf_Pragma (Node => Last_Node, Scope => L_Scope); Next_Node := STree.NullNode; when SP_Symbols.procedure_call_statement => -- ASSUME Last_Node = procedure_call_statement Wf_Proc_Call (Node => Last_Node, Scope => L_Scope, Table => Table, Component_Data => Component_Data); Check_Pure_Protected_Exports_Updated_Once_Only (Node => Last_Node, Scope => L_Scope, Pure_Protected_Export_List => Pure_Protected_Export_List); Next_Node := STree.NullNode; when SP_Symbols.proof_statement => -- ASSUME Last_Node = proof_statement Wf_Proof_Statement_Or_Loop_Invariant (Node => Last_Node, Scope => L_Scope, Component_Data => Component_Data); Next_Node := STree.NullNode; when SP_Symbols.expression => -- ASSUME Last_Node = expression Next_Node := STree.NullNode; when SP_Symbols.delay_statement => -- ASSUME Last_Node = delay_statement --# accept Flow, 41, "Expected stable expression"; if CommandLineData.Ravenscar_Selected then --# end accept; Wf_Delay_Until (Node => Last_Node, Scope => L_Scope, Table => Table, Component_Data => Component_Data); else -- illegal ErrorHandler.Semantic_Error (Err_Num => 850, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Last_Node), Id_Str => LexTokenManager.Null_String); end if; Next_Node := STree.NullNode; when SP_Symbols.justification_statement => -- ASSUME Last_Node = justification_statement Wf_Justification_Statement (Node => Last_Node, Scope => L_Scope, Component_Data => Component_Data, The_Heap => TheHeap); Next_Node := STree.NullNode; when others => if Node_Type in SP_Symbols.SP_Non_Terminal then Next_Node := Child_Node (Last_Node); else Next_Node := STree.NullNode; end if; end case; if Next_Node = STree.NullNode then loop --# assert STree.Table = STree.Table~ and --# Case_Stack.Stack_Is_Valid (Case_Stack.State); Next_Node := Next_Sibling (Last_Node); exit when Next_Node /= STree.NullNode; -- new branch to right Next_Node := Parent_Node (Current_Node => Last_Node); Last_Node := Next_Node; case Syntax_Node_Type (Node => Last_Node) is when SP_Symbols.loop_statement => -- ASSUME Last_Node = loop_statement Up_Loop (Node => Last_Node, Scope => L_Scope); when SP_Symbols.case_statement => -- ASSUME Last_Node = case_statement Up_Case (Node => Last_Node); when others => null; end case; exit when Next_Node = Seq_Node; -- got back to top end loop; -- up end if; exit when Next_Node = Seq_Node; -- met start point on way up end loop; -- down -- Dispose of the update history VariableUpdateHistory.Dispose_Of_History (The_Heap => TheHeap, History => Pure_Protected_Export_List); end WalkStatements; spark-2012.0.deb/examiner/sem-subprogram_specification.adb0000644000175000017500000013454011753202336022573 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- 
version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SLI; separate (Sem) package body Subprogram_Specification is procedure Wf_Subprogram_Specification_From_Body (Node : in STree.SyntaxNode; Hidden : in Boolean; Current_Scope : in out Dictionary.Scopes; Subprog_Sym : out Dictionary.Symbol; First_Seen : out Boolean) is Ident_Node, Return_Type_Node : STree.SyntaxNode; Ident_Str : LexTokenManager.Lex_String; Type_Sym, First_Sym_Found : Dictionary.Symbol; Grand_Parent, Great_Grand_Parent : SP_Symbols.SP_Symbol; Adding_Proper_Body : Boolean; ----------------------------------------------- procedure Do_Add (Add_Subprog, Add_Body, Hidden : in Boolean; Ident_Node : in STree.SyntaxNode; Node_Pos : in LexTokenManager.Token_Position; First_Seen : in out Boolean; Current_Scope : in out Dictionary.Scopes; Subprog_Sym : in out Dictionary.Symbol) --# global in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Current_Scope from *, --# Add_Body, --# Add_Subprog, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Ident_Node, --# Node_Pos, --# STree.Table, --# Subprog_Sym & --# Dictionary.Dict from *, --# Add_Body, --# Add_Subprog, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Hidden, --# Ident_Node, --# Node_Pos, --# STree.Table, --# Subprog_Sym & --# First_Seen from *, --# Add_Subprog & --# SPARK_IO.File_Sys from *, --# Add_Body, --# Add_Subprog, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# Hidden, --# Ident_Node, --# LexTokenManager.State, --# Node_Pos, --# STree.Table, --# Subprog_Sym & --# STree.Table, --# Subprog_Sym from *, --# Add_Subprog, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# Ident_Node, --# Node_Pos, --# STree.Table; --# pre STree.Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; --# post STree.Table = STree.Table~; is begin if Add_Subprog then Dictionary.AddSubprogram (Name => STree.Node_Lex_String (Node => Ident_Node), Comp_Unit => ContextManager.Ops.Current_Unit, Specification => Dictionary.Location'(Start_Position => Node_Pos, End_Position => Node_Pos), Scope => Current_Scope, Context => Dictionary.ProgramContext, Subprogram => Subprog_Sym); STree.Add_Node_Symbol (Node => Ident_Node, Sym => Subprog_Sym); else First_Seen := False; end if; if Add_Body then Dictionary.AddBody (CompilationUnit => Subprog_Sym, Comp_Unit => ContextManager.Ops.Current_Unit, TheBody => Dictionary.Location'(Start_Position => Node_Pos, End_Position => Node_Pos), Hidden => Hidden); Current_Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Subprog_Sym); else Dictionary.AddBodyStub (CompilationUnit => Subprog_Sym, Comp_Unit => ContextManager.Ops.Current_Unit, BodyStub => Dictionary.Location'(Start_Position => Node_Pos, End_Position => Node_Pos)); end if; end Do_Add; ----------------------------------------------- procedure Check_For_Child (Ident_Node : in STree.SyntaxNode; 
Current_Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table; --# pre STree.Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; is begin if Dictionary.IsPackage (Dictionary.GetRegion (Current_Scope)) and then not Dictionary.IsEmbeddedPackage (Dictionary.GetRegion (Current_Scope)) and then not Dictionary.Is_Null_Symbol (Dictionary.LookupSelectedItem (Prefix => Dictionary.GetRegion (Current_Scope), Selector => STree.Node_Lex_String (Node => Ident_Node), Scope => Dictionary.GlobalScope, Context => Dictionary.ProofContext)) then -- name exists as child ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Ident_Node), Id_Str => STree.Node_Lex_String (Node => Ident_Node)); end if; end Check_For_Child; ----------------------------------------------- function Declared_In_Same_Or_Related_Scope (Sym : Dictionary.Symbol; Current_Scope : Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; -- return true if Sym is declared in Current_Scope or in the -- visible/private scope of the region associate with Current_Scope is begin return Dictionary.GetScope (Sym) = Current_Scope or else Dictionary.GetScope (Sym) = Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Dictionary.GetRegion (Current_Scope)) or else Dictionary.GetScope (Sym) = Dictionary.Set_Visibility (The_Visibility => Dictionary.Privat, The_Unit => Dictionary.GetRegion (Current_Scope)); end Declared_In_Same_Or_Related_Scope; begin -- Wf_Subprogram_Specification_From_Body Ident_Node := STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => Node)); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Wf_Subprogram_Specification_From_Body"); Ident_Str := STree.Node_Lex_String (Node => Ident_Node); Grand_Parent := STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => STree.Parent_Node (Current_Node => Node))); Great_Grand_Parent := STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => STree.Parent_Node (Current_Node => STree.Parent_Node (Current_Node => Node)))); Adding_Proper_Body := Great_Grand_Parent = SP_Symbols.abody or else Sem.In_Protected_Body (Current_Scope => Current_Scope); -- in prot bod we can't be adding a stub First_Seen := True; -- default value in case all checks below fail Subprog_Sym := Dictionary.LookupItem (Name => Ident_Str, Scope => Current_Scope, Context => Dictionary.ProofContext, Full_Package_Name => False); if Dictionary.Is_Null_Symbol (Subprog_Sym) then if STree.Syntax_Node_Type (Node => STree.Parent_Node (Current_Node => Node)) = SP_Symbols.body_stub then Check_For_Child (Ident_Node => Ident_Node, Current_Scope => Current_Scope); Do_Add (Add_Subprog => True, Add_Body => False, Hidden => Hidden, Ident_Node => Ident_Node, Node_Pos => STree.Node_Position (Node => Node), First_Seen => First_Seen, Current_Scope => Current_Scope, Subprog_Sym => Subprog_Sym); elsif Grand_Parent = 
SP_Symbols.main_program_declaration then Do_Add (Add_Subprog => True, Add_Body => True, Hidden => Hidden, Ident_Node => Ident_Node, Node_Pos => STree.Node_Position (Node => Node), First_Seen => First_Seen, Current_Scope => Current_Scope, Subprog_Sym => Subprog_Sym); elsif Grand_Parent = SP_Symbols.generic_subprogram_body then Subprog_Sym := Dictionary.NullSymbol; ErrorHandler.Semantic_Error (Err_Num => 641, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Ident_Node), Id_Str => Ident_Str); elsif Great_Grand_Parent /= SP_Symbols.subunit then Do_Add (Add_Subprog => True, Add_Body => True, Hidden => Hidden, Ident_Node => Ident_Node, Node_Pos => STree.Node_Position (Node => Node), First_Seen => First_Seen, Current_Scope => Current_Scope, Subprog_Sym => Subprog_Sym); else -- no stub for subunit Subprog_Sym := Dictionary.NullSymbol; ErrorHandler.Semantic_Error (Err_Num => 15, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Ident_Node), Id_Str => Ident_Str); end if; else -- symbol found so further checks needed -- if the Subprog_Sym is an implicit proof function associated -- with the declaration of an Ada function then we want to -- recover the associated Ada function before proceding (since -- that is the thing we are going to add a body to). The -- GetAdaFunction call is guarded to meet its precondition. if Dictionary.IsImplicitProofFunction (Subprog_Sym) then Subprog_Sym := Dictionary.GetAdaFunction (Subprog_Sym); end if; if Great_Grand_Parent = SP_Symbols.subunit then if Dictionary.Is_Subprogram (Subprog_Sym) and then Dictionary.HasBodyStub (Subprog_Sym) and then not Dictionary.HasBody (Subprog_Sym) then STree.Set_Node_Lex_String (Sym => Subprog_Sym, Node => Ident_Node); Do_Add (Add_Subprog => False, Add_Body => True, Hidden => Hidden, Ident_Node => Ident_Node, Node_Pos => STree.Node_Position (Node => Node), First_Seen => First_Seen, Current_Scope => Current_Scope, Subprog_Sym => Subprog_Sym); else Subprog_Sym := Dictionary.NullSymbol; ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Ident_Node), Id_Str => Ident_Str); end if; elsif Grand_Parent = SP_Symbols.generic_subprogram_body then if Dictionary.Is_Subprogram (Subprog_Sym) and then not Dictionary.HasBody (Subprog_Sym) then STree.Set_Node_Lex_String (Sym => Subprog_Sym, Node => Ident_Node); Do_Add (Add_Subprog => False, Add_Body => True, Hidden => Hidden, Ident_Node => Ident_Node, Node_Pos => STree.Node_Position (Node => Node), First_Seen => First_Seen, Current_Scope => Current_Scope, Subprog_Sym => Subprog_Sym); else Subprog_Sym := Dictionary.NullSymbol; ErrorHandler.Semantic_Error (Err_Num => 13, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Ident_Node), Id_Str => Ident_Str); end if; elsif -- a place where completion of declaration may be allowed (Sem.In_Package_Body (Current_Scope => Current_Scope) -- another place where completion of declaration may be allowed or else Sem.In_Protected_Body (Current_Scope => Current_Scope)) and then -- check that we are in a place where the the declaration can -- be legally completed (i.e. 
if subprog declared in a -- package spec it can only be completed in the package body -- (ditto protected type/body) Declared_In_Same_Or_Related_Scope (Sym => Subprog_Sym, Current_Scope => Current_Scope) then First_Sym_Found := Subprog_Sym; Subprog_Sym := Dictionary.LookupImmediateScope (Name => Ident_Str, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Dictionary.GetRegion (Current_Scope)), Context => Dictionary.ProgramContext); -- Above looked for declaration in spec vis part, if not -- found, try again in private part if Dictionary.Is_Null_Symbol (Subprog_Sym) and then Dictionary.IsPackage (Dictionary.GetRegion (Current_Scope)) then Subprog_Sym := Dictionary.LookupImmediateScope (Name => Ident_Str, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Privat, The_Unit => Dictionary.GetRegion (Current_Scope)), Context => Dictionary.ProgramContext); end if; if Dictionary.Is_Null_Symbol (Subprog_Sym) then -- something definitely wrong if not Dictionary.Is_Subprogram (First_Sym_Found) or else Dictionary.IsProofFunction (First_Sym_Found) then -- Name in use for something other than a subprogram or in use for an explicit proof function. -- Report "illegal redec" rather than "already has body" for these cases ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Ident_Node), Id_Str => Ident_Str); -- add anyway to prevent scope problems later Do_Add (Add_Subprog => True, Add_Body => Adding_Proper_Body, Hidden => Hidden, Ident_Node => Ident_Node, Node_Pos => STree.Node_Position (Node => Node), First_Seen => First_Seen, Current_Scope => Current_Scope, Subprog_Sym => Subprog_Sym); else -- it is a subprogram which must be a duplicate ErrorHandler.Semantic_Error (Err_Num => 13, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Ident_Node), Id_Str => Ident_Str); if Adding_Proper_Body then if Dictionary.HasBody (First_Sym_Found) then -- add complete duplicate subprogram to dict Do_Add (Add_Subprog => True, Add_Body => True, Hidden => Hidden, Ident_Node => Ident_Node, Node_Pos => STree.Node_Position (Node => Node), First_Seen => First_Seen, Current_Scope => Current_Scope, Subprog_Sym => Subprog_Sym); else -- add body to duplicate procedure stub in dict Subprog_Sym := First_Sym_Found; Do_Add (Add_Subprog => False, Add_Body => True, Hidden => Hidden, Ident_Node => Ident_Node, Node_Pos => STree.Node_Position (Node => Node), First_Seen => First_Seen, Current_Scope => Current_Scope, Subprog_Sym => Subprog_Sym); end if; end if; end if; else -- Subprog_Sym was found in package's visible part if not Dictionary.Is_Subprogram (First_Sym_Found) then -- name in use for something other than a subprogram ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Ident_Node), Id_Str => Ident_Str); -- add anyway to prevent scope problems later Do_Add (Add_Subprog => True, Add_Body => Adding_Proper_Body, Hidden => Hidden, Ident_Node => Ident_Node, Node_Pos => STree.Node_Position (Node => Node), First_Seen => First_Seen, Current_Scope => Current_Scope, Subprog_Sym => Subprog_Sym); else -- it is a subprogram which may be a duplicate if Dictionary.HasBody (Subprog_Sym) then ErrorHandler.Semantic_Error (Err_Num => 13, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Ident_Node), Id_Str => Ident_Str); if Adding_Proper_Body then -- add complete duplicate procedure to dict Do_Add 
(Add_Subprog => True, Add_Body => True, Hidden => Hidden, Ident_Node => Ident_Node, Node_Pos => STree.Node_Position (Node => Node), First_Seen => First_Seen, Current_Scope => Current_Scope, Subprog_Sym => Subprog_Sym); end if; elsif Dictionary.HasBodyStub (Subprog_Sym) then ErrorHandler.Semantic_Error (Err_Num => 13, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Ident_Node), Id_Str => Ident_Str); if Adding_Proper_Body then -- add body to duplicate procedure stub in dict Do_Add (Add_Subprog => False, Add_Body => True, Hidden => Hidden, Ident_Node => Ident_Node, Node_Pos => STree.Node_Position (Node => Node), First_Seen => First_Seen, Current_Scope => Current_Scope, Subprog_Sym => Subprog_Sym); end if; else -- the non-error case of pre-declaration of subprogram STree.Set_Node_Lex_String (Sym => Subprog_Sym, Node => Ident_Node); Do_Add (Add_Subprog => False, Add_Body => Adding_Proper_Body, Hidden => Hidden, Ident_Node => Ident_Node, Node_Pos => STree.Node_Position (Node => Node), First_Seen => First_Seen, Current_Scope => Current_Scope, Subprog_Sym => Subprog_Sym); end if; end if; end if; else -- not in a package so duplicate is definitely error if Dictionary.Is_Subprogram (Subprog_Sym) and then Dictionary.HasBody (Subprog_Sym) then ErrorHandler.Semantic_Error (Err_Num => 13, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Ident_Node), Id_Str => Ident_Str); else ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Ident_Node), Id_Str => Ident_Str); end if; if Adding_Proper_Body then Do_Add (Add_Subprog => True, Add_Body => True, Hidden => Hidden, Ident_Node => Ident_Node, Node_Pos => STree.Node_Position (Node => Node), First_Seen => First_Seen, Current_Scope => Current_Scope, Subprog_Sym => Subprog_Sym); else Subprog_Sym := Dictionary.NullSymbol; end if; end if; end if; if STree.Syntax_Node_Type (Node => Node) = SP_Symbols.function_specification and then not Dictionary.Is_Null_Symbol (Subprog_Sym) then -- ASSUME Node = function_specification Return_Type_Node := STree.Last_Sibling_Of (Start_Node => STree.Child_Node (Current_Node => Node)); -- ASSUME Return_Type_Node = type_mark SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Return_Type_Node) = SP_Symbols.type_mark, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = Return_Type_Node = type_mark in Wf_Subprogram_Specification_From_Body"); Sem.Wf_Type_Mark (Node => Return_Type_Node, Current_Scope => Current_Scope, Context => Dictionary.ProgramContext, Type_Sym => Type_Sym); if First_Seen then if Dictionary.Is_Unconstrained_Array_Type_Mark (Type_Sym, Current_Scope) then Type_Sym := Dictionary.GetUnknownTypeMark; ErrorHandler.Semantic_Error (Err_Num => 39, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Return_Type_Node), Id_Str => LexTokenManager.Null_String); elsif Dictionary.IsPredefinedSuspensionObjectType (Type_Sym) or else Dictionary.IsProtectedTypeMark (Type_Sym) then Type_Sym := Dictionary.GetUnknownTypeMark; ErrorHandler.Semantic_Error (Err_Num => 905, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Return_Type_Node), Id_Str => LexTokenManager.Null_String); end if; Dictionary.AddReturnType (TheFunction => Subprog_Sym, TypeMark => Type_Sym, Comp_Unit => ContextManager.Ops.Current_Unit, TypeReference => Dictionary.Location'(Start_Position => STree.Node_Position (Node => Return_Type_Node), End_Position => 
STree.Node_Position (Node => Return_Type_Node))); else -- not First_Seen so check consistency of return type if not Dictionary.Types_Are_Equal (Left_Symbol => Type_Sym, Right_Symbol => Dictionary.GetType (Subprog_Sym), Full_Range_Subtype => False) then if Dictionary.IsUnknownTypeMark (Dictionary.GetType (Subprog_Sym)) then -- remind user that return type on spec was illegal ErrorHandler.Semantic_Error (Err_Num => 841, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Return_Type_Node), Id_Str => Dictionary.GetSimpleName (Subprog_Sym)); else -- report inconsistency ErrorHandler.Semantic_Error (Err_Num => 22, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Return_Type_Node), Id_Str => Dictionary.GetSimpleName (Subprog_Sym)); end if; end if; end if; end if; end Wf_Subprogram_Specification_From_Body; ------------------------------------------------------------------------ procedure Wf_Subprogram_Specification (Spec_Node : in STree.SyntaxNode; Anno_Node : in STree.SyntaxNode; Constraint_Node : in STree.SyntaxNode; Inherit_Node : in STree.SyntaxNode; Context_Node : in STree.SyntaxNode; Generic_Formal_Part_Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Current_Context : in Dictionary.Contexts; Generic_Unit : in Dictionary.Symbol; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord; Subprog_Sym : out Dictionary.Symbol) is Formal_Part_Node : STree.SyntaxNode; ------------------------------------------------------------------------ procedure Wf_Subprogram_Specification_From_Declaration (Spec_Node : in STree.SyntaxNode; Inherit_Node : in STree.SyntaxNode; Context_Node : in STree.SyntaxNode; Generic_Formal_Part_Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Current_Context : in Dictionary.Contexts; Generic_Unit : in Dictionary.Symbol; Subprog_Sym : out Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Context_Node, --# Current_Context, --# Current_Scope, --# Dictionary.Dict, --# Generic_Formal_Part_Node, --# Generic_Unit, --# Inherit_Node, --# LexTokenManager.State, --# Spec_Node, --# STree.Table & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Context_Node, --# Current_Context, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Generic_Formal_Part_Node, --# Generic_Unit, --# Inherit_Node, --# LexTokenManager.State, --# SLI.State, --# SPARK_IO.File_Sys, --# Spec_Node, --# STree.Table & --# Subprog_Sym from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Context, --# Current_Scope, --# Dictionary.Dict, --# Generic_Unit, --# LexTokenManager.State, --# Spec_Node, --# STree.Table; --# pre (STree.Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.function_specification or --# STree.Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.procedure_specification or --# STree.Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.entry_specification) and --# 
(STree.Syntax_Node_Type (Context_Node, STree.Table) = SP_Symbols.context_clause or Context_Node = STree.NullNode) and --# (STree.Syntax_Node_Type (Inherit_Node, STree.Table) = SP_Symbols.inherit_clause or Inherit_Node = STree.NullNode) and --# (STree.Syntax_Node_Type (Generic_Formal_Part_Node, STree.Table) = SP_Symbols.generic_formal_part or --# Generic_Formal_Part_Node = STree.NullNode); --# post STree.Table = STree.Table~; is Return_Type_Node : STree.SyntaxNode; Ident_Node : STree.SyntaxNode; Type_Sym : Dictionary.Symbol; Ok : Boolean; begin Ident_Node := STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => Spec_Node)); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Wf_Subprogram_Specification_From_Declaration"); Sem.Check_Valid_Ident (Ident_Node => Ident_Node, Current_Scope => Current_Scope, Ok => Ok); if Ok then if Dictionary.Is_Null_Symbol (Generic_Unit) then Dictionary.AddSubprogram (Name => STree.Node_Lex_String (Node => Ident_Node), Comp_Unit => ContextManager.Ops.Current_Unit, Specification => Dictionary.Location'(Start_Position => STree.Node_Position (Node => Spec_Node), End_Position => STree.Node_Position (Node => Spec_Node)), Scope => Current_Scope, Context => Current_Context, Subprogram => Subprog_Sym); STree.Add_Node_Symbol (Node => Ident_Node, Sym => Subprog_Sym); if STree.Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.entry_specification then Dictionary.SetSubprogramIsEntry (Subprog_Sym); end if; else Dictionary.AddSubprogram (Name => STree.Node_Lex_String (Node => Ident_Node), Comp_Unit => ContextManager.Ops.Current_Unit, Specification => Dictionary.Location'(Start_Position => STree.Node_Position (Node => Spec_Node), End_Position => STree.Node_Position (Node => Spec_Node)), Scope => Dictionary.GetEnclosingScope (Current_Scope), Context => Current_Context, Subprogram => Subprog_Sym); STree.Add_Node_Symbol (Node => Ident_Node, Sym => Subprog_Sym); Dictionary.Set_Subprogram_Generic_Unit (Subprogram => Subprog_Sym, Generic_Unit => Generic_Unit); Dictionary.Set_Generic_Unit_Owning_Subprogram (Generic_Unit => Generic_Unit, Subprogram => Subprog_Sym); -- ASSUME Inherit_Node = inherit_clause OR NULL if STree.Syntax_Node_Type (Node => Inherit_Node) = SP_Symbols.inherit_clause then -- ASSUME Inherit_Node = inherit_clause Sem.Wf_Inherit_Clause (Node => Inherit_Node, Comp_Sym => Subprog_Sym, Scope => Dictionary.GetEnclosingScope (Current_Scope)); end if; -- ASSUME Context_Node = context_clause OR NULL if STree.Syntax_Node_Type (Node => Context_Node) = SP_Symbols.context_clause then -- ASSUME Context_Node = context_clause Sem.Wf_Context_Clause (Node => Context_Node, Comp_Sym => Subprog_Sym, Scope => Dictionary.GetEnclosingScope (Current_Scope)); end if; -- ASSUME Generic_Formal_Part_Node = generic_formal_part OR NULL if STree.Syntax_Node_Type (Node => Generic_Formal_Part_Node) = SP_Symbols.generic_formal_part then -- ASSUME Generic_Formal_Part_Node = generic_formal_part Sem.Wf_Generic_Formal_Part (Node => Generic_Formal_Part_Node, Generic_Ident_Node_Pos => STree.Node_Position (Node => Ident_Node), Generic_Unit => Generic_Unit, Package_Or_Subprogram_Symbol => Subprog_Sym); end if; end if; else Subprog_Sym := Dictionary.NullSymbol; end if; if STree.Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.function_specification then -- ASSUME Spec_Node = function_specification Return_Type_Node := 
STree.Last_Sibling_Of (Start_Node => STree.Child_Node (Current_Node => Spec_Node)); -- ASSUME Return_Type_Node = type_mark SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Return_Type_Node) = SP_Symbols.type_mark, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Return_Type_Node = type_mark in Wf_Subprogram_Specification_From_Declaration"); Sem.Wf_Type_Mark (Node => Return_Type_Node, Current_Scope => Current_Scope, Context => Current_Context, Type_Sym => Type_Sym); if Dictionary.Is_Unconstrained_Array_Type_Mark (Type_Sym, Current_Scope) then Type_Sym := Dictionary.GetUnknownTypeMark; ErrorHandler.Semantic_Error (Err_Num => 39, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Return_Type_Node), Id_Str => LexTokenManager.Null_String); elsif Dictionary.IsPredefinedSuspensionObjectType (Type_Sym) or else Dictionary.IsProtectedTypeMark (Type_Sym) then Type_Sym := Dictionary.GetUnknownTypeMark; ErrorHandler.Semantic_Error (Err_Num => 905, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Return_Type_Node), Id_Str => LexTokenManager.Null_String); elsif Current_Context = Dictionary.ProgramContext and then Dictionary.TypeIsTagged (Type_Sym) and then (Dictionary.GetScope (Type_Sym) = Current_Scope) then -- attempt to declare primitive function with controlling return result Type_Sym := Dictionary.GetUnknownTypeMark; ErrorHandler.Semantic_Error (Err_Num => 840, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Return_Type_Node), Id_Str => LexTokenManager.Null_String); end if; if not Dictionary.Is_Null_Symbol (Subprog_Sym) then Dictionary.AddReturnType (TheFunction => Subprog_Sym, TypeMark => Type_Sym, Comp_Unit => ContextManager.Ops.Current_Unit, TypeReference => Dictionary.Location'(Start_Position => STree.Node_Position (Node => Return_Type_Node), End_Position => STree.Node_Position (Node => Return_Type_Node))); -- mark signature as not wellformed if wf_type_mark has returned the unknown type if Dictionary.IsUnknownTypeMark (Type_Sym) then Dictionary.SetSubprogramSignatureNotWellformed (Dictionary.IsAbstract, Subprog_Sym); end if; end if; end if; end Wf_Subprogram_Specification_From_Declaration; begin -- Wf_Subprogram_Specification Wf_Subprogram_Specification_From_Declaration (Spec_Node => Spec_Node, Inherit_Node => Inherit_Node, Context_Node => Context_Node, Generic_Formal_Part_Node => Generic_Formal_Part_Node, Current_Scope => Current_Scope, Current_Context => Current_Context, Generic_Unit => Generic_Unit, Subprog_Sym => Subprog_Sym); if not Dictionary.Is_Null_Symbol (Subprog_Sym) then Formal_Part_Node := STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Spec_Node)); -- ASSUME Formal_Part_Node = formal_part OR type_mark OR NULL if STree.Syntax_Node_Type (Node => Formal_Part_Node) = SP_Symbols.formal_part then -- ASSUME Formal_Part_Node = formal_part Sem.Wf_Formal_Part (Node => Formal_Part_Node, Current_Scope => Current_Scope, Subprog_Sym => Subprog_Sym, First_Occurrence => True, Context => Current_Context); elsif Formal_Part_Node /= STree.NullNode and then STree.Syntax_Node_Type (Node => Formal_Part_Node) /= SP_Symbols.type_mark then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Formal_Part_Node = formal_part OR type_mark OR NULL in Wf_Subprogram_Specification"); end if; if STree.Syntax_Node_Type (Node => Anno_Node) = SP_Symbols.procedure_annotation or else STree.Syntax_Node_Type (Node => Anno_Node) = 
SP_Symbols.function_annotation then -- ASSUME Anno_Node = procedure_annotation OR function_annotation Sem.Wf_Subprogram_Annotation (Node => Anno_Node, Current_Scope => Current_Scope, Subprog_Sym => Subprog_Sym, First_Seen => True, The_Heap => The_Heap); end if; -- Synthesise 'all from all' dependency if necessary. if (STree.Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.procedure_specification or else STree.Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.entry_specification) and then Sem.Needs_Synthetic_Dependency (Proc_Task_Or_Entry => Subprog_Sym) then Sem.Dependency_Relation.Create_Full_Subprog_Dependency (Node_Pos => STree.Node_Position (Node => STree.Parent_Node (Current_Node => Spec_Node)), Subprog_Sym => Subprog_Sym, Abstraction => Dictionary.IsAbstract, The_Heap => The_Heap); end if; if STree.Syntax_Node_Type (Node => Constraint_Node) = SP_Symbols.procedure_constraint or else STree.Syntax_Node_Type (Node => Constraint_Node) = SP_Symbols.function_constraint then -- ASSUME Constraint_Node = procedure_constraint OR function_constraint Sem.Wf_Subprogram_Constraint (Node => Constraint_Node, Subprogram_Sym => Subprog_Sym, First_Seen => True, Component_Data => Component_Data, The_Heap => The_Heap); end if; end if; -- Special checks for proof functions. if not Dictionary.Is_Null_Symbol (Subprog_Sym) and then Dictionary.IsProofFunction (Subprog_Sym) and then Dictionary.HasPostcondition (Dictionary.IsAbstract, Subprog_Sym) then -- If the return is not a boolean then warn about -- potential confusion about infinite numbers. For -- example consider the following proof function: -- -- --# function A (X : Integer) return Integer; -- --# return X + 1; -- -- Since we always assume that a function return -- something in-type, the following check can be proven, -- but it then inserts effectively `false' in all -- subsequent hypotheses! -- -- --# check A (Integer'Last); -- -- Please see Build_Annotation_Expression for more -- information. if not Dictionary.TypeIsBoolean (Dictionary.GetType (Subprog_Sym)) then ErrorHandler.Semantic_Warning_Sym (Err_Num => 320, Position => Sem.Node_Position (Spec_Node), Sym => Subprog_Sym, Scope => Current_Scope); end if; -- Implicit returns are always tricky. It is easy to -- introduce a false hypotheses as we don't have a function -- body to catch this: -- -- --# function F return Boolean; -- --# return B => False; if Dictionary.HasImplicitReturnVariable (Dictionary.IsAbstract, Subprog_Sym) then ErrorHandler.Semantic_Warning_Sym (Err_Num => 321, Position => Sem.Node_Position (Constraint_Node), Sym => Subprog_Sym, Scope => Current_Scope); end if; end if; end Wf_Subprogram_Specification; end Subprogram_Specification; spark-2012.0.deb/examiner/sem-walk_name.adb0000644000175000017500000003411411753202336017444 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- This procedure has essentially the functionality of WalkExpression -- but is used to walk expressions which the language rules require to be a -- "name". In addition to the things returned by WalkExpression, a flag is -- returned indicating whether the expression was indeed a name. -- If the expression is not a name the expression is not traversed at all -- and Result is set to the Unknown_Type_Record -- -- After the name node has been found it is traversed by WalkExpression and a subset -- of the checks usually done by wf_primary are acarried out on the result. More -- checks are done here than are necessary for this application (e.g. getting bounds -- of a type mark) but they have been left in to make Walk_Name more widely applicable; -- it could be moved to Wellformed if ever needed elsewhere. separate (Sem) procedure Walk_Name (Exp_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord; Result : out Exp_Record; Is_A_Name : out Boolean; Ref_Var_Param : in SeqAlgebra.Seq) is Node, Next_Node : STree.SyntaxNode; Sym : Dictionary.Symbol; begin -- preset result for all non-name cases advance to relation node; Result := Unknown_Type_Record; Node := Child_Node (Current_Node => Exp_Node); -- ASSUME Node = relation SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Node) = SP_Symbols.relation, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = relation in Walk_Name"); Next_Node := Next_Sibling (Current_Node => Node); -- ASSUME Next_Node = expression_rep1 OR expression_rep2 OR expression_rep3 OR -- expression_rep4 OR expression_rep5 OR NULL if Next_Node = STree.NullNode then -- ASSUME Next_Node = NULL -- advance to simple_expression; Node := Child_Node (Current_Node => Node); -- ASSUME Node = simple_expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Node) = SP_Symbols.simple_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = simple_expression in Walk_Name"); Next_Node := Next_Sibling (Current_Node => Node); -- ASSUME Next_Node = relational_operator OR inside OR outside OR NULL if Next_Node = STree.NullNode then -- ASSUME Next_Node = NULL -- advance to simple_expression_opt; Node := Child_Node (Current_Node => Node); -- ASSUME Node = simple_expression OR simple_expression_opt if Syntax_Node_Type (Node => Node) = SP_Symbols.simple_expression_opt then -- Node = simple_expression_opt -- advance to term; Node := Child_Node (Current_Node => Node); -- ASSUME Node = unary_adding_operator OR term if Syntax_Node_Type (Node => Node) = SP_Symbols.term then -- ASSUME Node = term -- advance to factor; Node := Child_Node (Current_Node => Node); -- ASSUME Node = term OR factor if Syntax_Node_Type (Node => Node) = SP_Symbols.factor then -- ASSUME Node = factor -- advance to primary; Node := Child_Node (Current_Node => Node); -- ASSUME Node = primary OR RWabs OR RWnot if Syntax_Node_Type (Node => Node) = SP_Symbols.primary then -- ASSUME Node = primary Next_Node := Next_Sibling (Current_Node => Node); -- ASSUME Next_Node = double_star OR NULL if Next_Node = STree.NullNode then -- ASSUME Next_Node = NULL -- advance to name; Node := Child_Node (Current_Node => Node); -- 
ASSUME Node = numeric_literal OR character_literal OR string_literal OR name OR -- qualified_expression OR expression OR attribute if Syntax_Node_Type (Node => Node) = SP_Symbols.name then -- ASSUME Node = name Is_A_Name := True; Walk_Expression_P.Walk_Expression (Exp_Node => Node, Scope => Scope, Type_Context => Dictionary.GetUnknownTypeMark, Context_Requires_Static => False, Ref_Var => Ref_Var_Param, Result => Result, Component_Data => Component_Data, The_Heap => The_Heap); -- now perform some checks on the result akin to those -- of wf_primary Sym := Result.Other_Symbol; case Result.Sort is when Is_Unknown => Result := Unknown_Type_Record; when Type_Result => null; when Is_Package => Result := Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 5, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Dictionary.GetSimpleName (Sym)); when Is_Function => ErrorHandler.Semantic_Error (Err_Num => 3, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Dictionary.GetSimpleName (Sym)); Result.Is_Static := False; Result.Is_Constant := False; Result.Is_ARange := False; when Is_Object => Result.Is_ARange := False; if Dictionary.IsVariableOrSubcomponent (Sym) then SeqAlgebra.AddMember (The_Heap, Ref_Var_Param, Natural (Dictionary.SymbolRef (Sym))); end if; when Is_Type_Mark => Result.Is_Static := Dictionary.IsStatic (Sym, Scope); Result.Is_Constant := True; if Dictionary.IsScalarTypeMark (Sym, Scope) then -- we can get bounds of range and so on Result.Is_ARange := True; -- get upper and lower bounds of type from dictionary Result.Value := Maths.ValueRep (Dictionary.GetScalarAttributeValue (False, -- don't want base type LexTokenManager.First_Token, Sym)); Result.Range_RHS := Maths.ValueRep (Dictionary.GetScalarAttributeValue (False, -- don't want base type LexTokenManager.Last_Token, Sym)); else -- not scalar so we don't do bounds and its not a range Result.Is_ARange := False; end if; when Is_Parameter_Name => null; -- should never occur end case; Result.Param_Count := 0; Result.Param_List := Lists.Null_List; elsif Syntax_Node_Type (Node => Node) = SP_Symbols.numeric_literal or else Syntax_Node_Type (Node => Node) = SP_Symbols.character_literal or else Syntax_Node_Type (Node => Node) = SP_Symbols.string_literal or else Syntax_Node_Type (Node => Node) = SP_Symbols.qualified_expression or else Syntax_Node_Type (Node => Node) = SP_Symbols.expression or else Syntax_Node_Type (Node => Node) = SP_Symbols.attribute then -- ASSUME Node = numeric_literal OR character_literal OR string_literal OR -- qualified_expression OR expression OR attribute Is_A_Name := False; else Is_A_Name := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = numeric_literal OR character_literal OR string_literal OR " & "name OR qualified_expression OR expression OR attribute in Walk_Name"); end if; elsif Syntax_Node_Type (Node => Next_Node) = SP_Symbols.double_star then -- ASSUME Next_Node = double_star Is_A_Name := False; else Is_A_Name := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = double_star OR NULL in Walk_Name"); end if; elsif Syntax_Node_Type (Node => Node) = SP_Symbols.RWabs or else Syntax_Node_Type (Node => Node) = SP_Symbols.RWnot then -- ASSUME Node = RWabs OR RWnot Is_A_Name := False; else Is_A_Name := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = primary OR RWabs OR RWnot 
in Walk_Name"); end if; elsif Syntax_Node_Type (Node => Node) = SP_Symbols.term then -- ASSUME Node = term Is_A_Name := False; else Is_A_Name := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = term OR factor in Walk_Name"); end if; elsif Syntax_Node_Type (Node => Node) = SP_Symbols.unary_adding_operator then -- ASSUME Node = unary_adding_operator Is_A_Name := False; else Is_A_Name := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = unary_adding_operator OR term in Walk_Name"); end if; elsif Syntax_Node_Type (Node => Node) = SP_Symbols.simple_expression then -- ASSUME Node = simple_expression Is_A_Name := False; else Is_A_Name := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = simple_expression OR simple_expression_opt in Walk_Name"); end if; elsif Syntax_Node_Type (Node => Next_Node) = SP_Symbols.relational_operator or else Syntax_Node_Type (Node => Next_Node) = SP_Symbols.inside or else Syntax_Node_Type (Node => Next_Node) = SP_Symbols.outside then -- ASSUME Next_Node = relational_operator OR inside OR outside Is_A_Name := False; else Is_A_Name := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = relational_operator OR inside OR outside OR NULL in Walk_Name"); end if; elsif Syntax_Node_Type (Node => Next_Node) = SP_Symbols.expression_rep1 or else Syntax_Node_Type (Node => Next_Node) = SP_Symbols.expression_rep2 or else Syntax_Node_Type (Node => Next_Node) = SP_Symbols.expression_rep3 or else Syntax_Node_Type (Node => Next_Node) = SP_Symbols.expression_rep4 or else Syntax_Node_Type (Node => Next_Node) = SP_Symbols.expression_rep5 then -- ASSUME Next_Node = expression_rep1 OR expression_rep2 OR expression_rep3 OR -- expression_rep4 OR expression_rep5 Is_A_Name := False; else Is_A_Name := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = expression_rep1 OR expression_rep2 OR expression_rep3 OR " & "expression_rep4 OR expression_rep5 OR NULL in Walk_Name"); end if; end Walk_Name; spark-2012.0.deb/examiner/vcg/0000755000175000017500000000000011753203756015037 5ustar eugeneugenspark-2012.0.deb/examiner/vcg/sparklex/0000755000175000017500000000000011753203756016670 5ustar eugeneugenspark-2012.0.deb/examiner/vcg/sparklex/sparklex.rlu0000644000175000017500000000167111753202314021237 0ustar eugeneugenrule(1): curr_line_invariant(CURR_LINE) may_be_deduced_from [e_strings__get_length(fld_conts(CURR_LINE)) < natural__last, fld_curr_pos(CURR_LINE) <= e_strings__get_length(fld_conts(CURR_LINE)) + 1, fld_lookahead_pos(CURR_LINE) <= e_strings__get_length(fld_conts(CURR_LINE)) + 1, fld_last_token_pos(CURR_LINE) <= e_strings__get_length(fld_conts(CURR_LINE)) + 1]. rule(2): e_strings__get_length(fld_conts(CURR_LINE)) < natural__last may_be_deduced_from [curr_line_invariant(CURR_LINE)]. rule(3): fld_curr_pos(CURR_LINE) <= e_strings__get_length(fld_conts(CURR_LINE)) + 1 may_be_deduced_from [curr_line_invariant(CURR_LINE)]. rule(4): fld_lookahead_pos(CURR_LINE) <= e_strings__get_length(fld_conts(CURR_LINE)) + 1 may_be_deduced_from [curr_line_invariant(CURR_LINE)]. rule(5): fld_last_token_pos(CURR_LINE) <= e_strings__get_length(fld_conts(CURR_LINE)) + 1 may_be_deduced_from [curr_line_invariant(CURR_LINE)]. 
spark-2012.0.deb/examiner/vcg/sem/0000755000175000017500000000000011753203756015623 5ustar eugeneugenspark-2012.0.deb/examiner/vcg/sem/compunit/0000755000175000017500000000000011753203756017461 5ustar eugeneugenspark-2012.0.deb/examiner/vcg/sem/compunit/walkstatements/0000755000175000017500000000000011753203756022527 5ustar eugeneugenspark-2012.0.deb/examiner/vcg/sem/compunit/walkstatements/case_stack/0000755000175000017500000000000011753203756024627 5ustar eugeneugenspark-2012.0.deb/examiner/vcg/sem/compunit/walkstatements/case_stack/case_stack.rlu0000644000175000017500000001147611753202314027451 0ustar eugeneugenrule(1): for_all(I : integer, index_range__first <= I and I <= fld_top_ptr(STATE) and (dictionary__is_null_symbol(fld_sym(element(fld_s(STATE), [I]))) <-> fld_sym(element(fld_s(STATE), [I])) = dictionary__nullsymbol) -> fld_actualupperbound(fld_complete_adt(element(fld_s(STATE), [I]))) - fld_lowerbound(fld_complete_adt(element(fld_s(STATE), [I]))) < examinerconstants__completechecksize and (dictionary__is_null_symbol(fld_sym(element(fld_s(STATE), [I]))) or dictionary__istypemark(fld_sym(element(fld_s(STATE), [I])), dictionary__dict))) may_be_deduced_from [stack_is_valid(STATE)]. rule(2): stack_is_valid(STATE) may_be_deduced_from [for_all(I : integer, index_range__first <= I and I <= fld_top_ptr(STATE) and (dictionary__is_null_symbol(fld_sym(element(fld_s(STATE), [I]))) <-> fld_sym(element(fld_s(STATE), [I])) = dictionary__nullsymbol) -> fld_actualupperbound(fld_complete_adt(element(fld_s(STATE), [I]))) - fld_lowerbound(fld_complete_adt(element(fld_s(STATE), [I]))) < examinerconstants__completechecksize and (dictionary__is_null_symbol(fld_sym(element(fld_s(STATE), [I]))) or dictionary__istypemark(fld_sym(element(fld_s(STATE), [I])), dictionary__dict)))]. rule(3): stack_is_valid(STATE) may_be_deduced_from [stack_is_valid(INITIAL_STATE), fld_s(INITIAL_STATE) = fld_s(STATE), INITIAL_TOP_PTR = fld_top_ptr(INITIAL_STATE), for_all(I : integer, index_range__first <= I and I <= fld_top_ptr(STATE) and (dictionary__is_null_symbol(fld_sym(element(fld_s(INITIAL_STATE), [I]))) <-> fld_sym(element(fld_s(INITIAL_STATE), [I])) = dictionary__nullsymbol) -> fld_actualupperbound(fld_complete_adt(element(fld_s(INITIAL_STATE), [I]))) - fld_lowerbound(fld_complete_adt(element(fld_s(INITIAL_STATE), [I]))) < examinerconstants__completechecksize and (dictionary__is_null_symbol(fld_sym(element(fld_s(INITIAL_STATE), [I]))) or dictionary__istypemark(fld_sym(element(fld_s(INITIAL_STATE), [I])), dictionary__dict)))]. 
rule(4): for_all(I : integer, index_range__first <= I and I <= top_ptr + 1 and (dictionary__is_null_symbol(fld_sym(element(update(s, [top_ptr + 1], mk__stack_record( case_flags := CASE_FLAGS, complete_adt := COMPLETE_ADT, sym := SYM, lower_bound := LOWER_BOUND, upper_bound := UPPER_BOUND)), [I]))) <-> fld_sym(element(update(s, [top_ptr + 1], mk__stack_record( case_flags := CASE_FLAGS, complete_adt := COMPLETE_ADT, sym := SYM, lower_bound := LOWER_BOUND, upper_bound := UPPER_BOUND)), [I])) = dictionary__nullsymbol) -> fld_actualupperbound(fld_complete_adt(element(update(s, [top_ptr + 1], mk__stack_record( case_flags := CASE_FLAGS, complete_adt := COMPLETE_ADT, sym := SYM, lower_bound := LOWER_BOUND, upper_bound := UPPER_BOUND)), [I]))) - fld_lowerbound(fld_complete_adt(element(update(s, [top_ptr + 1], mk__stack_record( case_flags := CASE_FLAGS, complete_adt := COMPLETE_ADT, sym := SYM, lower_bound := LOWER_BOUND, upper_bound := UPPER_BOUND)), [I]))) < examinerconstants__completechecksize and (dictionary__is_null_symbol(fld_sym(element(update(s, [top_ptr + 1], mk__stack_record( case_flags := CASE_FLAGS, complete_adt := COMPLETE_ADT, sym := SYM, lower_bound := LOWER_BOUND, upper_bound := UPPER_BOUND)), [I]))) or dictionary__istypemark(fld_sym(element(update(s, [top_ptr + 1], mk__stack_record( case_flags := CASE_FLAGS, complete_adt := COMPLETE_ADT, sym := SYM, lower_bound := LOWER_BOUND, upper_bound := UPPER_BOUND)), [I])), dictionary__dict))) may_be_deduced_from [for_all(I : integer, index_range__first <= I and I <= top_ptr and (dictionary__is_null_symbol(fld_sym(element(s, [I]))) <-> fld_sym(element(s, [I])) = dictionary__nullsymbol) -> fld_actualupperbound(fld_complete_adt(element(s, [I]))) - fld_lowerbound(fld_complete_adt(element(s, [I]))) < examinerconstants__completechecksize and (dictionary__is_null_symbol(fld_sym(element(s, [I]))) or dictionary__istypemark(fld_sym(element(s, [I])), dictionary__dict))), fld_actualupperbound(COMPLETE_ADT) - fld_lowerbound(COMPLETE_ADT) < examinerconstants__completechecksize, dictionary__is_null_symbol(SYM) or dictionary__istypemark(SYM, dictionary__dict), dictionary__is_null_symbol(SYM) <-> SYM = dictionary__nullsymbol].spark-2012.0.deb/examiner/vcg/sem/walk_expression_p/0000755000175000017500000000000011753203756021357 5ustar eugeneugenspark-2012.0.deb/examiner/vcg/sem/walk_expression_p/type_context_stack/0000755000175000017500000000000011753203756025271 5ustar eugeneugenspark-2012.0.deb/examiner/vcg/sem/walk_expression_p/type_context_stack/type_context_stack.rlu0000644000175000017500000000272011753202314031715 0ustar eugeneugenrule(1): for_all(I : integer, index_range__first <= I and I <= fld_top_ptr(STACK) and (dictionary__is_null_symbol(element(fld_s(STACK), [I])) <-> element(fld_s(STACK), [I]) = dictionary__nullsymbol) -> dictionary__is_null_symbol(element(fld_s(STACK), [I])) or dictionary__istypemark(element(fld_s(STACK), [I]), dictionary__dict)) may_be_deduced_from [stack_is_valid(STACK)]. rule(2): stack_is_valid(STACK) may_be_deduced_from [for_all(I : integer, index_range__first <= I and I <= fld_top_ptr(STACK) and (dictionary__is_null_symbol(element(fld_s(STACK), [I])) <-> element(fld_s(STACK), [I]) = dictionary__nullsymbol) -> dictionary__is_null_symbol(element(fld_s(STACK), [I])) or dictionary__istypemark(element(fld_s(STACK), [I]), dictionary__dict))]. 
rule(3): for_all(I : integer, index_range__first <= I and I <= fld_top_ptr(STACK) + 1 and (dictionary__is_null_symbol(element(update(fld_s(STACK), [fld_top_ptr(STACK) + 1], X), [I])) <-> element(update(fld_s(STACK), [fld_top_ptr(STACK) + 1], X), [I]) = dictionary__nullsymbol) -> dictionary__is_null_symbol(element(update(fld_s(STACK), [fld_top_ptr(STACK) + 1], X), [I])) or dictionary__istypemark(element(update(fld_s(STACK), [fld_top_ptr(STACK) + 1], X), [I]), dictionary__dict)) may_be_deduced_from [dictionary__is_null_symbol(X) or dictionary__istypemark(X, dictionary__dict), stack_is_valid(STACK)]. spark-2012.0.deb/examiner/vcg/sem/aggregate_stack/0000755000175000017500000000000011753203756020736 5ustar eugeneugenspark-2012.0.deb/examiner/vcg/sem/aggregate_stack/aggregate_stack.rlu0000644000175000017500000001724511753202314024573 0ustar eugeneugenrule(1): for_all(I : integer, index_range__first <= I and I <= fld_top_ptr(STATE) and (dictionary__is_null_symbol(fld_type_sym(element(fld_s(STATE), [I]))) <-> fld_type_sym(element(fld_s(STATE), [I])) = dictionary__nullsymbol) -> (dictionary__is_null_symbol(fld_type_sym(element(fld_s(STATE), [I]))) or dictionary__istypemark(fld_type_sym(element(fld_s(STATE), [I])), dictionary__dict)) and ((fld_is_defined(fld_lower_bound(element(fld_s(STATE), [I]))) and fld_is_defined(fld_upper_bound(element(fld_s(STATE), [I]))) -> fld_value(fld_lower_bound(element(fld_s(STATE), [I]))) <= fld_value(fld_upper_bound(element(fld_s(STATE), [I])))) and fld_actualupperbound(fld_complete_rec(element(fld_s(STATE), [I]))) - fld_lowerbound(fld_complete_rec(element(fld_s(STATE), [I]))) < examinerconstants__completechecksize)) may_be_deduced_from [stack_is_valid(STATE)]. rule(2): stack_is_valid(STATE) may_be_deduced_from [for_all(I : integer, index_range__first <= I and I <= fld_top_ptr(STATE) and (dictionary__is_null_symbol(fld_type_sym(element(fld_s(STATE), [I]))) <-> fld_type_sym(element(fld_s(STATE), [I])) = dictionary__nullsymbol) -> (dictionary__is_null_symbol(fld_type_sym(element(fld_s(STATE), [I]))) or dictionary__istypemark(fld_type_sym(element(fld_s(STATE), [I])), dictionary__dict)) and ((fld_is_defined(fld_lower_bound(element(fld_s(STATE), [I]))) and fld_is_defined(fld_upper_bound(element(fld_s(STATE), [I]))) -> fld_value(fld_lower_bound(element(fld_s(STATE), [I]))) <= fld_value(fld_upper_bound(element(fld_s(STATE), [I])))) and fld_actualupperbound(fld_complete_rec(element(fld_s(STATE), [I]))) - fld_lowerbound(fld_complete_rec(element(fld_s(STATE), [I]))) < examinerconstants__completechecksize))]. 
rule(3): stack_is_valid(STATE) may_be_deduced_from [stack_is_valid(INITIAL_STATE), fld_s(INITIAL_STATE) = fld_s(STATE), INITIAL_TOP_PTR = fld_top_ptr(INITIAL_STATE), for_all(I : integer, index_range__first <= I and I <= fld_top_ptr(STATE) and (dictionary__is_null_symbol(fld_type_sym(element(fld_s(INITIAL_STATE), [I]))) <-> fld_type_sym(element(fld_s(INITIAL_STATE), [I])) = dictionary__nullsymbol) -> (dictionary__is_null_symbol(fld_type_sym(element(fld_s(INITIAL_STATE), [I]))) or dictionary__istypemark(fld_type_sym(element(fld_s(INITIAL_STATE), [I])), dictionary__dict)) and ((fld_is_defined(fld_lower_bound(element(fld_s(INITIAL_STATE), [I]))) and fld_is_defined(fld_upper_bound(element(fld_s(INITIAL_STATE), [I]))) -> fld_value(fld_lower_bound(element(fld_s(INITIAL_STATE), [I]))) <= fld_value(fld_upper_bound(element(fld_s(INITIAL_STATE), [I])))) and fld_actualupperbound(fld_complete_rec(element(fld_s(INITIAL_STATE), [I]))) - fld_lowerbound(fld_complete_rec(element(fld_s(INITIAL_STATE), [I]))) < examinerconstants__completechecksize))]. rule(4): for_all(I : integer, index_range__first <= I and I <= top_ptr + 1 and (dictionary__is_null_symbol(fld_type_sym(element(update(s, [top_ptr + 1], mk__typ_entry( type_sym := TYPE_SYM, lower_bound := LOWER_BOUND, upper_bound := UPPER_BOUND, agg_flags := AGG_FLAGS, counter := COUNTER, complete_rec := COMPLETE_REC)), [i_]))) <-> fld_type_sym(element(update(s, [top_ptr + 1], mk__typ_entry( type_sym := TYPE_SYM, lower_bound := LOWER_BOUND, upper_bound := UPPER_BOUND, agg_flags := AGG_FLAGS, counter := COUNTER, complete_rec := COMPLETE_REC)), [i_])) = dictionary__nullsymbol) -> (dictionary__is_null_symbol(fld_type_sym(element(update(s, [top_ptr + 1], mk__typ_entry( type_sym := TYPE_SYM, lower_bound := LOWER_BOUND, upper_bound := UPPER_BOUND, agg_flags := AGG_FLAGS, counter := COUNTER, complete_rec := COMPLETE_REC)), [i_]))) or dictionary__istypemark(fld_type_sym(element(update(s, [top_ptr + 1], mk__typ_entry( type_sym := TYPE_SYM, lower_bound := LOWER_BOUND, upper_bound := UPPER_BOUND, agg_flags := AGG_FLAGS, counter := COUNTER, complete_rec := COMPLETE_REC)), [i_])), dictionary__dict)) and ((fld_is_defined(fld_lower_bound(element(update(s, [top_ptr + 1], mk__typ_entry( type_sym := TYPE_SYM, lower_bound := LOWER_BOUND, upper_bound := UPPER_BOUND, agg_flags := AGG_FLAGS, counter := COUNTER, complete_rec := COMPLETE_REC)), [I]))) and fld_is_defined(fld_upper_bound(element(update(s, [top_ptr + 1], mk__typ_entry( type_sym := TYPE_SYM, lower_bound := LOWER_BOUND, upper_bound := UPPER_BOUND, agg_flags := AGG_FLAGS, counter := COUNTER, complete_rec := COMPLETE_REC)), [I]))) -> fld_value(fld_lower_bound(element(update(s, [top_ptr + 1], mk__typ_entry( type_sym := TYPE_SYM, lower_bound := LOWER_BOUND, upper_bound := UPPER_BOUND, agg_flags := AGG_FLAGS, counter := COUNTER, complete_rec := COMPLETE_REC)), [I]))) <= fld_value(fld_upper_bound(element(update(s, [top_ptr + 1], mk__typ_entry( type_sym := TYPE_SYM, lower_bound := LOWER_BOUND, upper_bound := UPPER_BOUND, agg_flags := AGG_FLAGS, counter := COUNTER, complete_rec := COMPLETE_REC)), [I])))) and fld_actualupperbound(fld_complete_rec(element(update(s, [top_ptr + 1], mk__typ_entry( type_sym := TYPE_SYM, lower_bound := LOWER_BOUND, upper_bound := UPPER_BOUND, agg_flags := AGG_FLAGS, counter := COUNTER, complete_rec := COMPLETE_REC)), [I]))) - fld_lowerbound(fld_complete_rec(element(update(s, [top_ptr + 1], mk__typ_entry( type_sym := TYPE_SYM, lower_bound := LOWER_BOUND, upper_bound := UPPER_BOUND, agg_flags := AGG_FLAGS, 
counter := COUNTER, complete_rec := COMPLETE_REC)), [I]))) < examinerconstants__completechecksize)) may_be_deduced_from [for_all(I : integer, index_range__first <= I and I <= top_ptr and (dictionary__is_null_symbol(fld_type_sym(element(s, [I]))) <-> fld_type_sym(element(s, [I])) = dictionary__nullsymbol) -> (dictionary__is_null_symbol(fld_type_sym(element(s, [I]))) or dictionary__istypemark(fld_type_sym(element(s, [I])), dictionary__dict)) and ((fld_is_defined(fld_lower_bound(element(s, [I]))) and fld_is_defined(fld_upper_bound(element(s, [I]))) -> fld_value(fld_lower_bound(element(s, [I]))) <= fld_value(fld_upper_bound(element(s, [I])))) and fld_actualupperbound(fld_complete_rec(element(s, [I]))) - fld_lowerbound(fld_complete_rec(element(s, [I]))) < examinerconstants__completechecksize)), dictionary__is_null_symbol(TYPE_SYM) or dictionary__istypemark(TYPE_SYM, dictionary__dict), fld_is_defined(LOWER_BOUND) and fld_is_defined(UPPER_BOUND) -> fld_value(LOWER_BOUND) <= fld_value(UPPER_BOUND), fld_actualupperbound(COMPLETE_REC) - fld_lowerbound(COMPLETE_REC) < examinerconstants__completechecksize, top_ptr < top_range__last]. spark-2012.0.deb/examiner/sem-wf_priority_value.adb0000644000175000017500000001031611753202336021255 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem) procedure Wf_Priority_Value (Node : in STree.SyntaxNode; Pragma_Kind : in Dictionary.RavenscarPragmas; Error_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; The_Heap : in out Heap.HeapRecord; Value_Rep : out LexTokenManager.Lex_String; Compatible : out Boolean) is Type_Sym : Dictionary.Symbol; Result : Exp_Record; Unused_Seq : SeqAlgebra.Seq; Unused_Component_Data : ComponentManager.ComponentData; begin -- check validity of argument -- must be integer and in appropriate System.[Interrupt_]Priority range SeqAlgebra.CreateSeq (The_Heap, Unused_Seq); ComponentManager.Initialise (Unused_Component_Data); -- ASSUME Node = expression OR annotation_expression if Syntax_Node_Type (Node => Node) = SP_Symbols.expression then -- ASSUME Node = expression --# accept Flow, 10, Unused_Component_Data, "Expected ineffective assignment"; Walk_Expression_P.Walk_Expression (Exp_Node => Node, Scope => Scope, Type_Context => Dictionary.GetPredefinedIntegerType, Context_Requires_Static => True, Ref_Var => Unused_Seq, Result => Result, Component_Data => Unused_Component_Data, The_Heap => The_Heap); --# end accept; elsif Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_expression then -- ASSUME Node = annotation_expression --# accept Flow, 10, Unused_Component_Data, "Expected ineffective assignment"; Walk_Expression_P.Walk_Annotation_Expression (Exp_Node => Node, Scope => Scope, Type_Context => Dictionary.GetPredefinedIntegerType, Context => Precondition, Result => Result, Component_Data => Unused_Component_Data, The_Heap => The_Heap); --# end accept; else Result := Null_Exp_Record; Aggregate_Stack.Init; end if; SeqAlgebra.DisposeOfSeq (The_Heap, Unused_Seq); -- actual parameter must be fixed if not Result.Is_Constant then ErrorHandler.Semantic_Error (Err_Num => 37, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; -- argument must be integer value Type_Sym := Dictionary.GetRootType (Result.Type_Symbol); if Dictionary.CompatibleTypes (Scope, Type_Sym, Dictionary.GetPredefinedIntegerType) then Compatible := True; -- range checking here if values known in Dictionary Check_Priority_Range (Error_Sym => Error_Sym, Scope => Scope, Pragma_Kind => Pragma_Kind, Err_Pos => Node_Position (Node => Node), Value => Result.Value, Value_Rep => Value_Rep); else Compatible := False; Value_Rep := LexTokenManager.Null_String; ErrorHandler.Semantic_Error (Err_Num => 877, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; end Wf_Priority_Value; spark-2012.0.deb/examiner/indexmanager-index_table_p.ads0000644000175000017500000002646711753202336022216 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with E_Strings; with LexTokenLists; --# inherit CommandLineData, --# ErrorHandler, --# ExaminerConstants, --# E_Strings, --# FileSystem, --# IndexManager, --# LexTokenLists, --# LexTokenManager, --# ScreenEcho, --# SPARK_IO, --# SystemErrors, --# XMLReport; private package IndexManager.Index_Table_P --# own Fatal_Error : Boolean; --# Index_Table : Index_Tables; --# initializes Fatal_Error, --# Index_Table; is procedure Stop_SPARK; --# global in Fatal_Error; --# derives null from Fatal_Error; -- Write a string to standard output, followed by a newline. -- If plain output specified then force to lower case to avoid -- platform-specific diffs (eg Windows drive letters). procedure Debug_Put_E_Str (E_Str : in E_Strings.T; New_Line : in Boolean); --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# E_Str, --# New_Line; procedure Output_Error (E : in IndexManager.Library_Manager_Errors; Source_File : in LexTokenManager.Lex_String; Token_Position : in IndexManager.File_Position; Token_String : in E_Strings.T); --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out Fatal_Error; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# Fatal_Error from *, --# E & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# E, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Source_File, --# Token_Position, --# Token_String; -- Add an index file that is not an auxiliary index file or a -- super index file in the list of index files. procedure Add_Index_File (Filename : in E_Strings.T); --# global in out Index_Table; --# in out LexTokenManager.State; --# derives Index_Table, --# LexTokenManager.State from *, --# Filename, --# LexTokenManager.State; -- Add a super index file in the list of index files. procedure Add_Super_Index_File (Filename : in E_Strings.T; Position : in IndexManager.File_Position; Source_File : in LexTokenManager.Lex_String); --# global in CommandLineData.Content; --# in out ErrorHandler.Error_Context; --# in out Fatal_Error; --# in out Index_Table; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# Fatal_Error from *, --# Filename, --# Index_Table, --# LexTokenManager.State, --# Source_File & --# Index_Table from *, --# Filename, --# LexTokenManager.State, --# Position, --# Source_File & --# LexTokenManager.State from *, --# Filename & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ErrorHandler.Error_Context, --# Filename, --# Index_Table, --# LexTokenManager.State, --# Position, --# Source_File; -- Add an auxiliary index file in the list of index files. 
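   -- Editorial sketch (not part of the original source): a caller inside
   -- IndexManager might record an auxiliary index entry roughly as follows,
   -- where Aux_Name, Unit_Prefix, Entry_Pos and Parent_Idx are hypothetical
   -- values of the parameter types declared below:
   --
   --    Add_Aux_Index_File (Filename    => Aux_Name,     -- E_Strings.T
   --                        Unit        => Unit_Prefix,  -- LexTokenLists.Lists
   --                        Position    => Entry_Pos,    -- IndexManager.File_Position
   --                        Source_File => Parent_Idx);  -- LexTokenManager.Lex_String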
procedure Add_Aux_Index_File (Filename : in E_Strings.T; Unit : in LexTokenLists.Lists; Position : in IndexManager.File_Position; Source_File : in LexTokenManager.Lex_String); --# global in CommandLineData.Content; --# in out ErrorHandler.Error_Context; --# in out Fatal_Error; --# in out Index_Table; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# Fatal_Error, --# Index_Table from *, --# Filename, --# Index_Table, --# LexTokenManager.State, --# Position, --# Source_File, --# Unit & --# LexTokenManager.State from *, --# Filename & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ErrorHandler.Error_Context, --# Filename, --# Index_Table, --# LexTokenManager.State, --# Position, --# Source_File, --# Unit; -- Mark an index file in the list of index files as done. procedure Index_File_Done (Filename : in LexTokenManager.Lex_String); --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out Index_Table; --# in out SPARK_IO.File_Sys; --# derives Index_Table from *, --# Filename, --# LexTokenManager.State & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Filename, --# Index_Table, --# LexTokenManager.State; -- Check if the parent index file is actually an ancestor of the -- index file. A is an ancestor of B if A is more local than B. We -- assume that A is an ancestor of A. The empty index filename is -- the ancestor of all the index files. function Is_File_Ancestor (Parent_Filename : in LexTokenManager.Lex_String; Filename : in LexTokenManager.Lex_String) return Boolean; --# global in Index_Table; --# in LexTokenManager.State; -- Get the more relevant index file if one exists: -- * an auxiliary file which : -- + has the prefix of the unit -- + has not been already parsed -- + is more local than the top index file -- * if no more local auxiliary index files are found, the index -- file is a normal index file or a super index file which -- + has not been already parsed -- + is more local than the top index file -- * if no index file or super index file are found, the returned -- filename is empty -- -- An empty top index filename represent the more global index -- file. -- -- The Aux_Index_Unit is only defined for auxiliary index files. procedure Get_Next_Index_File (Unit : in LexTokenLists.Lists; Top_Filename : in LexTokenManager.Lex_String; Filename : out LexTokenManager.Lex_String; File_Type : out IndexManager.Entry_Types; Aux_Index_Unit : out LexTokenLists.Lists; Position : out IndexManager.File_Position); --# global in CommandLineData.Content; --# in Index_Table; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Aux_Index_Unit, --# Filename, --# File_Type, --# Position from Index_Table, --# LexTokenManager.State, --# Top_Filename, --# Unit & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Index_Table, --# LexTokenManager.State, --# Top_Filename, --# Unit; -- Check if the parent index file is actually an auxiliary -- ancestor of the index file. A is an auxiliary ancestor of B if -- A is in the same subtree of auxiliary index files as B and A is -- an ancestor of B in this subtree of auxiliary index files. We -- assume that A is an auxiliary ancestor of A. The empty index -- filename can not be in a auxiliary subtree. 
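   -- Editorial illustration (not part of the original source): with two
   -- hypothetical Lex_String values Candidate_Idx and Current_Idx, a caller
   -- could ask
   --
   --    if Is_Aux_File_Ancestor (Parent_Index_Filename => Candidate_Idx,
   --                             Index_Filename        => Current_Idx)
   --    then
   --       ...  -- Candidate_Idx is Current_Idx itself or one of its
   --            -- auxiliary ancestors within the same auxiliary subtree
   --    end if;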
function Is_Aux_File_Ancestor (Parent_Index_Filename : in LexTokenManager.Lex_String; Index_Filename : in LexTokenManager.Lex_String) return Boolean; --# global in Index_Table; --# in LexTokenManager.State; procedure List_Index_File (Report_File : in SPARK_IO.File_Type); --# global in CommandLineData.Content; --# in Index_Table; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Index_Table, --# LexTokenManager.State, --# Report_File, --# XMLReport.State & --# XMLReport.State from *, --# CommandLineData.Content, --# Index_Table; end IndexManager.Index_Table_P; spark-2012.0.deb/examiner/relationalgebra.ads0000644000175000017500000005431411753202336020104 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- RelationalAlgebra -- -- Purpose: -- The package RelationalAlgebra provides an abstract data type for storing -- and manipulating binary relations between Natural numbers. -- It is a higher-level wrapper around the Heap package. -- -- Each binary relation is notionally represented by a matrix, e.g., -- -- X Y | Z -- --|-----|-----|-----| -- A |(A,X)|(A,Y)|(A,Z)| -- --|-----|-----|-----| -- B |(B,X)|(B,Y)|(B,Z)| -- --|-----|-----|-----| -- C |(C,X)|(C,Y)|(C,Z)| -- --|-----|-----|-----| -- D |(D,X)|(D,Y)|(D,Z)| -- --|-----|-----|-----| -- -- At each entry of the matrix is a pair of the binary relation. -- The matrix is indexed by the actual values of of pair given that the -- that the values are Natural numbers. -- -- In practice the Relations generated by the Examiner have sparse matrices -- and so the implementation is not a simple matrix but is optimized for -- representing sparse matrices. -- -- Clients: -- RelationalAlgebra is used by the Sem and FlowAnlyser modules of the -- Examiner. -- -- Use: -- Within the FlowAnalyser the relationships to be recorded are between -- expressions and variables or between variables. Expressions are numbered -- and, assuming variables are stored in a table, a variable may be -- represented by a number, that is its index within the table. -- This gives a pair of numbers to represent an element of the relation and -- also an index into the Relation's matrix. -- -- For an example of the use of RelationalAlgebra see -- FlowAnalyser-FlowAnalyse.adb -- Important principles are: -- -- 1. a Relation object must be Created before it is used; -- -- 2. a Relation object should be Disposed when its use is complete. 
-- A Relation object that has been disposed is recycled and returned to -- the Heap; -- -- 3. RelationalAlgebra is a wrapper for Heap and if the Heap becomes -- exhausted of free storage elements an attempt to perform any of the -- operations of this package will cause termination with a fatal -- error. As this may occur the methods of RelationalAlgebra are not -- guaranteed to terminate. -- -- Extension: -- It is not expected that any extension will be made to this package. -- -------------------------------------------------------------------------------- with Heap; with SeqAlgebra; --# inherit Heap, --# SeqAlgebra, --# Statistics, --# SystemErrors; package RelationAlgebra is -- The abstract data type representing a relation. type Relation is private; -- A null relation is a relation which has not been initialized by applying -- the Create operation Null_Relation : constant Relation; type Pair is private; NullPair : constant Pair; type RowLeader is private; NullRowLdr : constant RowLeader; type ColLeader is private; NullColLdr : constant ColLeader; -- The matrix implementation uses a space efficient -- representation but not necessarily computationally efficient. To -- improve computational efficiency during multiple element insertions the -- matrix implementation provides a cacheing mechanism that maintains -- references to the last used row and column leaders and the last elements -- accessed in the row and in the column. -- It has the following operations: Initialize_Cache, -- Reset_Column_Cache and Insert_Cached_Pair. -- The Insert_Cached_Pair is the operation which is used to construct and -- extend the matrix. type Caches is record Rtion : Relation; RowLdr : RowLeader; ColLdr : ColLeader; RowPair : Pair; ColPair : Pair; end record; ------------------------------------------------------------ -- Relation Algebra Operations function IsNullPair (P : Pair) return Boolean; -- Returns the value of the row value of a matrix element (Pair). function RowValue (TheHeap : Heap.HeapRecord; P : Pair) return Natural; -- Returns the value of the column value of a matrix element (Pair). function ColumnValue (TheHeap : Heap.HeapRecord; P : Pair) return Natural; -- Gets the next matrix element (Pair) in the row adjacent to Pair P. function RightSuccr (TheHeap : Heap.HeapRecord; P : Pair) return Pair; -- Gets the next matrix element (Pair) in the column adjacent to Pair P. function DownSuccr (TheHeap : Heap.HeapRecord; P : Pair) return Pair; -- Obtains the first row (Row_Leader) of the relation R. function FirstRowLeader (TheHeap : Heap.HeapRecord; R : Relation) return RowLeader; -- Obtains the succeeding row (Row_Leader) from the given Row_Leader L. function NextRowLeader (TheHeap : Heap.HeapRecord; L : RowLeader) return RowLeader; function FirstColLeader (TheHeap : Heap.HeapRecord; R : Relation) return ColLeader; -- Obtains the succeeding column (Col_Leader) from the given Col_Leader L. function NextColLeader (TheHeap : Heap.HeapRecord; L : ColLeader) return ColLeader; -- Obtains the first matrix element (Pair) in the row specified by -- Row_Leader L. function FirstInRow (TheHeap : Heap.HeapRecord; L : RowLeader) return Pair; -- Obtains the first matrix element (Pair) in the column specified by -- Col_Leader L. 
function FirstInCol (TheHeap : Heap.HeapRecord; L : ColLeader) return Pair; ------ Functions and operations for implementation of RelationAlgebra ------ -- Basic Constructors and Destructors -- CreateRelation must be applied to a Relation object, R, -- before it is used in any of the other Relation operations. procedure CreateRelation (TheHeap : in out Heap.HeapRecord; R : out Relation); --# global in out Statistics.TableUsage; --# derives R, --# TheHeap from TheHeap & --# Statistics.TableUsage from *, --# TheHeap; -- Objects of type Relations utilize storage managed by the package Heap. -- The storage used by a relation R must be returned to the Heap by calling -- DisposeOfRelation before R goes out of scope. procedure DisposeOfRelation (TheHeap : in out Heap.HeapRecord; R : in Relation); --# derives TheHeap from *, --# R; procedure UpdateRight (TheHeap : in out Heap.HeapRecord; P, R : in Pair); --# derives TheHeap from *, --# P, --# R; procedure UpdateDown (TheHeap : in out Heap.HeapRecord; P, D : in Pair); --# derives TheHeap from *, --# D, --# P; function Relation_To_Atom (R : Relation) return Heap.Atom; function Pair_To_Atom (P : Pair) return Heap.Atom; function Atom_To_Pair (A : Heap.Atom) return Pair; function RowLeader_To_Atom (R : RowLeader) return Heap.Atom; function Atom_To_RowLeader (A : Heap.Atom) return RowLeader; function ColLeader_To_Atom (C : ColLeader) return Heap.Atom; function Atom_To_ColLeader (A : Heap.Atom) return ColLeader; -- Initalizes the Cache from relation R and must be called prior to its use. -- Once initialized a cache is associated with R and should not be used to -- access any other relation. procedure InitialiseCache (TheHeap : in Heap.HeapRecord; R : in Relation; Cache : out Caches); --# derives Cache from R, --# TheHeap; -- Returns the row index value of the Row_Leader L. function RowLdrIndex (TheHeap : Heap.HeapRecord; L : RowLeader) return Natural; procedure Insert_Row_Leader (The_Heap : in out Heap.HeapRecord; R : in Relation; I : in Natural; Cache : in out Caches); --# global in out Statistics.TableUsage; --# derives Cache, --# The_Heap from Cache, --# I, --# R, --# The_Heap & --# Statistics.TableUsage from *, --# Cache, --# I, --# The_Heap; -- InsertPair provides the basic means to build a relation. -- Each pair in the relation R represented by the row value, I, and the -- column value J may be inserted individually. The pair is only inserted -- if it is not already present. There are no duplicates. procedure InsertPair (TheHeap : in out Heap.HeapRecord; R : in Relation; I, J : in Natural); --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# TheHeap from *, --# I, --# J, --# R, --# TheHeap; -- AddRow adds an entire row to a relation R. -- The row index I is applied to each of the values in the set S to obtain -- a set of pairs that are added to the relation R if they are not already -- present in R. -- R and S must have been initialized using their corresponding Create ops. procedure AddRow (TheHeap : in out Heap.HeapRecord; R : in Relation; I : in Natural; S : in SeqAlgebra.Seq); --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# TheHeap from *, --# I, --# R, --# S, --# TheHeap; -- AddCol adds an entire column to a relation R. -- The column index J is applied to each of the values in the set S to obtain -- a set of pairs that are added to the relation R if they are not already -- present in R. -- R and S must have been initialized using their corresponding Create ops. 
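--  Illustrative sketch only; not part of the original SPARK source.  It shows
--  the Create / build / Dispose lifecycle described in the comments above.
--  The procedure name is hypothetical and the heap is assumed to have been
--  initialised by the caller.
with Heap;
with RelationAlgebra;
procedure Relation_Lifecycle_Example (The_Heap : in out Heap.HeapRecord) is
   R : RelationAlgebra.Relation;
begin
   --  A Relation must be created before any other operation is applied to it.
   RelationAlgebra.CreateRelation (The_Heap, R);
   --  Build the relation {(1,2), (2,3)}; a duplicate would not be re-inserted.
   RelationAlgebra.InsertPair (The_Heap, R, 1, 2);
   RelationAlgebra.InsertPair (The_Heap, R, 2, 3);
   --  Transitive closure, computed in place: (1,3) is now also an element.
   RelationAlgebra.CloseRelation (The_Heap, R);
   --  The storage must be returned to the heap before R goes out of scope.
   RelationAlgebra.DisposeOfRelation (The_Heap, R);
end Relation_Lifecycle_Example;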
procedure AddCol (TheHeap : in out Heap.HeapRecord; R : in Relation; J : in Natural; S : in SeqAlgebra.Seq); --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# TheHeap from *, --# J, --# R, --# S, --# TheHeap; ------------------------------------------------------------ -- Query operations -- True only if the row indicated by Index I has no entries or the row I -- does not exist. -- R must have been initialized using CreateRelation. function IsEmptyRow (TheHeap : Heap.HeapRecord; R : Relation; I : Natural) return Boolean; -- Returns the number of entries in a given column, J, present within -- relation R. -- R must have been initialized using CreateRelation. function ColumnCount (TheHeap : Heap.HeapRecord; R : Relation; J : Natural) return Natural; -- Resets the current cached column to the first row of the relation -- associated with the cache and the cached column element to the first -- element of the column. -- A call to Reset_Column_Cache is normally made when traversing and -- inserting elements into the matrix column by column prior to starting the -- traveresal of a new column. procedure ResetColumnCache (TheHeap : in Heap.HeapRecord; Cache : in out Caches); --# derives Cache from *, --# TheHeap; -- Creates a new relation T which is the same as relation R except that all -- the rows with indices in the set S are excluded from T. -- Any index in the set S which is not a row index of R is ignored. -- On successful completion the new relation T may be empty if R is empty or -- all rows in R are excluded. procedure RowRemoval (TheHeap : in out Heap.HeapRecord; R : in Relation; S : in SeqAlgebra.Seq; T : out Relation); --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# TheHeap from *, --# R, --# S, --# TheHeap & --# T from TheHeap; -- Creates a new set S containing all the column value -- entries for the row of a relation R specified by GivenIndex. -- If a row specified by the GivenIndex is not present in R, -- S is the empty set. procedure RowExtraction (TheHeap : in out Heap.HeapRecord; R : in Relation; GivenIndex : in Natural; S : out SeqAlgebra.Seq); --# global in out Statistics.TableUsage; --# derives S from TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# GivenIndex, --# R, --# TheHeap; -- Creates a new set S containing all the row value -- entries for the column of a relation R specified by GivenIndex. -- If a column specified by the GivenIndex is not present in R, -- S is the empty set. procedure ColExtraction (TheHeap : in out Heap.HeapRecord; R : in Relation; GivenIndex : in Natural; S : out SeqAlgebra.Seq); --# global in out Statistics.TableUsage; --# derives S from TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# GivenIndex, --# R, --# TheHeap; -- The relation R is reduced to a sub-relation containing just the values -- which are elements of the set S. That is, every element of the final value -- of R is a row-column pair such that the pair was an element of the -- original value of R and both the row and the column values are in S. -- On successful completion R may be empty if there are no -- row-column pairs for which both row and column values are in S. procedure ExtractSubRelation (TheHeap : in out Heap.HeapRecord; R : in out Relation; S : in SeqAlgebra.Seq); --# global in out Statistics.TableUsage; --# derives R from TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# R, --# S, --# TheHeap; -- An identity relation is the pairs (a,a), (b,b), (c,c), ... 
-- This subprogram adds the pairs of an identity relation to an existing -- relation R. The values for the identity relation are taken from the -- set S. The final value of R is the initial value of R with the pairs of -- the identity relation added as elements if they are not already present. -- If the set S is empty then R is unchanged. procedure AddIdentity (TheHeap : in out Heap.HeapRecord; R : in Relation; S : in SeqAlgebra.Seq); --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# TheHeap from *, --# R, --# S, --# TheHeap; -- AugmentRelation performs the same operation as Sum except that it is -- performed in place and the augmented set is the final value of A. procedure AugmentRelation (TheHeap : in out Heap.HeapRecord; A, B : in Relation); --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# TheHeap from *, --# A, --# B, --# TheHeap; -- Creates a new relation C which is the relation A augmented -- with all the elements from relation B which are not in A. -- The value of B is not changed. procedure Sum (TheHeap : in out Heap.HeapRecord; A, B : in Relation; C : out Relation); --# global in out Statistics.TableUsage; --# derives C from TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# A, --# B, --# TheHeap; -- Creates a new relation C from the two relations A and B such that an -- element is in C only if it is present in both A and B. procedure Composition (TheHeap : in out Heap.HeapRecord; A, B : in Relation; C : out Relation); --# global in out Statistics.TableUsage; --# derives C from TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# A, --# B, --# TheHeap; -- Calculates the transitive closure of relation R. -- The computation is done in place, i.e., the final value of R is the -- transitive closure of the initial value of R. procedure CloseRelation (TheHeap : in out Heap.HeapRecord; R : in Relation); --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# TheHeap from *, --# R, --# TheHeap; -- Calculates the Cartesian Product of two sets A and B and places the -- result in relation C. procedure CartesianProduct (TheHeap : in out Heap.HeapRecord; A, B : in SeqAlgebra.Seq; C : out Relation); --# global in out Statistics.TableUsage; --# derives C from TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# A, --# B, --# TheHeap; private type Relation is range 1 .. Heap.Atom'Last; --# assert Relation'Base is Integer; Null_Relation : constant Relation := Relation'Last; -- A Pair represents an element of the matrix. It has two values, -- a row value and a column value. From an element the next element in the -- same row may be accessed, its right successor. Similarly from an element -- the next element in the same row may be accessed, its down successor. -- The end of a row or a column is denoted by a NullPair. type Pair is range 0 .. Heap.Atom'Last; --# assert Pair'Base is Integer; NullPair : constant Pair := Pair (Heap.NullAtom); -- A RowLeader identifies a particular row of the matrix. The first -- row of a matrix is obtainable from a given Relation. Thereafter the -- next row of the matrix (in fact the next RowLeader) may be accessed from -- each RowLeader. The last RowLeader is terminated with a NullRowLdr. -- The first element (Pair) of the row is also accessible from the RowLeader. type RowLeader is range 0 .. Heap.Atom'Last; --# assert RowLeader'Base is Integer; -- A ColLeader identifies a particular column of the matrix. 
The first -- column of a matrix is obtainable from a given Relation. Thereafter the -- next column of the matrix (in fact the next ColLeader) may be accessed -- from each ColLeader. The last ColLeader is terminated with a NullColLdr. -- The first element (Pair) of the column is also accessible from the ColLeader. type ColLeader is range 0 .. Heap.Atom'Last; --# assert ColLeader'Base is Integer; NullRowLdr : constant RowLeader := RowLeader (Heap.NullAtom); NullColLdr : constant ColLeader := ColLeader (Heap.NullAtom); ------------------------------------------------------------ -- Private Subprograms: --: -- The declaration of the following subprograms have been promoted from the -- package body for use in child packages. Since the implementation of -- Relation objects is described in the package body, in this instance, one -- should refer to the package body for a description of these subprograms. -- Returns the column index value of the Col_Leader L. function ColLdrIndex (TheHeap : Heap.HeapRecord; L : ColLeader) return Natural; end RelationAlgebra; spark-2012.0.deb/examiner/configfile.ads0000644000175000017500000001020611753202335017045 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with SPARK_IO; use type SPARK_IO.File_Status; --# inherit Ada.Characters.Handling, --# Casing, --# CommandLineData, --# ContextManager, --# Dictionary, --# ErrorHandler, --# Error_IO, --# Error_Types, --# ExaminerConstants, --# E_Strings, --# FileSystem, --# LexTokenLists, --# LexTokenManager, --# LexTokenStacks, --# Maths, --# ScreenEcho, --# SparkLex, --# SPARK_IO, --# SP_Expected_Symbols, --# SP_Symbols, --# Statistics, --# SystemErrors, --# XMLReport; package ConfigFile --# own State; --# initializes State; is procedure Read_Config_File (Opened_OK : out Boolean; No_Errors : out Boolean); --# global in CommandLineData.Content; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# out State; --# derives Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# No_Errors, --# SPARK_IO.File_Sys, --# State from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# Opened_OK from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys & --# SparkLex.Curr_Line from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys; procedure Output_Config_File (To_File : in SPARK_IO.File_Type); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out XMLReport.State; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys, --# XMLReport.State from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# State, --# To_File, --# XMLReport.State; end ConfigFile; spark-2012.0.deb/examiner/sem-compunit-checkpackageneedsbody.adb0000644000175000017500000000357211753202336023634 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
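--  Illustrative sketch only; not part of the original SPARK source.  It
--  refers to the ConfigFile package above and shows a hypothetical caller
--  reading the target configuration file (selected via the command line
--  data) and, when it parses cleanly, echoing it to an already-open report
--  file.  The procedure name and the Report parameter are assumptions.
with SPARK_IO;
with ConfigFile;
procedure Read_Config_Example (Report : in SPARK_IO.File_Type) is
   Opened_OK : Boolean;
   No_Errors : Boolean;
begin
   ConfigFile.Read_Config_File (Opened_OK => Opened_OK, No_Errors => No_Errors);
   if Opened_OK and then No_Errors then
      ConfigFile.Output_Config_File (To_File => Report);
   end if;
end Read_Config_Example;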
-- --============================================================================= -- Synopsis -- This procedure determines whether a package needs a body (new Ada 95 rule) ---------------------------------------------------------------------------- separate (Sem.CompUnit) procedure CheckPackageNeedsBody (Node_Pos : in LexTokenManager.Token_Position; Pack_Sym : in Dictionary.Symbol) is begin case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => null; when CommandLineData.SPARK95_Onwards => if Dictionary.IsPackage (Pack_Sym) and then Dictionary.GetScope (Pack_Sym) = Dictionary.GlobalScope and then not Dictionary.PackageRequiresBody (Pack_Sym) then ErrorHandler.Semantic_Error (Err_Num => 604, Reference => 4, Position => Node_Pos, Id_Str => LexTokenManager.Null_String); end if; end case; end CheckPackageNeedsBody; spark-2012.0.deb/examiner/sem-walk_expression_p-walk_annotation_expression-down_wf_store.adb0000644000175000017500000000664411753202336031573 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Walk_Expression_P.Walk_Annotation_Expression) procedure Down_Wf_Store (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; Next_Node : out STree.SyntaxNode) is Type_Info : Sem.Exp_Record; Error_Found : Boolean := False; begin Exp_Stack.Pop (Item => Type_Info, Stack => E_Stack); case Type_Info.Sort is when Sem.Is_Unknown => Type_Info := Unknown_Symbol_Record; Next_Node := STree.NullNode; when Sem.Is_Object => if Dictionary.IsArrayTypeMark (Type_Info.Type_Symbol, Scope) then Type_Info.Param_Count := 0; Next_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Next_Node = store_list SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.store_list, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = store_list in Down_Wf_Store"); elsif Dictionary.IsRecordTypeMark (Type_Info.Type_Symbol, Scope) then Next_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Next_Node = store_list SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.store_list, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = store_list in Down_Wf_Store"); else -- neither record nor array Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 323, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => STree.Parent_Node (Current_Node => Node)), Id_Str => LexTokenManager.Null_String); Next_Node := STree.NullNode; end if; when others => Type_Info := Unknown_Symbol_Record; ErrorHandler.Semantic_Error (Err_Num => 323, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => STree.Parent_Node (Current_Node => Node)), Id_Str => LexTokenManager.Null_String); Next_Node := STree.NullNode; end case; Type_Info.Errors_In_Expression := Type_Info.Errors_In_Expression or else Error_Found; Exp_Stack.Push (X => Type_Info, Stack => E_Stack); end Down_Wf_Store; spark-2012.0.deb/examiner/lextokenmanager-seq_algebra.ads0000644000175000017500000002313211753202336022372 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Heap; with LexTokenManager; with SeqAlgebra; --# inherit Heap, --# LexTokenManager, --# SeqAlgebra, --# Statistics, --# SystemErrors; -- This package extends the package SeqAlgebra to support -- Lex_String. This package uses case insensitive comparison betweeen -- strings. 
package LexTokenManager.Seq_Algebra is type Seq is private; type Member_Of_Seq is private; -- Returns true only if S is a null seq function Is_Null_Seq (S : Seq) return Boolean; -- Returns true only if M is a null member (not in any sequence or set) function Is_Null_Member (M : Member_Of_Seq) return Boolean; -- Returns the first member of the given Seq S. The returned member -- can be used as a starting point to iterate through all members of the -- sequence or set. function First_Member (The_Heap : Heap.HeapRecord; S : Seq) return Member_Of_Seq; -- Next member of the sequence or set. A null member will be returned if the -- sequence or set has no more members. -- Successively calling Next_Member with the previously returned Member -- (starting with the member returned from First_Member) will iterate over -- all members of the sequence or set provided no elements are added or -- removed from the sequence or set during the iteration over its elements. function Next_Member (The_Heap : Heap.HeapRecord; M : Member_Of_Seq) return Member_Of_Seq; -- Initializes Seq S ready for use. It must be called prior to any -- other Sequence or Set operation. procedure Create_Seq (The_Heap : in out Heap.HeapRecord; S : out Seq); --# global in out Statistics.TableUsage; --# derives S, --# The_Heap from The_Heap & --# Statistics.TableUsage from *, --# The_Heap; -- Returns true only if the Sequence or Set S is empty. function Is_Empty_Seq (The_Heap : Heap.HeapRecord; S : Seq) return Boolean; -- SeqAlgebra uses the Heap package for storage. The storage must be -- released by calling Dispose before all references (there may be aliases) -- to the sequence or set within The_Heap are out of scope. -- WARNING: Disposing of a Seq S may leave Members of S with invalid -- references to non-existent elements. Do not use Members from a disposed -- Seq. -- As a rule Member objects of a set S should not have a larger scope than S. procedure Dispose_Of_Seq (The_Heap : in out Heap.HeapRecord; S : in Seq); --# derives The_Heap from *, --# S; -------- Functions and operations intended for sequences -------- -- Returns a pseudo-member which can be used to prefix new elements to a -- sequence. Appending to the pseudo-member will place the appended member -- at the head of the sequence. -- The caller must ensure that the sequence S is properly Created. -- The returned member is guaranteed to be non null and deemed to refer -- to the pseudo element of S. function Before_First_Member (S : Seq) return Member_Of_Seq; -- Inserts the GivenValue in a sequence referenced by M after member M. -- M must not be null. -- If the call is successful the new value of M refers to the appended -- element. procedure Append_After (The_Heap : in out Heap.HeapRecord; M : in out Member_Of_Seq; Given_Value : in LexTokenManager.Lex_String); --# global in out Statistics.TableUsage; --# derives M, --# Statistics.TableUsage from *, --# The_Heap & --# The_Heap from *, --# Given_Value, --# M; -- Gets the (Natural) value of the member M. -- The Member must be non null. -- The value returned is undefined if M is the pseudo member obtained -- from a call to Before_First_Member. function Value_Of_Member (The_Heap : Heap.HeapRecord; M : Member_Of_Seq) return LexTokenManager.Lex_String; -- If the Given_Value is not already an element of the set S add it to S. -- A Create operation must have been applied to S. -- WARNING: AddMember is only defined for a Seq object representing a set. 
-- If an element is added to a set it is in general undecidable as -- to whether it will be included in any current iteration over the elements -- of the set using Next_Member. procedure Add_Member (The_Heap : in out Heap.HeapRecord; S : in Seq; Given_Value : in LexTokenManager.Lex_String); --# global in LexTokenManager.State; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# The_Heap from *, --# Given_Value, --# LexTokenManager.State, --# S, --# The_Heap; -- If an element with Given_Value exists in S, remove it from S. -- A Create operation must have been applied to S. -- WARNING: RemoveMember is only defined for a Seq object representing a set. -- Removing an element from a Seq will render any members -- referencing the element invalid and they should not be used. -- There are no checks against this erroneous use. procedure Remove_Member (The_Heap : in out Heap.HeapRecord; S : in Seq; Given_Value : in LexTokenManager.Lex_String); --# global in LexTokenManager.State; --# derives The_Heap from *, --# Given_Value, --# LexTokenManager.State, --# S; -- Return true only if there is an element in S with the value Given_Value. -- WARNING: IsMember is only defined for a Seq object representing a set. function Is_Member (The_Heap : Heap.HeapRecord; S : Seq; Given_Value : LexTokenManager.Lex_String) return Boolean; --# global in LexTokenManager.State; ----------- Set Operations on Seq representing Sets ----------- -- Creates a new set C with all the values from set A and set B. -- The caller should not apply CreateSeq to C prior to invoking Union. -- Sets A and B are unchanged. -- Note if value Z is in A and B, it will only appear once in C. -- This is a set operation; do not use with a Seq representing a sequence. procedure Union (The_Heap : in out Heap.HeapRecord; A, B : in Seq; C : out Seq); --# global in LexTokenManager.State; --# in out Statistics.TableUsage; --# derives C from The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# A, --# B, --# LexTokenManager.State, --# The_Heap; -- An in-place Union: all elements in set B are added to set -- A with no duplicates. B is unchanged. -- This is a set operation; do not use with a Seq representing a sequence. procedure Augment_Seq (The_Heap : in out Heap.HeapRecord; A, B : in Seq); --# global in LexTokenManager.State; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# The_Heap from *, --# A, --# B, --# LexTokenManager.State, --# The_Heap; -- This is an "in-place" Complement: all elements -- are removed from A if they are also in B. B is unchanged. -- This is a set operation; do not use with a Seq representing a sequence.
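--  Illustrative sketch only; not part of the original SPARK source.  It shows
--  the set lifecycle described above: create, add members, iterate, dispose.
--  The procedure name is hypothetical, the heap is assumed to be initialised,
--  and the two Lex_String tokens are assumed to have been obtained from
--  LexTokenManager elsewhere.
with Heap;
with LexTokenManager.Seq_Algebra;
procedure Seq_Algebra_Usage_Example
  (The_Heap : in out Heap.HeapRecord;
   Tok_A    : in     LexTokenManager.Lex_String;
   Tok_B    : in     LexTokenManager.Lex_String)
is
   S : LexTokenManager.Seq_Algebra.Seq;
   M : LexTokenManager.Seq_Algebra.Member_Of_Seq;
begin
   --  A Seq must be created before any other operation is applied to it.
   LexTokenManager.Seq_Algebra.Create_Seq (The_Heap, S);
   --  Add two members; adding Tok_A a second time would leave a single copy.
   LexTokenManager.Seq_Algebra.Add_Member (The_Heap, S, Tok_A);
   LexTokenManager.Seq_Algebra.Add_Member (The_Heap, S, Tok_B);
   --  Iterate over the members of the set.
   M := LexTokenManager.Seq_Algebra.First_Member (The_Heap, S);
   while not LexTokenManager.Seq_Algebra.Is_Null_Member (M) loop
      --  Value_Of_Member (The_Heap, M) yields the Lex_String of this member.
      M := LexTokenManager.Seq_Algebra.Next_Member (The_Heap, M);
   end loop;
   --  Release the storage before all references to S go out of scope.
   LexTokenManager.Seq_Algebra.Dispose_Of_Seq (The_Heap, S);
end Seq_Algebra_Usage_Example;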
procedure Reduction (The_Heap : in out Heap.HeapRecord; A, B : in Seq); --# global in LexTokenManager.State; --# derives The_Heap from *, --# A, --# B, --# LexTokenManager.State; procedure Debug (The_Heap : in Heap.HeapRecord; S : in Seq); --# derives null from S, --# The_Heap; private type Seq is record The_Seq : SeqAlgebra.Seq; end record; type Member_Of_Seq is record Member : SeqAlgebra.MemberOfSeq; end record; end LexTokenManager.Seq_Algebra; spark-2012.0.deb/examiner/sem-check_ceiling_priority.adb0000644000175000017500000000537011753202336022220 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) procedure Check_Ceiling_Priority (Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Check_List : in Dictionary.Iterator; Priority_Lex_Value : in LexTokenManager.Lex_String; Error_Node_Pos : in LexTokenManager.Token_Position) is It : Dictionary.Iterator; Var_Sym : Dictionary.Symbol; Priority_Val : Maths.Value; Result : Maths.Value; Unused : Maths.ErrorCode; Priority_Ok : Boolean; begin It := Check_List; Priority_Val := Maths.ValueRep (Priority_Lex_Value); while not Dictionary.IsNullIterator (It) loop Var_Sym := Dictionary.CurrentSymbol (It); -- Ignore all own variables that do not have a priority property. 
if Dictionary.IsOwnVariable (Var_Sym) and then Dictionary.HasValidPriorityProperty (Var_Sym) then -- Ensure that Var_Sym's priority is >= given priority --# accept Flow, 10, Unused, "Expected ineffective assignment"; Maths.GreaterOrEqual (Maths.ValueRep (Dictionary.GetPriorityProperty (Var_Sym)), Priority_Val, Result, Unused); Maths.ValueToBool (Result, Priority_Ok, Unused); --# end accept; if not Priority_Ok then -- "Priority Ceiling check failure: the priority of Var_Sym is less -- than that of the Sym" ErrorHandler.Semantic_Error_Sym2 (Err_Num => 934, Reference => ErrorHandler.No_Reference, Position => Error_Node_Pos, Sym => Sym, Sym2 => Var_Sym, Scope => Scope); end if; end if; It := Dictionary.NextSymbol (It); end loop; --# accept Flow, 33, Unused, "Expected to be neither referenced nor exported"; end Check_Ceiling_Priority; spark-2012.0.deb/examiner/sem-walk_expression_p-up_wf_aggregate.adb0000644000175000017500000002777211753202336024402 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Walk_Expression_P) procedure Up_Wf_Aggregate (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type) is Name_Exp : Sem.Exp_Record; Error_Found : Boolean := False; Parent : STree.SyntaxNode; -------------------------------------------------------------- procedure Check_Array_Completeness (Parent_Node_Pos : in LexTokenManager.Token_Position; Error_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Aggregate_Stack.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Aggregate_Stack.State, --# Error_Found from *, --# Aggregate_Stack.State & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Aggregate_Stack.State, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Parent_Node_Pos, --# SPARK_IO.File_Sys; --# pre Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State); --# post Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State); is Index_Type_Symbol : Dictionary.Symbol; Type_Lower_Bound : Sem.Typ_Type_Bound; Type_Upper_Bound : Sem.Typ_Type_Bound; Aggregate_Flags : Sem.Typ_Agg_Flags; Entry_Counter : Natural; Expected_Entries : Natural; Complete_Rec : CompleteCheck.T; begin --# accept Flow, 10, Index_Type_Symbol, "Expect ineffective assignment"; Aggregate_Stack.Pop (Type_Sym => Index_Type_Symbol, Lower_Bound => Type_Lower_Bound, Upper_Bound => Type_Upper_Bound, Agg_Flags => Aggregate_Flags, Counter => Entry_Counter, Complete_Rec => Complete_Rec); --# end accept; if Aggregate_Flags.Has_Others_Part then CompleteCheck.SeenOthers (Complete_Rec); end if; if Aggregate_Flags.Check_Completeness then if Aggregate_Flags.Association_Type = Sem.Aggregate_Is_Positional then if Type_Lower_Bound.Is_Defined and then Type_Upper_Bound.Is_Defined then if (Type_Upper_Bound.Value >= 0 and then Type_Lower_Bound.Value >= 0) or else (Type_Upper_Bound.Value < 0 and then Type_Lower_Bound.Value < 0) then Expected_Entries := Type_Upper_Bound.Value - Type_Lower_Bound.Value; if Expected_Entries < Natural'Last then Expected_Entries := Expected_Entries + 1; if Aggregate_Flags.More_Entries_Than_Natural or else Entry_Counter > Expected_Entries then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 415, Reference => ErrorHandler.No_Reference, Position => Parent_Node_Pos, Id_Str => LexTokenManager.Null_String); elsif Entry_Counter < Expected_Entries and then not Aggregate_Flags.Has_Others_Part then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 414, Reference => ErrorHandler.No_Reference, Position => Parent_Node_Pos, Id_Str => LexTokenManager.Null_String); end if; elsif not Aggregate_Flags.Has_Others_Part then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 414, Reference => ErrorHandler.No_Reference, Position => Parent_Node_Pos, Id_Str => LexTokenManager.Null_String); end if; else --# check Type_Upper_Bound.Value >= 0 and Type_Lower_Bound.Value <= 0; if (Type_Upper_Bound.Value - Natural'Last) <= Type_Lower_Bound.Value then Expected_Entries := Type_Upper_Bound.Value - Type_Lower_Bound.Value; if Expected_Entries < Natural'Last then Expected_Entries := Expected_Entries + 1; if Aggregate_Flags.More_Entries_Than_Natural or else Entry_Counter > Expected_Entries then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 415, Reference => ErrorHandler.No_Reference, Position 
=> Parent_Node_Pos, Id_Str => LexTokenManager.Null_String); elsif Entry_Counter < Expected_Entries and then not Aggregate_Flags.Has_Others_Part then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 414, Reference => ErrorHandler.No_Reference, Position => Parent_Node_Pos, Id_Str => LexTokenManager.Null_String); end if; elsif not Aggregate_Flags.Has_Others_Part then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 414, Reference => ErrorHandler.No_Reference, Position => Parent_Node_Pos, Id_Str => LexTokenManager.Null_String); end if; elsif not Aggregate_Flags.Has_Others_Part then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 414, Reference => ErrorHandler.No_Reference, Position => Parent_Node_Pos, Id_Str => LexTokenManager.Null_String); end if; end if; end if; else -- named association if Complete_Rec.Undeterminable and then not Aggregate_Flags.Has_Others_Part then ErrorHandler.Semantic_Warning (Err_Num => 306, Position => Parent_Node_Pos, Id_Str => LexTokenManager.Null_String); elsif CompleteCheck.IsComplete (Complete_Rec) = CompleteCheck.Incomplete then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 414, Reference => ErrorHandler.No_Reference, Position => Parent_Node_Pos, Id_Str => LexTokenManager.Null_String); end if; end if; end if; if Aggregate_Flags.Signal_Out_Of_Range and then Aggregate_Flags.Out_Of_Range_Seen then ErrorHandler.Semantic_Warning (Err_Num => 303, Position => Parent_Node_Pos, Id_Str => LexTokenManager.Null_String); end if; if Aggregate_Flags.Warn_No_Others and then not Aggregate_Flags.Has_Others_Part and then not (Aggregate_Flags.Check_Completeness and then Aggregate_Flags.Association_Type /= Sem.Aggregate_Is_Positional and then Complete_Rec.Undeterminable) then ErrorHandler.Semantic_Warning (Err_Num => 306, Position => Parent_Node_Pos, Id_Str => LexTokenManager.Null_String); end if; --# accept Flow, 33, Index_Type_Symbol, "Expected to be neither referenced or exported"; end Check_Array_Completeness; begin -- Up_Wf_Aggregate Exp_Stack.Pop (Item => Name_Exp, Stack => E_Stack); Parent := STree.Parent_Node (Current_Node => Node); -- ASSUME Parent = enumeration_representation_clause OR code_statement OR -- aggregate_or_expression OR qualified_expression OR -- annotation_aggregate_or_expression OR annotation_qualified_expression if STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.qualified_expression or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.annotation_qualified_expression then -- ASSUME Parent = qualified_expression OR annotation_qualified_expression -- this is a top level, not embedded, aggregate if not Dictionary.IsUnknownTypeMark (Name_Exp.Type_Symbol) then Name_Exp.Sort := Sem.Type_Result; Name_Exp.Param_Count := 0; Name_Exp.Param_List := Lists.Null_List; Name_Exp.Other_Symbol := Dictionary.NullSymbol; Name_Exp.Is_ARange := False; Name_Exp.Is_Static := False; -- constant should already be set if Dictionary.IsArrayTypeMark (Name_Exp.Type_Symbol, Scope) then Check_Array_Completeness (Parent_Node_Pos => STree.Node_Position (Node => Parent), Error_Found => Error_Found); end if; end if; elsif STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.enumeration_representation_clause or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.code_statement or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.aggregate_or_expression or else STree.Syntax_Node_Type (Node => Parent) = SP_Symbols.annotation_aggregate_or_expression then -- ASSUME Parent = enumeration_representation_clause OR 
code_statement OR -- aggregate_or_expression OR annotation_aggregate_or_expression -- it is an embedded aggregate of a multi-dim array -- decrease depth of dimension count if Name_Exp.Param_Count > 0 then Name_Exp.Param_Count := Name_Exp.Param_Count - 1; Check_Array_Completeness (Parent_Node_Pos => STree.Node_Position (Node => Parent), Error_Found => Error_Found); else Error_Found := True; end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Parent = enumeration_representation_clause OR code_statement OR " & "aggregate_or_expression OR qualified_expression OR " & "annotation_aggregate_or_expression OR annotation_qualified_expression in Up_Wf_Aggregate"); end if; Name_Exp.Errors_In_Expression := Name_Exp.Errors_In_Expression or else Error_Found; Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); end Up_Wf_Aggregate; spark-2012.0.deb/examiner/sparkhtml.ads0000644000175000017500000000734611753202336016761 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- SparkHTML -- -- Purpose: -- This package contains the procedures required to convert the text-based -- report file output of the Examiner into HTML, with useful links where -- appropriate. -- -- The design of this package is given in S.P0468.53.47 -- -- Clients: -- Examiner MainLoop -- -- Use: -- See MainLoop -- -- Extension: -- Update as and when the Examiner's report file format changes. 
-------------------------------------------------------------------------------- with ContextManager; --# inherit Ada.Characters.Handling, --# CommandLineData, --# ContextManager, --# ContextManager.Ops, --# E_Strings, --# FileSystem, --# IndexManager, --# ScreenEcho, --# SPARK_IO; package SparkHTML --# own Generate_HTML : Boolean; --# HTML_Work_Dir : E_Strings.T; --# SPARK_Work_Dir : E_Strings.T; --# initializes Generate_HTML; is -- Creates subdir and frame file procedure Init_SPARK_HTML; --# global in CommandLineData.Content; --# in out Generate_HTML; --# in out SPARK_IO.File_Sys; --# out HTML_Work_Dir; --# out SPARK_Work_Dir; --# derives Generate_HTML, --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# SPARK_IO.File_Sys & --# HTML_Work_Dir from CommandLineData.Content, --# SPARK_IO.File_Sys & --# SPARK_Work_Dir from SPARK_IO.File_Sys; procedure Gen_Report_HTML; --# global in CommandLineData.Content; --# in HTML_Work_Dir; --# in SPARK_Work_Dir; --# in out Generate_HTML; --# in out SPARK_IO.File_Sys; --# derives Generate_HTML from *, --# CommandLineData.Content, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Generate_HTML, --# HTML_Work_Dir, --# SPARK_Work_Dir; procedure Gen_Listing_HTML (File_Descriptor : in ContextManager.FileDescriptors); --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in Generate_HTML; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# File_Descriptor, --# Generate_HTML; end SparkHTML; spark-2012.0.deb/examiner/sem-compunit.adb0000644000175000017500000021500411753202336017343 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
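--  Illustrative sketch only; not part of the original SPARK source.  It
--  refers to the SparkHTML package above and shows the expected calling
--  order when HTML output has been requested.  The procedure name is
--  hypothetical; per-listing conversion would additionally call
--  Gen_Listing_HTML with a ContextManager file descriptor.
with SparkHTML;
procedure Generate_HTML_Example is
begin
   --  Create the HTML working directory and the frame file.
   SparkHTML.Init_SPARK_HTML;
   --  Convert the textual report file into HTML with hyperlinks.
   SparkHTML.Gen_Report_HTML;
end Generate_HTML_Example;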
-- --============================================================================= with Error_Types; with VCG; separate (Sem) procedure CompUnit (Top_Node : in STree.SyntaxNode; Do_VCG : in Boolean) is type Hidden_Class is (Not_Hidden, Handler_Hidden, All_Hidden); ------------------------------------------------------- --# inherit ExaminerConstants, --# SPARK_IO, --# SystemErrors; package Stack --# own State; is procedure Init; --# global out State; --# derives State from ; procedure Push (X : in Boolean); --# global in out State; --# derives State from *, --# X; procedure Pop; --# global in out State; --# derives State from *; -- Note: this is not a proper function as it has a hidden -- side effect if a system error is raised function Top return Boolean; --# global in State; end Stack; ------------------------------------------------------------------------ --------------- Procedure variables ----------------------------- Semantic_Error_In_Subprogram_Body : Boolean; Data_Flow_Error_In_Subprogram_Body : Boolean; Unused_Data_Flow_Error_Flag : Boolean; Stmt_Node, Last_Node, Next_Node : STree.SyntaxNode; NodeType : SP_Symbols.SP_Symbol; ErrStatus : ErrorHandler.Error_Level; Err_Num_On_Success : Error_Types.ErrNumRange; Current_Scope : Dictionary.Scopes; TheHeap : Heap.HeapRecord; NodeTable : RefList.HashTable; GlobalComponentData : ComponentManager.ComponentData; Unused : Dictionary.Symbol; Package_Body_Withs_Own_Public_Child : Boolean; -------------- Package bodies ------------------------------ package body Stack is separate; ------------------------------------------------------------ ---------------- Embedded subprograms ---------------------- ------------------------------------------------------------ -- Returns whether a subprogram_implementation node has a fully hidden body, -- a hidden handler part, or no hiding at all. This code depends on the grammar -- in SPARK.LLA section 6.3 function Body_Hidden_Class (Node : STree.SyntaxNode) return Hidden_Class --# global in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.subprogram_implementation; is Result : Hidden_Class; Pragma_Rep_Node : STree.SyntaxNode; End_Node : STree.SyntaxNode; begin Pragma_Rep_Node := Child_Node (Current_Node => Node); -- ASSUME Pragma_Rep_Node = pragma_rep SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Pragma_Rep_Node) = SP_Symbols.pragma_rep, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Pragma_Rep_Node = pragma_rep in Body_Hidden_Class"); -- if the Pragma_Rep is immediately followed by hidden part, then the whole -- body must be hidden. If the PragmaRep is followed by something else, then -- we _might_ have a hidden handler part at End_Node. 
End_Node := Next_Sibling (Current_Node => Pragma_Rep_Node); -- ASSUME End_Node = declarative_part OR sequence_of_statements OR code_insertion OR hidden_part if Syntax_Node_Type (Node => End_Node) = SP_Symbols.hidden_part then -- ASSUME End_Node = hidden_part Result := All_Hidden; elsif Syntax_Node_Type (Node => End_Node) = SP_Symbols.declarative_part or else Syntax_Node_Type (Node => End_Node) = SP_Symbols.sequence_of_statements or else Syntax_Node_Type (Node => End_Node) = SP_Symbols.code_insertion then -- ASSUME End_Node = declarative_part OR sequence_of_statements OR code_insertion End_Node := Last_Sibling_Of (Start_Node => End_Node); -- ASSUME End_Node = designator OR hidden_part if Syntax_Node_Type (Node => End_Node) = SP_Symbols.hidden_part then -- ASSUME End_Node = hidden_part Result := Handler_Hidden; elsif Syntax_Node_Type (Node => End_Node) = SP_Symbols.designator then -- ASSUME End_Node = designator Result := Not_Hidden; else Result := Not_Hidden; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect End_Node = designator OR hidden_part in Body_Hidden_Class"); end if; else Result := Not_Hidden; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect End_Node = declarative_part OR sequence_of_statements OR code_insertion OR " & "hidden_part in Body_Hidden_Class"); end if; return Result; end Body_Hidden_Class; ---------------------------------------------------------------------- procedure Wf_Use_Type_Clause (Node : in STree.SyntaxNode) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.use_type_clause; is separate; ------------------------------------------------------------------ procedure Wf_Proof_Renaming_Declaration (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.proof_renaming_declaration; is separate; -------------------------------------------------------------------- procedure WalkStatements (Seq_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Table : in out RefList.HashTable; Component_Data : out ComponentManager.ComponentData) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# in out TheHeap; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# Table, --# TheHeap from *, --# CommandLineData.Content, --# 
ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# Seq_Node, --# STree.Table, --# Table, --# TheHeap & --# Component_Data from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# Seq_Node, --# STree.Table, --# Table, --# TheHeap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Scope, --# Seq_Node, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Table, --# TheHeap; --# pre Syntax_Node_Type (Seq_Node, STree.Table) = SP_Symbols.sequence_of_statements; --# post STree.Table = STree.Table~; is separate; --------------------------------------------------------------------- -- function used by wf_subprogram_body and wf_body_stub function Has_Parameter_Global_Or_Return_Of_Local_Private_Type (Subprog_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; -- returns true if the subprogram has a parameter or global which is of a private type -- but which is not private when viewd from the subprogram is Result : Boolean := False; It : Dictionary.Iterator; Type_Sym : Dictionary.Symbol; function Is_Private_But_Not_Private_Here (Type_Sym : Dictionary.Symbol; Subprog_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return Dictionary.TypeIsPrivate (TheType => Type_Sym) and then not Dictionary.IsPrivateType (Type_Sym, Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Subprog_Sym)); end Is_Private_But_Not_Private_Here; begin -- Has_Parameter_Global_Or_Return_Of_Local_Private_Type -- iterate through parameters It := Dictionary.FirstSubprogramParameter (Subprog_Sym); while It /= Dictionary.NullIterator loop Type_Sym := Dictionary.GetType (Dictionary.CurrentSymbol (It)); Result := Is_Private_But_Not_Private_Here (Type_Sym => Type_Sym, Subprog_Sym => Subprog_Sym); exit when Result; It := Dictionary.NextSymbol (It); end loop; if not Result then -- no parameters were private, so check globals It := Dictionary.FirstGlobalVariable (Dictionary.IsAbstract, Subprog_Sym); while It /= Dictionary.NullIterator loop Type_Sym := Dictionary.GetType (Dictionary.CurrentSymbol (It)); Result := Is_Private_But_Not_Private_Here (Type_Sym => Type_Sym, Subprog_Sym => Subprog_Sym); exit when Result; It := Dictionary.NextSymbol (It); end loop; end if; -- no parameters or globals were private, so check for function return type. 
if not Result then if Dictionary.IsFunction (Subprog_Sym) then Type_Sym := Dictionary.GetType (Subprog_Sym); Result := Is_Private_But_Not_Private_Here (Type_Sym => Type_Sym, Subprog_Sym => Subprog_Sym); end if; end if; return Result; end Has_Parameter_Global_Or_Return_Of_Local_Private_Type; ---------------------------------------------------------------------------- procedure Wf_Subprogram_Body (Node : in STree.SyntaxNode; Scope : in out Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; Next_Node : out STree.SyntaxNode) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# in out TheHeap; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# TheHeap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# TheHeap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# TheHeap & --# Next_Node, --# Scope from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.subprogram_body or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.not_overriding_subprogram_body; --# post STree.Table = STree.Table~; is separate; ------------------------------------------------------------------------ procedure Wf_Entry_Body (Node : in STree.SyntaxNode; Scope : in out Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; Next_Node : out STree.SyntaxNode) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# in out TheHeap; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# TheHeap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# TheHeap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# TheHeap & --# Next_Node, --# Scope from 
CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.entry_body; --# post (Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.subprogram_implementation or --# Next_Node = STree.NullNode) and --# STree.Table = STree.Table~; is separate; ------------------------------------------------------------------------ procedure Wf_Proof_Function_Declaration (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; The_Heap : in out Heap.HeapRecord; Proof_Func_Sym : out Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# Proof_Func_Sym from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.proof_function_declaration; --# post STree.Table = STree.Table~; is separate; ----------------------------------------------------------------------- procedure Wf_Body_Stub (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# in out TheHeap; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# TheHeap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# TheHeap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# TheHeap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.body_stub; --# post STree.Table = STree.Table~; is separate; 
----------------------------------------------------------------------- procedure Wf_Subunit (Node : in STree.SyntaxNode; Scope : in out Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table & --# Scope, --# STree.Table from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.subunit; --# post STree.Table = STree.Table~; is separate; -------------------------------------------------------------------- procedure CheckEmbedBodies (Comp_Sym : in Dictionary.Symbol; Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Comp_Sym, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# SPARK_IO.File_Sys; is separate; --------------------------------------------------------------------- procedure CheckSuspendsListAccountedFor (Proc_Or_Task : in Dictionary.Symbol; Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# Proc_Or_Task, --# SPARK_IO.File_Sys; is separate; --------------------------------------------------------------------- procedure Up_Wf_Subprogram_Body (Node : in STree.SyntaxNode; Scope : in out Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# Scope from *, --# Dictionary.Dict; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.subprogram_body or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.not_overriding_subprogram_body; is separate; --------------------------------------------------------------------- -- PNA temporary reduced annotation for phase 1 generics cfr 1340 procedure Wf_Generic_Package_Instantiation (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.generic_package_instantiation; is separate; 
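   -- Note: a generic_package_instantiation node corresponds to source text of
   -- the following form (a hypothetical instantiation, names invented for
   -- illustration):
   --
   --    package Integer_Stacks is new Generic_Stacks (Element => Integer);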
---------------------------------------------------------------------- procedure Wf_Generic_Declaration (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out GlobalComponentData; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# in out TheHeap; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# GlobalComponentData, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# TheHeap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# GlobalComponentData, --# LexTokenManager.State, --# Node, --# STree.Table, --# TheHeap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# GlobalComponentData, --# LexTokenManager.State, --# Node, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# TheHeap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.generic_declaration; --# post STree.Table = STree.Table~; is separate; ---------------------------------------------------------------------- procedure CheckPackageNeedsBody (Node_Pos : in LexTokenManager.Token_Position; Pack_Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# Pack_Sym, --# SPARK_IO.File_Sys; is separate; ---------------------------------------------------------------------- procedure Wf_Package_Body (Node : in STree.SyntaxNode; Scope : in out Dictionary.Scopes; With_Public_Child : out Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# Scope, --# STree.Table, --# With_Public_Child from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.package_body; --# post STree.Table = STree.Table~; is separate; ---------------------------------------------------------------------- procedure Up_Wf_Package_Body (Node : in STree.SyntaxNode; Scope : in out Dictionary.Scopes; Withs_Own_Public_Child : in Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in 
STree.Table; --# in out ErrorHandler.Error_Context; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Withs_Own_Public_Child & --# Scope from *, --# Dictionary.Dict & --# SLI.State from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Withs_Own_Public_Child & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# STree.Table, --# Withs_Own_Public_Child; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.package_body; is separate; ---------------------------------------------------------------------- procedure Wf_Package_Initialization (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.package_initialization; is separate; ------------------------------------------------------------------------ procedure Wf_Protected_Body (Node : in STree.SyntaxNode; Scope : in out Dictionary.Scopes; Next_Node : out STree.SyntaxNode) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# Next_Node, --# Scope, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.protected_body; --# post STree.Table = STree.Table~ and --# (Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.protected_operation_item or --# Next_Node = STree.NullNode); is separate; ------------------------------------------------------------------------ procedure Up_Wf_Protected_Body (Node : in STree.SyntaxNode; Scope : in out Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# Scope from *, --# Dictionary.Dict; --# pre Syntax_Node_Type (Node, 
STree.Table) = SP_Symbols.protected_body; is separate; ------------------------------------------------------------------------ procedure Wf_Task_Body (Node : in STree.SyntaxNode; Scope : in out Dictionary.Scopes; Next_Node : out STree.SyntaxNode) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# in out TheHeap; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# TheHeap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# TheHeap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# TheHeap & --# Next_Node, --# Scope from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.task_body; --# post STree.Table = STree.Table~ and --# (Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.subprogram_implementation or Next_Node = STree.NullNode); is separate; ------------------------------------------------------------------------ procedure Up_Wf_Task_Body (Node : in STree.SyntaxNode; Scope : in out Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# Scope from *, --# Dictionary.Dict; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.task_body; is separate; ------------------------------------------------------------------------ procedure Wf_Machine_Code_Insertion (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.code_insertion; is separate; begin -- CompUnit Package_Body_Withs_Own_Public_Child := False; Next_Node := Top_Node; Stack.Init; Heap.Initialize (TheHeap); RefList.Init (NodeTable); Aggregate_Stack.Init; ComponentManager.Initialise (GlobalComponentData); Current_Scope := Dictionary.GlobalScope; while Next_Node /= STree.NullNode --# assert STree.Table = STree.Table~; loop Last_Node := Next_Node; NodeType := Syntax_Node_Type (Node => Last_Node); case 
NodeType is when SP_Symbols.package_declaration | SP_Symbols.private_package_declaration => -- ASSUME Last_Node = package_declaration OR private_package_declaration ErrorHandler.Start_Unit; Wf_Package_Declaration (Node => Last_Node, Current_Scope => Current_Scope, Component_Data => GlobalComponentData, The_Heap => TheHeap); ErrorHandler.End_Unit; Next_Node := STree.NullNode; -- nothing below here used when SP_Symbols.generic_declaration => -- ASSUME Last_Node = generic_declaration ErrorHandler.Start_Unit; Wf_Generic_Declaration (Node => Last_Node, Current_Scope => Current_Scope); ErrorHandler.End_Unit; Next_Node := STree.NullNode; -- nothing below here used when SP_Symbols.generic_package_instantiation => -- ASSUME Last_Node = generic_package_instantiation Wf_Generic_Package_Instantiation (Node => Last_Node, Scope => Current_Scope); Next_Node := STree.NullNode; -- nothing below here used when SP_Symbols.generic_subprogram_instantiation => -- ASSUME Last_Node = generic_subprogram_instantiation Wf_Generic_Subprogram_Instantiation (Node => Last_Node, Scope => Current_Scope, Component_Data => GlobalComponentData, The_Heap => TheHeap); Next_Node := STree.NullNode; -- nothing below here used when SP_Symbols.package_body => -- ASSUME Last_Node = package_body ErrorHandler.Start_Unit; Wf_Package_Body (Node => Last_Node, Scope => Current_Scope, With_Public_Child => Package_Body_Withs_Own_Public_Child); Next_Node := Child_Node (Current_Node => Last_Node); when SP_Symbols.subprogram_body | SP_Symbols.not_overriding_subprogram_body => -- ASSUME Last_Node = subprogram_body OR not_overriding_subprogram_body ErrorHandler.Error_Reset; ErrorHandler.Start_Unit; Wf_Subprogram_Body (Node => Last_Node, Scope => Current_Scope, Component_Data => GlobalComponentData, Next_Node => Next_Node); ErrorHandler.Get_Error_Severity (Severity => ErrStatus); if ErrStatus = ErrorHandler.No_Error then Stack.Push (False); else Stack.Push (True); end if; when SP_Symbols.entry_body => -- ASSUME Last_Node = entry_body ErrorHandler.Error_Reset; ErrorHandler.Start_Unit; Wf_Entry_Body (Node => Last_Node, Scope => Current_Scope, Component_Data => GlobalComponentData, Next_Node => Next_Node); ErrorHandler.Get_Error_Severity (Severity => ErrStatus); if ErrStatus = ErrorHandler.No_Error then Stack.Push (False); else Stack.Push (True); end if; when SP_Symbols.task_body => -- ASSUME Last_Node = task_body --# accept Flow, 41, "Expected stable expression"; if CommandLineData.Ravenscar_Selected then --# end accept; ErrorHandler.Error_Reset; ErrorHandler.Start_Unit; Wf_Task_Body (Node => Last_Node, Scope => Current_Scope, Next_Node => Next_Node); ErrorHandler.Get_Error_Severity (Severity => ErrStatus); if ErrStatus = ErrorHandler.No_Error then Stack.Push (False); else Stack.Push (True); end if; else -- can't use task except in Ravenscar ErrorHandler.Semantic_Error (Err_Num => 850, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Last_Node), Id_Str => LexTokenManager.Null_String); -- prune tree walk, we don't want to enter this production when it's illegal Next_Node := STree.NullNode; end if; when SP_Symbols.protected_body => -- ASSUME Last_Node = protected_body --# accept Flow, 41, "Expected stable expression"; if CommandLineData.Ravenscar_Selected then --# end accept; ErrorHandler.Start_Unit; Wf_Protected_Body (Node => Last_Node, Scope => Current_Scope, Next_Node => Next_Node); else -- can't use protected type except in Ravenscar ErrorHandler.Semantic_Error (Err_Num => 850, Reference => ErrorHandler.No_Reference, 
Position => Node_Position (Node => Last_Node), Id_Str => LexTokenManager.Null_String); -- prune tree walk, we don't want to enter this production when it's illegal Next_Node := STree.NullNode; end if; when SP_Symbols.body_stub => -- ASSUME Last_Node = body_stub Wf_Body_Stub (Node => Last_Node, Scope => Current_Scope, Component_Data => GlobalComponentData); Next_Node := STree.NullNode; -- nothing below here used when SP_Symbols.subunit => -- ASSUME Last_Node = subunit Wf_Subunit (Node => Last_Node, Scope => Current_Scope); Next_Node := Child_Node (Current_Node => Last_Node); -- ASSUME Next_Node = parent_unit_name SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Next_Node) = SP_Symbols.parent_unit_name, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = parent_unit_name in CompUnit"); when SP_Symbols.proof_function_declaration => -- ASSUME Last_Node = proof_function_declaration --# accept Flow, 10, Unused, "Expected ineffective assignment to Unused"; Wf_Proof_Function_Declaration (Node => Last_Node, Current_Scope => Current_Scope, The_Heap => TheHeap, Proof_Func_Sym => Unused); --# end accept; Next_Node := STree.NullNode; -- nothing below here used when SP_Symbols.basic_declarative_item => -- ASSUME Last_Node = basic_declarative_item Wf_Basic_Declarative_Item (Node => Last_Node, Current_Scope => Current_Scope, Component_Data => GlobalComponentData, The_Heap => TheHeap); Next_Node := STree.NullNode; -- nothing below here used when SP_Symbols.renaming_declaration => -- ASSUME Last_Node = renaming_declaration Wf_Renaming_Declaration (Node => Last_Node, Scope => Current_Scope); Next_Node := STree.NullNode; -- nothing below here used when SP_Symbols.proof_renaming_declaration => -- ASSUME Last_Node = proof_renaming_declaration Wf_Proof_Renaming_Declaration (Node => Last_Node, Scope => Current_Scope); Next_Node := STree.NullNode; -- nothing below here used when SP_Symbols.use_type_clause => -- ASSUME Last_Node = use_type_clause Wf_Use_Type_Clause (Node => Last_Node); Next_Node := STree.NullNode; -- nothing below here used when SP_Symbols.sequence_of_statements => -- ASSUME Last_Node = sequence_of_statements Heap.ReportUsage (TheHeap); Heap.Reset (TheHeap); RefList.Init (NodeTable); WalkStatements (Seq_Node => Last_Node, Scope => Current_Scope, Table => NodeTable, Component_Data => GlobalComponentData); ComponentManager.ReportUsage (GlobalComponentData); Next_Node := STree.NullNode; when SP_Symbols.package_initialization => -- ASSUME Last_Node = package_initialization Wf_Package_Initialization (Node => Last_Node, Scope => Current_Scope); ErrorHandler.Error_Reset; Next_Node := Child_Node (Last_Node); -- ASSUME Next_Node = sequence_of_statements OR hidden_part SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Next_Node) = SP_Symbols.sequence_of_statements or else Syntax_Node_Type (Node => Next_Node) = SP_Symbols.hidden_part, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = sequence_of_statements OR hidden_part in CompUnit"); when SP_Symbols.apragma => -- ASSUME Last_Node = apragma Wf_Pragma (Node => Last_Node, Scope => Current_Scope); Next_Node := STree.NullNode; when SP_Symbols.code_insertion => -- ASSUME Last_Node = code_insertion Wf_Machine_Code_Insertion (Node => Last_Node, Scope => Current_Scope); Next_Node := STree.NullNode; -- following series of cases are places where we prune tree walk -- because there is nothing below node of use to us when SP_Symbols.with_clause | SP_Symbols.code_statement | -- probably can't reach this one any more 
SP_Symbols.context_clause_rep | SP_Symbols.refinement_definition => -- ASSUME Last_Node = with_clause OR code_statement OR context_clause_rep OR refinement_definition Next_Node := STree.NullNode; -- this tests for whether down loop end has been reached at a terminal when others => if NodeType in SP_Symbols.SP_Non_Terminal then Next_Node := Child_Node (Current_Node => Last_Node); else Next_Node := STree.NullNode; end if; end case; if Next_Node = STree.NullNode then ------up loop---------- loop --# assert STree.Table = STree.Table~; Next_Node := Next_Sibling (Current_Node => Last_Node); exit when Next_Node /= STree.NullNode; Next_Node := Parent_Node (Current_Node => Last_Node); exit when Next_Node = STree.NullNode; Last_Node := Next_Node; case Syntax_Node_Type (Node => Last_Node) is when SP_Symbols.package_body => -- ASSUME Last_Node = package_body Up_Wf_Package_Body (Node => Last_Node, Scope => Current_Scope, Withs_Own_Public_Child => Package_Body_Withs_Own_Public_Child); ErrorHandler.End_Unit; when SP_Symbols.protected_body => -- ASSUME Last_Node = protected_body Up_Wf_Protected_Body (Node => Last_Node, Scope => Current_Scope); ErrorHandler.End_Unit; when SP_Symbols.task_body => -- ASSUME Last_Node = task_body Up_Wf_Task_Body (Node => Last_Node, Scope => Current_Scope); Stack.Pop; ErrorHandler.Error_Reset; ErrorHandler.End_Unit; when SP_Symbols.entry_body => -- ASSUME Last_Node = entry_body Current_Scope := Dictionary.GetEnclosingScope (Current_Scope); Stack.Pop; ErrorHandler.Error_Reset; ErrorHandler.End_Unit; when SP_Symbols.package_initialization => -- ASSUME Last_Node = package_initialization Stmt_Node := Child_Node (Current_Node => Last_Node); ErrorHandler.Get_Error_Severity (Severity => ErrStatus); if Syntax_Node_Type (Node => Stmt_Node) /= SP_Symbols.hidden_part and then ErrStatus = ErrorHandler.No_Error then --# accept Flow, 10, Unused_Data_Flow_Error_Flag, --# "Expected ineffective assignment to Unused_Data_Flow_Error_Flag"; FlowAnalyser.FlowAnalyse (Dictionary.GetRegion (Current_Scope), Stmt_Node, Node_Position (Node => Next_Sibling (Current_Node => Stmt_Node)), GlobalComponentData, TheHeap, NodeTable, Unused_Data_Flow_Error_Flag); --# end accept; ErrorHandler.Get_Error_Severity (Severity => ErrStatus); if ErrStatus = ErrorHandler.No_Error then ErrorHandler.Report_Success (Position => Node_Position (Node => Next_Sibling (Current_Node => Stmt_Node)), Subprog_Str => LexTokenManager.Null_String, Err_Num => ErrorHandler.No_Error_Default); end if; end if; when SP_Symbols.subprogram_implementation => -- ASSUME Last_Node = subprogram_implementation Stmt_Node := Child_Node (Current_Node => Last_Node); while Stmt_Node /= STree.NullNode and then Syntax_Node_Type (Node => Stmt_Node) /= SP_Symbols.sequence_of_statements --# assert STree.Table = STree.Table~; loop Stmt_Node := Next_Sibling (Current_Node => Stmt_Node); end loop; if Stmt_Node /= STree.NullNode then ErrorHandler.Get_Error_Severity (Severity => ErrStatus); Semantic_Error_In_Subprogram_Body := not (ErrStatus = ErrorHandler.No_Error and then Stack.Top = False); Data_Flow_Error_In_Subprogram_Body := False; -- default in case flow analyser not called below if not Semantic_Error_In_Subprogram_Body then -- don't do flow analysis in presence of semantic errors FlowAnalyser.FlowAnalyse (Dictionary.GetRegion (Current_Scope), Stmt_Node, Node_Position (Node => Next_Sibling (Current_Node => Stmt_Node)), GlobalComponentData, TheHeap, NodeTable, -- to get Data_Flow_Error_In_Subprogram_Body); -- If flow=auto then issue a note informing the user 
which flow analysis mode was used. -- If it's a function or a procedure with a derives, then info flow is used, otherwise -- data flow is used. Don't need to worry about 83/95/2005 because flow=auto is not -- allowed in 83 mode. Err_Num_On_Success := ErrorHandler.No_Error_Default; --# accept F, 41, "Stable expression OK here"; if CommandLineData.Content.Flow_Option = CommandLineData.Auto_Flow then if Dictionary.GetHasDerivesAnnotation (Dictionary.GetRegion (Current_Scope)) then -- information flow Err_Num_On_Success := ErrorHandler.No_Error_Info_Flow; else -- data flow Err_Num_On_Success := ErrorHandler.No_Error_Data_Flow; end if; end if; --# end accept; ErrorHandler.Get_Error_Severity (Severity => ErrStatus); if ErrStatus = ErrorHandler.No_Error then ErrorHandler.Report_Success (Position => Node_Position (Node => Next_Sibling (Current_Node => Stmt_Node)), Subprog_Str => Dictionary.GetSimpleName (Dictionary.GetRegion (Current_Scope)), Err_Num => Err_Num_On_Success); end if; ErrorHandler.Flush_Echo_Messages; end if; -- Call VCG regardless of error status (used to be inside above IF statement). Instead, we pass -- the error status to the VCG which generates a false VC for subprograms -- containing static semantic errors --# accept F, 41, "Language_Profile expected to be invariant"; if CommandLineData.Content.Language_Profile in CommandLineData.Auto_Code_Generators and then not CommandLineData.Content.VCG then ErrorHandler.Semantic_Warning (Err_Num => 425, Position => Node_Position (Node => Next_Sibling (Current_Node => Stmt_Node)), Id_Str => LexTokenManager.Null_String); end if; --# end accept; VCG.Generate_VCs (Start_Node => Stmt_Node, Scope => Current_Scope, Do_VCG => Do_VCG, End_Position => Node_Position (Node => Next_Sibling (Current_Node => Stmt_Node)), Flow_Heap => TheHeap, Semantic_Error_In_Subprogram => Semantic_Error_In_Subprogram_Body, Data_Flow_Error_In_Subprogram => Data_Flow_Error_In_Subprogram_Body, Type_Check_Exports => ErrorHandler.Possibly_Invalid_Values); end if; when SP_Symbols.subprogram_body | SP_Symbols.not_overriding_subprogram_body => -- ASSUME Last_Node = subprogram_body OR not_overriding_subprogram_body Up_Wf_Subprogram_Body (Node => Last_Node, Scope => Current_Scope); Stack.Pop; ErrorHandler.Error_Reset; ErrorHandler.End_Unit; when SP_Symbols.main_program_declaration => -- ASSUME Last_Node = main_program_declaration if not Dictionary.Is_Null_Symbol (Dictionary.GetThePartition) and then Dictionary.SubprogramSignatureIsWellformed (Dictionary.IsAbstract, Dictionary.GetThePartition) then FlowAnalyser.FlowAnalysePartition (Node => Last_Node, TheHeap => TheHeap); end if; when others => null; end case; end loop; end if; end loop; Heap.ReportUsage (TheHeap); --# accept Flow, 33, Unused, "Expected Unused to be neither referenced nor exported" & --# Flow, 33, Unused_Data_Flow_Error_Flag, "Expected Unused_Data_Flow_Error_Flag to be neither referenced nor exported"; end CompUnit; spark-2012.0.deb/examiner/errorhandler.smf0000644000175000017500000000435111753202337017453 0ustar eugeneugenerrorhandler.adb errorhandler-getfileline.adb errorhandler-printerrors.adb errorhandler-printline.adb errorhandler-appenderrors.adb errorhandler-appendsym.adb -vcg errorhandler-echoerrorentry.adb errorhandler-erroraccumulator.adb -vcg errorhandler-warningstatus.adb errorhandler-warningstatus-readwarningfile.adb errorhandler-errorbuffer.adb errorhandler-conversions.adb -vcg errorhandler-justifications.adb errorhandler-conversions-tostring.adb errorhandler-conversions-tostring-appendreference.adb 
-vcg errorhandler-conversions-tostring-condldependency.adb errorhandler-conversions-tostring-condldependency-condldependencyexpl.adb -vcg errorhandler-conversions-tostring-condlflowerr.adb errorhandler-conversions-tostring-condlflowerr-condlflowerrexpl.adb -vcg errorhandler-conversions-tostring-controlflowerror.adb errorhandler-conversions-tostring-controlflowerror-controlflowerrorexpl.adb -vcg errorhandler-conversions-tostring-depsemanticerr.adb -vcg errorhandler-conversions-tostring-depsemanticerr-depsemanticerrexpl.adb -vcg errorhandler-conversions-tostring-ineffectivestatement.adb -vcg errorhandler-conversions-tostring-ineffectivestatement-ineffectivestatementexpl.adb -vcg errorhandler-conversions-tostring-noerr.adb -vcg errorhandler-conversions-tostring-semanticerr.adb -vcg errorhandler-conversions-tostring-semanticerr-semanticerrexpl.adb -vcg errorhandler-conversions-tostring-stabilityerror.adb errorhandler-conversions-tostring-stabilityerror-stabilityerrorexpl.adb -vcg errorhandler-conversions-tostring-unconddependency.adb errorhandler-conversions-tostring-unconddependency-unconddependencyexpl.adb -vcg errorhandler-conversions-tostring-uncondflowerr.adb errorhandler-conversions-tostring-uncondflowerr-uncondflowerrexpl.adb -vcg errorhandler-conversions-tostring-usageerror.adb errorhandler-conversions-tostring-usageerror-usageerrorexpl.adb -vcg errorhandler-conversions-tostring-warningwithoutposition.adb errorhandler-conversions-tostring-warningwithoutposition-warningwithoutpositionexpl.adb -vcg errorhandler-conversions-tostring-warningwithposition.adb -vcg errorhandler-conversions-tostring-warningwithposition-warningwithpositionexpl.adb -vcg errorhandler-conversions-tostring-note.adb -vcg errorhandler-conversions-tostring-note-noteexpl.adb -vcg spark-2012.0.deb/examiner/componentmanager.ads0000644000175000017500000003052611753202335020304 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- ComponentManager -- -- Purpose: -- The ComponentManager package serves the purpose of keeping track of -- the components (fields) of record types and variables. It offers -- an ADT (ComponentData) and a set of operations on this to support an -- arbitrary number of records. -- -- The records are organised in a tree structure, with a number of roots -- being the top-level records and branches going down through subrecords -- (or tagged-type extensions) to the leaves which are scalar variables. 
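--
-- For illustration (the type and variable names here are invented, not taken
-- from the code base): given
--
--    type Inner is record C, D : Integer; end record;
--    type Outer is record A : Boolean; B : Inner; end record;
--    V : Outer;
--
-- the variable V is a root node, V.A is a leaf child of V, and V.B is a
-- child of V with its own leaf children V.B.C and V.B.D.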
-- -- At a lower level view, we have a large number of records within an array -- structure (array of ComponentDescriptor), each record containing many -- pointers to logically adjacent records using indexes of type Component. -- Pointers are maintained to FirstChild, and then each Child has a pointer -- to its next sibling, enabling access to all children. Each node also -- maintains a list of errors stored in a SeqAlgebra. -- -- The debug procedures at the bottom of this package offer an easy way -- of exploring the layout of a particular instance of the ADT. Debugging -- output using them is enabled with the -debug=c switch. -- -- Clients: -- Sem.Compunit and subunits -- FlowAnalyser -- -- Use: -- -- Extension: -- None planned. -------------------------------------------------------------------------------- with ComponentErrors; with Dictionary; with ExaminerConstants; with Heap; with SeqAlgebra; use type Dictionary.Symbol; --# inherit CommandLineData, --# ComponentErrors, --# Debug, --# Dictionary, --# ExaminerConstants, --# Heap, --# SeqAlgebra, --# Statistics, --# SystemErrors; package ComponentManager is type Component is private; type ComponentData is private; procedure Initialise (Data : out ComponentData); --# derives Data from ; procedure AddRoot (Data : in out ComponentData; HeapSeq : in out Heap.HeapRecord; RootSym : in Dictionary.Symbol); --# global in out Statistics.TableUsage; --# derives Data, --# HeapSeq, --# Statistics.TableUsage from *, --# Data, --# HeapSeq, --# RootSym; -- pre Dictionary.IsVariable(RootSym) and -- Dictionary.TypeIsRecord(Dictionary.GetType(RootSym)); -- Adds a record variable as a root node. The name of the -- node will be RootSym. The Hash link will be used to -- hash the node with others whose names have the same hash -- value. The node is created with null Parent, FirstChild, -- NextSibling, PreviousSibling and an empty ListOfErrors. procedure AddNextChild (Data : in out ComponentData; HeapSeq : in out Heap.HeapRecord; Node : in Component; ChildSym : in Dictionary.Symbol); --# global in CommandLineData.Content; --# in out Statistics.TableUsage; --# derives Data, --# HeapSeq, --# Statistics.TableUsage from *, --# ChildSym, --# Data, --# HeapSeq, --# Node & --# null from CommandLineData.Content; -- pre Dictionary.IsRecordVarComponent(ChildSym); -- Creates ChildNode and adds it as the next child of Node. -- The name of ChildNode will be ChildSym. The Hash link -- will be used to hash the node with others whose names -- have the same hash value. The parent of ChildNode is -- Node and is created with null FirstChild, -- NextSibling, PreviousSibling and an empty ListOfErrors. -- Duplicate insertions are ignored (CFR 1766) function GetComponentNode (Data : ComponentData; Sym : Dictionary.Symbol) return Component; -- pre Dictionary.IsVariable(Sym) and -- Dictionary.TypeIsRecord(Dictionary.GetType(Sym)) -- or Dictionary.IsRecordVarComponent(Sym); -- Returns the node with Sym as name. function HasChildren (Data : ComponentData; Node : Component) return Boolean; -- Returns true if Node has any children; otherwise false. function IsNullComponent (Node : Component) return Boolean; -- Returns true if Node is null; otherwise false. function IsALeaf (Data : ComponentData; Node : Component) return Boolean; -- Returns true if Node is a leaf; otherwise false. --647-- function IsARoot (Data : ComponentData; Node : Component) return Boolean; -- Returns true if Node is a root; otherwise false. 
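   -- A typical traversal of the child/sibling pointers described above, as a
   -- hypothetical client sketch (Data and N are assumed to be a ComponentData
   -- value and a Component node respectively):
   --
   --    Child := GetFirstChild (Data, N);
   --    while not IsNullComponent (Child) loop
   --       -- ... process Child ...
   --       Child := GetNextSibling (Data, Child);
   --    end loop;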
function IsTransitiveParent (Data : ComponentData; Parent : Component; Node : Component) return Boolean; -- Returns true if Parent can be obtained from Node by a -- series of zero or more GetParent calls. Note that the -- function considers a node to be a transitive parent of -- itself. function GetRoot (Data : ComponentData; Node : Component) return Component; -- Returns the root of the tree of which Node is part. -- If Node itself is a root it returns Node. function GetParent (Data : ComponentData; Node : Component) return Component; -- Returns the immediate parent of Node. Returns null -- if Node is a root. function GetFirstChild (Data : ComponentData; Node : Component) return Component; -- Returns the first child of Node. Returns null if Node -- has no children. -- function GetNextSibling (Data : ComponentData; Node : Component) return Component; -- Returns the next sibling of Node. Result is null if -- Node has no next sibling i.e. it is the last child of -- its parent. function GetPreviousSibling (Data : ComponentData; Node : Component) return Component; -- Returns the previous sibling of Node. Result is null if -- Node has no previous sibling i.e. it is the first child -- of its parent. function GetName (Data : ComponentData; Node : Component) return Dictionary.Symbol; -- Returns the name of Node. procedure GetLeaves (HeapSeq : in out Heap.HeapRecord; Data : in ComponentData; Node : in Component; SeqOfLeafNames : out SeqAlgebra.Seq); --# global in out Statistics.TableUsage; --# derives HeapSeq, --# Statistics.TableUsage from *, --# Data, --# HeapSeq, --# Node & --# SeqOfLeafNames from HeapSeq; -- This returns a sequence of the names of all leaf nodes -- which are below Node. The sequence is created on -- HeapSeq (and not on the ComponentData heap). -- If Node is a leaf an empty sequence is returned. procedure AddError (HeapSeq : in out Heap.HeapRecord; TheErrorHeap : in ComponentErrors.HeapOfErrors; Data : in ComponentData; Node : in Component; NewError : in Natural); --# global in out Statistics.TableUsage; --# derives HeapSeq, --# Statistics.TableUsage from *, --# Data, --# HeapSeq, --# NewError, --# Node, --# TheErrorHeap; -- Adds NewError in the list of errors for Node. function GetListOfErrors (Data : ComponentData; Node : Component) return SeqAlgebra.Seq; -- Returns the sequence of errors on Node. procedure AddNewListOfErrors (HeapSeq : in out Heap.HeapRecord; Data : in out ComponentData; Node : in Component; NewErrorList : in SeqAlgebra.Seq); --# derives Data from *, --# NewErrorList, --# Node & --# HeapSeq from *, --# Data, --# Node; -- Dispose of existing ListOfErrors (including the case where this -- is an empty sequence) on Node and set it to NewErrorList. procedure EmptyListOfErrors (HeapSeq : in out Heap.HeapRecord; Data : in out ComponentData; Node : in Component); --# global in out Statistics.TableUsage; --# derives Data from *, --# HeapSeq, --# Node & --# HeapSeq, --# Statistics.TableUsage from *, --# HeapSeq; -- Set the ListOfErrors on Node to a new empty sequence. Do not -- dispose of the existing list of errors on Node as this may have -- been added to another node by AddNewListOfErrors. 
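   -- For illustration, using the invented Outer/Inner example from the header
   -- comment: GetLeaves applied to the node for V.B yields a sequence holding
   -- the names of V.B.C and V.B.D, whereas GetLeaves applied to the leaf node
   -- V.A yields an empty sequence.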
-- New function for use by MergeAndHandleErrors function GetFirstRoot (Data : ComponentData) return Component; -- New function for use by MergeAndHandleErrors function GetNextRoot (Data : ComponentData; RootNode : Component) return Component; function ComponentToRef (C : Component) return Natural; function RefToComponent (N : Natural) return Component; procedure ReportUsage (Data : in ComponentData); --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage from *, --# Data; -- Outputs a tree of the components starting at Node procedure Dump_Component_Tree (Data : in ComponentData; Node : in Component; Indentation : in Natural); --# derives null from Data, --# Indentation, --# Node; -- Outputs trees of all components in the manager procedure Dump_All_Component_Trees (Data : in ComponentData); --# derives null from Data; private -- Component Tree Structure MaxNumComponents : constant := ExaminerConstants.MaxRecordComponents; type Component is range 0 .. MaxNumComponents; subtype ComponentIndex is Component range 1 .. MaxNumComponents; NullComponent : constant Component := 0; type ComponentDescriptor is record Name : Dictionary.Symbol; ListOfErrors : SeqAlgebra.Seq; NextRoot, Hash, Parent, FirstChild, LastChild, --not in AAk's design, added for efficiency reasons NextSibling, PreviousSibling : Component; end record; type ArrayOfComponents is array (ComponentIndex) of ComponentDescriptor; type HeapOfComponents is record ListOfComponents : ArrayOfComponents; FirstRoot : Component; HighMark : Component; end record; --Hash Table of Root Components HashMax : constant Integer := MaxNumComponents / 10; subtype HashIndex is Integer range 0 .. HashMax; type HashTable is array (HashIndex) of Component; type ComponentData is record TheHeap : HeapOfComponents; TheTable : HashTable; end record; end ComponentManager; spark-2012.0.deb/examiner/labels.adb0000644000175000017500000001235511753202336016171 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Clists, Structures; package body Labels is function LabelHead (L : Label) return Cells.Cell is begin return Cells.Cell (L); end LabelHead; --------------------------------------------------------------------- function FirstPair (Heap : Cells.Heap_Record; L : Label) return Pairs.Pair is begin return Pairs.CellToPair (Clists.FirstCell (Heap, Cells.Cell (L))); end FirstPair; --------------------------------------------------------------------- function NextPair (Heap : Cells.Heap_Record; P : Pairs.Pair) return Pairs.Pair is begin return Pairs.CellToPair (Clists.NextCell (Heap, Pairs.PairHead (P))); end NextPair; --------------------------------------------------------------------- function LastPair (Heap : Cells.Heap_Record; L : Label) return Pairs.Pair is begin return Pairs.CellToPair (Clists.LastCell (Heap, Cells.Cell (L))); end LastPair; pragma Unreferenced (LastPair); -- Unused at present --------------------------------------------------------------------- function IsNull (L : Label) return Boolean is begin return Cells.Is_Null_Cell (Cells.Cell (L)); end IsNull; --------------------------------------------------------------------- function CellToLabel (C : Cells.Cell) return Label is begin return Label (C); end CellToLabel; --------------------------------------------------------------------- procedure AppendPair (Heap : in out Cells.Heap_Record; NewPair : in Pairs.Pair; LabelName : in Label) is begin Clists.AppendCell (Heap, Pairs.PairHead (NewPair), Cells.Cell (LabelName)); end AppendPair; --------------------------------------------------------------------- procedure CreateLabel (Heap : in out Cells.Heap_Record; NewLabel : out Label) is CellName : Cells.Cell; begin Cells.Create_Cell (Heap, CellName); NewLabel := Label (CellName); end CreateLabel; --------------------------------------------------------------------- procedure CopyLabel (Heap : in out Cells.Heap_Record; Original : in Label; Copy : out Label) is CopyName : Cells.Cell; begin Structures.CopyStructure (Heap, Cells.Cell (Original), CopyName); Copy := Label (CopyName); end CopyLabel; --------------------------------------------------------------------- procedure AddLabels (Heap : in out Cells.Heap_Record; Label_1 : in Label; Label_2 : in Label) is begin Clists.Concatenate (Heap, Cells.Cell (Label_1), Cells.Cell (Label_2)); end AddLabels; --------------------------------------------------------------------- procedure MultiplyLabels (Heap : in out Cells.Heap_Record; Label_1 : in Label; Label_2 : in Label; Product : out Label) is NextPair_1, NextPair_2, PairProduct, Pair_1, Pair_2, Pair_2Copy : Pairs.Pair; Label_1Copy, NewLabel : Label; begin CreateLabel (Heap, NewLabel); Pair_2 := FirstPair (Heap, Label_2); Cells.Dispose_Of_Cell (Heap, LabelHead (Label_2)); loop exit when Pairs.IsNullPair (Pair_2); NextPair_2 := NextPair (Heap, Pair_2); if Pairs.IsNullPair (NextPair_2) then Label_1Copy := Label_1; else CopyLabel (Heap, Label_1, Label_1Copy); end if; Pair_1 := FirstPair (Heap, Label_1Copy); Cells.Dispose_Of_Cell (Heap, LabelHead (Label_1Copy)); loop exit when Pairs.IsNullPair (Pair_1); NextPair_1 := NextPair (Heap, Pair_1); if Pairs.IsNullPair (NextPair_1) then Pair_2Copy := Pair_2; else Pairs.CopyPair (Heap, Pair_2, Pair_2Copy); end if; Pairs.MultiplyPairs (Heap, Pair_1, Pair_2Copy, PairProduct); AppendPair (Heap, PairProduct, NewLabel); Pair_1 := NextPair_1; end loop; Pair_2 := NextPair_2; end loop; Product := NewLabel; end MultiplyLabels; 
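   -- Note on the algebra implemented above: a Label is held as a list of
   -- Pairs, so AddLabels behaves as a sum (list concatenation) while
   -- MultiplyLabels distributes over that sum.  For example, if Label_1
   -- holds the pairs (P1, P2) and Label_2 holds (Q1, Q2), then Product holds
   -- the four pairs P1*Q1, P1*Q2, P2*Q1 and P2*Q2, where "*" denotes
   -- Pairs.MultiplyPairs.  The final iteration of each loop reuses the
   -- original pair or label rather than a copy, so copies are made only when
   -- a further iteration remains.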
end Labels; spark-2012.0.deb/examiner/sparklex-lex-ltintro.adb0000644000175000017500000000423111753202336021031 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (SparkLex.Lex) procedure LTIntro (Curr_Line : in out Line_Context; Token : out SP_Symbols.SP_Terminal) is Ch : Character; begin LineManager.Accept_Char (Curr_Line => Curr_Line); -- less_than already recognised case E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos) is when '=' => Token := SP_Symbols.less_or_equal; LineManager.Accept_Char (Curr_Line => Curr_Line); when '>' => Token := SP_Symbols.box; LineManager.Accept_Char (Curr_Line => Curr_Line); when '<' => Token := SP_Symbols.left_label_paren; LineManager.Accept_Char (Curr_Line => Curr_Line); when '-' => LineManager.Lookahead_Char (Curr_Line => Curr_Line, Ch => Ch); if Ch = '>' then Token := SP_Symbols.is_equivalent_to; LineManager.Accept_Lookahead (Curr_Line => Curr_Line); else Token := SP_Symbols.less_than; LineManager.Reject_Lookahead (Curr_Line => Curr_Line); end if; when others => Token := SP_Symbols.less_than; end case; end LTIntro; spark-2012.0.deb/examiner/errorhandler-conversions-tostring-note-noteexpl.adb0000644000175000017500000000362211753202337026430 0ustar eugeneugenseparate (ErrorHandler.Conversions.ToString.Note) procedure NoteExpl (E_Str : in out E_Strings.T) is begin case Err_Num.ErrorNum is when 1 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when information flow analysis is not performed and when" & " modes were specified in the global annotation. It is a reminder" & " that the dependencies specified in this annotation" & " (including whether each variable is an import or an export) have" & " not been checked against the code, and may therefore be incorrect." & " (warning control file keyword: notes)"); when 2 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued as a reminder when information flow analysis is not" & " performed in SPARK 83. The dependencies specified in this annotation" & " have not been checked against the code, and may" & " therefore be incorrect. (warning control file keyword: notes)"); when 3 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued as a reminder that the declaration of the type Address" & " within the target configuration file" & " implicitly defines a deferred constant of type Null_Address." 
& " (warning control file keyword: notes)"); when 4 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued as a reminder that the declaration of the subtype" & " Priority within the target configuration file implicitly defines" & " a constant Default_Priority, of type Priority, with the value" & " (Priority'First + Priority'Last) / 2." & " (warning control file keyword: notes)"); when others => null; end case; end NoteExpl; spark-2012.0.deb/examiner/declarations-outputdeclarations-generatedeclarations.adb0000644000175000017500000001507011753202336027504 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Declarations.OutputDeclarations) procedure GenerateDeclarations (Heap : in out Cells.Heap_Record; UsedSymbols : in Cells.Cell; Scope : in Dictionary.Scopes; NeededSymbols : out Cells.Cell) is TheCurrentNode : Cells.Cell; TheNextNode : Cells.Cell; ParentNode : Cells.Cell; PrevNode : Cells.Cell; SuccessorNodes : Cells.Cell; DeclareList : Cells.Cell; procedure GenerateSuccessors (Heap : in out Cells.Heap_Record; Symbol : in Dictionary.Symbol; Scope : in Dictionary.Scopes; SuccessorList : out Cells.Cell) --# global in AttributeList; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# AttributeList, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Scope, --# Symbol & --# SuccessorList from AttributeList, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Scope, --# Symbol; is separate; ----------------------------------------------------------------------- function InList (Symbol : Dictionary.Symbol; DeclareList : Cells.Cell) return Boolean --# global in Heap; is ThisNode : Cells.Cell; Found : Boolean; begin ThisNode := DeclareList; loop if Pile.IsNull (ThisNode) then Found := False; exit; end if; if Symbol = Pile.NodeSymbol (Heap, ThisNode) then Found := True; exit; end if; ThisNode := Pile.Sibling (Heap, ThisNode); end loop; return Found; end InList; ----------------------------------------------------------------------- procedure SelectNode (FromList : in out Cells.Cell; DeclareList : in Cells.Cell) --# global in out Heap; --# derives FromList, --# Heap from DeclareList, --# FromList, --# Heap; is OldNode : Cells.Cell; begin loop exit when Pile.IsNull (FromList); exit when not InList (Pile.NodeSymbol (Heap, FromList), DeclareList); OldNode := FromList; FromList := Pile.Sibling (Heap, OldNode); Pile.Free (Heap, OldNode); end loop; end SelectNode; ----------------------------------------------------------------------- procedure AddDeclarationInOrder 
(Sym : in Dictionary.Symbol; DAG : in Cells.Cell; DeclareList : in out Cells.Cell) --# global in out Heap; --# in out Statistics.TableUsage; --# derives DeclareList, --# Statistics.TableUsage from *, --# DeclareList, --# Heap, --# Sym & --# Heap from *, --# DAG, --# DeclareList, --# Sym; is begin Pile.Insert (Heap, Sym, DAG, DeclareList); end AddDeclarationInOrder; begin -- GenerateDeclarations; TheCurrentNode := UsedSymbols; DeclareList := Cells.Null_Cell; loop exit when Pile.IsNull (TheCurrentNode); -- Generate the _immediate_ successors of the Symbol at TheCurrentNote GenerateSuccessors (Heap, Pile.NodeSymbol (Heap, TheCurrentNode), Scope, SuccessorNodes); AddDeclarationInOrder (Pile.NodeSymbol (Heap, TheCurrentNode), Pile.DAG (Heap, TheCurrentNode), DeclareList); if not Pile.OrderOK (Heap, DeclareList) then Debug.PrintMsg ("DeclareList order BROKEN", True); Debug.PrintMsg ("--------DeclareList after AddDeclarationInOrder--------", True); Pile.PrintPile (Heap, DeclareList); Debug.PrintMsg ("-------------------------------------------------------", True); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Assertion_Failure, Msg => "DeclareList not in order after AddDeclarationInOrder"); end if; -- Now for the transitive closure of those first level successors, -- freeing duplicate nodes as we go along. SelectNode (SuccessorNodes, DeclareList); TheNextNode := SuccessorNodes; if Pile.IsNull (TheNextNode) then loop PrevNode := TheCurrentNode; TheCurrentNode := Pile.Sibling (Heap, PrevNode); ParentNode := Pile.Parent (Heap, PrevNode); Pile.Free (Heap, PrevNode); SelectNode (TheCurrentNode, DeclareList); if not Pile.IsNull (TheCurrentNode) then Pile.SetParent (Heap, TheCurrentNode, ParentNode); exit; end if; TheCurrentNode := ParentNode; exit when Pile.IsNull (TheCurrentNode); end loop; else Pile.SetParent (Heap, TheNextNode, TheCurrentNode); TheCurrentNode := TheNextNode; end if; end loop; NeededSymbols := DeclareList; end GenerateDeclarations; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_arange.adb0000644000175000017500000002356711753202336023203 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ---------------------------------------------------------------------------- -- Overview: Called to check validity of a -- arange node. 
Replaces calls to StaticARange, BaseTypeARange and -- CheckTypeARange ---------------------------------------------------------------------------- separate (Sem.Walk_Expression_P) procedure Wf_Arange (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type) is Next_Node : STree.SyntaxNode; Left, Right, Result : Sem.Exp_Record; Left_Type, Right_Type : Dictionary.Symbol; --------------------------------------------------------------- procedure Integer_Implicit_Type_Conversion (Left_Type, Right_Type : in out Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in Dictionary.Dict; --# derives Left_Type, --# Right_Type from Dictionary.Dict, --# Left_Type, --# Right_Type, --# Scope; is begin if Dictionary.IsUniversalIntegerType (Left_Type) then if Dictionary.IsIntegerTypeMark (Right_Type, Scope) or else Dictionary.IsModularTypeMark (Right_Type, Scope) then Left_Type := Right_Type; end if; elsif Dictionary.IsUniversalIntegerType (Right_Type) then if Dictionary.IsIntegerTypeMark (Left_Type, Scope) or else Dictionary.IsModularTypeMark (Left_Type, Scope) then Right_Type := Left_Type; end if; end if; end Integer_Implicit_Type_Conversion; --------------------------------------------------------------- procedure Real_Implicit_Type_Conversion (Left_Type, Right_Type : in out Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in Dictionary.Dict; --# derives Left_Type, --# Right_Type from Dictionary.Dict, --# Left_Type, --# Right_Type, --# Scope; is begin if Dictionary.IsUniversalRealType (Left_Type) then if Dictionary.IsRealTypeMark (Right_Type, Scope) then Left_Type := Right_Type; end if; elsif Dictionary.IsUniversalRealType (Right_Type) then if Dictionary.IsRealTypeMark (Left_Type, Scope) then Right_Type := Left_Type; end if; end if; end Real_Implicit_Type_Conversion; --------------------------------------------------------------- function Range_Is_Empty (Left, Right : Maths.Value) return Boolean is Unused : Maths.ErrorCode; Maths_Result : Maths.Value; Func_Result : Boolean; begin --# accept Flow, 10, Unused, "Expected ineffective assignment : not used because it can only be ok or type mismatch"; Maths.Lesser (Right, Left, -- to get Maths_Result, Unused); --# end accept; --# accept Flow, 10, Unused, "Expected ineffective assignment"; Maths.ValueToBool (Maths_Result, -- to get Func_Result, Unused); --# end accept; --# accept Flow, 33, Unused, "Expected to be neither referenced nor exported"; return Func_Result; end Range_Is_Empty; begin -- Wf_Arange Next_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Next_Node = attribute OR simple_expression -- annotation_attribute OR annotation_simple_expression if STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.attribute or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_attribute then -- ASSUME Next_Node = attribute OR annotation_attribute Exp_Stack.Pop (Item => Result, Stack => E_Stack); if not Result.Is_ARange then Result.Is_ARange := True; Result.Errors_In_Expression := True; ErrorHandler.Semantic_Error (Err_Num => 98, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Next_Node), Id_Str => LexTokenManager.Null_String); end if; elsif STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.simple_expression or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.annotation_simple_expression then -- ASSUME Next_Node = simple_expression OR annotation_simple_expression -- explicit range of the form "Left .. 
Right" Exp_Stack.Pop (Item => Right, Stack => E_Stack); Exp_Stack.Pop (Item => Left, Stack => E_Stack); Result := Null_Type_Record; -- safety : we may not set all fields below -- In this case neither "Left" nor "Right" can themselves denote a Range. -- The following two checks prevent cases such as -- S'First .. S'Range -- S'Range .. S'Last -- S'Range .. S'Range -- which are all illegal. We check both Left and Right separately so -- that two errors are issued for the latter case. if Left.Is_ARange then Result := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 114, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Next_Node), Id_Str => LexTokenManager.Null_String); end if; if Right.Is_ARange then Result := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 114, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => STree.Next_Sibling (Next_Node)), Id_Str => LexTokenManager.Null_String); end if; if not Left.Is_ARange and then not Right.Is_ARange then -- Neither Left nor Right is a Range, so we can proceed... Result.Is_Constant := Left.Is_Constant and then Right.Is_Constant; Result.Is_Static := Left.Is_Static and then Right.Is_Static; Result.Is_ARange := True; Left_Type := Dictionary.GetRootType (Left.Type_Symbol); Right_Type := Dictionary.GetRootType (Right.Type_Symbol); Integer_Implicit_Type_Conversion (Left_Type => Left_Type, Right_Type => Right_Type, Scope => Scope); Real_Implicit_Type_Conversion (Left_Type => Left_Type, Right_Type => Right_Type, Scope => Scope); if not Dictionary.Types_Are_Equal (Left_Symbol => Left_Type, Right_Symbol => Right_Type, Full_Range_Subtype => False) then Result := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 42, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => STree.Next_Sibling (Next_Node)), Id_Str => LexTokenManager.Null_String); elsif not (Dictionary.IsScalarType (Left_Type, Scope) or else Dictionary.IsUnknownTypeMark (Left_Type)) then Result := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 44, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); else Result.Type_Symbol := Left_Type; Result.Value := Left.Value; Result.Range_RHS := Right.Value; -- check that static range is non empty if STree.Syntax_Node_Type (Node => Node) = SP_Symbols.arange and then Range_Is_Empty (Left => Left.Value, Right => Right.Value) then Result.Value := Maths.NoValue; Result.Range_RHS := Maths.NoValue; Result.Errors_In_Expression := True; ErrorHandler.Semantic_Error (Err_Num => 409, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; end if; end if; Result.Errors_In_Expression := Result.Errors_In_Expression or else Left.Errors_In_Expression or else Right.Errors_In_Expression; else Result := Sem.Null_Exp_Record; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = attribute OR simple_expression OR " & "annotation_attribute OR annotation_simple_expression in Wf_Arange"); end if; Exp_Stack.Push (X => Result, Stack => E_Stack); end Wf_Arange; spark-2012.0.deb/examiner/dictionary-lookupitem.adb0000644000175000017500000005405311753202336021263 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited 
------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Dictionary) function LookupItem (Name : LexTokenManager.Lex_String; Scope : Scopes; Context : Contexts; Full_Package_Name : Boolean) return Symbol is Item, Current_Region : Symbol; Package_Item : RawDict.Package_Info_Ref; Is_Visible, In_A_Subprogram : Boolean; Current_Scope, Enclosing_Scope : Scopes; Stop_At : LexTokenManager.Lex_String; ------------------------------------------------------------------------------ procedure Lookup_Context_Clauses (Name : in LexTokenManager.Lex_String; Scope : in Scopes; Start_Pos : in Scopes; Context : in Contexts; Full_Package_Name : in Boolean; Item : out Symbol; Is_Visible : out Boolean) --# global in Dict; --# in LexTokenManager.State; --# derives Is_Visible from Context, --# Dict, --# Full_Package_Name, --# LexTokenManager.State, --# Name, --# Scope, --# Start_Pos & --# Item from Dict, --# Full_Package_Name, --# LexTokenManager.State, --# Name, --# Scope; is Region : Symbol; The_Generic_Unit : RawDict.Generic_Unit_Info_Ref; The_Inherit_Clause : RawDict.Context_Clause_Info_Ref; Current_Package : RawDict.Package_Info_Ref; Current_Subprogram : RawDict.Subprogram_Info_Ref; Continue : Boolean := True; -------------------------------------------------------------------------------- function Is_Package_Directly_Visible (The_Package : RawDict.Package_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is The_Parent, Library_Package : RawDict.Package_Info_Ref; Result : Boolean; begin The_Parent := RawDict.Get_Package_Parent (The_Package => The_Package); if The_Parent = RawDict.Null_Package_Info_Ref then Result := True; elsif not (RawDict.GetSymbolDiscriminant (GetRegion (Scope)) = Package_Symbol) then Result := False; else -- The_Package is a child and Scope is in another package -- OK if Scope is (possibly embedded within) The_Package's parent -- or a descendent of the parent Library_Package := Get_Library_Package (Scope => Scope); Result := Library_Package = The_Parent or else Is_Proper_Descendent (Inner_Package => Library_Package, Outer_Package => The_Parent); end if; return Result; end Is_Package_Directly_Visible; -------------------------------------------------------------------------------- function Is_Subprogram_Directly_Visible (The_Generic_Subprogram : RawDict.Subprogram_Info_Ref) return Boolean --# global in Dict; is begin return RawDict.Get_Subprogram_Generic_Unit (The_Subprogram => The_Generic_Subprogram) /= RawDict.Null_Generic_Unit_Info_Ref; end Is_Subprogram_Directly_Visible; -------------------------------------------------------------------------------- function Has_Been_Withed (The_Withed_Symbol : Symbol; Scope : Scopes) return Boolean --# global in Dict; is Current_Scope, Last1 
: Scopes; Ancestor : RawDict.Package_Info_Ref; Found : Boolean; begin Current_Scope := Scope; Last1 := Current_Scope; loop exit when (RawDict.GetSymbolDiscriminant (GetRegion (Current_Scope)) = Package_Symbol and then RawDict.Get_Package_Info_Ref (Item => GetRegion (Current_Scope)) = Get_Predefined_Package_Standard) or else Is_Withed (The_Withed_Symbol => The_Withed_Symbol, Scope => Current_Scope); Last1 := Current_Scope; Current_Scope := GetEnclosingScope (Current_Scope); end loop; Found := RawDict.GetSymbolDiscriminant (GetRegion (Current_Scope)) /= Package_Symbol or else RawDict.Get_Package_Info_Ref (Item => GetRegion (Current_Scope)) /= Get_Predefined_Package_Standard; if not Found and then Last1 /= Current_Scope and then RawDict.GetSymbolDiscriminant (GetRegion (Last1)) = Package_Symbol then -- search through ancestors Ancestor := RawDict.Get_Package_Parent (The_Package => RawDict.Get_Package_Info_Ref (Item => GetRegion (Last1))); loop exit when Ancestor = RawDict.Null_Package_Info_Ref or else Is_Withed (The_Withed_Symbol => The_Withed_Symbol, Scope => Set_Visibility (The_Visibility => Visible, The_Unit => RawDict.Get_Package_Symbol (Ancestor))); Ancestor := RawDict.Get_Package_Parent (The_Package => Ancestor); end loop; Found := Ancestor /= RawDict.Null_Package_Info_Ref; end if; return Found; end Has_Been_Withed; begin -- Lookup_Context_Clauses Trace_Lex_Str (Msg => " In Lookup_Context_Clauses, seeking ", L => Name); Item := NullSymbol; Is_Visible := False; Region := GetRegion (Scope); case RawDict.GetSymbolDiscriminant (Region) is when Package_Symbol => The_Inherit_Clause := RawDict.Get_Package_Inherit_Clauses (The_Package => RawDict.Get_Package_Info_Ref (Item => Region)); when Subprogram_Symbol => if Is_Main_Program (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Region)) then The_Inherit_Clause := RawDict.Get_Subprogram_Inherit_Clauses (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Region)); else The_Inherit_Clause := RawDict.Null_Context_Clause_Info_Ref; end if; when Generic_Unit_Symbol => The_Generic_Unit := RawDict.Get_Generic_Unit_Info_Ref (Item => Region); case RawDict.Get_Generic_Unit_Kind (The_Generic_Unit => The_Generic_Unit) is when Generic_Of_Package => The_Inherit_Clause := RawDict.Get_Package_Inherit_Clauses (The_Package => RawDict.Get_Generic_Unit_Owning_Package (The_Generic_Unit => The_Generic_Unit)); when Generic_Of_Subprogram => The_Inherit_Clause := RawDict.Get_Subprogram_Inherit_Clauses (The_Subprogram => RawDict.Get_Generic_Unit_Owning_Subprogram (The_Generic_Unit => The_Generic_Unit)); end case; when Type_Symbol => if Is_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Region)) and then RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Region)) = Protected_Type_Item then Region := RawDict.Get_Package_Symbol (Get_Enclosing_Package (Scope => Scope)); The_Inherit_Clause := RawDict.Get_Package_Inherit_Clauses (The_Package => Get_Enclosing_Package (Scope => Scope)); else The_Inherit_Clause := RawDict.Null_Context_Clause_Info_Ref; end if; when others => -- non-exec code The_Inherit_Clause := RawDict.Null_Context_Clause_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Lookup_Context_Clauses"); end case; while Continue loop if The_Inherit_Clause = RawDict.Null_Context_Clause_Info_Ref then Item := NullSymbol; Is_Visible := False; Continue := False; end if; case RawDict.Get_Context_Clause_Is_Subprogram (The_Context_Clause => The_Inherit_Clause) is when False 
=> Current_Package := RawDict.Get_Context_Clause_Package (The_Context_Clause => The_Inherit_Clause); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => RawDict.Get_Package_Name (The_Package => Current_Package), Lex_Str2 => Name) = LexTokenManager.Str_Eq and then Is_Package_Directly_Visible (The_Package => Current_Package, Scope => Scope) and then (not Full_Package_Name or else RawDict.Get_Package_Parent (The_Package => Current_Package) = RawDict.Null_Package_Info_Ref) then Item := RawDict.Get_Package_Symbol (Current_Package); Is_Visible := Context = ProofContext or else Is_Embedded_Package (The_Package => Current_Package) or else IsLocal (Scope, Set_Visibility (The_Visibility => Local, The_Unit => RawDict.Get_Package_Symbol (Current_Package))) or else (RawDict.GetSymbolDiscriminant (Region) = Package_Symbol and then Is_Proper_Descendent (Inner_Package => Get_Library_Package (Scope => Scope), Outer_Package => Current_Package)) or else Has_Been_Withed (The_Withed_Symbol => RawDict.Get_Package_Symbol (Current_Package), Scope => Start_Pos); Continue := False; end if; when True => Current_Subprogram := RawDict.Get_Context_Clause_Subprogram (The_Context_Clause => The_Inherit_Clause); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => RawDict.Get_Subprogram_Name (The_Subprogram => Current_Subprogram), Lex_Str2 => Name) = LexTokenManager.Str_Eq and then Is_Subprogram_Directly_Visible (The_Generic_Subprogram => Current_Subprogram) then Item := RawDict.Get_Subprogram_Symbol (Current_Subprogram); Is_Visible := Context = ProofContext or else IsLocal (Scope, Set_Visibility (The_Visibility => Local, The_Unit => RawDict.Get_Subprogram_Symbol (Current_Subprogram))) or else Has_Been_Withed (The_Withed_Symbol => RawDict.Get_Subprogram_Symbol (Current_Subprogram), Scope => Start_Pos); Continue := False; end if; end case; The_Inherit_Clause := RawDict.Get_Next_Context_Clause (The_Context_Clause => The_Inherit_Clause); end loop; Trace_Sym (Msg => " found in Lookup_Context_Clauses ", Sym => Item, Scope => Scope); end Lookup_Context_Clauses; ------------------------------------------------------------------------------ procedure Lookup_Children (Child_Sort : in PackageSort; Name : in LexTokenManager.Lex_String; The_Package : in RawDict.Package_Info_Ref; Scope : in Scopes; Context : in Contexts; Package_Item : out RawDict.Package_Info_Ref; Is_Visible : out Boolean) --# global in Dict; --# in LexTokenManager.State; --# derives Is_Visible, --# Package_Item from Child_Sort, --# Context, --# Dict, --# LexTokenManager.State, --# Name, --# Scope, --# The_Package; is Current_Package : RawDict.Package_Info_Ref; ----------------------------------------- function Check_Is_Withed (The_Package : RawDict.Package_Info_Ref; Scope : Scopes; Context : Contexts) return RawDict.Package_Info_Ref --# global in Dict; is Current_Scope : Scopes; Result : RawDict.Package_Info_Ref; begin if Context = ProofContext then Result := The_Package; else Current_Scope := Scope; loop exit when (RawDict.GetSymbolDiscriminant (GetRegion (Current_Scope)) = Package_Symbol and then RawDict.Get_Package_Info_Ref (Item => GetRegion (Current_Scope)) = Get_Predefined_Package_Standard) or else Is_Withed (The_Withed_Symbol => RawDict.Get_Package_Symbol (The_Package), Scope => Current_Scope); Current_Scope := GetEnclosingScope (Current_Scope); end loop; if RawDict.GetSymbolDiscriminant (GetRegion (Current_Scope)) = Package_Symbol and then RawDict.Get_Package_Info_Ref (Item => GetRegion (Current_Scope)) = 
Get_Predefined_Package_Standard then Result := RawDict.Null_Package_Info_Ref; else Result := The_Package; end if; end if; return Result; end Check_Is_Withed; begin -- Lookup_Children Package_Item := RawDict.Null_Package_Info_Ref; case Child_Sort is when Public => Current_Package := RawDict.Get_Package_First_Public_Child (The_Package => The_Package); when PrivateChild => Current_Package := RawDict.Get_Package_First_Private_Child (The_Package => The_Package); end case; loop exit when Current_Package = RawDict.Null_Package_Info_Ref; if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => RawDict.Get_Package_Name (The_Package => Current_Package), Lex_Str2 => Name) = LexTokenManager.Str_Eq then Package_Item := Current_Package; exit; end if; Current_Package := RawDict.Get_Package_Next_Sibling (The_Package => Current_Package); end loop; if Package_Item /= RawDict.Null_Package_Info_Ref then Package_Item := Check_Is_Withed (The_Package => Package_Item, Scope => Scope, Context => Context); end if; Is_Visible := Package_Item /= RawDict.Null_Package_Info_Ref; end Lookup_Children; begin -- LookupItem Current_Scope := Scope; Current_Region := GetRegion (Current_Scope); TraceMsg ("--------------------------------------------------------------------------"); Trace_Lex_Str (Msg => "In LookupItem, seeking ", L => Name); Trace_Sym (Msg => " in ", Sym => Current_Region, Scope => Scope); loop LookupScope (Name => Name, Stop_At => LexTokenManager.Null_String, Scope => Current_Scope, Calling_Scope => Current_Scope, Context => Context, Item => Item, Is_Visible => Is_Visible); exit when Item /= NullSymbol; exit when IsCompilationUnit (Current_Region) or else (RawDict.GetSymbolDiscriminant (Current_Region) = Type_Symbol and then (Is_Protected_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Current_Region)) or else Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Current_Region)))); Current_Scope := GetEnclosingScope (Current_Scope); Current_Region := GetRegion (Current_Scope); end loop; if Item = NullSymbol then In_A_Subprogram := Is_Subprogram (Current_Region) or else (RawDict.GetSymbolDiscriminant (Current_Region) = Generic_Unit_Symbol and then RawDict.Get_Generic_Unit_Kind (The_Generic_Unit => RawDict.Get_Generic_Unit_Info_Ref (Item => Current_Region)) = Generic_Of_Subprogram) or else (RawDict.GetSymbolDiscriminant (Current_Region) = Type_Symbol and then Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Current_Region))); loop if RawDict.GetSymbolDiscriminant (Current_Region) = Package_Symbol then Lookup_Children (Child_Sort => PrivateChild, Name => Name, The_Package => RawDict.Get_Package_Info_Ref (Item => Current_Region), Scope => Scope, Context => Context, Package_Item => Package_Item, Is_Visible => Is_Visible); Item := RawDict.Get_Package_Symbol (Package_Item); else Item := NullSymbol; Is_Visible := False; end if; exit when Item /= NullSymbol; if RawDict.GetSymbolDiscriminant (Current_Region) = Package_Symbol then --# accept F, 41, "Structurally this is the preferred placing for this condition"; if CommandLineData.Content.Language_Profile in CommandLineData.Auto_Code_Generators then Lookup_Children (Child_Sort => Public, Name => Name, The_Package => RawDict.Get_Package_Info_Ref (Item => Current_Region), Scope => Scope, Context => Context, Package_Item => Package_Item, Is_Visible => Is_Visible); Item := RawDict.Get_Package_Symbol (Package_Item); end if; --# end accept; else Item := NullSymbol; Is_Visible := False; end if; exit when Item /= NullSymbol; 
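-- Not found among the current region's child packages, so now search the
-- inherit (context) clauses of the current region.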
Lookup_Context_Clauses (Name => Name, Scope => Current_Scope, Start_Pos => Scope, Context => Context, Full_Package_Name => Full_Package_Name, Item => Item, Is_Visible => Is_Visible); exit when Item /= NullSymbol or else RawDict.GetSymbolDiscriminant (Current_Region) = Package_Symbol or else RawDict.GetSymbolDiscriminant (Current_Region) = Generic_Unit_Symbol or else (RawDict.GetSymbolDiscriminant (Current_Region) = Subprogram_Symbol and then Is_Main_Program (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Current_Region))); Enclosing_Scope := GetEnclosingScope (Current_Scope); if Is_Subprogram (Current_Region) and then Get_Visibility (Scope => Enclosing_Scope) = Local then Stop_At := GetSimpleName (Current_Region); elsif RawDict.GetSymbolDiscriminant (Current_Region) = Type_Symbol and then (Is_Protected_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Current_Region)) or else Is_Task_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Current_Region))) and then Get_Visibility (Scope => Enclosing_Scope) = Local then Stop_At := GetSimpleName (Current_Region); else Stop_At := LexTokenManager.Null_String; end if; LookupScope (Name => Name, Stop_At => Stop_At, Scope => Enclosing_Scope, Calling_Scope => Enclosing_Scope, Context => Context, Item => Item, Is_Visible => Is_Visible); if In_A_Subprogram and then Is_Variable (Item) then Is_Visible := False; end if; exit when Item /= NullSymbol; Current_Scope := Enclosing_Scope; Current_Region := GetRegion (Current_Scope); end loop; if Item = NullSymbol and then (RawDict.GetSymbolDiscriminant (Current_Region) /= Package_Symbol or else RawDict.Get_Package_Info_Ref (Item => Current_Region) /= Get_Predefined_Package_Standard) then LookupScope (Name => Name, Stop_At => LexTokenManager.Null_String, Scope => Predefined_Scope, Calling_Scope => Predefined_Scope, Context => Context, Item => Item, Is_Visible => Is_Visible); end if; end if; if not Is_Visible then Item := NullSymbol; end if; Trace_Sym (Msg => "Found in LookUpItem ", Sym => Item, Scope => Scope); TraceMsg ("--------------------------------------------------------------------------"); return Item; end LookupItem; spark-2012.0.deb/examiner/sem-compunit-wf_protected_body.adb0000644000175000017500000002466511753202336023056 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.CompUnit) procedure Wf_Protected_Body (Node : in STree.SyntaxNode; Scope : in out Dictionary.Scopes; Next_Node : out STree.SyntaxNode) is ------------------------------------------------------------------------ -- Checks required: -- 1. 
A protected type of declaration of the same name exists -- 2. No body for it exists already (and, if subunit, a stub does exist) -- 3. The closing identifier matches the initial -- 4. Each operation in the spec has a body -- 5. The second annotations on the operation bodies are refined correctly -- 6. Add body if wellformed ------------------------------------------------------------------------ Ident_Node, Protected_Operation_Item_Node, Closing_Ident_Node, With_Node : STree.SyntaxNode; Protected_Type_Sym : Dictionary.Symbol; Ident_Str, Closing_Str : LexTokenManager.Lex_String; In_Subunit, OK_To_Add : Boolean; Protected_Scope : Dictionary.Scopes; procedure Check_OK_To_Add (Type_Sym : in Dictionary.Symbol; In_Subunit : in Boolean; Ident_Pos : in LexTokenManager.Token_Position; Ident_Str : in LexTokenManager.Lex_String; OK_To_Add : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Pos, --# Ident_Str, --# In_Subunit, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Type_Sym & --# OK_To_Add from Dictionary.Dict, --# In_Subunit, --# Type_Sym; is begin OK_To_Add := True; if In_Subunit then -- we require a stub but must not have a previous body if Dictionary.HasBody (Type_Sym) then OK_To_Add := False; ErrorHandler.Semantic_Error (Err_Num => 997, Reference => ErrorHandler.No_Reference, Position => Ident_Pos, Id_Str => Ident_Str); end if; if not Dictionary.HasBodyStub (Type_Sym) then OK_To_Add := False; ErrorHandler.Semantic_Error (Err_Num => 15, Reference => ErrorHandler.No_Reference, Position => Ident_Pos, Id_Str => Ident_Str); end if; else -- we must have neither stub nor previous body if Dictionary.HasBody (Type_Sym) or else Dictionary.HasBodyStub (Type_Sym) then OK_To_Add := False; ErrorHandler.Semantic_Error (Err_Num => 997, Reference => ErrorHandler.No_Reference, Position => Ident_Pos, Id_Str => Ident_Str); end if; end if; end Check_OK_To_Add; begin -- Wf_Protected_Body -- Node is set to NullNode if there is an error in the protected body declaration -- and so stops the Compunit tree walk at that point. If the body is ok then we -- set the Node to the ProtectedOperationNode so that the main tree walk will -- find the various declarations. We set Node to NullNode here as a default. 
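-- Illustrative example (the name PT is hypothetical): for
--    protected body PT is ... end PT;
-- the checks below require that a protected type declaration for PT is
-- already in the Dictionary, that no body for it exists yet (and, for a
-- subunit, that a stub does exist), and that the closing identifier matches
-- the opening one.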
Next_Node := STree.NullNode; -- Set up key nodes Ident_Node := Child_Node (Current_Node => Node); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Wf_Protected_Body"); Ident_Str := Node_Lex_String (Node => Ident_Node); Protected_Operation_Item_Node := Next_Sibling (Current_Node => Ident_Node); -- ASSUME Protected_Operation_Item_Node = protected_operation_item SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Protected_Operation_Item_Node) = SP_Symbols.protected_operation_item, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Protected_Operation_Item_Node = protected_operation_item in Wf_Protected_Body"); Closing_Ident_Node := Next_Sibling (Current_Node => Protected_Operation_Item_Node); -- ASSUME Closing_Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Closing_Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Closing_Ident_Node = identifier in Wf_Protected_Body"); Closing_Str := Node_Lex_String (Node => Closing_Ident_Node); Protected_Type_Sym := Dictionary.LookupItem (Name => Ident_Str, Scope => Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); -- see if already declared if Dictionary.IsType (Protected_Type_Sym) and then Dictionary.IsProtectedTypeMark (Protected_Type_Sym) then -- potentially ok STree.Set_Node_Lex_String (Sym => Protected_Type_Sym, Node => Ident_Node); -- enter local scope of newly-added protected body Protected_Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Protected_Type_Sym); -- see if we are a subunit or an ordinary in-line declaration With_Node := Parent_Node (Current_Node => Parent_Node (Current_Node => Node)); -- ASSUME With_Node = subunit OR abody if Syntax_Node_Type (Node => With_Node) = SP_Symbols.abody then -- ASSUME With_Node = abody In_Subunit := False; elsif Syntax_Node_Type (Node => With_Node) = SP_Symbols.subunit then -- ASSUME With_Node = subunit In_Subunit := True; With_Node := Child_Node (Current_Node => Child_Node (Current_Node => Parent_Node (Current_Node => Parent_Node (Current_Node => With_Node)))); -- ASSUME With_Node = subunit OR with_clause if Syntax_Node_Type (Node => With_Node) = SP_Symbols.with_clause then With_Node := Parent_Node (Current_Node => With_Node); -- ASSUME With_Node = context_clause SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => With_Node) = SP_Symbols.context_clause, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect With_Node = context_clause in Wf_Protected_Body"); Wf_Context_Clause (Node => With_Node, Comp_Sym => Protected_Type_Sym, Scope => Protected_Scope); elsif Syntax_Node_Type (Node => With_Node) /= SP_Symbols.subunit then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect With_Node = subunit OR with_clause in Wf_Protected_Body"); end if; else In_Subunit := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect With_Node = subunit OR abody in Wf_Protected_Body"); end if; -- see if a body has already been declared etc. 
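-- (for a subunit a stub must already exist and there must be no previous
--  body; for an in-line body neither a stub nor a body may exist)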
Check_OK_To_Add (Type_Sym => Protected_Type_Sym, In_Subunit => In_Subunit, Ident_Pos => Node_Position (Node => Ident_Node), Ident_Str => Ident_Str, OK_To_Add => OK_To_Add); if OK_To_Add then Dictionary.AddBody (CompilationUnit => Protected_Type_Sym, Comp_Unit => ContextManager.Ops.Current_Unit, TheBody => Dictionary.Location'(Start_Position => Node_Position (Node => Ident_Node), End_Position => Node_Position (Node => Ident_Node)), Hidden => False); Next_Node := Protected_Operation_Item_Node; Scope := Protected_Scope; -- now check each declared operation in main Compunit tree walk end if; else -- either there is no spec to match the body or it not a protected type ErrorHandler.Semantic_Error (Err_Num => 998, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); end if; -- Closing identifier check if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => Closing_Str) /= LexTokenManager.Str_Eq then ErrorHandler.Semantic_Error (Err_Num => 58, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Closing_Ident_Node), Id_Str => Ident_Str); end if; end Wf_Protected_Body; spark-2012.0.deb/examiner/sem-wf_pragma-wf_attach_handler.adb0000644000175000017500000005513511753202336023112 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Wf_Pragma) procedure Wf_Attach_Handler (Pragma_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) is -- Checks: -- (1) Positional association -- (2) Exactly 2 arguments -- (3) First is the procedure name -- (4) 2nd is ignore for now -- (5) pragma immediately follows procedure -- -- Rule 6 removed after design rethink (6) only one attach_handler per PT -- (7) procedure must be parameterless -- (8) must be in PT The_Region : Dictionary.Symbol; Proc_Spec_Node : STree.SyntaxNode; Proc_Ident : LexTokenManager.Lex_String; Proc_Sym : Dictionary.Symbol; Error_Found : Boolean := False; procedure Find_Proc_Spec (Pragma_Node : in STree.SyntaxNode; Proc_Spec_Node : out STree.SyntaxNode) --# global in STree.Table; --# derives Proc_Spec_Node from Pragma_Node, --# STree.Table; --# pre Syntax_Node_Type (Pragma_Node, STree.Table) = SP_Symbols.apragma; --# post Syntax_Node_Type (Proc_Spec_Node, STree.Table) = SP_Symbols.procedure_specification or --# Syntax_Node_Type (Proc_Spec_Node, STree.Table) = SP_Symbols.function_specification or --# Syntax_Node_Type (Proc_Spec_Node, STree.Table) = SP_Symbols.entry_specification or --# Syntax_Node_Type (Proc_Spec_Node, STree.Table) = SP_Symbols.identifier or --# Syntax_Node_Type (Proc_Spec_Node, STree.Table) = SP_Symbols.assert_pragma; -- locates the place where a procedure_specification should be if the pragma is -- correctly placed. is Current_Node : STree.SyntaxNode; begin -- There are two cases to consider: the attach_handler follows the first subprogram in the PT; or -- it follows some later subprogram declaration. -- Note that the protected_operation_declaration_rep grammar means that the sequence of declarations -- is "upside down" with the first declaration at the bottom. 
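-- We therefore start at the pragma's parent and examine the
-- protected_operation_declaration_rep to its left: if it is empty, the pragma
-- follows the first subprogram of the protected type (immediately after the
-- priority pragma); otherwise it follows some later declaration.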
Current_Node := Child_Node (Current_Node => Parent_Node (Current_Node => Pragma_Node)); -- ASSUME Current_Node = apragma OR -- pragma_rep OR -- initial_declarative_item_rep OR -- later_declarative_item_rep OR -- sequence_of_labels OR -- code_insertion OR -- visible_part_rep OR -- basic_declarative_item_rep OR -- renaming_declaration_rep OR -- task_pragma OR -- protected_operation_declaration_rep OR -- procedure_specification OR -- function_specification -- protected_operation_declaration_rep to left of pragma if Syntax_Node_Type (Node => Current_Node) = SP_Symbols.protected_operation_declaration_rep then -- ASSUME Current_Node = protected_operation_declaration_rep if Child_Node (Current_Node => Current_Node) = STree.NullNode then -- ASSUME Child_Node (Current_Node => Current_Node) = NULL -- The pragma is at the bottom of the sequence of protected_operation_declaration_reps and -- so we are dealing with FIRST subprogram in the PT (immediately after the priority pragma) -- Go to the top of the list of protected_operation_declaration_reps loop --# assert Syntax_Node_Type (Current_Node, STree.Table) = SP_Symbols.protected_operation_declaration_rep; Current_Node := Parent_Node (Current_Node => Current_Node); -- ASSUME Current_Node = protected_operation_declaration_rep OR protected_operation_declaration SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Current_Node) = SP_Symbols.protected_operation_declaration_rep or else Syntax_Node_Type (Node => Current_Node) = SP_Symbols.protected_operation_declaration, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = protected_operation_declaration_rep OR " & "protected_operation_declaration in Find_Proc_Spec"); exit when Syntax_Node_Type (Node => Current_Node) = SP_Symbols.protected_operation_declaration; end loop; -- ASSUME Current_Node = protected_operation_declaration -- Move to procedure spec Proc_Spec_Node := Child_Node (Current_Node => Child_Node (Current_Node => Next_Sibling (Current_Node => Child_Node (Current_Node => Current_Node)))); -- ASSUME Proc_Spec_Node = procedure_specification OR function_specification OR -- proof_function_declaration OR entry_specification elsif Syntax_Node_Type (Node => Child_Node (Current_Node => Current_Node)) = SP_Symbols.protected_operation_declaration_rep then -- ASSUME Syntax_Node_Type (Node => Child_Node (Current_Node => Current_Node) = protected_operation_declaration_rep -- we are dealing with a potential subprogram in the -- sequence of declarations in the PT declarative part Proc_Spec_Node := Child_Node (Current_Node => Next_Sibling (Current_Node => Child_Node (Current_Node => Current_Node))); -- ASSUME Proc_Spec_Node = procedure_specification OR function_specification OR -- proof_function_declaration OR entry_specification OR -- identifier OR assert_pragma else Proc_Spec_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child_Node (Current_Node => Current_Node) = protected_operation_declaration_rep OR " & "NULL in Find_Proc_Spec"); end if; else Proc_Spec_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = protected_operation_declaration_rep in Find_Proc_Spec"); end if; -- ASSUME Proc_Spec_Node = procedure_specification OR function_specification OR -- proof_function_declaration OR entry_specification OR -- identifier OR assert_pragma if Syntax_Node_Type (Node => Proc_Spec_Node) = SP_Symbols.proof_function_declaration then -- ASSERT_PRAGMA 
Proc_Spec_Node = proof_function_declaration Proc_Spec_Node := Child_Node (Current_Node => Proc_Spec_Node); elsif Syntax_Node_Type (Node => Proc_Spec_Node) /= SP_Symbols.procedure_specification and then Syntax_Node_Type (Node => Proc_Spec_Node) /= SP_Symbols.function_specification and then Syntax_Node_Type (Node => Proc_Spec_Node) /= SP_Symbols.entry_specification and then Syntax_Node_Type (Node => Proc_Spec_Node) /= SP_Symbols.identifier and then Syntax_Node_Type (Node => Proc_Spec_Node) /= SP_Symbols.assert_pragma then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Proc_Spec_Node = procedure_specification OR function_specification OR " & "proof_function_declaration OR entry_specification OR identifier OR assert_pragma in Find_Proc_Spec"); end if; -- ASSUME Proc_Spec_Node = procedure_specification OR function_specification OR entry_specification OR -- identifier OR assert_pragma SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Proc_Spec_Node) = SP_Symbols.procedure_specification or else Syntax_Node_Type (Node => Proc_Spec_Node) = SP_Symbols.function_specification or else Syntax_Node_Type (Node => Proc_Spec_Node) = SP_Symbols.entry_specification or else Syntax_Node_Type (Node => Proc_Spec_Node) = SP_Symbols.identifier or else Syntax_Node_Type (Node => Proc_Spec_Node) = SP_Symbols.assert_pragma, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Proc_Spec_Node = procedure_specification OR function_specification OR entry_specification OR " & "identifier OR assert_pragma in Find_Proc_Spec"); end Find_Proc_Spec; -------------------------------------------------------------------------------------- procedure Check_Arguments (Pragma_Node : in STree.SyntaxNode; Entity_Name : in LexTokenManager.Lex_String; Error_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Entity_Name, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Pragma_Node, --# SPARK_IO.File_Sys, --# STree.Table & --# Error_Found from *, --# Entity_Name, --# LexTokenManager.State, --# Pragma_Node, --# STree.Table; --# pre Syntax_Node_Type (Pragma_Node, STree.Table) = SP_Symbols.apragma; is Arg_Assoc_Rep_Node : STree.SyntaxNode; Subprog_Name_Node : STree.SyntaxNode; procedure Check_Represent_Same_Name (Exp_Node : in STree.SyntaxNode; Name : in LexTokenManager.Lex_String; Error_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Exp_Node, --# LexTokenManager.State, --# Name, --# SPARK_IO.File_Sys, --# STree.Table & --# Error_Found from *, --# Exp_Node, --# LexTokenManager.State, --# Name, --# STree.Table; --# pre Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.ADA_expression; is Is_Chain : Boolean; Id_Node, Next_Node : STree.SyntaxNode; begin Id_Node := Exp_Node; loop Is_Chain := Next_Sibling (Current_Node => Id_Node) = STree.NullNode; Next_Node := Child_Node (Current_Node => Id_Node); exit when not Is_Chain or else Next_Node = STree.NullNode; Id_Node := Next_Node; end loop; if not Is_Chain or else 
Syntax_Node_Type (Node => Id_Node) /= SP_Symbols.identifier or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Node => Id_Node), Lex_Str2 => Name) /= LexTokenManager.Str_Eq then -- Rule 3 failure Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 71, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Exp_Node), Id_Str => Name); end if; end Check_Represent_Same_Name; begin -- Check_Arguments Arg_Assoc_Rep_Node := Child_Node (Current_Node => Pragma_Node); -- ASSUME Arg_Assoc_Rep_Node = identifier OR assert_pragma if Syntax_Node_Type (Node => Arg_Assoc_Rep_Node) = SP_Symbols.identifier then -- ASSUME Arg_Assoc_Rep_Node = identifier Arg_Assoc_Rep_Node := Next_Sibling (Current_Node => Arg_Assoc_Rep_Node); -- ASSUME Arg_Assoc_Rep_Node = argument_association_rep OR NULL if Syntax_Node_Type (Node => Arg_Assoc_Rep_Node) = SP_Symbols.argument_association_rep then -- ASSUME Arg_Assoc_Rep_Node = argument_association_rep Arg_Assoc_Rep_Node := Child_Node (Current_Node => Arg_Assoc_Rep_Node); -- ASSUME Arg_Assoc_Rep_Node = argument_association_rep OR argument_association if Syntax_Node_Type (Node => Arg_Assoc_Rep_Node) = SP_Symbols.argument_association_rep then -- ASSUME Arg_Assoc_Rep_Node = argument_association_rep Arg_Assoc_Rep_Node := Child_Node (Current_Node => Arg_Assoc_Rep_Node); -- ASSUME Arg_Assoc_Rep_Node = argument_association_rep OR argument_association if Syntax_Node_Type (Node => Arg_Assoc_Rep_Node) = SP_Symbols.argument_association then -- ASSUME Arg_Assoc_Rep_Node = argument_association -- pragma has two arguments Subprog_Name_Node := Child_Node (Current_Node => Arg_Assoc_Rep_Node); -- ASSUME Subprog_Name_Node = identifier OR ADA_expression if Syntax_Node_Type (Node => Subprog_Name_Node) = SP_Symbols.identifier then -- ASSUME Subprog_Name_Node = identifier -- form of expression wrong - Rule 1 failure Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 71, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Subprog_Name_Node), Id_Str => Entity_Name); elsif Syntax_Node_Type (Node => Subprog_Name_Node) = SP_Symbols.ADA_expression then -- ASSUME Subprog_Name_Node = ADA_expression -- form of expression ok so check name actually matches Check_Represent_Same_Name (Exp_Node => Subprog_Name_Node, Name => Entity_Name, Error_Found => Error_Found); else Error_Found := True; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Subprog_Name_Node = identifier OR ADA_expression in Check_Arguments"); end if; elsif Syntax_Node_Type (Node => Arg_Assoc_Rep_Node) = SP_Symbols.argument_association_rep then -- ASSUME Arg_Assoc_Rep_Node = argument_association_rep -- pragma does nor have exactly 2 arguments -- Rule 2 failure Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 69, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Pragma_Node), Id_Str => LexTokenManager.Attach_Handler_Token); else Error_Found := True; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Arg_Assoc_Rep_Node = argument_association_rep OR " & "argument_association in Check_Arguments"); end if; elsif Syntax_Node_Type (Node => Arg_Assoc_Rep_Node) = SP_Symbols.argument_association then -- ASSUME Arg_Assoc_Rep_Node = argument_association -- pragma does nor have exactly 2 arguments -- Rule 2 failure Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 69, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => 
Pragma_Node), Id_Str => LexTokenManager.Attach_Handler_Token); else Error_Found := True; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Arg_Assoc_Rep_Node = argument_association_rep OR " & "argument_association in Check_Arguments"); end if; elsif Arg_Assoc_Rep_Node = STree.NullNode then -- ASSUME Arg_Assoc_Rep_Node = assert_pragma -- pragma does nor have exactly 2 arguments -- Rule 2 failure Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 69, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Pragma_Node), Id_Str => LexTokenManager.Attach_Handler_Token); else Error_Found := True; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Arg_Assoc_Rep_Node = argument_association_rep OR NULL in Check_Arguments"); end if; elsif Syntax_Node_Type (Node => Arg_Assoc_Rep_Node) = SP_Symbols.assert_pragma then -- ASSUME Arg_Assoc_Rep_Node = assert_pragma -- pragma does nor have exactly 2 arguments -- Rule 2 failure Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 69, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Pragma_Node), Id_Str => LexTokenManager.Attach_Handler_Token); else Error_Found := True; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Arg_Assoc_Rep_Node = identifier OR assert_pragma in Check_Arguments"); end if; end Check_Arguments; begin -- Wf_Attach_Handler The_Region := Dictionary.GetRegion (Scope); -- attach_Handler can only appear in the spec of a protected type if Dictionary.IsType (The_Region) and then Dictionary.IsProtectedTypeMark (The_Region) then Find_Proc_Spec (Pragma_Node => Pragma_Node, Proc_Spec_Node => Proc_Spec_Node); -- ASSUME Proc_Spec_Node = procedure_specification OR function_specification OR entry_specification OR -- identifier OR assert_pragma if Syntax_Node_Type (Node => Proc_Spec_Node) = SP_Symbols.procedure_specification then -- ASSUME Proc_Spec_Node = procedure_specification Proc_Spec_Node := Child_Node (Current_Node => Child_Node (Current_Node => Proc_Spec_Node)); -- ASSUME Proc_Spec_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Proc_Spec_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Proc_Spec_Node = identifier in Wf_Attach_Handler"); Proc_Ident := Node_Lex_String (Node => Proc_Spec_Node); Check_Arguments (Pragma_Node => Pragma_Node, Entity_Name => Proc_Ident, Error_Found => Error_Found); if not Error_Found then Proc_Sym := Dictionary.LookupItem (Name => Proc_Ident, Scope => Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); if Dictionary.GetNumberOfSubprogramParameters (Proc_Sym) = 0 then STree.Set_Node_Lex_String (Sym => Proc_Sym, Node => Proc_Spec_Node); Dictionary.SetIsInterruptHandler (Proc_Sym); Dictionary.SetTypeHasPragma (The_Region, Dictionary.AttachHandler); else -- rule 7 failure ErrorHandler.Semantic_Error (Err_Num => 885, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Pragma_Node), Id_Str => LexTokenManager.Null_String); end if; end if; -- Error_Found elsif Syntax_Node_Type (Node => Proc_Spec_Node) = SP_Symbols.function_specification or else Syntax_Node_Type (Node => Proc_Spec_Node) = SP_Symbols.entry_specification or else Syntax_Node_Type (Node => Proc_Spec_Node) = SP_Symbols.identifier or else Syntax_Node_Type (Node => Proc_Spec_Node) = SP_Symbols.assert_pragma then -- ASSUME Proc_Spec_Node = function_specification OR entry_specification OR 
identifier OR assert_pragma -- rule 5 failure ErrorHandler.Semantic_Error (Err_Num => 884, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Pragma_Node), Id_Str => LexTokenManager.Null_String); end if; else -- not in PT (Rule 8) ErrorHandler.Semantic_Error (Err_Num => 884, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Pragma_Node), Id_Str => LexTokenManager.Null_String); end if; end Wf_Attach_Handler; spark-2012.0.deb/examiner/error_io.ads0000644000175000017500000000731411753202336016567 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SPARK_IO; with Error_Types; --# inherit Dictionary, --# Error_Types, --# E_Strings, --# LexTokenManager, --# SPARK_IO; package Error_IO is ---------------------------------------------------------------- -- This package provides simple primitives for the storage of -- Error Entries in a temporary file. -- -- A file of error entries behaves like any other standard -- file - it may be created, written to, reset, read from and -- closed in the standard way. -- -- The implementation of this package _may_ choose to store -- these files on disk, using the facilities of SPARK_IO, or -- may choose to store the file entirely in-memory on some -- hosts. 
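--
-- A usage sketch (illustrative only; status checking omitted, and assuming
-- SPARK_IO.File_Mode provides In_File for reading):
--    Create (File, Status);
--    Put_Numeric_Error (File, Item);         -- repeat as required
--    Reset (File, SPARK_IO.In_File, Status); -- switch to reading
--    Get_Numeric_Error (File, Item);         -- repeat as required
--    Close (File, Status);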
---------------------------------------------------------------- type File_Type is private; Null_File : constant File_Type; procedure Create (File : in out File_Type; Status : out SPARK_IO.File_Status); --# global in out SPARK_IO.File_Sys; --# derives File, --# SPARK_IO.File_Sys, --# Status from File, --# SPARK_IO.File_Sys; procedure Close (File : in out File_Type; Status : out SPARK_IO.File_Status); --# global in out SPARK_IO.File_Sys; --# derives File, --# SPARK_IO.File_Sys from *, --# File & --# Status from File, --# SPARK_IO.File_Sys; procedure Reset (File : in out File_Type; Mode_Of_File : in SPARK_IO.File_Mode; Status : out SPARK_IO.File_Status); --# global in out SPARK_IO.File_Sys; --# derives File, --# SPARK_IO.File_Sys from *, --# File, --# Mode_Of_File & --# Status from File, --# Mode_Of_File, --# SPARK_IO.File_Sys; procedure Put_Numeric_Error (File : in File_Type; Item : in Error_Types.NumericError); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# Item; procedure Get_Numeric_Error (File : in File_Type; Item : out Error_Types.NumericError); --# global in out SPARK_IO.File_Sys; --# derives Item from SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# File; private --# hide Error_IO; type File_Descriptor; type File_Type is access File_Descriptor; Null_File : constant File_Type := null; end Error_IO; spark-2012.0.deb/examiner/declarations-outputdeclarations-printdeclarations-printtyperules.adb0000644000175000017500000011402011753202336032150 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Debug; separate (Declarations.OutputDeclarations.PrintDeclarations) procedure PrintTypeRules (Write_Rules : in Boolean; Rule_File : in SPARK_IO.File_Type) is Empty : Boolean; Ok : Boolean; Sym : Dictionary.Symbol; type Bounds is (LowerBound, UpperBound); procedure PutBase (WithBase : in Boolean) --# global in Rule_File; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Rule_File, --# WithBase; is begin if WithBase then SPARK_IO.Put_String (Rule_File, "__base", 0); end if; end PutBase; procedure PrintLowerLessThanUpperRule (Sym : in Dictionary.Symbol; WithBase : in Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Rule_Family_Name; --# in Rule_File; --# in Scope; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# derives Rule_Counter from *, --# WithBase & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# Sym, --# WithBase; is begin Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); PutBase (WithBase); SPARK_IO.Put_String (Rule_File, "__first <= ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); PutBase (WithBase); SPARK_IO.Put_Line (Rule_File, "__last may_be_deduced.", 0); if WithBase then --additional rule that base type is at least as big as type Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); PutBase (WithBase); SPARK_IO.Put_String (Rule_File, "__first <= ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_Line (Rule_File, "__first may_be_deduced.", 0); Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); PutBase (WithBase); SPARK_IO.Put_String (Rule_File, "__last >= ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_Line (Rule_File, "__last may_be_deduced.", 0); end if; end PrintLowerLessThanUpperRule; procedure PrintABound (WhichBound : in Bounds; WithBase : in Boolean; StoreVal : in LexTokenManager.Lex_String) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Rule_Family_Name; --# in Rule_File; --# in Scope; --# in Sym; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# derives Rule_Counter from *, --# LexTokenManager.State, --# StoreVal, --# WhichBound, --# WithBase & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# StoreVal, --# Sym, --# WhichBound, --# WithBase; is begin --PrintABound if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => StoreVal, Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq then Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); PutBase (WithBase); if WhichBound = LowerBound then SPARK_IO.Put_String (Rule_File, "__first", 0); else SPARK_IO.Put_String (Rule_File, "__last", 0); end if; Print_Replacement_Rule (Rule_File => Rule_File, Store_Val => StoreVal, Type_Mark => Sym, Scope => Scope); elsif WhichBound = UpperBound then --if we do not know the value of the upper bound then put --out the less precise rule that Lower <= Upper PrintLowerLessThanUpperRule (Sym, 
WithBase); end if; end PrintABound; procedure PrintBounds (WithBase : in Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Rule_Family_Name; --# in Rule_File; --# in Scope; --# in Sym; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# derives Rule_Counter from *, --# Dictionary.Dict, --# LexTokenManager.State, --# Sym, --# WithBase & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# Sym, --# WithBase; is begin PrintABound (LowerBound, WithBase, Dictionary.GetScalarAttributeValue (WithBase, LexTokenManager.First_Token, Sym)); PrintABound (UpperBound, WithBase, Dictionary.GetScalarAttributeValue (WithBase, LexTokenManager.Last_Token, Sym)); end PrintBounds; -- print a replacement rule for attribute T'Modulus where T is a modular type procedure PrintModulus (Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Rule_Family_Name; --# in Rule_File; --# in Scope; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# derives Rule_Counter from * & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# Sym; is begin -- PrintModulus Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__modulus", 0); Print_Replacement_Rule (Rule_File => Rule_File, Store_Val => Dictionary.GetScalarAttributeValue (False, LexTokenManager.Modulus_Token, Sym), Type_Mark => Sym, Scope => Scope); end PrintModulus; procedure PrintSizeBoundsRule (Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Rule_Family_Name; --# in Rule_File; --# in Scope; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# derives Rule_Counter from * & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# Sym; is begin -- We _can_ produce a rule that T'Size >= 0 for all types T. -- On the other hand, the upper bound of T'Size is implementation- -- dependent, so we cannot produce a rule for that. 
Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__size >= 0 may_be_deduced", 0); End_A_Rule (Rule_File => Rule_File); end PrintSizeBoundsRule; procedure PrintSizeReplacementRule (Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Rule_Family_Name; --# in Rule_File; --# in Scope; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# derives Rule_Counter from * & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# Sym; is LexValue : LexTokenManager.Lex_String; begin -- convert value from Maths.Value to LexTokenManager.LexString LexValue := Dictionary.TypeSizeAttribute (Sym); Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__size may_be_replaced_by ", 0); E_Strings.Put_String (File => Rule_File, E_Str => Maths.ValueToString (Maths.ValueRep (LexValue))); End_A_Rule (Rule_File => Rule_File); end PrintSizeReplacementRule; procedure PrintEnumerationRules (Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Rule_Family_Name; --# in Rule_File; --# in Scope; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# derives Rule_Counter from *, --# Dictionary.Dict, --# Sym & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# Sym; is It : Dictionary.Iterator; PositionNumber : Natural; LastLiteral, FirstLiteral : Dictionary.Symbol; begin --PrintEnumerationRules -- t__pos(t__first) may_be_replaced_by 0 . Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__pos(", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__first)", 0); SPARK_IO.Put_String (Rule_File, " may_be_replaced_by 0", 0); End_A_Rule (Rule_File => Rule_File); -- pos and val rules for literals PositionNumber := 0; It := Dictionary.FirstEnumerationLiteral (Sym); FirstLiteral := Dictionary.CurrentSymbol (It); loop -- t__pos("a literal") may_be_replaced_by "its position number" . Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__pos(", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Dictionary.CurrentSymbol (It)); SPARK_IO.Put_String (Rule_File, ") may_be_replaced_by ", 0); SPARK_IO.Put_Integer (Rule_File, PositionNumber, 0, 10); End_A_Rule (Rule_File => Rule_File); -- t__val("a position number") may_be_replaced_by "its associated literal". 
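-- e.g. (illustrative, for a hypothetical enumeration type colour whose first
-- literal is red, matching the naming used in the comments further below):
--    colour__pos(red) may_be_replaced_by 0 .
--    colour__val(0) may_be_replaced_by red .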
Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__val(", 0); SPARK_IO.Put_Integer (Rule_File, PositionNumber, 0, 10); SPARK_IO.Put_String (Rule_File, ") may_be_replaced_by ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Dictionary.CurrentSymbol (It)); End_A_Rule (Rule_File => Rule_File); -- keep copy of the last literal before exiting the loop LastLiteral := Dictionary.CurrentSymbol (It); It := Dictionary.NextSymbol (It); exit when Dictionary.IsNullIterator (It); PositionNumber := PositionNumber + 1; end loop; -- on exit, PositionNumber holds the highest position number of the type and -- LastLiteral holds the symbol of the last literal -- t__pos(t__last) may_be_replaced_by "the right value" . Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__pos(", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__last)", 0); SPARK_IO.Put_String (Rule_File, " may_be_replaced_by ", 0); SPARK_IO.Put_Integer (Rule_File, PositionNumber, 0, 10); End_A_Rule (Rule_File => Rule_File); -- t__pos(succ(X)) may_be_replaced_by t__pos(X) + 1 if ... Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__pos(succ(X)) may_be_replaced_by ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_Line (Rule_File, "__pos(X) + 1", 0); SPARK_IO.Put_String (Rule_File, " if [X <=", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => LastLiteral); SPARK_IO.Put_String (Rule_File, ", X <> ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => LastLiteral); SPARK_IO.Put_String (Rule_File, "]", 0); End_A_Rule (Rule_File => Rule_File); -- t__pos(pred(X)) may_be_replaced_by t__pos(X) - 1 if ... Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__pos(pred(X)) may_be_replaced_by ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_Line (Rule_File, "__pos(X) - 1", 0); SPARK_IO.Put_String (Rule_File, " if [X >=", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => FirstLiteral); SPARK_IO.Put_String (Rule_File, ", X <> ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => FirstLiteral); SPARK_IO.Put_String (Rule_File, "]", 0); End_A_Rule (Rule_File => Rule_File); -- colour__pos(X) >= 0 may_be_deduced ... Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_Line (Rule_File, "__pos(X) >= 0 may_be_deduced_from", 0); SPARK_IO.Put_String (Rule_File, " [", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => FirstLiteral); SPARK_IO.Put_String (Rule_File, " <= X, X <= ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => LastLiteral); SPARK_IO.Put_String (Rule_File, "]", 0); End_A_Rule (Rule_File => Rule_File); -- colour__pos(X) <= ? may_be_deduced ... 
Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__pos(X) <= ", 0); SPARK_IO.Put_Integer (Rule_File, PositionNumber, 0, 10); SPARK_IO.Put_Line (Rule_File, " may_be_deduced_from", 0); SPARK_IO.Put_String (Rule_File, " [", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => FirstLiteral); SPARK_IO.Put_String (Rule_File, " <= X, X <= ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => LastLiteral); SPARK_IO.Put_String (Rule_File, "]", 0); End_A_Rule (Rule_File => Rule_File); -- colour__val(X) >= first literal may_be_deduced ... Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__val(X) >= ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => FirstLiteral); SPARK_IO.Put_Line (Rule_File, " may_be_deduced_from", 0); SPARK_IO.Put_String (Rule_File, " [0 <= X, X <= ", 0); SPARK_IO.Put_Integer (Rule_File, PositionNumber, 0, 10); SPARK_IO.Put_String (Rule_File, "]", 0); End_A_Rule (Rule_File => Rule_File); -- colour__val(X) <= last literal may_be_deduced ... Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__val(X) <= ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => LastLiteral); SPARK_IO.Put_Line (Rule_File, " may_be_deduced_from", 0); SPARK_IO.Put_String (Rule_File, " [0 <= X, X <= ", 0); SPARK_IO.Put_Integer (Rule_File, PositionNumber, 0, 10); SPARK_IO.Put_String (Rule_File, "]", 0); End_A_Rule (Rule_File => Rule_File); -- succ(colour__val(X)) may_be_replaced_by colour__val(X+1) if ... Print_Rule_Name (Rule_File => Rule_File); SPARK_IO.Put_String (Rule_File, "succ(", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__val(X)) may_be_replaced_by ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_Line (Rule_File, "__val(X+1)", 0); SPARK_IO.Put_String (Rule_File, " if [0 <= X, X < ", 0); SPARK_IO.Put_Integer (Rule_File, PositionNumber, 0, 10); SPARK_IO.Put_String (Rule_File, "]", 0); End_A_Rule (Rule_File => Rule_File); -- pred(colour__val(X)) may_be_replaced_by colour__val(X-1) if ... 
Print_Rule_Name (Rule_File => Rule_File); SPARK_IO.Put_String (Rule_File, "pred(", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__val(X)) may_be_replaced_by ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_Line (Rule_File, "__val(X-1)", 0); SPARK_IO.Put_String (Rule_File, " if [0 < X, X <= ", 0); SPARK_IO.Put_Integer (Rule_File, PositionNumber, 0, 10); SPARK_IO.Put_String (Rule_File, "]", 0); End_A_Rule (Rule_File => Rule_File); -- Pos to Val reciprocity Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__pos(", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_Line (Rule_File, "__val(X)) may_be_replaced_by X", 0); SPARK_IO.Put_String (Rule_File, " if [0 <= X, X <= ", 0); SPARK_IO.Put_Integer (Rule_File, PositionNumber, 0, 10); SPARK_IO.Put_String (Rule_File, "]", 0); End_A_Rule (Rule_File => Rule_File); -- Val to pos reciprocity Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__val(", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_Line (Rule_File, "__pos(X)) may_be_replaced_by X", 0); SPARK_IO.Put_String (Rule_File, " if [", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => FirstLiteral); SPARK_IO.Put_String (Rule_File, " <= X, X <= ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => LastLiteral); SPARK_IO.Put_String (Rule_File, "]", 0); End_A_Rule (Rule_File => Rule_File); -- Ordering equivalence (suggested by Phil Thornley, BAe) Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__pos(X) <= ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_Line (Rule_File, "__pos(Y) & X <= Y are_interchangeable ", 0); SPARK_IO.Put_String (Rule_File, " if [", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => FirstLiteral); SPARK_IO.Put_String (Rule_File, " <= X, X <= ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => LastLiteral); SPARK_IO.Put_String (Rule_File, ", ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => FirstLiteral); SPARK_IO.Put_String (Rule_File, " <= Y, Y <= ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => LastLiteral); SPARK_IO.Put_String (Rule_File, "]", 0); End_A_Rule (Rule_File => Rule_File); Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__val(X) <= ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_Line (Rule_File, "__val(Y) & X <= Y are_interchangeable ", 0); SPARK_IO.Put_String (Rule_File, " if [0 <= X, X <= ", 0); SPARK_IO.Put_Integer (Rule_File, PositionNumber, 0, 10); SPARK_IO.Put_String (Rule_File, ", 0 <= Y, Y <= ", 0); SPARK_IO.Put_Integer (Rule_File, PositionNumber, 0, 10); SPARK_IO.Put_String (Rule_File, "]", 0); End_A_Rule (Rule_File => Rule_File); end PrintEnumerationRules; procedure PrintConstraintRules (Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Rule_Family_Name; --# in Rule_File; --# in Scope; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# derives Rule_Counter from *, --# Dictionary.Dict, --# Sym & --# SPARK_IO.File_Sys from *, --# 
CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# Sym; is begin -- If the implicitly-declared constraint is associated with a formal parameter -- of type String then we know its lower bound must be 1 (SPARK95.doc (section 3.6.3)) -- -- NOTE here - a special case is needed here, since String is the only -- array type in SPARK that allows a null-range literal "". -- Where "" is passed as an actual parameter, we have the anomaly that -- S'First = 1 and S'Last = 0, so we need special rules here. if Dictionary.IsPredefinedStringType (Dictionary.GetType (Dictionary.GetParameterAssociatedWithParameterConstraint (Sym))) then -- S'First = 1 Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__first may_be_replaced_by 1", 0); End_A_Rule (Rule_File => Rule_File); -- S'Last <= Positive'Last Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__last <= ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Dictionary.GetType (Sym)); SPARK_IO.Put_String (Rule_File, "__last may_be_deduced", 0); End_A_Rule (Rule_File => Rule_File); -- S'Last >= 0 -- NOT S'Last >= Positive'First - see above Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__last >= 0 may_be_deduced", 0); End_A_Rule (Rule_File => Rule_File); else -- For formal array parameters that aren't String... -- -- Subprogram constraint symbols are symbols representing the indexes of unconstrained -- objects as they are constrained by something at some point. We typically do not know the -- actual bounds but we do know that the anonymous subtype represented by the symbol must -- at least fit within the type of the matching index of the unconstrained type declaration. -- i.e. 
for type A is array (Integer range <>) of T; and formal parameter X of type A then -- we will have a subprogram constraint symbol x__index_subtype__1 and we know that -- x__index_subtype__1__first >= integer__first and x__index_subtype__1__last <= integer__last Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__first >= ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Dictionary.GetType (Sym)); SPARK_IO.Put_String (Rule_File, "__first may_be_deduced", 0); End_A_Rule (Rule_File => Rule_File); Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__last <= ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Dictionary.GetType (Sym)); SPARK_IO.Put_String (Rule_File, "__last may_be_deduced", 0); End_A_Rule (Rule_File => Rule_File); -- and, as free bonus PrintLowerLessThanUpperRule (Sym, False); -- and by transitivity Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__last >= ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Dictionary.GetType (Sym)); SPARK_IO.Put_String (Rule_File, "__first may_be_deduced", 0); End_A_Rule (Rule_File => Rule_File); Print_Rule_Name (Rule_File => Rule_File); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, "__first <= ", 0); Print_Symbol (File => Rule_File, Scope => Scope, Sym => Dictionary.GetType (Sym)); SPARK_IO.Put_String (Rule_File, "__last may_be_deduced", 0); End_A_Rule (Rule_File => Rule_File); end if; end PrintConstraintRules; procedure PrintRecordEqualityRule (Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Rule_Family_Name; --# in Rule_File; --# in Scope; --# in out Rule_Counter; --# in out SPARK_IO.File_Sys; --# derives Rule_Counter from *, --# Dictionary.Dict, --# Sym & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Rule_Counter, --# Rule_Family_Name, --# Rule_File, --# Scope, --# Sym; is ComponentIt : Dictionary.Iterator; begin if Dictionary.RecordHasSomeFields (Sym) then Print_Rule_Name (Rule_File => Rule_File); SPARK_IO.Put_String (Rule_File, "A = B may_be_deduced_from", 0); SPARK_IO.New_Line (Rule_File, 1); SPARK_IO.Put_String (Rule_File, " [goal(checktype(A,", 0); Print_Symbol_No_Wrap (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, ")),", 0); SPARK_IO.New_Line (Rule_File, 1); SPARK_IO.Put_String (Rule_File, " goal(checktype(B,", 0); Print_Symbol_No_Wrap (File => Rule_File, Scope => Scope, Sym => Sym); SPARK_IO.Put_String (Rule_File, ")),", 0); SPARK_IO.New_Line (Rule_File, 1); if Dictionary.IsPrivateType (Sym, Scope) then SPARK_IO.Put_String (Rule_File, " fld_inherit(A) = fld_inherit(B)]", 0); else ComponentIt := Dictionary.FirstRecordComponent (Sym); -- If all ancestors of an extended record are null records then we don't want -- a declaration of an Inherit field referencing first of them. 
if Dictionary.TypeIsExtendedTagged (Sym) and then Dictionary.NoFieldsBelowThisRecord (Sym) then --skip inherit field ComponentIt := Dictionary.NextSymbol (ComponentIt); end if; while not Dictionary.IsNullIterator (ComponentIt) loop SPARK_IO.Put_String (Rule_File, " fld_", 0); Print_Symbol_No_Wrap (File => Rule_File, Scope => Scope, Sym => Dictionary.CurrentSymbol (ComponentIt)); SPARK_IO.Put_String (Rule_File, "(A) = fld_", 0); Print_Symbol_No_Wrap (File => Rule_File, Scope => Scope, Sym => Dictionary.CurrentSymbol (ComponentIt)); SPARK_IO.Put_String (Rule_File, "(B)", 0); ComponentIt := Dictionary.NextSymbol (ComponentIt); exit when Dictionary.IsNullIterator (ComponentIt); SPARK_IO.Put_String (Rule_File, ",", 0); SPARK_IO.New_Line (Rule_File, 1); end loop; SPARK_IO.Put_String (Rule_File, "]", 0); end if; End_A_Rule (Rule_File => Rule_File); end if; end PrintRecordEqualityRule; begin --PrintTypeRules if Write_Rules then loop --# accept Flow, 10, Ok, "Expected ineffective assignment to Ok"; Lists.Get_First (Heap => L_Heap, The_List => Type_List, Symbol => Sym, Empty => Empty, Ok => Ok); --# end accept; exit when Empty; -- Debug.PrintSym ("PrintTypeRule for ", Sym); if Dictionary.IsTypeMark (Sym) then PrintSizeBoundsRule (Sym); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Dictionary.TypeSizeAttribute (Sym), Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq then PrintSizeReplacementRule (Sym); end if; if Dictionary.TypeIsGeneric (Sym) then if Dictionary.TypeIsScalar (Sym) then -- Just output a lower <= upper may_be_deduced rule -- TBD unsure whether we can have WithBase => True here PNA 15/12/05 PrintLowerLessThanUpperRule (Sym => Sym, WithBase => False); end if; -- non-scalar generic types get no rules at all elsif Dictionary.IsPrivateType (Sym, Scope) then -- If the view of the type is private from this Scope, then no rules null; else if Dictionary.TypeIsScalar (Sym) then PrintBounds (WithBase => False); PrintBounds (WithBase => True); if Dictionary.TypeIsModular (Sym) then PrintModulus (Sym); end if; if Dictionary.TypeIsEnumeration (Sym) and then Dictionary.Types_Are_Equal (Left_Symbol => Dictionary.GetRootType (Sym), Right_Symbol => Sym, Full_Range_Subtype => False) and then not Dictionary.IsPredefinedCharacterType (Sym) then PrintEnumerationRules (Sym); end if; elsif Dictionary.TypeIsRecord (Sym) and then not Dictionary.IsSubtype (Sym) then -- If Sym denotes some sort of record then print an inference -- rule for "=". Don't do this for full-range record subtypes -- (the only record subtypes allowed in SPARK) because: -- a. they always appear as the root type in FDL so no need -- for rules about the subtype; -- b. if we did allow rules to be printed for subtypes here -- we would need to modify PrintRecordEqualityRule to deal -- with them correctly. 
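-- Illustrative sketch: for a hypothetical non-private, non-tagged record type R with
-- fields X and Y, the call below causes PrintRecordEqualityRule to emit a rule of the form
--    A = B may_be_deduced_from [goal(checktype(A, r)), goal(checktype(B, r)),
--                               fld_x(A) = fld_x(B), fld_y(A) = fld_y(B)] .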
PrintRecordEqualityRule (Sym); end if; end if; elsif Dictionary.IsParameterConstraint (Sym) then PrintConstraintRules (Sym); end if; end loop; end if; --# accept Flow, 33, Ok, "Expected Ok to be neither referenced nor exported"; end PrintTypeRules; spark-2012.0.deb/examiner/sem-dependency_relation.adb0000644000175000017500000001100611753202336021514 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) package body Dependency_Relation is procedure Create_Full_Dependency (Node_Pos : in LexTokenManager.Token_Position; Subprog_Sym : in Dictionary.Symbol; Abstraction : in Dictionary.Abstractions; Import_List, Export_List : in SeqAlgebra.Seq; The_Heap : in Heap.HeapRecord) --# global in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# Abstraction, --# ContextManager.Ops.Unit_Stack, --# Export_List, --# Import_List, --# Node_Pos, --# Subprog_Sym, --# The_Heap & --# SPARK_IO.File_Sys from *, --# Abstraction, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Export_List, --# Import_List, --# LexTokenManager.State, --# Node_Pos, --# Subprog_Sym, --# The_Heap; is separate; ----------------------------------------------------------------------- procedure Check_Derives_Consistency (Subprog_Sym : in Dictionary.Symbol; Position : in LexTokenManager.Token_Position; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Position, --# SPARK_IO.File_Sys, --# Subprog_Sym, --# The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# Dictionary.Dict, --# Subprog_Sym, --# The_Heap; is separate; -------------------------------------------------------------------------- procedure Create_Full_Subprog_Dependency (Node_Pos : in LexTokenManager.Token_Position; Subprog_Sym : in Dictionary.Symbol; Abstraction : in Dictionary.Abstractions; The_Heap : in out Heap.HeapRecord) is separate; --------------------------------------------------------------------- procedure Wf_Dependency_Relation (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Subprog_Sym : in Dictionary.Symbol; First_Seen : in Boolean; Glob_Def_Err : in Boolean; The_Heap : in out Heap.HeapRecord) is separate; end Dependency_Relation; 
spark-2012.0.deb/examiner/reflist.adb0000644000175000017500000006626311753202336016406 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- RefList -- -- Description: -- -- The Reflist package provides a mechanism for storing node-based flow relations -- more efficiently. The actual relations are stored in the Heap and related -- to a syntax node by means of a hash table. -- See package spec. -- -- Operation -- -- The Reflist works in two parts. The first is concerned with creating or -- locating a KeyCell. The KeyCell is a heap atom which is the start point for -- the data structures required to describe the flow relations. The KeyCell can -- be found given a syntax node. The second part of the operation is concerned -- with adding or interrogating the data structures given a KeyCell as a -- starting point. -- -- KeyCell operations -- -- The package contains an array called "Table" which is an array of Heap atoms. -- It is initialized to all zeros (all null atoms). Finding a KeyCell starts -- with using a simple modulo hash function on an integer representation of the -- syntax node; this yields an Index into the Table. If the indexed value is 0 -- then there is no KeyCell for that node; otherwise, the Table entry will be -- the value of a heap atom. The Avalue of the atom contains the node number. -- If more than one node hashes to the same table index, a linked list of atoms -- is created using their Apointers. So searching for a KeyCell involves -- finding the correct table entry, then walking down the APointers of the atom -- at that table location until the Avalue of an atom matches the node we are -- seeking. The atom found is the KeyCell -- -- Node --(hash)--------------------------+ -- | -- Table------------------V------------------------------------+ -- |0|0|0|0|0|0|0|0|0|0|0|.|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0|0| -- +----------------------|------------------------------------+ -- | -- +-V-----------+ -- |Avalue = Node| -- |APtr | -- +-|-----------+ -- | -- +-V-----------+ -- |Avalue = Node| -- |APtr = null | -- +-------------+ -- -- Data Structure Operations -- -- Given a KeyCell we can find the associated information flows. -- There are three cases: -- (1) The node has a "null relation" (i.e. derives nothing from nothing") -- (2) The node has an associated referenced variable list (i.e. 
it references -- variables but has no exports; this is typical of Conditions) in the form -- of a SeqAlgebra.Seq -- (3) The node has one or more exports each of which has a (SeqAlgebra) -- sequence of imports -- from which it is derived. The sequence may of course be empty for any -- particualr export -- -- These cases are represented as follows: -- Null derives -- -- KeyCell------+ -- | BPtr = null| -- +------------+ -- -- RefList -- -- KeyCell------+ -- | BPtr ------------------> Head of SeqAlgebra.Seq containing referenced -- +------------+ variables -- -- (Note that because Seqs are built using Aptrs, we can distinguish the above -- case from the one that follows because the Bptr of the cell pointed to -- ("head of..." above) will be null) -- -- Export list -- -- KeyCell------+ +-----------------+ -- | BPtr ------------------>|AValue = export1 | -- +------------+ |BPtr ------------------> Head of Seq -- |APtr | containing imports -- +-|---------------+ for export1 -- | -- V -- +-----------------+ -- |AValue = export2 | -- |BPtr ------------------> Head of Seq -- |APtr = null | containing imports -- +-----------------+ for export2 -- -- -------------------------------------------------------------------------------- with ExaminerConstants; with SP_Symbols; with SystemErrors; use type SP_Symbols.SP_Symbol; package body RefList is HashDivider : constant Integer := HashMax + 1; procedure Init (Table : out HashTable) is begin Table := HashTable'(HashIndex => 0); end Init; ------------------------------------------------------------------------- function Hash (Node : STree.SyntaxNode) return HashIndex is begin return Natural (STree.NodeToRef (Node)) mod HashDivider; end Hash; ------------------------------------------------------------------------- function MatchingNode (TheHeap : Heap.HeapRecord; Node : STree.SyntaxNode; Cell : Heap.Atom) return Boolean is begin return Natural (STree.NodeToRef (Node)) = Heap.AValue (TheHeap, Cell); end MatchingNode; ------------------------------------------- procedure FindOrMakeKeyCell (Table : in out HashTable; TheHeap : in out Heap.HeapRecord; Node : in STree.SyntaxNode; Cell : out Heap.Atom) --# global in out Statistics.TableUsage; --# derives Cell, --# Table, --# TheHeap from Node, --# Table, --# TheHeap & --# Statistics.TableUsage from *, --# Node, --# Table, --# TheHeap; is Index : HashIndex; SeekCell, LocalCell : Heap.Atom; Found : Boolean; ---------------------------- procedure SetNodeValue (Cell : in Heap.Atom) --# global in Node; --# in out TheHeap; --# derives TheHeap from *, --# Cell, --# Node; is begin Heap.UpdateAValue (TheHeap, Cell, Natural (STree.NodeToRef (Node))); end SetNodeValue; ---------------------------- begin -- FindOrMakeKeyCell Index := Hash (Node); if Table (Index) = 0 then -- first use of this hash table position Heap.CreateAtom (TheHeap, LocalCell); SetNodeValue (LocalCell); Table (Index) := Natural (LocalCell); Cell := LocalCell; else -- a hit - search linked list for match SeekCell := Heap.Atom (Table (Index)); loop Found := MatchingNode (TheHeap, Node, SeekCell); exit when Found; exit when Heap.IsNullPointer (Heap.APointer (TheHeap, SeekCell)); SeekCell := Heap.APointer (TheHeap, SeekCell); end loop; if Found then Cell := SeekCell; else -- add new Atom to end of hit list Heap.CreateAtom (TheHeap, LocalCell); SetNodeValue (LocalCell); Heap.UpdateAPointer (TheHeap, SeekCell, LocalCell); Cell := LocalCell; end if; end if; end FindOrMakeKeyCell; ------------------------------------------------------------------------- 
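-- Illustrative usage sketch (X_Sym, Y_And_Z_Seq and the node names are hypothetical
-- placeholders): for an assignment node N implementing "X := Y + Z;" a caller would record
--    AddRelation (Table, TheHeap, N, TheExport => X_Sym, Imports => Y_And_Z_Seq);
-- using the AddRelation operation declared below, whereas for a condition node C that
-- only references variables, passing TheExport => Dictionary.NullSymbol yields the plain
-- referenced-variable list described in the header comment above.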
procedure FindKeyCell (Table : in HashTable; TheHeap : in Heap.HeapRecord; Node : in STree.SyntaxNode; Cell : out Heap.Atom) --# derives Cell from Node, --# Table, --# TheHeap; is Index : HashIndex; SeekCell : Heap.Atom; begin -- FindKeyCell Index := Hash (Node); SeekCell := Heap.Atom (Table (Index)); loop exit when MatchingNode (TheHeap, Node, SeekCell); if Heap.IsNullPointer (Heap.APointer (TheHeap, SeekCell)) then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Ref_List_Key_Cell_Missing, Msg => ""); end if; SeekCell := Heap.APointer (TheHeap, SeekCell); end loop; Cell := SeekCell; end FindKeyCell; ------------------------------------------------------------------------- procedure AddRelation (Table : in out HashTable; TheHeap : in out Heap.HeapRecord; Node : in STree.SyntaxNode; TheExport : in Dictionary.Symbol; Imports : in SeqAlgebra.Seq) is KeyCell, ExportCell : Heap.Atom; ------------------------------------ procedure LinkImports (TheHeap : in out Heap.HeapRecord; Cell : in Heap.Atom; Imps : in SeqAlgebra.Seq) --# derives TheHeap from *, --# Cell, --# Imps; is begin Heap.UpdateBPointer (TheHeap, Cell, Heap.Atom (SeqAlgebra.SeqToNatural (Imps))); end LinkImports; ------------------------------------ procedure SetSymValue (TheHeap : in out Heap.HeapRecord; Cell : in Heap.Atom; Sym : in Dictionary.Symbol) --# derives TheHeap from *, --# Cell, --# Sym; is begin Heap.UpdateAValue (TheHeap, Cell, Natural (Dictionary.SymbolRef (Sym))); end SetSymValue; ------------------------------------ begin -- AddRelation FindOrMakeKeyCell (Table, TheHeap, Node, -- to get KeyCell); if Dictionary.Is_Null_Symbol (TheExport) then -- create simple referenced variable list LinkImports (TheHeap, KeyCell, Imports); else -- create export list with each export pointing at import list Heap.CreateAtom (TheHeap, ExportCell); Heap.UpdateAPointer (TheHeap, ExportCell, Heap.BPointer (TheHeap, KeyCell)); Heap.UpdateBPointer (TheHeap, KeyCell, ExportCell); SetSymValue (TheHeap, ExportCell, TheExport); LinkImports (TheHeap, ExportCell, Imports); end if; end AddRelation; ------------------------------------------------------------ procedure AddNullRelation (Table : in out HashTable; TheHeap : in out Heap.HeapRecord; Node : in STree.SyntaxNode) is UnusedKeyCell : Heap.Atom; begin --# accept F, 10, UnusedKeyCell, "UnusedKeyCell unused here" & --# F, 33, UnusedKeyCell, "UnusedKeyCell unused here"; FindOrMakeKeyCell (Table, TheHeap, Node, -- to get UnusedKeyCell); -- Don't want to link anything to B ptr of KeyCell to indicate it is a -- null relation. end AddNullRelation; -------------extractor functions------------------------------- function NodeHasExportList (Table : in HashTable; TheHeap : in Heap.HeapRecord; Node : in STree.SyntaxNode) return Boolean is Index : HashIndex; SeekCell : Heap.Atom; Found : Boolean; begin Index := Hash (Node); if Table (Index) = 0 then -- no entry that matches Node Found := False; else -- a hit - search linked list for match SeekCell := Heap.Atom (Table (Index)); loop Found := MatchingNode (TheHeap, Node, SeekCell); if Found then -- Here we have a valid KeyCell called SeekCell. 
We need to -- validate that it is the KeyCell for a list of one or more -- exports rather than something else (such as a referenced -- variable list) Found := (not Heap.IsNullPointer (Heap.BPointer (TheHeap, SeekCell))) and then -- The KeyCell's BPtr points at something (either a RefList -- or an Export) (not Heap.IsNullPointer (Heap.BPointer (TheHeap, Heap.BPointer (TheHeap, SeekCell)))); -- and that something is an export because its Bptr points at -- a list of imports (if it was a RefList then the BPtr would be -- null) exit; -- no second chances, we either found it or we didn't end if; exit when -- no more possible KeyCells Heap.IsNullPointer (Heap.APointer (TheHeap, SeekCell)); SeekCell := Heap.APointer (TheHeap, SeekCell); -- next KeyCell end loop; end if; return Found; end NodeHasExportList; ----------------------------------------------------------------- function ExportHasDependencies (TheExport : in Heap.Atom; TheHeap : in Heap.HeapRecord) return Boolean is begin return not Heap.IsNullPointer (Heap.BPointer (TheHeap, TheExport)); end ExportHasDependencies; ----------------------------------------------------------------- procedure FirstExport (Table : in HashTable; TheHeap : in Heap.HeapRecord; Node : in STree.SyntaxNode; TheExport : out Heap.Atom) is KeyCell : Heap.Atom; TheExportLocal : Heap.Atom; begin FindKeyCell (Table, TheHeap, Node, -- to get KeyCell); TheExportLocal := Heap.BPointer (TheHeap, KeyCell); -- There are two valid cases: -- (1) TheExportLocal is null (indicating that the node's flow relation -- is "derives ;"; or -- (2) TheExportLocal's BPointer isn't null (indicating that there is at -- least one actual export). -- The only remaining case that the data structure could allow is trying -- to get the first export when TheExportLocal is actually linked to a -- Reflist (a list of variables referenced by a Condition for example). -- We check the validity thus: SystemErrors.RT_Assert (C => Heap.IsNullPointer (TheExportLocal) -- case 1 or else (not Heap.IsNullPointer (Heap.BPointer (TheHeap, TheExportLocal))), Sys_Err => SystemErrors.Other_Internal_Error, Msg => "FirstExport of a RefList Node is empty"); TheExport := TheExportLocal; end FirstExport; ----------------------------------------------------------------- function NextExport (TheHeap : Heap.HeapRecord; TheExport : Heap.Atom) return Heap.Atom is begin return Heap.APointer (TheHeap, TheExport); end NextExport; ----------------------------------------------------------------- function DependencyList (TheHeap : Heap.HeapRecord; TheExport : Heap.Atom) return SeqAlgebra.Seq is begin return SeqAlgebra.NaturalToSeq (Natural (Heap.BPointer (TheHeap, TheExport))); end DependencyList; ----------------------------------------------------------------- procedure ReferencedVarList (Table : in HashTable; TheHeap : in Heap.HeapRecord; Node : in STree.SyntaxNode; Seq : out SeqAlgebra.Seq) is KeyCell : Heap.Atom; SeqOrExport : Heap.Atom; begin FindKeyCell (Table, TheHeap, Node, -- to get KeyCell); SeqOrExport := Heap.BPointer (TheHeap, KeyCell); -- If we have wrongly (in terms of the precondition) called this procedure -- on a node with exports then SeqOrExport will not be the referenced -- variable list we were hoping for but the head of a list of exports. 
-- We can tell if this is the case by checking the Bptr of the cell; -- it will be null if we have the list we are seeking and not null if we -- have got an export by mistake SystemErrors.RT_Assert (C => Heap.IsNullPointer (Heap.BPointer (TheHeap, SeqOrExport)), Sys_Err => SystemErrors.Other_Internal_Error, Msg => "ReferencedVariableList called on a node which has exports"); Seq := SeqAlgebra.NaturalToSeq (Natural (SeqOrExport)); end ReferencedVarList; ----------------------------------------------------------------- procedure AllReferencedVariables (Table : in HashTable; TheHeap : in out Heap.HeapRecord; Node : in STree.SyntaxNode; Seq : out SeqAlgebra.Seq) is AllRefSeq : SeqAlgebra.Seq; AnExport : Heap.Atom; NullAtom : constant Heap.Atom := Heap.Atom (0); begin if NodeHasExportList (Table, TheHeap, Node) then -- create a set to union all imports into SeqAlgebra.CreateSeq (TheHeap, AllRefSeq); -- loop through exports FirstExport (Table, TheHeap, Node, -- to get AnExport); while AnExport /= NullAtom loop -- union imports of each export SeqAlgebra.AugmentSeq (TheHeap, AllRefSeq, DependencyList (TheHeap, AnExport)); -- next export AnExport := NextExport (TheHeap, AnExport); end loop; -- return union Seq := AllRefSeq; else -- Must just be a list of referenced variables -- so just return it. ReferencedVarList (Table, TheHeap, Node, Seq); end if; end AllReferencedVariables; ----------------------------------------------------------------- -- function returns true if Sym is the symbol of a record variable, or -- a subcomponent of a record and if it is not a leaf in the component -- data structure function IsRecordVarOrComponent (ComponentData : ComponentManager.ComponentData; Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return ((Dictionary.Is_Variable (Sym) and then Dictionary.TypeIsRecord (Dictionary.GetType (Sym))) or else Dictionary.IsSubcomponent (Sym)) and then ComponentManager.HasChildren (ComponentData, ComponentManager.GetComponentNode (ComponentData, Sym)); end IsRecordVarOrComponent; ----------------------------------------------------------------------- procedure ExpandSeq (ComponentData : in ComponentManager.ComponentData; TheSeq : in SeqAlgebra.Seq; TheHeap : in out Heap.HeapRecord) is ExpandedSeq : SeqAlgebra.Seq; LeafSeq : SeqAlgebra.Seq; NextMember, CurrentMember : SeqAlgebra.MemberOfSeq; Sym : Dictionary.Symbol; begin -- ExpandSeq SeqAlgebra.CreateSeq (TheHeap, ExpandedSeq); CurrentMember := SeqAlgebra.FirstMember (TheHeap, TheSeq); while not SeqAlgebra.IsNullMember (CurrentMember) loop NextMember := SeqAlgebra.NextMember (TheHeap, CurrentMember); Sym := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => CurrentMember))); if IsRecordVarOrComponent (ComponentData, Sym) then ComponentManager.GetLeaves (TheHeap, ComponentData, ComponentManager.GetComponentNode (ComponentData, Sym), -- to get LeafSeq); SeqAlgebra.AugmentSeq (TheHeap, ExpandedSeq, -- with LeafSeq); SeqAlgebra.RemoveMember (TheHeap, TheSeq, SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => CurrentMember)); end if; CurrentMember := NextMember; end loop; SeqAlgebra.AugmentSeq (TheHeap, TheSeq, -- with ExpandedSeq); SeqAlgebra.DisposeOfSeq (TheHeap, ExpandedSeq); end ExpandSeq; --------------------------------- procedure ExpandToComponentEntities (ComponentData : in ComponentManager.ComponentData; Table : in HashTable; TheHeap : in out Heap.HeapRecord) is KeyCell : Heap.Atom; FirstExportAtom : Heap.Atom; --------- function 
HasRelation (KeyCell : in Heap.Atom) return Boolean --# global in STree.Table; --# in TheHeap; is NodeType : SP_Symbols.SP_Symbol; begin NodeType := STree.Syntax_Node_Type (Node => STree.RefToNode (ExaminerConstants.RefType (Heap.AValue (TheHeap, KeyCell)))); return NodeType = SP_Symbols.assignment_statement or else NodeType = SP_Symbols.loop_parameter_specification or else NodeType = SP_Symbols.procedure_call_statement; end HasRelation; --------------- procedure ExpandRelation (FirstExportAtom : in Heap.Atom) --# global in ComponentData; --# in Dictionary.Dict; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives Statistics.TableUsage, --# TheHeap from *, --# ComponentData, --# Dictionary.Dict, --# FirstExportAtom, --# TheHeap; is CurrentExportAtom, NextExportAtom : Heap.Atom; Dependencies : SeqAlgebra.Seq; CurrentExportSym : Dictionary.Symbol; ------------ procedure ExpandExportList (CurrentExportAtom, NextExportAtom : in Heap.Atom; CurrentExportSym : in Dictionary.Symbol; Dependencies : in SeqAlgebra.Seq) --# global in ComponentData; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives Statistics.TableUsage, --# TheHeap from *, --# ComponentData, --# CurrentExportAtom, --# CurrentExportSym, --# Dependencies, --# NextExportAtom, --# TheHeap; is ExportLeaves : SeqAlgebra.Seq; CurrentExportMember : SeqAlgebra.MemberOfSeq; CurrentExportAtomLocal : Heap.Atom; NewAtom : Heap.Atom; begin CurrentExportAtomLocal := CurrentExportAtom; ComponentManager.GetLeaves (TheHeap, ComponentData, ComponentManager.GetComponentNode (ComponentData, CurrentExportSym), -- to get ExportLeaves); -- replace cell value of current export with first component CurrentExportMember := SeqAlgebra.FirstMember (TheHeap, ExportLeaves); Heap.UpdateAValue (TheHeap, CurrentExportAtomLocal, -- with SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => CurrentExportMember)); CurrentExportMember := SeqAlgebra.NextMember (TheHeap, CurrentExportMember); while not SeqAlgebra.IsNullMember (CurrentExportMember) loop -- link in other expanded export leaves -- first create new export cell Heap.CreateAtom (TheHeap, NewAtom); Heap.UpdateAValue (TheHeap, NewAtom, -- with SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => CurrentExportMember)); -- link it to the dependencies Heap.UpdateBPointer (TheHeap, NewAtom, Heap.Atom (SeqAlgebra.SeqToNatural (Dependencies))); -- link it into the sequence of exports Heap.UpdateAPointer (TheHeap, CurrentExportAtomLocal, NewAtom); Heap.UpdateAPointer (TheHeap, NewAtom, NextExportAtom); CurrentExportAtomLocal := NewAtom; -- get next expanded export leaf CurrentExportMember := SeqAlgebra.NextMember (TheHeap, CurrentExportMember); end loop; SeqAlgebra.DisposeOfSeq (TheHeap, ExportLeaves); end ExpandExportList; begin -- ExpandRelation CurrentExportAtom := FirstExportAtom; while not Heap.IsNullPointer (CurrentExportAtom) loop NextExportAtom := NextExport (TheHeap, CurrentExportAtom); Dependencies := DependencyList (TheHeap, CurrentExportAtom); ExpandSeq (ComponentData, Dependencies, TheHeap); CurrentExportSym := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (Heap.AValue (TheHeap, CurrentExportAtom))); if IsRecordVarOrComponent (ComponentData, CurrentExportSym) then -- export needs expanding as well as import ExpandExportList (CurrentExportAtom, NextExportAtom, CurrentExportSym, Dependencies); end if; CurrentExportAtom := NextExportAtom; end loop; end ExpandRelation; --------------- begin -- ExpandToComponentEntities for I in HashIndex loop if Table (I) /= 0 then -- there 
is an entry to be processed KeyCell := Heap.Atom (Table (I)); loop -- to process all cells that may be in clash list if HasRelation (KeyCell) then FirstExportAtom := Heap.BPointer (TheHeap, KeyCell); if not Heap.IsNullPointer (FirstExportAtom) then ExpandRelation (FirstExportAtom); end if; else -- just a sequence of imports ExpandSeq (ComponentData, SeqAlgebra.NaturalToSeq (Natural (Heap.BPointer (TheHeap, KeyCell))), TheHeap); end if; KeyCell := Heap.APointer (TheHeap, KeyCell); exit when Heap.IsNullPointer (KeyCell); end loop; end if; end loop; end ExpandToComponentEntities; end RefList; spark-2012.0.deb/examiner/sem-check_protected_modifier_consistency.adb0000644000175000017500000000424311753202336025133 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) procedure Check_Protected_Modifier_Consistency (The_Type : in Dictionary.Symbol; Modifier_Is_Protected : in Boolean; Error_Node : in STree.SyntaxNode; Consistent : out Boolean) is begin Consistent := True; if Dictionary.Is_Declared (Item => The_Type) or else Dictionary.IsPredefined (The_Type) then if (Dictionary.IsProtectedTypeMark (The_Type) or else Dictionary.IsPredefinedSuspensionObjectType (The_Type)) and then not Modifier_Is_Protected then -- If the announced type is a PO or SO then the protected modifier -- should be present. Consistent := False; elsif Modifier_Is_Protected and then not Dictionary.IsAtomic (The_Type) then -- If the announced type is not atomic then the protected modifier -- should be not be present. Consistent := False; end if; end if; if not Consistent then ErrorHandler.Semantic_Error (Err_Num => 855, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Error_Node), Id_Str => Node_Lex_String (Node => Error_Node)); end if; end Check_Protected_Modifier_Consistency; spark-2012.0.deb/examiner/errorhandler-conversions-tostring-stabilityerror.adb0000644000175000017500000001434611753202336026711 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (ErrorHandler.Conversions.ToString) procedure StabilityError (Err_Num : in Error_Types.NumericError; With_Explanation : in Boolean; E_Str : in out E_Strings.T) is Stab_Index : ErrorHandler.Index_Type; Stab_Typ : ErrorHandler.Stability_Err_Type; procedure StabilityErrorExpl (E_Str : in out E_Strings.T) --# global in Stab_Typ; --# derives E_Str from *, --# Stab_Typ; is separate; -- Note that the parameter names for this subunit are chosen to make it as easy as -- possible to auto-generate the subunit from this, its parent, file. The -- generation requires copying the case statement below, stripping out the -- current Append'Thing' statements and adding an Append_String for the -- explanatory text that is delineated by --! comments. procedure Append_Explanation --# global in Stab_Typ; --# in With_Explanation; --# in out E_Str; --# derives E_Str from *, --# Stab_Typ, --# With_Explanation; is Explanation_String : E_Strings.T := E_Strings.Empty_String; begin if With_Explanation then -- we need to at least look for an explanation StabilityErrorExpl (E_Str => Explanation_String); if E_Strings.Get_Length (E_Str => Explanation_String) > 0 then -- there actually is one E_Strings.Append_String (E_Str => E_Str, Str => ErrorHandler.Explanation_Prefix); E_Strings.Append_Examiner_String (E_Str1 => E_Str, E_Str2 => Explanation_String); E_Strings.Append_String (E_Str => E_Str, Str => ErrorHandler.Explanation_Postfix); end if; end if; end Append_Explanation; begin Stab_Index := ErrorHandler.Index_Type'Val (Err_Num.Name1.Pos); Stab_Typ := ErrorHandler.Stability_Err_Type'Val (Err_Num.ErrorNum - Error_Types.StabilityErrOffset); case Stab_Typ is -- HTML Directives --! <"flow-"> --! <"!!! Flow Error : "><" : "> when ErrorHandler.Stable_Exit_Cond => --! 40 E_Strings.Append_String (E_Str => E_Str, Str => "Exit condition is stable, of index"); --! Exit condition is stable, of index 0 --! Exit condition is stable, of index 1 --! Exit condition is stable, of index greater than 1 --! In these cases the (loop) exit condition occurs in an iteration scheme, --! an exit statement, or an if-statement whose (unique) sequence of --! statements ends with an unconditional exit statement - see the SPARK --! Definition. The concept of loop stability is explained in Section --! 4.4 of Appendix A. A loop exit condition which is stable of index 0 --! takes the same value at every iteration around the loop, and with a --! stability index of 1, it always takes the same value after the first --! iteration. Stability with indices greater --! than 0 does not necessarily indicate a program error, but the --! conditions for loop termination require careful consideration when ErrorHandler.Stable_Fork_Cond => --! 41 E_Strings.Append_String (E_Str => E_Str, Str => "Expression is stable, of index"); --! Expression is stable, of index 0 --! Expression is stable, of index 1 --! Expression is stable, of index greater than 1 --! The expression, occurring within a loop, is either a case expression --! or a condition (Boolean-valued expression) associated with an --! if-statement, whose value determines the path taken through the body --! of the loop, but does not (directly) cause loop termination. 
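-- A minimal illustration (hypothetical fragment): in
--    X := 5; loop Y := Y + 1; exit when X > 3; end loop;
-- the exit condition X > 3 depends only on X, which is never updated inside the loop,
-- so it is stable of index 0: whether the loop can ever exit via this condition is
-- already fixed before the loop is entered.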
--! Information flow analysis shows that the expression does not vary --! as the loop is executed, so the same branch of the case or if statement will --! be taken on every loop iteration. An Index of 0 means that the expression is --! immediately stable, 1 means it becomes stable after the first pass through the loop and so on. --! The stability index is given with reference to the loop most --! closely-containing the expression. Stable conditionals are not necessarily --! an error but do require careful evaluation; they can often be removed by lifting them --! outside the loop. end case; case Stab_Index is when ErrorHandler.Index_Zero => E_Strings.Append_String (E_Str => E_Str, Str => " 0"); when ErrorHandler.Index_One => E_Strings.Append_String (E_Str => E_Str, Str => " 1"); when ErrorHandler.Larger_Index => E_Strings.Append_String (E_Str => E_Str, Str => " greater than 1"); end case; Append_Explanation; E_Strings.Append_String (E_Str => E_Str, Str => "."); end StabilityError; spark-2012.0.deb/examiner/sem-convert_tagged_actual.adb0000644000175000017500000000330511753202336022030 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) function Convert_Tagged_Actual (Actual, Tagged_Parameter_Sym : Dictionary.Symbol) return Dictionary.Symbol is ActualLocal : Dictionary.Symbol; begin ActualLocal := Actual; loop -- normal exit: successful conversion exit when Dictionary.Types_Are_Equal (Left_Symbol => Dictionary.GetType (ActualLocal), Right_Symbol => Tagged_Parameter_Sym, Full_Range_Subtype => False); -- abnormal exit: can't follow inherited field pointers because type is hidden exit when Dictionary.Is_Null_Symbol (Dictionary.GetFirstRecordSubcomponent (ActualLocal)); ActualLocal := Dictionary.GetFirstRecordSubcomponent (ActualLocal); end loop; return ActualLocal; end Convert_Tagged_Actual; spark-2012.0.deb/examiner/sem-walk_expression_p-walk_expression.adb0000644000175000017500000010652211753202336024460 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Debug; with E_Strings; with SPrint; separate (Sem.Walk_Expression_P) procedure Walk_Expression (Exp_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Type_Context : in Dictionary.Symbol; Context_Requires_Static : in Boolean; Ref_Var : in SeqAlgebra.Seq; Result : out Sem.Exp_Record; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) is Next_Node, Last_Node, Local_Node : STree.SyntaxNode; Node_Type : SP_Symbols.SP_Symbol; L_Heap : Lists.List_Heap; Val : Maths.Value; E_Stack : Exp_Stack.Exp_Stack_Type; T_Stack : Type_Context_Stack.T_Stack_Type; Sym : Dictionary.Symbol; String_Value : LexTokenManager.Lex_String; -------------------------------------------------------------- -- Procedures for debugging Expression syntax and tree walking -- These are hidden with "derives ;" so as not to pollute the -- annotations -------------------------------------------------------------- procedure Dump_Syntax_Tree --# derives ; is --# hide Dump_Syntax_Tree; begin if CommandLineData.Content.Debug.Expressions then SPrint.Dump_Syntax_Tree (Exp_Node, 0); end if; end Dump_Syntax_Tree; procedure Dump_Down_Node --# derives ; is --# hide Dump_Down_Node; begin if CommandLineData.Content.Debug.Expressions then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Walk_Expression DOWN encounters node" & ExaminerConstants.RefType'Image (STree.NodeToRef (Last_Node)) & ' ' & SP_Symbols.SP_Symbol'Image (STree.Syntax_Node_Type (Node => Last_Node)), 0); end if; end Dump_Down_Node; procedure Dump_Up_Node --# derives ; is --# hide Dump_Up_Node; begin if CommandLineData.Content.Debug.Expressions then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Walk_Expression UP encounters node" & ExaminerConstants.RefType'Image (STree.NodeToRef (Last_Node)) & ' ' & SP_Symbols.SP_Symbol'Image (STree.Syntax_Node_Type (Node => Last_Node)), 0); end if; end Dump_Up_Node; procedure Dump_Result --# derives ; is --# hide Dump_Result; begin if CommandLineData.Content.Debug.Expressions then if Result.Is_Constant then SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Walk_Expression constant result is ", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Maths.ValueToString (Result.Value)); else SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Walk_Expression result is not constant", 0); end if; SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Walk_Expression Result is: ", 0); Put_Exp_Record (Result); end if; end Dump_Result; begin -- Walk_Expression Dump_Syntax_Tree; Exp_Stack.Init (Stack => E_Stack); Aggregate_Stack.Init; Lists.Init (L_Heap); Type_Context_Stack.Init (Stack => T_Stack); Type_Context_Stack.Push (X => Type_Context, Stack => T_Stack); Next_Node := Exp_Node; loop --# assert STree.Table = STree.Table~ and --# Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State) and --# Type_Context_Stack.Stack_Is_Valid (T_Stack); Last_Node := Next_Node; Node_Type := STree.Syntax_Node_Type (Node => Last_Node); Dump_Down_Node; case Node_Type is when SP_Symbols.character_literal => -- ASSUME Last_Node = character_literal Exp_Stack.Push (X => Sem.Exp_Record'(Type_Symbol => Dictionary.GetPredefinedCharacterType, Other_Symbol => Dictionary.NullSymbol, Stream_Symbol => 
Dictionary.NullSymbol, Tagged_Parameter_Symbol => Dictionary.NullSymbol, Variable_Symbol => Dictionary.NullSymbol, Param_Count => 0, Param_List => Lists.Null_List, Sort => Sem.Type_Result, Arg_List_Found => False, Is_AVariable => False, Is_An_Entire_Variable => False, Errors_In_Expression => False, Has_Operators => False, Is_Constant => True, Is_Static => True, Is_ARange => False, String_Value => LexTokenManager.Null_String, Value => Get_Character_Literal (Node => Last_Node), Range_RHS => Maths.NoValue), Stack => E_Stack); Next_Node := STree.NullNode; when SP_Symbols.string_literal => -- ASSUME Last_Node = string_literal String_Value := STree.Node_Lex_String (Node => Last_Node); --# accept F, 41, "Stable expression here OK"; if CommandLineData.Content.Debug.Expressions then Debug.Print_Lex_Str ("Walk_Expression pushing string literal with value: ", String_Value); end if; --# end accept; Exp_Stack.Push (X => Sem.Exp_Record'(Type_Symbol => Dictionary.GetPredefinedStringType, Other_Symbol => Dictionary.NullSymbol, Stream_Symbol => Dictionary.NullSymbol, Tagged_Parameter_Symbol => Dictionary.NullSymbol, Variable_Symbol => Dictionary.NullSymbol, Param_Count => 0, Param_List => Lists.Null_List, Sort => Sem.Type_Result, Arg_List_Found => False, Is_AVariable => False, Is_An_Entire_Variable => False, Errors_In_Expression => False, Has_Operators => False, Is_Constant => True, Is_Static => (CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83), Is_ARange => False, String_Value => String_Value, Value => Maths.NoValue, Range_RHS => Get_String_Literal_Length (Str => String_Value)), Stack => E_Stack); Next_Node := STree.NullNode; when SP_Symbols.numeric_literal => -- ASSUME Last_Node = numeric_literal Local_Node := STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => Last_Node)); -- ASSUME Local_Node = integer_number OR real_number OR based_integer OR based_real if STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.real_number then -- ASSUME Local_Node = real_number Sem.Get_Literal_Value (Node => Local_Node, Val => Val); Exp_Stack.Push (X => Sem.Exp_Record'(Type_Symbol => Dictionary.GetUniversalRealType, Other_Symbol => Dictionary.NullSymbol, Stream_Symbol => Dictionary.NullSymbol, Tagged_Parameter_Symbol => Dictionary.NullSymbol, Variable_Symbol => Dictionary.NullSymbol, Param_Count => 0, Param_List => Lists.Null_List, Sort => Sem.Type_Result, Arg_List_Found => False, Is_AVariable => False, Is_An_Entire_Variable => False, Errors_In_Expression => False, Has_Operators => False, Is_Constant => True, Is_Static => True, Is_ARange => False, String_Value => LexTokenManager.Null_String, Value => Val, Range_RHS => Maths.NoValue), Stack => E_Stack); elsif STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.integer_number or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.based_integer or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.based_real then -- ASSUME Local_Node = integer_number OR based_integer OR based_real Sem.Get_Literal_Value (Node => Local_Node, Val => Val); Exp_Stack.Push (X => Sem.Exp_Record'(Type_Symbol => Dictionary.GetUniversalIntegerType, Other_Symbol => Dictionary.NullSymbol, Stream_Symbol => Dictionary.NullSymbol, Tagged_Parameter_Symbol => Dictionary.NullSymbol, Variable_Symbol => Dictionary.NullSymbol, Param_Count => 0, Param_List => Lists.Null_List, Sort => Sem.Type_Result, Arg_List_Found => False, Is_AVariable => False, Is_An_Entire_Variable => False, Errors_In_Expression => False, Has_Operators => False, Is_Constant 
=> True, Is_Static => True, Is_ARange => False, String_Value => LexTokenManager.Null_String, Value => Val, Range_RHS => Maths.NoValue), Stack => E_Stack); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = integer_number OR real_number OR based_integer OR based_real in Walk_Expression"); end if; Next_Node := STree.NullNode; when SP_Symbols.selector => -- ASSUME Last_Node = selector Next_Node := STree.NullNode; when SP_Symbols.simple_name => -- ASSUME Last_Node = simple_name Local_Node := STree.Parent_Node (Current_Node => Last_Node); -- ASSUME Local_Node = name OR named_argument_association OR selector OR label OR loop_statement OR exit_statement OR -- procedure_specification OR return_expression OR own_variable OR proof_renaming_declaration OR -- renaming_declaration OR parent_unit_name OR enumeration_representation_clause OR -- record_representation_clause OR component_clause OR at_clause if STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.named_argument_association then -- ASSUME Local_Node = named_argument_association -- do not look at identifier in this case Next_Node := STree.NullNode; elsif STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.name or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.selector or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.label or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.loop_statement or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.exit_statement or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.procedure_specification or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.return_expression or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.own_variable or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.proof_renaming_declaration or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.renaming_declaration or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.parent_unit_name or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.enumeration_representation_clause or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.record_representation_clause or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.component_clause or else STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.at_clause then -- ASSUME Local_Node = name OR selector OR label OR loop_statement OR exit_statement OR -- procedure_specification OR return_expression OR own_variable OR proof_renaming_declaration OR -- renaming_declaration OR parent_unit_name OR enumeration_representation_clause OR -- record_representation_clause OR component_clause OR at_clause Next_Node := STree.Child_Node (Current_Node => Last_Node); -- ASSUME Next_Node = identifier SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = identifier in Walk_Expression"); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Local_Node = name OR named_argument_association OR selector OR " & "label OR loop_statement OR exit_statement OR " & "procedure_specification OR return_expression OR own_variable OR proof_renaming_declaration OR " & "renaming_declaration OR parent_unit_name OR enumeration_representation_clause OR " & "record_representation_clause OR component_clause OR at_clause in Walk_Expression"); end if; when SP_Symbols.identifier => -- 
ASSUME Last_Node = identifier Wf_Identifier (Node => Last_Node, Scope => Scope, E_Stack => E_Stack, The_Heap => The_Heap, Ref_Var => Ref_Var, Context => Sem.Code); Next_Node := STree.NullNode; when SP_Symbols.name_argument_list => -- ASSUME Last_Node = name_argument_list Down_Wf_Name_Argument_List (Node => Last_Node, Scope => Scope, E_Stack => E_Stack, Heap_Param => L_Heap, Next_Node => Next_Node); when SP_Symbols.aggregate => -- ASSUME Last_Node = aggregate Down_Wf_Aggregate (Node => Last_Node, Scope => Scope, Next_Node => Next_Node, E_Stack => E_Stack, Heap_Param => L_Heap); when SP_Symbols.aggregate_choice_rep => -- ASSUME Last_Node = aggregate_choice_rep Wf_Aggregate_Choice_Rep (Node => Last_Node, Scope => Scope, E_Stack => E_Stack, Heap_Param => L_Heap, Next_Node => Next_Node); when SP_Symbols.record_component_selector_name => -- ASSUME Last_Node = record_component_selector_name Wf_Record_Component_Selector_Name (Node => Last_Node, Scope => Scope, E_Stack => E_Stack, Heap_Param => L_Heap); Next_Node := STree.NullNode; when SP_Symbols.aggregate_or_expression => -- ASSUME Last_Node = aggregate_or_expression Down_Wf_Aggregate_Or_Expression (Node => Last_Node, E_Stack => E_Stack, Next_Node => Next_Node); when SP_Symbols.attribute_designator => -- ASSUME Last_Node = attribute_designator Type_Context_Stack.Push (X => Attribute_Designator_Type_From_Context (Exp_Node => Last_Node, E_Stack => E_Stack, T_Stack => T_Stack), Stack => T_Stack); Next_Node := STree.Child_Node (Current_Node => Last_Node); -- ASSUME Next_Node = attribute_designator OR attribute_ident SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.attribute_designator or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.attribute_ident, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = attribute_designator OR attribute_ident in Walk_Expression"); when SP_Symbols.range_constraint => -- ASSUME Last_Node = range_constraint Type_Context_Stack.Push (X => Range_Constraint_Type_From_Context (Exp_Node => Last_Node, E_Stack => E_Stack, T_Stack => T_Stack), Stack => T_Stack); Next_Node := STree.Child_Node (Current_Node => Last_Node); -- ASSUME Next_Node = arange SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.arange, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = arange in Walk_Expression"); when SP_Symbols.simple_expression => -- ASSUME Last_Node = simple_expression Type_Context_Stack.Push (X => Simple_Expression_Type_From_Context (Exp_Node => Last_Node, T_Stack => T_Stack), Stack => T_Stack); Next_Node := STree.Child_Node (Current_Node => Last_Node); -- ASSUME Next_Node = simple_expression OR simple_expression_opt SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.simple_expression or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.simple_expression_opt, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = simple_expression OR simple_expression_opt in Walk_Expression"); when SP_Symbols.expression => -- ASSUME Last_Node = expression Expression_Type_From_Context (Exp_Node => Last_Node, E_Stack => E_Stack, T_Stack => T_Stack, New_Context_Type => Sym); Type_Context_Stack.Push (X => Sym, Stack => T_Stack); Next_Node := STree.Child_Node (Current_Node => Last_Node); -- ASSUME Next_Node = relation SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.relation, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect 
Next_Node = relation in Walk_Expression"); when SP_Symbols.primary => -- ASSUME Last_Node = primary Type_Context_Stack.Push (X => Primary_Type_From_Context (Node => Last_Node, T_Stack => T_Stack), Stack => T_Stack); Next_Node := STree.Child_Node (Current_Node => Last_Node); -- ASSUME Next_Node = numeric_literal OR character_literal OR string_literal OR name OR -- qualified_expression OR expression OR aattribute SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.numeric_literal or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.character_literal or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.string_literal or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.name or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.qualified_expression or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.expression or else STree.Syntax_Node_Type (Node => Next_Node) = SP_Symbols.attribute, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = numeric_literal OR character_literal OR string_literal OR name OR " & "qualified_expression OR expression OR attribute in Walk_Expression"); when others => if Node_Type in SP_Symbols.SP_Non_Terminal then Next_Node := STree.Child_Node (Current_Node => Last_Node); else Next_Node := STree.NullNode; end if; end case; -------------------------------------------------up loop---------- if Next_Node = STree.NullNode then loop --# assert STree.Table = STree.Table~ and --# Aggregate_Stack.Stack_Is_Valid (Aggregate_Stack.State) and --# Type_Context_Stack.Stack_Is_Valid (T_Stack); Next_Node := STree.Next_Sibling (Current_Node => Last_Node); exit when Next_Node /= STree.NullNode; -- new branch to right Next_Node := STree.Parent_Node (Current_Node => Last_Node); Last_Node := Next_Node; Dump_Up_Node; case STree.Syntax_Node_Type (Node => Last_Node) is when SP_Symbols.expression => -- ASSUME Last_Node = expression Wf_Expression (Node => Last_Node, Scope => Scope, E_Stack => E_Stack, T_Stack => T_Stack); Type_Context_Stack.Pop (Stack => T_Stack); when SP_Symbols.expression_rep1 | SP_Symbols.expression_rep2 | SP_Symbols.expression_rep3 | SP_Symbols.expression_rep4 | SP_Symbols.expression_rep5 => -- ASSUME Last_Node = expression_rep1 OR expression_rep2 OR expression_rep3 OR expression_rep4 OR expression_rep5 Wf_Expression (Node => Last_Node, Scope => Scope, E_Stack => E_Stack, T_Stack => T_Stack); when SP_Symbols.simple_expression => -- ASSUME Last_Node = simple_expression Wf_Simple_Expression (Node => Last_Node, Scope => Scope, E_Stack => E_Stack, T_Stack => T_Stack, Context_Requires_Static => Context_Requires_Static); Type_Context_Stack.Pop (Stack => T_Stack); when SP_Symbols.simple_expression_opt => -- ASSUME Last_Node = simple_expression_opt Wf_Simple_Expression_Opt (Node => Last_Node, Scope => Scope, E_Stack => E_Stack, T_Stack => T_Stack); when SP_Symbols.term => -- ASSUME Last_Node = term Wf_Term (Node => Last_Node, Scope => Scope, E_Stack => E_Stack, T_Stack => T_Stack, Context_Requires_Static => Context_Requires_Static); when SP_Symbols.factor => -- ASSUME Last_Node = factor Wf_Factor (Node => Last_Node, Scope => Scope, E_Stack => E_Stack, T_Stack => T_Stack); when SP_Symbols.relation => -- ASSUME Last_Node = relation Wf_Relation (Node => Last_Node, Scope => Scope, E_Stack => E_Stack, T_Stack => T_Stack); when SP_Symbols.range_constraint => -- ASSUME Last_Node = range_constraint Type_Context_Stack.Pop (Stack => T_Stack); when SP_Symbols.arange => -- ASSUME Last_Node = 
arange Wf_Arange (Node => Last_Node, Scope => Scope, E_Stack => E_Stack); when SP_Symbols.selected_component => -- ASSUME Last_Node = selected_component Wf_Selected_Component (Node => Last_Node, Scope => Scope, Ref_Var => Ref_Var, E_Stack => E_Stack, Component_Data => Component_Data, The_Heap => The_Heap, Context => Sem.Code); when SP_Symbols.attribute => -- ASSUME Last_Node = attribute Wf_Attribute (E_Stack => E_Stack); when SP_Symbols.attribute_designator => -- ASSUME Last_Node = attribute_designator Wf_Attribute_Designator (Node => Last_Node, Scope => Scope, E_Stack => E_Stack, The_Heap => The_Heap, Ref_Var => Ref_Var); Type_Context_Stack.Pop (Stack => T_Stack); when SP_Symbols.primary => -- ASSUME Last_Node = primary Wf_Primary (Node => Last_Node, Scope => Scope, Ref_Var => Ref_Var, E_Stack => E_Stack, Component_Data => Component_Data, The_Heap => The_Heap); Type_Context_Stack.Pop (Stack => T_Stack); when SP_Symbols.positional_argument_association => -- ASSUME Last_Node = positional_argument_association Wf_Positional_Argument_Association (Node => Last_Node, Scope => Scope, Ref_Var => Ref_Var, E_Stack => E_Stack, Component_Data => Component_Data, The_Heap => The_Heap); when SP_Symbols.named_argument_association => -- ASSUME Last_Node = named_argument_association Wf_Named_Argument_Association (Node => Last_Node, Scope => Scope, E_Stack => E_Stack, Heap_Param => L_Heap); when SP_Symbols.name_argument_list => -- ASSUME Last_Node = name_argument_list Up_Wf_Name_Argument_List (Node => Last_Node, E_Stack => E_Stack, Heap_Param => L_Heap); when SP_Symbols.ancestor_part => -- ASSUME Last_Node = ancestor_part Wf_Ancestor_Part (Node => Last_Node, Scope => Scope, E_Stack => E_Stack, Heap_Param => L_Heap); when SP_Symbols.aggregate_choice => -- ASSUME Last_Node = aggregate_choice Wf_Aggregate_Choice (Node => Last_Node, Scope => Scope, E_Stack => E_Stack); when SP_Symbols.named_association_rep => -- ASSUME Last_Node = named_association_rep Wf_Named_Association_Rep (Node => Last_Node, Scope => Scope, E_Stack => E_Stack, Heap_Param => L_Heap); when SP_Symbols.named_record_component_association => -- ASSUME Last_Node = named_record_component_association Wf_Named_Record_Component_Association (Node => Last_Node, Scope => Scope, E_Stack => E_Stack, Heap_Param => L_Heap); when SP_Symbols.positional_association | SP_Symbols.record_component_association => -- ASSUME Last_Node = positional_association OR record_component_association Wf_Positional_Association (Node => Last_Node, E_Stack => E_Stack); when SP_Symbols.aggregate_or_expression => -- ASSUME Last_Node = aggregate_or_expression Up_Wf_Aggregate_Or_Expression (Node => Last_Node, Scope => Scope, E_Stack => E_Stack); when SP_Symbols.positional_record_component_association => -- ASSUME Last_Node = positional_record_component_association Wf_Positional_Record_Component_Association (Node => Last_Node, Scope => Scope, E_Stack => E_Stack); when SP_Symbols.component_association => -- ASSUME Last_Node = component_association Wf_Component_Association (Node => Last_Node, Scope => Scope, E_Stack => E_Stack); when SP_Symbols.aggregate | SP_Symbols.extension_aggregate => -- ASSUME Last_Node = aggregate OR extension_aggregate Up_Wf_Aggregate (Node => Last_Node, Scope => Scope, E_Stack => E_Stack); when SP_Symbols.qualified_expression => -- ASSUME Last_Node = qualified_expression Wf_Qualified_Expression (Node => Last_Node, Scope => Scope, E_Stack => E_Stack); when others => null; end case; exit when Next_Node = Exp_Node; -- got back to top end loop; -- up end if; exit 
when Next_Node = Exp_Node; -- met start point on way up end loop; -- down if not Exp_Stack.Has_One_Entry (Stack => E_Stack) then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Expression_Stack_Corrupt, Msg => "in Walk_Expression"); end if; if not (Type_Context_Stack.Has_One_Entry (Stack => T_Stack) and then Dictionary.Types_Are_Equal (Left_Symbol => Type_Context_Stack.Top (Stack => T_Stack), Right_Symbol => Type_Context, Full_Range_Subtype => False)) then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Type_Context_Stack_Corrupt, Msg => "in Walk_Expression"); end if; --# accept Flow, 10, E_Stack, "Expected ineffective assignment"; Exp_Stack.Pop (Item => Result, Stack => E_Stack); --# end accept; Dump_Result; end Walk_Expression; ././@LongLink0000000000000000000000000000016200000000000011564 Lustar rootrootspark-2012.0.deb/examiner/errorhandler-conversions-tostring-warningwithoutposition-warningwithoutpositionexpl.adbspark-2012.0.deb/examiner/errorhandler-conversions-tostring-warningwithoutposition-warningwithoutpos0000644000175000017500000002364411753202337033577 0ustar eugeneugenseparate (ErrorHandler.Conversions.ToString.WarningWithoutPosition) procedure WarningWithoutPositionExpl (E_Str : in out E_Strings.T) is begin case Err_Num.ErrorNum is when 9 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when a --# hide XXX annotation is used to hide a user-defined exception handler. (warning control file" & " keyword: handler_parts)"); when 10 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when a --# hide XXX annotation is used. (warning control file keyword: hidden_parts)"); when 400 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when a variable declared in a subprogram is neither" & " referenced, nor updated." & " (warning control file keyword: unused_variables)"); when 402 => E_Strings.Append_String (E_Str => E_Str, Str => "In order to prove properties of code containing loops, the" & " loop must be ""cut"" with" & " a suitable assertion statement. When generating run-time checks," & " the Examiner" & " inserts a simple assertion to cut any loops which do not have one" & " supplied" & " by the user. The assertion is placed at the point where this" & " warning appears in" & " the listing file. The default assertion asserts that the" & " subprogram's precondition" & " (if any) is satisfied, that all imports to it are in their" & " subtypes and that any for" & " loop counter is in its subtype. In many cases this provides" & " sufficient information" & " to complete a proof of absence of run-time errors. If more" & " information is required," & " then the user can supply an assertion and the Examiner will" & " append the above information" & " to it. (warning control file keyword: default_loop_assertions)"); when 403 => E_Strings.Append_String (E_Str => E_Str, Str => "XXX is a variable which was initialized at declaration but" & " whose value is only ever" & " read not updated; it could therefore have been declared as" & " a constant. (warning control" & " file keyword: constant_variables)"); when 405 => E_Strings.Append_String (E_Str => E_Str, Str => "The Examiner generates VCs associated with" & " real numbers using perfect arithmetic rather than the machine" & " approximations used on the" & " target platform. It is possible that rounding errors might" & " cause a Constraint_Error even" & " if these run-time check proofs are completed satisfactorily." 
& " (warning control file keyword: real_rtcs)"); when 406 => E_Strings.Append_String (E_Str => E_Str, Str => "This message is echoed to the screen if the Examiner is unable" & " to create output files for the VCs being generated" & " (for instance, if the user does not have write" & " permission for the output directory)."); when 407 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued where SPARK own variable and initialization annotations" & " make it clear that a" & " package requires a body but where no Ada requirement for a body" & " exists."); when 408 => E_Strings.Append_String (E_Str => E_Str, Str => "Semantic errors prevent VC Generation, so a single False VC" & " is produced. This will be detected and reported by POGS."); when 409 => E_Strings.Append_String (E_Str => E_Str, Str => "A subprogram which has excessive complexity of data structure" & " or number of paths may cause the VC Generator to exceed its capacity." & " A single False VC is generated in this case to make sure this" & " error is detected in subsequent proof and analysis with POGS"); when 410 => E_Strings.Append_String (E_Str => E_Str, Str => "Partition-wide flow analysis is performed by checking all" & " packages withed by the main program for" & " tasks and interrupt handlers and constructing an overall flow" & " relation that captures their cumulative" & " effect. It is for this reason that SPARK requires task and" & " protected types to be declared in package" & " specifications. If a task or protected type which contains" & " an interrupt handler, is hidden from the" & " Examiner (in a hidden package private part) or contains errors" & " in it specification, the partition-wide" & " flow analysis cannot be" & " constructed correctly and is therefore suppressed. Correct the" & " specification of the affected tasks" & " and (temporarily if desired) make them visible to the Examiner."); when 411 => E_Strings.Append_String (E_Str => E_Str, Str => "The Examiner checks that there is no potential sharing of" & " unprotected data between tasks. If a task type" & " is hidden from the Examiner in a hidden package private" & " part, then it is not possible to check whether that" & " task may share unprotected data."); when 412 => E_Strings.Append_String (E_Str => E_Str, Str => "The Examiner checks that no more than one task can suspend on" & " a single object. If a task" & " is hidden from the Examiner in a hidden package private part," & " then it is not possible to check whether that" & " task may suspend on the same object as another task."); when 413 => E_Strings.Append_String (E_Str => E_Str, Str => "The Examiner checks that no more than one task can suspend on a" & " single object and that there is no" & " potential sharing of unprotected data between tasks. These checks" & " depend on the accuracy of the annotations" & " on the task types withed by the main program. If these annotations" & " contain errors, then any reported" & " violations of the shared variable and max-one-in-a-queue checks will" & " be correct; however, the check" & " may be incomplete. The errors in the task annotations should be corrected."); when 414 => E_Strings.Append_String (E_Str => E_Str, Str => "Raised if an output file name is longer than the" & " limit imposed by the operating system and has been truncated." & " Section 4.7 of the Examiner User Manual describes how the output file names" & " are constructed. 
If this message is seen there is a possibility" & " that the output from two" & " or more subprograms will be written to the same file name," & " if they have a sufficiently large number of characters in common."); when 420 => E_Strings.Append_String (E_Str => E_Str, Str => "In release 7.5 of the Examiner, a flaw in the VC generation" & " was fixed such that subcomponents of records and elements of" & " arrays when used as ""out"" or ""in out""" & " parameters will now generate an" & " additional VC to verify absence of run-time errors. This warning" & " flags an instance of this occurrence. Please read the release" & " note and/or seek advice for assistance with this issue."); when 425 => E_Strings.Append_String (E_Str => E_Str, Str => "A code generator language profile such as KCG is in use" & " and so conditional flow errors may be present in the subprogram." & " Therefore the -vcg switch must be used to generate VCs and the VCs" & " related to definedness discharged using the proof tools."); when 426 => E_Strings.Append_String (E_Str => E_Str, Str => "A code generator language profile such as KCG allows a package body to" & " with its own public child which is not normally permitted in SPARK." & " The removal of this restriction means that the Examiner will not" & " detect mutual recursion between subprograms declared in the visible" & " parts of the package and its child. The code generator is expected" & " to guarantee the absence of recursion."); when 495 => E_Strings.Append_String (E_Str => E_Str, Str => "There is little that can be done to work around this as this" & " is a fundamental limitation of Windows. You could try one of the" & " following: Perform analysis higher up in the directory tree (i.e." & " in C:\a instead of C:\project_name\spark\analysis). You could try" & " remapping a directory to a new drive to do the same (google for subst)." & " You could try renaming or restructuring your program to flatten the" & " structure a bit. And finally you can perform analysis on a UNIX system" & " such as Mac OSX or GNU/Linux as they do not suffer from this problem."); when others => null; end case; end WarningWithoutPositionExpl; spark-2012.0.deb/examiner/errorhandler-conversions-tostring-note.adb0000644000175000017500000001235711753202336024600 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (ErrorHandler.Conversions.ToString) procedure Note (Err_Num : in Error_Types.NumericError; With_Explanation : in Boolean; E_Str : in out E_Strings.T) is procedure NoteExpl (E_Str : in out E_Strings.T) --# global in Err_Num; --# derives E_Str from *, --# Err_Num; is separate; -- Note that the parameter names for this subunit are chosen to make it as easy as -- possible to auto-generate the subunit from this, its parent, file. The -- generation requires copying the case statement below, stripping out the -- current Append'Thing' statements and adding an Append_String for the -- explanatory text that is delineated by --! comments. procedure Append_Explanation --# global in Err_Num; --# in With_Explanation; --# in out E_Str; --# derives E_Str from *, --# Err_Num, --# With_Explanation; is Explanation_String : E_Strings.T := E_Strings.Empty_String; begin if With_Explanation then -- we need to at least look for an explanation NoteExpl (E_Str => Explanation_String); if E_Strings.Get_Length (E_Str => Explanation_String) > 0 then -- there actually is one E_Strings.Append_String (E_Str => E_Str, Str => ErrorHandler.Explanation_Prefix); E_Strings.Append_Examiner_String (E_Str1 => E_Str, E_Str2 => Explanation_String); E_Strings.Append_String (E_Str => E_Str, Str => ErrorHandler.Explanation_Postfix); end if; end if; end Append_Explanation; begin -- HTML Directives --! <"note-"> --! <"--- note : "><" : "> case Err_Num.ErrorNum is when 1 => E_Strings.Append_String (E_Str => E_Str, Str => "This dependency relation was not used for this analysis and has not been checked for accuracy"); --! Issued when information flow analysis is not performed and when --! modes were specified in the global annotation. It is a reminder --! that the dependencies specified in this annotation --! (including whether each variable is an import or an export) have --! not been checked against the code, and may therefore be incorrect. --! (warning control file keyword: notes) when 2 => E_Strings.Append_String (E_Str => E_Str, Str => "This dependency relation has been used " & "only to identify imports and exports, " & "dependencies have been ignored"); --! Issued as a reminder when information flow analysis is not --! performed in SPARK 83. The dependencies specified in this annotation --! have not been checked against the code, and may --! therefore be incorrect. (warning control file keyword: notes) when 3 => E_Strings.Append_String (E_Str => E_Str, Str => "The deferred constant Null_Address has been implicitly defined here"); --! Issued as a reminder that the declaration of the type Address --! within the target configuration file --! implicitly defines a deferred constant of type Null_Address. --! (warning control file keyword: notes) when 4 => E_Strings.Append_String (E_Str => E_Str, Str => "The constant Default_Priority, of type Priority, has been implicitly defined here"); --! Issued as a reminder that the declaration of the subtype --! Priority within the target configuration file implicitly defines --! a constant Default_Priority, of type Priority, with the value --! (Priority'First + Priority'Last) / 2. --! 
(warning control file keyword: notes) when others => E_Strings.Append_String (E_Str => E_Str, Str => "UNKNOWN ERROR NUMBER PASSED TO Convert.Note"); end case; Append_Explanation; E_Strings.Append_String (E_Str => E_Str, Str => "."); end Note; spark-2012.0.deb/examiner/sem-wf_argument_association.adb0000644000175000017500000002203211753202336022414 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) procedure Wf_Argument_Association (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Param_Type : in Dictionary.Symbol; Position : in LexTokenManager.Token_Position; Exp_Result : in Exp_Record; Fun_Info : in out Exp_Record; Error_Found : in out Boolean) is Unused_Value : Maths.Value; ----------------------------------------------------------------------------- procedure Tagged_Actual_Must_Be_Object_Check (Node_Pos : in LexTokenManager.Token_Position; Formal_Type : in Dictionary.Symbol; Actual_Type : in Dictionary.Symbol; Controlling_Type : in Dictionary.Symbol; Is_A_Variable : in Boolean; Is_A_Constant : in Boolean; Error_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Actual_Type, --# CommandLineData.Content, --# Controlling_Type, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Formal_Type, --# Is_A_Constant, --# Is_A_Variable, --# LexTokenManager.State, --# Node_Pos, --# SPARK_IO.File_Sys & --# Error_Found from *, --# Actual_Type, --# Controlling_Type, --# Dictionary.Dict, --# Formal_Type, --# Is_A_Constant, --# Is_A_Variable; is separate; begin -- Wf_Argument_Association Range_Check (A_Range => Exp_Result.Is_ARange, Position => Position, Error_Found => Error_Found); -- function is deemed constant if it is predefined and all its parameters are constant. 
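   -- An illustrative sketch of that rule (hypothetical names, not part of this
   -- unit): for a predefined operation, constancy follows its operands, so
   --    abs (-3)   -- remains constant: predefined operation, constant operand
   --    abs (V)    -- not constant: V is a variable
   --    F (3)      -- not constant: F is a user-defined function
   -- a single non-constant argument is therefore enough to clear Is_Constant here.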
Fun_Info.Is_Constant := Fun_Info.Is_Constant and then Exp_Result.Is_Constant; if Dictionary.Types_Are_Equal (Left_Symbol => Fun_Info.Tagged_Parameter_Symbol, Right_Symbol => Exp_Result.Type_Symbol, Full_Range_Subtype => False) or else (Dictionary.Is_Null_Symbol (Fun_Info.Tagged_Parameter_Symbol) and then Dictionary.CompatibleTypes (Scope, Param_Type, Exp_Result.Type_Symbol)) or else (not Dictionary.IsAnExtensionOf (Exp_Result.Type_Symbol, Fun_Info.Tagged_Parameter_Symbol) and then Dictionary.CompatibleTypes (Scope, Param_Type, Exp_Result.Type_Symbol)) then if not Dictionary.Is_Null_Symbol (Fun_Info.Other_Symbol) then Tagged_Actual_Must_Be_Object_Check (Node_Pos => Position, Formal_Type => Param_Type, Actual_Type => Exp_Result.Type_Symbol, Controlling_Type => Dictionary.GetSubprogramControllingType (Fun_Info.Other_Symbol), Is_A_Variable => Exp_Result.Is_AVariable, Is_A_Constant => Exp_Result.Is_Constant, Error_Found => Error_Found); end if; -- Following call will deal with scalar value constraint checking --# accept Flow, 10, Unused_Value, "Expected ineffective assignment"; Constraint_Check (Val => Exp_Result.Value, New_Val => Unused_Value, Is_Annotation => Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_named_argument_association or else Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_positional_argument_association, Typ => Param_Type, Position => Position); --# end accept; -- Check array bounds etc. if Dictionary.Is_Constrained_Array_Type_Mark (Param_Type, Scope) then -- Formal is a constrained subtype of an unconstrained array if Dictionary.Is_Unconstrained_Array_Type_Mark (Exp_Result.Type_Symbol, Scope) then -- Actual is unconstrained. In SPARK95 onwards, this is OK if -- the actual is a static String expression, but illegal -- otherwise. if CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83 and then Dictionary.IsPredefinedStringType (Exp_Result.Type_Symbol) then -- Formal must be a constrained String subtype, so we need -- to check the upper bound of the actual against the expected -- upper bound of the formal. if Exp_Result.Range_RHS = Maths.NoValue then -- Actual is not static, so must be illegal ErrorHandler.Semantic_Error (Err_Num => 39, Reference => ErrorHandler.No_Reference, Position => Position, Id_Str => LexTokenManager.Null_String); else -- Actual is static, so check upper-bound against that expected if Exp_Result.Range_RHS /= Maths.ValueRep (Dictionary.GetScalarAttributeValue (False, LexTokenManager.Last_Token, Param_Type)) then ErrorHandler.Semantic_Error (Err_Num => 418, Reference => ErrorHandler.No_Reference, Position => Position, Id_Str => LexTokenManager.Null_String); end if; end if; else -- SPARK83 or not a String type, so illegal ErrorHandler.Semantic_Error (Err_Num => 39, Reference => ErrorHandler.No_Reference, Position => Position, Id_Str => LexTokenManager.Null_String); end if; elsif Illegal_Unconstrained (Left_Type => Exp_Result.Type_Symbol, Right_Type => Param_Type) then -- Although both formal and actual are constrained their bounds don't match ErrorHandler.Semantic_Error (Err_Num => 418, Reference => ErrorHandler.No_Reference, Position => Position, Id_Str => LexTokenManager.Null_String); end if; end if; -- To help the VCG with generating checks involving unconstrained formal parameters, we -- seed the syntax tree with a constraining type mark. The positional_argument_association -- node is already used for RTC purposes, so we seed the expression node instead. 
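         -- A minimal sketch of the situation being handled (hypothetical declarations):
         --    type Vec is array (Positive range <>) of Integer;
         --    procedure P (A : in Vec);     -- unconstrained formal parameter
         --    V : Vec (1 .. 3);
         -- For the call P (V) the constraining subtype of the actual (and, for a String
         -- actual, its length) is planted on the expression node below, so that the VCG
         -- can phrase its run-time checks in terms of the actual's bounds rather than
         -- the unconstrained formal.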
if Syntax_Node_Type (Node => Node) = SP_Symbols.named_argument_association then -- ASSUME Node = named_argument_association Plant_Constraining_Type (Expression_Type => Exp_Result.Type_Symbol, String_Length => Exp_Result.Range_RHS, Actual_Node => STree.Expression_From_Named_Argument_Association (Node => Node)); elsif Syntax_Node_Type (Node => Node) = SP_Symbols.positional_argument_association then -- ASSUME Node = positional_argument_association Plant_Constraining_Type (Expression_Type => Exp_Result.Type_Symbol, String_Length => Exp_Result.Range_RHS, Actual_Node => STree.Expression_From_Positional_Argument_Association (Node => Node)); end if; else Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 38, Reference => ErrorHandler.No_Reference, Position => Position, Id_Str => LexTokenManager.Null_String); end if; --# accept Flow, 33, Unused_Value, "Expected to be neither referenced nor exported"; end Wf_Argument_Association; spark-2012.0.deb/examiner/dictionary-attribute_is_visible_but_obsolete_local.adb0000644000175000017500000001321411753202336027220 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ---------------------------------------------------------------------------------------- -- If more attributes need to be added as obsolete, it is recommended to follow -- the structure of AttributeIsVisible, the skeleton of which is present here. 
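-- An illustrative sketch only (not taken from this file): under the SPARK 95
-- profile a reference such as
--    F'Epsilon     -- F a floating point type: accepted, but classed here as obsolete
-- is recognised by this function, the Ada 95 model-oriented attributes (for
-- example F'Model_Epsilon) being the usual replacements, whereas under the
-- SPARK 83 profile no attributes are currently classed as obsolete.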
separate (Dictionary) function Attribute_Is_Visible_But_Obsolete_Local (Name : LexTokenManager.Lex_String; Prefix : PrefixSort; Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean is Result : Boolean; -------------------------------------------------------------------------------- function Type_Attribute_Is_Visible_But_Obsolete (Name : LexTokenManager.Lex_String; Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in CommandLineData.Content; --# in Dict; --# in LexTokenManager.State; is Result : Boolean; -------------------------------------------------------------------------------- -- Case analysis here is derived from AARM A.5.3 (72.d - 72.f) and -- AARM A.5.4 (4.a - 4.c) -------------------------------------------------------------------------------- function Type_Attribute_Is_Visible_But_Obsolete_95 (Name : LexTokenManager.Lex_String; Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; --# in LexTokenManager.State; is Result : Boolean; begin if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Safe_Emax_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Emax_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Small_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Epsilon_Token) = LexTokenManager.Str_Eq then Result := Type_Is_Floating_Point (Type_Mark => Type_Mark) or else Type_Mark = Get_Unknown_Type_Mark; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Mantissa_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Safe_Large_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Safe_Small_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Large_Token) = LexTokenManager.Str_Eq then Result := Type_Is_Real (Type_Mark => Type_Mark) or else Type_Mark = Get_Unknown_Type_Mark; else Result := False; end if; return Result; end Type_Attribute_Is_Visible_But_Obsolete_95; begin -- Type_Attribute_Is_Visible_But_Obsolete case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => Result := False; -- Currently none such handled when CommandLineData.SPARK95_Onwards => -- and for SPARK95 onwards ... 
Result := Type_Attribute_Is_Visible_But_Obsolete_95 (Name => Name, Type_Mark => Type_Mark); end case; return Result; end Type_Attribute_Is_Visible_But_Obsolete; begin -- Attribute_Is_Visible_But_Obsolete_Local if Type_Is_Private_Here (Type_Mark => Type_Mark, Scope => Scope) then Result := False; else case Prefix is when AType => Result := Type_Attribute_Is_Visible_But_Obsolete (Name => Name, Type_Mark => Type_Mark); when ABaseType => Result := False; -- Currently none such handled when AnObject => Result := False; -- Currently none such handled end case; end if; return Result; end Attribute_Is_Visible_But_Obsolete_Local; spark-2012.0.deb/examiner/sprint.ads0000644000175000017500000000325311753202336016264 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with STree; use type STree.SyntaxNode; --# inherit E_Strings, --# LexTokenManager, --# SPARK_IO, --# SP_Symbols, --# STree; package SPrint is -- Prints a human-readable printout of the syntax tree rooted at Node procedure Dump_Syntax_Tree (Node : in STree.SyntaxNode; Indent : in Natural); --# global in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Indent, --# LexTokenManager.State, --# Node, --# STree.Table; end SPrint; spark-2012.0.deb/examiner/dictionary-get_scalar_attribute_type.adb0000644000175000017500000002735411753202336024327 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Dictionary) function Get_Scalar_Attribute_Type (Name : LexTokenManager.Lex_String; Type_Mark : RawDict.Type_Info_Ref) return RawDict.Type_Info_Ref is Result : RawDict.Type_Info_Ref; -------------------------------------------------------------------------------- function Get_Range_Attribute_Type (Type_Mark : RawDict.Type_Info_Ref) return RawDict.Type_Info_Ref --# global in Dict; is Result : RawDict.Type_Info_Ref; begin if Type_Is_Array (Type_Mark => Type_Mark) then Result := Get_Array_Index (Type_Mark => Type_Mark, Dimension => 1); else Result := Type_Mark; end if; return Result; end Get_Range_Attribute_Type; begin -- Get_Scalar_Attribute_Type if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Aft_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Digits_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Emax_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Fore_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Length_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Machine_Emax_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Machine_Emin_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Machine_Mantissa_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Machine_Radix_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Mantissa_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Pos_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Safe_Emax_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Size_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Model_Emin_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Model_Mantissa_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Model_Emin_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Modulus_Token) = LexTokenManager.Str_Eq then Result := Get_Universal_Integer_Type; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Delta_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => 
LexTokenManager.Epsilon_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Large_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Safe_Large_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Safe_Small_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Small_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Model_Epsilon_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Model_Small_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Safe_First_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Safe_Last_Token) = LexTokenManager.Str_Eq then Result := Get_Universal_Real_Type; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.First_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Last_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Range_Token) = LexTokenManager.Str_Eq then Result := Get_Range_Attribute_Type (Type_Mark => Type_Mark); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Machine_Overflows_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Machine_Rounds_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Denorm_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Signed_Zeros_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Valid_Token) = LexTokenManager.Str_Eq then Result := Get_Predefined_Boolean_Type; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Pred_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Succ_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Val_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Min_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Max_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Tail_Token) = LexTokenManager.Str_Eq or else -- 'Tail is a proof attribute LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, 
Lex_Str2 => LexTokenManager.Append_Token) = LexTokenManager.Str_Eq or else -- 'Append is a proof attribute LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Ceiling_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Mod_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Floor_Token) = LexTokenManager.Str_Eq then Result := Get_Root_Type (Type_Mark => Type_Mark); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Access_Token) = LexTokenManager.Str_Eq then Result := RawDict.Get_Type_Accesses (Type_Mark => Type_Mark); else Result := RawDict.Null_Type_Info_Ref; end if; return Result; end Get_Scalar_Attribute_Type; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_tilde.adb0000644000175000017500000001257711753202336023042 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Walk_Expression_P) procedure Wf_Tilde (Node_Pos : in LexTokenManager.Token_Position; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; Context : in Sem.Anno_Tilde_Context) is Stack_Top : Sem.Exp_Record; Subprog_Sym, Top_Sym : Dictionary.Symbol; Abstraction : Dictionary.Abstractions; begin case Context is when Sem.Precondition => -- Tilde not allowed at all Exp_Stack.Pop (Item => Stack_Top, Stack => E_Stack); Stack_Top.Errors_In_Expression := True; Exp_Stack.Push (X => Stack_Top, Stack => E_Stack); ErrorHandler.Semantic_Error (Err_Num => 321, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => LexTokenManager.Null_String); when Sem.Function_Return => -- tilde only allowed on imported "stream" variables Exp_Stack.Pop (Item => Stack_Top, Stack => E_Stack); Top_Sym := Stack_Top.Other_Symbol; -- the test below is sufficient to identify a global of the function which is of mode IN. -- This is because external variables cannot appear as parameters AND -- external variables of mode OUT cannot be referenced. Therefore a variable which is -- visible in a function return annotation and which has a mode must be a global of external IN mode.
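         -- An illustrative sketch of the two accepted uses of ~ (hypothetical names):
         --    function Read return Integer;
         --    --# global in Port;            -- external (stream) variable of mode in
         --    --# return R => R = Port~;     -- the value initially present on the stream
         -- and, in a procedure postcondition,
         --    --# post X = X~ + 1;
         -- which is only legal when X is an entire variable that is both imported and
         -- exported; a precondition may never use ~ at all.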
if not Dictionary.IsOwnVariableOrConstituentWithMode (Top_Sym) then Stack_Top.Errors_In_Expression := True; ErrorHandler.Semantic_Error (Err_Num => 317, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => LexTokenManager.Null_String); end if; Exp_Stack.Push (X => Stack_Top, Stack => E_Stack); when Sem.Postcondition => -- Tilde may be allowed Exp_Stack.Pop (Item => Stack_Top, Stack => E_Stack); if Stack_Top.Sort = Sem.Is_Object then -- May be ok, further checks required Top_Sym := Stack_Top.Other_Symbol; if not Dictionary.Is_Variable (Top_Sym) then Stack_Top.Errors_In_Expression := True; ErrorHandler.Semantic_Error (Err_Num => 318, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => LexTokenManager.Null_String); elsif not Dictionary.Types_Are_Equal (Left_Symbol => Stack_Top.Type_Symbol, Right_Symbol => Dictionary.GetType (Top_Sym), Full_Range_Subtype => False) then -- New check that variable is entire Stack_Top.Errors_In_Expression := True; ErrorHandler.Semantic_Error (Err_Num => 320, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => LexTokenManager.Null_String); else -- It is an entire variable so further checks required Subprog_Sym := Dictionary.GetEnclosingCompilationUnit (Scope); Abstraction := Dictionary.GetAbstraction (Subprog_Sym, Scope); if not (Dictionary.IsImport (Abstraction, Subprog_Sym, Top_Sym) and Dictionary.IsExport (Abstraction, Subprog_Sym, Top_Sym)) then Stack_Top.Errors_In_Expression := True; ErrorHandler.Semantic_Error (Err_Num => 319, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => LexTokenManager.Null_String); end if; end if; else -- Cannot be ok so error message needed Stack_Top.Errors_In_Expression := True; if not (Stack_Top.Sort = Sem.Is_Unknown) then -- Supress error for unknown things ErrorHandler.Semantic_Error (Err_Num => 318, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => LexTokenManager.Null_String); end if; end if; Exp_Stack.Push (X => Stack_Top, Stack => E_Stack); end case; end Wf_Tilde; spark-2012.0.deb/examiner/sprint.adb0000644000175000017500000001102011753202336016232 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with E_Strings; with LexTokenManager; with SPARK_IO; with SP_Symbols; package body SPrint is procedure Print_Node (Node : in STree.SyntaxNode; Indent : in Natural) --# global in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Indent, --# LexTokenManager.State, --# Node, --# STree.Table; is Node_Type : SP_Symbols.SP_Symbol; procedure Put_Node_Type --# global in Node_Type; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Node_Type; is -- Use 'Image here for convenience, so hide... --# hide Put_Node_Type; begin SPARK_IO.Put_String (SPARK_IO.Standard_Output, SP_Symbols.SP_Symbol'Image (Node_Type), 0); end Put_Node_Type; begin Node_Type := STree.Syntax_Node_Type (Node => Node); SPARK_IO.Put_Integer (File => SPARK_IO.Standard_Output, Item => Integer (STree.NodeToRef (Node)), Width => 6, Base => 10); SPARK_IO.Put_Char (SPARK_IO.Standard_Output, ' '); for I in Natural range 1 .. Indent loop SPARK_IO.Put_Char (SPARK_IO.Standard_Output, '-'); end loop; Put_Node_Type; case Node_Type is when SP_Symbols.identifier | SP_Symbols.integer_number | SP_Symbols.real_number | SP_Symbols.based_integer | SP_Symbols.based_real | SP_Symbols.character_literal | SP_Symbols.string_literal => SPARK_IO.Put_Char (SPARK_IO.Standard_Output, ' '); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => STree.Node_Lex_String (Node => Node))); when others => null; end case; SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); end Print_Node; procedure Recursive_Dump (Node : in STree.SyntaxNode; Indent : in Natural) --# global in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Indent, --# LexTokenManager.State, --# Node, --# STree.Table; is -- Recurse all ye who enter here! --# hide Recursive_Dump; Child : STree.SyntaxNode; begin -- pre-order traversal; Print_Node (Node, Indent); Child := STree.Child_Node (Current_Node => Node); while Child /= STree.NullNode loop Recursive_Dump (Child, Indent + 3); Child := STree.Next_Sibling (Current_Node => Child); end loop; end Recursive_Dump; ---------------------- -- Dump_Syntax_Tree -- ---------------------- procedure Dump_Syntax_Tree (Node : in STree.SyntaxNode; Indent : in Natural) is begin SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Dump of syntax tree for node ", 0); Print_Node (Node, 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); Recursive_Dump (Node, Indent); end Dump_Syntax_Tree; end SPrint; spark-2012.0.deb/examiner/lists.adb0000644000175000017500000003227211753202336016065 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= package body Lists is No_Next : constant List := 0; --------------------------------------------------------------------------- procedure Init (Heap : out List_Heap) is begin -- Partial initialization for now, since this is expensive on VAX/VMS. -- Re-think when VMS is retired. --# accept F, 32, Heap.Heap_Array, "Partial initialization OK" & --# F, 31, Heap.Heap_Array, "Partial initialization OK" & --# F, 602, Heap, Heap.Heap_Array, "Partial initialization OK"; Heap.High_Mark := 0; Heap.First_Free := No_Next; end Init; --------------------------------------------------------------------------- procedure New_List (Heap : in out List_Heap; The_List : out List; OK : out Boolean) is The_List_Local : List; begin if Heap.High_Mark < List'Last then --array not used up yet Heap.High_Mark := Heap.High_Mark + 1; The_List_Local := Heap.High_Mark; Heap.Heap_Array (The_List_Local) := Heap_Element'(Name => LexTokenManager.Null_String, Symbol => Dictionary.NullSymbol, Next => No_Next); The_List := The_List_Local; OK := True; elsif Heap.First_Free = No_Next then The_List := Null_List; OK := False; else The_List_Local := Heap.First_Free; Heap.First_Free := Heap.Heap_Array (Heap.First_Free).Next; Heap.Heap_Array (The_List_Local) := Heap_Element'(Name => LexTokenManager.Null_String, Symbol => Dictionary.NullSymbol, Next => No_Next); The_List := The_List_Local; OK := True; end if; end New_List; --------------------------------------------------------------------------- procedure Add_Name (Heap : in out List_Heap; The_List : in List; Name : in LexTokenManager.Lex_String; Already_Present : out Boolean; Ok : out Boolean) is Try, Try_Next, Add_Point : List; Done : Boolean; begin Already_Present := False; Ok := False; if The_List /= Null_List then Try := The_List; if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Heap.Heap_Array (Try).Name, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq and then Dictionary.Is_Null_Symbol (Heap.Heap_Array (Try).Symbol) then -- list is valid Try_Next := Heap.Heap_Array (Try).Next; Done := False; loop if Try_Next = No_Next or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Heap.Heap_Array (Try_Next).Name, Lex_Str2 => Name) = LexTokenManager.Str_Second then if Heap.High_Mark < List'Last then -- Array not used up yet Ok := True; Done := True; Heap.High_Mark := Heap.High_Mark + 1; Add_Point := Heap.High_Mark; Heap.Heap_Array (Add_Point) := Heap_Element'(Name => Name, Symbol => Dictionary.NullSymbol, Next => Try_Next); Heap.Heap_Array (Try).Next := Add_Point; elsif Heap.First_Free = No_Next then --heap is full Done := True; else Ok := True; Done := True; Add_Point := Heap.First_Free; Heap.First_Free := Heap.Heap_Array (Heap.First_Free).Next; Heap.Heap_Array (Add_Point) := Heap_Element'(Name => Name, Symbol => Dictionary.NullSymbol, Next => Try_Next); Heap.Heap_Array (Try).Next := Add_Point; end if; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Heap.Heap_Array (Try_Next).Name, Lex_Str2 => Name) = LexTokenManager.Str_Eq then Ok := True; Done := True; Already_Present := True; end if; exit when Done; Try := Try_Next; Try_Next := Heap.Heap_Array (Try).Next; end loop; end if; end if; end Add_Name; 
--------------------------------------------------------------------------- procedure Add_Symbol (Heap : in out List_Heap; The_List : in List; Symbol : in Dictionary.Symbol; Already_Present : out Boolean; Ok : out Boolean) is Try, Try_Next, Add_Point : List; Done : Boolean; begin Already_Present := False; Ok := False; if The_List /= Null_List then Try := The_List; if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Heap.Heap_Array (Try).Name, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq and then Dictionary.Is_Null_Symbol (Heap.Heap_Array (Try).Symbol) then -- list is valid Try_Next := Heap.Heap_Array (Try).Next; Done := False; loop if Try_Next = No_Next or else Dictionary.SymbolRef (Item => Heap.Heap_Array (Try_Next).Symbol) > Dictionary.SymbolRef (Item => Symbol) then if Heap.High_Mark < List'Last then -- Array not used up yet Ok := True; Done := True; Heap.High_Mark := Heap.High_Mark + 1; Add_Point := Heap.High_Mark; Heap.Heap_Array (Add_Point) := Heap_Element'(Name => LexTokenManager.Null_String, Symbol => Symbol, Next => Try_Next); Heap.Heap_Array (Try).Next := Add_Point; elsif Heap.First_Free = No_Next then --heap is full Done := True; else Ok := True; Done := True; Add_Point := Heap.First_Free; Heap.First_Free := Heap.Heap_Array (Heap.First_Free).Next; Heap.Heap_Array (Add_Point) := Heap_Element'(Name => LexTokenManager.Null_String, Symbol => Symbol, Next => Try_Next); Heap.Heap_Array (Try).Next := Add_Point; end if; elsif Heap.Heap_Array (Try_Next).Symbol = Symbol then Ok := True; Done := True; Already_Present := True; end if; exit when Done; Try := Try_Next; Try_Next := Heap.Heap_Array (Try).Next; end loop; end if; end if; end Add_Symbol; --------------------------------------------------------------------------- procedure Get_First (Heap : in out List_Heap; The_List : in out List; Symbol : out Dictionary.Symbol; Empty : out Boolean; Ok : out Boolean) is Ptr : List; begin if The_List = Null_List then -- can't get value from null list Ok := False; Empty := True; Symbol := Dictionary.NullSymbol; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Heap.Heap_Array (The_List).Name, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq and then Dictionary.Is_Null_Symbol (Heap.Heap_Array (The_List).Symbol) then -- list is valid Ptr := Heap.Heap_Array (The_List).Next; -- point to first element if Ptr = No_Next then -- list is empty Ok := True; --list is ok its just empty! 
Empty := True; Symbol := Dictionary.NullSymbol; else --list is not empty Ok := True; Empty := False; Symbol := Heap.Heap_Array (Ptr).Symbol; Heap.Heap_Array (Ptr).Name := LexTokenManager.Null_String; Heap.Heap_Array (Ptr).Symbol := Dictionary.NullSymbol; Heap.Heap_Array (The_List) := Heap_Element'(Name => LexTokenManager.Null_String, Symbol => Dictionary.NullSymbol, Next => Heap.First_Free); Heap.First_Free := The_List; The_List := Ptr; end if; else Ok := False; Empty := True; Symbol := Dictionary.NullSymbol; end if; end Get_First; --------------------------------------------------------------------------- procedure Delete_List (Heap : in out List_Heap; The_List : in out List) is Ptr, Free_Ptr : List; begin if The_List /= Null_List then -- can't delete null list Ptr := The_List; if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Heap.Heap_Array (Ptr).Name, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq and then Dictionary.Is_Null_Symbol (Heap.Heap_Array (Ptr).Symbol) then -- list is valid Free_Ptr := Heap.First_Free; Heap.First_Free := Ptr; while Heap.Heap_Array (Ptr).Next /= No_Next loop Ptr := Heap.Heap_Array (Ptr).Next; Heap.Heap_Array (Ptr).Name := LexTokenManager.Null_String; Heap.Heap_Array (Ptr).Symbol := Dictionary.NullSymbol; end loop; Heap.Heap_Array (Ptr).Next := Free_Ptr; end if; end if; The_List := Null_List; end Delete_List; --------------------------------------------------------------------------- function Is_Member (Heap : List_Heap; The_List : List; Str : LexTokenManager.Lex_String) return Boolean is Result : Boolean; Try : List; begin if The_List = Null_List then --can't check membership of null list Result := False; else Try := The_List; -- 782 - Deleted redundant type conversion if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Heap.Heap_Array (Try).Name, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq and then Dictionary.Is_Null_Symbol (Heap.Heap_Array (Try).Symbol) then -- list is valid Result := False; while Heap.Heap_Array (Try).Next /= No_Next loop Try := Heap.Heap_Array (Try).Next; if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Heap.Heap_Array (Try).Name, Lex_Str2 => Str) = LexTokenManager.Str_Eq then Result := True; exit; end if; end loop; else -- The_List supplied was not a valid list pointer Result := False; end if; end if; return Result; end Is_Member; end Lists; spark-2012.0.deb/examiner/sem-wf_package_declaration-get_package_declaration_key_nodes.adb0000644000175000017500000002064411753202336031002 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license.
-- --============================================================================= separate (Sem.Wf_Package_Declaration) procedure Get_Package_Declaration_Key_Nodes (Node : in STree.SyntaxNode; Context_Node : out STree.SyntaxNode; Inherit_Node : out STree.SyntaxNode; Generic_Formal_Part_Node : out STree.SyntaxNode; Spec_Node : out STree.SyntaxNode; Ident_Node : out STree.SyntaxNode; Private_Package_Declaration : out Boolean; Child_Package_Declaration : out Boolean) is begin Context_Node := Parent_Node (Current_Node => Node); -- ASSUME Context_Node = library_unit OR initial_declarative_item_rep OR generic_declaration if Syntax_Node_Type (Node => Context_Node) = SP_Symbols.library_unit or else Syntax_Node_Type (Node => Context_Node) = SP_Symbols.generic_declaration then -- ASSUME Context_Node = library_unit OR generic_declaration if Syntax_Node_Type (Node => Context_Node) = SP_Symbols.generic_declaration then -- ASSUME Context_Node = generic_declaration Context_Node := Parent_Node (Current_Node => Context_Node); end if; -- ASSUME Context_Node = library_unit SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Context_Node) = SP_Symbols.library_unit, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Context_Node = library_unit in Get_Package_Declaration_Key_Nodes"); Context_Node := Child_Node (Current_Node => Parent_Node (Current_Node => Context_Node)); -- ASSUME Context_Node = context_clause OR library_unit if Syntax_Node_Type (Node => Context_Node) = SP_Symbols.library_unit then -- ASSUME Context_Node = library_unit Context_Node := STree.NullNode; elsif Syntax_Node_Type (Node => Context_Node) /= SP_Symbols.context_clause then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Context_Node = context_clause OR library_unit in Get_Package_Declaration_Key_Nodes"); end if; elsif Syntax_Node_Type (Node => Context_Node) = SP_Symbols.initial_declarative_item_rep then -- ASSUME Context_Node = initial_declarative_item_rep Context_Node := STree.NullNode; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Context_Node = library_unit OR initial_declarative_item_rep OR generic_declaration " & " in Get_Package_Declaration_Key_Nodes"); end if; -- ASSUME Context_Node = context_clause OR NULL SystemErrors.RT_Assert (C => Context_Node = STree.NullNode or else Syntax_Node_Type (Node => Context_Node) = SP_Symbols.context_clause, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Context_Node = context_clause OR NULL in Get_Package_Declaration_Key_Nodes"); Private_Package_Declaration := Syntax_Node_Type (Node => Node) = SP_Symbols.private_package_declaration; Inherit_Node := Child_Node (Current_Node => Node); -- ASSUME Inherit_Node = inherit_clause OR generic_formal_part OR package_specification if Syntax_Node_Type (Node => Inherit_Node) = SP_Symbols.inherit_clause then -- ASSUME Inherit_Node = inherit_clause Generic_Formal_Part_Node := Next_Sibling (Current_Node => Inherit_Node); -- ASSUME Generic_Formal_Part_Node = generic_formal_part OR package_specification if Syntax_Node_Type (Node => Generic_Formal_Part_Node) = SP_Symbols.generic_formal_part then -- ASSUME Generic_Formal_Part_Node = generic_formal_part Spec_Node := Next_Sibling (Current_Node => Generic_Formal_Part_Node); elsif Syntax_Node_Type (Node => Generic_Formal_Part_Node) = SP_Symbols.package_specification then -- ASSUME Generic_Formal_Part_Node = package_specification Spec_Node := Generic_Formal_Part_Node; Generic_Formal_Part_Node := STree.NullNode; else 
Generic_Formal_Part_Node := STree.NullNode; Spec_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Generic_Formal_Part_Node = generic_formal_part OR package_specification" & " in Get_Package_Declaration_Key_Nodes"); end if; elsif Syntax_Node_Type (Node => Inherit_Node) = SP_Symbols.generic_formal_part then -- ASSUME Inherit_Node = generic_formal_part Generic_Formal_Part_Node := Inherit_Node; Inherit_Node := STree.NullNode; Spec_Node := Next_Sibling (Current_Node => Generic_Formal_Part_Node); elsif Syntax_Node_Type (Node => Inherit_Node) = SP_Symbols.package_specification then -- ASSUME Inherit_Node = package_specification Spec_Node := Inherit_Node; Inherit_Node := STree.NullNode; Generic_Formal_Part_Node := STree.NullNode; else Inherit_Node := STree.NullNode; Generic_Formal_Part_Node := STree.NullNode; Spec_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Inherit_Node = inherit_clause OR generic_formal_part OR package_specification" & " in Get_Package_Declaration_Key_Nodes"); end if; -- ASSUME Inherit_Node = inherit_clause OR NULL SystemErrors.RT_Assert (C => Inherit_Node = STree.NullNode or else Syntax_Node_Type (Node => Inherit_Node) = SP_Symbols.inherit_clause, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Inherit_Node = inherit_clause OR NULL in Get_Package_Declaration_Key_Nodes"); -- ASSUME Generic_Formal_Part_Node = generic_formal_part OR NULL SystemErrors.RT_Assert (C => Generic_Formal_Part_Node = STree.NullNode or else Syntax_Node_Type (Node => Generic_Formal_Part_Node) = SP_Symbols.generic_formal_part, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Generic_Formal_Part_Node = generic_formal_part OR NULL in Get_Package_Declaration_Key_Nodes"); -- ASSUME Spec_Node = package_specification SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.package_specification, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Spec_Node = package_specification in Get_Package_Declaration_Key_Nodes"); Ident_Node := Child_Node (Current_Node => Child_Node (Current_Node => Spec_Node)); -- ASSUME Ident_Node = dotted_simple_name OR identifier if Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier then -- ASSUME Ident_Node = identifier Child_Package_Declaration := False; elsif Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.dotted_simple_name then -- ASSUME Ident_Node = dotted_simple_name Child_Package_Declaration := True; Ident_Node := Last_Child_Of (Start_Node => Ident_Node); else Child_Package_Declaration := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = dotted_simple_name OR identifier in Get_Package_Declaration_Key_Nodes"); end if; -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Get_Package_Declaration_Key_Nodes"); end Get_Package_Declaration_Key_Nodes; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_attribute.adb0000644000175000017500000000260311753202336023735 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public 
License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Walk_Expression_P) procedure Wf_Attribute (E_Stack : in out Exp_Stack.Exp_Stack_Type) is Result : Sem.Exp_Record; begin Exp_Stack.Pop (Item => Result, Stack => E_Stack); -- this is type of entire attribute expression if Result.Sort /= Sem.Type_Result then Result := Sem.Unknown_Type_Record; end if; Exp_Stack.Push (X => Result, Stack => E_Stack); end Wf_Attribute; spark-2012.0.deb/examiner/sem-walk_expression_p-create_name_list.adb0000644000175000017500000000270011753202336024532 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Walk_Expression_P) procedure Create_Name_List (List : out Lists.List; Heap_Param : in out Lists.List_Heap) is Ok : Boolean; begin Lists.New_List (Heap => Heap_Param, The_List => List, OK => Ok); if not Ok then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.List_Overflow_In_Expression, Msg => "in Create_Name_List"); end if; end Create_Name_List; spark-2012.0.deb/examiner/cells.adb0000644000175000017500000005115611753202335016032 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Ada.Exceptions; with ExaminerConstants; with SystemErrors; with Statistics; package body Cells is Initial_Heap_Capacity : constant := 2 ** 10; Null_Cell_Content : constant Cell_Content := Cell_Content' (A_Ptr => Null_Cell, B_Ptr => Null_Cell, C_Ptr => Null_Cell, Copy => Null_Cell, Free => False, Kind => Cell_Storage.Unknown_Kind, Rank => Unknown_Rank, Lex_Str => LexTokenManager.Null_String, Marked => False, Op_Symbol => SP_Symbols.RWnull, Val => 0, Assoc_Var => Dictionary.NullSymbol); procedure Initialize (Heap : out Heap_Record) is begin Cell_Storage.Initialize (Initial_Heap_Capacity, Heap.List_Of_Cells); Heap.High_Mark := Cell'First; Heap.Next_Free_Cell := Cell'First; Cell_Storage.Set_Element (V => Heap.List_Of_Cells, Index => Cell'First, Value => Null_Cell_Content); end Initialize; -------------------------------------------------------------------------- function Are_Identical (Cell_1, Cell_2 : Cell) return Boolean is begin return Cell_1 = Cell_2; end Are_Identical; -------------------------------------------------------------------------- function Get_A_Ptr (Heap : Heap_Record; CellName : Cell) return Cell is begin return Cell_Storage.Get_Element (Heap.List_Of_Cells, CellName).A_Ptr; end Get_A_Ptr; -------------------------------------------------------------------------- function Get_B_Ptr (Heap : Heap_Record; CellName : Cell) return Cell is begin return Cell_Storage.Get_Element (Heap.List_Of_Cells, CellName).B_Ptr; end Get_B_Ptr; -------------------------------------------------------------------------- function Get_C_Ptr (Heap : Heap_Record; CellName : Cell) return Cell is begin return Cell_Storage.Get_Element (Heap.List_Of_Cells, CellName).C_Ptr; end Get_C_Ptr; -------------------------------------------------------------------------- function Get_Natural_Value (Heap : Heap_Record; CellName : Cell) return Natural is begin return Cell_Storage.Get_Element (Heap.List_Of_Cells, CellName).Val; end Get_Natural_Value; -------------------------------------------------------------------------- function Get_Rank (Heap : Heap_Record; CellName : Cell) return Cell_Rank is begin return Cell_Storage.Get_Element (Heap.List_Of_Cells, CellName).Rank; end Get_Rank; -------------------------------------------------------------------------- function Get_Copy (Heap : Heap_Record; CellName : Cell) return Cell is begin return Cell_Storage.Get_Element (Heap.List_Of_Cells, CellName).Copy; end Get_Copy; -------------------------------------------------------------------------- function Is_Free (Heap : Heap_Record; CellName : Cell) return Boolean is begin return Cell_Storage.Get_Element (Heap.List_Of_Cells, CellName).Free; end Is_Free; -------------------------------------------------------------------------- function Is_Marked (Heap : Heap_Record; CellName : Cell) return Boolean is begin return Cell_Storage.Get_Element (Heap.List_Of_Cells, CellName).Marked; end Is_Marked; -------------------------------------------------------------------------- function Is_Null_Cell (CellName : Cell) return Boolean is begin return CellName = Null_Cell; end Is_Null_Cell; -------------------------------------------------------------------------- function Is_Const_Cell (Heap : Heap_Record; CellName : Cell) return Boolean is begin return (Cell_Storage.Get_Element (Heap.List_Of_Cells, CellName).Kind = Cell_Storage.Manifest_Const) or else (Cell_Storage.Get_Element (Heap.List_Of_Cells, CellName).Kind = Cell_Storage.Named_Const); end 
Is_Const_Cell; -------------------------------------------------------------------------- function Is_Reference_Cell (Heap : Heap_Record; CellName : Cell) return Boolean is begin return Cell_Storage.Get_Element (Heap.List_Of_Cells, CellName).Kind = Cell_Storage.Reference; end Is_Reference_Cell; -------------------------------------------------------------------------- function Get_Kind (Heap : Heap_Record; CellName : Cell) return Cell_Kind is begin return Cell_Storage.Get_Element (Heap.List_Of_Cells, CellName).Kind; end Get_Kind; -------------------------------------------------------------------------- function Get_Op_Symbol (Heap : Heap_Record; CellName : Cell) return SP_Symbols.SP_Symbol is begin return Cell_Storage.Get_Element (Heap.List_Of_Cells, CellName).Op_Symbol; end Get_Op_Symbol; -------------------------------------------------------------------------- function Get_Lex_Str (Heap : Heap_Record; CellName : Cell) return LexTokenManager.Lex_String is begin return Cell_Storage.Get_Element (Heap.List_Of_Cells, CellName).Lex_Str; end Get_Lex_Str; -------------------------------------------------------------------------- function Get_Symbol_Value (Heap : Heap_Record; CellName : Cell) return Dictionary.Symbol is begin return Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (Cell_Storage.Get_Element (Heap.List_Of_Cells, CellName).Val)); end Get_Symbol_Value; -------------------------------------------------------------------------- function Cell_Ref (Cell_Name : in Cell) return Natural is begin return Natural (Cell_Name); end Cell_Ref; -------------------------------------------------------------------------- procedure Set_A_Ptr (Heap : in out Heap_Record; Cell_1, Cell_2 : in Cell) is The_Cell : Cell_Content; begin The_Cell := Cell_Storage.Get_Element (Heap.List_Of_Cells, Cell_1); The_Cell.A_Ptr := Cell_2; Cell_Storage.Set_Element (V => Heap.List_Of_Cells, Index => Cell_1, Value => The_Cell); end Set_A_Ptr; -------------------------------------------------------------------------- procedure Set_B_Ptr (Heap : in out Heap_Record; Cell_1, Cell_2 : in Cell) is The_Cell : Cell_Content; begin The_Cell := Cell_Storage.Get_Element (Heap.List_Of_Cells, Cell_1); The_Cell.B_Ptr := Cell_2; Cell_Storage.Set_Element (V => Heap.List_Of_Cells, Index => Cell_1, Value => The_Cell); end Set_B_Ptr; -------------------------------------------------------------------------- procedure Set_C_Ptr (Heap : in out Heap_Record; Cell_1, Cell_2 : in Cell) is The_Cell : Cell_Content; begin The_Cell := Cell_Storage.Get_Element (Heap.List_Of_Cells, Cell_1); The_Cell.C_Ptr := Cell_2; Cell_Storage.Set_Element (V => Heap.List_Of_Cells, Index => Cell_1, Value => The_Cell); end Set_C_Ptr; -------------------------------------------------------------------------- procedure Copy_Contents (Heap : in out Heap_Record; Source, Destination : in Cell) is Dest_Cell, Source_Cell : Cell_Content; begin Source_Cell := Cell_Storage.Get_Element (V => Heap.List_Of_Cells, Index => Source); Dest_Cell := Cell_Storage.Get_Element (V => Heap.List_Of_Cells, Index => Destination); Dest_Cell := Cell_Content' (A_Ptr => Source_Cell.A_Ptr, B_Ptr => Source_Cell.B_Ptr, C_Ptr => Dest_Cell.C_Ptr, Copy => Dest_Cell.Copy, Free => Dest_Cell.Free, Kind => Source_Cell.Kind, Rank => Source_Cell.Rank, Lex_Str => Source_Cell.Lex_Str, Marked => Dest_Cell.Marked, Op_Symbol => Source_Cell.Op_Symbol, Val => Source_Cell.Val, Assoc_Var => Source_Cell.Assoc_Var); Cell_Storage.Set_Element (V => Heap.List_Of_Cells, Index => Destination, Value => Dest_Cell); end 
Copy_Contents; -------------------------------------------------------------------------- procedure Create_Cell (Heap : in out Heap_Record; CellName : out Cell) is NewCell : Cell; begin if Heap.Next_Free_Cell /= Null_Cell then -- There are cells in the returned free list, so recycle -- the first Cell on the free list NewCell := Heap.Next_Free_Cell; Heap.Next_Free_Cell := Cell_Storage.Get_Element (V => Heap.List_Of_Cells, Index => Heap.Next_Free_Cell).A_Ptr; Cell_Storage.Set_Element (Heap.List_Of_Cells, NewCell, Null_Cell_Content); CellName := NewCell; elsif Heap.High_Mark < Cell_Storage.Last_Index (Heap.List_Of_Cells) then -- Free list empty but still room within the array Heap.High_Mark := Heap.High_Mark + 1; NewCell := Heap.High_Mark; Cell_Storage.Set_Element (Heap.List_Of_Cells, NewCell, Null_Cell_Content); CellName := NewCell; elsif Heap.High_Mark < Cell_Storage.Cell'Last then -- All the current array elements have been used - extend by appending Heap.High_Mark := Heap.High_Mark + 1; NewCell := Heap.High_Mark; Cell_Storage.Append (Heap.List_Of_Cells, Null_Cell_Content); CellName := NewCell; else -- Array and returned cells in free list both used up -- Set table use to 100% Statistics.SetTableUsage (Statistics.VCGHeap, Integer (Cell'Last)); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.VCG_Heap_Is_Exhausted, Msg => ""); CellName := Null_Cell; end if; exception --# hide Create_Cell; when Storage_Error => -- Cell_Storage.Append really has run out of memory Statistics.SetTableUsage (Statistics.VCGHeap, Integer (Cell'Last)); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.VCG_Heap_Is_Exhausted, Msg => "in Cells.Create_Cell - Storage_Error in attempt to extend"); CellName := Null_Cell; when E : others => -- Something else has gone wrong Statistics.SetTableUsage (Statistics.VCGHeap, Integer (Cell'Last)); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.VCG_Heap_Is_Exhausted, Msg => "in Cells.Create_Cell - " & Ada.Exceptions.Exception_Name (E) & " - " & Ada.Exceptions.Exception_Message (E)); CellName := Null_Cell; end Create_Cell; -------------------------------------------------------------------------- procedure Create_Copy (Heap : in out Heap_Record; CellName : in Cell) is NewCell : Cell; Dest_Cell, Source_Cell : Cell_Content; begin Create_Cell (Heap, NewCell); Source_Cell := Cell_Storage.Get_Element (V => Heap.List_Of_Cells, Index => CellName); Dest_Cell := Cell_Storage.Get_Element (V => Heap.List_Of_Cells, Index => NewCell); Dest_Cell.Kind := Source_Cell.Kind; Dest_Cell.Rank := Source_Cell.Rank; Dest_Cell.Lex_Str := Source_Cell.Lex_Str; Dest_Cell.Op_Symbol := Source_Cell.Op_Symbol; Dest_Cell.Val := Source_Cell.Val; Dest_Cell.Assoc_Var := Source_Cell.Assoc_Var; Source_Cell.Copy := NewCell; Cell_Storage.Set_Element (V => Heap.List_Of_Cells, Index => NewCell, Value => Dest_Cell); Cell_Storage.Set_Element (V => Heap.List_Of_Cells, Index => CellName, Value => Source_Cell); end Create_Copy; -------------------------------------------------------------------------- procedure Dispose_Of_Cell (Heap : in out Heap_Record; CellName : in Cell) is The_Cell : Cell_Content; begin The_Cell := Cell_Storage.Get_Element (Heap.List_Of_Cells, CellName); if The_Cell.Free then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.VCG_Heap_Is_Corrupted, Msg => "in DisposeOfCell"); else The_Cell.A_Ptr := Heap.Next_Free_Cell; The_Cell.Free := True; Heap.Next_Free_Cell := CellName; Cell_Storage.Set_Element (V => Heap.List_Of_Cells, Index => CellName, Value => The_Cell); end if; end Dispose_Of_Cell; 
-------------------------------------------------------------------------- procedure Mark_Cell (Heap : in out Heap_Record; CellName : in Cell) is The_Cell : Cell_Content; begin The_Cell := Cell_Storage.Get_Element (V => Heap.List_Of_Cells, Index => CellName); The_Cell.Marked := True; Cell_Storage.Set_Element (V => Heap.List_Of_Cells, Index => CellName, Value => The_Cell); end Mark_Cell; -------------------------------------------------------------------------- procedure UnMark_Cell (Heap : in out Heap_Record; CellName : in Cell) is The_Cell : Cell_Content; begin The_Cell := Cell_Storage.Get_Element (V => Heap.List_Of_Cells, Index => CellName); The_Cell.Marked := False; Cell_Storage.Set_Element (V => Heap.List_Of_Cells, Index => CellName, Value => The_Cell); end UnMark_Cell; -------------------------------------------------------------------------- procedure Set_Kind (Heap : in out Heap_Record; CellName : in Cell; KindConst : in Cell_Kind) is The_Cell : Cell_Content; begin The_Cell := Cell_Storage.Get_Element (V => Heap.List_Of_Cells, Index => CellName); The_Cell.Kind := KindConst; Cell_Storage.Set_Element (V => Heap.List_Of_Cells, Index => CellName, Value => The_Cell); end Set_Kind; -------------------------------------------------------------------------- procedure Set_Rank (Heap : in out Heap_Record; CellName : in Cell; Rank : in Cell_Rank) is The_Cell : Cell_Content; begin The_Cell := Cell_Storage.Get_Element (V => Heap.List_Of_Cells, Index => CellName); The_Cell.Rank := Rank; Cell_Storage.Set_Element (V => Heap.List_Of_Cells, Index => CellName, Value => The_Cell); end Set_Rank; -------------------------------------------------------------------------- procedure Set_Op_Symbol (Heap : in out Heap_Record; CellName : in Cell; Sym : in SP_Symbols.SP_Symbol) is The_Cell : Cell_Content; begin The_Cell := Cell_Storage.Get_Element (V => Heap.List_Of_Cells, Index => CellName); The_Cell.Op_Symbol := Sym; Cell_Storage.Set_Element (V => Heap.List_Of_Cells, Index => CellName, Value => The_Cell); end Set_Op_Symbol; -------------------------------------------------------------------------- procedure Set_Lex_Str (Heap : in out Heap_Record; CellName : in Cell; Str : in LexTokenManager.Lex_String) is The_Cell : Cell_Content; begin The_Cell := Cell_Storage.Get_Element (V => Heap.List_Of_Cells, Index => CellName); The_Cell.Lex_Str := Str; Cell_Storage.Set_Element (V => Heap.List_Of_Cells, Index => CellName, Value => The_Cell); end Set_Lex_Str; -------------------------------------------------------------------------- procedure Set_Natural_Value (Heap : in out Heap_Record; CellName : in Cell; Value : in Natural) is The_Cell : Cell_Content; begin The_Cell := Cell_Storage.Get_Element (V => Heap.List_Of_Cells, Index => CellName); The_Cell.Val := Value; Cell_Storage.Set_Element (V => Heap.List_Of_Cells, Index => CellName, Value => The_Cell); end Set_Natural_Value; -------------------------------------------------------------------------- procedure Set_Symbol_Value (Heap : in out Heap_Record; CellName : in Cell; Sym : in Dictionary.Symbol) is begin Set_Natural_Value (Heap, CellName, Natural (Dictionary.SymbolRef (Sym))); end Set_Symbol_Value; -------------------------------------------------------------------------- procedure Report_Usage (TheHeap : in Heap_Record) is begin -- as the heap now uses the free list before increasing High_Mark, -- the max usage is High_Mark Statistics.SetTableUsage (Statistics.VCGHeap, Integer (TheHeap.High_Mark)); end Report_Usage; 
-------------------------------------------------------------------------- procedure Set_Assoc_Var (Heap : in out Heap_Record; CellName : in Cell; VarSym : in Dictionary.Symbol) is The_Cell : Cell_Content; begin The_Cell := Cell_Storage.Get_Element (V => Heap.List_Of_Cells, Index => CellName); The_Cell.Assoc_Var := VarSym; Cell_Storage.Set_Element (V => Heap.List_Of_Cells, Index => CellName, Value => The_Cell); end Set_Assoc_Var; -------------------------------------------------------------------------- function Get_Assoc_Var (Heap : in Heap_Record; CellName : in Cell) return Dictionary.Symbol is begin return Cell_Storage.Get_Element (Heap.List_Of_Cells, CellName).Assoc_Var; end Get_Assoc_Var; -------------------------------------------------------------------------- function Get_Heap_Size (Heap : in Heap_Record) return Cell_Storage.Cell is begin return Cell_Storage.Last_Index (Heap.List_Of_Cells); end Get_Heap_Size; end Cells; spark-2012.0.deb/examiner/errorhandler-conversions-tostring-controlflowerror.adb0000644000175000017500000001105411753202336027246 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (ErrorHandler.Conversions.ToString) procedure ControlFlowError (Err_Num : in Error_Types.NumericError; With_Explanation : in Boolean; E_Str : in out E_Strings.T) is Err_Type : ErrorHandler.Control_Flow_Err_Type; procedure ControlFlowErrorExpl (E_Str : in out E_Strings.T) --# global in Err_Type; --# derives E_Str from *, --# Err_Type; is separate; -- Note that the parameter names for this subunit are chosen to make it as easy as -- possible to auto-generate the subunit from this, its parent, file. The -- generation requires copying the case statement below, stripping out the -- current Append'Thing' statements and adding an Append_String for the -- explanatory text that is delineated by --! comments. 
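-- As a purely illustrative sketch of that generation step (this fragment is
-- hypothetical and is not the actual generated output), a branch of the case
-- statement below such as
--
--    when ErrorHandler.Misplaced_Return => --! 2
--       E_Strings.Append_String (E_Str => E_Str, Str => "A return statement may not occur here");
--       --! A return statement may only occur as the last statement of a function.
--
-- would give rise, in the generated ControlFlowErrorExpl subunit, to a branch
-- that appends only the --! text:
--
--    when ErrorHandler.Misplaced_Return =>
--       E_Strings.Append_String
--         (E_Str => E_Str,
--          Str   => "A return statement may only occur as the last statement of a function.");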
procedure Append_Explanation --# global in Err_Type; --# in With_Explanation; --# in out E_Str; --# derives E_Str from *, --# Err_Type, --# With_Explanation; is Explanation_String : E_Strings.T := E_Strings.Empty_String; begin if With_Explanation then -- we need to at least look for an explanation ControlFlowErrorExpl (E_Str => Explanation_String); if E_Strings.Get_Length (E_Str => Explanation_String) > 0 then -- there actually is one E_Strings.Append_String (E_Str => E_Str, Str => ErrorHandler.Explanation_Prefix); E_Strings.Append_Examiner_String (E_Str1 => E_Str, E_Str2 => Explanation_String); E_Strings.Append_String (E_Str => E_Str, Str => ErrorHandler.Explanation_Postfix); end if; end if; end Append_Explanation; begin Err_Type := ErrorHandler.Control_Flow_Err_Type'Val (Err_Num.ErrorNum - Error_Types.ControlFlowErrOffset); case Err_Type is -- HTML Directives --! <"illegal-"> --! <"*** Illegal Structure : "><" : "> when ErrorHandler.Misplaced_Exit => --! 1 E_Strings.Append_String (E_Str => E_Str, Str => "An exit statement may not occur here"); --! Exit statements must be of the form "exit when c;" where the closest --! enclosing statement is a loop or "if c then S; exit;" where the --! if statement has no else part and its closest enclosing --! statement is a loop. See the SPARK Definition for details. when ErrorHandler.Misplaced_Return => --! 2 E_Strings.Append_String (E_Str => E_Str, Str => "A return statement may not occur here"); --! A return statement may only occur as the last statement of a function. when ErrorHandler.Missing_Return => --! 3 E_Strings.Append_String (E_Str => E_Str, Str => "The last statement of this function is not a return statement"); --! SPARK requires that the last statement of a function be a return statement. when ErrorHandler.Return_In_Proc => --! 4 E_Strings.Append_String (E_Str => E_Str, Str => "Return statements may not occur in procedure subprograms"); end case; Append_Explanation; E_Strings.Append_String (E_Str => E_Str, Str => "."); end ControlFlowError; spark-2012.0.deb/examiner/sli.ads0000644000175000017500000010561211753202336015536 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- SLI -- -- Purpose : -- -- The goal of this work is to provide for SPARK a way to output the -- SPARK Ada structure. This structure will be used to navigate -- through the SPARK Ada source code using IDE like GPS, GNATbench or -- Emacs. A similar output already exists for GNAT. The compiler -- writes structure information about the Ada source code in an ALI -- file. 
This file is later used to navigate into the Ada source code -- with GPS, GNATbench or Emacs. -- -- SPARK needs at least to provide the same level of navigation -- facilities as we get from the GNAT Ada compiler. Related to the -- SPARK Ada language (mainly the SPARK annotations), the following -- functionalities should be provided: -- -- Jump between the state (ASM) and the refinement of the state in -- the body -- -- Jump from the usage of a state to the definition of the state -- -- Jump from the usage of a proof function to the declaration of this -- proof function -- -- Jump between the global/derives annotation and the global/derives -- refined annotation in the body -- -- Jump from a variable inside a SPARK annotation to the definition -- of this variable in the Ada source code or to the definition of -- the state in the SPARK annotation. The user should have the choice -- between the two possibilities. -- -- The SLI file should contain only symbols about one compilation -- unit (the body and the spec but not the separates). Currently, -- when the Examiner is analysing a body, all the separates -- related to this body are not analysed at all. This makes a -- difference in the definition of a compilation unit between the -- GNAT compiler and the Examiner. The consequence is that, for -- a package containing separates, we will get one ALI file but many -- SLI files (one SLI file for the main body and one SLI file for -- each separate). This needs to be supported by the IDE. -- -- The format really hasn't changed from the ALI format. The SLI -- reader needs a first V line, and dummy P and R lines which can be -- left empty. It then needs a valid U line referencing the main file -- being considered, and then the regular cross-reference format as -- documented in the Lib.XRef file, except that checksums and timestamps -- for 'D' lines can be omitted. -- -- For Ada source code navigation, the only relevant sections are the -- X section and the D section, because the X section refers to the D -- section. The format of the cross-reference section of the SLI file -- is completely described in the package Lib.XRef of the GNAT -- compiler. -- -- Cross-references for SPARK annotations are similar to those -- contained in the X section of the ALI file. For the SPARK -- annotation functionalities, the cross-reference format used in -- the ALI file can be reused. -- -- The Generate_* procedures are called during the semantic -- analysis. These procedures store all the cross-references in a -- data structure. Increment_Nb_Separates is also called during the -- semantic analysis to count the number of bodies declared as -- separate in the compilation unit. After the semantic analysis of -- the compilation unit, the SLI file is generated using the -- following sequence: Create_File to create the SLI file, Header to -- write the beginning of the file, Dump_Xref to dump the data -- structure containing all the cross-references, and Close_File to close -- the SLI file.
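--
-- As an illustration (this call sequence is an assumption inferred from the
-- description above and from the declarations below, not code taken from the
-- Examiner itself; the variable F is hypothetical), a caller would be expected
-- to drive this package roughly as follows for one compilation unit, after the
-- Generate_* procedures and Increment_Nb_Separates have been invoked during
-- semantic analysis:
--
--    --  F : ContextManager.FileDescriptors for the unit just analysed
--    SLI.Create_File (File_Descriptor => F);  --  create the SLI file
--    SLI.Header      (File_Descriptor => F);  --  write the beginning of the file
--    SLI.Dump_Xref   (File_Descriptor => F);  --  dump the stored cross-references
--    SLI.Close_File;                          --  close the SLI file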
-------------------------------------------------------------------------------- with ContextManager; with Dictionary; with LexTokenManager; with STree; use type ContextManager.UnitDescriptors; use type ContextManager.FileDescriptors; use type ContextManager.UnitTypes; use type Dictionary.Modes; use type Dictionary.SLI_Type; use type Dictionary.Symbol; use type LexTokenManager.Token_Position; use type STree.Iterator; use type STree.SyntaxNode; --# inherit CommandLineData, --# ContextManager, --# ContextManager.Ops, --# Dictionary, --# ErrorHandler, --# ExaminerConstants, --# E_Strings, --# FileSystem, --# IndexManager, --# LexTokenLists, --# LexTokenManager, --# SPARK_IO, --# SP_Symbols, --# STree, --# SystemErrors, --# Version, --# XMLReport; package SLI --# own State; --# initializes State; is -- Create the SLI file associated with the compilation unit -- (File_Descriptor). procedure Create_File (File_Descriptor : in ContextManager.FileDescriptors); --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys, --# State from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# File_Descriptor, --# LexTokenManager.State; -- Close the SLI file. procedure Close_File; --# global in out State; --# derives State from *; -- Write the header of the SLI file associated with the -- compilation unit (File_Descriptor). procedure Header (File_Descriptor : in ContextManager.FileDescriptors); --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in Dictionary.Dict; --# in STree.Table; --# in out ContextManager.Ops.Unit_Heap; --# in out ErrorHandler.Error_Context; --# in out IndexManager.State; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out State; --# in out XMLReport.State; --# derives ContextManager.Ops.Unit_Heap from * & --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# File_Descriptor, --# IndexManager.State, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table & --# State from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# File_Descriptor, --# IndexManager.State, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table, --# XMLReport.State & --# XMLReport.State from *, --# CommandLineData.Content; -- Write all the cross-references for the compilation unit -- associated with the file descriptor (File_Descriptor) and the -- closure in the SLI file. 
procedure Dump_Xref (File_Descriptor : in ContextManager.FileDescriptors); --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out State; --# derives ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# File_Descriptor, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# File_Descriptor, --# LexTokenManager.State, --# State, --# STree.Table & --# State from *, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# File_Descriptor, --# LexTokenManager.State; -- Try to find the symbol declaration associated with the name -- (Lex_Str) at position (Pos) in the dictionary giving a prefix -- (Prefix), a procedure/function name (Subprog_Sym) and a scope -- (Scope). Prefix contains the symbol declaration if found. procedure Look_Up (Prefix : in out Dictionary.Symbol; Scope : in Dictionary.Scopes; Subprog_Sym : in Dictionary.Symbol; Lex_Str : in LexTokenManager.Lex_String; Pos : in LexTokenManager.Token_Position; Full_Package_Name : in Boolean); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Prefix from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Full_Package_Name, --# LexTokenManager.State, --# Lex_Str, --# Scope, --# Subprog_Sym & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Full_Package_Name, --# LexTokenManager.State, --# Lex_Str, --# Pos, --# Prefix, --# Scope, --# Subprog_Sym; -- Add cross-references for the inherit annotation (Parse_Tree) -- giving a scope (Scope). The inherit annotation is located in -- the compilation unit (Comp_Unit). procedure Generate_Xref_Inherit (Comp_Unit : in ContextManager.UnitDescriptors; Parse_Tree : in STree.SyntaxNode; Scope : in Dictionary.Scopes); --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# State, --# STree.Table & --# State from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table; -- Add cross-references for the property list annotation -- (Parse_Tree) giving a scope (Scope). The property list -- annotation is located in the compilation unit (Comp_Unit). 
procedure Generate_Xref_Interrupt (Comp_Unit : in ContextManager.UnitDescriptors; Parse_Tree : in STree.SyntaxNode; Scope : in Dictionary.Scopes); --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# State, --# STree.Table & --# State from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table; -- Add cross-references for the property list annotation -- (Parse_Tree) giving a scope (Scope). The property list -- annotation is located in the compilation unit (Comp_Unit). procedure Generate_Xref_Suspends_Protects (Comp_Unit : in ContextManager.UnitDescriptors; Parse_Tree : in STree.SyntaxNode; Scope : in Dictionary.Scopes); --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# State, --# STree.Table & --# State from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table; -- Add cross-references for the own annotation (Parse_Tree) giving -- a scope (Scope). The own annotation is located in the -- compilation unit (Comp_Unit). procedure Generate_Xref_Own (Comp_Unit : in ContextManager.UnitDescriptors; Parse_Tree : in STree.SyntaxNode; Scope : in Dictionary.Scopes); --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# State, --# STree.Table & --# State from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table; -- Add cross-references for the refinement annotation (Parse_Tree) -- giving a scope (Scope). The refinement annotation is located in -- the compilation unit (Comp_Unit). 
procedure Generate_Xref_Refinement (Comp_Unit : in ContextManager.UnitDescriptors; Parse_Tree : in STree.SyntaxNode; Scope : in Dictionary.Scopes); --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# State, --# STree.Table & --# State from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table; -- Add cross-references for the initializes annotation -- (Parse_Tree) giving a scope (Scope). The initializes annotation -- is located in the compilation unit (Comp_Unit). procedure Generate_Xref_Initializes (Comp_Unit : in ContextManager.UnitDescriptors; Parse_Tree : in STree.SyntaxNode; Scope : in Dictionary.Scopes); --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# State, --# STree.Table & --# State from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table; -- Add cross-references for the global annotation (Parse_Tree) -- giving a procedure/function name (Subprog_Sym) and a scope -- (Scope). The global annotation is located in the compilation -- unit (Comp_Unit). procedure Generate_Xref_Global (Comp_Unit : in ContextManager.UnitDescriptors; Parse_Tree : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Subprog_Sym : in Dictionary.Symbol); --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# State, --# STree.Table, --# Subprog_Sym & --# State from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table, --# Subprog_Sym; -- Add cross-references for the derives annotation (Parse_Tree) -- giving a procedure/function name (Subprog_Sym) and a scope -- (Scope). The derives annotation is located in the compilation -- unit (Comp_Unit). 
procedure Generate_Xref_Derives (Comp_Unit : in ContextManager.UnitDescriptors; Parse_Tree : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Subprog_Sym : in Dictionary.Symbol); --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# State, --# STree.Table, --# Subprog_Sym & --# State from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table, --# Subprog_Sym; -- Add cross-references for the justification annotation -- (Parse_Tree) giving a scope (Scope). The justification -- annotation is located in the compilation unit (Comp_Unit). procedure Generate_Xref_Justification (Comp_Unit : in ContextManager.UnitDescriptors; Parse_Tree : in STree.SyntaxNode; Scope : in Dictionary.Scopes); --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# State, --# STree.Table & --# State from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table; -- Add cross-references for the formal parameters of a proof -- function annotation (Parse_Tree) giving a procedure/function -- name (Subprog_Sym) and a scope (Scope). The derives annotation -- is located in the compilation unit (Comp_Unit). procedure Generate_Xref_Proof_Function (Comp_Unit : in ContextManager.UnitDescriptors; Parse_Tree : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Subprog_Sym : in Dictionary.Symbol); --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# State, --# STree.Table, --# Subprog_Sym & --# State from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table, --# Subprog_Sym; -- Add cross-references for the object assertion annotation -- (Parse_Tree) giving a scope (Scope). The justification -- annotation is located in the compilation unit (Comp_Unit). 
procedure Generate_Xref_Object_Assertion (Comp_Unit : in ContextManager.UnitDescriptors; Parse_Tree : in STree.SyntaxNode; Scope : in Dictionary.Scopes); --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# State, --# STree.Table & --# State from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State, --# Parse_Tree, --# Scope, --# STree.Table; -- Add cross-references for the symbol (Symbol) in a predicate -- annotation (precondition, postcondition, return, assert and -- check). The predicate annotation is located in the compilation -- unit (Comp_Unit). The identifier of the symbol (Symbol) is in -- the subtree (Parse_Tree). procedure Generate_Xref_Symbol (Comp_Unit : in ContextManager.UnitDescriptors; Parse_Tree : in STree.SyntaxNode; Symbol : in Dictionary.Symbol; Is_Declaration : in Boolean); --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# Is_Declaration, --# LexTokenManager.State, --# Parse_Tree, --# State, --# STree.Table, --# Symbol & --# State from *, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# Is_Declaration, --# Parse_Tree, --# STree.Table, --# Symbol; -- Cleanup all the cross-references tables. procedure Cleanup; --# global in out State; --# derives State from *; -- Increment by the number of separates found in the subtree -- (Parse_Tree) of the compilation unit (Comp_Unit). procedure Increment_Nb_Separates (Comp_Unit : in ContextManager.UnitDescriptors; Parse_Tree : in STree.SyntaxNode); --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Heap; --# in LexTokenManager.State; --# in STree.Table; --# in out SPARK_IO.File_Sys; --# in out State; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# LexTokenManager.State, --# Parse_Tree, --# STree.Table & --# State from *, --# Comp_Unit, --# Parse_Tree, --# STree.Table; end SLI; spark-2012.0.deb/examiner/lextokenmanager-relation_algebra-string.adb0000644000175000017500000005261011753202336024705 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Text_IO; with E_Strings.Not_SPARK; package body LexTokenManager.Relation_Algebra.String is procedure Create_Relation (The_Heap : in out Heap.HeapRecord; R : out Relation) is begin RelationAlgebra.CreateRelation (TheHeap => The_Heap, R => R.The_Relation); end Create_Relation; procedure Dispose_Of_Relation (The_Heap : in out Heap.HeapRecord; R : in Relation) is begin RelationAlgebra.DisposeOfRelation (TheHeap => The_Heap, R => R.The_Relation); end Dispose_Of_Relation; -- Returns the value of the row value of a matrix element (Pair). function Row_Value (The_Heap : Heap.HeapRecord; P : RelationAlgebra.Pair) return LexTokenManager.Lex_String is begin return LexTokenManager.Lex_String (Heap.AValue (TheHeap => The_Heap, A => RelationAlgebra.Pair_To_Atom (P => P))); end Row_Value; -- Returns the row index value of the Row_Leader L. function Row_Ldr_Index (The_Heap : Heap.HeapRecord; L : RelationAlgebra.RowLeader) return LexTokenManager.Lex_String is begin return LexTokenManager.Lex_String (Heap.AValue (TheHeap => The_Heap, A => RelationAlgebra.RowLeader_To_Atom (R => L))); end Row_Ldr_Index; -- Inserts an element (Pair) specified by I and J into the matrix -- representing relation R. If row I or column J do not exist in the matrix -- they are created. The new Pair (I, J) is inserted into the matrix and -- the Cache is updated such that the current row is I and the current -- column is J and the current row and column elements refer to the new -- Pair (I, J). -- If the element (I, J) already exists in the matrix the operation has no -- effect on the matrix but the Cache is updated with the current row set -- to I, the current row and column elements set to the Pair (I, J) but -- the current column value is not changed --- Is this correct?? -- R must be non null. 
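   -- Cached_Insert_Pair (below) is the workhorse behind the exported
   -- Insert_Pair and Add_Col operations.  A minimal client sketch follows
   -- (hypothetical code: the heap H, the relation R, the Lex_Strings
   -- Row_Str and Col_Str, and the sequence The_Row are assumed to be
   -- declared by the caller):
   --
   --    Create_Relation     (The_Heap => H, R => R);
   --    Insert_Pair         (The_Heap => H, R => R, I => Row_Str, J => Col_Str);
   --    Row_Extraction      (The_Heap => H, R => R,
   --                         Given_Index => Row_Str, S => The_Row);
   --    Dispose_Of_Relation (The_Heap => H, R => R);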
procedure Cached_Insert_Pair (The_Heap : in out Heap.HeapRecord; R : in Relation; I, J : in LexTokenManager.Lex_String; Cache : in out RelationAlgebra.Caches) --# global in LexTokenManager.State; --# in out Statistics.TableUsage; --# derives Cache, --# Statistics.TableUsage, --# The_Heap from *, --# Cache, --# I, --# J, --# LexTokenManager.State, --# R, --# The_Heap; is Current_Pair, Last_Pair, New_Pair : RelationAlgebra.Pair; Row_Val, Col_Val : LexTokenManager.Lex_String; Pair_Present : Boolean; procedure Insert_Row_Leader (The_Heap : in out Heap.HeapRecord; R : in Relation; I : in LexTokenManager.Lex_String; Cache : in out RelationAlgebra.Caches) --# global in LexTokenManager.State; --# in out Statistics.TableUsage; --# derives Cache, --# The_Heap from Cache, --# I, --# LexTokenManager.State, --# R, --# The_Heap & --# Statistics.TableUsage from *, --# Cache, --# I, --# LexTokenManager.State, --# The_Heap; is Row_Ldr, Last_Ldr : RelationAlgebra.RowLeader; Ldr_Present : Boolean; Ldr_Index : LexTokenManager.Lex_String; procedure Create_Row_Leader (The_Heap : in out Heap.HeapRecord; P : in RelationAlgebra.RowLeader; I : in LexTokenManager.Lex_String; L : out RelationAlgebra.RowLeader) --# global in out Statistics.TableUsage; --# derives L from The_Heap & --# Statistics.TableUsage from *, --# The_Heap & --# The_Heap from *, --# I, --# P; is New_Atom : Heap.Atom; begin Heap.CreateAtom (TheHeap => The_Heap, NewAtom => New_Atom); Heap.UpdateAValue (TheHeap => The_Heap, A => New_Atom, Value => Natural (I)); Heap.UpdateBPointer (TheHeap => The_Heap, A => New_Atom, Pointer => RelationAlgebra.RowLeader_To_Atom (R => RelationAlgebra.NextRowLeader (TheHeap => The_Heap, L => P))); Heap.UpdateBPointer (TheHeap => The_Heap, A => RelationAlgebra.RowLeader_To_Atom (R => P), Pointer => New_Atom); L := RelationAlgebra.Atom_To_RowLeader (A => New_Atom); end Create_Row_Leader; begin Row_Ldr := Cache.RowLdr; Last_Ldr := RelationAlgebra.Atom_To_RowLeader (A => RelationAlgebra.Relation_To_Atom (R => R.The_Relation)); Ldr_Present := False; loop exit when Row_Ldr = RelationAlgebra.NullRowLdr; Ldr_Index := Row_Ldr_Index (The_Heap => The_Heap, L => Row_Ldr); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ldr_Index, Lex_Str2 => I) = LexTokenManager.Str_Eq then Ldr_Present := True; exit; end if; exit when LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ldr_Index, Lex_Str2 => I) = LexTokenManager.Str_Second; Last_Ldr := Row_Ldr; Row_Ldr := RelationAlgebra.NextRowLeader (TheHeap => The_Heap, L => Row_Ldr); end loop; if not Ldr_Present then Create_Row_Leader (The_Heap => The_Heap, P => Last_Ldr, I => I, L => Row_Ldr); end if; if Row_Ldr /= Cache.RowLdr then Cache.RowLdr := Row_Ldr; Cache.RowPair := RelationAlgebra.FirstInRow (TheHeap => The_Heap, L => Row_Ldr); end if; end Insert_Row_Leader; procedure Create_Pair (The_Heap : in out Heap.HeapRecord; New_Pair : out RelationAlgebra.Pair; Row, Col : in LexTokenManager.Lex_String) --# global in out Statistics.TableUsage; --# derives New_Pair from The_Heap & --# Statistics.TableUsage from *, --# The_Heap & --# The_Heap from *, --# Col, --# Row; is A : Heap.Atom; begin Heap.CreateAtom (TheHeap => The_Heap, NewAtom => A); Heap.UpdateAValue (TheHeap => The_Heap, A => A, Value => Natural (Row)); Heap.UpdateBValue (TheHeap => The_Heap, A => A, Value => Natural (Col)); New_Pair := RelationAlgebra.Atom_To_Pair (A => A); end Create_Pair; begin Insert_Row_Leader (The_Heap => The_Heap, R => R, I => I, Cache => Cache); Last_Pair := 
RelationAlgebra.Atom_To_Pair (A => RelationAlgebra.RowLeader_To_Atom (R => Cache.RowLdr)); Current_Pair := Cache.RowPair; Pair_Present := False; loop exit when RelationAlgebra.IsNullPair (P => Current_Pair); Col_Val := Relation_Algebra.Column_Value (The_Heap => The_Heap, P => Current_Pair); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Col_Val, Lex_Str2 => J) = LexTokenManager.Str_Eq then Pair_Present := True; exit; end if; exit when LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Col_Val, Lex_Str2 => J) = LexTokenManager.Str_Second; Last_Pair := Current_Pair; Current_Pair := RelationAlgebra.RightSuccr (TheHeap => The_Heap, P => Current_Pair); end loop; if Pair_Present then Cache.RowPair := Current_Pair; Cache.ColPair := Current_Pair; else Create_Pair (The_Heap => The_Heap, New_Pair => New_Pair, Row => I, Col => J); RelationAlgebra.UpdateRight (TheHeap => The_Heap, P => New_Pair, R => Current_Pair); RelationAlgebra.UpdateRight (TheHeap => The_Heap, P => Last_Pair, R => New_Pair); Relation_Algebra.Insert_Col_Leader (The_Heap => The_Heap, R => Relation_Algebra.Convert_To_Relation (R => R.The_Relation), J => J, Cache => Cache); Last_Pair := RelationAlgebra.Atom_To_Pair (A => RelationAlgebra.ColLeader_To_Atom (C => Cache.ColLdr)); Current_Pair := Cache.ColPair; loop exit when RelationAlgebra.IsNullPair (Current_Pair); Row_Val := Row_Value (The_Heap => The_Heap, P => Current_Pair); exit when LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Row_Val, Lex_Str2 => I) = LexTokenManager.Str_Second; Last_Pair := Current_Pair; Current_Pair := RelationAlgebra.DownSuccr (TheHeap => The_Heap, P => Current_Pair); end loop; RelationAlgebra.UpdateDown (TheHeap => The_Heap, P => New_Pair, D => Current_Pair); RelationAlgebra.UpdateDown (TheHeap => The_Heap, P => Last_Pair, D => New_Pair); Cache.RowPair := New_Pair; Cache.ColPair := New_Pair; end if; end Cached_Insert_Pair; procedure Insert_Pair (The_Heap : in out Heap.HeapRecord; R : in Relation; I, J : in LexTokenManager.Lex_String) is Cache : RelationAlgebra.Caches; begin RelationAlgebra.InitialiseCache (TheHeap => The_Heap, R => R.The_Relation, Cache => Cache); -- we do not need the changed value of Cache in this case --# accept F, 10, Cache, "Cache unused here"; Cached_Insert_Pair (The_Heap => The_Heap, R => R, I => I, J => J, Cache => Cache); --# end accept; end Insert_Pair; procedure Row_Extraction (The_Heap : in out Heap.HeapRecord; R : in Relation; Given_Index : in LexTokenManager.Lex_String; S : out Seq_Algebra.Seq) is Row_Index : LexTokenManager.Lex_String; Row_Ldr : RelationAlgebra.RowLeader; Row_Found : Boolean; Local_S : Seq_Algebra.Seq; Last_S : Seq_Algebra.Member_Of_Seq; P : RelationAlgebra.Pair; begin Seq_Algebra.Create_Seq (The_Heap => The_Heap, S => Local_S); -- The optimisation using sequence operations -- BeforeFirstMember and AppendAfter is only permissible -- because Indices in a relation are ordered identically to the -- set ordering in s SeqAlgebra. This assumption is implementation -- dependent and should be eliminated when a more efficient representation -- of sets and relations is implemented. 
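      -- Concretely: the columns of a row are stored in the same order that
      -- Seq_Algebra uses for its sets, so each extracted column value can
      -- simply be appended after the previous member, and the membership
      -- search that a general insertion would need is avoided.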
Last_S := Seq_Algebra.Before_First_Member (S => Local_S); Row_Found := False; Row_Ldr := RelationAlgebra.FirstRowLeader (TheHeap => The_Heap, R => R.The_Relation); loop exit when Row_Ldr = RelationAlgebra.NullRowLdr; Row_Index := Row_Ldr_Index (The_Heap => The_Heap, L => Row_Ldr); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Row_Index, Lex_Str2 => Given_Index) = LexTokenManager.Str_Eq then Row_Found := True; exit; end if; exit when LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Row_Index, Lex_Str2 => Given_Index) = LexTokenManager.Str_Second; Row_Ldr := RelationAlgebra.NextRowLeader (TheHeap => The_Heap, L => Row_Ldr); end loop; if Row_Found then P := RelationAlgebra.FirstInRow (TheHeap => The_Heap, L => Row_Ldr); loop exit when P = RelationAlgebra.NullPair; -- The optimisation using sequence operations -- BeforeFirstMember and AppendAfter is only permissible -- because Indices in a relation are ordered identically to the -- set ordering in s SeqAlgebra. This assumption is implementation -- dependent and should be eliminated when a more efficient representation -- of sets and relations is implemented. Seq_Algebra.Append_After (The_Heap => The_Heap, M => Last_S, Given_Value => Relation_Algebra.Column_Value (The_Heap => The_Heap, P => P)); P := RelationAlgebra.RightSuccr (TheHeap => The_Heap, P => P); end loop; end if; S := Local_S; end Row_Extraction; procedure Add_Col (The_Heap : in out Heap.HeapRecord; R : in Relation; J : in LexTokenManager.Lex_String; S : in Seq_Algebra.Seq) is M : Seq_Algebra.Member_Of_Seq; Cache : RelationAlgebra.Caches; begin RelationAlgebra.InitialiseCache (TheHeap => The_Heap, R => R.The_Relation, Cache => Cache); M := Seq_Algebra.First_Member (The_Heap => The_Heap, S => S); loop exit when Seq_Algebra.Is_Null_Member (M => M); Cached_Insert_Pair (The_Heap => The_Heap, R => R, I => Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => M), J => J, Cache => Cache); M := Seq_Algebra.Next_Member (The_Heap => The_Heap, M => M); end loop; end Add_Col; procedure Augment_Relation (The_Heap : in out Heap.HeapRecord; A, B : in Relation) -- This procedure augments relation A by adding to it relation B. 
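   -- For example (illustrative values only), if
   --    A = {(X, P), (X, Q)}   and   B = {(X, Q), (Y, R)}
   -- then after the call
   --    A = {(X, P), (X, Q), (Y, R)}
   -- that is, A becomes the union of the two relations; B itself is left
   -- unchanged.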
is P : RelationAlgebra.Pair; Row_Index : LexTokenManager.Lex_String; Row_Ldr : RelationAlgebra.RowLeader; Cache : RelationAlgebra.Caches; begin RelationAlgebra.InitialiseCache (TheHeap => The_Heap, R => A.The_Relation, Cache => Cache); Row_Ldr := RelationAlgebra.FirstRowLeader (TheHeap => The_Heap, R => B.The_Relation); loop exit when Row_Ldr = RelationAlgebra.NullRowLdr; Row_Index := Row_Ldr_Index (The_Heap => The_Heap, L => Row_Ldr); P := RelationAlgebra.FirstInRow (TheHeap => The_Heap, L => Row_Ldr); loop exit when P = RelationAlgebra.NullPair; Cached_Insert_Pair (The_Heap => The_Heap, R => A, I => Row_Index, J => Relation_Algebra.Column_Value (The_Heap => The_Heap, P => P), Cache => Cache); P := RelationAlgebra.RightSuccr (TheHeap => The_Heap, P => P); end loop; Row_Ldr := RelationAlgebra.NextRowLeader (TheHeap => The_Heap, L => Row_Ldr); RelationAlgebra.ResetColumnCache (TheHeap => The_Heap, Cache => Cache); end loop; end Augment_Relation; procedure Debug (The_Heap : in Heap.HeapRecord; R : in Relation) is Row_Ldr : RelationAlgebra.RowLeader; P : RelationAlgebra.Pair; procedure Print (S : in LexTokenManager.Lex_String; Is_Row_Leader : in Boolean) --# derives null from Is_Row_Leader, --# S; is --# hide Print; begin if Is_Row_Leader then Ada.Text_IO.New_Line; end if; Ada.Text_IO.Put (Item => E_Strings.Not_SPARK.Get_String (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => S))); Ada.Text_IO.Put (Item => " "); if Is_Row_Leader then Ada.Text_IO.Put (Item => "=> "); end if; end Print; begin Print (S => LexTokenManager.Null_String, Is_Row_Leader => True); Row_Ldr := RelationAlgebra.FirstRowLeader (TheHeap => The_Heap, R => R.The_Relation); loop exit when Row_Ldr = RelationAlgebra.NullRowLdr; Print (S => Row_Ldr_Index (The_Heap => The_Heap, L => Row_Ldr), Is_Row_Leader => True); P := RelationAlgebra.FirstInRow (TheHeap => The_Heap, L => Row_Ldr); loop exit when P = RelationAlgebra.NullPair; Print (S => Relation_Algebra.Column_Value (The_Heap => The_Heap, P => P), Is_Row_Leader => False); P := RelationAlgebra.RightSuccr (TheHeap => The_Heap, P => P); end loop; Row_Ldr := RelationAlgebra.NextRowLeader (TheHeap => The_Heap, L => Row_Ldr); end loop; end Debug; end LexTokenManager.Relation_Algebra.String; spark-2012.0.deb/examiner/dictionary-rawdict.adb0000644000175000017500000127241411753202336020534 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Unchecked_Conversion; separate (Dictionary) package body RawDict is --# hide RawDict; type OwnTaskInfo is record Variable : Variable_Info_Ref; Owner : Package_Info_Ref; Next : Symbol; end record; type OwnTaskRef is access OwnTaskInfo; type Pragmas is record Given : Boolean; -- true means has been declared Value : LexTokenManager.Lex_String; -- null string means no value, otherwise its a Maths StorageRep end record; type PragmaLists is array (Dictionary.RavenscarPragmas) of Pragmas; type ProtectedInfo is record Own_Variable : Own_Variable_Info_Ref; -- abstract "own var" with same name as type ElementsHidden : Boolean; Visible_Part : Part_Info; Local_Part : Part_Info; Private_Part : Part_Info; TheEntry : Symbol; FirstDiscriminant : Dictionary.Symbol; LastDiscriminant : Dictionary.Symbol; Pragmas : PragmaLists; Protected_Body : Declaration_Info_Ref; Has_Proper_Body : Boolean; end record; type ProtectedRef is access ProtectedInfo; type TaskInfo is record Signature_Is_Wellformed : Booleans; Has_Second_Annotation : Boolean; Has_Derives_Annotation : Boolean; Local_Part : Part_Info; FirstDiscriminant : Dictionary.Symbol; LastDiscriminant : Dictionary.Symbol; Pragmas : PragmaLists; First_Global_Variable : Global_Variables_T; Last_Global_Variable : Global_Variables_T; Task_Body : Declaration_Info_Ref; Has_Proper_Body : Boolean; Body_Is_Hidden : Boolean; Suspends_List : Symbol; First_Loop : Dictionary.Symbol; Last_Loop : Dictionary.Symbol; Uses_Unprotected_Variables : Boolean; Uses_Unchecked_Conversion : Boolean; Assigns_From_External : Boolean; end record; type TaskRef is access TaskInfo; type KnownDiscriminantInfo is record Name : LexTokenManager.Lex_String; Protected_Type : Type_Info_Ref; -- could also be a task type Type_Mark : Type_Info_Ref; SetsPriority : Boolean; Next : Dictionary.Symbol; end record; type KnownDiscriminantRef is access KnownDiscriminantInfo; type SubtypeInfo is record -- record used for extra info needed by subtypes of TASK and PROTECTED types Priority : LexTokenManager.Lex_String; -- StorageRep of number FirstConstraint : Dictionary.Symbol; -- list of discriminant constraints LastConstraint : Dictionary.Symbol; end record; type SubtypeRef is access SubtypeInfo; type DiscriminantConstraintInfo is record StaticValue : LexTokenManager.Lex_String; -- only ONE of this and next field will have a value AccessedObject : Dictionary.Symbol; Next : Dictionary.Symbol; end record; type DiscriminantConstraintRef is access DiscriminantConstraintInfo; type VirtualElementInfo is record The_Variable : RawDict.Variable_Info_Ref; TheOwner : Dictionary.Symbol; -- Variable_Info SeenByOwner : Boolean; Next : Dictionary.Symbol; end record; type VirtualElementInfoRef is access VirtualElementInfo; type SuspendsListItemInfo is record ThePOorSO : Dictionary.Symbol; IsAccountedFor : Boolean; Next : Dictionary.Symbol; end record; type SuspendsListItemInfoRef is access SuspendsListItemInfo; type InterruptStreamMapping is record TheHandler : LexTokenManager.Lex_String; TheInterruptStream : LexTokenManager.Lex_String; Next : Dictionary.Symbol; end record; type InterruptStreamMappingRef is access InterruptStreamMapping; type LoopInfo is record Name : LexTokenManager.Lex_String; Region : Dictionary.Symbol; LoopParameter : Dictionary.Symbol; -- LoopParameterInfor OnEntryVars : Dictionary.Symbol; -- LoopEntryVariableInfo; -- the following field stores a forced type conversion of a Cells.Cell created -- by the VCG and 
representing the exit expression of a for loop. Storing a -- reference to this expression in the Dictionary makes it easy to plant a loop -- invariant asserting that the ExitBound variable = the exit expression. ExitExpn : Natural; -- And similarly for the entry expression... EntryExpn : Natural; HasExits : Boolean; Next : Dictionary.Symbol; end record; type LoopRef is access LoopInfo; type LoopParameterInfo is record Name : LexTokenManager.Lex_String; Type_Mark : Type_Info_Ref; TheLoop : Dictionary.Symbol; HasStaticRange : Boolean; -- true means bounds fixed, non-empty and for loop must be entered IsReverse : Boolean; -- we assume for loops are forward unless this is set true end record; type LoopParameterRef is access LoopParameterInfo; -- Provides a linked list of variables used in the exit condition of a for loop. Each -- variable is uniquely associated with the loop and the actual Ada variable used in the -- exit expression. These special variables are used by teh VCG to store the value of variables -- on entry to the loop so that the loop bounds are properly frozen iaw Ada language definition. type LoopEntryVariableInfo is record OriginalVar : Dictionary.Symbol; TheLoop : Dictionary.Symbol; -- we need this to construct the name of the var (__entry__) Next : Dictionary.Symbol; end record; type LoopEntryVariableRef is access LoopEntryVariableInfo; -------------------------------------------------------------------------------- procedure DiscriminantDebug (CallerID : in String; Given, Expected : in Dictionary.SymbolDiscriminant) is begin SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "*****************************************************************************", 0); SPARK_IO.Put_String (SPARK_IO.Standard_Output, "* From: ", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, CallerID, 0); SPARK_IO.Put_String (SPARK_IO.Standard_Output, "* Symbol of type ", 0); SPARK_IO.Put_String (SPARK_IO.Standard_Output, Dictionary.SymbolDiscriminant'Image (Given), 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " found where ", 0); SPARK_IO.Put_String (SPARK_IO.Standard_Output, "* ", 0); SPARK_IO.Put_String (SPARK_IO.Standard_Output, Dictionary.SymbolDiscriminant'Image (Expected), 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " was expected.", 0); end DiscriminantDebug; -------------------------------------------------------------------------------- function GetSymbolDiscriminant (Item : in Dictionary.Symbol) return Dictionary.SymbolDiscriminant is begin return Dictionary.Dynamic_Symbol_Table.Get_Symbol_Discriminant (Dict.Symbols, Item); end GetSymbolDiscriminant; -------------------------------------------------------------------------------- function Get_Symbol_Compilation_Unit (Item : in Dictionary.Symbol) return ContextManager.UnitDescriptors is begin return Dictionary.Dynamic_Symbol_Table.Get_Symbol_Compilation_Unit (Dict.Symbols, Item); end Get_Symbol_Compilation_Unit; -------------------------------------------------------------------------------- function Get_Symbol_Location (Item : in Dictionary.Symbol) return LexTokenManager.Token_Position is begin return Dictionary.Dynamic_Symbol_Table.Get_Symbol_Location (Dict.Symbols, Item); end Get_Symbol_Location; -------------------------------------------------------------------------------- procedure Set_Symbol_Location (Item : in Dictionary.Symbol; Location : in LexTokenManager.Token_Position) is begin Dictionary.Dynamic_Symbol_Table.Set_Symbol_Location (Dict.Symbols, Item, Location); end Set_Symbol_Location; 
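   --------------------------------------------------------------------------------
   -- The variant-specific accessors that follow (GetProtectedRef, GetTaskRef,
   -- Get_Declaration_Info_Ref and so on) share a common pattern: check that
   -- the symbol's discriminant matches the variant expected by the caller,
   -- reporting a fatal Invalid_Symbol_Table error via DiscriminantDebug and
   -- SystemErrors.Fatal_Error otherwise, and then convert the stored
   -- Ref_Type access value to the variant-specific pointer using an
   -- instantiation of Unchecked_Conversion.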
-------------------------------------------------------------------------------- function GetSymbolRef (Item : in Dictionary.Symbol) return Dictionary.Ref_Type is begin return Dictionary.Dynamic_Symbol_Table.Get_Symbol_Ref (Dict.Symbols, Item); end GetSymbolRef; -------------------------------------------------------------------------------- function GetProtectedRef (Item : Dictionary.Symbol) return ProtectedRef is function RefTypeToProtectedRef is new Unchecked_Conversion (Dictionary.Ref_Type, ProtectedRef); begin if GetSymbolDiscriminant (Item) /= Dictionary.ProtectedInfoSymbol then DiscriminantDebug ("GetProtectedRef", GetSymbolDiscriminant (Item), Dictionary.ProtectedInfoSymbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => ""); end if; return RefTypeToProtectedRef (GetSymbolRef (Item)); end GetProtectedRef; -------------------------------------------------------------------------------- function GetTaskRef (Item : Dictionary.Symbol) return TaskRef is function RefTypeToTaskRef is new Unchecked_Conversion (Dictionary.Ref_Type, TaskRef); begin if GetSymbolDiscriminant (Item) /= Dictionary.TaskInfoSymbol then DiscriminantDebug ("GetTaskRef", GetSymbolDiscriminant (Item), Dictionary.TaskInfoSymbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => ""); end if; return RefTypeToTaskRef (GetSymbolRef (Item)); end GetTaskRef; -------------------------------------------------------------------------------- procedure AddSymbol (Discriminant : in Dictionary.SymbolDiscriminant; Ref : in Dictionary.Ref_Type; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; Item : out Dictionary.Symbol) --# global in out Dict; --# derives Dict from *, --# Discriminant, --# Ref & --# Item from Dict; is begin Dictionary.Dynamic_Symbol_Table.Add_Symbol (The_Table => Dictionary.Dict.Symbols, Discriminant => Discriminant, Ref => Ref, Comp_Unit => Comp_Unit, Loc => Loc, Item => Item); end AddSymbol; -------------------------------------------------------------------------------- -- Declaration_Info -------------------------------------------------------------------------------- function Get_Declaration_Info_Ref (Item : Dictionary.Symbol) return Declaration_Info_Ref is function RefType_To_Declaration_Info_Ref is new Unchecked_Conversion (Dictionary.Ref_Type, Declaration_Info_Ref); begin if Item = Dictionary.NullSymbol then return Null_Declaration_Info_Ref; else if GetSymbolDiscriminant (Item) /= Dictionary.Declaration_Symbol then DiscriminantDebug ("Get_Declaration_Info_Ref", GetSymbolDiscriminant (Item), Dictionary.Declaration_Symbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Get_Declaration_Info_Ref"); end if; return RefType_To_Declaration_Info_Ref (GetSymbolRef (Item)); end if; end Get_Declaration_Info_Ref; -------------------------------------------------------------------------------- procedure Create_Declaration (Context : in Dictionary.Contexts; Scope : in Dictionary.Scopes; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Declaration : out Declaration_Info_Ref) is The_Declaration_Symbol : Dictionary.Symbol; function Declaration_Info_Ref_To_RefType is new Unchecked_Conversion (Declaration_Info_Ref, Dictionary.Ref_Type); begin The_Declaration := new Declaration_Info' (Self => Dictionary.NullSymbol, Context => Context, Scope => Scope, Item => Dictionary.NullSymbol, Next => Null_Declaration_Info_Ref); AddSymbol 
(Discriminant => Dictionary.Declaration_Symbol, Ref => Declaration_Info_Ref_To_RefType (The_Declaration), Comp_Unit => Comp_Unit, Loc => Loc, Item => The_Declaration_Symbol); The_Declaration.Self := The_Declaration_Symbol; exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Declaration"); end Create_Declaration; -------------------------------------------------------------------------------- procedure Set_Declaration_Context (The_Declaration : in Declaration_Info_Ref; Context : in Contexts) is begin The_Declaration.Context := Context; end Set_Declaration_Context; -------------------------------------------------------------------------------- procedure Set_Declaration_Item (The_Declaration : in Declaration_Info_Ref; Item : in Dictionary.Symbol) is begin The_Declaration.Item := Item; end Set_Declaration_Item; -------------------------------------------------------------------------------- procedure Set_Next_Declaration (The_Declaration, Next : in Declaration_Info_Ref) is begin The_Declaration.Next := Next; end Set_Next_Declaration; -------------------------------------------------------------------------------- function Get_Declaration_Symbol (The_Declaration : Declaration_Info_Ref) return Dictionary.Symbol is begin if The_Declaration = Null_Declaration_Info_Ref then return Dictionary.NullSymbol; else return The_Declaration.Self; end if; end Get_Declaration_Symbol; -------------------------------------------------------------------------------- function Get_Declaration_Context (The_Declaration : Declaration_Info_Ref) return Dictionary.Contexts is begin return The_Declaration.Context; end Get_Declaration_Context; -------------------------------------------------------------------------------- function Get_Declaration_Scope (The_Declaration : Declaration_Info_Ref) return Dictionary.Scopes is begin if The_Declaration = Null_Declaration_Info_Ref then return Dictionary.NullScope; else return The_Declaration.Scope; end if; end Get_Declaration_Scope; -------------------------------------------------------------------------------- function Get_Declaration_Item (The_Declaration : Declaration_Info_Ref) return Dictionary.Symbol is begin if The_Declaration = Null_Declaration_Info_Ref then return Dictionary.NullSymbol; else return The_Declaration.Item; end if; end Get_Declaration_Item; -------------------------------------------------------------------------------- function Get_Next_Declaration (The_Declaration : Declaration_Info_Ref) return Declaration_Info_Ref is begin if The_Declaration = Null_Declaration_Info_Ref then return Null_Declaration_Info_Ref; else return The_Declaration.Next; end if; end Get_Next_Declaration; -------------------------------------------------------------------------------- -- Enumeration_Literal_Info -------------------------------------------------------------------------------- function Get_Enumeration_Literal_Info_Ref (Item : Dictionary.Symbol) return Enumeration_Literal_Info_Ref is function RefType_To_Enumeration_Literal_Info_Ref is new Unchecked_Conversion (Dictionary.Ref_Type, Enumeration_Literal_Info_Ref); begin if Item = Dictionary.NullSymbol then return Null_Enumeration_Literal_Info_Ref; else if GetSymbolDiscriminant (Item) /= Dictionary.Enumeration_Literal_Symbol then DiscriminantDebug ("Get_Enumeration_Literal_Info_Ref", GetSymbolDiscriminant (Item), Dictionary.Enumeration_Literal_Symbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => 
"Dictionary.RawDict.Get_Enumeration_Literal_Info_Ref"); end if; return RefType_To_Enumeration_Literal_Info_Ref (GetSymbolRef (Item)); end if; end Get_Enumeration_Literal_Info_Ref; -------------------------------------------------------------------------------- procedure Create_Enumeration_Literal (Name : in LexTokenManager.Lex_String; Position : in LexTokenManager.Lex_String; Enumeration_Type : in Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Enumeration_Literal : out Enumeration_Literal_Info_Ref) is The_Enumeration_Literal_Symol : Dictionary.Symbol; function Enumeration_Literal_Info_Ref_To_RefType is new Unchecked_Conversion (Enumeration_Literal_Info_Ref, Dictionary.Ref_Type); begin The_Enumeration_Literal := new Enumeration_Literal_Info' (Self => Dictionary.NullSymbol, Name => Name, Position => Position, The_Type => Enumeration_Type, Next => Null_Enumeration_Literal_Info_Ref); AddSymbol (Discriminant => Dictionary.Enumeration_Literal_Symbol, Ref => Enumeration_Literal_Info_Ref_To_RefType (The_Enumeration_Literal), Comp_Unit => Comp_Unit, Loc => Loc, Item => The_Enumeration_Literal_Symol); The_Enumeration_Literal.Self := The_Enumeration_Literal_Symol; exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Enumeration_Literal"); end Create_Enumeration_Literal; -------------------------------------------------------------------------------- procedure Set_Next_Enumeration_Literal (The_Enumeration_Literal, Next : in Enumeration_Literal_Info_Ref) is begin The_Enumeration_Literal.Next := Next; end Set_Next_Enumeration_Literal; -------------------------------------------------------------------------------- function Get_Enumeration_Literal_Symbol (The_Enumeration_Literal : Enumeration_Literal_Info_Ref) return Dictionary.Symbol is begin if The_Enumeration_Literal = Null_Enumeration_Literal_Info_Ref then return Dictionary.NullSymbol; else return The_Enumeration_Literal.Self; end if; end Get_Enumeration_Literal_Symbol; -------------------------------------------------------------------------------- function Get_Enumeration_Literal_Name (The_Enumeration_Literal : Enumeration_Literal_Info_Ref) return LexTokenManager.Lex_String is begin if The_Enumeration_Literal = Null_Enumeration_Literal_Info_Ref then return LexTokenManager.Null_String; else return The_Enumeration_Literal.Name; end if; end Get_Enumeration_Literal_Name; -------------------------------------------------------------------------------- function Get_Enumeration_Literal_Position (The_Enumeration_Literal : Enumeration_Literal_Info_Ref) return LexTokenManager.Lex_String is begin if The_Enumeration_Literal = Null_Enumeration_Literal_Info_Ref then return LexTokenManager.Null_String; else return The_Enumeration_Literal.Position; end if; end Get_Enumeration_Literal_Position; -------------------------------------------------------------------------------- function Get_Enumeration_Literal_Type (The_Enumeration_Literal : Enumeration_Literal_Info_Ref) return Type_Info_Ref is begin if The_Enumeration_Literal = Null_Enumeration_Literal_Info_Ref then return Null_Type_Info_Ref; else return The_Enumeration_Literal.The_Type; end if; end Get_Enumeration_Literal_Type; -------------------------------------------------------------------------------- function Get_Next_Enumeration_Literal (The_Enumeration_Literal : Enumeration_Literal_Info_Ref) return Enumeration_Literal_Info_Ref is begin if 
The_Enumeration_Literal = Null_Enumeration_Literal_Info_Ref then return Null_Enumeration_Literal_Info_Ref; else return The_Enumeration_Literal.Next; end if; end Get_Next_Enumeration_Literal; -------------------------------------------------------------------------------- -- Array_Index_Info -------------------------------------------------------------------------------- function Get_Array_Index_Info_Ref (Item : Dictionary.Symbol) return Array_Index_Info_Ref is function RefType_To_Array_Index_Info_Ref is new Unchecked_Conversion (Dictionary.Ref_Type, Array_Index_Info_Ref); begin if Item = Dictionary.NullSymbol then return Null_Array_Index_Info_Ref; else if GetSymbolDiscriminant (Item) /= Dictionary.Array_Index_Symbol then DiscriminantDebug ("Get_Array_Index_Info_Ref", GetSymbolDiscriminant (Item), Dictionary.Array_Index_Symbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Get_Array_Index_Info_Ref"); end if; return RefType_To_Array_Index_Info_Ref (GetSymbolRef (Item)); end if; end Get_Array_Index_Info_Ref; -------------------------------------------------------------------------------- procedure Create_Array_Index (Index_Type : in Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Array_Index : out Array_Index_Info_Ref) is The_Array_Index_Symbol : Dictionary.Symbol; function Array_Index_Info_Ref_To_RefType is new Unchecked_Conversion (Array_Index_Info_Ref, Dictionary.Ref_Type); begin The_Array_Index := new Array_Index_Info'(Self => Dictionary.NullSymbol, Index_Type => Index_Type, Next => Null_Array_Index_Info_Ref); AddSymbol (Discriminant => Dictionary.Array_Index_Symbol, Ref => Array_Index_Info_Ref_To_RefType (The_Array_Index), Comp_Unit => Comp_Unit, Loc => Loc, Item => The_Array_Index_Symbol); The_Array_Index.Self := The_Array_Index_Symbol; exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Array_Index"); end Create_Array_Index; -------------------------------------------------------------------------------- procedure Set_Next_Array_Index (The_Array_Index, Next : in Array_Index_Info_Ref) is begin The_Array_Index.Next := Next; end Set_Next_Array_Index; -------------------------------------------------------------------------------- function Get_Array_Index_Symbol (The_Array_Index : Array_Index_Info_Ref) return Dictionary.Symbol is begin if The_Array_Index = Null_Array_Index_Info_Ref then return Dictionary.NullSymbol; else return The_Array_Index.Self; end if; end Get_Array_Index_Symbol; -------------------------------------------------------------------------------- function Get_Array_Index_Type (The_Array_Index : Array_Index_Info_Ref) return Type_Info_Ref is begin if The_Array_Index = Null_Array_Index_Info_Ref then return Null_Type_Info_Ref; else return The_Array_Index.Index_Type; end if; end Get_Array_Index_Type; -------------------------------------------------------------------------------- function Get_Next_Array_Index (The_Array_Index : Array_Index_Info_Ref) return Array_Index_Info_Ref is begin if The_Array_Index = Null_Array_Index_Info_Ref then return Null_Array_Index_Info_Ref; else return The_Array_Index.Next; end if; end Get_Next_Array_Index; -------------------------------------------------------------------------------- -- Record_Component_Info -------------------------------------------------------------------------------- function Get_Record_Component_Info_Ref (Item : 
Dictionary.Symbol) return Record_Component_Info_Ref is function RefType_To_Record_Component_Info_Ref is new Unchecked_Conversion (Dictionary.Ref_Type, Record_Component_Info_Ref); begin if Item = Dictionary.NullSymbol then return Null_Record_Component_Info_Ref; else if GetSymbolDiscriminant (Item) /= Dictionary.Record_Component_Symbol then DiscriminantDebug ("Get_Record_Component_Info_Ref", GetSymbolDiscriminant (Item), Dictionary.Record_Component_Symbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Get_Record_Component_Info_Ref"); end if; return RefType_To_Record_Component_Info_Ref (GetSymbolRef (Item)); end if; end Get_Record_Component_Info_Ref; -------------------------------------------------------------------------------- procedure Create_Record_Component (Name : in LexTokenManager.Lex_String; Record_Type : in Type_Info_Ref; Component_Type : in Type_Info_Ref; Inherited_Field : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Record_Component : out Record_Component_Info_Ref) is The_Record_Component_Symbol : Dictionary.Symbol; function Record_Component_Info_Ref_To_RefType is new Unchecked_Conversion (Record_Component_Info_Ref, Dictionary.Ref_Type); begin The_Record_Component := new Record_Component_Info' (Self => Dictionary.NullSymbol, Name => Name, Record_Type => Record_Type, Component_Type => Component_Type, Inherited_Field => Inherited_Field, Next => Null_Record_Component_Info_Ref); AddSymbol (Discriminant => Dictionary.Record_Component_Symbol, Ref => Record_Component_Info_Ref_To_RefType (The_Record_Component), Comp_Unit => Comp_Unit, Loc => Loc, Item => The_Record_Component_Symbol); The_Record_Component.Self := The_Record_Component_Symbol; exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Record_Component"); end Create_Record_Component; -------------------------------------------------------------------------------- procedure Set_Next_Record_Component (The_Record_Component, Next : in Record_Component_Info_Ref) is begin The_Record_Component.Next := Next; end Set_Next_Record_Component; -------------------------------------------------------------------------------- function Get_Record_Component_Symbol (The_Record_Component : Record_Component_Info_Ref) return Dictionary.Symbol is begin if The_Record_Component = Null_Record_Component_Info_Ref then return Dictionary.NullSymbol; else return The_Record_Component.Self; end if; end Get_Record_Component_Symbol; -------------------------------------------------------------------------------- function Get_Record_Component_Name (The_Record_Component : Record_Component_Info_Ref) return LexTokenManager.Lex_String is begin if The_Record_Component = Null_Record_Component_Info_Ref then return LexTokenManager.Null_String; else return The_Record_Component.Name; end if; end Get_Record_Component_Name; -------------------------------------------------------------------------------- function Get_Record_Component_Record_Type (The_Record_Component : Record_Component_Info_Ref) return Type_Info_Ref is begin if The_Record_Component = Null_Record_Component_Info_Ref then return Null_Type_Info_Ref; else return The_Record_Component.Record_Type; end if; end Get_Record_Component_Record_Type; -------------------------------------------------------------------------------- function Get_Record_Component_Type (The_Record_Component : Record_Component_Info_Ref) return 
Type_Info_Ref is begin if The_Record_Component = Null_Record_Component_Info_Ref then return Null_Type_Info_Ref; else return The_Record_Component.Component_Type; end if; end Get_Record_Component_Type; -------------------------------------------------------------------------------- function Get_Record_Component_Inherited_Field (The_Record_Component : Record_Component_Info_Ref) return Boolean is begin if The_Record_Component = Null_Record_Component_Info_Ref then return False; else return The_Record_Component.Inherited_Field; end if; end Get_Record_Component_Inherited_Field; -------------------------------------------------------------------------------- function Get_Next_Record_Component (The_Record_Component : Record_Component_Info_Ref) return Record_Component_Info_Ref is begin if The_Record_Component = Null_Record_Component_Info_Ref then return Null_Record_Component_Info_Ref; else return The_Record_Component.Next; end if; end Get_Next_Record_Component; -------------------------------------------------------------------------------- -- Subcomponent_Info -------------------------------------------------------------------------------- function Get_Subcomponent_Info_Ref (Item : Dictionary.Symbol) return Subcomponent_Info_Ref is function RefType_To_Subcomponent_Info_Ref is new Unchecked_Conversion (Dictionary.Ref_Type, Subcomponent_Info_Ref); begin if Item = Dictionary.NullSymbol then return Null_Subcomponent_Info_Ref; else if GetSymbolDiscriminant (Item) /= Dictionary.Subcomponent_Symbol then DiscriminantDebug ("Get_Subcomponent_Info_Ref", GetSymbolDiscriminant (Item), Dictionary.Subcomponent_Symbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Get_Subcomponent_Info_Ref"); end if; return RefType_To_Subcomponent_Info_Ref (GetSymbolRef (Item)); end if; end Get_Subcomponent_Info_Ref; -------------------------------------------------------------------------------- procedure Create_Subcomponent (Object : in Dictionary.Symbol; Record_Component : in Record_Component_Info_Ref; Marked_Valid : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Subcomponent : out Subcomponent_Info_Ref) is The_Subcomponent_Symbol : Dictionary.Symbol; function Subcomponent_Info_Ref_To_RefType is new Unchecked_Conversion (Subcomponent_Info_Ref, Dictionary.Ref_Type); begin The_Subcomponent := new Subcomponent_Info' (Self => Dictionary.NullSymbol, Object => Object, Record_Component => Record_Component, Subcomponents => Null_Subcomponent_Info_Ref, Marked_Valid => Marked_Valid, Next => Null_Subcomponent_Info_Ref); AddSymbol (Discriminant => Dictionary.Subcomponent_Symbol, Ref => Subcomponent_Info_Ref_To_RefType (The_Subcomponent), Comp_Unit => Comp_Unit, Loc => Loc, Item => The_Subcomponent_Symbol); The_Subcomponent.Self := The_Subcomponent_Symbol; exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Subcomponent"); end Create_Subcomponent; -------------------------------------------------------------------------------- procedure Set_Subcomponent_Subcomponents (The_Subcomponent : in Subcomponent_Info_Ref; Sibling : in Subcomponent_Info_Ref) is begin The_Subcomponent.Subcomponents := Sibling; end Set_Subcomponent_Subcomponents; -------------------------------------------------------------------------------- procedure Set_Subcomponent_Marked_Valid (The_Subcomponent : in Subcomponent_Info_Ref; Marked_Valid : in Boolean) is begin 
The_Subcomponent.Marked_Valid := Marked_Valid; end Set_Subcomponent_Marked_Valid; -------------------------------------------------------------------------------- procedure Set_Next_Subcomponent (The_Subcomponent, Next : in Subcomponent_Info_Ref) is begin The_Subcomponent.Next := Next; end Set_Next_Subcomponent; -------------------------------------------------------------------------------- function Get_Subcomponent_Symbol (The_Subcomponent : Subcomponent_Info_Ref) return Dictionary.Symbol is begin if The_Subcomponent = Null_Subcomponent_Info_Ref then return Dictionary.NullSymbol; else return The_Subcomponent.Self; end if; end Get_Subcomponent_Symbol; -------------------------------------------------------------------------------- function Get_Subcomponent_Object (The_Subcomponent : Subcomponent_Info_Ref) return Dictionary.Symbol is begin if The_Subcomponent = Null_Subcomponent_Info_Ref then return Dictionary.NullSymbol; else return The_Subcomponent.Object; end if; end Get_Subcomponent_Object; -------------------------------------------------------------------------------- function Get_Subcomponent_Record_Component (The_Subcomponent : Subcomponent_Info_Ref) return Record_Component_Info_Ref is begin if The_Subcomponent = Null_Subcomponent_Info_Ref then return Null_Record_Component_Info_Ref; else return The_Subcomponent.Record_Component; end if; end Get_Subcomponent_Record_Component; -------------------------------------------------------------------------------- function Get_Subcomponent_Subcomponents (The_Subcomponent : Subcomponent_Info_Ref) return Subcomponent_Info_Ref is begin if The_Subcomponent = Null_Subcomponent_Info_Ref then return Null_Subcomponent_Info_Ref; else return The_Subcomponent.Subcomponents; end if; end Get_Subcomponent_Subcomponents; -------------------------------------------------------------------------------- function Get_Subcomponent_Marked_Valid (The_Subcomponent : Subcomponent_Info_Ref) return Boolean is begin if The_Subcomponent = Null_Subcomponent_Info_Ref then return False; else return The_Subcomponent.Marked_Valid; end if; end Get_Subcomponent_Marked_Valid; -------------------------------------------------------------------------------- function Get_Next_Subcomponent (The_Subcomponent : Subcomponent_Info_Ref) return Subcomponent_Info_Ref is begin if The_Subcomponent = Null_Subcomponent_Info_Ref then return Null_Subcomponent_Info_Ref; else return The_Subcomponent.Next; end if; end Get_Next_Subcomponent; -------------------------------------------------------------------------------- -- Type_Info -------------------------------------------------------------------------------- function Get_Type_Info_Ref (Item : Dictionary.Symbol) return Type_Info_Ref is function RefType_To_Type_Info_Ref is new Unchecked_Conversion (Dictionary.Ref_Type, Type_Info_Ref); begin if Item = Dictionary.NullSymbol then return Null_Type_Info_Ref; else if GetSymbolDiscriminant (Item) /= Dictionary.Type_Symbol then DiscriminantDebug ("Get_Type_Info_Ref", GetSymbolDiscriminant (Item), Dictionary.Type_Symbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Get_Type_Info_Ref"); end if; return RefType_To_Type_Info_Ref (GetSymbolRef (Item)); end if; end Get_Type_Info_Ref; -------------------------------------------------------------------------------- procedure Create_Type (Name : in LexTokenManager.Lex_String; The_Declaration : in Declaration_Info_Ref; Is_Private : in Boolean; Is_Announcement : in Boolean; Comp_Unit : in 
ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; Type_Mark : out Type_Info_Ref) is Type_Mark_Symbol : Dictionary.Symbol; function Type_Info_Ref_To_RefType is new Unchecked_Conversion (Type_Info_Ref, Dictionary.Ref_Type); begin if Is_Announcement then SystemErrors.RT_Assert (C => not Is_Private, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Create_Type"); Type_Mark := new Type_Info' (Self => Dictionary.NullSymbol, Name => Name, Parent => Null_Type_Info_Ref, Declaration => Null_Declaration_Info_Ref, Is_Full_Range_Subtype => False, Discriminant => Dictionary.Unknown_Type_Item, Is_Private => Dictionary.Never, Is_Limited => Dictionary.Never, Is_Limited_Private => False, Is_Derived => False, Is_Tagged => False, Is_Own_Var_Type => False, Is_Atomic => False, Equality_Defined => True, Contains_Float => False, Constrained => False, Static => True, Wellformed => True, Base_Type => Null_Type_Info_Ref, Extends => Null_Type_Info_Ref, Accesses => Null_Type_Info_Ref, Lower => LexTokenManager.Null_String, Upper => LexTokenManager.Null_String, Modulus => LexTokenManager.Null_String, Error_Bound => LexTokenManager.Null_String, Head => Dictionary.NullSymbol, Tail => Dictionary.NullSymbol, Component_Type => Null_Type_Info_Ref, The_Virtual_Element_List => Dictionary.NullSymbol, Ancillary_Fields => Dictionary.NullSymbol, Size_Attribute => LexTokenManager.Null_String, Is_Announcement_Or_Private => True, Is_Announcement => True, Announcement_Declaration => The_Declaration, Private_Declaration => Null_Declaration_Info_Ref); elsif Is_Private then SystemErrors.RT_Assert (C => not Is_Announcement, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Create_Type"); Type_Mark := new Type_Info' (Self => Dictionary.NullSymbol, Name => Name, Parent => Null_Type_Info_Ref, Declaration => Null_Declaration_Info_Ref, Is_Full_Range_Subtype => False, Discriminant => Dictionary.Unknown_Type_Item, Is_Private => Dictionary.Sometimes, Is_Limited => Dictionary.Never, Is_Limited_Private => False, Is_Derived => False, Is_Tagged => False, Is_Own_Var_Type => False, Is_Atomic => False, Equality_Defined => True, Contains_Float => False, Constrained => False, Static => True, Wellformed => True, Base_Type => Null_Type_Info_Ref, Extends => Null_Type_Info_Ref, Accesses => Null_Type_Info_Ref, Lower => LexTokenManager.Null_String, Upper => LexTokenManager.Null_String, Modulus => LexTokenManager.Null_String, Error_Bound => LexTokenManager.Null_String, Head => Dictionary.NullSymbol, Tail => Dictionary.NullSymbol, Component_Type => Null_Type_Info_Ref, The_Virtual_Element_List => Dictionary.NullSymbol, Ancillary_Fields => Dictionary.NullSymbol, Size_Attribute => LexTokenManager.Null_String, Is_Announcement_Or_Private => True, Is_Announcement => False, Announcement_Declaration => Null_Declaration_Info_Ref, Private_Declaration => The_Declaration); else Type_Mark := new Type_Info' (Self => Dictionary.NullSymbol, Name => Name, Parent => Null_Type_Info_Ref, Declaration => The_Declaration, Is_Full_Range_Subtype => False, Discriminant => Dictionary.Unknown_Type_Item, Is_Private => Dictionary.Never, Is_Limited => Dictionary.Never, Is_Limited_Private => False, Is_Derived => False, Is_Tagged => False, Is_Own_Var_Type => False, Is_Atomic => False, Equality_Defined => True, Contains_Float => False, Constrained => False, Static => True, Wellformed => True, Base_Type => Null_Type_Info_Ref, Extends => Null_Type_Info_Ref, Accesses => Null_Type_Info_Ref, Lower => LexTokenManager.Null_String, Upper 
=> LexTokenManager.Null_String, Modulus => LexTokenManager.Null_String, Error_Bound => LexTokenManager.Null_String, Head => Dictionary.NullSymbol, Tail => Dictionary.NullSymbol, Component_Type => Null_Type_Info_Ref, The_Virtual_Element_List => Dictionary.NullSymbol, Ancillary_Fields => Dictionary.NullSymbol, Size_Attribute => LexTokenManager.Null_String, Is_Announcement_Or_Private => False, Kind_Of_Generic => Dictionary.Invalid_Generic_Type); end if; AddSymbol (Discriminant => Dictionary.Type_Symbol, Ref => Type_Info_Ref_To_RefType (Type_Mark), Comp_Unit => Comp_Unit, Loc => Loc, Item => Type_Mark_Symbol); Type_Mark.Self := Type_Mark_Symbol; if The_Declaration /= Null_Declaration_Info_Ref then The_Declaration.Item := Type_Mark_Symbol; end if; exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Type"); end Create_Type; -------------------------------------------------------------------------------- procedure Set_Type_Parent (Type_Mark : in Type_Info_Ref; Parent : in Type_Info_Ref) is begin Type_Mark.Parent := Parent; end Set_Type_Parent; -------------------------------------------------------------------------------- procedure Set_Type_Declaration (Type_Mark : in Type_Info_Ref; The_Declaration : in Declaration_Info_Ref) is begin Type_Mark.Declaration := The_Declaration; The_Declaration.Item := Type_Mark.Self; -- GAA end Set_Type_Declaration; -------------------------------------------------------------------------------- procedure Set_Type_Is_Full_Range_Subtype (Type_Mark : in Type_Info_Ref) is begin Type_Mark.Is_Full_Range_Subtype := True; end Set_Type_Is_Full_Range_Subtype; -------------------------------------------------------------------------------- procedure Set_Type_Discriminant (Type_Mark : in Type_Info_Ref; Discriminant : in Dictionary.Type_Discriminant) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Discriminant"); Type_Mark.Discriminant := Discriminant; end Set_Type_Discriminant; -------------------------------------------------------------------------------- procedure Set_Type_Private (Type_Mark : in Type_Info_Ref; Is_Private : in Dictionary.TriState) is begin Type_Mark.Is_Private := Is_Private; end Set_Type_Private; -------------------------------------------------------------------------------- procedure Set_Type_Limited (Type_Mark : in Type_Info_Ref; Is_Limited : in Dictionary.TriState) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Limited"); Type_Mark.Is_Limited := Is_Limited; end Set_Type_Limited; -------------------------------------------------------------------------------- procedure Set_Type_Is_Tagged (Type_Mark : in Type_Info_Ref; Is_Tagged : in Boolean) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Is_Tagged"); Type_Mark.Is_Tagged := Is_Tagged; end Set_Type_Is_Tagged; -------------------------------------------------------------------------------- procedure Set_Type_Is_Own_Var_Type (Type_Mark : in Type_Info_Ref) is begin Type_Mark.Is_Own_Var_Type := True; end Set_Type_Is_Own_Var_Type; -------------------------------------------------------------------------------- procedure Set_Type_Extends (Type_Mark : in Type_Info_Ref; Root_Type : in 
Type_Info_Ref) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Extends"); Type_Mark.Extends := Root_Type; end Set_Type_Extends; -------------------------------------------------------------------------------- procedure Set_Type_Accesses (Type_Mark : in Type_Info_Ref; The_Access : in Type_Info_Ref) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Accesses"); Type_Mark.Accesses := The_Access; end Set_Type_Accesses; -------------------------------------------------------------------------------- procedure Set_Type_Limited_Private (Type_Mark : in Type_Info_Ref) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Limited_Private"); Type_Mark.Is_Limited_Private := True; end Set_Type_Limited_Private; -------------------------------------------------------------------------------- procedure Set_Type_Derived (Type_Mark : in Type_Info_Ref; Is_Derived : in Boolean) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Derived"); Type_Mark.Is_Derived := Is_Derived; end Set_Type_Derived; -------------------------------------------------------------------------------- procedure Set_Type_Equality_Defined (Type_Mark : in Type_Info_Ref; Equality_Defined : in Boolean) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Equality_Defined"); Type_Mark.Equality_Defined := Equality_Defined; end Set_Type_Equality_Defined; -------------------------------------------------------------------------------- procedure Set_Type_Contains_Float (Type_Mark : in Type_Info_Ref; Contains_Float : in Boolean) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Contains_Float"); Type_Mark.Contains_Float := Contains_Float; end Set_Type_Contains_Float; -------------------------------------------------------------------------------- procedure Set_Type_Constrained (Type_Mark : in Type_Info_Ref; Constrained : in Boolean) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Constrained"); Type_Mark.Constrained := Constrained; end Set_Type_Constrained; -------------------------------------------------------------------------------- procedure Set_Type_Static (Type_Mark : in Type_Info_Ref; Static : in Boolean) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Static"); Type_Mark.Static := Static; end Set_Type_Static; -------------------------------------------------------------------------------- procedure Set_Type_Wellformed (Type_Mark : in Type_Info_Ref; Wellformed : in Boolean) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Wellformed"); Type_Mark.Wellformed := Wellformed; end Set_Type_Wellformed; -------------------------------------------------------------------------------- procedure 
Set_Type_Lower (Type_Mark : in Type_Info_Ref; Lower : in LexTokenManager.Lex_String) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Lower"); Type_Mark.Lower := Lower; end Set_Type_Lower; -------------------------------------------------------------------------------- procedure Set_Type_Upper (Type_Mark : in Type_Info_Ref; Upper : in LexTokenManager.Lex_String) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Upper"); Type_Mark.Upper := Upper; end Set_Type_Upper; -------------------------------------------------------------------------------- procedure Set_Type_Modulus (Type_Mark : in Type_Info_Ref; Modulus : in LexTokenManager.Lex_String) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Modulus"); Type_Mark.Modulus := Modulus; end Set_Type_Modulus; -------------------------------------------------------------------------------- procedure Set_Type_Error_Bound (Type_Mark : in Type_Info_Ref; Error_Bound : in LexTokenManager.Lex_String) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Error_Bound"); Type_Mark.Error_Bound := Error_Bound; end Set_Type_Error_Bound; -------------------------------------------------------------------------------- procedure Set_Type_Base_Type (Type_Mark : in Type_Info_Ref; Base_Type : in Type_Info_Ref) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Base_Type"); Type_Mark.Base_Type := Base_Type; end Set_Type_Base_Type; -------------------------------------------------------------------------------- procedure Set_Type_First_Enumeration_Literal (Type_Mark : in Type_Info_Ref; Enumeration_Literal : in Enumeration_Literal_Info_Ref) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_First_Enumeration_Literal"); Type_Mark.Head := Get_Enumeration_Literal_Symbol (Enumeration_Literal); -- GAA end Set_Type_First_Enumeration_Literal; -------------------------------------------------------------------------------- procedure Set_Type_Last_Enumeration_Literal (Type_Mark : in Type_Info_Ref; Enumeration_Literal : in Enumeration_Literal_Info_Ref) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Last_Enumeration_Literal"); Type_Mark.Tail := Get_Enumeration_Literal_Symbol (Enumeration_Literal); -- GAA end Set_Type_Last_Enumeration_Literal; -------------------------------------------------------------------------------- procedure Set_Type_First_Array_Index (Type_Mark : in Type_Info_Ref; Array_Index : in Array_Index_Info_Ref) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_First_Array_Index"); Type_Mark.Head := Get_Array_Index_Symbol (Array_Index); -- GAA end Set_Type_First_Array_Index; -------------------------------------------------------------------------------- procedure Set_Type_Last_Array_Index (Type_Mark : in Type_Info_Ref; 
Array_Index : in Array_Index_Info_Ref) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Last_Array_Index"); Type_Mark.Tail := Get_Array_Index_Symbol (Array_Index); -- GAA end Set_Type_Last_Array_Index; -------------------------------------------------------------------------------- procedure Set_Type_Array_Component (Type_Mark : in Type_Info_Ref; Component_Type : in Type_Info_Ref) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Array_Component"); Type_Mark.Component_Type := Component_Type; end Set_Type_Array_Component; -------------------------------------------------------------------------------- procedure Set_Type_First_Record_Component (Type_Mark : in Type_Info_Ref; Record_Component : in Record_Component_Info_Ref) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_First_Record_Component"); Type_Mark.Head := Get_Record_Component_Symbol (Record_Component); -- GAA end Set_Type_First_Record_Component; -------------------------------------------------------------------------------- procedure Set_Type_Last_Record_Component (Type_Mark : in Type_Info_Ref; Record_Component : in Record_Component_Info_Ref) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Last_Record_Component"); Type_Mark.Tail := Get_Record_Component_Symbol (Record_Component); -- GAA end Set_Type_Last_Record_Component; -------------------------------------------------------------------------------- procedure Set_Type_Ancillary_Fields (Type_Mark : in Type_Info_Ref; The_Declaration : in Dictionary.Symbol) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Ancillary_Fields"); Type_Mark.Ancillary_Fields := The_Declaration; end Set_Type_Ancillary_Fields; -------------------------------------------------------------------------------- procedure Set_Type_Size_Attribute (Type_Mark : in Type_Info_Ref; Size_Val : in LexTokenManager.Lex_String) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Size_Attribute"); Type_Mark.Size_Attribute := Size_Val; end Set_Type_Size_Attribute; -------------------------------------------------------------------------------- procedure Set_Type_Atomic (Type_Mark : in Type_Info_Ref) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Atomic"); Type_Mark.Is_Atomic := True; end Set_Type_Atomic; -------------------------------------------------------------------------------- procedure Set_Type_Virtual_Element_List (Type_Mark : in Type_Info_Ref; The_List : in Dictionary.Symbol) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Full_Range_Subtype, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Virtual_Element_List"); Type_Mark.The_Virtual_Element_List := The_List; end Set_Type_Virtual_Element_List; -------------------------------------------------------------------------------- procedure Set_Type_Private_Type_Declaration (Type_Mark : in 
Type_Info_Ref; The_Declaration : in Declaration_Info_Ref) is begin SystemErrors.RT_Assert (C => Type_Mark.Is_Announcement_Or_Private, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Private_Type_Declaration"); Type_Mark.Is_Private := Dictionary.Sometimes; Type_Mark.Private_Declaration := The_Declaration; end Set_Type_Private_Type_Declaration; -------------------------------------------------------------------------------- procedure Set_Type_Kind_Of_Generic (Type_Mark : in Type_Info_Ref; Kind_Of_Generic : in Dictionary.Generic_Type_Discriminant) is begin SystemErrors.RT_Assert (C => not Type_Mark.Is_Announcement_Or_Private and then Type_Mark.Discriminant = Dictionary.Generic_Type_Item, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Type_Kind_Of_Generic"); Type_Mark.Kind_Of_Generic := Kind_Of_Generic; end Set_Type_Kind_Of_Generic; -------------------------------------------------------------------------------- function Get_Type_Symbol (Type_Mark : Type_Info_Ref) return Dictionary.Symbol is begin if Type_Mark = Null_Type_Info_Ref then return Dictionary.NullSymbol; else return Type_Mark.Self; end if; end Get_Type_Symbol; -------------------------------------------------------------------------------- function Get_Type_Name (Type_Mark : Type_Info_Ref) return LexTokenManager.Lex_String is begin if Type_Mark = Null_Type_Info_Ref then return LexTokenManager.Null_String; else return Type_Mark.Name; end if; end Get_Type_Name; -------------------------------------------------------------------------------- function Get_Type_Parent (Type_Mark : Type_Info_Ref) return Type_Info_Ref is begin if Type_Mark = Null_Type_Info_Ref then return Null_Type_Info_Ref; else return Type_Mark.Parent; end if; end Get_Type_Parent; -------------------------------------------------------------------------------- function Get_Type_Declaration (Type_Mark : Type_Info_Ref) return Declaration_Info_Ref is begin if Type_Mark = Null_Type_Info_Ref then return Null_Declaration_Info_Ref; else return Type_Mark.Declaration; end if; end Get_Type_Declaration; -------------------------------------------------------------------------------- function Get_Type_Is_Full_Range_Subtype (Type_Mark : Type_Info_Ref) return Boolean is begin if Type_Mark = Null_Type_Info_Ref then return False; else return Type_Mark.Is_Full_Range_Subtype; end if; end Get_Type_Is_Full_Range_Subtype; -------------------------------------------------------------------------------- function Get_First_Constrained_Subtype (Type_Mark : Type_Info_Ref) return Type_Info_Ref is Result : Type_Info_Ref; begin Result := Type_Mark; while Result /= Null_Type_Info_Ref and then Get_Type_Is_Full_Range_Subtype (Type_Mark => Result) loop Result := Get_Type_Parent (Type_Mark => Result); end loop; return Result; end Get_First_Constrained_Subtype; -------------------------------------------------------------------------------- function Get_Type_Discriminant (Type_Mark : Type_Info_Ref) return Dictionary.Type_Discriminant is begin if Type_Mark = Null_Type_Info_Ref then return Dictionary.Unknown_Type_Item; else return Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Discriminant; end if; end Get_Type_Discriminant; -------------------------------------------------------------------------------- function Get_Type_Private (Type_Mark : Type_Info_Ref) return Dictionary.TriState is begin if Type_Mark = Null_Type_Info_Ref then return Dictionary.Never; else return Type_Mark.Is_Private; end if; end Get_Type_Private; 
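   --------------------------------------------------------------------------------
   -- Reading note: most of the Get_Type_* accessors below do not read their
   -- attribute from Type_Mark directly.  They first call
   -- Get_First_Constrained_Subtype, which walks the Parent chain past any
   -- full-range subtypes, mirroring the setters above, which assert that
   -- their target is not a full-range subtype.  The value reported is
   -- therefore that of the first constrained ancestor, or a null/default
   -- value if Type_Mark is Null_Type_Info_Ref.  Minimal illustrative sketch
   -- only (Some_Subtype is a hypothetical Type_Info_Ref, not part of the
   -- original source):
   --
   --    if Get_Type_Static (Type_Mark => Some_Subtype) then
   --       --  answer comes from the first constrained ancestor of
   --       --  Some_Subtype; False is returned if Some_Subtype is null
   --       null;
   --    end if;
   --------------------------------------------------------------------------------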
-------------------------------------------------------------------------------- function Get_Type_Limited (Type_Mark : Type_Info_Ref) return Dictionary.TriState is begin if Type_Mark = Null_Type_Info_Ref then return Dictionary.Never; else return Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Is_Limited; end if; end Get_Type_Limited; -------------------------------------------------------------------------------- function Get_Type_Limited_Private (Type_Mark : Type_Info_Ref) return Boolean is begin if Type_Mark = Null_Type_Info_Ref then return False; else return Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Is_Limited_Private; end if; end Get_Type_Limited_Private; -------------------------------------------------------------------------------- function Get_Type_Derived (Type_Mark : Type_Info_Ref) return Boolean is begin if Type_Mark = Null_Type_Info_Ref then return False; else return Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Is_Derived; end if; end Get_Type_Derived; -------------------------------------------------------------------------------- function Get_Type_Is_Tagged (Type_Mark : Type_Info_Ref) return Boolean is begin if Type_Mark = Null_Type_Info_Ref then return False; else return Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Is_Tagged; end if; end Get_Type_Is_Tagged; -------------------------------------------------------------------------------- function Get_Type_Is_Own_Var_Type (Type_Mark : Type_Info_Ref) return Boolean is begin if Type_Mark = Null_Type_Info_Ref then return False; else return Type_Mark.Is_Own_Var_Type; end if; end Get_Type_Is_Own_Var_Type; -------------------------------------------------------------------------------- function Get_Type_Extends (Type_Mark : Type_Info_Ref) return Type_Info_Ref is begin if Type_Mark = Null_Type_Info_Ref then return Null_Type_Info_Ref; else return Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Extends; end if; end Get_Type_Extends; -------------------------------------------------------------------------------- function Get_Type_Accesses (Type_Mark : Type_Info_Ref) return Type_Info_Ref is begin if Type_Mark = Null_Type_Info_Ref then return Null_Type_Info_Ref; else return Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Accesses; end if; end Get_Type_Accesses; -------------------------------------------------------------------------------- function Get_Type_Equality_Defined (Type_Mark : Type_Info_Ref) return Boolean is begin if Type_Mark = Null_Type_Info_Ref then return False; else return Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Equality_Defined; end if; end Get_Type_Equality_Defined; -------------------------------------------------------------------------------- function Get_Type_Contains_Float (Type_Mark : Type_Info_Ref) return Boolean is begin if Type_Mark = Null_Type_Info_Ref then return False; else return Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Contains_Float; end if; end Get_Type_Contains_Float; -------------------------------------------------------------------------------- function Get_Type_Constrained (Type_Mark : Type_Info_Ref) return Boolean is begin if Type_Mark = Null_Type_Info_Ref then return False; else return Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Constrained; end if; end Get_Type_Constrained; -------------------------------------------------------------------------------- function Get_Type_Static (Type_Mark : Type_Info_Ref) return Boolean is begin if Type_Mark = Null_Type_Info_Ref then return False; else return 
Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Static; end if; end Get_Type_Static; -------------------------------------------------------------------------------- function Get_Type_Wellformed (Type_Mark : Type_Info_Ref) return Boolean is begin if Type_Mark = Null_Type_Info_Ref then return False; else return Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Wellformed; end if; end Get_Type_Wellformed; -------------------------------------------------------------------------------- function Get_Type_Base_Type (Type_Mark : Type_Info_Ref) return Type_Info_Ref is begin if Type_Mark = Null_Type_Info_Ref then return Null_Type_Info_Ref; else return Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Base_Type; end if; end Get_Type_Base_Type; -------------------------------------------------------------------------------- function Get_Type_Lower (Type_Mark : Type_Info_Ref) return LexTokenManager.Lex_String is begin if Type_Mark = Null_Type_Info_Ref then return LexTokenManager.Null_String; else return Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Lower; end if; end Get_Type_Lower; -------------------------------------------------------------------------------- function Get_Type_Upper (Type_Mark : Type_Info_Ref) return LexTokenManager.Lex_String is begin if Type_Mark = Null_Type_Info_Ref then return LexTokenManager.Null_String; else return Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Upper; end if; end Get_Type_Upper; -------------------------------------------------------------------------------- function Get_Type_Modulus (Type_Mark : Type_Info_Ref) return LexTokenManager.Lex_String is begin if Type_Mark = Null_Type_Info_Ref then return LexTokenManager.Null_String; else return Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Modulus; end if; end Get_Type_Modulus; -------------------------------------------------------------------------------- function Get_Type_Error_Bound (Type_Mark : Type_Info_Ref) return LexTokenManager.Lex_String is begin if Type_Mark = Null_Type_Info_Ref then return LexTokenManager.Null_String; else return Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Error_Bound; end if; end Get_Type_Error_Bound; -------------------------------------------------------------------------------- function Get_Type_First_Enumeration_Literal (Type_Mark : Type_Info_Ref) return Enumeration_Literal_Info_Ref is begin if Type_Mark = Null_Type_Info_Ref then return Null_Enumeration_Literal_Info_Ref; else return Get_Enumeration_Literal_Info_Ref (Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Head); -- GAA end if; end Get_Type_First_Enumeration_Literal; -------------------------------------------------------------------------------- function Get_Type_Last_Enumeration_Literal (Type_Mark : Type_Info_Ref) return Enumeration_Literal_Info_Ref is begin if Type_Mark = Null_Type_Info_Ref then return Null_Enumeration_Literal_Info_Ref; else return Get_Enumeration_Literal_Info_Ref (Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Tail); -- GAA end if; end Get_Type_Last_Enumeration_Literal; -------------------------------------------------------------------------------- function Get_Type_First_Array_Index (Type_Mark : Type_Info_Ref) return Array_Index_Info_Ref is begin if Type_Mark = Null_Type_Info_Ref then return Null_Array_Index_Info_Ref; else return Get_Array_Index_Info_Ref (Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Head); -- GAA end if; end Get_Type_First_Array_Index; 
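   --------------------------------------------------------------------------------
   -- Reading note: the Head and Tail fields of a type record are shared
   -- storage.  The setters above place the first/last enumeration literal,
   -- the first/last array index or the first/last record component in the
   -- same two fields, so the meaningful pair of accessors here depends on
   -- the kind of type reported by Get_Type_Discriminant.  Illustrative
   -- sketch only (My_Enum, First_Lit and Last_Lit are hypothetical names,
   -- not part of the original source):
   --
   --    First_Lit : Enumeration_Literal_Info_Ref;
   --    Last_Lit  : Enumeration_Literal_Info_Ref;
   --    ...
   --    First_Lit := Get_Type_First_Enumeration_Literal (Type_Mark => My_Enum);
   --    Last_Lit  := Get_Type_Last_Enumeration_Literal (Type_Mark => My_Enum);
   --------------------------------------------------------------------------------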
-------------------------------------------------------------------------------- function Get_Type_Last_Array_Index (Type_Mark : Type_Info_Ref) return Array_Index_Info_Ref is begin if Type_Mark = Null_Type_Info_Ref then return Null_Array_Index_Info_Ref; else return Get_Array_Index_Info_Ref (Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Tail); -- GAA end if; end Get_Type_Last_Array_Index; -------------------------------------------------------------------------------- function Get_Type_Array_Component (Type_Mark : Type_Info_Ref) return Type_Info_Ref is begin if Type_Mark = Null_Type_Info_Ref then return Null_Type_Info_Ref; else return Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Component_Type; end if; end Get_Type_Array_Component; -------------------------------------------------------------------------------- function Get_Type_First_Record_Component (Type_Mark : Type_Info_Ref) return Record_Component_Info_Ref is begin if Type_Mark = Null_Type_Info_Ref then return Null_Record_Component_Info_Ref; else return Get_Record_Component_Info_Ref (Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Head); -- GAA end if; end Get_Type_First_Record_Component; -------------------------------------------------------------------------------- function Get_Type_Last_Record_Component (Type_Mark : Type_Info_Ref) return Record_Component_Info_Ref is begin if Type_Mark = Null_Type_Info_Ref then return Null_Record_Component_Info_Ref; else return Get_Record_Component_Info_Ref (Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Tail); -- GAA end if; end Get_Type_Last_Record_Component; -------------------------------------------------------------------------------- function Get_Type_Announcement (Type_Mark : Type_Info_Ref) return Declaration_Info_Ref is begin if Type_Mark /= Null_Type_Info_Ref and then Type_Mark.Is_Announcement_Or_Private and then Type_Mark.Is_Announcement then return Type_Mark.Announcement_Declaration; else return Null_Declaration_Info_Ref; end if; end Get_Type_Announcement; -------------------------------------------------------------------------------- function Get_Type_Private_Type_Declaration (Type_Mark : Type_Info_Ref) return Declaration_Info_Ref is begin if Type_Mark /= Null_Type_Info_Ref and then Type_Mark.Is_Announcement_Or_Private then return Type_Mark.Private_Declaration; else return Null_Declaration_Info_Ref; end if; end Get_Type_Private_Type_Declaration; -------------------------------------------------------------------------------- function Get_Type_Ancillary_Fields (Type_Mark : Type_Info_Ref) return Dictionary.Symbol is begin if Type_Mark = Null_Type_Info_Ref then return Dictionary.NullSymbol; else return Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Ancillary_Fields; end if; end Get_Type_Ancillary_Fields; -------------------------------------------------------------------------------- function Get_Type_Size_Attribute (Type_Mark : Type_Info_Ref) return LexTokenManager.Lex_String is begin if Type_Mark = Null_Type_Info_Ref then return LexTokenManager.Null_String; else return Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Size_Attribute; end if; end Get_Type_Size_Attribute; -------------------------------------------------------------------------------- function Get_Type_Atomic (Type_Mark : Type_Info_Ref) return Boolean is begin return Get_First_Constrained_Subtype (Type_Mark => Type_Mark).Is_Atomic; end Get_Type_Atomic; -------------------------------------------------------------------------------- function Get_Type_Virtual_Element_List 
(Type_Mark : Type_Info_Ref) return Dictionary.Symbol is begin if Type_Mark = Null_Type_Info_Ref then return Dictionary.NullSymbol; else return Get_First_Constrained_Subtype (Type_Mark => Type_Mark).The_Virtual_Element_List; end if; end Get_Type_Virtual_Element_List; -------------------------------------------------------------------------------- function Get_Type_Kind_Of_Generic (Type_Mark : Type_Info_Ref) return Dictionary.Generic_Type_Discriminant is Constrained_Type_Mark : constant Type_Info_Ref := Get_First_Constrained_Subtype (Type_Mark => Type_Mark); begin if Constrained_Type_Mark = Null_Type_Info_Ref or else Constrained_Type_Mark.Is_Announcement_Or_Private or else Constrained_Type_Mark.Discriminant /= Dictionary.Generic_Type_Item then return Dictionary.Invalid_Generic_Type; else return Constrained_Type_Mark.Kind_Of_Generic; end if; end Get_Type_Kind_Of_Generic; -------------------------------------------------------------------------------- -- Protected_Type_Info -------------------------------------------------------------------------------- procedure Set_Protected_Type_Own_Variable (The_Protected_Type : in Type_Info_Ref; Own_Variable : in Own_Variable_Info_Ref) is begin GetProtectedRef (The_Protected_Type.Ancillary_Fields).Own_Variable := Own_Variable; end Set_Protected_Type_Own_Variable; -------------------------------------------------------------------------------- procedure Set_Protected_Type_Elements_Hidden (The_Protected_Type : in Type_Info_Ref) is begin GetProtectedRef (The_Protected_Type.Ancillary_Fields).ElementsHidden := True; end Set_Protected_Type_Elements_Hidden; -------------------------------------------------------------------------------- procedure Set_Protected_Type_First_Visible_Declaration (The_Protected_Type : in Type_Info_Ref; The_Declaration : in Declaration_Info_Ref) is begin GetProtectedRef (The_Protected_Type.Ancillary_Fields).Visible_Part.First_Declaration := The_Declaration; end Set_Protected_Type_First_Visible_Declaration; -------------------------------------------------------------------------------- procedure Set_Protected_Type_Last_Visible_Declaration (The_Protected_Type : in Type_Info_Ref; The_Declaration : in Declaration_Info_Ref) is begin GetProtectedRef (The_Protected_Type.Ancillary_Fields).Visible_Part.Last_Declaration := The_Declaration; end Set_Protected_Type_Last_Visible_Declaration; -------------------------------------------------------------------------------- procedure Set_Protected_Type_First_Private_Declaration (The_Protected_Type : in Type_Info_Ref; The_Declaration : in Declaration_Info_Ref) is begin GetProtectedRef (The_Protected_Type.Ancillary_Fields).Private_Part.First_Declaration := The_Declaration; end Set_Protected_Type_First_Private_Declaration; -------------------------------------------------------------------------------- procedure Set_Protected_Type_Last_Private_Declaration (The_Protected_Type : in Type_Info_Ref; The_Declaration : in Declaration_Info_Ref) is begin GetProtectedRef (The_Protected_Type.Ancillary_Fields).Private_Part.Last_Declaration := The_Declaration; end Set_Protected_Type_Last_Private_Declaration; -------------------------------------------------------------------------------- procedure Set_Protected_Type_First_Local_Declaration (The_Protected_Type : in Type_Info_Ref; The_Declaration : in Declaration_Info_Ref) is begin GetProtectedRef (The_Protected_Type.Ancillary_Fields).Local_Part.First_Declaration := The_Declaration; end Set_Protected_Type_First_Local_Declaration; 
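   --------------------------------------------------------------------------------
   -- Reading note: a protected type keeps its protected-specific data in a
   -- separate record reached through its Ancillary_Fields symbol; every
   -- setter and getter in this group goes through
   -- GetProtectedRef (The_Protected_Type.Ancillary_Fields).  The setters
   -- assume a valid protected type, while the getters further below return a
   -- null or default value when handed Null_Type_Info_Ref.  Illustrative
   -- sketch only (PT_Info, First_Vis_Decl and Last_Vis_Decl are hypothetical
   -- names, not part of the original source):
   --
   --    Set_Protected_Type_First_Visible_Declaration
   --      (The_Protected_Type => PT_Info, The_Declaration => First_Vis_Decl);
   --    Set_Protected_Type_Last_Visible_Declaration
   --      (The_Protected_Type => PT_Info, The_Declaration => Last_Vis_Decl);
   --------------------------------------------------------------------------------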
-------------------------------------------------------------------------------- procedure Set_Protected_Type_Last_Local_Declaration (The_Protected_Type : in Type_Info_Ref; The_Declaration : in Declaration_Info_Ref) is begin GetProtectedRef (The_Protected_Type.Ancillary_Fields).Local_Part.Last_Declaration := The_Declaration; end Set_Protected_Type_Last_Local_Declaration; -------------------------------------------------------------------------------- procedure Set_Protected_Type_The_Entry (The_Protected_Type : in Type_Info_Ref; The_Entry : in Dictionary.Symbol) is begin GetProtectedRef (The_Protected_Type.Ancillary_Fields).TheEntry := The_Entry; end Set_Protected_Type_The_Entry; -------------------------------------------------------------------------------- procedure Set_Protected_Type_First_Discriminant (The_Protected_Type : in Type_Info_Ref; Discriminant : in Dictionary.Symbol) is begin GetProtectedRef (The_Protected_Type.Ancillary_Fields).FirstDiscriminant := Discriminant; end Set_Protected_Type_First_Discriminant; -------------------------------------------------------------------------------- procedure Set_Protected_Type_Last_Discriminant (The_Protected_Type : in Type_Info_Ref; Discriminant : in Dictionary.Symbol) is begin GetProtectedRef (The_Protected_Type.Ancillary_Fields).LastDiscriminant := Discriminant; end Set_Protected_Type_Last_Discriminant; -------------------------------------------------------------------------------- procedure Set_Protected_Type_Has_Pragma (The_Protected_Type : in Type_Info_Ref; The_Pragma : in Dictionary.RavenscarPragmas) is begin GetProtectedRef (The_Protected_Type.Ancillary_Fields).Pragmas (The_Pragma).Given := True; end Set_Protected_Type_Has_Pragma; -------------------------------------------------------------------------------- procedure Set_Protected_Type_Pragma_Value (The_Protected_Type : in Type_Info_Ref; The_Pragma : in Dictionary.RavenscarPragmasWithValue; The_Value : in LexTokenManager.Lex_String) is begin GetProtectedRef (The_Protected_Type.Ancillary_Fields).Pragmas (The_Pragma).Value := The_Value; end Set_Protected_Type_Pragma_Value; -------------------------------------------------------------------------------- procedure Set_Protected_Type_Body (The_Protected_Type : in Type_Info_Ref; The_Body : in Declaration_Info_Ref) is begin GetProtectedRef (The_Protected_Type.Ancillary_Fields).Protected_Body := The_Body; end Set_Protected_Type_Body; -------------------------------------------------------------------------------- procedure Set_Protected_Type_Has_Proper_Body (The_Protected_Type : in Type_Info_Ref) is begin GetProtectedRef (The_Protected_Type.Ancillary_Fields).Has_Proper_Body := True; end Set_Protected_Type_Has_Proper_Body; -------------------------------------------------------------------------------- procedure Set_Protected_Type_With_Clauses (The_Protected_Type : in Type_Info_Ref; The_Context_Clause : in Context_Clause_Info_Ref) is begin GetProtectedRef (The_Protected_Type.Ancillary_Fields).Local_Part.With_Clauses := The_Context_Clause; end Set_Protected_Type_With_Clauses; -------------------------------------------------------------------------------- procedure Set_Protected_Type_Use_Type_Clauses (The_Protected_Type : in Type_Info_Ref; The_Use_Type_Clause : in Use_Type_Clause_Info_Ref) is begin GetProtectedRef (The_Protected_Type.Ancillary_Fields).Local_Part.Use_Type_Clauses := The_Use_Type_Clause; end Set_Protected_Type_Use_Type_Clauses; -------------------------------------------------------------------------------- function 
Get_Protected_Type_Own_Variable (The_Protected_Type : Type_Info_Ref) return Own_Variable_Info_Ref is begin if The_Protected_Type = Null_Type_Info_Ref then return Null_Own_Variable_Info_Ref; else return GetProtectedRef (The_Protected_Type.Ancillary_Fields).Own_Variable; end if; end Get_Protected_Type_Own_Variable; -------------------------------------------------------------------------------- function Get_Protected_Type_Elements_Hidden (The_Protected_Type : Type_Info_Ref) return Boolean is begin if The_Protected_Type = Null_Type_Info_Ref then return False; else return GetProtectedRef (The_Protected_Type.Ancillary_Fields).ElementsHidden; end if; end Get_Protected_Type_Elements_Hidden; -------------------------------------------------------------------------------- function Get_Protected_Type_Has_Entry (The_Protected_Type : Type_Info_Ref) return Boolean is begin if The_Protected_Type = Null_Type_Info_Ref then return False; else return GetProtectedRef (The_Protected_Type.Ancillary_Fields).TheEntry /= Dictionary.NullSymbol; end if; end Get_Protected_Type_Has_Entry; -------------------------------------------------------------------------------- function Get_Protected_Type_First_Visible_Declaration (The_Protected_Type : Type_Info_Ref) return Declaration_Info_Ref is begin if The_Protected_Type = Null_Type_Info_Ref then return Null_Declaration_Info_Ref; else return GetProtectedRef (The_Protected_Type.Ancillary_Fields).Visible_Part.First_Declaration; end if; end Get_Protected_Type_First_Visible_Declaration; -------------------------------------------------------------------------------- function Get_Protected_Type_Last_Visible_Declaration (The_Protected_Type : Type_Info_Ref) return Declaration_Info_Ref is begin if The_Protected_Type = Null_Type_Info_Ref then return Null_Declaration_Info_Ref; else return GetProtectedRef (The_Protected_Type.Ancillary_Fields).Visible_Part.Last_Declaration; end if; end Get_Protected_Type_Last_Visible_Declaration; -------------------------------------------------------------------------------- function Get_Protected_Type_First_Private_Declaration (The_Protected_Type : Type_Info_Ref) return Declaration_Info_Ref is begin if The_Protected_Type = Null_Type_Info_Ref then return Null_Declaration_Info_Ref; else return GetProtectedRef (The_Protected_Type.Ancillary_Fields).Private_Part.First_Declaration; end if; end Get_Protected_Type_First_Private_Declaration; -------------------------------------------------------------------------------- function Get_Protected_Type_Last_Private_Declaration (The_Protected_Type : Type_Info_Ref) return Declaration_Info_Ref is begin if The_Protected_Type = Null_Type_Info_Ref then return Null_Declaration_Info_Ref; else return GetProtectedRef (The_Protected_Type.Ancillary_Fields).Private_Part.Last_Declaration; end if; end Get_Protected_Type_Last_Private_Declaration; -------------------------------------------------------------------------------- function Get_Protected_Type_First_Local_Declaration (The_Protected_Type : Type_Info_Ref) return Declaration_Info_Ref is begin if The_Protected_Type = Null_Type_Info_Ref then return Null_Declaration_Info_Ref; else return GetProtectedRef (The_Protected_Type.Ancillary_Fields).Local_Part.First_Declaration; end if; end Get_Protected_Type_First_Local_Declaration; -------------------------------------------------------------------------------- function Get_Protected_Type_Last_Local_Declaration (The_Protected_Type : Type_Info_Ref) return Declaration_Info_Ref is begin if The_Protected_Type = Null_Type_Info_Ref then 
return Null_Declaration_Info_Ref; else return GetProtectedRef (The_Protected_Type.Ancillary_Fields).Local_Part.Last_Declaration; end if; end Get_Protected_Type_Last_Local_Declaration; -------------------------------------------------------------------------------- function Get_Protected_Type_First_Discriminant (The_Protected_Type : Type_Info_Ref) return Dictionary.Symbol is begin if The_Protected_Type = Null_Type_Info_Ref then return Dictionary.NullSymbol; else return GetProtectedRef (The_Protected_Type.Ancillary_Fields).FirstDiscriminant; end if; end Get_Protected_Type_First_Discriminant; -------------------------------------------------------------------------------- function Get_Protected_Type_Last_Discriminant (The_Protected_Type : Type_Info_Ref) return Dictionary.Symbol is begin if The_Protected_Type = Null_Type_Info_Ref then return Dictionary.NullSymbol; else return GetProtectedRef (The_Protected_Type.Ancillary_Fields).LastDiscriminant; end if; end Get_Protected_Type_Last_Discriminant; -------------------------------------------------------------------------------- function Get_Protected_Type_Has_Pragma (The_Protected_Type : Type_Info_Ref; The_Pragma : Dictionary.RavenscarPragmas) return Boolean is begin if The_Protected_Type = Null_Type_Info_Ref then return False; else return GetProtectedRef (The_Protected_Type.Ancillary_Fields).Pragmas (The_Pragma).Given; end if; end Get_Protected_Type_Has_Pragma; -------------------------------------------------------------------------------- function Get_Protected_Type_Pragma_Value (The_Protected_Type : Type_Info_Ref; The_Pragma : Dictionary.RavenscarPragmasWithValue) return LexTokenManager.Lex_String is begin if The_Protected_Type = Null_Type_Info_Ref then return LexTokenManager.Null_String; else return GetProtectedRef (The_Protected_Type.Ancillary_Fields).Pragmas (The_Pragma).Value; end if; end Get_Protected_Type_Pragma_Value; -------------------------------------------------------------------------------- function Get_Protected_Type_Body (The_Protected_Type : Type_Info_Ref) return Declaration_Info_Ref is begin if The_Protected_Type = Null_Type_Info_Ref then return Null_Declaration_Info_Ref; else return GetProtectedRef (The_Protected_Type.Ancillary_Fields).Protected_Body; end if; end Get_Protected_Type_Body; -------------------------------------------------------------------------------- function Get_Protected_Type_Has_Proper_Body (The_Protected_Type : Type_Info_Ref) return Boolean is begin if The_Protected_Type = Null_Type_Info_Ref then return False; else return GetProtectedRef (The_Protected_Type.Ancillary_Fields).Has_Proper_Body; end if; end Get_Protected_Type_Has_Proper_Body; -------------------------------------------------------------------------------- function Get_Protected_Type_With_Clauses (The_Protected_Type : Type_Info_Ref) return Context_Clause_Info_Ref is begin if The_Protected_Type = Null_Type_Info_Ref then return Null_Context_Clause_Info_Ref; else return GetProtectedRef (The_Protected_Type.Ancillary_Fields).Local_Part.With_Clauses; end if; end Get_Protected_Type_With_Clauses; -------------------------------------------------------------------------------- function Get_Protected_Type_Use_Type_Clauses (The_Protected_Type : Type_Info_Ref) return Use_Type_Clause_Info_Ref is begin if The_Protected_Type = Null_Type_Info_Ref then return Null_Use_Type_Clause_Info_Ref; else return GetProtectedRef (The_Protected_Type.Ancillary_Fields).Local_Part.Use_Type_Clauses; end if; end Get_Protected_Type_Use_Type_Clauses; 
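   --------------------------------------------------------------------------------
   -- Reading note: Get_Protected_Type_Has_Entry above is not a stored flag;
   -- it reports whether the TheEntry field of the ancillary record has been
   -- set to something other than Dictionary.NullSymbol by
   -- Set_Protected_Type_The_Entry.  Illustrative sketch only (PT_Info is a
   -- hypothetical Type_Info_Ref, not part of the original source):
   --
   --    if Get_Protected_Type_Has_Entry (The_Protected_Type => PT_Info)
   --      and then not Get_Protected_Type_Has_Proper_Body
   --        (The_Protected_Type => PT_Info)
   --    then
   --       --  an entry has been recorded but no proper body has been seen
   --       null;
   --    end if;
   --------------------------------------------------------------------------------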
-------------------------------------------------------------------------------- -- Task_Type_Info -------------------------------------------------------------------------------- procedure Set_Task_Type_Signature_Not_Wellformed (The_Task_Type : in Type_Info_Ref; Abstraction : in Dictionary.Abstractions) is begin GetTaskRef (The_Task_Type.Ancillary_Fields).Signature_Is_Wellformed (Abstraction) := False; -- we may be marking a missing second anno as malformed so we -- must also mark it as present, thus if Abstraction = Dictionary.IsRefined then GetTaskRef (The_Task_Type.Ancillary_Fields).Has_Second_Annotation := True; end if; end Set_Task_Type_Signature_Not_Wellformed; -------------------------------------------------------------------------------- procedure Set_Task_Type_Has_Second_Annotation (The_Task_Type : in Type_Info_Ref) is begin GetTaskRef (The_Task_Type.Ancillary_Fields).Has_Second_Annotation := True; end Set_Task_Type_Has_Second_Annotation; -------------------------------------------------------------------------------- procedure Set_Task_Type_Has_Derives_Annotation (The_Task_Type : in Type_Info_Ref) is begin GetTaskRef (The_Task_Type.Ancillary_Fields).Has_Derives_Annotation := True; end Set_Task_Type_Has_Derives_Annotation; -------------------------------------------------------------------------------- procedure Set_Task_Type_First_Local_Declaration (The_Task_Type : in Type_Info_Ref; The_Declaration : in Declaration_Info_Ref) is begin GetTaskRef (The_Task_Type.Ancillary_Fields).Local_Part.First_Declaration := The_Declaration; end Set_Task_Type_First_Local_Declaration; -------------------------------------------------------------------------------- procedure Set_Task_Type_Last_Local_Declaration (The_Task_Type : in Type_Info_Ref; The_Declaration : in Declaration_Info_Ref) is begin GetTaskRef (The_Task_Type.Ancillary_Fields).Local_Part.Last_Declaration := The_Declaration; end Set_Task_Type_Last_Local_Declaration; -------------------------------------------------------------------------------- procedure Set_Task_Type_With_Clauses (The_Task_Type : in Type_Info_Ref; The_Context_Clause : in Context_Clause_Info_Ref) is begin GetTaskRef (The_Task_Type.Ancillary_Fields).Local_Part.With_Clauses := The_Context_Clause; end Set_Task_Type_With_Clauses; -------------------------------------------------------------------------------- procedure Set_Task_Type_Use_Type_Clauses (The_Task_Type : in Type_Info_Ref; The_Use_Type_Clause : in Use_Type_Clause_Info_Ref) is begin GetTaskRef (The_Task_Type.Ancillary_Fields).Local_Part.Use_Type_Clauses := The_Use_Type_Clause; end Set_Task_Type_Use_Type_Clauses; -------------------------------------------------------------------------------- procedure Set_Task_Type_First_Discriminant (The_Task_Type : in Type_Info_Ref; Discriminant : in Dictionary.Symbol) is begin GetTaskRef (The_Task_Type.Ancillary_Fields).FirstDiscriminant := Discriminant; end Set_Task_Type_First_Discriminant; -------------------------------------------------------------------------------- procedure Set_Task_Type_Last_Discriminant (The_Task_Type : in Type_Info_Ref; Discriminant : in Dictionary.Symbol) is begin GetTaskRef (The_Task_Type.Ancillary_Fields).LastDiscriminant := Discriminant; end Set_Task_Type_Last_Discriminant; -------------------------------------------------------------------------------- procedure Set_Task_Type_First_Global_Variable (The_Task_Type : in Type_Info_Ref; Abstraction : in Dictionary.Abstractions; The_Global_Variable : in Global_Variable_Info_Ref) is begin GetTaskRef 
(The_Task_Type.Ancillary_Fields).First_Global_Variable (Abstraction) := The_Global_Variable; end Set_Task_Type_First_Global_Variable; -------------------------------------------------------------------------------- procedure Set_Task_Type_Last_Global_Variable (The_Task_Type : in Type_Info_Ref; Abstraction : in Dictionary.Abstractions; The_Global_Variable : in Global_Variable_Info_Ref) is begin GetTaskRef (The_Task_Type.Ancillary_Fields).Last_Global_Variable (Abstraction) := The_Global_Variable; end Set_Task_Type_Last_Global_Variable; -------------------------------------------------------------------------------- procedure Set_Task_Type_Has_Pragma (The_Task_Type : in Type_Info_Ref; The_Pragma : in Dictionary.RavenscarPragmas) is begin GetTaskRef (The_Task_Type.Ancillary_Fields).Pragmas (The_Pragma).Given := True; end Set_Task_Type_Has_Pragma; -------------------------------------------------------------------------------- procedure Set_Task_Type_Pragma_Value (The_Task_Type : in Type_Info_Ref; The_Pragma : in Dictionary.RavenscarPragmasWithValue; The_Value : in LexTokenManager.Lex_String) is begin GetTaskRef (The_Task_Type.Ancillary_Fields).Pragmas (The_Pragma).Value := The_Value; end Set_Task_Type_Pragma_Value; -------------------------------------------------------------------------------- procedure Set_Task_Type_First_Loop (The_Task_Type : in Type_Info_Ref; The_Loop : in Dictionary.Symbol) is begin GetTaskRef (The_Task_Type.Ancillary_Fields).First_Loop := The_Loop; end Set_Task_Type_First_Loop; -------------------------------------------------------------------------------- procedure Set_Task_Type_Last_Loop (The_Task_Type : in Type_Info_Ref; The_Loop : in Dictionary.Symbol) is begin GetTaskRef (The_Task_Type.Ancillary_Fields).Last_Loop := The_Loop; end Set_Task_Type_Last_Loop; -------------------------------------------------------------------------------- procedure Set_Task_Type_Suspends_List (The_Task_Type : in Type_Info_Ref; The_Suspends_List : in Dictionary.Symbol) is begin GetTaskRef (The_Task_Type.Ancillary_Fields).Suspends_List := The_Suspends_List; end Set_Task_Type_Suspends_List; -------------------------------------------------------------------------------- procedure Set_Task_Type_Body (The_Task_Type : in Type_Info_Ref; The_Body : in Declaration_Info_Ref) is begin GetTaskRef (The_Task_Type.Ancillary_Fields).Task_Body := The_Body; end Set_Task_Type_Body; -------------------------------------------------------------------------------- procedure Set_Task_Type_Has_Proper_Body (The_Task_Type : in Type_Info_Ref; Is_Hidden : in Boolean) is begin GetTaskRef (The_Task_Type.Ancillary_Fields).Has_Proper_Body := True; GetTaskRef (The_Task_Type.Ancillary_Fields).Body_Is_Hidden := Is_Hidden; end Set_Task_Type_Has_Proper_Body; -------------------------------------------------------------------------------- procedure Set_Task_Type_Uses_Unprotected_Variables (The_Task_Type : in Type_Info_Ref) is begin GetTaskRef (The_Task_Type.Ancillary_Fields).Uses_Unprotected_Variables := True; end Set_Task_Type_Uses_Unprotected_Variables; -------------------------------------------------------------------------------- procedure Set_Task_Type_Uses_Unchecked_Conversion (The_Task_Type : in Type_Info_Ref) is begin GetTaskRef (The_Task_Type.Ancillary_Fields).Uses_Unchecked_Conversion := True; end Set_Task_Type_Uses_Unchecked_Conversion; -------------------------------------------------------------------------------- procedure Set_Task_Type_Assigns_From_External (The_Task_Type : in Type_Info_Ref) is begin 
GetTaskRef (The_Task_Type.Ancillary_Fields).Assigns_From_External := True; end Set_Task_Type_Assigns_From_External; -------------------------------------------------------------------------------- function Get_Task_Type_Signature_Is_Wellformed (The_Task_Type : Type_Info_Ref; Abstraction : Dictionary.Abstractions) return Boolean is begin if The_Task_Type = Null_Type_Info_Ref then return False; else return GetTaskRef (The_Task_Type.Ancillary_Fields).Signature_Is_Wellformed (Abstraction); end if; end Get_Task_Type_Signature_Is_Wellformed; -------------------------------------------------------------------------------- function Get_Task_Type_Has_Second_Annotation (The_Task_Type : Type_Info_Ref) return Boolean is begin if The_Task_Type = Null_Type_Info_Ref then return False; else return GetTaskRef (The_Task_Type.Ancillary_Fields).Has_Second_Annotation; end if; end Get_Task_Type_Has_Second_Annotation; -------------------------------------------------------------------------------- function Get_Task_Type_Has_Derives_Annotation (The_Task_Type : Type_Info_Ref) return Boolean is begin if The_Task_Type = Null_Type_Info_Ref then return False; else return GetTaskRef (The_Task_Type.Ancillary_Fields).Has_Derives_Annotation; end if; end Get_Task_Type_Has_Derives_Annotation; -------------------------------------------------------------------------------- function Get_Task_Type_First_Local_Declaration (The_Task_Type : Type_Info_Ref) return Declaration_Info_Ref is begin if The_Task_Type = Null_Type_Info_Ref then return Null_Declaration_Info_Ref; else return GetTaskRef (The_Task_Type.Ancillary_Fields).Local_Part.First_Declaration; end if; end Get_Task_Type_First_Local_Declaration; -------------------------------------------------------------------------------- function Get_Task_Type_Last_Local_Declaration (The_Task_Type : Type_Info_Ref) return Declaration_Info_Ref is begin if The_Task_Type = Null_Type_Info_Ref then return Null_Declaration_Info_Ref; else return GetTaskRef (The_Task_Type.Ancillary_Fields).Local_Part.Last_Declaration; end if; end Get_Task_Type_Last_Local_Declaration; -------------------------------------------------------------------------------- function Get_Task_Type_With_Clauses (The_Task_Type : Type_Info_Ref) return Context_Clause_Info_Ref is begin if The_Task_Type = Null_Type_Info_Ref then return Null_Context_Clause_Info_Ref; else return GetTaskRef (The_Task_Type.Ancillary_Fields).Local_Part.With_Clauses; end if; end Get_Task_Type_With_Clauses; -------------------------------------------------------------------------------- function Get_Task_Type_Use_Type_Clauses (The_Task_Type : Type_Info_Ref) return Use_Type_Clause_Info_Ref is begin if The_Task_Type = Null_Type_Info_Ref then return Null_Use_Type_Clause_Info_Ref; else return GetTaskRef (The_Task_Type.Ancillary_Fields).Local_Part.Use_Type_Clauses; end if; end Get_Task_Type_Use_Type_Clauses; -------------------------------------------------------------------------------- function Get_Task_Type_First_Discriminant (The_Task_Type : Type_Info_Ref) return Dictionary.Symbol is begin if The_Task_Type = Null_Type_Info_Ref then return Dictionary.NullSymbol; else return GetTaskRef (The_Task_Type.Ancillary_Fields).FirstDiscriminant; end if; end Get_Task_Type_First_Discriminant; -------------------------------------------------------------------------------- function Get_Task_Type_Last_Discriminant (The_Task_Type : Type_Info_Ref) return Dictionary.Symbol is begin if The_Task_Type = Null_Type_Info_Ref then return Dictionary.NullSymbol; else return 
GetTaskRef (The_Task_Type.Ancillary_Fields).LastDiscriminant; end if; end Get_Task_Type_Last_Discriminant; -------------------------------------------------------------------------------- function Get_Task_Type_First_Global_Variable (The_Task_Type : Type_Info_Ref; Abstraction : Dictionary.Abstractions) return Global_Variable_Info_Ref is begin if The_Task_Type = Null_Type_Info_Ref then return Null_Global_Variable_Info_Ref; else return GetTaskRef (The_Task_Type.Ancillary_Fields).First_Global_Variable (Abstraction); end if; end Get_Task_Type_First_Global_Variable; -------------------------------------------------------------------------------- function Get_Task_Type_Last_Global_Variable (The_Task_Type : Type_Info_Ref; Abstraction : Dictionary.Abstractions) return Global_Variable_Info_Ref is begin if The_Task_Type = Null_Type_Info_Ref then return Null_Global_Variable_Info_Ref; else return GetTaskRef (The_Task_Type.Ancillary_Fields).Last_Global_Variable (Abstraction); end if; end Get_Task_Type_Last_Global_Variable; -------------------------------------------------------------------------------- function Get_Task_Type_Has_Pragma (The_Task_Type : Type_Info_Ref; The_Pragma : Dictionary.RavenscarPragmas) return Boolean is begin if The_Task_Type = Null_Type_Info_Ref then return False; else return GetTaskRef (The_Task_Type.Ancillary_Fields).Pragmas (The_Pragma).Given; end if; end Get_Task_Type_Has_Pragma; -------------------------------------------------------------------------------- function Get_Task_Type_Pragma_Value (The_Task_Type : Type_Info_Ref; The_Pragma : Dictionary.RavenscarPragmasWithValue) return LexTokenManager.Lex_String is begin if The_Task_Type = Null_Type_Info_Ref then return LexTokenManager.Null_String; else return GetTaskRef (The_Task_Type.Ancillary_Fields).Pragmas (The_Pragma).Value; end if; end Get_Task_Type_Pragma_Value; -------------------------------------------------------------------------------- function Get_Task_Type_First_Loop (The_Task_Type : Type_Info_Ref) return Dictionary.Symbol is begin if The_Task_Type = Null_Type_Info_Ref then return Dictionary.NullSymbol; else return GetTaskRef (The_Task_Type.Ancillary_Fields).First_Loop; end if; end Get_Task_Type_First_Loop; -------------------------------------------------------------------------------- function Get_Task_Type_Last_Loop (The_Task_Type : Type_Info_Ref) return Dictionary.Symbol is begin if The_Task_Type = Null_Type_Info_Ref then return Dictionary.NullSymbol; else return GetTaskRef (The_Task_Type.Ancillary_Fields).Last_Loop; end if; end Get_Task_Type_Last_Loop; -------------------------------------------------------------------------------- function Get_Task_Type_Suspends_List (The_Task_Type : Type_Info_Ref) return Dictionary.Symbol is begin if The_Task_Type = Null_Type_Info_Ref then return Dictionary.NullSymbol; else return GetTaskRef (The_Task_Type.Ancillary_Fields).Suspends_List; end if; end Get_Task_Type_Suspends_List; -------------------------------------------------------------------------------- function Get_Task_Type_Body (The_Task_Type : Type_Info_Ref) return Declaration_Info_Ref is begin if The_Task_Type = Null_Type_Info_Ref then return Null_Declaration_Info_Ref; else return GetTaskRef (The_Task_Type.Ancillary_Fields).Task_Body; end if; end Get_Task_Type_Body; -------------------------------------------------------------------------------- function Get_Task_Type_Has_Proper_Body (The_Task_Type : Type_Info_Ref) return Boolean is begin if The_Task_Type = Null_Type_Info_Ref then return False; else return 
GetTaskRef (The_Task_Type.Ancillary_Fields).Has_Proper_Body; end if; end Get_Task_Type_Has_Proper_Body; -------------------------------------------------------------------------------- function Get_Task_Type_Uses_Unprotected_Variables (The_Task_Type : Type_Info_Ref) return Boolean is begin if The_Task_Type = Null_Type_Info_Ref then return False; else return GetTaskRef (The_Task_Type.Ancillary_Fields).Uses_Unprotected_Variables; end if; end Get_Task_Type_Uses_Unprotected_Variables; -------------------------------------------------------------------------------- function Get_Task_Type_Uses_Unchecked_Conversion (The_Task_Type : Type_Info_Ref) return Boolean is begin if The_Task_Type = Null_Type_Info_Ref then return False; else return GetTaskRef (The_Task_Type.Ancillary_Fields).Uses_Unchecked_Conversion; end if; end Get_Task_Type_Uses_Unchecked_Conversion; -------------------------------------------------------------------------------- function Get_Task_Type_Assigns_From_External (The_Task_Type : Type_Info_Ref) return Boolean is begin if The_Task_Type = Null_Type_Info_Ref then return False; else return GetTaskRef (The_Task_Type.Ancillary_Fields).Assigns_From_External; end if; end Get_Task_Type_Assigns_From_External; -------------------------------------------------------------------------------- function Get_Task_Type_Body_Is_Hidden (The_Task_Type : Type_Info_Ref) return Boolean is begin if The_Task_Type = Null_Type_Info_Ref then return False; else return GetTaskRef (The_Task_Type.Ancillary_Fields).Body_Is_Hidden; end if; end Get_Task_Type_Body_Is_Hidden; -------------------------------------------------------------------------------- -- Constant_Info -------------------------------------------------------------------------------- function Get_Constant_Info_Ref (Item : Dictionary.Symbol) return Constant_Info_Ref is function RefType_To_Constant_Info_Ref is new Unchecked_Conversion (Dictionary.Ref_Type, Constant_Info_Ref); begin if Item = Dictionary.NullSymbol then return Null_Constant_Info_Ref; else if GetSymbolDiscriminant (Item) /= Dictionary.Constant_Symbol then DiscriminantDebug ("Get_Constant_Info_Ref", GetSymbolDiscriminant (Item), Dictionary.Constant_Symbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Get_Constant_Info_Ref"); end if; return RefType_To_Constant_Info_Ref (GetSymbolRef (Item)); end if; end Get_Constant_Info_Ref; -------------------------------------------------------------------------------- procedure Create_Constant (Name : in LexTokenManager.Lex_String; Type_Mark : in Type_Info_Ref; Static : in Boolean; The_Declaration : in Declaration_Info_Ref; Is_Deferred : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Constant : out Constant_Info_Ref) is The_Constant_Symbol : Dictionary.Symbol; function Constant_Info_Ref_To_RefType is new Unchecked_Conversion (Constant_Info_Ref, Dictionary.Ref_Type); begin if Is_Deferred then The_Constant := new Constant_Info' (Self => Dictionary.NullSymbol, Name => Name, Static => Static, Type_Mark => Type_Mark, Value => LexTokenManager.Null_String, Exp_Is_Wellformed => False, Exp_Node => 0, Associated_Generic_Parameter => Null_Generic_Parameter_Info_Ref, First_Rule_Policy => Null_Rule_Policy_Info_Ref, Last_Rule_Policy => Null_Rule_Policy_Info_Ref, Declaration => Null_Declaration_Info_Ref, Is_Deferred => True, Deferred_Declaration => The_Declaration); else The_Constant := new Constant_Info' (Self => Dictionary.NullSymbol, Name => Name, 
Static => Static, Type_Mark => Type_Mark, Value => LexTokenManager.Null_String, Exp_Is_Wellformed => False, Exp_Node => 0, Associated_Generic_Parameter => Null_Generic_Parameter_Info_Ref, First_Rule_Policy => Null_Rule_Policy_Info_Ref, Last_Rule_Policy => Null_Rule_Policy_Info_Ref, Declaration => The_Declaration, Is_Deferred => False); end if; AddSymbol (Discriminant => Dictionary.Constant_Symbol, Ref => Constant_Info_Ref_To_RefType (The_Constant), Comp_Unit => Comp_Unit, Loc => Loc, Item => The_Constant_Symbol); The_Constant.Self := The_Constant_Symbol; if The_Declaration /= Null_Declaration_Info_Ref then The_Declaration.Item := The_Constant_Symbol; end if; exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Constant"); end Create_Constant; -------------------------------------------------------------------------------- procedure Set_Constant_Value (The_Constant : in Constant_Info_Ref; Value : in LexTokenManager.Lex_String) is begin The_Constant.Value := Value; end Set_Constant_Value; -------------------------------------------------------------------------------- procedure Set_Constant_Exp_Node (The_Constant : in Constant_Info_Ref; Exp_Is_Wellformed : in Boolean; Exp_Node : in ExaminerConstants.RefType) is begin The_Constant.Exp_Node := Exp_Node; The_Constant.Exp_Is_Wellformed := Exp_Is_Wellformed; end Set_Constant_Exp_Node; -------------------------------------------------------------------------------- procedure Set_Constant_Static (The_Constant : in Constant_Info_Ref; Static : in Boolean) is begin The_Constant.Static := Static; end Set_Constant_Static; -------------------------------------------------------------------------------- procedure Set_Constant_Declaration (The_Constant : in Constant_Info_Ref; The_Declaration : in Declaration_Info_Ref) is begin The_Constant.Declaration := The_Declaration; The_Declaration.Item := The_Constant.Self; -- GAA end Set_Constant_Declaration; -------------------------------------------------------------------------------- procedure Set_Constant_Deferred_Declaration (The_Constant : in Constant_Info_Ref; The_Declaration : in Declaration_Info_Ref) is begin SystemErrors.RT_Assert (C => The_Constant.Is_Deferred, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Dictionary.RawDict.Set_Constant_Deferred_Declaration"); The_Constant.Deferred_Declaration := The_Declaration; end Set_Constant_Deferred_Declaration; -------------------------------------------------------------------------------- procedure Set_Constant_Associated_Generic_Parameter (The_Constant : in Constant_Info_Ref; The_Generic_Parameter : in Generic_Parameter_Info_Ref) is begin The_Constant.Associated_Generic_Parameter := The_Generic_Parameter; end Set_Constant_Associated_Generic_Parameter; -------------------------------------------------------------------------------- procedure Set_Constant_First_Rule_Policy (The_Constant : in Constant_Info_Ref; The_Rule_Policy : in Rule_Policy_Info_Ref) is begin The_Constant.First_Rule_Policy := The_Rule_Policy; end Set_Constant_First_Rule_Policy; -------------------------------------------------------------------------------- procedure Set_Constant_Last_Rule_Policy (The_Constant : in Constant_Info_Ref; The_Rule_Policy : in Rule_Policy_Info_Ref) is begin The_Constant.Last_Rule_Policy := The_Rule_Policy; end Set_Constant_Last_Rule_Policy; -------------------------------------------------------------------------------- function Get_Constant_Symbol (The_Constant : 
Constant_Info_Ref) return Dictionary.Symbol is begin if The_Constant = Null_Constant_Info_Ref then return Dictionary.NullSymbol; else return The_Constant.Self; end if; end Get_Constant_Symbol; -------------------------------------------------------------------------------- function Get_Constant_Name (The_Constant : Constant_Info_Ref) return LexTokenManager.Lex_String is begin if The_Constant = Null_Constant_Info_Ref then return LexTokenManager.Null_String; else return The_Constant.Name; end if; end Get_Constant_Name; -------------------------------------------------------------------------------- function Get_Constant_Type (The_Constant : Constant_Info_Ref) return Type_Info_Ref is begin if The_Constant = Null_Constant_Info_Ref then return Null_Type_Info_Ref; else return The_Constant.Type_Mark; end if; end Get_Constant_Type; -------------------------------------------------------------------------------- function Get_Constant_Value (The_Constant : Constant_Info_Ref) return LexTokenManager.Lex_String is begin if The_Constant = Null_Constant_Info_Ref then return LexTokenManager.Null_String; else return The_Constant.Value; end if; end Get_Constant_Value; -------------------------------------------------------------------------------- function Get_Constant_First_Rule_Policy (The_Constant : Constant_Info_Ref) return Rule_Policy_Info_Ref is begin if The_Constant = Null_Constant_Info_Ref then return Null_Rule_Policy_Info_Ref; else return The_Constant.First_Rule_Policy; end if; end Get_Constant_First_Rule_Policy; -------------------------------------------------------------------------------- function Get_Constant_Last_Rule_Policy (The_Constant : Constant_Info_Ref) return Rule_Policy_Info_Ref is begin if The_Constant = Null_Constant_Info_Ref then return Null_Rule_Policy_Info_Ref; else return The_Constant.Last_Rule_Policy; end if; end Get_Constant_Last_Rule_Policy; -------------------------------------------------------------------------------- function Get_Constant_Exp_Node (The_Constant : Constant_Info_Ref) return ExaminerConstants.RefType is begin if The_Constant = Null_Constant_Info_Ref then return ExaminerConstants.RefType'First; else return The_Constant.Exp_Node; end if; end Get_Constant_Exp_Node; -------------------------------------------------------------------------------- function Get_Constant_Exp_Is_Wellformed (The_Constant : Constant_Info_Ref) return Boolean is begin if The_Constant = Null_Constant_Info_Ref then return False; else return The_Constant.Exp_Is_Wellformed; end if; end Get_Constant_Exp_Is_Wellformed; -------------------------------------------------------------------------------- function Get_Constant_Static (The_Constant : Constant_Info_Ref) return Boolean is begin if The_Constant = Null_Constant_Info_Ref then return False; else return The_Constant.Static; end if; end Get_Constant_Static; -------------------------------------------------------------------------------- function Get_Constant_Declaration (The_Constant : Constant_Info_Ref) return Declaration_Info_Ref is begin if The_Constant = Null_Constant_Info_Ref then return Null_Declaration_Info_Ref; else return The_Constant.Declaration; end if; end Get_Constant_Declaration; -------------------------------------------------------------------------------- function Get_Constant_Deferred_Declaration (The_Constant : Constant_Info_Ref) return Declaration_Info_Ref is begin if The_Constant /= Null_Constant_Info_Ref and then The_Constant.Is_Deferred then return The_Constant.Deferred_Declaration; else return 
Null_Declaration_Info_Ref; end if; end Get_Constant_Deferred_Declaration; -------------------------------------------------------------------------------- function Get_Constant_Associated_Generic_Parameter (The_Constant : Constant_Info_Ref) return Generic_Parameter_Info_Ref is begin if The_Constant = Null_Constant_Info_Ref then return Null_Generic_Parameter_Info_Ref; else return The_Constant.Associated_Generic_Parameter; end if; end Get_Constant_Associated_Generic_Parameter; -------------------------------------------------------------------------------- -- Variable_Info -------------------------------------------------------------------------------- function Get_Variable_Info_Ref (Item : Dictionary.Symbol) return Variable_Info_Ref is function RefType_To_Variable_Info_Ref is new Unchecked_Conversion (Dictionary.Ref_Type, Variable_Info_Ref); begin if Item = Dictionary.NullSymbol then return Null_Variable_Info_Ref; else if GetSymbolDiscriminant (Item) /= Dictionary.Variable_Symbol then DiscriminantDebug ("Get_Variable_Info_Ref", GetSymbolDiscriminant (Item), Dictionary.Variable_Symbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Get_Variable_Info_Ref"); end if; return RefType_To_Variable_Info_Ref (GetSymbolRef (Item)); end if; end Get_Variable_Info_Ref; -------------------------------------------------------------------------------- procedure Create_Variable (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Variable : out Variable_Info_Ref) is The_Variable_Symbol : Dictionary.Symbol; function Variable_Info_Ref_To_RefType is new Unchecked_Conversion (Variable_Info_Ref, Dictionary.Ref_Type); begin The_Variable := new Variable_Info' (Self => Dictionary.NullSymbol, Name => Name, Type_Mark => Dictionary.Dict.Types.Unknown_Type_Mark, Abstract_Type_Mark => Dictionary.Dict.Types.Unknown_Type_Mark, Initialized => False, Has_Address_Clause => False, Has_Pragma_Import => False, Is_Aliased => False, Marked_Valid => True, Exp_Node => 0, Declaration => Null_Declaration_Info_Ref, Global_References => Global_Variables_T'(Dictionary.Abstractions => Null_Global_Variable_Info_Ref), Own_Variable => Null_Own_Variable_Info_Ref, Own_Task => Dictionary.NullSymbol, Virtual_Element => Dictionary.NullSymbol, Constituent => Null_Constituent_Info_Ref, Subcomponents => Null_Subcomponent_Info_Ref); AddSymbol (Discriminant => Dictionary.Variable_Symbol, Ref => Variable_Info_Ref_To_RefType (The_Variable), Comp_Unit => Comp_Unit, Loc => Loc, Item => The_Variable_Symbol); The_Variable.Self := The_Variable_Symbol; exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Variable"); end Create_Variable; -------------------------------------------------------------------------------- procedure Set_Variable_Type (The_Variable : in Variable_Info_Ref; Type_Mark : in Type_Info_Ref) is begin The_Variable.Type_Mark := Type_Mark; end Set_Variable_Type; -------------------------------------------------------------------------------- procedure Set_Variable_Abstract_Type (The_Variable : in Variable_Info_Ref; Abstract_Type_Mark : in Type_Info_Ref) is begin The_Variable.Abstract_Type_Mark := Abstract_Type_Mark; end Set_Variable_Abstract_Type; -------------------------------------------------------------------------------- procedure Set_Variable_Initialized (The_Variable : in Variable_Info_Ref) is begin 
The_Variable.Initialized := True; end Set_Variable_Initialized; -------------------------------------------------------------------------------- procedure Set_Variable_Has_Address_Clause (The_Variable : in Variable_Info_Ref) is begin The_Variable.Has_Address_Clause := True; end Set_Variable_Has_Address_Clause; -------------------------------------------------------------------------------- procedure Set_Variable_Has_Pragma_Import (The_Variable : in Variable_Info_Ref) is begin The_Variable.Has_Pragma_Import := True; end Set_Variable_Has_Pragma_Import; -------------------------------------------------------------------------------- procedure Set_Variable_Is_Aliased (The_Variable : in Variable_Info_Ref) is begin The_Variable.Is_Aliased := True; end Set_Variable_Is_Aliased; -------------------------------------------------------------------------------- procedure Set_Variable_Marked_Valid (The_Variable : in Variable_Info_Ref; Val : in Boolean) is begin The_Variable.Marked_Valid := Val; end Set_Variable_Marked_Valid; -------------------------------------------------------------------------------- procedure Set_Variable_Declaration (The_Variable : in Variable_Info_Ref; The_Declaration : in Declaration_Info_Ref) is begin The_Variable.Declaration := The_Declaration; The_Declaration.Item := The_Variable.Self; -- GAA end Set_Variable_Declaration; -------------------------------------------------------------------------------- procedure Set_Variable_Exp_Node (The_Variable : in Variable_Info_Ref; Exp_Node : in ExaminerConstants.RefType) is begin The_Variable.Exp_Node := Exp_Node; end Set_Variable_Exp_Node; -------------------------------------------------------------------------------- procedure Set_Variable_Own_Task (The_Variable : in Variable_Info_Ref; Own_Task : in Dictionary.Symbol) is begin The_Variable.Own_Task := Own_Task; end Set_Variable_Own_Task; -------------------------------------------------------------------------------- procedure Set_Variable_Virtual_Element (The_Variable : in Variable_Info_Ref; Virtual_Element : in Dictionary.Symbol) is begin The_Variable.Virtual_Element := Virtual_Element; end Set_Variable_Virtual_Element; -------------------------------------------------------------------------------- procedure Set_Variable_Global_References (The_Variable : in Variable_Info_Ref; Abstraction : in Dictionary.Abstractions; Reference : in Global_Variable_Info_Ref) is begin The_Variable.Global_References (Abstraction) := Reference; end Set_Variable_Global_References; -------------------------------------------------------------------------------- procedure Set_Variable_Own_Variable (The_Variable : in Variable_Info_Ref; Own_Variable : in Own_Variable_Info_Ref) is begin The_Variable.Own_Variable := Own_Variable; end Set_Variable_Own_Variable; -------------------------------------------------------------------------------- procedure Set_Variable_Constituent (The_Variable : in Variable_Info_Ref; The_Constituent : in Constituent_Info_Ref) is begin The_Variable.Constituent := The_Constituent; end Set_Variable_Constituent; -------------------------------------------------------------------------------- procedure Set_Variable_Subcomponents (The_Variable : in Variable_Info_Ref; Subcomponents : in Subcomponent_Info_Ref) is begin The_Variable.Subcomponents := Subcomponents; end Set_Variable_Subcomponents; -------------------------------------------------------------------------------- function Get_Variable_Symbol (The_Variable : Variable_Info_Ref) return Dictionary.Symbol is begin if The_Variable = 
Null_Variable_Info_Ref then return Dictionary.NullSymbol; else return The_Variable.Self; end if; end Get_Variable_Symbol; -------------------------------------------------------------------------------- function Get_Variable_Name (The_Variable : Variable_Info_Ref) return LexTokenManager.Lex_String is begin if The_Variable = Null_Variable_Info_Ref then return LexTokenManager.Null_String; else return The_Variable.Name; end if; end Get_Variable_Name; -------------------------------------------------------------------------------- function Get_Variable_Type (The_Variable : Variable_Info_Ref) return Type_Info_Ref is begin if The_Variable = Null_Variable_Info_Ref then return Null_Type_Info_Ref; else return The_Variable.Type_Mark; end if; end Get_Variable_Type; -------------------------------------------------------------------------------- function Get_Variable_Abstract_Type (The_Variable : Variable_Info_Ref) return Type_Info_Ref is begin if The_Variable = Null_Variable_Info_Ref then return Null_Type_Info_Ref; else return The_Variable.Abstract_Type_Mark; end if; end Get_Variable_Abstract_Type; -------------------------------------------------------------------------------- function Get_Variable_Initialized (The_Variable : Variable_Info_Ref) return Boolean is begin if The_Variable = Null_Variable_Info_Ref then return False; else return The_Variable.Initialized; end if; end Get_Variable_Initialized; -------------------------------------------------------------------------------- function Get_Variable_Has_Address_Clause (The_Variable : Variable_Info_Ref) return Boolean is begin if The_Variable = Null_Variable_Info_Ref then return False; else return The_Variable.Has_Address_Clause; end if; end Get_Variable_Has_Address_Clause; -------------------------------------------------------------------------------- function Get_Variable_Has_Pragma_Import (The_Variable : Variable_Info_Ref) return Boolean is begin if The_Variable = Null_Variable_Info_Ref then return False; else return The_Variable.Has_Pragma_Import; end if; end Get_Variable_Has_Pragma_Import; -------------------------------------------------------------------------------- function Get_Variable_Is_Aliased (The_Variable : Variable_Info_Ref) return Boolean is begin if The_Variable = Null_Variable_Info_Ref then return False; else return The_Variable.Is_Aliased; end if; end Get_Variable_Is_Aliased; -------------------------------------------------------------------------------- function Get_Variable_Marked_Valid (The_Variable : Variable_Info_Ref) return Boolean is begin if The_Variable = Null_Variable_Info_Ref then return False; else return The_Variable.Marked_Valid; end if; end Get_Variable_Marked_Valid; -------------------------------------------------------------------------------- function Get_Variable_Declaration (The_Variable : Variable_Info_Ref) return Declaration_Info_Ref is begin if The_Variable = Null_Variable_Info_Ref then return Null_Declaration_Info_Ref; else return The_Variable.Declaration; end if; end Get_Variable_Declaration; -------------------------------------------------------------------------------- function Get_Variable_Exp_Node (The_Variable : Variable_Info_Ref) return ExaminerConstants.RefType is begin if The_Variable = Null_Variable_Info_Ref then return ExaminerConstants.RefType'First; else return The_Variable.Exp_Node; end if; end Get_Variable_Exp_Node; -------------------------------------------------------------------------------- function Get_Variable_Global_References (The_Variable : Variable_Info_Ref; Abstraction : 
Dictionary.Abstractions) return Global_Variable_Info_Ref is begin if The_Variable = Null_Variable_Info_Ref then return Null_Global_Variable_Info_Ref; else return The_Variable.Global_References (Abstraction); end if; end Get_Variable_Global_References; -------------------------------------------------------------------------------- function Get_Variable_Own_Variable (The_Variable : Variable_Info_Ref) return Own_Variable_Info_Ref is begin if The_Variable = Null_Variable_Info_Ref then return Null_Own_Variable_Info_Ref; else return The_Variable.Own_Variable; end if; end Get_Variable_Own_Variable; -------------------------------------------------------------------------------- function Get_Variable_Own_Task (The_Variable : Variable_Info_Ref) return Dictionary.Symbol is begin if The_Variable = Null_Variable_Info_Ref then return Dictionary.NullSymbol; else return The_Variable.Own_Task; end if; end Get_Variable_Own_Task; -------------------------------------------------------------------------------- function Get_Variable_Constituent (The_Variable : Variable_Info_Ref) return Constituent_Info_Ref is begin if The_Variable = Null_Variable_Info_Ref then return Null_Constituent_Info_Ref; else return The_Variable.Constituent; end if; end Get_Variable_Constituent; -------------------------------------------------------------------------------- function Get_Variable_Subcomponents (The_Variable : Variable_Info_Ref) return Subcomponent_Info_Ref is begin if The_Variable = Null_Variable_Info_Ref then return Null_Subcomponent_Info_Ref; else return The_Variable.Subcomponents; end if; end Get_Variable_Subcomponents; -------------------------------------------------------------------------------- function Get_Variable_Virtual_Element (The_Variable : Variable_Info_Ref) return Dictionary.Symbol is begin if The_Variable = Null_Variable_Info_Ref then return Dictionary.NullSymbol; else return The_Variable.Virtual_Element; end if; end Get_Variable_Virtual_Element; -------------------------------------------------------------------------------- -- Global_Variable_Info -------------------------------------------------------------------------------- function Get_Global_Variable_Info_Ref (Item : Dictionary.Symbol) return Global_Variable_Info_Ref is function RefType_To_Global_Variable_Info_Ref is new Unchecked_Conversion (Dictionary.Ref_Type, Global_Variable_Info_Ref); begin if Item = Dictionary.NullSymbol then return Null_Global_Variable_Info_Ref; else if GetSymbolDiscriminant (Item) /= Dictionary.Global_Variable_Symbol then DiscriminantDebug ("Get_Global_Variable_info_Ref", GetSymbolDiscriminant (Item), Dictionary.Global_Variable_Symbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Get_Global_Variable_info_Ref"); end if; return RefType_To_Global_Variable_Info_Ref (GetSymbolRef (Item)); end if; end Get_Global_Variable_Info_Ref; -------------------------------------------------------------------------------- procedure Create_Global_Variable (Mode : in Dictionary.Modes; Prefix_Needed : in Boolean; The_Subprogram : in Subprogram_Info_Ref; The_Task_Type : in Type_Info_Ref; Subprogram_Parameter : in Subprogram_Parameter_Info_Ref; Variable : in Variable_Info_Ref; Next_Subprogram : in Global_Variable_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Global_Variable : out Global_Variable_Info_Ref) is The_Global_Variable_Symbol : Dictionary.Symbol; function Global_Variable_Info_Ref_To_RefType is new Unchecked_Conversion 
(Global_Variable_Info_Ref, Dictionary.Ref_Type); begin if The_Subprogram /= Null_Subprogram_Info_Ref then if Subprogram_Parameter /= Null_Subprogram_Parameter_Info_Ref then The_Global_Variable := new Global_Variable_Info' (Self => Dictionary.NullSymbol, Mode => Mode, Exported => False, Imported => False, Prefix_Needed => Prefix_Needed, Dependencies => Dependencies_T'(Dictionary.Abstractions => Null_Dependency_Info_Ref), Next_Subprogram => Next_Subprogram, Next_Variable => Null_Global_Variable_Info_Ref, Kind_Of_Global_Variable => Subprogram_Parameter_Item, Subprogram_With_Parameter => The_Subprogram, Subprogram_Parameter => Subprogram_Parameter); elsif Variable /= Null_Variable_Info_Ref then The_Global_Variable := new Global_Variable_Info' (Self => Dictionary.NullSymbol, Mode => Mode, Exported => False, Imported => False, Prefix_Needed => Prefix_Needed, Dependencies => Dependencies_T'(Dictionary.Abstractions => Null_Dependency_Info_Ref), Next_Subprogram => Next_Subprogram, Next_Variable => Null_Global_Variable_Info_Ref, Kind_Of_Global_Variable => Subprogram_Variable_Item, Subprogram_With_Variable => The_Subprogram, Variable => Variable); else The_Global_Variable := Null_Global_Variable_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Create_Global_Variable"); end if; elsif The_Task_Type /= Null_Type_Info_Ref and then Variable /= Null_Variable_Info_Ref then The_Global_Variable := new Global_Variable_Info' (Self => Dictionary.NullSymbol, Mode => Mode, Exported => False, Imported => False, Prefix_Needed => Prefix_Needed, Dependencies => Dependencies_T'(Dictionary.Abstractions => Null_Dependency_Info_Ref), Next_Subprogram => Next_Subprogram, Next_Variable => Null_Global_Variable_Info_Ref, Kind_Of_Global_Variable => Task_Type_Variable_Item, Task_Type => The_Task_Type, Task_Type_Variable => Variable); else The_Global_Variable := Null_Global_Variable_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Create_Global_Variable"); end if; AddSymbol (Discriminant => Dictionary.Global_Variable_Symbol, Ref => Global_Variable_Info_Ref_To_RefType (The_Global_Variable), Comp_Unit => Comp_Unit, Loc => Loc, Item => The_Global_Variable_Symbol); The_Global_Variable.Self := The_Global_Variable_Symbol; exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Global_Variable"); end Create_Global_Variable; -------------------------------------------------------------------------------- procedure Set_Global_Variable_Exported (The_Global_Variable : in Global_Variable_Info_Ref) is begin The_Global_Variable.Exported := True; end Set_Global_Variable_Exported; -------------------------------------------------------------------------------- procedure Set_Global_Variable_Imported (The_Global_Variable : in Global_Variable_Info_Ref) is begin The_Global_Variable.Imported := True; end Set_Global_Variable_Imported; -------------------------------------------------------------------------------- procedure Set_Global_Variable_Dependencies (The_Global_Variable : in Global_Variable_Info_Ref; Abstraction : in Dictionary.Abstractions; Dependency : in Dependency_Info_Ref) is begin The_Global_Variable.Dependencies (Abstraction) := Dependency; end Set_Global_Variable_Dependencies; -------------------------------------------------------------------------------- procedure Set_Next_Global_Variable (The_Global_Variable, Next : in 
Global_Variable_Info_Ref) is begin The_Global_Variable.Next_Variable := Next; end Set_Next_Global_Variable; -------------------------------------------------------------------------------- function Get_Global_Variable_Symbol (The_Global_Variable : Global_Variable_Info_Ref) return Dictionary.Symbol is begin if The_Global_Variable = Null_Global_Variable_Info_Ref then return Dictionary.NullSymbol; else return The_Global_Variable.Self; end if; end Get_Global_Variable_Symbol; -------------------------------------------------------------------------------- function Get_Global_Variable_Mode (The_Global_Variable : Global_Variable_Info_Ref) return Dictionary.Modes is begin if The_Global_Variable = Null_Global_Variable_Info_Ref then return Dictionary.InvalidMode; else return The_Global_Variable.Mode; end if; end Get_Global_Variable_Mode; -------------------------------------------------------------------------------- function Get_Global_Variable_Exported (The_Global_Variable : Global_Variable_Info_Ref) return Boolean is begin if The_Global_Variable = Null_Global_Variable_Info_Ref then return False; else return The_Global_Variable.Exported; end if; end Get_Global_Variable_Exported; -------------------------------------------------------------------------------- function Get_Global_Variable_Imported (The_Global_Variable : Global_Variable_Info_Ref) return Boolean is begin if The_Global_Variable = Null_Global_Variable_Info_Ref then return False; else return The_Global_Variable.Imported; end if; end Get_Global_Variable_Imported; -------------------------------------------------------------------------------- function Get_Global_Variable_Prefix_Needed (The_Global_Variable : Global_Variable_Info_Ref) return Boolean is begin if The_Global_Variable = Null_Global_Variable_Info_Ref then return False; else return The_Global_Variable.Prefix_Needed; end if; end Get_Global_Variable_Prefix_Needed; -------------------------------------------------------------------------------- function Get_Global_Variable_Dependencies (The_Global_Variable : Global_Variable_Info_Ref; Abstraction : Dictionary.Abstractions) return Dependency_Info_Ref is begin if The_Global_Variable = Null_Global_Variable_Info_Ref then return Null_Dependency_Info_Ref; else return The_Global_Variable.Dependencies (Abstraction); end if; end Get_Global_Variable_Dependencies; -------------------------------------------------------------------------------- function Get_Global_Variable_Next_Subprogram (The_Global_Variable : Global_Variable_Info_Ref) return Global_Variable_Info_Ref is begin if The_Global_Variable = Null_Global_Variable_Info_Ref then return Null_Global_Variable_Info_Ref; else return The_Global_Variable.Next_Subprogram; end if; end Get_Global_Variable_Next_Subprogram; -------------------------------------------------------------------------------- function Get_Global_Variable_Subprogram (The_Global_Variable : Global_Variable_Info_Ref) return Subprogram_Info_Ref is begin if The_Global_Variable = Null_Global_Variable_Info_Ref then return Null_Subprogram_Info_Ref; else case The_Global_Variable.Kind_Of_Global_Variable is when Subprogram_Parameter_Item => return The_Global_Variable.Subprogram_With_Parameter; when Subprogram_Variable_Item => return The_Global_Variable.Subprogram_With_Variable; when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Get_Global_Variable_Subprogram"); return Null_Subprogram_Info_Ref; end case; end if; end Get_Global_Variable_Subprogram; 
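   --  Illustrative note: Global_Variable_Info is discriminated by its
   --  Kind_Of_Global_Variable component (Subprogram_Parameter_Item,
   --  Subprogram_Variable_Item or Task_Type_Variable_Item), so callers are
   --  expected to consult Get_Kind_Of_Global_Variable before using the
   --  kind-specific accessors; calling the wrong accessor is reported as a
   --  fatal symbol-table error.  A hypothetical sketch, where GV, P and V are
   --  assumed locals of the corresponding _Info_Ref types:
   --
   --     case Get_Kind_Of_Global_Variable (GV) is
   --        when Subprogram_Parameter_Item =>
   --           P := Get_Global_Variable_Parameter (GV);
   --        when Subprogram_Variable_Item | Task_Type_Variable_Item =>
   --           V := Get_Global_Variable_Variable (GV);
   --     end case;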
   --------------------------------------------------------------------------------

   function Get_Global_Variable_Task_Type (The_Global_Variable : Global_Variable_Info_Ref) return Type_Info_Ref is
   begin
      if The_Global_Variable = Null_Global_Variable_Info_Ref then
         return Null_Type_Info_Ref;
      else
         case The_Global_Variable.Kind_Of_Global_Variable is
            when Task_Type_Variable_Item =>
               return The_Global_Variable.Task_Type;
            when others => -- non-exec code
               SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table,
                                         Msg     => "Dictionary.RawDict.Get_Global_Variable_Task_Type");
               return Null_Type_Info_Ref;
         end case;
      end if;
   end Get_Global_Variable_Task_Type;

   --------------------------------------------------------------------------------

   function Get_Kind_Of_Global_Variable (The_Global_Variable : Global_Variable_Info_Ref) return Kind_Of_Global_Variable_T is
   begin
      return The_Global_Variable.Kind_Of_Global_Variable;
   end Get_Kind_Of_Global_Variable;

   --------------------------------------------------------------------------------

   function Get_Global_Variable_Variable (The_Global_Variable : Global_Variable_Info_Ref) return Variable_Info_Ref is
   begin
      if The_Global_Variable = Null_Global_Variable_Info_Ref then
         return Null_Variable_Info_Ref;
      else
         case The_Global_Variable.Kind_Of_Global_Variable is
            when Subprogram_Variable_Item =>
               return The_Global_Variable.Variable;
            when Task_Type_Variable_Item =>
               return The_Global_Variable.Task_Type_Variable;
            when others => -- non-exec code
               SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table,
                                         Msg     => "Dictionary.RawDict.Get_Global_Variable_Variable");
               return Null_Variable_Info_Ref;
         end case;
      end if;
   end Get_Global_Variable_Variable;

   --------------------------------------------------------------------------------

   function Get_Global_Variable_Parameter (The_Global_Variable : Global_Variable_Info_Ref) return Subprogram_Parameter_Info_Ref is
   begin
      if The_Global_Variable = Null_Global_Variable_Info_Ref then
         return Null_Subprogram_Parameter_Info_Ref;
      else
         case The_Global_Variable.Kind_Of_Global_Variable is
            when Subprogram_Parameter_Item =>
               return The_Global_Variable.Subprogram_Parameter;
            when others => -- non-exec code
               SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table,
                                         Msg     => "Dictionary.RawDict.Get_Global_Variable_Parameter");
               return Null_Subprogram_Parameter_Info_Ref;
         end case;
      end if;
   end Get_Global_Variable_Parameter;

   --------------------------------------------------------------------------------

   function Get_Next_Global_Variable (The_Global_Variable : Global_Variable_Info_Ref) return Global_Variable_Info_Ref is
   begin
      if The_Global_Variable = Null_Global_Variable_Info_Ref then
         return Null_Global_Variable_Info_Ref;
      else
         return The_Global_Variable.Next_Variable;
      end if;
   end Get_Next_Global_Variable;

   --------------------------------------------------------------------------------
   -- Own_Variable_Info
   --------------------------------------------------------------------------------

   function Get_Own_Variable_Info_Ref (Item : Dictionary.Symbol) return Own_Variable_Info_Ref is
      function RefType_To_Own_Variable_Info_Ref is new Unchecked_Conversion (Dictionary.Ref_Type, Own_Variable_Info_Ref);
   begin
      if Item = Dictionary.NullSymbol then
         return Null_Own_Variable_Info_Ref;
      else
         if GetSymbolDiscriminant (Item) /= Dictionary.Own_Variable_Symbol then
            DiscriminantDebug ("Get_Own_Variable_Info_Ref", GetSymbolDiscriminant (Item), Dictionary.Own_Variable_Symbol);
            SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table,
                                      Msg     => "Dictionary.RawDict.Get_Own_Variable_Info_Ref");
         end if;
         return RefType_To_Own_Variable_Info_Ref (GetSymbolRef (Item));
      end if;
   end Get_Own_Variable_Info_Ref;

   --------------------------------------------------------------------------------

   procedure Create_Own_Variable
     (Variable         : in     Variable_Info_Ref;
      Owner            : in     Dictionary.Symbol;
      Comp_Unit        : in     ContextManager.UnitDescriptors;
      Loc              : in     LexTokenManager.Token_Position;
      The_Own_Variable :    out Own_Variable_Info_Ref)
   is
      The_Own_Variable_Symbol : Dictionary.Symbol;

      function Own_Variable_Info_Ref_To_RefType is new Unchecked_Conversion (Own_Variable_Info_Ref, Dictionary.Ref_Type);
   begin
      -- Note the use of InvalidMode in this aggregate. The reason is that if we
      -- refine an own variable onto an embedded package own variable, then the
      -- own variable gets created by a call here when the refinement is processed.
      -- If there is an error when the embedded package own variable is eventually
      -- processed, the mode (set earlier to invalid) won't get overwritten, so
      -- we are left with a nice detectable error case rather than an apparently
      -- well formed case of DefaultMode.
      The_Own_Variable :=
        new Own_Variable_Info'
        (Self                       => Dictionary.NullSymbol,
         Variable                   => Variable,
         Owner                      => Owner,
         Announced                  => False,
         Typed                      => False,
         Initialized                => False,
         Mode                       => Dictionary.InvalidMode,
         Is_Protected               => False,
         Is_Interrupt_Stream        => False,
         Interrupt_Stream_Mappings  => Dictionary.NullSymbol,
         Unprotected_Reference      => Dictionary.NullSymbol,
         Suspends_Reference         => Dictionary.NullSymbol,
         Implicit_In_Stream         => Null_Implicit_In_Stream_Info_Ref,
         Priority                   => LexTokenManager.Null_String,
         Integrity                  => LexTokenManager.Null_String,
         Suspendable                => False,
         Interruptable              => False,
         Constituents               => Null_Constituent_Info_Ref,
         Next                       => Null_Own_Variable_Info_Ref);
      AddSymbol
        (Discriminant => Dictionary.Own_Variable_Symbol,
         Ref          => Own_Variable_Info_Ref_To_RefType (The_Own_Variable),
         Comp_Unit    => Comp_Unit,
         Loc          => Loc,
         Item         => The_Own_Variable_Symbol);
      The_Own_Variable.Self := The_Own_Variable_Symbol;
      Variable.Own_Variable := The_Own_Variable;
   exception
      when Storage_Error =>
         SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic,
                                   Msg     => "Dictionary.RawDict.Create_Own_Variable");
   end Create_Own_Variable;

   --------------------------------------------------------------------------------

   procedure Set_Own_Variable_Announced (The_Own_Variable : in Own_Variable_Info_Ref) is
   begin
      The_Own_Variable.Announced := True;
   end Set_Own_Variable_Announced;

   --------------------------------------------------------------------------------

   procedure Set_Own_Variable_Typed (The_Own_Variable : in Own_Variable_Info_Ref) is
   begin
      The_Own_Variable.Typed := True;
   end Set_Own_Variable_Typed;

   --------------------------------------------------------------------------------

   procedure Set_Own_Variable_Initialized (The_Own_Variable : in Own_Variable_Info_Ref) is
   begin
      The_Own_Variable.Initialized := True;
   end Set_Own_Variable_Initialized;

   --------------------------------------------------------------------------------

   procedure Set_Own_Variable_Constituents (The_Own_Variable : in Own_Variable_Info_Ref; The_Constituent : in Constituent_Info_Ref) is
   begin
      The_Own_Variable.Constituents := The_Constituent;
   end Set_Own_Variable_Constituents;

   --------------------------------------------------------------------------------

   procedure Set_Own_Variable_Mode (The_Own_Variable : in Own_Variable_Info_Ref; Mode : in Dictionary.Modes) is
   begin
      The_Own_Variable.Mode := Mode;
   end Set_Own_Variable_Mode;
-------------------------------------------------------------------------------- procedure Set_Own_Variable_Protected (The_Own_Variable : in Own_Variable_Info_Ref; Is_Protected : in Boolean) is begin The_Own_Variable.Is_Protected := Is_Protected; end Set_Own_Variable_Protected; -------------------------------------------------------------------------------- procedure Set_Next_Own_Variable (The_Own_Variable, Next : in Own_Variable_Info_Ref) is begin The_Own_Variable.Next := Next; end Set_Next_Own_Variable; -------------------------------------------------------------------------------- procedure Set_Own_Variable_Interrupt_Stream_Mappings (The_Own_Variable : in Own_Variable_Info_Ref; The_Interrupt_Stream_Mappings : in Dictionary.Symbol) is begin The_Own_Variable.Interrupt_Stream_Mappings := The_Interrupt_Stream_Mappings; end Set_Own_Variable_Interrupt_Stream_Mappings; -------------------------------------------------------------------------------- procedure Set_Own_Variable_Implicit_In_Stream (The_Own_Variable : in Own_Variable_Info_Ref; The_Implicit_In_Stream : in Implicit_In_Stream_Info_Ref) is begin The_Own_Variable.Implicit_In_Stream := The_Implicit_In_Stream; end Set_Own_Variable_Implicit_In_Stream; -------------------------------------------------------------------------------- procedure Set_Own_Variable_Is_Interrupt_Stream (The_Own_Variable : in Own_Variable_Info_Ref; Is_Interrupt_Stream : in Boolean) is begin The_Own_Variable.Is_Interrupt_Stream := Is_Interrupt_Stream; end Set_Own_Variable_Is_Interrupt_Stream; -------------------------------------------------------------------------------- procedure Set_Own_Variable_Unprotected_Reference (The_Own_Variable : in Own_Variable_Info_Ref; By_Thread : in Dictionary.Symbol) is begin The_Own_Variable.Unprotected_Reference := By_Thread; end Set_Own_Variable_Unprotected_Reference; -------------------------------------------------------------------------------- procedure Set_Own_Variable_Suspends_Reference (The_Own_Variable : in Own_Variable_Info_Ref; By_Thread : in Dictionary.Symbol) is begin The_Own_Variable.Suspends_Reference := By_Thread; end Set_Own_Variable_Suspends_Reference; -------------------------------------------------------------------------------- procedure Set_Own_Variable_Is_Suspendable (The_Own_Variable : in Own_Variable_Info_Ref) is begin The_Own_Variable.Suspendable := True; end Set_Own_Variable_Is_Suspendable; -------------------------------------------------------------------------------- procedure Set_Own_Variable_Has_Interrupt_Property (The_Own_Variable : in Own_Variable_Info_Ref) is begin The_Own_Variable.Interruptable := True; end Set_Own_Variable_Has_Interrupt_Property; -------------------------------------------------------------------------------- procedure Set_Own_Variable_Priority_Property (The_Own_Variable : in Own_Variable_Info_Ref; The_Value : in LexTokenManager.Lex_String) is begin The_Own_Variable.Priority := The_Value; end Set_Own_Variable_Priority_Property; -------------------------------------------------------------------------------- procedure Set_Own_Variable_Integrity_Property (The_Own_Variable : in Own_Variable_Info_Ref; The_Value : in LexTokenManager.Lex_String) is begin The_Own_Variable.Integrity := The_Value; end Set_Own_Variable_Integrity_Property; -------------------------------------------------------------------------------- function Get_Own_Variable_Symbol (The_Own_Variable : Own_Variable_Info_Ref) return Dictionary.Symbol is begin if The_Own_Variable = Null_Own_Variable_Info_Ref then return 
Dictionary.NullSymbol; else return The_Own_Variable.Self; end if; end Get_Own_Variable_Symbol; -------------------------------------------------------------------------------- function Get_Own_Variable_Variable (The_Own_Variable : Own_Variable_Info_Ref) return Variable_Info_Ref is begin if The_Own_Variable = Null_Own_Variable_Info_Ref then return Null_Variable_Info_Ref; else return The_Own_Variable.Variable; end if; end Get_Own_Variable_Variable; -------------------------------------------------------------------------------- function Get_Own_Variable_Owner (The_Own_Variable : Own_Variable_Info_Ref) return Dictionary.Symbol is begin if The_Own_Variable = Null_Own_Variable_Info_Ref then return Dictionary.NullSymbol; else return The_Own_Variable.Owner; end if; end Get_Own_Variable_Owner; -------------------------------------------------------------------------------- function Get_Own_Variable_Announced (The_Own_Variable : Own_Variable_Info_Ref) return Boolean is begin if The_Own_Variable = Null_Own_Variable_Info_Ref then return False; else return The_Own_Variable.Announced; end if; end Get_Own_Variable_Announced; -------------------------------------------------------------------------------- function Get_Own_Variable_Typed (The_Own_Variable : Own_Variable_Info_Ref) return Boolean is begin if The_Own_Variable = Null_Own_Variable_Info_Ref then return False; else return The_Own_Variable.Typed; end if; end Get_Own_Variable_Typed; -------------------------------------------------------------------------------- function Get_Own_Variable_Initialized (The_Own_Variable : Own_Variable_Info_Ref) return Boolean is begin if The_Own_Variable = Null_Own_Variable_Info_Ref then return False; else return The_Own_Variable.Initialized; end if; end Get_Own_Variable_Initialized; -------------------------------------------------------------------------------- function Get_Own_Variable_Constituents (The_Own_Variable : Own_Variable_Info_Ref) return Constituent_Info_Ref is begin if The_Own_Variable = Null_Own_Variable_Info_Ref then return Null_Constituent_Info_Ref; else return The_Own_Variable.Constituents; end if; end Get_Own_Variable_Constituents; -------------------------------------------------------------------------------- function Get_Own_Variable_Mode (The_Own_Variable : Own_Variable_Info_Ref) return Dictionary.Modes is begin if The_Own_Variable = Null_Own_Variable_Info_Ref then return Dictionary.InvalidMode; else return The_Own_Variable.Mode; end if; end Get_Own_Variable_Mode; -------------------------------------------------------------------------------- function Get_Own_Variable_Protected (The_Own_Variable : Own_Variable_Info_Ref) return Boolean is begin if The_Own_Variable = Null_Own_Variable_Info_Ref then return False; else return The_Own_Variable.Is_Protected; end if; end Get_Own_Variable_Protected; -------------------------------------------------------------------------------- function Get_Own_Variable_Implicit_In_Stream (The_Own_Variable : Own_Variable_Info_Ref) return Implicit_In_Stream_Info_Ref is begin if The_Own_Variable = Null_Own_Variable_Info_Ref then return Null_Implicit_In_Stream_Info_Ref; else return The_Own_Variable.Implicit_In_Stream; end if; end Get_Own_Variable_Implicit_In_Stream; -------------------------------------------------------------------------------- function Get_Next_Own_Variable (The_Own_Variable : Own_Variable_Info_Ref) return Own_Variable_Info_Ref is begin if The_Own_Variable = Null_Own_Variable_Info_Ref then return Null_Own_Variable_Info_Ref; else return 
The_Own_Variable.Next; end if; end Get_Next_Own_Variable; -------------------------------------------------------------------------------- function Get_Own_Variable_Interrupt_Stream_Mappings (The_Own_Variable : Own_Variable_Info_Ref) return Dictionary.Symbol is begin if The_Own_Variable = Null_Own_Variable_Info_Ref then return Dictionary.NullSymbol; else return The_Own_Variable.Interrupt_Stream_Mappings; end if; end Get_Own_Variable_Interrupt_Stream_Mappings; -------------------------------------------------------------------------------- function Get_Own_Variable_Is_Interrupt_Stream (The_Own_Variable : Own_Variable_Info_Ref) return Boolean is begin if The_Own_Variable = Null_Own_Variable_Info_Ref then return False; else return The_Own_Variable.Is_Interrupt_Stream; end if; end Get_Own_Variable_Is_Interrupt_Stream; -------------------------------------------------------------------------------- function Get_Own_Variable_Is_Suspendable (The_Own_Variable : Own_Variable_Info_Ref) return Boolean is begin if The_Own_Variable = Null_Own_Variable_Info_Ref then return False; else return The_Own_Variable.Suspendable; end if; end Get_Own_Variable_Is_Suspendable; -------------------------------------------------------------------------------- function Get_Own_Variable_Has_Interrupt_Property (The_Own_Variable : Own_Variable_Info_Ref) return Boolean is begin if The_Own_Variable = Null_Own_Variable_Info_Ref then return False; else return The_Own_Variable.Interruptable; end if; end Get_Own_Variable_Has_Interrupt_Property; -------------------------------------------------------------------------------- function Get_Own_Variable_Priority_Property (The_Own_Variable : Own_Variable_Info_Ref) return LexTokenManager.Lex_String is begin if The_Own_Variable = Null_Own_Variable_Info_Ref then return LexTokenManager.Null_String; else return The_Own_Variable.Priority; end if; end Get_Own_Variable_Priority_Property; -------------------------------------------------------------------------------- function Get_Own_Variable_Integrity_Property (The_Own_Variable : Own_Variable_Info_Ref) return LexTokenManager.Lex_String is begin if The_Own_Variable = Null_Own_Variable_Info_Ref then return LexTokenManager.Null_String; else return The_Own_Variable.Integrity; end if; end Get_Own_Variable_Integrity_Property; -------------------------------------------------------------------------------- function Get_Own_Variable_Unprotected_Reference (The_Own_Variable : Own_Variable_Info_Ref) return Dictionary.Symbol is begin if The_Own_Variable = Null_Own_Variable_Info_Ref then return Dictionary.NullSymbol; else return The_Own_Variable.Unprotected_Reference; end if; end Get_Own_Variable_Unprotected_Reference; -------------------------------------------------------------------------------- function Get_Own_Variable_Suspends_Reference (The_Own_Variable : Own_Variable_Info_Ref) return Dictionary.Symbol is begin if The_Own_Variable = Null_Own_Variable_Info_Ref then return Dictionary.NullSymbol; else return The_Own_Variable.Suspends_Reference; end if; end Get_Own_Variable_Suspends_Reference; -------------------------------------------------------------------------------- function Get_Own_Variable_Has_Valid_Priority_Property (The_Own_Variable : Own_Variable_Info_Ref) return Boolean is begin if The_Own_Variable = Null_Own_Variable_Info_Ref then return False; else return LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Get_Own_Variable_Priority_Property (The_Own_Variable => The_Own_Variable), Lex_Str2 => LexTokenManager.Null_String) 
/= LexTokenManager.Str_Eq; end if; end Get_Own_Variable_Has_Valid_Priority_Property; -------------------------------------------------------------------------------- -- Quantified_Variable_Info -------------------------------------------------------------------------------- function Get_Quantified_Variable_Info_Ref (Item : Dictionary.Symbol) return Quantified_Variable_Info_Ref is function RefType_To_Quantified_Variable_Info_Ref is new Unchecked_Conversion (Dictionary.Ref_Type, Quantified_Variable_Info_Ref); begin if Item = Dictionary.NullSymbol then return Null_Quantified_Variable_Info_Ref; else if GetSymbolDiscriminant (Item) /= Dictionary.Quantified_Variable_Symbol then DiscriminantDebug ("Get_Quantified_Variable_Info_Ref", GetSymbolDiscriminant (Item), Dictionary.Quantified_Variable_Symbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Get_Quantified_Variable_Info_Ref"); end if; return RefType_To_Quantified_Variable_Info_Ref (GetSymbolRef (Item)); end if; end Get_Quantified_Variable_Info_Ref; -------------------------------------------------------------------------------- procedure Create_Quantified_Variable (Name : in LexTokenManager.Lex_String; Type_Mark : in Type_Info_Ref; The_Parameter_Constraint : in Parameter_Constraint_Info_Ref; Region : in Dictionary.Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Quantified_Variable : out Quantified_Variable_Info_Ref) is The_Quantified_Variable_Symbol : Dictionary.Symbol; function Quantified_Variable_Info_Ref_To_RefType is new Unchecked_Conversion (Quantified_Variable_Info_Ref, Dictionary.Ref_Type); begin The_Quantified_Variable := new Quantified_Variable_Info' (Self => Dictionary.NullSymbol, Name => Name, Type_Mark => Type_Mark, Parameter_Constraint => The_Parameter_Constraint, Region => Region); AddSymbol (Discriminant => Dictionary.Quantified_Variable_Symbol, Ref => Quantified_Variable_Info_Ref_To_RefType (The_Quantified_Variable), Comp_Unit => Comp_Unit, Loc => Loc, Item => The_Quantified_Variable_Symbol); The_Quantified_Variable.Self := The_Quantified_Variable_Symbol; exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Quantified_Variable"); end Create_Quantified_Variable; -------------------------------------------------------------------------------- function Get_Quantified_Variable_Symbol (The_Quantified_Variable : Quantified_Variable_Info_Ref) return Dictionary.Symbol is begin if The_Quantified_Variable = Null_Quantified_Variable_Info_Ref then return Dictionary.NullSymbol; else return The_Quantified_Variable.Self; end if; end Get_Quantified_Variable_Symbol; -------------------------------------------------------------------------------- function Get_Quantified_Variable_Name (The_Quantified_Variable : Quantified_Variable_Info_Ref) return LexTokenManager.Lex_String is begin if The_Quantified_Variable = Null_Quantified_Variable_Info_Ref then return LexTokenManager.Null_String; else return The_Quantified_Variable.Name; end if; end Get_Quantified_Variable_Name; -------------------------------------------------------------------------------- function Get_Quantified_Variable_Type (The_Quantified_Variable : Quantified_Variable_Info_Ref) return Type_Info_Ref is begin if The_Quantified_Variable = Null_Quantified_Variable_Info_Ref then return Null_Type_Info_Ref; else return The_Quantified_Variable.Type_Mark; end if; end Get_Quantified_Variable_Type; 
-------------------------------------------------------------------------------- function Get_Quantified_Variable_Constraint (The_Quantified_Variable : Quantified_Variable_Info_Ref) return Parameter_Constraint_Info_Ref is begin if The_Quantified_Variable = Null_Quantified_Variable_Info_Ref then return Null_Parameter_Constraint_Info_Ref; else return The_Quantified_Variable.Parameter_Constraint; end if; end Get_Quantified_Variable_Constraint; -------------------------------------------------------------------------------- function Get_Quantified_Variable_Region (The_Quantified_Variable : Quantified_Variable_Info_Ref) return Dictionary.Symbol is begin if The_Quantified_Variable = Null_Quantified_Variable_Info_Ref then return Dictionary.NullSymbol; else return The_Quantified_Variable.Region; end if; end Get_Quantified_Variable_Region; -------------------------------------------------------------------------------- -- Implicit_Return_Variable_Info -------------------------------------------------------------------------------- function Get_Implicit_Return_Variable_Info_Ref (Item : Dictionary.Symbol) return Implicit_Return_Variable_Info_Ref is function RefType_To_Implicit_Return_Variable_Info_Ref is new Unchecked_Conversion (Dictionary.Ref_Type, Implicit_Return_Variable_Info_Ref); begin if Item = Dictionary.NullSymbol then return Null_Implicit_Return_Variable_Info_Ref; else if GetSymbolDiscriminant (Item) /= Dictionary.Implicit_Return_Variable_Symbol then DiscriminantDebug ("Get_Implicit_Return_Variable_Info_Ref", GetSymbolDiscriminant (Item), Dictionary.Implicit_Return_Variable_Symbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Get_Implicit_Return_Variable_Info_Ref"); end if; return RefType_To_Implicit_Return_Variable_Info_Ref (GetSymbolRef (Item)); end if; end Get_Implicit_Return_Variable_Info_Ref; -------------------------------------------------------------------------------- procedure Create_Implicit_Return_Variable (Name : in LexTokenManager.Lex_String; The_Function : in Subprogram_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Implicit_Return_Variable : out Implicit_Return_Variable_Info_Ref) is The_Implicit_Return_Variable_Symbol : Dictionary.Symbol; function Implicit_Return_Variable_Info_Ref_To_RefType is new Unchecked_Conversion (Implicit_Return_Variable_Info_Ref, Dictionary.Ref_Type); begin The_Implicit_Return_Variable := new Implicit_Return_Variable_Info'(Self => Dictionary.NullSymbol, Name => Name, The_Function => The_Function); AddSymbol (Discriminant => Dictionary.Implicit_Return_Variable_Symbol, Ref => Implicit_Return_Variable_Info_Ref_To_RefType (The_Implicit_Return_Variable), Comp_Unit => Comp_Unit, Loc => Loc, Item => The_Implicit_Return_Variable_Symbol); The_Implicit_Return_Variable.Self := The_Implicit_Return_Variable_Symbol; exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Implicit_Return_Variable"); end Create_Implicit_Return_Variable; -------------------------------------------------------------------------------- function Get_Implicit_Return_Variable_Symbol (The_Implicit_Return_Variable : Implicit_Return_Variable_Info_Ref) return Dictionary.Symbol is begin if The_Implicit_Return_Variable = Null_Implicit_Return_Variable_Info_Ref then return Dictionary.NullSymbol; else return The_Implicit_Return_Variable.Self; end if; end Get_Implicit_Return_Variable_Symbol; 
-------------------------------------------------------------------------------- function Get_Implicit_Return_Variable_Name (The_Implicit_Return_Variable : Implicit_Return_Variable_Info_Ref) return LexTokenManager.Lex_String is begin if The_Implicit_Return_Variable = Null_Implicit_Return_Variable_Info_Ref then return LexTokenManager.Null_String; else return The_Implicit_Return_Variable.Name; end if; end Get_Implicit_Return_Variable_Name; -------------------------------------------------------------------------------- function Get_Implicit_Return_Variable_Function (The_Implicit_Return_Variable : Implicit_Return_Variable_Info_Ref) return Subprogram_Info_Ref is begin if The_Implicit_Return_Variable = Null_Implicit_Return_Variable_Info_Ref then return Null_Subprogram_Info_Ref; else return The_Implicit_Return_Variable.The_Function; end if; end Get_Implicit_Return_Variable_Function; -------------------------------------------------------------------------------- -- Implicit_In_Stream_Info -------------------------------------------------------------------------------- function Get_Implicit_In_Stream_Info_Ref (Item : Dictionary.Symbol) return Implicit_In_Stream_Info_Ref is function RefType_To_Implicit_In_Stream_Info_Ref is new Unchecked_Conversion (Dictionary.Ref_Type, Implicit_In_Stream_Info_Ref); begin if Item = Dictionary.NullSymbol then return Null_Implicit_In_Stream_Info_Ref; else if GetSymbolDiscriminant (Item) /= Dictionary.Implicit_In_Stream_Symbol then DiscriminantDebug ("Get_Implicit_In_Stream_Info_Ref", GetSymbolDiscriminant (Item), Dictionary.Implicit_In_Stream_Symbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Get_Implicit_In_Stream_Info_Ref"); end if; return RefType_To_Implicit_In_Stream_Info_Ref (GetSymbolRef (Item)); end if; end Get_Implicit_In_Stream_Info_Ref; -------------------------------------------------------------------------------- procedure Create_Implicit_In_Stream (The_Own_Variable : in Own_Variable_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Implicit_In_Stream : out Implicit_In_Stream_Info_Ref) is The_Implicit_In_Stream_Symbol : Dictionary.Symbol; function Implicit_In_Stream_Info_Ref_To_RefType is new Unchecked_Conversion (Implicit_In_Stream_Info_Ref, Dictionary.Ref_Type); begin The_Implicit_In_Stream := new Implicit_In_Stream_Info'(Self => Dictionary.NullSymbol, Own_Variable => The_Own_Variable); AddSymbol (Discriminant => Dictionary.Implicit_In_Stream_Symbol, Ref => Implicit_In_Stream_Info_Ref_To_RefType (The_Implicit_In_Stream), Comp_Unit => Comp_Unit, Loc => Loc, Item => The_Implicit_In_Stream_Symbol); The_Implicit_In_Stream.Self := The_Implicit_In_Stream_Symbol; exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Implicit_In_Stream"); end Create_Implicit_In_Stream; -------------------------------------------------------------------------------- function Get_Implicit_In_Stream_Symbol (The_Implicit_In_Stream : Implicit_In_Stream_Info_Ref) return Dictionary.Symbol is begin if The_Implicit_In_Stream = Null_Implicit_In_Stream_Info_Ref then return Dictionary.NullSymbol; else return The_Implicit_In_Stream.Self; end if; end Get_Implicit_In_Stream_Symbol; -------------------------------------------------------------------------------- function Get_Implicit_In_Stream_Own_Variable (The_Implicit_In_Stream : Implicit_In_Stream_Info_Ref) return Own_Variable_Info_Ref is 
begin if The_Implicit_In_Stream = Null_Implicit_In_Stream_Info_Ref then return Null_Own_Variable_Info_Ref; else return The_Implicit_In_Stream.Own_Variable; end if; end Get_Implicit_In_Stream_Own_Variable; -------------------------------------------------------------------------------- -- Rule_Policy_Info -------------------------------------------------------------------------------- procedure Create_Rule_Policy (Scope : in Dictionary.Scopes; Value : in Dictionary.Rule_Policies; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Rule_Policy : out Rule_Policy_Info_Ref) is The_Rule_Policy_Symbol : Dictionary.Symbol; function Rule_Policy_Info_Ref_To_RefType is new Unchecked_Conversion (Rule_Policy_Info_Ref, Dictionary.Ref_Type); begin The_Rule_Policy := new Rule_Policy_Info'(Self => Dictionary.NullSymbol, Scope => Scope, Value => Value, Next => Null_Rule_Policy_Info_Ref); AddSymbol (Discriminant => Dictionary.Rule_Policy_Symbol, Ref => Rule_Policy_Info_Ref_To_RefType (The_Rule_Policy), Comp_Unit => Comp_Unit, Loc => Loc, Item => The_Rule_Policy_Symbol); The_Rule_Policy.Self := The_Rule_Policy_Symbol; exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Rule_Policy"); end Create_Rule_Policy; -------------------------------------------------------------------------------- procedure Set_Next_Rule_Policy (The_Rule_Policy, Next : in Rule_Policy_Info_Ref) is begin The_Rule_Policy.Next := Next; end Set_Next_Rule_Policy; -------------------------------------------------------------------------------- function Get_Rule_Policy_Symbol (The_Rule_Policy : Rule_Policy_Info_Ref) return Dictionary.Symbol is begin if The_Rule_Policy = Null_Rule_Policy_Info_Ref then return Dictionary.NullSymbol; else return The_Rule_Policy.Self; end if; end Get_Rule_Policy_Symbol; -------------------------------------------------------------------------------- function Get_Rule_Policy_Scope (The_Rule_Policy : Rule_Policy_Info_Ref) return Dictionary.Scopes is begin if The_Rule_Policy = Null_Rule_Policy_Info_Ref then return Dictionary.NullScope; else return The_Rule_Policy.Scope; end if; end Get_Rule_Policy_Scope; -------------------------------------------------------------------------------- function Get_Rule_Policy_Value (The_Rule_Policy : Rule_Policy_Info_Ref) return Dictionary.Rule_Policies is begin if The_Rule_Policy = Null_Rule_Policy_Info_Ref then return Dictionary.Unspecified; else return The_Rule_Policy.Value; end if; end Get_Rule_Policy_Value; -------------------------------------------------------------------------------- function Get_Next_Rule_Policy (The_Rule_Policy : Rule_Policy_Info_Ref) return Rule_Policy_Info_Ref is begin if The_Rule_Policy = Null_Rule_Policy_Info_Ref then return Null_Rule_Policy_Info_Ref; else return The_Rule_Policy.Next; end if; end Get_Next_Rule_Policy; -------------------------------------------------------------------------------- -- Constituent_Info -------------------------------------------------------------------------------- function Get_Constituent_Info_Ref (Item : Dictionary.Symbol) return Constituent_Info_Ref is function RefType_To_Constituent_Info_Ref is new Unchecked_Conversion (Dictionary.Ref_Type, Constituent_Info_Ref); begin if Item = Dictionary.NullSymbol then return Null_Constituent_Info_Ref; else if GetSymbolDiscriminant (Item) /= Dictionary.Constituent_Symbol then DiscriminantDebug ("Get_Constituent_Info_Ref", GetSymbolDiscriminant (Item), 
Dictionary.Constituent_Symbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Get_Constituent_Info_Ref"); end if; return RefType_To_Constituent_Info_Ref (GetSymbolRef (Item)); end if; end Get_Constituent_Info_Ref; -------------------------------------------------------------------------------- procedure Create_Constituent (The_Own_Variable : in Own_Variable_Info_Ref; The_Variable : in Variable_Info_Ref; Mode : in Dictionary.Modes; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Constituent : out Constituent_Info_Ref) is The_Constituent_Symbol : Dictionary.Symbol; function Constituent_Info_Ref_To_RefType is new Unchecked_Conversion (Constituent_Info_Ref, Dictionary.Ref_Type); begin The_Constituent := new Constituent_Info' (Self => Dictionary.NullSymbol, Own_Variable => The_Own_Variable, Variable => The_Variable, Mode => Mode, Next => Null_Constituent_Info_Ref); AddSymbol (Discriminant => Dictionary.Constituent_Symbol, Ref => Constituent_Info_Ref_To_RefType (The_Constituent), Comp_Unit => Comp_Unit, Loc => Loc, Item => The_Constituent_Symbol); The_Constituent.Self := The_Constituent_Symbol; exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Constituent"); end Create_Constituent; -------------------------------------------------------------------------------- procedure Set_Next_Constituent (The_Constituent, Next : in Constituent_Info_Ref) is begin The_Constituent.Next := Next; end Set_Next_Constituent; -------------------------------------------------------------------------------- function Get_Constituent_Symbol (The_Constituent : Constituent_Info_Ref) return Dictionary.Symbol is begin if The_Constituent = Null_Constituent_Info_Ref then return Dictionary.NullSymbol; else return The_Constituent.Self; end if; end Get_Constituent_Symbol; -------------------------------------------------------------------------------- function Get_Constituent_Own_Variable (The_Constituent : Constituent_Info_Ref) return Own_Variable_Info_Ref is begin if The_Constituent = Null_Constituent_Info_Ref then return Null_Own_Variable_Info_Ref; else return The_Constituent.Own_Variable; end if; end Get_Constituent_Own_Variable; -------------------------------------------------------------------------------- function Get_Constituent_Variable (The_Constituent : Constituent_Info_Ref) return Variable_Info_Ref is begin if The_Constituent = Null_Constituent_Info_Ref then return Null_Variable_Info_Ref; else return The_Constituent.Variable; end if; end Get_Constituent_Variable; -------------------------------------------------------------------------------- function Get_Constituent_Mode (The_Constituent : Constituent_Info_Ref) return Dictionary.Modes is begin if The_Constituent = Null_Constituent_Info_Ref then return Dictionary.InvalidMode; else return The_Constituent.Mode; end if; end Get_Constituent_Mode; -------------------------------------------------------------------------------- function Get_Next_Constituent (The_Constituent : Constituent_Info_Ref) return Constituent_Info_Ref is begin if The_Constituent = Null_Constituent_Info_Ref then return Null_Constituent_Info_Ref; else return The_Constituent.Next; end if; end Get_Next_Constituent; -------------------------------------------------------------------------------- -- Context_Clause_Info -------------------------------------------------------------------------------- function 
Get_Context_Clause_Info_Ref (Item : Dictionary.Symbol) return Context_Clause_Info_Ref is function RefType_To_Context_Clause_Info_Ref is new Unchecked_Conversion (Dictionary.Ref_Type, Context_Clause_Info_Ref); begin if Item = Dictionary.NullSymbol then return Null_Context_Clause_Info_Ref; else if GetSymbolDiscriminant (Item) /= Dictionary.Context_Clause_Symbol then DiscriminantDebug ("Get_Context_Clause_Info_Ref", GetSymbolDiscriminant (Item), Dictionary.Context_Clause_Symbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Get_Context_Clause_Info_Ref"); end if; return RefType_To_Context_Clause_Info_Ref (GetSymbolRef (Item)); end if; end Get_Context_Clause_Info_Ref; -------------------------------------------------------------------------------- procedure Create_Context_Clause (The_Package : in Package_Info_Ref; The_Subprogram : in Subprogram_Info_Ref; Explicit : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Context_Clause : out Context_Clause_Info_Ref) is The_Context_Clause_Symbol : Dictionary.Symbol; function Context_Clause_Info_Ref_To_RefType is new Unchecked_Conversion (Context_Clause_Info_Ref, Dictionary.Ref_Type); begin if The_Package /= Null_Package_Info_Ref and then The_Subprogram = Null_Subprogram_Info_Ref then The_Context_Clause := new Context_Clause_Info' (Self => Dictionary.NullSymbol, Is_Subprogram => False, The_Package => The_Package, Explicit => Explicit, Next => Null_Context_Clause_Info_Ref); elsif The_Package = Null_Package_Info_Ref and then The_Subprogram /= Null_Subprogram_Info_Ref then The_Context_Clause := new Context_Clause_Info' (Self => Dictionary.NullSymbol, Is_Subprogram => True, The_Subprogram => The_Subprogram, Explicit => Explicit, Next => Null_Context_Clause_Info_Ref); else The_Context_Clause := Null_Context_Clause_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Create_Context_Clause"); end if; AddSymbol (Discriminant => Dictionary.Context_Clause_Symbol, Ref => Context_Clause_Info_Ref_To_RefType (The_Context_Clause), Comp_Unit => Comp_Unit, Loc => Loc, Item => The_Context_Clause_Symbol); The_Context_Clause.Self := The_Context_Clause_Symbol; exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Context_Clause"); end Create_Context_Clause; -------------------------------------------------------------------------------- procedure Set_Context_Clause_Explicit (The_Context_Clause : in Context_Clause_Info_Ref) is begin The_Context_Clause.Explicit := True; end Set_Context_Clause_Explicit; -------------------------------------------------------------------------------- procedure Set_Next_Context_Clause (The_Context_Clause, Next : in Context_Clause_Info_Ref) is begin The_Context_Clause.Next := Next; end Set_Next_Context_Clause; -------------------------------------------------------------------------------- function Get_Context_Clause_Symbol (The_Context_Clause : Context_Clause_Info_Ref) return Dictionary.Symbol is begin if The_Context_Clause = Null_Context_Clause_Info_Ref then return Dictionary.NullSymbol; else return The_Context_Clause.Self; end if; end Get_Context_Clause_Symbol; -------------------------------------------------------------------------------- function Get_Context_Clause_Is_Subprogram (The_Context_Clause : Context_Clause_Info_Ref) return Boolean is begin if The_Context_Clause = 
Null_Context_Clause_Info_Ref then return False; else return The_Context_Clause.Is_Subprogram; end if; end Get_Context_Clause_Is_Subprogram; -------------------------------------------------------------------------------- function Get_Context_Clause_Package (The_Context_Clause : Context_Clause_Info_Ref) return Package_Info_Ref is begin if The_Context_Clause = Null_Context_Clause_Info_Ref then return Null_Package_Info_Ref; else return The_Context_Clause.The_Package; end if; end Get_Context_Clause_Package; -------------------------------------------------------------------------------- function Get_Context_Clause_Subprogram (The_Context_Clause : Context_Clause_Info_Ref) return Subprogram_Info_Ref is begin if The_Context_Clause = Null_Context_Clause_Info_Ref then return Null_Subprogram_Info_Ref; else return The_Context_Clause.The_Subprogram; end if; end Get_Context_Clause_Subprogram; -------------------------------------------------------------------------------- function Get_Context_Clause_Explicit (The_Context_Clause : Context_Clause_Info_Ref) return Boolean is begin if The_Context_Clause = Null_Context_Clause_Info_Ref then return False; else return The_Context_Clause.Explicit; end if; end Get_Context_Clause_Explicit; -------------------------------------------------------------------------------- function Get_Next_Context_Clause (The_Context_Clause : Context_Clause_Info_Ref) return Context_Clause_Info_Ref is begin if The_Context_Clause = Null_Context_Clause_Info_Ref then return Null_Context_Clause_Info_Ref; else return The_Context_Clause.Next; end if; end Get_Next_Context_Clause; -------------------------------------------------------------------------------- -- Use_Type_Clause_Info -------------------------------------------------------------------------------- procedure Create_Use_Type_Clause (Type_Mark : in Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Use_Type_Clause : out Use_Type_Clause_Info_Ref) is The_Use_Type_Clause_Symbol : Dictionary.Symbol; pragma Unreferenced (The_Use_Type_Clause_Symbol); function Use_Type_Clause_Info_Ref_To_RefType is new Unchecked_Conversion (Use_Type_Clause_Info_Ref, Dictionary.Ref_Type); begin The_Use_Type_Clause := new Use_Type_Clause_Info'(The_Type => Type_Mark, Next => Null_Use_Type_Clause_Info_Ref); AddSymbol (Discriminant => Dictionary.Use_Type_Clause_Symbol, Ref => Use_Type_Clause_Info_Ref_To_RefType (The_Use_Type_Clause), Comp_Unit => Comp_Unit, Loc => Loc, Item => The_Use_Type_Clause_Symbol); exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Use_Type_Clause"); end Create_Use_Type_Clause; -------------------------------------------------------------------------------- procedure Set_Next_Use_Type_Clause (The_Use_Type_Clause, Next : in Use_Type_Clause_Info_Ref) is begin The_Use_Type_Clause.Next := Next; end Set_Next_Use_Type_Clause; -------------------------------------------------------------------------------- function Get_Use_Type_Clause_Type (The_Use_Type_Clause : Use_Type_Clause_Info_Ref) return Type_Info_Ref is begin if The_Use_Type_Clause = Null_Use_Type_Clause_Info_Ref then return Null_Type_Info_Ref; else return The_Use_Type_Clause.The_Type; end if; end Get_Use_Type_Clause_Type; -------------------------------------------------------------------------------- function Get_Next_Use_Type_Clause (The_Use_Type_Clause : Use_Type_Clause_Info_Ref) return Use_Type_Clause_Info_Ref is begin if 
The_Use_Type_Clause = Null_Use_Type_Clause_Info_Ref then return Null_Use_Type_Clause_Info_Ref; else return The_Use_Type_Clause.Next; end if; end Get_Next_Use_Type_Clause; -------------------------------------------------------------------------------- -- Parameter_Constraint_Info_Ref -------------------------------------------------------------------------------- function Get_Parameter_Constraint_Info_Ref (Item : Dictionary.Symbol) return Parameter_Constraint_Info_Ref is function RefType_To_Parameter_Constraint_Info_Ref is new Unchecked_Conversion (Dictionary.Ref_Type, Parameter_Constraint_Info_Ref); begin if Item = Dictionary.NullSymbol then return Null_Parameter_Constraint_Info_Ref; else if GetSymbolDiscriminant (Item) /= Dictionary.Parameter_Constraint_Symbol then DiscriminantDebug ("Get_Parameter_Constraint_Info_Ref", GetSymbolDiscriminant (Item), Dictionary.Parameter_Constraint_Symbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Get_Parameter_Constraint_Info_Ref"); end if; return RefType_To_Parameter_Constraint_Info_Ref (GetSymbolRef (Item)); end if; end Get_Parameter_Constraint_Info_Ref; -------------------------------------------------------------------------------- procedure Create_Parameter_Constraint (The_Subprogram_Parameter : in Subprogram_Parameter_Info_Ref; Dimension : in Positive; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Parameter_Constraint : out Parameter_Constraint_Info_Ref) is The_Parameter_Constraint_Symbol : Dictionary.Symbol; function Parameter_Constraint_Info_Ref_To_RefType is new Unchecked_Conversion (Parameter_Constraint_Info_Ref, Dictionary.Ref_Type); begin The_Parameter_Constraint := new Parameter_Constraint_Info' (Self => Dictionary.NullSymbol, Subprogram_Parameter => The_Subprogram_Parameter, Dimension => Dimension, Next => Null_Parameter_Constraint_Info_Ref); AddSymbol (Discriminant => Dictionary.Parameter_Constraint_Symbol, Ref => Parameter_Constraint_Info_Ref_To_RefType (The_Parameter_Constraint), Comp_Unit => Comp_Unit, Loc => Loc, Item => The_Parameter_Constraint_Symbol); The_Parameter_Constraint.Self := The_Parameter_Constraint_Symbol; exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Parameter_Constraint"); end Create_Parameter_Constraint; -------------------------------------------------------------------------------- procedure Set_Next_Parameter_Constraint (The_Parameter_Constraint, Next : in Parameter_Constraint_Info_Ref) is begin The_Parameter_Constraint.Next := Next; end Set_Next_Parameter_Constraint; -------------------------------------------------------------------------------- function Get_Parameter_Constraint_Symbol (The_Parameter_Constraint : Parameter_Constraint_Info_Ref) return Dictionary.Symbol is begin if The_Parameter_Constraint = Null_Parameter_Constraint_Info_Ref then return Dictionary.NullSymbol; else return The_Parameter_Constraint.Self; end if; end Get_Parameter_Constraint_Symbol; -------------------------------------------------------------------------------- function Get_Parameter_Constraint_Subprogram_Parameter (The_Parameter_Constraint : Parameter_Constraint_Info_Ref) return Subprogram_Parameter_Info_Ref is begin if The_Parameter_Constraint = Null_Parameter_Constraint_Info_Ref then return Null_Subprogram_Parameter_Info_Ref; else return The_Parameter_Constraint.Subprogram_Parameter; end if; end 
Get_Parameter_Constraint_Subprogram_Parameter; -------------------------------------------------------------------------------- function Get_Parameter_Constraint_Dimension (The_Parameter_Constraint : Parameter_Constraint_Info_Ref) return Positive is begin if The_Parameter_Constraint = Null_Parameter_Constraint_Info_Ref then return 1; else return The_Parameter_Constraint.Dimension; end if; end Get_Parameter_Constraint_Dimension; -------------------------------------------------------------------------------- function Get_Next_Parameter_Constraint (The_Parameter_Constraint : Parameter_Constraint_Info_Ref) return Parameter_Constraint_Info_Ref is begin if The_Parameter_Constraint = Null_Parameter_Constraint_Info_Ref then return Null_Parameter_Constraint_Info_Ref; else return The_Parameter_Constraint.Next; end if; end Get_Next_Parameter_Constraint; -------------------------------------------------------------------------------- -- Subprogram_Parameter_Info -------------------------------------------------------------------------------- function Get_Subprogram_Parameter_Info_Ref (Item : Dictionary.Symbol) return Subprogram_Parameter_Info_Ref is function RefType_To_Subprogram_Parameter_Info_Ref is new Unchecked_Conversion (Dictionary.Ref_Type, Subprogram_Parameter_Info_Ref); begin if Item = Dictionary.NullSymbol then return Null_Subprogram_Parameter_Info_Ref; else if GetSymbolDiscriminant (Item) /= Dictionary.Subprogram_Parameter_Symbol then DiscriminantDebug ("Get_Subprogram_Parameter_Info_Ref", GetSymbolDiscriminant (Item), Dictionary.Subprogram_Parameter_Symbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Get_Subprogram_Parameter_Info_Ref"); end if; return RefType_To_Subprogram_Parameter_Info_Ref (GetSymbolRef (Item)); end if; end Get_Subprogram_Parameter_Info_Ref; -------------------------------------------------------------------------------- procedure Create_Subprogram_Parameter (Name : in LexTokenManager.Lex_String; The_Subprogram : in Subprogram_Info_Ref; Type_Mark : in Type_Info_Ref; Mode : in Modes; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Subprogram_Parameter : out Subprogram_Parameter_Info_Ref) is The_Subprogram_Parameter_Symbol : Dictionary.Symbol; function Subprogram_Parameter_Info_Ref_To_RefType is new Unchecked_Conversion (Subprogram_Parameter_Info_Ref, Dictionary.Ref_Type); begin The_Subprogram_Parameter := new Subprogram_Parameter_Info' (Self => Dictionary.NullSymbol, Name => Name, Subprogram => The_Subprogram, Type_Mark => Type_Mark, Mode => Mode, Exported => Booleans'(Dictionary.Abstractions => False), Imported => Booleans'(Dictionary.Abstractions => False), Dependencies => Dependencies_T'(Dictionary.Abstractions => Null_Dependency_Info_Ref), Global_References => Global_Variables_T'(Dictionary.Abstractions => Null_Global_Variable_Info_Ref), Subcomponents => Null_Subcomponent_Info_Ref, Index_Constraints => Null_Parameter_Constraint_Info_Ref, Next => Null_Subprogram_Parameter_Info_Ref); AddSymbol (Discriminant => Dictionary.Subprogram_Parameter_Symbol, Ref => Subprogram_Parameter_Info_Ref_To_RefType (The_Subprogram_Parameter), Comp_Unit => Comp_Unit, Loc => Loc, Item => The_Subprogram_Parameter_Symbol); The_Subprogram_Parameter.Self := The_Subprogram_Parameter_Symbol; exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Subprogram_Parameter"); end Create_Subprogram_Parameter; 
-------------------------------------------------------------------------------- procedure Set_Subprogram_Parameter_Exported (The_Subprogram_Parameter : in Subprogram_Parameter_Info_Ref; Abstraction : in Dictionary.Abstractions) is begin The_Subprogram_Parameter.Exported (Abstraction) := True; end Set_Subprogram_Parameter_Exported; -------------------------------------------------------------------------------- procedure Set_Subprogram_Parameter_Imported (The_Subprogram_Parameter : in Subprogram_Parameter_Info_Ref; Abstraction : in Dictionary.Abstractions) is begin The_Subprogram_Parameter.Imported (Abstraction) := True; end Set_Subprogram_Parameter_Imported; -------------------------------------------------------------------------------- procedure Set_Subprogram_Parameter_Dependencies (The_Subprogram_Parameter : in Subprogram_Parameter_Info_Ref; Abstraction : in Dictionary.Abstractions; Dependency : in Dependency_Info_Ref) is begin The_Subprogram_Parameter.Dependencies (Abstraction) := Dependency; end Set_Subprogram_Parameter_Dependencies; -------------------------------------------------------------------------------- procedure Set_Subprogram_Parameter_Global_References (The_Subprogram_Parameter : in Subprogram_Parameter_Info_Ref; Abstraction : in Dictionary.Abstractions; Reference : in Global_Variable_Info_Ref) is begin The_Subprogram_Parameter.Global_References (Abstraction) := Reference; end Set_Subprogram_Parameter_Global_References; -------------------------------------------------------------------------------- procedure Set_Subprogram_Parameter_Subcomponents (The_Subprogram_Parameter : in Subprogram_Parameter_Info_Ref; Subcomponents : in Subcomponent_Info_Ref) is begin The_Subprogram_Parameter.Subcomponents := Subcomponents; end Set_Subprogram_Parameter_Subcomponents; -------------------------------------------------------------------------------- procedure Set_Subprogram_Parameter_Index_Constraints (The_Subprogram_Parameter : in Subprogram_Parameter_Info_Ref; The_Index_Constraints : in Parameter_Constraint_Info_Ref) is begin The_Subprogram_Parameter.Index_Constraints := The_Index_Constraints; end Set_Subprogram_Parameter_Index_Constraints; -------------------------------------------------------------------------------- procedure Set_Next_Subprogram_Parameter (The_Subprogram_Parameter, Next : in Subprogram_Parameter_Info_Ref) is begin The_Subprogram_Parameter.Next := Next; end Set_Next_Subprogram_Parameter; -------------------------------------------------------------------------------- function Get_Subprogram_Parameter_Symbol (The_Subprogram_Parameter : Subprogram_Parameter_Info_Ref) return Dictionary.Symbol is begin if The_Subprogram_Parameter = Null_Subprogram_Parameter_Info_Ref then return Dictionary.NullSymbol; else return The_Subprogram_Parameter.Self; end if; end Get_Subprogram_Parameter_Symbol; -------------------------------------------------------------------------------- function Get_Subprogram_Parameter_Name (The_Subprogram_Parameter : Subprogram_Parameter_Info_Ref) return LexTokenManager.Lex_String is begin if The_Subprogram_Parameter = Null_Subprogram_Parameter_Info_Ref then return LexTokenManager.Null_String; else return The_Subprogram_Parameter.Name; end if; end Get_Subprogram_Parameter_Name; -------------------------------------------------------------------------------- function Get_Subprogram_Parameter_Subprogram (The_Subprogram_Parameter : Subprogram_Parameter_Info_Ref) return Subprogram_Info_Ref is begin if The_Subprogram_Parameter = 
Null_Subprogram_Parameter_Info_Ref then return Null_Subprogram_Info_Ref; else return The_Subprogram_Parameter.Subprogram; end if; end Get_Subprogram_Parameter_Subprogram; -------------------------------------------------------------------------------- function Get_Subprogram_Parameter_Type (The_Subprogram_Parameter : Subprogram_Parameter_Info_Ref) return Type_Info_Ref is begin if The_Subprogram_Parameter = Null_Subprogram_Parameter_Info_Ref then return Null_Type_Info_Ref; else return The_Subprogram_Parameter.Type_Mark; end if; end Get_Subprogram_Parameter_Type; -------------------------------------------------------------------------------- function Get_Subprogram_Parameter_Mode (The_Subprogram_Parameter : Subprogram_Parameter_Info_Ref) return Modes is begin if The_Subprogram_Parameter = Null_Subprogram_Parameter_Info_Ref then return Dictionary.InvalidMode; else return The_Subprogram_Parameter.Mode; end if; end Get_Subprogram_Parameter_Mode; -------------------------------------------------------------------------------- function Get_Subprogram_Parameter_Exported (The_Subprogram_Parameter : Subprogram_Parameter_Info_Ref; Abstraction : Dictionary.Abstractions) return Boolean is begin if The_Subprogram_Parameter = Null_Subprogram_Parameter_Info_Ref then return False; else return The_Subprogram_Parameter.Exported (Abstraction); end if; end Get_Subprogram_Parameter_Exported; -------------------------------------------------------------------------------- function Get_Subprogram_Parameter_Imported (The_Subprogram_Parameter : Subprogram_Parameter_Info_Ref; Abstraction : Dictionary.Abstractions) return Boolean is begin if The_Subprogram_Parameter = Null_Subprogram_Parameter_Info_Ref then return False; else return The_Subprogram_Parameter.Imported (Abstraction); end if; end Get_Subprogram_Parameter_Imported; -------------------------------------------------------------------------------- function Get_Subprogram_Parameter_Dependencies (The_Subprogram_Parameter : Subprogram_Parameter_Info_Ref; Abstraction : Dictionary.Abstractions) return Dependency_Info_Ref is begin if The_Subprogram_Parameter = Null_Subprogram_Parameter_Info_Ref then return Null_Dependency_Info_Ref; else return The_Subprogram_Parameter.Dependencies (Abstraction); end if; end Get_Subprogram_Parameter_Dependencies; -------------------------------------------------------------------------------- function Get_Subprogram_Parameter_Global_References (The_Subprogram_Parameter : Subprogram_Parameter_Info_Ref; Abstraction : Dictionary.Abstractions) return Global_Variable_Info_Ref is begin if The_Subprogram_Parameter = Null_Subprogram_Parameter_Info_Ref then return Null_Global_Variable_Info_Ref; else return The_Subprogram_Parameter.Global_References (Abstraction); end if; end Get_Subprogram_Parameter_Global_References; -------------------------------------------------------------------------------- function Get_Subprogram_Parameter_Subcomponents (The_Subprogram_Parameter : Subprogram_Parameter_Info_Ref) return Subcomponent_Info_Ref is begin if The_Subprogram_Parameter = Null_Subprogram_Parameter_Info_Ref then return Null_Subcomponent_Info_Ref; else return The_Subprogram_Parameter.Subcomponents; end if; end Get_Subprogram_Parameter_Subcomponents; -------------------------------------------------------------------------------- function Get_Subprogram_Parameter_Index_Constraints (The_Subprogram_Parameter : Subprogram_Parameter_Info_Ref) return Parameter_Constraint_Info_Ref is begin if The_Subprogram_Parameter = 
Null_Subprogram_Parameter_Info_Ref then return Null_Parameter_Constraint_Info_Ref; else return The_Subprogram_Parameter.Index_Constraints; end if; end Get_Subprogram_Parameter_Index_Constraints; -------------------------------------------------------------------------------- function Get_Next_Subprogram_Parameter (The_Subprogram_Parameter : Subprogram_Parameter_Info_Ref) return Subprogram_Parameter_Info_Ref is begin if The_Subprogram_Parameter = Null_Subprogram_Parameter_Info_Ref then return Null_Subprogram_Parameter_Info_Ref; else return The_Subprogram_Parameter.Next; end if; end Get_Next_Subprogram_Parameter; -------------------------------------------------------------------------------- -- Subprogram_Info -------------------------------------------------------------------------------- function Get_Subprogram_Info_Ref (Item : Dictionary.Symbol) return Subprogram_Info_Ref is function RefType_To_Subprogram_Info_Ref is new Unchecked_Conversion (Dictionary.Ref_Type, Subprogram_Info_Ref); begin if Item = Dictionary.NullSymbol then return Null_Subprogram_Info_Ref; else if GetSymbolDiscriminant (Item) /= Dictionary.Subprogram_Symbol then DiscriminantDebug ("Get_Subprogram_info_Ref", GetSymbolDiscriminant (Item), Dictionary.Subprogram_Symbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Get_Subprogram_Info_Ref"); end if; return RefType_To_Subprogram_Info_Ref (GetSymbolRef (Item)); end if; end Get_Subprogram_Info_Ref; -------------------------------------------------------------------------------- procedure Create_Subprogram (Name : in LexTokenManager.Lex_String; The_Declaration : in Declaration_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Subprogram : out Subprogram_Info_Ref) is The_Subprogram_Symbol : Dictionary.Symbol; function Subprogram_Info_Ref_To_RefType is new Unchecked_Conversion (Subprogram_Info_Ref, Dictionary.Ref_Type); begin The_Subprogram := new Subprogram_Info' (Self => Dictionary.NullSymbol, Name => Name, Implicit_Proof_Functions => Null_Symbols, Implicit_Return_Variables => Implicit_Return_Variables_T' (Dictionary.Abstractions => Null_Implicit_Return_Variable_Info_Ref), Signature_Is_Wellformed => Booleans'(Dictionary.Abstractions => True), Precondition => Constraints'(Dictionary.Abstractions => 0), Postcondition => Constraints'(Dictionary.Abstractions => 0), Subprogram_Body => Null_Declaration_Info_Ref, Has_Proper_Body => False, Body_Is_Hidden => False, Has_Second_Annotation => False, Has_Second_Constraint => False, Has_Derives_Annotation => False, Has_Delay_Property => False, Delay_Property_Is_Accounted_For => False, Generic_Unit => Null_Generic_Unit_Info_Ref, Instantiation_Of => Null_Subprogram_Info_Ref, First_Generic_Association => Null_Generic_Association_Info_Ref, Last_Generic_Association => Null_Generic_Association_Info_Ref, Suspends_List => Dictionary.NullSymbol, Is_Entry => False, Entry_Barrier => Dictionary.NullSymbol, Is_Interrupt_Handler => False, Uses_Unprotected_Variables => False, Uses_Unchecked_Conversion => False, Assigns_From_External => False, Type_Mark => Null_Type_Info_Ref, With_Clauses => Null_Context_Clause_Info_Ref, Use_Type_Clauses => Null_Use_Type_Clause_Info_Ref, Inherit_Clauses => Null_Context_Clause_Info_Ref, First_Subprogram_Parameter => Null_Subprogram_Parameter_Info_Ref, Last_Subprogram_Parameter => Null_Subprogram_Parameter_Info_Ref, First_Global_Variable => Global_Variables_T'(Dictionary.Abstractions => Null_Global_Variable_Info_Ref), 
Last_Global_Variable => Global_Variables_T'(Dictionary.Abstractions => Null_Global_Variable_Info_Ref), Renaming_Declarations => Null_Declaration_Info_Ref, First_Declaration => Null_Declaration_Info_Ref, Last_Declaration => Null_Declaration_Info_Ref, First_Loop => Dictionary.NullSymbol, Last_Loop => Dictionary.NullSymbol, Specification => The_Declaration); AddSymbol (Discriminant => Dictionary.Subprogram_Symbol, Ref => Subprogram_Info_Ref_To_RefType (The_Subprogram), Comp_Unit => Comp_Unit, Loc => Loc, Item => The_Subprogram_Symbol); The_Subprogram.Self := The_Subprogram_Symbol; The_Declaration.Item := The_Subprogram_Symbol; exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Subprogram"); end Create_Subprogram; -------------------------------------------------------------------------------- procedure Set_Subprogram_Implicit_Proof_Function (The_Subprogram : in Subprogram_Info_Ref; Abstraction : in Dictionary.Abstractions; The_Proof_Function : in Dictionary.Symbol) is begin The_Subprogram.Implicit_Proof_Functions (Abstraction) := The_Proof_Function; end Set_Subprogram_Implicit_Proof_Function; -------------------------------------------------------------------------------- procedure Set_Subprogram_Implicit_Return_Variable (The_Subprogram : in Subprogram_Info_Ref; Abstraction : in Dictionary.Abstractions; The_Implicit_Return_Variable : in Implicit_Return_Variable_Info_Ref) is begin The_Subprogram.Implicit_Return_Variables (Abstraction) := The_Implicit_Return_Variable; end Set_Subprogram_Implicit_Return_Variable; -------------------------------------------------------------------------------- procedure Set_Subprogram_Signature_Not_Wellformed (The_Subprogram : in Subprogram_Info_Ref; Abstraction : in Dictionary.Abstractions) is begin The_Subprogram.Signature_Is_Wellformed (Abstraction) := False; -- we may be marking a missing second anno as malformed so we must also mark -- it as present, thus if Abstraction = Dictionary.IsRefined then The_Subprogram.Has_Second_Annotation := True; end if; end Set_Subprogram_Signature_Not_Wellformed; -------------------------------------------------------------------------------- procedure Set_Subprogram_Precondition (The_Subprogram : in Subprogram_Info_Ref; Abstraction : in Dictionary.Abstractions; Precondition : in ExaminerConstants.RefType) is begin The_Subprogram.Precondition (Abstraction) := Precondition; end Set_Subprogram_Precondition; -------------------------------------------------------------------------------- procedure Set_Subprogram_Postcondition (The_Subprogram : in Subprogram_Info_Ref; Abstraction : in Dictionary.Abstractions; Postcondition : in ExaminerConstants.RefType) is begin The_Subprogram.Postcondition (Abstraction) := Postcondition; end Set_Subprogram_Postcondition; -------------------------------------------------------------------------------- procedure Set_Subprogram_Body (The_Subprogram : in Subprogram_Info_Ref; The_Body : in Declaration_Info_Ref) is begin The_Subprogram.Subprogram_Body := The_Body; end Set_Subprogram_Body; -------------------------------------------------------------------------------- procedure Set_Subprogram_Has_Proper_Body (The_Subprogram : in Subprogram_Info_Ref; Is_Hidden : in Boolean) is begin The_Subprogram.Has_Proper_Body := True; The_Subprogram.Body_Is_Hidden := Is_Hidden; end Set_Subprogram_Has_Proper_Body; -------------------------------------------------------------------------------- procedure 
Set_Subprogram_Has_Second_Annotation (The_Subprogram : in Subprogram_Info_Ref) is begin The_Subprogram.Has_Second_Annotation := True; end Set_Subprogram_Has_Second_Annotation; -------------------------------------------------------------------------------- procedure Set_Subprogram_Has_Second_Constraint (The_Subprogram : in Subprogram_Info_Ref) is begin The_Subprogram.Has_Second_Constraint := True; end Set_Subprogram_Has_Second_Constraint; -------------------------------------------------------------------------------- procedure Set_Subprogram_Has_Derives_Annotation (The_Subprogram : in Subprogram_Info_Ref) is begin The_Subprogram.Has_Derives_Annotation := True; end Set_Subprogram_Has_Derives_Annotation; -------------------------------------------------------------------------------- procedure Set_Subprogram_Has_Delay_Property (The_Subprogram : in Subprogram_Info_Ref) is begin The_Subprogram.Has_Delay_Property := True; end Set_Subprogram_Has_Delay_Property; -------------------------------------------------------------------------------- procedure Set_Subprogram_Mark_Accounts_For_Delay (The_Subprogram : in Subprogram_Info_Ref) is begin The_Subprogram.Delay_Property_Is_Accounted_For := True; end Set_Subprogram_Mark_Accounts_For_Delay; -------------------------------------------------------------------------------- procedure Set_Subprogram_Generic_Unit (The_Subprogram : in Subprogram_Info_Ref; The_Generic_Unit : in Generic_Unit_Info_Ref) is begin The_Subprogram.Generic_Unit := The_Generic_Unit; end Set_Subprogram_Generic_Unit; -------------------------------------------------------------------------------- procedure Set_Subprogram_Instantiation_Of (The_Subprogram : in Subprogram_Info_Ref; The_Generic : in Subprogram_Info_Ref) is begin The_Subprogram.Instantiation_Of := The_Generic; end Set_Subprogram_Instantiation_Of; -------------------------------------------------------------------------------- procedure Set_Subprogram_First_Generic_Association (The_Subprogram : in Subprogram_Info_Ref; The_Generic_Association : in Generic_Association_Info_Ref) is begin The_Subprogram.First_Generic_Association := The_Generic_Association; end Set_Subprogram_First_Generic_Association; -------------------------------------------------------------------------------- procedure Set_Subprogram_Last_Generic_Association (The_Subprogram : in Subprogram_Info_Ref; The_Generic_Association : in Generic_Association_Info_Ref) is begin The_Subprogram.Last_Generic_Association := The_Generic_Association; end Set_Subprogram_Last_Generic_Association; -------------------------------------------------------------------------------- procedure Set_Subprogram_Suspends_List (The_Subprogram : in Subprogram_Info_Ref; The_Suspends_List : in Dictionary.Symbol) is begin The_Subprogram.Suspends_List := The_Suspends_List; end Set_Subprogram_Suspends_List; -------------------------------------------------------------------------------- procedure Set_Subprogram_Is_Entry (The_Subprogram : in Subprogram_Info_Ref) is begin The_Subprogram.Is_Entry := True; end Set_Subprogram_Is_Entry; -------------------------------------------------------------------------------- procedure Set_Subprogram_Entry_Barrier (The_Subprogram : in Subprogram_Info_Ref; The_Barrier : in Dictionary.Symbol) is begin The_Subprogram.Entry_Barrier := The_Barrier; end Set_Subprogram_Entry_Barrier; -------------------------------------------------------------------------------- procedure Set_Subprogram_Is_Interrupt_Handler (The_Subprogram : in Subprogram_Info_Ref) is begin 
The_Subprogram.Is_Interrupt_Handler := True; end Set_Subprogram_Is_Interrupt_Handler; -------------------------------------------------------------------------------- procedure Set_Subprogram_Uses_Unprotected_Variables (The_Subprogram : in Subprogram_Info_Ref) is begin The_Subprogram.Uses_Unprotected_Variables := True; end Set_Subprogram_Uses_Unprotected_Variables; -------------------------------------------------------------------------------- procedure Set_Subprogram_Uses_Unchecked_Conversion (The_Subprogram : in Subprogram_Info_Ref) is begin The_Subprogram.Uses_Unchecked_Conversion := True; end Set_Subprogram_Uses_Unchecked_Conversion; -------------------------------------------------------------------------------- procedure Set_Subprogram_Assigns_From_External (The_Subprogram : in Subprogram_Info_Ref) is begin The_Subprogram.Assigns_From_External := True; end Set_Subprogram_Assigns_From_External; -------------------------------------------------------------------------------- procedure Set_Subprogram_Return_Type (The_Subprogram : in Subprogram_Info_Ref; Type_Mark : in Type_Info_Ref) is begin The_Subprogram.Type_Mark := Type_Mark; end Set_Subprogram_Return_Type; -------------------------------------------------------------------------------- procedure Set_Subprogram_With_Clauses (The_Subprogram : in Subprogram_Info_Ref; The_Context_Clause : in Context_Clause_Info_Ref) is begin The_Subprogram.With_Clauses := The_Context_Clause; end Set_Subprogram_With_Clauses; -------------------------------------------------------------------------------- procedure Set_Subprogram_Use_Type_Clauses (The_Subprogram : in Subprogram_Info_Ref; The_Use_Type_Clause : in Use_Type_Clause_Info_Ref) is begin The_Subprogram.Use_Type_Clauses := The_Use_Type_Clause; end Set_Subprogram_Use_Type_Clauses; -------------------------------------------------------------------------------- procedure Set_Subprogram_Inherit_Clauses (The_Subprogram : in Subprogram_Info_Ref; The_Inherit_Clause : in Context_Clause_Info_Ref) is begin The_Subprogram.Inherit_Clauses := The_Inherit_Clause; end Set_Subprogram_Inherit_Clauses; -------------------------------------------------------------------------------- procedure Set_Subprogram_First_Parameter (The_Subprogram : in Subprogram_Info_Ref; The_Subprogram_Parameter : in Subprogram_Parameter_Info_Ref) is begin The_Subprogram.First_Subprogram_Parameter := The_Subprogram_Parameter; end Set_Subprogram_First_Parameter; -------------------------------------------------------------------------------- procedure Set_Subprogram_Last_Parameter (The_Subprogram : in Subprogram_Info_Ref; The_Subprogram_Parameter : in Subprogram_Parameter_Info_Ref) is begin The_Subprogram.Last_Subprogram_Parameter := The_Subprogram_Parameter; end Set_Subprogram_Last_Parameter; -------------------------------------------------------------------------------- procedure Set_Subprogram_First_Global_Variable (The_Subprogram : in Subprogram_Info_Ref; Abstraction : in Dictionary.Abstractions; The_Global_Variable : in Global_Variable_Info_Ref) is begin The_Subprogram.First_Global_Variable (Abstraction) := The_Global_Variable; end Set_Subprogram_First_Global_Variable; -------------------------------------------------------------------------------- procedure Set_Subprogram_Last_Global_Variable (The_Subprogram : in Subprogram_Info_Ref; Abstraction : in Dictionary.Abstractions; The_Global_Variable : in Global_Variable_Info_Ref) is begin The_Subprogram.Last_Global_Variable (Abstraction) := The_Global_Variable; end 
Set_Subprogram_Last_Global_Variable; -------------------------------------------------------------------------------- procedure Set_Subprogram_Renaming_Declarations (The_Subprogram : in Subprogram_Info_Ref; The_Declaration : in Declaration_Info_Ref) is begin The_Subprogram.Renaming_Declarations := The_Declaration; end Set_Subprogram_Renaming_Declarations; -------------------------------------------------------------------------------- procedure Set_Subprogram_First_Declaration (The_Subprogram : in Subprogram_Info_Ref; The_Declaration : in Declaration_Info_Ref) is begin The_Subprogram.First_Declaration := The_Declaration; end Set_Subprogram_First_Declaration; -------------------------------------------------------------------------------- procedure Set_Subprogram_Last_Declaration (The_Subprogram : in Subprogram_Info_Ref; The_Declaration : in Declaration_Info_Ref) is begin The_Subprogram.Last_Declaration := The_Declaration; end Set_Subprogram_Last_Declaration; -------------------------------------------------------------------------------- procedure Set_Subprogram_First_Loop (The_Subprogram : in Subprogram_Info_Ref; The_Loop : in Dictionary.Symbol) is begin The_Subprogram.First_Loop := The_Loop; end Set_Subprogram_First_Loop; -------------------------------------------------------------------------------- procedure Set_Subprogram_Last_Loop (The_Subprogram : in Subprogram_Info_Ref; The_Loop : in Dictionary.Symbol) is begin The_Subprogram.Last_Loop := The_Loop; end Set_Subprogram_Last_Loop; -------------------------------------------------------------------------------- function Get_Subprogram_Symbol (The_Subprogram : Subprogram_Info_Ref) return Dictionary.Symbol is begin if The_Subprogram = Null_Subprogram_Info_Ref then return Dictionary.NullSymbol; else return The_Subprogram.Self; end if; end Get_Subprogram_Symbol; -------------------------------------------------------------------------------- function Get_Subprogram_Name (The_Subprogram : Subprogram_Info_Ref) return LexTokenManager.Lex_String is begin if The_Subprogram = Null_Subprogram_Info_Ref then return LexTokenManager.Null_String; else return The_Subprogram.Name; end if; end Get_Subprogram_Name; -------------------------------------------------------------------------------- function Get_Subprogram_Implicit_Proof_Function (The_Subprogram : Subprogram_Info_Ref; Abstraction : Dictionary.Abstractions) return Dictionary.Symbol is begin if The_Subprogram = Null_Subprogram_Info_Ref then return Dictionary.NullSymbol; else return The_Subprogram.Implicit_Proof_Functions (Abstraction); end if; end Get_Subprogram_Implicit_Proof_Function; -------------------------------------------------------------------------------- function Get_Subprogram_Implicit_Return_Variable (The_Subprogram : Subprogram_Info_Ref; Abstraction : Dictionary.Abstractions) return Implicit_Return_Variable_Info_Ref is begin if The_Subprogram = Null_Subprogram_Info_Ref then return Null_Implicit_Return_Variable_Info_Ref; else return The_Subprogram.Implicit_Return_Variables (Abstraction); end if; end Get_Subprogram_Implicit_Return_Variable; -------------------------------------------------------------------------------- function Get_Subprogram_Signature_Is_Wellformed (The_Subprogram : Subprogram_Info_Ref; Abstraction : Dictionary.Abstractions) return Boolean is begin if The_Subprogram = Null_Subprogram_Info_Ref then return False; else return The_Subprogram.Signature_Is_Wellformed (Abstraction); end if; end Get_Subprogram_Signature_Is_Wellformed; 
-------------------------------------------------------------------------------- function Get_Subprogram_Precondition (The_Subprogram : Subprogram_Info_Ref; Abstraction : Dictionary.Abstractions) return ExaminerConstants.RefType is begin if The_Subprogram = Null_Subprogram_Info_Ref then return 0; else return The_Subprogram.Precondition (Abstraction); end if; end Get_Subprogram_Precondition; -------------------------------------------------------------------------------- function Get_Subprogram_Postcondition (The_Subprogram : Subprogram_Info_Ref; Abstraction : Dictionary.Abstractions) return ExaminerConstants.RefType is begin if The_Subprogram = Null_Subprogram_Info_Ref then return 0; else return The_Subprogram.Postcondition (Abstraction); end if; end Get_Subprogram_Postcondition; -------------------------------------------------------------------------------- function Get_Subprogram_Body (The_Subprogram : Subprogram_Info_Ref) return Declaration_Info_Ref is begin if The_Subprogram = Null_Subprogram_Info_Ref then return Null_Declaration_Info_Ref; else return The_Subprogram.Subprogram_Body; end if; end Get_Subprogram_Body; -------------------------------------------------------------------------------- function Get_Subprogram_Has_Proper_Body (The_Subprogram : Subprogram_Info_Ref) return Boolean is begin if The_Subprogram = Null_Subprogram_Info_Ref then return False; else return The_Subprogram.Has_Proper_Body; end if; end Get_Subprogram_Has_Proper_Body; -------------------------------------------------------------------------------- function Get_Subprogram_Body_Is_Hidden (The_Subprogram : Subprogram_Info_Ref) return Boolean is begin if The_Subprogram = Null_Subprogram_Info_Ref then return False; else return The_Subprogram.Body_Is_Hidden; end if; end Get_Subprogram_Body_Is_Hidden; -------------------------------------------------------------------------------- function Get_Subprogram_Has_Second_Annotation (The_Subprogram : Subprogram_Info_Ref) return Boolean is begin if The_Subprogram = Null_Subprogram_Info_Ref then return False; else return The_Subprogram.Has_Second_Annotation; end if; end Get_Subprogram_Has_Second_Annotation; -------------------------------------------------------------------------------- function Get_Subprogram_Has_Second_Constraint (The_Subprogram : Subprogram_Info_Ref) return Boolean is begin if The_Subprogram = Null_Subprogram_Info_Ref then return False; else return The_Subprogram.Has_Second_Constraint; end if; end Get_Subprogram_Has_Second_Constraint; -------------------------------------------------------------------------------- function Get_Subprogram_Has_Derives_Annotation (The_Subprogram : Subprogram_Info_Ref) return Boolean is begin if The_Subprogram = Null_Subprogram_Info_Ref then return False; else return The_Subprogram.Has_Derives_Annotation; end if; end Get_Subprogram_Has_Derives_Annotation; -------------------------------------------------------------------------------- function Get_Subprogram_Has_Delay_Property (The_Subprogram : Subprogram_Info_Ref) return Boolean is begin if The_Subprogram = Null_Subprogram_Info_Ref then return False; else return The_Subprogram.Has_Delay_Property; end if; end Get_Subprogram_Has_Delay_Property; -------------------------------------------------------------------------------- function Get_Subprogram_Delay_Property_Is_Accounted_For (The_Subprogram : Subprogram_Info_Ref) return Boolean is begin if The_Subprogram = Null_Subprogram_Info_Ref then return False; else return The_Subprogram.Delay_Property_Is_Accounted_For; end if; end 
Get_Subprogram_Delay_Property_Is_Accounted_For; -------------------------------------------------------------------------------- function Get_Subprogram_Generic_Unit (The_Subprogram : Subprogram_Info_Ref) return Generic_Unit_Info_Ref is begin if The_Subprogram = Null_Subprogram_Info_Ref then return Null_Generic_Unit_Info_Ref; else return The_Subprogram.Generic_Unit; end if; end Get_Subprogram_Generic_Unit; -------------------------------------------------------------------------------- function Get_Subprogram_Instantiation_Of (The_Subprogram : Subprogram_Info_Ref) return Subprogram_Info_Ref is begin if The_Subprogram = Null_Subprogram_Info_Ref then return Null_Subprogram_Info_Ref; else return The_Subprogram.Instantiation_Of; end if; end Get_Subprogram_Instantiation_Of; -------------------------------------------------------------------------------- function Get_Subprogram_First_Generic_Association (The_Subprogram : Subprogram_Info_Ref) return Generic_Association_Info_Ref is begin if The_Subprogram = Null_Subprogram_Info_Ref then return Null_Generic_Association_Info_Ref; else return The_Subprogram.First_Generic_Association; end if; end Get_Subprogram_First_Generic_Association; -------------------------------------------------------------------------------- function Get_Subprogram_Last_Generic_Association (The_Subprogram : Subprogram_Info_Ref) return Generic_Association_Info_Ref is begin if The_Subprogram = Null_Subprogram_Info_Ref then return Null_Generic_Association_Info_Ref; else return The_Subprogram.Last_Generic_Association; end if; end Get_Subprogram_Last_Generic_Association; -------------------------------------------------------------------------------- function Get_Subprogram_Suspends_List (The_Subprogram : Subprogram_Info_Ref) return Dictionary.Symbol is begin if The_Subprogram = Null_Subprogram_Info_Ref then return Dictionary.NullSymbol; else return The_Subprogram.Suspends_List; end if; end Get_Subprogram_Suspends_List; -------------------------------------------------------------------------------- function Get_Subprogram_Is_Entry (The_Subprogram : Subprogram_Info_Ref) return Boolean is begin if The_Subprogram = Null_Subprogram_Info_Ref then return False; else return The_Subprogram.Is_Entry; end if; end Get_Subprogram_Is_Entry; -------------------------------------------------------------------------------- function Get_Subprogram_Entry_Barrier (The_Subprogram : Subprogram_Info_Ref) return Dictionary.Symbol is begin if The_Subprogram = Null_Subprogram_Info_Ref then return Dictionary.NullSymbol; else return The_Subprogram.Entry_Barrier; end if; end Get_Subprogram_Entry_Barrier; -------------------------------------------------------------------------------- function Get_Subprogram_Is_Interrupt_Handler (The_Subprogram : Subprogram_Info_Ref) return Boolean is begin if The_Subprogram = Null_Subprogram_Info_Ref then return False; else return The_Subprogram.Is_Interrupt_Handler; end if; end Get_Subprogram_Is_Interrupt_Handler; -------------------------------------------------------------------------------- function Get_Subprogram_Uses_Unprotected_Variables (The_Subprogram : Subprogram_Info_Ref) return Boolean is begin if The_Subprogram = Null_Subprogram_Info_Ref then return False; else return The_Subprogram.Uses_Unprotected_Variables; end if; end Get_Subprogram_Uses_Unprotected_Variables; -------------------------------------------------------------------------------- function Get_Subprogram_Uses_Unchecked_Conversion (The_Subprogram : Subprogram_Info_Ref) return Boolean is begin if 
The_Subprogram = Null_Subprogram_Info_Ref then return False; else return The_Subprogram.Uses_Unchecked_Conversion; end if; end Get_Subprogram_Uses_Unchecked_Conversion; -------------------------------------------------------------------------------- function Get_Subprogram_Assigns_From_External (The_Subprogram : Subprogram_Info_Ref) return Boolean is begin if The_Subprogram = Null_Subprogram_Info_Ref then return False; else return The_Subprogram.Assigns_From_External; end if; end Get_Subprogram_Assigns_From_External; -------------------------------------------------------------------------------- function Get_Subprogram_Return_Type (The_Subprogram : Subprogram_Info_Ref) return Type_Info_Ref is begin if The_Subprogram = Null_Subprogram_Info_Ref then return Null_Type_Info_Ref; else return The_Subprogram.Type_Mark; end if; end Get_Subprogram_Return_Type; -------------------------------------------------------------------------------- function Get_Subprogram_With_Clauses (The_Subprogram : Subprogram_Info_Ref) return Context_Clause_Info_Ref is begin if The_Subprogram = Null_Subprogram_Info_Ref then return Null_Context_Clause_Info_Ref; else return The_Subprogram.With_Clauses; end if; end Get_Subprogram_With_Clauses; -------------------------------------------------------------------------------- function Get_Subprogram_Use_Type_Clauses (The_Subprogram : Subprogram_Info_Ref) return Use_Type_Clause_Info_Ref is begin if The_Subprogram = Null_Subprogram_Info_Ref then return Null_Use_Type_Clause_Info_Ref; else return The_Subprogram.Use_Type_Clauses; end if; end Get_Subprogram_Use_Type_Clauses; -------------------------------------------------------------------------------- function Get_Subprogram_Inherit_Clauses (The_Subprogram : Subprogram_Info_Ref) return Context_Clause_Info_Ref is begin if The_Subprogram = Null_Subprogram_Info_Ref then return Null_Context_Clause_Info_Ref; else return The_Subprogram.Inherit_Clauses; end if; end Get_Subprogram_Inherit_Clauses; -------------------------------------------------------------------------------- function Get_Subprogram_First_Parameter (The_Subprogram : Subprogram_Info_Ref) return Subprogram_Parameter_Info_Ref is begin if The_Subprogram = Null_Subprogram_Info_Ref then return Null_Subprogram_Parameter_Info_Ref; else return The_Subprogram.First_Subprogram_Parameter; end if; end Get_Subprogram_First_Parameter; -------------------------------------------------------------------------------- function Get_Subprogram_Last_Parameter (The_Subprogram : Subprogram_Info_Ref) return Subprogram_Parameter_Info_Ref is begin if The_Subprogram = Null_Subprogram_Info_Ref then return Null_Subprogram_Parameter_Info_Ref; else return The_Subprogram.Last_Subprogram_Parameter; end if; end Get_Subprogram_Last_Parameter; -------------------------------------------------------------------------------- function Get_Subprogram_First_Global_Variable (The_Subprogram : Subprogram_Info_Ref; Abstraction : Dictionary.Abstractions) return Global_Variable_Info_Ref is begin if The_Subprogram = Null_Subprogram_Info_Ref then return Null_Global_Variable_Info_Ref; else return The_Subprogram.First_Global_Variable (Abstraction); end if; end Get_Subprogram_First_Global_Variable; -------------------------------------------------------------------------------- function Get_Subprogram_Last_Global_Variable (The_Subprogram : Subprogram_Info_Ref; Abstraction : Dictionary.Abstractions) return Global_Variable_Info_Ref is begin if The_Subprogram = Null_Subprogram_Info_Ref then return 
Null_Global_Variable_Info_Ref; else return The_Subprogram.Last_Global_Variable (Abstraction); end if; end Get_Subprogram_Last_Global_Variable; -------------------------------------------------------------------------------- function Get_Subprogram_Renaming_Declarations (The_Subprogram : Subprogram_Info_Ref) return Declaration_Info_Ref is begin if The_Subprogram = Null_Subprogram_Info_Ref then return Null_Declaration_Info_Ref; else return The_Subprogram.Renaming_Declarations; end if; end Get_Subprogram_Renaming_Declarations; -------------------------------------------------------------------------------- function Get_Subprogram_First_Declaration (The_Subprogram : Subprogram_Info_Ref) return Declaration_Info_Ref is begin if The_Subprogram = Null_Subprogram_Info_Ref then return Null_Declaration_Info_Ref; else return The_Subprogram.First_Declaration; end if; end Get_Subprogram_First_Declaration; -------------------------------------------------------------------------------- function Get_Subprogram_Last_Declaration (The_Subprogram : Subprogram_Info_Ref) return Declaration_Info_Ref is begin if The_Subprogram = Null_Subprogram_Info_Ref then return Null_Declaration_Info_Ref; else return The_Subprogram.Last_Declaration; end if; end Get_Subprogram_Last_Declaration; -------------------------------------------------------------------------------- function Get_Subprogram_First_Loop (The_Subprogram : Subprogram_Info_Ref) return Dictionary.Symbol is begin if The_Subprogram = Null_Subprogram_Info_Ref then return Dictionary.NullSymbol; else return The_Subprogram.First_Loop; end if; end Get_Subprogram_First_Loop; -------------------------------------------------------------------------------- function Get_Subprogram_Last_Loop (The_Subprogram : Subprogram_Info_Ref) return Dictionary.Symbol is begin if The_Subprogram = Null_Subprogram_Info_Ref then return Dictionary.NullSymbol; else return The_Subprogram.Last_Loop; end if; end Get_Subprogram_Last_Loop; -------------------------------------------------------------------------------- function Get_Subprogram_Specification (The_Subprogram : Subprogram_Info_Ref) return Declaration_Info_Ref is begin if The_Subprogram = Null_Subprogram_Info_Ref then return Null_Declaration_Info_Ref; else return The_Subprogram.Specification; end if; end Get_Subprogram_Specification; -------------------------------------------------------------------------------- -- Operator_Info -------------------------------------------------------------------------------- function Get_Operator_Info_Ref (Item : Dictionary.Symbol) return Operator_Info_Ref is function RefType_To_Operator_Info_Ref is new Unchecked_Conversion (Dictionary.Ref_Type, Operator_Info_Ref); begin if Item = Dictionary.NullSymbol then return Null_Operator_Info_Ref; else if GetSymbolDiscriminant (Item) /= Dictionary.Operator_Symbol then DiscriminantDebug ("Get_Operator_info_Ref", GetSymbolDiscriminant (Item), Dictionary.Operator_Symbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Get_Operator_info_Ref"); end if; return RefType_To_Operator_Info_Ref (GetSymbolRef (Item)); end if; end Get_Operator_Info_Ref; -------------------------------------------------------------------------------- procedure Create_Unary_Operator (Name : in SP_Symbols.SP_Symbol; Operand : in Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Operator : out Operator_Info_Ref) is The_Operator_Symbol : Dictionary.Symbol; function 
Operator_Info_Ref_To_RefType is new Unchecked_Conversion (Operator_Info_Ref, Dictionary.Ref_Type); begin The_Operator := new Operator_Info'(Self => Dictionary.NullSymbol, Name => Name, Is_Binary => False, Operand => Operand); AddSymbol (Discriminant => Dictionary.Operator_Symbol, Ref => Operator_Info_Ref_To_RefType (The_Operator), Comp_Unit => Comp_Unit, Loc => Loc, Item => The_Operator_Symbol); The_Operator.Self := The_Operator_Symbol; exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Unary_Operator"); end Create_Unary_Operator; -------------------------------------------------------------------------------- procedure Create_Binary_Operator (Name : in SP_Symbols.SP_Symbol; Left, Right : in Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Operator : out Operator_Info_Ref) is The_Operator_Symbol : Dictionary.Symbol; function Operator_Info_Ref_To_RefType is new Unchecked_Conversion (Operator_Info_Ref, Dictionary.Ref_Type); begin The_Operator := new Operator_Info' (Self => Dictionary.NullSymbol, Name => Name, Is_Binary => True, Left_Operand => Left, Right_Operand => Right); AddSymbol (Discriminant => Dictionary.Operator_Symbol, Ref => Operator_Info_Ref_To_RefType (The_Operator), Comp_Unit => Comp_Unit, Loc => Loc, Item => The_Operator_Symbol); The_Operator.Self := The_Operator_Symbol; exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Binary_Operator"); end Create_Binary_Operator; -------------------------------------------------------------------------------- function Get_Operator_Symbol (The_Operator : Operator_Info_Ref) return Dictionary.Symbol is begin if The_Operator = Null_Operator_Info_Ref then return Dictionary.NullSymbol; else return The_Operator.Self; end if; end Get_Operator_Symbol; -------------------------------------------------------------------------------- function Get_Operator_Name (The_Operator : Operator_Info_Ref) return SP_Symbols.SP_Symbol is begin if The_Operator = Null_Operator_Info_Ref then return SP_Symbols.SPEND; else return The_Operator.Name; end if; end Get_Operator_Name; -------------------------------------------------------------------------------- function Get_Operator_Is_Binary (The_Operator : Operator_Info_Ref) return Boolean is begin if The_Operator = Null_Operator_Info_Ref then return False; else return The_Operator.Is_Binary; end if; end Get_Operator_Is_Binary; -------------------------------------------------------------------------------- function Get_Operator_Operand (The_Operator : Operator_Info_Ref) return Type_Info_Ref is begin if The_Operator = Null_Operator_Info_Ref then return Null_Type_Info_Ref; else return The_Operator.Operand; end if; end Get_Operator_Operand; -------------------------------------------------------------------------------- function Get_Operator_Left_Operand (The_Operator : Operator_Info_Ref) return Type_Info_Ref is begin if The_Operator = Null_Operator_Info_Ref then return Null_Type_Info_Ref; else return The_Operator.Left_Operand; end if; end Get_Operator_Left_Operand; -------------------------------------------------------------------------------- function Get_Operator_Right_Operand (The_Operator : Operator_Info_Ref) return Type_Info_Ref is begin if The_Operator = Null_Operator_Info_Ref then return Null_Type_Info_Ref; else return The_Operator.Right_Operand; end if; end 
Get_Operator_Right_Operand; -------------------------------------------------------------------------------- -- Dependency_Info -------------------------------------------------------------------------------- function Get_Dependency_Info_Ref (Item : Dictionary.Symbol) return Dependency_Info_Ref is function RefType_To_Dependency_Info_Ref is new Unchecked_Conversion (Dictionary.Ref_Type, Dependency_Info_Ref); begin if Item = Dictionary.NullSymbol then return Null_Dependency_Info_Ref; else if GetSymbolDiscriminant (Item) /= Dictionary.Dependency_Symbol then DiscriminantDebug ("Get_Dependency_Info_Ref", GetSymbolDiscriminant (Item), Dictionary.Dependency_Symbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Get_Dependency_Info_Ref"); end if; return RefType_To_Dependency_Info_Ref (GetSymbolRef (Item)); end if; end Get_Dependency_Info_Ref; -------------------------------------------------------------------------------- procedure Create_Dependency (The_Import_Parameter : in Subprogram_Parameter_Info_Ref; The_Import_Variable : in Variable_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Dependency : out Dependency_Info_Ref) is The_Dependency_Symbol : Dictionary.Symbol; function Dependency_Info_Ref_To_RefType is new Unchecked_Conversion (Dependency_Info_Ref, Dictionary.Ref_Type); begin if The_Import_Parameter /= Null_Subprogram_Parameter_Info_Ref then The_Dependency := new Dependency_Info' (Self => Dictionary.NullSymbol, Next => Null_Dependency_Info_Ref, Kind_Of_Dependency => Dependency_Parameter_Item, Import_Parameter => The_Import_Parameter); elsif The_Import_Variable /= Null_Variable_Info_Ref then The_Dependency := new Dependency_Info' (Self => Dictionary.NullSymbol, Next => Null_Dependency_Info_Ref, Kind_Of_Dependency => Dependency_Variable_Item, Import_Variable => The_Import_Variable); else The_Dependency := Null_Dependency_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Create_Dependency"); end if; AddSymbol (Discriminant => Dictionary.Dependency_Symbol, Ref => Dependency_Info_Ref_To_RefType (The_Dependency), Comp_Unit => Comp_Unit, Loc => Loc, Item => The_Dependency_Symbol); The_Dependency.Self := The_Dependency_Symbol; exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Dependency"); end Create_Dependency; -------------------------------------------------------------------------------- procedure Set_Next_Dependency (The_Dependency, Next : in Dependency_Info_Ref) is begin The_Dependency.Next := Next; end Set_Next_Dependency; -------------------------------------------------------------------------------- function Get_Dependency_Symbol (The_Dependency : Dependency_Info_Ref) return Dictionary.Symbol is begin if The_Dependency = Null_Dependency_Info_Ref then return Dictionary.NullSymbol; else return The_Dependency.Self; end if; end Get_Dependency_Symbol; -------------------------------------------------------------------------------- function Get_Kind_Of_Dependency (The_Dependency : Dependency_Info_Ref) return Kind_Of_Dependency_T is begin return The_Dependency.Kind_Of_Dependency; end Get_Kind_Of_Dependency; -------------------------------------------------------------------------------- function Get_Dependency_Import_Parameter (The_Dependency : Dependency_Info_Ref) return Subprogram_Parameter_Info_Ref is begin if The_Dependency = 
Null_Dependency_Info_Ref then return Null_Subprogram_Parameter_Info_Ref; else return The_Dependency.Import_Parameter; end if; end Get_Dependency_Import_Parameter; -------------------------------------------------------------------------------- function Get_Dependency_Import_Variable (The_Dependency : Dependency_Info_Ref) return Variable_Info_Ref is begin if The_Dependency = Null_Dependency_Info_Ref then return Null_Variable_Info_Ref; else return The_Dependency.Import_Variable; end if; end Get_Dependency_Import_Variable; -------------------------------------------------------------------------------- function Get_Next_Dependency (The_Dependency : Dependency_Info_Ref) return Dependency_Info_Ref is begin if The_Dependency = Null_Dependency_Info_Ref then return Null_Dependency_Info_Ref; else return The_Dependency.Next; end if; end Get_Next_Dependency; -------------------------------------------------------------------------------- -- Package_Info -------------------------------------------------------------------------------- function Get_Package_Info_Ref (Item : Dictionary.Symbol) return Package_Info_Ref is function RefType_To_Package_Info_Ref is new Unchecked_Conversion (Dictionary.Ref_Type, Package_Info_Ref); begin if Item = Dictionary.NullSymbol then return Null_Package_Info_Ref; else if GetSymbolDiscriminant (Item) /= Dictionary.Package_Symbol then DiscriminantDebug ("Get_Package_Info_Ref", GetSymbolDiscriminant (Item), Dictionary.Package_Symbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Get_Package_Info_Ref"); end if; return RefType_To_Package_Info_Ref (GetSymbolRef (Item)); end if; end Get_Package_Info_Ref; -------------------------------------------------------------------------------- procedure Create_Package (Name : in LexTokenManager.Lex_String; The_Declaration : in Declaration_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Package : out Package_Info_Ref) is The_Package_Symbol : Dictionary.Symbol; function Package_Info_Ref_To_RefType is new Unchecked_Conversion (Package_Info_Ref, Dictionary.Ref_Type); begin The_Package := new Package_Info' (Self => Dictionary.NullSymbol, Name => Name, Package_Body => Null_Declaration_Info_Ref, Package_Has_Proper_Body => False, Inherit_Clauses => Null_Context_Clause_Info_Ref, Own_Variables => Null_Own_Variable_Info_Ref, Task_List => Dictionary.NullSymbol, First_Loop => Dictionary.NullSymbol, Last_Loop => Dictionary.NullSymbol, Specification => The_Declaration, Visible_Part => Part_Info'(With_Clauses => Null_Context_Clause_Info_Ref, Use_Type_Clauses => Null_Use_Type_Clause_Info_Ref, Renaming_Declarations => Null_Declaration_Info_Ref, First_Declaration => Null_Declaration_Info_Ref, Last_Declaration => Null_Declaration_Info_Ref), Local_Part => Part_Info'(With_Clauses => Null_Context_Clause_Info_Ref, Use_Type_Clauses => Null_Use_Type_Clause_Info_Ref, Renaming_Declarations => Null_Declaration_Info_Ref, First_Declaration => Null_Declaration_Info_Ref, Last_Declaration => Null_Declaration_Info_Ref), Private_Part => Part_Info'(With_Clauses => Null_Context_Clause_Info_Ref, Use_Type_Clauses => Null_Use_Type_Clause_Info_Ref, Renaming_Declarations => Null_Declaration_Info_Ref, First_Declaration => Null_Declaration_Info_Ref, Last_Declaration => Null_Declaration_Info_Ref), Elaborate_Body_Found => False, Parent => Null_Package_Info_Ref, Next_Sibling => Null_Package_Info_Ref, Extends => Null_Package_Info_Ref, Declares_Tagged_Type => False, Private_Children 
=> Child_Info'(First_Child => Null_Package_Info_Ref, Last_Child => Null_Package_Info_Ref), Public_Children => Child_Info'(First_Child => Null_Package_Info_Ref, Last_Child => Null_Package_Info_Ref), Is_Private => False, Generic_Unit => Null_Generic_Unit_Info_Ref); AddSymbol (Discriminant => Dictionary.Package_Symbol, Ref => Package_Info_Ref_To_RefType (The_Package), Comp_Unit => Comp_Unit, Loc => Loc, Item => The_Package_Symbol); The_Package.Self := The_Package_Symbol; The_Declaration.Item := The_Package_Symbol; exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Package"); end Create_Package; -------------------------------------------------------------------------------- procedure Set_Package_Body (The_Package : in Package_Info_Ref; The_Body : in Declaration_Info_Ref) is begin The_Package.Package_Body := The_Body; end Set_Package_Body; -------------------------------------------------------------------------------- procedure Set_Package_Has_Proper_Body (The_Package : in Package_Info_Ref) is begin The_Package.Package_Has_Proper_Body := True; end Set_Package_Has_Proper_Body; -------------------------------------------------------------------------------- procedure Set_Package_Inherit_Clauses (The_Package : in Package_Info_Ref; The_Context_Clause : in Context_Clause_Info_Ref) is begin The_Package.Inherit_Clauses := The_Context_Clause; end Set_Package_Inherit_Clauses; -------------------------------------------------------------------------------- procedure Set_Package_Own_Variables (The_Package : in Package_Info_Ref; Own_Variables : in Own_Variable_Info_Ref) is begin The_Package.Own_Variables := Own_Variables; end Set_Package_Own_Variables; -------------------------------------------------------------------------------- procedure Set_Package_Task_List (The_Package : in Package_Info_Ref; Task_List : in Dictionary.Symbol) is begin The_Package.Task_List := Task_List; end Set_Package_Task_List; -------------------------------------------------------------------------------- procedure Set_Package_First_Loop (The_Package : in Package_Info_Ref; The_Loop : in Dictionary.Symbol) is begin The_Package.First_Loop := The_Loop; end Set_Package_First_Loop; -------------------------------------------------------------------------------- procedure Set_Package_Last_Loop (The_Package : in Package_Info_Ref; The_Loop : in Dictionary.Symbol) is begin The_Package.Last_Loop := The_Loop; end Set_Package_Last_Loop; -------------------------------------------------------------------------------- procedure Set_Package_Visible_With_Clauses (The_Package : in Package_Info_Ref; The_Context_Clause : in Context_Clause_Info_Ref) is begin The_Package.Visible_Part.With_Clauses := The_Context_Clause; end Set_Package_Visible_With_Clauses; -------------------------------------------------------------------------------- procedure Set_Package_Visible_Use_Type_Clauses (The_Package : in Package_Info_Ref; The_Use_Type_Clause : in Use_Type_Clause_Info_Ref) is begin The_Package.Visible_Part.Use_Type_Clauses := The_Use_Type_Clause; end Set_Package_Visible_Use_Type_Clauses; -------------------------------------------------------------------------------- procedure Set_Package_Visible_Renaming_Declarations (The_Package : in Package_Info_Ref; The_Declaration : in Declaration_Info_Ref) is begin The_Package.Visible_Part.Renaming_Declarations := The_Declaration; end Set_Package_Visible_Renaming_Declarations; 
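--------------------------------------------------------------------------------
-- Illustrative sketch only (editorial addition, not part of the original
-- source): a typical caller first creates the package entry and then links it
-- into the symbol table with the setters in this section.  Pack_Name,
-- Spec_Declaration, Comp_Unit, Node_Position, Parent_Package and
-- Body_Declaration are assumed to be available at the call site.
--
--    RawDict.Create_Package (Name            => Pack_Name,
--                            The_Declaration => Spec_Declaration,
--                            Comp_Unit       => Comp_Unit,
--                            Loc             => Node_Position,
--                            The_Package     => The_Package);
--    RawDict.Set_Package_Parent (The_Package => The_Package,
--                                The_Parent  => Parent_Package);
--    RawDict.Set_Package_Body   (The_Package => The_Package,
--                                The_Body    => Body_Declaration);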
-------------------------------------------------------------------------------- procedure Set_Package_First_Visible_Declaration (The_Package : in Package_Info_Ref; The_Declaration : in Declaration_Info_Ref) is begin The_Package.Visible_Part.First_Declaration := The_Declaration; end Set_Package_First_Visible_Declaration; -------------------------------------------------------------------------------- procedure Set_Package_Last_Visible_Declaration (The_Package : in Package_Info_Ref; The_Declaration : in Declaration_Info_Ref) is begin The_Package.Visible_Part.Last_Declaration := The_Declaration; end Set_Package_Last_Visible_Declaration; -------------------------------------------------------------------------------- procedure Set_Package_Local_With_Clauses (The_Package : in Package_Info_Ref; The_Context_Clause : in Context_Clause_Info_Ref) is begin The_Package.Local_Part.With_Clauses := The_Context_Clause; end Set_Package_Local_With_Clauses; -------------------------------------------------------------------------------- procedure Set_Package_Local_Use_Type_Clauses (The_Package : in Package_Info_Ref; The_Use_Type_Clause : in Use_Type_Clause_Info_Ref) is begin The_Package.Local_Part.Use_Type_Clauses := The_Use_Type_Clause; end Set_Package_Local_Use_Type_Clauses; -------------------------------------------------------------------------------- procedure Set_Package_Local_Renaming_Declarations (The_Package : in Package_Info_Ref; The_Declaration : in Declaration_Info_Ref) is begin The_Package.Local_Part.Renaming_Declarations := The_Declaration; end Set_Package_Local_Renaming_Declarations; -------------------------------------------------------------------------------- procedure Set_Package_First_Local_Declaration (The_Package : in Package_Info_Ref; The_Declaration : in Declaration_Info_Ref) is begin The_Package.Local_Part.First_Declaration := The_Declaration; end Set_Package_First_Local_Declaration; -------------------------------------------------------------------------------- procedure Set_Package_Last_Local_Declaration (The_Package : in Package_Info_Ref; The_Declaration : in Declaration_Info_Ref) is begin The_Package.Local_Part.Last_Declaration := The_Declaration; end Set_Package_Last_Local_Declaration; -------------------------------------------------------------------------------- procedure Set_Package_First_Private_Declaration (The_Package : in Package_Info_Ref; The_Declaration : in Declaration_Info_Ref) is begin The_Package.Private_Part.First_Declaration := The_Declaration; end Set_Package_First_Private_Declaration; -------------------------------------------------------------------------------- procedure Set_Package_Last_Private_Declaration (The_Package : in Package_Info_Ref; The_Declaration : in Declaration_Info_Ref) is begin The_Package.Private_Part.Last_Declaration := The_Declaration; end Set_Package_Last_Private_Declaration; -------------------------------------------------------------------------------- procedure Set_Package_Elaborate_Body_Found (The_Package : in Package_Info_Ref) is begin The_Package.Elaborate_Body_Found := True; end Set_Package_Elaborate_Body_Found; -------------------------------------------------------------------------------- procedure Set_Package_Parent (The_Package : in Package_Info_Ref; The_Parent : in Package_Info_Ref) is begin The_Package.Parent := The_Parent; end Set_Package_Parent; -------------------------------------------------------------------------------- procedure Set_Package_Next_Sibling (The_Package : in Package_Info_Ref; The_Sibling : in 
Package_Info_Ref) is begin The_Package.Next_Sibling := The_Sibling; end Set_Package_Next_Sibling; -------------------------------------------------------------------------------- procedure Set_Package_Extends (The_Package : in Package_Info_Ref; The_Extends : in Package_Info_Ref) is begin The_Package.Extends := The_Extends; end Set_Package_Extends; -------------------------------------------------------------------------------- procedure Set_Package_Declares_Tagged_Type (The_Package : in Package_Info_Ref) is begin The_Package.Declares_Tagged_Type := True; end Set_Package_Declares_Tagged_Type; -------------------------------------------------------------------------------- procedure Set_Package_First_Private_Child (The_Package : in Package_Info_Ref; The_Child : in Package_Info_Ref) is begin The_Package.Private_Children.First_Child := The_Child; end Set_Package_First_Private_Child; -------------------------------------------------------------------------------- procedure Set_Package_Last_Private_Child (The_Package : in Package_Info_Ref; The_Child : in Package_Info_Ref) is begin The_Package.Private_Children.Last_Child := The_Child; end Set_Package_Last_Private_Child; -------------------------------------------------------------------------------- procedure Set_Package_First_Public_Child (The_Package : in Package_Info_Ref; The_Child : in Package_Info_Ref) is begin The_Package.Public_Children.First_Child := The_Child; end Set_Package_First_Public_Child; -------------------------------------------------------------------------------- procedure Set_Package_Last_Public_Child (The_Package : in Package_Info_Ref; The_Child : in Package_Info_Ref) is begin The_Package.Public_Children.Last_Child := The_Child; end Set_Package_Last_Public_Child; -------------------------------------------------------------------------------- procedure Set_Package_Is_Private (The_Package : in Package_Info_Ref) is begin The_Package.Is_Private := True; end Set_Package_Is_Private; -------------------------------------------------------------------------------- procedure Set_Package_Generic_Unit (The_Package : in Package_Info_Ref; The_Generic_Unit : in Generic_Unit_Info_Ref) is begin The_Package.Generic_Unit := The_Generic_Unit; end Set_Package_Generic_Unit; -------------------------------------------------------------------------------- function Get_Package_Symbol (The_Package : Package_Info_Ref) return Dictionary.Symbol is begin if The_Package = Null_Package_Info_Ref then return Dictionary.NullSymbol; else return The_Package.Self; end if; end Get_Package_Symbol; -------------------------------------------------------------------------------- function Get_Package_Name (The_Package : Package_Info_Ref) return LexTokenManager.Lex_String is begin if The_Package = Null_Package_Info_Ref then return LexTokenManager.Null_String; else return The_Package.Name; end if; end Get_Package_Name; -------------------------------------------------------------------------------- function Get_Package_Body (The_Package : Package_Info_Ref) return Declaration_Info_Ref is begin if The_Package = Null_Package_Info_Ref then return Null_Declaration_Info_Ref; else return The_Package.Package_Body; end if; end Get_Package_Body; -------------------------------------------------------------------------------- function Get_Package_Has_Proper_Body (The_Package : Package_Info_Ref) return Boolean is begin if The_Package = Null_Package_Info_Ref then return False; else return The_Package.Package_Has_Proper_Body; end if; end Get_Package_Has_Proper_Body; 
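--------------------------------------------------------------------------------
-- Note (editorial sketch, not original source): the Get_Package_* functions in
-- this section all follow the same defensive pattern, returning the matching
-- null reference when handed Null_Package_Info_Ref.  Queries can therefore be
-- chained without an explicit null test, for example
--
--    if RawDict.Get_Package_Is_Private
--         (The_Package => RawDict.Get_Package_Parent (The_Package => P))
--    then
--       ...
--    end if;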
-------------------------------------------------------------------------------- function Get_Package_Inherit_Clauses (The_Package : Package_Info_Ref) return Context_Clause_Info_Ref is begin if The_Package = Null_Package_Info_Ref then return Null_Context_Clause_Info_Ref; else return The_Package.Inherit_Clauses; end if; end Get_Package_Inherit_Clauses; -------------------------------------------------------------------------------- function Get_Package_Own_Variables (The_Package : Package_Info_Ref) return Own_Variable_Info_Ref is begin if The_Package = Null_Package_Info_Ref then return Null_Own_Variable_Info_Ref; else return The_Package.Own_Variables; end if; end Get_Package_Own_Variables; -------------------------------------------------------------------------------- function Get_Package_Task_List (The_Package : Package_Info_Ref) return Dictionary.Symbol is begin if The_Package = Null_Package_Info_Ref then return Dictionary.NullSymbol; else return The_Package.Task_List; end if; end Get_Package_Task_List; -------------------------------------------------------------------------------- function Get_Package_First_Loop (The_Package : Package_Info_Ref) return Dictionary.Symbol is begin if The_Package = Null_Package_Info_Ref then return Dictionary.NullSymbol; else return The_Package.First_Loop; end if; end Get_Package_First_Loop; -------------------------------------------------------------------------------- function Get_Package_Last_Loop (The_Package : Package_Info_Ref) return Dictionary.Symbol is begin if The_Package = Null_Package_Info_Ref then return Dictionary.NullSymbol; else return The_Package.Last_Loop; end if; end Get_Package_Last_Loop; -------------------------------------------------------------------------------- function Get_Package_Specification (The_Package : Package_Info_Ref) return Declaration_Info_Ref is begin if The_Package = Null_Package_Info_Ref then return Null_Declaration_Info_Ref; else return The_Package.Specification; end if; end Get_Package_Specification; -------------------------------------------------------------------------------- function Get_Package_Visible_With_Clauses (The_Package : Package_Info_Ref) return Context_Clause_Info_Ref is begin if The_Package = Null_Package_Info_Ref then return Null_Context_Clause_Info_Ref; else return The_Package.Visible_Part.With_Clauses; end if; end Get_Package_Visible_With_Clauses; -------------------------------------------------------------------------------- function Get_Package_Visible_Use_Type_Clauses (The_Package : Package_Info_Ref) return Use_Type_Clause_Info_Ref is begin if The_Package = Null_Package_Info_Ref then return Null_Use_Type_Clause_Info_Ref; else return The_Package.Visible_Part.Use_Type_Clauses; end if; end Get_Package_Visible_Use_Type_Clauses; -------------------------------------------------------------------------------- function Get_Package_Visible_Renaming_Declarations (The_Package : Package_Info_Ref) return Declaration_Info_Ref is begin if The_Package = Null_Package_Info_Ref then return Null_Declaration_Info_Ref; else return The_Package.Visible_Part.Renaming_Declarations; end if; end Get_Package_Visible_Renaming_Declarations; -------------------------------------------------------------------------------- function Get_Package_First_Visible_Declaration (The_Package : Package_Info_Ref) return Declaration_Info_Ref is begin if The_Package = Null_Package_Info_Ref then return Null_Declaration_Info_Ref; else return The_Package.Visible_Part.First_Declaration; end if; end Get_Package_First_Visible_Declaration; 
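--------------------------------------------------------------------------------
-- Illustrative sketch (assumed usage, editorial addition): the child links
-- maintained by Set_Package_First_Public_Child and Set_Package_Next_Sibling
-- form a singly linked list that a caller would walk as follows, with Root
-- and Child assumed to be declared at the call site.
--
--    Child := RawDict.Get_Package_First_Public_Child (The_Package => Root);
--    while Child /= Null_Package_Info_Ref loop
--       --  process Child here
--       Child := RawDict.Get_Package_Next_Sibling (The_Package => Child);
--    end loop;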
-------------------------------------------------------------------------------- function Get_Package_Last_Visible_Declaration (The_Package : Package_Info_Ref) return Declaration_Info_Ref is begin if The_Package = Null_Package_Info_Ref then return Null_Declaration_Info_Ref; else return The_Package.Visible_Part.Last_Declaration; end if; end Get_Package_Last_Visible_Declaration; -------------------------------------------------------------------------------- function Get_Package_Local_With_Clauses (The_Package : Package_Info_Ref) return Context_Clause_Info_Ref is begin if The_Package = Null_Package_Info_Ref then return Null_Context_Clause_Info_Ref; else return The_Package.Local_Part.With_Clauses; end if; end Get_Package_Local_With_Clauses; -------------------------------------------------------------------------------- function Get_Package_Local_Use_Type_Clauses (The_Package : Package_Info_Ref) return Use_Type_Clause_Info_Ref is begin if The_Package = Null_Package_Info_Ref then return Null_Use_Type_Clause_Info_Ref; else return The_Package.Local_Part.Use_Type_Clauses; end if; end Get_Package_Local_Use_Type_Clauses; -------------------------------------------------------------------------------- function Get_Package_Local_Renaming_Declarations (The_Package : Package_Info_Ref) return Declaration_Info_Ref is begin if The_Package = Null_Package_Info_Ref then return Null_Declaration_Info_Ref; else return The_Package.Local_Part.Renaming_Declarations; end if; end Get_Package_Local_Renaming_Declarations; -------------------------------------------------------------------------------- function Get_Package_First_Local_Declaration (The_Package : Package_Info_Ref) return Declaration_Info_Ref is begin if The_Package = Null_Package_Info_Ref then return Null_Declaration_Info_Ref; else return The_Package.Local_Part.First_Declaration; end if; end Get_Package_First_Local_Declaration; -------------------------------------------------------------------------------- function Get_Package_Last_Local_Declaration (The_Package : Package_Info_Ref) return Declaration_Info_Ref is begin if The_Package = Null_Package_Info_Ref then return Null_Declaration_Info_Ref; else return The_Package.Local_Part.Last_Declaration; end if; end Get_Package_Last_Local_Declaration; -------------------------------------------------------------------------------- function Get_Package_First_Private_Declaration (The_Package : Package_Info_Ref) return Declaration_Info_Ref is begin if The_Package = Null_Package_Info_Ref then return Null_Declaration_Info_Ref; else return The_Package.Private_Part.First_Declaration; end if; end Get_Package_First_Private_Declaration; -------------------------------------------------------------------------------- function Get_Package_Last_Private_Declaration (The_Package : Package_Info_Ref) return Declaration_Info_Ref is begin if The_Package = Null_Package_Info_Ref then return Null_Declaration_Info_Ref; else return The_Package.Private_Part.Last_Declaration; end if; end Get_Package_Last_Private_Declaration; -------------------------------------------------------------------------------- function Get_Package_Elaborate_Body_Found (The_Package : Package_Info_Ref) return Boolean is begin if The_Package = Null_Package_Info_Ref then return False; else return The_Package.Elaborate_Body_Found; end if; end Get_Package_Elaborate_Body_Found; -------------------------------------------------------------------------------- function Get_Package_Parent (The_Package : Package_Info_Ref) return Package_Info_Ref is begin if The_Package = 
Null_Package_Info_Ref then return Null_Package_Info_Ref; else return The_Package.Parent; end if; end Get_Package_Parent; -------------------------------------------------------------------------------- function Get_Package_Next_Sibling (The_Package : Package_Info_Ref) return Package_Info_Ref is begin if The_Package = Null_Package_Info_Ref then return Null_Package_Info_Ref; else return The_Package.Next_Sibling; end if; end Get_Package_Next_Sibling; -------------------------------------------------------------------------------- function Get_Package_Extends (The_Package : Package_Info_Ref) return Package_Info_Ref is begin if The_Package = Null_Package_Info_Ref then return Null_Package_Info_Ref; else return The_Package.Extends; end if; end Get_Package_Extends; -------------------------------------------------------------------------------- function Get_Package_Declares_Tagged_Type (The_Package : Package_Info_Ref) return Boolean is begin if The_Package = Null_Package_Info_Ref then return False; else return The_Package.Declares_Tagged_Type; end if; end Get_Package_Declares_Tagged_Type; -------------------------------------------------------------------------------- function Get_Package_First_Private_Child (The_Package : Package_Info_Ref) return Package_Info_Ref is begin if The_Package = Null_Package_Info_Ref then return Null_Package_Info_Ref; else return The_Package.Private_Children.First_Child; end if; end Get_Package_First_Private_Child; -------------------------------------------------------------------------------- function Get_Package_Last_Private_Child (The_Package : Package_Info_Ref) return Package_Info_Ref is begin if The_Package = Null_Package_Info_Ref then return Null_Package_Info_Ref; else return The_Package.Private_Children.Last_Child; end if; end Get_Package_Last_Private_Child; -------------------------------------------------------------------------------- function Get_Package_First_Public_Child (The_Package : Package_Info_Ref) return Package_Info_Ref is begin if The_Package = Null_Package_Info_Ref then return Null_Package_Info_Ref; else return The_Package.Public_Children.First_Child; end if; end Get_Package_First_Public_Child; -------------------------------------------------------------------------------- function Get_Package_Last_Public_Child (The_Package : Package_Info_Ref) return Package_Info_Ref is begin if The_Package = Null_Package_Info_Ref then return Null_Package_Info_Ref; else return The_Package.Public_Children.Last_Child; end if; end Get_Package_Last_Public_Child; -------------------------------------------------------------------------------- function Get_Package_Is_Private (The_Package : Package_Info_Ref) return Boolean is begin if The_Package = Null_Package_Info_Ref then return False; else return The_Package.Is_Private; end if; end Get_Package_Is_Private; -------------------------------------------------------------------------------- function Get_Package_Generic_Unit (The_Package : Package_Info_Ref) return Generic_Unit_Info_Ref is begin if The_Package = Null_Package_Info_Ref then return Null_Generic_Unit_Info_Ref; else return The_Package.Generic_Unit; end if; end Get_Package_Generic_Unit; -------------------------------------------------------------------------------- -- Generic_Parameter_Info -------------------------------------------------------------------------------- function Get_Generic_Parameter_Info_Ref (Item : Dictionary.Symbol) return Generic_Parameter_Info_Ref is function RefType_To_Generic_Parameter_Info_Ref is new Unchecked_Conversion 
(Dictionary.Ref_Type, Generic_Parameter_Info_Ref); begin if Item = Dictionary.NullSymbol then return Null_Generic_Parameter_Info_Ref; else if GetSymbolDiscriminant (Item) /= Dictionary.Generic_Parameter_Symbol then DiscriminantDebug ("Get_Generic_Parameter_Info_Ref", GetSymbolDiscriminant (Item), Dictionary.Generic_Parameter_Symbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Get_Generic_Parameter_Info_Ref"); end if; return RefType_To_Generic_Parameter_Info_Ref (GetSymbolRef (Item)); end if; end Get_Generic_Parameter_Info_Ref; -------------------------------------------------------------------------------- procedure Create_Generic_Parameter (Owning_Generic : in Generic_Unit_Info_Ref; Type_Mark : in Type_Info_Ref; Object : in Constant_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Generic_Parameter : out Generic_Parameter_Info_Ref) is The_Generic_Parameter_Symbol : Dictionary.Symbol; function Generic_Parameter_Info_Ref_To_RefType is new Unchecked_Conversion (Generic_Parameter_Info_Ref, Dictionary.Ref_Type); begin if Type_Mark /= Null_Type_Info_Ref then The_Generic_Parameter := new Generic_Parameter_Info' (Self => Dictionary.NullSymbol, Owning_Generic => Owning_Generic, Kind => Dictionary.Generic_Type_Parameter, Type_Mark => Type_Mark, Next => Null_Generic_Parameter_Info_Ref); elsif Object /= Null_Constant_Info_Ref then The_Generic_Parameter := new Generic_Parameter_Info' (Self => Dictionary.NullSymbol, Owning_Generic => Owning_Generic, Kind => Dictionary.Generic_Object_Parameter, Object => Object, Next => Null_Generic_Parameter_Info_Ref); else The_Generic_Parameter := Null_Generic_Parameter_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Create_Generic_Parameter"); end if; AddSymbol (Discriminant => Dictionary.Generic_Parameter_Symbol, Ref => Generic_Parameter_Info_Ref_To_RefType (The_Generic_Parameter), Comp_Unit => Comp_Unit, Loc => Loc, Item => The_Generic_Parameter_Symbol); The_Generic_Parameter.Self := The_Generic_Parameter_Symbol; exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Generic_Parameter"); end Create_Generic_Parameter; -------------------------------------------------------------------------------- procedure Set_Next_Generic_Parameter (The_Generic_Parameter, Next : in Generic_Parameter_Info_Ref) is begin The_Generic_Parameter.Next := Next; end Set_Next_Generic_Parameter; -------------------------------------------------------------------------------- function Get_Generic_Parameter_Symbol (The_Generic_Parameter : Generic_Parameter_Info_Ref) return Dictionary.Symbol is begin if The_Generic_Parameter = Null_Generic_Parameter_Info_Ref then return Dictionary.NullSymbol; else return The_Generic_Parameter.Self; end if; end Get_Generic_Parameter_Symbol; -------------------------------------------------------------------------------- function Get_Generic_Parameter_Owning_Generic (The_Generic_Parameter : Generic_Parameter_Info_Ref) return Generic_Unit_Info_Ref is begin if The_Generic_Parameter = Null_Generic_Parameter_Info_Ref then return Null_Generic_Unit_Info_Ref; else return The_Generic_Parameter.Owning_Generic; end if; end Get_Generic_Parameter_Owning_Generic; -------------------------------------------------------------------------------- function Get_Generic_Parameter_Kind (The_Generic_Parameter : 
Generic_Parameter_Info_Ref) return Dictionary.Generic_Parameter_Kind is begin return The_Generic_Parameter.Kind; end Get_Generic_Parameter_Kind; -------------------------------------------------------------------------------- function Get_Generic_Parameter_Type (The_Generic_Parameter : Generic_Parameter_Info_Ref) return Type_Info_Ref is begin if The_Generic_Parameter = Null_Generic_Parameter_Info_Ref then return Null_Type_Info_Ref; else return The_Generic_Parameter.Type_Mark; end if; end Get_Generic_Parameter_Type; -------------------------------------------------------------------------------- function Get_Generic_Parameter_Object (The_Generic_Parameter : Generic_Parameter_Info_Ref) return Constant_Info_Ref is begin if The_Generic_Parameter = Null_Generic_Parameter_Info_Ref then return Null_Constant_Info_Ref; else return The_Generic_Parameter.Object; end if; end Get_Generic_Parameter_Object; -------------------------------------------------------------------------------- function Get_Next_Generic_Parameter (The_Generic_Parameter : Generic_Parameter_Info_Ref) return Generic_Parameter_Info_Ref is begin if The_Generic_Parameter = Null_Generic_Parameter_Info_Ref then return Null_Generic_Parameter_Info_Ref; else return The_Generic_Parameter.Next; end if; end Get_Next_Generic_Parameter; -------------------------------------------------------------------------------- -- Generic_Unit_Info -------------------------------------------------------------------------------- function Get_Generic_Unit_Info_Ref (Item : Dictionary.Symbol) return Generic_Unit_Info_Ref is function RefType_To_Generic_Unit_Info_Ref is new Unchecked_Conversion (Dictionary.Ref_Type, Generic_Unit_Info_Ref); begin if Item = Dictionary.NullSymbol then return Null_Generic_Unit_Info_Ref; else if GetSymbolDiscriminant (Item) /= Dictionary.Generic_Unit_Symbol then DiscriminantDebug ("Get_Generic_Unit_Info_Ref", GetSymbolDiscriminant (Item), Dictionary.Generic_Unit_Symbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "Dictionary.RawDict.Get_Generic_Unit_Info_Ref"); end if; return RefType_To_Generic_Unit_Info_Ref (GetSymbolRef (Item)); end if; end Get_Generic_Unit_Info_Ref; -------------------------------------------------------------------------------- procedure Create_Generic_Unit (Kind : in Dictionary.Generic_Kind; Scope : in Dictionary.Scopes; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Generic_Unit : out Generic_Unit_Info_Ref) is The_Generic_Unit_Symbol : Dictionary.Symbol; function Generic_Unit_Info_Ref_To_RefType is new Unchecked_Conversion (Generic_Unit_Info_Ref, Dictionary.Ref_Type); begin case Kind is when Dictionary.Generic_Of_Subprogram => The_Generic_Unit := new Generic_Unit_Info' (Self => Dictionary.NullSymbol, Scope => Scope, First_Declaration => Null_Declaration_Info_Ref, Last_Declaration => Null_Declaration_Info_Ref, First_Generic_Parameter => Null_Generic_Parameter_Info_Ref, Last_Generic_Parameter => Null_Generic_Parameter_Info_Ref, Kind => Dictionary.Generic_Of_Subprogram, Owning_Subprogram => Null_Subprogram_Info_Ref); when Dictionary.Generic_Of_Package => The_Generic_Unit := new Generic_Unit_Info' (Self => Dictionary.NullSymbol, Scope => Scope, First_Declaration => Null_Declaration_Info_Ref, Last_Declaration => Null_Declaration_Info_Ref, First_Generic_Parameter => Null_Generic_Parameter_Info_Ref, Last_Generic_Parameter => Null_Generic_Parameter_Info_Ref, Kind => Dictionary.Generic_Of_Package, Owning_Package => Null_Package_Info_Ref); 
end case; AddSymbol (Discriminant => Dictionary.Generic_Unit_Symbol, Ref => Generic_Unit_Info_Ref_To_RefType (The_Generic_Unit), Comp_Unit => Comp_Unit, Loc => Loc, Item => The_Generic_Unit_Symbol); The_Generic_Unit.Self := The_Generic_Unit_Symbol; exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Generic_Unit"); end Create_Generic_Unit; -------------------------------------------------------------------------------- procedure Set_Generic_Unit_First_Declaration (The_Generic_Unit : in Generic_Unit_Info_Ref; The_Declaration : in Declaration_Info_Ref) is begin The_Generic_Unit.First_Declaration := The_Declaration; end Set_Generic_Unit_First_Declaration; -------------------------------------------------------------------------------- procedure Set_Generic_Unit_Last_Declaration (The_Generic_Unit : in Generic_Unit_Info_Ref; The_Declaration : in Declaration_Info_Ref) is begin The_Generic_Unit.Last_Declaration := The_Declaration; end Set_Generic_Unit_Last_Declaration; -------------------------------------------------------------------------------- procedure Set_Generic_Unit_First_Generic_Parameter (The_Generic_Unit : in Generic_Unit_Info_Ref; The_Generic_Parameter : in Generic_Parameter_Info_Ref) is begin The_Generic_Unit.First_Generic_Parameter := The_Generic_Parameter; end Set_Generic_Unit_First_Generic_Parameter; -------------------------------------------------------------------------------- procedure Set_Generic_Unit_Last_Generic_Parameter (The_Generic_Unit : in Generic_Unit_Info_Ref; The_Generic_Parameter : in Generic_Parameter_Info_Ref) is begin The_Generic_Unit.Last_Generic_Parameter := The_Generic_Parameter; end Set_Generic_Unit_Last_Generic_Parameter; -------------------------------------------------------------------------------- procedure Set_Generic_Unit_Owning_Subprogram (The_Generic_Unit : in Generic_Unit_Info_Ref; The_Subprogram : in Subprogram_Info_Ref) is begin The_Generic_Unit.Owning_Subprogram := The_Subprogram; end Set_Generic_Unit_Owning_Subprogram; -------------------------------------------------------------------------------- procedure Set_Generic_Unit_Owning_Package (The_Generic_Unit : in Generic_Unit_Info_Ref; The_Package : in Package_Info_Ref) is begin The_Generic_Unit.Owning_Package := The_Package; end Set_Generic_Unit_Owning_Package; -------------------------------------------------------------------------------- function Get_Generic_Unit_Symbol (The_Generic_Unit : Generic_Unit_Info_Ref) return Dictionary.Symbol is begin if The_Generic_Unit = Null_Generic_Unit_Info_Ref then return Dictionary.NullSymbol; else return The_Generic_Unit.Self; end if; end Get_Generic_Unit_Symbol; -------------------------------------------------------------------------------- function Get_Generic_Unit_Scope (The_Generic_Unit : Generic_Unit_Info_Ref) return Dictionary.Scopes is begin if The_Generic_Unit = Null_Generic_Unit_Info_Ref then return Dictionary.NullScope; else return The_Generic_Unit.Scope; end if; end Get_Generic_Unit_Scope; -------------------------------------------------------------------------------- function Get_Generic_Unit_First_Declaration (The_Generic_Unit : Generic_Unit_Info_Ref) return Declaration_Info_Ref is begin if The_Generic_Unit = Null_Generic_Unit_Info_Ref then return Null_Declaration_Info_Ref; else return The_Generic_Unit.First_Declaration; end if; end Get_Generic_Unit_First_Declaration; -------------------------------------------------------------------------------- 
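-- Illustrative sketch (assumed usage, editorial addition): declaring a generic
-- package together with its first formal type parameter would combine the
-- constructors and setters above roughly as follows; The_Scope, Comp_Unit,
-- Node_Position, The_Package and Formal_Type are assumed to come from the
-- caller.
--
--    RawDict.Create_Generic_Unit (Kind             => Dictionary.Generic_Of_Package,
--                                 Scope            => The_Scope,
--                                 Comp_Unit        => Comp_Unit,
--                                 Loc              => Node_Position,
--                                 The_Generic_Unit => The_Generic_Unit);
--    RawDict.Set_Generic_Unit_Owning_Package (The_Generic_Unit => The_Generic_Unit,
--                                             The_Package      => The_Package);
--    RawDict.Set_Package_Generic_Unit (The_Package      => The_Package,
--                                      The_Generic_Unit => The_Generic_Unit);
--    RawDict.Create_Generic_Parameter (Owning_Generic        => The_Generic_Unit,
--                                      Type_Mark             => Formal_Type,
--                                      Object                => Null_Constant_Info_Ref,
--                                      Comp_Unit             => Comp_Unit,
--                                      Loc                   => Node_Position,
--                                      The_Generic_Parameter => The_Generic_Parameter);
--    RawDict.Set_Generic_Unit_First_Generic_Parameter
--      (The_Generic_Unit      => The_Generic_Unit,
--       The_Generic_Parameter => The_Generic_Parameter);
--------------------------------------------------------------------------------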
function Get_Generic_Unit_Last_Declaration (The_Generic_Unit : Generic_Unit_Info_Ref) return Declaration_Info_Ref is begin if The_Generic_Unit = Null_Generic_Unit_Info_Ref then return Null_Declaration_Info_Ref; else return The_Generic_Unit.Last_Declaration; end if; end Get_Generic_Unit_Last_Declaration; -------------------------------------------------------------------------------- function Get_Generic_Unit_First_Generic_Parameter (The_Generic_Unit : Generic_Unit_Info_Ref) return Generic_Parameter_Info_Ref is begin if The_Generic_Unit = Null_Generic_Unit_Info_Ref then return Null_Generic_Parameter_Info_Ref; else return The_Generic_Unit.First_Generic_Parameter; end if; end Get_Generic_Unit_First_Generic_Parameter; -------------------------------------------------------------------------------- function Get_Generic_Unit_Last_Generic_Parameter (The_Generic_Unit : Generic_Unit_Info_Ref) return Generic_Parameter_Info_Ref is begin if The_Generic_Unit = Null_Generic_Unit_Info_Ref then return Null_Generic_Parameter_Info_Ref; else return The_Generic_Unit.Last_Generic_Parameter; end if; end Get_Generic_Unit_Last_Generic_Parameter; -------------------------------------------------------------------------------- function Get_Generic_Unit_Kind (The_Generic_Unit : Generic_Unit_Info_Ref) return Dictionary.Generic_Kind is begin return The_Generic_Unit.Kind; end Get_Generic_Unit_Kind; -------------------------------------------------------------------------------- function Get_Generic_Unit_Owning_Subprogram (The_Generic_Unit : Generic_Unit_Info_Ref) return Subprogram_Info_Ref is begin if The_Generic_Unit = Null_Generic_Unit_Info_Ref then return Null_Subprogram_Info_Ref; else return The_Generic_Unit.Owning_Subprogram; end if; end Get_Generic_Unit_Owning_Subprogram; -------------------------------------------------------------------------------- function Get_Generic_Unit_Owning_Package (The_Generic_Unit : Generic_Unit_Info_Ref) return Package_Info_Ref is begin if The_Generic_Unit = Null_Generic_Unit_Info_Ref then return Null_Package_Info_Ref; else return The_Generic_Unit.Owning_Package; end if; end Get_Generic_Unit_Owning_Package; -------------------------------------------------------------------------------- -- Generic_Association_Info -------------------------------------------------------------------------------- procedure Create_Generic_Type_Association (Formal_Type : in Type_Info_Ref; Actual_Type : in Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Generic_Association : out Generic_Association_Info_Ref) is The_Generic_Association_Symbol : Dictionary.Symbol; pragma Unreferenced (The_Generic_Association_Symbol); function Generic_Association_Info_Ref_To_RefType is new Unchecked_Conversion (Generic_Association_Info_Ref, Dictionary.Ref_Type); begin The_Generic_Association := new Generic_Association_Info'(Is_Object => False, Formal_Type => Formal_Type, Actual_Type => Actual_Type, Next => Null_Generic_Association_Info_Ref); AddSymbol (Discriminant => Dictionary.Generic_Association_Symbol, Ref => Generic_Association_Info_Ref_To_RefType (The_Generic_Association), Comp_Unit => Comp_Unit, Loc => Loc, Item => The_Generic_Association_Symbol); exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Generic_Type_Association"); end Create_Generic_Type_Association; -------------------------------------------------------------------------------- procedure 
Create_Generic_Object_Association (Formal_Object : in Constant_Info_Ref; Actual_Object : in Constant_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; The_Generic_Association : out Generic_Association_Info_Ref) is The_Generic_Association_Symbol : Dictionary.Symbol; pragma Unreferenced (The_Generic_Association_Symbol); function Generic_Association_Info_Ref_To_RefType is new Unchecked_Conversion (Generic_Association_Info_Ref, Dictionary.Ref_Type); begin The_Generic_Association := new Generic_Association_Info'(Is_Object => True, Formal_Object => Formal_Object, Actual_Object => Actual_Object, Next => Null_Generic_Association_Info_Ref); AddSymbol (Discriminant => Dictionary.Generic_Association_Symbol, Ref => Generic_Association_Info_Ref_To_RefType (The_Generic_Association), Comp_Unit => Comp_Unit, Loc => Loc, Item => The_Generic_Association_Symbol); exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => "Dictionary.RawDict.Create_Generic_Object_Association"); end Create_Generic_Object_Association; -------------------------------------------------------------------------------- procedure Set_Next_Generic_Association (The_Generic_Association, Next : in Generic_Association_Info_Ref) is begin The_Generic_Association.Next := Next; end Set_Next_Generic_Association; -------------------------------------------------------------------------------- function Get_Generic_Association_Is_Object (The_Generic_Association : Generic_Association_Info_Ref) return Boolean is begin return The_Generic_Association.Is_Object; end Get_Generic_Association_Is_Object; -------------------------------------------------------------------------------- function Get_Generic_Association_Formal_Type (The_Generic_Association : Generic_Association_Info_Ref) return Type_Info_Ref is begin if The_Generic_Association = Null_Generic_Association_Info_Ref then return Null_Type_Info_Ref; else return The_Generic_Association.Formal_Type; end if; end Get_Generic_Association_Formal_Type; -------------------------------------------------------------------------------- function Get_Generic_Association_Formal_Object (The_Generic_Association : Generic_Association_Info_Ref) return Constant_Info_Ref is begin if The_Generic_Association = Null_Generic_Association_Info_Ref then return Null_Constant_Info_Ref; else return The_Generic_Association.Formal_Object; end if; end Get_Generic_Association_Formal_Object; -------------------------------------------------------------------------------- function Get_Generic_Association_Actual_Type (The_Generic_Association : Generic_Association_Info_Ref) return Type_Info_Ref is begin if The_Generic_Association = Null_Generic_Association_Info_Ref then return Null_Type_Info_Ref; else return The_Generic_Association.Actual_Type; end if; end Get_Generic_Association_Actual_Type; -------------------------------------------------------------------------------- function Get_Generic_Association_Actual_Object (The_Generic_Association : Generic_Association_Info_Ref) return Constant_Info_Ref is begin if The_Generic_Association = Null_Generic_Association_Info_Ref then return Null_Constant_Info_Ref; else return The_Generic_Association.Actual_Object; end if; end Get_Generic_Association_Actual_Object; -------------------------------------------------------------------------------- function Get_Next_Generic_Association (The_Generic_Association : Generic_Association_Info_Ref) return Generic_Association_Info_Ref is begin if 
The_Generic_Association = Null_Generic_Association_Info_Ref then return Null_Generic_Association_Info_Ref; else return The_Generic_Association.Next; end if; end Get_Next_Generic_Association; -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- -------------------------------------------------------------------------------- function GetKnownDiscriminantRef (Item : Dictionary.Symbol) return KnownDiscriminantRef is function RefTypeToKnownDiscriminantRef is new Unchecked_Conversion (Dictionary.Ref_Type, KnownDiscriminantRef); begin if GetSymbolDiscriminant (Item) /= Dictionary.KnownDiscriminantSymbol then DiscriminantDebug ("GetKnownDiscriminantRef", GetSymbolDiscriminant (Item), Dictionary.KnownDiscriminantSymbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => ""); end if; return RefTypeToKnownDiscriminantRef (GetSymbolRef (Item)); end GetKnownDiscriminantRef; -------------------------------------------------------------------------------- function GetSubtypeRef (Item : Dictionary.Symbol) return SubtypeRef is function RefTypeToSubtypeRef is new Unchecked_Conversion (Dictionary.Ref_Type, SubtypeRef); begin if GetSymbolDiscriminant (Item) /= Dictionary.SubtypeSymbol then DiscriminantDebug ("GetSubtypeRef", GetSymbolDiscriminant (Item), Dictionary.SubtypeSymbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => ""); end if; return RefTypeToSubtypeRef (GetSymbolRef (Item)); end GetSubtypeRef; -------------------------------------------------------------------------------- function GetDiscriminantConstraintRef (Item : Dictionary.Symbol) return DiscriminantConstraintRef is function RefTypeToDiscriminantConstraintRef is new Unchecked_Conversion (Dictionary.Ref_Type, DiscriminantConstraintRef); begin if GetSymbolDiscriminant (Item) /= Dictionary.DiscriminantConstraintSymbol then DiscriminantDebug ("GetDiscriminantConstraintRef", GetSymbolDiscriminant (Item), Dictionary.DiscriminantConstraintSymbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => ""); end if; return RefTypeToDiscriminantConstraintRef (GetSymbolRef (Item)); end GetDiscriminantConstraintRef; -------------------------------------------------------------------------------- function GetOwnTaskRef (Item : Dictionary.Symbol) return OwnTaskRef is function RefTypeToOwnTaskRef is new Unchecked_Conversion (Dictionary.Ref_Type, OwnTaskRef); begin if GetSymbolDiscriminant (Item) /= Dictionary.OwnTaskSymbol then DiscriminantDebug ("GetOwnTaskRef", GetSymbolDiscriminant (Item), Dictionary.OwnTaskSymbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => ""); end if; return RefTypeToOwnTaskRef (GetSymbolRef (Item)); end GetOwnTaskRef; -------------------------------------------------------------------------------- function GetImplicitProofFunctionRef (Item : Dictionary.Symbol) return Subprogram_Info_Ref is function Reftype_To_Subprogram_Info_Ref is new Unchecked_Conversion (Dictionary.Ref_Type, Subprogram_Info_Ref); begin if Item = Dictionary.NullSymbol then return Null_Subprogram_Info_Ref; else if GetSymbolDiscriminant (Item) /= Dictionary.ImplicitProofFunctionSymbol then DiscriminantDebug ("GetImplicitProofFunctionRef", GetSymbolDiscriminant (Item), Dictionary.ImplicitProofFunctionSymbol); SystemErrors.Fatal_Error (Sys_Err => 
SystemErrors.Invalid_Symbol_Table, Msg => ""); end if; return Reftype_To_Subprogram_Info_Ref (GetSymbolRef (Item)); end if; end GetImplicitProofFunctionRef; -------------------------------------------------------------------------------- function GetVirtualElementRef (Item : Dictionary.Symbol) return VirtualElementInfoRef is function RefTypeToVirtualElementRef is new Unchecked_Conversion (Dictionary.Ref_Type, VirtualElementInfoRef); begin if GetSymbolDiscriminant (Item) /= Dictionary.VirtualElementSymbol then DiscriminantDebug ("GetVirtualElementRef", GetSymbolDiscriminant (Item), Dictionary.VirtualElementSymbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => ""); end if; return RefTypeToVirtualElementRef (GetSymbolRef (Item)); end GetVirtualElementRef; -------------------------------------------------------------------------------- function GetSuspendsListItemRef (Item : Dictionary.Symbol) return SuspendsListItemInfoRef is function RefTypeToSuspendsListItemRef is new Unchecked_Conversion (Dictionary.Ref_Type, SuspendsListItemInfoRef); begin if GetSymbolDiscriminant (Item) /= Dictionary.SuspendsListItemSymbol then DiscriminantDebug ("GetSuspendsListItemRef", GetSymbolDiscriminant (Item), Dictionary.SuspendsListItemSymbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => ""); end if; return RefTypeToSuspendsListItemRef (GetSymbolRef (Item)); end GetSuspendsListItemRef; -------------------------------------------------------------------------------- function GetInterruptStreamMappingRef (Item : Dictionary.Symbol) return InterruptStreamMappingRef is function RefTypeToInterruptStreamMappingRef is new Unchecked_Conversion (Dictionary.Ref_Type, InterruptStreamMappingRef); begin if GetSymbolDiscriminant (Item) /= Dictionary.InterruptStreamMappingSymbol then DiscriminantDebug ("GetInterruptStreamMappingRef", GetSymbolDiscriminant (Item), Dictionary.InterruptStreamMappingSymbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => ""); end if; return RefTypeToInterruptStreamMappingRef (GetSymbolRef (Item)); end GetInterruptStreamMappingRef; -------------------------------------------------------------------------------- function GetLoopRef (Item : Dictionary.Symbol) return LoopRef is function RefTypeToLoopRef is new Unchecked_Conversion (Dictionary.Ref_Type, LoopRef); begin if GetSymbolDiscriminant (Item) /= Dictionary.LoopSymbol then DiscriminantDebug ("GetLoopRef", GetSymbolDiscriminant (Item), Dictionary.LoopSymbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => ""); end if; return RefTypeToLoopRef (GetSymbolRef (Item)); end GetLoopRef; -------------------------------------------------------------------------------- function GetLoopParameterRef (Item : Dictionary.Symbol) return LoopParameterRef is function RefTypeToLoopParameterRef is new Unchecked_Conversion (Dictionary.Ref_Type, LoopParameterRef); begin if GetSymbolDiscriminant (Item) /= Dictionary.LoopParameterSymbol then DiscriminantDebug ("GetLoopParameterRef", GetSymbolDiscriminant (Item), Dictionary.LoopParameterSymbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => ""); end if; return RefTypeToLoopParameterRef (GetSymbolRef (Item)); end GetLoopParameterRef; -------------------------------------------------------------------------------- function GetLoopEntryVariableRef (Item : Dictionary.Symbol) return LoopEntryVariableRef is function RefTypeToLoopEntryVariableRef is new 
Unchecked_Conversion (Dictionary.Ref_Type, LoopEntryVariableRef); begin if GetSymbolDiscriminant (Item) /= Dictionary.LoopEntryVariableSymbol then DiscriminantDebug ("GetLoopEntryVariableRef", GetSymbolDiscriminant (Item), Dictionary.LoopEntryVariableSymbol); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => ""); end if; return RefTypeToLoopEntryVariableRef (GetSymbolRef (Item)); end GetLoopEntryVariableRef; -------------------------------------------------------------------------------- procedure CreateProtectedInfo (Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; InfoSym : out Dictionary.Symbol) is NewProtected : ProtectedRef; function ProtectedRefToRefType is new Unchecked_Conversion (ProtectedRef, Dictionary.Ref_Type); begin NewProtected := new ProtectedInfo' (Own_Variable => Null_Own_Variable_Info_Ref, ElementsHidden => False, Visible_Part => Part_Info'(With_Clauses => Null_Context_Clause_Info_Ref, Use_Type_Clauses => Null_Use_Type_Clause_Info_Ref, Renaming_Declarations => Null_Declaration_Info_Ref, First_Declaration => Null_Declaration_Info_Ref, Last_Declaration => Null_Declaration_Info_Ref), Local_Part => Part_Info'(With_Clauses => Null_Context_Clause_Info_Ref, Use_Type_Clauses => Null_Use_Type_Clause_Info_Ref, Renaming_Declarations => Null_Declaration_Info_Ref, First_Declaration => Null_Declaration_Info_Ref, Last_Declaration => Null_Declaration_Info_Ref), Private_Part => Part_Info'(With_Clauses => Null_Context_Clause_Info_Ref, Use_Type_Clauses => Null_Use_Type_Clause_Info_Ref, Renaming_Declarations => Null_Declaration_Info_Ref, First_Declaration => Null_Declaration_Info_Ref, Last_Declaration => Null_Declaration_Info_Ref), TheEntry => Dictionary.NullSymbol, FirstDiscriminant => Dictionary.NullSymbol, LastDiscriminant => Dictionary.NullSymbol, Pragmas => PragmaLists'(Dictionary.Priority => Pragmas'(Given => False, Value => LexTokenManager.Null_String), Dictionary.InterruptPriority => Pragmas'(Given => False, Value => LexTokenManager.Null_String), Dictionary.AttachHandler => Pragmas'(Given => False, Value => LexTokenManager.Null_String)), Protected_Body => Null_Declaration_Info_Ref, Has_Proper_Body => False); AddSymbol (Discriminant => Dictionary.ProtectedInfoSymbol, Ref => ProtectedRefToRefType (NewProtected), Comp_Unit => Comp_Unit, Loc => Loc, Item => InfoSym); exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => ""); end CreateProtectedInfo; -------------------------------------------------------------------------------- procedure CreateTaskInfo (Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; InfoSym : out Dictionary.Symbol) is NewTask : TaskRef; function TaskRefToRefType is new Unchecked_Conversion (TaskRef, Dictionary.Ref_Type); begin NewTask := new TaskInfo' (Signature_Is_Wellformed => Booleans'(Dictionary.Abstractions => True), Has_Second_Annotation => False, Has_Derives_Annotation => False, Local_Part => Part_Info'(With_Clauses => Null_Context_Clause_Info_Ref, Use_Type_Clauses => Null_Use_Type_Clause_Info_Ref, Renaming_Declarations => Null_Declaration_Info_Ref, First_Declaration => Null_Declaration_Info_Ref, Last_Declaration => Null_Declaration_Info_Ref), FirstDiscriminant => Dictionary.NullSymbol, LastDiscriminant => Dictionary.NullSymbol, Pragmas => PragmaLists'(Dictionary.Priority => Pragmas'(Given => False, Value => LexTokenManager.Null_String), Dictionary.InterruptPriority => Pragmas'(Given => 
False, Value => LexTokenManager.Null_String), Dictionary.AttachHandler => Pragmas'(Given => False, Value => LexTokenManager.Null_String)), First_Global_Variable => Global_Variables_T'(Dictionary.Abstractions => Null_Global_Variable_Info_Ref), Last_Global_Variable => Global_Variables_T'(Dictionary.Abstractions => Null_Global_Variable_Info_Ref), Task_Body => Null_Declaration_Info_Ref, Has_Proper_Body => False, Body_Is_Hidden => False, Suspends_List => Dictionary.NullSymbol, First_Loop => Dictionary.NullSymbol, Last_Loop => Dictionary.NullSymbol, Uses_Unprotected_Variables => False, Uses_Unchecked_Conversion => False, Assigns_From_External => False); AddSymbol (Discriminant => Dictionary.TaskInfoSymbol, Ref => TaskRefToRefType (NewTask), Comp_Unit => Comp_Unit, Loc => Loc, Item => InfoSym); exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => ""); end CreateTaskInfo; procedure CreateOwnTask (Variable : in Variable_Info_Ref; Owner : in Package_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; OwnTask : out Dictionary.Symbol) is function OwnTaskRefToRefType is new Unchecked_Conversion (OwnTaskRef, Dictionary.Ref_Type); NewOwnTask : OwnTaskRef; begin NewOwnTask := new OwnTaskInfo'(Variable => Variable, Owner => Owner, Next => Dictionary.NullSymbol); AddSymbol (Discriminant => Dictionary.OwnTaskSymbol, Ref => OwnTaskRefToRefType (NewOwnTask), Comp_Unit => Comp_Unit, Loc => Loc, Item => OwnTask); exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => ""); end CreateOwnTask; procedure SetNextOwnTask (Current, Next : in Dictionary.Symbol) is begin GetOwnTaskRef (Current).Next := Next; end SetNextOwnTask; function GetNextOwnTask (Current : Dictionary.Symbol) return Dictionary.Symbol is begin return GetOwnTaskRef (Current).Next; end GetNextOwnTask; function GetOwnTaskVariable (OwnTask : Dictionary.Symbol) return Variable_Info_Ref is begin return GetOwnTaskRef (OwnTask).Variable; end GetOwnTaskVariable; function GetOwnTaskOwner (OwnTask : Dictionary.Symbol) return Package_Info_Ref is begin return GetOwnTaskRef (OwnTask).Owner; end GetOwnTaskOwner; -- subprograms -------------------------------------------------------------- procedure CreateImplicitProofFunction (Ada_Function : in Subprogram_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; ProofFunction : out Dictionary.Symbol) is function Subprogram_Info_Ref_To_RefType is new Unchecked_Conversion (Subprogram_Info_Ref, Dictionary.Ref_Type); begin AddSymbol (Discriminant => Dictionary.ImplicitProofFunctionSymbol, Ref => Subprogram_Info_Ref_To_RefType (Ada_Function), Comp_Unit => Comp_Unit, Loc => Loc, Item => ProofFunction); exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => ""); end CreateImplicitProofFunction; function GetImplicitProofFunctionAdaFunction (ProofFunction : Dictionary.Symbol) return Subprogram_Info_Ref is begin return GetImplicitProofFunctionRef (ProofFunction); end GetImplicitProofFunctionAdaFunction; -- discriminants ----------------------------------------------------------------------- procedure CreateKnownDiscriminant (Name : in LexTokenManager.Lex_String; Protected_Type : in Type_Info_Ref; Type_Mark : in Type_Info_Ref; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; KnownDiscriminant : out 
Dictionary.Symbol) is NewKnownDiscriminant : KnownDiscriminantRef; function KnownDiscriminantRefToRefType is new Unchecked_Conversion (KnownDiscriminantRef, Dictionary.Ref_Type); begin NewKnownDiscriminant := new KnownDiscriminantInfo' (Name => Name, Protected_Type => Protected_Type, Type_Mark => Type_Mark, SetsPriority => False, Next => Dictionary.NullSymbol); AddSymbol (Discriminant => Dictionary.KnownDiscriminantSymbol, Ref => KnownDiscriminantRefToRefType (NewKnownDiscriminant), Comp_Unit => Comp_Unit, Loc => Loc, Item => KnownDiscriminant); exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => ""); end CreateKnownDiscriminant; procedure SetNextDiscriminant (Current, Next : in Dictionary.Symbol) is begin GetKnownDiscriminantRef (Current).Next := Next; end SetNextDiscriminant; procedure SetDiscriminantSetsPriority (Discriminant : in Dictionary.Symbol) is begin GetKnownDiscriminantRef (Discriminant).SetsPriority := True; end SetDiscriminantSetsPriority; function GetNextDiscriminant (Discriminant : Dictionary.Symbol) return Dictionary.Symbol is begin return GetKnownDiscriminantRef (Discriminant).Next; end GetNextDiscriminant; function GetDiscriminantName (Discriminant : Dictionary.Symbol) return LexTokenManager.Lex_String is begin return GetKnownDiscriminantRef (Discriminant).Name; end GetDiscriminantName; function GetDiscriminantTypeMark (Discriminant : Dictionary.Symbol) return Type_Info_Ref is begin return GetKnownDiscriminantRef (Discriminant).Type_Mark; end GetDiscriminantTypeMark; function GetDiscriminantProtectedType (Discriminant : Dictionary.Symbol) return Type_Info_Ref is begin return GetKnownDiscriminantRef (Discriminant).Protected_Type; end GetDiscriminantProtectedType; function GetDiscriminantSetsPriority (Discriminant : Dictionary.Symbol) return Boolean is begin return GetKnownDiscriminantRef (Discriminant).SetsPriority; end GetDiscriminantSetsPriority; -- note the following creates a record for extra info about task and protected subtypes, not subtypes in general procedure CreateSubtype (Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; TheSubtype : out Dictionary.Symbol) is NewSubtype : SubtypeRef; function SubtypeRefToRefType is new Unchecked_Conversion (SubtypeRef, Dictionary.Ref_Type); begin NewSubtype := new SubtypeInfo' (Priority => LexTokenManager.Null_String, FirstConstraint => Dictionary.NullSymbol, LastConstraint => Dictionary.NullSymbol); AddSymbol (Discriminant => Dictionary.SubtypeSymbol, Ref => SubtypeRefToRefType (NewSubtype), Comp_Unit => Comp_Unit, Loc => Loc, Item => TheSubtype); exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => ""); end CreateSubtype; procedure SetSubtypeInfoPriority (TheSubtype : in Dictionary.Symbol; Priority : in LexTokenManager.Lex_String) is begin GetSubtypeRef (TheSubtype).Priority := Priority; end SetSubtypeInfoPriority; procedure SetSubtypeInfoFirstConstraint (TheSubtype : in Dictionary.Symbol; TheConstraint : in Dictionary.Symbol) is begin GetSubtypeRef (TheSubtype).FirstConstraint := TheConstraint; end SetSubtypeInfoFirstConstraint; procedure SetSubtypeInfoLastConstraint (TheSubtype : in Dictionary.Symbol; TheConstraint : in Dictionary.Symbol) is begin GetSubtypeRef (TheSubtype).LastConstraint := TheConstraint; end SetSubtypeInfoLastConstraint; function GetSubtypeInfoPriority (TheSubtype : Dictionary.Symbol) return LexTokenManager.Lex_String is begin return 
GetSubtypeRef (TheSubtype).Priority; end GetSubtypeInfoPriority; function GetSubtypeInfoFirstConstraint (TheSubtype : Dictionary.Symbol) return Dictionary.Symbol is begin return GetSubtypeRef (TheSubtype).FirstConstraint; end GetSubtypeInfoFirstConstraint; function GetSubtypeInfoLastConstraint (TheSubtype : Dictionary.Symbol) return Dictionary.Symbol is begin return GetSubtypeRef (TheSubtype).LastConstraint; end GetSubtypeInfoLastConstraint; procedure CreateDiscriminantConstraint (Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; TheDiscriminantConstraint : out Dictionary.Symbol) is NewDiscriminantConstraint : DiscriminantConstraintRef; function DiscriminantConstraintRefToRefType is new Unchecked_Conversion (DiscriminantConstraintRef, Dictionary.Ref_Type); begin NewDiscriminantConstraint := new DiscriminantConstraintInfo' (StaticValue => LexTokenManager.Null_String, AccessedObject => Dictionary.NullSymbol, Next => Dictionary.NullSymbol); AddSymbol (Discriminant => Dictionary.DiscriminantConstraintSymbol, Ref => DiscriminantConstraintRefToRefType (NewDiscriminantConstraint), Comp_Unit => Comp_Unit, Loc => Loc, Item => TheDiscriminantConstraint); exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => ""); end CreateDiscriminantConstraint; procedure SetDiscriminantConstraintStaticValue (TheConstraint : in Dictionary.Symbol; TheValue : in LexTokenManager.Lex_String) is begin GetDiscriminantConstraintRef (TheConstraint).StaticValue := TheValue; end SetDiscriminantConstraintStaticValue; procedure SetDiscriminantConstraintAccessedObject (TheConstraint : in Dictionary.Symbol; TheObject : in Dictionary.Symbol) is begin GetDiscriminantConstraintRef (TheConstraint).AccessedObject := TheObject; end SetDiscriminantConstraintAccessedObject; procedure SetNextDiscriminantConstraint (TheConstraint : in Dictionary.Symbol; Next : in Dictionary.Symbol) is begin GetDiscriminantConstraintRef (TheConstraint).Next := Next; end SetNextDiscriminantConstraint; function GetDiscriminantConstraintStaticValue (TheConstraint : Dictionary.Symbol) return LexTokenManager.Lex_String is begin return GetDiscriminantConstraintRef (TheConstraint).StaticValue; end GetDiscriminantConstraintStaticValue; function GetDiscriminantConstraintAccessedObject (TheConstraint : Dictionary.Symbol) return Dictionary.Symbol is begin return GetDiscriminantConstraintRef (TheConstraint).AccessedObject; end GetDiscriminantConstraintAccessedObject; function GetNextDiscriminantConstraint (TheConstraint : Dictionary.Symbol) return Dictionary.Symbol is begin return GetDiscriminantConstraintRef (TheConstraint).Next; end GetNextDiscriminantConstraint; procedure CreateInterruptStreamMapping (TheHandler : in LexTokenManager.Lex_String; TheInterruptStream : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; TheMapping : out Dictionary.Symbol) is NewMapping : InterruptStreamMappingRef; function InterruptStreamMappingRefToRefType is new Unchecked_Conversion (InterruptStreamMappingRef, Dictionary.Ref_Type); begin NewMapping := new InterruptStreamMapping' (TheHandler => TheHandler, TheInterruptStream => TheInterruptStream, Next => Dictionary.NullSymbol); AddSymbol (Discriminant => Dictionary.InterruptStreamMappingSymbol, Ref => InterruptStreamMappingRefToRefType (NewMapping), Comp_Unit => Comp_Unit, Loc => Loc, Item => TheMapping); exception when Storage_Error => SystemErrors.Fatal_Error 
(Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => ""); end CreateInterruptStreamMapping; procedure SetNextInterruptStreamMapping (Current, Next : in Dictionary.Symbol) is begin GetInterruptStreamMappingRef (Current).Next := Next; end SetNextInterruptStreamMapping; function GetNextInterruptStreamMapping (TheMapping : Dictionary.Symbol) return Dictionary.Symbol is begin return GetInterruptStreamMappingRef (TheMapping).Next; end GetNextInterruptStreamMapping; function GetInterruptStreamMappingHandler (TheMapping : in Symbol) return LexTokenManager.Lex_String is begin return GetInterruptStreamMappingRef (TheMapping).TheHandler; end GetInterruptStreamMappingHandler; function GetInterruptStreamMappingStream (TheMapping : in Symbol) return LexTokenManager.Lex_String is begin return GetInterruptStreamMappingRef (TheMapping).TheInterruptStream; end GetInterruptStreamMappingStream; procedure CreateSuspendsListItem (ThePOorSO : in Dictionary.Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; SuspendsListItem : out Dictionary.Symbol) is NewSuspendsListItem : SuspendsListItemInfoRef; function SuspendsListItemRefToRefType is new Unchecked_Conversion (SuspendsListItemInfoRef, Dictionary.Ref_Type); begin NewSuspendsListItem := new SuspendsListItemInfo'(ThePOorSO => ThePOorSO, IsAccountedFor => False, Next => Dictionary.NullSymbol); AddSymbol (Discriminant => Dictionary.SuspendsListItemSymbol, Ref => SuspendsListItemRefToRefType (NewSuspendsListItem), Comp_Unit => Comp_Unit, Loc => Loc, Item => SuspendsListItem); exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => ""); end CreateSuspendsListItem; procedure SetNextSuspendsListItem (Current, Next : in Dictionary.Symbol) is begin GetSuspendsListItemRef (Current).Next := Next; end SetNextSuspendsListItem; function GetSuspendsListItem (SuspendsListItem : Dictionary.Symbol) return Dictionary.Symbol is begin return GetSuspendsListItemRef (SuspendsListItem).ThePOorSO; end GetSuspendsListItem; procedure SetSuspendsListItemIsAccountedFor (SuspendsListItem : in Dictionary.Symbol) is begin GetSuspendsListItemRef (SuspendsListItem).IsAccountedFor := True; end SetSuspendsListItemIsAccountedFor; function GetSuspendsListItemIsAccountedFor (SuspendsListItem : Dictionary.Symbol) return Boolean is begin return GetSuspendsListItemRef (SuspendsListItem).IsAccountedFor; end GetSuspendsListItemIsAccountedFor; function GetNextSuspendsListItem (SuspendsListItem : Dictionary.Symbol) return Dictionary.Symbol is begin return GetSuspendsListItemRef (SuspendsListItem).Next; end GetNextSuspendsListItem; procedure CreateVirtualElement (The_Variable : in RawDict.Variable_Info_Ref; TheOwner : in Dictionary.Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; TheVirtualElement : out Dictionary.Symbol) is NewVirtualElement : VirtualElementInfoRef; function VirtualElementRefToRefType is new Unchecked_Conversion (VirtualElementInfoRef, Dictionary.Ref_Type); begin NewVirtualElement := new VirtualElementInfo' (The_Variable => The_Variable, TheOwner => TheOwner, SeenByOwner => False, Next => Dictionary.NullSymbol); AddSymbol (Discriminant => Dictionary.VirtualElementSymbol, Ref => VirtualElementRefToRefType (NewVirtualElement), Comp_Unit => Comp_Unit, Loc => Loc, Item => TheVirtualElement); exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => ""); end 
CreateVirtualElement; function GetVirtualElementVariable (VirtualElement : Dictionary.Symbol) return RawDict.Variable_Info_Ref is begin return GetVirtualElementRef (VirtualElement).The_Variable; end GetVirtualElementVariable; function GetVirtualElementOwner (VirtualElement : Dictionary.Symbol) return Dictionary.Symbol is begin return GetVirtualElementRef (VirtualElement).TheOwner; end GetVirtualElementOwner; procedure SetVirtualElementSeenByOwner (VirtualElement : in Dictionary.Symbol) is begin GetVirtualElementRef (VirtualElement).SeenByOwner := True; end SetVirtualElementSeenByOwner; function GetVirtualElementSeenByOwner (VirtualElement : Dictionary.Symbol) return Boolean is begin return GetVirtualElementRef (VirtualElement).SeenByOwner; end GetVirtualElementSeenByOwner; procedure SetNextVirtualElement (Current, Next : in Dictionary.Symbol) is begin GetVirtualElementRef (Current).Next := Next; end SetNextVirtualElement; function GetNextVirtualElement (VirtualElement : Dictionary.Symbol) return Dictionary.Symbol is begin return GetVirtualElementRef (VirtualElement).Next; end GetNextVirtualElement; procedure CreateLoop (Region : in Dictionary.Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; TheLoop : out Dictionary.Symbol) is NewLoop : LoopRef; function LoopRefToRefType is new Unchecked_Conversion (LoopRef, Dictionary.Ref_Type); begin NewLoop := new LoopInfo' (Name => LexTokenManager.Null_String, Region => Region, LoopParameter => Dictionary.NullSymbol, OnEntryVars => Dictionary.NullSymbol, ExitExpn => 0, EntryExpn => 0, HasExits => False, Next => Dictionary.NullSymbol); AddSymbol (Discriminant => Dictionary.LoopSymbol, Ref => LoopRefToRefType (NewLoop), Comp_Unit => Comp_Unit, Loc => Loc, Item => TheLoop); exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => ""); end CreateLoop; procedure SetLoopName (Name : in LexTokenManager.Lex_String; TheLoop : in Dictionary.Symbol) is begin GetLoopRef (TheLoop).Name := Name; end SetLoopName; procedure SetLoopParameter (ForLoop, LoopParameter : in Dictionary.Symbol) is begin GetLoopRef (ForLoop).LoopParameter := LoopParameter; end SetLoopParameter; procedure SetLoopOnEntryVars (ForLoop, OnEntryVars : in Dictionary.Symbol) is begin GetLoopRef (ForLoop).OnEntryVars := OnEntryVars; end SetLoopOnEntryVars; procedure SetLoopExitExpn (ForLoop : in Dictionary.Symbol; Expn : in Natural) is begin GetLoopRef (ForLoop).ExitExpn := Expn; end SetLoopExitExpn; procedure SetLoopEntryExpn (ForLoop : in Dictionary.Symbol; Expn : in Natural) is begin GetLoopRef (ForLoop).EntryExpn := Expn; end SetLoopEntryExpn; procedure SetLoopHasExits (TheLoop : in Dictionary.Symbol) is begin GetLoopRef (TheLoop).HasExits := True; end SetLoopHasExits; procedure SetNextLoop (Current, Next : in Dictionary.Symbol) is begin GetLoopRef (Current).Next := Next; end SetNextLoop; function GetLoopName (TheLoop : Dictionary.Symbol) return LexTokenManager.Lex_String is begin return GetLoopRef (TheLoop).Name; end GetLoopName; function GetLoopRegion (TheLoop : Dictionary.Symbol) return Dictionary.Symbol is begin return GetLoopRef (TheLoop).Region; end GetLoopRegion; function GetLoopParameter (TheLoop : Dictionary.Symbol) return Dictionary.Symbol is begin return GetLoopRef (TheLoop).LoopParameter; end GetLoopParameter; function GetLoopOnEntryVars (TheLoop : Dictionary.Symbol) return Dictionary.Symbol is begin return GetLoopRef (TheLoop).OnEntryVars; end GetLoopOnEntryVars; function 
GetLoopExitExpn (TheLoop : Dictionary.Symbol) return Natural is begin return GetLoopRef (TheLoop).ExitExpn; end GetLoopExitExpn; function GetLoopEntryExpn (TheLoop : Dictionary.Symbol) return Natural is begin return GetLoopRef (TheLoop).EntryExpn; end GetLoopEntryExpn; function GetLoopHasExits (TheLoop : Dictionary.Symbol) return Boolean is begin return GetLoopRef (TheLoop).HasExits; end GetLoopHasExits; function GetNextLoop (TheLoop : Dictionary.Symbol) return Dictionary.Symbol is begin return GetLoopRef (TheLoop).Next; end GetNextLoop; procedure CreateLoopParameter (Name : in LexTokenManager.Lex_String; Type_Mark : in Type_Info_Ref; TheLoop : in Dictionary.Symbol; HasStaticRange : in Boolean; IsReverse : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; LoopParameter : out Dictionary.Symbol) is NewParameter : LoopParameterRef; function LoopParameterRefToRefType is new Unchecked_Conversion (LoopParameterRef, Dictionary.Ref_Type); begin NewParameter := new LoopParameterInfo' (Name => Name, Type_Mark => Type_Mark, TheLoop => TheLoop, HasStaticRange => HasStaticRange, IsReverse => IsReverse); AddSymbol (Discriminant => Dictionary.LoopParameterSymbol, Ref => LoopParameterRefToRefType (NewParameter), Comp_Unit => Comp_Unit, Loc => Loc, Item => LoopParameter); exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => ""); end CreateLoopParameter; procedure CreateLoopEntryVariable (OriginalVar : in Dictionary.Symbol; TheLoop : in Dictionary.Symbol; Comp_Unit : in ContextManager.UnitDescriptors; Loc : in LexTokenManager.Token_Position; LoopEntryVariable : out Dictionary.Symbol) is NewBound : LoopEntryVariableRef; function LoopEntryVariableRefToRefType is new Unchecked_Conversion (LoopEntryVariableRef, Dictionary.Ref_Type); begin NewBound := new LoopEntryVariableInfo'(OriginalVar => OriginalVar, TheLoop => TheLoop, Next => Dictionary.NullSymbol); AddSymbol (Discriminant => Dictionary.LoopEntryVariableSymbol, Ref => LoopEntryVariableRefToRefType (NewBound), Comp_Unit => Comp_Unit, Loc => Loc, Item => LoopEntryVariable); exception when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Symbol_Table_Overflow_Dynamic, Msg => ""); end CreateLoopEntryVariable; function GetLoopParameterName (LoopParameter : Dictionary.Symbol) return LexTokenManager.Lex_String is begin return GetLoopParameterRef (LoopParameter).Name; end GetLoopParameterName; function GetLoopParameterType (LoopParameter : Dictionary.Symbol) return Type_Info_Ref is begin return GetLoopParameterRef (LoopParameter).Type_Mark; end GetLoopParameterType; function GetLoopParameterLoop (LoopParameter : Dictionary.Symbol) return Dictionary.Symbol is begin return GetLoopParameterRef (LoopParameter).TheLoop; end GetLoopParameterLoop; function GetLoopParameterHasStaticRange (LoopParameter : Dictionary.Symbol) return Boolean is begin return GetLoopParameterRef (LoopParameter).HasStaticRange; end GetLoopParameterHasStaticRange; function GetLoopParameterIsReverse (LoopParameter : Dictionary.Symbol) return Boolean is begin return GetLoopParameterRef (LoopParameter).IsReverse; end GetLoopParameterIsReverse; procedure SetLoopEntryVariableNext (LoopEntryVariable : in Dictionary.Symbol; Next : in Dictionary.Symbol) is begin GetLoopEntryVariableRef (LoopEntryVariable).Next := Next; end SetLoopEntryVariableNext; function GetLoopEntryVariableOriginalVar (LoopEntryVariable : Dictionary.Symbol) return Dictionary.Symbol is begin return 
GetLoopEntryVariableRef (LoopEntryVariable).OriginalVar; end GetLoopEntryVariableOriginalVar; function GetLoopEntryVariableTheLoop (LoopEntryVariable : Dictionary.Symbol) return Dictionary.Symbol is begin return GetLoopEntryVariableRef (LoopEntryVariable).TheLoop; end GetLoopEntryVariableTheLoop; function GetLoopEntryVariableNext (LoopEntryVariable : Dictionary.Symbol) return Dictionary.Symbol is begin return GetLoopEntryVariableRef (LoopEntryVariable).Next; end GetLoopEntryVariableNext; end RawDict; -- hidden because of non-SPARK code spark-2012.0.deb/examiner/stree-findlastitemindependencyrelation.adb0000644000175000017500000003262311753202336024656 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (STree) function FindLastItemInDependencyRelation (Node : SyntaxNode) return LexTokenManager.Token_Position is The_Next_Node : SyntaxNode; -------------------------------------------------------------------------------- function Find_Last_Import_Or_Export (Node : SyntaxNode) return SyntaxNode --# global in Table; --# pre Syntax_Node_Type (Node, Table) = SP_Symbols.dependency_clause; --# return Return_Node => (Syntax_Node_Type (Return_Node, Table) = SP_Symbols.entire_variable --# or Syntax_Node_Type (Return_Node, Table) = SP_Symbols.multiply); is The_Next_Node : SyntaxNode; begin --first see if there are any imports at all The_Next_Node := Child_Node (Current_Node => Next_Sibling (Current_Node => Child_Node (Current_Node => Node))); -- ASSUME The_Next_Node = multiply OR dependency_clause_optrep OR NULL if Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.multiply or else Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.dependency_clause_optrep then -- ASSUME The_Next_Node = multiply OR dependency_clause_optrep -- there are some imports -- first find dependency_clause_optrep if there is one. 
-- we must be pointing at one or at a star if Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.multiply then -- ASSUME The_Next_Node = multiply if Syntax_Node_Type (Node => Next_Sibling (Current_Node => The_Next_Node)) = SP_Symbols.dependency_clause_optrep then The_Next_Node := Next_Sibling (Current_Node => The_Next_Node); end if; end if; if Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.dependency_clause_optrep then -- ASSUME The_Next_Node = dependency_clause_optrep -- we need to find rightmost entire variable The_Next_Node := Child_Node (Current_Node => The_Next_Node); -- ASSUME The_Next_Node = dependency_clause_optrep OR entire_variable if Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.dependency_clause_optrep then -- ASSUME The_Next_Node = dependency_clause_optrep The_Next_Node := Next_Sibling (Current_Node => The_Next_Node); -- ASSUME The_Next_Node = entire_variable SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.entire_variable, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect The_Next_Node = entire_variable in Find_Last_Import_Or_Export"); elsif Syntax_Node_Type (Node => The_Next_Node) /= SP_Symbols.entire_variable then The_Next_Node := NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect The_Next_Node = dependency_clause_optrep OR entire_variable " & "in Find_Last_Import_Or_Export"); end if; end if; elsif The_Next_Node = NullNode then -- ASSUME The_Next_Node = NULL -- there are no imports so look at the exports The_Next_Node := Child_Node (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME The_Next_Node = dependency_clause_optrep OR entire_variable if Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.dependency_clause_optrep then -- ASSUME The_Next_Node = dependency_clause_optrep The_Next_Node := Next_Sibling (Current_Node => The_Next_Node); -- ASSUME The_Next_Node = entire_variable SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.entire_variable, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect The_Next_Node = entire_variable in Find_Last_Import_Or_Export"); elsif Syntax_Node_Type (Node => The_Next_Node) /= SP_Symbols.entire_variable then The_Next_Node := NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect The_Next_Node = dependency_clause_optrep OR entire_variable in Find_Last_Import_Or_Export"); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect The_Next_Node = multiply OR dependency_clause_optrep OR NULL in Find_Last_Import_Or_Export"); end if; return The_Next_Node; end Find_Last_Import_Or_Export; -------------------------------------------------------------------------------- function Find_Last_Import_Of_Null_Import_List (Node : SyntaxNode) return SyntaxNode --# global in Table; --# pre Syntax_Node_Type (Node, Table) = SP_Symbols.null_import_list; --# return Return_Node => Syntax_Node_Type (Return_Node, Table) = SP_Symbols.entire_variable; is The_Next_Node : SyntaxNode; begin The_Next_Node := Child_Node (Current_Node => Next_Sibling (Current_Node => Child_Node (Current_Node => Node))); -- ASSUME The_Next_Node = dependency_clause_optrep OR entire_variable if Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.dependency_clause_optrep then -- ASSUME The_Next_Node = dependency_clause_optrep The_Next_Node := Next_Sibling (Current_Node => The_Next_Node); -- ASSUME The_Next_Node = entire_variable SystemErrors.RT_Assert (C => 
Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.entire_variable, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect The_Next_Node = entire_variable in Find_Last_Import_Of_Null_Import_List"); elsif Syntax_Node_Type (Node => The_Next_Node) /= SP_Symbols.entire_variable then The_Next_Node := NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect The_Next_Node = dependency_clause_optrep OR entire_variable " & "in Find_Last_Import_Of_Null_Import_List"); end if; return The_Next_Node; end Find_Last_Import_Of_Null_Import_List; begin -- FindLastItemInDependencyRelation -- Position to report error -- First see if there is a null_import_list. If there is, we know it is the last clause The_Next_Node := Child_Node (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME The_Next_Node = dependency_relation_rep OR null_import_list OR NULL if Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.dependency_relation_rep then -- ASSUME The_Next_Node = dependency_relation_rep -- we don't have a singleton null_import_list but there might still be one to our right; thus: The_Next_Node := Next_Sibling (Current_Node => The_Next_Node); -- ASSUME The_Next_Node = ampersand OR NULL if Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.ampersand then -- ASSUME The_Next_Node = ampersand The_Next_Node := Next_Sibling (Current_Node => The_Next_Node); -- ASSUME The_Next_Node = null_import_list SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.null_import_list, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect The_Next_Node = null_import_list in FindLastItemInDependencyRelation"); elsif The_Next_Node /= NullNode then The_Next_Node := NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect The_Next_Node = ampersand OR NULL in FindLastItemInDependencyRelation"); end if; elsif The_Next_Node /= NullNode and then Syntax_Node_Type (Node => The_Next_Node) /= SP_Symbols.null_import_list then The_Next_Node := NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect The_Next_Node = dependency_relation_rep OR null_import_list OR NULL " & "in FindLastItemInDependencyRelation"); end if; --# check The_Next_Node = NullNode or Syntax_Node_Type (The_Next_Node, Table) = SP_Symbols.null_import_list; if Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.null_import_list then -- ASSUME The_Next_Node = null_import_list -- the place to report the error is the last import of the null_import_list The_Next_Node := Find_Last_Import_Of_Null_Import_List (Node => The_Next_Node); elsif The_Next_Node = NullNode then -- ASSUME The_Next_Node = NULL -- next choice is last of several derives imports if there is one The_Next_Node := Child_Node (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME The_Next_Node = dependency_relation_rep OR NULL if Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.dependency_relation_rep then -- ASSUME The_Next_Node = dependency_relation_rep The_Next_Node := Child_Node (Current_Node => The_Next_Node); -- ASSUME The_Next_Node = dependency_relation_rep OR dependency_clause if Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.dependency_relation_rep then -- ASSUME The_Next_Node = dependency_relation_rep The_Next_Node := Next_Sibling (Current_Node => Next_Sibling (Current_Node => The_Next_Node)); -- ASSUME The_Next_Node = dependency_clause SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => The_Next_Node) = 
SP_Symbols.dependency_clause, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect The_Next_Node = dependency_clause in FindLastItemInDependencyRelation"); The_Next_Node := Find_Last_Import_Or_Export (Node => The_Next_Node); elsif Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.dependency_clause then -- ASSUME The_Next_Node = dependency_clause The_Next_Node := Find_Last_Import_Or_Export (Node => The_Next_Node); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect The_Next_Node = dependency_relation_rep OR dependency_clause " & "in FindLastItemInDependencyRelation"); end if; elsif The_Next_Node = NullNode then -- ASSUME The_Next_Node = NULL -- there is no derives so start trawling the globals -- first move to mode_global node, if this fails then there 3rd and 4th choice fail The_Next_Node := Child_Node (Current_Node => Child_Node (Current_Node => Parent_Node (Current_Node => Node))); if Syntax_Node_Type (Node => The_Next_Node) = SP_Symbols.global_definition_rep then --3rd or 4th choice are possibilities --The_Next_Node is the topmost global_definition_rep; this is the new --start point for the search for a suitable reporting place. --find rightmost global_variable_clause The_Next_Node := Child_Node (Current_Node => The_Next_Node); if Syntax_Node_Type (Node => Next_Sibling (Current_Node => The_Next_Node)) = SP_Symbols.global_variable_clause then The_Next_Node := Next_Sibling (Current_Node => The_Next_Node); end if; --The_Next_Node is now rightmost global_variable_clause --now find right most gloabl variable in that clause --first advance to global_variable_list The_Next_Node := Child_Node (Current_Node => Next_Sibling (Current_Node => Child_Node (Current_Node => The_Next_Node))); -- now find rightmost global variable if Syntax_Node_Type (Node => Next_Sibling (Current_Node => The_Next_Node)) = SP_Symbols.global_variable then The_Next_Node := Next_Sibling (Current_Node => The_Next_Node); end if; else --only place we can point is the subprogram declaration The_Next_Node := Parent_Node (Current_Node => Parent_Node (Current_Node => Node)); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect The_Next_Node = dependency_relation_rep OR NULL in FindLastItemInDependencyRelation"); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect The_Next_Node = null_import_list OR NULL in FindLastItemInDependencyRelation"); end if; return Node_Position (Node => The_Next_Node); end FindLastItemInDependencyRelation; spark-2012.0.deb/examiner/flowanalyser.adb0000644000175000017500000001470711753202336017440 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. 
If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with CommandLineData; with ErrorHandler; with ExaminerConstants; with RelationAlgebra; with SeqAlgebra; with SP_Symbols; with SystemErrors; use type CommandLineData.Flow_Analysis_Options; use type SP_Symbols.SP_Symbol; package body FlowAnalyser is ------------------------------------------------------------------------ -- IFA_Stack implements a Stack ADT holding the state of the -- information flow analyser as it traverses the syntax tree representing -- the body of a program unit (procedure, function or task body). -- -- It is used in FlowAnalyser.FlowAnalyse where the tree-traveral -- algorithm is implemented. ------------------------------------------------------------------------ --# inherit ExaminerConstants, --# Heap, --# RelationAlgebra, --# SeqAlgebra, --# Statistics, --# SystemErrors; package IFA_Stack is type MemberType is ( Action, -- Null, Assignment, or Procedure Call statement IfNode, ElsifNode, CaseNode, LoopHead, ExitNode, ExitBranch, DefaultExitNode); type StackMember is record MemberKind : MemberType; DefinedVars, UnPreservedVars, AllVars, SeqOfExpns : SeqAlgebra.Seq; Lambda, Mu, Rho, Theta, ThetaTilde, RhoProd : RelationAlgebra.Relation; end record; type Stack is private; -- constant used only in fatal error cases so that we can eliminate DF errors -- on paths that call SystemErrors.FatalError NullMember : constant StackMember := StackMember' (Action, SeqAlgebra.Null_Seq, SeqAlgebra.Null_Seq, SeqAlgebra.Null_Seq, SeqAlgebra.Null_Seq, RelationAlgebra.Null_Relation, RelationAlgebra.Null_Relation, RelationAlgebra.Null_Relation, RelationAlgebra.Null_Relation, RelationAlgebra.Null_Relation, RelationAlgebra.Null_Relation); ---------------------------------------- -- Basic accessor functions for a Stack ---------------------------------------- function IsEmpty (S : Stack) return Boolean; function Top (S : Stack) return StackMember; ---------------------------------------- -- Constructor, Push and Pop ---------------------------------------- procedure ClearStack (S : out Stack); --# derives S from ; procedure Push (S : in out Stack; M : in StackMember); --# derives S from *, --# M; procedure Pop (S : in out Stack; M : out StackMember); --# derives M, --# S from S; ---------------------------------------- -- Constructor for a single StackMember ---------------------------------------- procedure EstablishMember (TheHeap : in out Heap.HeapRecord; Kind : in MemberType; M : out StackMember); --# global in out Statistics.TableUsage; --# derives M from Kind, --# TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# TheHeap; --# post M.MemberKind = Kind; -- and other fields denote new, empty sequences and relations --------------------------------------------- -- DisposeOfMember does a "Dispose" operation -- on all sequences and relations in M, -- using the operations supplied by -- SeqAlgebra and RelationAlgebra --------------------------------------------- procedure DisposeOfMember (TheHeap : in out Heap.HeapRecord; M : in StackMember); --# derives TheHeap from *, --# M; private -- Implementation is bounded to -- ExaminerConstants.StackManagerStackSize elements on the stack subtype PointerRange is Integer range 0 .. ExaminerConstants.StackManagerStackSize; subtype IndexRange is PointerRange range 1 .. 
PointerRange'Last; type Vector is array (IndexRange) of StackMember; type Stack is record StackVector : Vector; StackPointer : PointerRange; end record; end IFA_Stack; -- This function renaming cannot be replaced by a use type clause at present -- because it refers to the embedded package above and applies to the -- subprogram below so it has to appear between the two. SPARK does not -- currently permit a use type clause in this location. function "=" (Left, Right : in IFA_Stack.MemberType) return Boolean renames IFA_Stack."="; package body IFA_Stack is separate; procedure FlowAnalyse (SubprogSym : in Dictionary.Symbol; StartNode : in STree.SyntaxNode; EndPosition : in LexTokenManager.Token_Position; ComponentData : in out ComponentManager.ComponentData; TheHeap : in out Heap.HeapRecord; Table : in RefList.HashTable; DataFlowErrorFound : out Boolean) is separate; procedure FlowAnalysePartition (Node : in STree.SyntaxNode; TheHeap : in out Heap.HeapRecord) is separate; end FlowAnalyser; spark-2012.0.deb/examiner/lextokenmanager.adb0000644000175000017500000013267011753202336020116 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Characters.Handling; with Ada.Characters.Latin_1; with Statistics; with SystemErrors; package body LexTokenManager --# own State is String_Table, --# The_Last_Token; is type Table_Contents is array (Lex_String) of Character; subtype Hash_Index is Natural range 0 .. ExaminerConstants.String_Hash_Size - 1; type Hash_Table_Struct is array (Hash_Index) of Lex_String; type Table_Structure is record Next_Vacant : Lex_String; Hash_Table : Hash_Table_Struct; Contents : Table_Contents; end record; -- The string table uses hashing with chaining, ie if there is a clash then -- the next item is rehashed to a new location and a link is provided to it. -- This link occupies the first 3 bytes in each entry (because the max string -- table size is 2**21). 
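-- For illustration only, the chain link is held as three base-128 "digits":
-- a link value L is stored as Character'Val (L / 2**14),
-- Character'Val ((L / 2**7) mod 2**7) and Character'Val (L mod 2**7), and is
-- recovered by the reverse weighted sum (see Set_Str_Link and Get_Str_Link
-- below). As a worked example with an arbitrarily chosen link value of 300000:
--    300000 = 18 * 2**14 + 39 * 2**7 + 96
-- so the three link characters have positions 18, 39 and 96, and decoding
-- gives 18 * 16384 + 39 * 128 + 96 = 300000 again. Each entry is therefore
-- laid out as [3-byte link][string characters][NUL terminator], and a lookup
-- hashes the string to a Hash_Table bucket, then follows the links via
-- Get_Str_Link until either Comp_Str_Case_Sensitive reports a match or
-- Null_String marks the end of the chain (see Insert_Examiner_String below).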
Str_Link_Len : constant Lex_String := 3; String_Table : Table_Structure; The_Last_Token : Lex_String; function Lex_String_Compare (Lex_Str1 : Lex_String; Lex_Str2 : Lex_String; Case_Sensitive : Boolean) return Str_Comp_Result --# global in String_Table; is Result : Str_Comp_Result := Str_Eq; Finished : Boolean := False; Index1 : Lex_String; Index2 : Lex_String; function Character_Pos (Index : Lex_String; Case_Sensitive : Boolean) return Natural --# global in String_Table; is Result : Natural; begin if Case_Sensitive then Result := Character'Pos (String_Table.Contents (Index)); else Result := Character'Pos (Ada.Characters.Handling.To_Upper (String_Table.Contents (Index))); end if; return Result; end Character_Pos; begin if Lex_Str1 = Lex_Str2 then Result := Str_Eq; elsif Lex_Str1 /= Null_String and then Lex_Str2 /= Null_String then -- This check should never fail, but would need precondition and proof to show this. if (Lex_Str1 <= Lex_String'Last - Str_Link_Len and then Lex_Str2 <= Lex_String'Last - Str_Link_Len) then Index1 := Lex_Str1 + Str_Link_Len; Index2 := Lex_Str2 + Str_Link_Len; else Index1 := Null_String; Index2 := Null_String; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.String_Table_Overflow, Msg => "Invalid input string."); end if; while not Finished loop -- If we've reached the end of both strings then they are equal if (String_Table.Contents (Index1) = Ada.Characters.Latin_1.NUL and then String_Table.Contents (Index2) = Ada.Characters.Latin_1.NUL) then Result := Str_Eq; Finished := True; -- If the current character in the first string is closer to the start of the -- alphabet then the first string is alphabetically first. Note that Ada.Characters.Latin_1.NUL -- comes before 'A' so this will return StrFirst if we've reached the end of the -- first string but not the second. elsif (Character_Pos (Index => Index1, Case_Sensitive => Case_Sensitive) < Character_Pos (Index => Index2, Case_Sensitive => Case_Sensitive)) then Result := Str_First; Finished := True; -- Reverse of the previous comparison... elsif (Character_Pos (Index => Index1, Case_Sensitive => Case_Sensitive) > Character_Pos (Index => Index2, Case_Sensitive => Case_Sensitive)) then Result := Str_Second; Finished := True; else -- If we have reached this point and one of the strings has hit the top of the -- table without its final char being null then something has gone wrong, but -- guard avoids possible RTE. 
if (Index1 < Lex_String'Last and Index2 < Lex_String'Last) then Index1 := Index1 + 1; Index2 := Index2 + 1; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.String_Table_Overflow, Msg => "Attempt to index past end of table."); end if; end if; end loop; -- Deal with the cases where at least one string is null elsif Lex_Str1 = Null_String then Result := Str_First; else Result := Str_Second; end if; return Result; end Lex_String_Compare; -- Performs case insensitive comparison of two Lex_Strings and -- returns which string comes first when ordered alphabetically -- (See also Comp_Str_Case_Insensitive and -- Comp_Str_Case_Sensitive, below) function Lex_String_Case_Insensitive_Compare (Lex_Str1 : Lex_String; Lex_Str2 : Lex_String) return Str_Comp_Result --# global in String_Table; is begin return Lex_String_Compare (Lex_Str1 => Lex_Str1, Lex_Str2 => Lex_Str2, Case_Sensitive => False); end Lex_String_Case_Insensitive_Compare; -- Performs case sensitive comparison of two Lex_Strings and -- returns which string comes first when ordered alphabetically -- (See also Comp_Str_Case_Insensitive and -- Comp_Str_Case_Sensitive, below) function Lex_String_Case_Sensitive_Compare (Lex_Str1 : Lex_String; Lex_Str2 : Lex_String) return Str_Comp_Result --# global in String_Table; is begin return Lex_String_Compare (Lex_Str1 => Lex_Str1, Lex_Str2 => Lex_Str2, Case_Sensitive => True); end Lex_String_Case_Sensitive_Compare; function Comp_Str_Case_Insensitive (Str : E_Strings.T; Lex_Str : Lex_String) return Boolean --# global in String_Table; is LX : Natural; SX : Lex_String; Str_Equal : Boolean; begin if Lex_Str = Null_String and then E_Strings.Is_Empty (E_Str => Str) then Str_Equal := True; elsif Lex_Str = Null_String and then not E_Strings.Is_Empty (E_Str => Str) then Str_Equal := False; else SX := Lex_Str + Str_Link_Len; Str_Equal := True; LX := 1; while Str_Equal and then LX <= E_Strings.Get_Length (E_Str => Str) loop if Ada.Characters.Handling.To_Upper (E_Strings.Get_Element (E_Str => Str, Pos => LX)) = Ada.Characters.Handling.To_Upper (String_Table.Contents (SX)) then LX := LX + 1; SX := SX + 1; else Str_Equal := False; end if; end loop; if String_Table.Contents (SX) /= Ada.Characters.Latin_1.NUL then Str_Equal := False; end if; end if; return Str_Equal; end Comp_Str_Case_Insensitive; function Comp_Str_Case_Sensitive (Str : E_Strings.T; Lex_Str : Lex_String) return Boolean --# global in String_Table; is LX : Natural; SX : Lex_String; Str_Equal : Boolean; begin if Lex_Str = Null_String and then E_Strings.Is_Empty (E_Str => Str) then Str_Equal := True; elsif Lex_Str = Null_String and then not E_Strings.Is_Empty (E_Str => Str) then Str_Equal := False; else SX := Lex_Str + Str_Link_Len; Str_Equal := True; LX := 1; while Str_Equal and then LX <= E_Strings.Get_Length (E_Str => Str) loop if E_Strings.Get_Element (E_Str => Str, Pos => LX) = String_Table.Contents (SX) then LX := LX + 1; SX := SX + 1; else Str_Equal := False; end if; end loop; if String_Table.Contents (SX) /= Ada.Characters.Latin_1.NUL then Str_Equal := False; end if; end if; return Str_Equal; end Comp_Str_Case_Sensitive; procedure Insert_Examiner_String (Str : in E_Strings.T; Lex_Str : out Lex_String) --# global in out String_Table; --# derives Lex_Str, --# String_Table from Str, --# String_Table; is Hash_Val : Hash_Index; Loc_Str, Token_Str, Str_Link : Lex_String; Searching : Boolean; function Hash (Str : E_Strings.T) return Hash_Index is Val : Hash_Index; begin Val := 0; for Ix in E_Strings.Positions range 1 .. 
E_Strings.Get_Length (E_Str => Str) loop Val := (Val + Hash_Index (Character'Pos (Ada.Characters.Handling.To_Upper (E_Strings.Get_Element (E_Str => Str, Pos => Ix))))) mod ExaminerConstants.String_Hash_Size; end loop; return Val; end Hash; procedure Set_Str_Link (Lex_Str, Str_Link : in Lex_String) --# global in out String_Table; --# derives String_Table from *, --# Lex_Str, --# Str_Link; is begin String_Table.Contents (Lex_Str) := Character'Val (Str_Link / 2 ** 14); String_Table.Contents (Lex_Str + 1) := Character'Val ((Str_Link / 2 ** 7) mod 2 ** 7); String_Table.Contents (Lex_Str + 2) := Character'Val (Str_Link mod 2 ** 7); end Set_Str_Link; function Get_Str_Link (Lex_Str : in Lex_String) return Lex_String --# global in String_Table; is Link : Lex_String; begin Link := Character'Pos (String_Table.Contents (Lex_Str)) * 2 ** 14; Link := Link + Character'Pos (String_Table.Contents (Lex_Str + 1)) * 2 ** 7; Link := Link + Character'Pos (String_Table.Contents (Lex_Str + 2)); return Link; end Get_Str_Link; procedure Copy_Str (Str : in E_Strings.T; Lex_Str : out Lex_String) --# global in out String_Table; --# derives Lex_Str, --# String_Table from Str, --# String_Table; is SX : Lex_String; begin if String_Table.Next_Vacant = Null_String or else ((String_Table.Next_Vacant + Str_Link_Len) + Lex_String (E_Strings.Get_Length (E_Str => Str))) + 1 > Lex_String'Last then Lex_Str := Null_String; else -- There is space for link, string and string terminator SX := String_Table.Next_Vacant; Lex_Str := SX; Set_Str_Link (Lex_Str => SX, Str_Link => Null_String); SX := SX + Str_Link_Len; -- Skip StringLink. for LX in E_Strings.Positions range 1 .. E_Strings.Get_Length (E_Str => Str) loop String_Table.Contents (SX) := E_Strings.Get_Element (E_Str => Str, Pos => LX); SX := SX + 1; end loop; String_Table.Contents (SX) := Ada.Characters.Latin_1.NUL; -- Lex_String Terminator. if SX = Lex_String'Last then String_Table.Next_Vacant := Null_String; else String_Table.Next_Vacant := SX + 1; end if; end if; end Copy_Str; begin Token_Str := Null_String; Hash_Val := Hash (Str => Str); Loc_Str := String_Table.Hash_Table (Hash_Val); if Loc_Str = Null_String then -- No string elements at this entry. Copy_Str (Str => Str, Lex_Str => Token_Str); String_Table.Hash_Table (Hash_Val) := Token_Str; -- String element now exists at this Hash_Table entry. else Searching := True; while Searching loop if Comp_Str_Case_Sensitive (Str => Str, Lex_Str => Loc_Str) then Token_Str := Loc_Str; Searching := False; else Str_Link := Get_Str_Link (Lex_Str => Loc_Str); if Str_Link = Null_String then Copy_Str (Str => Str, Lex_Str => Token_Str); Set_Str_Link (Lex_Str => Loc_Str, Str_Link => Token_Str); Searching := False; else Loc_Str := Str_Link; end if; end if; end loop; end if; if Token_Str = Null_String then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.String_Table_Overflow, Msg => ""); end if; Lex_Str := Token_Str; end Insert_Examiner_String; function Lex_String_To_String (Lex_Str : Lex_String) return E_Strings.T --# global in String_Table; is IX : Lex_String; CH : Character; Str : E_Strings.T := E_Strings.Empty_String; begin if Lex_Str /= Null_String then IX := Lex_Str + Str_Link_Len; loop CH := String_Table.Contents (IX); exit when CH = Ada.Characters.Latin_1.NUL; -- Lex_String Terminator. 
E_Strings.Append_Char (E_Str => Str, Ch => CH); IX := IX + 1; end loop; end if; return Str; end Lex_String_To_String; function Is_Attribute_Token (Tok : Lex_String; Language : CommandLineData.Language_Profiles) return Boolean --# global in String_Table; is type Index is ( Aft_Token_Entry, Base_Token_Entry, Delta_Token_Entry, Digits_Token_Entry, Emax_Token_Entry, Epsilon_Token_Entry, First_Token_Entry, Fore_Token_Entry, Large_Token_Entry, Last_Token_Entry, Length_Token_Entry, Machine_Emax_Token_Entry, Machine_Emin_Token_Entry, Machine_Mantissa_Token_Entry, Machine_Overflows_Token_Entry, Machine_Radix_Token_Entry, Machine_Rounds_Token_Entry, Mantissa_Token_Entry, Pos_Token_Entry, Pred_Token_Entry, Range_Token_Entry, Safe_Emax_Token_Entry, Safe_Large_Token_Entry, Safe_Small_Token_Entry, Size_Token_Entry, Small_Token_Entry, Succ_Token_Entry, Val_Token_Entry, Denorm_Token_Entry, Model_Emin_Token_Entry, Model_Epsilon_Token_Entry, Model_Mantissa_Token_Entry, Model_Small_Token_Entry, Safe_First_Token_Entry, Safe_Last_Token_Entry, Component_Size_Token_Entry, Min_Token_Entry, Max_Token_Entry, Signed_Zeros_Token_Entry, Valid_Token_Entry, Adjacent_Token_Entry, Compose_Token_Entry, Copy_Sign_Token_Entry, Leading_Part_Token_Entry, Remainder_Token_Entry, Scaling_Token_Entry, Ceiling_Token_Entry, Exponent_Token_Entry, Floor_Token_Entry, Fraction_Token_Entry, Machine_Token_Entry, Model_Token_Entry, Rounding_Token_Entry, Truncation_Token_Entry, Unbiased_Rounding_Token_Entry, Address_Token_Entry, Modulus_Token_Entry, Tail_Token_Entry, Append_Token_Entry, Access_Token_Entry, Always_Valid_Token_Entry, Mod_Token_Entry, Machine_Rounding_Token_Entry); type Language_Array is array (CommandLineData.Language_Profiles) of Boolean; type Tables_Element is record Token : Lex_String; Language : Language_Array; end record; type Tables is array (Index) of Tables_Element; ------------------------------------------------------------------ -- This table defines the sets of attributes that are defined -- in SPARK83, SPARK95, and SPARK2005 modes. 
-- -- Seven floating-point and fixed-point attributes are considered -- obsolete but remain as implementation-defined attributes -- in SPARK95 and 2005 modes, following the advice of AARM A.5.3 (72.f) -- and A.5.4 (4.b) ------------------------------------------------------------------ Attribute_Table : constant Tables := Tables' (Aft_Token_Entry => Tables_Element'(Token => Aft_Token, Language => Language_Array'(others => True)), Base_Token_Entry => Tables_Element'(Token => Base_Token, Language => Language_Array'(others => True)), Delta_Token_Entry => Tables_Element'(Token => Delta_Token, Language => Language_Array'(others => True)), Digits_Token_Entry => Tables_Element'(Token => Digits_Token, Language => Language_Array'(others => True)), Emax_Token_Entry => Tables_Element'(Token => Emax_Token, Language => Language_Array'(others => True)), Epsilon_Token_Entry => Tables_Element'(Token => Epsilon_Token, Language => Language_Array'(others => True)), First_Token_Entry => Tables_Element'(Token => First_Token, Language => Language_Array'(others => True)), Fore_Token_Entry => Tables_Element'(Token => Fore_Token, Language => Language_Array'(others => True)), Large_Token_Entry => Tables_Element'(Token => Large_Token, Language => Language_Array'(others => True)), Last_Token_Entry => Tables_Element'(Token => Last_Token, Language => Language_Array'(others => True)), Length_Token_Entry => Tables_Element'(Token => Length_Token, Language => Language_Array'(others => True)), Machine_Emax_Token_Entry => Tables_Element'(Token => Machine_Emax_Token, Language => Language_Array'(others => True)), Machine_Emin_Token_Entry => Tables_Element'(Token => Machine_Emin_Token, Language => Language_Array'(others => True)), Machine_Mantissa_Token_Entry => Tables_Element'(Token => Machine_Mantissa_Token, Language => Language_Array'(others => True)), Machine_Overflows_Token_Entry => Tables_Element'(Token => Machine_Overflows_Token, Language => Language_Array'(others => True)), Machine_Radix_Token_Entry => Tables_Element'(Token => Machine_Radix_Token, Language => Language_Array'(others => True)), Machine_Rounds_Token_Entry => Tables_Element'(Token => Machine_Rounds_Token, Language => Language_Array'(others => True)), Mantissa_Token_Entry => Tables_Element'(Token => Mantissa_Token, Language => Language_Array'(others => True)), Pos_Token_Entry => Tables_Element'(Token => Pos_Token, Language => Language_Array'(others => True)), Pred_Token_Entry => Tables_Element'(Token => Pred_Token, Language => Language_Array'(others => True)), Range_Token_Entry => Tables_Element'(Token => Range_Token, Language => Language_Array'(others => True)), Safe_Emax_Token_Entry => Tables_Element'(Token => Safe_Emax_Token, Language => Language_Array'(others => True)), Safe_Large_Token_Entry => Tables_Element'(Token => Safe_Large_Token, Language => Language_Array'(others => True)), Safe_Small_Token_Entry => Tables_Element'(Token => Safe_Small_Token, Language => Language_Array'(others => True)), Size_Token_Entry => Tables_Element'(Token => Size_Token, Language => Language_Array'(others => True)), Small_Token_Entry => Tables_Element'(Token => Small_Token, Language => Language_Array'(others => True)), Succ_Token_Entry => Tables_Element'(Token => Succ_Token, Language => Language_Array'(others => True)), Val_Token_Entry => Tables_Element'(Token => Val_Token, Language => Language_Array'(others => True)), Denorm_Token_Entry => Tables_Element'(Token => Denorm_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), 
Model_Emin_Token_Entry => Tables_Element'(Token => Model_Emin_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Model_Epsilon_Token_Entry => Tables_Element'(Token => Model_Epsilon_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Model_Mantissa_Token_Entry => Tables_Element'(Token => Model_Mantissa_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Model_Small_Token_Entry => Tables_Element'(Token => Model_Small_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Safe_First_Token_Entry => Tables_Element'(Token => Safe_First_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Safe_Last_Token_Entry => Tables_Element'(Token => Safe_Last_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Component_Size_Token_Entry => Tables_Element'(Token => Component_Size_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Min_Token_Entry => Tables_Element'(Token => Min_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Max_Token_Entry => Tables_Element'(Token => Max_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Signed_Zeros_Token_Entry => Tables_Element'(Token => Signed_Zeros_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Valid_Token_Entry => Tables_Element'(Token => Valid_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Adjacent_Token_Entry => Tables_Element'(Token => Adjacent_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Compose_Token_Entry => Tables_Element'(Token => Compose_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Copy_Sign_Token_Entry => Tables_Element'(Token => Copy_Sign_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Leading_Part_Token_Entry => Tables_Element'(Token => Leading_Part_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Remainder_Token_Entry => Tables_Element'(Token => Remainder_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Scaling_Token_Entry => Tables_Element'(Token => Scaling_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Ceiling_Token_Entry => Tables_Element'(Token => Ceiling_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Exponent_Token_Entry => Tables_Element'(Token => Exponent_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Floor_Token_Entry => Tables_Element'(Token => Floor_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Fraction_Token_Entry => Tables_Element'(Token => Fraction_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Machine_Token_Entry => Tables_Element'(Token => Machine_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Model_Token_Entry => Tables_Element'(Token => Model_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Rounding_Token_Entry => Tables_Element'(Token => Rounding_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Truncation_Token_Entry => Tables_Element'(Token => 
Truncation_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Unbiased_Rounding_Token_Entry => Tables_Element'(Token => Unbiased_Rounding_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Address_Token_Entry => Tables_Element'(Token => Address_Token, Language => Language_Array'(others => False)), Modulus_Token_Entry => Tables_Element'(Token => Modulus_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Tail_Token_Entry => Tables_Element'(Token => Tail_Token, Language => Language_Array'(others => True)), Append_Token_Entry => Tables_Element'(Token => Append_Token, Language => Language_Array'(others => True)), Access_Token_Entry => Tables_Element'(Token => Access_Token, Language => Language_Array'(CommandLineData.SPARK83 => False, others => True)), Always_Valid_Token_Entry => Tables_Element'(Token => Always_Valid_Token, Language => Language_Array'(others => True)), Mod_Token_Entry => Tables_Element'(Token => Mod_Token, Language => Language_Array'(CommandLineData.SPARK2005_Profiles => True, others => False)), Machine_Rounding_Token_Entry => Tables_Element'(Token => Machine_Rounding_Token, Language => Language_Array'(CommandLineData.SPARK2005_Profiles => True, others => False))); Result : Boolean := False; begin -- Is_Attribute_Token for I in Index loop if Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attribute_Table (I).Token, Lex_Str2 => Tok) = Str_Eq then Result := Attribute_Table (I).Language (Language); exit; end if; end loop; return Result; end Is_Attribute_Token; procedure Initialise_String_Table --# global in out String_Table; --# derives String_Table from *; is ----------------------------------------------------------- -- This procedure inserts the given String into the String -- table and checks that the new Lex_String returned is as -- expected. We use type String here so that callers may -- pass a string literal. 
----------------------------------------------------------- procedure Ins (Str : in String; Expected_Str : in Lex_String) --# global in out String_Table; --# derives String_Table from *, --# Str & --# null from Expected_Str; is New_Str : Lex_String; begin Insert_Examiner_String (Str => E_Strings.Copy_String (Str => Str), Lex_Str => New_Str); if New_Str /= Expected_Str then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Init, Msg => "Initialise_String_Table : New_Str /= Expected_Str"); end if; end Ins; begin Ins ("Aft", Aft_Token); Ins ("Base", Base_Token); Ins ("delta", Delta_Token); Ins ("digits", Digits_Token); Ins ("Emax", Emax_Token); Ins ("Epsilon", Epsilon_Token); Ins ("First", First_Token); Ins ("Fore", Fore_Token); Ins ("Large", Large_Token); Ins ("Last", Last_Token); --# assert True; Ins ("Length", Length_Token); Ins ("Machine_Emax", Machine_Emax_Token); Ins ("Machine_Emin", Machine_Emin_Token); Ins ("Machine_Mantissa", Machine_Mantissa_Token); Ins ("Machine_Overflows", Machine_Overflows_Token); Ins ("Machine_Radix", Machine_Radix_Token); Ins ("Machine_Rounds", Machine_Rounds_Token); Ins ("Mantissa", Mantissa_Token); Ins ("Pos", Pos_Token); Ins ("Pred", Pred_Token); --# assert True; Ins ("Range", Range_Token); Ins ("Safe_Emax", Safe_Emax_Token); Ins ("Safe_Large", Safe_Large_Token); Ins ("Safe_Small", Safe_Small_Token); Ins ("Size", Size_Token); Ins ("Small", Small_Token); Ins ("Succ", Succ_Token); Ins ("Val", Val_Token); Ins ("Left", Left_Token); Ins ("Right", Right_Token); --# assert True; Ins ("True", True_Token); Ins ("False", False_Token); Ins ("0", Zero_Value); Ins ("1", One_Value); Ins ("superindex", Super_Index_Token); Ins ("Interface", Interface_Token); Ins ("Import", Import_Token); Ins ("Link_Name", Link_Name_Token); Ins ("External_Name", External_Name_Token); Ins ("Entity", Entity_Token); --# assert True; Ins ("Convention", Convention_Token); Ins ("Elaborate_Body", Elaborate_Body_Token); Ins ("Ada", Ada_Token); Ins ("SPARK", SPARK_Token); Ins ("Denorm", Denorm_Token); Ins ("Model_Emin", Model_Emin_Token); Ins ("Model_Epsilon", Model_Epsilon_Token); Ins ("Model_Mantissa", Model_Mantissa_Token); Ins ("Model_Small", Model_Small_Token); Ins ("Safe_First", Safe_First_Token); Ins ("Safe_Last", Safe_Last_Token); --# assert True; Ins ("Component_Size", Component_Size_Token); Ins ("Min", Min_Token); Ins ("Max", Max_Token); Ins ("Signed_Zeros", Signed_Zeros_Token); Ins ("Valid", Valid_Token); Ins ("Characters", Characters_Token); Ins ("Latin_1", Latin_1_Token); Ins ("Adjacent", Adjacent_Token); Ins ("Compose", Compose_Token); Ins ("Copy_Sign", Copy_Sign_Token); --# assert True; Ins ("Leading_Part", Leading_Part_Token); Ins ("Remainder", Remainder_Token); Ins ("Scaling", Scaling_Token); Ins ("Ceiling", Ceiling_Token); Ins ("Exponent", Exponent_Token); Ins ("Floor", Floor_Token); Ins ("Fraction", Fraction_Token); Ins ("Machine", Machine_Token); Ins ("Model", Model_Token); Ins ("Rounding", Rounding_Token); --# assert True; Ins ("Truncation", Truncation_Token); Ins ("Unbiased_Rounding", Unbiased_Rounding_Token); Ins ("Address", Address_Token); Ins ("Modulus", Modulus_Token); Ins ("Tail", Tail_Token); Ins ("Append", Append_Token); Ins ("System", System_Token); Ins ("Min_Int", Min_Int_Token); Ins ("Max_Int", Max_Int_Token); Ins ("Max_Binary_Modulus", Max_Binary_Modulus_Token); --# assert True; Ins ("Max_Base_Digits", Max_Base_Digits_Token); Ins ("Max_Digits", Max_Digits_Token); Ins ("Max_Mantissa", Max_Mantissa_Token); Ins ("Fine_Delta", Fine_Delta_Token); Ins 
("Null_Address", Null_Address_Token); Ins ("Storage_Unit", Storage_Unit_Token); Ins ("Word_Size", Word_Size_Token); Ins ("Any_Priority", Any_Priority_Token); Ins ("Priority", Priority_Token); Ins ("Interrupt_Priority", Interrupt_Priority_Token); --# assert True; Ins ("Default_Priority", Default_Priority_Token); Ins ("Atomic", Atomic_Token); Ins ("Real_Time", Real_Time_Token); Ins ("inherit", Inherit_Token); Ins ("Synchronous_Task_Control", Synchronous_Task_Control_Token); Ins ("Attach_Handler", Attach_Handler_Token); Ins ("Interrupt_Handler", Interrupt_Handler_Token); Ins ("Interrupts", Interrupts_Token); Ins ("Access", Access_Token); Ins ("Atomic_Components", Atomic_Components_Token); --# assert True; Ins ("Volatile_Components", Volatile_Components_Token); Ins ("main_program", Main_Program_Token); Ins ("assert", Assert_Token); Ins ("overriding", Overriding_Token); Ins ("Unchecked_Conversion", Unchecked_Conversion_Token); Ins ("Rule", Rule_Token); Ins ("NoRule", No_Rule_Token); Ins ("Always_Valid", Always_Valid_Token); --# assert True; Ins ("Bit_Order", Bit_Order_Token); Ins ("High_Order_First", High_Order_First_Token); Ins ("Low_Order_First", Low_Order_First_Token); Ins ("Default_Bit_Order", Default_Bit_Order_Token); --# assert True; Ins ("All_Calls_Remote", All_Calls_Remote_Token); Ins ("Asynchronous", Asynchronous_Token); Ins ("Controlled", Controlled_Token); Ins ("Discard_Names", Discard_Names_Token); Ins ("Elaborate", Elaborate_Token); Ins ("Elaborate_All", Elaborate_All_Token); Ins ("Export", Export_Token); Ins ("Inline", Inline_Token); Ins ("Inspection_Point", Inspection_Point_Token); Ins ("Linker_Options", Linker_Options_Token); Ins ("List", List_Token); Ins ("Locking_Policy", Locking_Policy_Token); Ins ("Normalize_Scalars", Normalize_Scalars_Token); Ins ("Optimize", Optimize_Token); Ins ("Pack", Pack_Token); Ins ("Page", Page_Token); Ins ("Preelaborate", Preelaborate_Token); Ins ("Pure", Pure_Token); Ins ("Queueing_Policy", Queueing_Policy_Token); Ins ("Remote_Call_Interface", Remote_Call_Interface_Token); Ins ("Remote_Types", Remote_Types_Token); Ins ("Restrictions", Restrictions_Token); Ins ("Reviewable", Reviewable_Token); Ins ("Shared_Passive", Shared_Passive_Token); Ins ("Storage_Size", Storage_Size_Token); Ins ("Suppress", Suppress_Token); Ins ("Task_Dispatching_Policy", Task_Dispatching_Policy_Token); Ins ("Volatile", Volatile_Token); --# assert True; Ins ("Memory_Size", Memory_Size_Token); Ins ("Shared", Shared_Token); Ins ("System_Name", System_Name_Token); -- Ada2005 Ins ("Mod", Mod_Token); Ins ("Machine_Rounding", Machine_Rounding_Token); Ins ("Priority_Last", Priority_Last_Token); Ins ("Standard", Standard_Token); Ins ("Integer", Integer_Token); Ins ("Float", Float_Token); Ins ("Seconds_Count", Seconds_Count_Token); Ins ("Interrupt_ID", Interrupt_ID_Token); -- Dictionary types Ins ("Universal_Integer", Universal_Integer_Token); Ins ("Universal_Real", Universal_Real_Token); Ins ("Universal_Fixed", Universal_Fixed_Token); Ins ("Character", Character_Token); Ins ("Boolean", Boolean_Token); Ins ("Duration", Duration_Token); Ins ("String", String_Token); Ins ("Natural", Natural_Token); Ins ("Positive", Positive_Token); Ins ("Interfaces", Interfaces_Token); -------------------------------------------------------------- -- Insert new lex strings here -- Remember to adjust Next_Vacant_Token in lextokenmanager.ads -------------------------------------------------------------- if Next_Vacant_Token /= String_Table.Next_Vacant then SystemErrors.Fatal_Error (Sys_Err => 
SystemErrors.Invalid_Init, Msg => "Initialise_String_Table : Bad value for Next_Vacant_Token"); end if; end Initialise_String_Table; procedure Report_Usage --# global in String_Table; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage from *, --# String_Table; is begin -- table fills from bottom up with no reclamation, so Next_Vacant indicates -- the amount of space used Statistics.SetTableUsage (Statistics.StringTable, Integer (String_Table.Next_Vacant - 1)); end Report_Usage; procedure Insert_Nat (N : in Natural; Lex_Str : out Lex_String) --# global in out String_Table; --# derives Lex_Str, --# String_Table from N, --# String_Table; is separate; function Is_Standard_Token (Lex_Str : Lex_String) return Boolean --# global in The_Last_Token; is begin return Lex_Str <= The_Last_Token; end Is_Standard_Token; procedure Set_Last_Token --# global in String_Table; --# in out The_Last_Token; --# derives The_Last_Token from *, --# String_Table; is begin if The_Last_Token /= Next_Vacant_Token then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "LEXTOKENMANAGER.SET_LAST_TOKEN : THE_LAST_TOKEN ALREADY INITIALISED"); else The_Last_Token := String_Table.Next_Vacant - 1; while String_Table.Contents (The_Last_Token - 1) /= Ada.Characters.Latin_1.NUL loop The_Last_Token := The_Last_Token - 1; end loop; end if; end Set_Last_Token; begin String_Table.Next_Vacant := 1; String_Table.Hash_Table := Hash_Table_Struct'(others => Null_String); String_Table.Contents := Table_Contents'(others => Ada.Characters.Latin_1.NUL); The_Last_Token := Next_Vacant_Token; end LexTokenManager; spark-2012.0.deb/examiner/errorhandler-conversions-tostring-noerr.adb0000644000175000017500000000465011753202336024755 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (ErrorHandler.Conversions.ToString) procedure NoErr (Err_Num : in Error_Types.NumericError; E_Str : in out E_Strings.T) is begin E_Strings.Append_String (E_Str => E_Str, Str => "Flow analysis of"); if Err_Num.Name1 = Error_Types.NoName then E_Strings.Append_String (E_Str => E_Str, Str => " package initialization"); elsif Err_Num.Name1 = Error_Types.ThePartitionName then E_Strings.Append_String (E_Str => E_Str, Str => " the entire partition"); else E_Strings.Append_String (E_Str => E_Str, Str => " subprogram "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); end if; E_Strings.Append_String (E_Str => E_Str, Str => " performed"); if Err_Num.ErrorNum = ErrorHandler.No_Error_Info_Flow then E_Strings.Append_String (E_Str => E_Str, Str => " (information-flow mode)"); elsif Err_Num.ErrorNum = ErrorHandler.No_Error_Data_Flow then E_Strings.Append_String (E_Str => E_Str, Str => " (data-flow mode)"); end if; E_Strings.Append_String (E_Str => E_Str, Str => ": no errors found"); E_Strings.Append_String (E_Str => E_Str, Str => "."); end NoErr; spark-2012.0.deb/examiner/sparklex-lex-getident.adb0000644000175000017500000001002411753202336021136 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (SparkLex.Lex) procedure GetIdent (Curr_Line : in out Line_Context; Token : out SP_Symbols.SP_Terminal) -- Given that the current position in the line buffer, CurrLine, indexes a -- letter, GetIdent forward scans the input line buffer to locate the end of -- the identifier. -- The extent of the identifier is determined by the -- maximal length substring starting at the current position accepted by one -- of the following two regular expressions :- -- identifier ::= letter {[underline] letter_or_digit} -- illegal_id ::= identifier non_id_separator {non_id_separator} -- where -- non_id_separator is any character which cannot legally separate an -- identifier from the next token in the Ada Lexis. -- The expression which scans the longest substring is the one that is chosen. -- The value of Token is set according to which expression was used. -- GetIdent selects the correct expression without backtracking. 
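-- A worked example of the classification above (added for clarity; the
-- strings are hypothetical, not taken from the original source):
--   "Count", "Line_22", "Max_Value2"  are accepted by the first expression
--                                     and yield SP_Symbols.identifier;
--   "Bad__Name" (double underline) and "Trailing_" (underline not followed
--                                     by a letter or digit) fall through to
--                                     the second expression and yield
--                                     SP_Symbols.illegal_id.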
is type Id_State is (Underline_Letter_Or_Digit, Letter_Or_Digit_Only, Illegal_State); State : Id_State; Ch : Character; function Id_Separator (Ch : Character) return Boolean is begin return Ch = ' ' or else Ch = End_Of_Text or else Format_Effector (Ch => Ch) or else Simple_Delimiter (Ch => Ch); end Id_Separator; begin LineManager.Accept_Char (Curr_Line => Curr_Line); -- The first letter has already been parsed State := Underline_Letter_Or_Digit; loop --# assert E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Curr_Pos > Curr_Line~.Curr_Pos and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos = Curr_Line.Curr_Pos and --# Curr_Line.Last_Token_Pos = Curr_Line~.Last_Token_Pos and --# State in Id_State; Ch := E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos); exit when Id_Separator (Ch => Ch); LineManager.Accept_Char (Curr_Line => Curr_Line); case State is when Underline_Letter_Or_Digit => -- Modified following to eliminate reference to tilde and -- also to disallow reference to double underbar in proof context -- this second reverses an undocumented change in the tool if Letter_Or_Digit (Ch => Ch) then null; elsif Ch = '_' then State := Letter_Or_Digit_Only; else State := Illegal_State; end if; when Letter_Or_Digit_Only => if Letter_Or_Digit (Ch => Ch) then State := Underline_Letter_Or_Digit; else State := Illegal_State; end if; when Illegal_State => null; end case; end loop; case State is when Underline_Letter_Or_Digit => Token := SP_Symbols.identifier; when Letter_Or_Digit_Only => Token := SP_Symbols.illegal_id; when Illegal_State => Token := SP_Symbols.illegal_id; end case; end GetIdent; spark-2012.0.deb/examiner/sem-compunit-walkstatements-wf_return.adb0000644000175000017500000001322711753202336024423 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.CompUnit.WalkStatements) procedure Wf_Return (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Table : in out RefList.HashTable; Component_Data : in out ComponentManager.ComponentData) is Subprog_Sym, Expected_Type : Dictionary.Symbol; Result_Type : Exp_Record; Ref_Var : SeqAlgebra.Seq; Child : STree.SyntaxNode; begin -- conditions of Section 5.8 of the SPARK definition apply to this statement. 
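-- Illustrative summary of the checks that follow (not part of the original
-- source; F and X are hypothetical names): a return statement is rejected
-- outright in a procedure (Return_In_Proc), and in a function it must be the
-- last statement of the outermost sequence, so for example
--   function F (X : Integer) return Integer is
--   begin
--      if X > 0 then
--         return 1;   -- Misplaced_Return: nested inside an if_statement
--      end if;
--      return 0;       -- accepted: last statement of the function body
--   end F;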
SeqAlgebra.CreateSeq (TheHeap, Ref_Var); Subprog_Sym := Dictionary.GetEnclosingCompilationUnit (Scope); if Dictionary.IsProcedure (Subprog_Sym) then ErrorHandler.Control_Flow_Error (Err_Type => ErrorHandler.Return_In_Proc, Position => Node_Position (Node => Node)); else -- subprogram is a function, check location of return within it case Syntax_Node_Type (Node => Parent_Of_Sequence (Node => Node)) is when SP_Symbols.if_statement | SP_Symbols.elsif_part | SP_Symbols.else_part | SP_Symbols.loop_statement | SP_Symbols.case_statement_alternative | SP_Symbols.others_part => ErrorHandler.Control_Flow_Error (Err_Type => ErrorHandler.Misplaced_Return, Position => Node_Position (Node => Node)); when others => if not Is_Last_In_Sequence (Node => Node) then ErrorHandler.Control_Flow_Error (Err_Type => ErrorHandler.Misplaced_Return, Position => Node_Position (Node => Node)); end if; end case; -- seed syntax tree with expected type for run-time check Expected_Type := Dictionary.GetType (Subprog_Sym); STree.Add_Node_Symbol (Node => Node, Sym => Expected_Type); Child := Child_Node (Current_Node => Node); -- ASSUME Child = expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Child) = SP_Symbols.expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child = expression in Wf_Return"); -- now check return type is correct Walk_Expression_P.Walk_Expression (Exp_Node => Child, Scope => Scope, Type_Context => Expected_Type, Context_Requires_Static => False, Ref_Var => Ref_Var, Result => Result_Type, Component_Data => Component_Data, The_Heap => TheHeap); Assignment_Check (Position => Node_Position (Node => Node), Scope => Scope, Target_Type => Expected_Type, Exp_Result => Result_Type); -- if expression represents an IN stream variable then issue warning about -- possible validity problems if it is not marked valid if Result_Type.Is_AVariable and then Dictionary.GetOwnVariableOrConstituentMode (Result_Type.Variable_Symbol) = Dictionary.InMode then -- Check to see if the variable has been marked valid. Note that the -- Other_Symbol is checked, not the Variable_Symbol, since this will be -- the Subcomponent symbol if we are referring to a record component if Dictionary.VariableOrSubcomponentIsMarkedValid (Result_Type.Other_Symbol) then -- MCA: do we also need to add a use of 'Valid to the summary?
-- Debug.PrintSym ("Return Access is Valid =", Result_Type.OtherSymbol); null; else -- The warning is stronger when the external variable is a type that doesn't -- generate run-time checks if Dictionary.TypeIsScalar (Result_Type.Type_Symbol) and then not Dictionary.TypeIsBoolean (Result_Type.Type_Symbol) then -- weaker warning ErrorHandler.Semantic_Warning_Sym (Err_Num => 392, Position => Node_Position (Node => Child), Sym => Result_Type.Other_Symbol, Scope => Scope); else -- stronger warning ErrorHandler.Semantic_Warning_Sym (Err_Num => 393, Position => Node_Position (Node => Child), Sym => Result_Type.Other_Symbol, Scope => Scope); end if; end if; end if; end if; -- add reference variable list to RefList hash table RefList.AddRelation (Table, TheHeap, Node, Dictionary.NullSymbol, Ref_Var); end Wf_Return; spark-2012.0.deb/examiner/sp_relations-sp_terminal_like.adb0000644000175000017500000000252311753202336022744 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- packed version of SPTerminalLike. separate (SP_Relations) function SP_Terminal_Like (Sym : SP_Symbols.SP_Symbol) return Boolean is --# hide SP_Terminal_Like; Result : Boolean; begin if Sym in SP_Symbols.SP_Terminal then Result := True; else Result := Boolean'Val ((Rel_Tab (Sym) / Terminal_Like) mod Term_Like_Lim); end if; return Result; end SP_Terminal_Like; spark-2012.0.deb/examiner/sem-wf_property_list.adb0000644000175000017500000022045211753202336021123 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with SLI; with SystemErrors; separate (Sem) procedure Wf_Property_List (Node : in STree.SyntaxNode; Type_Node_Pos : in LexTokenManager.Token_Position; Scope : in Dictionary.Scopes; The_Owner : in Dictionary.Symbol; The_Heap : in out Heap.HeapRecord) is Property_Node : STree.SyntaxNode; It : STree.Iterator; -------------------------------------------------------------------------------------- procedure Wf_Interrupt_Property (Name_Node : in STree.SyntaxNode; Value_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; The_Own_Variable : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in STree.Table; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Own_Variable, --# Value_Node & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Name_Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Own_Variable, --# Value_Node & --# LexTokenManager.State from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Scope, --# The_Own_Variable & --# SLI.State from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Name_Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Own_Variable, --# Value_Node & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Name_Node, --# Scope, --# SLI.State, --# STree.Table, --# The_Own_Variable, --# Value_Node; --# pre Syntax_Node_Type (Name_Node, STree.Table) = SP_Symbols.identifier and --# (Syntax_Node_Type (Value_Node, STree.Table) = SP_Symbols.annotation_aggregate_or_expression or --# Value_Node = STree.NullNode); is Consistent : Boolean; Unused_Value_Rep : LexTokenManager.Lex_String; It : STree.Iterator; Next_Node : STree.SyntaxNode; -------------------------------------------------------------------------------------- procedure Process_One_Element (Node : in STree.SyntaxNode; The_Owner : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in STree.Table; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# LexTokenManager.State, --# Node, --# STree.Table, --# The_Owner & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Owner; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_aggregate_choice_rep; is It : Dictionary.Iterator; Handler_Lex_Str : LexTokenManager.Lex_String; Interrupt_Stream_Lex_Str : 
LexTokenManager.Lex_String; OK_To_Add : Boolean; begin -- Extract the handler name Handler_Lex_Str := Node_Lex_String (Node => Get_Node (It => Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Node, In_Direction => STree.Down))); -- Extract the stream name Interrupt_Stream_Lex_Str := Node_Lex_String (Node => Get_Node (It => Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Next_Sibling (Node), In_Direction => STree.Down))); -- Check the handler is not already in the list OK_To_Add := True; It := Dictionary.FirstInterruptStreamMapping (The_Owner); while OK_To_Add and not Dictionary.IsNullIterator (It) loop OK_To_Add := LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Dictionary.GetInterruptStreamMappingHandler (Dictionary.CurrentSymbol (It)), Lex_Str2 => Handler_Lex_Str) /= LexTokenManager.Str_Eq; It := Dictionary.NextSymbol (It); end loop; if OK_To_Add then -- This is not a duplicate -- Record the mapping of handler onto stream Dictionary.AddInterruptStreamMapping (Subject => The_Owner, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Node), End_Position => Node_Position (Node => Node)), TheInterruptHandler => Handler_Lex_Str, TheInterruptStream => Interrupt_Stream_Lex_Str); -- If the protected type is declared in another package we can create the -- implicit interrupt stream variable now. if Dictionary.Is_Declared (Item => Dictionary.GetType (The_Owner)) then Create_Interrupt_Stream_Variable (For_PO => The_Owner, The_Handler => Handler_Lex_Str, The_Stream_Variable => Interrupt_Stream_Lex_Str, Error_Node_Pos => Node_Position (Node => Node)); end if; else -- This is a duplicate ErrorHandler.Semantic_Error (Err_Num => 956, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Handler_Lex_Str); end if; end Process_One_Element; begin -- Wf_Interrupt_Property if not (Dictionary.IsOwnVariable (The_Own_Variable) and then Dictionary.GetOwnVariableProtected (The_Own_Variable) and then Dictionary.OwnVariableHasType (The_Own_Variable, Scope)) then -- The interrupt property can only apply to protected own variables that -- are type announced. 
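-- Illustrative sketch (hypothetical names; the exact annotation shape is
-- indicative only): an own variable announcement such as
--   --# own protected Device : Device_T (Interrupt => (Handler_1 => Stream_1));
-- reaches the "else" branch below, whereas attaching the interrupt property
-- to an own variable that is not protected, or that has no type
-- announcement, takes this branch and reports semantic error 936.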
ErrorHandler.Semantic_Error (Err_Num => 936, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Parent_Node (Current_Node => Name_Node)), Id_Str => Dictionary.GetSimpleName (The_Own_Variable)); else Check_Interrupt_Property_Consistency (Has_Interrupt_Property => True, Sym => The_Own_Variable, Type_Sym => Dictionary.GetType (The_Own_Variable), Error_Node_Pos => Node_Position (Node => Name_Node), Consistent => Consistent); if Consistent then if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Dictionary.GetPriorityProperty (The_Own_Variable), Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq then --# accept Flow, 10, Unused_Value_Rep, "Expected ineffective assignment"; Check_Priority_Range (Error_Sym => The_Own_Variable, Scope => Scope, Pragma_Kind => Dictionary.AttachHandler, Err_Pos => Node_Position (Node => Parent_Node (Current_Node => Name_Node)), Value => Maths.ValueRep (Dictionary.GetPriorityProperty (The_Own_Variable)), Value_Rep => Unused_Value_Rep); --# end accept; end if; Dictionary.SetHasInterruptProperty (The_Own_Variable); -- Parse the interrupt stream list if there is one if Syntax_Node_Type (Node => Value_Node) = SP_Symbols.annotation_aggregate_or_expression then It := Find_First_Node (Node_Kind => SP_Symbols.annotation_aggregate_choice_rep, From_Root => Value_Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.annotation_aggregate_choice_rep and --# Next_Node = Get_Node (It); Process_One_Element (Node => Next_Node, The_Owner => The_Own_Variable); It := STree.NextNode (It); end loop; end if; end if; end if; if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Interrupt (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Value_Node, Scope => Scope); end if; --# accept Flow, 33, Unused_Value_Rep, "Expected to be neither referenced nor exported"; end Wf_Interrupt_Property; -------------------------------------------------------------------------------------- procedure Wf_Name_Property (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; The_Owner : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in STree.Table; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Owner & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Owner & --# LexTokenManager.State from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Node, --# Scope, --# STree.Table, --# The_Owner & --# SLI.State from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Owner & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# 
ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# STree.Table, --# The_Owner; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.name_property; is Name_Node : STree.SyntaxNode; Name_Str : E_Strings.T; -------------------------------------------------------------------------------------- procedure Wf_Suspendable_Property (Node_Pos : in LexTokenManager.Token_Position; Scope : in Dictionary.Scopes; The_Own_Variable : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# CommandLineData.Content, --# Scope, --# The_Own_Variable & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# Scope, --# SPARK_IO.File_Sys, --# The_Own_Variable; is Type_Sym : Dictionary.Symbol; Consistent : Boolean; begin if Dictionary.IsOwnVariable (The_Own_Variable) and then Dictionary.OwnVariableHasType (The_Own_Variable, Scope) then Type_Sym := Dictionary.GetType (The_Own_Variable); else Type_Sym := Dictionary.GetUnknownTypeMark; end if; Check_Suspendable_Property_Consistency (Sym => The_Own_Variable, Type_Sym => Type_Sym, Is_In_Suspends_List => True, Error_Node_Pos => Node_Pos, Consistent => Consistent); if Consistent then Dictionary.SetIsSuspendable (The_Own_Variable); end if; end Wf_Suspendable_Property; -------------------------------------------------------------------------------------- procedure Wf_Delay_Property (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; The_Procedure : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in STree.Table; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# Scope, --# The_Procedure & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Procedure; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.RWdelay; is begin if not Dictionary.IsProcedure (The_Procedure) then -- Only procedures can have the delay property. ErrorHandler.Semantic_Error (Err_Num => 918, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Parent_Node (Current_Node => Node)), Id_Str => Dictionary.GetSimpleName (The_Procedure)); elsif Dictionary.IsOrIsInProtectedScope (Scope) then -- Blocking annotation not allowed in protected scope. ErrorHandler.Semantic_Error (Err_Num => 908, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); elsif Dictionary.HasDelayProperty (The_Procedure) then -- The applicable compilation unit already has a delay statement. 
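-- Illustrative sketch (hypothetical; the concrete annotation shape is
-- indicative only): a potentially blocking procedure would typically name
-- the delay property in its declare annotation, e.g.
--   --# declare delay;
-- Applying the property to something other than a procedure reports 918,
-- using it inside a protected scope reports 908, and repeating it for a
-- unit that already has the property lands here and reports 888.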
ErrorHandler.Semantic_Error (Err_Num => 888, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Parent_Node (Current_Node => Node)), Id_Str => Dictionary.GetSimpleName (The_Procedure)); else Dictionary.SetHasDelayProperty (The_Procedure); end if; end Wf_Delay_Property; begin -- Wf_Name_Property Name_Node := Child_Node (Current_Node => Node); -- ASSUME Name_Node = RWdelay OR identifier if Syntax_Node_Type (Node => Name_Node) = SP_Symbols.RWdelay then -- ASSUME Name_Node = RWdelay Wf_Delay_Property (Node => Name_Node, Scope => Scope, The_Procedure => The_Owner); elsif Syntax_Node_Type (Node => Name_Node) = SP_Symbols.identifier then -- ASSUME Name_Node = identifier Name_Str := LexTokenManager.Lex_String_To_String (Lex_Str => Node_Lex_String (Node => Name_Node)); if E_Strings.Eq1_String (E_Str => Name_Str, Str => "suspendable") then Wf_Suspendable_Property (Node_Pos => Node_Position (Node => Name_Node), Scope => Scope, The_Own_Variable => The_Owner); elsif E_Strings.Eq1_String (E_Str => Name_Str, Str => "interrupt") then Wf_Interrupt_Property (Name_Node => Name_Node, Value_Node => STree.NullNode, Scope => Scope, The_Own_Variable => The_Owner); else ErrorHandler.Semantic_Error (Err_Num => 921, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Node_Lex_String (Node => Name_Node)); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Name_Node = RWdelay OR identifier in Wf_Name_Property"); end if; end Wf_Name_Property; -------------------------------------------------------------------------------------- procedure Wf_Name_Value_Property (Node : in STree.SyntaxNode; Type_Node_Pos : in LexTokenManager.Token_Position; Scope : in Dictionary.Scopes; The_Owner : in Dictionary.Symbol; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Statistics.TableUsage, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# Dictionary.Dict, --# LexTokenManager.State, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap, --# The_Owner & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap, --# The_Owner, --# Type_Node_Pos; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.name_value_property; --# post STree.Table = STree.Table~; is Name_Node : STree.SyntaxNode; Value_Node : STree.SyntaxNode; Name_Str : E_Strings.T; -------------------------------------------------------------------------------------- procedure Wf_Priority_Property (Name_Node : in STree.SyntaxNode; Value_Node : in STree.SyntaxNode; The_Owner : in 
Dictionary.Symbol; Scope : in Dictionary.Scopes; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Heap, --# Value_Node & --# Dictionary.Dict, --# LexTokenManager.State from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Heap, --# The_Owner, --# Value_Node & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Name_Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap, --# The_Owner, --# Value_Node & --# SLI.State from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap, --# Value_Node; --# pre Syntax_Node_Type (Name_Node, STree.Table) = SP_Symbols.identifier and --# Syntax_Node_Type (Value_Node, STree.Table) = SP_Symbols.annotation_aggregate_or_expression; --# post STree.Table = STree.Table~; is Var_Type : Dictionary.Symbol; Value_Rep : LexTokenManager.Lex_String; Expression_Node : STree.SyntaxNode; Valid : Boolean; -------------------------------------------------------------------------------------- function Determine_Pragma_Kind (The_Owner : Dictionary.Symbol) return Dictionary.RavenscarPragmas --# global in Dictionary.Dict; is Result : Dictionary.RavenscarPragmas; begin if Dictionary.IsOwnVariable (The_Owner) and then Dictionary.GetHasInterruptProperty (The_Owner) then Result := Dictionary.AttachHandler; else Result := Dictionary.InterruptPriority; end if; return Result; end Determine_Pragma_Kind; begin -- Wf_Priority_Property Expression_Node := Child_Node (Current_Node => Value_Node); -- ASSUME Expression_Node = annotation_aggregate OR annotation_expression if Syntax_Node_Type (Node => Expression_Node) = SP_Symbols.annotation_aggregate then -- ASSUME Expression_Node = annotation_aggregate -- The grammar allows an aggregate here but for the priority property -- value we must have a static expression. 
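-- Illustrative contrast (hypothetical names; annotation shape indicative
-- only):
--   --# own protected State : Monitor_T (Priority => 10);
-- supplies the required static expression, whereas something like
--   --# own protected State : Monitor_T (Priority => (1, 2));
-- parses as an aggregate and is rejected here with semantic error 945.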
ErrorHandler.Semantic_Error (Err_Num => 945, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Value_Node), Id_Str => Node_Lex_String (Node => Name_Node)); elsif Syntax_Node_Type (Node => Expression_Node) = SP_Symbols.annotation_expression then -- ASSUME Expression_Node = annotation_expression Wf_Priority_Value (Node => Expression_Node, Pragma_Kind => Determine_Pragma_Kind (The_Owner => The_Owner), Error_Sym => The_Owner, Scope => Scope, The_Heap => The_Heap, Value_Rep => Value_Rep, Compatible => Valid); if Valid then if Dictionary.IsOwnVariable (The_Owner) and then Dictionary.OwnVariableHasType (The_Owner, Scope) then Var_Type := Dictionary.GetType (The_Owner); else Var_Type := Dictionary.GetUnknownTypeMark; end if; Check_Priority_Property_Consistency (Sym => The_Owner, Type_Sym => Var_Type, Priority_Property_Value => Value_Rep, Error_Node_Pos => Node_Position (Node => Name_Node), Consistent => Valid); if Valid then Dictionary.SetPriorityProperty (OwnVariable => The_Owner, TheValue => Value_Rep); end if; end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Expression_Node = annotation_aggregate OR annotation_expression in Wf_Priority_Property"); end if; end Wf_Priority_Property; -------------------------------------------------------------------------------------- procedure Wf_Suspends_Property (Name_Node_Pos : in LexTokenManager.Token_Position; Value_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; The_Owner : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Owner, --# Value_Node & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Name_Node_Pos, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Owner, --# Value_Node & --# SLI.State from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Name_Node_Pos, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Owner, --# Value_Node & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Name_Node_Pos, --# Scope, --# SLI.State, --# STree.Table, --# The_Owner, --# Value_Node; --# pre Syntax_Node_Type (Value_Node, STree.Table) = SP_Symbols.annotation_aggregate_or_expression; --# post STree.Table = STree.Table~; is It : STree.Iterator; Scope_To_Check : Dictionary.Scopes; Next_Node : STree.SyntaxNode; -------------------------------------------------------------------------------------- procedure Process_One_PO_Or_SO (Value_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; The_Owner : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# 
in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Owner, --# Value_Node & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Owner, --# Value_Node & --# STree.Table from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# Value_Node; --# pre Syntax_Node_Type (Value_Node, STree.Table) = SP_Symbols.annotation_primary; --# post STree.Table = STree.Table~; is Var_Sym : Dictionary.Symbol; Unused : Boolean; begin --# accept Flow, 10, Unused, "Expected ineffective assignment"; Wf_Entire_Variable (Node => Value_Node, Scope => Scope, Error_Hint => In_Suspends_List, Var_Sym => Var_Sym, Dotted => Unused); --# end accept; if not Dictionary.Is_Null_Symbol (Var_Sym) then if Dictionary.SuspendsOn (TheTaskOrProc => The_Owner, ThePOorSO => Var_Sym) then ErrorHandler.Semantic_Error_Sym (Err_Num => 890, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Value_Node), Sym => Var_Sym, Scope => Scope); elsif Dictionary.GetIsSuspendable (Var_Sym) then Dictionary.AddPOorSOToSuspendsList (TheTaskOrProc => The_Owner, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Value_Node), End_Position => Node_Position (Node => Value_Node)), ThePOorSO => Var_Sym); else -- This symbol cannot suspend. ErrorHandler.Semantic_Error_Sym (Err_Num => 909, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Value_Node), Sym => Var_Sym, Scope => Scope); end if; end if; --# accept Flow, 33, Unused, "Expected to be neither referenced nor exported"; end Process_One_PO_Or_SO; begin -- Wf_Suspends_Property if Dictionary.IsOwnTask (The_Owner) then Scope_To_Check := Scope; else Scope_To_Check := Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => The_Owner); end if; if not ((Dictionary.IsTypeMark (The_Owner) and then Dictionary.TypeIsTask (The_Owner)) or else (Dictionary.IsProcedure (The_Owner))) then -- The suspends list is not being applied to a valid construct. ErrorHandler.Semantic_Error (Err_Num => 920, Reference => ErrorHandler.No_Reference, Position => Name_Node_Pos, Id_Str => Dictionary.GetSimpleName (The_Owner)); elsif Dictionary.IsProcedure (The_Owner) and then Dictionary.IsOrIsInProtectedScope (Scope) then -- Blocking annotation not allowed in protected scope. 
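-- Illustrative sketch (hypothetical names; annotation shape indicative
-- only): a task type or potentially blocking procedure may list e.g.
--   Suspends => (SO_1, SO_2)
-- in its property list. Naming the property on anything else reports 920,
-- and naming it on a procedure inside a protected scope reports 908 (the
-- branch below); each listed item must itself be suspendable, otherwise
-- Process_One_PO_Or_SO reports 890 (duplicate) or 909 (not suspendable).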
ErrorHandler.Semantic_Error (Err_Num => 908, Reference => ErrorHandler.No_Reference, Position => Name_Node_Pos, Id_Str => LexTokenManager.Null_String); else It := Find_First_Node (Node_Kind => SP_Symbols.annotation_primary, From_Root => Value_Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.annotation_primary and --# Next_Node = Get_Node (It) and --# STree.Table = STree.Table~; Process_One_PO_Or_SO (Value_Node => Next_Node, Scope => Scope_To_Check, The_Owner => The_Owner); It := STree.NextNode (It); end loop; end if; if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Suspends_Protects (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Value_Node, Scope => Scope_To_Check); end if; end Wf_Suspends_Property; -------------------------------------------------------------------------------------- procedure Wf_Protects_Property (Name_Node_Pos : in LexTokenManager.Token_Position; Value_Node : in STree.SyntaxNode; Type_Node_Pos : in LexTokenManager.Token_Position; Scope : in Dictionary.Scopes; The_Own_Var : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Own_Var, --# Value_Node & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Name_Node_Pos, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Own_Var, --# Type_Node_Pos, --# Value_Node & --# SLI.State from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Name_Node_Pos, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Own_Var, --# Type_Node_Pos, --# Value_Node & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Name_Node_Pos, --# Scope, --# SLI.State, --# STree.Table, --# The_Own_Var, --# Type_Node_Pos, --# Value_Node; --# pre Syntax_Node_Type (Value_Node, STree.Table) = SP_Symbols.annotation_aggregate_or_expression; --# post STree.Table = STree.Table~; is The_Own_Var_Type : Dictionary.Symbol; It : STree.Iterator; Next_Node : STree.SyntaxNode; -------------------------------------------------------------------------------------- procedure Process_One_Element (Value_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; The_Own_Var : in Dictionary.Symbol; The_Own_Var_Type : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Own_Var, --# 
The_Own_Var_Type, --# Value_Node & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Own_Var, --# Value_Node & --# STree.Table from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# Value_Node; --# pre Syntax_Node_Type (Value_Node, STree.Table) = SP_Symbols.annotation_primary; --# post STree.Table = STree.Table~; is Protects_Item_Sym : Dictionary.Symbol; Is_Dotted : Boolean; begin Wf_Entire_Variable (Node => Value_Node, Scope => Scope, Error_Hint => In_Suspends_List, Var_Sym => Protects_Item_Sym, Dotted => Is_Dotted); if not Dictionary.Is_Null_Symbol (Protects_Item_Sym) then if not Dictionary.IsOwnVariable (Protects_Item_Sym) or else Is_Dotted or else Dictionary.GetOwnVariableProtected (Protects_Item_Sym) then -- Items in protects properties must be local, unprotected own variables. ErrorHandler.Semantic_Error_Sym (Err_Num => 943, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Value_Node), Sym => Protects_Item_Sym, Scope => Scope); elsif Dictionary.GetOwnVariableMode (The_Own_Var) /= Dictionary.DefaultMode and then Dictionary.GetOwnVariableMode (The_Own_Var) /= Dictionary.GetOwnVariableMode (Protects_Item_Sym) then -- Refinement constituent mode mismatch ErrorHandler.Semantic_Error_Sym (Err_Num => 701, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Value_Node), Sym => Protects_Item_Sym, Scope => Scope); elsif Dictionary.IsVirtualElement (Protects_Item_Sym) then -- This item has already appeared in another protects list. ErrorHandler.Semantic_Error_Sym2 (Err_Num => 944, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Value_Node), Sym => Protects_Item_Sym, Sym2 => Dictionary.GetVirtualElementOwner (Protects_Item_Sym), Scope => Scope); else -- Add this variable to the protects list for the type. Dictionary.AddVirtualElement (ToProtectedType => The_Own_Var_Type, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Value_Node), End_Position => Node_Position (Node => Value_Node)), TheVirtualElement => Protects_Item_Sym, TheOwner => The_Own_Var); end if; end if; end Process_One_Element; -------------------------------------------------------------------------------------- function Value_Is_Correct_Format (Value_Node : STree.SyntaxNode) return Boolean --# global in STree.Table; --# pre Syntax_Node_Type (Value_Node, STree.Table) = SP_Symbols.annotation_aggregate_or_expression; -- The allowable values are: -- (1) protects => X -- (2) protects => (X) -- (3) protects => (X, Y, Z) -- Hence the only branch allowed in the syntax tree for the above is at nodes -- of type annotation_positional_association_rep vis (3). -- However, we also allow -- (4) protects => X.Y -- i.e. a branch at nodes of type annotation_selected_component. -- This ensures we get the more meaningful error message (943). 
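-- Clarifying note (not from the original source): forms (1) to (4) above
-- only ever introduce branch nodes of the two kinds tested in the loop
-- below, so any other shape of aggregate, such as one using named
-- association, makes this function return False and Wf_Protects_Property
-- then reports semantic error 961.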
is It : STree.Iterator; Result : Boolean := True; begin It := Find_First_Branch_Node (From_Root => Value_Node, In_Direction => STree.Down); while not STree.IsNull (It) loop if Syntax_Node_Type (Node => Get_Node (It => It)) /= SP_Symbols.annotation_positional_association_rep and then Syntax_Node_Type (Node => Get_Node (It => It)) /= SP_Symbols.annotation_selected_component then Result := False; exit; end if; It := STree.NextNode (It); end loop; return Result; end Value_Is_Correct_Format; begin -- Wf_Protects_Property if Dictionary.IsOwnVariable (The_Own_Var) and then Dictionary.GetOwnVariableProtected (The_Own_Var) and then Dictionary.OwnVariableHasType (OwnVariable => The_Own_Var, Scope => Scope) then The_Own_Var_Type := Dictionary.GetRootType (Dictionary.GetType (The_Own_Var)); if Dictionary.Is_Declared (Item => The_Own_Var_Type) then -- The type of the own variable must be local to this package. ErrorHandler.Semantic_Error (Err_Num => 941, Reference => ErrorHandler.No_Reference, Position => Type_Node_Pos, Id_Str => Dictionary.GetSimpleName (The_Own_Var)); elsif not Value_Is_Correct_Format (Value_Node => Value_Node) then -- The format must be a simple expression or a position associated -- aggregate. ErrorHandler.Semantic_Error (Err_Num => 961, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Value_Node), Id_Str => Dictionary.GetSimpleName (The_Own_Var)); else It := Find_First_Node (Node_Kind => SP_Symbols.annotation_primary, From_Root => Value_Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.annotation_primary and --# Next_Node = Get_Node (It) and --# STree.Table = STree.Table~; Process_One_Element (Value_Node => Next_Node, Scope => Scope, The_Own_Var => The_Own_Var, The_Own_Var_Type => The_Own_Var_Type); It := STree.NextNode (It); end loop; end if; else -- The protects property must be applied to protected own variables ErrorHandler.Semantic_Error (Err_Num => 937, Reference => ErrorHandler.No_Reference, Position => Name_Node_Pos, Id_Str => Dictionary.GetSimpleName (The_Own_Var)); end if; if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Suspends_Protects (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Value_Node, Scope => Scope); end if; end Wf_Protects_Property; -------------------------------------------------------------------------------------- procedure Wf_Integrity_Property (Name_Str : in LexTokenManager.Lex_String; Value_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; The_Own_Variable : in Dictionary.Symbol; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Heap, --# Value_Node & --# Dictionary.Dict from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# LexTokenManager.State, --# Scope, --# STree.Table, --# The_Heap, --# The_Own_Variable, --# 
Value_Node & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Name_Str, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap, --# Value_Node & --# SLI.State from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap, --# Value_Node; --# pre Syntax_Node_Type (Value_Node, STree.Table) = SP_Symbols.annotation_aggregate_or_expression; --# post STree.Table = STree.Table~; is Expression_Node : STree.SyntaxNode; Type_Sym : Dictionary.Symbol; Result : Exp_Record; Result_Rep : LexTokenManager.Lex_String; Unused_Seq : SeqAlgebra.Seq; Unused_Component_Data : ComponentManager.ComponentData; -------------------------------------------------------------------------------------- function Is_Natural_Integer (V : in Maths.Value) return Boolean is Comp : Maths.Value; Result : Boolean; Unused : Maths.ErrorCode; begin if Maths.IsIntegerValue (V) then --# accept Flow, 10, Unused, "Expected ineffective assignment"; Maths.GreaterOrEqual (V, Maths.ZeroInteger, Comp, Unused); Maths.ValueToBool (Comp, Result, Unused); --# end accept; else Result := False; end if; --# accept Flow, 33, Unused, "Expected to be neither referenced nor exported"; return Result; end Is_Natural_Integer; begin -- Wf_Integrity_Property -- Value should be static expression, type Integer, and >= 0 Expression_Node := Child_Node (Current_Node => Value_Node); -- ASSUME Expression_Node = annotation_aggregate OR annotation_expression if Syntax_Node_Type (Node => Expression_Node) = SP_Symbols.annotation_aggregate then -- ASSUME Expression_Node = annotation_aggregate -- The grammar allows an aggregate here but for the integrity property -- value we must have a static expression. 
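-- Illustrative contrast (hypothetical names; annotation shape indicative
-- only):
--   Integrity => 2       -- accepted: static, non-negative, Integer-typed
--   Integrity => (1, 2)  -- an aggregate: rejected here with error 945
--   Integrity => V + 1   -- rejected with error 37 if V is a variable
-- A constant value of the wrong type, or a negative one, is reported with
-- error 882 further below.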
ErrorHandler.Semantic_Error (Err_Num => 945, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Value_Node), Id_Str => Name_Str); elsif Syntax_Node_Type (Node => Expression_Node) = SP_Symbols.annotation_expression then -- ASSUME Expression_Node = annotation_expression -- 1 walk the expression SeqAlgebra.CreateSeq (The_Heap, Unused_Seq); ComponentManager.Initialise (Unused_Component_Data); --# accept Flow, 10, Unused_Component_Data, "Expected ineffective assignment"; Walk_Expression_P.Walk_Annotation_Expression (Exp_Node => Expression_Node, Scope => Scope, Type_Context => Dictionary.GetPredefinedIntegerType, Context => Precondition, Result => Result, Component_Data => Unused_Component_Data, The_Heap => The_Heap); --# end accept; SeqAlgebra.DisposeOfSeq (The_Heap, Unused_Seq); -- 2 check it's constant if Result.Is_Constant then -- 3 check type Type_Sym := Dictionary.GetRootType (Result.Type_Symbol); if Dictionary.CompatibleTypes (Scope, Type_Sym, Dictionary.GetPredefinedIntegerType) and then -- 4 check >= 0 Is_Natural_Integer (V => Result.Value) then Maths.StorageRep (Result.Value, Result_Rep); Dictionary.SetIntegrityProperty (The_Own_Variable, Result_Rep); else ErrorHandler.Semantic_Error (Err_Num => 882, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Expression_Node), Id_Str => LexTokenManager.Null_String); end if; else ErrorHandler.Semantic_Error (Err_Num => 37, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Expression_Node), Id_Str => LexTokenManager.Null_String); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Expression_Node = annotation_aggregate OR annotation_expression in Wf_Integrity_Property"); end if; end Wf_Integrity_Property; begin -- Wf_Name_Value_Property Name_Node := Child_Node (Current_Node => Node); -- ASSUME Name_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Name_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Name_Node = identifier in Wf_Name_Value_Property"); Name_Str := LexTokenManager.Lex_String_To_String (Lex_Str => Node_Lex_String (Node => Name_Node)); Value_Node := Next_Sibling (Current_Node => Name_Node); -- ASSUME Value_Node = annotation_aggregate_or_expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Value_Node) = SP_Symbols.annotation_aggregate_or_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Value_Node = annotation_aggregate_or_expression in Wf_Name_Value_Property"); if E_Strings.Eq1_String (E_Str => Name_Str, Str => "priority") then Wf_Priority_Property (Name_Node => Name_Node, Value_Node => Value_Node, The_Owner => The_Owner, Scope => Scope, The_Heap => The_Heap); elsif E_Strings.Eq1_String (E_Str => Name_Str, Str => "suspends") then Wf_Suspends_Property (Name_Node_Pos => Node_Position (Node => Name_Node), Value_Node => Value_Node, Scope => Scope, The_Owner => The_Owner); elsif E_Strings.Eq1_String (E_Str => Name_Str, Str => "protects") then Wf_Protects_Property (Name_Node_Pos => Node_Position (Node => Name_Node), Value_Node => Value_Node, Type_Node_Pos => Type_Node_Pos, Scope => Scope, The_Own_Var => The_Owner); elsif E_Strings.Eq1_String (E_Str => Name_Str, Str => "interrupt") then Wf_Interrupt_Property (Name_Node => Name_Node, Value_Node => Value_Node, Scope => Scope, The_Own_Variable => The_Owner); elsif E_Strings.Eq1_String (E_Str => Name_Str, Str => "integrity") then Wf_Integrity_Property (Name_Str => 
Node_Lex_String (Node => Name_Node), Value_Node => Value_Node, Scope => Scope, The_Own_Variable => The_Owner, The_Heap => The_Heap); else ErrorHandler.Semantic_Error (Err_Num => 921, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Node_Lex_String (Node => Name_Node)); end if; end Wf_Name_Value_Property; begin -- Wf_Property_List It := Find_First_Node (Node_Kind => SP_Symbols.property, From_Root => Node, In_Direction => STree.Down); while not STree.IsNull (It) loop --# assert STree.Table = STree.Table~; -- ASSUME It = property Property_Node := Child_Node (Current_Node => Get_Node (It => It)); -- ASSUME Property_Node = name_property OR name_value_property if Syntax_Node_Type (Node => Property_Node) = SP_Symbols.name_property then -- ASSUME Property_Node = name_property Wf_Name_Property (Node => Property_Node, Scope => Scope, The_Owner => The_Owner); elsif Syntax_Node_Type (Node => Property_Node) = SP_Symbols.name_value_property then -- ASSUME Property_Node = name_value_property Wf_Name_Value_Property (Node => Property_Node, Type_Node_Pos => Type_Node_Pos, Scope => Scope, The_Owner => The_Owner, The_Heap => The_Heap); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Property_Node = name_property OR name_value_property in Wf_Property_List"); end if; It := STree.NextNode (It); end loop; end Wf_Property_List; spark-2012.0.deb/examiner/dag-buildgraph-modelassignmentstmt.adb0000644000175000017500000005535311753202336023705 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (DAG.BuildGraph) procedure ModelAssignmentStmt is AssignedVarCell, AssignedVarRoot, DAGRoot, StmtCell, MkAggregateCell, ModList : Cells.Cell; ExpnNode, VariableComponentNode : STree.SyntaxNode; StmtLabel : Labels.Label; StreamSymbol : Dictionary.Symbol; OthersAggregate : Boolean; -- Synthesize the DAG for mk__T(E), plugging in DAGRoot for E -- and the unconstrained array type name for T procedure CreateMkAggregateCell --# global in AssignedVarRoot; --# in DAGRoot; --# in Dictionary.Dict; --# in OthersAggregate; --# in out Statistics.TableUsage; --# in out VCGHeap; --# out MkAggregateCell; --# derives MkAggregateCell from OthersAggregate, --# VCGHeap & --# Statistics.TableUsage from *, --# OthersAggregate, --# VCGHeap & --# VCGHeap from *, --# AssignedVarRoot, --# DAGRoot, --# Dictionary.Dict, --# OthersAggregate; is Sym : Dictionary.Symbol; begin if OthersAggregate then -- Create "mk__" cell CreateCellKind (MkAggregateCell, VCGHeap, Cell_Storage.Mk_Aggregate); -- The type for the RHS is the same as the type of the LHS Sym := Dictionary.GetType (Cells.Get_Symbol_Value (VCGHeap, AssignedVarRoot)); Cells.Set_Symbol_Value (VCGHeap, MkAggregateCell, Sym); -- The expression is given by DAGRoot, calculated above. SetRightArgument (MkAggregateCell, DAGRoot, VCGHeap); else -- Defensive, and avoids flow error, but MkAggregateCell should not -- be used if assignment isn't an unconstrained_array_assignment. MkAggregateCell := Cells.Null_Cell; end if; end CreateMkAggregateCell; procedure CreateAssignedVarCell --# global in AssignedVarRoot; --# in out Statistics.TableUsage; --# in out VCGHeap; --# out AssignedVarCell; --# derives AssignedVarCell from VCGHeap & --# Statistics.TableUsage from *, --# VCGHeap & --# VCGHeap from *, --# AssignedVarRoot; is LocalCell : Cells.Cell; begin LocalCell := AssignedVarRoot; loop exit when (Cells.Get_Kind (VCGHeap, LocalCell) /= Cell_Storage.Op) and (Cells.Get_Kind (VCGHeap, LocalCell) /= Cell_Storage.Element_Function) and (Cells.Get_Kind (VCGHeap, LocalCell) /= Cell_Storage.Field_Access_Function); if (Cells.Get_Kind (VCGHeap, LocalCell) = Cell_Storage.Op) then LocalCell := LeftPtr (VCGHeap, LocalCell); elsif (Cells.Get_Kind (VCGHeap, LocalCell) = Cell_Storage.Element_Function) or (Cells.Get_Kind (VCGHeap, LocalCell) = Cell_Storage.Field_Access_Function) then LocalCell := RightPtr (VCGHeap, LocalCell); end if; end loop; Cells.Create_Cell (VCGHeap, AssignedVarCell); Cells.Copy_Contents (VCGHeap, LocalCell, AssignedVarCell); Cells.Set_Kind (VCGHeap, AssignedVarCell, Cell_Storage.Modified_Op); end CreateAssignedVarCell; ------------------------------------------------------------------------------- -- Digs down to primary node where the wf_primary will have planted a symbol -- if the primary references a stream variable. 
The returned symbol is either -- the stream variable itself if it is a direct assignement or the symbol of a -- function if it is a function that globally references one or more streams function AssignedStreamSymbol (ExpnNode : STree.SyntaxNode) return Dictionary.Symbol --# global in STree.Table; is LocalNode : STree.SyntaxNode; Result : Dictionary.Symbol; begin Result := Dictionary.NullSymbol; --default answer LocalNode := ExpnNode; loop -- to have any chance of success the chain must lead to a primary if STree.Syntax_Node_Type (Node => LocalNode) = SP_Symbols.primary then Result := STree.NodeSymbol (LocalNode); exit; end if; -- failure cases, if these are found it can't possibly be a simple stream or stream -- function assignment so we need to get out of the loop exit when STree.Syntax_Node_Type (Node => LocalNode) = SP_Symbols.unary_adding_operator; exit when STree.Syntax_Node_Type (Node => LocalNode) = SP_Symbols.RWabs; exit when STree.Syntax_Node_Type (Node => LocalNode) = SP_Symbols.RWnot; LocalNode := STree.Child_Node (Current_Node => LocalNode); end loop; return Result; end AssignedStreamSymbol; -- Complete an assignment model LHS := RHS and chain it into graph procedure SetStreamAssignment (LHS, RHS : in Cells.Cell) --# global in out Graph.Table; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGHeap; --# derives Graph.Table, --# StmtStack.S, --# VCGHeap from Graph.Table, --# LHS, --# RHS, --# StmtStack.S, --# VCGHeap & --# Statistics.TableUsage from *, --# LHS, --# RHS, --# VCGHeap; is StmtLabel : Labels.Label; StmtCell, ModList : Cells.Cell; begin PrepareLabel (VCGHeap, StmtLabel, StmtCell); Clists.CreateList (VCGHeap, ModList); Clists.AppendCell (VCGHeap, LHS, ModList); SetRightArgument (LHS, RHS, VCGHeap); SetAuxPtr (StmtCell, ModList, VCGHeap); Chain (StmtLabel, VCGHeap); end SetStreamAssignment; -- Build volatility model for a direct read of a stream variable procedure ModelStreamVariableSideEffect --# global in StreamSymbol; --# in out Graph.Table; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGHeap; --# derives Graph.Table, --# StmtStack.S, --# VCGHeap from Graph.Table, --# StmtStack.S, --# StreamSymbol, --# VCGHeap & --# Statistics.TableUsage from *, --# StreamSymbol, --# VCGHeap; is StreamTargetVar, StreamFunction, StreamExpn : Cells.Cell; begin -- ModelStreamVariableSideEffect CreateReferenceCell (StreamExpn, VCGHeap, StreamSymbol); -- now create the proof attribute function. 
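      -- Note (editorial summary of the three calls below): BuildStreamRHS wraps
      -- the reference to StreamSymbol in a proof-attribute function,
      -- CreateModifiedCell creates the "modified" target cell for the same
      -- variable, and SetStreamAssignment chains the resulting
      --    StreamVar := <stream attribute> (StreamVar)
      -- assignment into the statement graph; this is the volatility model that
      -- stops two successive reads of the input stream being assumed equal.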
BuildStreamRHS (VCGHeap, StreamSymbol, StreamExpn, --to get StreamFunction); CreateModifiedCell (StreamTargetVar, VCGHeap, StreamSymbol); --set up assignment SetStreamAssignment (StreamTargetVar, StreamFunction); end ModelStreamVariableSideEffect; -- Build a volatility model for an assignment of a function that globally -- references one or more stream variables procedure ModelStreamFunctionSideEffect --# global in Dictionary.Dict; --# in LScope; --# in StreamSymbol; --# in out Graph.Table; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out VCGHeap; --# derives Graph.Table, --# Statistics.TableUsage, --# StmtStack.S, --# VCGHeap from *, --# Dictionary.Dict, --# Graph.Table, --# LScope, --# StmtStack.S, --# StreamSymbol, --# VCGHeap; is ImportIt : Dictionary.Iterator; ImportSym : Dictionary.Symbol; StreamTargetVar, StreamFunction, StreamExpn : Cells.Cell; begin -- ModelStreamFunctionSideEffect ImportIt := Dictionary.FirstGlobalVariable (Dictionary.GetAbstraction (StreamSymbol, LScope), StreamSymbol); while not Dictionary.IsNullIterator (ImportIt) loop ImportSym := Dictionary.CurrentSymbol (ImportIt); if Dictionary.IsOwnVariableOrConstituentWithMode (ImportSym) then -- a side effect model is needed CreateModifiedCell (StreamTargetVar, VCGHeap, ImportSym); CreateReferenceCell (StreamExpn, VCGHeap, ImportSym); BuildStreamRHS (VCGHeap, ImportSym, StreamExpn, -- to get StreamFunction); SetStreamAssignment (StreamTargetVar, StreamFunction); end if; ImportIt := Dictionary.NextSymbol (ImportIt); end loop; end ModelStreamFunctionSideEffect; -- construct model of form StreamVar := StreamVar'Append (StreamVar, Expn); procedure ModelOutputStreamVolatility (AssignedVar : in Dictionary.Symbol; DAGRoot : in out Cells.Cell) --# global in out Statistics.TableUsage; --# in out VCGHeap; --# derives DAGRoot from VCGHeap & --# Statistics.TableUsage from *, --# AssignedVar, --# VCGHeap & --# VCGHeap from *, --# AssignedVar, --# DAGRoot; is TickCell, PrefixCell, IdentCell, CommaCell, LHArgCell, RHArgCell : Cells.Cell; begin -- ModelOutputStreamVolatility CreateOpCell (TickCell, VCGHeap, SP_Symbols.apostrophe); CreateOpCell (CommaCell, VCGHeap, SP_Symbols.comma); CreateFixedVarCell (PrefixCell, VCGHeap, AssignedVar); CreateCellKind (IdentCell, VCGHeap, Cell_Storage.Attrib_Function); Cells.Set_Lex_Str (VCGHeap, IdentCell, LexTokenManager.Append_Token); -- function arguments RHArgCell := DAGRoot; CreateReferenceCell (LHArgCell, VCGHeap, AssignedVar); --assemble into a function attribute SetLeftArgument (TickCell, PrefixCell, VCGHeap); SetRightArgument (TickCell, IdentCell, VCGHeap); SetRightArgument (IdentCell, CommaCell, VCGHeap); SetLeftArgument (CommaCell, LHArgCell, VCGHeap); SetRightArgument (CommaCell, RHArgCell, VCGHeap); -- return build up function as new expression to be assigned DAGRoot := TickCell; end ModelOutputStreamVolatility; --------------------------------------- function AssignedVarIsAnExport (TheSubprogram, TheAssignedVar : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return Dictionary.IsProcedure (TheSubprogram) and then -- only procedures have exports (Dictionary.IsExport (Dictionary.IsAbstract, TheSubprogram, TheAssignedVar) or else Dictionary.IsExport (Dictionary.IsRefined, TheSubprogram, TheAssignedVar)); end AssignedVarIsAnExport; --------------------------------------- -- Following procedure converts an "assignment" so that it has an -- entire variable on the LHS. 
Thus R.F := 0 becomes r := upf_f (0); -- The procedure is here because it is used by ModelAssignmentStmt and -- ModelProcedureCall. procedure ConvertToEntireVariable (AssignedVarRoot : in Cells.Cell; DAGRoot : in out Cells.Cell) --# global in out Statistics.TableUsage; --# in out VCGHeap; --# derives DAGRoot, --# Statistics.TableUsage, --# VCGHeap from *, --# AssignedVarRoot, --# DAGRoot, --# VCGHeap; is type ExtractorKind is (ArrayExtractor, RecordExtractor); ExtractorFound : Boolean; ExtractorCell, PrefixRoot, RHSRoot, NewRHSRoot : Cells.Cell; Kind : ExtractorKind; procedure FormNewRHS (PrefixRoot, RHSRoot : in Cells.Cell; NewRHSRoot : out Cells.Cell; Kind : in ExtractorKind) --# global in out Statistics.TableUsage; --# in out VCGHeap; --# derives NewRHSRoot from VCGHeap & --# Statistics.TableUsage from *, --# Kind, --# PrefixRoot, --# VCGHeap & --# VCGHeap from *, --# Kind, --# PrefixRoot, --# RHSRoot; is CommaCell, LocalNewRHSRoot : Cells.Cell; begin if Kind = ArrayExtractor then CreateCellKind (LocalNewRHSRoot, VCGHeap, Cell_Storage.Update_Function); else CreateUpfCell (LocalNewRHSRoot, VCGHeap, Cells.Get_Symbol_Value (VCGHeap, PrefixRoot), Cells.Get_Lex_Str (VCGHeap, PrefixRoot)); end if; CreateOpCell (CommaCell, VCGHeap, SP_Symbols.comma); SetRightArgument (LocalNewRHSRoot, CommaCell, VCGHeap); SetLeftArgument (CommaCell, RightPtr (VCGHeap, PrefixRoot), VCGHeap); SetRightArgument (CommaCell, RHSRoot, VCGHeap); NewRHSRoot := LocalNewRHSRoot; --# accept F, 601, NewRHSRoot, Kind, "False coupling here OK"; end FormNewRHS; procedure SearchForExtractor (Root : in Cells.Cell; ExtractorFound : out Boolean; ExtractorCell : out Cells.Cell; Kind : out ExtractorKind) --# global in VCGHeap; --# derives ExtractorCell, --# ExtractorFound, --# Kind from Root, --# VCGHeap; is LocalCell : Cells.Cell; begin LocalCell := Root; ExtractorFound := False; ExtractorCell := Cells.Null_Cell; -- ensure vals for out pars Kind := ArrayExtractor; -- ensure vals for out pars loop exit when (Cells.Get_Kind (VCGHeap, LocalCell) /= Cell_Storage.Op) and (Cells.Get_Kind (VCGHeap, LocalCell) /= Cell_Storage.Update_Function) and (Cells.Get_Kind (VCGHeap, LocalCell) /= Cell_Storage.Element_Function) and (Cells.Get_Kind (VCGHeap, LocalCell) /= Cell_Storage.Field_Access_Function) and (Cells.Get_Kind (VCGHeap, LocalCell) /= Cell_Storage.Field_Update_Function); if (Cells.Get_Kind (VCGHeap, LocalCell) = Cell_Storage.Element_Function) then ExtractorFound := True; ExtractorCell := LocalCell; Kind := ArrayExtractor; exit; end if; if (Cells.Get_Kind (VCGHeap, LocalCell) = Cell_Storage.Field_Access_Function) then ExtractorFound := True; ExtractorCell := LocalCell; Kind := RecordExtractor; exit; end if; if (Cells.Get_Kind (VCGHeap, LocalCell) = Cell_Storage.Op) then LocalCell := LeftPtr (VCGHeap, LocalCell); elsif (Cells.Get_Kind (VCGHeap, LocalCell) = Cell_Storage.Update_Function) then LocalCell := RightPtr (VCGHeap, LocalCell); elsif (Cells.Get_Kind (VCGHeap, LocalCell) = Cell_Storage.Field_Update_Function) then LocalCell := RightPtr (VCGHeap, LocalCell); end if; end loop; end SearchForExtractor; begin -- ConvertToEntireVariable RHSRoot := DAGRoot; --# accept F, 10, ExtractorCell, "Ineffective assignment here OK"; SearchForExtractor (AssignedVarRoot, ExtractorFound, ExtractorCell, Kind); --# end accept; if ExtractorFound then PrefixRoot := AssignedVarRoot; loop FormNewRHS (PrefixRoot, RHSRoot, NewRHSRoot, Kind); RHSRoot := NewRHSRoot; SearchForExtractor (RHSRoot, ExtractorFound, ExtractorCell, Kind); exit when not ExtractorFound; 
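            -- Editorial note: the extractor just located in the rebuilt RHS is
            -- copied into PrefixRoot below, so the next FormNewRHS call wraps the
            -- next level of a nested name (for example R.F.G is unwound one
            -- selector or index per iteration until only the entire variable
            -- remains on the left-hand side).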
Structures.CopyStructure (VCGHeap, ExtractorCell, PrefixRoot); end loop; end if; DAGRoot := RHSRoot; end ConvertToEntireVariable; ------------------------------------------------------------------------ begin -- ModelAssignmentStmt PrepareLabel (VCGHeap, StmtLabel, StmtCell); Clists.CreateList (VCGHeap, ModList); -- VariableComponentNode is LHS of assignment. VariableComponentNode := STree.Child_Node (Current_Node => Node); if STree.Syntax_Node_Type (Node => VariableComponentNode) = SP_Symbols.unconstrained_array_assignment then -- For an others aggregate the LHS of the assignment is child of child VariableComponentNode := STree.Child_Node (Current_Node => VariableComponentNode); OthersAggregate := True; else OthersAggregate := False; end if; -- ExpnNode is RHS of assignment ExpnNode := STree.Next_Sibling (Current_Node => VariableComponentNode); BuildExpnDAG (VariableComponentNode, LScope, Scope, LineNmbr, True, False, LoopStack, FlowHeap, VCGHeap, ContainsReals, VCGFailure, ShortCircuitStack, CheckStack, KindOfStackedCheck, -- to get AssignedVarRoot); -- For a normal assignment statement we call BuildExpnDAG for the LHS, then again for -- the RHS, and glue the two together. However, if this is an unconstrained_array_assignment then the -- LHS will be OK but the RHS will only contain the expression after the arrow in the -- others aggregate. In this case we need to synthesize the DAG for the RHS which will -- be of the form "mk__T(E)" where T is the unconstrained array type and E is the -- expression after the arrow in the aggregate (which is given by ExpnNode). BuildExpnDAG (ExpnNode, LScope, Scope, LineNmbr, True, DoAssumeLocalRvalues, LoopStack, FlowHeap, VCGHeap, ContainsReals, VCGFailure, ShortCircuitStack, CheckStack, KindOfStackedCheck, -- to get DAGRoot); -- If this is an unconstrained_array_assignment then create the necessary model structure CreateMkAggregateCell; CreateAssignedVarCell; -- moved from below generation of RTC to make assigned var symbol available -- if the assigned expression represents a stream variable of mode in then -- wf_assignment_statement will have put its subtype into the syntax tree. -- If this subtype is the same as that of the variable assigned to we do not -- want to generate a RTC for the assignment. wf_assignment_statement -- similarly plants the type for the results of an unchecked_conversion. -- -- If the assigned variable is an export of the subprogram then we _do_ generate -- a check even if the subtypes are the same. This is to prevent the result of -- an unchecked conversion escaping to the calling environment without any checks. -- There is a similar situation with the exporting of Ports; however, these generate -- a check in CheckTypeOfExports in IncorporateConstraints. The modifications -- for unchecked conversion will result in an additional VC in the case streams. 
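   -- In outline (editorial summary of the test below): the constraint RTC is
   -- omitted only when wf_assignment_statement planted the same subtype on both
   -- nodes (the NodeSymbol comparison) and the assigned variable is not an
   -- export of the enclosing subprogram; an export always gets the check.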
-- If we do need a check then use original RHS DAG structure before -- ConvertToEntireVariable if STree.NodeSymbol (ExpnNode) /= STree.NodeSymbol (Node) or else AssignedVarIsAnExport (Dictionary.GetRegion (Scope), Cells.Get_Symbol_Value (VCGHeap, AssignedVarCell)) then CheckConstraintRunTimeError (STree.NodeSymbol (Node), DAGRoot, Scope, VCGHeap, ShortCircuitStack, CheckStack, ContainsReals); end if; UnStackRtcs (LineNmbr, VCGHeap, CheckStack, KindOfStackedCheck); Clists.AppendCell (VCGHeap, AssignedVarCell, ModList); ConvertToEntireVariable (AssignedVarRoot, DAGRoot); -- if the assigned var is an output stream then we need to model volatility if Dictionary.IsOwnVariableOrConstituentWithMode (Cells.Get_Symbol_Value (VCGHeap, AssignedVarCell)) then ModelOutputStreamVolatility (Cells.Get_Symbol_Value (VCGHeap, AssignedVarCell), DAGRoot); end if; -- Complete model of assignment. In general the RHS is given by DAGRoot but for -- the special case of an unconstrained_array_assignment it is MkAggregateCell. if not OthersAggregate then SetRightArgument (AssignedVarCell, DAGRoot, VCGHeap); else SetRightArgument (AssignedVarCell, MkAggregateCell, VCGHeap); end if; SetAuxPtr (StmtCell, ModList, VCGHeap); Chain (StmtLabel, VCGHeap); -- see if an assignment of stream is involved and model side effect if it is StreamSymbol := AssignedStreamSymbol (ExpnNode); if not Dictionary.Is_Null_Symbol (StreamSymbol) then -- we must model side effect of stream assignment if Dictionary.IsAdaFunction (StreamSymbol) then ModelStreamFunctionSideEffect; else -- since it is not null and is not a function it must be a variable -- so create side-effect model for a stream variable assignment ModelStreamVariableSideEffect; end if; end if; end ModelAssignmentStmt; spark-2012.0.deb/examiner/file_utils.ads0000644000175000017500000000335111753202336017103 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with SPARK_IO; use type SPARK_IO.File_Type; --# inherit CommandLineData, --# Date_Time, --# E_Strings, --# ScreenEcho, --# SPARK_IO, --# Version; package File_Utils is type File_Types is (Dec_File, Rule_File, Other_File); procedure Print_A_Header (File : in SPARK_IO.File_Type; Header_Line : in String; File_Type : in File_Types); --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# File, --# File_Type, --# Header_Line; end File_Utils; ././@LongLink0000000000000000000000000000015400000000000011565 Lustar rootrootspark-2012.0.deb/examiner/errorhandler-conversions-tostring-warningwithposition-warningwithpositionexpl.adbspark-2012.0.deb/examiner/errorhandler-conversions-tostring-warningwithposition-warningwithpositione0000644000175000017500000007303611753202337033527 0ustar eugeneugenseparate (ErrorHandler.Conversions.ToString.WarningWithPosition) procedure WarningWithPositionExpl (E_Str : in out E_Strings.T) is begin case Err_Num.ErrorNum is when 1 => E_Strings.Append_String (E_Str => E_Str, Str => "This warning will appear against an identifier in a with clause if" & " it is not also present in an inherit clause. Such an identifier" & " cannot be used in any non-hidden part of" & " a SPARK program. The use of with without inherit is permitted to" & " allow reference in hidden" & " parts of the text to imported packages which are not legal SPARK." & " For example, the body of" & " SPARK_IO is hidden and implements the exported operations of the" & " package by use of package" & " TEXT_IO. For this reason TEXT_IO must appear in the with clause of" & " SPARK_IO. (warning control file keyword: with_clauses)"); when 2 => E_Strings.Append_String (E_Str => E_Str, Str => "The significance of representation clauses cannot be assessed by the" & " Examiner because" & " it depends on the specific memory architecture of the target system." & " Like pragmas," & " representation clauses can change the meaning of a SPARK program and" & " the warning" & " highlights the need to ensure their correctness by other means." & " (warning control file" & " keyword: representation_clauses)"); when 3 => E_Strings.Append_String (E_Str => E_Str, Str => "All pragmas encountered by the Examiner generate this warning." & " While many pragmas (e.g." & " pragma page) are harmless others can change a program's meaning," & " for example by causing" & " two variables to share a single memory location." & " (warning control file keyword: pragma" & " pragma_identifier or pragma all)"); when 4 => E_Strings.Append_String (E_Str => E_Str, Str => "The declare annotation is ignored by the Examiner if the profile" & " is not Ravenscar. (warning control file keyword: declare_annotations)"); when 5 => E_Strings.Append_String (E_Str => E_Str, Str => "Interrupt identifiers are implementation defined and the Examiner cannot check that values are" & " used only once. Duplication can occur by declaring more than object of a single (sub)type where" & " that type defines handlers. It may also occur if interrupt identifiers are set via discriminants" & " and two or more actual discriminants generate the same value." & " (warning control file keyword: interrupt_handlers)"); when 6 => E_Strings.Append_String (E_Str => E_Str, Str => "Machine code is inherently implementation dependent and cannot be analysed" & " by the Examiner. 
Users are responsible for ensuring that the behaviour" & " of the inserted machine code matches the annotation of the subprogram containing it."); when 7 => E_Strings.Append_String (E_Str => E_Str, Str => "Such identifiers will be rejected by an Ada2005 compiler and by the SPARK" & " Examiner for SPARK2005. It is recommended to rename" & " such identifiers for future upward compatibility." & " (warning control file keyword: ada2005_reserved_words)"); when 11 => E_Strings.Append_String (E_Str => E_Str, Str => "The others clause is non-executable because all case choices have" & " already been" & " covered explicitly. If the range of the case choice is altered later" & " then the" & " others clause may be executed with unexpected results. It is better" & " to omit the" & " others clause in which case any extension of the case range will result in a" & " compilation error."); when 12 => E_Strings.Append_String (E_Str => E_Str, Str => "See ALRM 13.9. The use of Unchecked_Conversion can result in implementation-defined" & " values being returned. The function should be used with great care. The principal" & " use of Unchecked_Conversion is SPARK programs is the for the reading of external ports" & " prior to performing a validity check; here the suppression of constraint checking prior" & " to validation is useful. The Examiner does not assume that the value returned by" & " an unchecked conversion is valid and so unprovable run-time check VCs will result if" & " a suitable validity check is not carried out before the value is used." & " (warning control file keyword: unchecked_conversion)"); when 13 => E_Strings.Append_String (E_Str => E_Str, Str => "See ALRM 13.9. The use of Unchecked_Conversion can result in invalid" & " values being returned. The function should be used with great care especially, as in" & " this case, where the type returned does not generate Ada run-time checks nor SPARK" & " run-time verification conditions. For such types, this warning is the ONLY reminder" & " the Examiner generates that the generated value may have an invalid representation." & " For this reason the warning is NOT suppressed by the warning control file keyword" & " unchecked_conversion." & " The principal use of Unchecked_Conversion is SPARK programs is the for the reading of external ports" & " prior to performing a validity check; here the suppression of constraint checking prior" & " to validation is useful."); when 120 => E_Strings.Append_String (E_Str => E_Str, Str => "This end accept annotation does not match any preceding start accept in this unit."); when 121 => E_Strings.Append_String (E_Str => E_Str, Str => "The accept annotation is used to indicate that a particular flow error or semantic warning" & " message is expected and can be justified. This error indicates that the expected message" & " did not actually occur. Note that when matching any information flow error messages containing" & " two variable names, the export should be placed first and the import second (the order" & " in the error message may differ from this depending on the style of information flow" & " error reporting selected). For example: --# accept Flow, 601, X, Y, ""...""; justifies" & " the message: ""X may be derived from the imported value(s) of Y"" or the alternative" & " form: ""Y may be used in the derivation of X""."); when 122 => E_Strings.Append_String (E_Str => E_Str, Str => "The number of justifications per source file is limited. 
If you reach this limit" & " it is worth careful consideration of why the code generates so many warnings."); when 169 => E_Strings.Append_String (E_Str => E_Str, Str => "With the publication of Edition 3.1 of the SPARK Definition the" & " previous restriction" & " prohibiting the direct updating of own variables of non-enclosing" & " packages was removed; however, the preferred use of packages as" & " abstract state machines is compromised by such action which is" & " therefore discouraged. (warning control file keyword: direct_updates)"); when 200 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued if a static expression exceeds the internal limits of the Examiner" & " because its" & " value is, for example, too large to be evaluated using infinite precision" & " arithmetic. No" & " value will be recorded for the expression and this may limit the" & " Examiner's ability to" & " detect certain sorts of errors such as numeric constraints." & " (warning control file keyword: static_expressions)"); when 201 => E_Strings.Append_String (E_Str => E_Str, Str => "Raised, for example, when evaluating 'Size of a type" & " that does not have an explicit Size representation clause." & " Attributes of implementation-defined types, such as" & " Integer'Last may also be" & " unknown to be Examiner if they are not specified in the" & " configuration file" & " (warning control file keyword: static_expressions)"); when 202 => E_Strings.Append_String (E_Str => E_Str, Str => "Raised when comparing two real numbers. The examiner cannot deal" & " with real numbers specified to" & " such a high degree of precision. Consider reducing the precision" & " of these numbers."); when 300 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when an aggregate of a multi-dimensional array is found." & " Suppresses generation" & " of VCs for that subprogram. Can be worked round by using" & " arrays of arrays."); when 302 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when a potentially re-orderable expression is encountered." & " For example x := a + b + c; Whether" & " intermediate sub-expression values overflow may depend on the" & " order of evaluation which is" & " compiler-dependent." & " Therefore, code generating this warning should be parenthesized to" & " remove the ambiguity." & " e.g. 
x := (a + b) + c;"); when 303 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued where choices in an array aggregate or case statement are" & " outside the range" & " which can be detected because of limits on the size of a table" & " internal to the Examiner."); when 304 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when the Examiner cannot determine the completeness of a" & " case statement because" & " the bounds of the type of the controlling expression exceed the" & " size of the internal table" & " used to perform the checks."); when 305 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when the Examiner cannot determine the completeness of an" & " array aggregate or" & " case statement because the number used in a choice exceed the size" & " allowed in the internal" & " table used to perform the checks."); when 306 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when the Examiner cannot determine the completeness of an" & " array aggregate" & " because its bounds exceed the size of the internal table used to" & " perform the checks."); when 307 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued where the array index (sub)type is inappropriate: this is" & " probably because there" & " is an error in its definition, which will have been indicated by" & " a previous error message."); when 308 => E_Strings.Append_String (E_Str => E_Str, Str => "The use of this operator is discouraged in SPARK because of the" & " difficulty in" & " determining exactly what it means to say that two instances of a" & " floating point number are" & " equal"); when 309 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued where a type conversion is either converting from a (sub)type" & " to the same" & " (sub)type or is converting between two subtypes of the same type." & " In the former case the" & " type conversion may be safely removed because no constraint check" & " is required; in the" & " latter case the type conversion may be safely replaced by a type" & " qualification which" & " preserves the constraint check.(warning control file keyword:" & " type_conversions)"); when 310 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when a language feature defined by Ada 95 to be obsolescent is" & " used. Use of such" & " features is not recommended because compiler support for them cannot" & " be guaranteed.(warning control file keyword:obsolescent_features)"); when 312 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when a VC or PF references a multi-dimensional array constant." & " Can be worked round by using arrays of arrays."); when 313 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when replacement rules are requested for a composite constant which" & " had semantic errors in its initializing expression, or is" & " a deferred constant whose completion is hidden from the Examiner." & " Semantic errors must be eliminated before replacement rules can be generated."); when 314 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued when an attempt is made to generate type deduction rules for a constant" & " which has semantic errors in its type. These semantic errors" & " must be eliminated before type deduction rules can be generated."); when 315 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued in flow=auto mode when a function calls a procedure that does not have a" & " derives annotation. 
In most cases this assumption will not affect" & " the validity of the analysis, but if the called procedure derives" & " null from an import this can have an impact. Note that functions" & " are considered to have implicit derives annotations so this warning" & " is not issued for calls to functions."); when 320 => E_Strings.Append_String (E_Str => E_Str, Str => "Any proof function with a non-bool return can introduce unsoundness if" & " the result could overflow. For example a return of (x + 1) is not ok if" & " x can take the value of integer'last." & " (warning control file keyword: proof_function_non_boolean)"); when 321 => E_Strings.Append_String (E_Str => E_Str, Str => "Any proof function with an implicit return can easily introduce" & " unsoundness as they do not have a body which we can check to" & " expose any contradictions. For example: return B => False." & " (warning control file keyword: proof_function_implicit)"); when 322 => E_Strings.Append_String (E_Str => E_Str, Str => "(warning control file keyword: proof_function_refinement)"); when 323 => E_Strings.Append_String (E_Str => E_Str, Str => "(warning control file keyword: proof_function_refinement)"); when 350 => E_Strings.Append_String (E_Str => E_Str, Str => "The presence of a pragma Import makes it possible that the variable" & " is connected" & " to some external device. The behaviour of such variables is best" & " captured by" & " making them moded own variables (or ""stream"" variables). If variables" & " connected" & " to the external environment are treated as if they are normal program" & " variables then" & " misleading analysis results are inevitable. The use of pragma Import on local" & " variables of subprograms is particularly deprecated. The warning" & " may safely be" & " disregarded if the variable is not associated with memory-mapped" & " input/output" & " or if the variable concerned is an own variable and the operations on it are" & " suitably annotated to indicate volatile, stream-like behaviour." & " Where pragma Import is used, it is essential that the variable is properly" & " initialized at the point from which it is imported." & " (warning control file keyword:imported_objects)"); when 351 => E_Strings.Append_String (E_Str => E_Str, Str => "Great care is needed when attaching an address clause to a constant. The use" & " of such a clause is safe if, and only if, the address supplied provides a valid" & " value for the constant which does not vary during the execution life of the program," & " for example, mapping the constant to PROM data." & " If the address clause causes the constant to have a value which may alter, or worse," & " change dynamically under the influence of some device external to the program, then" & " misleading or incorrect analysis is certain to result." & " If the intention is to create an input port of some kind, then a constant should not" & " be used. Instead a moded own variable (or ""stream"" variables) should be used." & " (warning control file keyword: address_clauses)"); when 380 => E_Strings.Append_String (E_Str => E_Str, Str => "The Examiner checks the case used for an identifier against the" & " declaration of that identifier and warns if they do not match." 
& " (warning control file keyword:style_check_casing)"); when 390 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued to inform the user that a generic subprogram instantiation" & " cannot be completed because of earlier errors in the generic declaration."); when 392 => E_Strings.Append_String (E_Str => E_Str, Str => "Where values are read from external variables (i.e. variables" & " connected to the external" & " environment) there is no guarantee that the bit pattern read will" & " be a valid representation for" & " the type of the external variable. Unexpected behaviour may result" & " if invalid values are used in expressions." & " If the code is compiled with Ada run-time checks enabled the" & " assignment of an invalid value may (but need not) raise a" & " run-time exception dependent on the compiler." & " A compiler may provide facilities to apply extended checking" & " which may also raise a run-time exception if an invalid value is used." & " The SPARK Toolset does not check the validity of the external variable" & " and therefore any possible exception arising from its assignment is" & " outside the scope of proof of absence of RTE." & " To ensure that a run-time exception cannot occur make the type of" & " the external variable such that any possible bit pattern that" & " may be read from the external source is a valid value." & " If the desired type is such a type then the always_valid assertion" & " may be applied to the external variable; otherwise use explicit tests" & " to ensure it has a valid value for the desired type before converting" & " to an object of the desired type." & " In SPARK 95 the 'Valid attribute (see ALRM 13.9.2) may be used to" & " determine the validity of a value if it can be guaranteed that the" & " assignment of an invalid value read from an external variable will" & " not raise a run time exception, either by compiling the code with" & " checks off or by ensuring the compiler does not apply constraint" & " checks when assigning same subtype objects." & " Note that when the Examiner is used to generate run-time checks, it" & " will not be possible to discharge those involving external variables" & " unless one of the above steps is taken." & " More information on interfacing can be found in the INFORMED manual" & " and the SPARK Proof Manual." & " (warning control file" & " keyword: external_assignment)"); when 393 => E_Strings.Append_String (E_Str => E_Str, Str => "Where values are read from external variables (i.e. variables" & " connected to the external" & " environment) there is no guarantee that the bit pattern read will" & " be a valid representation for" & " the type of the external variable. Unexpected behaviour may result" & " if invalid values are used in expressions." & " If the code is compiled with Ada run-time checks enabled the" & " assignment of an invalid value may (but need not) raise a" & " run-time exception dependent on the compiler." & " A compiler may provide facilities to apply extended checking" & " which may also raise a run-time exception if an invalid value is used" & " The SPARK Toolset does not check the validity of the external variable" & " and therefore any possible exception arising from its assignment is" & " outside the scope of proof of absence of RTE." & " Where, as in this case, the type is one for which" & " Ada run-time checks need not be generated and SPARK run-time" & " verification conditions are not generated, extra care is required." 
& " For such types, this warning is the ONLY reminder" & " the Examiner generates that the external value may have an invalid" & " representation." & " For this reason the warning is NOT suppressed by the warning" & " control file keyword external_assignment." & " To ensure that a run-time exception cannot occur make the type of" & " the external variable such that any possible bit pattern that" & " may be read from the external source is a valid value." & " Explicit tests of the value may then be used to determine the" & " value of an object of the desired type." & " In SPARK 95 the 'Valid attribute (see ALRM 13.9.2) may be used to" & " determine the validity of a value if it can be guaranteed that the" & " assignment of an invalid value read from an external variable will" & " not raise a run time exception, either by compiling the code with" & " checks off or by ensuring the compiler does not apply constraint" & " checks when assigning same subtype objects." & " Boolean external variables require special care since the Examiner" & " does not generate run-time checks" & " for Boolean variables; use of 'Valid is essential when reading" & " Boolean external variables." & " More information on interfacing can be found in the INFORMED manual" & " and the SPARK Proof Manual."); when 394 => E_Strings.Append_String (E_Str => E_Str, Str => "A variable of a private type can only be used (without generating" & " a data flow error) if there is some way of" & " giving it an initial value. For a limited private type only a" & " procedure that has an export of that type" & " and no imports of that type is suitable. For a private type either" & " a procedure, function or (deferred)" & " constant is required. The required facility may be placed in, or" & " already available in, a public" & " child package." & " (warning control file keyword: private_types)"); when 395 => E_Strings.Append_String (E_Str => E_Str, Str => "When own variables are given modes they are considered to be inputs" & " from or outputs" & " to the external environment. The Examiner regards them as being" & " volatile (i.e. their" & " values can change in ways not visible from an inspection of the" & " source code). If" & " a variable is declared in that way but it is actually an ordinary" & " variable which is NOT" & " connected to the environment then misleading analysis is inevitable." & " The Examiner" & " expects to find an address clause or pragma import for variables of this kind to" & " indicate that they" & " are indeed memory-mapped input/output ports. This warning is issued" & " if an address" & " clause or pragma import is not found."); when 396 => E_Strings.Append_String (E_Str => E_Str, Str => "The presence of an address clause makes it possible that the variable" & " is connected" & " to some external device. The behaviour of such variables is best" & " captured by" & " making them moded own variables (or ""stream"" variables). If variables" & " connected" & " to the external environment are treated as if they are normal program" & " variables then" & " misleading analysis results are inevitable. The use of address clauses" & " on local" & " variables of subprograms is particularly deprecated. The warning" & " may safely be" & " disregarded if the variable is not associated with memory-mapped" & " input/output" & " or if the variable concerned is an own variable and the operations on it are" & " suitably annotated to indicate volatile, stream-like behaviour." 
& " (warning control file keyword: address_clauses)"); when 397 => E_Strings.Append_String (E_Str => E_Str, Str => "A variable of a private type can only be used (without generating a data" & " flow error) if there is some way of" & " giving it an initial value. For a limited private type only a procedure" & " that has an export of that type" & " and no imports of that type is suitable. For a private type either a" & " procedure, function or (deferred)" & " constant is required."); when 398 => E_Strings.Append_String (E_Str => E_Str, Str => "The own variable can only be used (without generating a data flow error)" & " if there is some way of" & " giving it an initial value. If it is" & " initialized during package elaboration (or implicitly by the environment" & " because it represents an" & " input port) it should be placed in an ""initializes"" annotation." & " Otherwise there needs to be some way" & " of assigning an initial value during program execution. Either the own" & " variable needs to be declared" & " in the visible part of the package so that a direct assignment can be" & " made to it or, more usually, the" & " package must declare at least one procedure for which the own variable" & " is an export but not an import." & " Note that if the own variable is an abstract own variable with some" & " constituents initialized" & " during elaboration and some during program execution then it will never" & " be possible correctly to" & " initialize it; such abstract own variables must be divided into separate" & " initialized and uninitialized" & " components."); when 399 => E_Strings.Append_String (E_Str => E_Str, Str => "Issued to inform the user that flow analysis has been suppressed" & " because of the error in the called subprogram's interface."); when others => null; end case; end WarningWithPositionExpl; spark-2012.0.deb/examiner/componenterrors.adb0000644000175000017500000001366711753202335020174 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with SystemErrors, Statistics; package body ComponentErrors is procedure Initialise (TheErrorHeap : out HeapOfErrors) is begin --# accept F, 32, TheErrorHeap.ListOfComponentErrors, "Initialization partial but effective" & --# F, 31, TheErrorHeap.ListOfComponentErrors, "Initialization partial but effective" & --# F, 602, TheErrorHeap, TheErrorHeap.ListOfComponentErrors, "Initialization partial but effective"; TheErrorHeap.HighMark := NullComponentError; TheErrorHeap.NextFreeComponent := NullComponentError; end Initialise; procedure CreateError (TheErrorHeap : in out HeapOfErrors; HeapSeq : in out Heap.HeapRecord; ErrClass : in ErrorClass; ErrVal : in Natural; Position : in LexTokenManager.Token_Position; Sym : in Dictionary.Symbol; NewError : out ComponentError) is NewErrorLocal : ComponentError; NodeList : SeqAlgebra.Seq; begin if TheErrorHeap.NextFreeComponent /= NullComponentError then -- returned locations are re-usable NewErrorLocal := TheErrorHeap.NextFreeComponent; TheErrorHeap.NextFreeComponent := TheErrorHeap.ListOfComponentErrors (NewErrorLocal).NextError; elsif TheErrorHeap.HighMark < MaxNumComponentErrors then -- return list empty but unused cells remain in array TheErrorHeap.HighMark := TheErrorHeap.HighMark + 1; NewErrorLocal := TheErrorHeap.HighMark; else --returned list empty and array used up, nothing left Statistics.SetTableUsage (Statistics.RecordErrors, MaxNumComponentErrors); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Component_Error_Overflow, Msg => ""); NewErrorLocal := NullComponentError; -- strictly unnecessary since prev lines doesn't return end if; -- if we get here we have a new, valid index into the array (which may point at garbage) SeqAlgebra.CreateSeq (HeapSeq, NodeList); TheErrorHeap.ListOfComponentErrors (NewErrorLocal) := ErrorDescriptor' (ErrClass => ErrClass, ErrVal => ErrVal, Position => Position, Sym => Sym, AssociatedComponentNodes => NodeList, NextError => NullComponentError); NewError := NewErrorLocal; end CreateError; procedure DisposeOfError (TheErrorHeap : in out HeapOfErrors; HeapSeq : in out Heap.HeapRecord; OldError : in ComponentError) is begin SeqAlgebra.DisposeOfSeq (HeapSeq, TheErrorHeap.ListOfComponentErrors (OldError).AssociatedComponentNodes); TheErrorHeap.ListOfComponentErrors (OldError).NextError := TheErrorHeap.NextFreeComponent; TheErrorHeap.NextFreeComponent := OldError; end DisposeOfError; function IsSameError (TheErrorHeap : HeapOfErrors; Error1 : ComponentError; Error2 : ComponentError) return Boolean is FirstError, SecondError : ErrorDescriptor; begin FirstError := TheErrorHeap.ListOfComponentErrors (Error1); SecondError := TheErrorHeap.ListOfComponentErrors (Error2); return FirstError.ErrClass = SecondError.ErrClass and then FirstError.ErrVal = SecondError.ErrVal and then FirstError.Position = SecondError.Position and then FirstError.Sym = SecondError.Sym; end IsSameError; function ClassOfError (TheErrorHeap : HeapOfErrors; Error : ComponentError) return ErrorClass is begin return TheErrorHeap.ListOfComponentErrors (Error).ErrClass; end ClassOfError; function ValueOfError (TheErrorHeap : HeapOfErrors; Error : ComponentError) return Natural is begin return TheErrorHeap.ListOfComponentErrors (Error).ErrVal; end ValueOfError; function PositionOfError (TheErrorHeap : HeapOfErrors; Error : ComponentError) return LexTokenManager.Token_Position is begin return TheErrorHeap.ListOfComponentErrors (Error).Position; end PositionOfError; function 
SymOfError (TheErrorHeap : HeapOfErrors; Error : ComponentError) return Dictionary.Symbol is begin return TheErrorHeap.ListOfComponentErrors (Error).Sym; end SymOfError; function AssociatedComponentNodesOfError (TheErrorHeap : HeapOfErrors; Error : ComponentError) return SeqAlgebra.Seq is begin return TheErrorHeap.ListOfComponentErrors (Error).AssociatedComponentNodes; end AssociatedComponentNodesOfError; procedure ReportUsage (TheErrorHeap : in HeapOfErrors) is begin Statistics.SetTableUsage (Statistics.RecordErrors, TheErrorHeap.HighMark); end ReportUsage; end ComponentErrors; spark-2012.0.deb/examiner/dictionary-addloop.adb0000644000175000017500000000772711753202336020523 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Dictionary) procedure AddLoop (Scope : in Scopes; Comp_Unit : in ContextManager.UnitDescriptors; LoopStatement : in Location; TheLoop : out Symbol) is CompilationUnit, Previous : Symbol; begin RawDict.CreateLoop (Region => GetRegion (Scope), Comp_Unit => Comp_Unit, Loc => LoopStatement.Start_Position, TheLoop => TheLoop); CompilationUnit := GetEnclosingCompilationUnit (Scope); case RawDict.GetSymbolDiscriminant (CompilationUnit) is when Subprogram_Symbol => Previous := RawDict.Get_Subprogram_Last_Loop (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => CompilationUnit)); if Previous = NullSymbol then RawDict.Set_Subprogram_First_Loop (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => CompilationUnit), The_Loop => TheLoop); else RawDict.SetNextLoop (Previous, TheLoop); end if; RawDict.Set_Subprogram_Last_Loop (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => CompilationUnit), The_Loop => TheLoop); when Type_Symbol => -- must be task body, no other type could have a loop in it Previous := RawDict.Get_Task_Type_Last_Loop (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => CompilationUnit)); if Previous = NullSymbol then RawDict.Set_Task_Type_First_Loop (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => CompilationUnit), The_Loop => TheLoop); else RawDict.SetNextLoop (Previous, TheLoop); end if; RawDict.Set_Task_Type_Last_Loop (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => CompilationUnit), The_Loop => TheLoop); when Package_Symbol => Previous := RawDict.Get_Package_Last_Loop (The_Package => RawDict.Get_Package_Info_Ref (Item => CompilationUnit)); if Previous = NullSymbol then RawDict.Set_Package_First_Loop (The_Package => RawDict.Get_Package_Info_Ref (Item => CompilationUnit), The_Loop => TheLoop); else RawDict.SetNextLoop (Previous, TheLoop); end if; RawDict.Set_Package_Last_Loop (The_Package => 
RawDict.Get_Package_Info_Ref (Item => CompilationUnit), The_Loop => TheLoop); when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AddLoop"); end case; if SPARK_IO.Is_Open (Dict.TemporaryFile) then Write_String (Dict.TemporaryFile, "loop statement for "); Write_Name (File => Dict.TemporaryFile, Item => TheLoop); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => LoopStatement); Write_Line (Dict.TemporaryFile, " ;"); end if; end AddLoop; spark-2012.0.deb/examiner/casing.adb0000644000175000017500000001103611753202335016165 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with CommandLineData; with ErrorHandler; with LexTokenManager; with SP_Symbols; use type LexTokenManager.Str_Comp_Result; package body Casing is -- Return TRUE if the casing is revevant for the token Lex_Str. function Is_Casing_Activated (Lex_Str : in LexTokenManager.Lex_String) return Boolean --# global in CommandLineData.Content; --# in LexTokenManager.State; is begin return (LexTokenManager.Is_Standard_Token (Lex_Str => Lex_Str) and then CommandLineData.Content.Casing_Standard) or else ((not LexTokenManager.Is_Standard_Token (Lex_Str => Lex_Str)) and then CommandLineData.Content.Casing_Identifier); end Is_Casing_Activated; ----------------------------------------------------------------------------------------- procedure Check_String_Casing (Str : in E_Strings.T; Lex_Str : in LexTokenManager.Lex_String; Position : in LexTokenManager.Token_Position) is begin if Is_Casing_Activated (Lex_Str => Lex_Str) and then not E_Strings.Eq_CS_String (E_Str1 => Str, E_Str2 => LexTokenManager.Lex_String_To_String (Lex_Str => Lex_Str)) then -- The casing is not the same and the casing switch has been -- set -> raise the warning. ErrorHandler.Semantic_Warning (Err_Num => 380, Position => Position, Id_Str => Lex_Str); end if; end Check_String_Casing; ----------------------------------------------------------------------------------------- procedure Check_Casing (Lex_Str1 : in LexTokenManager.Lex_String; Lex_Str2 : in LexTokenManager.Lex_String; Position : in LexTokenManager.Token_Position) is begin if Is_Casing_Activated (Lex_Str => Lex_Str1) and then LexTokenManager.Lex_String_Case_Sensitive_Compare (Lex_Str1 => Lex_Str1, Lex_Str2 => Lex_Str2) /= LexTokenManager.Str_Eq then -- The casing is not the same and the casing switch has been -- set -> raise the warning. 
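         -- Illustrative example (editorial): two spellings that differ only in
         -- letter case, say "MyVar" and "myvar", fail the case-sensitive
         -- comparison above, and warning 380 (casing differs from the expected
         -- form) is then reported at Position with Lex_Str as the offending
         -- identifier.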
ErrorHandler.Semantic_Warning (Err_Num => 380, Position => Position, Id_Str => Lex_Str1); end if; end Check_Casing; ----------------------------------------------------------------------------------------- procedure Check_Node_Casing (Top_Node : in STree.SyntaxNode) is It : STree.Iterator; Ident_Node : STree.SyntaxNode; begin -- Iterate on all the identifiers in the tree with the top node -- TOP_NODE. It := STree.Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Top_Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Ident_Node := STree.Get_Node (It => It); --# assert STree.Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier and --# Ident_Node = STree.Get_Node (It); -- Check if the casing is correct between the reference -- string LEX_STR1 and the specific usage of the string -- LEX_STR2. Check_Casing (Lex_Str1 => STree.Node_Lex_String (Node => Ident_Node), Lex_Str2 => STree.Node_Token_String (Node => Ident_Node), Position => STree.Node_Position (Node => Ident_Node)); It := STree.NextNode (It => It); end loop; end Check_Node_Casing; end Casing; spark-2012.0.deb/examiner/maths.ads0000644000175000017500000004536311753202336016071 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with E_Strings; with LexTokenManager; use type LexTokenManager.Str_Comp_Result; --# inherit E_Strings, --# LexTokenManager, --# SPARK_IO; package Maths is type Value is private; NoValue : constant Value; ZeroReal : constant Value; ZeroInteger : constant Value; OneInteger : constant Value; ExactHalf : constant Value; TrueValue : constant Value; FalseValue : constant Value; type ErrorCode is ( NoError, IllegalValue, --ie. not a valid SPARK literal IllegalOperation, --ie. wrong for Value types passed OverFlow, --ie. too many digits for array DivideByZero, TypeMismatch, ConstraintError); --eg. 
pred(t'base'first) ---------------------------------------------------------------------------- --Conversion of numeric literals procedure LiteralToValue (Str : in LexTokenManager.Lex_String; Num : out Value; OK : out ErrorCode); --# global in LexTokenManager.State; --# derives Num, --# OK from LexTokenManager.State, --# Str; -- post (Ok = NoError) or (Ok = illegalValue) or (Ok = overflow); ---------------------------------------------------------------------------- function IntegerToValue (I : Integer) return Value; --------------------------------------------------------------------------- procedure StorageRep (Num : in Value; Rep : out LexTokenManager.Lex_String); --# global in out LexTokenManager.State; --# derives LexTokenManager.State, --# Rep from LexTokenManager.State, --# Num; ---------------------------------------------------------------------------- function ValueRep (StoreRep : LexTokenManager.Lex_String) return Value; --# global in LexTokenManager.State; --caution, although this function turns a LexString into a value it is --not the same as procedure LiteralToValue. This one converts only --things which were first converted by StorageRep. LiteralToValue can parse --any numeric literal to a value. ---------------------------------------------------------------------------- function HasNoValue (Num : Value) return Boolean; pragma Inline (HasNoValue); ---------------------------------------------------------------------------- function ValueToString (Num : Value) return E_Strings.T; ---------------------------------------------------------------------------- procedure ValueToInteger (Num : in Value; Int : out Integer; Ok : out ErrorCode); --# derives Int, --# Ok from Num; -- post (Ok = NoError) or (Ok = TypeMismatch) or (Ok = overflow); ---------------------------------------------------------------------------- procedure Negate (Num : in out Value); --# derives Num from *; -- pre Num.Sort = IntegerValue or Num.Sort = RealValue; ---------------------------------------------------------------------------- procedure Absolute (Num : in out Value); --# derives Num from *; -- pre Num.Sort = IntegerValue or Num.Sort = RealValue; ---------------------------------------------------------------------------- procedure ConvertToInteger (Num : in out Value); --# derives Num from *; -- pre Num.Sort = IntegerValue or Num.Sort = RealValue; ---------------------------------------------------------------------------- procedure ConvertToReal (Num : in out Value); --# derives Num from *; -- pre Num.Sort = IntegerValue or Num.Sort = RealValue; ---------------------------------------------------------------------------- procedure Floor (Val : in Value; Result : out Value; OK : out ErrorCode); --# derives OK, --# Result from Val; -- pre Val.Sort = IntegerValue or Val.Sort = RealValue; -- post (Ok = NoError) or (Ok = Overflow) ---------------------------------------------------------------------------- procedure Ceiling (Val : in Value; Result : out Value; OK : out ErrorCode); --# derives OK, --# Result from Val; -- pre Val.Sort = IntegerValue or Val.Sort = RealValue; -- post (Ok = NoError) or (Ok = Overflow) ---------------------------------------------------------------------------- procedure Add (FirstNum, SecondNum : in Value; Result : out Value; Ok : out ErrorCode); --# derives Ok, --# Result from FirstNum, --# SecondNum; -- post (Ok = NoError) or (Ok = TypeMismatch) or (Ok = Overflow); ---------------------------------------------------------------------------- procedure Subtract (FirstNum, 
SecondNum : in Value; Result : out Value; Ok : out ErrorCode); --# derives Ok, --# Result from FirstNum, --# SecondNum; -- post (Ok = NoError) or (Ok = TypeMismatch) or (Ok = Overflow); ---------------------------------------------------------------------------- procedure Multiply (FirstNum, SecondNum : in Value; Result : out Value; Ok : out ErrorCode); --# derives Ok, --# Result from FirstNum, --# SecondNum; -- post (Ok = NoError) or (Ok = TypeMismatch) or (Ok = Overflow); ---------------------------------------------------------------------------- procedure Divide (FirstNum, SecondNum : in Value; Result : out Value; Ok : out ErrorCode); --# derives Ok, --# Result from FirstNum, --# SecondNum; -- post (Ok = NoError) or (Ok = TypeMismatch) or (Ok = Overflow) or -- (Ok = DivideByZero); ---------------------------------------------------------------------------- procedure Modulus (FirstNum, SecondNum : in Value; Result : out Value; Ok : out ErrorCode); --# derives Ok, --# Result from FirstNum, --# SecondNum; -- post (Ok = NoError) or (Ok = TypeMismatch) or (Ok = IllegalOperation) or -- (Ok = DivideByZero); ---------------------------------------------------------------------------- procedure Remainder (FirstNum, SecondNum : in Value; Result : out Value; Ok : out ErrorCode); --# derives Ok, --# Result from FirstNum, --# SecondNum; -- post (Ok = NoError) or (Ok = TypeMismatch) or (Ok = IllegalOperation) or -- (Ok = DivideByZero); ---------------------------------------------------------------------------- procedure Greater (FirstNum, SecondNum : in Value; Result : out Value; Ok : out ErrorCode); --# derives Ok, --# Result from FirstNum, --# SecondNum; -- post (Ok = NoError) or (Ok = TypeMismatch); ---------------------------------------------------------------------------- procedure Lesser (FirstNum, SecondNum : in Value; Result : out Value; Ok : out ErrorCode); --# derives Ok, --# Result from FirstNum, --# SecondNum; -- post (Ok = NoError) or (Ok = TypeMismatch); ---------------------------------------------------------------------------- procedure LesserOrEqual (FirstNum, SecondNum : in Value; Result : out Value; Ok : out ErrorCode); --# derives Ok, --# Result from FirstNum, --# SecondNum; -- post (Ok = NoError) or (Ok = TypeMismatch); ---------------------------------------------------------------------------- procedure GreaterOrEqual (FirstNum, SecondNum : in Value; Result : out Value; Ok : out ErrorCode); --# derives Ok, --# Result from FirstNum, --# SecondNum; -- post (Ok = NoError) or (Ok = TypeMismatch); ---------------------------------------------------------------------------- procedure InsideRange (Val, LowerBound, UpperBound : in Value; Result : out Value; Ok : out ErrorCode); --# derives Ok, --# Result from LowerBound, --# UpperBound, --# Val; -- post (Ok = NoError) or (Ok = TypeMismatch); ---------------------------------------------------------------------------- procedure OutsideRange (Val, LowerBound, UpperBound : in Value; Result : out Value; Ok : out ErrorCode); --# derives Ok, --# Result from LowerBound, --# UpperBound, --# Val; -- post (Ok = NoError) or (Ok = TypeMismatch); ---------------------------------------------------------------------------- procedure RaiseByPower (FirstNum, SecondNum : in Value; Result : out Value; Ok : out ErrorCode); --# derives Ok, --# Result from FirstNum, --# SecondNum; -- post (Ok = NoError) or (Ok = IllegalOperation) or (Ok = OverFlow); ---------------------------------------------------------------------------- -- Support for non-numeric 
types ---------------------------------------------------------------------------- function AndOp (LeftVal, RightVal : Value) return Value; ---------------------------------------------------------------------------- function OrOp (LeftVal, RightVal : Value) return Value; ---------------------------------------------------------------------------- function XorOp (LeftVal, RightVal : Value) return Value; ---------------------------------------------------------------------------- procedure NotOp (TheVal : in out Value); --# derives TheVal from *; -- pre TheVal.Sort = TruthValue ---------------------------------------------------------------------------- procedure ModularNotOp (TheVal : in out Value; TheModulus : in Value); --# derives TheVal from *, --# TheModulus; -- pre TheVal.Sort = IntegerValue and IsAPositivePowerOf2 (TheModulus); ---------------------------------------------------------------------------- procedure ValueToBool (TheVal : in Value; Result : out Boolean; Ok : out ErrorCode); --# derives Ok, --# Result from TheVal; -- post (Ok = NoError) or (Ok = TypeMismatch) ---------------------------------------------------------------------------- function BoolToValue (B : Boolean) return Value; ---------------------------------------------------------------------------- procedure PredOp (TheVal : in out Value; Ok : out ErrorCode); --# derives Ok, --# TheVal from TheVal; -- post (Ok = NoError) or (Ok = TypeMismatch) ---------------------------------------------------------------------------- procedure SuccOp (TheVal : in out Value; Ok : out ErrorCode); --# derives Ok, --# TheVal from TheVal; -- post (Ok = NoError) or (Ok = TypeMismatch) ---------------------------------------------------------------------------- function MakeEnum (Pos : Natural) return Value; ---------------------------------------------------------------------------- function IsIntegerValue (Val : Value) return Boolean; function IsRealValue (Val : Value) return Boolean; ---------------------------------------------------------------------------- --converts real value to integer value rounding away from 0 as required by --Ada 95 LRM 4.9(33) and LRM 4.9(40). function Ada95RealToInteger (TheReal : Value) return Value; ---------------------------------------------------------------------------- -- returns True for 1, 2, 4, 8, 16 ... useful for wellformedness -- of modular type declarations function IsAPositivePowerOf2 (Num : in Value) return Boolean; private --------------- IMPORTANT ----------------------------------------------- -- MaxLength defines the size of the numbers that can be supported -- to full precision. -- -- If we want to support a floating point defined by -- 1 sign bit -- e exponent bits -- s significand bits -- The largest number that can be represented is 2 ** (2**(e-1)) -- The smallest number that can be represented is 2 ** -((2**(e-1))+s-1) -- -- In order to represent any number in this range to the precision -- implied by the smallest number then in the numerator/denominator format -- the numerator must be able to represent 2 ** ((2**e) + s -1) -- So we require MaxLength > ((2**e) + s-1) log 2 -- -- e s MaxLength > -- IEEE Single Precision Float 8 23 84 -- IEEE Double Precision Float 11 52 632 ------------------------------------------------------------------------- MaxLength : constant Integer := 640; subtype LengthRange is Integer range 0 .. MaxLength; subtype PosRange is Integer range 1 .. MaxLength; type Digit is range 0 .. 
15; for Digit'Size use 4; type ValueType is (RealValue, IntegerValue, TruthValue, UnknownValue); type ValueArray is array (PosRange) of Digit; pragma Pack (ValueArray); --NB. Values are stored with LSD in Numerals(1) and MSD in -- Numerals(Length) type Part is record Numerals : ValueArray; Length : LengthRange; Overflowed : Boolean; end record; type Value is record Numerator : Part; Denominator : Part; IsPositive : Boolean; Sort : ValueType; end record; ------------------------IMPORTANT-------------------------------------- -- Modular Type support -- -- The largest modular type supported is 2**BinaryMaxLength. -- -- The value of BinaryMaxLength has an upper bound of -- |_ MaxLength / Log 2 _| -- which is the largest power of 2 that can be evaluated in a ValueArray. -- -- These values are stored with LSB in element 0, -- and MSB in element BinaryMaxLength ----------------------------------------------------------------------- BinaryMaxLength : constant Integer := 211; subtype BinaryLengthRange is Integer range 0 .. BinaryMaxLength; type Bits is array (BinaryLengthRange) of Boolean; ZeroBits : constant Bits := Bits'(others => False); ZeroPart : constant Part := Part'(Length => 1, Numerals => ValueArray'(PosRange => 0), Overflowed => False); OnePart : constant Part := Part'(Length => 1, Numerals => ValueArray'(1 => 1, others => 0), Overflowed => False); TwoPart : constant Part := Part'(Length => 1, Numerals => ValueArray'(1 => 2, others => 0), Overflowed => False); ZeroReal : constant Value := Value'(Numerator => ZeroPart, Denominator => OnePart, IsPositive => True, Sort => RealValue); ExactHalf : constant Value := Value'(Numerator => OnePart, Denominator => TwoPart, IsPositive => True, Sort => RealValue); ZeroInteger : constant Value := Value'(Numerator => ZeroPart, Denominator => OnePart, IsPositive => True, Sort => IntegerValue); OneInteger : constant Value := Value'(Numerator => OnePart, Denominator => OnePart, IsPositive => True, Sort => IntegerValue); NoValue : constant Value := Value'(Numerator => ZeroPart, Denominator => OnePart, IsPositive => True, Sort => UnknownValue); FalseValue : constant Value := Value'(Numerator => ZeroPart, Denominator => ZeroPart, IsPositive => False, Sort => TruthValue); TrueValue : constant Value := Value'(Numerator => ZeroPart, Denominator => ZeroPart, IsPositive => True, Sort => TruthValue); end Maths; spark-2012.0.deb/examiner/sem-find_previous_package.adb0000644000175000017500000001654611753202336022046 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= ---------------------------------------------------------------------- -- Find_Previous_Package -- -- Given an initial_declarative_item_rep node, we sometimes need to find -- if there's an embedded package declaration in the tree rooted at that -- node. This function returns the LexString of that package if it -- can be found, or NullString if it can't. -- -- This function is used by wf_renaming_declaration and -- wf_use_type_declaration, both of which need to locate such -- packages, so this function appears here so it can be called -- by both subunits. ---------------------------------------------------------------------- separate (Sem) function Find_Previous_Package (Node : STree.SyntaxNode) return LexTokenManager.Lex_String is Last_Node, Next_Node : STree.SyntaxNode; Pack_Ident : LexTokenManager.Lex_String; begin -- Phase 1 - search down the tree rooted at Node for a -- basic_declarative_item node or a package_declaration node Last_Node := Child_Node (Current_Node => Node); loop -- ASSUME Last_Node = initial_declarative_item_rep OR basic_declarative_item OR -- package_declaration OR generic_package_instantiation SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Last_Node) = SP_Symbols.initial_declarative_item_rep or else Syntax_Node_Type (Node => Last_Node) = SP_Symbols.basic_declarative_item or else Syntax_Node_Type (Node => Last_Node) = SP_Symbols.package_declaration or else Syntax_Node_Type (Node => Last_Node) = SP_Symbols.generic_package_instantiation, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Last_Node = initial_declarative_item_rep OR basic_declarative_item OR " & "package_declaration OR generic_package_instantiation in Find_Previous_Package"); exit when Syntax_Node_Type (Node => Last_Node) /= SP_Symbols.initial_declarative_item_rep; --# assert Syntax_Node_Type (Last_Node, STree.Table) = SP_Symbols.initial_declarative_item_rep; Next_Node := Next_Sibling (Current_Node => Last_Node); -- ASSUME Next_Node = basic_declarative_item OR package_declaration OR renaming_declaration OR -- use_type_clause OR proof_renaming_declaration OR apragma if Syntax_Node_Type (Node => Next_Node) = SP_Symbols.basic_declarative_item or else Syntax_Node_Type (Node => Next_Node) = SP_Symbols.package_declaration then -- ASSUME Next_Node = basic_declarative_item OR package_declaration -- If there's a basic_declarative_item or a package_declaration to the right -- of Last_Node, then set Last_Node to that node and exit. Last_Node := Next_Node; elsif Syntax_Node_Type (Node => Next_Node) = SP_Symbols.renaming_declaration or else Syntax_Node_Type (Node => Next_Node) = SP_Symbols.use_type_clause or else Syntax_Node_Type (Node => Next_Node) = SP_Symbols.proof_renaming_declaration or else Syntax_Node_Type (Node => Next_Node) = SP_Symbols.apragma then -- ASSUME Next_Node = renaming_declaration OR use_type_clause OR -- proof_renaming_declaration OR apragma -- No?
Then go down the tree and try again Last_Node := Child_Node (Current_Node => Last_Node); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = basic_declarative_item OR package_declaration OR renaming_declaration OR " & "use_type_clause OR proof_renaming_declaration OR apragma in Find_Previous_Package"); end if; end loop; --# assert Syntax_Node_Type (Last_Node, STree.Table) = SP_Symbols.basic_declarative_item or --# Syntax_Node_Type (Last_Node, STree.Table) = SP_Symbols.package_declaration or --# Syntax_Node_Type (Last_Node, STree.Table) = SP_Symbols.generic_package_instantiation; -- Phase 2 -- Last_Node should be a basic_declarative_item or a package_declaration case Syntax_Node_Type (Node => Last_Node) is when SP_Symbols.basic_declarative_item => -- ASSUME Last_Node = basic_declarative_item -- No previous package, so return NullString Pack_Ident := LexTokenManager.Null_String; when SP_Symbols.package_declaration => -- ASSUME Last_Node = package_declaration -- If this package has an inherit clause, then skip over it Last_Node := Child_Node (Current_Node => Last_Node); -- ASSUME Last_Node = inherit_clause OR package_specification if Syntax_Node_Type (Node => Last_Node) = SP_Symbols.inherit_clause then -- ASSUME Last_Node = inherit_clause Last_Node := Next_Sibling (Current_Node => Last_Node); elsif Syntax_Node_Type (Node => Last_Node) /= SP_Symbols.package_specification then Last_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Last_Node = inherit_clause OR package_specification in Find_Previous_Package"); end if; -- ASSUME Last_Node = package_specification SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Last_Node) = SP_Symbols.package_specification, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Last_Node = package_specification in Find_Previous_Package"); -- Find and return the package's identifier node Last_Node := Last_Child_Of (Start_Node => Last_Node); -- ASSUME Last_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Last_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Last_Node = identifier in Find_Previous_Package"); Pack_Ident := Node_Lex_String (Node => Last_Node); when SP_Symbols.generic_package_instantiation => -- ASSUME Last_Node = generic_package_instantiation Pack_Ident := LexTokenManager.Null_String; when others => Pack_Ident := LexTokenManager.Null_String; -- to avoid flow error SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Last_Node = basic_declarative_item OR package_declaration OR " & "generic_package_instantiation in Find_Previous_Package"); end case; return Pack_Ident; end Find_Previous_Package; spark-2012.0.deb/examiner/flowanalyser-flowanalyse.adb0000644000175000017500000041520711753202336021762 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Debug, RelationAlgebra.Debug; separate (FlowAnalyser) procedure FlowAnalyse (SubprogSym : in Dictionary.Symbol; StartNode : in STree.SyntaxNode; EndPosition : in LexTokenManager.Token_Position; ComponentData : in out ComponentManager.ComponentData; TheHeap : in out Heap.HeapRecord; Table : in RefList.HashTable; DataFlowErrorFound : out Boolean) is subtype ExpnCountRange is Natural range 0 .. ExaminerConstants.FlowAnalysisMaxExpnNumber; LastNode, Node : STree.SyntaxNode; NodeType : SP_Symbols.SP_Symbol; DependencyRelation : RelationAlgebra.Relation; ExpnNumber : ExpnCountRange; -- Natural; S : IFA_Stack.Stack; InnerExpns : SeqAlgebra.Seq; ReferencedVars : SeqAlgebra.Seq; SeqOfExports : SeqAlgebra.Seq; ExpSeqOfExports : SeqAlgebra.Seq; SeqOfInitVars : SeqAlgebra.Seq; LocalInits : SeqAlgebra.Seq; VarsUsedAsConstants : SeqAlgebra.Seq; SeqOfImports : SeqAlgebra.Seq; ExpSeqOfImports : SeqAlgebra.Seq; DataFlowErrorFoundLocal : Boolean; -- ShareableProtectedVars is the set of protected own vars that are not streams; these need special handling -- in the flow analyser because they are potentially shareable and may cause communication between program threads. ShareableProtectedVars : SeqAlgebra.Seq; -- Each member of the previous set has an implicitly-declared associated in stream InStreamsOfShareableProtectedVars : SeqAlgebra.Seq; -- we need to identify protected variables that are referenced somewhere ReferencedProtectedVars : SeqAlgebra.Seq; subtype ExpnIndexRange is Natural range 1 .. ExaminerConstants.FlowAnalysisMaxExpnNumber; type LocnDictionaryType is array (ExpnIndexRange) of STree.SyntaxNode; StmtLocations, ExpnLocations : LocnDictionaryType; type ExpnKind is ( SimpleAssignment, -- was SingleAssignment ComplexAssignment, -- was MultipleAssignment FieldUpdateByProc, -- case used for Inc (R.F) and similar Initialization, ReturnExpn, ForkExpn, ExitExpn, DefaultExitExpn, ControlVarAssignment, ModellingStmt); -- statement for modelling protected state interactions, not traceable to SPARK source type KindDictionaryType is array (ExpnIndexRange) of ExpnKind; KindDictionary : KindDictionaryType; -- ParamDictionary maps expression number to the symbol that is directly -- affected by that expression, such as a local variable or an export. -- Expressions which do not directly affect a variable or export (e.g. the -- controlling expression of an if statement) map to Dictionary.NullSymbol.
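   -- Illustrative sketch (hypothetical statements, not taken from this source): if the
   -- assignment "X := Y + Z;" were modelled as expression number 7 with export X, then
   -- ParamDictionary (7) would hold the symbol of X, whereas the controlling expression
   -- of "if Y > 0 then" would map to Dictionary.NullSymbol, since evaluating it does not
   -- directly define any variable or export.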
type ParamDictionaryType is array (ExpnIndexRange) of Dictionary.Symbol; ParamDictionary : ParamDictionaryType; FnResultRepn : constant Natural := 0; ZeroStableExpnSeq, OneStableExpnSeq, OtherStableExpnSeq : SeqAlgebra.Seq; NullAtom : constant Heap.Atom := Heap.Atom (0); Scope : Dictionary.Scopes; ------------------------------------------------------------- procedure PrintStackTop (Msg : in String) --# global in SubprogSym; --# derives null from Msg, --# SubprogSym; is --# hide PrintStackTop begin Debug.PrintMsg ("-------------------------------------------------------------------------------", True); Debug.PrintMsg (Msg, True); Debug.Print_Sym (Msg => "For Subprogram ", Sym => SubprogSym); Debug.Print_Sym_Seq (Msg => "DefinedVars ", Seq => IFA_Stack.Top (S).DefinedVars, The_Heap => TheHeap); Debug.PrintMsg ("----------------------------", True); Debug.Print_Sym_Seq (Msg => "UnPreservedVars ", Seq => IFA_Stack.Top (S).UnPreservedVars, The_Heap => TheHeap); Debug.PrintMsg ("----------------------------", True); Debug.Print_Sym_Seq (Msg => "AllVars ", Seq => IFA_Stack.Top (S).AllVars, The_Heap => TheHeap); Debug.PrintMsg ("----------------------------", True); RelationAlgebra.Debug.Print_Lambda ("Lambda ", IFA_Stack.Top (S).Lambda, TheHeap); Debug.PrintMsg ("----------------------------", True); RelationAlgebra.Debug.Print_Mu ("Mu ", IFA_Stack.Top (S).Mu, TheHeap); Debug.PrintMsg ("----------------------------", True); -- Theta and Theta~ are both relations from V to E - the same as Lambda, -- so we can re-use Print_Lambda here. RelationAlgebra.Debug.Print_Lambda ("Theta ", IFA_Stack.Top (S).Theta, TheHeap); Debug.PrintMsg ("----------------------------", True); RelationAlgebra.Debug.Print_Lambda ("Theta~ ", IFA_Stack.Top (S).ThetaTilde, TheHeap); Debug.PrintMsg ("----------------------------", True); RelationAlgebra.Debug.Print_Rho ("Rho ", IFA_Stack.Top (S).Rho, TheHeap); Debug.PrintMsg ("-------------------------------------------------------------------------------", True); end PrintStackTop; ------------------------------------------------------------- procedure IncrementExpression (X : in out ExpnCountRange) --# derives X from *; --# pre X <= ExaminerConstants.FlowAnalysisMaxExpnNumber; --# post X = X~ + 1; is begin if X = ExaminerConstants.FlowAnalysisMaxExpnNumber then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Flow_Analyser_Expression_Limit, Msg => ""); else X := X + 1; end if; end IncrementExpression; ------------------------------------------------------------- procedure AddSymbol (TheHeap : in out Heap.HeapRecord; Sequ : in SeqAlgebra.Seq; Sym : in Dictionary.Symbol) --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# TheHeap from *, --# Sequ, --# Sym, --# TheHeap; is begin SeqAlgebra.AddMember (TheHeap, Sequ, Natural (Dictionary.SymbolRef (Sym))); end AddSymbol; ------------------------------------------------------------- procedure InsertSymbolPair (TheHeap : in out Heap.HeapRecord; Rel : in RelationAlgebra.Relation; Sym1, Sym2 : in Dictionary.Symbol) --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# TheHeap from *, --# Rel, --# Sym1, --# Sym2, --# TheHeap; is begin RelationAlgebra.InsertPair (TheHeap, Rel, Natural (Dictionary.SymbolRef (Sym1)), Natural (Dictionary.SymbolRef (Sym2))); end InsertSymbolPair; ------------------------------------------------------------- function SymValue (Cell : Heap.Atom) return Dictionary.Symbol --# global in TheHeap; is begin return Dictionary.ConvertSymbolRef 
(ExaminerConstants.RefType (Heap.AValue (TheHeap, Cell))); end SymValue; ------------------------------------------------------------- function RepToSym (R : in Natural) return Dictionary.Symbol is begin return Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (R)); end RepToSym; ------------------------------------------------------------- -- with the introduction of Ravenscar we need to make sure that Task bodies are analysed like -- procedure bodies. Since task bodies are not procedures as far as the Dictionary is concerned -- we add the following functions to replace direct dictionary calls. function IsSubprogram (Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return Dictionary.Is_Subprogram (Sym) or else (Dictionary.IsType (Sym) and then Dictionary.TypeIsTask (Sym)); end IsSubprogram; function IsProcedure (Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return Dictionary.IsProcedure (Sym) or else (Dictionary.IsType (Sym) and then Dictionary.TypeIsTask (Sym)); end IsProcedure; --------------------------------------------------------------- procedure FormSeqOfInitVars --# global in ComponentData; --# in Dictionary.Dict; --# in SubprogSym; --# in out Statistics.TableUsage; --# in out TheHeap; --# out SeqOfInitVars; --# derives SeqOfInitVars from TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# ComponentData, --# Dictionary.Dict, --# SubprogSym, --# TheHeap; is PackageIterator, OwnVarIterator : Dictionary.Iterator; begin SeqAlgebra.CreateSeq (TheHeap, SeqOfInitVars); if IsSubprogram (SubprogSym) then -- we are doing a subprogram which may have embedded packages PackageIterator := Dictionary.First_Embedded_Package (Compilation_Unit => SubprogSym); while not Dictionary.IsNullIterator (PackageIterator) loop OwnVarIterator := Dictionary.FirstInitializedOwnVariable (Dictionary.CurrentSymbol (PackageIterator)); while not Dictionary.IsNullIterator (OwnVarIterator) loop AddSymbol (TheHeap, SeqOfInitVars, Dictionary.CurrentSymbol (OwnVarIterator)); OwnVarIterator := Dictionary.NextSymbol (OwnVarIterator); end loop; PackageIterator := Dictionary.NextSymbol (PackageIterator); end loop; -- no else part, package initializations can't have embedded packs end if; RefList.ExpandSeq (ComponentData, SeqOfInitVars, TheHeap); end FormSeqOfInitVars; ------------------------------------------------------------- procedure FormLocalInits --# global in ComponentData; --# in Dictionary.Dict; --# in SubprogSym; --# in out Statistics.TableUsage; --# in out TheHeap; --# out LocalInits; --# derives LocalInits from TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# ComponentData, --# Dictionary.Dict, --# SubprogSym, --# TheHeap; is procedure FormSubprogramLocalInits --# global in Dictionary.Dict; --# in LocalInits; --# in SubprogSym; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives Statistics.TableUsage, --# TheHeap from *, --# Dictionary.Dict, --# LocalInits, --# SubprogSym, --# TheHeap; is It : Dictionary.Iterator; begin It := Dictionary.First_Initialized_Variable (Subprogram => SubprogSym); while not Dictionary.IsNullIterator (It) loop AddSymbol (TheHeap, LocalInits, Dictionary.CurrentSymbol (It)); It := Dictionary.NextSymbol (It); end loop; end FormSubprogramLocalInits; procedure FormPackageLocalInits (Package_Symbol : Dictionary.Symbol) --# global in Dictionary.Dict; --# in LocalInits; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives Statistics.TableUsage, --# TheHeap from *, --#
Dictionary.Dict, --# LocalInits, --# Package_Symbol, --# TheHeap; is ConstituentList, InitOwnVarList : Dictionary.Iterator; InitOwnVarSym, ConstituentSym : Dictionary.Symbol; begin InitOwnVarList := Dictionary.FirstInitializedOwnVariable (Package_Symbol); while not Dictionary.IsNullIterator (InitOwnVarList) loop InitOwnVarSym := Dictionary.CurrentSymbol (InitOwnVarList); if Dictionary.IsConcreteOwnVariable (InitOwnVarSym) then if Dictionary.VariableIsInitialized (InitOwnVarSym) then -- Local, concrete, and initialized at declaration AddSymbol (TheHeap, LocalInits, InitOwnVarSym); end if; else ConstituentList := Dictionary.FirstConstituent (InitOwnVarSym); while not Dictionary.IsNullIterator (ConstituentList) loop ConstituentSym := Dictionary.CurrentSymbol (ConstituentList); if (not Dictionary.IsOwnVariable (ConstituentSym)) and then Dictionary.VariableIsInitialized (ConstituentSym) then -- Local refinement constituent initialized at declaration AddSymbol (TheHeap, LocalInits, ConstituentSym); elsif Dictionary.GetConstituentMode (ConstituentSym) /= Dictionary.DefaultMode and then Dictionary.Packages_Are_Equal (Left_Symbol => Dictionary.GetOwner (ConstituentSym), Right_Symbol => Package_Symbol) then -- Local, external refinement constituent is initialized implicitly AddSymbol (TheHeap, LocalInits, ConstituentSym); end if; -- That's all. Note that refinement constituents which are own variables of -- _other_ packages (either nested or private child) are never -- considered to be locally initialized here - they are none of our business -- in the context of the analysis of _this_ package body initialization! ConstituentList := Dictionary.NextSymbol (ConstituentList); end loop; end if; InitOwnVarList := Dictionary.NextSymbol (InitOwnVarList); end loop; end FormPackageLocalInits; begin -- FormLocalInits SeqAlgebra.CreateSeq (TheHeap, LocalInits); if IsSubprogram (SubprogSym) then FormSubprogramLocalInits; else FormPackageLocalInits (Package_Symbol => SubprogSym); end if; RefList.ExpandSeq (ComponentData, LocalInits, TheHeap); end FormLocalInits; ------------------------------------------------------------- procedure ModelLocalInits (Node : in STree.SyntaxNode) --# global in LocalInits; --# in out ExpnNumber; --# in out KindDictionary; --# in out ParamDictionary; --# in out S; --# in out Statistics.TableUsage; --# in out StmtLocations; --# in out TheHeap; --# derives ExpnNumber, --# KindDictionary, --# ParamDictionary, --# Statistics.TableUsage, --# TheHeap from *, --# ExpnNumber, --# LocalInits, --# TheHeap & --# S from *, --# LocalInits, --# TheHeap & --# StmtLocations from *, --# ExpnNumber, --# LocalInits, --# Node, --# TheHeap; is MemberOfInitVars : SeqAlgebra.MemberOfSeq; InitVarRep : Natural; InitVar : Dictionary.Symbol; R : IFA_Stack.StackMember; UndefinedVars : SeqAlgebra.Seq; begin MemberOfInitVars := SeqAlgebra.FirstMember (TheHeap, LocalInits); if not SeqAlgebra.IsNullMember (MemberOfInitVars) then IFA_Stack.EstablishMember (TheHeap, IFA_Stack.Action, R); while not SeqAlgebra.IsNullMember (MemberOfInitVars) loop InitVarRep := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => MemberOfInitVars); InitVar := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (InitVarRep)); IncrementExpression (ExpnNumber); StmtLocations (ExpnNumber) := Node; ParamDictionary (ExpnNumber) := InitVar; SeqAlgebra.AddMember (TheHeap, R.DefinedVars, InitVarRep); SeqAlgebra.AddMember (TheHeap, R.UnPreservedVars, InitVarRep); SeqAlgebra.AddMember (TheHeap, R.AllVars, InitVarRep); SeqAlgebra.AddMember 
(TheHeap, R.SeqOfExpns, ExpnNumber); RelationAlgebra.InsertPair (TheHeap, R.Mu, ExpnNumber, InitVarRep); SeqAlgebra.Complement (TheHeap, R.AllVars, R.DefinedVars, UndefinedVars); RelationAlgebra.AddIdentity (TheHeap, R.Rho, UndefinedVars); SeqAlgebra.DisposeOfSeq (TheHeap, UndefinedVars); KindDictionary (ExpnNumber) := Initialization; MemberOfInitVars := SeqAlgebra.NextMember (TheHeap, MemberOfInitVars); end loop; IFA_Stack.Push (S, R); end if; end ModelLocalInits; ------------------------------------------------------------- procedure FormDependencyRelation --# global in CommandLineData.Content; --# in ComponentData; --# in Dictionary.Dict; --# in SubprogSym; --# in out Statistics.TableUsage; --# in out TheHeap; --# out DependencyRelation; --# out ExpSeqOfExports; --# out ExpSeqOfImports; --# out InStreamsOfShareableProtectedVars; --# out ReferencedProtectedVars; --# out SeqOfExports; --# out SeqOfImports; --# out ShareableProtectedVars; --# derives DependencyRelation, --# ExpSeqOfExports, --# ExpSeqOfImports, --# InStreamsOfShareableProtectedVars, --# ReferencedProtectedVars, --# SeqOfExports, --# SeqOfImports, --# ShareableProtectedVars from TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# ComponentData, --# Dictionary.Dict, --# SubprogSym, --# TheHeap & --# null from CommandLineData.Content; is Abstraction : Dictionary.Abstractions; It, ExportIt, ImportIt : Dictionary.Iterator; ExportVar, RefinedExportVar, ImportVar : Dictionary.Symbol; ------------------------------- procedure AddFunctionImport (Sym : in Dictionary.Symbol) --# global in DependencyRelation; --# in ExpSeqOfImports; --# in SeqOfImports; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives Statistics.TableUsage, --# TheHeap from *, --# DependencyRelation, --# ExpSeqOfImports, --# SeqOfImports, --# Sym, --# TheHeap; is ImpVarRep : Natural; begin ImpVarRep := Natural (Dictionary.SymbolRef (Sym)); SeqAlgebra.AddMember (TheHeap, SeqOfImports, ImpVarRep); SeqAlgebra.AddMember (TheHeap, ExpSeqOfImports, ImpVarRep); RelationAlgebra.InsertPair (TheHeap, DependencyRelation, ImpVarRep, FnResultRepn); end AddFunctionImport; ------------------------------- -- This procedure adds implicit stream effects to the imported and exported -- variable sets. An imported in stream needs also to be made an (implicit) -- export and vice versa. Implemented by adding all streams to both sets; this -- is always safe because of earlier wellformation checks. 
-- It also adds the notional in stream associated with each unmoded protected variable -- to the list of exports and populates the sets ShareableProtectedVars and InStreamsOfShareableProtectedVars procedure AddStreamInteractions (Abstraction : in Dictionary.Abstractions) --# global in Dictionary.Dict; --# in ExpSeqOfExports; --# in ExpSeqOfImports; --# in InStreamsOfShareableProtectedVars; --# in SeqOfExports; --# in SeqOfImports; --# in ShareableProtectedVars; --# in SubprogSym; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives Statistics.TableUsage, --# TheHeap from *, --# Abstraction, --# Dictionary.Dict, --# ExpSeqOfExports, --# ExpSeqOfImports, --# InStreamsOfShareableProtectedVars, --# SeqOfExports, --# SeqOfImports, --# ShareableProtectedVars, --# SubprogSym, --# TheHeap; is GlobalIt : Dictionary.Iterator; GlobalSym : Dictionary.Symbol; begin GlobalIt := Dictionary.FirstGlobalVariable (Abstraction, SubprogSym); while not Dictionary.IsNullIterator (GlobalIt) loop GlobalSym := Dictionary.CurrentSymbol (GlobalIt); if Dictionary.GetOwnVariableOrConstituentMode (GlobalSym) /= Dictionary.DefaultMode then AddSymbol (TheHeap, SeqOfExports, GlobalSym); AddSymbol (TheHeap, SeqOfImports, GlobalSym); AddSymbol (TheHeap, ExpSeqOfExports, GlobalSym); AddSymbol (TheHeap, ExpSeqOfImports, GlobalSym); elsif Dictionary.IsUnmodedProtectedOwnVariable (GlobalSym) then AddSymbol (TheHeap, SeqOfExports, Dictionary.GetProtectedImplicitInStream (GlobalSym)); AddSymbol (TheHeap, ExpSeqOfExports, Dictionary.GetProtectedImplicitInStream (GlobalSym)); AddSymbol (TheHeap, ShareableProtectedVars, GlobalSym); AddSymbol (TheHeap, InStreamsOfShareableProtectedVars, Dictionary.GetProtectedImplicitInStream (GlobalSym)); end if; GlobalIt := Dictionary.NextSymbol (GlobalIt); end loop; end AddStreamInteractions; ------------------------------- -- Procedure to mark imports of the form "derives null from x,y,z; as imports. -- Any that are added by this procedure that are already imports will be ignored -- because SeqAlgebra sequences don't accept duplicate entries. 
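      -- Illustrative sketch (hypothetical annotation, not taken from this source):
      -- for a procedure whose contract ends with
      --    derives ... & null from X, Y;
      -- X and Y appear in no export's dependency list, so they would otherwise be missed;
      -- this procedure forces them into SeqOfImports and ExpSeqOfImports. Re-adding a
      -- variable that is already an import is harmless, for the reason given above.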
procedure AddForcedImports (Abstraction : in Dictionary.Abstractions) --# global in Dictionary.Dict; --# in ExpSeqOfImports; --# in SeqOfImports; --# in SubprogSym; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives Statistics.TableUsage, --# TheHeap from *, --# Abstraction, --# Dictionary.Dict, --# ExpSeqOfImports, --# SeqOfImports, --# SubprogSym, --# TheHeap; is It : Dictionary.Iterator; Sym : Dictionary.Symbol; begin It := Dictionary.FirstImport (Abstraction, SubprogSym); while not Dictionary.IsNullIterator (It) loop Sym := Dictionary.CurrentSymbol (It); AddSymbol (TheHeap, SeqOfImports, Sym); AddSymbol (TheHeap, ExpSeqOfImports, Sym); It := Dictionary.NextSymbol (It); end loop; end AddForcedImports; ------------------------------- begin -- FormDependencyRelation SeqAlgebra.CreateSeq (TheHeap, SeqOfExports); SeqAlgebra.CreateSeq (TheHeap, ExpSeqOfExports); SeqAlgebra.CreateSeq (TheHeap, SeqOfImports); SeqAlgebra.CreateSeq (TheHeap, ExpSeqOfImports); SeqAlgebra.CreateSeq (TheHeap, ShareableProtectedVars); SeqAlgebra.CreateSeq (TheHeap, ReferencedProtectedVars); SeqAlgebra.CreateSeq (TheHeap, InStreamsOfShareableProtectedVars); RelationAlgebra.CreateRelation (TheHeap, DependencyRelation); if IsSubprogram (SubprogSym) then Abstraction := Dictionary.GetAbstraction (SubprogSym, Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => SubprogSym)); if IsProcedure (SubprogSym) then ExportIt := Dictionary.FirstExport (Abstraction, SubprogSym); while not Dictionary.IsNullIterator (ExportIt) loop --# assert Abstraction in Dictionary.Abstractions; ExportVar := Dictionary.CurrentSymbol (ExportIt); AddSymbol (TheHeap, SeqOfExports, ExportVar); AddSymbol (TheHeap, ExpSeqOfExports, ExportVar); ImportIt := Dictionary.FirstDependency (Abstraction, SubprogSym, ExportVar); while not Dictionary.IsNullIterator (ImportIt) loop --# assert Abstraction in Dictionary.Abstractions; ImportVar := Dictionary.CurrentSymbol (ImportIt); AddSymbol (TheHeap, SeqOfImports, ImportVar); AddSymbol (TheHeap, ExpSeqOfImports, ImportVar); InsertSymbolPair (TheHeap, DependencyRelation, ImportVar, ExportVar); ImportIt := Dictionary.NextSymbol (ImportIt); end loop; ExportIt := Dictionary.NextSymbol (ExportIt); end loop; AddStreamInteractions (Abstraction); AddForcedImports (Abstraction); -- add the data sink NullVariable as an import and an export AddSymbol (TheHeap, SeqOfImports, Dictionary.GetNullVariable); AddSymbol (TheHeap, ExpSeqOfImports, Dictionary.GetNullVariable); AddSymbol (TheHeap, SeqOfExports, Dictionary.GetNullVariable); AddSymbol (TheHeap, ExpSeqOfExports, Dictionary.GetNullVariable); else -- form dependency relation for a function subprogram; SeqAlgebra.AddMember (TheHeap, SeqOfExports, FnResultRepn); SeqAlgebra.AddMember (TheHeap, ExpSeqOfExports, FnResultRepn); -- add the data sink NullVariable as an import AddSymbol (TheHeap, SeqOfImports, Dictionary.GetNullVariable); AddSymbol (TheHeap, ExpSeqOfImports, Dictionary.GetNullVariable); It := Dictionary.FirstSubprogramParameter (SubprogSym); while not Dictionary.IsNullIterator (It) loop --# assert Abstraction in Dictionary.Abstractions; AddFunctionImport (Dictionary.CurrentSymbol (It)); It := Dictionary.NextSymbol (It); end loop; It := Dictionary.FirstGlobalVariable (Abstraction, SubprogSym); while not Dictionary.IsNullIterator (It) loop --# assert Abstraction in Dictionary.Abstractions; AddFunctionImport (Dictionary.CurrentSymbol (It)); It := Dictionary.NextSymbol (It); end loop; AddStreamInteractions (Abstraction); end if; else 
-- create "dependency list" for package body initialization It := Dictionary.FirstInitializedOwnVariable (SubprogSym); while not Dictionary.IsNullIterator (It) loop ExportVar := Dictionary.CurrentSymbol (It); if Dictionary.Is_Declared (Item => ExportVar) then AddSymbol (TheHeap, SeqOfExports, ExportVar); AddSymbol (TheHeap, ExpSeqOfExports, ExportVar); else -- abstract own var ExportIt := Dictionary.FirstConstituent (ExportVar); while not Dictionary.IsNullIterator (ExportIt) loop -- exclude refinements to embedded packages RefinedExportVar := Dictionary.CurrentSymbol (ExportIt); if Dictionary.Packages_Are_Equal (Left_Symbol => Dictionary.GetOwner (RefinedExportVar), Right_Symbol => SubprogSym) then AddSymbol (TheHeap, SeqOfExports, RefinedExportVar); AddSymbol (TheHeap, ExpSeqOfExports, RefinedExportVar); end if; ExportIt := Dictionary.NextSymbol (ExportIt); end loop; end if; It := Dictionary.NextSymbol (It); end loop; end if; RefList.ExpandSeq (ComponentData, ExpSeqOfImports, TheHeap); RefList.ExpandSeq (ComponentData, ExpSeqOfExports, TheHeap); if CommandLineData.Content.Debug.Rho then Debug.Print_Sym (Msg => "Required flow relations for subprogram ", Sym => SubprogSym); Debug.Print_Sym_Seq (Msg => "Exports ", Seq => SeqOfExports, The_Heap => TheHeap); Debug.Print_Sym_Seq (Msg => "Imports ", Seq => SeqOfImports, The_Heap => TheHeap); RelationAlgebra.Debug.Print_Rho ("Rho ", DependencyRelation, TheHeap); end if; end FormDependencyRelation; ------------------------------------------------------------- procedure NullStatement --# global in out S; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives S, --# Statistics.TableUsage, --# TheHeap from *, --# TheHeap; is R : IFA_Stack.StackMember; begin IFA_Stack.EstablishMember (TheHeap, IFA_Stack.Action, R); IFA_Stack.Push (S, R); end NullStatement; ------------------------------------------------------------- procedure CombineSequence --# global in out S; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives S, --# Statistics.TableUsage, --# TheHeap from *, --# S, --# TheHeap; is A, B, R : IFA_Stack.StackMember; NewVars : SeqAlgebra.Seq; DefinedVars, UnPreservedVars, AllVars, SeqOfExpns : SeqAlgebra.Seq; Lambda, Mu, Rho, Theta, ThetaTilde : RelationAlgebra.Relation; begin if IFA_Stack.Top (S).MemberKind = IFA_Stack.Action then IFA_Stack.Pop (S, B); if not IFA_Stack.IsEmpty (S) and then -- only combine if top exists and is action IFA_Stack.Top (S).MemberKind = IFA_Stack.Action then IFA_Stack.Pop (S, A); IFA_Stack.EstablishMember (TheHeap, IFA_Stack.Action, R); SeqAlgebra.Union (TheHeap, A.DefinedVars, B.DefinedVars, DefinedVars); R.DefinedVars := DefinedVars; SeqAlgebra.Union (TheHeap, A.UnPreservedVars, B.UnPreservedVars, UnPreservedVars); R.UnPreservedVars := UnPreservedVars; SeqAlgebra.Union (TheHeap, A.AllVars, B.AllVars, AllVars); R.AllVars := AllVars; SeqAlgebra.Union (TheHeap, A.SeqOfExpns, B.SeqOfExpns, SeqOfExpns); R.SeqOfExpns := SeqOfExpns; SeqAlgebra.Complement (TheHeap, B.AllVars, A.AllVars, NewVars); RelationAlgebra.AddIdentity (TheHeap, A.Rho, NewVars); SeqAlgebra.DisposeOfSeq (TheHeap, NewVars); SeqAlgebra.Complement (TheHeap, A.AllVars, B.AllVars, NewVars); RelationAlgebra.AddIdentity (TheHeap, B.Rho, NewVars); SeqAlgebra.DisposeOfSeq (TheHeap, NewVars); RelationAlgebra.Composition (TheHeap, A.Rho, B.Lambda, Lambda); R.Lambda := Lambda; RelationAlgebra.AugmentRelation (TheHeap, R.Lambda, A.Lambda); RelationAlgebra.Composition (TheHeap, A.Mu, B.Rho, Mu); R.Mu := Mu; RelationAlgebra.AugmentRelation 
(TheHeap, R.Mu, B.Mu); RelationAlgebra.Composition (TheHeap, A.Rho, B.Rho, Rho); R.Rho := Rho; RelationAlgebra.RowRemoval (TheHeap, B.Theta, A.UnPreservedVars, Theta); R.Theta := Theta; RelationAlgebra.AugmentRelation (TheHeap, R.Theta, A.Theta); RelationAlgebra.RowRemoval (TheHeap, B.ThetaTilde, A.DefinedVars, ThetaTilde); R.ThetaTilde := ThetaTilde; RelationAlgebra.AugmentRelation (TheHeap, R.ThetaTilde, A.ThetaTilde); IFA_Stack.Push (S, R); IFA_Stack.DisposeOfMember (TheHeap, A); IFA_Stack.DisposeOfMember (TheHeap, B); else IFA_Stack.Push (S, B); end if; end if; end CombineSequence; ----------------------------------------------------------------- -- This function returns a reference number for the implicit -- in stream associated with a shareable protected variable. -- It is used by both Mapping and ConnectProtectedStreams function AssociatedStreamRep (TheProtVarRep : Natural) return Natural --# global in Dictionary.Dict; is ProtSym, StreamSym : Dictionary.Symbol; begin ProtSym := RepToSym (TheProtVarRep); StreamSym := Dictionary.GetProtectedImplicitInStream (ProtSym); return Natural (Dictionary.SymbolRef (StreamSym)); end AssociatedStreamRep; ------------------------------------------------------------- -- This procedure performs a modelling function connected with -- potentially shareable (unmoded) protected variables. -- For each protected variable P there is a companion in stream -- Pin. Before the start of flow analysis, we need to initialize -- the in stream: for each P in set ShareableProtectedVars, we -- construct the mapping Pin <-- P. -- Between each mapping statement we construct Pin <-- Pin, P. -- The following procedure performs the former if WithSelfReference -- is False and the latter if it is True. procedure ConnectProtectedStreams (Node : in STree.SyntaxNode; VarsToMap : in SeqAlgebra.Seq; WithSelfReference : in Boolean) --# global in Dictionary.Dict; --# in ReferencedVars; --# in out ExpnLocations; --# in out ExpnNumber; --# in out KindDictionary; --# in out ParamDictionary; --# in out S; --# in out Statistics.TableUsage; --# in out StmtLocations; --# in out TheHeap; --# derives ExpnLocations, --# StmtLocations from *, --# Dictionary.Dict, --# ExpnNumber, --# Node, --# ReferencedVars, --# TheHeap, --# VarsToMap, --# WithSelfReference & --# ExpnNumber, --# KindDictionary, --# ParamDictionary, --# Statistics.TableUsage, --# TheHeap from *, --# Dictionary.Dict, --# ExpnNumber, --# ReferencedVars, --# TheHeap, --# VarsToMap, --# WithSelfReference & --# S from *, --# TheHeap; is ProtVarMem : SeqAlgebra.MemberOfSeq; ProtVarRep, StreamRep : Natural; R : IFA_Stack.StackMember; procedure AddOneMapping (StreamRep, ProtVarRep : in Natural; WithSelfReference : in Boolean) --# global in Node; --# in R; --# in ReferencedVars; --# in out ExpnLocations; --# in out ExpnNumber; --# in out KindDictionary; --# in out ParamDictionary; --# in out Statistics.TableUsage; --# in out StmtLocations; --# in out TheHeap; --# derives ExpnLocations, --# StmtLocations from *, --# ExpnNumber, --# Node & --# ExpnNumber, --# KindDictionary from *, --# ExpnNumber & --# ParamDictionary from *, --# ExpnNumber, --# StreamRep & --# Statistics.TableUsage, --# TheHeap from *, --# ExpnNumber, --# ProtVarRep, --# R, --# ReferencedVars, --# StreamRep, --# TheHeap, --# WithSelfReference; is UndefinedVars : SeqAlgebra.Seq; begin IncrementExpression (ExpnNumber); StmtLocations (ExpnNumber) := Node; ExpnLocations (ExpnNumber) := Node; -- Rather unintuitively, the export is the IN stream associated with the PO 
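         -- Illustrative sketch of the relation built below, in dependency terms, for a
         -- shareable protected variable P with implicit in stream Pin:
         --    Pin <-- P         when WithSelfReference is False (initializing the stream)
         --    Pin <-- Pin, P    when WithSelfReference is True (between mapping statements)
         -- i.e. the in stream is the export, and P (plus, conditionally, Pin) the imports.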
ParamDictionary (ExpnNumber) := RepToSym (StreamRep); -- make Pin an export SeqAlgebra.AddMember (TheHeap, R.DefinedVars, StreamRep); SeqAlgebra.AddMember (TheHeap, R.UnPreservedVars, StreamRep); SeqAlgebra.AddMember (TheHeap, R.AllVars, StreamRep); SeqAlgebra.AddMember (TheHeap, R.SeqOfExpns, ExpnNumber); RelationAlgebra.InsertPair (TheHeap, R.Mu, ExpnNumber, StreamRep); -- make P an import SeqAlgebra.AddMember (TheHeap, ReferencedVars, ProtVarRep); SeqAlgebra.AddMember (TheHeap, R.AllVars, ProtVarRep); RelationAlgebra.InsertPair (TheHeap, R.Lambda, ProtVarRep, ExpnNumber); RelationAlgebra.InsertPair (TheHeap, R.Rho, ProtVarRep, StreamRep); RelationAlgebra.InsertPair (TheHeap, R.Theta, ProtVarRep, ExpnNumber); RelationAlgebra.InsertPair (TheHeap, R.ThetaTilde, ProtVarRep, ExpnNumber); -- conditionally make Pin an import if WithSelfReference then SeqAlgebra.AddMember (TheHeap, ReferencedVars, StreamRep); RelationAlgebra.InsertPair (TheHeap, R.Lambda, StreamRep, ExpnNumber); RelationAlgebra.InsertPair (TheHeap, R.Rho, StreamRep, StreamRep); RelationAlgebra.InsertPair (TheHeap, R.Theta, StreamRep, ExpnNumber); RelationAlgebra.InsertPair (TheHeap, R.ThetaTilde, StreamRep, ExpnNumber); end if; SeqAlgebra.Complement (TheHeap, R.AllVars, R.DefinedVars, UndefinedVars); RelationAlgebra.AddIdentity (TheHeap, R.Rho, UndefinedVars); SeqAlgebra.DisposeOfSeq (TheHeap, UndefinedVars); -- mark statement as being for modelling purposes only and not associated with -- a user source statement. No errors are reported for these kinds of statement KindDictionary (ExpnNumber) := ModellingStmt; end AddOneMapping; begin -- ConnectProtectedStreams IFA_Stack.EstablishMember (TheHeap, IFA_Stack.Action, R); ProtVarMem := SeqAlgebra.FirstMember (TheHeap, VarsToMap); while not SeqAlgebra.IsNullMember (ProtVarMem) loop ProtVarRep := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => ProtVarMem); StreamRep := AssociatedStreamRep (ProtVarRep); AddOneMapping (StreamRep, ProtVarRep, WithSelfReference); ProtVarMem := SeqAlgebra.NextMember (TheHeap, ProtVarMem); end loop; IFA_Stack.Push (S, R); end ConnectProtectedStreams; ------------------------------------------------------------- -- The Rho and other sets/relations for the sequence of statements -- will have substituted the implicit in stream P__in for each -- referenced protected variable P. 
This procedure initializes -- each P__in by constructing the mapping P__in <-- P procedure InitializeProtectedStreams (Node : in STree.SyntaxNode) --# global in Dictionary.Dict; --# in ReferencedProtectedVars; --# in ReferencedVars; --# in out ExpnLocations; --# in out ExpnNumber; --# in out KindDictionary; --# in out ParamDictionary; --# in out S; --# in out Statistics.TableUsage; --# in out StmtLocations; --# in out TheHeap; --# derives ExpnLocations, --# StmtLocations from *, --# Dictionary.Dict, --# ExpnNumber, --# Node, --# ReferencedProtectedVars, --# ReferencedVars, --# TheHeap & --# ExpnNumber, --# KindDictionary, --# ParamDictionary, --# S from *, --# Dictionary.Dict, --# ExpnNumber, --# ReferencedProtectedVars, --# ReferencedVars, --# TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# Dictionary.Dict, --# ExpnNumber, --# ReferencedProtectedVars, --# ReferencedVars, --# S, --# TheHeap; is TempStackTop : IFA_Stack.StackMember; begin if not SeqAlgebra.IsEmptySeq (TheHeap, ReferencedProtectedVars) then IFA_Stack.Pop (S, TempStackTop); -- Then build the relation for initializing the streams ConnectProtectedStreams (Node => Node, VarsToMap => ReferencedProtectedVars, WithSelfReference => False); -- Restore stack IFA_Stack.Push (S, TempStackTop); -- and combine the initializations in CombineSequence; end if; end InitializeProtectedStreams; ------------------------------------------------------------- procedure Mapping (Node : in STree.SyntaxNode) -- This procedure constructs the flow relations for a mapping statement. --# global in ComponentData; --# in Dictionary.Dict; --# in ReferencedProtectedVars; --# in ReferencedVars; --# in ShareableProtectedVars; --# in STree.Table; --# in Table; --# in out ExpnLocations; --# in out ExpnNumber; --# in out KindDictionary; --# in out ParamDictionary; --# in out S; --# in out Statistics.TableUsage; --# in out StmtLocations; --# in out TheHeap; --# derives ExpnLocations, --# ExpnNumber, --# KindDictionary, --# ParamDictionary, --# S, --# StmtLocations from *, --# ComponentData, --# Dictionary.Dict, --# ExpnNumber, --# Node, --# ParamDictionary, --# ReferencedProtectedVars, --# ReferencedVars, --# ShareableProtectedVars, --# STree.Table, --# Table, --# TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# ComponentData, --# Dictionary.Dict, --# ExpnNumber, --# Node, --# ParamDictionary, --# ReferencedProtectedVars, --# ReferencedVars, --# S, --# ShareableProtectedVars, --# STree.Table, --# Table, --# TheHeap; is ExportAtom : Heap.Atom; DepList : SeqAlgebra.Seq; ExpVarRep, FirstExpression : Natural; NmbrOfExpns : ExpnCountRange; R : IFA_Stack.StackMember; UndefinedVars : SeqAlgebra.Seq; AssignedExpressionNode : STree.SyntaxNode; AssignedSym : Dictionary.Symbol := Dictionary.NullSymbol; ExportSet : SeqAlgebra.Seq; -- set of all 'exports' of this statement AssignmentKind : ExpnKind; -- this set gets populated with all the protected vars Pin that need a mapping Pin <- Pin adding ProtectedSelfReferenceToAdd : SeqAlgebra.Seq; -- this set is the set of things we need to add Pin <= Pin, P to.
ProtectedUpdatesToConnect : SeqAlgebra.Seq; ProtVarMem : SeqAlgebra.MemberOfSeq; ProtVarRep : Natural; procedure SubstituteProtectedVars (Imports : in SeqAlgebra.Seq) --# global in Dictionary.Dict; --# in ProtectedSelfReferenceToAdd; --# in ReferencedProtectedVars; --# in ShareableProtectedVars; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives Statistics.TableUsage, --# TheHeap from *, --# Dictionary.Dict, --# Imports, --# ProtectedSelfReferenceToAdd, --# ReferencedProtectedVars, --# ShareableProtectedVars, --# TheHeap; is ProtectedImports : SeqAlgebra.Seq; ProtMem : SeqAlgebra.MemberOfSeq; ProtRep : Natural; begin SeqAlgebra.Intersection (TheHeap, Imports, ShareableProtectedVars, -- to get ProtectedImports); SeqAlgebra.Reduction (TheHeap, Imports, ProtectedImports); -- Imports now only contain things that AREN'T shareable protected vars and -- ProtectedImports contains a list of that ARE -- Now we insert the associated in stream for each member of ProtectedImports into Imports ProtMem := SeqAlgebra.FirstMember (TheHeap, ProtectedImports); while not SeqAlgebra.IsNullMember (ProtMem) loop ProtRep := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => ProtMem); SeqAlgebra.AddMember (TheHeap, Imports, AssociatedStreamRep (ProtRep)); -- and in the set of all referenced protected vars SeqAlgebra.AddMember (TheHeap, ReferencedProtectedVars, ProtRep); -- and we add the stream to the set of things that require a self-reference in mapping SeqAlgebra.AddMember (TheHeap, ProtectedSelfReferenceToAdd, AssociatedStreamRep (ProtRep)); ProtMem := SeqAlgebra.NextMember (TheHeap, ProtMem); end loop; SeqAlgebra.DisposeOfSeq (TheHeap, ProtectedImports); end SubstituteProtectedVars; ------------------------------------------------------ procedure ProcedureAssignmentKind (ExNum : in ExpnIndexRange; AssignmentKind : out ExpnKind) --# global in ComponentData; --# in Dictionary.Dict; --# in ExportSet; --# in ParamDictionary; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives AssignmentKind from ComponentData, --# Dictionary.Dict, --# ExNum, --# ExportSet, --# ParamDictionary, --# TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# ComponentData, --# Dictionary.Dict, --# ExNum, --# ParamDictionary, --# TheHeap; is ExportSiblings : SeqAlgebra.Seq; CurrentSibling : SeqAlgebra.MemberOfSeq; begin AssignmentKind := ComplexAssignment; if Dictionary.IsSubcomponent (ParamDictionary (ExNum)) then ComponentManager.GetLeaves (TheHeap, ComponentData, ComponentManager.GetRoot (ComponentData, ComponentManager.GetComponentNode (ComponentData, ParamDictionary (ExNum))), -- to get ExportSiblings); CurrentSibling := SeqAlgebra.FirstMember (TheHeap, ExportSiblings); while not SeqAlgebra.IsNullMember (CurrentSibling) loop if not SeqAlgebra.IsMember (TheHeap, ExportSet, SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => CurrentSibling)) then AssignmentKind := FieldUpdateByProc; exit; end if; CurrentSibling := SeqAlgebra.NextMember (TheHeap, CurrentSibling); end loop; end if; end ProcedureAssignmentKind; begin -- Mapping SeqAlgebra.CreateSeq (TheHeap, ExportSet); SeqAlgebra.CreateSeq (TheHeap, ProtectedSelfReferenceToAdd); IFA_Stack.EstablishMember (TheHeap, IFA_Stack.Action, R); NmbrOfExpns := 0; RefList.FirstExport (Table, TheHeap, Node, -- to get ExportAtom); while ExportAtom /= NullAtom loop --# assert NmbrOfExpns in ExpnCountRange and --# ExportAtom in Heap.Atom; IncrementExpression (ExpnNumber); IncrementExpression (NmbrOfExpns); StmtLocations (ExpnNumber) := Node; ParamDictionary 
(ExpnNumber) := SymValue (ExportAtom); AssignedSym := SymValue (ExportAtom); ExpVarRep := Heap.AValue (TheHeap, ExportAtom); SeqAlgebra.AddMember (TheHeap, ExportSet, ExpVarRep); SeqAlgebra.AddMember (TheHeap, R.DefinedVars, ExpVarRep); SeqAlgebra.AddMember (TheHeap, R.UnPreservedVars, ExpVarRep); SeqAlgebra.AddMember (TheHeap, R.AllVars, ExpVarRep); SeqAlgebra.AddMember (TheHeap, R.SeqOfExpns, ExpnNumber); RelationAlgebra.InsertPair (TheHeap, R.Mu, ExpnNumber, ExpVarRep); DepList := RefList.DependencyList (TheHeap, ExportAtom); -- Where DepList contains references to potentially shareable -- protected variables (indicated by membership of ShareableProtectedVars) -- then we need to replace each such P with its associated implicit in stream Pin SubstituteProtectedVars (DepList); -- After the above call, set ProtectedSelfReferenceToAdd contains all the -- Pin have been substituted so we can add self-dependencies on them below -- new loop-free version using suitable rel & seq algebra calls SeqAlgebra.AugmentSeq (TheHeap, ReferencedVars, DepList); SeqAlgebra.AugmentSeq (TheHeap, R.AllVars, DepList); RelationAlgebra.AddCol (TheHeap, R.Lambda, ExpnNumber, DepList); RelationAlgebra.AddCol (TheHeap, R.Rho, ExpVarRep, DepList); -- In the two following statements, ordered pairs in Theta and ThetaTilde -- associated with a mapping statement are all attributed to the first -- of its expressions, to avoid replication of SPARK error messages. FirstExpression := (ExpnNumber - NmbrOfExpns) + 1; RelationAlgebra.AddCol (TheHeap, R.Theta, FirstExpression, DepList); RelationAlgebra.AddCol (TheHeap, R.ThetaTilde, FirstExpression, DepList); ExportAtom := RefList.NextExport (TheHeap, ExportAtom); end loop; -- We have now modelled all the explicit mappings obtained from the RefList for -- the statement in question (with any protected imports replaced by their -- associated implicit in stream. 
For each of these streams, we now need to add -- a self reference Pin <- Pin ProtVarMem := SeqAlgebra.FirstMember (TheHeap, ProtectedSelfReferenceToAdd); while not SeqAlgebra.IsNullMember (ProtVarMem) loop --# assert NmbrOfExpns in ExpnCountRange; ProtVarRep := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => ProtVarMem); -- ProtVarRep contains one instance of a Pin IncrementExpression (ExpnNumber); IncrementExpression (NmbrOfExpns); StmtLocations (ExpnNumber) := Node; ParamDictionary (ExpnNumber) := RepToSym (ProtVarRep); AssignedSym := RepToSym (ProtVarRep); -- make Pin an export SeqAlgebra.AddMember (TheHeap, ExportSet, ProtVarRep); SeqAlgebra.AddMember (TheHeap, R.DefinedVars, ProtVarRep); SeqAlgebra.AddMember (TheHeap, R.UnPreservedVars, ProtVarRep); SeqAlgebra.AddMember (TheHeap, R.AllVars, ProtVarRep); SeqAlgebra.AddMember (TheHeap, R.SeqOfExpns, ExpnNumber); RelationAlgebra.InsertPair (TheHeap, R.Mu, ExpnNumber, ProtVarRep); -- make Pin an import SeqAlgebra.AddMember (TheHeap, ReferencedVars, ProtVarRep); RelationAlgebra.InsertPair (TheHeap, R.Lambda, ProtVarRep, ExpnNumber); RelationAlgebra.InsertPair (TheHeap, R.Rho, ProtVarRep, ProtVarRep); RelationAlgebra.InsertPair (TheHeap, R.Theta, ProtVarRep, ExpnNumber); RelationAlgebra.InsertPair (TheHeap, R.ThetaTilde, ProtVarRep, ExpnNumber); ProtVarMem := SeqAlgebra.NextMember (TheHeap, ProtVarMem); end loop; -- after this mapping statement, we will add a modelling statement (of the fomr Pin <- Pin, P) for -- protected state that has been defined by the mapping SeqAlgebra.Intersection (TheHeap, R.DefinedVars, ShareableProtectedVars, -- to get ProtectedUpdatesToConnect); SeqAlgebra.Complement (TheHeap, R.AllVars, R.DefinedVars, UndefinedVars); RelationAlgebra.AddIdentity (TheHeap, R.Rho, UndefinedVars); SeqAlgebra.DisposeOfSeq (TheHeap, UndefinedVars); IFA_Stack.Push (S, R); -- tests to distinguish between a simple assignment statement -- where we want to report error on expression and procedure call which -- is potentially a multiple assignment. Note however that a procedure -- call with a single export still wants to report error on statement not -- expression. Also, if assigned variable is an array we must report on -- the statement in case it is an index that is undefined. AssignedExpressionNode := STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node)); if STree.Syntax_Node_Type (Node => AssignedExpressionNode) = SP_Symbols.expression then -- assignment statement not procedure call if NmbrOfExpns = 1 then -- single assignment, could be scalar, array aggregate or unexpanded record aggregate if Dictionary.TypeIsArray (Dictionary.GetType (AssignedSym)) then -- single assignment to array regarded as a complex assignment KindDictionary (ExpnNumber) := ComplexAssignment; else -- assignment to scalar or unexpanded record ExpnLocations (ExpnNumber) := AssignedExpressionNode; KindDictionary (ExpnNumber) := SimpleAssignment; end if; else -- multiple expressions, must be expanded record aggregate for ExpnCounter in ExpnCountRange range (ExpnNumber - NmbrOfExpns) + 1 .. ExpnNumber loop ProcedureAssignmentKind (ExpnCounter, -- to get AssignmentKind); KindDictionary (ExpnCounter) := AssignmentKind; end loop; end if; else -- procedure call - for loop works even in single export case for ExpnCounter in ExpnCountRange range (ExpnNumber - NmbrOfExpns) + 1 .. 
ExpnNumber loop ProcedureAssignmentKind (ExpnCounter, -- to get AssignmentKind); KindDictionary (ExpnCounter) := AssignmentKind; end loop; end if; SeqAlgebra.DisposeOfSeq (TheHeap, ExportSet); SeqAlgebra.DisposeOfSeq (TheHeap, ProtectedSelfReferenceToAdd); -- Add in modelling statement Pin <- Pin, P; ConnectProtectedStreams (Node => Node, VarsToMap => ProtectedUpdatesToConnect, WithSelfReference => True); CombineSequence; -- connect it to the original mapping that was top of stack -- new top of stack is the combined effect of the Mapping and the protected state update model SeqAlgebra.DisposeOfSeq (TheHeap, ProtectedUpdatesToConnect); end Mapping; ----------------------------------------------------------------- procedure ProcessExpression (Node, StmtNode : in STree.SyntaxNode; R : in IFA_Stack.StackMember) --# global in ReferencedVars; --# in Table; --# in out ExpnLocations; --# in out ExpnNumber; --# in out KindDictionary; --# in out ParamDictionary; --# in out Statistics.TableUsage; --# in out StmtLocations; --# in out TheHeap; --# derives ExpnLocations from *, --# ExpnNumber, --# Node & --# ExpnNumber, --# ParamDictionary from *, --# ExpnNumber & --# KindDictionary from *, --# ExpnNumber, --# R & --# Statistics.TableUsage, --# TheHeap from *, --# ExpnNumber, --# Node, --# R, --# ReferencedVars, --# Table, --# TheHeap & --# StmtLocations from *, --# ExpnNumber, --# StmtNode; is ImportList : SeqAlgebra.Seq; begin IncrementExpression (ExpnNumber); ExpnLocations (ExpnNumber) := Node; StmtLocations (ExpnNumber) := StmtNode; -- The expression being processed controls a compound expression, -- or an implicit or explicit loop exit, so is not directly connected -- to any variable or export, so... ParamDictionary (ExpnNumber) := Dictionary.NullSymbol; case R.MemberKind is when IFA_Stack.IfNode | IFA_Stack.ElsifNode | IFA_Stack.CaseNode => KindDictionary (ExpnNumber) := ForkExpn; when IFA_Stack.ExitNode => KindDictionary (ExpnNumber) := ExitExpn; when IFA_Stack.DefaultExitNode --898 => KindDictionary (ExpnNumber) := DefaultExitExpn; --898 when others => null; end case; SeqAlgebra.AddMember (TheHeap, R.SeqOfExpns, ExpnNumber); RefList.ReferencedVarList (Table, TheHeap, Node, -- to get ImportList); SeqAlgebra.AugmentSeq (TheHeap, ReferencedVars, ImportList); SeqAlgebra.AugmentSeq (TheHeap, R.AllVars, ImportList); end ProcessExpression; ----------------------------------------------------------------- procedure StartIf -- This procedure constructs the stack member for a condition node of an -- if_statement. --# global in Node; --# in ReferencedVars; --# in Table; --# in out ExpnLocations; --# in out ExpnNumber; --# in out KindDictionary; --# in out ParamDictionary; --# in out S; --# in out Statistics.TableUsage; --# in out StmtLocations; --# in out TheHeap; --# derives ExpnLocations, --# StmtLocations from *, --# ExpnNumber, --# Node & --# ExpnNumber, --# ParamDictionary from *, --# ExpnNumber & --# KindDictionary from *, --# ExpnNumber, --# TheHeap & --# S from *, --# TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# ExpnNumber, --# Node, --# ReferencedVars, --# Table, --# TheHeap; is R : IFA_Stack.StackMember; begin IFA_Stack.EstablishMember (TheHeap, IFA_Stack.IfNode, R); ProcessExpression (Node, Node, R); IFA_Stack.Push (S, R); end StartIf; ----------------------------------------------------------------- procedure StartElsIf -- This procedure constructs the stack member for a condition node of an -- elsif clause. 
--# global in Node; --# in ReferencedVars; --# in Table; --# in out ExpnLocations; --# in out ExpnNumber; --# in out KindDictionary; --# in out ParamDictionary; --# in out S; --# in out Statistics.TableUsage; --# in out StmtLocations; --# in out TheHeap; --# derives ExpnLocations, --# StmtLocations from *, --# ExpnNumber, --# Node & --# ExpnNumber, --# ParamDictionary from *, --# ExpnNumber & --# KindDictionary from *, --# ExpnNumber, --# TheHeap & --# S from *, --# TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# ExpnNumber, --# Node, --# ReferencedVars, --# Table, --# TheHeap; is R : IFA_Stack.StackMember; begin IFA_Stack.EstablishMember (TheHeap, IFA_Stack.ElsifNode, R); ProcessExpression (Node, Node, R); IFA_Stack.Push (S, R); end StartElsIf; ----------------------------------------------------------------- procedure StartCase -- This procedure constructs the stack member for the expression node of a -- case_statement. --# global in Node; --# in ReferencedVars; --# in Table; --# in out ExpnLocations; --# in out ExpnNumber; --# in out KindDictionary; --# in out ParamDictionary; --# in out S; --# in out Statistics.TableUsage; --# in out StmtLocations; --# in out TheHeap; --# derives ExpnLocations, --# StmtLocations from *, --# ExpnNumber, --# Node & --# ExpnNumber, --# ParamDictionary from *, --# ExpnNumber & --# KindDictionary from *, --# ExpnNumber, --# TheHeap & --# S from *, --# TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# ExpnNumber, --# Node, --# ReferencedVars, --# Table, --# TheHeap; is R : IFA_Stack.StackMember; begin IFA_Stack.EstablishMember (TheHeap, IFA_Stack.CaseNode, R); ProcessExpression (Node, Node, R); IFA_Stack.Push (S, R); end StartCase; ----------------------------------------------------------------- procedure StartLoop --# global in ComponentData; --# in Dictionary.Dict; --# in Node; --# in ReferencedProtectedVars; --# in ReferencedVars; --# in ShareableProtectedVars; --# in STree.Table; --# in Table; --# in out ExpnLocations; --# in out ExpnNumber; --# in out KindDictionary; --# in out ParamDictionary; --# in out S; --# in out Statistics.TableUsage; --# in out StmtLocations; --# in out TheHeap; --# derives ExpnLocations, --# ExpnNumber, --# KindDictionary, --# ParamDictionary, --# S, --# StmtLocations from *, --# ComponentData, --# Dictionary.Dict, --# ExpnNumber, --# Node, --# ParamDictionary, --# ReferencedProtectedVars, --# ReferencedVars, --# ShareableProtectedVars, --# STree.Table, --# Table, --# TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# ComponentData, --# Dictionary.Dict, --# ExpnNumber, --# Node, --# ParamDictionary, --# ReferencedProtectedVars, --# ReferencedVars, --# S, --# ShareableProtectedVars, --# STree.Table, --# Table, --# TheHeap; is LocalNode : STree.SyntaxNode; procedure PushLoopHead --# global in out S; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives S, --# Statistics.TableUsage, --# TheHeap from *, --# TheHeap; is R : IFA_Stack.StackMember; begin IFA_Stack.EstablishMember (TheHeap, IFA_Stack.LoopHead, R); IFA_Stack.Push (S, R); end PushLoopHead; ----------------------------------------------------------------- procedure StartWhileLoop --# global in LocalNode; --# in ReferencedVars; --# in Table; --# in out ExpnLocations; --# in out ExpnNumber; --# in out KindDictionary; --# in out ParamDictionary; --# in out S; --# in out Statistics.TableUsage; --# in out StmtLocations; --# in out TheHeap; --# derives ExpnLocations, --# StmtLocations from *, --# ExpnNumber, --# LocalNode & --# 
ExpnNumber, --# ParamDictionary from *, --# ExpnNumber & --# KindDictionary from *, --# ExpnNumber, --# TheHeap & --# S from *, --# TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# ExpnNumber, --# LocalNode, --# ReferencedVars, --# Table, --# TheHeap; is R1, R2 : IFA_Stack.StackMember; begin IFA_Stack.EstablishMember (TheHeap, IFA_Stack.ExitNode, R1); IFA_Stack.EstablishMember (TheHeap, IFA_Stack.ExitBranch, R2); PushLoopHead; ProcessExpression (LocalNode, LocalNode, R1); IFA_Stack.Push (S, R1); IFA_Stack.Push (S, R2); end StartWhileLoop; ----------------------------------------------------------------- procedure StartForLoop --# global in ComponentData; --# in Dictionary.Dict; --# in LocalNode; --# in ReferencedProtectedVars; --# in ReferencedVars; --# in ShareableProtectedVars; --# in STree.Table; --# in Table; --# in out ExpnLocations; --# in out ExpnNumber; --# in out KindDictionary; --# in out ParamDictionary; --# in out S; --# in out Statistics.TableUsage; --# in out StmtLocations; --# in out TheHeap; --# derives ExpnLocations, --# ExpnNumber, --# KindDictionary, --# ParamDictionary, --# S, --# StmtLocations from *, --# ComponentData, --# Dictionary.Dict, --# ExpnNumber, --# LocalNode, --# ParamDictionary, --# ReferencedProtectedVars, --# ReferencedVars, --# ShareableProtectedVars, --# STree.Table, --# Table, --# TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# ComponentData, --# Dictionary.Dict, --# ExpnNumber, --# LocalNode, --# ParamDictionary, --# ReferencedProtectedVars, --# ReferencedVars, --# S, --# ShareableProtectedVars, --# STree.Table, --# Table, --# TheHeap; is R1, R2, R3 : IFA_Stack.StackMember; ControlVarRep : Natural; ExportAtom : Heap.Atom; begin -- LocalNode is loop_parameter_specification; -- Establish initialization of control variable; -- Establish test for termination; -- moved lower and made conditional -- IFA_Stack.EstablishMember (TheHeap, -- IFA_Stack.ExitNode, -- R1); -- IFA_Stack.EstablishMember (TheHeap, -- IFA_Stack.ExitBranch, -- R2); -- Establish model of updating of control variable; IFA_Stack.EstablishMember (TheHeap, IFA_Stack.Action, R3); Mapping (LocalNode); KindDictionary (ExpnNumber) := ControlVarAssignment; PushLoopHead; RefList.FirstExport (Table, TheHeap, LocalNode, -- to get ExportAtom); ControlVarRep := Heap.AValue (TheHeap, ExportAtom); -- ChHeck to see whether loop must be enetered or might be bypassed if not Dictionary.LoopParameterHasStaticRange (RepToSym (ControlVarRep)) then -- We have a loop that may be bypassed because it's range might be empty so -- we need to set up an exit at the top of the loop. 
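-- The exit modelled below is an ExitNode / ExitBranch pair whose expression references
-- the loop control variable, so the path that bypasses the loop body is seen by the
-- flow analyser to depend on the (possibly empty) loop range.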
IFA_Stack.EstablishMember (TheHeap, IFA_Stack.ExitNode, R1); IFA_Stack.EstablishMember (TheHeap, IFA_Stack.ExitBranch, R2); IncrementExpression (ExpnNumber); ExpnLocations (ExpnNumber) := LocalNode; StmtLocations (ExpnNumber) := STree.Parent_Node (Current_Node => LocalNode); KindDictionary (ExpnNumber) := ExitExpn; SeqAlgebra.AddMember (TheHeap, R1.SeqOfExpns, ExpnNumber); SeqAlgebra.AddMember (TheHeap, ReferencedVars, ControlVarRep); SeqAlgebra.AddMember (TheHeap, R1.AllVars, ControlVarRep); IFA_Stack.Push (S, R1); IFA_Stack.Push (S, R2); end if; IncrementExpression (ExpnNumber); StmtLocations (ExpnNumber) := LocalNode; SeqAlgebra.AddMember (TheHeap, R3.DefinedVars, ControlVarRep); SeqAlgebra.AddMember (TheHeap, R3.UnPreservedVars, ControlVarRep); SeqAlgebra.AddMember (TheHeap, R3.AllVars, ControlVarRep); SeqAlgebra.AddMember (TheHeap, R3.SeqOfExpns, ExpnNumber); RelationAlgebra.InsertPair (TheHeap, R3.Mu, ExpnNumber, ControlVarRep); SeqAlgebra.AddMember (TheHeap, ReferencedVars, ControlVarRep); RelationAlgebra.InsertPair (TheHeap, R3.Lambda, ControlVarRep, ExpnNumber); RelationAlgebra.InsertPair (TheHeap, R3.Rho, ControlVarRep, ControlVarRep); RelationAlgebra.InsertPair (TheHeap, R3.Theta, ControlVarRep, ExpnNumber); RelationAlgebra.InsertPair (TheHeap, R3.ThetaTilde, ControlVarRep, ExpnNumber); IFA_Stack.Push (S, R3); KindDictionary (ExpnNumber) := ControlVarAssignment; --# accept Flow, 601, ExpnNumber, S, "Spurious coupling expected" & --# Flow, 601, StmtLocations, S, "Spurious coupling expected" & --# Flow, 601, ExpnLocations, S, "Spurious coupling expected" & --# Flow, 601, KindDictionary, S, "Spurious coupling expected"; end StartForLoop; -- ignore 4 spurious couplings with S begin -- StartLoop -- advance to iteration_scheme; LocalNode := STree.Child_Node (Current_Node => Node); if LocalNode = STree.NullNode then -- loop does not have an iteration_scheme; PushLoopHead; else -- localnode is of type iteration_scheme; LocalNode := STree.Child_Node (Current_Node => LocalNode); case STree.Syntax_Node_Type (Node => LocalNode) is when SP_Symbols.condition => StartWhileLoop; when SP_Symbols.loop_parameter_specification => StartForLoop; when others => null; end case; end if; end StartLoop; ----------------------------------------------------------------- procedure ModelExit --# global in Node; --# in ReferencedVars; --# in STree.Table; --# in Table; --# in out ExpnLocations; --# in out ExpnNumber; --# in out KindDictionary; --# in out ParamDictionary; --# in out S; --# in out Statistics.TableUsage; --# in out StmtLocations; --# in out TheHeap; --# derives ExpnLocations, --# ExpnNumber, --# ParamDictionary from *, --# ExpnNumber, --# Node & --# KindDictionary from *, --# ExpnNumber, --# Node, --# TheHeap & --# S from *, --# Node, --# TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# ExpnNumber, --# Node, --# ReferencedVars, --# Table, --# TheHeap & --# StmtLocations from *, --# ExpnNumber, --# Node, --# STree.Table; is R1, R2 : IFA_Stack.StackMember; begin IFA_Stack.EstablishMember (TheHeap, IFA_Stack.ExitNode, R1); IFA_Stack.EstablishMember (TheHeap, IFA_Stack.ExitBranch, R2); if Node /= STree.NullNode then ProcessExpression (Node, STree.Parent_Node (Current_Node => Node), R1); IFA_Stack.Push (S, R1); IFA_Stack.Push (S, R2); else IFA_Stack.Push (S, R1); end if; end ModelExit; ----------------------------------------------------------------- -- This procedure handles two things: -- (1) For a plain loop with no exits it ass an "exit when false" at the end -- (2) For a FOR loop which 
must have been entered it adds an exit, dependent on -- the loop counter at the end (so that the body of the loop is always executed -- at least once. -- In all other cases it models a null statement. procedure ModelDefaultExitOrFinalForLoopExit --# global in Dictionary.Dict; --# in Node; --# in ReferencedVars; --# in STree.Table; --# in Table; --# in out ExpnLocations; --# in out ExpnNumber; --# in out KindDictionary; --# in out ParamDictionary; --# in out S; --# in out Statistics.TableUsage; --# in out StmtLocations; --# in out TheHeap; --# derives ExpnLocations, --# ExpnNumber, --# KindDictionary, --# StmtLocations from *, --# Dictionary.Dict, --# ExpnNumber, --# Node, --# STree.Table, --# Table, --# TheHeap & --# ParamDictionary from *, --# Dictionary.Dict, --# ExpnNumber, --# Node, --# STree.Table & --# S from *, --# Dictionary.Dict, --# Node, --# STree.Table, --# Table, --# TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# Dictionary.Dict, --# ExpnNumber, --# Node, --# ReferencedVars, --# STree.Table, --# Table, --# TheHeap; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.end_of_loop; is R1, R2 : IFA_Stack.StackMember; LoopCounter : Dictionary.Symbol; function IsAForLoop return Boolean --# global in Node; --# in STree.Table; is begin -- look for a loop_parameter_specification node to see if we are in a for loop return STree.Syntax_Node_Type (Node => STree.LoopParameterSpecFromEndOfLoop (Node)) = SP_Symbols.loop_parameter_specification; end IsAForLoop; function GetLoopCounter return Dictionary.Symbol --# global in Node; --# in STree.Table; --# in Table; --# in TheHeap; is LoopParamNode : STree.SyntaxNode; ExportAtom : Heap.Atom; begin -- Move to loop_parameter_specificatio node LoopParamNode := STree.LoopParameterSpecFromEndOfLoop (Node); -- the loop counter is modelled by wffs as being updated here so is an export of this node RefList.FirstExport (Table, TheHeap, LoopParamNode, -- to get ExportAtom); -- convert to a symbol and return return SymValue (ExportAtom); end GetLoopCounter; procedure ModelFinalForLoopExit --# global in LoopCounter; --# in Node; --# in ReferencedVars; --# in STree.Table; --# in out ExpnLocations; --# in out ExpnNumber; --# in out KindDictionary; --# in out S; --# in out Statistics.TableUsage; --# in out StmtLocations; --# in out TheHeap; --# derives ExpnLocations from *, --# ExpnNumber, --# Node & --# ExpnNumber, --# KindDictionary from *, --# ExpnNumber & --# S from *, --# TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# ExpnNumber, --# LoopCounter, --# ReferencedVars, --# TheHeap & --# StmtLocations from *, --# ExpnNumber, --# Node, --# STree.Table; --# pre IsAForLoop (Node, STree.Table); is R1, R2 : IFA_Stack.StackMember; begin -- Establish test for termination; IFA_Stack.EstablishMember (TheHeap, IFA_Stack.ExitNode, R1); IFA_Stack.EstablishMember (TheHeap, IFA_Stack.ExitBranch, R2); IncrementExpression (ExpnNumber); ExpnLocations (ExpnNumber) := Node; StmtLocations (ExpnNumber) := STree.Parent_Node (Current_Node => Node); KindDictionary (ExpnNumber) := ExitExpn; SeqAlgebra.AddMember (TheHeap, R1.SeqOfExpns, ExpnNumber); -- exit depends on loop counter AddSymbol (TheHeap, ReferencedVars, LoopCounter); AddSymbol (TheHeap, R1.AllVars, LoopCounter); IFA_Stack.Push (S, R1); IFA_Stack.Push (S, R2); end ModelFinalForLoopExit; begin -- ModelDefaultExitOrFinalForLoopExit if Dictionary.IsPredefinedBooleanType (STree.NodeSymbol (Node)) then -- A Boolean type symbol placed in this not by WalkStatements.wf_loop_param indicates -- 
that the loop is a plain loop, with no iteration scheme and no exit statement. -- In this case we patch in a "exit when false" exit statement at the bottom of the loop -- to give a syntactic exit path so that the loop can be flow analysed correctly. IFA_Stack.EstablishMember (TheHeap, IFA_Stack.DefaultExitNode, R1); IFA_Stack.EstablishMember (TheHeap, IFA_Stack.ExitBranch, R2); ProcessExpression (Node, Node, R1); IFA_Stack.Push (S, R1); IFA_Stack.Push (S, R2); elsif IsAForLoop then -- We have two kinds of for loop to deal with. A for loop that may be bypassed because -- its counter range may be empty will already have had an exit statement modeled at its -- head by StartForLoop. In the case where the loop counter has a static range that we -- know to be non-empty then this exit will not have been placed (to avoid analysis -- of the semantcially infeasible path round the loop) so we need to add an exit at the bottom -- of the loop. -- The following IF statement covers these two cases. LoopCounter := GetLoopCounter; if Dictionary.LoopParameterHasStaticRange (LoopCounter) then ModelFinalForLoopExit; else -- it's a for loop that may be bypassed, an exit already exists at the top -- so just place a null statement here IFA_Stack.EstablishMember (TheHeap, IFA_Stack.Action, R1); IFA_Stack.Push (S, R1); end if; else -- It's a plain (not a for) loop, but it already has (a) user-supplied exit statement(s). -- there are already proper exits so just model a null statement IFA_Stack.EstablishMember (TheHeap, IFA_Stack.Action, R1); IFA_Stack.Push (S, R1); end if; end ModelDefaultExitOrFinalForLoopExit; --898 end ----------------------------------------------------------------- procedure Join --# global in out S; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives S, --# Statistics.TableUsage, --# TheHeap from *, --# S, --# TheHeap; is A, B, R : IFA_Stack.StackMember; Finished : Boolean; PreservedVars : SeqAlgebra.Seq; ExpnNmbr : Natural; DefinedVars, UnPreservedVars : SeqAlgebra.Seq; Lambda, Mu, Rho, Theta, ThetaTilde : RelationAlgebra.Relation; begin if IFA_Stack.Top (S).MemberKind = IFA_Stack.ExitNode then IFA_Stack.Pop (S, A); if IFA_Stack.Top (S).MemberKind = IFA_Stack.Action then IFA_Stack.DisposeOfMember (TheHeap, A); IFA_Stack.Pop (S, A); end if; A.MemberKind := IFA_Stack.ExitBranch; IFA_Stack.Pop (S, B); B.MemberKind := IFA_Stack.ExitNode; IFA_Stack.Push (S, B); IFA_Stack.Push (S, A); else Finished := False; while not Finished loop IFA_Stack.Pop (S, A); IFA_Stack.Pop (S, B); IFA_Stack.Pop (S, R); Finished := (R.MemberKind = IFA_Stack.IfNode); ExpnNmbr := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => SeqAlgebra.FirstMember (TheHeap, R.SeqOfExpns)); SeqAlgebra.AugmentSeq (TheHeap, R.SeqOfExpns, A.SeqOfExpns); SeqAlgebra.AugmentSeq (TheHeap, R.SeqOfExpns, B.SeqOfExpns); R.MemberKind := IFA_Stack.Action; SeqAlgebra.Union (TheHeap, A.DefinedVars, B.DefinedVars, DefinedVars); R.DefinedVars := DefinedVars; SeqAlgebra.Intersection (TheHeap, A.UnPreservedVars, B.UnPreservedVars, UnPreservedVars); R.UnPreservedVars := UnPreservedVars; RelationAlgebra.CartesianProduct (TheHeap, R.AllVars, R.DefinedVars, Rho); R.Rho := Rho; RelationAlgebra.Sum (TheHeap, A.Lambda, B.Lambda, Lambda); R.Lambda := Lambda; RelationAlgebra.AddCol (TheHeap, R.Lambda, ExpnNmbr, R.AllVars); RelationAlgebra.Sum (TheHeap, A.Theta, B.Theta, Theta); R.Theta := Theta; RelationAlgebra.AddCol (TheHeap, R.Theta, ExpnNmbr, R.AllVars); RelationAlgebra.Sum (TheHeap, A.ThetaTilde, B.ThetaTilde, ThetaTilde); R.ThetaTilde := 
ThetaTilde; RelationAlgebra.AddCol (TheHeap, R.ThetaTilde, ExpnNmbr, R.AllVars); RelationAlgebra.Sum (TheHeap, A.Mu, B.Mu, Mu); R.Mu := Mu; RelationAlgebra.AddRow (TheHeap, R.Mu, ExpnNmbr, R.DefinedVars); RelationAlgebra.AugmentRelation (TheHeap, R.Rho, A.Rho); RelationAlgebra.AugmentRelation (TheHeap, R.Rho, B.Rho); SeqAlgebra.AugmentSeq (TheHeap, R.AllVars, A.AllVars); SeqAlgebra.AugmentSeq (TheHeap, R.AllVars, B.AllVars); SeqAlgebra.Complement (TheHeap, R.AllVars, R.UnPreservedVars, PreservedVars); RelationAlgebra.AddIdentity (TheHeap, R.Rho, PreservedVars); SeqAlgebra.DisposeOfSeq (TheHeap, PreservedVars); IFA_Stack.DisposeOfMember (TheHeap, A); IFA_Stack.DisposeOfMember (TheHeap, B); IFA_Stack.Push (S, R); end loop; end if; end Join; ----------------------------------------------------------------- procedure CombineCases --# global in out S; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives S, --# Statistics.TableUsage, --# TheHeap from *, --# S, --# TheHeap; is UnPreservedVars, TempVarSeq : SeqAlgebra.Seq; ExpnNmbr : Natural; Product : RelationAlgebra.Relation; A, R : IFA_Stack.StackMember; begin IFA_Stack.Pop (S, R); loop IFA_Stack.Pop (S, A); SeqAlgebra.AugmentSeq (TheHeap, R.AllVars, A.AllVars); SeqAlgebra.AugmentSeq (TheHeap, R.SeqOfExpns, A.SeqOfExpns); exit when A.MemberKind = IFA_Stack.CaseNode; SeqAlgebra.AugmentSeq (TheHeap, R.DefinedVars, A.DefinedVars); -- replace R.UnPreservedVars by its intersection with A.UnPreservedVars; SeqAlgebra.Intersection (TheHeap, R.UnPreservedVars, A.UnPreservedVars, TempVarSeq); SeqAlgebra.DisposeOfSeq (TheHeap, R.UnPreservedVars); SeqAlgebra.CreateSeq (TheHeap, UnPreservedVars); R.UnPreservedVars := UnPreservedVars; SeqAlgebra.AugmentSeq (TheHeap, R.UnPreservedVars, TempVarSeq); SeqAlgebra.DisposeOfSeq (TheHeap, TempVarSeq); -- end of replacement; RelationAlgebra.AugmentRelation (TheHeap, R.Lambda, A.Lambda); RelationAlgebra.AugmentRelation (TheHeap, R.Theta, A.Theta); RelationAlgebra.AugmentRelation (TheHeap, R.ThetaTilde, A.ThetaTilde); RelationAlgebra.AugmentRelation (TheHeap, R.Mu, A.Mu); RelationAlgebra.AugmentRelation (TheHeap, R.Rho, A.Rho); IFA_Stack.DisposeOfMember (TheHeap, A); end loop; SeqAlgebra.Complement (TheHeap, R.AllVars, R.UnPreservedVars, TempVarSeq); RelationAlgebra.AddIdentity (TheHeap, R.Rho, TempVarSeq); SeqAlgebra.DisposeOfSeq (TheHeap, TempVarSeq); ExpnNmbr := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => SeqAlgebra.FirstMember (TheHeap, A.SeqOfExpns)); RelationAlgebra.AddCol (TheHeap, R.Lambda, ExpnNmbr, A.AllVars); RelationAlgebra.AddCol (TheHeap, R.Theta, ExpnNmbr, A.AllVars); RelationAlgebra.AddCol (TheHeap, R.ThetaTilde, ExpnNmbr, A.AllVars); RelationAlgebra.AddRow (TheHeap, R.Mu, ExpnNmbr, R.DefinedVars); RelationAlgebra.CartesianProduct (TheHeap, A.AllVars, R.DefinedVars, Product); IFA_Stack.DisposeOfMember (TheHeap, A); RelationAlgebra.AugmentRelation (TheHeap, R.Rho, Product); RelationAlgebra.DisposeOfRelation (TheHeap, Product); IFA_Stack.Push (S, R); end CombineCases; ----------------------------------------------------------------- procedure CloseLoop --# global in InnerExpns; --# in KindDictionary; --# in OneStableExpnSeq; --# in OtherStableExpnSeq; --# in ZeroStableExpnSeq; --# in out S; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives S from *, --# TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# InnerExpns, --# KindDictionary, --# OneStableExpnSeq, --# OtherStableExpnSeq, --# S, --# TheHeap, --# ZeroStableExpnSeq; is ExpnNmbr : Natural; ExpnsInBody, 
ExpnsInExits, BodyVars, NewVars, TempSeq : SeqAlgebra.Seq; FirstExitFound : Boolean; BodyLambda, Closure, RhoProducts, TempRelation, TopMu : RelationAlgebra.Relation; A, R : IFA_Stack.StackMember; AuxiliaryStack : IFA_Stack.Stack; UnPreservedVars, SeqOfExpns : SeqAlgebra.Seq; Rho, Theta, ThetaTilde, Lambda : RelationAlgebra.Relation; procedure StabilityTest --# global in BodyLambda; --# in BodyVars; --# in ExpnsInBody; --# in InnerExpns; --# in KindDictionary; --# in OneStableExpnSeq; --# in OtherStableExpnSeq; --# in ZeroStableExpnSeq; --# in out RhoProducts; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives RhoProducts from TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# BodyLambda, --# BodyVars, --# ExpnsInBody, --# InnerExpns, --# KindDictionary, --# OneStableExpnSeq, --# OtherStableExpnSeq, --# RhoProducts, --# TheHeap, --# ZeroStableExpnSeq; is subtype VarNmbrRange is Natural range 1 .. ExaminerConstants.FlowAnalysisMaxVarNumber; type IndegreeType is array (VarNmbrRange) of Natural; Indegree : IndegreeType; type StabilityIndexType is array (VarNmbrRange) of Natural; StabilityIndex : StabilityIndexType; LambdaCol, ReducedCol, TestSeq, PendingVarSeq, StableVarSeq, SuccessorSeq : SeqAlgebra.Seq; M, N : SeqAlgebra.MemberOfSeq; MemberNmbr, ValueOfM, ValueOfN, MaxVarIndex, ExpnIndex : Natural; NmbrOfM, NmbrOfN : VarNmbrRange; ---------------------------------------------- function max (X, Y : Natural) return Natural is Result : Natural; begin case X > Y is when True => Result := X; when False => Result := Y; end case; return Result; end max; ---------------------------------------------- procedure IncNumber (X : in out Natural) --# derives X from *; --# post X = X~ + 1 and X <= ExaminerConstants.FlowAnalysisMaxVarNumber; is begin if X >= ExaminerConstants.FlowAnalysisMaxVarNumber then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Too_Many_Flow_Analyser_Expressions, Msg => ""); end if; X := X + 1; end IncNumber; ---------------------------------------------- begin -- StabilityTest SeqAlgebra.CreateSeq (TheHeap, PendingVarSeq); SeqAlgebra.CreateSeq (TheHeap, StableVarSeq); RelationAlgebra.ExtractSubRelation (TheHeap, RhoProducts, BodyVars); M := SeqAlgebra.FirstMember (TheHeap, BodyVars); MemberNmbr := 0; while not SeqAlgebra.IsNullMember (M) loop --# assert MemberNmbr in Natural; ValueOfM := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => M); IncNumber (MemberNmbr); --# accept Flow, 23, StabilityIndex, "Expect flow error on 1st write to array"; StabilityIndex (MemberNmbr) := 0; --# end accept; --# accept Flow, 23, Indegree, "Expect flow error on 1st write to array"; Indegree (MemberNmbr) := RelationAlgebra.ColumnCount (TheHeap, RhoProducts, ValueOfM); --# end accept; if Indegree (MemberNmbr) = 0 then SeqAlgebra.AddMember (TheHeap, PendingVarSeq, ValueOfM); SeqAlgebra.AddMember (TheHeap, StableVarSeq, ValueOfM); end if; M := SeqAlgebra.NextMember (TheHeap, M); end loop; while not SeqAlgebra.IsNullMember (SeqAlgebra.FirstMember (TheHeap, PendingVarSeq)) loop M := SeqAlgebra.FirstMember (TheHeap, PendingVarSeq); ValueOfM := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => M); SeqAlgebra.RemoveMember (TheHeap, PendingVarSeq, ValueOfM); RelationAlgebra.RowExtraction (TheHeap, RhoProducts, ValueOfM, SuccessorSeq); NmbrOfM := SeqAlgebra.MemberIndex (TheHeap, BodyVars, ValueOfM); N := SeqAlgebra.FirstMember (TheHeap, SuccessorSeq); while not SeqAlgebra.IsNullMember (N) loop ValueOfN := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => N); NmbrOfN := 
SeqAlgebra.MemberIndex (TheHeap, BodyVars, ValueOfN); --# accept Flow, 504, Indegree, "Expect flow error"; Indegree (NmbrOfN) := Indegree (NmbrOfN) - 1; --# end accept; --# accept Flow, 504, StabilityIndex, "Expect flow error"; StabilityIndex (NmbrOfN) := max (StabilityIndex (NmbrOfN), StabilityIndex (NmbrOfM) + 1); --# end accept; if Indegree (NmbrOfN) = 0 then SeqAlgebra.AddMember (TheHeap, PendingVarSeq, ValueOfN); SeqAlgebra.AddMember (TheHeap, StableVarSeq, ValueOfN); end if; N := SeqAlgebra.NextMember (TheHeap, N); end loop; SeqAlgebra.DisposeOfSeq (TheHeap, SuccessorSeq); end loop; SeqAlgebra.DisposeOfSeq (TheHeap, PendingVarSeq); M := SeqAlgebra.FirstMember (TheHeap, ExpnsInBody); while not SeqAlgebra.IsNullMember (M) loop ValueOfM := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => M); if not SeqAlgebra.IsMember (TheHeap, InnerExpns, ValueOfM) then SeqAlgebra.AddMember (TheHeap, InnerExpns, ValueOfM); if (KindDictionary (ValueOfM) = ExitExpn) or --(KindDictionary (ValueOfM) = DefaultExitExpn) or --898 (KindDictionary (ValueOfM) = ForkExpn) then RelationAlgebra.ColExtraction (TheHeap, BodyLambda, ValueOfM, LambdaCol); SeqAlgebra.Intersection (TheHeap, LambdaCol, BodyVars, ReducedCol); SeqAlgebra.DisposeOfSeq (TheHeap, LambdaCol); SeqAlgebra.Complement (TheHeap, ReducedCol, StableVarSeq, TestSeq); if SeqAlgebra.IsEmptySeq (TheHeap, TestSeq) then if SeqAlgebra.IsEmptySeq (TheHeap, ReducedCol) then ExpnIndex := 0; else MaxVarIndex := 0; N := SeqAlgebra.FirstMember (TheHeap, ReducedCol); while not SeqAlgebra.IsNullMember (N) loop --# accept Flow, 501, StabilityIndex, "Expect flow error"; MaxVarIndex := max (MaxVarIndex, StabilityIndex (SeqAlgebra.MemberIndex (TheHeap, BodyVars, SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => N)))); --# end accept; N := SeqAlgebra.NextMember (TheHeap, N); end loop; ExpnIndex := MaxVarIndex + 1; end if; if ExpnIndex = 0 then SeqAlgebra.AddMember (TheHeap, ZeroStableExpnSeq, ValueOfM); elsif ExpnIndex = 1 then SeqAlgebra.AddMember (TheHeap, OneStableExpnSeq, ValueOfM); else SeqAlgebra.AddMember (TheHeap, OtherStableExpnSeq, ValueOfM); end if; end if; SeqAlgebra.DisposeOfSeq (TheHeap, ReducedCol); SeqAlgebra.DisposeOfSeq (TheHeap, TestSeq); end if; end if; M := SeqAlgebra.NextMember (TheHeap, M); end loop; SeqAlgebra.DisposeOfSeq (TheHeap, StableVarSeq); --# accept Flow, 602, Statistics.TableUsage, Indegree, "expect 4 warnings from array DF anomolies" & --# Flow, 602, Statistics.TableUsage, StabilityIndex, "expect 4 warnings from array DF anomolies" & --# Flow, 602, TheHeap, Indegree, "expect 4 warnings from array DF anomolies" & --# Flow, 602, TheHeap, StabilityIndex, "expect 4 warnings from array DF anomolies"; end StabilityTest; -------------------------------------------------------------- begin -- CloseLoop IFA_Stack.EstablishMember (TheHeap, IFA_Stack.Action, R); SeqAlgebra.CreateSeq (TheHeap, BodyVars); SeqAlgebra.CreateSeq (TheHeap, ExpnsInBody); SeqAlgebra.CreateSeq (TheHeap, ExpnsInExits); IFA_Stack.ClearStack (AuxiliaryStack); FirstExitFound := False; IFA_Stack.Pop (S, A); while A.MemberKind /= IFA_Stack.LoopHead loop SeqAlgebra.AugmentSeq (TheHeap, R.AllVars, A.AllVars); case A.MemberKind is when IFA_Stack.Action => SeqAlgebra.AugmentSeq (TheHeap, BodyVars, A.DefinedVars); SeqAlgebra.AugmentSeq (TheHeap, R.DefinedVars, A.DefinedVars); SeqAlgebra.AugmentSeq (TheHeap, ExpnsInBody, A.SeqOfExpns); SeqAlgebra.AugmentSeq (TheHeap, R.UnPreservedVars, A.UnPreservedVars); when IFA_Stack.ExitBranch => SeqAlgebra.AugmentSeq (TheHeap, 
R.DefinedVars, A.DefinedVars); SeqAlgebra.AugmentSeq (TheHeap, ExpnsInExits, A.SeqOfExpns); if FirstExitFound then TempSeq := R.UnPreservedVars; SeqAlgebra.Intersection (TheHeap, TempSeq, A.UnPreservedVars, UnPreservedVars); R.UnPreservedVars := UnPreservedVars; SeqAlgebra.DisposeOfSeq (TheHeap, TempSeq); else FirstExitFound := True; SeqAlgebra.DisposeOfSeq (TheHeap, R.UnPreservedVars); SeqAlgebra.CreateSeq (TheHeap, UnPreservedVars); R.UnPreservedVars := UnPreservedVars; SeqAlgebra.AugmentSeq (TheHeap, R.UnPreservedVars, A.UnPreservedVars); end if; when IFA_Stack.ExitNode => SeqAlgebra.AugmentSeq (TheHeap, ExpnsInBody, A.SeqOfExpns); when others => null; end case; IFA_Stack.Push (AuxiliaryStack, A); IFA_Stack.Pop (S, A); end loop; IFA_Stack.Push (AuxiliaryStack, A); SeqAlgebra.Union (TheHeap, ExpnsInBody, ExpnsInExits, SeqOfExpns); R.SeqOfExpns := SeqOfExpns; -- Restore stack S. while not IFA_Stack.IsEmpty (AuxiliaryStack) loop IFA_Stack.Pop (AuxiliaryStack, A); IFA_Stack.Push (S, A); end loop; -- Construct Lambda, Rho, Theta and ThetaTilde relations. RelationAlgebra.CreateRelation (TheHeap, BodyLambda); RelationAlgebra.CreateRelation (TheHeap, RhoProducts); RelationAlgebra.AddIdentity (TheHeap, RhoProducts, R.AllVars); IFA_Stack.Pop (S, A); while A.MemberKind /= IFA_Stack.LoopHead loop case A.MemberKind is when IFA_Stack.Action => SeqAlgebra.Complement (TheHeap, R.AllVars, A.AllVars, NewVars); RelationAlgebra.AddIdentity (TheHeap, A.Rho, NewVars); SeqAlgebra.DisposeOfSeq (TheHeap, NewVars); RelationAlgebra.Composition (TheHeap, A.Rho, BodyLambda, TempRelation); RelationAlgebra.DisposeOfRelation (TheHeap, BodyLambda); RelationAlgebra.Sum (TheHeap, A.Lambda, TempRelation, BodyLambda); RelationAlgebra.DisposeOfRelation (TheHeap, TempRelation); TempRelation := R.Rho; RelationAlgebra.Composition (TheHeap, A.Rho, R.Rho, Rho); R.Rho := Rho; A.RhoProd := RhoProducts; RelationAlgebra.Composition (TheHeap, A.Rho, RhoProducts, Rho); RhoProducts := Rho; RelationAlgebra.DisposeOfRelation (TheHeap, A.Rho); A.Rho := TempRelation; RelationAlgebra.RowRemoval (TheHeap, R.Theta, A.UnPreservedVars, TempRelation); RelationAlgebra.DisposeOfRelation (TheHeap, R.Theta); RelationAlgebra.Sum (TheHeap, A.Theta, TempRelation, Theta); R.Theta := Theta; RelationAlgebra.DisposeOfRelation (TheHeap, TempRelation); RelationAlgebra.RowRemoval (TheHeap, R.ThetaTilde, A.DefinedVars, TempRelation); RelationAlgebra.DisposeOfRelation (TheHeap, R.ThetaTilde); RelationAlgebra.Sum (TheHeap, A.ThetaTilde, TempRelation, ThetaTilde); R.ThetaTilde := ThetaTilde; RelationAlgebra.DisposeOfRelation (TheHeap, TempRelation); when IFA_Stack.ExitBranch => SeqAlgebra.Complement (TheHeap, R.AllVars, A.AllVars, NewVars); RelationAlgebra.AddIdentity (TheHeap, A.Rho, NewVars); SeqAlgebra.DisposeOfSeq (TheHeap, NewVars); RelationAlgebra.AugmentRelation (TheHeap, R.Rho, A.Rho); RelationAlgebra.AugmentRelation (TheHeap, BodyLambda, A.Lambda); RelationAlgebra.AugmentRelation (TheHeap, R.Theta, A.Theta); RelationAlgebra.AugmentRelation (TheHeap, R.ThetaTilde, A.ThetaTilde); when IFA_Stack.ExitNode => ExpnNmbr := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => SeqAlgebra.FirstMember (TheHeap, A.SeqOfExpns)); RelationAlgebra.AddCol (TheHeap, BodyLambda, ExpnNmbr, A.AllVars); RelationAlgebra.AddCol (TheHeap, R.Theta, ExpnNmbr, A.AllVars); RelationAlgebra.AddCol (TheHeap, R.ThetaTilde, ExpnNmbr, A.AllVars); RelationAlgebra.CartesianProduct (TheHeap, A.AllVars, R.DefinedVars, TempRelation); RelationAlgebra.AugmentRelation (TheHeap, R.Rho, 
TempRelation); RelationAlgebra.DisposeOfRelation (TheHeap, TempRelation); when others => null; end case; IFA_Stack.Push (AuxiliaryStack, A); IFA_Stack.Pop (S, A); end loop; IFA_Stack.Push (AuxiliaryStack, A); RelationAlgebra.CreateRelation (TheHeap, Closure); RelationAlgebra.AugmentRelation (TheHeap, Closure, RhoProducts); RelationAlgebra.CloseRelation (TheHeap, Closure); RelationAlgebra.AddIdentity (TheHeap, Closure, R.AllVars); RelationAlgebra.Composition (TheHeap, Closure, BodyLambda, Lambda); R.Lambda := Lambda; RelationAlgebra.Composition (TheHeap, Closure, R.Rho, TempRelation); RelationAlgebra.DisposeOfRelation (TheHeap, Closure); RelationAlgebra.DisposeOfRelation (TheHeap, R.Rho); R.Rho := TempRelation; -- Restore stack S. while not IFA_Stack.IsEmpty (AuxiliaryStack) loop IFA_Stack.Pop (AuxiliaryStack, A); IFA_Stack.Push (S, A); end loop; -- Construct Mu relation. IFA_Stack.Pop (S, A); while A.MemberKind /= IFA_Stack.LoopHead loop case A.MemberKind is when IFA_Stack.Action => RelationAlgebra.Composition (TheHeap, A.RhoProd, R.Rho, TempRelation); RelationAlgebra.AugmentRelation (TheHeap, TempRelation, A.Rho); RelationAlgebra.Composition (TheHeap, A.Mu, TempRelation, TopMu); RelationAlgebra.DisposeOfRelation (TheHeap, TempRelation); RelationAlgebra.AugmentRelation (TheHeap, R.Mu, TopMu); RelationAlgebra.DisposeOfRelation (TheHeap, TopMu); when IFA_Stack.ExitBranch => RelationAlgebra.AugmentRelation (TheHeap, R.Mu, A.Mu); when IFA_Stack.ExitNode => ExpnNmbr := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => SeqAlgebra.FirstMember (TheHeap, A.SeqOfExpns)); RelationAlgebra.AddRow (TheHeap, R.Mu, ExpnNmbr, R.DefinedVars); when others => null; end case; IFA_Stack.DisposeOfMember (TheHeap, A); IFA_Stack.Pop (S, A); end loop; IFA_Stack.DisposeOfMember (TheHeap, A); IFA_Stack.Push (S, R); SeqAlgebra.DisposeOfSeq (TheHeap, ExpnsInExits); StabilityTest; SeqAlgebra.DisposeOfSeq (TheHeap, ExpnsInBody); SeqAlgebra.DisposeOfSeq (TheHeap, BodyVars); RelationAlgebra.DisposeOfRelation (TheHeap, BodyLambda); RelationAlgebra.DisposeOfRelation (TheHeap, RhoProducts); end CloseLoop; ----------------------------------------------------------------- procedure ReturnExpression --# global in Node; --# in ReferencedVars; --# in STree.Table; --# in Table; --# in out ExpnLocations; --# in out ExpnNumber; --# in out KindDictionary; --# in out S; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives ExpnLocations from *, --# ExpnNumber, --# Node & --# ExpnNumber, --# KindDictionary from *, --# ExpnNumber & --# S from *, --# TheHeap & --# Statistics.TableUsage, --# TheHeap from *, --# ExpnNumber, --# Node, --# ReferencedVars, --# STree.Table, --# Table, --# TheHeap; is ImportList : SeqAlgebra.Seq; R : IFA_Stack.StackMember; UndefinedVars : SeqAlgebra.Seq; begin IFA_Stack.EstablishMember (TheHeap, IFA_Stack.Action, R); SeqAlgebra.AddMember (TheHeap, R.DefinedVars, FnResultRepn); SeqAlgebra.AddMember (TheHeap, R.UnPreservedVars, FnResultRepn); SeqAlgebra.AddMember (TheHeap, R.AllVars, FnResultRepn); IncrementExpression (ExpnNumber); ExpnLocations (ExpnNumber) := Node; KindDictionary (ExpnNumber) := ReturnExpn; SeqAlgebra.AddMember (TheHeap, R.SeqOfExpns, ExpnNumber); RelationAlgebra.InsertPair (TheHeap, R.Mu, ExpnNumber, FnResultRepn); RefList.ReferencedVarList (Table, TheHeap, STree.Parent_Node (Current_Node => Node), -- to get ImportList); SeqAlgebra.AugmentSeq (TheHeap, ReferencedVars, ImportList); SeqAlgebra.AugmentSeq (TheHeap, R.AllVars, ImportList); RelationAlgebra.AddCol (TheHeap, R.Lambda, 
ExpnNumber, ImportList); RelationAlgebra.AddCol (TheHeap, R.Rho, FnResultRepn, ImportList); RelationAlgebra.AddCol (TheHeap, R.Theta, ExpnNumber, ImportList); RelationAlgebra.AddCol (TheHeap, R.ThetaTilde, ExpnNumber, ImportList); SeqAlgebra.Complement (TheHeap, R.AllVars, R.DefinedVars, UndefinedVars); RelationAlgebra.AddIdentity (TheHeap, R.Rho, UndefinedVars); SeqAlgebra.DisposeOfSeq (TheHeap, UndefinedVars); IFA_Stack.Push (S, R); end ReturnExpression; -------------------------------------------------------------------------- procedure CheckVarsUsedAsConsts --# global in Dictionary.Dict; --# in LocalInits; --# in S; --# in SubprogSym; --# in VarsUsedAsConstants; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives Statistics.TableUsage, --# TheHeap from *, --# Dictionary.Dict, --# LocalInits, --# S, --# SubprogSym, --# TheHeap, --# VarsUsedAsConstants; is MemberOfInitVars : SeqAlgebra.MemberOfSeq; InitVarRep : Natural; begin if IsSubprogram (SubprogSym) then MemberOfInitVars := SeqAlgebra.FirstMember (TheHeap, LocalInits); while not SeqAlgebra.IsNullMember (MemberOfInitVars) loop InitVarRep := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => MemberOfInitVars); if not SeqAlgebra.IsMember (TheHeap, IFA_Stack.Top (S).DefinedVars, InitVarRep) then SeqAlgebra.AddMember (TheHeap, VarsUsedAsConstants, InitVarRep); end if; MemberOfInitVars := SeqAlgebra.NextMember (TheHeap, MemberOfInitVars); end loop; end if; end CheckVarsUsedAsConsts; -------------------------------------------------------------------------- procedure AnalyseRelations --# global in CommandLineData.Content; --# in DependencyRelation; --# in Dictionary.Dict; --# in EndPosition; --# in ExpnLocations; --# in ExpSeqOfExports; --# in ExpSeqOfImports; --# in InnerExpns; --# in InStreamsOfShareableProtectedVars; --# in KindDictionary; --# in LexTokenManager.State; --# in OneStableExpnSeq; --# in OtherStableExpnSeq; --# in ParamDictionary; --# in ReferencedVars; --# in S; --# in Scope; --# in SeqOfExports; --# in SeqOfImports; --# in SeqOfInitVars; --# in StmtLocations; --# in STree.Table; --# in SubprogSym; --# in VarsUsedAsConstants; --# in ZeroStableExpnSeq; --# in out ComponentData; --# in out DataFlowErrorFoundLocal; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out TheHeap; --# derives ComponentData, --# Statistics.TableUsage, --# TheHeap from *, --# ComponentData, --# DependencyRelation, --# Dictionary.Dict, --# EndPosition, --# ExpnLocations, --# ExpSeqOfExports, --# ExpSeqOfImports, --# InnerExpns, --# InStreamsOfShareableProtectedVars, --# KindDictionary, --# ParamDictionary, --# ReferencedVars, --# S, --# SeqOfExports, --# SeqOfImports, --# SeqOfInitVars, --# StmtLocations, --# STree.Table, --# SubprogSym, --# TheHeap, --# VarsUsedAsConstants & --# DataFlowErrorFoundLocal from *, --# ComponentData, --# Dictionary.Dict, --# EndPosition, --# ExpnLocations, --# ExpSeqOfExports, --# ExpSeqOfImports, --# InnerExpns, --# KindDictionary, --# ParamDictionary, --# S, --# SeqOfInitVars, --# StmtLocations, --# STree.Table, --# SubprogSym, --# TheHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ComponentData, --# DependencyRelation, --# Dictionary.Dict, --# EndPosition, --# ErrorHandler.Error_Context, --# ExpnLocations, --# ExpSeqOfExports, --# ExpSeqOfImports, --# InnerExpns, --# InStreamsOfShareableProtectedVars, --# KindDictionary, --# LexTokenManager.State, --# OneStableExpnSeq, --# OtherStableExpnSeq, 
--# ParamDictionary, --# ReferencedVars, --# S, --# Scope, --# SeqOfExports, --# SeqOfImports, --# SeqOfInitVars, --# SPARK_IO.File_Sys, --# StmtLocations, --# STree.Table, --# SubprogSym, --# TheHeap, --# VarsUsedAsConstants, --# ZeroStableExpnSeq; is separate; begin -- FlowAnalyse Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => SubprogSym); RefList.ExpandToComponentEntities (ComponentData, Table, TheHeap); FormSeqOfInitVars; FormLocalInits; FormDependencyRelation; SeqAlgebra.CreateSeq (TheHeap, ReferencedVars); SeqAlgebra.CreateSeq (TheHeap, InnerExpns); SeqAlgebra.CreateSeq (TheHeap, ZeroStableExpnSeq); SeqAlgebra.CreateSeq (TheHeap, OneStableExpnSeq); SeqAlgebra.CreateSeq (TheHeap, OtherStableExpnSeq); SeqAlgebra.CreateSeq (TheHeap, VarsUsedAsConstants); IFA_Stack.ClearStack (S); DataFlowErrorFoundLocal := False; ExpnNumber := 0; Node := StartNode; -- ANALYSIS ONLY -- The following initializations are not needed but were inserted -- temporarily to ensure that expected flow errors were not masking -- any other possible errors. The initializations do not form part -- of the delivered code. -- KindDictionary := KindDictionaryType'(others => SimpleAssignment); -- ParamDictionary := ParamDictionaryType'(others => Dictionary.NullSymbol); -- ExpnLocations := LocnDictionaryType'(others => STree.NullNode); -- StmtLocations := LocnDictionaryType'(others => STree.NullNode); --# accept Flow, 23, StmtLocations, "Partial but effective array init." & --# Flow, 23, KindDictionary, "Partial but effective array init." & --# Flow, 23, ParamDictionary, "Partial but effective array init."; ModelLocalInits (Node); --# end accept; loop -- Down Loop -- LastNode := Node; NodeType := STree.Syntax_Node_Type (Node => Node); --# accept Flow, 23, ExpnLocations, "Expect err owing to partial but effective array init."; case NodeType is when SP_Symbols.sequence_of_statements | SP_Symbols.statement | SP_Symbols.simple_statement | SP_Symbols.compound_statement | SP_Symbols.elsif_part | SP_Symbols.alternatives | SP_Symbols.others_part | SP_Symbols.loop_statement => Node := STree.Child_Node (Current_Node => Node); when SP_Symbols.simple_name => Node := STree.NullNode; when SP_Symbols.proof_statement | SP_Symbols.justification_statement | SP_Symbols.apragma | SP_Symbols.null_statement => NullStatement; Node := STree.NullNode; when SP_Symbols.assignment_statement | SP_Symbols.procedure_call_statement | SP_Symbols.delay_statement => Mapping (Node); Node := STree.NullNode; when SP_Symbols.if_statement => Node := STree.Child_Node (Current_Node => Node); LastNode := Node; StartIf; -- Expect err owing to partial but effective array init. Node := STree.NullNode; when SP_Symbols.condition => StartElsIf; -- Expect err owing to partial but effective array init. Node := STree.NullNode; when SP_Symbols.else_part => Node := STree.Child_Node (Current_Node => Node); if Node = STree.NullNode and then IFA_Stack.Top (S).MemberKind /= IFA_Stack.ExitNode then NullStatement; end if; when SP_Symbols.case_statement => Node := STree.Child_Node (Current_Node => Node); LastNode := Node; StartCase; -- Expect err owing to partial but effective array init. Node := STree.NullNode; when SP_Symbols.case_statement_alternative => Node := STree.Child_Node (Current_Node => Node); Node := STree.Next_Sibling (Current_Node => Node); when SP_Symbols.return_statement => Node := STree.Child_Node (Current_Node => Node); ReturnExpression; -- Expect err owing to partial but effective array init. 
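-- A return expression is a leaf of this traversal; Node is cleared so that the
-- upward pass resumes from here.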
Node := STree.NullNode; when SP_Symbols.loop_statement_opt => StartLoop; -- Expect err owing to partial but effective array init. Node := STree.NullNode; when SP_Symbols.exit_statement => Node := STree.Child_Node (Current_Node => Node); Node := STree.Next_Sibling (Current_Node => Node); if STree.Syntax_Node_Type (Node => Node) = SP_Symbols.simple_name then -- Exit statement has a label, which we must skip -- to get to the condition. Node := STree.Next_Sibling (Current_Node => Node); end if; ModelExit; -- Expect err owing to partial but effective array init. Node := STree.NullNode; when SP_Symbols.end_of_loop => ModelDefaultExitOrFinalForLoopExit; -- Expect err owing to partial but effective array init. Node := STree.NullNode; when SP_Symbols.sequence_of_labels | SP_Symbols.label => Node := STree.NullNode; when others => Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "FlowAnalyse: Unexpected node kind in main tree"); end case; --# end accept; -- Up Loop -------------------------- if Node = STree.NullNode and then LastNode /= StartNode then loop Node := STree.Next_Sibling (Current_Node => LastNode); exit when Node /= STree.NullNode; Node := STree.Parent_Node (Current_Node => LastNode); exit when Node = STree.NullNode or else Node = StartNode; NodeType := STree.Syntax_Node_Type (Node => Node); case NodeType is when SP_Symbols.statement => if STree.Child_Node (Current_Node => STree.Parent_Node (Current_Node => Node)) /= Node then CombineSequence; end if; when SP_Symbols.if_statement => Join; when SP_Symbols.case_statement => CombineCases; when SP_Symbols.loop_statement => CloseLoop; if ((STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => Node)) /= STree.NullNode) and then (STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => Node)))) = SP_Symbols.loop_parameter_specification)) or else ((STree.Child_Node (Current_Node => STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node))) /= STree.NullNode) and then (STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => STree.Child_Node (Current_Node => STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node))))) = SP_Symbols.loop_parameter_specification)) then CombineSequence; end if; when others => null; end case; LastNode := Node; end loop; -- Up end if; exit when Node = STree.NullNode or else Node = StartNode; end loop; -- Down -- combine in local variable initializations if there are any if not SeqAlgebra.IsEmptySeq (TheHeap, LocalInits) then CheckVarsUsedAsConsts; CombineSequence; end if; -- For every protected variable P that has been referenced somewhere, we need to -- set up an initialization of the associated protected stream thus : P--in <-- P. --# accept Flow, 10, ExpnNumber, "Final assignment to ExpnNumber not used" & --# Flow, 504, ExpnLocations, "Partial but effective array init."; InitializeProtectedStreams (Node); --# end accept; if CommandLineData.Content.Debug.Rho then PrintStackTop ("Calculated flow relations before AnalyseRelations"); end if; AnalyseRelations; -- Expect err owing to partial but effective array init. 
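-- Analysis of this subprogram is complete; the calls below release the heap-allocated
-- sequences and relations used by the flow analyser.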
IFA_Stack.DisposeOfMember (TheHeap, IFA_Stack.Top (S)); SeqAlgebra.DisposeOfSeq (TheHeap, ReferencedVars); SeqAlgebra.DisposeOfSeq (TheHeap, InnerExpns); SeqAlgebra.DisposeOfSeq (TheHeap, ZeroStableExpnSeq); SeqAlgebra.DisposeOfSeq (TheHeap, OneStableExpnSeq); SeqAlgebra.DisposeOfSeq (TheHeap, OtherStableExpnSeq); SeqAlgebra.DisposeOfSeq (TheHeap, SeqOfImports); SeqAlgebra.DisposeOfSeq (TheHeap, SeqOfInitVars); SeqAlgebra.DisposeOfSeq (TheHeap, LocalInits); SeqAlgebra.DisposeOfSeq (TheHeap, VarsUsedAsConstants); SeqAlgebra.DisposeOfSeq (TheHeap, SeqOfExports); SeqAlgebra.DisposeOfSeq (TheHeap, ExpSeqOfImports); -- added during 1215 but not really part of it SeqAlgebra.DisposeOfSeq (TheHeap, ExpSeqOfExports); -- added during 1215 but not really part of it SeqAlgebra.DisposeOfSeq (TheHeap, ShareableProtectedVars); SeqAlgebra.DisposeOfSeq (TheHeap, ReferencedProtectedVars); SeqAlgebra.DisposeOfSeq (TheHeap, InStreamsOfShareableProtectedVars); RelationAlgebra.DisposeOfRelation (TheHeap, DependencyRelation); -- signal presence of DFEs for use by VCG DataFlowErrorFound := DataFlowErrorFoundLocal; --# accept Flow, 602, Statistics.TableUsage, StmtLocations, "Partial but effective array init." & --# Flow, 602, Statistics.TableUsage, ExpnLocations, "Partial but effective array init." & --# Flow, 602, Statistics.TableUsage, KindDictionary, "Partial but effective array init." & --# Flow, 602, Statistics.TableUsage, ParamDictionary, "Partial but effective array init." & --# Flow, 602, ErrorHandler.Error_Context, StmtLocations, "Partial but effective array init." & --# Flow, 602, ErrorHandler.Error_Context, ExpnLocations, "Partial but effective array init." & --# Flow, 602, ErrorHandler.Error_Context, KindDictionary, "Partial but effective array init." & --# Flow, 602, ErrorHandler.Error_Context, ParamDictionary, "Partial but effective array init." & --# Flow, 602, ComponentData, StmtLocations, "Partial but effective array init." & --# Flow, 602, ComponentData, ExpnLocations, "Partial but effective array init." & --# Flow, 602, ComponentData, KindDictionary, "Partial but effective array init." & --# Flow, 602, ComponentData, ParamDictionary, "Partial but effective array init." & --# Flow, 602, TheHeap, StmtLocations, "Partial but effective array init." & --# Flow, 602, TheHeap, ExpnLocations, "Partial but effective array init." & --# Flow, 602, TheHeap, KindDictionary, "Partial but effective array init." & --# Flow, 602, TheHeap, ParamDictionary, "Partial but effective array init." & --# Flow, 602, DataFlowErrorFound, StmtLocations, "Partial but effective array init." & --# Flow, 602, DataFlowErrorFound, ExpnLocations, "Partial but effective array init." & --# Flow, 602, DataFlowErrorFound, KindDictionary, "Partial but effective array init." & --# Flow, 602, DataFlowErrorFound, ParamDictionary, "Partial but effective array init." & --# Flow, 602, SPARK_IO.File_Sys, StmtLocations, "Partial but effective array init." & --# Flow, 602, SPARK_IO.File_Sys, ExpnLocations, "Partial but effective array init." & --# Flow, 602, SPARK_IO.File_Sys, KindDictionary, "Partial but effective array init." 
& --# Flow, 602, SPARK_IO.File_Sys, ParamDictionary, "Partial but effective array init."; end FlowAnalyse; spark-2012.0.deb/examiner/sem-compunit-wf_package_initialization.adb0000644000175000017500000002150011753202336024533 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.CompUnit) procedure Wf_Package_Initialization (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) is Stmt_Node : STree.SyntaxNode; Ident_Str : LexTokenManager.Lex_String; Pack_Sym : Dictionary.Symbol; ---------------------------------------------------------------- procedure Check_Whether_Initialization_Was_Needed (Pack_Sym : in Dictionary.Symbol; Node_Pos : in LexTokenManager.Token_Position; Ident_Str : in LexTokenManager.Lex_String) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Str, --# LexTokenManager.State, --# Node_Pos, --# Pack_Sym, --# SPARK_IO.File_Sys; is Constituent_List, Init_Own_Var_List : Dictionary.Iterator; Init_Part_Needed : Boolean; Init_Own_Var_Sym : Dictionary.Symbol; begin Init_Part_Needed := False; Init_Own_Var_List := Dictionary.FirstInitializedOwnVariable (Pack_Sym); while not Dictionary.IsNullIterator (Init_Own_Var_List) loop Init_Own_Var_Sym := Dictionary.CurrentSymbol (Init_Own_Var_List); if Dictionary.IsConcreteOwnVariable (Init_Own_Var_Sym) then -- filter out own vars initialized at declaration if not Dictionary.VariableIsInitialized (Init_Own_Var_Sym) then Init_Part_Needed := True; end if; else Constituent_List := Dictionary.FirstConstituent (Init_Own_Var_Sym); while not Dictionary.IsNullIterator (Constituent_List) loop if (not Dictionary.IsOwnVariable (Dictionary.CurrentSymbol (Constituent_List))) and then (not Dictionary.VariableIsInitialized (Dictionary.CurrentSymbol (Constituent_List))) then Init_Part_Needed := True; exit; end if; Constituent_List := Dictionary.NextSymbol (Constituent_List); end loop; end if; exit when Init_Part_Needed; Init_Own_Var_List := Dictionary.NextSymbol (Init_Own_Var_List); end loop; if not Init_Part_Needed then ErrorHandler.Semantic_Error (Err_Num => 66, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Ident_Str); end if; end Check_Whether_Initialization_Was_Needed; ---------------------------------------------------------------- function Null_Initialization (Node : STree.SyntaxNode) return Boolean --# global in STree.Table; --# pre 
Syntax_Node_Type (Node, STree.Table) = SP_Symbols.sequence_of_statements; is Current_Node : STree.SyntaxNode; Result : Boolean; begin Current_Node := Child_Node (Current_Node => Node); -- ASSUME Current_Node = sequence_of_statements OR statement if Syntax_Node_Type (Node => Current_Node) = SP_Symbols.sequence_of_statements then -- ASSUME Current_Node = sequence_of_statements Result := False; elsif Syntax_Node_Type (Node => Current_Node) = SP_Symbols.statement then -- ASSUME Current_Node = statement Current_Node := Child_Node (Current_Node => Current_Node); -- ASSUME Current_Node = sequence_of_labels OR simple_statement OR compound_statement OR proof_statement OR -- justification_statement OR apragma if Syntax_Node_Type (Node => Current_Node) = SP_Symbols.sequence_of_labels or else Syntax_Node_Type (Node => Current_Node) = SP_Symbols.compound_statement or else Syntax_Node_Type (Node => Current_Node) = SP_Symbols.proof_statement or else Syntax_Node_Type (Node => Current_Node) = SP_Symbols.justification_statement or else Syntax_Node_Type (Node => Current_Node) = SP_Symbols.apragma then -- ASSUME Current_Node = sequence_of_labels OR compound_statement OR proof_statement OR -- justification_statement OR apragma Result := False; elsif Syntax_Node_Type (Node => Current_Node) = SP_Symbols.simple_statement then -- ASSUME Current_Node = simple_statement Current_Node := Child_Node (Current_Node => Current_Node); -- ASSUME Current_Node = null_statement OR assignment_statement OR procedure_call_statement OR -- exit_statement OR return_statement OR delay_statement if Syntax_Node_Type (Node => Current_Node) = SP_Symbols.null_statement then -- ASSUME Current_Node = null_statement Result := True; elsif Syntax_Node_Type (Node => Current_Node) = SP_Symbols.assignment_statement or else Syntax_Node_Type (Node => Current_Node) = SP_Symbols.procedure_call_statement or else Syntax_Node_Type (Node => Current_Node) = SP_Symbols.exit_statement or else Syntax_Node_Type (Node => Current_Node) = SP_Symbols.return_statement or else Syntax_Node_Type (Node => Current_Node) = SP_Symbols.delay_statement then -- ASSUME Current_Node = assignment_statement OR procedure_call_statement -- exit_statement OR return_statement OR delay_statement Result := False; else Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = null_statement OR assignment_statement OR procedure_call_statement OR " & "exit_statement OR return_statement OR delay_statement in Null_Initialization"); end if; else Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = sequence_of_labels OR simple_statement OR compound_statement OR " & "proof_statement OR justification_statement OR apragma in Null_Initialization"); end if; else Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = sequence_of_statements OR statement in Null_Initialization"); end if; return Result; end Null_Initialization; begin -- Wf_Package_Initialization Pack_Sym := Dictionary.GetRegion (Scope); Ident_Str := Dictionary.GetSimpleName (Pack_Sym); Stmt_Node := Child_Node (Current_Node => Node); -- ASSUME Stmt_Node = sequence_of_statements OR hidden_part if Syntax_Node_Type (Node => Stmt_Node) = SP_Symbols.hidden_part then -- ASSUME Stmt_Node = hidden_part ErrorHandler.Hidden_Text (Position => Node_Position (Node => Stmt_Node), Unit_Str => Ident_Str, Unit_Typ => SP_Symbols.package_initialization); elsif 
Syntax_Node_Type (Node => Stmt_Node) = SP_Symbols.sequence_of_statements then -- ASSUME Stmt_Node = sequence_of_statements -- not hidden so check whether it was necessary if not Null_Initialization (Node => Stmt_Node) then Check_Whether_Initialization_Was_Needed (Pack_Sym => Pack_Sym, Node_Pos => Node_Position (Node => Node), Ident_Str => Ident_Str); end if; end if; end Wf_Package_Initialization; spark-2012.0.deb/examiner/sem-check_interrupt_property_consistency.adb0000644000175000017500000000540411753202336025264 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) procedure Check_Interrupt_Property_Consistency (Has_Interrupt_Property : in Boolean; Sym : in Dictionary.Symbol; Type_Sym : in Dictionary.Symbol; Error_Node_Pos : in LexTokenManager.Token_Position; Consistent : out Boolean) is function Type_Can_Interrupt (Type_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin -- To interrupt the type must be a protected type with an attach handler. return Dictionary.IsProtectedTypeMark (Type_Sym) and then Dictionary.GetTypeHasPragma (Dictionary.GetRootType (Type_Sym), Dictionary.AttachHandler); end Type_Can_Interrupt; begin -- Check_Interrupt_Property_Consistency Consistent := True; if Dictionary.Is_Declared (Item => Type_Sym) or else Dictionary.IsPredefined (Type_Sym) then if Has_Interrupt_Property and then not Type_Can_Interrupt (Type_Sym => Type_Sym) then -- We have an own variable with the interrupt property whose type -- cannot interrupt. Consistent := False; ErrorHandler.Semantic_Error (Err_Num => 936, Reference => ErrorHandler.No_Reference, Position => Error_Node_Pos, Id_Str => Dictionary.GetSimpleName (Sym)); elsif not Has_Interrupt_Property and then Type_Can_Interrupt (Type_Sym => Type_Sym) then -- We have an own variable without the interrupt property whose -- type can interrupt. 
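            -- Illustrative sketch (hypothetical example, not taken from the original
            -- source): an own variable whose type is a protected type carrying
            -- pragma Attach_Handler reaches this branch when its own-variable
            -- annotation omits the interrupt property; error 935 is then reported
            -- against the variable, while the converse mismatch (property claimed
            -- but the type cannot interrupt) is error 936 above.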
Consistent := False; ErrorHandler.Semantic_Error (Err_Num => 935, Reference => ErrorHandler.No_Reference, Position => Error_Node_Pos, Id_Str => Dictionary.GetSimpleName (Sym)); end if; end if; end Check_Interrupt_Property_Consistency; spark-2012.0.deb/examiner/dictionary-generatesimplename.adb0000644000175000017500000002433711753202336022742 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with LexTokenStacks; separate (Dictionary) function GenerateSimpleName (Item : Symbol; Separator : String) return E_Strings.T is Name : E_Strings.T; -------------------------------------------------------------------------------- -- Each protected own variable has an associated implicit in stream which is used for -- volatile flow analysis of shared protected state. The names of these should never -- apepar in Examienr output; however, if they are needed for diagnostic reasons they -- can be constructed by this function. For a stream associated with P we return P__in. function Get_Implicit_Protected_In_Stream_Name (The_Implicit_In_Stream : RawDict.Implicit_In_Stream_Info_Ref) return E_Strings.T --# global in Dict; --# in LexTokenManager.State; is Name : E_Strings.T; begin Name := LexTokenManager.Lex_String_To_String (Lex_Str => RawDict.Get_Variable_Name (The_Variable => Get_Own_Variable_Of_Protected_Implicit_In_Stream (The_Implicit_In_Stream => The_Implicit_In_Stream))); E_Strings.Append_String (E_Str => Name, Str => "__in"); return Name; end Get_Implicit_Protected_In_Stream_Name; -------------------------------------------------------------------------------- function Get_Loop_Name (The_Loop : Symbol) return E_Strings.T --# global in Dict; --# in LexTokenManager.State; is Name : E_Strings.T; function Get_Loop_Number (The_Loop : Symbol) return Positive --# global in Dict; is Loops : Iterator; Number : Positive; begin Loops := First_Loop (CompilationUnit => GetEnclosingCompilationUnit (Set_Visibility (The_Visibility => Local, The_Unit => The_Loop))); Number := 1; while CurrentSymbol (Loops) /= The_Loop and then Number < Positive'Last loop Loops := NextSymbol (Loops); Number := Number + 1; end loop; return Number; end Get_Loop_Number; -------------------------------------------------------------------------------- function Image (Number : Positive) return E_Strings.T is --# hide Image; Signed_Image : constant String := Positive'Image (Number); begin return E_Strings.Copy_String (Str => Signed_Image (2 .. 
Signed_Image'Length)); end Image; begin -- Get_Loop_Name if LoopHasName (The_Loop) then Name := LexTokenManager.Lex_String_To_String (Lex_Str => GetSimpleName (Item => The_Loop)); else Name := E_Strings.Copy_String (Str => "LOOP__"); E_Strings.Append_Examiner_String (E_Str1 => Name, E_Str2 => Image (Number => Get_Loop_Number (The_Loop => The_Loop))); end if; return Name; end Get_Loop_Name; -------------------------------------------------------------------------------- function Get_Package_Name (The_Package : RawDict.Package_Info_Ref; Separator : String) return E_Strings.T --# global in Dict; --# in LexTokenManager.State; is Package_Local : RawDict.Package_Info_Ref; Current_Token : LexTokenManager.Lex_String; Stack : LexTokenStacks.Stacks; Name : E_Strings.T; begin Name := E_Strings.Empty_String; Package_Local := The_Package; LexTokenStacks.Clear (Stack); while Package_Local /= RawDict.Null_Package_Info_Ref loop LexTokenStacks.Push (Stack, RawDict.Get_Package_Name (The_Package => Package_Local)); Package_Local := RawDict.Get_Package_Parent (The_Package => Package_Local); end loop; loop LexTokenStacks.Pop (Stack, Current_Token); E_Strings.Append_Examiner_String (E_Str1 => Name, E_Str2 => LexTokenManager.Lex_String_To_String (Lex_Str => Current_Token)); exit when LexTokenStacks.IsEmpty (Stack); E_Strings.Append_String (E_Str => Name, Str => Separator); end loop; return Name; end Get_Package_Name; -------------------------------------------------------------------------------- function Get_Record_Variable_Name (The_Record : RawDict.Subcomponent_Info_Ref; Separator : String) return E_Strings.T --# global in Dict; --# in LexTokenManager.State; is Record_Local : Symbol; Current_Token : LexTokenManager.Lex_String; Stack : LexTokenStacks.Stacks; Name : E_Strings.T; begin Name := E_Strings.Empty_String; Record_Local := RawDict.Get_Subcomponent_Symbol (The_Record); LexTokenStacks.Clear (Stack); loop -- we want to ignore any inherited fields for name generation purposes if RawDict.GetSymbolDiscriminant (Record_Local) /= Subcomponent_Symbol or else not RawDict.Get_Record_Component_Inherited_Field (The_Record_Component => RawDict.Get_Subcomponent_Record_Component (The_Subcomponent => RawDict.Get_Subcomponent_Info_Ref (Item => Record_Local))) then LexTokenStacks.Push (Stack, GetSimpleName (Record_Local)); end if; exit when RawDict.GetSymbolDiscriminant (Record_Local) /= Subcomponent_Symbol; -- entire record var Record_Local := RawDict.Get_Subcomponent_Object (The_Subcomponent => RawDict.Get_Subcomponent_Info_Ref (Item => Record_Local)); end loop; loop LexTokenStacks.Pop (Stack, Current_Token); E_Strings.Append_Examiner_String (E_Str1 => Name, E_Str2 => LexTokenManager.Lex_String_To_String (Lex_Str => Current_Token)); exit when LexTokenStacks.IsEmpty (Stack); E_Strings.Append_String (E_Str => Name, Str => Separator); end loop; return Name; end Get_Record_Variable_Name; -------------------------------------------------------------------------------- function Get_Loop_Entry_Variable_Name (The_Loop_Entry_Var : Symbol) return E_Strings.T --# global in Dict; --# in LexTokenManager.State; is Result : E_Strings.T; Loop_Name : E_Strings.T; begin -- Loop on entry variable names are constructed from the original variable name -- and the associated loop name Result := LexTokenManager.Lex_String_To_String (Lex_Str => GetSimpleName (Item => The_Loop_Entry_Var)); Loop_Name := Get_Loop_Name (The_Loop => RawDict.GetLoopEntryVariableTheLoop (The_Loop_Entry_Var)); E_Strings.Append_String (E_Str => Result, Str => 
"__entry__"); E_Strings.Append_Examiner_String (E_Str1 => Result, E_Str2 => Loop_Name); return Result; end Get_Loop_Entry_Variable_Name; -------------------------------------------------------------------------------- function Get_Parameter_Constraint_Name (The_Parameter_Constraint : RawDict.Parameter_Constraint_Info_Ref) return E_Strings.T --# global in Dict; --# in LexTokenManager.State; is Name : E_Strings.T; begin Name := LexTokenManager.Lex_String_To_String (Lex_Str => Get_Parameter_Constraint_Simple_Name (The_Parameter_Constraint => The_Parameter_Constraint)); -- above line will return the name of the formal parameter associated with the constraint E_Strings.Append_String (E_Str => Name, Str => "__index__subtype__"); E_Strings.Append_Examiner_String (E_Str1 => Name, E_Str2 => Maths.ValueToString (Num => Maths.IntegerToValue (I => RawDict.Get_Parameter_Constraint_Dimension (The_Parameter_Constraint => The_Parameter_Constraint)))); return Name; end Get_Parameter_Constraint_Name; begin -- GenerateSimpleName case RawDict.GetSymbolDiscriminant (Item) is when LoopSymbol => Name := Get_Loop_Name (The_Loop => Item); when Package_Symbol => Name := Get_Package_Name (The_Package => RawDict.Get_Package_Info_Ref (Item => Item), Separator => Separator); when Subcomponent_Symbol => Name := Get_Record_Variable_Name (The_Record => RawDict.Get_Subcomponent_Info_Ref (Item => Item), Separator => Separator); when Implicit_In_Stream_Symbol => Name := Get_Implicit_Protected_In_Stream_Name (The_Implicit_In_Stream => RawDict.Get_Implicit_In_Stream_Info_Ref (Item => Item)); when LoopEntryVariableSymbol => Name := Get_Loop_Entry_Variable_Name (The_Loop_Entry_Var => Item); when Parameter_Constraint_Symbol => Name := Get_Parameter_Constraint_Name (The_Parameter_Constraint => RawDict.Get_Parameter_Constraint_Info_Ref (Item => Item)); when Type_Symbol => Name := Fetch_Simple_Name (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Item)); when others => Name := LexTokenManager.Lex_String_To_String (Lex_Str => GetSimpleName (Item => Item)); end case; return Name; end GenerateSimpleName; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_component_association.adb0000644000175000017500000003056411753202336026337 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Walk_Expression_P) procedure Wf_Component_Association (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type) is Name_Exp, Exp_Result : Sem.Exp_Record; Others_Node : STree.SyntaxNode; Has_Others_Clause : Boolean; Expected_Type : Dictionary.Symbol; --------------------------------------------------------- function Doing_Embedded_Aggregate (Node : STree.SyntaxNode) return Boolean --# global in STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.component_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_component_association; is Current_Node : STree.SyntaxNode; Result : Boolean; begin Current_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Current_Node = aggregate_or_expression OR named_association OR positional_association OR -- annotation_aggregate_or_expression OR annotation_named_association OR -- annotation_positional_association if STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.aggregate_or_expression or else STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.annotation_aggregate_or_expression then -- ASSUME Current_Node = aggregate_or_expression OR annotation_aggregate_or_expression Result := False; elsif STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.named_association or else STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.positional_association or else STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.annotation_named_association or else STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.annotation_positional_association then -- ASSUME Current_Node = named_association OR positional_association OR -- annotation_named_association OR annotation_positional_association Current_Node := STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Current_Node)); -- ASSUME Current_Node = aggregate_or_expression OR annotation_aggregate_or_expression OR NULL if Current_Node = STree.NullNode then -- ASSUME Current_Node = NULL Result := False; elsif STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.aggregate_or_expression or else STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.annotation_aggregate_or_expression then -- ASSUME Current_Node = aggregate_or_expression OR annotation_aggregate_or_expression Current_Node := STree.Child_Node (Current_Node => Current_Node); -- ASSUME Current_Node = aggregate OR expression OR -- annotation_aggregate OR annotation_expression if STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.aggregate or else STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.annotation_aggregate then -- ASSUME Current_Node = aggregate OR annotation_aggregate Result := True; elsif STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.expression or else STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.annotation_expression then -- ASSUME Current_Node = expression OR annotation_expression Result := False; else Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = aggregate OR expression OR " & "annotation_aggregate OR annotation_expression in Doing_Embedded_Aggregate"); end if; else Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = aggregate_or_expression OR annotation_aggregate_or_expression OR " & "NULL in 
Doing_Embedded_Aggregate"); end if; else Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = aggregate_or_expression OR named_association OR positional_association OR " & "annotation_aggregate_or_expression OR annotation_named_association OR annotation_positional_association " & "in Doing_Embedded_Aggregate"); end if; return Result; end Doing_Embedded_Aggregate; begin -- Wf_Component_Association -- check for positional_association in which case we do nothing -- neither do we do anything if others part is an embedded aggregate if not Doing_Embedded_Aggregate (Node => Node) then Others_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Others_Node = aggregate_or_expression OR named_association OR positional_association OR -- annotation_aggregate_or_expression OR annotation_named_association OR -- annotation_positional_association if STree.Syntax_Node_Type (Node => Others_Node) = SP_Symbols.aggregate_or_expression or else STree.Syntax_Node_Type (Node => Others_Node) = SP_Symbols.named_association or else STree.Syntax_Node_Type (Node => Others_Node) = SP_Symbols.annotation_aggregate_or_expression or else STree.Syntax_Node_Type (Node => Others_Node) = SP_Symbols.annotation_named_association then -- ASSUME Others_Node = aggregate_or_expression OR named_association OR -- annotation_aggregate_or_expression OR annotation_named_association Others_Node := STree.Child_Node (Current_Node => Others_Node); -- ASSUME Others_Node = aggregate OR expression OR named_association_rep OR -- annotation_aggregate OR annotation_expression OR annotation_named_association_rep if STree.Syntax_Node_Type (Node => Others_Node) = SP_Symbols.expression or else STree.Syntax_Node_Type (Node => Others_Node) = SP_Symbols.annotation_expression then -- ASSUME Others_Node = expression OR annotation_expression Has_Others_Clause := True; elsif STree.Syntax_Node_Type (Node => Others_Node) = SP_Symbols.aggregate or else STree.Syntax_Node_Type (Node => Others_Node) = SP_Symbols.annotation_aggregate then -- ASSUME Others_Node = aggregate OR annotation_aggregate Has_Others_Clause := False; elsif STree.Syntax_Node_Type (Node => Others_Node) = SP_Symbols.named_association_rep or else STree.Syntax_Node_Type (Node => Others_Node) = SP_Symbols.annotation_named_association_rep then -- ASSUME Others_Node = named_association_rep OR annotation_named_association_rep Others_Node := STree.Next_Sibling (Current_Node => Others_Node); -- ASSUME Others_Node = aggregate_or_expression OR annotation_aggregate_or_expression OR NULL if Others_Node = STree.NullNode then -- ASSUME Others_Node = NULL Has_Others_Clause := False; elsif STree.Syntax_Node_Type (Node => Others_Node) = SP_Symbols.aggregate_or_expression or else STree.Syntax_Node_Type (Node => Others_Node) = SP_Symbols.annotation_aggregate_or_expression then -- ASSUME Others_Node = aggregate_or_expression OR annotation_aggregate_or_expression Others_Node := STree.Child_Node (Current_Node => Others_Node); -- ASSUME Others_Node = aggregate OR expression OR -- annotation_aggregate OR annotation_expression if STree.Syntax_Node_Type (Node => Others_Node) = SP_Symbols.expression or else STree.Syntax_Node_Type (Node => Others_Node) = SP_Symbols.annotation_expression then -- ASSUME Others_Node = expression OR annotation_expression Has_Others_Clause := True; elsif STree.Syntax_Node_Type (Node => Others_Node) = SP_Symbols.aggregate or else STree.Syntax_Node_Type (Node => Others_Node) = SP_Symbols.annotation_aggregate then -- ASSUME 
Others_Node = aggregate OR annotation_aggregate Has_Others_Clause := False; else Has_Others_Clause := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Others_Node = aggregate OR expression OR " & "annotation_aggregate OR annotation_expression in Wf_Component_Association"); end if; else Has_Others_Clause := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Others_Node = aggregate_or_expression OR annotation_aggregate_or_expression OR " & "NULL in Wf_Component_Association"); end if; else Has_Others_Clause := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Others_Node = aggregate OR expression OR named_association_rep OR " & "annotation_aggregate OR annotation_expression OR annotation_named_association_rep " & "in Wf_Component_Association"); end if; if Has_Others_Clause then Exp_Stack.Pop (Item => Exp_Result, Stack => E_Stack); Exp_Stack.Pop (Item => Name_Exp, Stack => E_Stack); if Dictionary.IsUnknownTypeMark (Name_Exp.Type_Symbol) then Name_Exp.Errors_In_Expression := True; elsif Dictionary.TypeIsArray (Name_Exp.Type_Symbol) then Expected_Type := Dictionary.GetArrayComponent (Name_Exp.Type_Symbol); Sem.Assignment_Check (Position => STree.Node_Position (Node => Others_Node), Scope => Scope, Target_Type => Expected_Type, Exp_Result => Exp_Result); Name_Exp.Is_Static := False; Name_Exp.Is_Constant := Name_Exp.Is_Constant and then Exp_Result.Is_Constant; STree.Add_Node_Symbol (Node => Node, Sym => Expected_Type); end if; Name_Exp.Errors_In_Expression := Name_Exp.Errors_In_Expression or else Exp_Result.Errors_In_Expression; Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); end if; elsif STree.Syntax_Node_Type (Node => Others_Node) /= SP_Symbols.positional_association and then STree.Syntax_Node_Type (Node => Others_Node) /= SP_Symbols.annotation_positional_association then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Others_Node = aggregate_or_expression OR named_association OR positional_association OR " & "annotation_aggregate_or_expression OR annotation_named_association OR annotation_positional_association " & "in Wf_Component_Association"); end if; end if; end Wf_Component_Association; spark-2012.0.deb/examiner/sem-wf_context_clause_package_body-use_clause.adb0000644000175000017500000003334411753202336026044 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Wf_Context_Clause_Package_Body) procedure Use_Clause (Node : in STree.SyntaxNode; Comp_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) is Sym : Dictionary.Symbol; Next_Node : STree.SyntaxNode; It : STree.Iterator; Prefix_Ok : Boolean; OK_To_Add : Boolean := True; procedure Check_Prefix (Type_Node : in STree.SyntaxNode; Comp_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Prefix_Ok : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Comp_Sym, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Type_Node & --# Prefix_Ok from CommandLineData.Content, --# Comp_Sym, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# STree.Table, --# Type_Node & --# STree.Table from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# Type_Node; --# pre Syntax_Node_Type (Type_Node, STree.Table) = SP_Symbols.type_mark; --# post STree.Table = STree.Table~; is Curr_Node : STree.SyntaxNode; Last_Node : STree.SyntaxNode; Sym : Dictionary.Symbol; Parent_Sym : Dictionary.Symbol; Lib_Sym : Dictionary.Symbol; Ok : Boolean := True; begin Lib_Sym := Dictionary.GetLibraryPackage (Scope); Last_Node := Child_Node (Current_Node => Child_Node (Current_Node => Type_Node)); -- ASSUME Last_Node = dotted_simple_name OR identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Last_Node) = SP_Symbols.dotted_simple_name or else Syntax_Node_Type (Node => Last_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Last_Node = dotted_simple_name OR identifier in Check_Prefix"); Curr_Node := Last_Child_Of (Start_Node => Type_Node); -- ASSUME Curr_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Curr_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Curr_Node = identifier in Check_Prefix"); Parent_Sym := Dictionary.NullSymbol; Sym := Dictionary.LookupItem (Name => Node_Lex_String (Node => Curr_Node), Scope => Dictionary.GlobalScope, Context => Dictionary.ProgramContext, Full_Package_Name => False); loop --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Curr_Node, STree.Table) = SP_Symbols.identifier; if Dictionary.Is_Null_Symbol (Sym) then -- not declared or not visible Ok := False; ErrorHandler.Semantic_Error (Err_Num => 131, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Curr_Node), Id_Str => Node_Lex_String (Node => Curr_Node)); elsif not Dictionary.IsPackage (Sym) then Ok := False; ErrorHandler.Semantic_Error (Err_Num => 18, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Curr_Node), Id_Str => Node_Lex_String (Node => Curr_Node)); end if; exit when not Ok; STree.Set_Node_Lex_String (Sym => Sym, Node => Curr_Node); exit when Parent_Node (Current_Node => Curr_Node) = Last_Node or else Curr_Node = Last_Node; Curr_Node := Next_Sibling (Current_Node => Parent_Node (Current_Node => Curr_Node)); -- ASSUME Curr_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Curr_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect 
Curr_Node = identifier in Check_Prefix"); Parent_Sym := Sym; Sym := Dictionary.LookupSelectedItem (Prefix => Sym, Selector => Node_Lex_String (Node => Curr_Node), Scope => Dictionary.GlobalScope, Context => Dictionary.ProofContext); end loop; if Ok then -- check visibility in current scope if Dictionary.Packages_Are_Equal (Left_Symbol => Sym, Right_Symbol => Lib_Sym) then -- using self Sym := Dictionary.NullSymbol; Ok := False; ErrorHandler.Semantic_Error (Err_Num => 130, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Curr_Node), Id_Str => Node_Lex_String (Node => Curr_Node)); else if Dictionary.Is_Null_Symbol (Parent_Sym) or else Dictionary.IsProperDescendent (Lib_Sym, Sym) -- using an ancestor or else Dictionary.IsProperDescendent (Lib_Sym, Parent_Sym) -- using child of ancestor or else Dictionary.Packages_Are_Equal (Left_Symbol => Parent_Sym, Right_Symbol => Lib_Sym) -- using own child then -- look up directly Sym := Dictionary.LookupItem (Name => Node_Lex_String (Node => Curr_Node), Scope => Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); else Sym := Dictionary.LookupSelectedItem (Prefix => Parent_Sym, Selector => Node_Lex_String (Node => Curr_Node), Scope => Scope, Context => Dictionary.ProgramContext); end if; if Dictionary.Is_Null_Symbol (Sym) then Ok := False; ErrorHandler.Semantic_Error (Err_Num => 1, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Curr_Node), Id_Str => Node_Lex_String (Node => Curr_Node)); elsif Dictionary.IsProperDescendent (Lib_Sym, Sym) then -- using an ancestor Ok := False; ErrorHandler.Semantic_Error (Err_Num => 624, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Curr_Node), Id_Str => Node_Lex_String (Node => Curr_Node)); else STree.Set_Node_Lex_String (Sym => Sym, Node => Curr_Node); end if; end if; end if; if Ok and then not Dictionary.Is_Withed_Locally (The_Withed_Symbol => Sym, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Get_Visibility (Scope => Scope), The_Unit => Comp_Sym)) then Ok := False; ErrorHandler.Semantic_Error (Err_Num => 555, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Curr_Node), Id_Str => Node_Lex_String (Node => Curr_Node)); end if; Prefix_Ok := Ok; end Check_Prefix; begin -- Use_Clause case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => ErrorHandler.Semantic_Error (Err_Num => 550, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); when CommandLineData.SPARK95_Onwards => It := Find_First_Node (Node_Kind => SP_Symbols.type_mark, From_Root => Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.type_mark and --# Next_Node = Get_Node (It); -- first we must check that the prefix is a package which is locally withed Check_Prefix (Type_Node => Next_Node, Comp_Sym => Comp_Sym, Scope => Scope, Prefix_Ok => Prefix_Ok); if not Prefix_Ok then Sym := Dictionary.GetUnknownTypeMark; else -- there's a valid package prefix so go on to check -- that whole thing is a suitable type mark Wf_Type_Mark (Node => Next_Node, Current_Scope => Dictionary.GlobalScope, Context => Dictionary.ProofContext, Type_Sym => Sym); end if; -- no action if any error found during wffing of type mark if not Dictionary.IsUnknownTypeMark (Sym) then -- work entirely in terms of 
base types Sym := Dictionary.GetRootType (Sym); if Syntax_Node_Type (Node => Child_Node (Current_Node => Child_Node (Current_Node => Next_Node))) = SP_Symbols.identifier or else Dictionary.GetScope (Sym) = Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Dictionary.GetPredefinedPackageStandard) then -- from standard or there is no dotted part so all operators are already visible OK_To_Add := False; ErrorHandler.Semantic_Error_Sym (Err_Num => 551, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Sym => Sym, Scope => Scope); end if; -- check for duplicates if Dictionary.IsUsedLocally (Sym, Scope) then OK_To_Add := False; ErrorHandler.Semantic_Error_Sym (Err_Num => 552, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Sym => Sym, Scope => Scope); end if; -- limited private type, no operators avaiable if Dictionary.TypeIsLimited (Sym, Scope) then OK_To_Add := False; ErrorHandler.Semantic_Error_Sym (Err_Num => 554, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Sym => Sym, Scope => Scope); end if; if OK_To_Add then Dictionary.AddUseTypeReference (The_Visibility => Dictionary.Get_Visibility (Scope => Scope), The_Unit => Comp_Sym, TheType => Sym, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Next_Node), End_Position => Node_Position (Node => Next_Node))); end if; end if; -- not unknown type mark It := STree.NextNode (It); end loop; end case; end Use_Clause; spark-2012.0.deb/examiner/sem-check_suspendable_property_consistency.adb0000644000175000017500000000633711753202336025543 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem) procedure Check_Suspendable_Property_Consistency (Sym : in Dictionary.Symbol; Type_Sym : in Dictionary.Symbol; Is_In_Suspends_List : in Boolean; Error_Node_Pos : in LexTokenManager.Token_Position; Consistent : out Boolean) is The_Error : Natural := 0; function Type_Is_Suspendable (Type_Sym : Dictionary.Symbol) return Boolean --# global in CommandLineData.Content; --# in Dictionary.Dict; is begin -- To suspend the type must be a protected type with an entry or -- a suspension object type. 
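      -- Hedged illustration (hypothetical examples): a protected type declaring an
      -- entry, e.g. "entry Wait;", satisfies the first disjunct of the expression
      -- below, while an object of the predefined suspension object type (presumably
      -- Ada.Synchronous_Task_Control.Suspension_Object) satisfies the second; a
      -- protected type offering only protected procedures and functions satisfies
      -- neither, so naming such a variable in a suspends list leads to error 924
      -- in the enclosing check.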
return (Dictionary.IsProtectedTypeMark (Type_Sym) and then Dictionary.GetProtectedTypeHasEntry (Dictionary.GetRootType (Type_Sym))) or else Dictionary.IsPredefinedSuspensionObjectType (Type_Sym); end Type_Is_Suspendable; begin -- Check_Suspendable_Property_Consistency if Is_In_Suspends_List then if not (Dictionary.IsOwnVariable (Sym) or else Dictionary.IsConstituent (Sym)) then -- We have an entry in a suspends list that it not being applied -- to an own variable. The_Error := 924; elsif Dictionary.IsOwnVariable (Sym) and then not Dictionary.GetOwnVariableProtected (Sym) then -- The own variable is not protected and hence cannot suspend. The_Error := 924; elsif (Dictionary.Is_Declared (Item => Type_Sym) or else Dictionary.IsPredefined (Type_Sym)) and then not Type_Is_Suspendable (Type_Sym => Type_Sym) then -- We have a protected own variable whose type we know cannot suspend. The_Error := 924; end if; elsif Dictionary.IsOwnVariable (Sym) and then Dictionary.GetOwnVariableProtected (Sym) and then Type_Is_Suspendable (Type_Sym => Type_Sym) then -- We are declaring an object that can suspend but does not have a -- suspendable property declared in the own variable annotation. The_Error := 889; end if; if The_Error /= 0 then ErrorHandler.Semantic_Error (Err_Num => The_Error, Reference => ErrorHandler.No_Reference, Position => Error_Node_Pos, Id_Str => Dictionary.GetSimpleName (Sym)); end if; Consistent := The_Error = 0; end Check_Suspendable_Property_Consistency; spark-2012.0.deb/examiner/sem-compunit-up_wf_package_body.adb0000644000175000017500000010650611753202336023157 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with SLI; separate (Sem.CompUnit) procedure Up_Wf_Package_Body (Node : in STree.SyntaxNode; Scope : in out Dictionary.Scopes; Withs_Own_Public_Child : in Boolean) is Ident_Node, End_Desig_Node, Node_To_Report_Errors, Next_Node, Ref_Node : STree.SyntaxNode; Pack_Sym : Dictionary.Symbol; ------------------------------------------------------- procedure Check_Own_Variables_Declared (Pack_Sym : in Dictionary.Symbol; Node_Pos : in LexTokenManager.Token_Position; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# Pack_Sym, --# Scope, --# SPARK_IO.File_Sys; is Constituent_List, Own_Var_List : Dictionary.Iterator; Own_Var_Sym, Constituent : Dictionary.Symbol; procedure Check_Stream_Has_Address (Sym : in Dictionary.Symbol; Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# SPARK_IO.File_Sys, --# Sym; is begin if Dictionary.GetOwnVariableOrConstituentMode (Sym) /= Dictionary.DefaultMode and then not Dictionary.IsProtectedTypeMark (Dictionary.GetType (Sym)) and then not Dictionary.VariableHasAddressClause (Sym) and then not Dictionary.VariableHasPragmaImport (Sym) then ErrorHandler.Semantic_Warning (Err_Num => 395, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (Sym)); end if; end Check_Stream_Has_Address; function Own_Var_Declared_Remotely (Constituent : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return Dictionary.IsOwnVariable (Constituent) and then Dictionary.GetContext (Dictionary.GetRegion (Dictionary.GetScope (Constituent))) = Dictionary.ProgramContext; end Own_Var_Declared_Remotely; begin -- Check_Own_Variables_Declared Own_Var_List := Dictionary.FirstOwnVariable (Pack_Sym); while not Dictionary.IsNullIterator (Own_Var_List) loop Own_Var_Sym := Dictionary.CurrentSymbol (Own_Var_List); if not Dictionary.GetOwnVariableIsInterruptStream (Own_Var_Sym) then if Dictionary.IsRefinedOwnVariable (Own_Var_Sym) then Constituent_List := Dictionary.FirstConstituent (Own_Var_Sym); while not Dictionary.IsNullIterator (Constituent_List) loop Constituent := Dictionary.CurrentSymbol (Constituent_List); if not (Own_Var_Declared_Remotely (Constituent => Constituent) or else Dictionary.Is_Declared (Item => Constituent)) then ErrorHandler.Semantic_Error_Sym (Err_Num => 84, Reference => 20, Position => Node_Pos, Sym => Constituent, Scope => Scope); elsif not Own_Var_Declared_Remotely (Constituent => Constituent) then -- it is declared, so check streams have addresses, etc. 
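                  -- Hedged note: Check_Stream_Has_Address (declared above) issues
                  -- warning 395 here when a moded, non-protected constituent has
                  -- neither an address clause nor pragma Import, the assumption
                  -- being that an external ("in"/"out" mode) stream variable is
                  -- normally mapped onto hardware or onto an imported object.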
Check_Stream_Has_Address (Sym => Constituent, Node_Pos => Node_Pos); -- CheckProtectedReallyIs (Constituent); end if; Constituent_List := Dictionary.NextSymbol (Constituent_List); end loop; else if not Dictionary.Is_Declared (Item => Own_Var_Sym) then ErrorHandler.Semantic_Error (Err_Num => 28, Reference => 20, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (Own_Var_Sym)); else -- it is declared, so check streams have addresses, etc. Check_Stream_Has_Address (Sym => Own_Var_Sym, Node_Pos => Node_Pos); end if; end if; end if; Own_Var_List := Dictionary.NextSymbol (Own_Var_List); end loop; end Check_Own_Variables_Declared; ------------------------------------------------------- -- Check that Integrity property of refinement constituents (if present) -- is OK with the Integrity of the abstract own variable(s), given -- the selected setting of CommandLineData.Content.InfoFlowPolicy procedure Check_Integrity_Of_Refinements (Pack_Sym : in Dictionary.Symbol; Node_Pos : in LexTokenManager.Token_Position; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# Pack_Sym, --# Scope, --# SPARK_IO.File_Sys; is Constituent_List, Own_Var_List : Dictionary.Iterator; Own_Var_Sym, Constituent : Dictionary.Symbol; Abstract_Integrity, Constituent_Integrity : LexTokenManager.Lex_String; procedure Check_Integrities (Own_Var_Sym, Constituent : in Dictionary.Symbol; Abstract_Integrity, Constituent_Integrity : in LexTokenManager.Lex_String; Node_Pos : in LexTokenManager.Token_Position; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstract_Integrity, --# CommandLineData.Content, --# Constituent, --# Constituent_Integrity, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# Own_Var_Sym, --# Scope, --# SPARK_IO.File_Sys; is begin if Abstract_Integrity = LexTokenManager.Null_String then if Constituent_Integrity = LexTokenManager.Null_String then -- Both null. No action required. null; else -- Abstract integrity null, constituent not null - error! ErrorHandler.Semantic_Error_Sym2 (Err_Num => 861, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Sym => Own_Var_Sym, Sym2 => Constituent, Scope => Scope); end if; else if Constituent_Integrity = LexTokenManager.Null_String then -- Abstract var has an integrity, constituent doesn't. -- This is OK - the constituent inherits the Integrity of the abstract. null; else -- Both have specific Integrity properties, so need to check -- that they are the same, else error 862 if Maths.ValueRep (Abstract_Integrity) /= Maths.ValueRep (Constituent_Integrity) then ErrorHandler.Semantic_Error_Sym2 (Err_Num => 862, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Sym => Own_Var_Sym, Sym2 => Constituent, Scope => Scope); end if; end if; end if; end Check_Integrities; begin -- Check_Integrity_Of_Refinements case CommandLineData.Content.Info_Flow_Policy is when CommandLineData.None => -- Nothing to do if -policy is not set at all... 
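            -- Summary of the checks applied under the Safety and Security policies
            -- below (derived from Check_Integrities above):
            --   abstract unset, constituent unset  -> nothing to check
            --   abstract unset, constituent set    -> error 861
            --   abstract set,   constituent unset  -> constituent inherits it
            --   abstract set,   constituent set    -> values must match, else error 862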
null; when CommandLineData.Safety | CommandLineData.Security => Own_Var_List := Dictionary.FirstOwnVariable (Pack_Sym); while not Dictionary.IsNullIterator (Own_Var_List) loop Own_Var_Sym := Dictionary.CurrentSymbol (Own_Var_List); if not Dictionary.GetOwnVariableIsInterruptStream (Own_Var_Sym) then if Dictionary.IsRefinedOwnVariable (Own_Var_Sym) then Abstract_Integrity := Dictionary.GetIntegrityProperty (Own_Var_Sym); Constituent_List := Dictionary.FirstConstituent (Own_Var_Sym); while not Dictionary.IsNullIterator (Constituent_List) loop Constituent := Dictionary.CurrentSymbol (Constituent_List); Constituent_Integrity := Dictionary.GetIntegrityProperty (Constituent); Check_Integrities (Own_Var_Sym => Own_Var_Sym, Constituent => Constituent, Abstract_Integrity => Abstract_Integrity, Constituent_Integrity => Constituent_Integrity, Node_Pos => Node_Pos, Scope => Scope); Constituent_List := Dictionary.NextSymbol (Constituent_List); end loop; end if; end if; Own_Var_List := Dictionary.NextSymbol (Own_Var_List); end loop; end case; end Check_Integrity_Of_Refinements; ------------------------------------------------------- procedure Check_Exported_Subprograms_Declared (Pack_Sym : in Dictionary.Symbol; Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# Pack_Sym, --# SPARK_IO.File_Sys; is procedure Check_Subprograms (Subprogram_It : in Dictionary.Iterator; Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# SPARK_IO.File_Sys, --# Subprogram_It; is Subprog_Sym : Dictionary.Symbol; It : Dictionary.Iterator; begin It := Subprogram_It; while not Dictionary.IsNullIterator (It) loop Subprog_Sym := Dictionary.CurrentSymbol (It); if not (Dictionary.IsProofFunction (Subprog_Sym) or else Dictionary.HasBody (Subprog_Sym) or else Dictionary.HasBodyStub (Subprog_Sym)) then ErrorHandler.Semantic_Error (Err_Num => 29, Reference => 21, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (Subprog_Sym)); end if; It := Dictionary.NextSymbol (It); end loop; end Check_Subprograms; begin -- Check_Exported_Subprograms_Declared Check_Subprograms (Subprogram_It => Dictionary.First_Visible_Subprogram (The_Package_Or_Type => Pack_Sym), Node_Pos => Node_Pos); Check_Subprograms (Subprogram_It => Dictionary.First_Private_Subprogram (The_Package => Pack_Sym), Node_Pos => Node_Pos); end Check_Exported_Subprograms_Declared; ---------------------------------------------------------------- procedure Check_Visible_Task_Types_Declared (Pack_Sym : in Dictionary.Symbol; Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# 
Node_Pos, --# Pack_Sym, --# SPARK_IO.File_Sys; is It : Dictionary.Iterator; Task_Sym : Dictionary.Symbol; begin It := Dictionary.First_Visible_Task_Type (The_Package => Pack_Sym); while not Dictionary.IsNullIterator (It) loop Task_Sym := Dictionary.CurrentSymbol (It); if not (Dictionary.HasBody (Task_Sym) or else Dictionary.HasBodyStub (Task_Sym)) then ErrorHandler.Semantic_Error (Err_Num => 896, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (Task_Sym)); end if; It := Dictionary.NextSymbol (It); end loop; end Check_Visible_Task_Types_Declared; ---------------------------------------------------------------- procedure Check_Private_Task_Types_Declared (Pack_Sym : in Dictionary.Symbol; Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# Pack_Sym, --# SPARK_IO.File_Sys; is It : Dictionary.Iterator; Task_Sym : Dictionary.Symbol; begin It := Dictionary.First_Private_Task_Type (The_Package => Pack_Sym); while not Dictionary.IsNullIterator (It) loop Task_Sym := Dictionary.CurrentSymbol (It); if not (Dictionary.HasBody (Task_Sym) or else Dictionary.HasBodyStub (Task_Sym)) then ErrorHandler.Semantic_Error (Err_Num => 896, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (Task_Sym)); end if; It := Dictionary.NextSymbol (It); end loop; end Check_Private_Task_Types_Declared; ---------------------------------------------------------------- procedure Check_Visible_Protected_Types_Declared (Pack_Sym : in Dictionary.Symbol; Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# Pack_Sym, --# SPARK_IO.File_Sys; is It : Dictionary.Iterator; Protected_Sym : Dictionary.Symbol; begin It := Dictionary.First_Visible_Protected_Type (The_Package => Pack_Sym); while not Dictionary.IsNullIterator (It) loop Protected_Sym := Dictionary.CurrentSymbol (It); if not (Dictionary.HasBody (Protected_Sym) or else Dictionary.HasBodyStub (Protected_Sym)) then ErrorHandler.Semantic_Error (Err_Num => 897, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (Protected_Sym)); end if; It := Dictionary.NextSymbol (It); end loop; end Check_Visible_Protected_Types_Declared; ---------------------------------------------------------------- procedure Check_Private_Protected_Types_Declared (Pack_Sym : in Dictionary.Symbol; Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# Pack_Sym, --# SPARK_IO.File_Sys; is It : Dictionary.Iterator; Protected_Sym : Dictionary.Symbol; begin It := 
Dictionary.First_Private_Protected_Type (The_Package => Pack_Sym); while not Dictionary.IsNullIterator (It) loop Protected_Sym := Dictionary.CurrentSymbol (It); if not (Dictionary.HasBody (Protected_Sym) or else Dictionary.HasBodyStub (Protected_Sym)) then ErrorHandler.Semantic_Error (Err_Num => 897, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (Protected_Sym)); end if; It := Dictionary.NextSymbol (It); end loop; end Check_Private_Protected_Types_Declared; ---------------------------------------------------------------- procedure Check_Initialization (Pack_Sym : in Dictionary.Symbol; Node_Pos : in LexTokenManager.Token_Position; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# Pack_Sym, --# Scope, --# SPARK_IO.File_Sys; is Constituent_List, Init_Own_Var_List : Dictionary.Iterator; Init_Own_Var_Sym, Constituent_Sym : Dictionary.Symbol; begin Init_Own_Var_List := Dictionary.FirstInitializedOwnVariable (Pack_Sym); -- Note. Moded own variables will not get picked up by this iterator since -- other wffs ensure that moded own vars can never appear in Init clauses -- Note. Protected moded own variables will get picked up by this iterator -- as protected own variables are implicitly initialized. while not Dictionary.IsNullIterator (Init_Own_Var_List) loop Init_Own_Var_Sym := Dictionary.CurrentSymbol (Init_Own_Var_List); if Dictionary.IsConcreteOwnVariable (Init_Own_Var_Sym) then if not Dictionary.VariableIsInitialized (Init_Own_Var_Sym) then ErrorHandler.Usage_Error (Err_Type => ErrorHandler.Undefined_Var, Position => Node_Pos, Var_Sym => Init_Own_Var_Sym, Scope => Scope); ErrorHandler.Usage_Error (Err_Type => ErrorHandler.Undefined_Export, Position => Node_Pos, Var_Sym => Init_Own_Var_Sym, Scope => Scope); end if; else -- abstract own var Constituent_List := Dictionary.FirstConstituent (Init_Own_Var_Sym); while not Dictionary.IsNullIterator (Constituent_List) loop Constituent_Sym := Dictionary.CurrentSymbol (Constituent_List); if (not Dictionary.IsOwnVariable (Constituent_Sym)) and then not Dictionary.VariableIsInitialized (Constituent_Sym) and then Dictionary.GetConstituentMode (Constituent_Sym) = Dictionary.DefaultMode then ErrorHandler.Usage_Error (Err_Type => ErrorHandler.Undefined_Var, Position => Node_Pos, Var_Sym => Constituent_Sym, Scope => Scope); ErrorHandler.Usage_Error (Err_Type => ErrorHandler.Undefined_Export, Position => Node_Pos, Var_Sym => Constituent_Sym, Scope => Scope); end if; Constituent_List := Dictionary.NextSymbol (Constituent_List); end loop; end if; Init_Own_Var_List := Dictionary.NextSymbol (Init_Own_Var_List); end loop; end Check_Initialization; ---------------------------------------------------------------- procedure Check_Own_Tasks_Declared (Pack_Sym : in Dictionary.Symbol; Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# Pack_Sym, --# 
SPARK_IO.File_Sys; is It : Dictionary.Iterator; Own_Task_Sym : Dictionary.Symbol; begin It := Dictionary.FirstOwnTask (Pack_Sym); while not Dictionary.IsNullIterator (It) loop Own_Task_Sym := Dictionary.CurrentSymbol (It); if not Dictionary.Is_Declared (Item => Own_Task_Sym) then ErrorHandler.Semantic_Error (Err_Num => 931, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (Own_Task_Sym)); end if; It := Dictionary.NextSymbol (It); end loop; end Check_Own_Tasks_Declared; begin -- Up_Wf_Package_Body Pack_Sym := Dictionary.GetRegion (Scope); Next_Node := Child_Node (Current_Node => Last_Sibling_Of (Start_Node => Child_Node (Current_Node => Node))); -- ASSUME Next_Node = pragma_rep OR hidden_part if Syntax_Node_Type (Node => Next_Node) = SP_Symbols.pragma_rep then -- ASSUME Next_Node = pragma_rep Next_Node := Next_Sibling (Current_Node => Next_Node); -- ASSUME Next_Node = declarative_part OR dotted_simple_name OR package_initialization if Syntax_Node_Type (Node => Next_Node) = SP_Symbols.declarative_part then -- ASSUME Next_Node = declarative_part Next_Node := Next_Sibling (Current_Node => Next_Node); elsif Syntax_Node_Type (Node => Next_Node) /= SP_Symbols.dotted_simple_name and then Syntax_Node_Type (Node => Next_Node) /= SP_Symbols.package_initialization then Next_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = declarative_part OR dotted_simple_name OR " & "package_initialization in Up_Wf_Package_Body"); end if; -- ASSUME Next_Node = dotted_simple_name OR package_initialization if Syntax_Node_Type (Node => Next_Node) = SP_Symbols.package_initialization then -- ASSUME Next_Node = package_initialization Next_Node := Child_Node (Current_Node => Next_Node); -- ASSUME Next_Node = sequence_of_statements OR hidden_part if Syntax_Node_Type (Node => Next_Node) = SP_Symbols.hidden_part then -- ASSUME Next_Node = hidden_part End_Desig_Node := STree.NullNode; Node_To_Report_Errors := Next_Node; elsif Syntax_Node_Type (Node => Next_Node) = SP_Symbols.sequence_of_statements then -- ASSUME Next_Node = sequence_of_statements End_Desig_Node := Next_Sibling (Current_Node => Next_Node); -- ASSUME End_Desig_Node = dotted_simple_name SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => End_Desig_Node) = SP_Symbols.dotted_simple_name, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect End_Desig_Node = dotted_simple_name in Up_Wf_Package_Body"); Node_To_Report_Errors := End_Desig_Node; else End_Desig_Node := STree.NullNode; Node_To_Report_Errors := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = sequence_of_statements OR hidden_part in Up_Wf_Package_Body"); end if; elsif Syntax_Node_Type (Node => Next_Node) = SP_Symbols.dotted_simple_name then -- ASSUME Next_Node = dotted_simple_name -- no pack init part provided End_Desig_Node := Next_Node; Node_To_Report_Errors := End_Desig_Node; Check_Initialization (Pack_Sym => Pack_Sym, Node_Pos => Node_Position (Node => Node_To_Report_Errors), Scope => Scope); else End_Desig_Node := STree.NullNode; Node_To_Report_Errors := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = dotted_simple_name OR package_initialization in Up_Wf_Package_Body"); end if; --# check (Syntax_Node_Type (End_Desig_Node, STree.Table) = SP_Symbols.dotted_simple_name or --# End_Desig_Node = STree.NullNode) and --# (Syntax_Node_Type 
(Node_To_Report_Errors, STree.Table) = SP_Symbols.hidden_part or --# Syntax_Node_Type (Node_To_Report_Errors, STree.Table) = SP_Symbols.dotted_simple_name); if Syntax_Node_Type (Node => End_Desig_Node) = SP_Symbols.dotted_simple_name then -- ASSUME End_Desig_Node = dotted_simple_name Ident_Node := Last_Child_Of (Start_Node => Node); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Up_Wf_Package_Body"); Check_Closing_Identifier (End_Name_Node => End_Desig_Node, Ident_Node => Ident_Node); end if; Check_Announced_Types_Declared (Pack_Sym => Pack_Sym, Scope => Scope, Node_Pos => Node_Position (Node => Node_To_Report_Errors)); Check_Own_Variables_Declared (Pack_Sym => Pack_Sym, Node_Pos => Node_Position (Node => Node_To_Report_Errors), Scope => Scope); Check_Integrity_Of_Refinements (Pack_Sym => Pack_Sym, Node_Pos => Node_Position (Node => Node_To_Report_Errors), Scope => Scope); Check_Exported_Subprograms_Declared (Pack_Sym => Pack_Sym, Node_Pos => Node_Position (Node => Node_To_Report_Errors)); Check_Visible_Task_Types_Declared (Pack_Sym => Pack_Sym, Node_Pos => Node_Position (Node => Node_To_Report_Errors)); Check_Private_Task_Types_Declared (Pack_Sym => Pack_Sym, Node_Pos => Node_Position (Node => Node_To_Report_Errors)); Check_Visible_Protected_Types_Declared (Pack_Sym => Pack_Sym, Node_Pos => Node_Position (Node => Node_To_Report_Errors)); Check_Private_Protected_Types_Declared (Pack_Sym => Pack_Sym, Node_Pos => Node_Position (Node => Node_To_Report_Errors)); CheckEmbedBodies (Comp_Sym => Pack_Sym, Node_Pos => Node_Position (Node => Node_To_Report_Errors)); Check_Own_Tasks_Declared (Pack_Sym => Pack_Sym, Node_Pos => Node_Position (Node => Node_To_Report_Errors)); if Withs_Own_Public_Child then ErrorHandler.Semantic_Warning (Err_Num => 426, Position => Node_Position (Node => Node_To_Report_Errors), Id_Str => LexTokenManager.Null_String); end if; elsif Syntax_Node_Type (Node => Next_Node) /= SP_Symbols.hidden_part then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = pragma_rep OR hidden_part in Up_Wf_Package_Body"); end if; -- The cross-references for the refinement are generated after the -- full semantic analysis of the package body because we need to -- know if a refinement constituent is actually an abstract own -- variable or a body concrete own variable. If it is an abstract -- own variable, the refinement constituent is considered as a -- declaration, if it is a body concrete own variable, the -- refinement constituent is a usage of the body concrete variable -- that will be declared later in the package body. 
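      -- A minimal sketch of the two cases (the names P, Q, State and A are
      -- hypothetical, not taken from the sources):
      --
      --    package body P
      --    --# own State is A, Q.State;
      --    is
      --       A : Integer;  -- concrete constituent: its occurrence in the
      --                     -- refinement clause above is a *usage* of this
      --                     -- variable, declared later in the body
      --    end P;
      --
      -- whereas Q.State, an (abstract) own variable of the private child Q,
      -- has no later declaration in this body, so its occurrence in the
      -- refinement clause is recorded as a declaration cross-reference.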
if ErrorHandler.Generate_SLI then Ref_Node := Next_Sibling (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME Ref_Node = refinement_definition OR package_implementation if Syntax_Node_Type (Node => Ref_Node) = SP_Symbols.refinement_definition then -- ASSUME Ref_Node = refinement_definition SLI.Generate_Xref_Refinement (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Ref_Node, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Pack_Sym)); elsif Syntax_Node_Type (Node => Ref_Node) /= SP_Symbols.package_implementation then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ref_Node = refinement_definition OR package_implementation in Up_Wf_Package_Body"); end if; end if; Scope := Dictionary.GetEnclosingScope (Scope); end Up_Wf_Package_Body; spark-2012.0.deb/examiner/vcg.smf0000644000175000017500000000023211753202337015535 0ustar eugeneugenclists.adb -vcg cstacks.adb labels.adb -vcg pairs.adb -vcg pile.adb -vcg structures.adb -vcg graph.adb stmtstack.adb -vcg vcg.adb vcg-producevcs.adb -vcg spark-2012.0.deb/examiner/sem-wf_predicate.adb0000644000175000017500000000466011753202336020145 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem) procedure Wf_Predicate (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Context : in Anno_Tilde_Context; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord; Errors_Found : out Boolean) is Pred_Result : Exp_Record; Child : STree.SyntaxNode; begin Child := Child_Node (Current_Node => Node); -- ASSUME Child = annotation_expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Child) = SP_Symbols.annotation_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child = annotation_expression in Wf_Predicate"); Walk_Expression_P.Walk_Annotation_Expression (Exp_Node => Child, Scope => Scope, Type_Context => Dictionary.GetPredefinedBooleanType, Context => Context, Result => Pred_Result, Component_Data => Component_Data, The_Heap => The_Heap); if not Dictionary.TypeIsBoolean (Pred_Result.Type_Symbol) then Errors_Found := True; ErrorHandler.Semantic_Error (Err_Num => 326, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); else -- result is Boolean but errors may have been found lower down the expression Errors_Found := Pred_Result.Errors_In_Expression; end if; end Wf_Predicate; spark-2012.0.deb/examiner/sli-xref.SHADOW.adb0000644000175000017500000017317011753202336017447 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with GNAT.Dynamic_Tables; # if not SPARK then pragma Warnings (Off); # end if; with GNAT.Table.Sort; # if not SPARK then pragma Warnings (On); # end if; with CommandLineData; with ContextManager.Ops; with E_Strings; with ExaminerConstants; with FileSystem; with LexTokenLists; with SLI.IO; with SPARK_IO; with SystemErrors; -- Data structure of the cross-references table: -- -- 1. DECL_COMP_UNIT is an array of Decl_Comp_Unit_Item -- (DECLARATION_COMPILATION_UNIT + SYM) where each -- DECLARATION_COMPILATION_UNIT is a compilation unit (spec or body) -- that the Examiner has performed a semantic analysis. -- -- 2. SYM is an array of Sym_Item (SYMBOL_DECLARATION + -- USAGE_COMP_UNIT) where each SYMBOL_DECLARATION is a symbol that is -- declared in the DECLARATION_COMPILATION_UNIT. -- -- 3. USAGE_COMP_UNIT is an array of Usage_Comp_Unit_Item -- (USAGE_COMPILATION_UNIT + POS_REF_TYPE) where each -- USAGE_COMPILATION_UNIT is a compilation unit (spec or body) where -- the SYMBOL_DECLARATION declared in the -- DECLARATION_COMPILATION_UNIT is used. -- -- 4. 
POS_REF_TYPE is an array of Pos_Ref_Type_Item (LINE_NUMBER + -- COLUMN_NUMBER + REF_TYPE) -- -- 4.1 where each LINE_NUMBER is the line number where the -- SYMBOL_DECLARATION declared in the DECLARATION_COMPILATION_UNIT -- and used in the USAGE_COMP_UNIT is located, -- -- 4.2 where each COLUMN_NUMBER is the column number where the -- SYMBOL_DECLARATION declared in the DECLARATION_COMPILATION_UNIT -- and used in the USAGE_COMP_UNIT is located, -- -- 4.3 where each REF_TYPE is the type of crosse-reference of the -- SYMBOL_DECLARATION declared in the DECLARATION_COMPILATION_UNIT -- and used in the USAGE_COMP_UNIT. -- * DECL_COMP_UNIT ------------------+-----------------------------------+---- -- | DECLARATION_COMPILATION_UNIT + 0 | DECLARATION_COMPILATION_UNIT_ITEM | ... -- +--------------------------------+-+-----------------------------------+---- -- | -- V -- * SYM --------------------+ -- | SYMBOL_DECLARATION + | * USAGE_COMP_UNIT -----------+----------------------+---- -- | 0------------+->| USAGE_COMPILATION_UNIT + 0 | USAGE_COMP_UNIT_ITEM | ... -- +-------------------------+ +--------------------------+-+----------------------+---- -- | SYMBOL_DECLARATION_ITEM | | -- +-------------------------+ V -- | ... | * POS_REF_TYPE -----+ -- | LINE_NUMBER + | -- | COLUMN_NUMBER + | -- | REF_TYPE | -- +-------------------+ -- | POS_REF_TYPE_ITEM | -- +-------------------+ -- | ... | package body SLI.Xref --# own State is Decl_Comp_Unit_P.Table; is type SLI_Type_Char_Assoc_T is array (Dictionary.SLI_Type) of Character; SLI_Type_Char_Assoc : constant SLI_Type_Char_Assoc_T := SLI_Type_Char_Assoc_T' (Dictionary.SLI_Array_Object => 'a', Dictionary.SLI_Array_Type => 'A', Dictionary.SLI_Boolean_Object => 'b', Dictionary.SLI_Boolean_Type => 'B', Dictionary.SLI_Enumeration_Object => 'e', Dictionary.SLI_Enumeration_Type => 'E', Dictionary.SLI_Floating_Point_Object => 'f', Dictionary.SLI_Floating_Point_Type => 'F', Dictionary.SLI_Abstract_Type => 'H', Dictionary.SLI_Signed_Integer_Object => 'i', Dictionary.SLI_Signed_Integer_Type => 'I', Dictionary.SLI_Generic_Package_Type => 'k', Dictionary.SLI_Package_Type => 'K', Dictionary.SLI_Label_On_Loop => 'l', Dictionary.SLI_Modular_Integer_Object => 'm', Dictionary.SLI_Modular_Integer_Type => 'M', Dictionary.SLI_Enumeration_Literal => 'n', Dictionary.SLI_Named_Number => 'N', Dictionary.SLI_Fixed_Point_Object => 'o', Dictionary.SLI_Fixed_Point_Type => 'O', Dictionary.SLI_Record_Object => 'r', Dictionary.SLI_Record_Type => 'R', Dictionary.SLI_String_Object => 's', Dictionary.SLI_String_Type => 'S', Dictionary.SLI_Task_Object => 't', Dictionary.SLI_Task_Type => 'T', Dictionary.SLI_Generic_Procedure_Type => 'u', Dictionary.SLI_Procedure_Type => 'U', Dictionary.SLI_Generic_Function_Op => 'v', Dictionary.SLI_Function_Op => 'V', Dictionary.SLI_Protected_Object => 'w', Dictionary.SLI_Protected_Type => 'W', Dictionary.SLI_Entry_Family => 'Y', Dictionary.SLI_Generic_Formal_Parameter => 'z', Dictionary.SLI_Unknown_Type => 'Z'); ---------------------------------------- -- Pos_Ref_Type -- ---------------------------------------- type Pos_Ref_Type_Item_T is record Pos : LexTokenManager.Token_Position; Ref_Type : Character; end record; type Pos_Ref_Type_Item_Index is range 0 .. 
Integer'Last; # if not SPARK then package Pos_Ref_Type_P is new GNAT.Dynamic_Tables (Table_Component_Type => Pos_Ref_Type_Item_T, Table_Index_Type => Pos_Ref_Type_Item_Index, Table_Low_Bound => 1, Table_Initial => 1024, Table_Increment => 10); # else --# inherit Xref; package Pos_Ref_Type_P is type Table_Ptr is array (Xref.Pos_Ref_Type_Item_Index) of Xref.Pos_Ref_Type_Item_T; type Instance is record Table : Table_Ptr; end record; procedure Init (T : out Instance); --# derives T from ; procedure Free (T : in Instance); --# derives null from T; procedure Append (T : in out Instance; New_Val : in Xref.Pos_Ref_Type_Item_T); --# derives T from *, --# New_Val; end Pos_Ref_Type_P; # end if; ---------------------------------------- -- Usage_Comp_Unit -- ---------------------------------------- type Usage_Comp_Unit_Item_T is record Usage_Comp_Unit : ContextManager.UnitDescriptors; Pos_Ref_Type : Pos_Ref_Type_P.Instance; Sorted : Boolean; end record; type Usage_Comp_Unit_Item_Index is range 0 .. ExaminerConstants.ContextManagerMaxUnits; # if not SPARK then package Usage_Comp_Unit_P is new GNAT.Dynamic_Tables (Table_Component_Type => Usage_Comp_Unit_Item_T, Table_Index_Type => Usage_Comp_Unit_Item_Index, Table_Low_Bound => 1, Table_Initial => 1024, Table_Increment => 10); # else --# inherit Xref; package Usage_Comp_Unit_P is type Table_Ptr is array (Xref.Usage_Comp_Unit_Item_Index) of Xref.Usage_Comp_Unit_Item_T; type Instance is record Table : Table_Ptr; end record; procedure Init (T : out Instance); --# derives T from ; procedure Free (T : in Instance); --# derives null from T; procedure Append (T : in out Instance; New_Val : in Xref.Usage_Comp_Unit_Item_T); --# derives T from *, --# New_Val; end Usage_Comp_Unit_P; # end if; ---------------------------------------- -- Sym -- ---------------------------------------- type Sym_Item_T is record Sym : Dictionary.Symbol; Sym_Type : Dictionary.SLI_Type; Usage_Comp_Unit : Usage_Comp_Unit_P.Instance; end record; type Sym_Item_Index is range 0 .. Natural'Last; # if not SPARK then package Sym_P is new GNAT.Dynamic_Tables (Table_Component_Type => Sym_Item_T, Table_Index_Type => Sym_Item_Index, Table_Low_Bound => 1, Table_Initial => 1024, Table_Increment => 10); # else --# inherit Xref; package Sym_P is type Table_Ptr is array (Xref.Sym_Item_Index) of Xref.Sym_Item_T; type Instance is record Table : Table_Ptr; end record; procedure Init (T : out Instance); --# derives T from ; procedure Free (T : in Instance); --# derives null from T; procedure Append (T : in out Instance; New_Val : in Xref.Sym_Item_T); --# derives T from *, --# New_Val; end Sym_P; # end if; ---------------------------------------- -- Decl_Comp_Unit -- ---------------------------------------- type Decl_Comp_Unit_Item_T is record Decl_Comp_Unit : ContextManager.UnitDescriptors; Nb_Separates : Natural; Sym : Sym_P.Instance; end record; type Decl_Comp_Unit_Item_Index is range 0 .. 
ExaminerConstants.ContextManagerMaxUnits; # if not SPARK then package Decl_Comp_Unit_P is new GNAT.Table (Table_Component_Type => Decl_Comp_Unit_Item_T, Table_Index_Type => Decl_Comp_Unit_Item_Index, Table_Low_Bound => 1, Table_Initial => 1024, Table_Increment => 10); package Decl_Comp_Unit_Sort_P is new Decl_Comp_Unit_P.Sort; # else --# inherit ContextManager, --# Dictionary, --# LexTokenManager, --# Pos_Ref_Type_P, --# Sym_P, --# Usage_Comp_Unit_P, --# Xref; package Decl_Comp_Unit_P --# own Table; --# initializes Table; is type Table_Ptr is array (Xref.Decl_Comp_Unit_Item_Index) of Xref.Decl_Comp_Unit_Item_T; Table : Table_Ptr := Table_Ptr' (others => Xref.Decl_Comp_Unit_Item_T' (Decl_Comp_Unit => ContextManager.NullUnit, Nb_Separates => 0, Sym => Sym_P.Instance' (Table => Sym_P.Table_Ptr' (others => Xref.Sym_Item_T' (Sym => Dictionary.NullSymbol, Sym_Type => Dictionary.SLI_Unknown_Type, Usage_Comp_Unit => Usage_Comp_Unit_P.Instance' (Table => Usage_Comp_Unit_P.Table_Ptr' (others => Xref.Usage_Comp_Unit_Item_T' (Usage_Comp_Unit => ContextManager.NullUnit, Pos_Ref_Type => Pos_Ref_Type_P.Instance' (Table => Pos_Ref_Type_P.Table_Ptr' (others => Xref.Pos_Ref_Type_Item_T' (Pos => LexTokenManager.Null_Token_Position, Ref_Type => ' '))), Sorted => False)))))))); procedure Init; --# global in out Table; --# derives Table from *; procedure Free; --# global in out Table; --# derives Table from *; procedure Append (New_Val : in Xref.Decl_Comp_Unit_Item_T); --# global in out Table; --# derives Table from *, --# New_Val; end Decl_Comp_Unit_P; # end if; ---------------------------------------- -- Pos_Ref_Type -- ---------------------------------------- # if not SPARK then -- Sort Pos_Ref_Type table based on: -- 1. the reference type of the usage -- 2. the line number of the usage position -- 3. the column number of the usage postion function Lt_Pos_Ref_Type (Comp1, Comp2 : Pos_Ref_Type_Item_T) return Boolean is begin if Comp1.Ref_Type < Comp2.Ref_Type then return True; elsif Comp1.Ref_Type > Comp2.Ref_Type then return False; elsif Integer (Comp1.Pos.Start_Line_No) < Integer (Comp2.Pos.Start_Line_No) then return True; elsif Integer (Comp1.Pos.Start_Line_No) > Integer (Comp2.Pos.Start_Line_No) then return False; else return Comp1.Pos.Start_Pos < Comp2.Pos.Start_Pos; end if; end Lt_Pos_Ref_Type; procedure Sort_Pos_Ref_Type_Table is new Pos_Ref_Type_P.Sort_Table (Lt => Lt_Pos_Ref_Type); function Pos_Ref_Type_Last (T : Pos_Ref_Type_P.Instance) return Pos_Ref_Type_Item_Index renames Pos_Ref_Type_P.Last; # else package body Pos_Ref_Type_P is --# hide Pos_Ref_Type_P; end Pos_Ref_Type_P; procedure Sort_Pos_Ref_Type_Table (Table : in out Pos_Ref_Type_P.Instance) --# derives Table from *; is --# hide Sort_Pos_Ref_Type_Table; begin null; end Sort_Pos_Ref_Type_Table; function Pos_Ref_Type_Last (T : Pos_Ref_Type_P.Instance) return Pos_Ref_Type_Item_Index is --# hide Pos_Ref_Type_Last; begin null; end Pos_Ref_Type_Last; # end if; ---------------------------------------- -- Usage_Comp_Unit -- ---------------------------------------- # if not SPARK then -- Sort Usage_Comp_Unit table based on the declaration line number -- in the SLI file. 
function Lt_Usage_Comp_Unit (Comp1, Comp2 : Usage_Comp_Unit_Item_T) return Boolean is Line_Number1 : Natural := 0; Line_Number2 : Natural := 0; begin if Comp1.Usage_Comp_Unit /= ContextManager.NullUnit then Line_Number1 := ContextManager.Ops.Get_Line_Number (Descriptor => Comp1.Usage_Comp_Unit); end if; if Comp2.Usage_Comp_Unit /= ContextManager.NullUnit then Line_Number2 := ContextManager.Ops.Get_Line_Number (Descriptor => Comp2.Usage_Comp_Unit); end if; return Line_Number1 < Line_Number2; end Lt_Usage_Comp_Unit; procedure Sort_Usage_Comp_Unit_Table is new Usage_Comp_Unit_P.Sort_Table (Lt => Lt_Usage_Comp_Unit); function Usage_Comp_Unit_Last (T : Usage_Comp_Unit_P.Instance) return Usage_Comp_Unit_Item_Index renames Usage_Comp_Unit_P.Last; # else package body Usage_Comp_Unit_P is --# hide Usage_Comp_Unit_P; end Usage_Comp_Unit_P; procedure Sort_Usage_Comp_Unit_Table (Table : in out Usage_Comp_Unit_P.Instance) --# derives Table from *; is --# hide Sort_Usage_Comp_Unit_Table; begin null; end Sort_Usage_Comp_Unit_Table; function Usage_Comp_Unit_Last (T : Usage_Comp_Unit_P.Instance) return Usage_Comp_Unit_Item_Index is --# hide Usage_Comp_Unit_Last; begin null; end Usage_Comp_Unit_Last; # end if; ---------------------------------------- -- Sym -- ---------------------------------------- # if not SPARK then -- Sort the Sym table based on -- 1. the line number of the declaration position -- 2. the column number of the declaration position function Lt_Sym (Comp1, Comp2 : Sym_Item_T) return Boolean is Pos1 : LexTokenManager.Token_Position := LexTokenManager.Null_Token_Position; Pos2 : LexTokenManager.Token_Position := LexTokenManager.Null_Token_Position; begin if not Dictionary.Is_Null_Symbol (Comp1.Sym) then Pos1 := Dictionary.Get_Symbol_Location (Item => Comp1.Sym); end if; if not Dictionary.Is_Null_Symbol (Comp2.Sym) then Pos2 := Dictionary.Get_Symbol_Location (Item => Comp2.Sym); end if; if Integer (Pos1.Start_Line_No) < Integer (Pos2.Start_Line_No) then return True; elsif Integer (Pos1.Start_Line_No) > Integer (Pos2.Start_Line_No) then return False; else return Pos1.Start_Pos < Pos2.Start_Pos; end if; end Lt_Sym; procedure Sort_Sym_Table is new Sym_P.Sort_Table (Lt => Lt_Sym); function Sym_Last (T : Sym_P.Instance) return Sym_Item_Index renames Sym_P.Last; # else package body Sym_P is --# hide Sym_P; end Sym_P; procedure Sort_Sym_Table (Table : in out Sym_P.Instance) --# derives Table from *; is --# hide Sort_Sym_Table; begin null; end Sort_Sym_Table; function Sym_Last (T : Sym_P.Instance) return Sym_Item_Index is --# hide Sym_Last; begin null; end Sym_Last; # end if; ---------------------------------------- -- Decl_Comp_Unit -- ---------------------------------------- # if not SPARK then -- Sort Decl_Comp_Unit table based on the declaration line number -- in the SLI file. 
function Lt_Decl_Comp_Unit (Comp1, Comp2 : Decl_Comp_Unit_Item_T) return Boolean is Line_Number1 : Natural := 0; Line_Number2 : Natural := 0; begin if Comp1.Decl_Comp_Unit /= ContextManager.NullUnit then Line_Number1 := ContextManager.Ops.Get_Line_Number (Descriptor => Comp1.Decl_Comp_Unit); end if; if Comp2.Decl_Comp_Unit /= ContextManager.NullUnit then Line_Number2 := ContextManager.Ops.Get_Line_Number (Descriptor => Comp2.Decl_Comp_Unit); end if; return Line_Number1 < Line_Number2; end Lt_Decl_Comp_Unit; procedure Sort_Decl_Comp_Unit_Table is new Decl_Comp_Unit_Sort_P.Sort_Table (Lt => Lt_Decl_Comp_Unit); function Decl_Comp_Unit_Last return Decl_Comp_Unit_Item_Index renames Decl_Comp_Unit_P.Last; # else package body Decl_Comp_Unit_P is --# hide Decl_Comp_Unit_P; end Decl_Comp_Unit_P; procedure Sort_Decl_Comp_Unit_Table --# global in out Decl_Comp_Unit_P.Table; --# derives Decl_Comp_Unit_P.Table from *; is --# hide Sort_Decl_Comp_Unit_Table; begin null; end Sort_Decl_Comp_Unit_Table; function Decl_Comp_Unit_Last return Decl_Comp_Unit_Item_Index --# global in Decl_Comp_Unit_P.Table; is --# hide Decl_Comp_Unit_Last; begin null; end Decl_Comp_Unit_Last; # end if; ---------------------------------------- -- Search a compilation unit descriptor (Decl_Comp_Unit) in -- Decl_Comp_Unit_P.Table. Return the index number if found or 0 -- if not found. function Search_Decl_Comp_Unit (Decl_Comp_Unit : in ContextManager.UnitDescriptors) return Decl_Comp_Unit_Item_Index --# global in Decl_Comp_Unit_P.Table; is I : Decl_Comp_Unit_Item_Index; Found : Boolean; Top_Item : Decl_Comp_Unit_Item_Index; begin I := 1; Found := False; Top_Item := Decl_Comp_Unit_Last; while I <= Top_Item and then not Found loop if Decl_Comp_Unit_P.Table (I).Decl_Comp_Unit /= ContextManager.NullUnit and then Decl_Comp_Unit_P.Table (I).Decl_Comp_Unit = Decl_Comp_Unit then Found := True; else I := I + 1; end if; end loop; if not Found then I := 0; end if; return I; end Search_Decl_Comp_Unit; -- Search a symbol (Sym) in Decl_Comp_Unit_P.Table -- (Decl_Comp_Unit).Sym.Table. Return the index number if found or -- 0 if not found. function Search_Sym (Decl_Comp_Unit : in Decl_Comp_Unit_Item_Index; Sym : in Dictionary.Symbol) return Sym_Item_Index --# global in Decl_Comp_Unit_P.Table; is I : Sym_Item_Index; Found : Boolean; Top_Item : Sym_Item_Index; begin I := 1; Found := False; Top_Item := Sym_Last (T => Decl_Comp_Unit_P.Table (Decl_Comp_Unit).Sym); while I <= Top_Item and then not Found loop if not Dictionary.Is_Null_Symbol (Decl_Comp_Unit_P.Table (Decl_Comp_Unit).Sym.Table (I).Sym) and then Decl_Comp_Unit_P.Table (Decl_Comp_Unit).Sym.Table (I).Sym = Sym then Found := True; else I := I + 1; end if; end loop; if not Found then I := 0; end if; return I; end Search_Sym; -- Search a compilation unit descriptor (Usage_Comp_Unit) in -- Decl_Comp_Unit_P.Table (Decl_Comp_Unit).Sym.Table -- (Sym).Usage_Comp_Unit.Table. Return the index number if found -- or 0 if not found. 
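   -- Note that each *_Item_Index type starts at 0 while the corresponding
   -- table low bound is 1, which leaves the value 0 free to act as the
   -- "not found" result of the Search_* functions in this package.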
function Search_Usage_Comp_Unit (Decl_Comp_Unit : in Decl_Comp_Unit_Item_Index; Sym : in Sym_Item_Index; Usage_Comp_Unit : in ContextManager.UnitDescriptors) return Usage_Comp_Unit_Item_Index --# global in Decl_Comp_Unit_P.Table; is I : Usage_Comp_Unit_Item_Index; Found : Boolean; Top_Item : Usage_Comp_Unit_Item_Index; begin I := 1; Found := False; Top_Item := Usage_Comp_Unit_Last (T => Decl_Comp_Unit_P.Table (Decl_Comp_Unit).Sym.Table (Sym).Usage_Comp_Unit); while I <= Top_Item and then not Found loop if Decl_Comp_Unit_P.Table (Decl_Comp_Unit).Sym.Table (Sym).Usage_Comp_Unit.Table (I).Usage_Comp_Unit /= ContextManager.NullUnit and then Decl_Comp_Unit_P.Table (Decl_Comp_Unit).Sym.Table (Sym).Usage_Comp_Unit.Table (I).Usage_Comp_Unit = Usage_Comp_Unit then Found := True; else I := I + 1; end if; end loop; if not Found then I := 0; end if; return I; end Search_Usage_Comp_Unit; -- Search the position and reference type (Pos_Ref_Type) in -- Decl_Comp_Unit_P.Table (Decl_Comp_Unit).Sym.Table -- (Sym).Usage_Comp_Unit.Table -- (Usage_Comp_Unit).Pos_Ref_Type.Table. Return the index number -- if found or 0 if not found. function Search_Pos_Ref_Type (Decl_Comp_Unit : in Decl_Comp_Unit_Item_Index; Sym : in Sym_Item_Index; Usage_Comp_Unit : in Usage_Comp_Unit_Item_Index; Pos_Ref_Type : in Pos_Ref_Type_Item_T) return Pos_Ref_Type_Item_Index --# global in Decl_Comp_Unit_P.Table; is I : Pos_Ref_Type_Item_Index; Found : Boolean; Top_Item : Pos_Ref_Type_Item_Index; begin I := 1; Found := False; Top_Item := Pos_Ref_Type_Last (T => Decl_Comp_Unit_P.Table (Decl_Comp_Unit).Sym.Table (Sym).Usage_Comp_Unit.Table (Usage_Comp_Unit).Pos_Ref_Type); while I <= Top_Item and then not Found loop if Decl_Comp_Unit_P.Table (Decl_Comp_Unit).Sym.Table (Sym).Usage_Comp_Unit.Table (Usage_Comp_Unit).Pos_Ref_Type.Table (I) = Pos_Ref_Type then Found := True; else I := I + 1; end if; end loop; if not Found then I := 0; end if; return I; end Search_Pos_Ref_Type; procedure Increment_Nb_Separates (Comp_Unit : in ContextManager.UnitDescriptors) --# global in out Decl_Comp_Unit_P.Table; --# derives Decl_Comp_Unit_P.Table from *, --# Comp_Unit; is I : Decl_Comp_Unit_Item_Index; Decl_Comp_Unit_Item : Decl_Comp_Unit_Item_T; begin -- Check if the compilation unit (Comp_Unit) has already been -- declared. I := Search_Decl_Comp_Unit (Decl_Comp_Unit => Comp_Unit); if I = 0 then -- Add the new compilation unit (Comp_Unit) in -- Decl_Comp_Unit_P.Table. 
Decl_Comp_Unit_Item.Decl_Comp_Unit := Comp_Unit; Decl_Comp_Unit_Item.Nb_Separates := 0; Sym_P.Init (T => Decl_Comp_Unit_Item.Sym); Decl_Comp_Unit_P.Append (New_Val => Decl_Comp_Unit_Item); I := Decl_Comp_Unit_Last; end if; --# accept W, 169, Decl_Comp_Unit_P.Table, "Direct updates OK here"; Decl_Comp_Unit_P.Table (I).Nb_Separates := Decl_Comp_Unit_P.Table (I).Nb_Separates + 1; --# end accept; exception --# hide Increment_Nb_Separates; when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.XRef_Table_Full, Msg => "SLI.XREF.INCREMENT_NB_SEPARATES : Cross-references table full"); end Increment_Nb_Separates; procedure Add_Usage (Decl_Comp_Unit : in ContextManager.UnitDescriptors; Sym : in Dictionary.Symbol; Usage_Comp_Unit : in ContextManager.UnitDescriptors; Pos : in LexTokenManager.Token_Position; Ref_Type : in Character) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Decl_Comp_Unit_P.Table; --# in out SPARK_IO.File_Sys; --# derives Decl_Comp_Unit_P.Table from *, --# Decl_Comp_Unit, --# Dictionary.Dict, --# Pos, --# Ref_Type, --# Sym, --# Usage_Comp_Unit & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Decl_Comp_Unit, --# Decl_Comp_Unit_P.Table, --# Dictionary.Dict, --# LexTokenManager.State, --# Sym, --# Usage_Comp_Unit; is I : Decl_Comp_Unit_Item_Index; J : Sym_Item_Index; K : Usage_Comp_Unit_Item_Index; L : Pos_Ref_Type_Item_Index; Decl_Comp_Unit_Item : Decl_Comp_Unit_Item_T; Sym_Item : Sym_Item_T; Usage_Comp_Unit_Item : Usage_Comp_Unit_Item_T; Pos_Ref_Type_Item : Pos_Ref_Type_Item_T; begin if Decl_Comp_Unit /= ContextManager.NullUnit and then not Dictionary.Is_Null_Symbol (Sym) and then Usage_Comp_Unit /= ContextManager.NullUnit then -- Check if the compilation unit (Decl_Comp_Unit) has -- already been declared. I := Search_Decl_Comp_Unit (Decl_Comp_Unit => Decl_Comp_Unit); if I = 0 then -- Add the new compilation unit (Decl_Comp_Unit) in -- Decl_Comp_Unit_P.Table. Decl_Comp_Unit_Item.Decl_Comp_Unit := Decl_Comp_Unit; Decl_Comp_Unit_Item.Nb_Separates := 0; Sym_P.Init (T => Decl_Comp_Unit_Item.Sym); Decl_Comp_Unit_P.Append (New_Val => Decl_Comp_Unit_Item); I := Decl_Comp_Unit_Last; end if; -- Check if the symbol (Sym) has already been declared in -- the compilation unit (Decl_Comp_Unit). J := Search_Sym (Decl_Comp_Unit => I, Sym => Sym); if J = 0 then -- Add the new symbol (Sym) in Decl_Comp_Unit_P.Table -- (I).Sym. Sym_Item.Sym := Sym; Dictionary.Get_SLI_Type (Item => Sym, Result => Sym_Item.Sym_Type); Usage_Comp_Unit_P.Init (T => Sym_Item.Usage_Comp_Unit); --# accept W, 169, Decl_Comp_Unit_P.Table, "Direct updates OK here"; Sym_P.Append (T => Decl_Comp_Unit_P.Table (I).Sym, New_Val => Sym_Item); --# end accept; J := Sym_Last (T => Decl_Comp_Unit_P.Table (I).Sym); end if; -- Check if the compilation unit (Usage_Comp_Unit) has -- already been declared for the symbol (Sym) in the -- compilation unit (Decl_Comp_Unit). K := Search_Usage_Comp_Unit (Decl_Comp_Unit => I, Sym => J, Usage_Comp_Unit => Usage_Comp_Unit); if K = 0 then -- Add the new compilation unit (Usage_Comp_Unit) in -- Decl_Comp_Unit_P.Table (I).Sym.Table -- (J).Usage_Comp_Unit. 
Usage_Comp_Unit_Item.Usage_Comp_Unit := Usage_Comp_Unit; Usage_Comp_Unit_Item.Sorted := False; Pos_Ref_Type_P.Init (T => Usage_Comp_Unit_Item.Pos_Ref_Type); --# accept W, 169, Decl_Comp_Unit_P.Table, "Direct updates OK here"; Usage_Comp_Unit_P.Append (T => Decl_Comp_Unit_P.Table (I).Sym.Table (J).Usage_Comp_Unit, New_Val => Usage_Comp_Unit_Item); --# end accept; K := Usage_Comp_Unit_Last (T => Decl_Comp_Unit_P.Table (I).Sym.Table (J).Usage_Comp_Unit); end if; -- Check if the new position and reference type -- (Pos_Ref_Type_Item) has already been declared for the -- compilation unit (Usage_Comp_Unit) in the symbol (Sym) in -- the compilation unit (Decl_Comp_Unit). Pos_Ref_Type_Item := Pos_Ref_Type_Item_T'(Pos => Pos, Ref_Type => Ref_Type); L := Search_Pos_Ref_Type (Decl_Comp_Unit => I, Sym => J, Usage_Comp_Unit => K, Pos_Ref_Type => Pos_Ref_Type_Item); if L = 0 then -- Add the new position and reference type (Pos_Ref_Type) -- in Decl_Comp_Unit_P.Table (I).Sym.Table -- (J).Usage_Comp_Unit.Table (K).Pos_Ref_Type. --# accept W, 169, Decl_Comp_Unit_P.Table, "Direct updates OK here"; Pos_Ref_Type_P.Append (T => Decl_Comp_Unit_P.Table (I).Sym.Table (J).Usage_Comp_Unit.Table (K).Pos_Ref_Type, New_Val => Pos_Ref_Type_Item); --# end accept; end if; end if; exception --# hide Add_Usage; when Storage_Error => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.XRef_Table_Full, Msg => "SLI.XREF.ADD_USAGE : Cross-references table full"); end Add_Usage; -- Cleanup Decl_Comp_Unit_P.Table (Decl_Comp_Unit).Sym.Table -- (Sym).Usage_Comp_Unit.Table (Usage_Comp_Unit).Pos_Ref_Type. procedure Cleanup_Pos_Ref_Type (Decl_Comp_Unit : in Decl_Comp_Unit_Item_Index; Sym : in Sym_Item_Index; Usage_Comp_Unit : in Usage_Comp_Unit_Item_Index) --# global in out Decl_Comp_Unit_P.Table; --# derives Decl_Comp_Unit_P.Table from *, --# Decl_Comp_Unit, --# Sym, --# Usage_Comp_Unit; is begin if Decl_Comp_Unit_P.Table (Decl_Comp_Unit).Sym.Table (Sym).Usage_Comp_Unit.Table (Usage_Comp_Unit).Usage_Comp_Unit /= ContextManager.NullUnit then --# accept W, 169, Decl_Comp_Unit_P.Table, "Direct updates OK here"; Decl_Comp_Unit_P.Table (Decl_Comp_Unit).Sym.Table (Sym).Usage_Comp_Unit.Table (Usage_Comp_Unit).Usage_Comp_Unit := ContextManager.NullUnit; --# end accept; Pos_Ref_Type_P.Free (T => Decl_Comp_Unit_P.Table (Decl_Comp_Unit).Sym.Table (Sym).Usage_Comp_Unit.Table (Usage_Comp_Unit).Pos_Ref_Type); --# accept W, 169, Decl_Comp_Unit_P.Table, "Direct updates OK here"; Decl_Comp_Unit_P.Table (Decl_Comp_Unit).Sym.Table (Sym).Usage_Comp_Unit.Table (Usage_Comp_Unit).Sorted := False; --# end accept; end if; end Cleanup_Pos_Ref_Type; -- Cleanup Decl_Comp_Unit_P.Table (Decl_Comp_Unit).Sym.Table -- (Sym).Usage_Comp_Unit and all subtables. 
procedure Cleanup_Usage_Comp_Unit (Decl_Comp_Unit : in Decl_Comp_Unit_Item_Index; Sym : in Sym_Item_Index) --# global in out Decl_Comp_Unit_P.Table; --# derives Decl_Comp_Unit_P.Table from *, --# Decl_Comp_Unit, --# Sym; is Top_Item : Usage_Comp_Unit_Item_Index; I : Usage_Comp_Unit_Item_Index; begin if not Dictionary.Is_Null_Symbol (Decl_Comp_Unit_P.Table (Decl_Comp_Unit).Sym.Table (Sym).Sym) then Top_Item := Usage_Comp_Unit_Last (T => Decl_Comp_Unit_P.Table (Decl_Comp_Unit).Sym.Table (Sym).Usage_Comp_Unit); I := 1; while I <= Top_Item loop Cleanup_Pos_Ref_Type (Decl_Comp_Unit => Decl_Comp_Unit, Sym => Sym, Usage_Comp_Unit => I); I := I + 1; end loop; --# accept W, 169, Decl_Comp_Unit_P.Table, "Direct updates OK here"; Decl_Comp_Unit_P.Table (Decl_Comp_Unit).Sym.Table (Sym).Sym := Dictionary.NullSymbol; --# end accept; Usage_Comp_Unit_P.Free (T => Decl_Comp_Unit_P.Table (Decl_Comp_Unit).Sym.Table (Sym).Usage_Comp_Unit); end if; end Cleanup_Usage_Comp_Unit; -- Cleanup Decl_Comp_Unit_P.Table (Decl_Comp_Unit).Sym and all -- subtables. procedure Cleanup_Sym (Decl_Comp_Unit : in Decl_Comp_Unit_Item_Index) --# global in out Decl_Comp_Unit_P.Table; --# derives Decl_Comp_Unit_P.Table from *, --# Decl_Comp_Unit; is Top_Item : Sym_Item_Index; I : Sym_Item_Index; begin if Decl_Comp_Unit_P.Table (Decl_Comp_Unit).Decl_Comp_Unit /= ContextManager.NullUnit then Top_Item := Sym_Last (T => Decl_Comp_Unit_P.Table (Decl_Comp_Unit).Sym); I := 1; while I <= Top_Item loop Cleanup_Usage_Comp_Unit (Decl_Comp_Unit => Decl_Comp_Unit, Sym => I); I := I + 1; end loop; --# accept W, 169, Decl_Comp_Unit_P.Table, "Direct updates OK here"; Decl_Comp_Unit_P.Table (Decl_Comp_Unit).Decl_Comp_Unit := ContextManager.NullUnit; Decl_Comp_Unit_P.Table (Decl_Comp_Unit).Nb_Separates := 0; --# end accept; Sym_P.Free (T => Decl_Comp_Unit_P.Table (Decl_Comp_Unit).Sym); end if; end Cleanup_Sym; -- Cleanup Decl_Comp_Unit_P.Table and all subtables. 
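   -- The Cleanup_* procedures cascade top-down:
   --
   --    Cleanup_Decl_Comp_Unit
   --       -> Cleanup_Sym               (one declaration unit)
   --          -> Cleanup_Usage_Comp_Unit   (one symbol)
   --             -> Cleanup_Pos_Ref_Type      (one usage unit)
   --
   -- with each level resetting its discriminating field to the null value
   -- and freeing its own dynamic table before the enclosing level frees
   -- the table that contains it.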
procedure Cleanup_Decl_Comp_Unit --# global in out Decl_Comp_Unit_P.Table; --# derives Decl_Comp_Unit_P.Table from *; is Top_Item : Decl_Comp_Unit_Item_Index; I : Decl_Comp_Unit_Item_Index; begin Top_Item := Decl_Comp_Unit_Last; I := 1; while I <= Top_Item loop Cleanup_Sym (Decl_Comp_Unit => I); I := I + 1; end loop; Decl_Comp_Unit_P.Free; Decl_Comp_Unit_P.Init; end Cleanup_Decl_Comp_Unit; procedure Dump (Comp_Unit : in ContextManager.UnitDescriptors) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Decl_Comp_Unit_P.Table; --# in out SPARK_IO.File_Sys; --# out IO.Stream_Buffer; --# derives Decl_Comp_Unit_P.Table from *, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# LexTokenManager.State & --# IO.Stream_Buffer from Comp_Unit, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Decl_Comp_Unit_P.Table, --# Dictionary.Dict, --# LexTokenManager.State & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Comp_Unit, --# ContextManager.Ops.Unit_Heap, --# Decl_Comp_Unit_P.Table, --# Dictionary.Dict, --# LexTokenManager.State; is Top_Item_I : Decl_Comp_Unit_Item_Index; Top_Item_J : Sym_Item_Index; Top_Item_K : Usage_Comp_Unit_Item_Index; I : Decl_Comp_Unit_Item_Index; J : Sym_Item_Index; K : Usage_Comp_Unit_Item_Index; Ref_Count : Natural; Write_Decl_Comp_Unit : Boolean; Write_Sym : Boolean; Unit_Name : LexTokenLists.Lists; Dummy_Unit_Name : LexTokenLists.Lists; Unit_Type : ContextManager.UnitTypes; Dummy_Unit_Type : ContextManager.UnitTypes; Current_Comp_Unit : ContextManager.UnitDescriptors; Prev_Comp_Unit : ContextManager.UnitDescriptors; Continue : Boolean; -- This procedure dump and cleanup all the usages of the symbol -- (J) declared in the compilation unit (I) and used in the -- compilation unit (K). procedure Dump_Usage (Write_Usage_Comp_Unit : in Boolean; I : in Decl_Comp_Unit_Item_Index; J : in Sym_Item_Index; K : in Usage_Comp_Unit_Item_Index; Write_Decl_Comp_Unit : in out Boolean; Write_Sym : in out Boolean; Ref_Count : in out Natural) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Decl_Comp_Unit_P.Table; --# in out SPARK_IO.File_Sys; --# out IO.Stream_Buffer; --# derives Decl_Comp_Unit_P.Table from *, --# ContextManager.Ops.Unit_Heap, --# Dictionary.Dict, --# I, --# J, --# K, --# Write_Sym & --# IO.Stream_Buffer from ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# Decl_Comp_Unit_P.Table, --# Dictionary.Dict, --# I, --# J, --# K, --# LexTokenManager.State, --# Ref_Count, --# Write_Decl_Comp_Unit, --# Write_Sym, --# Write_Usage_Comp_Unit & --# Ref_Count from *, --# Decl_Comp_Unit_P.Table, --# Dictionary.Dict, --# I, --# J, --# K, --# Write_Sym & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Decl_Comp_Unit_P.Table, --# Dictionary.Dict, --# I, --# J, --# LexTokenManager.State, --# Write_Sym & --# Write_Decl_Comp_Unit, --# Write_Sym from *; is Max_Ref_Count : constant := 10; Str : E_Strings.T; Pos : LexTokenManager.Token_Position; Top_Item_L : Pos_Ref_Type_Item_Index; L : Pos_Ref_Type_Item_Index; Dummy_Unit_Name : LexTokenLists.Lists; Unit_Type : ContextManager.UnitTypes; I_Cleanup : Decl_Comp_Unit_Item_Index; Result : Dictionary.SLI_Type; begin -- Write the name of the declaration compilation unit -- (I). Write it only once! 
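         -- Taken together, the writes in this procedure produce one "X"
         -- section per declaration unit, in the style of the cross-reference
         -- sections of GNAT ALI files.  A hedged sketch of the shape of the
         -- output (line numbers, columns, names and reference letters below
         -- are invented for illustration only):
         --
         --    X 3 pack.ads
         --    10K9 Pack 4|25r8 31r12
         --    12i4 Counter 15m3 22r10
         --
         -- i.e. a header naming the declaring file, then one entry per
         -- symbol of the form <decl line><type letter><decl column> <name>,
         -- followed by its usages, where the "<unit line>|" prefix is written
         -- only when the using unit differs from the declaring one.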
if Write_Decl_Comp_Unit then IO.Put_String (Item => "X "); IO.Put_Integer (Item => ContextManager.Ops.Get_Line_Number (Descriptor => Decl_Comp_Unit_P.Table (I).Decl_Comp_Unit)); IO.Put_Char (Item => ' '); Str := LexTokenManager.Lex_String_To_String (Lex_Str => ContextManager.Ops.GetSourceFileName (Descriptor => ContextManager.Ops.Get_File_Descriptor (Unit_Descriptor => Decl_Comp_Unit_P.Table (I).Decl_Comp_Unit))); IO.E_Strings_Put_String (E_Str => FileSystem.Just_File (Fn => Str, Ext => True)); IO.New_Line; Write_Decl_Comp_Unit := False; end if; --# assert True; -- Write the name of the symbol (J) declared in the -- compilation unit (I). Write it only once per declaration -- compilation unit (I)! if Write_Sym then Pos := Dictionary.Get_Symbol_Location (Item => Decl_Comp_Unit_P.Table (I).Sym.Table (J).Sym); IO.Put_Integer (Item => Integer (Pos.Start_Line_No)); if Decl_Comp_Unit_P.Table (I).Sym.Table (J).Sym_Type = Dictionary.SLI_Unknown_Type then -- The type was not yet known when the cross-reference -- has been added. Dictionary.Get_SLI_Type (Item => Decl_Comp_Unit_P.Table (I).Sym.Table (J).Sym, Result => Result); if Result = Dictionary.SLI_Unknown_Type then -- At this point, the type must be known because we -- are dumping the SLI file. If it is still -- unknown, the symbol is assumed to be an abstract -- type. It is the best that we can do here. Result := Dictionary.SLI_Abstract_Type; end if; if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.New_Line (File => SPARK_IO.Standard_Output, Spacing => 1); end if; --# accept W, 169, Decl_Comp_Unit_P.Table, "Direct updates OK here"; Decl_Comp_Unit_P.Table (I).Sym.Table (J).Sym_Type := Result; --# end accept; end if; IO.Put_Char (Item => SLI_Type_Char_Assoc (Decl_Comp_Unit_P.Table (I).Sym.Table (J).Sym_Type)); IO.Put_Integer (Item => Pos.Start_Pos); IO.Put_Char (Item => ' '); Str := LexTokenManager.Lex_String_To_String (Lex_Str => Dictionary.GetSimpleName (Item => Decl_Comp_Unit_P.Table (I).Sym.Table (J).Sym)); IO.E_Strings_Put_String (E_Str => Str); Write_Sym := False; end if; --# assert True; -- Prevent to have very long lines of cross-references. if Ref_Count > Max_Ref_Count then IO.New_Line; IO.Put_Char (Item => '.'); Ref_Count := 0; end if; IO.Put_Char (Item => ' '); -- Write the line number of the usage compilation unit -- (K). It is only needed if the declaration compilation -- unit (I) is different from the usage compilation unit -- (K). if Write_Usage_Comp_Unit and then Decl_Comp_Unit_P.Table (I).Sym.Table (J).Usage_Comp_Unit.Table (K).Usage_Comp_Unit /= ContextManager.NullUnit then IO.Put_Integer (Item => ContextManager.Ops.Get_Line_Number (Descriptor => Decl_Comp_Unit_P.Table (I).Sym.Table (J).Usage_Comp_Unit.Table (K).Usage_Comp_Unit)); IO.Put_Char (Item => '|'); end if; --# assert True; -- Dump all the usages of the symbol (J) declared in the -- compilation unit (I) and used in the compilation unit -- (K). Top_Item_L := Pos_Ref_Type_Last (T => Decl_Comp_Unit_P.Table (I).Sym.Table (J).Usage_Comp_Unit.Table (K).Pos_Ref_Type); L := 1; -- Sort all the usages of the symbol (J) declared in the -- compilation unit (I) and used in the compilation unit -- (K). Optimisation : Sort the table only once. 
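            -- For instance (hypothetical entries), usages recorded in the
            -- order 12r3, 10r5, 10m20 come out of the sort as
            -- "10m20 10r5 12r3": Lt_Pos_Ref_Type orders by the reference-type
            -- character first ('m' < 'r'), then by line, then by column.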
if Top_Item_L > 1 and then not Decl_Comp_Unit_P.Table (I).Sym.Table (J).Usage_Comp_Unit.Table (K).Sorted then --# accept W, 169, Decl_Comp_Unit_P.Table, "Direct updates OK here"; Sort_Pos_Ref_Type_Table (Table => Decl_Comp_Unit_P.Table (I).Sym.Table (J).Usage_Comp_Unit.Table (K).Pos_Ref_Type); Decl_Comp_Unit_P.Table (I).Sym.Table (J).Usage_Comp_Unit.Table (K).Sorted := True; --# end accept; end if; while L <= Top_Item_L loop -- Prevent to have very long lines of cross-references. if Ref_Count > Max_Ref_Count then IO.New_Line; IO.Put_Char (Item => '.'); Ref_Count := 0; end if; if L /= 1 then IO.Put_Char (Item => ' '); end if; -- Dump the usage (L) of the symbol (J) declared in the -- compilation unit (I) and used in the compilation unit -- (K). IO.Put_Integer (Item => Integer (Decl_Comp_Unit_P.Table (I).Sym.Table (J).Usage_Comp_Unit.Table (K).Pos_Ref_Type.Table (L).Pos.Start_Line_No)); IO.Put_Char (Item => Decl_Comp_Unit_P.Table (I).Sym.Table (J).Usage_Comp_Unit.Table (K).Pos_Ref_Type.Table (L).Ref_Type); IO.Put_Integer (Item => Decl_Comp_Unit_P.Table (I).Sym.Table (J).Usage_Comp_Unit.Table (K).Pos_Ref_Type.Table (L).Pos.Start_Pos); Ref_Count := Ref_Count + 1; L := L + 1; end loop; --# assert True; -- Cleanup all the usages of the symbol (J) declared in the -- compilation unit (I) and used in the compilation unit -- (K). if Decl_Comp_Unit_P.Table (I).Sym.Table (J).Usage_Comp_Unit.Table (K).Usage_Comp_Unit /= ContextManager.NullUnit then --# accept F, 10, Dummy_Unit_Name, "Ineffective assignment here OK"; ContextManager.Ops.GetUnitName (Descriptor => Decl_Comp_Unit_P.Table (I).Sym.Table (J).Usage_Comp_Unit.Table (K).Usage_Comp_Unit, UnitName => Dummy_Unit_Name, UnitType => Unit_Type); --# end accept; if Unit_Type = ContextManager.MainProgram or else Unit_Type = ContextManager.PackageBody or else Unit_Type = ContextManager.SubUnit then -- Never cleanup a specification. I_Cleanup := Search_Decl_Comp_Unit (Decl_Comp_Unit => Decl_Comp_Unit_P.Table (I).Sym.Table (J).Usage_Comp_Unit.Table (K).Usage_Comp_Unit); -- Check if there are separates for this compilation -- unit. If there are no separates, we can cleanup the -- table of cross-references but if there are still -- separates, we need to keep the table of cross -- references until all the separates has been dumped. if I_Cleanup = 0 or else (I_Cleanup /= 0 and then Decl_Comp_Unit_P.Table (I_Cleanup).Nb_Separates = 0) then Cleanup_Pos_Ref_Type (Decl_Comp_Unit => I, Sym => J, Usage_Comp_Unit => K); end if; end if; end if; --# accept F, 33, Dummy_Unit_Name , "Dummy_Unit_Name not referenced here"; end Dump_Usage; begin -- Optimisation : prepare the cleanup of the cross-references -- table. The goal of this preparation is to be able to cleanup -- the cross-references table just after dumping the -- cross-references info into the SLI file. This code -- calculates the field NB_SEPARATES as it will be after the -- dump of the compilation unit. -- -- Start the calculation with the compilation unit for wich the -- dump is performed. Current_Comp_Unit := Comp_Unit; Continue := True; while Continue loop if Current_Comp_Unit /= ContextManager.NullUnit then --# accept F, 10, Dummy_Unit_Name, "Ineffective assignment here OK"; ContextManager.Ops.GetUnitName (Descriptor => Current_Comp_Unit, UnitName => Dummy_Unit_Name, UnitType => Unit_Type); --# end accept; if Unit_Type = ContextManager.MainProgram or else Unit_Type = ContextManager.PackageBody or else Unit_Type = ContextManager.SubUnit then -- Never cleanup a specification. 
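                  -- Sketch of the bookkeeping below (the package name P is
                  -- hypothetical): a body P with two separates starts with
                  -- Nb_Separates = 2; each time one of its separates is
                  -- dumped the walk decrements the counter, and P's
                  -- cross-reference tables are only freed once the counter
                  -- has reached zero, i.e. once the last separate has been
                  -- dumped.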
I := Search_Decl_Comp_Unit (Decl_Comp_Unit => Current_Comp_Unit); if I = 0 or else (I /= 0 and then Decl_Comp_Unit_P.Table (I).Nb_Separates = 0) then -- The current compilation unit doesn't have any -- separates or all the separates has been dumped. if Unit_Type = ContextManager.SubUnit then -- The current compilation unit is a separate -- that doesn't have any separates or that all -- the separates has been dumped => CLEANUP the -- cross-references for this seprate and get its -- parent. Prev_Comp_Unit := Current_Comp_Unit; ContextManager.Ops.Get_Parent (Unit_Descriptor => Current_Comp_Unit); -- The new current compilation unit becomes now -- the parent of the previous current -- compilation unit. I := Search_Decl_Comp_Unit (Decl_Comp_Unit => Current_Comp_Unit); if I /= 0 and then Decl_Comp_Unit_P.Table (I).Nb_Separates > 0 then -- Decrement the number of separates of the -- new current compilation unit (= the parent -- of the previous current compilation unit). --# accept W, 169, Decl_Comp_Unit_P.Table, "Direct updates OK here"; Decl_Comp_Unit_P.Table (I).Nb_Separates := Decl_Comp_Unit_P.Table (I).Nb_Separates - 1; --# end accept; else -- The field NB_SEPARATES of the new current -- compilation unit (= the parent of a -- previous current compilation unit) must be -- at least 1 => STOP. --# accept F, 10, Dummy_Unit_Type, "Ineffective assignment here OK"; ContextManager.Ops.GetUnitName (Descriptor => Prev_Comp_Unit, UnitName => Unit_Name, UnitType => Dummy_Unit_Type); --# end accept; LexTokenLists.Print_List (File => SPARK_IO.Standard_Output, List => Unit_Name); SPARK_IO.New_Line (File => SPARK_IO.Standard_Output, Spacing => 1); if I /= 0 then --# accept F, 10, Dummy_Unit_Type, "Ineffective assignment here OK"; ContextManager.Ops.GetUnitName (Descriptor => Current_Comp_Unit, UnitName => Unit_Name, UnitType => Dummy_Unit_Type); --# end accept; LexTokenLists.Print_List (File => SPARK_IO.Standard_Output, List => Unit_Name); SPARK_IO.New_Line (File => SPARK_IO.Standard_Output, Spacing => 1); end if; Continue := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.XREF.DUMP PROGRAM ERROR"); end if; else -- The current compilation unit is not a -- separate. The current compilation unit -- doesn't have any separates or all the -- separates has been dumped => CLEANUP the -- cross-references for this body. Continue := False; end if; else -- There are still separates for the current -- compilation unit that need to be dumped => KEEP -- the cross-references for this body. Continue := False; end if; else -- The current compilation unit is a spec => KEEP the -- cross-references for this spec. Continue := False; end if; else -- The current compilation unit is null => STOP. Continue := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "SLI.XREF.DUMP PROGRAM ERROR"); end if; end loop; IO.New_Line; -- Dump all the usages of all the symbols declared in all the -- compilation units and used in all compilation units. All the -- declaration compilation units and all the usage compilation -- units must be in the closure. 
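      -- In outline, the nested loops below do (sketch only):
      --
      --    for each declaration unit I in the closure loop
      --       for each symbol J declared in I loop
      --          dump J's usages inside I itself first (no "unit|" prefix),
      --          then its usages in every other unit K in the closure;
      --       end loop;
      --       clean up I's tables unless I is a spec or separates of I
      --       are still pending;
      --    end loop;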
Top_Item_I := Decl_Comp_Unit_Last; I := 1; if Top_Item_I > 1 then Sort_Decl_Comp_Unit_Table; end if; while I <= Top_Item_I loop if Decl_Comp_Unit_P.Table (I).Decl_Comp_Unit /= ContextManager.NullUnit and then ContextManager.Ops.In_Closure (Descriptor => Decl_Comp_Unit_P.Table (I).Decl_Comp_Unit) then Write_Decl_Comp_Unit := True; -- Dump all the usages of all the symbols declared in the -- compilation unit (I) and used in all the compilation -- units. Declaration compilation unit (I) and all the -- usage compilation units must be in the closure. Top_Item_J := Sym_Last (T => Decl_Comp_Unit_P.Table (I).Sym); J := 1; if Top_Item_J > 1 then --# accept W, 169, Decl_Comp_Unit_P.Table, "Direct updates OK here"; Sort_Sym_Table (Table => Decl_Comp_Unit_P.Table (I).Sym); --# end accept; end if; while J <= Top_Item_J loop if not Dictionary.Is_Null_Symbol (Decl_Comp_Unit_P.Table (I).Sym.Table (J).Sym) then Write_Sym := True; Ref_Count := 2; -- Dump all the usages of the symbol (J) declared -- in the compilation unit (I) and used in all the -- compilation units. Declaration compilation unit -- (I) and all the usage compilation units must be -- in the closure. -- All the usages of the symbol (J) declared in the -- compilation unit (I) and used in the compilation -- unit where the symbol is declared must appears -- at the beginning of the list of -- cross-references. K := Search_Usage_Comp_Unit (Decl_Comp_Unit => I, Sym => J, Usage_Comp_Unit => Decl_Comp_Unit_P.Table (I).Decl_Comp_Unit); if K /= 0 then -- Dump and cleanup all the usages of the symbol -- (J) declared in the compilation unit (I) and -- used in the compilation unit (K) with the -- declaration compilation unit (I) the same as -- the usage compilation unit (K). Declaration -- compilation unit (I) must be in the closure. Dump_Usage (Write_Usage_Comp_Unit => False, I => I, J => J, K => K, Write_Decl_Comp_Unit => Write_Decl_Comp_Unit, Write_Sym => Write_Sym, Ref_Count => Ref_Count); end if; Top_Item_K := Usage_Comp_Unit_Last (T => Decl_Comp_Unit_P.Table (I).Sym.Table (J).Usage_Comp_Unit); K := 1; if Top_Item_K > 1 then --# accept W, 169, Decl_Comp_Unit_P.Table, "Direct updates OK here"; Sort_Usage_Comp_Unit_Table (Table => Decl_Comp_Unit_P.Table (I).Sym.Table (J).Usage_Comp_Unit); --# end accept; end if; while K <= Top_Item_K loop if Decl_Comp_Unit_P.Table (I).Sym.Table (J).Usage_Comp_Unit.Table (K).Usage_Comp_Unit /= ContextManager.NullUnit and then Decl_Comp_Unit_P.Table (I).Sym.Table (J).Usage_Comp_Unit.Table (K).Usage_Comp_Unit /= Decl_Comp_Unit_P.Table (I).Decl_Comp_Unit and then ContextManager.Ops.In_Closure (Descriptor => Decl_Comp_Unit_P.Table (I).Sym.Table (J).Usage_Comp_Unit.Table (K).Usage_Comp_Unit) then -- Dump and cleanup all the usages of the -- symbol (J) declared in the compilation -- unit (I) and used in the compilation unit -- (K) with the declaration compilation unit -- (I) different from the usage compilation -- unit (K). Declaration compilation unit (I) -- and usage compilation unit (K) must be in -- the closure. Dump_Usage (Write_Usage_Comp_Unit => True, I => I, J => J, K => K, Write_Decl_Comp_Unit => Write_Decl_Comp_Unit, Write_Sym => Write_Sym, Ref_Count => Ref_Count); end if; K := K + 1; end loop; if not Write_Sym then IO.New_Line; end if; J := J + 1; end if; end loop; -- Cleanup all the usages of all the symbols declared in -- the compilation unit (I) and used in all the -- compilation units. 
--# accept F, 10, Dummy_Unit_Name, "Ineffective assignment here OK"; ContextManager.Ops.GetUnitName (Descriptor => Decl_Comp_Unit_P.Table (I).Decl_Comp_Unit, UnitName => Dummy_Unit_Name, UnitType => Unit_Type); --# end accept; if Unit_Type = ContextManager.MainProgram or else Unit_Type = ContextManager.PackageBody or else Unit_Type = ContextManager.SubUnit then -- Never cleanup a specification. if Decl_Comp_Unit_P.Table (I).Nb_Separates = 0 then -- Check if there are separates for this -- compilation unit. If there are no separates, we -- can cleanup the table of cross-references but if -- there are still separates, we need to keep the -- table of cross references until all the -- separates has been dumped. Cleanup_Sym (Decl_Comp_Unit => I); end if; end if; end if; I := I + 1; end loop; --# accept F, 33, Dummy_Unit_Name, "Dummy_Unit_Name not referenced here" & --# F, 33, Dummy_Unit_Type, "Dummy_Unit_Type not referenced here"; end Dump; # if not SPARK then begin Decl_Comp_Unit_P.Init; # end if; end SLI.Xref; spark-2012.0.deb/examiner/sem-compunit-wf_use_type_clause.adb0000644000175000017500000002101511753202336023223 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ---------------------------------------------------------------------------- -- This unit WFFs use type clauses that appear _inside_ a package body only. -- Currently these are not allowed in SPARK83 at all, and in SPARK95, we -- WFF their position (they must directly follow the embedded package to -- which they refer), but report they are otherwise unimplemented. 
-- -- This does NOT WFF use type clauses that appear as part of a context -- clause - these are handled separately by -- Sem.CompUnit.wf_context_clause.use_clause ---------------------------------------------------------------------------- separate (Sem.CompUnit) procedure Wf_Use_Type_Clause (Node : in STree.SyntaxNode) is It : STree.Iterator; Parent_Item_Rep, Next_Node : STree.SyntaxNode; procedure Process_Dotted_Simple_Name (Node : in STree.SyntaxNode; Parent : in STree.SyntaxNode) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Parent, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.dotted_simple_name and --# (Syntax_Node_Type (Parent, STree.Table) = SP_Symbols.initial_declarative_item_rep or --# Syntax_Node_Type (Parent, STree.Table) = SP_Symbols.context_clause_rep); is OK : Boolean; procedure Check_Position (Node_Pos : in LexTokenManager.Token_Position; Parent : in STree.SyntaxNode; Pack_String : in LexTokenManager.Lex_String; Pos_OK : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# Pack_String, --# Parent, --# SPARK_IO.File_Sys, --# STree.Table & --# Pos_OK from LexTokenManager.State, --# Pack_String, --# Parent, --# STree.Table; --# pre Syntax_Node_Type (Parent, STree.Table) = SP_Symbols.initial_declarative_item_rep or --# Syntax_Node_Type (Parent, STree.Table) = SP_Symbols.context_clause_rep; is Ident : LexTokenManager.Lex_String; begin if Syntax_Node_Type (Node => Parent) = SP_Symbols.initial_declarative_item_rep then -- should follow a package declaration Ident := Find_Previous_Package (Node => Parent); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq then ErrorHandler.Semantic_Error (Err_Num => 112, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => LexTokenManager.Null_String); Pos_OK := False; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident, Lex_Str2 => Pack_String) /= LexTokenManager.Str_Eq then ErrorHandler.Semantic_Error (Err_Num => 301, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Ident); Pos_OK := False; else Pos_OK := True; end if; else Pos_OK := False; end if; end Check_Position; begin -- Process_Dotted_Simple_Name Check_Position (Node_Pos => Node_Position (Node => Node), Parent => Parent, Pack_String => Node_Lex_String (Node => Last_Child_Of (Start_Node => Node)), Pos_OK => OK); if OK then -- Position is OK, but alas "use type" is currently unimplemented... -- If this is ever completed, then remember to revise the comment -- at the top of this unit! 
ErrorHandler.Semantic_Error (Err_Num => 110, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; end Process_Dotted_Simple_Name; begin -- Wf_Use_Type_Clause case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => ErrorHandler.Semantic_Error (Err_Num => 550, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); when CommandLineData.SPARK95_Onwards => -- Could be "use type E.T1, E.T2;" so we need to loop and check -- the position of each type mark. Parent_Item_Rep := Parent_Node (Current_Node => Node); -- ASSUME Parent_Item_Rep = initial_declarative_item_rep OR context_clause_rep SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Parent_Item_Rep) = SP_Symbols.initial_declarative_item_rep or else Syntax_Node_Type (Node => Parent_Item_Rep) = SP_Symbols.context_clause_rep, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Parent_Item_Rep = initial_declarative_item_rep OR context_clause_rep in Wf_Use_Type_Clause"); It := Find_First_Node (Node_Kind => SP_Symbols.dotted_simple_name, From_Root => Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.dotted_simple_name and --# Next_Node = Get_Node (It) and --# (Syntax_Node_Type (Parent_Item_Rep, STree.Table) = SP_Symbols.initial_declarative_item_rep or --# Syntax_Node_Type (Parent_Item_Rep, STree.Table) = SP_Symbols.context_clause_rep); Process_Dotted_Simple_Name (Node => Next_Node, Parent => Parent_Item_Rep); It := STree.NextNode (It); end loop; end case; end Wf_Use_Type_Clause; spark-2012.0.deb/examiner/sem-wf_generic_formal_part.adb0000644000175000017500000006337111753202336022213 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with SLI; separate (Sem) procedure Wf_Generic_Formal_Part (Node : in STree.SyntaxNode; Generic_Ident_Node_Pos : in LexTokenManager.Token_Position; Generic_Unit : in Dictionary.Symbol; Package_Or_Subprogram_Symbol : in Dictionary.Symbol) is Object_Or_Type_Node : STree.SyntaxNode; -------------------------------------------------------------------------------- procedure Wf_Formal_Object (Node : in STree.SyntaxNode; Generic_Ident_Node_Pos : in LexTokenManager.Token_Position; Generic_Unit : in Dictionary.Symbol; Package_Or_Subprogram_Symbol : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Generic_Unit, --# LexTokenManager.State, --# Node, --# Package_Or_Subprogram_Symbol, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Generic_Ident_Node_Pos, --# Generic_Unit, --# LexTokenManager.State, --# Node, --# Package_Or_Subprogram_Symbol, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.formal_object_declaration; --# post STree.Table = STree.Table~; is Ident_List_Node : STree.SyntaxNode; Mode_Node : STree.SyntaxNode; Type_Node : STree.SyntaxNode; Formal_Type : Dictionary.Symbol; -------------------------------------------------------------------------------- procedure Process_Identifiers (Node : in STree.SyntaxNode; Type_Sym : in Dictionary.Symbol; Generic_Ident_Node_Pos : in LexTokenManager.Token_Position; Generic_Unit : in Dictionary.Symbol; Package_Or_Subprogram_Symbol : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Generic_Unit, --# LexTokenManager.State, --# Node, --# Package_Or_Subprogram_Symbol, --# STree.Table, --# Type_Sym & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Generic_Ident_Node_Pos, --# Generic_Unit, --# LexTokenManager.State, --# Node, --# Package_Or_Subprogram_Symbol, --# SPARK_IO.File_Sys, --# STree.Table, --# Type_Sym; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.identifier_list; --# post STree.Table = STree.Table~; is It : STree.Iterator; Next_Node : STree.SyntaxNode; Ident_Str : LexTokenManager.Lex_String; The_Object : Dictionary.Symbol; The_Duplicate : Dictionary.Symbol; begin It := Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.identifier and --# Next_Node = Get_Node (It) and --# STree.Table = STree.Table~; Ident_Str := Node_Lex_String (Node => Next_Node); The_Duplicate := Dictionary.LookupItem (Name => Ident_Str, 
Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Package_Or_Subprogram_Symbol), Context => Dictionary.ProofContext, Full_Package_Name => False); if The_Duplicate = Package_Or_Subprogram_Symbol then ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Generic_Ident_Node_Pos, Id_Str => Ident_Str); elsif not Dictionary.Is_Null_Symbol (The_Duplicate) then ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Ident_Str); else Dictionary.Add_Generic_Object (Name => Ident_Str, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Next_Node), End_Position => Node_Position (Node => Next_Node)), Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Generic_Unit), The_Type => Type_Sym, The_Object_Sym => The_Object); STree.Add_Node_Symbol (Node => Next_Node, Sym => The_Object); Dictionary.Add_Generic_Formal_Parameter (Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Next_Node), End_Position => Node_Position (Node => Next_Node)), Generic_Unit => Generic_Unit, The_Type => Dictionary.NullSymbol, The_Object => The_Object); end if; It := STree.NextNode (It); end loop; end Process_Identifiers; begin -- Wf_Formal_Object Ident_List_Node := Child_Node (Current_Node => Node); -- ASSUME Ident_List_Node = identifier_list SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_List_Node) = SP_Symbols.identifier_list, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_List_Node = identifier_list in Wf_Formal_Object"); Mode_Node := Child_Node (Current_Node => Next_Sibling (Current_Node => Ident_List_Node)); -- ASSUME Mode_Node = in_mode OR inout_mode OR out_mode OR NULL SystemErrors.RT_Assert (C => Mode_Node = STree.NullNode or else Syntax_Node_Type (Node => Mode_Node) = SP_Symbols.in_mode or else Syntax_Node_Type (Node => Mode_Node) = SP_Symbols.inout_mode or else Syntax_Node_Type (Node => Mode_Node) = SP_Symbols.out_mode, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Mode_Node = in_mode OR inout_mode OR out_mode OR NULL in Wf_Formal_Object"); Type_Node := Next_Sibling (Current_Node => Next_Sibling (Current_Node => Ident_List_Node)); -- ASSUME Type_Node = type_mark SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Type_Node) = SP_Symbols.type_mark, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Type_Node = type_mark in Wf_Formal_Object"); if Syntax_Node_Type (Node => Mode_Node) = SP_Symbols.inout_mode or else Syntax_Node_Type (Node => Mode_Node) = SP_Symbols.out_mode then ErrorHandler.Semantic_Error (Err_Num => 639, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_List_Node), Id_Str => LexTokenManager.Null_String); end if; Wf_Type_Mark (Node => Type_Node, Current_Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Generic_Unit), Context => Dictionary.ProgramContext, Type_Sym => Formal_Type); if not Dictionary.IsUnknownTypeMark (Formal_Type) then Process_Identifiers (Node => Ident_List_Node, Type_Sym => Formal_Type, Generic_Ident_Node_Pos => Generic_Ident_Node_Pos, Generic_Unit => Generic_Unit, Package_Or_Subprogram_Symbol => Package_Or_Subprogram_Symbol); end if; end Wf_Formal_Object; -------------------------------------------------------------------------------- 
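-- Wf_Formal_Type checks a single formal type declaration in a generic formal -- part. The declared name must not clash with the name of the enclosing -- generic unit or with any existing declaration; otherwise the formal type is -- added to the Dictionary according to its definition kind (private, discrete, -- signed integer, modular, floating point, ordinary fixed point or array) and -- recorded as a generic formal parameter of the generic unit.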
procedure Wf_Formal_Type (Node : in STree.SyntaxNode; Generic_Ident_Node_Pos : in LexTokenManager.Token_Position; Generic_Unit : in Dictionary.Symbol; Package_Or_Subprogram_Symbol : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Generic_Unit, --# LexTokenManager.State, --# Node, --# Package_Or_Subprogram_Symbol, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Generic_Ident_Node_Pos, --# Generic_Unit, --# LexTokenManager.State, --# Node, --# Package_Or_Subprogram_Symbol, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table & --# SLI.State from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Generic_Unit, --# LexTokenManager.State, --# Node, --# Package_Or_Subprogram_Symbol, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.formal_type_declaration; --# post STree.Table = STree.Table~; is Ident_Node : STree.SyntaxNode; Type_Node : STree.SyntaxNode; Ident_Str : LexTokenManager.Lex_String; Declaration : Dictionary.Location; Type_Mark : Dictionary.Symbol; The_Duplicate : Dictionary.Symbol; begin Ident_Node := Child_Node (Current_Node => Node); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Wf_Formal_Type"); Ident_Str := Node_Lex_String (Node => Ident_Node); -- check whether name already in use The_Duplicate := Dictionary.LookupItem (Name => Ident_Str, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Package_Or_Subprogram_Symbol), Context => Dictionary.ProofContext, Full_Package_Name => False); if The_Duplicate = Package_Or_Subprogram_Symbol then ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Generic_Ident_Node_Pos, Id_Str => Ident_Str); elsif not Dictionary.Is_Null_Symbol (The_Duplicate) then ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); else Type_Node := Child_Node (Current_Node => Next_Sibling (Current_Node => Ident_Node)); -- ASSUME Type_Node = formal_private_type_definition OR formal_discrete_type_definition OR -- formal_signed_integer_type_definition OR formal_modular_type_definition OR -- formal_floating_point_definition OR formal_ordinary_fixed_point_definition OR -- formal_array_type_definition Declaration := Dictionary.Location' (Start_Position => Node_Position (Node => Ident_Node), End_Position => Node_Position (Node => Ident_Node)); case Syntax_Node_Type (Node => Type_Node) is when SP_Symbols.formal_private_type_definition => -- ASSUME Type_Node = formal_private_type_definition Dictionary.Add_Generic_Type
(Name => Ident_Str, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Declaration, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Generic_Unit), The_Type => Type_Mark); STree.Add_Node_Symbol (Node => Ident_Node, Sym => Type_Mark); Dictionary.Set_Generic_Private_Type (The_Type => Type_Mark, Is_Limited => (Syntax_Node_Type (Node => Child_Node (Current_Node => Type_Node)) = SP_Symbols.RWlimited)); when SP_Symbols.formal_discrete_type_definition => -- ASSUME Type_Node = formal_discrete_type_definition Dictionary.Add_Generic_Type (Name => Ident_Str, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Declaration, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Generic_Unit), The_Type => Type_Mark); STree.Add_Node_Symbol (Node => Ident_Node, Sym => Type_Mark); Dictionary.Set_Generic_Discrete_Type (The_Type => Type_Mark); when SP_Symbols.formal_signed_integer_type_definition => -- ASSUME Type_Node = formal_signed_integer_type_definition Dictionary.Add_Generic_Type (Name => Ident_Str, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Declaration, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Generic_Unit), The_Type => Type_Mark); STree.Add_Node_Symbol (Node => Ident_Node, Sym => Type_Mark); Dictionary.Set_Generic_Integer_Type (The_Type => Type_Mark); when SP_Symbols.formal_modular_type_definition => -- ASSUME Type_Node = formal_modular_type_definition Dictionary.Add_Generic_Type (Name => Ident_Str, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Declaration, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Generic_Unit), The_Type => Type_Mark); STree.Add_Node_Symbol (Node => Ident_Node, Sym => Type_Mark); Dictionary.Set_Generic_Modular_Type (The_Type => Type_Mark); when SP_Symbols.formal_floating_point_definition => -- ASSUME Type_Node = formal_floating_point_definition Dictionary.Add_Generic_Type (Name => Ident_Str, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Declaration, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Generic_Unit), The_Type => Type_Mark); STree.Add_Node_Symbol (Node => Ident_Node, Sym => Type_Mark); Dictionary.Set_Generic_Floating_Point_Type (The_Type => Type_Mark); when SP_Symbols.formal_ordinary_fixed_point_definition => -- ASSUME Type_Node = formal_ordinary_fixed_point_definition Dictionary.Add_Generic_Type (Name => Ident_Str, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Declaration, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Generic_Unit), The_Type => Type_Mark); STree.Add_Node_Symbol (Node => Ident_Node, Sym => Type_Mark); Dictionary.Set_Generic_Fixed_Point_Type (The_Type => Type_Mark); when SP_Symbols.formal_array_type_definition => -- ASSUME Type_Node = formal_array_type_definition Type_Node := Child_Node (Current_Node => Type_Node); -- ASSUME Type_Node = array_type_definition SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Type_Node) = SP_Symbols.array_type_definition, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Type_Node = array_type_definition in Wf_Formal_Type"); Wf_Array_Type_Definition (Node => Type_Node, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Generic_Unit), Ident_Node => Ident_Node, Dec_Loc => Node_Position (Node => Node), The_Array => Type_Mark); Dictionary.Set_Generic_Array_Type 
(The_Type => Type_Mark); when others => Type_Mark := Dictionary.GetUnknownTypeMark; -- just to avoid data flow error SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Type_Node = formal_private_type_definition OR formal_discrete_type_definition OR " & "formal_signed_integer_type_definition OR formal_modular_type_definition OR " & "formal_floating_point_definition OR formal_ordinary_fixed_point_definition OR " & "formal_array_type_definition in Wf_Formal_Type"); end case; if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Symbol (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Ident_Node, Symbol => Type_Mark, Is_Declaration => True); end if; Dictionary.Add_Generic_Formal_Parameter (Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Declaration, Generic_Unit => Generic_Unit, The_Type => Type_Mark, The_Object => Dictionary.NullSymbol); end if; end Wf_Formal_Type; begin -- Wf_Generic_Formal_Part Object_Or_Type_Node := Next_Sibling (Current_Node => Last_Child_Of (Start_Node => Node)); while Syntax_Node_Type (Node => Object_Or_Type_Node) = SP_Symbols.formal_object_declaration or else Syntax_Node_Type (Node => Object_Or_Type_Node) = SP_Symbols.formal_type_declaration loop --# assert STree.Table = STree.Table~ and --# (Syntax_Node_Type (Object_Or_Type_Node, STree.Table) = SP_Symbols.formal_object_declaration or --# Syntax_Node_Type (Object_Or_Type_Node, STree.Table) = SP_Symbols.formal_type_declaration); case Syntax_Node_Type (Node => Object_Or_Type_Node) is when SP_Symbols.formal_object_declaration => -- ASSUME Object_Or_Type_Node = formal_object_declaration Wf_Formal_Object (Node => Object_Or_Type_Node, Generic_Ident_Node_Pos => Generic_Ident_Node_Pos, Generic_Unit => Generic_Unit, Package_Or_Subprogram_Symbol => Package_Or_Subprogram_Symbol); when SP_Symbols.formal_type_declaration => -- ASSUME Object_Or_Type_Node = formal_type_declaration Wf_Formal_Type (Node => Object_Or_Type_Node, Generic_Ident_Node_Pos => Generic_Ident_Node_Pos, Generic_Unit => Generic_Unit, Package_Or_Subprogram_Symbol => Package_Or_Subprogram_Symbol); when others => null; end case; Object_Or_Type_Node := Next_Sibling (Current_Node => Parent_Node (Current_Node => Object_Or_Type_Node)); end loop; end Wf_Generic_Formal_Part; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_positional_record_component_association.adb0000644000175000017500000002004111753202336032123 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Walk_Expression_P) procedure Wf_Positional_Record_Component_Association (Node : in out STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type) is Name_Exp, Exp_Result : Sem.Exp_Record; Expected_Type : Dictionary.Symbol; -------------------------------------------------------------- procedure Chain_Up_To_Component_Association (Node : in out STree.SyntaxNode) --# global in STree.Table; --# derives Node from *, --# STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.positional_record_component_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_positional_record_component_association; --# post STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.record_component_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_record_component_association; is begin while STree.Syntax_Node_Type (Node => Node) /= SP_Symbols.record_component_association and then STree.Syntax_Node_Type (Node => Node) /= SP_Symbols.annotation_record_component_association loop --# assert STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.positional_record_component_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_positional_record_component_association; Node := STree.Parent_Node (Current_Node => Node); -- ASSUME Node = annotation_record_component_association OR annotation_positional_record_component_association OR -- record_component_association OR positional_record_component_association SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.record_component_association or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.positional_record_component_association or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_record_component_association or else STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_positional_record_component_association, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = annotation_record_component_association OR " & "annotation_positional_record_component_association OR record_component_association OR " & "positional_record_component_association in Chain_Up_To_Component_Association"); end loop; end Chain_Up_To_Component_Association; ------------------------------------------------------------------- function Expression_Node (Association_Node : STree.SyntaxNode) return LexTokenManager.Token_Position --# global in STree.Table; --# pre STree.Syntax_Node_Type (Association_Node, STree.Table) = SP_Symbols.positional_record_component_association or --# STree.Syntax_Node_Type (Association_Node, STree.Table) = SP_Symbols.annotation_positional_record_component_association; is Result : STree.SyntaxNode; begin Result := STree.Child_Node (Current_Node => Association_Node); -- ASSUME Result = annotation_positional_record_component_association OR annotation_expression OR -- positional_record_component_association OR expression if STree.Syntax_Node_Type (Node => Result) = SP_Symbols.annotation_positional_record_component_association or else STree.Syntax_Node_Type (Node => Result) = SP_Symbols.positional_record_component_association then -- ASSUME Result = annotation_positional_record_component_association OR positional_record_component_association Result := STree.Next_Sibling (Current_Node => Result); -- ASSUME Result = annotation_expression OR expression SystemErrors.RT_Assert (C => 
STree.Syntax_Node_Type (Node => Result) = SP_Symbols.annotation_expression or else STree.Syntax_Node_Type (Node => Result) = SP_Symbols.expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = annotation_expression OR expression in Expression_Node"); elsif STree.Syntax_Node_Type (Node => Result) = SP_Symbols.annotation_expression or else STree.Syntax_Node_Type (Node => Result) = SP_Symbols.expression then -- ASSUME Result = annotation_expression OR expression Result := STree.Child_Node (Current_Node => Result); -- ASSUME Result = annotation_relation OR quantified_expression OR relation SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Result) = SP_Symbols.annotation_relation or else STree.Syntax_Node_Type (Node => Result) = SP_Symbols.quantified_expression or else STree.Syntax_Node_Type (Node => Result) = SP_Symbols.relation, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = annotation_relation OR quantified_expression OR relation in Expression_Node"); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = annotation_positional_record_component_association OR annotation_expression OR " & "positional_record_component_association OR expression in Expression_Node"); end if; return STree.Node_Position (Node => Result); end Expression_Node; begin -- Wf_Positional_Record_Component_Association Exp_Stack.Pop (Item => Exp_Result, Stack => E_Stack); -- next associated expression Exp_Stack.Pop (Item => Name_Exp, Stack => E_Stack); -- aggregate type -- we know that the aggregate type is a record because of checks done in wf_ancestor_part if Name_Exp.Param_Count >= Dictionary.GetNumberOfComponents (Name_Exp.Type_Symbol) then -- aggregate already complete, extra expression found Exp_Stack.Push (X => Sem.Unknown_Type_Record, Stack => E_Stack); ErrorHandler.Semantic_Error (Err_Num => 105, Reference => ErrorHandler.No_Reference, Position => Expression_Node (Association_Node => Node), Id_Str => Dictionary.GetSimpleName (Name_Exp.Other_Symbol)); Chain_Up_To_Component_Association (Node => Node); -- ASSUME Node = annotation_record_component_association OR record_component_association else -- there are still associations needed Name_Exp.Param_Count := Name_Exp.Param_Count + 1; Expected_Type := Dictionary.GetType (Dictionary.GetRecordComponent (Name_Exp.Type_Symbol, Name_Exp.Param_Count)); STree.Add_Node_Symbol (Node => Node, Sym => Expected_Type); Sem.Assignment_Check (Position => Expression_Node (Association_Node => Node), Scope => Scope, Target_Type => Expected_Type, Exp_Result => Exp_Result); Name_Exp.Is_Constant := Name_Exp.Is_Constant and then Exp_Result.Is_Constant; Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); end if; end Wf_Positional_Record_Component_Association; spark-2012.0.deb/examiner/contextmanager-ops.adb0000644000175000017500000010167311753202336020547 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with CommandLineData; with FileSystem; with IndexManager; with ScreenEcho; with SP_Symbols; with SystemErrors; use type SP_Symbols.SP_Symbol; package body ContextManager.Ops is type File_Entries is record Name : LexTokenManager.Lex_String; Status : ContextManager.FileStatus; File : SPARK_IO.File_Type; Listing_Req : Boolean; Listing_Filename : E_Strings.T; Line_Context : SparkLex.Line_Context; Error_Context : ErrorHandler.Error_Contexts; Unit_Count : Natural; Errs_Reported : Boolean; end record; Null_File_Entry : constant File_Entries := File_Entries' (Name => LexTokenManager.Null_String, Status => ContextManager.NoFileEntry, File => SPARK_IO.Null_File, Listing_Req => False, Listing_Filename => E_Strings.Empty_String, Line_Context => SparkLex.Null_Line_Context, Error_Context => ErrorHandler.Null_Error_Context, Unit_Count => Natural'First, Errs_Reported => False); type Unit_Entries is record Name : LexTokenLists.Lists; Unit_Type : ContextManager.UnitTypes; Status : ContextManager.UnitStatus; File_Descriptor : ContextManager.FileDescriptors; Parse_Tree : STree.SyntaxNode; VCG : Boolean; Unit_Number : Natural; Cycle_Detected : Boolean; Comp_Unit_Flag : Natural; Inherit_Clause : STree.SyntaxNode; end record; Null_Unit_Entry : constant Unit_Entries := Unit_Entries' (Name => LexTokenLists.Null_List, Unit_Type => ContextManager.PackageSpecification, Status => ContextManager.NoUnitEntry, File_Descriptor => ContextManager.NullFile, Parse_Tree => STree.NullNode, VCG => False, Unit_Number => 0, Cycle_Detected => False, Comp_Unit_Flag => 0, Inherit_Clause => STree.NullNode); subtype File_Pointers is ContextManager.FileDescriptors range 1 .. ContextManager.FileDescriptors'Last; type File_Heap_Contents is array (File_Pointers) of File_Entries; type File_Heaps is record Content : File_Heap_Contents; Last_Used : ContextManager.FileDescriptors; end record; subtype Unit_Pointers is ContextManager.UnitDescriptors range 1 .. ContextManager.UnitDescriptors'Last; type Unit_Heap_Contents is array (Unit_Pointers) of Unit_Entries; type Unit_Heaps is record Content : Unit_Heap_Contents; Last_Used : ContextManager.UnitDescriptors; end record; subtype Stack_Heights is Integer range 0 .. ExaminerConstants.ContextManagerMaxUnits; subtype Stack_Pointers is Integer range 1 .. 
ExaminerConstants.ContextManagerMaxUnits; type Stack_Contents is array (Stack_Pointers) of ContextManager.UnitDescriptors; type Unit_Stacks is record Content : Stack_Contents; Height : Stack_Heights; end record; File_Heap : File_Heaps; Unit_Heap : Unit_Heaps; Unit_Stack : Unit_Stacks; procedure Open_File (File_Descriptor : in ContextManager.FileDescriptors) is Source_Filename : E_Strings.T; Source_File : SPARK_IO.File_Type := SPARK_IO.Null_File; Status : SPARK_IO.File_Status; Full_Name : E_Strings.T; Stat : FileSystem.Typ_File_Spec_Status; begin Source_Filename := LexTokenManager.Lex_String_To_String (Lex_Str => File_Heap.Content (File_Descriptor).Name); FileSystem.Open_Source_File (File => Source_File, Name => Source_Filename, Status => Status); if Status = SPARK_IO.Ok then File_Heap.Content (File_Descriptor).File := Source_File; SparkLex.Clear_Line_Context; SparkLex.Store_Line_Context (File_Line => File_Heap.Content (File_Descriptor).Line_Context); ErrorHandler.Error_Init (Source_File_Name => Source_Filename, Echo => CommandLineData.Content.Echo); ErrorHandler.Get_Error_Context (Context => File_Heap.Content (File_Descriptor).Error_Context); File_Heap.Content (File_Descriptor).Status := ContextManager.FileOpen; else ScreenEcho.Put_String ("Cannot open file "); if CommandLineData.Content.Plain_Output then Full_Name := FileSystem.Just_File (Fn => Source_Filename, Ext => True); else --# accept F, 10, Stat, "Stat not used"; FileSystem.Find_Full_File_Name (File_Spec => Source_Filename, File_Status => Stat, Full_File_Name => Full_Name); --# end accept; end if; ScreenEcho.Put_ExaminerString (Full_Name); ScreenEcho.New_Line (1); File_Heap.Content (File_Descriptor).Status := ContextManager.UnableToOpen; ErrorHandler.Set_File_Open_Error; end if; --# accept F, 33, Stat, "Stat not used"; end Open_File; procedure Close_File (File_Descriptor : in ContextManager.FileDescriptors) is Success : SPARK_IO.File_Status; begin if File_Heap.Content (File_Descriptor).Status = ContextManager.FileOpen then --# accept F, 10, Success, "Not required here"; SPARK_IO.Close (File_Heap.Content (File_Descriptor).File, Success); --# end accept; end if; File_Heap.Content (File_Descriptor).Status := ContextManager.FileEnd; --# accept F, 33, Success, "Not required here" ; end Close_File; function Current_Unit return ContextManager.UnitDescriptors is Result : ContextManager.UnitDescriptors; begin if Unit_Stack.Height > 0 then Result := Unit_Stack.Content (Unit_Stack.Height); else Result := ContextManager.NullUnit; end if; return Result; end Current_Unit; function Get_Unit_Status (Unit_Descriptor : ContextManager.UnitDescriptors) return ContextManager.UnitStatus is begin return Unit_Heap.Content (Unit_Descriptor).Status; end Get_Unit_Status; procedure Create_Unit_Descriptor (File_Descriptor : in ContextManager.FileDescriptors; Unit_Descriptor : out ContextManager.UnitDescriptors) is begin if Unit_Heap.Last_Used = Unit_Pointers'Last then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Context_Unit_Heap_Overflow, Msg => ""); end if; Unit_Heap.Last_Used := Unit_Heap.Last_Used + 1; Unit_Descriptor := Unit_Heap.Last_Used; Unit_Heap.Content (Unit_Descriptor) := Null_Unit_Entry; Unit_Heap.Content (Unit_Descriptor).Status := ContextManager.UnitCreated; Unit_Heap.Content (Unit_Descriptor).File_Descriptor := File_Descriptor; Unit_Heap.Content (Unit_Descriptor).VCG := False; if File_Descriptor /= ContextManager.NullFile then File_Heap.Content (File_Descriptor).Unit_Count := File_Heap.Content (File_Descriptor).Unit_Count + 1; 
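-- Number the new unit by its position within its source file.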
Unit_Heap.Content (Unit_Descriptor).Unit_Number := File_Heap.Content (File_Descriptor).Unit_Count; else Unit_Heap.Content (Unit_Descriptor).Unit_Number := 0; end if; end Create_Unit_Descriptor; function Get_File_Descriptor (Unit_Descriptor : ContextManager.UnitDescriptors) return ContextManager.FileDescriptors is begin return Unit_Heap.Content (Unit_Descriptor).File_Descriptor; end Get_File_Descriptor; procedure SetUnitStatus (Descriptor : in ContextManager.UnitDescriptors; Status : in ContextManager.UnitStatus) is begin Unit_Heap.Content (Descriptor).Status := Status; end SetUnitStatus; procedure MarkUnitInCycle (Descriptor : in ContextManager.UnitDescriptors) is begin Unit_Heap.Content (Descriptor).Cycle_Detected := True; end MarkUnitInCycle; function UnitInCycle (Descriptor : ContextManager.UnitDescriptors) return Boolean is begin return Unit_Heap.Content (Descriptor).Cycle_Detected; end UnitInCycle; function GetFileStatus (Descriptor : ContextManager.FileDescriptors) return ContextManager.FileStatus is begin return File_Heap.Content (Descriptor).Status; end GetFileStatus; procedure SetVCG (Descriptor : in ContextManager.UnitDescriptors; VCG : in Boolean) is begin Unit_Heap.Content (Descriptor).VCG := VCG; end SetVCG; procedure GetVCG (Descriptor : in ContextManager.UnitDescriptors; VCG : out Boolean) is begin VCG := Unit_Heap.Content (Descriptor).VCG; end GetVCG; procedure GetUnitByName (UnitName : in LexTokenLists.Lists; UnitTypeSet : in ContextManager.UnitTypeSets; Descriptor : out ContextManager.UnitDescriptors) is begin Descriptor := ContextManager.NullUnit; for I in Unit_Pointers range 1 .. Unit_Heap.Last_Used loop if LexTokenLists.Eq_Unit (First_Item => UnitName, Second => Unit_Heap.Content (I).Name) and UnitTypeSet (Unit_Heap.Content (I).Unit_Type) then Descriptor := I; exit; end if; end loop; end GetUnitByName; procedure SetUnitName (Descriptor : in ContextManager.UnitDescriptors; UnitName : in LexTokenLists.Lists; UnitType : in ContextManager.UnitTypes) is begin Unit_Heap.Content (Descriptor).Name := UnitName; Unit_Heap.Content (Descriptor).Unit_Type := UnitType; end SetUnitName; procedure GetUnitName (Descriptor : in ContextManager.UnitDescriptors; UnitName : out LexTokenLists.Lists; UnitType : out ContextManager.UnitTypes) is begin UnitName := Unit_Heap.Content (Descriptor).Name; UnitType := Unit_Heap.Content (Descriptor).Unit_Type; end GetUnitName; procedure SetParseTree (Descriptor : in ContextManager.UnitDescriptors; ParseTree : in STree.SyntaxNode) is begin Unit_Heap.Content (Descriptor).Parse_Tree := ParseTree; Unit_Heap.Content (Descriptor).Inherit_Clause := STree.Get_Node (It => STree.Find_First_Node (Node_Kind => SP_Symbols.inherit_clause, From_Root => ParseTree, In_Direction => STree.Down)); end SetParseTree; procedure GetParseTree (Descriptor : in ContextManager.UnitDescriptors; ParseTree : out STree.SyntaxNode) is begin ParseTree := Unit_Heap.Content (Descriptor).Parse_Tree; end GetParseTree; function FirstUnitDescriptor return ContextManager.UnitDescriptors is Result : ContextManager.UnitDescriptors; begin if Unit_Heap.Last_Used = ContextManager.NullUnit then Result := ContextManager.NullUnit; else Result := Unit_Pointers'First; end if; return Result; end FirstUnitDescriptor; function NextUnitDescriptor (Descriptor : ContextManager.UnitDescriptors) return ContextManager.UnitDescriptors is Result : ContextManager.UnitDescriptors; begin if Descriptor = Unit_Heap.Last_Used then Result := ContextManager.NullUnit; else Result := Descriptor + 1; end if; return Result; 
end NextUnitDescriptor; procedure PushUnit (Descriptor : in ContextManager.UnitDescriptors) is begin if Unit_Stack.Height = Stack_Heights'Last then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Context_Unit_Stack_Overflow, Msg => ""); end if; Unit_Stack.Height := Unit_Stack.Height + 1; Unit_Stack.Content (Unit_Stack.Height) := Descriptor; end PushUnit; procedure PopUnit (Descriptor : out ContextManager.UnitDescriptors) is begin if Unit_Stack.Height = 0 then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Context_Unit_Stack_Underflow, Msg => ""); end if; Descriptor := Unit_Stack.Content (Unit_Stack.Height); Unit_Stack.Height := Unit_Stack.Height - 1; end PopUnit; procedure CreateFileDescriptor (Descriptor : out ContextManager.FileDescriptors) is begin if File_Heap.Last_Used = File_Pointers'Last then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Context_File_Heap_Overflow, Msg => ""); end if; File_Heap.Last_Used := File_Heap.Last_Used + 1; Descriptor := File_Heap.Last_Used; File_Heap.Content (File_Heap.Last_Used).Status := ContextManager.NoFileEntry; File_Heap.Content (File_Heap.Last_Used).Unit_Count := 0; File_Heap.Content (File_Heap.Last_Used).Errs_Reported := False; end CreateFileDescriptor; procedure SetSourceFileName (Descriptor : in ContextManager.FileDescriptors; SourceFileName : in LexTokenManager.Lex_String) is begin File_Heap.Content (Descriptor).Name := SourceFileName; end SetSourceFileName; function GetSourceFileName (Descriptor : in ContextManager.FileDescriptors) return LexTokenManager.Lex_String is begin return File_Heap.Content (Descriptor).Name; end GetSourceFileName; procedure GetSourceFile (Descriptor : in ContextManager.FileDescriptors; SourceFile : out SPARK_IO.File_Type) is begin SourceFile := File_Heap.Content (Descriptor).File; end GetSourceFile; function ListingReqt (Descriptor : ContextManager.FileDescriptors) return Boolean is begin return File_Heap.Content (Descriptor).Listing_Req; end ListingReqt; function FirstFileDescriptor return ContextManager.FileDescriptors is Result : ContextManager.FileDescriptors; begin if File_Heap.Last_Used = ContextManager.NullFile then Result := ContextManager.NullFile; else Result := File_Pointers'First; end if; return Result; end FirstFileDescriptor; function NextFileDescriptor (Descriptor : ContextManager.FileDescriptors) return ContextManager.FileDescriptors is Result : ContextManager.FileDescriptors; begin if Descriptor = File_Heap.Last_Used then Result := ContextManager.NullFile; else Result := Descriptor + 1; end if; return Result; end NextFileDescriptor; function GetFileByName (FileName : in LexTokenManager.Lex_String) return ContextManager.FileDescriptors is Descriptor : ContextManager.FileDescriptors; begin Descriptor := ContextManager.NullFile; for I in File_Pointers range 1 .. 
File_Heap.Last_Used loop if LexTokenManager.Lex_String_Case_Sensitive_Compare (Lex_Str1 => FileName, Lex_Str2 => File_Heap.Content (I).Name) = LexTokenManager.Str_Eq then Descriptor := I; exit; end if; end loop; return Descriptor; end GetFileByName; procedure SetFileStatus (Descriptor : in ContextManager.FileDescriptors; Status : in ContextManager.FileStatus) is begin File_Heap.Content (Descriptor).Status := Status; end SetFileStatus; procedure SetListingReq (Descriptor : in ContextManager.FileDescriptors; Req : in Boolean) is begin File_Heap.Content (Descriptor).Listing_Req := Req; end SetListingReq; procedure SetLineContext (Descriptor : in ContextManager.FileDescriptors; FileContext : in SparkLex.Line_Context) is begin File_Heap.Content (Descriptor).Line_Context := FileContext; end SetLineContext; procedure GetLineContext (Descriptor : in ContextManager.FileDescriptors; FileContext : out SparkLex.Line_Context) is begin FileContext := File_Heap.Content (Descriptor).Line_Context; end GetLineContext; procedure SetErrorContext (Descriptor : in ContextManager.FileDescriptors; Context : in ErrorHandler.Error_Contexts) is begin File_Heap.Content (Descriptor).Error_Context := Context; end SetErrorContext; procedure GetErrorContext (Descriptor : in ContextManager.FileDescriptors; Context : out ErrorHandler.Error_Contexts) is begin Context := File_Heap.Content (Descriptor).Error_Context; end GetErrorContext; procedure SetListingFileName (Descriptor : in ContextManager.FileDescriptors; Listing_File_Name : in E_Strings.T) is begin File_Heap.Content (Descriptor).Listing_Filename := Listing_File_Name; end SetListingFileName; procedure GetListingFileName (Descriptor : in ContextManager.FileDescriptors; Listing_File_Name : out E_Strings.T) is begin Listing_File_Name := File_Heap.Content (Descriptor).Listing_Filename; end GetListingFileName; procedure SetErrorsReported (Descriptor : in ContextManager.FileDescriptors) is begin File_Heap.Content (Descriptor).Errs_Reported := True; end SetErrorsReported; function ErrorsReported (Descriptor : ContextManager.FileDescriptors) return Boolean is begin return File_Heap.Content (Descriptor).Errs_Reported; end ErrorsReported; procedure Get_Unit (Descriptor : in ContextManager.FileDescriptors; Unit_Descriptor : out ContextManager.UnitDescriptors) is Id_Str : LexTokenManager.Lex_String; begin Unit_Descriptor := ContextManager.NullUnit; for I in Unit_Pointers range 1 .. 
Unit_Heap.Last_Used loop if Unit_Heap.Content (I).Unit_Type /= ContextManager.InterUnitPragma and then Unit_Heap.Content (I).File_Descriptor = Descriptor then if Unit_Descriptor = ContextManager.NullUnit then Unit_Descriptor := I; else if LexTokenLists.Get_Length (List => Unit_Heap.Content (I).Name) = 0 then Id_Str := LexTokenManager.Null_String; else Id_Str := LexTokenLists.Get_Element (List => Unit_Heap.Content (I).Name, Pos => LexTokenLists.Get_Length (List => Unit_Heap.Content (I).Name)); end if; ErrorHandler.SLI_Generation_Warning (Position => STree.Node_Position (Node => STree.Get_Node (It => STree.Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Unit_Heap.Content (I).Parse_Tree, In_Direction => STree.Down))), Id_Str => Id_Str); end if; end if; end loop; end Get_Unit; procedure Get_Parent (Unit_Descriptor : in out ContextManager.UnitDescriptors) is Unit_Name : LexTokenLists.Lists; Dummy_Item : LexTokenManager.Lex_String; begin Unit_Name := Unit_Heap.Content (Unit_Descriptor).Name; --# accept F, 10, Dummy_Item, "Ineffective assignment here OK"; LexTokenLists.Pop (List => Unit_Name, Item => Dummy_Item); --# end accept; GetUnitByName (UnitName => Unit_Name, UnitTypeSet => ContextManager.UnitTypeSets'(ContextManager.SubUnit | ContextManager.PackageBody | ContextManager.MainProgram => True, others => False), Descriptor => Unit_Descriptor); --# accept F, 33, Dummy_Item, "Expect Dummy_Item unused"; end Get_Parent; procedure Dependency_Closure (Descriptor : in ContextManager.FileDescriptors) is It : STree.Iterator; Lex_Str : LexTokenLists.Lists; Unit_Descriptor : ContextManager.UnitDescriptors; Spec_Found : Boolean; Components : IndexManager.Component_Lists; Queue_Size : constant := ExaminerConstants.ContextManagerMaxUnits; subtype Queue_0 is Integer range 0 .. Queue_Size; subtype Queue_1 is Queue_0 range 1 .. Queue_0'Last; type Queue_Item is record Unit_Descriptor : ContextManager.UnitDescriptors; Done : Boolean; end record; type Queue_Array is array (Queue_1) of Queue_Item; type Queue_T is record The_Array : Queue_Array; Top : Queue_0; end record; Queue : Queue_T; -- Build a string list from a dotted name identifier (Node). function Build_List (Node : in STree.SyntaxNode) return LexTokenLists.Lists --# global in STree.Table; is It : STree.Iterator; Return_Val : LexTokenLists.Lists; begin SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.dotted_simple_name, Sys_Err => SystemErrors.Other_Internal_Error, Msg => "CONTEXTMANAGER.BUILD_LIST : Node should be a SP_Symbols.dotted_simple_name"); Return_Val := LexTokenLists.Null_List; It := STree.Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Node, In_Direction => STree.Down); while It /= STree.NullIterator loop LexTokenLists.Append (List => Return_Val, Item => STree.Node_Lex_String (Node => STree.Get_Node (It => It))); It := STree.NextNode (It => It); end loop; return Return_Val; end Build_List; -- Add the compilation unit descriptor (Descriptor) in the -- queue. procedure Add_Queue (Descriptor : in ContextManager.UnitDescriptors) --# global in out Queue; --# in out Unit_Heap; --# derives Queue, --# Unit_Heap from *, --# Descriptor, --# Queue; is Found : Boolean; begin -- Find if the compilation unit descriptor (Descriptor) has -- already been in the queue or is already in the queue. Found := False; for I in Queue_1 range 1 .. 
Queue.Top loop if Queue.The_Array (I).Unit_Descriptor = Descriptor then Found := True; exit; end if; end loop; if not Found then -- Never seen the compilation unit descriptor -- (Descriptor) in the queue => add the compilation unit -- descriptor (Descriptor) in the queue. if Queue.Top < Queue_Size then Queue.Top := Queue.Top + 1; Queue.The_Array (Queue.Top) := Queue_Item'(Unit_Descriptor => Descriptor, Done => False); -- Set the closure flag. Unit_Heap.Content (Descriptor).Comp_Unit_Flag := 1; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Queue_Overflow, Msg => "CONTEXTMANAGER.ADD_QUEUE : Queue full"); end if; end if; end Add_Queue; -- Get and remove the next compilation unit descriptor -- (Unit_Descriptor) from the queue. procedure Get_Next (Unit_Descriptor : out ContextManager.UnitDescriptors) --# global in out Queue; --# derives Queue, --# Unit_Descriptor from Queue; is begin Unit_Descriptor := ContextManager.NullUnit; for I in Queue_1 range 1 .. Queue.Top loop if not Queue.The_Array (I).Done then Queue.The_Array (I).Done := True; Unit_Descriptor := Queue.The_Array (I).Unit_Descriptor; exit; end if; end loop; end Get_Next; begin -- Reset the closure flag. for I in Unit_Pointers range 1 .. Unit_Heap.Last_Used loop Unit_Heap.Content (I).Comp_Unit_Flag := 0; end loop; -- Initiate the closure calculation. Get_Unit (Descriptor => Descriptor, Unit_Descriptor => Unit_Descriptor); Queue := Queue_T' (The_Array => Queue_Array'(others => Queue_Item'(Unit_Descriptor => ContextManager.NullUnit, Done => True)), Top => 0); Spec_Found := False; while not Spec_Found loop if Unit_Heap.Content (Unit_Descriptor).Unit_Type = ContextManager.PackageBody then -- Set the closure flag. Unit_Heap.Content (Unit_Descriptor).Comp_Unit_Flag := 1; -- It is an Ada package body. -- Find the specification of the Unit_Pointer_Body. for I in Unit_Pointers range 1 .. Unit_Heap.Last_Used loop if LexTokenLists.Eq_Unit (First_Item => Unit_Heap.Content (I).Name, Second => Unit_Heap.Content (Unit_Descriptor).Name) and then Unit_Heap.Content (I).Unit_Type = ContextManager.PackageSpecification then Add_Queue (Descriptor => I); exit; end if; end loop; Spec_Found := True; elsif Unit_Heap.Content (Unit_Descriptor).Unit_Type = ContextManager.GenericSubprogramBody then -- Set the closure flag. Unit_Heap.Content (Unit_Descriptor).Comp_Unit_Flag := 1; -- It is an Ada generic subprogram body. -- Find the specification of the Unit_Pointer_Body. for I in Unit_Pointers range 1 .. Unit_Heap.Last_Used loop if LexTokenLists.Eq_Unit (First_Item => Unit_Heap.Content (I).Name, Second => Unit_Heap.Content (Unit_Descriptor).Name) and then Unit_Heap.Content (I).Unit_Type = ContextManager.GenericSubprogramDeclaration then Add_Queue (Descriptor => I); exit; end if; end loop; Spec_Found := True; elsif Unit_Heap.Content (Unit_Descriptor).Unit_Type = ContextManager.PackageSpecification or else Unit_Heap.Content (Unit_Descriptor).Unit_Type = ContextManager.MainProgram then -- It is an Ada package specification or an Ada main -- program. Add_Queue (Descriptor => Unit_Descriptor); Spec_Found := True; elsif Unit_Heap.Content (Unit_Descriptor).Unit_Type = ContextManager.SubUnit then -- Set the closure flag. 
Unit_Heap.Content (Unit_Descriptor).Comp_Unit_Flag := 1; -- It is an Ada separate unit Get_Parent (Unit_Descriptor => Unit_Descriptor); Spec_Found := False; else Spec_Found := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "in ContextManager.Ops.Dependency_Closure"); end if; end loop; -- Add the private child units to the closure (if any). IndexManager.Look_Up_Components (Required_Unit => Unit_Heap.Content (Unit_Descriptor).Name, Components => Components); for I in IndexManager.Component_Index loop exit when Components (I) = LexTokenLists.Null_List; for J in Unit_Pointers range 1 .. Unit_Heap.Last_Used loop if LexTokenLists.Eq_Unit (First_Item => Unit_Heap.Content (J).Name, Second => Components (I)) and then Unit_Heap.Content (J).Unit_Type = ContextManager.PackageSpecification then Add_Queue (Descriptor => J); exit; end if; end loop; end loop; -- Calculate the closure. Get_Next (Unit_Descriptor => Unit_Descriptor); while Unit_Descriptor /= ContextManager.NullUnit loop It := STree.Find_First_Node (Node_Kind => SP_Symbols.dotted_simple_name, From_Root => Unit_Heap.Content (Unit_Descriptor).Inherit_Clause, In_Direction => STree.Down); while It /= STree.NullIterator loop Lex_Str := Build_List (Node => STree.Get_Node (It => It)); GetUnitByName (Lex_Str, ContextManager.PackageSpecificationSet, Unit_Descriptor); if Unit_Descriptor /= ContextManager.NullUnit then Add_Queue (Descriptor => Unit_Descriptor); end if; It := STree.NextNode (It => It); end loop; Get_Next (Unit_Descriptor => Unit_Descriptor); end loop; if CommandLineData.Content.Debug.SLI then -- Debug SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => "DEBUG DEPENDENCY CLOSURE", Stop => 0); for I in Unit_Pointers range 1 .. Unit_Heap.Last_Used loop if Unit_Heap.Content (I).Comp_Unit_Flag /= 0 then SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "COMPILATION UNIT = ", Stop => 0); LexTokenLists.Print_List (File => SPARK_IO.Standard_Output, List => Unit_Heap.Content (I).Name); if Unit_Heap.Content (I).Unit_Type = ContextManager.PackageSpecification or Unit_Heap.Content (I).Unit_Type = ContextManager.GenericSubprogramDeclaration then SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => " SPEC", Stop => 0); elsif Unit_Heap.Content (I).Unit_Type = ContextManager.PackageBody or Unit_Heap.Content (I).Unit_Type = ContextManager.SubUnit or Unit_Heap.Content (I).Unit_Type = ContextManager.MainProgram then SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => " BODY", Stop => 0); else SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => " UNKNOWN", Stop => 0); end if; end if; end loop; end if; end Dependency_Closure; function In_Closure (Descriptor : in ContextManager.UnitDescriptors) return Boolean is begin return Unit_Heap.Content (Descriptor).Comp_Unit_Flag /= 0; end In_Closure; procedure Set_Line_Number (Descriptor : in ContextManager.UnitDescriptors; Line_Number : in Positive) is begin Unit_Heap.Content (Descriptor).Comp_Unit_Flag := Line_Number; end Set_Line_Number; function Get_Line_Number (Descriptor : in ContextManager.UnitDescriptors) return Natural is begin return Unit_Heap.Content (Descriptor).Comp_Unit_Flag; end Get_Line_Number; begin Unit_Heap := Unit_Heaps'(Content => Unit_Heap_Contents'(others => Null_Unit_Entry), Last_Used => 0); Unit_Stack := Unit_Stacks'(Content => Stack_Contents'(others => ContextManager.NullUnit), Height => Stack_Heights'First); -- Keep this partial initialization due to limitation of the stack -- size with MacOS/X.
It should be: -- File_Heap := File_Heaps'(Content => File_Heap_Contents'(others => Null_File_Entry), -- Last_Used => 0); File_Heap.Last_Used := 0; --# accept F, 23, File_Heap.Content, "Partial initialization OK here"; File_Heap.Content (File_Pointers'First) := Null_File_Entry; --# accept F, 602, File_Heap, File_Heap.Content, "Partial initialization OK here"; end ContextManager.Ops; spark-2012.0.deb/examiner/sparklex-linemanager.adb0000644000175000017500000003101411753202336021031 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (SparkLex) package body LineManager is procedure Clear_Line (Curr_Line : out SparkLex.Line_Context) is begin Curr_Line := SparkLex.Line_Context' (Context => SparkLex.In_Ada, Anno_Context => SparkLex.Start_Annotation, Line_No => LexTokenManager.Line_Numbers'First, Last_Token_Pos => E_Strings.Positions'First, Curr_Pos => E_Strings.Positions'First, Lookahead_Pos => E_Strings.Positions'First, Conts => E_Strings.Get_Empty_String); end Clear_Line; procedure Copy_In_Line (Line : in SparkLex.Line_Context; Curr_Line : out SparkLex.Line_Context) is begin Curr_Line := Line; end Copy_In_Line; procedure Record_Curr_Pos (Curr_Line : in out SparkLex.Line_Context) is begin Curr_Line.Last_Token_Pos := Curr_Line.Curr_Pos; end Record_Curr_Pos; procedure Reset_Curr_Pos (Curr_Line : in out SparkLex.Line_Context) is begin Curr_Line.Curr_Pos := Curr_Line.Last_Token_Pos; Curr_Line.Lookahead_Pos := Curr_Line.Last_Token_Pos; end Reset_Curr_Pos; procedure Accept_Char (Curr_Line : in out SparkLex.Line_Context) is begin if Curr_Line.Curr_Pos <= E_Strings.Get_Length (E_Str => Curr_Line.Conts) then Curr_Line.Curr_Pos := Curr_Line.Curr_Pos + 1; end if; Curr_Line.Lookahead_Pos := Curr_Line.Curr_Pos; end Accept_Char; procedure Lookahead_Char (Curr_Line : in out SparkLex.Line_Context; Ch : out Character) is begin if Curr_Line.Lookahead_Pos > E_Strings.Get_Length (E_Str => Curr_Line.Conts) then if Curr_Line.Lookahead_Pos > 1 and then E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Lookahead_Pos - 1) = SparkLex.End_Of_Text then Ch := SparkLex.End_Of_Text; else Ch := E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Lookahead_Pos); end if; else case E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Lookahead_Pos) is when SparkLex.End_Of_Text => Ch := SparkLex.End_Of_Text; Curr_Line.Lookahead_Pos := Curr_Line.Lookahead_Pos + 1; when others => Curr_Line.Lookahead_Pos := Curr_Line.Lookahead_Pos + 1; Ch := E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Lookahead_Pos); end case; end if; end Lookahead_Char; 
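-- Accept_Lookahead commits characters read ahead of the current position by -- advancing the current position to the lookahead position; Reject_Lookahead, -- below, discards the lookahead instead and rewinds it to the current position.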
procedure Accept_Lookahead (Curr_Line : in out SparkLex.Line_Context) is begin if Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (E_Str => Curr_Line.Conts) then Curr_Line.Lookahead_Pos := Curr_Line.Lookahead_Pos + 1; end if; Curr_Line.Curr_Pos := Curr_Line.Lookahead_Pos; end Accept_Lookahead; procedure Reject_Lookahead (Curr_Line : in out SparkLex.Line_Context) is begin Curr_Line.Lookahead_Pos := Curr_Line.Curr_Pos; end Reject_Lookahead; function Separator (Ch : Character) return Boolean is begin return Ch = ' ' or else Ch = Ada.Characters.Latin_1.HT or else Ch = Ada.Characters.Latin_1.VT or else Ch = Ada.Characters.Latin_1.CR or else Ch = Ada.Characters.Latin_1.LF or else Ch = Ada.Characters.Latin_1.FF; end Separator; procedure Next_Sig_Char (Prog_Text : in SPARK_IO.File_Type; Curr_Line : in out SparkLex.Line_Context) is Loc_Pos : E_Strings.Positions; Ch : Character; procedure Get_Next_Line (Prog_Text : in SPARK_IO.File_Type; Curr_Line : in out SparkLex.Line_Context) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives Curr_Line, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Curr_Line, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Prog_Text, --# SPARK_IO.File_Sys; --# post E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; is Pos : E_Strings.Positions; End_Pos : E_Strings.Lengths; Line : E_Strings.T; Line_No_Before_Get : Positive; procedure Inc_Line_Number (Curr_Line : in out SparkLex.Line_Context) --# derives Curr_Line from *; --# post E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts); is begin if Curr_Line.Line_No = LexTokenManager.Line_Numbers'Last then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Too_Many_File_Lines, Msg => "in SparkLex.LineManager"); end if; Curr_Line.Line_No := Curr_Line.Line_No + 1; end Inc_Line_Number; begin -- Get_Next_Line Curr_Line.Conts := E_Strings.Get_Empty_String; loop --# assert E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last; if SPARK_IO.End_Of_Line (Prog_Text) then -- Skip empty line if SPARK_IO.End_Of_File (Prog_Text) then Curr_Line.Conts := E_Strings.Get_Empty_String; E_Strings.Append_Char (E_Str => Curr_Line.Conts, Ch => SparkLex.End_Of_Text); else SPARK_IO.Skip_Line (Prog_Text, 1); Inc_Line_Number (Curr_Line => Curr_Line); end if; else -- Attempt to read the line Line_No_Before_Get := SPARK_IO.Line (Prog_Text); E_Strings.Get_Line (File => Prog_Text, E_Str => Line); Inc_Line_Number (Curr_Line => Curr_Line); if E_Strings.Get_Length (E_Str => Line) = 0 then -- Examiner bug - OK but not accepted -- Unable to read line, eight-bit character? ErrorHandler.Lex_Error (Error_Message => "Line contains illegal character(s)", Recovery_Message => "Ignored", Error_Item => LexTokenManager.Lex_Value'(Position => LexTokenManager.Token_Position'(Start_Line_No => Curr_Line.Line_No, Start_Pos => 2), Token_Str => LexTokenManager.Null_String)); SPARK_IO.Skip_Line (Prog_Text, 1); if Line_No_Before_Get = SPARK_IO.Line (Prog_Text) then Curr_Line.Conts := E_Strings.Get_Empty_String; E_Strings.Append_Char (E_Str => Curr_Line.Conts, Ch => SparkLex.End_Of_Text); end if; elsif E_Strings.Get_Length (E_Str => Line) < Natural'Last then -- got a line!
Curr_Line.Conts := Line; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Line too long in SparkLex.LineManager"); end if; end if; -- Skip over leading separators. Pos := 1; End_Pos := E_Strings.Get_Length (E_Str => Curr_Line.Conts); loop exit when Pos >= End_Pos; --# assert Pos < End_Pos and End_Pos = E_Strings.Get_Length (Curr_Line.Conts) and --# E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last; exit when not Separator (Ch => E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Pos)); Pos := Pos + 1; end loop; exit when not Separator (Ch => E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Pos)); exit when End_Pos > 0 and then E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => End_Pos) = SparkLex.End_Of_Text; end loop; -- Context sensitive annotation continuation check if Curr_Line.Context = SparkLex.In_Annotation and then (End_Pos - Pos < 2 or else E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Pos) /= '-' or else E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Pos + 1) /= '-' or else E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Pos + 2) /= CommandLineData.Content.Anno_Char) then Pos := 1; Curr_Line.Context := SparkLex.In_Ada; end if; Curr_Line.Curr_Pos := Pos; Curr_Line.Lookahead_Pos := Pos; end Get_Next_Line; begin -- Next_Sig_Char Loc_Pos := Curr_Line.Curr_Pos; loop --# assert Loc_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last; if Loc_Pos > E_Strings.Get_Length (E_Str => Curr_Line.Conts) then Get_Next_Line (Prog_Text => Prog_Text, Curr_Line => Curr_Line); Loc_Pos := Curr_Line.Curr_Pos; end if; Ch := E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Loc_Pos); if Loc_Pos <= E_Strings.Get_Length (E_Str => Curr_Line.Conts) and then Ch /= ' ' and then Ch /= Ada.Characters.Latin_1.HT and then Ch /= Ada.Characters.Latin_1.VT and then Ch /= Ada.Characters.Latin_1.LF and then Ch /= Ada.Characters.Latin_1.FF then exit; end if; if Loc_Pos <= E_Strings.Get_Length (E_Str => Curr_Line.Conts) then Loc_Pos := Loc_Pos + 1; -- Skip separator end if; end loop; Curr_Line.Last_Token_Pos := Loc_Pos; Curr_Line.Curr_Pos := Loc_Pos; Curr_Line.Lookahead_Pos := Loc_Pos; end Next_Sig_Char; procedure Set_Context (Curr_Line : in out SparkLex.Line_Context; New_Context : in SparkLex.Program_Context) is begin Curr_Line.Context := New_Context; end Set_Context; procedure Set_Anno_Context (Curr_Line : in out SparkLex.Line_Context; New_Context : in SparkLex.Annotation_Context) is begin Curr_Line.Anno_Context := New_Context; end Set_Anno_Context; end LineManager; spark-2012.0.deb/examiner/componentmanager.adb0000644000175000017500000005177711753202335020276 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. 
If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with CommandLineData; with Debug; with E_Strings; with SPARK_IO; with Statistics; with SystemErrors; package body ComponentManager is HashDivider : constant Integer := HashMax + 1; ---------------------------------------------------------------------------- -- Local Operations ----------------------------------------------------------------------------- function Hash (Sym : Dictionary.Symbol) return HashIndex is begin return Natural (Dictionary.SymbolRef (Sym)) mod HashDivider; end Hash; ----------------------------------------------------------------------------- procedure LinkInNewComponent (Data : in out ComponentData; HeapSeq : in out Heap.HeapRecord; Sym : in Dictionary.Symbol; TheComponent : out Component) --# global in out Statistics.TableUsage; --# derives Data from *, --# HeapSeq, --# Sym & --# HeapSeq from * & --# Statistics.TableUsage from *, --# Data, --# HeapSeq & --# TheComponent from Data; is TheComponentLocal : Component; HashVal : HashIndex; procedure NewComponent (Data : in out ComponentData; HeapSeq : in out Heap.HeapRecord; TheComponent : out Component) --# global in out Statistics.TableUsage; --# derives Data, --# Statistics.TableUsage from *, --# Data, --# HeapSeq & --# HeapSeq from * & --# TheComponent from Data; is ErrSeq : SeqAlgebra.Seq; TheComponentLocal : Component; begin if Data.TheHeap.HighMark = MaxNumComponents then Statistics.SetTableUsage (Statistics.RecordFields, MaxNumComponents); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Component_Manager_Overflow, Msg => ""); --note that above call does not return end if; --get next array entry Data.TheHeap.HighMark := Data.TheHeap.HighMark + 1; TheComponentLocal := Data.TheHeap.HighMark; --create empty error sequence SeqAlgebra.CreateSeq (HeapSeq, ErrSeq); --initialize an entry Data.TheHeap.ListOfComponents (TheComponentLocal) := ComponentDescriptor' (Name => Dictionary.NullSymbol, ListOfErrors => ErrSeq, NextRoot => NullComponent, Hash => NullComponent, Parent => NullComponent, FirstChild => NullComponent, LastChild => NullComponent, NextSibling => NullComponent, PreviousSibling => NullComponent); TheComponent := TheComponentLocal; end NewComponent; begin --LinkInNewComponent --put empty record in next empty slot of heap NewComponent (Data, HeapSeq, --to get TheComponentLocal); --generate a hash index that will point to new entry HashVal := Hash (Sym); --if the hash table already points at something this maintains the link Data.TheHeap.ListOfComponents (TheComponentLocal).Hash := Data.TheTable (HashVal); --and this completes the link from the hash table to the new entry Data.TheTable (HashVal) := TheComponentLocal; TheComponent := TheComponentLocal; end LinkInNewComponent; ----------------------------------------------------------------------------- -- Exported Operations ----------------------------------------------------------------------------- function ComponentToRef (C : Component) return Natural is begin return Natural (C); end ComponentToRef; ----------------------------------------------------------------------------- function RefToComponent (N : Natural) return Component is begin return Component (N); end RefToComponent; ----------------------------------------------------------------------------- procedure Initialise (Data : out ComponentData) is begin --# accept F, 32, Data.TheHeap.ListOfComponents, "Initialization partial but 
effective" & --# F, 31, Data.TheHeap.ListOfComponents, "Initialization partial but effective" & --# F, 602, Data, Data.TheHeap.ListOfComponents, "Initialization partial but effective"; Data.TheTable := HashTable'(HashIndex => NullComponent); Data.TheHeap.HighMark := NullComponent; Data.TheHeap.FirstRoot := NullComponent; end Initialise; --782 expect 2 errors, 1 warning, initialization incomplete but effective ----------------------------------------------------------------------------- function GetComponentNode (Data : ComponentData; Sym : Dictionary.Symbol) return Component is CurrentComponent : Component; begin CurrentComponent := Data.TheTable (Hash (Sym)); if CurrentComponent /= NullComponent then -- At least one Component hashes from this symbol loop -- is this the one we want? exit when Data.TheHeap.ListOfComponents (CurrentComponent).Name = Sym; --no, try next in hash list CurrentComponent := Data.TheHeap.ListOfComponents (CurrentComponent).Hash; --any more to try? exit when CurrentComponent = NullComponent; end loop; end if; return CurrentComponent; end GetComponentNode; ----------------------------------------------------------------------------- procedure AddRoot (Data : in out ComponentData; HeapSeq : in out Heap.HeapRecord; RootSym : in Dictionary.Symbol) is NewRootComponent : Component; begin --this operation is idempotent NewRootComponent := GetComponentNode (Data, RootSym); if NewRootComponent = NullComponent then LinkInNewComponent (Data, HeapSeq, RootSym, --to get NewRootComponent); Data.TheHeap.ListOfComponents (NewRootComponent).Name := RootSym; Data.TheHeap.ListOfComponents (NewRootComponent).NextRoot := Data.TheHeap.FirstRoot; Data.TheHeap.FirstRoot := NewRootComponent; end if; end AddRoot; ----------------------------------------------------------------------------- procedure AddNextChild (Data : in out ComponentData; HeapSeq : in out Heap.HeapRecord; Node : in Component; ChildSym : in Dictionary.Symbol) is CurrentChildComponent, NewChildComponent, LastChild : Component; NoDuplicates : Boolean := True; begin if Node /= NullComponent then -- Valid node to which to add child, in all other cases we are -- attempting to add child to empty node, should not happen in normal use -- but may occur when walking expressions in situations such as defining -- types and constants where component data is not being collected but -- must still be there to make procedure call legal CurrentChildComponent := Data.TheHeap.ListOfComponents (Node).FirstChild; while CurrentChildComponent /= NullComponent loop if Data.TheHeap.ListOfComponents (CurrentChildComponent).Name = ChildSym then --# accept F, 41, "Stable expression expected here"; if CommandLineData.Content.Debug.Components then Debug.Print_Sym (Msg => "Name overload in ComponentManager rejected: ", Sym => ChildSym); end if; --# end accept; NoDuplicates := False; end if; CurrentChildComponent := Data.TheHeap.ListOfComponents (CurrentChildComponent).NextSibling; end loop; if NoDuplicates then LinkInNewComponent (Data, HeapSeq, ChildSym, --to get NewChildComponent); Data.TheHeap.ListOfComponents (NewChildComponent).Name := ChildSym; Data.TheHeap.ListOfComponents (NewChildComponent).Parent := Node; LastChild := Data.TheHeap.ListOfComponents (Node).LastChild; if LastChild = NullComponent then -- Adding first child Data.TheHeap.ListOfComponents (Node).FirstChild := NewChildComponent; else --at least one existing child Data.TheHeap.ListOfComponents (LastChild).NextSibling := NewChildComponent; Data.TheHeap.ListOfComponents 
(NewChildComponent).PreviousSibling := LastChild; end if; Data.TheHeap.ListOfComponents (Node).LastChild := NewChildComponent; end if; end if; end AddNextChild; ----------------------------------------------------------------------------- function HasChildren (Data : ComponentData; Node : Component) return Boolean is begin return Node /= NullComponent and then Data.TheHeap.ListOfComponents (Node).FirstChild /= NullComponent; end HasChildren; ----------------------------------------------------------------------------- function IsNullComponent (Node : Component) return Boolean is begin return Node = NullComponent; end IsNullComponent; ----------------------------------------------------------------------------- function IsALeaf (Data : ComponentData; Node : Component) return Boolean is begin return not HasChildren (Data, Node); end IsALeaf; ----------------------------------------------------------------------------- function IsARoot (Data : ComponentData; Node : Component) return Boolean is begin return Node /= NullComponent and then Data.TheHeap.ListOfComponents (Node).Parent = NullComponent; end IsARoot; ----------------------------------------------------------------------------- function IsTransitiveParent (Data : ComponentData; Parent : Component; Node : Component) return Boolean is CurrentComponent : Component; Result : Boolean := False; begin CurrentComponent := Node; loop exit when CurrentComponent = NullComponent; if CurrentComponent = Parent then Result := True; exit; end if; CurrentComponent := Data.TheHeap.ListOfComponents (CurrentComponent).Parent; end loop; return Result; end IsTransitiveParent; ----------------------------------------------------------------------------- function GetRoot (Data : ComponentData; Node : Component) return Component is CurrentComponent : Component; begin CurrentComponent := Node; loop exit when Data.TheHeap.ListOfComponents (CurrentComponent).Parent = NullComponent; CurrentComponent := Data.TheHeap.ListOfComponents (CurrentComponent).Parent; end loop; return CurrentComponent; end GetRoot; ----------------------------------------------------------------------------- function GetParent (Data : ComponentData; Node : Component) return Component is begin return Data.TheHeap.ListOfComponents (Node).Parent; end GetParent; ----------------------------------------------------------------------------- function GetFirstChild (Data : ComponentData; Node : Component) return Component is begin return Data.TheHeap.ListOfComponents (Node).FirstChild; end GetFirstChild; ----------------------------------------------------------------------------- function GetNextSibling (Data : ComponentData; Node : Component) return Component is begin return Data.TheHeap.ListOfComponents (Node).NextSibling; end GetNextSibling; ----------------------------------------------------------------------------- function GetPreviousSibling (Data : ComponentData; Node : Component) return Component is begin return Data.TheHeap.ListOfComponents (Node).PreviousSibling; end GetPreviousSibling; ----------------------------------------------------------------------------- function GetName (Data : ComponentData; Node : Component) return Dictionary.Symbol is begin return Data.TheHeap.ListOfComponents (Node).Name; end GetName; ----------------------------------------------------------------------------- procedure GetLeaves (HeapSeq : in out Heap.HeapRecord; Data : in ComponentData; Node : in Component; SeqOfLeafNames : out SeqAlgebra.Seq) is LocalSeq : SeqAlgebra.Seq; CurrentNode, NextNode : 
Component; begin SeqAlgebra.CreateSeq (HeapSeq, LocalSeq); CurrentNode := Data.TheHeap.ListOfComponents (Node).FirstChild; if CurrentNode /= NullComponent then loop -- down loop if Data.TheHeap.ListOfComponents (CurrentNode).FirstChild /= NullComponent then NextNode := Data.TheHeap.ListOfComponents (CurrentNode).FirstChild; else -- Leaf found -- Add name to list SeqAlgebra.AddMember (HeapSeq, LocalSeq, Natural (Dictionary.SymbolRef (Data.TheHeap.ListOfComponents (CurrentNode).Name))); --now see if there is a sibling NextNode := Data.TheHeap.ListOfComponents (CurrentNode).NextSibling; end if; if NextNode = NullComponent then NextNode := CurrentNode; loop -- up loop NextNode := Data.TheHeap.ListOfComponents (NextNode).Parent; exit when NextNode = Node; --back to top if Data.TheHeap.ListOfComponents (NextNode).NextSibling /= NullComponent then NextNode := Data.TheHeap.ListOfComponents (NextNode).NextSibling; exit; end if; end loop; end if; exit when NextNode = Node; --entire tree processed CurrentNode := NextNode; end loop; end if; SeqOfLeafNames := LocalSeq; end GetLeaves; ----------------------------------------------------------------------------- procedure AddError (HeapSeq : in out Heap.HeapRecord; TheErrorHeap : in ComponentErrors.HeapOfErrors; Data : in ComponentData; Node : in Component; NewError : in Natural) is ListOfNodesAssociatedWithError : SeqAlgebra.Seq; begin ListOfNodesAssociatedWithError := ComponentErrors.AssociatedComponentNodesOfError (TheErrorHeap, NewError); SeqAlgebra.AddMember (HeapSeq, ListOfNodesAssociatedWithError, Natural (Node)); SeqAlgebra.AddMember (HeapSeq, Data.TheHeap.ListOfComponents (Node).ListOfErrors, NewError); end AddError; ----------------------------------------------------------------------------- function GetListOfErrors (Data : ComponentData; Node : Component) return SeqAlgebra.Seq is begin return Data.TheHeap.ListOfComponents (Node).ListOfErrors; end GetListOfErrors; ----------------------------------------------------------------------------- procedure AddNewListOfErrors (HeapSeq : in out Heap.HeapRecord; Data : in out ComponentData; Node : in Component; NewErrorList : in SeqAlgebra.Seq) is begin SeqAlgebra.DisposeOfSeq (HeapSeq, Data.TheHeap.ListOfComponents (Node).ListOfErrors); Data.TheHeap.ListOfComponents (Node).ListOfErrors := NewErrorList; end AddNewListOfErrors; ----------------------------------------------------------------------------- procedure EmptyListOfErrors (HeapSeq : in out Heap.HeapRecord; Data : in out ComponentData; Node : in Component) is NewErrSeq : SeqAlgebra.Seq; begin SeqAlgebra.CreateSeq (HeapSeq, NewErrSeq); Data.TheHeap.ListOfComponents (Node).ListOfErrors := NewErrSeq; end EmptyListOfErrors; -- New function for use by MergeAndHandleErrors function GetFirstRoot (Data : ComponentData) return Component is begin return Data.TheHeap.FirstRoot; end GetFirstRoot; -- New function for use by MergeAndHandleErrors function GetNextRoot (Data : ComponentData; RootNode : Component) return Component is begin return Data.TheHeap.ListOfComponents (RootNode).NextRoot; end GetNextRoot; procedure ReportUsage (Data : in ComponentData) is begin Statistics.SetTableUsage (Statistics.RecordFields, Integer (Data.TheHeap.HighMark)); end ReportUsage; ----------------------------------------------------------------------------- procedure Dump_Component_Tree (Data : in ComponentData; Node : in Component; Indentation : in Natural) is --# hide Dump_Component_Tree; Current_Child : Component; procedure Print_Sym (Sym : in Dictionary.Symbol) is begin 
if Dictionary.Is_Null_Symbol (Sym) then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Null Symbol", 0); else E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => Dictionary.GetAnyPrefixNeeded (Sym => Sym, Scope => Dictionary.GlobalScope, Separator => ".")); SPARK_IO.Put_Char (SPARK_IO.Standard_Output, '.'); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => Dictionary.GenerateSimpleName (Item => Sym, Separator => ".")); end if; end Print_Sym; begin if CommandLineData.Content.Debug.Components and Node /= NullComponent then -- Print this node for I in Natural range 1 .. Indentation - 1 loop SPARK_IO.Put_String (SPARK_IO.Standard_Output, "| ", 0); end loop; if Indentation >= 1 then SPARK_IO.Put_String (SPARK_IO.Standard_Output, "+--", 0); end if; Print_Sym (Sym => GetName (Data, Node)); SPARK_IO.Put_String (SPARK_IO.Standard_Output, " (", 0); SPARK_IO.Put_Integer (SPARK_IO.Standard_Output, ComponentToRef (Node), 0, 10); SPARK_IO.Put_String (SPARK_IO.Standard_Output, ")", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); -- Find all children Current_Child := GetFirstChild (Data, Node); while Current_Child /= NullComponent loop Dump_Component_Tree (Data => Data, Node => Current_Child, Indentation => Indentation + 1); Current_Child := GetNextSibling (Data, Current_Child); end loop; end if; end Dump_Component_Tree; procedure Dump_All_Component_Trees (Data : in ComponentData) is --# hide Dump_All_Component_Trees; Current_Root : Component; begin if CommandLineData.Content.Debug.Components then Current_Root := GetFirstRoot (Data); while Current_Root /= NullComponent loop Dump_Component_Tree (Data => Data, Node => Current_Root, Indentation => 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "----------------------------", 0); Current_Root := GetNextRoot (Data, Current_Root); end loop; end if; end Dump_All_Component_Trees; end ComponentManager; spark-2012.0.deb/examiner/flowanalyser-flowanalyse-analyserelations-checkdependencies.adb0000644000175000017500000003742211753202336030756 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (FlowAnalyser.FlowAnalyse.AnalyseRelations) procedure CheckDependencies is IntersectionSeq, DependencyCol, RhoCol, RhoRow : SeqAlgebra.Seq; MemberOfInitVars, MemberOfExports : SeqAlgebra.MemberOfSeq; InitVarRep, ExpVarRep : Natural; InitVar, ExportVar : Dictionary.Symbol; ExportLeaves : SeqAlgebra.Seq; MemberOfExportLeaves : SeqAlgebra.MemberOfSeq; ExportLeafRep : Natural; LeafRhoCol : SeqAlgebra.Seq; NewError : Natural; MemberOfDependencyCol : SeqAlgebra.MemberOfSeq; DepRep : Natural; DepSym : Dictionary.Symbol; DepLeaves : SeqAlgebra.Seq; MemberOfRhoCol : SeqAlgebra.MemberOfSeq; RhoSym : Dictionary.Symbol; RhoRep : Natural; RootRhoRep : Natural; PreservedVars : SeqAlgebra.Seq; PreservedExportLeaves : SeqAlgebra.Seq; function IsNonLeafRecord (Sym : Dictionary.Symbol) return Boolean --# global in ComponentData; --# in Dictionary.Dict; is begin return Dictionary.Is_Variable (Sym) and then Dictionary.TypeIsRecord (Dictionary.GetType (Sym)) and then ComponentManager.HasChildren (ComponentData, ComponentManager.GetComponentNode (ComponentData, Sym)); end IsNonLeafRecord; ------------------------ function EmptyIntersection (A, B : SeqAlgebra.Seq) return Boolean --# global in TheHeap; is Empty : Boolean := True; M, N : SeqAlgebra.MemberOfSeq; ValueOfM, ValueOfN : Natural; begin M := SeqAlgebra.FirstMember (TheHeap, A); N := SeqAlgebra.FirstMember (TheHeap, B); loop exit when SeqAlgebra.IsNullMember (M) or SeqAlgebra.IsNullMember (N); ValueOfM := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => M); ValueOfN := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => N); if ValueOfM < ValueOfN then M := SeqAlgebra.NextMember (TheHeap, M); elsif ValueOfM > ValueOfN then N := SeqAlgebra.NextMember (TheHeap, N); else --equal values, intersection is not empty Empty := False; end if; exit when not Empty; end loop; return Empty; end EmptyIntersection; ------------------------ begin --CheckDependencies -- check effectiveness of initializations of package own variables; MemberOfInitVars := SeqAlgebra.FirstMember (TheHeap, SeqOfInitVars); while not SeqAlgebra.IsNullMember (MemberOfInitVars) loop InitVarRep := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => MemberOfInitVars); InitVar := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (InitVarRep)); RelationAlgebra.RowExtraction (TheHeap, IFA_Stack.Top (S).Rho, InitVarRep, RhoRow); SeqAlgebra.Intersection (TheHeap, RhoRow, ExpSeqOfExports, IntersectionSeq); if SeqAlgebra.IsEmptySeq (TheHeap, IntersectionSeq) then if Dictionary.IsSubcomponent (InitVar) then ComponentErrors.CreateError (TheErrorHeap, TheHeap, ComponentErrors.Dependency, ErrorHandler.Dependency_Err_Type'Pos (ErrorHandler.Ineff_Init), EndPosition, Dictionary.NullSymbol, --to get NewError); ComponentManager.AddError (TheHeap, TheErrorHeap, ComponentData, ComponentManager.GetComponentNode (ComponentData, InitVar), NewError); else ErrorHandler.Dependency_Error (Err_Type => ErrorHandler.Ineff_Init, Position => EndPosition, Import_Var_Sym => InitVar, Export_Var_Sym => Dictionary.NullSymbol, Scope => Scope); end if; end if; SeqAlgebra.DisposeOfSeq (TheHeap, RhoRow); SeqAlgebra.DisposeOfSeq (TheHeap, IntersectionSeq); MemberOfInitVars := SeqAlgebra.NextMember (TheHeap, MemberOfInitVars); end loop; --calculate preserved vars for use in implicit record self-dependency SeqAlgebra.Complement (TheHeap, IFA_Stack.Top (S).AllVars, IFA_Stack.Top (S).UnPreservedVars, --to get PreservedVars); 
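-- The function EmptyIntersection above walks its two sequences like a merge of
-- sorted lists, which assumes the two sequences are held in ascending order of
-- member value.  A small worked example (the values are illustrative only):
--
--    A = {3, 7, 12},  B = {5, 7}
--    3 < 5  =>  advance the cursor on A
--    7 > 5  =>  advance the cursor on B
--    7 = 7  =>  common member found, Empty becomes False and the loop exits
--
-- so the intersection of A and B is reported as non-empty.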
MemberOfExports := SeqAlgebra.FirstMember (TheHeap, SeqOfExports); while not SeqAlgebra.IsNullMember (MemberOfExports) loop ExpVarRep := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => MemberOfExports); ExportVar := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (ExpVarRep)); -- Do not process any dependency stuff associated with implicit exports of mode in if Dictionary.GetOwnVariableOrConstituentMode (ExportVar) /= Dictionary.InMode and then -- nor anything to do with exporting of the "data sink" null variable not Dictionary.Is_Null_Variable (ExportVar) and then -- nor updates of implicit in streams associated with protected state not Dictionary.IsImplicitInStream (ExportVar) then RelationAlgebra.ColExtraction (TheHeap, DependencyRelation, SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => MemberOfExports), --to get DependencyCol); if IsNonLeafRecord (ExportVar) then -- 9.3.2 (3) of S.P0466.53.9 SeqAlgebra.CreateSeq (TheHeap, RhoCol); ComponentManager.GetLeaves (TheHeap, ComponentData, ComponentManager.GetComponentNode (ComponentData, ExportVar), --to get ExportLeaves); MemberOfExportLeaves := SeqAlgebra.FirstMember (TheHeap, ExportLeaves); while not SeqAlgebra.IsNullMember (MemberOfExportLeaves) loop ExportLeafRep := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => MemberOfExportLeaves); RelationAlgebra.ColExtraction (TheHeap, IFA_Stack.Top (S).Rho, ExportLeafRep, --to get LeafRhoCol); SeqAlgebra.AugmentSeq (TheHeap, RhoCol, --with LeafRhoCol); SeqAlgebra.DisposeOfSeq (TheHeap, LeafRhoCol); MemberOfExportLeaves := SeqAlgebra.NextMember (TheHeap, MemberOfExportLeaves); end loop; --added this to handle implicit dependency of record on itself SeqAlgebra.Intersection (TheHeap, PreservedVars, ExportLeaves, --to get PreservedExportLeaves); SeqAlgebra.AugmentSeq (TheHeap, RhoCol, --with PreservedExportLeaves); else RelationAlgebra.ColExtraction (TheHeap, IFA_Stack.Top (S).Rho, ExpVarRep, --to get RhoCol); end if; MemberOfDependencyCol := SeqAlgebra.FirstMember (TheHeap, DependencyCol); -- If the spec says "derives Y from X" then there should be an entry in -- the Rho relation for this dependency. If not then flow error 50 is raised. -- This check is not performed in data-flow analysis mode, or if flow=auto -- and there was no derives annotation, because in those cases the 'specified' -- dependency will be a synthesised 'all exports from all imports'. -- -- It *is* possible to have a package symbol here, hence the guards. -- Don't check calculated rho against specified rho if there was no derives -- annotation, unless the subprogram is a function, in which case we can check -- it, even in data-flow mode. 
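-- For illustration only (hypothetical user code, not part of the Examiner; the
-- names Update, X, Y and T are invented), a subprogram annotated as
--
--    procedure Update (X : in T; Y : out T);
--    --# derives Y from X;
--
-- whose body computes Y without ever reading X leaves the (X, Y) pair out of the
-- calculated Rho relation, so the loop below reports that stated dependency with
-- ErrorHandler.Not_Used (the "flow error 50" mentioned above).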
while ((Dictionary.IsFunction (SubprogSym) or else Dictionary.IsProcedure (SubprogSym) or else Dictionary.IsTaskType (SubprogSym)) and then Dictionary.GetHasDerivesAnnotation (SubprogSym)) and then not SeqAlgebra.IsNullMember (MemberOfDependencyCol) loop DepRep := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => MemberOfDependencyCol); DepSym := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (DepRep)); if IsNonLeafRecord (DepSym) then ComponentManager.GetLeaves (TheHeap, ComponentData, ComponentManager.GetComponentNode (ComponentData, DepSym), --to get DepLeaves); if EmptyIntersection (DepLeaves, RhoCol) then ErrorHandler.Dependency_Error (Err_Type => ErrorHandler.Not_Used, Position => EndPosition, Import_Var_Sym => DepSym, Export_Var_Sym => ExportVar, Scope => Scope); end if; else if not SeqAlgebra.IsMember (TheHeap, RhoCol, DepRep) then ErrorHandler.Dependency_Error (Err_Type => ErrorHandler.Not_Used, Position => EndPosition, Import_Var_Sym => DepSym, Export_Var_Sym => ExportVar, Scope => Scope); end if; end if; MemberOfDependencyCol := SeqAlgebra.NextMember (TheHeap, MemberOfDependencyCol); end loop; -- start of 9.3.3 of S.P0468.53.9 SeqAlgebra.Reduction (TheHeap, RhoCol, SeqOfInitVars); MemberOfRhoCol := SeqAlgebra.FirstMember (TheHeap, RhoCol); while not SeqAlgebra.IsNullMember (MemberOfRhoCol) loop RhoRep := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => MemberOfRhoCol); RhoSym := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (RhoRep)); --add guard to prevent dependencies on OUT streams being reported if Dictionary.GetOwnVariableOrConstituentMode (Dictionary.GetMostEnclosingObject (RhoSym)) /= Dictionary.OutMode then if Dictionary.IsSubcomponent (RhoSym) then RootRhoRep := Natural (Dictionary.SymbolRef (ComponentManager.GetName (ComponentData, ComponentManager.GetRoot (ComponentData, ComponentManager.GetComponentNode (ComponentData, RhoSym))))); if not SeqAlgebra.IsMember (TheHeap, DependencyCol, RootRhoRep) then if SeqAlgebra.IsMember (TheHeap, SeqOfImports, RootRhoRep) then ComponentErrors.CreateError (TheErrorHeap, TheHeap, ComponentErrors.Dependency, ErrorHandler.Dependency_Err_Type'Pos (ErrorHandler.May_Be_Used), EndPosition, ExportVar, --to get NewError); else ComponentErrors.CreateError (TheErrorHeap, TheHeap, ComponentErrors.Dependency, ErrorHandler.Dependency_Err_Type'Pos (ErrorHandler.Uninitialised), EndPosition, ExportVar, --to get NewError); end if; ComponentManager.AddError (TheHeap, TheErrorHeap, ComponentData, ComponentManager.GetComponentNode (ComponentData, RhoSym), NewError); end if; else --not a record or an entire record if not SeqAlgebra.IsMember (TheHeap, DependencyCol, RhoRep) then if SeqAlgebra.IsMember (TheHeap, SeqOfImports, RhoRep) then if Dictionary.RelationViolatesInfoFlowPolicy (ExportVar, RhoSym) then ErrorHandler.Dependency_Error (Err_Type => ErrorHandler.May_Be_Integrity_Violation, Position => EndPosition, Import_Var_Sym => RhoSym, Export_Var_Sym => ExportVar, Scope => Scope); else ErrorHandler.Dependency_Error (Err_Type => ErrorHandler.May_Be_Used, Position => EndPosition, Import_Var_Sym => RhoSym, Export_Var_Sym => ExportVar, Scope => Scope); end if; else ErrorHandler.Dependency_Error (Err_Type => ErrorHandler.Uninitialised, Position => EndPosition, Import_Var_Sym => RhoSym, Export_Var_Sym => ExportVar, Scope => Scope); end if; end if; end if; end if; MemberOfRhoCol := SeqAlgebra.NextMember (TheHeap, MemberOfRhoCol); end loop; SeqAlgebra.DisposeOfSeq (TheHeap, DependencyCol); SeqAlgebra.DisposeOfSeq (TheHeap, RhoCol); 
end if; -- ignoring exports of mode in and exports that are null variables (data sinks) MemberOfExports := SeqAlgebra.NextMember (TheHeap, MemberOfExports); end loop; SeqAlgebra.DisposeOfSeq (TheHeap, PreservedVars); end CheckDependencies; spark-2012.0.deb/examiner/sem-compunit-wf_subprogram_body-processpartitionannotation.adb0000644000175000017500000002432211753202336030735 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.CompUnit.Wf_Subprogram_Body) procedure ProcessPartitionAnnotation (Main_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) is Global_Node, Derives_Node : STree.SyntaxNode; Anno_Error : Boolean; function Find_Main_Program_Anno_Node (Node : STree.SyntaxNode) return STree.SyntaxNode --# global in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.main_program_declaration; --# return Result_Node => Syntax_Node_Type (Result_Node, STree.Table) = SP_Symbols.main_program_annotation; is Result : STree.SyntaxNode; begin Result := Child_Node (Current_Node => Node); -- ASSUME Result = inherit_clause OR main_program_annotation if Syntax_Node_Type (Node => Result) = SP_Symbols.inherit_clause then -- ASSUME Result = inherit_clause Result := Next_Sibling (Current_Node => Result); elsif Syntax_Node_Type (Node => Result) /= SP_Symbols.main_program_annotation then Result := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = inherit_clause OR main_program_annotation in Find_Main_Program_Anno_Node"); end if; -- ASSUME Result = main_program_annotation SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Result) = SP_Symbols.main_program_annotation, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Inherit_Node = main_program_annotation in Find_Main_Program_Anno_Node"); return Result; end Find_Main_Program_Anno_Node; function Find_Global_Node (Node : STree.SyntaxNode) return STree.SyntaxNode --# global in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.main_program_declaration; --# return Result_Node => (Syntax_Node_Type (Result_Node, STree.Table) = SP_Symbols.moded_global_definition or --# Result_Node = STree.NullNode); is Result : STree.SyntaxNode; begin Result := Next_Sibling (Current_Node => Find_Main_Program_Anno_Node (Node => Node)); -- ASSUME Result = moded_global_definition OR not_overriding_subprogram_body if Syntax_Node_Type (Node => Result) = SP_Symbols.not_overriding_subprogram_body then -- ASSUME Result = not_overriding_subprogram_body Result := STree.NullNode; elsif Syntax_Node_Type (Node => Result) /= 
SP_Symbols.moded_global_definition then Result := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = moded_global_definition OR not_overriding_subprogram_body in Find_Global_Node"); end if; -- ASSUME Result = moded_global_definition OR NULL SystemErrors.RT_Assert (C => Result = STree.NullNode or else Syntax_Node_Type (Node => Result) = SP_Symbols.moded_global_definition, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = moded_global_definition OR NULL in Find_Global_Node"); return Result; end Find_Global_Node; function Find_Derives_Node (Node : STree.SyntaxNode) return STree.SyntaxNode --# global in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.main_program_declaration; --# return Result_Node => (Syntax_Node_Type (Result_Node, STree.Table) = SP_Symbols.dependency_relation or --# Result_Node = STree.NullNode); is Result : STree.SyntaxNode; begin Result := Find_Global_Node (Node => Node); -- ASSUME Result = moded_global_definition OR NULL if Syntax_Node_Type (Node => Result) = SP_Symbols.moded_global_definition then -- ASSUME Result = moded_global_definition Result := Next_Sibling (Current_Node => Result); -- ASSUME Result = dependency_relation OR not_overriding_subprogram_body if Syntax_Node_Type (Node => Result) = SP_Symbols.not_overriding_subprogram_body then -- ASSUME Result = not_overriding_subprogram_body Result := STree.NullNode; elsif Syntax_Node_Type (Node => Result) /= SP_Symbols.dependency_relation then Result := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = dependency_relation OR not_overriding_subprogram_body in Find_Derives_Node"); end if; elsif Result /= STree.NullNode then Result := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = moded_global_definition OR NULL in Find_Derives_Node"); end if; -- ASSUME Result = dependency_relation OR NULL SystemErrors.RT_Assert (C => Result = STree.NullNode or else Syntax_Node_Type (Node => Result) = SP_Symbols.dependency_relation, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = dependency_relation OR NULL in Find_Derives_Node"); return Result; end Find_Derives_Node; begin -- ProcessPartitionAnnotation Global_Node := Find_Global_Node (Node => Main_Node); -- A partition annotation exists if the Global_Node is not null. 
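-- For illustration only (hypothetical source, with Inputs and Outputs standing
-- for inherited packages), such a partition-wide annotation might look like
--
--    --# global in Inputs.State; out Outputs.State;
--    --# derives Outputs.State from Inputs.State;
--
-- written between the "--# main_program;" annotation and the main subprogram
-- body; Wf_Global_Definition and Wf_Dependency_Relation below check that pair
-- against the partition symbol (Dictionary.GetThePartition).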
-- There must be a partition annotation in Ravenscar and there -- must not be one otherwise if not CommandLineData.Ravenscar_Selected then if Syntax_Node_Type (Node => Global_Node) = SP_Symbols.moded_global_definition then -- unexpected partition annotation ErrorHandler.Semantic_Error (Err_Num => 949, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Global_Node), Id_Str => LexTokenManager.Null_String); end if; else -- Ravenscar IS selected if Global_Node = STree.NullNode then -- missing partition annotation Dictionary.SetSubprogramSignatureNotWellformed (Dictionary.IsAbstract, Dictionary.GetThePartition); ErrorHandler.Semantic_Error (Err_Num => 950, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Find_Main_Program_Anno_Node (Node => Main_Node)), Id_Str => LexTokenManager.Null_String); elsif Syntax_Node_Type (Node => Global_Node) = SP_Symbols.moded_global_definition then -- partition annotation both present and required, so process it -- first the globals Wf_Global_Definition (Node => Global_Node, Scope => Scope, Subprog_Sym => Dictionary.GetThePartition, First_Seen => True, Sem_Err_Found => Anno_Error); if Anno_Error then Dictionary.SetSubprogramSignatureNotWellformed (Dictionary.IsAbstract, Dictionary.GetThePartition); end if; Derives_Node := Find_Derives_Node (Node => Main_Node); -- ASSUME Derives_Node = dependency_relation OR NULL -- now check whether derives is there if Syntax_Node_Type (Node => Derives_Node) = SP_Symbols.dependency_relation then -- ASSUME Derives_Node = dependency_relation if CommandLineData.Content.Flow_Option /= CommandLineData.Data_Flow then -- derives present and required if flow=info (or allowed, if flow=auto) Dependency_Relation.Wf_Dependency_Relation (Node => Derives_Node, Scope => Scope, Subprog_Sym => Dictionary.GetThePartition, First_Seen => True, Glob_Def_Err => Anno_Error, The_Heap => TheHeap); else -- in DFA mode, we ignore the derives and use the moded globals Dependency_Relation.Create_Full_Subprog_Dependency (Node_Pos => Node_Position (Node => Derives_Node), Subprog_Sym => Dictionary.GetThePartition, Abstraction => Dictionary.IsAbstract, The_Heap => TheHeap); ErrorHandler.Semantic_Note (Err_Num => 1, Position => Node_Position (Node => Derives_Node), Id_Str => LexTokenManager.Null_String); end if; elsif Derives_Node = STree.NullNode then -- ASSUME Derives_Node = NULL -- Derives is NOT present if CommandLineData.Content.Flow_Option = CommandLineData.Info_Flow then -- but in IFA mode it should have been ErrorHandler.Semantic_Error (Err_Num => 501, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Global_Node), Id_Str => LexTokenManager.Null_String); else -- not there but ok because flow=data or flow=auto selected Dependency_Relation.Create_Full_Subprog_Dependency (Node_Pos => Node_Position (Node => Global_Node), Subprog_Sym => Dictionary.GetThePartition, Abstraction => Dictionary.IsAbstract, The_Heap => TheHeap); end if; end if; end if; end if; end ProcessPartitionAnnotation; ././@LongLink0000000000000000000000000000014600000000000011566 Lustar rootrootspark-2012.0.deb/examiner/declarations-outputdeclarations-generatedeclarations-generatesuccessors.adbspark-2012.0.deb/examiner/declarations-outputdeclarations-generatedeclarations-generatesuccessors.ad0000644000175000017500000005066511753202336033260 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited 
------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Declarations.OutputDeclarations.GenerateDeclarations) procedure GenerateSuccessors (Heap : in out Cells.Heap_Record; Symbol : in Dictionary.Symbol; Scope : in Dictionary.Scopes; SuccessorList : out Cells.Cell) is SuccList : Cells.Cell; procedure AddSymbol (Sym : in Dictionary.Symbol; List : in out Cells.Cell) --# global in Dictionary.Dict; --# in out Heap; --# in out Statistics.TableUsage; --# derives Heap, --# List, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Heap, --# List, --# Sym; is begin -- Boolean is predefined in FDL, so never need to add it here. -- Also, don't add Universal types, since we're only interested in -- adding successors which are named types at this stage. -- Add everything else to make sure the Pile contains all -- needed declarations. Pile.Insert makes sure there are no -- duplicate entries. if not Dictionary.TypeIsBoolean (Sym) and then not Dictionary.IsUniversalIntegerType (Sym) and then not Dictionary.IsUniversalRealType (Sym) and then not Dictionary.IsUniversalFixedType (Sym) then Pile.Insert (Heap, Sym, Cells.Null_Cell, List); end if; end AddSymbol; ---------------------------------------------------------------------- procedure HandleObject (Sym : in Dictionary.Symbol; List : in out Cells.Cell) --# global in Dictionary.Dict; --# in Scope; --# in out Heap; --# in out Statistics.TableUsage; --# derives Heap, --# List, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Heap, --# List, --# Scope, --# Sym; is It : Dictionary.Iterator; TypeSym : Dictionary.Symbol; begin -- If the Sym is an own variable, local to this package -- which has refinement constituents we need to get the types of the -- constituents in the list of required symbols so that we can print an -- fdl record type declaration if IsLocalOwnVariableWithRefinement (Sym, Scope) then It := Dictionary.FirstConstituent (Sym); while not Dictionary.IsNullIterator (It) loop AddSymbol (Dictionary.GetRootType (Dictionary.GetType (Dictionary.CurrentSymbol (It))), List); It := Dictionary.NextSymbol (It); end loop; end if; TypeSym := Dictionary.GetRootType (Dictionary.GetType (Sym)); if Dictionary.IsOwnVariable (Sym) then TypeSym := Dictionary.GetRootType (Dictionary.GetOwnVariableTypeHere (Sym, Scope)); end if; AddSymbol (TypeSym, List); end HandleObject; ---------------------------------------------------------------------- procedure HandleType (Sym : in Dictionary.Symbol; List : in out Cells.Cell) --# global in AttributeList; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in Scope; --# in out Heap; --# in out Statistics.TableUsage; --# derives Heap, --# List, --# Statistics.TableUsage from *, --# AttributeList, --# 
Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# List, --# Scope, --# Sym; is Typ : Dictionary.Symbol; ---------------------------------- -- Creates a prexix cell, of provided type. procedure CreatePrefixCell (Typ : in Dictionary.Symbol; PrefixCell : out Cells.Cell) --# global in out Heap; --# in out Statistics.TableUsage; --# derives Heap from *, --# Typ & --# PrefixCell from Heap & --# Statistics.TableUsage from *, --# Heap; is LocalPrefixCell : Cells.Cell; begin Cells.Create_Cell (Heap, LocalPrefixCell); Cells.Set_Kind (Heap, LocalPrefixCell, Cell_Storage.Fixed_Var); Cells.Set_Symbol_Value (Heap, LocalPrefixCell, Typ); PrefixCell := LocalPrefixCell; end CreatePrefixCell; ---------------------------------- -- Creates an attribute cell, of provided token. procedure CreateAttributeCell (AttributeToken : in LexTokenManager.Lex_String; AttributeCell : out Cells.Cell) --# global in out Heap; --# in out Statistics.TableUsage; --# derives AttributeCell from Heap & --# Heap from *, --# AttributeToken & --# Statistics.TableUsage from *, --# Heap; is LocalAttributeCell : Cells.Cell; begin Cells.Create_Cell (Heap, LocalAttributeCell); Cells.Set_Kind (Heap, LocalAttributeCell, Cell_Storage.Attrib_Value); Cells.Set_Lex_Str (Heap, LocalAttributeCell, AttributeToken); AttributeCell := LocalAttributeCell; end CreateAttributeCell; ---------------------------------- -- Creates a tick cell. procedure CreateTickCell (TickCell : out Cells.Cell) --# global in out Heap; --# in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Heap & --# TickCell from Heap; is LocalTickCell : Cells.Cell; begin Cells.Create_Cell (Heap, LocalTickCell); Cells.Set_Kind (Heap, LocalTickCell, Cell_Storage.Op); Cells.Set_Op_Symbol (Heap, LocalTickCell, SP_Symbols.apostrophe); TickCell := LocalTickCell; end CreateTickCell; ---------------------------------- -- This procedure re-factored and made visible here, so can be -- called from HandleRecordType below. 
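-- As a concrete but purely illustrative example: for a hypothetical declaration
--
--    type Index is range 1 .. 10;
--
-- the attribute cells built by AddSizeAttribute and, for scalar types,
-- AddScalarTypeToGetBoundsAttributes below stand for the references Index'Size,
-- Index'First, Index'Last, Index'Base'First and Index'Base'Last, ensuring that
-- the corresponding FDL constant declarations and rules are generated even when
-- the VCs themselves never mention those attributes.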
procedure AddSizeAttribute (Typ : in Dictionary.Symbol) --# global in AttributeList; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Heap; --# in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# AttributeList, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Typ; is PrefixCell, AttributeCell, TickCell : Cells.Cell; begin -- Generate and add cell for: 'size CreatePrefixCell (Typ, PrefixCell); CreateAttributeCell (LexTokenManager.Size_Token, AttributeCell); CreateTickCell (TickCell); Cells.Set_A_Ptr (Heap, TickCell, PrefixCell); Cells.Set_B_Ptr (Heap, TickCell, AttributeCell); AddAttribute (Heap, TickCell); end AddSizeAttribute; ---------------------------------- -- Adds the type and the root type of each field of the given record type procedure HandleRecordType (Typ : in Dictionary.Symbol; List : in out Cells.Cell) --# global in Dictionary.Dict; --# in out Heap; --# in out Statistics.TableUsage; --# derives Heap, --# List, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Heap, --# List, --# Typ; is ComponentIt : Dictionary.Iterator; begin ComponentIt := Dictionary.FirstRecordComponent (Typ); while not Dictionary.IsNullIterator (ComponentIt) loop AddSymbol (Dictionary.GetRootType (Dictionary.GetType (Dictionary.CurrentSymbol (ComponentIt))), List); AddSymbol (Dictionary.GetType (Dictionary.CurrentSymbol (ComponentIt)), List); ComponentIt := Dictionary.NextSymbol (ComponentIt); end loop; end HandleRecordType; ---------------------------------- -- Adds the type and the root type of each index of the given array type, followed -- by the type and the root type of the array element. procedure HandleArrayType (Typ : in Dictionary.Symbol; List : in out Cells.Cell) --# global in Dictionary.Dict; --# in out Heap; --# in out Statistics.TableUsage; --# derives Heap, --# List, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Heap, --# List, --# Typ; is IndexIt : Dictionary.Iterator; begin IndexIt := Dictionary.FirstArrayIndex (Typ); while not Dictionary.IsNullIterator (IndexIt) loop AddSymbol (Dictionary.GetRootType (Dictionary.CurrentSymbol (IndexIt)), List); AddSymbol (Dictionary.CurrentSymbol (IndexIt), List); IndexIt := Dictionary.NextSymbol (IndexIt); end loop; AddSymbol (Dictionary.GetRootType (Dictionary.GetArrayComponent (Typ)), List); AddSymbol (Dictionary.GetArrayComponent (Typ), List); end HandleArrayType; ---------------------------------- -- The following adds Typ'First and Typ'Last of the given scalar type -- into the AttributeList so that we get fdl constant declarations -- and rules for them later on (even if they don't appear in the VC file -- itself). For Modular types, Typ'Modulus is also added. For base types, -- Typ'Base'First and Typ'Base'Last are added. procedure AddScalarTypeToGetBoundsAttributes (Typ : in Dictionary.Symbol) --# global in AttributeList; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Heap; --# in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# AttributeList, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Typ; is PrefixCell, BasePrefixCell, AttributeCell, TickCell : Cells.Cell; -- Creates a base prefix cell, of provided type. -- Base types are indicated with a prefix cell of the correct type that -- has an A part that points to a prefix cell of the same type. Here -- the base prefix cell is calculated from and makes use of the provided -- prefix cell.
procedure CreateBasePrefixCell (PrefixCell : in Cells.Cell; BasePrefixCell : out Cells.Cell) --# global in out Heap; --# in out Statistics.TableUsage; --# derives BasePrefixCell from Heap & --# Heap from *, --# PrefixCell & --# Statistics.TableUsage from *, --# Heap; is Typ : Dictionary.Symbol; LocalBasePrefixCell : Cells.Cell; begin -- Get the type of the prefix cell. Typ := Cells.Get_Symbol_Value (Heap, PrefixCell); Cells.Create_Cell (Heap, LocalBasePrefixCell); Cells.Set_Kind (Heap, LocalBasePrefixCell, Cell_Storage.Op); Cells.Set_Symbol_Value (Heap, LocalBasePrefixCell, Typ); -- Join to prefix cell. Cells.Set_A_Ptr (Heap, LocalBasePrefixCell, PrefixCell); BasePrefixCell := LocalBasePrefixCell; end CreateBasePrefixCell; begin -- All of the attributes added are for the same type, and thus may -- share the same prefix cell. CreatePrefixCell (Typ, PrefixCell); -- Generate and add cell for: 'first CreateAttributeCell (LexTokenManager.First_Token, AttributeCell); CreateTickCell (TickCell); Cells.Set_A_Ptr (Heap, TickCell, PrefixCell); Cells.Set_B_Ptr (Heap, TickCell, AttributeCell); AddAttribute (Heap, TickCell); -- Generate and add cell for: 'last CreateAttributeCell (LexTokenManager.Last_Token, AttributeCell); CreateTickCell (TickCell); Cells.Set_A_Ptr (Heap, TickCell, PrefixCell); Cells.Set_B_Ptr (Heap, TickCell, AttributeCell); AddAttribute (Heap, TickCell); -- As all of the base attributes are of the same type, they may -- share the same base prefix cell. CreateBasePrefixCell (PrefixCell, BasePrefixCell); -- Generate and add cell for: 'base'first CreateAttributeCell (LexTokenManager.First_Token, AttributeCell); CreateTickCell (TickCell); Cells.Set_A_Ptr (Heap, TickCell, BasePrefixCell); Cells.Set_B_Ptr (Heap, TickCell, AttributeCell); AddAttribute (Heap, TickCell); -- Generate and add cell for: 'base'last CreateAttributeCell (LexTokenManager.Last_Token, AttributeCell); CreateTickCell (TickCell); Cells.Set_A_Ptr (Heap, TickCell, BasePrefixCell); Cells.Set_B_Ptr (Heap, TickCell, AttributeCell); AddAttribute (Heap, TickCell); -- If this is a modular type, also add 'modulus if Dictionary.TypeIsModular (Typ) then -- Generate and add cell for: 'modulus CreateAttributeCell (LexTokenManager.Modulus_Token, AttributeCell); CreateTickCell (TickCell); Cells.Set_A_Ptr (Heap, TickCell, PrefixCell); Cells.Set_B_Ptr (Heap, TickCell, AttributeCell); AddAttribute (Heap, TickCell); end if; end AddScalarTypeToGetBoundsAttributes; begin -- HandleType; if Dictionary.IsPrivateType (Sym, Scope) then -- For a private type Sym, we normally produce no successors, other -- than Sym'Size and the case of a private tagged extension, where we need -- to produce a declaration of the type of the inherited component -- only. AddSizeAttribute (Sym); if Dictionary.TypeIsExtendedTagged (Sym) then AddSymbol (Dictionary.GetRootOfExtendedType (Sym), List); end if; elsif not Dictionary.IsUnknownTypeMark (Sym) and then not Dictionary.TypeIsOwnAbstractHere (Sym, Scope) then -- if not abstract view of own variable type, -- then produce successors Typ := Dictionary.GetRootType (Sym); if Dictionary.IsScalarTypeMark (Typ, Scope) then AddScalarTypeToGetBoundsAttributes (Typ); AddSymbol (Typ, List); AddSizeAttribute (Typ); -- If Sym denotes a scalar subtype, then we need attributes -- for it as well.
if Sym /= Typ then AddScalarTypeToGetBoundsAttributes (Sym); AddSymbol (Sym, List); AddSizeAttribute (Sym); end if; elsif Dictionary.TypeIsAbstractProof (Typ) then AddSymbol (Typ, List); elsif Dictionary.IsRecordTypeMark (Typ, Scope) then HandleRecordType (Typ, List); AddSizeAttribute (Typ); -- If Sym denotes a subtype, then add a Size attribute for -- it as well. if Typ /= Sym then AddSizeAttribute (Sym); end if; elsif Dictionary.IsArrayTypeMark (Typ, Scope) then HandleArrayType (Typ, List); -- If Sym denotes a subtype, then add a Size attribute for -- it as well. if Typ /= Sym then AddSizeAttribute (Sym); end if; end if; end if; end HandleType; ---------------------------------------------------------------------- procedure HandleSubprogram (Sym : in Dictionary.Symbol; List : in out Cells.Cell) --# global in Dictionary.Dict; --# in Scope; --# in out Heap; --# in out Statistics.TableUsage; --# derives Heap, --# List, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Heap, --# List, --# Scope, --# Sym; is ParameterIt : Dictionary.Iterator; ParamSym : Dictionary.Symbol; begin if Dictionary.IsFunction (Sym) then AddSymbol (Dictionary.GetRootType (Dictionary.GetType (Sym)), List); end if; ParameterIt := Dictionary.FirstSubprogramParameter (Sym); while not Dictionary.IsNullIterator (ParameterIt) loop ParamSym := Dictionary.CurrentSymbol (ParameterIt); if Dictionary.Is_Variable (ParamSym) and then Dictionary.IsOwnVariable (ParamSym) then AddSymbol (Dictionary.GetRootType (Dictionary.GetOwnVariableTypeHere (ParamSym, Scope)), List); else AddSymbol (Dictionary.GetRootType (Dictionary.GetType (ParamSym)), List); end if; ParameterIt := Dictionary.NextSymbol (ParameterIt); end loop; end HandleSubprogram; ---------------------------------------------------------------------- procedure HandleRecordComponent (Sym : in Dictionary.Symbol; List : in out Cells.Cell) --# global in Dictionary.Dict; --# in out Heap; --# in out Statistics.TableUsage; --# derives Heap, --# List, --# Statistics.TableUsage from *, --# Dictionary.Dict, --# Heap, --# List, --# Sym; is begin AddSymbol (Dictionary.GetRecordType (Sym), List); end HandleRecordComponent; begin -- GenerateSuccessors; SuccList := Cells.Null_Cell; if Dictionary.Is_Variable (Symbol) or else Dictionary.Is_Constant (Symbol) or else Dictionary.IsEnumerationLiteral (Symbol) or else Dictionary.IsKnownDiscriminant (Symbol) then HandleObject (Symbol, SuccList); elsif Dictionary.IsTypeMark (Symbol) then HandleType (Symbol, SuccList); elsif Dictionary.IsProcedure (Symbol) or Dictionary.IsFunction (Symbol) then HandleSubprogram (Symbol, SuccList); elsif Dictionary.IsRecordComponent (Symbol) then HandleRecordComponent (Symbol, SuccList); end if; SuccessorList := SuccList; end GenerateSuccessors; spark-2012.0.deb/examiner/stmtstack.ads0000644000175000017500000000636711753202336016773 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- StmtStack -- -- Purpose: -- StmtStack is an Abstract State Machine that provides a simple -- stack of entries. Each entry contains a pair of values: -- -- StmtNumber - the number of a BPG statement node, with -- ZERO indicating a null entry, and a positive -- number interpreted as a value of type Graph.MatrixIndex -- -- Kind - the class of statement, as defined by ArcKind below. -- -- Clients: -- The StmtStack is used by the (iterative) traversal algorithm in -- DAG.BuildGraph to keep track of the nesting of statement forms in the -- syntax tree as the BPG is produced. -- -- Use: -- See DAG.BuildGraph -- -- Extension: -- None planned at present. -------------------------------------------------------------------------------- with ExaminerConstants; --# inherit ExaminerConstants, --# SystemErrors; package StmtStack --# own S : StmtStacks; --# initializes S; is type ArcKind is ( Elementary, IfStart, IfTrueBranch, IfFalseBranch, CaseStart, CaseBranch, CaseExit, LoopStart, LoopExit); type StmtRecord is record StmtNmbr : Natural; Kind : ArcKind; end record; subtype StmtRange is Integer range 0 .. ExaminerConstants.StmtStackSize; type StmtVector is array (StmtRange) of StmtRecord; type StmtStacks is record Vector : StmtVector; Pointer : StmtRange; end record; function IsEmpty return Boolean; --# global in S; function Top return StmtRecord; --# global in S; procedure Clear; --# global out S; --# derives S from ; -- Pop, but fatal error if underflow procedure Pop; --# global in out S; --# derives S from *; -- Push, but fatal error if overflow procedure Push (R : in StmtRecord); --# global in out S; --# derives S from *, --# R; -- Print the current state of the stack to Standard_Output via Debug package, -- preceding by the given Msg procedure Dump_Stack (Msg : in String); --# global in S; --# derives null from Msg, --# S; end StmtStack; spark-2012.0.deb/examiner/mainloop.ads0000644000175000017500000001625411753202336016570 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= --# inherit Casing, --# CommandLineData, --# ConfigFile, --# ContextManager, --# ContextManager.Ops, --# Declarations, --# Dictionary, --# ErrorHandler, --# Error_Types, --# E_Strings, --# FileSystem, --# File_Utils, --# Graph, --# IndexManager, --# LexTokenLists, --# LexTokenManager, --# MetaFile, --# RequiredUnits, --# ScreenEcho, --# Sem, --# SLI, --# SparkHTML, --# SparkLex, --# SPARK_IO, --# SPParser, --# Statistics, --# StmtStack, --# STree, --# VCG, --# XMLReport; package MainLoop is procedure Process_Files; --# global in out CommandLineData.Content; --# in out ConfigFile.State; --# in out ContextManager.Ops.File_Heap; --# in out ContextManager.Ops.Unit_Heap; --# in out ContextManager.Ops.Unit_Stack; --# in out Declarations.State; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out IndexManager.State; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SparkHTML.Generate_HTML; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# in out STree.Table; --# in out VCG.Invoked; --# out Dictionary.Dict; --# out Sem.State; --# out SparkHTML.HTML_Work_Dir; --# out SparkHTML.SPARK_Work_Dir; --# out SparkLex.Curr_Line; --# out XMLReport.State; --# derives CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Declarations.State, --# ErrorHandler.Error_Context, --# Graph.Table, --# IndexManager.State, --# LexTokenManager.State, --# SLI.State, --# Statistics.TableUsage, --# StmtStack.S, --# STree.Table, --# VCG.Invoked from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SLI.State, --# SparkHTML.Generate_HTML, --# SPARK_IO.File_Sys, --# STree.Table & --# ConfigFile.State from *, --# CommandLineData.Content, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# Dictionary.Dict, --# SparkHTML.Generate_HTML from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SLI.State, --# SparkHTML.Generate_HTML, --# SPARK_IO.File_Sys, --# Statistics.TableUsage, --# STree.Table & --# Sem.State, --# SparkLex.Curr_Line, --# XMLReport.State from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SLI.State, --# SparkHTML.Generate_HTML, --# SPARK_IO.File_Sys, --# STree.Table & --# SparkHTML.HTML_Work_Dir, --# SparkHTML.SPARK_Work_Dir from CommandLineData.Content, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# ErrorHandler.Error_Context, --# IndexManager.State, --# LexTokenManager.State, --# SLI.State, --# SparkHTML.Generate_HTML, --# Statistics.TableUsage, --# STree.Table, --# VCG.Invoked; end MainLoop; spark-2012.0.deb/examiner/sem-check_no_overloading_from_tagged_ops-successfully_overrides.adb0000644000175000017500000002236311753202336031676 0ustar 
eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Check_No_Overloading_From_Tagged_Ops) function Successfully_Overrides (Root_Subprog, Second_Subprog, Actual_Tagged_Parameter_Type : Dictionary.Symbol) return Boolean is function Subtype_Bounds_Statically_Match (First_Subtype, Second_Subtype : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; --# in LexTokenManager.State; -- pre Dictionary.GetType (First_Subtype) = Dictionary.GetType (Second_Subtype); is Result : Boolean; function Scalar_Bounds_Match (Src_Sym, Tgt_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; --# in LexTokenManager.State; is Result : Boolean; begin Result := LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Dictionary.GetScalarAttributeValue (False, LexTokenManager.First_Token, Src_Sym), Lex_Str2 => Dictionary.GetScalarAttributeValue (False, LexTokenManager.First_Token, Tgt_Sym)) = LexTokenManager.Str_Eq and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Dictionary.GetScalarAttributeValue (False, LexTokenManager.Last_Token, Src_Sym), Lex_Str2 => Dictionary.GetScalarAttributeValue (False, LexTokenManager.Last_Token, Tgt_Sym)) = LexTokenManager.Str_Eq; return Result; end Scalar_Bounds_Match; begin -- Subtype_Bounds_Statically_Match if Dictionary.TypeIsScalar (First_Subtype) then Result := Scalar_Bounds_Match (Src_Sym => First_Subtype, Tgt_Sym => Second_Subtype); elsif Dictionary.TypeIsArray (First_Subtype) then Result := Indexes_Match (Target => First_Subtype, Source => Second_Subtype); elsif Dictionary.TypeIsRecord (First_Subtype) then Result := True; elsif Dictionary.TypeIsPrivate (TheType => First_Subtype) then Result := True; else Result := False; -- unexpected case, above should trap everything end if; return Result; end Subtype_Bounds_Statically_Match; function Same_Type (First_Subtype, Second_Subtype : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is FirstType, SecondType : Dictionary.Symbol; begin if Dictionary.IsType (First_Subtype) then FirstType := First_Subtype; else -- compare parent types FirstType := Dictionary.GetType (First_Subtype); end if; if Dictionary.IsType (Second_Subtype) then SecondType := Second_Subtype; else -- compare parent types SecondType := Dictionary.GetType (Second_Subtype); end if; return Dictionary.Types_Are_Equal (Left_Symbol => FirstType, Right_Symbol => SecondType, Full_Range_Subtype => False); end Same_Type; function Both_Procedures (Root_Subprog, Second_Subprog : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return Dictionary.IsProcedure 
(Root_Subprog) and then Dictionary.IsProcedure (Second_Subprog); end Both_Procedures; function Both_Functions (Root_Subprog, Second_Subprog : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; --# in LexTokenManager.State; is Result : Boolean; begin Result := Dictionary.IsFunction (Root_Subprog) and then Dictionary.IsFunction (Second_Subprog) and then Same_Type (First_Subtype => Dictionary.GetType (Root_Subprog), Second_Subtype => Dictionary.GetType (Second_Subprog)) and then Subtype_Bounds_Statically_Match (First_Subtype => Dictionary.GetType (Root_Subprog), Second_Subtype => Dictionary.GetType (Second_Subprog)); return Result; end Both_Functions; function Have_Same_Number_Of_Parameters (Root_Subprog, Second_Subprog : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return Dictionary.GetNumberOfSubprogramParameters (Root_Subprog) = Dictionary.GetNumberOfSubprogramParameters (Second_Subprog); end Have_Same_Number_Of_Parameters; function Parameter_Types_Ok (Root_Param, Second_Param, Actual_Tagged_Parameter_Type : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; --# in LexTokenManager.State; is Root_Param_Type, Second_Param_Type : Dictionary.Symbol; Inheritance_In_Force, Result : Boolean; begin Root_Param_Type := Dictionary.GetType (Root_Param); Second_Param_Type := Dictionary.GetType (Second_Param); Inheritance_In_Force := Dictionary.Types_Are_Equal (Left_Symbol => Second_Param_Type, Right_Symbol => Actual_Tagged_Parameter_Type, Full_Range_Subtype => False); Result := (Inheritance_In_Force and then Dictionary.IsAnExtensionOf (Root_Param_Type, Second_Param_Type)) or else (not Inheritance_In_Force and then Same_Type (First_Subtype => Root_Param_Type, Second_Subtype => Second_Param_Type) and then Subtype_Bounds_Statically_Match (First_Subtype => Root_Param_Type, Second_Subtype => Second_Param_Type)); return Result; end Parameter_Types_Ok; function Modes_Match (Root_Param, Second_Param : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is Root_Mode, Second_Mode : Dictionary.Modes; begin Root_Mode := Dictionary.GetSubprogramParameterMode (Root_Param); Second_Mode := Dictionary.GetSubprogramParameterMode (Second_Param); return Root_Mode = Second_Mode or else (Root_Mode = Dictionary.InMode and then Second_Mode = Dictionary.DefaultMode) or else (Second_Mode = Dictionary.InMode and then Root_Mode = Dictionary.DefaultMode); end Modes_Match; function Valid_Type_Symbol (The_Subtype : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return not Dictionary.Is_Null_Symbol (The_Subtype) and then not Dictionary.IsUnknownTypeMark (The_Subtype); end Valid_Type_Symbol; function Parameters_Match (Root_Subprog, Second_Subprog, Actual_Tagged_Parameter_Type : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; --# in LexTokenManager.State; is Result : Boolean := True; Root_Param, Second_Param : Dictionary.Symbol; Number_Of_Subprogram_Parameters : Natural; begin Number_Of_Subprogram_Parameters := Dictionary.GetNumberOfSubprogramParameters (Root_Subprog); for I in Natural range 1 .. 
Number_Of_Subprogram_Parameters loop Root_Param := Dictionary.GetSubprogramParameter (Root_Subprog, I); Second_Param := Dictionary.GetSubprogramParameter (Second_Subprog, I); if not (Valid_Type_Symbol (The_Subtype => Dictionary.GetType (Root_Param)) and then Valid_Type_Symbol (The_Subtype => Dictionary.GetType (Second_Param)) and then Parameter_Types_Ok (Root_Param => Root_Param, Second_Param => Second_Param, Actual_Tagged_Parameter_Type => Actual_Tagged_Parameter_Type) and then Modes_Match (Root_Param => Root_Param, Second_Param => Second_Param)) then Result := False; exit; end if; end loop; return Result; end Parameters_Match; begin -- Successfully_Overrides return (Both_Procedures (Root_Subprog => Root_Subprog, Second_Subprog => Second_Subprog) or else Both_Functions (Root_Subprog => Root_Subprog, Second_Subprog => Second_Subprog)) and then Have_Same_Number_Of_Parameters (Root_Subprog => Root_Subprog, Second_Subprog => Second_Subprog) and then Parameters_Match (Root_Subprog => Root_Subprog, Second_Subprog => Second_Subprog, Actual_Tagged_Parameter_Type => Actual_Tagged_Parameter_Type); end Successfully_Overrides; spark-2012.0.deb/examiner/sem-wf_context_clause_package_body.adb0000644000175000017500000002040211753202336023705 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem) procedure Wf_Context_Clause_Package_Body (Node : in STree.SyntaxNode; Comp_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; With_Public_Child : out Boolean) is Next_Node : STree.SyntaxNode; Has_Own_Public_Child : Boolean; ----------------------------- procedure With_Clause (Node : in STree.SyntaxNode; Comp_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; With_Public_Child : out Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table, --# With_Public_Child from CommandLineData.Content, --# Comp_Sym, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Comp_Sym, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.with_clause; --# post STree.Table = STree.Table~; is separate; procedure Use_Clause (Node : in STree.SyntaxNode; Comp_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# Comp_Sym, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Comp_Sym, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.use_type_clause; --# post STree.Table = STree.Table~; is separate; begin -- Wf_Context_Clause Next_Node := Child_Node (Current_Node => Node); -- ASSUME Next_Node = with_clause SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Next_Node) = SP_Symbols.with_clause, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = with_clause in Wf_Context_Clause"); With_Clause (Node => Next_Node, Comp_Sym => Comp_Sym, Scope => Scope, With_Public_Child => With_Public_Child); Next_Node := Next_Sibling (Current_Node => Next_Node); -- ASSUME Next_Node = context_clause_rep SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Next_Node) = SP_Symbols.context_clause_rep, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = context_clause_rep in Wf_Context_Clause"); if Syntax_Node_Type (Node => Child_Node (Current_Node => Next_Node)) = SP_Symbols.context_clause_rep then -- ASSUME Child_Node (Current_Node => Next_Node) = context_clause_rep -- there is one or more further with clause or usetype clause to process Next_Node := Next_Sibling (Current_Node => Last_Child_Of (Start_Node => Next_Node)); loop --# assert STree.Table = STree.Table~; -- ASSUME Next_Node = with_clause OR use_type_clause OR pragma_rep SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Next_Node) 
= SP_Symbols.with_clause or else Syntax_Node_Type (Node => Next_Node) = SP_Symbols.use_type_clause or else Syntax_Node_Type (Node => Next_Node) = SP_Symbols.pragma_rep, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = with_clause OR use_type_clause OR pragma_rep in Wf_Context_Clause"); exit when Syntax_Node_Type (Node => Next_Node) = SP_Symbols.pragma_rep; -- ASSUME Next_Node = with_clause OR use_type_clause if Syntax_Node_Type (Node => Next_Node) = SP_Symbols.with_clause then -- ASSUME Next_Node = with_clause With_Clause (Node => Next_Node, Comp_Sym => Comp_Sym, Scope => Scope, With_Public_Child => Has_Own_Public_Child); With_Public_Child := With_Public_Child or else Has_Own_Public_Child; elsif Syntax_Node_Type (Node => Next_Node) = SP_Symbols.use_type_clause then -- ASSUME Next_Node = use_type_clause Use_Clause (Node => Next_Node, Comp_Sym => Comp_Sym, Scope => Scope); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = with_clause OR use_type_clause in Wf_Context_Clause"); end if; Next_Node := Next_Sibling (Current_Node => Parent_Node (Current_Node => Next_Node)); end loop; elsif Child_Node (Current_Node => Next_Node) /= STree.NullNode then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child_Node (Current_Node => Next_Node) = context_clause_rep OR NULL in Wf_Context_Clause"); end if; end Wf_Context_Clause_Package_Body; spark-2012.0.deb/examiner/cells.ads0000644000175000017500000003357011753202335016053 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- Cells -- -- Purpose: -- -- This package provides an ADT for a Heap of Cells. -- -- The heap is used to build the primary linked -- data structures used by the VCG, including DAGs -- representing FDL expressions, predicate/action pairs, -- stacks of cells, and so on. -- -- Clients: -- -- The Examiner's VC Generator components use this package to build -- higher-level ADTs (CStacks, CLists, Pile, Pairs, Labels, Graph, -- Structures). The DAGs representing FDL are built by the DAG -- package and its subunits, in particular DAG.BuildGraph, DAG.BuildExpnDAG, -- and DAG.BuildAnnotationExpnDAG. -- -- The Graph package sits at the top of this tree, and represents the -- Basic-Path Graph of a subprogram during VC Generation. -- -- Use: -- -- A client should first declare and Initialize a Cells.Heap_Record object. -- -- Cells are then allocated using Create_Cell. Attributes can be set using -- the various Set_* procedures below.
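-- -- For example, a minimal client sketch (The_Heap and C are hypothetical names; the operations are those declared below): -- Cells.Initialize (Heap => The_Heap); -- Cells.Create_Cell (Heap => The_Heap, CellName => C); -- Cells.Set_Kind (Heap => The_Heap, CellName => C, KindConst => Cell_Storage.Reference); -- Cells.Set_Natural_Value (Heap => The_Heap, CellName => C, Value => 1); -- where The_Heap is of type Cells.Heap_Record and C is of type Cells.Cell.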
-- -- Extension: -- None planned. -------------------------------------------------------------------------------- with Cell_Storage; with Dictionary; with LexTokenManager; with SP_Symbols; use type Cell_Storage.Cell; use type Cell_Storage.Cell_Kind; --# inherit Cell_Storage, --# Dictionary, --# ExaminerConstants, --# LexTokenManager, --# SP_Symbols, --# Statistics, --# SystemErrors; package Cells is -- See package Cell_Storage for description -- of these types subtype Cell_Kind is Cell_Storage.Cell_Kind; subtype Cell is Cell_Storage.Cell; Null_Cell : constant Cell := Cell'First; subtype Cell_Rank is Cell_Storage.Cell_Rank; Unknown_Rank : constant Cell_Rank := Cell_Rank'First; -- A collection from which Cells may be allocated type Heap_Record is private; --================================================================= -- Constructor procedure for Heap_Record --================================================================= procedure Initialize (Heap : out Heap_Record); --# derives Heap from ; --================================================================= -- Accessor functions that yield useful properties of a Heap_Record -- and Cells -- -- Note - these are declared here, so may be used later in -- pre- and post-conditions --================================================================= function Are_Identical (Cell_1, Cell_2 : Cell) return Boolean; --# return Cell_1 = Cell_2; -- returns the value of the Kind attribute for CellName function Get_Kind (Heap : Heap_Record; CellName : Cell) return Cell_Kind; -- Returns the value of the Free attribute for CellName function Is_Free (Heap : Heap_Record; CellName : Cell) return Boolean; function Is_Null_Cell (CellName : Cell) return Boolean; --# return CellName = Null_Cell; function Is_Const_Cell (Heap : Heap_Record; CellName : Cell) return Boolean; --# return Get_Kind (Heap, CellName) = Cell_Storage.Manifest_Const or --# Get_Kind (Heap, CellName) = Cell_Storage.Named_Const; function Is_Reference_Cell (Heap : Heap_Record; CellName : Cell) return Boolean; --# return Get_Kind (Heap, CellName) = Cell_Storage.Reference; -- Returns Cell_Name as a numeric value. Can be used -- to print the value of a Cell object e.g. for debugging. function Cell_Ref (Cell_Name : in Cell) return Natural; --================================================================= -- Constructor and Accessor functions for each Cell attribute --================================================================= -- Get_Kind is declared above procedure Set_Kind (Heap : in out Heap_Record; CellName : in Cell; KindConst : in Cell_Kind); --# derives Heap from *, --# CellName, --# KindConst; --# post Get_Kind (Heap, CellName) = KindConst; -------------------------------------------------------------- -- Each Cell has three attributes that may be used to -- refer to other Cells. These are called A_Ptr, B_Ptr -- and C_Ptr respectively. 
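-- For example (sketch; Parent and Child are hypothetical cells previously allocated with Create_Cell): after Set_A_Ptr (Heap, Parent, Child), the call Get_A_Ptr (Heap, Parent) returns Child, as the postconditions below state.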
function Get_A_Ptr (Heap : Heap_Record; CellName : Cell) return Cell; function Get_B_Ptr (Heap : Heap_Record; CellName : Cell) return Cell; function Get_C_Ptr (Heap : Heap_Record; CellName : Cell) return Cell; procedure Set_A_Ptr (Heap : in out Heap_Record; Cell_1, Cell_2 : in Cell); --# derives Heap from *, --# Cell_1, --# Cell_2; --# post Get_A_Ptr (Heap, Cell_1) = Cell_2; procedure Set_B_Ptr (Heap : in out Heap_Record; Cell_1, Cell_2 : in Cell); --# derives Heap from *, --# Cell_1, --# Cell_2; --# post Get_B_Ptr (Heap, Cell_1) = Cell_2; procedure Set_C_Ptr (Heap : in out Heap_Record; Cell_1, Cell_2 : in Cell); --# derives Heap from *, --# Cell_1, --# Cell_2; --# post Get_C_Ptr (Heap, Cell_1) = Cell_2; -------------------------------------------------------------- -- The "Value" attribute of a Cell can either be set to -- a Natural or a Dictionary.Symbol type, using the -- procedures below. It is entirely up to the caller -- to remember which type has been set (and therefore which -- type to subsequently access) depending on the context. function Get_Natural_Value (Heap : Heap_Record; CellName : Cell) return Natural; procedure Set_Natural_Value (Heap : in out Heap_Record; CellName : in Cell; Value : in Natural); --# derives Heap from *, --# CellName, --# Value; --# post Get_Natural_Value (Heap, CellName) = Value; function Get_Symbol_Value (Heap : Heap_Record; CellName : Cell) return Dictionary.Symbol; procedure Set_Symbol_Value (Heap : in out Heap_Record; CellName : in Cell; Sym : in Dictionary.Symbol); --# derives Heap from *, --# CellName, --# Sym; --# post Get_Symbol_Value (Heap, CellName) = Sym; -------------------------------------------------------------- function Get_Rank (Heap : Heap_Record; CellName : Cell) return Cell_Rank; procedure Set_Rank (Heap : in out Heap_Record; CellName : in Cell; Rank : in Cell_Rank); --# derives Heap from *, --# CellName, --# Rank; --# post Get_Rank (Heap, CellName) = Rank; -------------------------------------------------------------- function Get_Copy (Heap : Heap_Record; CellName : Cell) return Cell; -- There is no Set_Copy operation for the Copy attribute. -- This is instead set by the Copy_Contents and -- Create_Copy operations below. 
-------------------------------------------------------------- function Get_Op_Symbol (Heap : Heap_Record; CellName : Cell) return SP_Symbols.SP_Symbol; procedure Set_Op_Symbol (Heap : in out Heap_Record; CellName : in Cell; Sym : in SP_Symbols.SP_Symbol); --# derives Heap from *, --# CellName, --# Sym; --# post Get_Op_Symbol (Heap, CellName) = Sym; -------------------------------------------------------------- function Get_Lex_Str (Heap : Heap_Record; CellName : Cell) return LexTokenManager.Lex_String; procedure Set_Lex_Str (Heap : in out Heap_Record; CellName : in Cell; Str : in LexTokenManager.Lex_String); --# derives Heap from *, --# CellName, --# Str; --# post Get_Lex_Str (Heap, CellName) = Str; -------------------------------------------------------------- function Get_Assoc_Var (Heap : in Heap_Record; CellName : in Cell) return Dictionary.Symbol; procedure Set_Assoc_Var (Heap : in out Heap_Record; CellName : in Cell; VarSym : in Dictionary.Symbol); --# derives Heap from *, --# CellName, --# VarSym; --# post Get_Assoc_Var (Heap, CellName) = VarSym; -------------------------------------------------------------- -- Returns the value of the Boolean Marked attribute for CellName function Is_Marked (Heap : Heap_Record; CellName : Cell) return Boolean; procedure Mark_Cell (Heap : in out Heap_Record; CellName : in Cell); --# derives Heap from *, --# CellName; --# post Is_Marked (Heap, CellName); procedure UnMark_Cell (Heap : in out Heap_Record; CellName : in Cell); --# derives Heap from *, --# CellName; --# post not Is_Marked (Heap, CellName); --================================================================= -- Methods --================================================================= -------------------------------------------------------------- -- Allocates a new Cell on the Heap. -------------------------------------------------------------- procedure Create_Cell (Heap : in out Heap_Record; CellName : out Cell); --# global in out Statistics.TableUsage; --# derives CellName, --# Heap from Heap & --# Statistics.TableUsage from *, --# Heap; --# post CellName /= Null_Cell; -- if normal termination of this procedure -------------------------------------------------------------- -- Copies the following attributes of Source to Destination -- A_Ptr -- B_Ptr -- Kind -- Rank -- Lex_Str -- Op_Symbol -- Val -- Assoc_Var -- Other attributes in Destination are unmodified -------------------------------------------------------------- procedure Copy_Contents (Heap : in out Heap_Record; Source, Destination : in Cell); --# derives Heap from *, --# Destination, --# Source; -------------------------------------------------------------- -- Allocates a new Cell using Create_Cell, and copies -- the following attributes of CellName to it: -- Kind -- Rank -- Lex_Str -- Op_Symbol -- Val -- Assoc_Var -- -- The "Copy" attribute of CellName is then set to refer -- to the newly created Cell. 
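-- For example (sketch): after Create_Copy (Heap, C) for some existing cell C, the duplicate can be retrieved with Get_Copy (Heap, C). Contrast with Copy_Contents above, which writes into an already-allocated Destination cell rather than allocating a new one.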
-------------------------------------------------------------- procedure Create_Copy (Heap : in out Heap_Record; CellName : in Cell); --# global in out Statistics.TableUsage; --# derives Heap from *, --# CellName & --# Statistics.TableUsage from *, --# Heap; -------------------------------------------------------------- -- Returns CellName to the Heap's free list -------------------------------------------------------------- procedure Dispose_Of_Cell (Heap : in out Heap_Record; CellName : in Cell); --# derives Heap from *, --# CellName; -------------------------------------------------------------- -- Sets state of Statistics package to record -- the current high-water mark of TheHeap -------------------------------------------------------------- procedure Report_Usage (TheHeap : in Heap_Record); --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage from *, --# TheHeap; --================================================================= -- Debugging --================================================================= -------------------------------------------------------------- -- Returns the current size of the vector. -------------------------------------------------------------- function Get_Heap_Size (Heap : in Heap_Record) return Cell_Storage.Cell; private subtype Cell_Content is Cell_Storage.Cell_Content; type Heap_Record is record High_Mark : Cell; Next_Free_Cell : Cell; List_Of_Cells : Cell_Storage.Vector; end record; end Cells; spark-2012.0.deb/examiner/dictionary-attribute_is_visible.adb0000644000175000017500000007160711753202336023312 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with CommandLineData; separate (Dictionary) function Attribute_Is_Visible (Name : LexTokenManager.Lex_String; Prefix : PrefixSort; Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return Boolean is Result : Boolean; -------------------------------------------------------------------------------- function Type_Attribute_Is_Visible (Name : LexTokenManager.Lex_String; Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in CommandLineData.Content; --# in Dict; --# in LexTokenManager.State; is Result : Boolean; -------------------------------------------------------------------------------- function Type_Attribute_Is_Visible_83 (Name : LexTokenManager.Lex_String; Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; --# in LexTokenManager.State; is Result : Boolean; begin if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Aft_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Delta_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Fore_Token) = LexTokenManager.Str_Eq then Result := Type_Is_Fixed_Point (Type_Mark => Type_Mark) or else Type_Mark = Get_Unknown_Type_Mark; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Size_Token) = LexTokenManager.Str_Eq then Result := True; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Digits_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Emax_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Epsilon_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Machine_Emax_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Machine_Emin_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Machine_Mantissa_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Machine_Radix_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Safe_Emax_Token) = LexTokenManager.Str_Eq then Result := Type_Is_Floating_Point (Type_Mark => Type_Mark) or else Type_Mark = Get_Unknown_Type_Mark; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.First_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Last_Token) = LexTokenManager.Str_Eq then if Type_Is_Scalar (Type_Mark => Type_Mark) then Result := True; elsif Type_Is_Array (Type_Mark => Type_Mark) then Result := RawDict.Get_Type_Constrained (Type_Mark => Type_Mark); else Result := Type_Mark = Get_Unknown_Type_Mark; end if; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Large_Token) = 
LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Machine_Overflows_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Machine_Rounds_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Mantissa_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Safe_Large_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Safe_Small_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Small_Token) = LexTokenManager.Str_Eq then Result := Type_Is_Real (Type_Mark => Type_Mark) or else Type_Mark = Get_Unknown_Type_Mark; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Length_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Range_Token) = LexTokenManager.Str_Eq then if Type_Is_Array (Type_Mark => Type_Mark) then Result := RawDict.Get_Type_Constrained (Type_Mark => Type_Mark); else Result := Type_Mark = Get_Unknown_Type_Mark; end if; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Pos_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Pred_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Succ_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Val_Token) = LexTokenManager.Str_Eq then if Type_Is_Discrete (Type_Mark => Type_Mark) then Result := not Type_Is_Boolean (Type_Mark => Type_Mark); else Result := Type_Mark = Get_Unknown_Type_Mark; end if; else Result := False; end if; return Result; end Type_Attribute_Is_Visible_83; -------------------------------------------------------------------------------- function Type_Attribute_Is_Visible_95 (Name : LexTokenManager.Lex_String; Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; --# in LexTokenManager.State; is Result : Boolean; begin if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Aft_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Delta_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Fore_Token) = LexTokenManager.Str_Eq then Result := Type_Is_Fixed_Point (Type_Mark => Type_Mark) or else Type_Mark = Get_Unknown_Type_Mark; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Size_Token) = LexTokenManager.Str_Eq then Result := True; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Digits_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Machine_Emax_Token) = 
LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Machine_Emin_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Machine_Mantissa_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Denorm_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Model_Emin_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Model_Epsilon_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Model_Mantissa_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Model_Small_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Safe_First_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Safe_Last_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Signed_Zeros_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Floor_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Ceiling_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Safe_Emax_Token) = LexTokenManager.Str_Eq or else -- Obsolete but implementation-defined in SPARK95 LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Emax_Token) = LexTokenManager.Str_Eq or else -- Ditto LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Epsilon_Token) = LexTokenManager.Str_Eq then -- Ditto Result := Type_Is_Floating_Point (Type_Mark => Type_Mark) or else Type_Mark = Get_Unknown_Type_Mark; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.First_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Last_Token) = LexTokenManager.Str_Eq then if Type_Is_Scalar (Type_Mark => Type_Mark) then Result := True; elsif Type_Is_Array (Type_Mark => Type_Mark) then Result := RawDict.Get_Type_Constrained (Type_Mark => Type_Mark); else Result := Type_Mark = Get_Unknown_Type_Mark; end if; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Machine_Overflows_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Machine_Rounds_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Machine_Radix_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Large_Token) = 
LexTokenManager.Str_Eq or else -- Obsolete but implementation-defined in SPARK95 LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Mantissa_Token) = LexTokenManager.Str_Eq or else -- Ditto LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Safe_Large_Token) = LexTokenManager.Str_Eq or else -- Ditto LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Safe_Small_Token) = LexTokenManager.Str_Eq or else -- Ditto LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Small_Token) = LexTokenManager.Str_Eq then -- Obsolete but implementation-defined -- for floating-point types in SPARK95. -- OK for fixed-point types. Result := Type_Is_Real (Type_Mark => Type_Mark) or else Type_Mark = Get_Unknown_Type_Mark; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Length_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Range_Token) = LexTokenManager.Str_Eq then if Type_Is_Array (Type_Mark => Type_Mark) then Result := RawDict.Get_Type_Constrained (Type_Mark => Type_Mark); else Result := Type_Mark = Get_Unknown_Type_Mark; end if; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Pos_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Pred_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Succ_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Val_Token) = LexTokenManager.Str_Eq then if Type_Is_Discrete (Type_Mark) then Result := not Type_Is_Boolean (Type_Mark => Type_Mark); else Result := Type_Mark = Get_Unknown_Type_Mark; end if; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Component_Size_Token) = LexTokenManager.Str_Eq then Result := Type_Is_Array (Type_Mark => Type_Mark); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Min_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Max_Token) = LexTokenManager.Str_Eq then Result := (Type_Is_Scalar (Type_Mark => Type_Mark) and then not Type_Is_Boolean (Type_Mark => Type_Mark)) or else Type_Mark = Get_Unknown_Type_Mark; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Modulus_Token) = LexTokenManager.Str_Eq then Result := Type_Is_Modular (Type_Mark => Type_Mark) or else Type_Mark = Get_Unknown_Type_Mark; else Result := False; end if; return Result; end Type_Attribute_Is_Visible_95; -------------------------------------------------------------------------------- function Type_Attribute_Is_Visible_2005 (Name : LexTokenManager.Lex_String; Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; --# in LexTokenManager.State; is Result : Boolean; begin if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Mod_Token) = LexTokenManager.Str_Eq then Result := Type_Is_Modular (Type_Mark => Type_Mark); else Result := 
False; end if; return Result; end Type_Attribute_Is_Visible_2005; begin -- Type_Attribute_Is_Visible case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => Result := Type_Attribute_Is_Visible_83 (Name => Name, Type_Mark => Type_Mark); when CommandLineData.SPARK95 => Result := Type_Attribute_Is_Visible_95 (Name => Name, Type_Mark => Type_Mark); when CommandLineData.SPARK2005_Profiles => Result := Type_Attribute_Is_Visible_95 (Name => Name, Type_Mark => Type_Mark) or else Type_Attribute_Is_Visible_2005 (Name => Name, Type_Mark => Type_Mark); end case; return Result; end Type_Attribute_Is_Visible; ------------------------------------------------------------------------------- function Base_Attribute_Is_Visible (Name : LexTokenManager.Lex_String; Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in CommandLineData.Content; --# in Dict; --# in LexTokenManager.State; is Result : Boolean; begin if Type_Attribute_Is_Visible (Name => Name, Type_Mark => Type_Mark) then if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.First_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Last_Token) = LexTokenManager.Str_Eq then Result := not Type_Is_Array (Type_Mark => Type_Mark); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Length_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Range_Token) = LexTokenManager.Str_Eq then Result := False; else Result := True; end if; else Result := False; end if; return Result; end Base_Attribute_Is_Visible; -------------------------------------------------------------------------------- function Object_Attribute_Is_Visible_83 (Name : LexTokenManager.Lex_String; Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in Dict; --# in LexTokenManager.State; is Result : Boolean; begin if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Size_Token) = LexTokenManager.Str_Eq then Result := True; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.First_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Last_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Length_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Range_Token) = LexTokenManager.Str_Eq then Result := Type_Is_Array (Type_Mark => Type_Mark); else Result := False; end if; return Result; end Object_Attribute_Is_Visible_83; -------------------------------------------------------------------------------- function Object_Attribute_Is_Visible_95 (Name : LexTokenManager.Lex_String; Type_Mark : RawDict.Type_Info_Ref) return Boolean --# global in CommandLineData.Content; --# in Dict; --# in LexTokenManager.State; is Result : Boolean; begin if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Size_Token) = LexTokenManager.Str_Eq then Result := True; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.First_Token) = LexTokenManager.Str_Eq or else 
LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Last_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Length_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Range_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Component_Size_Token) = LexTokenManager.Str_Eq then Result := Type_Is_Array (Type_Mark => Type_Mark); -- Support for 'Valid in SPARK95 elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Valid_Token) = LexTokenManager.Str_Eq then -- LRM 13.9.2 (2) says 'Valid is only allowed for a prefix that -- denotes a scalar type. Result := Type_Is_Scalar (Type_Mark => Type_Mark); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Name, Lex_Str2 => LexTokenManager.Access_Token) = LexTokenManager.Str_Eq then Result := RawDict.Get_Type_Discriminant (Type_Mark => Type_Mark) = Protected_Type_Item and then CommandLineData.Ravenscar_Selected; else Result := False; end if; return Result; end Object_Attribute_Is_Visible_95; begin -- Attribute_Is_Visible if Type_Is_Private_Here (Type_Mark => Type_Mark, Scope => Scope) then Result := False; else case Prefix is when AType => Result := Type_Attribute_Is_Visible (Name => Name, Type_Mark => Type_Mark); when ABaseType => Result := Base_Attribute_Is_Visible (Name => Name, Type_Mark => Type_Mark); when AnObject => case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => Result := Object_Attribute_Is_Visible_83 (Name => Name, Type_Mark => Type_Mark); when CommandLineData.SPARK95_Onwards => -- Object attributes in SPARK2005/KCG are currently the same -- as those of SPARK95, so... Result := Object_Attribute_Is_Visible_95 (Name => Name, Type_Mark => Type_Mark); end case; end case; end if; return Result; end Attribute_Is_Visible; spark-2012.0.deb/examiner/dictionary-is_renamed_local.adb0000644000175000017500000001431211753202336022345 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Dictionary) function Is_Renamed_Local (The_Subprogram : RawDict.Subprogram_Info_Ref; Scope : Scopes) return Boolean is Current_Scope : Scopes; Region : Symbol; Found : Boolean; -------------------------------------------------------------------------------- function Is_Renamed_In_This_Scope (The_Subprogram : RawDict.Subprogram_Info_Ref; Scope : Scopes) return Boolean --# global in Dict; is Region : Symbol; Found : Boolean; -------------------------------------------------------------------------------- function Search_Renaming_Declarations (The_Subprogram : RawDict.Subprogram_Info_Ref; Declarations : RawDict.Declaration_Info_Ref) return Boolean --# global in Dict; is Current_Declaration : RawDict.Declaration_Info_Ref; begin Current_Declaration := Declarations; while Current_Declaration /= RawDict.Null_Declaration_Info_Ref and then (RawDict.GetSymbolDiscriminant (RawDict.Get_Declaration_Item (The_Declaration => Current_Declaration)) /= Subprogram_Symbol or else RawDict.Get_Subprogram_Info_Ref (Item => RawDict.Get_Declaration_Item (The_Declaration => Current_Declaration)) /= The_Subprogram) loop Current_Declaration := RawDict.Get_Next_Declaration (The_Declaration => Current_Declaration); end loop; return Current_Declaration /= RawDict.Null_Declaration_Info_Ref; end Search_Renaming_Declarations; -------------------------------------------------------------------------------- function Is_Renamed_In_This_Package_Specification (The_Subprogram : RawDict.Subprogram_Info_Ref; The_Package : RawDict.Package_Info_Ref) return Boolean --# global in Dict; is begin return Search_Renaming_Declarations (The_Subprogram => The_Subprogram, Declarations => RawDict.Get_Package_Visible_Renaming_Declarations (The_Package => The_Package)); end Is_Renamed_In_This_Package_Specification; -------------------------------------------------------------------------------- function Is_Renamed_In_This_Package_Body (The_Subprogram : RawDict.Subprogram_Info_Ref; The_Package : RawDict.Package_Info_Ref) return Boolean --# global in Dict; is begin return Search_Renaming_Declarations (The_Subprogram => The_Subprogram, Declarations => RawDict.Get_Package_Local_Renaming_Declarations (The_Package => The_Package)); end Is_Renamed_In_This_Package_Body; -------------------------------------------------------------------------------- function Is_Renamed_In_This_Subprogram (Subprogram : RawDict.Subprogram_Info_Ref; The_Subprogram : RawDict.Subprogram_Info_Ref) return Boolean --# global in Dict; is begin return Search_Renaming_Declarations (The_Subprogram => Subprogram, Declarations => RawDict.Get_Subprogram_Renaming_Declarations (The_Subprogram => The_Subprogram)); end Is_Renamed_In_This_Subprogram; begin -- Is_Renamed_In_This_Scope Region := GetRegion (Scope); case RawDict.GetSymbolDiscriminant (Region) is when Package_Symbol => case Get_Visibility (Scope => Scope) is when Visible | Privat => Found := Is_Renamed_In_This_Package_Specification (The_Subprogram => The_Subprogram, The_Package => RawDict.Get_Package_Info_Ref (Item => Region)); when Local => Found := Is_Renamed_In_This_Package_Body (The_Subprogram => The_Subprogram, The_Package => RawDict.Get_Package_Info_Ref (Item => Region)) or else Is_Renamed_In_This_Package_Specification (The_Subprogram => The_Subprogram, The_Package => RawDict.Get_Package_Info_Ref (Item => Region)); end case; when Subprogram_Symbol => Found := Is_Renamed_In_This_Subprogram (Subprogram => The_Subprogram, 
The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Region)); when others => Found := False; end case; return Found; end Is_Renamed_In_This_Scope; begin -- Is_Renamed_Local Current_Scope := Scope; loop Found := Is_Renamed_In_This_Scope (The_Subprogram => The_Subprogram, Scope => Current_Scope); Region := GetRegion (Current_Scope); exit when Found or else RawDict.GetSymbolDiscriminant (Region) = Package_Symbol or else (RawDict.GetSymbolDiscriminant (Region) = Subprogram_Symbol and then Is_Main_Program (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Region))); Current_Scope := GetEnclosingScope (Current_Scope); end loop; return Found; end Is_Renamed_Local; spark-2012.0.deb/examiner/sparklex-lex-getnumber.adb0000644000175000017500000003665111753202336021341 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (SparkLex.Lex) procedure GetNumber (Curr_Line : in out Line_Context; Token : out SP_Symbols.SP_Terminal) -- Given that the character at the current position in the line buffer is a -- digit, GetNumber recognises the maximal length subsequence from the line buffer -- defined by one of the following regular expressions :- -- integer_number ::= integer [exponent] -- real_number ::= integer.integer [exponent] -- based_integer ::= integer#based_integer# [exponent] -- based_real ::= integer#based_integer.based_integer# [exponent] -- illegal_number ::= numeric_literal non_number_separator {non_number_separator} -- where -- non_number_separator is any character which cannot legally separate -- a numeric_literal from the next token in the Ada lexis. -- On exit Token is set according to which of the expressions is recognised. -- The current position of the line buffer is the character immediately -- following the recognised subsequence. is Ch : Character; Legal : Boolean; Token_So_Far : SP_Symbols.SP_Terminal; function Num_Sep (Ch : Character) return Boolean is begin return Ch = ' ' or else Ch = End_Of_Text or else Simple_Delimiter (Ch => Ch) or else Format_Effector (Ch => Ch); end Num_Sep; function Extended_Digit (Ch : Character) return Boolean is begin -- We consider an extended digit to be a decimal digit or -- any letter, so...
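-- for example '7', 'a' and 'F' are all treated as extended digits here, whereas '_' and '#' are not ('_' is handled by Get_Based_Integer below and '#' by the body of GetNumber itself);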
return Letter_Or_Digit (Ch => Ch); end Extended_Digit; procedure Get_Integer (Curr_Line : in out Line_Context; Legal : out Boolean) --# derives Curr_Line, --# Legal from Curr_Line; --# pre E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; --# post E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Curr_Pos >= Curr_Line~.Curr_Pos and --# (Curr_Line~.Curr_Pos <= E_Strings.Get_Length (Curr_Line~.Conts) -> (Curr_Line.Curr_Pos > Curr_Line~.Curr_Pos)) and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos = Curr_Line.Curr_Pos and --# Curr_Line.Last_Token_Pos = Curr_Line~.Last_Token_Pos; -- -- This procedure recognises the maximal length sequence from the -- line buffer satisfying one of the two following regular expressions -- integer ::= digit {[underline] digit} -- illegal_int ::= integer underline -- -- If an integer is recognised, then Legal is set to TRUE otherwise False. -- is type Num_State is (Underline_Or_Digit, Digit_Only, Illegal_Num_State); State : Num_State; Ch : Character; begin LineManager.Accept_Char (Curr_Line => Curr_Line); -- First digit already recognised. State := Underline_Or_Digit; loop --# assert E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Curr_Pos >= Curr_Line~.Curr_Pos and --# (Curr_Line~.Curr_Pos <= E_Strings.Get_Length (Curr_Line~.Conts) -> (Curr_Line.Curr_Pos > Curr_Line~.Curr_Pos)) and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos = Curr_Line.Curr_Pos and --# Curr_Line.Last_Token_Pos = Curr_Line~.Last_Token_Pos; Ch := E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos); case State is when Underline_Or_Digit => if Ch = '_' then State := Digit_Only; end if; when Digit_Only => if Ada.Characters.Handling.Is_Digit (Ch) then State := Underline_Or_Digit; else State := Illegal_Num_State; end if; when Illegal_Num_State => null; end case; exit when (not Ada.Characters.Handling.Is_Digit (Ch) and then Ch /= '_') or else State = Illegal_Num_State; LineManager.Accept_Char (Curr_Line => Curr_Line); end loop; Legal := (State = Underline_Or_Digit); end Get_Integer; procedure Get_Based_Integer (Curr_Line : in out Line_Context; Legal : out Boolean) --# derives Curr_Line, --# Legal from Curr_Line; --# pre E_Strings.Get_Length (Curr_Line.Conts) < Natural'Last and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1; --# post E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Curr_Pos >= Curr_Line~.Curr_Pos and --# (Curr_Line~.Curr_Pos <= E_Strings.Get_Length (Curr_Line~.Conts) -> (Curr_Line.Curr_Pos > Curr_Line~.Curr_Pos)) and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos = Curr_Line.Curr_Pos and --# Curr_Line.Last_Token_Pos = Curr_Line~.Last_Token_Pos; -- -- This procedure recognises the maximal length sequence from the -- line buffer satisfying on of the the two following regular expression -- based_integer ::= extended_digit {[underline] extended_digit} -- illegal_based ::= based_integer underline -- -- If a based_integer is recognised, then Legal is set to TRUE otherwise False. 
-- is type Num_State is (Underline_Or_Ext_Digit, Ext_Digit_Only, Illegal_Num_State); State : Num_State; Ch : Character; begin LineManager.Accept_Char (Curr_Line => Curr_Line); -- First extended digit already recognised. State := Underline_Or_Ext_Digit; loop --# assert E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Curr_Pos >= Curr_Line~.Curr_Pos and --# (Curr_Line~.Curr_Pos <= E_Strings.Get_Length (Curr_Line~.Conts) -> (Curr_Line.Curr_Pos > Curr_Line~.Curr_Pos)) and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos = Curr_Line.Curr_Pos and --# Curr_Line.Last_Token_Pos = Curr_Line~.Last_Token_Pos; Ch := E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos); case State is when Underline_Or_Ext_Digit => if Ch = '_' then State := Ext_Digit_Only; end if; when Ext_Digit_Only => if Extended_Digit (Ch => Ch) then State := Underline_Or_Ext_Digit; else State := Illegal_Num_State; end if; when Illegal_Num_State => null; end case; exit when (not Extended_Digit (Ch => Ch) and then Ch /= '_') or else State = Illegal_Num_State; LineManager.Accept_Char (Curr_Line => Curr_Line); end loop; Legal := (State = Underline_Or_Ext_Digit); end Get_Based_Integer; begin Get_Integer (Curr_Line => Curr_Line, Legal => Legal); if Legal then Token_So_Far := SP_Symbols.integer_number; else Token_So_Far := SP_Symbols.illegal_number; end if; --# assert E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Curr_Pos > Curr_Line~.Curr_Pos and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos = Curr_Line.Curr_Pos and --# Curr_Line.Last_Token_Pos = Curr_Line~.Last_Token_Pos and --# Token_So_Far in SP_Symbols.SP_Terminal and --# Token_So_Far /= SP_Symbols.annotation_end and Token_So_Far /= SP_Symbols.SPEND; if E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos) = '#' then LineManager.Accept_Char (Curr_Line => Curr_Line); if Extended_Digit (Ch => E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos)) then Get_Based_Integer (Curr_Line => Curr_Line, Legal => Legal); if Legal and then Token_So_Far /= SP_Symbols.illegal_number then Token_So_Far := SP_Symbols.based_integer; else Token_So_Far := SP_Symbols.illegal_number; end if; end if; end if; --# assert E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Curr_Pos > Curr_Line~.Curr_Pos and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos = Curr_Line.Curr_Pos and --# Curr_Line.Last_Token_Pos = Curr_Line~.Last_Token_Pos and --# Token_So_Far in SP_Symbols.SP_Terminal and --# Token_So_Far /= SP_Symbols.annotation_end and Token_So_Far /= SP_Symbols.SPEND; if E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos) = '.' then LineManager.Lookahead_Char (Curr_Line => Curr_Line, Ch => Ch); -- Check for '..' symbol. LineManager.Reject_Lookahead (Curr_Line => Curr_Line); -- Current and lookahead position at first '.' char. if Token_So_Far = SP_Symbols.integer_number then if Ada.Characters.Handling.Is_Digit (Ch) then LineManager.Accept_Char (Curr_Line => Curr_Line); -- Accept decimal point, '.', char. 
Get_Integer (Curr_Line => Curr_Line, Legal => Legal); if Legal then Token_So_Far := SP_Symbols.real_number; else Token_So_Far := SP_Symbols.illegal_number; end if; end if; elsif Token_So_Far = SP_Symbols.based_integer then if Extended_Digit (Ch => Ch) then LineManager.Accept_Char (Curr_Line => Curr_Line); -- Accept decimal point, '.', char. Get_Based_Integer (Curr_Line => Curr_Line, Legal => Legal); if Legal then Token_So_Far := SP_Symbols.based_real; else Token_So_Far := SP_Symbols.illegal_number; end if; end if; end if; end if; --# assert E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Curr_Pos > Curr_Line~.Curr_Pos and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos = Curr_Line.Curr_Pos and --# Curr_Line.Last_Token_Pos = Curr_Line~.Last_Token_Pos and --# Token_So_Far in SP_Symbols.SP_Terminal and --# Token_So_Far /= SP_Symbols.annotation_end and Token_So_Far /= SP_Symbols.SPEND; if Token_So_Far = SP_Symbols.based_integer or else Token_So_Far = SP_Symbols.based_real then if E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos) = '#' then LineManager.Accept_Char (Curr_Line => Curr_Line); else Token_So_Far := SP_Symbols.illegal_number; end if; end if; --# assert E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Curr_Pos > Curr_Line~.Curr_Pos and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos = Curr_Line.Curr_Pos and --# Curr_Line.Last_Token_Pos = Curr_Line~.Last_Token_Pos and --# Token_So_Far in SP_Symbols.SP_Terminal and --# Token_So_Far /= SP_Symbols.annotation_end and Token_So_Far /= SP_Symbols.SPEND; Ch := E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos); if Ch = 'E' or else Ch = 'e' then LineManager.Accept_Char (Curr_Line => Curr_Line); Ch := E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos); if Ch = '+' then LineManager.Accept_Char (Curr_Line => Curr_Line); Ch := E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos); elsif Ch = '-' then LineManager.Accept_Char (Curr_Line => Curr_Line); Ch := E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos); if Token_So_Far /= SP_Symbols.real_number and then Token_So_Far /= SP_Symbols.based_real then Token_So_Far := SP_Symbols.illegal_number; end if; end if; if Ada.Characters.Handling.Is_Digit (Ch) then Get_Integer (Curr_Line => Curr_Line, Legal => Legal); if not Legal then Token_So_Far := SP_Symbols.illegal_number; end if; else Token_So_Far := SP_Symbols.illegal_number; end if; end if; --# assert E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Curr_Pos > Curr_Line~.Curr_Pos and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# Curr_Line.Lookahead_Pos = Curr_Line.Curr_Pos and --# Curr_Line.Last_Token_Pos = Curr_Line~.Last_Token_Pos and --# Token_So_Far in SP_Symbols.SP_Terminal and --# Token_So_Far /= SP_Symbols.annotation_end and Token_So_Far /= SP_Symbols.SPEND; Ch := E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos); if Num_Sep (Ch => Ch) then Token := Token_So_Far; else while not Num_Sep (Ch => Ch) loop --# assert E_Strings.Get_Length (Curr_Line.Conts) = E_Strings.Get_Length (Curr_Line~.Conts) and --# Curr_Line.Curr_Pos > Curr_Line~.Curr_Pos and --# Curr_Line.Curr_Pos <= E_Strings.Get_Length (Curr_Line.Conts) + 1 and --# 
Curr_Line.Lookahead_Pos = Curr_Line.Curr_Pos and --# Curr_Line.Last_Token_Pos = Curr_Line~.Last_Token_Pos and --# Token_So_Far in SP_Symbols.SP_Terminal and --# Token_So_Far /= SP_Symbols.annotation_end and Token_So_Far /= SP_Symbols.SPEND; LineManager.Accept_Char (Curr_Line => Curr_Line); Ch := E_Strings.Get_Element (E_Str => Curr_Line.Conts, Pos => Curr_Line.Curr_Pos); end loop; Token := SP_Symbols.illegal_number; end if; end GetNumber; spark-2012.0.deb/examiner/dictionary-add_record_component.adb0000644000175000017500000001603011753202336023234 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Dictionary) procedure Add_Record_Component (Name : in LexTokenManager.Lex_String; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; The_Record_Type : in RawDict.Type_Info_Ref; The_Component_Type : in RawDict.Type_Info_Ref; InheritedField : in Boolean; ComponentTypeReference : in Location) is The_Record_Component, Previous : RawDict.Record_Component_Info_Ref; -------------------------------------------------------------------------------- function Is_Record_Private (The_Record_Type, The_Component_Type : RawDict.Type_Info_Ref) return TriState --# global in Dict; is Is_Component_Private, Result : TriState; -------------------------------------------------------------------------------- function Is_Private (Type_Mark : RawDict.Type_Info_Ref; Scope : Scopes) return TriState --# global in Dict; is Result : TriState; begin case RawDict.Get_Type_Private (Type_Mark => Type_Mark) is when Never => Result := Never; when Sometimes => if IsLocal (Scope, Get_Type_Scope (Type_Mark => Type_Mark)) then Result := Sometimes; else Result := Always; end if; when Always => Result := Always; end case; return Result; end Is_Private; begin -- Is_Record_Private Is_Component_Private := Is_Private (Type_Mark => The_Component_Type, Scope => Get_Type_Scope (Type_Mark => The_Record_Type)); case RawDict.Get_Type_Private (Type_Mark => The_Record_Type) is when Never => Result := Is_Component_Private; when Sometimes => case Is_Component_Private is when Never | Sometimes => Result := Sometimes; when Always => Result := Always; end case; when Always => Result := Always; end case; return Result; end Is_Record_Private; -------------------------------------------------------------------------------- function Is_Record_Limited (The_Record_Type, The_Component_Type : RawDict.Type_Info_Ref) return TriState --# global in Dict; is Is_Component_Limited, Result : TriState; -------------------------------------------------------------------------------- function Is_Limited (Type_Mark : 
RawDict.Type_Info_Ref; Scope : Scopes) return TriState --# global in Dict; is Result : TriState; begin case RawDict.Get_Type_Limited (Type_Mark => Type_Mark) is when Never => Result := Never; when Sometimes => if IsLocal (Scope, Get_Type_Scope (Type_Mark => Type_Mark)) then Result := Sometimes; else Result := Always; end if; when Always => Result := Always; end case; return Result; end Is_Limited; begin -- Is_Record_Limited Is_Component_Limited := Is_Limited (Type_Mark => The_Component_Type, Scope => Get_Type_Scope (Type_Mark => The_Record_Type)); case RawDict.Get_Type_Limited (Type_Mark => The_Record_Type) is when Never => Result := Is_Component_Limited; when Sometimes => case Is_Component_Limited is when Never | Sometimes => Result := Sometimes; when Always => Result := Always; end case; when Always => Result := Always; end case; return Result; end Is_Record_Limited; begin -- Add_Record_Component RawDict.Create_Record_Component (Name => Name, Record_Type => The_Record_Type, Component_Type => The_Component_Type, Inherited_Field => InheritedField, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, The_Record_Component => The_Record_Component); Previous := RawDict.Get_Type_Last_Record_Component (Type_Mark => The_Record_Type); if Previous = RawDict.Null_Record_Component_Info_Ref then RawDict.Set_Type_First_Record_Component (Type_Mark => The_Record_Type, Record_Component => The_Record_Component); else RawDict.Set_Next_Record_Component (The_Record_Component => Previous, Next => The_Record_Component); end if; RawDict.Set_Type_Last_Record_Component (Type_Mark => The_Record_Type, Record_Component => The_Record_Component); RawDict.Set_Type_Private (Type_Mark => The_Record_Type, Is_Private => Is_Record_Private (The_Record_Type => The_Record_Type, The_Component_Type => The_Component_Type)); RawDict.Set_Type_Limited (Type_Mark => The_Record_Type, Is_Limited => Is_Record_Limited (The_Record_Type => The_Record_Type, The_Component_Type => The_Component_Type)); RawDict.Set_Type_Equality_Defined (Type_Mark => The_Record_Type, Equality_Defined => RawDict.Get_Type_Equality_Defined (Type_Mark => The_Record_Type) and then RawDict.Get_Type_Equality_Defined (Type_Mark => The_Component_Type)); RawDict.Set_Type_Contains_Float (Type_Mark => The_Record_Type, Contains_Float => RawDict.Get_Type_Contains_Float (Type_Mark => The_Record_Type) or else RawDict.Get_Type_Contains_Float (Type_Mark => The_Component_Type)); if The_Component_Type /= Get_Unknown_Type_Mark then AddOtherReference (RawDict.Get_Type_Symbol (The_Component_Type), GetRegion (Get_Type_Scope (Type_Mark => The_Record_Type)), ComponentTypeReference); end if; end Add_Record_Component; spark-2012.0.deb/examiner/dictionary-lookupscope.adb0000644000175000017500000016377611753202336021453 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Dictionary) procedure LookupScope (Name : in LexTokenManager.Lex_String; Stop_At : in LexTokenManager.Lex_String; Scope : in Scopes; Calling_Scope : in Scopes; Context : in Contexts; Item : out Symbol; Is_Visible : out Boolean) -- The extra Calling_Scope parameter is only used when LookUpScope is called from -- LookUpSelectedItem because in this case only the scope we start the search in -- is not the same as the scope where the search started (former is the visible -- scope of the prefix package). The extra parameter only affects the selection of -- the correct implicit proof function that corresponds to an Ada function is Region : Symbol; -------------------------------------------------------------------------------- function Lookup_Declarations (Name : LexTokenManager.Lex_String; Stop_At : LexTokenManager.Lex_String; Context : Contexts; Head : RawDict.Declaration_Info_Ref) return Symbol --# global in Dict; --# in LexTokenManager.State; is The_Declaration : RawDict.Declaration_Info_Ref; Item, Declarative_Item : Symbol; Declarative_Item_Name : LexTokenManager.Lex_String; -------------------------------------------------------------------------------- function Lookup_Declaration (Name : LexTokenManager.Lex_String; Declarative_Item : Symbol; Declarative_Item_Name : LexTokenManager.Lex_String; Context : Contexts; The_Declaration : RawDict.Declaration_Info_Ref) return Symbol --# global in Dict; --# in LexTokenManager.State; is Item : Symbol; -------------------------------------------------------------------------------- function Lookup_Enumeration_Literals (Name : LexTokenManager.Lex_String; The_Type : RawDict.Type_Info_Ref) return RawDict.Enumeration_Literal_Info_Ref --# global in Dict; --# in LexTokenManager.State; is The_Enumeration_Literal : RawDict.Enumeration_Literal_Info_Ref; begin Trace_Lex_Str (Msg => " In Lookup_Enumeration_Literals, seeking ", L => Name); The_Enumeration_Literal := RawDict.Get_Type_First_Enumeration_Literal (Type_Mark => The_Type); while The_Enumeration_Literal /= RawDict.Null_Enumeration_Literal_Info_Ref and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => RawDict.Get_Enumeration_Literal_Name (The_Enumeration_Literal => The_Enumeration_Literal), Lex_Str2 => Name) /= LexTokenManager.Str_Eq loop The_Enumeration_Literal := RawDict.Get_Next_Enumeration_Literal (The_Enumeration_Literal => The_Enumeration_Literal); end loop; return The_Enumeration_Literal; end Lookup_Enumeration_Literals; begin -- Lookup_Declaration if Context = ProgramContext and then RawDict.Get_Declaration_Context (The_Declaration => The_Declaration) = ProofContext then Item := NullSymbol; else if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Declarative_Item_Name, Lex_Str2 => Name) = LexTokenManager.Str_Eq then Item := Declarative_Item; elsif RawDict.GetSymbolDiscriminant (Declarative_Item) = Type_Symbol and then Is_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Declarative_Item)) and then RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Declarative_Item)) = Enumeration_Type_Item then Item := RawDict.Get_Enumeration_Literal_Symbol (Lookup_Enumeration_Literals (Name => Name, The_Type => RawDict.Get_Type_Info_Ref (Item => 
Declarative_Item))); else Item := NullSymbol; end if; end if; return Item; end Lookup_Declaration; begin -- Lookup_Declarations Trace_Lex_Str (Msg => "In Lookup_Declarations, seeking ", L => Name); Item := NullSymbol; The_Declaration := Head; loop exit when The_Declaration = RawDict.Null_Declaration_Info_Ref; Declarative_Item := RawDict.Get_Declaration_Item (The_Declaration => The_Declaration); if RawDict.GetSymbolDiscriminant (Declarative_Item) = Operator_Symbol then Declarative_Item_Name := LexTokenManager.Null_String; else Declarative_Item_Name := GetSimpleName (Declarative_Item); end if; Trace_Lex_Str (Msg => "Declarative_Item_Name is ", L => Declarative_Item_Name); exit when LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Stop_At, Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Stop_At, Lex_Str2 => Declarative_Item_Name) = LexTokenManager.Str_Eq; Item := Lookup_Declaration (Name => Name, Declarative_Item => Declarative_Item, Declarative_Item_Name => Declarative_Item_Name, Context => Context, The_Declaration => The_Declaration); exit when Item /= NullSymbol; The_Declaration := RawDict.Get_Next_Declaration (The_Declaration => The_Declaration); end loop; return Item; end Lookup_Declarations; -------------------------------------------------------------------------------- function Lookup_Known_Discriminants (Name : in LexTokenManager.Lex_String; The_Type : in RawDict.Type_Info_Ref) return Symbol --# global in Dict; --# in LexTokenManager.State; is The_Discriminant : Symbol; begin Trace_Lex_Str (Msg => "In Lookup_Known_Discriminants, seeking ", L => Name); case RawDict.Get_Type_Discriminant (Type_Mark => The_Type) is when Protected_Type_Item => The_Discriminant := RawDict.Get_Protected_Type_First_Discriminant (The_Protected_Type => The_Type); when Task_Type_Item => The_Discriminant := RawDict.Get_Task_Type_First_Discriminant (The_Task_Type => The_Type); when others => -- non-exec code The_Discriminant := NullSymbol; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Lookup_Known_Discriminants"); end case; while (The_Discriminant /= NullSymbol and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => RawDict.GetDiscriminantName (The_Discriminant), Lex_Str2 => Name) /= LexTokenManager.Str_Eq) loop The_Discriminant := RawDict.GetNextDiscriminant (The_Discriminant); end loop; return The_Discriminant; end Lookup_Known_Discriminants; -------------------------------------------------------------------------------- function Lookup_Package_Visible_Declarations (The_Package : RawDict.Package_Info_Ref; Name : LexTokenManager.Lex_String; Context : Contexts) return Symbol --# global in Dict; --# in LexTokenManager.State; is Item : Symbol; ------------------------------------------------------------------------------ function Lookup_Own_Variables (Name : LexTokenManager.Lex_String; The_Package : RawDict.Package_Info_Ref) return RawDict.Variable_Info_Ref --# global in Dict; --# in LexTokenManager.State; is The_Own_Variable : RawDict.Own_Variable_Info_Ref; The_Variable : RawDict.Variable_Info_Ref; begin Trace_Lex_Str (Msg => " In Lookup_Own_Variables, seeking ", L => Name); The_Own_Variable := RawDict.Get_Package_Own_Variables (The_Package => The_Package); loop if The_Own_Variable = RawDict.Null_Own_Variable_Info_Ref then The_Variable := RawDict.Null_Variable_Info_Ref; exit; end if; The_Variable := RawDict.Get_Own_Variable_Variable 
(The_Own_Variable => The_Own_Variable); exit when LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => RawDict.Get_Variable_Name (The_Variable => The_Variable), Lex_Str2 => Name) = LexTokenManager.Str_Eq; The_Own_Variable := RawDict.Get_Next_Own_Variable (The_Own_Variable => The_Own_Variable); end loop; return The_Variable; end Lookup_Own_Variables; ------------------------------------------------------------------------------ function Lookup_Own_Tasks (Name : LexTokenManager.Lex_String; The_Package : RawDict.Package_Info_Ref) return RawDict.Variable_Info_Ref --# global in Dict; --# in LexTokenManager.State; is The_Task_Type : Symbol; The_Variable : RawDict.Variable_Info_Ref; begin Trace_Lex_Str (Msg => " In Lookup_Own_Tasks, seeking ", L => Name); The_Task_Type := RawDict.Get_Package_Task_List (The_Package => The_Package); loop if The_Task_Type = NullSymbol then The_Variable := RawDict.Null_Variable_Info_Ref; exit; end if; The_Variable := RawDict.GetOwnTaskVariable (The_Task_Type); exit when LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => RawDict.Get_Variable_Name (The_Variable => The_Variable), Lex_Str2 => Name) = LexTokenManager.Str_Eq; The_Task_Type := RawDict.GetNextOwnTask (The_Task_Type); end loop; return The_Variable; end Lookup_Own_Tasks; begin -- Lookup_Package_Visible_Declarations Trace_Lex_Str (Msg => " In Lookup_Package_Visible_Declarations, seeking ", L => Name); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => RawDict.Get_Package_Name (The_Package => The_Package), Lex_Str2 => Name) = LexTokenManager.Str_Eq then Item := RawDict.Get_Package_Symbol (The_Package); else Item := Lookup_Declarations (Name => Name, Stop_At => LexTokenManager.Null_String, Context => Context, Head => RawDict.Get_Package_First_Visible_Declaration (The_Package => The_Package)); if Item = NullSymbol then Item := Lookup_Declarations (Name => Name, Stop_At => LexTokenManager.Null_String, Context => Context, Head => RawDict.Get_Package_Visible_Renaming_Declarations (The_Package => The_Package)); end if; if Item = NullSymbol and then RawDict.Get_Package_Generic_Unit (The_Package => The_Package) /= RawDict.Null_Generic_Unit_Info_Ref then Item := Lookup_Declarations (Name => Name, Stop_At => LexTokenManager.Null_String, Context => Context, Head => RawDict.Get_Generic_Unit_First_Declaration (The_Generic_Unit => RawDict.Get_Package_Generic_Unit (The_Package => The_Package))); end if; if Item = NullSymbol and then Context = ProofContext then Item := RawDict.Get_Variable_Symbol (Lookup_Own_Variables (Name => Name, The_Package => The_Package)); if Item = NullSymbol then Item := RawDict.Get_Variable_Symbol (Lookup_Own_Tasks (Name => Name, The_Package => The_Package)); end if; end if; end if; return Item; end Lookup_Package_Visible_Declarations; ------------------------------------------------------------------------------ function Lookup_Protected_Type_Visible_Declarations (The_Protected_Type : RawDict.Type_Info_Ref; Name : LexTokenManager.Lex_String; Context : Contexts) return Symbol --# global in Dict; --# in LexTokenManager.State; --# pre RawDict.Get_Type_Discriminant (The_Protected_Type, Dict) = Protected_Type_Item; is Item : Symbol; begin Trace_Lex_Str (Msg => " In Lookup_Protected_Type_Visible_Declarations, seeking ", L => Name); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => RawDict.Get_Type_Name (Type_Mark => The_Protected_Type), Lex_Str2 => Name) = LexTokenManager.Str_Eq then -- if we find PT (say) when looking from within PT then what we 
want to return is the -- implicitly declared abstract own variable of PT rather than the type itself Item := RawDict.Get_Variable_Symbol (RawDict.Get_Own_Variable_Variable (The_Own_Variable => RawDict.Get_Protected_Type_Own_Variable (The_Protected_Type => The_Protected_Type))); else Item := Lookup_Declarations (Name => Name, Stop_At => LexTokenManager.Null_String, Context => Context, Head => RawDict.Get_Protected_Type_First_Visible_Declaration (The_Protected_Type => The_Protected_Type)); end if; -- If not found then check the known discriminants if Item = NullSymbol then Item := Lookup_Known_Discriminants (Name => Name, The_Type => The_Protected_Type); end if; return Item; end Lookup_Protected_Type_Visible_Declarations; ------------------------------------------------------------------------------ function LookupVisibleDeclarations (Name : LexTokenManager.Lex_String; TheRegion : Symbol; Context : Contexts) return Symbol --# global in Dict; --# in LexTokenManager.State; is Item : Symbol; ------------------------------------------------------------------------------ function Lookup_Task_Type_Visible_Declarations (The_Task_Type : RawDict.Type_Info_Ref; Name : LexTokenManager.Lex_String) return Symbol --# global in Dict; --# in LexTokenManager.State; is Item : Symbol; begin Trace_Lex_Str (Msg => " In Lookup_Task_Type_Visible_Declarations, seeking ", L => Name); -- check the know discriminants Item := Lookup_Known_Discriminants (Name => Name, The_Type => The_Task_Type); return Item; end Lookup_Task_Type_Visible_Declarations; begin -- LookupVisibleDeclarations Trace_Lex_Str (Msg => "In LookupVisibleDeclarations, seeking ", L => Name); case RawDict.GetSymbolDiscriminant (TheRegion) is when Package_Symbol => Item := Lookup_Package_Visible_Declarations (The_Package => RawDict.Get_Package_Info_Ref (Item => TheRegion), Name => Name, Context => Context); when Type_Symbol => if Is_Type (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheRegion)) then case RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => TheRegion)) is when Protected_Type_Item => Item := Lookup_Protected_Type_Visible_Declarations (The_Protected_Type => RawDict.Get_Type_Info_Ref (Item => TheRegion), Name => Name, Context => Context); when Task_Type_Item => Item := Lookup_Task_Type_Visible_Declarations (The_Task_Type => RawDict.Get_Type_Info_Ref (Item => TheRegion), Name => Name); when others => Item := NullSymbol; -- can't be reached, there just to avoid DFerr SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.LookupVisibleDeclarations"); end case; else Item := NullSymbol; -- can't be reached, there just to avoid DFerr SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.LookupVisibleDeclarations"); end if; when Generic_Unit_Symbol => Item := Lookup_Declarations (Name => Name, Stop_At => LexTokenManager.Null_String, Context => Context, Head => RawDict.Get_Generic_Unit_First_Declaration (The_Generic_Unit => RawDict.Get_Generic_Unit_Info_Ref (Item => TheRegion))); when others => -- non-exec code Item := NullSymbol; -- can't be reached, there just to avoid DFerr SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.LookupVisibleDeclarations"); end case; return Item; end LookupVisibleDeclarations; ------------------------------------------------------------------------------ procedure Lookup_Local_Declarations (Name : in LexTokenManager.Lex_String; Stop_At : in LexTokenManager.Lex_String; 
Region : in Symbol; Context : in Contexts; Scope_Type : in Visibility; Calling_Scope : in Scopes; Item : out Symbol; Is_Visible : out Boolean) --# global in Dict; --# in LexTokenManager.State; --# derives Is_Visible, --# Item from Calling_Scope, --# Context, --# Dict, --# LexTokenManager.State, --# Name, --# Region, --# Scope_Type, --# Stop_At; is The_Discriminant : SymbolDiscriminant; ------------------------------------------------------------------------------ function Lookup_Package_Body (Name : LexTokenManager.Lex_String; Stop_At : LexTokenManager.Lex_String; The_Package : RawDict.Package_Info_Ref; Context : Contexts; Scope_Type : Visibility) return Symbol --# global in Dict; --# in LexTokenManager.State; is Item : Symbol; ------------------------------------------------------------------------------ function Lookup_Refinement_Constituents (Name : LexTokenManager.Lex_String; The_Package : RawDict.Package_Info_Ref) return RawDict.Variable_Info_Ref --# global in Dict; --# in LexTokenManager.State; is AbstractOwnVariables : Iterator; Constituent : RawDict.Variable_Info_Ref; ------------------------------------------------------------------------------ function Lookup_Constituents (Name : LexTokenManager.Lex_String; The_Variable : RawDict.Variable_Info_Ref) return RawDict.Variable_Info_Ref --# global in Dict; --# in LexTokenManager.State; is Constituents : Iterator; Constituent : RawDict.Variable_Info_Ref; begin Trace_Lex_Str (Msg => " In Lookup_Constituents, seeking ", L => Name); Constituents := First_Constituent (The_Variable => The_Variable); loop if IsNullIterator (Constituents) then Constituent := RawDict.Null_Variable_Info_Ref; exit; end if; Constituent := RawDict.Get_Variable_Info_Ref (CurrentSymbol (Constituents)); exit when LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => RawDict.Get_Variable_Name (The_Variable => Constituent), Lex_Str2 => Name) = LexTokenManager.Str_Eq and then not Is_Own_Variable (The_Variable => Constituent); Constituents := NextSymbol (Constituents); end loop; return Constituent; end Lookup_Constituents; begin -- Lookup_Refinement_Constituents Trace_Lex_Str (Msg => " In Lookup_Refinement_Constituents, seeking ", L => Name); AbstractOwnVariables := First_Abstract_Own_Variable (The_Package => The_Package); Constituent := RawDict.Null_Variable_Info_Ref; loop exit when IsNullIterator (AbstractOwnVariables); Constituent := Lookup_Constituents (Name => Name, The_Variable => RawDict.Get_Variable_Info_Ref (CurrentSymbol (AbstractOwnVariables))); exit when Constituent /= RawDict.Null_Variable_Info_Ref; AbstractOwnVariables := NextSymbol (AbstractOwnVariables); end loop; return Constituent; end Lookup_Refinement_Constituents; begin -- Lookup_Package_Body Trace_Lex_Str (Msg => "In Lookup_Package_Body, seeking ", L => Name); if Scope_Type = Privat then Item := Lookup_Declarations (Name => Name, Stop_At => Stop_At, Context => Context, Head => RawDict.Get_Package_First_Private_Declaration (The_Package => The_Package)); else Item := Lookup_Declarations (Name => Name, Stop_At => Stop_At, Context => Context, Head => RawDict.Get_Package_First_Local_Declaration (The_Package => The_Package)); if Item = NullSymbol then Item := Lookup_Declarations (Name => Name, Stop_At => Stop_At, Context => Context, Head => RawDict.Get_Package_Local_Renaming_Declarations (The_Package => The_Package)); end if; if Item = NullSymbol and then Context = ProofContext then Item := RawDict.Get_Variable_Symbol (Lookup_Refinement_Constituents (Name => Name, The_Package => The_Package)); 
end if; if Item = NullSymbol then Item := Lookup_Declarations (Name => Name, Stop_At => Stop_At, Context => Context, Head => RawDict.Get_Package_First_Private_Declaration (The_Package => The_Package)); end if; end if; if Item = NullSymbol then Item := Lookup_Package_Visible_Declarations (The_Package => The_Package, Name => Name, Context => Context); end if; return Item; end Lookup_Package_Body; ------------------------------------------------------------------------------ procedure Lookup_Subprogram_Body (Name : in LexTokenManager.Lex_String; Stop_At : in LexTokenManager.Lex_String; The_Subprogram : in RawDict.Subprogram_Info_Ref; Context : in Contexts; Item : out Symbol; Is_Visible : out Boolean) --# global in Dict; --# in LexTokenManager.State; --# derives Is_Visible, --# Item from Context, --# Dict, --# LexTokenManager.State, --# Name, --# Stop_At, --# The_Subprogram; is procedure LookupSubprogramParameters (Name : in LexTokenManager.Lex_String; The_Subprogram : in RawDict.Subprogram_Info_Ref; Context : in Contexts; Parameter : out Symbol; Is_Visible : out Boolean) --# global in Dict; --# in LexTokenManager.State; --# derives Is_Visible from Context, --# Dict, --# LexTokenManager.State, --# Name, --# The_Subprogram & --# Parameter from Dict, --# LexTokenManager.State, --# Name, --# The_Subprogram; is The_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref; ------------------------------------------------------------------------------ procedure Lookup_Global_Variables (Name : in LexTokenManager.Lex_String; The_Subprogram : in RawDict.Subprogram_Info_Ref; Context : in Contexts; Variable : out Symbol; Is_Visible : out Boolean) --# global in Dict; --# in LexTokenManager.State; --# derives Is_Visible from Context, --# Dict, --# LexTokenManager.State, --# Name, --# The_Subprogram & --# Variable from Dict, --# LexTokenManager.State, --# Name, --# The_Subprogram; is The_Global_Variable : RawDict.Global_Variable_Info_Ref; The_Variable : RawDict.Variable_Info_Ref; The_Subprogram_Parameter : RawDict.Subprogram_Parameter_Info_Ref; Stop : Boolean := False; begin Variable := NullSymbol; Is_Visible := False; Trace_Lex_Str (Msg => " In Lookup_Global_Variables, seeking ", L => Name); The_Global_Variable := RawDict.Get_Subprogram_First_Global_Variable (The_Subprogram => The_Subprogram, Abstraction => Get_Subprogram_Abstraction (The_Subprogram => The_Subprogram, Scope => Set_Visibility (The_Visibility => Local, The_Unit => RawDict.Get_Subprogram_Symbol (The_Subprogram)))); while not Stop loop if The_Global_Variable = RawDict.Null_Global_Variable_Info_Ref then Variable := NullSymbol; Is_Visible := False; exit; end if; case RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) is when RawDict.Subprogram_Variable_Item => The_Variable := RawDict.Get_Global_Variable_Variable (The_Global_Variable => The_Global_Variable); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => RawDict.Get_Variable_Name (The_Variable => The_Variable), Lex_Str2 => Name) = LexTokenManager.Str_Eq and then not RawDict.Get_Global_Variable_Prefix_Needed (The_Global_Variable => The_Global_Variable) then Variable := RawDict.Get_Variable_Symbol (The_Variable); Is_Visible := Context = ProofContext or else Variable_Is_Declared (The_Variable => The_Variable); Stop := True; end if; when RawDict.Subprogram_Parameter_Item => The_Subprogram_Parameter := RawDict.Get_Global_Variable_Parameter (The_Global_Variable => The_Global_Variable); if LexTokenManager.Lex_String_Case_Insensitive_Compare 
(Lex_Str1 => RawDict.Get_Subprogram_Parameter_Name (The_Subprogram_Parameter => The_Subprogram_Parameter), Lex_Str2 => Name) = LexTokenManager.Str_Eq and then not RawDict.Get_Global_Variable_Prefix_Needed (The_Global_Variable => The_Global_Variable) then Variable := RawDict.Get_Subprogram_Parameter_Symbol (The_Subprogram_Parameter); Is_Visible := True; Stop := True; end if; when others => -- non-exec code Stop := True; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Lookup_Global_Variables"); end case; The_Global_Variable := RawDict.Get_Next_Global_Variable (The_Global_Variable => The_Global_Variable); end loop; end Lookup_Global_Variables; begin -- LookupSubprogramParameters Trace_Lex_Str (Msg => " In LookupSubprogramParameters, seeking ", L => Name); The_Subprogram_Parameter := RawDict.Get_Subprogram_First_Parameter (The_Subprogram => The_Subprogram); while The_Subprogram_Parameter /= RawDict.Null_Subprogram_Parameter_Info_Ref and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => RawDict.Get_Subprogram_Parameter_Name (The_Subprogram_Parameter => The_Subprogram_Parameter), Lex_Str2 => Name) /= LexTokenManager.Str_Eq loop The_Subprogram_Parameter := RawDict.Get_Next_Subprogram_Parameter (The_Subprogram_Parameter => The_Subprogram_Parameter); end loop; if The_Subprogram_Parameter = RawDict.Null_Subprogram_Parameter_Info_Ref then Lookup_Global_Variables (Name, The_Subprogram, Context, Parameter, Is_Visible); else Parameter := RawDict.Get_Subprogram_Parameter_Symbol (The_Subprogram_Parameter); Is_Visible := True; end if; end LookupSubprogramParameters; begin -- Lookup_Subprogram_Body Trace_Lex_Str (Msg => "In Lookup_Subprogram_Body, seeking ", L => Name); Item := Lookup_Declarations (Name => Name, Stop_At => Stop_At, Context => Context, Head => RawDict.Get_Subprogram_First_Declaration (The_Subprogram => The_Subprogram)); if Item = NullSymbol then Item := Lookup_Declarations (Name => Name, Stop_At => Stop_At, Context => Context, Head => RawDict.Get_Subprogram_Renaming_Declarations (The_Subprogram => The_Subprogram)); end if; if Item = NullSymbol then LookupSubprogramParameters (Name, The_Subprogram, Context, Item, Is_Visible); else Is_Visible := True; end if; end Lookup_Subprogram_Body; ------------------------------------------------------------------------------ function LookupLoop (Name : LexTokenManager.Lex_String; TheLoop : Symbol) return Symbol --# global in Dict; --# in LexTokenManager.State; is Item : Symbol; ----------------------------------------------------------------------------- function LookupLoopParameter (Name : LexTokenManager.Lex_String; ForLoop : Symbol) return Symbol --# global in Dict; --# in LexTokenManager.State; is Item : Symbol; begin Trace_Lex_Str (Msg => " In LookupLoopParameter, seeking ", L => Name); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => GetSimpleName (RawDict.GetLoopParameter (ForLoop)), Lex_Str2 => Name) = LexTokenManager.Str_Eq then Item := RawDict.GetLoopParameter (ForLoop); else Item := NullSymbol; end if; return Item; end LookupLoopParameter; begin -- LookupLoop Trace_Lex_Str (Msg => "In LookupLoop, seeking ", L => Name); if Is_For_Loop (TheSymbol => TheLoop) then Item := LookupLoopParameter (Name, TheLoop); else Item := NullSymbol; end if; return Item; end LookupLoop; -------------------------------------------------------------------------------- function Lookup_Type (Name : LexTokenManager.Lex_String; Context : Contexts; The_Type : RawDict.Type_Info_Ref; 
Scope_Type : Visibility) return Symbol --# global in Dict; --# in LexTokenManager.State; is Item : Symbol; -------------------------------------------------------------------------------- function Lookup_Record_Components (Name : LexTokenManager.Lex_String; The_Type : RawDict.Type_Info_Ref) return RawDict.Record_Component_Info_Ref --# global in Dict; --# in LexTokenManager.State; is The_Record_Component : RawDict.Record_Component_Info_Ref; The_First_Record_Component : RawDict.Record_Component_Info_Ref; Current_Record : RawDict.Type_Info_Ref; begin Trace_Lex_Str (Msg => " In Lookup_Record_Components, seeking ", L => Name); Current_Record := The_Type; loop The_First_Record_Component := RawDict.Get_Type_First_Record_Component (Type_Mark => Current_Record); The_Record_Component := The_First_Record_Component; while (The_Record_Component /= RawDict.Null_Record_Component_Info_Ref -- did not find and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => RawDict.Get_Record_Component_Name (The_Record_Component => The_Record_Component), Lex_Str2 => Name) /= LexTokenManager.Str_Eq) loop -- found The_Record_Component := RawDict.Get_Next_Record_Component (The_Record_Component => The_Record_Component); end loop; exit when The_Record_Component /= RawDict.Null_Record_Component_Info_Ref; -- carry success out of outer loop -- if we get here we failed to find field in local declarations -- so we search in inherited fields if there are any -- exit if no inherited fields found exit when not RawDict.Get_Record_Component_Inherited_Field (The_Record_Component => The_First_Record_Component); -- restart search in inherited fields Current_Record := RawDict.Get_Record_Component_Type (The_Record_Component => The_First_Record_Component); end loop; return The_Record_Component; end Lookup_Record_Components; -------------------------------------------------------------------------------- function Lookup_Protected_Local_Scope (Name : LexTokenManager.Lex_String; Context : Contexts; The_Protected_Type : RawDict.Type_Info_Ref; Scope_Type : Visibility) return Symbol --# global in Dict; --# in LexTokenManager.State; --# pre RawDict.Get_Type_Discriminant (The_Protected_Type, Dict) = Protected_Type_Item; is Item : Symbol; -------------------------------------------------------------------------------- function Lookup_Protected_Elements (Name : LexTokenManager.Lex_String; Context : Contexts; The_Protected_Type : RawDict.Type_Info_Ref) return Symbol --# global in Dict; --# in LexTokenManager.State; --# pre RawDict.Get_Type_Discriminant (The_Protected_Type, Dict) = Protected_Type_Item; is Item : Symbol; It : Iterator; function Lookup_Refinement_Constituents (Name : LexTokenManager.Lex_String; The_Protected_Type : RawDict.Type_Info_Ref) return Symbol --# global in Dict; --# in LexTokenManager.State; is Constituents : Iterator; Constituent : Symbol; begin Constituents := First_Constituent (The_Variable => Get_Protected_Type_Own_Variable (The_Protected_Type => The_Protected_Type)); loop if IsNullIterator (Constituents) then Constituent := NullSymbol; exit; end if; Constituent := CurrentSymbol (Constituents); exit when LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => GetSimpleName (Constituent), Lex_Str2 => Name) = LexTokenManager.Str_Eq; Constituents := NextSymbol (Constituents); end loop; return Constituent; end Lookup_Refinement_Constituents; begin -- Lookup_Protected_Elements Trace_Lex_Str (Msg => " In Lookup_Protected_Elements, seeking ", L => Name); Item := NullSymbol; It := First_Protected_Element 
(The_Protected_Type => The_Protected_Type); while not IsNullIterator (It) loop if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => GetSimpleName (CurrentSymbol (It)), Lex_Str2 => Name) = LexTokenManager.Str_Eq then Item := CurrentSymbol (It); exit; end if; It := NextSymbol (It); end loop; -- As we add each protected element to the Dictionary they first get added as -- refinement cosntiuents (in proof context) and then as variables. The variable -- add must match each up with the refinement constituent already added so we -- need a special look up here to find them if Item = NullSymbol and then Context = ProofContext then Item := Lookup_Refinement_Constituents (Name => Name, The_Protected_Type => The_Protected_Type); end if; -- if Item is null at this point then we have not found a match in the private part -- of the private type but we must also look in the visible part since things here -- are also directly visible if Item = NullSymbol then Item := Lookup_Protected_Type_Visible_Declarations (The_Protected_Type => The_Protected_Type, Name => Name, Context => Context); end if; return Item; end Lookup_Protected_Elements; begin -- Lookup_Protected_Local_Scope Trace_Lex_Str (Msg => " In Lookup_Protected_Local_Scope, seeking ", L => Name); -- Search local declarations if we are in local scope. If this fails (or if we -- are in private scope), then search protected elements etc., by calling -- Lookup_Protected_Elements Item := NullSymbol; if Scope_Type = Local then -- search starts in protected body where we seek local declarations Item := Lookup_Declarations (Name => Name, Stop_At => LexTokenManager.Null_String, Context => Context, Head => RawDict.Get_Protected_Type_First_Local_Declaration (The_Protected_Type => The_Protected_Type)); if Item = NullSymbol then -- we need to look at the part of the enclosing package body which is "above" the -- protected type declaration Item := Lookup_Package_Body (Name => Name, Stop_At => RawDict.Get_Type_Name (Type_Mark => The_Protected_Type), The_Package => Get_Enclosing_Package (Scope => Set_Visibility (The_Visibility => Local, The_Unit => RawDict.Get_Type_Symbol (The_Protected_Type))), Context => Context, Scope_Type => Local); end if; end if; if Item = NullSymbol then -- we either failed to find the item in the protected body or we skipped -- it completely because Scope_Type = Privat on entry; so now we search -- the private part of the protected type Item := Lookup_Protected_Elements (Name => Name, Context => Context, The_Protected_Type => The_Protected_Type); end if; return Item; end Lookup_Protected_Local_Scope; -------------------------------------------------------------------------------- function Lookup_Task_Local_Scope (Name : LexTokenManager.Lex_String; Context : Contexts; The_Task_Type : RawDict.Type_Info_Ref) return Symbol --# global in Dict; --# in LexTokenManager.State; is Item : Symbol; function Lookup_Global_Variables (Name : LexTokenManager.Lex_String; The_Task_Type : RawDict.Type_Info_Ref; Context : Contexts) return RawDict.Variable_Info_Ref --# global in Dict; --# in LexTokenManager.State; is The_Global_Variable : RawDict.Global_Variable_Info_Ref; The_Variable, Result : RawDict.Variable_Info_Ref; begin Trace_Lex_Str (Msg => " In Lookup_Global_Variables, seeking ", L => Name); The_Global_Variable := RawDict.Get_Task_Type_First_Global_Variable (The_Task_Type => The_Task_Type, Abstraction => Get_Task_Type_Abstraction (The_Task_Type => The_Task_Type, Scope => Set_Visibility (The_Visibility => Local, The_Unit => 
RawDict.Get_Type_Symbol (The_Task_Type)))); loop if The_Global_Variable = RawDict.Null_Global_Variable_Info_Ref then Result := RawDict.Null_Variable_Info_Ref; exit; end if; The_Variable := RawDict.Get_Global_Variable_Variable (The_Global_Variable => The_Global_Variable); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => RawDict.Get_Variable_Name (The_Variable => The_Variable), Lex_Str2 => Name) = LexTokenManager.Str_Eq and then not RawDict.Get_Global_Variable_Prefix_Needed (The_Global_Variable => The_Global_Variable) then if Context = ProgramContext and then not Variable_Is_Declared (The_Variable => The_Variable) then Result := RawDict.Null_Variable_Info_Ref; else Result := The_Variable; end if; exit; end if; The_Global_Variable := RawDict.Get_Next_Global_Variable (The_Global_Variable => The_Global_Variable); end loop; return Result; end Lookup_Global_Variables; begin -- Lookup_Task_Local_Scope -- search starts in task body where we seek local declarations Trace_Lex_Str (Msg => " In Lookup_Task_Local_Scope, seeking ", L => Name); Item := Lookup_Declarations (Name => Name, Stop_At => LexTokenManager.Null_String, Context => Context, Head => RawDict.Get_Task_Type_First_Local_Declaration (The_Task_Type => The_Task_Type)); if Item = NullSymbol then -- look up globals Item := RawDict.Get_Variable_Symbol (Lookup_Global_Variables (Name => Name, The_Task_Type => The_Task_Type, Context => Context)); end if; if Item = NullSymbol then Item := Lookup_Known_Discriminants (Name => Name, The_Type => The_Task_Type); end if; return Item; end Lookup_Task_Local_Scope; begin -- Lookup_Type Trace_Lex_Str (Msg => " In Lookup_Type, seeking ", L => Name); case RawDict.Get_Type_Discriminant (Type_Mark => The_Type) is when Record_Type_Item => Item := RawDict.Get_Record_Component_Symbol (Lookup_Record_Components (Name => Name, The_Type => The_Type)); when Protected_Type_Item => Item := Lookup_Protected_Local_Scope (Name => Name, Context => Context, The_Protected_Type => The_Type, Scope_Type => Scope_Type); when Task_Type_Item => Item := Lookup_Task_Local_Scope (Name => Name, Context => Context, The_Task_Type => The_Type); when others => -- non-exec code Item := NullSymbol; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Lookup_Type"); end case; return Item; end Lookup_Type; ------------------------------------------------------------------------------ function Lookup_Subcomponents (Name : LexTokenManager.Lex_String; The_Subcomponent : RawDict.Subcomponent_Info_Ref; Calling_Scope : Scopes) return RawDict.Subcomponent_Info_Ref --# global in Dict; --# in LexTokenManager.State; is Current : RawDict.Subcomponent_Info_Ref; Current_Record : RawDict.Subcomponent_Info_Ref; First_Subcomponent : RawDict.Subcomponent_Info_Ref; Found : Boolean := False; IsPrivateField : Boolean := False; begin Trace_Lex_Str (Msg => " In Lookup_Subcomponents, seeking ", L => Name); Current_Record := The_Subcomponent; loop First_Subcomponent := Current_Record; Current := First_Subcomponent; if not IsPrivateField then loop -- search fields at current depth of current record exit when Current = RawDict.Null_Subcomponent_Info_Ref; Found := LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Get_Subcomponent_Simple_Name (The_Subcomponent => Current), Lex_Str2 => Name) = LexTokenManager.Str_Eq; exit when Found; Current := RawDict.Get_Next_Subcomponent (The_Subcomponent => Current); end loop; end if; exit when Found; -- carry success out of outer loop -- if we get here then we 
failed to find the desired field at the current depth -- so we move to the first inherited field (if there is one and if it not a private -- extension) and try again exit when not RawDict.Get_Record_Component_Inherited_Field (The_Record_Component => RawDict.Get_Subcomponent_Record_Component (The_Subcomponent => First_Subcomponent)); -- no more inherited fields IsPrivateField := Type_Is_Private_Here (Type_Mark => Get_Subcomponent_Type (The_Subcomponent => Current_Record), Scope => Calling_Scope); Current_Record := RawDict.Get_Subcomponent_Subcomponents (The_Subcomponent => First_Subcomponent); end loop; if not Found then Current := RawDict.Null_Subcomponent_Info_Ref; end if; return Current; end Lookup_Subcomponents; begin -- Lookup_Local_Declarations -- first part of if traps things like declaring a variable P in -- a procedure P and also handles implicit return variables in -- function proof annotations. Trace_Lex_Str (Msg => " In Lookup_Local_Declarations, seeking ", L => Name); The_Discriminant := RawDict.GetSymbolDiscriminant (Region); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => GetSimpleName (Region), Lex_Str2 => Name) = LexTokenManager.Str_Eq and then -- Trap finding of A in X.A in nested record case -- Note that this case also means that seeking PT from the private part or body of PT -- returns the type symbol not the implicit own variable of the protected type The_Discriminant /= Subcomponent_Symbol and then The_Discriminant /= Variable_Symbol and then The_Discriminant /= Subprogram_Parameter_Symbol and then -- This line added as a result of CFR 1738. It allows the rest of the procedure to be -- used in the sole situation where we are performing lookup on a member of a record -- that has the same name as its own type. 
(The_Discriminant /= Type_Symbol or else RawDict.Get_Type_Discriminant (Type_Mark => RawDict.Get_Type_Info_Ref (Item => Region)) /= Record_Type_Item) then Item := Region; Is_Visible := True; else case The_Discriminant is when Package_Symbol => Item := Lookup_Package_Body (Name => Name, Stop_At => Stop_At, The_Package => RawDict.Get_Package_Info_Ref (Item => Region), Context => Context, Scope_Type => Scope_Type); Is_Visible := Item /= NullSymbol; when Subprogram_Symbol => Lookup_Subprogram_Body (Name => Name, Stop_At => Stop_At, The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => Region), Context => Context, Item => Item, Is_Visible => Is_Visible); when LoopSymbol => Item := LookupLoop (Name, Region); Is_Visible := Item /= NullSymbol; when Subcomponent_Symbol => Item := RawDict.Get_Subcomponent_Symbol (Lookup_Subcomponents (Name => Name, The_Subcomponent => RawDict.Get_Subcomponent_Subcomponents (The_Subcomponent => RawDict.Get_Subcomponent_Info_Ref (Item => Region)), Calling_Scope => Calling_Scope)); Is_Visible := Item /= NullSymbol; when Variable_Symbol => Item := RawDict.Get_Subcomponent_Symbol (Lookup_Subcomponents (Name => Name, The_Subcomponent => RawDict.Get_Variable_Subcomponents (The_Variable => RawDict.Get_Variable_Info_Ref (Item => Region)), Calling_Scope => Calling_Scope)); Is_Visible := Item /= NullSymbol; when Subprogram_Parameter_Symbol => Item := RawDict.Get_Subcomponent_Symbol (Lookup_Subcomponents (Name => Name, The_Subcomponent => RawDict.Get_Subprogram_Parameter_Subcomponents (The_Subprogram_Parameter => RawDict.Get_Subprogram_Parameter_Info_Ref (Item => Region)), Calling_Scope => Calling_Scope)); Is_Visible := Item /= NullSymbol; when Type_Symbol => -- must be a type (record, task or protected) Item := Lookup_Type (Name => Name, Context => Context, The_Type => RawDict.Get_Type_Info_Ref (Item => Region), Scope_Type => Scope_Type); Is_Visible := Item /= NullSymbol; when Quantified_Variable_Symbol | Implicit_Return_Variable_Symbol => Item := NullSymbol; Is_Visible := False; when others => -- non-exec code Item := NullSymbol; Is_Visible := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Lookup_Local_Declarations"); end case; end if; end Lookup_Local_Declarations; begin -- LookupScope Region := GetRegion (Scope); Trace_Lex_Str (Msg => "In LookupScope, seeking ", L => Name); Trace_Sym (Msg => " in ", Sym => Region, Scope => Scope); Trace_Lex_Str (Msg => " with Stop_At set to ", L => Stop_At); case Get_Visibility (Scope => Scope) is when Visible => Item := LookupVisibleDeclarations (Name, Region, Context); Is_Visible := Item /= NullSymbol; when Local | Privat => Lookup_Local_Declarations (Name => Name, Stop_At => Stop_At, Region => Region, Context => Context, Scope_Type => Get_Visibility (Scope => Scope), Calling_Scope => Calling_Scope, Item => Item, Is_Visible => Is_Visible); end case; if Context = ProofContext and then IsAdaFunction (Item) then Item := RawDict.Get_Subprogram_Implicit_Proof_Function (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item), Abstraction => Get_Subprogram_Abstraction (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item), Scope => Calling_Scope)); end if; Trace_Sym (Msg => "Found in LookupScope ", Sym => Item, Scope => Scope); end LookupScope; spark-2012.0.deb/examiner/sem-compunit-wf_task_body.adb0000644000175000017500000005752411753202336022027 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited 
------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.CompUnit) procedure Wf_Task_Body (Node : in STree.SyntaxNode; Scope : in out Dictionary.Scopes; Next_Node : out STree.SyntaxNode) is Task_Sym : Dictionary.Symbol; Ident_Node, Anno_Node, Subprogram_Implementation_Node, End_Node, With_Node : STree.SyntaxNode; OK_To_Add, In_Subunit : Boolean; Hidden : Hidden_Class; Task_Scope : Dictionary.Scopes; Ident_Str : LexTokenManager.Lex_String; Valid_Annotation : Boolean := False; ---------------------------------------------------------------------- procedure Check_OK_To_Add (Type_Sym : in Dictionary.Symbol; In_Subunit : in Boolean; Ident_Node_Pos : in LexTokenManager.Token_Position; Ident_Str : in LexTokenManager.Lex_String; OK_To_Add : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node_Pos, --# Ident_Str, --# In_Subunit, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Type_Sym & --# OK_To_Add from Dictionary.Dict, --# In_Subunit, --# Type_Sym; is begin OK_To_Add := True; if In_Subunit then -- we require a stub but must not have a previous body if Dictionary.HasBody (Type_Sym) then OK_To_Add := False; ErrorHandler.Semantic_Error (Err_Num => 992, Reference => ErrorHandler.No_Reference, Position => Ident_Node_Pos, Id_Str => Ident_Str); end if; if not Dictionary.HasBodyStub (Type_Sym) then OK_To_Add := False; ErrorHandler.Semantic_Error (Err_Num => 15, Reference => ErrorHandler.No_Reference, Position => Ident_Node_Pos, Id_Str => Ident_Str); end if; else -- we must have neither stub nor previous body if Dictionary.HasBody (Type_Sym) or else Dictionary.HasBodyStub (Type_Sym) then OK_To_Add := False; ErrorHandler.Semantic_Error (Err_Num => 992, Reference => ErrorHandler.No_Reference, Position => Ident_Node_Pos, Id_Str => Ident_Str); end if; end if; end Check_OK_To_Add; ---------------------------------------------------------------------- function Requires_Second_Annotation (Task_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is Global_Var : Dictionary.Symbol; Required : Boolean; Global_Item : Dictionary.Iterator; Enclosing_Region : Dictionary.Symbol; begin Required := False; Enclosing_Region := Dictionary.GetRegion (Dictionary.GetScope (Task_Sym)); Global_Item := Dictionary.FirstGlobalVariable (Dictionary.IsAbstract, Task_Sym); while Global_Item /= Dictionary.NullIterator loop Global_Var := Dictionary.CurrentSymbol (Global_Item); if Dictionary.IsRefinedOwnVariable (Global_Var) and then 
Dictionary.GetOwner (Global_Var) = Enclosing_Region then Required := True; exit; end if; Global_Item := Dictionary.NextSymbol (Global_Item); end loop; return Required; end Requires_Second_Annotation; ---------------------------------------------------------------------- function Empty_Annotation (Node : STree.SyntaxNode) return Boolean --# global in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.procedure_annotation; is Current_Node : STree.SyntaxNode; begin Current_Node := Child_Node (Current_Node => Node); -- ASSUME Current_Node = moded_global_definition OR dependency_relation OR declare_annotation OR procedure_constraint if Syntax_Node_Type (Node => Current_Node) = SP_Symbols.procedure_constraint then Current_Node := Child_Node (Current_Node => Current_Node); -- ASSUME Current_Node = precondition OR postcondition OR NULL SystemErrors.RT_Assert (C => Current_Node = STree.NullNode or else Syntax_Node_Type (Node => Current_Node) = SP_Symbols.precondition or else Syntax_Node_Type (Node => Current_Node) = SP_Symbols.postcondition, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = precondition OR postcondition OR NULL in Empty_Annotation"); elsif Syntax_Node_Type (Node => Current_Node) /= SP_Symbols.moded_global_definition and then Syntax_Node_Type (Node => Current_Node) /= SP_Symbols.dependency_relation and then Syntax_Node_Type (Node => Current_Node) /= SP_Symbols.declare_annotation then Current_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = moded_global_definition OR dependency_relation OR " & "declare_annotation OR procedure_constraint in Empty_Annotation"); end if; return Current_Node = STree.NullNode; end Empty_Annotation; ---------------------------------------------------------------------- procedure Process_Annotation (Anno_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Task_Sym : in Dictionary.Symbol; Valid_Annotation : in out Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# in out TheHeap; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# TheHeap from *, --# Anno_Node, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# STree.Table, --# Task_Sym, --# TheHeap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from Anno_Node, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Task_Sym, --# TheHeap & --# Valid_Annotation from *, --# Anno_Node, --# STree.Table; --# pre Syntax_Node_Type (Anno_Node, STree.Table) = SP_Symbols.procedure_annotation; --# post STree.Table = STree.Table~; is Current_Node : STree.SyntaxNode; procedure Raise_Error (Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out 
ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# SPARK_IO.File_Sys; is begin ErrorHandler.Semantic_Error (Err_Num => 990, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => LexTokenManager.Null_String); end Raise_Error; begin -- Process_Annotation Current_Node := Child_Node (Current_Node => Anno_Node); -- ASSUME Current_Node = moded_global_definition OR dependency_relation OR declare_annotation OR procedure_constraint -- to be legal, Current_Node must be a moded_global_definition if Syntax_Node_Type (Node => Current_Node) = SP_Symbols.moded_global_definition then -- ASSUME Current_Node = moded_global_definition Current_Node := Last_Sibling_Of (Start_Node => Current_Node); -- ASSUME Current_Node = procedure_constraint SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Current_Node) = SP_Symbols.procedure_constraint, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = procedure_constraint in Process_Annotation"); if Child_Node (Current_Node => Current_Node) = STree.NullNode then -- ASSUME Child_Node (Current_Node => Current_Node) = NULL Valid_Annotation := True; Wf_Subprogram_Annotation (Node => Anno_Node, Current_Scope => Scope, Subprog_Sym => Task_Sym, First_Seen => False, The_Heap => TheHeap); elsif Syntax_Node_Type (Node => Child_Node (Current_Node => Current_Node)) = SP_Symbols.precondition or else Syntax_Node_Type (Node => Child_Node (Current_Node => Current_Node)) = SP_Symbols.postcondition then -- ASSUME Child_Node (Current_Node => Current_Node) = precondition OR postcondition Raise_Error (Node_Pos => Node_Position (Node => Current_Node)); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child_Node (Current_Node => Current_Node) = precondition OR postcondition OR " & "NULL in Process_Annotation"); end if; elsif Syntax_Node_Type (Node => Current_Node) = SP_Symbols.dependency_relation or else Syntax_Node_Type (Node => Current_Node) = SP_Symbols.declare_annotation or else Syntax_Node_Type (Node => Current_Node) = SP_Symbols.procedure_constraint then Raise_Error (Node_Pos => Node_Position (Node => Current_Node)); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = moded_global_definition OR dependency_relation OR " & "declare_annotation OR procedure_constraint in Process_Annotation"); end if; end Process_Annotation; begin -- Wf_Task_Body -- set up default "pruning" of tree walk in case errors found below Next_Node := STree.NullNode; Ident_Node := Child_Node (Current_Node => Node); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Wf_Task_Body"); Ident_Str := Node_Lex_String (Node => Ident_Node); Task_Sym := Dictionary.LookupItem (Name => Ident_Str, Scope => Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); -- Check that Task_Sym is an task type declared in the spec. Since we are looking up an identifier -- not a full, dotted name we can't find any other entry by mistake so a simple check is all that -- is needed. 
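   -- Hedged illustration (hypothetical user source, not part of the Examiner; the
   -- identifier Server is an assumption for the example only) of the construct
   -- analysed below:
   --
   --    task type Server;          -- declared in the package specification
   --    ...
   --    task body Server is        -- the task_body node checked here; its optional
   --       ...                     -- refined annotation and closing designator are
   --    end Server;                -- examined further down.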
if Dictionary.IsTaskType (Task_Sym) then Anno_Node := Next_Sibling (Current_Node => Ident_Node); -- ASSUME Anno_Node = procedure_annotation SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Anno_Node) = SP_Symbols.procedure_annotation, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Anno_Node = procedure_annotation in Wf_Task_Body"); Subprogram_Implementation_Node := Next_Sibling (Current_Node => Anno_Node); -- ASSUME Subprogram_Implementation_Node = subprogram_implementation SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Subprogram_Implementation_Node) = SP_Symbols.subprogram_implementation, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Subprogram_Implementation_Node = subprogram_implementation in Wf_Task_Body"); End_Node := Last_Sibling_Of (Start_Node => Child_Node (Current_Node => Subprogram_Implementation_Node)); -- ASSUME End_Node = designator OR hidden_part SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => End_Node) = SP_Symbols.designator or else Syntax_Node_Type (Node => End_Node) = SP_Symbols.hidden_part, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect End_Node = designator OR hidden_part in Wf_Task_Body"); Hidden := Body_Hidden_Class (Node => Subprogram_Implementation_Node); Task_Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Task_Sym); -- see if we are a subunit or an ordinary in-line declaration With_Node := Parent_Node (Current_Node => Parent_Node (Current_Node => Node)); -- ASSUME With_Node = subunit OR abody if Syntax_Node_Type (Node => With_Node) = SP_Symbols.abody then -- ASSUME With_Node = abody In_Subunit := False; elsif Syntax_Node_Type (Node => With_Node) = SP_Symbols.subunit then -- ASSUME With_Node = subunit In_Subunit := True; With_Node := Child_Node (Current_Node => Child_Node (Current_Node => Parent_Node (Current_Node => Parent_Node (Current_Node => With_Node)))); -- ASSUME With_Node = subunit OR with_clause if Syntax_Node_Type (Node => With_Node) = SP_Symbols.with_clause then With_Node := Parent_Node (Current_Node => With_Node); -- ASSUME With_Node = context_clause SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => With_Node) = SP_Symbols.context_clause, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect With_Node = context_clause in Wf_Task_Body"); Wf_Context_Clause (Node => With_Node, Comp_Sym => Task_Sym, Scope => Task_Scope); elsif Syntax_Node_Type (Node => With_Node) /= SP_Symbols.subunit then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect With_Node = subunit OR with_clause in Wf_Task_Body"); end if; else In_Subunit := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect With_Node = subunit OR abody in Wf_Task_Body"); end if; -- The task is valid so far, it may be hidden or it may have a real sequence of statements -- see if a body has already been declared etc. 
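      -- Hedged sketch of the two situations Check_OK_To_Add distinguishes below
      -- (hypothetical user source; the names P and T are assumptions for illustration):
      --
      --    separate (P)
      --    task body T is ...      -- subunit: a stub "task body T is separate;" must
      --                            -- already exist and no body may yet have been given
      --
      --    task body T is ...      -- ordinary in-line body: neither a stub nor a
      --                            -- previous body may already be present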
Check_OK_To_Add (Type_Sym => Task_Sym, In_Subunit => In_Subunit, Ident_Node_Pos => Node_Position (Node => Ident_Node), Ident_Str => Ident_Str, OK_To_Add => OK_To_Add); if OK_To_Add then case Hidden is when All_Hidden => Dictionary.AddBody (CompilationUnit => Task_Sym, Comp_Unit => ContextManager.Ops.Current_Unit, TheBody => Dictionary.Location'(Start_Position => Node_Position (Node => Node), End_Position => Node_Position (Node => Node)), Hidden => True); ErrorHandler.Hidden_Text (Position => Node_Position (Node => End_Node), Unit_Str => Ident_Str, Unit_Typ => SP_Symbols.subprogram_implementation); when Not_Hidden => Dictionary.AddBody (CompilationUnit => Task_Sym, Comp_Unit => ContextManager.Ops.Current_Unit, TheBody => Dictionary.Location'(Start_Position => Node_Position (Node => Node), End_Position => Node_Position (Node => Node)), Hidden => False); when Handler_Hidden => Dictionary.AddBody (CompilationUnit => Task_Sym, Comp_Unit => ContextManager.Ops.Current_Unit, TheBody => Dictionary.Location'(Start_Position => Node_Position (Node => Node), End_Position => Node_Position (Node => Node)), Hidden => False); ErrorHandler.Hidden_Handler (Position => Node_Position (Node => End_Node), Unit_Str => Ident_Str, Unit_Typ => SP_Symbols.task_body); end case; -- check annotation if In_Subunit then -- no anno expected if not Empty_Annotation (Node => Anno_Node) then ErrorHandler.Semantic_Error (Err_Num => 155, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Anno_Node), Id_Str => Ident_Str); else STree.Set_Node_Lex_String (Sym => Task_Sym, Node => Ident_Node); end if; else -- not in subunit, anno may be needed if Requires_Second_Annotation (Task_Sym => Task_Sym) then if Empty_Annotation (Node => Anno_Node) then ErrorHandler.Semantic_Error (Err_Num => 154, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Ident_Str); else -- anno present and required STree.Set_Node_Lex_String (Sym => Task_Sym, Node => Ident_Node); Process_Annotation (Anno_Node => Anno_Node, Scope => Scope, Task_Sym => Task_Sym, Valid_Annotation => Valid_Annotation); end if; else -- second anno not required if Empty_Annotation (Node => Anno_Node) then STree.Set_Node_Lex_String (Sym => Task_Sym, Node => Ident_Node); else ErrorHandler.Semantic_Error (Err_Num => 155, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Anno_Node), Id_Str => Ident_Str); end if; end if; end if; -- set up scope for rest of tree walk Scope := Task_Scope; -- set up next node for rest of tree walk Next_Node := Subprogram_Implementation_Node; end if; -- Synthesise all from all dependency if necessary -- (The checks for whether there was a derives on the spec and not the body or vice versa -- are performed in wf_procedure_annotation, called earlier.) -- As this is a task body then this must be a refined annotation - there's no other reason to have -- an annotation on a task body. if Valid_Annotation and then Needs_Synthetic_Dependency (Proc_Task_Or_Entry => Task_Sym) then Dependency_Relation.Create_Full_Subprog_Dependency (Node_Pos => Node_Position (Node => Node), Subprog_Sym => Task_Sym, Abstraction => Dictionary.IsRefined, The_Heap => TheHeap); end if; -- Check closing identifier if present (i.e. 
not hidden) if Syntax_Node_Type (Node => End_Node) = SP_Symbols.designator and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Node => Ident_Node), Lex_Str2 => Node_Lex_String (Node => Child_Node (Current_Node => End_Node))) /= LexTokenManager.Str_Eq then ErrorHandler.Semantic_Error (Err_Num => 58, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => End_Node), Id_Str => Node_Lex_String (Node => Ident_Node)); end if; else -- not a valid Task ErrorHandler.Semantic_Error (Err_Num => 991, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); end if; end Wf_Task_Body; spark-2012.0.deb/examiner/sem-compunit-wf_generic_declaration.adb0000644000175000017500000003555011753202336024024 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.CompUnit) procedure Wf_Generic_Declaration (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes) is Current_Node : STree.SyntaxNode; procedure Process_Generic_Subprogram_Declaration (Node : in STree.SyntaxNode; Component_Data : in out ComponentManager.ComponentData) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# in out TheHeap; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# TheHeap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# STree.Table, --# TheHeap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# TheHeap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.generic_subprogram_declaration; --# post STree.Table = STree.Table~; is Context_Node : STree.SyntaxNode; Inherit_Node : STree.SyntaxNode; Ident_Node : STree.SyntaxNode; Generic_Formal_Part_Node : STree.SyntaxNode; Subprogram_Declaration_Node : STree.SyntaxNode; Generic_Unit, 
Subprog_Sym : Dictionary.Symbol; procedure Check_And_Add_Generic_Unit (Node : in STree.SyntaxNode; Ident_Node : in STree.SyntaxNode; Generic_Unit : out Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in STree.Table; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict, --# Generic_Unit from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Ident_Node, --# LexTokenManager.State, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.not_overriding_subprogram_declaration and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; is begin if Dictionary.Is_Null_Symbol (Dictionary.LookupItem (Name => Node_Lex_String (Node => Ident_Node), Scope => Dictionary.GlobalScope, Context => Dictionary.ProofContext, Full_Package_Name => False)) then Dictionary.Add_Generic_Unit (Kind => Dictionary.Generic_Of_Subprogram, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Ident_Node), End_Position => Node_Position (Node => Ident_Node)), Scope => Dictionary.GlobalScope, Generic_Unit => Generic_Unit); else Generic_Unit := Dictionary.NullSymbol; ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Node_Lex_String (Node => Ident_Node)); end if; end Check_And_Add_Generic_Unit; begin -- Process_Generic_Subprogram_Declaration Context_Node := Parent_Node (Current_Node => Parent_Node (Current_Node => Node)); -- ASSUME Context_Node = library_unit SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Context_Node) = SP_Symbols.library_unit, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Context_Node = library_unit in Process_Generic_Subprogram_Declaration"); Context_Node := Child_Node (Current_Node => Parent_Node (Current_Node => Context_Node)); -- ASSUME Context_Node = context_clause OR library_unit if Syntax_Node_Type (Node => Context_Node) = SP_Symbols.library_unit then -- ASSUME Context_Node = library_unit Context_Node := STree.NullNode; elsif Syntax_Node_Type (Node => Context_Node) /= SP_Symbols.context_clause then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Context_Node = context_clause OR library_unit in Process_Generic_Subprogram_Declaration"); end if; Inherit_Node := Child_Node (Current_Node => Node); -- ASSUME Inherit_Node = inherit_clause OR generic_formal_part if Syntax_Node_Type (Node => Inherit_Node) = SP_Symbols.inherit_clause then -- ASSUME Inherit_Node = inherit_clause Generic_Formal_Part_Node := Next_Sibling (Current_Node => Inherit_Node); elsif Syntax_Node_Type (Node => Inherit_Node) = SP_Symbols.generic_formal_part then -- ASSUME Inherit_Node = generic_formal_part Generic_Formal_Part_Node := Inherit_Node; Inherit_Node := STree.NullNode; else Generic_Formal_Part_Node := STree.NullNode; Inherit_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Inherit_Node = inherit_clause OR generic_formal_part in Process_Generic_Subprogram_Declaration"); end if; -- ASSUME Generic_Formal_Part_Node = 
generic_formal_part SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Generic_Formal_Part_Node) = SP_Symbols.generic_formal_part, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Generic_Formal_Part_Node = generic_formal_part in Process_Generic_Subprogram_Declaration"); Subprogram_Declaration_Node := Next_Sibling (Current_Node => Generic_Formal_Part_Node); -- ASSUME Subprogram_Declaration_Node = not_overriding_subprogram_declaration SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Subprogram_Declaration_Node) = SP_Symbols.not_overriding_subprogram_declaration, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Subprogram_Declaration_Node = not_overriding_subprogram_declaration " & "in Process_Generic_Subprogram_Declaration"); Ident_Node := Child_Node (Current_Node => Subprogram_Declaration_Node); -- ASSUME Ident_Node = function_specification OR procedure_specification OR proof_function_declaration if Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.proof_function_declaration then -- ASSUME Ident_Node = proof_function_declaration Ident_Node := Child_Node (Current_Node => Ident_Node); -- ASSUME Ident_Node = function_specification elsif Syntax_Node_Type (Node => Ident_Node) /= SP_Symbols.function_specification and then Syntax_Node_Type (Node => Ident_Node) /= SP_Symbols.procedure_specification then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = function_specification OR procedure_specification OR " & "proof_function_declaration in Process_Generic_Subprogram_Declaration"); end if; -- ASSUME Ident_Node = function_specification OR procedure_specification SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.function_specification or else Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.procedure_specification, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect ident_Node = function_specification OR procedure_specification " & "in Process_Generic_Subprogram_Declaration"); -- find identifier Ident_Node := Child_Node (Current_Node => Child_Node (Current_Node => Ident_Node)); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect ident_Node = identifier in Process_Generic_Subprogram_Declaration"); Check_And_Add_Generic_Unit (Node => Subprogram_Declaration_Node, Ident_Node => Ident_Node, Generic_Unit => Generic_Unit); if not Dictionary.Is_Null_Symbol (Generic_Unit) then Wf_Subprogram_Declaration (Node => Subprogram_Declaration_Node, Inherit_Node => Inherit_Node, Context_Node => Context_Node, Generic_Formal_Part_Node => Generic_Formal_Part_Node, Current_Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Generic_Unit), Generic_Unit => Generic_Unit, Component_Data => Component_Data, The_Heap => TheHeap, Subprog_Sym => Subprog_Sym); if not Dictionary.Is_Null_Symbol (Subprog_Sym) and then Dictionary.FirstGenericFormalParameter (TheGeneric => Subprog_Sym) = Dictionary.NullIterator then ErrorHandler.Semantic_Error (Err_Num => 629, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Subprogram_Declaration_Node), Id_Str => Node_Lex_String (Node => Ident_Node)); end if; end if; end Process_Generic_Subprogram_Declaration; begin -- Wf_Generic_Declaration case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => -- not allowed in SPARK 83 ErrorHandler.Semantic_Error (Err_Num => 637, 
Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); when CommandLineData.SPARK95_Onwards => -- ASSUME Node = generic_declaration Current_Node := Child_Node (Current_Node => Node); -- ASSUME Current_Node = generic_subprogram_declaration OR generic_package_declaration if Syntax_Node_Type (Node => Current_Node) = SP_Symbols.generic_subprogram_declaration then -- ASSUME Current_Node = generic_subprogram_declaration Process_Generic_Subprogram_Declaration (Node => Current_Node, Component_Data => GlobalComponentData); elsif Syntax_Node_Type (Node => Current_Node) = SP_Symbols.generic_package_declaration then -- ASSUME Current_Node = generic_package_declaration ErrorHandler.Semantic_Warning (Err_Num => 415, Position => Node_Position (Node => Current_Node), Id_Str => LexTokenManager.Null_String); Wf_Package_Declaration (Node => Current_Node, Current_Scope => Current_Scope, Component_Data => GlobalComponentData, The_Heap => TheHeap); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = generic_subprogram_declaration OR generic_package_declaration OR " & "in Wf_Generic_Declaration"); end if; end case; end Wf_Generic_Declaration; spark-2012.0.deb/examiner/sem-indexes_match.adb0000644000175000017500000000620711753202336020323 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem) function Indexes_Match (Target, Source : Dictionary.Symbol) return Boolean is Tgt_It, Src_It : Dictionary.Iterator; Ok : Boolean; function Bounds_Match (Src_Sym, Tgt_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; --# in LexTokenManager.State; is function Upper_Bound_Matches (Src_Sym, Tgt_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; --# in LexTokenManager.State; is begin return LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Dictionary.GetScalarAttributeValue (False, LexTokenManager.Last_Token, Src_Sym), Lex_Str2 => Dictionary.GetScalarAttributeValue (False, LexTokenManager.Last_Token, Tgt_Sym)) = LexTokenManager.Str_Eq; end Upper_Bound_Matches; -------------------- function Lower_Bound_Matches (Src_Sym, Tgt_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; --# in LexTokenManager.State; is begin return LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Dictionary.GetScalarAttributeValue (False, LexTokenManager.First_Token, Src_Sym), Lex_Str2 => Dictionary.GetScalarAttributeValue (False, LexTokenManager.First_Token, Tgt_Sym)) = LexTokenManager.Str_Eq; end Lower_Bound_Matches; begin -- Bounds_Match return Upper_Bound_Matches (Src_Sym => Src_Sym, Tgt_Sym => Tgt_Sym) and then Lower_Bound_Matches (Src_Sym => Src_Sym, Tgt_Sym => Tgt_Sym); end Bounds_Match; begin -- Indexes_Match Ok := True; Tgt_It := Dictionary.FirstArrayIndex (Target); Src_It := Dictionary.FirstArrayIndex (Source); while not Dictionary.IsNullIterator (Tgt_It) loop if not Bounds_Match (Src_Sym => Dictionary.CurrentSymbol (Src_It), Tgt_Sym => Dictionary.CurrentSymbol (Tgt_It)) then Ok := False; exit; end if; Tgt_It := Dictionary.NextSymbol (Tgt_It); Src_It := Dictionary.NextSymbol (Src_It); end loop; return Ok; end Indexes_Match; spark-2012.0.deb/examiner/sli-io.ads0000644000175000017500000000526411753202336016145 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with E_Strings; -- Manage the IO for the SLI file. --# inherit E_Strings; private package SLI.IO --# own out Stream_Buffer; is type File_Status is (Ok, Status_Error, Name_Error, Use_Error, Device_Error); -- Create a SLI file called Name_Of_File.SLI. procedure Create_File (Name_Of_File : in E_Strings.T; Status : out File_Status); --# global out Stream_Buffer; --# derives Status, --# Stream_Buffer from Name_Of_File; -- Close the SLI file. procedure Close; --# global out Stream_Buffer; --# derives Stream_Buffer from ; -- Write a character (Item) in the SLI file. 
procedure Put_Char (Item : in Character); --# global out Stream_Buffer; --# derives Stream_Buffer from Item; -- Write a string (Item) in the SLI file. procedure Put_String (Item : in String); --# global out Stream_Buffer; --# derives Stream_Buffer from Item; -- Write an integer (Item) in the SLI file. procedure Put_Integer (Item : in Integer); --# global out Stream_Buffer; --# derives Stream_Buffer from Item; -- Write a new line in the SLI file. procedure New_Line; --# global out Stream_Buffer; --# derives Stream_Buffer from ; -- Write a line (Item) in the SLI file. procedure Put_Line (Item : in String); --# global out Stream_Buffer; --# derives Stream_Buffer from Item; -- Write a string (EStr) in the SLI file. procedure E_Strings_Put_String (E_Str : in E_Strings.T); --# global out Stream_Buffer; --# derives Stream_Buffer from E_Str; -- Write a line (EStr) in the SLI file. procedure E_Strings_Put_Line (E_Str : in E_Strings.T); --# global out Stream_Buffer; --# derives Stream_Buffer from E_Str; end SLI.IO; spark-2012.0.deb/examiner/flowanalyser-flowanalyse-analyserelations-checkunused.adb0000644000175000017500000000660311753202336027630 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (FlowAnalyser.FlowAnalyse.AnalyseRelations) procedure CheckUnused is It : Dictionary.Iterator; Sym : Dictionary.Symbol; -- returns true if Sym is the transitive parent of something in S function ComponentsPresent (S : SeqAlgebra.Seq; Sym : Dictionary.Symbol) return Boolean --# global in ComponentData; --# in TheHeap; is ParentComponent : ComponentManager.Component; Found : Boolean := False; CurrentMember : SeqAlgebra.MemberOfSeq; begin ParentComponent := ComponentManager.GetComponentNode (ComponentData, Sym); CurrentMember := SeqAlgebra.FirstMember (TheHeap, S); while not SeqAlgebra.IsNullMember (CurrentMember) loop if ComponentManager.IsTransitiveParent (ComponentData, ParentComponent, ComponentManager.GetComponentNode (ComponentData, Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => CurrentMember))))) then Found := True; exit; end if; CurrentMember := SeqAlgebra.NextMember (TheHeap, CurrentMember); end loop; return Found; end ComponentsPresent; begin -- CheckUnused if IsSubprogram (SubprogSym) then It := Dictionary.First_Local_Variable (Subprogram => SubprogSym); while not Dictionary.IsNullIterator (It) loop Sym := Dictionary.CurrentSymbol (It); if Dictionary.TypeIsRecord (Dictionary.GetType (Sym)) then if not ComponentsPresent (IFA_Stack.Top (S).AllVars, Sym) then ErrorHandler.Semantic_Warning (Err_Num => 400, Position => EndPosition, Id_Str => Dictionary.GetSimpleName (Sym)); end if; else --not a record if not SeqAlgebra.IsMember (TheHeap, IFA_Stack.Top (S).AllVars, Natural (Dictionary.SymbolRef (Sym))) then ErrorHandler.Semantic_Warning (Err_Num => 400, Position => EndPosition, Id_Str => Dictionary.GetSimpleName (Sym)); end if; end if; It := Dictionary.NextSymbol (It); end loop; end if; end CheckUnused; spark-2012.0.deb/examiner/sem-wf_global_definition.adb0000644000175000017500000021542211753202336021655 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SLI; separate (Sem) procedure Wf_Global_Definition (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Subprog_Sym : in Dictionary.Symbol; First_Seen : in Boolean; Sem_Err_Found : out Boolean) is Abstraction : Dictionary.Abstractions; Doing_Function : Boolean; ---------------------------------------------------------------------- -- Handle Special case for stream variables here: -- Error occurs if the subprogram references a global which is a mode IN refinement -- constituent whose abstract subject is unmoded. 
This condition is illegal for -- a function subprogram (because it would conceal the stream side-efefct in the -- abstract view). For a procedure, we are protected because derives refinement -- checks will always reveal the error; however, we can improve error reporting -- by putting something out here as well procedure Check_Stream_Variable_Refinement (Subprog_Sym : in Dictionary.Symbol; Err_Pos : in LexTokenManager.Token_Position; Sem_Err_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Err_Pos, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Subprog_Sym & --# Sem_Err_Found from *, --# Dictionary.Dict, --# Subprog_Sym; is Refined_Global : Dictionary.Symbol; Abstract_Global : Dictionary.Symbol; Refined_Glob_List : Dictionary.Iterator; function Is_Local_Constituent (Sym : Dictionary.Symbol; Subprog_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return Dictionary.IsConstituent (Sym) and then Dictionary.IsPackage (Dictionary.GetOwner (Sym)) and then Dictionary.Packages_Are_Equal (Left_Symbol => Dictionary.GetEnclosingPackage (Dictionary.GetScope (Subprog_Sym)), Right_Symbol => Dictionary.GetOwner (Sym)); end Is_Local_Constituent; begin -- Check_Stream_Variable_Refinement Refined_Glob_List := Dictionary.FirstGlobalVariable (Dictionary.IsRefined, Subprog_Sym); while not Dictionary.IsNullIterator (Refined_Glob_List) loop Refined_Global := Dictionary.CurrentSymbol (Refined_Glob_List); if Is_Local_Constituent (Sym => Refined_Global, Subprog_Sym => Subprog_Sym) then Abstract_Global := Dictionary.GetSubject (Refined_Global); if Dictionary.GetOwnVariableOrConstituentMode (Refined_Global) = Dictionary.InMode and then -- Abstract has no mode Dictionary.GetOwnVariableOrConstituentMode (Abstract_Global) = Dictionary.DefaultMode and then -- Abstract is not protected not (Dictionary.IsOwnVariable (Abstract_Global) and then Dictionary.GetOwnVariableProtected (Abstract_Global)) then -- an error condition may exist --# accept Flow, 41, "Expected stable expression"; if Dictionary.IsAdaFunction (Subprog_Sym) then --# end accept; -- an error condition does exist ErrorHandler.Semantic_Error2 (Err_Num => 721, Reference => ErrorHandler.No_Reference, Position => Err_Pos, Id_Str1 => Dictionary.GetSimpleName (Refined_Global), Id_Str2 => Dictionary.GetSimpleName (Abstract_Global)); Sem_Err_Found := True; else -- must be procedure -- extra checks here - for there to be an error the -- abstract own variable must be have global mode -- IN and the refinement constituent as well if Dictionary.GetGlobalMode (Dictionary.IsAbstract, Subprog_Sym, Abstract_Global) = Dictionary.InMode and then Dictionary.GetGlobalMode (Dictionary.IsRefined, Subprog_Sym, Refined_Global) = Dictionary.InMode then ErrorHandler.Semantic_Error2 (Err_Num => 722, Reference => ErrorHandler.No_Reference, Position => Err_Pos, Id_Str1 => Dictionary.GetSimpleName (Refined_Global), Id_Str2 => Dictionary.GetSimpleName (Abstract_Global)); Sem_Err_Found := True; end if; end if; end if; end if; Refined_Glob_List := Dictionary.NextSymbol (Refined_Glob_List); end loop; end Check_Stream_Variable_Refinement; ---------------------------------------------------------------------- function Type_Two_Error_Position (Node : STree.SyntaxNode) return 
LexTokenManager.Token_Position --# global in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.moded_global_definition; is Temp_Node : STree.SyntaxNode; begin Temp_Node := Child_Node (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME Temp_Node = global_definition_rep OR global_variable_clause if Syntax_Node_Type (Node => Temp_Node) = SP_Symbols.global_definition_rep then -- ASSUME Temp_Node = global_definition_rep Temp_Node := Next_Sibling (Current_Node => Temp_Node); elsif Syntax_Node_Type (Node => Temp_Node) /= SP_Symbols.global_variable_clause then Temp_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Temp_Node = global_definition_rep OR global_variable_clause in Type_Two_Error_Position"); end if; -- ASSUME Temp_Node = global_variable_clause SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Temp_Node) = SP_Symbols.global_variable_clause, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Temp_Node = global_variable_clause in Type_Two_Error_Position"); Temp_Node := Child_Node (Current_Node => Next_Sibling (Current_Node => Child_Node (Current_Node => Temp_Node))); -- ASSUME Temp_Node = global_variable_list OR global_variable if Syntax_Node_Type (Node => Temp_Node) = SP_Symbols.global_variable_list then -- ASSUME Temp_Node = global_variable_list Temp_Node := Next_Sibling (Current_Node => Temp_Node); elsif Syntax_Node_Type (Node => Temp_Node) /= SP_Symbols.global_variable then Temp_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Temp_Node = global_variable_list OR global_variable in Type_Two_Error_Position"); end if; -- Temp_Node is right-most global variable -- ASSUME Temp_Node = global_variable SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Temp_Node) = SP_Symbols.global_variable, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Temp_Node = global_variable in Type_Two_Error_Position"); return Node_Position (Node => Temp_Node); end Type_Two_Error_Position; ---------------------------------------------------------------------- procedure Check_On_Global_Refinement_2 (Subprog_Sym : in Dictionary.Symbol; Err_Pos : in LexTokenManager.Token_Position; Sem_Err_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Err_Pos, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Subprog_Sym & --# Sem_Err_Found from *, --# Dictionary.Dict, --# Subprog_Sym; is A_Glo_Var : Dictionary.Symbol; First_Glob_List : Dictionary.Iterator; -- lifted out from loop in enclosing procedure below procedure Look_For_Refinement_Constituent (Subprog_Sym : in Dictionary.Symbol; A_Glo_Var : in Dictionary.Symbol; Err_Pos : in LexTokenManager.Token_Position; Sem_Err_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from A_Glo_Var, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Err_Pos, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Subprog_Sym & --# Sem_Err_Found from *, --# A_Glo_Var, --# Dictionary.Dict, --# Subprog_Sym; is 
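         -- Searches the refined (second) global annotation of Subprog_Sym for at least
         -- one refinement constituent of the abstract own variable A_Glo_Var; if none
         -- is found, semantic error 86 is reported at Err_Pos and Sem_Err_Found is set.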
Found : Boolean; Second_Glob_List : Dictionary.Iterator; begin Found := False; Second_Glob_List := Dictionary.FirstGlobalVariable (Dictionary.IsRefined, Subprog_Sym); loop exit when Dictionary.IsNullIterator (Second_Glob_List); Found := Dictionary.IsRefinement (A_Glo_Var, Dictionary.CurrentSymbol (Second_Glob_List)); exit when Found; Second_Glob_List := Dictionary.NextSymbol (Second_Glob_List); end loop; if not Found then ErrorHandler.Semantic_Error_Sym (Err_Num => 86, Reference => ErrorHandler.No_Reference, Position => Err_Pos, Sym => A_Glo_Var, Scope => Dictionary.GetScope (Subprog_Sym)); Sem_Err_Found := True; end if; end Look_For_Refinement_Constituent; -- new procedure similar to above but for unrefined global items procedure Look_For_Self (Subprog_Sym : in Dictionary.Symbol; A_Glo_Var : in Dictionary.Symbol; Err_Pos : in LexTokenManager.Token_Position; Sem_Err_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from A_Glo_Var, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Err_Pos, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Subprog_Sym & --# Sem_Err_Found from *, --# A_Glo_Var, --# Dictionary.Dict, --# Subprog_Sym; is Found : Boolean; Second_Glob_List : Dictionary.Iterator; begin Found := False; Second_Glob_List := Dictionary.FirstGlobalVariable (Dictionary.IsRefined, Subprog_Sym); loop exit when Dictionary.IsNullIterator (Second_Glob_List); Found := A_Glo_Var = Dictionary.CurrentSymbol (Second_Glob_List); exit when Found; Second_Glob_List := Dictionary.NextSymbol (Second_Glob_List); end loop; if not Found and then not Dictionary.Is_Null_Variable (A_Glo_Var) then -- no refinement of "null" needed ErrorHandler.Semantic_Error_Sym (Err_Num => 723, Reference => ErrorHandler.No_Reference, Position => Err_Pos, Sym => A_Glo_Var, Scope => Dictionary.GetScope (Subprog_Sym)); Sem_Err_Found := True; end if; end Look_For_Self; function Constituent_Required (The_Global, Subprogram : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is Result : Boolean := False; Region : Dictionary.Symbol; begin -- We expect a refinement sonstituent of The_Global if: -- The_Global is an abstract own variable and -- (its owner is the package in which Subprogram is declared or -- the Subprogram is declared in a protected type and the owner -- of The_Global is the package in which the protected type is declared) if Dictionary.IsRefinedOwnVariable (The_Global) then Result := Is_Enclosing_Package (Outer_Pack => Dictionary.GetOwner (The_Global), Scope => Dictionary.GetScope (Subprogram)); if not Result then Region := Dictionary.GetRegion (Dictionary.GetScope (Subprogram)); if Dictionary.IsProtectedType (Region) then Result := Is_Enclosing_Package (Outer_Pack => Dictionary.GetOwner (The_Global), Scope => Dictionary.GetScope (Region)); end if; end if; end if; return Result; end Constituent_Required; begin -- Check_On_Global_Refinement_2 First_Glob_List := Dictionary.FirstGlobalVariable (Dictionary.IsAbstract, Subprog_Sym); while not Dictionary.IsNullIterator (First_Glob_List) loop A_Glo_Var := Dictionary.CurrentSymbol (First_Glob_List); if Constituent_Required (The_Global => A_Glo_Var, Subprogram => Subprog_Sym) then Look_For_Refinement_Constituent (Subprog_Sym => Subprog_Sym, A_Glo_Var => A_Glo_Var, Err_Pos => Err_Pos, Sem_Err_Found => Sem_Err_Found); else 
Look_For_Self (Subprog_Sym => Subprog_Sym, A_Glo_Var => A_Glo_Var, Err_Pos => Err_Pos, Sem_Err_Found => Sem_Err_Found); end if; First_Glob_List := Dictionary.NextSymbol (First_Glob_List); end loop; end Check_On_Global_Refinement_2; ---------------------------------------------------------------------- procedure Wf_Moded_Global_Definition (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Subprog_Sym : in Dictionary.Symbol; First_Seen : in Boolean; Abstraction : in Dictionary.Abstractions; Doing_Function : in Boolean; Sem_Err_Found : in out Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from Abstraction, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# Subprog_Sym & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Doing_Function, --# ErrorHandler.Error_Context, --# First_Seen, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Subprog_Sym & --# Sem_Err_Found from *, --# Abstraction, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Doing_Function, --# First_Seen, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# Subprog_Sym; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.moded_global_definition; --# post STree.Table = STree.Table~; is It : STree.Iterator; Next_Node, Dependency_Node : STree.SyntaxNode; Mode : Dictionary.Modes; function Get_Mode (Node : STree.SyntaxNode) return Dictionary.Modes --# global in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.global_variable_clause; is Mode_Node : STree.SyntaxNode; Result : Dictionary.Modes; begin Mode_Node := Child_Node (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME Mode_Node = in_mode OR inout_mode OR out_mode OR NULL if Mode_Node = STree.NullNode then -- ASSUME Mode_Node = NULL Result := Dictionary.DefaultMode; else -- ASSUME Mode_Node = in_mode OR inout_mode OR out_mode case Syntax_Node_Type (Node => Mode_Node) is when SP_Symbols.in_mode => -- ASSUME Mode_Node = in_mode Result := Dictionary.InMode; when SP_Symbols.inout_mode => -- ASSUME Mode_Node = inout_mode Result := Dictionary.InOutMode; when SP_Symbols.out_mode => -- ASSUME Mode_Node = out_mode Result := Dictionary.OutMode; when others => Result := Dictionary.DefaultMode; -- cannot occur SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Mode_Node = in_mode OR inout_mode OR out_mode OR NULL in Get_Mode"); end case; end if; return Result; end Get_Mode; -------------------------------------------------- procedure Process_Global_List (Node : in STree.SyntaxNode; Mode : in Dictionary.Modes; Scope : in Dictionary.Scopes; Subprog_Sym : in Dictionary.Symbol; First_Seen : in Boolean; Abstraction : in Dictionary.Abstractions; Sem_Err_Found : in out Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from Abstraction, --# 
CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Mode, --# Node, --# Scope, --# STree.Table, --# Subprog_Sym & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# First_Seen, --# LexTokenManager.State, --# Mode, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Subprog_Sym & --# Sem_Err_Found from *, --# Abstraction, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# First_Seen, --# LexTokenManager.State, --# Mode, --# Node, --# Scope, --# STree.Table, --# Subprog_Sym; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.global_variable_list; --# post STree.Table = STree.Table~; is It : STree.Iterator; Next_Node : STree.SyntaxNode; ---------------------------------------------------------------------- procedure Process_One_Global (Node : in STree.SyntaxNode; Mode : in Dictionary.Modes; Scope : in Dictionary.Scopes; Subprog_Sym : in Dictionary.Symbol; First_Seen : in Boolean; Abstraction : in Dictionary.Abstractions; Sem_Err_Found : in out Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from Abstraction, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Mode, --# Node, --# Scope, --# STree.Table, --# Subprog_Sym & --# ErrorHandler.Error_Context from *, --# Abstraction, --# CommandLineData.Content, --# Dictionary.Dict, --# First_Seen, --# LexTokenManager.State, --# Mode, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Subprog_Sym & --# Sem_Err_Found from *, --# Abstraction, --# CommandLineData.Content, --# Dictionary.Dict, --# First_Seen, --# LexTokenManager.State, --# Mode, --# Node, --# Scope, --# STree.Table, --# Subprog_Sym & --# SPARK_IO.File_Sys from *, --# Abstraction, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# First_Seen, --# LexTokenManager.State, --# Mode, --# Node, --# Scope, --# STree.Table, --# Subprog_Sym; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.global_variable; --# post STree.Table = STree.Table~; is Dotted : Boolean; Sym : Dictionary.Symbol; Global_Variable_Sym : Dictionary.Symbol; Param_Mode : Dictionary.Modes; Entire_Variable_Node : STree.SyntaxNode; ---------------------------------------------------------------------- procedure Global_Not_Refined (Global_Sym : in out Dictionary.Symbol; Error_Pos : in LexTokenManager.Token_Position; Subprog_Sym : in Dictionary.Symbol; Sem_Err_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Pos, --# Global_Sym, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Subprog_Sym & --# Global_Sym, --# Sem_Err_Found from *, --# Dictionary.Dict, --# Global_Sym, --# Subprog_Sym; is begin if Dictionary.IsRefinedOwnVariable (Global_Sym) and then Is_Enclosing_Package (Outer_Pack => Dictionary.GetOwner 
(Global_Sym), Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Subprog_Sym)) then ErrorHandler.Semantic_Error (Err_Num => 314, Reference => ErrorHandler.No_Reference, Position => Error_Pos, Id_Str => Dictionary.GetSimpleName (Global_Sym)); Global_Sym := Dictionary.NullSymbol; Sem_Err_Found := True; end if; end Global_Not_Refined; ---------------------------------------------------------------------- procedure Check_Stream_Mode_Consistency (Global_Sym : in out Dictionary.Symbol; Mode : in Dictionary.Modes; Error_Pos : in LexTokenManager.Token_Position; Subprog_Sym : in Dictionary.Symbol; Sem_Err_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Pos, --# Global_Sym, --# LexTokenManager.State, --# Mode, --# SPARK_IO.File_Sys, --# Subprog_Sym & --# Global_Sym, --# Sem_Err_Found from *, --# Dictionary.Dict, --# Global_Sym, --# Mode, --# Subprog_Sym; is Possible_Stream_Mode : Dictionary.Modes; begin Possible_Stream_Mode := Dictionary.GetOwnVariableOrConstituentMode (Global_Sym); -- Possible_Stream_Mode contains DefaultMode unless the global is a stream -- variable or a stream constituent in which case it will be either InMode -- or OutMode (it can never be InOutMode because of earlier wffs). -- checks are only required if the mode is something other than DefaultMode if Possible_Stream_Mode /= Dictionary.DefaultMode then if Dictionary.IsFunction (Subprog_Sym) then -- required check is that global has default mode or mode in if Possible_Stream_Mode = Dictionary.OutMode then ErrorHandler.Semantic_Error (Err_Num => 709, Reference => ErrorHandler.No_Reference, Position => Error_Pos, Id_Str => Dictionary.GetSimpleName (Global_Sym)); Global_Sym := Dictionary.NullSymbol; Sem_Err_Found := True; end if; else -- handle procedure -- required check is that a moded own variable must have -- the same mode as the global mode if one is given. -- Also, global mode in out cannot be used if the own -- variable is moded. 
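                        -- Hedged illustration of the mode-consistency rule enforced below
                        -- (hypothetical user annotations; the name Sensor is invented for
                        -- the example only):
                        --
                        --    --# own in Sensor;           -- moded (external stream) own variable
                        --    --# global in     Sensor;    -- accepted: modes agree
                        --    --# global out    Sensor;    -- rejected below (error 710)
                        --    --# global in out Sensor;    -- rejected below (error 712)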
if Mode = Dictionary.InMode and then Possible_Stream_Mode /= Mode then ErrorHandler.Semantic_Error (Err_Num => 711, Reference => ErrorHandler.No_Reference, Position => Error_Pos, Id_Str => Dictionary.GetSimpleName (Global_Sym)); Global_Sym := Dictionary.NullSymbol; Sem_Err_Found := True; elsif Mode = Dictionary.OutMode and then Possible_Stream_Mode /= Mode then ErrorHandler.Semantic_Error (Err_Num => 710, Reference => ErrorHandler.No_Reference, Position => Error_Pos, Id_Str => Dictionary.GetSimpleName (Global_Sym)); Global_Sym := Dictionary.NullSymbol; Sem_Err_Found := True; elsif Mode = Dictionary.InOutMode then ErrorHandler.Semantic_Error (Err_Num => 712, Reference => ErrorHandler.No_Reference, Position => Error_Pos, Id_Str => Dictionary.GetSimpleName (Global_Sym)); Global_Sym := Dictionary.NullSymbol; Sem_Err_Found := True; end if; end if; end if; end Check_Stream_Mode_Consistency; ---------------------------------------------------------------------- procedure Unique_Global (Global_Sym : in out Dictionary.Symbol; Dotted : in Boolean; Error_Pos : in LexTokenManager.Token_Position; Subprog_Sym : in Dictionary.Symbol; Abstraction : in Dictionary.Abstractions; Sem_Err_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Abstraction, --# CommandLineData.Content, --# Dictionary.Dict, --# Dotted, --# ErrorHandler.Error_Context, --# Error_Pos, --# Global_Sym, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Subprog_Sym & --# Global_Sym, --# Sem_Err_Found from *, --# Abstraction, --# Dictionary.Dict, --# Dotted, --# Global_Sym, --# LexTokenManager.State, --# Subprog_Sym; is It : Dictionary.Iterator; Global_Str : LexTokenManager.Lex_String; begin Global_Str := Dictionary.GetSimpleName (Global_Sym); if not Dotted and then Dictionary.Is_Subprogram (Subprog_Sym) then -- task types have no parameters so skip search It := Dictionary.FirstSubprogramParameter (Subprog_Sym); while It /= Dictionary.NullIterator loop if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Global_Str, Lex_Str2 => Dictionary.GetSimpleName (Dictionary.CurrentSymbol (It))) = LexTokenManager.Str_Eq then ErrorHandler.Semantic_Error (Err_Num => 158, Reference => ErrorHandler.No_Reference, Position => Error_Pos, Id_Str => Global_Str); Global_Sym := Dictionary.NullSymbol; Sem_Err_Found := True; exit; end if; It := Dictionary.NextSymbol (It); end loop; end if; if not Dictionary.Is_Null_Symbol (Global_Sym) then It := Dictionary.FirstGlobalVariable (Abstraction, Subprog_Sym); while It /= Dictionary.NullIterator loop if Global_Sym = Dictionary.CurrentSymbol (It) then ErrorHandler.Semantic_Error (Err_Num => 157, Reference => ErrorHandler.No_Reference, Position => Error_Pos, Id_Str => Global_Str); Global_Sym := Dictionary.NullSymbol; Sem_Err_Found := True; exit; end if; It := Dictionary.NextSymbol (It); end loop; end if; end Unique_Global; ----------------------------------------------------------------------- procedure Check_Import_Init (Import_Node_Pos : in LexTokenManager.Token_Position; Import_Sym, Subprog_Sym : in Dictionary.Symbol; Sem_Err_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# 
Dictionary.Dict, --# ErrorHandler.Error_Context, --# Import_Node_Pos, --# Import_Sym, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Subprog_Sym & --# Sem_Err_Found from *, --# Dictionary.Dict, --# Import_Sym, --# Subprog_Sym; is begin if Dictionary.IsMainProgram (Subprog_Sym) and then Dictionary.IsFunction (Subprog_Sym) and then not Dictionary.OwnVariableIsInitialized (Import_Sym) and then Dictionary.GetOwnVariableOrConstituentMode (Import_Sym) = Dictionary.DefaultMode then Sem_Err_Found := True; ErrorHandler.Semantic_Error (Err_Num => 167, Reference => ErrorHandler.No_Reference, Position => Import_Node_Pos, Id_Str => Dictionary.GetSimpleName (Import_Sym)); end if; end Check_Import_Init; ---------------------------------------------------------------------- procedure Check_On_Global_Refinement_1 (Subprog_Sym : in Dictionary.Symbol; Var_Sym : in Dictionary.Symbol; Err_Pos : in LexTokenManager.Token_Position; Sem_Err_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Err_Pos, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Subprog_Sym, --# Var_Sym & --# Sem_Err_Found from *, --# Dictionary.Dict, --# Subprog_Sym, --# Var_Sym; is Subject_Sym : Dictionary.Symbol; Found : Boolean; It : Dictionary.Iterator; -- GAA duplicated code from SEM-DEPENDENCY_RELATION-CHECK_DERIVES_CONSISTENCY.ADB function Valid_Refinement (Constituent, Subprogram : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is Owner, Region, Enclosing_Region : Dictionary.Symbol; Result : Boolean := False; begin if Dictionary.IsConstituent (Constituent) then -- We regard Constituent as a Valid_Refinement if: -- It is a refinement constituent of Subject AND -- (Subject is owned by the region in which the Subprogram is declared OR -- Subject is owned by the region in which the protected type in which the -- Subprogram is declared) Owner := Dictionary.GetOwner (Dictionary.GetSubject (Constituent)); Region := Dictionary.GetRegion (Dictionary.GetScope (Subprogram)); Result := Owner = Region; if not Result and then Dictionary.IsProtectedType (Region) then Enclosing_Region := Dictionary.GetRegion (Dictionary.GetScope (Region)); Result := Owner = Enclosing_Region; end if; end if; return Result; end Valid_Refinement; begin -- Check_On_Global_Refinement_1 -- look for refinement subject in first global anno Subject_Sym := Dictionary.GetSubject (Var_Sym); -- if subject not from this package then use own var itself if not Valid_Refinement (Constituent => Var_Sym, Subprogram => Subprog_Sym) then Subject_Sym := Var_Sym; end if; Found := False; It := Dictionary.FirstGlobalVariable (Dictionary.IsAbstract, Subprog_Sym); loop exit when Dictionary.IsNullIterator (It); Found := Subject_Sym = Dictionary.CurrentSymbol (It); exit when Found; It := Dictionary.NextSymbol (It); end loop; if not Found then ErrorHandler.Semantic_Error_Sym (Err_Num => 85, Reference => ErrorHandler.No_Reference, Position => Err_Pos, Sym => Var_Sym, Scope => Dictionary.GetScope (Subprog_Sym)); Sem_Err_Found := True; end if; end Check_On_Global_Refinement_1; ---------------------------------------------------------------------- procedure Check_Main_Program_Global (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Var_Sym : out Dictionary.Symbol; Dotted : out Boolean) 
--# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dotted, --# STree.Table, --# Var_Sym from CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.entire_variable; --# post STree.Table = STree.Table~; is Id_Node, Next_Id_Node : STree.SyntaxNode; P_Id_Str, Id_Str : LexTokenManager.Lex_String; P_Sym : Dictionary.Symbol; begin Id_Node := Last_Child_Of (Start_Node => Node); -- ASSUME Id_Node = identifier if Syntax_Node_Type (Node => Id_Node) = SP_Symbols.identifier then -- ASSUME Id_Node = identifier Id_Str := Node_Lex_String (Node => Id_Node); Var_Sym := Dictionary.LookupItem (Name => Id_Str, Scope => Scope, Context => Dictionary.ProofContext, Full_Package_Name => False); else Id_Str := LexTokenManager.Null_String; Var_Sym := Dictionary.NullSymbol; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Id_Node = identifier in Check_Main_Program_Global"); end if; P_Id_Str := LexTokenManager.Null_String; loop --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Id_Node, STree.Table) = SP_Symbols.identifier; -- introduced for multiple prefixes if Dictionary.Is_Null_Symbol (Var_Sym) then ErrorHandler.Semantic_Error2 (Err_Num => 144, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str1 => Id_Str, Id_Str2 => P_Id_Str); exit; end if; Next_Id_Node := Next_Sibling (Current_Node => Parent_Node (Current_Node => Id_Node)); -- ASSUME Next_Id_Node = identifier OR NULL SystemErrors.RT_Assert (C => Next_Id_Node = STree.NullNode or else Syntax_Node_Type (Node => Next_Id_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Id_Node = identifier OR NULL in Check_Main_Program_Global"); if Dictionary.IsOwnVariable (Var_Sym) then -- entire variable check -- at this point Sym is a variable, final check that there is no dotted -- part to the right of it as there would be if a record field was there if Next_Id_Node /= STree.NullNode then ErrorHandler.Semantic_Error (Err_Num => 156, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); Var_Sym := Dictionary.NullSymbol; end if; exit; end if; if not Dictionary.IsPackage (Var_Sym) then ErrorHandler.Semantic_Error (Err_Num => 174, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Id_Node), Id_Str => Id_Str); Var_Sym := Dictionary.NullSymbol; exit; end if; if Next_Id_Node = STree.NullNode then -- package without a selected component ErrorHandler.Semantic_Error (Err_Num => 174, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Id_Node), Id_Str => Id_Str); Var_Sym := Dictionary.NullSymbol; exit; end if; STree.Set_Node_Lex_String (Sym => Var_Sym, Node => Id_Node); P_Id_Str := Id_Str; Id_Node := Next_Id_Node; Id_Str := Node_Lex_String (Node => Id_Node); P_Sym := Var_Sym; Var_Sym := Dictionary.LookupSelectedItem (Prefix => P_Sym, Selector => Id_Str, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Var_Sym), Context => 
Dictionary.ProofContext); if Dictionary.Is_Null_Symbol (Var_Sym) then -- need also to search current scope for inherited child packages Var_Sym := Dictionary.LookupSelectedItem (Prefix => P_Sym, Selector => Id_Str, Scope => Scope, Context => Dictionary.ProofContext); end if; end loop; Dotted := LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => P_Id_Str, Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq and then not Dictionary.Is_Null_Symbol (Var_Sym); end Check_Main_Program_Global; begin -- Process_One_Global Entire_Variable_Node := Child_Node (Current_Node => Node); -- ASSUME Entire_Variable_Node = entire_variable SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Entire_Variable_Node) = SP_Symbols.entire_variable, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Entire_Variable_Node = entire_variable in Process_One_Global"); if Dictionary.IsMainProgram (Subprog_Sym) then Check_Main_Program_Global (Node => Entire_Variable_Node, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Subprog_Sym), Var_Sym => Sym, Dotted => Dotted); else Wf_Entire_Variable (Node => Entire_Variable_Node, Scope => Scope, Error_Hint => In_Global_List, Var_Sym => Sym, Dotted => Dotted); end if; --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.global_variable and --# Syntax_Node_Type (Entire_Variable_Node, STree.Table) = SP_Symbols.entire_variable; if Dictionary.Is_Null_Symbol (Sym) then Sem_Err_Found := True; end if; if CommandLineData.Ravenscar_Selected and then not (Dictionary.Is_Subprogram (Subprog_Sym) and then Dictionary.Subprograms_Are_Equal (Left_Symbol => Subprog_Sym, Right_Symbol => Dictionary.GetThePartition)) and then Dictionary.IsOwnVariable (Sym) and then Dictionary.GetOwnVariableIsInterruptStream (Sym) then -- An interrupt stream variable is being used outside the partition -- wide flow annotation. 
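-- Editorial note (not part of the original source): the guard above only
-- holds when the Ravenscar profile is selected, the unit being analysed is
-- not the partition-level program itself, and Sym is an own variable
-- recorded as an interrupt stream; error 955 is then reported below and
-- Sym is set to NullSymbol so that the remaining checks skip it.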
ErrorHandler.Semantic_Error_Sym (Err_Num => 955, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Sym => Sym, Scope => Scope); Sym := Dictionary.NullSymbol; end if; --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.global_variable and --# Syntax_Node_Type (Entire_Variable_Node, STree.Table) = SP_Symbols.entire_variable; if not Dictionary.Is_Null_Symbol (Sym) and then (Dictionary.IsMainProgram (Subprog_Sym) or else Dictionary.IsTaskType (Subprog_Sym)) then -- check to ensure that global is initialized if CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83 and then not Dictionary.GetHasDerivesAnnotation (Subprog_Sym) and then (Mode = Dictionary.InMode or else Mode = Dictionary.InOutMode) and then not Dictionary.OwnVariableIsInitialized (Sym) and then Dictionary.GetOwnVariableOrConstituentMode (Sym) = Dictionary.DefaultMode then ErrorHandler.Semantic_Error (Err_Num => 167, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Dictionary.GetSimpleName (Sym)); end if; end if; --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.global_variable and --# Syntax_Node_Type (Entire_Variable_Node, STree.Table) = SP_Symbols.entire_variable; if not Dictionary.Is_Null_Symbol (Sym) then Global_Not_Refined (Global_Sym => Sym, Error_Pos => Node_Position (Node => Entire_Variable_Node), Subprog_Sym => Subprog_Sym, Sem_Err_Found => Sem_Err_Found); end if; --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.global_variable and --# Syntax_Node_Type (Entire_Variable_Node, STree.Table) = SP_Symbols.entire_variable; if not Dictionary.Is_Null_Symbol (Sym) then Unique_Global (Global_Sym => Sym, Dotted => Dotted, Error_Pos => Node_Position (Node => Entire_Variable_Node), Subprog_Sym => Subprog_Sym, Abstraction => Abstraction, Sem_Err_Found => Sem_Err_Found); end if; --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.global_variable and --# Syntax_Node_Type (Entire_Variable_Node, STree.Table) = SP_Symbols.entire_variable; if not Dictionary.Is_Null_Symbol (Sym) then Check_Stream_Mode_Consistency (Global_Sym => Sym, Mode => Mode, Error_Pos => Node_Position (Node => Entire_Variable_Node), Subprog_Sym => Subprog_Sym, Sem_Err_Found => Sem_Err_Found); end if; --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.global_variable and --# Syntax_Node_Type (Entire_Variable_Node, STree.Table) = SP_Symbols.entire_variable; if not Dictionary.Is_Null_Symbol (Sym) and then Dictionary.IsOwnVariable (Sym) and then Dictionary.IsVirtualElement (Sym) then if Dictionary.IsOrIsInProtectedScope (Scope) and then Dictionary.Types_Are_Equal (Left_Symbol => Dictionary.GetEnclosingProtectedRegion (Scope), Right_Symbol => Dictionary.GetRootType (Dictionary.GetType (Dictionary.GetVirtualElementOwner (Sym))), Full_Range_Subtype => False) then -- Mark the virtual element as having been seen by its "owning" -- protected type. Dictionary.SetVirtualElementSeenByOwner (Sym); else -- This is an access to a protected virtual element outside -- its protected type. 
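-- Editorial note (not part of the original source): error 946 below is
-- reported against two symbols: the virtual element itself (Sym) and the
-- root of its owning protected object's type, so the message can name the
-- protected type to which the element belongs.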
ErrorHandler.Semantic_Error_Sym2 (Err_Num => 946, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Sym => Sym, Sym2 => Dictionary.GetRootType (Dictionary.GetType (Dictionary.GetVirtualElementOwner (Sym))), Scope => Scope); end if; end if; --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.global_variable and --# Syntax_Node_Type (Entire_Variable_Node, STree.Table) = SP_Symbols.entire_variable; if not Dictionary.Is_Null_Symbol (Sym) then if not First_Seen then Check_On_Global_Refinement_1 (Subprog_Sym => Subprog_Sym, Var_Sym => Sym, Err_Pos => Node_Position (Node => Entire_Variable_Node), Sem_Err_Found => Sem_Err_Found); end if; Check_Import_Init (Import_Node_Pos => Node_Position (Node => Node), Import_Sym => Sym, Subprog_Sym => Subprog_Sym, Sem_Err_Found => Sem_Err_Found); -- check that the mode on a global of an embedded procedure is compatible with -- the parameter's mode if the global is referring to a parameter of an enclosing -- procedure if Dictionary.IsSubprogramParameter (Sym) then Param_Mode := Dictionary.GetSubprogramParameterMode (Sym); if Param_Mode = Dictionary.InMode or else Param_Mode = Dictionary.DefaultMode then if Mode /= Dictionary.InMode and then Mode /= Dictionary.DefaultMode then ErrorHandler.Semantic_Error (Err_Num => 508, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; end if; end if; Dictionary.AddGlobalVariable (Abstraction => Abstraction, Subprogram => Subprog_Sym, Variable => Sym, Mode => Mode, PrefixNeeded => Dotted, Comp_Unit => ContextManager.Ops.Current_Unit, VariableReference => Dictionary.Location'(Start_Position => Node_Position (Node => Node), End_Position => Node_Position (Node => Node)), Global_Variable_Sym => Global_Variable_Sym); STree.Add_Node_Symbol (Node => Node, Sym => Global_Variable_Sym); -- Mark the global variable as being referenced by a thread if the -- owner of this global variable is a thread (i.e. task, main program or -- interrupt handler) and the variable is not protected. This data is -- required by the main program shared variable checks. 
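-- Editorial note (not part of the original source): the flag set below is
-- later read by the main program shared variable checks referred to above,
-- which are understood to flag unprotected state referenced from more than
-- one thread of control.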
if Dictionary.IsThread (Subprog_Sym) and then Dictionary.IsOwnVariable (Sym) and then not Dictionary.GetOwnVariableProtected (Sym) then Dictionary.SetUsesUnprotectedVariables (Subprog_Sym); end if; end if; end Process_One_Global; begin -- Process_Global_List It := Find_First_Node (Node_Kind => SP_Symbols.global_variable, From_Root => Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.global_variable and --# Next_Node = Get_Node (It); Process_One_Global (Node => Next_Node, Mode => Mode, Scope => Scope, Subprog_Sym => Subprog_Sym, First_Seen => First_Seen, Abstraction => Abstraction, Sem_Err_Found => Sem_Err_Found); It := STree.NextNode (It); end loop; end Process_Global_List; begin -- Wf_Moded_Global_Definition Dependency_Node := Next_Sibling (Current_Node => Node); -- ASSUME Dependency_Node = function_constraint OR procedure_constraint OR declare_annotation OR -- dependency_relation OR not_overriding_subprogram_body OR NULL if Dependency_Node = STree.NullNode or else Syntax_Node_Type (Node => Dependency_Node) = SP_Symbols.function_constraint or else Syntax_Node_Type (Node => Dependency_Node) = SP_Symbols.procedure_constraint or else Syntax_Node_Type (Node => Dependency_Node) = SP_Symbols.declare_annotation or else Syntax_Node_Type (Node => Dependency_Node) = SP_Symbols.not_overriding_subprogram_body then Dependency_Node := STree.NullNode; elsif Syntax_Node_Type (Node => Dependency_Node) /= SP_Symbols.dependency_relation then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Dependency_Node = function_constraint OR procedure_constraint OR declare_annotation OR " & "dependency_relation OR not_overriding_subprogram_body OR NULL in Wf_Moded_Global_Definition"); end if; -- ASSUME Dependency_Node = dependency_relation OR NULL It := Find_First_Node (Node_Kind => SP_Symbols.global_variable_clause, From_Root => Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.global_variable_clause and --# Next_Node = Get_Node (It); Mode := Get_Mode (Node => Next_Node); -- Globals are required to have modes if: -- 1. Performing data-flow analysis (unless lang=83) -- 2. 
Performing auto-flow analysis if there is no derives anno --# accept Flow, 41, "Expected stable expression"; if CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83 then --# end accept; if (CommandLineData.Content.Flow_Option = CommandLineData.Data_Flow and then not Doing_Function and then Mode = Dictionary.DefaultMode) or else (CommandLineData.Content.Flow_Option = CommandLineData.Auto_Flow and then not Doing_Function and then Mode = Dictionary.DefaultMode and then Dependency_Node = STree.NullNode) then Sem_Err_Found := True; ErrorHandler.Semantic_Error (Err_Num => 500, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => LexTokenManager.Null_String); end if; end if; if Doing_Function and then (Mode = Dictionary.OutMode or else Mode = Dictionary.InOutMode) then Sem_Err_Found := True; ErrorHandler.Semantic_Error (Err_Num => 169, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => LexTokenManager.Null_String); end if; Next_Node := Next_Sibling (Current_Node => Child_Node (Current_Node => Next_Node)); -- ASSUME Next_Node = global_variable_list SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Next_Node) = SP_Symbols.global_variable_list, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = global_variable_list in Wf_Moded_Global_Definition"); Process_Global_List (Node => Next_Node, Mode => Mode, Scope => Scope, Subprog_Sym => Subprog_Sym, First_Seen => First_Seen, Abstraction => Abstraction, Sem_Err_Found => Sem_Err_Found); It := STree.NextNode (It); end loop; end Wf_Moded_Global_Definition; begin -- Wf_Global_Definition Sem_Err_Found := False; Doing_Function := Dictionary.IsFunction (Subprog_Sym); if First_Seen then Abstraction := Dictionary.IsAbstract; else Abstraction := Dictionary.IsRefined; end if; Dictionary.AddGlobalAnnotation (Abstraction => Abstraction, Subprogram => Subprog_Sym, Comp_Unit => ContextManager.Ops.Current_Unit, Annotation => Dictionary.Location'(Start_Position => Node_Position (Node => Node), End_Position => Node_Position (Node => Node))); Wf_Moded_Global_Definition (Node => Node, Scope => Scope, Subprog_Sym => Subprog_Sym, First_Seen => First_Seen, Abstraction => Abstraction, Doing_Function => Doing_Function, Sem_Err_Found => Sem_Err_Found); if not First_Seen then Check_On_Global_Refinement_2 (Subprog_Sym => Subprog_Sym, Err_Pos => Type_Two_Error_Position (Node => Node), Sem_Err_Found => Sem_Err_Found); Check_Stream_Variable_Refinement (Subprog_Sym => Subprog_Sym, Err_Pos => Type_Two_Error_Position (Node => Node), Sem_Err_Found => Sem_Err_Found); end if; if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Global (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Node, Scope => Scope, Subprog_Sym => Subprog_Sym); end if; end Wf_Global_Definition; spark-2012.0.deb/examiner/stmtstack.adb0000644000175000017500000000573111753202336016744 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SystemErrors; with Debug; package body StmtStack is EmptyStack : constant StmtStacks := StmtStacks'(Vector => StmtVector'(others => StmtRecord'(StmtNmbr => 0, Kind => ArcKind'First)), Pointer => 0); S : StmtStacks; function IsEmpty return Boolean is begin return S.Pointer = 0; end IsEmpty; function Top return StmtRecord -- pre not IsEmpty (S); is begin --# accept Flow, 10, "Expected ineffective statement"; if S.Pointer = 0 then --# end accept; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Statement_Stack_Underflow, Msg => ""); end if; return S.Vector (S.Pointer); end Top; procedure Clear is begin S := EmptyStack; end Clear; procedure Pop -- pre not IsEmpty (S); is begin if S.Pointer = 0 then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Statement_Stack_Underflow, Msg => ""); end if; S.Pointer := S.Pointer - 1; end Pop; procedure Push (R : in StmtRecord) is begin if S.Pointer = StmtRange'Last then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Statement_Stack_Overflow, Msg => ""); end if; S.Pointer := S.Pointer + 1; S.Vector (S.Pointer) := R; end Push; procedure Dump_Stack (Msg : in String) is --# hide Dump_Stack; begin Debug.PrintMsg (Msg, True); if S.Pointer = 0 then Debug.PrintMsg ("Empty", True); else Debug.PrintMsg ("Top", True); for I in reverse StmtRange range 1 .. S.Pointer loop Debug.PrintMsg (ArcKind'Image (S.Vector (I).Kind), False); Debug.PrintMsg (Integer'Image (S.Vector (I).StmtNmbr), True); end loop; Debug.PrintMsg ("Bottom", True); end if; end Dump_Stack; begin S := EmptyStack; end StmtStack; spark-2012.0.deb/examiner/cells-utility-list.adb0000644000175000017500000002623511753202335020504 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with SystemErrors; with Cells.Utility; package body Cells.Utility.List is -- The overall picture of this datastructure is: -- -- +------+ -- | List |-------------------------\ -- +------+ | -- | | -- | (A - first) | (B - last) -- | | -- v v -- +------+ (A) +------+ (A) +------+ -- | |------>| |------>| |------> null -- +------+ +------+ +------+ -- | | | -- | (B) | (B) | (B) -- | | | -- v v v -- +------+ +------+ +------+ -- | DATA | | DATA | | DATA | -- +------+ +------+ +------+ -- -- List also has its `natural' field set to the current length of -- the list. ------------------------------------------------------------------------------ -- Queries ------------------------------------------------------------------------------ function Get_Length (VCG_Heap : in Cells.Heap_Record; The_List : in Linked_List) return Natural is begin return Cells.Get_Natural_Value (VCG_Heap, Cells.Cell (The_List)); end Get_Length; ------------------------------------------------------------------------------ -- List manipulation ------------------------------------------------------------------------------ -- The linked list root has the following fields: -- A : Pointer to the first element or null -- B : Pointer to the last element or null -- Natural : Length of the list procedure Create (VCG_Heap : in out Cells.Heap_Record; The_List : out Linked_List) is L : Cells.Cell; begin Cells.Create_Cell (VCG_Heap, L); Cells.Set_A_Ptr (VCG_Heap, L, Cells.Null_Cell); Cells.Set_B_Ptr (VCG_Heap, L, Cells.Null_Cell); Cells.Set_Natural_Value (VCG_Heap, L, 0); The_List := Linked_List (L); end Create; -- Each element of the linked list has the following fields: -- A : Pointer to the next element or null -- B : Pointer to the content cell procedure Append (VCG_Heap : in out Cells.Heap_Record; The_List : in Linked_List; The_Cell : in Cells.Cell) is Prev : Cells.Cell; N : Cells.Cell; Current_Length : Natural; begin -- Set up the node. Cells.Create_Cell (VCG_Heap, N); Cells.Set_A_Ptr (VCG_Heap, N, Cells.Null_Cell); Cells.Set_B_Ptr (VCG_Heap, N, The_Cell); -- Link it into the list. if Cells.Is_Null_Cell (Cells.Get_A_Ptr (VCG_Heap, Cells.Cell (The_List))) then -- Case 1. We have an empty list. -- Update the list to point to our new node. Cells.Set_A_Ptr (VCG_Heap, Cells.Cell (The_List), N); Cells.Set_B_Ptr (VCG_Heap, Cells.Cell (The_List), N); else -- Case 2. We stick it at the end of the existing list. -- Point the current last element to this node. Prev := Cells.Get_B_Ptr (VCG_Heap, Cells.Cell (The_List)); Cells.Set_A_Ptr (VCG_Heap, Prev, N); -- Update the last element pointer of the list. Cells.Set_B_Ptr (VCG_Heap, Cells.Cell (The_List), N); end if; -- Increment the length of the list by 1. Current_Length := Get_Length (VCG_Heap, The_List); SystemErrors.RT_Assert (C => Current_Length < Natural'Last, Sys_Err => SystemErrors.VCG_Heap_Is_Exhausted, Msg => "Linked list length exceeds Natural'Last"); Cells.Set_Natural_Value (VCG_Heap, Cells.Cell (The_List), Current_Length + 1); end Append; procedure Append_List (VCG_Heap : in out Cells.Heap_Record; The_List : in Linked_List; The_List_To_Append : in Linked_List) is Current_Length : Natural; begin if Cells.Is_Null_Cell (Cells.Get_A_Ptr (VCG_Heap, Cells.Cell (The_List_To_Append))) then -- Case 1. The list to append is empty. Do nothing. null; elsif Cells.Is_Null_Cell (Cells.Get_A_Ptr (VCG_Heap, Cells.Cell (The_List))) then -- Case 2. The list is empty, so it just becomes the list to -- append. 
-- Transfer the A and B pointers and the length. Cells.Set_A_Ptr (VCG_Heap, Cells.Cell (The_List), Cells.Get_A_Ptr (VCG_Heap, Cells.Cell (The_List_To_Append))); Cells.Set_B_Ptr (VCG_Heap, Cells.Cell (The_List), Cells.Get_B_Ptr (VCG_Heap, Cells.Cell (The_List_To_Append))); Cells.Set_Natural_Value (VCG_Heap, Cells.Cell (The_List), Cells.Get_Natural_Value (VCG_Heap, Cells.Cell (The_List_To_Append))); -- Nuke the second list. Cells.Set_A_Ptr (VCG_Heap, Cells.Cell (The_List_To_Append), Cells.Null_Cell); Cells.Set_B_Ptr (VCG_Heap, Cells.Cell (The_List_To_Append), Cells.Null_Cell); Cells.Set_Natural_Value (VCG_Heap, Cells.Cell (The_List_To_Append), 0); else -- Case 3. Both lists are non-empty. Fiddle the last element -- and length of the list and then nuke the second. -- Point the last element to the first of the second list. Cells.Set_A_Ptr (VCG_Heap, Cells.Get_B_Ptr (VCG_Heap, Cells.Cell (The_List)), Cells.Get_A_Ptr (VCG_Heap, Cells.Cell (The_List_To_Append))); -- Point to the new last element. Cells.Set_B_Ptr (VCG_Heap, Cells.Cell (The_List), Cells.Get_B_Ptr (VCG_Heap, Cells.Cell (The_List_To_Append))); -- Update the length. SystemErrors.RT_Assert (C => Get_Length (VCG_Heap, The_List) <= Natural'Last - Get_Length (VCG_Heap, The_List_To_Append), Sys_Err => SystemErrors.VCG_Heap_Is_Exhausted, Msg => "Linked list length exceeds Natural'Last"); Current_Length := Get_Length (VCG_Heap, The_List) + Get_Length (VCG_Heap, The_List_To_Append); Cells.Set_Natural_Value (VCG_Heap, Cells.Cell (The_List), Current_Length); -- Nuke the second list. Cells.Set_A_Ptr (VCG_Heap, Cells.Cell (The_List_To_Append), Cells.Null_Cell); Cells.Set_B_Ptr (VCG_Heap, Cells.Cell (The_List_To_Append), Cells.Null_Cell); Cells.Set_Natural_Value (VCG_Heap, Cells.Cell (The_List_To_Append), 0); end if; end Append_List; procedure Empty (VCG_Heap : in out Cells.Heap_Record; The_List : in Linked_List) is N : Cells.Cell; To_Dispose : Cells.Cell; begin -- Iterate over the elements, disposing of them. N := Cells.Get_A_Ptr (VCG_Heap, Cells.Cell (The_List)); while not Cells.Is_Null_Cell (N) loop To_Dispose := N; N := Cells.Get_A_Ptr (VCG_Heap, N); Cells.Dispose_Of_Cell (VCG_Heap, To_Dispose); end loop; -- Tidy up the linked list. 
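-- Editorial note (not part of the original source): tidying up means
-- returning the list root to the state produced by Create, i.e. the three
-- calls below reset the first- and last-element pointers (the A and B
-- fields) to null and the stored length to zero.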
Cells.Set_A_Ptr (VCG_Heap, Cells.Cell (The_List), Cells.Null_Cell); Cells.Set_B_Ptr (VCG_Heap, Cells.Cell (The_List), Cells.Null_Cell); Cells.Set_Natural_Value (VCG_Heap, Cells.Cell (The_List), 0); end Empty; ------------------------------------------------------------------------------ -- Iterators ------------------------------------------------------------------------------ function First_Cell (VCG_Heap : in Cells.Heap_Record; The_List : in Linked_List) return Iterator is begin return Iterator (Cells.Get_A_Ptr (VCG_Heap, Cells.Cell (The_List))); end First_Cell; function Next_Cell (VCG_Heap : in Cells.Heap_Record; Previous : in Iterator) return Iterator is N : Cells.Cell; begin if Cells.Is_Null_Cell (Cells.Cell (Previous)) then N := Cells.Null_Cell; else N := Cells.Get_A_Ptr (VCG_Heap, Cells.Cell (Previous)); end if; return Iterator (N); end Next_Cell; function Current_Cell (VCG_Heap : in Cells.Heap_Record; Current : in Iterator) return Cells.Cell is C : Cells.Cell; begin if Cells.Is_Null_Cell (Cells.Cell (Current)) then C := Cells.Null_Cell; else C := Cells.Get_B_Ptr (VCG_Heap, Cells.Cell (Current)); end if; return C; end Current_Cell; function Is_Null_Iterator (Current : in Iterator) return Boolean is begin return Cells.Is_Null_Cell (Cells.Cell (Current)); end Is_Null_Iterator; procedure Join_And (VCG_Heap : in out Cells.Heap_Record; The_List : in Linked_List; The_Conjunct : out Cells.Cell) is Iter : Iterator; C : Cells.Cell; begin The_Conjunct := Cells.Null_Cell; Iter := First_Cell (VCG_Heap, The_List); while not Is_Null_Iterator (Iter) loop C := Current_Cell (VCG_Heap, Iter); if Cells.Is_Null_Cell (The_Conjunct) then The_Conjunct := C; else Utility.Conjoin (VCG_Heap, C, The_Conjunct); end if; Iter := Next_Cell (VCG_Heap, Iter); end loop; if Cells.Is_Null_Cell (The_Conjunct) then Utility.Create_Bool (VCG_Heap, True, The_Conjunct); end if; end Join_And; -- See Knuth, Vol 1, Section 2.2.3, Exercise 7. (I cheated and -- copied from the answers...) procedure Invert (VCG_Heap : in out Cells.Heap_Record; The_List : in Linked_List) is P, Q, R : Cells.Cell; begin -- I1 P := Cells.Get_A_Ptr (VCG_Heap, Cells.Cell (The_List)); Q := Cells.Null_Cell; -- I2 while not Cells.Is_Null_Cell (P) loop R := Q; Q := P; P := Cells.Get_A_Ptr (VCG_Heap, Q); Cells.Set_A_Ptr (VCG_Heap, Q, R); end loop; -- I3 (but also change the pointer to the last cell). Cells.Set_B_Ptr (VCG_Heap, Cells.Cell (The_List), Cells.Get_A_Ptr (VCG_Heap, Cells.Cell (The_List))); Cells.Set_A_Ptr (VCG_Heap, Cells.Cell (The_List), Q); end Invert; end Cells.Utility.List; spark-2012.0.deb/examiner/sem-compunit-walkstatements-wf_loop_param.adb0000644000175000017500000001743411753202336025241 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. 
If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.CompUnit.WalkStatements) procedure Wf_Loop_Param (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Table : in out RefList.HashTable; Component_Data : in out ComponentManager.ComponentData) is Ident_Node, Type_Node, Direction_Node, Range_Node : STree.SyntaxNode; Ident_Str : LexTokenManager.Lex_String; Type_Sym : Dictionary.Symbol; OK_To_Add : Boolean; Range_Result : Exp_Record; Ref_Var : SeqAlgebra.Seq; Loop_Param_Sym : Dictionary.Symbol; Unused : Maths.Value; Has_Static_Range : Boolean := True; Is_Reverse_Loop : Boolean; begin SeqAlgebra.CreateSeq (TheHeap, Ref_Var); Loop_Param_Sym := Dictionary.NullSymbol; Ident_Node := Child_Node (Current_Node => Node); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Wf_Loop_Param"); Ident_Str := Node_Lex_String (Node => Ident_Node); Direction_Node := Next_Sibling (Current_Node => Ident_Node); -- ASSUME Direction_Node = forward OR backward SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Direction_Node) = SP_Symbols.forward or else Syntax_Node_Type (Node => Direction_Node) = SP_Symbols.backward, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Direction_Node = forward OR backward in Wf_Loop_Param"); Is_Reverse_Loop := Syntax_Node_Type (Node => Direction_Node) = SP_Symbols.backward; Type_Node := Next_Sibling (Current_Node => Direction_Node); -- ASSUME Type_Node = type_mark SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Type_Node) = SP_Symbols.type_mark, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Type_Node = type_mark in Wf_Loop_Param"); if Dictionary.IsDefined (Name => Ident_Str, Scope => Scope, Context => Dictionary.ProofContext, Full_Package_Name => False) then OK_To_Add := False; ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); else OK_To_Add := True; end if; Wf_Type_Mark (Node => Type_Node, Current_Scope => Scope, Context => Dictionary.ProgramContext, Type_Sym => Type_Sym); -- plant type for use by VCG STree.Add_Node_Symbol (Node => Type_Node, Sym => Type_Sym); if not Dictionary.IsUnknownTypeMark (Type_Sym) and then (not Dictionary.TypeIsDiscrete (Type_Sym) or else Dictionary.IsPrivateType (Type_Sym, Scope)) then ErrorHandler.Semantic_Error (Err_Num => 46, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Type_Node), Id_Str => LexTokenManager.Null_String); end if; Range_Node := Next_Sibling (Current_Node => Type_Node); -- ASSUME Range_Node = arange OR NULL if Syntax_Node_Type (Node => Range_Node) = SP_Symbols.arange then -- ASSUME Range_Node = arange Walk_Expression_P.Walk_Expression (Exp_Node => Range_Node, Scope => Scope, Type_Context => Type_Sym, Context_Requires_Static => False, Ref_Var => Ref_Var, Result => Range_Result, Component_Data => Component_Data, The_Heap => TheHeap); if not Dictionary.IsUnknownTypeMark (Range_Result.Type_Symbol) then if not Range_Result.Is_ARange then ErrorHandler.Semantic_Error (Err_Num => 98, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Range_Node), Id_Str => LexTokenManager.Null_String); elsif not Dictionary.CompatibleTypes (Scope, Type_Sym, Range_Result.Type_Symbol) then 
ErrorHandler.Semantic_Error (Err_Num => 106, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Range_Node), Id_Str => LexTokenManager.Null_String); else -- determine whether explicit range is statically known Has_Static_Range := not (Maths.HasNoValue (Range_Result.Value) or else Maths.HasNoValue (Range_Result.Range_RHS)); -- static range check of any explicit range --# accept Flow, 10, Unused, "Expected ineffective assignment"; Constraint_Check (Val => Range_Result.Value, New_Val => Unused, Is_Annotation => False, Typ => Type_Sym, Position => Node_Position (Node => Range_Node)); Constraint_Check (Val => Range_Result.Range_RHS, New_Val => Unused, Is_Annotation => False, Typ => Type_Sym, Position => Node_Position (Node => Range_Node)); --# end accept; end if; end if; elsif Range_Node /= STree.NullNode then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Range_Node = arange OR NULL in Wf_Loop_Param"); end if; if OK_To_Add then Dictionary.AddLoopParameter (TheLoop => Dictionary.GetRegion (Scope), Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Ident_Node), End_Position => Node_Position (Node => Ident_Node)), Name => Ident_Str, TypeMark => Type_Sym, StaticRange => Has_Static_Range, IsReverse => Is_Reverse_Loop, TypeReference => Dictionary.Location'(Start_Position => Node_Position (Node => Node), End_Position => Node_Position (Node => Node))); Loop_Param_Sym := Dictionary.GetLoopParameter (Dictionary.GetRegion (Scope)); end if; -- add reference variable list to RefList hash table RefList.AddRelation (Table, TheHeap, Node, Loop_Param_Sym, Ref_Var); --# accept Flow, 33, Unused, "Expected to be neither referenced nor exported"; end Wf_Loop_Param; spark-2012.0.deb/examiner/sem-wf_package_declaration-wf_package_specification-wf_anno-wf_own.adb0000644000175000017500000014675311753202336032066 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Wf_Package_Declaration.Wf_Package_Specification.Wf_Anno) procedure Wf_Own (Node : in STree.SyntaxNode; Pack_Sym : in Dictionary.Symbol; Scope_Type : in Enclosing_Scope_Types; Scope : in Dictionary.Scopes; The_Heap : in out Heap.HeapRecord) is It : STree.Iterator; Next_Node : STree.SyntaxNode; ------------------------------------------------------------------------- procedure Wf_Own_Variable_Specification (Node : in STree.SyntaxNode; Pack_Sym : in Dictionary.Symbol; Scope_Type : in Enclosing_Scope_Types; Scope : in Dictionary.Scopes; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Pack_Sym, --# Scope, --# Scope_Type, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Pack_Sym, --# Scope, --# Scope_Type, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.own_variable_specification; --# post STree.Table = STree.Table~; is type Wff_Rules is (Use_Embedded, Use_Library); type Modes is record Value : Dictionary.Modes; Is_Valid : Boolean; end record; Wff_Rule : Wff_Rules; Type_Announced : Boolean; Type_Node, Property_List_Node, Ident_List_Node, Ident_Node, Next_Node : STree.SyntaxNode; It : STree.Iterator; Mode : Modes; Is_Protected, Is_Task : Boolean; Announced_Type : Dictionary.Symbol; Type_To_Use : Dictionary.Symbol; Own_Var_Sym : Dictionary.Symbol; Ident_Str : LexTokenManager.Lex_String; Valid : Boolean; procedure Check_Own_Var_Type (Node : in STree.SyntaxNode; Pack_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Type_Sym : out Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table, --# Type_Sym from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Pack_Sym, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Pack_Sym, --# Scope, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.type_mark; --# post (Dictionary.Is_Null_Symbol 
(Type_Sym) or Dictionary.IsTypeMark (Type_Sym, Dictionary.Dict)) and STree.Table = STree.Table~; is Type_String : LexTokenManager.Lex_String; Current_Node : STree.SyntaxNode; begin Current_Node := Child_Node (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME Current_Node = identifier OR dotted_simple_name if Syntax_Node_Type (Node => Current_Node) = SP_Symbols.identifier then -- ASSUME Current_Node = identifier -- simple identifier which either exists or is an announcement Type_String := Node_Lex_String (Node => Current_Node); Type_Sym := Dictionary.LookupItem (Name => Type_String, Scope => Scope, Context => Dictionary.ProofContext, Full_Package_Name => False); if Dictionary.Is_Null_Symbol (Type_Sym) then -- process type announcement Dictionary.Add_Type_Announcement (Name => Type_String, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Current_Node), End_Position => Node_Position (Node => Current_Node)), The_Package => Pack_Sym, The_Type => Type_Sym); STree.Add_Node_Symbol (Node => Current_Node, Sym => Type_Sym); elsif not Dictionary.IsTypeMark (Type_Sym) then -- illegal type mark Type_Sym := Dictionary.GetUnknownTypeMark; ErrorHandler.Semantic_Error (Err_Num => 63, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Current_Node), Id_Str => Type_String); else STree.Set_Node_Lex_String (Sym => Type_Sym, Node => Current_Node); end if; elsif Syntax_Node_Type (Node => Current_Node) = SP_Symbols.dotted_simple_name then -- ASSUME Current_Node = dotted_simple_name -- dotted identifier, this must already exist as type Wf_Type_Mark (Node => Node, Current_Scope => Scope, Context => Dictionary.ProofContext, Type_Sym => Type_Sym); -- check to prevent use of abstract proof types outside their package if Dictionary.TypeIsAbstractProof (Type_Sym) then ErrorHandler.Semantic_Error_Sym (Err_Num => 148, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Sym => Type_Sym, Scope => Scope); Type_Sym := Dictionary.GetUnknownTypeMark; end if; else Type_Sym := Dictionary.NullSymbol; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = identifier OR dotted_simple_name in Check_Own_Var_Type"); end if; end Check_Own_Var_Type; -------------------------------------------------------------------------- procedure Check_Modifiers (Node : in STree.SyntaxNode; Ident_Node : in STree.SyntaxNode; Announced_Type : in Dictionary.Symbol; Mode : out Modes; Is_Protected : out Boolean; Is_Task : out Boolean; Valid : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Announced_Type, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table & --# Is_Protected, --# Is_Task, --# Mode from Node, --# STree.Table & --# Valid from Announced_Type, --# CommandLineData.Content, --# Dictionary.Dict, --# Node, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.own_variable_modifier and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; is Modifier_Node, From_Node : STree.SyntaxNode; Err_Node_Pos : LexTokenManager.Token_Position; procedure Get_Mode (From_Node : in STree.SyntaxNode; Err_Node_Pos : 
in LexTokenManager.Token_Position; The_Mode : out Modes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Err_Node_Pos, --# From_Node, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table & --# The_Mode from From_Node, --# STree.Table; --# pre Syntax_Node_Type (From_Node, STree.Table) = SP_Symbols.in_mode or --# Syntax_Node_Type (From_Node, STree.Table) = SP_Symbols.inout_mode or --# Syntax_Node_Type (From_Node, STree.Table) = SP_Symbols.out_mode or --# From_Node = STree.NullNode; is begin The_Mode := Modes'(Value => Dictionary.DefaultMode, Is_Valid => True); if From_Node /= STree.NullNode then -- Mode is present case Syntax_Node_Type (Node => From_Node) is when SP_Symbols.in_mode => -- ASSUME From_Node = in_mode The_Mode := Modes'(Value => Dictionary.InMode, Is_Valid => True); when SP_Symbols.out_mode => -- ASSUME From_Node = out_mode The_Mode := Modes'(Value => Dictionary.OutMode, Is_Valid => True); when SP_Symbols.inout_mode => -- ASSUME From_Node = inout_mode The_Mode := Modes'(Value => Dictionary.InOutMode, Is_Valid => False); ErrorHandler.Semantic_Error (Err_Num => 700, Reference => ErrorHandler.No_Reference, Position => Err_Node_Pos, Id_Str => LexTokenManager.Null_String); when others => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect From_Node = in_mode OR inout_mode OR out_mode OR NULL in Get_Mode"); end case; end if; end Get_Mode; begin -- Check_Modifiers Mode := Modes'(Value => Dictionary.DefaultMode, Is_Valid => True); Is_Protected := False; -- default answers Is_Task := False; Err_Node_Pos := Node_Position (Node => Next_Sibling (Current_Node => Node)); Modifier_Node := Child_Node (Current_Node => Node); -- ASSUME Modifier_Node = mode OR protected_modifier OR protected_moded_modifier OR task_modifier case Syntax_Node_Type (Node => Modifier_Node) is when SP_Symbols.mode => -- ASSUME Modifier_Node = mode From_Node := Child_Node (Current_Node => Modifier_Node); -- ASSUME From_Node = in_mode OR inout_mode OR out_mode OR NULL SystemErrors.RT_Assert (C => From_Node = STree.NullNode or else Syntax_Node_Type (Node => From_Node) = SP_Symbols.in_mode or else Syntax_Node_Type (Node => From_Node) = SP_Symbols.inout_mode or else Syntax_Node_Type (Node => From_Node) = SP_Symbols.out_mode, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect From_Node = in_mode OR inout_mode OR out_mode OR NULL in Check_Modifiers"); Get_Mode (From_Node => From_Node, Err_Node_Pos => Err_Node_Pos, The_Mode => Mode); when SP_Symbols.protected_moded_modifier => -- ASSUME Modifier_Node = protected_moded_modifier From_Node := Next_Sibling (Current_Node => Child_Node (Current_Node => Modifier_Node)); -- ASSUME From_Node = in_mode OR out_mode SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => From_Node) = SP_Symbols.in_mode or else Syntax_Node_Type (Node => From_Node) = SP_Symbols.out_mode, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect From_Node = in_mode OR out_mode in Check_Modifiers"); Get_Mode (From_Node => From_Node, Err_Node_Pos => Err_Node_Pos, The_Mode => Mode); Is_Protected := True; when SP_Symbols.protected_modifier => -- ASSUME Modifier_Node = protected_modifier Is_Protected := True; when SP_Symbols.task_modifier => -- ASSUME Modifier_Node = task_modifier 
Is_Task := True; when others => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Modifier_Node = mode OR protected_modifier OR protected_moded_modifier OR " & "task_modifier in Check_Modifiers"); end case; Check_Task_Modifier_Consistency (The_Own_Var_Type => Announced_Type, The_Var_Type => Dictionary.NullSymbol, Modifier_Is_Task => Is_Task, Error_Node => Ident_Node, Consistent => Valid); if Valid then -- make sure we don't get two errors Check_Protected_Modifier_Consistency (The_Type => Announced_Type, Modifier_Is_Protected => Is_Protected, Error_Node => Ident_Node, Consistent => Valid); end if; end Check_Modifiers; ------------------------------------------------------------------------ procedure Validate (Node : in STree.SyntaxNode; Wff_Rule : in Wff_Rules; Pack_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Mode : in Modes; Is_Protected : in Boolean; Is_Task : in Boolean; Valid : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Is_Protected, --# Is_Task, --# LexTokenManager.State, --# Mode, --# Node, --# Pack_Sym, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Wff_Rule & --# STree.Table, --# Valid from CommandLineData.Content, --# Dictionary.Dict, --# Is_Protected, --# Is_Task, --# LexTokenManager.State, --# Mode, --# Node, --# Pack_Sym, --# Scope, --# STree.Table, --# Wff_Rule; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.identifier; --# post STree.Table = STree.Table~; is Ident_Str : LexTokenManager.Lex_String; Enclosing_Scope : Dictionary.Scopes; Sym : Dictionary.Symbol; Err_Num : Natural; begin Ident_Str := Node_Lex_String (Node => Node); Valid := False; case Wff_Rule is when Use_Library => if (Is_Task or else Is_Protected) and then not CommandLineData.Ravenscar_Selected then ErrorHandler.Semantic_Error (Err_Num => 850, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); elsif Dictionary.IsDefined (Name => Ident_Str, Scope => Scope, Context => Dictionary.ProofContext, Full_Package_Name => False) then ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Ident_Str); else Valid := True; end if; when Use_Embedded => Enclosing_Scope := Dictionary.GetEnclosingScope (Scope); Sym := Dictionary.LookupSelectedItem (Prefix => Pack_Sym, Selector => Ident_Str, Scope => Enclosing_Scope, Context => Dictionary.ProofContext); if (Is_Task or else Is_Protected) and then not CommandLineData.Ravenscar_Selected then ErrorHandler.Semantic_Error (Err_Num => 850, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); elsif Dictionary.Is_Null_Symbol (Sym) or else not Dictionary.IsRefinementConstituent (Dictionary.GetRegion (Enclosing_Scope), Sym) then ErrorHandler.Semantic_Error (Err_Num => 88, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Ident_Str); elsif not Dictionary.IsDefined (Name => Ident_Str, Scope => Scope, Context => Dictionary.ProofContext, Full_Package_Name => False) or else (Dictionary.IsOwnVariable (Sym) and not Dictionary.OwnVariableIsAnnounced (Sym)) then -- potentially 
ok to add, the own variable name matches a refinement -- constituent of the enclosing package, we just need to check that -- any modes are the same, and that the protected status is also the -- same. if Mode.Value = Dictionary.GetConstituentMode (Sym) then Valid := not Is_Protected; if Is_Protected then -- Cannot have protected refinement constituents ErrorHandler.Semantic_Error (Err_Num => 859, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Ident_Str); else STree.Set_Node_Lex_String (Sym => Sym, Node => Node); end if; else if Dictionary.GetConstituentMode (Sym) = Dictionary.InMode then Err_Num := 702; -- must be IN elsif Dictionary.GetConstituentMode (Sym) = Dictionary.OutMode then Err_Num := 703; -- must be OUT else Err_Num := 704; -- no mode permitted end if; Valid := False; ErrorHandler.Semantic_Error (Err_Num => Err_Num, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Ident_Str); end if; else -- illegal redeclaration ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Ident_Str); end if; end case; end Validate; ------------------------------------------------------------------------ procedure Wf_Own_Task (Node : in STree.SyntaxNode; Ident_Str : in LexTokenManager.Lex_String; Declaration : in Dictionary.Location; Type_Announced : in Boolean; Announced_Type : in Dictionary.Symbol; Pack_Sym : in Dictionary.Symbol; Task_Sym : out Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in STree.Table; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict, --# Task_Sym from Announced_Type, --# ContextManager.Ops.Unit_Stack, --# Declaration, --# Dictionary.Dict, --# Ident_Str, --# Pack_Sym, --# Type_Announced & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Str, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table, --# Type_Announced; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.own_variable_specification; is begin if not Type_Announced then Task_Sym := Dictionary.NullSymbol; -- Tasks must announce their type ErrorHandler.Semantic_Error (Err_Num => 925, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Ident_Str); else Dictionary.AddOwnTask (Name => Ident_Str, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Declaration, TypeMark => Announced_Type, ThePackage => Pack_Sym, TaskSym => Task_Sym); end if; end Wf_Own_Task; ------------------------------------------------------------------------- procedure Check_For_Illegal_Multiple_Instances (Own_Var_Sym : in Dictionary.Symbol; Node_Pos : in LexTokenManager.Token_Position; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# Own_Var_Sym, --# Scope, --# SPARK_IO.File_Sys; is It : Dictionary.Iterator; Search_Sym : Dictionary.Symbol; Own_Var_Type : Dictionary.Symbol; Search_Type : Dictionary.Symbol; Duplicate_Found : Boolean := 
False; begin -- This restriction only applies to protected own variables if Dictionary.GetOwnVariableProtected (Own_Var_Sym) and then Dictionary.OwnVariableHasType (OwnVariable => Own_Var_Sym, Scope => Scope) then Own_Var_Type := Dictionary.GetType (Own_Var_Sym); -- go through all the own variables It := Dictionary.FirstOwnVariable (Dictionary.GetRegion (Scope)); while not Dictionary.IsNullIterator (It) loop Search_Sym := Dictionary.CurrentSymbol (It); -- don't compare it against itself if not Dictionary.Variables_Are_Equal (Left_Symbol => Search_Sym, Right_Symbol => Own_Var_Sym) then if Dictionary.OwnVariableHasType (OwnVariable => Search_Sym, Scope => Scope) then Search_Type := Dictionary.GetType (Search_Sym); -- are the types the same? if Dictionary.Types_Are_Equal (Left_Symbol => Search_Type, Right_Symbol => Own_Var_Type, Full_Range_Subtype => False) then -- If either has a protects property then report illegal -- multiple instance of the type. if Dictionary.FirstVirtualElement (Own_Var_Type) /= Dictionary.NullIterator or else Dictionary.FirstVirtualElement (Search_Type) /= Dictionary.NullIterator then ErrorHandler.Semantic_Error_Sym (Err_Num => 942, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Sym => Own_Var_Type, Scope => Scope); Duplicate_Found := True; end if; end if; end if; end if; exit when Duplicate_Found; It := Dictionary.NextSymbol (It); end loop; end if; end Check_For_Illegal_Multiple_Instances; begin -- Wf_Own_Variable_Specification -- determine if there is a type announcement and/or properties Ident_List_Node := Child_Node (Current_Node => Node); -- ASSUME Ident_List_Node = own_variable_list SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_List_Node) = SP_Symbols.own_variable_list, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_List_Node = own_variable_list in Wf_Own_Variable_Specification"); Announced_Type := Dictionary.GetUnknownTypeMark; Type_Node := Node; -- so that errors get reported somewhere other than the null node Type_Announced := False; -- determine what checks we will need to do on each own var if Scope_Type = In_Package then Wff_Rule := Use_Embedded; else Wff_Rule := Use_Library; end if; Next_Node := Next_Sibling (Current_Node => Ident_List_Node); -- ASSUME Next_Node = type_mark OR property_list OR NULL if Syntax_Node_Type (Node => Next_Node) = SP_Symbols.type_mark then -- ASSUME Next_Node = type_mark Type_Announced := True; Type_Node := Next_Node; Check_Own_Var_Type (Node => Type_Node, Pack_Sym => Pack_Sym, Scope => Scope, Type_Sym => Announced_Type); Property_List_Node := Next_Sibling (Current_Node => Type_Node); elsif Next_Node = STree.NullNode or else Syntax_Node_Type (Node => Next_Node) = SP_Symbols.property_list then Property_List_Node := Next_Node; else Property_List_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = type_mark OR property_list OR NULL in Wf_Own_Variable_Specification"); end if; -- ASSUME Property_List_Node = property_list OR NULL SystemErrors.RT_Assert (C => Property_List_Node = STree.NullNode or else Syntax_Node_Type (Node => Property_List_Node) = SP_Symbols.property_list, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Property_List_Node = property_list OR NULL in Wf_Own_Variable_Specification"); -- loop through each own variable calling a validate routine for each It := Find_First_Node (Node_Kind => SP_Symbols.own_variable_modifier, From_Root => Ident_List_Node, In_Direction => STree.Down); while not 
STree.IsNull (It) loop Next_Node := Get_Node (It => It); SystemErrors.RT_Assert (C => Dictionary.IsTypeMark (Announced_Type), Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Announced_Type to be a type in Wf_Own_Variable_Specification"); --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.own_variable_modifier and --# Dictionary.IsTypeMark (Announced_Type, Dictionary.Dict) and --# Next_Node = Get_Node (It); Ident_Node := Child_Node (Current_Node => Child_Node (Current_Node => Next_Sibling (Current_Node => Next_Node))); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Wf_Own_Variable_Specification"); Check_Modifiers (Node => Next_Node, Ident_Node => Ident_Node, Announced_Type => Announced_Type, Mode => Mode, Is_Protected => Is_Protected, Is_Task => Is_Task, Valid => Valid); if Valid then Validate (Node => Ident_Node, Wff_Rule => Wff_Rule, Pack_Sym => Pack_Sym, Scope => Scope, Mode => Mode, Is_Protected => Is_Protected, Is_Task => Is_Task, Valid => Valid); Ident_Str := Node_Lex_String (Node => Ident_Node); if Valid and then Type_Announced then if Dictionary.IsSubtype (Announced_Type) and then Dictionary.IsProtectedTypeMark (Dictionary.GetRootType (Announced_Type)) then -- Cannot use subtypes of protected types in own variable type announcements ErrorHandler.Semantic_Error (Err_Num => 948, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Ident_Str); else if Dictionary.FirstVirtualElement (Announced_Type) /= Dictionary.NullIterator then -- There is an own variable declaring a protects list for this type and -- hence this is an illegal second instance. -- Valid := False; ErrorHandler.Semantic_Error_Sym (Err_Num => 942, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Sym => Announced_Type, Scope => Scope); else Valid := True; end if; end if; end if; if Valid then if Is_Task then Wf_Own_Task (Node => Node, Ident_Str => Ident_Str, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Ident_Node), End_Position => Node_Position (Node => Ident_Node)), Type_Announced => Type_Announced, Announced_Type => Announced_Type, Pack_Sym => Pack_Sym, Task_Sym => Own_Var_Sym); else --# accept Flow, 41, "Expected stable expression"; if Type_Announced then --# end accept; Type_To_Use := Announced_Type; else -- create a default proof type and return that Dictionary.Add_Default_Abstract_Proof_Type (Name => Ident_Str, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Ident_Node), End_Position => Node_Position (Node => Ident_Node)), Scope => Scope, The_Type => Type_To_Use); end if; STree.Add_Node_Symbol (Node => Ident_Node, Sym => Type_To_Use); if Mode.Is_Valid then Dictionary.Add_Own_Variable (Name => Ident_Str, The_Package => Pack_Sym, Mode => Mode.Value, Is_Protected => Is_Protected, Is_Interrupt_Stream => False, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Ident_Node), End_Position => Node_Position (Node => Ident_Node)), Var_Symbol => Own_Var_Sym); if Is_Protected and then Mode.Value = Dictionary.DefaultMode then -- Protected own variables are implicitly initialized and hence -- effectively appear in the initializes clause. 
-- Protected streams should not implicitly appear in the -- initializes clause as we will never initialize them. Dictionary.AddInitializedOwnVariable (Own_Var_Sym, Dictionary.Location'(Start_Position => Node_Position (Node => Ident_Node), End_Position => Node_Position (Node => Ident_Node))); end if; Dictionary.AddOwnVariableType (Own_Var_Sym, Type_To_Use, Dictionary.Location'(Start_Position => Node_Position (Node => Type_Node), End_Position => Node_Position (Node => Type_Node))); else Own_Var_Sym := Dictionary.NullSymbol; end if; end if; if Syntax_Node_Type (Node => Property_List_Node) = SP_Symbols.property_list and then not Dictionary.Is_Null_Symbol (Own_Var_Sym) then -- ASSUME Property_List_Node = property_list Wf_Property_List (Node => Property_List_Node, Type_Node_Pos => Node_Position (Node => Type_Node), Scope => Scope, The_Owner => Own_Var_Sym, The_Heap => The_Heap); end if; if not Dictionary.Is_Null_Symbol (Own_Var_Sym) and then Dictionary.IsOwnVariable (Own_Var_Sym) then Check_For_Illegal_Multiple_Instances (Own_Var_Sym => Own_Var_Sym, Node_Pos => Node_Position (Node => Ident_Node), Scope => Scope); end if; end if; end if; It := STree.NextNode (It); end loop; end Wf_Own_Variable_Specification; -------------------------------------------------------------------- procedure Check_All_Refinements_Accounted_For (Own_Var_Clause_Node : in STree.SyntaxNode; Pack_Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Own_Var_Clause_Node, --# Pack_Sym, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Own_Var_Clause_Node, STree.Table) = SP_Symbols.own_variable_clause; is Var_Sym : Dictionary.Symbol; It : Dictionary.Iterator; Err_Pos : LexTokenManager.Token_Position; ---------------------------------------------------- function Find_Last_In_Own_Var_Clause (Own_Var_Clause_Node : STree.SyntaxNode) return LexTokenManager.Token_Position --# global in STree.Table; --# pre Syntax_Node_Type (Own_Var_Clause_Node, STree.Table) = SP_Symbols.own_variable_clause; is Ret_Node, Next_Node : STree.SyntaxNode; begin Ret_Node := Child_Node (Current_Node => Child_Node (Current_Node => Own_Var_Clause_Node)); -- ASSUME Ret_Node = own_variable_clause_rep OR own_variable_specification SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ret_Node) = SP_Symbols.own_variable_clause_rep or else Syntax_Node_Type (Node => Ret_Node) = SP_Symbols.own_variable_specification, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ret_Node = own_variable_clause_rep OR own_variable_specification in Find_Last_In_Own_Var_Clause"); Next_Node := Next_Sibling (Current_Node => Ret_Node); -- ASSUME Next_Node = own_variable_specification OR NULL if Syntax_Node_Type (Node => Next_Node) = SP_Symbols.own_variable_specification then -- ASSUME Next_Node = own_variable_specification Ret_Node := Next_Node; elsif Next_Node /= STree.NullNode then Ret_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = own_variable_specification OR NULL in Find_Last_In_Own_Var_Clause"); end if; -- ASSUME Ret_Node = own_variable_specification SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ret_Node) = SP_Symbols.own_variable_specification, 
Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ret_Node = own_variable_specification in Find_Last_In_Own_Var_Clause"); Ret_Node := Child_Node (Current_Node => Child_Node (Current_Node => Ret_Node)); -- ASSUME Ret_Node = own_variable_list OR own_variable_modifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ret_Node) = SP_Symbols.own_variable_list or else Syntax_Node_Type (Node => Ret_Node) = SP_Symbols.own_variable_modifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ret_Node = own_variable_list OR own_variable_modifier in Find_Last_In_Own_Var_Clause"); Next_Node := Next_Sibling (Current_Node => Ret_Node); -- ASSUME Next_Node = own_variable_modifier OR own_variable if Syntax_Node_Type (Node => Next_Node) = SP_Symbols.own_variable_modifier then -- ASSUME Next_Node = own_variable_modifier Ret_Node := Next_Node; elsif Syntax_Node_Type (Node => Next_Node) /= SP_Symbols.own_variable then Ret_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = own_variable_modifier OR own_variable in Find_Last_In_Own_Var_Clause"); end if; -- ASSUME Ret_Node = own_variable_modifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ret_Node) = SP_Symbols.own_variable_modifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ret_Node = own_variable_modifier in Find_Last_In_Own_Var_Clause"); return Node_Position (Node => Next_Sibling (Current_Node => Ret_Node)); end Find_Last_In_Own_Var_Clause; begin -- Check_All_Refinements_Accounted_For Err_Pos := Find_Last_In_Own_Var_Clause (Own_Var_Clause_Node => Own_Var_Clause_Node); It := Dictionary.FirstOwnVariable (Pack_Sym); while not Dictionary.IsNullIterator (It) loop Var_Sym := Dictionary.CurrentSymbol (It); if not Dictionary.OwnVariableIsAnnounced (Var_Sym) then ErrorHandler.Semantic_Error (Err_Num => 77, Reference => ErrorHandler.No_Reference, Position => Err_Pos, Id_Str => Dictionary.GetSimpleName (Var_Sym)); end if; It := Dictionary.NextSymbol (It); end loop; end Check_All_Refinements_Accounted_For; -------------------------------------------------------------------- procedure Check_Owner_Body_Not_Yet_Declared (Own_Var_Clause_Node : in STree.SyntaxNode; Pack_Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Own_Var_Clause_Node, --# Pack_Sym, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Own_Var_Clause_Node, STree.Table) = SP_Symbols.own_variable_clause; is Owner : Dictionary.Symbol; begin Owner := Dictionary.GetPackageOwner (Pack_Sym); if not Dictionary.Is_Null_Symbol (Owner) and then Dictionary.HasBody (Owner) then ErrorHandler.Semantic_Error (Err_Num => 620, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Own_Var_Clause_Node), Id_Str => Dictionary.GetSimpleName (Owner)); end if; end Check_Owner_Body_Not_Yet_Declared; begin -- Wf_Own It := Find_First_Node (Node_Kind => SP_Symbols.own_variable_specification, From_Root => Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.own_variable_specification and --# Next_Node = 
Get_Node (It); Wf_Own_Variable_Specification (Node => Next_Node, Pack_Sym => Pack_Sym, Scope_Type => Scope_Type, Scope => Scope, The_Heap => The_Heap); It := STree.NextNode (It); end loop; if Scope_Type = In_Package then Check_All_Refinements_Accounted_For (Own_Var_Clause_Node => Node, Pack_Sym => Pack_Sym); elsif Scope_Type = In_Library then Check_Owner_Body_Not_Yet_Declared (Own_Var_Clause_Node => Node, Pack_Sym => Pack_Sym); end if; end Wf_Own; spark-2012.0.deb/examiner/declarations-outputdeclarations-printdeclarations-printruleheader.adb0000644000175000017500000000630411753202336032241 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Declarations.OutputDeclarations.PrintDeclarations) procedure PrintRuleHeader (Write_Rules : in Boolean; Rule_File : in SPARK_IO.File_Type) is OK : Boolean; procedure Setup_Family_Name --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in Scope; --# out Rule_Family_Name; --# derives Rule_Family_Name from Dictionary.Dict, --# LexTokenManager.State, --# Scope; is Max_Name_Len : constant E_Strings.Lengths := 12; begin Rule_Family_Name := LexTokenManager.Lex_String_To_String (Lex_Str => Dictionary.GetSimpleName (Dictionary.GetRegion (Scope))); if E_Strings.Get_Length (E_Str => Rule_Family_Name) > Max_Name_Len then Rule_Family_Name := E_Strings.Section (E_Str => Rule_Family_Name, Start_Pos => 1, Length => Max_Name_Len); end if; Rule_Family_Name := E_Strings.Lower_Case (E_Str => Rule_Family_Name); E_Strings.Append_String (E_Str => Rule_Family_Name, Str => "_rules"); end Setup_Family_Name; begin --PrintRuleHeader --# accept Flow, 10, OK, "Expected ineffective assignment to OK"; Lists.New_List (Heap => L_Heap, The_List => Type_List, OK => OK); --# end accept; Setup_Family_Name; Rule_Counter := 0; if Write_Rules then SPARK_IO.Put_String (Rule_File, "rule_family ", 0); E_Strings.Put_String (File => Rule_File, E_Str => Rule_Family_Name); SPARK_IO.Put_Line (Rule_File, ":", 0); SPARK_IO.Set_Col (Rule_File, 2 * Indent); SPARK_IO.Put_Line (Rule_File, "X requires [X:any] &", 0); SPARK_IO.Set_Col (Rule_File, 2 * Indent); SPARK_IO.Put_Line (Rule_File, "X <= Y requires [X:ire, Y:ire] &", 0); SPARK_IO.Set_Col (Rule_File, 2 * Indent); SPARK_IO.Put_Line (Rule_File, "X >= Y requires [X:ire, Y:ire].", 0); SPARK_IO.New_Line (Rule_File, 1); end if; --# accept Flow, 33, OK, "Expected OK to be neither referenced nor exported"; end PrintRuleHeader; spark-2012.0.deb/examiner/graph.adb0000644000175000017500000024366311753202336016040 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran 
Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Clists; with CommandLineData; with DAG_IO; with Debug; with Declarations; with E_Strings.Not_SPARK; with LexTokenManager; with Pairs; with Structures; with SystemErrors; package body Graph --# own Table is Assertion_Locn, --# Column, --# In_Degree, --# Nmbr_Of_Stmts, --# Out_Degree, --# Proof_Context, --# Refinement_Post_Check, --# Refinement_Pre_Check, --# Row, --# Subclass_Post_Check, --# Subclass_Pre_Check, --# Text_Line_Nmbr; -- If more refinement constituents are added here, then -- the initialization code in the package body elaboration part -- AND in procedure Reinitialize_Graph will need to be updated. is type Vector_Of_Cells is array (Matrix_Index) of Cells.Cell; type Proof_Context_Array is array (Matrix_Index) of Proof_Context_Type; type Vector_Of_Integers is array (Matrix_Index) of Integer; type Vector_Of_Degrees is array (Matrix_Index) of Natural; Row : Vector_Of_Cells; Column : Vector_Of_Cells; In_Degree : Vector_Of_Degrees; Out_Degree : Vector_Of_Degrees; Nmbr_Of_Stmts : Matrix_Index; Assertion_Locn : Vector_Of_Cells; Proof_Context : Proof_Context_Array; Text_Line_Nmbr : Vector_Of_Integers; Refinement_Pre_Check : Cells.Cell; Refinement_Post_Check : Cells.Cell; Subclass_Pre_Check : Cells.Cell; Subclass_Post_Check : Cells.Cell; -------------------------------------------------------------------------- procedure Inc_Nmbr_Of_Stmts --# global in out Nmbr_Of_Stmts; --# derives Nmbr_Of_Stmts from *; is begin if Nmbr_Of_Stmts = ExaminerConstants.VCGMatrixOrder then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.VCG_Graph_Size_Exceeded, Msg => ""); end if; Nmbr_Of_Stmts := Nmbr_Of_Stmts + 1; end Inc_Nmbr_Of_Stmts; -------------------------------------------------------------------------- procedure Set_Nmbr_Of_Stmts (N : in Matrix_Index) --# global out Nmbr_Of_Stmts; --# derives Nmbr_Of_Stmts from N; is begin Nmbr_Of_Stmts := N; end Set_Nmbr_Of_Stmts; -------------------------------------------------------------------------- function Get_Nmbr_Of_Stmts return Matrix_Index --# global in Nmbr_Of_Stmts; is begin return Nmbr_Of_Stmts; end Get_Nmbr_Of_Stmts; -------------------------------------------------------------------------- procedure Set_Proof_Context (X : in Proof_Context_Type) --# global in Nmbr_Of_Stmts; --# in out Proof_Context; --# derives Proof_Context from *, --# Nmbr_Of_Stmts, --# X; is begin Proof_Context (Nmbr_Of_Stmts) := X; end Set_Proof_Context; -------------------------------------------------------------------------- procedure Set_First_Proof_Context (X : in Proof_Context_Type) --# global in out Proof_Context; --# derives Proof_Context from *, --# X; is begin Proof_Context (1) := X; end 
Set_First_Proof_Context; -------------------------------------------------------------------------- procedure Set_Assertion_Locn (X : in Cells.Cell) --# global in Nmbr_Of_Stmts; --# in out Assertion_Locn; --# derives Assertion_Locn from *, --# Nmbr_Of_Stmts, --# X; is begin Assertion_Locn (Nmbr_Of_Stmts) := X; end Set_Assertion_Locn; -------------------------------------------------------------------------- procedure Set_First_Assertion_Locn (X : in Cells.Cell) --# global in out Assertion_Locn; --# derives Assertion_Locn from *, --# X; is begin Assertion_Locn (1) := X; end Set_First_Assertion_Locn; -------------------------------------------------------------------------- function Get_Assertion_Locn return Cells.Cell --# global in Assertion_Locn; --# in Nmbr_Of_Stmts; is begin return Assertion_Locn (Nmbr_Of_Stmts); end Get_Assertion_Locn; -------------------------------------------------------------------------- function Get_Preceding_Assertion_Locn return Cells.Cell --# global in Assertion_Locn; --# in Nmbr_Of_Stmts; -- pre Nmbr_Of_Stmts > 1; is begin return Assertion_Locn (Nmbr_Of_Stmts - 1); end Get_Preceding_Assertion_Locn; -------------------------------------------------------------------------- procedure Set_Text_Line_Nmbr (X : in Integer) --# global in Nmbr_Of_Stmts; --# in out Text_Line_Nmbr; --# derives Text_Line_Nmbr from *, --# Nmbr_Of_Stmts, --# X; is begin Text_Line_Nmbr (Nmbr_Of_Stmts) := X; end Set_Text_Line_Nmbr; ----------------------------------------------------------------------- procedure Insert_Text_Line_Nmbr (Index : in Matrix_Index; X : in Integer) --# global in out Text_Line_Nmbr; --# derives Text_Line_Nmbr from *, --# Index, --# X; is begin Text_Line_Nmbr (Index) := X; end Insert_Text_Line_Nmbr; ----------------------------------------------------------------------- procedure Set_Refinement_Pre_Check (X : in Cells.Cell) --# global out Refinement_Pre_Check; --# derives Refinement_Pre_Check from X; is begin Refinement_Pre_Check := X; end Set_Refinement_Pre_Check; -------------------------------------------------------------------------- procedure Set_Refinement_Post_Check (X : in Cells.Cell) --# global out Refinement_Post_Check; --# derives Refinement_Post_Check from X; is begin Refinement_Post_Check := X; end Set_Refinement_Post_Check; -------------------------------------------------------------------------- procedure Set_Subclass_Pre_Check (X : in Cells.Cell) --# global out Subclass_Pre_Check; --# derives Subclass_Pre_Check from X; is begin Subclass_Pre_Check := X; end Set_Subclass_Pre_Check; -------------------------------------------------------------------------- procedure Set_Subclass_Post_Check (X : in Cells.Cell) --# global out Subclass_Post_Check; --# derives Subclass_Post_Check from X; is begin Subclass_Post_Check := X; end Set_Subclass_Post_Check; -------------------------------------------------------------------------- procedure Reinitialize_Graph --# global out Assertion_Locn; --# out Column; --# out In_Degree; --# out Nmbr_Of_Stmts; --# out Out_Degree; --# out Proof_Context; --# out Refinement_Post_Check; --# out Refinement_Pre_Check; --# out Row; --# out Subclass_Post_Check; --# out Subclass_Pre_Check; --# out Text_Line_Nmbr; --# derives Assertion_Locn, --# Column, --# In_Degree, --# Nmbr_Of_Stmts, --# Out_Degree, --# Proof_Context, --# Refinement_Post_Check, --# Refinement_Pre_Check, --# Row, --# Subclass_Post_Check, --# Subclass_Pre_Check, --# Text_Line_Nmbr from ; is begin -- If this procedure changes, then the package -- elaboration code at the 
end of this compilation -- unit will also need to be updated. --# accept F, 23, Row, "Initialization is total" & --# F, 23, Column, "Initialization is total" & --# F, 23, In_Degree, "Initialization is total" & --# F, 23, Out_Degree, "Initialization is total" & --# F, 23, Proof_Context, "Initialization is total" & --# F, 23, Text_Line_Nmbr, "Initialization is total" & --# F, 23, Assertion_Locn, "Initialization is total"; for I in Matrix_Index loop Row (I) := Cells.Null_Cell; Column (I) := Cells.Null_Cell; In_Degree (I) := 0; Out_Degree (I) := 0; Proof_Context (I) := Unspecified; Text_Line_Nmbr (I) := 0; Assertion_Locn (I) := Cells.Null_Cell; end loop; --# end accept; Nmbr_Of_Stmts := 1; Refinement_Pre_Check := Cells.Null_Cell; Refinement_Post_Check := Cells.Null_Cell; Subclass_Pre_Check := Cells.Null_Cell; Subclass_Post_Check := Cells.Null_Cell; --# accept F, 602, Row, Row, "Initialization is total" & --# F, 602, Column, Column, "Initialization is total" & --# F, 602, In_Degree, In_Degree, "Initialization is total" & --# F, 602, Out_Degree, Out_Degree, "Initialization is total" & --# F, 602, Proof_Context, Proof_Context, "Initialization is total" & --# F, 602, Text_Line_Nmbr, Text_Line_Nmbr, "Initialization is total" & --# F, 602, Assertion_Locn, Assertion_Locn, "Initialization is total"; end Reinitialize_Graph; -------------------------------------------------------------------------- procedure Create_Coeff (Heap : in out Cells.Heap_Record; I, J : in Matrix_Index; K : in Labels.Label) --# global in out Column; --# in out In_Degree; --# in out Out_Degree; --# in out Row; --# in out Statistics.TableUsage; --# derives Column from *, --# Heap, --# J & --# Heap from *, --# Column, --# I, --# J, --# K, --# Row & --# In_Degree from *, --# J & --# Out_Degree from *, --# I & --# Row from *, --# Heap, --# I & --# Statistics.TableUsage from *, --# Heap; -- creates coefficient A(I, J), with value K; is -- 2 New_Element Cells used to reduce heap coupling in flow relations New_Row_Element, New_Col_Element : Cells.Cell; begin -- Create both new cells first Cells.Create_Cell (Heap, New_Row_Element); Cells.Create_Cell (Heap, New_Col_Element); -- Set row pointer; Cells.Set_Natural_Value (Heap, New_Row_Element, J); Cells.Set_B_Ptr (Heap, New_Row_Element, Labels.LabelHead (K)); Cells.Set_A_Ptr (Heap, New_Row_Element, Row (I)); Row (I) := New_Row_Element; Out_Degree (I) := Out_Degree (I) + 1; -- Set column pointer; Cells.Set_Natural_Value (Heap, New_Col_Element, I); Cells.Set_B_Ptr (Heap, New_Col_Element, Labels.LabelHead (K)); Cells.Set_A_Ptr (Heap, New_Col_Element, Column (J)); Column (J) := New_Col_Element; In_Degree (J) := In_Degree (J) + 1; end Create_Coeff; -------------------------------------------------------------------------- function Coefficient (Heap : Cells.Heap_Record; I, J : Matrix_Index) return Labels.Label --# global in Row; is Elem, Coeff_Cell : Cells.Cell; begin Coeff_Cell := Cells.Null_Cell; -- Pick out the head of the coefficient list for Row I Elem := Row (I); loop exit when Cells.Is_Null_Cell (Elem); -- Search the coeff list until a coefficient for column J -- is found if Cells.Get_Natural_Value (Heap, Elem) = J then -- Got it! 
Return the Label associated with this coefficient Coeff_Cell := Cells.Get_B_Ptr (Heap, Elem); exit; end if; Elem := Cells.Get_A_Ptr (Heap, Elem); end loop; return Labels.CellToLabel (Coeff_Cell); end Coefficient; -------------------------------------------------------------------------- procedure Dump_Graph_Dot (Heap : in out Cells.Heap_Record; Output_File_Name : in E_Strings.T; Output_File_Name_Suffix : in Natural; Scope : in Dictionary.Scopes; Print_Edges_As : in DOT_Dump_Kind) --# global in Assertion_Locn; --# in Column; --# in In_Degree; --# in Nmbr_Of_Stmts; --# in Out_Degree; --# in Proof_Context; --# in Row; --# derives Heap from * & --# null from Assertion_Locn, --# Column, --# In_Degree, --# Nmbr_Of_Stmts, --# Output_File_Name, --# Output_File_Name_Suffix, --# Out_Degree, --# Print_Edges_As, --# Proof_Context, --# Row, --# Scope; is --# hide Dump_Graph_Dot; Arc : Cells.Cell; Arc_Found : Boolean; Arc_Label : Labels.Label; Current_Pair : Pairs.Pair; Output_File : SPARK_IO.File_Type; OK : SPARK_IO.File_Status; procedure Form_And_Open_Output_File is -- Chop of the .vcg extension FN : constant String := E_Strings.Not_SPARK.Get_String (E_Str => E_Strings.Section (E_Str => Output_File_Name, Start_Pos => 1, Length => E_Strings.Get_Length (E_Str => Output_File_Name) - 4)); -- Form the suffix string and chop off the leading space Suffix : constant String := Natural'Image (Output_File_Name_Suffix); Chopped_Suffix : constant String := String (Suffix (2 .. Suffix'Last)); DOT_Name : constant String := FN & "_" & Chopped_Suffix & ".dot"; begin SPARK_IO.Create (File => Output_File, Name_Length => DOT_Name'Length, Name_Of_File => DOT_Name, Form_Of_File => "", Status => OK); end Form_And_Open_Output_File; procedure Print_Logical_Expn_DOT (Root : in Cells.Cell) is Sub_Expn_List : Cells.Cell; List_Member : Cells.Cell; begin Clists.CreateList (Heap, Sub_Expn_List); DAG_IO.Partition (Root, Sub_Expn_List, Heap); List_Member := Clists.FirstCell (Heap, Sub_Expn_List); DAG_IO.PrintDag (Heap, Output_File, Cells.Get_B_Ptr (Heap, List_Member), Scope, DAG_IO.No_Wrap); List_Member := Clists.NextCell (Heap, List_Member); loop exit when Cells.Is_Null_Cell (List_Member); SPARK_IO.Put_String (File => Output_File, Item => " and\l", Stop => 0); DAG_IO.PrintDag (Heap, Output_File, Cells.Get_B_Ptr (Heap, List_Member), Scope, DAG_IO.No_Wrap); List_Member := Clists.NextCell (Heap, List_Member); end loop; SPARK_IO.Put_String (File => Output_File, Item => "\l", Stop => 0); Clists.DisposeOfList (Heap, Sub_Expn_List); end Print_Logical_Expn_DOT; procedure Print_PTC is Predicate : Cells.Cell; begin SPARK_IO.Put_String (File => Output_File, Item => "taillabel=""", Stop => 0); if Pairs.IsTrue (Heap, Current_Pair) then SPARK_IO.Put_String (File => Output_File, Item => "true", Stop => 0); else Predicate := Cells.Get_B_Ptr (Heap, Pairs.PairHead (Current_Pair)); Print_Logical_Expn_DOT (Root => Predicate); end if; SPARK_IO.Put_String (File => Output_File, Item => """", Stop => 0); end Print_PTC; procedure Print_Action is Suppress_Wrap : Boolean := False; Action : Cells.Cell; Mod_Cell : Cells.Cell; begin SPARK_IO.Put_String (File => Output_File, Item => "headlabel=""", Stop => 0); if Pairs.IsUnitAction (Heap, Current_Pair) then SPARK_IO.Put_String (File => Output_File, Item => "null", Stop => 0); else Action := Cells.Get_C_Ptr (Heap, Pairs.PairHead (Current_Pair)); Mod_Cell := Clists.FirstCell (Heap, Action); DAG_IO.Print_Cell_Contents (Heap => Heap, Output_File => Output_File, Cell_Name => Mod_Cell, Suppress_Wrap => Suppress_Wrap, 
Scope => Scope, Wrap_Limit => DAG_IO.No_Wrap, Escape_DOT => False); SPARK_IO.Put_String (File => Output_File, Item => " := ", Stop => 0); DAG_IO.PrintDag (Heap, Output_File, Cells.Get_B_Ptr (Heap, Mod_Cell), Scope, DAG_IO.No_Wrap); Mod_Cell := Clists.NextCell (Heap, Mod_Cell); loop exit when Cells.Is_Null_Cell (Mod_Cell); SPARK_IO.Put_String (File => Output_File, Item => " &\n", Stop => 0); DAG_IO.Print_Cell_Contents (Heap => Heap, Output_File => Output_File, Cell_Name => Mod_Cell, Suppress_Wrap => Suppress_Wrap, Scope => Scope, Wrap_Limit => DAG_IO.No_Wrap, Escape_DOT => False); SPARK_IO.Put_String (File => Output_File, Item => " := ", Stop => 0); DAG_IO.PrintDag (Heap, Output_File, Cells.Get_B_Ptr (Heap, Mod_Cell), Scope, DAG_IO.No_Wrap); Mod_Cell := Clists.NextCell (Heap, Mod_Cell); end loop; end if; SPARK_IO.Put_String (File => Output_File, Item => """", Stop => 0); end Print_Action; procedure Print_VC is Hypotheses : Cells.Cell; Conclusions : Cells.Cell; begin SPARK_IO.Put_String (File => Output_File, Item => "label=""", Stop => 0); if Pairs.IsTrue (Heap, Current_Pair) then SPARK_IO.Put_String (File => Output_File, Item => "true", Stop => 0); else Hypotheses := Cells.Get_B_Ptr (Heap, Pairs.PairHead (Current_Pair)); Print_Logical_Expn_DOT (Root => Hypotheses); SPARK_IO.Put_String (File => Output_File, Item => "\l->\l", Stop => 0); Conclusions := Cells.Get_C_Ptr (Heap, Pairs.PairHead (Current_Pair)); Print_Logical_Expn_DOT (Root => Conclusions); end if; SPARK_IO.Put_String (File => Output_File, Item => """", Stop => 0); end Print_VC; procedure Print_Node_Detail (I : in Matrix_Index) is begin SPARK_IO.Put_Integer (File => Output_File, Item => I, Width => 0, Base => 10); case Proof_Context (I) is when Precondition | Assertion | Default_Assertion | Postcondition => -- Make cut-point nodes filled in 50% gray, so they are easy to see SPARK_IO.Put_String (File => Output_File, Item => " [style=filled,color=gray50,", Stop => 0); when others => SPARK_IO.Put_String (File => Output_File, Item => " [", Stop => 0); end case; SPARK_IO.Put_String (File => Output_File, Item => "label=""", Stop => 0); -- Write label as node number and node type... 
SPARK_IO.Put_String (File => Output_File, Item => Matrix_Index'Image (I) & ' ' & Proof_Context_Type'Image (Proof_Context (I)), Stop => 0); -- ...and source line if present if Text_Line_Nmbr (I) /= 0 then SPARK_IO.Put_String (File => Output_File, Item => " line ", Stop => 0); SPARK_IO.Put_Integer (File => Output_File, Item => Text_Line_Nmbr (I), Width => 0, Base => 10); end if; SPARK_IO.Put_String (File => Output_File, Item => "\n", Stop => 0); Print_Logical_Expn_DOT (Root => Assertion_Locn (I)); SPARK_IO.Put_String (File => Output_File, Item => """];", Stop => 0); end Print_Node_Detail; begin Form_And_Open_Output_File; if OK = SPARK_IO.Ok then SPARK_IO.Put_String (File => Output_File, Item => "digraph ", Stop => 0); E_Strings.Put_String (File => Output_File, E_Str => E_Strings.Lower_Case (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Dictionary.GetSimpleName (Dictionary.GetRegion (Scope))))); SPARK_IO.Put_Line (File => Output_File, Item => " {", Stop => 0); SPARK_IO.Put_Line (File => Output_File, Item => "ranksep=""1.0 equally"";", Stop => 0); SPARK_IO.Put_Line (File => Output_File, Item => "nodesep=1.0;", Stop => 0); SPARK_IO.Put_Line (File => Output_File, Item => "node [shape=box,fontname=helvetica];", Stop => 0); SPARK_IO.Put_Line (File => Output_File, Item => "edge [labelfontname=helvetica,labelfontsize=10];", Stop => 0); -- Nodes for I in Matrix_Index range 1 .. Nmbr_Of_Stmts loop if In_Degree (I) = 0 and Out_Degree (I) = 0 then null; -- node not connected, so skip else if I = 1 then -- Precondition SPARK_IO.Put_String (File => Output_File, Item => "{ rank = source; ", Stop => 0); Print_Node_Detail (I => I); SPARK_IO.Put_String (File => Output_File, Item => " }", Stop => 0); elsif I = Nmbr_Of_Stmts then -- Postcondition SPARK_IO.Put_String (File => Output_File, Item => "{ rank = sink; ", Stop => 0); Print_Node_Detail (I => I); SPARK_IO.Put_String (File => Output_File, Item => " }", Stop => 0); else Print_Node_Detail (I => I); end if; SPARK_IO.New_Line (File => Output_File, Spacing => 1); end if; end loop; -- Edges -- For all statements except the precondition for Node in Matrix_Index range 2 .. Nmbr_Of_Stmts loop -- If that node has predecessors if In_Degree (Node) > 0 then -- Then search the coefficients in the Matrix for all -- Predecessors whose Successor is Node. for Predec in Matrix_Index range 1 .. 
Nmbr_Of_Stmts - 1 loop Arc_Found := False; Arc := Column (Node); while (not Arc_Found) and (not Cells.Is_Null_Cell (Arc)) loop if Cells.Get_Natural_Value (Heap, Arc) = Predec then Arc_Found := True; else Arc := Cells.Get_A_Ptr (Heap, Arc); end if; end loop; if Arc_Found then -- Found an arc from Statement Predec to Statement Node Arc_Label := Labels.CellToLabel (Cells.Get_B_Ptr (Heap, Arc)); Current_Pair := Labels.FirstPair (Heap, Arc_Label); loop exit when Pairs.IsNullPair (Current_Pair); SPARK_IO.Put_Integer (File => Output_File, Item => Predec, Width => 0, Base => 10); SPARK_IO.Put_String (File => Output_File, Item => " -> ", Stop => 0); SPARK_IO.Put_Integer (File => Output_File, Item => Node, Width => 0, Base => 10); SPARK_IO.Put_String (File => Output_File, Item => "[style=", Stop => 0); case Proof_Context (Node) is when Check_Statement | Run_Time_Check | Precon_Check => SPARK_IO.Put_String (File => Output_File, Item => "dashed", Stop => 0); when Assertion | Default_Assertion | Postcondition => SPARK_IO.Put_String (File => Output_File, Item => "bold,headport=n,tailport=s", Stop => 0); -- Increase weight for forward edges terminating -- at an assertion of postcondition. if Node > Predec then SPARK_IO.Put_String (File => Output_File, Item => ",weight=8.0", Stop => 0); end if; when others => SPARK_IO.Put_String (File => Output_File, Item => "solid", Stop => 0); end case; case Print_Edges_As is when PFs => SPARK_IO.Put_String (File => Output_File, Item => ",", Stop => 0); Print_PTC; SPARK_IO.Put_String (File => Output_File, Item => ",", Stop => 0); Print_Action; when VCs => SPARK_IO.Put_String (File => Output_File, Item => ",", Stop => 0); Print_VC; end case; SPARK_IO.Put_String (File => Output_File, Item => "];", Stop => 0); SPARK_IO.New_Line (File => Output_File, Spacing => 1); Current_Pair := Labels.NextPair (Heap, Current_Pair); end loop; end if; end loop; end if; end loop; SPARK_IO.Put_Line (File => Output_File, Item => "}", Stop => 0); end if; end Dump_Graph_Dot; ---------------------------------------------------------------------- procedure Gen_VCs (Heap : in out Cells.Heap_Record; Output_File : in SPARK_IO.File_Type; Output_File_Name : in E_Strings.T; Scope : in Dictionary.Scopes; Gen_VC_Failure : out Boolean) --# global in Assertion_Locn; --# in CommandLineData.Content; --# in Nmbr_Of_Stmts; --# in Proof_Context; --# in out Column; --# in out In_Degree; --# in out Out_Degree; --# in out Row; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Column, --# Gen_VC_Failure, --# In_Degree, --# Out_Degree, --# Row from Column, --# CommandLineData.Content, --# Heap, --# In_Degree, --# Nmbr_Of_Stmts, --# Out_Degree, --# Proof_Context, --# Row & --# Heap, --# Statistics.TableUsage from *, --# Assertion_Locn, --# Column, --# CommandLineData.Content, --# Heap, --# In_Degree, --# Nmbr_Of_Stmts, --# Out_Degree, --# Proof_Context, --# Row & --# SPARK_IO.File_Sys from *, --# Column, --# CommandLineData.Content, --# Heap, --# In_Degree, --# Nmbr_Of_Stmts, --# Output_File, --# Out_Degree, --# Proof_Context, --# Row & --# null from Output_File_Name, --# Scope; is Arc_Label : Labels.Label; Assertion_Copy, Conjunction, Current_Predicate, Matrix_Element, WP : Cells.Cell; Current_Pair : Pairs.Pair; Failure : Boolean; Initial_Node : Matrix_Index; Graph_Suffix : Natural; function Is_Check_Statement (X : Proof_Context_Type) return Boolean is begin return X = Check_Statement or else X = Run_Time_Check or else X = Precon_Check; end Is_Check_Statement; function Is_Assert_Statement 
(X : Proof_Context_Type) return Boolean is begin return X = Assertion or else X = Default_Assertion; end Is_Assert_Statement; -- deletes pointers to A(I, J); procedure Delete_Coeff (Heap : in out Cells.Heap_Record; I, J : in Matrix_Index) --# global in out Column; --# in out In_Degree; --# in out Out_Degree; --# in out Row; --# derives Column, --# Row from *, --# Heap, --# I, --# J & --# Heap from *, --# Column, --# I, --# J, --# Row & --# In_Degree from *, --# J & --# Out_Degree from *, --# I; is L1, M1, L2, M2 : Cells.Cell; begin -- Delete row pointer; L1 := Row (I); if Cells.Get_Natural_Value (Heap, L1) = J then Row (I) := Cells.Get_A_Ptr (Heap, L1); else loop M1 := L1; L1 := Cells.Get_A_Ptr (Heap, L1); exit when Cells.Get_Natural_Value (Heap, L1) = J; end loop; Cells.Set_A_Ptr (Heap, M1, Cells.Get_A_Ptr (Heap, L1)); end if; Out_Degree (I) := Out_Degree (I) - 1; -- delete column pointer; L2 := Column (J); if Cells.Get_Natural_Value (Heap, L2) = I then Column (J) := Cells.Get_A_Ptr (Heap, L2); else loop M2 := L2; L2 := Cells.Get_A_Ptr (Heap, L2); exit when Cells.Get_Natural_Value (Heap, L2) = I; end loop; Cells.Set_A_Ptr (Heap, M2, Cells.Get_A_Ptr (Heap, L2)); end if; Cells.Dispose_Of_Cell (Heap, L1); Cells.Dispose_Of_Cell (Heap, L2); In_Degree (J) := In_Degree (J) - 1; --# accept F, 601, Column, Row, "False coupling OK"; end Delete_Coeff; ----------------------------------------------------------------- -- Partially eliminate statement K of program, -- where K denotes a Check statement -- -- For each sequence of paths I -> K -> J, replace -- this with I -> J with a Label formed from the -- Product of LabelIK and LabelKJ BUT -- leave paths I -> K remaining in place. -- -- Repeat until all successors of K -- have been considered, at which point K will have -- no remaining successors. ----------------------------------------------------------------- procedure Partial_Eliminate (K : in Matrix_Index) --# global in Nmbr_Of_Stmts; --# in Proof_Context; --# in out Column; --# in out Heap; --# in out In_Degree; --# in out Out_Degree; --# in out Row; --# in out Statistics.TableUsage; --# derives Column, --# Heap, --# In_Degree, --# Out_Degree, --# Row, --# Statistics.TableUsage from *, --# Column, --# Heap, --# K, --# Nmbr_Of_Stmts, --# Row & --# null from Proof_Context; is P1, P2, Product : Labels.Label; begin SystemErrors.RT_Assert (C => Is_Check_Statement (X => Proof_Context (K)), Sys_Err => SystemErrors.Precondition_Failure, Msg => "Trying to Partial_Eliminate a node which isn't a Check"); -- For all statements J except the Precondition... for J in Matrix_Index range 2 .. Nmbr_Of_Stmts loop -- If J is a successor of K if not Labels.IsNull (Coefficient (Heap => Heap, I => K, J => J)) then -- For all statements I except the Postcondition for I in Matrix_Index range 1 .. Nmbr_Of_Stmts - 1 loop -- If I is a predecessor of K if not Labels.IsNull (Coefficient (Heap => Heap, I => I, J => K)) then -- I is a predecessor of K; -- form product, taking deep copies of the Labels -- on the paths from I to K and K to J first: Labels.CopyLabel (Heap, Coefficient (Heap => Heap, I => I, J => K), P1); Labels.CopyLabel (Heap, Coefficient (Heap => Heap, I => K, J => J), P2); Labels.MultiplyLabels (Heap, P1, P2, Product); -- Check to see if an existing path from -- I to J already exists.
if Labels.IsNull (Coefficient (Heap => Heap, I => I, J => J)) then -- no existing path, so just add a new path from -- I to J with Label Product Create_Coeff (Heap => Heap, I => I, J => J, K => Product); else -- Existing path from I to J, so add Product to its -- Label Labels.AddLabels (Heap, Coefficient (Heap => Heap, I => I, J => J), Product); end if; end if; end loop; -- Once we've dealt with all the predecessors I that form -- paths from I to J via K for a specific Label from K to J, -- we can delete the Label from K to J, before going -- on to consider the next successor J. Delete_Coeff (Heap => Heap, I => K, J => J); -- NOTE that we DON'T delete the original Labels I -> K here, -- since these form the VCs for "all paths reaching a check" -- that we need. This is why this is _partial_ eliminate of -- statement K end if; end loop; SystemErrors.RT_Assert (C => Out_Degree (K) = 0, Sys_Err => SystemErrors.Postcondition_Failure, Msg => "Out_Degree of node is not zero after Partial_Eliminate"); end Partial_Eliminate; -------------------------------------------------------------------------- -- Eliminate statement K of program. -- -- For each sequence of paths I -> K -> J, replace -- this with I -> J with a Label formed from the -- Product of LabelIK and LabelKJ. -- -- Repeat until all predecessors and successors of K -- have been considered, at which point K will have -- no remaining predecessors and successors, effectively -- removing it from the BPG. -------------------------------------------------------------------------- procedure Eliminate (Heap : in out Cells.Heap_Record; K : in Matrix_Index) --# global in Nmbr_Of_Stmts; --# in Proof_Context; --# in out Column; --# in out In_Degree; --# in out Out_Degree; --# in out Row; --# in out Statistics.TableUsage; --# derives Column, --# Heap, --# In_Degree, --# Out_Degree, --# Row, --# Statistics.TableUsage from *, --# Column, --# Heap, --# In_Degree, --# K, --# Nmbr_Of_Stmts, --# Out_Degree, --# Row & --# null from Proof_Context; is P1, P2, Product : Labels.Label; begin SystemErrors.RT_Assert (C => Proof_Context (K) = Unspecified, Sys_Err => SystemErrors.Precondition_Failure, Msg => "Trying to eliminate a node which isn't UNSPECIFIED"); -- For each statement for I in Matrix_Index range 1 .. Nmbr_Of_Stmts - 1 loop if not Labels.IsNull (Coefficient (Heap => Heap, I => I, J => K)) then -- I is a predecessor of K, since the Label connecting I to K is not null for J in Matrix_Index range 2 .. Nmbr_Of_Stmts loop if not Labels.IsNull (Coefficient (Heap => Heap, I => K, J => J)) then -- J is a successor of K, since the Label connecting K to J is not null -- We've found two nodes I and J such that I is a predecessor -- of K and J is a successor of K in the BPG. Graphically, we've -- found a sequence such as: -- I -> K -> J -- in the BPG.
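-- For example (an illustrative sketch, with P and Q standing for the two Labels involved): if the only arcs through K are I -> K carrying P and K -> J carrying Q, the steps below form the product P * Q and attach it to the arc I -> J, creating that arc if it does not yet exist or adding the product to its existing Label otherwise; once every such (I, J) pair has been handled, K has no remaining predecessors or successors and is effectively removed from the BPG.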
if Out_Degree (K) = 1 then -- J is last successor, so take a shallow -- copy of its content into P1 P1 := Coefficient (Heap => Heap, I => I, J => K); else -- Not the last successor, so take a deep -- copy into P1 Labels.CopyLabel (Heap, Coefficient (Heap => Heap, I => I, J => K), P1); end if; if In_Degree (K) = 1 then -- I is the last predecessor, so take a shallow -- copy of its content into P2 P2 := Coefficient (Heap => Heap, I => K, J => J); -- If I is the final predecessor, then we won't -- be needing the information regarding successor J again, -- so that Coeff can now be deleted from the BPG Delete_Coeff (Heap => Heap, I => K, J => J); else -- Not the last predecessor, so take a deep copy into P2 Labels.CopyLabel (Heap, Coefficient (Heap => Heap, I => K, J => J), P2); end if; -- Form the Product of P1 and P2 Labels.MultiplyLabels (Heap, P1, P2, Product); -- Check to see if an existing path from -- I to J already exists. if Labels.IsNull (Coefficient (Heap => Heap, I => I, J => J)) then -- no existing path, so just add a new path from -- I to J with Label Product Create_Coeff (Heap => Heap, I => I, J => J, K => Product); else -- Existing path from I to J, so add Product to its -- Label Labels.AddLabels (Heap, Coefficient (Heap => Heap, I => I, J => J), Product); end if; end if; end loop; -- Finally, having created or augmented a Label for the path from I to J, -- the path from I to K can be deleted. Delete_Coeff (Heap => Heap, I => I, J => K); end if; end loop; -- After elimination, statement K should have both -- In_Degree and Out_Degree set to 0 - i.e. no predecessors -- and no successors SystemErrors.RT_Assert (C => In_Degree (K) = 0, Sys_Err => SystemErrors.Postcondition_Failure, Msg => "In_Degree of node is not zero after Eliminate"); SystemErrors.RT_Assert (C => Out_Degree (K) = 0, Sys_Err => SystemErrors.Postcondition_Failure, Msg => "Out_Degree of node is not zero after Eliminate"); end Eliminate; begin -- Gen_VCs Failure := False; Graph_Suffix := 1; for K in Matrix_Index range 2 .. Nmbr_Of_Stmts - 1 loop -- HTML Directives --! --! <"!!! "> --! program-has-a-cyclic-path-without-an-assertion --! Program has a cyclic path without an assertion. --! SPARK generates VCs for paths between cutpoints in the code; these must --! be chosen by the developer in such a way that every loop traverses at --! least one cutpoint. If the SPARK --! Examiner detects a loop which is not broken by a cutpoint, --! it cannot generate verification --! conditions for the subprogram in which the loop is located, --! and instead, issues this --! warning. This can only be corrected by formulating a suitable --! loop-invariant assertion for --! the loop and including it as an assertion in the SPARK text --! at the appropriate point. if not Labels.IsNull (Coefficient (Heap => Heap, I => K, J => K)) then SPARK_IO.New_Line (File => Output_File, Spacing => 1); SPARK_IO.Put_Line (File => Output_File, Item => "!!! Program has a cyclic path without an assertion.", Stop => 0); Failure := True; exit; end if; if Is_Check_Statement (X => Proof_Context (K)) then -- Explicit Check, Runtime Check, or Precondition Check Partial_Eliminate (K => K); elsif not Is_Assert_Statement (X => Proof_Context (K)) then -- Not a Check nor an Assert of any kind.
-- Can't be Precondition or PostCondition since K cannot -- denote these given the range of the enclosing loop, so must be -- Unspecified Eliminate (Heap => Heap, K => K); end if; --# accept F, 41, "Stable expression expected here"; if CommandLineData.Content.Debug.VCG_All then Dump_Graph_Dot (Heap => Heap, Output_File_Name => Output_File_Name, Output_File_Name_Suffix => Graph_Suffix, Scope => Scope, Print_Edges_As => PFs); end if; --# end accept; Graph_Suffix := Graph_Suffix + 1; end loop; -- We now have a BPG with all UNSPECIFIED nodes removed - leaving only -- explicit assertions, the pre-condition, the post-condition and checks. -- Each arc is labelled with its path-traversal condition and action like -- a path-function. if not Failure then -- To generate verification conditions, we do one final application -- of the assignment axiom to generate the VC, which is essentially -- (Precondition and PTC) -> Postcondition (Action) for K in Matrix_Index range 2 .. Nmbr_Of_Stmts loop if In_Degree (K) > 0 then Matrix_Element := Column (K); while not Cells.Is_Null_Cell (Matrix_Element) loop Initial_Node := Cells.Get_Natural_Value (Heap, Matrix_Element); Arc_Label := Labels.CellToLabel (Cells.Get_B_Ptr (Heap, Matrix_Element)); Current_Pair := Labels.FirstPair (Heap, Arc_Label); while not Pairs.IsNullPair (Current_Pair) loop -- replace path traversal condition p of a pair (p, R) by the -- predicate a /\ p, where a is the assertion at the beginning -- of the path represented by (p, R); Structures.CopyStructure (Heap, Assertion_Locn (Initial_Node), Assertion_Copy); if Pairs.IsTrue (Heap, Current_Pair) then Cells.Set_B_Ptr (Heap, Pairs.PairHead (Current_Pair), Assertion_Copy); else Current_Predicate := Cells.Get_B_Ptr (Heap, Pairs.PairHead (Current_Pair)); Pairs.FormConjunction (Heap, Assertion_Copy, Current_Predicate, Conjunction); Cells.Set_B_Ptr (Heap, Pairs.PairHead (Current_Pair), Conjunction); end if; -- replace action part R of a pair (p, R) by weakest pre- -- condition WP = q!R, where q is the assertion at the end of the -- path represented by (p, R); Structures.CopyStructure (Heap, Assertion_Locn (K), Assertion_Copy); if Pairs.IsUnitAction (Heap, Current_Pair) then Cells.Set_C_Ptr (Heap, Pairs.PairHead (Current_Pair), Assertion_Copy); else Pairs.CombinePredicateWithAction (Heap => Heap, Action_R => Cells.Get_C_Ptr (Heap, Pairs.PairHead (Current_Pair)), Predicate_q => Assertion_Copy, Result => WP); Cells.Set_C_Ptr (Heap, Pairs.PairHead (Current_Pair), WP); end if; Current_Pair := Labels.NextPair (Heap, Current_Pair); end loop; Matrix_Element := Cells.Get_A_Ptr (Heap, Matrix_Element); end loop; end if; end loop; -- Finally, if requested, print out the BPG with VCs on each arc.
if CommandLineData.Content.Debug.VCG_All then Dump_Graph_Dot (Heap => Heap, Output_File_Name => Output_File_Name, Output_File_Name_Suffix => Graph_Suffix, Scope => Scope, Print_Edges_As => VCs); end if; end if; Gen_VC_Failure := Failure; end Gen_VCs; ------------------------------------------------------------------------ procedure Print_VCs_Or_DPCs (Heap : in out Cells.Heap_Record; Output_File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Kind : in Valid_Dump_Kind) --# global in Column; --# in CommandLineData.Content; --# in Dictionary.Dict; --# in In_Degree; --# in Nmbr_Of_Stmts; --# in Proof_Context; --# in Refinement_Post_Check; --# in Refinement_Pre_Check; --# in Subclass_Post_Check; --# in Subclass_Pre_Check; --# in Text_Line_Nmbr; --# in out Declarations.State; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Declarations.State, --# Heap, --# LexTokenManager.State, --# Statistics.TableUsage from *, --# Column, --# Declarations.State, --# Dictionary.Dict, --# Heap, --# In_Degree, --# Kind, --# LexTokenManager.State, --# Nmbr_Of_Stmts, --# Proof_Context, --# Refinement_Post_Check, --# Refinement_Pre_Check, --# Subclass_Post_Check, --# Subclass_Pre_Check & --# SPARK_IO.File_Sys from *, --# Column, --# CommandLineData.Content, --# Declarations.State, --# Dictionary.Dict, --# Heap, --# In_Degree, --# Kind, --# LexTokenManager.State, --# Nmbr_Of_Stmts, --# Output_File, --# Proof_Context, --# Refinement_Post_Check, --# Refinement_Pre_Check, --# Scope, --# Subclass_Post_Check, --# Subclass_Pre_Check, --# Text_Line_Nmbr; is Arc : Cells.Cell; Arc_Label : Labels.Label; Current_Pair : Pairs.Pair; VC_Counter : Natural; Arc_Found : Boolean; Lex_String : LexTokenManager.Lex_String; Sub_Prog_String : E_Strings.T; -------------------------------------------------------------- procedure Print_Subprog_Prefix --# global in Dictionary.Dict; --# in Output_File; --# in Scope; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Dictionary.Dict, --# Output_File, --# Scope; is begin if Dictionary.IsFunction (Dictionary.GetRegion (Scope)) then SPARK_IO.Put_String (File => Output_File, Item => "function_", Stop => 0); elsif Dictionary.IsProcedure (Dictionary.GetRegion (Scope)) then SPARK_IO.Put_String (File => Output_File, Item => "procedure_", Stop => 0); elsif Dictionary.IsTaskType (Dictionary.GetRegion (Scope)) then SPARK_IO.Put_String (File => Output_File, Item => "task_type_", Stop => 0); end if; end Print_Subprog_Prefix; ------------------------------------------------------------- procedure Print_Refinement_Checks (Heap : in out Cells.Heap_Record; Counter : in Natural) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in Output_File; --# in Refinement_Post_Check; --# in Refinement_Pre_Check; --# in Scope; --# in Subclass_Post_Check; --# in Subclass_Pre_Check; --# in Sub_Prog_String; --# in out Declarations.State; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Declarations.State, --# Heap, --# Statistics.TableUsage from *, --# Counter, --# Declarations.State, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Refinement_Post_Check, --# Refinement_Pre_Check, --# Subclass_Post_Check, --# Subclass_Pre_Check & --# LexTokenManager.State from *, --# Counter, --# Refinement_Post_Check, --# Refinement_Pre_Check, --# Subclass_Post_Check, --# Subclass_Pre_Check & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Counter, 
--# Declarations.State, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Output_File, --# Refinement_Post_Check, --# Refinement_Pre_Check, --# Scope, --# Subclass_Post_Check, --# Subclass_Pre_Check, --# Sub_Prog_String; is Counter_Local : Natural; Lex_String : LexTokenManager.Lex_String; begin Counter_Local := Counter; if not (Cells.Is_Null_Cell (Refinement_Pre_Check) and then Cells.Is_Null_Cell (Refinement_Post_Check)) then -- refinement VCs are needed SPARK_IO.Put_Line (File => Output_File, Item => "For checks of refinement integrity: ", Stop => 0); SPARK_IO.New_Line (File => Output_File, Spacing => 1); -- mark VC with unique hash code -- IO_Routines.HashVCFormula (Heap, -- Output_File, -- Pairs.CellToPair (Refinement_Pre_Check), -- Scope); Print_Subprog_Prefix; E_Strings.Put_String (File => Output_File, E_Str => Sub_Prog_String); SPARK_IO.Put_Char (File => Output_File, Item => '_'); LexTokenManager.Insert_Nat (N => Counter_Local, Lex_Str => Lex_String); E_Strings.Put_String (File => Output_File, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Lex_String)); SPARK_IO.Put_Line (File => Output_File, Item => ".", Stop => 0); DAG_IO.PrintVCFormula (Heap, Output_File, Pairs.CellToPair (Refinement_Pre_Check), Scope, DAG_IO.Default_Wrap_Limit); Declarations.FindVCFormulaDeclarations (Heap, Pairs.CellToPair (Refinement_Pre_Check), True); SPARK_IO.New_Line (File => Output_File, Spacing => 1); if not (Cells.Is_Null_Cell (Refinement_Post_Check)) then Counter_Local := Counter_Local + 1; -- mark VC with unique hash code -- IO_Routines.HashVCFormula (Heap, -- Output_File, -- Pairs.CellToPair (Refinement_Post_Check), -- Scope); Print_Subprog_Prefix; E_Strings.Put_String (File => Output_File, E_Str => Sub_Prog_String); SPARK_IO.Put_Char (File => Output_File, Item => '_'); LexTokenManager.Insert_Nat (N => Counter_Local, Lex_Str => Lex_String); E_Strings.Put_String (File => Output_File, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Lex_String)); SPARK_IO.Put_Line (File => Output_File, Item => ".", Stop => 0); DAG_IO.PrintVCFormula (Heap, Output_File, Pairs.CellToPair (Refinement_Post_Check), Scope, DAG_IO.Default_Wrap_Limit); Declarations.FindVCFormulaDeclarations (Heap, Pairs.CellToPair (Refinement_Post_Check), True); SPARK_IO.New_Line (File => Output_File, Spacing => 1); Counter_Local := Counter_Local + 1; -- for the benefit of subclass check that follows end if; end if; -- do subclass refinement checks if needed if not (Cells.Is_Null_Cell (Subclass_Pre_Check) and then Cells.Is_Null_Cell (Subclass_Post_Check)) then SPARK_IO.Put_Line (File => Output_File, Item => "For checks of subclass inheritance integrity: ", Stop => 0); SPARK_IO.New_Line (File => Output_File, Spacing => 1); -- mark VC with unique hash code -- IO_Routines.HashVCFormula (Heap, -- Output_File, -- Pairs.CellToPair (Subclass_Pre_Check), -- Scope); Print_Subprog_Prefix; E_Strings.Put_String (File => Output_File, E_Str => Sub_Prog_String); SPARK_IO.Put_Char (File => Output_File, Item => '_'); LexTokenManager.Insert_Nat (N => Counter_Local, Lex_Str => Lex_String); E_Strings.Put_String (File => Output_File, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Lex_String)); SPARK_IO.Put_Line (File => Output_File, Item => ".", Stop => 0); DAG_IO.PrintVCFormula (Heap, Output_File, Pairs.CellToPair (Subclass_Pre_Check), Scope, DAG_IO.Default_Wrap_Limit); Declarations.FindVCFormulaDeclarations (Heap, Pairs.CellToPair (Subclass_Pre_Check), True); SPARK_IO.New_Line (File => Output_File, Spacing => 1); if not
Cells.Is_Null_Cell (Subclass_Post_Check) then -- mark VC with unique hash code -- IO_Routines.HashVCFormula (Heap, -- Output_File, -- Pairs.CellToPair (Subclass_Post_Check), -- Scope); Counter_Local := Counter_Local + 1; Print_Subprog_Prefix; E_Strings.Put_String (File => Output_File, E_Str => Sub_Prog_String); SPARK_IO.Put_Char (File => Output_File, Item => '_'); LexTokenManager.Insert_Nat (N => Counter_Local, Lex_Str => Lex_String); E_Strings.Put_String (File => Output_File, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Lex_String)); SPARK_IO.Put_Line (File => Output_File, Item => ".", Stop => 0); DAG_IO.PrintVCFormula (Heap, Output_File, Pairs.CellToPair (Subclass_Post_Check), Scope, DAG_IO.Default_Wrap_Limit); Declarations.FindVCFormulaDeclarations (Heap, Pairs.CellToPair (Subclass_Post_Check), True); SPARK_IO.New_Line (File => Output_File, Spacing => 1); end if; end if; end Print_Refinement_Checks; ------------------------------------------------------------- begin -- Print_VCs_Or_DPCs Sub_Prog_String := E_Strings.Lower_Case (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Dictionary.GetSimpleName (Dictionary.GetRegion (Scope)))); SPARK_IO.New_Line (File => Output_File, Spacing => 2); VC_Counter := 0; for Node in Matrix_Index range 2 .. Nmbr_Of_Stmts loop if In_Degree (Node) > 0 then for Predec in Matrix_Index range 1 .. Nmbr_Of_Stmts - 1 loop Arc_Found := False; Arc := Column (Node); while (not Arc_Found) and (not Cells.Is_Null_Cell (Arc)) loop if Cells.Get_Natural_Value (Heap, Arc) = Predec then Arc_Found := True; else Arc := Cells.Get_A_Ptr (Heap, Arc); end if; end loop; if Arc_Found then SPARK_IO.Put_String (File => Output_File, Item => "For path(s) from ", Stop => 0); if Predec = 1 then SPARK_IO.Put_String (File => Output_File, Item => "start", Stop => 0); else if Proof_Context (Predec) = Assertion then SPARK_IO.Put_String (File => Output_File, Item => "assertion of line ", Stop => 0); elsif Proof_Context (Predec) = Default_Assertion then SPARK_IO.Put_String (File => Output_File, Item => "default assertion of line ", Stop => 0); else -- error case, above two cover all legal cases SPARK_IO.Put_String (File => Output_File, Item => "!!!unknown assertion of line ", Stop => 0); end if; SPARK_IO.Put_Integer (File => Output_File, Item => Text_Line_Nmbr (Predec), Width => 1, Base => 10); end if; SPARK_IO.Put_String (File => Output_File, Item => " to ", Stop => 0); --# accept F, 41, "Stable expression here OK"; case Proof_Context (Node) is when Assertion => SPARK_IO.Put_String (File => Output_File, Item => "assertion of line ", Stop => 0); SPARK_IO.Put_Integer (File => Output_File, Item => Text_Line_Nmbr (Node), Width => 1, Base => 10); when Default_Assertion => SPARK_IO.Put_String (File => Output_File, Item => "default assertion of line ", Stop => 0); SPARK_IO.Put_Integer (File => Output_File, Item => Text_Line_Nmbr (Node), Width => 1, Base => 10); when Check_Statement => SPARK_IO.Put_String (File => Output_File, Item => "check associated with statement of line ", Stop => 0); SPARK_IO.Put_Integer (File => Output_File, Item => Text_Line_Nmbr (Node), Width => 1, Base => 10); when Run_Time_Check => SPARK_IO.Put_String (File => Output_File, Item => "run-time check associated with statement of line ", Stop => 0); SPARK_IO.Put_Integer (File => Output_File, Item => Text_Line_Nmbr (Node), Width => 1, Base => 10); when Precon_Check => SPARK_IO.Put_String (File => Output_File, Item => "precondition check associated with statement of line ", Stop => 0); SPARK_IO.Put_Integer 
(File => Output_File, Item => Text_Line_Nmbr (Node), Width => 1, Base => 10); when Postcondition => SPARK_IO.Put_String (File => Output_File, Item => "finish", Stop => 0); when Precondition | Unspecified => null; end case; --# end accept; SPARK_IO.Put_Line (File => Output_File, Item => ":", Stop => 0); SPARK_IO.New_Line (File => Output_File, Spacing => 1); Arc_Label := Labels.CellToLabel (Cells.Get_B_Ptr (Heap, Arc)); Current_Pair := Labels.FirstPair (Heap, Arc_Label); while not Pairs.IsNullPair (Current_Pair) loop -- IO_Routines.HashVCFormula (Heap, -- Output_File, -- Current_Pair, -- Scope); Print_Subprog_Prefix; E_Strings.Put_String (File => Output_File, E_Str => Sub_Prog_String); SPARK_IO.Put_Char (File => Output_File, Item => '_'); VC_Counter := VC_Counter + 1; LexTokenManager.Insert_Nat (N => VC_Counter, Lex_Str => Lex_String); E_Strings.Put_String (File => Output_File, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Lex_String)); SPARK_IO.Put_Line (File => Output_File, Item => ".", Stop => 0); --# accept F, 41, "Stable expression here expected and OK"; case Kind is when VCs => DAG_IO.PrintVCFormula (Heap, Output_File, Current_Pair, Scope, DAG_IO.Default_Wrap_Limit); when DPCs => case Proof_Context (Node) is when Assertion | Default_Assertion | Postcondition => DAG_IO.PrintDPC (Heap, Output_File, Current_Pair, Scope, DAG_IO.Default_Wrap_Limit); when others => SPARK_IO.Put_Line (File => Output_File, Item => "*** true . /* DPC not required for intermediate check */", Stop => 0); SPARK_IO.New_Line (File => Output_File, Spacing => 1); end case; end case; --# end accept; -- Find the FDL declarations needed. If we're printing DPCs, then DON'T -- ignore trivially True VCs, since we _will_ produce the hypotheses -- list for these, so we do need FDL declarations for any entities therein. Declarations.FindVCFormulaDeclarations (Heap => Heap, PredicatePair => Current_Pair, IgnoreTriviallyTrueVCs => (Kind = VCs)); Current_Pair := Labels.NextPair (Heap, Current_Pair); SPARK_IO.New_Line (File => Output_File, Spacing => 1); end loop; end if; end loop; end if; end loop; case Kind is when VCs => Print_Refinement_Checks (Heap => Heap, Counter => VC_Counter + 1); when DPCs => null; end case; end Print_VCs_Or_DPCs; procedure Dump_Graph_Table (Heap : in out Cells.Heap_Record; Scope : in Dictionary.Scopes; Print_Edges_As : in DOT_Dump_Kind) --# global in Assertion_Locn; --# in Column; --# in In_Degree; --# in Nmbr_Of_Stmts; --# in Out_Degree; --# in Proof_Context; --# in Refinement_Post_Check; --# in Refinement_Pre_Check; --# in Row; --# in Subclass_Post_Check; --# in Subclass_Pre_Check; --# in Text_Line_Nmbr; --# derives Heap from * & --# null from Assertion_Locn, --# Column, --# In_Degree, --# Nmbr_Of_Stmts, --# Out_Degree, --# Print_Edges_As, --# Proof_Context, --# Refinement_Post_Check, --# Refinement_Pre_Check, --# Row, --# Scope, --# Subclass_Post_Check, --# Subclass_Pre_Check, --# Text_Line_Nmbr; is --# hide Dump_Graph_Table; Arc : Cells.Cell; Arc_Label : Labels.Label; Current_Pair : Pairs.Pair; VC_Counter : Natural; Arc_Found : Boolean; begin Debug.PrintInt ("Number of Statements is: ", Integer (Nmbr_Of_Stmts)); for I in Matrix_Index range 1 .. 
Nmbr_Of_Stmts loop if In_Degree (I) = 0 and Out_Degree (I) = 0 then Debug.PrintMsg ("Statement" & Integer'Image (I) & " not connected", True); else Debug.PrintInt ("Statement", I); Debug.PrintMsg (" Proof Context = " & Proof_Context_Type'Image (Proof_Context (I)), True); Debug.PrintInt (" Text Line Number =", Text_Line_Nmbr (I)); Debug.PrintInt (" In Degree =", Integer (In_Degree (I))); Debug.PrintInt (" Out Degree =", Integer (Out_Degree (I))); Debug.PrintDAG (" Assertion Locn = ", Assertion_Locn (I), Heap, Scope); end if; end loop; -- Now Dump each arc represented by each coefficient in the Matrix itself -- Basically the same algorithm as Print_VCs above. VC_Counter := 0; -- For all statements except the precondition for Node in Matrix_Index range 2 .. Nmbr_Of_Stmts loop -- If that node has predecessors if In_Degree (Node) > 0 then -- Then search the coefficients in the Matrix for all -- Predecessors whose Successor is Node. for Predec in Matrix_Index range 1 .. Nmbr_Of_Stmts - 1 loop Arc_Found := False; Arc := Column (Node); while (not Arc_Found) and (not Cells.Is_Null_Cell (Arc)) loop if Cells.Get_Natural_Value (Heap, Arc) = Predec then Arc_Found := True; else Arc := Cells.Get_A_Ptr (Heap, Arc); end if; end loop; if Arc_Found then -- Found an arc from Statement Predec to Statement Node Debug.PrintMsg ("Found an arc from Stm" & Integer'Image (Predec) & " to Stm" & Integer'Image (Node), True); -- Fetch the Label associated with that arc Arc_Label := Labels.CellToLabel (Cells.Get_B_Ptr (Heap, Arc)); case Print_Edges_As is when PFs => DAG_IO.PrintLabel (Heap, SPARK_IO.Standard_Output, Arc_Label, Scope, DAG_IO.Default_Wrap_Limit); when VCs => -- Fetch the first Pair attached to that Label Current_Pair := Labels.FirstPair (Heap, Arc_Label); while not Pairs.IsNullPair (Current_Pair) loop VC_Counter := VC_Counter + 1; Debug.PrintInt ("Pair", VC_Counter); DAG_IO.PrintVCFormula (Heap, SPARK_IO.Standard_Output, Current_Pair, Scope, DAG_IO.Default_Wrap_Limit); Current_Pair := Labels.NextPair (Heap, Current_Pair); end loop; end case; end if; end loop; end if; end loop; end Dump_Graph_Table; begin -- This code matches that in Reinitialize_Graph --# accept F, 23, Row, "Initialization is total" & --# F, 23, Column, "Initialization is total" & --# F, 23, In_Degree, "Initialization is total" & --# F, 23, Out_Degree, "Initialization is total" & --# F, 23, Proof_Context, "Initialization is total" & --# F, 23, Text_Line_Nmbr, "Initialization is total" & --# F, 23, Assertion_Locn, "Initialization is total"; for I in Matrix_Index loop Row (I) := Cells.Null_Cell; Column (I) := Cells.Null_Cell; In_Degree (I) := 0; Out_Degree (I) := 0; Proof_Context (I) := Unspecified; Text_Line_Nmbr (I) := 0; Assertion_Locn (I) := Cells.Null_Cell; end loop; Nmbr_Of_Stmts := 1; Refinement_Pre_Check := Cells.Null_Cell; Refinement_Post_Check := Cells.Null_Cell; Subclass_Pre_Check := Cells.Null_Cell; Subclass_Post_Check := Cells.Null_Cell; --# accept F, 602, Row, Row, "Initialization is total" & --# F, 602, Column, Column, "Initialization is total" & --# F, 602, In_Degree, In_Degree, "Initialization is total" & --# F, 602, Out_Degree, Out_Degree, "Initialization is total" & --# F, 602, Proof_Context, Proof_Context, "Initialization is total" & --# F, 602, Text_Line_Nmbr, Text_Line_Nmbr, "Initialization is total" & --# F, 602, Assertion_Locn, Assertion_Locn, "Initialization is total"; end Graph; ././@LongLink0000000000000000000000000000020500000000000011562 Lustar 
rootrootspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_known_discriminant_part.adbspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaratio0000644000175000017500000003026411753202336033125 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Full_Type_Declaration) procedure Wf_Known_Discriminant_Part (Node : in STree.SyntaxNode; Protected_Type_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) is -------------------------------------------------------------------------------------------------- -- Rules: -- (1) identifier not already visible -- (2) access -> type_mark is protected type (or susp obj type later) -- (3) not access -> type is discrete -------------------------------------------------------------------------------------------------- It : STree.Iterator; Next_Node : STree.SyntaxNode; procedure Check_Discriminant (Node : in STree.SyntaxNode; Protected_Type_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# LexTokenManager.State, --# Node, --# Protected_Type_Sym, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Protected_Type_Sym, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# STree.Table from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.discriminant_specification; --# post STree.Table = STree.Table~; is Type_Sym : Dictionary.Symbol; Is_Access : Boolean; Type_Node, Ident_Node : STree.SyntaxNode; procedure Check_Identifiers (Node : in STree.SyntaxNode; Type_Mark : in Dictionary.Symbol; Protected_Type_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in STree.Table; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# CommandLineData.Content, --# 
ContextManager.Ops.Unit_Stack, --# LexTokenManager.State, --# Node, --# Protected_Type_Sym, --# Scope, --# STree.Table, --# Type_Mark & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Protected_Type_Sym, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# Type_Mark; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.identifier_list; is It : STree.Iterator; Next_Node : STree.SyntaxNode; Ident_Str : LexTokenManager.Lex_String; Sym : Dictionary.Symbol; begin It := Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.identifier and --# Next_Node = Get_Node (It); Ident_Str := Node_Lex_String (Node => Next_Node); Sym := Dictionary.LookupItem (Name => Ident_Str, Scope => Scope, Context => Dictionary.ProofContext, Full_Package_Name => False); if Dictionary.Is_Null_Symbol (Sym) then Dictionary.AddKnownDiscriminant (Name => Ident_Str, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Next_Node), End_Position => Node_Position (Node => Next_Node)), ProtectedOrTaskType => Protected_Type_Sym, TypeMark => Type_Mark); else -- already exists ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Ident_Str); end if; It := STree.NextNode (It); end loop; end Check_Identifiers; begin -- Check_Discriminant -- check type mark is valid Is_Access := False; Type_Node := Next_Sibling (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME Type_Node = type_mark OR access_definition if Syntax_Node_Type (Node => Type_Node) = SP_Symbols.access_definition then -- ASSUME Type_Node = access_definition Is_Access := True; Type_Node := Child_Node (Current_Node => Type_Node); elsif Syntax_Node_Type (Node => Type_Node) /= SP_Symbols.type_mark then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Type_Node = type_mark OR access_definition in Check_Discriminant"); end if; -- ASSUME Type_Node = type_mark SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Type_Node) = SP_Symbols.type_mark, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Type_Node = type_mark in Check_Discriminant"); Wf_Type_Mark (Node => Type_Node, Current_Scope => Scope, Context => Dictionary.ProgramContext, Type_Sym => Type_Sym); if not Dictionary.IsUnknownTypeMark (Type_Sym) then if Is_Access then -- only a protected type is allowed if Dictionary.IsProtectedTypeMark (Type_Sym) then if Dictionary.Types_Are_Equal (Left_Symbol => Type_Sym, Right_Symbol => Protected_Type_Sym, Full_Range_Subtype => False) then -- "recursive" use in discriminant Type_Sym := Dictionary.GetUnknownTypeMark; -- This error cannot be checked because access types are not -- allowed as descriminants ErrorHandler.Semantic_Error (Err_Num => 902, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Type_Node), Id_Str => Dictionary.GetSimpleName (Type_Sym)); else Type_Sym := Dictionary.GetAccess (Type_Sym); end if; else Type_Sym := Dictionary.GetUnknownTypeMark; ErrorHandler.Semantic_Error (Err_Num => 875, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Type_Node), Id_Str => 
LexTokenManager.Null_String); end if; else -- only a discrete type is allowed if not Dictionary.TypeIsDiscrete (Type_Sym) then Type_Sym := Dictionary.GetUnknownTypeMark; ErrorHandler.Semantic_Error (Err_Num => 46, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Type_Node), Id_Str => LexTokenManager.Null_String); end if; end if; end if; -- UnknownType -- check each identifier associated with type Ident_Node := Child_Node (Current_Node => Node); -- ASSUME Ident_Node = identifier_list SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier_list, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier_list in Check_Discriminant"); Check_Identifiers (Node => Ident_Node, Type_Mark => Type_Sym, Protected_Type_Sym => Protected_Type_Sym, Scope => Scope); end Check_Discriminant; begin -- Wf_Known_Discriminant_Part It := Find_First_Node (Node_Kind => SP_Symbols.discriminant_specification, From_Root => Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.discriminant_specification and --# Next_Node = Get_Node (It); Check_Discriminant (Node => Next_Node, Protected_Type_Sym => Protected_Type_Sym, Scope => Scope); It := STree.NextNode (It); end loop; end Wf_Known_Discriminant_Part; spark-2012.0.deb/examiner/sem-wf_basic_declarative_item.adb0000644000175000017500000032113211753202336022643 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with SLI; separate (Sem) procedure Wf_Basic_Declarative_Item (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) is Node_To_Check : STree.SyntaxNode; -------------------------------------------------------------------------- procedure Check_Subtype_Against_Basetype_Bounds (Base_Type_Sym : in Dictionary.Symbol; Subtype_First : in LexTokenManager.Lex_String; Subtype_Last : in LexTokenManager.Lex_String; Ident_Node_Pos : in LexTokenManager.Token_Position; Range_Node_Pos : in LexTokenManager.Token_Position; Errors : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Base_Type_Sym, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Errors, --# Ident_Node_Pos, --# LexTokenManager.State, --# Range_Node_Pos, --# SPARK_IO.File_Sys, --# Subtype_First, --# Subtype_Last & --# Errors from *, --# Base_Type_Sym, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Subtype_First, --# Subtype_Last; is separate; ----------------------------------------------------------------------------------- procedure Wf_Basic_Declaration (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.basic_declaration; --# post STree.Table = STree.Table~; is separate; -------------------------------------------------------------------------- procedure Wf_Representation_Clause (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, 
--# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.representation_clause; --# post STree.Table = STree.Table~; is Location : Dictionary.Location; Ident_Node : STree.SyntaxNode; Subject_Str : LexTokenManager.Lex_String; Subject_Sym : Dictionary.Symbol; procedure Process_Address_Clause (Node : in STree.SyntaxNode; Subject_Str : in LexTokenManager.Lex_String; Subject_Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in STree.Table; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# Subject_Sym & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table, --# Subject_Str, --# Subject_Sym; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.representation_clause; is begin if Dictionary.Is_Variable (Subject_Sym) then -- It's a variable...OK unless it's a mode-less own variable Dictionary.AddVariableAddressClause (Subject_Sym); if Dictionary.GetOwnVariableOrConstituentMode (Subject_Sym) = Dictionary.DefaultMode then ErrorHandler.Semantic_Warning (Err_Num => 396, Position => Node_Position (Node => Node), Id_Str => Subject_Str); end if; elsif Dictionary.Is_Constant (Subject_Sym) then -- A constant...issue a warning ErrorHandler.Semantic_Warning (Err_Num => 351, Position => Node_Position (Node => Node), Id_Str => Subject_Str); elsif not Dictionary.IsProgramUnit (Subject_Sym) then -- if it's not a variable, not a constant, and not a program -- unit, then it's illegal. 
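         -- (Conversely, a program unit as subject matches none of these
         --  branches and is accepted here without further action.)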
ErrorHandler.Semantic_Error (Err_Num => 255, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; end Process_Address_Clause; procedure Process_Size_Clause (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Subject_Str : in LexTokenManager.Lex_String; Subject_Sym : in Dictionary.Symbol; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# STree.Table, --# Subject_Sym, --# The_Heap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Subject_Str, --# Subject_Sym, --# The_Heap & --# SLI.State from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table, --# Subject_Sym, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.representation_clause; --# post STree.Table = STree.Table~; is Unwanted_Seq : SeqAlgebra.Seq; Unused_Component_Data : ComponentManager.ComponentData; Simple_Expression : Exp_Record; Simp_Node : STree.SyntaxNode; Value_Int : Integer; Error : Maths.ErrorCode; Lex_Value : LexTokenManager.Lex_String; begin if Dictionary.IsType (Subject_Sym) then Heap.Reset (The_Heap); SeqAlgebra.CreateSeq (The_Heap, Unwanted_Seq); ComponentManager.Initialise (Unused_Component_Data); Simp_Node := Next_Sibling (Current_Node => Child_Node (Current_Node => Child_Node (Current_Node => Node))); -- ASSUME Simp_Node = simple_expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Simp_Node) = SP_Symbols.simple_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Simp_Node = simple_expression in Process_Size_Clause"); --# accept Flow, 10, Unused_Component_Data, "Expected ineffective assignment"; Walk_Expression_P.Walk_Expression (Exp_Node => Simp_Node, Scope => Current_Scope, Type_Context => Dictionary.GetUniversalIntegerType, Context_Requires_Static => True, Ref_Var => Unwanted_Seq, Result => Simple_Expression, Component_Data => Unused_Component_Data, The_Heap => The_Heap); --# end accept; SeqAlgebra.DisposeOfSeq (The_Heap, Unwanted_Seq); if Simple_Expression.Is_Static then if Dictionary.IsIntegerTypeMark (Simple_Expression.Type_Symbol, Current_Scope) then if Simple_Expression.Value /= Maths.NoValue then Maths.ValueToInteger (Simple_Expression.Value, Value_Int, Error); if Error = Maths.NoError then -- Size must not be negative if Value_Int >= 0 then if 
LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => LexTokenManager.Null_String, Lex_Str2 => Dictionary.TypeSizeAttribute (Subject_Sym)) = LexTokenManager.Str_Eq then Maths.StorageRep (Simple_Expression.Value, Lex_Value); Dictionary.AddTypeSizeAttribute (Subject_Sym, Lex_Value); ErrorHandler.Representation_Clause (Position => Node_Position (Node => Node)); else ErrorHandler.Semantic_Error (Err_Num => 250, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Simp_Node), Id_Str => Subject_Str); end if; else ErrorHandler.Semantic_Error (Err_Num => 253, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Simp_Node), Id_Str => Subject_Str); end if; else ErrorHandler.Semantic_Warning (Err_Num => 200, Position => Node_Position (Node => Simp_Node), Id_Str => LexTokenManager.Null_String); end if; else ErrorHandler.Semantic_Warning (Err_Num => 201, Position => Node_Position (Node => Simp_Node), Id_Str => LexTokenManager.Null_String); end if; else ErrorHandler.Semantic_Error (Err_Num => 251, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Simp_Node), Id_Str => Subject_Str); end if; else ErrorHandler.Semantic_Error (Err_Num => 252, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Simp_Node), Id_Str => Subject_Str); end if; elsif Dictionary.IsSubtype (Subject_Sym) then -- Size for a non-first subtype is not permitted ErrorHandler.Semantic_Error (Err_Num => 254, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Subject_Str); else -- Anything else, just warn - legality is left to the compiler. ErrorHandler.Representation_Clause (Position => Node_Position (Node => Node)); end if; end Process_Size_Clause; function Attribute_Identifier (Node : STree.SyntaxNode) return LexTokenManager.Lex_String --# global in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.representation_clause; is Current_Node : STree.SyntaxNode; begin Current_Node := Child_Node (Current_Node => Node); -- ASSUME Current_Node = attribute_definition_clause SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Current_Node) = SP_Symbols.attribute_definition_clause, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = attribute_definition_clause in Attribute_Identifier"); Current_Node := Child_Node (Current_Node => Next_Sibling (Current_Node => Child_Node (Current_Node => Child_Node (Current_Node => Current_Node)))); -- ASSUME Current_Node = attribute_ident SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Current_Node) = SP_Symbols.attribute_ident, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = attribute_ident in Attribute_Identifier"); return Node_Lex_String (Node => Current_Node); end Attribute_Identifier; begin -- Wf_Representation_Clause -- Currently checks only: -- (1) that the name of the type or object is declared and visible -- (2) if object is a variable and clause is an address then: -- (a) set HasAddressClause flag in Dictionary -- (b) issues warning if variable is NOT a stream -- (3) issues warning that rep clauses cannot be fully understood by Examiner -- (4) for enum and record reps calls Dict procedures to add locations to dict file Location := Dictionary.Location'(Start_Position => Node_Position (Node => Node), End_Position => Node_Position (Node => Node)); Ident_Node := Last_Child_Of (Start_Node => Node); -- ASSUME Ident_Node = identifier OR character_literal SystemErrors.RT_Assert (C => 
Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier or else Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.character_literal, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier OR character_literal in Wf_Representation_Clause"); Subject_Str := Node_Lex_String (Node => Ident_Node); Subject_Sym := Dictionary.LookupItem (Name => Subject_Str, Scope => Current_Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); if Dictionary.Is_Null_Symbol (Subject_Sym) then ErrorHandler.Semantic_Error (Err_Num => 1, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Subject_Str); ErrorHandler.Representation_Clause (Position => Node_Position (Node => Node)); else -- ASSUME Child_Node (Current_Node => Node) = attribute_definition_clause OR enumeration_representation_clause OR -- record_representation_clause OR at_clause case Syntax_Node_Type (Node => Child_Node (Current_Node => Node)) is when SP_Symbols.attribute_definition_clause => -- ASSUME Child_Node (Current_Node => Node) = attribute_definition_clause if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attribute_Identifier (Node => Node), Lex_Str2 => LexTokenManager.Address_Token) = LexTokenManager.Str_Eq then case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => ErrorHandler.Semantic_Error (Err_Num => 54, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Address_Token); when CommandLineData.SPARK95_Onwards => STree.Set_Node_Lex_String (Sym => Subject_Sym, Node => Ident_Node); Process_Address_Clause (Node => Node, Subject_Str => Subject_Str, Subject_Sym => Subject_Sym); end case; ErrorHandler.Representation_Clause (Position => Node_Position (Node => Node)); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attribute_Identifier (Node => Node), Lex_Str2 => LexTokenManager.Size_Token) = LexTokenManager.Str_Eq then STree.Set_Node_Lex_String (Sym => Subject_Sym, Node => Ident_Node); Process_Size_Clause (Node => Node, Current_Scope => Current_Scope, Subject_Str => Subject_Str, Subject_Sym => Subject_Sym, The_Heap => The_Heap); else STree.Set_Node_Lex_String (Sym => Subject_Sym, Node => Ident_Node); ErrorHandler.Representation_Clause (Position => Node_Position (Node => Node)); end if; when SP_Symbols.at_clause => -- ASSUME Child_Node (Current_Node => Node) = at_clause case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => STree.Set_Node_Lex_String (Sym => Subject_Sym, Node => Ident_Node); when CommandLineData.SPARK95_Onwards => -- "at" clause obsolete in 95 onwards ErrorHandler.Semantic_Warning (Err_Num => 310, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end case; Process_Address_Clause (Node => Node, Subject_Str => Subject_Str, Subject_Sym => Subject_Sym); ErrorHandler.Representation_Clause (Position => Node_Position (Node => Node)); when SP_Symbols.record_representation_clause => -- ASSUME Child_Node (Current_Node => Node) = record_representation_clause if Dictionary.IsRecordTypeMark (Subject_Sym, Current_Scope) then STree.Set_Node_Lex_String (Sym => Subject_Sym, Node => Ident_Node); Dictionary.Add_Representation_Clause (The_Type => Subject_Sym, Clause => Location); else ErrorHandler.Semantic_Error (Err_Num => 38, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => LexTokenManager.Null_String); end if; 
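            -- Whether or not the subject was a valid record type, the clause
            -- itself is still noted via ErrorHandler.Representation_Clause below.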
ErrorHandler.Representation_Clause (Position => Node_Position (Node => Node)); when SP_Symbols.enumeration_representation_clause => -- ASSUME Child_Node (Current_Node => Node) = enumeration_representation_clause if Dictionary.IsTypeMark (Subject_Sym) and then Dictionary.TypeIsEnumeration (Subject_Sym) then STree.Set_Node_Lex_String (Sym => Subject_Sym, Node => Ident_Node); Dictionary.Add_Representation_Clause (The_Type => Subject_Sym, Clause => Location); else ErrorHandler.Semantic_Error (Err_Num => 38, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => LexTokenManager.Null_String); end if; ErrorHandler.Representation_Clause (Position => Node_Position (Node => Node)); when others => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child_Node (Current_Node => Node) = attribute_definition_clause OR " & "enumeration_representation_clause OR record_representation_clause OR at_clause in Wf_Representation_Clause"); end case; end if; end Wf_Representation_Clause; -------------------------------------------------------------------------- procedure Wf_Basic_Proof_Declaration (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Component_Data, --# Dictionary.Dict, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# SLI.State from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SLI.State, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.basic_proof_declaration; --# post STree.Table = STree.Table~; is Declaration_Node, Ident_Node : STree.SyntaxNode; Ident_Str : LexTokenManager.Lex_String; Sym : Dictionary.Symbol; function More_Than_One_Own_Var_Announced (Sym : Dictionary.Symbol; Scope : Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; is It : Dictionary.Iterator; Count : Natural := 0; Current_Own_Var : Dictionary.Symbol; begin It := Dictionary.FirstOwnVariable (Dictionary.GetEnclosingPackage (Scope)); while not Dictionary.IsNullIterator (It) loop Current_Own_Var := Dictionary.CurrentSymbol (It); if Dictionary.Types_Are_Equal (Left_Symbol => 
Dictionary.GetType (Current_Own_Var), Right_Symbol => Sym, Full_Range_Subtype => False) and then Count <= 2 then Count := Count + 1; end if; It := Dictionary.NextSymbol (It); end loop; return Count = 2; end More_Than_One_Own_Var_Announced; -------------------------------------------------------------------------- procedure Wf_Base_Type_Assertion (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.base_type_assertion; --# post STree.Table = STree.Table~; is Name_Node, Type_Node, Attr_Node, Base_Type_Node : STree.SyntaxNode; Type_Str, Attr_Str, Base_Type_Str : LexTokenManager.Lex_String; Type_Sym, Base_Type_Sym : Dictionary.Symbol; Errors : Boolean := False; procedure Ck_Attr_Is_Base (Attr_Node : in STree.SyntaxNode; Attr_Str : in LexTokenManager.Lex_String; Errors : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Attr_Node, --# Attr_Str, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table & --# Errors from *, --# Attr_Str, --# LexTokenManager.State; --# pre Syntax_Node_Type (Attr_Node, STree.Table) = SP_Symbols.attribute_ident; is begin -- check that the attribute asserted is in fact 'Base if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Attr_Str, Lex_Str2 => LexTokenManager.Base_Token) /= LexTokenManager.Str_Eq then ErrorHandler.Semantic_Error (Err_Num => 789, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Attr_Node), Id_Str => LexTokenManager.Null_String); Errors := True; end if; end Ck_Attr_Is_Base; procedure Ck_Is_Type (Base_Type_Node, Type_Node : in STree.SyntaxNode; Base_Type_Sym, Type_Sym : in Dictionary.Symbol; Errors : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Base_Type_Node, --# Base_Type_Sym, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Type_Node, --# Type_Sym & --# Errors from *, --# Base_Type_Sym, --# Dictionary.Dict, --# Type_Sym; --# pre Syntax_Node_Type (Base_Type_Node, STree.Table) = SP_Symbols.identifier and --# Syntax_Node_Type (Type_Node, STree.Table) = SP_Symbols.identifier; is begin -- check that the type and base type specified are both actually types if not Dictionary.IsType (Type_Sym) then ErrorHandler.Semantic_Error (Err_Num => 790, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Type_Node), Id_Str => 
LexTokenManager.Null_String); Errors := True; end if; if not Dictionary.IsType (Base_Type_Sym) then ErrorHandler.Semantic_Error (Err_Num => 790, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Base_Type_Node), Id_Str => LexTokenManager.Null_String); Errors := True; end if; end Ck_Is_Type; procedure Ck_Typing (Base_Type_Node : in STree.SyntaxNode; Base_Type_Sym, Type_Sym : in Dictionary.Symbol; Current_Scope : in Dictionary.Scopes; Errors : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Base_Type_Node, --# Base_Type_Sym, --# CommandLineData.Content, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Type_Sym & --# Errors from *, --# Base_Type_Sym, --# Current_Scope, --# Dictionary.Dict, --# Type_Sym; --# pre Syntax_Node_Type (Base_Type_Node, STree.Table) = SP_Symbols.identifier; is begin -- check that the types are either both signed integer, or both -- floating point. if not ((Dictionary.IsIntegerTypeMark (Type_Sym, Current_Scope) and then Dictionary.IsIntegerTypeMark (Base_Type_Sym, Current_Scope)) or else (Dictionary.IsFloatingPointTypeMark (Type_Sym, Current_Scope) and then Dictionary.IsFloatingPointTypeMark (Base_Type_Sym, Current_Scope))) then ErrorHandler.Semantic_Error (Err_Num => 792, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Base_Type_Node), Id_Str => LexTokenManager.Null_String); Errors := True; end if; end Ck_Typing; procedure Ck_No_Existing_Base_Type (Type_Node : in STree.SyntaxNode; Type_Sym : in Dictionary.Symbol; Errors : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Errors, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Type_Node, --# Type_Sym & --# Errors from *, --# Dictionary.Dict, --# Type_Sym; --# pre Syntax_Node_Type (Type_Node, STree.Table) = SP_Symbols.identifier; is begin -- check that the assertion is not about a predefined type -- or one that already has a base type assertion; guarded by -- check on Errors to avoid bogus reports for bad types if not Errors and then Dictionary.IsType (Type_Sym) and then not Dictionary.Is_Null_Symbol (Dictionary.GetBaseType (Type_Sym)) then ErrorHandler.Semantic_Error (Err_Num => 796, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Type_Node), Id_Str => LexTokenManager.Null_String); Errors := True; end if; end Ck_No_Existing_Base_Type; procedure Ck_Predefined_Base_Type (Base_Type_Node : in STree.SyntaxNode; Base_Type_Sym : in Dictionary.Symbol; Errors : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Base_Type_Node, --# Base_Type_Sym, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table & --# Errors from *, --# 
Base_Type_Sym, --# Dictionary.Dict; --# pre Syntax_Node_Type (Base_Type_Node, STree.Table) = SP_Symbols.identifier; is begin -- check that the base type specified is predefined if Dictionary.IsType (Base_Type_Sym) then if not Dictionary.Types_Are_Equal (Left_Symbol => Dictionary.GetBaseType (Base_Type_Sym), Right_Symbol => Base_Type_Sym, Full_Range_Subtype => False) then ErrorHandler.Semantic_Error (Err_Num => 791, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Base_Type_Node), Id_Str => LexTokenManager.Null_String); Errors := True; end if; end if; end Ck_Predefined_Base_Type; procedure Ck_Real_Accuracy (Base_Type_Node : in STree.SyntaxNode; Base_Type_Sym, Type_Sym : in Dictionary.Symbol; Errors : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Base_Type_Node, --# Base_Type_Sym, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Errors, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Type_Sym & --# Errors from *, --# Base_Type_Sym, --# Dictionary.Dict, --# LexTokenManager.State, --# Type_Sym; --# pre Syntax_Node_Type (Base_Type_Node, STree.Table) = SP_Symbols.identifier; is Type_Accuracy, Base_Type_Accuracy : LexTokenManager.Lex_String; Type_Accuracy_Val, Base_Type_Accuracy_Val, Comp_Val : Maths.Value; Maths_Error : Maths.ErrorCode; begin -- This test only relevant if the type is a real if not Errors and then Dictionary.TypeIsReal (Type_Sym) then Type_Accuracy := Dictionary.GetScalarAttributeValue (False, LexTokenManager.Digits_Token, Type_Sym); Base_Type_Accuracy := Dictionary.GetScalarAttributeValue (False, LexTokenManager.Digits_Token, Base_Type_Sym); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Base_Type_Accuracy, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq then -- we require that the base type have a defined accuracy ErrorHandler.Semantic_Error (Err_Num => 797, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Base_Type_Node), Id_Str => LexTokenManager.Null_String); Errors := True; else -- check that the accuracy of the base type is at least that of the type Type_Accuracy_Val := Maths.ValueRep (Type_Accuracy); Base_Type_Accuracy_Val := Maths.ValueRep (Base_Type_Accuracy); Maths.Lesser (Base_Type_Accuracy_Val, Type_Accuracy_Val, Comp_Val, Maths_Error); if Comp_Val = Maths.TrueValue and then Maths_Error = Maths.NoError then ErrorHandler.Semantic_Error (Err_Num => 798, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Base_Type_Node), Id_Str => LexTokenManager.Null_String); Errors := True; end if; end if; end if; end Ck_Real_Accuracy; procedure Ck_Scoping (Type_Node : in STree.SyntaxNode; Type_Sym : in Dictionary.Symbol; Current_Scope : in Dictionary.Scopes; Errors : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Errors, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Type_Node, --# Type_Sym & --# Errors from *, --# Current_Scope, --# Dictionary.Dict, --# 
Type_Sym; --# pre Syntax_Node_Type (Type_Node, STree.Table) = SP_Symbols.identifier; is Type_Scope : Dictionary.Scopes; begin -- check that the type assertion occurs in the same scope -- as the declaration of the type, or the corresponding -- private scope (if the type is private) if not Errors then Type_Scope := Dictionary.GetScope (Type_Sym); if not (Type_Scope = Current_Scope or else (Dictionary.Set_Visibility (The_Visibility => Dictionary.Privat, The_Unit => Dictionary.GetRegion (Type_Scope)) = Current_Scope and then Dictionary.TypeIsPrivate (TheType => Type_Sym))) then ErrorHandler.Semantic_Error (Err_Num => 795, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Type_Node), Id_Str => LexTokenManager.Null_String); Errors := True; end if; end if; end Ck_Scoping; begin -- Wf_Base_Type_Assertion Name_Node := Child_Node (Current_Node => Node); -- ASSUME Name_Node = dotted_simple_name SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Name_Node) = SP_Symbols.dotted_simple_name, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Name_Node = dotted_simple_name in Wf_Base_Type_Assertion"); Type_Node := Child_Node (Current_Node => Name_Node); -- ASSUME Type_Node = dotted_simple_name OR identifier if Syntax_Node_Type (Node => Type_Node) = SP_Symbols.dotted_simple_name then -- ASSUME Type_Node = dotted_simple_name ErrorHandler.Semantic_Error (Err_Num => 799, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Type_Node), Id_Str => LexTokenManager.Null_String); elsif Syntax_Node_Type (Node => Type_Node) = SP_Symbols.identifier then -- ASSUME Type_Node = identifier Type_Str := Node_Lex_String (Node => Type_Node); Type_Sym := Dictionary.LookupItem (Name => Type_Str, Scope => Current_Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); Attr_Node := Next_Sibling (Current_Node => Name_Node); -- ASSUME Attr_Node = attribute_ident SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Attr_Node) = SP_Symbols.attribute_ident, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Attr_Node = attribute_ident in Wf_Base_Type_Assertion"); Attr_Str := Node_Lex_String (Node => Attr_Node); Base_Type_Node := Next_Sibling (Current_Node => Attr_Node); -- ASSUME Base_Type_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Base_Type_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Base_Type_Node = identifier in Wf_Base_Type_Assertion"); Base_Type_Str := Node_Lex_String (Node => Base_Type_Node); Base_Type_Sym := Dictionary.LookupItem (Name => Base_Type_Str, Scope => Current_Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); -- perform static semantic checks Ck_Attr_Is_Base (Attr_Node => Attr_Node, Attr_Str => Attr_Str, Errors => Errors); Ck_Is_Type (Base_Type_Node => Base_Type_Node, Type_Node => Type_Node, Base_Type_Sym => Base_Type_Sym, Type_Sym => Type_Sym, Errors => Errors); Ck_Typing (Base_Type_Node => Base_Type_Node, Base_Type_Sym => Base_Type_Sym, Type_Sym => Type_Sym, Current_Scope => Current_Scope, Errors => Errors); Ck_No_Existing_Base_Type (Type_Node => Type_Node, Type_Sym => Type_Sym, Errors => Errors); Ck_Predefined_Base_Type (Base_Type_Node => Base_Type_Node, Base_Type_Sym => Base_Type_Sym, Errors => Errors); if not Errors then -- If something has gone wrong, then DON'T try to look -- up Type_Sym'First and Type_Sym'Last Check_Subtype_Against_Basetype_Bounds (Base_Type_Sym => Base_Type_Sym, Subtype_First => 
Dictionary.GetScalarAttributeValue (False, LexTokenManager.First_Token, Type_Sym), Subtype_Last => Dictionary.GetScalarAttributeValue (False, LexTokenManager.Last_Token, Type_Sym), Ident_Node_Pos => Node_Position (Node => Base_Type_Node), Range_Node_Pos => Node_Position (Node => Base_Type_Node), Errors => Errors); end if; Ck_Real_Accuracy (Base_Type_Node => Base_Type_Node, Base_Type_Sym => Base_Type_Sym, Type_Sym => Type_Sym, Errors => Errors); Ck_Scoping (Type_Node => Type_Node, Type_Sym => Type_Sym, Current_Scope => Current_Scope, Errors => Errors); if not Errors then STree.Set_Node_Lex_String (Sym => Type_Sym, Node => Type_Node); STree.Set_Node_Lex_String (Sym => Base_Type_Sym, Node => Base_Type_Node); Dictionary.SetBaseType (Type_Sym, Base_Type_Sym); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Type_Node = dotted_simple_name OR identifier in Wf_Base_Type_Assertion"); end if; end Wf_Base_Type_Assertion; -------------------------------------------------------------------------- -- Process a clause of the form: -- --# assert A'Always_Valid; -- or --# assert A.B.C'Always_Valid; -- Where A is a variable, and B anc C are (sub) components. -- If successful, mark the Variable/SubComponent symbol as "MarkedValid" -- which will suppress the "representation might be invalid" warning, -- and cause the appropriate "in range" VCs to be generated. -------------------------------------------------------------------------- procedure Wf_Always_Valid_Variable_Assertion (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Component_Data, --# Dictionary.Dict, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.alwaysvalid_variable_assertion; --# post STree.Table = STree.Table~; is Name_Node : STree.SyntaxNode; Entire_Sym, Object_Sym, Type_Sym : Dictionary.Symbol; ------------------------------------------------------------------------- -- Name_Node - Node naming a variable, or a (sub) field of a variable -- Entire_Sym - Symbol of entire variable (ie, first identifer found) -- Object_Sym - Symbol of right most element (ie, the variable or -- subcomponent to be marked) -- == NullSymbol if anything goes wrong ------------------------------------------------------------------------- procedure Process_Dotted_Simple_Name (Name_Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord; Entire_Sym : out Dictionary.Symbol; Object_Sym : out Dictionary.Symbol) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# 
in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Component_Data, --# Dictionary.Dict, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Name_Node, --# STree.Table, --# The_Heap & --# Entire_Sym from CommandLineData.Content, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Name_Node, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Name_Node, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# Object_Sym from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Name_Node, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Name_Node, STree.Table) = SP_Symbols.dotted_simple_name; --# post STree.Table = STree.Table~; is It : STree.Iterator; Id_Node : STree.SyntaxNode; Id_Str, Previous_Id_Str : LexTokenManager.Lex_String; begin -- the first node must be the entire variable Previous_Id_Str := LexTokenManager.Null_String; It := Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Name_Node, In_Direction => STree.Down); Id_Node := Get_Node (It => It); -- must be the entire variable -- ASSUME Id_Node = identifier Id_Str := Node_Lex_String (Node => Id_Node); Object_Sym := Dictionary.LookupItem (Name => Id_Str, Scope => Current_Scope, Context => Dictionary.ProofContext, Full_Package_Name => False); Entire_Sym := Object_Sym; loop --# assert STree.Table = STree.Table~; -- check that what we have so far (inc the first symbol) is valid if Dictionary.Is_Null_Symbol (Object_Sym) then ErrorHandler.Semantic_Error2 (Err_Num => 1, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Id_Node), Id_Str1 => Id_Str, Id_Str2 => Previous_Id_Str); Object_Sym := Dictionary.NullSymbol; exit; end if; STree.Set_Node_Lex_String (Sym => Object_Sym, Node => Id_Node); -- get the next symbol, if there is one It := STree.NextNode (It); exit when STree.IsNull (It); Id_Node := Get_Node (It => It); -- ASSUME Id_Node = identifier Previous_Id_Str := Id_Str; Id_Str := Node_Lex_String (Node => Id_Node); -- we have another symbol (field selector), thus the previous -- one must be the entire variable, or a subcomponent, and it -- must be of a record type if not (Dictionary.IsVariableOrSubcomponent (Object_Sym) and then Dictionary.TypeIsRecord (Dictionary.GetType (Object_Sym))) then ErrorHandler.Semantic_Error_Sym (Err_Num => 9, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Id_Node), Sym => Object_Sym, Scope => Current_Scope); Object_Sym := Dictionary.NullSymbol; exit; end if; -- We have a symbol that might be a subcomponent, before we can -- check that it is, we need to add the subcomponent to the enclosing -- record symbol, since they are not added unless/until they are -- needed Add_Record_Sub_Components (Record_Var_Sym => Object_Sym, Record_Type_Sym => Dictionary.GetType (Object_Sym), Component_Data => Component_Data, The_Heap => The_Heap); -- check that the symbol is indeed a subcomponent Object_Sym := 
Dictionary.LookupSelectedItem
              (Prefix   => Object_Sym,
               Selector => Id_Str,
               Scope    => Current_Scope,
               Context  => Dictionary.ProofContext);
         end loop;
         -- return the innermost component symbol, since this is what
         -- will carry the valid mark
      end Process_Dotted_Simple_Name;

      -- Only one-dimensional arrays can be marked as always_valid.
      function Is_One_Dimensional_Array (Type_Sym : in Dictionary.Symbol;
                                         Scope    : in Dictionary.Scopes) return Boolean
      --# global in Dictionary.Dict;
      is
      begin
         return Dictionary.IsArrayTypeMark (Type_Sym, Scope)
           and then Dictionary.GetNumberOfDimensions (Type_Sym) = 1;
      end Is_One_Dimensional_Array;

      -- A suitable record type is non-tagged and contains only scalar or
      -- array-of-scalar components.
      function Is_Suitable_Record_Type (Type_Sym : in Dictionary.Symbol;
                                        Scope    : in Dictionary.Scopes) return Boolean
      --# global in Dictionary.Dict;
      is
         Iterator      : Dictionary.Iterator;
         Rec_Comp_Type : Dictionary.Symbol;
         Suitable      : Boolean;
      begin
         if not Dictionary.Is_Null_Symbol (Type_Sym)
           and then Dictionary.TypeIsRecord (Type_Sym)
           and then not Dictionary.TypeIsTagged (Type_Sym) then
            Iterator := Dictionary.FirstRecordComponent (Type_Sym);
            Suitable := not Dictionary.IsNullIterator (Iterator);
            -- Iterate over the components ensuring that they are of a
            -- suitable type.  Exit the loop if an unsuitable one is found.
            while (not Dictionary.IsNullIterator (Iterator)) loop
               Rec_Comp_Type := Dictionary.GetType (Dictionary.CurrentSymbol (Iterator));
               if not (Dictionary.IsScalarTypeMark (Rec_Comp_Type, Scope)
                       or else (not (Is_One_Dimensional_Array (Rec_Comp_Type, Scope)
                                     and then not Dictionary.IsScalarTypeMark
                                       (Dictionary.GetArrayComponent (Rec_Comp_Type), Scope)))) then
                  exit;
               end if;
               Iterator := Dictionary.NextSymbol (Iterator);
            end loop;
            -- The record is suitable if the record has at least one component
            -- and all its components are of a suitable type, in which case
            -- the loop exits with a NullIterator.
            Suitable := Suitable and Dictionary.IsNullIterator (Iterator);
         else
            Suitable := False;
         end if;
         return Suitable;
      end Is_Suitable_Record_Type;

      -- A suitable array type is one-dimensional and has components which
      -- are scalar or are a suitable record type.
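      -- As an informal illustration of these suitability rules (the type
      -- names below are invented for this comment and appear nowhere else):
      --
      --    type Reading is range 0 .. 4095;                     -- scalar: suitable
      --    type Channel is array (Sensor) of Reading;           -- 1-D array of scalar: suitable
      --    type Sample  is record T, V : Reading; end record;   -- untagged, scalar fields: suitable
      --    type Grid    is array (Sensor, Sensor) of Reading;   -- 2-D array: not suitable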
function Is_Suitable_Array_Type (Type_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; is Suitable : Boolean; Element_Type : Dictionary.Symbol; begin if Is_One_Dimensional_Array (Type_Sym, Scope) then Element_Type := Dictionary.GetArrayComponent (Type_Sym); Suitable := Dictionary.IsScalarTypeMark (Element_Type, Scope) or else Is_Suitable_Record_Type (Element_Type, Scope); else Suitable := False; end if; return Suitable; end Is_Suitable_Array_Type; begin -- Wf_Always_Valid_Variable_Assertion Name_Node := Child_Node (Current_Node => Node); -- ASSUME Name_Node = dotted_simple_name SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Name_Node) = SP_Symbols.dotted_simple_name, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Name_Node = dotted_simple_name in Wf_Always_Valid_Variable_Assertion"); -- check that the attribute asserted is in fact 'Always_Valid if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Node => Next_Sibling (Current_Node => Name_Node)), Lex_Str2 => LexTokenManager.Always_Valid_Token) /= LexTokenManager.Str_Eq then ErrorHandler.Semantic_Error (Err_Num => 656, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Sibling (Current_Node => Name_Node)), Id_Str => LexTokenManager.Null_String); else -- parse the variable name, which might refer to a record -- subcomponent: eg. Entire_Sym.First.Second.Component_Sym -- Object_Sym == NullSymbol if anything goes wrong Process_Dotted_Simple_Name (Name_Node => Name_Node, Current_Scope => Current_Scope, Component_Data => Component_Data, The_Heap => The_Heap, Entire_Sym => Entire_Sym, Object_Sym => Object_Sym); -- Check that we are referring ultimately to a variable if not Dictionary.Is_Null_Symbol (Object_Sym) and then not Dictionary.Is_Variable (Entire_Sym) then ErrorHandler.Semantic_Error (Err_Num => 657, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); Object_Sym := Dictionary.NullSymbol; end if; -- Check that the variable is declared in the same scope if not Dictionary.Is_Null_Symbol (Object_Sym) and then Dictionary.GetScope (Entire_Sym) /= Current_Scope then ErrorHandler.Semantic_Error (Err_Num => 659, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); Object_Sym := Dictionary.NullSymbol; end if; -- Check that the type of the object is appropriate. -- It must either be scalar, a one dimensional array of scalar -- componemts, a record with only scalar or array of scalar -- components, or an array of records satisfying the aforementioned -- constraints. if not Dictionary.Is_Null_Symbol (Object_Sym) then Type_Sym := Dictionary.GetType (Object_Sym); if not (Dictionary.IsScalarTypeMark (Type_Sym, Current_Scope) or else Is_Suitable_Array_Type (Type_Sym, Current_Scope) or else Is_Suitable_Record_Type (Type_Sym, Current_Scope)) then ErrorHandler.Semantic_Error (Err_Num => 658, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Name_Node), Id_Str => LexTokenManager.Null_String); Object_Sym := Dictionary.NullSymbol; end if; end if; -- Should check that it is a mode in own variable... 
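      -- For instance (annotation sketch with invented names): given
      --
      --    --# own in Sensor_Port;   -- external input ("mode in" own variable)
      --    --# own    Local_State;   -- ordinary own variable, no mode
      --
      -- only Sensor_Port, or a subcomponent of it, may be asserted
      -- 'Always_Valid; Local_State would be rejected by the check below.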
if not Dictionary.Is_Null_Symbol (Object_Sym) and then Dictionary.GetOwnVariableOrConstituentMode (Entire_Sym) /= Dictionary.InMode then ErrorHandler.Semantic_Error (Err_Num => 662, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); Object_Sym := Dictionary.NullSymbol; end if; -- Check that this object has not been marked valid already -- and if not, then mark it. The object might be a variable, -- or a record subcomponent, and they must be dealt with separately. if not Dictionary.Is_Null_Symbol (Object_Sym) then if Dictionary.Is_Variable (Object_Sym) then if not Dictionary.VariableIsMarkedValid (Object_Sym) then Dictionary.SetVariableMarkedValid (Object_Sym, True); else ErrorHandler.Semantic_Error (Err_Num => 660, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Name_Node), Id_Str => LexTokenManager.Null_String); end if; elsif Dictionary.IsSubcomponent (Object_Sym) then if not Dictionary.SubcomponentIsMarkedValid (Object_Sym) then Dictionary.SetSubcomponentMarkedValid (Object_Sym, True); else ErrorHandler.Semantic_Error (Err_Num => 660, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Name_Node), Id_Str => LexTokenManager.Null_String); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Assertion_Failure, Msg => "Component must be a variable or a SubComponent"); end if; end if; end if; end Wf_Always_Valid_Variable_Assertion; procedure Wf_Object_Assertion (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# STree.Table & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table & --# SLI.State from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SLI.State, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.object_assertion; --# post STree.Table = STree.Table~; is Ident_Node : STree.SyntaxNode; Ident_Str : LexTokenManager.Lex_String; procedure Process_Simple_Name_Rep (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Rule_Policy : in Dictionary.Rule_Policies) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# 
ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Rule_Policy, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Rule_Policy, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.object_assertion; --# post STree.Table = STree.Table~; is It : STree.Iterator; Next_Node : STree.SyntaxNode; procedure Process_Dotted_Simple_Name (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Rule_Policy : in Dictionary.Rule_Policies) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Rule_Policy, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.dotted_simple_name; --# post STree.Table = STree.Table~; is Ident_Node, Next_Ident_Node : STree.SyntaxNode; It : STree.Iterator; Previous_Id_Str, Id_Str : LexTokenManager.Lex_String; Sym, SymSoFar, SymType, The_Rule_Policy : Dictionary.Symbol; PrefixOk : Boolean; begin It := Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Node, In_Direction => STree.Down); if not STree.IsNull (It => It) then Ident_Node := Get_Node (It => It); -- ASSUME Ident_Node = identifier Id_Str := Node_Lex_String (Node => Ident_Node); Previous_Id_Str := LexTokenManager.Null_String; Sym := Dictionary.LookupItem (Name => Id_Str, Scope => Current_Scope, Context => Dictionary.ProofContext, Full_Package_Name => False); loop --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier and --# Ident_Node = Get_Node (It); if Dictionary.Is_Null_Symbol (Sym) then ErrorHandler.Semantic_Error2 (Err_Num => 1, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str1 => Id_Str, Id_Str2 => Previous_Id_Str); exit; end if; It := STree.NextNode (It); Next_Ident_Node := Get_Node (It => It); -- ASSUME Next_Ident_Node = identifier OR NULL if Dictionary.Is_Constant (Sym) then SymType := Dictionary.GetType (Sym); -- is constant, needs to be a composite constant -- with no dotted part to right. 
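            -- Illustration of the name forms handled by this loop (constant
            -- and package names are invented for this comment):
            --    Lookup_Table         composite (array/record) constant    accepted
            --    Tables.Lookup_Table  the same, prefixed by its package    accepted
            --    Max_Entries          scalar constant                      rejected (error 180)
            --    Lookup_Table.Size    selected component to the right      rejected (error 180)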
if Next_Ident_Node /= STree.NullNode then ErrorHandler.Semantic_Error (Err_Num => 180, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); Sym := Dictionary.NullSymbol; end if; if not (Dictionary.TypeIsArray (SymType) or else Dictionary.TypeIsRecord (SymType)) then ErrorHandler.Semantic_Error (Err_Num => 180, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); Sym := Dictionary.NullSymbol; end if; if not Dictionary.Is_Null_Symbol (Sym) then STree.Set_Node_Lex_String (Sym => Sym, Node => Ident_Node); end if; exit; end if; if not Dictionary.IsPackage (Sym) then ErrorHandler.Semantic_Error (Err_Num => 180, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); Sym := Dictionary.NullSymbol; exit; end if; if Next_Ident_Node = STree.NullNode then -- package without a selected component ErrorHandler.Semantic_Error (Err_Num => 180, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); Sym := Dictionary.NullSymbol; exit; end if; Check_Package_Prefix (Node_Pos => Node_Position (Node => Ident_Node), Pack_Sym => Sym, Scope => Current_Scope, OK => PrefixOk); if not PrefixOk then Sym := Dictionary.NullSymbol; exit; end if; STree.Set_Node_Lex_String (Sym => Sym, Node => Ident_Node); Previous_Id_Str := Id_Str; Ident_Node := Next_Ident_Node; -- ASSUME Ident_Node = identifier Id_Str := Node_Lex_String (Node => Ident_Node); SymSoFar := Sym; Sym := Dictionary.LookupSelectedItem (Prefix => Sym, Selector => Id_Str, Scope => Current_Scope, Context => Dictionary.ProofContext); -- check to see if we are getting the same symbol over and again if Sym = SymSoFar then -- P.P.P.P.X case Sym := Dictionary.NullSymbol; -- to cause "Not visible" error at top of loop end if; end loop; if not Dictionary.Is_Null_Symbol (Sym) then if Dictionary.IsConstantRulePolicyPresent (Sym, Current_Scope) then ErrorHandler.Semantic_Error2 (Err_Num => 182, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str1 => Id_Str, Id_Str2 => Previous_Id_Str); else Dictionary.AddConstantRulePolicy (TheConstant => Sym, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Ident_Node), End_Position => Node_Position (Node => Ident_Node)), TheScope => Current_Scope, ThePolicy => Rule_Policy, TheRulePolicy => The_Rule_Policy); STree.Add_Node_Symbol (Node => Ident_Node, Sym => The_Rule_Policy); end if; end if; end if; end Process_Dotted_Simple_Name; begin -- Process_Simple_Name_Rep It := Find_First_Node (Node_Kind => SP_Symbols.dotted_simple_name, From_Root => Child_Node (Current_Node => Node), In_Direction => STree.Down); while not STree.IsNull (It) loop -- for each identifier in list of constants Next_Node := Get_Node (It => It); --# assert Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.dotted_simple_name and --# Next_Node = Get_Node (It) and --# STree.Table = STree.Table~; Process_Dotted_Simple_Name (Node => Next_Node, Current_Scope => Current_Scope, Rule_Policy => Rule_Policy); It := STree.NextNode (It); end loop; end Process_Simple_Name_Rep; begin -- Wf_Object_Assertion Ident_Node := Next_Sibling (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, 
Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Wf_Object_Assertion"); Ident_Str := Node_Lex_String (Node => Ident_Node); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Rule_Token) = LexTokenManager.Str_Eq then Process_Simple_Name_Rep (Node => Node, Current_Scope => Current_Scope, Rule_Policy => Dictionary.Rule_Requested); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.No_Rule_Token) = LexTokenManager.Str_Eq then Process_Simple_Name_Rep (Node => Node, Current_Scope => Current_Scope, Rule_Policy => Dictionary.No_Rule_Requested); else -- Illegal proof rule switch ErrorHandler.Semantic_Error (Err_Num => 181, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => LexTokenManager.Null_String); end if; if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Object_Assertion (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Node, Scope => Current_Scope); end if; end Wf_Object_Assertion; begin -- Wf_Basic_Proof_Declaration Declaration_Node := Child_Node (Current_Node => Node); -- ASSUME Declaration_Node = proof_type_declaration OR type_assertion OR object_assertion OR proof_constant_declaration case Syntax_Node_Type (Node => Declaration_Node) is when SP_Symbols.proof_type_declaration => -- ASSUME Declaration_Node = proof_type_declaration Ident_Node := Child_Node (Current_Node => Declaration_Node); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Wf_Basic_Proof_Declaration"); Ident_Str := Node_Lex_String (Node => Ident_Node); Sym := Dictionary.LookupItem (Name => Ident_Str, Scope => Current_Scope, Context => Dictionary.ProofContext, Full_Package_Name => False); if Dictionary.Is_Null_Symbol (Sym) or else (Dictionary.IsTypeMark (Sym) and then Dictionary.TypeIsAnnounced (TheType => Sym) and then not Dictionary.Is_Declared (Item => Sym)) then -- it's ok to add because it either a new type or an announced type. -- unless more than one own variable has announced it if More_Than_One_Own_Var_Announced (Sym => Sym, Scope => Current_Scope) then ErrorHandler.Semantic_Error (Err_Num => 149, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); else -- still ok to add -- Assumption warning: code assumes that proof type -- is "abstract" because that is all the grammar -- would permit at this point. If we add fdl-based -- proof types then more checks will be needed -- here. 
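               -- A typical (purely illustrative) use of the annotation handled
               -- here is an own variable announcement whose type is then
               -- declared as an abstract proof type, e.g.
               --
               --    --# own State : State_Type;
               --    --# type State_Type is abstract;
               --
               -- where both names are invented for this comment.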
if not Dictionary.Is_Null_Symbol (Sym) then STree.Set_Node_Lex_String (Sym => Sym, Node => Ident_Node); end if; Dictionary.Add_Abstract_Proof_Type (Name => Ident_Str, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Ident_Node), End_Position => Node_Position (Node => Ident_Node)), Scope => Current_Scope, The_Type => Sym); STree.Add_Node_Symbol (Node => Ident_Node, Sym => Sym); if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Symbol (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Ident_Node, Symbol => Sym, Is_Declaration => True); end if; end if; else -- illegal duplicate ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); end if; when SP_Symbols.type_assertion => -- ASSUME Declaration_Node = type_assertion Declaration_Node := Child_Node (Current_Node => Declaration_Node); -- ASSUME Declaration_Node = base_type_assertion OR alwaysvalid_variable_assertion if Syntax_Node_Type (Node => Declaration_Node) = SP_Symbols.base_type_assertion then -- ASSUME Declaration_Node = base_type_assertion Wf_Base_Type_Assertion (Node => Declaration_Node, Current_Scope => Current_Scope); elsif Syntax_Node_Type (Node => Declaration_Node) = SP_Symbols.alwaysvalid_variable_assertion then -- ASSUME Declaration_Node = alwaysvalid_variable_assertion Wf_Always_Valid_Variable_Assertion (Node => Declaration_Node, Current_Scope => Current_Scope, Component_Data => Component_Data, The_Heap => The_Heap); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Declaration_Node = base_type_assertion OR " & "alwaysvalid_variable_assertion in Wf_Basic_Proof_Declaration"); end if; when SP_Symbols.object_assertion => -- ASSUME Declaration_Node = object_assertion Wf_Object_Assertion (Node => Declaration_Node, Current_Scope => Current_Scope); when SP_Symbols.proof_constant_declaration => -- ASSUME Declaration_Node = proof_constant_declaration ErrorHandler.Semantic_Error (Err_Num => 315, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); when others => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Declaration_Node = proof_type_declaration OR type_assertion OR object_assertion OR " & "proof_constant_declaration in Wf_Basic_Proof_Declaration"); end case; end Wf_Basic_Proof_Declaration; begin -- Wf_Basic_Declarative_Item Node_To_Check := Child_Node (Current_Node => Node); -- ASSUME Node_To_Check = basic_declaration OR justification_statement OR representation_clause OR basic_proof_declaration if Syntax_Node_Type (Node => Node_To_Check) = SP_Symbols.basic_declaration then -- ASSUME Node_To_Check = basic_declaration Wf_Basic_Declaration (Node => Node_To_Check, Current_Scope => Current_Scope, Component_Data => Component_Data, The_Heap => The_Heap); elsif Syntax_Node_Type (Node => Node_To_Check) = SP_Symbols.representation_clause then -- ASSUME Node_To_Check = representation_clause Wf_Representation_Clause (Node => Node_To_Check, Current_Scope => Current_Scope, The_Heap => The_Heap); elsif Syntax_Node_Type (Node => Node_To_Check) = SP_Symbols.basic_proof_declaration then -- ASSUME Node_To_Check = basic_proof_declaration Wf_Basic_Proof_Declaration (Node => Node_To_Check, Current_Scope => Current_Scope, Component_Data => Component_Data, The_Heap => The_Heap); elsif Syntax_Node_Type (Node => Node_To_Check) = 
SP_Symbols.justification_statement then -- ASSUME Node_To_Check = justification_statement Wf_Justification_Statement (Node => Node_To_Check, Scope => Current_Scope, Component_Data => Component_Data, The_Heap => The_Heap); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node_To_Check = basic_declaration OR justification_statement OR representation_clause OR " & "basic_proof_declaration in Wf_Basic_Declarative_Item"); end if; end Wf_Basic_Declarative_Item; spark-2012.0.deb/examiner/indexmanager-cache.SHADOW.adb0000644000175000017500000006257211753202336021424 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Containers.Hashed_Maps; with Ada.Strings; with Ada.Strings.Hash_Case_Insensitive; with CommandLineData; with E_Strings.Not_SPARK; with IndexManager.Index_Table_P; with SPARK_IO; package body IndexManager.Cache is type Unit_Key_Type is record Unit : LexTokenLists.Lists; Is_Components : Boolean; -- case Is_Components is -- when False => Unit_Types : ContextManager.UnitTypes; -- end case; end record; type Unit_Element_Type is record Is_Components : Boolean; -- case Is_Components is -- when False => Source_Filename : LexTokenManager.Lex_String; -- when True => Components : IndexManager.Component_Lists; -- end case; Index_Filename : LexTokenManager.Lex_String; end record; # if not SPARK then -- The Ada declaration of the hash table. function Unit_Hash (Key : Unit_Key_Type) return Ada.Containers.Hash_Type is use type Ada.Containers.Hash_Type; Return_Val : Ada.Containers.Hash_Type := 0; begin for I in LexTokenLists.Positions'First .. 
LexTokenLists.Get_Length (List => Key.Unit) loop Return_Val := Return_Val + Ada.Strings.Hash_Case_Insensitive (E_Strings.Not_SPARK.Get_String (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => LexTokenLists.Get_Element (List => Key.Unit, Pos => I)))); end loop; return Return_Val; end Unit_Hash; function Unit_Equivalent_Keys (Left, Right : Unit_Key_Type) return Boolean is use type ContextManager.UnitTypes; begin if CommandLineData.Content.Debug.File_Names then SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "INDEXMANAGER.CACHE.UNIT_EQUIVALENT_KEYS ", Stop => 0); LexTokenLists.Print_List (File => SPARK_IO.Standard_Output, List => Left.Unit); SPARK_IO.Put_Char (File => SPARK_IO.Standard_Output, Item => ' '); LexTokenLists.Print_List (File => SPARK_IO.Standard_Output, List => Right.Unit); SPARK_IO.New_Line (File => SPARK_IO.Standard_Output, Spacing => 1); end if; return Left.Is_Components = Right.Is_Components and then LexTokenLists.Eq_Unit (First_Item => Left.Unit, Second => Right.Unit) and then (Left.Is_Components or else Left.Unit_Types = Right.Unit_Types); end Unit_Equivalent_Keys; function Eq_Unit_Element_Type (Left, Right : Unit_Element_Type) return Boolean; package Unit_Hash_P is new Ada.Containers.Hashed_Maps (Key_Type => Unit_Key_Type, Element_Type => Unit_Element_Type, Hash => Unit_Hash, Equivalent_Keys => Unit_Equivalent_Keys, "=" => Eq_Unit_Element_Type); function Get_Element (Position : Unit_Hash_P.Cursor) return Unit_Element_Type renames Unit_Hash_P.Element; # else -- The SPARK declaration of the hash table. --# inherit Cache; package Unit_Hash_P is type Map is array (Positive) of Cache.Unit_Element_Type; type Cursor is private; No_Element : constant Cursor; procedure Replace_Element (Container : in out Map; Position : in Cursor; New_Item : in Cache.Unit_Element_Type); --# derives Container from *, --# New_Item, --# Position; procedure Insert (Container : in out Map; Key : in Cache.Unit_Key_Type; New_Item : in Cache.Unit_Element_Type; Position : out Cursor; Inserted : out Boolean); --# derives Container, --# Inserted, --# Position from Container, --# Key, --# New_Item; function Find (Container : Map; Key : Cache.Unit_Key_Type) return Cursor; private --# hide Unit_Hash_P; end Unit_Hash_P; # end if; function "=" (Left, Right : Unit_Hash_P.Cursor) return Boolean renames Unit_Hash_P."="; The_Unit_Hash : Unit_Hash_P.Map; # if SPARK then -- The SPARK body of the hash table. 
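   -- (The full-Ada view of Unit_Hash_P above and the SPARK shadow view are
   --  selected from this .SHADOW file by preprocessing on the "SPARK" symbol.
   --  A gnatprep-style invocation along the lines of
   --     gnatprep -DSPARK=True indexmanager-cache.SHADOW.adb indexmanager-cache.adb
   --  is assumed here for illustration only; the actual tool and build rule
   --  are determined by the build system, not by this comment.)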
package body Unit_Hash_P is --# hide Unit_Hash_P; end Unit_Hash_P; function Get_Element (Position : Unit_Hash_P.Cursor) return Unit_Element_Type is --# hide Get_Element; begin null; end Get_Element; # end if; function Eq_Unit_Element_Type (Left, Right : Unit_Element_Type) return Boolean --# global LexTokenManager.State; is procedure Trace --# derives ; is --# hide Trace; begin if CommandLineData.Content.Debug.File_Names then SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "INDEXMANAGER.CACHE.EQ_UNIT_ELEMENT_TYPE ", Stop => 0); if Left.Is_Components then Index_Table_P.Debug_Put_E_Str (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Left.Source_Filename), New_Line => False); else SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "NO SOURCE FILENAME", Stop => 0); end if; SPARK_IO.Put_Char (File => SPARK_IO.Standard_Output, Item => ' '); if Right.Is_Components then Index_Table_P.Debug_Put_E_Str (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Right.Source_Filename), New_Line => False); else SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "NO SOURCE FILENAME", Stop => 0); end if; SPARK_IO.New_Line (File => SPARK_IO.Standard_Output, Spacing => 1); end if; end Trace; function Eq_Component_Lists (Item1, Item2 : IndexManager.Component_Lists) return Boolean --# global LexTokenManager.State; is Return_Val : Boolean := True; begin Trace; for I in IndexManager.Component_Index loop if not LexTokenLists.Eq_Unit (First_Item => Item1 (I), Second => Item2 (I)) then Return_Val := False; end if; exit when not Return_Val or else (Item1 (I) = LexTokenLists.Null_List and then Item2 (I) = LexTokenLists.Null_List); end loop; return Return_Val; end Eq_Component_Lists; begin return Left.Is_Components = Right.Is_Components and then ((not Left.Is_Components and then (LexTokenManager.Lex_String_Case_Sensitive_Compare (Lex_Str1 => Left.Source_Filename, Lex_Str2 => Right.Source_Filename) = LexTokenManager.Str_Eq)) or else (Left.Is_Components and then Eq_Component_Lists (Item1 => Left.Components, Item2 => Right.Components))); end Eq_Unit_Element_Type; procedure Context_Manager_Unit_Types_Image (Unit_Type : in ContextManager.UnitTypes) is --# hide Context_Manager_Unit_Types_Image; begin SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => ContextManager.UnitTypes'Image (Unit_Type), Stop => 0); end Context_Manager_Unit_Types_Image; procedure Add_Unit (Unit : in LexTokenLists.Lists; Unit_Types : in ContextManager.UnitTypes; Source_Filename : in E_Strings.T; Index_Filename : in LexTokenManager.Lex_String; Index_Position : in IndexManager.File_Position) is Position : Unit_Hash_P.Cursor; Inserted : Boolean; New_Item : Unit_Element_Type; Lex_Str_Source_Filename : LexTokenManager.Lex_String; begin if CommandLineData.Content.Debug.File_Names then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "INDEXMANAGER.CACHE.ADD_UNIT ", Stop => 0); LexTokenLists.Print_List (File => SPARK_IO.Standard_Output, List => Unit); SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => " ", Stop => 0); Context_Manager_Unit_Types_Image (Unit_Type => Unit_Types); SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => " ", Stop => 0); Index_Table_P.Debug_Put_E_Str (E_Str => Source_Filename, New_Line => False); SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => " from index file ", Stop => 0); Index_Table_P.Debug_Put_E_Str (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Index_Filename), New_Line => False); 
SPARK_IO.New_Line (File => SPARK_IO.Standard_Output, Spacing => 1); end if; -- Try to insert a unit in the hash table. LexTokenManager.Insert_Examiner_String (Str => Source_Filename, Lex_Str => Lex_Str_Source_Filename); New_Item := Unit_Element_Type'(Is_Components => False, Source_Filename => Lex_Str_Source_Filename, Components => IndexManager.Component_Lists'(others => LexTokenLists.Null_List), Index_Filename => Index_Filename); Unit_Hash_P.Insert (Container => The_Unit_Hash, Key => Unit_Key_Type'(Unit => Unit, Is_Components => False, Unit_Types => Unit_Types), New_Item => New_Item, Position => Position, Inserted => Inserted); if not Inserted then -- The unit has not been inserted in the hash table because -- it is already present. if Index_Table_P.Is_Aux_File_Ancestor (Parent_Index_Filename => Get_Element (Position => Position).Index_Filename, Index_Filename => Index_Filename) then if Eq_Unit_Element_Type (Get_Element (Position => Position), New_Item) then -- The same values for the same unit at the same level -- => duplicate in the index files => raise a warning. Index_Table_P.Output_Error (E => IndexManager.EW_Duplicate, Source_File => Index_Filename, Token_Position => Index_Position, Token_String => LexTokenLists.Token_List_To_String (Token_List => Unit)); else -- Two different values for the same unit at the same -- level => contradiction in the index files => stop -- SPARK. Index_Table_P.Output_Error (E => IndexManager.EF_Contradiction, Source_File => Index_Filename, Token_Position => Index_Position, Token_String => LexTokenLists.Token_List_To_String (Token_List => Unit)); end if; elsif Index_Table_P.Is_File_Ancestor (Parent_Filename => Index_Filename, Filename => Get_Element (Position => Position).Index_Filename) then if CommandLineData.Content.Debug.File_Names then -- Debug SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => "INDEXMANAGER.CACHE.ADD_UNIT : UPDATE THE CACHE", Stop => 0); end if; -- Two different values of the same unit but not at the -- same level => update the hash table with the more -- local definition. Unit_Hash_P.Replace_Element (Container => The_Unit_Hash, Position => Position, New_Item => New_Item); if not (LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Index_Filename, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq) and then not (LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Get_Element (Position => Position).Index_Filename, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq) and then Eq_Unit_Element_Type (Get_Element (Position => Position), New_Item) then -- The same values for the same unit but not at the -- same level => duplicate in the index files => raise -- a warning. Index_Table_P.Output_Error (E => IndexManager.EW_Duplicate, Source_File => Index_Filename, Token_Position => Index_Position, Token_String => LexTokenLists.Token_List_To_String (Token_List => Unit)); end if; elsif not (LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Index_Filename, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq) and then not (LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Get_Element (Position => Position).Index_Filename, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq) and then Eq_Unit_Element_Type (Get_Element (Position => Position), New_Item) then -- The same values for the same unit but not at the same -- level => duplicate in the index files => raise a -- warning. 
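            -- As a concrete picture of the duplicate and contradiction cases
            -- handled in this procedure (unit and file names invented, index
            -- entry syntax shown informally):
            --    Stacks specification is in stacks.ads      -- in one index file
            --    Stacks specification is in stacks.ads      -- repeated elsewhere: duplicate (warning)
            --    Stacks specification is in old_stacks.ads  -- a different file for the same unit
            --                                               -- at the same level: contradiction (fatal)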
Index_Table_P.Output_Error (E => IndexManager.EW_Duplicate, Source_File => Index_Filename, Token_Position => Index_Position, Token_String => LexTokenLists.Token_List_To_String (Token_List => Unit)); end if; end if; end Add_Unit; procedure Get_Unit (Required_Unit : in LexTokenLists.Lists; Unit_Types : in ContextManager.UnitTypes; Source_Filename : out LexTokenManager.Lex_String; Index_Filename : out LexTokenManager.Lex_String; Found : out Boolean) is Position : Unit_Hash_P.Cursor; Unit_Element : Unit_Element_Type; begin Position := Unit_Hash_P.Find (Container => The_Unit_Hash, Key => Unit_Key_Type'(Unit => Required_Unit, Is_Components => False, Unit_Types => Unit_Types)); if Position = Unit_Hash_P.No_Element then Source_Filename := LexTokenManager.Null_String; Index_Filename := LexTokenManager.Null_String; Found := False; else Unit_Element := Get_Element (Position => Position); Source_Filename := Unit_Element.Source_Filename; Index_Filename := Unit_Element.Index_Filename; Found := True; end if; end Get_Unit; procedure Add_Components (Unit : in LexTokenLists.Lists; Components : in IndexManager.Component_Lists; Index_Filename : in LexTokenManager.Lex_String; Index_Position : in IndexManager.File_Position) is Position : Unit_Hash_P.Cursor; Inserted : Boolean; New_Item : Unit_Element_Type; begin if CommandLineData.Content.Debug.File_Names then -- Debug SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => "INDEXMANAGER.CACHE.ADD_COMPONENTS ", Stop => 0); LexTokenLists.Print_List (File => SPARK_IO.Standard_Output, List => Unit); SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => " from index file ", Stop => 0); Index_Table_P.Debug_Put_E_Str (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Index_Filename), New_Line => False); SPARK_IO.New_Line (File => SPARK_IO.Standard_Output, Spacing => 1); end if; -- Try to insert a unit in the hash table. New_Item := Unit_Element_Type'(Is_Components => True, Source_Filename => LexTokenManager.Null_String, Components => Components, Index_Filename => Index_Filename); Unit_Hash_P.Insert (Container => The_Unit_Hash, Key => Unit_Key_Type'(Unit => Unit, Is_Components => True, Unit_Types => ContextManager.InvalidUnit), New_Item => New_Item, Position => Position, Inserted => Inserted); if not Inserted then -- The unit has not been inserted in the hash table because -- it is already present. if Index_Table_P.Is_Aux_File_Ancestor (Parent_Index_Filename => Get_Element (Position => Position).Index_Filename, Index_Filename => Index_Filename) then if Eq_Unit_Element_Type (Get_Element (Position => Position), New_Item) then -- The same values for the same unit at the same level -- => duplicate in the index files => raise a warning. Index_Table_P.Output_Error (E => IndexManager.EW_Duplicate, Source_File => Index_Filename, Token_Position => Index_Position, Token_String => LexTokenLists.Token_List_To_String (Token_List => Unit)); else -- Two different values for the same unit at the same -- level => contradiction in the index files => stop -- SPARK. 
Index_Table_P.Output_Error (E => IndexManager.EF_Contradiction, Source_File => Index_Filename, Token_Position => Index_Position, Token_String => LexTokenLists.Token_List_To_String (Token_List => Unit)); end if; elsif Index_Table_P.Is_File_Ancestor (Parent_Filename => Index_Filename, Filename => Get_Element (Position => Position).Index_Filename) then if CommandLineData.Content.Debug.File_Names then -- Debug SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => "INDEXMANAGER.CACHE.ADD_COMPONENTS : UPDATE THE CACHE", Stop => 0); end if; -- Two different values of the same unit but not at the -- same level => update the hash table with the more -- local definition. Unit_Hash_P.Replace_Element (Container => The_Unit_Hash, Position => Position, New_Item => New_Item); if not (LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Index_Filename, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq) and then not (LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Get_Element (Position => Position).Index_Filename, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq) and then Eq_Unit_Element_Type (Get_Element (Position => Position), New_Item) then -- The same values for the same unit but not at the -- same level => duplicate in the index files => raise -- a warning. Index_Table_P.Output_Error (E => IndexManager.EW_Duplicate, Source_File => Index_Filename, Token_Position => Index_Position, Token_String => LexTokenLists.Token_List_To_String (Token_List => Unit)); end if; elsif not (LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Index_Filename, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq) and then not (LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Get_Element (Position => Position).Index_Filename, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq) and then Eq_Unit_Element_Type (Get_Element (Position => Position), New_Item) then -- The same values for the same unit but not at the same -- level => duplicate in the index files => raise a -- warning. 
Index_Table_P.Output_Error (E => IndexManager.EW_Duplicate, Source_File => Index_Filename, Token_Position => Index_Position, Token_String => LexTokenLists.Token_List_To_String (Token_List => Unit)); end if; end if; end Add_Components; procedure Get_Components (Required_Unit : in LexTokenLists.Lists; Components : out IndexManager.Component_Lists; Index_Filename : out LexTokenManager.Lex_String; Found : out Boolean) is Position : Unit_Hash_P.Cursor; Unit_Element : Unit_Element_Type; begin Position := Unit_Hash_P.Find (Container => The_Unit_Hash, Key => Unit_Key_Type'(Unit => Required_Unit, Is_Components => True, Unit_Types => ContextManager.InvalidUnit)); if Position = Unit_Hash_P.No_Element then Components := IndexManager.Component_Lists'(others => LexTokenLists.Null_List); Index_Filename := LexTokenManager.Null_String; Found := False; else Unit_Element := Get_Element (Position => Position); Components := Unit_Element.Components; Index_Filename := Unit_Element.Index_Filename; Found := True; end if; end Get_Components; # if SPARK then begin The_Unit_Hash := Unit_Hash_P.Map' (others => Unit_Element_Type'(Is_Components => False, Source_Filename => LexTokenManager.Null_String, Components => IndexManager.Component_Lists'(others => LexTokenLists.Null_List), Index_Filename => LexTokenManager.Null_String)); # end if; end IndexManager.Cache; spark-2012.0.deb/examiner/errorhandler-conversions-tostring.adb0000644000175000017500000005467411753202336023645 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (ErrorHandler.Conversions) procedure ToString (Err_Num : in Error_Types.NumericError; Purpose : in Error_Types.ConversionRequestSource; Err_Str : out Error_Types.StringError) is Error_String : E_Strings.T; Explanation_Needed : Boolean; procedure AppendReference (E_Str : in out E_Strings.T; Reference : in Natural) --# global in CommandLineData.Content; --# in out Source_Used; --# derives E_Str, --# Source_Used from *, --# CommandLineData.Content, --# Reference; is separate; ------------------------------------------------------------ procedure Append_Lex_String (E_Str : in out E_Strings.T; L_Str : in LexTokenManager.Lex_String) --# global in LexTokenManager.State; --# derives E_Str from *, --# LexTokenManager.State, --# L_Str; is pragma Inline (Append_Lex_String); begin E_Strings.Append_Examiner_String (E_Str1 => E_Str, E_Str2 => LexTokenManager.Lex_String_To_String (Lex_Str => L_Str)); end Append_Lex_String; ------------------------------------------------------------ procedure Append_Symbol (E_Str : in out E_Strings.T; Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# derives E_Str from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# Sym; is Package_Ex_Str : E_Strings.T; begin -- put Access on the front of access types before constructing rest of string if Dictionary.IsType (Sym) and then Dictionary.TypeIsAccess (Sym) then E_Strings.Append_String (E_Str => E_Str, Str => "Access "); end if; -- construct rest of string Package_Ex_Str := Dictionary.GetAnyPrefixNeeded (Sym => Sym, Scope => Scope, Separator => "."); if E_Strings.Get_Length (E_Str => Package_Ex_Str) > 0 then E_Strings.Append_Examiner_String (E_Str1 => E_Str, E_Str2 => Package_Ex_Str); E_Strings.Append_String (E_Str => E_Str, Str => "."); end if; E_Strings.Append_Examiner_String (E_Str1 => E_Str, E_Str2 => Dictionary.GenerateSimpleName (Item => Sym, Separator => ".")); end Append_Symbol; ------------------------------------------------------------------------ procedure Append_Name (E_Str : in out E_Strings.T; Name : in Error_Types.Names; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# derives E_Str from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Name, --# Scope; is begin case Name.Name_Sort is when Error_Types.None => null; when Error_Types.LexString => Append_Lex_String (E_Str => E_Str, L_Str => Name.Name_Str); when Error_Types.Entity => null; when Error_Types.Symbol => Append_Symbol (E_Str => E_Str, Sym => Name.Name_Sym, Scope => Scope); when Error_Types.ParserSymbol => null; when Error_Types.StabilityIndex => null; when Error_Types.ThePartition => null; end case; end Append_Name; ------------------------------------------------------------ procedure Append_Export_Var (E_Str : in out E_Strings.T; Name : in Error_Types.Names; Scope : in Dictionary.Scopes; Capitalise : in Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# derives E_Str from *, --# Capitalise, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Name, --# Scope; is begin if Name = Error_Types.NoName then if Capitalise then E_Strings.Append_String (E_Str => E_Str, Str => "T"); else E_Strings.Append_String (E_Str => E_Str, Str 
=> "t"); end if; E_Strings.Append_String (E_Str => E_Str, Str => "he function value"); else Append_Name (E_Str => E_Str, Name => Name, Scope => Scope); end if; end Append_Export_Var; ------------------------------------------------------------ procedure UncondFlowErr (Err_Num : in Error_Types.NumericError; With_Explanation : in Boolean; E_Str : in out E_Strings.T) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# derives E_Str from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Err_Num, --# LexTokenManager.State, --# With_Explanation; is separate; ------------------------------------------------------------ procedure CondlFlowErr (Err_Num : in Error_Types.NumericError; With_Explanation : in Boolean; E_Str : in out E_Strings.T) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# derives E_Str from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Err_Num, --# LexTokenManager.State, --# With_Explanation; is separate; ------------------------------------------------------------ procedure UncondDependency (Err_Num : in Error_Types.NumericError; With_Explanation : in Boolean; E_Str : in out E_Strings.T) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# derives E_Str from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Err_Num, --# LexTokenManager.State, --# With_Explanation; is separate; ------------------------------------------------------------ procedure CondlDependency (Err_Num : in Error_Types.NumericError; With_Explanation : in Boolean; E_Str : in out E_Strings.T) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# derives E_Str from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Err_Num, --# LexTokenManager.State, --# With_Explanation; is separate; ------------------------------------------------------------ procedure SemanticErr (Err_Num : in Error_Types.NumericError; With_Explanation : in Boolean; E_Str : in out E_Strings.T) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Source_Used; --# derives E_Str from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Err_Num, --# LexTokenManager.State, --# With_Explanation & --# Source_Used from *, --# CommandLineData.Content, --# Err_Num; is separate; ------------------------------------------------------------ procedure DepSemanticErr (Err_Num : in Error_Types.NumericError; With_Explanation : in Boolean; E_Str : in out E_Strings.T) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# derives E_Str from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Err_Num, --# LexTokenManager.State, --# With_Explanation; is separate; ------------------------------------------------------------ procedure WarningWithPosition (Err_Num : in Error_Types.NumericError; With_Explanation : in Boolean; E_Str : in out E_Strings.T) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# derives E_Str from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Err_Num, --# LexTokenManager.State, --# With_Explanation; is separate; ------------------------------------------------------------ procedure WarningWithoutPosition (Err_Num : in Error_Types.NumericError; With_Explanation : in Boolean; E_Str : in out E_Strings.T) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in 
LexTokenManager.State; --# derives E_Str from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Err_Num, --# LexTokenManager.State, --# With_Explanation; is separate; ------------------------------------------------------------ procedure ControlFlowError (Err_Num : in Error_Types.NumericError; With_Explanation : in Boolean; E_Str : in out E_Strings.T) --# derives E_Str from *, --# Err_Num, --# With_Explanation; is separate; ------------------------------------------------------------ procedure IneffectiveStatement (Err_Num : in Error_Types.NumericError; With_Explanation : in Boolean; E_Str : in out E_Strings.T) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# derives E_Str from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Err_Num, --# LexTokenManager.State, --# With_Explanation; is separate; ------------------------------------------------------------ procedure StabilityError (Err_Num : in Error_Types.NumericError; With_Explanation : in Boolean; E_Str : in out E_Strings.T) --# derives E_Str from *, --# Err_Num, --# With_Explanation; is separate; ------------------------------------------------------------ procedure UsageError (Err_Num : in Error_Types.NumericError; With_Explanation : in Boolean; E_Str : in out E_Strings.T) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# derives E_Str from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Err_Num, --# LexTokenManager.State, --# With_Explanation; is separate; ------------------------------------------------------------ procedure NoErr (Err_Num : in Error_Types.NumericError; E_Str : in out E_Strings.T) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# derives E_Str from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Err_Num, --# LexTokenManager.State; is separate; ------------------------------------------------------------ procedure Note (Err_Num : in Error_Types.NumericError; With_Explanation : in Boolean; E_Str : in out E_Strings.T) --# derives E_Str from *, --# Err_Num, --# With_Explanation; is separate; ------------------------------------------------------------ procedure Syntax_Or_Lex_Error (Err_Num : in Error_Types.NumericError; E_Str : in out E_Strings.T) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# derives E_Str from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Err_Num, --# LexTokenManager.State; is begin -- When a syntax error has been constructed the entire text string of the error -- gets put into the string table and included as Name1 in the numeric form of the -- error record. Conversion back to a string just needs the following: Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); end Syntax_Or_Lex_Error; ------------------------------------------------------------ procedure Check_Explanation (Explanation_Class : in Explanation_Classes; Error_Number : in Error_Types.ErrNumRange; Purpose : in Error_Types.ConversionRequestSource; Explanation_Needed : out Boolean) --# global in CommandLineData.Content; --# in out Explanation_Table; --# derives Explanation_Needed, --# Explanation_Table from CommandLineData.Content, --# Error_Number, --# Explanation_Class, --# Explanation_Table, --# Purpose; is begin -- In general, explanation depend on command line switch setting and whether that explanation -- has appeared before. 
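      -- Informal summary of the policy applied here (the case statement that
      -- follows is the authoritative behaviour):
      --    Off              : never append an explanation;
      --    First_Occurrence : append it only the first time a given message
      --                       is converted for a given output purpose;
      --    Every_Occurrence : append it every time.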
case CommandLineData.Content.Error_Explanation is when CommandLineData.Off => Explanation_Needed := False; when CommandLineData.First_Occurrence => Explanation_Needed := not Explanation_Table (Explanation_Class) (Error_Number) (Purpose); Explanation_Table (Explanation_Class) (Error_Number) (Purpose) := True; when CommandLineData.Every_Occurrence => Explanation_Needed := True; end case; -- But we also have a special case where we turn explanations off if (HTML and (Purpose=ForReport)). -- This is because explanations are only a click away when looking at HTML report files, so why clutter up screen? if CommandLineData.Content.HTML and then Purpose in Error_Types.ForReport then Explanation_Needed := False; end if; -- We also turn it off for XML generation, at least for now. if CommandLineData.Content.XML then Explanation_Needed := False; end if; end Check_Explanation; ------------------------------------------------------------ begin --ToString if Err_Num = Error_Types.Empty_NumericError then Err_Str := Error_Types.Empty_StringError; else Error_String := E_Strings.Empty_String; case Err_Num.ErrorType is when Error_Types.UncondFlowErr => Check_Explanation (Explanation_Class => Flow_Errors, Error_Number => Err_Num.ErrorNum, Purpose => Purpose, Explanation_Needed => Explanation_Needed); UncondFlowErr (Err_Num, Explanation_Needed, Error_String); when Error_Types.CondlFlowErr => Check_Explanation (Explanation_Class => Flow_Errors, Error_Number => Err_Num.ErrorNum, Purpose => Purpose, Explanation_Needed => Explanation_Needed); CondlFlowErr (Err_Num, Explanation_Needed, Error_String); when Error_Types.UncondDependencyErr => Check_Explanation (Explanation_Class => Dependency_Errs, Error_Number => Err_Num.ErrorNum, Purpose => Purpose, Explanation_Needed => Explanation_Needed); UncondDependency (Err_Num, Explanation_Needed, Error_String); when Error_Types.CondlDependencyErr => Check_Explanation (Explanation_Class => Dependency_Errs, Error_Number => Err_Num.ErrorNum, Purpose => Purpose, Explanation_Needed => Explanation_Needed); CondlDependency (Err_Num, Explanation_Needed, Error_String); when Error_Types.SemanticErr => Check_Explanation (Explanation_Class => Semantic_Errs, Error_Number => Err_Num.ErrorNum, Purpose => Purpose, Explanation_Needed => Explanation_Needed); SemanticErr (Err_Num, Explanation_Needed, Error_String); when Error_Types.DepSemanticErr => Check_Explanation (Explanation_Class => Dep_Semantic_Errs, Error_Number => Err_Num.ErrorNum, Purpose => Purpose, Explanation_Needed => Explanation_Needed); DepSemanticErr (Err_Num, Explanation_Needed, Error_String); when Error_Types.WarningWithPosition => Check_Explanation (Explanation_Class => Warnings, Error_Number => Err_Num.ErrorNum, Purpose => Purpose, Explanation_Needed => Explanation_Needed); WarningWithPosition (Err_Num, Explanation_Needed, Error_String); when Error_Types.WarningWithoutPosition => Check_Explanation (Explanation_Class => Warnings, Error_Number => Err_Num.ErrorNum, Purpose => Purpose, Explanation_Needed => Explanation_Needed); WarningWithoutPosition (Err_Num, Explanation_Needed, Error_String); when Error_Types.ControlFlowErr => Check_Explanation (Explanation_Class => Control_Flows, Error_Number => Err_Num.ErrorNum, Purpose => Purpose, Explanation_Needed => Explanation_Needed); ControlFlowError (Err_Num, Explanation_Needed, Error_String); when Error_Types.NoErr => NoErr (Err_Num, Error_String); when Error_Types.IneffectiveStat => Check_Explanation (Explanation_Class => Ineffective_Statements, Error_Number => Err_Num.ErrorNum, 
Purpose => Purpose, Explanation_Needed => Explanation_Needed); IneffectiveStatement (Err_Num, Explanation_Needed, Error_String); when Error_Types.StabilityErr => Check_Explanation (Explanation_Class => Flow_Errors, Error_Number => Err_Num.ErrorNum, Purpose => Purpose, Explanation_Needed => Explanation_Needed); StabilityError (Err_Num, Explanation_Needed, Error_String); when Error_Types.UsageErr => Check_Explanation (Explanation_Class => Flow_Errors, Error_Number => Err_Num.ErrorNum, Purpose => Purpose, Explanation_Needed => Explanation_Needed); UsageError (Err_Num, Explanation_Needed, Error_String); when Error_Types.Note => Check_Explanation (Explanation_Class => Notes, Error_Number => Err_Num.ErrorNum, Purpose => Purpose, Explanation_Needed => Explanation_Needed); Note (Err_Num, Explanation_Needed, Error_String); when Error_Types.SyntaxErr | Error_Types.LexErr | Error_Types.SyntaxRec => Syntax_Or_Lex_Error (Err_Num => Err_Num, E_Str => Error_String); end case; Err_Str := Error_Types.StringError' (ErrorType => Err_Num.ErrorType, Position => Err_Num.Position, Message => Error_String, MessageId => Err_Num.ErrorNum); end if; end ToString; spark-2012.0.deb/examiner/adjustfdl_rws.adb0000644000175000017500000000304411753202335017574 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SparkLex; package body AdjustFDL_RWs is procedure Possibly_Adjust (E_Str : in out E_Strings.T; Prefix : in E_Strings.T) is Temp_Str : E_Strings.T; begin if SparkLex.Check_FDL_RW (Ex_Str => E_Str) then Temp_Str := Prefix; E_Strings.Append_String (E_Str => Temp_Str, Str => "__"); E_Strings.Append_Examiner_String (E_Str1 => Temp_Str, E_Str2 => E_Str); E_Str := Temp_Str; end if; end Possibly_Adjust; end AdjustFDL_RWs; spark-2012.0.deb/examiner/sem-wf_array_type_definition.adb0000644000175000017500000001746611753202336022604 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SLI; separate (Sem) procedure Wf_Array_Type_Definition (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Ident_Node : in STree.SyntaxNode; Dec_Loc : in LexTokenManager.Token_Position; The_Array : out Dictionary.Symbol) is Root_Node, Type_Node, Next_Node : STree.SyntaxNode; It : STree.Iterator; Constrained : Boolean; Type_Sym, The_Array_Index : Dictionary.Symbol; Type_Pos : LexTokenManager.Token_Position; begin Root_Node := Child_Node (Current_Node => Node); -- ASSUME Root_Node = unconstrained_array_definition OR constrained_array_definition SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Root_Node) = SP_Symbols.unconstrained_array_definition or else Syntax_Node_Type (Node => Root_Node) = SP_Symbols.constrained_array_definition, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Root_Node = unconstrained_array_definition OR constrained_array_definition in Wf_Array_Type_Definition"); Constrained := Syntax_Node_Type (Node => Root_Node) = SP_Symbols.constrained_array_definition; Root_Node := Child_Node (Current_Node => Root_Node); -- ASSUME Root_Node = unconstrained_array_definition_rep OR index_constraint SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Root_Node) = SP_Symbols.unconstrained_array_definition_rep or else Syntax_Node_Type (Node => Root_Node) = SP_Symbols.index_constraint, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Root_Node = unconstrained_array_definition_rep OR index_constraint in Wf_Array_Type_Definition"); Type_Node := Next_Sibling (Current_Node => Root_Node); -- ASSUME Type_Node = type_mark SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Type_Node) = SP_Symbols.type_mark, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Type_Node = type_mark in Wf_Array_Type_Definition"); Type_Pos := Node_Position (Node => Type_Node); The_Array := Dictionary.GetUnknownTypeMark; -- default answer in case of errors Wf_Type_Mark (Node => Type_Node, Current_Scope => Scope, Context => Dictionary.ProgramContext, Type_Sym => Type_Sym); if Dictionary.Is_Unconstrained_Array_Type_Mark (Type_Sym, Scope) then ErrorHandler.Semantic_Error (Err_Num => 39, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Type_Node), Id_Str => LexTokenManager.Null_String); end if; -- Check that the type is not a suspension object or protected type if Dictionary.IsPredefinedSuspensionObjectType (Type_Sym) or else Dictionary.IsProtectedTypeMark (Type_Sym) then ErrorHandler.Semantic_Error (Err_Num => 906, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Type_Node), Id_Str => LexTokenManager.Null_String); else Dictionary.Add_Array_Type (Name => Node_Lex_String (Node => Ident_Node), Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Dec_Loc, End_Position => Dec_Loc), Scope => Scope, Context => Dictionary.ProgramContext, Constrained => Constrained, Component_Type => Type_Sym, Component_Type_Reference => Dictionary.Location'(Start_Position => Type_Pos, End_Position => Type_Pos), The_Type => The_Array); STree.Add_Node_Symbol (Node => Ident_Node, Sym => The_Array); if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Symbol (Comp_Unit => ContextManager.Ops.Current_Unit, 
Parse_Tree => Ident_Node, Symbol => The_Array, Is_Declaration => True); end if; -- now loop through all the index type marks It := Find_First_Node (Node_Kind => SP_Symbols.type_mark, From_Root => Root_Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.type_mark and --# Next_Node = Get_Node (It); Wf_Type_Mark (Node => Next_Node, Current_Scope => Scope, Context => Dictionary.ProgramContext, Type_Sym => Type_Sym); if not Dictionary.IsUnknownTypeMark (Type_Sym) then if Dictionary.Types_Are_Equal (Left_Symbol => Type_Sym, Right_Symbol => The_Array, Full_Range_Subtype => False) then -- Type of index is same as type of array being declared ErrorHandler.Semantic_Error (Err_Num => 750, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Dictionary.GetSimpleName (Type_Sym)); else -- no self-reference attempted if not Dictionary.IsDiscreteTypeMark (Type_Sym, Scope) then ErrorHandler.Semantic_Error (Err_Num => 46, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => LexTokenManager.Null_String); end if; if not Dictionary.TypeIsWellformed (Type_Sym) then ErrorHandler.Semantic_Error (Err_Num => 47, Reference => 1, Position => Node_Position (Node => Next_Node), Id_Str => LexTokenManager.Null_String); end if; end if; end if; Dictionary.AddArrayIndex (TheArrayType => The_Array, IndexType => Type_Sym, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Next_Node), End_Position => Node_Position (Node => Next_Node)), TheArrayIndex => The_Array_Index); STree.Add_Node_Symbol (Node => Next_Node, Sym => The_Array_Index); It := STree.NextNode (It); end loop; end if; end Wf_Array_Type_Definition; spark-2012.0.deb/examiner/sem-wf_context_clause_package_body-with_clause.adb0000644000175000017500000004644211753202336026226 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- Overview: -- Checks a with clause for Sem starting at node with_clause. -- Directly capable of raising errors for: undeclared item in with list, -- duplicate item in with list or withing of something which is not a -- package. -- -- NB. In present form permits with for something not inherited; this is -- necessary for withing something to be used solely in hidden part -- (eg. text_io by spark_io). However, we wish to issue a -- semantic warning in such circumstances. 
-- It is also necessary to with something not inherited in the case -- where an inherit cannot be placed; for example where a package -- body withs a private child package. -------------------------------------------------------------------------------- separate (Sem.Wf_Context_Clause_Package_Body) procedure With_Clause (Node : in STree.SyntaxNode; Comp_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; With_Public_Child : out Boolean) is It : STree.Iterator; Next_Node : STree.SyntaxNode; A_Public_Child : Boolean; ----------------------------- procedure Process_Dotted_Simple_Name (Node : in STree.SyntaxNode; Comp_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; A_Public_Child : out Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives A_Public_Child, --# Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# Comp_Sym, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Comp_Sym, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.dotted_simple_name; --# post STree.Table = STree.Table~; is Prefix_Sym : Dictionary.Symbol := Dictionary.NullSymbol; Current_Sym : Dictionary.Symbol; Current_Node : STree.SyntaxNode; Explicit_Duplicate : Boolean; Withing_Descendent : Boolean := False; Discard : Boolean; Lib_Sym : Dictionary.Symbol; Search_String : LexTokenManager.Lex_String; ---------------------------- function Dotted_Identifier_Found (Node : STree.SyntaxNode) return Boolean --# global in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.dotted_simple_name; is Current_Node : STree.SyntaxNode; begin Current_Node := Child_Node (Current_Node => Node); -- ASSUME Current_Node = dotted_simple_name OR identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Current_Node) = SP_Symbols.dotted_simple_name or Syntax_Node_Type (Node => Current_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = dotted_simple_name OR identifier in Dotted_Simple_Name"); return Syntax_Node_Type (Node => Current_Node) = SP_Symbols.dotted_simple_name; end Dotted_Identifier_Found; ------------------- function Is_Last_Identifier_Node (Node : STree.SyntaxNode) return Boolean --# global in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.identifier; is begin return Syntax_Node_Type (Node => Parent_Node (Current_Node => Parent_Node (Current_Node => Node))) /= SP_Symbols.dotted_simple_name; end Is_Last_Identifier_Node; -------------------- function Look_Up (Prefix : in Dictionary.Symbol; Str : in LexTokenManager.Lex_String; Scope : in Dictionary.Scopes; Full_Package_Name : in Boolean) return Dictionary.Symbol --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; is Sym : Dictionary.Symbol; begin if Dictionary.Is_Null_Symbol (Prefix) then Sym := Dictionary.LookupItem (Name => Str, Scope => Scope, Context => Dictionary.ProofContext, Full_Package_Name => Full_Package_Name); else Sym := Dictionary.LookupSelectedItem (Prefix => Prefix, Selector 
=> Str, Scope => Scope, Context => Dictionary.ProofContext); end if; return Sym; end Look_Up; begin -- Process_Dotted_Simple_Name A_Public_Child := False; if Dotted_Identifier_Found (Node => Node) and then CommandLineData.Content.Language_Profile = CommandLineData.SPARK83 then ErrorHandler.Semantic_Error (Err_Num => 610, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); else Lib_Sym := Dictionary.GetLibraryPackage (Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Comp_Sym)); Current_Node := Last_Child_Of (Start_Node => Node); loop --# assert STree.Table = STree.Table~; -- ASSUME Current_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Current_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = identifier in Process_Dotted_Simple_Name"); -- look up in global scope first: Search_String := Node_Lex_String (Node => Current_Node); Current_Sym := Look_Up (Prefix => Prefix_Sym, Str => Search_String, Scope => Dictionary.GlobalScope, Full_Package_Name => True); if Dictionary.IsPackage (Current_Sym) and then not Dictionary.Packages_Are_Equal (Left_Symbol => Current_Sym, Right_Symbol => Lib_Sym) then -- package exists and is not self -- if necessary, check inherited by looking up in current scope if Dictionary.IsProperDescendent (Current_Sym, Lib_Sym) then -- Withing a private descendent is allowed if Dictionary.Packages_Are_Equal (Left_Symbol => Dictionary.GetPackageOwner (Current_Sym), Right_Symbol => Lib_Sym) then Withing_Descendent := True; A_Public_Child := False; --# accept F, 41, "Structurally this is the preferred place for this test"; elsif -- Otherwise withing a descendent is allowed when -- Language_Profile is an Auto_Code_Generator. CommandLineData.Content.Language_Profile in CommandLineData.Auto_Code_Generators then --# end accept; Withing_Descendent := True; -- Inform caller that a public child is inherited. A_Public_Child := True; else -- Not a valid with of a descendent Withing_Descendent := False; A_Public_Child := False; -- For consistency with other options make Current_Sym = null -- if not withing a descendent. 
Current_Sym := Dictionary.NullSymbol; end if; elsif Dictionary.IsProperDescendent (Lib_Sym, Current_Sym) then -- withing an ancestor if Is_Last_Identifier_Node (Node => Current_Node) then Current_Sym := Look_Up (Prefix => Dictionary.NullSymbol, Str => Search_String, Scope => Scope, Full_Package_Name => False); end if; elsif not Dictionary.Is_Null_Symbol (Prefix_Sym) and then Dictionary.IsProperDescendent (Lib_Sym, Prefix_Sym) then -- withing child of ancestor Current_Sym := Look_Up (Prefix => Dictionary.NullSymbol, Str => Search_String, Scope => Scope, Full_Package_Name => False); else Current_Sym := Look_Up (Prefix => Prefix_Sym, Str => Search_String, Scope => Scope, Full_Package_Name => True); end if; end if; if not Dictionary.Is_Null_Symbol (Current_Sym) and then not Dictionary.IsPackage (Current_Sym) and then not Dictionary.Is_Generic_Subprogram (The_Symbol => Current_Sym) then -- can't be inherited ErrorHandler.Semantic_Error (Err_Num => 18, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Current_Node), Id_Str => Search_String); exit; end if; if Is_Last_Identifier_Node (Node => Current_Node) and then Dictionary.IsPackage (Current_Sym) and then Dictionary.Packages_Are_Equal (Left_Symbol => Current_Sym, Right_Symbol => Lib_Sym) then -- trying to with self (or enclosing package) ErrorHandler.Semantic_Error (Err_Num => 132, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Current_Node), Id_Str => Search_String); exit; end if; -- extra check for private root packages, -- which cannot be with'd by specs of public packages: if not Dictionary.Is_Null_Symbol (Current_Sym) and then Dictionary.IsPackage (Current_Sym) and then -- guard for precon of next line Dictionary.IsPrivatePackage (Current_Sym) and then Dictionary.Is_Null_Symbol (Dictionary.GetPackageParent (Current_Sym)) and then Dictionary.Get_Visibility (Scope => Scope) = Dictionary.Visible and then Dictionary.Is_Null_Symbol (Dictionary.GetPackageOwner (Lib_Sym)) and then not Dictionary.IsPrivatePackage (Dictionary.GetRootPackage (Lib_Sym)) then ErrorHandler.Semantic_Error (Err_Num => 616, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Current_Node), Id_Str => Search_String); exit; end if; -- Look up will find generic functions because they have an associated -- proof function in proof context. We want to exclude them unless they have -- also been inherited. 
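   -- As an illustration only (the unit name below is just an example), a context
   -- clause such as
   --
   --    with Ada.Unchecked_Conversion;
   --
   -- with no matching "--# inherit" clause would still yield a non-null symbol at
   -- this point, because the generic's associated proof function is visible in
   -- proof context; nulling the symbol here routes the clause into the ordinary
   -- "with of something not inherited" warning below.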
if Dictionary.Is_Generic_Subprogram (The_Symbol => Current_Sym) and then not Dictionary.IsInherited (Current_Sym, Comp_Sym) then Current_Sym := Dictionary.NullSymbol; end if; if Dictionary.Is_Null_Symbol (Current_Sym) then if CommandLineData.Ravenscar_Selected and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Search_String, Lex_Str2 => LexTokenManager.Ada_Token) /= LexTokenManager.Str_Eq and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Search_String, Lex_Str2 => LexTokenManager.System_Token) /= LexTokenManager.Str_Eq then -- stronger warning for uninherited withs of non-predefined packages in Ravenscar ErrorHandler.Semantic_Warning (Err_Num => 391, Position => Node_Position (Node => Current_Node), Id_Str => Search_String); else ErrorHandler.Semantic_Warning (Err_Num => 1, Position => Node_Position (Node => Current_Node), Id_Str => Search_String); end if; exit; end if; -- check sym found is not a local redeclaration if not Dictionary.IsGlobalScope (Dictionary.GetScope (Current_Sym)) then -- This semantic error has not been checked with new error number -- because unable to find test case which causes the error. ErrorHandler.Semantic_Error (Err_Num => 133, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Current_Node), Id_Str => Search_String); exit; end if; -- there is something to add because symbol is not null Dictionary.AddWithReference (The_Visibility => Dictionary.Get_Visibility (Scope => Scope), The_Unit => Comp_Sym, The_Withed_Symbol => Current_Sym, Explicit => Is_Last_Identifier_Node (Node => Current_Node), Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Current_Node), End_Position => Node_Position (Node => Current_Node)), Already_Present => Explicit_Duplicate); if Explicit_Duplicate then ErrorHandler.Semantic_Error_Sym (Err_Num => 191, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Sym => Current_Sym, Scope => Dictionary.GlobalScope); end if; STree.Set_Node_Lex_String (Sym => Current_Sym, Node => Current_Node); -- Handle the case of a with for a descendent package. -- Relevant for all private child packages, their descendents and -- for non-private descendent public child packages only if a code -- generator language profile is selected. 
-- Add a 'fake inherit' as well as the 'with' if Withing_Descendent then --# accept Flow, 10, Discard, "Can never be explicitly duplicated, as is only ever implicit"; Dictionary.AddInheritsReference (The_Unit => Comp_Sym, The_Inherited_Symbol => Current_Sym, Explicit => False, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Current_Node), End_Position => Node_Position (Node => Current_Node)), Already_Present => Discard); --# end accept; end if; Current_Node := Parent_Node (Current_Node => Current_Node); -- ASSUME Current_Node = dotted_simple_name SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Current_Node) = SP_Symbols.dotted_simple_name, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = dotted_simple_name in Process_Dotted_Simple_Name"); exit when Current_Node = Node; Prefix_Sym := Current_Sym; -- ready for next lookup Current_Node := Next_Sibling (Current_Node => Current_Node); end loop; end if; --# accept Flow, 33, Discard, "Expected to be neither referenced nor exported"; end Process_Dotted_Simple_Name; begin -- With_Clause With_Public_Child := False; It := Find_First_Node (Node_Kind => SP_Symbols.dotted_simple_name, From_Root => Node, In_Direction => STree.Down); while not STree.IsNull (It) loop Next_Node := Get_Node (It => It); --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.dotted_simple_name and --# Next_Node = Get_Node (It); Process_Dotted_Simple_Name (Node => Next_Node, Comp_Sym => Comp_Sym, Scope => Scope, A_Public_Child => A_Public_Child); With_Public_Child := With_Public_Child or else A_Public_Child; It := STree.NextNode (It); end loop; end With_Clause; spark-2012.0.deb/examiner/sem-compunit-wf_entry_body.adb0000644000175000017500000005106411753202336022217 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.CompUnit) procedure Wf_Entry_Body (Node : in STree.SyntaxNode; Scope : in out Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; Next_Node : out STree.SyntaxNode) is -- Actions: -- (1) First identifier must be name of (sole) entry declared in spec -- (2) Second identifier must be Boolean and must be protected element -- (3) If valid, add body, set up a local scope -- (4) wff annotation; note FirstSeen is False by definition; however, second anno may not be needed -- (5) Allow main tree walk to continue in new scope -- (6) Check end designator matches if not hidden Entry_Sym, Guard_Sym : Dictionary.Symbol; Entry_Spec_Node : STree.SyntaxNode; Formal_Part_Node : STree.SyntaxNode; Ident_Node : STree.SyntaxNode; Guard_Node : STree.SyntaxNode; Anno_Node : STree.SyntaxNode; Subprogram_Implementation_Node : STree.SyntaxNode; Pragma_Rep_Node : STree.SyntaxNode; End_Node : STree.SyntaxNode; Hidden : Hidden_Class; -- check whether a second anno is needed, if it is present, and process it if necessary procedure Check_Annotation (Node_Pos : in LexTokenManager.Token_Position; Anno_Node : in STree.SyntaxNode; Entry_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# in out TheHeap; --# derives Aggregate_Stack.State, --# Component_Data, --# LexTokenManager.State, --# STree.Table from *, --# Anno_Node, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Entry_Sym, --# LexTokenManager.State, --# Scope, --# STree.Table, --# TheHeap & --# Dictionary.Dict, --# Statistics.TableUsage, --# TheHeap from *, --# Anno_Node, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Entry_Sym, --# LexTokenManager.State, --# Node_Pos, --# Scope, --# STree.Table, --# TheHeap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Anno_Node, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Entry_Sym, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# TheHeap & --# SLI.State from *, --# Anno_Node, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Entry_Sym, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# TheHeap; --# pre Syntax_Node_Type (Anno_Node, STree.Table) = SP_Symbols.procedure_annotation; --# post STree.Table = STree.Table~; is Constraint_Node : STree.SyntaxNode; -- A second annotation is only needed if the abstract global anno contains -- the implicitly-declared "own variable" that shares the name of the type. 
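      -- For example (a sketch only; the names PT, E, Guard and Data are invented
      -- for illustration):
      --
      --    protected type PT is
      --       entry E;
      --       --# global in out PT;   -- abstract anno names the implicit own variable
      --    private
      --       Guard : Boolean := False;
      --       Data  : Integer := 0;
      --    end PT;
      --
      -- Here the body of E is expected to carry a second, refined annotation phrased
      -- in terms of the protected elements (Guard, Data); if the abstract annotation
      -- did not mention PT itself, no second annotation would be required.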
function Requires_Second_Annotation (Entry_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is Result : Boolean := False; Own_Var : Dictionary.Symbol; It : Dictionary.Iterator; begin Own_Var := Dictionary.GetProtectedTypeOwnVariable (Dictionary.GetRegion (Dictionary.GetScope (Entry_Sym))); It := Dictionary.FirstGlobalVariable (Dictionary.IsAbstract, Entry_Sym); while not Dictionary.IsNullIterator (It) loop Result := Dictionary.Variables_Are_Equal (Left_Symbol => Dictionary.CurrentSymbol (It), Right_Symbol => Own_Var); exit when Result; It := Dictionary.NextSymbol (It); end loop; return Result; end Requires_Second_Annotation; function Has_Second_Annotation (Anno_Node : STree.SyntaxNode) return Boolean --# global in STree.Table; --# pre Syntax_Node_Type (Anno_Node, STree.Table) = SP_Symbols.procedure_annotation; is Child_Anno_Node : STree.SyntaxNode; begin Child_Anno_Node := Child_Node (Current_Node => Anno_Node); -- ASSUME Child_Anno_Node = moded_global_definition OR dependency_relation OR declare_annotation OR -- procedure_constraint SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Child_Anno_Node) = SP_Symbols.moded_global_definition or else Syntax_Node_Type (Node => Child_Anno_Node) = SP_Symbols.dependency_relation or else Syntax_Node_Type (Node => Child_Anno_Node) = SP_Symbols.declare_annotation or else Syntax_Node_Type (Node => Child_Anno_Node) = SP_Symbols.procedure_constraint, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child_Anno_Node = moded_global_definition OR dependency_relation OR " & "declare_annotation OR procedure_constraint in Has_Second_Annotation"); return Syntax_Node_Type (Node => Child_Anno_Node) = SP_Symbols.moded_global_definition or else Syntax_Node_Type (Node => Child_Anno_Node) = SP_Symbols.dependency_relation; end Has_Second_Annotation; begin -- Check_Annotation if Has_Second_Annotation (Anno_Node => Anno_Node) then if Requires_Second_Annotation (Entry_Sym => Entry_Sym) then -- wanted and present so process it Wf_Subprogram_Annotation (Node => Anno_Node, Current_Scope => Scope, Subprog_Sym => Entry_Sym, First_Seen => False, The_Heap => TheHeap); -- check for and handle second, concrete constraint Constraint_Node := Last_Sibling_Of (Start_Node => Child_Node (Current_Node => Anno_Node)); -- ASSUME Constraint_Node = procedure_constraint SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Constraint_Node) = SP_Symbols.procedure_constraint, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Constraint_Node = procedure_constraint in Check_Annotation"); Wf_Subprogram_Constraint (Node => Constraint_Node, Subprogram_Sym => Entry_Sym, First_Seen => False, Component_Data => Component_Data, The_Heap => TheHeap); -- Synthesise 'all from all' dependency if necessary. 
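            -- Roughly speaking, the synthesised dependency behaves as if a refined
            -- annotation of the following shape had been written (purely illustrative,
            -- for an entry exporting X and Y and importing A and B):
            --
            --    --# derives X, Y from A, B;
            --
            -- that is, every export is taken to depend on every import.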
if Needs_Synthetic_Dependency (Proc_Task_Or_Entry => Entry_Sym) then Dependency_Relation.Create_Full_Subprog_Dependency (Node_Pos => Node_Pos, Subprog_Sym => Entry_Sym, Abstraction => Dictionary.IsRefined, The_Heap => TheHeap); end if; else -- anno found but not needed ErrorHandler.Semantic_Error (Err_Num => 155, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Anno_Node), Id_Str => Dictionary.GetSimpleName (Entry_Sym)); Dictionary.SetSubprogramSignatureNotWellformed (Dictionary.IsAbstract, Entry_Sym); end if; else -- no anno if Requires_Second_Annotation (Entry_Sym => Entry_Sym) then -- anno missing ErrorHandler.Semantic_Error (Err_Num => 87, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Dictionary.GetSimpleName (Entry_Sym)); Dictionary.SetSubprogramSignatureNotWellformed (Dictionary.IsRefined, Entry_Sym); end if; end if; end Check_Annotation; begin -- Wf_Entry_Body Entry_Spec_Node := Child_Node (Current_Node => Node); -- ASSUME Entry_Spec_Node = entry_specification SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Entry_Spec_Node) = SP_Symbols.entry_specification, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Entry_Spec_Node = entry_specification in Wf_Entry_Body"); Ident_Node := Child_Node (Current_Node => Child_Node (Current_Node => Entry_Spec_Node)); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Wf_Entry_Body"); Formal_Part_Node := Next_Sibling (Current_Node => Child_Node (Current_Node => Entry_Spec_Node)); -- ASSUME Formal_Part_Node = formal_part OR NULL Guard_Node := Next_Sibling (Current_Node => Entry_Spec_Node); -- ASSUME Guard_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Guard_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Guard_Node = identifier in Wf_Entry_Body"); Anno_Node := Next_Sibling (Current_Node => Guard_Node); -- ASSUME Anno_Node = procedure_annotation SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Anno_Node) = SP_Symbols.procedure_annotation, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Anno_Node = procedure_annotation in Wf_Entry_Body"); Subprogram_Implementation_Node := Next_Sibling (Current_Node => Anno_Node); -- ASSUME Subprogram_Implementation_Node = subprogram_implementation SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Subprogram_Implementation_Node) = SP_Symbols.subprogram_implementation, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Subprogram_Implementation_Node = subprogram_implementation in Wf_Entry_Body"); Pragma_Rep_Node := Child_Node (Current_Node => Subprogram_Implementation_Node); -- ASSUME Pragma_Rep_Node = pragma_rep SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Pragma_Rep_Node) = SP_Symbols.pragma_rep, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Pragma_Rep_Node = pragma_rep in Wf_Entry_Body"); End_Node := Last_Sibling_Of (Start_Node => Pragma_Rep_Node); -- ASSUME End_Node = designator OR hidden_part SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => End_Node) = SP_Symbols.designator or else Syntax_Node_Type (Node => End_Node) = SP_Symbols.hidden_part, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect End_Node = designator OR hidden_part in Wf_Entry_Body"); Hidden := Body_Hidden_Class (Node => Subprogram_Implementation_Node); Entry_Sym := Dictionary.LookupItem 
(Name => Node_Lex_String (Node => Ident_Node), Scope => Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); -- Check that Entry_Sym is an entry declared in the spec. Since we are looking up an identifier -- not a full, dotted name we can't find any other entry by mistake so a simple check is all that -- is needed. if Dictionary.IsEntry (Entry_Sym) then -- ASSUME Formal_Part_Node = formal_part OR NULL if Syntax_Node_Type (Node => Formal_Part_Node) = SP_Symbols.formal_part then -- ASSUME Formal_Part_Node = formal_part STree.Set_Node_Lex_String (Sym => Entry_Sym, Node => Ident_Node); Wf_Formal_Part (Node => Formal_Part_Node, Current_Scope => Scope, Subprog_Sym => Entry_Sym, First_Occurrence => False, Context => Dictionary.ProgramContext); elsif Formal_Part_Node = STree.NullNode then -- ASSUME Formal_Part_Node = NULL if Dictionary.GetNumberOfSubprogramParameters (Entry_Sym) /= 0 then ErrorHandler.Semantic_Error (Err_Num => 152, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Dictionary.GetSimpleName (Entry_Sym)); else STree.Set_Node_Lex_String (Sym => Entry_Sym, Node => Ident_Node); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Formal_Part_Node = formal_part OR NULL in Wf_Entry_Body"); end if; -- ok so far -- now check that the Guard is valid Guard_Sym := Dictionary.LookupItem (Name => Node_Lex_String (Node => Guard_Node), Scope => Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); if Dictionary.Is_Variable (Guard_Sym) and then Dictionary.IsRefinement (Dictionary.GetProtectedTypeOwnVariable (Dictionary.GetRegion (Scope)), Guard_Sym) and then Dictionary.TypeIsBoolean (Dictionary.GetType (Guard_Sym)) then -- Guard is a protected element of type Boolean, which is OK -- store it for use in VCG Dictionary.SetSubprogramEntryBarrier (Entry_Sym, Guard_Sym); STree.Set_Node_Lex_String (Sym => Guard_Sym, Node => Guard_Node); -- The entry is valid so far, it may be hidden or it may have a real sequence of statements if Hidden = All_Hidden then Dictionary.AddBody (CompilationUnit => Entry_Sym, Comp_Unit => ContextManager.Ops.Current_Unit, TheBody => Dictionary.Location'(Start_Position => Node_Position (Node => Node), End_Position => Node_Position (Node => Node)), Hidden => True); ErrorHandler.Hidden_Text (Position => Node_Position (Node => End_Node), Unit_Str => Node_Lex_String (Node => Ident_Node), Unit_Typ => SP_Symbols.subprogram_implementation); Next_Node := STree.NullNode; -- prune tree walk on hidden part else Dictionary.AddBody (CompilationUnit => Entry_Sym, Comp_Unit => ContextManager.Ops.Current_Unit, TheBody => Dictionary.Location'(Start_Position => Node_Position (Node => Node), End_Position => Node_Position (Node => Node)), Hidden => False); Check_Annotation (Node_Pos => Node_Position (Node => Node), Anno_Node => Anno_Node, Entry_Sym => Entry_Sym, Scope => Scope, Component_Data => Component_Data); if Hidden = Handler_Hidden then ErrorHandler.Hidden_Handler (Position => Node_Position (Node => End_Node), Unit_Str => Node_Lex_String (Node => Ident_Node), Unit_Typ => SP_Symbols.entry_body); end if; -- set up scope for rest of tree walk Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Entry_Sym); -- set up next node for rest of tree walk Next_Node := Subprogram_Implementation_Node; end if; else -- Guard is not a protected element or is not Boolean ErrorHandler.Semantic_Error (Err_Num => 994, Reference => 
ErrorHandler.No_Reference, Position => Node_Position (Node => Guard_Node), Id_Str => Node_Lex_String (Node => Guard_Node)); Next_Node := STree.NullNode; -- prune tree walk on error end if; else -- not a valid Entry ErrorHandler.Semantic_Error (Err_Num => 995, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Node_Lex_String (Node => Ident_Node)); Next_Node := STree.NullNode; -- prune tree walk on error end if; -- check closing identifier if Syntax_Node_Type (Node => End_Node) = SP_Symbols.designator then if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Node => Ident_Node), Lex_Str2 => Node_Lex_String (Node => Child_Node (Current_Node => End_Node))) /= LexTokenManager.Str_Eq then ErrorHandler.Semantic_Error (Err_Num => 58, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => End_Node), Id_Str => Node_Lex_String (Node => Ident_Node)); end if; end if; end Wf_Entry_Body; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_relation.adb0000644000175000017500000006066211753202336023560 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ---------------------------------------------------------------------------- -- Overview: Called to check validity of a relation node. 
-- Replaces calls to StaticTerm, BaseTypeTerm and CheckTypeTerm ---------------------------------------------------------------------------- separate (Sem.Walk_Expression_P) procedure Wf_Relation (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; T_Stack : in Type_Context_Stack.T_Stack_Type) is Op_Node, Right_Node : STree.SyntaxNode; Right, Left, Result : Sem.Exp_Record; Ident_Str : LexTokenManager.Lex_String; Errors_Found : Boolean := False; --------------------------------------------------------------- function Membership_Test (Operator : SP_Symbols.SP_Symbol; Val, Lower_Bound, Upper_Bound : Maths.Value) return Maths.Value is Result : Maths.Value; OK : Maths.ErrorCode; begin if Operator = SP_Symbols.inside then --# accept Flow, 10, OK, "Expected ineffective assignment"; Maths.InsideRange (Val, Lower_Bound, Upper_Bound, --to get Result, OK); --# end accept; else --# accept Flow, 10, OK, "Expected ineffective assignment"; Maths.OutsideRange (Val, Lower_Bound, Upper_Bound, --to get Result, OK); --# end accept; end if; --# accept Flow, 33, OK, "Expected to be neither referenced nor exported"; return Result; end Membership_Test; --------------------------------------------------------------- function Type_Membership_Test (Operator : SP_Symbols.SP_Symbol; Left : Sem.Exp_Record; RH_Type : Dictionary.Symbol; Scope : Dictionary.Scopes) return Maths.Value --# global in Dictionary.Dict; --# in LexTokenManager.State; is Result : Maths.Value; ------------- function Invert_If_Outside (Operator : SP_Symbols.SP_Symbol; Raw_Result : Maths.Value) return Maths.Value is Result : Maths.Value; begin Result := Raw_Result; if Operator = SP_Symbols.outside then Maths.NotOp (Result); end if; return Result; end Invert_If_Outside; ------------- function Scalar_Type_Membership_Test (Operator : SP_Symbols.SP_Symbol; Val : Maths.Value; RH_Type : Dictionary.Symbol) return Maths.Value --# global in Dictionary.Dict; --# in LexTokenManager.State; is begin return Membership_Test (Operator => Operator, Val => Val, Lower_Bound => Maths.ValueRep (Dictionary.GetScalarAttributeValue (False, -- not base type LexTokenManager.First_Token, RH_Type)), Upper_Bound => Maths.ValueRep (Dictionary.GetScalarAttributeValue (False, -- not base type LexTokenManager.Last_Token, RH_Type))); end Scalar_Type_Membership_Test; ------------- function Non_Scalar_Type_Membership_Test (Operator : SP_Symbols.SP_Symbol; LH_Type, RH_Type : Dictionary.Symbol; Scope : Dictionary.Scopes) return Maths.Value --# global in Dictionary.Dict; --# in LexTokenManager.State; is Result : Maths.Value; begin if Dictionary.TypeIsRecord (RH_Type) then Result := Maths.TrueValue; -- no record subtypes so must be member elsif Dictionary.Is_Unconstrained_Array_Type_Mark (RH_Type, Scope) then Result := Maths.TrueValue; -- array must be member of its base type else -- two constrained arrays if Sem.Indexes_Match (Target => LH_Type, Source => RH_Type) then Result := Maths.TrueValue; else Result := Maths.FalseValue; end if; end if; return Invert_If_Outside (Operator => Operator, Raw_Result => Result); end Non_Scalar_Type_Membership_Test; begin -- Type_Membership_Test if Dictionary.IsPrivateType (RH_Type, Scope) or else Dictionary.TypeIsBoolean (RH_Type) then Result := Invert_If_Outside (Operator => Operator, Raw_Result => Maths.TrueValue); elsif Dictionary.TypeIsScalar (RH_Type) then Result := Scalar_Type_Membership_Test (Operator => Operator, Val => Left.Value, RH_Type => RH_Type); else Result := Non_Scalar_Type_Membership_Test 
(Operator => Operator, LH_Type => Left.Type_Symbol, RH_Type => RH_Type, Scope => Scope); end if; return Result; end Type_Membership_Test; --------------------------------------------------------------- -- if we have statically evaluated the result plant it for VCG; -- otherwise plant the left hand type so we can distinguish Boolean -- models from normal inequality models in the VCG procedure Plant_Result (Op_Node : in STree.SyntaxNode; Result : in Maths.Value; LH_Type : in Dictionary.Symbol) --# global in Dictionary.Dict; --# in out STree.Table; --# derives STree.Table from *, --# Dictionary.Dict, --# LH_Type, --# Op_Node, --# Result; --# pre (STree.Syntax_Node_Type (Op_Node, STree.Table) = SP_Symbols.inside or --# STree.Syntax_Node_Type (Op_Node, STree.Table) = SP_Symbols.outside) and --# (Dictionary.Is_Null_Symbol (LH_Type) or Dictionary.IsTypeMark (LH_Type, Dictionary.Dict)); --# post STree.Table = STree.Table~; is begin if Result = Maths.TrueValue then STree.Add_Node_Symbol (Node => Op_Node, Sym => Dictionary.GetTrue); elsif Result = Maths.FalseValue then STree.Add_Node_Symbol (Node => Op_Node, Sym => Dictionary.GetFalse); else -- no statically evaluated result available so plant type instead STree.Add_Node_Symbol (Node => Op_Node, Sym => LH_Type); end if; end Plant_Result; --------------------------------------------------------------- procedure Do_Boolean_Binary_Operator (Operator : in SP_Symbols.SP_Symbol; Op_Node_Pos, Left_Node_Pos, Right_Node_Pos : in LexTokenManager.Token_Position; Left, Right : in Sem.Exp_Record; Scope : in Dictionary.Scopes; Is_Annotation : in Boolean; T_Stack : in Type_Context_Stack.T_Stack_Type; Result : in out Sem.Exp_Record) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Is_Annotation, --# Left, --# Left_Node_Pos, --# LexTokenManager.State, --# Operator, --# Op_Node_Pos, --# Result, --# Right, --# Right_Node_Pos, --# Scope, --# SPARK_IO.File_Sys, --# T_Stack & --# Result from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Is_Annotation, --# Left, --# LexTokenManager.State, --# Operator, --# Right, --# Scope, --# T_Stack; --# pre (Dictionary.Is_Null_Symbol (Result.Type_Symbol) or Dictionary.IsTypeMark (Result.Type_Symbol, Dictionary.Dict)) and --# Type_Context_Stack.Stack_Is_Valid (T_Stack); --# post Dictionary.Is_Null_Symbol (Result.Type_Symbol) or Dictionary.IsTypeMark (Result.Type_Symbol, Dictionary.Dict); is begin Result.Is_Constant := Left.Is_Constant and then Right.Is_Constant; Result.Is_Static := Left.Is_Static and then Right.Is_Static and then Dictionary.TypeIsScalar (Left.Type_Symbol) and then Dictionary.TypeIsScalar (Right.Type_Symbol); Result.Has_Operators := True; if Left.Is_ARange or else Right.Is_ARange then Result := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 90, Reference => ErrorHandler.No_Reference, Position => Op_Node_Pos, Id_Str => LexTokenManager.Null_String); else -- neither are ranges if Operator = SP_Symbols.equals or else Operator = SP_Symbols.not_equal then Check_Binary_Operator (Operator => Operator, Left => Left, Right => Right, Scope => Scope, T_Stack => T_Stack, Op_Pos => Op_Node_Pos, Left_Pos => Left_Node_Pos, Right_Pos => Right_Node_Pos, Convert => True, Is_Annotation => Is_Annotation, Result => Result); if Result /= 
Sem.Unknown_Type_Record then if not Dictionary.IsPredefinedStringType (Dictionary.GetRootType (Left.Type_Symbol)) and then not Dictionary.IsPredefinedStringType (Dictionary.GetRootType (Right.Type_Symbol)) then -- Unconstrained array types only permitted if: -- 1. we are in annotation context, and -- 2. both sides are unconstrained. -- So you are allowed to say, for example, "post X = T'(others => 0)" for a -- subprogram that initializes an unconstrained array or "post X /= Y" where -- both X and Y are compatible unconstrained array types. -- Note that test 2 is almost certainly redundant because if only one side was -- unconstrained then the incompatibility would be detected elsewhere before -- this code was reached. if (Dictionary.Is_Unconstrained_Array_Type_Mark (Left.Type_Symbol, Scope) and then not (Is_Annotation and then Dictionary.Is_Unconstrained_Array_Type_Mark (Right.Type_Symbol, Scope))) or else (Dictionary.Is_Unconstrained_Array_Type_Mark (Right.Type_Symbol, Scope) and then not (Is_Annotation and then Dictionary.Is_Unconstrained_Array_Type_Mark (Left.Type_Symbol, Scope))) then Result := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 39, Reference => ErrorHandler.No_Reference, Position => Op_Node_Pos, Id_Str => LexTokenManager.Null_String); elsif Sem.Illegal_Unconstrained (Left_Type => Left.Type_Symbol, Right_Type => Right.Type_Symbol) then Result := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 418, Reference => ErrorHandler.No_Reference, Position => Op_Node_Pos, Id_Str => LexTokenManager.Null_String); end if; end if; if Result /= Sem.Unknown_Type_Record then Calc_Binary_Operator (Node_Pos => Left_Node_Pos, Operator => Operator, Left_Val => Left.Value, Right_Val => Right.Value, Is_Annotation => Is_Annotation, Result => Result); end if; end if; else -- ordering operator if Dictionary.IsUnknownTypeMark (Left.Type_Symbol) or else Dictionary.IsUnknownTypeMark (Right.Type_Symbol) or else (Dictionary.IsScalarTypeMark (Left.Type_Symbol, Scope) and then Dictionary.IsScalarTypeMark (Right.Type_Symbol, Scope)) or else (Dictionary.IsPredefinedStringType (Dictionary.GetRootType (Left.Type_Symbol)) and then Dictionary.IsPredefinedStringType (Dictionary.GetRootType (Right.Type_Symbol))) or else (CommandLineData.Ravenscar_Selected and then Dictionary.IsPredefinedTimeType (Left.Type_Symbol) and then Dictionary.IsPredefinedTimeType (Right.Type_Symbol)) then Check_Binary_Operator (Operator => Operator, Left => Left, Right => Right, Scope => Scope, T_Stack => T_Stack, Op_Pos => Op_Node_Pos, Left_Pos => Left_Node_Pos, Right_Pos => Right_Node_Pos, Convert => True, Is_Annotation => Is_Annotation, Result => Result); Calc_Binary_Operator (Node_Pos => Left_Node_Pos, Operator => Operator, Left_Val => Left.Value, Right_Val => Right.Value, Is_Annotation => Is_Annotation, Result => Result); elsif Dictionary.IsArrayTypeMark (Left.Type_Symbol, Scope) and then Dictionary.IsArrayTypeMark (Right.Type_Symbol, Scope) then Result := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 51, Reference => ErrorHandler.No_Reference, Position => Op_Node_Pos, Id_Str => LexTokenManager.Null_String); else Result := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 52, Reference => ErrorHandler.No_Reference, Position => Op_Node_Pos, Id_Str => LexTokenManager.Null_String); end if; end if; end if; Result.Errors_In_Expression := Result.Errors_In_Expression or else Left.Errors_In_Expression or else Right.Errors_In_Expression; end Do_Boolean_Binary_Operator; begin 
-- Wf_Relation Op_Node := STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Node)); -- ASSUME Op_Node = relational_operator OR inside OR outside OR NULL if STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.relational_operator or else STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.inside or else STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.outside then -- ASSUME Op_Node = relational_operator OR inside OR outside Exp_Stack.Pop (Item => Right, Stack => E_Stack); Exp_Stack.Pop (Item => Left, Stack => E_Stack); Result := Null_Type_Record; if STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.inside or else STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.outside then -- ASSUME Op_Node = inside OR outside Right_Node := STree.Next_Sibling (Current_Node => Op_Node); -- ASSUME Right_Node = name OR arange OR annotation_name OR annotation_arange if STree.Syntax_Node_Type (Node => Right_Node) = SP_Symbols.name or else STree.Syntax_Node_Type (Node => Right_Node) = SP_Symbols.annotation_name then -- ASSUME Right_Node = name OR annotation_name if Right.Sort = Sem.Is_Unknown then Result := Sem.Unknown_Type_Record; elsif Right.Sort /= Sem.Is_Type_Mark then Result := Sem.Unknown_Type_Record; Ident_Str := Dictionary.GetSimpleName (Right.Other_Symbol); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq then Errors_Found := True; ErrorHandler.Semantic_Error (Err_Num => 95, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Right_Node), Id_Str => Ident_Str); else Errors_Found := True; ErrorHandler.Semantic_Error (Err_Num => 63, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Right_Node), Id_Str => Ident_Str); end if; else if Dictionary.CompatibleTypes (Scope, Left.Type_Symbol, Right.Type_Symbol) then Result.Is_Constant := Left.Is_Constant; Result.Is_Static := CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83 and then Left.Is_Static; Result.Type_Symbol := Dictionary.GetPredefinedBooleanType; Result.Value := Type_Membership_Test (Operator => STree.Syntax_Node_Type (Node => Op_Node), Left => Left, RH_Type => Right.Type_Symbol, Scope => Scope); Result.Has_Operators := True; Plant_Result (Op_Node => Op_Node, Result => Result.Value, LH_Type => Left.Type_Symbol); -- calculate value here depending on bounds of type mark -- obtained from the dictionary else -- type mismatch Result := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 42, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; end if; elsif STree.Syntax_Node_Type (Node => Right_Node) = SP_Symbols.arange or else STree.Syntax_Node_Type (Node => Right_Node) = SP_Symbols.annotation_arange then -- ASSUME Right_Node = arange OR annotation_arange if Dictionary.CompatibleTypes (Scope, Left.Type_Symbol, Right.Type_Symbol) and then Right.Is_ARange then Result.Is_Constant := Left.Is_Constant and then Right.Is_Constant; Result.Is_Static := CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83 and then Left.Is_Static and then Right.Is_Static; Result.Type_Symbol := Dictionary.GetPredefinedBooleanType; Result.Value := Membership_Test (Operator => STree.Syntax_Node_Type (Node => Op_Node), Val => Left.Value, Lower_Bound => Right.Value, Upper_Bound => Right.Range_RHS); Result.Has_Operators := True; Plant_Result (Op_Node => Op_Node, Result => 
Result.Value, LH_Type => Left.Type_Symbol); else -- type mismatch or RHS is not a range Result := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 42, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Right_Node = name OR arange OR annotation_name OR annotation_arange in Wf_Relation"); end if; Result.Errors_In_Expression := Errors_Found or else Result.Errors_In_Expression or else Left.Errors_In_Expression or else Right.Errors_In_Expression; elsif STree.Syntax_Node_Type (Node => Op_Node) = SP_Symbols.relational_operator then -- ASSUME Op_Node = relational_operator Do_Boolean_Binary_Operator (Operator => STree.Syntax_Node_Type (Node => STree.Child_Node (Current_Node => Op_Node)), Op_Node_Pos => STree.Node_Position (Node => STree.Child_Node (Current_Node => Op_Node)), Left_Node_Pos => STree.Node_Position (Node => STree.Child_Node (Current_Node => Node)), Right_Node_Pos => STree.Node_Position (Node => STree.Next_Sibling (Current_Node => Op_Node)), Left => Left, Right => Right, Scope => Scope, Is_Annotation => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_relation, T_Stack => T_Stack, Result => Result); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Op_Node = relational_operator OR inside OR outside in Wf_Relation"); end if; -- OtherSymbol may carry a function symbol in the case of uses of unchecked_conversion. -- This symbol is used (by wf_Assign) to convey information to the VCG to suppress -- checks when an unchecked_conversion is assigned to something of the same subtype. -- We do not want this mechanism if the unchecked_conversion is used in any other context -- than a direct assignment.
Therefore we clear OtherSymbol here: Result.Other_Symbol := Dictionary.NullSymbol; Exp_Stack.Push (X => Result, Stack => E_Stack); elsif Op_Node /= STree.NullNode then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Op_Node = relational_operator OR inside OR outside OR NULL in Wf_Relation"); end if; end Wf_Relation; spark-2012.0.deb/examiner/spark.smf0000644000175000017500000000001211753202337016072 0ustar eugeneugen@main.smf spark-2012.0.deb/examiner/support.smf0000644000175000017500000000153611753202337016502 0ustar eugeneugencommandlinedata.adb commandlinehandler.adb completecheck.adb componenterrors.adb componentmanager.adb configfile.adb contextmanager-ops.adb date_time.adb -vcg e_strings.adb file_utils.adb heap.adb indexmanager-cache.shb -vcg indexmanager-index_table_p.adb indexmanager.adb lextokenlists.adb lextokenmanager.adb lextokenmanager-insert_nat.adb -vcg lextokenmanager-relation_algebra.adb lextokenmanager-relation_algebra-string.adb lextokenmanager-seq_algebra.adb lextokenstacks.adb lists.adb maths.adb maths-literaltovalue.adb maths-parsestring.adb maths-valuetostring.adb metafile.adb reflist.adb relationalgebra.adb requiredunits.adb -vcg screenecho.adb seqalgebra.adb simplelists.adb sparkhtml.adb statistics.adb stree.adb stree-findlastitemindependencyrelation.adb -vcg symbol_set.adb -vcg systemerrors.adb -vcg sprint.adb -vcg spark_xml.adb xmlreport.adb -vcg spark-2012.0.deb/examiner/errorhandler-conversions-tostring-usageerror.adb0000644000175000017500000002216711753202336026011 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (ErrorHandler.Conversions.ToString) procedure UsageError (Err_Num : in Error_Types.NumericError; With_Explanation : in Boolean; E_Str : in out E_Strings.T) is Err_Type : ErrorHandler.Usage_Err_Type; procedure UsageErrorExpl (E_Str : in out E_Strings.T) --# global in Err_Type; --# derives E_Str from *, --# Err_Type; is separate; -- Note that the parameter names for this subunit are chosen to make it as easy as -- possible to auto-generate the subunit from this, its parent, file. The -- generation requires copying the case statement below, stripping out the -- current Append'Thing' statements and adding an Append_String for the -- explanatory text that is delineated by --! comments. 
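   -- A sketch of that transformation (the fragment is invented, not real generated
   -- output). A branch of the case statement in this file such as
   --
   --    when ErrorHandler.Redefined_Import =>   --! 34
   --       ...Append_Name / Append_String calls...
   --       --! The updating of imported-only variables is forbidden under all
   --       --! circumstances.
   --
   -- would appear in the generated UsageErrorExpl as something like
   --
   --    when ErrorHandler.Redefined_Import =>
   --       E_Strings.Append_String
   --         (E_Str => E_Str,
   --          Str   => "The updating of imported-only variables is forbidden" &
   --                   " under all circumstances.");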
procedure Append_Explanation --# global in Err_Type; --# in With_Explanation; --# in out E_Str; --# derives E_Str from *, --# Err_Type, --# With_Explanation; is Explanation_String : E_Strings.T := E_Strings.Empty_String; begin if With_Explanation then -- we need to at least look for an explanation UsageErrorExpl (E_Str => Explanation_String); if E_Strings.Get_Length (E_Str => Explanation_String) > 0 then -- there actually is one E_Strings.Append_String (E_Str => E_Str, Str => ErrorHandler.Explanation_Prefix); E_Strings.Append_Examiner_String (E_Str1 => E_Str, E_Str2 => Explanation_String); E_Strings.Append_String (E_Str => E_Str, Str => ErrorHandler.Explanation_Postfix); end if; end if; end Append_Explanation; begin Err_Type := ErrorHandler.Usage_Err_Type'Val (Err_Num.ErrorNum - Error_Types.UsageErrOffset); case Err_Type is -- HTML Directives --! <"flow-"> --! <"!!! Flow Error : "><" : "> when ErrorHandler.Unused_Import => --! 30 E_Strings.Append_String (E_Str => E_Str, Str => "The variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is imported but neither referenced nor exported"); when ErrorHandler.Undefined_Export => --! 31 if Err_Num.Name1 = Error_Types.NoName then E_Strings.Append_String (E_Str => E_Str, Str => "The returned function value is not defined"); else E_Strings.Append_String (E_Str => E_Str, Str => "The variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is exported but not (internally) defined"); end if; --! The variable XXX is exported but not (internally) defined. when ErrorHandler.Undefined_Var => --! 32 E_Strings.Append_String (E_Str => E_Str, Str => "The variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is neither imported nor defined"); when ErrorHandler.Unreferenced_Var => --! 33 E_Strings.Append_String (E_Str => E_Str, Str => "The variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is neither referenced nor exported"); when ErrorHandler.Redefined_Import => --! 34 E_Strings.Append_String (E_Str => E_Str, Str => "The imported, non-exported variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " may be redefined"); --! The updating of imported-only variables is forbidden under all --! circumstances. when ErrorHandler.Ineffective_Import => --! 35 E_Strings.Append_String (E_Str => E_Str, Str => "Importation of the initial value of variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " is ineffective"); --! The meaning of this message is explained in Section 4.2 of Appendix A when ErrorHandler.Referenced_But_Not_In_Partition => --! 36 E_Strings.Append_String (E_Str => E_Str, Str => "The referencing of variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " by a task or interrupt handler has been omitted from the partition annotation"); --! This message is only issued when processing the partition annotation. The partition annotation --! must describe all the actions of the tasks and interrupt handlers making up the program. Therefore, --! 
if a variable is imported somewhere in the program by a task or interrupt handler, then it must --! also be an import at the partition level. As well as the omission of explicit imports, this message is also --! generated if the implicit imports of tasks and interrupt handlers are omitted. For tasks this means --! any variable the task suspends on and for interrupt handlers it means the name of the protected --! object containing the handler or, if given, the name of the interrupt stream associated with the --! handler. when ErrorHandler.Updated_But_Not_In_Partition => --! 37 E_Strings.Append_String (E_Str => E_Str, Str => "The updating of variable "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " by a task or interrupt handler has been omitted from the partition annotation"); --! This message is only issued when processing the partition annotation. The partition annotation --! must describe all the actions of the tasks and interrupt handlers making up the program. Therefore, --! if a variable is exported somewhere in the program by a task or interrupt handler, then it must --! also be an export at the partition level. when ErrorHandler.Uninitialized_Protected_Element => --! 38 E_Strings.Append_String (E_Str => E_Str, Str => "The protected element "); Append_Name (E_Str => E_Str, Name => Err_Num.Name1, Scope => Err_Num.Scope); E_Strings.Append_String (E_Str => E_Str, Str => " must be initialized at its point of declaration"); --! To avoid potential race conditions during program startup, all --! elements of a protected type must be initialized with a constant value --! at the point of declaration. end case; Append_Explanation; E_Strings.Append_String (E_Str => E_Str, Str => "."); end UsageError; spark-2012.0.deb/examiner/sem-compunit-wf_proof_function_declaration.adb0000644000175000017500000002644711753202336025447 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.CompUnit) procedure Wf_Proof_Function_Declaration (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; The_Heap : in out Heap.HeapRecord; Proof_Func_Sym : out Dictionary.Symbol) is Type_Sym : Dictionary.Symbol; Constraint_Node : STree.SyntaxNode; Ident_Node : STree.SyntaxNode; Return_Type_Node : STree.SyntaxNode; Spec_Node : STree.SyntaxNode; Dummy_Component_Data : ComponentManager.ComponentData; Ident_Str : LexTokenManager.Lex_String; begin ComponentManager.Initialise (Data => Dummy_Component_Data); Spec_Node := Child_Node (Current_Node => Node); -- ASSUME Spec_Node = function_specification SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.function_specification, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Spec_Node = function_specification in Wf_Proof_Function_Declaration"); Return_Type_Node := Last_Sibling_Of (Start_Node => Child_Node (Current_Node => Spec_Node)); -- ASSUME Return_Type_Node = type_mark SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Return_Type_Node) = SP_Symbols.type_mark, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = Return_Type_Node = type_mark in Wf_Proof_Function_Declaration"); Constraint_Node := Next_Sibling (Current_Node => Spec_Node); -- ASSUME Constraint_Node = function_constraint SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Constraint_Node) = SP_Symbols.function_constraint, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Constraint_Node = function_constraint in Wf_Proof_Function_Declaration"); -- If we are in a package or protected body we may refine a proof function -- definition if In_Package_Body (Current_Scope => Current_Scope) or else In_Protected_Body (Current_Scope => Current_Scope) then Ident_Node := Child_Node (Current_Node => Child_Node (Current_Node => Spec_Node)); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Wf_Subprogram_Specification_From_Declaration"); Ident_Str := Node_Lex_String (Node => Ident_Node); -- Lookup in the dictionary to see if the name already exists Proof_Func_Sym := Dictionary.LookupItem (Name => Ident_Str, Scope => Current_Scope, Context => Dictionary.ProofContext, Full_Package_Name => False); if not Dictionary.Is_Null_Symbol (Proof_Func_Sym) then -- The name already exists in this scope. -- Is it is declared in the visible part, or Proof_Func_Sym := Dictionary.LookupImmediateScope (Name => Ident_Str, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Dictionary.GetRegion (Current_Scope)), Context => Dictionary.ProofContext); -- the private part of the unit? if Dictionary.Is_Null_Symbol (Proof_Func_Sym) and then Dictionary.IsPackage (Dictionary.GetRegion (Current_Scope)) then Proof_Func_Sym := Dictionary.LookupImmediateScope (Name => Ident_Str, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Privat, The_Unit => Dictionary.GetRegion (Current_Scope)), Context => Dictionary.ProofContext); end if; if not Dictionary.Is_Null_Symbol (Proof_Func_Sym) and then Dictionary.IsProofFunction (Proof_Func_Sym) and then not Dictionary.IsImplicitProofFunction (Proof_Func_Sym) then -- It is the refinement of an explicit proof function declared -- in the visible or private part of the unit. 
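-- For illustration only (the names in this example are invented, not taken from the
-- code): a package specification might contain
--    --# function Is_Valid (S : Stack_Type) return Boolean;
-- and the package body may then declare the same proof function again so that its
-- constraint can be stated in terms of the refined own variables.  The checks that
-- follow confirm that such a redeclaration is consistent with the original
-- declaration, in particular that its return type_mark matches.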
-- Check that the return type of the refinement is consistent Wf_Type_Mark (Node => Return_Type_Node, Current_Scope => Current_Scope, Context => Dictionary.ProofContext, Type_Sym => Type_Sym); if not Dictionary.Types_Are_Equal (Left_Symbol => Type_Sym, Right_Symbol => Dictionary.GetType (Proof_Func_Sym), Full_Range_Subtype => False) then if Dictionary.IsUnknownTypeMark (Dictionary.GetType (Proof_Func_Sym)) then -- remind user that return type on spec was illegal ErrorHandler.Semantic_Error (Err_Num => 841, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Return_Type_Node), Id_Str => Dictionary.GetSimpleName (Proof_Func_Sym)); else -- report inconsistency ErrorHandler.Semantic_Error (Err_Num => 22, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Return_Type_Node), Id_Str => Dictionary.GetSimpleName (Proof_Func_Sym)); end if; end if; else -- Name in use for something other than an explicit proof function. ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Ident_Str); -- Add the proof function declaration anyway to avoid the -- propagation of errors. Dictionary.AddSubprogram (Name => Ident_Str, Comp_Unit => ContextManager.Ops.Current_Unit, Specification => Dictionary.Location'(Start_Position => Node_Position (Node => Ident_Node), End_Position => Node_Position (Node => Ident_Node)), Scope => Current_Scope, Context => Dictionary.ProofContext, Subprogram => Proof_Func_Sym); STree.Add_Node_Symbol (Node => Ident_Node, Sym => Proof_Func_Sym); end if; -- Now enter the refined proof function constraints in to the dictionary. -- ASSUME Constraint_Node = function_constraint --# accept F, 10, Dummy_Component_Data, "As can be seen from the name, this is not needed here."; Wf_Subprogram_Constraint (Node => Constraint_Node, Subprogram_Sym => Proof_Func_Sym, First_Seen => False, Component_Data => Dummy_Component_Data, The_Heap => The_Heap); --# end accept; -- If the refinement contains an implicit return - warn. See -- Sem.Subprogram_Specification.Wf_Subprogram_Specification -- for more information. if not Dictionary.HasImplicitReturnVariable (Dictionary.IsAbstract, Proof_Func_Sym) and then Dictionary.HasImplicitReturnVariable (Dictionary.IsRefined, Proof_Func_Sym) then ErrorHandler.Semantic_Warning_Sym (Err_Num => 321, Position => Node_Position (Node), Sym => Proof_Func_Sym, Scope => Current_Scope); end if; -- Refinements cannot be checked. if Dictionary.HasPostcondition (Dictionary.IsAbstract, Proof_Func_Sym) and Dictionary.HasPostcondition (Dictionary.IsRefined, Proof_Func_Sym) then ErrorHandler.Semantic_Warning_Sym (Err_Num => 322, Position => Node_Position (Node), Sym => Proof_Func_Sym, Scope => Current_Scope); end if; if Dictionary.HasPrecondition (Dictionary.IsAbstract, Proof_Func_Sym) and Dictionary.HasPrecondition (Dictionary.IsRefined, Proof_Func_Sym) then ErrorHandler.Semantic_Warning_Sym (Err_Num => 323, Position => Node_Position (Node), Sym => Proof_Func_Sym, Scope => Current_Scope); end if; else -- It is a new identifier in this scope it cannot be a refinement. 
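-- Note on the "--# accept ... 10, Dummy_Component_Data, ..." / "--# end accept;"
-- annotations used in this procedure: they justify the expected "ineffective
-- assignment" flow message for Dummy_Component_Data, which has to be supplied as
-- an actual parameter even though its final value is deliberately discarded here.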
-- Assume it is a new proof function declaration --# accept Flow, 10, Dummy_Component_Data, "Expected ineffective assignment"; Subprogram_Specification.Wf_Subprogram_Specification (Spec_Node => Spec_Node, Anno_Node => STree.NullNode, Constraint_Node => Constraint_Node, Inherit_Node => STree.NullNode, Context_Node => STree.NullNode, Generic_Formal_Part_Node => STree.NullNode, Current_Scope => Current_Scope, Generic_Unit => Dictionary.NullSymbol, Current_Context => Dictionary.ProofContext, Component_Data => Dummy_Component_Data, The_Heap => The_Heap, Subprog_Sym => Proof_Func_Sym); --# end accept; end if; else -- The declaration is not in a package or protected body it cannot be -- a refinement. Assume a new proof function declaration. --# accept Flow, 10, Dummy_Component_Data, "Expected ineffective assignment"; Subprogram_Specification.Wf_Subprogram_Specification (Spec_Node => Spec_Node, Anno_Node => STree.NullNode, Constraint_Node => Constraint_Node, Inherit_Node => STree.NullNode, Context_Node => STree.NullNode, Generic_Formal_Part_Node => STree.NullNode, Current_Scope => Current_Scope, Generic_Unit => Dictionary.NullSymbol, Current_Context => Dictionary.ProofContext, Component_Data => Dummy_Component_Data, The_Heap => The_Heap, Subprog_Sym => Proof_Func_Sym); --# end accept; end if; end Wf_Proof_Function_Declaration; spark-2012.0.deb/examiner/declarations.adb0000644000175000017500000013177011753202336017402 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Cell_Storage; with Clists; with CStacks; with Debug; with E_Strings; with Pile; with SP_Symbols; with SystemErrors; use type SP_Symbols.SP_Symbol; package body Declarations --# own State is AttributeList, --# BitwiseOpList, --# ProcedureExportList, --# ReturnSymbol, --# RootIntegerUsed, --# UsedSymbols; is UsedSymbols : Cells.Cell := Cells.Null_Cell; AttributeList : Cells.Cell := Cells.Null_Cell; BitwiseOpList : Cells.Cell := Cells.Null_Cell; ProcedureExportList : Cells.Cell := Cells.Null_Cell; ReturnSymbol : Cells.Cell := Cells.Null_Cell; RootIntegerUsed : Boolean := False; ---------------------------------------------------------------- procedure StartProcessing (Heap : in out Cells.Heap_Record) --# global in out Statistics.TableUsage; --# out AttributeList; --# out BitwiseOpList; --# out ProcedureExportList; --# out ReturnSymbol; --# out RootIntegerUsed; --# out UsedSymbols; --# derives AttributeList, --# BitwiseOpList, --# Heap, --# ProcedureExportList from Heap & --# ReturnSymbol, --# RootIntegerUsed, --# UsedSymbols from & --# Statistics.TableUsage from *, --# Heap; is begin UsedSymbols := Cells.Null_Cell; ReturnSymbol := Cells.Null_Cell; Cells.Create_Cell (Heap, AttributeList); Cells.Create_Cell (Heap, ProcedureExportList); Cells.Create_Cell (Heap, BitwiseOpList); RootIntegerUsed := False; end StartProcessing; ---------------------------------------------------------------- -- New check to ensure that we don't end up with Ada and Implicit -- proof functions in list of used symbols procedure Add (Heap : in out Cells.Heap_Record; Symbol : in Dictionary.Symbol) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# in out UsedSymbols; --# derives Heap, --# Statistics.TableUsage, --# UsedSymbols from *, --# Dictionary.Dict, --# Heap, --# Symbol, --# UsedSymbols; is begin if not Dictionary.IsQuantifiedVariable (Symbol) then Pile.Insert (Heap, Symbol, Cells.Null_Cell, UsedSymbols); end if; end Add; ---------------------------------------------------------------- procedure AddAttribute (Heap : in out Cells.Heap_Record; TickCell : in Cells.Cell) --# global in AttributeList; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# AttributeList, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# TickCell; is InsertPtr : Cells.Cell; ------------------------------------ function HasBase (TickCell : Cells.Cell) return Boolean --# global in Heap; is begin return Cells.Get_Kind (Heap, Cells.Get_A_Ptr (Heap, TickCell)) = Cell_Storage.Op; end HasBase; ------------------------------------ function PrefixVal (TickCell : Cells.Cell) return Integer --# global in Heap; is PrefixCell : Cells.Cell; begin PrefixCell := Cells.Get_A_Ptr (Heap, TickCell); if Cells.Get_Kind (Heap, PrefixCell) = Cell_Storage.Op then -- Base found PrefixCell := Cells.Get_A_Ptr (Heap, PrefixCell); end if; return Cells.Get_Natural_Value (Heap, PrefixCell); end PrefixVal; ------------------------------------ function IsEqual (TickCell1, TickCell2 : Cells.Cell) return Boolean --# global in Heap; --# in LexTokenManager.State; is begin return PrefixVal (TickCell1) = PrefixVal (TickCell2) and then HasBase (TickCell1) = HasBase (TickCell2) and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Cells.Get_Lex_Str (Heap, Cells.Get_B_Ptr (Heap, TickCell1)), Lex_Str2 => Cells.Get_Lex_Str (Heap, Cells.Get_B_Ptr (Heap, 
TickCell2))) = LexTokenManager.Str_Eq and then Cells.Get_Assoc_Var (Heap, Cells.Get_B_Ptr (Heap, TickCell1)) = Cells.Get_Assoc_Var (Heap, Cells.Get_B_Ptr (Heap, TickCell2)); end IsEqual; ------------------------------------ function IsGreater (TickCell1, TickCell2 : Cells.Cell) return Boolean --# global in Dictionary.Dict; --# in Heap; --# in LexTokenManager.State; is Result : Boolean; Val1, Val2 : Integer; Result_Cmp : LexTokenManager.Str_Comp_Result; begin if HasBase (TickCell1) = HasBase (TickCell2) then Val1 := PrefixVal (TickCell1); Val2 := PrefixVal (TickCell2); if Val1 /= Val2 then Result := Val1 > Val2; else Result_Cmp := LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Cells.Get_Lex_Str (Heap => Heap, CellName => Cells.Get_B_Ptr (Heap, TickCell1)), Lex_Str2 => Cells.Get_Lex_Str (Heap => Heap, CellName => Cells.Get_B_Ptr (Heap, TickCell2))); if Result_Cmp /= LexTokenManager.Str_Eq then Result := Result_Cmp = LexTokenManager.Str_First; else Result := LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Dictionary.GetSimpleName (Item => Cells.Get_Assoc_Var (Heap => Heap, CellName => Cells.Get_B_Ptr (Heap, TickCell1))), Lex_Str2 => Dictionary.GetSimpleName (Item => Cells.Get_Assoc_Var (Heap => Heap, CellName => Cells.Get_B_Ptr (Heap, TickCell2)))) = LexTokenManager.Str_First; end if; end if; else Result := HasBase (TickCell1); end if; return Result; end IsGreater; ------------------------------------ function NextTickCell (CurrentLink : Cells.Cell) return Cells.Cell --# global in Heap; --pre not Cells.Is_Null_Cell (Cells.Get_A_Ptr (Heap, CurrentLink)); is begin return Cells.Get_C_Ptr (Heap, Cells.Get_A_Ptr (Heap, CurrentLink)); end NextTickCell; ------------------------------------ procedure InsertAfter (InsertPoint : in Cells.Cell; TickCell : in Cells.Cell) --# global in out Heap; --# in out Statistics.TableUsage; --# derives Heap from *, --# InsertPoint, --# TickCell & --# Statistics.TableUsage from *, --# Heap; is NewLink : Cells.Cell; begin Cells.Create_Cell (Heap, NewLink); Cells.Set_A_Ptr (Heap, NewLink, Cells.Get_A_Ptr (Heap, InsertPoint)); Cells.Set_A_Ptr (Heap, InsertPoint, NewLink); Cells.Set_C_Ptr (Heap, NewLink, TickCell); end InsertAfter; ------------------------------------ begin --AddAttribute if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Cells.Get_Lex_Str (Heap, Cells.Get_B_Ptr (Heap, TickCell)), Lex_Str2 => LexTokenManager.Base_Token) /= LexTokenManager.Str_Eq then InsertPtr := AttributeList; loop if Cells.Is_Null_Cell (Cells.Get_A_Ptr (Heap, InsertPtr)) then InsertAfter (InsertPtr, TickCell); exit; end if; if IsGreater (TickCell, NextTickCell (InsertPtr)) then InsertAfter (InsertPtr, TickCell); exit; end if; if IsEqual (TickCell, NextTickCell (InsertPtr)) then exit; end if; InsertPtr := Cells.Get_A_Ptr (Heap, InsertPtr); end loop; end if; end AddAttribute; ---------------------------------------------------------------- procedure AddBitwiseOp (Heap : in out Cells.Heap_Record; OpCell : in Cells.Cell) --# global in BitwiseOpList; --# in Dictionary.Dict; --# in out Statistics.TableUsage; --# in out UsedSymbols; --# derives Heap, --# Statistics.TableUsage from *, --# BitwiseOpList, --# Dictionary.Dict, --# Heap, --# OpCell, --# UsedSymbols & --# UsedSymbols from *, --# Dictionary.Dict, --# Heap, --# OpCell; is InsertPtr : Cells.Cell; function IsEqual (OpCell1, OpCell2 : Cells.Cell) return Boolean --# global in Heap; is begin return ((Cells.Get_Natural_Value (Heap, OpCell1) = Cells.Get_Natural_Value (Heap, OpCell2)) and 
then (Cells.Get_Op_Symbol (Heap, OpCell1) = Cells.Get_Op_Symbol (Heap, OpCell2))); end IsEqual; ------------------------------------ function IsGreater (OpCell1, OpCell2 : Cells.Cell) return Boolean --# global in Heap; is Result : Boolean; Val1, Val2 : Integer; begin Val1 := Cells.Get_Natural_Value (Heap, OpCell1); Val2 := Cells.Get_Natural_Value (Heap, OpCell2); if Val1 = Val2 then Result := Cells.Get_Op_Symbol (Heap, OpCell1) > Cells.Get_Op_Symbol (Heap, OpCell2); else Result := Val1 > Val2; end if; return Result; end IsGreater; ------------------------------------ function NextOpCell (CurrentLink : Cells.Cell) return Cells.Cell --# global in Heap; --pre not Cells.Is_Null_Cell (Cells.Get_A_Ptr (Heap, CurrentLink)); is begin return Cells.Get_C_Ptr (Heap, Cells.Get_A_Ptr (Heap, CurrentLink)); end NextOpCell; ------------------------------------ procedure InsertAfter (InsertPoint : in Cells.Cell; OpCell : in Cells.Cell) --# global in out Heap; --# in out Statistics.TableUsage; --# derives Heap from *, --# InsertPoint, --# OpCell & --# Statistics.TableUsage from *, --# Heap; is NewLink : Cells.Cell; begin Cells.Create_Cell (Heap, NewLink); Cells.Set_A_Ptr (Heap, NewLink, Cells.Get_A_Ptr (Heap, InsertPoint)); Cells.Set_A_Ptr (Heap, InsertPoint, NewLink); Cells.Set_C_Ptr (Heap, NewLink, OpCell); end InsertAfter; ------------------------------------ begin --AddBitwiseOp Add (Heap, Cells.Get_Symbol_Value (Heap, OpCell)); -- To get a type declaration InsertPtr := BitwiseOpList; loop if Cells.Is_Null_Cell (Cells.Get_A_Ptr (Heap, InsertPtr)) then InsertAfter (InsertPtr, OpCell); exit; end if; if IsGreater (OpCell, NextOpCell (InsertPtr)) then InsertAfter (InsertPtr, OpCell); exit; end if; if IsEqual (OpCell, NextOpCell (InsertPtr)) then exit; end if; InsertPtr := Cells.Get_A_Ptr (Heap, InsertPtr); end loop; end AddBitwiseOp; ---------------------------------------------------------------- procedure AddProcedureExport (Heap : in out Cells.Heap_Record; ExportCell : in Cells.Cell) --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in ProcedureExportList; --# in out Statistics.TableUsage; --# in out UsedSymbols; --# derives Heap, --# Statistics.TableUsage, --# UsedSymbols from *, --# Dictionary.Dict, --# ExportCell, --# Heap, --# LexTokenManager.State, --# ProcedureExportList, --# UsedSymbols; is NewLink : Cells.Cell; function AlreadyPresent return Boolean --# global in ExportCell; --# in Heap; --# in LexTokenManager.State; --# in ProcedureExportList; is CurrentCell : Cells.Cell; Found : Boolean; begin Found := False; CurrentCell := Cells.Get_A_Ptr (Heap, ProcedureExportList); while CurrentCell /= Cells.Null_Cell and not Found loop Found := Cells.Get_Symbol_Value (Heap, CurrentCell) = Cells.Get_Symbol_Value (Heap, ExportCell) and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Cells.Get_Lex_Str (Heap, CurrentCell), Lex_Str2 => Cells.Get_Lex_Str (Heap, ExportCell)) = LexTokenManager.Str_Eq; CurrentCell := Cells.Get_A_Ptr (Heap, CurrentCell); end loop; return Found; end AlreadyPresent; begin -- AddProcedureExport if not AlreadyPresent then Add (Heap, Cells.Get_Symbol_Value (Heap, ExportCell)); -- Ensure we get a type decl Cells.Create_Cell (Heap, NewLink); -- put in linked list Cells.Set_A_Ptr (Heap, NewLink, Cells.Get_A_Ptr (Heap, ProcedureExportList)); Cells.Set_A_Ptr (Heap, ProcedureExportList, NewLink); -- Copy in values to new list element Cells.Set_Symbol_Value (Heap, NewLink, Cells.Get_Symbol_Value (Heap, ExportCell)); Cells.Set_Lex_Str (Heap, NewLink, 
Cells.Get_Lex_Str (Heap, ExportCell)); end if; end AddProcedureExport; ---------------------------------------------------------------- procedure AddReturnVar (Heap : in out Cells.Heap_Record; ReturnVarCell : in Cells.Cell) --# global in Dictionary.Dict; --# in out Statistics.TableUsage; --# in out UsedSymbols; --# out ReturnSymbol; --# derives Heap, --# Statistics.TableUsage, --# UsedSymbols from *, --# Dictionary.Dict, --# Heap, --# ReturnVarCell, --# UsedSymbols & --# ReturnSymbol from ReturnVarCell; is begin Add (Heap, Cells.Get_Symbol_Value (Heap, ReturnVarCell)); -- To get a type declaration ReturnSymbol := ReturnVarCell; end AddReturnVar; ---------------------------------------------------------------- procedure AddUseOfRootInteger --# global out RootIntegerUsed; --# derives RootIntegerUsed from ; is begin RootIntegerUsed := True; end AddUseOfRootInteger; ------------------------------------------------------------------------- procedure Find_DAG_Declarations (Heap : in out Cells.Heap_Record; Root : in Cells.Cell) --# global in AttributeList; --# in BitwiseOpList; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in ProcedureExportList; --# in out ReturnSymbol; --# in out RootIntegerUsed; --# in out Statistics.TableUsage; --# in out UsedSymbols; --# derives Heap, --# ReturnSymbol, --# RootIntegerUsed, --# Statistics.TableUsage, --# UsedSymbols from *, --# AttributeList, --# BitwiseOpList, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# ProcedureExportList, --# Root, --# UsedSymbols; is P, Parenthesis_Cell, Sq_Bracket_Cell : Cells.Cell; Parenthesis_Form : SP_Symbols.SP_Symbol; Par_Reqd : Boolean; S : CStacks.Stack; -------------------------------------------------------------- function Is_Leaf (Node : Cells.Cell) return Boolean --# global in Heap; is begin return Cells.Is_Null_Cell (Cells.Get_B_Ptr (Heap, Node)); end Is_Leaf; -------------------------------------------------------------- procedure Parenthesise (V : in Cells.Cell; Left_Tree : in Boolean; Par_Reqd : out Boolean; Parenthesis_Form : out SP_Symbols.SP_Symbol) --# global in Heap; --# derives Parenthesis_Form from Heap, --# V & --# Par_Reqd from Heap, --# Left_Tree, --# V; is V_Precedence, W_Precedence : Natural; Operand, W : Cells.Cell; V_Kind : Cells.Cell_Kind; -------------------------------------------- function Precedence_Value (C : Cells.Cell) return Natural --# global in Heap; is Prec_Val : Natural; begin if Cells.Get_Kind (Heap, C) = Cell_Storage.FDL_Div_Op then Prec_Val := 5; else case Cells.Get_Op_Symbol (Heap, C) is when SP_Symbols.RWand | SP_Symbols.RWor | SP_Symbols.RWandthen | SP_Symbols.RWorelse | SP_Symbols.implies | SP_Symbols.RWnot | SP_Symbols.is_equivalent_to => Prec_Val := 1; when SP_Symbols.equals | SP_Symbols.not_equal | SP_Symbols.less_than | SP_Symbols.less_or_equal | SP_Symbols.greater_than | SP_Symbols.greater_or_equal => Prec_Val := 2; when SP_Symbols.plus | SP_Symbols.minus | SP_Symbols.ampersand => Prec_Val := 3; when SP_Symbols.multiply | SP_Symbols.divide | SP_Symbols.RWmod => Prec_Val := 5; when SP_Symbols.double_star => Prec_Val := 6; when others => Prec_Val := 7; end case; end if; return Prec_Val; end Precedence_Value; -------------------------------------------------------------------- begin -- Parenthesise; Par_Reqd := False; Parenthesis_Form := SP_Symbols.left_paren; V_Kind := Cells.Get_Kind (Heap, V); if (V_Kind = Cell_Storage.Declared_Function) or (V_Kind = Cell_Storage.Proof_Function) or (V_Kind = Cell_Storage.Attrib_Function) or (V_Kind = 
Cell_Storage.Field_Access_Function) or (V_Kind = Cell_Storage.Mk_Aggregate) or (V_Kind = Cell_Storage.List_Function) or (V_Kind = Cell_Storage.Element_Function) or (V_Kind = Cell_Storage.Update_Function) or (V_Kind = Cell_Storage.Pred_Function) or (V_Kind = Cell_Storage.Succ_Function) or (V_Kind = Cell_Storage.Abs_Function) or (V_Kind = Cell_Storage.Trunc_Function) or (V_Kind = Cell_Storage.Field_Update_Function) or (V_Kind = Cell_Storage.Bitwise_Op) then Par_Reqd := True; if (V_Kind = Cell_Storage.List_Function) then Parenthesis_Form := SP_Symbols.square_open; end if; elsif ((V_Kind = Cell_Storage.Op) -- TEMPORARY FIX until right_paren given its own kind and then ((Cells.Get_Op_Symbol (Heap, V) /= SP_Symbols.right_paren) -- END OF TEMPORARY FIX and (Cells.Get_Op_Symbol (Heap, V) /= SP_Symbols.comma))) or else (V_Kind = Cell_Storage.FDL_Div_Op) then if Cells.Is_Null_Cell (Cells.Get_A_Ptr (Heap, V)) then -- V is a monadic operator; Operand := Cells.Get_B_Ptr (Heap, V); if not Is_Leaf (Node => Operand) then Par_Reqd := True; end if; else if Left_Tree then W := Cells.Get_A_Ptr (Heap, V); else W := Cells.Get_B_Ptr (Heap, V); end if; if not Cells.Is_Null_Cell (W) then if (Cells.Get_Kind (Heap, W) = Cell_Storage.Op) or else (Cells.Get_Kind (Heap, W) = Cell_Storage.FDL_Div_Op) then V_Precedence := Precedence_Value (C => V); W_Precedence := Precedence_Value (C => W); -- general rule for constructing unambiguous expressions: Par_Reqd := (V_Precedence > W_Precedence) or ((V_Precedence = W_Precedence) and not Left_Tree); -- supplementary rules, to improve clarity: if (V_Precedence = 1) or -- v is a logical operation; (W_Precedence = 2) then -- subtree is a relation; Par_Reqd := True; end if; end if; end if; end if; end if; end Parenthesise; ----------------------------------------------------------------------- procedure Find_Cell_Contents_Declarations (Heap : in out Cells.Heap_Record; Cell_Name : in Cells.Cell) --# global in AttributeList; --# in BitwiseOpList; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in ProcedureExportList; --# in out ReturnSymbol; --# in out RootIntegerUsed; --# in out Statistics.TableUsage; --# in out UsedSymbols; --# derives Heap, --# Statistics.TableUsage from *, --# AttributeList, --# BitwiseOpList, --# Cell_Name, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# ProcedureExportList, --# UsedSymbols & --# ReturnSymbol, --# RootIntegerUsed from *, --# Cell_Name, --# Heap & --# UsedSymbols from *, --# Cell_Name, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# ProcedureExportList; is Id_Ref : Dictionary.Symbol; ------------------------------------------------------- procedure Find_Manifest_Constant_Cell_Declarations (Cell_Name : in Cells.Cell) --# global in Dictionary.Dict; --# in LexTokenManager.State; --# in out Heap; --# in out Statistics.TableUsage; --# in out UsedSymbols; --# derives Heap, --# Statistics.TableUsage, --# UsedSymbols from *, --# Cell_Name, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# UsedSymbols; is Ex_String : E_Strings.T; L_Str : LexTokenManager.Lex_String; begin --FindManifestConstantCell L_Str := Cells.Get_Lex_Str (Heap, Cell_Name); Ex_String := LexTokenManager.Lex_String_To_String (Lex_Str => L_Str); if E_Strings.Get_Element (E_Str => Ex_String, Pos => 1) = ''' then --character literal Add (Heap, Dictionary.GetPredefinedCharacterType); elsif E_Strings.Get_Element (E_Str => Ex_String, Pos => 1) = '"' then --string literal Add (Heap, Dictionary.GetPredefinedCharacterType); Add (Heap, 
Dictionary.GetPredefinedStringType); else -- should be a numeric null; end if; end Find_Manifest_Constant_Cell_Declarations; ------------------------------------------------------- begin -- Find_Cell_Contents_Declarations case Cells.Get_Kind (Heap, Cell_Name) is when Cell_Storage.Manifest_Const => Find_Manifest_Constant_Cell_Declarations (Cell_Name => Cell_Name); when Cell_Storage.Op => if Cells.Get_Op_Symbol (Heap, Cell_Name) = SP_Symbols.apostrophe then AddAttribute (Heap, Cell_Name); end if; when Cell_Storage.Return_Var => AddReturnVar (Heap, Cell_Name); when Cell_Storage.Named_Const => Id_Ref := Cells.Get_Symbol_Value (Heap, Cell_Name); Add (Heap, Id_Ref); when Cell_Storage.Declared_Function | Cell_Storage.Proof_Function | Cell_Storage.Modified_Op | Cell_Storage.Reference | Cell_Storage.Constraining_Index | Cell_Storage.Fixed_Var | Cell_Storage.Mk_Aggregate | Cell_Storage.Unconstrained_Attribute_Prefix => Id_Ref := Cells.Get_Symbol_Value (Heap, Cell_Name); if Cells.Get_Kind (Heap, Cell_Name) = Cell_Storage.Mk_Aggregate and then Dictionary.IsSubtype (Id_Ref) then Id_Ref := Dictionary.GetRootType (Id_Ref); end if; Add (Heap, Id_Ref); when Cell_Storage.Root_Integer => AddUseOfRootInteger; when Cell_Storage.Bitwise_Op => if Dictionary.TypeIsArray (Cells.Get_Symbol_Value (Heap, Cell_Name)) then AddBitwiseOp (Heap, Cell_Name); end if; when Cell_Storage.Procedure_Export => AddProcedureExport (Heap, Cell_Name); when others => null; end case; end Find_Cell_Contents_Declarations; ---------------------------------------------------------------- begin -- Find_DAG_Declarations -- Algorithm of D.E. Knuth, Fundamental Algorithms, p.317; CStacks.CreateStack (S); Cells.Create_Cell (Heap, Parenthesis_Cell); Cells.Set_Kind (Heap, Parenthesis_Cell, Cell_Storage.Op); Cells.Set_Op_Symbol (Heap, Parenthesis_Cell, SP_Symbols.left_paren); Cells.Create_Cell (Heap, Sq_Bracket_Cell); Cells.Set_Op_Symbol (Heap, Sq_Bracket_Cell, SP_Symbols.square_open); P := Root; loop loop exit when Cells.Is_Null_Cell (P); CStacks.Push (Heap, P, S); if Is_Leaf (Node => P) then P := Cells.Null_Cell; else if (not Cells.Is_Null_Cell (Cells.Get_A_Ptr (Heap, P))) then Parenthesise (V => P, Left_Tree => True, Par_Reqd => Par_Reqd, Parenthesis_Form => Parenthesis_Form); if Par_Reqd then if Parenthesis_Form = SP_Symbols.left_paren then CStacks.Push (Heap, Parenthesis_Cell, S); else CStacks.Push (Heap, Sq_Bracket_Cell, S); end if; end if; end if; P := Cells.Get_A_Ptr (Heap, P); end if; end loop; exit when CStacks.IsEmpty (S); P := CStacks.Top (Heap, S); CStacks.Pop (Heap, S); Find_Cell_Contents_Declarations (Heap => Heap, Cell_Name => P); if Is_Leaf (Node => P) then P := Cells.Null_Cell; loop exit when not ((Cells.Are_Identical (CStacks.Top (Heap, S), Parenthesis_Cell)) or (Cells.Are_Identical (CStacks.Top (Heap, S), Sq_Bracket_Cell))); CStacks.Pop (Heap, S); end loop; else Parenthesise (V => P, Left_Tree => False, Par_Reqd => Par_Reqd, Parenthesis_Form => Parenthesis_Form); if Par_Reqd then if Parenthesis_Form = SP_Symbols.left_paren then CStacks.Push (Heap, Parenthesis_Cell, S); else CStacks.Push (Heap, Sq_Bracket_Cell, S); end if; end if; P := Cells.Get_B_Ptr (Heap, P); end if; end loop; end Find_DAG_Declarations; ------------------------------------------------------------------------- procedure FindVCFormulaDeclarations (Heap : in out Cells.Heap_Record; PredicatePair : in Pairs.Pair; IgnoreTriviallyTrueVCs : in Boolean) --# global in AttributeList; --# in BitwiseOpList; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in 
ProcedureExportList; --# in out ReturnSymbol; --# in out RootIntegerUsed; --# in out Statistics.TableUsage; --# in out UsedSymbols; --# derives Heap, --# ReturnSymbol, --# RootIntegerUsed, --# Statistics.TableUsage, --# UsedSymbols from *, --# AttributeList, --# BitwiseOpList, --# Dictionary.Dict, --# Heap, --# IgnoreTriviallyTrueVCs, --# LexTokenManager.State, --# PredicatePair, --# ProcedureExportList, --# UsedSymbols; is ConclusionRoot, HypothesisRoot : Cells.Cell; -- GAA duplicated code from DAG_IO.ADB function IsTriviallyTrue (DAG : Cells.Cell) return Boolean --# global in Dictionary.Dict; --# in Heap; is CurrentCell : Cells.Cell; Result : Boolean := True; function IsTrueCell (TheCell : Cells.Cell) return Boolean --# global in Dictionary.Dict; --# in Heap; is begin return Cells.Get_Kind (Heap, TheCell) = Cell_Storage.Named_Const and then Dictionary.IsEnumerationLiteral (Cells.Get_Symbol_Value (Heap, TheCell)) and then Dictionary.Enumeration_Literals_Are_Equal (Left_Symbol => Cells.Get_Symbol_Value (Heap, TheCell), Right_Symbol => Dictionary.GetTrue); end IsTrueCell; function AppropriateBinaryOperator (OpSym : SP_Symbols.SP_Symbol) return Boolean is begin return OpSym = SP_Symbols.RWand or else OpSym = SP_Symbols.RWandthen or else OpSym = SP_Symbols.RWor or else OpSym = SP_Symbols.RWorelse or else OpSym = SP_Symbols.equals or else OpSym = SP_Symbols.implies or else OpSym = SP_Symbols.is_equivalent_to; end AppropriateBinaryOperator; begin --IsTriviallyTrue CurrentCell := DAG; loop exit when IsTrueCell (CurrentCell); --success condition --some expression other than an operator - fail if Cells.Get_Kind (Heap, CurrentCell) /= Cell_Storage.Op then Result := False; exit; end if; --inappropriate operator - fail if not AppropriateBinaryOperator (Cells.Get_Op_Symbol (Heap, CurrentCell)) then Result := False; exit; end if; --thing on left of operator is not true - fail if not IsTrueCell (Cells.Get_A_Ptr (Heap, CurrentCell)) then Result := False; exit; end if; --move down right hand chain of tree to get next sub-expression CurrentCell := Cells.Get_B_Ptr (Heap, CurrentCell); --fallen off the end - fail - (I think this check is redundant but safe) if Cells.Is_Null_Cell (CurrentCell) then Result := False; exit; end if; end loop; return Result; end IsTriviallyTrue; ------------------------------------------------------------------------- procedure FindLogicalExpnDeclarations (Heap : in out Cells.Heap_Record; Root : in Cells.Cell) --# global in AttributeList; --# in BitwiseOpList; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in ProcedureExportList; --# in out ReturnSymbol; --# in out RootIntegerUsed; --# in out Statistics.TableUsage; --# in out UsedSymbols; --# derives Heap, --# ReturnSymbol, --# RootIntegerUsed, --# Statistics.TableUsage, --# UsedSymbols from *, --# AttributeList, --# BitwiseOpList, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# ProcedureExportList, --# Root, --# UsedSymbols; is SubExpnList : Cells.Cell; procedure Partition --# global in Root; --# in SubExpnList; --# in out Heap; --# in out Statistics.TableUsage; --# derives Heap, --# Statistics.TableUsage from *, --# Heap, --# Root, --# SubExpnList; is P, SubExpn : Cells.Cell; S : CStacks.Stack; begin CStacks.CreateStack (S); P := Root; loop loop exit when Cells.Is_Null_Cell (P); CStacks.Push (Heap, P, S); if (Cells.Get_Kind (Heap, P) = Cell_Storage.Op) and then ((Cells.Get_Op_Symbol (Heap, P) = SP_Symbols.RWand) or (Cells.Get_Op_Symbol (Heap, P) = SP_Symbols.RWandthen)) then P := Cells.Get_A_Ptr (Heap, 
P); else Cells.Create_Cell (Heap, SubExpn); Cells.Set_B_Ptr (Heap, SubExpn, P); Clists.AppendCell (Heap, SubExpn, SubExpnList); P := Cells.Null_Cell; end if; end loop; exit when CStacks.IsEmpty (S); P := CStacks.Top (Heap, S); CStacks.Pop (Heap, S); if (Cells.Get_Kind (Heap, P) = Cell_Storage.Op) and then ((Cells.Get_Op_Symbol (Heap, P) = SP_Symbols.RWand) or (Cells.Get_Op_Symbol (Heap, P) = SP_Symbols.RWandthen)) then P := Cells.Get_B_Ptr (Heap, P); else P := Cells.Null_Cell; end if; end loop; end Partition; procedure FindListOfExpnsDeclarations --# global in AttributeList; --# in BitwiseOpList; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in ProcedureExportList; --# in SubExpnList; --# in out Heap; --# in out ReturnSymbol; --# in out RootIntegerUsed; --# in out Statistics.TableUsage; --# in out UsedSymbols; --# derives Heap, --# ReturnSymbol, --# RootIntegerUsed, --# Statistics.TableUsage, --# UsedSymbols from *, --# AttributeList, --# BitwiseOpList, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# ProcedureExportList, --# SubExpnList, --# UsedSymbols; is ListMember : Cells.Cell; begin -- This looks like the place to suppress multiple Trues in hypotheses -- and do something with trues in conclusions ListMember := Clists.FirstCell (Heap, SubExpnList); loop Find_DAG_Declarations (Heap => Heap, Root => Cells.Get_B_Ptr (Heap, ListMember)); ListMember := Clists.NextCell (Heap, ListMember); exit when Cells.Is_Null_Cell (ListMember); end loop; end FindListOfExpnsDeclarations; begin -- FindLogicalExpnDeclarations Clists.CreateList (Heap, SubExpnList); Partition; FindListOfExpnsDeclarations; Clists.DisposeOfList (Heap, SubExpnList); end FindLogicalExpnDeclarations; begin --FindVCFormulaDeclarations HypothesisRoot := Cells.Get_B_Ptr (Heap, Pairs.PairHead (PredicatePair)); ConclusionRoot := Cells.Get_C_Ptr (Heap, Pairs.PairHead (PredicatePair)); if IgnoreTriviallyTrueVCs and then IsTriviallyTrue (ConclusionRoot) then null; else FindLogicalExpnDeclarations (Heap, HypothesisRoot); FindLogicalExpnDeclarations (Heap, ConclusionRoot); end if; end FindVCFormulaDeclarations; procedure Initialize (It : out UsedSymbolIterator) --# global in UsedSymbols; --# derives It from UsedSymbols; is begin It := UsedSymbolIterator'(It => UsedSymbols); end Initialize; function CurrentNode (It : in UsedSymbolIterator) return Cells.Cell is begin return It.It; end CurrentNode; function NextNode (Heap : in Cells.Heap_Record; It : in UsedSymbolIterator) return UsedSymbolIterator is begin return UsedSymbolIterator'(It => Cells.Get_A_Ptr (Heap, It.It)); end NextNode; function IsNullIterator (It : in UsedSymbolIterator) return Boolean is begin return It = NullIterator; end IsNullIterator; ---------------------------------------------------------------- procedure PrintDeclarationTail (File : in SPARK_IO.File_Type) is begin SPARK_IO.New_Line (File, 1); SPARK_IO.Put_Line (File, "end;", 0); end PrintDeclarationTail; ---------------------------------------------------------------------- procedure OutputDeclarations (Heap : in out Cells.Heap_Record; File : in SPARK_IO.File_Type; Rule_File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Write_Rules : in Boolean; EndPosition : in LexTokenManager.Token_Position) --# global in AttributeList; --# in BitwiseOpList; --# in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in ProcedureExportList; --# in ReturnSymbol; --# in RootIntegerUsed; --# in UsedSymbols; --# in out ErrorHandler.Error_Context; --# in out 
SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives ErrorHandler.Error_Context from *, --# AttributeList, --# CommandLineData.Content, --# Dictionary.Dict, --# EndPosition, --# File, --# Heap, --# LexTokenManager.State, --# Rule_File, --# Scope, --# SPARK_IO.File_Sys, --# UsedSymbols, --# Write_Rules & --# Heap, --# Statistics.TableUsage from *, --# AttributeList, --# Dictionary.Dict, --# Heap, --# LexTokenManager.State, --# Scope, --# UsedSymbols, --# Write_Rules & --# SPARK_IO.File_Sys from *, --# AttributeList, --# BitwiseOpList, --# CommandLineData.Content, --# Dictionary.Dict, --# EndPosition, --# ErrorHandler.Error_Context, --# File, --# Heap, --# LexTokenManager.State, --# ProcedureExportList, --# ReturnSymbol, --# RootIntegerUsed, --# Rule_File, --# Scope, --# UsedSymbols, --# Write_Rules; is separate; end Declarations; spark-2012.0.deb/examiner/sem-walk_expression_p-get_string_literal_length.adb0000644000175000017500000000240411753202336026457 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.Walk_Expression_P) function Get_String_Literal_Length (Str : LexTokenManager.Lex_String) return Maths.Value is begin return Maths.IntegerToValue (E_Strings.Get_Length (E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => Str)) - 2); -- -2 is to remove quotes end Get_String_Literal_Length; spark-2012.0.deb/examiner/sem-walk_expression_p-simple_expression_type_from_context.adb0000644000175000017500000002404611753202336030643 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Walk_Expression_P) function Simple_Expression_Type_From_Context (Exp_Node : STree.SyntaxNode; T_Stack : Type_Context_Stack.T_Stack_Type) return Dictionary.Symbol is New_Context_Type : Dictionary.Symbol; Parent, Child : STree.SyntaxNode; Grand_Parent : STree.SyntaxNode; begin -- The determination of the type context for a simple_expression depends -- on the parent (and possibly the grandparent) node in the syntax tree. -- The possible parent nodes (as determined from the grammar) are as follows, -- and fall into 3 groups: -- -- Group 1 - parent nodes where a change of context might be needed -- arange -- annotation_arange -- aggregate_choice -- annotation_aggregate_choice -- case_choice -- relation -- annotation_relation -- -- Group 2 - parent nodes where the simple_expression appears in a universal -- context, and the context is supplied by whoever called WalkExpression, -- so no change in context needed. -- Modular_Type_Definition -- Floating_Accuracy_Definition -- Fixed_Accuracy_Definition -- -- Group 3 - Simple_Expressions appearing in rep. clauses, which are not analysed -- at present, so no change in context needed. -- Attribute_Definition_Clause -- Mod_Clause -- Component_Clause -- At_Clause Parent := STree.Parent_Node (Current_Node => Exp_Node); -- ASSUME Parent = modular_type_definition OR floating_accuracy_definition OR fixed_accuracy_definition OR -- case_choice OR attribute_definition_clause OR mod_clause OR component_clause OR at_clause OR -- arange OR aggregate_choice OR relation OR simple_expression OR -- annotation_arange OR annotation_aggregate_choice OR annotation_relation OR annotation_simple_expression case STree.Syntax_Node_Type (Node => Parent) is -------------------------- -- Group 1 Parent nodes -- -------------------------- when SP_Symbols.arange | SP_Symbols.annotation_arange => -- ASSUME Parent = arange OR annotation_arange -- For [annotation_]arange, the new context depends on the -- grandparent node as well. Possible grandparent nodes are: -- relation -- annotation_relation -- loop_parameter_specification -- range_constraint -- annotation_range_constraint -- quantified_expression -- component_clause Grand_Parent := STree.Parent_Node (Current_Node => Parent); -- ASSUME Grand_Parent = loop_parameter_specification OR component_clause OR quantified_expression OR -- range_constraint OR relation OR -- annotation_range_constraint OR annotation_relation case STree.Syntax_Node_Type (Node => Grand_Parent) is when SP_Symbols.relation | SP_Symbols.annotation_relation => -- ASSUME Grand_Parent = relation OR annotation_relation -- Must be a membership test like "A in B .. C" or -- Context is lost here, since "in" is defined for all types. New_Context_Type := Dictionary.GetUnknownTypeMark; when SP_Symbols.loop_parameter_specification => -- ASSUME Grand_Parent = loop_parameter_specification -- Context for the loop range is passed in from wf_loop_param, -- so no change is needed here. New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); when SP_Symbols.range_constraint | SP_Symbols.annotation_range_constraint => -- ASSUME Grand_Parent = range_constraint OR annotation_range_constraint -- These nodes have their own special function for determining context, -- so no change here. 
New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); when SP_Symbols.quantified_expression => -- ASSUME Grand_Parent = quantified_expression -- Down_wf_quantifier plants the quantified variable's symbol -- in the Identifier node below the quantified_expression node, so we -- can grab that and look up its type. New_Context_Type := Dictionary.GetType (STree.NodeSymbol (STree.Next_Sibling (STree.Child_Node (Grand_Parent)))); when SP_Symbols.component_clause => -- ASSUME Grand_Parent = component_clause -- Part of a rep. clause, so no change New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); when others => -- Must be an error resulting from an invalid syntax tree, -- but we need to push something so... New_Context_Type := Dictionary.NullSymbol; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Grand_Parent = loop_parameter_specification OR component_clause OR quantified_expression OR " & "range_constraint OR relation OR " & "annotation_range_constraint OR annotation_relation in Simple_Expression_Type_From_Context"); end case; when SP_Symbols.aggregate_choice | SP_Symbols.annotation_aggregate_choice => -- ASSUME Parent = aggregate_choice OR annotation_aggregate_choice -- For a named aggregate choice, the required index type is -- always on top of the aggregate stack, so... New_Context_Type := Aggregate_Stack.Top_Type_Sym; when SP_Symbols.case_choice => -- ASSUME Parent = case_choice -- The correct type for the context is passed into WalkExpression -- from wf_case_choice, so no change required here. New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); when SP_Symbols.relation | SP_Symbols.annotation_relation => -- ASSUME Parent = relation OR annotation_relation Child := STree.Next_Sibling (Current_Node => STree.Child_Node (Current_Node => Parent)); -- ASSUME Child = relational_operator OR inside OR outside OR NULL if Child = STree.NullNode then -- ASSUME Child = NULL -- This relation has no operator, so preserve context New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); elsif STree.Syntax_Node_Type (Node => Child) = SP_Symbols.relational_operator or else STree.Syntax_Node_Type (Node => Child) = SP_Symbols.inside or else STree.Syntax_Node_Type (Node => Child) = SP_Symbols.outside then -- This relation has an operator, so context is lost New_Context_Type := Dictionary.GetUnknownTypeMark; else New_Context_Type := Dictionary.NullSymbol; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child = relational_operator OR inside OR outside OR NULL in Simple_Expression_Type_From_Context"); end if; -------------------------- -- Group 2 Parent nodes -- -------------------------- when SP_Symbols.modular_type_definition | SP_Symbols.floating_accuracy_definition | SP_Symbols.fixed_accuracy_definition => -- ASSUME Parent = modular_type_definition OR floating_accuracy_definition OR fixed_accuracy_definition -- No change in context here. New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); -------------------------- -- Group 3 Parent nodes -- -------------------------- when SP_Symbols.attribute_definition_clause | SP_Symbols.mod_clause | SP_Symbols.component_clause | SP_Symbols.at_clause => -- ASSUME Parent = attribute_definition_clause OR mod_clause OR component_clause OR at_clause -- No change in context here. 
New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); when SP_Symbols.simple_expression | SP_Symbols.annotation_simple_expression => -- ASSUME Parent = simple_expression OR annotation_simple_expression -- Must be an error, which will be caught elsewhere, -- but we need to push something so... New_Context_Type := Type_Context_Stack.Top (Stack => T_Stack); when others => New_Context_Type := Dictionary.NullSymbol; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Parent = modular_type_definition OR floating_accuracy_definition OR fixed_accuracy_definition OR " & "case_choice OR attribute_definition_clause OR mod_clause OR component_clause OR at_clause OR " & "arange OR aggregate_choice OR relation OR simple_expression OR " & "annotation_arange OR annotation_aggregate_choice OR annotation_relation OR annotation_simple_expression " & "in Simple_Expression_Type_From_Context"); end case; return New_Context_Type; end Simple_Expression_Type_From_Context; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_attribute_designator.adb0000644000175000017500000025556411753202336026174 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Walk_Expression_P) procedure Wf_Attribute_Designator (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; The_Heap : in out Heap.HeapRecord; Ref_Var : in SeqAlgebra.Seq) is Ident_Node, Arg_Exp_Node, Second_Arg_Exp_Node : STree.SyntaxNode; Ident_Node_Pos, Arg_Exp_Node_Pos, Second_Arg_Exp_Node_Pos : LexTokenManager.Token_Position; Type_So_Far, Argument_Expression : Sem.Exp_Record; Second_Argument_Expression : Sem.Exp_Record; Argument_Found, Second_Argument_Found : Boolean := False; Ident_Str : LexTokenManager.Lex_String; Val, Unused_Val, RHS_Val : Maths.Value; Base_Found : Boolean; Ok_So_Far : Boolean; Prefix_Kind : Dictionary.PrefixSort; Prefix_Type : Dictionary.Symbol; VCG_Type : Dictionary.Symbol; ---------------------------------------------------------------------- procedure Get_Prefix (Ident_Node_Pos : in LexTokenManager.Token_Position; Node_Pos : in LexTokenManager.Token_Position; Scope : in Dictionary.Scopes; Base_Found : in Boolean; Prefix : out Sem.Exp_Record; Kind : out Dictionary.PrefixSort; E_Stack : in out Exp_Stack.Exp_Stack_Type) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Base_Found, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# E_Stack, --# Ident_Node_Pos, --# LexTokenManager.State, --# Node_Pos, --# SPARK_IO.File_Sys & --# E_Stack from * & --# Kind from Base_Found, --# Dictionary.Dict, --# E_Stack & --# Prefix from Base_Found, --# CommandLineData.Content, --# Dictionary.Dict, --# E_Stack, --# Scope; --# post Dictionary.Is_Null_Symbol (Prefix.Type_Symbol) or Dictionary.IsTypeMark (Prefix.Type_Symbol, Dictionary.Dict); is Result : Sem.Exp_Record; begin Kind := Dictionary.AType; Exp_Stack.Pop (Item => Result, Stack => E_Stack); -- this is type of prefix expression if Result.Sort = Sem.Is_Unknown then Result := Sem.Unknown_Type_Record; elsif Result.Sort = Sem.Is_Type_Mark then Result.Is_Static := Dictionary.IsStatic (Result.Type_Symbol, Scope); Result.Is_Constant := True; Result.Is_ARange := False; if Base_Found then if CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83 and then not Dictionary.TypeIsScalar (Result.Type_Symbol) then Result := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 96, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => LexTokenManager.Base_Token); end if; Kind := Dictionary.ABaseType; end if; elsif Dictionary.IsObject (Result.Other_Symbol) or else Dictionary.IsSubcomponent (Result.Other_Symbol) then if Dictionary.IsUniversalIntegerType (Result.Type_Symbol) or else Dictionary.IsUniversalRealType (Result.Type_Symbol) then -- its a named number and not a proper object Result := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 31, Reference => ErrorHandler.No_Reference, Position => Ident_Node_Pos, Id_Str => LexTokenManager.Null_String); else Result.Is_Static := Dictionary.IsStatic (Result.Type_Symbol, Scope); Result.Is_Constant := Dictionary.Is_Constant (Result.Type_Symbol); Result.Is_ARange := False; Result.Variable_Symbol := Dictionary.NullSymbol; Result.Is_AVariable := False; Result.Is_An_Entire_Variable := False; Kind := Dictionary.AnObject; end if; elsif Result = Sem.Unknown_Type_Record then null; else Result := 
Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 31, Reference => ErrorHandler.No_Reference, Position => Ident_Node_Pos, Id_Str => LexTokenManager.Null_String); end if; Prefix := Result; end Get_Prefix; ---------------------------------------------------------------------- function Proof_Attribute_Is_Visible (Ident_Str : LexTokenManager.Lex_String; Prefix_Kind : Dictionary.PrefixSort; Prefix_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; --# in LexTokenManager.State; is begin return Prefix_Kind = Dictionary.AnObject and then ((LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Tail_Token) = LexTokenManager.Str_Eq and then Dictionary.GetOwnVariableOrConstituentMode (Prefix_Sym) = Dictionary.InMode) or else (LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Append_Token) = LexTokenManager.Str_Eq and then Dictionary.GetOwnVariableOrConstituentMode (Prefix_Sym) = Dictionary.OutMode)); end Proof_Attribute_Is_Visible; ---------------------------------------------------------------------- function Always_Takes_One_Argument (Str : LexTokenManager.Lex_String) return Boolean --# global in LexTokenManager.State; is begin return LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Pos_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Pred_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Succ_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Val_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Floor_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Ceiling_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Tail_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Mod_Token) = LexTokenManager.Str_Eq; end Always_Takes_One_Argument; ---------------------------------------------------------------------- function Always_Takes_Two_Arguments (Str : LexTokenManager.Lex_String) return Boolean --# global in LexTokenManager.State; is begin return LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Min_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Max_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Append_Token) = LexTokenManager.Str_Eq; end Always_Takes_Two_Arguments; ---------------------------------------------------------------------- function Argument_Type_Correct (Str : LexTokenManager.Lex_String; Prefix_Type, Arg_Type : Dictionary.Symbol; Scope : Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; --# in LexTokenManager.State; is Result : Boolean; begin if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Val_Token) = 
LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Mod_Token) = LexTokenManager.Str_Eq then Result := Dictionary.IsIntegerTypeMark (Arg_Type, Scope) or else Dictionary.IsModularTypeMark (Arg_Type, Scope) or else Dictionary.IsUnknownTypeMark (Arg_Type); else Result := Dictionary.CompatibleTypes (Scope, Prefix_Type, Arg_Type); end if; return Result; end Argument_Type_Correct; ---------------------------------------------------------------------- function Check_Static (Ident_Str : LexTokenManager.Lex_String; Prefix_Type : Dictionary.Symbol; Type_So_Far : Sem.Exp_Record; Argument_Expression : Sem.Exp_Record; Argument_Found : Boolean; Scope : Dictionary.Scopes) return Boolean --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; is Result : Boolean; function Is_Static_Array_Attribute (Ident_Str : LexTokenManager.Lex_String; Prefix_Type : Dictionary.Symbol; Scope : Dictionary.Scopes) return Boolean --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; is begin return CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83 and then (LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.First_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Last_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Length_Token) = LexTokenManager.Str_Eq) and then Dictionary.Is_Constrained_Array_Type_Mark (Prefix_Type, Scope); end Is_Static_Array_Attribute; ---------------------------------------------------------------------- function Attribute_Considered_Static (Str : LexTokenManager.Lex_String; Scope : Dictionary.Scopes; Type_So_Far : Sem.Exp_Record) return Boolean --# global in Dictionary.Dict; --# in LexTokenManager.State; is Result : Boolean; begin if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Aft_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Base_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Delta_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Digits_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Emax_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Epsilon_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.First_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Fore_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Large_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Last_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare 
(Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Machine_Emax_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Machine_Emin_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Machine_Mantissa_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Machine_Overflows_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Machine_Radix_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Machine_Rounds_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Mantissa_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Pred_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Pos_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Safe_Emax_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Safe_Large_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Safe_Small_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Small_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Succ_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Val_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Component_Size_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Denorm_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Model_Emin_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Model_Epsilon_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Model_Mantissa_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Model_Small_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Safe_First_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Safe_Last_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Signed_Zeros_Token) = LexTokenManager.Str_Eq or else 
LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Min_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Max_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Modulus_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Floor_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Ceiling_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Mod_Token) = LexTokenManager.Str_Eq then Result := True; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Size_Token) = LexTokenManager.Str_Eq then -- 'Size is static only for a prefix that denotes a static -- scalar subtype Result := Dictionary.IsScalarType (Type_So_Far.Type_Symbol, Scope); else Result := False; end if; return Result; end Attribute_Considered_Static; begin -- Check_Static Result := (Type_So_Far.Is_Static and then Attribute_Considered_Static (Str => Ident_Str, Scope => Scope, Type_So_Far => Type_So_Far)) or else Is_Static_Array_Attribute (Ident_Str => Ident_Str, Prefix_Type => Prefix_Type, Scope => Scope); if Result and then Argument_Found then Result := Argument_Expression.Is_Static; end if; return Result; end Check_Static; ---------------------------------------------------------------------- function Check_Constant (Ident_Str : LexTokenManager.Lex_String; Type_So_Far : Sem.Exp_Record; Argument_Expression : Sem.Exp_Record; Argument_Found : Boolean; Scope : Dictionary.Scopes) return Boolean --# global in Dictionary.Dict; --# in LexTokenManager.State; is Result : Boolean; begin Result := Type_So_Far.Is_Constant or else Dictionary.Is_Constrained_Array_Type_Mark (Type_So_Far.Type_Symbol, Scope); if Result then if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Base_Token) = LexTokenManager.Str_Eq then Result := not Dictionary.IsUnconstrainedArrayType (Dictionary.GetRootType (Type_So_Far.Type_Symbol)); elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Size_Token) = LexTokenManager.Str_Eq then -- S'Size is only considered to be constant/static for -- scalar types Result := Dictionary.IsScalarType (Type_So_Far.Type_Symbol, Scope); end if; end if; if Result and then Argument_Found then Result := Argument_Expression.Is_Constant; end if; return Result; end Check_Constant; ---------------------------------------------------------------------- function Check_Range (Ident_Str : LexTokenManager.Lex_String) return Boolean --# global in LexTokenManager.State; is begin return LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Range_Token) = LexTokenManager.Str_Eq; end Check_Range; ---------------------------------------------------------------------- procedure Basic_Checks (Node : in STree.SyntaxNode; Ident_Node_Pos : in LexTokenManager.Token_Position; Ident_Str : in LexTokenManager.Lex_String; Arg_Exp_Node_Pos : in LexTokenManager.Token_Position; Second_Arg_Exp_Node_Pos : in LexTokenManager.Token_Position; Argument_Found : in Boolean; Second_Argument_Found : in 
Boolean; Type_So_Far : in out Sem.Exp_Record; Ok : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Argument_Found, --# Arg_Exp_Node_Pos, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node_Pos, --# Ident_Str, --# LexTokenManager.State, --# Node, --# Second_Argument_Found, --# Second_Arg_Exp_Node_Pos, --# SPARK_IO.File_Sys, --# STree.Table, --# Type_So_Far & --# Ok from Argument_Found, --# CommandLineData.Content, --# Ident_Str, --# LexTokenManager.State, --# Node, --# Second_Argument_Found, --# STree.Table, --# Type_So_Far & --# Type_So_Far from *, --# Argument_Found, --# CommandLineData.Content, --# Dictionary.Dict, --# Ident_Str, --# LexTokenManager.State, --# Node, --# Second_Argument_Found, --# STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.attribute_designator or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_attribute_designator; is -- this function should gradually wither away as attributes are implemented function Not_Yet_Implemented (Str : LexTokenManager.Lex_String) return Boolean --# global in LexTokenManager.State; is begin return LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Adjacent_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Compose_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Copy_Sign_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Leading_Part_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Remainder_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Scaling_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Exponent_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Fraction_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Machine_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Model_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Rounding_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Truncation_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Machine_Rounding_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Unbiased_Rounding_Token) = LexTokenManager.Str_Eq; end Not_Yet_Implemented; ---------------------------------------------------------------------- -- identifies 
special attributes that are part of the pre/post annotation -- language only function Is_Proof_Attribute (Str : LexTokenManager.Lex_String) return Boolean --# global in LexTokenManager.State; is begin return LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Tail_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Append_Token) = LexTokenManager.Str_Eq; end Is_Proof_Attribute; ---------------------------------------------------------------------- function Never_Takes_Arguments (Str : LexTokenManager.Lex_String) return Boolean --# global in LexTokenManager.State; is begin return not (LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.First_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Last_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Length_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Pos_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Pred_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Range_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Succ_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Val_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Min_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Max_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Tail_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Ceiling_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Floor_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Append_Token) = LexTokenManager.Str_Eq or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Str, Lex_Str2 => LexTokenManager.Mod_Token) = LexTokenManager.Str_Eq); end Never_Takes_Arguments; -- Returns True iff Node appears in an expression tree below -- the left or right hand side of an "&" operator. "&" is -- a binary_adding_operator so appears as part of a simple_expression -- or an annotation_simple_expression. 
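-- Editor's sketch (an assumption for illustration only, not text from the SPARK sources
-- or LRM; the names S, X and C below are hypothetical): in an assignment such as
--    S := "ab" & Character'Val (X);
-- the attribute_designator for Val sits below the right-hand side of the "&"
-- binary_adding_operator, so this function is expected to return True, whereas in
--    X := Character'Pos (C) + 1;
-- the binary_adding_operator is "+", not "&", so it is expected to return False.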
function Attribute_Is_Below_Catenation_Operator (Node : STree.SyntaxNode) return Boolean --# global in STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.attribute_designator or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_attribute_designator; is Result : Boolean; Current_Node : STree.SyntaxNode; begin -- Depends on the structure of "simple_expression" and -- "annotation_simple_expression" in SPARK.LLA Current_Node := Node; -- Find parent [annotation_]primary or [annotation_]arange while STree.Syntax_Node_Type (Node => Current_Node) /= SP_Symbols.annotation_primary and then STree.Syntax_Node_Type (Node => Current_Node) /= SP_Symbols.primary and then STree.Syntax_Node_Type (Node => Current_Node) /= SP_Symbols.annotation_arange and then STree.Syntax_Node_Type (Node => Current_Node) /= SP_Symbols.arange loop Current_Node := STree.Parent_Node (Current_Node => Current_Node); end loop; -- ASSUME Current_Node = annotation_primary OR primary OR annotation_arange OR arange if STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.annotation_arange or else STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.arange then -- ASSUME Current_Node = annotation_arange OR arange Result := False; elsif STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.annotation_primary or else STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.primary then -- ASSUME Current_Node = annotation_primary OR primary -- Find parent [annotation_]simple_expression[_opt] while STree.Syntax_Node_Type (Node => Current_Node) /= SP_Symbols.simple_expression and then STree.Syntax_Node_Type (Node => Current_Node) /= SP_Symbols.annotation_simple_expression and then STree.Syntax_Node_Type (Node => Current_Node) /= SP_Symbols.simple_expression_opt and then STree.Syntax_Node_Type (Node => Current_Node) /= SP_Symbols.annotation_simple_expression_opt loop Current_Node := STree.Parent_Node (Current_Node => Current_Node); end loop; -- ASSUME Current_Node = simple_expression OR annotation_simple_expression OR -- simple_expression_opt OR annotation_simple_expression_opt if STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.simple_expression_opt or else STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.annotation_simple_expression_opt then -- ASSUME Current_Node = simple_expression_opt OR annotation_simple_expression_opt Current_Node := STree.Parent_Node (Current_Node => Current_Node); -- ASSUME Current_Node = simple_expression OR annotation_simple_expression SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.simple_expression or else STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.annotation_simple_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = simple_expression OR annotation_simple_expression " & "in Attribute_Is_Below_Catenation_Operator"); Current_Node := STree.Next_Sibling (Current_Node => Current_Node); -- ASSUME Current_Node = simple_expression OR range_constraint OR inside OR -- annotation_simple_expression OR annotation_range_constraint OR outside OR -- relational_operator OR binary_adding_operator OR arange OR NULL -- From SPARK.LLA: -- -- simple_expression : -- simple_expression binary_adding_operator term -- | simple_expression_opt ; -- (and similarly for annotation_simple_expression) -- So...we might be looking at an attribute which is on the Left or -- the Right hand side of the binary_adding_operator. 
-- If we're on the Left side, then the simple_expression found -- might have a binary_adding_operator as its sibling, so if Current_Node = STree.NullNode or else STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.simple_expression or else STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.annotation_simple_expression or else STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.range_constraint or else STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.annotation_range_constraint or else STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.inside or else STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.outside or else STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.relational_operator or else STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.arange then -- ASSUME Current_Node = simple_expression OR range_constraint OR inside OR -- annotation_simple_expression OR annotation_range_constraint OR outside OR -- relational_operator OR arange OR NULL Current_Node := STree.NullNode; elsif STree.Syntax_Node_Type (Node => Current_Node) /= SP_Symbols.binary_adding_operator then Current_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = relational_operator OR binary_adding_operator OR arange OR " & " simple_expression OR range_constraint OR inside OR " & "annotation_simple_expression OR annotation_range_constraint OR outside OR " & "NULL in Attribute_Is_Below_Catenation_Operator"); end if; elsif STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.simple_expression or else STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.annotation_simple_expression then -- ASSUME Current_Node = simple_expression OR annotation_simple_expression -- If the attribute was on the Right of the "&", then we'll have stepped -- over the "term" and arrived at the simple_expression _above_ the operator -- node, so Current_Node := STree.Child_Node (Current_Node => Current_Node); -- ASSUME Current_Node = annotation_simple_expression OR simple_expression SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.simple_expression or else STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.annotation_simple_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = simple_expression OR annotation_simple_expression " & "in Attribute_Is_Below_Catenation_Operator"); Current_Node := STree.Next_Sibling (Current_Node => Current_Node); -- ASSUME Current_Node = binary_adding_operator SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.binary_adding_operator, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = binary_adding_operator in Attribute_Is_Below_Catenation_Operator"); else Current_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = simple_expression OR annotation_simple_expression OR " & "simple_expression_opt OR annotation_simple_expression_opt " & "in Attribute_Is_Below_Catenation_Operator"); end if; -- ASSUME Current_Node = binary_adding_operator OR NULL if Current_Node = STree.NullNode then Result := False; elsif STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.binary_adding_operator then -- ASSUME Current_Node = binary_adding_operator Current_Node := STree.Child_Node (Current_Node => Current_Node); -- ASSUME Current_Node = plus OR minus OR ampersand SystemErrors.RT_Assert 
(C => STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.plus or else STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.minus or else STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.ampersand, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = plus OR minus OR ampersand in Attribute_Is_Below_Catenation_Operator"); Result := STree.Syntax_Node_Type (Node => Current_Node) = SP_Symbols.ampersand; else Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = binary_adding_operator OR NULL in Attribute_Is_Below_Catenation_Operator"); end if; else Result := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = annotation_primary OR primary OR annotation_arange OR arange " & "in Attribute_Is_Below_Catenation_Operator"); end if; return Result; end Attribute_Is_Below_Catenation_Operator; begin -- Basic_Checks Ok := False; if not LexTokenManager.Is_Attribute_Token (Tok => Ident_Str, Language => CommandLineData.Content.Language_Profile) then Type_So_Far := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 54, Reference => ErrorHandler.No_Reference, Position => Ident_Node_Pos, Id_Str => Ident_Str); elsif Not_Yet_Implemented (Str => Ident_Str) then Type_So_Far := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 30, Reference => ErrorHandler.No_Reference, Position => Ident_Node_Pos, Id_Str => Ident_Str); elsif Is_Proof_Attribute (Str => Ident_Str) and then STree.Syntax_Node_Type (Node => Node) = SP_Symbols.attribute_designator then Type_So_Far := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 54, Reference => ErrorHandler.No_Reference, Position => Ident_Node_Pos, Id_Str => Ident_Str); elsif Argument_Found and then Never_Takes_Arguments (Str => Ident_Str) then Type_So_Far := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 55, Reference => ErrorHandler.No_Reference, Position => Arg_Exp_Node_Pos, Id_Str => Ident_Str); elsif not Argument_Found and then (Always_Takes_One_Argument (Str => Ident_Str) or else (Always_Takes_Two_Arguments (Str => Ident_Str))) then Type_So_Far := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 56, Reference => ErrorHandler.No_Reference, Position => Ident_Node_Pos, Id_Str => LexTokenManager.Null_String); elsif not Second_Argument_Found and then (Always_Takes_Two_Arguments (Str => Ident_Str)) then Type_So_Far := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 56, Reference => ErrorHandler.No_Reference, Position => Arg_Exp_Node_Pos, Id_Str => LexTokenManager.Null_String); elsif Second_Argument_Found and then Always_Takes_One_Argument (Str => Ident_Str) then Type_So_Far := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 49, Reference => ErrorHandler.No_Reference, Position => Second_Arg_Exp_Node_Pos, Id_Str => Ident_Str); -- check that prefix of Pred, Succ, Pos, Val is typemark elsif Always_Takes_One_Argument (Str => Ident_Str) and then not Is_Proof_Attribute (Str => Ident_Str) and then -- don't want 'Tail to trip this test Type_So_Far.Sort = Sem.Is_Object then ErrorHandler.Semantic_Error (Err_Num => 63, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => STree.Parent_Node (Current_Node => Node)), Id_Str => Dictionary.GetSimpleName (Type_So_Far.Other_Symbol)); Type_So_Far := Sem.Unknown_Type_Record; elsif Attribute_Is_Below_Catenation_Operator (Node => Node) then -- FDL has no 
"&" operator, so the VCG has to be able to -- statically evaluate all "&" operators (for Strings and Characters). -- Therefore, for simplicity, the SPARK LRM (4.5.3) forbids attributes -- below an "&" operator. Chacacter'Val ( ... ) is particularly -- troublesome, since this would require the VCG to be able to -- statically evaluate some arbitrary integer-valued expression. Type_So_Far := Sem.Unknown_Type_Record; -- Assume Character type result to avoid additional type-checking errors Type_So_Far.Type_Symbol := Dictionary.GetPredefinedCharacterType; ErrorHandler.Semantic_Error (Err_Num => 424, Reference => ErrorHandler.No_Reference, Position => Ident_Node_Pos, Id_Str => Ident_Str); else Ok := True; end if; end Basic_Checks; ---------------------------------------------------------------------- procedure Base_Checks (Node : in STree.SyntaxNode; Ident_Node_Pos : in LexTokenManager.Token_Position; Ident_Str : in LexTokenManager.Lex_String; Type_So_Far : in out Sem.Exp_Record; Continue : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Continue from Ident_Str, --# LexTokenManager.State & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node_Pos, --# Ident_Str, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table, --# Type_So_Far & --# Type_So_Far from *, --# Dictionary.Dict, --# Ident_Str, --# LexTokenManager.State, --# Node, --# STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.attribute_designator or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_attribute_designator; is The_Parent_Node, The_Child_Node : STree.SyntaxNode; begin if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Base_Token) = LexTokenManager.Str_Eq then Continue := False; -- whatever happens we don't want to do any more checks The_Parent_Node := STree.Parent_Node (Current_Node => Node); -- ASSUME The_Parent_Node = attribute OR attribute_designator OR -- annotation_attribute OR annotation_attribute_designator SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => The_Parent_Node) = SP_Symbols.attribute or else STree.Syntax_Node_Type (Node => The_Parent_Node) = SP_Symbols.attribute_designator or else STree.Syntax_Node_Type (Node => The_Parent_Node) = SP_Symbols.annotation_attribute or else STree.Syntax_Node_Type (Node => The_Parent_Node) = SP_Symbols.annotation_attribute_designator, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect The_Parent_Node = attribute OR attribute_designator OR " & "annotation_attribute OR annotation_attribute_designator in Base_Checks"); The_Child_Node := STree.Child_Node (Current_Node => Node); -- ASSUME The_Child_Node = attribute_designator OR annotation_attribute_designator OR attribute_ident SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => The_Child_Node) = SP_Symbols.attribute_designator or else STree.Syntax_Node_Type (Node => The_Child_Node) = SP_Symbols.annotation_attribute_designator or else STree.Syntax_Node_Type (Node => The_Child_Node) = SP_Symbols.attribute_ident, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect The_Child_Node = attribute_designator OR annotation_attribute_designator OR " & "attribute_ident in Base_Checks"); if STree.Syntax_Node_Type (Node => 
The_Parent_Node) = SP_Symbols.attribute or else STree.Syntax_Node_Type (Node => The_Parent_Node) = SP_Symbols.annotation_attribute or else STree.Syntax_Node_Type (Node => The_Child_Node) = SP_Symbols.attribute_designator or else STree.Syntax_Node_Type (Node => The_Child_Node) = SP_Symbols.annotation_attribute_designator then Type_So_Far := Sem.Unknown_Type_Record; if STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_attribute_designator then ErrorHandler.Semantic_Error (Err_Num => 97, Reference => 2, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); else ErrorHandler.Semantic_Error (Err_Num => 97, Reference => 1, Position => STree.Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; elsif Type_So_Far.Sort /= Sem.Is_Type_Mark then Type_So_Far := Sem.Unknown_Type_Record; ErrorHandler.Semantic_Error (Err_Num => 96, Reference => ErrorHandler.No_Reference, Position => Ident_Node_Pos, Id_Str => Ident_Str); end if; else Continue := True; end if; end Base_Checks; ----------------------------------------------------------------------------- procedure Process_Array_Attribute (Ident_Str : in LexTokenManager.Lex_String; Prefix_Type : in Dictionary.Symbol; Arg_Exp_Node_Pos : in LexTokenManager.Token_Position; Argument_Expression : in Sem.Exp_Record; Argument_Found : in Boolean; Scope : in Dictionary.Scopes; Type_So_Far : in out Sem.Exp_Record; Ok_So_Far : in out Boolean; VCG_Type : in out Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Argument_Expression, --# Argument_Found, --# Arg_Exp_Node_Pos, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# Type_So_Far & --# Ok_So_Far from *, --# Argument_Expression, --# Argument_Found, --# CommandLineData.Content, --# Dictionary.Dict, --# Type_So_Far & --# Type_So_Far from *, --# Argument_Expression, --# Argument_Found, --# CommandLineData.Content, --# Dictionary.Dict, --# Ident_Str, --# LexTokenManager.State, --# Prefix_Type, --# Scope & --# VCG_Type from *, --# Argument_Expression, --# Argument_Found, --# CommandLineData.Content, --# Dictionary.Dict, --# Ident_Str, --# LexTokenManager.State, --# Scope, --# Type_So_Far; --# --# pre Dictionary.Is_Null_Symbol (VCG_Type) or Dictionary.IsTypeMark (VCG_Type, Dictionary.Dict); --# post Dictionary.Is_Null_Symbol (VCG_Type) or --# Dictionary.IsTypeMark (VCG_Type, Dictionary.Dict) or --# Dictionary.IsParameterConstraint (VCG_Type, Dictionary.Dict); is Index_Number : Positive; Continue : Boolean; procedure Process_Argument (Ident_Str : in LexTokenManager.Lex_String; Arg_Exp_Node_Pos : in LexTokenManager.Token_Position; Argument_Expression : in Sem.Exp_Record; Scope : in Dictionary.Scopes; Index_Number : in out Positive; Type_So_Far : in out Sem.Exp_Record; Ok_So_Far : in out Boolean; Ok : out Boolean) -- 430 annotation completed --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Argument_Expression, --# Arg_Exp_Node_Pos, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# Type_So_Far & 
--# Index_Number, --# Ok_So_Far from *, --# Argument_Expression, --# CommandLineData.Content, --# Dictionary.Dict, --# Type_So_Far & --# Ok from Argument_Expression, --# CommandLineData.Content, --# Dictionary.Dict, --# Type_So_Far & --# Type_So_Far from *, --# Argument_Expression, --# CommandLineData.Content, --# Dictionary.Dict, --# Ident_Str, --# LexTokenManager.State; is Err : Maths.ErrorCode; Local_Index_Number : Integer; --------------------------------------------------------- procedure Set_Illegal_Result (Ident_Str : in LexTokenManager.Lex_String; Type_So_Far : out Sem.Exp_Record; Ok_So_Far : out Boolean) --# global in Dictionary.Dict; --# in LexTokenManager.State; --# derives Ok_So_Far from & --# Type_So_Far from Dictionary.Dict, --# Ident_Str, --# LexTokenManager.State; is begin Ok_So_Far := False; Type_So_Far := Sem.Unknown_Type_Record; Type_So_Far.Is_ARange := Check_Range (Ident_Str => Ident_Str); end Set_Illegal_Result; begin -- Process_Argument Ok := False; -- Ada83 LRM says arg N must be Universal Integer. -- Ada95 LRM says arg N must be Universal Integer or any integer -- (signed or modular) type. if not (Dictionary.IsUniversalIntegerType (Argument_Expression.Type_Symbol) or else Dictionary.IsUnknownTypeMark (Argument_Expression.Type_Symbol) or else ((Dictionary.TypeIsInteger (Argument_Expression.Type_Symbol) or else Dictionary.TypeIsModular (Argument_Expression.Type_Symbol)) and then CommandLineData.Content.Language_Profile /= CommandLineData.SPARK83)) then Set_Illegal_Result (Ident_Str => Ident_Str, Type_So_Far => Type_So_Far, Ok_So_Far => Ok_So_Far); ErrorHandler.Semantic_Error (Err_Num => 38, Reference => ErrorHandler.No_Reference, Position => Arg_Exp_Node_Pos, Id_Str => LexTokenManager.Null_String); elsif not Argument_Expression.Is_Static then Set_Illegal_Result (Ident_Str => Ident_Str, Type_So_Far => Type_So_Far, Ok_So_Far => Ok_So_Far); ErrorHandler.Semantic_Error (Err_Num => 36, Reference => 1, Position => Arg_Exp_Node_Pos, Id_Str => LexTokenManager.Null_String); else -- we have a static expression of the correct type Maths.ValueToInteger (Argument_Expression.Value, -- to get Local_Index_Number, Err); if Err = Maths.NoError then if Local_Index_Number > 0 and then Local_Index_Number <= Dictionary.GetNumberOfDimensions (Type_So_Far.Type_Symbol) then Index_Number := Local_Index_Number; Ok := True; else -- number out of range ErrorHandler.Semantic_Error_Sym (Err_Num => 403, Reference => ErrorHandler.No_Reference, Position => Arg_Exp_Node_Pos, Sym => Type_So_Far.Type_Symbol, Scope => Scope); Set_Illegal_Result (Ident_Str => Ident_Str, Type_So_Far => Type_So_Far, Ok_So_Far => Ok_So_Far); end if; else -- maths conversion error Set_Illegal_Result (Ident_Str => Ident_Str, Type_So_Far => Type_So_Far, Ok_So_Far => Ok_So_Far); end if; end if; end Process_Argument; begin -- Process_Array_Attribute Continue := True; Index_Number := 1; -- default value if no argument found if Argument_Found then Process_Argument (Ident_Str => Ident_Str, Arg_Exp_Node_Pos => Arg_Exp_Node_Pos, Argument_Expression => Argument_Expression, Scope => Scope, Index_Number => Index_Number, Type_So_Far => Type_So_Far, Ok_So_Far => Ok_So_Far, Ok => Continue); end if; if Continue then -- Set suitable symbol to be planted in the syntax tree for use by VCG if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Component_Size_Token) = LexTokenManager.Str_Eq then -- For component_size we want the type of the array for use by the VCG VCG_Type := 
Dictionary.GetRootType (Type_So_Far.Type_Symbol); elsif Dictionary.Is_Unconstrained_Array_Type_Mark (Type_So_Far.Type_Symbol, Scope) then -- For unconstrained arrays, obtain the implicitly declared constraint symbol for the array object VCG_Type := Dictionary.GetSubprogramParameterConstraint (Type_So_Far.Other_Symbol, Index_Number); if Dictionary.Is_Null_Symbol (VCG_Type) then VCG_Type := Dictionary.GetUnknownTypeMark; end if; else -- For constrained arrays, obtain the appropriate index for the array type; this is what the VCG needs VCG_Type := Dictionary.GetArrayIndex (Type_So_Far.Type_Symbol, Index_Number); end if; Type_So_Far.Is_Static := Check_Static (Ident_Str => Ident_Str, Prefix_Type => Prefix_Type, Type_So_Far => Type_So_Far, Argument_Expression => Argument_Expression, Argument_Found => Argument_Found, Scope => Scope); Type_So_Far.Is_Constant := Check_Constant (Ident_Str => Ident_Str, Type_So_Far => Type_So_Far, Argument_Expression => Argument_Expression, Argument_Found => Argument_Found, Scope => Scope); Type_So_Far.Is_ARange := Check_Range (Ident_Str => Ident_Str); Type_So_Far.Type_Symbol := Dictionary.GetArrayAttributeType (Ident_Str, Type_So_Far.Type_Symbol, Index_Number); Type_So_Far.Other_Symbol := Dictionary.NullSymbol; Type_So_Far.Sort := Sem.Type_Result; end if; end Process_Array_Attribute; -------------------------------------------------------------------- procedure Calc_Attribute (Node : in STree.SyntaxNode; Attrib_Name : in LexTokenManager.Lex_String; Prefix : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Base_Found : in Boolean; Argument : in out Maths.Value; RHS_Of_Range : out Maths.Value) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Argument, --# RHS_Of_Range from Argument, --# Attrib_Name, --# Base_Found, --# Dictionary.Dict, --# LexTokenManager.State, --# Prefix, --# Scope & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Argument, --# Attrib_Name, --# Base_Found, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Prefix, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.attribute_designator or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_attribute_designator; is separate; begin -- Wf_Attribute_Designator Argument_Expression := Sem.Null_Exp_Record; Second_Argument_Expression := Sem.Null_Exp_Record; Base_Found := False; Ident_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Ident_Node = attribute_designator OR annotation_attribute_designator OR attribute_ident if STree.Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.attribute_designator or else STree.Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.annotation_attribute_designator then -- ASSUME Ident_Node = attribute_designator OR annotation_attribute_designator Ident_Node := STree.Next_Sibling (Current_Node => Ident_Node); Base_Found := True; elsif STree.Syntax_Node_Type (Node => Ident_Node) /= SP_Symbols.attribute_ident then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = attribute_designator OR annotation_attribute_designator OR " & "attribute_ident in Wf_Attribute_Designator"); end if; -- ASSUME Ident_Node = attribute_ident SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.attribute_ident,
Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = attribute_ident in Wf_Attribute_Designator"); Ident_Str := STree.Node_Lex_String (Node => Ident_Node); Ident_Node_Pos := STree.Node_Position (Node => Ident_Node); Second_Arg_Exp_Node := STree.NullNode; Arg_Exp_Node := STree.Child_Node (Current_Node => STree.Next_Sibling (Current_Node => Ident_Node)); -- ASSUME Arg_Exp_Node = expression OR annotation_expression OR NULL if STree.Syntax_Node_Type (Node => Arg_Exp_Node) = SP_Symbols.expression or else STree.Syntax_Node_Type (Node => Arg_Exp_Node) = SP_Symbols.annotation_expression then -- ASSUME Arg_Exp_Node = expression OR annotation_expression Second_Arg_Exp_Node := STree.Next_Sibling (Current_Node => Arg_Exp_Node); elsif Arg_Exp_Node /= STree.NullNode then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Arg_Exp_Node = expression OR annotation_expression OR NULL in Wf_Attribute_Designator"); end if; -- look for second argument -- ASSUME Second_Arg_Exp_Node = expression OR annotation_expression OR NULL if STree.Syntax_Node_Type (Node => Second_Arg_Exp_Node) = SP_Symbols.expression or else STree.Syntax_Node_Type (Node => Second_Arg_Exp_Node) = SP_Symbols.annotation_expression then -- ASSUME Second_Arg_Exp_Node = expression OR annotation_expression Second_Argument_Found := True; Exp_Stack.Pop (Item => Second_Argument_Expression, Stack => E_Stack); elsif Second_Arg_Exp_Node = STree.NullNode then -- ASSUME Second_Arg_Exp_Node = NULL Second_Argument_Found := False; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Second_Arg_Exp_Node = expression OR annotation_expression OR NULL in Wf_Attribute_Designator"); end if; Second_Arg_Exp_Node_Pos := STree.Node_Position (Node => Second_Arg_Exp_Node); -- look for first argument -- ASSUME Arg_Exp_Node = expression OR annotation_expression OR NULL if STree.Syntax_Node_Type (Node => Arg_Exp_Node) = SP_Symbols.expression or else STree.Syntax_Node_Type (Node => Arg_Exp_Node) = SP_Symbols.annotation_expression then -- ASSUME Arg_Exp_Node = expression OR annotation_expression Argument_Found := True; Exp_Stack.Pop (Item => Argument_Expression, Stack => E_Stack); elsif Arg_Exp_Node = STree.NullNode then -- ASSUME Arg_Exp_Node = NULL Argument_Found := False; end if; Arg_Exp_Node_Pos := STree.Node_Position (Node => Arg_Exp_Node); Get_Prefix (Ident_Node_Pos => Ident_Node_Pos, Node_Pos => STree.Node_Position (Node => Node), Scope => Scope, Base_Found => Base_Found, Prefix => Type_So_Far, Kind => Prefix_Kind, E_Stack => E_Stack); Prefix_Type := Type_So_Far.Type_Symbol; VCG_Type := Prefix_Type; -- if clause to add prefix variable to reference list --# assert STree.Table = STree.Table~ and --# (STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.attribute_designator or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_attribute_designator) and --# (Dictionary.Is_Null_Symbol (VCG_Type) or Dictionary.IsTypeMark (VCG_Type, Dictionary.Dict)); -- X'Valid _IS_ considered a read of X for flow analysis. 
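-- Editor's sketch (an assumption for illustration only; X and A are hypothetical names):
-- given
--    if X'Valid then ...
-- the branch below adds X to Ref_Var, so the flow analyser treats the condition as a
-- read of X, just as it would treat "if X > 0 then". Attributes applied to a variable
-- whose type is an unconstrained array type mark, e.g. A'Length where A is such an
-- object, are recorded in the same way.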
if STree.Syntax_Node_Type (Node => Node) = SP_Symbols.attribute_designator and then Dictionary.IsVariableOrSubcomponent (Type_So_Far.Other_Symbol) and then (Dictionary.Is_Unconstrained_Array_Type_Mark (Type_So_Far.Type_Symbol, Scope) or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Valid_Token) = LexTokenManager.Str_Eq) then SeqAlgebra.AddMember (The_Heap, Ref_Var, Natural (Dictionary.SymbolRef (Type_So_Far.Other_Symbol))); end if; Basic_Checks (Node => Node, Ident_Node_Pos => Ident_Node_Pos, Ident_Str => Ident_Str, Arg_Exp_Node_Pos => Arg_Exp_Node_Pos, Second_Arg_Exp_Node_Pos => Second_Arg_Exp_Node_Pos, Argument_Found => Argument_Found, Second_Argument_Found => Second_Argument_Found, Type_So_Far => Type_So_Far, Ok => Ok_So_Far); if Ok_So_Far then Base_Checks (Node => Node, Ident_Node_Pos => Ident_Node_Pos, Ident_Str => Ident_Str, Type_So_Far => Type_So_Far, Continue => Ok_So_Far); end if; --# assert STree.Table = STree.Table~ and --# (STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.attribute_designator or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_attribute_designator) and --# (Dictionary.Is_Null_Symbol (VCG_Type) or Dictionary.IsTypeMark (VCG_Type, Dictionary.Dict)); -- any attempt to use proof attributes 'Tail and 'Append in program context -- will have been trapped by basic checks which will have set Ok_So_Far to False. -- We can therefore treat type and argument checking of these proof attributes -- normally from here on because if they are being checked then they must be -- being used in a valid context if Ok_So_Far then if Dictionary.Attribute_Is_Visible_But_Obsolete (Ident_Str, Prefix_Kind, Type_So_Far.Type_Symbol, Scope) then ErrorHandler.Semantic_Warning (Err_Num => 310, Position => Ident_Node_Pos, Id_Str => Ident_Str); end if; if not (Dictionary.AttributeIsVisible (Ident_Str, Prefix_Kind, Type_So_Far.Type_Symbol, Scope) or else Proof_Attribute_Is_Visible (Ident_Str => Ident_Str, Prefix_Kind => Prefix_Kind, Prefix_Sym => Type_So_Far.Other_Symbol)) then Type_So_Far := Sem.Unknown_Type_Record; Ok_So_Far := False; ErrorHandler.Semantic_Error (Err_Num => 96, Reference => ErrorHandler.No_Reference, Position => Ident_Node_Pos, Id_Str => Ident_Str); elsif Dictionary.IsArrayAttribute (Ident_Str, Type_So_Far.Type_Symbol) then if Second_Argument_Found then -- must be error, array attributes take a maximum of one argument Type_So_Far := Sem.Unknown_Type_Record; Ok_So_Far := False; ErrorHandler.Semantic_Error (Err_Num => 49, Reference => ErrorHandler.No_Reference, Position => Second_Arg_Exp_Node_Pos, Id_Str => Ident_Str); else -- zero or one expression provided Process_Array_Attribute (Ident_Str => Ident_Str, Prefix_Type => Prefix_Type, Arg_Exp_Node_Pos => Arg_Exp_Node_Pos, Argument_Expression => Argument_Expression, Argument_Found => Argument_Found, Scope => Scope, Type_So_Far => Type_So_Far, Ok_So_Far => Ok_So_Far, VCG_Type => VCG_Type); end if; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident_Str, Lex_Str2 => LexTokenManager.Access_Token) = LexTokenManager.Str_Eq then -- 'Access only allowed if subject is aliased. 
We could roll this into AttributeIsVisible -- but this would make the error unclear, so we do a special check here if Dictionary.VariableIsAliased (Type_So_Far.Other_Symbol) then -- valid application Type_So_Far.Is_Static := False; Type_So_Far.Is_Constant := True; Type_So_Far.Is_ARange := False; Type_So_Far.Type_Symbol := Dictionary.GetScalarAttributeType (Ident_Str, Type_So_Far.Type_Symbol); Type_So_Far.Variable_Symbol := Type_So_Far.Other_Symbol; Type_So_Far.Other_Symbol := Dictionary.NullSymbol; Type_So_Far.Sort := Sem.Type_Result; -- note we preserve OtherSymbol in VariableSymbol for use in wf_discriminant_constraint else Ok_So_Far := False; ErrorHandler.Semantic_Error_Sym (Err_Num => 895, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Node), Sym => Type_So_Far.Other_Symbol, Scope => Scope); end if; else -- an "ordinary" attribute if Argument_Found then if not (Always_Takes_One_Argument (Str => Ident_Str) or else Always_Takes_Two_Arguments (Str => Ident_Str)) then Type_So_Far := Sem.Unknown_Type_Record; Ok_So_Far := False; ErrorHandler.Semantic_Error (Err_Num => 55, Reference => ErrorHandler.No_Reference, Position => Arg_Exp_Node_Pos, Id_Str => Ident_Str); elsif Argument_Type_Correct (Str => Ident_Str, Prefix_Type => Type_So_Far.Type_Symbol, Arg_Type => Argument_Expression.Type_Symbol, Scope => Scope) then -- first argument type is ok, is there a second if not Second_Argument_Found or else Argument_Type_Correct (Str => Ident_Str, Prefix_Type => Type_So_Far.Type_Symbol, Arg_Type => Second_Argument_Expression.Type_Symbol, Scope => Scope) then -- either no second arg or it type checks ok Type_So_Far.Is_Static := Check_Static (Ident_Str => Ident_Str, Prefix_Type => Prefix_Type, Type_So_Far => Type_So_Far, Argument_Expression => Argument_Expression, Argument_Found => Argument_Found, Scope => Scope); Type_So_Far.Is_Constant := Check_Constant (Ident_Str => Ident_Str, Type_So_Far => Type_So_Far, Argument_Expression => Argument_Expression, Argument_Found => Argument_Found, Scope => Scope); Type_So_Far.Is_ARange := Check_Range (Ident_Str => Ident_Str); Type_So_Far.Type_Symbol := Dictionary.GetScalarAttributeType (Ident_Str, Type_So_Far.Type_Symbol); Type_So_Far.Other_Symbol := Dictionary.NullSymbol; Type_So_Far.Sort := Sem.Type_Result; else -- second argument type wrong Type_So_Far := Sem.Unknown_Type_Record; Ok_So_Far := False; ErrorHandler.Semantic_Error (Err_Num => 38, Reference => ErrorHandler.No_Reference, Position => Second_Arg_Exp_Node_Pos, Id_Str => LexTokenManager.Null_String); end if; else -- first argument type wrong Type_So_Far := Sem.Unknown_Type_Record; Ok_So_Far := False; ErrorHandler.Semantic_Error (Err_Num => 38, Reference => ErrorHandler.No_Reference, Position => Arg_Exp_Node_Pos, Id_Str => LexTokenManager.Null_String); end if; else -- no argument found so just set up result Type_So_Far.Is_Static := Check_Static (Ident_Str => Ident_Str, Prefix_Type => Prefix_Type, Type_So_Far => Type_So_Far, Argument_Expression => Argument_Expression, Argument_Found => Argument_Found, Scope => Scope); Type_So_Far.Is_Constant := Check_Constant (Ident_Str => Ident_Str, Type_So_Far => Type_So_Far, Argument_Expression => Argument_Expression, Argument_Found => Argument_Found, Scope => Scope); Type_So_Far.Is_ARange := Check_Range (Ident_Str => Ident_Str); Type_So_Far.Type_Symbol := Dictionary.GetScalarAttributeType (Ident_Str, Type_So_Far.Type_Symbol); Type_So_Far.Other_Symbol := Dictionary.NullSymbol; Type_So_Far.Sort := Sem.Type_Result; end if; end if; end if; --# 
assert STree.Table = STree.Table~ and --# (STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.attribute_designator or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_attribute_designator) and --# (Dictionary.Is_Null_Symbol (VCG_Type) or --# Dictionary.IsTypeMark (VCG_Type, Dictionary.Dict) or --# Dictionary.IsParameterConstraint (VCG_Type, Dictionary.Dict)); -- if Dict has returned a null symbol for the attribute type then convert it to -- the unknown type symbol if Dictionary.Is_Null_Symbol (Type_So_Far.Type_Symbol) then Type_So_Far.Type_Symbol := Dictionary.GetUnknownTypeMark; end if; if Ok_So_Far then if Argument_Found then if Second_Argument_Found then -- we could statically evaluate Max and Min here but since use of these functions -- with two static arguments seems unlikely it has been left for now. However, -- we must check that any static argument is in type range. Val := Argument_Expression.Value; --# accept Flow, 10, Unused_Val, "Expected ineffective assignment"; Sem.Constraint_Check (Val => Val, New_Val => Unused_Val, Is_Annotation => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_attribute_designator, Typ => Dictionary.GetRootType (Prefix_Type), Position => Arg_Exp_Node_Pos); --# end accept; Val := Second_Argument_Expression.Value; --# accept Flow, 10, Unused_Val, "Expected ineffective assignment"; Sem.Constraint_Check (Val => Val, New_Val => Unused_Val, Is_Annotation => STree.Syntax_Node_Type (Node => Node) = SP_Symbols.annotation_attribute_designator, Typ => Dictionary.GetRootType (Prefix_Type), Position => Second_Arg_Exp_Node_Pos); --# end accept; else -- just one argument found Val := Argument_Expression.Value; end if; else -- no arguments found Val := Maths.NoValue; end if; -- constraint checking of arguments to attributes other than Min/Max done as part -- of CalcAttribute Calc_Attribute (Node => Node, Attrib_Name => Ident_Str, Prefix => Prefix_Type, Scope => Scope, Base_Found => Base_Found, Argument => Val, RHS_Of_Range => RHS_Val); Type_So_Far.Value := Val; Type_So_Far.Range_RHS := RHS_Val; end if; Exp_Stack.Push (X => Type_So_Far, Stack => E_Stack); STree.Add_Node_Symbol (Node => Node, Sym => VCG_Type); --# accept Flow, 33, Unused_Val, "Expected ineffective assignment"; end Wf_Attribute_Designator; spark-2012.0.deb/examiner/sem-compunit-wf_subprogram_body.adb0000644000175000017500000020601611753202336023236 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.CompUnit) procedure Wf_Subprogram_Body (Node : in STree.SyntaxNode; Scope : in out Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; Next_Node : out STree.SyntaxNode) is -- look up table: if First_Seen then we are dealing with Abstract spec else Refined type Which_Abstractions is array (Boolean) of Dictionary.Abstractions; Which_Abstraction : constant Which_Abstractions := Which_Abstractions'(False => Dictionary.IsRefined, True => Dictionary.IsAbstract); Ident_Node, End_Desig_Node : STree.SyntaxNode; Global_Node, Dependency_Node, Constraint_Node, Declare_Node : STree.SyntaxNode; With_Node, Main_Node, Spec_Node, Anno_Node : STree.SyntaxNode; Subprog_Implem_Node, Formal_Part_Node : STree.SyntaxNode; Subprog_Sym : Dictionary.Symbol; Hidden : Hidden_Class; First_Seen : Boolean; Subprog_Scope, Formal_Part_Scope : Dictionary.Scopes; Is_Overriding : Boolean := False; ------------------------------------------------------------------ procedure ProcessPartitionAnnotation (Main_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# in out TheHeap; --# derives Dictionary.Dict, --# Statistics.TableUsage, --# STree.Table, --# TheHeap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Main_Node, --# Scope, --# STree.Table, --# TheHeap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Main_Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# TheHeap; --# pre Syntax_Node_Type (Main_Node, STree.Table) = SP_Symbols.main_program_declaration; --# post STree.Table = STree.Table~; is separate; ------------------------------------------------------------------ procedure Shared_Variable_Check (Main_Program_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Error_Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# Main_Program_Sym & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Node_Pos, --# LexTokenManager.State, --# Main_Program_Sym, --# Scope, --# SPARK_IO.File_Sys; is Inherited_Package_It : Dictionary.Iterator; Inherited_Package_Sym : Dictionary.Symbol; It : Dictionary.Iterator; Sym : Dictionary.Symbol; Type_Sym : Dictionary.Symbol; ------------------------------------------------------------------ procedure Check_Unprotected_Globals (Check_List : in Dictionary.Iterator; The_Thread : in Dictionary.Symbol; Annotations_Are_Wellformed : in Boolean; Scope : in Dictionary.Scopes; Error_Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in 
out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# Check_List, --# The_Thread & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Annotations_Are_Wellformed, --# Check_List, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Node_Pos, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# The_Thread; is It : Dictionary.Iterator; Sym : Dictionary.Symbol; Other_Thread : Dictionary.Symbol; begin It := Check_List; while It /= Dictionary.NullIterator loop Sym := Dictionary.CurrentSymbol (It); if not Dictionary.Is_Null_Variable (Sym) then if not Dictionary.GetOwnVariableProtected (Sym) then Other_Thread := Dictionary.GetUnprotectedReference (Sym); if not Dictionary.Is_Null_Symbol (Other_Thread) then -- This is non-protected global variable that is being -- accessed by more than one thread of control. ErrorHandler.Semantic_Error_Sym3 (Err_Num => 938, Reference => ErrorHandler.No_Reference, Position => Error_Node_Pos, Sym => Sym, Sym2 => Other_Thread, Sym3 => The_Thread, Scope => Scope); else -- Mark this global variable as being accessed by a thread. Dictionary.SetUnprotectedReference (Sym, The_Thread); end if; end if; end if; It := Dictionary.NextSymbol (It); end loop; if not Annotations_Are_Wellformed then -- The thread has errors in the annotations and so the shared variable check -- may not be complete. ErrorHandler.Semantic_Warning_Sym (Err_Num => 413, Position => Error_Node_Pos, Sym => The_Thread, Scope => Scope); end if; end Check_Unprotected_Globals; begin -- Shared_Variable_Check -- Look for access to unprotected globals by the main program Check_Unprotected_Globals (Check_List => Dictionary.FirstGlobalVariable (Dictionary.IsAbstract, Main_Program_Sym), The_Thread => Main_Program_Sym, Annotations_Are_Wellformed => Dictionary.SubprogramSignatureIsWellformed (Dictionary.IsAbstract, Main_Program_Sym), Scope => Scope, Error_Node_Pos => Error_Node_Pos); -- Look for access to unprotected globals by all tasks. Inherited_Package_It := Dictionary.FirstInheritsClause (Main_Program_Sym); while Inherited_Package_It /= Dictionary.NullIterator loop Inherited_Package_Sym := Dictionary.CurrentSymbol (Inherited_Package_It); It := Dictionary.FirstOwnTask (Inherited_Package_Sym); while It /= Dictionary.NullIterator loop Sym := Dictionary.CurrentSymbol (It); Type_Sym := Dictionary.GetRootType (Dictionary.GetType (Sym)); if Dictionary.Is_Declared (Item => Type_Sym) then if Dictionary.UsesUnprotectedVariables (Type_Sym) then Check_Unprotected_Globals (Check_List => Dictionary.FirstGlobalVariable (Dictionary.IsAbstract, Type_Sym), The_Thread => Sym, Annotations_Are_Wellformed => Dictionary.SubprogramSignatureIsWellformed (Dictionary.IsAbstract, Type_Sym), Scope => Scope, Error_Node_Pos => Error_Node_Pos); end if; elsif not Dictionary.IsUnknownTypeMark (Type_Sym) then -- The task type is not available and hence we cannot perform -- the shared variable check for this task. 
ErrorHandler.Semantic_Warning_Sym (Err_Num => 411, Position => Error_Node_Pos, Sym => Type_Sym, Scope => Scope); end if; It := Dictionary.NextSymbol (It); end loop; Inherited_Package_It := Dictionary.NextSymbol (Inherited_Package_It); end loop; end Shared_Variable_Check; ------------------------------------------------------------------ procedure Max_One_In_A_Queue_Check (Main_Program_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Error_Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# Main_Program_Sym & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Node_Pos, --# LexTokenManager.State, --# Main_Program_Sym, --# Scope, --# SPARK_IO.File_Sys; is Inherited_Package_It : Dictionary.Iterator; Inherited_Package_Sym : Dictionary.Symbol; It : Dictionary.Iterator; Sym : Dictionary.Symbol; Type_Sym : Dictionary.Symbol; ------------------------------------------------------------------ procedure Check_Suspends_Items (Check_List : in Dictionary.Iterator; The_Thread : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Error_Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives Dictionary.Dict from *, --# Check_List, --# The_Thread & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Check_List, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Node_Pos, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# The_Thread; is It : Dictionary.Iterator; Sym : Dictionary.Symbol; Other_Thread : Dictionary.Symbol; begin It := Check_List; while It /= Dictionary.NullIterator loop Sym := Dictionary.CurrentSymbol (It); Other_Thread := Dictionary.GetSuspendsReference (Sym); if not Dictionary.Is_Null_Symbol (Other_Thread) then -- This is a suspendable entity that is being -- accessed by more than one thread of control. ErrorHandler.Semantic_Error_Sym3 (Err_Num => 939, Reference => ErrorHandler.No_Reference, Position => Error_Node_Pos, Sym => Sym, Sym2 => Other_Thread, Sym3 => The_Thread, Scope => Scope); else -- Mark this suspends item as being accessed by a thread. Dictionary.SetSuspendsReference (Sym, The_Thread); end if; It := Dictionary.NextSymbol (It); end loop; end Check_Suspends_Items; begin -- Max_One_In_A_Queue_Check -- Look for suspendable entities in the main program Check_Suspends_Items (Check_List => Dictionary.FirstSuspendsListItem (Main_Program_Sym), The_Thread => Main_Program_Sym, Scope => Scope, Error_Node_Pos => Error_Node_Pos); -- Look for suspendable entities in all the tasks. -- Note. interrupt handlers cannot call operations that suspend. 
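-- For illustration of the rule enforced by the loop below (hypothetical RavenSPARK
-- fragment; the names Worker_A, Worker_B and SO are invented for this note, and the
-- annotation syntax is sketched rather than quoted): two distinct threads naming the
-- same suspendable item, e.g.
--
--    task type Worker_A;
--    --# declare suspends => SO;
--
--    task type Worker_B;
--    --# declare suspends => SO;
--
-- would cause the second thread encountered to be reported against error 939, since
-- SO would then have more than one potential queuer.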
Inherited_Package_It := Dictionary.FirstInheritsClause (Main_Program_Sym); while Inherited_Package_It /= Dictionary.NullIterator loop Inherited_Package_Sym := Dictionary.CurrentSymbol (Inherited_Package_It); It := Dictionary.FirstOwnTask (Inherited_Package_Sym); while It /= Dictionary.NullIterator loop Sym := Dictionary.CurrentSymbol (It); Type_Sym := Dictionary.GetRootType (Dictionary.GetType (Sym)); if Dictionary.Is_Declared (Item => Type_Sym) then Check_Suspends_Items (Check_List => Dictionary.FirstSuspendsListItem (Type_Sym), The_Thread => Sym, Scope => Scope, Error_Node_Pos => Error_Node_Pos); elsif not Dictionary.IsUnknownTypeMark (Type_Sym) then -- The task type is not available and hence we cannot perform -- the max-one-in-a-queue check for this task. ErrorHandler.Semantic_Warning_Sym (Err_Num => 412, Position => Error_Node_Pos, Sym => Type_Sym, Scope => Scope); end if; It := Dictionary.NextSymbol (It); end loop; Inherited_Package_It := Dictionary.NextSymbol (Inherited_Package_It); end loop; end Max_One_In_A_Queue_Check; ------------------------------------------------------------------ procedure Wf_Main_Program (Node : in STree.SyntaxNode; Subprog_Sym : in Dictionary.Symbol; Scope, Subprog_Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# in out TheHeap; --# derives Dictionary.Dict, --# Statistics.TableUsage, --# STree.Table, --# TheHeap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# Subprog_Scope, --# Subprog_Sym, --# TheHeap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Subprog_Scope, --# Subprog_Sym, --# TheHeap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.main_program_declaration; --# post STree.Table = STree.Table~; is Context_Node, Inherit_Node, Precondition_Node : STree.SyntaxNode; ------------------------------------------------------------------ procedure Check_Program_Completeness (Node_Pos : in LexTokenManager.Token_Position; Subprog_Sym : in Dictionary.Symbol; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# Scope, --# SPARK_IO.File_Sys, --# Subprog_Sym; is Inherit_It : Dictionary.Iterator; Inherited_Package : Dictionary.Symbol; function Contains_Task (The_Package : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is begin return not Dictionary.IsNullIterator (Dictionary.FirstOwnTask (The_Package)); end Contains_Task; function Contains_Interrupt (The_Package : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is Result : Boolean := False; 
It : Dictionary.Iterator; begin It := Dictionary.FirstOwnVariable (The_Package); while not Dictionary.IsNullIterator (It) loop Result := Dictionary.GetHasInterruptProperty (Dictionary.CurrentSymbol (It)); exit when Result; It := Dictionary.NextSymbol (It); end loop; return Result; end Contains_Interrupt; begin -- Check_Program_Completeness Inherit_It := Dictionary.FirstInheritsClause (Subprog_Sym); while not Dictionary.IsNullIterator (Inherit_It) loop Inherited_Package := Dictionary.CurrentSymbol (Inherit_It); if Contains_Task (The_Package => Inherited_Package) or else Contains_Interrupt (The_Package => Inherited_Package) then -- then it must also be WITHed to ensure program completeness if not Dictionary.Is_Withed (The_Withed_Symbol => Inherited_Package, Scope => Scope) then ErrorHandler.Semantic_Error_Sym (Err_Num => 951, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Sym => Inherited_Package, Scope => Scope); end if; end if; Inherit_It := Dictionary.NextSymbol (Inherit_It); end loop; end Check_Program_Completeness; function Get_Precondition_Node (Node : in STree.SyntaxNode) return STree.SyntaxNode --# global in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.main_program_declaration; --# return Return_Node => Return_Node = STree.NullNode or Syntax_Node_Type (Return_Node, STree.Table) = SP_Symbols.precondition; is Current_Node : STree.SyntaxNode; begin -- Find new_overriding_subprogram_body. Current_Node := Last_Sibling_Of (Start_Node => Child_Node (Current_Node => Node)); -- ASSUME Current_Node = not_overriding_subprogram_body SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Current_Node) = SP_Symbols.not_overriding_subprogram_body, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = not_overriding_subprogram_body in Get_Precondition_Node"); -- Find procedure_annotation or function_annotation. Current_Node := Child_Node (Current_Node => Current_Node); -- ASSUME Current_Node = procedure_specification OR function_specification SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Current_Node) = SP_Symbols.procedure_specification or else Syntax_Node_Type (Node => Current_Node) = SP_Symbols.function_specification, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = procedure_specification OR function_specification in Get_Precondition_Node"); Current_Node := Next_Sibling (Current_Node => Current_Node); -- ASSUME Current_Node = procedure_annotation OR function_annotation SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Current_Node) = SP_Symbols.procedure_annotation or else Syntax_Node_Type (Node => Current_Node) = SP_Symbols.function_annotation, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = procedure_annotation OR function_annotation in Get_Precondition_Node"); -- Find constraint. This is always the last one for either -- functions or procedures. See SPARK.LLA. Current_Node := Last_Sibling_Of (Start_Node => Child_Node (Current_Node => Current_Node)); -- ASSUME Current_Node = procedure_constraint OR function_constraint SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Current_Node) = SP_Symbols.procedure_constraint or else Syntax_Node_Type (Node => Current_Node) = SP_Symbols.function_constraint, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = procedure_constraint OR function_constraint in Get_Precondition_Node"); -- Find precondition, if it exists. This will always be the -- first child. 
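-- In outline, the walk performed in this function moves through the following nodes
-- (the node names are exactly those checked by the RT_Assert calls above, so this is
-- a summary rather than new information):
--
--    main_program_declaration
--       -> not_overriding_subprogram_body
--          -> procedure_specification  | function_specification
--          -> procedure_annotation     | function_annotation
--          -> procedure_constraint     | function_constraint
--             -> precondition | postcondition | return_expression | (none)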
Current_Node := Child_Node (Current_Node => Current_Node); -- ASSUME Current_Node = precondition OR postcondition OR return_expression OR NULL if Current_Node /= STree.NullNode then -- ASSUME Current_Node = precondition OR postcondition OR return_expression case Syntax_Node_Type (Node => Current_Node) is when SP_Symbols.precondition => -- We have found a precondition. null; when SP_Symbols.postcondition | SP_Symbols.return_expression => -- The function or procedure has no precondition, -- but it has a postcondition or return annotation. Current_Node := STree.NullNode; when others => -- We don't expect to reach this, obviously, as we -- don't have any other constraint annotations in -- SPARK. SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = precondition OR postcondition OR return_expression " & "in Get_Precondition_Node"); end case; end if; return Current_Node; end Get_Precondition_Node; begin -- Wf_Main_Program if not Dictionary.MainProgramExists then Dictionary.AddMainProgram (Subprog_Sym, Dictionary.Location'(Start_Position => Node_Position (Node => Node), End_Position => Node_Position (Node => Node))); Inherit_Node := Child_Node (Current_Node => Node); -- ASSUME Inherit_Node = inherit_clause OR main_program_annotation if Syntax_Node_Type (Node => Inherit_Node) = SP_Symbols.inherit_clause then -- ASSUME Inherit_Node = inherit_clause Wf_Inherit_Clause (Node => Inherit_Node, Comp_Sym => Subprog_Sym, Scope => Scope); elsif Syntax_Node_Type (Node => Inherit_Node) /= SP_Symbols.main_program_annotation then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Inherit_Node = inherit_clause OR main_program_annotation in Wf_Main_Program"); end if; Context_Node := Child_Node (Current_Node => Parent_Node (Current_Node => Parent_Node (Current_Node => Node))); -- ASSUME Context_Node = context_clause OR library_unit if Syntax_Node_Type (Node => Context_Node) = SP_Symbols.context_clause then -- ASSUME Context_Node = context_clause Wf_Context_Clause (Node => Context_Node, Comp_Sym => Subprog_Sym, Scope => Subprog_Scope); elsif Syntax_Node_Type (Node => Context_Node) /= SP_Symbols.library_unit then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Context_Node = context_clause OR library_unit in Wf_Main_Program"); end if; -- We need to check if the main program has a precondition -- and issue a warning that its correctness is not checked. 
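-- A warning (rather than a proof obligation) is used because the main program has no
-- caller within the analysed SPARK program, so there is no calling context whose own
-- proof would establish the precondition.  As a purely hypothetical illustration, a
-- main program declared as
--    procedure Main;
--    --# pre Sensor_Reading > 0;
-- would have this warning reported against its precondition.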
Precondition_Node := Get_Precondition_Node (Node => Node); -- ASSUME Precondition_Node = precondition OR NULL if Syntax_Node_Type (Node => Precondition_Node) = SP_Symbols.precondition then -- ASSUME Precondition_Node = precondition ErrorHandler.Semantic_Warning (Err_Num => 431, Position => Node_Position (Precondition_Node), Id_Str => LexTokenManager.Null_String); end if; -- check here, in Ravenscar, that all inherited packages with tasks/interrupts are also WITHed if Syntax_Node_Type (Node => Inherit_Node) = SP_Symbols.inherit_clause and then CommandLineData.Ravenscar_Selected then Check_Program_Completeness (Node_Pos => Node_Position (Node => Inherit_Node), Subprog_Sym => Subprog_Sym, Scope => Subprog_Scope); end if; -- in Ravenscar mode, a main program may have an additional partition flow analysis annotation ProcessPartitionAnnotation (Main_Node => Node, Scope => Scope); else -- Dictionary.MainProgramExists ErrorHandler.Semantic_Error (Err_Num => 313, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; end Wf_Main_Program; ------------------------------------------------------------------ function Requires_Second_Annotation (Subprog_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is Global_Var : Dictionary.Symbol; Required : Boolean; Global_Item : Dictionary.Iterator; Enclosing_Region : Dictionary.Symbol; begin Required := False; if not Dictionary.IsGlobalScope (Dictionary.GetScope (Subprog_Sym)) then Enclosing_Region := Dictionary.GetRegion (Dictionary.GetScope (Subprog_Sym)); if Dictionary.IsPackage (Enclosing_Region) or else (Dictionary.IsType (Enclosing_Region) and then Dictionary.IsProtectedTypeMark (Enclosing_Region)) then Global_Item := Dictionary.FirstGlobalVariable (Dictionary.IsAbstract, Subprog_Sym); while Global_Item /= Dictionary.NullIterator loop Global_Var := Dictionary.CurrentSymbol (Global_Item); if Dictionary.IsRefinedOwnVariable (Global_Var) and then Dictionary.GetOwner (Global_Var) = Enclosing_Region then Required := True; exit; end if; Global_Item := Dictionary.NextSymbol (Global_Item); end loop; end if; end if; return Required; end Requires_Second_Annotation; ---------------------------------------------------------------------- procedure Check_Function_Has_Return (Subprog_Node : in STree.SyntaxNode; End_Desig_Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# End_Desig_Node_Pos, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Subprog_Node; --# pre Syntax_Node_Type (Subprog_Node, STree.Table) = SP_Symbols.subprogram_implementation; is Next_Node : STree.SyntaxNode; begin Next_Node := Next_Sibling (Current_Node => Child_Node (Current_Node => Subprog_Node)); -- Skip over declarative_part if there is one -- ASSUME Next_Node = declarative_part OR sequence_of_statements OR code_insertion OR hidden_part if Syntax_Node_Type (Node => Next_Node) = SP_Symbols.declarative_part then -- ASSUME Next_Node = declarative_part Next_Node := Next_Sibling (Current_Node => Next_Node); elsif Syntax_Node_Type (Node => Next_Node) /= SP_Symbols.sequence_of_statements and then Syntax_Node_Type (Node => Next_Node) /= SP_Symbols.code_insertion and then
Syntax_Node_Type (Node => Next_Node) /= SP_Symbols.hidden_part then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = declarative_part OR sequence_of_statements OR code_insertion OR " & "hidden_part in Check_Function_Has_Return"); end if; -- ASSUME Next_Node = sequence_of_statements OR code_insertion OR hidden_part if Syntax_Node_Type (Node => Next_Node) = SP_Symbols.sequence_of_statements then -- ASSUME Next_Node = sequence_of_statements Next_Node := Child_Node (Current_Node => Next_Node); -- Now we have a sequence_of_statements which can be reduced to: -- sequence_of_statements statement | statement ; -- (See SPARK.LLA) -- If the sequence_of_statements is a sequence_of_statements followed by -- a statement then skip to the statement (which will be the final statement -- in the subprogram). -- ASSUME Next_Node = sequence_of_statements OR statement if Syntax_Node_Type (Node => Next_Node) = SP_Symbols.sequence_of_statements then -- ASSUME Next_Node = sequence_of_statements Next_Node := Next_Sibling (Current_Node => Next_Node); elsif Syntax_Node_Type (Node => Next_Node) /= SP_Symbols.statement then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = sequence_of_statements OR statement in Check_Function_Has_Return"); end if; -- ASSUME Next_Node = statement SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Next_Node) = SP_Symbols.statement, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = statement in Check_Function_Has_Return"); -- The final statement in the subprogram should be a return statement, but we -- need to cater for labels because a statement can be reduced to: -- simple_statement | sequence_of_labels simple_statement ... -- (and a simple_statement can be reduced to a return_statement). -- The child node will either be a simple_statement or a sequence_of_labels Next_Node := Child_Node (Current_Node => Next_Node); -- Skip the label(s) if present. -- ASSUME Next_Node = sequence_of_labels OR simple_statement OR compound_statement OR -- proof_statement OR justification_statement OR apragma if Syntax_Node_Type (Node => Next_Node) = SP_Symbols.sequence_of_labels then -- ASSUME Next_Node = sequence_of_labels Next_Node := Next_Sibling (Next_Node); elsif Syntax_Node_Type (Node => Next_Node) /= SP_Symbols.simple_statement and then Syntax_Node_Type (Node => Next_Node) /= SP_Symbols.compound_statement and then Syntax_Node_Type (Node => Next_Node) /= SP_Symbols.proof_statement and then Syntax_Node_Type (Node => Next_Node) /= SP_Symbols.justification_statement and then Syntax_Node_Type (Node => Next_Node) /= SP_Symbols.apragma then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = sequence_of_labels OR simple_statement OR compound_statement OR " & "proof_statement OR justification_statement OR apragma in Check_Function_Has_Return"); end if; -- ASSUME Next_Node = simple_statement OR compound_statement OR proof_statement OR justification_statement OR apragma -- Now we have reached the final statement in the subprogram. This should be -- a return statement. 
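-- Note that this check is purely syntactic: a function whose last statement is a
-- compound statement is reported as missing a return even if every path through it
-- returns.  For example (hypothetical body of a function returning Integer):
--
--    if B then
--       return 1;
--    else
--       return 2;
--    end if;
--
-- ends in a compound_statement and so is reported at the closing designator.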
if Syntax_Node_Type (Node => Next_Node) = SP_Symbols.simple_statement then -- ASSUME Next_Node = simple_statement if Syntax_Node_Type (Node => Child_Node (Current_Node => Next_Node)) = SP_Symbols.null_statement or else Syntax_Node_Type (Node => Child_Node (Current_Node => Next_Node)) = SP_Symbols.assignment_statement or else Syntax_Node_Type (Node => Child_Node (Current_Node => Next_Node)) = SP_Symbols.procedure_call_statement or else Syntax_Node_Type (Node => Child_Node (Current_Node => Next_Node)) = SP_Symbols.exit_statement or else Syntax_Node_Type (Node => Child_Node (Current_Node => Next_Node)) = SP_Symbols.delay_statement then -- ASSUME Child_Node (Current_Node => Next_Node) = null_statement OR assignment_statement OR -- procedure_call_statement OR exit_statement OR delay_statement ErrorHandler.Control_Flow_Error (Err_Type => ErrorHandler.Missing_Return, Position => End_Desig_Node_Pos); elsif Syntax_Node_Type (Node => Child_Node (Current_Node => Next_Node)) /= SP_Symbols.return_statement then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child_Node (Current_Node => Next_Node) = null_statement OR assignment_statement OR " & "procedure_call_statement OR exit_statement OR return_statement OR " & "delay_statement in Check_Function_Has_Return"); end if; elsif Syntax_Node_Type (Node => Next_Node) = SP_Symbols.compound_statement or else Syntax_Node_Type (Node => Next_Node) = SP_Symbols.proof_statement or else Syntax_Node_Type (Node => Next_Node) = SP_Symbols.justification_statement or else Syntax_Node_Type (Node => Next_Node) = SP_Symbols.apragma then -- ASSUME Next_Node = compound_statement OR proof_statement OR justification_statement OR apragma ErrorHandler.Control_Flow_Error (Err_Type => ErrorHandler.Missing_Return, Position => End_Desig_Node_Pos); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = simple_statement OR compound_statement OR proof_statement OR " & "justification_statement OR apragma in Check_Function_Has_Return"); end if; elsif Syntax_Node_Type (Node => Next_Node) /= SP_Symbols.code_insertion and then Syntax_Node_Type (Node => Next_Node) /= SP_Symbols.hidden_part then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = sequence_of_statements OR code_insertion OR " & "hidden_part in Check_Function_Has_Return"); end if; end Check_Function_Has_Return; begin -- Wf_Subprogram_Body Main_Node := Parent_Node (Current_Node => Node); -- ASSUME Main_Node = proper_body OR protected_operation_item OR generic_subprogram_body OR main_program_declaration SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Main_Node) = SP_Symbols.main_program_declaration or else Syntax_Node_Type (Node => Main_Node) = SP_Symbols.proper_body or else Syntax_Node_Type (Node => Main_Node) = SP_Symbols.protected_operation_item or else Syntax_Node_Type (Node => Main_Node) = SP_Symbols.generic_subprogram_body, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Main_Node = proper_body OR protected_operation_item OR generic_subprogram_body OR " & "main_program_declaration in Wf_Subprogram_Body"); Spec_Node := Child_Node (Current_Node => Node); -- ASSUME Spec_Node = overriding_indicator OR procedure_specification OR function_specification if Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.overriding_indicator then -- ASSUME Spec_Node = overriding_indicator -- ASSUME Child_Node (Current_Node => Spec_Node) = RWoverriding OR RWnot if Syntax_Node_Type (Node => Child_Node 
(Current_Node => Spec_Node)) = SP_Symbols.RWoverriding then -- ASSUME Child_Node (Current_Node => Spec_Node) = RWoverriding Is_Overriding := True; end if; Spec_Node := Next_Sibling (Current_Node => Spec_Node); elsif Syntax_Node_Type (Node => Spec_Node) /= SP_Symbols.procedure_specification and then Syntax_Node_Type (Node => Spec_Node) /= SP_Symbols.function_specification then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Spec_Node = overriding_indicator OR procedure_specification OR " & "function_specification in Wf_Subprogram_Body"); end if; -- ASSUME Spec_Node = procedure_specification OR function_specification Subprog_Implem_Node := Last_Sibling_Of (Start_Node => Spec_Node); -- ASSUME Subprog_Implem_Node = subprogram_implementation SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Subprog_Implem_Node) = SP_Symbols.subprogram_implementation, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Subprog_Implem_Node = subprogram_implementation in Wf_Subprogram_Body"); Hidden := Body_Hidden_Class (Node => Subprog_Implem_Node); Subprog_Scope := Scope; -- NOTE: Given Ada83 declaration order restrictions, I /think/ that we could always -- check formal parts in subprogram local scope rather than, as above, sometimes -- doing it in the scope in which the subprogram is being declared. With relaxed ordering -- there /might/ be a problem with subunits thus: -- spec -- stub -- declarations that the body can't see -- of course these can't exist in 83 -- the body (here we might see the declarations we ought not to?) -- Anyway, I thought it best to leave the existing code alone and change the scope only -- for the generic case -- ASSUME Spec_Node = procedure_specification OR function_specification if Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.procedure_specification or else Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.function_specification then -- ASSUME Spec_Node = procedure_specification OR function_specification Subprogram_Specification.Wf_Subprogram_Specification_From_Body (Node => Spec_Node, Hidden => (Hidden = All_Hidden), Current_Scope => Subprog_Scope, Subprog_Sym => Subprog_Sym, First_Seen => First_Seen); else Subprog_Sym := Dictionary.NullSymbol; First_Seen := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Spec_Node = procedure_specification OR function_specification in Wf_Subprogram_Body"); end if; Ident_Node := Child_Node (Current_Node => Child_Node (Current_Node => Spec_Node)); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Wf_Subprogram_Body"); if Syntax_Node_Type (Node => Main_Node) = SP_Symbols.generic_subprogram_body then -- ASSUME Main_Node = generic_subprogram_body Formal_Part_Scope := Dictionary.GetEnclosingScope (Scope => Subprog_Scope); else Formal_Part_Scope := Scope; end if; --# assert STree.Table = STree.Table~ and --# (Syntax_Node_Type (Node, STree.Table) = SP_Symbols.subprogram_body or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.not_overriding_subprogram_body) and --# (Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.procedure_specification or --# Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.function_specification) and --# Syntax_Node_Type (Subprog_Implem_Node, STree.Table) = SP_Symbols.subprogram_implementation and --# (Syntax_Node_Type (Main_Node, STree.Table) =
SP_Symbols.proper_body or --# Syntax_Node_Type (Main_Node, STree.Table) = SP_Symbols.protected_operation_item or --# Syntax_Node_Type (Main_Node, STree.Table) = SP_Symbols.generic_subprogram_body or --# Syntax_Node_Type (Main_Node, STree.Table) = SP_Symbols.main_program_declaration) and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; if not Dictionary.Is_Null_Symbol (Subprog_Sym) then if Syntax_Node_Type (Node => Main_Node) = SP_Symbols.main_program_declaration then -- ASSUME Main_Node = main_program_declaration Wf_Main_Program (Node => Main_Node, Subprog_Sym => Subprog_Sym, Scope => Scope, Subprog_Scope => Subprog_Scope); elsif Syntax_Node_Type (Node => Main_Node) = SP_Symbols.proper_body then -- ASSUME Main_Node = proper_body -- check to look for WITH node in case of subunit With_Node := Parent_Node (Current_Node => Main_Node); -- ASSUME With_Node = subunit OR abody if Syntax_Node_Type (Node => With_Node) = SP_Symbols.subunit then -- ASSUME With_Node = subunit -- there may be a WITH node to deal with With_Node := Child_Node (Current_Node => Child_Node (Current_Node => Parent_Node (Current_Node => Parent_Node (Current_Node => With_Node)))); -- ASSUME With_Node = subunit OR with_clause if Syntax_Node_Type (Node => With_Node) = SP_Symbols.with_clause then -- ASSUME With_Node = with_clause With_Node := Parent_Node (Current_Node => With_Node); -- ASSUME With_Node = context_clause SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => With_Node) = SP_Symbols.context_clause, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect With_Node = context_clause in Wf_Subprogram_Body"); Wf_Context_Clause (Node => With_Node, Comp_Sym => Subprog_Sym, Scope => Subprog_Scope); elsif Syntax_Node_Type (Node => With_Node) /= SP_Symbols.subunit then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect With_Node = subunit OR with_clause in Wf_Subprogram_Body"); end if; elsif Syntax_Node_Type (Node => With_Node) /= SP_Symbols.abody then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect With_Node = subunit OR abody in Wf_Subprogram_Body"); end if; end if; Formal_Part_Node := Next_Sibling (Current_Node => Child_Node (Current_Node => Spec_Node)); -- ASSUME Formal_Part_Node = formal_part OR type_mark OR NULL if Syntax_Node_Type (Node => Formal_Part_Node) = SP_Symbols.formal_part then -- ASSUME Formal_Part_Node = formal_part Wf_Formal_Part (Node => Formal_Part_Node, Current_Scope => Formal_Part_Scope, Subprog_Sym => Subprog_Sym, First_Occurrence => First_Seen, Context => Dictionary.ProgramContext); elsif Formal_Part_Node = STree.NullNode or else Syntax_Node_Type (Node => Formal_Part_Node) = SP_Symbols.type_mark then -- ASSUME Formal_Part_Node = type_mark OR NULL if Dictionary.GetNumberOfSubprogramParameters (Subprog_Sym) /= 0 then ErrorHandler.Semantic_Error (Err_Num => 152, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Dictionary.GetSimpleName (Subprog_Sym)); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Formal_Part_Node = formal_part OR type_mark OR NULL in Wf_Subprogram_Body"); end if; --# assert STree.Table = STree.Table~ and --# (Syntax_Node_Type (Node, STree.Table) = SP_Symbols.subprogram_body or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.not_overriding_subprogram_body) and --# (Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.procedure_specification or --# Syntax_Node_Type (Spec_Node, STree.Table) = 
SP_Symbols.function_specification) and --# Syntax_Node_Type (Subprog_Implem_Node, STree.Table) = SP_Symbols.subprogram_implementation and --# (Syntax_Node_Type (Main_Node, STree.Table) = SP_Symbols.proper_body or --# Syntax_Node_Type (Main_Node, STree.Table) = SP_Symbols.protected_operation_item or --# Syntax_Node_Type (Main_Node, STree.Table) = SP_Symbols.generic_subprogram_body or --# Syntax_Node_Type (Main_Node, STree.Table) = SP_Symbols.main_program_declaration) and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; Anno_Node := Next_Sibling (Current_Node => Spec_Node); -- ASSUME Anno_Node = procedure_annotation OR function_annotation SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Anno_Node) = SP_Symbols.procedure_annotation or else Syntax_Node_Type (Node => Anno_Node) = SP_Symbols.function_annotation, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Anno_Node = procedure_annotation OR function_annotation in Wf_Subprogram_Body"); Get_Subprogram_Anno_Key_Nodes (Node => Anno_Node, Global_Node => Global_Node, Dependency_Node => Dependency_Node, Declare_Node => Declare_Node, Constraint_Node => Constraint_Node); if Global_Node = STree.NullNode and then Dependency_Node = STree.NullNode and then Declare_Node = STree.NullNode then -- ASSUME Global_Node = NULL AND Dependency_Node = NULL AND Declare_Node = NULL if Syntax_Node_Type (Node => Parent_Node (Current_Node => Main_Node)) = SP_Symbols.abody or else Syntax_Node_Type (Node => Parent_Node (Current_Node => Main_Node)) = SP_Symbols.protected_body or else Syntax_Node_Type (Node => Parent_Node (Current_Node => Main_Node)) = SP_Symbols.protected_operation_item or else Syntax_Node_Type (Node => Parent_Node (Current_Node => Main_Node)) = SP_Symbols.library_unit_body or else Syntax_Node_Type (Node => Parent_Node (Current_Node => Main_Node)) = SP_Symbols.library_unit then -- ASSUME Parent_Node (Current_Node => Main_Node) = abody OR protected_body OR -- protected_operation_item OR -- library_unit_body OR library_unit if not First_Seen and then Requires_Second_Annotation (Subprog_Sym => Subprog_Sym) then ErrorHandler.Semantic_Error (Err_Num => 87, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Spec_Node), Id_Str => Dictionary.GetSimpleName (Subprog_Sym)); Dictionary.SetSubprogramSignatureNotWellformed (Dictionary.IsRefined, Subprog_Sym); elsif First_Seen and then Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.procedure_specification and then (CommandLineData.Content.Language_Profile = CommandLineData.SPARK83 or else CommandLineData.Content.Flow_Option = CommandLineData.Info_Flow) then -- Subprogram or task body does not have an annotation ErrorHandler.Semantic_Error (Err_Num => 154, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Spec_Node), Id_Str => Dictionary.GetSimpleName (Subprog_Sym)); Dictionary.SetSubprogramSignatureNotWellformed (Dictionary.IsAbstract, Subprog_Sym); end if; elsif Syntax_Node_Type (Node => Parent_Node (Current_Node => Main_Node)) /= SP_Symbols.subunit then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Parent_Node (Current_Node => Main_Node) = subunit OR abody OR protected_body OR " & "protected_operation_item OR library_unit_body OR library_unit in Wf_Subprogram_Body"); end if; else if not (First_Seen or else Requires_Second_Annotation (Subprog_Sym => Subprog_Sym)) or else Syntax_Node_Type (Node => Parent_Node (Current_Node => Main_Node)) = SP_Symbols.subunit then -- annotation not required 
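-- At this point an annotation is present but none is needed.  For a procedure this is
-- always error 155; for a function the code below separates a misplaced annotation
-- (error 335, nothing yet recorded against the abstract view) from a duplicate of the
-- annotation already given on the specification (error 336).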
if Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.procedure_specification then -- ASSUME Spec_Node = procedure_specification ErrorHandler.Semantic_Error (Err_Num => 155, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Anno_Node), Id_Str => Dictionary.GetSimpleName (Subprog_Sym)); elsif Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.function_specification then -- ASSUME Spec_Node = function_specification -- now distinguish between repeated anno and misplaced anno if Dictionary.IsNullIterator (Dictionary.FirstGlobalVariable (Dictionary.IsAbstract, Subprog_Sym)) then -- misplaced anno ErrorHandler.Semantic_Error (Err_Num => 335, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Anno_Node), Id_Str => Dictionary.GetSimpleName (Subprog_Sym)); else -- duplicated anno ErrorHandler.Semantic_Error (Err_Num => 336, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Anno_Node), Id_Str => Dictionary.GetSimpleName (Subprog_Sym)); end if; end if; else -- annotation both present and required Wf_Subprogram_Annotation (Node => Anno_Node, Current_Scope => Scope, Subprog_Sym => Subprog_Sym, First_Seen => First_Seen, The_Heap => TheHeap); end if; end if; Scope := Subprog_Scope; Next_Node := Spec_Node; --# assert STree.Table = STree.Table~ and --# (Syntax_Node_Type (Node, STree.Table) = SP_Symbols.subprogram_body or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.not_overriding_subprogram_body) and --# (Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.procedure_specification or --# Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.function_specification) and --# Syntax_Node_Type (Subprog_Implem_Node, STree.Table) = SP_Symbols.subprogram_implementation and --# (Syntax_Node_Type (Main_Node, STree.Table) = SP_Symbols.proper_body or --# Syntax_Node_Type (Main_Node, STree.Table) = SP_Symbols.protected_operation_item or --# Syntax_Node_Type (Main_Node, STree.Table) = SP_Symbols.generic_subprogram_body or --# Syntax_Node_Type (Main_Node, STree.Table) = SP_Symbols.main_program_declaration) and --# (Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.function_constraint or --# Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.procedure_constraint) and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; -- Synthesise the dependency "all exports from all imports" if necessary. 
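-- Where Needs_Synthetic_Dependency holds (for instance when only data-flow analysis
-- is requested and no explicit derives list was given), a dependency in which each
-- export is derived from every import is recorded, much as if the user had written
-- something like (hypothetical names)
--    --# derives X, Y from A, B;
-- for a procedure exporting X and Y and importing A and B.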
if Needs_Synthetic_Dependency (Proc_Task_Or_Entry => Subprog_Sym) and then Dictionary.IsProcedure (Subprog_Sym) then if Syntax_Node_Type (Node => Parent_Node (Current_Node => Main_Node)) = SP_Symbols.abody or else Syntax_Node_Type (Node => Parent_Node (Current_Node => Main_Node)) = SP_Symbols.protected_body or else Syntax_Node_Type (Node => Parent_Node (Current_Node => Main_Node)) = SP_Symbols.protected_operation_item or else Syntax_Node_Type (Node => Parent_Node (Current_Node => Main_Node)) = SP_Symbols.library_unit_body or else Syntax_Node_Type (Node => Parent_Node (Current_Node => Main_Node)) = SP_Symbols.library_unit then -- ASSUME Parent_Node (Current_Node => Main_Node) = abody OR protected_body OR -- protected_operation_item OR -- library_unit_body OR library_unit if First_Seen or else Requires_Second_Annotation (Subprog_Sym => Subprog_Sym) then Dependency_Relation.Create_Full_Subprog_Dependency (Node_Pos => Node_Position (Node => Node), Subprog_Sym => Subprog_Sym, Abstraction => Which_Abstraction (First_Seen), The_Heap => TheHeap); end if; elsif Syntax_Node_Type (Node => Parent_Node (Current_Node => Main_Node)) /= SP_Symbols.subunit then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Parent_Node (Current_Node => Main_Node) = subunit OR abody OR protected_body OR " & "protected_operation_item OR library_unit_body OR library_unit in Wf_Subprogram_Body"); end if; end if; --# assert STree.Table = STree.Table~ and --# (Syntax_Node_Type (Node, STree.Table) = SP_Symbols.subprogram_body or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.not_overriding_subprogram_body) and --# (Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.procedure_specification or --# Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.function_specification) and --# Syntax_Node_Type (Subprog_Implem_Node, STree.Table) = SP_Symbols.subprogram_implementation and --# (Syntax_Node_Type (Main_Node, STree.Table) = SP_Symbols.proper_body or --# Syntax_Node_Type (Main_Node, STree.Table) = SP_Symbols.protected_operation_item or --# Syntax_Node_Type (Main_Node, STree.Table) = SP_Symbols.generic_subprogram_body or --# Syntax_Node_Type (Main_Node, STree.Table) = SP_Symbols.main_program_declaration) and --# (Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.function_constraint or --# Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.procedure_constraint) and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; -- ASSUME Constraint_Node = procedure_constraint OR function_constraint if Syntax_Node_Type (Node => Child_Node (Current_Node => Constraint_Node)) = SP_Symbols.precondition or else Syntax_Node_Type (Node => Child_Node (Current_Node => Constraint_Node)) = SP_Symbols.postcondition or else Syntax_Node_Type (Node => Child_Node (Current_Node => Constraint_Node)) = SP_Symbols.return_expression then -- ASSUME Child_Node (Current_Node => Constraint_Node) = precondition OR postcondition OR return_expression -- a constraint exists; should it? 
Check here if not (First_Seen or else Requires_Second_Annotation (Subprog_Sym => Subprog_Sym) or else Has_Parameter_Global_Or_Return_Of_Local_Private_Type (Subprog_Sym => Subprog_Sym)) or else Syntax_Node_Type (Node => Parent_Node (Current_Node => Main_Node)) = SP_Symbols.subunit then -- annotation not required -- two possible errors: misplaced anno or duplicate anno if Dictionary.HasPrecondition (Dictionary.IsAbstract, Subprog_Sym) or else Dictionary.HasPostcondition (Dictionary.IsAbstract, Subprog_Sym) then -- illegal duplicate anno ErrorHandler.Semantic_Error (Err_Num => 343, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Constraint_Node), Id_Str => Dictionary.GetSimpleName (Subprog_Sym)); else -- misplaced anno ErrorHandler.Semantic_Error (Err_Num => 342, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Constraint_Node), Id_Str => Dictionary.GetSimpleName (Subprog_Sym)); end if; else -- annotation is required so continue Wf_Subprogram_Constraint (Node => Constraint_Node, Subprogram_Sym => Subprog_Sym, First_Seen => First_Seen, Component_Data => Component_Data, The_Heap => TheHeap); end if; elsif Child_Node (Current_Node => Constraint_Node) /= STree.NullNode then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child_Node (Current_Node => Constraint_Node) = precondition OR postcondition OR " & "return_expression OR NULL in Wf_Subprogram_Body"); end if; else Next_Node := STree.NullNode; end if; --# assert STree.Table = STree.Table~ and --# (Syntax_Node_Type (Node, STree.Table) = SP_Symbols.subprogram_body or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.not_overriding_subprogram_body) and --# (Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.procedure_specification or --# Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.function_specification) and --# Syntax_Node_Type (Subprog_Implem_Node, STree.Table) = SP_Symbols.subprogram_implementation and --# (Syntax_Node_Type (Main_Node, STree.Table) = SP_Symbols.proper_body or --# Syntax_Node_Type (Main_Node, STree.Table) = SP_Symbols.protected_operation_item or --# Syntax_Node_Type (Main_Node, STree.Table) = SP_Symbols.generic_subprogram_body or --# Syntax_Node_Type (Main_Node, STree.Table) = SP_Symbols.main_program_declaration) and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; -- set up identifier for hidden part reporting End_Desig_Node := Last_Sibling_Of (Start_Node => Child_Node (Current_Node => Subprog_Implem_Node)); -- ASSUME End_Desig_Node = designator OR hidden_part SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => End_Desig_Node) = SP_Symbols.designator or else Syntax_Node_Type (Node => End_Desig_Node) = SP_Symbols.hidden_part, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect End_Desig_Node = designator OR hidden_part in Wf_Subprogram_Body"); case Hidden is when All_Hidden => ErrorHandler.Hidden_Text (Position => Node_Position (Node => End_Desig_Node), Unit_Str => Node_Lex_String (Node => Ident_Node), Unit_Typ => SP_Symbols.subprogram_implementation); when Handler_Hidden => ErrorHandler.Hidden_Handler (Position => Node_Position (Node => End_Desig_Node), Unit_Str => Node_Lex_String (Node => Ident_Node), Unit_Typ => SP_Symbols.subprogram_implementation); when Not_Hidden => null; end case; -- For SPARK 83 and 95: -- If a potentially inheritable subprogram of the same name exists then -- the new declaration is only legal if it successfully overrides it. 
-- This check is only required if the subprogram has not been previously declared -- because, if it has, the check will already have been done in the package spec -- For SPARK 2005: -- The check is required even if the subprogram has been previously declared -- as we need to verify that the overriding_indicator is correct. if First_Seen or else CommandLineData.Content.Language_Profile in CommandLineData.SPARK2005_Profiles then Check_No_Overloading_From_Tagged_Ops (Ident_Node => Ident_Node, Subprog_Sym => Subprog_Sym, Scope => Scope, Abstraction => Dictionary.IsRefined, Is_Overriding => Is_Overriding); end if; if Dictionary.IsMainProgram (Subprog_Sym) and then Syntax_Node_Type (Node => Main_Node) /= SP_Symbols.generic_subprogram_body and then CommandLineData.Ravenscar_Selected then Shared_Variable_Check (Main_Program_Sym => Subprog_Sym, Scope => Scope, Error_Node_Pos => Node_Position (Node => Node)); Max_One_In_A_Queue_Check (Main_Program_Sym => Subprog_Sym, Scope => Scope, Error_Node_Pos => Node_Position (Node => Node)); end if; -- Check that function ends with a return statement; this check was previously done -- at up_wf_subprogram body where any error detected was too late to stop flow analysis -- or VC generation if Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.function_specification then -- ASSUME Spec_Node = function_specification Check_Function_Has_Return (Subprog_Node => Subprog_Implem_Node, End_Desig_Node_Pos => Node_Position (Node => End_Desig_Node)); end if; end Wf_Subprogram_Body; spark-2012.0.deb/examiner/sem-wf_package_declaration-wf_package_specification.adb0000644000175000017500000007533711753202336027143 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- Overview: -- Checks a Package Specification for Sem on down pass through -- TreeProcessor. Starts at node package_specification. May directly raise -- errors for: mismatch between initial and terminal identifier. -- Other errors may be raised indirectly by wf_package_annotation which -- are called from here. 
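-- The overall shape of a specification handled here is roughly (sketch only, shown
-- for orientation; see Get_Package_Specification_Key_Nodes below for the precise
-- node names):
--
--    package P
--    --# own ...                 -- package_annotation
--    is
--       ...                      -- visible_part_rep
--    private
--       ...                      -- basic_declarative_item_rep or hidden_part
--    end P;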
-------------------------------------------------------------------------------- with SLI; separate (Sem.Wf_Package_Declaration) procedure Wf_Package_Specification (Node : in STree.SyntaxNode; Ident_Str : in LexTokenManager.Lex_String; Pack_Sym : in Dictionary.Symbol; Current_Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) is Enclosing_Scope_Type : Enclosing_Scope_Types; End_Pos_Node, Anno_Node, Vis_Part_Node, Priv_Part_Node, Ident_Node, End_Name_Node : STree.SyntaxNode; Pack_Vis_Scope : Dictionary.Scopes; ------------------------------ procedure Get_Package_Specification_Key_Nodes (Node : in STree.SyntaxNode; Anno_Node : out STree.SyntaxNode; Ident_Node : out STree.SyntaxNode; Vis_Part_Node : out STree.SyntaxNode; Priv_Part_Node : out STree.SyntaxNode) --# global in STree.Table; --# derives Anno_Node, --# Ident_Node, --# Priv_Part_Node, --# Vis_Part_Node from Node, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.package_specification; --# post Syntax_Node_Type (Anno_Node, STree.Table) = SP_Symbols.package_annotation and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier and --# Syntax_Node_Type (Vis_Part_Node, STree.Table) = SP_Symbols.visible_part_rep and --# (Syntax_Node_Type (Priv_Part_Node, STree.Table) = SP_Symbols.basic_declarative_item_rep or --# Syntax_Node_Type (Priv_Part_Node, STree.Table) = SP_Symbols.hidden_part or --# Priv_Part_Node = STree.NullNode); is begin Anno_Node := Next_Sibling (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME Anno_Node = package_annotation SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Anno_Node) = SP_Symbols.package_annotation, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Anno_Node = package_annotation in Get_Package_Specification_Key_Nodes"); Ident_Node := Child_Node (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME Ident_Node = dotted_simple_name OR identifier if Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.dotted_simple_name then -- ASSUME Ident_Node = dotted_simple_name -- declaring a child package Ident_Node := Last_Child_Of (Start_Node => Ident_Node); elsif Syntax_Node_Type (Node => Ident_Node) /= SP_Symbols.identifier then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = dotted_simple_name OR identifier in Get_Package_Specification_Key_Nodes"); end if; -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Get_Package_Specification_Key_Nodes"); Vis_Part_Node := Child_Node (Current_Node => Next_Sibling (Current_Node => Anno_Node)); -- ASSUME Vis_Part_Node = visible_part_rep SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Vis_Part_Node) = SP_Symbols.visible_part_rep, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Vis_Part_Node = visible_part_rep in Get_Package_Specification_Key_Nodes"); Priv_Part_Node := Next_Sibling (Current_Node => Next_Sibling (Current_Node => Anno_Node)); -- ASSUME Priv_Part_Node = private_part OR dotted_simple_name if Syntax_Node_Type (Node => Priv_Part_Node) = SP_Symbols.private_part then -- ASSUME Private_Part = private_part Priv_Part_Node := Child_Node (Current_Node => Priv_Part_Node); elsif Syntax_Node_Type (Node => Priv_Part_Node) = SP_Symbols.dotted_simple_name then -- ASSUME Priv_Part_Node = dotted_simple_name 
Priv_Part_Node := STree.NullNode; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Priv_Part_Node = private_part OR dotted_simple_name in Get_Package_Specification_Key_Nodes"); end if; -- ASSUME Priv_Part_Node = basic_declarative_item_rep OR hidden_part OR NULL SystemErrors.RT_Assert (C => Priv_Part_Node = STree.NullNode or else Syntax_Node_Type (Node => Priv_Part_Node) = SP_Symbols.basic_declarative_item_rep or else Syntax_Node_Type (Node => Priv_Part_Node) = SP_Symbols.hidden_part, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Private_Part = basic_declarative_item_rep OR hidden_part OR NULL " & "in Get_Package_Specification_Key_Nodes"); end Get_Package_Specification_Key_Nodes; ------------------------------ procedure Wf_Anno (Node : in STree.SyntaxNode; Pack_Sym : in Dictionary.Symbol; Scope_Type : in Enclosing_Scope_Types; Scope : in Dictionary.Scopes; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Pack_Sym, --# Scope, --# Scope_Type, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Pack_Sym, --# Scope, --# Scope_Type, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.package_annotation; --# post STree.Table = STree.Table~; is separate; ------------------------------ procedure Wf_Visible (Node : in STree.SyntaxNode; Pack_Sym : in Dictionary.Symbol; Current_Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Pack_Sym, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# 
LexTokenManager.State, --# Node, --# Pack_Sym, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.visible_part_rep; --# post STree.Table = STree.Table~; is separate; ------------------------------ procedure Check_Deferred_Items (Error_Pos : in LexTokenManager.Token_Position; Pack_Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Error_Pos, --# LexTokenManager.State, --# Pack_Sym, --# SPARK_IO.File_Sys; is It : Dictionary.Iterator; Sym : Dictionary.Symbol; begin It := Dictionary.First_Deferred_Constant (The_Package => Pack_Sym); while not Dictionary.IsNullIterator (It) loop Sym := Dictionary.CurrentSymbol (It); if not Dictionary.Is_Declared (Item => Sym) then ErrorHandler.Semantic_Error (Err_Num => 26, Reference => ErrorHandler.No_Reference, Position => Error_Pos, Id_Str => Dictionary.GetSimpleName (Sym)); end if; It := Dictionary.NextSymbol (It); end loop; It := Dictionary.First_Private_Type (The_Package => Pack_Sym); while not Dictionary.IsNullIterator (It) loop Sym := Dictionary.CurrentSymbol (It); if not Dictionary.Is_Declared (Item => Sym) then ErrorHandler.Semantic_Error (Err_Num => 27, Reference => ErrorHandler.No_Reference, Position => Error_Pos, Id_Str => Dictionary.GetSimpleName (Sym)); end if; It := Dictionary.NextSymbol (It); end loop; end Check_Deferred_Items; ------------------------------ procedure Check_Types_Can_Be_Used (Pack_Sym : in Dictionary.Symbol; Err_Node_Pos : in LexTokenManager.Token_Position) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Err_Node_Pos, --# LexTokenManager.State, --# Pack_Sym, --# SPARK_IO.File_Sys; is separate; ------------------------------ procedure Check_Modes (Node : in STree.SyntaxNode; Pack_Sym : in Dictionary.Symbol; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Pack_Sym, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Pack_Sym, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.visible_part_rep; is separate; ------------------------------ procedure Wf_Private (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out 
LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# derives Aggregate_Stack.State, --# Component_Data, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# The_Heap from *, --# CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Component_Data, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Current_Scope, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.basic_declarative_item_rep; --# post STree.Table = STree.Table~; is separate; ------------------------------ procedure Check_Body_Required_By_Spark (Node_Pos : in LexTokenManager.Token_Position; Pack_Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node_Pos, --# Pack_Sym, --# SPARK_IO.File_Sys; is It : Dictionary.Iterator; begin It := Dictionary.FirstInitializedOwnVariable (Pack_Sym); while not Dictionary.IsNullIterator (It) loop if Dictionary.Is_Declared (Item => Dictionary.CurrentSymbol (It)) and then not Dictionary.VariableIsInitialized (Dictionary.CurrentSymbol (It)) then case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => ErrorHandler.Semantic_Warning (Err_Num => 407, Position => Node_Pos, Id_Str => LexTokenManager.Null_String); when CommandLineData.SPARK95_Onwards => ErrorHandler.Semantic_Error (Err_Num => 607, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => LexTokenManager.Null_String); end case; exit; end if; It := Dictionary.NextSymbol (It); end loop; end Check_Body_Required_By_Spark; ------------------------------ procedure Check_State_Can_Be_Initialized (Pack_Sym : in Dictionary.Symbol; Anno_Node : in STree.SyntaxNode) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Anno_Node, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Pack_Sym, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Anno_Node, STree.Table) = SP_Symbols.package_annotation; is separate; begin -- Wf_Package_Specification Get_Package_Specification_Key_Nodes (Node => Node, Anno_Node => Anno_Node, Ident_Node => Ident_Node, Vis_Part_Node => Vis_Part_Node, Priv_Part_Node => Priv_Part_Node); -- tells us where package is being declared Find_Enclosing_Scope_Type (Scope => Current_Scope, Enclosing_Scope_Type => Enclosing_Scope_Type); Pack_Vis_Scope := Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Pack_Sym); --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Anno_Node, STree.Table) = 
SP_Symbols.package_annotation and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier and --# Syntax_Node_Type (Vis_Part_Node, STree.Table) = SP_Symbols.visible_part_rep; Wf_Anno (Node => Anno_Node, Pack_Sym => Pack_Sym, Scope_Type => Enclosing_Scope_Type, Scope => Pack_Vis_Scope, The_Heap => The_Heap); Wf_Visible (Node => Vis_Part_Node, Pack_Sym => Pack_Sym, Current_Scope => Pack_Vis_Scope, Component_Data => Component_Data, The_Heap => The_Heap); --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Anno_Node, STree.Table) = SP_Symbols.package_annotation and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier and --# Syntax_Node_Type (Vis_Part_Node, STree.Table) = SP_Symbols.visible_part_rep; -- ASSUME Priv_Part_Node = basic_declarative_item_rep OR hidden_part OR NULL if Priv_Part_Node = STree.NullNode then -- ASSUME Priv_Part_Node = NULL -- no private part End_Name_Node := Last_Sibling_Of (Start_Node => Anno_Node); -- ASSUME End_Name_Node = dotted_simple_name SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => End_Name_Node) = SP_Symbols.dotted_simple_name, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect End_Name_Node = dotted_simple_name in Wf_Package_Specification"); Check_Closing_Identifier (End_Name_Node => End_Name_Node, Ident_Node => Ident_Node); Check_Deferred_Items (Error_Pos => Node_Position (Node => Child_Node (Current_Node => End_Name_Node)), Pack_Sym => Pack_Sym); Check_Types_Can_Be_Used (Pack_Sym => Pack_Sym, Err_Node_Pos => Node_Position (Node => End_Name_Node)); elsif Syntax_Node_Type (Node => Priv_Part_Node) = SP_Symbols.hidden_part then -- ASSUME Priv_Part_Node = hidden_part Dictionary.AddPrivatePart (Pack_Sym, Dictionary.Location'(Start_Position => Node_Position (Node => Priv_Part_Node), End_Position => Node_Position (Node => Priv_Part_Node)), True); ErrorHandler.Hidden_Text (Position => Node_Position (Node => Priv_Part_Node), Unit_Str => Ident_Str, Unit_Typ => SP_Symbols.private_part); Check_Types_Can_Be_Used (Pack_Sym => Pack_Sym, Err_Node_Pos => Node_Position (Node => Priv_Part_Node)); elsif Syntax_Node_Type (Node => Priv_Part_Node) = SP_Symbols.basic_declarative_item_rep then -- ASSUME Priv_Part_Node = basic_declarative_item_rep Dictionary.AddPrivatePart (Pack_Sym, Dictionary.Location'(Start_Position => Node_Position (Node => Priv_Part_Node), End_Position => Node_Position (Node => Priv_Part_Node)), False); Wf_Private (Node => Priv_Part_Node, Current_Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Privat, The_Unit => Pack_Sym), Component_Data => Component_Data, The_Heap => The_Heap); -- check that private types resolved into scalars have not -- invalidated parameter declarations of exported procedures if CommandLineData.Content.Language_Profile = CommandLineData.SPARK83 then -- guarded because '83 rules different from '95 onwards Check_Modes (Node => Vis_Part_Node, Pack_Sym => Pack_Sym, The_Heap => The_Heap); end if; End_Name_Node := Next_Sibling (Current_Node => Priv_Part_Node); -- ASSUME End_Name_Node = dotted_simple_name SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => End_Name_Node) = SP_Symbols.dotted_simple_name, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect End_Name_Node = dotted_simple_name in Wf_Package_Specification"); Check_Closing_Identifier (End_Name_Node => End_Name_Node, Ident_Node => Ident_Node); Check_Deferred_Items (Error_Pos => Node_Position (Node => Child_Node (Current_Node => End_Name_Node)), Pack_Sym => Pack_Sym); Check_Types_Can_Be_Used (Pack_Sym 
=> Pack_Sym, Err_Node_Pos => Node_Position (Node => End_Name_Node)); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Private_Part = basic_declarative_item_rep OR hidden_part OR NULL in Wf_Package_Specification"); end if; -- check for cases where package requires a body --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Anno_Node, STree.Table) = SP_Symbols.package_annotation; if Enclosing_Scope_Type = In_Library and then not Dictionary.PackageRequiresBody (Pack_Sym) then End_Pos_Node := Last_Sibling_Of (Start_Node => Anno_Node); -- ASSUME End_Pos_Node = private_part OR dotted_simple_name if Syntax_Node_Type (Node => End_Pos_Node) = SP_Symbols.private_part then -- ASSUME End_Pos_Node = private_part -- declaring a library package for which Ada rules do not demand a body so -- checks must be made to see if Spark rules require a body End_Pos_Node := Child_Node (Current_Node => End_Pos_Node); -- ASSUME End_Pos_Node = basic_declarative_item_rep OR hidden_part if Syntax_Node_Type (Node => End_Pos_Node) = SP_Symbols.basic_declarative_item_rep then End_Pos_Node := Next_Sibling (Current_Node => End_Pos_Node); elsif Syntax_Node_Type (Node => End_Pos_Node) /= SP_Symbols.hidden_part then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect End_Pos_Node = basic_declarative_item_rep OR hidden_part in Wf_Package_Specification"); end if; elsif Syntax_Node_Type (Node => End_Pos_Node) /= SP_Symbols.dotted_simple_name then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect End_Pos_Node = private_part OR dotted_simple_name in Wf_Package_Specification"); end if; Check_Body_Required_By_Spark (Node_Pos => Node_Position (Node => End_Pos_Node), Pack_Sym => Pack_Sym); end if; Check_State_Can_Be_Initialized (Pack_Sym => Pack_Sym, Anno_Node => Anno_Node); Check_Announced_Types_Declared (Pack_Sym => Pack_Sym, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Pack_Sym), Node_Pos => Node_Position (Node => Last_Sibling_Of (Start_Node => Anno_Node))); -- The cross-references for the own variables are generated -- after the full semantic analysis of the package -- specification because we need to know if an own variable is -- actually an abstract own variable or a visible concrete own -- variable. If it is an abstract own variable, the own -- variable is considered as a declaration; if it is a visible -- concrete own variable, the own variable is a usage of the -- visible concrete variable that will be declared later in the -- same package specification. If the declaration of the -- concrete variable only appears in the package body, the own -- variable is considered as an abstract declaration.
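-- Illustrative sketch only, not part of the original source: the package
-- names P and Q and the variable V below are invented to show the two cases
-- distinguished in the comment above.
--
--    package P
--    --# own State;     -- abstract own variable: no visible declaration of
--                       -- State follows, so the own clause is itself treated
--                       -- as the declaration
--    is
--       ...
--    end P;
--
--    package Q
--    --# own V;         -- visible concrete own variable: the own clause is a
--                       -- usage of V, which is declared below in the visible
--                       -- part of the same specification
--    is
--       V : Integer;
--    end Q;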
if ErrorHandler.Generate_SLI and then Child_Node (Current_Node => Anno_Node) /= STree.NullNode then SLI.Generate_Xref_Own (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Child_Node (Current_Node => Anno_Node), Scope => Pack_Vis_Scope); end if; end Wf_Package_Specification; spark-2012.0.deb/examiner/simplelists.adb0000644000175000017500000000524211753202336017274 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- package to provide simple, unordered list of naturals, duplicates permitted - -------------------------------------------------------------------------------- package body SimpleLists is procedure Init (List : out SimpleList) is begin --# accept F, 32, List.ListArray, "Initialization partial but effective" & --# F, 31, List.ListArray, "Initialization partial but effective" & --# F, 602, List, List.ListArray, "Initialization partial but effective"; List.HighestOccupied := 0; end Init; -------------------------------------------------------------------- procedure AddItem (Item : in Natural; List : in out SimpleList; Ok : out Boolean) is begin if List.HighestOccupied = ExaminerConstants.SimpleListsSize then Ok := False; else Ok := True; List.HighestOccupied := List.HighestOccupied + 1; List.ListArray (List.HighestOccupied) := Item; end if; end AddItem; -------------------------------------------------------------------- function NumberOfItems (List : SimpleList) return Natural is begin return List.HighestOccupied; end NumberOfItems; -------------------------------------------------------------------- procedure GetItem (List : in SimpleList; Position : in Positive; Item : out Natural; Ok : out Boolean) is begin if Position > List.HighestOccupied then Ok := False; Item := 0; else Ok := True; Item := List.ListArray (Position); end if; end GetItem; end SimpleLists; spark-2012.0.deb/examiner/sem-wf_pragma-wf_elaborate_body.adb0000644000175000017500000002035011753202336023113 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- Synopsis -- This procedure checks the validity of a pragma elaborate_body for SPARK 95 -------------------------------------------------------------------------------- separate (Sem.Wf_Pragma) procedure Wf_Elaborate_Body (Pragma_Node : in STree.SyntaxNode; Pack_Sym : in Dictionary.Symbol) is Id_Node : STree.SyntaxNode; Exp_Node : STree.SyntaxNode; procedure Check_Represent_Same_Name (Exp_Node : in STree.SyntaxNode; Pack_Sym : in Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Exp_Node, --# LexTokenManager.State, --# Pack_Sym, --# SPARK_IO.File_Sys, --# STree.Table & --# STree.Table from *, --# Dictionary.Dict, --# Exp_Node, --# LexTokenManager.State, --# Pack_Sym; --# pre Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.ADA_expression; --# post STree.Table = STree.Table~; is Is_Chain : Boolean; Id_Node, Next_Node : STree.SyntaxNode; Name : LexTokenManager.Lex_String; begin Name := Dictionary.GetSimpleName (Item => Pack_Sym); Id_Node := Exp_Node; loop --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.ADA_expression; Is_Chain := Next_Sibling (Current_Node => Id_Node) = STree.NullNode; Next_Node := Child_Node (Current_Node => Id_Node); exit when not Is_Chain or else Next_Node = STree.NullNode; Id_Node := Next_Node; end loop; if Is_Chain and then Syntax_Node_Type (Node => Id_Node) = SP_Symbols.identifier and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Node => Id_Node), Lex_Str2 => Name) = LexTokenManager.Str_Eq then STree.Set_Node_Lex_String (Sym => Pack_Sym, Node => Id_Node); else ErrorHandler.Semantic_Error (Err_Num => 606, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Exp_Node), Id_Str => Name); end if; end Check_Represent_Same_Name; begin -- Wf_Elaborate_Body if Dictionary.IsPackage (Pack_Sym) and then Dictionary.GetScope (Pack_Sym) = Dictionary.GlobalScope then -- legal, library-level package Id_Node := Child_Node (Current_Node => Pragma_Node); -- ASSUME Id_Node = identifier OR assert_pragma if Syntax_Node_Type (Node => Id_Node) = SP_Symbols.identifier then -- ASSUME Id_Node = identifier Dictionary.SetPackageElaborateBodyFound (Pack_Sym); Exp_Node := Next_Sibling (Current_Node => Id_Node); -- ASSUME Exp_Node = argument_association_rep OR NULL if Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.argument_association_rep then -- ASSUME Exp_Node = argument_association_rep Exp_Node := Child_Node (Current_Node => Exp_Node); -- ASSUME Exp_Node = argument_association_rep OR argument_association if Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.argument_association then -- ASSUME Exp_Node = argument_association Exp_Node := Child_Node (Current_Node => Exp_Node); -- ASSUME Exp_Node = identifier OR ADA_expression if Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.identifier then -- ASSUME Exp_Node = identifier -- wrong number of arguments ErrorHandler.Semantic_Error (Err_Num => 605, 
Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Id_Node), Id_Str => LexTokenManager.Null_String); elsif Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.ADA_expression then -- ASSUME Exp_Node = ADA_expression Check_Represent_Same_Name (Exp_Node => Exp_Node, Pack_Sym => Pack_Sym); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Exp_Node = identifier OR ADA_expression in Wf_Elaborate_Body"); end if; elsif Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.argument_association_rep then -- ASSUME Exp_Node = argument_association_rep -- wrong number of arguments ErrorHandler.Semantic_Error (Err_Num => 605, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Id_Node), Id_Str => LexTokenManager.Null_String); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Exp_Node = argument_association_rep OR argument_association in Wf_Elaborate_Body"); end if; elsif Exp_Node = STree.NullNode then -- ASSUME Exp_Node = NULL -- wrong number of arguments ErrorHandler.Semantic_Error (Err_Num => 605, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Id_Node), Id_Str => LexTokenManager.Null_String); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Exp_Node = argument_association_rep OR NULL in Wf_Elaborate_Body"); end if; elsif Syntax_Node_Type (Node => Id_Node) = SP_Symbols.assert_pragma then -- ASSUME Id_Node = assert_pragma -- wrong number of arguments ErrorHandler.Semantic_Error (Err_Num => 605, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Id_Node), Id_Str => LexTokenManager.Null_String); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Id_Node = identifier OR assert_pragma in Wf_Elaborate_Body"); end if; else -- not a library level package ErrorHandler.Semantic_Error (Err_Num => 72, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Pragma_Node), Id_Str => LexTokenManager.Elaborate_Body_Token); end if; end Wf_Elaborate_Body; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_record_component_selector_name.adb0000644000175000017500000001673311753202336030203 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Walk_Expression_P) procedure Wf_Record_Component_Selector_Name (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; Heap_Param : in out Lists.List_Heap) is Name_Exp, Field_Info : Sem.Exp_Record; Field_Ident : LexTokenManager.Lex_String; Field_Symbol : Dictionary.Symbol; Already_Present : Boolean; Ident_Node : STree.SyntaxNode; -------------------------------------------- procedure Check_Valid_Field (Aggregate_Type : in Dictionary.Symbol; Ancestor_Type : in Dictionary.Symbol; Ident_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Field_Symbol : out Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Aggregate_Type, --# Ancestor_Type, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# Field_Symbol from Aggregate_Type, --# Ancestor_Type, --# CommandLineData.Content, --# Dictionary.Dict, --# Ident_Node, --# LexTokenManager.State, --# Scope, --# STree.Table & --# STree.Table from *, --# Aggregate_Type, --# CommandLineData.Content, --# Dictionary.Dict, --# Ident_Node, --# LexTokenManager.State, --# Scope; --# pre STree.Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; --# post STree.Table = STree.Table~; is Aggregate_Field_Sym, Ancestor_Field_Sym : Dictionary.Symbol; Field_Str : LexTokenManager.Lex_String; begin Field_Str := STree.Node_Lex_String (Node => Ident_Node); Aggregate_Field_Sym := Dictionary.LookupSelectedItem (Prefix => Aggregate_Type, Selector => Field_Str, Scope => Scope, Context => Dictionary.ProgramContext); if Dictionary.Is_Null_Symbol (Aggregate_Field_Sym) then -- no such field ErrorHandler.Semantic_Error (Err_Num => 8, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Ident_Node), Id_Str => Field_Str); Field_Symbol := Dictionary.NullSymbol; else STree.Set_Node_Lex_String (Sym => Aggregate_Field_Sym, Node => Ident_Node); -- field found, but we need to check that it is not in the ancestor part Ancestor_Field_Sym := Dictionary.LookupSelectedItem (Prefix => Ancestor_Type, Selector => Field_Str, Scope => Scope, Context => Dictionary.ProgramContext); if Dictionary.Is_Null_Symbol (Ancestor_Field_Sym) then -- not in ancestor part Field_Symbol := Aggregate_Field_Sym; else -- it is in the ancestor part ErrorHandler.Semantic_Error (Err_Num => 865, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Ident_Node), Id_Str => Field_Str); Field_Symbol := Dictionary.NullSymbol; end if; end if; end Check_Valid_Field; begin -- Wf_Record_Component_Selector_Name Exp_Stack.Pop (Item => Name_Exp, Stack => E_Stack); Ident_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = identifier in Wf_Record_Component_Selector_Name"); Field_Ident := STree.Node_Lex_String (Node => Ident_Node); Check_Valid_Field (Aggregate_Type => Name_Exp.Type_Symbol, Ancestor_Type => Name_Exp.Other_Symbol, Ident_Node => Ident_Node, Scope => Scope, Field_Symbol => 
Field_Symbol); if Dictionary.Is_Null_Symbol (Field_Symbol) then -- look up failed so push a null record as a placeholder Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); Exp_Stack.Push (X => Null_Parameter_Record, Stack => E_Stack); else -- valid field name Add_Name (Name => Field_Ident, List => Name_Exp.Param_List, Heap_Param => Heap_Param, Present => Already_Present); if Already_Present then Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); Exp_Stack.Push (X => Null_Parameter_Record, Stack => E_Stack); ErrorHandler.Semantic_Error (Err_Num => 103, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Ident_Node), Id_Str => Field_Ident); else -- no value thus far assigned Field_Info := Null_Parameter_Record; Field_Info.Other_Symbol := Field_Symbol; Exp_Stack.Push (X => Name_Exp, Stack => E_Stack); Exp_Stack.Push (X => Field_Info, Stack => E_Stack); end if; end if; end Wf_Record_Component_Selector_Name; spark-2012.0.deb/examiner/flowanalyser-flowanalyse-analyserelations-checkexpressions.adb0000644000175000017500000004207311753202336030710 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (FlowAnalyser.FlowAnalyse.AnalyseRelations) procedure CheckExpressions is Expn, ZeroStableExpn, OneStableExpn, OtherStableExpn, Variable : SeqAlgebra.MemberOfSeq; ExpnNmbr, VarNmbr : Natural; VarSym : Dictionary.Symbol; Intersecn, LambdaCol, MuRow, ThetaCol, ThetaTildeCol : SeqAlgebra.Seq; procedure AddRecordComponentError (ErrClass : in ComponentErrors.ErrorClass; ErrVal : in Natural; Position : in LexTokenManager.Token_Position; Sym : in Dictionary.Symbol) --# global in ComponentData; --# in out Statistics.TableUsage; --# in out TheErrorHeap; --# in out TheHeap; --# derives Statistics.TableUsage, --# TheHeap from *, --# ComponentData, --# ErrClass, --# ErrVal, --# Position, --# Sym, --# TheErrorHeap, --# TheHeap & --# TheErrorHeap from *, --# ErrClass, --# ErrVal, --# Position, --# TheHeap; is NewError : Natural; begin ComponentErrors.CreateError (TheErrorHeap, TheHeap, ErrClass, ErrVal, Position, Dictionary.NullSymbol, --to get NewError); ComponentManager.AddError (TheHeap, TheErrorHeap, ComponentData, ComponentManager.GetComponentNode (ComponentData, Sym), NewError); end AddRecordComponentError; begin -- CheckExpressions Expn := SeqAlgebra.FirstMember (TheHeap, IFA_Stack.Top (S).SeqOfExpns); ZeroStableExpn := SeqAlgebra.FirstMember (TheHeap, ZeroStableExpnSeq); OneStableExpn := SeqAlgebra.FirstMember (TheHeap, OneStableExpnSeq); OtherStableExpn := SeqAlgebra.FirstMember (TheHeap, OtherStableExpnSeq); while not SeqAlgebra.IsNullMember (Expn) loop --# assert True; ExpnNmbr := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => Expn); if KindDictionary (ExpnNmbr) /= ControlVarAssignment and then KindDictionary (ExpnNmbr) /= ModellingStmt then RelationAlgebra.ColExtraction (TheHeap, IFA_Stack.Top (S).Theta, ExpnNmbr, ThetaCol); RelationAlgebra.ColExtraction (TheHeap, IFA_Stack.Top (S).ThetaTilde, ExpnNmbr, ThetaTildeCol); SeqAlgebra.Reduction (TheHeap, ThetaCol, SeqOfInitVars); SeqAlgebra.Reduction (TheHeap, ThetaCol, ExpSeqOfImports); Variable := SeqAlgebra.FirstMember (TheHeap, ThetaCol); while not SeqAlgebra.IsNullMember (Variable) loop DataFlowErrorFoundLocal := True; -- signal presence of data flow error to caller --# assert True; VarNmbr := SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => Variable); VarSym := Dictionary.ConvertSymbolRef (ExaminerConstants.RefType (VarNmbr)); if SeqAlgebra.IsMember (TheHeap, ThetaTildeCol, VarNmbr) then --# accept Flow, 41, "Expected stable expression"; case KindDictionary (ExpnNmbr) is when ComplexAssignment | FieldUpdateByProc => --# end accept; if Dictionary.IsSubcomponent (VarSym) then AddRecordComponentError (ComponentErrors.DataFlow, ErrorHandler.Data_Flow_Err_Type'Pos (ErrorHandler.Stmt_Undefined), STree.Node_Position (Node => StmtLocations (ExpnNmbr)), VarSym); else ErrorHandler.Data_Flow_Error (Err_Type => ErrorHandler.Stmt_Undefined, Position => STree.Node_Position (Node => StmtLocations (ExpnNmbr)), Var_Sym => VarSym, Scope => Scope); end if; when others => if Dictionary.IsSubcomponent (VarSym) then AddRecordComponentError (ComponentErrors.DataFlow, ErrorHandler.Data_Flow_Err_Type'Pos (ErrorHandler.Expn_Undefined), STree.Node_Position (Node => ExpnLocations (ExpnNmbr)), VarSym); else ErrorHandler.Data_Flow_Error (Err_Type => ErrorHandler.Expn_Undefined, Position => STree.Node_Position (Node => ExpnLocations (ExpnNmbr)), Var_Sym => VarSym, Scope => Scope); end if; end case; else --# accept Flow, 41, "Expected stable expression"; case 
KindDictionary (ExpnNmbr) is when ComplexAssignment | FieldUpdateByProc => --# end accept; if Dictionary.IsSubcomponent (VarSym) then AddRecordComponentError (ComponentErrors.DataFlow, ErrorHandler.Data_Flow_Err_Type'Pos (ErrorHandler.Stmt_May_Be_Undefined), STree.Node_Position (Node => StmtLocations (ExpnNmbr)), VarSym); else ErrorHandler.Data_Flow_Error (Err_Type => ErrorHandler.Stmt_May_Be_Undefined, Position => STree.Node_Position (Node => StmtLocations (ExpnNmbr)), Var_Sym => VarSym, Scope => Scope); end if; when others => if Dictionary.IsSubcomponent (VarSym) then AddRecordComponentError (ComponentErrors.DataFlow, ErrorHandler.Data_Flow_Err_Type'Pos (ErrorHandler.Expn_May_Be_Undefined), STree.Node_Position (Node => ExpnLocations (ExpnNmbr)), VarSym); else ErrorHandler.Data_Flow_Error (Err_Type => ErrorHandler.Expn_May_Be_Undefined, Position => STree.Node_Position (Node => ExpnLocations (ExpnNmbr)), Var_Sym => VarSym, Scope => Scope); end if; end case; end if; Variable := SeqAlgebra.NextMember (TheHeap, Variable); end loop; SeqAlgebra.DisposeOfSeq (TheHeap, ThetaCol); SeqAlgebra.DisposeOfSeq (TheHeap, ThetaTildeCol); --# assert True; RelationAlgebra.RowExtraction (TheHeap, IFA_Stack.Top (S).Mu, ExpnNmbr, MuRow); SeqAlgebra.Intersection (TheHeap, MuRow, ExpSeqOfExports, Intersecn); if SeqAlgebra.IsEmptySeq (TheHeap, Intersecn) then VarSym := ParamDictionary (ExpnNmbr); if not Dictionary.Is_Null_Variable (VarSym) then -- don't report case KindDictionary (ExpnNmbr) is when ComplexAssignment | FieldUpdateByProc => if Dictionary.IsSubcomponent (VarSym) then if KindDictionary (ExpnNmbr) = ComplexAssignment then AddRecordComponentError (ComponentErrors.IneffectiveStmt, 0, STree.Node_Position (Node => StmtLocations (ExpnNmbr)), VarSym); else --must be a record field directly assigned by proc call AddRecordComponentError (ComponentErrors.IneffectiveFieldAssignment, 0, STree.Node_Position (Node => StmtLocations (ExpnNmbr)), VarSym); end if; else ErrorHandler.Ineffective_Stmt (STree.Node_Position (Node => StmtLocations (ExpnNmbr)), ParamDictionary (ExpnNmbr), Scope); end if; when Initialization => --# accept Flow, 41, "Expected stable expression"; if Dictionary.Is_Subprogram (SubprogSym) then --# end accept; if Dictionary.IsSubcomponent (VarSym) then AddRecordComponentError (ComponentErrors.Dependency, ErrorHandler.Dependency_Err_Type'Pos (ErrorHandler.Ineff_Local_Init), STree.Node_Position (Node => STree.RefToNode (Dictionary.GetVariableExpNode (ComponentManager.GetName (ComponentData, ComponentManager.GetRoot (ComponentData, ComponentManager.GetComponentNode (ComponentData, VarSym)))))), VarSym); else ErrorHandler.Dependency_Error (Err_Type => ErrorHandler.Ineff_Local_Init, Position => STree.Node_Position (Node => STree.RefToNode (Dictionary.GetVariableExpNode (ParamDictionary (ExpnNmbr)))), Import_Var_Sym => VarSym, Export_Var_Sym => Dictionary.NullSymbol, Scope => Scope); end if; else --must be package if Dictionary.IsSubcomponent (VarSym) then AddRecordComponentError (ComponentErrors.Dependency, ErrorHandler.Dependency_Err_Type'Pos (ErrorHandler.Ineff_Local_Init), EndPosition, VarSym); else ErrorHandler.Dependency_Error (Err_Type => ErrorHandler.Ineff_Local_Init, Position => EndPosition, Import_Var_Sym => VarSym, Export_Var_Sym => Dictionary.NullSymbol, Scope => Scope); end if; end if; when others => ErrorHandler.Ineffective_Stmt (Position => STree.Node_Position (Node => StmtLocations (ExpnNmbr)), Var_Sym => Dictionary.NullSymbol, Scope => Scope); end case; end if; -- null variable end if; 
SeqAlgebra.DisposeOfSeq (TheHeap, Intersecn); SeqAlgebra.DisposeOfSeq (TheHeap, MuRow); --# assert True; if (KindDictionary (ExpnNmbr) = ForkExpn) and not SeqAlgebra.IsMember (TheHeap, InnerExpns, ExpnNmbr) then RelationAlgebra.ColExtraction (TheHeap, IFA_Stack.Top (S).Lambda, ExpnNmbr, LambdaCol); SeqAlgebra.Intersection (TheHeap, LambdaCol, ExpSeqOfImports, Intersecn); if SeqAlgebra.IsEmptySeq (TheHeap, Intersecn) then ErrorHandler.Data_Flow_Error (Err_Type => ErrorHandler.Invariant_Exp, Position => STree.Node_Position (Node => ExpnLocations (ExpnNmbr)), Var_Sym => Dictionary.NullSymbol, Scope => Scope); end if; SeqAlgebra.DisposeOfSeq (TheHeap, LambdaCol); SeqAlgebra.DisposeOfSeq (TheHeap, Intersecn); end if; --# assert True; if not SeqAlgebra.IsNullMember (ZeroStableExpn) then if SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => ZeroStableExpn) = ExpnNmbr then if (KindDictionary (ExpnNmbr) = ExitExpn) then ErrorHandler.Stability_Error (Err_Type => ErrorHandler.Stable_Exit_Cond, Position => STree.Node_Position (Node => ExpnLocations (ExpnNmbr)), Stability_Index => ErrorHandler.Index_Zero); elsif (KindDictionary (ExpnNmbr) = ForkExpn) then ErrorHandler.Stability_Error (Err_Type => ErrorHandler.Stable_Fork_Cond, Position => STree.Node_Position (Node => ExpnLocations (ExpnNmbr)), Stability_Index => ErrorHandler.Index_Zero); end if; -- 898 -- we also have a kind of stable exit in the case of a DefaultExitExpression -- inserted into infinite loops at the "End_of_loop" node so as to provide -- a syntactic exit point for the benefit of the flow analyser. Since a -- DefaultExitExpression is neither an ExitExpn or a ForkExpn, it is ignored -- by the preceding if statement and no error is reported. ZeroStableExpn := SeqAlgebra.NextMember (TheHeap, ZeroStableExpn); end if; end if; --# assert True; if not SeqAlgebra.IsNullMember (OneStableExpn) then if SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => OneStableExpn) = ExpnNmbr then if (KindDictionary (ExpnNmbr) = ExitExpn) then ErrorHandler.Stability_Error (Err_Type => ErrorHandler.Stable_Exit_Cond, Position => STree.Node_Position (Node => ExpnLocations (ExpnNmbr)), Stability_Index => ErrorHandler.Index_One); elsif (KindDictionary (ExpnNmbr) = ForkExpn) then ErrorHandler.Stability_Error (Err_Type => ErrorHandler.Stable_Fork_Cond, Position => STree.Node_Position (Node => ExpnLocations (ExpnNmbr)), Stability_Index => ErrorHandler.Index_One); end if; OneStableExpn := SeqAlgebra.NextMember (TheHeap, OneStableExpn); end if; end if; --# assert True; if not SeqAlgebra.IsNullMember (OtherStableExpn) then if SeqAlgebra.Value_Of_Member (The_Heap => TheHeap, M => OtherStableExpn) = ExpnNmbr then if (KindDictionary (ExpnNmbr) = ExitExpn) then ErrorHandler.Stability_Error (Err_Type => ErrorHandler.Stable_Exit_Cond, Position => STree.Node_Position (Node => ExpnLocations (ExpnNmbr)), Stability_Index => ErrorHandler.Larger_Index); elsif (KindDictionary (ExpnNmbr) = ForkExpn) then ErrorHandler.Stability_Error (Err_Type => ErrorHandler.Stable_Fork_Cond, Position => STree.Node_Position (Node => ExpnLocations (ExpnNmbr)), Stability_Index => ErrorHandler.Larger_Index); end if; OtherStableExpn := SeqAlgebra.NextMember (TheHeap, OtherStableExpn); end if; end if; end if; Expn := SeqAlgebra.NextMember (TheHeap, Expn); end loop; end CheckExpressions; spark-2012.0.deb/examiner/sem-wf_external_interface.adb0000644000175000017500000006243711753202336022055 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran 
Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= --Synopsis --This procedure checks the validity of a pragma interface (Ada83) or pragma --import (Ada95). The checks made are: -- 1. Internal consistency of associations used, number of parameters etc. -- 2. The Entity/Subprogram name is that expected -------------------------------------------------------------------------------- separate (Sem) procedure Wf_External_Interface (Pragma_Node : in STree.SyntaxNode; Entity_Sym : in Dictionary.Symbol; Error_Found : out Boolean) is procedure Check_Represent_Same_Name (Exp_Node : in STree.SyntaxNode; Entity_Sym : in Dictionary.Symbol; Error_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Entity_Sym, --# ErrorHandler.Error_Context, --# Exp_Node, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# STree.Table & --# Error_Found, --# STree.Table from *, --# Dictionary.Dict, --# Entity_Sym, --# Exp_Node, --# LexTokenManager.State, --# STree.Table; --# pre Syntax_Node_Type (Exp_Node, STree.Table) = SP_Symbols.ADA_expression; --# post STree.Table = STree.Table~; is Is_Chain : Boolean; Id_Node, Next_Node : STree.SyntaxNode; Name : LexTokenManager.Lex_String; begin Name := Dictionary.GetSimpleName (Item => Entity_Sym); Id_Node := Exp_Node; loop --# assert STree.Table = STree.Table~; Is_Chain := Next_Sibling (Current_Node => Id_Node) = STree.NullNode; Next_Node := Child_Node (Current_Node => Id_Node); exit when not Is_Chain or else Next_Node = STree.NullNode; Id_Node := Next_Node; end loop; if Is_Chain and then Syntax_Node_Type (Node => Id_Node) = SP_Symbols.identifier and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Node => Id_Node), Lex_Str2 => Name) = LexTokenManager.Str_Eq then -- ASSUME Id_Node = identifier STree.Set_Node_Lex_String (Sym => Entity_Sym, Node => Id_Node); else Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 71, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Exp_Node), Id_Str => Name); end if; end Check_Represent_Same_Name; ------------------------------------------------------------------ procedure Wf_Pragma_Interface (Pragma_Node : in STree.SyntaxNode; Entity_Sym : in Dictionary.Symbol; Error_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives 
ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Entity_Sym, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Pragma_Node, --# SPARK_IO.File_Sys, --# STree.Table & --# Error_Found, --# STree.Table from *, --# Dictionary.Dict, --# Entity_Sym, --# LexTokenManager.State, --# Pragma_Node, --# STree.Table; --# pre Syntax_Node_Type (Pragma_Node, STree.Table) = SP_Symbols.apragma; --# post STree.Table = STree.Table~; is Arg_Assoc_Rep_Node : STree.SyntaxNode; Subprog_Name_Node : STree.SyntaxNode; begin Arg_Assoc_Rep_Node := Child_Node (Current_Node => Pragma_Node); -- ASSUME Arg_Assoc_Rep_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Arg_Assoc_Rep_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Arg_Assoc_Rep_Node = identifier in Wf_Pragma_Interface"); Arg_Assoc_Rep_Node := Next_Sibling (Current_Node => Arg_Assoc_Rep_Node); -- ASSUME Arg_Assoc_Rep_Node = argument_association_rep OR NULL if Syntax_Node_Type (Node => Arg_Assoc_Rep_Node) = SP_Symbols.argument_association_rep then -- ASSUME Arg_Assoc_Rep_Node = argument_association_rep Arg_Assoc_Rep_Node := Child_Node (Current_Node => Arg_Assoc_Rep_Node); -- ASSUME Arg_Assoc_Rep_Node = argument_association_rep OR argument_association if Syntax_Node_Type (Node => Arg_Assoc_Rep_Node) = SP_Symbols.argument_association_rep then -- ASSUME Arg_Assoc_Rep_Node = argument_association_rep if Syntax_Node_Type (Node => Child_Node (Current_Node => Arg_Assoc_Rep_Node)) = SP_Symbols.argument_association then -- ASSUME Child_Node (Current_Node => Arg_Assoc_Rep_Node) = argument_association -- pragma has 2 arguments Subprog_Name_Node := Child_Node (Current_Node => Next_Sibling (Current_Node => Arg_Assoc_Rep_Node)); -- ASSUME Subprog_Name_Node = identifier OR ADA_expression if Syntax_Node_Type (Node => Subprog_Name_Node) = SP_Symbols.identifier then -- ASSUME Subprog_Name_Node = identifier -- form of expression wrong Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 71, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Subprog_Name_Node), Id_Str => Dictionary.GetSimpleName (Item => Entity_Sym)); elsif Syntax_Node_Type (Node => Subprog_Name_Node) = SP_Symbols.ADA_expression then -- ASSUME Subprog_Name_Node = ADA_expression -- form of expression ok so check name actually matches Check_Represent_Same_Name (Exp_Node => Subprog_Name_Node, Entity_Sym => Entity_Sym, Error_Found => Error_Found); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Subprog_Name_Node = identifier OR ADA_expression in Wf_Pragma_Interface"); end if; elsif Syntax_Node_Type (Node => Child_Node (Current_Node => Arg_Assoc_Rep_Node)) = SP_Symbols.argument_association_rep then -- ASSUME Child_Node (Current_Node => Arg_Assoc_Rep_Node) = argument_association_rep -- pragma does have more than 2 arguments Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 69, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Pragma_Node), Id_Str => LexTokenManager.Interface_Token); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child_Node (Current_Node => Arg_Assoc_Rep_Node = argument_association_rep OR " & "argument_association in Wf_Pragma_Interface"); end if; elsif Syntax_Node_Type (Node => Arg_Assoc_Rep_Node) = SP_Symbols.argument_association then -- ASSUME Arg_Assoc_Rep_Node = argument_association -- pragma does have 1 argument 
Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 69, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Pragma_Node), Id_Str => LexTokenManager.Interface_Token); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Arg_Assoc_Rep_Node = argument_association_rep OR argument_association in Wf_Pragma_Interface"); end if; elsif Arg_Assoc_Rep_Node = STree.NullNode then -- ASSUME Arg_Assoc_Rep_Node = NULL -- pragma does have 0 argument Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 69, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Pragma_Node), Id_Str => LexTokenManager.Interface_Token); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Arg_Assoc_Rep_Node = argument_association_rep OR NULL in Wf_Pragma_Interface"); end if; end Wf_Pragma_Interface; ---------------------- procedure Wf_Pragma_Import (Pragma_Node : in STree.SyntaxNode; Entity_Sym : in Dictionary.Symbol; Error_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# Entity_Sym, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Pragma_Node, --# SPARK_IO.File_Sys, --# STree.Table & --# Error_Found, --# STree.Table from *, --# Dictionary.Dict, --# Entity_Sym, --# LexTokenManager.State, --# Pragma_Node, --# STree.Table; --# pre Syntax_Node_Type (Pragma_Node, STree.Table) = SP_Symbols.apragma; --# post STree.Table = STree.Table~; is Max_Args : constant Natural := 4; type Args is (Illegal, Convention, Entity, External_Name, Link_Name); subtype Legal_Args is Args range Convention .. Link_Name; type Founds is array (Legal_Args) of Boolean; subtype Arg_Count_T is Natural range 0 .. 
Max_Args; Found : Founds := Founds'(Legal_Args => False); Using_Named_Association : Boolean := False; Arg_Ass_Node : STree.SyntaxNode; Arg_Count : Arg_Count_T := 0; procedure Check_Argument (Node : in STree.SyntaxNode; Entity_Sym : in Dictionary.Symbol; Arg_Count : in Arg_Count_T; Using_Named_Association : in out Boolean; Found : in out Founds; Error_Found : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Arg_Count, --# CommandLineData.Content, --# Dictionary.Dict, --# Entity_Sym, --# ErrorHandler.Error_Context, --# Found, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table, --# Using_Named_Association & --# Error_Found, --# STree.Table from *, --# Arg_Count, --# Dictionary.Dict, --# Entity_Sym, --# Found, --# LexTokenManager.State, --# Node, --# STree.Table, --# Using_Named_Association & --# Found from *, --# Arg_Count, --# LexTokenManager.State, --# Node, --# STree.Table, --# Using_Named_Association & --# Using_Named_Association from *, --# Node, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.argument_association and --# Arg_Count >= 1; --# post STree.Table = STree.Table~; is Exp_Node : STree.SyntaxNode; Arg : Args; function Get_Arg (Arg_String : LexTokenManager.Lex_String) return Args --# global in LexTokenManager.State; is Result : Args; begin if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Arg_String, Lex_Str2 => LexTokenManager.Convention_Token) = LexTokenManager.Str_Eq then Result := Convention; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Arg_String, Lex_Str2 => LexTokenManager.Entity_Token) = LexTokenManager.Str_Eq then Result := Entity; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Arg_String, Lex_Str2 => LexTokenManager.External_Name_Token) = LexTokenManager.Str_Eq then Result := External_Name; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Arg_String, Lex_Str2 => LexTokenManager.Link_Name_Token) = LexTokenManager.Str_Eq then Result := Link_Name; else Result := Illegal; end if; return Result; end Get_Arg; begin -- Check_Argument Exp_Node := Child_Node (Current_Node => Node); -- ASSUME Exp_Node = identifier OR ADA_expression if Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.identifier then -- ASSUME Exp_Node = identifier -- named association Using_Named_Association := True; Arg := Get_Arg (Arg_String => Node_Lex_String (Node => Exp_Node)); if Arg = Illegal then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 601, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); elsif Found (Arg) then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 602, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Exp_Node), Id_Str => Node_Lex_String (Node => Exp_Node)); else Found (Arg) := True; if Arg = Entity then Exp_Node := Next_Sibling (Current_Node => Exp_Node); -- ASSUME Exp_Node = ADA_expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.ADA_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Exp_Node = ADA_expression in Check_Argument"); Check_Represent_Same_Name (Exp_Node => Exp_Node, Entity_Sym => Entity_Sym, Error_Found => Error_Found); end if; end if; 
elsif Syntax_Node_Type (Node => Exp_Node) = SP_Symbols.ADA_expression then -- ASSUME Exp_Node = ADA_expression -- positional association if Using_Named_Association then -- illegal switch form named to positional assoc Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 601, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Exp_Node), Id_Str => LexTokenManager.Null_String); else Arg := Args'Val (Arg_Count); if Arg /= Illegal then Found (Arg) := True; if Arg = Entity then Check_Represent_Same_Name (Exp_Node => Exp_Node, Entity_Sym => Entity_Sym, Error_Found => Error_Found); end if; end if; end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Exp_Node = identifier OR ADA_expression in Check_Argument"); end if; end Check_Argument; begin -- Wf_Pragma_Import Arg_Ass_Node := Child_Node (Current_Node => Pragma_Node); -- ASSUME Arg_Ass_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Arg_Ass_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Arg_Ass_Node = identifier in Wf_Pragma_Import"); Arg_Ass_Node := Next_Sibling (Current_Node => Arg_Ass_Node); -- ASSUME Arg_Ass_Node = argument_association_rep OR NULL if Syntax_Node_Type (Node => Arg_Ass_Node) = SP_Symbols.argument_association_rep then -- ASSUME Arg_Ass_Node = argument_association_rep while Syntax_Node_Type (Node => Arg_Ass_Node) = SP_Symbols.argument_association_rep loop --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Arg_Ass_Node, STree.Table) = SP_Symbols.argument_association_rep; Arg_Ass_Node := Child_Node (Current_Node => Arg_Ass_Node); -- ASSUME Arg_Ass_Node = argument_association_rep OR argument_association SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Arg_Ass_Node) = SP_Symbols.argument_association_rep or else Syntax_Node_Type (Node => Arg_Ass_Node) = SP_Symbols.argument_association, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Arg_Ass_Node = argument_association_rep OR argument_association in Wf_Pragma_Import"); end loop; --# check Syntax_Node_Type (Arg_Ass_Node, STree.Table) = SP_Symbols.argument_association; -- now pointing at leftmost argument association while Syntax_Node_Type (Node => Arg_Ass_Node) = SP_Symbols.argument_association loop --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Arg_Ass_Node, STree.Table) = SP_Symbols.argument_association; if Arg_Count = Max_Args then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 600, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Arg_Ass_Node), Id_Str => LexTokenManager.Null_String); exit; end if; Arg_Count := Arg_Count + 1; Check_Argument (Node => Arg_Ass_Node, Entity_Sym => Entity_Sym, Arg_Count => Arg_Count, Using_Named_Association => Using_Named_Association, Found => Found, Error_Found => Error_Found); Arg_Ass_Node := Next_Sibling (Current_Node => Parent_Node (Current_Node => Arg_Ass_Node)); -- ASSUME Arg_Ass_Node = argument_association OR NULL SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Arg_Ass_Node) = SP_Symbols.argument_association or else Arg_Ass_Node = STree.NullNode, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Arg_Ass_Node = argument_association OR NULL in Wf_Pragma_Import"); end loop; if Arg_Count < 2 then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 600, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Pragma_Node), Id_Str => LexTokenManager.Convention_Token); else if not 
Found (Convention) then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 603, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Pragma_Node), Id_Str => LexTokenManager.Convention_Token); end if; if not Found (Entity) then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 603, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Pragma_Node), Id_Str => LexTokenManager.Entity_Token); end if; end if; elsif Arg_Ass_Node = STree.NullNode then -- ASSUME Arg_Ass_Node = NULL -- there are no arguments Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 600, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Pragma_Node), Id_Str => LexTokenManager.Null_String); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Arg_Ass_Node = argument_association_rep OR NULL in Wf_Pragma_Import"); end if; end Wf_Pragma_Import; begin -- Wf_External_Interface Error_Found := False; case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => Wf_Pragma_Interface (Pragma_Node => Pragma_Node, Entity_Sym => Entity_Sym, Error_Found => Error_Found); when CommandLineData.SPARK95_Onwards => Wf_Pragma_Import (Pragma_Node => Pragma_Node, Entity_Sym => Entity_Sym, Error_Found => Error_Found); end case; end Wf_External_Interface; spark-2012.0.deb/examiner/errorhandler-printline.adb0000644000175000017500000001062311753202336021414 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (ErrorHandler) procedure PrintLine (Listing : in SPARK_IO.File_Type; Start_Pos, End_Pos, Indent : in Natural; Line : in E_Strings.T; Add_New_Line : in Boolean; New_Start : out Natural) is Pos, Current_Line_End, Current_Line_Start : Natural; procedure Print_Current_Line --# global in Add_New_Line; --# in Current_Line_End; --# in Current_Line_Start; --# in Line; --# in Listing; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Add_New_Line, --# Current_Line_End, --# Current_Line_Start, --# Line, --# Listing; is begin for Ix in Natural range Current_Line_Start .. 
Current_Line_End loop Put_Char (Listing, E_Strings.Get_Element (E_Str => Line, Pos => Ix)); end loop; if Current_Line_End /= E_Strings.Get_Length (E_Str => Line) or Add_New_Line then New_Line (File => Listing, Spacing => 1); end if; end Print_Current_Line; procedure Find_Current_Line_End (Current_Line_Pos : in Natural) --# global in Current_Line_Start; --# in End_Pos; --# in Line; --# in Pos; --# out Current_Line_End; --# derives Current_Line_End from Current_Line_Pos, --# Current_Line_Start, --# End_Pos, --# Line, --# Pos; is Next_Space_Pos, Current_Space_Pos : Integer; function Find_Next_Space (Curr_Pos : in Natural) return Natural --# global in Line; is Next_Pos : Natural; begin Next_Pos := Curr_Pos; loop exit when Next_Pos = E_Strings.Get_Length (E_Str => Line); Next_Pos := Next_Pos + 1; exit when E_Strings.Get_Element (E_Str => Line, Pos => Next_Pos) = ' '; end loop; return Next_Pos; end Find_Next_Space; begin if End_Pos = 0 then Current_Line_End := E_Strings.Get_Length (E_Str => Line); else Current_Space_Pos := Find_Next_Space (Curr_Pos => Current_Line_Pos); loop exit when Current_Space_Pos = E_Strings.Get_Length (E_Str => Line); Next_Space_Pos := Find_Next_Space (Curr_Pos => Current_Space_Pos); exit when (Pos + Next_Space_Pos) - Current_Line_Start >= End_Pos; Current_Space_Pos := Next_Space_Pos; end loop; Current_Line_End := Current_Space_Pos; end if; end Find_Current_Line_End; begin Current_Line_Start := 1; Pos := Start_Pos; Find_Current_Line_End (Current_Line_Pos => 0); loop Print_Current_Line; exit when Current_Line_End = E_Strings.Get_Length (E_Str => Line); Put_Spaces (File => Listing, N => Indent); Pos := Indent; Current_Line_Start := Current_Line_End + 1; Find_Current_Line_End (Current_Line_Pos => Current_Line_Start); end loop; New_Start := Pos + ((Current_Line_End + 1) - Current_Line_Start); end PrintLine; spark-2012.0.deb/examiner/vcg.ads0000644000175000017500000001320711753202337015525 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- VCG -- -- Purpose: -- -- The main entry point of the VC-Generator. Takes the Syntax-Tree, Dictionary, -- LexTokenManager table and Flow Analyser Heap that have been previously -- populated by the Sem and FlowAnalyser phases, and produces Verification -- Conditions or Path Functions for the given subprogram. 
-- -- In many of the VCG Components, there are references to "JFB section x.y" -- These are referring to -- Jean Francois Bergeretti -- "An Algebraic Approach to Program Analysis: Foundations of a Practical -- Analysis System" -- PhD Thesis -- University of Southampton Department of Electronics, 1979 -- -- This document effectively serves as a mathematical specification of the -- VC Generator. -- -- For a copy of this document, please contact either the SPARK team at Praxis -- (sparkinfo@praxis-his.com), or contact the University of Southampton. -- -- Clients: -- -- Sem.CompUnit -- -- Use: -- -- See Sem.CompUnit -- -- Extension: -- -- None planned. -------------------------------------------------------------------------------- with CommandLineData; with Dictionary; with Heap; with LexTokenManager; with STree; use type CommandLineData.Rule_Generation_Policies; use type Dictionary.Rule_Policies; use type Dictionary.Scopes; --# inherit Cells, --# Clists, --# CommandLineData, --# CStacks, --# DAG, --# Debug, --# Declarations, --# Dictionary, --# ErrorHandler, --# ExaminerConstants, --# E_Strings, --# FileSystem, --# File_Utils, --# Graph, --# Heap, --# Labels, --# LexTokenLists, --# LexTokenManager, --# Maths, --# Pairs, --# Pile, --# ScreenEcho, --# SeqAlgebra, --# SPARK_IO, --# SP_Symbols, --# Statistics, --# StmtStack, --# STree, --# Structures, --# SystemErrors; package VCG --# own Invoked : Boolean; --# initializes Invoked; is -- The "Invoked" flag is set when the VCG runs and actually generates any VCs. -- This is used in MainLoop to warn the user of the common error where a -- user runs the Examiner on a package specification expecting VCs to be -- generated, but doesn't get any VCs, or (worse) ends up with an old set -- of VCs left over from an earlier run. 
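   -- As an illustrative sketch only (not part of the original source), a caller
   -- such as MainLoop might consult the flag after running the generator along
   -- the following hypothetical lines:
   --
   --    if not VCG.Invoked then
   --       --  warn the user that no VCs were produced for this unit
   --       null;
   --    end if;
   --
   -- The actual call site and the wording of the warning are in MainLoop and
   -- are not reproduced here.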
Invoked : Boolean := False; procedure Generate_VCs (Start_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Do_VCG : in Boolean; End_Position : in LexTokenManager.Token_Position; Flow_Heap : in out Heap.HeapRecord; Semantic_Error_In_Subprogram : in Boolean; Data_Flow_Error_In_Subprogram : in Boolean; Type_Check_Exports : in Boolean); --# global in CommandLineData.Content; --# in STree.Table; --# in out Declarations.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out Graph.Table; --# in out Invoked; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out StmtStack.S; --# derives Declarations.State, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Flow_Heap, --# Graph.Table, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Statistics.TableUsage, --# StmtStack.S from *, --# CommandLineData.Content, --# Data_Flow_Error_In_Subprogram, --# Dictionary.Dict, --# Do_VCG, --# End_Position, --# ErrorHandler.Error_Context, --# Flow_Heap, --# LexTokenManager.State, --# Scope, --# Semantic_Error_In_Subprogram, --# SPARK_IO.File_Sys, --# Start_Node, --# STree.Table, --# Type_Check_Exports & --# Invoked from *, --# Do_VCG; end VCG; spark-2012.0.deb/examiner/dictionary-addinheritsreference.adb0000644000175000017500000003042211753202336023242 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Dictionary) procedure AddInheritsReference (The_Unit : in Symbol; The_Inherited_Symbol : in Symbol; Explicit : in Boolean; Comp_Unit : in ContextManager.UnitDescriptors; Declaration : in Location; Already_Present : out Boolean) is The_Inherited_Package : RawDict.Package_Info_Ref; The_Inherited_Subprogram : RawDict.Subprogram_Info_Ref; The_Context_Clause : RawDict.Context_Clause_Info_Ref; Need_To_Add : Boolean; -------------------------------------------------------------------------------- procedure Add_Inherits_Annotation (The_Inherited_Symbol : in Symbol; The_Unit : in Symbol; Declaration : in Location) --# global in Dict; --# in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Declaration, --# Dict, --# LexTokenManager.State, --# The_Inherited_Symbol, --# The_Unit; is begin if SPARK_IO.Is_Open (Dict.TemporaryFile) then Write_String (Dict.TemporaryFile, "inherits annotation of "); Write_Name (File => Dict.TemporaryFile, Item => The_Inherited_Symbol); Write_String (Dict.TemporaryFile, " in "); Write_Name (File => Dict.TemporaryFile, Item => The_Unit); Write_String (Dict.TemporaryFile, " is at "); Write_Location (File => Dict.TemporaryFile, Loc => Declaration); Write_Line (Dict.TemporaryFile, " ;"); end if; end Add_Inherits_Annotation; -------------------------------------------------------------------------------- procedure Add_Package_Inherits_Reference (The_Context_Clause : in RawDict.Context_Clause_Info_Ref; The_Package : in RawDict.Package_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Context_Clause, --# The_Package; is begin RawDict.Set_Next_Context_Clause (The_Context_Clause => The_Context_Clause, Next => RawDict.Get_Package_Inherit_Clauses (The_Package => The_Package)); RawDict.Set_Package_Inherit_Clauses (The_Package => The_Package, The_Context_Clause => The_Context_Clause); end Add_Package_Inherits_Reference; -------------------------------------------------------------------------------- procedure Add_Subprogram_Inherits_Reference (The_Context_Clause : in RawDict.Context_Clause_Info_Ref; The_Subprogram : in RawDict.Subprogram_Info_Ref) --# global in out Dict; --# derives Dict from *, --# The_Context_Clause, --# The_Subprogram; is begin RawDict.Set_Next_Context_Clause (The_Context_Clause => The_Context_Clause, Next => RawDict.Get_Subprogram_Inherit_Clauses (The_Subprogram => The_Subprogram)); RawDict.Set_Subprogram_Inherit_Clauses (The_Subprogram => The_Subprogram, The_Inherit_Clause => The_Context_Clause); if Get_The_Partition /= RawDict.Null_Subprogram_Info_Ref and then The_Subprogram = Get_Main_Program then RawDict.Set_Subprogram_Inherit_Clauses (The_Subprogram => Get_The_Partition, The_Inherit_Clause => The_Context_Clause); end if; end Add_Subprogram_Inherits_Reference; -------------------------------------------------------------------------------- procedure Check_If_Already_Present (The_Inherited_Symbol : in Symbol; The_Unit : in Symbol; Explicit : in Boolean; Already_Present : out Boolean; Need_To_Add : out Boolean) --# global in out Dict; --# derives Already_Present, --# Dict from Dict, --# Explicit, --# The_Inherited_Symbol, --# The_Unit & --# Need_To_Add from Dict, --# The_Inherited_Symbol, --# The_Unit; is The_Inherited_Package : RawDict.Package_Info_Ref; The_Inherited_Subprogram : RawDict.Subprogram_Info_Ref; The_Context_Clause : RawDict.Context_Clause_Info_Ref; begin Already_Present := False; Need_To_Add := 
True; case RawDict.GetSymbolDiscriminant (The_Unit) is when Package_Symbol => The_Context_Clause := RawDict.Get_Package_Inherit_Clauses (The_Package => RawDict.Get_Package_Info_Ref (Item => The_Unit)); -- GAA External when Subprogram_Symbol => The_Context_Clause := RawDict.Get_Subprogram_Inherit_Clauses (The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => The_Unit)); -- GAA External when others => -- non-exec code The_Context_Clause := RawDict.Null_Context_Clause_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Check_If_Already_Present"); end case; case RawDict.GetSymbolDiscriminant (The_Inherited_Symbol) is when Package_Symbol => The_Inherited_Package := RawDict.Get_Package_Info_Ref (Item => The_Inherited_Symbol); -- GAA External loop exit when The_Context_Clause = RawDict.Null_Context_Clause_Info_Ref; if not RawDict.Get_Context_Clause_Is_Subprogram (The_Context_Clause => The_Context_Clause) and then RawDict.Get_Context_Clause_Package (The_Context_Clause => The_Context_Clause) = The_Inherited_Package then Need_To_Add := False; if Explicit then if RawDict.Get_Context_Clause_Explicit (The_Context_Clause => The_Context_Clause) then Already_Present := True; else RawDict.Set_Context_Clause_Explicit (The_Context_Clause => The_Context_Clause); end if; end if; exit; end if; The_Context_Clause := RawDict.Get_Next_Context_Clause (The_Context_Clause => The_Context_Clause); end loop; when Subprogram_Symbol | ImplicitProofFunctionSymbol => case RawDict.GetSymbolDiscriminant (The_Inherited_Symbol) is when Subprogram_Symbol => The_Inherited_Subprogram := RawDict.Get_Subprogram_Info_Ref (Item => The_Inherited_Symbol); -- GAA External when ImplicitProofFunctionSymbol => The_Inherited_Subprogram := RawDict.GetImplicitProofFunctionAdaFunction (The_Inherited_Symbol); when others => The_Inherited_Subprogram := RawDict.Null_Subprogram_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Check_If_Already_Present"); end case; loop exit when The_Context_Clause = RawDict.Null_Context_Clause_Info_Ref; if RawDict.Get_Context_Clause_Is_Subprogram (The_Context_Clause => The_Context_Clause) and then RawDict.Get_Context_Clause_Subprogram (The_Context_Clause => The_Context_Clause) = The_Inherited_Subprogram then Need_To_Add := False; if Explicit then if RawDict.Get_Context_Clause_Explicit (The_Context_Clause => The_Context_Clause) then Already_Present := True; else RawDict.Set_Context_Clause_Explicit (The_Context_Clause => The_Context_Clause); end if; end if; exit; end if; The_Context_Clause := RawDict.Get_Next_Context_Clause (The_Context_Clause => The_Context_Clause); end loop; when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.Check_If_Already_Present"); end case; end Check_If_Already_Present; begin -- AddInheritsReference Check_If_Already_Present (The_Inherited_Symbol => The_Inherited_Symbol, The_Unit => The_Unit, Explicit => Explicit, Already_Present => Already_Present, Need_To_Add => Need_To_Add); if Need_To_Add then Add_Inherits_Annotation (The_Inherited_Symbol => The_Inherited_Symbol, The_Unit => The_Unit, Declaration => Declaration); case RawDict.GetSymbolDiscriminant (The_Inherited_Symbol) is when Package_Symbol => The_Inherited_Package := RawDict.Get_Package_Info_Ref (Item => The_Inherited_Symbol); -- GAA External The_Inherited_Subprogram := RawDict.Null_Subprogram_Info_Ref; when Subprogram_Symbol => The_Inherited_Package := 
RawDict.Null_Package_Info_Ref; The_Inherited_Subprogram := RawDict.Get_Subprogram_Info_Ref (Item => The_Inherited_Symbol); -- GAA External when ImplicitProofFunctionSymbol => The_Inherited_Package := RawDict.Null_Package_Info_Ref; The_Inherited_Subprogram := RawDict.GetImplicitProofFunctionAdaFunction (The_Inherited_Symbol); when others => -- non-exec code The_Inherited_Package := RawDict.Null_Package_Info_Ref; The_Inherited_Subprogram := RawDict.Null_Subprogram_Info_Ref; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AddInheritsReference"); end case; RawDict.Create_Context_Clause (The_Package => The_Inherited_Package, The_Subprogram => The_Inherited_Subprogram, Explicit => Explicit, Comp_Unit => Comp_Unit, Loc => Declaration.Start_Position, The_Context_Clause => The_Context_Clause); case RawDict.GetSymbolDiscriminant (The_Unit) is when Package_Symbol => Add_Package_Inherits_Reference (The_Context_Clause => The_Context_Clause, The_Package => RawDict.Get_Package_Info_Ref (Item => The_Unit)); -- GAA External when Subprogram_Symbol => Add_Subprogram_Inherits_Reference (The_Context_Clause => The_Context_Clause, The_Subprogram => RawDict.Get_Subprogram_Info_Ref (Item => The_Unit)); -- GAA External when others => -- non-exec code SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Symbol_Table, Msg => "in Dictionary.AddInheritsReference"); end case; AddOtherReference (The_Inherited_Symbol, The_Unit, Declaration); end if; end AddInheritsReference; spark-2012.0.deb/examiner/spark_io.ads0000644000175000017500000002533011753202336016554 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= package SPARK_IO --# own File_Sys; --# initializes File_Sys; is type File_Type is private; type File_Mode is (In_File, Out_File, Append_File); type File_Status is ( Ok, Status_Error, Mode_Error, Name_Error, Use_Error, Device_Error, End_Error, Data_Error, Layout_Error); subtype Number_Base is Integer range 2 .. 
16; function Standard_Input return File_Type; --# global in File_Sys; function Standard_Output return File_Type; --# global in File_Sys; Null_File : constant File_Type; ------------------ -- File Management ------------------ procedure Create (File : in out File_Type; Name_Length : in Natural; Name_Of_File : in String; Form_Of_File : in String; Status : out File_Status); --# global in out File_Sys; --# derives File, --# File_Sys, --# Status from File, --# File_Sys, --# Form_Of_File, --# Name_Length, --# Name_Of_File; procedure Open (File : in out File_Type; Mode_Of_File : in File_Mode; Name_Length : in Natural; Name_Of_File : in String; Form_Of_File : in String; Status : out File_Status); --# global in out File_Sys; --# derives File, --# File_Sys, --# Status from File, --# File_Sys, --# Form_Of_File, --# Mode_Of_File, --# Name_Length, --# Name_Of_File; procedure Close (File : in out File_Type; Status : out File_Status); --# global in out File_Sys; --# derives File, --# File_Sys from *, --# File & --# Status from File, --# File_Sys; procedure Delete (File : in out File_Type; Status : out File_Status); --# global in out File_Sys; --# derives File, --# File_Sys from *, --# File & --# Status from File, --# File_Sys; procedure Reset (File : in out File_Type; Mode_Of_File : in File_Mode; Status : out File_Status); --# global in out File_Sys; --# derives File, --# File_Sys from *, --# File, --# Mode_Of_File & --# Status from File, --# File_Sys, --# Mode_Of_File; function Valid_File (File : File_Type) return Boolean; --# global in File_Sys; function Mode (File : File_Type) return File_Mode; --# global in File_Sys; procedure Name (File : in File_Type; Name_Of_File : out String; Stop : out Natural); --# global in File_Sys; --# derives Name_Of_File, --# Stop from File, --# File_Sys; procedure Form (File : in File_Type; Form_Of_File : out String; Stop : out Natural); --# global in File_Sys; --# derives Form_Of_File, --# Stop from File, --# File_Sys; function Is_Open (File : File_Type) return Boolean; --# global in File_Sys; -------------------------------------------- -- Control of default input and output files -------------------------------------------- -- -- Not supported in SPARK_IO -- ----------------------------------------- -- Specification of line and page lengths ----------------------------------------- -- -- Not supported in SPARK_IO -- -------------------------------- -- Column, Line and Page Control -------------------------------- procedure New_Line (File : in File_Type; Spacing : in Positive); --# global in out File_Sys; --# derives File_Sys from *, --# File, --# Spacing; procedure Skip_Line (File : in File_Type; Spacing : in Positive); --# global in out File_Sys; --# derives File_Sys from *, --# File, --# Spacing; procedure New_Page (File : in File_Type); --# global in out File_Sys; --# derives File_Sys from *, --# File; function End_Of_Line (File : File_Type) return Boolean; --# global in File_Sys; function End_Of_File (File : File_Type) return Boolean; --# global in File_Sys; procedure Set_Col (File : in File_Type; Posn : in Positive); --# global in out File_Sys; --# derives File_Sys from *, --# File, --# Posn; function Col (File : File_Type) return Positive; --# global in File_Sys; function Line (File : File_Type) return Positive; --# global in File_Sys; ------------------------- -- Character Input-Output ------------------------- procedure Get_Char (File : in File_Type; Item : out Character); --# global in out File_Sys; --# derives File_Sys, --# Item from File, --# File_Sys; 
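   -- Illustrative sketch only (not part of the original SPARK_IO source): a
   -- client might combine the character operations in this section to copy a
   -- text stream, for example:
   --
   --    procedure Copy_Chars (Source, Dest : in SPARK_IO.File_Type)
   --    --# global in out SPARK_IO.File_Sys;
   --    --# derives SPARK_IO.File_Sys from *, Source, Dest;
   --    is
   --       Ch : Character;
   --    begin
   --       while not SPARK_IO.End_Of_File (Source) loop
   --          SPARK_IO.Get_Char (Source, Ch);
   --          SPARK_IO.Put_Char (Dest, Ch);
   --       end loop;
   --    end Copy_Chars;
   --
   -- Copy_Chars and its annotations are hypothetical and are shown only to
   -- illustrate the Get_Char/Put_Char/End_Of_File interface declared here.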
procedure Put_Char (File : in File_Type; Item : in Character); --# global in out File_Sys; --# derives File_Sys from *, --# File, --# Item; ---------------------- -- String Input-Output ---------------------- procedure Get_String (File : in File_Type; Item : out String; Stop : out Natural); --# global in out File_Sys; --# derives File_Sys, --# Item, --# Stop from File, --# File_Sys; procedure Put_String (File : in File_Type; Item : in String; Stop : in Natural); --# global in out File_Sys; --# derives File_Sys from *, --# File, --# Item, --# Stop; procedure Get_Line (File : in File_Type; Item : out String; Stop : out Natural); --# global in out File_Sys; --# derives File_Sys, --# Item, --# Stop from File, --# File_Sys; procedure Put_Line (File : in File_Type; Item : in String; Stop : in Natural); --# global in out File_Sys; --# derives File_Sys from *, --# File, --# Item, --# Stop; ----------------------- -- Integer Input-Output ----------------------- -- SPARK_IO only supports input-output of -- the built-in integer type Integer procedure Get_Integer (File : in File_Type; Item : out Integer; Width : in Natural; Read : out Boolean); --# global in out File_Sys; --# derives File_Sys, --# Item, --# Read from File, --# File_Sys, --# Width; procedure Put_Integer (File : in File_Type; Item : in Integer; Width : in Natural; Base : in Number_Base); --# global in out File_Sys; --# derives File_Sys from *, --# Base, --# File, --# Item, --# Width; procedure Get_Int_From_String (Source : in String; Item : out Integer; Start_Pos : in Positive; Stop : out Natural); --# derives Item, --# Stop from Source, --# Start_Pos; --------------------- -- Float Input-Output --------------------- -- SPARK_IO only supports input-output of -- the built-in real type Float procedure Get_Float (File : in File_Type; Item : out Float; Width : in Natural; Read : out Boolean); --# global in out File_Sys; --# derives File_Sys, --# Item, --# Read from File, --# File_Sys, --# Width; procedure Put_Float (File : in File_Type; Item : in Float; Fore : in Natural; Aft : in Natural; Exp : in Natural); --# global in out File_Sys; --# derives File_Sys from *, --# Aft, --# Exp, --# File, --# Fore, --# Item; procedure Get_Float_From_String (Source : in String; Item : out Float; Start_Pos : in Positive; Stop : out Natural); --# derives Item, --# Stop from Source, --# Start_Pos; procedure Put_Float_To_String (Dest : in out String; Item : in Float; Start_Pos : in Positive; Aft : in Natural; Exp : in Natural); --# derives Dest from *, --# Aft, --# Exp, --# Item, --# Start_Pos; pragma Inline (Valid_File, End_Of_Line, End_Of_File, Get_Char); private --# hide SPARK_IO; type File_Descriptor; type File_Type is access all File_Descriptor; Null_File : constant File_Type := null; end SPARK_IO; spark-2012.0.deb/examiner/sem-wf_renaming_declaration.adb0000644000175000017500000033220511753202336022351 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with E_Strings; separate (Sem) procedure Wf_Renaming_Declaration (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) is Spec_Node : STree.SyntaxNode; -------------------------------------------------------------------------- procedure Check_Position (Node : in STree.SyntaxNode; Pack_String : in LexTokenManager.Lex_String; Scope : in Dictionary.Scopes; Pos_OK : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Pack_String, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# Pos_OK from LexTokenManager.State, --# Node, --# Pack_String, --# Scope, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.renaming_declaration; is Last_Node : STree.SyntaxNode; Ident : LexTokenManager.Lex_String; begin if Dictionary.Get_Visibility (Scope => Scope) = Dictionary.Visible then -- If this renaming is part of the visible part of a package, then -- it must be in legal position because syntax ensures it is. Pos_OK := True; else Last_Node := Parent_Node (Current_Node => Node); -- ASSUME Last_Node = initial_declarative_item_rep OR renaming_declaration_rep if Syntax_Node_Type (Node => Last_Node) = SP_Symbols.initial_declarative_item_rep then -- ASSUME Last_Node = initial_declarative_item_rep -- If a renaming is part of an initial_declarative_item_rep, then -- it should be following an embedded package declaration Ident := Find_Previous_Package (Node => Last_Node); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq then -- There is no preceding package ErrorHandler.Semantic_Error (Err_Num => 300, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); Pos_OK := False; elsif LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Ident, Lex_Str2 => Pack_String) /= LexTokenManager.Str_Eq then -- There is a preceding package, but it's the wrong one! 
ErrorHandler.Semantic_Error (Err_Num => 301, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Ident); Pos_OK := False; else -- There is a preceding package and it _is_ the one -- mentioned in the renaming Pos_OK := True; end if; elsif Syntax_Node_Type (Node => Last_Node) = SP_Symbols.renaming_declaration_rep then -- ASSUME Last_Node = renaming_declaration_rep -- Find the enclosing declarative_part node loop Last_Node := Parent_Node (Current_Node => Last_Node); exit when Syntax_Node_Type (Node => Last_Node) = SP_Symbols.declarative_part; end loop; -- ASSUME Last_Node = declarative_part Last_Node := Parent_Node (Current_Node => Last_Node); -- ASSUME Last_Node = subprogram_implementation OR package_implementation if Syntax_Node_Type (Node => Last_Node) = SP_Symbols.subprogram_implementation then -- ASSUME Last_Node = subprogram_implementation Last_Node := Parent_Node (Current_Node => Last_Node); -- ASSUME Last_Node = subprogram_body OR not_overriding_subprogram_body OR task_body OR entry_body if Syntax_Node_Type (Node => Last_Node) = SP_Symbols.not_overriding_subprogram_body then -- ASSUME Last_Node = not_overriding_subprogram_body Last_Node := Parent_Node (Current_Node => Last_Node); -- ASSUME Last_Node = protected_operation_item OR generic_subprogram_body OR main_program_declaration if Syntax_Node_Type (Node => Last_Node) = SP_Symbols.protected_operation_item or else Syntax_Node_Type (Node => Last_Node) = SP_Symbols.generic_subprogram_body then -- ASSUME Last_Node = protected_operation_item OR generic_subprogram_body -- If a renaming is in a subprogram implementation, but that subprogram -- isn't the main program, then error ErrorHandler.Semantic_Error (Err_Num => 300, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); Pos_OK := False; elsif Syntax_Node_Type (Node => Last_Node) = SP_Symbols.main_program_declaration then -- ASSUME Last_Node = main_program_declaration Pos_OK := True; else Pos_OK := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Last_Node = protected_operation_item OR generic_subprogram_body OR " & "main_program_declaration in Check_Position"); end if; elsif Syntax_Node_Type (Node => Last_Node) = SP_Symbols.subprogram_body or else Syntax_Node_Type (Node => Last_Node) = SP_Symbols.task_body or else Syntax_Node_Type (Node => Last_Node) = SP_Symbols.entry_body then -- ASSUME Last_Node = subprogram_body OR task_body OR entry_body ErrorHandler.Semantic_Error (Err_Num => 300, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); Pos_OK := False; else Pos_OK := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Last_Node = subprogram_body OR not_overriding_subprogram_body OR " & "task_body OR entry_body in Check_Position"); end if; elsif Syntax_Node_Type (Node => Last_Node) = SP_Symbols.package_implementation then -- ASSUME Last_Node = package_implementation Pos_OK := True; else Pos_OK := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Last_Node = subprogram_implementation in Check_Position"); end if; else Pos_OK := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Last_Node = initial_declarative_item_rep OR renaming_declaration_rep in Check_Position"); end if; end if; end Check_Position; 
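   --------------------------------------------------------------------------
   -- Illustrative note only (not part of the original source). As a sketch of
   -- the positions Check_Position accepts: an operator renaming placed
   -- directly after the embedded package that declares the operand type is in
   -- a legal position, e.g.
   --
   --    package P is
   --       type T is range 0 .. 10;
   --    end P;
   --
   --    function "+" (Left, Right : P.T) return P.T renames P."+";
   --
   -- whereas a renaming with no preceding package, after the wrong package,
   -- or inside an ordinary (non-main-program) subprogram body is rejected
   -- with semantic error 300 or 301 above. The names P and T are
   -- hypothetical.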
-------------------------------------------------------------------------- procedure Check_Operator_Renaming (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.renaming_declaration; --# post STree.Table = STree.Table~; is type Expected_Argument_Type is (None, One, One_Or_Two, Two); type Param_Count_Type is (One_Parameter, Two_Parameters, Too_Many_Parameters); Pack_Node : STree.SyntaxNode; Type_Node : STree.SyntaxNode; Formal_Node : STree.SyntaxNode; Op2_Node : STree.SyntaxNode; Op_Node : STree.SyntaxNode; First_Param_Node : STree.SyntaxNode; Second_Param_Node : STree.SyntaxNode; Second_Type_Node : STree.SyntaxNode; First_Type_Node : STree.SyntaxNode; First_Type, Second_Type, Return_Type, Return_Type_Given : Dictionary.Symbol; Op1, Op2 : SP_Symbols.SP_Symbol; Expected1, Expected2_Unused : Expected_Argument_Type; OK, Defined, Already_Visible : Boolean; Number_Found : Param_Count_Type; Op_Name1_Unused, Op_Name2 : LexTokenManager.Lex_String; ------------------------------------------------------------------ procedure Wf_Operator_Symbol (Node : in STree.SyntaxNode; Op : out SP_Symbols.SP_Symbol; Op_Name : out LexTokenManager.Lex_String; Params : out Expected_Argument_Type) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table & --# Op, --# Op_Name, --# Params from LexTokenManager.State, --# Node, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.operator_symbol; is Op_String : E_Strings.T; begin Op_Name := Node_Lex_String (Node => Child_Node (Current_Node => Node)); Op_String := LexTokenManager.Lex_String_To_String (Lex_Str => Op_Name); if E_Strings.Get_Element (E_Str => Op_String, Pos => 2) = '=' and then E_Strings.Get_Length (E_Str => Op_String) = 3 then Op := SP_Symbols.equals; Params := Two; elsif E_Strings.Get_Element (E_Str => Op_String, Pos => 2) = '/' and then E_Strings.Get_Element (E_Str => Op_String, Pos => 3) = '=' and then E_Strings.Get_Length (E_Str => Op_String) = 4 then Op := SP_Symbols.not_equal; Params := Two; elsif E_Strings.Get_Element (E_Str => Op_String, Pos => 2) = '<' and then E_Strings.Get_Length (E_Str => Op_String) = 3 then Op := SP_Symbols.less_than; Params := Two; elsif E_Strings.Get_Element (E_Str => Op_String, Pos => 2) = '<' and then E_Strings.Get_Element (E_Str => Op_String, Pos => 3) = '=' and then E_Strings.Get_Length (E_Str => 
Op_String) = 4 then Op := SP_Symbols.less_or_equal; Params := Two; elsif E_Strings.Get_Element (E_Str => Op_String, Pos => 2) = '>' and then E_Strings.Get_Length (E_Str => Op_String) = 3 then Op := SP_Symbols.greater_than; Params := Two; elsif E_Strings.Get_Element (E_Str => Op_String, Pos => 2) = '>' and then E_Strings.Get_Element (E_Str => Op_String, Pos => 3) = '=' and then E_Strings.Get_Length (E_Str => Op_String) = 4 then Op := SP_Symbols.greater_or_equal; Params := Two; elsif E_Strings.Get_Element (E_Str => Op_String, Pos => 2) = '+' and then E_Strings.Get_Length (E_Str => Op_String) = 3 then Op := SP_Symbols.plus; Params := One_Or_Two; elsif E_Strings.Get_Element (E_Str => Op_String, Pos => 2) = '-' and then E_Strings.Get_Length (E_Str => Op_String) = 3 then Op := SP_Symbols.minus; Params := One_Or_Two; elsif E_Strings.Get_Element (E_Str => Op_String, Pos => 2) = '/' and then E_Strings.Get_Length (E_Str => Op_String) = 3 then Op := SP_Symbols.divide; Params := Two; elsif E_Strings.Get_Element (E_Str => Op_String, Pos => 2) = '*' and then E_Strings.Get_Length (E_Str => Op_String) = 3 then Op := SP_Symbols.multiply; Params := Two; elsif E_Strings.Get_Element (E_Str => Op_String, Pos => 2) = '&' and then E_Strings.Get_Length (E_Str => Op_String) = 3 then Op := SP_Symbols.ampersand; Params := Two; elsif E_Strings.Get_Element (E_Str => Op_String, Pos => 2) = '*' and then E_Strings.Get_Element (E_Str => Op_String, Pos => 3) = '*' and then E_Strings.Get_Length (E_Str => Op_String) = 4 then Op := SP_Symbols.double_star; Params := Two; elsif E_Strings.Get_Element (E_Str => Op_String, Pos => 2) = 'r' and then E_Strings.Get_Element (E_Str => Op_String, Pos => 3) = 'e' and then E_Strings.Get_Element (E_Str => Op_String, Pos => 4) = 'm' and then E_Strings.Get_Length (E_Str => Op_String) = 5 then Op := SP_Symbols.RWrem; Params := Two; elsif E_Strings.Get_Element (E_Str => Op_String, Pos => 2) = 'm' and then E_Strings.Get_Element (E_Str => Op_String, Pos => 3) = 'o' and then E_Strings.Get_Element (E_Str => Op_String, Pos => 4) = 'd' and then E_Strings.Get_Length (E_Str => Op_String) = 5 then Op := SP_Symbols.RWmod; Params := Two; elsif E_Strings.Get_Element (E_Str => Op_String, Pos => 2) = 'a' and then E_Strings.Get_Element (E_Str => Op_String, Pos => 3) = 'b' and then E_Strings.Get_Element (E_Str => Op_String, Pos => 4) = 's' and then E_Strings.Get_Length (E_Str => Op_String) = 5 then Op := SP_Symbols.RWabs; Params := One; elsif E_Strings.Get_Element (E_Str => Op_String, Pos => 2) = 'a' and then E_Strings.Get_Element (E_Str => Op_String, Pos => 3) = 'n' and then E_Strings.Get_Element (E_Str => Op_String, Pos => 4) = 'd' and then E_Strings.Get_Length (E_Str => Op_String) = 5 then Op := SP_Symbols.RWand; Params := Two; elsif E_Strings.Get_Element (E_Str => Op_String, Pos => 2) = 'o' and then E_Strings.Get_Element (E_Str => Op_String, Pos => 3) = 'r' and then E_Strings.Get_Length (E_Str => Op_String) = 4 then Op := SP_Symbols.RWor; Params := Two; elsif E_Strings.Get_Element (E_Str => Op_String, Pos => 2) = 'x' and then E_Strings.Get_Element (E_Str => Op_String, Pos => 3) = 'o' and then E_Strings.Get_Element (E_Str => Op_String, Pos => 4) = 'r' and then E_Strings.Get_Length (E_Str => Op_String) = 5 then Op := SP_Symbols.RWxor; Params := Two; elsif E_Strings.Get_Element (E_Str => Op_String, Pos => 2) = 'n' and then E_Strings.Get_Element (E_Str => Op_String, Pos => 3) = 'o' and then E_Strings.Get_Element (E_Str => Op_String, Pos => 4) = 't' and then E_Strings.Get_Length (E_Str => Op_String) = 5 then Op 
:= SP_Symbols.RWnot; Params := One; else Op := SP_Symbols.SPEND; Op_Name := LexTokenManager.Null_String; Params := None; ErrorHandler.Semantic_Error (Err_Num => 20, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; end Wf_Operator_Symbol; ------------------------------------------------------------------------- procedure Get_Parameters (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Number_Found : out Param_Count_Type; First_Type : out Dictionary.Symbol; Second_Type : out Dictionary.Symbol; First_Node : out STree.SyntaxNode; Second_Node : out STree.SyntaxNode; First_Type_Node : out STree.SyntaxNode; Second_Type_Node : out STree.SyntaxNode) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# First_Node, --# First_Type_Node from Node, --# STree.Table & --# First_Type, --# Number_Found, --# Second_Node, --# Second_Type, --# Second_Type_Node, --# STree.Table from CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.formal_part; --# post STree.Table = STree.Table~ and --# Syntax_Node_Type (First_Node, STree.Table) = SP_Symbols.identifier and --# (Syntax_Node_Type (Second_Node, STree.Table) = SP_Symbols.identifier or --# Second_Node = STree.NullNode) and --# (Syntax_Node_Type (First_Type_Node, STree.Table) = SP_Symbols.type_mark or --# First_Type_Node = STree.NullNode) and --# (Syntax_Node_Type (Second_Type_Node, STree.Table) = SP_Symbols.type_mark or --# Second_Type_Node = STree.NullNode) and --# ((First_Type_Node = STree.NullNode) -> (Number_Found = Too_Many_Parameters)) and --# ((Second_Type_Node = STree.NullNode) -> (Number_Found = Too_Many_Parameters)); is Next_Node : STree.SyntaxNode; Type_Sym : Dictionary.Symbol; ----------------------------------------- procedure Check_Mode (Mode_Node : in STree.SyntaxNode) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Mode_Node, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (Mode_Node, STree.Table) = SP_Symbols.mode; is Next_Node : STree.SyntaxNode; begin Next_Node := Child_Node (Current_Node => Mode_Node); -- ASSUME Next_Node = in_mode OR inout_mode OR out_mode OR NULL if Syntax_Node_Type (Node => Next_Node) = SP_Symbols.inout_mode or else Syntax_Node_Type (Node => Next_Node) = SP_Symbols.out_mode then -- ASSUME Next_Node = inout_mode OR out_mode ErrorHandler.Semantic_Error (Err_Num => 64, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Parent_Node (Current_Node => Mode_Node)), Id_Str => LexTokenManager.Null_String); elsif Next_Node /= STree.NullNode and then Syntax_Node_Type (Node => Next_Node) /= SP_Symbols.in_mode then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = in_mode OR 
inout_mode OR out_mode OR NULL in Check_Mode"); end if; end Check_Mode; ----------------------------------------- procedure Check_Extra_Branches (Node : in STree.SyntaxNode; Count : in out Param_Count_Type) --# global in STree.Table; --# derives Count from *, --# Node, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.type_mark; is Next_Node : STree.SyntaxNode; begin Next_Node := Parent_Node (Current_Node => Node); if Syntax_Node_Type (Node => Next_Node) = SP_Symbols.parameter_specification then -- ASSUME Next_Node = parameter_specification Next_Node := Parent_Node (Current_Node => Next_Node); if Syntax_Node_Type (Node => Next_Node) = SP_Symbols.formal_part_rep then -- ASSUME Next_Node = formal_part_rep Next_Node := Parent_Node (Current_Node => Next_Node); -- ASSUME Next_Node = formal_part OR formal_part_rep if Syntax_Node_Type (Node => Next_Node) = SP_Symbols.formal_part_rep then -- ASSUME Next_Node = formal_part_rep Count := Too_Many_Parameters; elsif Syntax_Node_Type (Node => Next_Node) /= SP_Symbols.formal_part then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = formal_part_rep OR formal_part in Check_Extra_Branches"); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = formal_part_rep in Check_Extra_Branches"); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = parameter_specification in Check_Extra_Branches"); end if; end Check_Extra_Branches; begin -- Get_Parameters First_Node := Last_Child_Of (Start_Node => Node); -- ASSUME First_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => First_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect First_Node = identifier in Get_Parameters"); Next_Node := Next_Sibling (Current_Node => Parent_Node (Current_Node => First_Node)); -- ASSUME Next_Node = identifier OR mode if Syntax_Node_Type (Node => Next_Node) = SP_Symbols.identifier then -- ASSUME Next_Node = identifier -- "(LEFT, RIGHT : TYPE)" construction Number_Found := Two_Parameters; Second_Node := Next_Node; Next_Node := Next_Sibling (Current_Node => Parent_Node (Current_Node => Next_Node)); -- ASSUME Next_Node = identifier OR mode if Syntax_Node_Type (Node => Next_Node) = SP_Symbols.identifier then -- ASSUME Next_Node = identifier -- "(LEFT, RIGHT, OTHER : TYPE)" construction => Too_Many_Parameters First_Type_Node := STree.NullNode; Second_Type_Node := STree.NullNode; First_Type := Dictionary.GetUnknownTypeMark; Second_Type := Dictionary.GetUnknownTypeMark; Number_Found := Too_Many_Parameters; elsif Syntax_Node_Type (Node => Next_Node) = SP_Symbols.mode then -- ASSUME Next_Node = mode -- "(LEFT, RIGHT : TYPE)" construction Check_Mode (Mode_Node => Next_Node); Next_Node := Next_Sibling (Current_Node => Next_Node); -- ASSUME Next_Node = type_mark SystemErrors.RT_Assert (C => Next_Node /= STree.NullNode and then Syntax_Node_Type (Node => Next_Node) = SP_Symbols.type_mark, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = type_mark in Get_Parameters"); First_Type_Node := Next_Node; Second_Type_Node := Next_Node; Wf_Type_Mark (Node => Next_Node, Current_Scope => Scope, Context => Dictionary.ProgramContext, Type_Sym => Type_Sym); First_Type := Type_Sym; Second_Type := Type_Sym; -- Check "(LEFT, RIGHT : TYPE; OTHER : TYPE)" construction => Too_Many_Parameters Check_Extra_Branches (Node => Next_Node, Count => Number_Found); 
else First_Type_Node := STree.NullNode; Second_Type_Node := STree.NullNode; First_Type := Dictionary.GetUnknownTypeMark; Second_Type := Dictionary.GetUnknownTypeMark; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = identifier OR mode in Get_Parameters"); end if; elsif Syntax_Node_Type (Node => Next_Node) = SP_Symbols.mode then -- ASSUME Next_Node = mode -- "(LEFT : TYPE; RIGHT : TYPE)" or "(RIGHT : TYPE)" construction Check_Mode (Mode_Node => Next_Node); Next_Node := Next_Sibling (Current_Node => Next_Node); -- ASSUME Next_Node = type_mark SystemErrors.RT_Assert (C => Next_Node /= STree.NullNode and then Syntax_Node_Type (Node => Next_Node) = SP_Symbols.type_mark, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = type_mark in Get_Parameters"); First_Type_Node := Next_Node; Second_Type_Node := Next_Node; Wf_Type_Mark (Node => Next_Node, Current_Scope => Scope, Context => Dictionary.ProgramContext, Type_Sym => First_Type); Next_Node := Next_Sibling (Current_Node => Parent_Node (Current_Node => Parent_Node (Current_Node => Next_Node))); -- ASSUME Next_Node = parameter_specification OR NULL if Syntax_Node_Type (Node => Next_Node) = SP_Symbols.parameter_specification then -- ASSUME Next_Node = parameter_specification -- "(LEFT : TYPE; RIGHT : TYPE)" construction Number_Found := Two_Parameters; Next_Node := Last_Child_Of (Start_Node => Next_Node); -- ASSUME Next_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Next_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = identifier in Get_Parameters"); Second_Node := Next_Node; Next_Node := Next_Sibling (Current_Node => Parent_Node (Current_Node => Next_Node)); -- ASSUME Next_Node = identifier OR mode if Syntax_Node_Type (Node => Next_Node) = SP_Symbols.identifier then -- ASSUME Next_Node = identifier -- "(LEFT : TYPE; RIGHT, OTHER : TYPE)" construction => Too_Many_Parameters Second_Type := Dictionary.GetUnknownTypeMark; Number_Found := Too_Many_Parameters; elsif Syntax_Node_Type (Node => Next_Node) = SP_Symbols.mode then -- ASSUME Next_Node = mode -- "(LEFT : TYPE; RIGHT : TYPE)" construction Check_Mode (Mode_Node => Next_Node); Next_Node := Next_Sibling (Current_Node => Next_Node); -- ASSUME Next_Node = type_mark SystemErrors.RT_Assert (C => Next_Node /= STree.NullNode and then Syntax_Node_Type (Node => Next_Node) = SP_Symbols.type_mark, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = type_mark in Get_Parameters"); Second_Type_Node := Next_Node; Wf_Type_Mark (Node => Next_Node, Current_Scope => Scope, Context => Dictionary.ProgramContext, Type_Sym => Second_Type); -- Check "(LEFT : TYPE; RIGHT : TYPE; OTHER : TYPE)" construction => Too_Many_Parameters Check_Extra_Branches (Node => Next_Node, Count => Number_Found); else Second_Type := Dictionary.GetUnknownTypeMark; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = identifier OR mode in Get_Parameters"); end if; elsif Next_Node = STree.NullNode then -- "(RIGHT : TYPE)" construction Second_Node := STree.NullNode; Second_Type := Dictionary.GetUnknownTypeMark; Number_Found := One_Parameter; else Second_Node := STree.NullNode; Second_Type := Dictionary.GetUnknownTypeMark; Number_Found := Too_Many_Parameters; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = parameter_specification OR NULL in Get_Parameters"); end if; else Second_Node := 
STree.NullNode; First_Type_Node := STree.NullNode; Second_Type_Node := STree.NullNode; First_Type := Dictionary.GetUnknownTypeMark; Second_Type := Dictionary.GetUnknownTypeMark; Number_Found := Too_Many_Parameters; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = identifier OR mode in Get_Parameters"); end if; end Get_Parameters; ------------------------------------------------------------------------- function Number_Correct (Expected : Expected_Argument_Type; Found : Param_Count_Type) return Boolean --# return B => ((Found = Too_Many_Parameters) -> (not B)); is begin return Found /= Too_Many_Parameters and then (Found /= Two_Parameters or else Expected /= One) and then (Found /= One_Parameter or else Expected /= Two); end Number_Correct; ------------------------------------------------------------------------- procedure Check_Names_Right (Number_Found : in Param_Count_Type; First_Node : in STree.SyntaxNode; Second_Node : in STree.SyntaxNode) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# First_Node, --# LexTokenManager.State, --# Number_Found, --# Second_Node, --# SPARK_IO.File_Sys, --# STree.Table; --# pre Syntax_Node_Type (First_Node, STree.Table) = SP_Symbols.identifier and --# (Syntax_Node_Type (Second_Node, STree.Table) = SP_Symbols.identifier or --# Second_Node = STree.NullNode); is begin if Number_Found = One_Parameter then if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Node => First_Node), Lex_Str2 => LexTokenManager.Right_Token) /= LexTokenManager.Str_Eq then ErrorHandler.Semantic_Error (Err_Num => 65, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => First_Node), Id_Str => LexTokenManager.Null_String); end if; elsif Number_Found = Two_Parameters then if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Node => First_Node), Lex_Str2 => LexTokenManager.Left_Token) /= LexTokenManager.Str_Eq then ErrorHandler.Semantic_Error (Err_Num => 65, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => First_Node), Id_Str => LexTokenManager.Null_String); end if; -- ASSUME Second_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Second_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Second_Node = identifier in Check_Names_Right"); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Node => Second_Node), Lex_Str2 => LexTokenManager.Right_Token) /= LexTokenManager.Str_Eq then ErrorHandler.Semantic_Error (Err_Num => 65, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Second_Node), Id_Str => LexTokenManager.Null_String); end if; end if; end Check_Names_Right; ------------------------------------------------------------------------- procedure Check_Types (Number_Found : in Param_Count_Type; Op : in SP_Symbols.SP_Symbol; Scope : in Dictionary.Scopes; First_Type, Second_Type : in Dictionary.Symbol; Op_Node : in STree.SyntaxNode; OK : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# 
derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# First_Type, --# LexTokenManager.State, --# Number_Found, --# Op, --# Op_Node, --# Scope, --# Second_Type, --# SPARK_IO.File_Sys, --# STree.Table & --# OK from CommandLineData.Content, --# Dictionary.Dict, --# First_Type, --# Number_Found, --# Op, --# Scope, --# Second_Type; --# pre Syntax_Node_Type (Op_Node, STree.Table) = SP_Symbols.operator_symbol; is begin OK := True; if Number_Found = One_Parameter then if not (Dictionary.IsType (First_Type) or else Dictionary.IsUnknownTypeMark (First_Type)) then OK := False; ErrorHandler.Semantic_Error (Err_Num => 68, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Op_Node), Id_Str => LexTokenManager.Null_String); elsif Dictionary.TypeIsLimited (First_Type, Scope) -- For private type Time_Span, unary "-" and "abs" can be renamed. -- No other unary operators are renamable for any private type. -- Note that we do not need to do a test to see if the operator is -- defined here, since that is covered by Check_Operator. or else (Dictionary.IsPrivateType (First_Type, Scope) and then (not CommandLineData.Ravenscar_Selected or else not Dictionary.IsPredefinedTimeSpanType (First_Type))) then OK := False; ErrorHandler.Semantic_Error_Sym (Err_Num => 119, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Sibling (Current_Node => Op_Node)), Sym => First_Type, Scope => Scope); elsif (Dictionary.TypeIsModular (First_Type) and then (Op = SP_Symbols.RWabs or else Op = SP_Symbols.plus or else Op = SP_Symbols.minus)) then -- Unary arithmetic operators are not allowed for modular types, -- so their renamings must also be illegal. Note we _do_ allow -- logical unary "not" on modular types. OK := False; ErrorHandler.Semantic_Error (Err_Num => 803, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Op_Node), Id_Str => LexTokenManager.Null_String); end if; elsif Number_Found = Two_Parameters then if not ((Dictionary.IsType (First_Type) or else Dictionary.IsUnknownTypeMark (First_Type)) and then (Dictionary.IsType (Second_Type) or else Dictionary.IsUnknownTypeMark (Second_Type))) then OK := False; ErrorHandler.Semantic_Error (Err_Num => 68, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Op_Node), Id_Str => LexTokenManager.Null_String); elsif Dictionary.TypeIsLimited (First_Type, Scope) or else Dictionary.TypeIsLimited (Second_Type, Scope) -- For private types Time and Time_Span, all defined binary operators -- can be renamed. For other private types, only "=" can be renamed. 
or else (Dictionary.IsPrivateType (First_Type, Scope) and then Op /= SP_Symbols.equals and then (not CommandLineData.Ravenscar_Selected or else not Dictionary.IsPredefinedTimeType (First_Type))) or else (Dictionary.IsPrivateType (Second_Type, Scope) and then Op /= SP_Symbols.equals and then (not CommandLineData.Ravenscar_Selected or else not Dictionary.IsPredefinedTimeType (Second_Type))) then OK := False; ErrorHandler.Semantic_Error_Sym2 (Err_Num => 35, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Sibling (Current_Node => Op_Node)), Sym => First_Type, Sym2 => Second_Type, Scope => Scope); end if; else OK := False; end if; end Check_Types; ------------------------------------------------------------------------- procedure Check_Operator (Number_Found : in Param_Count_Type; Op : in SP_Symbols.SP_Symbol; Scope : in Dictionary.Scopes; First_Type, Second_Type : in Dictionary.Symbol; Defined, Already_Visible : out Boolean; Return_Type : out Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# derives Already_Visible from CommandLineData.Content, --# Dictionary.Dict, --# First_Type, --# Number_Found, --# Op, --# Scope, --# Second_Type & --# Defined, --# Return_Type from CommandLineData.Content, --# Dictionary.Dict, --# First_Type, --# Number_Found, --# Op, --# Second_Type; is begin if Number_Found = One_Parameter then Defined := Dictionary.UnaryOperatorIsDefined (Op, First_Type); if Defined then Already_Visible := Dictionary.UnaryOperatorIsVisible (Op, First_Type, Scope); Return_Type := First_Type; else Already_Visible := False; Return_Type := Dictionary.GetUnknownTypeMark; end if; elsif Number_Found = Two_Parameters then Return_Type := Dictionary.Get_Binary_Operator_Type (Name => Op, Left => First_Type, Right => Second_Type); if Dictionary.Is_Null_Symbol (Return_Type) then Already_Visible := False; Return_Type := Dictionary.GetUnknownTypeMark; Defined := False; else Already_Visible := Dictionary.BinaryOperatorIsVisible (Op, First_Type, Second_Type, Scope); Defined := True; end if; else Already_Visible := False; Return_Type := Dictionary.GetUnknownTypeMark; Defined := False; end if; end Check_Operator; ------------------------------------------------------------------------ function Renamed_From_Right_Place (Pack_Node, First_Type_Node, Second_Type_Node : STree.SyntaxNode) return Boolean --# global in LexTokenManager.State; --# in STree.Table; --# pre (Syntax_Node_Type (Pack_Node, STree.Table) = SP_Symbols.dotted_simple_name or --# Syntax_Node_Type (Pack_Node, STree.Table) = SP_Symbols.identifier) and --# Syntax_Node_Type (First_Type_Node, STree.Table) = SP_Symbols.type_mark and --# Syntax_Node_Type (Second_Type_Node, STree.Table) = SP_Symbols.type_mark; is OK : Boolean; ------------------------------------------------------------------------ function Names_Match (Pack_Node, Type_Node : STree.SyntaxNode) return Boolean --# global in LexTokenManager.State; --# in STree.Table; --# pre (Syntax_Node_Type (Pack_Node, STree.Table) = SP_Symbols.dotted_simple_name or --# Syntax_Node_Type (Pack_Node, STree.Table) = SP_Symbols.identifier) and --# Syntax_Node_Type (Type_Node, STree.Table) = SP_Symbols.type_mark; is Curr_Pack_Node, Curr_Type_Node : STree.SyntaxNode; Match : Boolean; Pack_Done, Type_Done : Boolean; begin Curr_Type_Node := Child_Node (Current_Node => Child_Node (Current_Node => Type_Node)); -- ASSUME Curr_Type_Node = identifier OR dotted_simple_name if Syntax_Node_Type (Node => Curr_Type_Node) = SP_Symbols.identifier then -- 
ASSUME Curr_Type_Node = identifier -- no prefix on type mark Match := False; elsif Syntax_Node_Type (Node => Curr_Type_Node) = SP_Symbols.dotted_simple_name then -- ASSUME Curr_Type_Node = dotted_simple_name Curr_Pack_Node := Last_Child_Of (Start_Node => Pack_Node); -- ASSUME Curr_Pack_Node = identifier Curr_Type_Node := Last_Child_Of (Start_Node => Curr_Type_Node); -- ASSUME Curr_Type_Node = identifier loop SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Curr_Pack_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Curr_Pack_Node = identifier in Names_Match"); SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Curr_Type_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Curr_Type_Node = identifier in Names_Match"); Match := LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Node => Curr_Pack_Node), Lex_Str2 => Node_Lex_String (Node => Curr_Type_Node)) = LexTokenManager.Str_Eq; exit when not Match; Curr_Pack_Node := Parent_Node (Current_Node => Curr_Pack_Node); -- ASSUME Curr_Pack_Node = dotted_simple_name SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Curr_Pack_Node) = SP_Symbols.dotted_simple_name, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Curr_Pack_Node = dotted_simple_name in Names_Match"); Pack_Done := Curr_Pack_Node = Parent_Node (Current_Node => Pack_Node); Curr_Type_Node := Parent_Node (Current_Node => Curr_Type_Node); -- ASSUME Curr_Type_Node = dotted_simple_name SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Curr_Type_Node) = SP_Symbols.dotted_simple_name, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Curr_Type_Node = dotted_simple_name in Names_Match"); Type_Done := Curr_Type_Node = Child_Node (Current_Node => Child_Node (Current_Node => Type_Node)); if Pack_Done or else Type_Done then Match := Pack_Done and then Type_Done; exit; end if; -- move on to next identifiers in names: Curr_Pack_Node := Next_Sibling (Current_Node => Curr_Pack_Node); -- ASSUME Curr_Pack_Node = identifier Curr_Type_Node := Next_Sibling (Current_Node => Curr_Type_Node); -- ASSUME Curr_Type_Node = identifier end loop; else Match := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Curr_Type_Node = identifier OR dotted_simple_name in Names_Match"); end if; return Match; end Names_Match; begin -- Renamed_From_Right_Place OK := Names_Match (Pack_Node => Pack_Node, Type_Node => First_Type_Node); if not OK then OK := Names_Match (Pack_Node => Pack_Node, Type_Node => Second_Type_Node); end if; return OK; end Renamed_From_Right_Place; ------------------------------------------------------------------------ procedure Do_Rename (Number_Found : in Param_Count_Type; Op : in SP_Symbols.SP_Symbol; First_Type, Second_Type : in Dictionary.Symbol; Scope : in Dictionary.Scopes; Declaration : in Dictionary.Location; Op_Node : in STree.SyntaxNode) --# global in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict from *, --# ContextManager.Ops.Unit_Stack, --# Declaration, --# First_Type, --# Number_Found, --# Op, --# Scope, --# Second_Type & --# SPARK_IO.File_Sys from *, --# ContextManager.Ops.Unit_Stack, --# Declaration, --# Dictionary.Dict, --# First_Type, --# LexTokenManager.State, --# Number_Found, --# Op, --# Scope, --# Second_Type & --# STree.Table from *, --# ContextManager.Ops.Unit_Stack, --# 
Declaration, --# Dictionary.Dict, --# First_Type, --# Number_Found, --# Op, --# Op_Node, --# Scope, --# Second_Type; --# pre Syntax_Node_Type (Op_Node, STree.Table) = SP_Symbols.operator_symbol; --# post STree.Table = STree.Table~; is Op_Sym : Dictionary.Symbol; begin case Number_Found is when One_Parameter => Dictionary.RenameUnaryOperator (Name => Op, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Declaration, Operand => First_Type, Scope => Scope, Op_Sym => Op_Sym); when Two_Parameters => Dictionary.RenameBinaryOperator (Name => Op, Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Declaration, Left => First_Type, Right => Second_Type, Scope => Scope, Op_Sym => Op_Sym); when Too_Many_Parameters => Op_Sym := Dictionary.NullSymbol; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "in Check_Operator_Renaming.Do_Rename"); end case; STree.Add_Node_Symbol (Node => Op_Node, Sym => Op_Sym); end Do_Rename; begin -- Check_Operator_Renaming Op_Node := Child_Node (Current_Node => Node); -- ASSUME Op_Node = operator_symbol SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Op_Node) = SP_Symbols.operator_symbol, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Op_Node = operator_symbol in Check_Operator_Renaming"); Formal_Node := Next_Sibling (Current_Node => Op_Node); -- ASSUME Formal_Node = formal_part SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Formal_Node) = SP_Symbols.formal_part, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Formal_Node = formal_part in Check_Operator_Renaming"); Type_Node := Next_Sibling (Current_Node => Formal_Node); -- ASSUME Type_Node = type_mark SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Type_Node) = SP_Symbols.type_mark, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Type_Node = type_mark in Check_Operator_Renaming"); Pack_Node := Child_Node (Current_Node => Next_Sibling (Current_Node => Type_Node)); -- ASSUME Pack_Node = dotted_simple_name OR identifier Op2_Node := Next_Sibling (Current_Node => Next_Sibling (Current_Node => Type_Node)); -- ASSUME Op2_Node = operator_symbol SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Op2_Node) = SP_Symbols.operator_symbol, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Op2_Node = operator_symbol in Check_Operator_Renaming"); if Syntax_Node_Type (Node => Pack_Node) = SP_Symbols.dotted_simple_name then -- ASSUME Pack_Node = dotted_simple_name case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => -- report error at second identifier in name Pack_Node := Next_Sibling (Current_Node => Parent_Node (Current_Node => Last_Child_Of (Start_Node => Pack_Node))); -- ASSUME Pack_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Pack_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Pack_Node = identifier in Check_Operator_Renaming"); ErrorHandler.Semantic_Error (Err_Num => 610, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Pack_Node), Id_Str => LexTokenManager.Null_String); OK := False; when CommandLineData.SPARK95_Onwards => Check_Position (Node => Node, Pack_String => LexTokenManager.Null_String, Scope => Scope, Pos_OK => OK); end case; elsif Syntax_Node_Type (Node => Pack_Node) = SP_Symbols.identifier then -- ASSUME Pack_Node = identifier Check_Position (Node => Node, Pack_String => Node_Lex_String (Node => Pack_Node), Scope => Scope, Pos_OK => OK); else OK := False; SystemErrors.Fatal_Error 
(Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Pack_Node = dotted_simple_name OR identifier in Check_Operator_Renaming"); end if; if OK then --# accept Flow, 10, Op_Name1_Unused, "Expected ineffective assignment"; Wf_Operator_Symbol (Node => Op_Node, Op => Op1, Op_Name => Op_Name1_Unused, Params => Expected1); --# end accept; --# accept Flow, 10, Expected2_Unused, "Expected ineffective assignment"; Wf_Operator_Symbol (Node => Op2_Node, Op => Op2, Op_Name => Op_Name2, Params => Expected2_Unused); --# end accept; if Op1 /= Op2 then ErrorHandler.Semantic_Error (Err_Num => 303, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Op2_Node), Id_Str => LexTokenManager.Null_String); -- check that /= is not being renamed elsif Op1 = SP_Symbols.not_equal then ErrorHandler.Semantic_Error (Err_Num => 304, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Op_Node), Id_Str => Node_Lex_String (Node => Child_Node (Current_Node => Op_Node))); else -- operator strings are the same (and neither is /=) Get_Parameters (Node => Formal_Node, Scope => Scope, Number_Found => Number_Found, First_Type => First_Type, Second_Type => Second_Type, First_Node => First_Param_Node, Second_Node => Second_Param_Node, First_Type_Node => First_Type_Node, Second_Type_Node => Second_Type_Node); if Number_Correct (Expected => Expected1, Found => Number_Found) then Check_Names_Right (Number_Found => Number_Found, First_Node => First_Param_Node, Second_Node => Second_Param_Node); Check_Types (Number_Found => Number_Found, Op => Op1, Scope => Scope, First_Type => First_Type, Second_Type => Second_Type, Op_Node => Op_Node, OK => OK); if OK then Check_Operator (Number_Found => Number_Found, Op => Op1, Scope => Scope, First_Type => First_Type, Second_Type => Second_Type, Defined => Defined, Already_Visible => Already_Visible, Return_Type => Return_Type); if not Defined then if Number_Found = One_Parameter then ErrorHandler.Semantic_Error_Sym (Err_Num => 119, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Formal_Node), Sym => First_Type, Scope => Scope); elsif Number_Found = Two_Parameters then ErrorHandler.Semantic_Error_Sym2 (Err_Num => 35, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Formal_Node), Sym => First_Type, Sym2 => Second_Type, Scope => Scope); end if; elsif Already_Visible then ErrorHandler.Semantic_Error (Err_Num => 306, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Op2_Node), Id_Str => Op_Name2); else Wf_Type_Mark (Node => Type_Node, Current_Scope => Scope, Context => Dictionary.ProgramContext, Type_Sym => Return_Type_Given); if not Dictionary.Types_Are_Equal (Left_Symbol => Return_Type, Right_Symbol => Return_Type_Given, Full_Range_Subtype => False) then ErrorHandler.Semantic_Error (Err_Num => 305, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Type_Node), Id_Str => Op_Name2); elsif Renamed_From_Right_Place (Pack_Node => Pack_Node, First_Type_Node => First_Type_Node, Second_Type_Node => Second_Type_Node) then Do_Rename (Number_Found => Number_Found, Op => Op1, First_Type => First_Type, Second_Type => Second_Type, Scope => Scope, Declaration => Dictionary.Location'(Start_Position => Node_Position (Node => Op2_Node), End_Position => Node_Position (Node => Op2_Node)), Op_Node => Op_Node); else if Syntax_Node_Type (Node => Pack_Node) = SP_Symbols.dotted_simple_name then -- use last identifier in package name to report error Pack_Node := Next_Sibling 
(Current_Node => Pack_Node); end if; ErrorHandler.Semantic_Error (Err_Num => 307, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Op2_Node), Id_Str => Node_Lex_String (Node => Pack_Node)); end if; end if; end if; else -- number of parameters wrong ErrorHandler.Semantic_Error (Err_Num => 305, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Formal_Node), Id_Str => Op_Name2); end if; end if; end if; --# accept Flow, 33, Expected2_Unused, "Expected to be neither referenced nor exported" & --# Flow, 33, Op_Name1_Unused, "Expected to be neither referenced nor exported"; end Check_Operator_Renaming; -------------------------------------------------------------------------- procedure Check_Subprogram_Renaming (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes) --# global in CommandLineData.Content; --# in ContextManager.Ops.Unit_Stack; --# in LexTokenManager.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Dictionary.Dict from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table & --# STree.Table from *, --# CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.renaming_declaration; --# post STree.Table = STree.Table~; is type Subprog_Sorts is (Func, Proc, Unknown); Pack_Sym, Subprog_Sym : Dictionary.Symbol; Spec_Node, Formal_Node, Pack_Node, Subprog_Node : STree.SyntaxNode; OK : Boolean; Subprog_Sort : Subprog_Sorts; -------------------------------------------- procedure Check_Names_Same (Node, Subprog_Node : in STree.SyntaxNode; OK : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table, --# Subprog_Node & --# OK from LexTokenManager.State, --# Node, --# STree.Table, --# Subprog_Node; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.renaming_declaration and --# Syntax_Node_Type (Subprog_Node, STree.Table) = SP_Symbols.identifier; is begin if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Node_Lex_String (Node => Subprog_Node), Lex_Str2 => Node_Lex_String (Node => Last_Child_Of (Start_Node => Node))) /= LexTokenManager.Str_Eq then OK := False; ErrorHandler.Semantic_Error (Err_Num => 312, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); else OK := True; end if; end Check_Names_Same; -------------------------------------------- procedure Find_Package (Scope : in Dictionary.Scopes; Pack_Node : in STree.SyntaxNode; Pack_Sym : out Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; 
--# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Pack_Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# Pack_Sym, --# STree.Table from CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Pack_Node, --# Scope, --# STree.Table; --# pre Syntax_Node_Type (Pack_Node, STree.Table) = SP_Symbols.dotted_simple_name or --# Syntax_Node_Type (Pack_Node, STree.Table) = SP_Symbols.identifier; --# post STree.Table = STree.Table~; is Curr_Node : STree.SyntaxNode; Prefix_OK : Boolean; begin Curr_Node := Last_Child_Of (Start_Node => Pack_Node); -- ASSUME Curr_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Curr_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Curr_Node = identifier in Find_Package"); Pack_Sym := Dictionary.LookupItem (Name => Node_Lex_String (Node => Curr_Node), Scope => Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); loop -- loop to handle multiple prefixes --# assert STree.Table = STree.Table~; if Dictionary.Is_Null_Symbol (Pack_Sym) then ErrorHandler.Semantic_Error (Err_Num => 1, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Curr_Node), Id_Str => Node_Lex_String (Node => Curr_Node)); exit; end if; if not Dictionary.IsPackage (Pack_Sym) then ErrorHandler.Semantic_Error (Err_Num => 18, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Curr_Node), Id_Str => Node_Lex_String (Node => Curr_Node)); Pack_Sym := Dictionary.NullSymbol; exit; end if; Check_Package_Prefix (Node_Pos => Node_Position (Node => Curr_Node), Pack_Sym => Pack_Sym, Scope => Scope, OK => Prefix_OK); if not Prefix_OK then Pack_Sym := Dictionary.NullSymbol; exit; end if; STree.Set_Node_Lex_String (Sym => Pack_Sym, Node => Curr_Node); -- finished if processed all identifiers under Pack_Node Curr_Node := Parent_Node (Current_Node => Curr_Node); -- ASSUME Curr_Node = dotted_simple_name SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Curr_Node) = SP_Symbols.dotted_simple_name, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Curr_Node = dotted_simple_name in Find_Package"); exit when Curr_Node = Parent_Node (Current_Node => Pack_Node); Curr_Node := Next_Sibling (Current_Node => Curr_Node); -- ASSUME Curr_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Curr_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Curr_Node = identifier in Find_Package"); Pack_Sym := Dictionary.LookupSelectedItem (Prefix => Pack_Sym, Selector => Node_Lex_String (Node => Curr_Node), Scope => Scope, Context => Dictionary.ProgramContext); end loop; end Find_Package; -------------------------------------------------------- procedure Find_Subprogram (Node_Pos : in LexTokenManager.Token_Position; Scope : in Dictionary.Scopes; Pack_Sym : in Dictionary.Symbol; Subprog_Node : in STree.SyntaxNode; Sort : in Subprog_Sorts; Subprog_Sym : out Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# 
LexTokenManager.State, --# Node_Pos, --# Pack_Sym, --# Scope, --# Sort, --# SPARK_IO.File_Sys, --# STree.Table, --# Subprog_Node & --# STree.Table, --# Subprog_Sym from CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Pack_Sym, --# Scope, --# STree.Table, --# Subprog_Node; --# pre Syntax_Node_Type (Subprog_Node, STree.Table) = SP_Symbols.identifier; --# post STree.Table = STree.Table~; is function Select_Error (S : Subprog_Sorts) return Natural is Res : Natural; begin case S is when Func => Res := 334; when Proc => Res := 19; when Unknown => Res := 0; end case; return Res; end Select_Error; begin -- Find_Subprogram if Dictionary.IsDefined (Name => Node_Lex_String (Node => Subprog_Node), Scope => Scope, Context => Dictionary.ProofContext, Full_Package_Name => False) then ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => Node_Lex_String (Node => Subprog_Node)); Subprog_Sym := Dictionary.NullSymbol; else Subprog_Sym := Dictionary.LookupSelectedItem (Pack_Sym, Node_Lex_String (Node => Subprog_Node), Scope, Dictionary.ProgramContext); if Dictionary.Is_Null_Symbol (Subprog_Sym) then ErrorHandler.Semantic_Error (Err_Num => 1, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Subprog_Node), Id_Str => Node_Lex_String (Node => Subprog_Node)); elsif not Dictionary.Is_Subprogram (Subprog_Sym) then ErrorHandler.Semantic_Error (Err_Num => Select_Error (S => Sort), Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Subprog_Node), Id_Str => Node_Lex_String (Node => Subprog_Node)); Subprog_Sym := Dictionary.NullSymbol; -- check to prevent 2nd renames elsif Dictionary.Is_Renamed (Subprogram => Subprog_Sym, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Pack_Sym)) or else Dictionary.Is_Renamed (Subprogram => Subprog_Sym, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Visible, The_Unit => Pack_Sym)) then ErrorHandler.Semantic_Error2 (Err_Num => 339, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Subprog_Node), Id_Str1 => Node_Lex_String (Node => Subprog_Node), Id_Str2 => Dictionary.GetSimpleName (Pack_Sym)); Subprog_Sym := Dictionary.NullSymbol; else STree.Set_Node_Lex_String (Sym => Subprog_Sym, Node => Subprog_Node); end if; end if; end Find_Subprogram; -------------------------------------------------------- procedure Check_Consistency (Scope : in Dictionary.Scopes; Subprog_Sym : in Dictionary.Symbol; Spec_Node, Formal_Node, Subprog_Node : in STree.SyntaxNode; OK : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Formal_Node, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# Spec_Node, --# STree.Table, --# Subprog_Node, --# Subprog_Sym & --# OK, --# STree.Table from CommandLineData.Content, --# Dictionary.Dict, --# Formal_Node, --# LexTokenManager.State, --# Scope, --# Spec_Node, --# STree.Table, --# Subprog_Sym; --# pre Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.subprogram_specification and --# (Syntax_Node_Type (Formal_Node, STree.Table) = SP_Symbols.formal_part or --# Syntax_Node_Type (Formal_Node, STree.Table) = SP_Symbols.type_mark or --# Formal_Node = 
STree.NullNode) and --# Syntax_Node_Type (Subprog_Node, STree.Table) = SP_Symbols.identifier; --# post STree.Table = STree.Table~; is It : Dictionary.Iterator; Param_Count : Natural; Current_Formal_Mode, Current_Mode : Dictionary.Modes; Current_Formal, Current_Formal_Type, Current_Type : Dictionary.Symbol; List_Node, Param_Node, Ident_Node : STree.SyntaxNode; Specs_It : STree.Iterator; Ident_It : STree.Iterator; Current_Formal_Ident : LexTokenManager.Lex_String; --------------------------------------------------- function Check_Right_Sort_Of_Subprogram (Sym : Dictionary.Symbol; Node : STree.SyntaxNode) return Boolean --# global in Dictionary.Dict; --# in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.subprogram_specification; is Right_Sort : Boolean; begin if Dictionary.IsFunction (Sym) then Right_Sort := Syntax_Node_Type (Node => Child_Node (Current_Node => Node)) = SP_Symbols.function_specification; else Right_Sort := Syntax_Node_Type (Node => Child_Node (Current_Node => Node)) = SP_Symbols.procedure_specification; end if; return Right_Sort; end Check_Right_Sort_Of_Subprogram; --------------------------------------------------- function Get_Mode (Node : STree.SyntaxNode) return Dictionary.Modes --# global in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.parameter_specification; is Mode_Node : STree.SyntaxNode; Result : Dictionary.Modes; begin Mode_Node := Child_Node (Current_Node => Next_Sibling (Current_Node => Child_Node (Current_Node => Node))); -- ASSUME Mode_Node = in_mode OR inout_mode OR out_mode OR NULL if Mode_Node = STree.NullNode then -- ASSUME Mode_Node = NULL Result := Dictionary.InMode; else case Syntax_Node_Type (Node => Mode_Node) is when SP_Symbols.in_mode => -- ASSUME Mode_Node = in_mode Result := Dictionary.InMode; when SP_Symbols.inout_mode => -- ASSUME Mode_Node = inout_mode Result := Dictionary.InOutMode; when SP_Symbols.out_mode => -- ASSUME Mode_Node = out_mode Result := Dictionary.OutMode; when others => Result := Dictionary.InvalidMode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Mode_Node = in_mode OR inout_mode OR out_mode OR NULL in Get_Mode"); end case; end if; return Result; end Get_Mode; --------------------------------------------------- procedure Get_Type (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Type_Sym : out Dictionary.Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# STree.Table, --# Type_Sym from CommandLineData.Content, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.parameter_specification; --# post STree.Table = STree.Table~; is Type_Node : STree.SyntaxNode; begin Type_Node := Next_Sibling (Current_Node => Next_Sibling (Current_Node => Child_Node (Current_Node => Node))); -- ASSUME Type_Node = type_mark SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Type_Node) = SP_Symbols.type_mark, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Type_Node = type_mark in Get_Type"); Wf_Type_Mark (Node => Type_Node, Current_Scope => Scope, Context => 
Dictionary.ProgramContext, Type_Sym => Type_Sym); end Get_Type; --------------------------------------------------- function Modes_Equivalent (Formal_Mode, Renamed_Mode : Dictionary.Modes) return Boolean is Equivalent : Boolean; begin if Formal_Mode = Dictionary.DefaultMode then Equivalent := Renamed_Mode = Dictionary.InMode; else Equivalent := Formal_Mode = Renamed_Mode; end if; return Equivalent; end Modes_Equivalent; --------------------------------------------------- procedure Check_Return_Type (Subprog_Sym : in Dictionary.Symbol; Formal_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Consistent : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# in out STree.Table; --# derives Consistent from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Formal_Node, --# LexTokenManager.State, --# Scope, --# STree.Table, --# Subprog_Sym & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Formal_Node, --# LexTokenManager.State, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table & --# STree.Table from *, --# CommandLineData.Content, --# Dictionary.Dict, --# Formal_Node, --# LexTokenManager.State, --# Scope; --# pre Syntax_Node_Type (Formal_Node, STree.Table) = SP_Symbols.formal_part or --# Syntax_Node_Type (Formal_Node, STree.Table) = SP_Symbols.type_mark or --# Formal_Node = STree.NullNode; --# post STree.Table = STree.Table~; is Type_Node : STree.SyntaxNode; Type_Sym : Dictionary.Symbol; begin -- ASSUME Formal_Node = formal_part OR type_mark OR NULL if Formal_Node = STree.NullNode or else Syntax_Node_Type (Node => Formal_Node) = SP_Symbols.type_mark then -- ASSUME Formal_Node = type_mark OR NULL Type_Node := Formal_Node; elsif Syntax_Node_Type (Node => Formal_Node) = SP_Symbols.formal_part then -- ASSUME Formal_Node = formal_part Type_Node := Next_Sibling (Current_Node => Formal_Node); else Type_Node := STree.NullNode; end if; -- ASSUME Type_Node = type_mark OR NULL if Syntax_Node_Type (Node => Type_Node) = SP_Symbols.type_mark then -- ASSUME Type_Node = type_mark Wf_Type_Mark (Node => Type_Node, Current_Scope => Scope, Context => Dictionary.ProgramContext, Type_Sym => Type_Sym); Consistent := Consistent and then Dictionary.Types_Are_Equal (Left_Symbol => Type_Sym, Right_Symbol => Dictionary.GetType (Subprog_Sym), Full_Range_Subtype => False); elsif Type_Node /= STree.NullNode then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Type_Node = type_mark OR NULL in Check_Return_Type"); end if; end Check_Return_Type; begin -- Check_Consistency OK := Check_Right_Sort_Of_Subprogram (Sym => Subprog_Sym, Node => Spec_Node); if OK then if Formal_Node = STree.NullNode or else Syntax_Node_Type (Node => Formal_Node) = SP_Symbols.type_mark then -- ASSUME Formal_Node = type_mark OR NULL OK := Dictionary.GetNumberOfSubprogramParameters (Subprog_Sym) = 0; elsif Syntax_Node_Type (Node => Formal_Node) = SP_Symbols.formal_part then -- ASSUME Formal_Node = formal_part Param_Count := 0; It := Dictionary.FirstSubprogramParameter (Subprog_Sym); Specs_It := Find_First_Node (Node_Kind => SP_Symbols.parameter_specification, From_Root => Formal_Node, In_Direction => STree.Down); while not STree.IsNull (Specs_It) loop Param_Node := Get_Node (It => Specs_It); --# assert Syntax_Node_Type (Param_Node, STree.Table) = SP_Symbols.parameter_specification and 
--# Param_Node = Get_Node (Specs_It) and --# STree.Table = STree.Table~; Current_Mode := Get_Mode (Node => Param_Node); Get_Type (Node => Param_Node, Scope => Scope, Type_Sym => Current_Type); List_Node := Child_Node (Current_Node => Param_Node); -- ASSUME List_Node = identifier_list SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => List_Node) = SP_Symbols.identifier_list, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect List_Node = identifier_list in Check_Consistency"); Ident_It := Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => List_Node, In_Direction => STree.Down); while not STree.IsNull (Ident_It) loop Ident_Node := Get_Node (It => Ident_It); --# assert Syntax_Node_Type (Param_Node, STree.Table) = SP_Symbols.parameter_specification and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier and --# Param_Node = Get_Node (Specs_It) and --# Ident_Node = Get_Node (Ident_It) and --# STree.Table = STree.Table~; if Param_Count < Natural'Last then Param_Count := Param_Count + 1; else OK := False; end if; if Dictionary.IsNullIterator (It) then OK := False; else Current_Formal := Dictionary.CurrentSymbol (It); Current_Formal_Type := Dictionary.GetType (Current_Formal); Current_Formal_Mode := Dictionary.GetSubprogramParameterMode (Current_Formal); Current_Formal_Ident := Dictionary.GetSimpleName (Current_Formal); OK := OK and then Dictionary.Types_Are_Equal (Left_Symbol => Current_Type, Right_Symbol => Current_Formal_Type, Full_Range_Subtype => False) and then Modes_Equivalent (Formal_Mode => Current_Formal_Mode, Renamed_Mode => Current_Mode) and then LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Current_Formal_Ident, Lex_Str2 => Node_Lex_String (Node => Ident_Node)) = LexTokenManager.Str_Eq; It := Dictionary.NextSymbol (It); end if; Ident_It := STree.NextNode (Ident_It); end loop; -- Idents Specs_It := STree.NextNode (Specs_It); end loop; -- Specs OK := OK and then (Param_Count = Dictionary.GetNumberOfSubprogramParameters (Subprog_Sym)); end if; if Dictionary.IsFunction (Subprog_Sym) then Check_Return_Type (Subprog_Sym => Subprog_Sym, Formal_Node => Formal_Node, Scope => Scope, Consistent => OK); end if; end if; if not OK then ErrorHandler.Semantic_Error (Err_Num => 302, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Subprog_Node), Id_Str => Node_Lex_String (Node => Subprog_Node)); end if; end Check_Consistency; begin -- Check_Subprogram_Renaming -- check that we are not in package spec if Dictionary.Get_Visibility (Scope => Scope) = Dictionary.Visible then ErrorHandler.Semantic_Error (Err_Num => 340, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); else Spec_Node := Child_Node (Current_Node => Node); -- ASSUME Spec_Node = subprogram_specification SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.subprogram_specification, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Spec_Node = subprogram_specification in Check_Subprogram_Renaming"); Pack_Node := Child_Node (Current_Node => Next_Sibling (Current_Node => Spec_Node)); -- ASSUME Pack_Node = dotted_simple_name OR identifier if Syntax_Node_Type (Node => Pack_Node) = SP_Symbols.dotted_simple_name then -- ASSUME Pack_Node = dotted_simple_name case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => -- report error at second identifier in name Pack_Node := Next_Sibling (Current_Node => Parent_Node (Current_Node => 
Last_Child_Of (Start_Node => Pack_Node))); -- ASSUME Pack_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Pack_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Pack_Node = identifier in Check_Subprogram_Renaming"); ErrorHandler.Semantic_Error (Err_Num => 610, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Pack_Node), Id_Str => LexTokenManager.Null_String); OK := False; when CommandLineData.SPARK95_Onwards => Check_Position (Node => Node, Pack_String => LexTokenManager.Null_String, Scope => Scope, Pos_OK => OK); end case; elsif Syntax_Node_Type (Node => Pack_Node) = SP_Symbols.identifier then -- ASSUME Pack_Node = identifier Check_Position (Node => Node, Pack_String => Node_Lex_String (Node => Pack_Node), Scope => Scope, Pos_OK => OK); else OK := False; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Pack_Node = dotted_simple_name OR identifier in Check_Subprogram_Renaming"); end if; if OK then Subprog_Node := Child_Node (Current_Node => Next_Sibling (Current_Node => Next_Sibling (Current_Node => Spec_Node))); -- ASSUME Subprog_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Subprog_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Subprog_Node = identifier in Check_Subprogram_Renaming"); Check_Names_Same (Node => Node, Subprog_Node => Subprog_Node, OK => OK); if OK then Find_Package (Scope => Scope, Pack_Node => Pack_Node, Pack_Sym => Pack_Sym); if not Dictionary.Is_Null_Symbol (Pack_Sym) then -- detect sort of subprogram so that Find_Subprogram can report -- correct error if Syntax_Node_Type (Node => Child_Node (Current_Node => Spec_Node)) = SP_Symbols.procedure_specification then -- ASSUME Child_Node (Current_Node => Spec_Node) = procedure_specification Subprog_Sort := Proc; elsif Syntax_Node_Type (Node => Child_Node (Current_Node => Spec_Node)) = SP_Symbols.function_specification then -- ASSUME Child_Node (Current_Node => Spec_Node) = function_specification Subprog_Sort := Func; else Subprog_Sort := Unknown; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Spec_Node = procedure_specification OR " & "function_specification in Check_Subprogram_Renaming"); end if; Find_Subprogram (Node_Pos => Node_Position (Node => Node), Scope => Scope, Pack_Sym => Pack_Sym, Subprog_Node => Subprog_Node, Sort => Subprog_Sort, Subprog_Sym => Subprog_Sym); if not Dictionary.Is_Null_Symbol (Subprog_Sym) then Formal_Node := Next_Sibling (Current_Node => Child_Node (Current_Node => Child_Node (Current_Node => Spec_Node))); -- ASSUME Formal_Node = formal_part OR type_mark OR NULL SystemErrors.RT_Assert (C => Formal_Node = STree.NullNode or else Syntax_Node_Type (Node => Formal_Node) = SP_Symbols.formal_part or else Syntax_Node_Type (Node => Formal_Node) = SP_Symbols.type_mark, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Formal_Node = formal_part OR type_mark OR NULL in Check_Subprogram_Renaming"); Check_Consistency (Scope => Scope, Subprog_Sym => Subprog_Sym, Spec_Node => Spec_Node, Formal_Node => Formal_Node, Subprog_Node => Subprog_Node, OK => OK); if OK then Dictionary.RenameSubprogram (Subprogram => Subprog_Sym, SubprogramReference => Dictionary.Location'(Start_Position => Node_Position (Node => Subprog_Node), End_Position => Node_Position (Node => Subprog_Node)), Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position 
=> Node_Position (Node => Node), End_Position => Node_Position (Node => Node)), Scope => Scope); end if; -- consistency check end if; -- names not same end if; -- no such subprogram end if; -- not a package end if; -- wrong place end if; -- in package spec end Check_Subprogram_Renaming; begin -- Wf_Renaming_Declaration Spec_Node := Child_Node (Current_Node => Node); -- ASSUME Spec_Node = operator_symbol OR subprogram_specification OR package_renaming_declaration case Syntax_Node_Type (Node => Spec_Node) is when SP_Symbols.subprogram_specification => -- ASSUME Spec_Node = subprogram_specification Check_Subprogram_Renaming (Node => Node, Scope => Scope); when SP_Symbols.operator_symbol => -- ASSUME Spec_Node = operator_symbol Check_Operator_Renaming (Node => Node, Scope => Scope); when SP_Symbols.package_renaming_declaration => -- ASSUME Spec_Node = package_renaming_declaration ErrorHandler.Semantic_Error (Err_Num => 111, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); when others => SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Spec_Node = operator_symbol OR subprogram_specification OR " & "package_renaming_declaration in Wf_Renaming_Declaration"); end case; end Wf_Renaming_Declaration; spark-2012.0.deb/examiner/symbol_set.adb0000644000175000017500000000421711753202336017105 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= package body Symbol_Set is procedure Initialise (The_Set : out T) is begin Heap.Initialize (The_Set.The_Heap); SeqAlgebra.CreateSeq (TheHeap => The_Set.The_Heap, S => The_Set.The_Symbols); end Initialise; procedure Add (The_Set : in out T; Sym : in Dictionary.Symbol) is begin SeqAlgebra.AddMember (TheHeap => The_Set.The_Heap, S => The_Set.The_Symbols, GivenValue => Natural (Dictionary.SymbolRef (Sym))); end Add; procedure Remove (The_Set : in out T; Sym : in Dictionary.Symbol) is begin SeqAlgebra.RemoveMember (TheHeap => The_Set.The_Heap, S => The_Set.The_Symbols, GivenValue => Natural (Dictionary.SymbolRef (Sym))); end Remove; function Contains (The_Set : in T; Sym : in Dictionary.Symbol) return Boolean is begin return SeqAlgebra.IsMember (TheHeap => The_Set.The_Heap, S => The_Set.The_Symbols, GivenValue => Natural (Dictionary.SymbolRef (Sym))); end Contains; end Symbol_Set; spark-2012.0.deb/examiner/sem-compunit-wf_body_stub.adb0000644000175000017500000014355511753202336022042 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with E_Strings; with SLI; separate (Sem.CompUnit) procedure Wf_Body_Stub (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Component_Data : in out ComponentManager.ComponentData) is type Err_Lookup is array (Boolean) of Natural; Which_Err : constant Err_Lookup := Err_Lookup'(False => 155, True => 336); -- look up table: if First_Seen then we are dealing with Abstract spec else Refined type Which_Abstractions is array (Boolean) of Dictionary.Abstractions; Which_Abstraction : constant Which_Abstractions := Which_Abstractions'(False => Dictionary.IsRefined, True => Dictionary.IsAbstract); Node_Type : SP_Symbols.SP_Symbol; Ident_Node : STree.SyntaxNode; Ident_Str : LexTokenManager.Lex_String; Spec_Node : STree.SyntaxNode; Formal_Node : STree.SyntaxNode; Pragma_Node : STree.SyntaxNode; Anno_Node : STree.SyntaxNode; Global_Node : STree.SyntaxNode; Dependency_Node : STree.SyntaxNode; Declare_Node : STree.SyntaxNode; Constraint_Node : STree.SyntaxNode; Pack_Sym, Subprog_Sym, Protected_Sym, Task_Sym : Dictionary.Symbol; First_Seen : Boolean; Scope_Local : Dictionary.Scopes; Interfacing_Pragma_Found : Boolean := False; Other_Pragma_Found : Boolean := False; Unused : Boolean; Valid_Annotation : Boolean := False; -- used for task type stubs Valid_Stub_Position : Boolean := True; Is_Overriding : Boolean; ------------------------------------------------------------------ procedure Check_Position (Node : in STree.SyntaxNode; Valid_Stub_Position : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in STree.Table; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# SPARK_IO.File_Sys, --# STree.Table & --# Valid_Stub_Position from *, --# Node, --# STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.body_stub; is Outer_Node : STree.SyntaxNode; begin Outer_Node := Parent_Node (Current_Node => Parent_Node (Current_Node => Parent_Node (Current_Node => Node))); while Syntax_Node_Type (Node => Outer_Node) = SP_Symbols.later_declarative_item_rep loop -- ASSUME Outer_Node = later_declarative_item_rep Outer_Node := Parent_Node (Current_Node => Outer_Node); end loop; -- ASSUME Outer_Node = declarative_part SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Outer_Node) = SP_Symbols.declarative_part, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Outer_Node = declarative_part in Check_Position"); Outer_Node := Parent_Node (Current_Node => Parent_Node (Current_Node => Parent_Node (Current_Node => Outer_Node))); -- ASSUME Outer_Node = proper_body OR protected_operation_item OR generic_subprogram_body OR -- main_program_declaration OR library_unit_body if Syntax_Node_Type (Node => Outer_Node) = SP_Symbols.proper_body or else Syntax_Node_Type (Node => Outer_Node) = SP_Symbols.protected_operation_item then -- ASSUME Outer_Node = proper_body OR protected_operation_item if Syntax_Node_Type (Node => Outer_Node) = SP_Symbols.protected_operation_item then while Syntax_Node_Type (Node => Outer_Node) = SP_Symbols.protected_operation_item loop -- ASSUME Outer_Node = protected_operation_item Outer_Node := Parent_Node (Current_Node => Outer_Node); end loop; -- ASSUME Outer_Node = protected_body SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => 
Outer_Node) = SP_Symbols.protected_body, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Outer_Node = protected_body in Check_Position"); Outer_Node := Parent_Node (Current_Node => Outer_Node); end if; -- ASSUME Outer_Node = proper_body SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Outer_Node) = SP_Symbols.proper_body, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Outer_Node = proper_body in Check_Position"); Outer_Node := Parent_Node (Current_Node => Outer_Node); -- ASSUME Outer_Node = abody OR subunit if Syntax_Node_Type (Node => Outer_Node) = SP_Symbols.abody then -- ASSUME Outer_Node = abody Valid_Stub_Position := False; ErrorHandler.Semantic_Error (Err_Num => 61, Reference => 17, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); elsif Syntax_Node_Type (Node => Outer_Node) /= SP_Symbols.subunit then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Outer_Node = abody OR subunit in Check_Position"); end if; elsif Syntax_Node_Type (Node => Outer_Node) /= SP_Symbols.generic_subprogram_body and then Syntax_Node_Type (Node => Outer_Node) /= SP_Symbols.main_program_declaration and then Syntax_Node_Type (Node => Outer_Node) /= SP_Symbols.library_unit_body then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Outer_Node = proper_body OR protected_operation_item OR generic_subprogram_body OR " & "main_program_declaration OR library_unit_body in Check_Position"); end if; end Check_Position; ------------------------------------------------------------------------ function Requires_Second_Annotation (Subprog_Sym : Dictionary.Symbol) return Boolean --# global in Dictionary.Dict; is Global_Var : Dictionary.Symbol; Required : Boolean; Global_Item : Dictionary.Iterator; Enclosing_Region : Dictionary.Symbol; begin Required := False; if not Dictionary.IsGlobalScope (Dictionary.GetScope (Subprog_Sym)) then Enclosing_Region := Dictionary.GetEnclosingCompilationUnit (Dictionary.GetScope (Subprog_Sym)); if Dictionary.IsPackage (Enclosing_Region) then Global_Item := Dictionary.FirstGlobalVariable (Dictionary.IsAbstract, Subprog_Sym); while Global_Item /= Dictionary.NullIterator loop Global_Var := Dictionary.CurrentSymbol (Global_Item); if Dictionary.IsRefinedOwnVariable (Global_Var) and then Dictionary.Packages_Are_Equal (Left_Symbol => Dictionary.GetOwner (Global_Var), Right_Symbol => Enclosing_Region) then Required := True; exit; end if; Global_Item := Dictionary.NextSymbol (Global_Item); end loop; end if; end if; return Required; end Requires_Second_Annotation; ------------------------------------------------------------------------ function Empty_Annotation (Node : STree.SyntaxNode) return Boolean --# global in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.procedure_annotation; is Current_Node : STree.SyntaxNode; begin Current_Node := Child_Node (Current_Node => Node); -- ASSUME Current_Node = moded_global_definition OR dependency_relation OR declare_annotation OR procedure_constraint if Syntax_Node_Type (Node => Current_Node) = SP_Symbols.procedure_constraint then Current_Node := Child_Node (Current_Node => Current_Node); -- ASSUME Current_Node = precondition OR postcondition OR NULL SystemErrors.RT_Assert (C => Current_Node = STree.NullNode or else Syntax_Node_Type (Node => Current_Node) = SP_Symbols.precondition or else Syntax_Node_Type (Node => Current_Node) = SP_Symbols.postcondition, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => 
"Expect Current_Node = precondition OR postcondition OR NULL in Empty_Annotation"); elsif Syntax_Node_Type (Node => Current_Node) /= SP_Symbols.moded_global_definition and then Syntax_Node_Type (Node => Current_Node) /= SP_Symbols.dependency_relation and then Syntax_Node_Type (Node => Current_Node) /= SP_Symbols.declare_annotation then Current_Node := STree.NullNode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = moded_global_definition OR dependency_relation OR declare_annotation OR " & "procedure_constraint in Empty_Annotation"); end if; return Current_Node = STree.NullNode; end Empty_Annotation; ---------------------------------------------------------------------- procedure Process_Annotation (Anno_Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Task_Sym : in Dictionary.Symbol; Valid_Annotation : in out Boolean) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Aggregate_Stack.State; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# in out TheHeap; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# Statistics.TableUsage, --# STree.Table, --# TheHeap from *, --# Anno_Node, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Scope, --# STree.Table, --# Task_Sym, --# TheHeap & --# ErrorHandler.Error_Context, --# SLI.State, --# SPARK_IO.File_Sys from Anno_Node, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# Task_Sym, --# TheHeap & --# Valid_Annotation from *, --# Anno_Node, --# STree.Table; --# pre Syntax_Node_Type (Anno_Node, STree.Table) = SP_Symbols.procedure_annotation; --# post STree.Table = STree.Table~; is Current_Node : STree.SyntaxNode; begin Current_Node := Child_Node (Current_Node => Anno_Node); -- ASSUME Current_Node = moded_global_definition OR dependency_relation OR declare_annotation OR procedure_constraint -- to be legal, Current_Node must be a moded_global_definition if Syntax_Node_Type (Node => Current_Node) = SP_Symbols.moded_global_definition then -- ASSUME Current_Node = moded_global_definition Current_Node := Last_Sibling_Of (Start_Node => Current_Node); -- ASSUME Current_Node = procedure_constraint SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Current_Node) = SP_Symbols.procedure_constraint, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = procedure_constraint in Process_Annotation"); if Child_Node (Current_Node => Current_Node) = STree.NullNode then -- ASSUME Child_Node (Current_Node => Current_Node) = NULL Valid_Annotation := True; Wf_Subprogram_Annotation (Node => Anno_Node, Current_Scope => Scope, Subprog_Sym => Task_Sym, First_Seen => False, The_Heap => TheHeap); elsif Syntax_Node_Type (Node => Child_Node (Current_Node => Current_Node)) = SP_Symbols.precondition or else Syntax_Node_Type (Node => Child_Node (Current_Node => Current_Node)) = SP_Symbols.postcondition then -- ASSUME Child_Node (Current_Node => Current_Node) = precondition OR postcondition 
ErrorHandler.Semantic_Error (Err_Num => 990, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Current_Node), Id_Str => LexTokenManager.Null_String); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child_Node (Current_Node => Current_Node) = precondition OR postcondition OR " & "NULL in Process_Annotation"); end if; elsif Syntax_Node_Type (Node => Current_Node) = SP_Symbols.dependency_relation or else Syntax_Node_Type (Node => Current_Node) = SP_Symbols.declare_annotation or else Syntax_Node_Type (Node => Current_Node) = SP_Symbols.procedure_constraint then ErrorHandler.Semantic_Error (Err_Num => 990, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Current_Node), Id_Str => LexTokenManager.Null_String); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Current_Node = moded_global_definition OR dependency_relation OR declare_annotation OR " & "procedure_constraint in Process_Annotation"); end if; end Process_Annotation; begin -- Wf_Body_Stub Pragma_Node := Last_Sibling_Of (Start_Node => Child_Node (Current_Node => Node)); -- ASSUME Pragma_Node = apragma OR procedure_annotation OR function_annotation OR -- dotted_simple_name OR task_stub OR protected_stub if Syntax_Node_Type (Node => Pragma_Node) = SP_Symbols.apragma then -- ASSUME Pragma_Node = apragma if Is_External_Interface (Pragma_Node => Pragma_Node) then -- either Interface of Import correctly used for language variant Interfacing_Pragma_Found := True; else -- some other pragma found Other_Pragma_Found := True; end if; elsif Syntax_Node_Type (Node => Pragma_Node) = SP_Symbols.procedure_annotation or else Syntax_Node_Type (Node => Pragma_Node) = SP_Symbols.function_annotation or else Syntax_Node_Type (Node => Pragma_Node) = SP_Symbols.dotted_simple_name or else Syntax_Node_Type (Node => Pragma_Node) = SP_Symbols.task_stub or else Syntax_Node_Type (Node => Pragma_Node) = SP_Symbols.protected_stub then -- ASSUME Pragma_Node = procedure_annotation OR function_annotation OR dotted_simple_name OR task_stub OR protected_stub Check_Position (Node => Node, Valid_Stub_Position => Valid_Stub_Position); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = apragma OR procedure_annotation OR function_annotation OR dotted_simple_name OR " & "task_stub OR protected_stub in Wf_Body_Stub"); end if; --# assert STree.Table = STree.Table~ and --# (Interfacing_Pragma_Found -> (Syntax_Node_Type (Pragma_Node, STree.Table) = SP_Symbols.apragma)); Node_Type := Syntax_Node_Type (Node => Child_Node (Current_Node => Node)); -- ASSUME Node_Type = overriding_indicator OR procedure_specification OR function_specification OR -- dotted_simple_name OR task_stub OR protected_stub if Node_Type = SP_Symbols.dotted_simple_name then -- ASSUME Node_Type = dotted_simple_name Ident_Node := Child_Node (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME Ident_Node = dotted_simple_name OR identifier if Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.dotted_simple_name then -- ASSUME Ident_Node = dotted_simple_name ErrorHandler.Semantic_Error (Err_Num => 613, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => LexTokenManager.Null_String); elsif Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier then -- ASSUME Ident_Node = identifier Pack_Sym := Dictionary.LookupImmediateScope (Name => Node_Lex_String (Ident_Node), Scope => Scope, 
Context => Dictionary.ProgramContext); if Dictionary.Is_Null_Symbol (Pack_Sym) or else not Dictionary.IsPackage (Pack_Sym) then ErrorHandler.Semantic_Error (Err_Num => 11, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Node_Lex_String (Ident_Node)); elsif Dictionary.HasBody (Pack_Sym) or else Dictionary.HasBodyStub (Pack_Sym) then ErrorHandler.Semantic_Error (Err_Num => 16, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Node_Lex_String (Ident_Node)); elsif Dictionary.IsPackage (Dictionary.GetRegion (Scope)) and then not Dictionary.IsEmbeddedPackage (Dictionary.GetRegion (Scope)) and then not Dictionary.Is_Null_Symbol (Dictionary.LookupSelectedItem (Prefix => Dictionary.GetRegion (Scope), Selector => Node_Lex_String (Ident_Node), Scope => Dictionary.GlobalScope, Context => Dictionary.ProofContext)) then -- name exists as child ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Node_Lex_String (Ident_Node)); else STree.Set_Node_Lex_String (Sym => Pack_Sym, Node => Ident_Node); if Valid_Stub_Position then Dictionary.AddBodyStub (CompilationUnit => Pack_Sym, Comp_Unit => ContextManager.Ops.Current_Unit, BodyStub => Dictionary.Location'(Start_Position => Node_Position (Node => Node), End_Position => Node_Position (Node => Node))); end if; end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = dotted_simple_name OR identifier in Wf_Body_Stub"); end if; elsif Node_Type = SP_Symbols.procedure_specification or else Node_Type = SP_Symbols.function_specification or else (Node_Type = SP_Symbols.overriding_indicator and then (Syntax_Node_Type (Node => Next_Sibling (Current_Node => Child_Node (Current_Node => Node))) = SP_Symbols.procedure_specification or else Syntax_Node_Type (Node => Next_Sibling (Current_Node => Child_Node (Current_Node => Node))) = SP_Symbols.function_specification)) then -- ASSUME Node_Type = procedure_specification OR function_specification OR overriding_indicator if Valid_Stub_Position then Scope_Local := Scope; Is_Overriding := False; if Node_Type = SP_Symbols.overriding_indicator then -- ASSUME Node_Type = overriding_indicator -- ASSUME Child_Node (Current_Node => Node_Type) = RWoverriding OR RWnot if Syntax_Node_Type (Node => Last_Child_Of (Start_Node => Node)) = SP_Symbols.RWoverriding then -- ASSUME Child_Node (Current_Node => Node_Type) = RWoverriding Is_Overriding := True; end if; Spec_Node := Next_Sibling (Current_Node => Child_Node (Current_Node => Node)); else Spec_Node := Child_Node (Current_Node => Node); end if; -- ASSUME Spec_Node = procedure_specification OR function_specification SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.procedure_specification or else Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.function_specification, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Spec_Node = procedure_specification OR function_specification in Wf_Body_Stub"); Anno_Node := Next_Sibling (Current_Node => Spec_Node); -- ASSUME Anno_Node = procedure_annotation OR function_annotation SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Anno_Node) = SP_Symbols.procedure_annotation or else Syntax_Node_Type (Node => Anno_Node) = SP_Symbols.function_annotation, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Anno_Node = procedure_annotation OR function_annotation in 
Wf_Body_Stub"); Get_Subprogram_Anno_Key_Nodes (Node => Anno_Node, Global_Node => Global_Node, Dependency_Node => Dependency_Node, Declare_Node => Declare_Node, Constraint_Node => Constraint_Node); --# assert STree.Table = STree.Table~ and --# (Interfacing_Pragma_Found -> (Syntax_Node_Type (Pragma_Node, STree.Table) = SP_Symbols.apragma)) and --# (Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.procedure_specification or --# Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.function_specification) and --# (Syntax_Node_Type (Anno_Node, STree.Table) = SP_Symbols.procedure_annotation or --# Syntax_Node_Type (Anno_Node, STree.Table) = SP_Symbols.function_annotation) and --# (Syntax_Node_Type (Global_Node, STree.Table) = SP_Symbols.moded_global_definition or --# Global_Node = STree.NullNode) and --# (Syntax_Node_Type (Dependency_Node, STree.Table) = SP_Symbols.dependency_relation or --# Dependency_Node = STree.NullNode) and --# (Syntax_Node_Type (Declare_Node, STree.Table) = SP_Symbols.declare_annotation or --# Declare_Node = STree.NullNode) and --# (Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.procedure_constraint or --# Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.function_constraint); Formal_Node := Next_Sibling (Current_Node => Child_Node (Current_Node => Spec_Node)); -- ASSUME Formal_Node = formal_part OR type_mark OR NULL if Syntax_Node_Type (Node => Formal_Node) = SP_Symbols.type_mark then -- ASSUME Formal_Node = type_mark Formal_Node := STree.NullNode; elsif Formal_Node /= STree.NullNode and then Syntax_Node_Type (Node => Formal_Node) /= SP_Symbols.formal_part then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Formal_Node = formal_part OR type_mark OR NULL in Wf_Body_Stub"); end if; Subprogram_Specification.Wf_Subprogram_Specification_From_Body (Node => Spec_Node, Hidden => False, Current_Scope => Scope_Local, Subprog_Sym => Subprog_Sym, First_Seen => First_Seen); --# assert STree.Table = STree.Table~ and --# (Interfacing_Pragma_Found -> (Syntax_Node_Type (Pragma_Node, STree.Table) = SP_Symbols.apragma)) and --# (Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.procedure_specification or --# Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.function_specification) and --# (Syntax_Node_Type (Anno_Node, STree.Table) = SP_Symbols.procedure_annotation or --# Syntax_Node_Type (Anno_Node, STree.Table) = SP_Symbols.function_annotation) and --# (Syntax_Node_Type (Global_Node, STree.Table) = SP_Symbols.moded_global_definition or --# Global_Node = STree.NullNode) and --# (Syntax_Node_Type (Dependency_Node, STree.Table) = SP_Symbols.dependency_relation or --# Dependency_Node = STree.NullNode) and --# (Syntax_Node_Type (Declare_Node, STree.Table) = SP_Symbols.declare_annotation or --# Declare_Node = STree.NullNode) and --# (Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.procedure_constraint or --# Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.function_constraint); if not Dictionary.Is_Null_Symbol (Subprog_Sym) then if Syntax_Node_Type (Node => Pragma_Node) = SP_Symbols.apragma and then not First_Seen then -- illegal redeclaration ErrorHandler.A_Pragma (Pragma_Name => Node_Lex_String (Child_Node (Current_Node => Pragma_Node)), Position => Node_Position (Node => Pragma_Node)); ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Dictionary.GetSimpleName (Subprog_Sym)); else -- this else means we 
don't process illegal redeclarations further if First_Seen and then Other_Pragma_Found then -- only an interfacing pragma will do case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => ErrorHandler.Semantic_Error (Err_Num => 70, Reference => 18, Position => Node_Position (Node => Pragma_Node), Id_Str => LexTokenManager.Interface_Token); when CommandLineData.SPARK95_Onwards => ErrorHandler.Semantic_Error (Err_Num => 70, Reference => 18, Position => Node_Position (Node => Pragma_Node), Id_Str => LexTokenManager.Import_Token); end case; end if; -- wrong pragma -- If we are here then we have either: -- a legal declaration using a correct interfacing pragma; or -- a declaration with the wrong pragma that we have reported; or -- a legal "is separate" -- In each case we can go on to check formal parts and annotations if Syntax_Node_Type (Node => Formal_Node) = SP_Symbols.formal_part then Wf_Formal_Part (Node => Formal_Node, Current_Scope => Scope_Local, Subprog_Sym => Subprog_Sym, First_Occurrence => First_Seen, Context => Dictionary.ProgramContext); elsif Dictionary.GetNumberOfSubprogramParameters (Subprog_Sym) /= 0 then ErrorHandler.Semantic_Error (Err_Num => 152, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Dictionary.GetSimpleName (Subprog_Sym)); end if; -- formal part to check --# assert STree.Table = STree.Table~ and --# (Interfacing_Pragma_Found -> (Syntax_Node_Type (Pragma_Node, STree.Table) = SP_Symbols.apragma)) and --# (Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.procedure_specification or --# Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.function_specification) and --# (Syntax_Node_Type (Anno_Node, STree.Table) = SP_Symbols.procedure_annotation or --# Syntax_Node_Type (Anno_Node, STree.Table) = SP_Symbols.function_annotation) and --# (Syntax_Node_Type (Global_Node, STree.Table) = SP_Symbols.moded_global_definition or --# Global_Node = STree.NullNode) and --# (Syntax_Node_Type (Dependency_Node, STree.Table) = SP_Symbols.dependency_relation or --# Dependency_Node = STree.NullNode) and --# (Syntax_Node_Type (Declare_Node, STree.Table) = SP_Symbols.declare_annotation or --# Declare_Node = STree.NullNode) and --# (Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.procedure_constraint or --# Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.function_constraint); if (First_Seen or else Requires_Second_Annotation (Subprog_Sym => Subprog_Sym)) and then (Syntax_Node_Type (Node => Global_Node) = SP_Symbols.moded_global_definition or else Syntax_Node_Type (Node => Dependency_Node) = SP_Symbols.dependency_relation or else Syntax_Node_Type (Node => Declare_Node) = SP_Symbols.declare_annotation) then Wf_Subprogram_Annotation (Node => Anno_Node, Current_Scope => Scope_Local, Subprog_Sym => Subprog_Sym, First_Seen => First_Seen, The_Heap => TheHeap); -- We have processed a procedure annotation. We can use this to suppress the all-from-all behaviour if -- flow=auto, but we need to know whether or not the procedure annotation consisted of global and derives -- or global only. This is recorded in the Dictionary "HasDerivesAnnotation" by Wf_Subprogram_Annotation. 
elsif not First_Seen and then Requires_Second_Annotation (Subprog_Sym => Subprog_Sym) and then Global_Node = STree.NullNode and then Dependency_Node = STree.NullNode and then Declare_Node = STree.NullNode then -- ASSUME Global_Node = NULL AND Dependency_Node = NULL AND Declare_Node = NULL ErrorHandler.Semantic_Error (Err_Num => 87, Reference => 16, Position => Node_Position (Node => Spec_Node), Id_Str => Dictionary.GetSimpleName (Subprog_Sym)); if Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.procedure_specification then -- ASSUME Spec_Node = procedure_specification Dictionary.SetSubprogramSignatureNotWellformed (Dictionary.IsRefined, Subprog_Sym); end if; elsif Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.procedure_specification and then First_Seen and then (CommandLineData.Content.Language_Profile = CommandLineData.SPARK83 or else CommandLineData.Content.Flow_Option = CommandLineData.Info_Flow) and then Global_Node = STree.NullNode and then Dependency_Node = STree.NullNode and then Declare_Node = STree.NullNode then -- ASSUME Global_Node = NULL AND Dependency_Node = NULL AND Declare_Node = NULL ErrorHandler.Semantic_Error (Err_Num => 154, Reference => 19, Position => Node_Position (Node => Spec_Node), Id_Str => Dictionary.GetSimpleName (Subprog_Sym)); Dictionary.SetSubprogramSignatureNotWellformed (Dictionary.IsAbstract, Subprog_Sym); elsif Syntax_Node_Type (Node => Global_Node) = SP_Symbols.moded_global_definition or else Syntax_Node_Type (Node => Dependency_Node) = SP_Symbols.dependency_relation or else Syntax_Node_Type (Node => Declare_Node) = SP_Symbols.declare_annotation then ErrorHandler.Semantic_Error (Err_Num => Which_Err (Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.function_specification), Reference => 16, Position => Node_Position (Node => Spec_Node), Id_Str => Dictionary.GetSimpleName (Subprog_Sym)); end if; --# assert STree.Table = STree.Table~ and --# (Interfacing_Pragma_Found -> (Syntax_Node_Type (Pragma_Node, STree.Table) = SP_Symbols.apragma)) and --# (Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.procedure_specification or --# Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.function_specification) and --# (Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.procedure_constraint or --# Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.function_constraint); if Interfacing_Pragma_Found then --# accept Flow, 10, Unused, "Export not required here"; Wf_External_Interface (Pragma_Node => Pragma_Node, Entity_Sym => Subprog_Sym, Error_Found => Unused); --# end accept; end if; --# assert STree.Table = STree.Table~ and --# (Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.procedure_specification or --# Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.function_specification) and --# (Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.procedure_constraint or --# Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.function_constraint); -- -- Synthesise an 'all from all' dependency if necessary. 
if Syntax_Node_Type (Node => Spec_Node) = SP_Symbols.procedure_specification and then Needs_Synthetic_Dependency (Proc_Task_Or_Entry => Subprog_Sym) and then (First_Seen or else Requires_Second_Annotation (Subprog_Sym => Subprog_Sym)) then Dependency_Relation.Create_Full_Subprog_Dependency (Node_Pos => Node_Position (Node => Node), Subprog_Sym => Subprog_Sym, Abstraction => Which_Abstraction (First_Seen), The_Heap => TheHeap); end if; --# assert STree.Table = STree.Table~ and --# (Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.procedure_specification or --# Syntax_Node_Type (Spec_Node, STree.Table) = SP_Symbols.function_specification) and --# (Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.procedure_constraint or --# Syntax_Node_Type (Constraint_Node, STree.Table) = SP_Symbols.function_constraint); if Syntax_Node_Type (Node => Child_Node (Current_Node => Constraint_Node)) = SP_Symbols.precondition or else Syntax_Node_Type (Node => Child_Node (Current_Node => Constraint_Node)) = SP_Symbols.postcondition or else Syntax_Node_Type (Node => Child_Node (Current_Node => Constraint_Node)) = SP_Symbols.return_expression then -- ASSUME Child_Node (Current_Node => Constraint_Node) = precondition OR postcondition OR return_expression -- a pre/post/return exists. Should it? -- checks to see if constraint found is allowed if not (First_Seen or else Requires_Second_Annotation (Subprog_Sym => Subprog_Sym) or else Has_Parameter_Global_Or_Return_Of_Local_Private_Type (Subprog_Sym => Subprog_Sym)) then -- annotation not required -- two possible errors: misplaced anno or duplicate anno if Dictionary.HasPrecondition (Dictionary.IsAbstract, Subprog_Sym) or else Dictionary.HasPostcondition (Dictionary.IsAbstract, Subprog_Sym) then -- illegal duplicate anno ErrorHandler.Semantic_Error (Err_Num => 343, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Constraint_Node), Id_Str => Dictionary.GetSimpleName (Subprog_Sym)); else -- misplaced anno ErrorHandler.Semantic_Error (Err_Num => 342, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Constraint_Node), Id_Str => Dictionary.GetSimpleName (Subprog_Sym)); end if; else -- annotation is required so continue Wf_Subprogram_Constraint (Node => Constraint_Node, Subprogram_Sym => Subprog_Sym, First_Seen => First_Seen, Component_Data => Component_Data, The_Heap => TheHeap); end if; elsif Child_Node (Current_Node => Constraint_Node) /= STree.NullNode then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Child_Node (Current_Node => Constraint_Node = precondition OR postcondition OR " & "NULL in Wf_Body_Stub"); end if; end if; -- not an illegal redclaration end if; -- subprogsym not null -- Prior to SPARK 2005, the check is only required when the procedure -- has not been previously declared. For SPARK 2005, the check is -- always required as the overriding_indicator for a -- subprogram body stub may be incorrect. 
if First_Seen or else CommandLineData.Content.Language_Profile in CommandLineData.SPARK2005_Profiles then Ident_Node := Child_Node (Current_Node => Child_Node (Current_Node => Spec_Node)); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Wf_Body_Stub"); Check_No_Overloading_From_Tagged_Ops (Ident_Node => Ident_Node, Subprog_Sym => Subprog_Sym, Scope => Scope, Abstraction => Dictionary.IsRefined, Is_Overriding => Is_Overriding); end if; end if; -- don't add stub if position illegal elsif Node_Type = SP_Symbols.task_stub then -- ASSUME Node_Type = task_stub if CommandLineData.Ravenscar_Selected then if Valid_Stub_Position then Ident_Node := Child_Node (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Wf_Body_Stub"); Ident_Str := Node_Lex_String (Ident_Node); Task_Sym := Dictionary.LookupItem (Name => Node_Lex_String (Ident_Node), Scope => Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); if Dictionary.Is_Null_Symbol (Task_Sym) or else not Dictionary.IsTaskType (Task_Sym) then ErrorHandler.Semantic_Error (Err_Num => 898, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Node_Lex_String (Ident_Node)); elsif Dictionary.HasBody (Task_Sym) or else Dictionary.HasBodyStub (Task_Sym) then ErrorHandler.Semantic_Error (Err_Num => 899, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Node_Lex_String (Ident_Node)); else -- valid so far Dictionary.AddBodyStub (CompilationUnit => Task_Sym, Comp_Unit => ContextManager.Ops.Current_Unit, BodyStub => Dictionary.Location'(Start_Position => Node_Position (Node => Node), End_Position => Node_Position (Node => Node))); -- check annotation Anno_Node := Next_Sibling (Current_Node => Ident_Node); -- ASSUME Anno_Node = procedure_annotation SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Anno_Node) = SP_Symbols.procedure_annotation, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Anno_Node = procedure_annotation in Wf_Body_Stub"); if Requires_Second_Annotation (Subprog_Sym => Task_Sym) then if Empty_Annotation (Node => Anno_Node) then ErrorHandler.Semantic_Error (Err_Num => 154, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Ident_Str); else -- anno present and required STree.Set_Node_Lex_String (Sym => Task_Sym, Node => Ident_Node); Process_Annotation (Anno_Node => Anno_Node, Scope => Scope, Task_Sym => Task_Sym, Valid_Annotation => Valid_Annotation); end if; else -- second anno not required if Empty_Annotation (Node => Anno_Node) then STree.Set_Node_Lex_String (Sym => Task_Sym, Node => Ident_Node); else ErrorHandler.Semantic_Error (Err_Num => 155, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Anno_Node), Id_Str => Ident_Str); end if; end if; -- Create "full" derives if annotation is present, valid and IFA is not selected. -- We know we are in SPARK 95 or 2005 as this is a task stub. 
-- Actually, we must only create the 'full' derives annotation if there is no explicit derives if Valid_Annotation and then CommandLineData.Content.Flow_Option /= CommandLineData.Info_Flow and then not Dictionary.GetHasDerivesAnnotation (Task_Sym) then Dependency_Relation.Create_Full_Subprog_Dependency (Node_Pos => Node_Position (Node => Node), Subprog_Sym => Task_Sym, Abstraction => Dictionary.IsRefined, The_Heap => TheHeap); end if; end if; end if; -- don't process stub if illegally positioned else -- illegal ErrorHandler.Semantic_Error (Err_Num => 850, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; elsif Node_Type = SP_Symbols.protected_stub then -- ASSUME Node_Type = protected_stub if CommandLineData.Ravenscar_Selected then if Valid_Stub_Position then Ident_Node := Child_Node (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Wf_Body_Stub"); Protected_Sym := Dictionary.LookupItem (Name => Node_Lex_String (Ident_Node), Scope => Scope, Context => Dictionary.ProgramContext, Full_Package_Name => False); if Dictionary.Is_Null_Symbol (Protected_Sym) or else not (Dictionary.IsType (Protected_Sym) and then Dictionary.IsProtectedTypeMark (Protected_Sym)) then ErrorHandler.Semantic_Error (Err_Num => 898, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Node_Lex_String (Ident_Node)); elsif Dictionary.HasBody (Protected_Sym) or else Dictionary.HasBodyStub (Protected_Sym) then ErrorHandler.Semantic_Error (Err_Num => 899, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Ident_Node), Id_Str => Node_Lex_String (Ident_Node)); else STree.Set_Node_Lex_String (Sym => Protected_Sym, Node => Ident_Node); Dictionary.AddBodyStub (CompilationUnit => Protected_Sym, Comp_Unit => ContextManager.Ops.Current_Unit, BodyStub => Dictionary.Location'(Start_Position => Node_Position (Node => Node), End_Position => Node_Position (Node => Node))); end if; end if; -- don't process stub if illegally positioned else -- illegal ErrorHandler.Semantic_Error (Err_Num => 850, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node_Type = overriding_indicator OR procedure_specification OR function_specification OR " & "dotted_simple_name OR task_stub OR protected_stub in Wf_Body_Stub"); end if; if ErrorHandler.Generate_SLI then SLI.Increment_Nb_Separates (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Node); end if; --# accept F, 33, Unused, "Export not required here"; end Wf_Body_Stub; spark-2012.0.deb/examiner/lextokenlists.ads0000644000175000017500000000574011753202336017660 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with ExaminerConstants; with E_Strings; with LexTokenManager; with SPARK_IO; use type LexTokenManager.Str_Comp_Result; --# inherit ExaminerConstants, --# E_Strings, --# LexTokenManager, --# SPARK_IO, --# SystemErrors; package LexTokenLists is subtype Lengths is Natural range 0 .. ExaminerConstants.Lex_Token_Lists_Max_Length; subtype Positions is Lengths range 1 .. ExaminerConstants.Lex_Token_Lists_Max_Length; type Lists is private; Null_List : constant Lists; function Prefix_Unit (Poss_Prefix, Prefixed : Lists) return Boolean; --# global in LexTokenManager.State; function Eq_Unit (First_Item, Second : Lists) return Boolean; --# global in LexTokenManager.State; procedure Append (List : in out Lists; Item : in LexTokenManager.Lex_String); --# derives List from *, --# Item; -- Write a dotted name identifier (List) to the standard output. procedure Print_List (File : in SPARK_IO.File_Type; List : in Lists); --# global in LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File, --# LexTokenManager.State, --# List; function Token_List_To_String (Token_List : Lists) return E_Strings.T; --# global in LexTokenManager.State; function Get_Length (List : Lists) return Lengths; function Get_Element (List : Lists; Pos : Positions) return LexTokenManager.Lex_String; procedure Pop (List : in out Lists; Item : out LexTokenManager.Lex_String); --# derives Item, --# List from List; private type Contents is array (Positions) of LexTokenManager.Lex_String; type Lists is record Length : Lengths; Content : Contents; end record; Null_List : constant Lists := Lists'(0, Contents'(Positions => LexTokenManager.Null_String)); end LexTokenLists; spark-2012.0.deb/examiner/sem-walk_expression_p-wf_named_argument_association.adb0000644000175000017500000002260311753202336027316 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= separate (Sem.Walk_Expression_P) procedure Wf_Named_Argument_Association (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; E_Stack : in out Exp_Stack.Exp_Stack_Type; Heap_Param : in out Lists.List_Heap) is Exp_Result, Fun_Info : Sem.Exp_Record; Fun_Sym, Param_Sym : Dictionary.Symbol; Ident_Node : STree.SyntaxNode; Ident_Str : LexTokenManager.Lex_String; Already_Present, Name_Is_Parameter_Name : Boolean; Error_Found : Boolean := False; -------------------------------------------------------------- function Find_Identifier (Node : STree.SyntaxNode) return STree.SyntaxNode --# global in STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.named_argument_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_named_argument_association; --# return Return_Node => STree.Syntax_Node_Type (Return_Node, STree.Table) = SP_Symbols.identifier; is Ident_Node : STree.SyntaxNode; begin Ident_Node := STree.Child_Node (Current_Node => Node); -- ASSUME Ident_Node = annotation_named_argument_association OR annotation_simple_name OR -- named_argument_association OR simple_name if STree.Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.simple_name or else STree.Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.annotation_simple_name then -- ASSUME Ident_Node = annotation_simple_name OR simple_name Ident_Node := STree.Child_Node (Current_Node => Ident_Node); elsif STree.Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.named_argument_association or else STree.Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.annotation_named_argument_association then -- ASSUME Ident_Node = named_argument_association OR annotation_named_argument_association Ident_Node := STree.Child_Node (Current_Node => STree.Next_Sibling (Current_Node => Ident_Node)); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = annotation_named_argument_association OR annotation_simple_name OR " & "named_argument_association OR simple_name in Find_Identifier"); end if; -- ASSUME Ident_Node = identifier SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_Node = identifier in Find_Identifier"); return Ident_Node; end Find_Identifier; -------------------------------------------------------------- function Expression_Location (Node : STree.SyntaxNode) return LexTokenManager.Token_Position --# global in STree.Table; --# pre STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.named_argument_association or --# STree.Syntax_Node_Type (Node, STree.Table) = SP_Symbols.annotation_named_argument_association; is Result : STree.SyntaxNode; begin Result := STree.Child_Node (Current_Node => Node); -- ASSUME Result = annotation_named_argument_association OR annotation_simple_name -- named_argument_association OR simple_name if STree.Syntax_Node_Type (Node => Result) = SP_Symbols.simple_name or else STree.Syntax_Node_Type (Node => Result) = SP_Symbols.annotation_simple_name then -- ASSUME Result = annotation_simple_name OR simple_name Result := STree.Next_Sibling (Current_Node => Result); elsif STree.Syntax_Node_Type (Node => Result) = SP_Symbols.named_argument_association or else STree.Syntax_Node_Type (Node => Result) = SP_Symbols.annotation_named_argument_association then -- ASSUME Result = named_argument_association OR 
annotation_named_argument_association Result := STree.Next_Sibling (Current_Node => STree.Next_Sibling (Current_Node => Result)); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = annotation_named_argument_association OR annotation_simple_name OR " & "named_argument_association OR simple_name in Expression_Location"); end if; -- ASSUME Result = annotation_expression OR expression SystemErrors.RT_Assert (C => STree.Syntax_Node_Type (Node => Result) = SP_Symbols.annotation_expression or else STree.Syntax_Node_Type (Node => Result) = SP_Symbols.expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Result = annotation_expression OR expression in Expression_Location"); return STree.Node_Position (Node => Result); end Expression_Location; begin -- Wf_Named_Argument_Association Exp_Stack.Pop (Item => Exp_Result, Stack => E_Stack); Exp_Stack.Pop (Item => Fun_Info, Stack => E_Stack); Fun_Sym := Fun_Info.Other_Symbol; Find_Named_Argument_Association_Parameter (Node => Node, Subprog_Sym => Fun_Sym, Name_Is_Parameter_Name => Name_Is_Parameter_Name, Param_Sym => Param_Sym); Ident_Node := Find_Identifier (Node => Node); Ident_Str := STree.Node_Lex_String (Node => Ident_Node); if Name_Is_Parameter_Name then -- Seed syntax tree with expected type for run-time check; -- but, don't do this for instantiation of unchecked_conversion -- because we don't want any RTCs for association of those parameters -- (provided the function parameter subtype and actual subtype match) if not (Dictionary.IsAnUncheckedConversion (Fun_Sym) and then Dictionary.Types_Are_Equal (Left_Symbol => Exp_Result.Type_Symbol, Right_Symbol => Dictionary.GetType (Param_Sym), Full_Range_Subtype => False)) then STree.Add_Node_Symbol (Node => Node, Sym => Dictionary.GetType (Param_Sym)); end if; -- There is a special case involving functions an stream variables. We allow a stream -- variable to be a parameter to an Unchecked_Conversion but need to ensure that -- the function inherits the restrictions associated with referencing a stream -- (e.g. cannot be used in gernal expression). We can do this here by checking -- the StreamSymbol of the parameter expression (there will only be one if we are -- talking about an unchecked conversion) and if it is non-null then setting the -- stream symbol of the function result record (now an object) to the function symbol. 
-- Note that this clause will only be executed for an unchecked conversion because -- a parameter which is a stream would hav ebeen rejected at wf_primary in all other -- cases if not Dictionary.Is_Null_Symbol (Exp_Result.Stream_Symbol) then Fun_Info.Stream_Symbol := Fun_Sym; end if; Add_Name (Name => Ident_Str, List => Fun_Info.Param_List, Heap_Param => Heap_Param, Present => Already_Present); if Already_Present then Error_Found := True; ErrorHandler.Semantic_Error (Err_Num => 4, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Ident_Node), Id_Str => Ident_Str); else -- not already present so do further checks Sem.Wf_Argument_Association (Node => Node, Scope => Scope, Param_Type => Dictionary.GetType (Param_Sym), Position => Expression_Location (Node => Node), Exp_Result => Exp_Result, Fun_Info => Fun_Info, Error_Found => Error_Found); end if; else Error_Found := True; ErrorHandler.Semantic_Error_Lex1_Sym1 (Err_Num => 2, Reference => ErrorHandler.No_Reference, Position => STree.Node_Position (Node => Ident_Node), Id_Str => Ident_Str, Sym => Fun_Sym, Scope => Scope); end if; Fun_Info.Errors_In_Expression := Error_Found or else Fun_Info.Errors_In_Expression or else Exp_Result.Errors_In_Expression; Exp_Stack.Push (X => Fun_Info, Stack => E_Stack); end Wf_Named_Argument_Association; spark-2012.0.deb/examiner/errorhandler-conversions-tostring-depsemanticerr-depsemanticerrexpl.adb0000644000175000017500000000372111753202337032530 0ustar eugeneugenseparate (ErrorHandler.Conversions.ToString.DepSemanticErr) procedure DepSemanticErrExpl (E_Str : in out E_Strings.T) is begin case Err_Num.ErrorNum is when 1 => E_Strings.Append_String (E_Str => E_Str, Str => "XXX occurred as an export in the earlier dependency relation but" & " neither XXX nor any refinement constituent of it occurs in the" & " refined dependency relation."); when 2 => E_Strings.Append_String (E_Str => E_Str, Str => "A refinement constituent of XXX occurs as an export in the" & " refined dependency relation but XXX does not occur as an export in" & " the earlier dependency relation."); when 3 => E_Strings.Append_String (E_Str => E_Str, Str => "The dependency of the exported value of XXX on the imported value" & " of YYY occurs in the earlier dependency relation but in the refined" & " dependency relation, no constituents of XXX" & " depend on any constituents of YYY."); when 4 => E_Strings.Append_String (E_Str => E_Str, Str => "A refined dependency relation states a dependency of XXX or a" & " constituent of XXX on YYY or a constituent of YYY, but in the" & " earlier relation, no dependency of XXX on YYY is stated."); when 5 => E_Strings.Append_String (E_Str => E_Str, Str => "Either a dependency of a constituent of XXX on at least one" & " constituent of XXX occurs in the refined dependency relation, or" & " not all the constituents of XXX occur as exports in" & " the refined dependency relation. 
However, the dependency of XXX on" & " itself does not occur in the earlier dependency relation."); when others => null; end case; end DepSemanticErrExpl; spark-2012.0.deb/examiner/graph.ads0000644000175000017500000003543611753202336016056 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Cells; with Dictionary; with ExaminerConstants; with E_Strings; with Labels; with SPARK_IO; use type SPARK_IO.File_Status; --# inherit Cells, --# Clists, --# CommandLineData, --# DAG_IO, --# Debug, --# Declarations, --# Dictionary, --# ExaminerConstants, --# E_Strings, --# Labels, --# LexTokenManager, --# Pairs, --# SPARK_IO, --# Statistics, --# Structures, --# SystemErrors; package Graph --# own Table : Table_Type; --# initializes Table; is ---------------------------------------------------------------------------- -- The Graph.Table state forms the core data structure of the VC Generator. -- -- It represents the Basic Path Graph (BPG) of the subprogram for which -- we are generating VCs. -- -- The BPG is initially built by DAG.BuildGraph, based on data coming -- from the Syntax Tree, Dictionary and Flow Analyser. This BPG has many -- "Unspecified" nodes representing every statement of the subprogram under -- analysis. The "arcs" of the graph (between nodes) reflect the simple -- control-flow structure of the source program. Each of these is -- generated with a predicate/action Pair structure, representing the -- path-traversal condition and (possible) assignment performed by that arc. -- -- After DAG.BuildGraph, the calling environment then invokes Graph.Gen_VCs. -- -- After Gen_VCs -- ------------- -- All "Unspecified" nodes are removed, leaving only nodes representing -- the pre-codition, post-condition, assertions, checks and so on. -- At this point the Pair attached to each arc represents the Verification -- Condition for that basic path, after application of Hoare's assignment -- axiom to generate weakest pre-conditions and so on. -- ---------------------------------------------------------------------------- type Dump_Kind is (DPCs, VCs, PFs); subtype Valid_Dump_Kind is Dump_Kind range DPCs .. VCs; subtype DOT_Dump_Kind is Dump_Kind range VCs .. PFs; --# type Table_Type is abstract; -- for proof functions below -- Each node (or "Statement") in the basic-path graph (BPG) is numbered thus... subtype Matrix_Index is Integer range 1 .. 
ExaminerConstants.VCGMatrixOrder; -- ...and has a type: type Proof_Context_Type is ( Assertion, -- Explicit assert annotation Default_Assertion, -- Examiner-generated assertion (loop invariant) Check_Statement, -- Explicit check annotation Run_Time_Check, -- Examiner-generated run-time check Precon_Check, -- Check of a call to a subprogram with a precondition Postcondition, -- Postcodition Precondition, -- Precondition Unspecified); -- Other nodes ---------------------------------------------------------------------- -- The BPG has various attributes which are manipulated by the -- following operations ---------------------------------------------------------------------- ----------------------- -- Number of Statements ----------------------- function Get_Nmbr_Of_Stmts return Matrix_Index; --# global in Table; procedure Inc_Nmbr_Of_Stmts; --# global in out Table; --# derives Table from *; --# pre Get_Nmbr_Of_Stmts (Table) < ExaminerConstants.VCGMatrixOrder; --# post Get_Nmbr_Of_Stmts (Table) = Get_Nmbr_Of_Stmts (Table~) + 1; procedure Set_Nmbr_Of_Stmts (N : in Matrix_Index); --# global in out Table; --# derives Table from *, --# N; --# post Get_Nmbr_Of_Stmts (Table) = N; ----------------- -- Proof Contexts ----------------- -- Returns the Proof_Context_Type for statement N --# function Get_Proof_Context (T : in Table_Type; --# N : in Matrix_Index) return Proof_Context_Type; -- Sets the Proof Context of the largest numbered statement procedure Set_Proof_Context (X : in Proof_Context_Type); --# global in out Table; --# derives Table from *, --# X; --# post Get_Proof_Context (Table, Get_Nmbr_Of_Stmts (Table)) = X; -- Sets the Proof Context for the precondition, which -- is always statement 1. procedure Set_First_Proof_Context (X : in Proof_Context_Type); --# global in out Table; --# derives Table from *, --# X; --# post Get_Proof_Context (Table, 1) = X; --------------------- -- Assertion Location --------------------- -- Each statement in the BPG may have a (possibly null) -- Assertion associated with it. This is an FDL predicate -- represented as s DAG. 
-- Returns the Assertion for the node denoted by Get_Nmbr_Of_Stms function Get_Assertion_Locn return Cells.Cell; --# global in Table; -- Returns the Assertion for the node denoted by Get_Nmbr_Of_Stms - 1 function Get_Preceding_Assertion_Locn return Cells.Cell; --# global in Table; --# pre Get_Nmbr_Of_Stmts (Table) > 1; -- Sets the Assertion for the node denoted by Get_Nmbr_Of_Stms procedure Set_Assertion_Locn (X : in Cells.Cell); --# global in out Table; --# derives Table from *, --# X; --# post Get_Assertion_Locn (Table) = X; -- Sets the Assertion for the precondition, which is always statement 1 procedure Set_First_Assertion_Locn (X : in Cells.Cell); --# global in out Table; --# derives Table from *, --# X; -------------------------------------------------------------------- -- Text_Line_Nmbr - associates a statement with a source line number -------------------------------------------------------------------- -- Set the line number associated with the statement denoted by Get_Nmbr_Of_Stmts procedure Set_Text_Line_Nmbr (X : in Integer); --# global in out Table; --# derives Table from *, --# X; -- Set the line number associated with statement denoted by Index procedure Insert_Text_Line_Nmbr (Index : in Matrix_Index; X : in Integer); --# global in out Table; --# derives Table from *, --# Index, --# X; ------------------------------------------------------------------ -- Refinements VCs - In addition to those generated directly -- from the BPG, two additional VCs are generated for consistency -- of state refinement and two more for sub-class consistency (aka -- the VCs for "LSP" or "Covariance".) These are stored in four -- further attributes of this package, thus: ------------------------------------------------------------------ procedure Set_Refinement_Pre_Check (X : in Cells.Cell); --# global in out Table; --# derives Table from *, --# X; procedure Set_Refinement_Post_Check (X : in Cells.Cell); --# global in out Table; --# derives Table from *, --# X; procedure Set_Subclass_Pre_Check (X : in Cells.Cell); --# global in out Table; --# derives Table from *, --# X; procedure Set_Subclass_Post_Check (X : in Cells.Cell); --# global in out Table; --# derives Table from *, --# X; -------------------------------------- -- (Re-)Initialization of this package -------------------------------------- -- Reset the BPG ready to start over with a new subprogram. -- The Table state is returned to the same state it was -- in following the elaboration of this package. procedure Reinitialize_Graph; --# global out Table; --# derives Table from ; ------------------------------------------------------------------ -- Creation of the BPG -- -- The BPG is stored as a matrix of "Coefficients", each -- of which represents an arc from statement I to -- statement J with Label K. 
-- -- The following procedure provides a Constructor for -- adding an arc to the BPG ------------------------------------------------------------------ procedure Create_Coeff (Heap : in out Cells.Heap_Record; I, J : in Matrix_Index; K : in Labels.Label); --# global in out Statistics.TableUsage; --# in out Table; --# derives Heap from *, --# I, --# J, --# K, --# Table & --# Statistics.TableUsage from *, --# Heap & --# Table from *, --# Heap, --# I, --# J; -- ...and lookup of a Label given statements I and J function Coefficient (Heap : Cells.Heap_Record; I, J : Matrix_Index) return Labels.Label; --# global in Table; ------------------------------------------------------------------ -- Generation and Printing of Verification Conditions ------------------------------------------------------------------ -- Gen_VCs essentially reduces the BPG by removing "Unspecified" -- statements, using Hoare's assignment axiom. The traveral- -- condition and action of such statements are back-substituted, -- and this process is repeated until only explicit pre-condition, -- post-condition, assertion, and check statements are left. -- The arcs between those remaining statements are left decorated -- with their appropriate VC. procedure Gen_VCs (Heap : in out Cells.Heap_Record; Output_File : in SPARK_IO.File_Type; Output_File_Name : in E_Strings.T; Scope : in Dictionary.Scopes; Gen_VC_Failure : out Boolean); --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out Table; --# derives Gen_VC_Failure, --# Heap, --# Table from CommandLineData.Content, --# Heap, --# Table & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Heap, --# Output_File, --# Table & --# Statistics.TableUsage from *, --# CommandLineData.Content, --# Heap, --# Table & --# null from Output_File_Name, --# Scope; procedure Print_VCs_Or_DPCs (Heap : in out Cells.Heap_Record; Output_File : in SPARK_IO.File_Type; Scope : in Dictionary.Scopes; Kind : in Valid_Dump_Kind); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in Table; --# in out Declarations.State; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Declarations.State, --# Heap, --# LexTokenManager.State, --# Statistics.TableUsage from *, --# Declarations.State, --# Dictionary.Dict, --# Heap, --# Kind, --# LexTokenManager.State, --# Table & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Declarations.State, --# Dictionary.Dict, --# Heap, --# Kind, --# LexTokenManager.State, --# Output_File, --# Scope, --# Table; ------------------------------------------- -- Debug and Tracing ------------------------------------------- -- Produces a human-readable output of Graph.Table on standard output procedure Dump_Graph_Table (Heap : in out Cells.Heap_Record; Scope : in Dictionary.Scopes; Print_Edges_As : in DOT_Dump_Kind); --# global in Table; --# derives Heap from * & --# null from Print_Edges_As, --# Scope, --# Table; -- Produces BPG in DOT format. -- The DOT format is that expected by the tools from www.graphviz.org -- -- Output_File_Name is the name of the VCG file being generated. This is -- transformed by removing the ".vcg" extension, adding the numeric -- suffix given, then adding ".dot" as a new extension. This allows -- for a numerically indexed sequence of graphs to be produced for a single -- subprogram. 
procedure Dump_Graph_Dot (Heap : in out Cells.Heap_Record; Output_File_Name : in E_Strings.T; Output_File_Name_Suffix : in Natural; Scope : in Dictionary.Scopes; Print_Edges_As : in DOT_Dump_Kind); --# global in Table; --# derives Heap from * & --# null from Output_File_Name, --# Output_File_Name_Suffix, --# Print_Edges_As, --# Scope, --# Table; end Graph; ././@LongLink0000000000000000000000000000016200000000000011564 Lustar rootrootspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_real.adbspark-2012.0.deb/examiner/sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaratio0000644000175000017500000013400011753202336033116 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SLI; separate (Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Full_Type_Declaration) procedure Wf_Real (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Ident_Node : in STree.SyntaxNode; Dec_Loc : in LexTokenManager.Token_Position; The_Heap : in out Heap.HeapRecord) is The_Child_Node : STree.SyntaxNode; ------------------------------------------------------------------------ procedure Check_Accuracy (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Is_Floating : in Boolean; Has_Range : in Boolean; Dec_Loc : in LexTokenManager.Token_Position; The_Heap : in out Heap.HeapRecord; Accuracy : out LexTokenManager.Lex_String) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# out Aggregate_Stack.State; --# derives Accuracy from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Is_Floating, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# STree.Table, --# The_Heap from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Has_Range, --# Is_Floating, --# LexTokenManager.State, --# Node, --# Scope, --# 
SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# SLI.State from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# Statistics.TableUsage from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.simple_expression; --# post STree.Table = STree.Table~; is Accuracy_Type : Exp_Record; Unused_Component_Data : ComponentManager.ComponentData; Unwanted_Seq : SeqAlgebra.Seq; ------------------------------- function Type_Correct (Type_Sym : Dictionary.Symbol; Scope : Dictionary.Scopes; Is_Floating : Boolean) return Boolean --# global in Dictionary.Dict; is Result : Boolean; begin if Is_Floating then Result := Dictionary.IsIntegerTypeMark (Type_Sym, Scope); else Result := Dictionary.IsRealTypeMark (Type_Sym, Scope); end if; return Result or else Dictionary.IsUnknownTypeMark (Type_Sym); end Type_Correct; ------------------------------- function Error_To_Raise (Is_Floating : Boolean) return Natural is E : Natural; begin if Is_Floating then E := 108; else E := 109; end if; return E; end Error_To_Raise; ------------------------------- procedure Check_Against_System_Constants (Dec_Loc : in LexTokenManager.Token_Position; Has_Range : in Boolean; Accuracy_Type : in Exp_Record) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Accuracy_Type, --# CommandLineData.Content, --# Dec_Loc, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Has_Range, --# LexTokenManager.State, --# SPARK_IO.File_Sys; is System_Sym : Dictionary.Symbol; Max_Digits_Sym : Dictionary.Symbol; Max_Base_Digits_Sym : Dictionary.Symbol; Unwanted_ME : Maths.ErrorCode; Max_Val : LexTokenManager.Lex_String; Result : Maths.Value; begin case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => -- No System in SPARK83, so nothing can be done here null; when CommandLineData.SPARK95_Onwards => -- see if package System exists... System_Sym := Dictionary.LookupItem (Name => LexTokenManager.System_Token, Scope => Dictionary.GlobalScope, Context => Dictionary.ProgramContext, Full_Package_Name => False); if not Dictionary.Is_Null_Symbol (System_Sym) then if not Has_Range then -- if so, and we're an unranged fp type then see if -- System.Max_Digits exists ... Max_Digits_Sym := Dictionary.LookupSelectedItem (Prefix => System_Sym, Selector => LexTokenManager.Max_Digits_Token, Scope => Dictionary.GetScope (System_Sym), Context => Dictionary.ProgramContext); if not Dictionary.Is_Null_Symbol (Max_Digits_Sym) then -- if it does... Max_Val := Dictionary.Get_Value (The_Constant => Max_Digits_Sym); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Max_Val, Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq then -- and if it has a sensible value, then check -- that the requested number of digits does not -- exceed it. 
--# accept Flow, 10, Unwanted_ME, "Expected ineffective assignment"; Maths.Greater (Accuracy_Type.Value, Maths.ValueRep (Max_Val), Result, Unwanted_ME); --# end accept; if Result = Maths.BoolToValue (True) then ErrorHandler.Semantic_Error (Err_Num => 785, Reference => ErrorHandler.No_Reference, Position => Dec_Loc, Id_Str => LexTokenManager.Null_String); end if; end if; end if; else -- if so, and we're a ranged fp type then see if -- System.Max_Base_Digits exists ... Max_Base_Digits_Sym := Dictionary.LookupSelectedItem (Prefix => System_Sym, Selector => LexTokenManager.Max_Base_Digits_Token, Scope => Dictionary.GetScope (System_Sym), Context => Dictionary.ProgramContext); if not Dictionary.Is_Null_Symbol (Max_Base_Digits_Sym) then -- if it does... Max_Val := Dictionary.Get_Value (The_Constant => Max_Base_Digits_Sym); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Max_Val, Lex_Str2 => LexTokenManager.Null_String) /= LexTokenManager.Str_Eq then -- and if it has a sensible value, then check -- that the requested number of digits does not -- exceed it. --# accept Flow, 10, Unwanted_ME, "Expected ineffective assignment"; Maths.Greater (Accuracy_Type.Value, Maths.ValueRep (Max_Val), Result, Unwanted_ME); --# end accept; if Result = Maths.BoolToValue (True) then ErrorHandler.Semantic_Error (Err_Num => 786, Reference => ErrorHandler.No_Reference, Position => Dec_Loc, Id_Str => LexTokenManager.Null_String); end if; end if; end if; end if; end if; end case; --# accept Flow, 33, Unwanted_ME, "Expected to be neither referenced nor exported"; end Check_Against_System_Constants; procedure Check_Accuracy_Is_Positive (Node_Pos : in LexTokenManager.Token_Position; Is_Floating : in Boolean; Accuracy_Type : in Exp_Record) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in LexTokenManager.State; --# in out ErrorHandler.Error_Context; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from Accuracy_Type, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Is_Floating, --# LexTokenManager.State, --# Node_Pos, --# SPARK_IO.File_Sys; is T : Maths.Value; Err : Natural; begin T := Accuracy_Type.Value; Maths.Absolute (T); if (Accuracy_Type.Value = Maths.ZeroReal) or else (Accuracy_Type.Value = Maths.ZeroInteger) or else (Accuracy_Type.Value /= T) then if Is_Floating then Err := 787; else Err := 788; end if; ErrorHandler.Semantic_Error (Err_Num => Err, Reference => ErrorHandler.No_Reference, Position => Node_Pos, Id_Str => LexTokenManager.Null_String); end if; end Check_Accuracy_Is_Positive; begin -- Check_Accuracy SeqAlgebra.CreateSeq (The_Heap, Unwanted_Seq); ComponentManager.Initialise (Unused_Component_Data); --# accept Flow, 10, Unused_Component_Data, "Expected ineffective assignment"; Walk_Expression_P.Walk_Expression (Exp_Node => Node, Scope => Scope, Type_Context => Dictionary.GetUnknownTypeMark, Context_Requires_Static => True, Ref_Var => Unwanted_Seq, Result => Accuracy_Type, Component_Data => Unused_Component_Data, The_Heap => The_Heap); --# end accept; SeqAlgebra.DisposeOfSeq (The_Heap, Unwanted_Seq); Maths.StorageRep (Accuracy_Type.Value, Accuracy); if not Type_Correct (Type_Sym => Accuracy_Type.Type_Symbol, Scope => Scope, Is_Floating => Is_Floating) then Accuracy := LexTokenManager.Null_String; -- no value in error case ErrorHandler.Semantic_Error (Err_Num => Error_To_Raise (Is_Floating => Is_Floating), Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), 
Id_Str => LexTokenManager.Null_String); end if; if Accuracy_Type.Is_Static then if Accuracy_Type.Is_ARange then ErrorHandler.Semantic_Error (Err_Num => 114, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); else Check_Accuracy_Is_Positive (Node_Pos => Node_Position (Node => Node), Is_Floating => Is_Floating, Accuracy_Type => Accuracy_Type); end if; else ErrorHandler.Semantic_Error (Err_Num => 36, Reference => 1, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; if Is_Floating then -- Check against the target configuration file; -- System.Max_Digits or Max_Base_Digits may be defined. Check_Against_System_Constants (Dec_Loc => Dec_Loc, Has_Range => Has_Range, Accuracy_Type => Accuracy_Type); end if; end Check_Accuracy; ------------------------------------------------------------------------ procedure Check_Range_Item (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Is_Floating : in Boolean; The_Heap : in out Heap.HeapRecord; Static : out Boolean; Bound_Val : out LexTokenManager.Lex_String) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# out Aggregate_Stack.State; --# derives Aggregate_Stack.State, --# Dictionary.Dict, --# LexTokenManager.State, --# Static, --# STree.Table, --# The_Heap from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# Bound_Val from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# Is_Floating, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Is_Floating, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# SLI.State from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Node, --# Scope, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# Statistics.TableUsage from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.attribute or --# Syntax_Node_Type (Node, STree.Table) = SP_Symbols.simple_expression; --# post STree.Table = STree.Table~; is Range_Type : Exp_Record; Unused_Component_Data : ComponentManager.ComponentData; Unwanted_Seq : SeqAlgebra.Seq; ------------------------------- function Type_Correct (Type_Sym : Dictionary.Symbol; Scope : Dictionary.Scopes; Is_Floating : Boolean) return Boolean --# global in Dictionary.Dict; is Result : Boolean; begin if Is_Floating then Result := Dictionary.IsRealTypeMark (Type_Sym, Scope); else Result := (Dictionary.IsUniversalRealType (Type_Sym) or else 
Dictionary.IsFixedPointTypeMark (Type_Sym, Scope)); end if; return Result or else Dictionary.IsUnknownTypeMark (Type_Sym); end Type_Correct; begin -- Check_Range_Item SeqAlgebra.CreateSeq (The_Heap, Unwanted_Seq); ComponentManager.Initialise (Unused_Component_Data); --# accept Flow, 10, Unused_Component_Data, "Expected ineffective assignment"; Walk_Expression_P.Walk_Expression (Exp_Node => Node, Scope => Scope, Type_Context => Dictionary.GetUnknownTypeMark, Context_Requires_Static => True, Ref_Var => Unwanted_Seq, Result => Range_Type, Component_Data => Unused_Component_Data, The_Heap => The_Heap); --# end accept; SeqAlgebra.DisposeOfSeq (The_Heap, Unwanted_Seq); Maths.StorageRep (Range_Type.Value, Bound_Val); if not Type_Correct (Type_Sym => Range_Type.Type_Symbol, Scope => Scope, Is_Floating => Is_Floating) then Bound_Val := LexTokenManager.Null_String; ErrorHandler.Semantic_Error (Err_Num => 38, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => LexTokenManager.Null_String); end if; Static := Range_Type.Is_Static; end Check_Range_Item; ------------------------------------------------------------------------ function Attribute_Found (Node : STree.SyntaxNode) return Boolean --# global in STree.Table; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.range_constraint; is begin return Syntax_Node_Type (Node => Child_Node (Current_Node => Child_Node (Current_Node => Node))) = SP_Symbols.attribute; end Attribute_Found; ------------------------------------------------------------------------ procedure Wf_Floating_Point_Constraint (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Ident_Node : in STree.SyntaxNode; Dec_Loc : in LexTokenManager.Token_Position; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# out Aggregate_Stack.State; --# derives Aggregate_Stack.State, --# LexTokenManager.State, --# The_Heap from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# Ident_Node, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# Statistics.TableUsage from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap; --# pre 
Syntax_Node_Type (Node, STree.Table) = SP_Symbols.floating_point_constraint and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; --# post STree.Table = STree.Table~; is Accuracy_Node, Range_Node : STree.SyntaxNode; Range_Node_Pos : LexTokenManager.Token_Position; Left_Is_Static, Right_Is_Static : Boolean; Accuracy : LexTokenManager.Lex_String; Upper : LexTokenManager.Lex_String := LexTokenManager.Null_String; Lower : LexTokenManager.Lex_String := LexTokenManager.Null_String; Type_Symbol : Dictionary.Symbol; begin Accuracy_Node := Child_Node (Current_Node => Node); -- ASSUME Accuracy_Node = floating_accuracy_definition SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Accuracy_Node) = SP_Symbols.floating_accuracy_definition, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Accuracy_Node = floating_accuracy_definition in Wf_Floating_Point_Constraint"); Range_Node := Next_Sibling (Current_Node => Accuracy_Node); -- ASSUME Range_Node = range_constraint OR NULL SystemErrors.RT_Assert (C => Range_Node = STree.NullNode or else Syntax_Node_Type (Node => Range_Node) = SP_Symbols.range_constraint, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Range_Node = range_constraint OR NULL in Wf_Floating_Point_Constraint"); Accuracy_Node := Child_Node (Current_Node => Accuracy_Node); -- ASSUME Accuracy_Node = simple_expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Accuracy_Node) = SP_Symbols.simple_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Accuracy_Node = simple_expression in Wf_Floating_Point_Constraint"); Check_Accuracy (Node => Accuracy_Node, Scope => Scope, Is_Floating => True, Has_Range => Range_Node /= STree.NullNode, Dec_Loc => Dec_Loc, The_Heap => The_Heap, Accuracy => Accuracy); if Syntax_Node_Type (Node => Range_Node) = SP_Symbols.range_constraint then -- ASSUME Range_Node = range_constraint if Attribute_Found (Node => Range_Node) then ErrorHandler.Semantic_Error (Err_Num => 98, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Range_Node), Id_Str => LexTokenManager.Null_String); else Range_Node_Pos := Node_Position (Node => Range_Node); Range_Node := Child_Node (Current_Node => Child_Node (Current_Node => Range_Node)); -- ASSUME Range_Node = attribute OR simple_expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Range_Node) = SP_Symbols.attribute or else Syntax_Node_Type (Node => Range_Node) = SP_Symbols.simple_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Range_Node = attribute OR simple_expression in Wf_Floating_Point_Constraint"); Check_Range_Item (Node => Range_Node, Scope => Scope, Is_Floating => True, The_Heap => The_Heap, Static => Left_Is_Static, Bound_Val => Lower); Range_Node := Next_Sibling (Current_Node => Range_Node); -- ASSUME Range_Node = simple_expression OR NULL if Syntax_Node_Type (Node => Range_Node) = SP_Symbols.simple_expression then -- ASSUME Range_Node = simple_expression Check_Range_Item (Node => Range_Node, Scope => Scope, Is_Floating => True, The_Heap => The_Heap, Static => Right_Is_Static, Bound_Val => Upper); elsif Range_Node = STree.NullNode then -- ASSUME Range_Node = NULL Right_Is_Static := True; else Right_Is_Static := True; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Range_Node = simple_expression OR NULL in Wf_Floating_Point_Constraint"); end if; if not (Left_Is_Static and then Right_Is_Static) then ErrorHandler.Semantic_Error (Err_Num => 45, Reference => 1, 
Position => Range_Node_Pos, Id_Str => LexTokenManager.Null_String); end if; end if; end if; Empty_Type_Check (Dec_Loc => Dec_Loc, Lower => Lower, Upper => Upper); Dictionary.Add_Floating_Point_Type (Name => Node_Lex_String (Node => Ident_Node), Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Dec_Loc, End_Position => Dec_Loc), Lower => Lower, Upper => Upper, Error_Bound => Accuracy, Scope => Scope, Context => Dictionary.ProgramContext, The_Type => Type_Symbol); STree.Add_Node_Symbol (Node => Ident_Node, Sym => Type_Symbol); if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Symbol (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Ident_Node, Symbol => Type_Symbol, Is_Declaration => True); end if; end Wf_Floating_Point_Constraint; ------------------------------------------------------------------------ procedure Wf_Fixed_Point_Constraint (Node : in STree.SyntaxNode; Scope : in Dictionary.Scopes; Ident_Node : in STree.SyntaxNode; Dec_Loc : in LexTokenManager.Token_Position; The_Heap : in out Heap.HeapRecord) --# global in CommandLineData.Content; --# in ContextManager.Ops.File_Heap; --# in ContextManager.Ops.Unit_Heap; --# in ContextManager.Ops.Unit_Stack; --# in out Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SLI.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# in out STree.Table; --# out Aggregate_Stack.State; --# derives Aggregate_Stack.State, --# LexTokenManager.State, --# The_Heap from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# Dictionary.Dict, --# STree.Table from CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# Ident_Node, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap & --# ErrorHandler.Error_Context from *, --# CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# SLI.State, --# SPARK_IO.File_Sys from CommandLineData.Content, --# ContextManager.Ops.File_Heap, --# ContextManager.Ops.Unit_Heap, --# ContextManager.Ops.Unit_Stack, --# Dec_Loc, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Ident_Node, --# LexTokenManager.State, --# Node, --# Scope, --# SLI.State, --# SPARK_IO.File_Sys, --# STree.Table, --# The_Heap & --# Statistics.TableUsage from *, --# CommandLineData.Content, --# ContextManager.Ops.Unit_Stack, --# Dictionary.Dict, --# LexTokenManager.State, --# Node, --# Scope, --# STree.Table, --# The_Heap; --# pre Syntax_Node_Type (Node, STree.Table) = SP_Symbols.fixed_point_constraint and --# Syntax_Node_Type (Ident_Node, STree.Table) = SP_Symbols.identifier; --# post STree.Table = STree.Table~; is Accuracy_Node, Range_Node : STree.SyntaxNode; Range_Node_Pos : LexTokenManager.Token_Position; Left_Is_Static, Right_Is_Static : Boolean; Accuracy : LexTokenManager.Lex_String; Upper : LexTokenManager.Lex_String := LexTokenManager.Null_String; Lower : LexTokenManager.Lex_String := LexTokenManager.Null_String; Type_Symbol : Dictionary.Symbol; begin Accuracy_Node := Child_Node (Current_Node => Node); -- ASSUME Accuracy_Node = fixed_accuracy_definition SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => 
Accuracy_Node) = SP_Symbols.fixed_accuracy_definition, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Accuracy_Node = fixed_accuracy_definition in Wf_Fixed_Point_Constraint"); Range_Node := Next_Sibling (Current_Node => Accuracy_Node); -- ASSUME Range_Node = range_constraint OR NULL SystemErrors.RT_Assert (C => Range_Node = STree.NullNode or else Syntax_Node_Type (Node => Range_Node) = SP_Symbols.range_constraint, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Range_Node = range_constraint OR NULL in Wf_Fixed_Point_Constraint"); Accuracy_Node := Child_Node (Current_Node => Accuracy_Node); -- ASSUME Accuracy_Node = simple_expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Accuracy_Node) = SP_Symbols.simple_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Accuracy_Node = simple_expression in Wf_Fixed_Point_Constraint"); Check_Accuracy (Node => Accuracy_Node, Scope => Scope, Is_Floating => False, Has_Range => False, Dec_Loc => Dec_Loc, The_Heap => The_Heap, Accuracy => Accuracy); if Syntax_Node_Type (Node => Range_Node) = SP_Symbols.range_constraint then -- ASSUME Range_Node = range_constraint if Attribute_Found (Node => Range_Node) then ErrorHandler.Semantic_Error (Err_Num => 98, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Range_Node), Id_Str => LexTokenManager.Null_String); else Range_Node_Pos := Node_Position (Node => Range_Node); Range_Node := Child_Node (Current_Node => Child_Node (Current_Node => Range_Node)); -- ASSUME Range_Node = attribute OR simple_expression SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Range_Node) = SP_Symbols.attribute or else Syntax_Node_Type (Node => Range_Node) = SP_Symbols.simple_expression, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Range_Node = attribute OR simple_expression in Wf_Fixed_Point_Constraint"); Check_Range_Item (Node => Range_Node, Scope => Scope, Is_Floating => True, The_Heap => The_Heap, Static => Left_Is_Static, Bound_Val => Lower); Range_Node := Next_Sibling (Current_Node => Range_Node); -- ASSUME Range_Node = simple_expression OR NULL if Syntax_Node_Type (Node => Range_Node) = SP_Symbols.simple_expression then -- ASSUME Range_Node = simple_expression Check_Range_Item (Node => Range_Node, Scope => Scope, Is_Floating => True, The_Heap => The_Heap, Static => Right_Is_Static, Bound_Val => Upper); elsif Range_Node = STree.NullNode then -- ASSUME Range_Node = NULL Right_Is_Static := True; else Right_Is_Static := True; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Range_Node = simple_expression OR NULL in Wf_Fixed_Point_Constraint"); end if; if not (Left_Is_Static and then Right_Is_Static) then ErrorHandler.Semantic_Error (Err_Num => 45, Reference => 1, Position => Range_Node_Pos, Id_Str => LexTokenManager.Null_String); end if; end if; elsif Range_Node = STree.NullNode then -- ASSUME Range_Node = NULL ErrorHandler.Semantic_Error (Err_Num => 57, Reference => ErrorHandler.No_Reference, Position => Dec_Loc, Id_Str => LexTokenManager.Null_String); end if; Empty_Type_Check (Dec_Loc => Dec_Loc, Lower => Lower, Upper => Upper); Dictionary.Add_Fixed_Point_Type (Name => Node_Lex_String (Node => Ident_Node), Comp_Unit => ContextManager.Ops.Current_Unit, Declaration => Dictionary.Location'(Start_Position => Dec_Loc, End_Position => Dec_Loc), Lower => Lower, Upper => Upper, Error_Bound => Accuracy, Scope => Scope, Context => Dictionary.ProgramContext, The_Type => Type_Symbol); STree.Add_Node_Symbol 
(Node => Ident_Node, Sym => Type_Symbol); if ErrorHandler.Generate_SLI then SLI.Generate_Xref_Symbol (Comp_Unit => ContextManager.Ops.Current_Unit, Parse_Tree => Ident_Node, Symbol => Type_Symbol, Is_Declaration => True); end if; end Wf_Fixed_Point_Constraint; begin -- Wf_Real The_Child_Node := Child_Node (Current_Node => Node); if Syntax_Node_Type (Node => The_Child_Node) = SP_Symbols.fixed_point_constraint then -- ASSUME Next_Node = fixed_point_constraint Wf_Fixed_Point_Constraint (Node => The_Child_Node, Scope => Scope, Ident_Node => Ident_Node, Dec_Loc => Dec_Loc, The_Heap => The_Heap); elsif Syntax_Node_Type (Node => The_Child_Node) = SP_Symbols.floating_point_constraint then -- ASSUME Next_Node = floating_point_constraint Wf_Floating_Point_Constraint (Node => The_Child_Node, Scope => Scope, Ident_Node => Ident_Node, Dec_Loc => Dec_Loc, The_Heap => The_Heap); else SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node = floating_point_constraint OR fixed_point_constraint in Wf_Real"); end if; Heap.ReportUsage (The_Heap); --# accept Flow, 602, Aggregate_Stack.State, Aggregate_Stack.State, "Partial initialization OK here"; end Wf_Real; spark-2012.0.deb/examiner/sem-compunit-up_wf_protected_body.adb0000644000175000017500000000676511753202336023563 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (Sem.CompUnit) procedure Up_Wf_Protected_Body (Node : in STree.SyntaxNode; Scope : in out Dictionary.Scopes) is Subprog_It : Dictionary.Iterator; Virtual_Element_It : Dictionary.Iterator; Closing_Ident_Node : STree.SyntaxNode; begin Closing_Ident_Node := Last_Sibling_Of (Start_Node => Child_Node (Current_Node => Node)); -- ASSUME Closing_Ident_Node = identifier SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Closing_Ident_Node) = SP_Symbols.identifier, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Closing_Ident_Node = identifier in Up_Wf_Protected_Body"); -- run checks here to ensure that all declared subprograms have bodies Subprog_It := Dictionary.First_Visible_Subprogram (The_Package_Or_Type => Dictionary.GetRegion (Scope)); while not Dictionary.IsNullIterator (Subprog_It) loop if not Dictionary.HasBody (Dictionary.CurrentSymbol (Subprog_It)) then ErrorHandler.Semantic_Error (Err_Num => 996, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Closing_Ident_Node), Id_Str => Dictionary.GetSimpleName (Dictionary.CurrentSymbol (Subprog_It))); end if; Subprog_It := Dictionary.NextSymbol (Subprog_It); end loop; -- Check all the virtual elements have been seen. 
   Virtual_Element_It := Dictionary.FirstVirtualElement (Dictionary.GetRegion (Scope));
   while not Dictionary.IsNullIterator (Virtual_Element_It) loop
      if Dictionary.IsConcreteOwnVariable (Dictionary.CurrentSymbol (Virtual_Element_It))
        and then not Dictionary.VirtualElementSeenByOwner (Dictionary.CurrentSymbol (Virtual_Element_It)) then
         -- This variable appears in the protects property but is not used by the
         -- protected type. Don't output the error if the variable is abstract as
         -- there will have already been an error output in the refinement clause.
         ErrorHandler.Semantic_Error_Sym2
           (Err_Num   => 947,
            Reference => ErrorHandler.No_Reference,
            Position  => Node_Position (Node => Closing_Ident_Node),
            Sym       => Dictionary.CurrentSymbol (Virtual_Element_It),
            Sym2      => Dictionary.GetRegion (Scope),
            Scope     => Scope);
      end if;
      Virtual_Element_It := Dictionary.NextSymbol (Virtual_Element_It);
   end loop;

   -- step out to enclosing scope for continued tree walk
   Scope := Dictionary.GetEnclosingScope (Scope);
end Up_Wf_Protected_Body;
spark-2012.0.deb/examiner/sem-wf_formal_part-wf_param.adb0000644000175000017500000002167111753202336022306 0ustar eugeneugen
-------------------------------------------------------------------------------
-- (C) Altran Praxis Limited
-------------------------------------------------------------------------------
--
-- The SPARK toolset is free software; you can redistribute it and/or modify it
-- under terms of the GNU General Public License as published by the Free
-- Software Foundation; either version 3, or (at your option) any later
-- version. The SPARK toolset is distributed in the hope that it will be
-- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
-- Public License for more details. You should have received a copy of the GNU
-- General Public License distributed with the SPARK toolset; see file
-- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of
-- the license.
-- --============================================================================= separate (Sem.Wf_Formal_Part) procedure Wf_Param (Node : in STree.SyntaxNode; Current_Scope : in Dictionary.Scopes; Subprog_Sym : in Dictionary.Symbol; First_Occurrence : in Boolean; Context : in Dictionary.Contexts; Param_Count : in out Natural; Errors_Found : in out Boolean) is Ident_List_Node, Next_Node, Type_Node : STree.SyntaxNode; It : STree.Iterator; Node_Type : SP_Symbols.SP_Symbol; Mode : Dictionary.Modes; Sym, Type_Sym : Dictionary.Symbol; Ident_Str : LexTokenManager.Lex_String; Exit_Loop : Boolean := False; function Modes_Equivalent (Mode1, Mode2 : Dictionary.Modes) return Boolean --# global in CommandLineData.Content; is Result : Boolean; begin case CommandLineData.Content.Language_Profile is when CommandLineData.SPARK83 => Result := Mode1 = Mode2; when CommandLineData.SPARK95_Onwards => Result := (Mode1 = Mode2) or else (Mode1 = Dictionary.InMode and then Mode2 = Dictionary.DefaultMode) or else (Mode1 = Dictionary.DefaultMode and then Mode2 = Dictionary.InMode); end case; return Result; end Modes_Equivalent; begin -- Wf_Param Next_Node := Next_Sibling (Current_Node => Child_Node (Current_Node => Node)); -- ASSUME Next_Node = mode SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Next_Node) = SP_Symbols.mode, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Next_Node = mode in Wf_Param"); Node_Type := Syntax_Node_Type (Node => Child_Node (Current_Node => Next_Node)); -- ASSUME Node_Type = in_mode OR inout_mode OR out_mode OR NULL case Node_Type is when SP_Symbols.in_mode => Mode := Dictionary.InMode; when SP_Symbols.out_mode => Mode := Dictionary.OutMode; when SP_Symbols.inout_mode => Mode := Dictionary.InOutMode; when SP_Symbols.SPEND => Mode := Dictionary.DefaultMode; when others => Mode := Dictionary.InvalidMode; SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Node_Type = in_mode OR inout_mode OR out_mode OR NULL in Wf_Param"); end case; Type_Node := Next_Sibling (Current_Node => Next_Node); -- ASSUME Type_Node = type_mark SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Type_Node) = SP_Symbols.type_mark, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Type_Node = type_mark in Wf_Param"); Wf_Type_Mark (Node => Type_Node, Current_Scope => Current_Scope, Context => Context, Type_Sym => Type_Sym); -- if the type is wrong then an error will be reported by wf_type_mark and Type_Sym will -- be set to the UnknownType Errors_Found := Errors_Found or else Dictionary.IsUnknownTypeMark (Type_Sym); if Dictionary.IsPredefinedSuspensionObjectType (Type_Sym) or else Dictionary.IsProtectedTypeMark (Type_Sym) then Errors_Found := True; ErrorHandler.Semantic_Error (Err_Num => 904, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Type_Node), Id_Str => LexTokenManager.Null_String); end if; Ident_List_Node := Child_Node (Current_Node => Node); -- ASSUME Ident_List_Node = identifier_list SystemErrors.RT_Assert (C => Syntax_Node_Type (Node => Ident_List_Node) = SP_Symbols.identifier_list, Sys_Err => SystemErrors.Invalid_Syntax_Tree, Msg => "Expect Ident_List_Node = identifier_list in Wf_Param"); It := Find_First_Node (Node_Kind => SP_Symbols.identifier, From_Root => Ident_List_Node, In_Direction => STree.Down); while not STree.IsNull (It) and then not Exit_Loop loop Next_Node := Get_Node (It => It); --# assert STree.Table = STree.Table~ and --# Syntax_Node_Type (Next_Node, STree.Table) = SP_Symbols.identifier and --# 
Next_Node = Get_Node (It); if Param_Count < Natural'Last then Param_Count := Param_Count + 1; Ident_Str := Node_Lex_String (Node => Next_Node); --# accept Flow, 41, "Expected stable expression"; if First_Occurrence then --# end accept; if not Dictionary.IsDefined (Name => Ident_Str, Scope => Dictionary.Set_Visibility (The_Visibility => Dictionary.Local, The_Unit => Subprog_Sym), Context => Context, Full_Package_Name => False) then Dictionary.AddSubprogramParameter (Name => Ident_Str, Subprogram => Subprog_Sym, TypeMark => Type_Sym, TypeReference => Dictionary.Location'(Start_Position => Node_Position (Node => Type_Node), End_Position => Node_Position (Node => Type_Node)), Mode => Mode, Comp_Unit => ContextManager.Ops.Current_Unit, Specification => Dictionary.Location'(Start_Position => Node_Position (Node => Next_Node), End_Position => Node_Position (Node => Next_Node))); if Dictionary.IsFunction (Subprog_Sym) and then not (Mode = Dictionary.InMode or else Mode = Dictionary.DefaultMode) then Errors_Found := True; ErrorHandler.Semantic_Error (Err_Num => 64, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => LexTokenManager.Null_String); end if; else Errors_Found := True; ErrorHandler.Semantic_Error (Err_Num => 10, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Next_Node), Id_Str => Ident_Str); end if; else -- subprogram previously declared so check params match if Param_Count <= Dictionary.GetNumberOfSubprogramParameters (Subprog_Sym) then Sym := Dictionary.GetSubprogramParameter (Subprog_Sym, Param_Count); else Sym := Dictionary.NullSymbol; end if; if Dictionary.Is_Null_Symbol (Sym) or else LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Dictionary.GetSimpleName (Sym), Lex_Str2 => Ident_Str) /= LexTokenManager.Str_Eq or else not Modes_Equivalent (Mode1 => Dictionary.GetSubprogramParameterMode (Sym), Mode2 => Mode) or else not Dictionary.Types_Are_Equal (Left_Symbol => Dictionary.GetType (Sym), Right_Symbol => Type_Sym, Full_Range_Subtype => False) then Errors_Found := True; ErrorHandler.Semantic_Error (Err_Num => 153, Reference => 11, Position => Node_Position (Node => Next_Node), Id_Str => Ident_Str); end if; end if; else Errors_Found := True; ErrorHandler.Semantic_Error (Err_Num => 152, Reference => ErrorHandler.No_Reference, Position => Node_Position (Node => Node), Id_Str => Dictionary.GetSimpleName (Subprog_Sym)); Exit_Loop := True; end if; It := STree.NextNode (It); end loop; end Wf_Param; spark-2012.0.deb/Makefile0000644000175000017500000001617011753202341014102 0ustar eugeneugen# ----------------------------------------------------------------------------- # (C) Altran Praxis Limited # ----------------------------------------------------------------------------- # # The SPARK toolset is free software; you can redistribute it and/or modify it # under terms of the GNU General Public License as published by the Free # Software Foundation; either version 3, or (at your option) any later # version. The SPARK toolset is distributed in the hope that it will be # useful, but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the GNU # General Public License distributed with the SPARK toolset; see file # COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of # the license. 
#
# =============================================================================

################################################################################
# PURPOSE
#-------------------------------------------------------------------------------
# Makefile for the entire Spark Toolset source distribution
# Build the SPARK Toolset on NT, Solaris, Linux or MacOS X
################################################################################

################################################################################
# BUILD CONFIGURATION
################################################################################

################################################################################
# DETERMINE HOST INFORMATION
################################################################################

# Determine which platform this Makefile is being run on.
TARGET:=$(shell uname -s)
GCC_TARGET:=$(shell gcc -dumpmachine)

# Canonicalize the target string.
ifeq ($(findstring CYGWIN_NT,${TARGET}),CYGWIN_NT)
  TARGET:=Windows
endif

################################################################################
# PLATFORM SPECIFIC CONFIGURATION
################################################################################

BUILD_VICTOR:=false

# Windows.
ifeq (${TARGET},Windows)
  EXEEXTN:=.exe
  ifeq (${GCC_TARGET},i686-pc-mingw32)
    BUILD_VICTOR:=true
  endif
endif

# Sun.
ifeq (${TARGET},SunOS)
  EXEEXTN:=
endif

# Linux.
ifeq (${TARGET},Linux)
  EXEEXTN:=
  BUILD_VICTOR:=true
endif

# Darwin (Mac OS X).
ifeq (${TARGET},Darwin)
  EXEEXTN:=
  BUILD_VICTOR:=true
endif

################################################################################
# TARGETS
################################################################################

# Build all of the tools, copy them to bin, and clean up.
all:
	$(MAKE) makeall
	$(MAKE) copyall
	$(MAKE) cleanall

################################################################################
# BUILDS
################################################################################

makeall:
	$(MAKE) -C sparklalr
	$(MAKE) -C examiner
	$(MAKE) -C simplifier
	$(MAKE) -C wraputility
	$(MAKE) -C checker
	$(MAKE) -C pogs
	$(MAKE) -C sparkclean
	$(MAKE) -C sparkformat
	$(MAKE) -C sparkmake
	$(MAKE) -C sparksimp
	$(MAKE) -C victor
	$(MAKE) -C victor_wrapper

copyall:
	cp ./examiner/spark${EXEEXTN} ./bin/
	cp ./simplifier/spadesimp${EXEEXTN} ./bin/
	cp ./simplifier/zombiescope${EXEEXTN} ./bin/
	cp ./wraputility/wrap_utility${EXEEXTN} ./bin/
	cp ./checker/checker${EXEEXTN} ./bin/
	cp ./pogs/pogs${EXEEXTN} ./bin/
	cp ./sparkclean/sparkclean${EXEEXTN} ./bin/
	cp ./sparkformat/sparkformat${EXEEXTN} ./bin/
	cp ./sparkmake/sparkmake${EXEEXTN} ./bin/
	cp ./sparksimp/sparksimp${EXEEXTN} ./bin/
	cp ./victor_wrapper/victor${EXEEXTN} ./bin/
ifeq (${BUILD_VICTOR},true)
	cp ./victor/alt-ergo/alt-ergo.opt ./bin/alt-ergo${EXEEXTN}
	cp ./victor/vct/bin/vct${EXEEXTN} ./bin/
endif
	cp ./victor/vct/run/prelude.fdl ./share/spark/
	cp ./victor/vct/run/prelude.rul ./share/spark/
	cp ./victor/vct/run/divmod.rul ./share/spark/
ifeq (${TARGET},Windows)
# Copy manifests for SICStus tools.
	cp ./simplifier/spadesimp${EXEEXTN}.manifest ./bin/
	cp ./simplifier/zombiescope${EXEEXTN}.manifest ./bin/
	cp ./checker/checker${EXEEXTN}.manifest ./bin/
# Copy the required DLLs into bin.
cp ./dlls/* ./bin/ endif cleanall: $(MAKE) -C sparklalr clean $(MAKE) -C examiner clean $(MAKE) -C simplifier clean $(MAKE) -C wraputility clean $(MAKE) -C checker clean $(MAKE) -C pogs clean $(MAKE) -C sparkclean clean $(MAKE) -C sparkformat clean $(MAKE) -C sparkmake clean $(MAKE) -C sparksimp clean $(MAKE) -C victor clean $(MAKE) -C victor_wrapper clean reallycleanall: rm -f ./bin/* rm -f ./share/spark/* $(MAKE) -C sparklalr reallyclean $(MAKE) -C examiner reallyclean $(MAKE) -C simplifier reallyclean $(MAKE) -C wraputility reallyclean $(MAKE) -C checker reallyclean $(MAKE) -C pogs reallyclean $(MAKE) -C sparkclean reallyclean $(MAKE) -C sparkformat reallyclean $(MAKE) -C sparkmake reallyclean $(MAKE) -C sparksimp reallyclean $(MAKE) -C victor clean $(MAKE) -C victor_wrapper reallyclean analyseall: -$(MAKE) -C sparklalr self-analysis -$(MAKE) -C examiner self-analysis -$(MAKE) -C simplifier spxref -$(MAKE) -C wraputility self-analysis -$(MAKE) -C checker spxref -$(MAKE) -C pogs self-analysis -$(MAKE) -C sparkclean self-analysis -$(MAKE) -C sparkformat self-analysis -$(MAKE) -C sparkmake self-analysis -$(MAKE) -C victor_wrapper self-analysis analyseallplain: -$(MAKE) -C sparklalr self-analysis -$(MAKE) -C examiner self-analysis -$(MAKE) -C simplifier spxrefplain -$(MAKE) -C wraputility self-analysis -$(MAKE) -C checker spxrefplain -$(MAKE) -C pogs self-analysis -$(MAKE) -C sparkclean self-analysis -$(MAKE) -C sparkformat self-analysis -$(MAKE) -C sparkmake self-analysis -$(MAKE) -C victor_wrapper self-analysis # Regenerate the reference results. # (May be used to capture analysis of the current code base). generaterereference: analyseallplain cp ./sparklalr/vcg/sparklalr.rep ./analyse/referenceanalysis/sparklalr.rep cp ./examiner/vcg/mainunits95.rep ./analyse/referenceanalysis/examiner.rep cp ./simplifier/spxref_undefined.txt ./analyse/referenceanalysis/simplifier_spxref_undefined.txt cp ./wraputility/vcg/wrap_utility.rep ./analyse/referenceanalysis/wrap_utility.rep cp ./checker/spxref_undefined.txt ./analyse/referenceanalysis/checker_spxref_undefined.txt cp ./pogs/vcg/pogs.rep ./analyse/referenceanalysis/pogs.rep cp ./sparkclean/vcg/sparkclean.rep ./analyse/referenceanalysis/sparkclean.rep cp ./sparkformat/vcg/sparkformat.rep ./analyse/referenceanalysis/sparkformat.rep cp ./sparkmake/vcg/sparkmake.rep ./analyse/referenceanalysis/sparkmake.rep cp ./victor_wrapper/vcg/victor.rep ./analyse/referenceanalysis/victor.rep ################################################################################ # END-OF-FILE spark-2012.0.deb/bin/0000755000175000017500000000000011753203755013217 5ustar eugeneugenspark-2012.0.deb/sparklalr/0000755000175000017500000000000011753203755014442 5ustar eugeneugenspark-2012.0.deb/sparklalr/fatal.ads0000644000175000017500000000234011753202335016212 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= package Fatal is Command_Line_Error : exception; -- Raises an exception to be caught by the top level handler, which -- will stop the program, hence the postcondition False. procedure Stop_With_Command_Line_Exception; --# derives ; --# post False; end Fatal; spark-2012.0.deb/sparklalr/ees_sym.ads0000644000175000017500000000613511753202335016575 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SPARK.Ada.Text_IO; --# inherit Command_Line_Options, --# SPARK.Ada.Text_IO, --# Sparklalr_Common, --# Sparklalr_Memory, --# Sparklalr_Memory.Dump, --# Sparklalr_Parser, --# SPARK_Ada_Integer_Text_IO, --# Symbols_Dump; package Ees_Sym --# own State; is procedure Gen_Essentials; --# global in Sparklalr_Memory.Stat_No; --# in Sparklalr_Parser.State; --# in Symbols_Dump.State; --# in out Sparklalr_Memory.Dump.State; --# out State; --# derives Sparklalr_Memory.Dump.State from *, --# Sparklalr_Memory.Stat_No, --# Symbols_Dump.State & --# State from Sparklalr_Memory.Dump.State, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# Symbols_Dump.State; procedure Out_Essentials; --# global in Sparklalr_Memory.Stat_No; --# in out State; --# derives State from *, --# Sparklalr_Memory.Stat_No; procedure Sp_Exp_Out (F : in out SPARK.Ada.Text_IO.File_Type); --# global in Command_Line_Options.State; --# in Sparklalr_Memory.Stat_No; --# in State; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F from *, --# Command_Line_Options.State, --# Sparklalr_Memory.Stat_No, --# State, --# Symbols_Dump.State & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Sparklalr_Memory.Stat_No, --# State, --# Symbols_Dump.State; end Ees_Sym; spark-2012.0.deb/sparklalr/sparklalr_input.adb0000644000175000017500000013747111753202335020332 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Command_Line_Options; with Fatal; with SPARK_Ada_Integer_Text_IO; with SPARK.Ada.Command_Line; with SPARK.Ada.Strings.Unbounded; with SPARK.Ada.Text_IO.Unbounded_String; with Sparklalr_Char_Class; with Sparklalr_Level; with Sparklalr_Memory; with Symbols_Dump; use type SPARK.Ada.Text_IO.Exception_T; use type Sparklalr_Char_Class.Char_Class; package body Sparklalr_Input --# own State is Ch, --# Gram_In, --# Keymap, --# Keytable; is Nkeywords : constant := 13; -- NUMBER OF KEYWORDS LEFT,RIGHT,... subtype Keymap_Elem is Sparklalr_Symbol.Symbol range Sparklalr_Symbol.Ampmark .. Sparklalr_Symbol.Ampmain; subtype Keywords_Range is Natural range 0 .. Nkeywords; type Keytable_T is array (Keywords_Range) of Sparklalr_Common.Id_Name; type Keymap_T is array (Keywords_Range) of Keymap_Elem; Keytable : Keytable_T; Keymap : Keymap_T; Gram_In : SPARK.Ada.Text_IO.File_Type; Ch : Character; -- Local procedures/functions procedure Get_Char (F : in out SPARK.Ada.Text_IO.File_Type; Col : in out Sparklalr_Error.Err_Col_T) --# global in Sparklalr_Char_Class.Charmap; --# in out Gram_In; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# out Ch; --# derives Ch, --# Col from Col, --# Gram_In & --# F, --# SPARK.Ada.Text_IO.The_Standard_Output, --# Sparklalr_Error.State from *, --# Col, --# Gram_In, --# Sparklalr_Error.State & --# Gram_In from *, --# Col, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State; is begin Ch := ' '; if not SPARK.Ada.Text_IO.End_Of_File_File (File => Gram_In) then if SPARK.Ada.Text_IO.End_Of_Line_File (File => Gram_In) or else (Col = Sparklalr_Common.Line_Length) then Sparklalr_Error.Write_The_Line (Gram_In, F, Col); else SPARK.Ada.Text_IO.Get_Character_File (File => Gram_In, Item => Ch); Col := Col + 1; Sparklalr_Error.Set_Line_Out (Col, Ch); end if; end if; end Get_Char; -- End local procedures/functions procedure Initialise (Signpost : out Symbol_Set_Type) --# global in Command_Line_Options.State; --# in out SPARK.Ada.Command_Line.State; --# in out SPARK.Ada.Text_IO.The_Standard_Error; --# out Ch; --# out Gram_In; --# out Keymap; --# out Keytable; --# out Sparklalr_Char_Class.Charmap; --# out Sparklalr_Error.State; --# out Sparklalr_Level.State; --# derives Ch, --# Keymap, --# Keytable, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Level.State from & --# Gram_In, --# Sparklalr_Error.State from Command_Line_Options.State & --# SPARK.Ada.Command_Line.State, --# SPARK.Ada.Text_IO.The_Standard_Error from *, --# Command_Line_Options.State; is procedure Init_Maps --# global out Keymap; --# out Keytable; --# out Sparklalr_Char_Class.Charmap; --# derives Keymap, --# Keytable, --# Sparklalr_Char_Class.Charmap from ; is begin Sparklalr_Char_Class.Initialise; Keytable := Keytable_T'(others => " "); Keymap := Keymap_T'(others => Sparklalr_Symbol.Ampmark); Keytable (0) := " "; Keymap (0) := Sparklalr_Symbol.Ampmark; Keytable (1) := "CONST "; Keymap (1) := Sparklalr_Symbol.Ampconst; Keytable (2) := "FUNC "; 
Keymap (2) := Sparklalr_Symbol.Ampfunc; Keytable (3) := "GRAM "; Keymap (3) := Sparklalr_Symbol.Ampgram; Keytable (4) := "LABEL "; Keymap (4) := Sparklalr_Symbol.Amplabel; Keytable (5) := "LEFT "; Keymap (5) := Sparklalr_Symbol.Left; Keytable (6) := "MAIN "; Keymap (6) := Sparklalr_Symbol.Ampmain; Keytable (7) := "NON "; Keymap (7) := Sparklalr_Symbol.Non; Keytable (8) := "PREC "; Keymap (8) := Sparklalr_Symbol.Prec; Keytable (9) := "PROG "; Keymap (9) := Sparklalr_Symbol.Ampprog; Keytable (10) := "RIGHT "; Keymap (10) := Sparklalr_Symbol.Right; Keytable (11) := "TERM "; Keymap (11) := Sparklalr_Symbol.Ampterm; Keytable (12) := "TYPE "; Keymap (12) := Sparklalr_Symbol.Amptype; Keytable (13) := "VAR "; Keymap (13) := Sparklalr_Symbol.Ampvar; end Init_Maps; begin -- Initialise Init_Maps; SPARK.Ada.Text_IO.Unbounded_String.Open (File => Gram_In, Mode => SPARK.Ada.Text_IO.In_File, Name => SPARK.Ada.Strings.Unbounded.Concat_Unbounded_String_String (Left => Command_Line_Options.Get_File_Name, Right => ".LLA"), Form => SPARK.Ada.Strings.Unbounded.Null_Unbounded_String); if SPARK.Ada.Text_IO.Get_Last_Exception_File (File => Gram_In) /= SPARK.Ada.Text_IO.No_Exception then SPARK.Ada.Text_IO.Put_Line_Error (Item => "Unable to open LLA input grammar file"); SPARK.Ada.Command_Line.Set_Exit_Status (SPARK.Ada.Command_Line.Failure); Fatal.Stop_With_Command_Line_Exception; end if; Ch := ' '; Signpost := Symbol_Set_Type'(Sparklalr_Symbol.Ampmark | Sparklalr_Symbol.Ampterm .. Sparklalr_Symbol.Endfile => True, others => False); Sparklalr_Error.Initialise (Gram_In); Sparklalr_Level.Initialise; end Initialise; procedure Finalize (F : in out SPARK.Ada.Text_IO.File_Type) --# global in Gram_In; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output, --# Sparklalr_Error.State from *, --# Gram_In; is begin if not SPARK.Ada.Text_IO.End_Of_File_File (File => Gram_In) then Sparklalr_Error.Error (F, 0); end if; end Finalize; function Signpost_To_Symbol_Set_Type (Signpost : in Symbol_Set_Type) return Symbol_Set_Type is subtype I_Range is Sparklalr_Symbol.Symbol range Sparklalr_Symbol.Ampmark .. 
Sparklalr_Symbol.Endfile; Result : Symbol_Set_Type; begin Result := Symbol_Set_Type'(others => False); for I in I_Range loop Result (I) := Signpost (I); end loop; return Result; end Signpost_To_Symbol_Set_Type; procedure Scan (F : in out SPARK.Ada.Text_IO.File_Type; Col : in out Sparklalr_Error.Err_Col_T; Symb : out Sparklalr_Symbol.Symbol; Token : out Sparklalr_Common.Id_Name) --# global in Keymap; --# in Keytable; --# in Sparklalr_Char_Class.Charmap; --# in out Ch; --# in out Gram_In; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# derives Ch, --# Col, --# F, --# Gram_In, --# SPARK.Ada.Text_IO.The_Standard_Output, --# Sparklalr_Error.State from *, --# Col, --# Gram_In, --# Keytable, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State & --# Symb from Col, --# Gram_In, --# Keymap, --# Keytable, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State & --# Token from Col, --# Gram_In, --# Keytable, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State; is Continue : Boolean; procedure Error_Symbol (Col : in Sparklalr_Error.Err_Col_T; Symb : out Sparklalr_Symbol.Symbol) --# global in out Sparklalr_Error.State; --# derives Sparklalr_Error.State from *, --# Col & --# Symb from ; is begin Sparklalr_Error.Syn_Error (10, Col); Symb := Sparklalr_Symbol.Errsym; end Error_Symbol; procedure Scan_Number (F : in out SPARK.Ada.Text_IO.File_Type; Col : in out Sparklalr_Error.Err_Col_T; Symb : out Sparklalr_Symbol.Symbol) --# global in Sparklalr_Char_Class.Charmap; --# in out Ch; --# in out Gram_In; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# derives Ch, --# Col, --# F, --# Gram_In, --# SPARK.Ada.Text_IO.The_Standard_Output, --# Sparklalr_Error.State from *, --# Col, --# Gram_In, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State & --# Symb from ; is C : Character; End_Of_Line : Boolean; begin --# accept F, 10, End_Of_Line, "Ineffective assignment here expected"; SPARK.Ada.Text_IO.Look_Ahead_File (File => Gram_In, Item => C, End_Of_Line => End_Of_Line); --# end accept; while Sparklalr_Char_Class.Get_Charmap (C) = Sparklalr_Char_Class.Digit loop Get_Char (F, Col); --# accept F, 10, End_Of_Line, "Ineffective assignment here expected"; SPARK.Ada.Text_IO.Look_Ahead_File (File => Gram_In, Item => C, End_Of_Line => End_Of_Line); --# end accept; end loop; Symb := Sparklalr_Symbol.Number; --# accept F, 33, End_Of_Line, "Unused OK"; end Scan_Number; procedure Scan_Comment (F : in out SPARK.Ada.Text_IO.File_Type; Col : in out Sparklalr_Error.Err_Col_T; Symb : out Sparklalr_Symbol.Symbol) --# global in Sparklalr_Char_Class.Charmap; --# in out Ch; --# in out Gram_In; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# derives Ch, --# Col, --# F, --# Gram_In, --# SPARK.Ada.Text_IO.The_Standard_Output, --# Sparklalr_Error.State from *, --# Col, --# Gram_In, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State & --# Symb from Gram_In; is CCH : Character; C : Character; End_Of_Line : Boolean; begin Symb := Sparklalr_Symbol.Nullsymb; --# accept F, 10, End_Of_Line, "Ineffective assignment here expected"; SPARK.Ada.Text_IO.Look_Ahead_File (File => Gram_In, Item => C, End_Of_Line => End_Of_Line); --# end accept; if C = '*' then loop Get_Char (F, Col); CCH := Ch; --# accept F, 10, End_Of_Line, "Ineffective assignment here expected"; SPARK.Ada.Text_IO.Look_Ahead_File (File => Gram_In, Item => C, End_Of_Line => End_Of_Line); --# end accept; exit when 
SPARK.Ada.Text_IO.End_Of_File_File (File => Gram_In) or else ((CCH = '*') and then (C = ')')); end loop; Get_Char (F, Col); else Error_Symbol (Col, Symb); end if; --# accept F, 33, End_Of_Line, "Unused OK"; end Scan_Comment; procedure Scan_Ident (F : in out SPARK.Ada.Text_IO.File_Type; Col : in out Sparklalr_Error.Err_Col_T; Symb : out Sparklalr_Symbol.Symbol; Token : out Sparklalr_Common.Id_Name) --# global in Sparklalr_Char_Class.Charmap; --# in out Ch; --# in out Gram_In; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# derives Ch, --# Col, --# F, --# Gram_In, --# SPARK.Ada.Text_IO.The_Standard_Output, --# Sparklalr_Error.State from *, --# Col, --# Gram_In, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State & --# Symb from & --# Token from Ch, --# Col, --# Gram_In, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State; is Name : Sparklalr_Common.Id_Name; I : Integer; C : Character; End_Of_Line : Boolean; begin Name := " "; Symb := Sparklalr_Symbol.Ident; Name (1) := Ch; I := 1; --# accept F, 10, End_Of_Line, "Ineffective assignment here expected"; SPARK.Ada.Text_IO.Look_Ahead_File (File => Gram_In, Item => C, End_Of_Line => End_Of_Line); --# end accept; while (Sparklalr_Char_Class.Get_Charmap (C) = Sparklalr_Char_Class.Letter) or else (Sparklalr_Char_Class.Get_Charmap (C) = Sparklalr_Char_Class.Digit) or else (Sparklalr_Char_Class.Get_Charmap (C) = Sparklalr_Char_Class.Underscore) loop Get_Char (F, Col); I := I + 1; if I <= Sparklalr_Common.Id_Length then Name (I) := Ch; end if; --# accept F, 10, End_Of_Line, "Ineffective assignment here expected"; SPARK.Ada.Text_IO.Look_Ahead_File (File => Gram_In, Item => C, End_Of_Line => End_Of_Line); --# end accept; end loop; Token := Name; --# accept F, 33, End_Of_Line, "Unused OK"; end Scan_Ident; procedure Scan_Ampersand (F : in out SPARK.Ada.Text_IO.File_Type; Col : in out Sparklalr_Error.Err_Col_T; Symb : out Sparklalr_Symbol.Symbol; Token : out Sparklalr_Common.Id_Name) --# global in Keymap; --# in Keytable; --# in Sparklalr_Char_Class.Charmap; --# in out Ch; --# in out Gram_In; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# derives Ch, --# Col, --# F, --# Gram_In, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Col, --# Gram_In, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State & --# Sparklalr_Error.State from *, --# Col, --# Gram_In, --# Keytable, --# Sparklalr_Char_Class.Charmap & --# Symb from Col, --# Gram_In, --# Keymap, --# Keytable, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State & --# Token from Col, --# Gram_In, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State; is C : Character; End_Of_Line : Boolean; procedure Assign_Key (Col : in Sparklalr_Error.Err_Col_T; Symb : out Sparklalr_Symbol.Symbol; Token : in Sparklalr_Common.Id_Name) --# global in Keymap; --# in Keytable; --# in out Sparklalr_Error.State; --# derives Sparklalr_Error.State from *, --# Col, --# Keytable, --# Token & --# Symb from Keymap, --# Keytable, --# Token; is I, J, K : Integer; begin I := 0; J := Nkeywords + 1; while I + 1 /= J loop K := (I + J) / 2; if Token >= Keytable (K) then I := K; else J := K; end if; end loop; if Token = Keytable (I) then Symb := Keymap (I); else Sparklalr_Error.Syn_Error (2, Col); Symb := Sparklalr_Symbol.Errsym; end if; end Assign_Key; begin -- Scan_Ampersand Token := " "; --# accept F, 10, End_Of_Line, "Ineffective assignment here expected"; SPARK.Ada.Text_IO.Look_Ahead_File (File => Gram_In, Item => C, 
End_Of_Line => End_Of_Line); --# end accept; if Sparklalr_Char_Class.Get_Charmap (C) = Sparklalr_Char_Class.Letter then Get_Char (F, Col); --# accept F, 10, Symb, "Ineffective assigment to Symb here expected and OK"; Scan_Ident (F, Col, Symb, Token); --# end accept; Assign_Key (Col, Symb, Token); else if Sparklalr_Char_Class.Get_Charmap (C) = Sparklalr_Char_Class.Ampchar then Get_Char (F, Col); Symb := Sparklalr_Symbol.Ampmark; else Symb := Sparklalr_Symbol.Ampersand; end if; end if; --# accept F, 33, End_Of_Line, "Unused OK"; end Scan_Ampersand; begin -- Scan Symb := Sparklalr_Symbol.Nullsymb; Token := " "; Continue := True; while Continue loop Get_Char (F, Col); while (Ch = ' ') and then not SPARK.Ada.Text_IO.End_Of_File_File (File => Gram_In) loop -- SKIP BLANKS Get_Char (F, Col); end loop; if SPARK.Ada.Text_IO.End_Of_File_File (File => Gram_In) then Symb := Sparklalr_Symbol.Endfile; Continue := False; else case Sparklalr_Char_Class.Get_Charmap (Ch) is when Sparklalr_Char_Class.Letter => Scan_Ident (F, Col, Symb, Token); Continue := False; when Sparklalr_Char_Class.Digit => Scan_Number (F, Col, Symb); Continue := False; when Sparklalr_Char_Class.Equalchar => Symb := Sparklalr_Symbol.Equals; Continue := False; when Sparklalr_Char_Class.Uparrowchar => Symb := Sparklalr_Symbol.Uparrow; Continue := False; when Sparklalr_Char_Class.Commachar => Symb := Sparklalr_Symbol.Comma; Continue := False; when Sparklalr_Char_Class.Scolonchar => Symb := Sparklalr_Symbol.Scolon; Continue := False; when Sparklalr_Char_Class.Colonchar => Symb := Sparklalr_Symbol.Colon; Continue := False; when Sparklalr_Char_Class.Ampchar => Scan_Ampersand (F, Col, Symb, Token); Continue := False; when Sparklalr_Char_Class.Lparenchar => Scan_Comment (F, Col, Symb); Continue := True; when Sparklalr_Char_Class.Otherchar => Error_Symbol (Col, Symb); Continue := False; when others => null; end case; end if; end loop; end Scan; procedure Skipto (F : in out SPARK.Ada.Text_IO.File_Type; Arg_Set : in Symbol_Set_Type; Col : in out Sparklalr_Error.Err_Col_T; Symb : in out Sparklalr_Symbol.Symbol) --# global in Keymap; --# in Keytable; --# in Sparklalr_Char_Class.Charmap; --# in out Ch; --# in out Gram_In; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# derives Ch, --# Col, --# F, --# Gram_In, --# SPARK.Ada.Text_IO.The_Standard_Output, --# Sparklalr_Error.State, --# Symb from *, --# Arg_Set, --# Col, --# Gram_In, --# Keymap, --# Keytable, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Symb; is Token : Sparklalr_Common.Id_Name; begin if not (Arg_Set (Symb)) then --# accept F, 10, Token, "Ineffective assignment here expected and OK"; Scan (F, Col, Symb, Token); --# end accept; while not (Arg_Set (Symb)) loop if Symb = Sparklalr_Symbol.Ampersand then loop --# accept F, 10, Token, "Ineffective assignment here expected and OK"; Scan (F, Col, Symb, Token); --# end accept; exit when Symb = Sparklalr_Symbol.Ampersand; end loop; end if; --# accept F, 10, Token, "Ineffective assignment here expected and OK"; Scan (F, Col, Symb, Token); --# end accept; end loop; end if; --# accept F, 33, Token, "Token is unused OK"; end Skipto; procedure Copy_Action (F, Echo : in out SPARK.Ada.Text_IO.File_Type; Signpost : in Symbol_Set_Type; Col : in out Sparklalr_Error.Err_Col_T; Symb : in out Sparklalr_Symbol.Symbol) --# global in Keymap; --# in Keytable; --# in Sparklalr_Char_Class.Charmap; --# in Sparklalr_Memory.Prod_No; --# in out Ch; --# in out Gram_In; --# in out 
SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# in out Sparklalr_Level.State; --# derives Ch, --# Col, --# Echo, --# Gram_In, --# SPARK.Ada.Text_IO.The_Standard_Output, --# Sparklalr_Error.State, --# Symb from *, --# Col, --# Gram_In, --# Keymap, --# Keytable, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Symb & --# F, --# Sparklalr_Level.State from *, --# Col, --# Gram_In, --# Keymap, --# Keytable, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Sparklalr_Memory.Prod_No, --# Symb; is Lmargin : Integer; New_Line : Boolean; Symbolset_Skipto : Symbol_Set_Type; C : Character; Quote : Character; Token : Sparklalr_Common.Id_Name; End_Of_Line : Boolean; procedure Copy_String (Quote : in Character; F, Echo : in out SPARK.Ada.Text_IO.File_Type; Col : in out Sparklalr_Error.Err_Col_T) --# global in Sparklalr_Char_Class.Charmap; --# in out Ch; --# in out Gram_In; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# derives Ch, --# Col, --# Echo, --# F, --# Gram_In, --# SPARK.Ada.Text_IO.The_Standard_Output, --# Sparklalr_Error.State from *, --# Col, --# Gram_In, --# Quote, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State; is begin if not SPARK.Ada.Text_IO.End_Of_Line_File (File => Gram_In) then loop Get_Char (Echo, Col); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => Ch); exit when (Ch = Quote) or else SPARK.Ada.Text_IO.End_Of_Line_File (File => Gram_In); end loop; end if; end Copy_String; procedure Copy_Comment (F, Echo : in out SPARK.Ada.Text_IO.File_Type; Col : in out Sparklalr_Error.Err_Col_T) --# global in Sparklalr_Char_Class.Charmap; --# in out Gram_In; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# out Ch; --# derives Ch from Col, --# Gram_In, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State & --# Col, --# Echo, --# F, --# Gram_In, --# SPARK.Ada.Text_IO.The_Standard_Output, --# Sparklalr_Error.State from *, --# Col, --# Gram_In, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State; is C : Character; End_Of_Line : Boolean; begin loop Get_Char (Echo, Col); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => Ch); --# accept F, 10, End_Of_Line, "Ineffective assignment here expected"; SPARK.Ada.Text_IO.Look_Ahead_File (File => Gram_In, Item => C, End_Of_Line => End_Of_Line); --# end accept; exit when (Ch = '*') and then (C = ')'); end loop; --# accept F, 33, End_Of_Line, "Unused OK"; end Copy_Comment; begin -- Copy_Action Symbolset_Skipto := Signpost_To_Symbol_Set_Type (Signpost); Symbolset_Skipto (Sparklalr_Symbol.Ampersand) := True; Symbolset_Skipto (Sparklalr_Symbol.Scolon) := True; Skipto (Echo, Symbolset_Skipto, Col, Symb); if Symb = Sparklalr_Symbol.Ampersand then Sparklalr_Level.Set_Level_Action_Flag (Sparklalr_Memory.Get_Prod_No, True); SPARK.Ada.Text_IO.Put_File (File => F, Item => " when"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Sparklalr_Memory.Get_Prod_No, Width => 5, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " => "); New_Line := False; --# accept F, 10, End_Of_Line, "Ineffective assignment here expected"; SPARK.Ada.Text_IO.Look_Ahead_File (File => Gram_In, Item => C, End_Of_Line => End_Of_Line); --# end accept; while (C = ' ') and then not SPARK.Ada.Text_IO.End_Of_File_File (File => Gram_In) loop if SPARK.Ada.Text_IO.End_Of_Line_File (File => Gram_In) then New_Line := True; end if; --# accept F, 10, Ch, "Ineffective assigment to Ch here 
expected and OK"; Get_Char (Echo, Col); --# end accept; --# accept F, 10, End_Of_Line, "Ineffective assignment here expected"; SPARK.Ada.Text_IO.Look_Ahead_File (File => Gram_In, Item => C, End_Of_Line => End_Of_Line); --# end accept; end loop; Lmargin := Col; if New_Line then SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); end if; --# accept F, 10, End_Of_Line, "Ineffective assignment here expected"; SPARK.Ada.Text_IO.Look_Ahead_File (File => Gram_In, Item => C, End_Of_Line => End_Of_Line); --# end accept; while (C /= '&') and then not SPARK.Ada.Text_IO.End_Of_File_File (File => Gram_In) loop if New_Line then Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 6); end if; New_Line := False; Get_Char (Echo, Col); if Ch = '\' then Sparklalr_Error.Syn_Error (18, Col); else SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => Ch); if (Ch = ''') or else (Ch = '"') then Quote := Ch; --# accept F, 10, Ch, "Ineffective assigment to Ch here expected and OK"; Copy_String (Quote, F, Echo, Col); --# end accept; else --# accept F, 10, End_Of_Line, "Ineffective assignment here expected"; SPARK.Ada.Text_IO.Look_Ahead_File (File => Gram_In, Item => C, End_Of_Line => End_Of_Line); --# end accept; if (Ch = '(') and then (C = '*') then --# accept F, 10, Ch, "Ineffective assigment to Ch here expected and OK"; Copy_Comment (F, Echo, Col); --# end accept; end if; end if; end if; if SPARK.Ada.Text_IO.End_Of_Line_File (File => Gram_In) then --# accept F, 10, End_Of_Line, "Ineffective assignment here expected"; SPARK.Ada.Text_IO.Look_Ahead_File (File => Gram_In, Item => C, End_Of_Line => End_Of_Line); --# end accept; while (C = ' ') and then (Col /= Lmargin) and then not SPARK.Ada.Text_IO.End_Of_File_File (File => Gram_In) loop --# accept F, 10, Ch, "Ineffective assigment to Ch here expected and OK"; Get_Char (Echo, Col); --# end accept; --# accept F, 10, End_Of_Line, "Ineffective assignment here expected"; SPARK.Ada.Text_IO.Look_Ahead_File (File => Gram_In, Item => C, End_Of_Line => End_Of_Line); --# end accept; end loop; SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); New_Line := True; end if; --# accept F, 10, End_Of_Line, "Ineffective assignment here expected"; SPARK.Ada.Text_IO.Look_Ahead_File (File => Gram_In, Item => C, End_Of_Line => End_Of_Line); --# end accept; end loop; SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Get_Char (Echo, Col); --# accept F, 10, Token, "Ineffective assignment here expected and OK"; Scan (Echo, Col, Symb, Token); --# end accept; else Sparklalr_Error.Syn_Error (4, Col); end if; --# accept F, 33, End_Of_Line, "Unused OK" & --# F, 33, Token, "Token is unused OK"; end Copy_Action; procedure Skip_Action (F : in out SPARK.Ada.Text_IO.File_Type; Signpost : in Symbol_Set_Type; Col : in out Sparklalr_Error.Err_Col_T; Symb : in out Sparklalr_Symbol.Symbol) --# global in Keymap; --# in Keytable; --# in Sparklalr_Char_Class.Charmap; --# in out Ch; --# in out Gram_In; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# derives Ch, --# Col, --# F, --# Gram_In, --# SPARK.Ada.Text_IO.The_Standard_Output, --# Sparklalr_Error.State, --# Symb from *, --# Col, --# Gram_In, --# Keymap, --# Keytable, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Symb; is Symbolset_Skipto : Symbol_Set_Type; C : Character; Token : Sparklalr_Common.Id_Name; End_Of_Line : Boolean; begin Symbolset_Skipto := Signpost_To_Symbol_Set_Type (Signpost); Symbolset_Skipto (Sparklalr_Symbol.Ampersand) := 
True; Symbolset_Skipto (Sparklalr_Symbol.Scolon) := True; Skipto (F, Symbolset_Skipto, Col, Symb); if Symb = Sparklalr_Symbol.Ampersand then --# accept F, 10, End_Of_Line, "Ineffective assignment here expected"; SPARK.Ada.Text_IO.Look_Ahead_File (File => Gram_In, Item => C, End_Of_Line => End_Of_Line); --# end accept; while (C /= '&') and then not SPARK.Ada.Text_IO.End_Of_File_File (File => Gram_In) loop --# accept F, 10, Ch, "Ineffective assigment to Ch here expected and OK"; Get_Char (F, Col); --# end accept; --# accept F, 10, End_Of_Line, "Ineffective assignment here expected"; SPARK.Ada.Text_IO.Look_Ahead_File (File => Gram_In, Item => C, End_Of_Line => End_Of_Line); --# end accept; end loop; Get_Char (F, Col); --# accept F, 10, Token, "Ineffective assignment here expected and OK"; Scan (F, Col, Symb, Token); --# end accept; else Sparklalr_Error.Syn_Error (4, Col); end if; --# accept F, 33, End_Of_Line, "Unused OK" & --# F, 33, Token, "Token is unused OK"; end Skip_Action; procedure Proc_Term (F : in out SPARK.Ada.Text_IO.File_Type; Gram_Rules : out Boolean; Col : in out Sparklalr_Error.Err_Col_T; Signpost : in Symbol_Set_Type; Symb : out Sparklalr_Symbol.Symbol) --# global in Keymap; --# in Keytable; --# in Sparklalr_Char_Class.Charmap; --# in out Ch; --# in out Gram_In; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# in out Sparklalr_Level.State; --# in out Symbols_Dump.State; --# derives Ch, --# Col, --# F, --# Gram_In, --# SPARK.Ada.Text_IO.The_Standard_Output, --# Sparklalr_Error.State, --# Sparklalr_Level.State, --# Symbols_Dump.State from *, --# Col, --# Gram_In, --# Keymap, --# Keytable, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Symbols_Dump.State & --# Gram_Rules from & --# Symb from Col, --# Gram_In, --# Keymap, --# Keytable, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Symbols_Dump.State; is Current_Lev : Sparklalr_Level.Lev_Struct; Dummy : Integer; pragma Unreferenced (Dummy); Next_Symb_Var : Sparklalr_Symbol.Symbol; Symbolset_Skipto : Symbol_Set_Type; Token : Sparklalr_Common.Id_Name; procedure Element_Process (Current_Lev : in Sparklalr_Level.Lev_Struct; F : in out SPARK.Ada.Text_IO.File_Type; Token : in Sparklalr_Common.Id_Name; Col : in Sparklalr_Error.Err_Col_T) --# global in out Sparklalr_Error.State; --# in out Sparklalr_Level.State; --# in out Symbols_Dump.State; --# derives F, --# Symbols_Dump.State from *, --# Symbols_Dump.State, --# Token & --# Sparklalr_Error.State from *, --# Col, --# Symbols_Dump.State, --# Token & --# Sparklalr_Level.State from *, --# Current_Lev, --# Symbols_Dump.State, --# Token; is Dummy : Integer; begin Symbols_Dump.Find (False, F, False, Token, Col, Dummy); Sparklalr_Level.Assign_Term_Lev (Dummy, Current_Lev); end Element_Process; begin -- Proc_Term Gram_Rules := False; Token := "SPEND "; --# accept F, 10, Dummy, "Ineffective assigment to Dummy here expected and OK"; Symbols_Dump.Define (False, F, False, Token, Col, Dummy); --# end accept; Sparklalr_Level.Initiate_Term_Lev (0); Token := "SPDEFAULT "; --# accept F, 10, Dummy, "Ineffective assigment to Dummy here expected and OK"; Symbols_Dump.Define (False, F, False, Token, Col, Dummy); --# end accept; Sparklalr_Level.Initiate_Term_Lev (1); Token := "SPACCEPT "; --# accept F, 10, Dummy, "Ineffective assigment to Dummy here expected and OK"; Symbols_Dump.Define (True, F, False, Token, Col, Dummy); --# end accept; Sparklalr_Level.Initiate (Current_Lev); Scan (F, Col, Symb, Token); 
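      -- The loop below processes the terminal declaration groups of the grammar.
      -- An associativity marker (Left, Right or Non) is passed to
      -- Sparklalr_Level.Associativity, which records the associativity in
      -- Current_Lev and increments its precedence level by one; the identifiers
      -- and literals that follow are then entered via Element_Process with that
      -- level.  Any other symbol is reported as syntax error 8 and Skipto is
      -- used to resynchronise on the next marker, identifier or literal.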
while not Signpost (Symb) loop if (Symb = Sparklalr_Symbol.Left) or else (Symb = Sparklalr_Symbol.Right) or else (Symb = Sparklalr_Symbol.Non) then Sparklalr_Level.Associativity (Symb, Current_Lev); Scan (F, Col, Symb, Token); end if; if (Symb = Sparklalr_Symbol.Ident) or else (Symb = Sparklalr_Symbol.Lit) then loop -- PROCESS LIST Element_Process (Current_Lev, F, Token, Col); Scan (F, Col, Symb, Token); Next_Symb_Var := Symb; if (Next_Symb_Var = Sparklalr_Symbol.Comma) or else (Next_Symb_Var = Sparklalr_Symbol.Scolon) then Scan (F, Col, Symb, Token); end if; exit when not ((Symb = Sparklalr_Symbol.Ident) or else (Symb = Sparklalr_Symbol.Lit)); end loop; else Sparklalr_Error.Syn_Error (8, Col); Symbolset_Skipto := Symbol_Set_False_Const; Symbolset_Skipto (Sparklalr_Symbol.Left) := True; Symbolset_Skipto (Sparklalr_Symbol.Right) := True; Symbolset_Skipto (Sparklalr_Symbol.Non) := True; Symbolset_Skipto (Sparklalr_Symbol.Ident) := True; Symbolset_Skipto (Sparklalr_Symbol.Lit) := True; Skipto (F, Symbolset_Skipto, Col, Symb); end if; end loop; --# accept F, 33, Dummy, "Dummy is unused OK"; end Proc_Term; procedure Set_Symbol_Set (Symbol_Set : in out Symbol_Set_Type; Symb : in Sparklalr_Symbol.Symbol; Value : in Boolean) is begin Symbol_Set (Symb) := Value; end Set_Symbol_Set; function Get_Symbol_Set (Symbol_Set : in Symbol_Set_Type; Symb : in Sparklalr_Symbol.Symbol) return Boolean is begin return Symbol_Set (Symb); end Get_Symbol_Set; end Sparklalr_Input; spark-2012.0.deb/sparklalr/sparklalr_level.adb0000644000175000017500000001610111753202335020264 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
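--
-- Note on the precedence model implemented below (a summary of the code that
-- follows, not part of the original header): each Lev_Struct pairs an
-- associativity (Leftass, Rightass, Nonass or Nodef) with a numeric level.
-- Every associativity keyword met while reading the terminal declarations
-- raises the level by one, so, for example, a left-associative group followed
-- by a right-associative group leaves the first group's terminals at level 1
-- with Assoc = Leftass and the second group's at level 2 with Assoc = Rightass.
-- Precedence later compares these records when a parsing conflict arises;
-- Report flags the cases where no usable precedence information has been
-- declared for the symbols involved.
--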
-- --============================================================================= package body Sparklalr_Level --# own State is Init_Lev, --# Level, --# Term_Lev; is type Level_T is array (Sparklalr_Common.Production_Index) of Lev_Struct; type Term_Lev_T is array (Sparklalr_Common.Term_Range) of Lev_Struct; Init_Lev : Lev_Struct; Level : Level_T; Term_Lev : Term_Lev_T; -- Local procedures/functions procedure Assign (L : out Lev_Struct; M : in Lev_Struct) -- ASSIGNS PRECEDENCE --# derives L from M; is begin L := Lev_Struct'(Assoc => M.Assoc, Action_Flag => M.Action_Flag, Lev => M.Lev); end Assign; -- End local procedures/functions procedure Initiate (L : out Lev_Struct) -- INITIATES PRECEDENCE --# global in Init_Lev; --# derives L from Init_Lev; is begin Assign (L, Init_Lev); end Initiate; procedure Initialise --# global out Init_Lev; --# out Level; --# out Term_Lev; --# derives Init_Lev, --# Level, --# Term_Lev from ; is begin Init_Lev := Lev_Struct'(Assoc => Nodef, Action_Flag => False, Lev => 0); Level := Level_T'(others => Init_Lev); Term_Lev := Term_Lev_T'(others => Init_Lev); end Initialise; procedure Assign_Level (I : in Sparklalr_Common.Production_Index; M : in Lev_Struct) --# global in out Level; --# derives Level from *, --# I, --# M; is begin Assign (Level (I), M); end Assign_Level; procedure Assign_Term_Lev (I : in Sparklalr_Common.Term_Range; M : in Lev_Struct) --# global in out Term_Lev; --# derives Term_Lev from *, --# I, --# M; is begin Assign (Term_Lev (I), M); end Assign_Term_Lev; procedure Initiate_Level (I : in Sparklalr_Common.Production_Index) --# global in Init_Lev; --# in out Level; --# derives Level from *, --# I, --# Init_Lev; is begin Initiate (Level (I)); end Initiate_Level; procedure Initiate_Term_Lev (I : in Sparklalr_Common.Term_Range) --# global in Init_Lev; --# in out Term_Lev; --# derives Term_Lev from *, --# I, --# Init_Lev; is begin Initiate (Term_Lev (I)); end Initiate_Term_Lev; procedure Associativity (Symb : in Sparklalr_Symbol.Symbol; Current_Lev : in out Lev_Struct) is begin case Symb is when Sparklalr_Symbol.Left => Current_Lev.Assoc := Leftass; when Sparklalr_Symbol.Right => Current_Lev.Assoc := Rightass; when Sparklalr_Symbol.Non => Current_Lev.Assoc := Nonass; when others => null; end case; Current_Lev.Lev := Current_Lev.Lev + 1; end Associativity; procedure Precedence (Is_Shred : in Boolean; A, B : in Integer; Term_Index : in Integer; Report : out Boolean; Result_Precedence : out Integer) -- PRECEDENCE RETURNS A REPORT USED IN DETERMINING WHAT -- WHAT ACTION TO TAKE WHEN A PARSING CONFLICT ARISES --# global in Level; --# in Term_Lev; --# derives Report, --# Result_Precedence from A, --# B, --# Is_Shred, --# Level, --# Term_Index, --# Term_Lev; is P, Q : Lev_Struct; begin Report := False; case Is_Shred is when True => P := Term_Lev (Term_Index); Q := Level (A); if (P.Lev = 0) or else (Q.Assoc = Nodef) then Report := True; Result_Precedence := 1; else if P.Lev = Q.Lev then case P.Assoc is when Leftass => Result_Precedence := 2; when Rightass => Result_Precedence := 1; when Nonass => Result_Precedence := 5; when Nodef => Report := True; Result_Precedence := 1; end case; else if P.Lev > Q.Lev then Result_Precedence := 2; else Result_Precedence := 1; end if; end if; end if; when False => P := Level (A); Q := Level (B); if (P.Assoc = Nodef) and then (Q.Assoc = Nodef) then Report := True; if A > B then Result_Precedence := 4; else Result_Precedence := 3; end if; else if P.Lev > Q.Lev then Result_Precedence := 3; else if P.Lev < Q.Lev then 
Result_Precedence := 4; else if A > B then Result_Precedence := 4; else Result_Precedence := 3; end if; end if; end if; end if; end case; end Precedence; function Get_Term_Lev (I : in Sparklalr_Common.Term_Range) return Lev_Struct --# global in Term_Lev; is begin return Term_Lev (I); end Get_Term_Lev; procedure Set_Level_Action_Flag (I : in Sparklalr_Common.Production_Index; Value : in Boolean) --# global in out Level; --# derives Level from *, --# I, --# Value; is begin Level (I).Action_Flag := Value; end Set_Level_Action_Flag; end Sparklalr_Level; spark-2012.0.deb/sparklalr/spark_ada_integer_text_io.adb0000644000175000017500000000474511753202335022312 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Integer_Text_IO; with Ada.Text_IO; with SPARK.Ada.Text_IO.Not_SPARK; package body SPARK_Ada_Integer_Text_IO is --# hide SPARK_Ada_Integer_Text_IO; function Default_Width return SPARK.Ada.Text_IO.Field is begin return Integer'Width; end Default_Width; procedure Get_File (File : in out SPARK.Ada.Text_IO.File_Type; Item : out Integer; Width : in SPARK.Ada.Text_IO.Field) is begin Standard.Ada.Integer_Text_IO.Get (File => SPARK.Ada.Text_IO.Not_SPARK.To_File_Type (From => File), Item => Item, Width => Width); end Get_File; procedure Put_File (File : in out SPARK.Ada.Text_IO.File_Type; Item : in Integer; Width : in SPARK.Ada.Text_IO.Field; Base : in SPARK.Ada.Text_IO.Number_Base) is begin Standard.Ada.Integer_Text_IO.Put (File => SPARK.Ada.Text_IO.Not_SPARK.To_File_Type (From => File), Item => Item, Width => Width, Base => Base); end Put_File; procedure Put_Output (Item : in Integer; Width : in SPARK.Ada.Text_IO.Field; Base : in SPARK.Ada.Text_IO.Number_Base) is begin Standard.Ada.Integer_Text_IO.Put (File => Standard.Ada.Text_IO.Standard_Output, Item => Item, Width => Width, Base => Base); end Put_Output; end SPARK_Ada_Integer_Text_IO; spark-2012.0.deb/sparklalr/sparklalr_common.adb0000644000175000017500000001673611753202335020463 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SPARK_Ada_Integer_Text_IO; package body Sparklalr_Common is procedure Put_File_Output (Std_Out : in Boolean; File : in out SPARK.Ada.Text_IO.File_Type; Item : in String) is begin if Std_Out then SPARK.Ada.Text_IO.Put_Output (Item => Item); else SPARK.Ada.Text_IO.Put_File (File => File, Item => Item); end if; end Put_File_Output; procedure Put_Character_File_Output (Std_Out : in Boolean; File : in out SPARK.Ada.Text_IO.File_Type; Item : in Character) is begin if Std_Out then SPARK.Ada.Text_IO.Put_Character_Output (Item => Item); else SPARK.Ada.Text_IO.Put_Character_File (File => File, Item => Item); end if; end Put_Character_File_Output; procedure Put_Line_File_Output (Std_Out : in Boolean; File : in out SPARK.Ada.Text_IO.File_Type; Item : in String) is begin if Std_Out then SPARK.Ada.Text_IO.Put_Line_Output (Item => Item); else SPARK.Ada.Text_IO.Put_Line_File (File => File, Item => Item); end if; end Put_Line_File_Output; procedure Put_Integer_File_Output (Std_Out : in Boolean; File : in out SPARK.Ada.Text_IO.File_Type; Item : in Integer; Width : in SPARK.Ada.Text_IO.Field) is begin if Std_Out then SPARK_Ada_Integer_Text_IO.Put_Output (Item => Item, Width => Width, Base => 10); else SPARK_Ada_Integer_Text_IO.Put_File (File => File, Item => Item, Width => Width, Base => 10); end if; end Put_Integer_File_Output; procedure New_Line_File_Output (Std_Out : in Boolean; File : in out SPARK.Ada.Text_IO.File_Type) is begin if Std_Out then SPARK.Ada.Text_IO.New_Line_Output (Spacing => 1); else SPARK.Ada.Text_IO.New_Line_File (File => File, Spacing => 1); end if; end New_Line_File_Output; procedure Put_N_Chars (Std_Out : in Boolean; F : in out SPARK.Ada.Text_IO.File_Type; C : in Character; N : in Natural) is begin for I in Integer range 1 .. N loop Put_Character_File_Output (Std_Out => Std_Out, File => F, Item => C); end loop; end Put_N_Chars; procedure Print (Std_Out : in Boolean; F : in out SPARK.Ada.Text_IO.File_Type; String1 : in String; Posn : in out Integer; Tab : in Integer; Comm : in Boolean) is I : Integer; begin I := String1'Length; while String1 (I) = ' ' loop I := I - 1; end loop; if (Posn + I) > (Page_Width - 2) then New_Line_File_Output (Std_Out => Std_Out, File => F); if Comm then Put_File_Output (Std_Out => Std_Out, File => F, Item => "--"); Put_N_Chars (Std_Out => Std_Out, F => F, C => ' ', N => Tab - 2); else Put_N_Chars (Std_Out => Std_Out, F => F, C => ' ', N => Tab); end if; Posn := I + Tab; else Posn := Posn + I; end if; for J in Integer range 1 .. 
I loop Put_Character_File_Output (Std_Out => Std_Out, File => F, Item => String1 (J)); end loop; end Print; procedure Print2 (F : in out SPARK.Ada.Text_IO.File_Type; String1 : in String; String2 : in String; Posn : in out Integer; Tab : in Integer; Comm : in Boolean) is I, J : Integer; begin I := String1'Length; while String1 (I) = ' ' loop I := I - 1; end loop; J := String2'Length; while String2 (J) = ' ' loop J := J - 1; end loop; if ((Posn + I) + J) > (Page_Width - 2) then SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); if Comm then SPARK.Ada.Text_IO.Put_File (File => F, Item => "--"); Put_N_Chars (Std_Out => False, F => F, C => ' ', N => Tab - 2); else Put_N_Chars (Std_Out => False, F => F, C => ' ', N => Tab); end if; Posn := (I + J) + Tab; else Posn := (Posn + I) + J; end if; for K in Integer range 1 .. I loop SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => String1 (K)); end loop; for K in Integer range 1 .. J loop SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => String2 (K)); end loop; end Print2; function Code (Sr : in Action_Type; C : in Integer) return Integer -- ENCODES PARSING ACTION INTO INTEGER REPRESENTATION is Result_Code : Integer; begin if Sr = Reduce then Result_Code := C; else if Sr = Shift then Result_Code := Prod_Lim + C; else Result_Code := 0; end if; end if; return Result_Code; end Code; function Decode (C : in Integer) return Integer is Result_Decode : Integer; begin if C > Prod_Lim then Result_Decode := C - Prod_Lim; else Result_Decode := C; end if; return Result_Decode; end Decode; end Sparklalr_Common; spark-2012.0.deb/sparklalr/command_line_options.adb0000644000175000017500000003154011753202335021306 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
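--
-- Summary of the command line accepted by Get_Options below (derived from the
-- code; the invocation name in the example is illustrative only): the first
-- argument is the grammar file name, built from letters and digits, with
-- underscores allowed after the first character; it may be followed by
-- switches made up of letters and '-'.  Recognised switches set Verbose
-- ('v'), Self_Pack ('s'), Multi_Comp ('m'), Parser ('p') and the debug
-- options ('d' followed by 'a' .. 'i' for debug levels 1 .. 9, or 'u' for
-- Dump_Mem); anything else stops the run with "Invalid switch" and a failure
-- exit status.  A typical invocation might therefore look like:
--
--    sparklalr my_grammar -v -da
--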
-- --============================================================================= with SPARK.Ada.Command_Line; with SPARK.Ada.Command_Line.Unbounded_String; with SPARK.Ada.Text_IO; with Fatal; package body Command_Line_Options --# own State is Debug, --# Dump_Mem, --# File_Name, --# Multi_Comp, --# Parser, --# Self_Pack, --# Verbose; is type Debug_T is array (Debug_Level_Range) of Boolean; File_Name : SPARK.Ada.Strings.Unbounded.Unbounded_String; Verbose : Boolean; Debug : Debug_T; -- DEBUG FLAGS Dump_Mem : Boolean; -- FLAG FOR FINAL MEMORY DUMPING Parser : Boolean; Self_Pack : Boolean; Multi_Comp : Boolean; procedure Get_Options --# global in out SPARK.Ada.Command_Line.State; --# in out SPARK.Ada.Text_IO.The_Standard_Error; --# out Debug; --# out Dump_Mem; --# out File_Name; --# out Multi_Comp; --# out Parser; --# out Self_Pack; --# out Verbose; --# derives Debug, --# Dump_Mem, --# File_Name, --# Multi_Comp, --# Parser, --# Self_Pack, --# SPARK.Ada.Command_Line.State, --# Verbose from SPARK.Ada.Command_Line.State & --# SPARK.Ada.Text_IO.The_Standard_Error from *, --# SPARK.Ada.Command_Line.State; is Len_Cmd_Line : constant := 255; -- length of a command line string type Status_T is (Found, Missing, Invalid); subtype Cmd_Line_Range is Positive range 1 .. Len_Cmd_Line; Status : Status_T; Switch : SPARK.Ada.Strings.Unbounded.Unbounded_String; Cmd_Line_Pntr : Cmd_Line_Range; -- Get next parameter from command line and return it in conformant array 'String_Var'. -- Returns status which indicates result of operation in 'Status'. -- Pointer 'Cmd_Line_Pntr' is left pointing to next paramter to be read or last parameter in -- the command line. -- Where possible an syntactically invalid parameter is skipped so that subsequent -- calls to this procedure corectly return subsequent command line parameters. procedure Cl_Next_Param (Cmd_Line_Pntr : in out Cmd_Line_Range; String_Var : out SPARK.Ada.Strings.Unbounded.Unbounded_String; Status : out Status_T) --# global in SPARK.Ada.Command_Line.State; --# derives Cmd_Line_Pntr, --# Status, --# String_Var from Cmd_Line_Pntr, --# SPARK.Ada.Command_Line.State; is begin if Cmd_Line_Pntr <= SPARK.Ada.Command_Line.Argument_Count then String_Var := SPARK.Ada.Command_Line.Unbounded_String.Argument (Cmd_Line_Pntr); Status := Found; Cmd_Line_Pntr := Cmd_Line_Pntr + 1; else String_Var := SPARK.Ada.Strings.Unbounded.Null_Unbounded_String; Status := Missing; end if; end Cl_Next_Param; -- Checks if the command line has any unread parameters on it, returns True if yes, -- False if not. May be called at any time after a call to CL_Read. -- Does not affect the value of Cmd_Line_Pntr. function Cl_Empty (Cmd_Line_Pntr : in Cmd_Line_Range) return Boolean --# global in SPARK.Ada.Command_Line.State; is begin return Cmd_Line_Pntr > SPARK.Ada.Command_Line.Argument_Count; end Cl_Empty; procedure Cl_File_Name (Cmd_Line_Pntr : in out Cmd_Line_Range; File_Name : out SPARK.Ada.Strings.Unbounded.Unbounded_String; Status : out Status_T) --# global in SPARK.Ada.Command_Line.State; --# derives Cmd_Line_Pntr, --# File_Name, --# Status from Cmd_Line_Pntr, --# SPARK.Ada.Command_Line.State; is -- Checks 'File_Name' for correct syntax procedure Check_File_Name (File_Name : in SPARK.Ada.Strings.Unbounded.Unbounded_String; Status : in out Status_T) --# derives Status from *, --# File_Name; is C : Character; begin if Status = Found then -- check File_Name characters are valid for I in Positive range 1 .. 
SPARK.Ada.Strings.Unbounded.Get_Length (Source => File_Name) loop C := SPARK.Ada.Strings.Unbounded.Get_Element (Source => File_Name, Index => I); if (I = 1 and then not (C in 'a' .. 'z' or else C in 'A' .. 'Z' or else C in '0' .. '9')) or else (I /= 1 and then not (C in 'a' .. 'z' or else C in 'A' .. 'Z' or else C in '0' .. '9' or else C = '_')) then Status := Invalid; end if; end loop; end if; end Check_File_Name; begin -- Cl_File_Name Cl_Next_Param (Cmd_Line_Pntr, File_Name, Status); Check_File_Name (File_Name, Status); end Cl_File_Name; procedure Cl_Switch (Cmd_Line_Pntr : in out Cmd_Line_Range; Switch : out SPARK.Ada.Strings.Unbounded.Unbounded_String; Status : out Status_T) --# global in SPARK.Ada.Command_Line.State; --# derives Cmd_Line_Pntr, --# Status, --# Switch from Cmd_Line_Pntr, --# SPARK.Ada.Command_Line.State; is -- Checks Switch string for correct syntax procedure Check_Switch (Switch : in SPARK.Ada.Strings.Unbounded.Unbounded_String; Status : in out Status_T) --# derives Status from *, --# Switch; is C : Character; begin if Status = Found then -- check Switch characters are valid for I in Positive range 1 .. SPARK.Ada.Strings.Unbounded.Get_Length (Source => Switch) loop C := SPARK.Ada.Strings.Unbounded.Get_Element (Source => Switch, Index => I); if not (C in 'a' .. 'z' or else C in 'A' .. 'Z' or else C = '-') then Status := Invalid; end if; end loop; end if; end Check_Switch; begin -- Cl_Switch Cl_Next_Param (Cmd_Line_Pntr, Switch, Status); Check_Switch (Switch, Status); end Cl_Switch; begin -- Get_Options Verbose := False; Debug := Debug_T'(others => False); Dump_Mem := False; Parser := False; Self_Pack := False; Multi_Comp := False; Cmd_Line_Pntr := 1; Cl_File_Name (Cmd_Line_Pntr, File_Name, Status); case Status is when Missing => SPARK.Ada.Text_IO.Put_Line_Error (Item => "No grammar file name supplied"); SPARK.Ada.Command_Line.Set_Exit_Status (SPARK.Ada.Command_Line.Failure); Fatal.Stop_With_Command_Line_Exception; when Invalid => SPARK.Ada.Text_IO.Put_Line_Error (Item => "Grammar file name contains invalid characters"); SPARK.Ada.Command_Line.Set_Exit_Status (SPARK.Ada.Command_Line.Failure); Fatal.Stop_With_Command_Line_Exception; when Found => null; end case; while not Cl_Empty (Cmd_Line_Pntr) loop Cl_Switch (Cmd_Line_Pntr, Switch, Status); case Status is when Missing => null; when Invalid => SPARK.Ada.Text_IO.Put_Line_Error (Item => "Switch name contains invalid characters"); SPARK.Ada.Command_Line.Set_Exit_Status (SPARK.Ada.Command_Line.Failure); Fatal.Stop_With_Command_Line_Exception; when Found => if SPARK.Ada.Strings.Unbounded.Get_Length (Switch) > 1 then case SPARK.Ada.Strings.Unbounded.Get_Element (Source => Switch, Index => 2) is when 'v' => Verbose := True; when 's' => Self_Pack := True; when 'm' => Multi_Comp := True; when 'd' => if SPARK.Ada.Strings.Unbounded.Get_Length (Switch) > 2 then case SPARK.Ada.Strings.Unbounded.Get_Element (Source => Switch, Index => 3) is when 'a' => Debug (1) := True; when 'b' => Debug (2) := True; when 'c' => Debug (3) := True; when 'd' => Debug (4) := True; when 'e' => Debug (5) := True; when 'f' => Debug (6) := True; when 'g' => Debug (7) := True; when 'h' => Debug (8) := True; when 'i' => Debug (9) := True; when 'u' => Dump_Mem := True; when others => SPARK.Ada.Text_IO.Put_Line_Error (Item => "Invalid switch"); SPARK.Ada.Command_Line.Set_Exit_Status (SPARK.Ada.Command_Line.Failure); Fatal.Stop_With_Command_Line_Exception; end case; else SPARK.Ada.Text_IO.Put_Line_Error (Item => "Invalid switch"); 
SPARK.Ada.Command_Line.Set_Exit_Status (SPARK.Ada.Command_Line.Failure); Fatal.Stop_With_Command_Line_Exception; end if; when 'p' => Parser := True; when others => SPARK.Ada.Text_IO.Put_Line_Error (Item => "Invalid switch"); SPARK.Ada.Command_Line.Set_Exit_Status (SPARK.Ada.Command_Line.Failure); Fatal.Stop_With_Command_Line_Exception; end case; else SPARK.Ada.Text_IO.Put_Line_Error (Item => "Invalid switch"); SPARK.Ada.Command_Line.Set_Exit_Status (SPARK.Ada.Command_Line.Failure); Fatal.Stop_With_Command_Line_Exception; end if; end case; end loop; end Get_Options; function Get_File_Name return SPARK.Ada.Strings.Unbounded.Unbounded_String --# global in File_Name; is begin return File_Name; end Get_File_Name; function Get_Verbose return Boolean --# global in Verbose; is begin return Verbose; end Get_Verbose; function Get_Debug_Level (Level : in Debug_Level_Range) return Boolean --# global in Debug; is begin return Debug (Level); end Get_Debug_Level; function Get_Dump_Mem return Boolean --# global in Dump_Mem; is begin return Dump_Mem; end Get_Dump_Mem; function Get_Parser return Boolean --# global in Parser; is begin return Parser; end Get_Parser; function Get_Self_Pack return Boolean --# global in Self_Pack; is begin return Self_Pack; end Get_Self_Pack; function Get_Multi_Comp return Boolean --# global in Multi_Comp; is begin return Multi_Comp; end Get_Multi_Comp; end Command_Line_Options; spark-2012.0.deb/sparklalr/sparklalr_goto.adb0000644000175000017500000020064311753202335020133 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Command_Line_Options; with SPARK_Ada_Integer_Text_IO; with Sparklalr_Error; with Sparklalr_Memory; with Symbols_Dump; package body Sparklalr_Goto --# own State is Goto_Array, --# Goto_Count, --# Goto_List, --# Goto_List_Count, --# Goto_Table; is Goto_Table_Size : constant := 10000; subtype Pt_Goto_Rec is Natural range 0 .. Goto_Table_Size; subtype Goto_Array_Range is Positive range 1 .. 
Goto_Table_Size; type Goto_Rec is record Index, Check_Index : Sparklalr_Common.State_Range; Entry_State : Sparklalr_Common.State_Range; Go_Next : Pt_Goto_Rec; end record; type Goto_Array_Array_T is array (Goto_Array_Range) of Goto_Rec; type Goto_Array_T is record The_Array : Goto_Array_Array_T; Top : Pt_Goto_Rec; end record; type N_Term_Index_Pair is record Lower, Upper : Integer; end record; type N_Term_Index is array (Sparklalr_Common.Non_Term_Range) of N_Term_Index_Pair; type Goto_List_T is array (Sparklalr_Common.Non_Term_Range) of Pt_Goto_Rec; -- GOTO TABLE Goto_List : Goto_List_T; Goto_List_Count : Integer; Goto_Count : Integer; Goto_Table : N_Term_Index; Goto_Array : Goto_Array_T; -- Local procedures/functions procedure New_State_Set (G : in Pt_Goto_Rec; Org : in out Integer; Last_Var : out Integer; Next : out Next_T) --# global in out Goto_Array; --# derives Goto_Array, --# Org from *, --# G, --# Goto_Array & --# Last_Var from G, --# Goto_Array, --# Org & --# Next from ; is begin while Goto_Array.The_Array (G).Index - Org > Sparklalr_Common.Max_Set loop Org := (Org + Sparklalr_Common.Max_Set) + 1; end loop; while Goto_Array.The_Array (G).Index - Org < 0 loop Org := (Org - Sparklalr_Common.Max_Set) - 1; end loop; Last_Var := Goto_Array.The_Array (G).Index - Org; Next := Next_T'(others => False); Goto_Array.The_Array (G).Index := 0; end New_State_Set; procedure Copy (G : in Pt_Goto_Rec; Found : out Boolean; G1 : in out Pt_Goto_Rec) --# global in Goto_Array; --# derives Found, --# G1 from G, --# G1, --# Goto_Array; is Gval : Integer; begin Found := False; Gval := Goto_Array.The_Array (G).Entry_State; G1 := Goto_Array.The_Array (G1).Go_Next; while (G1 /= 0) and then not Found loop if Goto_Array.The_Array (G1).Entry_State = Gval then Found := True; else G1 := Goto_Array.The_Array (G1).Go_Next; end if; end loop; end Copy; procedure Insert_State (G : in Pt_Goto_Rec; Org : in Integer; Last_Var : in out Integer; Next : in out Next_T) --# global in out Goto_Array; --# derives Goto_Array from *, --# G & --# Last_Var, --# Next from *, --# G, --# Goto_Array, --# Last_Var, --# Org; is begin if Goto_Array.The_Array (G).Index - Org < Last_Var then Next (Goto_Array.The_Array (G).Index - Org) := True; else Next (Last_Var) := True; Last_Var := Goto_Array.The_Array (G).Index - Org; end if; Goto_Array.The_Array (G).Index := 0; end Insert_State; function Last_Goto (G : in Pt_Goto_Rec) return Boolean --# global in Goto_Array; is Result : Boolean; Gval : Integer; G_Tmp : Pt_Goto_Rec; begin G_Tmp := G; Result := True; Gval := Goto_Array.The_Array (G_Tmp).Entry_State; G_Tmp := Goto_Array.The_Array (G_Tmp).Go_Next; while (G_Tmp /= 0) and then Result loop if (Goto_Array.The_Array (G_Tmp).Entry_State = Gval) or else (Goto_Array.The_Array (G_Tmp).Index = 0) then G_Tmp := Goto_Array.The_Array (G_Tmp).Go_Next; else Result := False; end if; end loop; return Result; end Last_Goto; procedure Zero_Index (G : in Pt_Goto_Rec) --# global in out Goto_Array; --# derives Goto_Array from *, --# G; is G_Tmp : Pt_Goto_Rec; begin G_Tmp := G; if Last_Goto (G) then while G_Tmp /= 0 loop Goto_Array.The_Array (G_Tmp).Index := 0; G_Tmp := Goto_Array.The_Array (G_Tmp).Go_Next; end loop; end if; end Zero_Index; -- End local procedures/functions procedure Initialise --# global out Goto_Array; --# out Goto_Count; --# out Goto_List; --# out Goto_List_Count; --# out Goto_Table; --# derives Goto_Array, --# Goto_Count, --# Goto_List, --# Goto_List_Count, --# Goto_Table from ; is begin Goto_List := Goto_List_T'(others => 0); Goto_List_Count 
:= 0; Goto_Count := 0; Goto_Table := N_Term_Index'(others => N_Term_Index_Pair'(Lower => 0, Upper => 0)); Goto_Array := Goto_Array_T' (The_Array => Goto_Array_Array_T'(others => Goto_Rec'(Index => 0, Check_Index => 0, Entry_State => 0, Go_Next => 0)), Top => 0); end Initialise; function Goto_Search (State_Index, Non_Term_Index : in Integer) return Integer -- SEARCHES GOTO TABLE --# global in Goto_Array; --# in Goto_List; is Result_Goto_Search : Integer; Glist : Pt_Goto_Rec; Found : Boolean; begin Found := False; Result_Goto_Search := 0; Glist := Goto_List (Non_Term_Index - Sparklalr_Common.Nt_Base); while (Glist /= 0) and then not Found loop if Goto_Array.The_Array (Glist).Index = State_Index then Result_Goto_Search := Goto_Array.The_Array (Glist).Entry_State; Found := True; else Glist := Goto_Array.The_Array (Glist).Go_Next; end if; end loop; return Result_Goto_Search; end Goto_Search; procedure Go_Out (I : in Integer) --# global in Goto_List; --# in out Goto_Array; --# in out Goto_List_Count; --# derives Goto_Array, --# Goto_List_Count from *, --# Goto_Array, --# Goto_List, --# I; is G, G1 : Pt_Goto_Rec; Found : Boolean; Org : Integer; Last_Var : Integer; Next : Next_T; procedure New_State_Set_Nested (G : in Pt_Goto_Rec; Org : in out Integer; Last_Var : out Integer; Next : out Next_T) --# global in out Goto_Array; --# derives Goto_Array, --# Org from *, --# G, --# Goto_Array & --# Last_Var from G, --# Goto_Array, --# Org & --# Next from ; is begin while Goto_Array.The_Array (G).Check_Index - Org > Sparklalr_Common.Max_Set loop Org := (Org + Sparklalr_Common.Max_Set) + 1; end loop; while Goto_Array.The_Array (G).Index - Org < 0 loop Org := (Org - Sparklalr_Common.Max_Set) - 1; end loop; Last_Var := Goto_Array.The_Array (G).Check_Index - Org; Next := Next_T'(others => False); Goto_Array.The_Array (G).Check_Index := 0; end New_State_Set_Nested; procedure Insert_State_Nested (G : in Pt_Goto_Rec; Org : in Integer; Last_Var : in out Integer; Next : in out Next_T) --# global in out Goto_Array; --# derives Goto_Array from *, --# G & --# Last_Var, --# Next from *, --# G, --# Goto_Array, --# Last_Var, --# Org; is begin if Goto_Array.The_Array (G).Check_Index - Org < Last_Var then Next (Goto_Array.The_Array (G).Check_Index - Org) := True; else Next (Last_Var) := True; Last_Var := Goto_Array.The_Array (G).Check_Index - Org; end if; Goto_Array.The_Array (G).Check_Index := 0; end Insert_State_Nested; function Last_Goto_Nested (G : in Pt_Goto_Rec) return Boolean --# global in Goto_Array; is Result : Boolean; Gval : Integer; G_Tmp : Pt_Goto_Rec; begin G_Tmp := G; Result := True; Gval := Goto_Array.The_Array (G_Tmp).Entry_State; G_Tmp := Goto_Array.The_Array (G_Tmp).Go_Next; while (G_Tmp /= 0) and then Result loop if (Goto_Array.The_Array (G_Tmp).Entry_State = Gval) or else (Goto_Array.The_Array (G_Tmp).Check_Index = 0) then G_Tmp := Goto_Array.The_Array (G_Tmp).Go_Next; else Result := False; end if; end loop; return Result; end Last_Goto_Nested; procedure Zero_Index_Nested (G : in Pt_Goto_Rec) --# global in out Goto_Array; --# derives Goto_Array from *, --# G; is G_Tmp : Pt_Goto_Rec; begin G_Tmp := G; while G_Tmp /= 0 loop Goto_Array.The_Array (G_Tmp).Check_Index := 0; G_Tmp := Goto_Array.The_Array (G_Tmp).Go_Next; end loop; end Zero_Index_Nested; procedure Write_States (Last_Var : in Integer; Next : in Next_T) --# global in out Goto_List_Count; --# derives Goto_List_Count from *, --# Last_Var, --# Next; is I : Integer; Next_Tmp : Next_T; begin Next_Tmp := Next; if Next_Tmp = Next_False_Const then 
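                  -- Next_False_Const (no further states recorded in Next) means this
                  -- goto collapses to a single counted entry; otherwise one entry is
                  -- counted for the base state and one for every state flagged in Next.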
Goto_List_Count := Goto_List_Count + 1; else Goto_List_Count := Goto_List_Count + 1; I := Last_Var - 1; while Next_Tmp /= Next_False_Const loop if Next_Tmp (I) then Goto_List_Count := Goto_List_Count + 1; Next_Tmp (I) := False; end if; I := I - 1; end loop; end if; end Write_States; begin G := Goto_List (I); while G /= 0 loop if Goto_Array.The_Array (G).Check_Index /= 0 then Org := 0; New_State_Set_Nested (G, Org, Last_Var, Next); G1 := G; Copy (G, Found, G1); while Found loop if Goto_Array.The_Array (G1).Check_Index - Org < 0 then Write_States (Last_Var, Next); New_State_Set_Nested (G1, Org, Last_Var, Next); else Insert_State_Nested (G1, Org, Last_Var, Next); end if; Copy (G, Found, G1); end loop; Write_States (Last_Var, Next); if Last_Goto_Nested (G) then Zero_Index_Nested (G); end if; end if; G := Goto_Array.The_Array (G).Go_Next; end loop; end Go_Out; procedure Dump_Goto (F : in out SPARK.Ada.Text_IO.File_Type) --# global in Goto_Array; --# in Goto_List; --# in Sparklalr_Memory.Stat_No; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Goto_Array, --# Goto_List, --# Sparklalr_Memory.Stat_No, --# Symbols_Dump.State; is Posn : Integer; begin SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " GOTO TABLE "); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Posn := 1; for I in Integer range 1 .. Symbols_Dump.Get_Nnon_Terms loop SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ' '); Posn := Posn + 1; --# accept F, 10, Posn, "Ineffective assignment here expected and OK"; Sparklalr_Common.Print (Std_Out => False, F => F, String1 => Symbols_Dump.Get_Nterm_Set (I), Posn => Posn, Tab => 2, Comm => False); --# end accept; SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " : "); for J in Integer range 1 .. 
Sparklalr_Memory.Get_Stat_No loop if Goto_Search (J, I + Sparklalr_Common.Nt_Base) /= 0 then SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => J, Width => 4, Base => 10); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Goto_Search (J, I + Sparklalr_Common.Nt_Base), Width => 4, Base => 10); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); end if; end loop; SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Posn := 1; end loop; SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); end Dump_Goto; procedure Goto_Gen (F : in out SPARK.Ada.Text_IO.File_Type; Insertion, State_Index, Non_Term_Index : in Integer; Call_Pa_Insert : out Boolean) -- GENERATES AN ENTRY FOR INSERTION IN THE GOTO TABLE --# global in Command_Line_Options.State; --# in Symbols_Dump.State; --# in out Goto_Array; --# in out Goto_Count; --# in out Goto_List; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# derives Call_Pa_Insert from Non_Term_Index & --# F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State, --# Goto_Array, --# Goto_List, --# Insertion, --# Non_Term_Index, --# State_Index, --# Symbols_Dump.State & --# Goto_Array, --# Goto_Count, --# Goto_List, --# Sparklalr_Error.State from *, --# Goto_Array, --# Goto_List, --# Insertion, --# Non_Term_Index, --# State_Index; is G : Integer; procedure Goto_Insert (F : in out SPARK.Ada.Text_IO.File_Type; State_Index, Non_Term_Index, Insertion : in Integer) -- INSERTS A NEW ENTRY INTO THE GOTO TABLE --# global in Command_Line_Options.State; --# in Symbols_Dump.State; --# in out Goto_Array; --# in out Goto_List; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F from *, --# Command_Line_Options.State, --# Non_Term_Index, --# Symbols_Dump.State & --# Goto_Array from *, --# Goto_List, --# Insertion, --# Non_Term_Index, --# State_Index & --# Goto_List from *, --# Goto_Array, --# Non_Term_Index & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State, --# Insertion, --# Non_Term_Index, --# State_Index, --# Symbols_Dump.State; is Posn : Integer; Non_Term_Index_Tmp : Integer; begin Non_Term_Index_Tmp := Non_Term_Index - Sparklalr_Common.Nt_Base; if Command_Line_Options.Get_Debug_Level (5) then SPARK.Ada.Text_IO.Put_Output (Item => " GOTO : (STATE="); SPARK_Ada_Integer_Text_IO.Put_Output (Item => State_Index, Width => 3, Base => 10); SPARK.Ada.Text_IO.Put_Output (Item => ",NONTERM="); Posn := 29; --# accept F, 10, Posn, "Ineffective assignment here expected and OK"; Sparklalr_Common.Print (Std_Out => True, F => F, String1 => Symbols_Dump.Get_Nterm_Set (Non_Term_Index_Tmp), Posn => Posn, Tab => 29, Comm => False); --# end accept; SPARK.Ada.Text_IO.Put_Output (Item => ") = "); SPARK_Ada_Integer_Text_IO.Put_Output (Item => Insertion, Width => 3, Base => 10); SPARK.Ada.Text_IO.New_Line_Output (Spacing => 1); end if; Goto_Array.Top := Goto_Array.Top + 1; Goto_Array.The_Array (Goto_Array.Top) := Goto_Rec' (Index => State_Index, Check_Index => State_Index, Entry_State => Insertion, Go_Next => Goto_List (Non_Term_Index_Tmp)); Goto_List (Non_Term_Index_Tmp) := Goto_Array.Top; end Goto_Insert; begin -- Goto_Gen if Non_Term_Index > Sparklalr_Common.Nt_Base then G := Goto_Search (State_Index, Non_Term_Index); if (G /= 0) and then (G /= Insertion) then Sparklalr_Error.Error (F, 32); else if G = 0 then Goto_Count := Goto_Count + 1; Goto_Insert (F, State_Index, Non_Term_Index, Insertion); 
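               -- At this point a genuinely new (state, non-terminal) entry has been
               -- chained onto Goto_List by Goto_Insert; an identical existing entry
               -- is left untouched, and a conflicting one was reported above as
               -- error 32 via Sparklalr_Error.Error.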
end if; end if; Call_Pa_Insert := False; else Call_Pa_Insert := True; end if; end Goto_Gen; procedure Goto_Out (F : in out SPARK.Ada.Text_IO.File_Type) --# global in Goto_List; --# in Symbols_Dump.State; --# in out Goto_Array; --# in out Goto_Table; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F from *, --# Goto_Array, --# Goto_List, --# Goto_Table, --# Symbols_Dump.State & --# Goto_Array, --# Goto_Table, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Goto_Array, --# Goto_List, --# Symbols_Dump.State; is procedure Aux_Got_Out (F : in out SPARK.Ada.Text_IO.File_Type) --# global in Goto_List; --# in Symbols_Dump.State; --# in out Goto_Array; --# in out Goto_Table; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# Goto_Array, --# Goto_Table, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Goto_Array, --# Goto_List, --# Symbols_Dump.State; is Posn : Integer; Curr_Got_Index : Integer; procedure Go_Out_Nested (F : in out SPARK.Ada.Text_IO.File_Type; I : in Integer; Curr_Got_Index : in out Integer; Posn : in Integer) --# global in Goto_List; --# in Symbols_Dump.State; --# in out Goto_Array; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives Curr_Got_Index, --# Goto_Array from *, --# Goto_Array, --# Goto_List, --# I & --# F from *, --# Curr_Got_Index, --# Goto_Array, --# Goto_List, --# I, --# Posn, --# Symbols_Dump.State & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Goto_Array, --# Goto_List, --# I, --# Posn, --# Symbols_Dump.State; is Comma_Required : Boolean; G, G1 : Pt_Goto_Rec; Org : Integer; Last_Var : Integer; Next : Next_T; Found : Boolean; Posn_Tmp : Integer; procedure Write_States (F : in out SPARK.Ada.Text_IO.File_Type; Nt : in Integer; G : in Pt_Goto_Rec; Org : in Integer; Last_Var : in Integer; Next : in Next_T; Curr_Got_Index : in out Integer; Posn : in out Integer) --# global in Goto_Array; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives Curr_Got_Index from *, --# Last_Var, --# Next & --# F from *, --# Curr_Got_Index, --# G, --# Goto_Array, --# Last_Var, --# Next, --# Nt, --# Org, --# Posn, --# Symbols_Dump.State & --# Posn, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Last_Var, --# Next, --# Nt, --# Posn, --# Symbols_Dump.State; is I : Integer; Next_Tmp : Next_T; begin Next_Tmp := Next; if Next_Tmp = Next_False_Const then SPARK.Ada.Text_IO.Put_File (File => F, Item => "State_Pair'("); Sparklalr_Common.Print2 (F, Sparklalr_Common.Sp_Symbol_Str, Symbols_Dump.Get_Nterm_Set (Nt), Posn, 10, False); SPARK.Ada.Text_IO.Put_File (File => F, Item => ", "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Last_Var + Org, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => ", "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Goto_Array.The_Array (G).Entry_State, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ')'); Curr_Got_Index := Curr_Got_Index + 1; else SPARK.Ada.Text_IO.Put_File (File => F, Item => "State_Pair'("); Sparklalr_Common.Print2 (F, Sparklalr_Common.Sp_Symbol_Str, Symbols_Dump.Get_Nterm_Set (Nt), Posn, 10, False); SPARK.Ada.Text_IO.Put_File (File => F, Item => ", "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Last_Var + Org, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => ", "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Goto_Array.The_Array (G).Entry_State, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ')'); 
Curr_Got_Index := Curr_Got_Index + 1; I := Last_Var - 1; while Next_Tmp /= Next_False_Const loop if Next_Tmp (I) then SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Curr_Got_Index, Width => 5, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " => "); SPARK.Ada.Text_IO.Put_File (File => F, Item => "State_Pair'("); Sparklalr_Common.Print2 (F, Sparklalr_Common.Sp_Symbol_Str, Symbols_Dump.Get_Nterm_Set (Nt), Posn, 10, False); SPARK.Ada.Text_IO.Put_File (File => F, Item => ", "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => I + Org, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => ", "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Goto_Array.The_Array (G).Entry_State, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ')'); Curr_Got_Index := Curr_Got_Index + 1; Next_Tmp (I) := False; end if; I := I - 1; end loop; end if; end Write_States; begin -- Go_Out_Nested Posn_Tmp := Posn; Comma_Required := False; G := Goto_List (I); while G /= 0 loop if Goto_Array.The_Array (G).Index /= 0 then if Comma_Required then SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); else Comma_Required := True; end if; SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Curr_Got_Index, Width => 5, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " => "); Org := 0; New_State_Set (G, Org, Last_Var, Next); G1 := G; Copy (G, Found, G1); while Found loop if Goto_Array.The_Array (G1).Index - Org < 0 then Write_States (F, I, G, Org, Last_Var, Next, Curr_Got_Index, Posn_Tmp); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Curr_Got_Index, Width => 5, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " => "); New_State_Set (G1, Org, Last_Var, Next); else Insert_State (G1, Org, Last_Var, Next); end if; Copy (G, Found, G1); end loop; Write_States (F, I, G, Org, Last_Var, Next, Curr_Got_Index, Posn_Tmp); Zero_Index (G); end if; G := Goto_Array.The_Array (G).Go_Next; end loop; end Go_Out_Nested; begin -- Aux_Got_Out Curr_Got_Index := 1; SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Goto_Table : constant Aux_GOT := Aux_GOT'("); Posn := 1; for I in Integer range 2 .. Symbols_Dump.Get_Nnon_Terms loop SPARK.Ada.Text_IO.Put_File (File => F, Item => "----- "); Posn := Posn + 6; Sparklalr_Common.Print (Std_Out => False, F => F, String1 => Symbols_Dump.Get_Nterm_Set (I), Posn => Posn, Tab => 6, Comm => False); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Goto_Table (I).Lower := Curr_Got_Index; Go_Out_Nested (F, I, Curr_Got_Index, Posn); Goto_Table (I).Upper := Curr_Got_Index - 1; if I < Symbols_Dump.Get_Nnon_Terms then SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); end if; Posn := 1; end loop; SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ");"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); end Aux_Got_Out; procedure Main_Got_Out (F : in out SPARK.Ada.Text_IO.File_Type) --# global in Goto_Table; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F from *, --# Goto_Table, --# Symbols_Dump.State & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Symbols_Dump.State; is Posn : Integer; begin SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Non_Term_Table : constant Main_GOT := Main_GOT'("); Posn := 1; for Nterm in Integer range 2 .. 
Symbols_Dump.Get_Nnon_Terms loop SPARK.Ada.Text_IO.Put_File (File => F, Item => " "); Posn := Posn + 3; Sparklalr_Common.Print2 (F, Sparklalr_Common.Sp_Symbol_Str, Symbols_Dump.Get_Nterm_Set (Nterm), Posn, 3, False); SPARK.Ada.Text_IO.Put_File (File => F, Item => " => GOT_Index_Pair'("); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Goto_Table (Nterm).Lower, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => ", "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Goto_Table (Nterm).Upper, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ')'); if Nterm < Symbols_Dump.Get_Nnon_Terms then SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); Posn := 1; end if; end loop; SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ");"); end Main_Got_Out; begin -- Goto_Out SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " type State_Pair is"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " record"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Red_Goal : SP_Symbols.SP_Non_Terminal;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Current_State, Next_State : SP_Productions.SP_State;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " end record;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " pragma PACK(State_Pair);"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " type Aux_GOT is array (GOT_Index) of State_Pair;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " type GOT_Index_Pair is"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " record"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Lower, Upper : Goto_Index;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " end record;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " pragma PACK(GOT_Index_Pair);"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " type Main_GOT is array (SP_Symbols.SP_Grammar_Non_Terminal) of GOT_Index_Pair;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Aux_Got_Out (F); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Main_Got_Out (F); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); end Goto_Out; procedure Goto_Out_Sp (F : in out SPARK.Ada.Text_IO.File_Type) --# global in Goto_List; --# in Goto_List_Count; --# in Sparklalr_Memory.Stat_No; --# in Symbols_Dump.State; --# in out Goto_Array; --# in out Goto_Table; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F from *, --# Goto_Array, --# Goto_List, --# Goto_List_Count, --# Goto_Table, --# Sparklalr_Memory.Stat_No, --# Symbols_Dump.State & --# Goto_Array, --# Goto_Table, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Goto_Array, --# Goto_List, --# Symbols_Dump.State; is Goto_Count_P2, State_Count_P2, Nt_Count_P2 : Integer; procedure Aux_Got_Out (F : in out SPARK.Ada.Text_IO.File_Type) --# global in Goto_List; --# in Symbols_Dump.State; --# in out Goto_Array; --# in out Goto_Table; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# Goto_Array, --# Goto_Table, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Goto_Array, --# Goto_List, --# Symbols_Dump.State; is Posn : Integer; Curr_Got_Index : Integer; procedure Go_Out_Nested (F : in out SPARK.Ada.Text_IO.File_Type; I : in Integer; Curr_Got_Index : in out Integer) --# global in Goto_List; --# in Symbols_Dump.State; --# in out Goto_Array; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives Curr_Got_Index, --# Goto_Array from 
*, --# Goto_Array, --# Goto_List, --# I & --# F from *, --# Curr_Got_Index, --# Goto_Array, --# Goto_List, --# I, --# Symbols_Dump.State & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Goto_Array, --# Goto_List, --# I, --# Symbols_Dump.State; is Comma_Required : Boolean; G, G1 : Pt_Goto_Rec; Org : Integer; Last_Var : Integer; Next : Next_T; Found : Boolean; procedure Write_States (F : in out SPARK.Ada.Text_IO.File_Type; Nt : in Integer; G : in Pt_Goto_Rec; Org : in Integer; Last_Var : in Integer; Next : in Next_T; Curr_Got_Index : in out Integer) --# global in Goto_Array; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives Curr_Got_Index from *, --# Last_Var, --# Next & --# F from *, --# Curr_Got_Index, --# G, --# Goto_Array, --# Last_Var, --# Next, --# Nt, --# Org, --# Symbols_Dump.State & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Last_Var, --# Next, --# Nt, --# Symbols_Dump.State; is I, Posn : Integer; Next_Tmp : Next_T; begin Next_Tmp := Next; if Next_Tmp = Next_False_Const then SPARK.Ada.Text_IO.Put_File (File => F, Item => "(Current_State * "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Last_Var + Org, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " + Next_State * "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Goto_Array.The_Array (G).Entry_State, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ") + Red_Goal * (SP_Symbols.SP_Non_Terminal'Pos ("); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 10); Posn := 10; --# accept F, 10, Posn, "Ineffective assignment here expected and OK"; Sparklalr_Common.Print2 (F, Sparklalr_Common.Sp_Symbol_Str, Symbols_Dump.Get_Nterm_Set (Nt), Posn, 10, False); --# end accept; SPARK.Ada.Text_IO.Put_File (File => F, Item => ") - First_Non_Terminal)"); Curr_Got_Index := Curr_Got_Index + 1; else SPARK.Ada.Text_IO.Put_File (File => F, Item => "(Current_State * "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Last_Var + Org, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " + Next_State * "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Goto_Array.The_Array (G).Entry_State, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ") + Red_Goal * (SP_Symbols.SP_Non_Terminal'Pos ("); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 10); Posn := 10; --# accept F, 10, Posn, "Ineffective assignment here expected and OK"; Sparklalr_Common.Print2 (F, Sparklalr_Common.Sp_Symbol_Str, Symbols_Dump.Get_Nterm_Set (Nt), Posn, 10, False); --# end accept; SPARK.Ada.Text_IO.Put_File (File => F, Item => ") - First_Non_Terminal)"); Curr_Got_Index := Curr_Got_Index + 1; I := Last_Var - 1; while Next_Tmp /= Next_False_Const loop if Next_Tmp (I) then SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Curr_Got_Index, Width => 5, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " => "); SPARK.Ada.Text_IO.Put_File (File => F, Item => "(Current_State * "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => I + Org, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " + Next_State * "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Goto_Array.The_Array (G).Entry_State, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ") + Red_Goal * (SP_Symbols.SP_Non_Terminal'Pos ("); Sparklalr_Common.Put_N_Chars (Std_Out => 
False, F => F, C => ' ', N => 10); Posn := 10; --# accept F, 10, Posn, "Ineffective assignment here expected and OK"; Sparklalr_Common.Print2 (F, Sparklalr_Common.Sp_Symbol_Str, Symbols_Dump.Get_Nterm_Set (Nt), Posn, 10, False); --# end accept; SPARK.Ada.Text_IO.Put_File (File => F, Item => ") - First_Non_Terminal)"); Curr_Got_Index := Curr_Got_Index + 1; Next_Tmp (I) := False; end if; I := I - 1; end loop; end if; end Write_States; begin -- Go_Out_Nested Comma_Required := False; G := Goto_List (I); while G /= 0 loop if Goto_Array.The_Array (G).Index /= 0 then if Comma_Required then SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); else Comma_Required := True; end if; SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Curr_Got_Index, Width => 5, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " => "); Org := 0; New_State_Set (G, Org, Last_Var, Next); G1 := G; Copy (G, Found, G1); while Found loop if Goto_Array.The_Array (G1).Index - Org < 0 then Write_States (F, I, G, Org, Last_Var, Next, Curr_Got_Index); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Curr_Got_Index, Width => 5, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " => "); New_State_Set (G1, Org, Last_Var, Next); else Insert_State (G1, Org, Last_Var, Next); end if; Copy (G, Found, G1); end loop; Write_States (F, I, G, Org, Last_Var, Next, Curr_Got_Index); Zero_Index (G); end if; G := Goto_Array.The_Array (G).Go_Next; end loop; end Go_Out_Nested; begin -- Aux_Got_Out Curr_Got_Index := 1; SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Goto_Table : constant Aux_GOT := Aux_GOT'("); Posn := 1; for I in Integer range 2 .. Symbols_Dump.Get_Nnon_Terms loop SPARK.Ada.Text_IO.Put_File (File => F, Item => "----- "); Posn := Posn + 6; --# accept F, 10, Posn, "Ineffective assignment here expected and OK"; Sparklalr_Common.Print2 (F, Sparklalr_Common.Sp_Symbol_Str, Symbols_Dump.Get_Nterm_Set (I), Posn, 6, False); --# end accept; SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Goto_Table (I).Lower := Curr_Got_Index; Go_Out_Nested (F, I, Curr_Got_Index); Goto_Table (I).Upper := Curr_Got_Index - 1; if I < Symbols_Dump.Get_Nnon_Terms then SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); end if; Posn := 1; end loop; SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ");"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); end Aux_Got_Out; procedure Main_Got_Out (F : in out SPARK.Ada.Text_IO.File_Type) --# global in Goto_Table; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F from *, --# Goto_Table, --# Symbols_Dump.State & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Symbols_Dump.State; is Posn : Integer; begin SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Non_Term_Table : constant Main_GOT := Main_GOT'("); Posn := 1; for Nterm in Integer range 2 .. 
Symbols_Dump.Get_Nnon_Terms loop SPARK.Ada.Text_IO.Put_File (File => F, Item => " "); Posn := Posn + 3; Sparklalr_Common.Print2 (F, Sparklalr_Common.Sp_Symbol_Str, Symbols_Dump.Get_Nterm_Set (Nterm), Posn, 3, False); SPARK.Ada.Text_IO.Put_File (File => F, Item => " => Lower_GOT_Index * "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Goto_Table (Nterm).Lower, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " + Upper_GOT_Index * "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Goto_Table (Nterm).Upper, Width => 1, Base => 10); if Nterm < Symbols_Dump.Get_Nnon_Terms then SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); Posn := 1; end if; end loop; SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ");"); end Main_Got_Out; begin -- Goto_Out_Sp Nt_Count_P2 := 0; while 2 ** Nt_Count_P2 <= Symbols_Dump.Get_Nnon_Terms - 1 loop Nt_Count_P2 := Nt_Count_P2 + 1; end loop; Goto_Count_P2 := 0; while 2 ** Goto_Count_P2 <= Goto_List_Count loop Goto_Count_P2 := Goto_Count_P2 + 1; end loop; State_Count_P2 := 0; while 2 ** State_Count_P2 <= Sparklalr_Memory.Get_Stat_No loop State_Count_P2 := State_Count_P2 + 1; end loop; SPARK.Ada.Text_IO.Put_File (File => F, Item => " type Packed_State_Pair is range 0 .. 2**"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => 2 * State_Count_P2 + Nt_Count_P2, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "-1;"); SPARK.Ada.Text_IO.Put_File (File => F, Item => " State_Size : constant Packed_State_Pair := 2**"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => State_Count_P2, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ";"); SPARK.Ada.Text_IO.Put_File (File => F, Item => " Red_Goal_Size : constant Packed_State_Pair := 2**"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Nt_Count_P2, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ";"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Current_State : constant Packed_State_Pair := 1;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Next_State : constant Packed_State_Pair := State_Size;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Red_Goal : constant Packed_State_Pair := State_Size * State_Size;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " First_Non_Terminal : constant Packed_State_Pair :="); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " SP_Symbols.SP_Non_Terminal'Pos (SP_Symbols.SP_Non_Terminal'First);"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " type Aux_GOT is array (GOT_Index) of Packed_State_Pair;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_File (File => F, Item => " type Packed_GOT_Index_Pair is range 0 .. 
2**"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => 2 * Goto_Count_P2, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "-1;"); SPARK.Ada.Text_IO.Put_File (File => F, Item => " GOT_Index_Size : constant Packed_GOT_Index_Pair := 2**"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Goto_Count_P2, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ";"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Lower_GOT_Index : constant Packed_GOT_Index_Pair := 1;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Upper_GOT_Index : constant Packed_GOT_Index_Pair := GOT_Index_Size;"); SPARK.Ada.Text_IO.Put_File (File => F, Item => " type Main_GOT is array (SP_Symbols.SP_Grammar_Non_Terminal)"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " of Packed_GOT_Index_Pair;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Aux_Got_Out (F); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Main_Got_Out (F); end Goto_Out_Sp; procedure Goto_Stats (Std_Out : in Boolean; F : in out SPARK.Ada.Text_IO.File_Type) --# global in Goto_Count; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Goto_Count, --# Std_Out; is begin Sparklalr_Common.Put_Integer_File_Output (Std_Out => Std_Out, File => F, Item => Goto_Count, Width => 6); Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => " GOTO ENTRIES GENERATED"); end Goto_Stats; procedure Init_Goto_List (I : in Sparklalr_Common.Non_Term_Range) --# global in out Goto_List; --# derives Goto_List from *, --# I; is begin Goto_List (I) := 0; end Init_Goto_List; function Get_Goto_List_Count return Integer --# global in Goto_List_Count; is begin return Goto_List_Count; end Get_Goto_List_Count; function Get_Next (Next : in Next_T; I : in Sparklalr_Common.Term_Range) return Boolean is begin return Next (I); end Get_Next; procedure Set_Next (Next : in out Next_T; I : in Sparklalr_Common.Term_Range; Value : in Boolean) is begin Next (I) := Value; end Set_Next; end Sparklalr_Goto; spark-2012.0.deb/sparklalr/sparklalr_memory-dump.adb0000644000175000017500000044550211753202335021443 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Command_Line_Options; with SPARK_Ada_Integer_Text_IO; with SPARK.Ada.Command_Line; with SPARK.Ada.Strings.Unbounded; with SPARK.Ada.Text_IO.Unbounded_String; with Sparklalr_Goto; with Sparklalr_Level; with Sparklalr_Parser; with Sparklalr_Symbol; with Symbols_Dump; use type SPARK.Ada.Text_IO.Exception_T; use type Sparklalr_Symbol.Symbol; package body Sparklalr_Memory.Dump --# own State is Action, --# Act_Open, --# Empty, --# First_Var, --# Free_List, --# Item_Array, --# Look_Array, --# Look_Tree, --# Mem, --# Memo, --# Memory_Array, --# Ntrdn, --# Prod_Ptr, --# Release_Point, --# State_Var, --# Terminal_Like; is Look_Table_Size : constant := 10000; type Set_Of_Term is array (Sparklalr_Common.Term_Range) of Boolean; subtype Look_Set is Natural range 0 .. Look_Table_Size; subtype Look_Array_Range is Positive range 1 .. Look_Table_Size; type Look_Item is record Lset : Set_Of_Term; Litem, Ritem : Look_Set; end record; type Look_Array_Array_T is array (Look_Array_Range) of Look_Item; type Look_Array_T is record The_Array : Look_Array_Array_T; Top : Look_Set; end record; Set_Of_Term_False_Const : constant Set_Of_Term := Set_Of_Term'(others => False); subtype Item_Array_Range is Positive range 1 .. Item_Table_Size; subtype Memory_Array_Range is Positive range 1 .. Memory_Table_Size; type Item is record Dot : Pt_Memory; Lh : Look_Set; end record; type Memory is record Tag : Integer; Ptr : Pt_Memory; -- No discriminated/variant records in SPARK, so -- a bare union will have to do... -- case Tag is -- when 1 => Contents : Contents_T; -- when 2 => Itm : Pt_Item; -- when 3 => Mem_Pt : Pt_Memory; -- end case; end record; type Item_Array_Array_T is array (Item_Array_Range) of Item; type Memory_Array_Array_T is array (Memory_Array_Range) of Memory; type Item_Array_T is record The_Array : Item_Array_Array_T; Top : Pt_Item; end record; type Memory_Array_T is record The_Array : Memory_Array_Array_T; Top : Pt_Memory; end record; type Prod_Ptr_T is array (Sparklalr_Common.Production_Index) of Pt_Memory; type Ntrdn_T is array (Sparklalr_Common.Non_Term_Range) of Pt_Memory; type Empty_T is array (Sparklalr_Common.Non_Term_Range) of Boolean; type State_Var_T is array (Sparklalr_Common.State_Range) of Pt_Memory; type First_Var_T is array (Sparklalr_Common.Non_Term_Range) of Look_Set; Look_Tree : Look_Set; Release_Point, Free_List : Pt_Memory; Memo, Mem : Pt_Memory; Ntrdn : Ntrdn_T; Empty : Empty_T; Prod_Ptr : Prod_Ptr_T; Memory_Array : Memory_Array_T; State_Var : State_Var_T; First_Var : First_Var_T; Terminal_Like : Sparklalr_Memory.Symbol_Set_T; Look_Array : Look_Array_T; Item_Array : Item_Array_T; Action : SPARK.Ada.Text_IO.File_Type; Act_Open : Boolean; -- Local procedures/functions procedure Stack (It : in Integer) -- STACKS A NEW ENTRY ONTO THE PRODUCTION REPRESENTATION --# global in Command_Line_Options.State; --# in out Mem; --# in out Memory_Array; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives Mem from *, --# Memory_Array & --# Memory_Array from *, --# It, --# Mem & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State, --# It, --# Mem, --# Memory_Array; is begin Memory_Array.Top := Memory_Array.Top + 1; Memory_Array.The_Array (Mem).Ptr := Memory_Array.Top; Mem := Memory_Array.The_Array (Mem).Ptr; Memory_Array.The_Array (Mem) := Memory'(Tag => 1, Ptr => 0, Contents => It, Itm => 0, Mem_Pt => 0); if Command_Line_Options.Get_Debug_Level (1) then if Mem /= 0 then 
SPARK.Ada.Text_IO.Put_Output (Item => " STACK : "); SPARK_Ada_Integer_Text_IO.Put_Output (Item => It, Width => 4, Base => 10); SPARK.Ada.Text_IO.Put_Line_Output (Item => ", MEM = "); end if; end if; end Stack; function Mem_Length (Ptr1, Ptr2 : in Pt_Memory) return Integer --# global in Memory_Array; is Result : Integer; Ptr1_Tmp : Pt_Memory; begin Ptr1_Tmp := Ptr1; Result := 1; while (Ptr1_Tmp /= 0) and then (Ptr1_Tmp /= Ptr2) loop Ptr1_Tmp := Memory_Array.The_Array (Ptr1_Tmp).Ptr; Result := Result + 1; end loop; return Result; end Mem_Length; -- End local procedures/functions procedure Initialise --# global in Command_Line_Options.State; --# in out SPARK.Ada.Command_Line.State; --# in out SPARK.Ada.Text_IO.The_Standard_Error; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# out Action; --# out Act_Open; --# out Empty; --# out First_Var; --# out Free_List; --# out Item_Array; --# out Look_Array; --# out Look_Tree; --# out Mem; --# out Memo; --# out Memory_Array; --# out Ntrdn; --# out Prod_Ptr; --# out Release_Point; --# out Sparklalr_Memory.Max_Right; --# out Sparklalr_Memory.Prod_No; --# out Sparklalr_Memory.Prod_Sum; --# out Sparklalr_Memory.Stat_No; --# out State_Var; --# out Terminal_Like; --# derives Action, --# Act_Open from Command_Line_Options.State & --# Empty, --# First_Var, --# Free_List, --# Item_Array, --# Look_Array, --# Look_Tree, --# Mem, --# Memo, --# Memory_Array, --# Ntrdn, --# Prod_Ptr, --# Release_Point, --# Sparklalr_Memory.Max_Right, --# Sparklalr_Memory.Prod_No, --# Sparklalr_Memory.Prod_Sum, --# Sparklalr_Memory.Stat_No, --# State_Var, --# Terminal_Like from & --# SPARK.Ada.Command_Line.State, --# SPARK.Ada.Text_IO.The_Standard_Error, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State; is begin Sparklalr_Memory.Initialise; Look_Tree := 0; Release_Point := 0; Free_List := 0; Memory_Array := Memory_Array_T' (The_Array => Memory_Array_Array_T'(others => Memory'(Tag => 1, Ptr => 0, Contents => 0, Itm => 0, Mem_Pt => 0)), Top => 0); Memory_Array.Top := 1; Memo := Memory_Array.Top; Memory_Array.The_Array (Memo) := Memory'(Tag => 1, Ptr => 0, Contents => -1, Itm => 0, Mem_Pt => 0); Mem := Memo; Ntrdn := Ntrdn_T'(others => 0); Empty := Empty_T'(others => False); Stack (Sparklalr_Common.Nt_Base + 1); Prod_Ptr := Prod_Ptr_T'(others => 0); Prod_Ptr (1) := Memo; Stack (Sparklalr_Common.Nt_Base + 2); Stack (0); -- SPEND Stack (-1); Prod_Ptr (2) := Mem; State_Var := State_Var_T'(others => 0); First_Var := First_Var_T'(others => 0); Terminal_Like := Sparklalr_Memory.Symbol_Set_T'(others => False); Look_Array := Look_Array_T' (The_Array => Look_Array_Array_T'(others => Look_Item'(Lset => Set_Of_Term'(others => False), Litem => 0, Ritem => 0)), Top => 0); Item_Array := Item_Array_T'(The_Array => Item_Array_Array_T'(others => Item'(Dot => 0, Lh => 0)), Top => 0); SPARK.Ada.Text_IO.Unbounded_String.Create (File => Action, Mode => SPARK.Ada.Text_IO.Out_File, Name => SPARK.Ada.Strings.Unbounded.Concat_Unbounded_String_String (Left => Command_Line_Options.Get_File_Name, Right => ".ACT"), Form => SPARK.Ada.Strings.Unbounded.Null_Unbounded_String); if SPARK.Ada.Text_IO.Get_Last_Exception_File (File => Action) /= SPARK.Ada.Text_IO.No_Exception then Act_Open := False; SPARK.Ada.Text_IO.Put_Error (Item => "Unable to open output file ACT"); SPARK.Ada.Command_Line.Set_Exit_Status (SPARK.Ada.Command_Line.Failure); else Act_Open := True; end if; end Initialise; procedure Mem_Dump --# global in Command_Line_Options.State; --# in Empty; --# in Item_Array; --# in 
Look_Array; --# in Mem; --# in Memo; --# in Memory_Array; --# in Ntrdn; --# in Prod_Ptr; --# in Sparklalr_Memory.Prod_No; --# in Sparklalr_Memory.Stat_No; --# in State_Var; --# in Symbols_Dump.State; --# in out SPARK.Ada.Command_Line.State; --# in out SPARK.Ada.Text_IO.The_Standard_Error; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives SPARK.Ada.Command_Line.State, --# SPARK.Ada.Text_IO.The_Standard_Error from *, --# Command_Line_Options.State, --# Empty, --# Item_Array, --# Look_Array, --# Mem, --# Memo, --# Memory_Array, --# Ntrdn, --# Prod_Ptr, --# Sparklalr_Memory.Prod_No, --# Sparklalr_Memory.Stat_No, --# State_Var, --# Symbols_Dump.State & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Empty, --# Mem, --# Memo, --# Memory_Array, --# Ntrdn, --# Sparklalr_Memory.Prod_No, --# Sparklalr_Memory.Stat_No, --# State_Var, --# Symbols_Dump.State; is Item_List_Table_Size : constant := 20000; subtype Pt_Item_List is Natural range 0 .. Item_List_Table_Size; subtype Item_List_Array_Range is Positive range 1 .. Item_List_Table_Size; type Item_List is record Pitem : Pt_Item; Inext : Pt_Item_List; end record; type Item_List_Array_Array_T is array (Item_List_Array_Range) of Item_List; type Item_List_Array_T is record The_Array : Item_List_Array_Array_T; Top : Pt_Item_List; end record; Item_List_Array : Item_List_Array_T; Look_List_Table_Size : constant := 20000; subtype Pt_Look_List is Natural range 0 .. Look_List_Table_Size; subtype Look_List_Array_Range is Positive range 1 .. Look_List_Table_Size; type Look_List is record Plook : Look_Set; Lnext : Pt_Look_List; end record; type Look_List_Array_Array_T is array (Look_List_Array_Range) of Look_List; type Look_List_Array_T is record The_Array : Look_List_Array_Array_T; Top : Pt_Look_List; end record; Look_List_Array : Look_List_Array_T; Diagnose : SPARK.Ada.Text_IO.File_Type; Item_Head : Pt_Item_List; Look_Head : Pt_Look_List; Pmem, Mem_Stp : Pt_Memory; procedure Head (Heading : in Integer; F : in out SPARK.Ada.Text_IO.File_Type) --# global SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Heading; is begin --# accept W, 303, "when others here covers all cases"; case Heading is when 1 => SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => 'O'); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " PRODUCTION STORAGE : "); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => 'O'); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK.Ada.Text_IO.Put_File (File => F, Item => " ADDRESS "); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK.Ada.Text_IO.Put_File (File => F, Item => " CONTENTS "); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " NEXT LOCATION"); when 2 => SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => 'O'); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " EFF STORAGE : "); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => '0'); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK.Ada.Text_IO.Put_File (File => F, Item => " NONTERMINAL "); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " EFF LIST "); when 3 => SPARK.Ada.Text_IO.Put_Character_File 
(File => F, Item => 'O'); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " STATE STORAGE : "); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => 'O'); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK.Ada.Text_IO.Put_File (File => F, Item => " ADDRESS "); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK.Ada.Text_IO.Put_File (File => F, Item => " ITEM ADR."); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " NEXT LOCATION"); when 4 => SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => '1'); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " ITEM STORAGE : "); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => 'O'); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK.Ada.Text_IO.Put_File (File => F, Item => " ITEM ADR."); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " DOT ADR. "); when 5 => SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => 'O'); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " LOOKAHEAD SET STORAGE : "); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => '0'); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " ITEM LOOKAHEAD SET"); when 6 => SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => '1'); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " PRODUCTION POINTERS : "); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => 'O'); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK.Ada.Text_IO.Put_File (File => F, Item => " PRODUCTION"); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 4); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " ADDRESS "); when 7 => SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => 'O'); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " EFF LIST POINTERS : "); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => 'O'); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK.Ada.Text_IO.Put_File (File => F, Item => " NONTERMINAL"); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 3); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " ADDRESS "); when 8 => SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => 'O'); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " STATE POINTERS : "); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => 'O'); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK.Ada.Text_IO.Put_File (File => F, Item => " STATE "); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " ADDRESS "); when others => null; end case; --# end accept; SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); end Head; procedure Print_Look (L : in Look_Set; F : in out 
SPARK.Ada.Text_IO.File_Type) --# global in Look_Array; --# in Symbols_Dump.State; --# derives F from *, --# L, --# Look_Array, --# Symbols_Dump.State; is begin for I in Integer range 0 .. Symbols_Dump.Get_Nterms loop if Look_Array.The_Array (L).Lset (I) then SPARK.Ada.Text_IO.Put_File (File => F, Item => Symbols_Dump.Get_Term_Set (I)); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " "); end if; end loop; end Print_Look; procedure Out1 (Pmem : in out Pt_Memory; Mem_Stp : in Pt_Memory; F : in out SPARK.Ada.Text_IO.File_Type) --# global in Memory_Array; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# Pmem, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Memory_Array, --# Mem_Stp, --# Pmem; is Pmout1, Pmout2 : Pt_Memory; begin while Pmem /= Mem_Stp loop Pmout1 := Pmem; Pmout2 := Memory_Array.The_Array (Pmem).Ptr; Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Pmout1, Width => SPARK_Ada_Integer_Text_IO.Default_Width, Base => 10); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); --# accept W, 303, "when others here covers all cases"; case Memory_Array.The_Array (Pmem).Tag is when 1 => SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Memory_Array.The_Array (Pmem).Contents, Width => SPARK_Ada_Integer_Text_IO.Default_Width, Base => 10); when 2 => SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Memory_Array.The_Array (Pmem).Itm, Width => SPARK_Ada_Integer_Text_IO.Default_Width, Base => 10); when 3 => SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Memory_Array.The_Array (Pmem).Mem_Pt, Width => SPARK_Ada_Integer_Text_IO.Default_Width, Base => 10); when others => null; end case; --# end accept; Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Pmout2, Width => SPARK_Ada_Integer_Text_IO.Default_Width, Base => 10); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Pmem := Memory_Array.The_Array (Pmem).Ptr; end loop; end Out1; procedure Out2 (Pmem : in Pt_Memory; Mem_Stp : in Pt_Memory; Item_Head : out Pt_Item_List; F : in out SPARK.Ada.Text_IO.File_Type) --# global in Memory_Array; --# in out Item_List_Array; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Memory_Array, --# Mem_Stp, --# Pmem & --# Item_Head, --# Item_List_Array from Item_List_Array, --# Memory_Array, --# Mem_Stp, --# Pmem; is Pmout1, Pmout2, Pmem_Tmp : Pt_Memory; Temp_Item : Pt_Item_List; begin Pmem_Tmp := Pmem; Item_Head := 0; while Pmem_Tmp /= Mem_Stp loop Pmout1 := Pmem_Tmp; Pmout2 := Memory_Array.The_Array (Pmem_Tmp).Ptr; Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Pmout1, Width => SPARK_Ada_Integer_Text_IO.Default_Width, Base => 10); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); --# accept W, 303, "when others here covers all cases"; case Memory_Array.The_Array (Pmem_Tmp).Tag is when 1 => SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Memory_Array.The_Array (Pmem_Tmp).Contents, Width => SPARK_Ada_Integer_Text_IO.Default_Width, Base => 10); when 2 => SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Memory_Array.The_Array (Pmem_Tmp).Itm, Width => SPARK_Ada_Integer_Text_IO.Default_Width, Base => 10); when 3 => SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Memory_Array.The_Array 
(Pmem_Tmp).Mem_Pt, Width => SPARK_Ada_Integer_Text_IO.Default_Width, Base => 10); when others => null; end case; --# end accept; Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Pmout2, Width => SPARK_Ada_Integer_Text_IO.Default_Width, Base => 10); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Item_List_Array.Top := Item_List_Array.Top + 1; Temp_Item := Item_List_Array.Top; Item_List_Array.The_Array (Temp_Item) := Item_List'(Inext => Item_Head, Pitem => Memory_Array.The_Array (Pmem_Tmp).Itm); Item_Head := Temp_Item; Pmem_Tmp := Memory_Array.The_Array (Pmem_Tmp).Ptr; end loop; end Out2; procedure Out3 (Item_Head : in Pt_Item_List; Look_Head : out Pt_Look_List; F : in out SPARK.Ada.Text_IO.File_Type) --# global in Item_Array; --# in Item_List_Array; --# in Look_Array; --# in Symbols_Dump.State; --# in out Look_List_Array; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F from *, --# Item_Array, --# Item_Head, --# Item_List_Array, --# Look_Array, --# Symbols_Dump.State & --# Look_Head from Item_Head, --# Item_List_Array, --# Look_List_Array & --# Look_List_Array from *, --# Item_Array, --# Item_Head, --# Item_List_Array & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Item_Head, --# Item_List_Array; is I : Pt_Item_List; Piout : Pt_Item; Pmout1 : Pt_Memory; Temp_Look : Pt_Look_List; begin I := Item_Head; Look_Head := 0; while I /= 0 loop Piout := Item_List_Array.The_Array (I).Pitem; Pmout1 := Item_Array.The_Array (Item_List_Array.The_Array (I).Pitem).Dot; Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Piout, Width => SPARK_Ada_Integer_Text_IO.Default_Width, Base => 10); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Pmout1, Width => SPARK_Ada_Integer_Text_IO.Default_Width, Base => 10); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 6); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Print_Look (Item_Array.The_Array (Item_List_Array.The_Array (I).Pitem).Lh, F); Look_List_Array.Top := Look_List_Array.Top + 1; Temp_Look := Look_List_Array.Top; Look_List_Array.The_Array (Temp_Look) := Look_List'(Plook => Item_Array.The_Array (Item_List_Array.The_Array (I).Pitem).Lh, Lnext => Look_Head); Look_Head := Temp_Look; I := Item_List_Array.The_Array (I).Inext; end loop; end Out3; procedure Out4 (Look_Head : in Pt_Look_List; F : in out SPARK.Ada.Text_IO.File_Type) --# global in Look_Array; --# in Look_List_Array; --# in Symbols_Dump.State; --# derives F from *, --# Look_Array, --# Look_Head, --# Look_List_Array, --# Symbols_Dump.State; is L : Pt_Look_List; begin L := Look_Head; while L /= 0 loop Print_Look (Look_List_Array.The_Array (L).Plook, F); L := Look_List_Array.The_Array (L).Lnext; end loop; end Out4; procedure Out5 (F : in out SPARK.Ada.Text_IO.File_Type) --# global in Prod_Ptr; --# in Sparklalr_Memory.Prod_No; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F from *, --# Prod_Ptr, --# Sparklalr_Memory.Prod_No & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Sparklalr_Memory.Prod_No; is Pmout1 : Pt_Memory; begin for I in Integer range 1 .. 
Sparklalr_Memory.Prod_No - 1 loop Pmout1 := Prod_Ptr (I); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => I, Width => SPARK_Ada_Integer_Text_IO.Default_Width, Base => 10); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Pmout1, Width => SPARK_Ada_Integer_Text_IO.Default_Width, Base => 10); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); end loop; end Out5; procedure Out6 (F : in out SPARK.Ada.Text_IO.File_Type) --# global in Empty; --# in Ntrdn; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F from *, --# Empty, --# Ntrdn, --# Symbols_Dump.State & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Empty, --# Symbols_Dump.State; is Pmout1 : Pt_Memory; begin for I in Integer range 1 .. Symbols_Dump.Get_Nnon_Terms loop Pmout1 := Ntrdn (I); if Empty (I) then Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => I, Width => SPARK_Ada_Integer_Text_IO.Default_Width, Base => 10); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Pmout1, Width => SPARK_Ada_Integer_Text_IO.Default_Width, Base => 10); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "True"); else Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => I, Width => SPARK_Ada_Integer_Text_IO.Default_Width, Base => 10); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Pmout1, Width => SPARK_Ada_Integer_Text_IO.Default_Width, Base => 10); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "False"); end if; end loop; end Out6; procedure Out7 (F : in out SPARK.Ada.Text_IO.File_Type) --# global in Sparklalr_Memory.Stat_No; --# in State_Var; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F from *, --# Sparklalr_Memory.Stat_No, --# State_Var & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Sparklalr_Memory.Stat_No; is Pmout1 : Pt_Memory; begin for I in Integer range 1 .. 
Sparklalr_Memory.Stat_No loop Pmout1 := State_Var (I); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => I, Width => SPARK_Ada_Integer_Text_IO.Default_Width, Base => 10); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 5); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Pmout1, Width => SPARK_Ada_Integer_Text_IO.Default_Width, Base => 10); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); end loop; end Out7; begin -- Mem_Dump Item_List_Array := Item_List_Array_T'(The_Array => Item_List_Array_Array_T'(others => Item_List'(Pitem => 0, Inext => 0)), Top => 0); Look_List_Array := Look_List_Array_T'(The_Array => Look_List_Array_Array_T'(others => Look_List'(Plook => 0, Lnext => 0)), Top => 0); SPARK.Ada.Text_IO.Put_Line_Output (Item => "DUMPMEM Set..."); SPARK.Ada.Text_IO.Unbounded_String.Create (File => Diagnose, Mode => SPARK.Ada.Text_IO.Out_File, Name => SPARK.Ada.Strings.Unbounded.Concat_Unbounded_String_String (Left => Command_Line_Options.Get_File_Name, Right => ".DGN"), Form => SPARK.Ada.Strings.Unbounded.Null_Unbounded_String); if SPARK.Ada.Text_IO.Get_Last_Exception_File (File => Diagnose) /= SPARK.Ada.Text_IO.No_Exception then SPARK.Ada.Text_IO.Put_Error (Item => "Unable to open DGN output file"); SPARK.Ada.Command_Line.Set_Exit_Status (SPARK.Ada.Command_Line.Failure); end if; Pmem := Memo; Mem_Stp := Ntrdn (1); SPARK.Ada.Text_IO.Put_Character_File (File => Diagnose, Item => '1'); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => Diagnose, C => ' ', N => 5); SPARK.Ada.Text_IO.Put_Line_File (File => Diagnose, Item => " MEMORY DUMP : "); Head (1, Diagnose); Out1 (Pmem, Mem_Stp, Diagnose); Mem_Stp := State_Var (1); Head (2, Diagnose); Out1 (Pmem, Mem_Stp, Diagnose); Mem_Stp := Mem; Head (3, Diagnose); Out2 (Pmem, Mem_Stp, Item_Head, Diagnose); Head (4, Diagnose); Out3 (Item_Head, Look_Head, Diagnose); Head (5, Diagnose); Out4 (Look_Head, Diagnose); Head (6, Diagnose); Out5 (Diagnose); Head (7, Diagnose); Out6 (Diagnose); Head (8, Diagnose); Out7 (Diagnose); SPARK.Ada.Text_IO.Close (File => Diagnose); if SPARK.Ada.Text_IO.Get_Last_Exception_File (File => Diagnose) /= SPARK.Ada.Text_IO.No_Exception then SPARK.Ada.Text_IO.Put_Error (Item => "Unable to close DGN output file"); SPARK.Ada.Command_Line.Set_Exit_Status (SPARK.Ada.Command_Line.Failure); end if; end Mem_Dump; function Prodstart (P : in Pt_Memory) return Pt_Memory --# global in Memory_Array; --# in Prod_Ptr; is P_Tmp : Pt_Memory; begin P_Tmp := P; while Memory_Array.The_Array (P_Tmp).Contents >= 0 loop P_Tmp := Memory_Array.The_Array (P_Tmp).Ptr; end loop; return Memory_Array.The_Array (Prod_Ptr (-Memory_Array.The_Array (P_Tmp).Contents)).Ptr; end Prodstart; procedure Dump_Items (F : in out SPARK.Ada.Text_IO.File_Type; S, T : in Pt_Memory) --# global in Item_Array; --# in Look_Array; --# in Memory_Array; --# in Prod_Ptr; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Item_Array, --# Look_Array, --# Memory_Array, --# Prod_Ptr, --# S, --# Symbols_Dump.State, --# T; is P, Q : Pt_Memory; C : Integer; Dot_Out : Boolean; Posn : Integer; S_Tmp : Pt_Memory; procedure Dump_Look (F : in out SPARK.Ada.Text_IO.File_Type; L : in Look_Set; Posn : in Integer) --# global in Look_Array; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# 
L, --# Look_Array, --# Posn, --# Symbols_Dump.State; is Tab : Integer; Posn_Tmp : Integer; begin Posn_Tmp := Posn; SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => '('); Posn_Tmp := Posn_Tmp + 1; Tab := Posn_Tmp; for J in Integer range 0 .. Symbols_Dump.Get_Nterms loop if Look_Array.The_Array (L).Lset (J) then Sparklalr_Common.Print (Std_Out => False, F => F, String1 => Symbols_Dump.Get_Term_Set (J), Posn => Posn_Tmp, Tab => Tab, Comm => False); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ' '); Posn_Tmp := Posn_Tmp + 1; end if; end loop; SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ')'); end Dump_Look; begin -- Dump_Items S_Tmp := S; SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Posn := 1; while (S_Tmp /= T) and then (S_Tmp /= 0) loop Q := Item_Array.The_Array (Memory_Array.The_Array (S_Tmp).Itm).Dot; P := Prodstart (Q); C := Memory_Array.The_Array (P).Contents; Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 4); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => '['); Posn := Posn + 5; Sparklalr_Common.Print (Std_Out => False, F => F, String1 => Symbols_Dump.Get_Nterm_Set (C - Sparklalr_Common.Nt_Base), Posn => Posn, Tab => 5, Comm => False); SPARK.Ada.Text_IO.Put_File (File => F, Item => " : "); Posn := Posn + 3; P := Memory_Array.The_Array (P).Ptr; Dot_Out := False; while Memory_Array.The_Array (P).Contents > 0 loop C := Memory_Array.The_Array (P).Contents; if P = Q then Dot_Out := True; SPARK.Ada.Text_IO.Put_File (File => F, Item => ". "); Posn := Posn + 2; end if; if C > Sparklalr_Common.Nt_Base then Sparklalr_Common.Print (Std_Out => False, F => F, String1 => Symbols_Dump.Get_Nterm_Set (C - Sparklalr_Common.Nt_Base), Posn => Posn, Tab => 8, Comm => False); else if C <= Symbols_Dump.Get_Nterms then Sparklalr_Common.Print (Std_Out => False, F => F, String1 => Symbols_Dump.Get_Term_Set (C), Posn => Posn, Tab => 8, Comm => False); else SPARK.Ada.Text_IO.Put_File (File => F, Item => " *!* "); -- was ^Z Posn := Posn + 5; end if; end if; SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ' '); Posn := Posn + 1; P := Memory_Array.The_Array (P).Ptr; end loop; if not Dot_Out then SPARK.Ada.Text_IO.Put_File (File => F, Item => " . "); Posn := Posn + 3; end if; SPARK.Ada.Text_IO.Put_File (File => F, Item => "] , "); Posn := Posn + 4; Dump_Look (F, Item_Array.The_Array (Memory_Array.The_Array (S_Tmp).Itm).Lh, Posn); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Posn := 1; S_Tmp := Memory_Array.The_Array (S_Tmp).Ptr; end loop; end Dump_Items; procedure Dump_Prdns (F : in out SPARK.Ada.Text_IO.File_Type) --# global in Memory_Array; --# in Prod_Ptr; --# in Sparklalr_Memory.Prod_No; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Memory_Array, --# Prod_Ptr, --# Sparklalr_Memory.Prod_No, --# Symbols_Dump.State; is P : Pt_Memory; Posn : Integer; begin Posn := 1; for I in Integer range 1 .. 
Sparklalr_Memory.Prod_No - 1 loop SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => I, Width => 4, Base => 10); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ' '); Posn := Posn + 5; P := Memory_Array.The_Array (Prod_Ptr (I)).Ptr; Symbols_Dump.Print_Sym (F, Memory_Array.The_Array (P).Contents, Posn, 10, False); SPARK.Ada.Text_IO.Put_File (File => F, Item => " : "); Posn := Posn + 3; P := Memory_Array.The_Array (P).Ptr; while Memory_Array.The_Array (P).Contents > 0 loop SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ' '); Posn := Posn + 1; Symbols_Dump.Print_Sym (F, Memory_Array.The_Array (P).Contents, Posn, 10, False); P := Memory_Array.The_Array (P).Ptr; end loop; SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Posn := 1; end loop; end Dump_Prdns; procedure Summary --# global in Memory_Array; --# in Prod_Ptr; --# in Sparklalr_Memory.Prod_No; --# in out Sparklalr_Memory.Prod_Sum; --# out Sparklalr_Memory.Max_Right; --# derives Sparklalr_Memory.Max_Right, --# Sparklalr_Memory.Prod_Sum from Memory_Array, --# Prod_Ptr, --# Sparklalr_Memory.Prod_No, --# Sparklalr_Memory.Prod_Sum; is C, T : Pt_Memory; begin Sparklalr_Memory.Set_Max_Right (0); Sparklalr_Memory.Set_Prod_Sum (1, 1, 1); Sparklalr_Memory.Set_Prod_Sum (1, 2, 2); for I in Integer range 2 .. Sparklalr_Memory.Prod_No - 1 loop C := Memory_Array.The_Array (Prod_Ptr (I)).Ptr; Sparklalr_Memory.Set_Prod_Sum (I, 1, Memory_Array.The_Array (C).Contents - Sparklalr_Common.Nt_Base); T := Prod_Ptr (I + 1); Sparklalr_Memory.Set_Prod_Sum (I, 2, Mem_Length (Memory_Array.The_Array (C).Ptr, T) - 1); if Sparklalr_Memory.Prod_Sum (I) (2) > Sparklalr_Memory.Max_Right then Sparklalr_Memory.Set_Max_Right (Sparklalr_Memory.Prod_Sum (I) (2)); end if; end loop; end Summary; procedure New_State (S : in Pt_Memory; Result : out Integer) -- FINDS THE GOTO STATE OF THE ITEM POINTED TO BY "S" --# global in Mem; --# in Memory_Array; --# in Sparklalr_Memory.Stat_No; --# in State_Var; --# in out Item_Array; --# derives Item_Array from *, --# Memory_Array, --# S & --# Result from Item_Array, --# Mem, --# Memory_Array, --# S, --# Sparklalr_Memory.Stat_No, --# State_Var; is Temp_Item : Pt_Item; procedure Go2 (Pitem : in Pt_Item) --# global in Memory_Array; --# in out Item_Array; --# derives Item_Array from *, --# Memory_Array, --# Pitem; is begin if Memory_Array.The_Array (Item_Array.The_Array (Pitem).Dot).Contents > 0 then Item_Array.The_Array (Pitem).Dot := Memory_Array.The_Array (Item_Array.The_Array (Pitem).Dot).Ptr; end if; end Go2; function Fstate (Pitem : in Pt_Item) return Integer --# global in Item_Array; --# in Mem; --# in Memory_Array; --# in Sparklalr_Memory.Stat_No; --# in State_Var; is Found : Boolean; I : Integer; S, T : Pt_Memory; begin I := 1; Found := False; while (I <= Sparklalr_Memory.Stat_No) and then not Found loop S := State_Var (I); if I = Sparklalr_Memory.Stat_No then T := Mem; else T := State_Var (I + 1); end if; while (S /= T) and then not Found loop if Memory_Array.The_Array (S).Itm = Pitem then Found := False; else if Item_Array.The_Array (Memory_Array.The_Array (S).Itm).Dot = Item_Array.The_Array (Pitem).Dot then if Item_Array.The_Array (Memory_Array.The_Array (S).Itm).Lh = Item_Array.The_Array (Pitem).Lh then Found := True; else S := Memory_Array.The_Array (S).Ptr; end if; else S := Memory_Array.The_Array (S).Ptr; end if; end if; end loop; I := I + 1; end loop; return I - 1; end Fstate; begin Temp_Item := Memory_Array.The_Array (S).Itm; Go2 (Temp_Item); Result := Fstate (Temp_Item); end New_State; 
procedure Gen_Terminal_Like --# global in Memory_Array; --# in Ntrdn; --# in Symbols_Dump.State; --# in out Terminal_Like; --# derives Terminal_Like from *, --# Memory_Array, --# Ntrdn, --# Symbols_Dump.State; is S, T : Pt_Memory; Not_Closed, All_Terminal_Like : Boolean; begin for Index in Integer range -1 .. Symbols_Dump.Get_Nterms loop Terminal_Like (Index) := True; end loop; for Index in Integer range Symbols_Dump.Get_Nterms + 1 .. Sparklalr_Common.Max_Sym loop Terminal_Like (Index) := False; end loop; -- Form closure of TerminalLike Not_Closed := True; while Not_Closed loop Not_Closed := False; for Nt in Integer range 1 .. Symbols_Dump.Get_Nnon_Terms loop S := Ntrdn (Nt); T := Ntrdn (Nt + 1); All_Terminal_Like := True; while (S /= T) and then All_Terminal_Like loop if Memory_Array.The_Array (Memory_Array.The_Array (Memory_Array.The_Array (S).Mem_Pt).Ptr).Contents >= 0 then if (Memory_Array.The_Array (Memory_Array.The_Array (Memory_Array.The_Array (Memory_Array.The_Array (S).Mem_Pt).Ptr).Ptr).Contents >= 0) or else (not Terminal_Like (Memory_Array.The_Array (Memory_Array.The_Array (Memory_Array.The_Array (S).Mem_Pt).Ptr).Contents)) then All_Terminal_Like := False; end if; else All_Terminal_Like := False; end if; S := Memory_Array.The_Array (S).Ptr; end loop; if All_Terminal_Like and then not Terminal_Like (Nt + Sparklalr_Common.Nt_Base) then Terminal_Like (Nt + Sparklalr_Common.Nt_Base) := True; Not_Closed := True; end if; end loop; end loop; end Gen_Terminal_Like; procedure Findntredns (F : in out SPARK.Ada.Text_IO.File_Type) -- FINDS DEFINING PRODUCTIONS FOR EACH NONTERMINAL --# global in Command_Line_Options.State; --# in Prod_Ptr; --# in Sparklalr_Memory.Prod_No; --# in Symbols_Dump.State; --# in out Empty; --# in out Mem; --# in out Memory_Array; --# in out Ntrdn; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# derives Empty from *, --# Symbols_Dump.State & --# F, --# Sparklalr_Error.State from *, --# Mem, --# Memory_Array, --# Ntrdn, --# Prod_Ptr, --# Sparklalr_Memory.Prod_No, --# Symbols_Dump.State & --# Mem, --# Memory_Array, --# Ntrdn from *, --# Mem, --# Memory_Array, --# Prod_Ptr, --# Sparklalr_Memory.Prod_No, --# Symbols_Dump.State & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State, --# Mem, --# Memory_Array, --# Ntrdn, --# Prod_Ptr, --# Sparklalr_Memory.Prod_No, --# Symbols_Dump.State; is Frst : Boolean; M : Pt_Memory; procedure Stack_Mem (M : in Pt_Memory) -- STACKS POINTERS TO INTERNAL PRODUCTION REPRESENTATION --# global in Command_Line_Options.State; --# in out Mem; --# in out Memory_Array; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives Mem from *, --# Memory_Array & --# Memory_Array from *, --# M, --# Mem & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State, --# M, --# Mem, --# Memory_Array; is begin Memory_Array.Top := Memory_Array.Top + 1; Memory_Array.The_Array (Mem).Ptr := Memory_Array.Top; Mem := Memory_Array.The_Array (Mem).Ptr; Memory_Array.The_Array (Mem) := Memory'(Tag => 3, Ptr => 0, Contents => 0, Itm => 0, Mem_Pt => M); if Command_Line_Options.Get_Debug_Level (2) then if M /= 0 then SPARK.Ada.Text_IO.Put_Output (Item => " STACKM:EM , MEM ="); end if; if Mem /= 0 then SPARK.Ada.Text_IO.Put_Line_Output (Item => " "); end if; end if; end Stack_Mem; begin -- Findntredns for I in Integer range 1 .. Symbols_Dump.Get_Nnon_Terms loop Frst := True; Ntrdn (I) := 0; Empty (I) := False; for J in Integer range 1 .. 
Sparklalr_Memory.Prod_No - 1 loop if Memory_Array.The_Array (Memory_Array.The_Array (Prod_Ptr (J)).Ptr).Contents = I + Sparklalr_Common.Nt_Base then M := Memory_Array.The_Array (Prod_Ptr (J)).Ptr; Stack_Mem (M); if Frst then Frst := False; Ntrdn (I) := Mem; end if; end if; end loop; Ntrdn (Symbols_Dump.Get_Nnon_Terms + 1) := Mem; if 0 = Ntrdn (I) then Sparklalr_Error.Error (F, 30); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ' '); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => Symbols_Dump.Get_Nterm_Set (I)); SPARK.Ada.Text_IO.Put_Character_Output (Item => ' '); SPARK.Ada.Text_IO.Put_Line_Output (Item => Symbols_Dump.Get_Nterm_Set (I)); end if; end loop; Stack_Mem (0); Ntrdn (Symbols_Dump.Get_Nnon_Terms + 1) := Mem; Empty (Symbols_Dump.Get_Nnon_Terms + 1) := False; if Command_Line_Options.Get_Debug_Level (4) then for J in Integer range 1 .. Symbols_Dump.Get_Nnon_Terms loop if Ntrdn (J) /= 0 then SPARK.Ada.Text_IO.Put_Output (Item => " NTRDN "); SPARK_Ada_Integer_Text_IO.Put_Output (Item => J, Width => 3, Base => 10); SPARK.Ada.Text_IO.Put_Line_Output (Item => " INDEX = "); end if; end loop; end if; end Findntredns; procedure Mem_Stats (Std_Out : in Boolean; F : in out SPARK.Ada.Text_IO.File_Type) --# global in Mem; --# in Memo; --# in Memory_Array; --# in Ntrdn; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Mem, --# Memo, --# Memory_Array, --# Ntrdn, --# Std_Out; is Measure1, Measure2 : Integer; begin Measure1 := Mem_Length (Memo, Mem); Measure2 := Mem_Length (Memo, Ntrdn (1)); Sparklalr_Common.Put_Integer_File_Output (Std_Out => Std_Out, File => F, Item => Measure1, Width => 6); Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => " MEMORY RECORDS USED."); Sparklalr_Common.Put_Integer_File_Output (Std_Out => Std_Out, File => F, Item => Measure2, Width => 6); Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => " MEMORY RECORDS USED FOR PRODUCTION STORAGE."); Sparklalr_Common.Put_Integer_File_Output (Std_Out => Std_Out, File => F, Item => Measure1 - Measure2, Width => 6); Sparklalr_Common.Put_File_Output (Std_Out => Std_Out, File => F, Item => " MEMORY RECORDS USED FOR OTHER"); Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => " INFORMATION."); end Mem_Stats; procedure Productions_Package_Out (F : in out SPARK.Ada.Text_IO.File_Type) --# global in Memory_Array; --# in Prod_Ptr; --# in Sparklalr_Memory.Max_Right; --# in Sparklalr_Memory.Prod_No; --# in Sparklalr_Memory.Stat_No; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F from *, --# Memory_Array, --# Prod_Ptr, --# Sparklalr_Memory.Max_Right, --# Sparklalr_Memory.Prod_No, --# Sparklalr_Memory.Stat_No, --# Symbols_Dump.State & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Memory_Array, --# Prod_Ptr, --# Sparklalr_Memory.Prod_No, --# Symbols_Dump.State; is Posn : Integer; P : Pt_Memory; begin SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "package SP_Productions is"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Posn := 1; for I in Integer range 1 .. 
Sparklalr_Memory.Prod_No - 1 loop SPARK.Ada.Text_IO.Put_File (File => F, Item => "--_"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => I, Width => 4, Base => 10); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ' '); Posn := Posn + 8; P := Memory_Array.The_Array (Prod_Ptr (I)).Ptr; Symbols_Dump.Print_Sym (F, Memory_Array.The_Array (P).Contents, Posn, 10, True); SPARK.Ada.Text_IO.Put_File (File => F, Item => " ::="); Posn := Posn + 5; P := Memory_Array.The_Array (P).Ptr; while Memory_Array.The_Array (P).Contents > 1 loop -- > SPEND and SPDEFAULT Posn := Posn + 1; Symbols_Dump.Print_Sym (F, Memory_Array.The_Array (P).Contents, Posn, 10, True); P := Memory_Array.The_Array (P).Ptr; end loop; SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Posn := 1; end loop; SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_File (File => F, Item => " SP_Max_Prod : constant Positive := "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Sparklalr_Memory.Prod_No - 1, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ";"); SPARK.Ada.Text_IO.Put_File (File => F, Item => " SP_Max_Right : constant Positive := "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Sparklalr_Memory.Max_Right, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ";"); SPARK.Ada.Text_IO.Put_File (File => F, Item => " SP_Max_State : constant Positive := "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Sparklalr_Memory.Stat_No, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ";"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " type SP_Prod_No is range 0 .. SP_Max_Prod;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " type SP_Right is range 0 .. SP_Max_Right;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " type SP_State is range 0 .. SP_Max_State;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " subtype Valid_States is SP_State range 1 .. 
SP_State'Last;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " No_State : constant SP_State := SP_State'First;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "end SP_Productions;"); end Productions_Package_Out; procedure Rhs_Process (F : in out SPARK.Ada.Text_IO.File_Type; Symb : in out Sparklalr_Symbol.Symbol; Gram_Rules : in Boolean; Col : in out Sparklalr_Error.Err_Col_T; Signpost : in Sparklalr_Input.Symbol_Set_Type) --# global in Act_Open; --# in Command_Line_Options.State; --# in Sparklalr_Char_Class.Charmap; --# in out Action; --# in out Mem; --# in out Memory_Array; --# in out Prod_Ptr; --# in out SPARK.Ada.Command_Line.State; --# in out SPARK.Ada.Text_IO.The_Standard_Error; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# in out Sparklalr_Input.State; --# in out Sparklalr_Level.State; --# in out Sparklalr_Memory.Prod_No; --# in out Symbols_Dump.State; --# derives Action, --# SPARK.Ada.Command_Line.State, --# SPARK.Ada.Text_IO.The_Standard_Error, --# Sparklalr_Memory.Prod_No from *, --# Act_Open, --# Col, --# Command_Line_Options.State, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Sparklalr_Input.State, --# Sparklalr_Memory.Prod_No, --# Symbols_Dump.State & --# Col, --# F, --# Sparklalr_Error.State, --# Sparklalr_Input.State from *, --# Act_Open, --# Col, --# Command_Line_Options.State, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Sparklalr_Input.State, --# Symbols_Dump.State & --# Mem, --# Memory_Array, --# Prod_Ptr, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Act_Open, --# Col, --# Command_Line_Options.State, --# Mem, --# Memory_Array, --# Prod_Ptr, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Sparklalr_Input.State, --# Sparklalr_Memory.Prod_No, --# Symb, --# Symbols_Dump.State & --# Sparklalr_Level.State from *, --# Act_Open, --# Col, --# Command_Line_Options.State, --# Gram_Rules, --# Mem, --# Memory_Array, --# Prod_Ptr, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Sparklalr_Input.State, --# Sparklalr_Memory.Prod_No, --# Symb, --# Symbols_Dump.State & --# Symb from Act_Open, --# Col, --# Command_Line_Options.State, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Sparklalr_Input.State, --# Symbols_Dump.State & --# Symbols_Dump.State from *, --# Col, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Sparklalr_Input.State; is Symbolset_Skipto : Sparklalr_Input.Symbol_Set_Type; Next_Symb_Var : Sparklalr_Symbol.Symbol; Token : Sparklalr_Common.Id_Name; procedure Complete_Production --# global in Command_Line_Options.State; --# in out Mem; --# in out Memory_Array; --# in out Prod_Ptr; --# in out SPARK.Ada.Command_Line.State; --# in out SPARK.Ada.Text_IO.The_Standard_Error; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Level.State; --# in out Sparklalr_Memory.Prod_No; --# derives Mem from *, --# Memory_Array & --# Memory_Array, --# Prod_Ptr from *, --# Mem, --# Memory_Array, --# Sparklalr_Memory.Prod_No & --# SPARK.Ada.Command_Line.State, --# SPARK.Ada.Text_IO.The_Standard_Error, --# Sparklalr_Level.State, --# Sparklalr_Memory.Prod_No from *, --# Sparklalr_Memory.Prod_No & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State, --# Mem, --# Memory_Array, --# Sparklalr_Memory.Prod_No; is begin Stack (-Sparklalr_Memory.Prod_No); if 
Sparklalr_Memory.Prod_No < Sparklalr_Common.Prod_Lim then Sparklalr_Memory.Set_Prod_No (Sparklalr_Memory.Prod_No + 1); Prod_Ptr (Sparklalr_Memory.Prod_No) := Mem; Sparklalr_Level.Initiate_Level (Sparklalr_Memory.Prod_No); else SPARK.Ada.Text_IO.Put_Error (Item => "Production limit exceeded"); SPARK.Ada.Command_Line.Set_Exit_Status (SPARK.Ada.Command_Line.Failure); end if; end Complete_Production; procedure Dup_Lhs --# global in Command_Line_Options.State; --# in Prod_Ptr; --# in Sparklalr_Memory.Prod_No; --# in out Mem; --# in out Memory_Array; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives Mem from *, --# Memory_Array & --# Memory_Array from *, --# Mem, --# Prod_Ptr, --# Sparklalr_Memory.Prod_No & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State, --# Mem, --# Memory_Array, --# Prod_Ptr, --# Sparklalr_Memory.Prod_No; is It : Integer; begin It := Memory_Array.The_Array (Memory_Array.The_Array (Prod_Ptr (Sparklalr_Memory.Prod_No - 1)).Ptr).Contents; Stack (It); end Dup_Lhs; procedure Rhs_Element (Symb : in Sparklalr_Symbol.Symbol; F : in out SPARK.Ada.Text_IO.File_Type; Gram_Rules : in Boolean; Token : in Sparklalr_Common.Id_Name; Col : in Sparklalr_Error.Err_Col_T) --# global in Command_Line_Options.State; --# in Sparklalr_Memory.Prod_No; --# in out Mem; --# in out Memory_Array; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# in out Sparklalr_Level.State; --# in out Symbols_Dump.State; --# derives F, --# Symbols_Dump.State from *, --# Symb, --# Symbols_Dump.State, --# Token & --# Mem from *, --# Memory_Array & --# Memory_Array from *, --# Mem, --# Symb, --# Symbols_Dump.State, --# Token & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State, --# Mem, --# Memory_Array, --# Symb, --# Symbols_Dump.State, --# Token & --# Sparklalr_Error.State from *, --# Col, --# Symb, --# Symbols_Dump.State, --# Token & --# Sparklalr_Level.State from *, --# Gram_Rules, --# Mem, --# Memory_Array, --# Sparklalr_Memory.Prod_No, --# Symb, --# Symbols_Dump.State, --# Token; is Result_Find : Integer; begin Symbols_Dump.Find (Symb = Sparklalr_Symbol.Ident, F, Gram_Rules, Token, Col, Result_Find); Stack (Result_Find); if (Symb = Sparklalr_Symbol.Lit) or else (Memory_Array.The_Array (Mem).Contents < Sparklalr_Common.Nt_Base) then Sparklalr_Level.Assign_Level (Sparklalr_Memory.Prod_No, Sparklalr_Level.Get_Term_Lev (Memory_Array.The_Array (Mem).Contents)); end if; end Rhs_Element; -- Precedence definitions should not be used in a grammar file for SPARKLALR procedure Term_Precedence (F : in out SPARK.Ada.Text_IO.File_Type; Gram_Rules : in Boolean; Col : in out Sparklalr_Error.Err_Col_T) --# global in Sparklalr_Char_Class.Charmap; --# in Sparklalr_Memory.Prod_No; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# in out Sparklalr_Input.State; --# in out Sparklalr_Level.State; --# in out Symbols_Dump.State; --# derives Col, --# SPARK.Ada.Text_IO.The_Standard_Output, --# Sparklalr_Input.State, --# Symbols_Dump.State from *, --# Col, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Sparklalr_Input.State & --# F, --# Sparklalr_Error.State from *, --# Col, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Sparklalr_Input.State, --# Symbols_Dump.State & --# Sparklalr_Level.State from *, --# Col, --# Gram_Rules, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Sparklalr_Input.State, --# Sparklalr_Memory.Prod_No, --# 
Symbols_Dump.State; is Dummy : Integer; Symb : Sparklalr_Symbol.Symbol; Token : Sparklalr_Common.Id_Name; begin Sparklalr_Input.Scan (F, Col, Symb, Token); if Symb = Sparklalr_Symbol.Ident then Symbols_Dump.Find (True, F, Gram_Rules, Token, Col, Dummy); if Dummy >= Sparklalr_Common.Nt_Base then Sparklalr_Error.Syn_Error (13, Col); else Sparklalr_Level.Assign_Level (Sparklalr_Memory.Prod_No, Sparklalr_Level.Get_Term_Lev (Dummy)); end if; else Sparklalr_Error.Syn_Error (12, Col); end if; end Term_Precedence; begin -- Rhs_Process if Symb = Sparklalr_Symbol.Uparrow then Dup_Lhs; end if; Sparklalr_Input.Scan (F, Col, Symb, Token); Next_Symb_Var := Symb; while (Next_Symb_Var = Sparklalr_Symbol.Ident) or else (Next_Symb_Var = Sparklalr_Symbol.Lit) loop Rhs_Element (Symb, F, Gram_Rules, Token, Col); Sparklalr_Input.Scan (F, Col, Symb, Token); Next_Symb_Var := Symb; end loop; if Symb = Sparklalr_Symbol.Prec then Term_Precedence (F, Gram_Rules, Col); --# accept F, 10, Token, "Ineffective assignment here expected and OK"; Sparklalr_Input.Scan (F, Col, Symb, Token); --# end accept; end if; if Symb = Sparklalr_Symbol.Equals then if Act_Open and then Command_Line_Options.Get_Parser then Sparklalr_Input.Copy_Action (Action, F, Signpost, Col, Symb); else Sparklalr_Input.Skip_Action (F, Signpost, Col, Symb); end if; end if; if (Symb = Sparklalr_Symbol.Scolon) or else (Symb = Sparklalr_Symbol.Uparrow) or else (Symb = Sparklalr_Symbol.Ampmark) then Complete_Production; else Sparklalr_Error.Syn_Error (5, Col); Symbolset_Skipto := Sparklalr_Input.Signpost_To_Symbol_Set_Type (Signpost); Sparklalr_Input.Set_Symbol_Set (Symbolset_Skipto, Sparklalr_Symbol.Scolon, True); Sparklalr_Input.Set_Symbol_Set (Symbolset_Skipto, Sparklalr_Symbol.Uparrow, True); Sparklalr_Input.Skipto (F, Symbolset_Skipto, Col, Symb); end if; end Rhs_Process; procedure Lhs_Process (F : in out SPARK.Ada.Text_IO.File_Type; Gram_Rules : in Boolean; Token : in Sparklalr_Common.Id_Name; Col : in Sparklalr_Error.Err_Col_T) --# global in Command_Line_Options.State; --# in out Mem; --# in out Memory_Array; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# in out Sparklalr_Level.State; --# in out Symbols_Dump.State; --# derives F, --# Symbols_Dump.State from *, --# Symbols_Dump.State, --# Token & --# Mem from *, --# Memory_Array & --# Memory_Array from *, --# Mem, --# Symbols_Dump.State, --# Token & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State, --# Mem, --# Memory_Array, --# Symbols_Dump.State, --# Token & --# Sparklalr_Error.State from *, --# Col, --# Mem, --# Memory_Array, --# Symbols_Dump.State, --# Token & --# Sparklalr_Level.State from *, --# Gram_Rules, --# Symbols_Dump.State, --# Token; is Result_Find : Integer; begin Symbols_Dump.Find (True, F, Gram_Rules, Token, Col, Result_Find); Stack (Result_Find); if Memory_Array.The_Array (Mem).Contents < Sparklalr_Common.Nt_Base then Sparklalr_Error.Syn_Error (14, Col); end if; end Lhs_Process; procedure State_Generation (F : in out SPARK.Ada.Text_IO.File_Type) -- GENERATES COLLECTIONS OF ACCESSIBLE SETS OF ITEMS --# global in Command_Line_Options.State; --# in Memo; --# in Ntrdn; --# in Prod_Ptr; --# in Sparklalr_Memory.Prod_No; --# in Symbols_Dump.State; --# in out Empty; --# in out First_Var; --# in out Free_List; --# in out Item_Array; --# in out Look_Array; --# in out Look_Tree; --# in out Mem; --# in out Memory_Array; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# in out 
Sparklalr_Goto.State; --# in out Sparklalr_Parser.State; --# out Release_Point; --# out Sparklalr_Memory.Stat_No; --# out State_Var; --# derives Empty from *, --# Memory_Array, --# Prod_Ptr, --# Sparklalr_Memory.Prod_No & --# F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State, --# Empty, --# First_Var, --# Free_List, --# Item_Array, --# Look_Array, --# Look_Tree, --# Mem, --# Memo, --# Memory_Array, --# Ntrdn, --# Prod_Ptr, --# Sparklalr_Goto.State, --# Sparklalr_Memory.Prod_No, --# Sparklalr_Parser.State, --# Symbols_Dump.State & --# First_Var from *, --# Empty, --# Look_Array, --# Look_Tree, --# Memory_Array, --# Ntrdn, --# Prod_Ptr, --# Sparklalr_Memory.Prod_No, --# Symbols_Dump.State & --# Free_List, --# Item_Array, --# Look_Array, --# Look_Tree, --# Mem, --# Memory_Array, --# Release_Point, --# Sparklalr_Goto.State, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# State_Var from Empty, --# First_Var, --# Free_List, --# Item_Array, --# Look_Array, --# Look_Tree, --# Mem, --# Memo, --# Memory_Array, --# Ntrdn, --# Prod_Ptr, --# Sparklalr_Goto.State, --# Sparklalr_Memory.Prod_No, --# Sparklalr_Parser.State, --# Symbols_Dump.State & --# Sparklalr_Error.State from *, --# Empty, --# First_Var, --# Free_List, --# Item_Array, --# Look_Array, --# Look_Tree, --# Mem, --# Memo, --# Memory_Array, --# Ntrdn, --# Prod_Ptr, --# Sparklalr_Goto.State, --# Sparklalr_Memory.Prod_No, --# Sparklalr_Parser.State, --# Symbols_Dump.State; is type State_Changed_T is array (Sparklalr_Common.State_Range) of Boolean; type State_Hash_T is array (Sparklalr_Common.State_Range) of Sparklalr_Common.State_Range; subtype Set_Size_Elem is Natural range 0 .. 4095; type Set_Size_T is array (Sparklalr_Common.State_Range) of Set_Size_Elem; S, A : Pt_Memory; Max_State : Sparklalr_Common.State_Range; New_State_Var : Boolean; B : Integer; Empty_Set : Boolean; Dum : Pt_Memory; Previous : Sparklalr_Common.State_Range; Changes : Boolean; Result_State_Exists : Boolean; Look_Set_Added : Boolean; P : Look_Set; Set_Size : Set_Size_T; State_Hash : State_Hash_T; Prev_Stat : Sparklalr_Common.State_Range; State_Overflow : Boolean; State_Changed : State_Changed_T; Result_Pa_Search : Integer; Call_Pa_Insert : Boolean; Core : Pt_Memory; Look_Ahead : Look_Set; Pl : Sparklalr_Parser.Pt_Pa_Rec; I : Integer; function Set_Of_Term_Inequals (A, B : in Set_Of_Term) return Boolean -- A <= B is Result : Boolean; I : Integer; begin Result := True; I := Sparklalr_Common.Term_Range'First; while Result and then (I in Sparklalr_Common.Term_Range) loop if A (I) and then not B (I) then Result := False; end if; I := I + 1; end loop; return Result; end Set_Of_Term_Inequals; procedure Look_Table (S : in Set_Of_Term; P : out Look_Set; Look_Set_Added : in out Boolean) --# global in out Look_Array; --# in out Look_Tree; --# derives Look_Array, --# Look_Set_Added from *, --# Look_Array, --# Look_Tree, --# S & --# Look_Tree from *, --# Look_Array & --# P from Look_Array, --# Look_Tree, --# S; is T : Look_Set; procedure New_Look (S : in Set_Of_Term; T : out Look_Set) --# global in out Look_Array; --# derives Look_Array from *, --# S & --# T from Look_Array; is begin Look_Array.Top := Look_Array.Top + 1; T := Look_Array.Top; Look_Array.The_Array (T) := Look_Item'(Lset => S, Litem => 0, Ritem => 0); end New_Look; begin P := Look_Tree; if P /= 0 then while Look_Array.The_Array (P).Lset /= S loop if Set_Of_Term_Inequals (Look_Array.The_Array (P).Lset, S) then if Look_Array.The_Array (P).Ritem = 0 then New_Look (S, T); 
Look_Set_Added := True; Look_Array.The_Array (P).Ritem := T; end if; P := Look_Array.The_Array (P).Ritem; else if Look_Array.The_Array (P).Litem = 0 then New_Look (S, T); Look_Set_Added := True; Look_Array.The_Array (P).Litem := T; end if; P := Look_Array.The_Array (P).Litem; end if; end loop; else New_Look (S, T); Look_Set_Added := True; Look_Tree := T; P := Look_Tree; end if; end Look_Table; procedure Eff (F : in out SPARK.Ada.Text_IO.File_Type) -- DETERMINES WHETHER A NONTERMINAL CAN PRODUCE THE EMPTY SYMBOL --# global in Command_Line_Options.State; --# in Memory_Array; --# in Ntrdn; --# in Prod_Ptr; --# in Sparklalr_Memory.Prod_No; --# in Symbols_Dump.State; --# in out Empty; --# in out First_Var; --# in out Look_Array; --# in out Look_Tree; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives Empty from *, --# Memory_Array, --# Prod_Ptr, --# Sparklalr_Memory.Prod_No & --# F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State, --# Empty, --# Memory_Array, --# Ntrdn, --# Prod_Ptr, --# Sparklalr_Memory.Prod_No, --# Symbols_Dump.State & --# First_Var, --# Look_Array, --# Look_Tree from *, --# Empty, --# Look_Array, --# Look_Tree, --# Memory_Array, --# Ntrdn, --# Prod_Ptr, --# Sparklalr_Memory.Prod_No, --# Symbols_Dump.State; is type Temp_First_T is array (Sparklalr_Common.Non_Term_Range) of Set_Of_Term; C : Integer; Changes, Continue : Boolean; P, S, T : Pt_Memory; Temp_First : Temp_First_T; Posn : Integer; Look_Set_Added : Boolean; procedure Findemptyredns -- FINDS THOSE PRODUCTIONS WHICH CAN PRODUCE THE EMPTY SYMBOL --# global in Memory_Array; --# in Prod_Ptr; --# in Sparklalr_Memory.Prod_No; --# in out Empty; --# derives Empty from *, --# Memory_Array, --# Prod_Ptr, --# Sparklalr_Memory.Prod_No; is I : Integer; P : Pt_Memory; Finished, Closed, Keep_On : Boolean; begin for J in Integer range 2 .. Sparklalr_Memory.Prod_No - 1 loop if Memory_Array.The_Array (Memory_Array.The_Array (Memory_Array.The_Array (Prod_Ptr (J)).Ptr).Ptr).Contents < 0 then Empty (Memory_Array.The_Array (Memory_Array.The_Array (Prod_Ptr (J)).Ptr).Contents - Sparklalr_Common.Nt_Base) := True; end if; end loop; -- NOW DETERMINE CLOSURE OF ALL EMPTY REDUCTIONS Closed := False; while not Closed loop Keep_On := True; I := 2; while Keep_On and then (I <= Sparklalr_Memory.Prod_No - 1) loop if not Empty (Memory_Array.The_Array (Memory_Array.The_Array (Prod_Ptr (I)).Ptr).Contents - Sparklalr_Common.Nt_Base) then P := Memory_Array.The_Array (Memory_Array.The_Array (Prod_Ptr (I)).Ptr).Ptr; Finished := False; while (Memory_Array.The_Array (P).Contents > Sparklalr_Common.Nt_Base) and then not Finished loop if Empty (Memory_Array.The_Array (P).Contents - Sparklalr_Common.Nt_Base) then P := Memory_Array.The_Array (P).Ptr; else Finished := True; end if; end loop; if Memory_Array.The_Array (P).Contents < 0 then Empty (Memory_Array.The_Array (Memory_Array.The_Array (Prod_Ptr (I)).Ptr).Contents - Sparklalr_Common.Nt_Base) := True; Keep_On := False; end if; end if; I := I + 1; end loop; if I >= Sparklalr_Memory.Prod_No - 1 then Closed := True; end if; end loop; end Findemptyredns; begin -- Eff Temp_First := Temp_First_T'(others => Set_Of_Term_False_Const); Findemptyredns; for I in Integer range 1 .. 
Symbols_Dump.Get_Nnon_Terms loop T := Ntrdn (I + 1); S := Ntrdn (I); while S /= T loop P := Memory_Array.The_Array (Memory_Array.The_Array (S).Mem_Pt).Ptr; Continue := True; while (Memory_Array.The_Array (P).Contents > 0) and then Continue loop C := Memory_Array.The_Array (P).Contents; if C < Sparklalr_Common.Nt_Base then Temp_First (I) (C) := True; Continue := False; else if not Empty (C - Sparklalr_Common.Nt_Base) then Continue := False; else P := Memory_Array.The_Array (P).Ptr; end if; end if; end loop; S := Memory_Array.The_Array (S).Ptr; end loop; end loop; -- NOW REFLECT TRANSITIVITY Changes := True; while Changes loop Changes := False; for I in Integer range 1 .. Symbols_Dump.Get_Nnon_Terms loop T := Ntrdn (I + 1); S := Ntrdn (I); while S /= T loop Continue := True; P := Memory_Array.The_Array (Memory_Array.The_Array (S).Mem_Pt).Ptr; C := Memory_Array.The_Array (P).Contents; while (C > Sparklalr_Common.Nt_Base) and then Continue loop Changes := Changes or else not (Set_Of_Term_Inequals (Temp_First (C - Sparklalr_Common.Nt_Base), Temp_First (I))); if Changes then Temp_First (I) := Temp_First (I) or Temp_First (C - Sparklalr_Common.Nt_Base); end if; if not Empty (C - Sparklalr_Common.Nt_Base) then Continue := False; else P := Memory_Array.The_Array (P).Ptr; C := Memory_Array.The_Array (P).Contents; end if; end loop; S := Memory_Array.The_Array (S).Ptr; end loop; end loop; end loop; if Command_Line_Options.Get_Debug_Level (8) then Posn := 1; for I in Integer range 1 .. Symbols_Dump.Get_Nnon_Terms loop SPARK.Ada.Text_IO.Put_Output (Item => " EFF ("); SPARK.Ada.Text_IO.Put_Output (Item => Symbols_Dump.Get_Nterm_Set (I)); SPARK.Ada.Text_IO.Put_Output (Item => ") = ["); Posn := Posn + 16; for J in Integer range 0 .. Symbols_Dump.Get_Nterms loop if Temp_First (I) (J) then Sparklalr_Common.Print (Std_Out => True, F => F, String1 => Symbols_Dump.Get_Term_Set (J), Posn => Posn, Tab => 17, Comm => False); end if; end loop; SPARK.Ada.Text_IO.Put_Line_Output (Item => " ]"); Posn := 1; end loop; end if; if Command_Line_Options.Get_Debug_Level (9) then for I in Integer range 1 .. Symbols_Dump.Get_Nnon_Terms loop SPARK.Ada.Text_IO.Put_Output (Item => " EMPTY : "); SPARK.Ada.Text_IO.Put_Output (Item => Symbols_Dump.Get_Nterm_Set (I)); SPARK.Ada.Text_IO.Put_Character_Output (Item => ' '); if Empty (I) then SPARK.Ada.Text_IO.Put_Line_Output (Item => " EMPTY"); else SPARK.Ada.Text_IO.Put_Line_Output (Item => " NONEMPTY"); end if; end loop; end if; for I in Integer range 1 .. 
Symbols_Dump.Get_Nnon_Terms loop --# accept F, 10, Look_Set_Added, "Ineffective assignment here expected and OK" & --# F, 10, "Unused variable Look_Set_Added"; Look_Set_Added := False; Look_Table (Temp_First (I), First_Var (I), Look_Set_Added); --# end accept; end loop; end Eff; procedure Mark --# global in Mem; --# out Release_Point; --# derives Release_Point from Mem; is begin Release_Point := Mem; end Mark; procedure Stack_Item (F : in out SPARK.Ada.Text_IO.File_Type; Core : in Pt_Memory; Look_Ahead : in Look_Set) -- ADDS AN ITEM TO THE TOP OF MEMORY --# global in Command_Line_Options.State; --# in Look_Array; --# in Symbols_Dump.State; --# in out Free_List; --# in out Item_Array; --# in out Mem; --# in out Memory_Array; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State, --# Core, --# Free_List, --# Item_Array, --# Look_Ahead, --# Look_Array, --# Mem, --# Memory_Array, --# Symbols_Dump.State & --# Free_List, --# Mem from Free_List, --# Memory_Array & --# Item_Array from *, --# Core, --# Free_List, --# Look_Ahead, --# Memory_Array & --# Memory_Array from *, --# Free_List, --# Item_Array, --# Mem; is M : Pt_Memory; L : Look_Set; Posn : Integer; begin if Free_List /= 0 then M := Free_List; Free_List := Memory_Array.The_Array (Free_List).Ptr; else Memory_Array.Top := Memory_Array.Top + 1; M := Memory_Array.Top; Memory_Array.The_Array (M).Tag := 2; Item_Array.Top := Item_Array.Top + 1; Memory_Array.The_Array (M).Itm := Item_Array.Top; end if; Item_Array.The_Array (Memory_Array.The_Array (M).Itm) := Item'(Dot => Core, Lh => Look_Ahead); Memory_Array.The_Array (Mem).Ptr := M; Memory_Array.The_Array (M).Ptr := 0; Mem := M; --# assert True; if Command_Line_Options.Get_Debug_Level (3) then Posn := 1; if Memory_Array.The_Array (M).Itm /= 0 then SPARK.Ada.Text_IO.Put_Output (Item => " STACKITEM : , MEM = "); Posn := Posn + 27; end if; if Mem /= 0 then SPARK.Ada.Text_IO.Put_Character_Output (Item => ' '); Posn := Posn + 6; end if; if (Memory_Array.The_Array (M).Itm /= 0) then if Item_Array.The_Array (Memory_Array.The_Array (M).Itm).Dot /= 0 then SPARK.Ada.Text_IO.Put_Output (Item => " ,DOT = "); SPARK.Ada.Text_IO.Put_Output (Item => " , LOOKAHEAD = ["); Posn := Posn + 30; L := Item_Array.The_Array (Memory_Array.The_Array (M).Itm).Lh; for I in Integer range 0 .. 
Symbols_Dump.Get_Nterms loop if Look_Array.The_Array (L).Lset (I) then Sparklalr_Common.Print (Std_Out => True, F => F, String1 => Symbols_Dump.Get_Term_Set (I), Posn => Posn, Tab => 3, Comm => False); end if; end loop; SPARK.Ada.Text_IO.Put_Line_Output (Item => "]"); end if; end if; end if; end Stack_Item; procedure Close (F : in out SPARK.Ada.Text_IO.File_Type; Nstate : in Integer) -- CLOSES A SET OF ITEMS --# global in Command_Line_Options.State; --# in Empty; --# in First_Var; --# in Ntrdn; --# in State_Var; --# in Symbols_Dump.State; --# in out Free_List; --# in out Item_Array; --# in out Look_Array; --# in out Look_Tree; --# in out Mem; --# in out Memory_Array; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State, --# Empty, --# First_Var, --# Free_List, --# Item_Array, --# Look_Array, --# Look_Tree, --# Mem, --# Memory_Array, --# Nstate, --# Ntrdn, --# State_Var, --# Symbols_Dump.State & --# Free_List, --# Item_Array, --# Look_Array, --# Look_Tree, --# Mem, --# Memory_Array from Empty, --# First_Var, --# Free_List, --# Item_Array, --# Look_Array, --# Look_Tree, --# Mem, --# Memory_Array, --# Nstate, --# Ntrdn, --# State_Var; is P, Q, S, A, C : Pt_Memory; Lasts : Pt_Memory; Temp_Item : Item; Temp_Set : Set_Of_Term; B : Integer; Dummy : Pt_Memory; End_Search : Boolean; Not_Found : Boolean; Look_Set_Added : Boolean; procedure New_Item (Nstate : in Integer; Temp_Item : in Item; Not_Found : out Boolean; Look_Set_Added : in out Boolean) -- DETERMINES WHETHER AN ITEM ALREADY EXISTS IN THE STATE "NSTATE" -- IF IT EXISTS THE LOOKAHEAD SETS ARE UNITED --# global in Memory_Array; --# in State_Var; --# in out Item_Array; --# in out Look_Array; --# in out Look_Tree; --# derives Item_Array, --# Look_Array, --# Look_Set_Added, --# Look_Tree from *, --# Item_Array, --# Look_Array, --# Look_Tree, --# Memory_Array, --# Nstate, --# State_Var, --# Temp_Item & --# Not_Found from Item_Array, --# Look_Array, --# Look_Tree, --# Memory_Array, --# Nstate, --# State_Var, --# Temp_Item; is S, Lasts : Pt_Memory; Found : Boolean; begin S := State_Var (Nstate); Lasts := State_Var (Nstate + 1); Found := False; loop if Temp_Item.Dot = Item_Array.The_Array (Memory_Array.The_Array (S).Itm).Dot then Found := True; Look_Table (Look_Array.The_Array (Item_Array.The_Array (Memory_Array.The_Array (S).Itm).Lh).Lset or Look_Array.The_Array (Temp_Item.Lh).Lset, Item_Array.The_Array (Memory_Array.The_Array (S).Itm).Lh, Look_Set_Added); else S := Memory_Array.The_Array (S).Ptr; end if; exit when Found or else (S = Lasts); end loop; Not_Found := not Found; end New_Item; begin loop Look_Set_Added := False; S := State_Var (Nstate); Lasts := State_Var (Nstate + 1); while S /= Lasts loop A := Item_Array.The_Array (Memory_Array.The_Array (S).Itm).Dot; B := Memory_Array.The_Array (A).Contents; if B > Sparklalr_Common.Nt_Base then P := Ntrdn (B - Sparklalr_Common.Nt_Base); Q := Ntrdn ((B - Sparklalr_Common.Nt_Base) + 1); while P /= Q loop C := Memory_Array.The_Array (Memory_Array.The_Array (P).Mem_Pt).Ptr; Temp_Item.Dot := C; Temp_Set := Set_Of_Term'(others => False); Dummy := A; loop Dummy := Memory_Array.The_Array (Dummy).Ptr; if Memory_Array.The_Array (Dummy).Contents > Sparklalr_Common.Nt_Base then Temp_Set := Temp_Set or Look_Array.The_Array (First_Var (Memory_Array.The_Array (Dummy).Contents - Sparklalr_Common.Nt_Base)).Lset; End_Search := not Empty (Memory_Array.The_Array (Dummy).Contents - Sparklalr_Common.Nt_Base); else if 
Memory_Array.The_Array (Dummy).Contents > 0 then Temp_Set (Memory_Array.The_Array (Dummy).Contents) := True; End_Search := True; else Temp_Set := Temp_Set or Look_Array.The_Array (Item_Array.The_Array (Memory_Array.The_Array (S).Itm).Lh).Lset; End_Search := True; end if; end if; exit when End_Search; end loop; Look_Table (Temp_Set, Temp_Item.Lh, Look_Set_Added); New_Item (Nstate, Temp_Item, Not_Found, Look_Set_Added); if Not_Found then Stack_Item (F => F, Core => Temp_Item.Dot, Look_Ahead => Temp_Item.Lh); end if; P := Memory_Array.The_Array (P).Ptr; end loop; end if; S := Memory_Array.The_Array (S).Ptr; end loop; exit when not Look_Set_Added; end loop; end Close; procedure Absorb_State (State2 : in Integer; Changes : out Boolean) -- ABSORBS STATNO INTO STATE2 --# global in Release_Point; --# in Sparklalr_Memory.Stat_No; --# in State_Var; --# in out Free_List; --# in out Item_Array; --# in out Look_Array; --# in out Look_Tree; --# in out Mem; --# in out Memory_Array; --# derives Changes, --# Item_Array, --# Look_Array, --# Look_Tree from Item_Array, --# Look_Array, --# Look_Tree, --# Memory_Array, --# Sparklalr_Memory.Stat_No, --# State2, --# State_Var & --# Free_List, --# Memory_Array from Free_List, --# Mem, --# Memory_Array, --# Release_Point & --# Mem from Release_Point; is Dum1, Dum2 : Pt_Memory; Look_Set_Added : Boolean; function Diff (Left, Right : in Set_Of_Term) return Set_Of_Term is Result : Set_Of_Term; begin Result := Set_Of_Term_False_Const; for I in Sparklalr_Common.Term_Range loop if Left (I) then Result (I) := not Right (I); else Result (I) := False; end if; end loop; return Result; end Diff; procedure Release --# global in Release_Point; --# in out Free_List; --# in out Mem; --# in out Memory_Array; --# derives Free_List, --# Memory_Array from Free_List, --# Mem, --# Memory_Array, --# Release_Point & --# Mem from Release_Point; is begin Memory_Array.The_Array (Mem).Ptr := Free_List; Free_List := Memory_Array.The_Array (Release_Point).Ptr; Mem := Release_Point; Memory_Array.The_Array (Mem).Ptr := 0; end Release; begin Dum1 := State_Var (Sparklalr_Memory.Stat_No); Changes := False; loop Dum2 := State_Var (State2); while Item_Array.The_Array (Memory_Array.The_Array (Dum1).Itm).Dot /= Item_Array.The_Array (Memory_Array.The_Array (Dum2).Itm).Dot loop Dum2 := Memory_Array.The_Array (Dum2).Ptr; end loop; if Diff (Look_Array.The_Array (Item_Array.The_Array (Memory_Array.The_Array (Dum1).Itm).Lh).Lset, Look_Array.The_Array (Item_Array.The_Array (Memory_Array.The_Array (Dum2).Itm).Lh).Lset) /= Set_Of_Term_False_Const then Changes := True; --# accept F, 10, Look_Set_Added, "Ineffective assignment here expected and OK" & --# F, 10, "Unused variable Look_Set_Added"; Look_Set_Added := False; Look_Table (Look_Array.The_Array (Item_Array.The_Array (Memory_Array.The_Array (Dum2).Itm).Lh).Lset or Look_Array.The_Array (Item_Array.The_Array (Memory_Array.The_Array (Dum1).Itm).Lh).Lset, Item_Array.The_Array (Memory_Array.The_Array (Dum2).Itm).Lh, Look_Set_Added); --# end accept; end if; Dum1 := Memory_Array.The_Array (Dum1).Ptr; exit when Dum1 = 0; end loop; Release; end Absorb_State; procedure State_Exists (Sno : in Integer; Super_State : out Sparklalr_Common.State_Range; Result : out Boolean; Set_Size : in Set_Size_T; State_Hash : in out State_Hash_T; State_Overflow : in out Boolean) -- DETERMINES WHETHER THE CORES OF THE CURRENT SET OF ITEMS -- IS EQUIVALENT TO THE CORES OF AN EXISTING SET --# global in Item_Array; --# in Memory_Array; --# in Sparklalr_Memory.Stat_No; --# in State_Var; --# 
in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives Result, --# Super_State from Item_Array, --# Memory_Array, --# Set_Size, --# Sno, --# State_Hash, --# State_Var & --# SPARK.Ada.Text_IO.The_Standard_Output, --# State_Overflow from *, --# Item_Array, --# Memory_Array, --# Set_Size, --# Sno, --# State_Hash, --# State_Var & --# State_Hash from *, --# Item_Array, --# Memory_Array, --# Set_Size, --# Sno, --# Sparklalr_Memory.Stat_No, --# State_Overflow, --# State_Var; is Test_State : Integer; State_Found, Item_Found : Boolean; Dum1, Dum2 : Pt_Memory; I, N : Integer; H, D : Integer; function Hash_Itm (Itm : in Pt_Item) return Integer --# global in Item_Array; is Two_Power18 : constant := 262244; Hashcon1 : constant := 8192; -- = HASHCON2 DIV (STATEMAX+1) Hashcon2 : constant := 16777216; -- = 100000000B Hashcon3 : constant := 2925; -- = 5555B begin return (((Item_Array.The_Array (Itm).Dot mod Two_Power18) * Hashcon3) mod Hashcon2) / Hashcon1; end Hash_Itm; begin H := Hash_Itm (Memory_Array.The_Array (State_Var (Sno)).Itm); D := 1; Test_State := State_Hash (H); State_Found := False; N := Set_Size (Sno); while (Test_State /= 0) and then not State_Found loop Item_Found := False; if Set_Size (Test_State) = N then Dum1 := State_Var (Sno); loop Dum2 := State_Var (Test_State); I := N; Item_Found := False; loop if Item_Array.The_Array (Memory_Array.The_Array (Dum1).Itm).Dot = Item_Array.The_Array (Memory_Array.The_Array (Dum2).Itm).Dot then Item_Found := True; else Dum2 := Memory_Array.The_Array (Dum2).Ptr; I := I - 1; end if; exit when Item_Found or else (I = 0); end loop; if Item_Found then Dum1 := Memory_Array.The_Array (Dum1).Ptr; end if; exit when (Dum1 = 0) or else not Item_Found; end loop; end if; if Item_Found then State_Found := True; else if D /= Sparklalr_Common.State_Max then H := H + D; D := D + 2; if H > Sparklalr_Common.State_Max then H := (H - Sparklalr_Common.State_Max) - 1; end if; Test_State := State_Hash (H); else Test_State := 0; SPARK.Ada.Text_IO.Put_Line_Output (Item => " HASH TABLE OVERFLOW"); State_Overflow := True; end if; end if; end loop; if not (State_Found or else State_Overflow) then State_Hash (H) := Sparklalr_Memory.Stat_No; end if; Result := State_Found; Super_State := Test_State; end State_Exists; procedure Next_State (Prev_Stat : in out Sparklalr_Common.State_Range; State_Changed : in out State_Changed_T) --# global in Sparklalr_Memory.Stat_No; --# derives Prev_Stat, --# State_Changed from Prev_Stat, --# Sparklalr_Memory.Stat_No, --# State_Changed; is I : Integer; begin if Prev_Stat /= Sparklalr_Memory.Stat_No then I := Prev_Stat + 1; else I := 1; end if; while (I /= Prev_Stat) and then not State_Changed (I) loop if I /= Sparklalr_Memory.Stat_No then I := I + 1; else I := 1; end if; end loop; if State_Changed (I) then Prev_Stat := I; else Prev_Stat := Sparklalr_Memory.Stat_No + 1; end if; State_Changed (I) := False; end Next_State; begin -- State_Generation Eff (F => F); Sparklalr_Parser.Init_Pa_List; State_Var := State_Var_T'(others => 0); State_Hash := State_Hash_T'(others => 0); for J in Integer range 1 .. 
Symbols_Dump.Get_Nnon_Terms loop Sparklalr_Goto.Init_Goto_List (J); end loop; State_Overflow := False; -- GENERATE THE FIRST SET --# accept F, 10, Look_Set_Added, "Ineffective assignment here expected and OK" & --# F, 10, "Unused variable Look_Set_Added"; Look_Set_Added := False; Look_Table (Set_Of_Term'(0 => True, others => False), P, Look_Set_Added); --# end accept; Core := Memory_Array.The_Array (Memory_Array.The_Array (Memo).Ptr).Ptr; Stack_Item (F => F, Core => Core, Look_Ahead => P); Set_Size := Set_Size_T'(others => 0); Set_Size (1) := 1; State_Var (1) := Mem; Close (F => F, Nstate => 1); -- GENERATE THE REMAINING SETS State_Changed := State_Changed_T'(others => False); State_Changed (1) := False; Prev_Stat := 1; Max_State := 0; Sparklalr_Memory.Set_Stat_No (1); loop --# assert True; New_State_Var := Prev_Stat > Max_State; if New_State_Var then Max_State := Prev_Stat; end if; S := State_Var (Prev_Stat); I := 1; loop Mark; if I > Symbols_Dump.Get_Nterms then B := (Sparklalr_Common.Nt_Base + I) - Symbols_Dump.Get_Nterms; else B := I; end if; Dum := S; Empty_Set := True; Sparklalr_Memory.Set_Stat_No (Sparklalr_Memory.Stat_No + 1); Set_Size (Sparklalr_Memory.Stat_No) := 0; loop --# assert True; A := Item_Array.The_Array (Memory_Array.The_Array (Dum).Itm).Dot; if Memory_Array.The_Array (A).Contents = B then Core := Memory_Array.The_Array (A).Ptr; Look_Ahead := Item_Array.The_Array (Memory_Array.The_Array (Dum).Itm).Lh; Stack_Item (F => F, Core => Core, Look_Ahead => Look_Ahead); Set_Size (Sparklalr_Memory.Stat_No) := Set_Size (Sparklalr_Memory.Stat_No) + 1; if Empty_Set then if Command_Line_Options.Get_Debug_Level (7) and then (Mem /= 0) then SPARK.Ada.Text_IO.Put_Output (Item => " STATE: "); SPARK_Ada_Integer_Text_IO.Put_Output (Item => Sparklalr_Memory.Stat_No + 1, Width => 3, Base => 10); SPARK.Ada.Text_IO.Put_Line_Output (Item => " AT "); end if; if Sparklalr_Memory.Stat_No < Sparklalr_Common.State_Max then State_Var (Sparklalr_Memory.Stat_No) := Mem; Empty_Set := False; else SPARK.Ada.Text_IO.New_Line_Output (Spacing => 1); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_Output (Item => " STATE OVERFLOW"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " STATE OVERFLOW"); State_Overflow := True; Sparklalr_Memory.Set_Stat_No (Sparklalr_Memory.Stat_No - 1); end if; end if; end if; Dum := Memory_Array.The_Array (Dum).Ptr; exit when (Dum = State_Var (Prev_Stat + 1)) or else State_Overflow; end loop; --# assert True; if Empty_Set or else State_Overflow then Sparklalr_Memory.Set_Stat_No (Sparklalr_Memory.Stat_No - 1); else --# accept F, 41, "Stable expression here expected and OK"; if New_State_Var then State_Exists (Sparklalr_Memory.Stat_No, Previous, Result_State_Exists, Set_Size, State_Hash, State_Overflow); if Result_State_Exists then Absorb_State (Previous, Changes); State_Var (Sparklalr_Memory.Stat_No) := 0; Sparklalr_Memory.Set_Stat_No (Sparklalr_Memory.Stat_No - 1); if Changes then Close (F => F, Nstate => Previous); State_Changed (Previous) := True; end if; Sparklalr_Goto.Goto_Gen (F, Previous, Prev_Stat, B, Call_Pa_Insert); if Call_Pa_Insert then Sparklalr_Parser.Pa_Insert (F => F, State_Index => Prev_Stat, Term_Index => B, Insertion => Sparklalr_Common.Code (Sparklalr_Common.Shift, Previous)); end if; else Close (F => F, Nstate => Sparklalr_Memory.Stat_No); State_Changed (Sparklalr_Memory.Stat_No) := True; Sparklalr_Goto.Goto_Gen (F, Sparklalr_Memory.Stat_No, Prev_Stat, B, Call_Pa_Insert); if Call_Pa_Insert then 
Sparklalr_Parser.Pa_Insert (F => F, State_Index => Prev_Stat, Term_Index => B, Insertion => Sparklalr_Common.Code (Sparklalr_Common.Shift, Sparklalr_Memory.Stat_No)); end if; end if; else if B > Symbols_Dump.Get_Nterms then Previous := Sparklalr_Goto.Goto_Search (Prev_Stat, B); else --# accept F, 10, Pl, "Ineffective assignment here expected and OK"; Sparklalr_Parser.Pa_Search (Prev_Stat, B, Result_Pa_Search, Pl); --# end accept; Previous := Sparklalr_Common.Decode (Result_Pa_Search); end if; Absorb_State (Previous, Changes); State_Var (Sparklalr_Memory.Stat_No) := 0; Sparklalr_Memory.Set_Stat_No (Sparklalr_Memory.Stat_No - 1); if Changes then Close (F => F, Nstate => Previous); State_Changed (Previous) := True; end if; end if; --# end accept; end if; I := I + 1; exit when (I > Symbols_Dump.Get_Nterms + Symbols_Dump.Get_Nnon_Terms) or else State_Overflow; end loop; Next_State (Prev_Stat, State_Changed); exit when (Prev_Stat > Sparklalr_Memory.Stat_No) or else State_Overflow; end loop; --# accept F, 10, Look_Set_Added, "Ineffective assignment here expected and OK"; Look_Table (Set_Of_Term'(others => False), P, Look_Set_Added); --# end accept; Stack_Item (F => F, Core => 0, Look_Ahead => P); State_Var (Sparklalr_Memory.Stat_No + 1) := Mem; --# accept F, 33, Pl, "Pl is unused OK"; end State_Generation; function Get_Next (Ptr : in Pt_Memory) return Pt_Memory --# global in Memory_Array; is begin return Memory_Array.The_Array (Ptr).Ptr; end Get_Next; function Get_Contents (Ptr : in Pt_Memory) return Contents_T --# global in Memory_Array; is begin return Memory_Array.The_Array (Ptr).Contents; end Get_Contents; function Get_Item (Ptr : in Pt_Memory) return Pt_Item --# global in Memory_Array; is begin return Memory_Array.The_Array (Ptr).Itm; end Get_Item; function Get_Mem_Pt (Ptr : in Pt_Memory) return Pt_Memory --# global in Memory_Array; is begin return Memory_Array.The_Array (Ptr).Mem_Pt; end Get_Mem_Pt; function Get_Dot (Ptr : in Pt_Item) return Pt_Memory --# global in Item_Array; is begin return Item_Array.The_Array (Ptr).Dot; end Get_Dot; function Get_Lh_Lset (Ptr : in Pt_Item; I : in Sparklalr_Common.Term_Range) return Boolean --# global in Item_Array; --# in Look_Array; is begin return Look_Array.The_Array (Item_Array.The_Array (Ptr).Lh).Lset (I); end Get_Lh_Lset; function Get_Terminal_Like (S : in Sparklalr_Common.Sym_Range) return Boolean --# global in Terminal_Like; is begin return Terminal_Like (S); end Get_Terminal_Like; function Get_State (S : in Sparklalr_Common.State_Range) return Pt_Memory --# global in State_Var; is begin return State_Var (S); end Get_State; function Get_Ntrdn (I : in Sparklalr_Common.Non_Term_Range) return Pt_Memory --# global in Ntrdn; is begin return Ntrdn (I); end Get_Ntrdn; end Sparklalr_Memory.Dump; spark-2012.0.deb/sparklalr/all.wrn0000644000175000017500000000022411753202335015731 0ustar eugeneugen-- Warning control file for SPARKLALR declare_annotations default_loop_assertions handler_parts hidden_parts notes pragma Unreferenced with_clauses spark-2012.0.deb/sparklalr/sparklalr.adb0000644000175000017500000017330511753202335017107 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later 
-- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= --------------------------------------------------------------------------- -- -- - CFR 825 - PNA - Modify to produce GNAT style-compliant source code -- - CFR 1023 - RCC - Modify to produce Unreferenced pragmas where needed -- - CFR 1350 - RCC - Hide SPRelations body for SPARKMake Tool -- - CFR 1480 - JMA - Modifications to compile under GNU Pascal 3.2.3 -- - ("COMP" now a reserved word; no $ allowed in identifiers; -- - functions implemented in C declared as "external", not "C") -- - CFR 1613 - RCC - Modifications for GPC 3.4.5 and first port to Mac OS X -- - CFR 1929 - GA - Creating of a SPARK implementation of SPARKLALR, based on -- existing Pascal implementation. --------------------------------------------------------------------------- with Command_Line_Options; with Ees_Sym; with Fatal; with SPARK_Ada_Integer_Text_IO; with SPARK.Ada.Command_Line; with SPARK.Ada.Strings.Unbounded; with SPARK.Ada.Text_IO; with SPARK.Ada.Text_IO.Unbounded_String; with Sparklalr_Common; with Sparklalr_Conflict; with Sparklalr_Error; with Sparklalr_Goto; with Sparklalr_Input; with Sparklalr_Memory; with Sparklalr_Memory.Dump; with Sparklalr_Memory.Left_Corner; with Sparklalr_Parser; with Sparklalr_Patab; with Sparklalr_Symbol; with Symbols_Dump; use type SPARK.Ada.Text_IO.Exception_T; use type Sparklalr_Symbol.Symbol; --# inherit Command_Line_Options, --# Ees_Sym, --# Fatal, --# SPARK.Ada.Command_Line, --# SPARK.Ada.Strings.Unbounded, --# SPARK.Ada.Text_IO, --# SPARK.Ada.Text_IO.Unbounded_String, --# Sparklalr_Char_Class, --# Sparklalr_Common, --# Sparklalr_Conflict, --# Sparklalr_Error, --# Sparklalr_Goto, --# Sparklalr_Input, --# Sparklalr_Level, --# Sparklalr_Memory, --# Sparklalr_Memory.Dump, --# Sparklalr_Memory.Left_Corner, --# Sparklalr_Parser, --# Sparklalr_Patab, --# Sparklalr_Symbol, --# SPARK_Ada_Integer_Text_IO, --# Symbols_Dump; --# main_program; procedure Sparklalr --# global in out SPARK.Ada.Command_Line.State; --# in out SPARK.Ada.Text_IO.The_Standard_Error; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# out Command_Line_Options.State; --# out Ees_Sym.State; --# out Sparklalr_Char_Class.Charmap; --# out Sparklalr_Conflict.State; --# out Sparklalr_Error.State; --# out Sparklalr_Goto.State; --# out Sparklalr_Input.State; --# out Sparklalr_Level.State; --# out Sparklalr_Memory.Dump.State; --# out Sparklalr_Memory.Left_Corner.State; --# out Sparklalr_Memory.Max_Right; --# out Sparklalr_Memory.Prod_No; --# out Sparklalr_Memory.Prod_Sum; --# out Sparklalr_Memory.Stat_No; --# out Sparklalr_Parser.State; --# out Sparklalr_Patab.State; --# out Symbols_Dump.State; --# derives Command_Line_Options.State, --# Ees_Sym.State, --# SPARK.Ada.Command_Line.State, --# Sparklalr_Conflict.State, --# Sparklalr_Error.State, --# Sparklalr_Goto.State, --# Sparklalr_Input.State, --# Sparklalr_Level.State, --# Sparklalr_Memory.Dump.State, --# Sparklalr_Memory.Left_Corner.State, --# Sparklalr_Memory.Max_Right, --# Sparklalr_Memory.Prod_No, --# 
Sparklalr_Memory.Prod_Sum, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# Sparklalr_Patab.State, --# Symbols_Dump.State from SPARK.Ada.Command_Line.State & --# SPARK.Ada.Text_IO.The_Standard_Error, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# SPARK.Ada.Command_Line.State & --# Sparklalr_Char_Class.Charmap from ; is subtype Ampterm_Ampgram_Range is Sparklalr_Symbol.Symbol range Sparklalr_Symbol.Ampterm .. Sparklalr_Symbol.Ampgram; subtype Ampprog_Ampmain is Sparklalr_Symbol.Symbol range Sparklalr_Symbol.Ampprog .. Sparklalr_Symbol.Ampmain; Echo : SPARK.Ada.Text_IO.File_Type; Spark_Output : SPARK.Ada.Text_IO.File_Type; Col : Sparklalr_Error.Err_Col_T; Symb : Sparklalr_Symbol.Symbol; Gram_Rules : Boolean; Signpost : Sparklalr_Input.Symbol_Set_Type; User_Code : Boolean; Symb_Set : Sparklalr_Input.Symbol_Set_Type; I : Sparklalr_Symbol.Symbol; procedure Initialise --# global in Command_Line_Options.State; --# in out SPARK.Ada.Command_Line.State; --# in out SPARK.Ada.Text_IO.The_Standard_Error; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# out Col; --# out Echo; --# out Gram_Rules; --# out Signpost; --# out Sparklalr_Char_Class.Charmap; --# out Sparklalr_Conflict.State; --# out Sparklalr_Error.State; --# out Sparklalr_Goto.State; --# out Sparklalr_Input.State; --# out Sparklalr_Level.State; --# out Sparklalr_Memory.Dump.State; --# out Sparklalr_Memory.Max_Right; --# out Sparklalr_Memory.Prod_No; --# out Sparklalr_Memory.Prod_Sum; --# out Sparklalr_Memory.Stat_No; --# out Sparklalr_Parser.State; --# out Sparklalr_Patab.State; --# out Spark_Output; --# out Symb; --# out Symbols_Dump.State; --# derives Col, --# Gram_Rules, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Conflict.State, --# Sparklalr_Goto.State, --# Sparklalr_Level.State, --# Sparklalr_Memory.Max_Right, --# Sparklalr_Memory.Prod_No, --# Sparklalr_Memory.Prod_Sum, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# Sparklalr_Patab.State, --# Symb, --# Symbols_Dump.State from & --# Echo, --# Sparklalr_Error.State, --# Sparklalr_Input.State, --# Sparklalr_Memory.Dump.State, --# Spark_Output from Command_Line_Options.State & --# SPARK.Ada.Command_Line.State, --# SPARK.Ada.Text_IO.The_Standard_Error, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State; is begin Symbols_Dump.Initialise; Sparklalr_Memory.Dump.Initialise; Sparklalr_Goto.Initialise; Col := 0; SPARK.Ada.Text_IO.Unbounded_String.Create (File => Echo, Mode => SPARK.Ada.Text_IO.Out_File, Name => SPARK.Ada.Strings.Unbounded.Concat_Unbounded_String_String (Left => Command_Line_Options.Get_File_Name, Right => ".EKO"), Form => SPARK.Ada.Strings.Unbounded.Null_Unbounded_String); if SPARK.Ada.Text_IO.Get_Last_Exception_File (File => Echo) /= SPARK.Ada.Text_IO.No_Exception then SPARK.Ada.Text_IO.Put_Error (Item => "Unable to open output EKO file"); SPARK.Ada.Command_Line.Set_Exit_Status (SPARK.Ada.Command_Line.Failure); end if; if Command_Line_Options.Get_Parser then SPARK.Ada.Text_IO.Unbounded_String.Create (File => Spark_Output, Mode => SPARK.Ada.Text_IO.Out_File, Name => SPARK.Ada.Strings.Unbounded.Concat_Unbounded_String_String (Left => Command_Line_Options.Get_File_Name, Right => ".PAR"), Form => SPARK.Ada.Strings.Unbounded.Null_Unbounded_String); if SPARK.Ada.Text_IO.Get_Last_Exception_File (File => Spark_Output) /= SPARK.Ada.Text_IO.No_Exception then SPARK.Ada.Text_IO.Put_Error (Item => "Unable to open output PAR file"); SPARK.Ada.Command_Line.Set_Exit_Status (SPARK.Ada.Command_Line.Failure); end 
if; end if; Sparklalr_Parser.Initialise; Sparklalr_Patab.Initialise; Sparklalr_Conflict.Initialise; Gram_Rules := True; Sparklalr_Input.Initialise (Signpost); Symb := Sparklalr_Symbol.Nullsymb; --# accept F, 602, Spark_Output, Spark_Output, "Always defined before used"; end Initialise; procedure Grammar --# global in Command_Line_Options.State; --# in Gram_Rules; --# in Signpost; --# in Sparklalr_Char_Class.Charmap; --# in out Col; --# in out Echo; --# in out SPARK.Ada.Command_Line.State; --# in out SPARK.Ada.Text_IO.The_Standard_Error; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Conflict.State; --# in out Sparklalr_Error.State; --# in out Sparklalr_Goto.State; --# in out Sparklalr_Input.State; --# in out Sparklalr_Level.State; --# in out Sparklalr_Memory.Dump.State; --# in out Sparklalr_Memory.Max_Right; --# in out Sparklalr_Memory.Prod_No; --# in out Sparklalr_Memory.Prod_Sum; --# in out Sparklalr_Memory.Stat_No; --# in out Sparklalr_Parser.State; --# in out Sparklalr_Patab.State; --# in out Symbols_Dump.State; --# out Symb; --# derives Col, --# Sparklalr_Input.State, --# Sparklalr_Memory.Prod_No, --# Symb, --# Symbols_Dump.State from Col, --# Command_Line_Options.State, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Sparklalr_Input.State, --# Sparklalr_Memory.Dump.State, --# Sparklalr_Memory.Prod_No, --# Symbols_Dump.State & --# Echo, --# SPARK.Ada.Command_Line.State, --# SPARK.Ada.Text_IO.The_Standard_Error, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Col, --# Command_Line_Options.State, --# Gram_Rules, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Conflict.State, --# Sparklalr_Error.State, --# Sparklalr_Goto.State, --# Sparklalr_Input.State, --# Sparklalr_Level.State, --# Sparklalr_Memory.Dump.State, --# Sparklalr_Memory.Prod_No, --# Sparklalr_Parser.State, --# Sparklalr_Patab.State, --# Symbols_Dump.State & --# Sparklalr_Conflict.State, --# Sparklalr_Memory.Dump.State, --# Sparklalr_Memory.Prod_Sum, --# Sparklalr_Parser.State, --# Sparklalr_Patab.State from *, --# Col, --# Command_Line_Options.State, --# Gram_Rules, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Sparklalr_Goto.State, --# Sparklalr_Input.State, --# Sparklalr_Level.State, --# Sparklalr_Memory.Dump.State, --# Sparklalr_Memory.Prod_No, --# Sparklalr_Parser.State, --# Symbols_Dump.State & --# Sparklalr_Error.State, --# Sparklalr_Goto.State, --# Sparklalr_Memory.Stat_No from *, --# Col, --# Command_Line_Options.State, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Sparklalr_Goto.State, --# Sparklalr_Input.State, --# Sparklalr_Memory.Dump.State, --# Sparklalr_Memory.Prod_No, --# Sparklalr_Parser.State, --# Symbols_Dump.State & --# Sparklalr_Level.State from *, --# Col, --# Command_Line_Options.State, --# Gram_Rules, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Sparklalr_Input.State, --# Sparklalr_Memory.Dump.State, --# Sparklalr_Memory.Prod_No, --# Symbols_Dump.State & --# Sparklalr_Memory.Max_Right from *, --# Col, --# Command_Line_Options.State, --# Gram_Rules, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Sparklalr_Goto.State, --# Sparklalr_Input.State, --# Sparklalr_Level.State, --# Sparklalr_Memory.Dump.State, --# Sparklalr_Memory.Prod_No, --# Sparklalr_Memory.Prod_Sum, --# Sparklalr_Parser.State, --# Symbols_Dump.State; is Symbolset_Skipto : Sparklalr_Input.Symbol_Set_Type; Grammar_Token : 
Sparklalr_Common.Id_Name; procedure Out_Pt --# global in Command_Line_Options.State; --# in Sparklalr_Conflict.State; --# in Sparklalr_Goto.State; --# in Sparklalr_Memory.Dump.State; --# in Sparklalr_Memory.Prod_No; --# in Sparklalr_Memory.Stat_No; --# in Sparklalr_Parser.State; --# in Sparklalr_Patab.State; --# in Symbols_Dump.State; --# in out Echo; --# in out SPARK.Ada.Command_Line.State; --# in out SPARK.Ada.Text_IO.The_Standard_Error; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Memory.Prod_Sum; --# out Sparklalr_Memory.Max_Right; --# derives Echo, --# SPARK.Ada.Command_Line.State, --# SPARK.Ada.Text_IO.The_Standard_Error, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State, --# Sparklalr_Conflict.State, --# Sparklalr_Goto.State, --# Sparklalr_Memory.Dump.State, --# Sparklalr_Memory.Prod_No, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# Sparklalr_Patab.State, --# Symbols_Dump.State & --# Sparklalr_Memory.Max_Right, --# Sparklalr_Memory.Prod_Sum from Sparklalr_Memory.Dump.State, --# Sparklalr_Memory.Prod_No, --# Sparklalr_Memory.Prod_Sum; is -- - This procedure highlights any redundant productions in an input grammar - procedure Check_Consistency (Std_Out : in Boolean; F : in out SPARK.Ada.Text_IO.File_Type) --# global in Sparklalr_Goto.State; --# in Sparklalr_Memory.Stat_No; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Sparklalr_Goto.State, --# Sparklalr_Memory.Stat_No, --# Std_Out, --# Symbols_Dump.State; is J : Integer; Nt_Used : Boolean; Posn : Integer; begin for I in Integer range 2 .. Symbols_Dump.Get_Nnon_Terms loop Nt_Used := False; J := 1; while (not Nt_Used) and then (J <= Sparklalr_Memory.Get_Stat_No) loop Nt_Used := Sparklalr_Goto.Goto_Search (J, I + Sparklalr_Common.Nt_Base) /= 0; J := J + 1; end loop; if not Nt_Used then Sparklalr_Common.Put_File_Output (Std_Out => Std_Out, File => F, Item => "**** WARNING NON-TERMINAL "); Posn := 27; --# accept F, 41, "Stable expression here expected and OK"; if Std_Out then --# end accept; --# accept F, 10, Posn, "Ineffective assignment here expected and OK"; Sparklalr_Common.Print (Std_Out => True, F => F, String1 => Symbols_Dump.Get_Nterm_Set (I), Posn => Posn, Tab => 27, Comm => False); --# end accept; else --# accept F, 10, Posn, "Ineffective assignment here expected and OK"; Sparklalr_Common.Print (Std_Out => False, F => F, String1 => Symbols_Dump.Get_Nterm_Set (I), Posn => Posn, Tab => 27, Comm => False); --# end accept; end if; Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => " IS DEFINED BUT NEVER USED."); Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => " This may produce an inconsistent parser!!!"); end if; end loop; end Check_Consistency; procedure Statistics (Std_Out : in Boolean; F : in out SPARK.Ada.Text_IO.File_Type) --# global in Sparklalr_Conflict.State; --# in Sparklalr_Goto.State; --# in Sparklalr_Memory.Dump.State; --# in Sparklalr_Memory.Prod_No; --# in Sparklalr_Memory.Stat_No; --# in Sparklalr_Parser.State; --# in Sparklalr_Patab.State; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Sparklalr_Conflict.State, --# Sparklalr_Goto.State, --# Sparklalr_Memory.Dump.State, --# Sparklalr_Memory.Prod_No, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# Sparklalr_Patab.State, --# Std_Out, 
--# Symbols_Dump.State; is begin Sparklalr_Common.New_Line_File_Output (Std_Out => Std_Out, File => F); Sparklalr_Conflict.Conflict_Stats (Std_Out => Std_Out, F => F); Sparklalr_Common.Put_Integer_File_Output (Std_Out => Std_Out, File => F, Item => Symbols_Dump.Get_Nterms, Width => 6); Sparklalr_Common.Put_File_Output (Std_Out => Std_Out, File => F, Item => " TERMINALS ("); Sparklalr_Common.Put_Integer_File_Output (Std_Out => Std_Out, File => F, Item => Sparklalr_Common.Term_Lim, Width => 1); Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => " MAX)"); Sparklalr_Common.Put_Integer_File_Output (Std_Out => Std_Out, File => F, Item => Symbols_Dump.Get_Nnon_Terms, Width => 6); Sparklalr_Common.Put_File_Output (Std_Out => Std_Out, File => F, Item => " NONTERMINALS ("); Sparklalr_Common.Put_Integer_File_Output (Std_Out => Std_Out, File => F, Item => Sparklalr_Common.Non_Term_Lim, Width => 1); Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => " MAX)"); Sparklalr_Common.Put_Integer_File_Output (Std_Out => Std_Out, File => F, Item => Sparklalr_Memory.Get_Prod_No - 1, Width => 6); Sparklalr_Common.Put_File_Output (Std_Out => Std_Out, File => F, Item => " PRODUCTIONS ("); Sparklalr_Common.Put_Integer_File_Output (Std_Out => Std_Out, File => F, Item => Sparklalr_Common.Prod_Lim, Width => 1); Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => " MAX)"); Sparklalr_Common.Put_Integer_File_Output (Std_Out => Std_Out, File => F, Item => Sparklalr_Memory.Get_Stat_No, Width => 6); Sparklalr_Common.Put_File_Output (Std_Out => Std_Out, File => F, Item => " STATES ("); Sparklalr_Common.Put_Integer_File_Output (Std_Out => Std_Out, File => F, Item => Sparklalr_Common.State_Max, Width => 1); Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => " MAX)"); Sparklalr_Parser.Pa_Stats (Std_Out => Std_Out, F => F); Sparklalr_Goto.Goto_Stats (Std_Out => Std_Out, F => F); Sparklalr_Patab.Opt_Stats (Std_Out => Std_Out, F => F); Sparklalr_Memory.Dump.Mem_Stats (Std_Out => Std_Out, F => F); end Statistics; procedure Dump_Verb --# global in Command_Line_Options.State; --# in Sparklalr_Conflict.State; --# in Sparklalr_Goto.State; --# in Sparklalr_Memory.Dump.State; --# in Sparklalr_Memory.Prod_No; --# in Sparklalr_Memory.Stat_No; --# in Sparklalr_Parser.State; --# in Sparklalr_Patab.State; --# in Symbols_Dump.State; --# in out Echo; --# in out SPARK.Ada.Command_Line.State; --# in out SPARK.Ada.Text_IO.The_Standard_Error; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives Echo, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Sparklalr_Conflict.State, --# Sparklalr_Goto.State, --# Sparklalr_Memory.Dump.State, --# Sparklalr_Memory.Prod_No, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# Sparklalr_Patab.State, --# Symbols_Dump.State & --# SPARK.Ada.Command_Line.State, --# SPARK.Ada.Text_IO.The_Standard_Error from *, --# Command_Line_Options.State, --# Sparklalr_Conflict.State, --# Sparklalr_Goto.State, --# Sparklalr_Memory.Dump.State, --# Sparklalr_Memory.Prod_No, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# Sparklalr_Patab.State, --# Symbols_Dump.State; is Descript : SPARK.Ada.Text_IO.File_Type; procedure Dump_States (F : in out SPARK.Ada.Text_IO.File_Type) --# global in Sparklalr_Conflict.State; --# in Sparklalr_Memory.Dump.State; --# in Sparklalr_Memory.Stat_No; --# in Sparklalr_Parser.State; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# 
derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Sparklalr_Conflict.State, --# Sparklalr_Memory.Dump.State, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# Symbols_Dump.State; is S, T : Sparklalr_Memory.Dump.Pt_Memory; begin for I in Integer range 1 .. Sparklalr_Memory.Get_Stat_No loop SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_File (File => F, Item => " STATE "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => I, Width => 4, Base => 10); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); S := Sparklalr_Memory.Dump.Get_State (I); T := Sparklalr_Memory.Dump.Get_State (I + 1); Sparklalr_Memory.Dump.Dump_Items (F, S, T); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Sparklalr_Parser.Dump_Actions (F, I); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Sparklalr_Conflict.Dump_Conflicts (F, I); end loop; end Dump_States; begin -- Dump_Verb SPARK.Ada.Text_IO.Unbounded_String.Create (File => Descript, Mode => SPARK.Ada.Text_IO.Out_File, Name => SPARK.Ada.Strings.Unbounded.Concat_Unbounded_String_String (Left => Command_Line_Options.Get_File_Name, Right => ".DSC"), Form => SPARK.Ada.Strings.Unbounded.Null_Unbounded_String); if SPARK.Ada.Text_IO.Get_Last_Exception_File (File => Descript) /= SPARK.Ada.Text_IO.No_Exception then SPARK.Ada.Text_IO.Put_Error (Item => "Unable to open output DSC file"); SPARK.Ada.Command_Line.Set_Exit_Status (SPARK.Ada.Command_Line.Failure); end if; Sparklalr_Memory.Dump.Dump_Prdns (Descript); Check_Consistency (Std_Out => False, F => Descript); Dump_States (Descript); Sparklalr_Goto.Dump_Goto (Descript); Statistics (Std_Out => False, F => Echo); Statistics (Std_Out => False, F => Descript); Statistics (Std_Out => True, F => Descript); SPARK.Ada.Text_IO.Close (File => Descript); if SPARK.Ada.Text_IO.Get_Last_Exception_File (File => Descript) /= SPARK.Ada.Text_IO.No_Exception then SPARK.Ada.Text_IO.Put_Error (Item => "Unable to close output DSC file"); SPARK.Ada.Command_Line.Set_Exit_Status (SPARK.Ada.Command_Line.Failure); end if; end Dump_Verb; begin -- Out_Pt if Command_Line_Options.Get_Verbose then Dump_Verb; else Sparklalr_Memory.Dump.Dump_Prdns (Echo); Check_Consistency (Std_Out => True, F => Echo); Statistics (Std_Out => True, F => Echo); Check_Consistency (Std_Out => False, F => Echo); Sparklalr_Conflict.Print_Conflicts (Echo); Statistics (Std_Out => False, F => Echo); end if; if Command_Line_Options.Get_Dump_Mem then Sparklalr_Memory.Dump.Mem_Dump; end if; Sparklalr_Memory.Dump.Summary; end Out_Pt; begin -- Grammar Sparklalr_Input.Scan (Echo, Col, Symb, Grammar_Token); while not Sparklalr_Input.Get_Symbol_Set (Signpost, Symb) loop if Symb = Sparklalr_Symbol.Ident then Sparklalr_Memory.Dump.Lhs_Process (Echo, Gram_Rules, Grammar_Token, Col); --# accept F, 10, Grammar_Token, "Ineffective assignment here expected and OK"; Sparklalr_Input.Scan (Echo, Col, Symb, Grammar_Token); --# end accept; if Symb = Sparklalr_Symbol.Colon then loop Sparklalr_Memory.Dump.Rhs_Process (Echo, Symb, Gram_Rules, Col, Signpost); exit when Symb /= Sparklalr_Symbol.Uparrow; end loop; else Sparklalr_Error.Syn_Error (6, Col); end if; else Sparklalr_Error.Syn_Error (7, Col); Symbolset_Skipto := Sparklalr_Input.Signpost_To_Symbol_Set_Type (Signpost); Sparklalr_Input.Set_Symbol_Set (Symbolset_Skipto, Sparklalr_Symbol.Scolon, True); --# accept F, 10, Symb, "Ineffective assignment here expected and OK"; Sparklalr_Input.Skipto (Echo, Symbolset_Skipto, Col, Symb); --# end accept; end if; 
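-- Note on the loop structure here: the Scan call that follows reads the next
-- token at the bottom of the production loop, and the enclosing while-condition
-- re-tests it against Signpost (assumed to hold the section-terminating
-- symbols), so each iteration consumes one complete production: an identifier
-- left-hand side, a colon, and one or more right-hand sides chained by the
-- Uparrow symbol (handled above by Lhs_Process and Rhs_Process).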
Sparklalr_Input.Scan (Echo, Col, Symb, Grammar_Token); end loop; if not Sparklalr_Error.Get_Prod_Err then Sparklalr_Memory.Dump.Findntredns (Echo); end if; if not Sparklalr_Error.Get_Prod_Err then Sparklalr_Memory.Dump.State_Generation (Echo); Sparklalr_Conflict.Parse_Action_Generation (Echo); Out_Pt; end if; end Grammar; begin Command_Line_Options.Get_Options; Initialise; for K in Ampterm_Ampgram_Range loop if Symb < K then Symb_Set := Sparklalr_Input.Symbol_Set_False_Const; I := K; while I < Sparklalr_Symbol.Endfile loop Sparklalr_Input.Set_Symbol_Set (Symb_Set, I, True); I := Sparklalr_Symbol.Symbol'Succ (I); end loop; Sparklalr_Input.Set_Symbol_Set (Symb_Set, I, True); Sparklalr_Input.Skipto (Echo, Symb_Set, Col, Symb); end if; User_Code := K = Symb; case K is when Sparklalr_Symbol.Ampterm => if User_Code then Sparklalr_Input.Proc_Term (Echo, Gram_Rules, Col, Signpost, Symb); end if; when Sparklalr_Symbol.Ampgram => if User_Code then Grammar; else Sparklalr_Error.Error (Echo, 1); end if; end case; Sparklalr_Error.List_Line_Errors (Echo, Col); end loop; if Command_Line_Options.Get_Parser and then (not Sparklalr_Error.Get_Prod_Err) then for K in Ampprog_Ampmain loop if Symb < K then Symb_Set := Sparklalr_Input.Symbol_Set_False_Const; I := K; while I < Sparklalr_Symbol.Endfile loop Sparklalr_Input.Set_Symbol_Set (Symb_Set, I, True); I := Sparklalr_Symbol.Symbol'Succ (I); end loop; Sparklalr_Input.Set_Symbol_Set (Symb_Set, I, True); Sparklalr_Input.Skipto (Echo, Symb_Set, Col, Symb); end if; case K is when Sparklalr_Symbol.Ampprog => -- The following relations must be generated before output generation SPARK.Ada.Text_IO.New_Line_Output (Spacing => 1); SPARK.Ada.Text_IO.Put_Line_Output (Item => "Determing Shift States, Reduce States, and Unique Reduce States..."); Sparklalr_Parser.Gen_State_Info; SPARK.Ada.Text_IO.Put_Line_Output (Item => "Generating relation Left Corner..."); Sparklalr_Memory.Left_Corner.Gen_Left_Corner; SPARK.Ada.Text_IO.Put_Line_Output (Item => "Generating Essential Expected Symbols..."); Ees_Sym.Gen_Essentials; SPARK.Ada.Text_IO.Put_Line_Output (Item => "Calculating Table Sizes..."); Sparklalr_Patab.Calc_Table_Sizes; Sparklalr_Memory.Left_Corner.Count_Left_Corners; Ees_Sym.Out_Essentials; -- -------------------------------------------------------------------- -- -------- Main section of SPARK parser table generartion ------------ SPARK.Ada.Text_IO.Put_Line_Output (Item => "Generating SPARK Parser Tables..."); Sparklalr_Memory.Dump.Productions_Package_Out (Spark_Output); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); Symbols_Dump.Symbols_Package_Out (Spark_Output); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); Symbols_Dump.Symbol_Strings_Out; SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => "with SP_Productions;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => "with SP_Symbols;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => "--# inherit SP_Productions,"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => "--# SP_Symbols;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => "package SP_Parser_Actions is"); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " type SP_Action_Kind is (Error, Shift, Reduce, Accpt);"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " type SP_Parse_Act is 
record"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " Act : SP_Action_Kind;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " State : SP_Productions.SP_State;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " Symbol : SP_Symbols.SP_Symbol;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " Red_By : SP_Productions.SP_Right;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " Prod_No : SP_Productions.SP_Prod_No;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " end record;"); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " type Action_Index is private;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " First_Action_Index : constant Action_Index;"); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " No_Sym : constant SP_Symbols.SP_Symbol := SP_Symbols.SPDEFAULT;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " No_Red : constant SP_Productions.SP_Right := 0;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " No_Prod : constant SP_Productions.SP_Prod_No := 0;"); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " Accept_Action : constant SP_Parse_Act :="); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " SP_Parse_Act'(Act => Accpt,"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " State => SP_Productions.No_State,"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " Symbol => No_Sym,"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " Red_By => No_Red,"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " Prod_No => No_Prod);"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " Error_Action : constant SP_Parse_Act :="); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " SP_Parse_Act'(Act => Error,"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " State => SP_Productions.No_State,"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " Symbol => No_Sym,"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " Red_By => No_Red,"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " Prod_No => No_Prod);"); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " function SPA (CST : SP_Productions.Valid_States;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " CSY : SP_Symbols.SP_Terminal) return SP_Parse_Act;"); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " procedure Scan_Action_Table"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " (State_No : in SP_Productions.Valid_States;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " Index : in out Action_Index;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " Parse_Act : out SP_Parse_Act;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " Action_Symbol : out SP_Symbols.SP_Terminal);"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " --# derives Action_Symbol,"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, 
Item => " --# Index,"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " --# Parse_Act from Index,"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " --# State_No;"); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => "private"); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); SPARK.Ada.Text_IO.Put_File (File => Spark_Output, Item => " Max_PAT_Segment : constant Natural := "); SPARK_Ada_Integer_Text_IO.Put_File (File => Spark_Output, Item => Sparklalr_Patab.Get_Pat_Seg_Count, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => ";"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " type Action_Index is range 0 .. Max_PAT_Segment;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " First_Action_Index : constant Action_Index := 0;"); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => "end SP_Parser_Actions;"); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => "with SP_Productions;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => "with SP_Symbols;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => "use type SP_Productions.SP_State;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => "--# inherit SP_Productions,"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => "--# SP_Symbols;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => "package SP_Parser_Goto is"); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " type Goto_Index is private;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " First_Goto_Index : constant Goto_Index;"); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " function SP_Goto (CST : SP_Productions.Valid_States;"); SPARK.Ada.Text_IO.Put_File (File => Spark_Output, Item => " CSY : SP_Symbols.SP_Non_Terminal)"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " return SP_Productions.SP_State;"); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " procedure Scan_Goto_Table"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " (State_No : in SP_Productions.Valid_States;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " Index : in out Goto_Index;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " Goal_State : out SP_Productions.SP_State;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " Reduction_Goal : out SP_Symbols.SP_Non_Terminal);"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " --# derives Goal_State,"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " --# Index,"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " --# Reduction_Goal from Index,"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " --# State_No;"); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => "private"); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); 
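   --  The statements below complete the generated SP_Parser_Goto
   --  specification.  For a grammar whose goto list held, say, 1500
   --  entries (an illustrative figure only), the emitted private part
   --  would take this shape:
   --
   --     private
   --
   --        No_Of_Goto_Entries : constant Positive := 1500;
   --        type Goto_Index is range 1 .. No_Of_Goto_Entries + 1;
   --        First_Goto_Index : constant Goto_Index := 1;
   --
   --     end SP_Parser_Goto;
   --
   --  The constant comes from Sparklalr_Goto.Get_Goto_List_Count, so
   --  Goto_Index is sized exactly for the goto table written into the
   --  package body later in this unit.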
SPARK.Ada.Text_IO.Put_File (File => Spark_Output, Item => " No_Of_Goto_Entries : constant Positive := "); SPARK_Ada_Integer_Text_IO.Put_File (File => Spark_Output, Item => Sparklalr_Goto.Get_Goto_List_Count, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => ";"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " type Goto_Index is range 1 .. No_Of_Goto_Entries + 1;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " First_Goto_Index : constant Goto_Index := 1;"); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => "end SP_Parser_Goto;"); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => "package body SP_Parser_Actions is"); --# accept F, 41, "Stable expression here expected and OK"; if Command_Line_Options.Get_Self_Pack then Sparklalr_Patab.Pa_Out_Sp (Spark_Output); else Sparklalr_Patab.Pa_Out (Spark_Output); end if; --# end accept; SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " function SPA (CST : SP_Productions.Valid_States;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " CSY : SP_Symbols.SP_Terminal) return SP_Parse_Act"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " is separate;"); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " procedure Scan_Action_Table"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " (State_No : in SP_Productions.Valid_States;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " Index : in out Action_Index;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " Parse_Act : out SP_Parse_Act;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " Action_Symbol : out SP_Symbols.SP_Terminal)"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " is separate;"); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => "end SP_Parser_Actions;"); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => "package body SP_Parser_Goto is"); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " subtype GOT_Index is Goto_Index range 1 .. 
Goto_Index'Last - 1;"); --# accept F, 41, "Stable expression here expected and OK"; if Command_Line_Options.Get_Self_Pack then Sparklalr_Goto.Goto_Out_Sp (Spark_Output); else Sparklalr_Goto.Goto_Out (Spark_Output); end if; --# end accept; SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " function SP_Goto (CST : SP_Productions.Valid_States;"); SPARK.Ada.Text_IO.Put_File (File => Spark_Output, Item => " CSY : SP_Symbols.SP_Non_Terminal)"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " return SP_Productions.SP_State"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " is separate;"); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " procedure Scan_Goto_Table"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " (State_No : in SP_Productions.Valid_States;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " Index : in out Goto_Index;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " Goal_State : out SP_Productions.SP_State;"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " Reduction_Goal : out SP_Symbols.SP_Non_Terminal)"); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => " is separate;"); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => Spark_Output, Item => "end SP_Parser_Goto;"); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); Ees_Sym.Sp_Exp_Out (Spark_Output); SPARK.Ada.Text_IO.New_Line_File (File => Spark_Output, Spacing => 1); Sparklalr_Memory.Left_Corner.Out_Left_Corner (Spark_Output); -- --------------------------------------------------------------------------- -- ---- Program fragments must not be included in a SPARKLALR grammar file --- when Sparklalr_Symbol.Amplabel => null; when Sparklalr_Symbol.Ampconst => null; when Sparklalr_Symbol.Amptype => null; when Sparklalr_Symbol.Ampvar => null; when Sparklalr_Symbol.Ampfunc => null; when Sparklalr_Symbol.Ampmain => null; end case; -- ---------------------------------------------------------------------------- Sparklalr_Error.List_Line_Errors (Echo, Col); end loop; end if; Sparklalr_Input.Finalize (Echo); SPARK.Ada.Text_IO.Close (File => Spark_Output); if SPARK.Ada.Text_IO.Get_Last_Exception_File (File => Spark_Output) /= SPARK.Ada.Text_IO.No_Exception then SPARK.Ada.Text_IO.Put_Error (Item => "Unable to close output PAR file"); SPARK.Ada.Command_Line.Set_Exit_Status (SPARK.Ada.Command_Line.Failure); end if; SPARK.Ada.Text_IO.Close (File => Echo); if SPARK.Ada.Text_IO.Get_Last_Exception_File (File => Echo) /= SPARK.Ada.Text_IO.No_Exception then SPARK.Ada.Text_IO.Put_Error (Item => "Unable to close output EKO file"); SPARK.Ada.Command_Line.Set_Exit_Status (SPARK.Ada.Command_Line.Failure); end if; --# accept F, 602, Ees_Sym.State, Ees_Sym.State, "Always defined before used" & --# F, 602, Sparklalr_Memory.Left_Corner.State, Sparklalr_Memory.Left_Corner.State, "Always defined before used"; exception --# hide Sparklalr; when Fatal.Command_Line_Error => null; when others => SPARK.Ada.Text_IO.Put_Error (Item => "An unexpected internal error has occurred"); raise; end Sparklalr; spark-2012.0.deb/sparklalr/spark_ada_integer_text_io.ads0000644000175000017500000000530311753202335022322 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) 
Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SPARK.Ada.Text_IO; --# inherit SPARK.Ada.Text_IO; package SPARK_Ada_Integer_Text_IO is -- Default_Width := SPARK.Ada.Text_IO.Field := Integer'Width; function Default_Width return SPARK.Ada.Text_IO.Field; -- procedure Get -- (File : File_Type; -- Item : out Integer; -- Width : Field := 0); procedure Get_File (File : in out SPARK.Ada.Text_IO.File_Type; Item : out Integer; Width : in SPARK.Ada.Text_IO.Field); --# derives File, --# Item from File, --# Width; -- procedure Put -- (File : File_Type; -- Item : Integer; -- Width : Field := Default_Width; -- Base : Number_Base := Default_Base); procedure Put_File (File : in out SPARK.Ada.Text_IO.File_Type; Item : in Integer; Width : in SPARK.Ada.Text_IO.Field; Base : in SPARK.Ada.Text_IO.Number_Base); --# derives File from *, --# Base, --# Item, --# Width; -- procedure Put -- (File : File_Type := Standard_Output; -- Item : Integer; -- Width : Field := Default_Width; -- Base : Number_Base := Default_Base); procedure Put_Output (Item : in Integer; Width : in SPARK.Ada.Text_IO.Field; Base : in SPARK.Ada.Text_IO.Number_Base); --# global in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives SPARK.Ada.Text_IO.The_Standard_Output from *, --# Base, --# Item, --# Width; end SPARK_Ada_Integer_Text_IO; spark-2012.0.deb/sparklalr/Makefile0000644000175000017500000000426511753202335016102 0ustar eugeneugen# ----------------------------------------------------------------------------- # (C) Altran Praxis Limited # ----------------------------------------------------------------------------- # # The SPARK toolset is free software; you can redistribute it and/or modify it # under terms of the GNU General Public License as published by the Free # Software Foundation; either version 3, or (at your option) any later # version. The SPARK toolset is distributed in the hope that it will be # useful, but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the GNU # General Public License distributed with the SPARK toolset; see file # COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of # the license. # # ============================================================================= ################################################################################ # PURPOSE #------------------------------------------------------------------------------- # Makefile for the SPARKLALR parser # For Linux, Windows, Solaris or Mac OS X (aka Darwin). 
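#
# Typical use (a sketch; assumes GNAT is on the PATH and that
# ${COMMON}/Makefile.inc supplies GNATMAKE_OPTS, BIND_OPTS, SPARKCPUS
# and EXE_EXTN):
#
#   make all            # build the sparklalr executable
#   make self-analysis  # run the SPARK Examiner over sparklalr's own sources
#   make clean          # remove build artefacts; reallyclean is more thorough
#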
################################################################################ ################################################################################ # BUILD CONFIGURATION ################################################################################ # Name of the output program. OUTPUT_NAME:=sparklalr # Location of root. ROOT:=.. # Location of common. COMMON:=${ROOT}/common include ${COMMON}/Makefile.inc ################################################################################ # TARGETS ################################################################################ all: ${OUTPUT_NAME}${EXE_EXTN} ${OUTPUT_NAME}${EXE_EXTN}: gnatmake -j${SPARKCPUS} ${GNATMAKE_OPTS} ${OUTPUT_NAME} -o $@ -bargs ${BIND_OPTS} self-analysis: -spark -plain @${OUTPUT_NAME}.smf # Cleaning code base # ================== clean: standardclean reallyclean: clean targetclean vcclean ################################################################################ # END-OF-FILE spark-2012.0.deb/sparklalr/sparklalr_parser.adb0000644000175000017500000013120711753202335020456 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Command_Line_Options; with SPARK_Ada_Integer_Text_IO; with Sparklalr_Goto; with Sparklalr_Memory; with Symbols_Dump; use type Sparklalr_Goto.Next_T; package body Sparklalr_Parser --# own State is Pat_Count, --# Pa_Array, --# Pa_Count, --# Pa_List, --# Reduce_State, --# Unique_Reduce_State; is subtype Pa_Array_Range is Positive range 1 .. 
Pa_Table_Size; type Pa_Rec is record Index : Sparklalr_Common.Term_Range; The_Entry : Sparklalr_Common.Short_Int; Pa_Next : Pt_Pa_Rec; end record; type Pa_Array_Array_T is array (Pa_Array_Range) of Pa_Rec; type Pa_Array_T is record The_Array : Pa_Array_Array_T; Top : Pt_Pa_Rec; end record; type State_Set is array (Sparklalr_Common.State_Range) of Boolean; type Pa_List_T is array (Sparklalr_Common.State_Range) of Pt_Pa_Rec; -- PARSING ACTION TABLE Pa_List : Pa_List_T; Reduce_State : State_Set; Unique_Reduce_State : State_Set; Pa_Count : Integer; Pat_Count : Integer; Pa_Array : Pa_Array_T; -- Local procedures/functions procedure Sort_Pa (Head : in out Pt_Pa_Rec) --# global in out Pa_Array; --# derives Head, --# Pa_Array from Head, --# Pa_Array; is P, Pnext, Q : Pt_Pa_Rec; Found : Boolean; begin if Head /= 0 then P := Pa_Array.The_Array (Head).Pa_Next; Pa_Array.The_Array (Head).Pa_Next := 0; else P := 0; end if; while P /= 0 loop Pnext := Pa_Array.The_Array (P).Pa_Next; if Pa_Array.The_Array (P).The_Entry >= Pa_Array.The_Array (Head).The_Entry then Pa_Array.The_Array (P).Pa_Next := Head; Head := P; else Q := Head; Found := False; loop if Pa_Array.The_Array (Q).Pa_Next /= 0 then if Pa_Array.The_Array (P).The_Entry >= Pa_Array.The_Array (Pa_Array.The_Array (Q).Pa_Next).The_Entry then Found := True; else Q := Pa_Array.The_Array (Q).Pa_Next; end if; else Found := True; end if; exit when Found; end loop; Pa_Array.The_Array (P).Pa_Next := Pa_Array.The_Array (Q).Pa_Next; Pa_Array.The_Array (Q).Pa_Next := P; end if; P := Pnext; end loop; end Sort_Pa; -- End local procedures/functions procedure Init_Pa_List --# global out Pa_List; --# derives Pa_List from ; is begin Pa_List := Pa_List_T'(others => 0); end Init_Pa_List; procedure Init_Pat_Count --# global out Pat_Count; --# derives Pat_Count from ; is begin Pat_Count := 0; end Init_Pat_Count; procedure Initialise --# global out Pat_Count; --# out Pa_Array; --# out Pa_Count; --# out Pa_List; --# out Reduce_State; --# out Unique_Reduce_State; --# derives Pat_Count, --# Pa_Array, --# Pa_Count, --# Pa_List, --# Reduce_State, --# Unique_Reduce_State from ; is begin Init_Pa_List; Init_Pat_Count; Reduce_State := State_Set'(others => False); Unique_Reduce_State := State_Set'(others => False); Pa_Count := 0; Pa_Array := Pa_Array_T'(The_Array => Pa_Array_Array_T'(others => Pa_Rec'(Index => 0, The_Entry => 0, Pa_Next => 0)), Top => 0); end Initialise; procedure Gen_State_Info --# global in Pa_Array; --# in Pa_List; --# in Sparklalr_Memory.Stat_No; --# in out Reduce_State; --# in out Unique_Reduce_State; --# derives Reduce_State, --# Unique_Reduce_State from *, --# Pa_Array, --# Pa_List, --# Sparklalr_Memory.Stat_No; is Ptr, List_Start : Pt_Pa_Rec; Is_Unique_Reduce : Boolean; begin for ST in Integer range 1 .. 
Sparklalr_Memory.Get_Stat_No loop Ptr := Pa_List (ST); List_Start := Ptr; Is_Unique_Reduce := True; while Ptr /= 0 loop if (Pa_Array.The_Array (Ptr).The_Entry > Sparklalr_Common.Prod_Lim) or else (Pa_Array.The_Array (Ptr).The_Entry = 1) then Is_Unique_Reduce := False; else Reduce_State (ST) := True; Is_Unique_Reduce := Is_Unique_Reduce and then (Pa_Array.The_Array (Ptr).The_Entry = Pa_Array.The_Array (List_Start).The_Entry); end if; Ptr := Pa_Array.The_Array (Ptr).Pa_Next; end loop; if Is_Unique_Reduce then Unique_Reduce_State (ST) := True; end if; end loop; end Gen_State_Info; procedure Pa_Search (State_Index, Term_Index : in Integer; Result : out Integer; Pl : out Pt_Pa_Rec) --# global in Pa_Array; --# in Pa_List; --# derives Pl, --# Result from Pa_Array, --# Pa_List, --# State_Index, --# Term_Index; is Found : Boolean; Plist : Pt_Pa_Rec; begin Result := 0; Pl := 0; Found := False; Plist := Pa_List (State_Index); while (Plist /= 0) and then not Found loop if Pa_Array.The_Array (Plist).Index = Term_Index then Pl := Plist; Result := Pa_Array.The_Array (Plist).The_Entry; Found := True; else Plist := Pa_Array.The_Array (Plist).Pa_Next; end if; end loop; end Pa_Search; procedure Pa_Insert (F : in out SPARK.Ada.Text_IO.File_Type; State_Index, Term_Index, Insertion : in Integer) -- INSERTS A NEW ENTRY INTO THE PARSING ACTION TABLE --# global in Command_Line_Options.State; --# in Symbols_Dump.State; --# in out Pa_Array; --# in out Pa_Count; --# in out Pa_List; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F from *, --# Command_Line_Options.State, --# Symbols_Dump.State, --# Term_Index & --# Pa_Array from *, --# Insertion, --# Pa_List, --# State_Index, --# Term_Index & --# Pa_Count from * & --# Pa_List from *, --# Pa_Array, --# State_Index, --# Term_Index & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State, --# Insertion, --# State_Index, --# Symbols_Dump.State, --# Term_Index; is Posn : Integer; Result_Pa_Search : Integer; Pl : Pt_Pa_Rec; begin if Command_Line_Options.Get_Debug_Level (6) then SPARK.Ada.Text_IO.Put_Output (Item => " PA(STATE="); SPARK_Ada_Integer_Text_IO.Put_Output (Item => State_Index, Width => 3, Base => 10); SPARK.Ada.Text_IO.Put_Output (Item => ",TERMINAL="); Posn := 23; --# accept F, 10, Posn, "Ineffective assigment to Posn here expected and OK"; Sparklalr_Common.Print (Std_Out => True, F => F, String1 => Symbols_Dump.Get_Term_Set (Term_Index), Posn => Posn, Tab => 23, Comm => False); --# end accept; SPARK.Ada.Text_IO.Put_Output (Item => ") = "); SPARK_Ada_Integer_Text_IO.Put_Output (Item => Insertion, Width => 3, Base => 10); SPARK.Ada.Text_IO.New_Line_Output (Spacing => 1); end if; Pa_Count := Pa_Count + 1; Pa_Search (State_Index, Term_Index, Result_Pa_Search, Pl); if Result_Pa_Search /= 0 then Pa_Array.The_Array (Pl).The_Entry := Insertion; else Pa_Array.Top := Pa_Array.Top + 1; Pa_Array.The_Array (Pa_Array.Top) := Pa_Rec'(Index => Term_Index, The_Entry => Insertion, Pa_Next => Pa_List (State_Index)); Pa_List (State_Index) := Pa_Array.Top; end if; end Pa_Insert; function Action_Equal (Act1, Act2 : in Pt_Pa_Rec) return Boolean --# global in Pa_Array; is Found : Boolean; A2 : Pt_Pa_Rec; C1, C2 : Integer; Act1_Tmp : Pt_Pa_Rec; function Eq_Pa_Size (A1, A2 : in Pt_Pa_Rec) return Boolean --# global in Pa_Array; is A1_Tmp : Pt_Pa_Rec; A2_Tmp : Pt_Pa_Rec; begin A1_Tmp := A1; A2_Tmp := A2; while (A1_Tmp /= 0) and then (A2_Tmp /= 0) loop A1_Tmp := Pa_Array.The_Array (A1_Tmp).Pa_Next; A2_Tmp := Pa_Array.The_Array (A2_Tmp).Pa_Next; end 
loop; return A1_Tmp = A2_Tmp; end Eq_Pa_Size; begin Act1_Tmp := Act1; Found := False; if Eq_Pa_Size (Act1_Tmp, Act2) then while Act1_Tmp /= 0 loop C1 := Pa_Array.The_Array (Act1_Tmp).Index; C2 := Pa_Array.The_Array (Act1_Tmp).The_Entry; A2 := Act2; Found := False; while (A2 /= 0) and then not Found loop if Pa_Array.The_Array (A2).Index = C1 then if Pa_Array.The_Array (A2).The_Entry = C2 then Found := True; else A2 := 0; end if; else A2 := Pa_Array.The_Array (A2).Pa_Next; end if; end loop; if Found then Act1_Tmp := Pa_Array.The_Array (Act1_Tmp).Pa_Next; else Act1_Tmp := 0; end if; end loop; end if; return Found; end Action_Equal; procedure Pa_Stats (Std_Out : in Boolean; F : in out SPARK.Ada.Text_IO.File_Type) --# global in Pa_Count; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Pa_Count, --# Std_Out; is begin Sparklalr_Common.Put_Integer_File_Output (Std_Out => Std_Out, File => F, Item => Pa_Count, Width => 6); Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => " PARSING ACTIONS GENERATED"); end Pa_Stats; procedure Dump_Actions (F : in out SPARK.Ada.Text_IO.File_Type; Nstate : in Integer) --# global in Pa_Array; --# in Pa_List; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Nstate, --# Pa_Array, --# Pa_List, --# Symbols_Dump.State; is Pl : Pt_Pa_Rec; J : Integer; Posn : Integer; Result_Pa_Search : Integer; begin Posn := 1; for I in Integer range 0 .. Symbols_Dump.Get_Nterms loop --# accept F, 10, Pl, "Ineffective assigment to Pl here expected and OK"; Pa_Search (Nstate, I, Result_Pa_Search, Pl); --# end accept; if Result_Pa_Search /= 0 then Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 4); Posn := Posn + 4; --# accept F, 10, Posn, "Ineffective assigment to Posn here expected and OK"; Sparklalr_Common.Print (Std_Out => False, F => F, String1 => Symbols_Dump.Get_Term_Set (I), Posn => Posn, Tab => 4, Comm => False); --# end accept; --# accept F, 10, Pl, "Ineffective assigment to Pl here expected and OK"; Pa_Search (Nstate, I, J, Pl); --# end accept; if J > Sparklalr_Common.Prod_Lim then SPARK.Ada.Text_IO.Put_File (File => F, Item => " SHIFT "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => J - Sparklalr_Common.Prod_Lim, Width => 4, Base => 10); else if (J = 1) or else (J = -1) then if J = 1 then SPARK.Ada.Text_IO.Put_File (File => F, Item => " ACCEPT "); else SPARK.Ada.Text_IO.Put_File (File => F, Item => " ERROR "); end if; else SPARK.Ada.Text_IO.Put_File (File => F, Item => " REDUCE "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => J, Width => 4, Base => 10); end if; end if; SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Posn := 1; end if; end loop; --# accept F, 33, Pl, "Pl is unused OK"; end Dump_Actions; procedure Action_Gen (State_Var : in Integer) --# global in Unique_Reduce_State; --# in out Pat_Count; --# in out Pa_Array; --# in out Pa_List; --# derives Pat_Count from *, --# Pa_Array, --# Pa_List, --# State_Var, --# Unique_Reduce_State & --# Pa_Array, --# Pa_List from Pa_Array, --# Pa_List, --# State_Var; is Curr_Entry : Integer; Red_Count : Sparklalr_Common.Production_Count; Exit_Flag : Boolean; P : Pt_Pa_Rec; Next : Sparklalr_Goto.Next_T; First_Var : Sparklalr_Common.Term_Range; procedure Write_Cond (Next : in Sparklalr_Goto.Next_T; First_Var : in Sparklalr_Common.Term_Range) --# global in out Pat_Count; --# derives Pat_Count from 
*, --# First_Var, --# Next; is T : Sparklalr_Common.Term_Range; Next_Tmp : Sparklalr_Goto.Next_T; begin Next_Tmp := Next; Pat_Count := Pat_Count + 1; if Next_Tmp /= Sparklalr_Goto.Next_False_Const then T := First_Var; loop T := T + 1; if Sparklalr_Goto.Get_Next (Next_Tmp, T) then Pat_Count := Pat_Count + 1; Sparklalr_Goto.Set_Next (Next_Tmp, T, False); end if; exit when Next_Tmp = Sparklalr_Goto.Next_False_Const; end loop; end if; end Write_Cond; begin Sort_Pa (Pa_List (State_Var)); P := Pa_List (State_Var); while P /= 0 loop Curr_Entry := Pa_Array.The_Array (P).The_Entry; First_Var := Pa_Array.The_Array (P).Index; Next := Sparklalr_Goto.Next_False_Const; Exit_Flag := False; Red_Count := 0; loop P := Pa_Array.The_Array (P).Pa_Next; if P /= 0 then if Pa_Array.The_Array (P).The_Entry = Curr_Entry then if Pa_Array.The_Array (P).Index < First_Var then Sparklalr_Goto.Set_Next (Next, First_Var, True); First_Var := Pa_Array.The_Array (P).Index; else Sparklalr_Goto.Set_Next (Next, Pa_Array.The_Array (P).Index, True); end if; else Exit_Flag := True; end if; else Exit_Flag := True; end if; exit when Exit_Flag; end loop; if Curr_Entry >= 0 then if Curr_Entry <= Sparklalr_Common.Prod_Lim then Red_Count := Red_Count + 1; end if; if (Red_Count = 1) and then (P = 0) and then Unique_Reduce_State (State_Var) then Pat_Count := Pat_Count + 1; else Write_Cond (Next, First_Var); end if; end if; end loop; end Action_Gen; procedure Action_Gen_Pa_Out (F : in out SPARK.Ada.Text_IO.File_Type; State_Var : in Integer; Curr_Pat_Index : in out Integer) --# global in Sparklalr_Memory.Prod_Sum; --# in Symbols_Dump.State; --# in Unique_Reduce_State; --# in out Pa_Array; --# in out Pa_List; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives Curr_Pat_Index from *, --# Pa_Array, --# Pa_List, --# State_Var, --# Unique_Reduce_State & --# F from *, --# Curr_Pat_Index, --# Pa_Array, --# Pa_List, --# Sparklalr_Memory.Prod_Sum, --# State_Var, --# Symbols_Dump.State, --# Unique_Reduce_State & --# Pa_Array, --# Pa_List from Pa_Array, --# Pa_List, --# State_Var & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Pa_Array, --# Pa_List, --# Sparklalr_Memory.Prod_Sum, --# State_Var, --# Symbols_Dump.State, --# Unique_Reduce_State; is Red_Count : Sparklalr_Common.Production_Count; Exit_Flag : Boolean; P : Pt_Pa_Rec; Next : Sparklalr_Goto.Next_T; First_Var : Sparklalr_Common.Term_Range; Curr_Entry : Integer; procedure Write_Act (F : in out SPARK.Ada.Text_IO.File_Type; Curr_Entry : in Integer) --# global in Sparklalr_Memory.Prod_Sum; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Curr_Entry, --# Sparklalr_Memory.Prod_Sum, --# Symbols_Dump.State; is Posn : Integer; begin Posn := 22; if Curr_Entry /= 1 then -- ACCEPT if Curr_Entry > Sparklalr_Common.Prod_Lim then SPARK.Ada.Text_IO.Put_File (File => F, Item => ", SP_Parse_Act'(Shift, "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Curr_Entry - Sparklalr_Common.Prod_Lim, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => ", No_Sym, No_Red, No_Prod)"); else SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ", SP_Parse_Act'(Reduce, SP_Productions.No_State, "); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 22); --# accept F, 10, Posn, "Ineffective assigment to Posn here expected and OK"; Symbols_Dump.Print_String_Sym (F, Sparklalr_Common.Sp_Symbol_Str, Sparklalr_Memory.Get_Prod_Sum (Curr_Entry, 1) + Sparklalr_Common.Nt_Base, 
Posn, 22, False); --# end accept; SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 22); SPARK.Ada.Text_IO.Put_File (File => F, Item => ", "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Sparklalr_Memory.Get_Prod_Sum (Curr_Entry, 2), Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => ", "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Curr_Entry, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ')'); end if; else SPARK.Ada.Text_IO.Put_File (File => F, Item => ", Accept_Action"); end if; end Write_Act; procedure Write_Cond (F : in out SPARK.Ada.Text_IO.File_Type; Next : in Sparklalr_Goto.Next_T; First_Var : in Sparklalr_Common.Term_Range; Curr_Entry : in Integer; Curr_Pat_Index : in out Integer) --# global in Sparklalr_Memory.Prod_Sum; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives Curr_Pat_Index from *, --# First_Var, --# Next & --# F from *, --# Curr_Entry, --# Curr_Pat_Index, --# First_Var, --# Next, --# Sparklalr_Memory.Prod_Sum, --# Symbols_Dump.State & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Curr_Entry, --# First_Var, --# Next, --# Sparklalr_Memory.Prod_Sum, --# Symbols_Dump.State; is T : Sparklalr_Common.Term_Range; Posn : Integer; Next_Tmp : Sparklalr_Goto.Next_T; begin SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Sym_Action_Pair'("); Next_Tmp := Next; Curr_Pat_Index := Curr_Pat_Index + 1; Posn := 21; if Next_Tmp = Sparklalr_Goto.Next_False_Const then --# accept F, 10, Posn, "Ineffective assigment to Posn here expected and OK"; Sparklalr_Common.Print2 (F, Sparklalr_Common.Sp_Symbol_Str, Symbols_Dump.Get_Term_Set (First_Var), Posn, 21, False); --# end accept; Write_Act (F, Curr_Entry); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ')'); else --# accept F, 10, Posn, "Ineffective assigment to Posn here expected and OK"; Sparklalr_Common.Print2 (F, Sparklalr_Common.Sp_Symbol_Str, Symbols_Dump.Get_Term_Set (First_Var), Posn, 21, False); --# end accept; Write_Act (F, Curr_Entry); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ')'); T := First_Var; loop T := T + 1; if Sparklalr_Goto.Get_Next (Next_Tmp, T) then SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Curr_Pat_Index, Width => 5, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " => Sym_Action_Pair'("); Curr_Pat_Index := Curr_Pat_Index + 1; Posn := 21; --# accept F, 10, Posn, "Ineffective assigment to Posn here expected and OK"; Sparklalr_Common.Print2 (F, Sparklalr_Common.Sp_Symbol_Str, Symbols_Dump.Get_Term_Set (T), Posn, 13, False); --# end accept; Write_Act (F, Curr_Entry); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ')'); Sparklalr_Goto.Set_Next (Next_Tmp, T, False); end if; exit when Next_Tmp = Sparklalr_Goto.Next_False_Const; end loop; end if; end Write_Cond; begin -- Action_Gen_Pa_Out Sort_Pa (Pa_List (State_Var)); P := Pa_List (State_Var); while P /= 0 loop Curr_Entry := Pa_Array.The_Array (P).The_Entry; First_Var := Pa_Array.The_Array (P).Index; Next := Sparklalr_Goto.Next_False_Const; Exit_Flag := False; Red_Count := 0; loop P := Pa_Array.The_Array (P).Pa_Next; if P /= 0 then if Pa_Array.The_Array (P).The_Entry = Curr_Entry then if Pa_Array.The_Array (P).Index < First_Var then Sparklalr_Goto.Set_Next (Next, First_Var, True); First_Var := Pa_Array.The_Array (P).Index; else Sparklalr_Goto.Set_Next 
(Next, Pa_Array.The_Array (P).Index, True); end if; else Exit_Flag := True; end if; else Exit_Flag := True; end if; exit when Exit_Flag; end loop; if Curr_Entry >= 0 then if Curr_Entry <= Sparklalr_Common.Prod_Lim then Red_Count := Red_Count + 1; end if; if (Red_Count = 1) and then (P = 0) and then Unique_Reduce_State (State_Var) then SPARK.Ada.Text_IO.Put_File (File => F, Item => "Sym_Action_Pair'(Default"); Curr_Pat_Index := Curr_Pat_Index + 1; Write_Act (F, Curr_Entry); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ')'); else if P = 0 then Write_Cond (F, Next, First_Var, Curr_Entry, Curr_Pat_Index); else Write_Cond (F, Next, First_Var, Curr_Entry, Curr_Pat_Index); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Curr_Pat_Index, Width => 5, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " => "); end if; end if; end if; end loop; end Action_Gen_Pa_Out; procedure Action_Gen_Pa_Out_Sp (F : in out SPARK.Ada.Text_IO.File_Type; State_Var : in Integer; Curr_Pat_Index : in out Integer) --# global in Sparklalr_Memory.Prod_Sum; --# in Symbols_Dump.State; --# in Unique_Reduce_State; --# in out Pa_Array; --# in out Pa_List; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives Curr_Pat_Index from *, --# Pa_Array, --# Pa_List, --# State_Var, --# Unique_Reduce_State & --# F from *, --# Curr_Pat_Index, --# Pa_Array, --# Pa_List, --# Sparklalr_Memory.Prod_Sum, --# State_Var, --# Symbols_Dump.State, --# Unique_Reduce_State & --# Pa_Array, --# Pa_List from Pa_Array, --# Pa_List, --# State_Var & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Pa_Array, --# Pa_List, --# Sparklalr_Memory.Prod_Sum, --# State_Var, --# Symbols_Dump.State, --# Unique_Reduce_State; is Red_Count : Sparklalr_Common.Production_Count; Exit_Flag : Boolean; P : Pt_Pa_Rec; Posn : Integer; Next : Sparklalr_Goto.Next_T; First_Var : Sparklalr_Common.Term_Range; Curr_Entry : Integer; procedure Write_Act (F : in out SPARK.Ada.Text_IO.File_Type; Posn : in out Integer; Tab : in Integer; Curr_Entry : in Integer) --# global in Sparklalr_Memory.Prod_Sum; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# Posn, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Curr_Entry, --# Posn, --# Sparklalr_Memory.Prod_Sum, --# Symbols_Dump.State, --# Tab; is begin if Curr_Entry /= 1 then -- ACCEPT if Curr_Entry > Sparklalr_Common.Prod_Lim then SPARK.Ada.Text_IO.Put_File (File => F, Item => " + (Shift_Act + State * "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Curr_Entry - Sparklalr_Common.Prod_Lim, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ')'); else SPARK.Ada.Text_IO.Put_File (File => F, Item => " + (Reduce_Act + ((Symbol * (SP_Symbols.SP_Non_Terminal'Pos ("); Posn := Posn + 72; Symbols_Dump.Print_String_Sym (F, Sparklalr_Common.Sp_Symbol_Str, Sparklalr_Memory.Get_Prod_Sum (Curr_Entry, 1) + Sparklalr_Common.Nt_Base, Posn, Tab, False); SPARK.Ada.Text_IO.Put_File (File => F, Item => ") - First_Non_Terminal) + Red_By * "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Sparklalr_Memory.Get_Prod_Sum (Curr_Entry, 2), Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => ") + Prod_No * "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Curr_Entry, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => "))"); Posn := Posn + 32; end if; else SPARK.Ada.Text_IO.Put_File (File => F, Item => " + Accept_Act"); Posn := 
Posn + 12; end if; end Write_Act; procedure Write_Cond (F : in out SPARK.Ada.Text_IO.File_Type; Next : in Sparklalr_Goto.Next_T; First_Var : in Sparklalr_Common.Term_Range; Curr_Entry : in Integer; Curr_Pat_Index : in out Integer) --# global in Sparklalr_Memory.Prod_Sum; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives Curr_Pat_Index from *, --# First_Var, --# Next & --# F from *, --# Curr_Entry, --# Curr_Pat_Index, --# First_Var, --# Next, --# Sparklalr_Memory.Prod_Sum, --# Symbols_Dump.State & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Curr_Entry, --# First_Var, --# Next, --# Sparklalr_Memory.Prod_Sum, --# Symbols_Dump.State; is T : Sparklalr_Common.Term_Range; Posn : Integer; Next_Tmp : Sparklalr_Goto.Next_T; begin Next_Tmp := Next; Curr_Pat_Index := Curr_Pat_Index + 1; if Next_Tmp = Sparklalr_Goto.Next_False_Const then SPARK.Ada.Text_IO.Put_File (File => F, Item => "SP_Symbols.SP_Terminal'Pos ("); Posn := 36; Sparklalr_Common.Print2 (F, Sparklalr_Common.Sp_Symbol_Str, Symbols_Dump.Get_Term_Set (First_Var), Posn, 10, False); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ')'); Posn := Posn + 1; --# accept F, 10, Posn, "Ineffective assigment to Posn here expected and OK"; Write_Act (F, Posn, 10, Curr_Entry); --# end accept; else SPARK.Ada.Text_IO.Put_File (File => F, Item => "SP_Symbols.SP_Terminal'Pos ("); Posn := 26; Sparklalr_Common.Print2 (F, Sparklalr_Common.Sp_Symbol_Str, Symbols_Dump.Get_Term_Set (First_Var), Posn, 10, False); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ')'); Posn := Posn + 1; --# accept F, 10, Posn, "Ineffective assigment to Posn here expected and OK"; Write_Act (F, Posn, 10, Curr_Entry); --# end accept; T := First_Var; loop T := T + 1; if Sparklalr_Goto.Get_Next (Next_Tmp, T) then SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Curr_Pat_Index, Width => 5, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " => "); Curr_Pat_Index := Curr_Pat_Index + 1; SPARK.Ada.Text_IO.Put_File (File => F, Item => "SP_Symbols.SP_Terminal'Pos ("); Posn := 26; Sparklalr_Common.Print2 (F, Sparklalr_Common.Sp_Symbol_Str, Symbols_Dump.Get_Term_Set (T), Posn, 10, False); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ')'); Posn := Posn + 1; --# accept F, 10, Posn, "Ineffective assigment to Posn here expected and OK"; Write_Act (F, Posn, 10, Curr_Entry); --# end accept; Sparklalr_Goto.Set_Next (Next_Tmp, T, False); end if; exit when Next_Tmp = Sparklalr_Goto.Next_False_Const; end loop; end if; end Write_Cond; begin -- Action_Gen_Pa_Out_Sp Sort_Pa (Pa_List (State_Var)); Posn := 0; P := Pa_List (State_Var); while P /= 0 loop Curr_Entry := Pa_Array.The_Array (P).The_Entry; First_Var := Pa_Array.The_Array (P).Index; Next := Sparklalr_Goto.Next_False_Const; Exit_Flag := False; Red_Count := 0; loop P := Pa_Array.The_Array (P).Pa_Next; if P /= 0 then if Pa_Array.The_Array (P).The_Entry = Curr_Entry then if Pa_Array.The_Array (P).Index < First_Var then Sparklalr_Goto.Set_Next (Next, First_Var, True); First_Var := Pa_Array.The_Array (P).Index; else Sparklalr_Goto.Set_Next (Next, Pa_Array.The_Array (P).Index, True); end if; else Exit_Flag := True; end if; else Exit_Flag := True; end if; exit when Exit_Flag; end loop; if Curr_Entry >= 0 then if Curr_Entry <= Sparklalr_Common.Prod_Lim then Red_Count := Red_Count + 1; end if; if (Red_Count = 1) and then (P = 0) and then Unique_Reduce_State (State_Var) then SPARK.Ada.Text_IO.Put_File (File => 
F, Item => "Default"); Curr_Pat_Index := Curr_Pat_Index + 1; Write_Act (F, Posn, 10, Curr_Entry); else if P = 0 then Write_Cond (F, Next, First_Var, Curr_Entry, Curr_Pat_Index); else Write_Cond (F, Next, First_Var, Curr_Entry, Curr_Pat_Index); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Curr_Pat_Index, Width => 5, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " => "); end if; end if; end if; end loop; end Action_Gen_Pa_Out_Sp; function Get_Reduce_State (S : in Sparklalr_Common.State_Range) return Boolean --# global in Reduce_State; is begin return Reduce_State (S); end Get_Reduce_State; function Get_Pat_Count return Integer --# global in Pat_Count; is begin return Pat_Count; end Get_Pat_Count; function Get_Pa_List (I : in Sparklalr_Common.State_Range) return Pt_Pa_Rec --# global in Pa_List; is begin return Pa_List (I); end Get_Pa_List; end Sparklalr_Parser; spark-2012.0.deb/sparklalr/sparklalr_error.adb0000644000175000017500000005731311753202335020320 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SPARK_Ada_Integer_Text_IO; with Sparklalr_Char_Class; use type Sparklalr_Char_Class.Char_Class; package body Sparklalr_Error --# own State is Blank_Line, --# Error_Set, --# Err_Ptr, --# Line_Out, --# Prod_Err, --# Sequenced, --# Tail_Ptr; is Error_Table_Size : constant := 100; type Error_Set_T is array (Error_Range) of Boolean; Error_Set_False_Const : constant Error_Set_T := Error_Set_T'(others => False); subtype Pt_Error_Type is Natural range 0 .. Error_Table_Size; subtype Err_Array_Range is Positive range 1 .. Error_Table_Size; type Error_Type is record Used : Boolean; Err_Col : Err_Col_T; Error_No : Error_Range; Next : Pt_Error_Type; end record; type Err_Array_T is array (Err_Array_Range) of Error_Type; subtype Line_Array_Range is Positive range 1 .. 
Sparklalr_Common.Line_Length; subtype Line_Array is String (Line_Array_Range); Err_Ptr : Err_Array_T; Tail_Ptr : Pt_Error_Type; Error_Set : Error_Set_T; Prod_Err : Boolean; Sequenced : Boolean; Line_Out : Line_Array; Blank_Line : Line_Array; -- Local procedures/functions procedure Skip_Seqno (F : in out SPARK.Ada.Text_IO.File_Type) --# global in Sequenced; --# in Sparklalr_Char_Class.Charmap; --# derives F from *, --# Sequenced, --# Sparklalr_Char_Class.Charmap; is C : Character; End_Of_Line : Boolean; I : Integer; begin --# accept F, 10, End_Of_Line, "Ineffective assignment here expected"; SPARK.Ada.Text_IO.Look_Ahead_File (File => F, Item => C, End_Of_Line => End_Of_Line); --# end accept; if Sequenced and then (Sparklalr_Char_Class.Get_Charmap (C) = Sparklalr_Char_Class.Digit) then --# accept F, 10, I, "Ineffective assignment here expected"; SPARK_Ada_Integer_Text_IO.Get_File (File => F, Item => I, Width => 0); --# end accept; --# accept F, 10, End_Of_Line, "Ineffective assignment here expected"; SPARK.Ada.Text_IO.Look_Ahead_File (File => F, Item => C, End_Of_Line => End_Of_Line); --# end accept; if C = ' ' then --# accept F, 10, C, "Skipping whitespace, so value is discarded OK"; SPARK.Ada.Text_IO.Get_Character_File (File => F, Item => C); --# end accept; end if; end if; --# accept F, 33, End_Of_Line, "Unused OK" & --# F, 33, I, "Unused OK"; end Skip_Seqno; procedure List_Errors (F : in out SPARK.Ada.Text_IO.File_Type) --# global in Error_Set; --# in out Err_Ptr; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives Err_Ptr from * & --# F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Error_Set, --# Err_Ptr; is procedure Eprint (F : in out SPARK.Ada.Text_IO.File_Type) --# global in out Err_Ptr; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives Err_Ptr, --# F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Err_Ptr; is T : Pt_Error_Type; Min_Err_Col : Err_Col_T; Continue : Boolean; Err_Head : Pt_Error_Type; Column : Err_Col_T; begin Err_Head := 1; Column := 0; Continue := True; while Continue loop while Err_Head /= 0 loop if Err_Ptr (Err_Head).Err_Col < Column then Err_Head := Err_Ptr (Err_Head).Next; else Sparklalr_Common.Put_N_Chars (Std_Out => True, F => F, C => '^', N => (Err_Ptr (Err_Head).Err_Col - Column) + 1); SPARK_Ada_Integer_Text_IO.Put_Output (Item => Err_Ptr (Err_Head).Error_No, Width => 2, Base => 10); SPARK.Ada.Text_IO.Put_Character_Output (Item => ' '); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => '^', N => (Err_Ptr (Err_Head).Err_Col - Column) + 1); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Err_Ptr (Err_Head).Error_No, Width => 2, Base => 10); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ' '); Column := Err_Ptr (Err_Head).Err_Col + 4; T := Err_Head; Err_Head := Err_Ptr (Err_Head).Next; Err_Ptr (T) := Error_Type'(Used => False, Err_Col => 0, Error_No => 0, Next => 0); end if; end loop; SPARK.Ada.Text_IO.New_Line_Output (Spacing => 1); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Column := 0; Min_Err_Col := Sparklalr_Common.Line_Length; Continue := False; for I in Err_Array_Range loop if Err_Ptr (I).Used then if Min_Err_Col > Err_Ptr (I).Err_Col then Min_Err_Col := Err_Ptr (I).Err_Col; Err_Head := I; end if; Continue := True; end if; end loop; end loop; end Eprint; procedure Emessages (Std_Out : in Boolean; F : in out SPARK.Ada.Text_IO.File_Type) --# global in Error_Set; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, 
--# Error_Set, --# Std_Out; is begin for I in Error_Range loop if Error_Set (I) then Sparklalr_Common.Put_File_Output (Std_Out => Std_Out, File => F, Item => "**ERROR** :"); Sparklalr_Common.Put_Integer_File_Output (Std_Out => Std_Out, File => F, Item => I, Width => 3); Sparklalr_Common.Put_File_Output (Std_Out => Std_Out, File => F, Item => " "); case I is when 0 => Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => "LINE TOO LONG - TRUNCATED"); when 2 => Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => "& NOT FOLLOWED BY KNOWN KEYWORD"); when 3 => Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => "UNEXPECTED SYMBOL - SKIPPING FORWARD"); when 4 => Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => "NO ACTION FOLLOWING ""="""); when 5 => Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => "UNEXPECTED SYMBOL ON RHS"); when 6 => Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => "COLON EXPECTED ON LHS"); when 7 => Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => "LHS IDENTIFIER EXPECTED"); when 8 => Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => "BAD TERM SECTION SYNTAX"); when 9 => Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => "UNEXPECTED && - SKIPPED"); when 10 => Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => "INVALID SYMBOL ENCOUNTERED"); when 12 => Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => "IDENTIFIER NOT FOUND AFTER &PREC"); when 13 => Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => "NON-TERMINAL FOUND AFTER &PREC"); when 14 => Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => "TERMINAL SYMBOL ON LHS OF PRODUCTION"); when 15 => Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => "TOO MANY NON-TERMINAL SYMBOLS"); when 16 => Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => "TOO MANY TERMINAL SYMBOLS"); when 17 => Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => "INVALID SYMBOL AFTER ""\"" "); when 18 => Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => """\"" Substitution is not allowed in this version"); when others => null; end case; end if; end loop; end Emessages; begin -- List_Errors Eprint (F); Emessages (Std_Out => True, F => F); Emessages (Std_Out => False, F => F); end List_Errors; procedure List_Line (Std_Out : in Boolean; F : in out SPARK.Ada.Text_IO.File_Type; Col : in Err_Col_T) --# global in Line_Out; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Col, --# Line_Out, --# Std_Out; is begin Sparklalr_Common.Put_Character_File_Output (Std_Out => Std_Out, File => F, Item => ' '); if Col > 0 then for I in Integer range 1 .. 
Col loop Sparklalr_Common.Put_Character_File_Output (Std_Out => Std_Out, File => F, Item => Line_Out (I)); end loop; end if; Sparklalr_Common.New_Line_File_Output (Std_Out => Std_Out, File => F); end List_Line; -- End local procedures/functions procedure Initialise (F : in out SPARK.Ada.Text_IO.File_Type) --# global in Sparklalr_Char_Class.Charmap; --# out Blank_Line; --# out Error_Set; --# out Err_Ptr; --# out Line_Out; --# out Prod_Err; --# out Sequenced; --# out Tail_Ptr; --# derives Blank_Line, --# Error_Set, --# Err_Ptr, --# Line_Out, --# Prod_Err, --# Tail_Ptr from & --# F, --# Sequenced from F, --# Sparklalr_Char_Class.Charmap; is C : Character; End_Of_Line : Boolean; begin Err_Ptr := Err_Array_T'(others => Error_Type'(Used => False, Err_Col => 0, Error_No => 0, Next => 0)); Tail_Ptr := 0; Error_Set := Error_Set_T'(others => False); Prod_Err := False; Blank_Line := Line_Array'(others => ' '); Line_Out := Blank_Line; Sequenced := True; Skip_Seqno (F); --# accept F, 10, End_Of_Line, "Ineffective assignment here expected"; SPARK.Ada.Text_IO.Look_Ahead_File (File => F, Item => C, End_Of_Line => End_Of_Line); --# end accept; Sequenced := Sparklalr_Char_Class.Get_Charmap (C) = Sparklalr_Char_Class.Digit; --# accept F, 33, End_Of_Line, "Unused OK"; end Initialise; procedure Syn_Error (Error_Num : in Error_Range; Col : in Err_Col_T) --# global in out Error_Set; --# in out Err_Ptr; --# in out Tail_Ptr; --# out Prod_Err; --# derives Error_Set from *, --# Error_Num & --# Err_Ptr from *, --# Col, --# Error_Num, --# Tail_Ptr & --# Prod_Err from & --# Tail_Ptr from *; is T : Pt_Error_Type; begin Prod_Err := True; T := Tail_Ptr + 1; Err_Ptr (T) := Error_Type'(Used => True, Err_Col => Col, Error_No => Error_Num, Next => 0); if Tail_Ptr /= 0 then Err_Ptr (Tail_Ptr).Next := T; end if; Tail_Ptr := T; Error_Set (Error_Num) := True; end Syn_Error; procedure Error (F : in out SPARK.Ada.Text_IO.File_Type; N : in Integer) --# global in out SPARK.Ada.Text_IO.The_Standard_Output; --# out Prod_Err; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# N & --# Prod_Err from ; is procedure Err_List (Std_Out : in Boolean; F : in out SPARK.Ada.Text_IO.File_Type; N : in Integer) --# global in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# N, --# Std_Out; is begin Sparklalr_Common.Put_File_Output (Std_Out => Std_Out, File => F, Item => "***ERROR***"); Sparklalr_Common.Put_Integer_File_Output (Std_Out => Std_Out, File => F, Item => N, Width => 4); Sparklalr_Common.Put_File_Output (Std_Out => Std_Out, File => F, Item => " "); --# accept W, 303, "when others covers all cases here"; case N is when 0 => Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => "LLAMA TERMINATED BEFORE END OF INPUT GRAMMAR FILE"); when 1 => Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => "NO GRAMMAR SPECIFICATIONS IN INPUT FILE"); when 30 => Sparklalr_Common.Put_File_Output (Std_Out => Std_Out, File => F, Item => "UNDEFINED NONTERMINAL SYMBOL -"); when 32 | 36 | 50 => Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => "INTERNAL LLAMA ERROR - CONSISTENCY CHECK"); when others => null; end case; --# end accept; end Err_List; begin -- Error Prod_Err := True; Err_List (Std_Out => False, F => F, N => N); Err_List (Std_Out => True, F => F, N => N); end Error; procedure Write_The_Line (F, Echo : in out SPARK.Ada.Text_IO.File_Type; Col : in out Err_Col_T) --# global in Line_Out; --# in 
Sequenced; --# in Sparklalr_Char_Class.Charmap; --# in out Error_Set; --# in out Err_Ptr; --# in out Prod_Err; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Tail_Ptr; --# derives Col, --# Error_Set from & --# Echo, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Col, --# Error_Set, --# Err_Ptr, --# Line_Out, --# Tail_Ptr & --# Err_Ptr from *, --# Col, --# Error_Set, --# Tail_Ptr & --# F from *, --# Sequenced, --# Sparklalr_Char_Class.Charmap & --# Prod_Err, --# Tail_Ptr from *, --# Col; is begin if Col = Sparklalr_Common.Line_Length then Syn_Error (0, Col); end if; List_Line (Std_Out => False, F => Echo, Col => Col); if Error_Set /= Error_Set_False_Const then List_Line (Std_Out => True, F => Echo, Col => Col); List_Errors (Echo); end if; Error_Set := Error_Set_False_Const; Col := 0; SPARK.Ada.Text_IO.Skip_Line_File (File => F, Spacing => 1); if Sequenced then Skip_Seqno (F); end if; end Write_The_Line; procedure Set_Line_Out (I : in Err_Col_T; C : in Character) --# global in out Line_Out; --# derives Line_Out from *, --# C, --# I; is begin Line_Out (I) := C; end Set_Line_Out; procedure List_Line_Errors (F : in out SPARK.Ada.Text_IO.File_Type; Col : in Err_Col_T) --# global in Error_Set; --# in Line_Out; --# in out Err_Ptr; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives Err_Ptr from *, --# Error_Set & --# F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Col, --# Error_Set, --# Err_Ptr, --# Line_Out; is begin if Error_Set /= Error_Set_False_Const then List_Line (Std_Out => False, F => F, Col => Col); List_Errors (F); end if; end List_Line_Errors; function Get_Prod_Err return Boolean --# global in Prod_Err; is begin return Prod_Err; end Get_Prod_Err; end Sparklalr_Error; spark-2012.0.deb/sparklalr/sparklalr_char_class.adb0000644000175000017500000000561011753202335021262 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= package body Sparklalr_Char_Class is type Charmap_Array_T is array (Character) of Char_Class; Charmap : Charmap_Array_T; procedure Initialise is Ordmaxchar : constant := 255; -- ORDINAL OF MAX CHARACTER Uppercase : constant String := "ABCDEFGHIJKLMNOPQRSTUVWXYZ"; Lowercase : constant String := "abcdefghijklmnopqrstuvwxyz"; Special : constant String := "&:;^|(,=_ "; Nspecial : constant := 10; subtype Letters_Range is Positive range 1 .. 26; subtype Letters_T is String (Letters_Range); Letters : Letters_T; subtype Sp_Range is Positive range 1 .. Nspecial; subtype Sp_T is String (Sp_Range); Sp : Sp_T; subtype Otherchar_Range is Character range Character'Val (0) .. 
Character'Val (Ordmaxchar); subtype Digit_Range is Character range Character'Val (Character'Pos ('0')) .. Character'Val (Character'Pos ('9')); begin Charmap := Charmap_Array_T'(others => None); for Ch in Otherchar_Range loop Charmap (Ch) := Otherchar; end loop; for Ch in Digit_Range loop Charmap (Ch) := Digit; end loop; Letters := Uppercase; for I in Letters_Range loop Charmap (Letters (I)) := Letter; end loop; Letters := Lowercase; for I in Letters_Range loop Charmap (Letters (I)) := Letter; end loop; Sp := Special; Charmap (Sp (1)) := Ampchar; Charmap (Sp (2)) := Colonchar; Charmap (Sp (3)) := Scolonchar; Charmap (Sp (4)) := Uparrowchar; Charmap (Sp (5)) := Uparrowchar; Charmap (Sp (6)) := Lparenchar; Charmap (Sp (7)) := Commachar; Charmap (Sp (8)) := Equalchar; Charmap (Sp (9)) := Underscore; Charmap (Sp (10)) := Blankchar; end Initialise; function Get_Charmap (C : in Character) return Char_Class is begin return Charmap (C); end Get_Charmap; end Sparklalr_Char_Class; spark-2012.0.deb/sparklalr/spark.sw0000644000175000017500000000021711753202335016126 0ustar eugeneugen-sparklib -output_directory=vcg -config_file=../common/gnat.cfg -listing_extension=ls_ -casing -index_file=sparklalr.idx -report=sparklalr.rep spark-2012.0.deb/sparklalr/sparklalr_common.ads0000644000175000017500000001616711753202335020502 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SPARK.Ada.Text_IO; --# inherit SPARK.Ada.Text_IO, --# SPARK_Ada_Integer_Text_IO; package Sparklalr_Common is Id_Length : constant := 50; Max_Set : constant := 255; -- MAXIMUM ORDINAL IN SET Line_Length : constant := 255; State_Max : constant := 2047; -- MAXIMUM STATE NUMBER Prod_Lim : constant := 1023; -- MAXIMUM PRODUCTION NUMBER Page_Width : constant := 78; -- Maximum width of output files Term_Lim : constant := Max_Set; -- MAXIMUM TERMINAL NUMBER Non_Term_Lim : constant := 511; -- MAXIMUM NUMBER OF NON-TERMINALS Max_Sym : constant := 766; -- = TERM_LIM + NON_TERM_LIM Nt_Base : constant := Term_Lim; -- NON-TERMINAL OFFSET FOR PRDN. TABLE Sp_Symbol_Str : constant String := "SP_Symbols. "; type Action_Type is (Shift, Reduce); subtype Production_Count is Natural range 0 .. Prod_Lim; subtype Production_Index is Positive range 1 .. Prod_Lim; subtype Id_Length_Count is Natural range 0 .. Id_Length; subtype Id_Length_Range is Positive range 1 .. Id_Length; subtype Short_Int is Integer range -32767 .. 32767; subtype State_Range is Natural range 0 .. State_Max; subtype Id_Name is String (Id_Length_Range); subtype Term_Range is Natural range 0 .. Term_Lim; subtype Non_Term_Range is Positive range 1 .. 
Non_Term_Lim; subtype Sym_Range is Integer range -1 .. Max_Sym; procedure Put_File_Output (Std_Out : in Boolean; File : in out SPARK.Ada.Text_IO.File_Type; Item : in String); --# global in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives File, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Item, --# Std_Out; procedure Put_Character_File_Output (Std_Out : in Boolean; File : in out SPARK.Ada.Text_IO.File_Type; Item : in Character); --# global in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives File, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Item, --# Std_Out; procedure Put_Line_File_Output (Std_Out : in Boolean; File : in out SPARK.Ada.Text_IO.File_Type; Item : in String); --# global in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives File, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Item, --# Std_Out; procedure Put_Integer_File_Output (Std_Out : in Boolean; File : in out SPARK.Ada.Text_IO.File_Type; Item : in Integer; Width : in SPARK.Ada.Text_IO.Field); --# global in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives File, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Item, --# Std_Out, --# Width; procedure New_Line_File_Output (Std_Out : in Boolean; File : in out SPARK.Ada.Text_IO.File_Type); --# global in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives File, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Std_Out; -- Puts C to Fil N times. N can be zero. procedure Put_N_Chars (Std_Out : in Boolean; F : in out SPARK.Ada.Text_IO.File_Type; C : in Character; N : in Natural); --# global in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# C, --# N, --# Std_Out; procedure Print (Std_Out : in Boolean; F : in out SPARK.Ada.Text_IO.File_Type; String1 : in String; Posn : in out Integer; Tab : in Integer; Comm : in Boolean); --# global in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Comm, --# Posn, --# Std_Out, --# String1, --# Tab & --# Posn from *, --# String1, --# Tab; procedure Print2 (F : in out SPARK.Ada.Text_IO.File_Type; String1 : in String; String2 : in String; Posn : in out Integer; Tab : in Integer; Comm : in Boolean); --# global in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Comm, --# Posn, --# String1, --# String2, --# Tab & --# Posn from *, --# String1, --# String2, --# Tab; function Code (Sr : in Action_Type; C : in Integer) return Integer; function Decode (C : in Integer) return Integer; end Sparklalr_Common; spark-2012.0.deb/sparklalr/sparklalr_memory-left_corner.adb0000644000175000017500000007122111753202335022771 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Command_Line_Options; with SPARK_Ada_Integer_Text_IO; with Sparklalr_Common; with Sparklalr_Memory.Dump; with Symbols_Dump; package body Sparklalr_Memory.Left_Corner --# own State is Left_Corners, --# Left_Corner_Count; is -- ---- These types are used in the generation of the SPARK parser Tables ---- type Left_Corner_Entry is record Lower, Upper : Integer; Left_Corner_Symbols : Sparklalr_Memory.Symbol_Set_T; Other_Symbol : Boolean; Base_Symbol : Sparklalr_Common.Non_Term_Range; end record; type Left_Corner_Tab is array (Sparklalr_Common.Non_Term_Range) of Left_Corner_Entry; Left_Corner_Count : Integer; Left_Corners : Left_Corner_Tab; procedure Count_Left_Corners --# global in Symbols_Dump.State; --# in out Left_Corners; --# in out Left_Corner_Count; --# derives Left_Corners, --# Left_Corner_Count from Left_Corners, --# Left_Corner_Count, --# Symbols_Dump.State; is begin for Nt in Integer range 1 .. Symbols_Dump.Get_Nnon_Terms loop if Left_Corners (Nt).Base_Symbol /= Nt then Left_Corners (Nt).Lower := Left_Corners (Left_Corners (Nt).Base_Symbol).Lower; Left_Corners (Nt).Upper := Left_Corners (Left_Corners (Nt).Base_Symbol).Upper; else Left_Corners (Nt).Lower := Left_Corner_Count; for Sym in Integer range 0 .. Symbols_Dump.Get_Nterms loop if Sparklalr_Memory.Get_Symbol_Set (Left_Corners (Nt).Left_Corner_Symbols, Sym) then Left_Corner_Count := Left_Corner_Count + 1; end if; end loop; for Sym in Integer range Sparklalr_Common.Nt_Base + 1 .. Sparklalr_Common.Nt_Base + Symbols_Dump.Get_Nnon_Terms loop if Sparklalr_Memory.Get_Symbol_Set (Left_Corners (Nt).Left_Corner_Symbols, Sym) then Left_Corner_Count := Left_Corner_Count + 1; end if; end loop; Left_Corners (Nt).Upper := Left_Corner_Count - 1; end if; end loop; end Count_Left_Corners; procedure Gen_Left_Corner --# global in Dump.State; --# in Symbols_Dump.State; --# out Left_Corners; --# out Left_Corner_Count; --# derives Left_Corners from Dump.State, --# Symbols_Dump.State & --# Left_Corner_Count from ; is C : Sparklalr_Common.Sym_Range; S, T : Dump.Pt_Memory; Not_Closed, Element_Added : Boolean; B : Sparklalr_Memory.Symbol_Set_T; procedure Merge_Left_Corner_Groups --# global in Symbols_Dump.State; --# in out Left_Corners; --# derives Left_Corners from *, --# Symbols_Dump.State; is Sym : Sparklalr_Common.Sym_Range; The_Same : Boolean; begin if Symbols_Dump.Get_Nnon_Terms > 0 then for Nt1 in Integer range 1 .. Symbols_Dump.Get_Nnon_Terms loop if Nt1 - 1 > 0 then for Nt2 in Integer range 1 .. 
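-- The nested loop below compares Nt1 with each earlier representative
-- non-terminal Nt2 (one whose Base_Symbol is still itself).  If the two
-- left-corner symbol sets agree on every terminal and non-terminal, Nt1 is
-- redirected to share Nt2's entry (Base_Symbol := Nt2) and Nt2 is marked
-- Other_Symbol, so Count_Left_Corners and Out_Left_Corner emit the common
-- set only once.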
Nt1 - 1 loop if Left_Corners (Nt2).Base_Symbol = Nt2 then The_Same := True; Sym := 0; while The_Same and then (Sym <= Symbols_Dump.Get_Nterms) loop The_Same := Sparklalr_Memory.Get_Symbol_Set (Left_Corners (Nt1).Left_Corner_Symbols, Sym) = Sparklalr_Memory.Get_Symbol_Set (Left_Corners (Nt2).Left_Corner_Symbols, Sym); Sym := Sym + 1; end loop; Sym := 1; while The_Same and then (Sym <= Symbols_Dump.Get_Nnon_Terms) loop The_Same := Sparklalr_Memory.Get_Symbol_Set (Left_Corners (Nt1).Left_Corner_Symbols, Sym + Sparklalr_Common.Nt_Base) = Sparklalr_Memory.Get_Symbol_Set (Left_Corners (Nt2).Left_Corner_Symbols, Sym + Sparklalr_Common.Nt_Base); Sym := Sym + 1; end loop; if The_Same then Left_Corners (Nt2).Other_Symbol := True; Left_Corners (Nt1).Base_Symbol := Nt2; end if; end if; end loop; end if; end loop; end if; end Merge_Left_Corner_Groups; begin Left_Corner_Count := 1; Left_Corners := Left_Corner_Tab' (others => Left_Corner_Entry'(Lower => 0, Upper => 0, Left_Corner_Symbols => Sparklalr_Memory.Symbol_Set_T'(others => False), Other_Symbol => False, Base_Symbol => 1)); for Nt in Integer range 1 .. Symbols_Dump.Get_Nnon_Terms loop for Sym_Set_El in Sparklalr_Common.Sym_Range loop Left_Corners (Nt).Left_Corner_Symbols (Sym_Set_El) := False; end loop; Left_Corners (Nt).Other_Symbol := False; Left_Corners (Nt).Base_Symbol := Nt; end loop; -- Form closure of LeftCorner Not_Closed := True; while Not_Closed loop Not_Closed := False; for Nt in Integer range 1 .. Symbols_Dump.Get_Nnon_Terms loop S := Dump.Get_Ntrdn (Nt); T := Dump.Get_Ntrdn (Nt + 1); while S /= T loop if Dump.Get_Contents (Dump.Get_Next (Dump.Get_Mem_Pt (S))) >= 0 then C := Dump.Get_Contents (Dump.Get_Next (Dump.Get_Mem_Pt (S))); if not Sparklalr_Memory.Get_Symbol_Set (Left_Corners (Nt).Left_Corner_Symbols, C) then Left_Corners (Nt).Left_Corner_Symbols (C) := True; Not_Closed := True; end if; if C > Sparklalr_Common.Nt_Base then B := Left_Corners (C - Sparklalr_Common.Nt_Base).Left_Corner_Symbols; Sparklalr_Memory.Set_Union (Left_Corners (Nt).Left_Corner_Symbols, B, Element_Added); Not_Closed := Not_Closed or else Element_Added; end if; end if; S := Dump.Get_Next (S); end loop; end loop; end loop; Merge_Left_Corner_Groups; end Gen_Left_Corner; procedure Out_Left_Corner (F : in out SPARK.Ada.Text_IO.File_Type) --# global in Command_Line_Options.State; --# in Dump.State; --# in Left_Corners; --# in Left_Corner_Count; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F from *, --# Command_Line_Options.State, --# Dump.State, --# Left_Corners, --# Left_Corner_Count, --# Symbols_Dump.State & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State, --# Left_Corners, --# Symbols_Dump.State; is Pos, Index, Left_Corners_P2 : Integer; Comma_Required : Boolean; begin SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "with SP_Symbols;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "use type SP_Symbols.SP_Symbol;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "--# inherit SP_Symbols;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "package SP_Relations is"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " function SP_Left_Corner (Parent : SP_Symbols.SP_Symbol;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Child : SP_Symbols.SP_Symbol) return Boolean;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " function 
SP_Terminal_Like (Sym : SP_Symbols.SP_Symbol) return Boolean;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "end SP_Relations;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "package body SP_Relations is"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " --# hide SP_Relations;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_File (File => F, Item => " No_Of_Left_Corners : constant Natural := "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Left_Corner_Count - 1, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ";"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " type Left_Corner_Range is range 1 .. No_Of_Left_Corners;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " type Left_Corner_Rel is array (Left_Corner_Range) of SP_Symbols.SP_Symbol;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); if Command_Line_Options.Get_Self_Pack then Left_Corners_P2 := 0; while 2 ** Left_Corners_P2 <= Left_Corner_Count - 1 loop Left_Corners_P2 := Left_Corners_P2 + 1; end loop; SPARK.Ada.Text_IO.Put_File (File => F, Item => " type Relation_Entry is range 0 .. 2**"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => 2 * Left_Corners_P2 + 1, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "-1;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Term_Like_Lim : constant Relation_Entry := 2;"); SPARK.Ada.Text_IO.Put_File (File => F, Item => " Low_Lim : constant Relation_Entry := 2**"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Left_Corners_P2, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ";"); SPARK.Ada.Text_IO.Put_File (File => F, Item => " High_Lim : constant Relation_Entry := 2**"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Left_Corners_P2, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ";"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Terminal_Like : constant Relation_Entry := 1;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Low : constant Relation_Entry := Terminal_Like * Term_Like_Lim;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " High : constant Relation_Entry := Low * Low_Lim;"); else SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " type Relation_Entry is record"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Terminal_Like : Boolean;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Low, High : Left_Corner_Range;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " end record;"); end if; SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " type Relation_Table is array (SP_Symbols.SP_Non_Terminal) of Relation_Entry;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Left_Corner : constant Left_Corner_Rel := Left_Corner_Rel'("); Comma_Required := False; Index := 1; for Nt in Integer range 1 .. Symbols_Dump.Get_Nnon_Terms loop if Left_Corners (Nt).Base_Symbol = Nt then if Left_Corners (Nt).Other_Symbol then for Subordinate in Integer range Nt + 1 .. 
Symbols_Dump.Get_Nnon_Terms loop if Left_Corners (Subordinate).Base_Symbol = Nt then if Comma_Required then SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); end if; Comma_Required := False; Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 3); SPARK.Ada.Text_IO.Put_File (File => F, Item => "-- "); Pos := 7; --# accept F, 10, Pos, "Ineffective assignment here expected and OK"; Sparklalr_Common.Print2 (F, Sparklalr_Common.Sp_Symbol_Str, Symbols_Dump.Get_Nterm_Set (Subordinate), Pos, 7, False); --# end accept; SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); end if; end loop; end if; if Comma_Required then SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); end if; Comma_Required := False; Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 3); SPARK.Ada.Text_IO.Put_File (File => F, Item => "-- "); Pos := 7; --# accept F, 10, Pos, "Ineffective assignment here expected and OK"; Sparklalr_Common.Print2 (F, Sparklalr_Common.Sp_Symbol_Str, Symbols_Dump.Get_Nterm_Set (Nt), Pos, 7, False); --# end accept; SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " =>"); for Sym in Integer range 0 .. Symbols_Dump.Get_Nterms loop if Sparklalr_Memory.Get_Symbol_Set (Left_Corners (Nt).Left_Corner_Symbols, Sym) then if Comma_Required then SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); else Comma_Required := True; end if; SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Index, Width => 5, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " => "); Pos := 10; --# accept F, 10, Pos, "Ineffective assignment here expected and OK"; Symbols_Dump.Print_String_Sym (F, Sparklalr_Common.Sp_Symbol_Str, Sym, Pos, 10, False); --# end accept; Index := Index + 1; end if; end loop; for Sym in Integer range Sparklalr_Common.Nt_Base + 1 .. Sparklalr_Common.Nt_Base + Symbols_Dump.Get_Nnon_Terms loop if Sparklalr_Memory.Get_Symbol_Set (Left_Corners (Nt).Left_Corner_Symbols, Sym) then if Comma_Required then SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); else Comma_Required := True; end if; SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Index, Width => 5, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " => "); Pos := 10; --# accept F, 10, Pos, "Ineffective assignment here expected and OK"; Symbols_Dump.Print_String_Sym (F, Sparklalr_Common.Sp_Symbol_Str, Sym, Pos, 10, False); --# end accept; Index := Index + 1; end if; end loop; end if; end loop; SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ");"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Rel_Tab : constant Relation_Table := Relation_Table'("); if Command_Line_Options.Get_Self_Pack then for Nt in Integer range 1 .. 
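-- When the self-pack option is selected (Command_Line_Options.Get_Self_Pack),
-- each Rel_Tab entry written below is a single integer packing the three
-- fields of the unpacked Relation_Entry record.  With the constants emitted
-- above (Low = Terminal_Like * Term_Like_Lim = 2 and
-- High = Low * Low_Lim = 2 * 2**Left_Corners_P2) the value written is
--    Boolean'Pos (terminal-like) + Low * lower_index + High * upper_index
-- so bit 0 holds the terminal-like flag, the next Left_Corners_P2 bits hold
-- the lower Left_Corner_Rel index, and the bits above those hold the upper
-- index.  A reader of the generated table could recover the fields as, for
-- example,
--    TL    := Rel mod Term_Like_Lim;
--    Lower := (Rel / Low) mod Low_Lim;
--    Upper := Rel / High;
-- but this is only an illustrative sketch: the real decoding belongs to the
-- separate bodies of SP_Left_Corner and SP_Terminal_Like, which are not part
-- of this file.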
Symbols_Dump.Get_Nnon_Terms - 1 loop Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 4); Pos := 4; --# accept F, 10, Pos, "Ineffective assignment here expected and OK"; Sparklalr_Common.Print2 (F, Sparklalr_Common.Sp_Symbol_Str, Symbols_Dump.Get_Nterm_Set (Nt), Pos, 4, False); --# end accept; SPARK.Ada.Text_IO.Put_File (File => F, Item => " => "); if Dump.Get_Terminal_Like (Nt + Sparklalr_Common.Nt_Base) then SPARK.Ada.Text_IO.Put_File (File => F, Item => "Terminal_Like * Boolean'Pos (True) + "); else SPARK.Ada.Text_IO.Put_File (File => F, Item => "Terminal_Like * Boolean'Pos (False) + "); end if; SPARK.Ada.Text_IO.Put_File (File => F, Item => "Low*"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Left_Corners (Nt).Lower, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " + "); SPARK.Ada.Text_IO.Put_File (File => F, Item => "High*"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Left_Corners (Nt).Upper, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); end loop; Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 4); Pos := 4; --# accept F, 10, Pos, "Ineffective assignment here expected and OK"; Sparklalr_Common.Print2 (F, Sparklalr_Common.Sp_Symbol_Str, Symbols_Dump.Get_Nterm_Set (Symbols_Dump.Get_Nnon_Terms), Pos, 4, False); --# end accept; SPARK.Ada.Text_IO.Put_File (File => F, Item => " => "); if Dump.Get_Terminal_Like (Symbols_Dump.Get_Nnon_Terms + Sparklalr_Common.Nt_Base) then SPARK.Ada.Text_IO.Put_File (File => F, Item => "Terminal_Like * Boolean'Pos (True) + "); else SPARK.Ada.Text_IO.Put_File (File => F, Item => "Terminal_Like * Boolean'Pos (False) + "); end if; SPARK.Ada.Text_IO.Put_File (File => F, Item => "Low*"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Left_Corners (Symbols_Dump.Get_Nnon_Terms).Lower, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " + "); SPARK.Ada.Text_IO.Put_File (File => F, Item => "High*"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Left_Corners (Symbols_Dump.Get_Nnon_Terms).Upper, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ");"); else for Nt in Integer range 1 .. 
Symbols_Dump.Get_Nnon_Terms - 1 loop Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 4); Pos := 4; --# accept F, 10, Pos, "Ineffective assignment here expected and OK"; Sparklalr_Common.Print2 (F, Sparklalr_Common.Sp_Symbol_Str, Symbols_Dump.Get_Nterm_Set (Nt), Pos, 4, False); --# end accept; SPARK.Ada.Text_IO.Put_File (File => F, Item => " => Relation_Entry'("); if Dump.Get_Terminal_Like (Nt + Sparklalr_Common.Nt_Base) then SPARK.Ada.Text_IO.Put_File (File => F, Item => "True, "); else SPARK.Ada.Text_IO.Put_File (File => F, Item => "False, "); end if; SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Left_Corners (Nt).Lower, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => ", "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Left_Corners (Nt).Upper, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "),"); end loop; Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 4); Pos := 4; --# accept F, 10, Pos, "Ineffective assignment here expected and OK"; Sparklalr_Common.Print2 (F, Sparklalr_Common.Sp_Symbol_Str, Symbols_Dump.Get_Nterm_Set (Symbols_Dump.Get_Nnon_Terms), Pos, 4, False); --# end accept; SPARK.Ada.Text_IO.Put_File (File => F, Item => " => Relation_Entry'("); if Dump.Get_Terminal_Like (Symbols_Dump.Get_Nnon_Terms + Sparklalr_Common.Nt_Base) then SPARK.Ada.Text_IO.Put_File (File => F, Item => "True, "); else SPARK.Ada.Text_IO.Put_File (File => F, Item => "False, "); end if; SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Left_Corners (Symbols_Dump.Get_Nnon_Terms).Lower, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => ", "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Left_Corners (Symbols_Dump.Get_Nnon_Terms).Upper, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "));"); end if; SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " function SP_Left_Corner (Parent : SP_Symbols.SP_Symbol;"); SPARK.Ada.Text_IO.Put_File (File => F, Item => " Child : SP_Symbols.SP_Symbol)"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " return Boolean is separate;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_File (File => F, Item => " function SP_Terminal_Like (Sym : SP_Symbols.SP_Symbol)"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " return Boolean is separate;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "end SP_Relations;"); end Out_Left_Corner; end Sparklalr_Memory.Left_Corner; spark-2012.0.deb/sparklalr/sparklalr_patab.adb0000644000175000017500000012650511753202335020256 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Command_Line_Options; with SPARK_Ada_Integer_Text_IO; with Sparklalr_Common; with Sparklalr_Goto; with Sparklalr_Memory; with Sparklalr_Parser; with Symbols_Dump; package body Sparklalr_Patab --# own State is Patab, --# Pat_Seg_Count, --# State_Table; is type Patab_Elem is record Nact : Integer; Other_Act : Boolean; Check_Other_Act : Boolean; end record; type Patab_T is array (Sparklalr_Common.State_Range) of Patab_Elem; type State_Index_Pair is record Lower, Upper : Integer; end record; type State_Index is array (Sparklalr_Common.State_Range) of State_Index_Pair; Patab : Patab_T; State_Table : State_Index; Pat_Seg_Count : Integer; procedure Initialise --# global out Patab; --# out Pat_Seg_Count; --# out State_Table; --# derives Patab, --# Pat_Seg_Count, --# State_Table from ; is begin Patab := Patab_T'(others => Patab_Elem'(Nact => 0, Other_Act => False, Check_Other_Act => False)); State_Table := State_Index'(others => State_Index_Pair'(Lower => 0, Upper => 0)); Pat_Seg_Count := 0; for I in Sparklalr_Common.State_Range loop Patab (I).Nact := I; end loop; end Initialise; procedure Pa_Out (F : in out SPARK.Ada.Text_IO.File_Type) --# global in Sparklalr_Memory.Prod_Sum; --# in Sparklalr_Memory.Stat_No; --# in Symbols_Dump.State; --# in out Patab; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Parser.State; --# in out State_Table; --# derives F from *, --# Patab, --# Sparklalr_Memory.Prod_Sum, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# State_Table, --# Symbols_Dump.State & --# Patab, --# Sparklalr_Parser.State from *, --# Patab, --# Sparklalr_Memory.Stat_No & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Patab, --# Sparklalr_Memory.Prod_Sum, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# Symbols_Dump.State & --# State_Table from *, --# Patab, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State; is procedure Aux_Pat_Out (F : in out SPARK.Ada.Text_IO.File_Type) --# global in Sparklalr_Memory.Prod_Sum; --# in Sparklalr_Memory.Stat_No; --# in Symbols_Dump.State; --# in out Patab; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Parser.State; --# in out State_Table; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Patab, --# Sparklalr_Memory.Prod_Sum, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# Symbols_Dump.State & --# Patab, --# Sparklalr_Parser.State from *, --# Patab, --# Sparklalr_Memory.Stat_No & --# State_Table from *, --# Patab, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State; is J : Integer; Other_Acts_There : Boolean; Curr_Pat_Index : Integer; begin Curr_Pat_Index := 1; Other_Acts_There := False; SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Parse_Action_Table : constant Aux_PAT := Aux_PAT'("); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "-- STATE INDEX"); for I in Integer range 1 .. 
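-- Aux_Pat_Out makes two passes over the states.  The first pass, below,
-- writes a Parse_Action_Table segment for every state whose actions were not
-- merged by Optimise (Other_Act = False) and records the segment bounds in
-- State_Table.  The second pass writes one shared segment per chain of
-- merged states, following the Nact links so that every state in the chain
-- receives the same PAT_Index_Pair.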
Sparklalr_Memory.Get_Stat_No loop if not Patab (I).Other_Act then if I > 1 then SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); end if; State_Table (I).Lower := Curr_Pat_Index; SPARK.Ada.Text_IO.Put_File (File => F, Item => "----- "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => I, Width => 1, Base => 10); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Curr_Pat_Index, Width => 5, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " => "); Sparklalr_Parser.Action_Gen_Pa_Out (F, I, Curr_Pat_Index); State_Table (I).Upper := Curr_Pat_Index - 1; Other_Acts_There := True; end if; end loop; for I in Integer range 1 .. Sparklalr_Memory.Get_Stat_No loop if Patab (I).Check_Other_Act then if Other_Acts_There then SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); else Other_Acts_There := True; end if; J := I; SPARK.Ada.Text_IO.Put_File (File => F, Item => "----- "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => J, Width => 1, Base => 10); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); loop J := Patab (J).Nact; Patab (J).Check_Other_Act := False; SPARK.Ada.Text_IO.Put_File (File => F, Item => "----- "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => J, Width => 1, Base => 10); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); exit when Patab (J).Nact = J; end loop; end if; if Patab (I).Other_Act then J := I; State_Table (I).Lower := Curr_Pat_Index; SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Curr_Pat_Index, Width => 5, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " => "); Sparklalr_Parser.Action_Gen_Pa_Out (F, J, Curr_Pat_Index); State_Table (I).Upper := Curr_Pat_Index - 1; loop J := Patab (J).Nact; Patab (J).Other_Act := False; State_Table (J) := State_Table (I); exit when Patab (J).Nact = J; end loop; end if; end loop; SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ");"); end Aux_Pat_Out; procedure Main_Pat_Out (F : in out SPARK.Ada.Text_IO.File_Type) --# global in Sparklalr_Memory.Stat_No; --# in State_Table; --# derives F from *, --# Sparklalr_Memory.Stat_No, --# State_Table; is begin SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " State_Table : constant Main_PAT := Main_PAT'("); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "-- STATE"); for State_Var in Integer range 1 .. 
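-- Main_Pat_Out writes out State_Table as filled in by Aux_Pat_Out above:
-- each state is mapped to the PAT_Index_Pair that bounds its segment of
-- Parse_Action_Table.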
Sparklalr_Memory.Get_Stat_No - 1 loop SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => State_Var, Width => 5, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " => PAT_Index_Pair'("); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => State_Table (State_Var).Lower, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => ", "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => State_Table (State_Var).Upper, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "),"); end loop; SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Sparklalr_Memory.Get_Stat_No, Width => 5, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " => PAT_Index_Pair'("); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => State_Table (Sparklalr_Memory.Get_Stat_No).Lower, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => ", "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => State_Table (Sparklalr_Memory.Get_Stat_No).Upper, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "));"); end Main_Pat_Out; begin -- Pa_Out SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Default : constant SP_Symbols.SP_Terminal := SP_Symbols.SPDEFAULT;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_File (File => F, Item => " No_Of_PAT_Entries : constant Positive := "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Sparklalr_Parser.Get_Pat_Count, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ";"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " type PAT_Index is range 1 .. No_Of_PAT_Entries;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " type Sym_Action_Pair is record"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Term_Sym : SP_Symbols.SP_Terminal;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Parse_Action : SP_Parse_Act;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " end record;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " pragma PACK(Sym_Action_Pair);"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " type Aux_PAT is array (PAT_Index) of Sym_Action_Pair;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " type PAT_Index_Pair is record"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Lower, Upper : PAT_Index;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " end record;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " pragma PACK(PAT_Index_Pair);"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " type Main_PAT is array (SP_Productions.Valid_States) of PAT_Index_Pair;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Aux_Pat_Out (F); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Main_Pat_Out (F); end Pa_Out; procedure Pa_Out_Sp (F : in out SPARK.Ada.Text_IO.File_Type) --# global in Sparklalr_Memory.Max_Right; --# in Sparklalr_Memory.Prod_Sum; --# in Sparklalr_Memory.Stat_No; --# in Symbols_Dump.State; --# in out Patab; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Parser.State; --# in out State_Table; --# derives F from *, --# Patab, --# Sparklalr_Memory.Max_Right, --# Sparklalr_Memory.Prod_Sum, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# State_Table, --# Symbols_Dump.State & --# Patab, --# Sparklalr_Parser.State from *, --# Patab, --# 
Sparklalr_Memory.Stat_No & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Patab, --# Sparklalr_Memory.Prod_Sum, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# Symbols_Dump.State & --# State_Table from *, --# Patab, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State; is Max_Right_Power2, Packed_Symact_Max, Terminal_Count_P2, Non_Terminal_Count_P2, State_Count_P2, Prod_Count_P2, Pat_Index_P2 : Integer; procedure Aux_Pat_Out (F : in out SPARK.Ada.Text_IO.File_Type) --# global in Sparklalr_Memory.Prod_Sum; --# in Sparklalr_Memory.Stat_No; --# in Symbols_Dump.State; --# in out Patab; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Parser.State; --# in out State_Table; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Patab, --# Sparklalr_Memory.Prod_Sum, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# Symbols_Dump.State & --# Patab, --# Sparklalr_Parser.State from *, --# Patab, --# Sparklalr_Memory.Stat_No & --# State_Table from *, --# Patab, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State; is J : Integer; Other_Acts_There : Boolean; Curr_Pat_Index : Integer; begin Curr_Pat_Index := 1; Other_Acts_There := False; SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Parse_Action_Table : constant Aux_PAT := Aux_PAT'("); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "-- STATE INDEX"); for I in Integer range 1 .. Sparklalr_Memory.Get_Stat_No loop if not Patab (I).Other_Act then if I > 1 then SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); end if; State_Table (I).Lower := Curr_Pat_Index; SPARK.Ada.Text_IO.Put_File (File => F, Item => "----- "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => I, Width => 1, Base => 10); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Curr_Pat_Index, Width => 5, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " => "); Sparklalr_Parser.Action_Gen_Pa_Out_Sp (F, I, Curr_Pat_Index); State_Table (I).Upper := Curr_Pat_Index - 1; Other_Acts_There := True; end if; end loop; for I in Integer range 1 .. 
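-- Second pass of the packed variant: as in Pa_Out above, states whose
-- actions were merged share a single Parse_Action_Table segment; the chain
-- is walked through the Nact links and every member receives the same
-- index pair.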
Sparklalr_Memory.Get_Stat_No loop if Patab (I).Check_Other_Act then if Other_Acts_There then SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); else Other_Acts_There := True; end if; J := I; SPARK.Ada.Text_IO.Put_File (File => F, Item => "----- "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => J, Width => 1, Base => 10); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); loop J := Patab (J).Nact; Patab (J).Check_Other_Act := False; SPARK.Ada.Text_IO.Put_File (File => F, Item => "----- "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => J, Width => 1, Base => 10); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); exit when Patab (J).Nact = J; end loop; end if; if Patab (I).Other_Act then J := I; State_Table (I).Lower := Curr_Pat_Index; SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Curr_Pat_Index, Width => 5, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " => "); Sparklalr_Parser.Action_Gen_Pa_Out_Sp (F, J, Curr_Pat_Index); State_Table (I).Upper := Curr_Pat_Index - 1; loop J := Patab (J).Nact; Patab (J).Other_Act := False; State_Table (J) := State_Table (I); exit when Patab (J).Nact = J; end loop; end if; end loop; SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ");"); end Aux_Pat_Out; procedure Main_Pat_Out (F : in out SPARK.Ada.Text_IO.File_Type) --# global in Sparklalr_Memory.Stat_No; --# in State_Table; --# derives F from *, --# Sparklalr_Memory.Stat_No, --# State_Table; is begin SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " State_Table : constant Main_PAT := Main_PAT'("); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "-- STATE"); for State_Var in Integer range 1 .. Sparklalr_Memory.Get_Stat_No - 1 loop SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => State_Var, Width => 5, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " => Lower_PAT_Index * "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => State_Table (State_Var).Lower, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " + Upper_PAT_Index * "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => State_Table (State_Var).Upper, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); end loop; SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Sparklalr_Memory.Get_Stat_No, Width => 5, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " => Lower_PAT_Index * "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => State_Table (Sparklalr_Memory.Get_Stat_No).Lower, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " + Upper_PAT_Index * "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => State_Table (Sparklalr_Memory.Get_Stat_No).Upper, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ");"); end Main_Pat_Out; begin -- Pa_Out_Sp Terminal_Count_P2 := 0; while 2 ** Terminal_Count_P2 <= Symbols_Dump.Get_Nterms loop Terminal_Count_P2 := Terminal_Count_P2 + 1; end loop; Non_Terminal_Count_P2 := 0; while 2 ** Non_Terminal_Count_P2 <= Symbols_Dump.Get_Nnon_Terms loop Non_Terminal_Count_P2 := Non_Terminal_Count_P2 + 1; end loop; State_Count_P2 := 0; while 2 ** State_Count_P2 <= Sparklalr_Memory.Get_Stat_No loop State_Count_P2 := State_Count_P2 + 1; end loop; Prod_Count_P2 := 0; Max_Right_Power2 := 0; while 2 ** Max_Right_Power2 <= Sparklalr_Memory.Get_Max_Right loop Max_Right_Power2 := Max_Right_Power2 + 1; end loop; if ((2 + Terminal_Count_P2) + State_Count_P2) > ((((2 + Terminal_Count_P2) + Non_Terminal_Count_P2) + 
Max_Right_Power2) + Prod_Count_P2) then Packed_Symact_Max := (2 + Terminal_Count_P2) + State_Count_P2; else Packed_Symact_Max := (((2 + Terminal_Count_P2) + Non_Terminal_Count_P2) + Max_Right_Power2) + Prod_Count_P2; end if; Pat_Index_P2 := 0; while 2 ** Pat_Index_P2 <= Sparklalr_Parser.Get_Pat_Count loop Pat_Index_P2 := Pat_Index_P2 + 1; end loop; SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_File (File => F, Item => " No_Of_PAT_Entries : constant Positive := "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Sparklalr_Parser.Get_Pat_Count, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ";"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " type PAT_Index is range 1 .. No_Of_PAT_Entries;"); SPARK.Ada.Text_IO.Put_File (File => F, Item => " type Packed_Sym_Action_Pair is range 0 .. 2**"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Packed_Symact_Max, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "-1;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Act_Lim : constant Packed_Sym_Action_Pair := 2**2;"); SPARK.Ada.Text_IO.Put_File (File => F, Item => " Term_Sym_Lim : constant Packed_Sym_Action_Pair := 2**"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Terminal_Count_P2, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ";"); SPARK.Ada.Text_IO.Put_File (File => F, Item => " State_Lim : constant Packed_Sym_Action_Pair := 2**"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => State_Count_P2, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ";"); SPARK.Ada.Text_IO.Put_File (File => F, Item => " Symbol_Lim : constant Packed_Sym_Action_Pair := 2**"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Non_Terminal_Count_P2, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ";"); SPARK.Ada.Text_IO.Put_File (File => F, Item => " Red_By_Lim : constant Packed_Sym_Action_Pair := 2**"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Max_Right_Power2, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ";"); SPARK.Ada.Text_IO.Put_File (File => F, Item => " Prod_No_Lim : constant Packed_Sym_Action_Pair := 2**"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Prod_Count_P2, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ";"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " pragma Unreferenced (Prod_No_Lim);"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Term_Sym : constant Packed_Sym_Action_Pair := 1;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Act : constant Packed_Sym_Action_Pair := Term_Sym * Term_Sym_Lim;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " State : constant Packed_Sym_Action_Pair := Act * Act_Lim;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Symbol : constant Packed_Sym_Action_Pair := Act * Act_Lim;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Red_By : constant Packed_Sym_Action_Pair := Symbol * Symbol_Lim;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Prod_No : constant Packed_Sym_Action_Pair := 0;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " First_Non_Terminal : constant 
Packed_Sym_Action_Pair :="); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " SP_Symbols.SP_Non_Terminal'Pos (SP_Symbols.SP_Non_Terminal'First);"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " type Aux_PAT is array (PAT_Index) of Packed_Sym_Action_Pair;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_File (File => F, Item => " Default : constant Packed_Sym_Action_Pair :="); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " SP_Symbols.SP_Terminal'Pos (SP_Symbols.SPDEFAULT);"); SPARK.Ada.Text_IO.Put_File (File => F, Item => " Error_Act : constant Packed_Sym_Action_Pair :="); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Act * SP_Action_Kind'Pos (Error);"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " pragma Unreferenced (Error_Act);"); SPARK.Ada.Text_IO.Put_File (File => F, Item => " Shift_Act : constant Packed_Sym_Action_Pair :="); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Act * SP_Action_Kind'Pos (Shift);"); SPARK.Ada.Text_IO.Put_File (File => F, Item => " Reduce_Act : constant Packed_Sym_Action_Pair :="); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Act * SP_Action_Kind'Pos (Reduce);"); SPARK.Ada.Text_IO.Put_File (File => F, Item => " Accept_Act : constant Packed_Sym_Action_Pair :="); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Act * SP_Action_Kind'Pos (Accpt);"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_File (File => F, Item => " type Packed_PAT_Index_Pair is range 0 .. 2**"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => 2 * Pat_Index_P2, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "-1;"); SPARK.Ada.Text_IO.Put_File (File => F, Item => " PAT_Index_Size : constant Packed_PAT_Index_Pair := 2**"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Pat_Index_P2, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ";"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Lower_PAT_Index : constant Packed_PAT_Index_Pair := 1;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Upper_PAT_Index : constant Packed_PAT_Index_Pair := PAT_Index_Size;"); SPARK.Ada.Text_IO.Put_File (File => F, Item => " type Main_PAT is array (SP_Productions.Valid_States)"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " of Packed_PAT_Index_Pair;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Aux_Pat_Out (F); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Main_Pat_Out (F); end Pa_Out_Sp; -- These procedures calculate the SPARK parser table sizes prior to output procedure Calc_Table_Sizes --# global in Sparklalr_Memory.Stat_No; --# in Symbols_Dump.State; --# in out Patab; --# in out Sparklalr_Goto.State; --# in out Sparklalr_Parser.State; --# out Pat_Seg_Count; --# derives Patab, --# Sparklalr_Parser.State from *, --# Patab, --# Sparklalr_Memory.Stat_No & --# Pat_Seg_Count from Patab, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State & --# Sparklalr_Goto.State from *, --# Symbols_Dump.State; is J, Seg_Count : Integer; begin Pat_Seg_Count := 0; Sparklalr_Parser.Init_Pat_Count; for I in Integer range 1 .. 
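-- Calc_Table_Sizes performs a dry run of the action generation: unmerged
-- states are processed in this first loop and each merged chain once in the
-- loop that follows, so that Sparklalr_Parser's Pat_Count ends up as the
-- total number of parse-action entries and Pat_Seg_Count as the size of the
-- largest segment contributed by a single state.  Check_Other_Act is then
-- restored from Other_Act so that the later output passes see the same merge
-- information, and Go_Out is called for every non-terminal after the first
-- to size the goto tables.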
Sparklalr_Memory.Get_Stat_No loop Seg_Count := Sparklalr_Parser.Get_Pat_Count; if not Patab (I).Check_Other_Act then Sparklalr_Parser.Action_Gen (I); end if; if Sparklalr_Parser.Get_Pat_Count - Seg_Count > Pat_Seg_Count then Pat_Seg_Count := Sparklalr_Parser.Get_Pat_Count - Seg_Count; end if; end loop; for I in Integer range 1 .. Sparklalr_Memory.Get_Stat_No loop Seg_Count := Sparklalr_Parser.Get_Pat_Count; if Patab (I).Check_Other_Act then J := I; Sparklalr_Parser.Action_Gen (J); loop J := Patab (J).Nact; Patab (J).Check_Other_Act := False; exit when Patab (J).Nact = J; end loop; end if; if Sparklalr_Parser.Get_Pat_Count - Seg_Count > Pat_Seg_Count then Pat_Seg_Count := Sparklalr_Parser.Get_Pat_Count - Seg_Count; end if; end loop; for I in Integer range 1 .. Sparklalr_Memory.Get_Stat_No loop Patab (I).Check_Other_Act := Patab (I).Other_Act; end loop; for I in Integer range 2 .. Symbols_Dump.Get_Nnon_Terms loop Sparklalr_Goto.Go_Out (I); end loop; end Calc_Table_Sizes; procedure Optimise -- SET UP THE PATAB DATA STRUCTURE -- SO THAT OPTIMISATION OF THE PASCAL PARSING ACTION FUNCTION IS -- CARRIED OUT --# global in Command_Line_Options.State; --# in Sparklalr_Memory.Stat_No; --# in Sparklalr_Parser.State; --# in out Patab; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives Patab from *, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State, --# Patab, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State; is I : Integer; begin for Istart in Integer range 1 .. Sparklalr_Memory.Get_Stat_No loop I := Istart; if not Patab (I).Other_Act then for J in Integer range I + 1 .. Sparklalr_Memory.Get_Stat_No loop if Patab (J).Nact = J then if Sparklalr_Parser.Action_Equal (Sparklalr_Parser.Get_Pa_List (I), Sparklalr_Parser.Get_Pa_List (J)) then --# accept F, 41, "Stable expression here expected and OK"; if Command_Line_Options.Get_Debug_Level (6) then SPARK.Ada.Text_IO.Put_Output (Item => " MERGING ACTIONS "); SPARK_Ada_Integer_Text_IO.Put_Output (Item => I, Width => 3, Base => 10); SPARK.Ada.Text_IO.Put_Output (Item => " AND "); SPARK_Ada_Integer_Text_IO.Put_Output (Item => J, Width => 3, Base => 10); SPARK.Ada.Text_IO.New_Line_Output (Spacing => 1); end if; --# end accept; Patab (I).Other_Act := True; Patab (I).Check_Other_Act := True; Patab (I).Nact := J; Patab (J).Other_Act := True; Patab (J).Check_Other_Act := True; I := J; end if; end if; end loop; end if; end loop; end Optimise; procedure Opt_Stats (Std_Out : in Boolean; F : in out SPARK.Ada.Text_IO.File_Type) --# global in Patab; --# in Sparklalr_Memory.Stat_No; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Patab, --# Sparklalr_Memory.Stat_No, --# Std_Out; is J : Integer; begin J := 0; for I in Integer range 1 .. 
Sparklalr_Memory.Get_Stat_No loop if Patab (I).Other_Act then J := J + 1; end if; end loop; Sparklalr_Common.Put_Integer_File_Output (Std_Out => Std_Out, File => F, Item => J, Width => 6); Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => " PARSING ACTIONS OPTIMISED BY CASE LABEL MERGE"); end Opt_Stats; function Get_Pat_Seg_Count return Integer --# global in Pat_Seg_Count; is begin return Pat_Seg_Count; end Get_Pat_Seg_Count; end Sparklalr_Patab; spark-2012.0.deb/sparklalr/sparklalr_input.ads0000644000175000017500000002423711753202335020346 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SPARK.Ada.Text_IO; with Sparklalr_Common; with Sparklalr_Error; with Sparklalr_Symbol; use type Sparklalr_Symbol.Symbol; --# inherit Command_Line_Options, --# Fatal, --# SPARK.Ada.Command_Line, --# SPARK.Ada.Strings.Unbounded, --# SPARK.Ada.Text_IO, --# SPARK.Ada.Text_IO.Unbounded_String, --# Sparklalr_Char_Class, --# Sparklalr_Common, --# Sparklalr_Error, --# Sparklalr_Level, --# Sparklalr_Memory, --# Sparklalr_Symbol, --# SPARK_Ada_Integer_Text_IO, --# Symbols_Dump; package Sparklalr_Input --# own State; is type Symbol_Set_Type is private; Symbol_Set_False_Const : constant Symbol_Set_Type; procedure Initialise (Signpost : out Symbol_Set_Type); --# global in Command_Line_Options.State; --# in out SPARK.Ada.Command_Line.State; --# in out SPARK.Ada.Text_IO.The_Standard_Error; --# out Sparklalr_Char_Class.Charmap; --# out Sparklalr_Error.State; --# out Sparklalr_Level.State; --# out State; --# derives Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Level.State from & --# SPARK.Ada.Command_Line.State, --# SPARK.Ada.Text_IO.The_Standard_Error from *, --# Command_Line_Options.State & --# Sparklalr_Error.State, --# State from Command_Line_Options.State; procedure Finalize (F : in out SPARK.Ada.Text_IO.File_Type); --# global in State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output, --# Sparklalr_Error.State from *, --# State; function Signpost_To_Symbol_Set_Type (Signpost : in Symbol_Set_Type) return Symbol_Set_Type; procedure Scan (F : in out SPARK.Ada.Text_IO.File_Type; Col : in out Sparklalr_Error.Err_Col_T; Symb : out Sparklalr_Symbol.Symbol; Token : out Sparklalr_Common.Id_Name); --# global in Sparklalr_Char_Class.Charmap; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# in out State; --# derives Col, --# Sparklalr_Error.State, --# State, --# Symb, --# Token from Col, --# Sparklalr_Char_Class.Charmap, --# 
Sparklalr_Error.State, --# State & --# F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Col, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# State; procedure Skipto (F : in out SPARK.Ada.Text_IO.File_Type; Arg_Set : in Symbol_Set_Type; Col : in out Sparklalr_Error.Err_Col_T; Symb : in out Sparklalr_Symbol.Symbol); --# global in Sparklalr_Char_Class.Charmap; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# in out State; --# derives Col, --# F, --# SPARK.Ada.Text_IO.The_Standard_Output, --# Sparklalr_Error.State, --# State, --# Symb from *, --# Arg_Set, --# Col, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# State, --# Symb; procedure Copy_Action (F, Echo : in out SPARK.Ada.Text_IO.File_Type; Signpost : in Symbol_Set_Type; Col : in out Sparklalr_Error.Err_Col_T; Symb : in out Sparklalr_Symbol.Symbol); --# global in Sparklalr_Char_Class.Charmap; --# in Sparklalr_Memory.Prod_No; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# in out Sparklalr_Level.State; --# in out State; --# derives Col, --# Echo, --# SPARK.Ada.Text_IO.The_Standard_Output, --# Sparklalr_Error.State, --# State, --# Symb from *, --# Col, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# State, --# Symb & --# F, --# Sparklalr_Level.State from *, --# Col, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Sparklalr_Memory.Prod_No, --# State, --# Symb; procedure Skip_Action (F : in out SPARK.Ada.Text_IO.File_Type; Signpost : in Symbol_Set_Type; Col : in out Sparklalr_Error.Err_Col_T; Symb : in out Sparklalr_Symbol.Symbol); --# global in Sparklalr_Char_Class.Charmap; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# in out State; --# derives Col, --# F, --# SPARK.Ada.Text_IO.The_Standard_Output, --# Sparklalr_Error.State, --# State, --# Symb from *, --# Col, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# State, --# Symb; procedure Proc_Term (F : in out SPARK.Ada.Text_IO.File_Type; Gram_Rules : out Boolean; Col : in out Sparklalr_Error.Err_Col_T; Signpost : in Symbol_Set_Type; Symb : out Sparklalr_Symbol.Symbol); --# global in Sparklalr_Char_Class.Charmap; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# in out Sparklalr_Level.State; --# in out State; --# in out Symbols_Dump.State; --# derives Col, --# F, --# SPARK.Ada.Text_IO.The_Standard_Output, --# Sparklalr_Error.State, --# Sparklalr_Level.State, --# State, --# Symbols_Dump.State from *, --# Col, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# State, --# Symbols_Dump.State & --# Gram_Rules from & --# Symb from Col, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# State, --# Symbols_Dump.State; -- Setter procedure Set_Symbol_Set (Symbol_Set : in out Symbol_Set_Type; Symb : in Sparklalr_Symbol.Symbol; Value : in Boolean); --# derives Symbol_Set from *, --# Symb, --# Value; -- Getter function Get_Symbol_Set (Symbol_Set : in Symbol_Set_Type; Symb : in Sparklalr_Symbol.Symbol) return Boolean; private type Symbol_Set_Type is array (Sparklalr_Symbol.Symbol) of Boolean; Symbol_Set_False_Const : constant Symbol_Set_Type := Symbol_Set_Type'(others => False); end Sparklalr_Input; spark-2012.0.deb/sparklalr/sparklalr_goto.ads0000644000175000017500000001370411753202335020154 0ustar 
eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SPARK.Ada.Text_IO; with Sparklalr_Common; --# inherit Command_Line_Options, --# SPARK.Ada.Text_IO, --# Sparklalr_Common, --# Sparklalr_Error, --# Sparklalr_Memory, --# SPARK_Ada_Integer_Text_IO, --# Symbols_Dump; package Sparklalr_Goto --# own State; is type Next_T is private; Next_False_Const : constant Next_T; procedure Initialise; --# global out State; --# derives State from ; function Goto_Search (State_Index, Non_Term_Index : in Integer) return Integer; --# global in State; procedure Go_Out (I : in Integer); --# global in out State; --# derives State from *, --# I; procedure Dump_Goto (F : in out SPARK.Ada.Text_IO.File_Type); --# global in Sparklalr_Memory.Stat_No; --# in State; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Sparklalr_Memory.Stat_No, --# State, --# Symbols_Dump.State; procedure Goto_Gen (F : in out SPARK.Ada.Text_IO.File_Type; Insertion, State_Index, Non_Term_Index : in Integer; Call_Pa_Insert : out Boolean); --# global in Command_Line_Options.State; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# in out State; --# derives Call_Pa_Insert from Non_Term_Index & --# F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State, --# Insertion, --# Non_Term_Index, --# State, --# State_Index, --# Symbols_Dump.State & --# Sparklalr_Error.State, --# State from *, --# Insertion, --# Non_Term_Index, --# State, --# State_Index; procedure Goto_Out (F : in out SPARK.Ada.Text_IO.File_Type); --# global in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out State; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output, --# State from *, --# State, --# Symbols_Dump.State; procedure Goto_Out_Sp (F : in out SPARK.Ada.Text_IO.File_Type); --# global in Sparklalr_Memory.Stat_No; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out State; --# derives F from *, --# Sparklalr_Memory.Stat_No, --# State, --# Symbols_Dump.State & --# SPARK.Ada.Text_IO.The_Standard_Output, --# State from *, --# State, --# Symbols_Dump.State; procedure Goto_Stats (Std_Out : in Boolean; F : in out SPARK.Ada.Text_IO.File_Type); --# global in State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# State, --# Std_Out; procedure Init_Goto_List (I : in Sparklalr_Common.Non_Term_Range); --# global in out State; --# derives State from *, 
--# I; function Get_Goto_List_Count return Integer; --# global in State; function Get_Next (Next : in Next_T; I : in Sparklalr_Common.Term_Range) return Boolean; procedure Set_Next (Next : in out Next_T; I : in Sparklalr_Common.Term_Range; Value : in Boolean); --# derives Next from *, --# I, --# Value; private type Next_T is array (Sparklalr_Common.Term_Range) of Boolean; Next_False_Const : constant Next_T := Next_T'(others => False); end Sparklalr_Goto; spark-2012.0.deb/sparklalr/sparklalr_symbol.ads0000644000175000017500000000332411753202335020506 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= package Sparklalr_Symbol is type Symbol is ( Nullsymb, Lit, Ident, Number, Equals, Uparrow, Scolon, Colon, Comma, Errsym, Ampersand, Ampmark, Left, Right, Non, Prec, Ampterm, Ampgram, Ampprog, Amplabel, Ampconst, Amptype, Ampvar, Ampfunc, Ampmain, Endfile); end Sparklalr_Symbol; spark-2012.0.deb/sparklalr/sparklalr_patab.ads0000644000175000017500000001404411753202335020271 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with SPARK.Ada.Text_IO; --# inherit Command_Line_Options, --# SPARK.Ada.Text_IO, --# Sparklalr_Common, --# Sparklalr_Goto, --# Sparklalr_Memory, --# Sparklalr_Parser, --# SPARK_Ada_Integer_Text_IO, --# Symbols_Dump; package Sparklalr_Patab --# own State; is procedure Initialise; --# global out State; --# derives State from ; procedure Pa_Out (F : in out SPARK.Ada.Text_IO.File_Type); --# global in Sparklalr_Memory.Prod_Sum; --# in Sparklalr_Memory.Stat_No; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Parser.State; --# in out State; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Sparklalr_Memory.Prod_Sum, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# State, --# Symbols_Dump.State & --# Sparklalr_Parser.State, --# State from Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# State; procedure Pa_Out_Sp (F : in out SPARK.Ada.Text_IO.File_Type); --# global in Sparklalr_Memory.Max_Right; --# in Sparklalr_Memory.Prod_Sum; --# in Sparklalr_Memory.Stat_No; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Parser.State; --# in out State; --# derives F from *, --# Sparklalr_Memory.Max_Right, --# Sparklalr_Memory.Prod_Sum, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# State, --# Symbols_Dump.State & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Sparklalr_Memory.Prod_Sum, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# State, --# Symbols_Dump.State & --# Sparklalr_Parser.State, --# State from Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# State; procedure Calc_Table_Sizes; --# global in Sparklalr_Memory.Stat_No; --# in Symbols_Dump.State; --# in out Sparklalr_Goto.State; --# in out Sparklalr_Parser.State; --# in out State; --# derives Sparklalr_Goto.State from *, --# Symbols_Dump.State & --# Sparklalr_Parser.State, --# State from Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# State; procedure Optimise; --# global in Command_Line_Options.State; --# in Sparklalr_Memory.Stat_No; --# in Sparklalr_Parser.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out State; --# derives SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# State & --# State from *, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State; procedure Opt_Stats (Std_Out : in Boolean; F : in out SPARK.Ada.Text_IO.File_Type); --# global in Sparklalr_Memory.Stat_No; --# in State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Sparklalr_Memory.Stat_No, --# State, --# Std_Out; -- Getter function Get_Pat_Seg_Count return Integer; --# global in State; end Sparklalr_Patab; spark-2012.0.deb/sparklalr/symbols_dump.ads0000644000175000017500000001637611753202335017656 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SPARK.Ada.Text_IO; with Sparklalr_Common; with Sparklalr_Error; --# inherit Command_Line_Options, --# SPARK.Ada.Command_Line, --# SPARK.Ada.Strings.Unbounded, --# SPARK.Ada.Text_IO, --# SPARK.Ada.Text_IO.Unbounded_String, --# Sparklalr_Common, --# Sparklalr_Error, --# Sparklalr_Level, --# SPARK_Ada_Integer_Text_IO; package Symbols_Dump --# own State; is procedure Initialise; --# global out State; --# derives State from ; procedure Define (Tnt : in Boolean; F : in out SPARK.Ada.Text_IO.File_Type; Gram_Rules : in Boolean; Token : in Sparklalr_Common.Id_Name; Col : in Sparklalr_Error.Err_Col_T; Result_Define : out Integer); --# global in out Sparklalr_Error.State; --# in out Sparklalr_Level.State; --# in out State; --# derives F from *, --# State, --# Tnt & --# Result_Define from State, --# Tnt & --# Sparklalr_Error.State from *, --# Col, --# State, --# Tnt & --# Sparklalr_Level.State from *, --# Gram_Rules, --# State, --# Tnt & --# State from *, --# Tnt, --# Token; procedure Print_Sym (F : in out SPARK.Ada.Text_IO.File_Type; Sym : in Sparklalr_Common.Sym_Range; Posn : in out Integer; Tab : in Integer; Comm : in Boolean); --# global in State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Comm, --# Posn, --# State, --# Sym, --# Tab & --# Posn from *, --# State, --# Sym, --# Tab; procedure Print_String_Sym (F : in out SPARK.Ada.Text_IO.File_Type; String_Var : in Sparklalr_Common.Id_Name; Sym : in Sparklalr_Common.Sym_Range; Posn : in out Integer; Tab : in Integer; Comm : in Boolean); --# global in State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Comm, --# Posn, --# State, --# String_Var, --# Sym, --# Tab & --# Posn from *, --# State, --# String_Var, --# Sym, --# Tab; procedure Symbol_Strings_Out; --# global in Command_Line_Options.State; --# in State; --# in out SPARK.Ada.Command_Line.State; --# in out SPARK.Ada.Text_IO.The_Standard_Error; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives SPARK.Ada.Command_Line.State, --# SPARK.Ada.Text_IO.The_Standard_Error from *, --# Command_Line_Options.State, --# State & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# State; procedure Find (Tnt : in Boolean; F : in out SPARK.Ada.Text_IO.File_Type; Gram_Rules : in Boolean; Token : in Sparklalr_Common.Id_Name; Col : in Sparklalr_Error.Err_Col_T; Result_Find : out Integer); --# global in out Sparklalr_Error.State; --# in out Sparklalr_Level.State; --# in out State; --# derives F from *, --# State, --# Tnt, --# Token & --# Result_Find, --# State from State, --# Tnt, --# Token & --# Sparklalr_Error.State from *, --# Col, --# State, --# Tnt, --# Token & --# Sparklalr_Level.State from *, --# Gram_Rules, --# State, --# Tnt, --# Token; procedure Symbols_Package_Out (F : in out SPARK.Ada.Text_IO.File_Type); --# global in State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# 
SPARK.Ada.Text_IO.The_Standard_Output from *, --# State; -- Getter function Get_Nterms return Integer; --# global in State; function Get_Nnon_Terms return Integer; --# global in State; function Get_Term_Set (I : in Sparklalr_Common.Term_Range) return Sparklalr_Common.Id_Name; --# global in State; function Get_Nterm_Set (I : in Sparklalr_Common.Non_Term_Range) return Sparklalr_Common.Id_Name; --# global in State; end Symbols_Dump; spark-2012.0.deb/sparklalr/ees_sym.adb0000644000175000017500000010105111753202335016545 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Command_Line_Options; with SPARK_Ada_Integer_Text_IO; with Sparklalr_Common; with Sparklalr_Memory; with Sparklalr_Memory.Dump; with Sparklalr_Parser; with Symbols_Dump; package body Ees_Sym --# own State is Ees_Max, --# Essentials, --# Exp_Symbol_Count; is type Ess_Sym_Set is array (Sparklalr_Common.Sym_Range) of Boolean; type Ees_T is record Start_Index, T_Like_Count, Nt_Count : Integer; Entry_Symbol : Sparklalr_Common.Sym_Range; Expected_Ess_Symbols : Ess_Sym_Set; Other_State : Boolean; Base_State : Sparklalr_Common.State_Range; end record; type Ess_Sym_Tab is array (Sparklalr_Common.State_Range) of Ees_T; Essentials : Ess_Sym_Tab; Ees_Max, Exp_Symbol_Count : Integer; -- Local procedures/functions procedure Non_Empty_Expected (Ees_Set : in Ess_Sym_Set; Result : out Boolean) --# derives Result from Ees_Set; is C : Integer; Searching : Boolean; begin Searching := True; C := -1; while Searching and then (C <= Sparklalr_Common.Max_Sym) loop Searching := not Ees_Set (C); C := C + 1; end loop; Result := not Searching; end Non_Empty_Expected; -- End local procedures/functions procedure Gen_Essentials --# global in Sparklalr_Memory.Stat_No; --# in Sparklalr_Parser.State; --# in Symbols_Dump.State; --# in out Sparklalr_Memory.Dump.State; --# out Ees_Max; --# out Essentials; --# out Exp_Symbol_Count; --# derives Ees_Max, --# Essentials from Sparklalr_Memory.Dump.State, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# Symbols_Dump.State & --# Exp_Symbol_Count from & --# Sparklalr_Memory.Dump.State from *, --# Sparklalr_Memory.Stat_No, --# Symbols_Dump.State; is Count, Ees_Start_Index : Sparklalr_Common.State_Range; S, T : Sparklalr_Memory.Dump.Pt_Memory; Temp_Ees : Ees_T; Insert_State : Sparklalr_Common.State_Range; procedure Gen_Ees (Current_State : in Sparklalr_Common.State_Range; S, T : in Sparklalr_Memory.Dump.Pt_Memory; Ees_Ent : in out Ees_T) --# global in Sparklalr_Parser.State; --# in Symbols_Dump.State; --# in out Sparklalr_Memory.Dump.State; --# derives Ees_Ent from *, --# Current_State, --# 
S, --# Sparklalr_Memory.Dump.State, --# Sparklalr_Parser.State, --# Symbols_Dump.State, --# T & --# Sparklalr_Memory.Dump.State from *, --# Symbols_Dump.State; is P, Q : Sparklalr_Memory.Dump.Pt_Memory; C : Integer; Dot_Out : Boolean; Entry_Sym_Done : Boolean; S_Tmp : Sparklalr_Memory.Dump.Pt_Memory; begin S_Tmp := S; Sparklalr_Memory.Dump.Gen_Terminal_Like; Ees_Ent.T_Like_Count := 0; Ees_Ent.Nt_Count := 0; Ees_Ent.Expected_Ess_Symbols := Ess_Sym_Set'(others => False); Entry_Sym_Done := False; while (S_Tmp /= T) and then (S_Tmp /= Sparklalr_Memory.Dump.Null_Pt_Memory) loop Q := Sparklalr_Memory.Dump.Get_Dot (Sparklalr_Memory.Dump.Get_Item (S_Tmp)); P := Sparklalr_Memory.Dump.Prodstart (Q); if (Sparklalr_Memory.Dump.Get_Next (P) /= Q) or else (Sparklalr_Memory.Dump.Get_Contents (Sparklalr_Memory.Dump.Get_Next (P)) = Sparklalr_Common.Nt_Base + 2) then Dot_Out := False; while (Sparklalr_Memory.Dump.Get_Contents (P) > 0) and then (not Dot_Out) loop if Sparklalr_Memory.Dump.Get_Next (P) = Q then Dot_Out := True; C := Sparklalr_Memory.Dump.Get_Contents (P); if not Entry_Sym_Done then Entry_Sym_Done := True; Ees_Ent.Entry_Symbol := C; end if; end if; P := Sparklalr_Memory.Dump.Get_Next (P); end loop; if Dot_Out and then (not Sparklalr_Parser.Get_Reduce_State (Current_State)) then C := Sparklalr_Memory.Dump.Get_Contents (Q); if C > 0 then if not Sparklalr_Memory.Dump.Get_Terminal_Like (C) and then not Ees_Ent.Expected_Ess_Symbols (C) then Ees_Ent.Expected_Ess_Symbols (C) := True; Ees_Ent.Nt_Count := Ees_Ent.Nt_Count + 1; else if Sparklalr_Memory.Dump.Get_Terminal_Like (C) and then not Ees_Ent.Expected_Ess_Symbols (C) then Ees_Ent.Expected_Ess_Symbols (C) := True; Ees_Ent.T_Like_Count := Ees_Ent.T_Like_Count + 1; end if; end if; end if; end if; end if; S_Tmp := Sparklalr_Memory.Dump.Get_Next (S_Tmp); end loop; end Gen_Ees; procedure Insert_Ees (Curr_State : in Sparklalr_Common.State_Range; In_Ees : in Ees_T; Insert_State : out Sparklalr_Common.State_Range) --# global in out Essentials; --# derives Essentials, --# Insert_State from Curr_State, --# Essentials, --# In_Ees; is S, C : Integer; Searching, Equal_Ees : Boolean; begin Searching := True; Insert_State := 0; S := 1; while Searching and then (S < Curr_State) loop if (In_Ees.T_Like_Count = Essentials (S).T_Like_Count) and then (In_Ees.Nt_Count = Essentials (S).Nt_Count) then C := -1; Equal_Ees := True; while Equal_Ees and then (C <= Sparklalr_Common.Max_Sym) loop if In_Ees.Expected_Ess_Symbols (C) /= Essentials (S).Expected_Ess_Symbols (C) then Equal_Ees := False; else C := C + 1; end if; end loop; if Equal_Ees then Essentials (S).Other_State := True; Insert_State := S; Searching := False; else S := S + 1; end if; else S := S + 1; end if; end loop; end Insert_Ees; begin Essentials := Ess_Sym_Tab' (others => Ees_T'(Start_Index => 0, T_Like_Count => 0, Nt_Count => 0, Entry_Symbol => -1, Expected_Ess_Symbols => Ess_Sym_Set'(others => False), Other_State => False, Base_State => 0)); Exp_Symbol_Count := 0; Ees_Max := 0; Ees_Start_Index := 1; Temp_Ees := Ees_T' (Start_Index => 0, T_Like_Count => 0, Nt_Count => 0, Entry_Symbol => -1, Expected_Ess_Symbols => Ess_Sym_Set'(others => False), Other_State => False, Base_State => 0); for I in Integer range 1 .. 
Sparklalr_Memory.Get_Stat_No loop S := Sparklalr_Memory.Dump.Get_State (I); T := Sparklalr_Memory.Dump.Get_State (I + 1); Gen_Ees (I, S, T, Temp_Ees); Insert_Ees (I, Temp_Ees, Insert_State); if Insert_State = 0 then Temp_Ees.Start_Index := Ees_Start_Index; Temp_Ees.Base_State := 0; Count := Temp_Ees.T_Like_Count + Temp_Ees.Nt_Count; Ees_Start_Index := Ees_Start_Index + Count; if Count > Ees_Max then Ees_Max := Count; end if; else Temp_Ees.Start_Index := Essentials (Insert_State).Start_Index; Temp_Ees.Base_State := Insert_State; end if; Temp_Ees.Other_State := False; Essentials (I) := Temp_Ees; end loop; end Gen_Essentials; procedure Out_Essentials --# global in Essentials; --# in Sparklalr_Memory.Stat_No; --# in out Exp_Symbol_Count; --# derives Exp_Symbol_Count from *, --# Essentials, --# Sparklalr_Memory.Stat_No; is Result_Non_Empty_Expected : Boolean; procedure Out_Ees (Ees_Set : in Ess_Sym_Set) --# global in out Exp_Symbol_Count; --# derives Exp_Symbol_Count from *, --# Ees_Set; is begin for C in Sparklalr_Common.Sym_Range loop if C <= Sparklalr_Common.Nt_Base then if Ees_Set (C) then Exp_Symbol_Count := Exp_Symbol_Count + 1; end if; end if; end loop; for C in Sparklalr_Common.Sym_Range loop if C > Sparklalr_Common.Nt_Base then if Ees_Set (C) then Exp_Symbol_Count := Exp_Symbol_Count + 1; end if; end if; end loop; end Out_Ees; begin for I in Integer range 1 .. Sparklalr_Memory.Get_Stat_No loop Non_Empty_Expected (Essentials (I).Expected_Ess_Symbols, Result_Non_Empty_Expected); if (Essentials (I).Base_State = 0) and then Result_Non_Empty_Expected then Out_Ees (Essentials (I).Expected_Ess_Symbols); end if; end loop; end Out_Essentials; procedure Sp_Exp_Out (F : in out SPARK.Ada.Text_IO.File_Type) --# global in Command_Line_Options.State; --# in Ees_Max; --# in Essentials; --# in Exp_Symbol_Count; --# in Sparklalr_Memory.Stat_No; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F from *, --# Command_Line_Options.State, --# Ees_Max, --# Essentials, --# Exp_Symbol_Count, --# Sparklalr_Memory.Stat_No, --# Symbols_Dump.State & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Essentials, --# Sparklalr_Memory.Stat_No, --# Symbols_Dump.State; is Comma_Required : Boolean; No_Exp_Sym_P2, Ees_Max_P2 : Integer; Result_Non_Empty_Expected : Boolean; procedure Out_Ees (F : in out SPARK.Ada.Text_IO.File_Type; Ees_Set : in Ess_Sym_Set; ST : in Sparklalr_Common.State_Range) --# global in Essentials; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F from *, --# Ees_Set, --# Essentials, --# ST, --# Symbols_Dump.State & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Ees_Set, --# Symbols_Dump.State; is Index : Integer; Posn : Integer; Comma_Required : Boolean; begin Comma_Required := False; Posn := 10; Index := Essentials (ST).Start_Index; for C in Sparklalr_Common.Sym_Range loop if C <= Sparklalr_Common.Nt_Base then if Ees_Set (C) then if Comma_Required then SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Index, Width => 5, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " =>"); else Comma_Required := True; end if; Symbols_Dump.Print_String_Sym (F, Sparklalr_Common.Sp_Symbol_Str, C, Posn, 10, False); Index := Index + 1; end if; end if; end loop; for C in Sparklalr_Common.Sym_Range loop if C > Sparklalr_Common.Nt_Base then if Ees_Set (C) then if Comma_Required then SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); 
SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Index, Width => 5, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " =>"); else Comma_Required := True; end if; Symbols_Dump.Print_String_Sym (F, Sparklalr_Common.Sp_Symbol_Str, C, Posn, 10, False); Index := Index + 1; end if; end if; end loop; end Out_Ees; begin -- Sp_Exp_Out SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "with SP_Productions;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "with SP_Symbols;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "use type SP_Productions.SP_State;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "--# inherit SP_Productions,"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "--# SP_Symbols;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "package SP_Expected_Symbols is"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_File (File => F, Item => " Max_Ess_Symbol : constant Natural := "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Ees_Max, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ";"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " type SP_Ess_Sym_Range is range 0 .. Max_Ess_Symbol;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " type SP_Exp_Sym_List is array (SP_Ess_Sym_Range) of SP_Symbols.SP_Symbol;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " procedure Get_Expected_Symbols"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " (Error_State : in SP_Productions.SP_State;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " No_Of_Terminals : out SP_Ess_Sym_Range;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Terminal_List : out SP_Exp_Sym_List;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " No_Of_Non_Terminals : out SP_Ess_Sym_Range;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Non_Terminal_List : out SP_Exp_Sym_List);"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " --# derives Non_Terminal_List,"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " --# No_Of_Non_Terminals,"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " --# No_Of_Terminals,"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " --# Terminal_List from Error_State;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "end SP_Expected_Symbols;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "package body SP_Expected_Symbols is"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_File (File => F, Item => " No_Of_Exp_Symbols : constant Natural := "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Exp_Symbol_Count, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ";"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " type Exp_Sym_Range is range 1 .. 
No_Of_Exp_Symbols;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " type Exp_Symbol_List is array (Exp_Sym_Range) of SP_Symbols.SP_Symbol;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); if Command_Line_Options.Get_Self_Pack then No_Exp_Sym_P2 := 0; while 2 ** No_Exp_Sym_P2 <= Exp_Symbol_Count loop No_Exp_Sym_P2 := No_Exp_Sym_P2 + 1; end loop; Ees_Max_P2 := 0; while 2 ** Ees_Max_P2 <= Ees_Max loop Ees_Max_P2 := Ees_Max_P2 + 1; end loop; SPARK.Ada.Text_IO.Put_File (File => F, Item => " type Ess_Symbol_Entry is range 0 .. 2**"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => No_Exp_Sym_P2 + 2 * Ees_Max_P2, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "-1;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_File (File => F, Item => " Index_Lim : constant Ess_Symbol_Entry := 2**"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => No_Exp_Sym_P2, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ";"); SPARK.Ada.Text_IO.Put_File (File => F, Item => " Term_Lim : constant Ess_Symbol_Entry := 2**"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Ees_Max_P2, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ";"); SPARK.Ada.Text_IO.Put_File (File => F, Item => " Non_Term_Lim : constant Ess_Symbol_Entry := 2**"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => 2 * Ees_Max_P2, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ";"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Index : constant Ess_Symbol_Entry := 1;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " N_Terminals : constant Ess_Symbol_Entry := Index * Index_Lim;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " N_Non_Terminals : constant Ess_Symbol_Entry := N_Terminals * Term_Lim;"); else SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " type Ess_Symbol_Entry is record"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Index : Exp_Sym_Range;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " No_Of_Terminals, No_Of_Non_Terminals : SP_Ess_Sym_Range;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " end record;"); end if; SPARK.Ada.Text_IO.Put_File (File => F, Item => " type Exp_Ess_Symbols is array (SP_Productions.Valid_States) of"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Ess_Symbol_Entry;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Comma_Required := False; SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Essential_Symbols : constant Exp_Symbol_List := Exp_Symbol_List'("); for I in Integer range 1 .. Sparklalr_Memory.Get_Stat_No loop Non_Empty_Expected (Essentials (I).Expected_Ess_Symbols, Result_Non_Empty_Expected); if (Essentials (I).Base_State = 0) and then Result_Non_Empty_Expected then if Comma_Required then SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); end if; Comma_Required := True; if Essentials (I).Other_State then for J in Integer range I + 1 .. 
Sparklalr_Memory.Get_Stat_No loop if Essentials (J).Base_State = I then SPARK.Ada.Text_IO.Put_File (File => F, Item => "--"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => J, Width => 5, Base => 10); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); end if; end loop; end if; SPARK.Ada.Text_IO.Put_File (File => F, Item => "--"); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => I, Width => 5, Base => 10); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Essentials (I).Start_Index, Width => 5, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " =>"); Out_Ees (F, Essentials (I).Expected_Ess_Symbols, I); end if; end loop; SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ");"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Expected_Symbols : constant Exp_Ess_Symbols := Exp_Ess_Symbols'("); if Command_Line_Options.Get_Self_Pack then for I in Integer range 1 .. Sparklalr_Memory.Get_Stat_No loop SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => I, Width => 5, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " => (Index * "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Essentials (I).Start_Index, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " + "); SPARK.Ada.Text_IO.Put_File (File => F, Item => "N_Terminals * "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Essentials (I).T_Like_Count, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => ") + "); SPARK.Ada.Text_IO.Put_File (File => F, Item => "N_Non_Terminals * "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Essentials (I).Nt_Count, Width => 1, Base => 10); if I < Sparklalr_Memory.Get_Stat_No then SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); else SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ");"); end if; end loop; else for I in Integer range 1 .. 
Sparklalr_Memory.Get_Stat_No loop SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => I, Width => 5, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " => Ess_Symbol_Entry'("); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Essentials (I).Start_Index, Width => 4, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => ", "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Essentials (I).T_Like_Count, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => ", "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Essentials (I).Nt_Count, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ')'); if I < Sparklalr_Memory.Get_Stat_No then SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ","); else SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ");"); end if; end loop; end if; SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " procedure Get_Expected_Symbols"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " (Error_State : in SP_Productions.SP_State;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " No_Of_Terminals : out SP_Ess_Sym_Range;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Terminal_List : out SP_Exp_Sym_List;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " No_Of_Non_Terminals : out SP_Ess_Sym_Range;"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " Non_Terminal_List : out SP_Exp_Sym_List)"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " is separate;"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "end SP_Expected_Symbols;"); end Sp_Exp_Out; end Ees_Sym; spark-2012.0.deb/sparklalr/sparklalr.idx0000644000175000017500000000464211753202335017142 0ustar eugeneugenSPARK_Ada_Integer_Text_IO specification is in spark_ada_integer_text_io.ads SPARK_Ada_Integer_Text_IO body is in spark_ada_integer_text_io.adb Sparklalr_Symbol specification is in sparklalr_symbol.ads Sparklalr_Char_Class specification is in sparklalr_char_class.ads Sparklalr_Char_Class body is in sparklalr_char_class.adb Sparklalr_Common specification is in sparklalr_common.ads Sparklalr_Common body is in sparklalr_common.adb Command_Line_Options specification is in command_line_options.ads Command_Line_Options body is in command_line_options.adb Sparklalr_Error specification is in sparklalr_error.ads Sparklalr_Error body is in sparklalr_error.adb Sparklalr_Level specification is in sparklalr_level.ads Sparklalr_Level body is in sparklalr_level.adb Symbols_Dump specification is in symbols_dump.ads Symbols_Dump body is in symbols_dump.adb Sparklalr_Memory specification is in sparklalr_memory.ads Sparklalr_Memory body is in sparklalr_memory.adb Sparklalr_Goto specification is in sparklalr_goto.ads Sparklalr_Goto body is in sparklalr_goto.adb Sparklalr_Parser specification is in sparklalr_parser.ads Sparklalr_Parser body is in sparklalr_parser.adb Sparklalr_Patab specification is in sparklalr_patab.ads Sparklalr_Patab body is in sparklalr_patab.adb Sparklalr_Input specification is in sparklalr_input.ads Sparklalr_Input body is in sparklalr_input.adb Sparklalr_Memory.Dump specification is in sparklalr_memory-dump.ads Sparklalr_Memory.Dump body is in sparklalr_memory-dump.adb Sparklalr_Conflict specification is in sparklalr_conflict.ads Sparklalr_Conflict body is in sparklalr_conflict.adb Sparklalr_Memory.Left_Corner specification is in sparklalr_memory-left_corner.ads 
Sparklalr_Memory.Left_Corner body is in sparklalr_memory-left_corner.adb Ees_Sym specification is in ees_sym.ads Ees_Sym body is in ees_sym.adb Sparklalr body is in sparklalr.adb Fatal specification is in fatal.shs spark-2012.0.deb/sparklalr/sparklalr_level.ads0000644000175000017500000000657411753202335020322 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Sparklalr_Common; with Sparklalr_Symbol; --# inherit Sparklalr_Common, --# Sparklalr_Symbol; package Sparklalr_Level --# own State; is type Lev_Struct is private; procedure Initiate (L : out Lev_Struct); --# global in State; --# derives L from State; procedure Initialise; --# global out State; --# derives State from ; procedure Assign_Level (I : in Sparklalr_Common.Production_Index; M : in Lev_Struct); --# global in out State; --# derives State from *, --# I, --# M; procedure Assign_Term_Lev (I : in Sparklalr_Common.Term_Range; M : in Lev_Struct); --# global in out State; --# derives State from *, --# I, --# M; procedure Initiate_Level (I : in Sparklalr_Common.Production_Index); --# global in out State; --# derives State from *, --# I; procedure Initiate_Term_Lev (I : in Sparklalr_Common.Term_Range); --# global in out State; --# derives State from *, --# I; procedure Associativity (Symb : in Sparklalr_Symbol.Symbol; Current_Lev : in out Lev_Struct); --# derives Current_Lev from *, --# Symb; procedure Precedence (Is_Shred : in Boolean; A, B : in Integer; Term_Index : in Integer; Report : out Boolean; Result_Precedence : out Integer); --# global in State; --# derives Report, --# Result_Precedence from A, --# B, --# Is_Shred, --# State, --# Term_Index; -- Getter function Get_Term_Lev (I : in Sparklalr_Common.Term_Range) return Lev_Struct; --# global in State; -- Setter procedure Set_Level_Action_Flag (I : in Sparklalr_Common.Production_Index; Value : in Boolean); --# global in out State; --# derives State from *, --# I, --# Value; private type Assoc_T is (Leftass, Rightass, Nonass, Nodef); type Lev_Struct is record Assoc : Assoc_T; Action_Flag : Boolean; Lev : Sparklalr_Common.Short_Int; end record; end Sparklalr_Level; spark-2012.0.deb/sparklalr/fatal.adb0000644000175000017500000000336711753202335016203 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your 
option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SPARK.Ada.Text_IO; package body Fatal is procedure Stop_With_Command_Line_Exception is --# hide Stop_With_Command_Line_Exception; begin SPARK.Ada.Text_IO.Put_Line_Error ("Usage: sparklalr [-v] [-s] [-m] [-d[a-i|u]] [-p] "); SPARK.Ada.Text_IO.Put_Line_Error (""); SPARK.Ada.Text_IO.Put_Line_Error ("Options: -v verbose output"); SPARK.Ada.Text_IO.Put_Line_Error (" -s self pack"); SPARK.Ada.Text_IO.Put_Line_Error (" -m multi comp"); SPARK.Ada.Text_IO.Put_Line_Error (" -d[a-i] debug [level]"); SPARK.Ada.Text_IO.Put_Line_Error (" -du debug dump memory"); SPARK.Ada.Text_IO.Put_Line_Error (" -p parser"); raise Command_Line_Error; end Stop_With_Command_Line_Exception; end Fatal; spark-2012.0.deb/sparklalr/sparklalr_parser.ads0000644000175000017500000001720511753202335020500 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SPARK.Ada.Text_IO; with Sparklalr_Common; --# inherit Command_Line_Options, --# SPARK.Ada.Text_IO, --# Sparklalr_Common, --# Sparklalr_Goto, --# Sparklalr_Memory, --# SPARK_Ada_Integer_Text_IO, --# Symbols_Dump; package Sparklalr_Parser --# own State; is Pa_Table_Size : constant := 20000; subtype Pt_Pa_Rec is Natural range 0 .. 
Pa_Table_Size; procedure Init_Pa_List; --# global in out State; --# derives State from *; procedure Init_Pat_Count; --# global in out State; --# derives State from *; procedure Initialise; --# global out State; --# derives State from ; procedure Gen_State_Info; --# global in Sparklalr_Memory.Stat_No; --# in out State; --# derives State from *, --# Sparklalr_Memory.Stat_No; procedure Pa_Search (State_Index, Term_Index : in Integer; Result : out Integer; Pl : out Pt_Pa_Rec); --# global in State; --# derives Pl, --# Result from State, --# State_Index, --# Term_Index; procedure Pa_Insert (F : in out SPARK.Ada.Text_IO.File_Type; State_Index, Term_Index, Insertion : in Integer); --# global in Command_Line_Options.State; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out State; --# derives F from *, --# Command_Line_Options.State, --# Symbols_Dump.State, --# Term_Index & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State, --# Insertion, --# State_Index, --# Symbols_Dump.State, --# Term_Index & --# State from *, --# Insertion, --# State_Index, --# Term_Index; function Action_Equal (Act1, Act2 : in Pt_Pa_Rec) return Boolean; --# global in State; procedure Pa_Stats (Std_Out : in Boolean; F : in out SPARK.Ada.Text_IO.File_Type); --# global in State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# State, --# Std_Out; procedure Dump_Actions (F : in out SPARK.Ada.Text_IO.File_Type; Nstate : in Integer); --# global in State; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Nstate, --# State, --# Symbols_Dump.State; procedure Action_Gen (State_Var : in Integer); --# global in out State; --# derives State from *, --# State_Var; procedure Action_Gen_Pa_Out (F : in out SPARK.Ada.Text_IO.File_Type; State_Var : in Integer; Curr_Pat_Index : in out Integer); --# global in Sparklalr_Memory.Prod_Sum; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out State; --# derives Curr_Pat_Index, --# State from *, --# State, --# State_Var & --# F from *, --# Curr_Pat_Index, --# Sparklalr_Memory.Prod_Sum, --# State, --# State_Var, --# Symbols_Dump.State & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Sparklalr_Memory.Prod_Sum, --# State, --# State_Var, --# Symbols_Dump.State; procedure Action_Gen_Pa_Out_Sp (F : in out SPARK.Ada.Text_IO.File_Type; State_Var : in Integer; Curr_Pat_Index : in out Integer); --# global in Sparklalr_Memory.Prod_Sum; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out State; --# derives Curr_Pat_Index, --# State from *, --# State, --# State_Var & --# F from *, --# Curr_Pat_Index, --# Sparklalr_Memory.Prod_Sum, --# State, --# State_Var, --# Symbols_Dump.State & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Sparklalr_Memory.Prod_Sum, --# State, --# State_Var, --# Symbols_Dump.State; function Get_Reduce_State (S : in Sparklalr_Common.State_Range) return Boolean; --# global in State; function Get_Pat_Count return Integer; --# global in State; function Get_Pa_List (I : in Sparklalr_Common.State_Range) return Pt_Pa_Rec; --# global in State; end Sparklalr_Parser; spark-2012.0.deb/sparklalr/command_line_options.ads0000644000175000017500000000437011753202335021330 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited 
------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SPARK.Ada.Strings.Unbounded; --# inherit Fatal, --# SPARK.Ada.Command_Line, --# SPARK.Ada.Command_Line.Unbounded_String, --# SPARK.Ada.Strings.Unbounded, --# SPARK.Ada.Text_IO; package Command_Line_Options --# own State; is type Debug_Level_Range is range 1 .. 9; procedure Get_Options; --# global in out SPARK.Ada.Command_Line.State; --# in out SPARK.Ada.Text_IO.The_Standard_Error; --# out State; --# derives SPARK.Ada.Command_Line.State, --# State from SPARK.Ada.Command_Line.State & --# SPARK.Ada.Text_IO.The_Standard_Error from *, --# SPARK.Ada.Command_Line.State; -- Getter function Get_File_Name return SPARK.Ada.Strings.Unbounded.Unbounded_String; --# global in State; function Get_Verbose return Boolean; --# global in State; function Get_Debug_Level (Level : in Debug_Level_Range) return Boolean; --# global in State; function Get_Dump_Mem return Boolean; --# global in State; function Get_Parser return Boolean; --# global in State; function Get_Self_Pack return Boolean; --# global in State; function Get_Multi_Comp return Boolean; --# global in State; end Command_Line_Options; spark-2012.0.deb/sparklalr/sparklalr_conflict.adb0000644000175000017500000012261411753202335020765 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Command_Line_Options; with SPARK_Ada_Integer_Text_IO; with Sparklalr_Common; with Sparklalr_Level; with Sparklalr_Memory; with Sparklalr_Memory.Dump; with Sparklalr_Parser; with Sparklalr_Patab; with Symbols_Dump; use type Sparklalr_Common.Action_Type; package body Sparklalr_Conflict --# own State is List_Of_Conflicts, --# Rconflict_Array; is type Tconflict is (Shred, Redred); Rconflict_Table_Size : constant := 100; subtype Ptr_Conflict is Natural range 0 .. 
Rconflict_Table_Size; subtype Rconflict_Array_Range is Positive range 1 .. Rconflict_Table_Size; type Rconflict is record Ctype : Tconflict; Cstate, Cterm, Resolved, C1, C2 : Integer; Lnk : Ptr_Conflict; end record; type Rconflict_Array_Array_T is array (Rconflict_Array_Range) of Rconflict; type Rconflict_Array_T is record The_Array : Rconflict_Array_Array_T; Top : Ptr_Conflict; end record; Rconflict_Array : Rconflict_Array_T; List_Of_Conflicts : Ptr_Conflict; procedure Initialise --# global out List_Of_Conflicts; --# out Rconflict_Array; --# derives List_Of_Conflicts, --# Rconflict_Array from ; is begin Rconflict_Array := Rconflict_Array_T' (The_Array => Rconflict_Array_Array_T'(others => Rconflict'(Ctype => Shred, Cstate => 0, Cterm => 0, Resolved => 0, C1 => 0, C2 => 0, Lnk => 0)), Top => 0); List_Of_Conflicts := 0; end Initialise; procedure Print_Conflicts (F : in out SPARK.Ada.Text_IO.File_Type) --# global in List_Of_Conflicts; --# in Rconflict_Array; --# in Sparklalr_Memory.Dump.State; --# in Sparklalr_Memory.Stat_No; --# in Sparklalr_Parser.State; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# List_Of_Conflicts, --# Rconflict_Array, --# Sparklalr_Memory.Dump.State, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# Symbols_Dump.State; is Pc : Ptr_Conflict; Posn : Integer; S, T : Sparklalr_Memory.Dump.Pt_Memory; More : Boolean; begin if List_Of_Conflicts /= 0 then Posn := 1; for Nstate in Integer range 1 .. Sparklalr_Memory.Get_Stat_No loop Pc := List_Of_Conflicts; More := Rconflict_Array.The_Array (Pc).Cstate /= Nstate; while More loop Pc := Rconflict_Array.The_Array (Pc).Lnk; if Pc /= 0 then More := Rconflict_Array.The_Array (Pc).Cstate /= Nstate; else More := False; end if; end loop; if Pc /= 0 then More := True; SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_File (File => F, Item => " STATE "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Nstate, Width => 4, Base => 10); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); S := Sparklalr_Memory.Dump.Get_State (Nstate); T := Sparklalr_Memory.Dump.Get_State (Nstate + 1); Sparklalr_Memory.Dump.Dump_Items (F, S, T); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Sparklalr_Parser.Dump_Actions (F, Nstate); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); while More loop case Rconflict_Array.The_Array (Pc).Ctype is when Shred => SPARK.Ada.Text_IO.Put_File (File => F, Item => " ** SHIFT "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Rconflict_Array.The_Array (Pc).C1, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => "/REDUCE "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Rconflict_Array.The_Array (Pc).C2, Width => 1, Base => 10); when Redred => SPARK.Ada.Text_IO.Put_File (File => F, Item => " ** REDUCE "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Rconflict_Array.The_Array (Pc).C1, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => "/REDUCE "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Rconflict_Array.The_Array (Pc).C2, Width => 1, Base => 10); end case; SPARK.Ada.Text_IO.Put_File (File => F, Item => " CONFLICT ON TERMINAL "); Posn := Posn + 43; --# accept F, 10, Posn, "Ineffective assignment here expected and OK"; Symbols_Dump.Print_Sym (F, Rconflict_Array.The_Array (Pc).Cterm, Posn, 10, False); --# end accept; SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 
1); --# accept W, 303, "when others here covers all cases"; case Rconflict_Array.The_Array (Pc).Resolved is when 1 => SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " SHIFT SELECTED"); when 2 => SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " REDUCE SELECTED"); when 3 => SPARK.Ada.Text_IO.Put_File (File => F, Item => " REDUCE "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Rconflict_Array.The_Array (Pc).C2, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " SELECTED "); when 4 => SPARK.Ada.Text_IO.Put_File (File => F, Item => " REDUCE "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Rconflict_Array.The_Array (Pc).C1, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " SELECTED "); when 5 => SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " ***ERROR*** BINARY CONFLICT - ERROR INSERTED "); when others => null; end case; --# end accept; Posn := 1; Pc := Rconflict_Array.The_Array (Pc).Lnk; if Pc /= 0 then More := Rconflict_Array.The_Array (Pc).Cstate = Nstate; else More := False; end if; end loop; end if; end loop; end if; end Print_Conflicts; procedure Conflict_Stats (Std_Out : in Boolean; F : in out SPARK.Ada.Text_IO.File_Type) --# global in List_Of_Conflicts; --# in Rconflict_Array; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# List_Of_Conflicts, --# Rconflict_Array, --# Std_Out; is Sr_Count, Rr_Count : Integer; P : Ptr_Conflict; begin Sr_Count := 0; Rr_Count := 0; P := List_Of_Conflicts; while P /= 0 loop if Rconflict_Array.The_Array (P).Ctype = Shred then Sr_Count := Sr_Count + 1; else Rr_Count := Rr_Count + 1; end if; P := Rconflict_Array.The_Array (P).Lnk; end loop; Sparklalr_Common.New_Line_File_Output (Std_Out => Std_Out, File => F); Sparklalr_Common.Put_Integer_File_Output (Std_Out => Std_Out, File => F, Item => Sr_Count, Width => 6); Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => " SHIFT/REDUCE CONFLICTS"); Sparklalr_Common.Put_Integer_File_Output (Std_Out => Std_Out, File => F, Item => Rr_Count, Width => 6); Sparklalr_Common.Put_Line_File_Output (Std_Out => Std_Out, File => F, Item => " REDUCE/REDUCE CONFLICTS"); Sparklalr_Common.New_Line_File_Output (Std_Out => Std_Out, File => F); end Conflict_Stats; procedure Dump_Conflicts (F : in out SPARK.Ada.Text_IO.File_Type; Nstate : in Integer) --# global in List_Of_Conflicts; --# in Rconflict_Array; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# List_Of_Conflicts, --# Nstate, --# Rconflict_Array, --# Symbols_Dump.State; is Dummy : Ptr_Conflict; Posn : Integer; function Cflct (Init_Cflct : in Ptr_Conflict) return Ptr_Conflict --# global in Nstate; --# in Rconflict_Array; is P : Ptr_Conflict; Found : Boolean; begin Found := False; P := Init_Cflct; while (P /= 0) and then not Found loop if Rconflict_Array.The_Array (P).Cstate = Nstate then Found := True; else P := Rconflict_Array.The_Array (P).Lnk; end if; end loop; return P; end Cflct; begin -- Dump_Conflicts Posn := 1; Dummy := Cflct (List_Of_Conflicts); while Dummy /= 0 loop case Rconflict_Array.The_Array (Dummy).Ctype is when Shred => SPARK.Ada.Text_IO.Put_File (File => F, Item => " ** SHIFT "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Rconflict_Array.The_Array (Dummy).C1, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => "/REDUCE "); 
SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Rconflict_Array.The_Array (Dummy).C2, Width => 1, Base => 10); when Redred => SPARK.Ada.Text_IO.Put_File (File => F, Item => " ** REDUCE "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Rconflict_Array.The_Array (Dummy).C1, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => "/REDUCE "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Rconflict_Array.The_Array (Dummy).C2, Width => 1, Base => 10); end case; SPARK.Ada.Text_IO.Put_File (File => F, Item => " CONFLICT ON TERMINAL "); Posn := Posn + 43; --# accept F, 10, Posn, "Ineffective assignment here expected and OK"; Symbols_Dump.Print_Sym (F, Rconflict_Array.The_Array (Dummy).Cterm, Posn, 10, False); --# end accept; SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); --# accept W, 303, "when others here covers all cases"; case Rconflict_Array.The_Array (Dummy).Resolved is when 1 => SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " SHIFT SELECTED"); when 2 => SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " REDUCE SELECTED"); when 3 => SPARK.Ada.Text_IO.Put_File (File => F, Item => " REDUCE "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Rconflict_Array.The_Array (Dummy).C2, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " SELECTED "); when 4 => SPARK.Ada.Text_IO.Put_File (File => F, Item => " REDUCE "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Rconflict_Array.The_Array (Dummy).C1, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " SELECTED "); when 5 => SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " ***ERROR*** BINARY CONFLICT - ERROR INSERTED "); when others => null; end case; --# end accept; Posn := 1; Dummy := Cflct (Rconflict_Array.The_Array (Dummy).Lnk); end loop; end Dump_Conflicts; procedure Parse_Action_Generation (F : in out SPARK.Ada.Text_IO.File_Type) -- PARSEACTIONGENERATION GENERATES THE REDUCES IN THE -- PARSING ACTION TABLE --# global in Command_Line_Options.State; --# in Sparklalr_Level.State; --# in Sparklalr_Memory.Stat_No; --# in Symbols_Dump.State; --# in out List_Of_Conflicts; --# in out Rconflict_Array; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Memory.Dump.State; --# in out Sparklalr_Parser.State; --# in out Sparklalr_Patab.State; --# derives F, --# Sparklalr_Memory.Dump.State, --# Sparklalr_Parser.State, --# Sparklalr_Patab.State from *, --# Command_Line_Options.State, --# Sparklalr_Level.State, --# Sparklalr_Memory.Dump.State, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# Symbols_Dump.State & --# List_Of_Conflicts, --# Rconflict_Array from Command_Line_Options.State, --# List_Of_Conflicts, --# Rconflict_Array, --# Sparklalr_Level.State, --# Sparklalr_Memory.Dump.State, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# Symbols_Dump.State & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State, --# Sparklalr_Level.State, --# Sparklalr_Memory.Dump.State, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# Sparklalr_Patab.State, --# Symbols_Dump.State; is S, T : Sparklalr_Memory.Dump.Pt_Memory; C : Integer; Result_Pa_Search1, Result_Pa_Search2 : Integer; Pl : Sparklalr_Parser.Pt_Pa_Rec; procedure Pa_Gen (F : in out SPARK.Ada.Text_IO.File_Type; Sr : in Sparklalr_Common.Action_Type; State_Index, Term_Index : in Integer; S : in Sparklalr_Memory.Dump.Pt_Memory; C : in Integer) -- ADD ENTRIES INTO PARSING ACTION TABLE RESOLVING -- ANY 
RESULTING CONFLICTS --# global in Command_Line_Options.State; --# in Sparklalr_Level.State; --# in Sparklalr_Memory.Stat_No; --# in Symbols_Dump.State; --# in out List_Of_Conflicts; --# in out Rconflict_Array; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Memory.Dump.State; --# in out Sparklalr_Parser.State; --# derives F from *, --# C, --# Command_Line_Options.State, --# Sparklalr_Level.State, --# Sparklalr_Parser.State, --# Sr, --# State_Index, --# Symbols_Dump.State, --# Term_Index & --# List_Of_Conflicts, --# Rconflict_Array from C, --# List_Of_Conflicts, --# Rconflict_Array, --# Sparklalr_Level.State, --# Sparklalr_Parser.State, --# Sr, --# State_Index, --# Term_Index & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# C, --# Command_Line_Options.State, --# S, --# Sparklalr_Level.State, --# Sparklalr_Memory.Dump.State, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# Sr, --# State_Index, --# Symbols_Dump.State, --# Term_Index & --# Sparklalr_Memory.Dump.State from *, --# S, --# Sparklalr_Parser.State, --# Sr, --# State_Index, --# Term_Index & --# Sparklalr_Parser.State from *, --# C, --# S, --# Sparklalr_Level.State, --# Sparklalr_Memory.Dump.State, --# Sparklalr_Memory.Stat_No, --# Sr, --# State_Index, --# Term_Index; is Result_Pa_Search, Result_New_State : Integer; Pl : Sparklalr_Parser.Pt_Pa_Rec; -- -- Automatic resolution of conflicts is not acceptable with SPARKLALR -- procedure Resolve_Conflict (F : in out SPARK.Ada.Text_IO.File_Type; Sr : in Sparklalr_Common.Action_Type; State_Index, Term_Index : in Integer; C : in Integer) -- RESOLUTION OF ANY GRAMMAR AMBIGUITIES USING -- THE PRECEDENCE INFORMATION --# global in Command_Line_Options.State; --# in Sparklalr_Level.State; --# in Symbols_Dump.State; --# in out List_Of_Conflicts; --# in out Rconflict_Array; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Parser.State; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# C, --# Command_Line_Options.State, --# Sparklalr_Level.State, --# Sparklalr_Parser.State, --# Sr, --# State_Index, --# Symbols_Dump.State, --# Term_Index & --# List_Of_Conflicts, --# Rconflict_Array from C, --# List_Of_Conflicts, --# Rconflict_Array, --# Sparklalr_Level.State, --# Sparklalr_Parser.State, --# Sr, --# State_Index, --# Term_Index & --# Sparklalr_Parser.State from *, --# C, --# Sparklalr_Level.State, --# Sr, --# State_Index, --# Term_Index; is Result_Pa_Search : Integer; Pl : Sparklalr_Parser.Pt_Pa_Rec; procedure Rslv (F : in out SPARK.Ada.Text_IO.File_Type; Sr : in Tconflict; A, B : in Integer; State_Index, Term_Index : in Integer) -- RSLV USES PRECEDENCE TO PHYSICALLY ALTER THE -- PARSING ACTION TABLE TO OVERCOME ANY AMBIGUITY CONFLICTS. 
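      -- Note on the resolution codes used below: Sparklalr_Level.Precedence
      -- returns Result_Precedence, which drives the table update in Rslv:
      -- 1 inserts the shift action, 2 and 4 insert one of the two competing
      -- reduce actions, 3 leaves the existing table entry in place, and 5
      -- inserts Err_Code to mark a binary conflict that precedence cannot
      -- resolve. When Report is set, the chosen outcome is also recorded in
      -- the Resolved field of the conflict-list entry created by
      -- Report_Conflict and later decoded by Print_Conflicts/Dump_Conflicts.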
--# global in Command_Line_Options.State; --# in Sparklalr_Level.State; --# in Symbols_Dump.State; --# in out List_Of_Conflicts; --# in out Rconflict_Array; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Parser.State; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# A, --# B, --# Command_Line_Options.State, --# Sparklalr_Level.State, --# Sr, --# State_Index, --# Symbols_Dump.State, --# Term_Index & --# List_Of_Conflicts from *, --# A, --# B, --# Rconflict_Array, --# Sparklalr_Level.State, --# Sr, --# Term_Index & --# Rconflict_Array from *, --# A, --# B, --# List_Of_Conflicts, --# Sparklalr_Level.State, --# Sr, --# State_Index, --# Term_Index & --# Sparklalr_Parser.State from *, --# A, --# B, --# Sparklalr_Level.State, --# Sr, --# State_Index, --# Term_Index; is Err_Code : constant := -1; Report : Boolean; Result_Precedence : Integer; Conflict : Ptr_Conflict; procedure Report_Conflict (F : in out SPARK.Ada.Text_IO.File_Type; State_Index, Term_Index : in Integer; S : in Tconflict; Conflict : out Ptr_Conflict) -- REPORTCONFLICT REPORTS A PARSING ACTION CONFLICT THAT MAY -- ARISE DUE TO AMBIGUITIES PRESENT IN THE INPUT GRAMMAR. -- FIRSTLY, THE APPROPRIATE MESSAGE IS SENT TO THE DAYFILE. -- THE CONFLICT IS ALSO STORED FOR FUTURE REFERENCE IN A -- LINKED LIST DATA STRUCTURE WHICH STORES INFORMATION -- SUCH AS CONFLICT TYPE, TERMINAL INVOLVED, AND RELEVANT SHIFTS OR -- REDUCTIONS . --# global in out List_Of_Conflicts; --# in out Rconflict_Array; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives Conflict, --# List_Of_Conflicts from Rconflict_Array & --# F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# S, --# State_Index & --# Rconflict_Array from *, --# List_Of_Conflicts, --# S, --# State_Index, --# Term_Index; is begin Rconflict_Array.Top := Rconflict_Array.Top + 1; Conflict := Rconflict_Array.Top; SPARK.Ada.Text_IO.Put_Output (Item => "STATE "); SPARK_Ada_Integer_Text_IO.Put_Output (Item => State_Index, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Output (Item => ": "); SPARK.Ada.Text_IO.Put_File (File => F, Item => "STATE "); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => State_Index, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => ": "); case S is when Shred => SPARK.Ada.Text_IO.Put_Line_Output (Item => "* SHIFT/REDUCE CONFLICT"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "* SHIFT/REDUCE CONFLICT"); when Redred => SPARK.Ada.Text_IO.Put_Line_Output (Item => "* REDUCE/REDUCE CONFLICT"); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "* REDUCE/REDUCE CONFLICT"); end case; Rconflict_Array.The_Array (Conflict) := Rconflict' (Lnk => List_Of_Conflicts, Cstate => State_Index, Cterm => Term_Index, Ctype => S, Resolved => 0, C1 => 0, C2 => 0); List_Of_Conflicts := Conflict; end Report_Conflict; begin -- Rslv Conflict := 0; Sparklalr_Level.Precedence (Sr = Shred, A, B, Term_Index, Report, Result_Precedence); if Report then Report_Conflict (F, State_Index, Term_Index, Sr, Conflict); end if; --# accept W, 303, "when others here covers all cases"; case Result_Precedence is when 1 => if Report then Rconflict_Array.The_Array (Conflict).Resolved := 1; end if; Sparklalr_Parser.Pa_Insert (F => F, State_Index => State_Index, Term_Index => Term_Index, Insertion => Sparklalr_Common.Code (Sparklalr_Common.Shift, B)); when 2 => if Report then Rconflict_Array.The_Array (Conflict).Resolved := 2; end if; Sparklalr_Parser.Pa_Insert (F => F, State_Index => State_Index, Term_Index => Term_Index, Insertion => 
Sparklalr_Common.Code (Sparklalr_Common.Reduce, A)); when 3 => if Report then Rconflict_Array.The_Array (Conflict).Resolved := 3; end if; when 4 => if Report then Rconflict_Array.The_Array (Conflict).Resolved := 3; end if; Sparklalr_Parser.Pa_Insert (F => F, State_Index => State_Index, Term_Index => Term_Index, Insertion => Sparklalr_Common.Code (Sparklalr_Common.Reduce, B)); when 5 => if Report then Rconflict_Array.The_Array (Conflict).Resolved := 5; end if; Sparklalr_Parser.Pa_Insert (F => F, State_Index => State_Index, Term_Index => Term_Index, Insertion => Err_Code); when others => null; end case; --# end accept; if Report then Rconflict_Array.The_Array (Conflict).C1 := B; Rconflict_Array.The_Array (Conflict).C2 := A; end if; end Rslv; begin -- Resolve_Conflict if Sr = Sparklalr_Common.Shift then -- SHIFT - REDUCE CONFLICT --# accept F, 10, Pl, "Ineffective assignment here expected and OK"; Sparklalr_Parser.Pa_Search (State_Index, Term_Index, Result_Pa_Search, Pl); --# end accept; Rslv (F, Shred, Sparklalr_Common.Decode (Result_Pa_Search), Term_Index, State_Index, Term_Index); else --# accept F, 10, Pl, "Ineffective assignment here expected and OK"; Sparklalr_Parser.Pa_Search (State_Index, Term_Index, Result_Pa_Search, Pl); --# end accept; if Result_Pa_Search <= Sparklalr_Common.Prod_Lim then -- REDUCE - REDUCE CONFLICT --# accept F, 10, Pl, "Ineffective assignment here expected and OK"; Sparklalr_Parser.Pa_Search (State_Index, Term_Index, Result_Pa_Search, Pl); --# end accept; Rslv (F, Redred, Sparklalr_Common.Decode (Result_Pa_Search), -C, State_Index, Term_Index); else -- SHIFT - REDUCE CONFLICT --# accept F, 10, Pl, "Ineffective assignment here expected and OK"; Sparklalr_Parser.Pa_Search (State_Index, Term_Index, Result_Pa_Search, Pl); --# end accept; Rslv (F, Shred, -C, Sparklalr_Common.Decode (Result_Pa_Search), State_Index, Term_Index); end if; end if; --# accept F, 33, Pl, "Pl is unused OK"; end Resolve_Conflict; begin -- Pa_Gen --# accept F, 10, Pl, "Ineffective assignment here expected and OK"; Sparklalr_Parser.Pa_Search (State_Index, Term_Index, Result_Pa_Search, Pl); --# end accept; if Result_Pa_Search /= 0 then Resolve_Conflict (F, Sr, State_Index, Term_Index, C); else if Sr = Sparklalr_Common.Shift then Sparklalr_Memory.Dump.New_State (S, Result_New_State); Sparklalr_Parser.Pa_Insert (F => F, State_Index => State_Index, Term_Index => Term_Index, Insertion => Sparklalr_Common.Code (Sr, Result_New_State)); else Sparklalr_Parser.Pa_Insert (F => F, State_Index => State_Index, Term_Index => Term_Index, Insertion => -C); end if; end if; --# accept F, 33, Pl, "Pl is unused OK"; end Pa_Gen; begin -- Parse_Action_Generation for I in Integer range 1 .. Sparklalr_Memory.Get_Stat_No loop S := Sparklalr_Memory.Dump.Get_State (I); T := Sparklalr_Memory.Dump.Get_State (I + 1); while S /= T loop C := Sparklalr_Memory.Dump.Get_Contents (Sparklalr_Memory.Dump.Get_Dot (Sparklalr_Memory.Dump.Get_Item (S))); if C < 0 then for J in Integer range 0 .. Symbols_Dump.Get_Nterms loop if Sparklalr_Memory.Dump.Get_Lh_Lset (Sparklalr_Memory.Dump.Get_Item (S), J) then Pa_Gen (F, Sparklalr_Common.Reduce, I, J, S, C); end if; end loop; else if C = 0 then -- SPEND C := -1; Pa_Gen (F, Sparklalr_Common.Reduce, I, 0, S, C); --# accept F, 41, "Stable expression here expected and OK"; if Command_Line_Options.Get_Multi_Comp then for Term_Sym in Integer range 2 .. 
Symbols_Dump.Get_Nterms loop -- Not SPEND or SPDEFAULT --# accept F, 10, Pl, "Ineffective assignment here expected and OK"; Sparklalr_Parser.Pa_Search (1, Term_Sym, Result_Pa_Search1, Pl); Sparklalr_Parser.Pa_Search (I, Term_Sym, Result_Pa_Search2, Pl); --# end accept; if (Result_Pa_Search1 /= 0) and then (Result_Pa_Search2 = 0) then Pa_Gen (F, Sparklalr_Common.Reduce, I, Term_Sym, S, C); end if; end loop; end if; --# end accept; end if; end if; S := Sparklalr_Memory.Dump.Get_Next (S); end loop; end loop; Sparklalr_Patab.Optimise; --# accept F, 33, Pl, "Pl is unused OK"; end Parse_Action_Generation; end Sparklalr_Conflict; spark-2012.0.deb/sparklalr/vcg/0000755000175000017500000000000011753203755015221 5ustar eugeneugenspark-2012.0.deb/sparklalr/sparklalr_char_class.ads0000644000175000017500000000321411753202335021301 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= package Sparklalr_Char_Class --# own Charmap; is type Char_Class is ( None, Letter, Digit, Colonchar, Scolonchar, Equalchar, Commachar, Uparrowchar, Lparenchar, Ampchar, Blankchar, Underscore, Otherchar); procedure Initialise; --# global out Charmap; --# derives Charmap from ; function Get_Charmap (C : in Character) return Char_Class; --# global in Charmap; end Sparklalr_Char_Class; spark-2012.0.deb/sparklalr/sparklalr_memory-dump.ads0000644000175000017500000004472211753202335021463 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
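-- Editorial sketch (not part of the SPARKLALR sources): Resolve_Conflict and
-- Rslv above settle shift/reduce and reduce/reduce clashes by consulting
-- Sparklalr_Level.Precedence and recording the outcome in Rconflict_Array
-- (Resolved = 1 keeps the shift, 2 keeps a reduce, 5 inserts an error entry).
-- The standalone procedure below only illustrates the shape of that decision;
-- the Decide function and its precedence rule are hypothetical and do not
-- reproduce the actual rules implemented in Sparklalr_Level.
with Ada.Text_IO;

procedure Conflict_Resolution_Sketch is
   type Action_Kind is (Shift_Selected, Reduce_Selected, Error_Inserted);

   -- Pick an action from two precedence values, mirroring the 1 / 2 / 5
   -- outcomes stored in the Resolved field of a conflict record.
   function Decide (Shift_Prec, Reduce_Prec : Natural) return Action_Kind is
   begin
      if Shift_Prec > Reduce_Prec then
         return Shift_Selected;
      elsif Reduce_Prec > Shift_Prec then
         return Reduce_Selected;
      else
         return Error_Inserted;  -- unresolved: an error entry goes into the table
      end if;
   end Decide;
begin
   Ada.Text_IO.Put_Line (Action_Kind'Image (Decide (Shift_Prec => 3, Reduce_Prec => 1)));
   Ada.Text_IO.Put_Line (Action_Kind'Image (Decide (Shift_Prec => 2, Reduce_Prec => 2)));
end Conflict_Resolution_Sketch;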
-- --============================================================================= with SPARK.Ada.Text_IO; with Sparklalr_Common; with Sparklalr_Error; with Sparklalr_Input; with Sparklalr_Symbol; --# inherit Command_Line_Options, --# SPARK.Ada.Command_Line, --# SPARK.Ada.Strings.Unbounded, --# SPARK.Ada.Text_IO, --# SPARK.Ada.Text_IO.Unbounded_String, --# Sparklalr_Char_Class, --# Sparklalr_Common, --# Sparklalr_Error, --# Sparklalr_Goto, --# Sparklalr_Input, --# Sparklalr_Level, --# Sparklalr_Memory, --# Sparklalr_Parser, --# Sparklalr_Symbol, --# SPARK_Ada_Integer_Text_IO, --# Symbols_Dump; package Sparklalr_Memory.Dump --# own State; is Max_Sym : constant := 766; -- = TERM_LIM + NON_TERM_LIM subtype Contents_T is Integer range -Sparklalr_Common.Prod_Lim .. Max_Sym; Item_Table_Size : constant := 20000; Memory_Table_Size : constant := 20000; subtype Pt_Item is Natural range 0 .. Item_Table_Size; subtype Pt_Memory is Natural range 0 .. Memory_Table_Size; Null_Pt_Memory : constant Pt_Memory; procedure Initialise; --# global in Command_Line_Options.State; --# in out SPARK.Ada.Command_Line.State; --# in out SPARK.Ada.Text_IO.The_Standard_Error; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# out Sparklalr_Memory.Max_Right; --# out Sparklalr_Memory.Prod_No; --# out Sparklalr_Memory.Prod_Sum; --# out Sparklalr_Memory.Stat_No; --# out State; --# derives SPARK.Ada.Command_Line.State, --# SPARK.Ada.Text_IO.The_Standard_Error, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State & --# Sparklalr_Memory.Max_Right, --# Sparklalr_Memory.Prod_No, --# Sparklalr_Memory.Prod_Sum, --# Sparklalr_Memory.Stat_No from & --# State from Command_Line_Options.State; procedure Mem_Dump; --# global in Command_Line_Options.State; --# in Sparklalr_Memory.Prod_No; --# in Sparklalr_Memory.Stat_No; --# in State; --# in Symbols_Dump.State; --# in out SPARK.Ada.Command_Line.State; --# in out SPARK.Ada.Text_IO.The_Standard_Error; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives SPARK.Ada.Command_Line.State, --# SPARK.Ada.Text_IO.The_Standard_Error from *, --# Command_Line_Options.State, --# Sparklalr_Memory.Prod_No, --# Sparklalr_Memory.Stat_No, --# State, --# Symbols_Dump.State & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Sparklalr_Memory.Prod_No, --# Sparklalr_Memory.Stat_No, --# State, --# Symbols_Dump.State; function Prodstart (P : in Pt_Memory) return Pt_Memory; --# global in State; procedure Dump_Items (F : in out SPARK.Ada.Text_IO.File_Type; S, T : in Pt_Memory); --# global in State; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# S, --# State, --# Symbols_Dump.State, --# T; procedure Dump_Prdns (F : in out SPARK.Ada.Text_IO.File_Type); --# global in Sparklalr_Memory.Prod_No; --# in State; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Sparklalr_Memory.Prod_No, --# State, --# Symbols_Dump.State; procedure Summary; --# global in Sparklalr_Memory.Prod_No; --# in State; --# in out Sparklalr_Memory.Prod_Sum; --# out Sparklalr_Memory.Max_Right; --# derives Sparklalr_Memory.Max_Right, --# Sparklalr_Memory.Prod_Sum from Sparklalr_Memory.Prod_No, --# Sparklalr_Memory.Prod_Sum, --# State; procedure New_State (S : in Pt_Memory; Result : out Integer); --# global in Sparklalr_Memory.Stat_No; --# in out State; --# derives Result from S, --# Sparklalr_Memory.Stat_No, --# State & 
--# State from *, --# S; procedure Gen_Terminal_Like; --# global in Symbols_Dump.State; --# in out State; --# derives State from *, --# Symbols_Dump.State; procedure Findntredns (F : in out SPARK.Ada.Text_IO.File_Type); --# global in Command_Line_Options.State; --# in Sparklalr_Memory.Prod_No; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# in out State; --# derives F, --# Sparklalr_Error.State, --# State from *, --# Sparklalr_Memory.Prod_No, --# State, --# Symbols_Dump.State & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State, --# Sparklalr_Memory.Prod_No, --# State, --# Symbols_Dump.State; procedure Mem_Stats (Std_Out : in Boolean; F : in out SPARK.Ada.Text_IO.File_Type); --# global in State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# State, --# Std_Out; procedure Productions_Package_Out (F : in out SPARK.Ada.Text_IO.File_Type); --# global in Sparklalr_Memory.Max_Right; --# in Sparklalr_Memory.Prod_No; --# in Sparklalr_Memory.Stat_No; --# in State; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F from *, --# Sparklalr_Memory.Max_Right, --# Sparklalr_Memory.Prod_No, --# Sparklalr_Memory.Stat_No, --# State, --# Symbols_Dump.State & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Sparklalr_Memory.Prod_No, --# State, --# Symbols_Dump.State; procedure Rhs_Process (F : in out SPARK.Ada.Text_IO.File_Type; Symb : in out Sparklalr_Symbol.Symbol; Gram_Rules : in Boolean; Col : in out Sparklalr_Error.Err_Col_T; Signpost : in Sparklalr_Input.Symbol_Set_Type); --# global in Command_Line_Options.State; --# in Sparklalr_Char_Class.Charmap; --# in out SPARK.Ada.Command_Line.State; --# in out SPARK.Ada.Text_IO.The_Standard_Error; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# in out Sparklalr_Input.State; --# in out Sparklalr_Level.State; --# in out Sparklalr_Memory.Prod_No; --# in out State; --# in out Symbols_Dump.State; --# derives Col, --# F, --# Sparklalr_Error.State, --# Sparklalr_Input.State from *, --# Col, --# Command_Line_Options.State, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Sparklalr_Input.State, --# State, --# Symbols_Dump.State & --# SPARK.Ada.Command_Line.State, --# SPARK.Ada.Text_IO.The_Standard_Error, --# Sparklalr_Memory.Prod_No from *, --# Col, --# Command_Line_Options.State, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Sparklalr_Input.State, --# Sparklalr_Memory.Prod_No, --# State, --# Symbols_Dump.State & --# SPARK.Ada.Text_IO.The_Standard_Output, --# State from *, --# Col, --# Command_Line_Options.State, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Sparklalr_Input.State, --# Sparklalr_Memory.Prod_No, --# State, --# Symb, --# Symbols_Dump.State & --# Sparklalr_Level.State from *, --# Col, --# Command_Line_Options.State, --# Gram_Rules, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Sparklalr_Input.State, --# Sparklalr_Memory.Prod_No, --# State, --# Symb, --# Symbols_Dump.State & --# Symb from Col, --# Command_Line_Options.State, --# Signpost, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Sparklalr_Input.State, --# State, --# Symbols_Dump.State & --# Symbols_Dump.State from *, --# Col, --# Sparklalr_Char_Class.Charmap, --# Sparklalr_Error.State, --# Sparklalr_Input.State; 
procedure Lhs_Process (F : in out SPARK.Ada.Text_IO.File_Type; Gram_Rules : in Boolean; Token : in Sparklalr_Common.Id_Name; Col : in Sparklalr_Error.Err_Col_T); --# global in Command_Line_Options.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# in out Sparklalr_Level.State; --# in out State; --# in out Symbols_Dump.State; --# derives F, --# State, --# Symbols_Dump.State from *, --# Symbols_Dump.State, --# Token & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State, --# State, --# Symbols_Dump.State, --# Token & --# Sparklalr_Error.State from *, --# Col, --# State, --# Symbols_Dump.State, --# Token & --# Sparklalr_Level.State from *, --# Gram_Rules, --# Symbols_Dump.State, --# Token; procedure State_Generation (F : in out SPARK.Ada.Text_IO.File_Type); --# global in Command_Line_Options.State; --# in Sparklalr_Memory.Prod_No; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Error.State; --# in out Sparklalr_Goto.State; --# in out Sparklalr_Parser.State; --# in out State; --# out Sparklalr_Memory.Stat_No; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State, --# Sparklalr_Goto.State, --# Sparklalr_Memory.Prod_No, --# Sparklalr_Parser.State, --# State, --# Symbols_Dump.State & --# Sparklalr_Error.State, --# Sparklalr_Goto.State, --# Sparklalr_Parser.State, --# State from *, --# Sparklalr_Goto.State, --# Sparklalr_Memory.Prod_No, --# Sparklalr_Parser.State, --# State, --# Symbols_Dump.State & --# Sparklalr_Memory.Stat_No from Sparklalr_Goto.State, --# Sparklalr_Memory.Prod_No, --# Sparklalr_Parser.State, --# State, --# Symbols_Dump.State; -- Getter function Get_Next (Ptr : in Pt_Memory) return Pt_Memory; --# global in State; function Get_Contents (Ptr : in Pt_Memory) return Contents_T; --# global in State; function Get_Item (Ptr : in Pt_Memory) return Pt_Item; --# global in State; function Get_Mem_Pt (Ptr : in Pt_Memory) return Pt_Memory; --# global in State; function Get_Dot (Ptr : in Pt_Item) return Pt_Memory; --# global in State; function Get_Lh_Lset (Ptr : in Pt_Item; I : in Sparklalr_Common.Term_Range) return Boolean; --# global in State; function Get_Terminal_Like (S : in Sparklalr_Common.Sym_Range) return Boolean; --# global in State; function Get_State (S : in Sparklalr_Common.State_Range) return Pt_Memory; --# global in State; function Get_Ntrdn (I : in Sparklalr_Common.Non_Term_Range) return Pt_Memory; --# global in State; private Null_Pt_Memory : constant Pt_Memory := 0; end Sparklalr_Memory.Dump; spark-2012.0.deb/sparklalr/sparklalr_memory.ads0000644000175000017500000000634611753202335020520 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. 
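-- Editorial sketch (not part of the SPARKLALR sources): because the SPARK
-- subset used here avoids access types, Sparklalr_Memory.Dump models linked
-- structures as indices into fixed-size tables (Pt_Memory, Pt_Item), with 0
-- standing in for a null pointer (Null_Pt_Memory); Dump_Conflicts walks its
-- conflict chain the same way via the Lnk field.  The self-contained program
-- below illustrates that index-as-pointer pattern; all names and sizes are
-- hypothetical.
with Ada.Text_IO;

procedure Table_List_Sketch is
   Table_Size : constant := 100;
   subtype Pt_Cell is Natural range 0 .. Table_Size;   -- 0 plays the role of "null"
   Null_Pt : constant Pt_Cell := 0;

   type Cell is record
      Value : Integer;
      Next  : Pt_Cell;
   end record;

   Cells : array (1 .. Table_Size) of Cell;
   Head  : Pt_Cell := Null_Pt;
   Top   : Natural := 0;

   -- Take the next free slot in the table and link it at the front of the list.
   procedure Push (Value : in Integer) is
   begin
      Top := Top + 1;
      Cells (Top) := Cell'(Value => Value, Next => Head);
      Head := Top;
   end Push;

   P : Pt_Cell;
begin
   Push (1);
   Push (2);
   P := Head;
   while P /= Null_Pt loop            -- walk the chain until the "null" index
      Ada.Text_IO.Put_Line (Integer'Image (Cells (P).Value));
      P := Cells (P).Next;
   end loop;
end Table_List_Sketch;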
If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Sparklalr_Common; --# inherit Sparklalr_Common; package Sparklalr_Memory --# own Max_Right; --# Prod_No; --# Prod_Sum; --# Stat_No; is procedure Initialise; --# global out Max_Right; --# out Prod_No; --# out Prod_Sum; --# out Stat_No; --# derives Max_Right, --# Prod_No, --# Prod_Sum, --# Stat_No from ; -- Getter function Get_Stat_No return Integer; --# global in Stat_No; function Get_Prod_No return Sparklalr_Common.Production_Index; --# global in Prod_No; function Get_Prod_Sum (I : in Sparklalr_Common.Production_Index; J : in Positive) return Integer; --# global in Prod_Sum; function Get_Max_Right return Integer; --# global in Max_Right; private type Symbol_Set_T is array (Sparklalr_Common.Sym_Range) of Boolean; procedure Set_Union (A : in out Symbol_Set_T; B : in Symbol_Set_T; Elements_Added_To_A : out Boolean); --# derives A, --# Elements_Added_To_A from A, --# B; function Get_Symbol_Set (Symbol_Set : in Symbol_Set_T; I : in Sparklalr_Common.Sym_Range) return Boolean; subtype One_Two is Positive range 1 .. 2; type Prod_Sum_Row_T is array (One_Two) of Integer; type Prod_Sum_T is array (Sparklalr_Common.Production_Index) of Prod_Sum_Row_T; Stat_No : Integer; Prod_No : Sparklalr_Common.Production_Index; Prod_Sum : Prod_Sum_T; Max_Right : Integer; procedure Set_Stat_No (Val : in Integer); --# global out Stat_No; --# derives Stat_No from Val; procedure Set_Prod_No (Val : in Sparklalr_Common.Production_Index); --# global out Prod_No; --# derives Prod_No from Val; procedure Set_Prod_Sum (I : in Sparklalr_Common.Production_Index; J : in One_Two; Val : in Integer); --# global in out Prod_Sum; --# derives Prod_Sum from *, --# I, --# J, --# Val; procedure Set_Max_Right (Val : in Integer); --# global out Max_Right; --# derives Max_Right from Val; end Sparklalr_Memory; spark-2012.0.deb/sparklalr/sparklalr_memory.adb0000644000175000017500000000564111753202335020474 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
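-- Editorial sketch (not part of the SPARKLALR sources): Sparklalr_Memory
-- above announces its state with --# own, declares the variables in the
-- private part of the specification, and exposes them only through Get_/Set_
-- subprograms whose --# global and --# derives annotations tell the SPARK
-- Examiner exactly which state is read or written and what it depends on.
-- A hypothetical minimal package in the same style:
package Own_State_Sketch
--# own Total;
is
   function Get_Total return Integer;
   --# global in Total;

   procedure Set_Total (Value : in Integer);
   --# global out Total;
   --# derives Total from Value;
   -- Reads: the new value of Total is derived from Value and nothing else.

private
   Total : Integer;   -- the own variable itself, visible only to this package
end Own_State_Sketch;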
-- --============================================================================= package body Sparklalr_Memory is procedure Initialise is begin Stat_No := 0; Prod_Sum := Prod_Sum_T'(others => Prod_Sum_Row_T'(others => 0)); Prod_No := 2; Max_Right := 0; end Initialise; -- Getter function Get_Stat_No return Integer is begin return Stat_No; end Get_Stat_No; function Get_Prod_No return Sparklalr_Common.Production_Index is begin return Prod_No; end Get_Prod_No; function Get_Prod_Sum (I : in Sparklalr_Common.Production_Index; J : in Positive) return Integer is begin return Prod_Sum (I) (J); end Get_Prod_Sum; function Get_Max_Right return Integer is begin return Max_Right; end Get_Max_Right; procedure Set_Union (A : in out Symbol_Set_T; B : in Symbol_Set_T; Elements_Added_To_A : out Boolean) is begin Elements_Added_To_A := False; for Sym in Sparklalr_Common.Sym_Range loop if B (Sym) and then not A (Sym) then A (Sym) := True; Elements_Added_To_A := True; end if; end loop; end Set_Union; function Get_Symbol_Set (Symbol_Set : in Symbol_Set_T; I : in Sparklalr_Common.Sym_Range) return Boolean is begin return Symbol_Set (I); end Get_Symbol_Set; procedure Set_Stat_No (Val : in Integer) is begin Stat_No := Val; end Set_Stat_No; procedure Set_Prod_No (Val : in Sparklalr_Common.Production_Index) is begin Prod_No := Val; end Set_Prod_No; procedure Set_Prod_Sum (I : in Sparklalr_Common.Production_Index; J : in One_Two; Val : in Integer) is begin Prod_Sum (I) (J) := Val; end Set_Prod_Sum; procedure Set_Max_Right (Val : in Integer) is begin Max_Right := Val; end Set_Max_Right; end Sparklalr_Memory; spark-2012.0.deb/sparklalr/sparklalr_memory-left_corner.ads0000644000175000017500000000511711753202335023013 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
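-- Editorial sketch (not part of the SPARKLALR sources): Set_Union in the body
-- above merges symbol set B into A and reports, through Elements_Added_To_A,
-- whether A actually grew, so a caller can repeat the merge until nothing
-- changes.  The self-contained program below performs the same operation on
-- an arbitrary 8-element set; the bounds and test values are illustrative.
with Ada.Text_IO;

procedure Set_Union_Sketch is
   subtype Sym_Range is Integer range 1 .. 8;
   type Symbol_Set is array (Sym_Range) of Boolean;

   procedure Set_Union (A : in out Symbol_Set; B : in Symbol_Set; Added : out Boolean) is
   begin
      Added := False;
      for Sym in Sym_Range loop
         if B (Sym) and then not A (Sym) then
            A (Sym) := True;   -- element present only in B: add it to A
            Added := True;     -- remember that A changed
         end if;
      end loop;
   end Set_Union;

   A       : Symbol_Set := (1 | 3 => True, others => False);
   B       : Symbol_Set := (3 | 5 => True, others => False);
   Changed : Boolean;
begin
   Set_Union (A, B, Changed);
   Ada.Text_IO.Put_Line ("Changed = " & Boolean'Image (Changed));  -- prints TRUE: 5 was added
end Set_Union_Sketch;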
-- --============================================================================= with SPARK.Ada.Text_IO; --# inherit Command_Line_Options, --# SPARK.Ada.Text_IO, --# Sparklalr_Common, --# Sparklalr_Memory, --# Sparklalr_Memory.Dump, --# SPARK_Ada_Integer_Text_IO, --# Symbols_Dump; package Sparklalr_Memory.Left_Corner --# own State; is procedure Count_Left_Corners; --# global in Symbols_Dump.State; --# in out State; --# derives State from *, --# Symbols_Dump.State; procedure Gen_Left_Corner; --# global in Dump.State; --# in Symbols_Dump.State; --# out State; --# derives State from Dump.State, --# Symbols_Dump.State; procedure Out_Left_Corner (F : in out SPARK.Ada.Text_IO.File_Type); --# global in Command_Line_Options.State; --# in Dump.State; --# in State; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F from *, --# Command_Line_Options.State, --# Dump.State, --# State, --# Symbols_Dump.State & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State, --# State, --# Symbols_Dump.State; end Sparklalr_Memory.Left_Corner; spark-2012.0.deb/sparklalr/fatal.shs0000644000175000017500000000234311753202335016243 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= package Fatal is -- Command_Line_Error : exception; -- Raises an exception to be caught by the top level handler, which -- will stop the program, hence the postcondition False. procedure Stop_With_Command_Line_Exception; --# derives ; --# post False; end Fatal; spark-2012.0.deb/sparklalr/sparklalr_conflict.ads0000644000175000017500000001173011753202335021002 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with SPARK.Ada.Text_IO; --# inherit Command_Line_Options, --# SPARK.Ada.Text_IO, --# Sparklalr_Common, --# Sparklalr_Level, --# Sparklalr_Memory, --# Sparklalr_Memory.Dump, --# Sparklalr_Parser, --# Sparklalr_Patab, --# SPARK_Ada_Integer_Text_IO, --# Symbols_Dump; package Sparklalr_Conflict --# own State; is procedure Initialise; --# global out State; --# derives State from ; procedure Print_Conflicts (F : in out SPARK.Ada.Text_IO.File_Type); --# global in Sparklalr_Memory.Dump.State; --# in Sparklalr_Memory.Stat_No; --# in Sparklalr_Parser.State; --# in State; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Sparklalr_Memory.Dump.State, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# State, --# Symbols_Dump.State; procedure Conflict_Stats (Std_Out : in Boolean; F : in out SPARK.Ada.Text_IO.File_Type); --# global in State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# State, --# Std_Out; procedure Dump_Conflicts (F : in out SPARK.Ada.Text_IO.File_Type; Nstate : in Integer); --# global in State; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Nstate, --# State, --# Symbols_Dump.State; procedure Parse_Action_Generation (F : in out SPARK.Ada.Text_IO.File_Type); --# global in Command_Line_Options.State; --# in Sparklalr_Level.State; --# in Sparklalr_Memory.Stat_No; --# in Symbols_Dump.State; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out Sparklalr_Memory.Dump.State; --# in out Sparklalr_Parser.State; --# in out Sparklalr_Patab.State; --# in out State; --# derives F, --# Sparklalr_Memory.Dump.State, --# Sparklalr_Parser.State, --# Sparklalr_Patab.State, --# State from *, --# Command_Line_Options.State, --# Sparklalr_Level.State, --# Sparklalr_Memory.Dump.State, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# Symbols_Dump.State & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Command_Line_Options.State, --# Sparklalr_Level.State, --# Sparklalr_Memory.Dump.State, --# Sparklalr_Memory.Stat_No, --# Sparklalr_Parser.State, --# Sparklalr_Patab.State, --# Symbols_Dump.State; end Sparklalr_Conflict; spark-2012.0.deb/sparklalr/sparklalr_error.ads0000644000175000017500000000737211753202335020341 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
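-- Editorial sketch (not part of the SPARKLALR sources): the Fatal package
-- above declares a procedure that never returns normally, which SPARK
-- expresses with the postcondition False; its body raises an exception that
-- the program's top-level handler catches.  The hypothetical spec and body
-- below show one common way such a procedure is written, hiding the body
-- from the Examiner because raising exceptions lies outside the SPARK subset.
package Fatal_Sketch is
   procedure Stop_With_Error;
   --# derives ;
   --# post False;                     -- never returns normally
end Fatal_Sketch;

package body Fatal_Sketch is
   Command_Line_Error : exception;

   procedure Stop_With_Error is
   --# hide Stop_With_Error;           -- body hidden from the Examiner
   begin
      raise Command_Line_Error;        -- caught by the top-level handler
   end Stop_With_Error;
end Fatal_Sketch;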
-- --============================================================================= with SPARK.Ada.Text_IO; with Sparklalr_Common; --# inherit SPARK.Ada.Text_IO, --# Sparklalr_Char_Class, --# Sparklalr_Common, --# SPARK_Ada_Integer_Text_IO; package Sparklalr_Error --# own State; is Error_Max : constant := 25; subtype Error_Range is Natural range 0 .. Error_Max; subtype Err_Col_T is Natural range 0 .. Sparklalr_Common.Line_Length; procedure Initialise (F : in out SPARK.Ada.Text_IO.File_Type); --# global in Sparklalr_Char_Class.Charmap; --# out State; --# derives F, --# State from F, --# Sparklalr_Char_Class.Charmap; procedure Syn_Error (Error_Num : in Error_Range; Col : in Err_Col_T); --# global in out State; --# derives State from *, --# Col, --# Error_Num; procedure Error (F : in out SPARK.Ada.Text_IO.File_Type; N : in Integer); --# global in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out State; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# N & --# State from *; procedure Write_The_Line (F, Echo : in out SPARK.Ada.Text_IO.File_Type; Col : in out Err_Col_T); --# global in Sparklalr_Char_Class.Charmap; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out State; --# derives Col from & --# Echo, --# SPARK.Ada.Text_IO.The_Standard_Output, --# State from *, --# Col, --# State & --# F from *, --# Sparklalr_Char_Class.Charmap, --# State; procedure Set_Line_Out (I : in Err_Col_T; C : in Character); --# global in out State; --# derives State from *, --# C, --# I; procedure List_Line_Errors (F : in out SPARK.Ada.Text_IO.File_Type; Col : in Err_Col_T); --# global in out SPARK.Ada.Text_IO.The_Standard_Output; --# in out State; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Col, --# State & --# State from *; -- Getter function Get_Prod_Err return Boolean; --# global in State; end Sparklalr_Error; spark-2012.0.deb/sparklalr/sparklalr.smf0000644000175000017500000000055711753202335017144 0ustar eugeneugencommand_line_options.adb ees_sym.adb sparklalr_char_class.adb sparklalr_common.adb sparklalr_conflict.adb sparklalr_error.adb sparklalr_goto.adb sparklalr_input.adb spark_ada_integer_text_io.adb sparklalr_level.adb sparklalr_memory.adb sparklalr_memory-dump.adb sparklalr_memory-left_corner.adb sparklalr_parser.adb sparklalr_patab.adb symbols_dump.adb sparklalr.adb spark-2012.0.deb/sparklalr/symbols_dump.adb0000644000175000017500000005521411753202335017627 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Command_Line_Options; with SPARK_Ada_Integer_Text_IO; with SPARK.Ada.Command_Line; with SPARK.Ada.Strings.Unbounded; with SPARK.Ada.Text_IO.Unbounded_String; with Sparklalr_Level; use type SPARK.Ada.Text_IO.Exception_T; package body Symbols_Dump --# own State is Nnon_Terms, --# Nterms, --# Nterm_Set, --# Term_Set; is type Nterm_Set_T is array (Sparklalr_Common.Non_Term_Range) of Sparklalr_Common.Id_Name; type Term_Set_T is array (Sparklalr_Common.Term_Range) of Sparklalr_Common.Id_Name; Nterm_Set : Nterm_Set_T; Term_Set : Term_Set_T; Nterms, Nnon_Terms : Integer; procedure Initialise --# global out Nnon_Terms; --# out Nterms; --# out Nterm_Set; --# out Term_Set; --# derives Nnon_Terms, --# Nterms, --# Nterm_Set, --# Term_Set from ; is begin Nterms := -1; Nnon_Terms := 0; Nterm_Set := Nterm_Set_T'(others => Sparklalr_Common.Id_Name'(others => ' ')); Term_Set := Term_Set_T'(others => Sparklalr_Common.Id_Name'(others => ' ')); end Initialise; procedure Define (Tnt : in Boolean; F : in out SPARK.Ada.Text_IO.File_Type; Gram_Rules : in Boolean; Token : in Sparklalr_Common.Id_Name; Col : in Sparklalr_Error.Err_Col_T; Result_Define : out Integer) -- DEFINES A NEW (NON) TERMINAL --# global in out Nnon_Terms; --# in out Nterms; --# in out Nterm_Set; --# in out Sparklalr_Error.State; --# in out Sparklalr_Level.State; --# in out Term_Set; --# derives F from *, --# Nnon_Terms, --# Tnt & --# Nnon_Terms, --# Nterms from *, --# Tnt & --# Nterm_Set from *, --# Nnon_Terms, --# Tnt, --# Token & --# Result_Define from Nnon_Terms, --# Nterms, --# Tnt & --# Sparklalr_Error.State from *, --# Col, --# Nnon_Terms, --# Nterms, --# Tnt & --# Sparklalr_Level.State from *, --# Gram_Rules, --# Nterms, --# Tnt & --# Term_Set from *, --# Nterms, --# Tnt, --# Token; is begin Result_Define := 0; if Tnt then if Nnon_Terms >= Sparklalr_Common.Non_Term_Lim then Sparklalr_Error.Syn_Error (15, Col); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Nnon_Terms, Width => 6, Base => 10); SPARK.Ada.Text_IO.Put_File (File => F, Item => " NONTERMINALS ("); SPARK_Ada_Integer_Text_IO.Put_File (File => F, Item => Sparklalr_Common.Non_Term_Lim, Width => 1, Base => 10); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " MAX)"); else Nnon_Terms := Nnon_Terms + 1; Nterm_Set (Nnon_Terms) := Token; Result_Define := Sparklalr_Common.Nt_Base + Nnon_Terms; end if; else if Nterms >= Sparklalr_Common.Term_Lim then Sparklalr_Error.Syn_Error (16, Col); else Nterms := Nterms + 1; Term_Set (Nterms) := Token; if Gram_Rules then Sparklalr_Level.Initiate_Term_Lev (Nterms); end if; Result_Define := Nterms; end if; end if; end Define; procedure Print_Sym (F : in out SPARK.Ada.Text_IO.File_Type; Sym : in Sparklalr_Common.Sym_Range; Posn : in out Integer; Tab : in Integer; Comm : in Boolean) --# global in Nterm_Set; --# in Term_Set; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Comm, --# Nterm_Set, --# Posn, --# Sym, --# Tab, --# Term_Set & --# Posn from *, --# Nterm_Set, --# Sym, --# Tab, --# Term_Set; is Id : Sparklalr_Common.Id_Name; I : Sparklalr_Common.Id_Length_Count; begin if Sym > Sparklalr_Common.Nt_Base then Id := Nterm_Set (Sym - Sparklalr_Common.Nt_Base); else Id := Term_Set (Sym); end if; I := Sparklalr_Common.Id_Length; while Id (I) = ' ' loop I := I - 1; end loop; if (Posn + I) > (Sparklalr_Common.Page_Width - 2) then SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); 
if Comm then SPARK.Ada.Text_IO.Put_File (File => F, Item => "--"); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => Tab - 2); else Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => Tab); end if; Posn := I + Tab; else Posn := Posn + I; end if; SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ' '); for J in Integer range 1 .. I loop SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => Id (J)); end loop; end Print_Sym; procedure Print_String_Sym (F : in out SPARK.Ada.Text_IO.File_Type; String_Var : in Sparklalr_Common.Id_Name; Sym : in Sparklalr_Common.Sym_Range; Posn : in out Integer; Tab : in Integer; Comm : in Boolean) --# global in Nterm_Set; --# in Term_Set; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Comm, --# Nterm_Set, --# Posn, --# String_Var, --# Sym, --# Tab, --# Term_Set & --# Posn from *, --# Nterm_Set, --# String_Var, --# Sym, --# Tab, --# Term_Set; is Id : Sparklalr_Common.Id_Name; I, J : Sparklalr_Common.Id_Length_Count; begin I := Sparklalr_Common.Id_Length; while String_Var (I) = ' ' loop I := I - 1; end loop; if Sym > Sparklalr_Common.Nt_Base then Id := Nterm_Set (Sym - Sparklalr_Common.Nt_Base); else Id := Term_Set (Sym); end if; J := Sparklalr_Common.Id_Length; while Id (J) = ' ' loop J := J - 1; end loop; if ((Posn + I) + J) > (Sparklalr_Common.Page_Width - 2) then SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); if Comm then SPARK.Ada.Text_IO.Put_File (File => F, Item => "--"); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => Tab - 2); else Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => Tab); end if; Posn := (I + J) + Tab; else Posn := (Posn + I) + J; end if; SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ' '); for K in Integer range 1 .. I loop SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => String_Var (K)); end loop; for K in Integer range 1 .. J loop SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => Id (K)); end loop; end Print_String_Sym; procedure Symbol_Strings_Out --# global in Command_Line_Options.State; --# in Nnon_Terms; --# in Nterms; --# in Nterm_Set; --# in Term_Set; --# in out SPARK.Ada.Command_Line.State; --# in out SPARK.Ada.Text_IO.The_Standard_Error; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives SPARK.Ada.Command_Line.State, --# SPARK.Ada.Text_IO.The_Standard_Error from *, --# Command_Line_Options.State, --# Nnon_Terms, --# Nterms, --# Nterm_Set, --# Term_Set & --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Nnon_Terms, --# Nterms, --# Nterm_Set, --# Term_Set; is Sp_Symbols : SPARK.Ada.Text_IO.File_Type; Posn : Integer; begin SPARK.Ada.Text_IO.Unbounded_String.Create (File => Sp_Symbols, Mode => SPARK.Ada.Text_IO.Out_File, Name => SPARK.Ada.Strings.Unbounded.Concat_Unbounded_String_String (Left => Command_Line_Options.Get_File_Name, Right => ".SYM"), Form => SPARK.Ada.Strings.Unbounded.Null_Unbounded_String); if SPARK.Ada.Text_IO.Get_Last_Exception_File (File => Sp_Symbols) /= SPARK.Ada.Text_IO.No_Exception then SPARK.Ada.Text_IO.Put_Error (Item => "Unable to open output SYM file"); SPARK.Ada.Command_Line.Set_Exit_Status (SPARK.Ada.Command_Line.Failure); end if; SPARK.Ada.Text_IO.Put_Line_File (File => Sp_Symbols, Item => "&TERMINALS"); for Sym in Integer range 0 .. 
Nterms loop Posn := 1; --# accept F, 10, Posn, "Ineffective assignment here expected and OK"; Print_Sym (Sp_Symbols, Sym, Posn, 1, False); --# end accept; SPARK.Ada.Text_IO.New_Line_File (File => Sp_Symbols, Spacing => 1); end loop; SPARK.Ada.Text_IO.Put_Line_File (File => Sp_Symbols, Item => "&NONTERMINALS"); for Sym in Integer range Sparklalr_Common.Nt_Base + 1 .. Sparklalr_Common.Nt_Base + Nnon_Terms loop Posn := 1; --# accept F, 10, Posn, "Ineffective assignment here expected and OK"; Print_Sym (Sp_Symbols, Sym, Posn, 5, False); --# end accept; SPARK.Ada.Text_IO.New_Line_File (File => Sp_Symbols, Spacing => 1); end loop; SPARK.Ada.Text_IO.Put_Line_File (File => Sp_Symbols, Item => "&END"); SPARK.Ada.Text_IO.Close (File => Sp_Symbols); if SPARK.Ada.Text_IO.Get_Last_Exception_File (File => Sp_Symbols) /= SPARK.Ada.Text_IO.No_Exception then SPARK.Ada.Text_IO.Put_Error (Item => "Unable to close output SYM file"); SPARK.Ada.Command_Line.Set_Exit_Status (SPARK.Ada.Command_Line.Failure); end if; end Symbol_Strings_Out; procedure Find (Tnt : in Boolean; F : in out SPARK.Ada.Text_IO.File_Type; Gram_Rules : in Boolean; Token : in Sparklalr_Common.Id_Name; Col : in Sparklalr_Error.Err_Col_T; Result_Find : out Integer) -- FINDS A (NON) TERMINAL --# global in out Nnon_Terms; --# in out Nterms; --# in out Nterm_Set; --# in out Sparklalr_Error.State; --# in out Sparklalr_Level.State; --# in out Term_Set; --# derives F from *, --# Nnon_Terms, --# Nterms, --# Nterm_Set, --# Term_Set, --# Tnt, --# Token & --# Nnon_Terms, --# Nterms, --# Nterm_Set, --# Result_Find, --# Term_Set from Nnon_Terms, --# Nterms, --# Nterm_Set, --# Term_Set, --# Tnt, --# Token & --# Sparklalr_Error.State from *, --# Col, --# Nnon_Terms, --# Nterms, --# Nterm_Set, --# Term_Set, --# Tnt, --# Token & --# Sparklalr_Level.State from *, --# Gram_Rules, --# Nnon_Terms, --# Nterms, --# Nterm_Set, --# Term_Set, --# Tnt, --# Token; is I : Integer; Found : Boolean; begin Result_Find := 0; Found := False; I := 1; while (I <= Nterms) and then not Found loop if Term_Set (I) = Token then Result_Find := I; Found := True; else I := I + 1; end if; end loop; if not Found then I := 1; Found := False; while (I <= Nnon_Terms) and then not Found loop if Nterm_Set (I) = Token then Found := True; Result_Find := I + Sparklalr_Common.Nt_Base; else I := I + 1; end if; end loop; if not Found then Define (Tnt, F, Gram_Rules, Token, Col, Result_Find); end if; end if; end Find; -- -------- The following procedures print out the SPARK parser tables ---- procedure Symbols_Package_Out (F : in out SPARK.Ada.Text_IO.File_Type) --# global in Nnon_Terms; --# in Nterms; --# in Nterm_Set; --# in Term_Set; --# in out SPARK.Ada.Text_IO.The_Standard_Output; --# derives F, --# SPARK.Ada.Text_IO.The_Standard_Output from *, --# Nnon_Terms, --# Nterms, --# Nterm_Set, --# Term_Set; is Posn : Integer; begin SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "package SP_Symbols is"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " type SP_Symbol is ("); Posn := 21; Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 21); for I in Integer range 0 .. 
Nterms loop Print_Sym (F, I, Posn, 21, False); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ','); Posn := Posn + 2; end loop; SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); Sparklalr_Common.Put_N_Chars (Std_Out => False, F => F, C => ' ', N => 21); Posn := 21; for I in Integer range Sparklalr_Common.Nt_Base + 1 .. (Sparklalr_Common.Nt_Base + Nnon_Terms) - 1 loop Print_Sym (F, I, Posn, 21, False); SPARK.Ada.Text_IO.Put_Character_File (File => F, Item => ','); Posn := Posn + 2; end loop; --# accept F, 10, Posn, "Ineffective assignment here expected and OK"; Print_Sym (F, Sparklalr_Common.Nt_Base + Nnon_Terms, Posn, 21, False); --# end accept; SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => " );"); SPARK.Ada.Text_IO.Put_File (File => F, Item => " subtype SP_Terminal is SP_Symbol range"); Posn := 40; Print_Sym (F, 0, Posn, 10, False); SPARK.Ada.Text_IO.Put_File (File => F, Item => " .."); Posn := Posn + 2; --# accept F, 10, Posn, "Ineffective assignment here expected and OK"; Print_Sym (F, Nterms, Posn, 10, False); --# end accept; SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ";"); SPARK.Ada.Text_IO.Put_File (File => F, Item => " subtype SP_Non_Terminal is SP_Symbol range"); Posn := 42; Print_Sym (F, Sparklalr_Common.Nt_Base + 1, Posn, 10, False); SPARK.Ada.Text_IO.Put_File (File => F, Item => " .."); Posn := Posn + 2; --# accept F, 10, Posn, "Ineffective assignment here expected and OK"; Print_Sym (F, Sparklalr_Common.Nt_Base + Nnon_Terms, Posn, 10, False); --# end accept; SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ";"); SPARK.Ada.Text_IO.Put_File (File => F, Item => " subtype SP_Grammar_Non_Terminal is SP_Symbol range"); Posn := 42; Print_Sym (F, Sparklalr_Common.Nt_Base + 2, Posn, 10, False); SPARK.Ada.Text_IO.Put_File (File => F, Item => " .."); Posn := Posn + 2; --# accept F, 10, Posn, "Ineffective assignment here expected and OK"; Print_Sym (F, Sparklalr_Common.Nt_Base + Nnon_Terms, Posn, 10, False); --# end accept; SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => ";"); SPARK.Ada.Text_IO.New_Line_File (File => F, Spacing => 1); SPARK.Ada.Text_IO.Put_Line_File (File => F, Item => "end SP_Symbols;"); end Symbols_Package_Out; function Get_Nterms return Integer --# global in Nterms; is begin return Nterms; end Get_Nterms; function Get_Nnon_Terms return Integer --# global in Nnon_Terms; is begin return Nnon_Terms; end Get_Nnon_Terms; function Get_Term_Set (I : in Sparklalr_Common.Term_Range) return Sparklalr_Common.Id_Name --# global in Term_Set; is begin return Term_Set (I); end Get_Term_Set; function Get_Nterm_Set (I : in Sparklalr_Common.Non_Term_Range) return Sparklalr_Common.Id_Name --# global in Nterm_Set; is begin return Nterm_Set (I); end Get_Nterm_Set; end Symbols_Dump; spark-2012.0.deb/COPYING30000644000175000017500000010451311753202341013557 0ustar eugeneugen GNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007 Copyright (C) 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The GNU General Public License is a free, copyleft license for software and other kinds of works. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. 
By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. "This License" refers to version 3 of the GNU General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. 
To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. 
The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. 
This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. 
A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. 
When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. 
Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. 
Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. "Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. 
For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. 
If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. <one line to give the program's name and a brief idea of what it does.> Copyright (C) <year> <name of author> This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. Also add information on how to contact you by electronic and paper mail. If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: <program> Copyright (C) <year> <name of author> This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an "about box". You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see <http://www.gnu.org/licenses/>. The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read <http://www.gnu.org/philosophy/why-not-lgpl.html>.
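As a concrete illustration of the interactive-mode notice described above, the following minimal Ada program is an editorial sketch only (not part of the SPARK distribution); the unit name Show_Notice and the placeholder program name are invented:

with Ada.Text_IO;
procedure Show_Notice is
   use Ada.Text_IO;
begin
   --  Print the short start-up notice that the licence text recommends
   --  for programs doing terminal interaction.
   Put_Line ("Example_Tool  Copyright (C) <year>  <name of author>");
   Put_Line ("This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.");
   Put_Line ("This is free software, and you are welcome to redistribute it");
   Put_Line ("under certain conditions; type `show c' for details.");
end Show_Notice;

In a real tool the `show w' and `show c' commands would then display the relevant parts of the licence, as the text above explains.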
spark-2012.0.deb/sparkclean/0000755000175000017500000000000011753203756014573 5ustar eugeneugenspark-2012.0.deb/sparkclean/sparkclean.smf0000644000175000017500000000005211753202340017406 0ustar eugeneugenfiles.adb command_line.adb sparkclean.adb spark-2012.0.deb/sparkclean/all.wrn0000644000175000017500000000014611753202340016060 0ustar eugeneugen-- Warning control file for sparkclean hidden_parts notes pragma all with_clauses declare_annotations spark-2012.0.deb/sparkclean/Makefile0000644000175000017500000000425411753202340016224 0ustar eugeneugen# ----------------------------------------------------------------------------- # (C) Altran Praxis Limited # ----------------------------------------------------------------------------- # # The SPARK toolset is free software; you can redistribute it and/or modify it # under terms of the GNU General Public License as published by the Free # Software Foundation; either version 3, or (at your option) any later # version. The SPARK toolset is distributed in the hope that it will be # useful, but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the GNU # General Public License distributed with the SPARK toolset; see file # COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of # the license. # # ============================================================================= ################################################################################ # PURPOSE #------------------------------------------------------------------------------- # Makefile for sparkclean # For Linux, Windows, Solaris or Mac OS X (aka Darwin). ################################################################################ ################################################################################ # BUILD CONFIGURATION ################################################################################ # Name of the output program. OUTPUT_NAME:=sparkclean # Location of root. ROOT:=.. # Location of common. COMMON:=${ROOT}/common include ${COMMON}/Makefile.inc ################################################################################ # TARGETS ################################################################################ all: ${OUTPUT_NAME}${EXE_EXTN} ${OUTPUT_NAME}${EXE_EXTN}: gnatmake -j${SPARKCPUS} ${GNATMAKE_OPTS} ${OUTPUT_NAME} -o $@ -bargs ${BIND_OPTS} self-analysis: -spark -plain @${OUTPUT_NAME}.smf # Cleaning code base # ================== clean: standardclean reallyclean: clean targetclean vcclean ################################################################################ # END-OF-FILE spark-2012.0.deb/sparkclean/sparkclean.adb0000644000175000017500000002254311753202340017360 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Command_Line; with Files; with SPARK.Ada.Command_Line.Unbounded_String; with SPARK.Ada.Command_Line; with SPARK.Ada.Strings.Unbounded; with SPARK.Ada.Text_IO.Unbounded_String; with SPARK.Ada.Text_IO; with Version; with GNAT.Directory_Operations; with GNAT.OS_Lib; use type Command_Line.Status_T; --# inherit Command_Line, --# Files, --# SPARK.Ada.Command_Line, --# SPARK.Ada.Command_Line.Unbounded_String, --# SPARK.Ada.Strings.Unbounded, --# SPARK.Ada.Text_IO, --# SPARK.Ada.Text_IO.Unbounded_String, --# Version; -- This is the main program for SPARK Clean. -- -- We have one hidden procedure, Do_Clean, that does as little as -- possible: it traverses the current directory tree and calls -- Should_Clean on each file. If this function returns true then the -- file is deleted. -- -- The rest is fully written in SPARK. Overall we do the following: -- -- 1. Parse command-line -- 2. Print help message if requested or if an error was found in the -- command line. -- 3. Traverse current directory tree and delete requested files. If -- no specific file type has been requested (sparkclean has been -- invoked without arguments) then all files we know about will -- be deleted. -- 4. Set the return code to zero or non-zero as appropriate. --# main_program; procedure SPARKClean --# global in out SPARK.Ada.Command_Line.State; --# in out SPARK.Ada.Text_IO.The_Standard_Error; --# in out SPARK.Ada.Text_IO.The_Standard_Output; is CL : Command_Line.Data_T; CL_Status : Command_Line.Status_T; Overall_Success : Boolean; -- This function attempts to work out a file's extension. It -- should do something like this: -- "foobar.vcg" -> "vcg" -- ".vcg" -> "vcg" -- "foobar" -> "" -- "fail." -> "" -- "foo.bar.baz" -> "baz" function Get_Extension (File_Name : in String) return SPARK.Ada.Strings.Unbounded.Unbounded_String is Ext : SPARK.Ada.Strings.Unbounded.Unbounded_String; Ext_Starts_At : Natural; begin -- Search for the last '.' in the filename. Ext_Starts_At := 0; --# check File_Name'First > Ext_Starts_At; for N in reverse Natural range File_Name'Range loop --# assert N in File_Name'Range; if File_Name (N) = '.' then Ext_Starts_At := N; exit; end if; end loop; -- Copy and return the extension, if it exists. Ext := SPARK.Ada.Strings.Unbounded.Get_Null_Unbounded_String; if Ext_Starts_At >= 1 and Ext_Starts_At < File_Name'Last then for N in Positive range Ext_Starts_At + 1 .. File_Name'Last loop --# assert N in File_Name'Range --# and SPARK.Ada.Strings.Unbounded.Get_Length (Ext) < N; SPARK.Ada.Strings.Unbounded.Append_Char (Ext, File_Name (N)); end loop; end if; return Ext; end Get_Extension; -- Using the CL (command-line data), this function checks if the -- given file should be deleted based on its extension. function Should_Clean (File_Name : in String) return Boolean --# global in CL; --# in CL_Status; --# pre CL_Status = Command_Line.Ok; is File_Type : Files.File_Types; Result : Boolean; begin --# accept F, 30, CL_Status, "This is only needed for partial correctness." 
& --# F, 50, CL_Status, "This is only needed in the precondition"; File_Type := Files.Filetype_From_Extension (Get_Extension (File_Name)); case File_Type is when Files.Unknown => Result := False; when others => Result := CL.Files_To_Delete (File_Type); end case; return Result; end Should_Clean; -- This is a hidden procedure which will go through all files in -- the current directory and all subdirectories. For each file it -- will call Should_Clean above to decide if the file should be -- deleted. procedure Do_Clean --# global in CL; --# in CL_Status; --# in out Overall_Success; --# pre CL_Status = Command_Line.Ok and --# Overall_Success; is --# hide Do_Clean; Common_Prefix : constant GNAT.Directory_Operations.Dir_Name_Str := GNAT.Directory_Operations.Get_Current_Dir; procedure Scan_Directory (Dir : in GNAT.Directory_Operations.Dir_Name_Str) is D : GNAT.Directory_Operations.Dir_Type; Str : String (1 .. 4096); Last : Natural; Success : Boolean; begin GNAT.Directory_Operations.Open (D, Dir); loop GNAT.Directory_Operations.Read (D, Str, Last); exit when Last = 0; declare F : constant String := Dir & Str (1 .. Last); begin if GNAT.OS_Lib.Is_Directory (F) then -- Ignore "." and ".." if ((Last = 1) and then (Str (1) = '.')) or ((Last = 2) and then (Str (1) = '.' and Str (2) = '.')) then null; else -- Recurse Scan_Directory (F & GNAT.OS_Lib.Directory_Separator); end if; elsif Should_Clean (F) then GNAT.OS_Lib.Delete_File (F, Success); if not Success then SPARK.Ada.Text_IO.Put_Error ("Failed to delete file "); SPARK.Ada.Text_IO.Put_Line_Error (F (Common_Prefix'Last + 1 .. F'Last)); -- Flag that some error has occurred. Overall_Success := False; else SPARK.Ada.Text_IO.Put_Output ("Deleted: "); SPARK.Ada.Text_IO.Put_Line_Output (F (Common_Prefix'Last + 1 .. F'Last)); end if; end if; end; end loop; GNAT.Directory_Operations.Close (D); exception when others => GNAT.Directory_Operations.Close (D); raise; end Scan_Directory; begin Scan_Directory (Common_Prefix); end Do_Clean; begin -- Parse command line arguments. Command_Line.Initialize (CL, CL_Status); Overall_Success := CL_Status /= Command_Line.Error; -- Do what is requested. case CL_Status is when Command_Line.Ok => -- Clean files. Do_Clean; -- It is possible that we have failed to delete a file. if not Overall_Success then SPARK.Ada.Text_IO.Put_Line_Error ("Error: One or more files could not be deleted."); SPARK.Ada.Text_IO.Put_Line_Error (" Re-running sparkclean should tell you which."); end if; when Command_Line.Help | Command_Line.Error => -- Print usage. 
SPARK.Ada.Text_IO.Put_Output ("SPARKClean "); SPARK.Ada.Text_IO.Put_Line_Output (Version.Toolset_Banner_Line); SPARK.Ada.Text_IO.Put_Output ("Usage: "); SPARK.Ada.Text_IO.Unbounded_String.Put_Output (SPARK.Ada.Command_Line.Unbounded_String.Command_Name); SPARK.Ada.Text_IO.Put_Line_Output (" [OPTION]..."); SPARK.Ada.Text_IO.Put_Line_Output ("Automatically delete files generated by the SPARK tools."); SPARK.Ada.Text_IO.Put_Line_Output (""); SPARK.Ada.Text_IO.Put_Line_Output (" -examiner delete VCs, DPCs and reports/listings"); SPARK.Ada.Text_IO.Put_Line_Output (" -simplifier delete simplified VCs and DPCs"); SPARK.Ada.Text_IO.Put_Line_Output (" -victor delete victor proof logs"); SPARK.Ada.Text_IO.Put_Line_Output (" -pogs delete pogs summary files"); SPARK.Ada.Text_IO.Put_Line_Output (""); SPARK.Ada.Text_IO.Put_Line_Output ("With no OPTION given, all files generated by the SPARK tools will"); SPARK.Ada.Text_IO.Put_Line_Output ("be deleted."); SPARK.Ada.Text_IO.Put_Line_Output (""); SPARK.Ada.Text_IO.Put_Line_Output (Version.Toolset_Support_Line1); SPARK.Ada.Text_IO.Put_Line_Output (Version.Toolset_Support_Line2); SPARK.Ada.Text_IO.Put_Line_Output (Version.Toolset_Support_Line3); SPARK.Ada.Text_IO.Put_Line_Output (Version.Toolset_Support_Line4); end case; -- Set exit status. if Overall_Success then SPARK.Ada.Command_Line.Set_Exit_Status (SPARK.Ada.Command_Line.Success); else SPARK.Ada.Command_Line.Set_Exit_Status (SPARK.Ada.Command_Line.Failure); end if; end SPARKClean; spark-2012.0.deb/sparkclean/spark.sw0000644000175000017500000000026711753202340016257 0ustar eugeneugen-sparklib -output_directory=vcg -config_file=../common/gnat.cfg -nolisting -casing -index_file=sparkclean.idx -report=sparkclean.rep -flow=auto -vcg -dpc -rules=lazy -warning=all.wrn spark-2012.0.deb/sparkclean/command_line.ads0000644000175000017500000000427511753202340017705 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Files; --# inherit Files, --# SPARK.Ada.Command_Line, --# SPARK.Ada.Command_Line.Unbounded_String, --# SPARK.Ada.Strings.Unbounded, --# SPARK.Ada.Text_IO, --# SPARK.Ada.Text_IO.Unbounded_String; package Command_Line is type File_Type_Map is array (Files.Known_File_Types) of Boolean; type Data_T is record Files_To_Delete : File_Type_Map; end record; type Status_T is (Ok, Error, Help); -- Parse command line arguments and options. Status can be one of -- the following: -- * Ok - All is well, we can continue. -- * Help - No errors, but we should print a help text and abort. -- * Error - Some error occurred, we should print a help text and -- abort with a non-zero return code. 
procedure Initialize (Data : out Data_T; Status : out Status_T); --# global in SPARK.Ada.Command_Line.State; --# in out SPARK.Ada.Text_IO.The_Standard_Error; --# derives Data, --# Status from SPARK.Ada.Command_Line.State & --# SPARK.Ada.Text_IO.The_Standard_Error from *, --# SPARK.Ada.Command_Line.State; end Command_Line; spark-2012.0.deb/sparkclean/sparkclean.idx0000644000175000017500000000072711753202340017416 0ustar eugeneugen------------------------------------------------------------------------------ -- Version ------------------------------------------------------------------------------ version spec is in ../common/versioning/version.ads ------------------------------------------------------------------------------ -- sparkclean ------------------------------------------------------------------------------ command_line spec is in command_line.ads files spec is in files.ads spark-2012.0.deb/sparkclean/files.ads0000644000175000017500000000335711753202340016362 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SPARK.Ada.Strings.Unbounded; --# inherit SPARK.Ada.Strings.Unbounded; package Files is type File_Types is ( Examiner_Files, -- vcg, dpc, fdl, rls Simplifier_Files, -- siv, sdp, slg, zlg, log, zsl Victor_Files, -- vct, vsm POGS_Files, -- sum Unknown -- anything else (must always be last) ); subtype Known_File_Types is File_Types range File_Types'First .. File_Types'Pred (Unknown); -- Given an extension (such as 'vcg') this function returns the -- type of the file as one of File_Types. function Filetype_From_Extension (Extension : in SPARK.Ada.Strings.Unbounded.Unbounded_String) return File_Types; end Files; spark-2012.0.deb/sparkclean/vcg/0000755000175000017500000000000011753203756015352 5ustar eugeneugenspark-2012.0.deb/sparkclean/command_line.adb0000644000175000017500000001710311753202340017656 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
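To make the extension handling concrete, here is a plain-Ada sketch; it is an editorial illustration only, not part of the package, and the names Classify_Demo, Extension and Classify are invented. It mirrors what Get_Extension in sparkclean.adb and Files.Filetype_From_Extension are documented to do, using Ada.Strings.Fixed in place of the SPARK.Ada.Strings.Unbounded wrappers. The extension lists are taken from the comments on File_Types above; the real body may recognise additional extensions.

with Ada.Strings.Fixed;
with Ada.Text_IO;
procedure Classify_Demo is
   type File_Types is
     (Examiner_Files, Simplifier_Files, Victor_Files, POGS_Files, Unknown);

   --  Documented behaviour of Get_Extension:
   --  "foobar.vcg" -> "vcg", ".vcg" -> "vcg", "foobar" -> "",
   --  "fail." -> "", "foo.bar.baz" -> "baz"
   function Extension (File_Name : String) return String is
      Dot : constant Natural :=
        Ada.Strings.Fixed.Index (File_Name, ".", Going => Ada.Strings.Backward);
   begin
      if Dot = 0 or else Dot = File_Name'Last then
         return "";
      else
         return File_Name (Dot + 1 .. File_Name'Last);
      end if;
   end Extension;

   --  Map an extension to a file category, as listed in the File_Types comments.
   function Classify (Ext : String) return File_Types is
   begin
      if Ext = "vcg" or Ext = "dpc" or Ext = "fdl" or Ext = "rls" then
         return Examiner_Files;
      elsif Ext = "siv" or Ext = "sdp" or Ext = "slg"
        or Ext = "zlg" or Ext = "log" or Ext = "zsl"
      then
         return Simplifier_Files;
      elsif Ext = "vct" or Ext = "vsm" then
         return Victor_Files;
      elsif Ext = "sum" then
         return POGS_Files;
      else
         return Unknown;
      end if;
   end Classify;
begin
   Ada.Text_IO.Put_Line (File_Types'Image (Classify (Extension ("foobar.vcg"))));
   Ada.Text_IO.Put_Line (File_Types'Image (Classify (Extension ("foo.bar.baz"))));
end Classify_Demo;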
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SPARK.Ada.Command_Line; with SPARK.Ada.Command_Line.Unbounded_String; with SPARK.Ada.Strings.Unbounded; with SPARK.Ada.Text_IO.Unbounded_String; package body Command_Line is -- This procedure can be used to parse a command-line argument, -- S. Arguments starts with a dash '-' and can contain an optional -- value. For example: -- -- "-foo" -> Has_Value = False -- Option = "foo" -- Value = "" -- -- "-foo=bar" -> Has_Value = True -- Option = "foo" -- Value = "bar" -- -- Valid is set to false if the S does not start with a dash '-' -- or the value given is blank. procedure Parse_Option (S : in SPARK.Ada.Strings.Unbounded.Unbounded_String; Valid : out Boolean; Has_Value : out Boolean; Option : out SPARK.Ada.Strings.Unbounded.Unbounded_String; Value : out SPARK.Ada.Strings.Unbounded.Unbounded_String) --# derives Has_Value, --# Option, --# Valid, --# Value from S; is In_Name : Boolean := True; In_Sep : Boolean := True; C : Character; begin Valid := SPARK.Ada.Strings.Unbounded.Get_Length (S) >= 2; Has_Value := False; Option := SPARK.Ada.Strings.Unbounded.Get_Null_Unbounded_String; Value := SPARK.Ada.Strings.Unbounded.Get_Null_Unbounded_String; -- Parse option. for N in Natural range 1 .. SPARK.Ada.Strings.Unbounded.Get_Length (S) loop --# assert N <= SPARK.Ada.Strings.Unbounded.Get_Length (S) --# and SPARK.Ada.Strings.Unbounded.Get_Length (S) <= Natural'Last --# and SPARK.Ada.Strings.Unbounded.Get_Length (Option) < N --# and SPARK.Ada.Strings.Unbounded.Get_Length (Value) < N; C := SPARK.Ada.Strings.Unbounded.Get_Element (Source => S, Index => N); if N = 1 then -- Options must start with a dash. Valid := (C = '-'); elsif N > 1 and In_Name then if C = '=' then In_Name := False; Has_Value := True; else SPARK.Ada.Strings.Unbounded.Append_Char (Option, C); end if; elsif N > 1 and In_Sep then In_Sep := False; elsif N > 1 then SPARK.Ada.Strings.Unbounded.Append_Char (Value, C); end if; end loop; -- If we have an option with a value, it can't be empty. if Has_Value and SPARK.Ada.Strings.Unbounded.Get_Length (Value) = 0 then Valid := False; end if; end Parse_Option; procedure Initialize (Data : out Data_T; Status : out Status_T) is Opt_Name : SPARK.Ada.Strings.Unbounded.Unbounded_String; Opt_Value : SPARK.Ada.Strings.Unbounded.Unbounded_String; Opt_Has_Value : Boolean; Opt_Is_Valid : Boolean; Number_Of_Arguments : Natural; begin Number_Of_Arguments := SPARK.Ada.Command_Line.Argument_Count; if SPARK.Ada.Command_Line.Argument_Count = 0 then -- Delete all the files by default. Data := Data_T'(Files_To_Delete => File_Type_Map'(others => True)); Status := Ok; else -- Delete only the files requested. Data := Data_T'(Files_To_Delete => File_Type_Map'(others => False)); Status := Ok; -- Check arguments. Each argument flags up one set of files -- to delete. for N in Positive range 1 .. 
Number_Of_Arguments loop --# accept F, 10, Opt_Value, "So far no arguments have a value..."; Parse_Option (S => SPARK.Ada.Command_Line.Unbounded_String.Argument (N), Valid => Opt_Is_Valid, Has_Value => Opt_Has_Value, Option => Opt_Name, Value => Opt_Value); --# end accept; --# assert Number_Of_Arguments = Number_Of_Arguments% --# and Number_Of_Arguments = SPARK.Ada.Command_Line.Argument_Count (SPARK.Ada.Command_Line.State) --# and Status = Ok; if Opt_Is_Valid then if SPARK.Ada.Strings.Unbounded.Equal_Unbounded_String_String (Opt_Name, "examiner") then Opt_Is_Valid := not Opt_Has_Value; Opt_Is_Valid := Opt_Is_Valid and not Data.Files_To_Delete (Files.Examiner_Files); Data.Files_To_Delete (Files.Examiner_Files) := True; elsif SPARK.Ada.Strings.Unbounded.Equal_Unbounded_String_String (Opt_Name, "simplifier") then Opt_Is_Valid := not Opt_Has_Value; Opt_Is_Valid := Opt_Is_Valid and not Data.Files_To_Delete (Files.Simplifier_Files); Data.Files_To_Delete (Files.Simplifier_Files) := True; elsif SPARK.Ada.Strings.Unbounded.Equal_Unbounded_String_String (Opt_Name, "victor") then Opt_Is_Valid := not Opt_Has_Value; Opt_Is_Valid := Opt_Is_Valid and not Data.Files_To_Delete (Files.Victor_Files); Data.Files_To_Delete (Files.Victor_Files) := True; elsif SPARK.Ada.Strings.Unbounded.Equal_Unbounded_String_String (Opt_Name, "pogs") then Opt_Is_Valid := not Opt_Has_Value; Opt_Is_Valid := Opt_Is_Valid and not Data.Files_To_Delete (Files.POGS_Files); Data.Files_To_Delete (Files.POGS_Files) := True; elsif SPARK.Ada.Strings.Unbounded.Equal_Unbounded_String_String (Opt_Name, "help") then Opt_Is_Valid := not Opt_Has_Value; Status := Help; else -- Unknown option. Opt_Is_Valid := False; end if; end if; -- If the option was not valid for any reason, we flag up -- an error. if not Opt_Is_Valid then SPARK.Ada.Text_IO.Put_Error ("Invalid option: `"); SPARK.Ada.Text_IO.Unbounded_String.Put_Error (SPARK.Ada.Command_Line.Unbounded_String.Argument (N)); SPARK.Ada.Text_IO.Put_Line_Error ("'"); Status := Error; end if; -- There is no point in carrying on if we're not Ok. exit when not (Status = Ok); end loop; end if; --# accept F, 33, Opt_Value, "So far, none of our arguments have a value, so..."; end Initialize; end Command_Line; spark-2012.0.deb/sparkclean/files.adb0000644000175000017500000000555711753202340016345 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
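The following plain-Ada sketch shows the "-option" / "-option=value" convention documented for Parse_Option above, applied to the flags that Initialize recognises (-examiner, -simplifier, -victor, -pogs, -help). It is an editorial illustration only, not part of the package; the name Demo_Parse is invented, and unlike the SPARK-annotated original it simply reports what it finds rather than building a Data_T record.

with Ada.Command_Line;
with Ada.Strings.Fixed;
with Ada.Text_IO;
procedure Demo_Parse is
   use Ada.Text_IO;
begin
   for N in 1 .. Ada.Command_Line.Argument_Count loop
      declare
         Arg : constant String  := Ada.Command_Line.Argument (N);
         Eq  : constant Natural := Ada.Strings.Fixed.Index (Arg, "=");
      begin
         if Arg'Length < 2 or else Arg (Arg'First) /= '-' then
            --  Options must start with a dash and name at least one character.
            Put_Line ("Invalid option: `" & Arg & "'");
         elsif Eq = 0 then
            --  Plain flag, e.g. "-examiner" gives Option = "examiner".
            Put_Line ("Option: " & Arg (Arg'First + 1 .. Arg'Last));
         else
            --  "-foo=bar" gives Option = "foo" and Value = "bar".
            Put_Line ("Option: " & Arg (Arg'First + 1 .. Eq - 1)
                      & "  Value: " & Arg (Eq + 1 .. Arg'Last));
         end if;
      end;
   end loop;
end Demo_Parse;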
-- --============================================================================= package body Files is function Filetype_From_Extension (Extension : in SPARK.Ada.Strings.Unbounded.Unbounded_String) return File_Types is Result : File_Types; begin if SPARK.Ada.Strings.Unbounded.Equal_Unbounded_String_String (Extension, "vcg") or SPARK.Ada.Strings.Unbounded.Equal_Unbounded_String_String (Extension, "dpc") or SPARK.Ada.Strings.Unbounded.Equal_Unbounded_String_String (Extension, "fdl") or SPARK.Ada.Strings.Unbounded.Equal_Unbounded_String_String (Extension, "rls") or SPARK.Ada.Strings.Unbounded.Equal_Unbounded_String_String (Extension, "rep") or SPARK.Ada.Strings.Unbounded.Equal_Unbounded_String_String (Extension, "lst") or SPARK.Ada.Strings.Unbounded.Equal_Unbounded_String_String (Extension, "sli") then Result := Examiner_Files; elsif SPARK.Ada.Strings.Unbounded.Equal_Unbounded_String_String (Extension, "siv") or SPARK.Ada.Strings.Unbounded.Equal_Unbounded_String_String (Extension, "slg") or SPARK.Ada.Strings.Unbounded.Equal_Unbounded_String_String (Extension, "sdp") or SPARK.Ada.Strings.Unbounded.Equal_Unbounded_String_String (Extension, "zsl") or SPARK.Ada.Strings.Unbounded.Equal_Unbounded_String_String (Extension, "log") or SPARK.Ada.Strings.Unbounded.Equal_Unbounded_String_String (Extension, "zlg") then Result := Simplifier_Files; elsif SPARK.Ada.Strings.Unbounded.Equal_Unbounded_String_String (Extension, "vct") or SPARK.Ada.Strings.Unbounded.Equal_Unbounded_String_String (Extension, "vlg") or SPARK.Ada.Strings.Unbounded.Equal_Unbounded_String_String (Extension, "vsm") then Result := Victor_Files; elsif SPARK.Ada.Strings.Unbounded.Equal_Unbounded_String_String (Extension, "sum") then Result := POGS_Files; else Result := Unknown; end if; return Result; end Filetype_From_Extension; end Files; spark-2012.0.deb/wraputility/0000755000175000017500000000000011753203756015045 5ustar eugeneugenspark-2012.0.deb/wraputility/wraps.ads0000644000175000017500000000323111753202337016663 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- This program reads in a .PFS (path functions) or .VCS (verification -- -- conditions) file and truncates lines which are over eighty characters in -- -- length. Where a line exceeds 80 characters, the utility attempts to find -- -- convenient points in the line (with an intervening space character) at -- -- which to insert line breaks, and indents by ten columns after the break. 
-- -------------------------------------------------------------------------------- --# inherit SPARK_IO; package WRAPS is procedure WRAP; --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *; end WRAPS; spark-2012.0.deb/wraputility/all.wrn0000644000175000017500000000014111753202340016325 0ustar eugeneugen-- Warning control file for Wrap_Utility hidden_parts pragma all static_expressions with_clauses spark-2012.0.deb/wraputility/wraps.adb0000644000175000017500000005072511753202337016654 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Command_Line, GNAT.OS_Lib, GNAT.IO_Aux, SPARK_IO; use type SPARK_IO.File_Type; use type SPARK_IO.File_Status; -- Key layout concerns are summarised below: -- -------------------------------------------------------------------- -- Column number: -- 123456789... -- -- The column beoynd which wrapping is desired>M -- M -- V--split Characters-V----------V M -- SSSSSSSSSSSXXXXXXXXXXXXXXXXCXXXXXXXXXXXXXXXXXXXCXXXXXXXXXXXCYYYYYYYZZZZZZZZ... -- ^Possible ^Text for this line ^ M ^ -- start Text cut off for next line| M Text -- offset M beyond -- -- -------------------- -- Note that, wrapped lines are prefixed with an indent. This is achieved by -- modifying the text to include non-breaking space Characters. -- -------------------------------------------------------------------- -- -- Key layout rules are summarised below: -- -- The line fits: -- Condition: start+line lies within the wrapping point. -- Action: write out the line and finish. (Never insert a newline here. -- Newlines are only added for wrapping purposes.) -- -- The line does not fit: -- Condition: Above condition is false. -- Action: Compare start point and wrapping column. -- -- Condition: The start point is beyond the wrapping column. -- Action: Scan remaining text forwards until a split Character (that is not -- the last Character!) is encountered. Write out line up to and -- including the split Character, and a newline. Collect remaining -- line, and start again. An indent is now requested. -- If a split Character is never found (or it's only found at the -- last Character), write out the whole line and finish. -- -- Condition: The start point is within the wrapping column. -- Action: Scan backwards from the wrapping column. If a split Character is -- encountered (that is not the first Character!), then write out the line -- up to and including the split Character, and a newline. Collect -- remaining line, and start again. An indent is now requested. 
If a split -- Character is never found (or it's only found at the first Character), -- write out the whole line, set offset to just after the wrapper column, -- and start again. -- -- Note: If the last line terminates with a new line, then the above rules hold. -- If the last line does not terminate with a new line, then the above rules hold, -- with an additional new line being added just before the end of the file. package body WRAPS is -- CONSTANTS -- maxlinelength : constant Integer := 262143; -- 2**18 - 1 or 256k MaxCols : constant Integer := 80; -- Maximum no. of output columns normally permitted Indentation : constant Integer := 10; -- Columns to indent for 2nd. & subsequent wraps SpaceChar : constant Integer := 32; -- ' ' {assumption: ASCII, so space is chr(32)} OpenParenthesis : constant Integer := 40; -- '(' CloseParenthesis : constant Integer := 41; -- ')' OpenBracket : constant Integer := 91; -- '[' CloseBracket : constant Integer := 93; -- ']' -- TYPES -- type linelength is range 0 .. maxlinelength; subtype linearraypos is linelength range 1 .. linelength'Last; subtype charcode is Integer range 0 .. 255; --assumption: ASCII range 0..255 type linearray is array (linearraypos) of charcode; type line is record contents : linearray; length : linelength; end record; -- line procedure WRAP is infile, outfile : SPARK_IO.File_Type; STATUS : SPARK_IO.File_Status; procedure DoRead (CH : out charcode) --# global in infile; --# in out SPARK_IO.File_Sys; --# derives CH, --# SPARK_IO.File_Sys from infile, --# SPARK_IO.File_Sys; is C : Character; begin SPARK_IO.Get_Char (infile, C); CH := Character'Pos (C); end DoRead; procedure DoWrite (CH : in charcode) --# global in outfile; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CH, --# outfile; is begin SPARK_IO.Put_Char (outfile, Character'Val (CH)); end DoWrite; procedure DoReadln --# global in infile; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# infile; is begin SPARK_IO.Skip_Line (infile, 1); end DoReadln; procedure DoWriteln --# global in outfile; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# outfile; is begin SPARK_IO.New_Line (outfile, 1); end DoWriteln; function CheckFileExists (Name : String) return Boolean -- checks that we're not trying to open a non-existent file is --# hide CheckFileExists; begin return GNAT.IO_Aux.File_Exists (Name); end CheckFileExists; -- True if we were called with a filename argument function New_Wrap return Boolean is --# hide New_Wrap; begin return Ada.Command_Line.Argument_Count > 0; end New_Wrap; -- Return the input file. -- This will be either the first argument, or "WRAPS.IN" if there is no -- argument. function OpenInFile return SPARK_IO.File_Type is --# hide OpenInFile; filename : GNAT.OS_Lib.String_Access := new String'("WRAP.INP"); filehandle : SPARK_IO.File_Type; STATUS : SPARK_IO.File_Status; begin if New_Wrap then filename := new String'(Ada.Command_Line.Argument (1)); end if; if CheckFileExists (filename.all) then SPARK_IO.Open (filehandle, SPARK_IO.In_File, filename'Length, filename.all, "", STATUS); if STATUS /= SPARK_IO.Ok then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Open input failed with: " & SPARK_IO.File_Status'Image (STATUS), 0); end if; else filehandle := SPARK_IO.Null_File; end if; return filehandle; end OpenInFile; -- Return the output file. -- This will be either the first argument and ".tmp", or "WRAPS.OUT" if there is no -- argument. 
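To illustrate the wrapping policy summarised in the layout rules above (wrap at column 80, prefer to break after a space, parenthesis or bracket, and indent continuation lines by ten columns), here is a simplified plain-Ada sketch. It is an editorial illustration rather than the real OutWrappedLine: it omits the forward-scan fallback and the start-offset handling, and the names Wrap_Demo, Put_Wrapped and Is_Split are invented.

with Ada.Text_IO;
procedure Wrap_Demo is
   Max_Cols    : constant := 80;
   Indentation : constant := 10;
   Indent      : constant String (1 .. Indentation) := (others => ' ');

   --  The split characters used by wraps.adb: space, parentheses, brackets.
   function Is_Split (C : Character) return Boolean is
   begin
      return C = ' ' or C = '(' or C = ')' or C = '[' or C = ']';
   end Is_Split;

   procedure Put_Wrapped (Line : String) is
      From : Positive := Line'First;
      To   : Positive;
      Room : Positive := Max_Cols;   --  Columns available on the current output line.
   begin
      loop
         if Line'Last - From + 1 <= Room then
            --  The remainder fits: write it out and finish.
            Ada.Text_IO.Put_Line (Line (From .. Line'Last));
            exit;
         end if;
         --  Assume a perfect fit, then scan backwards for a split character;
         --  the split character itself is kept on the current line.
         To := From + Room - 1;
         while To > From and then not Is_Split (Line (To)) loop
            To := To - 1;
         end loop;
         if To = From then
            To := From + Room - 1;   --  No split point found: break at the limit.
         end if;
         Ada.Text_IO.Put_Line (Line (From .. To));
         Ada.Text_IO.Put (Indent);   --  Continuation lines are indented ten columns.
         From := To + 1;
         Room := Max_Cols - Indentation;
      end loop;
   end Put_Wrapped;
begin
   Put_Wrapped ("procedure_call_with_a_long_name (first_actual_parameter, "
                & "second_actual_parameter, third_actual_parameter, "
                & "fourth_actual_parameter);");
end Wrap_Demo;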
function OpenOutFile return SPARK_IO.File_Type is --# hide OpenOutFile; filename : GNAT.OS_Lib.String_Access := new String'("WRAP.OUT"); filehandle : SPARK_IO.File_Type; STATUS : SPARK_IO.File_Status; begin if New_Wrap then filename := new String'(Ada.Command_Line.Argument (1) & ".tmp"); end if; SPARK_IO.Create (filehandle, filename'Length, filename.all, "", STATUS); if STATUS /= SPARK_IO.Ok then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Create output failed with: " & SPARK_IO.File_Status'Image (STATUS), 0); end if; if filehandle = SPARK_IO.Null_File then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Create output return Null_File", 0); end if; return filehandle; end OpenOutFile; procedure Initialise --# global out infile; --# out outfile; --# derives infile, --# outfile from ; is begin infile := OpenInFile; --# accept Flow, 22, "OpenInFile is hidden. Expression may actually change."; if infile = SPARK_IO.Null_File then --# end accept; outfile := SPARK_IO.Null_File; else outfile := OpenOutFile; end if; end Initialise; -- Delete the original file and rename the temporary file. procedure CleanUp --# derives ; is --# hide CleanUp; SUCCESS : Boolean := False; infile_name : GNAT.OS_Lib.String_Access; outfile_name : GNAT.OS_Lib.String_Access; newfile_name : GNAT.OS_Lib.String_Access; begin if New_Wrap then -- We have arguments infile_name := new String'(Ada.Command_Line.Argument (1)); outfile_name := new String'(Ada.Command_Line.Argument (1) & ".tmp"); if Ada.Command_Line.Argument_Count = 2 then -- We have a destination filename newfile_name := new String'(Ada.Command_Line.Argument (2)); else -- Replace the original file newfile_name := new String'(Ada.Command_Line.Argument (1)); end if; -- Delete the original input file. GNAT.OS_Lib.Delete_File (infile_name.all, SUCCESS); if not SUCCESS then SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "*** ERROR - wrap_utility failed to delete input file failed with Errno=" & Integer'Image (GNAT.OS_Lib.Errno), 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); end if; -- Move the output file to the destination name. GNAT.OS_Lib.Rename_File (outfile_name.all, newfile_name.all, SUCCESS); if not SUCCESS then SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "*** ERROR - wrap_utility failed to rename output file", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); end if; else null; -- We are behaving as the old wrap util, so do nothing. end if; -- New_Wrap end CleanUp; -------------------------------------------------------------------------------- procedure ReadLine (L : out line) --# global in infile; --# in out SPARK_IO.File_Sys; --# derives L, --# SPARK_IO.File_Sys from infile, --# SPARK_IO.File_Sys; --This routine reads a line of Character-codes into the line-buffer L. 
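--Note: at most maxlinelength (262143) characters of an input line are
--buffered here; anything left on a longer line is discarded by the
--DoReadln (Skip_Line) call made once the loop terminates.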
is CH : charcode; lensofar : linelength; begin lensofar := 0; while not SPARK_IO.End_Of_Line (infile) and lensofar < linelength (maxlinelength) loop lensofar := lensofar + 1; DoRead (CH); --# accept Flow, 23, L.contents, "Array elements are initialised before use."; L.contents (lensofar) := CH; --# end accept; end loop; L.length := lensofar; DoReadln; --# accept Flow, 602, L, L.contents, "Array elements are initialised before use."; end ReadLine; procedure OutPartOfLine (L : in line; FromCol, ToCol : in linelength) --# global in outfile; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# FromCol, --# L, --# outfile, --# ToCol; --Writes out line-buffer L unchanged, without adding any single quotes. is index : linelength; begin index := FromCol; while index <= ToCol loop DoWrite (L.contents (index)); index := index + 1; end loop; end OutPartOfLine; procedure OutUnchangedLine (L : in line) --# global in outfile; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# L, --# outfile; --Writes out line-buffer L unchanged, without any line-wrapping. is begin OutPartOfLine (L, 1, L.length); DoWriteln; end OutUnchangedLine; --Copies a line, wrapping if it appears to be necessary. procedure CopyAndMaybeWrapLine --# global in infile; --# in outfile; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# infile, --# outfile; is currentline : line; function IsALongLine (L : in line) return Boolean --True if we need to wrap the line around - i.e. more than MaxCols. is begin return L.length > linelength (MaxCols); end IsALongLine; --Write out line-buffer L, wrapping where necessary. procedure OutWrappedLine (L : in line) --# global in outfile; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# L, --# outfile; is OnCol, Width : linelength; procedure OutNextPartOfLine (L : in line; FromCol : in out linelength; InWidth : in linelength) --# global in outfile; --# in out SPARK_IO.File_Sys; --# derives FromCol from *, --# InWidth, --# L & --# SPARK_IO.File_Sys from *, --# FromCol, --# InWidth, --# L, --# outfile; -- pre FromCol + InWidth <= L.length and InWidth > 1; is ToCol : linelength; function OKSplitChar (C : in charcode) return Boolean --Returns true if C is a space, parenthesis or bracket. is begin return (C = SpaceChar) or else (C = OpenParenthesis) or else (C = CloseParenthesis) or else (C = OpenBracket) or else (C = CloseBracket); end OKSplitChar; begin --OutNextPartOfLine -- L: The line under consideration. -- FromCol: Index the first portion of L that not has already been displayed. -- InWidth: The remaining width of the screen before MaxCols is reached. -- ToCol: Index the last portion of L to be displayed on this -- line. The correct value is calculated below. -- Assume that the line fits perfectly. ToCol := (FromCol + InWidth) - 1; if not OKSplitChar (L.contents (ToCol)) then -- Cannot split at the ideal position. -- Search backwards to find the next-best split point. loop ToCol := ToCol - 1; exit when OKSplitChar (L.contents (ToCol)) or else (ToCol = FromCol); end loop; if ToCol = FromCol then -- Could not find any split point looking backwards. The -- line will need to wrap beyond MaxCols. -- Search forwards to find the least-worst split point. ToCol := (FromCol + InWidth) - 1; loop exit when (ToCol >= L.length) or else OKSplitChar (L.contents (ToCol)); ToCol := ToCol + 1; end loop; end if; end if; -- Display the selected portion of the line. 
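-- Worked example: with MaxCols = 80 and Indentation = 10, the first call
-- arrives with FromCol = 1 and InWidth = 80, so the ideal split point is
-- column 80. ToCol is now the rightmost split character after FromCol and
-- at or before column 80, or lies beyond column 80 when no such character
-- exists (in which case the output line exceeds MaxCols). Columns
-- FromCol .. ToCol are written next, FromCol is advanced to ToCol + 1, and
-- later calls use InWidth = 70 to leave room for the ten-column indent.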
OutPartOfLine (L, FromCol, ToCol); -- The index to first portion of L that not has already been displayed -- is adjusted accordingly. FromCol := ToCol + 1; end OutNextPartOfLine; procedure Indent (NoOfCols : in linelength) --# global in outfile; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# NoOfCols, --# outfile; -- pre NoOfCols >= 0; is begin for index in linelength range 1 .. NoOfCols loop DoWrite (SpaceChar); end loop; end Indent; begin--OutWrappedLine --Start at column 1. OnCol := 1; -- To start with, have the width of the screen in which to place content. Width := linelength (MaxCols); while OnCol + Width <= L.length loop OutNextPartOfLine (L, OnCol, Width); -- Wrap and indent, only if there is more text to display for this line. if OnCol <= L.length then DoWriteln; Indent (linelength (Indentation)); end if; -- Now, have the width of the screen (less indent) in which to place content. Width := linelength (MaxCols - Indentation); end loop; -- Display any residue. if OnCol <= L.length then OutPartOfLine (L, OnCol, L.length); end if; -- All done. Add final newline. DoWriteln; end OutWrappedLine; begin--CopyAndMaybeWrapLine ReadLine (currentline); if IsALongLine (currentline) then OutWrappedLine (currentline); else OutUnchangedLine (currentline); end if; end CopyAndMaybeWrapLine; procedure DisplayErrorMessage (Leader : String; STATUS : in SPARK_IO.File_Status) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Leader, --# STATUS; is --# hide DisplayErrorMessage ; begin SPARK_IO.Put_Line (SPARK_IO.Standard_Output, (Leader & SPARK_IO.File_Status'Image (STATUS)), 0); end DisplayErrorMessage; begin -- WRAP -- GetFileNames; Initialise; --# accept Flow, 22, "Initialise hides behaviour. Expression may actually change."; if infile /= SPARK_IO.Null_File then --# end accept; -- Please note that End_Of_File (infile) will report true where: -- The next Character is: EOF -- The next Character sequence is: NL, EOF while not SPARK_IO.End_Of_File (infile) loop CopyAndMaybeWrapLine; end loop; -- This call to DoWriteln would appear to be totally unnecessary. However, it would -- correctly restore a NL if End_Of_File has really detected: NL, EOF. (Of course, -- it also adds an additional NL if End_Of_File actually detected: EOF.) DoWriteln; --# accept Flow, 10, infile, "May actually affect infile."; SPARK_IO.Close (infile, STATUS); --# end accept; if STATUS /= SPARK_IO.Ok then DisplayErrorMessage ("Close In_File: ", STATUS); end if; --# accept Flow, 10, outfile, "May actually affect outfile."; SPARK_IO.Close (outfile, STATUS); --# end accept; if STATUS /= SPARK_IO.Ok then DisplayErrorMessage ("Close Out_File: ", STATUS); end if; CleanUp; end if; end WRAP; end WRAPS; spark-2012.0.deb/wraputility/Makefile0000644000175000017500000000467011753202340016500 0ustar eugeneugen# ----------------------------------------------------------------------------- # (C) Altran Praxis Limited # ----------------------------------------------------------------------------- # # The SPARK toolset is free software; you can redistribute it and/or modify it # under terms of the GNU General Public License as published by the Free # Software Foundation; either version 3, or (at your option) any later # version. The SPARK toolset is distributed in the hope that it will be # useful, but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. 
You should have received a copy of the GNU # General Public License distributed with the SPARK toolset; see file # COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of # the license. # # ============================================================================= ################################################################################ # PURPOSE #------------------------------------------------------------------------------- # Makefile for the wraputility # For Linux, Windows, Solaris or Mac OS X (aka Darwin). ################################################################################ ################################################################################ # BUILD CONFIGURATION ################################################################################ # Name of the output program. OUTPUT_NAME:=wrap_utility # Location of root. ROOT:=.. # Location of common. COMMON:=${ROOT}/common include ${COMMON}/Makefile.inc ################################################################################ # TARGETS ################################################################################ all: ${OUTPUT_NAME}${EXE_EXTN} ${OUTPUT_NAME}${EXE_EXTN}: preamble prep gnatmake -j${SPARKCPUS} ${GNATMAKE_OPTS} ${OUTPUT_NAME} -o $@ -bargs ${BIND_OPTS} self-analysis: preamble prep -spark -plain @${OUTPUT_NAME}.smf # Initialisations # =============== preamble: $(MAKE) -C ${ROOT}/examiner clean # Platform specific prepping # ========================== prep: $(MAKE) -C ${ROOT}/examiner prep # Cleaning code base # ================== clean: standardclean reallyclean: clean targetclean vcclean preamble $(MAKE) -C ${ROOT}/examiner reallyclean ################################################################################ # END-OF-FILE spark-2012.0.deb/wraputility/spark.sw0000644000175000017500000000022511753202340016523 0ustar eugeneugen-sparklib -output_directory=vcg -config_file=../common/gnat.cfg -listing_extension=ls_ -casing -index_file=wrap_utility.idx -report=wrap_utility.rep spark-2012.0.deb/wraputility/wrap_utility.adb0000644000175000017500000000231411753202337020243 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Ada.Command_Line; with WRAPS; procedure WRAP_UTILITY is begin WRAPS.WRAP; Ada.Command_Line.Set_Exit_Status (Ada.Command_Line.Success); exception when others => Ada.Command_Line.Set_Exit_Status (Ada.Command_Line.Failure); end WRAP_UTILITY; spark-2012.0.deb/wraputility/vcg/0000755000175000017500000000000011753203756015624 5ustar eugeneugenspark-2012.0.deb/wraputility/wrap_utility.smf0000644000175000017500000000001211753202340020265 0ustar eugeneugenwraps.adb spark-2012.0.deb/wraputility/wrap_utility.idx0000644000175000017500000000011311753202340020266 0ustar eugeneugensuperindex is in ../examiner/spark.idx wraps specification is in wraps.ads spark-2012.0.deb/pogs/0000755000175000017500000000000011753203756013420 5ustar eugeneugenspark-2012.0.deb/pogs/heap.adb0000644000175000017500000001304511753202340014774 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
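-- A minimal usage sketch of the array-plus-free-list allocator implemented
-- by CreateAtom and DisposeOfAtom below, assuming the visible part of the
-- Heap spec (heap.ads, not shown here) declares HeapRecord, Atom and the
-- operations used; the procedure name Demo_Heap_Reuse is invented for
-- illustration only.
with Heap;
procedure Demo_Heap_Reuse is
   The_Heap : Heap.HeapRecord;
   A, B     : Heap.Atom;
   OK       : Boolean;
begin
   Heap.Initialize (The_Heap);
   -- The first allocation is served from the unused part of the array.
   Heap.CreateAtom (The_Heap, A, OK);
   pragma Assert (OK);
   -- Returning the atom places it on the free list ...
   Heap.DisposeOfAtom (The_Heap, A);
   -- ... but further allocations still come from the array while
   -- HighMark < ListLength; the free list is only consulted once the
   -- array itself has been used up.
   Heap.CreateAtom (The_Heap, B, OK);
   pragma Assert (OK);
end Demo_Heap_Reuse;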
-- --============================================================================= package body Heap --158 refinement clause removed is --158 new procedure procedure Initialize (TheHeap : out HeapRecord) is begin TheHeap.HighMark := Atom (0); TheHeap.NextFreeAtom := Atom (0); --# accept F, 23, TheHeap.ListOfAtoms, "Partial initialization" & --# F, 602, TheHeap, TheHeap.ListOfAtoms, "Partial initialization"; TheHeap.ListOfAtoms (0).PointerA := 0; TheHeap.ListOfAtoms (0).PointerB := 0; end Initialize; -------------------------------------------------------------------- procedure CreateAtom (TheHeap : in out HeapRecord; NewAtom : out Atom; Success : out Boolean) is A : Atom; begin --160--new if part if TheHeap.HighMark < Atom (ListLength) then --haven't used up array yet TheHeap.HighMark := TheHeap.HighMark + 1; A := TheHeap.HighMark; TheHeap.ListOfAtoms (A).PointerA := 0; TheHeap.ListOfAtoms (A).PointerB := 0; NewAtom := A; Success := True; elsif TheHeap.NextFreeAtom = 0 then --160--if turned into elsif --array and returned atoms in free list both used up Success := False; NewAtom := 0; else --array used up but there are atoms in the returned free list A := TheHeap.NextFreeAtom; TheHeap.NextFreeAtom := TheHeap.ListOfAtoms (TheHeap.NextFreeAtom).PointerA; TheHeap.ListOfAtoms (A).PointerA := 0; TheHeap.ListOfAtoms (A).PointerB := 0; NewAtom := A; Success := True; end if; end CreateAtom; -------------------------------------------------------------------- procedure DisposeOfAtom (TheHeap : in out HeapRecord; OldAtom : in Atom) is begin TheHeap.ListOfAtoms (OldAtom).PointerA := TheHeap.NextFreeAtom; TheHeap.NextFreeAtom := OldAtom; end DisposeOfAtom; -------------------------------------------------------------------- function APointer (TheHeap : HeapRecord; A : Atom) return Atom is begin return TheHeap.ListOfAtoms (A).PointerA; end APointer; -------------------------------------------------------------------- function BPointer (TheHeap : HeapRecord; A : Atom) return Atom is begin return TheHeap.ListOfAtoms (A).PointerB; end BPointer; -------------------------------------------------------------------- function AValue (TheHeap : HeapRecord; A : Atom) return HeapIndex.IndexType is begin return TheHeap.ListOfAtoms (A).ValueA; end AValue; -------------------------------------------------------------------- function BValue (TheHeap : HeapRecord; A : Atom) return HeapIndex.IndexType is begin return TheHeap.ListOfAtoms (A).ValueB; end BValue; -------------------------------------------------------------------- procedure UpdateAPointer (TheHeap : in out HeapRecord; A : in Atom; Pointer : in Atom) is begin TheHeap.ListOfAtoms (A).PointerA := Pointer; end UpdateAPointer; -------------------------------------------------------------------- procedure UpdateBPointer (TheHeap : in out HeapRecord; A : in Atom; Pointer : in Atom) is begin TheHeap.ListOfAtoms (A).PointerB := Pointer; end UpdateBPointer; -------------------------------------------------------------------- procedure UpdateAValue (TheHeap : in out HeapRecord; A : in Atom; Value : in HeapIndex.IndexType) is begin TheHeap.ListOfAtoms (A).ValueA := Value; end UpdateAValue; -------------------------------------------------------------------- procedure UpdateBValue (TheHeap : in out HeapRecord; A : in Atom; Value : in HeapIndex.IndexType) is begin TheHeap.ListOfAtoms (A).ValueB := Value; end UpdateBValue; -------------------------------------------------------------------- --159 new function used in RefList function IsNullPointer (A : Atom) return 
Boolean is begin return (A = 0); end IsNullPointer; end Heap; spark-2012.0.deb/pogs/spark_calendar.ads0000644000175000017500000000605411753202340017053 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Calendar; package SPARK_Calendar --# own Time_History; --# initializes Time_History; is type Error_Code is (Valid, Time_Error); type Time is private; subtype Year_Number is Integer range 1901 .. 2099; subtype Month_Number is Integer range 1 .. 12; subtype Day_Number is Integer range 1 .. 31; -- function Clock converted to procedure, and dummy history variable added for annotations. procedure Clock (Now : out Time); --# global in out Time_History; --# derives Now, --# Time_History from Time_History; -- Function converted to procedure to allow error handling procedure Year (Date : in Time; Result : out Year_Number; Status : out Error_Code); --# derives Result, --# Status from Date; function Month (Date : Time) return Month_Number; function Day (Date : Time) return Day_Number; procedure Split (Date : in Time; Year_P : out Year_Number; Month_P : out Month_Number; Day_P : out Day_Number; Status : out Error_Code); --# derives Day_P, --# Month_P, --# Status, --# Year_P from Date; -- Function converted to procedure to allow error handling procedure Time_Of (Year_P : in Year_Number; Month_P : in Month_Number; Day_P : in Day_Number; Result : out Time; Status : out Error_Code); --# derives Result, --# Status from Day_P, --# Month_P, --# Year_P; function LT (Left, Right : Time) return Boolean; function LE (Left, Right : Time) return Boolean; function GT (Left, Right : Time) return Boolean; function GE (Left, Right : Time) return Boolean; private --# hide SPARK_Calendar; pragma Inline (Clock); pragma Inline (Year); pragma Inline (Month); pragma Inline (Day); pragma Inline (Split); pragma Inline (Time_Of); pragma Inline (LT); pragma Inline (LE); pragma Inline (GT); pragma Inline (GE); type Time is new Calendar.Time; end SPARK_Calendar; spark-2012.0.deb/pogs/vcs-analyse_dpc_file.adb0000644000175000017500000004562311753202340020140 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Procedure to analyse a .DPC file -- -- At this time, the DPC file has the same format as a VCG file. Most of the -- -- code has been copied across but the format may change over time and thus -- -- justification of having a separate subprogram to analyse a DPC file. -- -------------------------------------------------------------------------------- separate (VCS) procedure Analyse_DPC_File (Report_File : in SPARK_IO.File_Type; Filename : in E_Strings.T; Error_In_File : out Boolean; File_Date_Time : out E_Strings.T) is Dummy_Close_Status : SPARK_IO.File_Status; The_Date_Time : E_Strings.T; Open_Status : SPARK_IO.File_Status; File_Line : E_Strings.T; Read_Line_Success : Boolean; DPC_File : SPARK_IO.File_Type := SPARK_IO.Null_File; File_Status : File_Status_T; VC_Info : VC_Info_Type; Current_DPC_Name : E_Strings.T; Parsing_State : Parsing_State_Type := Initial; Trimmed_Line : E_Strings.T; Finished_With_File : Boolean; procedure Extract_DPC_File_Date_Time (DPC_File : in SPARK_IO.File_Type; File_Date_Time : out E_Strings.T; File_Status : out File_Status_T) --# global in out SPARK_IO.File_Sys; --# derives File_Date_Time, --# File_Status, --# SPARK_IO.File_Sys from DPC_File, --# SPARK_IO.File_Sys; is File_Line : E_Strings.T; Trimmed_Line : E_Strings.T; Sub_Program_Found : Boolean; begin File_Status := Not_Corrupt; File_Date_Time := E_Strings.Empty_String; -- Check for completely empty file. E_Strings.Get_Line (File => DPC_File, E_Str => File_Line); if E_Strings.Eq1_String (E_Str => File_Line, Str => "") and SPARK_IO.End_Of_File (DPC_File) then File_Status := Corrupt_Empty_File; else --Keep on reading from this file, until the desired information is retrieved --or the end of the file is reached. loop Trimmed_Line := E_Strings.Trim (File_Line); -- find date, there is no need to find details of each dpc as an entry -- on the vcheap for each dpc (vc) has already been created. 
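-- A line is treated as the date line when its first four characters
-- (after trimming) are "DATE"; the date and time are then taken as the
-- DPC_File_Date_Time_Length-character slice of the trimmed line starting
-- at column DPC_File_Date_Time_Start_Column.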
if E_Strings.Eq1_String (E_Str => E_Strings.Section (Trimmed_Line, 1, 4), Str => "DATE") then File_Date_Time := E_Strings.Section (Trimmed_Line, DPC_File_Date_Time_Start_Column, DPC_File_Date_Time_Length); end if; Sub_Program_Found := Is_Valid_Subprogram (Trimmed_Line); exit when (Sub_Program_Found or SPARK_IO.End_Of_File (DPC_File)); E_Strings.Get_Line (File => DPC_File, E_Str => File_Line); end loop; end if; if E_Strings.Eq_String (File_Date_Time, E_Strings.Empty_String) then File_Date_Time := E_Strings.Copy_String (Str => "Unknown Date (for dpc generation)"); end if; end Extract_DPC_File_Date_Time; --------------------------------------------------------------------------- function Is_DPC_Error_Message (Line : E_Strings.T) return Boolean is begin return E_Strings.Get_Length (E_Str => Line) > 0 and then E_Strings.Get_Element (E_Str => Line, Pos => 1) = '!'; end Is_DPC_Error_Message; --------------------------------------------------------------------------- function DPC_Is_New_Range_Line (Line : E_Strings.T) return Boolean is begin return E_Strings.Eq1_String (E_Str => E_Strings.Section (Line, 1, 17), Str => "For path(s) from ") or E_Strings.Eq1_String (E_Str => E_Strings.Section (Line, 1, 14), Str => "For checks of "); end DPC_Is_New_Range_Line; --------------------------------------------------------------------------- function DPC_Is_New_VC_Line (Line : E_Strings.T) return Boolean is Ret_Val : Boolean; begin -- The shortest possible New VC Line is for a function that has -- a single letter identifier, followed by a full-stop e.g. -- function_g. -- which is 11 characters. if E_Strings.Get_Length (E_Str => Line) >= 11 then Ret_Val := E_Strings.Eq1_String (E_Str => E_Strings.Section (Line, 1, 10), Str => "procedure_") or else E_Strings.Eq1_String (E_Str => E_Strings.Section (Line, 1, 9), Str => "function_") or else E_Strings.Eq1_String (E_Str => E_Strings.Section (Line, 1, 10), Str => "task_type_"); if Ret_Val then for I in E_Strings.Lengths range 9 .. E_Strings.Get_Length (E_Str => Line) - 1 loop if not (Ada.Characters.Handling.Is_Alphanumeric (E_Strings.Get_Element (E_Str => Line, Pos => I)) or else E_Strings.Get_Element (E_Str => Line, Pos => I) = '_') then Ret_Val := False; exit; end if; --# assert I in 9 .. E_Strings.Get_Length (Line) - 1 and --# Line = Line% and --# E_Strings.Get_Length (Line) >= 11; end loop; if E_Strings.Get_Element (E_Str => Line, Pos => E_Strings.Get_Length (E_Str => Line)) /= '.' 
then Ret_Val := False; end if; end if; else Ret_Val := False; end if; return Ret_Val; end DPC_Is_New_VC_Line; --------------------------------------------------------------------------- function Get_Line_Number (Line_Number : VC_Line_Type) return E_Strings.T is Number : Integer; Trimmed_Result : E_Strings.T; begin if Line_Number = Refinement_Or_Inheritance_VC then Trimmed_Result := E_Strings.Copy_String (Str => " "); elsif Line_Number = VC_Line_Start then Trimmed_Result := E_Strings.Copy_String (Str => "start"); elsif Line_Number = VC_Line_End then Trimmed_Result := E_Strings.Copy_String (Str => "finish"); else Number := Line_Number; E_Strings.Put_Int_To_String (Dest => Trimmed_Result, Item => Number, Start_Pt => 1, Base => 10); Trimmed_Result := E_Strings.Trim (E_Str => Trimmed_Result); end if; return Trimmed_Result; end Get_Line_Number; begin -- Analyse_DPC_File -- open DPC file E_Strings.Open (File => DPC_File, Mode_Of_File => SPARK_IO.In_File, Name_Of_File => Filename, Form_Of_File => "", Status => Open_Status); if Open_Status /= SPARK_IO.Ok then FatalErrors.Process (FatalErrors.Could_Not_Open_Input_File, E_Strings.Empty_String); end if; --No errors, until discover otherwise. Error_In_File := False; Extract_DPC_File_Date_Time (DPC_File => DPC_File, File_Date_Time => The_Date_Time, File_Status => File_Status); -- Report any error to standard out and set error flag -- accordingly. case File_Status is when Not_Corrupt => null; when Corrupt_Empty_File => SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "************* DPC file corrupt: empty file ************", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); Error_In_File := True; when Corrupt_Unknown_Subprogram => SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "************* DPC file corrupt: missing subprogram name ************", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); Error_In_File := True; end case; --Record the date regardless of errors. This may be a string of the form 'no date'. 
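-- For a well formed file, the analysis below is driven by a small parsing
-- state machine (Initial, First_Range, First_VC_Name, New_Range,
-- New_VC_Name): each "For path(s) from ..." or "For checks of ..." line
-- starts a new range, and each "procedure_...", "function_..." or
-- "task_type_..." name line identifies a DPC, which is added to the
-- VCHeap if not already present or has its DPC state updated if it is.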
File_Date_Time := The_Date_Time; if not (Error_In_File) then SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "File ", 0); if CommandLine.Data.PlainOutput then E_Strings.Put_Line (File => Report_File, E_Str => E_Strings.Lower_Case (E_Str => OSFiling.Base_Filename (Path => Filename))); else E_Strings.Put_Line (File => Report_File, E_Str => Filename); end if; if CommandLine.Data.IgnoreDates then SPARK_IO.Put_Line (Report_File, "*** Warning: DPC date stamps ignored ***", 0); else SPARK_IO.Put_String (Report_File, "DPCs generated ", 0); E_Strings.Put_Line (File => Report_File, E_Str => The_Date_Time); end if; -- find first non blank line -- if we get to the end of the file first, flag a fatal error Read_Next_Non_Blank_Line (File => DPC_File, Success => Read_Line_Success, File_Line => File_Line); if not Read_Line_Success then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "************* DPC file corrupt: no data beyond header ************", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); Error_In_File := True; else if Is_DPC_Error_Message (Line => File_Line) then SPARK_IO.New_Line (Report_File, 1); E_Strings.Put_String (File => Report_File, E_Str => File_Line); Error_In_File := True; else Error_In_File := False; -- initialize the 'current information' structure VC_Info := VC_Info_Type' (Start_Line => VC_Line_Start, End_Line => VC_Line_End, End_Line_Point_Type => VCDetails.Undetermined_Point, Number_Of_VCs => 0, This_Start_Line_Printed => False, File_Type => Standard_VC_File_Type, Any_VCs_Printed => False, Valid => False); Finished_With_File := False; -- process file line-by-line -- on entry to the loop there is already a valid line in the -- FileLine buffer while not Finished_With_File loop -- examine line and act accordingly if DPC_Is_New_Range_Line (Line => File_Line) then case Parsing_State is when Initial => Parsing_State := First_Range; when First_VC_Name => Parsing_State := New_Range; when New_VC_Name => Parsing_State := New_Range; when others => null; end case; Append_Next_Line_From_File (Line => File_Line, File => DPC_File); ProcessNewRangeLine (File_Line, VC_Info); elsif DPC_Is_New_VC_Line (Line => File_Line) then case Parsing_State is when First_Range => -- Initialise VCHeap and store the first VC on the VCHeap Trimmed_Line := E_Strings.Trim (File_Line); Current_DPC_Name := E_Strings.Section (E_Str => Trimmed_Line, Start_Pos => 1, Length => E_Strings.Get_Length (E_Str => Trimmed_Line) - 1); Parsing_State := First_VC_Name; if not VCHeap.Exists (Current_DPC_Name) then VCHeap.Reinitialize (Current_DPC_Name, Get_Line_Number (Line_Number => VC_Info.Start_Line), Get_Line_Number (Line_Number => VC_Info.End_Line), VC_Info.End_Line_Point_Type); end if; VCHeap.Set_DPC_State (Current_DPC_Name, VCDetails.DPC_SDP_Not_Present); when First_VC_Name => Trimmed_Line := E_Strings.Trim (File_Line); Current_DPC_Name := E_Strings.Section (E_Str => Trimmed_Line, Start_Pos => 1, Length => E_Strings.Get_Length (E_Str => Trimmed_Line) - 1); Parsing_State := New_VC_Name; if not VCHeap.Exists (Current_DPC_Name) then VCHeap.Add (VCHeap.First_Entry, Current_DPC_Name, Get_Line_Number (Line_Number => VC_Info.Start_Line), Get_Line_Number (Line_Number => VC_Info.End_Line), VC_Info.End_Line_Point_Type, VCDetails.VC_Not_Present, VCDetails.DPC_SDP_Not_Present); else VCHeap.Set_DPC_State (Current_DPC_Name, VCDetails.DPC_SDP_Not_Present); end if; when New_Range => -- Store a new VC on the VC Heap Trimmed_Line := E_Strings.Trim (File_Line); Current_DPC_Name := E_Strings.Section (E_Str => 
Trimmed_Line, Start_Pos => 1, Length => E_Strings.Get_Length (E_Str => Trimmed_Line) - 1); --SPARK_IO.Put_Line(ReportFile,"NewVCNameFound - New range",0); Parsing_State := New_VC_Name; if not VCHeap.Exists (Current_DPC_Name) then VCHeap.Add (VCHeap.First_Entry, Current_DPC_Name, Get_Line_Number (Line_Number => VC_Info.Start_Line), Get_Line_Number (Line_Number => VC_Info.End_Line), VC_Info.End_Line_Point_Type, VCDetails.VC_Not_Present, VCDetails.DPC_SDP_Not_Present); else VCHeap.Set_DPC_State (Current_DPC_Name, VCDetails.DPC_SDP_Not_Present); end if; when New_VC_Name => -- The range has not changed, but store a new VC on the VC Heap Trimmed_Line := E_Strings.Trim (File_Line); Current_DPC_Name := E_Strings.Section (E_Str => Trimmed_Line, Start_Pos => 1, Length => E_Strings.Get_Length (E_Str => Trimmed_Line) - 1); --SPARK_IO.Put_Line(ReportFile,"NewVCNameFound - Same range2",0); Parsing_State := New_VC_Name; if not VCHeap.Exists (Current_DPC_Name) then VCHeap.Add (VCHeap.First_Entry, Current_DPC_Name, Get_Line_Number (Line_Number => VC_Info.Start_Line), Get_Line_Number (Line_Number => VC_Info.End_Line), VC_Info.End_Line_Point_Type, VCDetails.VC_Not_Present, VCDetails.DPC_SDP_Not_Present); else VCHeap.Set_DPC_State (Current_DPC_Name, VCDetails.DPC_SDP_Not_Present); end if; when others => null; end case; end if; -- read next line Read_Next_Non_Blank_Line (File => DPC_File, Success => Read_Line_Success, File_Line => File_Line); -- if unsuccessful then check EOF -- and set FinishedWithFile accordingly if not Read_Line_Success then if SPARK_IO.End_Of_File (DPC_File) then Finished_With_File := True; else FatalErrors.Process (FatalErrors.Problem_Reading_File, E_Strings.Empty_String); end if; end if; end loop; -- write information for last VC -- two VCInfo parameters are necessary as WriteVCInfo compares them -- in deciding what to write (see definition of WriteVCInfo) if VC_Info.Valid then -- Reporting is now done as a table, so this has been commented out --WriteVCInfo( ReportFile, VCInfo, Dummy ); null; else SPARK_IO.Put_Line (Report_File, "No DPCs in file", 0); end if; end if; end if; end if; --# accept F, 10, Dummy_Close_Status, "Dummy_Close_Status unused here" & --# F, 10, DPC_File, "DPC_File unused here"; SPARK_IO.Close (DPC_File, Dummy_Close_Status); --# end accept; --# accept F, 33, Dummy_Close_Status, "Dummy_Close_Status unused here"; end Analyse_DPC_File; spark-2012.0.deb/pogs/commandline.adb0000644000175000017500000000616411753202340016351 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --To provide global state representing the command line options and a -- --procedure to parse the system command line. -- -- -- -------------------------------------------------------------------------------- with OSFiling; package body CommandLine is function Usage_String return E_Strings.T is begin return E_Strings.Copy_String (Str => "pogs [-d=input_directory] [-i] [-o=output_file] [-p] [-s] [-v]"); end Usage_String; procedure Read is Report_Filename : E_Strings.T; Temp_String : E_Strings.T; begin -- Process the command line arguments and store the result in Data OSCommandLine.Read (Data); -- What should the report filename be? -- -d specified? -o specified? Output filename -- ============= ============= =============== -- No No CWD.sum -- No Yes As specified (relative to CWD or absolute) -- Yes No Located in, and named after, specified output dir -- Yes Yes Located in specified output dir unless absolute path given if E_Strings.Is_Empty (E_Str => Data.ReportFile) then Report_Filename := OSFiling.Base_Dir_Name (Path => Data.StartDirectory); E_Strings.Append_Examiner_String (E_Str1 => Report_Filename, E_Str2 => OSFiling.Default_Report_Extn); Data.ReportFile := OSFiling.Full_Filename (Path => Data.StartDirectory, Filename => Report_Filename); else OSCommandLine.Normalize_Pathname (InputFile => Data.ReportFile, InputDir => Data.StartDirectory, ResultPath => Temp_String); Data.ReportFile := Temp_String; end if; end Read; end CommandLine; spark-2012.0.deb/pogs/total.ads0000644000175000017500000001113011753202340015214 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Package providing data structure to store running totals, and a procedure -- --to print them. 
-- -- -- -------------------------------------------------------------------------------- with SPARK_IO; --# inherit Banner, --# CommandLine, --# E_Strings, --# FatalErrors, --# Heap, --# SPARK_IO, --# VCDetails, --# VCHeap; package Total --# own State; --# initializes State; is procedure Update_Totals (VCG : in Boolean; DPC : in Boolean); --# global in VCHeap.State; --# in out State; --# derives State from *, --# DPC, --# VCG, --# VCHeap.State; procedure Output (Report_File : in SPARK_IO.File_Type; Temp_File : in out SPARK_IO.File_Type; Temp_False_File : in out SPARK_IO.File_Type; Temp_Contra_File : in out SPARK_IO.File_Type; Temp_Victor_File : in out SPARK_IO.File_Type; Temp_Riposte_File : in out SPARK_IO.File_Type; Temp_User_File : in out SPARK_IO.File_Type; Temp_Rlu_Error_File : in out SPARK_IO.File_Type; Temp_Rlu_Used_File : in out SPARK_IO.File_Type; Temp_PR_Verr_File : in out SPARK_IO.File_Type; Temp_Warn_Error_File : in out SPARK_IO.File_Type; Temp_SDP_Error_File : in out SPARK_IO.File_Type; Temp_DPC_Error_File : in out SPARK_IO.File_Type; Temp_Victor_Error_File : in out SPARK_IO.File_Type; Temp_Riposte_Error_File : in out SPARK_IO.File_Type); --# global in CommandLine.Data; --# in State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLine.Data, --# Report_File, --# State, --# Temp_Contra_File, --# Temp_DPC_Error_File, --# Temp_False_File, --# Temp_File, --# Temp_PR_Verr_File, --# Temp_Riposte_Error_File, --# Temp_Riposte_File, --# Temp_Rlu_Error_File, --# Temp_Rlu_Used_File, --# Temp_SDP_Error_File, --# Temp_User_File, --# Temp_Victor_Error_File, --# Temp_Victor_File, --# Temp_Warn_Error_File & --# Temp_Contra_File, --# Temp_DPC_Error_File, --# Temp_False_File, --# Temp_File, --# Temp_PR_Verr_File, --# Temp_Riposte_Error_File, --# Temp_Riposte_File, --# Temp_Rlu_Error_File, --# Temp_Rlu_Used_File, --# Temp_SDP_Error_File, --# Temp_User_File, --# Temp_Victor_Error_File, --# Temp_Victor_File, --# Temp_Warn_Error_File from *; end Total; spark-2012.0.deb/pogs/findfiles.adb0000644000175000017500000000702011753202340016016 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Package for reading the file structure on the disc. This package is -- --OS independent, and calls OSDirectory.Scan to perform the actual directory -- --scan. 
-- -- -- -------------------------------------------------------------------------------- with FatalErrors, FileDetails, FileHeap, Heap, OSDirectory; use type FileDetails.FileTypes; package body FindFiles is --------------------------------------------------------------------------- procedure Scan (StartDirectory : in E_Strings.T) is DetailsSuccess : Boolean; DirectoryIsResolved : Boolean; FileType : FileDetails.FileTypes; ListIndex : Heap.Atom; Name : E_Strings.T; NextListIndex : Heap.Atom; NextSuccess : Boolean := True; UnResolvedDirFound : Boolean := True; begin -- initialize the FileHeap FileHeap.Initialize (StartDirectory); while UnResolvedDirFound loop UnResolvedDirFound := False; -- start at beginning of linked list ListIndex := FileHeap.FirstEntry; -- while not end of list while NextSuccess loop -- get entry --# accept F, 10, Name, "Name unused here"; FileHeap.Details (ListIndex, DetailsSuccess, Name, FileType, DirectoryIsResolved); --# end accept; if not DetailsSuccess then FatalErrors.Process (FatalErrors.Data_Structure_Inconsistency, E_Strings.Empty_String); end if; -- if unresolved directory if FileType = FileDetails.Directory and not DirectoryIsResolved then -- read directory and insert items in list OSDirectory.Scan (ListIndex); -- mark directory as resolved FileHeap.MarkDirectoryResolved (ListIndex); UnResolvedDirFound := True; end if; -- go to next entry if DetailsSuccess then FileHeap.Next (ListIndex, NextSuccess, NextListIndex); ListIndex := NextListIndex; end if; end loop; end loop; --# accept F, 33, Name, "Name unused here"; end Scan; end FindFiles; spark-2012.0.deb/pogs/filedetails.ads0000644000175000017500000001211311753202340016360 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Package providing a structure in which to store file details, in the form -- --of a full pathname (without extension) and a type, either PlainFile or -- --directory. -- -- -- --To be used in tandem with the Heap data structure, hence the use of -- --Heap.Atom as the array range -- -------------------------------------------------------------------------------- with E_Strings, HeapIndex; use type E_Strings.Order_Types; use type HeapIndex.IndexType; --# inherit E_Strings, --# HeapIndex, --# OSFiling; package FileDetails is --ListLength : constant Integer := 20000; --type IndexType is range 0 .. 
ListLength; type FileTypes is (PlainFile, Directory, Invalid); type DataType is private; -------------------------------------------------------------------------- procedure Add (Details : in out DataType; Index : out HeapIndex.IndexType; Success : out Boolean; Name : in E_Strings.T; FileType : in FileTypes); --# derives Details from *, --# FileType, --# Name & --# Index, --# Success from Details; -------------------------------------------------------------------------- procedure Initialize (Details : out DataType); --# derives Details from ; -------------------------------------------------------------------------- procedure MarkDirectoryResolved (Details : in out DataType; Index : in HeapIndex.IndexType); --# derives Details from *, --# Index; -------------------------------------------------------------------------- -- this compares the information given and returns -- Result is as defined in section 5.4 of the specification -- it works directly on values rather than on indices into the Details -- structure so that information can be compared before insertion -- NOTE : the procedure is successful iff neither Type is Invalid -- procedure Order (FirstName : in E_Strings.T; FirstType : in FileTypes; SecondName : in E_Strings.T; SecondType : in FileTypes; Success : out Boolean; Result : out E_Strings.Order_Types); --# derives Result from FirstName, --# FirstType, --# SecondName, --# SecondType & --# Success from FirstType, --# SecondType; -------------------------------------------------------------------------- procedure Retrieve (Details : in DataType; Index : in HeapIndex.IndexType; Success : out Boolean; Name : out E_Strings.T; FileType : out FileTypes; DirectoryIsResolved : out Boolean); --# derives DirectoryIsResolved, --# FileType, --# Name, --# Success from Details, --# Index; private type DetailsElement is record Name : E_Strings.T; FileType : FileTypes; DirectoryIsResolved : Boolean; end record; NullDetailsElement : constant DetailsElement := DetailsElement'(Name => E_Strings.Empty_String, FileType => Invalid, DirectoryIsResolved => False); type ElementsArray is array (HeapIndex.IndexType) of DetailsElement; -- HighMark is the number of the highest allocated atom - zero if the -- structure is empty type DataType is record Details : ElementsArray; HighMark : HeapIndex.IndexType; end record; end FileDetails; spark-2012.0.deb/pogs/slg_parser.ads0000644000175000017500000002502311753202340016240 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
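-- A minimal driver sketch showing the nested usage pattern described in
-- the Synopsis below: each Get_Next_* level is called repeatedly until it
-- stops returning Success, at which point control moves back up a level.
-- The procedure name Dump_Rule_Usage and the log file name "example.slg"
-- are invented for illustration; everything else uses the operations
-- declared in this package.
with SLG_Parser, E_Strings, SPARK_IO;
procedure Dump_Rule_Usage is
   use type SLG_Parser.Log_Status_T;
   Info        : SLG_Parser.Log_Info_T;
   File_Status : SLG_Parser.Log_Status_T;
   Rule_Status : SLG_Parser.Log_Status_T;
   VC_Status   : SLG_Parser.Log_Status_T;
   Rulefile    : E_Strings.T;
   Rule        : E_Strings.T;
   VC_Number   : E_Strings.T;
begin
   SLG_Parser.Init (E_Strings.Copy_String (Str => "example.slg"), Info, File_Status);
   if File_Status = SLG_Parser.Success then
      SLG_Parser.Find_Rule_Summary (Info, File_Status);
      if File_Status = SLG_Parser.Success then
         loop
            SLG_Parser.Get_Next_Rulefile (Info, Rulefile, File_Status);
            exit when File_Status /= SLG_Parser.Success;  -- summary exhausted
            E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Rulefile);
            loop
               SLG_Parser.Get_Next_Rule (Info, Rule, Rule_Status);
               exit when Rule_Status /= SLG_Parser.Success;  -- rule file exhausted
               E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Rule);
               loop
                  SLG_Parser.Get_Next_VC (Info, VC_Number, VC_Status);
                  exit when VC_Status /= SLG_Parser.Success;  -- rule exhausted
                  E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => VC_Number);
               end loop;
            end loop;
         end loop;
      end if;
      SLG_Parser.Finalise (Info);
   end if;
end Dump_Rule_Usage;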
-- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- -- Package containing subprograms to facilitate incremental parsing of a -- -- simplifier log file. -- -- The basic mode of operation is: -- -- Initialize the log file for parsing - Init(Logfile, ParserInfo, Status) -- -- Provided Status = Success finde the section of interest, e.g. -- -- Find_Rule_Summary(ParserInfo, Status). -- Provided Status = Success obtain next subsection, e.g, -- Get_Next_Rulefile(ParserInfo, Rulefile, Status) -- Further subsections may be parsed similarly, e.g., -- Get_Next_Rule and Get_Next_VC. -- When a subsection has been exhuasted a status of Not_Found is -- returned and the previous higher level subsection parser should -- be invoked again, e.g., when Get_Next_VC returns a Status of -- Not_Found this means the next rule should be obtained via a call -- to Get_Next_Rule. -------------------------------------------------------------------------------- with E_Strings; with SPARK_IO; use type SPARK_IO.File_Status; --# inherit Ada.Characters.Latin_1, --# E_Strings, --# SPARK_IO; package SLG_Parser is type Log_Info_T is private; type Log_Status_T is (Success, Failure, Not_Found, Unexpected_Text, End_Of_File); -- Given the file name of a simplifier log file opens the file for -- incremental parsing. -- Status = Success if the file is opened without errors -- Status = Failure otherwise. procedure Init (Logfile_Name : in E_Strings.T; Info : out Log_Info_T; Status : out Log_Status_T); --# global in out SPARK_IO.File_Sys; --# derives Info, --# SPARK_IO.File_Sys, --# Status from Logfile_Name, --# SPARK_IO.File_Sys; -- Locates the start of the log file section which contains the list of -- of user rule files read in (but not necessarily used). -- Status = Success if the section is found -- Status = End_Of_File if the end of file is encountered during the find -- operation -- Status = Not_Found otherwise. procedure Find_Rulefiles_Read (Info : in out Log_Info_T; Status : out Log_Status_T); --# global in out SPARK_IO.File_Sys; --# derives Info, --# SPARK_IO.File_Sys, --# Status from Info, --# SPARK_IO.File_Sys; -- Locates the start of the log file section which contains the reporting -- of user rule file syntax errors ready for parsing this section. -- Status = Success if the section is found -- Status = End_Of_File if the end of file is encountered during the find -- operation -- Status = Not_Found otherwise. procedure Find_Rule_Syntax_Errors (Info : in out Log_Info_T; Status : out Log_Status_T); --# global in out SPARK_IO.File_Sys; --# derives Info, --# SPARK_IO.File_Sys, --# Status from Info, --# SPARK_IO.File_Sys; -- Successively obtain the next rule file name which contains a syntax error. -- Status = Success if a rule file name is returned in RuleFile, -- Status = End_Of_File if this is encountered, -- Status = Unexpected_Text if a syntax error is encountered, -- Status = Not_Found otherwise. procedure Get_Next_Rulefile_Syntax_Error (Info : in out Log_Info_T; Rulefile : out E_Strings.T; Status : out Log_Status_T); --# global in out SPARK_IO.File_Sys; --# derives Info, --# Rulefile, --# SPARK_IO.File_Sys, --# Status from Info, --# SPARK_IO.File_Sys; -- Locates the start of the log file section which contains the use -- of user rule file summary ready for parsing this section. 
-- Status = Success if the section is found -- Status = End_Of_File if the end of file is encountered during the find -- operation -- Status = Not_Found otherwise. procedure Find_Rule_Summary (Info : in out Log_Info_T; Status : out Log_Status_T); --# global in out SPARK_IO.File_Sys; --# derives Info, --# SPARK_IO.File_Sys, --# Status from Info, --# SPARK_IO.File_Sys; -- Successively obtain the next rule file name from the rule file summary. -- Status = Success if a rule file name is returned in RuleFile, -- Status = End_Of_File if this is encountered, -- Status = Unexpected_Text if a syntax error is encountered, -- Status = Not_Found otherwise. procedure Get_Next_Rulefile (Info : in out Log_Info_T; Rulefile : out E_Strings.T; Status : out Log_Status_T); --# global in out SPARK_IO.File_Sys; --# derives Info, --# Rulefile, --# SPARK_IO.File_Sys, --# Status from Info, --# SPARK_IO.File_Sys; -- If Get_Next_Rulefile succeeds, returning a rule file name in -- RuleFile, then without any intervening calls to other -- subprograms of Logfile_Incremental_Parser -- successive calls toGet_Next_Rule retrieve the next rule name -- of RuleFile from the summary. -- Status = Success if a rule name is returned in Rule, -- Status = End_Of_File if this is encountered, -- Status = Unexpected_Text if a syntax error is encountered, -- Status = Not_Found otherwise. procedure Get_Next_Rule (Info : in out Log_Info_T; Rule : out E_Strings.T; Status : out Log_Status_T); --# global in out SPARK_IO.File_Sys; --# derives Info, --# Rule, --# SPARK_IO.File_Sys, --# Status from Info, --# SPARK_IO.File_Sys; -- If Get_Next_Rule succeeds, returning a rule name in Rule, -- then without any intervening calls to other subprograms -- of Logfile_Incremental_Parser successive calls to Get_Next_Rule -- retrieve the next VC in which Rule is used from the summary. -- Status = Success if a VC number is returned in VC_Number, -- Status = End_Of_File if this is encountered, -- Status = Unexpected_Text if a syntax error is encountered, -- Status = Not_Found otherwise. procedure Get_Next_VC (Info : in out Log_Info_T; VC_Number : out E_Strings.T; Status : out Log_Status_T); --# global in out SPARK_IO.File_Sys; --# derives Info, --# SPARK_IO.File_Sys, --# Status, --# VC_Number from Info, --# SPARK_IO.File_Sys; -- Closes the Logfile and completes the incremental parse. procedure Finalise (Info : in out Log_Info_T); --# global in out SPARK_IO.File_Sys; --# derives Info, --# SPARK_IO.File_Sys from *, --# Info; private -- The Simplifier logfile has a number of three digit keys -- (see the simplifier user guide). This enumeration type -- represents the set of three digit codes used in the log. type Log_Key is ( -- The following literals represent the keys which denote -- headers for the various sections of the simplifier log. --------------------------------------------------------- Read_Rulefiles_Header, Syntax_Error_Header, No_Semantic_Checks_Header, VC_Header, VC_Summary_Header, Overall_Summary_Header, ---------------------------------------------------------- -- The remainder of the simplifier logfile keys. Load_Rulefile, Syntax_Error_In_Rulefile, Simplified, Proved, Restructured, Contradiction, Hyp_Eliminated, Hyp_Added, Substitution, New_Hyp_From_Subs, Rulefile_Use, Rule_Use, Conclusions, Hypotheses, VCs_Using_Rule, ---------------------------------------------------------- -- Not keys. To support processing only. Been_Initialised, Not_A_Recognised_Key, No_Room_For_Key, Reached_EOF); -- Subtype for all legal log keys. 
subtype Legal_Log_Keys is Log_Key range Read_Rulefiles_Header .. VCs_Using_Rule; -- Subtype just including the section headers. subtype Log_Headers is Legal_Log_Keys range Read_Rulefiles_Header .. Overall_Summary_Header; subtype Three_T is Positive range 1 .. 3; subtype Key_String is String (Three_T); type Log_Info_T is record File_Handle : SPARK_IO.File_Type; Key : Log_Key; Key_Found : Boolean; Posn : E_Strings.Lengths; Curr_Line : E_Strings.T; end record; end SLG_Parser; spark-2012.0.deb/pogs/vcs.adb0000644000175000017500000017526211753202340014664 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Package to perform processing of VC files -- -- -- --The method of collecting information from the VC files is described in the -- --specification. Below is given the algorithm for writing the information to -- --the report file. -- -- -- --In order to print the status of all VCs as a table, information about the -- --VCs is now collected in the VCHeap. -- -- -- --A record is put on the heap for each VC identified in the VCG file -- --indicating its name (used for indexing the records) and status -- --(undischarged at this point). -- -- -- --The status is subsequently updated for VCs identified when parsing the -- --SIV and PLG files (if the VC has been recognised as fully simplified -- --or fully proved). -- -- -- --When this parsing phase is complete, the content of the VCHeap is printed -- --to the report file. -- -- -- -------------------------------------------------------------------------------- with Ada.Characters.Handling; with CommandLine; with FatalErrors; with Heap; with OSFiling; with PathFormatter; with SPARK_Calendar; with Total; with VCDetails; with VCHeap; use type SPARK_Calendar.Error_Code; use type VCDetails.VC_State_T; package body VCS is -- All non-refinement and non-inheritance VCs have line numbers associated -- with their start and finish points. -- Sentinel values are used as follows: -- -1 is used for refinement and inheritance VCs, which do not have any -- line number associated with them. -- 0 is used to mean the start of a subprogram (e.g. the precondition) -- 'Last is used to mean the end of a subprogram (e.g. the postcondition) subtype VC_Line_Type is Integer range -1 .. Integer'Last; Refinement_Or_Inheritance_VC : constant VC_Line_Type := -1; VC_Line_Start : constant VC_Line_Type := 0; VC_Line_End : constant VC_Line_Type := VC_Line_Type'Last; -- Added to describe different forms of corrupt files. -- (This is known not to be a complete solution. 
The file parsing routines in -- pogs are quite basic, making it difficult to offer extensive well-formed -- file checks. Neverthless, the checks that are considered, are the most common, -- and are tracable to genuine user concerns.) type File_Status_T is (Not_Corrupt, Corrupt_Empty_File, Corrupt_Unknown_Subprogram); type File_Types is (Simplified_VC_File_Type, Standard_VC_File_Type); type Parsing_State_Type is (Initial, First_Range, First_VC_Name, New_Range, New_VC_Name); type VC_Info_Type is record Start_Line : VC_Line_Type; End_Line : VC_Line_Type; End_Line_Point_Type : VCDetails.Terminal_Point_Type; Number_Of_VCs : Natural; Valid : Boolean; -- used so that invalid data (at start of -- processing) is not printed File_Type : File_Types; -- SIV / VCG / PLG -- used to print the 'UndischargedVCs' message -- used to decide whether to print the start line for the given range Any_VCs_Printed : Boolean; This_Start_Line_Printed : Boolean; end record; subtype Errors_Index is Integer range 1 .. 1000; type Errors_List is array (Errors_Index) of E_Strings.T; type Review_Errors is record Errors : Boolean; -- Have we recorded any errors? Error_List : Errors_List; -- The list of errors Last_Error : Errors_Index; -- The last error Excess_Count : Natural; -- How many errors are unreported. end record; Report_Wrap_Column : constant Positive := 70; -- Note that all the columns below refer to the line AFTER it has been -- trimmed of white space VCG_File_Date_Time_Start_Column : constant E_Strings.Positions := 8; -- note, setting this to 20 ensures that the resolution is only down to -- the nearest second, for compatibility with the same information as -- extracted from the SIV file VCG_File_Date_Time_Length : constant E_Strings.Lengths := 20; SIV_File_VC_Generation_Date_Start_Column : constant E_Strings.Positions := 9; SIV_File_VC_Generation_Date_Length : constant E_Strings.Lengths := 11; SIV_File_VC_Generation_Time_Start_Column : constant E_Strings.Positions := 22; SIV_File_VC_Generation_Time_Length : constant E_Strings.Lengths := 8; SIV_File_Simplification_Date_Start_Column : constant E_Strings.Positions := 43; SIV_File_Simplification_Date_Length : constant E_Strings.Lengths := 11; SIV_File_Simplification_Time_Start_Column : constant E_Strings.Positions := 56; SIV_File_Simplification_Time_Length : constant E_Strings.Lengths := 8; -- Constants for processing DPC and SDP files. -- These constants are identical to the processing of VCG and -- SIV files as their format, at this time, are the same. 
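   -- For illustration: a start column C with length L selects characters
   -- C .. C + L - 1 of the trimmed line, so VCG_File_Date_Time_Start_Column = 8
   -- with length 20 covers positions 8 .. 27, and
   -- SIV_File_VC_Generation_Date_Start_Column = 9 with length 11 covers 9 .. 19.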
DPC_File_Date_Time_Start_Column : constant E_Strings.Positions := 8; DPC_File_Date_Time_Length : constant E_Strings.Lengths := 20; SDP_File_VC_Generation_Date_Start_Column : constant E_Strings.Positions := 9; SDP_File_VC_Generation_Date_Length : constant E_Strings.Lengths := 11; SDP_File_VC_Generation_Time_Start_Column : constant E_Strings.Positions := 22; SDP_File_VC_Generation_Time_Length : constant E_Strings.Lengths := 8; SDP_File_Simplification_Date_Start_Column : constant E_Strings.Positions := 44; SDP_File_Simplification_Date_Length : constant E_Strings.Lengths := 11; SDP_File_Simplification_Time_Start_Column : constant E_Strings.Positions := 56; SDP_File_Simplification_Time_Length : constant E_Strings.Lengths := 8; -- Constants to extract Proof Log File date PLG_File_VC_Proof_Date_Start_Column : constant E_Strings.Positions := 8; PLG_File_VC_Proof_Date_Length : constant E_Strings.Lengths := 11; PLG_File_VC_Proof_Time_Start_Column : constant E_Strings.Positions := 28; PLG_File_VC_Proof_Time_Length : constant E_Strings.Lengths := 8; Unknown_SIV_Date : constant String := "Unknown SIV file date"; Unknown_VCG_Date : constant String := "Unknown VCG file date"; Unknown_SDP_Date : constant String := "Unknown SDP file date"; Unknown_DPC_Date : constant String := "Unknown DPC file date"; Unknown_VCT_Date : constant String := "Unknown VCT file date"; ---------------------------------------------------------------------------- -- NB this trims the string given, then appends a space and a trimmed version -- of the next line from the input file procedure Append_Next_Line_From_File (Line : in out E_Strings.T; File : in SPARK_IO.File_Type) --# global in out SPARK_IO.File_Sys; --# derives Line, --# SPARK_IO.File_Sys from *, --# File, --# SPARK_IO.File_Sys; is Next_Line : E_Strings.T; Trimmed_Line : E_Strings.T; begin Trimmed_Line := E_Strings.Trim (Line); Line := Trimmed_Line; E_Strings.Append_String (E_Str => Line, Str => " "); E_Strings.Get_Line (File => File, E_Str => Next_Line); Trimmed_Line := E_Strings.Trim (Next_Line); E_Strings.Append_Examiner_String (E_Str1 => Line, E_Str2 => Trimmed_Line); end Append_Next_Line_From_File; --------------------------------------------------------------------------- function Extract_Line_Number_At_Position (Line : E_Strings.T; Start_Pos : E_Strings.Positions) return VC_Line_Type is Result : VC_Line_Type := 0; Pos : E_Strings.Positions; Finished : Boolean := False; begin Pos := Start_Pos; while not Finished loop if E_Strings.Get_Element (E_Str => Line, Pos => Pos) >= '0' and then E_Strings.Get_Element (E_Str => Line, Pos => Pos) <= '9' then Result := (10 * Result) + (Character'Pos (E_Strings.Get_Element (E_Str => Line, Pos => Pos)) - Character'Pos ('0')); if Pos < E_Strings.Get_Length (E_Str => Line) then Pos := Pos + 1; else Finished := True; end if; else Finished := True; end if; end loop; return Result; end Extract_Line_Number_At_Position; --------------------------------------------------------------------------- function Is_VC_Proof_Success_Line (Line : E_Strings.T) return Boolean is begin return E_Strings.Eq1_String (E_Str => E_Strings.Section (Line, 1, 14), Str => "*** PROVED VC "); end Is_VC_Proof_Success_Line; --------------------------------------------------------------------------- function Is_New_Range_Line (Line : E_Strings.T) return Boolean is begin return E_Strings.Eq1_String (E_Str => E_Strings.Section (E_Str => Line, Start_Pos => 1, Length => 17), Str => "For path(s) from ") or else E_Strings.Eq1_String (E_Str => E_Strings.Section (E_Str => 
Line, Start_Pos => 1, Length => 14), Str => "For checks of "); end Is_New_Range_Line; --------------------------------------------------------------------------- function Is_New_VC_Line (Line : E_Strings.T) return Boolean is Ret_Val : Boolean; begin -- The shortest possible New VC Line is for a function that has -- a single letter identifier, followed by a full-stop e.g. -- function_g. which is 11 characters. if E_Strings.Get_Length (E_Str => Line) >= 11 then Ret_Val := E_Strings.Eq1_String (E_Str => E_Strings.Section (Line, 1, 10), Str => "procedure_") or else E_Strings.Eq1_String (E_Str => E_Strings.Section (Line, 1, 9), Str => "function_") or else E_Strings.Eq1_String (E_Str => E_Strings.Section (Line, 1, 10), Str => "task_type_"); if Ret_Val then for I in E_Strings.Lengths range 9 .. E_Strings.Get_Length (E_Str => Line) - 1 loop if not (Ada.Characters.Handling.Is_Alphanumeric (E_Strings.Get_Element (E_Str => Line, Pos => I)) or else E_Strings.Get_Element (E_Str => Line, Pos => I) = '_') then Ret_Val := False; exit; end if; --# assert I in 9 .. E_Strings.Get_Length (Line) - 1 and --# Line = Line% and --# E_Strings.Get_Length (Line) >= 11; end loop; if E_Strings.Get_Element (E_Str => Line, Pos => E_Strings.Get_Length (E_Str => Line)) /= '.' then Ret_Val := False; end if; end if; else Ret_Val := False; end if; return Ret_Val; end Is_New_VC_Line; --------------------------------------------------------------------------- function Is_Trivially_True_VC (Line : E_Strings.T) return Boolean is begin return E_Strings.Eq1_String (E_Str => E_Strings.Section (Line, 1, 10), Str => "*** true ."); end Is_Trivially_True_VC; --------------------------------------------------------------------------- function Is_Trivially_False_VC (Line : E_Strings.T) return Boolean is VC_Proved_False : Boolean; Unused_Pos : E_Strings.Positions; begin if E_Strings.Get_Length (E_Str => Line) >= 14 then --# accept F, 10, Unused_Pos, "Unused_Pos unused here"; E_Strings.Find_Sub_String (E_Str => Line, Search_String => " false .", String_Found => VC_Proved_False, String_Start => Unused_Pos); --# end accept; -- It must be a conclusion, not a hypothesis! 
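   -- For illustration (assumed line layout): a conclusion line such as
   -- "C1:    false ." satisfies both the substring search above and the
   -- first-character test below, whereas a hypothesis such as
   -- "H1:    false ." is rejected because it does not start with 'C'.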
VC_Proved_False := VC_Proved_False and then (E_Strings.Get_Element (E_Str => Line, Pos => 1) = 'C'); else VC_Proved_False := False; end if; --# accept F, 33, Unused_Pos, "Unused_Pos unused here"; return VC_Proved_False; end Is_Trivially_False_VC; --------------------------------------------------------------------------- function Is_VC_Error_Message (Line : E_Strings.T) return Boolean is begin return E_Strings.Get_Length (E_Str => Line) > 0 and then E_Strings.Get_Element (E_Str => Line, Pos => 1) = '!'; end Is_VC_Error_Message; --------------------------------------------------------------------------- -- see file header procedure WriteVCInfo (Report_File : in SPARK_IO.File_Type; VC_Info : in VC_Info_Type; Anything_Printed_This_Time : out Boolean) --# global in out SPARK_IO.File_Sys; --# derives Anything_Printed_This_Time from VC_Info & --# SPARK_IO.File_Sys from *, --# Report_File, --# VC_Info; is separate; pragma Unreferenced (WriteVCInfo); --------------------------------------------------------------------------- procedure PrintVCReport (VC_Filename : in E_Strings.T; SIV_Filename : in E_Strings.T; VCTR_Filename : in E_Strings.T; RSM_Filename : in E_Strings.T; PLG_Filename : in E_Strings.T; SLG_Filename : in E_Strings.T; VC_Error : in Boolean; SIV_Error : in Boolean; VCTR_Error : in Boolean; RSM_Error : in Boolean; PLG_Error : in Boolean; SLG_Error : in Boolean; SLG_File_Missing : in Boolean; REV_Errors : in Review_Errors; Report_File : in SPARK_IO.File_Type; Temp_File : in SPARK_IO.File_Type; Temp_False_File : in SPARK_IO.File_Type; Temp_Contra_File : in SPARK_IO.File_Type; Temp_Victor_File : in SPARK_IO.File_Type; Temp_Riposte_File : in SPARK_IO.File_Type; Temp_User_File : in SPARK_IO.File_Type; Temp_PR_Verr_File : in SPARK_IO.File_Type; Temp_Warn_Error_File : in SPARK_IO.File_Type) --# global in CommandLine.Data; --# in VCHeap.I_State; --# in VCHeap.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLine.Data, --# PLG_Error, --# PLG_Filename, --# Report_File, --# REV_Errors, --# RSM_Error, --# RSM_Filename, --# SIV_Error, --# SIV_Filename, --# SLG_Error, --# SLG_Filename, --# SLG_File_Missing, --# Temp_Contra_File, --# Temp_False_File, --# Temp_File, --# Temp_PR_Verr_File, --# Temp_Riposte_File, --# Temp_User_File, --# Temp_Victor_File, --# Temp_Warn_Error_File, --# VCHeap.I_State, --# VCHeap.State, --# VCTR_Error, --# VCTR_Filename, --# VC_Error, --# VC_Filename; is separate; --------------------------------------------------------------------------- -- Process a line of type (1). Note that this can actually spread over two -- file lines, so the first action of this procedure is to read the next -- line from the file and append it onto the previous one. 
This has no -- unwanted side effects, as there is always a blank line between this and -- the start of the VC procedure ProcessNewRangeLine (Line : in E_Strings.T; VC_Info : in out VC_Info_Type) --# derives VC_Info from *, --# Line; is separate; --------------------------------------------------------------------------- procedure Read_Next_Non_Blank_Line (File : in SPARK_IO.File_Type; Success : out Boolean; File_Line : out E_Strings.T) --# global in out SPARK_IO.File_Sys; --# derives File_Line, --# SPARK_IO.File_Sys, --# Success from File, --# SPARK_IO.File_Sys; is Finished : Boolean := False; Status : Boolean := False; Line_Read : E_Strings.T; Trimmed_Line : E_Strings.T := E_Strings.Empty_String; begin while not Finished loop E_Strings.Get_Line (File => File, E_Str => Line_Read); Trimmed_Line := E_Strings.Trim (Line_Read); if E_Strings.Get_Length (E_Str => Trimmed_Line) > 0 then Status := True; Finished := True; else if SPARK_IO.End_Of_File (File) then Status := False; Finished := True; end if; end if; end loop; Success := Status; File_Line := Trimmed_Line; end Read_Next_Non_Blank_Line; --------------------------------------------------------------------------- -- This will return true if the given E_Str indicates a -- subprogram. (This is line 10 in .vcg files.) Currently, we are -- a valid subprogram if we start with either procedure, function -- or task_type and then either follow with a space or a newline. function Is_Valid_Subprogram (E_Str : E_Strings.T) return Boolean is function VC_Or_Newline (Prefix : String) return Boolean --# global in E_Str; is Result : Boolean; begin if E_Strings.Starts_With (E_Str, Prefix) then -- We now know E_Str starts with the given prefix, now the -- next character must either not exist at all or be a -- space. -- Note that due to the implementation of E_Strings it -- would be sufficient to just call Get_Element; if we -- ask for a position outside the string E_Strings will -- return a space. However, the below is safe for future -- changes. 
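         -- For illustration, with a hypothetical header line: if E_Str is
         -- "procedure Stack.Push" then VC_Or_Newline ("procedure") yields True,
         -- since a space follows the prefix; if E_Str is "procedure_x" it
         -- yields False, since the character after the prefix is '_'.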
if E_Strings.Get_Length (E_Str => E_Str) = Prefix'Length then Result := True; elsif E_Strings.Get_Element (E_Str => E_Str, Pos => Prefix'Length + 1) = ' ' then Result := True; else Result := False; end if; else Result := False; end if; return Result; end VC_Or_Newline; begin return VC_Or_Newline ("procedure") or else VC_Or_Newline ("function") or else VC_Or_Newline ("task_type"); end Is_Valid_Subprogram; --------------------------------------------------------------------------- procedure AnalyseVCFile (Report_File : in SPARK_IO.File_Type; Filename : in E_Strings.T; Error_In_File : out Boolean; File_Date_Time : out E_Strings.T) --# global in CommandLine.Data; --# in out FatalErrors.State; --# in out SPARK_IO.File_Sys; --# in out VCHeap.I_State; --# in out VCHeap.State; --# derives Error_In_File, --# SPARK_IO.File_Sys from CommandLine.Data, --# Filename, --# Report_File, --# SPARK_IO.File_Sys & --# FatalErrors.State, --# VCHeap.State from *, --# CommandLine.Data, --# Filename, --# Report_File, --# SPARK_IO.File_Sys, --# VCHeap.State & --# File_Date_Time from Filename, --# SPARK_IO.File_Sys & --# VCHeap.I_State from *, --# CommandLine.Data, --# Filename, --# Report_File, --# SPARK_IO.File_Sys; -- precondition to entering this procedure is that the VCG file exists is separate; --------------------------------------------------------------------------- procedure AnalyseSimplifiedVCFile (Report_File : in SPARK_IO.File_Type; Filename : in E_Strings.T; VC_File_Date_Time : in E_Strings.T; SIV_File_Date_Time : out E_Strings.T; Error_In_SIV_File : out Boolean) --# global in CommandLine.Data; --# in out FatalErrors.State; --# in out SPARK_IO.File_Sys; --# in out VCHeap.State; --# derives Error_In_SIV_File, --# SPARK_IO.File_Sys from CommandLine.Data, --# Filename, --# Report_File, --# SPARK_IO.File_Sys, --# VC_File_Date_Time & --# FatalErrors.State, --# VCHeap.State from *, --# CommandLine.Data, --# Filename, --# Report_File, --# SPARK_IO.File_Sys, --# VCHeap.State, --# VC_File_Date_Time & --# SIV_File_Date_Time from Filename, --# SPARK_IO.File_Sys; -- precondition to entering this procedure is that the SIV file exists is separate; --------------------------------------------------------------------------- procedure AnalyseVictoredVCFile (Report_File : in SPARK_IO.File_Type; Filename : in E_Strings.T; Error_In_VCTR_File : out Boolean; File_Error : out E_Strings.T; Temp_Victor_Error_File : in SPARK_IO.File_Type) --# global in CommandLine.Data; --# in out FatalErrors.State; --# in out SPARK_IO.File_Sys; --# in out VCHeap.State; --# derives Error_In_VCTR_File, --# File_Error, --# SPARK_IO.File_Sys from CommandLine.Data, --# Filename, --# Report_File, --# SPARK_IO.File_Sys, --# Temp_Victor_Error_File & --# FatalErrors.State, --# VCHeap.State from *, --# CommandLine.Data, --# Filename, --# Report_File, --# SPARK_IO.File_Sys, --# VCHeap.State; -- precondition to entering this procedure is that the VCT file exists is separate; --------------------------------------------------------------------------- procedure AnalyseVictorLogFile (Filename : in E_Strings.T; VC_File_Date_Time : in E_Strings.T; SIV_File_Date_Time : in E_Strings.T; VLG_File_Date_Time : out E_Strings.T; Error_In_VLG_File : out Boolean; File_Error : out E_Strings.T) --# global in out FatalErrors.State; --# in out SPARK_IO.File_Sys; --# derives Error_In_VLG_File, --# File_Error from Filename, --# SIV_File_Date_Time, --# SPARK_IO.File_Sys, --# VC_File_Date_Time & --# FatalErrors.State, --# SPARK_IO.File_Sys from *, --# Filename, --# 
SPARK_IO.File_Sys & --# VLG_File_Date_Time from Filename, --# SPARK_IO.File_Sys; -- precondition to entering this procedure is that the VLG file exists is separate; --------------------------------------------------------------------------- procedure Analyse_Riposte_Summary_File (Report_File : in SPARK_IO.File_Type; Filename : in E_Strings.T; Error_In_RSM_File : out Boolean; File_Error : out E_Strings.T; Temp_Riposte_Error_File : in SPARK_IO.File_Type) --# global in CommandLine.Data; --# in out FatalErrors.State; --# in out SPARK_IO.File_Sys; --# in out VCHeap.State; --# derives Error_In_RSM_File, --# File_Error, --# SPARK_IO.File_Sys from CommandLine.Data, --# Filename, --# Report_File, --# SPARK_IO.File_Sys, --# Temp_Riposte_Error_File & --# FatalErrors.State, --# VCHeap.State from *, --# CommandLine.Data, --# Filename, --# Report_File, --# SPARK_IO.File_Sys, --# VCHeap.State; -- precondition to entering this procedure is that the RSM file exists is separate; --------------------------------------------------------------------------- procedure Analyse_DPC_File (Report_File : in SPARK_IO.File_Type; Filename : in E_Strings.T; Error_In_File : out Boolean; File_Date_Time : out E_Strings.T) --# global in CommandLine.Data; --# in out FatalErrors.State; --# in out SPARK_IO.File_Sys; --# in out VCHeap.I_State; --# in out VCHeap.State; --# derives Error_In_File, --# SPARK_IO.File_Sys from CommandLine.Data, --# Filename, --# Report_File, --# SPARK_IO.File_Sys & --# FatalErrors.State, --# VCHeap.I_State, --# VCHeap.State from *, --# CommandLine.Data, --# Filename, --# Report_File, --# SPARK_IO.File_Sys, --# VCHeap.State & --# File_Date_Time from Filename, --# SPARK_IO.File_Sys; -- precondition to entering this procedure is that the DPC file exists is separate; --------------------------------------------------------------------------- procedure Analyse_Summary_DP_File (Report_File : in SPARK_IO.File_Type; Filename : in E_Strings.T; DPC_File_Date_Time : in E_Strings.T; Error_In_SDP_File : out Boolean) --# global in CommandLine.Data; --# in out FatalErrors.State; --# in out SPARK_IO.File_Sys; --# in out VCHeap.State; --# derives Error_In_SDP_File, --# SPARK_IO.File_Sys from CommandLine.Data, --# DPC_File_Date_Time, --# Filename, --# Report_File, --# SPARK_IO.File_Sys & --# FatalErrors.State, --# VCHeap.State from *, --# CommandLine.Data, --# DPC_File_Date_Time, --# Filename, --# Report_File, --# SPARK_IO.File_Sys, --# VCHeap.State; -- precondition to entering this procedure is that the SIV file exists is separate; --------------------------------------------------------------------------- procedure AnalyseReviewFile (Report_File : in SPARK_IO.File_Type; Filename : in E_Strings.T; Errors : out Review_Errors) --# global in VCHeap.I_State; --# in out FatalErrors.State; --# in out SPARK_IO.File_Sys; --# in out VCHeap.State; --# derives Errors, --# SPARK_IO.File_Sys, --# VCHeap.State from Filename, --# Report_File, --# SPARK_IO.File_Sys, --# VCHeap.I_State, --# VCHeap.State & --# FatalErrors.State from *, --# Filename, --# Report_File, --# SPARK_IO.File_Sys, --# VCHeap.I_State, --# VCHeap.State; -- precondition to entering this procedure is that the PRV file exists is separate; --------------------------------------------------------------------------- procedure AnalyseProofLogFile (Report_File : in SPARK_IO.File_Type; Filename : in E_Strings.T; SIV_File_Date_Time : in E_Strings.T; Error_In_File : out Boolean) --# global in CommandLine.Data; --# in out FatalErrors.State; --# in out SPARK_IO.File_Sys; --# 
in out VCHeap.State; --# derives Error_In_File, --# SPARK_IO.File_Sys from CommandLine.Data, --# Filename, --# Report_File, --# SIV_File_Date_Time, --# SPARK_IO.File_Sys & --# FatalErrors.State, --# VCHeap.State from *, --# CommandLine.Data, --# Filename, --# Report_File, --# SIV_File_Date_Time, --# SPARK_IO.File_Sys, --# VCHeap.State; -- precondition to entering this procedure is that the SIV file exists is separate; procedure AnalyseSimpLogFile (Report_File : in SPARK_IO.File_Type; Filename : in E_Strings.T; Rule_Files_Errors : in out SPARK_IO.File_Type; Rule_Files_Used : in out SPARK_IO.File_Type; SLG_Error_In_File : out Boolean) --# global in CommandLine.Data; --# in out FatalErrors.State; --# in out SPARK_IO.File_Sys; --# derives FatalErrors.State, --# SPARK_IO.File_Sys from *, --# CommandLine.Data, --# Filename, --# Report_File, --# Rule_Files_Errors, --# Rule_Files_Used, --# SPARK_IO.File_Sys & --# Rule_Files_Errors, --# Rule_Files_Used from *, --# CommandLine.Data, --# Filename, --# SPARK_IO.File_Sys & --# SLG_Error_In_File from CommandLine.Data, --# Filename, --# Report_File, --# Rule_Files_Errors, --# Rule_Files_Used, --# SPARK_IO.File_Sys; -- precondition to entering this procedure is that the SLG file exists -- and that the SIV file date time was valid is separate; --------------------------------------------------------------------------- procedure Analyse (Report_File : in SPARK_IO.File_Type; Filename : in E_Strings.T; Analyse_Proof_Log : in Boolean; Temp_File : in SPARK_IO.File_Type; Temp_False_File : in SPARK_IO.File_Type; Temp_Contra_File : in SPARK_IO.File_Type; Temp_Victor_File : in SPARK_IO.File_Type; Temp_Riposte_File : in SPARK_IO.File_Type; Temp_User_File : in SPARK_IO.File_Type; Temp_Rlu_Error_File : in out SPARK_IO.File_Type; Temp_Rlu_Used_File : in out SPARK_IO.File_Type; Temp_PR_Verr_File : in SPARK_IO.File_Type; Temp_Warn_Error_File : in SPARK_IO.File_Type; Temp_SDP_Error_File : in SPARK_IO.File_Type; Temp_DPC_Error_File : in SPARK_IO.File_Type; Temp_Victor_Error_File : in SPARK_IO.File_Type; Temp_Riposte_Error_File : in SPARK_IO.File_Type) is VC_Filename : E_Strings.T; VC_File_Date_Time : E_Strings.T; VC_File_Contained_Error : Boolean; SIV_File_Date_Time : E_Strings.T; Simplified_VC_Filename : E_Strings.T; SIV_File_Contained_Error : Boolean; Victored_VC_Filename : E_Strings.T; VCTR_File_Contained_Error : Boolean; VCTR_File_Error : E_Strings.T; Victor_Log_Filename : E_Strings.T; Victor_File_Date_Time : E_Strings.T; Victor_Log_File_Contained_Error : Boolean; Victor_Log_File_Error : E_Strings.T; Riposte_Summary_Filename : E_Strings.T; RSM_File_Contained_Error : Boolean; RSM_File_Error : E_Strings.T; Simplifier_Log_Filename : E_Strings.T; SLG_File_Contained_Error : Boolean; SLG_File_Is_Missing : Boolean; Proof_Log_Filename : E_Strings.T; PLG_File_Contained_Error : Boolean; PRV_Filename : E_Strings.T; PRV_File_Errors : Review_Errors; DPC_Filename : E_Strings.T; DPC_File_Date_Time : E_Strings.T; DPC_File_Contained_Error : Boolean; Summary_DP_Filename : E_Strings.T; SDP_File_Contained_Error : Boolean; Processed_VCG_File : Boolean; Processed_DPC_File : Boolean; procedure Say_Filename_Being_Processed (Filename : in E_Strings.T) --# global in CommandLine.Data; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLine.Data, --# Filename; is begin SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Processing file ", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => PathFormatter.Format (Filename)); SPARK_IO.New_Line 
(SPARK_IO.Standard_Output, 1); end Say_Filename_Being_Processed; begin -- Analyse Processed_VCG_File := False; Processed_DPC_File := False; VC_File_Contained_Error := False; SLG_File_Contained_Error := False; SLG_File_Is_Missing := False; SIV_File_Date_Time := E_Strings.Empty_String; Simplified_VC_Filename := E_Strings.Empty_String; Simplifier_Log_Filename := E_Strings.Empty_String; Proof_Log_Filename := E_Strings.Empty_String; Victored_VC_Filename := E_Strings.Empty_String; Riposte_Summary_Filename := E_Strings.Empty_String; PRV_File_Errors := Review_Errors' (Errors => False, Error_List => Errors_List'(others => E_Strings.Empty_String), Last_Error => Errors_Index'First, Excess_Count => 0); -- Initialise for VCG VC_Filename := Filename; E_Strings.Append_Examiner_String (E_Str1 => VC_Filename, E_Str2 => OSFiling.VC_File_Extension); -- Initialise for DPC DPC_Filename := Filename; E_Strings.Append_Examiner_String (E_Str1 => DPC_Filename, E_Str2 => OSFiling.DPC_File_Extension); -- Initialise for SDP Summary_DP_Filename := Filename; E_Strings.Append_Examiner_String (E_Str1 => Summary_DP_Filename, E_Str2 => OSFiling.Summary_DP_File_Extension); SIV_File_Contained_Error := False; RSM_File_Contained_Error := False; VCTR_File_Contained_Error := False; PLG_File_Contained_Error := False; if OSFiling.Is_File (Name => VC_Filename) then -- Record that VCG file is present. Processed_VCG_File := True; -- Initilise the other files that may be seen. -- -- The logic here is quite confusing. -- -- These files are initialised to their full names if the vcg -- file exists. This initialisation is independent to the -- existence of these files, or their own dependencies. The -- absence of this initialisation (null string, from default -- initialisations above) is detected and sometimes used. This -- null string test (hopefully) is only ever used to infer -- whether or not the vcg file exists. It might be a lot neater -- to just test to see if the vcg file exists. -- -- Unfortunately, these empty strings may be processed in an -- unsafe manner. However, it seems that the potentially unsafe -- routines are only called when the vcg file is present, so -- these initialisations below must take place, and the -- potential errors are avoided. -- -- Oh, note also that these are all local variables. But, this -- 'Analyse' procedure also contains a call to 'PrintVCReport' -- (it does analysis and printing), so the local variables have -- a wider impact than might be expected. 
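      -- For illustration (assuming the usual SPARK extensions held in OSFiling,
      -- e.g. ".siv", ".slg", ".vct", ".vlg", ".rsm", ".plg" and ".prv"): for a
      -- root Filename of "stack" the initialisations below build "stack.siv",
      -- "stack.slg", "stack.vct", "stack.vlg", "stack.rsm", "stack.plg" and
      -- "stack.prv" respectively, whether or not those files exist on disk.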
-- Initialise for SIV Simplified_VC_Filename := Filename; E_Strings.Append_Examiner_String (E_Str1 => Simplified_VC_Filename, E_Str2 => OSFiling.Simplified_VC_File_Extension); -- Initialise for SLG Simplifier_Log_Filename := Filename; E_Strings.Append_Examiner_String (E_Str1 => Simplifier_Log_Filename, E_Str2 => OSFiling.Simplifier_Log_File_Extension); SLG_File_Contained_Error := False; -- Initialise for Victor Victored_VC_Filename := Filename; E_Strings.Append_Examiner_String (E_Str1 => Victored_VC_Filename, E_Str2 => OSFiling.Victored_VC_File_Extension); -- Initialise for Victor Log Victor_Log_Filename := Filename; E_Strings.Append_Examiner_String (E_Str1 => Victor_Log_Filename, E_Str2 => OSFiling.Victor_Log_File_Extension); -- Initialise for Riposte Riposte_Summary_Filename := Filename; E_Strings.Append_Examiner_String (E_Str1 => Riposte_Summary_Filename, E_Str2 => OSFiling.Riposte_Summary_File_Extension); -- Initialise for PLG Proof_Log_Filename := Filename; E_Strings.Append_Examiner_String (E_Str1 => Proof_Log_Filename, E_Str2 => OSFiling.Proof_Log_File_Extension); PLG_File_Contained_Error := False; -- Initialise for PRV ("Proof Review") PRV_Filename := Filename; E_Strings.Append_Examiner_String (E_Str1 => PRV_Filename, E_Str2 => OSFiling.Review_File_Extension); -- Processing VCG file -- ################### -- If the VCG file exists: analyse it Say_Filename_Being_Processed (Filename => VC_Filename); AnalyseVCFile (Report_File, VC_Filename, VC_File_Contained_Error, -- for comparison with entry in SIV file VC_File_Date_Time); if VC_File_Contained_Error then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "*** Warning: VCG file error: subprogram processing abandoned ***", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); VCHeap.Raise_Error (Error_Kind => VCDetails.Corrupt_VCG_File); end if; --# assert True; -- Processing SIV file -- ################### -- If VCG analysis is not in error then: -- If the SIV file exists: analyse it if not VC_File_Contained_Error then if OSFiling.Is_File (Name => Simplified_VC_Filename) then Say_Filename_Being_Processed (Filename => Simplified_VC_Filename); AnalyseSimplifiedVCFile (Report_File => Report_File, Filename => Simplified_VC_Filename, VC_File_Date_Time => VC_File_Date_Time, SIV_File_Date_Time => SIV_File_Date_Time, Error_In_SIV_File => SIV_File_Contained_Error); if SIV_File_Contained_Error then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "*** Warning: SIV file error: subprogram processing abandoned ***", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); VCHeap.Raise_Error (Error_Kind => VCDetails.Corrupt_SIV_File); else -- Processing SLG file -- ################### -- If VCG analysis is not in error then: -- If the SIV analysis is not in error then: -- if the SLG file exists: analyse it -- DEBUG If you're looking to fix SEPR: 2263, then this is a good place to start.. 
if OSFiling.Is_File (Name => Simplifier_Log_Filename) then Say_Filename_Being_Processed (Filename => Simplifier_Log_Filename); AnalyseSimpLogFile (Report_File, Simplifier_Log_Filename, Temp_Rlu_Error_File, Temp_Rlu_Used_File, SLG_File_Contained_Error); if SLG_File_Contained_Error then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "*** Warning: SLG file error: subprogram processing abandoned ***", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); VCHeap.Raise_Error (Error_Kind => VCDetails.Corrupt_SLG_File); end if; else SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Simplifier log file not found: ", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => PathFormatter.Format (Simplifier_Log_Filename)); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); -- The absence of an SLG file is not treated as an error. -- Some organisations may deliberately delete SLG files, to -- save space. To support the use of POGS on such data sets, -- missing SLG files can not be regarded as errors, as this -- would lead to the analysis of these subprograms being -- abandoned. -- It is still necessary to report missing SLG files. To maintain -- the distinction between missing SLG and error, an additional -- variable is used. SLG_File_Is_Missing := True; VCHeap.Raise_Error (Error_Kind => VCDetails.Missing_SLG_File); -- See: EmptyStringHere? -- Simplifier_Log_Filename := ELStrings.EmptyString; end if; -- TODO: J525-022: Do we want to look at the ViCToR equivalent here? end if; else -- DEBUG If you're looking to fix SEPR: 2264, then this is a good place to start.. -- otherwise say that VCs not simplified SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_Line (Report_File, "VCs not simplified", 0); -- See: EmptyStringHere? --Simplified_VC_Filename := ELStrings.EmptyString; end if; end if; --# assert True; -- Processing VCT and VLG files -- ############################ -- If VCG analysis is not in error and -- If SIV analysis is not in error (or never happened, in which case it is not in error) then: -- Check that the vct file exists -- Check that the vlg file exists and obtain its timestamp -- Compare timestamps of the vlg to the ones recorded in vcg -- and siv files (which will be raised as an error if they -- are not in the expected sequence) -- Analyse the vct file if the above resulted in no errors if not VC_File_Contained_Error and not SIV_File_Contained_Error then if OSFiling.Is_File (Name => Victored_VC_Filename) then if OSFiling.Is_File (Name => Victor_Log_Filename) then -- First we analyse the VLG file Say_Filename_Being_Processed (Filename => Victor_Log_Filename); --# accept F, 10, Victor_File_Date_Time, "We don't use this yet, but may in the future."; AnalyseVictorLogFile (Filename => Victor_Log_Filename, VC_File_Date_Time => VC_File_Date_Time, SIV_File_Date_Time => SIV_File_Date_Time, VLG_File_Date_Time => Victor_File_Date_Time, Error_In_VLG_File => Victor_Log_File_Contained_Error, File_Error => Victor_Log_File_Error); --# end accept; -- We still flag this up as an error. -- Victor_Log_File_Contained_Error will not get passed on later, -- but VCTR_File_Contained_Error will be. 
if Victor_Log_File_Contained_Error then VCTR_File_Contained_Error := True; end if; -- Then we analyse the VCT file if the above resulted in no errors if not Victor_Log_File_Contained_Error then Say_Filename_Being_Processed (Filename => Victored_VC_Filename); AnalyseVictoredVCFile (Report_File, Victored_VC_Filename, VCTR_File_Contained_Error, VCTR_File_Error, Temp_Victor_Error_File); if VCTR_File_Contained_Error then SPARK_IO.Put_String (SPARK_IO.Standard_Output, "*** Warning: VCT file error: ", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => VCTR_File_Error); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); VCHeap.Raise_Error (Error_Kind => VCDetails.Corrupt_VCT_File); -- We also mention the error in the victor error file. E_Strings.Put_String (File => Temp_Victor_Error_File, E_Str => PathFormatter.Format (Victored_VC_Filename)); SPARK_IO.Put_Char (Temp_Victor_Error_File, ' '); SPARK_IO.Put_Char (Temp_Victor_Error_File, '('); E_Strings.Put_String (File => Temp_Victor_Error_File, E_Str => VCTR_File_Error); SPARK_IO.Put_Line (Temp_Victor_Error_File, ")", 0); end if; else SPARK_IO.Put_String (SPARK_IO.Standard_Output, "*** Warning: VLG file error: ", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Victor_Log_File_Error); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); VCHeap.Raise_Error (Error_Kind => VCDetails.Corrupt_VLG_File); -- We also mention the error in the victor error file. E_Strings.Put_String (File => Temp_Victor_Error_File, E_Str => PathFormatter.Format (Victor_Log_Filename)); SPARK_IO.Put_Char (Temp_Victor_Error_File, ' '); SPARK_IO.Put_Char (Temp_Victor_Error_File, '('); E_Strings.Put_String (File => Temp_Victor_Error_File, E_Str => Victor_Log_File_Error); SPARK_IO.Put_Line (Temp_Victor_Error_File, ")", 0); end if; else SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "*** Warning: VLG file missing: subprogram processing abandoned ***", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); VCHeap.Raise_Error (Error_Kind => VCDetails.Missing_VLG_File); end if; end if; -- We don't mention it if no victor output can be found, -- as it is an optional feature. end if; --# assert True; -- Processing RSM file -- ################### -- If VCG analysis is not in error and -- If SIV analysis is not in error (or never happened, in which case it is not in error) then: -- Check that the rsm file exists -- Analyse the rsm file if the above resulted in no errors if not VC_File_Contained_Error and not SIV_File_Contained_Error then if OSFiling.Is_File (Name => Riposte_Summary_Filename) then Say_Filename_Being_Processed (Filename => Riposte_Summary_Filename); Analyse_Riposte_Summary_File (Report_File, Riposte_Summary_Filename, RSM_File_Contained_Error, RSM_File_Error, Temp_Riposte_Error_File); if RSM_File_Contained_Error then SPARK_IO.Put_String (SPARK_IO.Standard_Output, "*** Warning: RSM file error: ", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => RSM_File_Error); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); VCHeap.Raise_Error (Error_Kind => VCDetails.Corrupt_RSM_File); -- We also mention the error in the Riposte error file. 
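                  -- For illustration: the entry written below has the form
                  -- "<formatted RSM filename> (<error text>)", for example
                  -- "stack.rsm (corrupt summary file)" with a hypothetical
                  -- error message.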
E_Strings.Put_String (File => Temp_Riposte_Error_File, E_Str => PathFormatter.Format (Riposte_Summary_Filename)); SPARK_IO.Put_Char (Temp_Riposte_Error_File, ' '); SPARK_IO.Put_Char (Temp_Riposte_Error_File, '('); E_Strings.Put_String (File => Temp_Riposte_Error_File, E_Str => RSM_File_Error); SPARK_IO.Put_Line (Temp_Riposte_Error_File, ")", 0); end if; end if; -- We don't mention it if no Riposte output can be found, -- as it is an optional feature. end if; --# assert True; -- Processing PLG file -- ################### -- Unsimplified VCs might have been proven using the checker alone. -- If VCG analysis is not in error then: -- If any SIV analysis is not in error then: -- If any SLG analysis is not in error then: -- If any victor analysis is not in error (or never happened): -- If the 'Analyse_Proof_Log ' is set: -- If the PLG file exists then: analyse it -- Analyse the PLG file if not VC_File_Contained_Error and not SIV_File_Contained_Error and not SLG_File_Contained_Error and not VCTR_File_Contained_Error and Analyse_Proof_Log then if OSFiling.Is_File (Name => Proof_Log_Filename) then Say_Filename_Being_Processed (Filename => Proof_Log_Filename); AnalyseProofLogFile (Report_File, Proof_Log_Filename, SIV_File_Date_Time, PLG_File_Contained_Error); if PLG_File_Contained_Error then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "*** Warning: PLG file error: subprogram processing abandoned ***", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); VCHeap.Raise_Error (Error_Kind => VCDetails.Corrupt_PLG_File); end if; else null; -- See: EmptyStringHere? --Proof_Log_Filename := ELStrings.EmptyString; end if; end if; --# assert True; -- Processing PRV file -- ################### -- PRV files may be present regardless of simplification or application of the checker. -- If VCG analysis is not in error then: -- If any SIV analysis is not in error then: -- If any SLG analysis is not in error then: -- If any victor analysis is not in error (or never happened): -- If any PLG analysis is not in error then: -- If the PRV file exists then: analyse it if not VC_File_Contained_Error and not SIV_File_Contained_Error and not SLG_File_Contained_Error and not VCTR_File_Contained_Error and not PLG_File_Contained_Error and OSFiling.Is_File (Name => PRV_Filename) then Say_Filename_Being_Processed (Filename => PRV_Filename); AnalyseReviewFile (Report_File, PRV_Filename, PRV_File_Errors); if PRV_File_Errors.Errors then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "*** Warning: PRV file error: subprogram processing abandoned ***", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); VCHeap.Raise_Error (Error_Kind => VCDetails.Corrupt_PRV_File); end if; end if; end if; --# assert True; if OSFiling.Is_File (Name => DPC_Filename) then Processed_DPC_File := True; -- Processing DPC file -- ################### -- If the DPC file exists: analyse it. Say_Filename_Being_Processed (Filename => DPC_Filename); Analyse_DPC_File (Report_File, DPC_Filename, DPC_File_Contained_Error, -- for comparison with entry in SDP file DPC_File_Date_Time); if DPC_File_Contained_Error then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "*** Warning: DPC file error: subprogram processing abandoned ***", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); VCHeap.Raise_Error (Error_Kind => VCDetails.Corrupt_DPC_File); end if; --# assert True; -- Processing SDP File. 
-- ################### -- If the SDP file exists: analyse it if OSFiling.Is_File (Name => Summary_DP_Filename) then Say_Filename_Being_Processed (Filename => Summary_DP_Filename); Analyse_Summary_DP_File (Report_File, Summary_DP_Filename, DPC_File_Date_Time, SDP_File_Contained_Error); if SDP_File_Contained_Error then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "*** Warning: SDP file error: subprogram processing abandoned ***", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); VCHeap.Raise_Error (Error_Kind => VCDetails.Corrupt_SDP_File); end if; elsif OSFiling.Is_File (Name => DPC_Filename) and not OSFiling.Is_File (Name => Summary_DP_Filename) then SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_Line (Report_File, "DPCs not ZombieScoped", 0); E_Strings.Put_Line (File => Temp_SDP_Error_File, E_Str => PathFormatter.Format (Summary_DP_Filename)); elsif not OSFiling.Is_File (Name => DPC_Filename) and OSFiling.Is_File (Name => Summary_DP_Filename) then -- An error has occurred as an SDP file exists without the -- corresponding DPC file. SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "*** Warning: Missing DPC file error. ***", 0); SPARK_IO.Put_Line (Report_File, "*** Warning: Missing DPC files error. ***", 0); E_Strings.Put_Line (File => Temp_DPC_Error_File, E_Str => PathFormatter.Format (DPC_Filename)); end if; end if; --# assert True; if Processed_VCG_File or Processed_DPC_File then Total.Update_Totals (VCG => Processed_VCG_File, DPC => Processed_DPC_File); PrintVCReport (VC_Filename => VC_Filename, SIV_Filename => Simplified_VC_Filename, VCTR_Filename => Victored_VC_Filename, RSM_Filename => Riposte_Summary_Filename, PLG_Filename => Proof_Log_Filename, SLG_Filename => Simplifier_Log_Filename, VC_Error => VC_File_Contained_Error, SIV_Error => SIV_File_Contained_Error, VCTR_Error => VCTR_File_Contained_Error, RSM_Error => RSM_File_Contained_Error, PLG_Error => PLG_File_Contained_Error, SLG_Error => SLG_File_Contained_Error, SLG_File_Missing => SLG_File_Is_Missing, REV_Errors => PRV_File_Errors, Report_File => Report_File, Temp_File => Temp_File, Temp_False_File => Temp_False_File, Temp_Contra_File => Temp_Contra_File, Temp_Victor_File => Temp_Victor_File, Temp_Riposte_File => Temp_Riposte_File, Temp_User_File => Temp_User_File, Temp_PR_Verr_File => Temp_PR_Verr_File, Temp_Warn_Error_File => Temp_Warn_Error_File); SPARK_IO.New_Line (Report_File, 1); end if; --# assert True; --# accept Flow, 601, VCHeap.State, Temp_Rlu_Error_File, "False coupling through SPARK_IO" & --# Flow, 601, VCHeap.State, Temp_Rlu_Used_File, "False coupling through SPARK_IO" & --# Flow, 601, VCHeap.State, Temp_Victor_Error_File, "False coupling through SPARK_IO" & --# Flow, 601, VCHeap.State, Temp_Riposte_Error_File, "False coupling through SPARK_IO" & --# Flow, 601, VCHeap.I_State, Temp_Victor_Error_File, "False coupling through SPARK_IO" & --# Flow, 601, VCHeap.I_State, Temp_Riposte_Error_File, "False coupling through SPARK_IO" & --# Flow, 601, Total.State, Temp_Rlu_Error_File, "False coupling through SPARK_IO" & --# Flow, 601, Total.State, Temp_Rlu_Used_File, "False coupling through SPARK_IO" & --# Flow, 601, Total.State, Temp_Victor_Error_File, "False coupling through SPARK_IO" & --# Flow, 601, Total.State, Temp_Riposte_Error_File, "False coupling through SPARK_IO" & --# Flow, 601, Temp_Rlu_Error_File, CommandLine.Data, "False coupling through SPARK_IO" & --# Flow, 601, Temp_Rlu_Used_File, CommandLine.Data, "False coupling through SPARK_IO" & --# Flow, 601, Temp_Rlu_Error_File, Report_File, "False coupling through 
SPARK_IO" & --# Flow, 601, Temp_Rlu_Used_File, Report_File, "False coupling through SPARK_IO" & --# Flow, 601, FatalErrors.State, Temp_Victor_Error_File, "False coupling through SPARK_IO" & --# Flow, 601, FatalErrors.State, Temp_Riposte_Error_File, "False coupling through SPARK_IO" & --# Flow, 33, Victor_File_Date_Time, "We don't export this yet - we might in the future."; end Analyse; end VCS; spark-2012.0.deb/pogs/all.wrn0000644000175000017500000000016111753202340014702 0ustar eugeneugen-- Warning control file for POGS default_loop_assertions hidden_parts pragma all static_expressions with_clauses spark-2012.0.deb/pogs/vcs-writevcinfo.adb0000644000175000017500000000745411753202340017216 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --WriteVCInfo procedure for VCS package. 
-- -- -- -------------------------------------------------------------------------------- separate (VCS) procedure WriteVCInfo (Report_File : in SPARK_IO.File_Type; VC_Info : in VC_Info_Type; Anything_Printed_This_Time : out Boolean) is procedure Print_Line_Number (Report_File : in SPARK_IO.File_Type; Line_Number : in VC_Line_Type; Start_Of_Line : in Boolean) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Line_Number, --# Report_File, --# Start_Of_Line; is Width : Natural; begin if Line_Number = VC_Line_Start then SPARK_IO.Put_String (Report_File, "start", 0); elsif Line_Number = VC_Line_End then SPARK_IO.Put_String (Report_File, "finish", 0); else if Start_Of_Line then Width := 5; else Width := 0; end if; SPARK_IO.Put_Integer (Report_File, Line_Number, Width, 10); end if; end Print_Line_Number; begin -- WriteVCInfo if VC_Info.Number_Of_VCs > 0 then Anything_Printed_This_Time := True; if VC_Info.File_Type = Simplified_VC_File_Type and not VC_Info.Any_VCs_Printed then SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_Line (Report_File, "Undischarged VCs remain for lines:", 0); end if; if not VC_Info.This_Start_Line_Printed or else SPARK_IO.Col (Report_File) > Report_Wrap_Column then SPARK_IO.New_Line (Report_File, 1); -- write new start line Print_Line_Number (Report_File => Report_File, Line_Number => VC_Info.Start_Line, Start_Of_Line => True); SPARK_IO.Put_String (Report_File, " to: ", 0); else SPARK_IO.Put_String (Report_File, ", ", 0); end if; -- write last line of range Print_Line_Number (Report_File => Report_File, Line_Number => VC_Info.End_Line, Start_Of_Line => False); -- print VC count SPARK_IO.Put_Char (Report_File, '('); SPARK_IO.Put_Integer (Report_File, VC_Info.Number_Of_VCs, 0, 10); SPARK_IO.Put_Char (Report_File, ')'); else Anything_Printed_This_Time := False; end if; end WriteVCInfo; spark-2012.0.deb/pogs/vcs-processnewrangeline.adb0000644000175000017500000002021511753202340020722 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Procedure to process type (1) lines for the VCS package -- -- -- -------------------------------------------------------------------------------- with VCDetails; separate (VCS) procedure ProcessNewRangeLine (Line : in E_Strings.T; VC_Info : in out VC_Info_Type) is New_End_Line : VC_Line_Type; New_Start_Line : VC_Line_Type; New_VC_Info : VC_Info_Type; New_End_Line_Point_Type : VCDetails.Terminal_Point_Type; --------------------------------------------------------------------- -- The first line specified in a line of type (1) is found as follows: -- If the line contains 'start' then that's it -- If the line contatin 'refinement' then that's it -- If the line contatin 'inheritance' then that's it -- otherwise find the first occurrence of 'line' and extract the number -- which follows function Extract_First_Line_In_Range (Line : E_Strings.T) return VC_Line_Type is Dummy_Position : E_Strings.Positions; Line_Found : Boolean; Line_Position : E_Strings.Positions; Line_Number : VC_Line_Type; Start_Found : Boolean; Refinement_Found : Boolean; Inheritance_Found : Boolean; begin -- Extract_First_Line_In_Range --# accept F, 10, Dummy_Position, "Dummy_Position unused here"; E_Strings.Find_Sub_String (E_Str => Line, Search_String => "inheritance", String_Found => Inheritance_Found, String_Start => Dummy_Position); E_Strings.Find_Sub_String (E_Str => Line, Search_String => "refinement", String_Found => Refinement_Found, String_Start => Dummy_Position); E_Strings.Find_Sub_String (E_Str => Line, Search_String => "start", String_Found => Start_Found, String_Start => Dummy_Position); --# end accept; if Refinement_Found or Inheritance_Found then Line_Number := Refinement_Or_Inheritance_VC; elsif Start_Found then Line_Number := VC_Line_Start; else --# accept F, 10, Line_Found, "Line_Found unused here"; E_Strings.Find_Sub_String (E_Str => Line, Search_String => "line", String_Found => Line_Found, String_Start => Line_Position); --# end accept; Line_Number := Extract_Line_Number_At_Position (Line => Line, Start_Pos => Line_Position + 5); end if; --# accept F, 33, Dummy_Position, "Dummy_Position unused here" & --# F, 33, Line_Found, "Line_Found unused here"; return Line_Number; end Extract_First_Line_In_Range; --------------------------------------------------------------------- -- the second line specified in a line of type (1) is found as follows: -- either find "finish", or find first occurrence of "line" after "start" -- or find second occurrence of "line" function Extract_Last_Line_In_Range (Line : E_Strings.T) return VC_Line_Type is Dummy_Position : E_Strings.Positions; Finish_Found : Boolean; Line_Found : Boolean; Line_Position : E_Strings.Positions; Line_Number : VC_Line_Type; First_Line_Pos : E_Strings.Positions; --------------------------------------------------------------------- -- this function returns the index of the string "start" in the line -- if this is not present, it returns the index of the first occurrence -- of "line" -- the action if neither is present is undefined function Position_Of_First_Line_Number (Line : E_Strings.T) return E_Strings.Positions is Line_Found : Boolean; Line_Position : E_Strings.Positions; Pos : E_Strings.Positions; Start_Found : Boolean; Start_Position : E_Strings.Positions; begin -- Position_Of_First_Line_Number E_Strings.Find_Sub_String (E_Str => Line, Search_String => "start", 
String_Found => Start_Found, String_Start => Start_Position); if Start_Found then Pos := Start_Position; else --# accept F, 10, Line_Found, "Line_Found unused here"; E_Strings.Find_Sub_String (E_Str => Line, Search_String => "line", String_Found => Line_Found, String_Start => Line_Position); --# end accept; Pos := Line_Position; end if; --# accept F, 33, Line_Found, "Line_Found unused here"; return Pos; end Position_Of_First_Line_Number; --------------------------------------------------------------------- begin -- Extract_Last_Line_In_Range --# accept F, 10, Dummy_Position, "Dummy_Position unused here"; E_Strings.Find_Sub_String (E_Str => Line, Search_String => "finish", String_Found => Finish_Found, String_Start => Dummy_Position); --# end accept; if Finish_Found then Line_Number := VC_Line_End; else -- find first line number First_Line_Pos := Position_Of_First_Line_Number (Line => Line); -- skip it and find second one --# accept F, 10, Line_Found, "Line_Found unused here"; E_Strings.Find_Sub_String_After (E_Str => Line, Search_Start => First_Line_Pos + 5, Search_String => "line", String_Found => Line_Found, String_Start => Line_Position); --# end accept; Line_Number := Extract_Line_Number_At_Position (Line => Line, Start_Pos => Line_Position + 5); end if; --# accept F, 33, Dummy_Position, "Dummy_Position unused here" & --# F, 33, Line_Found, "Line_Found unused here"; return Line_Number; end Extract_Last_Line_In_Range; --------------------------------------------------------------------- begin -- ProcessNewRangeLine New_Start_Line := Extract_First_Line_In_Range (Line => Line); if New_Start_Line = Refinement_Or_Inheritance_VC then New_End_Line := Refinement_Or_Inheritance_VC; else New_End_Line := Extract_Last_Line_In_Range (Line => Line); end if; New_End_Line_Point_Type := VCDetails.Path_End_To_Path_Type (Line => Line); New_VC_Info := VC_Info_Type' (Start_Line => New_Start_Line, End_Line => New_End_Line, End_Line_Point_Type => New_End_Line_Point_Type, Number_Of_VCs => 0, Valid => True, File_Type => VC_Info.File_Type, Any_VCs_Printed => False, This_Start_Line_Printed => False); VC_Info := New_VC_Info; end ProcessNewRangeLine; spark-2012.0.deb/pogs/vcs-printvcreport.adb0000644000175000017500000004760011753202340017575 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Procedure to print the VC report -- -- -- -------------------------------------------------------------------------------- with PathFormatter; with SPARK_IO; with VCDetails; separate (VCS) procedure PrintVCReport (VC_Filename : in E_Strings.T; SIV_Filename : in E_Strings.T; VCTR_Filename : in E_Strings.T; RSM_Filename : in E_Strings.T; PLG_Filename : in E_Strings.T; SLG_Filename : in E_Strings.T; VC_Error : in Boolean; SIV_Error : in Boolean; VCTR_Error : in Boolean; RSM_Error : in Boolean; PLG_Error : in Boolean; SLG_Error : in Boolean; SLG_File_Missing : in Boolean; REV_Errors : in Review_Errors; Report_File : in SPARK_IO.File_Type; Temp_File : in SPARK_IO.File_Type; Temp_False_File : in SPARK_IO.File_Type; Temp_Contra_File : in SPARK_IO.File_Type; Temp_Victor_File : in SPARK_IO.File_Type; Temp_Riposte_File : in SPARK_IO.File_Type; Temp_User_File : in SPARK_IO.File_Type; Temp_PR_Verr_File : in SPARK_IO.File_Type; Temp_Warn_Error_File : in SPARK_IO.File_Type) is Table_Pad_Width : constant Integer := 44; Proved_By_Col_Width : constant Integer := 20; Dead_Path_Col_Width : constant Integer := 11; Status_Code_Str_Width : constant Integer := 1; subtype Proved_By_Str_Index is Integer range 1 .. Proved_By_Col_Width; subtype Dead_Path_Str_Index is Integer range 1 .. Dead_Path_Col_Width; subtype Status_Code_Str_Index is Integer range 1 .. Status_Code_Str_Width; subtype Proved_By_Str_T is String (Proved_By_Str_Index); subtype Dead_Path_Str_T is String (Dead_Path_Str_Index); subtype Status_Code_Str_T is String (Status_Code_Str_Index); type Proved_By_Str_Array is array (VCDetails.VC_State_T) of Proved_By_Str_T; type Dead_Path_Str_Array is array (VCDetails.DPC_State_T) of Dead_Path_Str_T; type Proved_By_Status_Code_Str_Array is array (VCDetails.VC_State_T) of Status_Code_Str_T; type Dead_Path_Status_Code_Str_Array is array (VCDetails.DPC_State_T) of Status_Code_Str_T; Proved_By_Str : constant Proved_By_Str_Array := Proved_By_Str_Array' (VCDetails.VC_Not_Present => " No VCG ", VCDetails.VC_SIV_Not_Present => " No SIV ", VCDetails.VC_Undischarged => " Undischarged ", VCDetails.VC_Proved_By_Examiner => " Examiner ", VCDetails.VC_Proved_By_Inference => " Inference ", VCDetails.VC_Proved_By_Contradiction => " Contradiction ", VCDetails.VC_Proved_Using_User_Proof_Rules => " User Rules ", VCDetails.VC_Proved_By_Victor => " Victor ", VCDetails.VC_Proved_By_Riposte => " Riposte ", VCDetails.VC_Proved_By_Checker => " Checker ", VCDetails.VC_Proved_By_Review => " Review ", VCDetails.VC_False => " False "); Dead_Path_Str : constant Dead_Path_Str_Array := Dead_Path_Str_Array' (VCDetails.DPC_Not_Present => " No DPC ", VCDetails.DPC_SDP_Not_Present => " No SDP ", VCDetails.DPC_Unchecked => " Unchecked ", VCDetails.DPC_Live => " Live ", VCDetails.DPC_Dead => " Dead "); Proved_By_Status_Code : constant Proved_By_Status_Code_Str_Array := Proved_By_Status_Code_Str_Array' (VCDetails.VC_Not_Present => "-", VCDetails.VC_SIV_Not_Present => "S", VCDetails.VC_Undischarged => "U", VCDetails.VC_Proved_By_Examiner => "E", VCDetails.VC_Proved_By_Inference => "I", VCDetails.VC_Proved_By_Contradiction => "X", VCDetails.VC_Proved_By_Checker => "C", VCDetails.VC_Proved_By_Review => "R", VCDetails.VC_Proved_Using_User_Proof_Rules => "P", VCDetails.VC_Proved_By_Victor => "V", VCDetails.VC_Proved_By_Riposte => "O", VCDetails.VC_False => "F"); 
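   -- For illustration: each VC state above maps to a single status letter,
   -- e.g. "I" for VC_Proved_By_Inference, "R" for VC_Proved_By_Review and
   -- "U" for VC_Undischarged; the array below gives the corresponding
   -- letters for dead-path (DPC) results.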
Dead_Path_Status_Code : constant Dead_Path_Status_Code_Str_Array := Dead_Path_Status_Code_Str_Array' (VCDetails.DPC_Not_Present => "-", VCDetails.DPC_SDP_Not_Present => "S", VCDetails.DPC_Unchecked => "U", VCDetails.DPC_Live => "L", VCDetails.DPC_Dead => "D"); VC_Success : Boolean; This_VC : Heap.Atom; Next_VC : Heap.Atom; VC_Number : E_Strings.T; Table_Line : E_Strings.T; VC_Name_Prefix : E_Strings.T; VC_Name_Tabulation : Integer; VC_Path_Start_Tabulation : Integer; VC_Path_End_Tabulation : Integer; This_VC_Name : E_Strings.T; This_VC_Path_Start : E_Strings.T; This_VC_Path_End : E_Strings.T; This_VC_End_Type : VCDetails.Terminal_Point_Type; This_VC_State : VCDetails.VC_State_T; This_DPC_State : VCDetails.DPC_State_T; Num_Undischarged : Integer; Num_False : Integer; Num_Contra : Integer; Num_User : Integer; Num_Victor : Integer; Num_Riposte : Integer; Duplicated : Boolean; Unused_Pos : Integer; Non_Duplicated_Errors : Integer; -------------------------------------------------------------------------- procedure Add_Padding (Line : in out E_Strings.T; Length : in Integer; Padding : in String) --# derives Line from *, --# Length, --# Padding; is begin -- Add_Padding while E_Strings.Get_Length (E_Str => Line) < Length loop E_Strings.Append_String (E_Str => Line, Str => Padding); end loop; end Add_Padding; begin --PrintVCReport Num_Undischarged := 0; Num_False := 0; Num_Contra := 0; Num_User := 0; Num_Victor := 0; Num_Riposte := 0; -- Num_Of_Dead_Paths := 0; VC_Name_Prefix := VCHeap.Get_VC_Name_Prefix; -- If there any warnings or errors found in analysing the VCG, -- SIV or PLG file, record the filname of the errant file to a -- temporary warning and error file for reporting in the overall -- or short summary generated by the Total package. if VC_Error then SPARK_IO.Put_Integer (Temp_Warn_Error_File, 1, 4, 10); E_Strings.Put_Line (File => Temp_Warn_Error_File, E_Str => PathFormatter.Format (VC_Filename)); end if; --# assert True; if SIV_Error then SPARK_IO.Put_Integer (Temp_Warn_Error_File, 1, 4, 10); E_Strings.Put_Line (File => Temp_Warn_Error_File, E_Str => PathFormatter.Format (SIV_Filename)); end if; --# assert True; if VCTR_Error then SPARK_IO.Put_Integer (Temp_Warn_Error_File, 1, 4, 10); E_Strings.Put_Line (File => Temp_Warn_Error_File, E_Str => PathFormatter.Format (VCTR_Filename)); end if; --# assert True; if RSM_Error then SPARK_IO.Put_Integer (Temp_Warn_Error_File, 1, 4, 10); E_Strings.Put_Line (File => Temp_Warn_Error_File, E_Str => PathFormatter.Format (RSM_Filename)); end if; --# assert True; if PLG_Error then SPARK_IO.Put_Integer (Temp_Warn_Error_File, 1, 4, 10); E_Strings.Put_Line (File => Temp_Warn_Error_File, E_Str => PathFormatter.Format (PLG_Filename)); end if; --# assert True; if (SLG_Error or SLG_File_Missing) then SPARK_IO.Put_Integer (Temp_Warn_Error_File, 1, 4, 10); E_Strings.Put_Line (File => Temp_Warn_Error_File, E_Str => PathFormatter.Format (SLG_Filename)); end if; --# assert True; if REV_Errors.Last_Error /= Errors_Index'First then -- don't count any errors which are duplicated in review file Non_Duplicated_Errors := REV_Errors.Last_Error - 1; for I in Integer range (Errors_Index'First + 1) .. 
REV_Errors.Last_Error loop --# accept F, 10, Unused_Pos, "Unused_Pos unused here"; E_Strings.Find_Sub_String (E_Str => REV_Errors.Error_List (I), Search_String => "duplicated", String_Found => Duplicated, String_Start => Unused_Pos); --# end accept; --# assert Non_Duplicated_Errors < REV_Errors.Last_Error --# and Non_Duplicated_Errors >= (REV_Errors.Last_Error - 1) - (I - (Errors_Index'First + 1)) --# and REV_Errors.Last_Error /= Errors_Index'First --# and Non_Duplicated_Errors >= -I + REV_Errors.Last_Error; if Duplicated then Non_Duplicated_Errors := Non_Duplicated_Errors - 1; end if; end loop; --# assert True; -- if there are any (nonduplicated) errors in the file, print number and VC name if Non_Duplicated_Errors >= 1 then if REV_Errors.Excess_Count > 0 then SPARK_IO.Put_Line (Report_File, "There were too many errors", 0); else SPARK_IO.Put_Integer (Temp_PR_Verr_File, Non_Duplicated_Errors, 4, 10); end if; E_Strings.Put_Line (File => Temp_PR_Verr_File, E_Str => PathFormatter.Format (VC_Filename)); end if; end if; --# assert True; -- Only display the table if there were no errors for this proof object. -- Note than a missing SLG file is not treated as an error. -- Some organisations may deliberately delete SLG files, to -- save space. if not (VC_Error or SIV_Error or VCTR_Error or SLG_Error or PLG_Error or REV_Errors.Last_Error > Errors_Index'First) then SPARK_IO.New_Line (Report_File, 1); Table_Line := E_Strings.Copy_String (Str => "VCs for "); E_Strings.Append_Examiner_String (E_Str1 => Table_Line, E_Str2 => VC_Name_Prefix); E_Strings.Append_String (E_Str => Table_Line, Str => " :"); E_Strings.Put_Line (File => Report_File, E_Str => Table_Line); VC_Name_Tabulation := VCHeap.Get_Longest_VC_Name_Length - E_Strings.Get_Length (E_Str => VC_Name_Prefix); -- Give at least 5 columns (including a leading space) -- for the VC number, so subprograms with up to 9999 -- VCs all come out looking the same. 
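      -- For example, with VC name prefix "foo" and longest VC name "foo_7"
      -- (names here are purely illustrative) the raw tabulation would be 2;
      -- clamping it to 5 keeps room for a leading space plus up to four
      -- digits of VC number.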
if VC_Name_Tabulation < 5 then VC_Name_Tabulation := 5; end if; VC_Path_Start_Tabulation := 3 + (VC_Name_Tabulation + VCHeap.Get_Longest_VC_Start_Length); VC_Path_End_Tabulation := 3 + (VC_Path_Start_Tabulation + VCHeap.Get_Longest_VC_End_Length); --# assert True; -- Print the table header Table_Line := E_Strings.Copy_String (Str => " -"); Add_Padding (Line => Table_Line, Length => VC_Path_End_Tabulation + Table_Pad_Width, Padding => "-"); E_Strings.Put_Line (File => Report_File, E_Str => Table_Line); Table_Line := E_Strings.Copy_String (Str => "| # "); Add_Padding (Line => Table_Line, Length => VC_Name_Tabulation, Padding => " "); E_Strings.Append_String (E_Str => Table_Line, Str => " | From "); Add_Padding (Line => Table_Line, Length => VC_Path_Start_Tabulation, Padding => " "); E_Strings.Append_String (E_Str => Table_Line, Str => " | To "); Add_Padding (Line => Table_Line, Length => VC_Path_End_Tabulation, Padding => " "); E_Strings.Append_String (E_Str => Table_Line, Str => " | Proved By | Dead Path | Status |"); E_Strings.Put_Line (File => Report_File, E_Str => Table_Line); Table_Line := E_Strings.Copy_String (Str => "|-"); Add_Padding (Line => Table_Line, Length => VC_Path_End_Tabulation + Table_Pad_Width, Padding => "-"); E_Strings.Put_Line (File => Report_File, E_Str => Table_Line); -- Now loop through the VCHeap and print one table line per VC This_VC := VCHeap.First_Entry; VC_Success := True; while not Heap.IsNullPointer (This_VC) and VC_Success loop VCHeap.Details (This_VC, This_VC_Name, This_VC_Path_Start, This_VC_Path_End, This_VC_End_Type, This_VC_State, This_DPC_State); -- trim the VC name prefix from the VC_Name to get VC number -- as we only print the VC number in the table VC_Number := E_Strings.Section (E_Str => This_VC_Name, Start_Pos => E_Strings.Get_Length (E_Str => VC_Name_Prefix) + 2, Length => E_Strings.Get_Length (E_Str => This_VC_Name) - (E_Strings.Get_Length (E_Str => VC_Name_Prefix) + 1)); -- Start composing the table line for this VC Table_Line := E_Strings.Copy_String (Str => "| "); E_Strings.Append_Examiner_String (E_Str1 => Table_Line, E_Str2 => VC_Number); -- pad with spaces to longest VC number length Add_Padding (Line => Table_Line, Length => VC_Name_Tabulation, Padding => " "); E_Strings.Append_String (E_Str => Table_Line, Str => " | "); E_Strings.Append_Examiner_String (E_Str1 => Table_Line, E_Str2 => This_VC_Path_Start); Add_Padding (Line => Table_Line, Length => VC_Path_Start_Tabulation, Padding => " "); E_Strings.Append_String (E_Str => Table_Line, Str => " | "); E_Strings.Append_String (E_Str => Table_Line, Str => VCDetails.End_Type_Image (This_VC_End_Type)); E_Strings.Append_Examiner_String (E_Str1 => Table_Line, E_Str2 => This_VC_Path_End); Add_Padding (Line => Table_Line, Length => VC_Path_End_Tabulation, Padding => " "); E_Strings.Append_String (E_Str => Table_Line, Str => " |"); E_Strings.Append_String (E_Str => Table_Line, Str => Proved_By_Str (This_VC_State)); E_Strings.Append_String (E_Str => Table_Line, Str => "|"); E_Strings.Append_String (E_Str => Table_Line, Str => Dead_Path_Str (This_DPC_State)); E_Strings.Append_String (E_Str => Table_Line, Str => "| "); E_Strings.Append_String (E_Str => Table_Line, Str => Proved_By_Status_Code (This_VC_State)); E_Strings.Append_String (E_Str => Table_Line, Str => Dead_Path_Status_Code (This_DPC_State)); E_Strings.Append_String (E_Str => Table_Line, Str => " |"); -- Now print the table line and get info for the next VC --# assert True; case This_VC_State is when VCDetails.VC_SIV_Not_Present | 
VCDetails.VC_Undischarged => Num_Undischarged := Num_Undischarged + 1; when VCDetails.VC_Proved_By_Contradiction => Num_Contra := Num_Contra + 1; when VCDetails.VC_Proved_Using_User_Proof_Rules => Num_User := Num_User + 1; when VCDetails.VC_False => Num_False := Num_False + 1; when VCDetails.VC_Proved_By_Victor => Num_Victor := Num_Victor + 1; when VCDetails.VC_Proved_By_Riposte => Num_Riposte := Num_Riposte + 1; when others => null; end case; --# assert True; E_Strings.Put_Line (File => Report_File, E_Str => Table_Line); VCHeap.Next (This_VC, VC_Success, Next_VC); This_VC := Next_VC; end loop; --# assert True; if Num_Contra > 0 then SPARK_IO.Put_Integer (Temp_Contra_File, Num_Contra, 4, 10); E_Strings.Put_Line (File => Temp_Contra_File, E_Str => PathFormatter.Format (VC_Filename)); end if; --# assert True; if Num_Victor > 0 then SPARK_IO.Put_Integer (Temp_Victor_File, Num_Victor, 4, 10); E_Strings.Put_Line (File => Temp_Victor_File, E_Str => PathFormatter.Format (VC_Filename)); end if; --# assert True; if Num_Riposte > 0 then SPARK_IO.Put_Integer (Temp_Riposte_File, Num_Riposte, 4, 10); E_Strings.Put_Line (File => Temp_Riposte_File, E_Str => PathFormatter.Format (VC_Filename)); end if; --# assert True; if Num_User > 0 then SPARK_IO.Put_Integer (Temp_User_File, Num_User, 4, 10); E_Strings.Put_Line (File => Temp_User_File, E_Str => PathFormatter.Format (VC_Filename)); end if; --# assert True; if Num_False > 0 then SPARK_IO.Put_Integer (Temp_False_File, Num_False, 4, 10); E_Strings.Put_Line (File => Temp_False_File, E_Str => PathFormatter.Format (VC_Filename)); end if; --# assert True; -- if the subprogram contains any undischarged VCs add the name and number to Temp_File if Num_Undischarged > 0 then SPARK_IO.Put_Integer (Temp_File, Num_Undischarged, 4, 10); E_Strings.Put_Line (File => Temp_File, E_Str => PathFormatter.Format (VC_Filename)); end if; -- Print the table footer. 
Table_Line := E_Strings.Copy_String (Str => " -"); Add_Padding (Line => Table_Line, Length => VC_Path_End_Tabulation + Table_Pad_Width, Padding => "-"); E_Strings.Put_Line (File => Report_File, E_Str => Table_Line); SPARK_IO.New_Line (Report_File, 1); end if; --# accept F, 33, Unused_Pos, "Unused_Pos unused here"; end PrintVCReport; spark-2012.0.deb/pogs/pogs.idx0000644000175000017500000000267611753202340015075 0ustar eugeneugen-- POGS Self Analysis Index File -- -- Components from Examiner -- commonstring specification is in ../examiner/commonstring.ads commonstringutilities specification is in ../examiner/commonstringutilities.ads e_strings specification is in ../examiner/e_strings.ads examinerconstants specification is in ../examiner/examinerconstants.ads spark_io specification is in ../examiner/spark_io.ads date_time specification is in ../examiner/date_time.ads -- -- Version -- version specification is in ../common/versioning/version.ads -- -- POGS -- heapindex specification is in heapindex.ads heap specification is in heap.ads spark_calendar specification is in spark_calendar.ads banner specification is in banner.ads banner body is in banner.adb commandline specification is in commandline.ads fatalerrors specification is in fatalerrors.ads filedetails specification is in filedetails.ads fileheap specification is in fileheap.ads findfiles specification is in findfiles.ads oscommandline specification is in oscommandline.ads osfiling specification is in osfiling.ads osdirectory specification is in osdirectory.ads pathformatter specification is in pathformatter.ads pathformatter body is in pathformatter.adb total specification is in total.ads vcdetails specification is in vcdetails.ads vcheap specification is in vcheap.ads vcs specification is in vcs.ads vcs body is in vcs.adb toppackage specification is in toppackage.ads slg_parser specification is in slg_parser.ads spark-2012.0.deb/pogs/Makefile0000644000175000017500000000567411753202340015060 0ustar eugeneugen# ----------------------------------------------------------------------------- # (C) Altran Praxis Limited # ----------------------------------------------------------------------------- # # The SPARK toolset is free software; you can redistribute it and/or modify it # under terms of the GNU General Public License as published by the Free # Software Foundation; either version 3, or (at your option) any later # version. The SPARK toolset is distributed in the hope that it will be # useful, but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the GNU # General Public License distributed with the SPARK toolset; see file # COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of # the license. # # ============================================================================= ################################################################################ # PURPOSE #------------------------------------------------------------------------------- # Makefile for POGS # For Linux, Windows, Solaris or Mac OS X (aka Darwin). ################################################################################ ################################################################################ # BUILD CONFIGURATION ################################################################################ # Name of the output program. OUTPUT_NAME:=pogs # Location of root. ROOT:=.. 
SPARK_PREPED:=oscommandline.adb # Location of common. COMMON:=${ROOT}/common include ${COMMON}/Makefile.inc ################################################################################ # PLATFORM INDEPENDENT CONFIGURATION ################################################################################ # Files containing platform specific code that is handled by gnatprep PREP_TARGETS:=${SPARK_PREPED} ################################################################################ # TARGETS ################################################################################ all: ${OUTPUT_NAME}${EXE_EXTN} ${OUTPUT_NAME}${EXE_EXTN}: preamble prep gnatmake -j${SPARKCPUS} ${GNATMAKE_OPTS} ${OUTPUT_NAME} -o $@ -bargs ${BIND_OPTS} self-analysis: preamble prep -spark -plain @${OUTPUT_NAME}.smf # Initialisations # =============== preamble: $(MAKE) -C ${ROOT}/examiner clean # Platform specific prepping # ========================== prep: ${PREP_TARGETS} $(MAKE) -C ${ROOT}/examiner prep %.ads: %.aps gnatprep ${PREP_OPTS} -DTarget=${PREP_TARGET} $< $@ %.adb: %.apb gnatprep ${PREP_OPTS} -DTarget=${PREP_TARGET} $< $@ # Cleaning code base # ================== clean: residueclean standardclean reallyclean: clean targetclean vcclean preamble $(MAKE) -C ${ROOT}/examiner reallyclean residueclean: rm -f ${PREP_TARGETS} ################################################################################ # END-OF-FILE spark-2012.0.deb/pogs/vcs-analyseprooflogfile.adb0000644000175000017500000004405711753202340020723 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Procedure to analyse a .PLG file -- -- -- -------------------------------------------------------------------------------- separate (VCS) procedure AnalyseProofLogFile (Report_File : in SPARK_IO.File_Type; Filename : in E_Strings.T; SIV_File_Date_Time : in E_Strings.T; Error_In_File : out Boolean) is Dummy_Close_Status : SPARK_IO.File_Status; File_Line : E_Strings.T; Finished_With_File : Boolean; Open_Status : SPARK_IO.File_Status; Read_Line_Success : Boolean; Proof_Log_File : SPARK_IO.File_Type := SPARK_IO.Null_File; VC_Proof_Date_Time_From_PLG_File : E_Strings.T; Proof_Log_Obsolete : Boolean; Trimmed_Line : E_Strings.T; Current_VC_Name : E_Strings.T; procedure Extract_Dates_And_Times_From_Proof_Log_File (Proof_Log_File : in SPARK_IO.File_Type; Proof_Log_Date_Time : out E_Strings.T) --# global in out SPARK_IO.File_Sys; --# derives Proof_Log_Date_Time, --# SPARK_IO.File_Sys from Proof_Log_File, --# SPARK_IO.File_Sys; is File_Line : E_Strings.T; Trimmed_Line : E_Strings.T; Proof_Date_Time : E_Strings.T; Start_Found : Boolean := False; begin Proof_Log_Date_Time := E_Strings.Empty_String; while not Start_Found loop E_Strings.Get_Line (File => Proof_Log_File, E_Str => File_Line); Trimmed_Line := E_Strings.Trim (File_Line); -- find date if E_Strings.Eq1_String (E_Str => E_Strings.Section (Trimmed_Line, 1, 4), Str => "DATE") then -- extract the proof session date and time from the string Proof_Date_Time := E_Strings.Section (Trimmed_Line, PLG_File_VC_Proof_Date_Start_Column, PLG_File_VC_Proof_Date_Length); E_Strings.Append_String (E_Str => Proof_Date_Time, Str => " "); E_Strings.Append_Examiner_String (E_Str1 => Proof_Date_Time, E_Str2 => E_Strings.Section (Trimmed_Line, PLG_File_VC_Proof_Time_Start_Column, PLG_File_VC_Proof_Time_Length)); Proof_Log_Date_Time := Proof_Date_Time; end if; -- find start of instructions, then go on to analyse the rest of the file if E_Strings.Eq1_String (E_Str => E_Strings.Section (Trimmed_Line, 1, 7), Str => "COMMAND") then Start_Found := True; end if; end loop; -- if date has not been found must be in plain output mode if E_Strings.Is_Empty (E_Str => Proof_Log_Date_Time) then Proof_Log_Date_Time := E_Strings.Copy_String (Str => "Unknown date"); end if; end Extract_Dates_And_Times_From_Proof_Log_File; ------------------------------------------------------------------------- procedure Check_Proof_Log_Obsolescence (Proof_Log_Date_Time : in E_Strings.T; SIV_File_Date_Time : in E_Strings.T; PLG_Obsolete : out Boolean) --# global in CommandLine.Data; --# in out SPARK_IO.File_Sys; --# derives PLG_Obsolete from CommandLine.Data, --# Proof_Log_Date_Time, --# SIV_File_Date_Time & --# SPARK_IO.File_Sys from *, --# Proof_Log_Date_Time, --# SIV_File_Date_Time; is Result : Boolean; PLG_Time_Stamp : Integer; SIV_Time_Stamp : Integer; PLG_Date_Stamp : SPARK_Calendar.Time; SIV_Date_Stamp : SPARK_Calendar.Time; PLG_Error : Boolean; SIV_Error : Boolean; procedure Get_Date_And_Time (Date_String : in E_Strings.T; Date_Stamp : out SPARK_Calendar.Time; Time_Stamp : out Integer; Error : out Boolean) --# derives Date_Stamp, --# Error, --# Time_Stamp from Date_String; is subtype Month_Index is Integer range 0 .. 
SPARK_Calendar.Month_Number'Last; Year_Num : SPARK_Calendar.Year_Number; Raw_Month_Num : Month_Index; Month_Num : SPARK_Calendar.Month_Number; Day_Num : SPARK_Calendar.Day_Number; Hours_Num : Integer; Minutes_Num : Integer; Seconds_Num : Integer; Time_In_Seconds : Integer; Stamp : SPARK_Calendar.Time; Stop : Natural; Time_Error : SPARK_Calendar.Error_Code; function Month_Name_To_Month_Num (Month_Name : E_Strings.T) return Month_Index is Num : Month_Index := 0; begin if E_Strings.Eq1_String (E_Str => Month_Name, Str => "JAN") then Num := 1; elsif E_Strings.Eq1_String (E_Str => Month_Name, Str => "FEB") then Num := 2; elsif E_Strings.Eq1_String (E_Str => Month_Name, Str => "MAR") then Num := 3; elsif E_Strings.Eq1_String (E_Str => Month_Name, Str => "APR") then Num := 4; elsif E_Strings.Eq1_String (E_Str => Month_Name, Str => "MAY") then Num := 5; elsif E_Strings.Eq1_String (E_Str => Month_Name, Str => "JUN") then Num := 6; elsif E_Strings.Eq1_String (E_Str => Month_Name, Str => "JUL") then Num := 7; elsif E_Strings.Eq1_String (E_Str => Month_Name, Str => "AUG") then Num := 8; elsif E_Strings.Eq1_String (E_Str => Month_Name, Str => "SEP") then Num := 9; elsif E_Strings.Eq1_String (E_Str => Month_Name, Str => "OCT") then Num := 10; elsif E_Strings.Eq1_String (E_Str => Month_Name, Str => "NOV") then Num := 11; elsif E_Strings.Eq1_String (E_Str => Month_Name, Str => "DEC") then Num := 12; end if; return Num; end Month_Name_To_Month_Num; begin --Get_Date_And_Time -- If the provided date string is empty, do not attempt to parse it, and -- return an error. Otherwise constraint errors are raised in trying to -- convert an empty string into an integer value. -- Note this check does not exclude all situations that may raise a -- constraint error. To do this, would need to check that the string -- contains numeric characters in the correct locations. 
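         -- The fixed positions used below assume a date/time string laid out
         -- as "DD-MON-YYYY hh:mm:ss" (for example "29-JUN-2012 10:53:11",
         -- an illustrative value): day in columns 1-2, month name in 4-6,
         -- year in 8-11, hours in 13-14, minutes in 16-17 and seconds in
         -- 19-20.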
if E_Strings.Is_Empty (E_Str => Date_String) then Error := True; Time_Stamp := 0; else --# accept F, 10, Stop, "Stop unused here"; E_Strings.Get_Int_From_String (Source => E_Strings.Section (E_Str => Date_String, Start_Pos => 8, Length => 4), Item => Year_Num, Start_Pt => 1, Stop => Stop); --# end accept; Raw_Month_Num := Month_Name_To_Month_Num (Month_Name => E_Strings.Section (Date_String, 4, 3)); if Raw_Month_Num = 0 then Error := True; Time_Stamp := 0; else Month_Num := Raw_Month_Num; --# accept F, 10, Stop, "Stop unused here"; E_Strings.Get_Int_From_String (Source => E_Strings.Section (E_Str => Date_String, Start_Pos => 1, Length => 2), Item => Day_Num, Start_Pt => 1, Stop => Stop); --# end accept; SPARK_Calendar.Time_Of (Year_Num, Month_Num, Day_Num, Stamp, Time_Error); if not (Time_Error = SPARK_Calendar.Valid) then Error := True; Time_Stamp := 0; else --# accept F, 10, Stop, "Stop unused here"; E_Strings.Get_Int_From_String (Source => E_Strings.Section (E_Str => Date_String, Start_Pos => 13, Length => 2), Item => Hours_Num, Start_Pt => 1, Stop => Stop); E_Strings.Get_Int_From_String (Source => E_Strings.Section (E_Str => Date_String, Start_Pos => 16, Length => 2), Item => Minutes_Num, Start_Pt => 1, Stop => Stop); E_Strings.Get_Int_From_String (Source => E_Strings.Section (E_Str => Date_String, Start_Pos => 19, Length => 2), Item => Seconds_Num, Start_Pt => 1, Stop => Stop); --# end accept; Time_In_Seconds := (Seconds_Num + (Minutes_Num * 60)) + (Hours_Num * 3600); Date_Stamp := Stamp; Time_Stamp := Time_In_Seconds; Error := False; end if; end if; end if; --# accept F, 33, Stop, "Stop unused here" & --# F, 602, Date_Stamp, Date_Stamp, "Always well-defined when no error"; end Get_Date_And_Time; begin if E_Strings.Eq1_String (E_Str => Proof_Log_Date_Time, Str => "Unknown date") then -- If the /i option is specified then the absence of date is not a problem and we -- can assume that the file is valid. -- If the /i option is not specified then dates are assumed to be important and a -- file that lacks a date is therefore obsolete. 
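      -- When both timestamps are available, the comparisons below treat the
      -- proof log as obsolete exactly when its date/time stamp is strictly
      -- earlier than that of the SIV file; a parse failure on either
      -- timestamp is also reported and treated as obsolete.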
Result := CommandLine.Data.IgnoreDates; else Get_Date_And_Time (Date_String => Proof_Log_Date_Time, Date_Stamp => PLG_Date_Stamp, Time_Stamp => PLG_Time_Stamp, Error => PLG_Error); Get_Date_And_Time (Date_String => SIV_File_Date_Time, Date_Stamp => SIV_Date_Stamp, Time_Stamp => SIV_Time_Stamp, Error => SIV_Error); if PLG_Error then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Date format error in PLG file", 0); Result := True; elsif SIV_Error then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Date format error in SIV file", 0); Result := True; elsif SPARK_Calendar.LT (SIV_Date_Stamp, PLG_Date_Stamp) then Result := False; elsif SPARK_Calendar.GT (SIV_Date_Stamp, PLG_Date_Stamp) then Result := True; elsif PLG_Time_Stamp < SIV_Time_Stamp then Result := True; else Result := False; end if; end if; PLG_Obsolete := Result; end Check_Proof_Log_Obsolescence; -------------------------------------------------------------------------- begin -- AnalyseProofLogFile -- open proof log file E_Strings.Open (File => Proof_Log_File, Mode_Of_File => SPARK_IO.In_File, Name_Of_File => Filename, Form_Of_File => "", Status => Open_Status); if Open_Status /= SPARK_IO.Ok then FatalErrors.Process (FatalErrors.Could_Not_Open_Input_File, E_Strings.Empty_String); end if; Extract_Dates_And_Times_From_Proof_Log_File (Proof_Log_File => Proof_Log_File, Proof_Log_Date_Time => VC_Proof_Date_Time_From_PLG_File); -- If the SIV file has a Unknown date/time arising from /plain, -- then assume the proof log is obsolete, UNLESS we're running -- POGS with /i, so... if E_Strings.Eq1_String (E_Str => SIV_File_Date_Time, Str => Unknown_SIV_Date) then Proof_Log_Obsolete := True; else Check_Proof_Log_Obsolescence (Proof_Log_Date_Time => VC_Proof_Date_Time_From_PLG_File, SIV_File_Date_Time => SIV_File_Date_Time, PLG_Obsolete => Proof_Log_Obsolete); end if; if Proof_Log_Obsolete and not CommandLine.Data.IgnoreDates then SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_Line (Report_File, "*** Warning: Proof Log file out of date ***", 0); SPARK_IO.Put_String (Report_File, "SIV file time stamp: ", 0); E_Strings.Put_Line (File => Report_File, E_Str => SIV_File_Date_Time); SPARK_IO.Put_String (Report_File, "PLG file time stamp: ", 0); E_Strings.Put_Line (File => Report_File, E_Str => VC_Proof_Date_Time_From_PLG_File); Error_In_File := True; else SPARK_IO.New_Line (Report_File, 1); -- Only output dates if we are not ignoring them. 
if CommandLine.Data.IgnoreDates = False then SPARK_IO.Put_String (Report_File, "VCs proved ", 0); E_Strings.Put_Line (File => Report_File, E_Str => VC_Proof_Date_Time_From_PLG_File); end if; -- find first non blank line -- if we get to the end of the file first, flag a fatal error Read_Next_Non_Blank_Line (File => Proof_Log_File, Success => Read_Line_Success, File_Line => File_Line); if not Read_Line_Success then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "************* Proof Log file empty ************", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); Error_In_File := True; else Error_In_File := False; Finished_With_File := False; -- process file line-by-line -- on entry to the loop there is already a valid line in the -- File_Line buffer while not Finished_With_File loop -- examine line and act accordingly if Is_VC_Proof_Success_Line (Line => File_Line) then Trimmed_Line := E_Strings.Trim (File_Line); Current_VC_Name := E_Strings.Section (E_Str => Trimmed_Line, Start_Pos => 15, Length => E_Strings.Get_Length (E_Str => Trimmed_Line) - 14); -- In a proof log file, it's possible that the "Proof -- Success Line" for a single VC can appear multiple times -- (if the user types "done" repeatedly after proving a VC for -- example), so we only mark the VC as proved here ONCE, -- to stop it being accounted for multiple times in the Totals. if VCHeap.Get_VC_State (Current_VC_Name) /= VCDetails.VC_Proved_By_Checker then VCHeap.Set_VC_State (Current_VC_Name, VCDetails.VC_Proved_By_Checker); end if; end if; if not Finished_With_File then -- read next line Read_Next_Non_Blank_Line (File => Proof_Log_File, Success => Read_Line_Success, File_Line => File_Line); -- if unsuccessful then check EOF -- and set Finished_With_File accordingly if not Read_Line_Success then if SPARK_IO.End_Of_File (Proof_Log_File) then Finished_With_File := True; else FatalErrors.Process (FatalErrors.Problem_Reading_File, E_Strings.Empty_String); end if; end if; end if; end loop; end if; end if; --# accept F, 10, Dummy_Close_Status, "Dummy_Close_Status unused here" & --# F, 10, Proof_Log_File, "Proof_Log_File unused here"; SPARK_IO.Close (Proof_Log_File, Dummy_Close_Status); --# end accept; --# accept F, 33, Dummy_Close_Status, "Dummy_Close_Status unused here"; end AnalyseProofLogFile; spark-2012.0.deb/pogs/vcs-analysesimplifiedvcfile.adb0000644000175000017500000003737411753202340021556 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Procedure to analyse a .SIV file -- -- -- -------------------------------------------------------------------------------- separate (VCS) procedure AnalyseSimplifiedVCFile (Report_File : in SPARK_IO.File_Type; Filename : in E_Strings.T; VC_File_Date_Time : in E_Strings.T; SIV_File_Date_Time : out E_Strings.T; Error_In_SIV_File : out Boolean) is Bad_File_Format : Boolean := False; Dummy_Close_Status : SPARK_IO.File_Status; File_Line : E_Strings.T; Finished_With_File : Boolean; Open_Status : SPARK_IO.File_Status; Process_Success : Boolean; Read_Line_Success : Boolean; Simplified_VC_File : SPARK_IO.File_Type := SPARK_IO.Null_File; File_Status : File_Status_T; Simplification_Date_Time : E_Strings.T; VC_Generation_Date_Time_From_SIV_File : E_Strings.T; Trimmed_Line : E_Strings.T; Current_VC_Name : E_Strings.T; ------------------------------------------------------------------------- -- NOTE, this procedure also removes the comma inserted in the string -- by the simplifier -- procedure Extract_Dates_And_Times_From_Simplified_VC_File (Simplified_VC_File : in SPARK_IO.File_Type; VC_Generation_Date_Time : out E_Strings.T; Simplification_Date_Time : out E_Strings.T; File_Status : out File_Status_T) --# global in out SPARK_IO.File_Sys; --# derives File_Status, --# Simplification_Date_Time, --# SPARK_IO.File_Sys, --# VC_Generation_Date_Time from Simplified_VC_File, --# SPARK_IO.File_Sys; is File_Line : E_Strings.T; Trimmed_Line : E_Strings.T; Gen_Date_Time : E_Strings.T; Simp_Date_Time : E_Strings.T; Subprogram_Found : Boolean := False; begin File_Status := Not_Corrupt; VC_Generation_Date_Time := E_Strings.Empty_String; Simplification_Date_Time := E_Strings.Empty_String; --Check for completly empty file. E_Strings.Get_Line (File => Simplified_VC_File, E_Str => File_Line); if E_Strings.Is_Empty (E_Str => File_Line) and SPARK_IO.End_Of_File (Simplified_VC_File) then File_Status := Corrupt_Empty_File; else --Keep on reading from this file, until the desired information is retrieved --or the end of the fikle is reached. loop Trimmed_Line := E_Strings.Trim (File_Line); -- find date -- (There is an implicit assumption that the date, if present, will appear -- before the subprogram name.) -- When the Examiner is in plain_output mode, the DATE line doesn't appear. 
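      -- The header line searched for below is assumed to have the shape
      -- "CREATED dd-MMM-yyyy, hh:mm:ss  SIMPLIFIED dd-MMM-yyyy, hh:mm:ss",
      -- with the exact offsets given by the SIV_File_* column constants;
      -- the generation and simplification date/times are cut out of it at
      -- those fixed positions.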
if E_Strings.Eq1_String (E_Str => E_Strings.Section (Trimmed_Line, 1, 7), Str => "CREATED") then -- extract the VC generation date and time from the string Gen_Date_Time := E_Strings.Section (Trimmed_Line, SIV_File_VC_Generation_Date_Start_Column, SIV_File_VC_Generation_Date_Length); E_Strings.Append_String (E_Str => Gen_Date_Time, Str => " "); E_Strings.Append_Examiner_String (E_Str1 => Gen_Date_Time, E_Str2 => E_Strings.Section (Trimmed_Line, SIV_File_VC_Generation_Time_Start_Column, SIV_File_VC_Generation_Time_Length)); VC_Generation_Date_Time := Gen_Date_Time; -- extract the simplification date and time from the string Simp_Date_Time := E_Strings.Section (Trimmed_Line, SIV_File_Simplification_Date_Start_Column, SIV_File_Simplification_Date_Length); E_Strings.Append_String (E_Str => Simp_Date_Time, Str => " "); E_Strings.Append_Examiner_String (E_Str1 => Simp_Date_Time, E_Str2 => E_Strings.Section (Trimmed_Line, SIV_File_Simplification_Time_Start_Column, SIV_File_Simplification_Time_Length)); Simplification_Date_Time := Simp_Date_Time; end if; Subprogram_Found := Is_Valid_Subprogram (Trimmed_Line); exit when (Subprogram_Found or SPARK_IO.End_Of_File (Simplified_VC_File)); E_Strings.Get_Line (File => Simplified_VC_File, E_Str => File_Line); end loop; end if; if (File_Status = Not_Corrupt) and not Subprogram_Found then File_Status := Corrupt_Unknown_Subprogram; end if; -- if date has not been found must be in plain output mode -- The above is a false assumption -- the file may just be corrupt. However, the -- effect below of setting the string as unknown date is reasonable for both cases. if E_Strings.Is_Empty (E_Str => VC_Generation_Date_Time) then E_Strings.Append_String (E_Str => VC_Generation_Date_Time, Str => Unknown_VCG_Date); E_Strings.Append_String (E_Str => Simplification_Date_Time, Str => Unknown_SIV_Date); end if; end Extract_Dates_And_Times_From_Simplified_VC_File; ------------------------------------------------------------------------- -- look at the next non-blank line to see whether it starts -- "*** true". If so the VC has been discharged. 
Otherwise, increment -- the counter of undischarged VCs, and set the flag that an undischarged -- VC has been found procedure Process_New_Simplified_VC_Line (Simplified_VC_File : in SPARK_IO.File_Type; VC_Name : in E_Strings.T; Success : out Boolean) --# global in out FatalErrors.State; --# in out SPARK_IO.File_Sys; --# in out VCHeap.State; --# derives FatalErrors.State, --# VCHeap.State from *, --# Simplified_VC_File, --# SPARK_IO.File_Sys, --# VCHeap.State, --# VC_Name & --# SPARK_IO.File_Sys, --# Success from Simplified_VC_File, --# SPARK_IO.File_Sys; is File_Line : E_Strings.T; Read_Line_Success : Boolean; begin Read_Next_Non_Blank_Line (File => Simplified_VC_File, Success => Read_Line_Success, File_Line => File_Line); if not Read_Line_Success then Success := False; else Success := True; if E_Strings.Eq1_String (E_Str => E_Strings.Section (File_Line, 1, 8), Str => "*** true") then if E_Strings.Eq1_String (E_Str => E_Strings.Section (File_Line, 15, 15), Str => "* contradiction") then VCHeap.Set_VC_State (VC_Name, VCDetails.VC_Proved_By_Contradiction); elsif E_Strings.Eq1_String (E_Str => E_Strings.Section (File_Line, 15, 14), Str => "* proved using") then VCHeap.Set_VC_State (VC_Name, VCDetails.VC_Proved_Using_User_Proof_Rules); elsif VCHeap.Get_VC_State (VC_Name) /= VCDetails.VC_Proved_By_Examiner then VCHeap.Set_VC_State (VC_Name, VCDetails.VC_Proved_By_Inference); end if; elsif VCHeap.Get_VC_State (VC_Name) /= VCDetails.VC_Proved_By_Examiner then -- The VC is undischarged if it has not been discharged by the -- Examiner. VCHeap.Set_VC_State (VC_Name, VCDetails.VC_Undischarged); end if; end if; end Process_New_Simplified_VC_Line; -------------------------------------------------------------------------- begin -- AnalyseSimplifiedVCFile -- open simplified VC file E_Strings.Open (File => Simplified_VC_File, Mode_Of_File => SPARK_IO.In_File, Name_Of_File => Filename, Form_Of_File => "", Status => Open_Status); if Open_Status /= SPARK_IO.Ok then FatalErrors.Process (FatalErrors.Could_Not_Open_Input_File, E_Strings.Empty_String); end if; --No errors, until discover otherwise. Error_In_SIV_File := False; Extract_Dates_And_Times_From_Simplified_VC_File (Simplified_VC_File => Simplified_VC_File, VC_Generation_Date_Time => VC_Generation_Date_Time_From_SIV_File, Simplification_Date_Time => Simplification_Date_Time, File_Status => File_Status); case File_Status is when Not_Corrupt => null; when Corrupt_Empty_File => SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "************* SIV file corrupt: empty file ************", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); Error_In_SIV_File := True; when Corrupt_Unknown_Subprogram => SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "************* SIV file corrupt: missing subprogram name ************", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); Error_In_SIV_File := True; end case; --Only continue working on this file if an error has not been seen. --(Previously POGS would attempt to work with corrupt files. This feature has the -- capacity to produce confusing and wrong results.) 
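   -- The main loop below classifies each non-blank line of the SIV file: a
   -- new-range line simply has its continuation line pulled in; a VC heading
   -- line (e.g. "foo_3.", the name followed by a full stop, the name being
   -- illustrative) selects the current VC, whose following verdict line is
   -- then examined: "*** true" means proved (by contradiction or by user
   -- rules if the corresponding marker starts at column 15, otherwise by
   -- inference), anything else leaves the VC undischarged unless the
   -- Examiner had already proved it, and a trivially false conclusion marks
   -- the VC as false.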
if not (Error_In_SIV_File) then if CommandLine.Data.IgnoreDates or else E_Strings.Eq_String (E_Str1 => VC_Generation_Date_Time_From_SIV_File, E_Str2 => VC_File_Date_Time) then if not CommandLine.Data.IgnoreDates then SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "VCs simplified ", 0); E_Strings.Put_Line (File => Report_File, E_Str => Simplification_Date_Time); end if; -- find first non blank line -- if we get to the end of the file first, flag a fatal error Read_Next_Non_Blank_Line (File => Simplified_VC_File, Success => Read_Line_Success, File_Line => File_Line); if not Read_Line_Success then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "************* SIV file corrupt: no data beyond header ************", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); Bad_File_Format := True; else Finished_With_File := False; -- process file line-by-line -- on entry to the loop there is already a valid line in the -- File_Line buffer Current_VC_Name := E_Strings.Empty_String; while not Finished_With_File loop -- examine line and act accordingly if Is_New_Range_Line (Line => File_Line) then Append_Next_Line_From_File (Line => File_Line, File => Simplified_VC_File); elsif Is_New_VC_Line (Line => File_Line) then Trimmed_Line := E_Strings.Trim (File_Line); Current_VC_Name := E_Strings.Section (E_Str => Trimmed_Line, Start_Pos => 1, Length => E_Strings.Get_Length (E_Str => Trimmed_Line) - 1); Process_New_Simplified_VC_Line (Simplified_VC_File => Simplified_VC_File, VC_Name => Current_VC_Name, Success => Process_Success); if not Process_Success then SPARK_IO.Put_String (Report_File, "*** Warning: Bad format in simplified VC file ***", 0); Finished_With_File := True; Bad_File_Format := True; end if; end if; if not Finished_With_File then -- read next line and check if VC has been proved false Read_Next_Non_Blank_Line (File => Simplified_VC_File, Success => Read_Line_Success, File_Line => File_Line); if Is_Trivially_False_VC (Line => File_Line) then VCHeap.Set_VC_State (Current_VC_Name, VCDetails.VC_False); end if; -- if unsuccessful then check EOF -- and set Finished_With_File accordingly if not Read_Line_Success then if SPARK_IO.End_Of_File (Simplified_VC_File) then Finished_With_File := True; else FatalErrors.Process (FatalErrors.Problem_Reading_File, E_Strings.Empty_String); end if; end if; end if; end loop; end if; else -- SIV file is out of date SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_Line (Report_File, "*** Warning: Simplified VC file out of date ***", 0); SPARK_IO.Put_String (Report_File, "VCs Generated: ", 0); E_Strings.Put_String (File => Report_File, E_Str => VC_File_Date_Time); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "SIV File Date: ", 0); E_Strings.Put_String (File => Report_File, E_Str => Simplification_Date_Time); SPARK_IO.New_Line (Report_File, 1); Bad_File_Format := True; end if; end if; --# accept F, 10, Dummy_Close_Status, "Dummy_Close_Status unused here" & --# F, 10, Simplified_VC_File, "Simplified_VC_File unused here"; SPARK_IO.Close (Simplified_VC_File, Dummy_Close_Status); --# end accept; SIV_File_Date_Time := Simplification_Date_Time; --Either an error being raised, or a 'Bad_File_Format' --being detected is an error. 
Error_In_SIV_File := Error_In_SIV_File or Bad_File_Format; --# accept F, 33, Dummy_Close_Status, "Dummy_Close_Status unused here"; end AnalyseSimplifiedVCFile; spark-2012.0.deb/pogs/spark_calendar.adb0000644000175000017500000000657411753202340017041 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= package body SPARK_Calendar is -- SPARK implementation of Calendar operations procedure Clock (Now : out SPARK_Calendar.Time) is begin Now := SPARK_Calendar.Time (Calendar.Clock); end Clock; procedure Year (Date : in SPARK_Calendar.Time; Result : out Year_Number; Status : out Error_Code) is begin Status := Valid; Result := Year_Number (Calendar.Year (Calendar.Time (Date))); exception when others => Status := Time_Error; end Year; function Month (Date : SPARK_Calendar.Time) return Month_Number is begin return Month_Number (Calendar.Month (Calendar.Time (Date))); end Month; function Day (Date : SPARK_Calendar.Time) return Day_Number is begin return Day_Number (Calendar.Day (Calendar.Time (Date))); end Day; procedure Split (Date : in SPARK_Calendar.Time; Year_P : out Year_Number; Month_P : out Month_Number; Day_P : out Day_Number; Status : out Error_Code) is Dummy_Seconds : Duration; begin Status := Valid; Calendar.Split (Calendar.Time (Date), Year_P, Month_P, Day_P, Dummy_Seconds); exception when others => Status := Time_Error; end Split; procedure Time_Of (Year_P : in Year_Number; Month_P : in Month_Number; Day_P : in Day_Number; Result : out SPARK_Calendar.Time; Status : out Error_Code) is Dummy_Seconds : Duration; begin Status := Valid; Dummy_Seconds := 0.0; Result := SPARK_Calendar.Time (Calendar.Time_Of (Calendar.Year_Number (Year_P), Calendar.Month_Number (Month_P), Calendar.Day_Number (Day_P), Dummy_Seconds)); exception when others => Status := Time_Error; end Time_Of; function LT (Left, Right : SPARK_Calendar.Time) return Boolean is begin return Left < Right; end LT; function LE (Left, Right : SPARK_Calendar.Time) return Boolean is begin return Left <= Right; end LE; function GT (Left, Right : SPARK_Calendar.Time) return Boolean is begin return Left > Right; end GT; function GE (Left, Right : SPARK_Calendar.Time) return Boolean is begin return Left >= Right; end GE; end SPARK_Calendar; spark-2012.0.deb/pogs/spark.sw0000644000175000017500000000020511753202340015074 0ustar eugeneugen-sparklib -output_directory=vcg -config_file=../common/gnat.cfg -listing_extension=ls_ -casing -index_file=pogs.idx -report=pogs.rep spark-2012.0.deb/pogs/pogs.adb0000644000175000017500000000577111753202340015036 0ustar 
eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Main program for the Summary Tool -- -- -- -------------------------------------------------------------------------------- with Ada.Exceptions; with GNAT.Traceback.Symbolic; with POGS_Exceptions; with SPARK_IO; with TopPackage; with Version; procedure Pogs is begin TopPackage.Main_Procedure; exception when POGS_Exceptions.Usage_Error => SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Version.Toolset_Support_Line1, 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Version.Toolset_Support_Line2, 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Version.Toolset_Support_Line3, 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Version.Toolset_Support_Line4, 0); when E : others => SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Unexpected internal error in POGS", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Version.Toolset_Support_Line1, 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Version.Toolset_Support_Line2, 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Version.Toolset_Support_Line3, 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Version.Toolset_Support_Line4, 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Exception information:", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Ada.Exceptions.Exception_Information (E), 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Traceback:", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, GNAT.Traceback.Symbolic.Symbolic_Traceback (E), 0); end Pogs; spark-2012.0.deb/pogs/vcs-analyse_riposte_summary_file.adb0000644000175000017500000003551111753202340022627 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- Synopsis: -- -- -- -- Procedure to analyse a .rsm file -- -- -- -------------------------------------------------------------------------------- separate (VCS) procedure Analyse_Riposte_Summary_File (Report_File : in SPARK_IO.File_Type; Filename : in E_Strings.T; Error_In_RSM_File : out Boolean; File_Error : out E_Strings.T; Temp_Riposte_Error_File : in SPARK_IO.File_Type) is -- String and error conctants. Str_File_Corrupt : constant String := "Riposte summary file corrupt: Could not parse line"; Str_Cannot_Open : constant String := "Cannot open Riposte summary file"; type Riposte_Status_T is (Riposte_Error, Riposte_True, Riposte_True_Sortof, Riposte_False, Riposte_Undischarged); -- As defined in the Riposte User Manual type Riposte_CSV_Line_T is record Unit : E_Strings.T; Verdict : Riposte_Status_T; Verdict_Error : Boolean; Verdict_True : Boolean; Verdict_Sound : Boolean; Verdict_Complete : Boolean; Verdict_Affected_By_Undefinedness : Boolean; Verdict_Time : Natural; Verdict_Ground_Time : Integer; Verdict_Solved_Time : Integer; end record; Invalid_CSV_Line : constant Riposte_CSV_Line_T := Riposte_CSV_Line_T' (Unit => E_Strings.Empty_String, Verdict => Riposte_Error, Verdict_Error => True, Verdict_True => False, Verdict_Sound => True, Verdict_Complete => False, Verdict_Affected_By_Undefinedness => False, Verdict_Time => 0, Verdict_Ground_Time => -1, Verdict_Solved_Time => -1); File_Status : SPARK_IO.File_Status; Riposte_Summary_File : SPARK_IO.File_Type; Line_Read : E_Strings.T; Trimmed_Line : E_Strings.T; Success : Boolean; CSV_Line : Riposte_CSV_Line_T; Error_Flag_Mentioned : Boolean; function Unqote (E_Str : E_Strings.T) return E_Strings.T is L : E_Strings.Lengths; Retval : E_Strings.T; begin L := E_Strings.Get_Length (E_Str); if L >= 2 and then E_Strings.Get_Element (E_Str, E_Strings.Positions'First) = '"' and then E_Strings.Get_Element (E_Str, L) = '"' then Retval := E_Strings.Section (E_Str => E_Str, Start_Pos => E_Strings.Positions'First + 1, Length => L - 2); else Retval := E_Str; end if; return Retval; end Unqote; procedure Parse_CSV_String (CSV_Line : in E_Strings.T; CSV_Line_Position : in out E_Strings.Positions; The_String : out E_Strings.T; Ok : out Boolean; Expect_EOL : in Boolean) --# derives CSV_Line_Position, --# Ok, --# The_String from CSV_Line, --# CSV_Line_Position, --# Expect_EOL; --# pre CSV_Line_Position <= E_Strings.Get_Length (CSV_Line) + 1; --# post CSV_Line_Position <= E_Strings.Get_Length (CSV_Line) + 1; is Is_Quoted_String : Boolean; In_Quoted_String : Boolean; Comma_Found : Boolean; EOL_Found : Boolean; End_Position : E_Strings.Positions; Tmp_String : E_Strings.T; begin Is_Quoted_String := E_Strings.Get_Element (CSV_Line, CSV_Line_Position) = '"'; In_Quoted_String := Is_Quoted_String; Comma_Found := False; End_Position := CSV_Line_Position; for I in E_Strings.Positions range CSV_Line_Position .. 
E_Strings.Get_Length (CSV_Line) loop --# assert E_Strings.Get_Length (CSV_Line) <= E_Strings.Lengths'Last; End_Position := I; case E_Strings.Get_Element (CSV_Line, I) is when '"' => if In_Quoted_String and I > CSV_Line_Position then In_Quoted_String := False; end if; when ',' => if not In_Quoted_String then Comma_Found := True; end if; when others => null; end case; exit when Comma_Found; end loop; --# assert End_Position >= CSV_Line_Position and End_Position <= E_Strings.Get_Length (CSV_Line) + 1 --# and (Comma_Found -> (End_Position <= E_Strings.Get_Length (CSV_Line))); -- Work out if we hit the end of line. EOL_Found := not Comma_Found or else E_Strings.Get_Length (CSV_Line) = E_Strings.Lengths'Last; -- Make sure we found a comma if we were looking for one. Ok := Expect_EOL = EOL_Found; -- Make sure any quoted strings are OK. Ok := Ok and not In_Quoted_String; if Ok then Tmp_String := E_Strings.Section (E_Str => CSV_Line, Start_Pos => CSV_Line_Position, Length => End_Position - CSV_Line_Position); -- Strip away the quotes, if necessary. if Is_Quoted_String then The_String := Unqote (Tmp_String); else The_String := Tmp_String; end if; -- Jump over the comma. if not EOL_Found then CSV_Line_Position := End_Position + 1; end if; else The_String := E_Strings.Empty_String; end if; end Parse_CSV_String; function Parse_Natural (E_Str : E_Strings.T; Value_On_Error : Natural) return Natural is Tmp_Integer : Integer; The_Natural : Natural; Tmp_Stop : Integer; begin The_Natural := Value_On_Error; if E_Strings.Get_Length (E_Str) > 0 then --# accept F, 10, Tmp_Stop, "We don't care about Stop at the moment"; E_Strings.Get_Int_From_String (Source => E_Str, Item => Tmp_Integer, Start_Pt => E_Strings.Positions'First, Stop => Tmp_Stop); --# end accept; -- TODO: Check that Tmp_Stop = E_Strings.Get_Length (Tmp_String) ? if Tmp_Integer >= Natural'First then The_Natural := Natural'(Tmp_Integer); end if; end if; --# accept F, 33, Tmp_Stop, "We don't care about Stop at the moment"; return The_Natural; end Parse_Natural; function Parse_Riposte_Status (E_Str : E_Strings.T; Value_On_Error : Riposte_Status_T) return Riposte_Status_T is The_Riposte_Status : Riposte_Status_T; begin The_Riposte_Status := Value_On_Error; if E_Strings.Eq1_String (E_Str, "true") then The_Riposte_Status := Riposte_True; elsif E_Strings.Eq1_String (E_Str, "true_but_affected_by_undefinedness") then The_Riposte_Status := Riposte_True_Sortof; elsif E_Strings.Eq1_String (E_Str, "false") then The_Riposte_Status := Riposte_False; elsif E_Strings.Eq1_String (E_Str, "undischarged") then The_Riposte_Status := Riposte_Undischarged; elsif E_Strings.Eq1_String (E_Str, "error") then The_Riposte_Status := Riposte_Error; end if; return The_Riposte_Status; end Parse_Riposte_Status; procedure Parse_Riposte_CSV_Line (The_Line : in E_Strings.T; The_Record : out Riposte_CSV_Line_T; Ok : out Boolean) --# derives Ok, --# The_Record from The_Line; --# pre E_Strings.Get_Length (The_Line) >= 1; is Current_Position : E_Strings.Positions := E_Strings.Positions'First; Tmp : E_Strings.T; Num_CSV_Entries : constant Natural := 10; subtype CSV_Record_Index is Natural range 1 .. 
Num_CSV_Entries; begin The_Record := Invalid_CSV_Line; for I in CSV_Record_Index --# assert Current_Position <= E_Strings.Get_Length (The_Line) + 1; loop Parse_CSV_String (CSV_Line => The_Line, CSV_Line_Position => Current_Position, The_String => Tmp, Ok => Ok, Expect_EOL => (I = CSV_Record_Index'Last)); exit when not Ok; -- Each I will map to each number given in the Riposte user -- manual describing the "summary" proof artefact. case I is when 1 => The_Record.Unit := Tmp; when 2 => The_Record.Verdict := Parse_Riposte_Status (Tmp, Riposte_Error); when 3 => The_Record.Verdict_Error := Parse_Natural (Tmp, 1) /= 0; when 4 => The_Record.Verdict_True := Parse_Natural (Tmp, 0) /= 0; when 5 => The_Record.Verdict_Sound := Parse_Natural (Tmp, 1) /= 0; when 6 => The_Record.Verdict_Complete := Parse_Natural (Tmp, 0) /= 0; when 7 => The_Record.Verdict_Affected_By_Undefinedness := Parse_Natural (Tmp, 0) /= 0; when 8 => The_Record.Verdict_Time := Parse_Natural (Tmp, 0); when 9 => if E_Strings.Get_Length (Tmp) > 0 then The_Record.Verdict_Time := Parse_Natural (Tmp, 0); end if; when 10 => if E_Strings.Get_Length (Tmp) > 0 then The_Record.Verdict_Time := Parse_Natural (Tmp, 0); end if; end case; end loop; end Parse_Riposte_CSV_Line; begin -- Analyse_Riposte_Summary_File Error_In_RSM_File := False; File_Error := E_Strings.Empty_String; Riposte_Summary_File := SPARK_IO.Null_File; Error_Flag_Mentioned := False; -- open Riposte results file E_Strings.Open (File => Riposte_Summary_File, Mode_Of_File => SPARK_IO.In_File, Name_Of_File => Filename, Form_Of_File => "", Status => File_Status); if File_Status /= SPARK_IO.Ok then Error_In_RSM_File := True; File_Error := E_Strings.Copy_String (Str_Cannot_Open); FatalErrors.Process (FatalErrors.Could_Not_Open_Input_File, E_Strings.Empty_String); end if; loop Read_Next_Non_Blank_Line (File => Riposte_Summary_File, Success => Success, File_Line => Line_Read); exit when not Success; --# assert Success; Trimmed_Line := E_Strings.Trim (Line_Read); Success := E_Strings.Get_Length (Trimmed_Line) >= 1; if Success then Parse_Riposte_CSV_Line (The_Line => E_Strings.Trim (Line_Read), The_Record => CSV_Line, Ok => Success); else CSV_Line := Invalid_CSV_Line; end if; if not Success then -- Notify stdout. SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "************* " & Str_File_Corrupt & " ************", 0); SPARK_IO.Put_String (SPARK_IO.Standard_Output, "*** Offending line was: [", 0); E_Strings.Put_String (SPARK_IO.Standard_Output, Trimmed_Line); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "]", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); -- Also put somthing in the report file. SPARK_IO.Put_String (Report_File, "*** " & Str_File_Corrupt & " ***", 0); -- And finally set error flags. File_Error := E_Strings.Copy_String (Str_File_Corrupt); Error_In_RSM_File := True; end if; --# assert True; if not Success then null; else case CSV_Line.Verdict is when Riposte_True => VCHeap.Set_VC_State (CSV_Line.Unit, VCDetails.VC_Proved_By_Riposte); when Riposte_False => VCHeap.Set_VC_State (CSV_Line.Unit, VCDetails.VC_False); when Riposte_True_Sortof | Riposte_Undischarged => -- We don't do anything in this case. null; when Riposte_Error => -- This means riposte could not fully parse the VC or -- encountered some other kind of error. We will flag -- this up and include it in the final -- summary. However, we will only do this once. 
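               -- The entry written to the Riposte error summary file has the
               -- form "<path> (Error returned by Riposte.)".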
if not Error_Flag_Mentioned then E_Strings.Put_String (File => Temp_Riposte_Error_File, E_Str => PathFormatter.Format (Filename)); SPARK_IO.Put_Char (Temp_Riposte_Error_File, ' '); SPARK_IO.Put_Char (Temp_Riposte_Error_File, '('); SPARK_IO.Put_String (Temp_Riposte_Error_File, "Error returned by Riposte.", 0); SPARK_IO.Put_Line (Temp_Riposte_Error_File, ")", 0); Error_Flag_Mentioned := True; end if; end case; end if; end loop; --# accept F, 10, File_Status, "We don't care anymore since we've got everything we came for." & --# F, 10, Riposte_Summary_File, "Same as above."; SPARK_IO.Close (Riposte_Summary_File, File_Status); --# end accept; --# accept Flow, 601, FatalErrors.State, Temp_Riposte_Error_File, "False coupling through SPARK_IO" & --# Flow, 601, VCHeap.State, Temp_Riposte_Error_File, "False coupling through SPARK_IO"; end Analyse_Riposte_Summary_File; spark-2012.0.deb/pogs/oscommandline.ads0000644000175000017500000000720211753202340016726 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Platform dependent package to read the command line given to the program -- --and return it as a single string -- -- -- -------------------------------------------------------------------------------- with E_Strings; --# inherit E_Strings, --# OSFiling; package OSCommandLine --# own State; --# initializes State; is type DataType is record Valid : Boolean; ReportFile : E_Strings.T; -- defaults to .sum StartDirectory : E_Strings.T; -- defaults to current directory AnalyseVCs : Boolean; AnalysePFs : Boolean; AnalyseProofLog : Boolean; VersionRequested : Boolean; -- -v on the command line IgnoreDates : Boolean; -- -i on the command line PlainOutput : Boolean; -- -p on the command line ShortSummary : Boolean; -- -s on the command line -- Defaults to ShortSummary = False, i.e., a full summary. 
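   -- A hypothetical client of this package, to illustrate how the record is
   -- used (the name Opts and the handling shown are illustrative assumptions,
   -- not taken from the POGS main program):
   --
   --   Opts : OSCommandLine.DataType;
   --   ...
   --   OSCommandLine.Read (Switches => Opts);
   --   if not Opts.Valid then
   --      null;  -- report a usage error and stop
   --   elsif Opts.VersionRequested then
   --      null;  -- print the version banner only
   --   else
   --      null;  -- analyse, starting from Opts.StartDirectory and writing Opts.ReportFile
   --   end if;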
OutputPercentUndischarged : Boolean; end record; DefaultDataType : constant DataType := DataType' (Valid => True, ReportFile => E_Strings.Empty_String, StartDirectory => E_Strings.Empty_String, AnalyseVCs => True, AnalysePFs => False, AnalyseProofLog => True, PlainOutput => False, VersionRequested => False, IgnoreDates => False, ShortSummary => False, OutputPercentUndischarged => True); -- Returns character that precedes command line options function SwitchCharacter return Character; procedure Read (Switches : out DataType); --# global in OSFiling.File_Structure; --# in State; --# derives Switches from OSFiling.File_Structure, --# State; -- Takes a file path and directory and returns a path consisting of the file relative to the directory. -- If the file is specified with an absolute path then that path is returned and InputDir is ignored. procedure Normalize_Pathname (InputFile : in E_Strings.T; InputDir : in E_Strings.T; ResultPath : out E_Strings.T); --# derives ResultPath from InputDir, --# InputFile; end OSCommandLine; spark-2012.0.deb/pogs/fileheap.ads0000644000175000017500000001137211753202340015656 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Package combining Heap and FileDetails to give an ordered list of files -- -- -- --This is implemented as an abstract state machine. This is possible as there -- --is only a single occurrence of the FileHeap in the program. It is necessary -- --to prevent unacceptable overheads of creating local copies of the embedded -- --Heap and FileDetails types to circumvent the entire variable rule. It would -- --also be possible to implement Heap and FileDetails as ASMs but not -- --necessarily desirable as this would affect the high level annotations of the-- --program. -- -- -- -------------------------------------------------------------------------------- with E_Strings, FileDetails, Heap; use type Heap.Atom; --# inherit E_Strings, --# FatalErrors, --# FileDetails, --# Heap, --# HeapIndex; package FileHeap --# own State; is -- StartIndex is a point in the linked list at which to start the -- search. 
This is used to start insertion at the parent directory name -- If the file table is full, a fatal error is produced and Add does not -- return procedure Add (StartIndex : in Heap.Atom; NewName : in E_Strings.T; NewFileType : in FileDetails.FileTypes); --# global in out FatalErrors.State; --# in out State; --# derives FatalErrors.State, --# State from *, --# NewFileType, --# NewName, --# StartIndex, --# State; -------------------------------------------------------------------------- -- this procedure returns the file details for the specified entry in the -- linked list. Success if ListIndex points to a heap record which points to -- a valid set of file details procedure Details (ListIndex : in Heap.Atom; Success : out Boolean; Name : out E_Strings.T; FileType : out FileDetails.FileTypes; DirectoryIsResolved : out Boolean); --# global in State; --# derives DirectoryIsResolved, --# FileType, --# Name, --# Success from ListIndex, --# State; -------------------------------------------------------------------------- function FirstEntry return Heap.Atom; --# global in State; -------------------------------------------------------------------------- procedure Initialize (TopDirectory : in E_Strings.T); --# global out State; --# derives State from TopDirectory; -------------------------------------------------------------------------- procedure MarkDirectoryResolved (ListIndex : in Heap.Atom); --# global in out State; --# derives State from *, --# ListIndex; -------------------------------------------------------------------------- -- this procedure returns the 'NextOne' ordered element in FH after -- 'AfterThis'. It is successful if the NextOne is not a 'null' pointer procedure Next (AfterThis : in Heap.Atom; Success : out Boolean; NextOne : out Heap.Atom); --# global in State; --# derives NextOne, --# Success from AfterThis, --# State; end FileHeap; spark-2012.0.deb/pogs/findfiles.ads0000644000175000017500000000447411753202340016051 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Package for reading the file structure on the disc. This package is -- --OS independent, and calls OSDirectory.Scan to perform the actual directory -- --scan. 
-- -- -- -- -- -------------------------------------------------------------------------------- with E_Strings; --# inherit E_Strings, --# FatalErrors, --# FileDetails, --# FileHeap, --# Heap, --# OSDirectory, --# OSFiling; package FindFiles is procedure Scan (StartDirectory : in E_Strings.T); --# global in OSFiling.File_Structure; --# in out FatalErrors.State; --# out FileHeap.State; --# derives FatalErrors.State from *, --# OSFiling.File_Structure, --# StartDirectory & --# FileHeap.State from OSFiling.File_Structure, --# StartDirectory; end FindFiles; spark-2012.0.deb/pogs/heapindex.ads0000644000175000017500000000215511753202340016045 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= package HeapIndex is ListLength : constant Integer := 80000; type IndexType is range 0 .. ListLength; --# assert IndexType'Base is Integer; end HeapIndex; spark-2012.0.deb/pogs/heap.ads0000644000175000017500000000735611753202340015025 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with HeapIndex; --# inherit HeapIndex; package Heap --158 own var clauses removed is ListLength : constant Integer := 80000; --291, 293 (same as 1.2(c)) type Atom is range 0 .. 
ListLength; --# assert Atom'Base is Integer; --158 new abstract data type type HeapRecord is private; --158 new procedure procedure Initialize (TheHeap : out HeapRecord); --# derives TheHeap from ; --158 Heap added as a parameter and removed from globals list below procedure CreateAtom (TheHeap : in out HeapRecord; NewAtom : out Atom; Success : out Boolean); --# derives NewAtom, --# Success, --# TheHeap from TheHeap; procedure DisposeOfAtom (TheHeap : in out HeapRecord; OldAtom : in Atom); --# derives TheHeap from *, --# OldAtom; function APointer (TheHeap : HeapRecord; A : Atom) return Atom; function BPointer (TheHeap : HeapRecord; A : Atom) return Atom; function AValue (TheHeap : HeapRecord; A : Atom) return HeapIndex.IndexType; function BValue (TheHeap : HeapRecord; A : Atom) return HeapIndex.IndexType; procedure UpdateAPointer (TheHeap : in out HeapRecord; A : in Atom; Pointer : in Atom); --# derives TheHeap from *, --# A, --# Pointer; procedure UpdateBPointer (TheHeap : in out HeapRecord; A : in Atom; Pointer : in Atom); --# derives TheHeap from *, --# A, --# Pointer; procedure UpdateAValue (TheHeap : in out HeapRecord; A : in Atom; Value : in HeapIndex.IndexType); --# derives TheHeap from *, --# A, --# Value; procedure UpdateBValue (TheHeap : in out HeapRecord; A : in Atom; Value : in HeapIndex.IndexType); --# derives TheHeap from *, --# A, --# Value; --159 new function used in RefList function IsNullPointer (A : Atom) return Boolean; --158 private type AtomDescriptor is record ValueA, ValueB : HeapIndex.IndexType; PointerA, PointerB : Atom; end record; pragma Pack (AtomDescriptor); type ArrayOfAtoms is array (Atom) of AtomDescriptor; type HeapRecord is record ListOfAtoms : ArrayOfAtoms; HighMark, -- 160 NextFreeAtom : Atom; end record; end Heap; spark-2012.0.deb/pogs/banner-copyright.adb0000644000175000017500000000406211753202340017331 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Function to construct string announcing licensee of tool -- -- -- -------------------------------------------------------------------------------- separate (Banner) function Copyright (J : in Justification) return TypBannerLine is R : TypBannerLine; begin -- Display the copyright line if not in plain mode. -- Otherwise, display a blank line. 
if CommandLine.Data.PlainOutput then R := CreateBannerLine (FromText => "", WithJustification => J, FillChar => ' '); else R := CreateBannerLine (FromText => Version.Toolset_Copyright, WithJustification => J, FillChar => ' '); end if; return R; end Copyright; spark-2012.0.deb/pogs/filedetails.adb0000644000175000017500000001275311753202340016351 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Package providing a structure in which to store file details, in the form -- --of a full pathname (without extension) and a type, either PlainFile or -- --directory. -- -- -- --To be used in tandem with the Heap data structure, hence the use of -- --Heap.Atom as the array range -- -------------------------------------------------------------------------------- with OSFiling; package body FileDetails is --------------------------------------------------------------------------- procedure Add (Details : in out DataType; Index : out HeapIndex.IndexType; Success : out Boolean; Name : in E_Strings.T; FileType : in FileTypes) is begin if Details.HighMark < HeapIndex.IndexType'Last then Success := True; Details.HighMark := Details.HighMark + 1; Index := Details.HighMark; Details.Details (Details.HighMark) := DetailsElement'(Name, FileType, False); else Success := False; Index := 0; end if; end Add; -------------------------------------------------------------------------- procedure Initialize (Details : out DataType) is begin --# accept F, 23, Details.Details, "Element-by-element array initialization" & --# F, 602, Details, Details.Details, "Element-by-element array initialization"; Details.HighMark := 0; for I in HeapIndex.IndexType loop Details.Details (I) := NullDetailsElement; end loop; end Initialize; ------------------------------------------------------------------------- procedure MarkDirectoryResolved (Details : in out DataType; Index : in HeapIndex.IndexType) is begin Details.Details (Index).DirectoryIsResolved := True; end MarkDirectoryResolved; ------------------------------------------------------------------------- procedure Order (FirstName : in E_Strings.T; FirstType : in FileTypes; SecondName : in E_Strings.T; SecondType : in FileTypes; Success : out Boolean; Result : out E_Strings.Order_Types) is NameOrder : E_Strings.Order_Types; begin -- Order -- check which name comes first NameOrder := OSFiling.Order (FirstName, SecondName); if FirstType = Invalid or SecondType = Invalid then Success := False; Result := E_Strings.Neither_First; else Success := True; -- if one comes 
first then return it if NameOrder /= E_Strings.Neither_First then Result := NameOrder; else -- otherwise if one is a file and the other is a directory, -- the file comes first if (FirstType = PlainFile and SecondType = Directory) then Result := E_Strings.First_One_First; elsif (FirstType = Directory and SecondType = PlainFile) then Result := E_Strings.Second_One_First; else -- otherwise neither comes first Result := E_Strings.Neither_First; end if; end if; end if; end Order; -------------------------------------------------------------------------- procedure Retrieve (Details : in DataType; Index : in HeapIndex.IndexType; Success : out Boolean; Name : out E_Strings.T; FileType : out FileTypes; DirectoryIsResolved : out Boolean) is begin if Index <= Details.HighMark and Index /= 0 then Success := True; Name := Details.Details (Index).Name; FileType := Details.Details (Index).FileType; DirectoryIsResolved := Details.Details (Index).DirectoryIsResolved; else Name := E_Strings.Empty_String; FileType := Invalid; DirectoryIsResolved := False; Success := False; end if; end Retrieve; end FileDetails; spark-2012.0.deb/pogs/vcs-analysevictorlogfile.adb0000644000175000017500000001314011753202340021071 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Procedure to analyse a .vlg file -- -- -- -------------------------------------------------------------------------------- with Date_Time; separate (VCS) procedure AnalyseVictorLogFile (Filename : in E_Strings.T; VC_File_Date_Time : in E_Strings.T; SIV_File_Date_Time : in E_Strings.T; VLG_File_Date_Time : out E_Strings.T; Error_In_VLG_File : out Boolean; File_Error : out E_Strings.T) is -- String and error conctants. Str_File_Corrupt : constant String := "ViCToR log file corrupt: Could not parse timestamp"; Str_Cannot_Open : constant String := "Cannot open ViCToR log file"; Str_VCG_Is_Newer : constant String := "VCG file is newer than the VCT file"; Str_SIV_Is_Newer : constant String := "SIV file is newer than the VCT file"; Str_Bad_Timestamps : constant String := "Malformed timestamps"; Timestamp_Mark : constant String := "Date: "; File_Status : SPARK_IO.File_Status; Victor_Log_File : SPARK_IO.File_Type; Line_Read : E_Strings.T; begin -- AnalyseVictorLogFile Error_In_VLG_File := False; File_Error := E_Strings.Empty_String; Victor_Log_File := SPARK_IO.Null_File; -- Open the ViCToR log file. 
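   -- The parsing code below expects the first line of the .vlg file to carry
   -- a timestamp after the "Date: " prefix; a plausible (hypothetical)
   -- example of such a line is:
   --
   --   Date: 29-MAY-2012 14:02:33.71
   --
   -- Timestamp_Mark'Length is 6, so the E_Strings.Section call below takes
   -- the characters from position 7 to the end of the trimmed line, i.e.
   -- everything after the prefix, and passes the result to
   -- Date_Time.Compare_Timestamps for comparison against the VCG and SIV
   -- timestamps.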
E_Strings.Open (File => Victor_Log_File, Mode_Of_File => SPARK_IO.In_File, Name_Of_File => Filename, Form_Of_File => "", Status => File_Status); if File_Status /= SPARK_IO.Ok then Error_In_VLG_File := True; File_Error := E_Strings.Copy_String (Str_Cannot_Open); FatalErrors.Process (FatalErrors.Could_Not_Open_Input_File, E_Strings.Empty_String); end if; -- The timestamp is on the first line. E_Strings.Get_Line (File => Victor_Log_File, E_Str => Line_Read); Line_Read := E_Strings.Trim (Line_Read); --# assert True; -- We have three options: A timestamp, no timestamp or a corrupt file if E_Strings.Starts_With (Line_Read, Timestamp_Mark) and then E_Strings.Get_Length (Line_Read) >= Timestamp_Mark'Length + 1 then VLG_File_Date_Time := E_Strings.Section (E_Str => Line_Read, Start_Pos => Timestamp_Mark'Length + 1, Length => E_Strings.Get_Length (Line_Read) - Timestamp_Mark'Length); --# assert True; -- Since we've got a timestamp, we should compare it with the -- other timestamps, if we can. if not (E_Strings.Is_Empty (VC_File_Date_Time) or E_Strings.Starts_With (VC_File_Date_Time, "Unknown")) then case Date_Time.Compare_Timestamps (VC_File_Date_Time, VLG_File_Date_Time) is when Date_Time.A_Less_Than_B | Date_Time.A_Equals_B => null; when Date_Time.A_Greater_Than_B => Error_In_VLG_File := True; File_Error := E_Strings.Copy_String (Str_VCG_Is_Newer); when Date_Time.Malformed_Timestamps => Error_In_VLG_File := True; File_Error := E_Strings.Copy_String (Str_Bad_Timestamps); end case; end if; --# assert True; if not (E_Strings.Is_Empty (SIV_File_Date_Time) or E_Strings.Starts_With (SIV_File_Date_Time, "Unknown")) then case Date_Time.Compare_Timestamps (SIV_File_Date_Time, VLG_File_Date_Time) is when Date_Time.A_Less_Than_B | Date_Time.A_Equals_B => null; when Date_Time.A_Greater_Than_B => Error_In_VLG_File := True; File_Error := E_Strings.Copy_String (Str_SIV_Is_Newer); when Date_Time.Malformed_Timestamps => Error_In_VLG_File := True; File_Error := E_Strings.Copy_String (Str_Bad_Timestamps); end case; end if; elsif E_Strings.Is_Empty (Line_Read) then VLG_File_Date_Time := E_Strings.Copy_String (Unknown_VCT_Date); else VLG_File_Date_Time := E_Strings.Empty_String; Error_In_VLG_File := True; File_Error := E_Strings.Copy_String (Str_File_Corrupt); end if; --# assert True; -- Finally, close the ViCToR log file. --# accept F, 10, File_Status, "We don't care anymore since we've got everything we came for." & --# F, 10, Victor_Log_File, "Same as above."; SPARK_IO.Close (Victor_Log_File, File_Status); --# end accept; end AnalyseVictorLogFile; spark-2012.0.deb/pogs/fatalerrors.adb0000644000175000017500000001062311753202340016402 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. 
If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Output 'fatal' error messages -- --Note that the Process procedure does not return to point of call. Instead -- --it raises an exception which is trapped by the main program, which causes -- --the program to stop politely. -- -- -- -- -- -------------------------------------------------------------------------------- with CommandLine; with POGS_Exceptions; with SPARK_IO; package body FatalErrors is --# hide FatalErrors; -- hidden to mask the use of exceptions and the non-initialization of -- FatalErrors.State procedure Process (Error : in Error_Type; Message : in E_Strings.T) is T : E_Strings.T; begin case Error is when Could_Not_Open_Input_File => T := E_Strings.Copy_String (Str => "Could not open expected input file " & "- unexpected file system error"); when Could_Not_Create_Report_File => E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => E_Strings.Copy_String (Str => "Could not create report file ")); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Message); T := E_Strings.Copy_String (Str => "Check you have write permission for directory"); when Data_Structure_Inconsistency => T := E_Strings.Copy_String (Str => "Internal file table inconsistent."); when VC_Data_Structure_Inconsistency => T := E_Strings.Copy_String (Str => "Internal VC table inconsistent."); when Expected_Directory_Missing => E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => E_Strings.Copy_String (Str => "Directory ")); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => Message); T := E_Strings.Copy_String (Str => " not found or not readable."); when File_Heap_Full => T := E_Strings.Copy_String (Str => "Internal file table too small."); when VC_Heap_Full => T := E_Strings.Copy_String (Str => "Internal VC table too small."); when Invalid_Command_Line => T := E_Strings.Copy_String (Str => "Usage: "); E_Strings.Append_Examiner_String (E_Str1 => T, E_Str2 => CommandLine.Usage_String); when Problem_Reading_File => T := E_Strings.Copy_String (Str => "Could not read from input file " & "- unexpected file system error"); when Problem_Creating_Temp_File => T := E_Strings.Copy_String (Str => "Could not create temporary file."); when Subprogram_Totals_Inconsistent => T := E_Strings.Copy_String (Str => "Overall subprogram summary counts inconsistent."); end case; E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => T); raise POGS_Exceptions.Usage_Error; end Process; end FatalErrors; spark-2012.0.deb/pogs/pathformatter.adb0000644000175000017500000000575711753202340016752 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --To provide a standardised file name format which is platform independent -- --when the -plain option is chosen. -- -- -- -------------------------------------------------------------------------------- with CommandLine; package body PathFormatter is function Format (RawFileName : E_Strings.T) return E_Strings.T is WorkingDir : E_Strings.T; Result : E_Strings.T; begin if CommandLine.Data.PlainOutput and then E_Strings.Get_Length (CommandLine.Data.StartDirectory) < E_Strings.Get_Length (RawFileName) then -- strip the working directory off the start of VCFileName WorkingDir := CommandLine.Data.StartDirectory; Result := E_Strings.Section (E_Str => RawFileName, Start_Pos => E_Strings.Get_Length (E_Str => WorkingDir) + 1, Length => E_Strings.Get_Length (E_Str => RawFileName) - E_Strings.Get_Length (E_Str => WorkingDir)); -- Convert back slash directory separators to forward slashes Result := E_Strings.Translate (E_Str => Result, From_Char => '\', To_Char => '/'); -- Finally, strip a leading slash if present if E_Strings.Get_Length (Result) >= 1 and then E_Strings.Get_Element (Result, E_Strings.Positions'First) = '/' then Result := E_Strings.Section (E_Str => Result, Start_Pos => E_Strings.Positions'First + 1, Length => E_Strings.Get_Length (Result) - 1); end if; else Result := RawFileName; end if; return Result; end Format; end PathFormatter; spark-2012.0.deb/pogs/osdirectory.adb0000644000175000017500000001226111753202340016424 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Characters.Latin_1; with E_Strings; with E_Strings.Not_SPARK; with FatalErrors; with FileDetails; with FileHeap; with GNAT.Directory_Operations; with OSFiling; package body OSDirectory is ------------------------------------------------------------------------ -- this procedure reads the named directory using the C calls -- opendir(), readdir() and closedir() -- for each entry, it calls OSFiling.IsDirectory and then enters it in the -- linked list ------------------------------------------------------------------------ procedure Scan (ListIndex : in Heap.Atom) is --# hide Scan; DetailsSuccess : Boolean; FileType : FileDetails.FileTypes; DirectoryIsResolved : Boolean; TempName : String (1 .. 
1024); BaseName, DirName, Filename : E_Strings.T; Dir : GNAT.Directory_Operations.Dir_Type; Last : Natural; -- Convert a regular string into an Examiner string. You may -- be tempted to refactor this using E_Strings.Copy_String, but -- note that we are given a fixed length string which is padded -- with NUL so it won't work. function Create_Examiner_String (Str : in String) return E_Strings.T is E_Str : E_Strings.T := E_Strings.Empty_String; Pos : E_Strings.Positions := 1; begin while Str (Pos) /= Ada.Characters.Latin_1.NUL loop E_Strings.Append_Char (E_Str => E_Str, Ch => Str (Pos)); Pos := Pos + 1; end loop; return E_Str; end Create_Examiner_String; -------------------------------------------------------------------------- begin -- ScanDirectory -- retrieve directory details FileHeap.Details (ListIndex, DetailsSuccess, DirName, FileType, DirectoryIsResolved); if not DetailsSuccess then FatalErrors.Process (FatalErrors.Data_Structure_Inconsistency, E_Strings.Empty_String); end if; -- block to trap exceptions from Open begin -- Read from DirName GNAT.Directory_Operations.Open (Dir => Dir, Dir_Name => E_Strings.Not_SPARK.Get_String (E_Str => DirName)); exception when others => -- note: this call will NOT return FatalErrors.Process (FatalErrors.Expected_Directory_Missing, DirName); end; -- Now repeatedly read a file from the directory loop -- read the file TempName := (others => Ada.Characters.Latin_1.NUL); GNAT.Directory_Operations.Read (Dir, TempName, Last); if Last = 0 then exit; end if; -- Ignore dot files; hidden Unix ones, and ., .. if TempName (1) /= '.' then -- Create a proper name BaseName := Create_Examiner_String (Str => TempName); Filename := OSFiling.Down_Directory (Path => DirName, Sub_Directory => BaseName); -- Breadth-first search; add any directory to our -- to-do list if OSFiling.Is_Directory (Name => Filename) then FileHeap.Add (ListIndex, Filename, FileDetails.Directory); else -- It's a vanilla file; get its full name Filename := OSFiling.Full_Filename (Path => DirName, Filename => BaseName); -- Check if it may be relevant. We need to do this -- here to guard against JC17-010 on -- Windows. Otherwise we may look at something like -- FOO.rep which would result in a duplicate analysis -- of foo.vcg. if OSFiling.Is_Relevant_File (Filename) then -- Remove the extension OSFiling.Remove_File_Extension (Filename); FileHeap.Add (ListIndex, Filename, FileDetails.PlainFile); end if; end if; -- IsDirectory end if; -- TempName(1) /= '.' end loop; -- infinite GNAT.Directory_Operations.Close (Dir); end Scan; end OSDirectory; spark-2012.0.deb/pogs/pogs_exceptions.ads0000644000175000017500000000245111753202340017310 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. 
If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- User-defined exceptions required by POGS. -- Currently, just a single exception. This is raised by the FatalErrors package -- and indicates to the top-level exception handler that this error relates to -- invalid user input rather than being an unexpected internal error. package POGS_Exceptions is Usage_Error : exception; end POGS_Exceptions; spark-2012.0.deb/pogs/total.adb0000644000175000017500000023416011753202340015205 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Package providing data structure to store running totals, and a procedure -- --to print them. -- -- -- -------------------------------------------------------------------------------- with Banner; with CommandLine; with E_Strings; with FatalErrors; with Heap; with VCHeap; with VCDetails; use type VCDetails.VC_State_T; use type VCDetails.DPC_State_T; package body Total --# own State is The_Totals; is type VC_Counter is array (VCDetails.Terminal_Point_Type) of Natural; type Error_Type_Counter is array (VCDetails.Error_Type) of Natural; Null_VC_Counter : constant VC_Counter := VC_Counter'(others => 0); -- if adding more entries to this type, you must alter the procedure -- Output below to display the results, and add code in the appropriate -- analysis routines to increment the counter(s). You must also alter -- the aggregate in Initialize below to initialize the new counter type Total_Type is record -- Subprograms without errors (all subprograms have vcs). Subprograms_With_VCs : Natural; -- Subprograms with errors. Subprograms_Where_Error : Error_Type_Counter; Subprograms_Where_VC_Analysis_Abandoned : Natural; Subprograms_Where_DPC_Analysis_Abandoned : Natural; -- A subprogram with At Least One (ALO) Undischarged VC. Subprograms_With_ALO_Undischarged_VC : Natural; -- The following fields record the number of subprograms, -- which are not necessarily fully proved, but have at least -- one VC proved by each of the following strategies: -- A subprogram with At Least One (ALO) VC proved by -- the Examiner. Subprograms_With_ALO_Examiner_VC : Natural; -- A subprogram with At Least One (ALO) VC proved by the -- simplifier without a user defined rule. Subprograms_With_ALO_Simplifier_VC : Natural; -- A subprogram with At Least One (ALO) VC proved using -- a using proof by contradiction. 
      Subprograms_With_ALO_Contradiction_VC : Natural;
      -- A subprogram with At Least One (ALO) VC proved using
      -- a user defined rule.
      Subprograms_With_ALO_User_Rule_VC : Natural;
      -- A subprogram with At Least One (ALO) VC proved by Victor
      Subprograms_With_ALO_Victor_VC : Natural;
      -- A subprogram with At Least One (ALO) VC proved by Riposte
      Subprograms_With_ALO_Riposte_VC : Natural;
      -- A subprogram with At Least One (ALO) VC proved using
      -- the Checker.
      Subprograms_With_ALO_Checker_VC : Natural;
      -- A subprogram with At Least One (ALO) VC discharged by review
      Subprograms_With_ALO_Review_VC : Natural;
      -- A subprogram with At Least One (ALO) false VC.
      Subprograms_With_ALO_False_VC : Natural;
      -- The following fields represent the use of a hierarchy of
      -- proof strategies:
      -- Examiner -> Simplifier -> User Rules -> ViCToR -> Checker -> Review
      -- When a subprogram is proved, the strategy latest in the hierarchy
      -- is considered to have been used to complete the proof, even
      -- if earlier strategies have also been applied.
      -- The hierarchy gives a measure of the extent of the strategies
      -- required in proving a subprogram.
      -- For example, a subprogram whose VCs are proved only by the Examiner
      -- and the Simplifier is counted under Subprograms_Proved_By_Simplifier,
      -- the latest strategy used.
      -- The definitions of the hierarchical strategies are given below:
      -- A subprogram proof is completed by the examiner if:
      -- o at LEAST one VC was proved by the examiner and
      -- o ZERO VCs were proved by the simplifier,
      -- Victor, Riposte, the checker, review file or left Undischarged and
      -- o the subprogram has no false VCs.
      Subprograms_Proved_By_Examiner : Natural;
      -- A subprogram proof is completed by the simplifier if:
      -- o at LEAST one VC was proved by the simplifier and
      -- o ZERO VCs were proved by Victor, Riposte, the checker, review file
      -- or left Undischarged and
      -- o the subprogram has no false VCs.
      Subprograms_Proved_By_Simplifier : Natural;
      -- A subprogram proof is completed by a user defined proof rule if:
      -- o at least one VC was proved by the simplifier and
      -- o at least one user defined rule has been used in the proof of a VC and
      -- o ZERO VCs were proved by Victor, Riposte, the checker, review file
      -- or left Undischarged and
      -- o the subprogram has no false VCs.
      Subprograms_Proved_With_User_Proof_Rule : Natural;
      -- A subprogram proof is completed by ViCToR if:
      -- o at least one VC was proved by ViCToR and
      -- o ZERO VCs were proved by Riposte, the checker, review file
      -- or left Undischarged and
      -- o the subprogram has no false VCs.
      Subprograms_Proved_By_Victor : Natural;
      -- A subprogram proof is completed by Riposte if:
      -- o at least one VC was proved by Riposte and
      -- o ZERO VCs were proved by the checker, review file
      -- or left Undischarged and
      -- o the subprogram has no false VCs.
      Subprograms_Proved_By_Riposte : Natural;
      -- A subprogram proof is completed by the checker if:
      -- o at LEAST one VC was proved by the checker and
      -- o ZERO VCs were proved by the review file or
      -- left Undischarged and
      -- o the subprogram has no false VCs.
      Subprograms_Proved_By_Checker : Natural;
      -- A subprogram proof is completed by review if:
      -- o at LEAST one VC was proved in the review file and
      -- o ZERO VCs were left Undischarged and
      -- o the subprogram has no false VCs.
      Subprograms_Proved_By_Review : Natural;
      -- The following fields record the number of VCs proved by
      -- each strategy grouped by the sort of origin (the terminal point)
      -- of the VC.
VCs_Total : VC_Counter; VCs_Proved_By_Examiner : VC_Counter; VCs_Proved_By_Simplifier : VC_Counter; VCs_Proved_With_User_Proof_Rule : VC_Counter; VCs_Proved_By_Victor : VC_Counter; VCs_Proved_By_Riposte : VC_Counter; VCs_Proved_By_Checker : VC_Counter; VCs_Proved_By_Review : VC_Counter; VCs_Proved_False : VC_Counter; VCs_Undischarged : VC_Counter; -- Record number of subprograms with DPCs Subprograms_With_DPCs : Natural; -- The following fields record the number of dead paths ZombieScope -- has found. Number_Of_Dead_Paths : Natural; Subprograms_With_Dead_Paths : Natural; end record; The_Totals : Total_Type; function Sum (T : in VC_Counter) return Natural is Result : Natural := 0; begin for I in VCDetails.Terminal_Point_Type loop Result := Result + T (I); end loop; return Result; end Sum; procedure Calculate_Percentages (The_Totals : in Total_Type; Percent_Undischarged_Str : out E_Strings.T; Percent_Proved_By_Examiner_Str : out E_Strings.T; Percent_Proved_By_Victor_Str : out E_Strings.T; Percent_Proved_By_Riposte_Str : out E_Strings.T; Percent_Proved_By_Checker_Str : out E_Strings.T; Percent_Proved_By_Review_Str : out E_Strings.T; Percent_Simplified_Str : out E_Strings.T; Percent_With_User_Rule_Str : out E_Strings.T; Percent_Proved_False_Str : out E_Strings.T) --# derives Percent_Proved_By_Checker_Str, --# Percent_Proved_By_Examiner_Str, --# Percent_Proved_By_Review_Str, --# Percent_Proved_By_Riposte_Str, --# Percent_Proved_By_Victor_Str, --# Percent_Proved_False_Str, --# Percent_Simplified_Str, --# Percent_Undischarged_Str, --# Percent_With_User_Rule_Str from The_Totals; --# pre Sum (The_Totals.VCs_Total) /= 0; is subtype Percentage is Natural range 0 .. 100; VCs_Total : Natural; procedure Total_And_Value_To_Percentage (Overall_Total : in Natural; Value : in Natural; Percent : out Percentage; Percent_C : out Character) --# derives Percent, --# Percent_C from Overall_Total, --# Value; --# pre (Overall_Total/=0); is Percise_Percent_Value : Float; Rounded_Percent_Value : Percentage; begin Percise_Percent_Value := (Float (Value) * 100.0) / Float (Overall_Total); Rounded_Percent_Value := Percentage (Percise_Percent_Value); case Rounded_Percent_Value is -- If the rounded percentage value is zero, but the actual -- value is non-zero, then the rounded value is forced to be 1. -- This behaviour ensures that a zero percentage really means -- zero. when 0 => if (Value /= 0) then Percent := 1; -- If the actual percent is less than 0.5 then this is -- indicated with an appropriate leading character. if (Percise_Percent_Value < 0.5) then Percent_C := '<'; else Percent_C := ' '; end if; else Percent := 0; Percent_C := ' '; end if; -- If the rounded percentage value is 100, but the actual value -- is not equal to the total, then the rounded value is forced -- to be 99. This behaviour ensures that a hundred percent -- really means all. when 100 => if (Value /= Overall_Total) then Percent := 99; -- If the actual percent is greater than 99.5 then this is -- indicated with an appropriate leading character. if (Percise_Percent_Value > 99.5) then Percent_C := '>'; else Percent_C := ' '; end if; else Percent := 100; Percent_C := ' '; end if; -- In all other cases, accept the rounding approximation. when 1 .. 
99 => Percent := Rounded_Percent_Value; Percent_C := ' '; end case; end Total_And_Value_To_Percentage; procedure Generate_Percent_String (Percent : in Percentage; Percent_C : in Character; Percent_Str : out E_Strings.T) --# derives Percent_Str from Percent, --# Percent_C; is Percent_Part : E_Strings.T; begin --Initialise to empty string. Percent_Str := E_Strings.Empty_String; --For alingment: if percent is one digit, add two spaces. -- : if percent is two digits, add one spaces. case Percent is when 0 .. 9 => E_Strings.Append_String (E_Str => Percent_Str, Str => " "); when 10 .. 99 => E_Strings.Append_String (E_Str => Percent_Str, Str => " "); when 100 => null; end case; --Append the: '>','<',' ' prefix. (max length "X": 1) E_Strings.Append_Char (E_Str => Percent_Str, Ch => Percent_C); --Append the: percent number. (max length "XYYY": 4) E_Strings.Put_Int_To_String (Dest => Percent_Part, Item => Percent, Start_Pt => 1, Base => 10); E_Strings.Append_Examiner_String (E_Str1 => Percent_Str, E_Str2 => E_Strings.Trim (E_Str => Percent_Part)); --Append the: symbol '%'. (max length "XYYY%": 5) E_Strings.Append_Char (E_Str => Percent_Str, Ch => '%'); end Generate_Percent_String; function Make_Percent_String (N_Actual, N_Total : in Natural) return E_Strings.T --# pre N_Total /= 0; is Percent : Percentage; Size_Char : Character; -- For the ordering relation character in >99% or <1% Retval : E_Strings.T; begin Total_And_Value_To_Percentage (Overall_Total => N_Total, Value => N_Actual, Percent => Percent, Percent_C => Size_Char); Generate_Percent_String (Percent => Percent, Percent_C => Size_Char, Percent_Str => Retval); return Retval; end Make_Percent_String; begin VCs_Total := Sum (The_Totals.VCs_Total); Percent_Undischarged_Str := Make_Percent_String (Sum (The_Totals.VCs_Undischarged), VCs_Total); Percent_Proved_By_Examiner_Str := Make_Percent_String (Sum (The_Totals.VCs_Proved_By_Examiner), VCs_Total); Percent_Proved_By_Victor_Str := Make_Percent_String (Sum (The_Totals.VCs_Proved_By_Victor), VCs_Total); Percent_Proved_By_Riposte_Str := Make_Percent_String (Sum (The_Totals.VCs_Proved_By_Riposte), VCs_Total); Percent_Proved_By_Checker_Str := Make_Percent_String (Sum (The_Totals.VCs_Proved_By_Checker), VCs_Total); Percent_Proved_By_Review_Str := Make_Percent_String (Sum (The_Totals.VCs_Proved_By_Review), VCs_Total); Percent_Simplified_Str := Make_Percent_String (Sum (The_Totals.VCs_Proved_By_Simplifier), VCs_Total); Percent_With_User_Rule_Str := Make_Percent_String (Sum (The_Totals.VCs_Proved_With_User_Proof_Rule), VCs_Total); Percent_Proved_False_Str := Make_Percent_String (Sum (The_Totals.VCs_Proved_False), VCs_Total); end Calculate_Percentages; -- Never returns from this subprogram. -- Null dependency relation used to avoid propagation -- of FatalErrors.State impacting existing clients of Total. -- FatalErrors.State is of little interest in this context. procedure Fatal_Error (Error : in FatalErrors.Error_Type) --# derives null from Error; is --# hide Fatal_Error; begin FatalErrors.Process (Error, E_Strings.Empty_String); end Fatal_Error; function Totals_Are_Balanced return Boolean --# global in The_Totals; is Total_Subprograms_Proved : Natural; Total_Subprograms_At_Least_One_False_VC : Natural; Total_Subprograms_No_False_At_Least_One_Undischarged : Natural; Total_Subprograms : Natural; begin -- Total all of the subprogram that have been fully proved. 
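      -- A worked example for Total_And_Value_To_Percentage and
      -- Generate_Percent_String above (the figures are illustrative):
      --   with Overall_Total = 300 and Value = 1, the exact percentage is
      --   about 0.33, which rounds to 0; because Value /= 0 it is forced to
      --   1 with the '<' marker and rendered as "  <1%";
      --   with Overall_Total = 300 and Value = 299, the exact percentage is
      --   about 99.67, which rounds to 100; because Value /= Overall_Total
      --   it is forced back to 99 with the '>' marker and rendered as " >99%".
      -- A reported 0% or 100% therefore always means exactly none or exactly
      -- all of the VCs.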
Total_Subprograms_Proved := ((((((The_Totals.Subprograms_Proved_By_Examiner + The_Totals.Subprograms_Proved_By_Simplifier) + The_Totals.Subprograms_Proved_With_User_Proof_Rule) + The_Totals.Subprograms_Proved_By_Checker) + The_Totals.Subprograms_Proved_By_Review) + The_Totals.Subprograms_Proved_By_Victor) + The_Totals.Subprograms_Proved_By_Riposte); -- Total all of the subprogram that have at least one false vc. Total_Subprograms_At_Least_One_False_VC := The_Totals.Subprograms_With_ALO_False_VC; -- Total all of the subprogram that have no false vcs, and at least one -- undischarged vc. Total_Subprograms_No_False_At_Least_One_Undischarged := The_Totals.Subprograms_With_ALO_Undischarged_VC; -- Total all of the subprograms. Total_Subprograms := ((Total_Subprograms_Proved + Total_Subprograms_At_Least_One_False_VC) + Total_Subprograms_No_False_At_Least_One_Undischarged); -- Return true if the total matches the recorded total number of Subprograms -- with VCs and false otherwise. return (Total_Subprograms = The_Totals.Subprograms_With_VCs); end Totals_Are_Balanced; procedure Update_Totals (VCG : in Boolean; DPC : in Boolean) --# global in VCHeap.State; --# in out The_Totals; --# derives The_Totals from *, --# DPC, --# VCG, --# VCHeap.State; is Subprogram_Is_Undischarged : Boolean; Subprogram_Has_VC_Proved_By_Examiner : Boolean; Subprogram_Has_VC_Proved_By_Simplifier : Boolean; Subprogram_Has_VC_Proved_By_Contradiction : Boolean; Subprogram_Has_VC_Proved_With_User_Proof_Rule : Boolean; Subprogram_Has_VC_Proved_By_Victor : Boolean; Subprogram_Has_VC_Proved_By_Riposte : Boolean; Subprogram_Has_VC_Proved_By_Checker : Boolean; Subprogram_Has_VC_Proved_By_Review : Boolean; Subprogram_Has_VC_Proved_False : Boolean; Subprogram_Contains_Dead_Paths : Boolean; More_VCs : Boolean; Heap_Index : Heap.Atom; Next_Index : Heap.Atom; Unused_VC_Name : E_Strings.T; Unused_Path_Start : E_Strings.T; Unused_Path_End : E_Strings.T; End_Type : VCDetails.Terminal_Point_Type; VC_State : VCDetails.VC_State_T; DPC_State : VCDetails.DPC_State_T; -- Return true if there was an error analysing any of the .vcg -- derived files (i.e. .siv, .vct, etc.). Note that corrupted -- files related to ZombieScope are handeled separately. function Something_Is_Corrupt_In_VC return Boolean --# global in VCHeap.State; is Tmp : Boolean; begin for X in VCDetails.Error_Type_Corrupt_VC_Files loop Tmp := VCHeap.Error_Raised (Error_Kind => X); exit when Tmp; end loop; return Tmp; end Something_Is_Corrupt_In_VC; -- Return true if there was an error analysing any of the .dpc -- derived files (i.e. .sdp, etc.). function Something_Is_Corrupt_In_DPC return Boolean --# global in VCHeap.State; is Tmp : Boolean; begin for X in VCDetails.Error_Type_Corrupt_DPC_Files loop Tmp := VCHeap.Error_Raised (Error_Kind => X); exit when Tmp; end loop; return Tmp; end Something_Is_Corrupt_In_DPC; begin -- Initialisation. Subprogram_Is_Undischarged := False; Subprogram_Has_VC_Proved_By_Examiner := False; Subprogram_Has_VC_Proved_By_Simplifier := False; Subprogram_Has_VC_Proved_By_Contradiction := False; Subprogram_Has_VC_Proved_With_User_Proof_Rule := False; Subprogram_Has_VC_Proved_By_Victor := False; Subprogram_Has_VC_Proved_By_Riposte := False; Subprogram_Has_VC_Proved_By_Checker := False; Subprogram_Has_VC_Proved_By_Review := False; Subprogram_Has_VC_Proved_False := False; Subprogram_Contains_Dead_Paths := False; -- Keep count of all the errors. 
for X in VCDetails.Error_Type loop if VCHeap.Error_Raised (Error_Kind => X) then The_Totals.Subprograms_Where_Error (X) := The_Totals.Subprograms_Where_Error (X) + 1; end if; end loop; --# assert True; -- Only perform detailed analysis of subprogram if it did not -- have an associated corrupt file. if Something_Is_Corrupt_In_VC then The_Totals.Subprograms_Where_VC_Analysis_Abandoned := The_Totals.Subprograms_Where_VC_Analysis_Abandoned + 1; end if; if Something_Is_Corrupt_In_DPC then The_Totals.Subprograms_Where_DPC_Analysis_Abandoned := The_Totals.Subprograms_Where_DPC_Analysis_Abandoned + 1; end if; --# assert True; -- We count VCs only if the .vcg file is not corrupt. If -- something else is corrupt we don't care here. if VCG and then not VCHeap.Error_Raised (Error_Kind => VCDetails.Corrupt_VCG_File) then The_Totals.Subprograms_With_VCs := The_Totals.Subprograms_With_VCs + 1; end if; if DPC and then not VCHeap.Error_Raised (Error_Kind => VCDetails.Corrupt_DPC_File) then The_Totals.Subprograms_With_DPCs := The_Totals.Subprograms_With_DPCs + 1; end if; --# assert True; More_VCs := True; Heap_Index := VCHeap.First_Entry; while More_VCs and not Heap.IsNullPointer (Heap_Index) loop -- Get the details for the next VC. --# accept F, 10, Unused_Path_End, "Unused_Path_End unused here" & --# F, 10, Unused_Path_Start, "Unused_Path_Start unused here" & --# F, 10, Unused_VC_Name, "Unused_VC_Name unused here" ; VCHeap.Details (List_Index => Heap_Index, VC_Name => Unused_VC_Name, Path_Start => Unused_Path_Start, Path_End => Unused_Path_End, End_Type => End_Type, VC_State => VC_State, DPC_State => DPC_State); --# end accept; --# assert True; --# accept F, 41, "Expression is stable"; -- If we have a corrupt VCG file we don't do anything. if VCG and then not VCHeap.Error_Raised (Error_Kind => VCDetails.Corrupt_VCG_File) then --# end accept; The_Totals.VCs_Total (End_Type) := The_Totals.VCs_Total (End_Type) + 1; --# accept F, 41, "Expression is stable"; -- We only sum up totals if we don't have anything corrupted. 
if Something_Is_Corrupt_In_VC then --# end accept; The_Totals.VCs_Undischarged (End_Type) := The_Totals.VCs_Undischarged (End_Type) + 1; Subprogram_Is_Undischarged := True; else case VC_State is when VCDetails.VC_False => The_Totals.VCs_Proved_False (End_Type) := The_Totals.VCs_Proved_False (End_Type) + 1; Subprogram_Has_VC_Proved_False := True; Subprogram_Is_Undischarged := True; when VCDetails.VC_Proved_By_Examiner => Subprogram_Has_VC_Proved_By_Examiner := True; The_Totals.VCs_Proved_By_Examiner (End_Type) := The_Totals.VCs_Proved_By_Examiner (End_Type) + 1; when VCDetails.VC_Proved_By_Inference => Subprogram_Has_VC_Proved_By_Simplifier := True; The_Totals.VCs_Proved_By_Simplifier (End_Type) := The_Totals.VCs_Proved_By_Simplifier (End_Type) + 1; when VCDetails.VC_Proved_By_Contradiction => Subprogram_Has_VC_Proved_By_Contradiction := True; Subprogram_Has_VC_Proved_By_Simplifier := True; The_Totals.VCs_Proved_By_Simplifier (End_Type) := The_Totals.VCs_Proved_By_Simplifier (End_Type) + 1; when VCDetails.VC_Proved_By_Checker => Subprogram_Has_VC_Proved_By_Checker := True; The_Totals.VCs_Proved_By_Checker (End_Type) := The_Totals.VCs_Proved_By_Checker (End_Type) + 1; when VCDetails.VC_Proved_By_Victor => Subprogram_Has_VC_Proved_By_Victor := True; The_Totals.VCs_Proved_By_Victor (End_Type) := The_Totals.VCs_Proved_By_Victor (End_Type) + 1; when VCDetails.VC_Proved_By_Riposte => Subprogram_Has_VC_Proved_By_Riposte := True; The_Totals.VCs_Proved_By_Riposte (End_Type) := The_Totals.VCs_Proved_By_Riposte (End_Type) + 1; when VCDetails.VC_Proved_By_Review => Subprogram_Has_VC_Proved_By_Review := True; The_Totals.VCs_Proved_By_Review (End_Type) := The_Totals.VCs_Proved_By_Review (End_Type) + 1; when VCDetails.VC_Proved_Using_User_Proof_Rules => Subprogram_Has_VC_Proved_With_User_Proof_Rule := True; Subprogram_Has_VC_Proved_By_Simplifier := True; The_Totals.VCs_Proved_With_User_Proof_Rule (End_Type) := The_Totals.VCs_Proved_With_User_Proof_Rule (End_Type) + 1; The_Totals.VCs_Proved_By_Simplifier (End_Type) := The_Totals.VCs_Proved_By_Simplifier (End_Type) + 1; when VCDetails.VC_SIV_Not_Present | VCDetails.VC_Undischarged => The_Totals.VCs_Undischarged (End_Type) := The_Totals.VCs_Undischarged (End_Type) + 1; Subprogram_Is_Undischarged := True; when VCDetails.VC_Not_Present => null; end case; end if; end if; --# assert True; --# accept F, 41, "Expression is stable"; -- If we have a corrupt DPC file or anything related we don't do anything. if DPC and then not Something_Is_Corrupt_In_DPC then --# end accept; if DPC_State = VCDetails.DPC_Dead then -- Update the total number of subprograms containing -- dead paths. if not Subprogram_Contains_Dead_Paths then The_Totals.Subprograms_With_Dead_Paths := The_Totals.Subprograms_With_Dead_Paths + 1; end if; Subprogram_Contains_Dead_Paths := True; -- Update the total number of dead paths found. The_Totals.Number_Of_Dead_Paths := The_Totals.Number_Of_Dead_Paths + 1; end if; end if; --# assert True; VCHeap.Next (After_This => Heap_Index, Success => More_VCs, Next_One => Next_Index); Heap_Index := Next_Index; end loop; --# assert True; -- Update the 'At Least One' counts. 
if Subprogram_Is_Undischarged then if Subprogram_Has_VC_Proved_False then The_Totals.Subprograms_With_ALO_False_VC := The_Totals.Subprograms_With_ALO_False_VC + 1; else The_Totals.Subprograms_With_ALO_Undischarged_VC := The_Totals.Subprograms_With_ALO_Undischarged_VC + 1; end if; end if; --# assert True; if Subprogram_Has_VC_Proved_By_Examiner then The_Totals.Subprograms_With_ALO_Examiner_VC := The_Totals.Subprograms_With_ALO_Examiner_VC + 1; end if; --# assert True; if Subprogram_Has_VC_Proved_By_Simplifier then The_Totals.Subprograms_With_ALO_Simplifier_VC := The_Totals.Subprograms_With_ALO_Simplifier_VC + 1; end if; --# assert True; if Subprogram_Has_VC_Proved_By_Contradiction then The_Totals.Subprograms_With_ALO_Contradiction_VC := The_Totals.Subprograms_With_ALO_Contradiction_VC + 1; end if; --# assert True; if Subprogram_Has_VC_Proved_With_User_Proof_Rule then The_Totals.Subprograms_With_ALO_User_Rule_VC := The_Totals.Subprograms_With_ALO_User_Rule_VC + 1; end if; --# assert True; if Subprogram_Has_VC_Proved_By_Victor then The_Totals.Subprograms_With_ALO_Victor_VC := The_Totals.Subprograms_With_ALO_Victor_VC + 1; end if; --# assert True; if Subprogram_Has_VC_Proved_By_Riposte then The_Totals.Subprograms_With_ALO_Riposte_VC := The_Totals.Subprograms_With_ALO_Riposte_VC + 1; end if; --# assert True; if Subprogram_Has_VC_Proved_By_Checker then The_Totals.Subprograms_With_ALO_Checker_VC := The_Totals.Subprograms_With_ALO_Checker_VC + 1; end if; --# assert True; if Subprogram_Has_VC_Proved_By_Review then The_Totals.Subprograms_With_ALO_Review_VC := The_Totals.Subprograms_With_ALO_Review_VC + 1; end if; --# assert True; -- Update the proof strategy use hierarchy (See declaration of Total_Type) -- Examiner -> Simplifier -> User Rules -> ViCToR -> Checker -> Review if not Subprogram_Is_Undischarged then if Subprogram_Has_VC_Proved_By_Review then The_Totals.Subprograms_Proved_By_Review := The_Totals.Subprograms_Proved_By_Review + 1; elsif Subprogram_Has_VC_Proved_By_Checker then The_Totals.Subprograms_Proved_By_Checker := The_Totals.Subprograms_Proved_By_Checker + 1; elsif Subprogram_Has_VC_Proved_By_Victor then The_Totals.Subprograms_Proved_By_Victor := The_Totals.Subprograms_Proved_By_Victor + 1; elsif Subprogram_Has_VC_Proved_By_Riposte then The_Totals.Subprograms_Proved_By_Riposte := The_Totals.Subprograms_Proved_By_Riposte + 1; elsif Subprogram_Has_VC_Proved_With_User_Proof_Rule then The_Totals.Subprograms_Proved_With_User_Proof_Rule := The_Totals.Subprograms_Proved_With_User_Proof_Rule + 1; elsif Subprogram_Has_VC_Proved_By_Simplifier then The_Totals.Subprograms_Proved_By_Simplifier := The_Totals.Subprograms_Proved_By_Simplifier + 1; elsif Subprogram_Has_VC_Proved_By_Examiner then The_Totals.Subprograms_Proved_By_Examiner := The_Totals.Subprograms_Proved_By_Examiner + 1; end if; end if; --# assert True; --# accept F, 33, Unused_Path_End, "Unused_Path_End unused here" & --# F, 33, Unused_Path_Start, "Unused_Path_Start unused here" & --# F, 33, Unused_VC_Name, "Unused_VC_Name unused here"; end Update_Totals; procedure Output (Report_File : in SPARK_IO.File_Type; Temp_File : in out SPARK_IO.File_Type; Temp_False_File : in out SPARK_IO.File_Type; Temp_Contra_File : in out SPARK_IO.File_Type; Temp_Victor_File : in out SPARK_IO.File_Type; Temp_Riposte_File : in out SPARK_IO.File_Type; Temp_User_File : in out SPARK_IO.File_Type; Temp_Rlu_Error_File : in out SPARK_IO.File_Type; Temp_Rlu_Used_File : in out SPARK_IO.File_Type; Temp_PR_Verr_File : in out SPARK_IO.File_Type; Temp_Warn_Error_File : 
in out SPARK_IO.File_Type; Temp_SDP_Error_File : in out SPARK_IO.File_Type; Temp_DPC_Error_File : in out SPARK_IO.File_Type; Temp_Victor_Error_File : in out SPARK_IO.File_Type; Temp_Riposte_Error_File : in out SPARK_IO.File_Type) --# global in CommandLine.Data; --# in The_Totals; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLine.Data, --# Report_File, --# Temp_Contra_File, --# Temp_DPC_Error_File, --# Temp_False_File, --# Temp_File, --# Temp_PR_Verr_File, --# Temp_Riposte_Error_File, --# Temp_Riposte_File, --# Temp_Rlu_Error_File, --# Temp_Rlu_Used_File, --# Temp_SDP_Error_File, --# Temp_User_File, --# Temp_Victor_Error_File, --# Temp_Victor_File, --# Temp_Warn_Error_File, --# The_Totals & --# Temp_Contra_File, --# Temp_DPC_Error_File, --# Temp_False_File, --# Temp_File, --# Temp_PR_Verr_File, --# Temp_Riposte_Error_File, --# Temp_Riposte_File, --# Temp_Rlu_Error_File, --# Temp_Rlu_Used_File, --# Temp_SDP_Error_File, --# Temp_User_File, --# Temp_Victor_Error_File, --# Temp_Victor_File, --# Temp_Warn_Error_File from *; is Percent_Undischarged_Str : E_Strings.T; Percent_Proved_By_Examiner_Str : E_Strings.T; Percent_Proved_By_Victor_Str : E_Strings.T; Percent_Proved_By_Riposte_Str : E_Strings.T; Percent_Proved_By_Checker_Str : E_Strings.T; Percent_Proved_By_Review_Str : E_Strings.T; Percent_Simplified_Str : E_Strings.T; Percent_With_User_Rule_Str : E_Strings.T; Percent_Proved_False_Str : E_Strings.T; Total_Subprograms_Proved : Natural; subtype Valid_Column_Size is Integer range 4 .. Integer'Last; Name_Column_Width : constant Valid_Column_Size := 14; Column_Width : constant Valid_Column_Size := 11; subtype Rowname_String_Positions_T is E_Strings.Positions range E_Strings.Positions'First .. Name_Column_Width; subtype Colname_String_Positions_T is E_Strings.Positions range E_Strings.Positions'First .. Column_Width; subtype Table_Rowname_String_T is String (Rowname_String_Positions_T); subtype Table_Colname_String_T is String (Colname_String_Positions_T); -- Don't count the 0th column, only the data columns type Table_Column_Index is range 1 .. 10; type Table_Column_T is record Name : Table_Colname_String_T; Width : Valid_Column_Size; Brackets : Boolean; end record; type Table_Row_T is record Name : Table_Rowname_String_T; Nonzero_Only : Boolean; end record; type Table_Format_T is array (Table_Column_Index) of Table_Column_T; type Table_Row_Format_T is array (VCDetails.Terminal_Point_Type) of Table_Row_T; type Table_Column_Enabled_T is array (Table_Column_Index) of Boolean; -- Rationale for the widths: Total, Examiner and Simplifier -- will have big numbers (think iFACTS). The other colums -- should be much smaller; except maybe the Undischarged column -- as that is what you get from a pogs run on an unsimplified -- tree. Table_Format : constant Table_Format_T := Table_Format_T' (1 => Table_Column_T'("Total ", Column_Width, False), 2 => Table_Column_T'("Examiner ", Column_Width, False), 3 => Table_Column_T'("Simplifier ", Column_Width, False), 4 => Table_Column_T'("(User) ", 7, True), 5 => Table_Column_T'("Victor ", 7, False), 6 => Table_Column_T'("Riposte ", 8, False), 7 => Table_Column_T'("Checker ", 8, False), 8 => Table_Column_T'("Review ", 7, False), 9 => Table_Column_T'("False ", 6, False), 10 => Table_Column_T'("Undisc. 
", Column_Width, False)); Table_Column_Enabled : Table_Column_Enabled_T; Table_Row_Format : constant Table_Row_Format_T := Table_Row_Format_T' (VCDetails.Assert_Point => Table_Row_T'("Assert/Post ", False), VCDetails.Precondition_Check_Point => Table_Row_T'("Precondition ", False), VCDetails.Check_Statement_Point => Table_Row_T'("Check stmnt. ", False), VCDetails.Runtime_Check_Point => Table_Row_T'("Runtime check ", False), VCDetails.Refinement_VC_Point => Table_Row_T'("Refinem. VCs ", False), VCDetails.Inheritance_VC_Point => Table_Row_T'("Inherit. VCs ", False), VCDetails.Undetermined_Point => Table_Row_T'("Undetermined ", True)); Overall_Errors : Boolean := False; Overall_Warnings : Boolean := False; function Align_Right (E_Str : E_Strings.T; Width : E_Strings.Lengths) return E_Strings.T is Retval : E_Strings.T; begin Retval := E_Strings.Empty_String; if E_Strings.Get_Length (E_Str) < Width then for I in E_Strings.Lengths range E_Strings.Get_Length (E_Str) + 1 .. Width loop E_Strings.Append_Char (Retval, ' '); end loop; end if; E_Strings.Append_Examiner_String (Retval, E_Str); return Retval; end Align_Right; function Align_Left (E_Str : E_Strings.T; Width : E_Strings.Lengths) return E_Strings.T is Retval : E_Strings.T; begin Retval := E_Str; if E_Strings.Get_Length (E_Str) < Width then for I in E_Strings.Lengths range E_Strings.Get_Length (E_Str) + 1 .. Width loop E_Strings.Append_Char (Retval, ' '); end loop; end if; return Retval; end Align_Left; -- This procedure prints a table row containing integers. If -- Show_Warn_Tag is specified, it will also print the '<<<' -- look-out tag at the end of the row if there are any -- undischarged or false VCs. procedure Print_Table_Row (Title : in String; The_Total, Examiner, Simplifier, User_Rules, Victor, Riposte, Checker, Review, False_VCs, Undischarged : in Integer; Show_Warn_Tag : in Boolean) --# global in Report_File; --# in Table_Column_Enabled; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Checker, --# Examiner, --# False_VCs, --# Report_File, --# Review, --# Riposte, --# Show_Warn_Tag, --# Simplifier, --# Table_Column_Enabled, --# The_Total, --# Title, --# Undischarged, --# User_Rules, --# Victor; is Value : Integer; begin -- Row name. E_Strings.Put_String (Report_File, Align_Left (E_Strings.Copy_String (Title), Name_Column_Width)); -- Other columns. for I in Table_Column_Index loop if Table_Column_Enabled (I) then case I is when 1 => Value := The_Total; when 2 => Value := Examiner; when 3 => Value := Simplifier; when 4 => Value := User_Rules; when 5 => Value := Victor; when 6 => Value := Riposte; when 7 => Value := Checker; when 8 => Value := Review; when 9 => Value := False_VCs; when 10 => Value := Undischarged; end case; if Table_Format (I).Brackets then if Value > 0 then SPARK_IO.Put_Char (Report_File, '('); SPARK_IO.Put_Integer (Report_File, Value, Table_Format (I).Width - 2, 10); SPARK_IO.Put_Char (Report_File, ')'); else E_Strings.Put_String (Report_File, Align_Left (E_Strings.Empty_String, Table_Format (I).Width)); end if; else SPARK_IO.Put_Integer (Report_File, Value, Table_Format (I).Width, 10); end if; end if; end loop; -- Print the 'look-out' tag, if necessary. if Show_Warn_Tag and (Undischarged > 0 or False_VCs > 0) then SPARK_IO.Put_String (Report_File, " <<<", 0); end if; -- Final EOL. SPARK_IO.New_Line (Report_File, 1); end Print_Table_Row; -- This procedure prints a table row containing strings. 
procedure Print_Table_String_Row (Title : in String; The_Total, Examiner, Simplifier, User_Rules, Victor, Riposte, Checker, Review, False_VCs, Undischarged : in E_Strings.T) --# global in Report_File; --# in Table_Column_Enabled; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Checker, --# Examiner, --# False_VCs, --# Report_File, --# Review, --# Riposte, --# Simplifier, --# Table_Column_Enabled, --# The_Total, --# Title, --# Undischarged, --# User_Rules, --# Victor; is Value : E_Strings.T; begin -- Row name. E_Strings.Put_String (Report_File, Align_Left (E_Strings.Copy_String (Title), Name_Column_Width)); -- Other columns. for I in Table_Column_Index loop if Table_Column_Enabled (I) then case I is when 1 => Value := The_Total; when 2 => Value := Examiner; when 3 => Value := Simplifier; when 4 => Value := User_Rules; when 5 => Value := Victor; when 6 => Value := Riposte; when 7 => Value := Checker; when 8 => Value := Review; when 9 => Value := False_VCs; when 10 => Value := Undischarged; end case; if Table_Format (I).Brackets then if not E_Strings.Is_Empty (Value) then SPARK_IO.Put_Char (Report_File, '('); E_Strings.Put_String (Report_File, Align_Right (Value, Table_Format (I).Width - 2)); SPARK_IO.Put_Char (Report_File, ')'); else E_Strings.Put_String (Report_File, Align_Left (E_Strings.Empty_String, Table_Format (I).Width)); end if; else E_Strings.Put_String (Report_File, Align_Right (Value, Table_Format (I).Width)); end if; end if; end loop; -- Final EOL. SPARK_IO.New_Line (Report_File, 1); end Print_Table_String_Row; -- This procedure prints the table column names. procedure Print_Table_Head --# global in Report_File; --# in Table_Column_Enabled; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Report_File, --# Table_Column_Enabled; is Tmp : E_Strings.T; begin -- Skip the 'row name' column for I in Integer range 1 .. Name_Column_Width loop SPARK_IO.Put_Char (Report_File, ' '); end loop; -- Other columns for I in Table_Column_Index loop if Table_Column_Enabled (I) then Tmp := E_Strings.Trim (E_Strings.Section (E_Str => E_Strings.Copy_String (Table_Format (I).Name), Start_Pos => E_Strings.Positions'First, Length => Table_Format (I).Width)); E_Strings.Put_String (Report_File, Align_Right (Tmp, Table_Format (I).Width)); end if; end loop; -- Final EOL SPARK_IO.New_Line (Report_File, 1); end Print_Table_Head; procedure Print_Table_Separator --# global in Report_File; --# in Table_Column_Enabled; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Report_File, --# Table_Column_Enabled; is begin -- Fill the 'name' column for I in Integer range 1 .. Name_Column_Width loop SPARK_IO.Put_Char (Report_File, '='); end loop; -- Other columns for I in Table_Column_Index loop if Table_Column_Enabled (I) then for J in Integer range 1 .. 
Table_Format (I).Width loop SPARK_IO.Put_Char (Report_File, '='); end loop; end if; end loop; -- Final EOL SPARK_IO.New_Line (Report_File, 1); end Print_Table_Separator; procedure Print_Final_Summary --# global in Overall_Errors; --# in Overall_Warnings; --# in The_Totals; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Overall_Errors, --# Overall_Warnings, --# The_Totals; is Total_VCs : Natural; False_VCs : Natural; Undisc_VCs : Natural; begin Total_VCs := Sum (The_Totals.VCs_Total); False_VCs := Sum (The_Totals.VCs_Proved_False); Undisc_VCs := Sum (The_Totals.VCs_Undischarged); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "----- BEGIN PROOF SUMMARY -----", 0); SPARK_IO.Put_String (SPARK_IO.Standard_Output, "VCs discharged: ", 0); SPARK_IO.Put_Integer (SPARK_IO.Standard_Output, Total_VCs - (False_VCs + Undisc_VCs), 8, 10); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_String (SPARK_IO.Standard_Output, "VCs false: ", 0); SPARK_IO.Put_Integer (SPARK_IO.Standard_Output, False_VCs, 8, 10); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_String (SPARK_IO.Standard_Output, "VCs undischarged: ", 0); SPARK_IO.Put_Integer (SPARK_IO.Standard_Output, Undisc_VCs, 8, 10); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Warnings: ", 0); if Overall_Warnings then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "YES", 0); else SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "No", 0); end if; SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Errors: ", 0); if Overall_Errors then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "YES", 0); else SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "No", 0); end if; SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "----- END PROOF SUMMARY -----", 0); end Print_Final_Summary; -- The purpose of this procedure is to read back one of the -- various temporary files created and produce a simple -- listing. -- There are two main file formats, one with number of VCs and -- one without. The Has_Numbers parameter indicates which one -- to expect. 
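-- Purely illustrative sketch of the two formats (hypothetical content, not
-- taken from a real run): a file read with Has_Numbers = True is assumed to
-- hold a VC count followed by a subprogram name on each line, e.g.
--    3 /path/to/stack/push
-- whereas a file read with Has_Numbers = False is assumed to hold one bare
-- name per line, e.g.
--    /path/to/rules/stack.rlu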
procedure Regurgitate_Temp_File (The_Temp_File : in out SPARK_IO.File_Type; The_String_1 : in String; The_String_2 : in String; Has_Numbers : in Boolean) --# global in Report_File; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Has_Numbers, --# Report_File, --# The_String_1, --# The_String_2, --# The_Temp_File & --# The_Temp_File from *; is Temp_Store_Int : Integer; Temp_Store_String : E_Strings.T; Temp_Store_Bool : Boolean; Temp_Status : SPARK_IO.File_Status; begin --# accept F, 10, Temp_Status, "Temp_Status not used here" & --# F, 10, Temp_Store_Bool, "Temp_Store_Bool not used here"; SPARK_IO.Reset (The_Temp_File, SPARK_IO.In_File, Temp_Status); if not SPARK_IO.End_Of_File (The_Temp_File) then SPARK_IO.Put_Line (Report_File, The_String_1, 0); if The_String_2 /= "" then SPARK_IO.Put_Line (Report_File, The_String_2, 0); end if; SPARK_IO.New_Line (Report_File, 1); while not SPARK_IO.End_Of_File (The_Temp_File) loop --# accept F, 41, "No need to write two loops here."; if Has_Numbers then SPARK_IO.Get_Integer (The_Temp_File, Temp_Store_Int, 4, Temp_Store_Bool); SPARK_IO.Put_Integer (Report_File, Temp_Store_Int, 4, 10); SPARK_IO.Put_String (Report_File, " ", 0); end if; E_Strings.Get_Line (File => The_Temp_File, E_Str => Temp_Store_String); if Has_Numbers or E_Strings.Get_Length (Temp_Store_String) > 0 then if not Has_Numbers then SPARK_IO.Put_String (Report_File, " ", 0); end if; E_Strings.Put_String (File => Report_File, E_Str => Temp_Store_String); SPARK_IO.New_Line (Report_File, 1); end if; --# end accept; end loop; SPARK_IO.New_Line (Report_File, 1); end if; --# accept F, 33, Temp_Store_Bool, "Temp_Store_Bool is not needed." & --# F, 33, Temp_Status, "Temp_Status is not needed."; end Regurgitate_Temp_File; begin SPARK_IO.Put_Line (Report_File, Banner.MajorSeparatorLine, 0); SPARK_IO.Put_Line (Report_File, "Summary:", 0); SPARK_IO.New_Line (Report_File, 1); -- We want to print the file summaries in roughly two blocks: -- Warnings/Notes and actual errors. 
First, the warnings and -- non-critical errors: -- print out any used user-defined rule files Regurgitate_Temp_File (The_Temp_File => Temp_Rlu_Used_File, The_String_1 => "The following user-defined rule files have been used:", The_String_2 => "", Has_Numbers => False); -- print out the file names and numbers of any VC proved by a user-defined proof rule Regurgitate_Temp_File (The_Temp_File => Temp_User_File, The_String_1 => "The following subprograms have VCs proved using a user-defined proof rule:", The_String_2 => "", Has_Numbers => True); -- print out the file names and numbers of any VC proved by contradiction Regurgitate_Temp_File (The_Temp_File => Temp_Contra_File, The_String_1 => "The following subprograms have VCs proved by contradiction:", The_String_2 => "", Has_Numbers => True); -- print out the file names and numbers of any VC proved by ViCToR Regurgitate_Temp_File (The_Temp_File => Temp_Victor_File, The_String_1 => "The following subprograms have VCs proved by Victor:", The_String_2 => "", Has_Numbers => True); -- print out the file names and numbers of any VC proved by Riposte Regurgitate_Temp_File (The_Temp_File => Temp_Riposte_File, The_String_1 => "The following subprograms have VCs proved by Riposte:", The_String_2 => "", Has_Numbers => True); -- print out the names of any missing SDP files Regurgitate_Temp_File (The_Temp_File => Temp_SDP_Error_File, The_String_1 => "***WARNING: The following DPC files have not been ZombieScoped:", The_String_2 => "", Has_Numbers => False); -- print out the names of any missing DPC files Regurgitate_Temp_File (The_Temp_File => Temp_DPC_Error_File, The_String_1 => "***WARNING: The following DPC files are missing:", The_String_2 => "", Has_Numbers => False); -- Secondly, we print the actual errors (those indicating that -- your code is definitely wrong): -- print out the names of files containing warnings or errors Regurgitate_Temp_File (The_Temp_File => Temp_Warn_Error_File, The_String_1 => "***WARNING: The following files, or their absence, raised warnings or errors:", The_String_2 => "", Has_Numbers => True); -- print out the names of any user rule files containing syntax errors Regurgitate_Temp_File (The_Temp_File => Temp_Rlu_Error_File, The_String_1 => "***WARNING: The following user defined rule files contain syntax errors:", The_String_2 => "", Has_Numbers => False); -- print out the file names and numbers of any false VC Regurgitate_Temp_File (The_Temp_File => Temp_False_File, The_String_1 => "The following subprograms have VCs proved false:", The_String_2 => "", Has_Numbers => True); -- print out the file names and numbers of any undischarged VC (excluding those proved false) Regurgitate_Temp_File (The_Temp_File => Temp_File, The_String_1 => "The following subprograms have undischarged VCs (excluding those proved false):", The_String_2 => "", Has_Numbers => True); -- print out the file names and numbers of any files with review errors Regurgitate_Temp_File (The_Temp_File => Temp_PR_Verr_File, The_String_1 => "***WARNING: The PRV file(s) associated with the following subprograms may be out of date", The_String_2 => "The following subprograms have review files containing VCs already proved elsewhere:", Has_Numbers => True); -- print out all victor errors Regurgitate_Temp_File (The_Temp_File => Temp_Victor_Error_File, The_String_1 => "***WARNING: Please note the following warnings and/or errors surrounding victor files:", The_String_2 => "", Has_Numbers => False); -- print out all Riposte errors Regurgitate_Temp_File (The_Temp_File => 
Temp_Riposte_Error_File, The_String_1 => "***WARNING: Please note the following warnings and/or errors surrounding Riposte files:", The_String_2 => "", Has_Numbers => False); --# assert True; -- Print a summary of the number of subprograms conatining at -- least one instance of the following: SPARK_IO.Put_Line (Report_File, "Proof strategies used by subprograms", 0); SPARK_IO.Put_Line (Report_File, "-------------------------------------------------------------------------", 0); SPARK_IO.Put_String (Report_File, "Total subprograms with at least one VC proved by examiner: ", 0); SPARK_IO.Put_Integer (Report_File, The_Totals.Subprograms_With_ALO_Examiner_VC, 4, 10); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "Total subprograms with at least one VC proved by simplifier: ", 0); SPARK_IO.Put_Integer (Report_File, The_Totals.Subprograms_With_ALO_Simplifier_VC, 4, 10); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "Total subprograms with at least one VC proved by contradiction: ", 0); SPARK_IO.Put_Integer (Report_File, The_Totals.Subprograms_With_ALO_Contradiction_VC, 4, 10); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "Total subprograms with at least one VC proved with user proof rule: ", 0); SPARK_IO.Put_Integer (Report_File, The_Totals.Subprograms_With_ALO_User_Rule_VC, 4, 10); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "Total subprograms with at least one VC proved by Victor: ", 0); SPARK_IO.Put_Integer (Report_File, The_Totals.Subprograms_With_ALO_Victor_VC, 4, 10); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "Total subprograms with at least one VC proved by Riposte: ", 0); SPARK_IO.Put_Integer (Report_File, The_Totals.Subprograms_With_ALO_Riposte_VC, 4, 10); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "Total subprograms with at least one VC proved using checker: ", 0); SPARK_IO.Put_Integer (Report_File, The_Totals.Subprograms_With_ALO_Checker_VC, 4, 10); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "Total subprograms with at least one VC discharged by review: ", 0); SPARK_IO.Put_Integer (Report_File, The_Totals.Subprograms_With_ALO_Review_VC, 4, 10); SPARK_IO.New_Line (Report_File, 1); --# assert True; -- Print out hierarchy of proof strategy use (see declaration of Total_Type) -- Examiner -> Simplifier -> User Rules -> ViCToR -> Checker -> Review SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_Line (Report_File, "Maximum extent of strategies used for fully proved subprograms:", 0); SPARK_IO.Put_Line (Report_File, "-------------------------------------------------------------------------", 0); SPARK_IO.Put_String (Report_File, "Total subprograms with proof completed by examiner: ", 0); SPARK_IO.Put_Integer (Report_File, The_Totals.Subprograms_Proved_By_Examiner, 4, 10); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "Total subprograms with proof completed by simplifier: ", 0); SPARK_IO.Put_Integer (Report_File, The_Totals.Subprograms_Proved_By_Simplifier, 4, 10); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "Total subprograms with proof completed with user defined rules: ", 0); SPARK_IO.Put_Integer (Report_File, The_Totals.Subprograms_Proved_With_User_Proof_Rule, 4, 10); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "Total subprograms with proof completed by Victor: ", 0); SPARK_IO.Put_Integer (Report_File, 
The_Totals.Subprograms_Proved_By_Victor, 4, 10); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "Total subprograms with proof completed by Riposte: ", 0); SPARK_IO.Put_Integer (Report_File, The_Totals.Subprograms_Proved_By_Riposte, 4, 10); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "Total subprograms with proof completed by checker: ", 0); SPARK_IO.Put_Integer (Report_File, The_Totals.Subprograms_Proved_By_Checker, 4, 10); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "Total subprograms with VCs discharged by review: ", 0); SPARK_IO.Put_Integer (Report_File, The_Totals.Subprograms_Proved_By_Review, 4, 10); SPARK_IO.New_Line (Report_File, 1); -- BUG: This could, in theory, overflow. Total_Subprograms_Proved := ((((((The_Totals.Subprograms_Proved_By_Examiner + The_Totals.Subprograms_Proved_By_Simplifier) + The_Totals.Subprograms_Proved_With_User_Proof_Rule) + The_Totals.Subprograms_Proved_By_Checker) + The_Totals.Subprograms_Proved_By_Review) + The_Totals.Subprograms_Proved_By_Victor) + The_Totals.Subprograms_Proved_By_Riposte); --# assert Total_Subprograms_Proved = --# The_Totals.Subprograms_Proved_By_Examiner + --# The_Totals.Subprograms_Proved_By_Simplifier + --# The_Totals.Subprograms_Proved_With_User_Proof_Rule + --# The_Totals.Subprograms_Proved_By_Checker + --# The_Totals.Subprograms_Proved_By_Review + --# The_Totals.Subprograms_Proved_By_Victor + --# The_Totals.Subprograms_Proved_By_Riposte; SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_Line (Report_File, "Overall subprogram summary:", 0); SPARK_IO.Put_Line (Report_File, "-------------------------------------------------------------------------", 0); SPARK_IO.Put_String (Report_File, "Total subprograms fully proved: ", 0); SPARK_IO.Put_Integer (Report_File, Total_Subprograms_Proved, 4, 10); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "Total subprograms with at least one undischarged VC: ", 0); SPARK_IO.Put_Integer (Report_File, The_Totals.Subprograms_With_ALO_Undischarged_VC, 4, 10); if The_Totals.Subprograms_With_ALO_Undischarged_VC > 0 then SPARK_IO.Put_String (Report_File, " <<<", 0); end if; SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "Total subprograms with at least one false VC: ", 0); SPARK_IO.Put_Integer (Report_File, The_Totals.Subprograms_With_ALO_False_VC, 4, 10); if The_Totals.Subprograms_With_ALO_False_VC > 0 then SPARK_IO.Put_String (Report_File, " <<<", 0); end if; SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, " -----", 0); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "Total subprograms for which VCs have been generated: ", 0); SPARK_IO.Put_Integer (Report_File, The_Totals.Subprograms_With_VCs, 4, 10); SPARK_IO.New_Line (Report_File, 2); --# assert Total_Subprograms_Proved = --# The_Totals.Subprograms_Proved_By_Examiner + --# The_Totals.Subprograms_Proved_By_Simplifier + --# The_Totals.Subprograms_Proved_With_User_Proof_Rule + --# The_Totals.Subprograms_Proved_By_Checker + --# The_Totals.Subprograms_Proved_By_Review + --# The_Totals.Subprograms_Proved_By_Victor + --# The_Totals.Subprograms_Proved_By_Riposte; -- Only report errors if there are some. 
if The_Totals.Subprograms_Where_VC_Analysis_Abandoned > 0 or else The_Totals.Subprograms_Where_DPC_Analysis_Abandoned > 0 or else The_Totals.Subprograms_Where_Error (VCDetails.Missing_SLG_File) > 0 or else The_Totals.Subprograms_Where_Error (VCDetails.Missing_VLG_File) > 0 then SPARK_IO.Put_Line (Report_File, "WARNING: Overall error summary:", 0); SPARK_IO.Put_Line (Report_File, "-------------------------------------------------------------------------", 0); SPARK_IO.Put_String (Report_File, "Total simplified subprograms with missing slg file: ", 0); SPARK_IO.Put_Integer (Report_File, The_Totals.Subprograms_Where_Error (VCDetails.Missing_SLG_File), 7, 10); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "Total victored subprograms with missing vlg file: ", 0); SPARK_IO.Put_Integer (Report_File, The_Totals.Subprograms_Where_Error (VCDetails.Missing_VLG_File), 7, 10); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "Total subprograms where VC analysis was abandoned due to errors: ", 0); SPARK_IO.Put_Integer (Report_File, The_Totals.Subprograms_Where_VC_Analysis_Abandoned, 4, 10); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "Total subprograms where DPC analysis was abandoned due to errors: ", 0); SPARK_IO.Put_Integer (Report_File, The_Totals.Subprograms_Where_DPC_Analysis_Abandoned, 3, 10); SPARK_IO.New_Line (Report_File, 2); Overall_Errors := True; else -- One blank line between each table in this group, but double blank line -- after the last table. SPARK_IO.New_Line (Report_File, 1); end if; -- The sum of the subprograms fully proved, -- the subprograms with at least undischarged VC and -- the subprograms with at least 1 false VC must equal -- the number of subprograms for which VCs have been generated. SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); --# assert Total_Subprograms_Proved = --# The_Totals.Subprograms_Proved_By_Examiner + --# The_Totals.Subprograms_Proved_By_Simplifier + --# The_Totals.Subprograms_Proved_With_User_Proof_Rule + --# The_Totals.Subprograms_Proved_By_Checker + --# The_Totals.Subprograms_Proved_By_Review + --# The_Totals.Subprograms_Proved_By_Victor + --# The_Totals.Subprograms_Proved_By_Riposte; -- Check the totals are balanced. if not Totals_Are_Balanced then Fatal_Error (Error => FatalErrors.Subprogram_Totals_Inconsistent); end if; --# assert True; SPARK_IO.Put_Line (Report_File, "ZombieScope Summary:", 0); SPARK_IO.Put_Line (Report_File, "-------------------------------------------------------------------------", 0); SPARK_IO.Put_String (Report_File, "Total subprograms for which DPCs have been generated:", 0); SPARK_IO.Put_Integer (Report_File, The_Totals.Subprograms_With_DPCs, 20, 10); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "Total number subprograms with dead paths found:", 0); SPARK_IO.Put_Integer (Report_File, The_Totals.Subprograms_With_Dead_Paths, 26, 10); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "Total number of dead paths found:", 0); SPARK_IO.Put_Integer (Report_File, The_Totals.Number_Of_Dead_Paths, 40, 10); SPARK_IO.New_Line (Report_File, 3); -- Issue warning message if some DPC files have not been analysed. 
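-- Illustrative shape of the VC summary table printed below (layout is
-- approximate; a column is omitted entirely when its overall total is zero):
--    Total  Examiner  Simplifier  (User)  Victor  Riposte  Checker  Review  False  Undisc.
-- with one row per kind of VC (Assert/Post, Precondition, Check stmnt.,
-- Runtime check, Refinem. VCs, Inherit. VCs, Undetermined), followed by a
-- separator, a 'Totals:' row and, when requested, a '%Totals:' row.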
--# assert True; SPARK_IO.Put_Line (Report_File, "VC summary:", 0); SPARK_IO.Put_Line (Report_File, "-------------------------------------------------------------------------", 0); SPARK_IO.Put_Line (Report_File, "Note: (User) denotes where the Simplifier has proved VCs using one or", 0); SPARK_IO.Put_Line (Report_File, " more user-defined proof rules.", 0); SPARK_IO.New_Line (Report_File, 1); Table_Column_Enabled := Table_Column_Enabled_T' (1 => True, 2 => Sum (The_Totals.VCs_Proved_By_Examiner) > 0, 3 => Sum (The_Totals.VCs_Proved_By_Simplifier) > 0, 4 => Sum (The_Totals.VCs_Proved_With_User_Proof_Rule) > 0, 5 => Sum (The_Totals.VCs_Proved_By_Victor) > 0, 6 => Sum (The_Totals.VCs_Proved_By_Riposte) > 0, 7 => Sum (The_Totals.VCs_Proved_By_Checker) > 0, 8 => Sum (The_Totals.VCs_Proved_By_Review) > 0, 9 => Sum (The_Totals.VCs_Proved_False) > 0, 10 => Sum (The_Totals.VCs_Undischarged) > 0); SPARK_IO.Put_Line (Report_File, "Total VCs by type:", 0); SPARK_IO.Put_Line (Report_File, "------------------", 0); Print_Table_Head; for R in VCDetails.Terminal_Point_Type loop if not Table_Row_Format (R).Nonzero_Only or The_Totals.VCs_Total (R) > 0 then Print_Table_Row (Title => Table_Row_Format (R).Name, The_Total => The_Totals.VCs_Total (R), Examiner => The_Totals.VCs_Proved_By_Examiner (R), Simplifier => The_Totals.VCs_Proved_By_Simplifier (R), User_Rules => The_Totals.VCs_Proved_With_User_Proof_Rule (R), Victor => The_Totals.VCs_Proved_By_Victor (R), Riposte => The_Totals.VCs_Proved_By_Riposte (R), Checker => The_Totals.VCs_Proved_By_Checker (R), Review => The_Totals.VCs_Proved_By_Review (R), False_VCs => The_Totals.VCs_Proved_False (R), Undischarged => The_Totals.VCs_Undischarged (R), Show_Warn_Tag => False); end if; end loop; Print_Table_Separator; Print_Table_Row (Title => "Totals:", The_Total => Sum (The_Totals.VCs_Total), Examiner => Sum (The_Totals.VCs_Proved_By_Examiner), Simplifier => Sum (The_Totals.VCs_Proved_By_Simplifier), User_Rules => Sum (The_Totals.VCs_Proved_With_User_Proof_Rule), Victor => Sum (The_Totals.VCs_Proved_By_Victor), Riposte => Sum (The_Totals.VCs_Proved_By_Riposte), Checker => Sum (The_Totals.VCs_Proved_By_Checker), Review => Sum (The_Totals.VCs_Proved_By_Review), False_VCs => Sum (The_Totals.VCs_Proved_False), Undischarged => Sum (The_Totals.VCs_Undischarged), Show_Warn_Tag => True); --# assert True; if CommandLine.Data.OutputPercentUndischarged and then Sum (The_Totals.VCs_Total) /= 0 then Calculate_Percentages (The_Totals => The_Totals, Percent_Undischarged_Str => Percent_Undischarged_Str, Percent_Proved_By_Examiner_Str => Percent_Proved_By_Examiner_Str, Percent_Proved_By_Victor_Str => Percent_Proved_By_Victor_Str, Percent_Proved_By_Riposte_Str => Percent_Proved_By_Riposte_Str, Percent_Proved_By_Checker_Str => Percent_Proved_By_Checker_Str, Percent_Proved_By_Review_Str => Percent_Proved_By_Review_Str, Percent_Simplified_Str => Percent_Simplified_Str, Percent_With_User_Rule_Str => Percent_With_User_Rule_Str, Percent_Proved_False_Str => Percent_Proved_False_Str); if Sum (The_Totals.VCs_Proved_With_User_Proof_Rule) = 0 then Percent_With_User_Rule_Str := E_Strings.Empty_String; end if; Print_Table_String_Row (Title => "%Totals:", The_Total => E_Strings.Empty_String, Examiner => Percent_Proved_By_Examiner_Str, Simplifier => Percent_Simplified_Str, User_Rules => Percent_With_User_Rule_Str, Victor => Percent_Proved_By_Victor_Str, Riposte => Percent_Proved_By_Riposte_Str, Checker => Percent_Proved_By_Checker_Str, Review => Percent_Proved_By_Review_Str, False_VCs => 
Percent_Proved_False_Str, Undischarged => Percent_Undischarged_Str); end if; --# assert True; -- If we used or tried to use Victor or Riposte, we print a warning if The_Totals.Subprograms_With_ALO_Victor_VC > 0 or else The_Totals.Subprograms_Where_Error (VCDetails.Missing_VLG_File) > 0 or else The_Totals.Subprograms_Where_Error (VCDetails.Corrupt_VCT_File) > 0 or else The_Totals.Subprograms_Where_Error (VCDetails.Corrupt_VLG_File) > 0 then Overall_Warnings := True; SPARK_IO.Put_Line (Report_File, "!!! WARNING: Experimental feature used: Proof by Victor", 0); end if; if The_Totals.Subprograms_Where_Error (VCDetails.Missing_VLG_File) > 0 or else The_Totals.Subprograms_Where_Error (VCDetails.Corrupt_VCT_File) > 0 or else The_Totals.Subprograms_Where_Error (VCDetails.Corrupt_VLG_File) > 0 then Overall_Errors := True; end if; if The_Totals.Subprograms_With_ALO_Riposte_VC > 0 or else The_Totals.Subprograms_Where_Error (VCDetails.Corrupt_RSM_File) > 0 then Overall_Warnings := True; SPARK_IO.Put_Line (Report_File, "!!! WARNING: Experimental feature used: Proof with Riposte", 0); end if; if The_Totals.Subprograms_Where_Error (VCDetails.Corrupt_RSM_File) > 0 then Overall_Errors := True; end if; --# assert True; -- Only report errors if there are some. if The_Totals.Subprograms_Where_VC_Analysis_Abandoned > 0 then Overall_Errors := True; SPARK_IO.Put_Line (Report_File, "!!! ERRORS IN FILES RELATED TO ANALYSIS OF VCs; as below:", 0); for X in VCDetails.Error_Type_Missing_VC_Files loop if The_Totals.Subprograms_Where_Error (X) > 0 then case X is when VCDetails.Missing_SLG_File => SPARK_IO.Put_String (Report_File, "!!! Number of missing SLG (simplifier log) files: ", 0); when VCDetails.Missing_VLG_File => SPARK_IO.Put_String (Report_File, "!!! Number of missing VLG (ViCToR log) files: ", 0); end case; SPARK_IO.Put_Integer (Report_File, The_Totals.Subprograms_Where_Error (X), 8, 10); SPARK_IO.New_Line (Report_File, 1); end if; end loop; for X in VCDetails.Error_Type_Corrupt_VC_Files loop if The_Totals.Subprograms_Where_Error (X) > 0 then case X is when VCDetails.Corrupt_VCG_File => SPARK_IO.Put_String (Report_File, "!!! Number of erroneous VCG files: ", 0); when VCDetails.Corrupt_SIV_File => SPARK_IO.Put_String (Report_File, "!!! Number of erroneous SIV (simplified) files: ", 0); when VCDetails.Corrupt_SLG_File => SPARK_IO.Put_String (Report_File, "!!! Number of erroneous SLG (simplifier log) files: ", 0); when VCDetails.Corrupt_VCT_File => SPARK_IO.Put_String (Report_File, "!!! Number of erroneous VCT (Victor) files: ", 0); when VCDetails.Corrupt_VLG_File => SPARK_IO.Put_String (Report_File, "!!! Number of erroneous VLG (Victor log) files: ", 0); when VCDetails.Corrupt_RSM_File => SPARK_IO.Put_String (Report_File, "!!! Number of erroneous RSM (Riposte summary) files: ", 0); when VCDetails.Corrupt_PLG_File => SPARK_IO.Put_String (Report_File, "!!! Number of erroneous PLG (checker proof log) files: ", 0); when VCDetails.Corrupt_PRV_File => SPARK_IO.Put_String (Report_File, "!!! Number of erroneous PRV (manual proof review) files: ", 0); end case; SPARK_IO.Put_Integer (Report_File, The_Totals.Subprograms_Where_Error (X), 8, 10); SPARK_IO.New_Line (Report_File, 1); end if; end loop; end if; --# assert True; if The_Totals.Subprograms_Where_DPC_Analysis_Abandoned > 0 then Overall_Errors := True; SPARK_IO.Put_String (Report_File, "!!! 
ERRORS IN DPC FILES: !!!", 0); SPARK_IO.Put_Integer (Report_File, The_Totals.Subprograms_Where_DPC_Analysis_Abandoned, 8, 10); SPARK_IO.New_Line (Report_File, 1); end if; SPARK_IO.New_Line (Report_File, 1); --# assert True; Print_Final_Summary; end Output; begin The_Totals := Total_Type' (Subprograms_With_VCs => 0, Subprograms_Where_Error => Error_Type_Counter'(others => 0), Subprograms_Where_VC_Analysis_Abandoned => 0, Subprograms_Where_DPC_Analysis_Abandoned => 0, Subprograms_With_ALO_Undischarged_VC => 0, Subprograms_With_ALO_Examiner_VC => 0, Subprograms_With_ALO_Simplifier_VC => 0, Subprograms_With_ALO_Contradiction_VC => 0, Subprograms_With_ALO_User_Rule_VC => 0, Subprograms_With_ALO_Victor_VC => 0, Subprograms_With_ALO_Riposte_VC => 0, Subprograms_With_ALO_Checker_VC => 0, Subprograms_With_ALO_Review_VC => 0, Subprograms_With_ALO_False_VC => 0, Subprograms_Proved_By_Examiner => 0, Subprograms_Proved_By_Simplifier => 0, Subprograms_Proved_By_Victor => 0, Subprograms_Proved_By_Riposte => 0, Subprograms_Proved_By_Checker => 0, Subprograms_Proved_With_User_Proof_Rule => 0, Subprograms_Proved_By_Review => 0, VCs_Total => Null_VC_Counter, VCs_Proved_By_Examiner => Null_VC_Counter, VCs_Proved_By_Simplifier => Null_VC_Counter, VCs_Proved_By_Victor => Null_VC_Counter, VCs_Proved_By_Riposte => Null_VC_Counter, VCs_Proved_By_Checker => Null_VC_Counter, VCs_Proved_With_User_Proof_Rule => Null_VC_Counter, VCs_Proved_By_Review => Null_VC_Counter, VCs_Proved_False => Null_VC_Counter, VCs_Undischarged => Null_VC_Counter, Subprograms_With_DPCs => 0, Subprograms_With_Dead_Paths => 0, Number_Of_Dead_Paths => 0); end Total; spark-2012.0.deb/pogs/toppackage.adb0000644000175000017500000003403611753202340016200 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= --------------------------------------------------------------------------------- --Synopsis: -- -- -- --Top level package for the summary tool -- --The main program is simply a call to the procedure Main_Procedure wrapped in -- --an exception handler to deal with the exceptions raised by FatalErrors -- --(an illustrative sketch of such a driver is given at the end of this file) -- -- -- --------------------------------------------------------------------------------- with Banner; with CommandLine; with E_Strings; with FatalErrors; with FileDetails; with FileHeap; with FindFiles; with Heap; with SPARK_IO; with Total; with VCHeap; with VCS; use type FileDetails.FileTypes; use type SPARK_IO.File_Status; package body TopPackage is procedure Main_Procedure is Unused_Directory_Is_Resolved : Boolean; Filename : E_Strings.T; File_Type : FileDetails.FileTypes; Next_File : Heap.Atom; Report_File : SPARK_IO.File_Type := SPARK_IO.Null_File; Status : SPARK_IO.File_Status; Success : Boolean; This_File : Heap.Atom; Temp_File : SPARK_IO.File_Type := SPARK_IO.Null_File; Temp_False_File : SPARK_IO.File_Type := SPARK_IO.Null_File; Temp_Contra_File : SPARK_IO.File_Type := SPARK_IO.Null_File; Temp_Victor_File : SPARK_IO.File_Type := SPARK_IO.Null_File; Temp_Riposte_File : SPARK_IO.File_Type := SPARK_IO.Null_File; Temp_User_File : SPARK_IO.File_Type := SPARK_IO.Null_File; Temp_Rlu_Error_File : SPARK_IO.File_Type := SPARK_IO.Null_File; Temp_Rlu_Used_File : SPARK_IO.File_Type := SPARK_IO.Null_File; Temp_PR_Verr_File : SPARK_IO.File_Type := SPARK_IO.Null_File; Temp_Warn_Error_File : SPARK_IO.File_Type := SPARK_IO.Null_File; Temp_SDP_Error_File : SPARK_IO.File_Type := SPARK_IO.Null_File; Temp_DPC_Error_File : SPARK_IO.File_Type := SPARK_IO.Null_File; Temp_Victor_Error_File : SPARK_IO.File_Type := SPARK_IO.Null_File; Temp_Riposte_Error_File : SPARK_IO.File_Type := SPARK_IO.Null_File; Status_Temp : SPARK_IO.File_Status; begin VCHeap.Initialize; CommandLine.Read; if CommandLine.Data.VersionRequested then -- print banner on screen Banner.ReportVersion; else Banner.Screen; if not CommandLine.Data.Valid then FatalErrors.Process (FatalErrors.Invalid_Command_Line, E_Strings.Empty_String); end if; -- read file structure SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Reading file structure ...", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); FindFiles.Scan (CommandLine.Data.StartDirectory); -- open report file E_Strings.Create (File => Report_File, Name_Of_File => CommandLine.Data.ReportFile, Form_Of_File => "", Status => Status); if Status /= SPARK_IO.Ok then FatalErrors.Process (FatalErrors.Could_Not_Create_Report_File, CommandLine.Data.ReportFile); end if; -- print the report file banner Banner.Report (Report_File); -- process the files This_File := FileHeap.FirstEntry; Success := True; -- create temporary file to store names of files with undischarged vcs SPARK_IO.Create (Temp_File, 0, "", "", Status_Temp); if Status_Temp /= SPARK_IO.Ok then FatalErrors.Process (FatalErrors.Problem_Creating_Temp_File, E_Strings.Empty_String); end if; --# assert True; -- create temporary file to store names of files with false vcs SPARK_IO.Create (Temp_False_File, 0, "", "", Status_Temp); if Status_Temp /= SPARK_IO.Ok then FatalErrors.Process (FatalErrors.Problem_Creating_Temp_File, E_Strings.Empty_String); end if; -- create temporary file to store names of files with vcs proved by contradiction SPARK_IO.Create (Temp_Contra_File, 0, "", "", Status_Temp); if Status_Temp /= SPARK_IO.Ok then FatalErrors.Process 
(FatalErrors.Problem_Creating_Temp_File, E_Strings.Empty_String); end if; -- create temporary file to store names of files with vcs proved by Victor SPARK_IO.Create (Temp_Victor_File, 0, "", "", Status_Temp); if Status_Temp /= SPARK_IO.Ok then FatalErrors.Process (FatalErrors.Problem_Creating_Temp_File, E_Strings.Empty_String); end if; -- create temporary file to store names of files with vcs proved by Riposte SPARK_IO.Create (Temp_Riposte_File, 0, "", "", Status_Temp); if Status_Temp /= SPARK_IO.Ok then FatalErrors.Process (FatalErrors.Problem_Creating_Temp_File, E_Strings.Empty_String); end if; -- create temporary file to store names of files with vcs proved using a -- user-defined proof rule SPARK_IO.Create (Temp_User_File, 0, "", "", Status_Temp); if Status_Temp /= SPARK_IO.Ok then FatalErrors.Process (FatalErrors.Problem_Creating_Temp_File, E_Strings.Empty_String); end if; --# assert True; -- create temporary file to store names of user rule files that contain -- syntax errors SPARK_IO.Create (Temp_Rlu_Error_File, 0, "", "", Status_Temp); if Status_Temp /= SPARK_IO.Ok then FatalErrors.Process (FatalErrors.Problem_Creating_Temp_File, E_Strings.Empty_String); end if; -- create temporary file to store names of user rule files that contain -- rules used for proof SPARK_IO.Create (Temp_Rlu_Used_File, 0, "", "", Status_Temp); if Status_Temp /= SPARK_IO.Ok then FatalErrors.Process (FatalErrors.Problem_Creating_Temp_File, E_Strings.Empty_String); end if; -- create temporary file to store names of review files containing errors SPARK_IO.Create (Temp_PR_Verr_File, 0, "", "", Status_Temp); if Status_Temp /= SPARK_IO.Ok then FatalErrors.Process (FatalErrors.Problem_Creating_Temp_File, E_Strings.Empty_String); end if; -- create temporary file to store names of files containing warnings or errors SPARK_IO.Create (Temp_Warn_Error_File, 0, "", "", Status_Temp); if Status_Temp /= SPARK_IO.Ok then FatalErrors.Process (FatalErrors.Problem_Creating_Temp_File, E_Strings.Empty_String); end if; -- create temporary file to store names of missing SDP file. SPARK_IO.Create (Temp_SDP_Error_File, 0, "", "", Status_Temp); if Status_Temp /= SPARK_IO.Ok then FatalErrors.Process (FatalErrors.Problem_Creating_Temp_File, E_Strings.Empty_String); end if; -- create temporary file to store names of missing DPC file. SPARK_IO.Create (Temp_DPC_Error_File, 0, "", "", Status_Temp); if Status_Temp /= SPARK_IO.Ok then FatalErrors.Process (FatalErrors.Problem_Creating_Temp_File, E_Strings.Empty_String); end if; -- Finally, we create a temporary file to store the names of -- erroneous vct/vlg files. SPARK_IO.Create (Temp_Victor_Error_File, 0, "", "", Status_Temp); if Status_Temp /= SPARK_IO.Ok then FatalErrors.Process (FatalErrors.Problem_Creating_Temp_File, E_Strings.Empty_String); end if; -- ... and one for bad rsm files from Riposte. 
SPARK_IO.Create (Temp_Riposte_Error_File, 0, "", "", Status_Temp); if Status_Temp /= SPARK_IO.Ok then FatalErrors.Process (FatalErrors.Problem_Creating_Temp_File, E_Strings.Empty_String); end if; --# assert True; while not Heap.IsNullPointer (This_File) and Success loop -- read file name --# accept F, 10, Unused_Directory_Is_Resolved, "Unused_Directory_Is_Resolved unused here"; FileHeap.Details (This_File, Success, Filename, File_Type, Unused_Directory_Is_Resolved); --# end accept; if not Success then FatalErrors.Process (FatalErrors.Data_Structure_Inconsistency, E_Strings.Empty_String); end if; if File_Type = FileDetails.PlainFile then --# accept F, 41, "Expression is stable but efficient"; if CommandLine.Data.AnalyseVCs then --Rather than assume that old values will be overwritten, actually start from --an initialised state each time. VCHeap.Initialize; if CommandLine.Data.ShortSummary then VCS.Analyse (SPARK_IO.Null_File, Filename, CommandLine.Data.AnalyseProofLog, Temp_File, Temp_False_File, Temp_Contra_File, Temp_Victor_File, Temp_Riposte_File, Temp_User_File, Temp_Rlu_Error_File, Temp_Rlu_Used_File, Temp_PR_Verr_File, Temp_Warn_Error_File, Temp_SDP_Error_File, Temp_DPC_Error_File, Temp_Victor_Error_File, Temp_Riposte_Error_File); else VCS.Analyse (Report_File, Filename, CommandLine.Data.AnalyseProofLog, Temp_File, Temp_False_File, Temp_Contra_File, Temp_Victor_File, Temp_Riposte_File, Temp_User_File, Temp_Rlu_Error_File, Temp_Rlu_Used_File, Temp_PR_Verr_File, Temp_Warn_Error_File, Temp_SDP_Error_File, Temp_DPC_Error_File, Temp_Victor_Error_File, Temp_Riposte_Error_File); end if; end if; --# end accept; -- put command to analyse path function files here -- Not Implemented in POGS yet... -- if CommandLine.Data.AnalysePFs then -- null; -- end if; end if; -- get next item in linked list FileHeap.Next (This_File, Success, Next_File); -- note that Success will be tested next, at the top of the loop This_File := Next_File; end loop; -- Output totals --# accept F, 10, Temp_File, "Temp_File unused here" & --# F, 10, Temp_False_File, "Temp_False_File unused here" & --# F, 10, Temp_Contra_File, "Temp_Contra_File unused here" & --# F, 10, Temp_Victor_File, "Temp_Victor_File unused here" & --# F, 10, Temp_Riposte_File, "Temp_Riposte_File unused here" & --# F, 10, Temp_User_File, "Temp_User_File unused here" & --# F, 10, Temp_Rlu_Error_File, "Temp_Rlu_Error_File unused here" & --# F, 10, Temp_Rlu_Used_File, "Temp_Rlu_Used_File unused here" & --# F, 10, Temp_Warn_Error_File, "Temp_Warn_Error_File unused here" & --# F, 10, Temp_PR_Verr_File, "Temp_PR_Verr_File unused here" & --# F, 10, Temp_DPC_Error_File, "Temp_DPC_Error_File unused here" & --# F, 10, Temp_SDP_Error_File, "Temp_SDP_Error_File unused here" & --# F, 10, Temp_Victor_Error_File, "Temp_Victor_Error_File unused here" & --# F, 10, Temp_Riposte_Error_File, "Temp_Riposte_Error_File unused here"; Total.Output (Report_File, Temp_File, Temp_False_File, Temp_Contra_File, Temp_Victor_File, Temp_Riposte_File, Temp_User_File, Temp_Rlu_Error_File, Temp_Rlu_Used_File, Temp_PR_Verr_File, Temp_Warn_Error_File, Temp_SDP_Error_File, Temp_DPC_Error_File, Temp_Victor_Error_File, Temp_Riposte_Error_File); --# end accept; Banner.FinishReport (Report_File); -- close report file --# accept F, 10, Status, "Status unused here" & --# F, 10, Report_File, "Report_File unused here"; SPARK_IO.Close (Report_File, Status); end if; --# accept F, 33, Unused_Directory_Is_Resolved, "Unused_Directory_Is_Resolved unused here"; end Main_Procedure; end TopPackage; 
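-- The Synopsis above notes that the main program is simply a call to
-- Main_Procedure wrapped in an exception handler for the exceptions raised
-- by FatalErrors. A minimal sketch of such a driver is given here for
-- illustration only; the real main unit is a separate file, and its name
-- and the exact handler body are assumptions:
--
--    with TopPackage;
--    procedure Pogs is
--    begin
--       TopPackage.Main_Procedure;
--    exception
--       when others =>
--          -- FatalErrors has already reported the problem; just stop.
--          null;
--    end Pogs;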
spark-2012.0.deb/pogs/slg_parser.adb0000644000175000017500000005266211753202340016230 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- -- Package containing subprogram bodies to facilitate incremental parsing of a -- -- simplifier log file. -- -------------------------------------------------------------------------------- with Ada.Characters.Latin_1; package body SLG_Parser is function First_Three (Line : E_Strings.T) return Key_String is begin return Key_String' (1 => E_Strings.Get_Element (E_Str => Line, Pos => 1), 2 => E_Strings.Get_Element (E_Str => Line, Pos => 2), 3 => E_Strings.Get_Element (E_Str => Line, Pos => 3)); end First_Three; function Lookup_Key (Line : E_Strings.T) return Log_Key is Key_Str : Key_String; Key : Log_Key; begin if E_Strings.Get_Length (E_Str => Line) >= 3 then Key_Str := First_Three (Line); if Key_Str = "RRS" then Key := Read_Rulefiles_Header; elsif Key_Str = "&&&" then Key := Load_Rulefile; elsif Key_Str = "STX" then Key := Syntax_Error_Header; elsif Key_Str = "!!!" 
then Key := Syntax_Error_In_Rulefile; elsif Key_Str = "SEM" then Key := No_Semantic_Checks_Header; elsif Key_Str = "@@@" then Key := VC_Header; elsif Key_Str = "%%%" then Key := Simplified; elsif Key_Str = ">>>" then Key := Restructured; elsif Key_Str = "***" then Key := Proved; elsif Key_Str = "###" then Key := Contradiction; elsif Key_Str = "---" then Key := Hyp_Eliminated; elsif Key_Str = "+++" then Key := Hyp_Added; elsif Key_Str = "-S-" then Key := Substitution; elsif Key_Str = "" then Key := New_Hyp_From_Subs; elsif Key_Str = "VCN" then Key := VC_Summary_Header; elsif Key_Str = "FIL" then Key := Rulefile_Use; elsif Key_Str = "RUL" then Key := Rule_Use; elsif Key_Str = "CON" then Key := Conclusions; elsif Key_Str = "HYP" then Key := Hypotheses; elsif Key_Str = "OVR" then Key := Overall_Summary_Header; elsif Key_Str = "VCS" then Key := VCs_Using_Rule; else Key := Not_A_Recognised_Key; end if; else Key := No_Room_For_Key; end if; return Key; end Lookup_Key; function Is_White_Space (C : Character) return Boolean is begin return C = ' ' or else C = Ada.Characters.Latin_1.HT or else C = Ada.Characters.Latin_1.LF or else C = Ada.Characters.Latin_1.CR; end Is_White_Space; pragma Inline (Is_White_Space); function Is_Char_At_Posn (C : Character; Logfile : Log_Info_T) return Boolean is begin return (Logfile.Posn > 0 and then Logfile.Posn <= E_Strings.Get_Length (E_Str => Logfile.Curr_Line)) and then E_Strings.Get_Element (E_Str => Logfile.Curr_Line, Pos => Logfile.Posn) = C; end Is_Char_At_Posn; pragma Inline (Is_Char_At_Posn); function Is_Whitespace_At_Posn (Logfile : Log_Info_T) return Boolean is begin return (Logfile.Posn > 0 and then Logfile.Posn <= E_Strings.Get_Length (E_Str => Logfile.Curr_Line)) and then Is_White_Space (C => E_Strings.Get_Element (E_Str => Logfile.Curr_Line, Pos => Logfile.Posn)); end Is_Whitespace_At_Posn; pragma Inline (Is_Whitespace_At_Posn); procedure Inc_Posn (Logfile : in out Log_Info_T) --# derives Logfile from *; --# pre Logfile.Posn <= E_Strings.Get_Length (Logfile.Curr_Line); --# post Logfile.Posn <= E_Strings.Get_Length (Logfile.Curr_Line); is begin if Logfile.Posn > 0 then if Logfile.Posn >= E_Strings.Get_Length (E_Str => Logfile.Curr_Line) then Logfile.Posn := 0; else Logfile.Posn := Logfile.Posn + 1; end if; end if; end Inc_Posn; pragma Inline (Inc_Posn); procedure Get_Item (Logfile : in out Log_Info_T; Item : out E_Strings.T) --# derives Item, --# Logfile from Logfile; --# pre Logfile.Posn <= E_Strings.Get_Length (Logfile.Curr_Line); --# post Logfile.Posn <= E_Strings.Get_Length (Logfile.Curr_Line); is Start_Posn : E_Strings.Lengths; End_Posn : E_Strings.Lengths; begin Item := E_Strings.Empty_String; if Logfile.Posn > 0 then Start_Posn := Logfile.Posn; End_Posn := Start_Posn; -- Find the last character position of the item. -- Items are separated by commas or terminated by an end of line. while End_Posn < E_Strings.Get_Length (E_Str => Logfile.Curr_Line) and then E_Strings.Get_Element (E_Str => Logfile.Curr_Line, Pos => End_Posn + 1) /= ',' loop End_Posn := End_Posn + 1; --# assert End_Posn <= E_Strings.Get_Length (Logfile.Curr_Line) --# and Start_Posn > 0; end loop; --# assert End_Posn <= E_Strings.Get_Length (Logfile.Curr_Line) --# and Start_Posn > 0; -- Update the Logfile Posn to the terminating character or -- to 0 if the entire current line has been read. if End_Posn = E_Strings.Get_Length (E_Str => Logfile.Curr_Line) then Logfile.Posn := 0; else Logfile.Posn := End_Posn + 1; end if; -- Copy the substring to the Item result string. 
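-- Worked example (hypothetical input): with Curr_Line = "FIL stack.rlu, queue.rlu"
-- and Posn initially at the 's' of "stack", the scan above stops with
-- End_Posn on the final 'u' of "stack.rlu", Posn is left on the ',', and
-- the loop below copies "stack.rlu" into Item.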
for I in E_Strings.Lengths range Start_Posn .. End_Posn loop --# assert Logfile.Posn <= E_Strings.Get_Length (Logfile.Curr_Line) --# and Start_Posn > 0; E_Strings.Append_Char (E_Str => Item, Ch => E_Strings.Get_Element (E_Str => Logfile.Curr_Line, Pos => I)); end loop; end if; end Get_Item; procedure Get_Next_Keyed_Line (Logfile : in out Log_Info_T) --# global in out SPARK_IO.File_Sys; --# derives Logfile, --# SPARK_IO.File_Sys from Logfile, --# SPARK_IO.File_Sys; --# post Logfile.Posn <= E_Strings.Get_Length (Logfile.Curr_Line); is F : SPARK_IO.File_Type; Found : Boolean := False; EOF : Boolean := False; begin F := Logfile.File_Handle; while not (Found or EOF) loop if SPARK_IO.End_Of_File (F) then Logfile.Key := Reached_EOF; -- Remove any existing line. Logfile.Curr_Line := E_Strings.Empty_String; EOF := True; else E_Strings.Get_Line (File => F, E_Str => Logfile.Curr_Line); Logfile.Key := Lookup_Key (Logfile.Curr_Line); Found := (Logfile.Key in Legal_Log_Keys); end if; end loop; if Found then if E_Strings.Get_Length (E_Str => Logfile.Curr_Line) >= 4 then Logfile.Posn := 4; else Logfile.Posn := 0; end if; elsif E_Strings.Get_Length (E_Str => Logfile.Curr_Line) > 0 then Logfile.Posn := 1; else Logfile.Posn := 0; end if; end Get_Next_Keyed_Line; procedure Get_Next_Line (Logfile : in out Log_Info_T) --# global in out SPARK_IO.File_Sys; --# derives Logfile, --# SPARK_IO.File_Sys from Logfile, --# SPARK_IO.File_Sys; --# post Logfile.Posn <= E_Strings.Get_Length (Logfile.Curr_Line); is F : SPARK_IO.File_Type; Found : Boolean; begin F := Logfile.File_Handle; if SPARK_IO.End_Of_File (F) then Logfile.Key := Reached_EOF; -- Remove any existing line. Logfile.Curr_Line := E_Strings.Empty_String; Found := False; else E_Strings.Get_Line (File => F, E_Str => Logfile.Curr_Line); Logfile.Key := Lookup_Key (Logfile.Curr_Line); Found := (Logfile.Key in Legal_Log_Keys); end if; if Found then if E_Strings.Get_Length (E_Str => Logfile.Curr_Line) >= 4 then Logfile.Posn := 4; else Logfile.Posn := 0; end if; elsif E_Strings.Get_Length (E_Str => Logfile.Curr_Line) > 0 then Logfile.Posn := 1; else Logfile.Posn := 0; end if; end Get_Next_Line; procedure Get_Next_Item (Logfile : in out Log_Info_T; Item : out E_Strings.T; Status : out Log_Status_T) --# global in out SPARK_IO.File_Sys; --# derives Item, --# Logfile, --# SPARK_IO.File_Sys, --# Status from Logfile, --# SPARK_IO.File_Sys; --# pre Logfile.Posn <= E_Strings.Get_Length (Logfile.Curr_Line); --# post Logfile.Posn <= E_Strings.Get_Length (Logfile.Curr_Line); is Continue_Search : Boolean; Have_Found : Boolean; Keep_Key : Log_Key; Keep_Key_Found : Boolean; begin -- At start, have not found an item. Item := E_Strings.Empty_String; -- Here we try to get an item (a block of text without white space) -- from the keyed line. Subsequent lines are considered to be part -- of the keyed line if they match the pattern of wrapped text. This -- is necessary, as the wrap_utility applies globally to a file. -- It is not correct to assume that all data will reside on the -- originating keyed line. Continue_Search := True; Have_Found := False; while (Continue_Search and (not Have_Found)) loop -- Scan the current keyed line for non-space, non-comma character, -- or reaching end of line. 
loop --# assert Logfile.Posn <= E_Strings.Get_Length (Logfile.Curr_Line); exit when not (Is_Whitespace_At_Posn (Logfile)) and not (Is_Char_At_Posn (',', Logfile)); Inc_Posn (Logfile); end loop; --# assert Logfile.Posn <= E_Strings.Get_Length (Logfile.Curr_Line); if not (Logfile.Posn = 0) then -- Found: non-space, non-comma character. Continue_Search := False; Have_Found := True; --# check Logfile.Posn <= E_Strings.Get_Length (Logfile.Curr_Line); else -- Found: end of line. -- Keep the current key, and consider the next line. -- It's deemed to be a continuation if it is not the end of -- the file and it starts with: -- " [^ ]" (ten spaces, followed by a non-space). Keep_Key := Logfile.Key; Keep_Key_Found := Logfile.Key_Found; Get_Next_Line (Logfile); --# check Logfile.Posn <= E_Strings.Get_Length (Logfile.Curr_Line); if not (Logfile.Key = Reached_EOF) and then E_Strings.Get_Length (E_Str => Logfile.Curr_Line) >= 11 and then (E_Strings.Get_Element (E_Str => Logfile.Curr_Line, Pos => 1) = ' ' and then E_Strings.Get_Element (E_Str => Logfile.Curr_Line, Pos => 2) = ' ' and then E_Strings.Get_Element (E_Str => Logfile.Curr_Line, Pos => 3) = ' ' and then E_Strings.Get_Element (E_Str => Logfile.Curr_Line, Pos => 4) = ' ' and then E_Strings.Get_Element (E_Str => Logfile.Curr_Line, Pos => 5) = ' ' and then E_Strings.Get_Element (E_Str => Logfile.Curr_Line, Pos => 6) = ' ' and then E_Strings.Get_Element (E_Str => Logfile.Curr_Line, Pos => 7) = ' ' and then E_Strings.Get_Element (E_Str => Logfile.Curr_Line, Pos => 8) = ' ' and then E_Strings.Get_Element (E_Str => Logfile.Curr_Line, Pos => 9) = ' ' and then E_Strings.Get_Element (E_Str => Logfile.Curr_Line, Pos => 10) = ' ' and then not (E_Strings.Get_Element (E_Str => Logfile.Curr_Line, Pos => 11) = ' ')) then -- Found: Is a continuation. Set the Key and Key_Found accordingly. Continue_Search := True; Logfile.Key := Keep_Key; Logfile.Key_Found := Keep_Key_Found; else -- Found: Is not a continuation. Set Key_Found accordingly. Continue_Search := False; Logfile.Key_Found := False; end if; end if; end loop; --# assert Logfile.Posn <= E_Strings.Get_Length (Logfile.Curr_Line); -- If found start of a block of text, get the item. if (Have_Found) then Get_Item (Logfile, Item); if E_Strings.Get_Length (E_Str => Item) > 0 then Status := Success; else Status := Not_Found; end if; else Status := Not_Found; end if; end Get_Next_Item; procedure Find_Header (Header : in Log_Key; Logfile : in out Log_Info_T; Status : out Log_Status_T) --# global in out SPARK_IO.File_Sys; --# derives Logfile, --# SPARK_IO.File_Sys, --# Status from Header, --# Logfile, --# SPARK_IO.File_Sys; is Searching : Boolean; begin -- The current line may contain the header desired. Only look at the -- next line if it does not. Status := Not_Found; Searching := True; while Searching loop if Logfile.Key = Header then Status := Success; Searching := False; elsif Logfile.Key in Log_Headers and then Logfile.Key > Header then -- A later section header has been encountered and -- the given Header is assumed to be not present. 
-- Status = Not_Found; Searching := False; elsif Logfile.Key = Reached_EOF then Status := End_Of_File; Searching := False; else Get_Next_Keyed_Line (Logfile); end if; end loop; Logfile.Key_Found := Status = Success; end Find_Header; procedure Get_Next_Subsection (Subsection : in Log_Key; Logfile : in out Log_Info_T; Item : out E_Strings.T; Status : out Log_Status_T) --# global in out SPARK_IO.File_Sys; --# derives Item, --# Logfile, --# SPARK_IO.File_Sys, --# Status from Logfile, --# SPARK_IO.File_Sys, --# Subsection; is begin -- Three scenarios are possible when we get here: -- -- 1) Have not looked at any lines. Logfile.Key will be 'Been_Initialised'. -- So: Want to get the next keyed line. -- 2) On a keyed line that has been fully processed. Logfile.Key_Found -- will be true, and Logfile.Key will be in Legal_Log_Keys. -- So: Want to get the next keyed line. -- 3) Have looked at the next line, searching for a continuation, but -- did not find one. Logfile.Key_Found will be false. Logfile.Key may -- be anything (we may or may not be on a keyed line). -- So: Only if this is not a keyed line, want to get the next keyed line. if Logfile.Key_Found or not (Logfile.Key in Legal_Log_Keys) then Get_Next_Keyed_Line (Logfile); end if; if Logfile.Key = Subsection then Logfile.Key_Found := True; Get_Next_Item (Logfile, Item, Status); if Status /= Success then Status := Unexpected_Text; end if; else Logfile.Key_Found := False; Status := Not_Found; Item := E_Strings.Empty_String; end if; end Get_Next_Subsection; procedure Init (Logfile_Name : in E_Strings.T; Info : out Log_Info_T; Status : out Log_Status_T) is Open_Status : SPARK_IO.File_Status; begin --# accept F, 23, Info.File_Handle, "The call to open initialises File_Handle"; E_Strings.Open (File => Info.File_Handle, Mode_Of_File => SPARK_IO.In_File, Name_Of_File => Logfile_Name, Form_Of_File => "", Status => Open_Status); --# end accept; if Open_Status = SPARK_IO.Ok then Status := Success; else Status := Failure; end if; Info.Key := Been_Initialised; Info.Key_Found := False; Info.Posn := 0; Info.Curr_Line := E_Strings.Empty_String; --# accept F, 602, SPARK_IO.File_Sys, Info.File_Handle, --# "The call to open initialises File_Handle"; --# accept F, 602, Info, Info.File_Handle, --# "The call to open initialises File_Handle"; --# accept F, 602, Status, Info.File_Handle, --# "The call to open initialises File_Handle"; end Init; procedure Find_Rulefiles_Read (Info : in out Log_Info_T; Status : out Log_Status_T) is begin Find_Header (Read_Rulefiles_Header, Info, Status); end Find_Rulefiles_Read; procedure Find_Rule_Syntax_Errors (Info : in out Log_Info_T; Status : out Log_Status_T) is begin Find_Header (Syntax_Error_Header, Info, Status); end Find_Rule_Syntax_Errors; procedure Get_Next_Rulefile_Syntax_Error (Info : in out Log_Info_T; Rulefile : out E_Strings.T; Status : out Log_Status_T) is begin Get_Next_Subsection (Syntax_Error_In_Rulefile, Info, Rulefile, Status); end Get_Next_Rulefile_Syntax_Error; procedure Find_Rule_Summary (Info : in out Log_Info_T; Status : out Log_Status_T) is begin Find_Header (Overall_Summary_Header, Info, Status); end Find_Rule_Summary; procedure Get_Next_Rulefile (Info : in out Log_Info_T; Rulefile : out E_Strings.T; Status : out Log_Status_T) is begin Get_Next_Subsection (Rulefile_Use, Info, Rulefile, Status); end Get_Next_Rulefile; procedure Get_Next_Rule (Info : in out Log_Info_T; Rule : out E_Strings.T; Status : out Log_Status_T) is begin Get_Next_Subsection (Rule_Use, Info, Rule, Status); end Get_Next_Rule; procedure
Get_Next_VC (Info : in out Log_Info_T; VC_Number : out E_Strings.T; Status : out Log_Status_T) is begin if Info.Key = VCs_Using_Rule then -- A VCs_Using_Rule line is already being parsed. Get_Next_Item (Info, VC_Number, Status); else -- Move to the VCs_Using_Rule line and get first VC number. Get_Next_Subsection (VCs_Using_Rule, Info, VC_Number, Status); end if; end Get_Next_VC; procedure Finalise (Info : in out Log_Info_T) is Not_Used : SPARK_IO.File_Status; begin --# accept F, 10, Not_Used, "The status returned from Close is not used"; --# accept F, 33, Not_Used, "The status returned from Close is not used"; SPARK_IO.Close (Info.File_Handle, Not_Used); end Finalise; end SLG_Parser; spark-2012.0.deb/pogs/osdirectory.ads0000644000175000017500000000446511753202340016454 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- Synopsis: -- -- -- -- Package to scan a directory. -- -- -- -------------------------------------------------------------------------------- with Heap; --# inherit E_Strings, --# FatalErrors, --# FileDetails, --# FileHeap, --# Heap, --# OSFiling; package OSDirectory is ------------------------------------------------------------------------ -- this procedure reads the named directory using the C calls -- opendir(), readdir() and closedir() -- for each entry, it calls OSFiling.IsDirectory and then enters it in the -- linked list ------------------------------------------------------------------------ procedure Scan (ListIndex : in Heap.Atom); --# global in OSFiling.File_Structure; --# in out FatalErrors.State; --# in out FileHeap.State; --# derives FatalErrors.State, --# FileHeap.State from *, --# ListIndex, --# OSFiling.File_Structure; end OSDirectory; spark-2012.0.deb/pogs/pathformatter.ads0000644000175000017500000000332211753202340016755 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --To provide a standardised file name format which is platform independent -- --when the -plain option is chosen. -- -- -- -------------------------------------------------------------------------------- with E_Strings; --# inherit CommandLine, --# E_Strings; package PathFormatter is function Format (RawFileName : E_Strings.T) return E_Strings.T; --# global in CommandLine.Data; end PathFormatter; spark-2012.0.deb/pogs/vcheap.ads0000644000175000017500000001671011753202340015350 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Package combining Heap and VCDetails to give an ordered list of VC details. -- -- -- --This is implemented as an abstract state machine. This is possible as there -- --is only a single occurrence of the VCHeap in the program. It is necessary -- --to prevent unacceptable overheads of creating local copies of the embedded -- --Heap and VCDetails types to circumvent the entire variable rule. It would -- --also be possible to implement Heap and VCDetails as ASMs but not -- --necessarily desirable as this would affect the high level annotations of the-- --program. -- -- -- -------------------------------------------------------------------------------- with E_Strings; with Heap; with VCDetails; use type Heap.Atom; --# inherit E_Strings, --# FatalErrors, --# Heap, --# HeapIndex, --# VCDetails; package VCHeap --# own I_State; --# State; is -- Start_Index is a point in the linked list at which to start the -- search. This is used to start insertion at the first VC. 
-- If the name table is full, a fatal error is produced and Add does not -- return procedure Add (Start_Index : in Heap.Atom; New_Name : in E_Strings.T; Path_Start : in E_Strings.T; Path_End : in E_Strings.T; End_Type : in VCDetails.Terminal_Point_Type; VC_State : in VCDetails.VC_State_T; DPC_State : in VCDetails.DPC_State_T); --# global in out FatalErrors.State; --# in out I_State; --# in out State; --# derives FatalErrors.State, --# State from *, --# DPC_State, --# End_Type, --# New_Name, --# Path_End, --# Path_Start, --# Start_Index, --# State, --# VC_State & --# I_State from *, --# New_Name, --# Path_End, --# Path_Start; -- this procedure returns the VC details for the specified entry in the -- linked list. procedure Details (List_Index : in Heap.Atom; VC_Name : out E_Strings.T; Path_Start : out E_Strings.T; Path_End : out E_Strings.T; End_Type : out VCDetails.Terminal_Point_Type; VC_State : out VCDetails.VC_State_T; DPC_State : out VCDetails.DPC_State_T); --# global in State; --# derives DPC_State, --# End_Type, --# Path_End, --# Path_Start, --# VC_Name, --# VC_State from List_Index, --# State; function First_Entry return Heap.Atom; --# global in State; procedure Initialize; --# global out I_State; --# out State; --# derives I_State, --# State from ; procedure Raise_Error (Error_Kind : in VCDetails.Error_Type); --# global in out State; --# derives State from *, --# Error_Kind; function Error_Raised (Error_Kind : in VCDetails.Error_Type) return Boolean; --# global in State; procedure Reinitialize (First_Element : in E_Strings.T; First_Path_Start : in E_Strings.T; First_Path_End : in E_Strings.T; First_End_Type : in VCDetails.Terminal_Point_Type); --# global out I_State; --# out State; --# derives I_State from First_Element, --# First_Path_End, --# First_Path_Start & --# State from First_Element, --# First_End_Type, --# First_Path_End, --# First_Path_Start; -- this procedure returns the 'Next_One' ordered element in VCH after -- 'After_This'. 
It is successful if the Next_One is not a 'null' pointer procedure Next (After_This : in Heap.Atom; Success : out Boolean; Next_One : out Heap.Atom); --# global in State; --# derives Next_One, --# Success from After_This, --# State; procedure Set_VC_State (VC_Name : in E_Strings.T; VC_State : in VCDetails.VC_State_T); --# global in out FatalErrors.State; --# in out State; --# derives FatalErrors.State from *, --# State, --# VC_Name & --# State from *, --# VC_Name, --# VC_State; function Get_VC_State (VC_Name : in E_Strings.T) return VCDetails.VC_State_T; --# global in State; procedure Set_DPC_State (DPC_Name : in E_Strings.T; DPC_State : in VCDetails.DPC_State_T); --# global in out FatalErrors.State; --# in out State; --# derives FatalErrors.State from *, --# DPC_Name, --# State & --# State from *, --# DPC_Name, --# DPC_State; function Exists (VC_Name : E_Strings.T) return Boolean; --# global in State; procedure Get_VC_Name_End_Type (VC_Name : in E_Strings.T; VC_Type : out VCDetails.Terminal_Point_Type); --# global in State; --# in out FatalErrors.State; --# derives FatalErrors.State from *, --# State, --# VC_Name & --# VC_Type from State, --# VC_Name; function Get_Longest_VC_Name_Length return Integer; --# global in I_State; function Get_Longest_VC_Start_Length return Integer; --# global in I_State; function Get_Longest_VC_End_Length return Integer; --# global in I_State; function Get_VC_Name_Prefix return E_Strings.T; --# global in I_State; end VCHeap; spark-2012.0.deb/pogs/vcs-analysevictoredvcfile.adb0000644000175000017500000004372311753202340021243 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Procedure to analyse a .vct file -- -- -- -------------------------------------------------------------------------------- separate (VCS) procedure AnalyseVictoredVCFile (Report_File : in SPARK_IO.File_Type; Filename : in E_Strings.T; Error_In_VCTR_File : out Boolean; File_Error : out E_Strings.T; Temp_Victor_Error_File : in SPARK_IO.File_Type) is -- String and error conctants. 
Str_File_Corrupt : constant String := "ViCToR file corrupt: Could not parse line"; Str_Cannot_Open : constant String := "Cannot open ViCToR results file"; -- VCs or goals must start at 1 Invalid_VC_Number : constant Natural := 0; Blank_Conclusion_Number : constant Natural := 0; type Victor_Status_T is (Victor_True, Victor_Unproven, Victor_Error); type VC_Type_T is (VC_Type_Procedure, VC_Type_Function, VC_Type_Task_Type, VC_Type_Unknown); -- As defined in the ViCToR User Manual, Release 0.7.1 type Victor_CSV_Line_T is record VC_Path : E_Strings.T; Unit : E_Strings.T; VC_Type : VC_Type_T; Source : E_Strings.T; Destination : E_Strings.T; VC_Number : Natural; Conclusion_Number : Natural; Status : Victor_Status_T; Proof_Time : E_Strings.T; Remarks : E_Strings.T; Operator_Kinds_In_Hypothesis : E_Strings.T; Operator_Kinds_In_Conclusion : E_Strings.T; end record; Invalid_CSV_Line : constant Victor_CSV_Line_T := Victor_CSV_Line_T' (VC_Path => E_Strings.Empty_String, Unit => E_Strings.Empty_String, VC_Type => VC_Type_Unknown, Source => E_Strings.Empty_String, Destination => E_Strings.Empty_String, VC_Number => Invalid_VC_Number, Conclusion_Number => Blank_Conclusion_Number, Status => Victor_Error, Proof_Time => E_Strings.Empty_String, Remarks => E_Strings.Empty_String, Operator_Kinds_In_Hypothesis => E_Strings.Empty_String, Operator_Kinds_In_Conclusion => E_Strings.Empty_String); File_Status : SPARK_IO.File_Status; Victored_VC_File : SPARK_IO.File_Type; Line_Read : E_Strings.T; Trimmed_Line : E_Strings.T; Success : Boolean; CSV_Line : Victor_CSV_Line_T; Error_Flag_Mentioned : Boolean; function Unqote (E_Str : E_Strings.T) return E_Strings.T is L : E_Strings.Lengths; Retval : E_Strings.T; begin L := E_Strings.Get_Length (E_Str); if L >= 2 and then E_Strings.Get_Element (E_Str, E_Strings.Positions'First) = '"' and then E_Strings.Get_Element (E_Str, L) = '"' then Retval := E_Strings.Section (E_Str => E_Str, Start_Pos => E_Strings.Positions'First + 1, Length => L - 2); else Retval := E_Str; end if; return Retval; end Unqote; procedure Parse_CSV_String (CSV_Line : in E_Strings.T; CSV_Line_Position : in out E_Strings.Positions; The_String : out E_Strings.T; Ok : out Boolean; Expect_EOL : in Boolean) --# derives CSV_Line_Position, --# Ok, --# The_String from CSV_Line, --# CSV_Line_Position, --# Expect_EOL; --# pre CSV_Line_Position <= E_Strings.Get_Length (CSV_Line) + 1; --# post CSV_Line_Position <= E_Strings.Get_Length (CSV_Line) + 1; is Is_Quoted_String : Boolean; In_Quoted_String : Boolean; Comma_Found : Boolean; EOL_Found : Boolean; End_Position : E_Strings.Positions; Tmp_String : E_Strings.T; begin Is_Quoted_String := E_Strings.Get_Element (CSV_Line, CSV_Line_Position) = '"'; In_Quoted_String := Is_Quoted_String; Comma_Found := False; End_Position := CSV_Line_Position; for I in E_Strings.Positions range CSV_Line_Position .. E_Strings.Get_Length (CSV_Line) loop --# assert E_Strings.Get_Length (CSV_Line) <= E_Strings.Lengths'Last; End_Position := I; case E_Strings.Get_Element (CSV_Line, I) is when '"' => if In_Quoted_String and I > CSV_Line_Position then In_Quoted_String := False; end if; when ',' => if not In_Quoted_String then Comma_Found := True; end if; when others => null; end case; exit when Comma_Found; end loop; --# assert End_Position >= CSV_Line_Position and End_Position <= E_Strings.Get_Length (CSV_Line) + 1 --# and (Comma_Found -> (End_Position <= E_Strings.Get_Length (CSV_Line))); -- Work out if we hit the end of line. 
EOL_Found := not Comma_Found or else E_Strings.Get_Length (CSV_Line) = E_Strings.Lengths'Last; -- Make sure we found a comma if we were looking for one. Ok := Expect_EOL = EOL_Found; -- Make sure any quoted strings are OK. Ok := Ok and not In_Quoted_String; if Ok then Tmp_String := E_Strings.Section (E_Str => CSV_Line, Start_Pos => CSV_Line_Position, Length => End_Position - CSV_Line_Position); -- Strip away the quotes, if necessary. if Is_Quoted_String then The_String := Unqote (Tmp_String); else The_String := Tmp_String; end if; -- Jump over the comma. if not EOL_Found then CSV_Line_Position := End_Position + 1; end if; else The_String := E_Strings.Empty_String; end if; end Parse_CSV_String; function Parse_Natural (E_Str : E_Strings.T; Value_On_Error : Natural) return Natural is Tmp_Integer : Integer; The_Natural : Natural; Tmp_Stop : Integer; begin The_Natural := Value_On_Error; if E_Strings.Get_Length (E_Str) > 0 then --# accept F, 10, Tmp_Stop, "We don't care about Stop at the moment"; E_Strings.Get_Int_From_String (Source => E_Str, Item => Tmp_Integer, Start_Pt => E_Strings.Positions'First, Stop => Tmp_Stop); --# end accept; -- TODO: Check that Tmp_Stop = E_Strings.Get_Length (Tmp_String) ? if Tmp_Integer >= Natural'First then The_Natural := Natural'(Tmp_Integer); end if; end if; --# accept F, 33, Tmp_Stop, "We don't care about Stop at the moment"; return The_Natural; end Parse_Natural; function Parse_Victor_Status (E_Str : E_Strings.T; Value_On_Error : Victor_Status_T) return Victor_Status_T is The_Victor_Status : Victor_Status_T; begin The_Victor_Status := Value_On_Error; if E_Strings.Eq1_String (E_Str, "true") then The_Victor_Status := Victor_True; elsif E_Strings.Eq1_String (E_Str, "unproven") then The_Victor_Status := Victor_Unproven; elsif E_Strings.Eq1_String (E_Str, "error") then The_Victor_Status := Victor_Error; end if; return The_Victor_Status; end Parse_Victor_Status; function Parse_VC_Type (E_Str : E_Strings.T) return VC_Type_T is The_Type : VC_Type_T := VC_Type_Unknown; begin if E_Strings.Eq1_String (E_Str, "procedure") then The_Type := VC_Type_Procedure; elsif E_Strings.Eq1_String (E_Str, "function") then The_Type := VC_Type_Function; elsif E_Strings.Eq1_String (E_Str, "task_type") then The_Type := VC_Type_Task_Type; end if; return The_Type; end Parse_VC_Type; -- This function will construct the full unit name out the various -- fields in each csv row. 
So, for example given the following row: -- ap_/altitude,maintain,procedure,,,1,,true,0,,, -- We should get the following string back: -- procedure_maintain_1 function Full_Unit_Name (The_Record : in Victor_CSV_Line_T) return E_Strings.T is Tmp_Number : E_Strings.T; Tmp : E_Strings.T := E_Strings.Empty_String; begin case The_Record.VC_Type is when VC_Type_Procedure => E_Strings.Append_String (Tmp, "procedure_"); when VC_Type_Function => E_Strings.Append_String (Tmp, "function_"); when VC_Type_Task_Type => E_Strings.Append_String (Tmp, "task_type_"); when VC_Type_Unknown => null; end case; E_Strings.Append_Examiner_String (Tmp, The_Record.Unit); if The_Record.VC_Number /= Invalid_VC_Number then E_Strings.Append_Char (Tmp, '_'); E_Strings.Put_Int_To_String (Dest => Tmp_Number, Item => The_Record.VC_Number, Start_Pt => E_Strings.Positions'First, Base => 10); E_Strings.Append_Examiner_String (Tmp, Tmp_Number); end if; return Tmp; end Full_Unit_Name; procedure Parse_Victor_CSV_Line (The_Line : in E_Strings.T; The_Record : out Victor_CSV_Line_T; Ok : out Boolean) --# derives Ok, --# The_Record from The_Line; --# pre E_Strings.Get_Length (The_Line) >= 1; is Current_Position : E_Strings.Positions := E_Strings.Positions'First; Tmp : E_Strings.T; Num_CSV_Entries : constant Natural := 12; subtype CSV_Record_Index is Natural range 1 .. Num_CSV_Entries; begin The_Record := Invalid_CSV_Line; for I in CSV_Record_Index --# assert Current_Position <= E_Strings.Get_Length (The_Line) + 1; loop Parse_CSV_String (CSV_Line => The_Line, CSV_Line_Position => Current_Position, The_String => Tmp, Ok => Ok, Expect_EOL => (I = CSV_Record_Index'Last)); exit when not Ok; -- Each I will map to each number given in the ViCToR user -- manual describing the "CSV output file". case I is when 1 => The_Record.VC_Path := Tmp; when 2 => The_Record.Unit := Tmp; when 3 => The_Record.VC_Type := Parse_VC_Type (Tmp); when 4 => The_Record.Source := Tmp; when 5 => The_Record.Destination := Tmp; when 6 => The_Record.VC_Number := Parse_Natural (Tmp, Invalid_VC_Number); when 7 => The_Record.Conclusion_Number := Parse_Natural (Tmp, Blank_Conclusion_Number); when 8 => The_Record.Status := Parse_Victor_Status (Tmp, Victor_Error); when 9 => The_Record.Proof_Time := Tmp; when 10 => The_Record.Remarks := Tmp; when 11 => The_Record.Operator_Kinds_In_Hypothesis := Tmp; when 12 => The_Record.Operator_Kinds_In_Conclusion := Tmp; end case; end loop; end Parse_Victor_CSV_Line; begin -- AnalyseVictoredVCFile Error_In_VCTR_File := False; File_Error := E_Strings.Empty_String; Victored_VC_File := SPARK_IO.Null_File; Error_Flag_Mentioned := False; -- open ViCToR results file E_Strings.Open (File => Victored_VC_File, Mode_Of_File => SPARK_IO.In_File, Name_Of_File => Filename, Form_Of_File => "", Status => File_Status); if File_Status /= SPARK_IO.Ok then Error_In_VCTR_File := True; File_Error := E_Strings.Copy_String (Str_Cannot_Open); FatalErrors.Process (FatalErrors.Could_Not_Open_Input_File, E_Strings.Empty_String); end if; loop Read_Next_Non_Blank_Line (File => Victored_VC_File, Success => Success, File_Line => Line_Read); exit when not Success; --# assert Success; Trimmed_Line := E_Strings.Trim (Line_Read); Success := E_Strings.Get_Length (Trimmed_Line) >= 1; if Success then Parse_Victor_CSV_Line (The_Line => E_Strings.Trim (Line_Read), The_Record => CSV_Line, Ok => Success); else CSV_Line := Invalid_CSV_Line; end if; if not Success then -- Notify stdout. 
SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "************* " & Str_File_Corrupt & " ************", 0); SPARK_IO.Put_String (SPARK_IO.Standard_Output, "*** Offending line was: [", 0); E_Strings.Put_String (SPARK_IO.Standard_Output, Trimmed_Line); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "]", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); -- Also put something in the report file. SPARK_IO.Put_String (Report_File, "*** " & Str_File_Corrupt & " ***", 0); -- And finally set error flags. File_Error := E_Strings.Copy_String (Str_File_Corrupt); Error_In_VCTR_File := True; end if; --# assert True; if not Success then null; elsif (not E_Strings.Eq1_String (CSV_Line.Remarks, "excluded") and -- The goal must be blank, otherwise we may not have proved all conclusions. CSV_Line.Conclusion_Number = Blank_Conclusion_Number and -- We must have a VC number given. CSV_Line.VC_Number /= Invalid_VC_Number) then case CSV_Line.Status is when Victor_True => VCHeap.Set_VC_State (Full_Unit_Name (CSV_Line), VCDetails.VC_Proved_By_Victor); when Victor_Unproven => -- We don't do anything in this case. null; when Victor_Error => -- This means alt-ergo or perhaps vct encountered some -- kind of error. We will flag this up and include it -- in the final summary. However, we will only do this -- once. if not Error_Flag_Mentioned then E_Strings.Put_String (File => Temp_Victor_Error_File, E_Str => PathFormatter.Format (Filename)); SPARK_IO.Put_Char (Temp_Victor_Error_File, ' '); SPARK_IO.Put_Char (Temp_Victor_Error_File, '('); if E_Strings.Is_Empty (CSV_Line.Remarks) then SPARK_IO.Put_String (Temp_Victor_Error_File, "Error flag returned by vct/alt-ergo.", 0); else E_Strings.Put_String (File => Temp_Victor_Error_File, E_Str => CSV_Line.Remarks); end if; SPARK_IO.Put_Line (Temp_Victor_Error_File, ")", 0); Error_Flag_Mentioned := True; end if; end case; elsif CSV_Line.VC_Number = Invalid_VC_Number and CSV_Line.Status = Victor_Error then -- If we're here this means that victor has failed to -- produce useful output, most likely due to a translation -- or parsing error, but there may be an error message worth -- passing up. E_Strings.Put_String (File => Temp_Victor_Error_File, E_Str => PathFormatter.Format (Filename)); SPARK_IO.Put_Char (Temp_Victor_Error_File, ' '); SPARK_IO.Put_Char (Temp_Victor_Error_File, '('); if E_Strings.Is_Empty (CSV_Line.Remarks) then SPARK_IO.Put_String (Temp_Victor_Error_File, "Unknown Error", 0); else E_Strings.Put_String (File => Temp_Victor_Error_File, E_Str => CSV_Line.Remarks); end if; SPARK_IO.Put_Line (Temp_Victor_Error_File, ")", 0); end if; end loop; --# accept F, 10, File_Status, "We don't care anymore since we've got everything we came for."
& --# F, 10, Victored_VC_File, "Same as above."; SPARK_IO.Close (Victored_VC_File, File_Status); --# end accept; --# accept Flow, 601, FatalErrors.State, Temp_Victor_Error_File, "False coupling through SPARK_IO" & --# Flow, 601, VCHeap.State, Temp_Victor_Error_File, "False coupling through SPARK_IO"; end AnalyseVictoredVCFile; spark-2012.0.deb/pogs/commandline.ads0000644000175000017500000000431511753202340016366 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --To provide global state representing the command line options and a -- --procedure to parse the system command line. -- -- -- -------------------------------------------------------------------------------- with E_Strings; with OSCommandLine; --# inherit Ada.Characters.Latin_1, --# E_Strings, --# OSCommandLine, --# OSFiling; package CommandLine --# own Data; is -- Data structure to hold result of processing arguments Data : OSCommandLine.DataType; -- Read in and process the command line arguments procedure Read; --# global in OSCommandLine.State; --# in OSFiling.File_Structure; --# out Data; --# derives Data from OSCommandLine.State, --# OSFiling.File_Structure; -- Returns a "usage" string for reporting to user when command line -- is invalid. function Usage_String return E_Strings.T; end CommandLine; spark-2012.0.deb/pogs/oscommandline.apb0000644000175000017500000001756111753202340016732 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Ada.Command_Line; with E_Strings.Not_SPARK; with GNAT.OS_Lib; with OSFiling; package body OSCommandLine is --# hide OSCommandLine; -- Hide the fact that State has no concrete definition, and that we are making -- use of Ada.Command_Line and GNAT.OS_Lib. -- SwitchCharacter returns the command line switch character for the platform function SwitchCharacter return Character is begin #if Target = "Intel_WinNT" then return '/'; #else return '-'; #end if; end SwitchCharacter; procedure Normalize_Pathname (InputFile : in E_Strings.T; InputDir : in E_Strings.T; ResultPath : out E_Strings.T) is begin ResultPath := E_Strings.Copy_String (Str => GNAT.OS_Lib.Normalize_Pathname (Name => E_Strings.Not_SPARK.Get_String (E_Str => InputFile), Directory => E_Strings.Not_SPARK.Get_String (E_Str => InputDir), Resolve_Links => True, Case_Sensitive => True)); end Normalize_Pathname; procedure Read (Switches : out DataType) is Index : Natural; -- Index into the input argument string currently being processed ArgCount : Natural; -- How many command line arguments UnusedChar : Character; -- Dummy argument needed for calls to PopChar begin -- Use default values unless overridden by command line Switches := DefaultDataType; ArgCount := Ada.Command_Line.Argument_Count; -- Remember to pass correct data structure in the case where there are zero args Switches.StartDirectory := OSFiling.Get_Working_Directory; -- Assume things are fine until shown otherwise. Switches.Valid := True; -- Process each argument in turn -- Currently 7 switches are implemented: -- version, plain output, ignore dates, input directory, output file and short summary. for ArgIndex in Natural range 1 .. ArgCount loop Index := 1; -- First character in argument string being processed -- Every argument should start with a switch character. -- Fail if this argument does not. -- Allow both switch characters for the time being. (We still need the prepping and this test -- because '/' is not a valid switch character on Unix.) if (Ada.Command_Line.Argument (ArgIndex) (Index) = SwitchCharacter) or (Ada.Command_Line.Argument (ArgIndex) (Index) = '-') then -- Check the length of the argument before indexing into it. -- Every argument should be at least 2 characters. if (Ada.Command_Line.Argument (ArgIndex)'Length) > 1 then Index := Index + 1; else Switches.Valid := False; exit; end if; case Ada.Command_Line.Argument (ArgIndex) (Index) is when 'd' | 'D' => -- Next character must be '=', and there must be at least one character in the directory -- name itself, so the argument length must be at least 4. Index := Index + 1; if (Ada.Command_Line.Argument (ArgIndex)'Length > 3) and then (Ada.Command_Line.Argument (ArgIndex) (Index) = '=') then -- Copy the input argument string into StartDirectory Switches.StartDirectory := E_Strings.Copy_String (Str => Ada.Command_Line.Argument (ArgIndex)); -- Strip off the first three characters ("-d=") E_Strings.Pop_Char (E_Str => Switches.StartDirectory, Char => UnusedChar); E_Strings.Pop_Char (E_Str => Switches.StartDirectory, Char => UnusedChar); E_Strings.Pop_Char (E_Str => Switches.StartDirectory, Char => UnusedChar); else Switches.Valid := False; end if; when 'o' | 'O' => -- Next character must be '=', and there must be at least one character in the file -- name itself, so the argument length must be at least 4. 
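               -- Illustrative note only (the argument values here are made up,
               -- not taken from a real run): an invocation such as
               -- "pogs -d=my_vcs -o=my_report.sum" would leave "my_vcs" in
               -- StartDirectory and "my_report.sum" in ReportFile, once the
               -- three leading characters ("-d=" / "-o=") have been popped.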
Index := Index + 1; if (Ada.Command_Line.Argument (ArgIndex)'Length > 3) and then (Ada.Command_Line.Argument (ArgIndex) (Index) = '=') then -- Copy the input argument string into ReportFile Switches.ReportFile := E_Strings.Copy_String (Str => Ada.Command_Line.Argument (ArgIndex)); -- Strip off the first three characters ("-o=") E_Strings.Pop_Char (E_Str => Switches.ReportFile, Char => UnusedChar); E_Strings.Pop_Char (E_Str => Switches.ReportFile, Char => UnusedChar); E_Strings.Pop_Char (E_Str => Switches.ReportFile, Char => UnusedChar); else Switches.Valid := False; end if; when 'V' | 'v' => if Switches.VersionRequested or ((Ada.Command_Line.Argument (ArgIndex)'Length) > 2) then Switches.Valid := False; else -- use of this switch overrides any previously given -p Switches.VersionRequested := True; Switches.PlainOutput := False; end if; when 'I' | 'i' => if Switches.IgnoreDates or ((Ada.Command_Line.Argument (ArgIndex)'Length) > 2) then Switches.Valid := False; else Switches.IgnoreDates := True; end if; when 'P' | 'p' => if Switches.PlainOutput or ((Ada.Command_Line.Argument (ArgIndex)'Length) > 2) then Switches.Valid := False; else -- if -v already given, then ignore -p if not Switches.VersionRequested then Switches.PlainOutput := True; end if; end if; when 'S' | 's' => if Switches.ShortSummary or ((Ada.Command_Line.Argument (ArgIndex)'Length) > 2) then Switches.Valid := False; else Switches.ShortSummary := True; end if; when others => Switches.Valid := False; end case; else Switches.Valid := False; end if; end loop; end Read; end OSCommandLine; spark-2012.0.deb/pogs/vcdetails.ads0000644000175000017500000003027711753202340016064 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Package providing a structure in which to store VC details. -- -- -- --To be used in tandem with the Heap data structure, hence the use of -- --Heap.Atom as the array range -- -------------------------------------------------------------------------------- with E_Strings; with HeapIndex; use type HeapIndex.IndexType; --# inherit E_Strings, --# HeapIndex; package VCDetails is type Terminal_Point_Type is ( Assert_Point, Precondition_Check_Point, Check_Statement_Point, Runtime_Check_Point, Refinement_VC_Point, Inheritance_VC_Point, Undetermined_Point); -- These errors are the union of all possible errors types from -- all possible analysis methods. 
type Error_Type is (-- Missing VC files (NOT zombiescope) Missing_SLG_File, -- SIV file, but no SLG file Missing_VLG_File, -- VCT file, but no VLG file -- Corrupt VC files (NOT zombiescope) Corrupt_VCG_File, -- The VCG file itself has a problem Corrupt_SIV_File, -- The SIV file was corrupted or malformed (simplifier) Corrupt_SLG_File, -- The SLG file was corrupted or malformed (simplifier log) Corrupt_VCT_File, -- The VCT file was corrupted or malformed (victor) Corrupt_VLG_File, -- The VLG file was corrupted or malformed (victor log) Corrupt_RSM_File, -- The RSM file was corrupted or malformed (riposte summary) Corrupt_PLG_File, -- The PLG file was corrupted or malformed (checker log) Corrupt_PRV_File, -- The PRV file was corrupted or malformed (manual review) -- Missing DPC files (zombiescope) -- Corrupt DPC files (zombiescope) Corrupt_DPC_File, -- The DPC file itself has a problem Corrupt_SDP_File -- The SDP file was corrupted or malformed (zombiescope) ); subtype Error_Type_Missing_VC_Files is Error_Type range Missing_SLG_File .. Missing_VLG_File; subtype Error_Type_Corrupt_VC_Files is Error_Type range Corrupt_VCG_File .. Corrupt_PRV_File; subtype Error_Type_Corrupt_DPC_Files is Error_Type range Corrupt_DPC_File .. Corrupt_SDP_File; -- The possible states of a VC are: -- * VC_Not_Present: There is no VC (the user has only generated dpc -- files); -- * VC_SIV_Not_Present: The Simplifier has not tried to discharge the VC. -- * VC_Undischarged: The VC is undischarged - the Simplifier has failed to -- discharge the VC; -- * VC_Proved_By_Examiner: VC was discharged by the Examiner; -- * VC_Proved_By_Inference: VC was discharged by the Simplifier and the -- proof is by inference; -- * VC_Proved_By_Contradiction: VC was discharged by the Simplifier -- and the proof is by contradiction; -- * VC_Proved_By_Checker: VC was discharged using the Checker; -- * VC_Proved_By_Review: VC was discharged through a proof review file; -- * VC_Proved_Using_User_Proof_Rules: VC was discharged using user proof -- rules; -- * VC_Proved_By_Victor: VC was discharged using Victor; -- * VC_Proved_By_Riposte: VC was discharged using Riposte; -- * VC_False: VC is false. type VC_State_T is ( VC_Not_Present, VC_SIV_Not_Present, VC_Undischarged, VC_Proved_By_Examiner, VC_Proved_By_Inference, VC_Proved_By_Contradiction, VC_Proved_Using_User_Proof_Rules, VC_Proved_By_Victor, VC_Proved_By_Riposte, VC_Proved_By_Checker, VC_Proved_By_Review, VC_False); -- The possible states of a DPC are: -- * DPC_Not_Present: There is no DPC (the user has only generated -- VCG files). -- * DPC_SDP_Not_Present: No SDP is present for DPC. -- * DPC_Unchecked: The Examiner generated a DPC that does not require -- ZombieScope to check for dead paths - e.g. RTCs -- and asserts. -- * DPC_Live: Path is not a dead path. -- * DPC_Dead: Path is a dead path. type DPC_State_T is (DPC_Not_Present, DPC_SDP_Not_Present, DPC_Unchecked, DPC_Live, DPC_Dead); End_Type_Image_Length : constant := 12; subtype End_Type_Image_Index is Positive range 1 ..
End_Type_Image_Length; subtype End_Type_Image_String is String (End_Type_Image_Index); type End_Type_To_Image_Array is array (Terminal_Point_Type) of End_Type_Image_String; End_Type_Image : constant End_Type_To_Image_Array := End_Type_To_Image_Array' (Assert_Point => " assert @ ", Precondition_Check_Point => "pre check @ ", Check_Statement_Point => "check stm @ ", Runtime_Check_Point => "rtc check @ ", Refinement_VC_Point => "refinement ", Inheritance_VC_Point => "inheritance ", Undetermined_Point => " "); type Data_Type is private; procedure Add (Details : in out Data_Type; Index : out HeapIndex.IndexType; Success : out Boolean; Name : in E_Strings.T; Path_Start : in E_Strings.T; Path_End : in E_Strings.T; End_Type : in Terminal_Point_Type; VC_State : in VC_State_T; DPC_State : in DPC_State_T); --# derives Details from *, --# DPC_State, --# End_Type, --# Name, --# Path_End, --# Path_Start, --# VC_State & --# Index, --# Success from Details; procedure Initialize (Details : out Data_Type); --# derives Details from ; procedure Raise_Error (Error_Kind : in Error_Type; Details : in out Data_Type); --# derives Details from *, --# Error_Kind; function Error_Raised (Error_Kind : in Error_Type; Details : in Data_Type) return Boolean; procedure Set_VC_State (Details : in out Data_Type; Index : in HeapIndex.IndexType; VC_State : in VC_State_T); --# derives Details from *, --# Index, --# VC_State; function Get_VC_State (Details : in Data_Type; Index : in HeapIndex.IndexType) return VC_State_T; procedure Set_DPC_State (Details : in out Data_Type; Index : in HeapIndex.IndexType; DPC_State : in DPC_State_T); --# derives Details from *, --# DPC_State, --# Index; function Get_DPC_State (Details : in Data_Type; Index : in HeapIndex.IndexType) return DPC_State_T; -------------------------------------------------------------------------- -- this compares the information given and returns -- Result is as defined in section 5.4 of the specification -- it works directly on values rather than on indices into the Details -- structure so that information can be compared before insertion -- NOTE : the procedure is successful iff neither Type is Invalid -------------------------------------------------------------------------- procedure Order (First_Name : in E_Strings.T; Second_Name : in E_Strings.T; Result : out E_Strings.Order_Types); --# derives Result from First_Name, --# Second_Name; procedure Retrieve (Details : in Data_Type; Index : in HeapIndex.IndexType; Success : out Boolean; Name : out E_Strings.T; Path_Start : out E_Strings.T; Path_End : out E_Strings.T; End_Type : out Terminal_Point_Type; VC_State : out VC_State_T; DPC_State : out DPC_State_T); --# derives DPC_State, --# End_Type, --# Name, --# Path_End, --# Path_Start, --# Success, --# VC_State from Details, --# Index; -------------------------------------------------------------------------- -- Path_End_To_Path_Type -- -- Parses a Path End string to convert it into a Terminal Point Type -- The basic patterns scanned for are: -- ... to check ... -> check statement point -- ... to run-time check ... -> run-time check point -- ... to precondition check ... -> precondition check point -- ... to assertion ... -> assertion point -- ... refinement ... -> refinement VC point -- ... inheritance ... -> inheritance VC point -- ... anything else ... 
-> undetermined point -------------------------------------------------------------------------- function Path_End_To_Path_Type (Line : E_Strings.T) return Terminal_Point_Type; function End_Point_Type (Details : in Data_Type; Index : in HeapIndex.IndexType) return Terminal_Point_Type; private type Details_Element is record Name : E_Strings.T; Path_Start : E_Strings.T; Path_End : E_Strings.T; End_Type : Terminal_Point_Type; VC_State : VC_State_T; DPC_State : DPC_State_T; end record; Null_Details_Element : constant Details_Element := Details_Element' (Name => E_Strings.Empty_String, Path_Start => E_Strings.Empty_String, Path_End => E_Strings.Empty_String, End_Type => Undetermined_Point, VC_State => VC_Not_Present, DPC_State => DPC_Not_Present); type Elements_Array is array (HeapIndex.IndexType) of Details_Element; -- Use an array to store the different error kinds that may be associated -- with a subprogram. This construct may be extended by adding a new enumerated -- type, without having to extend the subprogram interfaces. type Error_Array is array (Error_Type) of Boolean; -- High_Mark is the number of the highest allocated atom - zero if the -- structure is empty -- Error_Status records different kinds of errors that may be encountered -- during analysis. type Data_Type is record Details : Elements_Array; High_Mark : HeapIndex.IndexType; Error_Status : Error_Array; end record; end VCDetails; spark-2012.0.deb/pogs/vcg/0000755000175000017500000000000011753203756014177 5ustar eugeneugenspark-2012.0.deb/pogs/vcdetails.adb0000644000175000017500000002547711753202340016051 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Package providing a structure in which to store VC details. 
-- -- -- --To be used in tandem with the Heap data structure, hence the use of -- --Heap.Atom as the array range -- -------------------------------------------------------------------------------- package body VCDetails is procedure Add (Details : in out Data_Type; Index : out HeapIndex.IndexType; Success : out Boolean; Name : in E_Strings.T; Path_Start : in E_Strings.T; Path_End : in E_Strings.T; End_Type : in Terminal_Point_Type; VC_State : in VC_State_T; DPC_State : in DPC_State_T) is begin if Details.High_Mark < HeapIndex.IndexType'Last then Success := True; Details.High_Mark := Details.High_Mark + 1; Index := Details.High_Mark; Details.Details (Details.High_Mark) := Details_Element'(Name, Path_Start, Path_End, End_Type, VC_State, DPC_State); else Success := False; Index := 0; end if; end Add; -------------------------------------------------------------------------- procedure Initialize (Details : out Data_Type) is begin -- Only set HighMark here. Initializing the whole array is -- unnecessary here, and is VERY SLOW on machines with limited -- RAM likes VAXes, where the initialization causes massive -- VM thrashing. Details.High_Mark := 0; -- Also set that no error has been seen. Details.Error_Status := Error_Array'(others => False); --# accept F, 31, Details.Details, "Partial initialization" & --# F, 32, Details.Details, "Partial initialization" & --# F, 602, Details, Details.Details, "Partial initialization"; end Initialize; ------------------------------------------------------------------------- procedure Raise_Error (Error_Kind : in Error_Type; Details : in out Data_Type) is begin Details.Error_Status (Error_Kind) := True; end Raise_Error; ------------------------------------------------------------------------- function Error_Raised (Error_Kind : in Error_Type; Details : in Data_Type) return Boolean is begin return Details.Error_Status (Error_Kind); end Error_Raised; -------------------------------------------------------------------------- procedure Set_VC_State (Details : in out Data_Type; Index : in HeapIndex.IndexType; VC_State : in VC_State_T) is begin Details.Details (Index).VC_State := VC_State; end Set_VC_State; -------------------------------------------------------------------------- function Get_VC_State (Details : in Data_Type; Index : in HeapIndex.IndexType) return VC_State_T is begin return Details.Details (Index).VC_State; end Get_VC_State; -------------------------------------------------------------------------- procedure Set_DPC_State (Details : in out Data_Type; Index : in HeapIndex.IndexType; DPC_State : in DPC_State_T) is begin Details.Details (Index).DPC_State := DPC_State; end Set_DPC_State; -------------------------------------------------------------------------- function Get_DPC_State (Details : in Data_Type; Index : in HeapIndex.IndexType) return DPC_State_T is begin return Details.Details (Index).DPC_State; end Get_DPC_State; ------------------------------------------------------------------------- procedure Order (First_Name : in E_Strings.T; Second_Name : in E_Strings.T; Result : out E_Strings.Order_Types) is begin -- check which name comes first if E_Strings.Get_Length (E_Str => First_Name) = E_Strings.Get_Length (E_Str => Second_Name) then Result := E_Strings.Lex_Order (First_Name => First_Name, Second_Name => Second_Name); elsif E_Strings.Get_Length (E_Str => First_Name) < E_Strings.Get_Length (E_Str => Second_Name) then Result := E_Strings.First_One_First; else Result := E_Strings.Second_One_First; end if; end Order; 
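   -- Illustrative note only (the VC names here are hypothetical): because a
   -- shorter name is always ordered before a longer one, and only names of
   -- equal length are compared lexicographically, "procedure_foo_2" comes
   -- before "procedure_foo_10"; a plain lexicographic order would reverse
   -- them, so this scheme keeps the VCs of one subprogram in numeric order.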
-------------------------------------------------------------------------- procedure Retrieve (Details : in Data_Type; Index : in HeapIndex.IndexType; Success : out Boolean; Name : out E_Strings.T; Path_Start : out E_Strings.T; Path_End : out E_Strings.T; End_Type : out Terminal_Point_Type; VC_State : out VC_State_T; DPC_State : out DPC_State_T) is begin if Index <= Details.High_Mark and Index /= 0 then Success := True; Name := Details.Details (Index).Name; Path_Start := Details.Details (Index).Path_Start; Path_End := Details.Details (Index).Path_End; End_Type := Details.Details (Index).End_Type; VC_State := Details.Details (Index).VC_State; DPC_State := Details.Details (Index).DPC_State; else Success := False; Name := E_Strings.Empty_String; Path_Start := E_Strings.Empty_String; Path_End := E_Strings.Empty_String; End_Type := Undetermined_Point; VC_State := VC_Not_Present; DPC_State := DPC_Not_Present; end if; end Retrieve; -------------------------------------------------------------------------- function Path_End_To_Path_Type (Line : E_Strings.T) return Terminal_Point_Type is Dummy_Position : E_Strings.Positions; End_Position : E_Strings.Positions; Point_Type : Terminal_Point_Type; Refinement_Found : Boolean; Inheritance_Found : Boolean; To_Found : Boolean; Check_Found : Boolean; Assert_Found : Boolean; Finish_Found : Boolean; Runtime_Check_Found : Boolean; Precondition_Check_Found : Boolean; begin --# accept F, 10, Dummy_Position, "Dummy_Position unused here"; E_Strings.Find_Sub_String (E_Str => Line, Search_String => "inheritance", String_Found => Inheritance_Found, String_Start => Dummy_Position); E_Strings.Find_Sub_String (E_Str => Line, Search_String => "refinement", String_Found => Refinement_Found, String_Start => Dummy_Position); --# accept F, 10, To_Found, "To_Found unused here"; E_Strings.Find_Sub_String (E_Str => Line, Search_String => " to ", String_Found => To_Found, String_Start => End_Position); --# end accept; E_Strings.Find_Sub_String_After (E_Str => Line, Search_Start => End_Position, Search_String => "check", String_Found => Check_Found, String_Start => Dummy_Position); if Inheritance_Found then Point_Type := Inheritance_VC_Point; elsif Refinement_Found then Point_Type := Refinement_VC_Point; elsif Check_Found then E_Strings.Find_Sub_String_After (E_Str => Line, Search_Start => End_Position, Search_String => "precondition", String_Found => Precondition_Check_Found, String_Start => Dummy_Position); E_Strings.Find_Sub_String_After (E_Str => Line, Search_Start => End_Position, Search_String => "run-time", String_Found => Runtime_Check_Found, String_Start => Dummy_Position); if Precondition_Check_Found then Point_Type := Precondition_Check_Point; elsif Runtime_Check_Found then Point_Type := Runtime_Check_Point; else Point_Type := Check_Statement_Point; end if; else E_Strings.Find_Sub_String_After (E_Str => Line, Search_Start => End_Position, Search_String => "assert", String_Found => Assert_Found, String_Start => Dummy_Position); E_Strings.Find_Sub_String_After (E_Str => Line, Search_Start => End_Position, Search_String => "finish", String_Found => Finish_Found, String_Start => Dummy_Position); if Assert_Found or Finish_Found then Point_Type := Assert_Point; else Point_Type := Undetermined_Point; end if; end if; --# end accept; --# accept F, 33, Dummy_Position, "Dummy_Position unused here" & --# F, 33, To_Found, "To_Found unused here"; return Point_Type; end Path_End_To_Path_Type; -------------------------------------------------------------------------- function 
End_Point_Type (Details : in Data_Type; Index : in HeapIndex.IndexType) return Terminal_Point_Type is Result : Terminal_Point_Type; begin if Index <= Details.High_Mark and Index /= 0 then Result := Details.Details (Index).End_Type; else Result := Undetermined_Point; end if; return Result; end End_Point_Type; end VCDetails; spark-2012.0.deb/pogs/toppackage.ads0000644000175000017500000000636111753202340016221 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Top level package for the summary tool -- --The main program is simply a call to the procedure MainProcedure wrapped in -- --an execption handler to deal with the exceptions raised by FatalErrors -- -- -- -------------------------------------------------------------------------------- --# inherit Banner, --# CommandLine, --# E_Strings, --# FatalErrors, --# FileDetails, --# FileHeap, --# FindFiles, --# Heap, --# OSCommandLine, --# OSFiling, --# SPARK_IO, --# Total, --# VCHeap, --# VCS; package TopPackage is procedure Main_Procedure; --# global in OSCommandLine.State; --# in OSFiling.File_Structure; --# in out FatalErrors.State; --# in out FileHeap.State; --# in out SPARK_IO.File_Sys; --# in out Total.State; --# out CommandLine.Data; --# out VCHeap.I_State; --# out VCHeap.State; --# derives CommandLine.Data from OSCommandLine.State, --# OSFiling.File_Structure & --# FatalErrors.State, --# Total.State from *, --# OSCommandLine.State, --# OSFiling.File_Structure, --# SPARK_IO.File_Sys & --# FileHeap.State from *, --# OSCommandLine.State, --# OSFiling.File_Structure & --# SPARK_IO.File_Sys from *, --# OSCommandLine.State, --# OSFiling.File_Structure, --# Total.State & --# VCHeap.I_State, --# VCHeap.State from OSCommandLine.State, --# OSFiling.File_Structure, --# SPARK_IO.File_Sys; end TopPackage; spark-2012.0.deb/pogs/vcs-analyse_summary_dp_file.adb0000644000175000017500000003604011753202340021543 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Procedure to analyse a .SPD file -- -- -- -------------------------------------------------------------------------------- separate (VCS) procedure Analyse_Summary_DP_File (Report_File : in SPARK_IO.File_Type; Filename : in E_Strings.T; DPC_File_Date_Time : in E_Strings.T; Error_In_SDP_File : out Boolean) is Bad_File_Format : Boolean := False; Dummy_Close_Status : SPARK_IO.File_Status; File_Line : E_Strings.T; Finished_With_File : Boolean; Open_Status : SPARK_IO.File_Status; Process_Success : Boolean; Read_Line_Success : Boolean; Summary_DP_File : SPARK_IO.File_Type := SPARK_IO.Null_File; File_Status : File_Status_T; Summary_Date_Time : E_Strings.T; DP_Generation_Date_Time_From_SDP_File : E_Strings.T; Trimmed_Line : E_Strings.T; Current_DPC_Name : E_Strings.T; ------------------------------------------------------------------------- -- NOTE, this procedure also removes the comma inserted in the string -- by the simplifier procedure Extract_Dates_And_Times_From_Summary_DPC_File (Summary_DP_File : in SPARK_IO.File_Type; DPC_Generation_Date_Time : out E_Strings.T; SDP_Date_Time : out E_Strings.T; File_Status : out File_Status_T) --# global in out SPARK_IO.File_Sys; --# derives DPC_Generation_Date_Time, --# File_Status, --# SDP_Date_Time, --# SPARK_IO.File_Sys from SPARK_IO.File_Sys, --# Summary_DP_File; is File_Line : E_Strings.T; Trimmed_Line : E_Strings.T; Gen_Date_Time : E_Strings.T; Simp_Date_Time : E_Strings.T; Subprogram_Found : Boolean := False; begin File_Status := Not_Corrupt; DPC_Generation_Date_Time := E_Strings.Empty_String; SDP_Date_Time := E_Strings.Empty_String; --Check for completly empty file. E_Strings.Get_Line (File => Summary_DP_File, E_Str => File_Line); if E_Strings.Eq1_String (E_Str => File_Line, Str => "") and SPARK_IO.End_Of_File (Summary_DP_File) then File_Status := Corrupt_Empty_File; else --Keep on reading from this file, until the desired information is retrieved --or the end of the file is reached. loop Trimmed_Line := E_Strings.Trim (File_Line); -- find date -- (There is an implicit assumption that the date, if present, will appear -- before the subprogram name.) -- When the Examiner is in plain_output mode, the DATE line doesn't appear. 
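   -- As an illustration of the extraction performed below (the concrete column
   -- positions come from the SDP_File_* constants declared elsewhere in VCS, and
   -- the date and time values here are invented): a generation date of
   -- 29-JUN-2012 and a generation time of 10:15:00 are cut out of the CREATED
   -- line as two separate E_Strings.Section slices and re-joined with a single
   -- space, giving the stamp "29-JUN-2012 10:15:00". The simplification date and
   -- time are handled in the same way, which appears to be how the comma
   -- mentioned in the NOTE above is dropped from the stored stamps.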
if E_Strings.Eq1_String (E_Str => E_Strings.Section (Trimmed_Line, 1, 7), Str => "CREATED") then -- extract the VC generation date and time from the string Gen_Date_Time := E_Strings.Section (Trimmed_Line, SDP_File_VC_Generation_Date_Start_Column, SDP_File_VC_Generation_Date_Length); E_Strings.Append_String (E_Str => Gen_Date_Time, Str => " "); E_Strings.Append_Examiner_String (E_Str1 => Gen_Date_Time, E_Str2 => E_Strings.Section (Trimmed_Line, SDP_File_VC_Generation_Time_Start_Column, SDP_File_VC_Generation_Time_Length)); DPC_Generation_Date_Time := Gen_Date_Time; -- extract the simplification date and time from the string Simp_Date_Time := E_Strings.Section (Trimmed_Line, SDP_File_Simplification_Date_Start_Column, SDP_File_Simplification_Date_Length); E_Strings.Append_String (E_Str => Simp_Date_Time, Str => " "); E_Strings.Append_Examiner_String (E_Str1 => Simp_Date_Time, E_Str2 => E_Strings.Section (Trimmed_Line, SDP_File_Simplification_Time_Start_Column, SDP_File_Simplification_Time_Length)); SDP_Date_Time := Simp_Date_Time; end if; Subprogram_Found := Is_Valid_Subprogram (Trimmed_Line); exit when (Subprogram_Found or SPARK_IO.End_Of_File (Summary_DP_File)); E_Strings.Get_Line (File => Summary_DP_File, E_Str => File_Line); end loop; end if; if (File_Status = Not_Corrupt) and not Subprogram_Found then File_Status := Corrupt_Unknown_Subprogram; end if; -- if date has not been found must be in plain output mode -- The above is a false assumption -- the file may just be corrupt. However, the -- effect below of setting the string as unknown date is reasonable for both cases. if E_Strings.Eq_String (DPC_Generation_Date_Time, E_Strings.Empty_String) then E_Strings.Append_String (E_Str => DPC_Generation_Date_Time, Str => Unknown_DPC_Date); E_Strings.Append_String (E_Str => SDP_Date_Time, Str => Unknown_SDP_Date); end if; end Extract_Dates_And_Times_From_Summary_DPC_File; ------------------------------------------------------------------------- -- look at the next non-blank line to see whether it starts -- "*** true". If so the VC has been discharged. Otherwise, increment -- the counter of undischarged VCs, and set the flag that an undischarged -- VC has been found procedure Process_New_Summary_DP_Line (Summary_DP_File : in SPARK_IO.File_Type; DPC_Name : in E_Strings.T; Success : out Boolean) --# global in out FatalErrors.State; --# in out SPARK_IO.File_Sys; --# in out VCHeap.State; --# derives FatalErrors.State, --# VCHeap.State from *, --# DPC_Name, --# SPARK_IO.File_Sys, --# Summary_DP_File, --# VCHeap.State & --# SPARK_IO.File_Sys, --# Success from SPARK_IO.File_Sys, --# Summary_DP_File; is File_Line : E_Strings.T; Read_Line_Success : Boolean; begin Read_Next_Non_Blank_Line (File => Summary_DP_File, Success => Read_Line_Success, File_Line => File_Line); if not Read_Line_Success then Success := False; else Success := True; if E_Strings.Eq1_String (E_Str => E_Strings.Section (File_Line, 1, 11), Str => "*** No dead") then VCHeap.Set_DPC_State (DPC_Name, VCDetails.DPC_Live); elsif E_Strings.Eq1_String (E_Str => E_Strings.Section (File_Line, 1, 8), Str => "*** Dead") then VCHeap.Set_DPC_State (DPC_Name, VCDetails.DPC_Dead); elsif E_Strings.Eq1_String -- Case when Examiner determined that no dead path search -- is required for this DPC. 
(E_Str => E_Strings.Section (File_Line, 1, 7), Str => "*** DPC") then VCHeap.Set_DPC_State (DPC_Name, VCDetails.DPC_Unchecked); else Success := False; end if; end if; end Process_New_Summary_DP_Line; -------------------------------------------------------------------------- begin -- Analyse_Summary_DPC_File -- open SDP file E_Strings.Open (File => Summary_DP_File, Mode_Of_File => SPARK_IO.In_File, Name_Of_File => Filename, Form_Of_File => "", Status => Open_Status); if Open_Status /= SPARK_IO.Ok then FatalErrors.Process (FatalErrors.Could_Not_Open_Input_File, E_Strings.Empty_String); end if; --No errors, until discover otherwise. Error_In_SDP_File := False; Extract_Dates_And_Times_From_Summary_DPC_File (Summary_DP_File => Summary_DP_File, DPC_Generation_Date_Time => DP_Generation_Date_Time_From_SDP_File, SDP_Date_Time => Summary_Date_Time, File_Status => File_Status); case File_Status is when Not_Corrupt => null; when Corrupt_Empty_File => SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "************* SDP file corrupt: empty file ************", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); Error_In_SDP_File := True; when Corrupt_Unknown_Subprogram => SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "************* SDP file corrupt: missing subprogram name ************", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); Error_In_SDP_File := True; end case; --Only continue working on this file if an error has not been seen. --(Previously POGS would attempt to work with corrupt files. This feature has the -- capacity to produce confusing and wrong results.) if not (Error_In_SDP_File) then if CommandLine.Data.IgnoreDates or else E_Strings.Eq_String (E_Str1 => DP_Generation_Date_Time_From_SDP_File, E_Str2 => DPC_File_Date_Time) then if not CommandLine.Data.IgnoreDates then SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "DPC ZombieScoped ", 0); E_Strings.Put_Line (File => Report_File, E_Str => Summary_Date_Time); end if; -- find first non blank line -- if we get to the end of the file first, flag a fatal error Read_Next_Non_Blank_Line (File => Summary_DP_File, Success => Read_Line_Success, File_Line => File_Line); if not Read_Line_Success then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "************* SDP file corrupt: no data beyond header ************", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); Bad_File_Format := True; else Finished_With_File := False; -- process file line-by-line -- on entry to the loop there is already a valid line in the -- FileLine buffer while not Finished_With_File loop -- examine line and act accordingly if Is_New_Range_Line (Line => File_Line) then Append_Next_Line_From_File (Line => File_Line, File => Summary_DP_File); elsif Is_New_VC_Line (Line => File_Line) then Trimmed_Line := E_Strings.Trim (File_Line); Current_DPC_Name := E_Strings.Section (E_Str => Trimmed_Line, Start_Pos => 1, Length => E_Strings.Get_Length (E_Str => Trimmed_Line) - 1); Process_New_Summary_DP_Line (Summary_DP_File => Summary_DP_File, DPC_Name => Current_DPC_Name, Success => Process_Success); if not Process_Success then SPARK_IO.Put_String (Report_File, "*** Warning: Bad format in summary DP file ***", 0); Finished_With_File := True; Bad_File_Format := True; end if; end if; if not Finished_With_File then -- read next line Read_Next_Non_Blank_Line (File => Summary_DP_File, Success => Read_Line_Success, File_Line => File_Line); -- if unsuccessful then check EOF -- and set FinishedWithFile accordingly if not Read_Line_Success then if SPARK_IO.End_Of_File 
(Summary_DP_File) then Finished_With_File := True; else FatalErrors.Process (FatalErrors.Problem_Reading_File, E_Strings.Empty_String); end if; end if; end if; end loop; end if; else -- SDP file is out of date SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_Line (Report_File, "*** Warning: Summary DP file out of date ***", 0); SPARK_IO.Put_String (Report_File, "DPCs Generated: ", 0); E_Strings.Put_String (File => Report_File, E_Str => DPC_File_Date_Time); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "SDP File Date: ", 0); E_Strings.Put_String (File => Report_File, E_Str => DP_Generation_Date_Time_From_SDP_File); SPARK_IO.New_Line (Report_File, 1); Bad_File_Format := True; end if; end if; --# accept F, 10, Dummy_Close_Status, "DummyCloseStatus unused here" & --# F, 10, Summary_DP_File, "Summary_DP_File unused here"; SPARK_IO.Close (Summary_DP_File, Dummy_Close_Status); --# end accept; --Either an error being raised, or a 'BadFileFormat' --being detected is an error. Error_In_SDP_File := Error_In_SDP_File or Bad_File_Format; --# accept F, 33, Dummy_Close_Status, "DummyCloseStatus unused here"; end Analyse_Summary_DP_File; spark-2012.0.deb/pogs/vcs-analysereviewfile.adb0000644000175000017500000004531711753202340020375 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Procedure to analyse a .PRV file -- -- -- --Grammar for .PRV files: -- --PRVfile ::= {Line} -- --Line ::= [VCNumber][Comment]end_of_line_character -- --VCNumber ::= digit{digit} -- --Comment ::= --{non_end_of_line_character} -- -- -- --The proof review file (prv) gives the user an opportunity to declare that -- --a VC has been proved by inspection. The VC numbers proved can be placed one -- --per line with Ada style comments. -- -------------------------------------------------------------------------------- separate (VCS) procedure AnalyseReviewFile (Report_File : in SPARK_IO.File_Type; Filename : in E_Strings.T; Errors : out Review_Errors) is -- Each line in the PRV file will be categorised as one of the following: -- A VCNumber line will contain a valid VC number and may contain a comment. -- A Comment_Line is a line with a comment and nothing else. -- Any other line is deemed an Invalid_Line. type PRV_File_Line_Type is (VC_Number_Line, Comment_Line, Invalid_Line); -- These are the errors that will be reported to the user via the report file. 
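   -- For illustration only (the VC numbers and comments below are invented, not
   -- taken from any real analysis), a review file conforming to the grammar in
   -- the synopsis above might contain:
   --
   --      -- VCs accepted at review meeting 42
   --      3    -- proved by inspection of the loop invariant
   --      7
   --      3a   -- rejected: the text after the digits is not a comment
   --
   -- The first line is a Comment_Line, the next two are VC_Number_Lines (the
   -- trailing comment is optional) and the last is an Invalid_Line. An accepted
   -- number N is looked up in the VC heap under the name
   -- VCHeap.Get_VC_Name_Prefix & "_" & N; any problem found while doing so is
   -- reported as one of the errors enumerated next.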
type Error is ( VC_Syntax_Error, VC_Does_Not_Exist, VC_Proved_By_Examiner, VC_Proved_By_Simplifier, VC_Proved_By_Checker, VC_Duplicated); -- The .prv file. Review_File : SPARK_IO.File_Type := SPARK_IO.Null_File; Dummy_Close_Status : SPARK_IO.File_Status; Open_Status : SPARK_IO.File_Status; Finished_With_File : Boolean; Read_Line_Success : Boolean; The_Line_Type : PRV_File_Line_Type; File_Line : E_Strings.T; VC_Name : E_Strings.T; The_Result : E_Strings.T; Tmp_Errors : Review_Errors; -- Reads the given (non blank) line. -- The_Line_Type categorises the line. -- The_Result is set to: -- o The VC number found on the line if the line is a VC_Number_Line. -- o Null string is the line is a Comment_Line. -- o The entire line if the line is an Invalid_Line. procedure Process_Line (The_Line : in E_Strings.T; The_Line_Type : out PRV_File_Line_Type; The_Result : out E_Strings.T) --# derives The_Line_Type, --# The_Result from The_Line; is subtype Digit is Character range '0' .. '9'; subtype Two_Char_String_Index is Positive range 1 .. 2; subtype Two_Char_String is String (Two_Char_String_Index); Comment : constant Two_Char_String := "--"; Index : E_Strings.Positions; The_Char : Character; Finished : Boolean; begin The_Line_Type := Invalid_Line; Index := 1; Finished := False; while not Finished and then Index <= E_Strings.Get_Length (E_Str => The_Line) loop The_Char := E_Strings.Get_Element (E_Str => The_Line, Pos => Index); if The_Char in Digit then -- We found a number. Keep looking until a non digit is found. The_Line_Type := VC_Number_Line; elsif E_Strings.Eq1_String -- If the next characters (ignoring spaces) constitute a comment then we're done. (E_Str => E_Strings.Section (E_Strings.Trim (E_Strings.Section (E_Str => The_Line, Start_Pos => Index, Length => (E_Strings.Get_Length (E_Str => The_Line) - Index) + 1)), 1, Comment'Length), Str => Comment) then -- This is a comment line unless we already found a VC number in which case -- it's a VCNumber line as set above. if The_Line_Type /= VC_Number_Line then The_Line_Type := Comment_Line; end if; Finished := True; else -- This line is invalid as it contains neither a VC number or a comment. The_Line_Type := Invalid_Line; Finished := True; end if; if not Finished then Index := Index + 1; end if; end loop; case The_Line_Type is when VC_Number_Line => The_Result := E_Strings.Section (E_Str => The_Line, Start_Pos => 1, Length => Index - 1); when Comment_Line => The_Result := E_Strings.Empty_String; when Invalid_Line => The_Result := The_Line; end case; end Process_Line; procedure Init_Review_Errors --# global out Tmp_Errors; --# derives Tmp_Errors from ; is begin Tmp_Errors := Review_Errors' (Errors => False, Error_List => Errors_List'(others => E_Strings.Empty_String), Last_Error => Errors_Index'First, Excess_Count => 0); end Init_Review_Errors; -- Reports The_Error to the report file. The_Details are used to give -- further information to help track down the offending line in the file. procedure Report_Error (The_Error : in Error; The_Details : in E_Strings.T) --# global in Report_File; --# in out SPARK_IO.File_Sys; --# in out Tmp_Errors; --# derives SPARK_IO.File_Sys from *, --# Report_File, --# The_Details, --# The_Error & --# Tmp_Errors from *, --# The_Details, --# The_Error; is -- The max number of characters output for a line containing a syntax error. 
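      -- (Illustrative: with the limit of 40 declared just below, a 60-character
      -- offending line appears in the report between quotation marks as its first
      -- 40 characters followed by "...", while shorter lines are echoed in full.)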
Max_Error_Length : constant := 40; procedure Record_Error (The_Error : in Error; The_Details : in E_Strings.T) --# global in out Tmp_Errors; --# derives Tmp_Errors from *, --# The_Details, --# The_Error; is Tmp_Message : E_Strings.T; procedure Add_Review_Error (Err : in E_Strings.T) --# global in out Tmp_Errors; --# derives Tmp_Errors from *, --# Err; is begin -- Is this the first time round? if not Tmp_Errors.Errors then Tmp_Errors.Last_Error := Errors_Index'First; Tmp_Errors.Errors := True; -- Close the guard. end if; -- Check if there is any room left if Tmp_Errors.Last_Error = Errors_Index'Last then -- FULL: increment the ExcessCount Tmp_Errors.Excess_Count := Tmp_Errors.Excess_Count + 1; else -- Otherwise, increment the end-of-list point Tmp_Errors.Last_Error := Tmp_Errors.Last_Error + 1; end if; if Tmp_Errors.Excess_Count = 0 then -- There was some room left Tmp_Errors.Error_List (Tmp_Errors.Last_Error) := Err; -- Store the error. end if; end Add_Review_Error; begin case The_Error is when VC_Syntax_Error => Tmp_Message := E_Strings.Copy_String (Str => "Syntax error: "); if E_Strings.Get_Length (E_Str => The_Details) > Max_Error_Length then E_Strings.Append_Examiner_String (E_Str1 => Tmp_Message, E_Str2 => E_Strings.Section (The_Details, E_Strings.Positions'First, Max_Error_Length)); else E_Strings.Append_Examiner_String (E_Str1 => Tmp_Message, E_Str2 => The_Details); end if; when VC_Does_Not_Exist => Tmp_Message := E_Strings.Copy_String (Str => "Warning: VC "); E_Strings.Append_Examiner_String (E_Str1 => Tmp_Message, E_Str2 => The_Details); E_Strings.Append_String (E_Str => Tmp_Message, Str => " not recognised"); when VC_Proved_By_Examiner => Tmp_Message := E_Strings.Copy_String (Str => "Warning: VC "); E_Strings.Append_Examiner_String (E_Str1 => Tmp_Message, E_Str2 => The_Details); E_Strings.Append_String (E_Str => Tmp_Message, Str => " has been proved by the examiner"); when VC_Proved_By_Simplifier => Tmp_Message := E_Strings.Copy_String (Str => "Warning: VC "); E_Strings.Append_Examiner_String (E_Str1 => Tmp_Message, E_Str2 => The_Details); E_Strings.Append_String (E_Str => Tmp_Message, Str => " has been proved by the simplifier"); when VC_Proved_By_Checker => Tmp_Message := E_Strings.Copy_String (Str => "Warning: VC "); E_Strings.Append_Examiner_String (E_Str1 => Tmp_Message, E_Str2 => The_Details); E_Strings.Append_String (E_Str => Tmp_Message, Str => " has been proved by the checker"); when VC_Duplicated => Tmp_Message := E_Strings.Copy_String (Str => "Warning: VC "); E_Strings.Append_Examiner_String (E_Str1 => Tmp_Message, E_Str2 => The_Details); E_Strings.Append_String (E_Str => Tmp_Message, Str => " has been duplicated"); end case; Add_Review_Error (Err => Tmp_Message); end Record_Error; begin Record_Error (The_Error => The_Error, The_Details => The_Details); case The_Error is when VC_Syntax_Error => SPARK_IO.Put_String (Report_File, "*** Error: The following line in the proof review file contains a syntax error: ***", 0); SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_String (Report_File, "*** """, 0); if E_Strings.Get_Length (E_Str => The_Details) > Max_Error_Length then E_Strings.Put_String (File => Report_File, E_Str => E_Strings.Section (E_Str => The_Details, Start_Pos => 1, Length => Max_Error_Length)); SPARK_IO.Put_String (Report_File, "...", 0); else E_Strings.Put_String (File => Report_File, E_Str => The_Details); end if; SPARK_IO.Put_String (Report_File, """", 0); SPARK_IO.New_Line (Report_File, 1); when VC_Does_Not_Exist => SPARK_IO.Put_String (Report_File, "*** 
Warning: VC ", 0); E_Strings.Put_String (File => Report_File, E_Str => The_Details); SPARK_IO.Put_String (Report_File, " in proof review file is not recognised ***", 0); SPARK_IO.New_Line (Report_File, 1); when VC_Proved_By_Examiner => SPARK_IO.Put_String (Report_File, "*** Warning: VC ", 0); E_Strings.Put_String (File => Report_File, E_Str => The_Details); SPARK_IO.Put_String (Report_File, " in proof review file has been proved by the examiner ***", 0); SPARK_IO.New_Line (Report_File, 1); when VC_Proved_By_Simplifier => SPARK_IO.Put_String (Report_File, "*** Warning: VC ", 0); E_Strings.Put_String (File => Report_File, E_Str => The_Details); SPARK_IO.Put_String (Report_File, " in proof review file has been proved by the simplifier ***", 0); SPARK_IO.New_Line (Report_File, 1); when VC_Proved_By_Checker => SPARK_IO.Put_String (Report_File, "*** Warning: VC ", 0); E_Strings.Put_String (File => Report_File, E_Str => The_Details); SPARK_IO.Put_String (Report_File, " in proof review file has been proved by the checker ***", 0); SPARK_IO.New_Line (Report_File, 1); when VC_Duplicated => SPARK_IO.Put_String (Report_File, "*** Warning: VC ", 0); E_Strings.Put_String (File => Report_File, E_Str => The_Details); SPARK_IO.Put_String (Report_File, " in proof review file has been duplicated ***", 0); SPARK_IO.New_Line (Report_File, 1); end case; end Report_Error; begin -- AnalyseReviewFile -- open review file E_Strings.Open (File => Review_File, Mode_Of_File => SPARK_IO.In_File, Name_Of_File => Filename, Form_Of_File => "", Status => Open_Status); -- Initialise the error record stack Init_Review_Errors; if Open_Status /= SPARK_IO.Ok then FatalErrors.Process (FatalErrors.Could_Not_Open_Input_File, E_Strings.Empty_String); end if; -- Find first non blank line if we get to the end of the file first, -- flag a fatal error Read_Next_Non_Blank_Line (File => Review_File, Success => Read_Line_Success, File_Line => File_Line); if Read_Line_Success then Finished_With_File := False; -- Process file line-by-line. 
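      -- In outline, each pass classifies the current line with Process_Line: a
      -- VC_Number_Line either marks the named VC as proved by review or produces
      -- one of the warnings above (VC not recognised, VC already proved, or a
      -- duplicate review entry); a Comment_Line is ignored; an Invalid_Line is
      -- reported as a syntax error.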
-- On entry to the loop there is already a valid line in the File_Line buffer while not Finished_With_File loop -- Get the first token on the line and act accordingly Process_Line (The_Line => File_Line, The_Line_Type => The_Line_Type, The_Result => The_Result); case The_Line_Type is when VC_Number_Line => VC_Name := VCHeap.Get_VC_Name_Prefix; E_Strings.Append_String (E_Str => VC_Name, Str => "_"); E_Strings.Append_Examiner_String (E_Str1 => VC_Name, E_Str2 => The_Result); if not VCHeap.Exists (VC_Name) then Report_Error (The_Error => VC_Does_Not_Exist, The_Details => The_Result); elsif VCHeap.Get_VC_State (VC_Name) = VCDetails.VC_Proved_By_Examiner then Report_Error (The_Error => VC_Proved_By_Examiner, The_Details => The_Result); elsif VCHeap.Get_VC_State (VC_Name) = VCDetails.VC_Proved_By_Inference or VCHeap.Get_VC_State (VC_Name) = VCDetails.VC_Proved_By_Contradiction or VCHeap.Get_VC_State (VC_Name) = VCDetails.VC_Proved_Using_User_Proof_Rules then Report_Error (The_Error => VC_Proved_By_Simplifier, The_Details => The_Result); elsif VCHeap.Get_VC_State (VC_Name) = VCDetails.VC_Proved_By_Checker then Report_Error (The_Error => VC_Proved_By_Checker, The_Details => The_Result); elsif VCHeap.Get_VC_State (VC_Name) = VCDetails.VC_Proved_By_Review then Report_Error (The_Error => VC_Duplicated, The_Details => The_Result); else -- Mark VC as proved by review VCHeap.Set_VC_State (VC_Name, VCDetails.VC_Proved_By_Review); end if; when Comment_Line => null; when Invalid_Line => Report_Error (The_Error => VC_Syntax_Error, The_Details => The_Result); end case; if not Finished_With_File then -- Read next line Read_Next_Non_Blank_Line (File => Review_File, Success => Read_Line_Success, File_Line => File_Line); -- If unsuccessful then check EOF and set Finished_With_File accordingly if not Read_Line_Success then if SPARK_IO.End_Of_File (Review_File) then Finished_With_File := True; else FatalErrors.Process (FatalErrors.Problem_Reading_File, E_Strings.Empty_String); end if; end if; end if; end loop; else SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "************* Review file empty ************", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); end if; --# accept F, 10, Dummy_Close_Status, "Dummy_Close_Status unused here" & --# F, 10, Review_File, "Review_File unused here"; SPARK_IO.Close (Review_File, Dummy_Close_Status); --# end accept; Errors := Tmp_Errors; --# accept F, 33, Dummy_Close_Status, "Dummy_Close_Status unused here"; end AnalyseReviewFile; spark-2012.0.deb/pogs/banner-datetime.adb0000644000175000017500000002442211753202340017117 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Format the current date and time in the form: -- --24-DEC-1988 12:00:00.00 -- -- -- --Separate of Banner package -- -------------------------------------------------------------------------------- with Calendar; separate (Banner) procedure DateTime (DateString : out TypDateTime) is --# hide DateTime; -- -- FUNCTION : -- -- Returns a string indicating the current date and time as a 23 character -- string in the format 24-DEC-1988 12:00:00.00 -- -- OPERATION : -- -- Uses the standard package CALENDAR. -- MonthsInYear : constant := 12; DaysInMonth : constant := 31; HoursInDay : constant := 24; MinutesInHour : constant := 60; SecondsInMinute : constant := 60; HundredthsInSecond : constant := 100; type YearsType is range 1991 .. 2099; --# assert YearsType'Base is Short_Integer; type MonthsType is range 1 .. MonthsInYear; --# assert MonthsType'Base is Short_Short_Integer; type DaysType is range 1 .. DaysInMonth; --# assert DaysType'Base is Short_Short_Integer; type HoursType is range 0 .. HoursInDay - 1; --# assert HoursType'Base is Short_Short_Integer; type MinutesType is range 0 .. MinutesInHour - 1; --# assert MinutesType'Base is Short_Short_Integer; type SecondsType is range 0 .. SecondsInMinute - 1; --# assert SecondsType'Base is Short_Short_Integer; type HundredthsType is range 0 .. HundredthsInSecond - 1; --# assert HundredthsType'Base is Short_Short_Integer; subtype Digit is Natural range 0 .. 9; Year : YearsType; Month : MonthsType; Day : DaysType; Hours : HoursType; Minutes : MinutesType; Seconds : SecondsType; Hundredths : HundredthsType; procedure DecodeTime (Time : in Calendar.Time; Year : out YearsType; Month : out MonthsType; Day : out DaysType; Hours : out HoursType; Minutes : out MinutesType; Seconds : out SecondsType; Hundredths : out HundredthsType) is SystemTime : Duration; procedure DecodeDuration (Time : in Duration; Hours : out HoursType; Minutes : out MinutesType; Seconds : out SecondsType; Hundredths : out HundredthsType) is type TimeOfDay is range 0 .. HoursInDay * MinutesInHour * SecondsInMinute * HundredthsInSecond - 1; --# assert Type'Base is Integer; SystemTime : TimeOfDay; begin SystemTime := TimeOfDay (Time * Duration'(100.0)); Hours := HoursType (SystemTime / (MinutesInHour * SecondsInMinute * HundredthsInSecond)); Minutes := MinutesType (SystemTime / (SecondsInMinute * HundredthsInSecond) mod MinutesInHour); Seconds := SecondsType (SystemTime / HundredthsInSecond mod SecondsInMinute); Hundredths := HundredthsType (SystemTime mod HundredthsInSecond); end DecodeDuration; begin Calendar.Split (Date => Time, Year => Calendar.Year_Number (Year), Month => Calendar.Month_Number (Month), Day => Calendar.Day_Number (Day), Seconds => SystemTime); DecodeDuration (SystemTime, Hours, Minutes, Seconds, Hundredths); end DecodeTime; function DigitImage (Value : Digit) return Character is type DigitImages is array (Digit) of Character; Image : constant DigitImages := DigitImages'('0', '1', '2', '3', '4', '5', '6', '7', '8', '9'); begin return Image (Value); end DigitImage; function DateImage (Day : DaysType; Month : MonthsType; Year : YearsType) return String is function YearImage (Year : YearsType) return String is DigitsInYear : constant := 4; subtype DigitInYear is Positive range 1 .. 
DigitsInYear; subtype ResultType is String (DigitInYear); Result : ResultType; function GetYearDigit (Year : YearsType; Pos : DigitInYear) return Digit is begin return Digit (Year / 10 ** (DigitInYear'Last - Pos) mod 10); end GetYearDigit; begin for I in DigitInYear loop Result (I) := DigitImage (GetYearDigit (Year, I)); end loop; return Result; end YearImage; function MonthImage (Month : MonthsType) return String is CharactersInMonth : constant := 3; subtype CharacterInMonth is Positive range 1 .. CharactersInMonth; subtype MonthNames is String (CharacterInMonth); type NamesOfMonths is array (MonthsType) of MonthNames; Image : constant NamesOfMonths := NamesOfMonths'("JAN", "FEB", "MAR", "APR", "MAY", "JUN", "JUL", "AUG", "SEP", "OCT", "NOV", "DEC"); begin return Image (Month); end MonthImage; function DayImage (Day : DaysType) return String is DigitsInDay : constant := 2; subtype DigitInDay is Positive range 1 .. DigitsInDay; subtype ResultType is String (DigitInDay); Result : ResultType; function GetDayDigit (Day : DaysType; Pos : DigitInDay) return Digit is begin return Digit (Day / 10 ** (DigitInDay'Last - Pos) mod 10); end GetDayDigit; begin for I in DigitInDay loop Result (I) := DigitImage (GetDayDigit (Day, I)); end loop; return Result; end DayImage; begin return DayImage (Day) & '-' & MonthImage (Month) & '-' & YearImage (Year); end DateImage; function TimeImage (Hours : HoursType; Minutes : MinutesType; Seconds : SecondsType; Hundredths : HundredthsType) return String is function HourImage (Hour : HoursType) return String is DigitsInHour : constant := 2; subtype DigitInHour is Positive range 1 .. DigitsInHour; subtype ResultType is String (DigitInHour); Result : ResultType; function GetHourDigit (Hour : HoursType; Pos : DigitInHour) return Digit is begin return Digit (Hour / 10 ** (DigitInHour'Last - Pos) mod 10); end GetHourDigit; begin for I in DigitInHour loop Result (I) := DigitImage (GetHourDigit (Hour, I)); end loop; return Result; end HourImage; function MinuteImage (Minute : MinutesType) return String is DigitsInMinute : constant := 2; subtype DigitInMinute is Positive range 1 .. DigitsInMinute; subtype ResultType is String (DigitInMinute); Result : ResultType; function GetMinuteDigit (Minute : MinutesType; Pos : DigitInMinute) return Digit is begin return Digit (Minute / 10 ** (DigitInMinute'Last - Pos) mod 10); end GetMinuteDigit; begin for I in DigitInMinute loop Result (I) := DigitImage (GetMinuteDigit (Minute, I)); end loop; return Result; end MinuteImage; function SecondImage (Second : SecondsType) return String is DigitsInSecond : constant := 2; subtype DigitInSecond is Positive range 1 .. DigitsInSecond; subtype ResultType is String (DigitInSecond); Result : ResultType; function GetSecondDigit (Second : SecondsType; Pos : DigitInSecond) return Digit is begin return Digit (Second / 10 ** (DigitInSecond'Last - Pos) mod 10); end GetSecondDigit; begin for I in DigitInSecond loop Result (I) := DigitImage (GetSecondDigit (Second, I)); end loop; return Result; end SecondImage; function HundredthImage (Hundredth : HundredthsType) return String is DigitsInHundredth : constant := 2; subtype DigitInHundredth is Positive range 1 .. 
DigitsInHundredth; subtype ResultType is String (DigitInHundredth); Result : ResultType; function GetHundredthDigit (Hundredth : HundredthsType; Pos : DigitInHundredth) return Digit is begin return Digit (Hundredth / 10 ** (DigitInHundredth'Last - Pos) mod 10); end GetHundredthDigit; begin for I in DigitInHundredth loop Result (I) := DigitImage (GetHundredthDigit (Hundredth, I)); end loop; return Result; end HundredthImage; begin return HourImage (Hours) & ':' & MinuteImage (Minutes) & ':' & SecondImage (Seconds) & '.' & HundredthImage (Hundredths); end TimeImage; begin DecodeTime (Calendar.Clock, Year, Month, Day, Hours, Minutes, Seconds, Hundredths); DateString := DateImage (Day, Month, Year) & ' ' & TimeImage (Hours, Minutes, Seconds, Hundredths); end DateTime; spark-2012.0.deb/pogs/fileheap.adb0000644000175000017500000002414511753202340015637 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with FatalErrors, HeapIndex; use type HeapIndex.IndexType; package body FileHeap --# own State is StartOfPointersList, --# TheDetails, --# ThePointers; is ThePointers : Heap.HeapRecord; TheDetails : FileDetails.DataType; StartOfPointersList : Heap.Atom; function FirstEntry return Heap.Atom --# global in StartOfPointersList; is begin return StartOfPointersList; end FirstEntry; ------------------------------------------------------------------------ procedure Add (StartIndex : in Heap.Atom; NewName : in E_Strings.T; NewFileType : in FileDetails.FileTypes) --# global in out FatalErrors.State; --# in out TheDetails; --# in out ThePointers; --# derives FatalErrors.State, --# TheDetails, --# ThePointers from *, --# NewFileType, --# NewName, --# StartIndex, --# TheDetails, --# ThePointers; is Dummy : Boolean; ExistingName : E_Strings.T; ExistingFileType : FileDetails.FileTypes; ListIndex : Heap.Atom; LoopFinished : Boolean := False; NextEntryInList : Heap.Atom; OrderResult : E_Strings.Order_Types; OrderSuccess : Boolean; RetrieveSuccess : Boolean; ------------------------------------------------------------------------ procedure InsertInList (ListIndex : in Heap.Atom; NextEntryInList : in Heap.Atom; Name : in E_Strings.T; FileType : in FileDetails.FileTypes) --# global in out FatalErrors.State; --# in out TheDetails; --# in out ThePointers; --# derives FatalErrors.State from *, --# TheDetails, --# ThePointers & --# TheDetails from *, --# FileType, --# Name & --# ThePointers from *, --# ListIndex, --# NextEntryInList, --# TheDetails; is CreateAtomSuccess : Boolean; DetailsAddSuccess : Boolean; NewDetailsIndex : HeapIndex.IndexType; NewPointersIndex : Heap.Atom; begin -- InsertInList -- 
allocate heap atom Heap.CreateAtom (ThePointers, NewPointersIndex, CreateAtomSuccess); -- allocate file details entry FileDetails.Add (TheDetails, NewDetailsIndex, DetailsAddSuccess, Name, FileType); if not (CreateAtomSuccess and DetailsAddSuccess) then FatalErrors.Process (FatalErrors.File_Heap_Full, E_Strings.Empty_String); end if; -- point heap atom to file details entry Heap.UpdateAValue (ThePointers, NewPointersIndex, NewDetailsIndex); -- link heap atom into list Heap.UpdateAPointer (ThePointers, ListIndex, NewPointersIndex); Heap.UpdateAPointer (ThePointers, NewPointersIndex, NextEntryInList); end InsertInList; ------------------------------------------------------------------------- begin -- Add -- start at point specified in linked list ListIndex := StartIndex; while not LoopFinished loop -- if current item is last then add after it NextEntryInList := Heap.APointer (ThePointers, ListIndex); if NextEntryInList = 0 then InsertInList (ListIndex, NextEntryInList, NewName, NewFileType); LoopFinished := True; else -- otherwise get relative order of next item in list and the new item --# accept F, 10, Dummy, "Dummy unused here"; FileDetails.Retrieve (TheDetails, Heap.AValue (ThePointers, NextEntryInList), RetrieveSuccess, ExistingName, ExistingFileType, Dummy); --# end accept; if not RetrieveSuccess then FatalErrors.Process (FatalErrors.Data_Structure_Inconsistency, E_Strings.Empty_String); end if; FileDetails.Order (ExistingName, ExistingFileType, NewName, NewFileType, OrderSuccess, OrderResult); if not OrderSuccess then FatalErrors.Process (FatalErrors.Data_Structure_Inconsistency, E_Strings.Empty_String); end if; case OrderResult is when E_Strings.First_One_First => -- next item in list is first, keep going down list ListIndex := NextEntryInList; when E_Strings.Second_One_First => -- new item is first, insert here InsertInList (ListIndex, NextEntryInList, NewName, NewFileType); LoopFinished := True; when E_Strings.Neither_First => -- items identical: do nothing LoopFinished := True; end case; end if; end loop; --# accept F, 33, Dummy, "Dummy unused here"; end Add; ---------------------------------------------------------------------------- -- this procedure returns the file details for the specified entry in the -- linked list. 
Success if ListIndex points to a heap record which points to -- a valid set of file details procedure Details (ListIndex : in Heap.Atom; Success : out Boolean; Name : out E_Strings.T; FileType : out FileDetails.FileTypes; DirectoryIsResolved : out Boolean) --# global in TheDetails; --# in ThePointers; --# derives DirectoryIsResolved, --# FileType, --# Name, --# Success from ListIndex, --# TheDetails, --# ThePointers; is DetailsIndex : HeapIndex.IndexType; begin -- Details -- dereference linked list pointer DetailsIndex := Heap.AValue (ThePointers, ListIndex); -- if not null pointer then follow it if DetailsIndex /= 0 then FileDetails.Retrieve (TheDetails, DetailsIndex, Success, Name, FileType, DirectoryIsResolved); else -- if null pointer then return failure Success := False; Name := E_Strings.Empty_String; FileType := FileDetails.Invalid; DirectoryIsResolved := False; end if; end Details; -------------------------------------------------------------------------- procedure Initialize (TopDirectory : in E_Strings.T) --# global out StartOfPointersList; --# out TheDetails; --# out ThePointers; --# derives StartOfPointersList, --# ThePointers from & --# TheDetails from TopDirectory; is Dummy : Boolean; FirstDetailsIndex : HeapIndex.IndexType; FirstPointersIndex : Heap.Atom; begin -- Initialize Heap.Initialize (ThePointers); FileDetails.Initialize (TheDetails); -- insert first item --# accept F, 10, Dummy, "Dummy unused here"; FileDetails.Add (TheDetails, FirstDetailsIndex, Dummy, TopDirectory, FileDetails.Directory); Heap.CreateAtom (ThePointers, FirstPointersIndex, Dummy); --# end accept; Heap.UpdateAValue (ThePointers, FirstPointersIndex, FirstDetailsIndex); Heap.UpdateAPointer (ThePointers, FirstPointersIndex, 0); StartOfPointersList := FirstPointersIndex; --# accept F, 33, Dummy, "Dummy unused here"; end Initialize; --------------------------------------------------------------------------- procedure MarkDirectoryResolved (ListIndex : in Heap.Atom) --# global in ThePointers; --# in out TheDetails; --# derives TheDetails from *, --# ListIndex, --# ThePointers; is DetailsIndex : HeapIndex.IndexType; begin DetailsIndex := Heap.AValue (ThePointers, ListIndex); if DetailsIndex /= 0 then FileDetails.MarkDirectoryResolved (TheDetails, DetailsIndex); end if; end MarkDirectoryResolved; --------------------------------------------------------------------------- -- this procedure returns the 'NextOne' ordered element in FH after -- 'AfterThis'. 
It is successful if the NextOne is not a 'null' pointer procedure Next (AfterThis : in Heap.Atom; Success : out Boolean; NextOne : out Heap.Atom) --# global in ThePointers; --# derives NextOne, --# Success from AfterThis, --# ThePointers; is NextInList : Heap.Atom; begin -- Next NextInList := Heap.APointer (ThePointers, AfterThis); if NextInList = 0 then Success := False; NextOne := 0; else Success := True; NextOne := NextInList; end if; end Next; end FileHeap; spark-2012.0.deb/pogs/pogs.smf0000644000175000017500000000120211753202340015056 0ustar eugeneugenbanner.adb banner-copyright.adb -vcg banner-get_version.adb -vcg fatalerrors.adb -vcg filedetails.adb -vcg fileheap.adb -vcg findfiles.adb -vcg heap.adb -vcg pathformatter.adb -vcg total.adb vcdetails.adb -vcg vcheap.adb vcs.adb vcs-analyseprooflogfile.adb vcs-analysereviewfile.adb vcs-analysesimplogfile.adb vcs-analysesimplifiedvcfile.adb vcs-analysevictoredvcfile.adb -vcg vcs-analysevictorlogfile.adb -vcg vcs-analyse_dpc_file.adb vcs-analyse_summary_dp_file.adb vcs-analyse_riposte_summary_file.adb -vcg vcs-analysevcfile.adb vcs-printvcreport.adb vcs-processnewrangeline.adb vcs-writevcinfo.adb -vcg toppackage.adb -vcg slg_parser.adb spark-2012.0.deb/pogs/banner.ads0000644000175000017500000000532011753202340015342 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Produce the banner(s) to display on the screen and in the report file for -- --the Semantic Analysis Summary Tool -- -------------------------------------------------------------------------------- with SPARK_IO; --# inherit CommandLine, --# E_Strings, --# SPARK_IO, --# Version; package Banner is LenBanner : constant Integer := 79; subtype TypBannerRange is Integer range 1 .. 
LenBanner; subtype TypBannerLine is String (TypBannerRange); function MinorSeparatorLine return TypBannerLine; function MajorSeparatorLine return TypBannerLine; function EndOfReportMarker return TypBannerLine; procedure FinishReport (File : in SPARK_IO.File_Type); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# File; procedure Screen; --# global in CommandLine.Data; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLine.Data; procedure ReportVersion; --# global in CommandLine.Data; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLine.Data; procedure Report (File : in SPARK_IO.File_Type); --# global in CommandLine.Data; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLine.Data, --# File; end Banner; spark-2012.0.deb/pogs/vcs-analysesimplogfile.adb0000644000175000017500000002721211753202340020540 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Procedure to analyse an .SLG file -- -- -- --Uses SLG_Parser to deconstruct SLG file, finding which rule files used -- --which had syntax errors, and which VCs proved with which rules. 
-- -- -- --This info stored in temporary files and recovered later in total -- -------------------------------------------------------------------------------- with FatalErrors; with PathFormatter; with SLG_Parser; with SPARK_IO; use type SLG_Parser.Log_Status_T; separate (VCS) procedure AnalyseSimpLogFile (Report_File : in SPARK_IO.File_Type; Filename : in E_Strings.T; Rule_Files_Errors : in out SPARK_IO.File_Type; Rule_Files_Used : in out SPARK_IO.File_Type; SLG_Error_In_File : out Boolean) is Rule_Line : E_Strings.T; Rule_File : E_Strings.T; SLG_Directory : E_Strings.T; Rule_File_With_Path : E_Strings.T; Rule : E_Strings.T; Output_Line : E_Strings.T; VC_Number : E_Strings.T; SLG_Parser_Handle : SLG_Parser.Log_Info_T; SLG_Parser_Status : SLG_Parser.Log_Status_T; SLG_Parser_Rule_Status : SLG_Parser.Log_Status_T; SLG_Parser_VC_Status : SLG_Parser.Log_Status_T; Encountered_A_Rule_File : Boolean; Encountered_A_Rule : Boolean; Encountered_A_VC : Boolean; Duplicated : Boolean; procedure File_Contains (File : in out SPARK_IO.File_Type; E_Str : in E_Strings.T; Contains : out Boolean) --# global in out FatalErrors.State; --# in out SPARK_IO.File_Sys; --# derives Contains, --# SPARK_IO.File_Sys from E_Str, --# File, --# SPARK_IO.File_Sys & --# FatalErrors.State from *, --# E_Str, --# File, --# SPARK_IO.File_Sys & --# File from *, --# SPARK_IO.File_Sys; is Status : SPARK_IO.File_Status; Temp_Str : E_Strings.T; begin -- Reset file to read mode SPARK_IO.Reset (File, SPARK_IO.In_File, Status); -- Loop over all elements in the file -- comparing against the E_Str Contains := False; if Status = SPARK_IO.Ok then while not SPARK_IO.End_Of_File (File) loop E_Strings.Get_Line (File => File, E_Str => Temp_Str); if E_Strings.Eq_String (E_Str1 => E_Str, E_Str2 => Temp_Str) then Contains := True; exit; end if; end loop; -- Reset file to append mode SPARK_IO.Reset (File, SPARK_IO.Append_File, Status); end if; if Status /= SPARK_IO.Ok then FatalErrors.Process (FatalErrors.Could_Not_Open_Input_File, E_Strings.Empty_String); end if; end File_Contains; begin -- AnalyseSimpLogFile SLG_Error_In_File := False; SLG_Parser.Init (Filename, SLG_Parser_Handle, SLG_Parser_Status); if SLG_Parser_Status = SLG_Parser.Success then SLG_Parser.Find_Rulefiles_Read (SLG_Parser_Handle, SLG_Parser_Status); if SLG_Parser_Status = SLG_Parser.Success then -- Find path prefix to use for rule files SLG_Directory := OSFiling.Dir_Name (Path => Filename); SLG_Directory := PathFormatter.Format (SLG_Directory); SLG_Parser.Find_Rule_Syntax_Errors (SLG_Parser_Handle, SLG_Parser_Status); if SLG_Parser_Status = SLG_Parser.Success then -- Store syntax errors while SLG_Parser_Status = SLG_Parser.Success loop SLG_Parser.Get_Next_Rulefile_Syntax_Error (SLG_Parser_Handle, Rule_File, SLG_Parser_Status); if SLG_Parser_Status = SLG_Parser.Success then -- Store the syntax error Rule_File_With_Path := SLG_Directory; E_Strings.Append_Examiner_String (E_Str1 => Rule_File_With_Path, E_Str2 => Rule_File); File_Contains (File => Rule_Files_Errors, E_Str => Rule_File_With_Path, Contains => Duplicated); if not Duplicated then E_Strings.Put_Line (File => Rule_Files_Errors, E_Str => Rule_File_With_Path); end if; end if; -- Otherwise loop will terminate end loop; end if; -- All syntax errors stored, now proceed to rule summary SLG_Parser.Find_Rule_Summary (SLG_Parser_Handle, SLG_Parser_Status); if SLG_Parser_Status = SLG_Parser.Success then Encountered_A_Rule_File := False; while SLG_Parser_Status = SLG_Parser.Success loop SLG_Parser.Get_Next_Rulefile 
(SLG_Parser_Handle, Rule_File, SLG_Parser_Status); if SLG_Parser_Status = SLG_Parser.Success then -- Add the rulefile to the list of used files Rule_File_With_Path := SLG_Directory; E_Strings.Append_Examiner_String (E_Str1 => Rule_File_With_Path, E_Str2 => Rule_File); File_Contains (File => Rule_Files_Used, E_Str => Rule_File_With_Path, Contains => Duplicated); if not Duplicated then E_Strings.Put_Line (File => Rule_Files_Used, E_Str => Rule_File_With_Path); end if; -- Output to the report if not Encountered_A_Rule_File then SPARK_IO.New_Line (Report_File, 1); SPARK_IO.Put_Line (Report_File, "The following user rules were used:", 0); Encountered_A_Rule_File := True; end if; Rule_Line := E_Strings.Copy_String (Str => "from "); E_Strings.Append_Examiner_String (E_Str1 => Rule_Line, E_Str2 => Rule_File_With_Path); E_Strings.Put_Line (File => Report_File, E_Str => Rule_Line); Encountered_A_Rule := False; SLG_Parser_Rule_Status := SLG_Parser.Success; while SLG_Parser_Rule_Status = SLG_Parser.Success loop SLG_Parser.Get_Next_Rule (SLG_Parser_Handle, Rule, SLG_Parser_Rule_Status); if SLG_Parser_Rule_Status = SLG_Parser.Success then -- Output rule number to report -- Rule.Length should be < 256 - 20 - 3 Encountered_A_Rule := True; Output_Line := E_Strings.Copy_String (Str => " "); E_Strings.Append_Examiner_String (E_Str1 => Output_Line, E_Str2 => Rule); E_Strings.Append_String (E_Str => Output_Line, Str => " used in proving VCs:"); E_Strings.Put_Line (File => Report_File, E_Str => Output_Line); Encountered_A_VC := False; Output_Line := E_Strings.Copy_String (Str => " "); SLG_Parser_VC_Status := SLG_Parser.Success; while SLG_Parser_VC_Status = SLG_Parser.Success loop SLG_Parser.Get_Next_VC (SLG_Parser_Handle, VC_Number, SLG_Parser_VC_Status); if SLG_Parser_VC_Status = SLG_Parser.Success then -- Output VC number to report E_Strings.Append_Examiner_String (E_Str1 => Output_Line, E_Str2 => VC_Number); E_Strings.Append_String (E_Str => Output_Line, Str => ", "); Encountered_A_VC := True; else -- Remove the comma and replace with full stop Output_Line := E_Strings.Section (E_Str => Output_Line, Start_Pos => 1, Length => E_Strings.Get_Length (E_Str => Output_Line) - 2); E_Strings.Append_Char (E_Str => Output_Line, Ch => '.'); end if; end loop; -- Must have found a VC if not Encountered_A_VC or SLG_Parser_VC_Status = SLG_Parser.Unexpected_Text then SLG_Error_In_File := True; end if; E_Strings.Put_Line (File => Report_File, E_Str => Output_Line); end if; end loop; -- Must have found a rule if not Encountered_A_Rule or SLG_Parser_Rule_Status = SLG_Parser.Unexpected_Text then SLG_Error_In_File := True; end if; end if; end loop; -- Must have found a rulefile if not Encountered_A_Rule_File or SLG_Parser_Status = SLG_Parser.Unexpected_Text then SLG_Error_In_File := True; end if; end if; end if; --# accept Flow, 10, SLG_Parser_Handle, "Modify filehandle to close file"; SLG_Parser.Finalise (SLG_Parser_Handle); --# end accept; else SLG_Error_In_File := True; end if; --# accept Flow, 601, Rule_Files_Used, Report_File, "False coupling through SPARK_IO" & --# Flow, 601, Rule_Files_Used, Rule_Files_Errors, "False coupling through SPARK_IO"; end AnalyseSimpLogFile; spark-2012.0.deb/pogs/vcs.ads0000644000175000017500000001354511753202340014700 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or 
modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Package to perform processing of VC files -- -- -- -------------------------------------------------------------------------------- with E_Strings; with SPARK_IO; use type SPARK_IO.File_Status; --# inherit Ada.Characters.Handling, --# Banner, --# CommandLine, --# Date_Time, --# E_Strings, --# FatalErrors, --# Heap, --# OSFiling, --# PathFormatter, --# SLG_Parser, --# SPARK_Calendar, --# SPARK_IO, --# Total, --# VCDetails, --# VCHeap; package VCS is -- given the filename, without extension, process vcg, siv, plg and prv files -- if/as appropriate procedure Analyse (Report_File : in SPARK_IO.File_Type; Filename : in E_Strings.T; Analyse_Proof_Log : in Boolean; Temp_File : in SPARK_IO.File_Type; Temp_False_File : in SPARK_IO.File_Type; Temp_Contra_File : in SPARK_IO.File_Type; Temp_Victor_File : in SPARK_IO.File_Type; Temp_Riposte_File : in SPARK_IO.File_Type; Temp_User_File : in SPARK_IO.File_Type; Temp_Rlu_Error_File : in out SPARK_IO.File_Type; Temp_Rlu_Used_File : in out SPARK_IO.File_Type; Temp_PR_Verr_File : in SPARK_IO.File_Type; Temp_Warn_Error_File : in SPARK_IO.File_Type; Temp_SDP_Error_File : in SPARK_IO.File_Type; Temp_DPC_Error_File : in SPARK_IO.File_Type; Temp_Victor_Error_File : in SPARK_IO.File_Type; Temp_Riposte_Error_File : in SPARK_IO.File_Type); --# global in CommandLine.Data; --# in OSFiling.File_Structure; --# in out FatalErrors.State; --# in out SPARK_IO.File_Sys; --# in out Total.State; --# in out VCHeap.I_State; --# in out VCHeap.State; --# derives FatalErrors.State, --# VCHeap.I_State from *, --# Analyse_Proof_Log, --# CommandLine.Data, --# Filename, --# OSFiling.File_Structure, --# Report_File, --# SPARK_IO.File_Sys, --# Temp_Rlu_Error_File, --# Temp_Rlu_Used_File, --# VCHeap.I_State, --# VCHeap.State & --# SPARK_IO.File_Sys from *, --# Analyse_Proof_Log, --# CommandLine.Data, --# Filename, --# OSFiling.File_Structure, --# Report_File, --# Temp_Contra_File, --# Temp_DPC_Error_File, --# Temp_False_File, --# Temp_File, --# Temp_PR_Verr_File, --# Temp_Riposte_Error_File, --# Temp_Riposte_File, --# Temp_Rlu_Error_File, --# Temp_Rlu_Used_File, --# Temp_SDP_Error_File, --# Temp_User_File, --# Temp_Victor_Error_File, --# Temp_Victor_File, --# Temp_Warn_Error_File, --# VCHeap.I_State, --# VCHeap.State & --# Temp_Rlu_Error_File, --# Temp_Rlu_Used_File from *, --# Filename, --# OSFiling.File_Structure, --# SPARK_IO.File_Sys & --# Total.State, --# VCHeap.State from *, --# Analyse_Proof_Log, --# CommandLine.Data, --# Filename, --# OSFiling.File_Structure, --# Report_File, --# SPARK_IO.File_Sys, --# VCHeap.I_State, --# VCHeap.State; end VCS; spark-2012.0.deb/pogs/banner-get_version.adb0000644000175000017500000000377411753202340017656 0ustar 
eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Function to construct string announcing version and title of tool -- -- -- -------------------------------------------------------------------------------- separate (Banner) function Get_Version (J : in Justification) return TypBannerLine is R : TypBannerLine; begin if CommandLine.Data.PlainOutput then R := CreateBannerLine (FromText => "POGS " & Version.Toolset_Distribution & " Edition", WithJustification => J, FillChar => ' '); else R := CreateBannerLine (FromText => "POGS " & Version.Toolset_Banner_Line, WithJustification => J, FillChar => ' '); end if; return R; end Get_Version; spark-2012.0.deb/pogs/osfiling.adb0000644000175000017500000003175611753202340015702 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with E_Strings.Not_SPARK; with FatalErrors; with GNAT.Directory_Operations; with GNAT.IO_Aux; with GNAT.OS_Lib; package body OSFiling is ------------------------------------------------------------------------------ -- -- Given a path, this function changes directory to that path and returns a -- string containing the full path. Useful when you have a path such as "." -- or ".." and you want to get a meaningful directory name. -- -- Note: has the side-effect of changing the current directory to be the -- specified directory so if that isn't what you want then save the current -- directory before calling this function, then cd back to it afterwards. -- -- Note also that the returned directory name will have a trailing slash -- that you may have to deal with. 
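-- For example (illustrative paths, assuming a POSIX-style file system):
-- called with the path ".." from /home/user/project/proofs, the function
-- changes directory to /home/user/project and returns
-- "/home/user/project/", complete with the trailing separator;
-- Base_Dir_Name below strips that separator, keeps only the last path
-- component, "project", and then restores the original directory.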
-- -- If the specified path doesn't exist then a fatal error is reported. -- ------------------------------------------------------------------------------ function CD_And_Get_Name (Path : E_Strings.T) return GNAT.Directory_Operations.Dir_Name_Str is begin GNAT.Directory_Operations.Change_Dir (E_Strings.Not_SPARK.Get_String (E_Str => Path)); return GNAT.Directory_Operations.Get_Current_Dir; exception when others => -- Note: this call will NOT return so the return statement following -- it will never be executed, but is expected by the compiler. FatalErrors.Process (FatalErrors.Expected_Directory_Missing, Path); return ""; end CD_And_Get_Name; ---------------------------------------------------------------------------- function Base_Dir_Name (Path : E_Strings.T) return E_Strings.T is -- save for later Current_Dir : constant GNAT.Directory_Operations.Path_Name := GNAT.Directory_Operations.Get_Current_Dir; Full_Path_With_Slash : constant String := CD_And_Get_Name (Path); Full_Path_No_Slash : constant String := Full_Path_With_Slash (1 .. Full_Path_With_Slash'Length - 1); Base_Name : constant String := GNAT.Directory_Operations.Base_Name (Full_Path_No_Slash, ""); begin GNAT.Directory_Operations.Change_Dir (Current_Dir); -- return to saved dir return E_Strings.Copy_String (Str => Base_Name); exception when others => -- Note: this call will NOT return so the return statement following -- it will never be executed, but is expected by the compiler. FatalErrors.Process (FatalErrors.Expected_Directory_Missing, Path); return E_Strings.Empty_String; end Base_Dir_Name; ---------------------------------------------------------------------------- function Base_Filename (Path : E_Strings.T) return E_Strings.T is C : constant String := GNAT.Directory_Operations.Base_Name (E_Strings.Not_SPARK.Get_String (E_Str => Path), ""); begin return E_Strings.Copy_String (Str => C); end Base_Filename; ---------------------------------------------------------------------------- function Default_Report_Extn return E_Strings.T is begin return E_Strings.Copy_String (Str => ".sum"); end Default_Report_Extn; ------------------------------------------------------------------------------ -- this function combines the inputs to produce the path to the -- subdirectory function Down_Directory (Path : E_Strings.T; Sub_Directory : E_Strings.T) return E_Strings.T is Result : E_Strings.T; begin Result := Path; if E_Strings.Get_Element (E_Str => Path, Pos => E_Strings.Get_Length (E_Str => Path)) /= GNAT.Directory_Operations.Dir_Separator then E_Strings.Append_Char (E_Str => Result, Ch => GNAT.Directory_Operations.Dir_Separator); end if; E_Strings.Append_Examiner_String (E_Str1 => Result, E_Str2 => Sub_Directory); return Result; end Down_Directory; ------------------------------------------------------------------------------ -- this function combines the inputs to produce a full file name function Full_Filename (Path : E_Strings.T; Filename : E_Strings.T) return E_Strings.T is begin return Down_Directory (Path => Path, Sub_Directory => Filename); end Full_Filename; ------------------------------------------------------------------------------ function Get_Working_Directory return E_Strings.T is --# hide GetWorkingDirectory; -- save for later Current_Dir : constant GNAT.Directory_Operations.Path_Name := GNAT.Directory_Operations.Get_Current_Dir; Cwd : E_Strings.T; begin Cwd := E_Strings.Copy_String (Str => Current_Dir); -- Get_Current_Dir can return a trailing '/' on NT.. 
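-- For example (hypothetical path): if Get_Current_Dir yields "C:\work\pogs\",
-- the check below reduces Cwd to "C:\work\pogs".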
if E_Strings.Get_Length (E_Str => Cwd) /= 0 and then E_Strings.Get_Element (E_Str => Cwd, Pos => E_Strings.Get_Length (E_Str => Cwd)) = GNAT.Directory_Operations.Dir_Separator then Cwd := E_Strings.Section (E_Str => Cwd, Start_Pos => 1, Length => E_Strings.Get_Length (E_Str => Cwd) - 1); end if; return Cwd; end Get_Working_Directory; ------------------------------------------------------------------------------ function Is_Directory (Name : E_Strings.T) return Boolean is String_Name : constant String := E_Strings.Not_SPARK.Get_String (E_Str => Name); begin return GNAT.OS_Lib.Is_Directory (String_Name); end Is_Directory; ----------------------------------------------------------------------------- function Is_File (Name : E_Strings.T) return Boolean is String_Name : constant String := E_Strings.Not_SPARK.Get_String (E_Str => Name); begin return GNAT.IO_Aux.File_Exists (String_Name); end Is_File; ------------------------------------------------------------------------------ function Order (First_Name, Second_Name : E_Strings.T) return E_Strings.Order_Types is begin return E_Strings.Lex_Order (First_Name => First_Name, Second_Name => Second_Name); end Order; ------------------------------------------------------------------------------ procedure Remove_File_Extension (Filename : in out E_Strings.T) is Dot_Pos : E_Strings.Positions; begin Dot_Pos := E_Strings.Get_Length (E_Str => Filename); while Dot_Pos > 1 and then E_Strings.Get_Element (E_Str => Filename, Pos => Dot_Pos) /= '.' loop Dot_Pos := Dot_Pos - 1; end loop; if E_Strings.Get_Element (E_Str => Filename, Pos => Dot_Pos) = '.' then Filename := E_Strings.Section (E_Str => Filename, Start_Pos => 1, Length => Dot_Pos - 1); end if; end Remove_File_Extension; ---------------------------------------------------------------------------- function Simplified_VC_File_Extension return E_Strings.T is begin return E_Strings.Copy_String (Str => ".siv"); end Simplified_VC_File_Extension; ------------------------------------------------------------------------------ function Victored_VC_File_Extension return E_Strings.T is begin return E_Strings.Copy_String (Str => ".vct"); end Victored_VC_File_Extension; ------------------------------------------------------------------------------ function Victor_Log_File_Extension return E_Strings.T is begin return E_Strings.Copy_String (Str => ".vlg"); end Victor_Log_File_Extension; ------------------------------------------------------------------------------ function Riposte_Summary_File_Extension return E_Strings.T is begin return E_Strings.Copy_String (Str => ".rsm"); end Riposte_Summary_File_Extension; ------------------------------------------------------------------------------ -- this function removes the last directory name from the supplied string function Dir_Name (Path : E_Strings.T) return E_Strings.T is C : constant String := GNAT.Directory_Operations.Dir_Name (E_Strings.Not_SPARK.Get_String (E_Str => Path)); begin return E_Strings.Copy_String (Str => C); end Dir_Name; ---------------------------------------------------------------------------- function VC_File_Extension return E_Strings.T is begin return E_Strings.Copy_String (Str => ".vcg"); end VC_File_Extension; ---------------------------------------------------------------------------- function Proof_Log_File_Extension return E_Strings.T is begin return E_Strings.Copy_String (Str => ".plg"); end Proof_Log_File_Extension; ---------------------------------------------------------------------------- function Review_File_Extension return 
E_Strings.T is begin return E_Strings.Copy_String (Str => ".prv"); end Review_File_Extension; ---------------------------------------------------------------------------- function Simplifier_Log_File_Extension return E_Strings.T is begin return E_Strings.Copy_String (Str => ".slg"); end Simplifier_Log_File_Extension; ---------------------------------------------------------------------------- function DPC_File_Extension return E_Strings.T is begin return E_Strings.Copy_String (Str => ".dpc"); end DPC_File_Extension; ---------------------------------------------------------------------------- function Summary_DP_File_Extension return E_Strings.T is begin return E_Strings.Copy_String (Str => ".sdp"); end Summary_DP_File_Extension; ---------------------------------------------------------------------------- function Directory_Separator return Character is begin return GNAT.Directory_Operations.Dir_Separator; end Directory_Separator; ---------------------------------------------------------------------------- function Is_Relevant_File (Name : in E_Strings.T) return Boolean is Extension : E_Strings.T; -- Stolen from SPARKClean. -- This function attempts to work out a file's extension. It -- should do something like this: -- "foobar.vcg" -> "vcg" -- ".vcg" -> "vcg" -- "foobar" -> "" -- "fail." -> "" -- "foo.bar.baz" -> "baz" function Get_Extension (File_Name : in E_Strings.T) return E_Strings.T is Ext : E_Strings.T; Ext_Starts_At : Natural; begin -- Search for the last '.' in the filename. Ext_Starts_At := 0; for N in reverse Natural range 1 .. E_Strings.Get_Length (File_Name) loop if E_Strings.Get_Element (File_Name, N) = '.' then Ext_Starts_At := N; exit; end if; end loop; -- Copy and return the extension, if it exists. Ext := E_Strings.Empty_String; if Ext_Starts_At >= 1 and Ext_Starts_At < E_Strings.Get_Length (File_Name) then for N in Positive range Ext_Starts_At + 1 .. E_Strings.Get_Length (File_Name) loop E_Strings.Append_Char (Ext, E_Strings.Get_Element (File_Name, N)); end loop; end if; return Ext; end Get_Extension; begin Extension := Get_Extension (Name); return E_Strings.Eq1_String (Extension, "vcg") or else E_Strings.Eq1_String (Extension, "siv") or else E_Strings.Eq1_String (Extension, "slg") or else E_Strings.Eq1_String (Extension, "dpc") or else E_Strings.Eq1_String (Extension, "sdp") or else E_Strings.Eq1_String (Extension, "plg") or else E_Strings.Eq1_String (Extension, "prv") or else E_Strings.Eq1_String (Extension, "vct") or else E_Strings.Eq1_String (Extension, "vlg"); end Is_Relevant_File; end OSFiling; spark-2012.0.deb/pogs/fatalerrors.ads0000644000175000017500000000533611753202340016430 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Note that the Process procedure does not return to point of call. Instead -- --it raises an exception which is trapped by the main program, which causes -- --the program to stop politely. -- -- -- --The use of the State variable makes it easy to see where fatal errors may -- --occur -- -------------------------------------------------------------------------------- with E_Strings; --# inherit CommandLine, --# E_Strings, --# SPARK_IO; package FatalErrors --# own State; --# initializes State; is type Error_Type is ( Could_Not_Open_Input_File, Could_Not_Create_Report_File, Data_Structure_Inconsistency, VC_Data_Structure_Inconsistency, Expected_Directory_Missing, File_Heap_Full, VC_Heap_Full, Invalid_Command_Line, Problem_Reading_File, Problem_Creating_Temp_File, Subprogram_Totals_Inconsistent); -- note Process DOES NOT return procedure Process (Error : in Error_Type; Message : in E_Strings.T); --# global out State; --# derives State from Error, --# Message; --# post False; -- does not terminate normally end FatalErrors; spark-2012.0.deb/pogs/vcs-analysevcfile.adb0000644000175000017500000004062211753202340017476 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Procedure to analyse a .VCG file -- -- -- -------------------------------------------------------------------------------- separate (VCS) procedure AnalyseVCFile (Report_File : in SPARK_IO.File_Type; Filename : in E_Strings.T; Error_In_File : out Boolean; File_Date_Time : out E_Strings.T) is Dummy_Close_Status : SPARK_IO.File_Status; The_Date_Time : E_Strings.T; File_Line : E_Strings.T; Finished_With_File : Boolean; Open_Status : SPARK_IO.File_Status; Read_Line_Success : Boolean; Sub_Program_Name : E_Strings.T; VC_File : SPARK_IO.File_Type := SPARK_IO.Null_File; VC_Info : VC_Info_Type; File_Status : File_Status_T; Current_VC_Name : E_Strings.T; Parsing_State : Parsing_State_Type := Initial; Trimmed_Line : E_Strings.T; ------------------------------------------------------------------------ procedure Extract_VC_File_Date_Time_And_Subprog_Name (VC_File : in SPARK_IO.File_Type; File_Date_Time : out E_Strings.T; Sub_Program_Name : out E_Strings.T; File_Status : out File_Status_T) --# global in out SPARK_IO.File_Sys; --# derives File_Date_Time, --# File_Status, --# SPARK_IO.File_Sys, --# Sub_Program_Name from SPARK_IO.File_Sys, --# VC_File; is File_Line : E_Strings.T; Trimmed_Line : E_Strings.T; Subprogram_Found : Boolean := False; begin File_Status := Not_Corrupt; File_Date_Time := E_Strings.Empty_String; Sub_Program_Name := E_Strings.Empty_String; -- Check for completely empty file. E_Strings.Get_Line (File => VC_File, E_Str => File_Line); if E_Strings.Is_Empty (E_Str => File_Line) and SPARK_IO.End_Of_File (VC_File) then File_Status := Corrupt_Empty_File; else --Keep on reading from this file, until the desired information is retrieved --or the end of the file is reached. loop Trimmed_Line := E_Strings.Trim (File_Line); -- find date -- (There is an implicit assumption that the date, if present, will appear -- before the subprogram name.) if E_Strings.Eq1_String (E_Str => E_Strings.Section (Trimmed_Line, 1, 4), Str => "DATE") then File_Date_Time := E_Strings.Section (Trimmed_Line, VCG_File_Date_Time_Start_Column, VCG_File_Date_Time_Length); end if; Subprogram_Found := Is_Valid_Subprogram (Trimmed_Line); if Subprogram_Found then -- Note that this does not actually work if we have a -- wrapped line. The original code for -- Is_Valid_Subprogram will cater for wrapped line -- that, but we seem to not care about it here. 
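-- At this point File_Line is the subprogram header line taken from the
-- .vcg file, e.g. something of the shape "procedure Stack.Push" (the name
-- is purely illustrative).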
Sub_Program_Name := E_Strings.Trim (File_Line); end if; exit when (Subprogram_Found or SPARK_IO.End_Of_File (VC_File)); E_Strings.Get_Line (File => VC_File, E_Str => File_Line); end loop; end if; if E_Strings.Is_Empty (E_Str => File_Date_Time) then File_Date_Time := E_Strings.Copy_String (Str => "Unknown Date (for vc generation)"); end if; if (File_Status = Not_Corrupt) and not Subprogram_Found then File_Status := Corrupt_Unknown_Subprogram; end if; end Extract_VC_File_Date_Time_And_Subprog_Name; -------------------------------------------------------------------------- function Get_Line_Number (Line_Number : VC_Line_Type) return E_Strings.T is Number : Integer; Trimmed_Result : E_Strings.T; begin if Line_Number = Refinement_Or_Inheritance_VC then Trimmed_Result := E_Strings.Copy_String (Str => " "); elsif Line_Number = VC_Line_Start then Trimmed_Result := E_Strings.Copy_String (Str => "start"); elsif Line_Number = VC_Line_End then Trimmed_Result := E_Strings.Copy_String (Str => "finish"); else Number := Line_Number; E_Strings.Put_Int_To_String (Dest => Trimmed_Result, Item => Number, Start_Pt => 1, Base => 10); Trimmed_Result := E_Strings.Trim (E_Str => Trimmed_Result); end if; return Trimmed_Result; end Get_Line_Number; begin -- AnalyseVCFile Current_VC_Name := E_Strings.Empty_String; -- open VC file E_Strings.Open (File => VC_File, Mode_Of_File => SPARK_IO.In_File, Name_Of_File => Filename, Form_Of_File => "", Status => Open_Status); if Open_Status /= SPARK_IO.Ok then FatalErrors.Process (FatalErrors.Could_Not_Open_Input_File, E_Strings.Empty_String); end if; --No errors, until discover otherwise. Error_In_File := False; Extract_VC_File_Date_Time_And_Subprog_Name (VC_File => VC_File, File_Date_Time => The_Date_Time, Sub_Program_Name => Sub_Program_Name, File_Status => File_Status); -- Report any error to standard out and set error flag -- accordingly. case File_Status is when Not_Corrupt => null; when Corrupt_Empty_File => SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "************* VC file corrupt: empty file ************", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); Error_In_File := True; when Corrupt_Unknown_Subprogram => SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "************* VC file corrupt: missing subprogram name ************", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); Error_In_File := True; end case; --Record the date regardless of errors. This may be a string of the form 'no date'. File_Date_Time := The_Date_Time; --Only continue working on this file if an error has not been seen. --(Previously POGS would attempt to work with corrupt files. This feature has the -- capacity to produce confusing and wrong results.) 
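-- The classification loop further below recognises lines of roughly the
-- following shapes (illustrative sketches only, not verbatim tool output):
--
--    For path(s) from start to finish:       Is_New_Range_Line
--    procedure_foo_1.                        Is_New_VC_Line
--    *** true .                              Is_Trivially_True_VC
--
-- For VC name lines the final character is stripped (see the calls to
-- E_Strings.Section below) before the name is stored on the VCHeap.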
if not Error_In_File then SPARK_IO.Put_String (Report_File, "File ", 0); if CommandLine.Data.PlainOutput then E_Strings.Put_Line (File => Report_File, E_Str => E_Strings.Lower_Case (E_Str => OSFiling.Base_Filename (Path => Filename))); else E_Strings.Put_Line (File => Report_File, E_Str => Filename); end if; E_Strings.Put_Line (File => Report_File, E_Str => Sub_Program_Name); SPARK_IO.New_Line (Report_File, 1); if CommandLine.Data.IgnoreDates then SPARK_IO.Put_Line (Report_File, "*** Warning: VC date stamps ignored ***", 0); else SPARK_IO.Put_String (Report_File, "VCs generated ", 0); E_Strings.Put_Line (File => Report_File, E_Str => The_Date_Time); end if; -- find first non blank line -- if we get to the end of the file first, flag a fatal error Read_Next_Non_Blank_Line (File => VC_File, Success => Read_Line_Success, File_Line => File_Line); if not Read_Line_Success then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "************* VC file corrupt: no data beyond header ************", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); Error_In_File := True; else -- if it's an error message then reproduce this and exit -- assumption: that if an error message is present it will be -- the first non blank line after the file header if Is_VC_Error_Message (Line => File_Line) then SPARK_IO.New_Line (Report_File, 1); E_Strings.Put_String (File => Report_File, E_Str => File_Line); Error_In_File := True; else Error_In_File := False; -- initialize the 'current information' structure VC_Info := VC_Info_Type' (Start_Line => VC_Line_Start, End_Line => VC_Line_End, End_Line_Point_Type => VCDetails.Undetermined_Point, Number_Of_VCs => 0, This_Start_Line_Printed => False, File_Type => Standard_VC_File_Type, Any_VCs_Printed => False, Valid => False); Finished_With_File := False; -- process file line-by-line -- on entry to the loop there is already a valid line in the -- File_Line buffer while not Finished_With_File loop -- examine line and act accordingly if Is_New_Range_Line (Line => File_Line) then case Parsing_State is when Initial => Parsing_State := First_Range; when First_VC_Name => Parsing_State := New_Range; when New_VC_Name => Parsing_State := New_Range; when others => null; end case; Append_Next_Line_From_File (Line => File_Line, File => VC_File); ProcessNewRangeLine (File_Line, VC_Info); elsif Is_New_VC_Line (Line => File_Line) then case Parsing_State is when First_Range => -- Initialise VCHeap and store the first VC on the VCHeap Trimmed_Line := E_Strings.Trim (File_Line); Current_VC_Name := E_Strings.Section (E_Str => Trimmed_Line, Start_Pos => 1, Length => E_Strings.Get_Length (E_Str => Trimmed_Line) - 1); Parsing_State := First_VC_Name; VCHeap.Reinitialize (Current_VC_Name, Get_Line_Number (Line_Number => VC_Info.Start_Line), Get_Line_Number (Line_Number => VC_Info.End_Line), VC_Info.End_Line_Point_Type); VCHeap.Set_VC_State (Current_VC_Name, VCDetails.VC_SIV_Not_Present); when First_VC_Name => Trimmed_Line := E_Strings.Trim (File_Line); Current_VC_Name := E_Strings.Section (E_Str => Trimmed_Line, Start_Pos => 1, Length => E_Strings.Get_Length (E_Str => Trimmed_Line) - 1); Parsing_State := New_VC_Name; VCHeap.Add (VCHeap.First_Entry, Current_VC_Name, Get_Line_Number (Line_Number => VC_Info.Start_Line), Get_Line_Number (Line_Number => VC_Info.End_Line), VC_Info.End_Line_Point_Type, VCDetails.VC_SIV_Not_Present, VCDetails.DPC_Not_Present); when New_Range => -- Store a new VC on the VC Heap Trimmed_Line := E_Strings.Trim (File_Line); Current_VC_Name := E_Strings.Section (E_Str => Trimmed_Line, 
Start_Pos => 1, Length => E_Strings.Get_Length (E_Str => Trimmed_Line) - 1); Parsing_State := New_VC_Name; VCHeap.Add (VCHeap.First_Entry, Current_VC_Name, Get_Line_Number (Line_Number => VC_Info.Start_Line), Get_Line_Number (Line_Number => VC_Info.End_Line), VC_Info.End_Line_Point_Type, VCDetails.VC_SIV_Not_Present, VCDetails.DPC_Not_Present); when New_VC_Name => -- The range has not changed, but store a new VC on the VC Heap Trimmed_Line := E_Strings.Trim (File_Line); Current_VC_Name := E_Strings.Section (E_Str => Trimmed_Line, Start_Pos => 1, Length => E_Strings.Get_Length (E_Str => Trimmed_Line) - 1); Parsing_State := New_VC_Name; VCHeap.Add (VCHeap.First_Entry, Current_VC_Name, Get_Line_Number (Line_Number => VC_Info.Start_Line), Get_Line_Number (Line_Number => VC_Info.End_Line), VC_Info.End_Line_Point_Type, VCDetails.VC_SIV_Not_Present, VCDetails.DPC_Not_Present); when others => null; end case; VC_Info.Number_Of_VCs := VC_Info.Number_Of_VCs + 1; elsif Is_Trivially_True_VC (Line => File_Line) then VCHeap.Set_VC_State (Current_VC_Name, VCDetails.VC_Proved_By_Examiner); elsif Is_Trivially_False_VC (Line => File_Line) then VCHeap.Set_VC_State (Current_VC_Name, VCDetails.VC_False); end if; -- read next line Read_Next_Non_Blank_Line (File => VC_File, Success => Read_Line_Success, File_Line => File_Line); -- if unsuccessful then check EOF -- and set Finished_With_File accordingly if not Read_Line_Success then if SPARK_IO.End_Of_File (VC_File) then Finished_With_File := True; else FatalErrors.Process (FatalErrors.Problem_Reading_File, E_Strings.Empty_String); end if; end if; end loop; -- write information for last VC -- two VC_Info parameters are necessary as WriteVC_Info compares them -- in deciding what to write (see definition of WriteVC_Info) if not VC_Info.Valid then SPARK_IO.Put_Line (Report_File, "No VCs in file", 0); end if; end if; end if; end if; --# accept F, 10, Dummy_Close_Status, "Dummy_Close_Status unused here" & --# F, 10, VC_File, "VC_File unused here"; SPARK_IO.Close (VC_File, Dummy_Close_Status); --# end accept; --# accept F, 33, Dummy_Close_Status, "Dummy_Close_Status unused here"; end AnalyseVCFile; spark-2012.0.deb/pogs/vcheap.adb0000644000175000017500000006475211753202340015340 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Package combining Heap and VCDetails to give an ordered list of VC details. 
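--A sketch of the structure maintained here (VC names are illustrative):
--
--   Start_Of_Pointers_List --> [atom] --value--> The_Details: "proc_foo_1"
--                                 |
--                              pointer
--                                 v
--                               [atom] --value--> The_Details: "proc_foo_2"
--                                 |
--                                 v
--                              null (0)
--
--Add keeps this list in VCDetails.Order; an entry whose name is already
--present (Neither_First) is silently ignored.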
-- -- -- -------------------------------------------------------------------------------- with FatalErrors; with HeapIndex; use type HeapIndex.IndexType; package body VCHeap --# own I_State is Longest_VC_End_Length, --# Longest_VC_Name_Length, --# Longest_VC_Start_Length, --# VC_Name_Prefix & --# State is Start_Of_Pointers_List, --# The_Details, --# The_Pointers; is The_Pointers : Heap.HeapRecord; The_Details : VCDetails.Data_Type; Start_Of_Pointers_List : Heap.Atom; -- this one is used to record the prefix name for the VCs VC_Name_Prefix : E_Strings.T; -- the following are used as VC are added to the heap to record information -- used later for displaying the results as a table Longest_VC_Name_Length : Integer; Longest_VC_Start_Length : Integer; Longest_VC_End_Length : Integer; function First_Entry return Heap.Atom --# global in Start_Of_Pointers_List; is begin return Start_Of_Pointers_List; end First_Entry; ------------------------------------------------------------------------ procedure Add (Start_Index : in Heap.Atom; New_Name : in E_Strings.T; Path_Start : in E_Strings.T; Path_End : in E_Strings.T; End_Type : in VCDetails.Terminal_Point_Type; VC_State : in VCDetails.VC_State_T; DPC_State : in VCDetails.DPC_State_T) --# global in out FatalErrors.State; --# in out Longest_VC_End_Length; --# in out Longest_VC_Name_Length; --# in out Longest_VC_Start_Length; --# in out The_Details; --# in out The_Pointers; --# derives FatalErrors.State, --# The_Details, --# The_Pointers from *, --# DPC_State, --# End_Type, --# New_Name, --# Path_End, --# Path_Start, --# Start_Index, --# The_Details, --# The_Pointers, --# VC_State & --# Longest_VC_End_Length from *, --# Path_End & --# Longest_VC_Name_Length from *, --# New_Name & --# Longest_VC_Start_Length from *, --# Path_Start; is Existing_Name : E_Strings.T; Existing_Path_Start : E_Strings.T; Existing_Path_End : E_Strings.T; Existing_End_Type : VCDetails.Terminal_Point_Type; Existing_VC_State : VCDetails.VC_State_T; Existing_DPC_State : VCDetails.DPC_State_T; List_Index : Heap.Atom; Loop_Finished : Boolean := False; Next_Entry_In_List : Heap.Atom; Order_Result : E_Strings.Order_Types; Retrieve_Success : Boolean; procedure Insert_In_List (List_Index : in Heap.Atom; Next_Entry_In_List : in Heap.Atom; Name : in E_Strings.T; Path_Start : in E_Strings.T; Path_End : in E_Strings.T; End_Type : in VCDetails.Terminal_Point_Type; VC_State : in VCDetails.VC_State_T; DPC_State : in VCDetails.DPC_State_T) --# global in out FatalErrors.State; --# in out The_Details; --# in out The_Pointers; --# derives FatalErrors.State from *, --# The_Details, --# The_Pointers & --# The_Details from *, --# DPC_State, --# End_Type, --# Name, --# Path_End, --# Path_Start, --# VC_State & --# The_Pointers from *, --# List_Index, --# Next_Entry_In_List, --# The_Details; is Create_Atom_Success : Boolean; Details_Add_Success : Boolean; New_Details_Index : HeapIndex.IndexType; New_Pointers_Index : Heap.Atom; begin -- allocate heap atom Heap.CreateAtom (The_Pointers, New_Pointers_Index, Create_Atom_Success); -- allocate file details entry VCDetails.Add (Details => The_Details, Index => New_Details_Index, Success => Details_Add_Success, Name => Name, Path_Start => Path_Start, Path_End => Path_End, End_Type => End_Type, VC_State => VC_State, DPC_State => DPC_State); if not (Create_Atom_Success and Details_Add_Success) then FatalErrors.Process (FatalErrors.VC_Heap_Full, E_Strings.Empty_String); end if; -- point heap atom to file details entry Heap.UpdateAValue (The_Pointers, 
New_Pointers_Index, New_Details_Index); -- link heap atom into list Heap.UpdateAPointer (The_Pointers, List_Index, New_Pointers_Index); Heap.UpdateAPointer (The_Pointers, New_Pointers_Index, Next_Entry_In_List); end Insert_In_List; ------------------------------------------------------------------------- function Longest_Of (First_Length : Integer; Second_Length : Integer) return Integer is Result : Integer; begin if First_Length > Second_Length then Result := First_Length; else Result := Second_Length; end if; return Result; end Longest_Of; begin -- Add -- start at point specified in linked list List_Index := Start_Index; while not Loop_Finished loop -- if current item is last then add after it Next_Entry_In_List := Heap.APointer (The_Pointers, List_Index); if Next_Entry_In_List = 0 then Insert_In_List (List_Index => List_Index, Next_Entry_In_List => Next_Entry_In_List, Name => New_Name, Path_Start => Path_Start, Path_End => Path_End, End_Type => End_Type, VC_State => VC_State, DPC_State => DPC_State); Loop_Finished := True; else -- otherwise get relative order of next item in list and the new item --# accept F, 10, Existing_Path_Start, "Existing_Path_Start not used here" & --# F, 10, Existing_Path_End, "Existing_Path_End not used here" & --# F, 10, Existing_End_Type, "Existing_End_Type not used here" & --# F, 10, Existing_VC_State, "Existing_VC_State not used here" & --# F, 10, Existing_DPC_State, "Existing_DPC_State not used here" ; VCDetails.Retrieve (The_Details, Heap.AValue (The_Pointers, Next_Entry_In_List), Retrieve_Success, Existing_Name, Existing_Path_Start, Existing_Path_End, Existing_End_Type, Existing_VC_State, Existing_DPC_State); --# end accept; if not Retrieve_Success then FatalErrors.Process (FatalErrors.VC_Data_Structure_Inconsistency, E_Strings.Empty_String); end if; VCDetails.Order (Existing_Name, New_Name, Order_Result); case Order_Result is when E_Strings.First_One_First => -- next item in list is first, keep going down list List_Index := Next_Entry_In_List; when E_Strings.Second_One_First => -- new item is first, insert here Insert_In_List (List_Index => List_Index, Next_Entry_In_List => Next_Entry_In_List, Name => New_Name, Path_Start => Path_Start, Path_End => Path_End, End_Type => End_Type, VC_State => VC_State, DPC_State => DPC_State); Loop_Finished := True; when E_Strings.Neither_First => -- items identical: do nothing Loop_Finished := True; end case; end if; end loop; Longest_VC_Name_Length := Longest_Of (First_Length => E_Strings.Get_Length (E_Str => New_Name), Second_Length => Longest_VC_Name_Length); Longest_VC_Start_Length := Longest_Of (First_Length => E_Strings.Get_Length (E_Str => Path_Start), Second_Length => Longest_VC_Start_Length); Longest_VC_End_Length := Longest_Of (First_Length => E_Strings.Get_Length (E_Str => Path_End) + VCDetails.End_Type_Image_Length, Second_Length => Longest_VC_End_Length); --# accept F, 33, Existing_Path_Start, "Existing_Path_Start not used here" & --# F, 33, Existing_Path_End, "Existing_Path_End not used here" & --# F, 33, Existing_End_Type, "Existing_End_Type not used here" & --# F, 33, Existing_VC_State, "Existing_VC_State not used here" & --# F, 33, Existing_DPC_State,"Existing_DPC_State not used here"; end Add; ---------------------------------------------------------------------------- -- this procedure returns the file details for the specified entry in the -- linked list. 
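-- If the atom's value pointer is null (0) the out parameters are given benign
-- defaults (empty strings, Undetermined_Point, VC_Not_Present and
-- DPC_Not_Present) rather than raising a fatal error.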
procedure Details (List_Index : in Heap.Atom; VC_Name : out E_Strings.T; Path_Start : out E_Strings.T; Path_End : out E_Strings.T; End_Type : out VCDetails.Terminal_Point_Type; VC_State : out VCDetails.VC_State_T; DPC_State : out VCDetails.DPC_State_T) --# global in The_Details; --# in The_Pointers; --# derives DPC_State, --# End_Type, --# Path_End, --# Path_Start, --# VC_Name, --# VC_State from List_Index, --# The_Details, --# The_Pointers; is Details_Index : HeapIndex.IndexType; Dummy : Boolean; begin -- dereference linked list pointer Details_Index := Heap.AValue (The_Pointers, List_Index); -- if not null pointer then follow it if Details_Index /= 0 then --# accept F, 10, Dummy, "Dummy not used here"; VCDetails.Retrieve (The_Details, Details_Index, Dummy, VC_Name, Path_Start, Path_End, End_Type, VC_State, DPC_State); --# end accept; else -- if null pointer then return failure VC_Name := E_Strings.Empty_String; Path_Start := E_Strings.Empty_String; Path_End := E_Strings.Empty_String; End_Type := VCDetails.Undetermined_Point; VC_State := VCDetails.VC_Not_Present; DPC_State := VCDetails.DPC_Not_Present; end if; --# accept F, 33, Dummy, "Dummy not used here"; end Details; -------------------------------------------------------------------------- procedure Initialize --# global out Longest_VC_End_Length; --# out Longest_VC_Name_Length; --# out Longest_VC_Start_Length; --# out Start_Of_Pointers_List; --# out The_Details; --# out The_Pointers; --# out VC_Name_Prefix; --# derives Longest_VC_End_Length, --# Longest_VC_Name_Length, --# Longest_VC_Start_Length, --# Start_Of_Pointers_List, --# The_Details, --# The_Pointers, --# VC_Name_Prefix from ; is begin Heap.Initialize (The_Pointers); VCDetails.Initialize (The_Details); Start_Of_Pointers_List := 0; VC_Name_Prefix := E_Strings.Empty_String; Longest_VC_Name_Length := 0; Longest_VC_Start_Length := 0; Longest_VC_End_Length := 0; end Initialize; -------------------------------------------------------------------------- procedure Raise_Error (Error_Kind : in VCDetails.Error_Type) --# global in out The_Details; --# derives The_Details from *, --# Error_Kind; is begin VCDetails.Raise_Error (Error_Kind => Error_Kind, Details => The_Details); end Raise_Error; -------------------------------------------------------------------------- function Error_Raised (Error_Kind : in VCDetails.Error_Type) return Boolean --# global in The_Details; is begin return VCDetails.Error_Raised (Error_Kind => Error_Kind, Details => The_Details); end Error_Raised; -------------------------------------------------------------------------- procedure Reinitialize (First_Element : in E_Strings.T; First_Path_Start : in E_Strings.T; First_Path_End : in E_Strings.T; First_End_Type : in VCDetails.Terminal_Point_Type) --# global out Longest_VC_End_Length; --# out Longest_VC_Name_Length; --# out Longest_VC_Start_Length; --# out Start_Of_Pointers_List; --# out The_Details; --# out The_Pointers; --# out VC_Name_Prefix; --# derives Longest_VC_End_Length from First_Path_End & --# Longest_VC_Name_Length, --# VC_Name_Prefix from First_Element & --# Longest_VC_Start_Length from First_Path_Start & --# Start_Of_Pointers_List, --# The_Pointers from & --# The_Details from First_Element, --# First_End_Type, --# First_Path_End, --# First_Path_Start; is Dummy : Boolean; First_Details_Index : HeapIndex.IndexType; First_Pointers_Index : Heap.Atom; begin -- Reinitialize Heap.Initialize (The_Pointers); VCDetails.Initialize (The_Details); -- insert first item --# accept F, 10, Dummy, "Dummy unused here"; 
VCDetails.Add (The_Details, First_Details_Index, Dummy, First_Element, First_Path_Start, First_Path_End, First_End_Type, VCDetails.VC_Not_Present, VCDetails.DPC_Not_Present); Heap.CreateAtom (The_Pointers, First_Pointers_Index, Dummy); --# end accept; Heap.UpdateAValue (The_Pointers, First_Pointers_Index, First_Details_Index); Heap.UpdateAPointer (The_Pointers, First_Pointers_Index, 0); Start_Of_Pointers_List := First_Pointers_Index; Longest_VC_Name_Length := E_Strings.Get_Length (E_Str => First_Element); Longest_VC_Start_Length := E_Strings.Get_Length (E_Str => First_Path_Start); Longest_VC_End_Length := E_Strings.Get_Length (E_Str => First_Path_End) + VCDetails.End_Type_Image_Length; VC_Name_Prefix := E_Strings.Section (First_Element, 1, E_Strings.Get_Length (E_Str => First_Element) - 2); --# accept F, 33, Dummy, "Dummy unused here"; end Reinitialize; --------------------------------------------------------------------------- -- this procedure returns the 'Next_One' ordered element in FH after -- 'After_This'. It is successful if the Next_One is not a 'null' pointer procedure Next (After_This : in Heap.Atom; Success : out Boolean; Next_One : out Heap.Atom) --# global in The_Pointers; --# derives Next_One, --# Success from After_This, --# The_Pointers; is Next_In_List : Heap.Atom; begin -- Next Next_In_List := Heap.APointer (The_Pointers, After_This); if Next_In_List = 0 then Success := False; Next_One := 0; else Success := True; Next_One := Next_In_List; end if; end Next; --------------------------------------------------------------------------- procedure Find_VC_By_Name (VC_Name : in E_Strings.T; VC_Index : out HeapIndex.IndexType) --# global in Start_Of_Pointers_List; --# in The_Details; --# in The_Pointers; --# in out FatalErrors.State; --# derives FatalErrors.State from *, --# Start_Of_Pointers_List, --# The_Details, --# The_Pointers, --# VC_Name & --# VC_Index from Start_Of_Pointers_List, --# The_Details, --# The_Pointers, --# VC_Name; is List_Index : Heap.Atom; Found : Boolean; Loop_Finished : Boolean; Retrieve_Success : Boolean; Current_VC_Name : E_Strings.T; Current_VC_Path_Start : E_Strings.T; Current_VC_Path_End : E_Strings.T; Current_VC_End_Type : VCDetails.Terminal_Point_Type; Current_VC_State : VCDetails.VC_State_T; Current_DPC_State : VCDetails.DPC_State_T; begin List_Index := Start_Of_Pointers_List; Found := False; Loop_Finished := False; while not Heap.IsNullPointer (List_Index) and not Loop_Finished loop --# accept F, 10, Current_VC_Path_Start, "Current_VC_Path_Start not used here" & --# F, 10, Current_VC_Path_End, "Current_VC_Path_End not used here" & --# F, 10, Current_VC_End_Type, "Current_VC_End_Type not used here" & --# F, 10, Current_VC_State, "Current_VC_State not used here" & --# F, 10, Current_DPC_State, "Current_DPC_State not used here"; VCDetails.Retrieve (The_Details, Heap.AValue (The_Pointers, List_Index), Retrieve_Success, Current_VC_Name, Current_VC_Path_Start, Current_VC_Path_End, Current_VC_End_Type, Current_VC_State, Current_DPC_State); --# end accept; if not Retrieve_Success then FatalErrors.Process (FatalErrors.VC_Data_Structure_Inconsistency, E_Strings.Empty_String); end if; if E_Strings.Eq_String (E_Str1 => VC_Name, E_Str2 => Current_VC_Name) then Found := True; Loop_Finished := True; else List_Index := Heap.APointer (The_Pointers, List_Index); end if; end loop; if Found then VC_Index := Heap.AValue (The_Pointers, List_Index); else VC_Index := 0; end if; --# accept F, 33, Current_VC_Path_Start, "Current_VC_Path_Start not used here" & --# F, 33, 
Current_VC_Path_End, "Current_VC_Path_End not used here" & --# F, 33, Current_VC_End_Type, "Current_VC_End_Type not used here" & --# F, 33, Current_VC_State, "Current_VC_State not used here" & --# F, 33, Current_DPC_State, "Current_DPC_State not used here"; end Find_VC_By_Name; -------------------------------------------------------------------------- procedure Set_VC_State (VC_Name : in E_Strings.T; VC_State : in VCDetails.VC_State_T) --# global in Start_Of_Pointers_List; --# in The_Pointers; --# in out FatalErrors.State; --# in out The_Details; --# derives FatalErrors.State from *, --# Start_Of_Pointers_List, --# The_Details, --# The_Pointers, --# VC_Name & --# The_Details from *, --# Start_Of_Pointers_List, --# The_Pointers, --# VC_Name, --# VC_State; is Details_Index : HeapIndex.IndexType; begin Find_VC_By_Name (VC_Name => VC_Name, VC_Index => Details_Index); if Details_Index /= 0 then VCDetails.Set_VC_State (The_Details, Details_Index, VC_State); end if; end Set_VC_State; -------------------------------------------------------------------------- function Get_VC_State (VC_Name : E_Strings.T) return VCDetails.VC_State_T --# global in Start_Of_Pointers_List; --# in The_Details; --# in The_Pointers; is -- Hide this function to hide the (unfortunate and downright -- annoying) side-effect that Find_VC_By_Name can have on FatalErrors.State --# hide Get_VC_State Details_Index : HeapIndex.IndexType; begin Find_VC_By_Name (VC_Name => VC_Name, VC_Index => Details_Index); return VCDetails.Get_VC_State (The_Details, Details_Index); end Get_VC_State; -------------------------------------------------------------------------- procedure Set_DPC_State (DPC_Name : in E_Strings.T; DPC_State : in VCDetails.DPC_State_T) --# global in Start_Of_Pointers_List; --# in The_Pointers; --# in out FatalErrors.State; --# in out The_Details; --# derives FatalErrors.State from *, --# DPC_Name, --# Start_Of_Pointers_List, --# The_Details, --# The_Pointers & --# The_Details from *, --# DPC_Name, --# DPC_State, --# Start_Of_Pointers_List, --# The_Pointers; is Details_Index : HeapIndex.IndexType; begin Find_VC_By_Name (VC_Name => DPC_Name, VC_Index => Details_Index); if Details_Index /= 0 then VCDetails.Set_DPC_State (The_Details, Details_Index, DPC_State); end if; end Set_DPC_State; --------------------------------------------------------------------------- function Exists (VC_Name : E_Strings.T) return Boolean --# global in Start_Of_Pointers_List; --# in The_Details; --# in The_Pointers; is -- Hide this function to hide the (unfortunate and downright -- annoying) side-effect that Find_VC_By_Name can have on FatalErrors.State --# hide Exists; Details_Index : HeapIndex.IndexType; begin Find_VC_By_Name (VC_Name => VC_Name, VC_Index => Details_Index); return (Details_Index /= 0); end Exists; --------------------------------------------------------------------------- procedure Get_VC_Name_End_Type (VC_Name : in E_Strings.T; VC_Type : out VCDetails.Terminal_Point_Type) --# global in Start_Of_Pointers_List; --# in The_Details; --# in The_Pointers; --# in out FatalErrors.State; --# derives FatalErrors.State from *, --# Start_Of_Pointers_List, --# The_Details, --# The_Pointers, --# VC_Name & --# VC_Type from Start_Of_Pointers_List, --# The_Details, --# The_Pointers, --# VC_Name; is VC_Index : HeapIndex.IndexType; begin Find_VC_By_Name (VC_Name => VC_Name, VC_Index => VC_Index); VC_Type := VCDetails.End_Point_Type (Details => The_Details, Index => VC_Index); end Get_VC_Name_End_Type; 
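-- A typical client interaction with this package, pieced together from the
-- operations above (illustrative only; variable names are invented and all
-- error handling is omitted):
--
--    VCHeap.Reinitialize (First_Name, Start_Line, End_Line, End_Type);
--    VCHeap.Add (VCHeap.First_Entry, Next_Name, Start_Line, End_Line,
--                End_Type, VCDetails.VC_SIV_Not_Present,
--                VCDetails.DPC_Not_Present);
--    VCHeap.Set_VC_State (Next_Name, VCDetails.VC_Proved_By_Examiner);
--
--    Current := VCHeap.First_Entry;
--    loop
--       VCHeap.Details (Current, Name, P_Start, P_End, E_Type, VC_St, DPC_St);
--       -- ... emit one row of the summary ...
--       VCHeap.Next (After_This => Current, Success => OK, Next_One => Next_A);
--       exit when not OK;
--       Current := Next_A;
--    end loop;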
--------------------------------------------------------------------------- function Get_Longest_VC_Name_Length return Integer --# global in Longest_VC_Name_Length; is begin return Longest_VC_Name_Length; end Get_Longest_VC_Name_Length; --------------------------------------------------------------------------- function Get_Longest_VC_Start_Length return Integer --# global in Longest_VC_Start_Length; is begin return Longest_VC_Start_Length; end Get_Longest_VC_Start_Length; --------------------------------------------------------------------------- function Get_Longest_VC_End_Length return Integer --# global in Longest_VC_End_Length; is begin return Longest_VC_End_Length; end Get_Longest_VC_End_Length; -------------------------------------------------------------------------- function Get_VC_Name_Prefix return E_Strings.T --# global in VC_Name_Prefix; is begin return VC_Name_Prefix; end Get_VC_Name_Prefix; end VCHeap; spark-2012.0.deb/pogs/banner.adb0000644000175000017500000002452011753202340015324 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Produce the banner(s) to display on the screen and in the report file -- -------------------------------------------------------------------------------- with CommandLine; with E_Strings; with Version; package body Banner is LenDateTime : constant Integer := 23; subtype TypDateTimeRange is Integer range 1 .. LenDateTime; subtype TypDateTime is String (TypDateTimeRange); type Justification is (Left, Middle, Right); -- Creates a banner line with the given text and justification. function CreateBannerLine (FromText : in String; WithJustification : in Justification; FillChar : in Character) return TypBannerLine is TheLine : TypBannerLine := TypBannerLine'(TypBannerLine'First .. TypBannerLine'Last => ' '); StartingAt : Positive; -- Copies the Source into the Dest starting at the given location in Dest -- Characters will be lost if Dest is not big enough. procedure Insert (Source : in String; Dest : in out TypBannerLine; StartingAt : in Positive) --# derives Dest from *, --# Source, --# StartingAt; is begin for I in Natural range 1 .. Source'Length loop exit when I + (StartingAt - 1) > Dest'Length; --# assert I <= TypBannerRange'Last --# and StartingAt <= Dest'Length --# and I + (StartingAt - 1) <= Dest'Length; Dest (I + (StartingAt - 1)) := Source (I); end loop; end Insert; begin -- Fill the line up with fill characters. for I in Natural range 1 .. 
TheLine'Length loop TheLine (I) := FillChar; end loop; case WithJustification is when Left => StartingAt := 1; when Middle => if TheLine'Length - FromText'Length <= 1 then StartingAt := 1; else StartingAt := (TheLine'Length - FromText'Length) / 2; end if; when Right => if TheLine'Length - FromText'Length <= 0 then StartingAt := 1; else StartingAt := (TheLine'Length - FromText'Length) + 1; end if; end case; Insert (Source => FromText, Dest => TheLine, StartingAt => StartingAt); return TheLine; end CreateBannerLine; function MinorSeparatorLine return TypBannerLine is begin return CreateBannerLine ("-", Left, '-'); end MinorSeparatorLine; function MajorSeparatorLine return TypBannerLine is begin return CreateBannerLine ("=", Left, '='); end MajorSeparatorLine; function EndOfReportMarker return TypBannerLine is subtype MyStringIndex is Positive range 1 .. 34; subtype MyString is String (MyStringIndex); Text : constant MyString := " End of Semantic Analysis Summary "; begin return CreateBannerLine (Text, Middle, '='); end EndOfReportMarker; function NameOfReport return TypBannerLine is subtype MyStringIndex is Positive range 1 .. 25; subtype MyString is String (MyStringIndex); Text : constant MyString := "Semantic Analysis Summary"; begin return CreateBannerLine (Text, Middle, ' '); end NameOfReport; function Copyright (J : in Justification) return TypBannerLine --# global in CommandLine.Data; is separate; function Get_Version (J : in Justification) return TypBannerLine --# global in CommandLine.Data; is separate; procedure DateTime (DateString : out TypDateTime) --# derives DateString from ; is separate; ---------------------------------------------------------------------- -- this procedure prints the top 5 banner lines to the output procedure TopBanner (File : in SPARK_IO.File_Type) --# global in CommandLine.Data; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLine.Data, --# File; is begin SPARK_IO.Put_Line (File, MinorSeparatorLine, 0); SPARK_IO.Put_Line (File, NameOfReport, 0); SPARK_IO.Put_Line (File, Get_Version (Middle), 0); SPARK_IO.Put_Line (File, Copyright (Middle), 0); SPARK_IO.Put_Line (File, MinorSeparatorLine, 0); SPARK_IO.New_Line (File, 1); end TopBanner; procedure ReportVersion is begin SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Get_Version (Left), 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Copyright (Left), 0); end ReportVersion; ------------------------------------------------------------------------- procedure FinishReport (File : in SPARK_IO.File_Type) is begin SPARK_IO.Put_Line (File, EndOfReportMarker, 0); end FinishReport; -------------------------------------------------------------------------- procedure Screen is begin TopBanner (SPARK_IO.Standard_Output); end Screen; -------------------------------------------------------------------------- procedure Report (File : in SPARK_IO.File_Type) is DateAndTime : TypDateTime; ------------------------------------------------------------------------ procedure OutputAnalysisType (File : in SPARK_IO.File_Type) --# global in CommandLine.Data; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# CommandLine.Data, --# File; is begin if CommandLine.Data.AnalyseVCs then SPARK_IO.Put_Line (File, "Verification Condition files (.vcg)", 0); SPARK_IO.Put_Line (File, "Simplified Verification Condition files (.siv)", 0); SPARK_IO.Put_Line (File, "Victor result files (.vct)", 0); SPARK_IO.Put_Line (File, "Riposte result files (.rsm)", 0); end if; if CommandLine.Data.AnalysePFs then 
SPARK_IO.Put_Line (File, "Path Function files (.pfs)", 0); SPARK_IO.Put_Line (File, "Simplified Path Function files (.sip)", 0); end if; if CommandLine.Data.AnalyseProofLog then SPARK_IO.Put_Line (File, "Proof Logs (.plg)", 0); end if; SPARK_IO.Put_Line (File, "Dead Path Conjecture files (.dpc)", 0); SPARK_IO.Put_Line (File, "Summary Dead Path files (.sdp)", 0); if not CommandLine.Data.ShortSummary then SPARK_IO.New_Line (File, 1); SPARK_IO.Put_Line (File, """status"" column keys:", 0); SPARK_IO.Put_Line (File, " 1st character:", 0); SPARK_IO.Put_Line (File, " '-' - No VC", 0); SPARK_IO.Put_Line (File, " 'S' - No SIV", 0); SPARK_IO.Put_Line (File, " 'U' - Undischarged", 0); SPARK_IO.Put_Line (File, " 'E' - Proved by Examiner", 0); SPARK_IO.Put_Line (File, " 'I' - Proved by Simplifier by Inference", 0); SPARK_IO.Put_Line (File, " 'X' - Proved by Simplifier by Contradiction", 0); SPARK_IO.Put_Line (File, " 'P' - Proved by Simplifier using User Defined Proof Rules", 0); SPARK_IO.Put_Line (File, " 'V' - Proved by Victor", 0); SPARK_IO.Put_Line (File, " 'O' - Proved by Riposte", 0); SPARK_IO.Put_Line (File, " 'C' - Proved by Checker", 0); SPARK_IO.Put_Line (File, " 'R' - Proved by Review", 0); SPARK_IO.Put_Line (File, " 'F' - VC is False", 0); SPARK_IO.Put_Line (File, " 2nd character:", 0); SPARK_IO.Put_Line (File, " '-' - No DPC", 0); SPARK_IO.Put_Line (File, " 'S' - No SDP", 0); SPARK_IO.Put_Line (File, " 'U' - Unchecked", 0); SPARK_IO.Put_Line (File, " 'D' - Dead path", 0); SPARK_IO.Put_Line (File, " 'L' - Live path", 0); end if; end OutputAnalysisType; -------------------------------------------------------------------------- begin -- Report TopBanner (File); -- extra lines to state what type of analysis we're doing, -- the starting directory, and the date of generation SPARK_IO.Put_Line (File, "Summary of:", 0); SPARK_IO.New_Line (File, 1); OutputAnalysisType (File); SPARK_IO.New_Line (File, 1); SPARK_IO.Put_Line (File, "in the directory:", 0); if CommandLine.Data.PlainOutput then SPARK_IO.New_Line (File, 1); else E_Strings.Put_Line (File => File, E_Str => CommandLine.Data.StartDirectory); end if; SPARK_IO.New_Line (File, 1); SPARK_IO.Put_String (File, "Summary produced: ", 0); if CommandLine.Data.PlainOutput then SPARK_IO.New_Line (File, 1); else DateTime (DateAndTime); SPARK_IO.Put_Line (File, DateAndTime, 0); end if; SPARK_IO.New_Line (File, 1); if CommandLine.Data.IgnoreDates then SPARK_IO.Put_Line (File, "Ignore Dates option selected.", 0); SPARK_IO.New_Line (File, 1); end if; end Report; end Banner; spark-2012.0.deb/pogs/osfiling.ads0000644000175000017500000001235611753202340015716 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= -------------------------------------------------------------------------------- --Synopsis: -- -- -- --Platform dependent package supplying low level filing utilities. FindFiles -- --is separate to prevent cyclic interpackage dependencies -- -- -- -------------------------------------------------------------------------------- with E_Strings; --# inherit E_Strings, --# SPARK_IO; package OSFiling --# own File_Structure; --# initializes File_Structure; is -- Return the filename from the string given, without drive or directory -- designation. -- Not guaranteed to work correctly if soft links are in the path. function Base_Filename (Path : E_Strings.T) return E_Strings.T; -- Return the directory from the string given, without drive or full path -- designation. If a string such as "." or ".." is given then the actual -- directory name is returned. -- Not guaranteed to work correctly if soft links are in the path. function Base_Dir_Name (Path : E_Strings.T) return E_Strings.T; function Default_Report_Extn return E_Strings.T; -- this function combines the inputs to produce the path to the -- subdirectory function Down_Directory (Path : E_Strings.T; Sub_Directory : E_Strings.T) return E_Strings.T; -- this function combines the inputs to produce a full file name function Full_Filename (Path : E_Strings.T; Filename : E_Strings.T) return E_Strings.T; function Get_Working_Directory return E_Strings.T; --# global in File_Structure; -- true iff the specified name is a directory function Is_Directory (Name : E_Strings.T) return Boolean; --# global in File_Structure; -- true iff the specified name exists and is not a directory function Is_File (Name : E_Strings.T) return Boolean; --# global in File_Structure; -- return which name comes first in dictionary order -- with the quirk that on the VAX, the file [spark]pogs -- immediately precedes the directory [spark.pogs] function Order (First_Name, Second_Name : E_Strings.T) return E_Strings.Order_Types; -- remove the file extension: on the VAX this includes removing the -- file version number, but the reverse scan for a '.' must not go -- past the ']' because there are '.'s in dir specifications procedure Remove_File_Extension (Filename : in out E_Strings.T); --# derives Filename from *; -- NOTE: Please update Is_Relevant_File if you add/remove/change -- any of the functions of the form *_File_Extension. 
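-- As a quick illustration (file names hypothetical), Is_Relevant_File below
-- returns True for "main.vcg", "main.siv" or "main.plg", and False for
-- "main.adb" or for "main" with no extension at all.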
-- return the file extension for a simplified VC file function Simplified_VC_File_Extension return E_Strings.T; -- return the file extension for a victor results file function Victored_VC_File_Extension return E_Strings.T; -- return the file extension for a victor log file function Victor_Log_File_Extension return E_Strings.T; -- return the file extension for a Riposte summary file function Riposte_Summary_File_Extension return E_Strings.T; -- this function removes the last directory name from the supplied string function Dir_Name (Path : E_Strings.T) return E_Strings.T; -- return the file extension for a VC file function VC_File_Extension return E_Strings.T; -- return the file extension for a Proof Log file function Proof_Log_File_Extension return E_Strings.T; -- return the file extension for a Review file function Review_File_Extension return E_Strings.T; -- return the file extension for a Simplifier Log file function Simplifier_Log_File_Extension return E_Strings.T; -- return the file extension for DPC file function DPC_File_Extension return E_Strings.T; function Summary_DP_File_Extension return E_Strings.T; -- return the platform specific directory separator function Directory_Separator return Character; -- based on the extension of the file, decide if it is relevant to -- us. when adding / removing *_File_Extension functions, please -- also update this function. function Is_Relevant_File (Name : in E_Strings.T) return Boolean; end OSFiling; spark-2012.0.deb/analyse/0000755000175000017500000000000011753202341014071 5ustar eugeneugenspark-2012.0.deb/analyse/referenceanalysis/0000755000175000017500000000000011753203755017605 5ustar eugeneugenspark-2012.0.deb/analyse/referenceanalysis/sparklalr.rep0000644000175000017500000010034711753203755022315 0ustar eugeneugen ******************************************************* Report of SPARK Examination Examiner GPL Edition ******************************************************* Options: index_file=sparklalr.idx nowarning_file notarget_compiler_data config_file=gnat.cfg source_extension=ada listing_extension=ls_ nodictionary_file report_file=sparklalr.rep nohtml plain_output sparklib nostatistics fdl_identifiers=accept flow_analysis=auto language=95 profile=sequential annotation_character=# rules=lazy error_explanations=off justification_option=full casing=si output_directory=vcg output_directory (actual)=vcg Selected files: @sparklalr.smf Index Filename(s) used were: sparklalr.idx spark.idx Meta File(s) used were: sparklalr.smf command_line_options.adb ees_sym.adb sparklalr_char_class.adb sparklalr_common.adb sparklalr_conflict.adb sparklalr_error.adb sparklalr_goto.adb sparklalr_input.adb spark_ada_integer_text_io.adb sparklalr_level.adb sparklalr_memory.adb sparklalr_memory-dump.adb sparklalr_memory-left_corner.adb sparklalr_parser.adb sparklalr_patab.adb symbols_dump.adb sparklalr.adb Full warning reporting selected Target configuration file: Line package Standard is type Short_Short_Integer is range -128 .. 127; type Short_Integer is range -2**15 .. 2**15 - 1; type Integer is range -2**31 .. 2**31 - 1; type Long_Long_Integer is range -2**63 .. 
2**63 - 1; end Standard; package System is Min_Int : constant := -2 ** 63; Max_Int : constant := 2 ** 63 - 1; end System; Source Filename(s) used were: command_line_options.adb command_line_options.ads spark-ada-text_io.ads spark-ada-strings-unbounded.ads spark-ada-command_line-unbounded_string.ads spark-ada-command_line.ads fatal.shs spark-ada.ads spark.ads spark-ada-strings-maps.ads spark-ada-strings.ads ees_sym.adb ees_sym.ads symbols_dump.ads spark_ada_integer_text_io.ads sparklalr_parser.ads sparklalr_memory-dump.ads sparklalr_memory.ads sparklalr_common.ads sparklalr_symbol.ads sparklalr_level.ads sparklalr_input.ads sparklalr_goto.ads sparklalr_error.ads sparklalr_char_class.ads spark-ada-text_io-unbounded_string.ads sparklalr_char_class.adb sparklalr_common.adb sparklalr_conflict.adb sparklalr_conflict.ads sparklalr_patab.ads sparklalr_error.adb sparklalr_goto.adb sparklalr_input.adb spark_ada_integer_text_io.adb sparklalr_level.adb sparklalr_memory.adb sparklalr_memory-dump.adb sparklalr_memory-left_corner.adb sparklalr_memory-left_corner.ads sparklalr_parser.adb sparklalr_patab.adb symbols_dump.adb sparklalr.adb Source Filename: command_line_options.ads No Listing File Unit name: Command_Line_Options Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark-ada-text_io.ads No Listing File Unit name: SPARK.Ada.Text_IO Unit type: package specification Unit has been analysed, any errors are listed below. 56 error(s) or warning(s) Line with Ada.Text_IO; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. 
--# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. end SPARK.Ada.Text_IO; --- Warning : 10: The private part of package Text_IO is hidden - hidden text is ignored by the Examiner. Source Filename: spark-ada-strings-unbounded.ads No Listing File Unit name: SPARK.Ada.Strings.Unbounded Unit type: package specification Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line with Ada.Strings.Unbounded; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. end SPARK.Ada.Strings.Unbounded; --- Warning : 10: The private part of package Unbounded is hidden - hidden text is ignored by the Examiner. Source Filename: spark-ada-command_line-unbounded_string.ads No Listing File Unit name: SPARK.Ada.Command_Line.Unbounded_String Unit type: package specification Unit has been analysed, any errors are listed below. 
No errors found Source Filename: spark-ada-command_line.ads No Listing File Unit name: SPARK.Ada.Command_Line Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: fatal.shs No Listing File Unit name: Fatal Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark-ada.ads No Listing File Unit name: SPARK.Ada Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark.ads No Listing File Unit name: SPARK Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark-ada-strings-maps.ads No Listing File Unit name: SPARK.Ada.Strings.Maps Unit type: package specification Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line with Ada.Strings.Maps; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. end SPARK.Ada.Strings.Maps; --- Warning : 10: The private part of package Maps is hidden - hidden text is ignored by the Examiner. Source Filename: spark-ada-strings.ads No Listing File Unit name: SPARK.Ada.Strings Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: ees_sym.ads No Listing File Unit name: Ees_Sym Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: symbols_dump.ads No Listing File Unit name: Symbols_Dump Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark_ada_integer_text_io.ads No Listing File Unit name: SPARK_Ada_Integer_Text_IO Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sparklalr_parser.ads No Listing File Unit name: Sparklalr_Parser Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sparklalr_memory-dump.ads No Listing File Unit name: Sparklalr_Memory.Dump Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sparklalr_memory.ads No Listing File Unit name: Sparklalr_Memory Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sparklalr_common.ads No Listing File Unit name: Sparklalr_Common Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sparklalr_symbol.ads No Listing File Unit name: Sparklalr_Symbol Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sparklalr_level.ads No Listing File Unit name: Sparklalr_Level Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sparklalr_input.ads No Listing File Unit name: Sparklalr_Input Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sparklalr_goto.ads No Listing File Unit name: Sparklalr_Goto Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sparklalr_error.ads No Listing File Unit name: Sparklalr_Error Unit type: package specification Unit has been analysed, any errors are listed below. 
No errors found Source Filename: sparklalr_char_class.ads No Listing File Unit name: Sparklalr_Char_Class Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark-ada-text_io-unbounded_string.ads No Listing File Unit name: SPARK.Ada.Text_IO.Unbounded_String Unit type: package specification Unit has been analysed, any errors are listed below. 10 error(s) or warning(s) Line --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. Source Filename: sparklalr_conflict.ads No Listing File Unit name: Sparklalr_Conflict Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sparklalr_patab.ads No Listing File Unit name: Sparklalr_Patab Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sparklalr_memory-left_corner.ads No Listing File Unit name: Sparklalr_Memory.Left_Corner Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: command_line_options.adb Listing Filename: command_line_options.lsb Unit name: Command_Line_Options Unit type: package body Unit has been analysed, any errors are listed below. No errors found Source Filename: ees_sym.adb Listing Filename: ees_sym.lsb Unit name: Ees_Sym Unit type: package body Unit has been analysed, any errors are listed below. No errors found Source Filename: sparklalr_char_class.adb Listing Filename: sparklalr_char_class.lsb Unit name: Sparklalr_Char_Class Unit type: package body Unit has been analysed, any errors are listed below. No errors found Source Filename: sparklalr_common.adb Listing Filename: sparklalr_common.lsb Unit name: Sparklalr_Common Unit type: package body Unit has been analysed, any errors are listed below. No errors found Source Filename: sparklalr_conflict.adb Listing Filename: sparklalr_conflict.lsb Unit name: Sparklalr_Conflict Unit type: package body Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. 
Line Flow 10 Ineffective assignment here expe 1 Warn 303 when others here covers all case 1 Flow 10 Ineffective assignment here expe 1 Warn 303 when others here covers all case 1 Warn 303 when others here covers all case 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 33 end Pl is unused OK 1 Flow 10 Ineffective assignment here expe 1 Flow 33 end Pl is unused OK 1 Flow 41 Stable expression here expected 1 Flow 10 Ineffective assignment here expe 2 Flow 33 end Pl is unused OK 1 Source Filename: sparklalr_error.adb Listing Filename: sparklalr_error.lsb Unit name: Sparklalr_Error Unit type: package body Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Skipping whitespace, so value is 1 Flow 33 end Unused OK 1 Flow 33 end Unused OK 1 Flow 10 Ineffective assignment here expe 1 Flow 33 end Unused OK 1 Warn 303 when others covers all cases her 1 Source Filename: sparklalr_goto.adb Listing Filename: sparklalr_goto.lsb Unit name: Sparklalr_Goto Unit type: package body Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Source Filename: sparklalr_input.adb Listing Filename: sparklalr_input.lsb Unit name: Sparklalr_Input Unit type: package body Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line pragma Unreferenced (Dummy); ^ --- Warning : 3: Pragma - ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. 
Line Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 33 end Unused OK 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 33 end Unused OK 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 33 end Unused OK 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assigment to Symb he 1 Flow 33 end Unused OK 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 33 end Token is unused OK 1 Flow 10 Ineffective assignment here expe 1 Flow 33 end Unused OK 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assigment to Ch here 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assigment to Ch here 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assigment to Ch here 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assigment to Ch here 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 33 end Unused OK 1 Flow 33 end Token is unused OK 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assigment to Ch here 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 33 end Unused OK 1 Flow 33 end Token is unused OK 1 Flow 10 Ineffective assigment to Dummy h 1 Flow 10 Ineffective assigment to Dummy h 1 Flow 10 Ineffective assigment to Dummy h 1 Flow 33 end Dummy is unused OK 1 Source Filename: spark_ada_integer_text_io.adb Listing Filename: spark_ada_integer_text_io.lsb Unit name: SPARK_Ada_Integer_Text_IO Unit type: package body Unit has been analysed, any errors are listed below. 4 error(s) or warning(s) Line with Ada.Integer_Text_IO; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. with Ada.Text_IO; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. with SPARK.Ada.Text_IO.Not_SPARK; ^ --- Warning : 1: The identifier Not_SPARK is either undeclared or not visible at this point. end SPARK_Ada_Integer_Text_IO; --- Warning : 10: The body of package SPARK_Ada_Integer_Text_IO is hidden - hidden text is ignored by the Examiner. Source Filename: sparklalr_level.adb Listing Filename: sparklalr_level.lsb Unit name: Sparklalr_Level Unit type: package body Unit has been analysed, any errors are listed below. No errors found Source Filename: sparklalr_memory.adb Listing Filename: sparklalr_memory.lsb Unit name: Sparklalr_Memory Unit type: package body Unit has been analysed, any errors are listed below. No errors found Source Filename: sparklalr_memory-dump.adb Listing Filename: sparklalr_memory-dump.lsb Unit name: Sparklalr_Memory.Dump Unit type: package body Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. 
Line Warn 303 when others here covers all case 1 Warn 303 when others here covers all case 1 Warn 303 when others here covers all case 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Unused variable Look_Set_Added 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Unused variable Look_Set_Added 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Unused variable Look_Set_Added 1 Flow 41 Stable expression here expected 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 33 end Pl is unused OK 1 Source Filename: sparklalr_memory-left_corner.adb Listing Filename: sparklalr_memory-left_corner.lsb Unit name: Sparklalr_Memory.Left_Corner Unit type: package body Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Source Filename: sparklalr_parser.adb Listing Filename: sparklalr_parser.lsb Unit name: Sparklalr_Parser Unit type: package body Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Ineffective assigment to Posn he 1 Flow 10 Ineffective assigment to Pl here 1 Flow 10 Ineffective assigment to Posn he 1 Flow 10 Ineffective assigment to Pl here 1 Flow 33 end Pl is unused OK 1 Flow 10 Ineffective assigment to Posn he 1 Flow 10 Ineffective assigment to Posn he 1 Flow 10 Ineffective assigment to Posn he 1 Flow 10 Ineffective assigment to Posn he 1 Flow 10 Ineffective assigment to Posn he 1 Flow 10 Ineffective assigment to Posn he 1 Flow 10 Ineffective assigment to Posn he 1 Source Filename: sparklalr_patab.adb Listing Filename: sparklalr_patab.lsb Unit name: Sparklalr_Patab Unit type: package body Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 41 Stable expression here expected 1 Source Filename: symbols_dump.adb Listing Filename: symbols_dump.lsb Unit name: Symbols_Dump Unit type: package body Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Source Filename: sparklalr.adb Listing Filename: sparklalr.lsb Unit name: Sparklalr Unit type: main program Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line end Sparklalr; --- Warning : 9: The body of subprogram Sparklalr has a hidden exception handler - analysis and verification of contracts for this handler have not been performed. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. 
Line Flow 602 end Always defined before used 1 Flow 41 Stable expression here expected 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 10 Ineffective assignment here expe 1 Flow 41 Stable expression here expected 1 Flow 41 Stable expression here expected 1 Flow 602 end Always defined before used 1 Flow 602 end Always defined before used 1 Note: Automatic flow analysis mode selected --End of file-------------------------------------------------- spark-2012.0.deb/analyse/referenceanalysis/checker_spxref_undefined.txt0000644000175000017500000000020011753203755025352 0ustar eugeneugen\+prolog:'$acyclic'/1 _==directory->prolog:absolute_file_name/5 _==file->prolog:absolute_file_name/5 newutilities:list_to_set/2 spark-2012.0.deb/analyse/referenceanalysis/sparkclean.rep0000644000175000017500000001606611753203755022451 0ustar eugeneugen ******************************************************* Report of SPARK Examination Examiner GPL Edition ******************************************************* Options: index_file=sparkclean.idx warning_file=all.wrn notarget_compiler_data config_file=gnat.cfg source_extension=ada listing_extension=lst nolistings nodictionary_file report_file=sparkclean.rep nohtml vcg dpc plain_output sparklib nostatistics fdl_identifiers=accept flow_analysis=auto language=95 profile=sequential annotation_character=# rules=lazy error_explanations=off justification_option=full casing=si output_directory=vcg output_directory (actual)=vcg Selected files: @sparkclean.smf Index Filename(s) used were: sparkclean.idx spark.idx Meta File(s) used were: sparkclean.smf files.adb command_line.adb sparkclean.adb Summary warning reporting selected for: Hidden parts With clauses lacking a supporting inherit Declare annotations in non Ravenscar programs Notes All pragmas Target configuration file: Line package Standard is type Short_Short_Integer is range -128 .. 127; type Short_Integer is range -2**15 .. 2**15 - 1; type Integer is range -2**31 .. 2**31 - 1; type Long_Long_Integer is range -2**63 .. 2**63 - 1; end Standard; package System is Min_Int : constant := -2 ** 63; Max_Int : constant := 2 ** 63 - 1; end System; No summarized warnings Source Filename(s) used were: files.adb files.ads spark-ada-strings-unbounded.ads spark-ada-strings-maps.ads spark-ada-strings.ads spark-ada.ads spark.ads command_line.adb command_line.ads spark-ada-text_io-unbounded_string.ads spark-ada-text_io.ads spark-ada-command_line-unbounded_string.ads spark-ada-command_line.ads sparkclean.adb version.ads Source Filename: files.ads No Listing File Unit name: Files Unit type: package specification Unit has been analysed, any errors are listed below. No errors found No summarized warnings Source Filename: spark-ada-strings-unbounded.ads No Listing File Unit name: SPARK.Ada.Strings.Unbounded Unit type: package specification Unit has been analysed, any errors are listed below. No errors found 2 summarized warning(s), comprising: 1 hidden part(s)* 1 with clause(s) lacking a supporting inherit (*Note: the above warnings may affect the validity of the analysis.) Source Filename: spark-ada-strings-maps.ads No Listing File Unit name: SPARK.Ada.Strings.Maps Unit type: package specification Unit has been analysed, any errors are listed below. No errors found 2 summarized warning(s), comprising: 1 hidden part(s)* 1 with clause(s) lacking a supporting inherit (*Note: the above warnings may affect the validity of the analysis.) 
Source Filename: spark-ada-strings.ads No Listing File Unit name: SPARK.Ada.Strings Unit type: package specification Unit has been analysed, any errors are listed below. No errors found No summarized warnings Source Filename: spark-ada.ads No Listing File Unit name: SPARK.Ada Unit type: package specification Unit has been analysed, any errors are listed below. No errors found No summarized warnings Source Filename: spark.ads No Listing File Unit name: SPARK Unit type: package specification Unit has been analysed, any errors are listed below. No errors found No summarized warnings Source Filename: command_line.ads No Listing File Unit name: Command_Line Unit type: package specification Unit has been analysed, any errors are listed below. No errors found No summarized warnings Source Filename: spark-ada-text_io-unbounded_string.ads No Listing File Unit name: SPARK.Ada.Text_IO.Unbounded_String Unit type: package specification Unit has been analysed, any errors are listed below. No errors found 10 summarized warning(s), comprising: 10 declare annotations in non Ravenscar programs Source Filename: spark-ada-text_io.ads No Listing File Unit name: SPARK.Ada.Text_IO Unit type: package specification Unit has been analysed, any errors are listed below. No errors found 56 summarized warning(s), comprising: 1 hidden part(s)* 1 with clause(s) lacking a supporting inherit 54 declare annotations in non Ravenscar programs (*Note: the above warnings may affect the validity of the analysis.) Source Filename: spark-ada-command_line-unbounded_string.ads No Listing File Unit name: SPARK.Ada.Command_Line.Unbounded_String Unit type: package specification Unit has been analysed, any errors are listed below. No errors found No summarized warnings Source Filename: spark-ada-command_line.ads No Listing File Unit name: SPARK.Ada.Command_Line Unit type: package specification Unit has been analysed, any errors are listed below. No errors found No summarized warnings Source Filename: version.ads No Listing File Unit name: Version Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Warn 3 end Pragma to suppress compiler warn 1 No summarized warnings Source Filename: files.adb Listing Filename: files.lst Unit name: Files Unit type: package body Unit has been analysed, any errors are listed below. No errors found No summarized warnings Source Filename: command_line.adb Listing Filename: command_line.lst Unit name: Command_Line Unit type: package body Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 So far no arguments have a value 1 Flow 33 end So far, none of our arguments ha 1 No summarized warnings Source Filename: sparkclean.adb Listing Filename: sparkclean.lst Unit name: SPARKClean Unit type: main program Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 30 end This is only needed for partial 1 Flow 50 end This is only needed in the preco 1 3 summarized warning(s), comprising: 1 hidden part(s)* 2 with clause(s) lacking a supporting inherit (*Note: the above warnings may affect the validity of the analysis.) 
Note: Automatic flow analysis mode selected --End of file-------------------------------------------------- spark-2012.0.deb/analyse/referenceanalysis/victor.rep0000644000175000017500000004534211753203755021633 0ustar eugeneugen ******************************************************* Report of SPARK Examination Examiner GPL Edition ******************************************************* Options: index_file=victor.idx nowarning_file notarget_compiler_data config_file=gnat.cfg source_extension=ada listing_extension=ls_ nodictionary_file report_file=victor.rep nohtml vcg dpc plain_output sparklib nostatistics fdl_identifiers=accept flow_analysis=data language=95 profile=sequential annotation_character=# rules=lazy error_explanations=off justification_option=full casing=si output_directory=vcg output_directory (actual)=vcg Selected files: @victor.smf Index Filename(s) used were: victor.idx ada.idx spark.idx Meta File(s) used were: victor.smf victor.adb command_line.adb banner.adb victor_wrapper.adb Full warning reporting selected Target configuration file: Line package Standard is type Short_Short_Integer is range -128 .. 127; type Short_Integer is range -2**15 .. 2**15 - 1; type Integer is range -2**31 .. 2**31 - 1; type Long_Long_Integer is range -2**63 .. 2**63 - 1; end Standard; package System is Min_Int : constant := -2 ** 63; Max_Int : constant := 2 ** 63 - 1; end System; Source Filename(s) used were: victor.adb victor_wrapper.ads spark_io.ads e_strings.ads command_line.ads banner.ads version.ads spark-ada-strings-unbounded.ads spark-ada-strings-maps.ads ada-characters-handling.shs spark-ada-strings.ads spark-ada.ads spark.ads command_line.adb banner.adb victor_wrapper.adb Source Filename: victor_wrapper.ads No Listing File Unit name: Victor_Wrapper Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark_io.ads No Listing File Unit name: SPARK_IO Unit type: package specification Unit has been analysed, any errors are listed below. 26 error(s) or warning(s) Line --# Name_Of_File; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Name_Of_File; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# File_Sys; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# File_Sys; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Mode_Of_File; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# File_Sys; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# File_Sys; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Spacing; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Spacing; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# File; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Posn; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# File_Sys; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. 
--# Item; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# File_Sys; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Stop; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# File_Sys; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Stop; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Width; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Width; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Start_Pos; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Width; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Item; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Start_Pos; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Start_Pos; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. pragma Inline (Valid_File, End_Of_Line, End_Of_File, Get_Char); ^ --- Warning : 3: Pragma - ignored by the Examiner. end SPARK_IO; --- Warning : 10: The private part of package SPARK_IO is hidden - hidden text is ignored by the Examiner. Source Filename: e_strings.ads No Listing File Unit name: E_Strings Unit type: package specification Unit has been analysed, any errors are listed below. 16 error(s) or warning(s) Line --# Str; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# E_Str2; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Ch; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Search_String; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Search_String; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Search_String; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# E_Str from E_Str; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Search_Start; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Search_Char; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Start_Pt; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Start_Pt; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# SPARK_IO.File_Sys; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# SPARK_IO.File_Sys; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. 
--# File; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# File; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# SPARK_IO.File_Sys; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. Source Filename: command_line.ads No Listing File Unit name: Command_Line Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: banner.ads No Listing File Unit name: Banner Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: version.ads No Listing File Unit name: Version Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Warn 3 end Pragma to suppress compiler warn 1 Source Filename: spark-ada-strings-unbounded.ads No Listing File Unit name: SPARK.Ada.Strings.Unbounded Unit type: package specification Unit has been analysed, any errors are listed below. 18 error(s) or warning(s) Line with Ada.Strings.Unbounded; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. --# derives Target from Source; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# New_Item; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# New_Item; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# New_Item; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Index; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Source; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Test; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Mapping; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Low; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# New_Item; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Position; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Through; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Side; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Right; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Pad; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. --# Pad; --- Note : 1: This dependency relation was not used for this analysis and has not been checked for accuracy. end SPARK.Ada.Strings.Unbounded; --- Warning : 10: The private part of package Unbounded is hidden - hidden text is ignored by the Examiner. 
Source Filename: spark-ada-strings-maps.ads No Listing File Unit name: SPARK.Ada.Strings.Maps Unit type: package specification Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line with Ada.Strings.Maps; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. end SPARK.Ada.Strings.Maps; --- Warning : 10: The private part of package Maps is hidden - hidden text is ignored by the Examiner. Source Filename: ada-characters-handling.shs No Listing File Unit name: Ada.Characters.Handling Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark-ada-strings.ads No Listing File Unit name: SPARK.Ada.Strings Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark-ada.ads No Listing File Unit name: SPARK.Ada Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark.ads No Listing File Unit name: SPARK Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: victor.adb Listing Filename: victor.lsb Unit name: Victor Unit type: main program Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 22 end Command_Line_Ok is not invariant 1 Source Filename: command_line.adb Listing Filename: command_line.lsb Unit name: Command_Line Unit type: package body Unit has been analysed, any errors are listed below. 4 error(s) or warning(s) Line with Ada.Command_Line; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. with GNAT.Command_Line; ^ --- Warning : 1: The identifier GNAT is either undeclared or not visible at this point. end Initialize; --- Warning : 10: The body of subprogram Initialize is hidden - hidden text is ignored by the Examiner. end Set_Exit_Status_Error; --- Warning : 10: The body of subprogram Set_Exit_Status_Error is hidden - hidden text is ignored by the Examiner. Source Filename: banner.adb Listing Filename: banner.lsb Unit name: Banner Unit type: package body Unit has been analysed, any errors are listed below. No errors found Source Filename: victor_wrapper.adb Listing Filename: victor_wrapper.lsb Unit name: Victor_Wrapper Unit type: package body Unit has been analysed, any errors are listed below. 6 error(s) or warning(s) Line with GNAT.OS_Lib; ^ --- Warning : 1: The identifier GNAT is either undeclared or not visible at this point. with GNAT.Strings; ^ --- Warning : 1: The identifier GNAT is either undeclared or not visible at this point. with SPARK.Ada.Strings.Unbounded.Not_SPARK; ^ --- Warning : 1: The identifier Not_SPARK is either undeclared or not visible at this point. with Ada.Containers.Vectors; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. end Find_On_Path; --- Warning : 10: The body of subprogram Find_On_Path is hidden - hidden text is ignored by the Examiner. end Do_Spawn; --- Warning : 10: The body of subprogram Do_Spawn is hidden - hidden text is ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. 
Line Flow 10 We're closing the file, so we do 1 Note: Data flow analysis mode selected --End of file-------------------------------------------------- spark-2012.0.deb/analyse/referenceanalysis/wrap_utility.rep0000644000175000017500000001073511753203755023057 0ustar eugeneugen ******************************************************* Report of SPARK Examination Examiner GPL Edition ******************************************************* Options: index_file=wrap_utility.idx nowarning_file notarget_compiler_data config_file=gnat.cfg source_extension=ada listing_extension=ls_ nodictionary_file report_file=wrap_utility.rep nohtml plain_output sparklib nostatistics fdl_identifiers=accept flow_analysis=auto language=95 profile=sequential annotation_character=# rules=lazy error_explanations=off justification_option=full casing=si output_directory=vcg output_directory (actual)=vcg Selected files: @wrap_utility.smf Index Filename(s) used were: wrap_utility.idx spark.idx main.idx Meta File(s) used were: wrap_utility.smf wraps.adb Full warning reporting selected Target configuration file: Line package Standard is type Short_Short_Integer is range -128 .. 127; type Short_Integer is range -2**15 .. 2**15 - 1; type Integer is range -2**31 .. 2**31 - 1; type Long_Long_Integer is range -2**63 .. 2**63 - 1; end Standard; package System is Min_Int : constant := -2 ** 63; Max_Int : constant := 2 ** 63 - 1; end System; Source Filename(s) used were: wraps.adb wraps.ads spark_io.ads Source Filename: wraps.ads No Listing File Unit name: WRAPS Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark_io.ads No Listing File Unit name: SPARK_IO Unit type: package specification Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line pragma Inline (Valid_File, End_Of_Line, End_Of_File, Get_Char); ^ --- Warning : 3: Pragma - ignored by the Examiner. end SPARK_IO; --- Warning : 10: The private part of package SPARK_IO is hidden - hidden text is ignored by the Examiner. Source Filename: wraps.adb Listing Filename: wraps.lsb Unit name: WRAPS Unit type: package body Unit has been analysed, any errors are listed below. 9 error(s) or warning(s) Line with Ada.Command_Line, GNAT.OS_Lib, GNAT.IO_Aux, SPARK_IO; ^ ^ ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. --- Warning : 1: The identifier GNAT is either undeclared or not visible at this point. --- Warning : 1: The identifier GNAT is either undeclared or not visible at this point. end CheckFileExists; --- Warning : 10: The body of subprogram CheckFileExists is hidden - hidden text is ignored by the Examiner. end New_Wrap; --- Warning : 10: The body of subprogram New_Wrap is hidden - hidden text is ignored by the Examiner. end OpenInFile; --- Warning : 10: The body of subprogram OpenInFile is hidden - hidden text is ignored by the Examiner. end OpenOutFile; --- Warning : 10: The body of subprogram OpenOutFile is hidden - hidden text is ignored by the Examiner. end CleanUp; --- Warning : 10: The body of subprogram CleanUp is hidden - hidden text is ignored by the Examiner. end DisplayErrorMessage; --- Warning : 10: The body of subprogram DisplayErrorMessage is hidden - hidden text is ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 22 OpenInFile is hidden. 
Expression 1 Flow 23 Array elements are initialised b 1 Flow 602 end Array elements are initialised b 1 Flow 22 Initialise hides behaviour. Expr 1 Flow 10 May actually affect infile. 1 Flow 10 May actually affect outfile. 1 Note: Automatic flow analysis mode selected --End of file-------------------------------------------------- spark-2012.0.deb/analyse/referenceanalysis/pogs.rep0000644000175000017500000010027611753203755021273 0ustar eugeneugen ******************************************************* Report of SPARK Examination Examiner GPL Edition ******************************************************* Options: index_file=pogs.idx nowarning_file notarget_compiler_data config_file=gnat.cfg source_extension=ada listing_extension=ls_ nodictionary_file report_file=pogs.rep nohtml plain_output sparklib nostatistics fdl_identifiers=accept flow_analysis=auto language=95 profile=sequential annotation_character=# rules=lazy error_explanations=off justification_option=full casing=si output_directory=vcg output_directory (actual)=vcg Selected files: @pogs.smf Index Filename(s) used were: pogs.idx ada.idx spark.idx Meta File(s) used were: pogs.smf banner.adb banner-copyright.adb banner-get_version.adb fatalerrors.adb filedetails.adb fileheap.adb findfiles.adb heap.adb pathformatter.adb total.adb vcdetails.adb vcheap.adb vcs.adb vcs-analyseprooflogfile.adb vcs-analysereviewfile.adb vcs-analysesimplogfile.adb vcs-analysesimplifiedvcfile.adb vcs-analysevictoredvcfile.adb vcs-analysevictorlogfile.adb vcs-analyse_dpc_file.adb vcs-analyse_summary_dp_file.adb vcs-analyse_riposte_summary_file.adb vcs-analysevcfile.adb vcs-printvcreport.adb vcs-processnewrangeline.adb vcs-writevcinfo.adb toppackage.adb slg_parser.adb Full warning reporting selected Target configuration file: Line package Standard is type Short_Short_Integer is range -128 .. 127; type Short_Integer is range -2**15 .. 2**15 - 1; type Integer is range -2**31 .. 2**31 - 1; type Long_Long_Integer is range -2**63 .. 2**63 - 1; end Standard; package System is Min_Int : constant := -2 ** 63; Max_Int : constant := 2 ** 63 - 1; end System; Source Filename(s) used were: banner.adb banner.ads version.ads spark_io.ads e_strings.ads commandline.ads osfiling.ads oscommandline.ads spark-ada-strings-unbounded.ads spark-ada-strings-maps.ads ada-characters-handling.shs spark-ada-strings.ads spark-ada.ads spark.ads banner-copyright.adb banner-get_version.adb fatalerrors.adb fatalerrors.ads filedetails.adb filedetails.ads heapindex.ads fileheap.adb fileheap.ads heap.ads findfiles.adb findfiles.ads osdirectory.ads heap.adb pathformatter.adb pathformatter.ads total.adb total.ads vcheap.ads vcdetails.ads vcdetails.adb vcheap.adb vcs.adb vcs.ads spark_calendar.ads slg_parser.ads date_time.ads vcs-analyseprooflogfile.adb vcs-analysereviewfile.adb vcs-analysesimplogfile.adb vcs-analysesimplifiedvcfile.adb vcs-analysevictoredvcfile.adb vcs-analysevictorlogfile.adb vcs-analyse_dpc_file.adb vcs-analyse_summary_dp_file.adb vcs-analyse_riposte_summary_file.adb vcs-analysevcfile.adb vcs-printvcreport.adb vcs-processnewrangeline.adb vcs-writevcinfo.adb toppackage.adb toppackage.ads slg_parser.adb Source Filename: banner.ads No Listing File Unit name: Banner Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: version.ads No Listing File Unit name: Version Unit type: package specification Unit has been analysed, any errors are listed below. 
No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Warn 3 end Pragma to suppress compiler warn 1 Source Filename: spark_io.ads No Listing File Unit name: SPARK_IO Unit type: package specification Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line pragma Inline (Valid_File, End_Of_Line, End_Of_File, Get_Char); ^ --- Warning : 3: Pragma - ignored by the Examiner. end SPARK_IO; --- Warning : 10: The private part of package SPARK_IO is hidden - hidden text is ignored by the Examiner. Source Filename: e_strings.ads No Listing File Unit name: E_Strings Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: commandline.ads No Listing File Unit name: CommandLine Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: osfiling.ads No Listing File Unit name: OSFiling Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: oscommandline.ads No Listing File Unit name: OSCommandLine Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark-ada-strings-unbounded.ads No Listing File Unit name: SPARK.Ada.Strings.Unbounded Unit type: package specification Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line with Ada.Strings.Unbounded; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. end SPARK.Ada.Strings.Unbounded; --- Warning : 10: The private part of package Unbounded is hidden - hidden text is ignored by the Examiner. Source Filename: spark-ada-strings-maps.ads No Listing File Unit name: SPARK.Ada.Strings.Maps Unit type: package specification Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line with Ada.Strings.Maps; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. end SPARK.Ada.Strings.Maps; --- Warning : 10: The private part of package Maps is hidden - hidden text is ignored by the Examiner. Source Filename: ada-characters-handling.shs No Listing File Unit name: Ada.Characters.Handling Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark-ada-strings.ads No Listing File Unit name: SPARK.Ada.Strings Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark-ada.ads No Listing File Unit name: SPARK.Ada Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark.ads No Listing File Unit name: SPARK Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: fatalerrors.ads No Listing File Unit name: FatalErrors Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: filedetails.ads No Listing File Unit name: FileDetails Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: heapindex.ads No Listing File Unit name: HeapIndex Unit type: package specification Unit has been analysed, any errors are listed below. 
No errors found Source Filename: fileheap.ads No Listing File Unit name: FileHeap Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: heap.ads No Listing File Unit name: Heap Unit type: package specification Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line pragma Pack (AtomDescriptor); ^ --- Warning : 3: Pragma - ignored by the Examiner. Source Filename: findfiles.ads No Listing File Unit name: FindFiles Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: osdirectory.ads No Listing File Unit name: OSDirectory Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: pathformatter.ads No Listing File Unit name: PathFormatter Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: total.ads No Listing File Unit name: Total Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: vcheap.ads No Listing File Unit name: VCHeap Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: vcdetails.ads No Listing File Unit name: VCDetails Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: vcs.ads No Listing File Unit name: VCS Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark_calendar.ads No Listing File Unit name: SPARK_Calendar Unit type: package specification Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line with Calendar; ^ --- Warning : 1: The identifier Calendar is either undeclared or not visible at this point. end SPARK_Calendar; --- Warning : 10: The private part of package SPARK_Calendar is hidden - hidden text is ignored by the Examiner. Source Filename: slg_parser.ads No Listing File Unit name: SLG_Parser Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: date_time.ads No Listing File Unit name: Date_Time Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: toppackage.ads No Listing File Unit name: TopPackage Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: banner.adb Listing Filename: banner.lsb Unit name: Banner Unit type: package body Unit has been analysed, any errors are listed below. No errors found Source Filename: banner-copyright.adb Listing Filename: banner-copyright.lsb Unit name: Banner.Copyright Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: banner-get_version.adb Listing Filename: banner-get_version.lsb Unit name: Banner.Get_Version Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: fatalerrors.adb Listing Filename: fatalerrors.lsb Unit name: FatalErrors Unit type: package body Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line with POGS_Exceptions; ^ --- Warning : 1: The identifier POGS_Exceptions is either undeclared or not visible at this point. 
end FatalErrors; --- Warning : 10: The body of package FatalErrors is hidden - hidden text is ignored by the Examiner. Source Filename: filedetails.adb Listing Filename: filedetails.lsb Unit name: FileDetails Unit type: package body Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line for I in HeapIndex.IndexType loop --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 23 end Element-by-element array initial 1 Flow 602 end Element-by-element array initial 1 Source Filename: fileheap.adb Listing Filename: fileheap.lsb Unit name: FileHeap Unit type: package body Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line while not LoopFinished loop --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Dummy unused here 1 Flow 33 end Dummy unused here 1 Flow 10 Dummy unused here 2 Flow 33 end Dummy unused here 1 Source Filename: findfiles.adb Listing Filename: findfiles.lsb Unit name: FindFiles Unit type: package body Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line while UnResolvedDirFound loop --- Warning :402: Default assertion planted to cut loop. while NextSuccess loop --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Name unused here 1 Flow 33 end Name unused here 1 Source Filename: heap.adb Listing Filename: heap.lsb Unit name: Heap Unit type: package body Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 23 end Partial initialization 1 Flow 602 end Partial initialization 1 Source Filename: pathformatter.adb Listing Filename: pathformatter.lsb Unit name: PathFormatter Unit type: package body Unit has been analysed, any errors are listed below. No errors found Source Filename: total.adb Listing Filename: total.lsb Unit name: Total Unit type: package body Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line end Fatal_Error; --- Warning : 10: The body of subprogram Fatal_Error is hidden - hidden text is ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Unused_Path_End unused here 1 Flow 10 Unused_Path_Start unused here 1 Flow 10 Unused_VC_Name unused here 1 Flow 41 Expression is stable 1 Flow 41 Expression is stable 1 Flow 41 Expression is stable 1 Flow 33 end Unused_Path_End unused here 1 Flow 33 end Unused_Path_Start unused here 1 Flow 33 end Unused_VC_Name unused here 1 Flow 10 end Temp_Status not used here 1 Flow 10 end Temp_Store_Bool not used here 1 Flow 41 No need to write two loops here. 2 Flow 33 end Temp_Store_Bool is not needed. 1 Flow 33 end Temp_Status is not needed. 1 Source Filename: vcdetails.adb Listing Filename: vcdetails.lsb Unit name: VCDetails Unit type: package body Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. 
Line Flow 31 end Partial initialization 1 Flow 32 end Partial initialization 1 Flow 602 end Partial initialization 1 Flow 10 Dummy_Position unused here 7 Flow 10 To_Found unused here 1 Flow 33 end Dummy_Position unused here 1 Flow 33 end To_Found unused here 1 Source Filename: vcheap.adb Listing Filename: vcheap.lsb Unit name: VCHeap Unit type: package body Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line end Get_VC_State; --- Warning : 10: The body of subprogram Get_VC_State is hidden - hidden text is ignored by the Examiner. end Exists; --- Warning : 10: The body of subprogram Exists is hidden - hidden text is ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Existing_Path_Start not used her 1 Flow 10 Existing_Path_End not used here 1 Flow 10 Existing_End_Type not used here 1 Flow 10 Existing_VC_State not used here 1 Flow 10 Existing_DPC_State not used here 1 Flow 33 end Existing_Path_Start not used her 1 Flow 33 end Existing_Path_End not used here 1 Flow 33 end Existing_End_Type not used here 1 Flow 33 end Existing_VC_State not used here 1 Flow 33 end Existing_DPC_State not used here 1 Flow 10 Dummy not used here 1 Flow 33 end Dummy not used here 1 Flow 10 Dummy unused here 2 Flow 33 end Dummy unused here 1 Flow 10 Current_VC_Path_Start not used h 1 Flow 10 Current_VC_Path_End not used her 1 Flow 10 Current_VC_End_Type not used her 1 Flow 10 Current_VC_State not used here 1 Flow 10 Current_DPC_State not used here 1 Flow 33 end Current_VC_Path_Start not used h 1 Flow 33 end Current_VC_Path_End not used her 1 Flow 33 end Current_VC_End_Type not used her 1 Flow 33 end Current_VC_State not used here 1 Flow 33 end Current_DPC_State not used here 1 Source Filename: vcs.adb Listing Filename: vcs.lsb Unit name: VCS Unit type: package body Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line pragma Unreferenced (WriteVCInfo); ^ --- Warning : 3: Pragma - ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Unused_Pos unused here 1 Flow 33 end Unused_Pos unused here 1 Flow 10 We don't use this yet, but may i 1 Flow 601 end False coupling through SPARK_IO 1 Flow 601 end False coupling through SPARK_IO 1 Flow 601 end False coupling through SPARK_IO 1 Flow 601 end False coupling through SPARK_IO 1 Flow 601 end False coupling through SPARK_IO 1 Flow 601 end False coupling through SPARK_IO 1 Flow 601 end False coupling through SPARK_IO 1 Flow 601 end False coupling through SPARK_IO 1 Flow 601 end False coupling through SPARK_IO 1 Flow 601 end False coupling through SPARK_IO 1 Flow 601 end False coupling through SPARK_IO 1 Flow 601 end False coupling through SPARK_IO 1 Flow 601 end False coupling through SPARK_IO 1 Flow 601 end False coupling through SPARK_IO 1 Flow 601 end False coupling through SPARK_IO 1 Flow 601 end False coupling through SPARK_IO 1 Flow 33 end We don't export this yet - we mi 1 Source Filename: vcs-analyseprooflogfile.adb Listing Filename: vcs-analyseprooflogfile.lsb Unit name: VCS.AnalyseProofLogFile Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. 
Line Flow 10 Stop unused here 1 Flow 10 Stop unused here 1 Flow 10 Stop unused here 3 Flow 33 end Stop unused here 1 Flow 602 end Always well-defined when no erro 1 Flow 10 Dummy_Close_Status unused here 1 Flow 10 Proof_Log_File unused here 1 Flow 33 end Dummy_Close_Status unused here 1 Source Filename: vcs-analysereviewfile.adb Listing Filename: vcs-analysereviewfile.lsb Unit name: VCS.AnalyseReviewFile Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Dummy_Close_Status unused here 1 Flow 10 Review_File unused here 1 Flow 33 end Dummy_Close_Status unused here 1 Source Filename: vcs-analysesimplogfile.adb Listing Filename: vcs-analysesimplogfile.lsb Unit name: VCS.AnalyseSimpLogFile Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Modify filehandle to close file 1 Flow 601 end False coupling through SPARK_IO 1 Flow 601 end False coupling through SPARK_IO 1 Source Filename: vcs-analysesimplifiedvcfile.adb Listing Filename: vcs-analysesimplifiedvcfile.lsb Unit name: VCS.AnalyseSimplifiedVCFile Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Dummy_Close_Status unused here 1 Flow 10 Simplified_VC_File unused here 1 Flow 33 end Dummy_Close_Status unused here 1 Source Filename: vcs-analysevictoredvcfile.adb Listing Filename: vcs-analysevictoredvcfile.lsb Unit name: VCS.AnalyseVictoredVCFile Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 We don't care about Stop at the 1 Flow 33 end We don't care about Stop at the 1 Flow 10 We don't care anymore since we'v 1 Flow 10 Same as above. 1 Flow 601 end False coupling through SPARK_IO 1 Flow 601 end False coupling through SPARK_IO 1 Source Filename: vcs-analysevictorlogfile.adb Listing Filename: vcs-analysevictorlogfile.lsb Unit name: VCS.AnalyseVictorLogFile Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 We don't care anymore since we'v 1 Flow 10 Same as above. 1 Source Filename: vcs-analyse_dpc_file.adb Listing Filename: vcs-analyse_dpc_file.lsb Unit name: VCS.Analyse_DPC_File Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Dummy_Close_Status unused here 1 Flow 10 DPC_File unused here 1 Flow 33 end Dummy_Close_Status unused here 1 Source Filename: vcs-analyse_summary_dp_file.adb Listing Filename: vcs-analyse_summary_dp_file.lsb Unit name: VCS.Analyse_Summary_DP_File Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. 
Line Flow 10 DummyCloseStatus unused here 1 Flow 10 Summary_DP_File unused here 1 Flow 33 end DummyCloseStatus unused here 1 Source Filename: vcs-analyse_riposte_summary_file.adb Listing Filename: vcs-analyse_riposte_summary_file.lsb Unit name: VCS.Analyse_Riposte_Summary_File Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 We don't care about Stop at the 1 Flow 33 end We don't care about Stop at the 1 Flow 10 We don't care anymore since we'v 1 Flow 10 Same as above. 1 Flow 601 end False coupling through SPARK_IO 1 Flow 601 end False coupling through SPARK_IO 1 Source Filename: vcs-analysevcfile.adb Listing Filename: vcs-analysevcfile.lsb Unit name: VCS.AnalyseVCFile Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Dummy_Close_Status unused here 1 Flow 10 VC_File unused here 1 Flow 33 end Dummy_Close_Status unused here 1 Source Filename: vcs-printvcreport.adb Listing Filename: vcs-printvcreport.lsb Unit name: VCS.PrintVCReport Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Unused_Pos unused here 1 Flow 33 end Unused_Pos unused here 1 Source Filename: vcs-processnewrangeline.adb Listing Filename: vcs-processnewrangeline.lsb Unit name: VCS.ProcessNewRangeLine Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Dummy_Position unused here 3 Flow 10 Line_Found unused here 1 Flow 33 end Dummy_Position unused here 1 Flow 33 end Line_Found unused here 1 Flow 10 Line_Found unused here 1 Flow 33 end Line_Found unused here 1 Flow 10 Dummy_Position unused here 1 Flow 10 Line_Found unused here 1 Flow 33 end Dummy_Position unused here 1 Flow 33 end Line_Found unused here 1 Source Filename: vcs-writevcinfo.adb Listing Filename: vcs-writevcinfo.lsb Unit name: VCS.WriteVCInfo Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: toppackage.adb Listing Filename: toppackage.lsb Unit name: TopPackage Unit type: package body Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line while not Heap.IsNullPointer (This_File) and Success loop --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. 
Line Flow 10 Unused_Directory_Is_Resolved unu 1 Flow 41 Expression is stable but efficie 2 Flow 10 Temp_File unused here 1 Flow 10 Temp_False_File unused here 1 Flow 10 Temp_Contra_File unused here 1 Flow 10 Temp_Victor_File unused here 1 Flow 10 Temp_Riposte_File unused here 1 Flow 10 Temp_User_File unused here 1 Flow 10 Temp_Rlu_Error_File unused here 1 Flow 10 Temp_Rlu_Used_File unused here 1 Flow 10 Temp_PR_Verr_File unused here 1 Flow 10 Temp_PR_Verr_File unused here 1 Flow 10 Temp_DPC_Error_File unused here 1 Flow 10 Temp_SDP_Error_File unused here 1 Flow 10 Temp_Victor_Error_File unused he 1 Flow 10 Temp_Riposte_Error_File unused h 1 Flow 10 end Status unused here 1 Flow 10 end Report_File unused here 1 Flow 33 end Unused_Directory_Is_Resolved unu 1 Source Filename: slg_parser.adb Listing Filename: slg_parser.lsb Unit name: SLG_Parser Unit type: package body Unit has been analysed, any errors are listed below. 4 error(s) or warning(s) Line pragma Inline (Is_White_Space); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Inline (Is_Char_At_Posn); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Inline (Is_Whitespace_At_Posn); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Inline (Inc_Posn); ^ --- Warning : 3: Pragma - ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 23 The call to open initalises File 1 Flow 602 end The call to open initalises File 1 Flow 602 end The call to open initalises File 1 Flow 602 end The call to open initalises File 1 Flow 10 end The status returned from Close i 1 Flow 33 end The status returned from Close i 1 Note: Automatic flow analysis mode selected --End of file-------------------------------------------------- spark-2012.0.deb/analyse/referenceanalysis/sparkformat.rep0000644000175000017500000014150611753203755022655 0ustar eugeneugen ******************************************************* Report of SPARK Examination Examiner GPL Edition ******************************************************* Options: index_file=sparkformat.idx nowarning_file notarget_compiler_data config_file=gnat.cfg source_extension=ada listing_extension=ls_ nodictionary_file report_file=sparkformat.rep nohtml plain_output sparklib nostatistics fdl_identifiers=accept flow_analysis=auto language=95 profile=sequential annotation_character=# rules=lazy error_explanations=off justification_option=full casing=si output_directory=vcg output_directory (actual)=vcg Selected files: @sparkformat.smf Index Filename(s) used were: sparkformat.idx ada.idx spark.idx spark.idx main.idx errorhandler.idx Meta File(s) used were: sparkformat.smf sparkformatcommandlinedata.adb sparkformatcommandlinehandler.adb sparkprogram.adb sparkprogram-iteration.adb sparkprogram-annotations.adb sparkprogram-reformatter.adb sparkprogram-reformatter-simplelex.adb sparkformat.adb Full warning reporting selected Target configuration file: Line package Standard is type Short_Short_Integer is range -128 .. 127; type Short_Integer is range -2**15 .. 2**15 - 1; type Integer is range -2**31 .. 2**31 - 1; type Long_Long_Integer is range -2**63 .. 
2**63 - 1; end Standard; package System is Min_Int : constant := -2 ** 63; Max_Int : constant := 2 ** 63 - 1; end System; Source Filename(s) used were: sparkformatcommandlinedata.adb sparkformatcommandlinedata.ads filesystem.ads e_strings.ads examinerconstants.ads spark_io.ads spark-ada-strings-unbounded.ads spark-ada-strings-maps.ads ada-characters-handling.shs spark-ada-strings.ads spark-ada.ads spark.ads sparkformatcommandlinehandler.adb sparkformatcommandlinehandler.ads screenecho.ads commandlinehandler.ads commandlinedata.ads xmlreport.ads version.ads systemerrors.ads spark_xml.ads sparkprogram.adb sparkprogram.ads statistics.ads sp_symbols.ads sparklex.ads seqalgebra.ads relationalgebra.ads lextokenmanager-seq_algebra.ads lextokenmanager-relation_algebra-string.ads lextokenmanager-relation_algebra.ads lextokenmanager.ads heap.ads errorhandler.ads dictionary.ads maths.ads lextokenstacks.ads contextmanager.ads sp_relations.ads sp_expected_symbols.ads error_types.ads error_io.ads sp_productions.ads heap_storage.ads spark-ada-containers.ads lextokenlists.ads sparkprogram-iteration.adb sparkprogram-annotations.adb sparkprogram-reformatter.adb sparkprogram-reformatter-simplelex.adb sparkformat.adb Source Filename: sparkformatcommandlinedata.ads No Listing File Unit name: SparkFormatCommandLineData Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: filesystem.ads No Listing File Unit name: FileSystem Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: e_strings.ads No Listing File Unit name: E_Strings Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: examinerconstants.ads No Listing File Unit name: ExaminerConstants Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark_io.ads No Listing File Unit name: SPARK_IO Unit type: package specification Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line pragma Inline (Valid_File, End_Of_Line, End_Of_File, Get_Char); ^ --- Warning : 3: Pragma - ignored by the Examiner. end SPARK_IO; --- Warning : 10: The private part of package SPARK_IO is hidden - hidden text is ignored by the Examiner. Source Filename: spark-ada-strings-unbounded.ads No Listing File Unit name: SPARK.Ada.Strings.Unbounded Unit type: package specification Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line with Ada.Strings.Unbounded; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. end SPARK.Ada.Strings.Unbounded; --- Warning : 10: The private part of package Unbounded is hidden - hidden text is ignored by the Examiner. Source Filename: spark-ada-strings-maps.ads No Listing File Unit name: SPARK.Ada.Strings.Maps Unit type: package specification Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line with Ada.Strings.Maps; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. end SPARK.Ada.Strings.Maps; --- Warning : 10: The private part of package Maps is hidden - hidden text is ignored by the Examiner. Source Filename: ada-characters-handling.shs No Listing File Unit name: Ada.Characters.Handling Unit type: package specification Unit has been analysed, any errors are listed below. 
No errors found Source Filename: spark-ada-strings.ads No Listing File Unit name: SPARK.Ada.Strings Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark-ada.ads No Listing File Unit name: SPARK.Ada Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark.ads No Listing File Unit name: SPARK Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sparkformatcommandlinehandler.ads No Listing File Unit name: SparkFormatCommandLineHandler Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: screenecho.ads No Listing File Unit name: ScreenEcho Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: commandlinehandler.ads No Listing File Unit name: CommandLineHandler Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: commandlinedata.ads No Listing File Unit name: CommandLineData Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: xmlreport.ads No Listing File Unit name: XMLReport Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: version.ads No Listing File Unit name: Version Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Warn 3 end Pragma to suppress compiler warn 1 Source Filename: systemerrors.ads No Listing File Unit name: SystemErrors Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark_xml.ads No Listing File Unit name: SPARK_XML Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sparkprogram.ads No Listing File Unit name: SPARKProgram Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: statistics.ads No Listing File Unit name: Statistics Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sp_symbols.ads No Listing File Unit name: SP_Symbols Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sparklex.ads No Listing File Unit name: SparkLex Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: seqalgebra.ads No Listing File Unit name: SeqAlgebra Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: relationalgebra.ads No Listing File Unit name: RelationAlgebra Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: lextokenmanager-seq_algebra.ads No Listing File Unit name: LexTokenManager.Seq_Algebra Unit type: package specification Unit has been analysed, any errors are listed below. 
No errors found Source Filename: lextokenmanager-relation_algebra-string.ads No Listing File Unit name: LexTokenManager.Relation_Algebra.String Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: lextokenmanager-relation_algebra.ads No Listing File Unit name: LexTokenManager.Relation_Algebra Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: lextokenmanager.ads No Listing File Unit name: LexTokenManager Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: heap.ads No Listing File Unit name: Heap Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler.ads No Listing File Unit name: ErrorHandler Unit type: package specification Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line pragma Elaborate_All (SPARK_IO); ^ --- Warning : 3: Pragma - ignored by the Examiner. Source Filename: dictionary.ads No Listing File Unit name: Dictionary Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: maths.ads No Listing File Unit name: Maths Unit type: package specification Unit has been analysed, any errors are listed below. 3 error(s) or warning(s) Line pragma Inline (HasNoValue); ^ --- Warning : 3: Pragma - ignored by the Examiner. for Digit'Size use 4; ^ --- Warning : 2: Representation clause - ignored by the Examiner. pragma Pack (ValueArray); ^ --- Warning : 3: Pragma - ignored by the Examiner. Source Filename: lextokenstacks.ads No Listing File Unit name: LexTokenStacks Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: contextmanager.ads No Listing File Unit name: ContextManager Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sp_relations.ads No Listing File Unit name: SP_Relations Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sp_expected_symbols.ads No Listing File Unit name: SP_Expected_Symbols Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: error_types.ads No Listing File Unit name: Error_Types Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: error_io.ads No Listing File Unit name: Error_IO Unit type: package specification Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line end Error_IO; --- Warning : 10: The private part of package Error_IO is hidden - hidden text is ignored by the Examiner. Source Filename: sp_productions.ads No Listing File Unit name: SP_Productions Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: heap_storage.ads No Listing File Unit name: Heap_Storage Unit type: package specification Unit has been analysed, any errors are listed below. 3 error(s) or warning(s) Line with Ada.Containers; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. with Ada.Containers.Vectors; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. 
end Heap_Storage; --- Warning : 10: The private part of package Heap_Storage is hidden - hidden text is ignored by the Examiner. Source Filename: spark-ada-containers.ads No Listing File Unit name: SPARK.Ada.Containers Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: lextokenlists.ads No Listing File Unit name: LexTokenLists Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sparkformatcommandlinedata.adb Listing Filename: sparkformatcommandlinedata.lsb Unit name: SparkFormatCommandLineData Unit type: package body Unit has been analysed, any errors are listed below. No errors found Source Filename: sparkformatcommandlinehandler.adb Listing Filename: sparkformatcommandlinehandler.lsb Unit name: SparkFormatCommandLineHandler Unit type: package body Unit has been analysed, any errors are listed below. 92 error(s) or warning(s) Line SparkFormatCommandLineData.Content.Valid := not Add_Modes_Found; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Add_Modes := True; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Add_Modes, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := Opt_Val_OK ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. CommandLineData.Content.Anno_Char := E_Strings.Get_Element (E_Str => Opt_Val, ^ --- Warning :169: Direct update of own variable CommandLineData.Content.Anno_Char, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := not Compress_Found; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Operation := SparkFormatCommandLineData.Compress; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Operation, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := True; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Constituent_Indent := Indent_Val; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Constituent_Indent, which is an own variable of a non-enclosing package. 
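The run of Warning 169 messages for this unit flags assignments made directly to own variables of SparkFormatCommandLineData and CommandLineData from outside those packages; the Examiner analyses the updates as normal but notes that they bypass the owning package's operations. A minimal sketch of the pattern that draws the warning, using hypothetical packages rather than the real ones (each unit would normally be a separate file):

   package Settings
   --# own State;
   --# initializes State;
   is
      State : Boolean := False;
   end Settings;

   --# inherit Settings;
   package Controller is
      procedure Enable;
      --# global out Settings.State;
      --# derives Settings.State from ;
   end Controller;

   with Settings;
   package body Controller is
      procedure Enable is
      begin
         --  Direct assignment to an own variable of a non-enclosing
         --  package: accepted, but reported as Warning 169.
         Settings.State := True;
      end Enable;
   end Controller;

Routing the update through a procedure exported by Settings would avoid the warning.
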
SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := Opt_Val_OK and then not Default_Function_Mode_Found; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Default_Function_Mode := SparkFormatCommandLineData.In_Mode; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Default_Function_Mode, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Default_Function_Mode := SparkFormatCommandLineData.Unmoded; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Default_Function_Mode, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := not Expand_Found; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Operation := SparkFormatCommandLineData.Expand; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Operation, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := True; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Exclude_Export := Opt_Val; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Exclude_Export, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. 
SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := True; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Export_Indent := Indent_Val; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Export_Indent, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := True; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Global_Indent := Indent_Val; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Global_Indent, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := not Help_Found; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Help := True; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Help, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := True; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Inherit_Indent := Indent_Val; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Inherit_Indent, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. 
SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := True; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Initialization_Indent := Indent_Val; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Initialization_Indent, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := True; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Import_Indent := Indent_Val; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Import_Indent, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := not Add_Modes_Found; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Add_Modes := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Add_Modes, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := not Alphabetic_Ordering_Found; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. 
SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := True; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Own_Indent := Indent_Val; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Own_Indent, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := True; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Properties_Indent := Indent_Val; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Properties_Indent, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := True; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Refinement_Indent := Indent_Val; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Refinement_Indent, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := True; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. 
SparkFormatCommandLineData.Content.Separator_Indent := Indent_Val; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Separator_Indent, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := not Version_Found; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Version := True; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Version, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Number_Source := SparkFormatCommandLineData.Content.Number_Source + 1; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Number_Source, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Source_File_List (SparkFormatCommandLineData.Content.Number_Source). ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Source_File_List, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Number_Source := 0; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Number_Source, which is an own variable of a non-enclosing package. SparkFormatCommandLineData.Content.Valid := False; ^ --- Warning :169: Direct update of own variable SparkFormatCommandLineData.Content.Valid, which is an own variable of a non-enclosing package. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 We don't care if we found anythi 1 Flow 33 end And we don't tell anyone about i 1 Flow 22 Simulation of conditional compil 1 Source Filename: sparkprogram.adb Listing Filename: sparkprogram.lsb Unit name: SPARKProgram Unit type: package body Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line pragma Unreferenced (Is_End); -- for future use ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Unreferenced (Is_End); -- for future use ^ --- Warning : 3: Pragma - ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. 
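The "Expected messages marked with the accept annotation" tables in these reports summarise flow messages that the source justifies in advance, so matching messages are tabulated with a match count rather than reported as unjustified. An illustrative sketch of that pattern, assuming the F (flow) form of the accept annotation and reusing message numbers 10 and 33 as they appear in the tables; the package and the justification text are hypothetical:

   package Monitor
   is
      procedure Ping (OK : out Boolean);
      --# derives OK from ;
   end Monitor;

   package body Monitor is

      procedure Read (Value : out Integer; Valid : out Boolean)
      --# derives Value, Valid from ;
      is
      begin
         Value := 0;
         Valid := True;
      end Read;

      procedure Ping (OK : out Boolean) is
         Value : Integer;
      begin
         --  Value is deliberately discarded; the Examiner reports this
         --  as flow messages 10 and 33 (as in the tables above), which
         --  the accept annotations below justify in advance.
         --# accept F, 10, Value, "only the validity flag is needed here";
         Read (Value => Value, Valid => OK);
         --# accept F, 33, Value, "only the validity flag is needed here";
      end Ping;

   end Monitor;
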
Line Flow 10 end The variable Unused is not requi 1 Flow 33 end The variable Unused is not requi 1 Flow 10 end The variable Unused is not requi 1 Flow 33 end The variable Unused is not requi 1 Flow 10 end The variable Unused is not requi 1 Flow 33 end The variable Unused is not requi 1 Flow 10 end The variable Unused is not requi 1 Flow 33 end The variable Unused is not requi 1 Flow 41 Used to simplify loop structure 1 Flow 10 The next symbol should be 'deriv 1 Flow 602 end Start_Col may not be set if not 1 Flow 10 Final value of Index is not requ 1 Flow 10 The next symbol should be 'globa 1 Flow 602 end Start_Col may not be set if not 1 Flow 10 There is no globals list, so no 1 Flow 10 Not used when both global and de 1 Flow 10 If both globals and derives list 1 Flow 10 The state variable has been 'dis 1 Flow 10 The state variable has been 'dis 1 Flow 10 The state variable has been 'dis 1 Source Filename: sparkprogram-iteration.adb Listing Filename: sparkprogram-iteration.lsb Unit name: SPARKProgram.Iteration Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line exit when LexTokenManager.Seq_Algebra.Is_Null_Member (M => ThisMember); --- Warning :402: Default assertion planted to cut loop. Source Filename: sparkprogram-annotations.adb Listing Filename: sparkprogram-annotations.lsb Unit name: SPARKProgram.Annotations Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sparkprogram-reformatter.adb Listing Filename: sparkprogram-reformatter.lsb Unit name: SPARKProgram.Reformatter Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sparkprogram-reformatter-simplelex.adb Listing Filename: sparkprogram-reformatter-simplelex.lsb Unit name: SPARKProgram.Reformatter.SimpleLex Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sparkformat.adb Listing Filename: sparkformat.lsb Unit name: SPARKFormat Unit type: main program Unit has been analysed, any errors are listed below. 7 error(s) or warning(s) Line with Ada.Exceptions; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. with Ada.Command_Line; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. with GNAT.Traceback.Symbolic; ^ --- Warning : 1: The identifier GNAT is either undeclared or not visible at this point. with ScreenEcho; ^ --- Warning : 1: The identifier ScreenEcho is either undeclared or not visible at this point. end Print_Help; --- Warning : 10: The body of subprogram Print_Help is hidden - hidden text is ignored by the Examiner. end Print_Version; --- Warning : 10: The body of subprogram Print_Version is hidden - hidden text is ignored by the Examiner. end SPARKFormat; --- Warning : 9: The body of subprogram SPARKFormat has a hidden exception handler - analysis and verification of contracts for this handler have not been performed. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Warn 169 Direct update OK here. 1 Warn 169 Direct update OK here. 
1 Flow 10 Status is ignored 1 Flow 10 Status is ignored 1 Flow 10 File 'Input' is closed 1 Flow 10 Status is ignored 1 Note: Automatic flow analysis mode selected --End of file-------------------------------------------------- spark-2012.0.deb/analyse/referenceanalysis/examiner.rep0000644000175000017500000134312111753203755022132 0ustar eugeneugen ******************************************************* Report of SPARK Examination Examiner GPL Edition ******************************************************* Options: index_file=spark.idx nowarning_file notarget_compiler_data config_file=gnat.cfg source_extension=ada listing_extension=ls_ dictionary_file=examiner.dic report_file=mainunits95.rep nohtml plain_output sparklib nostatistics fdl_identifiers=accept flow_analysis=auto language=95 profile=sequential annotation_character=# rules=lazy error_explanations=off justification_option=full casing=si output_directory=vcg output_directory (actual)=vcg Selected files: @spark.smf Index Filename(s) used were: spark.idx ada.idx spark.idx main.idx sli.idx indexmanager.idx dag.idx errorhandler.idx declarations.idx vcg.idx flowanalyser.idx sem.idx Meta File(s) used were: spark.smf main.smf support.smf commandlinedata.adb commandlinehandler.adb completecheck.adb componenterrors.adb componentmanager.adb configfile.adb contextmanager-ops.adb date_time.adb e_strings.adb file_utils.adb heap.adb indexmanager-cache.shb indexmanager-index_table_p.adb indexmanager.adb lextokenlists.adb lextokenmanager.adb lextokenmanager-insert_nat.adb lextokenmanager-relation_algebra.adb lextokenmanager-relation_algebra-string.adb lextokenmanager-seq_algebra.adb lextokenstacks.adb lists.adb maths.adb maths-literaltovalue.adb maths-parsestring.adb maths-valuetostring.adb metafile.adb reflist.adb relationalgebra.adb requiredunits.adb screenecho.adb seqalgebra.adb simplelists.adb sparkhtml.adb statistics.adb stree.adb stree-findlastitemindependencyrelation.adb symbol_set.adb systemerrors.adb sprint.adb spark_xml.adb xmlreport.adb sparklex.smf sparklex.adb sparklex-lex.adb sparklex-lex-apostintro.adb sparklex-lex-getident.adb sparklex-lex-getnumber.adb sparklex-lex-getstring.adb sparklex-lex-hyphintro.adb sparklex-lex-ltintro.adb sparklex-lex-nextlex.adb sparklex-linemanager.adb spparser.smf spparser.adb sp_parser_actions.adb sp_parser_actions-scan_action_table.adb sp_parser_actions-spa.adb sp_parser_goto.adb sp_parser_goto-scan_goto_table.adb sp_parser_goto-sp_goto.adb sp_expected_symbols.adb sp_expected_symbols-get_expected_symbols.adb dict.smf dictionary.adb dictionary-add_declaration.adb dictionary-add_generic_formal_parameter_local.adb dictionary-add_record_component.adb dictionary-add_record_subcomponent.adb dictionary-add_renaming_declaration.adb dictionary-add_subprogram_parameter.adb dictionary-add_use_type_reference.adb dictionary-addinheritsreference.adb dictionary-addloop.adb dictionary-addwithreference.adb dictionary-attribute_is_visible.adb dictionary-attribute_is_visible_but_obsolete_local.adb dictionary-dynamic_symbol_table.adb dictionary-generatesimplename.adb dictionary-get_binary_operator_type_local.adb dictionary-get_record_component.adb dictionary-get_scalar_attribute_type.adb dictionary-get_scalar_attribute_value.adb dictionary-getanyprefixneeded.adb dictionary-getscope.adb dictionary-initialize.adb dictionary-instantiate_subprogram_parameters.adb dictionary-is_callable.adb dictionary-is_renamed_local.adb dictionary-lookupitem.adb dictionary-lookupscope.adb dictionary-lookupselecteditem.adb 
dictionary-nextsymbol.adb dictionary-operator_is_visible.adb dictionary-rawdict.adb dictionary-search_for_inherited_operations.adb dictionary-targetdata.adb dictionary-write.adb dictionary-writeoperatorrenamingdeclaration.adb errorhandler.smf errorhandler.adb errorhandler-getfileline.adb errorhandler-printerrors.adb errorhandler-printline.adb errorhandler-appenderrors.adb errorhandler-appendsym.adb errorhandler-echoerrorentry.adb errorhandler-erroraccumulator.adb errorhandler-warningstatus.adb errorhandler-warningstatus-readwarningfile.adb errorhandler-errorbuffer.adb errorhandler-conversions.adb errorhandler-justifications.adb errorhandler-conversions-tostring.adb errorhandler-conversions-tostring-appendreference.adb errorhandler-conversions-tostring-condldependency.adb errorhandler-conversions-tostring-condldependency-condldependencyexpl.adb errorhandler-conversions-tostring-condlflowerr.adb errorhandler-conversions-tostring-condlflowerr-condlflowerrexpl.adb errorhandler-conversions-tostring-controlflowerror.adb errorhandler-conversions-tostring-controlflowerror-controlflowerrorexpl.adb errorhandler-conversions-tostring-depsemanticerr.adb errorhandler-conversions-tostring-depsemanticerr-depsemanticerrexpl.adb errorhandler-conversions-tostring-ineffectivestatement.adb errorhandler-conversions-tostring-ineffectivestatement-ineffectivestatementexpl.adb errorhandler-conversions-tostring-noerr.adb errorhandler-conversions-tostring-semanticerr.adb errorhandler-conversions-tostring-semanticerr-semanticerrexpl.adb errorhandler-conversions-tostring-stabilityerror.adb errorhandler-conversions-tostring-stabilityerror-stabilityerrorexpl.adb errorhandler-conversions-tostring-unconddependency.adb errorhandler-conversions-tostring-unconddependency-unconddependencyexpl.adb errorhandler-conversions-tostring-uncondflowerr.adb errorhandler-conversions-tostring-uncondflowerr-uncondflowerrexpl.adb errorhandler-conversions-tostring-usageerror.adb errorhandler-conversions-tostring-usageerror-usageerrorexpl.adb errorhandler-conversions-tostring-warningwithoutposition.adb errorhandler-conversions-tostring-warningwithoutposition-warningwithoutpositionexpl.adb errorhandler-conversions-tostring-warningwithposition.adb errorhandler-conversions-tostring-warningwithposition-warningwithpositionexpl.adb errorhandler-conversions-tostring-note.adb errorhandler-conversions-tostring-note-noteexpl.adb flows.smf flowanalyser.adb flowanalyser-ifa_stack.adb flowanalyser-flowanalyse.adb flowanalyser-flowanalyse-analyserelations.adb flowanalyser-flowanalyse-analyserelations-checkexpressions.adb flowanalyser-flowanalyse-analyserelations-checkusages.adb flowanalyser-flowanalyse-analyserelations-checkdependencies.adb flowanalyser-flowanalyse-analyserelations-checkunused.adb flowanalyser-flowanalyse-analyserelations-mergeandhandleerrors.adb flowanalyser-flowanalysepartition.adb declarations.smf declarations.adb declarations-outputdeclarations.adb declarations-outputdeclarations-generatedeclarations.adb declarations-outputdeclarations-generatedeclarations-generatesuccessors.adb declarations-outputdeclarations-printdeclarations.adb declarations-outputdeclarations-printdeclarations-printconstantrules.adb declarations-outputdeclarations-printdeclarations-printruleheader.adb declarations-outputdeclarations-printdeclarations-printtyperules.adb dag.smf cells.adb cells-utility.adb cells-utility-list.adb cell_storage.adb dag.adb dag-build_annotation_expression.adb dag-buildexpndag.adb dag-buildexpndag-upattributedesignator.adb 
dag-buildgraph.adb dag-buildgraph-modelassignmentstmt.adb dag-buildgraph-modelprocedurecall.adb dag-buildgraph-incorporateconstraints.adb dag-loopcontext.adb dag-substitutions.adb dag-type_constraint.adb dag_io.adb vcg.smf clists.adb cstacks.adb labels.adb pairs.adb pile.adb structures.adb graph.adb stmtstack.adb vcg.adb vcg-producevcs.adb sem.smf sem.adb sem-add_derives_stream_effects.adb sem-add_record_sub_components.adb sem-aggregate_stack.adb sem-assignment_check.adb sem-check_announced_types_declared.adb sem-check_ceiling_priority.adb sem-check_closing_identifier.adb sem-check_interrupt_property_consistency.adb sem-check_named_association.adb sem-check_no_overloading_from_tagged_ops.adb sem-check_no_overloading_from_tagged_ops-successfully_overrides.adb sem-check_package_prefix.adb sem-check_priority_property_consistency.adb sem-check_priority_range.adb sem-check_protected_modifier_consistency.adb sem-check_suspendable_property_consistency.adb sem-check_task_modifier_consistency.adb sem-check_valid_ident.adb sem-compunit.adb sem-compunit-checkembedbodies.adb sem-compunit-checkpackageneedsbody.adb sem-compunit-checksuspendslistaccountedfor.adb sem-compunit-stack.adb sem-compunit-up_wf_package_body.adb sem-compunit-up_wf_protected_body.adb sem-compunit-up_wf_subprogram_body.adb sem-compunit-up_wf_task_body.adb sem-compunit-walkstatements.adb sem-compunit-walkstatements-checkformutuallyexclusivebranches.adb sem-compunit-walkstatements-down_loop.adb sem-compunit-walkstatements-up_case.adb sem-compunit-walkstatements-up_loop.adb sem-compunit-walkstatements-variableupdatehistory.adb sem-compunit-walkstatements-wf_assign.adb sem-compunit-walkstatements-wf_case.adb sem-compunit-walkstatements-wf_case_choice.adb sem-compunit-walkstatements-wf_condition.adb sem-compunit-walkstatements-wf_delay_until.adb sem-compunit-walkstatements-wf_exit.adb sem-compunit-walkstatements-wf_loop_param.adb sem-compunit-walkstatements-wf_proc_call.adb sem-compunit-walkstatements-wf_return.adb sem-compunit-wf_body_stub.adb sem-compunit-wf_entry_body.adb sem-compunit-wf_generic_declaration.adb sem-compunit-wf_generic_package_instantiation.adb sem-compunit-wf_machine_code_insertion.adb sem-compunit-wf_package_body.adb sem-compunit-wf_package_body-wf_refine.adb sem-compunit-wf_package_body-wf_refine-wf_clause.adb sem-compunit-wf_package_initialization.adb sem-compunit-wf_proof_function_declaration.adb sem-compunit-wf_proof_renaming_declaration.adb sem-compunit-wf_protected_body.adb sem-compunit-wf_subprogram_body.adb sem-compunit-wf_subprogram_body-processpartitionannotation.adb sem-compunit-wf_subunit.adb sem-compunit-wf_task_body.adb sem-compunit-wf_use_type_clause.adb sem-constraint_check.adb sem-convert_tagged_actual.adb sem-create_implicit_positive_subtype.adb sem-create_interrupt_stream_variable.adb sem-dependency_relation.adb sem-dependency_relation-check_derives_consistency.adb sem-dependency_relation-create_full_dependency.adb sem-dependency_relation-create_full_subprog_dependency.adb sem-dependency_relation-wf_dependency_relation.adb sem-find_actual_node.adb sem-find_previous_package.adb sem-get_literal_value.adb sem-get_subprogram_anno_key_nodes.adb sem-get_type_bounds.adb sem-illegal_unconstrained.adb sem-indexes_match.adb sem-in_package_initialization.adb sem-is_enclosing_package.adb sem-is_external_interface.adb sem-needs_synthetic_dependency.adb sem-plant_constraining_type.adb sem-range_check.adb sem-subprogram_specification.adb sem-substitute_protected_type_self_reference.adb 
sem-unexpected_initialization.adb sem-unknown_type_record.adb sem-walk_expression_p.adb sem-walk_expression_p-add_name.adb sem-walk_expression_p-attribute_designator_type_from_context.adb sem-walk_expression_p-calc_binary_operator.adb sem-walk_expression_p-check_binary_operator.adb sem-walk_expression_p-check_binary_operator-homo_impl_type_conv.adb sem-walk_expression_p-create_name_list.adb sem-walk_expression_p-dispose_of_name_list.adb sem-walk_expression_p-down_wf_aggregate.adb sem-walk_expression_p-down_wf_aggregate_or_expression.adb sem-walk_expression_p-down_wf_name_argument_list.adb sem-walk_expression_p-expression_type_from_context.adb sem-walk_expression_p-exp_stack.adb sem-walk_expression_p-find_named_argument_association_parameter.adb sem-walk_expression_p-get_character_literal.adb sem-walk_expression_p-get_string_literal_length.adb sem-walk_expression_p-null_parameter_record.adb sem-walk_expression_p-null_type_record.adb sem-walk_expression_p-ops_are_same_and_commutative.adb sem-walk_expression_p-primary_type_from_context.adb sem-walk_expression_p-put_exp_record.adb sem-walk_expression_p-range_constraint_type_from_context.adb sem-walk_expression_p-simple_expression_type_from_context.adb sem-walk_expression_p-stack_identifier.adb sem-walk_expression_p-type_context_stack.adb sem-walk_expression_p-unknown_symbol_record.adb sem-walk_expression_p-up_wf_aggregate.adb sem-walk_expression_p-up_wf_aggregate_or_expression.adb sem-walk_expression_p-up_wf_name_argument_list.adb sem-walk_expression_p-walk_annotation_expression.adb sem-walk_expression_p-walk_annotation_expression-down_wf_quantifier.adb sem-walk_expression_p-walk_annotation_expression-down_wf_store.adb sem-walk_expression_p-walk_annotation_expression-down_wf_store_list.adb sem-walk_expression_p-walk_annotation_expression-up_wf_quantifier.adb sem-walk_expression_p-walk_annotation_expression-up_wf_store.adb sem-walk_expression_p-walk_annotation_expression-up_wf_store_list.adb sem-walk_expression_p-walk_expression.adb sem-walk_expression_p-wf_aggregate_choice.adb sem-walk_expression_p-wf_aggregate_choice_rep.adb sem-walk_expression_p-wf_ancestor_part.adb sem-walk_expression_p-wf_arange.adb sem-walk_expression_p-wf_attribute.adb sem-walk_expression_p-wf_attribute_designator.adb sem-walk_expression_p-wf_attribute_designator-calc_attribute.adb sem-walk_expression_p-wf_component_association.adb sem-walk_expression_p-wf_expression.adb sem-walk_expression_p-wf_factor.adb sem-walk_expression_p-wf_identifier.adb sem-walk_expression_p-wf_named_argument_association.adb sem-walk_expression_p-wf_named_association_rep.adb sem-walk_expression_p-wf_named_record_component_association.adb sem-walk_expression_p-wf_percent.adb sem-walk_expression_p-wf_positional_argument_association.adb sem-walk_expression_p-wf_positional_association.adb sem-walk_expression_p-wf_positional_record_component_association.adb sem-walk_expression_p-wf_primary.adb sem-walk_expression_p-wf_primary-protected_references_by.adb sem-walk_expression_p-wf_qualified_expression.adb sem-walk_expression_p-wf_record_component_selector_name.adb sem-walk_expression_p-wf_relation.adb sem-walk_expression_p-wf_selected_component.adb sem-walk_expression_p-wf_simple_expression.adb sem-walk_expression_p-wf_simple_expression_opt.adb sem-walk_expression_p-wf_term.adb sem-walk_expression_p-wf_tilde.adb sem-walk_name.adb sem-wf_argument_association.adb sem-wf_argument_association-tagged_actual_must_be_object_check.adb sem-wf_array_type_definition.adb sem-wf_basic_declarative_item.adb 
sem-wf_basic_declarative_item-check_subtype_against_basetype_bounds.adb sem-wf_basic_declarative_item-wf_basic_declaration.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_derived.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_enum.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_integer.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_known_discriminant_part.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_modular.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_priority_pragma.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_protected_type_declaration.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_protected_type_declaration-wf_protected_op_dec.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_real.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_record.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_task_type_declaration.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_type_extension.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_subtype_declaration.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_subtype_declaration-wf_ravenscar_subtype.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_variable_declaration.adb sem-wf_context_clause.adb sem-wf_context_clause_package_body.adb sem-wf_context_clause_package_body-use_clause.adb sem-wf_context_clause_package_body-with_clause.adb sem-wf_declare_annotation.adb sem-wf_entire_variable.adb sem-wf_external_interface.adb sem-wf_formal_part.adb sem-wf_formal_part-wf_param.adb sem-wf_generic_formal_part.adb sem-wf_generic_subprogram_instantiation.adb sem-wf_generic_subprogram_instantiation-wf_generic_actual_part.adb sem-wf_global_definition.adb sem-wf_inherit_clause.adb sem-wf_justification_statement.adb sem-wf_package_declaration.adb sem-wf_package_declaration-add_child.adb sem-wf_package_declaration-get_package_declaration_key_nodes.adb sem-wf_package_declaration-wf_package_specification.adb sem-wf_package_declaration-wf_package_specification-check_modes.adb sem-wf_package_declaration-wf_package_specification-check_state_can_be_initialized.adb sem-wf_package_declaration-wf_package_specification-check_types_can_be_used.adb sem-wf_package_declaration-wf_package_specification-wf_anno.adb sem-wf_package_declaration-wf_package_specification-wf_anno-wf_init_spec.adb sem-wf_package_declaration-wf_package_specification-wf_anno-wf_own.adb sem-wf_package_declaration-wf_package_specification-wf_private.adb sem-wf_package_declaration-wf_package_specification-wf_visible.adb sem-wf_package_declaration-wf_package_specification-wf_visible-wf_deferred.adb sem-wf_package_declaration-wf_package_specification-wf_visible-wf_private_type_declaration.adb sem-wf_pragma.adb sem-wf_pragma-wf_attach_handler.adb sem-wf_pragma-wf_elaborate_body.adb sem-wf_predicate.adb sem-wf_priority_value.adb sem-wf_property_list.adb sem-wf_renaming_declaration.adb sem-wf_subprogram_annotation.adb sem-wf_subprogram_constraint.adb sem-wf_subprogram_declaration.adb sem-wf_type_mark.adb spark.smf spark-ada-command_line.adb spark-ada-command_line-unbounded_string.adb 
spark-ada-strings-unbounded.adb spark-ada-strings-maps.adb spark-ada-text_io.adb spark-ada-text_io-unbounded_string.adb sli.adb sli-io.adb sli-xref.shb mainloop.adb examiner.adb casing.adb Full warning reporting selected Target configuration file: Line package Standard is type Short_Short_Integer is range -128 .. 127; type Short_Integer is range -2**15 .. 2**15 - 1; type Integer is range -2**31 .. 2**31 - 1; type Long_Long_Integer is range -2**63 .. 2**63 - 1; end Standard; package System is Min_Int : constant := -2 ** 63; Max_Int : constant := 2 ** 63 - 1; end System; Source Filename(s) used were: commandlinedata.adb commandlinedata.ads xmlreport.ads version.ads spark_io.ads filesystem.ads e_strings.ads examinerconstants.ads spark-ada-strings-unbounded.ads spark-ada-strings-maps.ads ada-characters-handling.shs spark-ada-strings.ads spark-ada.ads spark.ads systemerrors.ads spark_xml.ads screenecho.ads commandlinehandler.adb commandlinehandler.ads completecheck.adb completecheck.ads maths.ads lextokenmanager.ads statistics.ads componenterrors.adb componenterrors.ads seqalgebra.ads heap.ads dictionary.ads sp_symbols.ads lextokenstacks.ads contextmanager.ads heap_storage.ads spark-ada-containers.ads componentmanager.adb componentmanager.ads debug.ads stree.ads cstacks.ads cells.ads cell_storage.ads configfile.adb configfile.ads sp_expected_symbols.ads sparklex.ads lextokenlists.ads error_types.ads error_io.ads errorhandler.ads casing.ads sp_relations.ads sp_productions.ads contextmanager-ops.adb contextmanager-ops.ads indexmanager.ads date_time.adb date_time.ads e_strings.adb file_utils.adb file_utils.ads heap.adb indexmanager-cache.shb indexmanager-cache.ads indexmanager-index_table_p.ads indexmanager-index_table_p.adb indexmanager.adb lextokenlists.adb lextokenmanager.adb lextokenmanager-insert_nat.adb lextokenmanager-relation_algebra.adb lextokenmanager-relation_algebra.ads relationalgebra.ads lextokenmanager-seq_algebra.ads lextokenmanager-relation_algebra-string.adb lextokenmanager-relation_algebra-string.ads lextokenmanager-seq_algebra.adb lextokenstacks.adb lists.adb lists.ads maths.adb maths-literaltovalue.adb maths-parsestring.adb maths-valuetostring.adb metafile.adb metafile.ads reflist.adb reflist.ads relationalgebra.adb requiredunits.adb requiredunits.ads screenecho.adb seqalgebra.adb simplelists.adb simplelists.ads sparkhtml.adb sparkhtml.ads statistics.adb stree.adb stree-findlastitemindependencyrelation.adb symbol_set.adb symbol_set.ads systemerrors.adb sprint.adb sprint.ads spark_xml.adb xmlreport.adb sparklex.adb sparklex-lex.adb sparklex-lex-apostintro.adb sparklex-lex-getident.adb sparklex-lex-getnumber.adb sparklex-lex-getstring.adb sparklex-lex-hyphintro.adb sparklex-lex-ltintro.adb sparklex-lex-nextlex.adb sparklex-linemanager.adb spparser.adb spparser.ads sp_parser_goto.ads sp_parser_actions.ads sp_parser_actions.adb sp_parser_actions-scan_action_table.adb sp_parser_actions-spa.adb sp_parser_goto.adb sp_parser_goto-scan_goto_table.adb sp_parser_goto-sp_goto.adb sp_expected_symbols.adb sp_expected_symbols-get_expected_symbols.adb dictionary.adb dictionary-add_declaration.adb dictionary-add_generic_formal_parameter_local.adb dictionary-add_record_component.adb dictionary-add_record_subcomponent.adb dictionary-add_renaming_declaration.adb dictionary-add_subprogram_parameter.adb dictionary-add_use_type_reference.adb dictionary-addinheritsreference.adb dictionary-addloop.adb dictionary-addwithreference.adb dictionary-attribute_is_visible.adb 
dictionary-attribute_is_visible_but_obsolete_local.adb dictionary-dynamic_symbol_table.adb dictionary-generatesimplename.adb dictionary-get_binary_operator_type_local.adb dictionary-get_record_component.adb dictionary-get_scalar_attribute_type.adb dictionary-get_scalar_attribute_value.adb dictionary-getanyprefixneeded.adb dictionary-getscope.adb dictionary-initialize.adb dictionary-instantiate_subprogram_parameters.adb dictionary-is_callable.adb dictionary-is_renamed_local.adb dictionary-lookupitem.adb dictionary-lookupscope.adb dictionary-lookupselecteditem.adb dictionary-nextsymbol.adb dictionary-operator_is_visible.adb dictionary-rawdict.adb dictionary-search_for_inherited_operations.adb dictionary-targetdata.adb dictionary-write.adb dictionary-writeoperatorrenamingdeclaration.adb errorhandler.adb errorhandler-getfileline.adb errorhandler-printerrors.adb errorhandler-printline.adb errorhandler-appenderrors.adb errorhandler-appendsym.adb errorhandler-echoerrorentry.adb errorhandler-erroraccumulator.adb errorhandler-warningstatus.adb errorhandler-warningstatus-readwarningfile.adb errorhandler-errorbuffer.adb errorhandler-conversions.adb errorhandler-justifications.adb errorhandler-conversions-tostring.adb errorhandler-conversions-tostring-appendreference.adb errorhandler-conversions-tostring-condldependency.adb errorhandler-conversions-tostring-condldependency-condldependencyexpl.adb errorhandler-conversions-tostring-condlflowerr.adb errorhandler-conversions-tostring-condlflowerr-condlflowerrexpl.adb errorhandler-conversions-tostring-controlflowerror.adb errorhandler-conversions-tostring-controlflowerror-controlflowerrorexpl.adb errorhandler-conversions-tostring-depsemanticerr.adb errorhandler-conversions-tostring-depsemanticerr-depsemanticerrexpl.adb errorhandler-conversions-tostring-ineffectivestatement.adb errorhandler-conversions-tostring-ineffectivestatement-ineffectivestatementexpl.adb errorhandler-conversions-tostring-noerr.adb errorhandler-conversions-tostring-semanticerr.adb errorhandler-conversions-tostring-semanticerr-semanticerrexpl.adb errorhandler-conversions-tostring-stabilityerror.adb errorhandler-conversions-tostring-stabilityerror-stabilityerrorexpl.adb errorhandler-conversions-tostring-unconddependency.adb errorhandler-conversions-tostring-unconddependency-unconddependencyexpl.adb errorhandler-conversions-tostring-uncondflowerr.adb errorhandler-conversions-tostring-uncondflowerr-uncondflowerrexpl.adb errorhandler-conversions-tostring-usageerror.adb errorhandler-conversions-tostring-usageerror-usageerrorexpl.adb errorhandler-conversions-tostring-warningwithoutposition.adb errorhandler-conversions-tostring-warningwithoutposition-warningwithoutpositionexpl.adb errorhandler-conversions-tostring-warningwithposition.adb errorhandler-conversions-tostring-warningwithposition-warningwithpositionexpl.adb errorhandler-conversions-tostring-note.adb errorhandler-conversions-tostring-note-noteexpl.adb flowanalyser.adb flowanalyser.ads relationalgebra-debug.ads flowanalyser-ifa_stack.adb flowanalyser-flowanalyse.adb flowanalyser-flowanalyse-analyserelations.adb flowanalyser-flowanalyse-analyserelations-checkexpressions.adb flowanalyser-flowanalyse-analyserelations-checkusages.adb flowanalyser-flowanalyse-analyserelations-checkdependencies.adb flowanalyser-flowanalyse-analyserelations-checkunused.adb flowanalyser-flowanalyse-analyserelations-mergeandhandleerrors.adb flowanalyser-flowanalysepartition.adb declarations.adb declarations.ads pile.ads pairs.ads dag_io.ads clists.ads 
adjustfdl_rws.ads labels.ads structures.ads declarations-outputdeclarations.adb declarations-outputdeclarations-generatedeclarations.adb declarations-outputdeclarations-generatedeclarations-generatesuccessors.adb declarations-outputdeclarations-printdeclarations.adb declarations-outputdeclarations-printdeclarations-printconstantrules.adb declarations-outputdeclarations-printdeclarations-printruleheader.adb declarations-outputdeclarations-printdeclarations-printtyperules.adb cells.adb cells-utility.adb cells-utility.ads cells-utility-list.adb cells-utility-list.ads cell_storage.adb dag.adb dag.ads stmtstack.ads graph.ads dag-build_annotation_expression.adb dag-buildexpndag.adb dag-buildexpndag-upattributedesignator.adb dag-buildgraph.adb dag-buildgraph-modelassignmentstmt.adb dag-buildgraph-modelprocedurecall.adb dag-buildgraph-incorporateconstraints.adb dag-loopcontext.adb dag-substitutions.adb dag-type_constraint.adb dag_io.adb clists.adb cstacks.adb labels.adb pairs.adb pile.adb structures.adb graph.adb stmtstack.adb vcg.adb vcg.ads vcg-producevcs.adb sem.adb sem.ads sli.ads sem-add_derives_stream_effects.adb sem-add_record_sub_components.adb sem-aggregate_stack.adb sem-assignment_check.adb sem-check_announced_types_declared.adb sem-check_ceiling_priority.adb sem-check_closing_identifier.adb sem-check_interrupt_property_consistency.adb sem-check_named_association.adb sem-check_no_overloading_from_tagged_ops.adb sem-check_no_overloading_from_tagged_ops-successfully_overrides.adb sem-check_package_prefix.adb sem-check_priority_property_consistency.adb sem-check_priority_range.adb sem-check_protected_modifier_consistency.adb sem-check_suspendable_property_consistency.adb sem-check_task_modifier_consistency.adb sem-check_valid_ident.adb sem-compunit.adb sem-compunit-checkembedbodies.adb sem-compunit-checkpackageneedsbody.adb sem-compunit-checksuspendslistaccountedfor.adb sem-compunit-stack.adb sem-compunit-up_wf_package_body.adb sem-compunit-up_wf_protected_body.adb sem-compunit-up_wf_subprogram_body.adb sem-compunit-up_wf_task_body.adb sem-compunit-walkstatements.adb sem-compunit-walkstatements-checkformutuallyexclusivebranches.adb sem-compunit-walkstatements-down_loop.adb sem-compunit-walkstatements-up_case.adb sem-compunit-walkstatements-up_loop.adb sem-compunit-walkstatements-variableupdatehistory.adb sem-compunit-walkstatements-wf_assign.adb sem-compunit-walkstatements-wf_case.adb sem-compunit-walkstatements-wf_case_choice.adb sem-compunit-walkstatements-wf_condition.adb sem-compunit-walkstatements-wf_delay_until.adb sem-compunit-walkstatements-wf_exit.adb sem-compunit-walkstatements-wf_loop_param.adb sem-compunit-walkstatements-wf_proc_call.adb sem-compunit-walkstatements-wf_return.adb sem-compunit-wf_body_stub.adb sem-compunit-wf_entry_body.adb sem-compunit-wf_generic_declaration.adb sem-compunit-wf_generic_package_instantiation.adb sem-compunit-wf_machine_code_insertion.adb sem-compunit-wf_package_body.adb sem-compunit-wf_package_body-wf_refine.adb sem-compunit-wf_package_body-wf_refine-wf_clause.adb sem-compunit-wf_package_initialization.adb sem-compunit-wf_proof_function_declaration.adb sem-compunit-wf_proof_renaming_declaration.adb sem-compunit-wf_protected_body.adb sem-compunit-wf_subprogram_body.adb sem-compunit-wf_subprogram_body-processpartitionannotation.adb sem-compunit-wf_subunit.adb sem-compunit-wf_task_body.adb sem-compunit-wf_use_type_clause.adb sem-constraint_check.adb sem-convert_tagged_actual.adb sem-create_implicit_positive_subtype.adb 
sem-create_interrupt_stream_variable.adb sem-dependency_relation.adb sem-dependency_relation-check_derives_consistency.adb sem-dependency_relation-create_full_dependency.adb sem-dependency_relation-create_full_subprog_dependency.adb sem-dependency_relation-wf_dependency_relation.adb sem-find_actual_node.adb sem-find_previous_package.adb sem-get_literal_value.adb sem-get_subprogram_anno_key_nodes.adb sem-get_type_bounds.adb sem-illegal_unconstrained.adb sem-indexes_match.adb sem-in_package_initialization.adb sem-is_enclosing_package.adb sem-is_external_interface.adb sem-needs_synthetic_dependency.adb sem-plant_constraining_type.adb sem-range_check.adb sem-subprogram_specification.adb sem-substitute_protected_type_self_reference.adb sem-unexpected_initialization.adb sem-unknown_type_record.adb sem-walk_expression_p.adb sem-walk_expression_p-add_name.adb sem-walk_expression_p-attribute_designator_type_from_context.adb sem-walk_expression_p-calc_binary_operator.adb sem-walk_expression_p-check_binary_operator.adb sem-walk_expression_p-check_binary_operator-homo_impl_type_conv.adb sem-walk_expression_p-create_name_list.adb sem-walk_expression_p-dispose_of_name_list.adb sem-walk_expression_p-down_wf_aggregate.adb sem-walk_expression_p-down_wf_aggregate_or_expression.adb sem-walk_expression_p-down_wf_name_argument_list.adb sem-walk_expression_p-expression_type_from_context.adb sem-walk_expression_p-exp_stack.adb sem-walk_expression_p-find_named_argument_association_parameter.adb sem-walk_expression_p-get_character_literal.adb sem-walk_expression_p-get_string_literal_length.adb sem-walk_expression_p-null_parameter_record.adb sem-walk_expression_p-null_type_record.adb sem-walk_expression_p-ops_are_same_and_commutative.adb sem-walk_expression_p-primary_type_from_context.adb sem-walk_expression_p-put_exp_record.adb sem-walk_expression_p-range_constraint_type_from_context.adb sem-walk_expression_p-simple_expression_type_from_context.adb sem-walk_expression_p-stack_identifier.adb sem-walk_expression_p-type_context_stack.adb sem-walk_expression_p-unknown_symbol_record.adb sem-walk_expression_p-up_wf_aggregate.adb sem-walk_expression_p-up_wf_aggregate_or_expression.adb sem-walk_expression_p-up_wf_name_argument_list.adb sem-walk_expression_p-walk_annotation_expression.adb sem-walk_expression_p-walk_annotation_expression-down_wf_quantifier.adb sem-walk_expression_p-walk_annotation_expression-down_wf_store.adb sem-walk_expression_p-walk_annotation_expression-down_wf_store_list.adb sem-walk_expression_p-walk_annotation_expression-up_wf_quantifier.adb sem-walk_expression_p-walk_annotation_expression-up_wf_store.adb sem-walk_expression_p-walk_annotation_expression-up_wf_store_list.adb sem-walk_expression_p-walk_expression.adb sem-walk_expression_p-wf_aggregate_choice.adb sem-walk_expression_p-wf_aggregate_choice_rep.adb sem-walk_expression_p-wf_ancestor_part.adb sem-walk_expression_p-wf_arange.adb sem-walk_expression_p-wf_attribute.adb sem-walk_expression_p-wf_attribute_designator.adb sem-walk_expression_p-wf_attribute_designator-calc_attribute.adb sem-walk_expression_p-wf_component_association.adb sem-walk_expression_p-wf_expression.adb sem-walk_expression_p-wf_factor.adb sem-walk_expression_p-wf_identifier.adb sem-walk_expression_p-wf_named_argument_association.adb sem-walk_expression_p-wf_named_association_rep.adb sem-walk_expression_p-wf_named_record_component_association.adb sem-walk_expression_p-wf_percent.adb sem-walk_expression_p-wf_positional_argument_association.adb 
sem-walk_expression_p-wf_positional_association.adb sem-walk_expression_p-wf_positional_record_component_association.adb sem-walk_expression_p-wf_primary.adb sem-walk_expression_p-wf_primary-protected_references_by.adb sem-walk_expression_p-wf_qualified_expression.adb sem-walk_expression_p-wf_record_component_selector_name.adb sem-walk_expression_p-wf_relation.adb sem-walk_expression_p-wf_selected_component.adb sem-walk_expression_p-wf_simple_expression.adb sem-walk_expression_p-wf_simple_expression_opt.adb sem-walk_expression_p-wf_term.adb sem-walk_expression_p-wf_tilde.adb sem-walk_name.adb sem-wf_argument_association.adb sem-wf_argument_association-tagged_actual_must_be_object_check.adb sem-wf_array_type_definition.adb sem-wf_basic_declarative_item.adb sem-wf_basic_declarative_item-check_subtype_against_basetype_bounds.adb sem-wf_basic_declarative_item-wf_basic_declaration.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_derived.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_enum.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_integer.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_known_discriminant_part.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_modular.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_priority_pragma.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_protected_type_declaration.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_protected_type_declaration-wf_protected_op_dec.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_real.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_record.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_task_type_declaration.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_type_extension.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_subtype_declaration.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_subtype_declaration-wf_ravenscar_subtype.adb sem-wf_basic_declarative_item-wf_basic_declaration-wf_variable_declaration.adb sem-wf_context_clause.adb sem-wf_context_clause_package_body.adb sem-wf_context_clause_package_body-use_clause.adb sem-wf_context_clause_package_body-with_clause.adb sem-wf_declare_annotation.adb sem-wf_entire_variable.adb sem-wf_external_interface.adb sem-wf_formal_part.adb sem-wf_formal_part-wf_param.adb sem-wf_generic_formal_part.adb sem-wf_generic_subprogram_instantiation.adb sem-wf_generic_subprogram_instantiation-wf_generic_actual_part.adb sem-wf_global_definition.adb sem-wf_inherit_clause.adb sem-wf_justification_statement.adb sem-wf_package_declaration.adb sem-wf_package_declaration-add_child.adb sem-wf_package_declaration-get_package_declaration_key_nodes.adb sem-wf_package_declaration-wf_package_specification.adb sem-wf_package_declaration-wf_package_specification-check_modes.adb sem-wf_package_declaration-wf_package_specification-check_state_can_be_initialized.adb sem-wf_package_declaration-wf_package_specification-check_types_can_be_used.adb sem-wf_package_declaration-wf_package_specification-wf_anno.adb sem-wf_package_declaration-wf_package_specification-wf_anno-wf_init_spec.adb 
sem-wf_package_declaration-wf_package_specification-wf_anno-wf_own.adb sem-wf_package_declaration-wf_package_specification-wf_private.adb sem-wf_package_declaration-wf_package_specification-wf_visible.adb sem-wf_package_declaration-wf_package_specification-wf_visible-wf_deferred.adb sem-wf_package_declaration-wf_package_specification-wf_visible-wf_private_type_declaration.adb sem-wf_pragma.adb sem-wf_pragma-wf_attach_handler.adb sem-wf_pragma-wf_elaborate_body.adb sem-wf_predicate.adb sem-wf_priority_value.adb sem-wf_property_list.adb sem-wf_renaming_declaration.adb sem-wf_subprogram_annotation.adb sem-wf_subprogram_constraint.adb sem-wf_subprogram_declaration.adb sem-wf_type_mark.adb spark-ada-command_line.adb spark-ada-command_line.ads spark-ada-command_line-unbounded_string.adb spark-ada-command_line-unbounded_string.ads spark-ada-strings-unbounded.adb spark-ada-strings-maps.adb spark-ada-text_io.adb spark-ada-text_io.ads spark-ada-text_io-unbounded_string.adb spark-ada-text_io-unbounded_string.ads sli.adb sli-io.ads sli-xref.ads sli-io.adb sli-xref.shb mainloop.adb mainloop.ads examiner.adb casing.adb Source Filename: commandlinedata.ads No Listing File Unit name: CommandLineData Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: xmlreport.ads No Listing File Unit name: XMLReport Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: version.ads No Listing File Unit name: Version Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Warn 3 end Pragma to suppress compiler warn 1 Source Filename: spark_io.ads No Listing File Unit name: SPARK_IO Unit type: package specification Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line pragma Inline (Valid_File, End_Of_Line, End_Of_File, Get_Char); ^ --- Warning : 3: Pragma - ignored by the Examiner. end SPARK_IO; --- Warning : 10: The private part of package SPARK_IO is hidden - hidden text is ignored by the Examiner. Source Filename: filesystem.ads No Listing File Unit name: FileSystem Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: e_strings.ads No Listing File Unit name: E_Strings Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: examinerconstants.ads No Listing File Unit name: ExaminerConstants Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark-ada-strings-unbounded.ads No Listing File Unit name: SPARK.Ada.Strings.Unbounded Unit type: package specification Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line with Ada.Strings.Unbounded; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. end SPARK.Ada.Strings.Unbounded; --- Warning : 10: The private part of package Unbounded is hidden - hidden text is ignored by the Examiner. Source Filename: spark-ada-strings-maps.ads No Listing File Unit name: SPARK.Ada.Strings.Maps Unit type: package specification Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line with Ada.Strings.Maps; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. 
end SPARK.Ada.Strings.Maps; --- Warning : 10: The private part of package Maps is hidden - hidden text is ignored by the Examiner. Source Filename: ada-characters-handling.shs No Listing File Unit name: Ada.Characters.Handling Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark-ada-strings.ads No Listing File Unit name: SPARK.Ada.Strings Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark-ada.ads No Listing File Unit name: SPARK.Ada Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark.ads No Listing File Unit name: SPARK Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: systemerrors.ads No Listing File Unit name: SystemErrors Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark_xml.ads No Listing File Unit name: SPARK_XML Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: screenecho.ads No Listing File Unit name: ScreenEcho Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: commandlinehandler.ads No Listing File Unit name: CommandLineHandler Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: completecheck.ads No Listing File Unit name: CompleteCheck Unit type: package specification Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line pragma Pack (ElementArray); ^ --- Warning : 3: Pragma - ignored by the Examiner. Source Filename: maths.ads No Listing File Unit name: Maths Unit type: package specification Unit has been analysed, any errors are listed below. 3 error(s) or warning(s) Line pragma Inline (HasNoValue); ^ --- Warning : 3: Pragma - ignored by the Examiner. for Digit'Size use 4; ^ --- Warning : 2: Representation clause - ignored by the Examiner. pragma Pack (ValueArray); ^ --- Warning : 3: Pragma - ignored by the Examiner. Source Filename: lextokenmanager.ads No Listing File Unit name: LexTokenManager Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: statistics.ads No Listing File Unit name: Statistics Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: componenterrors.ads No Listing File Unit name: ComponentErrors Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: seqalgebra.ads No Listing File Unit name: SeqAlgebra Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: heap.ads No Listing File Unit name: Heap Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: dictionary.ads No Listing File Unit name: Dictionary Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sp_symbols.ads No Listing File Unit name: SP_Symbols Unit type: package specification Unit has been analysed, any errors are listed below. 
No errors found Source Filename: lextokenstacks.ads No Listing File Unit name: LexTokenStacks Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: contextmanager.ads No Listing File Unit name: ContextManager Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: heap_storage.ads No Listing File Unit name: Heap_Storage Unit type: package specification Unit has been analysed, any errors are listed below. 3 error(s) or warning(s) Line with Ada.Containers; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. with Ada.Containers.Vectors; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. end Heap_Storage; --- Warning : 10: The private part of package Heap_Storage is hidden - hidden text is ignored by the Examiner. Source Filename: spark-ada-containers.ads No Listing File Unit name: SPARK.Ada.Containers Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: componentmanager.ads No Listing File Unit name: ComponentManager Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: debug.ads No Listing File Unit name: Debug Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: stree.ads No Listing File Unit name: STree Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: cstacks.ads No Listing File Unit name: CStacks Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: cells.ads No Listing File Unit name: Cells Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: cell_storage.ads No Listing File Unit name: Cell_Storage Unit type: package specification Unit has been analysed, any errors are listed below. 4 error(s) or warning(s) Line with Ada.Containers; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. with Ada.Containers.Vectors; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. pragma Pack (Cell_Content); ^ --- Warning : 3: Pragma - ignored by the Examiner. end Cell_Storage; --- Warning : 10: The private part of package Cell_Storage is hidden - hidden text is ignored by the Examiner. Source Filename: configfile.ads No Listing File Unit name: ConfigFile Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sp_expected_symbols.ads No Listing File Unit name: SP_Expected_Symbols Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sparklex.ads No Listing File Unit name: SparkLex Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: lextokenlists.ads No Listing File Unit name: LexTokenLists Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: error_types.ads No Listing File Unit name: Error_Types Unit type: package specification Unit has been analysed, any errors are listed below. 
No errors found Source Filename: error_io.ads No Listing File Unit name: Error_IO Unit type: package specification Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line end Error_IO; --- Warning : 10: The private part of package Error_IO is hidden - hidden text is ignored by the Examiner. Source Filename: errorhandler.ads No Listing File Unit name: ErrorHandler Unit type: package specification Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line pragma Elaborate_All (SPARK_IO); ^ --- Warning : 3: Pragma - ignored by the Examiner. Source Filename: casing.ads No Listing File Unit name: Casing Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sp_relations.ads No Listing File Unit name: SP_Relations Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sp_productions.ads No Listing File Unit name: SP_Productions Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: contextmanager-ops.ads No Listing File Unit name: ContextManager.Ops Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: indexmanager.ads No Listing File Unit name: IndexManager Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: date_time.ads No Listing File Unit name: Date_Time Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: file_utils.ads No Listing File Unit name: File_Utils Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: indexmanager-cache.ads No Listing File Unit name: IndexManager.Cache Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: indexmanager-index_table_p.ads No Listing File Unit name: IndexManager.Index_Table_P Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: lextokenmanager-relation_algebra.ads No Listing File Unit name: LexTokenManager.Relation_Algebra Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: relationalgebra.ads No Listing File Unit name: RelationAlgebra Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: lextokenmanager-seq_algebra.ads No Listing File Unit name: LexTokenManager.Seq_Algebra Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: lextokenmanager-relation_algebra-string.ads No Listing File Unit name: LexTokenManager.Relation_Algebra.String Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: lists.ads No Listing File Unit name: Lists Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: metafile.ads No Listing File Unit name: MetaFile Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: reflist.ads No Listing File Unit name: RefList Unit type: package specification Unit has been analysed, any errors are listed below. 
No errors found Source Filename: requiredunits.ads No Listing File Unit name: RequiredUnits Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: simplelists.ads No Listing File Unit name: SimpleLists Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sparkhtml.ads No Listing File Unit name: SparkHTML Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: symbol_set.ads No Listing File Unit name: Symbol_Set Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sprint.ads No Listing File Unit name: SPrint Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spparser.ads No Listing File Unit name: SPParser Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sp_parser_goto.ads No Listing File Unit name: SP_Parser_Goto Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sp_parser_actions.ads No Listing File Unit name: SP_Parser_Actions Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: flowanalyser.ads No Listing File Unit name: FlowAnalyser Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: relationalgebra-debug.ads No Listing File Unit name: RelationAlgebra.Debug Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: declarations.ads No Listing File Unit name: Declarations Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: pile.ads No Listing File Unit name: Pile Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: pairs.ads No Listing File Unit name: Pairs Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: dag_io.ads No Listing File Unit name: DAG_IO Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: clists.ads No Listing File Unit name: Clists Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: adjustfdl_rws.ads No Listing File Unit name: AdjustFDL_RWs Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: labels.ads No Listing File Unit name: Labels Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: structures.ads No Listing File Unit name: Structures Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: cells-utility.ads No Listing File Unit name: Cells.Utility Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: cells-utility-list.ads No Listing File Unit name: Cells.Utility.List Unit type: package specification Unit has been analysed, any errors are listed below. 
No errors found Source Filename: dag.ads No Listing File Unit name: DAG Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: stmtstack.ads No Listing File Unit name: StmtStack Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: graph.ads No Listing File Unit name: Graph Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: vcg.ads No Listing File Unit name: VCG Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sem.ads No Listing File Unit name: Sem Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sli.ads No Listing File Unit name: SLI Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark-ada-command_line.ads No Listing File Unit name: SPARK.Ada.Command_Line Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark-ada-command_line-unbounded_string.ads No Listing File Unit name: SPARK.Ada.Command_Line.Unbounded_String Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark-ada-text_io.ads No Listing File Unit name: SPARK.Ada.Text_IO Unit type: package specification Unit has been analysed, any errors are listed below. 56 error(s) or warning(s) Line with Ada.Text_IO; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. 
--# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. end SPARK.Ada.Text_IO; --- Warning : 10: The private part of package Text_IO is hidden - hidden text is ignored by the Examiner. Source Filename: spark-ada-text_io-unbounded_string.ads No Listing File Unit name: SPARK.Ada.Text_IO.Unbounded_String Unit type: package specification Unit has been analysed, any errors are listed below. 10 error(s) or warning(s) Line --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. 
--# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. --# declare delay; ^ --- Warning : 4: declare annotation - ignored by the Examiner. Source Filename: sli-io.ads No Listing File Unit name: SLI.IO Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sli-xref.ads No Listing File Unit name: SLI.Xref Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: mainloop.ads No Listing File Unit name: MainLoop Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: commandlinedata.adb Listing Filename: commandlinedata.lsb Unit name: CommandLineData Unit type: package body Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line end Debug; --- Warning : 10: The body of subprogram Debug is hidden - hidden text is ignored by the Examiner. end Dump_File_Names; --- Warning : 10: The body of subprogram Dump_File_Names is hidden - hidden text is ignored by the Examiner. Source Filename: commandlinehandler.adb Listing Filename: commandlinehandler.lsb Unit name: CommandLineHandler Unit type: package body Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line with Ada.Command_Line; ^ --- Warning : 1: The identifier Command_Line is either undeclared or not visible at this point. end Ignore_Default_Switch_File; --- Warning : 10: The body of subprogram Ignore_Default_Switch_File is hidden - hidden text is ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. 
Line Flow 10 end Returned status not used here 1 Flow 10 end Returned handle not used here 1 Flow 33 end Unused not referenced here 1 Flow 22 Stable expression here OK 1 Warn 169 Direct updates OK here 5 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 5 Warn 169 Direct updates OK here 2 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 14 Warn 169 Direct updates OK here 2 Warn 169 Direct updates OK here 2 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 10 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 2 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 8 Warn 169 Direct updates OK here 3 Warn 169 Direct updates OK here 11 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 4 Warn 169 Direct updates OK here 3 Warn 169 end Direct updates OK here 2 Warn 169 end Direct updates OK here 1 Warn 169 end Direct updates OK here 1 Warn 169 Direct updates OK here 5 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 3 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 6 Warn 169 Direct updates OK here 3 Warn 169 Direct updates OK here 10 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 4 Warn 169 Direct updates OK here 1 Warn 169 end Direct updates OK here 2 Warn 169 end Direct updates OK here 1 Warn 169 Direct updates OK here 27 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 6 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 14 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 2 Warn 169 Direct updates OK here 2 Warn 169 Direct updates OK here 16 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 4 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 8 Warn 169 Direct updates OK here 6 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 7 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 3 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 2 Warn 169 Direct updates 
OK here 1 Warn 169 Direct updates OK here 2 Warn 169 Direct updates OK here 5 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 3 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 3 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 3 Warn 169 Direct updates OK here 6 Warn 169 Direct updates OK here 2 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Flow 10 end Next_Symbol not used here 1 Flow 10 end Local_Command_String not used he 1 Source Filename: completecheck.adb Listing Filename: completecheck.lsb Unit name: CompleteCheck Unit type: package body Unit has been analysed, any errors are listed below. No errors found Source Filename: componenterrors.adb Listing Filename: componenterrors.lsb Unit name: ComponentErrors Unit type: package body Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 32 end Initialization partial but effec 1 Flow 31 end Initialization partial but effec 1 Flow 602 end Initialization partial but effec 1 Source Filename: componentmanager.adb Listing Filename: componentmanager.lsb Unit name: ComponentManager Unit type: package body Unit has been analysed, any errors are listed below. 4 error(s) or warning(s) Line with E_Strings; ^ --- Warning : 1: The identifier E_Strings is either undeclared or not visible at this point. with SPARK_IO; ^ --- Warning : 1: The identifier SPARK_IO is either undeclared or not visible at this point. end Dump_Component_Tree; --- Warning : 10: The body of subprogram Dump_Component_Tree is hidden - hidden text is ignored by the Examiner. end Dump_All_Component_Trees; --- Warning : 10: The body of subprogram Dump_All_Component_Trees is hidden - hidden text is ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 32 end Initialization partial but effec 1 Flow 31 end Initialization partial but effec 1 Flow 602 end Initialization partial but effec 1 Flow 41 Stable expression expected here 1 Source Filename: configfile.adb Listing Filename: configfile.lsb Unit name: ConfigFile Unit type: package body Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line pragma Unreferenced (The_Constant); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Unreferenced (The_Constant); ^ --- Warning : 3: Pragma - ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. 
Line
Flow   10        File_Spec_Status unused here       1
Flow   33   end  File_Spec_Status unused here       1
Flow   10   end  File_Status unused here            1
Flow   33   end  File_Status unused here            1
Flow   10        Local_ME unused here               1
Flow   33   end  Local_ME unused here               1
Flow   10        Local_ME unused here               1
Flow   10        Local_ME unused here               1
Flow   33   end  Local_ME unused here               1
Flow   10        Unwanted_ME unused here            2
Flow   10        Unwanted_ME unused here            1
Flow   33   end  Unwanted_ME unused here            1
Flow   10        Unwanted_ME unused here            1
Flow   33   end  Unwanted_ME unused here            1
Flow   10        Expected ineffective assignment    1
Flow   33   end  Expected to be neither reference   1
Flow   10        Unwanted_ME unused here            2
Flow   10        Expected ineffective assignment    1
Flow   33   end  Unwanted_ME unused here            1
Flow   33   end  Expected to be neither reference   1
Flow   10        Expected ineffective assignment    1
Flow   10        Expected ineffective assignment    1
Flow   10        Integer_Type unused here           1
Flow   10        Expected ineffective assignment    1
Flow   33   end  Expected Success to be neither r   1
Flow   41        Stable expression expected here    1
Flow   10        Expected ineffective assignment    3
Flow   41        Stable expression expected here    1
Flow   41        Stable expression expected here    1
Flow   33   end  Expected to be neither reference   1
Flow   10        Unwanted_Punct_Token unused here   1
Flow   33   end  Unwanted_Punct_Token unused here   1
Flow   10        Local_Config_File unused here      1

Source Filename: contextmanager-ops.adb
Listing Filename: contextmanager-ops.lsb
Unit name: ContextManager.Ops
Unit type: package body
Unit has been analysed, any errors are listed below.
No errors found

Expected messages marked with the accept annotation
Type   Msg   Lines       Reason                             Match
       No.   From  To                                       No.   Line
Flow   10        Stat not used                      1
Flow   33   end  Stat not used                      1
Flow   10        Not required here                  1
Flow   33   end  Not required here                  1
Flow   10        Ineffective assignment here OK     1
Flow   33   end  Expect Dummy_Item unused           1
Flow   23   end  Partial initialization OK here     1
Flow  602   end  Partial initialization OK here     1

Source Filename: date_time.adb
Listing Filename: date_time.lsb
Unit name: Date_Time
Unit type: package body
Unit has been analysed, any errors are listed below.
No errors found

Expected messages marked with the accept annotation
Type   Msg   Lines       Reason                             Match
       No.   From  To                                       No.   Line
Flow  501   end  The value will always be defined   1
Flow  602   end  Result is never undefined.         1
Flow   10   end  We do not care about any assignm   5
Flow   33   end  We don't use the dummy variable    1

Source Filename: e_strings.adb
Listing Filename: e_strings.lsb
Unit name: E_Strings
Unit type: package body
Unit has been analysed, any errors are listed below.
7 error(s) or warning(s)

Line
with SPARK.Ada.Strings.Unbounded.Not_SPARK; ^
--- Warning : 1: The identifier Not_SPARK is either undeclared or not visible at this point.
end Find_Examiner_Sub_String;
--- Warning : 10: The body of subprogram Find_Examiner_Sub_String is hidden - hidden text is ignored by the Examiner.
end Get_Int_From_String;
--- Warning : 10: The body of subprogram Get_Int_From_String is hidden - hidden text is ignored by the Examiner.
end Create;
--- Warning : 10: The body of subprogram Create is hidden - hidden text is ignored by the Examiner.
end Open;
--- Warning : 10: The body of subprogram Open is hidden - hidden text is ignored by the Examiner.
end Put_String;
--- Warning : 10: The body of subprogram Put_String is hidden - hidden text is ignored by the Examiner.
end Put_Line;
--- Warning : 10: The body of subprogram Put_Line is hidden - hidden text is ignored by the Examiner.
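The tables headed "Expected messages marked with the accept annotation" summarise the --# accept justifications embedded in the analysed units: Type and Msg No. identify the justified message (for example Flow 10, an ineffective statement, or Flow 33, a variable that is neither referenced nor exported), the Lines columns give the extent of the justification ("end" where it runs to an end accept or to the end of the unit), Reason repeats the justification text truncated to the column width, and Match No. counts how many messages the justification actually matched. As a rough sketch of the kind of source annotation behind such an entry, with all names invented for illustration and the justification written in the Examiner's accept syntax:

   procedure Discard_Status (The_File : in out File_T) is
      Status : Status_T;                 -- File_T, Status_T and Close_File are
   begin                                 -- invented for this illustration only
      --# accept F, 10, Status, "Returned status not used here";
      Close_File (The_File, Status);     -- message 10: ineffective assignment
      --# end accept;
   end Discard_Status;

A message justified in this way is reported in the summary table rather than as an ordinary flow error in the unit's listing.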
Source Filename: file_utils.adb
Listing Filename: file_utils.lsb
Unit name: File_Utils
Unit type: package body
Unit has been analysed, any errors are listed below.
2 error(s) or warning(s)

Line
with Ada.Calendar; ^
--- Warning : 1: The identifier Ada is either undeclared or not visible at this point.
end Get_Current_Date_Time;
--- Warning : 10: The body of subprogram Get_Current_Date_Time is hidden - hidden text is ignored by the Examiner.

Source Filename: heap.adb
Listing Filename: heap.lsb
Unit name: Heap
Unit type: package body
Unit has been analysed, any errors are listed below.
2 error(s) or warning(s)

Line
with Ada.Exceptions; ^
--- Warning : 1: The identifier Ada is either undeclared or not visible at this point.
end CreateAtom;
--- Warning : 9: The body of subprogram CreateAtom has a hidden exception handler - analysis and verification of contracts for this handler have not been performed.

Source Filename: indexmanager-cache.shb
Listing Filename: indexmanager-cache.lsb
Unit name: IndexManager.Cache
Unit type: package body
Unit has been analysed, any errors are listed below.
10 error(s) or warning(s)

Line
with Ada.Containers.Hashed_Maps; ^
--- Warning : 1: The identifier Ada is either undeclared or not visible at this point.
with Ada.Strings; ^
--- Warning : 1: The identifier Ada is either undeclared or not visible at this point.
with Ada.Strings.Hash_Case_Insensitive; ^
--- Warning : 1: The identifier Ada is either undeclared or not visible at this point.
with E_Strings.Not_SPARK; ^
--- Warning : 1: The identifier Not_SPARK is either undeclared or not visible at this point.
end Unit_Hash_P;
--- Warning : 10: The private part of package Unit_Hash_P is hidden - hidden text is ignored by the Examiner.
end Unit_Hash_P;
--- Warning : 10: The body of package Unit_Hash_P is hidden - hidden text is ignored by the Examiner.
end Get_Element;
--- Warning : 10: The body of subprogram Get_Element is hidden - hidden text is ignored by the Examiner.
end Trace;
--- Warning : 10: The body of subprogram Trace is hidden - hidden text is ignored by the Examiner.
for I in IndexManager.Component_Index loop
--- Warning :402: Default assertion planted to cut loop.
end Context_Manager_Unit_Types_Image;
--- Warning : 10: The body of subprogram Context_Manager_Unit_Types_Image is hidden - hidden text is ignored by the Examiner.

Source Filename: indexmanager-index_table_p.adb
Listing Filename: indexmanager-index_table_p.lsb
Unit name: IndexManager.Index_Table_P
Unit type: package body
Unit has been analysed, any errors are listed below.
3 error(s) or warning(s)

Line
with Fatal; ^
--- Warning : 1: The identifier Fatal is either undeclared or not visible at this point.
end Raise_Fatal_Index_Manager;
--- Warning : 10: The body of subprogram Raise_Fatal_Index_Manager is hidden - hidden text is ignored by the Examiner.
end Trace;
--- Warning : 10: The body of subprogram Trace is hidden - hidden text is ignored by the Examiner.

Expected messages marked with the accept annotation
Type   Msg   Lines       Reason                             Match
       No.   From  To                                       No.   Line
Flow   10        Ineffective statement here OK      1
Flow   35   end  Ineffective initial value of var   1
Flow   41        Expect stable expression           1
Flow   41        Expect stable expression           3
Flow   32   end  Initialization is partial but ef   1
Flow   31   end  Initialization is partial but ef   1
Flow  602   end  Initialization is partial but ef   1

Source Filename: indexmanager.adb
Listing Filename: indexmanager.lsb
Unit name: IndexManager
Unit type: package body
Unit has been analysed, any errors are listed below.
No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expect File_Spec_Status Unused 1 Flow 41 Stable expression expected here 1 Flow 41 Stable expression expected here 1 Flow 41 Stable expression expected here 1 Flow 41 Stable expression expected here 1 Flow 41 Stable expression expected here 1 Flow 41 Stable expression expected here 1 Flow 41 Stable expression expected here 1 Flow 41 Stable expression expected here 1 Flow 41 Stable expression expected here 1 Flow 41 Stable expression expected here 2 Flow 10 Expect ineffective assignment 1 Flow 10 Expect ineffective assignment 1 Flow 41 Stable expression expected here 1 Flow 41 Stable expression expected here 1 Flow 33 end Expect File_Spec_Status unused 1 Flow 22 Stable expression here OK 1 Flow 41 Stable expression expected here 1 Flow 10 Ineffective assignment here OK 1 Flow 10 Ineffective assignment here OK 1 Flow 10 Ineffective assignment here OK 1 Flow 33 end Expect Components unused 1 Flow 33 end Expect Dummy_Aux_Index_Unit unus 1 Flow 33 end Expect Dummy_Source_Position unu 1 Flow 10 Ineffective assignment here OK 1 Flow 10 Ineffective assignment here OK 1 Flow 10 Ineffective assignment here OK 1 Flow 10 Ineffective assignment here OK 1 Flow 33 end Expect Actual_Unit_Type unused 1 Flow 33 end Expect Actual_Unit_Type unused 1 Flow 33 end Expect Dummy_Aux_Index_Unit unus 1 Flow 33 end Expect Dummy_Source_Position unu 1 Source Filename: lextokenlists.adb Listing Filename: lextokenlists.lsb Unit name: LexTokenLists Unit type: package body Unit has been analysed, any errors are listed below. No errors found Source Filename: lextokenmanager.adb Listing Filename: lextokenmanager.lsb Unit name: LexTokenManager Unit type: package body Unit has been analysed, any errors are listed below. No errors found Source Filename: lextokenmanager-insert_nat.adb Listing Filename: lextokenmanager-insert_nat.lsb Unit name: LexTokenManager.Insert_Nat Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: lextokenmanager-relation_algebra.adb Listing Filename: lextokenmanager-relation_algebra.lsb Unit name: LexTokenManager.Relation_Algebra Unit type: package body Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Cache unused here 1 Source Filename: lextokenmanager-relation_algebra-string.adb Listing Filename: lextokenmanager-relation_algebra-string.lsb Unit name: LexTokenManager.Relation_Algebra.String Unit type: package body Unit has been analysed, any errors are listed below. 3 error(s) or warning(s) Line with Ada.Text_IO; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. with E_Strings.Not_SPARK; ^ --- Warning : 1: The identifier E_Strings is either undeclared or not visible at this point. end Print; --- Warning : 10: The body of subprogram Print is hidden - hidden text is ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Cache unused here 1 Source Filename: lextokenmanager-seq_algebra.adb Listing Filename: lextokenmanager-seq_algebra.lsb Unit name: LexTokenManager.Seq_Algebra Unit type: package body Unit has been analysed, any errors are listed below. 3 error(s) or warning(s) Line with Ada.Text_IO; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. 
with E_Strings.Not_SPARK; ^ --- Warning : 1: The identifier E_Strings is either undeclared or not visible at this point. end Print; --- Warning : 10: The body of subprogram Print is hidden - hidden text is ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 M unused here 1 Flow 10 end Ineffective statement OK 4 Flow 10 Assignment is ineffective OK 1 Flow 35 end Importation of the initial value 1 Flow 35 end Importation of the initial value 1 Source Filename: lextokenstacks.adb Listing Filename: lextokenstacks.lsb Unit name: LexTokenStacks Unit type: package body Unit has been analysed, any errors are listed below. No errors found Source Filename: lists.adb Listing Filename: lists.lsb Unit name: Lists Unit type: package body Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 32 end Partial initialization OK 1 Flow 31 end Partial initialization OK 1 Flow 602 end Partial initialization OK 1 Source Filename: maths.adb Listing Filename: maths.lsb Unit name: Maths Unit type: package body Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 SlashFound not used here 1 Flow 10 Ptr not used here 1 Flow 10 end Tmp_Ok1 not used here 1 Flow 33 end Tmp_Ok1 not used here 1 Flow 10 end Tmp_Ok2 not used here 1 Flow 33 end Tmp_Ok2 not used here 1 Flow 10 end Tmp_Ok1 not used here 1 Flow 33 end Tmp_Ok1 not used here 1 Flow 10 end Tmp_Ok2 not used here 1 Flow 33 end Tmp_Ok2 not used here 1 Flow 10 end Unused unused here 2 Flow 33 end Unused unused here 1 Source Filename: maths-literaltovalue.adb Listing Filename: maths-literaltovalue.lsb Unit name: Maths.LiteralToValue Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: maths-parsestring.adb Listing Filename: maths-parsestring.lsb Unit name: Maths.ParseString Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: maths-valuetostring.adb Listing Filename: maths-valuetostring.lsb Unit name: Maths.ValueToString Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: metafile.adb Listing Filename: metafile.lsb Unit name: MetaFile Unit type: package body Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Known to be ineffective, must be 1 Flow 33 end Known to be ineffective, must be 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 41 Expect stable expression 1 Flow 41 Expect stable expression 1 Flow 33 end Expected unused to be neither re 1 Flow 10 Known to be ineffective, must be 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 41 Expect stable expression 1 Flow 41 Expect stable expression 1 Flow 41 Expect stable expression 1 Flow 41 Expect stable expression 1 Flow 41 Expect stable expression 1 Flow 33 end Expected unused to be neither re 1 Flow 33 end Expected Find_Status to be neith 1 Source Filename: reflist.adb Listing Filename: reflist.lsb Unit name: RefList Unit type: package body Unit has been analysed, any errors are listed below. 
No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 end UnusedKeyCell unused here 1 Flow 33 end UnusedKeyCell unused here 1 Source Filename: relationalgebra.adb Listing Filename: relationalgebra.lsb Unit name: RelationAlgebra Unit type: package body Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Cache unused here 1 Source Filename: requiredunits.adb Listing Filename: requiredunits.lsb Unit name: RequiredUnits Unit type: package body Unit has been analysed, any errors are listed below. 7 error(s) or warning(s) Line with SPARK_IO, CommandLineData; -- used for trace/debug statements only, not inherited ^ ^ --- Warning : 1: The identifier SPARK_IO is either undeclared or not visible at this point. --- Warning : 1: The identifier CommandLineData is either undeclared or not visible at this point. end Trace; --- Warning : 10: The body of subprogram Trace is hidden - hidden text is ignored by the Examiner. LexTokenLists.Append (Unit_Name_Local, STree.Node_Lex_String (Node => Name_Ptr)); --- Warning :402: Default assertion planted to cut loop. exit when CurrentNode = STree.NullNode; --- Warning :402: Default assertion planted to cut loop. while STree.Syntax_Node_Type (Node => NextNode) /= SP_Symbols.simple_name loop --- Warning :402: Default assertion planted to cut loop. LexTokenLists.Append (LList, STree.Node_Lex_String (Node => STree.Child_Node (Current_Node => NextNode))); --- Warning :402: Default assertion planted to cut loop. Source Filename: screenecho.adb Listing Filename: screenecho.lsb Unit name: ScreenEcho Unit type: package body Unit has been analysed, any errors are listed below. No errors found Source Filename: seqalgebra.adb Listing Filename: seqalgebra.lsb Unit name: SeqAlgebra Unit type: package body Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 M unused here 1 Source Filename: simplelists.adb Listing Filename: simplelists.lsb Unit name: SimpleLists Unit type: package body Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 32 end Initialization partial but effec 1 Flow 31 end Initialization partial but effec 1 Flow 602 end Initialization partial but effec 1 Source Filename: sparkhtml.adb Listing Filename: sparkhtml.lsb Unit name: SparkHTML Unit type: package body Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line pragma Inline (Flash_Character); ^ --- Warning : 3: Pragma - ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. 
Line Flow 10 Expected ineffective assignment 1 Flow 10 Ineffective assignment here OK 3 Flow 10 Ineffective assignment here OK 1 Flow 10 Ineffective assignment here OK 1 Flow 10 Ineffective assignment here OK 1 Flow 10 Ineffective assignment here OK 1 Flow 10 Ineffective assignment here OK 1 Flow 10 Ineffective assignment here OK 1 Flow 10 Ineffective assignment here OK 1 Flow 10 Ineffective assignment here OK 1 Flow 10 Ineffective assignment here OK 1 Flow 10 Ineffective assignment here OK 1 Flow 33 end Expect String_Start unused 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 33 end Expected Source_File_Closed_OK t 1 Flow 33 end Expected Dest_File_Closed_OK to 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 601 end False coupling in SPARK_IO 1 Flow 601 end False coupling in SPARK_IO 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Source Filename: statistics.adb Listing Filename: statistics.lsb Unit name: Statistics Unit type: package body Unit has been analysed, any errors are listed below. No errors found Source Filename: stree.adb Listing Filename: stree.lsb Unit name: STree Unit type: package body Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 30 end Variable not referenced nor expo 1 Flow 41 Expected stable expression 1 Flow 41 Expected stable expression 1 Flow 23 Init partial but effective 1 Flow 602 end Init partial but effective 1 Source Filename: stree-findlastitemindependencyrelation.adb Listing Filename: stree-findlastitemindependencyrelation.lsb Unit name: STree.FindLastItemInDependencyRelation Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: symbol_set.adb Listing Filename: symbol_set.lsb Unit name: Symbol_Set Unit type: package body Unit has been analysed, any errors are listed below. No errors found Source Filename: systemerrors.adb Listing Filename: systemerrors.lsb Unit name: SystemErrors Unit type: package body Unit has been analysed, any errors are listed below. 6 error(s) or warning(s) Line with CommandLineData; ^ --- Warning : 1: The identifier CommandLineData is either undeclared or not visible at this point. with Fatal; ^ --- Warning : 1: The identifier Fatal is either undeclared or not visible at this point. end Stop_Program; --- Warning : 10: The body of subprogram Stop_Program is hidden - hidden text is ignored by the Examiner. end Fatal_Error; --- Warning : 10: The body of subprogram Fatal_Error is hidden - hidden text is ignored by the Examiner. end RT_Assert; --- Warning : 10: The body of subprogram RT_Assert is hidden - hidden text is ignored by the Examiner. end RT_Warning; --- Warning : 10: The body of subprogram RT_Warning is hidden - hidden text is ignored by the Examiner. Source Filename: sprint.adb Listing Filename: sprint.lsb Unit name: SPrint Unit type: package body Unit has been analysed, any errors are listed below. 
3 error(s) or warning(s) Line end Put_Node_Type; --- Warning : 10: The body of subprogram Put_Node_Type is hidden - hidden text is ignored by the Examiner. for I in Natural range 1 .. Indent loop --- Warning :402: Default assertion planted to cut loop. end Recursive_Dump; --- Warning : 10: The body of subprogram Recursive_Dump is hidden - hidden text is ignored by the Examiner. Source Filename: spark_xml.adb Listing Filename: spark_xml.lsb Unit name: SPARK_XML Unit type: package body Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line end Print_Schema_Error; --- Warning : 10: The body of subprogram Print_Schema_Error is hidden - hidden text is ignored by the Examiner. end Print_Working_State; --- Warning : 10: The body of subprogram Print_Working_State is hidden - hidden text is ignored by the Examiner. Source Filename: xmlreport.adb Listing Filename: xmlreport.lsb Unit name: XMLReport Unit type: package body Unit has been analysed, any errors are listed below. 4 error(s) or warning(s) Line end Handle_Schema_Error; --- Warning : 10: The body of subprogram Handle_Schema_Error is hidden - hidden text is ignored by the Examiner. for I in My_Tag loop --- Warning :402: Default assertion planted to cut loop. for I in Attribute_Index loop --- Warning :402: Default assertion planted to cut loop. for I in Rel_Index loop --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Flow 10 Expected ineffective assignment 1 Flow 33 end Expected Depth to be neither ref 1 Flow 10 Expected ineffective assignment 1 Flow 33 end Expected Id to be neither refere 1 Flow 10 Expected ineffective assignment 1 Flow 33 end Expected Depth to be neither ref 1 Flow 10 Expected ineffective assignment 1 Flow 33 end Expected Depth to be neither ref 1 Source Filename: sparklex.adb Listing Filename: sparklex.lsb Unit name: SparkLex Unit type: package body Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line pragma Unreferenced (Set_Anno_Context); -- not used at present ^ --- Warning : 3: Pragma - ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Unused not referenced here 1 Flow 33 end Unused not referenced here 1 Source Filename: sparklex-lex.adb Listing Filename: sparklex-lex.lsb Unit name: SparkLex.Lex Unit type: subunit Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line for I in E_Strings.Positions range Start_Pos + 1 .. End_Pos - 1 loop --- Warning :402: Default assertion planted to cut loop. for I in E_Strings.Positions range Start_Pos .. End_Pos loop --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 41 Stable expression here OK 1 Flow 10 Skipping so ineffective assignme 1 Source Filename: sparklex-lex-apostintro.adb Listing Filename: sparklex-lex-apostintro.lsb Unit name: SparkLex.Lex.ApostIntro Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sparklex-lex-getident.adb Listing Filename: sparklex-lex-getident.lsb Unit name: SparkLex.Lex.GetIdent Unit type: subunit Unit has been analysed, any errors are listed below. 
No errors found Source Filename: sparklex-lex-getnumber.adb Listing Filename: sparklex-lex-getnumber.lsb Unit name: SparkLex.Lex.GetNumber Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sparklex-lex-getstring.adb Listing Filename: sparklex-lex-getstring.lsb Unit name: SparkLex.Lex.GetString Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 501 Ch always defined on this path 1 Flow 602 end Ch always defined here 1 Source Filename: sparklex-lex-hyphintro.adb Listing Filename: sparklex-lex-hyphintro.lsb Unit name: SparkLex.Lex.HyphIntro Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sparklex-lex-ltintro.adb Listing Filename: sparklex-lex-ltintro.lsb Unit name: SparkLex.Lex.LTIntro Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sparklex-lex-nextlex.adb Listing Filename: sparklex-lex-nextlex.lsb Unit name: SparkLex.Lex.NextLex Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sparklex-linemanager.adb Listing Filename: sparklex-linemanager.lsb Unit name: SparkLex.LineManager Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: spparser.adb Listing Filename: spparser.lsb Unit name: SPParser Unit type: package body Unit has been analysed, any errors are listed below. 3 error(s) or warning(s) Line end Put_Symbol; --- Warning : 10: The body of subprogram Put_Symbol is hidden - hidden text is ignored by the Examiner. pragma Unreferenced (SPPrintAction); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Unreferenced (SPPrintStack); ^ --- Warning : 3: Pragma - ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 31 end Only the stack pointer needs to 1 Flow 32 end Only the stack pointer needs to 1 Flow 23 end The stack pointer is all that is 1 Flow 501 end The stack pointer is all that is 2 Flow 504 end The stack pointer is all that is 3 Flow 602 end The stack pointer is all that is 1 Flow 20 end NextState is guarded by Recovery 1 Flow 602 end NextState is guarded by Recovery 1 Flow 602 end NextState is guarded by Recovery 1 Flow 23 Whole array is initialized. 1 Flow 23 Whole array is initialized. 1 Flow 23 Whole array is initialized. 1 Flow 23 end Access to SymList Elements is gu 2 Flow 504 Access to SymList is guarded by 1 Flow 41 Pos is updated in the outer loop 1 Flow 504 Update of element of SymList - A 1 Flow 504 Access to SymList is guarded by 1 Flow 501 Access guarded by Success. 1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accessed elements are defined. 
1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accesses guarded by Success 1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accesses guarded by Success 1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accesses guarded by Success 1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accessed elements are defined. 1 Flow 602 end Accesses guarded by Success 1 Source Filename: sp_parser_actions.adb Listing Filename: sp_parser_actions.lsb Unit name: SP_Parser_Actions Unit type: package body Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line pragma Unreferenced (Prod_No_Lim); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Unreferenced (Error_Act); ^ --- Warning : 3: Pragma - ignored by the Examiner. Source Filename: sp_parser_actions-scan_action_table.adb Listing Filename: sp_parser_actions-scan_action_table.lsb Unit name: SP_Parser_Actions.Scan_Action_Table Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sp_parser_actions-spa.adb Listing Filename: sp_parser_actions-spa.lsb Unit name: SP_Parser_Actions.SPA Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sp_parser_goto.adb Listing Filename: sp_parser_goto.lsb Unit name: SP_Parser_Goto Unit type: package body Unit has been analysed, any errors are listed below. No errors found Source Filename: sp_parser_goto-scan_goto_table.adb Listing Filename: sp_parser_goto-scan_goto_table.lsb Unit name: SP_Parser_Goto.Scan_Goto_Table Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sp_parser_goto-sp_goto.adb Listing Filename: sp_parser_goto-sp_goto.lsb Unit name: SP_Parser_Goto.SP_Goto Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sp_expected_symbols.adb Listing Filename: sp_expected_symbols.lsb Unit name: SP_Expected_Symbols Unit type: package body Unit has been analysed, any errors are listed below. No errors found Source Filename: sp_expected_symbols-get_expected_symbols.adb Listing Filename: sp_expected_symbols-get_expected_symbols.lsb Unit name: SP_Expected_Symbols.Get_Expected_Symbols Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 23 Expected flow error 1 Flow 23 Expected flow error 1 Flow 504 Expected flow error 1 Flow 602 end Expected flow error 1 Flow 602 end Expected flow error 1 Source Filename: dictionary.adb Listing Filename: dictionary.lsb Unit name: Dictionary Unit type: package body Unit has been analysed, any errors are listed below. 109 error(s) or warning(s) Line with Ada.Containers.Vectors; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. for Ref_Type'Size use ExaminerConstants.Address_Size; ^ --- Warning : 2: Representation clause - ignored by the Examiner. 
end Dynamic_Symbol_Table; --- Warning : 10: The private part of package Dynamic_Symbol_Table is hidden - hidden text is ignored by the Examiner. end RawDict; --- Warning : 10: The private part of package RawDict is hidden - hidden text is ignored by the Examiner. Result := Root_Type = Extended_Local; --- Warning :402: Default assertion planted to cut loop. while The_Dimension > 1 loop --- Warning :402: Default assertion planted to cut loop. while Current /= Get_Unknown_Type_Mark and then Is_Subtype (Type_Mark => Current) loop --- Warning :402: Default assertion planted to cut loop. Current_Package := Next_Package; --- Warning :402: Default assertion planted to cut loop. Current_Package := RawDict.Get_Package_Parent (The_Package => Current_Package); --- Warning :402: Default assertion planted to cut loop. Is_Private := Is_Private or else RawDict.Get_Package_Is_Private (The_Package => Current_Package); --- Warning :402: Default assertion planted to cut loop. Next_Package := RawDict.Get_Package_Parent (The_Package => Current_Package); --- Warning :402: Default assertion planted to cut loop. while RawDict.GetSymbolDiscriminant (Current) = Subcomponent_Symbol loop --- Warning :402: Default assertion planted to cut loop. if Tmp_Global_Variable = RawDict.Null_Global_Variable_Info_Ref then --- Warning :402: Default assertion planted to cut loop. if Tmp_Global_Variable = RawDict.Null_Global_Variable_Info_Ref then --- Warning :402: Default assertion planted to cut loop. if Tmp_Global_Variable = RawDict.Null_Global_Variable_Info_Ref then --- Warning :402: Default assertion planted to cut loop. if Tmp_Global_Variable = RawDict.Null_Global_Variable_Info_Ref then --- Warning :402: Default assertion planted to cut loop. exit when Get_Type (The_Symbol => CurrentSymbol (First_Record_Component (Type_Mark => Result))) = Stop_Sym; --- Warning :402: Default assertion planted to cut loop. Current_Region := GetRegion (Current_Scope); --- Warning :402: Default assertion planted to cut loop. Region := GetRegion (Current); --- Warning :402: Default assertion planted to cut loop. exit when RawDict.Get_Package_Parent (The_Package => Current_Package) /= RawDict.Null_Package_Info_Ref; --- Warning :402: Default assertion planted to cut loop. exit when RawDict.Get_Package_Is_Private (The_Package => Current_Package); --- Warning :402: Default assertion planted to cut loop. while The_Rule_Policy /= RawDict.Null_Rule_Policy_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. Rule_Policy := Get_Constant_Rule_Policy_For_Current_Scope (The_Constant => The_Constant, --- Warning :402: Default assertion planted to cut loop. exit when RawDict.Get_Package_Is_Private (The_Package => Current_Package); --- Warning :402: Default assertion planted to cut loop. exit when LexTokenManager.Lex_String_Case_Insensitive_Compare --- Warning :402: Default assertion planted to cut loop. while Tmp_Declaration /= RawDict.Null_Declaration_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while Tmp_Declaration /= RawDict.Null_Declaration_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (GlobalVariable) and then Count < Natural'Last loop --- Warning :402: Default assertion planted to cut loop. for Dim in Positive range 1 .. Dimension - 1 loop --- Warning :402: Default assertion planted to cut loop. Array_Index := NextSymbol (Array_Index); --- Warning :402: Default assertion planted to cut loop. 
while not IsCompilationUnit (Sym) loop --- Warning :402: Default assertion planted to cut loop. Region := GetRegion (Current); --- Warning :402: Default assertion planted to cut loop. while CurrentSymbol (Loops) /= TheLoop and then Number < Positive'Last loop --- Warning :402: Default assertion planted to cut loop. pragma Style_Checks (Off); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Style_Checks (On); ^ --- Warning : 3: Pragma - ignored by the Examiner. Region := GetRegion (Scope); --- Warning :402: Default assertion planted to cut loop. if IsNullIterator (Own_Variables) then --- Warning :402: Default assertion planted to cut loop. exit when IsNullIterator (Current); --- Warning :402: Default assertion planted to cut loop. for No in Positive range 1 .. Number - 1 loop --- Warning :402: Default assertion planted to cut loop. CurrentRegion := GetRegion (CurrentScope); --- Warning :402: Default assertion planted to cut loop. end TraceMsg; --- Warning : 10: The body of subprogram TraceMsg is hidden - hidden text is ignored by the Examiner. end Trace_Lex_Str; --- Warning : 10: The body of subprogram Trace_Lex_Str is hidden - hidden text is ignored by the Examiner. end Trace_Sym; --- Warning : 10: The body of subprogram Trace_Sym is hidden - hidden text is ignored by the Examiner. while The_Context_Clause /= RawDict.Null_Context_Clause_Info_Ref --- Warning :402: Default assertion planted to cut loop. while The_Context_Clause /= RawDict.Null_Context_Clause_Info_Ref --- Warning :402: Default assertion planted to cut loop. while The_Context_Clause /= RawDict.Null_Context_Clause_Info_Ref --- Warning :402: Default assertion planted to cut loop. while The_Context_Clause /= RawDict.Null_Context_Clause_Info_Ref --- Warning :402: Default assertion planted to cut loop. while The_Use_Type_Clause /= RawDict.Null_Use_Type_Clause_Info_Ref --- Warning :402: Default assertion planted to cut loop. Next_Package := RawDict.Get_Package_Parent (The_Package => Current_Package); --- Warning :402: Default assertion planted to cut loop. while The_Global_Variable /= RawDict.Null_Global_Variable_Info_Ref --- Warning :402: Default assertion planted to cut loop. while The_Global_Variable /= RawDict.Null_Global_Variable_Info_Ref --- Warning :402: Default assertion planted to cut loop. while not Stop loop --- Warning :402: Default assertion planted to cut loop. while OnEntryVariableToTry /= NullSymbol loop --- Warning :402: Default assertion planted to cut loop. while OnEntryVariableToTry /= NullSymbol loop --- Warning :402: Default assertion planted to cut loop. while Current_Generic_Association /= RawDict.Null_Generic_Association_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while Current_Generic_Association /= RawDict.Null_Generic_Association_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. pragma Unreferenced (The_Generic_Parameter); ^ --- Warning : 3: Pragma - ignored by the Examiner. exit when (RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) = --- Warning :402: Default assertion planted to cut loop. exit when (RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) = --- Warning :402: Default assertion planted to cut loop. exit when RawDict.Get_Kind_Of_Global_Variable (The_Global_Variable => The_Global_Variable) = --- Warning :402: Default assertion planted to cut loop. 
Current_Record := Get_Type (The_Symbol => CurrentSymbol (First_Record_Component (Type_Mark => Current_Record))); --- Warning :402: Default assertion planted to cut loop. if The_Record_Component /= RawDict.Null_Record_Component_Info_Ref then --- Warning :402: Default assertion planted to cut loop. Current_Record := CurrentSymbol (First_Record_Component (Type_Mark => Current_Type)); --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (Component) and then Count < Natural'Last loop --- Warning :402: Default assertion planted to cut loop. Number_Of_Actual_Components := Get_Number_Of_Actual_Components (The_Record_Type => Current_Type); --- Warning :402: Default assertion planted to cut loop. exit when Current_Record = RawDict.Null_Type_Info_Ref; --- Warning :402: Default assertion planted to cut loop. for No in Positive range 1 .. Number - 1 loop --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (Generic_It) and then not IsNullIterator (Actual_It) loop --- Warning :402: Default assertion planted to cut loop. while Result /= RawDict.Null_Parameter_Constraint_Info_Ref --- Warning :402: Default assertion planted to cut loop. while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while (RawDict.GetSymbolDiscriminant (CurrentSymbol (Current)) /= Subprogram_Parameter_Symbol --- Warning :402: Default assertion planted to cut loop. while The_Parameter_Constraint /= RawDict.Null_Parameter_Constraint_Info_Ref --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (Parameter) and then Count < Natural'Last loop --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (Parameter) and then Count < Natural'Last loop --- Warning :402: Default assertion planted to cut loop. for No in Positive range 1 .. Number - 1 loop --- Warning :402: Default assertion planted to cut loop. while The_Inherit_Clause /= RawDict.Null_Context_Clause_Info_Ref --- Warning :402: Default assertion planted to cut loop. while The_Inherit_Clause /= RawDict.Null_Context_Clause_Info_Ref --- Warning :402: Default assertion planted to cut loop. while The_Inherit_Clause /= RawDict.Null_Context_Clause_Info_Ref --- Warning :402: Default assertion planted to cut loop. while The_Inherit_Clause /= RawDict.Null_Context_Clause_Info_Ref --- Warning :402: Default assertion planted to cut loop. while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. Descendent := RawDict.Get_Package_First_Public_Child (The_Package => Item); --- Warning :402: Default assertion planted to cut loop. while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. 
while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while The_Subprogram_Parameter /= RawDict.Null_Subprogram_Parameter_Info_Ref --- Warning :402: Default assertion planted to cut loop. while not Stop loop --- Warning :402: Default assertion planted to cut loop. if IsNullIterator (Own_Variables) then --- Warning :402: Default assertion planted to cut loop. while Is_Constituent (The_Variable => The_Variable) loop --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. The_First_Record_Component := RawDict.Get_Type_First_Record_Component (Type_Mark => Current_Record); --- Warning :402: Default assertion planted to cut loop. while The_Record_Component /= RawDict.Null_Record_Component_Info_Ref -- did not find --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. 
Line
Flow   35   end  Importation of the initial value   1
Flow   50   end  Value is not derived from the im   1
Flow   35   end  Importation of the initial value   1
Flow   50   end  Value is not derived from the im   1
Flow   35   end  Importation of the initial value   1
Flow   50   end  Value is not derived from the im   1
Flow   35   end  Importation of the initial value   1
Flow   50   end  Value is not derived from the im   1
Flow   35   end  Importation of the initial value   1
Flow   50   end  Value is not derived from the im   1
Flow   35   end  Importation of the initial value   1
Flow   50   end  Value is not derived from the im   1
Flow   41        Stable expression expected here    1
Flow   41        Stable expression expected here    1
Flow   35   end  Importation of the initial value   1
Flow   50   end  Value is not derived from the im   1
Flow   10        Expected ineffective assignment    1
Flow   33   end  Expected The_Generic_Parameter t   1
Flow  601   end  Spurious data coupling via Dicti   1
Flow  601   end  Spurious data coupling via Dicti   1
Flow  601   end  Spurious data coupling via Dicti   1
Flow   41        Stable expression expected here    1

Source Filename: dictionary-add_declaration.adb
Listing Filename: dictionary-add_declaration.lsb
Unit name: Dictionary.Add_Declaration
Unit type: subunit
Unit has been analysed, any errors are listed below.
No errors found

Source Filename: dictionary-add_generic_formal_parameter_local.adb
Listing Filename: dictionary-add_generic_formal_parameter_local.lsb
Unit name: Dictionary.Add_Generic_Formal_Parameter_Local
Unit type: subunit
Unit has been analysed, any errors are listed below.
No errors found

Source Filename: dictionary-add_record_component.adb
Listing Filename: dictionary-add_record_component.lsb
Unit name: Dictionary.Add_Record_Component
Unit type: subunit
Unit has been analysed, any errors are listed below.
No errors found

Source Filename: dictionary-add_record_subcomponent.adb
Listing Filename: dictionary-add_record_subcomponent.lsb
Unit name: Dictionary.Add_Record_Subcomponent
Unit type: subunit
Unit has been analysed, any errors are listed below.
No errors found

Source Filename: dictionary-add_renaming_declaration.adb
Listing Filename: dictionary-add_renaming_declaration.lsb
Unit name: Dictionary.Add_Renaming_Declaration
Unit type: subunit
Unit has been analysed, any errors are listed below.
No errors found

Source Filename: dictionary-add_subprogram_parameter.adb
Listing Filename: dictionary-add_subprogram_parameter.lsb
Unit name: Dictionary.Add_Subprogram_Parameter
Unit type: subunit
Unit has been analysed, any errors are listed below.
1 error(s) or warning(s)

Line
for I in reverse Positive range 1 .. Number_Of_Dimensions loop
--- Warning :402: Default assertion planted to cut loop.

Source Filename: dictionary-add_use_type_reference.adb
Listing Filename: dictionary-add_use_type_reference.lsb
Unit name: Dictionary.Add_Use_Type_Reference
Unit type: subunit
Unit has been analysed, any errors are listed below.
No errors found

Source Filename: dictionary-addinheritsreference.adb
Listing Filename: dictionary-addinheritsreference.lsb
Unit name: Dictionary.AddInheritsReference
Unit type: subunit
Unit has been analysed, any errors are listed below.
2 error(s) or warning(s)

Line
exit when The_Context_Clause = RawDict.Null_Context_Clause_Info_Ref;
--- Warning :402: Default assertion planted to cut loop.
exit when The_Context_Clause = RawDict.Null_Context_Clause_Info_Ref;
--- Warning :402: Default assertion planted to cut loop.
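The many occurrences of "--- Warning :402: Default assertion planted to cut loop." record loops for which no user-written --# assert exists, so the Examiner has planted a default assertion in order to cut the loop for flow and proof analysis. Where the default is not wanted, the usual remedy is an explicit loop assertion; a minimal, hypothetical sketch (loop and names invented for illustration):

   while The_Clause /= Null_Clause loop
      --# assert The_Clause /= Null_Clause;   -- explicit cut point, so the
                                              -- default assertion (warning 402)
                                              -- is not needed for this loop
      The_Clause := Next_Clause (The_Clause);
   end loop;

The warning is informational; it simply notes where a default assertion was used and does not indicate an error in the analysed unit.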
Source Filename: dictionary-addloop.adb
Listing Filename: dictionary-addloop.lsb
Unit name: Dictionary.AddLoop
Unit type: subunit
Unit has been analysed, any errors are listed below.
No errors found

Source Filename: dictionary-addwithreference.adb
Listing Filename: dictionary-addwithreference.lsb
Unit name: Dictionary.AddWithReference
Unit type: subunit
Unit has been analysed, any errors are listed below.
2 error(s) or warning(s)

Line
exit when The_Context_Clause = RawDict.Null_Context_Clause_Info_Ref;
--- Warning :402: Default assertion planted to cut loop.
exit when The_Context_Clause = RawDict.Null_Context_Clause_Info_Ref;
--- Warning :402: Default assertion planted to cut loop.

Source Filename: dictionary-attribute_is_visible.adb
Listing Filename: dictionary-attribute_is_visible.lsb
Unit name: Dictionary.Attribute_Is_Visible
Unit type: subunit
Unit has been analysed, any errors are listed below.
No errors found

Source Filename: dictionary-attribute_is_visible_but_obsolete_local.adb
Listing Filename: dictionary-attribute_is_visible_but_obsolete_local.lsb
Unit name: Dictionary.Attribute_Is_Visible_But_Obsolete_Local
Unit type: subunit
Unit has been analysed, any errors are listed below.
No errors found

Source Filename: dictionary-dynamic_symbol_table.adb
Listing Filename: dictionary-dynamic_symbol_table.lsb
Unit name: Dictionary.Dynamic_Symbol_Table
Unit type: subunit
Unit has been analysed, any errors are listed below.
5 error(s) or warning(s)

Line
end Initialize;
--- Warning : 10: The body of subprogram Initialize is hidden - hidden text is ignored by the Examiner.
end Get_Current_Usage;
--- Warning : 10: The body of subprogram Get_Current_Usage is hidden - hidden text is ignored by the Examiner.
end Get_Info;
--- Warning : 10: The body of subprogram Get_Info is hidden - hidden text is ignored by the Examiner.
end Set_Info;
--- Warning : 10: The body of subprogram Set_Info is hidden - hidden text is ignored by the Examiner.
end Add_Symbol;
--- Warning : 10: The body of subprogram Add_Symbol is hidden - hidden text is ignored by the Examiner.

Source Filename: dictionary-generatesimplename.adb
Listing Filename: dictionary-generatesimplename.lsb
Unit name: Dictionary.GenerateSimpleName
Unit type: subunit
Unit has been analysed, any errors are listed below.
6 error(s) or warning(s)

Line
while CurrentSymbol (Loops) /= The_Loop and then Number < Positive'Last loop
--- Warning :402: Default assertion planted to cut loop.
end Image;
--- Warning : 10: The body of subprogram Image is hidden - hidden text is ignored by the Examiner.
while Package_Local /= RawDict.Null_Package_Info_Ref loop
--- Warning :402: Default assertion planted to cut loop.
LexTokenStacks.Pop (Stack, Current_Token);
--- Warning :402: Default assertion planted to cut loop.
if RawDict.GetSymbolDiscriminant (Record_Local) /= Subcomponent_Symbol
--- Warning :402: Default assertion planted to cut loop.
LexTokenStacks.Pop (Stack, Current_Token);
--- Warning :402: Default assertion planted to cut loop.

Source Filename: dictionary-get_binary_operator_type_local.adb
Listing Filename: dictionary-get_binary_operator_type_local.lsb
Unit name: Dictionary.Get_Binary_Operator_Type_Local
Unit type: subunit
Unit has been analysed, any errors are listed below.
14 error(s) or warning(s)

Line
pragma Style_Checks (Off); ^
--- Warning : 3: Pragma - ignored by the Examiner.
pragma Style_Checks (On); ^
--- Warning : 3: Pragma - ignored by the Examiner.
pragma Style_Checks (Off); ^
--- Warning : 3: Pragma - ignored by the Examiner.
pragma Style_Checks (On); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Style_Checks (Off); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Style_Checks (On); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Style_Checks (Off); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Style_Checks (On); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Style_Checks (Off); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Style_Checks (On); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Style_Checks (Off); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Style_Checks (On); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Style_Checks (Off); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Style_Checks (On); ^ --- Warning : 3: Pragma - ignored by the Examiner. Source Filename: dictionary-get_record_component.adb Listing Filename: dictionary-get_record_component.lsb Unit name: Dictionary.Get_Record_Component Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: dictionary-get_scalar_attribute_type.adb Listing Filename: dictionary-get_scalar_attribute_type.lsb Unit name: Dictionary.Get_Scalar_Attribute_Type Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: dictionary-get_scalar_attribute_value.adb Listing Filename: dictionary-get_scalar_attribute_value.lsb Unit name: Dictionary.Get_Scalar_Attribute_Value Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: dictionary-getanyprefixneeded.adb Listing Filename: dictionary-getanyprefixneeded.lsb Unit name: Dictionary.GetAnyPrefixNeeded Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line exit when Current_Scope = Declared_Scope; --- Warning :402: Default assertion planted to cut loop. Source Filename: dictionary-getscope.adb Listing Filename: dictionary-getscope.lsb Unit name: Dictionary.GetScope Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: dictionary-initialize.adb Listing Filename: dictionary-initialize.lsb Unit name: Dictionary.Initialize Unit type: subunit Unit has been analysed, any errors are listed below. 3 error(s) or warning(s) Line pragma Unreferenced (The_Enumeration_Literal); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Unreferenced (The_Array_Index); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Unreferenced (Type_Mark); ^ --- Warning : 3: Pragma - ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. 
Line Flow 32 end Flow errors due to undefined com 1 Flow 31 end Flow errors due to undefined com 1 Flow 602 end Flow errors due to undefined com 1 Flow 32 end Flow errors due to undefined com 1 Flow 31 end Flow errors due to undefined com 1 Flow 602 end Flow errors due to undefined com 1 Flow 32 end Flow errors due to undefined com 1 Flow 31 end Flow errors due to undefined com 1 Flow 602 end Flow errors due to undefined com 1 Flow 32 end Flow errors due to undefined com 1 Flow 31 end Flow errors due to undefined com 1 Flow 602 end Flow errors due to undefined com 1 Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Flow 33 end Expected to be neither reference 1 Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Source Filename: dictionary-instantiate_subprogram_parameters.adb Listing Filename: dictionary-instantiate_subprogram_parameters.lsb Unit name: Dictionary.Instantiate_Subprogram_Parameters Unit type: subunit Unit has been analysed, any errors are listed below. 4 error(s) or warning(s) Line while Generic_Parameter /= RawDict.Null_Subprogram_Parameter_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while Generic_Global_Variable /= RawDict.Null_Global_Variable_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (Export_It) loop --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (Dependency_It) loop --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Source Filename: dictionary-is_callable.adb Listing Filename: dictionary-is_callable.lsb Unit name: Dictionary.Is_Callable Unit type: subunit Unit has been analysed, any errors are listed below. 3 error(s) or warning(s) Line while The_Declaration /= RawDict.Null_Declaration_Info_Ref --- Warning :402: Default assertion planted to cut loop. Region := GetRegion (Current_Scope); --- Warning :402: Default assertion planted to cut loop. Current_Region := GetRegion (Current_Scope); --- Warning :402: Default assertion planted to cut loop. Source Filename: dictionary-is_renamed_local.adb Listing Filename: dictionary-is_renamed_local.lsb Unit name: Dictionary.Is_Renamed_Local Unit type: subunit Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line while Current_Declaration /= RawDict.Null_Declaration_Info_Ref --- Warning :402: Default assertion planted to cut loop. Found := Is_Renamed_In_This_Scope (The_Subprogram => The_Subprogram, --- Warning :402: Default assertion planted to cut loop. Source Filename: dictionary-lookupitem.adb Listing Filename: dictionary-lookupitem.lsb Unit name: Dictionary.LookupItem Unit type: subunit Unit has been analysed, any errors are listed below. 
7 error(s) or warning(s) Line exit when (RawDict.GetSymbolDiscriminant (GetRegion (Current_Scope)) = Package_Symbol --- Warning :402: Default assertion planted to cut loop. exit when Ancestor = RawDict.Null_Package_Info_Ref --- Warning :402: Default assertion planted to cut loop. while Continue loop --- Warning :402: Default assertion planted to cut loop. exit when (RawDict.GetSymbolDiscriminant (GetRegion (Current_Scope)) = Package_Symbol --- Warning :402: Default assertion planted to cut loop. exit when Current_Package = RawDict.Null_Package_Info_Ref; --- Warning :402: Default assertion planted to cut loop. LookupScope --- Warning :402: Default assertion planted to cut loop. if RawDict.GetSymbolDiscriminant (Current_Region) = Package_Symbol then --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 41 Structurally this is the preferr 1 Source Filename: dictionary-lookupscope.adb Listing Filename: dictionary-lookupscope.lsb Unit name: Dictionary.LookupScope Unit type: subunit Unit has been analysed, any errors are listed below. 16 error(s) or warning(s) Line while The_Enumeration_Literal /= RawDict.Null_Enumeration_Literal_Info_Ref --- Warning :402: Default assertion planted to cut loop. exit when The_Declaration = RawDict.Null_Declaration_Info_Ref; --- Warning :402: Default assertion planted to cut loop. while (The_Discriminant /= NullSymbol --- Warning :402: Default assertion planted to cut loop. if The_Own_Variable = RawDict.Null_Own_Variable_Info_Ref then --- Warning :402: Default assertion planted to cut loop. if The_Task_Type = NullSymbol then --- Warning :402: Default assertion planted to cut loop. if IsNullIterator (Constituents) then --- Warning :402: Default assertion planted to cut loop. exit when IsNullIterator (AbstractOwnVariables); --- Warning :402: Default assertion planted to cut loop. while not Stop loop --- Warning :402: Default assertion planted to cut loop. while The_Subprogram_Parameter /= RawDict.Null_Subprogram_Parameter_Info_Ref --- Warning :402: Default assertion planted to cut loop. The_First_Record_Component := RawDict.Get_Type_First_Record_Component (Type_Mark => Current_Record); --- Warning :402: Default assertion planted to cut loop. while (The_Record_Component /= RawDict.Null_Record_Component_Info_Ref -- did not find --- Warning :402: Default assertion planted to cut loop. if IsNullIterator (Constituents) then --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. if The_Global_Variable = RawDict.Null_Global_Variable_Info_Ref then --- Warning :402: Default assertion planted to cut loop. First_Subcomponent := Current_Record; --- Warning :402: Default assertion planted to cut loop. exit when Current = RawDict.Null_Subcomponent_Info_Ref; --- Warning :402: Default assertion planted to cut loop. Source Filename: dictionary-lookupselecteditem.adb Listing Filename: dictionary-lookupselecteditem.lsb Unit name: Dictionary.LookupSelectedItem Unit type: subunit Unit has been analysed, any errors are listed below. 7 error(s) or warning(s) Line if The_Global_Variable = RawDict.Null_Global_Variable_Info_Ref then --- Warning :402: Default assertion planted to cut loop. Region := GetRegion (Current_Scope); --- Warning :402: Default assertion planted to cut loop. 
exit when (RawDict.GetSymbolDiscriminant (GetRegion (Current)) = Package_Symbol --- Warning :402: Default assertion planted to cut loop. exit when Ancestor = RawDict.Null_Package_Info_Ref --- Warning :402: Default assertion planted to cut loop. exit when Current_Package = RawDict.Null_Package_Info_Ref; --- Warning :402: Default assertion planted to cut loop. Region := GetRegion (Current_Scope); --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: dictionary-nextsymbol.adb Listing Filename: dictionary-nextsymbol.lsb Unit name: Dictionary.NextSymbol Unit type: subunit Unit has been analysed, any errors are listed below. 27 error(s) or warning(s) Line while The_Declaration /= RawDict.Null_Declaration_Info_Ref and then not Found loop --- Warning :402: Default assertion planted to cut loop. while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. if Current_Record = RawDict.Null_Type_Info_Ref then --- Warning :402: Default assertion planted to cut loop. while The_Declaration /= RawDict.Null_Declaration_Info_Ref and then not Found loop --- Warning :402: Default assertion planted to cut loop. while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while The_Subprogram_Parameter /= RawDict.Null_Subprogram_Parameter_Info_Ref --- Warning :402: Default assertion planted to cut loop. while not Stop loop --- Warning :402: Default assertion planted to cut loop. while not Stop loop --- Warning :402: Default assertion planted to cut loop. The_Global_Variable := RawDict.Get_Next_Global_Variable (The_Global_Variable => The_Global_Variable); --- Warning :402: Default assertion planted to cut loop. The_Subprogram_Parameter := --- Warning :402: Default assertion planted to cut loop. while not Stop and then The_Global_Variable /= RawDict.Null_Global_Variable_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while not Stop loop --- Warning :402: Default assertion planted to cut loop. The_Global_Variable := RawDict.Get_Next_Global_Variable (The_Global_Variable => The_Global_Variable); --- Warning :402: Default assertion planted to cut loop. The_Subprogram_Parameter := --- Warning :402: Default assertion planted to cut loop. while not Stop and then The_Global_Variable /= RawDict.Null_Global_Variable_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while not Stop loop --- Warning :402: Default assertion planted to cut loop. The_Global_Variable := RawDict.Get_Next_Global_Variable (The_Global_Variable => The_Global_Variable); --- Warning :402: Default assertion planted to cut loop. 
Descendent := RawDict.Get_Package_First_Public_Child (The_Package => Current_Package); --- Warning :402: Default assertion planted to cut loop. while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. if Own_Variable = RawDict.Null_Own_Variable_Info_Ref then --- Warning :402: Default assertion planted to cut loop. if Own_Variable = RawDict.Null_Own_Variable_Info_Ref then --- Warning :402: Default assertion planted to cut loop. Source Filename: dictionary-operator_is_visible.adb Listing Filename: dictionary-operator_is_visible.lsb Unit name: Dictionary.Operator_Is_Visible Unit type: subunit Unit has been analysed, any errors are listed below. 9 error(s) or warning(s) Line Current_Region := Dictionary.GetRegion (Current_Scope); --- Warning :402: Default assertion planted to cut loop. Is_Visible := --- Warning :402: Default assertion planted to cut loop. The_Package := RawDict.Get_Package_Parent (The_Package => The_Package); --- Warning :402: Default assertion planted to cut loop. while The_Use_Type_Clause /= RawDict.Null_Use_Type_Clause_Info_Ref --- Warning :402: Default assertion planted to cut loop. Found := Is_Used_In_This_Scope (Type_Mark => Type_Mark, --- Warning :402: Default assertion planted to cut loop. The_Package := RawDict.Get_Package_Parent (The_Package => The_Package); --- Warning :402: Default assertion planted to cut loop. exit when Current = RawDict.Null_Declaration_Info_Ref; --- Warning :402: Default assertion planted to cut loop. Found := --- Warning :402: Default assertion planted to cut loop. The_Package := RawDict.Get_Package_Parent (The_Package => The_Package); --- Warning :402: Default assertion planted to cut loop. Source Filename: dictionary-rawdict.adb Listing Filename: dictionary-rawdict.lsb Unit name: Dictionary.RawDict Unit type: subunit Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line with Unchecked_Conversion; ^ --- Warning : 1: The identifier Unchecked_Conversion is either undeclared or not visible at this point. end RawDict; -- hidden because of non-SPARK code --- Warning : 10: The body of package RawDict is hidden - hidden text is ignored by the Examiner. Source Filename: dictionary-search_for_inherited_operations.adb Listing Filename: dictionary-search_for_inherited_operations.lsb Unit name: Dictionary.Search_For_Inherited_Operations Unit type: subunit Unit has been analysed, any errors are listed below. 4 error(s) or warning(s) Line while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while The_Declaration /= RawDict.Null_Declaration_Info_Ref loop --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. Current_Package := RawDict.Get_Package_Extends (The_Package => Current_Package); --- Warning :402: Default assertion planted to cut loop. Source Filename: dictionary-targetdata.adb Listing Filename: dictionary-targetdata.lsb Unit name: Dictionary.TargetData Unit type: subunit Unit has been analysed, any errors are listed below. 6 error(s) or warning(s) Line Get_Char (File => File, --- Warning :402: Default assertion planted to cut loop. E_Strings.Append_Char (E_Str => Str, --- Warning :402: Default assertion planted to cut loop. pragma Unreferenced (Unused_String); ^ --- Warning : 3: Pragma - ignored by the Examiner. Get_String (File => Data_File, --- Warning :402: Default assertion planted to cut loop. 
Get_String (File => Data_File, --- Warning :402: Default assertion planted to cut loop. Get_String (File => Data_File, --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Flow 10 end Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Flow 10 end Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Flow 10 Ineffective assignment here OK 1 Flow 33 end Dummy_Char not referenced here 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Source Filename: dictionary-write.adb Listing Filename: dictionary-write.lsb Unit name: Dictionary.Write Unit type: subunit Unit has been analysed, any errors are listed below. 28 error(s) or warning(s) Line while not IsNullIterator (Parameter_It) and then Number < Positive'Last loop --- Warning :402: Default assertion planted to cut loop. while (The_Declaration /= RawDict.Null_Declaration_Info_Ref and then not Found) loop --- Warning :402: Default assertion planted to cut loop. exit when IsNullIterator (With_Reference); --- Warning :402: Default assertion planted to cut loop. exit when IsNullIterator (Inherits_Reference); --- Warning :402: Default assertion planted to cut loop. while (The_Declaration /= RawDict.Null_Declaration_Info_Ref and then not Found) loop --- Warning :402: Default assertion planted to cut loop. exit when IsNullIterator (Global_Variable); --- Warning :402: Default assertion planted to cut loop. exit when IsNullIterator (Global_Variable); --- Warning :402: Default assertion planted to cut loop. exit when IsNullIterator (Dependency); --- Warning :402: Default assertion planted to cut loop. exit when IsNullIterator (Export); --- Warning :402: Default assertion planted to cut loop. exit when IsNullIterator (Dependency); --- Warning :402: Default assertion planted to cut loop. exit when IsNullIterator (Export); --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (Constraint_It) and then not IsNullIterator (Known_It) loop --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (Literal) and then Value < Natural'Last loop --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (Component) and then Number < Positive'Last loop --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (Array_Index) and then Dimension < Positive'Last loop --- Warning :402: Default assertion planted to cut loop. exit when IsNullIterator (The_Loop); --- Warning :402: Default assertion planted to cut loop. exit when IsNullIterator (Own_Variables); --- Warning :402: Default assertion planted to cut loop. exit when IsNullIterator (Packages); --- Warning :402: Default assertion planted to cut loop. exit when IsNullIterator (Constituents); --- Warning :402: Default assertion planted to cut loop. exit when IsNullIterator (Abstract_Own_Variables); --- Warning :402: Default assertion planted to cut loop. 
exit when IsNullIterator (Declarative_Items); --- Warning :402: Default assertion planted to cut loop. while not IsNullIterator (Parameter) and then Number < Positive'Last loop --- Warning :402: Default assertion planted to cut loop. end Write_Library_Unit; --- Warning : 10: The body of subprogram Write_Library_Unit is hidden - hidden text is ignored by the Examiner. exit when IsNullIterator (Library_Units); --- Warning :402: Default assertion planted to cut loop. exit when SPARK_IO.End_Of_File (TemporaryFile); --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Source Filename: dictionary-writeoperatorrenamingdeclaration.adb Listing Filename: dictionary-writeoperatorrenamingdeclaration.lsb Unit name: Dictionary.WriteOperatorRenamingDeclaration Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler.adb Listing Filename: errorhandler.lsb Unit name: ErrorHandler Unit type: package body Unit has been analysed, any errors are listed below. 13 error(s) or warning(s) Line with Debug; ^ --- Warning : 1: The identifier Debug is either undeclared or not visible at this point. pragma Inline (Append_String); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Inline (Append_Lex_String); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Unreferenced (Append_Lex_String); -- unused at present ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Inline (Set_Col); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Inline (Put_Char); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Inline (Put_Integer); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Inline (New_Line); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Inline (Put_Line); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Inline (Put_E_String); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Unreferenced (Put_E_String); -- unused at present ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Inline (Put_E_Line); ^ --- Warning : 3: Pragma - ignored by the Examiner. end Dump_State; --- Warning : 10: The body of subprogram Dump_State is hidden - hidden text is ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Flush changes buffer but we need 1 Flow 10 Design decision not to check fil 1 Flow 10 Flush changes buffer but we need 1 Flow 33 end Consequence of earlier deliberat 1 Flow 10 Intentional non use of file retu 1 Flow 33 end Consequence of earlier non-use 1 Flow 10 Flush changes buffer but we need 1 Flow 10 Intentional non use of file retu 1 Flow 33 end Consequence of earlier non-use 1 Flow 10 Only used where we want to echo 1 Flow 33 end Consequence of earlier deliberat 1 Flow 41 Mode-specific code 1 Flow 10 Returned parameter not needed in 1 Flow 10 Returned parameter not needed in 1 Source Filename: errorhandler-getfileline.adb Listing Filename: errorhandler-getfileline.lsb Unit name: ErrorHandler.GetFileLine Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-printerrors.adb Listing Filename: errorhandler-printerrors.lsb Unit name: ErrorHandler.PrintErrors Unit type: subunit Unit has been analysed, any errors are listed below. 
No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 41 Expected stable expression 1 Source Filename: errorhandler-printline.adb Listing Filename: errorhandler-printline.lsb Unit name: ErrorHandler.PrintLine Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-appenderrors.adb Listing Filename: errorhandler-appenderrors.lsb Unit name: ErrorHandler.AppendErrors Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 41 Expected stable expression 1 Flow 10 Expected ineffective assignment 1 Flow 33 end Expected Success to be neither r 1 Source Filename: errorhandler-appendsym.adb Listing Filename: errorhandler-appendsym.lsb Unit name: ErrorHandler.AppendSym Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line end AppendSym; --- Warning : 10: The body of subprogram AppendSym is hidden - hidden text is ignored by the Examiner. Source Filename: errorhandler-echoerrorentry.adb Listing Filename: errorhandler-echoerrorentry.lsb Unit name: ErrorHandler.EchoErrorEntry Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 33 end Expected Success to be neither r 1 Flow 10 Expected ineffective assignment 1 Source Filename: errorhandler-erroraccumulator.adb Listing Filename: errorhandler-erroraccumulator.lsb Unit name: ErrorHandler.ErrorAccumulator Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Source Filename: errorhandler-warningstatus.adb Listing Filename: errorhandler-warningstatus.lsb Unit name: ErrorHandler.WarningStatus Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 32 end Init. is partial but effective. 1 Flow 31 end Init. is partial but effective. 1 Flow 602 end Init. is partial but effective. 1 Source Filename: errorhandler-warningstatus-readwarningfile.adb Listing Filename: errorhandler-warningstatus-readwarningfile.lsb Unit name: ErrorHandler.WarningStatus.ReadWarningFile Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 33 end Expected File_Spec_Status to be 1 Flow 10 Expected ineffective assignment 1 Flow 10 Not assigned to. Due to Text_IO 1 Flow 33 end Expected File_Status to be neith 1 Flow 34 end Not assigned to. Due to Text_IO 1 Source Filename: errorhandler-errorbuffer.adb Listing Filename: errorhandler-errorbuffer.lsb Unit name: ErrorHandler.ErrorBuffer Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. 
Line Flow 10 Expected ineffective assignment 1 Flow 31 end Intentional incomplete initializ 1 Flow 32 end Intentional incomplete initializ 1 Flow 602 end Intentional incomplete initializ 1 Flow 31 end Intentional incomplete initializ 1 Flow 32 end Intentional incomplete initializ 1 Flow 602 end Intentional incomplete initializ 1 Flow 31 end Intentional incomplete initializ 1 Flow 32 end Intentional incomplete initializ 1 Flow 602 end Intentional incomplete initializ 1 Source Filename: errorhandler-conversions.adb Listing Filename: errorhandler-conversions.lsb Unit name: ErrorHandler.Conversions Unit type: subunit Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line for I in Sources range LRM .. Sources'Last loop --- Warning :402: Default assertion planted to cut loop. for J in Sources range LRM .. Sources'Last loop --- Warning :402: Default assertion planted to cut loop. Source Filename: errorhandler-justifications.adb Listing Filename: errorhandler-justifications.lsb Unit name: ErrorHandler.Justifications Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-conversions-tostring.adb Listing Filename: errorhandler-conversions-tostring.lsb Unit name: ErrorHandler.Conversions.ToString Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line pragma Inline (Append_Lex_String); ^ --- Warning : 3: Pragma - ignored by the Examiner. Source Filename: errorhandler-conversions-tostring-appendreference.adb Listing Filename: errorhandler-conversions-tostring-appendreference.lsb Unit name: ErrorHandler.Conversions.ToString.AppendReference Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line for I in Index12 loop --- Warning :402: Default assertion planted to cut loop. Source Filename: errorhandler-conversions-tostring-condldependency.adb Listing Filename: errorhandler-conversions-tostring-condldependency.lsb Unit name: ErrorHandler.Conversions.ToString.CondlDependency Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-conversions-tostring-condldependency-condldependencyexpl.adb Listing Filename: errorhandler-conversions-tostring-condldependency-condldependencyexpl.lsb Unit name: ErrorHandler.Conversions.ToString.CondlDependency.CondlDependencyExpl Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-conversions-tostring-condlflowerr.adb Listing Filename: errorhandler-conversions-tostring-condlflowerr.lsb Unit name: ErrorHandler.Conversions.ToString.CondlFlowErr Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-conversions-tostring-condlflowerr-condlflowerrexpl.adb Listing Filename: errorhandler-conversions-tostring-condlflowerr-condlflowerrexpl.lsb Unit name: ErrorHandler.Conversions.ToString.CondlFlowErr.CondlFlowErrExpl Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-conversions-tostring-controlflowerror.adb Listing Filename: errorhandler-conversions-tostring-controlflowerror.lsb Unit name: ErrorHandler.Conversions.ToString.ControlFlowError Unit type: subunit Unit has been analysed, any errors are listed below. 
No errors found Source Filename: errorhandler-conversions-tostring-controlflowerror-controlflowerrorexpl.adb Listing Filename: errorhandler-conversions-tostring-controlflowerror-controlflowerrorexpl.lsb Unit name: ErrorHandler.Conversions.ToString.ControlFlowError.ControlFlowErrorExpl Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-conversions-tostring-depsemanticerr.adb Listing Filename: errorhandler-conversions-tostring-depsemanticerr.lsb Unit name: ErrorHandler.Conversions.ToString.DepSemanticErr Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-conversions-tostring-depsemanticerr-depsemanticerrexpl.adb Listing Filename: errorhandler-conversions-tostring-depsemanticerr-depsemanticerrexpl.lsb Unit name: ErrorHandler.Conversions.ToString.DepSemanticErr.DepSemanticErrExpl Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-conversions-tostring-ineffectivestatement.adb Listing Filename: errorhandler-conversions-tostring-ineffectivestatement.lsb Unit name: ErrorHandler.Conversions.ToString.IneffectiveStatement Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-conversions-tostring-ineffectivestatement-ineffectivestatementexpl.adb Listing Filename: errorhandler-conversions-tostring-ineffectivestatement-ineffectivestatementexpl.lsb Unit name: ErrorHandler.Conversions.ToString.IneffectiveStatement.IneffectiveStatementExpl Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-conversions-tostring-noerr.adb Listing Filename: errorhandler-conversions-tostring-noerr.lsb Unit name: ErrorHandler.Conversions.ToString.NoErr Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-conversions-tostring-semanticerr.adb Listing Filename: errorhandler-conversions-tostring-semanticerr.lsb Unit name: ErrorHandler.Conversions.ToString.SemanticErr Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-conversions-tostring-semanticerr-semanticerrexpl.adb Listing Filename: errorhandler-conversions-tostring-semanticerr-semanticerrexpl.lsb Unit name: ErrorHandler.Conversions.ToString.SemanticErr.SemanticErrExpl Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-conversions-tostring-stabilityerror.adb Listing Filename: errorhandler-conversions-tostring-stabilityerror.lsb Unit name: ErrorHandler.Conversions.ToString.StabilityError Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-conversions-tostring-stabilityerror-stabilityerrorexpl.adb Listing Filename: errorhandler-conversions-tostring-stabilityerror-stabilityerrorexpl.lsb Unit name: ErrorHandler.Conversions.ToString.StabilityError.StabilityErrorExpl Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-conversions-tostring-unconddependency.adb Listing Filename: errorhandler-conversions-tostring-unconddependency.lsb Unit name: ErrorHandler.Conversions.ToString.UncondDependency Unit type: subunit Unit has been analysed, any errors are listed below. 
No errors found Source Filename: errorhandler-conversions-tostring-unconddependency-unconddependencyexpl.adb Listing Filename: errorhandler-conversions-tostring-unconddependency-unconddependencyexpl.lsb Unit name: ErrorHandler.Conversions.ToString.UncondDependency.UncondDependencyExpl Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-conversions-tostring-uncondflowerr.adb Listing Filename: errorhandler-conversions-tostring-uncondflowerr.lsb Unit name: ErrorHandler.Conversions.ToString.UncondFlowErr Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-conversions-tostring-uncondflowerr-uncondflowerrexpl.adb Listing Filename: errorhandler-conversions-tostring-uncondflowerr-uncondflowerrexpl.lsb Unit name: ErrorHandler.Conversions.ToString.UncondFlowErr.UncondFlowErrExpl Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-conversions-tostring-usageerror.adb Listing Filename: errorhandler-conversions-tostring-usageerror.lsb Unit name: ErrorHandler.Conversions.ToString.UsageError Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-conversions-tostring-usageerror-usageerrorexpl.adb Listing Filename: errorhandler-conversions-tostring-usageerror-usageerrorexpl.lsb Unit name: ErrorHandler.Conversions.ToString.UsageError.UsageErrorExpl Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-conversions-tostring-warningwithoutposition.adb Listing Filename: errorhandler-conversions-tostring-warningwithoutposition.lsb Unit name: ErrorHandler.Conversions.ToString.WarningWithoutPosition Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-conversions-tostring-warningwithoutposition-warningwithoutpositionexpl.adb Listing Filename: errorhandler-conversions-tostring-warningwithoutposition-warningwithoutpositionexpl.lsb Unit name: ErrorHandler.Conversions.ToString.WarningWithoutPosition.WarningWithoutPositionExpl Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-conversions-tostring-warningwithposition.adb Listing Filename: errorhandler-conversions-tostring-warningwithposition.lsb Unit name: ErrorHandler.Conversions.ToString.WarningWithPosition Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-conversions-tostring-warningwithposition-warningwithpositionexpl.adb Listing Filename: errorhandler-conversions-tostring-warningwithposition-warningwithpositionexpl.lsb Unit name: ErrorHandler.Conversions.ToString.WarningWithPosition.WarningWithPositionExpl Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-conversions-tostring-note.adb Listing Filename: errorhandler-conversions-tostring-note.lsb Unit name: ErrorHandler.Conversions.ToString.Note Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler-conversions-tostring-note-noteexpl.adb Listing Filename: errorhandler-conversions-tostring-note-noteexpl.lsb Unit name: ErrorHandler.Conversions.ToString.Note.NoteExpl Unit type: subunit Unit has been analysed, any errors are listed below. 
No errors found Source Filename: flowanalyser.adb Listing Filename: flowanalyser.lsb Unit name: FlowAnalyser Unit type: package body Unit has been analysed, any errors are listed below. No errors found Source Filename: flowanalyser-ifa_stack.adb Listing Filename: flowanalyser-ifa_stack.lsb Unit name: FlowAnalyser.IFA_Stack Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 32 end Init. is partial but effective 1 Flow 31 end Init. is partial but effective 1 Flow 602 end Init. is partial but effective 1 Source Filename: flowanalyser-flowanalyse.adb Listing Filename: flowanalyser-flowanalyse.lsb Unit name: FlowAnalyser.FlowAnalyse Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line end PrintStackTop; --- Warning : 10: The body of subprogram PrintStackTop is hidden - hidden text is ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 601 end Spurious coupling expected 1 Flow 601 end Spurious coupling expected 1 Flow 601 end Spurious coupling expected 1 Flow 601 end Spurious coupling expected 1 Flow 23 Expect flow error on 1st write t 1 Flow 23 Expect flow error on 1st write t 1 Flow 504 Expect flow error 1 Flow 504 Expect flow error 1 Flow 501 Expect flow error 1 Flow 602 end expect 4 warnings from array DF 1 Flow 602 end expect 4 warnings from array DF 1 Flow 602 end expect 4 warnings from array DF 1 Flow 602 end expect 4 warnings from array DF 1 Flow 23 Partial but effective array init 1 Flow 23 Partial but effective array init 1 Flow 23 Partial but effective array init 1 Flow 23 Expect err owing to partial but 8 Flow 10 Final assignment to ExpnNumber n 1 Flow 504 Partial but effective array init 1 Flow 602 end Partial but effective array init 1 Flow 602 end Partial but effective array init 1 Flow 602 end Partial but effective array init 1 Flow 602 end Partial but effective array init 1 Flow 602 end Partial but effective array init 1 Flow 602 end Partial but effective array init 1 Flow 602 end Partial but effective array init 1 Flow 602 end Partial but effective array init 1 Flow 602 end Partial but effective array init 1 Flow 602 end Partial but effective array init 1 Flow 602 end Partial but effective array init 1 Flow 602 end Partial but effective array init 1 Flow 602 end Partial but effective array init 1 Flow 602 end Partial but effective array init 1 Flow 602 end Partial but effective array init 1 Flow 602 end Partial but effective array init 1 Flow 602 end Partial but effective array init 1 Flow 602 end Partial but effective array init 1 Flow 602 end Partial but effective array init 1 Flow 602 end Partial but effective array init 1 Flow 602 end Partial but effective array init 1 Flow 602 end Partial but effective array init 1 Flow 602 end Partial but effective array init 1 Flow 602 end Partial but effective array init 1 Source Filename: flowanalyser-flowanalyse-analyserelations.adb Listing Filename: flowanalyser-flowanalyse-analyserelations.lsb Unit name: FlowAnalyser.FlowAnalyse.AnalyseRelations Unit type: subunit Unit has been analysed, any errors are listed below. 
No errors found Source Filename: flowanalyser-flowanalyse-analyserelations-checkexpressions.adb Listing Filename: flowanalyser-flowanalyse-analyserelations-checkexpressions.lsb Unit name: FlowAnalyser.FlowAnalyse.AnalyseRelations.CheckExpressions Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 41 Expected stable expression 1 Flow 41 Expected stable expression 1 Flow 41 Expected stable expression 1 Source Filename: flowanalyser-flowanalyse-analyserelations-checkusages.adb Listing Filename: flowanalyser-flowanalyse-analyserelations-checkusages.lsb Unit name: FlowAnalyser.FlowAnalyse.AnalyseRelations.CheckUsages Unit type: subunit Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line while not SeqAlgebra.IsNullMember (TempMem) loop --- Warning :402: Default assertion planted to cut loop. while not SeqAlgebra.IsNullMember (M) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: flowanalyser-flowanalyse-analyserelations-checkdependencies.adb Listing Filename: flowanalyser-flowanalyse-analyserelations-checkdependencies.lsb Unit name: FlowAnalyser.FlowAnalyse.AnalyseRelations.CheckDependencies Unit type: subunit Unit has been analysed, any errors are listed below. 6 error(s) or warning(s) Line exit when SeqAlgebra.IsNullMember (M) or SeqAlgebra.IsNullMember (N); --- Warning :402: Default assertion planted to cut loop. while not SeqAlgebra.IsNullMember (MemberOfInitVars) loop --- Warning :402: Default assertion planted to cut loop. while not SeqAlgebra.IsNullMember (MemberOfExports) loop --- Warning :402: Default assertion planted to cut loop. while not SeqAlgebra.IsNullMember (MemberOfExportLeaves) loop --- Warning :402: Default assertion planted to cut loop. while ((Dictionary.IsFunction (SubprogSym) --- Warning :402: Default assertion planted to cut loop. while not SeqAlgebra.IsNullMember (MemberOfRhoCol) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: flowanalyser-flowanalyse-analyserelations-checkunused.adb Listing Filename: flowanalyser-flowanalyse-analyserelations-checkunused.lsb Unit name: FlowAnalyser.FlowAnalyse.AnalyseRelations.CheckUnused Unit type: subunit Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line while not SeqAlgebra.IsNullMember (CurrentMember) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: flowanalyser-flowanalyse-analyserelations-mergeandhandleerrors.adb Listing Filename: flowanalyser-flowanalyse-analyserelations-mergeandhandleerrors.lsb Unit name: FlowAnalyser.FlowAnalyse.AnalyseRelations.MergeAndHandleErrors Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 41 Expected stable expression 1 Flow 41 Expected stable expression 1 Flow 41 Expected stable expression 1 Flow 41 Expected stable expression 1 Flow 41 Expected stable expression 1 Source Filename: flowanalyser-flowanalysepartition.adb Listing Filename: flowanalyser-flowanalysepartition.lsb Unit name: FlowAnalyser.FlowAnalysePartition Unit type: subunit Unit has been analysed, any errors are listed below. 
19 error(s) or warning(s) Line while not Dictionary.IsNullIterator (ExportIt) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (ImportIt) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (ImportIt) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (ExportIt) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (SuspensionIt) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (ImportIt) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (ExportIt) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (ImportIt) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (SubprogramIt) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (WithedPackages) loop --- Warning :402: Default assertion planted to cut loop. while not SeqAlgebra.IsNullMember (MemberOfExports) loop --- Warning :402: Default assertion planted to cut loop. while CommandLineData.Content.Flow_Option /= CommandLineData.Data_Flow --- Warning :402: Default assertion planted to cut loop. while not SeqAlgebra.IsNullMember (MemberOfRhoCol) loop --- Warning :402: Default assertion planted to cut loop. while not SeqAlgebra.IsNullMember (Mem) loop --- Warning :402: Default assertion planted to cut loop. while not SeqAlgebra.IsNullMember (Mem) loop --- Warning :402: Default assertion planted to cut loop. while not SeqAlgebra.IsNullMember (Mem) loop --- Warning :402: Default assertion planted to cut loop. while not SeqAlgebra.IsNullMember (Mem) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: declarations.adb Listing Filename: declarations.lsb Unit name: Declarations Unit type: package body Unit has been analysed, any errors are listed below. No errors found Source Filename: declarations-outputdeclarations.adb Listing Filename: declarations-outputdeclarations.lsb Unit name: Declarations.OutputDeclarations Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: declarations-outputdeclarations-generatedeclarations.adb Listing Filename: declarations-outputdeclarations-generatedeclarations.lsb Unit name: Declarations.OutputDeclarations.GenerateDeclarations Unit type: subunit Unit has been analysed, any errors are listed below. 4 error(s) or warning(s) Line if Pile.IsNull (ThisNode) then --- Warning :402: Default assertion planted to cut loop. exit when Pile.IsNull (FromList); --- Warning :402: Default assertion planted to cut loop. exit when Pile.IsNull (TheCurrentNode); --- Warning :402: Default assertion planted to cut loop. PrevNode := TheCurrentNode; --- Warning :402: Default assertion planted to cut loop. Source Filename: declarations-outputdeclarations-generatedeclarations-generatesuccessors.adb Listing Filename: declarations-outputdeclarations-generatedeclarations-generatesuccessors.lsb Unit name: Declarations.OutputDeclarations.GenerateDeclarations.GenerateSuccessors Unit type: subunit Unit has been analysed, any errors are listed below. 
4 error(s) or warning(s) Line while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (ComponentIt) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (IndexIt) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (ParameterIt) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: declarations-outputdeclarations-printdeclarations.adb Listing Filename: declarations-outputdeclarations-printdeclarations.lsb Unit name: Declarations.OutputDeclarations.PrintDeclarations Unit type: subunit Unit has been analysed, any errors are listed below. 5 error(s) or warning(s) Line end Debug_Rank_Sym; --- Warning : 10: The body of subprogram Debug_Rank_Sym is hidden - hidden text is ignored by the Examiner. end Debug_Rank_Int; --- Warning : 10: The body of subprogram Debug_Rank_Int is hidden - hidden text is ignored by the Examiner. end PrintStandardRules; --- Warning : 10: The body of subprogram PrintStandardRules is hidden - hidden text is ignored by the Examiner. end PrintConstantsInReverseOrder; --- Warning : 10: The body of subprogram PrintConstantsInReverseOrder is hidden - hidden text is ignored by the Examiner. pragma Unreferenced (PrintConstantsInReverseOrder); ^ --- Warning : 3: Pragma - ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 We are not actually assigning he 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 33 end Expected Ok to be neither refere 1 Flow 33 end Expected Already_Present to be n 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 33 end Expected ineffective assignment 1 Flow 33 end Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Source Filename: declarations-outputdeclarations-printdeclarations-printconstantrules.adb Listing Filename: declarations-outputdeclarations-printdeclarations-printconstantrules.lsb Unit name: Declarations.OutputDeclarations.PrintDeclarations.PrintConstantRules Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 32 end Init is partial but effective. 1 Flow 31 end Init is partial but effective. 1 Flow 602 end Init is partial but effective. 1 Flow 10 Expected ineffective assignment 1 Source Filename: declarations-outputdeclarations-printdeclarations-printruleheader.adb Listing Filename: declarations-outputdeclarations-printdeclarations-printruleheader.lsb Unit name: Declarations.OutputDeclarations.PrintDeclarations.PrintRuleHeader Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 33 end Expected OK to be neither refere 1 Source Filename: declarations-outputdeclarations-printdeclarations-printtyperules.adb Listing Filename: declarations-outputdeclarations-printdeclarations-printtyperules.lsb Unit name: Declarations.OutputDeclarations.PrintDeclarations.PrintTypeRules Unit type: subunit Unit has been analysed, any errors are listed below. 
No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 33 end Expected Ok to be neither refere 1 Source Filename: cells.adb Listing Filename: cells.lsb Unit name: Cells Unit type: package body Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line with Ada.Exceptions; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. end Create_Cell; --- Warning : 9: The body of subprogram Create_Cell has a hidden exception handler - analysis and verification of contracts for this handler have not been performed. Source Filename: cells-utility.adb Listing Filename: cells-utility.lsb Unit name: Cells.Utility Unit type: package body Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line pragma Warnings (Off); ^ --- Warning : 3: Pragma - ignored by the Examiner. pragma Warnings (On); ^ --- Warning : 3: Pragma - ignored by the Examiner. Source Filename: cells-utility-list.adb Listing Filename: cells-utility-list.lsb Unit name: Cells.Utility.List Unit type: package body Unit has been analysed, any errors are listed below. 3 error(s) or warning(s) Line while not Cells.Is_Null_Cell (N) loop --- Warning :402: Default assertion planted to cut loop. while not Is_Null_Iterator (Iter) loop --- Warning :402: Default assertion planted to cut loop. while not Cells.Is_Null_Cell (P) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: cell_storage.adb Listing Filename: cell_storage.lsb Unit name: Cell_Storage Unit type: package body Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line end Cell_Storage; --- Warning : 10: The body of package Cell_Storage is hidden - hidden text is ignored by the Examiner. Source Filename: dag.adb Listing Filename: dag.lsb Unit name: DAG Unit type: package body Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line with ContextManager.Ops; ^ --- Warning : 1: The identifier Ops is either undeclared or not visible at this point. end Debug_Print_DAG; --- Warning : 10: The body of subprogram Debug_Print_DAG is hidden - hidden text is ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Warn 394 end It is intentional that the outsi 1 Flow 10 Unused here OK 1 Flow 33 end Unused here OK 1 Flow 10 ContainsReals, VCGFailure not us 2 Flow 10 KindOfStackedCheck not used here 1 Flow 10 CheckStack not used here 1 Flow 10 ShortCircuitStack not used here 1 Flow 10 VCGFailure not used here 1 Flow 10 ContainsReals not used here 1 Source Filename: dag-build_annotation_expression.adb Listing Filename: dag-build_annotation_expression.lsb Unit name: DAG.Build_Annotation_Expression Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 The stack will be empty and no l 1 Flow 41 Stable expression here OK 1 Flow 10 Ineffective assignment OK - Proc 1 Source Filename: dag-buildexpndag.adb Listing Filename: dag-buildexpndag.lsb Unit name: DAG.BuildExpnDAG Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. 
Line Flow 10 The stack has been emptied 1 Flow 10 The stack has been emptied 1 Flow 10 Unused here OK 1 Flow 33 end Unused here OK 1 Flow 41 Stable expression expected here 1 Flow 10 Ineffective assignment here OK 1 Source Filename: dag-buildexpndag-upattributedesignator.adb Listing Filename: dag-buildexpndag-upattributedesignator.lsb Unit name: DAG.BuildExpnDAG.UpAttributeDesignator Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: dag-buildgraph.adb Listing Filename: dag-buildgraph.lsb Unit name: DAG.BuildGraph Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line end DebugPrintInvariant; --- Warning : 10: The body of subprogram DebugPrintInvariant is hidden - hidden text is ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 The stack has been emptied 1 Flow 10 The stack has been emptied 1 Flow 10 The stack has been emptied 1 Flow 10 end CaseExpn unused here 1 Flow 33 end CaseExpn unused here 1 Flow 10 end DAGRoot not used here 1 Flow 33 end DAGRoot not used here 1 Flow 10 The function defs are needed her 1 Flow 10 Ineffective assignment here OK 1 Flow 10 Ineffective assignment here OK 1 Flow 10 Ineffective assignment here OK 1 Source Filename: dag-buildgraph-modelassignmentstmt.adb Listing Filename: dag-buildgraph-modelassignmentstmt.lsb Unit name: DAG.BuildGraph.ModelAssignmentStmt Unit type: subunit Unit has been analysed, any errors are listed below. 5 error(s) or warning(s) Line exit when (Cells.Get_Kind (VCGHeap, LocalCell) /= Cell_Storage.Op) and --- Warning :402: Default assertion planted to cut loop. if STree.Syntax_Node_Type (Node => LocalNode) = SP_Symbols.primary then --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (ImportIt) loop --- Warning :402: Default assertion planted to cut loop. exit when (Cells.Get_Kind (VCGHeap, LocalCell) /= Cell_Storage.Op) and --- Warning :402: Default assertion planted to cut loop. FormNewRHS (PrefixRoot, RHSRoot, NewRHSRoot, Kind); --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 601 end False coupling here OK 1 Flow 10 Ineffective assignment here OK 1 Source Filename: dag-buildgraph-modelprocedurecall.adb Listing Filename: dag-buildgraph-modelprocedurecall.lsb Unit name: DAG.BuildGraph.ModelProcedureCall Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line end SEPR2124Warning; --- Warning : 10: The body of subprogram SEPR2124Warning is hidden - hidden text is ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 The stack has been emptied 1 Flow 10 UnusedSym unused here 1 Flow 35 end Coupled to UnusedSym only 1 Flow 33 end UnusedSym unused here 1 Flow 10 HypStack unused here 1 Flow 10 The stack has been emptied 1 Flow 10 Unused_Export_DAG unused here 1 Flow 33 end Unused_Export_DAG unused here 1 Source Filename: dag-buildgraph-incorporateconstraints.adb Listing Filename: dag-buildgraph-incorporateconstraints.lsb Unit name: DAG.BuildGraph.IncorporateConstraints Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line pragma Unreferenced (ProcessParameters); ^ --- Warning : 3: Pragma - ignored by the Examiner. 
Expected messages marked with the accept annotation
Type Msg    Lines        Reason                            Match
     No.   From    To                                      No.    Line
Flow  10                 The stack has been emptied          1
Flow  31           end   Return_Anno_DAG is indirectly up    1
Flow  50           end   Return_Anno_DAG indirectly refer    1
Flow  50           end   Return_Anno_DAG indirectly refer    1
Flow  50           end   Return_Anno_DAG indirectly refer    1
Flow  50           end   Return_Anno_DAG indirectly refer    1
Warn   3           end   Suppress warnings on ReturnAnnoD    1
Flow  10                 The stack has been emptied          1
Flow  10                 The stack has been emptied          1
Flow  10                 The stack has been emptied          1

Source Filename: dag-loopcontext.adb
Listing Filename: dag-loopcontext.lsb
Unit name: DAG.LoopContext
Unit type: subunit
Unit has been analysed, any errors are listed below.

No errors found

Expected messages marked with the accept annotation
Type Msg    Lines        Reason                            Match
     No.   From    To                                      No.    Line
Flow 601           end   False coupling expected             1
Flow 601           end   False coupling expected             1
Flow 601           end   False coupling expected             1

Source Filename: dag-substitutions.adb
Listing Filename: dag-substitutions.lsb
Unit name: DAG.Substitutions
Unit type: subunit
Unit has been analysed, any errors are listed below.

5 error(s) or warning(s)

Line
for I in Positive range 1 .. Arg_No loop
--- Warning :402: Default assertion planted to cut loop.
exit when Cells.Is_Null_Cell (P);
--- Warning :402: Default assertion planted to cut loop.
--- Warning :402: Default assertion planted to cut loop.
exit when Cells.Is_Null_Cell (P);
--- Warning :402: Default assertion planted to cut loop.
--- Warning :402: Default assertion planted to cut loop.

Expected messages marked with the accept annotation
Type Msg    Lines        Reason                            Match
     No.   From    To                                      No.    Line
Flow  31           end   Constraint is updated indirectly    1
Flow  50           end   Indirectly used via local pointe    1
Flow  50           end   Indirectly used via local pointe    1
Flow  50           end   Indirectly used via local pointe    1
Warn   3           end   Suppress warnings on Constraint     1
Flow  31           end   Constraint is updated indirectly    1
Flow  50           end   Indirectly used via local pointe    1
Flow  50           end   Indirectly used via local pointe    1
Flow  50           end   Indirectly used via local pointe    1
Flow  50           end   Indirectly used via local pointe    1
Warn   3           end   Suppress warnings on Implicit_Re    1

Source Filename: dag-type_constraint.adb
Listing Filename: dag-type_constraint.lsb
Unit name: DAG.Type_Constraint
Unit type: subunit
Unit has been analysed, any errors are listed below.

2 error(s) or warning(s)

Line
end Process_Type_Rec;
--- Warning : 10: The body of subprogram Process_Type_Rec is hidden - hidden text is ignored by the Examiner.
end Make;
--- Warning : 10: The body of subprogram Make is hidden - hidden text is ignored by the Examiner.

Source Filename: dag_io.adb
Listing Filename: dag_io.lsb
Unit name: DAG_IO
Unit type: package body
Unit has been analysed, any errors are listed below.

2 error(s) or warning(s)

Line
end Print_DAG_Dot;
--- Warning : 10: The body of subprogram Print_DAG_Dot is hidden - hidden text is ignored by the Examiner.
end Print_Heap_Dot;
--- Warning : 10: The body of subprogram Print_Heap_Dot is hidden - hidden text is ignored by the Examiner.

Source Filename: clists.adb
Listing Filename: clists.lsb
Unit name: Clists
Unit type: package body
Unit has been analysed, any errors are listed below.

2 error(s) or warning(s)

Line
if Cells.Is_Null_Cell (N) then
--- Warning :402: Default assertion planted to cut loop.
exit when Cells.Is_Null_Cell (ListCell);
--- Warning :402: Default assertion planted to cut loop.
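The many ":402: Default assertion planted to cut loop" warnings in these report sections are advisory: when a loop carries no user-written proof annotation at its cut point, the Examiner plants a default assertion there so that verification-condition generation can still cut the loop. Below is a minimal illustrative sketch of the alternative, a loop with an explicit cut-point assertion; the subprogram and variable names are invented for illustration and do not come from the units reported here.

   procedure Count_Down (X : in out Natural)
   --# derives X from X;
   is
   begin
      while X > 0 loop
         --# assert X > 0;
         -- A user-supplied cut-point assertion; without one, the Examiner
         -- plants a default assertion and issues warning 402 as above.
         X := X - 1;
      end loop;
   end Count_Down;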
Source Filename: cstacks.adb
Listing Filename: cstacks.lsb
Unit name: CStacks
Unit type: package body
Unit has been analysed, any errors are listed below.

No errors found

Source Filename: labels.adb
Listing Filename: labels.lsb
Unit name: Labels
Unit type: package body
Unit has been analysed, any errors are listed below.

3 error(s) or warning(s)

Line
pragma Unreferenced (LastPair); -- Unused at present
^
--- Warning : 3: Pragma - ignored by the Examiner.
exit when Pairs.IsNullPair (Pair_2);
--- Warning :402: Default assertion planted to cut loop.
exit when Pairs.IsNullPair (Pair_1);
--- Warning :402: Default assertion planted to cut loop.

Source Filename: pairs.adb
Listing Filename: pairs.lsb
Unit name: Pairs
Unit type: package body
Unit has been analysed, any errors are listed below.

10 error(s) or warning(s)

Line
if Cells.Is_Null_Cell (ModVarCell) then
--- Warning :402: Default assertion planted to cut loop.
exit when Cells.Is_Null_Cell (ModCell);
--- Warning :402: Default assertion planted to cut loop.
exit when Cells.Is_Null_Cell (P);
--- Warning :402: Default assertion planted to cut loop.
--- Warning :402: Default assertion planted to cut loop.
exit when Cells.Is_Null_Cell (ModCell);
--- Warning :402: Default assertion planted to cut loop.
List1_Cell := Clists.FirstCell (Heap, List1);
--- Warning :402: Default assertion planted to cut loop.
exit when CStacks.IsEmpty (UnexploredCellStack);
--- Warning :402: Default assertion planted to cut loop.
exit when CStacks.IsEmpty (UnexploredCellStack);
--- Warning :402: Default assertion planted to cut loop.
exit when CStacks.IsEmpty (DefunctCellStack);
--- Warning :402: Default assertion planted to cut loop.
exit when CStacks.IsEmpty (MarkedCellStack);
--- Warning :402: Default assertion planted to cut loop.

Expected messages marked with the accept annotation
Type Msg    Lines        Reason                            Match
     No.   From    To                                      No.    Line
Flow  10                 MarkedCellStack unused here         1
Flow  10                 MarkedCellStack unused here         1

Source Filename: pile.adb
Listing Filename: pile.lsb
Unit name: Pile
Unit type: package body
Unit has been analysed, any errors are listed below.

6 error(s) or warning(s)

Line
with Debug;
^
--- Warning : 1: The identifier Debug is either undeclared or not visible at this point.
with ExaminerConstants;
^
--- Warning : 1: The identifier ExaminerConstants is either undeclared or not visible at this point.
exit when Symbol = NodeSymbol (Heap, NextNode);
--- Warning :402: Default assertion planted to cut loop.
exit when Symbol = NodeSymbol (Heap, NextNode);
--- Warning :402: Default assertion planted to cut loop.
end PrintPile;
--- Warning : 10: The body of subprogram PrintPile is hidden - hidden text is ignored by the Examiner.
Result := Dictionary.Declared_Before (NodeSymbol (Heap, CurrentNode), NodeSymbol (Heap, NextNode));
--- Warning :402: Default assertion planted to cut loop.

Source Filename: structures.adb
Listing Filename: structures.lsb
Unit name: Structures
Unit type: package body
Unit has been analysed, any errors are listed below.

4 error(s) or warning(s)

Line
exit when CStacks.IsEmpty (UnexploredCellStack);
--- Warning :402: Default assertion planted to cut loop.
exit when CStacks.IsEmpty (CopiedCellStack);
--- Warning :402: Default assertion planted to cut loop.
exit when CStacks.IsEmpty (UnexploredCellStack);
--- Warning :402: Default assertion planted to cut loop.
exit when CStacks.IsEmpty (DefunctCellStack);
--- Warning :402: Default assertion planted to cut loop.
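The "Expected messages marked with the accept annotation" tables in these sections list flow messages that the source code justifies in advance, so the Examiner reports them as expected rather than as outstanding errors; the Match No. column appears to count how many occurrences each justification matched. The following is a rough, hypothetical sketch of the mechanism (the names and the justification text are invented, and the exact argument form of the annotation should be checked against the Examiner user manual); the F designator corresponds to the Flow rows in the report's Type column.

   --# accept F, 10, Unused_Result, "Expected ineffective assignment";
   Unused_Result := Compute_Value (X);  -- would otherwise raise flow message 10
   --# end accept;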
Source Filename: graph.adb Listing Filename: graph.lsb Unit name: Graph Unit type: package body Unit has been analysed, any errors are listed below. 3 error(s) or warning(s) Line with E_Strings.Not_SPARK; ^ --- Warning : 1: The identifier Not_SPARK is either undeclared or not visible at this point. end Dump_Graph_Dot; --- Warning : 10: The body of subprogram Dump_Graph_Dot is hidden - hidden text is ignored by the Examiner. end Dump_Graph_Table; --- Warning : 10: The body of subprogram Dump_Graph_Table is hidden - hidden text is ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 23 Initialization is total 1 Flow 23 Initialization is total 1 Flow 23 Initialization is total 1 Flow 23 Initialization is total 1 Flow 23 Initialization is total 1 Flow 23 Initialization is total 1 Flow 23 Initialization is total 1 Flow 602 end Initialization is total 1 Flow 602 end Initialization is total 1 Flow 602 end Initialization is total 1 Flow 602 end Initialization is total 1 Flow 602 end Initialization is total 1 Flow 602 end Initialization is total 1 Flow 602 end Initialization is total 1 Flow 601 end False coupling OK 1 Flow 41 Stable expression expected here 1 Flow 41 Stable expression here OK 1 Flow 41 Stable expression here expected 2 Flow 23 end Initialization is total 1 Flow 23 end Initialization is total 1 Flow 23 end Initialization is total 1 Flow 23 end Initialization is total 1 Flow 23 end Initialization is total 1 Flow 23 end Initialization is total 1 Flow 23 end Initialization is total 1 Flow 602 end Initialization is total 1 Flow 602 end Initialization is total 1 Flow 602 end Initialization is total 1 Flow 602 end Initialization is total 1 Flow 602 end Initialization is total 1 Flow 602 end Initialization is total 1 Flow 602 end Initialization is total 1 Source Filename: stmtstack.adb Listing Filename: stmtstack.lsb Unit name: StmtStack Unit type: package body Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line with Debug; ^ --- Warning : 1: The identifier Debug is either undeclared or not visible at this point. end Dump_Stack; --- Warning : 10: The body of subprogram Dump_Stack is hidden - hidden text is ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective statement 1 Source Filename: vcg.adb Listing Filename: vcg.lsb Unit name: VCG Unit type: package body Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line with Fatal; ^ --- Warning : 1: The identifier Fatal is either undeclared or not visible at this point. end Generate_VCs_Local; --- Warning : 9: The body of subprogram Generate_VCs_Local has a hidden exception handler - analysis and verification of contracts for this handler have not been performed. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 22 We are aware that Use_Windows_Co 1 Flow 601 end False coupling through SPARK_IO. 
1 Flow 601 end ignore data coupling between fil 1 Flow 601 end ignore data coupling between fil 1 Flow 601 end ignore data coupling between fil 1 Flow 601 end ignore data coupling between fil 1 Flow 601 end ignore data coupling between fil 1 Flow 601 end ignore data coupling between fil 1 Flow 601 end ignore data coupling between fil 1 Flow 601 end ignore data coupling between fil 1 Flow 601 end false coupling through SPARK_IO 1 Flow 601 end false coupling through SPARK_IO 1 Flow 601 end false coupling through SPARK_IO 1 Flow 601 end false coupling through SPARK_IO 1 Flow 601 end false coupling through SPARK_IO 1 Flow 601 end false coupling through SPARK_IO 1 Flow 10 Ineffective assignment here OK 1 Flow 33 end Ancestor_Name not referenced her 1 Flow 10 Expected ineffective assignment 4 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 33 end Expected Success to be neither r 1 Source Filename: vcg-producevcs.adb Listing Filename: vcg-producevcs.lsb Unit name: VCG.ProduceVCs Unit type: subunit Unit has been analysed, any errors are listed below. 5 error(s) or warning(s) Line end Dump_Used_Symbols; --- Warning : 10: The body of subprogram Dump_Used_Symbols is hidden - hidden text is ignored by the Examiner. pragma Unreferenced (Dump_Used_Symbols); ^ --- Warning : 3: Pragma - ignored by the Examiner. while not Declarations.IsNullIterator (Iterator) loop --- Warning :402: Default assertion planted to cut loop. Change_Made := False; --- Warning :402: Default assertion planted to cut loop. while not Declarations.IsNullIterator (Iterator) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem.adb Listing Filename: sem.lsb Unit name: Sem Unit type: package body Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-add_derives_stream_effects.adb Listing Filename: sem-add_derives_stream_effects.lsb Unit name: Sem.Add_Derives_Stream_Effects Unit type: subunit Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line while not Dictionary.IsNullIterator (Export_It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Import_It) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-add_record_sub_components.adb Listing Filename: sem-add_record_sub_components.lsb Unit name: Sem.Add_Record_Sub_Components Unit type: subunit Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line for I in Natural range 1 .. Number_Of_Non_Extended_Components loop --- Warning :402: Default assertion planted to cut loop. --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-aggregate_stack.adb Listing Filename: sem-aggregate_stack.lsb Unit name: Sem.Aggregate_Stack Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 30 end Variable not referenced nor expo 1 Flow 30 end Variable not referenced nor expo 1 Flow 30 end Variable not referenced nor expo 1 Flow 30 end Variable not referenced nor expo 1 Flow 50 end Value is not derived from the im 1 Source Filename: sem-assignment_check.adb Listing Filename: sem-assignment_check.lsb Unit name: Sem.Assignment_Check Unit type: subunit Unit has been analysed, any errors are listed below. 
No errors found Source Filename: sem-check_announced_types_declared.adb Listing Filename: sem-check_announced_types_declared.lsb Unit name: Sem.Check_Announced_Types_Declared Unit type: subunit Unit has been analysed, any errors are listed below. 3 error(s) or warning(s) Line while not Dictionary.IsNullIterator (Own_List) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Own_List) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Type_List) loop --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 41 Expected stable expression 1 Source Filename: sem-check_ceiling_priority.adb Listing Filename: sem-check_ceiling_priority.lsb Unit name: Sem.Check_Ceiling_Priority Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 2 Flow 33 end Expected to be neither reference 1 Source Filename: sem-check_closing_identifier.adb Listing Filename: sem-check_closing_identifier.lsb Unit name: Sem.Check_Closing_Identifier Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-check_interrupt_property_consistency.adb Listing Filename: sem-check_interrupt_property_consistency.lsb Unit name: Sem.Check_Interrupt_Property_Consistency Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-check_named_association.adb Listing Filename: sem-check_named_association.lsb Unit name: Sem.Check_Named_Association Unit type: subunit Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line while not Dictionary.IsNullIterator (My_Dict_It) loop --- Warning :402: Default assertion planted to cut loop. while not Is_Null (It => It) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-check_no_overloading_from_tagged_ops.adb Listing Filename: sem-check_no_overloading_from_tagged_ops.lsb Unit name: Sem.Check_No_Overloading_From_Tagged_Ops Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-check_no_overloading_from_tagged_ops-successfully_overrides.adb Listing Filename: sem-check_no_overloading_from_tagged_ops-successfully_overrides.lsb Unit name: Sem.Check_No_Overloading_From_Tagged_Ops.Successfully_Overrides Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line for I in Natural range 1 .. Number_Of_Subprogram_Parameters loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-check_package_prefix.adb Listing Filename: sem-check_package_prefix.lsb Unit name: Sem.Check_Package_Prefix Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-check_priority_property_consistency.adb Listing Filename: sem-check_priority_property_consistency.lsb Unit name: Sem.Check_Priority_Property_Consistency Unit type: subunit Unit has been analysed, any errors are listed below. 
No errors found Source Filename: sem-check_priority_range.adb Listing Filename: sem-check_priority_range.lsb Unit name: Sem.Check_Priority_Range Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 4 Flow 33 end Expected to be neither reference 1 Source Filename: sem-check_protected_modifier_consistency.adb Listing Filename: sem-check_protected_modifier_consistency.lsb Unit name: Sem.Check_Protected_Modifier_Consistency Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-check_suspendable_property_consistency.adb Listing Filename: sem-check_suspendable_property_consistency.lsb Unit name: Sem.Check_Suspendable_Property_Consistency Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-check_task_modifier_consistency.adb Listing Filename: sem-check_task_modifier_consistency.lsb Unit name: Sem.Check_Task_Modifier_Consistency Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-check_valid_ident.adb Listing Filename: sem-check_valid_ident.lsb Unit name: Sem.Check_Valid_Ident Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-compunit.adb Listing Filename: sem-compunit.lsb Unit name: Sem.CompUnit Unit type: subunit Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line while It /= Dictionary.NullIterator loop --- Warning :402: Default assertion planted to cut loop. while It /= Dictionary.NullIterator loop --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 41 Expected stable expression 1 Flow 41 Expected stable expression 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 41 Stable expression OK here 1 Flow 41 Language_Profile expected to be 1 Flow 33 end Expected Unused to be neither re 1 Flow 33 end Expected Unused_Data_Flow_Error_ 1 Source Filename: sem-compunit-checkembedbodies.adb Listing Filename: sem-compunit-checkembedbodies.lsb Unit name: Sem.CompUnit.CheckEmbedBodies Unit type: subunit Unit has been analysed, any errors are listed below. 3 error(s) or warning(s) Line while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while Own_Var_It /= Dictionary.NullIterator loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Pack_List) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-compunit-checkpackageneedsbody.adb Listing Filename: sem-compunit-checkpackageneedsbody.lsb Unit name: Sem.CompUnit.CheckPackageNeedsBody Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-compunit-checksuspendslistaccountedfor.adb Listing Filename: sem-compunit-checksuspendslistaccountedfor.lsb Unit name: Sem.CompUnit.CheckSuspendsListAccountedFor Unit type: subunit Unit has been analysed, any errors are listed below. 
1 error(s) or warning(s) Line while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-compunit-stack.adb Listing Filename: sem-compunit-stack.lsb Unit name: Sem.CompUnit.Stack Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Source Filename: sem-compunit-up_wf_package_body.adb Listing Filename: sem-compunit-up_wf_package_body.lsb Unit name: Sem.CompUnit.Up_Wf_Package_Body Unit type: subunit Unit has been analysed, any errors are listed below. 12 error(s) or warning(s) Line while not Dictionary.IsNullIterator (Own_Var_List) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Constituent_List) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Own_Var_List) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Constituent_List) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Init_Own_Var_List) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Constituent_List) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-compunit-up_wf_protected_body.adb Listing Filename: sem-compunit-up_wf_protected_body.lsb Unit name: Sem.CompUnit.Up_Wf_Protected_Body Unit type: subunit Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line while not Dictionary.IsNullIterator (Subprog_It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Virtual_Element_It) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-compunit-up_wf_subprogram_body.adb Listing Filename: sem-compunit-up_wf_subprogram_body.lsb Unit name: Sem.CompUnit.Up_Wf_Subprogram_Body Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-compunit-up_wf_task_body.adb Listing Filename: sem-compunit-up_wf_task_body.lsb Unit name: Sem.CompUnit.Up_Wf_Task_Body Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-compunit-walkstatements.adb Listing Filename: sem-compunit-walkstatements.lsb Unit name: Sem.CompUnit.WalkStatements Unit type: subunit Unit has been analysed, any errors are listed below. 7 error(s) or warning(s) Line pragma Unreferenced (Unused); ^ --- Warning : 3: Pragma - ignored by the Examiner. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. 
while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Constituent_It) loop --- Warning :402: Default assertion planted to cut loop. while not Heap.IsNullPointer (The_Export_Atom) loop --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 23 end Partial initialization here OK 1 Flow 30 end Variable not referenced nor expo 1 Flow 602 end Partial initialization here OK 1 Flow 30 end Variable not referenced nor expo 1 Flow 30 end Variable not referenced nor expo 1 Flow 30 end Used for precondition only 1 Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Flow 41 Expected stable expression 1 Source Filename: sem-compunit-walkstatements-checkformutuallyexclusivebranches.adb Listing Filename: sem-compunit-walkstatements-checkformutuallyexclusivebranches.lsb Unit name: Sem.CompUnit.WalkStatements.CheckForMutuallyExclusiveBranches Unit type: subunit Unit has been analysed, any errors are listed below. 5 error(s) or warning(s) Line while Child /= STree.NullNode and then Syntax_Node_Type (Node => Child) /= Child_Type loop --- Warning :402: Default assertion planted to cut loop. while Next_Instance /= STree.NullNode loop --- Warning :402: Default assertion planted to cut loop. while Child /= STree.NullNode loop --- Warning :402: Default assertion planted to cut loop. while not STree.IsNull (Iter) loop --- Warning :402: Default assertion planted to cut loop. while not STree.IsNull (Iter) and then Common_Ancestor = STree.NullNode loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-compunit-walkstatements-down_loop.adb Listing Filename: sem-compunit-walkstatements-down_loop.lsb Unit name: Sem.CompUnit.WalkStatements.Down_Loop Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-compunit-walkstatements-up_case.adb Listing Filename: sem-compunit-walkstatements-up_case.lsb Unit name: Sem.CompUnit.WalkStatements.Up_Case Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Flow 33 end Expected to be neither reference 1 Flow 33 end Expected to be neither reference 1 Source Filename: sem-compunit-walkstatements-up_loop.adb Listing Filename: sem-compunit-walkstatements-up_loop.lsb Unit name: Sem.CompUnit.WalkStatements.Up_Loop Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-compunit-walkstatements-variableupdatehistory.adb Listing Filename: sem-compunit-walkstatements-variableupdatehistory.lsb Unit name: Sem.CompUnit.WalkStatements.VariableUpdateHistory Unit type: subunit Unit has been analysed, any errors are listed below. 3 error(s) or warning(s) Line Next_Atom := Heap.APointer (The_Heap, Atom); --- Warning :402: Default assertion planted to cut loop. 
while Value /= Variable and not Heap.IsNullPointer (Heap.APointer (The_Heap, Atom)) loop --- Warning :402: Default assertion planted to cut loop. while Value /= Variable and not Heap.IsNullPointer (Heap.APointer (The_Heap, Atom)) loop --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 30 end Used for precondition only 1 Flow 31 end History is logically updated but 1 Flow 50 end History is logically dependent o 1 Flow 50 end History is logically dependent o 1 Flow 50 end History is logically dependent o 1 Source Filename: sem-compunit-walkstatements-wf_assign.adb Listing Filename: sem-compunit-walkstatements-wf_assign.lsb Unit name: Sem.CompUnit.WalkStatements.Wf_Assign Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Source Filename: sem-compunit-walkstatements-wf_case.adb Listing Filename: sem-compunit-walkstatements-wf_case.lsb Unit name: Sem.CompUnit.WalkStatements.Wf_Case Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Source Filename: sem-compunit-walkstatements-wf_case_choice.adb Listing Filename: sem-compunit-walkstatements-wf_case_choice.lsb Unit name: Sem.CompUnit.WalkStatements.Wf_Case_Choice Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-compunit-walkstatements-wf_condition.adb Listing Filename: sem-compunit-walkstatements-wf_condition.lsb Unit name: Sem.CompUnit.WalkStatements.Wf_Condition Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-compunit-walkstatements-wf_delay_until.adb Listing Filename: sem-compunit-walkstatements-wf_delay_until.lsb Unit name: Sem.CompUnit.WalkStatements.Wf_Delay_Until Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-compunit-walkstatements-wf_exit.adb Listing Filename: sem-compunit-walkstatements-wf_exit.lsb Unit name: Sem.CompUnit.WalkStatements.Wf_Exit Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-compunit-walkstatements-wf_loop_param.adb Listing Filename: sem-compunit-walkstatements-wf_loop_param.lsb Unit name: Sem.CompUnit.WalkStatements.Wf_Loop_Param Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 2 Flow 33 end Expected to be neither reference 1 Source Filename: sem-compunit-walkstatements-wf_proc_call.adb Listing Filename: sem-compunit-walkstatements-wf_proc_call.lsb Unit name: Sem.CompUnit.WalkStatements.Wf_Proc_Call Unit type: subunit Unit has been analysed, any errors are listed below. 6 error(s) or warning(s) Line while not SeqAlgebra.IsNullMember (Current_Member) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. 
while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not SeqAlgebra.IsNullMember (Member_Of_Seq) loop --- Warning :402: Default assertion planted to cut loop. Calling_Region := Dictionary.GetRegion (Calling_Scope); --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 We already know. 1 Source Filename: sem-compunit-walkstatements-wf_return.adb Listing Filename: sem-compunit-walkstatements-wf_return.lsb Unit name: Sem.CompUnit.WalkStatements.Wf_Return Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-compunit-wf_body_stub.adb Listing Filename: sem-compunit-wf_body_stub.lsb Unit name: Sem.CompUnit.Wf_Body_Stub Unit type: subunit Unit has been analysed, any errors are listed below. 3 error(s) or warning(s) Line while Syntax_Node_Type (Node => Outer_Node) = SP_Symbols.later_declarative_item_rep loop --- Warning :402: Default assertion planted to cut loop. while Syntax_Node_Type (Node => Outer_Node) = SP_Symbols.protected_operation_item loop --- Warning :402: Default assertion planted to cut loop. while Global_Item /= Dictionary.NullIterator loop --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Export not required here 1 Flow 33 end Export not required here 1 Source Filename: sem-compunit-wf_entry_body.adb Listing Filename: sem-compunit-wf_entry_body.lsb Unit name: Sem.CompUnit.Wf_Entry_Body Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-compunit-wf_generic_declaration.adb Listing Filename: sem-compunit-wf_generic_declaration.lsb Unit name: Sem.CompUnit.Wf_Generic_Declaration Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-compunit-wf_generic_package_instantiation.adb Listing Filename: sem-compunit-wf_generic_package_instantiation.lsb Unit name: Sem.CompUnit.Wf_Generic_Package_Instantiation Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line pragma Unreferenced (Scope); ^ --- Warning : 3: Pragma - ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 30 end Procedure body not implemented 1 Flow 50 end Procedure body not implemented 1 Flow 50 end Procedure body not implemented 1 Source Filename: sem-compunit-wf_machine_code_insertion.adb Listing Filename: sem-compunit-wf_machine_code_insertion.lsb Unit name: Sem.CompUnit.Wf_Machine_Code_Insertion Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-compunit-wf_package_body.adb Listing Filename: sem-compunit-wf_package_body.lsb Unit name: Sem.CompUnit.Wf_Package_Body Unit type: subunit Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line while not Dictionary.IsNullIterator (Owned_Packages) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Own_Vars) loop --- Warning :402: Default assertion planted to cut loop. 
Source Filename: sem-compunit-wf_package_body-wf_refine.adb Listing Filename: sem-compunit-wf_package_body-wf_refine.lsb Unit name: Sem.CompUnit.Wf_Package_Body.Wf_Refine Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-compunit-wf_package_body-wf_refine-wf_clause.adb Listing Filename: sem-compunit-wf_package_body-wf_refine-wf_clause.lsb Unit name: Sem.CompUnit.Wf_Package_Body.Wf_Refine.Wf_Clause Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-compunit-wf_package_initialization.adb Listing Filename: sem-compunit-wf_package_initialization.lsb Unit name: Sem.CompUnit.Wf_Package_Initialization Unit type: subunit Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line while not Dictionary.IsNullIterator (Init_Own_Var_List) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Constituent_List) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-compunit-wf_proof_function_declaration.adb Listing Filename: sem-compunit-wf_proof_function_declaration.lsb Unit name: Sem.CompUnit.Wf_Proof_Function_Declaration Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 As can be seen from the name, th 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Source Filename: sem-compunit-wf_proof_renaming_declaration.adb Listing Filename: sem-compunit-wf_proof_renaming_declaration.lsb Unit name: Sem.CompUnit.Wf_Proof_Renaming_Declaration Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line pragma Unreferenced (Scope); ^ --- Warning : 3: Pragma - ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 30 end Left unused for future expansion 1 Flow 50 end Left unused for future expansion 1 Flow 50 end Left unused for future expansion 1 Source Filename: sem-compunit-wf_protected_body.adb Listing Filename: sem-compunit-wf_protected_body.lsb Unit name: Sem.CompUnit.Wf_Protected_Body Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-compunit-wf_subprogram_body.adb Listing Filename: sem-compunit-wf_subprogram_body.lsb Unit name: Sem.CompUnit.Wf_Subprogram_Body Unit type: subunit Unit has been analysed, any errors are listed below. 9 error(s) or warning(s) Line while It /= Dictionary.NullIterator loop --- Warning :402: Default assertion planted to cut loop. while Inherited_Package_It /= Dictionary.NullIterator loop --- Warning :402: Default assertion planted to cut loop. while It /= Dictionary.NullIterator loop --- Warning :402: Default assertion planted to cut loop. while It /= Dictionary.NullIterator loop --- Warning :402: Default assertion planted to cut loop. while Inherited_Package_It /= Dictionary.NullIterator loop --- Warning :402: Default assertion planted to cut loop. while It /= Dictionary.NullIterator loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Inherit_It) loop --- Warning :402: Default assertion planted to cut loop. 
while Global_Item /= Dictionary.NullIterator loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-compunit-wf_subprogram_body-processpartitionannotation.adb Listing Filename: sem-compunit-wf_subprogram_body-processpartitionannotation.lsb Unit name: Sem.CompUnit.Wf_Subprogram_Body.ProcessPartitionAnnotation Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-compunit-wf_subunit.adb Listing Filename: sem-compunit-wf_subunit.lsb Unit name: Sem.CompUnit.Wf_Subunit Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-compunit-wf_task_body.adb Listing Filename: sem-compunit-wf_task_body.lsb Unit name: Sem.CompUnit.Wf_Task_Body Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line while Global_Item /= Dictionary.NullIterator loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-compunit-wf_use_type_clause.adb Listing Filename: sem-compunit-wf_use_type_clause.lsb Unit name: Sem.CompUnit.Wf_Use_Type_Clause Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-constraint_check.adb Listing Filename: sem-constraint_check.lsb Unit name: Sem.Constraint_Check Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-convert_tagged_actual.adb Listing Filename: sem-convert_tagged_actual.lsb Unit name: Sem.Convert_Tagged_Actual Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line exit when Dictionary.Types_Are_Equal --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-create_implicit_positive_subtype.adb Listing Filename: sem-create_implicit_positive_subtype.lsb Unit name: Sem.Create_Implicit_Positive_Subtype Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-create_interrupt_stream_variable.adb Listing Filename: sem-create_interrupt_stream_variable.lsb Unit name: Sem.Create_Interrupt_Stream_Variable Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line while not (Found or else Dictionary.IsNullIterator (It)) loop --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Source Filename: sem-dependency_relation.adb Listing Filename: sem-dependency_relation.lsb Unit name: Sem.Dependency_Relation Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-dependency_relation-check_derives_consistency.adb Listing Filename: sem-dependency_relation-check_derives_consistency.lsb Unit name: Sem.Dependency_Relation.Check_Derives_Consistency Unit type: subunit Unit has been analysed, any errors are listed below. 12 error(s) or warning(s) Line while not Dictionary.IsNullIterator (Concrete_Export_It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Constituent_It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Concrete_Export_It) loop --- Warning :402: Default assertion planted to cut loop. 
while not SeqAlgebra.IsNullMember (Mem) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Export_It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Import_It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Export_It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Import_It) loop --- Warning :402: Default assertion planted to cut loop. exit when SeqAlgebra.IsNullMember (R_Exp); --- Warning :402: Default assertion planted to cut loop. exit when SeqAlgebra.IsNullMember (Err_Imp); --- Warning :402: Default assertion planted to cut loop. exit when SeqAlgebra.IsNullMember (RT_Exp); --- Warning :402: Default assertion planted to cut loop. exit when SeqAlgebra.IsNullMember (Err_Imp); --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-dependency_relation-create_full_dependency.adb Listing Filename: sem-dependency_relation-create_full_dependency.lsb Unit name: Sem.Dependency_Relation.Create_Full_Dependency Unit type: subunit Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line Member := SeqAlgebra.NextMember (The_Heap, Member); --- Warning :402: Default assertion planted to cut loop. while not SeqAlgebra.IsNullMember (Member) loop --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 41 Expected stable expression 1 Source Filename: sem-dependency_relation-create_full_subprog_dependency.adb Listing Filename: sem-dependency_relation-create_full_subprog_dependency.lsb Unit name: Sem.Dependency_Relation.Create_Full_Subprog_Dependency Unit type: subunit Unit has been analysed, any errors are listed below. 4 error(s) or warning(s) Line while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not SeqAlgebra.IsNullMember (Member) loop --- Warning :402: Default assertion planted to cut loop. while not SeqAlgebra.IsNullMember (Import) loop --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Source Filename: sem-dependency_relation-wf_dependency_relation.adb Listing Filename: sem-dependency_relation-wf_dependency_relation.lsb Unit name: Sem.Dependency_Relation.Wf_Dependency_Relation Unit type: subunit Unit has been analysed, any errors are listed below. 10 error(s) or warning(s) Line while Next_Export /= Dictionary.NullIterator loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not SeqAlgebra.IsNullMember (Member) loop --- Warning :402: Default assertion planted to cut loop. while not SeqAlgebra.IsNullMember (Member) loop --- Warning :402: Default assertion planted to cut loop. while not SeqAlgebra.IsNullMember (Member) loop --- Warning :402: Default assertion planted to cut loop. Member := SeqAlgebra.NextMember (The_Heap, Member); --- Warning :402: Default assertion planted to cut loop. 
while Next_Param /= Dictionary.NullIterator loop --- Warning :402: Default assertion planted to cut loop. while Next_Global /= Dictionary.NullIterator loop --- Warning :402: Default assertion planted to cut loop. while Export_It /= Dictionary.NullIterator loop --- Warning :402: Default assertion planted to cut loop. while Import_It /= Dictionary.NullIterator loop --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Flow 10 Expected ineffective assignment 1 Flow 41 Expected stable expression 1 Flow 33 end Expected to be neither reference 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 41 Expected stable expression 1 Flow 41 Expected stable expression 1 Source Filename: sem-find_actual_node.adb Listing Filename: sem-find_actual_node.lsb Unit name: Sem.Find_Actual_Node Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-find_previous_package.adb Listing Filename: sem-find_previous_package.lsb Unit name: Sem.Find_Previous_Package Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-get_literal_value.adb Listing Filename: sem-get_literal_value.lsb Unit name: Sem.Get_Literal_Value Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-get_subprogram_anno_key_nodes.adb Listing Filename: sem-get_subprogram_anno_key_nodes.lsb Unit name: Sem.Get_Subprogram_Anno_Key_Nodes Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-get_type_bounds.adb Listing Filename: sem-get_type_bounds.lsb Unit name: Sem.Get_Type_Bounds Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-illegal_unconstrained.adb Listing Filename: sem-illegal_unconstrained.lsb Unit name: Sem.Illegal_Unconstrained Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-indexes_match.adb Listing Filename: sem-indexes_match.lsb Unit name: Sem.Indexes_Match Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line while not Dictionary.IsNullIterator (Tgt_It) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-in_package_initialization.adb Listing Filename: sem-in_package_initialization.lsb Unit name: Sem.In_Package_Initialization Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-is_enclosing_package.adb Listing Filename: sem-is_enclosing_package.lsb Unit name: Sem.Is_Enclosing_Package Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line while not Dictionary.IsGlobalScope (Scope_Chain) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-is_external_interface.adb Listing Filename: sem-is_external_interface.lsb Unit name: Sem.Is_External_Interface Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-needs_synthetic_dependency.adb Listing Filename: sem-needs_synthetic_dependency.lsb Unit name: Sem.Needs_Synthetic_Dependency Unit type: subunit Unit has been analysed, any errors are listed below. 
No errors found Source Filename: sem-plant_constraining_type.adb Listing Filename: sem-plant_constraining_type.lsb Unit name: Sem.Plant_Constraining_Type Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-range_check.adb Listing Filename: sem-range_check.lsb Unit name: Sem.Range_Check Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-subprogram_specification.adb Listing Filename: sem-subprogram_specification.lsb Unit name: Sem.Subprogram_Specification Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-substitute_protected_type_self_reference.adb Listing Filename: sem-substitute_protected_type_self_reference.lsb Unit name: Sem.Substitute_Protected_Type_Self_Reference Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-unexpected_initialization.adb Listing Filename: sem-unexpected_initialization.lsb Unit name: Sem.Unexpected_Initialization Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-unknown_type_record.adb Listing Filename: sem-unknown_type_record.lsb Unit name: Sem.Unknown_Type_Record Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p.adb Listing Filename: sem-walk_expression_p.lsb Unit name: Sem.Walk_Expression_P Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-add_name.adb Listing Filename: sem-walk_expression_p-add_name.lsb Unit name: Sem.Walk_Expression_P.Add_Name Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-attribute_designator_type_from_context.adb Listing Filename: sem-walk_expression_p-attribute_designator_type_from_context.lsb Unit name: Sem.Walk_Expression_P.Attribute_Designator_Type_From_Context Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-calc_binary_operator.adb Listing Filename: sem-walk_expression_p-calc_binary_operator.lsb Unit name: Sem.Walk_Expression_P.Calc_Binary_Operator Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-check_binary_operator.adb Listing Filename: sem-walk_expression_p-check_binary_operator.lsb Unit name: Sem.Walk_Expression_P.Check_Binary_Operator Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-check_binary_operator-homo_impl_type_conv.adb Listing Filename: sem-walk_expression_p-check_binary_operator-homo_impl_type_conv.lsb Unit name: Sem.Walk_Expression_P.Check_Binary_Operator.Homo_Impl_Type_Conv Unit type: subunit Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line end Debug_1; --- Warning : 10: The body of subprogram Debug_1 is hidden - hidden text is ignored by the Examiner. end Debug_2; --- Warning : 10: The body of subprogram Debug_2 is hidden - hidden text is ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. 
Line Flow 10 Final value of New_Val not used 1 Flow 10 Final value of New_Val not used 1 Flow 10 Final value of New_Val not used 1 Flow 10 Final value of New_Val not used 1 Flow 33 end Final value of New_Val not used 1 Source Filename: sem-walk_expression_p-create_name_list.adb Listing Filename: sem-walk_expression_p-create_name_list.lsb Unit name: Sem.Walk_Expression_P.Create_Name_List Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-dispose_of_name_list.adb Listing Filename: sem-walk_expression_p-dispose_of_name_list.lsb Unit name: Sem.Walk_Expression_P.Dispose_Of_Name_List Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-down_wf_aggregate.adb Listing Filename: sem-walk_expression_p-down_wf_aggregate.lsb Unit name: Sem.Walk_Expression_P.Down_Wf_Aggregate Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Source Filename: sem-walk_expression_p-down_wf_aggregate_or_expression.adb Listing Filename: sem-walk_expression_p-down_wf_aggregate_or_expression.lsb Unit name: Sem.Walk_Expression_P.Down_Wf_Aggregate_Or_Expression Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 504 Expected Fieldname to have undef 1 Flow 602 end Fieldname always defined if need 1 Source Filename: sem-walk_expression_p-down_wf_name_argument_list.adb Listing Filename: sem-walk_expression_p-down_wf_name_argument_list.lsb Unit name: Sem.Walk_Expression_P.Down_Wf_Name_Argument_List Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-expression_type_from_context.adb Listing Filename: sem-walk_expression_p-expression_type_from_context.lsb Unit name: Sem.Walk_Expression_P.Expression_Type_From_Context Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-exp_stack.adb Listing Filename: sem-walk_expression_p-exp_stack.lsb Unit name: Sem.Walk_Expression_P.Exp_Stack Unit type: subunit Unit has been analysed, any errors are listed below. 3 error(s) or warning(s) Line with Ada.Text_IO; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. end Debug_Item; --- Warning : 10: The body of subprogram Debug_Item is hidden - hidden text is ignored by the Examiner. end Debug_Stack; --- Warning : 10: The body of subprogram Debug_Stack is hidden - hidden text is ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 32 end Safe partial initialisation 1 Flow 31 end Safe partial initialisation 1 Flow 602 end Safe partial initialisation 1 Flow 35 end Importation of the initial value 1 Flow 50 end Value is not derived from the im 1 Source Filename: sem-walk_expression_p-find_named_argument_association_parameter.adb Listing Filename: sem-walk_expression_p-find_named_argument_association_parameter.lsb Unit name: Sem.Walk_Expression_P.Find_Named_Argument_Association_Parameter Unit type: subunit Unit has been analysed, any errors are listed below. 
No errors found Source Filename: sem-walk_expression_p-get_character_literal.adb Listing Filename: sem-walk_expression_p-get_character_literal.lsb Unit name: Sem.Walk_Expression_P.Get_Character_Literal Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-get_string_literal_length.adb Listing Filename: sem-walk_expression_p-get_string_literal_length.lsb Unit name: Sem.Walk_Expression_P.Get_String_Literal_Length Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-null_parameter_record.adb Listing Filename: sem-walk_expression_p-null_parameter_record.lsb Unit name: Sem.Walk_Expression_P.Null_Parameter_Record Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-null_type_record.adb Listing Filename: sem-walk_expression_p-null_type_record.lsb Unit name: Sem.Walk_Expression_P.Null_Type_Record Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-ops_are_same_and_commutative.adb Listing Filename: sem-walk_expression_p-ops_are_same_and_commutative.lsb Unit name: Sem.Walk_Expression_P.Ops_Are_Same_And_Commutative Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-primary_type_from_context.adb Listing Filename: sem-walk_expression_p-primary_type_from_context.lsb Unit name: Sem.Walk_Expression_P.Primary_Type_From_Context Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-put_exp_record.adb Listing Filename: sem-walk_expression_p-put_exp_record.lsb Unit name: Sem.Walk_Expression_P.Put_Exp_Record Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line end Put_Exp_Record; --- Warning : 10: The body of subprogram Put_Exp_Record is hidden - hidden text is ignored by the Examiner. Source Filename: sem-walk_expression_p-range_constraint_type_from_context.adb Listing Filename: sem-walk_expression_p-range_constraint_type_from_context.lsb Unit name: Sem.Walk_Expression_P.Range_Constraint_Type_From_Context Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-simple_expression_type_from_context.adb Listing Filename: sem-walk_expression_p-simple_expression_type_from_context.lsb Unit name: Sem.Walk_Expression_P.Simple_Expression_Type_From_Context Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-stack_identifier.adb Listing Filename: sem-walk_expression_p-stack_identifier.lsb Unit name: Sem.Walk_Expression_P.Stack_Identifier Unit type: subunit Unit has been analysed, any errors are listed below. 3 error(s) or warning(s) Line while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while It /= Dictionary.NullIterator loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-walk_expression_p-type_context_stack.adb Listing Filename: sem-walk_expression_p-type_context_stack.lsb Unit name: Sem.Walk_Expression_P.Type_Context_Stack Unit type: subunit Unit has been analysed, any errors are listed below. 
2 error(s) or warning(s) Line end Debug; --- Warning : 10: The body of subprogram Debug is hidden - hidden text is ignored by the Examiner. end Debug; --- Warning : 10: The body of subprogram Debug is hidden - hidden text is ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 32 end Safe partial initialisation 1 Flow 31 end Safe partial initialisation 1 Flow 602 end Safe partial initialisation 1 Flow 30 end Variable not referenced nor expo 1 Flow 30 end Variable not referenced nor expo 1 Flow 30 end Variable not referenced nor expo 1 Flow 50 end Value is not derived from the im 1 Source Filename: sem-walk_expression_p-unknown_symbol_record.adb Listing Filename: sem-walk_expression_p-unknown_symbol_record.lsb Unit name: Sem.Walk_Expression_P.Unknown_Symbol_Record Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-up_wf_aggregate.adb Listing Filename: sem-walk_expression_p-up_wf_aggregate.lsb Unit name: Sem.Walk_Expression_P.Up_Wf_Aggregate Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expect ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Source Filename: sem-walk_expression_p-up_wf_aggregate_or_expression.adb Listing Filename: sem-walk_expression_p-up_wf_aggregate_or_expression.lsb Unit name: Sem.Walk_Expression_P.Up_Wf_Aggregate_Or_Expression Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line while STree.Syntax_Node_Type (Node => Node) /= SP_Symbols.component_association --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-walk_expression_p-up_wf_name_argument_list.adb Listing Filename: sem-walk_expression_p-up_wf_name_argument_list.lsb Unit name: Sem.Walk_Expression_P.Up_Wf_Name_Argument_List Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Unused_Value not needed here. 1 Flow 33 end Unused_Value not needed here. 1 Source Filename: sem-walk_expression_p-walk_annotation_expression.adb Listing Filename: sem-walk_expression_p-walk_annotation_expression.lsb Unit name: Sem.Walk_Expression_P.Walk_Annotation_Expression Unit type: subunit Unit has been analysed, any errors are listed below. 5 error(s) or warning(s) Line with SPrint; ^ --- Warning : 1: The identifier SPrint is either undeclared or not visible at this point. end Dump_Syntax_Tree; --- Warning : 10: The body of subprogram Dump_Syntax_Tree is hidden - hidden text is ignored by the Examiner. end Dump_Down_Node; --- Warning : 10: The body of subprogram Dump_Down_Node is hidden - hidden text is ignored by the Examiner. end Dump_Up_Node; --- Warning : 10: The body of subprogram Dump_Up_Node is hidden - hidden text is ignored by the Examiner. end Dump_Result; --- Warning : 10: The body of subprogram Dump_Result is hidden - hidden text is ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. 
Line Flow 41 Stable expression here OK 1 Flow 10 Expected ineffective assignment 1 Source Filename: sem-walk_expression_p-walk_annotation_expression-down_wf_quantifier.adb Listing Filename: sem-walk_expression_p-walk_annotation_expression-down_wf_quantifier.lsb Unit name: Sem.Walk_Expression_P.Walk_Annotation_Expression.Down_Wf_Quantifier Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-walk_annotation_expression-down_wf_store.adb Listing Filename: sem-walk_expression_p-walk_annotation_expression-down_wf_store.lsb Unit name: Sem.Walk_Expression_P.Walk_Annotation_Expression.Down_Wf_Store Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-walk_annotation_expression-down_wf_store_list.adb Listing Filename: sem-walk_expression_p-walk_annotation_expression-down_wf_store_list.lsb Unit name: Sem.Walk_Expression_P.Walk_Annotation_Expression.Down_Wf_Store_List Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-walk_annotation_expression-up_wf_quantifier.adb Listing Filename: sem-walk_expression_p-walk_annotation_expression-up_wf_quantifier.lsb Unit name: Sem.Walk_Expression_P.Walk_Annotation_Expression.Up_Wf_Quantifier Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-walk_annotation_expression-up_wf_store.adb Listing Filename: sem-walk_expression_p-walk_annotation_expression-up_wf_store.lsb Unit name: Sem.Walk_Expression_P.Walk_Annotation_Expression.Up_Wf_Store Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line while Next_Node /= End_Node loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-walk_expression_p-walk_annotation_expression-up_wf_store_list.adb Listing Filename: sem-walk_expression_p-walk_annotation_expression-up_wf_store_list.lsb Unit name: Sem.Walk_Expression_P.Walk_Annotation_Expression.Up_Wf_Store_List Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line while STree.Syntax_Node_Type (Node => Node) = SP_Symbols.store_list loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-walk_expression_p-walk_expression.adb Listing Filename: sem-walk_expression_p-walk_expression.lsb Unit name: Sem.Walk_Expression_P.Walk_Expression Unit type: subunit Unit has been analysed, any errors are listed below. 5 error(s) or warning(s) Line with SPrint; ^ --- Warning : 1: The identifier SPrint is either undeclared or not visible at this point. end Dump_Syntax_Tree; --- Warning : 10: The body of subprogram Dump_Syntax_Tree is hidden - hidden text is ignored by the Examiner. end Dump_Down_Node; --- Warning : 10: The body of subprogram Dump_Down_Node is hidden - hidden text is ignored by the Examiner. end Dump_Up_Node; --- Warning : 10: The body of subprogram Dump_Up_Node is hidden - hidden text is ignored by the Examiner. end Dump_Result; --- Warning : 10: The body of subprogram Dump_Result is hidden - hidden text is ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. 
Line Flow 41 Stable expression here OK 1 Flow 10 Expected ineffective assignment 1 Source Filename: sem-walk_expression_p-wf_aggregate_choice.adb Listing Filename: sem-walk_expression_p-wf_aggregate_choice.lsb Unit name: Sem.Walk_Expression_P.Wf_Aggregate_Choice Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-wf_aggregate_choice_rep.adb Listing Filename: sem-walk_expression_p-wf_aggregate_choice_rep.lsb Unit name: Sem.Walk_Expression_P.Wf_Aggregate_Choice_Rep Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line while Next_Node /= End_Node loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-walk_expression_p-wf_ancestor_part.adb Listing Filename: sem-walk_expression_p-wf_ancestor_part.lsb Unit name: Sem.Walk_Expression_P.Wf_Ancestor_Part Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line Current_Record := Dictionary.CurrentSymbol (Dictionary.FirstRecordComponent (Current_Record)); --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-walk_expression_p-wf_arange.adb Listing Filename: sem-walk_expression_p-wf_arange.lsb Unit name: Sem.Walk_Expression_P.Wf_Arange Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Source Filename: sem-walk_expression_p-wf_attribute.adb Listing Filename: sem-walk_expression_p-wf_attribute.lsb Unit name: Sem.Walk_Expression_P.Wf_Attribute Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-wf_attribute_designator.adb Listing Filename: sem-walk_expression_p-wf_attribute_designator.lsb Unit name: Sem.Walk_Expression_P.Wf_Attribute_Designator Unit type: subunit Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line while STree.Syntax_Node_Type (Node => Current_Node) /= SP_Symbols.annotation_primary --- Warning :402: Default assertion planted to cut loop. while STree.Syntax_Node_Type (Node => Current_Node) /= SP_Symbols.simple_expression --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 33 end Expected ineffective assignment 1 Source Filename: sem-walk_expression_p-wf_attribute_designator-calc_attribute.adb Listing Filename: sem-walk_expression_p-wf_attribute_designator-calc_attribute.lsb Unit name: Sem.Walk_Expression_P.Wf_Attribute_Designator.Calc_Attribute Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Flow 10 Expected ineffective assignment 1 Source Filename: sem-walk_expression_p-wf_component_association.adb Listing Filename: sem-walk_expression_p-wf_component_association.lsb Unit name: Sem.Walk_Expression_P.Wf_Component_Association Unit type: subunit Unit has been analysed, any errors are listed below. 
No errors found Source Filename: sem-walk_expression_p-wf_expression.adb Listing Filename: sem-walk_expression_p-wf_expression.lsb Unit name: Sem.Walk_Expression_P.Wf_Expression Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-wf_factor.adb Listing Filename: sem-walk_expression_p-wf_factor.lsb Unit name: Sem.Walk_Expression_P.Wf_Factor Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line end Debug_Print; --- Warning : 10: The body of subprogram Debug_Print is hidden - hidden text is ignored by the Examiner. Source Filename: sem-walk_expression_p-wf_identifier.adb Listing Filename: sem-walk_expression_p-wf_identifier.lsb Unit name: Sem.Walk_Expression_P.Wf_Identifier Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-wf_named_argument_association.adb Listing Filename: sem-walk_expression_p-wf_named_argument_association.lsb Unit name: Sem.Walk_Expression_P.Wf_Named_Argument_Association Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-wf_named_association_rep.adb Listing Filename: sem-walk_expression_p-wf_named_association_rep.lsb Unit name: Sem.Walk_Expression_P.Wf_Named_Association_Rep Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-walk_expression_p-wf_named_record_component_association.adb Listing Filename: sem-walk_expression_p-wf_named_record_component_association.lsb Unit name: Sem.Walk_Expression_P.Wf_Named_Record_Component_Association Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line for I in Positive range --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-walk_expression_p-wf_percent.adb Listing Filename: sem-walk_expression_p-wf_percent.lsb Unit name: Sem.Walk_Expression_P.Wf_Percent Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line while Dictionary.IsLoop (Dictionary.GetRegion (Current_Scope)) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-walk_expression_p-wf_positional_argument_association.adb Listing Filename: sem-walk_expression_p-wf_positional_argument_association.lsb Unit name: Sem.Walk_Expression_P.Wf_Positional_Argument_Association Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line while not Dictionary.IsNullIterator (Tgt_It) loop --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Source Filename: sem-walk_expression_p-wf_positional_association.adb Listing Filename: sem-walk_expression_p-wf_positional_association.lsb Unit name: Sem.Walk_Expression_P.Wf_Positional_Association Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line for I in Positive range Highest_Field_Number .. Number_Of_Fields loop --- Warning :402: Default assertion planted to cut loop. 
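Warning 402, which appears against most of the while and for loops listed above, means the loop contains no user-written proof assertion, so the Examiner plants a default one in order to cut the loop for flow analysis and VC generation. A minimal sketch of the difference, using hypothetical names (Loop_Demo, Count_Down) and the SPARK 2005 annotation style used throughout this archive, not code taken from these units:

   package Loop_Demo
   is
      procedure Count_Down (N : in Natural; Steps : out Natural);
      --# derives Steps from N;
   end Loop_Demo;

   package body Loop_Demo
   is
      procedure Count_Down (N : in Natural; Steps : out Natural)
      is
         Remaining : Natural;
      begin
         Remaining := N;
         Steps     := 0;
         while Remaining > 0 loop
            --# assert Remaining + Steps = N;
            --  A user-written assertion such as the one above cuts the loop.
            --  When it is absent the Examiner plants a default assertion at
            --  this point and reports warning 402, as in the entries above.
            Remaining := Remaining - 1;
            Steps     := Steps + 1;
         end loop;
      end Count_Down;
   end Loop_Demo;

With the --# assert in place the user-supplied expression becomes the loop cut point and warning 402 is not reported; removing it reproduces the "Default assertion planted to cut loop" messages shown in this report.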
Source Filename: sem-walk_expression_p-wf_positional_record_component_association.adb Listing Filename: sem-walk_expression_p-wf_positional_record_component_association.lsb Unit name: Sem.Walk_Expression_P.Wf_Positional_Record_Component_Association Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-wf_primary.adb Listing Filename: sem-walk_expression_p-wf_primary.lsb Unit name: Sem.Walk_Expression_P.Wf_Primary Unit type: subunit Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line if STree.Next_Sibling (Current_Node => Current_Node) /= STree.NullNode then --- Warning :402: Default assertion planted to cut loop. if STree.Syntax_Node_Type (Node => Local_Node) = SP_Symbols.expression then --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 601 end False coupling through SPARK_IO 1 Flow 601 end False coupling through SPARK_IO 1 Flow 601 end False coupling through SPARK_IO 1 Source Filename: sem-walk_expression_p-wf_primary-protected_references_by.adb Listing Filename: sem-walk_expression_p-wf_primary-protected_references_by.lsb Unit name: Sem.Walk_Expression_P.Wf_Primary.Protected_References_By Unit type: subunit Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line Region := Dictionary.GetRegion (Current); --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-walk_expression_p-wf_qualified_expression.adb Listing Filename: sem-walk_expression_p-wf_qualified_expression.lsb Unit name: Sem.Walk_Expression_P.Wf_Qualified_Expression Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-wf_record_component_selector_name.adb Listing Filename: sem-walk_expression_p-wf_record_component_selector_name.lsb Unit name: Sem.Walk_Expression_P.Wf_Record_Component_Selector_Name Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-wf_relation.adb Listing Filename: sem-walk_expression_p-wf_relation.lsb Unit name: Sem.Walk_Expression_P.Wf_Relation Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Source Filename: sem-walk_expression_p-wf_selected_component.adb Listing Filename: sem-walk_expression_p-wf_selected_component.lsb Unit name: Sem.Walk_Expression_P.Wf_Selected_Component Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-wf_simple_expression.adb Listing Filename: sem-walk_expression_p-wf_simple_expression.lsb Unit name: Sem.Walk_Expression_P.Wf_Simple_Expression Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. 
Line Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Flow 10 Ineffective assignment OK here 1 Flow 33 end Unused not referenced OK 1 Source Filename: sem-walk_expression_p-wf_simple_expression_opt.adb Listing Filename: sem-walk_expression_p-wf_simple_expression_opt.lsb Unit name: Sem.Walk_Expression_P.Wf_Simple_Expression_Opt Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line end Debug_Print; --- Warning : 10: The body of subprogram Debug_Print is hidden - hidden text is ignored by the Examiner. Source Filename: sem-walk_expression_p-wf_term.adb Listing Filename: sem-walk_expression_p-wf_term.lsb Unit name: Sem.Walk_Expression_P.Wf_Term Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_expression_p-wf_tilde.adb Listing Filename: sem-walk_expression_p-wf_tilde.lsb Unit name: Sem.Walk_Expression_P.Wf_Tilde Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-walk_name.adb Listing Filename: sem-walk_name.lsb Unit name: Sem.Walk_Name Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-wf_argument_association.adb Listing Filename: sem-wf_argument_association.lsb Unit name: Sem.Wf_Argument_Association Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Source Filename: sem-wf_argument_association-tagged_actual_must_be_object_check.adb Listing Filename: sem-wf_argument_association-tagged_actual_must_be_object_check.lsb Unit name: Sem.Wf_Argument_Association.Tagged_Actual_Must_Be_Object_Check Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-wf_array_type_definition.adb Listing Filename: sem-wf_array_type_definition.lsb Unit name: Sem.Wf_Array_Type_Definition Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-wf_basic_declarative_item.adb Listing Filename: sem-wf_basic_declarative_item.lsb Unit name: Sem.Wf_Basic_Declarative_Item Unit type: subunit Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while (not Dictionary.IsNullIterator (Iterator)) loop --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Source Filename: sem-wf_basic_declarative_item-check_subtype_against_basetype_bounds.adb Listing Filename: sem-wf_basic_declarative_item-check_subtype_against_basetype_bounds.lsb Unit name: Sem.Wf_Basic_Declarative_Item.Check_Subtype_Against_Basetype_Bounds Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-wf_basic_declarative_item-wf_basic_declaration.adb Listing Filename: sem-wf_basic_declarative_item-wf_basic_declaration.lsb Unit name: Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. 
From To No. Line Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Flow 10 Expected ineffective assignment 1 Source Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration.adb Listing Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration.lsb Unit name: Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Full_Type_Declaration Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 2 Flow 33 end Expected to be neither reference 1 Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Source Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_derived.adb Listing Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_derived.lsb Unit name: Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Full_Type_Declaration.Wf_Derived Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Source Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_enum.adb Listing Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_enum.lsb Unit name: Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Full_Type_Declaration.Wf_Enum Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Source Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_integer.adb Listing Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_integer.lsb Unit name: Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Full_Type_Declaration.Wf_Integer Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Source Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_known_discriminant_part.adb Listing Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_known_discriminant_part.lsb Unit name: Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Full_Type_Declaration.Wf_Known_Discriminant_Part Unit type: subunit Unit has been analysed, any errors are listed below. 
No errors found Source Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_modular.adb Listing Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_modular.lsb Unit name: Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Full_Type_Declaration.Wf_Modular Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 2 Flow 33 end Expected to be neither reference 1 Source Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_priority_pragma.adb Listing Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_priority_pragma.lsb Unit name: Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Full_Type_Declaration.Wf_Priority_Pragma Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_protected_type_declaration.adb Listing Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_protected_type_declaration.lsb Unit name: Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Full_Type_Declaration.Wf_Protected_Type_Declaration Unit type: subunit Unit has been analysed, any errors are listed below. 5 error(s) or warning(s) Line while It /= Dictionary.NullIterator loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while It /= Dictionary.NullIterator loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not STree.IsNull (It) loop --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Source Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_protected_type_declaration-wf_protected_op_dec.adb Listing Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_protected_type_declaration-wf_protected_op_dec.lsb Unit name: Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Full_Type_Declaration.Wf_Protected_Type_Declaration.Wf_Protected_Op_Dec Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line while not Dictionary.IsNullIterator (Global_It) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_real.adb Listing Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_real.lsb Unit name: Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Full_Type_Declaration.Wf_Real Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. 
Line Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 602 end Partial initialization OK here 1 Source Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_record.adb Listing Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_record.lsb Unit name: Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Full_Type_Declaration.Wf_Record Unit type: subunit Unit has been analysed, any errors are listed below. 3 error(s) or warning(s) Line if Dictionary.Packages_Are_Equal (Left_Symbol => Current_Package, --- Warning :402: Default assertion planted to cut loop. Current_Package := Dictionary.GetLibraryPackage (Dictionary.GetScope (Current_Record)); --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_task_type_declaration.adb Listing Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_task_type_declaration.lsb Unit name: Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Full_Type_Declaration.Wf_Task_Type_Declaration Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_type_extension.adb Listing Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_full_type_declaration-wf_type_extension.lsb Unit name: Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Full_Type_Declaration.Wf_Type_Extension Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_subtype_declaration.adb Listing Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_subtype_declaration.lsb Unit name: Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Subtype_Declaration Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Source Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_subtype_declaration-wf_ravenscar_subtype.adb Listing Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_subtype_declaration-wf_ravenscar_subtype.lsb Unit name: Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Subtype_Declaration.Wf_Ravenscar_Subtype Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Source Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_variable_declaration.adb Listing Filename: sem-wf_basic_declarative_item-wf_basic_declaration-wf_variable_declaration.lsb Unit name: Sem.Wf_Basic_Declarative_Item.Wf_Basic_Declaration.Wf_Variable_Declaration Unit type: subunit Unit has been analysed, any errors are listed below. 
2 error(s) or warning(s) Line while not Dictionary.IsNullIterator (Subprogram_It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Global_Variable_It) loop --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 33 end Expected unused variable 1 Flow 10 Unused_Component_Data is discard 1 Source Filename: sem-wf_context_clause.adb Listing Filename: sem-wf_context_clause.lsb Unit name: Sem.Wf_Context_Clause Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line pragma Unreferenced (Always_False); ^ --- Warning : 3: Pragma - ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 end A specification can never with i 1 Flow 33 end A specification can never with i 1 Source Filename: sem-wf_context_clause_package_body.adb Listing Filename: sem-wf_context_clause_package_body.lsb Unit name: Sem.Wf_Context_Clause_Package_Body Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-wf_context_clause_package_body-use_clause.adb Listing Filename: sem-wf_context_clause_package_body-use_clause.lsb Unit name: Sem.Wf_Context_Clause_Package_Body.Use_Clause Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-wf_context_clause_package_body-with_clause.adb Listing Filename: sem-wf_context_clause_package_body-with_clause.lsb Unit name: Sem.Wf_Context_Clause_Package_Body.With_Clause Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 41 Structurally this is the preferr 1 Flow 10 Can never be explicitly duplicat 1 Flow 33 end Expected to be neither reference 1 Source Filename: sem-wf_declare_annotation.adb Listing Filename: sem-wf_declare_annotation.lsb Unit name: Sem.Wf_Declare_Annotation Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-wf_entire_variable.adb Listing Filename: sem-wf_entire_variable.lsb Unit name: Sem.Wf_Entire_Variable Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-wf_external_interface.adb Listing Filename: sem-wf_external_interface.lsb Unit name: Sem.Wf_External_Interface Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-wf_formal_part.adb Listing Filename: sem-wf_formal_part.lsb Unit name: Sem.Wf_Formal_Part Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-wf_formal_part-wf_param.adb Listing Filename: sem-wf_formal_part-wf_param.lsb Unit name: Sem.Wf_Formal_Part.Wf_Param Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 41 Expected stable expression 1 Source Filename: sem-wf_generic_formal_part.adb Listing Filename: sem-wf_generic_formal_part.lsb Unit name: Sem.Wf_Generic_Formal_Part Unit type: subunit Unit has been analysed, any errors are listed below. 
No errors found Source Filename: sem-wf_generic_subprogram_instantiation.adb Listing Filename: sem-wf_generic_subprogram_instantiation.lsb Unit name: Sem.Wf_Generic_Subprogram_Instantiation Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-wf_generic_subprogram_instantiation-wf_generic_actual_part.adb Listing Filename: sem-wf_generic_subprogram_instantiation-wf_generic_actual_part.lsb Unit name: Sem.Wf_Generic_Subprogram_Instantiation.Wf_Generic_Actual_Part Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line exit when Actual_Type_Iterator = Dictionary.NullIterator and then Formal_Type_Iterator = Dictionary.NullIterator; --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 10 Ineffective assignment here OK 1 Source Filename: sem-wf_global_definition.adb Listing Filename: sem-wf_global_definition.lsb Unit name: Sem.Wf_Global_Definition Unit type: subunit Unit has been analysed, any errors are listed below. 7 error(s) or warning(s) Line while not Dictionary.IsNullIterator (Refined_Glob_List) loop --- Warning :402: Default assertion planted to cut loop. exit when Dictionary.IsNullIterator (Second_Glob_List); --- Warning :402: Default assertion planted to cut loop. exit when Dictionary.IsNullIterator (Second_Glob_List); --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (First_Glob_List) loop --- Warning :402: Default assertion planted to cut loop. while It /= Dictionary.NullIterator loop --- Warning :402: Default assertion planted to cut loop. while It /= Dictionary.NullIterator loop --- Warning :402: Default assertion planted to cut loop. exit when Dictionary.IsNullIterator (It); --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 41 Expected stable expression 1 Flow 41 Expected stable expression 1 Source Filename: sem-wf_inherit_clause.adb Listing Filename: sem-wf_inherit_clause.lsb Unit name: Sem.Wf_Inherit_Clause Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-wf_justification_statement.adb Listing Filename: sem-wf_justification_statement.lsb Unit name: Sem.Wf_Justification_Statement Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-wf_package_declaration.adb Listing Filename: sem-wf_package_declaration.lsb Unit name: Sem.Wf_Package_Declaration Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-wf_package_declaration-add_child.adb Listing Filename: sem-wf_package_declaration-add_child.lsb Unit name: Sem.Wf_Package_Declaration.Add_Child Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-wf_package_declaration-get_package_declaration_key_nodes.adb Listing Filename: sem-wf_package_declaration-get_package_declaration_key_nodes.lsb Unit name: Sem.Wf_Package_Declaration.Get_Package_Declaration_Key_Nodes Unit type: subunit Unit has been analysed, any errors are listed below. 
No errors found Source Filename: sem-wf_package_declaration-wf_package_specification.adb Listing Filename: sem-wf_package_declaration-wf_package_specification.lsb Unit name: Sem.Wf_Package_Declaration.Wf_Package_Specification Unit type: subunit Unit has been analysed, any errors are listed below. 3 error(s) or warning(s) Line while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-wf_package_declaration-wf_package_specification-check_modes.adb Listing Filename: sem-wf_package_declaration-wf_package_specification-check_modes.lsb Unit name: Sem.Wf_Package_Declaration.Wf_Package_Specification.Check_Modes Unit type: subunit Unit has been analysed, any errors are listed below. 4 error(s) or warning(s) Line while not Dictionary.IsNullIterator (Priv_Type_It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Subprog_It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Param_It) loop --- Warning :402: Default assertion planted to cut loop. while Syntax_Node_Type (Node => Vis_Part_Rep_Node) = SP_Symbols.visible_part_rep loop --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-wf_package_declaration-wf_package_specification-check_state_can_be_initialized.adb Listing Filename: sem-wf_package_declaration-wf_package_specification-check_state_can_be_initialized.lsb Unit name: Sem.Wf_Package_Declaration.Wf_Package_Specification.Check_State_Can_Be_Initialized Unit type: subunit Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line while not Dictionary.IsNullIterator (Local_It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Own_Var_It) -- exit when no more owns --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-wf_package_declaration-wf_package_specification-check_types_can_be_used.adb Listing Filename: sem-wf_package_declaration-wf_package_specification-check_types_can_be_used.lsb Unit name: Sem.Wf_Package_Declaration.Wf_Package_Specification.Check_Types_Can_Be_Used Unit type: subunit Unit has been analysed, any errors are listed below. 12 error(s) or warning(s) Line while not Dictionary.IsNullIterator (Export_It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Import_It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Import_It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Import_It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Type_It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Op_It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Type_It) loop --- Warning :402: Default assertion planted to cut loop. 
while not Dictionary.IsNullIterator (Op_It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (DefConIt) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (Private_Type_It) -- exit when no more private types --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-wf_package_declaration-wf_package_specification-wf_anno.adb Listing Filename: sem-wf_package_declaration-wf_package_specification-wf_anno.lsb Unit name: Sem.Wf_Package_Declaration.Wf_Package_Specification.Wf_Anno Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-wf_package_declaration-wf_package_specification-wf_anno-wf_init_spec.adb Listing Filename: sem-wf_package_declaration-wf_package_specification-wf_anno-wf_init_spec.lsb Unit name: Sem.Wf_Package_Declaration.Wf_Package_Specification.Wf_Anno.Wf_Init_Spec Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-wf_package_declaration-wf_package_specification-wf_anno-wf_own.adb Listing Filename: sem-wf_package_declaration-wf_package_specification-wf_anno-wf_own.lsb Unit name: Sem.Wf_Package_Declaration.Wf_Package_Specification.Wf_Anno.Wf_Own Unit type: subunit Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 41 Expected stable expression 1 Source Filename: sem-wf_package_declaration-wf_package_specification-wf_private.adb Listing Filename: sem-wf_package_declaration-wf_package_specification-wf_private.lsb Unit name: Sem.Wf_Package_Declaration.Wf_Package_Specification.Wf_Private Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 41 Expected stable expression 1 Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Source Filename: sem-wf_package_declaration-wf_package_specification-wf_visible.adb Listing Filename: sem-wf_package_declaration-wf_package_specification-wf_visible.lsb Unit name: Sem.Wf_Package_Declaration.Wf_Package_Specification.Wf_Visible Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Source Filename: sem-wf_package_declaration-wf_package_specification-wf_visible-wf_deferred.adb Listing Filename: sem-wf_package_declaration-wf_package_specification-wf_visible-wf_deferred.lsb Unit name: Sem.Wf_Package_Declaration.Wf_Package_Specification.Wf_Visible.Wf_Deferred Unit type: subunit Unit has been analysed, any errors are listed below. 
No errors found Source Filename: sem-wf_package_declaration-wf_package_specification-wf_visible-wf_private_type_declaration.adb Listing Filename: sem-wf_package_declaration-wf_package_specification-wf_visible-wf_private_type_declaration.lsb Unit name: Sem.Wf_Package_Declaration.Wf_Package_Specification.Wf_Visible.Wf_Private_Type_Declaration Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-wf_pragma.adb Listing Filename: sem-wf_pragma.lsb Unit name: Sem.Wf_Pragma Unit type: subunit Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line while Syntax_Node_Type (Node => Node) = SP_Symbols.argument_association_rep loop --- Warning :402: Default assertion planted to cut loop. Is_Chain := Next_Sibling (Current_Node => Node) = STree.NullNode; --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-wf_pragma-wf_attach_handler.adb Listing Filename: sem-wf_pragma-wf_attach_handler.lsb Unit name: Sem.Wf_Pragma.Wf_Attach_Handler Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line Is_Chain := Next_Sibling (Current_Node => Id_Node) = STree.NullNode; --- Warning :402: Default assertion planted to cut loop. Source Filename: sem-wf_pragma-wf_elaborate_body.adb Listing Filename: sem-wf_pragma-wf_elaborate_body.lsb Unit name: Sem.Wf_Pragma.Wf_Elaborate_Body Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-wf_predicate.adb Listing Filename: sem-wf_predicate.lsb Unit name: Sem.Wf_Predicate Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-wf_priority_value.adb Listing Filename: sem-wf_priority_value.lsb Unit name: Sem.Wf_Priority_Value Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Source Filename: sem-wf_property_list.adb Listing Filename: sem-wf_property_list.lsb Unit name: Sem.Wf_Property_List Unit type: subunit Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line while OK_To_Add and not Dictionary.IsNullIterator (It) loop --- Warning :402: Default assertion planted to cut loop. while not STree.IsNull (It) loop --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Flow 10 Expected ineffective assignment 2 Flow 33 end Expected to be neither reference 1 Flow 10 Expected ineffective assignment 1 Source Filename: sem-wf_renaming_declaration.adb Listing Filename: sem-wf_renaming_declaration.lsb Unit name: Sem.Wf_Renaming_Declaration Unit type: subunit Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line Last_Node := Parent_Node (Current_Node => Last_Node); --- Warning :402: Default assertion planted to cut loop. SystemErrors.RT_Assert --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. 
Line Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Flow 33 end Expected to be neither reference 1 Source Filename: sem-wf_subprogram_annotation.adb Listing Filename: sem-wf_subprogram_annotation.lsb Unit name: Sem.Wf_Subprogram_Annotation Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Source Filename: sem-wf_subprogram_constraint.adb Listing Filename: sem-wf_subprogram_constraint.lsb Unit name: Sem.Wf_Subprogram_Constraint Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Source Filename: sem-wf_subprogram_declaration.adb Listing Filename: sem-wf_subprogram_declaration.lsb Unit name: Sem.Wf_Subprogram_Declaration Unit type: subunit Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 10 Expected ineffective assignment 1 Flow 33 end Expected to be neither reference 1 Flow 33 end Expected to be neither reference 1 Flow 33 end Expected to be neither reference 1 Source Filename: sem-wf_type_mark.adb Listing Filename: sem-wf_type_mark.lsb Unit name: Sem.Wf_Type_Mark Unit type: subunit Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line while not Dictionary.Is_Null_Symbol (CP) loop --- Warning :402: Default assertion planted to cut loop. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 22 Invariant expression OK here 1 Source Filename: spark-ada-command_line.adb Listing Filename: spark-ada-command_line.lsb Unit name: SPARK.Ada.Command_Line Unit type: package body Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line with Ada.Command_Line; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. end SPARK.Ada.Command_Line; --- Warning : 10: The body of package Command_Line is hidden - hidden text is ignored by the Examiner. Source Filename: spark-ada-command_line-unbounded_string.adb Listing Filename: spark-ada-command_line-unbounded_string.lsb Unit name: SPARK.Ada.Command_Line.Unbounded_String Unit type: package body Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line with Ada.Command_Line; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. end SPARK.Ada.Command_Line.Unbounded_String; --- Warning : 10: The body of package Unbounded_String is hidden - hidden text is ignored by the Examiner. Source Filename: spark-ada-strings-unbounded.adb Listing Filename: spark-ada-strings-unbounded.lsb Unit name: SPARK.Ada.Strings.Unbounded Unit type: package body Unit has been analysed, any errors are listed below. 3 error(s) or warning(s) Line with SPARK.Ada.Strings.Not_SPARK; ^ --- Warning : 1: The identifier Not_SPARK is either undeclared or not visible at this point. with SPARK.Ada.Strings.Maps.Not_SPARK; ^ --- Warning : 1: The identifier Not_SPARK is either undeclared or not visible at this point. end SPARK.Ada.Strings.Unbounded; --- Warning : 10: The body of package Unbounded is hidden - hidden text is ignored by the Examiner. 
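The pairing of warning 1 (an identifier such as Ada or Not_SPARK has no entry visible to the Examiner) with warning 10 (hidden text) in the entries above is the usual pattern for bodies that call into full Ada library units and hide that code from analysis. A minimal sketch of the idiom, with hypothetical names (Text_Utils, Trimmed_Length); the hide annotation shown is an assumption about the general pattern, not an excerpt from these bodies:

   package Text_Utils
   is
      function Trimmed_Length (Item : String) return Natural;
   end Text_Utils;

   with Ada.Strings.Fixed;
   --  The Examiner has no dictionary entry for package Ada here, so it
   --  reports warning 1 at this with clause and continues.
   package body Text_Utils
   is
      function Trimmed_Length (Item : String) return Natural
      is
         --# hide Trimmed_Length;
      begin
         return Ada.Strings.Fixed.Trim (Item, Ada.Strings.Both)'Length;
      end Trimmed_Length;
      --  Warning 10 is reported at the "end" above: the hidden body is
      --  ignored by the Examiner, as in the listings in this report.
   end Text_Utils;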
Source Filename: spark-ada-strings-maps.adb Listing Filename: spark-ada-strings-maps.lsb Unit name: SPARK.Ada.Strings.Maps Unit type: package body Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line end SPARK.Ada.Strings.Maps; --- Warning : 10: The body of package Maps is hidden - hidden text is ignored by the Examiner. Source Filename: spark-ada-text_io.adb Listing Filename: spark-ada-text_io.lsb Unit name: SPARK.Ada.Text_IO Unit type: package body Unit has been analysed, any errors are listed below. 3 error(s) or warning(s) Line with Ada.Exceptions; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. with SPARK.Ada.Text_IO.Not_SPARK; ^ --- Warning : 1: The identifier Not_SPARK is either undeclared or not visible at this point. end SPARK.Ada.Text_IO; --- Warning : 10: The body of package Text_IO is hidden - hidden text is ignored by the Examiner. Source Filename: spark-ada-text_io-unbounded_string.adb Listing Filename: spark-ada-text_io-unbounded_string.lsb Unit name: SPARK.Ada.Text_IO.Unbounded_String Unit type: package body Unit has been analysed, any errors are listed below. 4 error(s) or warning(s) Line with Ada.Exceptions; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. with SPARK.Ada.Strings.Unbounded.Not_SPARK; ^ --- Warning : 1: The identifier Not_SPARK is either undeclared or not visible at this point. with SPARK.Ada.Text_IO.Not_SPARK; ^ --- Warning : 1: The identifier Not_SPARK is either undeclared or not visible at this point. end SPARK.Ada.Text_IO.Unbounded_String; --- Warning : 10: The body of package Unbounded_String is hidden - hidden text is ignored by the Examiner. Source Filename: sli.adb Listing Filename: sli.lsb Unit name: SLI Unit type: package body Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Expected ineffective assignment 1 Flow 33 end Expected Actual_Unit_Type to be 1 Flow 41 Stable expression expected here 1 Flow 41 Stable expression expected here 1 Flow 41 Stable expression expected here 1 Flow 41 Stable expression expected here 1 Flow 41 Stable expression expected here 1 Flow 41 Stable expression expected here 1 Flow 41 Stable expression expected here 1 Flow 41 Stable expression expected here 1 Flow 41 Stable expression expected here 1 Flow 41 Stable expression expected here 1 Flow 41 Stable expression expected here 1 Flow 41 Stable expression expected here 1 Flow 41 Expected stable expression 3 Flow 41 Stable expression expected here 1 Flow 41 Stable expression expected here 1 Flow 41 Stable expression expected here 1 Flow 41 Stable expression expected here 1 Flow 41 Stable expression expected here 1 Flow 10 Ineffective assignment here OK 1 Flow 33 end Dummy_Unit_Type not referenced h 1 Source Filename: sli-io.adb Listing Filename: sli-io.lsb Unit name: SLI.IO Unit type: package body Unit has been analysed, any errors are listed below. 4 error(s) or warning(s) Line with Ada.Text_IO; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. with Ada.Integer_Text_IO; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. with E_Strings.Not_SPARK; ^ --- Warning : 1: The identifier Not_SPARK is either undeclared or not visible at this point. end SLI.IO; --- Warning : 10: The body of package IO is hidden - hidden text is ignored by the Examiner. 
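The "Expected messages marked with the accept annotation" tables, here and throughout this report, summarise justification annotations found in the analysed units: each row records the message class, the message number, a (truncated) copy of the reason string, and in the final column how many reported messages matched it. A minimal sketch of such a justification, using hypothetical names (Tracing, Reset) and assuming the single-letter message-class form of the SPARK 2005 accept annotation:

   package Tracing
   is
      procedure Reset (Status : out Natural);
      --# derives Status from ;
   end Tracing;

   package body Tracing
   is
      procedure Reset (Status : out Natural)
      is
      begin
         --# accept F, 10, Status, "Expected ineffective assignment";
         Status := 0;   -- deliberately overwritten below, so this assignment
                        -- is ineffective; the accept annotation justifies the
                        -- resulting flow message 10
         --# end accept;
         Status := 1;
      end Reset;
   end Tracing;

The justified message then appears only in the accept-annotation table, with its match count, rather than as an unexpected error; presumably an analogous accept with class W is what produces the "Warn 169 Direct updates OK here" rows in the SLI.Xref entry below.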
Source Filename: sli-xref.shb Listing Filename: sli-xref.lsb Unit name: SLI.Xref Unit type: package body Unit has been analysed, any errors are listed below. 16 error(s) or warning(s) Line with GNAT.Dynamic_Tables; ^ --- Warning : 1: The identifier GNAT is either undeclared or not visible at this point. with GNAT.Table.Sort; ^ --- Warning : 1: The identifier GNAT is either undeclared or not visible at this point. end Pos_Ref_Type_P; --- Warning : 10: The body of package Pos_Ref_Type_P is hidden - hidden text is ignored by the Examiner. end Sort_Pos_Ref_Type_Table; --- Warning : 10: The body of subprogram Sort_Pos_Ref_Type_Table is hidden - hidden text is ignored by the Examiner. end Pos_Ref_Type_Last; --- Warning : 10: The body of subprogram Pos_Ref_Type_Last is hidden - hidden text is ignored by the Examiner. end Usage_Comp_Unit_P; --- Warning : 10: The body of package Usage_Comp_Unit_P is hidden - hidden text is ignored by the Examiner. end Sort_Usage_Comp_Unit_Table; --- Warning : 10: The body of subprogram Sort_Usage_Comp_Unit_Table is hidden - hidden text is ignored by the Examiner. end Usage_Comp_Unit_Last; --- Warning : 10: The body of subprogram Usage_Comp_Unit_Last is hidden - hidden text is ignored by the Examiner. end Sym_P; --- Warning : 10: The body of package Sym_P is hidden - hidden text is ignored by the Examiner. end Sort_Sym_Table; --- Warning : 10: The body of subprogram Sort_Sym_Table is hidden - hidden text is ignored by the Examiner. end Sym_Last; --- Warning : 10: The body of subprogram Sym_Last is hidden - hidden text is ignored by the Examiner. end Decl_Comp_Unit_P; --- Warning : 10: The body of package Decl_Comp_Unit_P is hidden - hidden text is ignored by the Examiner. end Sort_Decl_Comp_Unit_Table; --- Warning : 10: The body of subprogram Sort_Decl_Comp_Unit_Table is hidden - hidden text is ignored by the Examiner. end Decl_Comp_Unit_Last; --- Warning : 10: The body of subprogram Decl_Comp_Unit_Last is hidden - hidden text is ignored by the Examiner. end Increment_Nb_Separates; --- Warning : 9: The body of subprogram Increment_Nb_Separates has a hidden exception handler - analysis and verification of contracts for this handler have not been performed. end Add_Usage; --- Warning : 9: The body of subprogram Add_Usage has a hidden exception handler - analysis and verification of contracts for this handler have not been performed. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 2 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 2 Flow 10 Ineffective assignment here OK 1 Flow 33 end Dummy_Unit_Name not referenced h 1 Flow 10 Ineffective assignment here OK 1 Warn 169 Direct updates OK here 1 Flow 10 Ineffective assignment here OK 1 Flow 10 Ineffective assignment here OK 1 Warn 169 Direct updates OK here 1 Warn 169 Direct updates OK here 1 Flow 10 Ineffective assignment here OK 1 Flow 33 end Dummy_Unit_Name not referenced h 1 Flow 33 end Dummy_Unit_Type not referenced h 1 Source Filename: mainloop.adb Listing Filename: mainloop.lsb Unit name: MainLoop Unit type: package body Unit has been analysed, any errors are listed below. 
3 error(s) or warning(s) Line end Trace; --- Warning : 10: The body of subprogram Trace is hidden - hidden text is ignored by the Examiner. end Trace_Unit; --- Warning : 10: The body of subprogram Trace_Unit is hidden - hidden text is ignored by the Examiner. end Trace_Meta_Filenames; --- Warning : 10: The body of subprogram Trace_Meta_Filenames is hidden - hidden text is ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Stat not used 1 Flow 33 end Stat not used 1 Flow 602 end Only used in HTML selected 1 Flow 602 end Only used in HTML selected 1 Warn 169 Direct updates OK here 1 Flow 41 Stable expression expected 1 Flow 10 end Status not used here 1 Flow 33 end Status not used here 1 Flow 10 end Not required 1 Flow 33 end Not required 1 Flow 10 end Not required 1 Flow 33 end Not required 1 Flow 41 Stable expression OK here 1 Flow 10 Final assignment after close 1 Flow 10 Final assignment after close 1 Flow 10 Final assignment after close 1 Flow 10 Not required here 1 Flow 33 end Not required here 1 Flow 10 end Not required here 4 Flow 33 end Not required here 1 Flow 41 Expect stable expression here 1 Flow 10 Ineffective assignment here OK 1 Flow 33 end Expect Dummy_Item unused 1 Flow 10 Not required here 4 Flow 41 Stable expression OK here 1 Flow 41 Stable expression expected here 1 Flow 10 Expect File_Status Unused 1 Flow 33 end Expect File_Status unused 1 Flow 602 end Expect undefined initial value 1 Flow 10 end Not required here 1 Flow 33 end Not required here 1 Warn 169 Direct updates OK here 1 Flow 10 Expect File_Status Unused 1 Warn 169 Direct updates OK here 1 Flow 10 Final assignment on Close 1 Flow 33 end Expect File_Status unused 1 Flow 602 end Expect undefined initial value 1 Source Filename: examiner.adb Listing Filename: examiner.lsb Unit name: Examiner Unit type: main program Unit has been analysed, any errors are listed below. 7 error(s) or warning(s) Line with Ada.Command_Line; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. with Ada.Exceptions; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. with Fatal; ^ --- Warning : 1: The identifier Fatal is either undeclared or not visible at this point. with GNAT.Traceback.Symbolic; ^ --- Warning : 1: The identifier GNAT is either undeclared or not visible at this point. end Set_Exit_Status; --- Warning : 10: The body of subprogram Set_Exit_Status is hidden - hidden text is ignored by the Examiner. end Execute; --- Warning : 9: The body of subprogram Execute has a hidden exception handler - analysis and verification of contracts for this handler have not been performed. end Examiner; --- Warning : 9: The body of subprogram Examiner has a hidden exception handler - analysis and verification of contracts for this handler have not been performed. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. 
Line Flow 602 end Defined before use in MainLoop 1 Flow 602 end Defined before use in MainLoop 1 Flow 602 end Defined before use in MainLoop 1 Flow 602 end Defined before use in MainLoop 1 Flow 602 end Defined before use in MainLoop 1 Flow 602 end Defined before use in MainLoop 1 Flow 602 end Defined before use in MainLoop 1 Flow 602 end Defined before use in MainLoop 1 Flow 602 end Defined before use in MainLoop 1 Flow 602 end Defined before use in MainLoop 1 Flow 602 end Defined before use in MainLoop 1 Flow 602 end Defined before use in MainLoop 1 Source Filename: casing.adb Listing Filename: casing.lsb Unit name: Casing Unit type: package body Unit has been analysed, any errors are listed below. No errors found Note: Automatic flow analysis mode selected --End of file-------------------------------------------------- spark-2012.0.deb/analyse/referenceanalysis/simplifier_spxref_undefined.txt0000644000175000017500000000021611753203755026120 0ustar eugeneugen_==directory->prolog:absolute_file_name/5 _==file->prolog:absolute_file_name/5 user:environ/2 newutilities:list_to_set/2 user:qsave_program/2 spark-2012.0.deb/analyse/referenceanalysis/sparkmake.rep0000644000175000017500000005403311753203755022300 0ustar eugeneugen ******************************************************* Report of SPARK Examination Examiner GPL Edition ******************************************************* Options: index_file=sparkmake.idx nowarning_file notarget_compiler_data config_file=gnat.cfg source_extension=ada listing_extension=ls_ nodictionary_file report_file=sparkmake.rep nohtml plain_output sparklib nostatistics fdl_identifiers=accept flow_analysis=auto language=95 profile=sequential annotation_character=# rules=lazy error_explanations=off justification_option=full casing=si output_directory=vcg output_directory (actual)=vcg Selected files: @sparkmake.smf Index Filename(s) used were: sparkmake.idx ada.idx spark.idx spark.idx main.idx errorhandler.idx Meta File(s) used were: sparkmake.smf unit.adb unitmanager.adb tokenmanager.adb sparkmakecommandline.adb sparkmakeerrors.adb sparkmake.adb Full warning reporting selected Target configuration file: Line package Standard is type Short_Short_Integer is range -128 .. 127; type Short_Integer is range -2**15 .. 2**15 - 1; type Integer is range -2**31 .. 2**31 - 1; type Long_Long_Integer is range -2**63 .. 2**63 - 1; end Standard; package System is Min_Int : constant := -2 ** 63; Max_Int : constant := 2 ** 63 - 1; end System; Source Filename(s) used were: unit.adb unit.ads tokenmanager.ads stringlist.ads sp_symbols.ads spark_io.ads sparkmakeerrors.ads sparkmakedebug.ads sparklex.ads lextokenmanager.ads e_strings.ads errorhandler.ads dictionary.ads commandlinedata.ads xmlreport.ads version.ads filesystem.ads examinerconstants.ads spark-ada-strings-unbounded.ads spark-ada-strings-maps.ads ada-characters-handling.shs spark-ada-strings.ads spark-ada.ads spark.ads systemerrors.ads spark_xml.ads screenecho.ads statistics.ads maths.ads lextokenstacks.ads contextmanager.ads commandlinehandler.ads sp_relations.ads sp_expected_symbols.ads error_types.ads error_io.ads sp_productions.ads lextokenlists.ads unitmanager.adb unitmanager-unitstore.ads unitmanager.ads units.ads regularexpression.ads directory_operations.ads tokenmanager.adb sparkmakecommandline.adb sparkmakecommandline.ads commandline.ads sparkmakeerrors.adb sparkmake.adb Source Filename: unit.ads No Listing File Unit name: Unit Unit type: package specification Unit has been analysed, any errors are listed below. 
No errors found Source Filename: tokenmanager.ads No Listing File Unit name: TokenManager Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: stringlist.ads No Listing File Unit name: StringList Unit type: package specification Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line end StringList; --- Warning : 10: The private part of package StringList is hidden - hidden text is ignored by the Examiner. Source Filename: sp_symbols.ads No Listing File Unit name: SP_Symbols Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark_io.ads No Listing File Unit name: SPARK_IO Unit type: package specification Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line pragma Inline (Valid_File, End_Of_Line, End_Of_File, Get_Char); ^ --- Warning : 3: Pragma - ignored by the Examiner. end SPARK_IO; --- Warning : 10: The private part of package SPARK_IO is hidden - hidden text is ignored by the Examiner. Source Filename: sparkmakeerrors.ads No Listing File Unit name: SparkMakeErrors Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sparkmakedebug.ads No Listing File Unit name: SparkMakeDebug Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sparklex.ads No Listing File Unit name: SparkLex Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: lextokenmanager.ads No Listing File Unit name: LexTokenManager Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: e_strings.ads No Listing File Unit name: E_Strings Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: errorhandler.ads No Listing File Unit name: ErrorHandler Unit type: package specification Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line pragma Elaborate_All (SPARK_IO); ^ --- Warning : 3: Pragma - ignored by the Examiner. Source Filename: dictionary.ads No Listing File Unit name: Dictionary Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: commandlinedata.ads No Listing File Unit name: CommandLineData Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: xmlreport.ads No Listing File Unit name: XMLReport Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: version.ads No Listing File Unit name: Version Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Warn 3 end Pragma to suppress compiler warn 1 Source Filename: filesystem.ads No Listing File Unit name: FileSystem Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: examinerconstants.ads No Listing File Unit name: ExaminerConstants Unit type: package specification Unit has been analysed, any errors are listed below. 
No errors found Source Filename: spark-ada-strings-unbounded.ads No Listing File Unit name: SPARK.Ada.Strings.Unbounded Unit type: package specification Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line with Ada.Strings.Unbounded; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. end SPARK.Ada.Strings.Unbounded; --- Warning : 10: The private part of package Unbounded is hidden - hidden text is ignored by the Examiner. Source Filename: spark-ada-strings-maps.ads No Listing File Unit name: SPARK.Ada.Strings.Maps Unit type: package specification Unit has been analysed, any errors are listed below. 2 error(s) or warning(s) Line with Ada.Strings.Maps; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. end SPARK.Ada.Strings.Maps; --- Warning : 10: The private part of package Maps is hidden - hidden text is ignored by the Examiner. Source Filename: ada-characters-handling.shs No Listing File Unit name: Ada.Characters.Handling Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark-ada-strings.ads No Listing File Unit name: SPARK.Ada.Strings Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark-ada.ads No Listing File Unit name: SPARK.Ada Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark.ads No Listing File Unit name: SPARK Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: systemerrors.ads No Listing File Unit name: SystemErrors Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: spark_xml.ads No Listing File Unit name: SPARK_XML Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: screenecho.ads No Listing File Unit name: ScreenEcho Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: statistics.ads No Listing File Unit name: Statistics Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: maths.ads No Listing File Unit name: Maths Unit type: package specification Unit has been analysed, any errors are listed below. 3 error(s) or warning(s) Line pragma Inline (HasNoValue); ^ --- Warning : 3: Pragma - ignored by the Examiner. for Digit'Size use 4; ^ --- Warning : 2: Representation clause - ignored by the Examiner. pragma Pack (ValueArray); ^ --- Warning : 3: Pragma - ignored by the Examiner. Source Filename: lextokenstacks.ads No Listing File Unit name: LexTokenStacks Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: contextmanager.ads No Listing File Unit name: ContextManager Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: commandlinehandler.ads No Listing File Unit name: CommandLineHandler Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sp_relations.ads No Listing File Unit name: SP_Relations Unit type: package specification Unit has been analysed, any errors are listed below. 
No errors found Source Filename: sp_expected_symbols.ads No Listing File Unit name: SP_Expected_Symbols Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: error_types.ads No Listing File Unit name: Error_Types Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: error_io.ads No Listing File Unit name: Error_IO Unit type: package specification Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line end Error_IO; --- Warning : 10: The private part of package Error_IO is hidden - hidden text is ignored by the Examiner. Source Filename: sp_productions.ads No Listing File Unit name: SP_Productions Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: lextokenlists.ads No Listing File Unit name: LexTokenLists Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: unitmanager-unitstore.ads No Listing File Unit name: UnitManager.UnitStore Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: unitmanager.ads No Listing File Unit name: UnitManager Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: units.ads No Listing File Unit name: Units Unit type: package specification Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line end Units; --- Warning : 10: The private part of package Units is hidden - hidden text is ignored by the Examiner. Source Filename: regularexpression.ads No Listing File Unit name: RegularExpression Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: directory_operations.ads No Listing File Unit name: Directory_Operations Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: sparkmakecommandline.ads No Listing File Unit name: SparkMakeCommandLine Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: commandline.ads No Listing File Unit name: CommandLine Unit type: package specification Unit has been analysed, any errors are listed below. No errors found Source Filename: unit.adb Listing Filename: unit.lsb Unit name: Unit Unit type: package body Unit has been analysed, any errors are listed below. 4 error(s) or warning(s) Line with E_Strings.Not_SPARK; ^ --- Warning : 1: The identifier Not_SPARK is either undeclared or not visible at this point. end Output_Unexpected_Token; --- Warning : 10: The body of subprogram Output_Unexpected_Token is hidden - hidden text is ignored by the Examiner. end Output_Object; --- Warning : 10: The body of subprogram Output_Object is hidden - hidden text is ignored by the Examiner. end Output_Id; --- Warning : 10: The body of subprogram Output_Id is hidden - hidden text is ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Closed file handle not used 1 Source Filename: unitmanager.adb Listing Filename: unitmanager.lsb Unit name: UnitManager Unit type: package body Unit has been analysed, any errors are listed below. 
18 error(s) or warning(s) Line while Success and not StringList.Is_Null (It => Directory_It) loop --- Warning :402: Default assertion planted to cut loop. while Success and not StringList.Is_Null (It => Include_It) loop --- Warning :402: Default assertion planted to cut loop. while Success and not StringList.Is_Null (It => File_It) loop --- Warning :402: Default assertion planted to cut loop. while not StringList.Is_Null (It => Exclude_It) loop --- Warning :402: Default assertion planted to cut loop. while not Units.IsEmpty (All_Units) loop --- Warning :402: Default assertion planted to cut loop. while Result.The_Kind /= Unit.Package_Body_Unit loop --- Warning :402: Default assertion planted to cut loop. while Parent_Unit /= Unit.Null_Id loop --- Warning :402: Default assertion planted to cut loop. while not StringList.Is_Null (It => It) loop --- Warning :402: Default assertion planted to cut loop. while not StringList.Is_Null (It => It) loop --- Warning :402: Default assertion planted to cut loop. while not Units.IsEmpty (The_Withed_Units) loop --- Warning :402: Default assertion planted to cut loop. while Parent_Unit /= Unit.Null_Id loop --- Warning :402: Default assertion planted to cut loop. while not Units.IsEmpty (All_Units) loop --- Warning :402: Default assertion planted to cut loop. while not Units.IsEmpty (The_Withed_Components) loop --- Warning :402: Default assertion planted to cut loop. while not Units.IsEmpty (The_Withed_Components) loop --- Warning :402: Default assertion planted to cut loop. while not Units.IsEmpty (All_Units) loop --- Warning :402: Default assertion planted to cut loop. while not Found and not Units.IsEmpty (Other_Units) loop --- Warning :402: Default assertion planted to cut loop. while not Found and not Units.IsEmpty (Req_Units) loop --- Warning :402: Default assertion planted to cut loop. while not Units.IsEmpty (All_Units) loop --- Warning :402: Default assertion planted to cut loop. Source Filename: tokenmanager.adb Listing Filename: tokenmanager.lsb Unit name: TokenManager Unit type: package body Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line end To_String; --- Warning : 10: The body of subprogram To_String is hidden - hidden text is ignored by the Examiner. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 Ineffective assignment OK 1 Flow 10 Ineffective assignment OK 1 Flow 33 end Unused not references OK 1 Flow 10 Ineffective assignment OK 1 Flow 33 end Unused not references OK 1 Source Filename: sparkmakecommandline.adb Listing Filename: sparkmakecommandline.lsb Unit name: SparkMakeCommandLine Unit type: package body Unit has been analysed, any errors are listed below. 8 error(s) or warning(s) Line end Report_Usage; --- Warning : 10: The body of subprogram Report_Usage is hidden - hidden text is ignored by the Examiner. end Report_Version; --- Warning : 10: The body of subprogram Report_Version is hidden - hidden text is ignored by the Examiner. CommandLineData.Content.Language_Profile := CommandLineData.SPARK83; ^ --- Warning :169: Direct update of own variable CommandLineData.Content.Language_Profile, which is an own variable of a non-enclosing package. CommandLineData.Content.Language_Profile := CommandLineData.SPARK95; ^ --- Warning :169: Direct update of own variable CommandLineData.Content.Language_Profile, which is an own variable of a non-enclosing package. 
CommandLineData.Content.Language_Profile := CommandLineData.SPARK2005; ^ --- Warning :169: Direct update of own variable CommandLineData.Content.Language_Profile, which is an own variable of a non-enclosing package. CommandLineData.Content.Anno_Char := E_Strings.Get_Element (E_Str => Arg, ^ --- Warning :169: Direct update of own variable CommandLineData.Content.Anno_Char, which is an own variable of a non-enclosing package. CommandLineData.Content.Language_Profile := CommandLineData.SPARK95; ^ --- Warning :169: Direct update of own variable CommandLineData.Content.Language_Profile, which is an own variable of a non-enclosing package. CommandLineData.Content.FDL_Reserved := False; ^ --- Warning :169: Direct update of own variable CommandLineData.Content.FDL_Reserved, which is an own variable of a non-enclosing package. Source Filename: sparkmakeerrors.adb Listing Filename: sparkmakeerrors.lsb Unit name: SparkMakeErrors Unit type: package body Unit has been analysed, any errors are listed below. 1 error(s) or warning(s) Line end Fatal; --- Warning : 10: The body of subprogram Fatal is hidden - hidden text is ignored by the Examiner. Source Filename: sparkmake.adb Listing Filename: sparkmake.lsb Unit name: Sparkmake Unit type: main program Unit has been analysed, any errors are listed below. 5 error(s) or warning(s) Line with Ada.Exceptions; ^ --- Warning : 1: The identifier Ada is either undeclared or not visible at this point. with GNAT.Traceback.Symbolic; ^ --- Warning : 1: The identifier GNAT is either undeclared or not visible at this point. with ScreenEcho; ^ --- Warning : 1: The identifier ScreenEcho is either undeclared or not visible at this point. end Make; --- Warning : 10: The body of subprogram Make is hidden - hidden text is ignored by the Examiner. end Sparkmake; --- Warning : 9: The body of subprogram Sparkmake has a hidden exception handler - analysis and verification of contracts for this handler have not been performed. Expected messages marked with the accept annotation Type Msg Lines Reason Match No. From To No. Line Flow 10 end Ineffective assignment here OK 1 Flow 10 Ineffective assignment here OK 1 Flow 33 end Metafile not referenced here 1 Note: Automatic flow analysis mode selected --End of file-------------------------------------------------- spark-2012.0.deb/analyse/generatedanalysis/0000755000175000017500000000000011753202341017573 5ustar eugeneugenspark-2012.0.deb/simplifier/0000755000175000017500000000000011753203756014613 5ustar eugeneugenspark-2012.0.deb/simplifier/settings.pro0000644000175000017500000000602611753202337017173 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. 
% % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Declare the global prolog settings for the whole session. These settings % should be activated early in the system build. %############################################################################### %############################################################################### %MODULE %############################################################################### :- module(settings, [declare_settings/0]). %############################################################################### %DEPENDENCIES %############################################################################### %############################################################################### %TYPES %############################################################################### %############################################################################### %DYNAMICS %############################################################################### %############################################################################### %PREDICATES %############################################################################### %=============================================================================== %declare_settings. %------------------------------------------------------------------------------- % Declare global prolog settings for the session. %=============================================================================== declare_settings:- %Report single variables. set_prolog_flag(single_var_warnings, on), %Report discontiguous predicates. set_prolog_flag(discontiguous_warnings, on), %Report redefined predicates. set_prolog_flag(redefine_warnings, on), %Report calling of undefined predicates. set_prolog_flag(unknown, error), %Report syntax errors. set_prolog_flag(syntax_errors, error), !. %############################################################################### %END-OF-FILE spark-2012.0.deb/simplifier/load__data_files.pro0000644000175000017500000005525711753202337020576 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Establishes the names of all files for this session, based on the % arguments provided on the command line and the parent directory. 
%############################################################################### %############################################################################### % MODULE %############################################################################### :- module(load__data_files, [load_data_files/0]). %############################################################################### % DEPENDENCIES %############################################################################### :- use_module('librarypredicates.pro', [file_exists/1]). :- use_module('data__switches.pro', [get_switch_deadpaths/1, get_switch_input_file/1, get_switch_log/1]). :- use_module('data__data_files.pro', [get_datafiles_vcg/1, add_datafiles_vcg/1, get_datafiles_dpc/1, add_datafiles_dpc/1, get_datafiles_simplified_vcg/1, add_datafiles_simplified_vcg/1, get_datafiles_summary_dpc/1, add_datafiles_summary_dpc/1, get_datafiles_fdl/1, add_datafiles_fdl/1, get_datafiles_local_user_rule/1, add_datafiles_local_user_rule/1, get_datafiles_global_user_rule/1, add_datafiles_global_user_rule/1, get_datafiles_rule/1, add_datafiles_rule/1, get_datafiles_pfs/1, add_datafiles_pfs/1, get_datafiles_simplified_pfs/1, add_datafiles_simplified_pfs/1, get_datafiles_dec/1, add_datafiles_dec/1, get_datafiles_log/1, add_datafiles_log/1, add_datafiles_debug/2]). :- use_module('ioutilities.pro', [show_error/2]). :- use_module('newutilities.pro', [explode_separator_content_as_list/3, implode_separator_content_list/3]). :- use_module('simplifier_ioutilities.pro', [retrieve_proof_file_kind/1]). %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### %############################################################################### % PREDICATES %############################################################################### :- set_prolog_flag(double_quotes, chars). %=============================================================================== % load_data_files. %------------------------------------------------------------------------------- %Based on known information, determine and store the name of every file %that may be used. %=============================================================================== load_data_files:- % Establish all of the file names for this session. establish_static_files, establish_dynamic_files, !. %=============================================================================== % establish_static_files. %------------------------------------------------------------------------------- % Calculate and store all of the hard-coded static files. It is expected % that these files will only be used internally for debugging purposes. %=============================================================================== establish_static_files:- %Add all debug files. establish_debug_file(data__prf), establish_debug_file(data__vcg), establish_debug_file(data__files), !. %------------------------------------------------------------------------------- %Adds a debug file in a consistent manner. establish_debug_file(DebugFileKind):- implode_separator_content_list('', ['debug_', DebugFileKind, '.pro'], DebugFile_Atom), absolute_file_name(DebugFile_Atom, AbsoluteDebugFile_Atom), add_datafiles_debug(DebugFileKind, AbsoluteDebugFile_Atom), !. 
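% Editor's illustration (not part of the original source): assuming the
% simplifier is started from the directory /work, the establish_debug_file/1
% clause above would register debug files along the following lines; the
% directory and the query form are assumptions made for this example only.
%
%   | ?- establish_debug_file(data__vcg).
%   % stores add_datafiles_debug(data__vcg, '/work/debug_data__vcg.pro')
%
%   | ?- establish_debug_file(data__prf).
%   % stores add_datafiles_debug(data__prf, '/work/debug_data__prf.pro')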
%=============================================================================== % establish_dynamic_files. %------------------------------------------------------------------------------- % Calculate and store all of the files, whose location depends on % changeable information. %=============================================================================== % Input file has not been provided. establish_dynamic_files:- \+ get_switch_input_file(_InputFile_Atom), show_error('Target file not provided on the command-line.', []). % Input file has been provided. establish_dynamic_files:- get_switch_input_file(InputFile_Atom), %The provided input file will use the syntax relevant for the current %platform. The input file provided may be relative to the current %directory, or rooted to some point in the filesystem. To treat the %platform specific relative or rooted file name consistently it is %always put into its absolute form, via a platform portable predicate. absolute_file_name(InputFile_Atom, AbsoluteInputFile_Atom), %Determine the proof file kind. proof_file_info(AbsoluteInputFile_Atom, ProofFileKind, AbsoluteDirectory_Atom, BaseFile_Atom), establish_proof_file_dependent_files(ProofFileKind, AbsoluteDirectory_Atom, BaseFile_Atom), establish_common_files(AbsoluteDirectory_Atom, BaseFile_Atom), !. % None of the above is an error. establish_dynamic_files:- show_error('Unexpected error in establishing input file details.', []). %------------------------------------------------------------------------------- %Set up files for verification conditions. establish_proof_file_dependent_files(verification_conditions, AbsoluteDirectory_Atom, BaseFile_Atom):- %Mandatory input files. %---------------------- %.vcg file implode_separator_content_list('', [AbsoluteDirectory_Atom, BaseFile_Atom, '.vcg'], VcgFile_Atom), ensure_datafile_exists(VcgFile_Atom), add_datafiles_vcg(VcgFile_Atom), %.fdl file implode_separator_content_list('', [AbsoluteDirectory_Atom, BaseFile_Atom, '.fdl'], FdlFile_Atom), ensure_datafile_exists(FdlFile_Atom), add_datafiles_fdl(FdlFile_Atom), %Optional input files. %--------------------- % Although the current examiner always generates VCs with a supporting % rls file, this has not always been the case. Thus, the rls files are % optional. %.rls file implode_separator_content_list('', [AbsoluteDirectory_Atom, BaseFile_Atom, '.rls'], RuleFile_Atom), potentially_add_datafiles_rule(RuleFile_Atom), %.rlu file (local) implode_separator_content_list('', [AbsoluteDirectory_Atom, BaseFile_Atom, '.rlu'], LocalUserRuleFile_Atom), potentially_add_datafiles_local_user_rule(LocalUserRuleFile_Atom), %.rlu file (global) retrieve_parent_directory(AbsoluteDirectory_Atom, ParentDirectory_Atom), implode_separator_content_list('', [AbsoluteDirectory_Atom, ParentDirectory_Atom, '.rlu'], GlobalUserRuleFile_Atom), potentially_add_datafiles_global_user_rule(LocalUserRuleFile_Atom, GlobalUserRuleFile_Atom), %Mandatory output files. %----------------------- %.siv file implode_separator_content_list('', [AbsoluteDirectory_Atom, BaseFile_Atom, '.siv'], SimplifiedVcgFile_Atom), add_datafiles_simplified_vcg(SimplifiedVcgFile_Atom), !. % Set up files for dead path search. establish_proof_file_dependent_files(zombie_scope, AbsoluteDirectory_Atom, BaseFile_Atom):- %Mandatory input files. 
%---------------------- %.dpc file implode_separator_content_list('', [AbsoluteDirectory_Atom, BaseFile_Atom, '.dpc'], DpcFile_Atom), ensure_datafile_exists(DpcFile_Atom), add_datafiles_dpc(DpcFile_Atom), %.fdl file implode_separator_content_list('', [AbsoluteDirectory_Atom, BaseFile_Atom, '.fdl'], FdlFile_Atom), ensure_datafile_exists(FdlFile_Atom), add_datafiles_fdl(FdlFile_Atom), %Optional input files. %--------------------- % Although the current examiner always generates VCs with a supporting % rls file, this has not always been the case. Thus, the rls files are % optional. %.rls file implode_separator_content_list('', [AbsoluteDirectory_Atom, BaseFile_Atom, '.rls'], RuleFile_Atom), potentially_add_datafiles_rule(RuleFile_Atom), %Mandatory output files. %----------------------- %.sdp file implode_separator_content_list('', [AbsoluteDirectory_Atom, BaseFile_Atom, '.sdp'], SummaryDpcFile_Atom), add_datafiles_summary_dpc(SummaryDpcFile_Atom), !. %Set up files for path functions. establish_proof_file_dependent_files(path_functions, AbsoluteDirectory_Atom, BaseFile_Atom):- %Mandatory input files. %---------------------- %.pfs file implode_separator_content_list('', [AbsoluteDirectory_Atom, BaseFile_Atom, '.pfs'], PfsFile_Atom), ensure_datafile_exists(PfsFile_Atom), add_datafiles_pfs(PfsFile_Atom), %.dec file implode_separator_content_list('', [AbsoluteDirectory_Atom, BaseFile_Atom, '.dec'], DecFile_Atom), ensure_datafile_exists(DecFile_Atom), add_datafiles_dec(DecFile_Atom), %Mandatory output files. %----------------------- %.sip file implode_separator_content_list('', [AbsoluteDirectory_Atom, BaseFile_Atom, '.sip'], SimplifiedPfsFile_Atom), add_datafiles_simplified_pfs(SimplifiedPfsFile_Atom), !. %None of the above is unexpected. establish_proof_file_dependent_files(ProofFileKind, _AbsoluteDirectory_Atom, _BaseFile_Atom):- show_error('Unexpected proof file kind: ~p.', [ProofFileKind]). %------------------------------------------------------------------------------- ensure_datafile_exists(File_Atom):- file_exists(File_Atom), !. ensure_datafile_exists(File_Atom):- show_error('Required input file does not exist: ~a.', [File_Atom]). %------------------------------------------------------------------------------- potentially_add_datafiles_rule(RuleFile_Atom):- file_exists(RuleFile_Atom), add_datafiles_rule(RuleFile_Atom), !. % From above, file does not exist, so do not add. potentially_add_datafiles_rule(_RuleFile_Atom):- !. %------------------------------------------------------------------------------- potentially_add_datafiles_local_user_rule(LocalUserRuleFile_Atom):- file_exists(LocalUserRuleFile_Atom), add_datafiles_local_user_rule(LocalUserRuleFile_Atom), !. % From above, file does not exist, so do not add. potentially_add_datafiles_local_user_rule(_LocalUserRuleFile_Atom):- !. %------------------------------------------------------------------------------- potentially_add_datafiles_global_user_rule(LocalUserRuleFile_Atom, GlobalUserRuleFile_Atom):- % While unusual, it is possible for the optional user rule input files % to be exactly the same. In this case the global usr rule file is not % added. (For example, the examiner may generate false VCs following a % semantic error. It is a semantic error in SPARK for a package to % have a subprogram with the same name. In this case the examiner % generates false VCs for the subprogram, using the same name as its % package, creating potential duplicate same name for the local and % global user rule files). 
% \+ (LocalUserRuleFile_Atom = GlobalUserRuleFile_Atom), file_exists(GlobalUserRuleFile_Atom), add_datafiles_global_user_rule(GlobalUserRuleFile_Atom), !. % From above, file does not exist, so do not add. potentially_add_datafiles_global_user_rule(_LocalUserRuleFile_Atom, _GlobalUserRuleFile_Atom):- !. %------------------------------------------------------------------------------- establish_common_files(AbsoluteDirectory_Atom, BaseFile_Atom):- establish_log_file(AbsoluteDirectory_Atom, BaseFile_Atom), !. %------------------------------------------------------------------------------- % Log file requested. Use default name. % Log file is slg when Simplifier is discharging VCs and zlg when Simplifier % is searching for dead paths, establish_log_file(AbsoluteDirectory_Atom, BaseFile_Atom):- get_switch_deadpaths(off), get_switch_log(yes_log_file), %.slg file implode_separator_content_list('', [AbsoluteDirectory_Atom, BaseFile_Atom, '.slg'], LogFile_Atom), add_datafiles_log(LogFile_Atom), !. establish_log_file(AbsoluteDirectory_Atom, BaseFile_Atom):- get_switch_deadpaths(on), get_switch_log(yes_log_file), %.zlg file implode_separator_content_list('', [AbsoluteDirectory_Atom, BaseFile_Atom, '.zlg'], LogFile_Atom), add_datafiles_log(LogFile_Atom), !. % Log file requested, with specific name. establish_log_file(_AbsoluteDirectory_Atom, _BaseFile_Atom):- get_switch_log(provided_log_file(LogFile_Atom)), % To treat the platform specific relative or rooted file name % consistently it is always put into its absolute form, via a platform % portable predicate. absolute_file_name(LogFile_Atom, AbsoluteLogFile_Atom), add_datafiles_log(AbsoluteLogFile_Atom), !. % User has specified that no log be used. establish_log_file(_AbsoluteDirectory_Atom, _BaseFile_Atom):- get_switch_log(no_log_file), !. % None of the above is an error. establish_log_file(_AbsoluteDirectory_Atom, _BaseFile_Atom):- show_error('Unable to determine correct logging behaviour.', []). %=============================================================================== % proof_file_info(+AbsoluteInputFile_Atom, % -ProofFileKind, % -AbsoluteDirectory_Atom, % -BaseFile_Atom). %------------------------------------------------------------------------------- % Given an input file (AbsoluteInputFile_Atom) determine its proof file % kind (ProofFileKind) and its absolute directory (with trailing '/') and % its base file name (BaseFile_Atom). Note that if the proof file kind can % not be determined from the file name, it is assumed that the extension % '.vcg' is missing and corrects aspects accordingly. Note that sicstus % uses '/' as the directory separator, regardless of whatever the platform % uses. % % For example: % AbsoluteInputFile_Atom=U:/one/two/three/four.vcg % ProofFileKind=verification_conditions % AbsoluteDirectory_Atom=U:/one/two/three/ % BaseFile_Atom=four % % For example: % AbsoluteInputFile_Atom=U:/one/two/three/six % ProofFileKind=verification_conditions % AbsoluteDirectory_Atom=U:/one/two/three/ % BaseFile_Atom=six %=============================================================================== proof_file_info(AbsoluteInputFile_Atom, ProofFileKind, AbsoluteDirectory_Atom, BaseFile_Atom):- %Retrieve the directory parts and file part. explode_separator_content_as_list('/', AbsoluteInputFile_Atom, Item_AtomList), %The last part is the file part. append(Directory_AtomList, [File_Atom], Item_AtomList), determine_proof_file_kind_and_base_file(File_Atom, ProofFileKind, BaseFile_Atom), %Reassemble the directory part. 
implode_separator_content_list('/', Directory_AtomList, AbsoluteDirectoryLessTrailing_Atom), %Add the trailing '/'. atom_concat(AbsoluteDirectoryLessTrailing_Atom, '/', AbsoluteDirectory_Atom), !. %------------------------------------------------------------------------------- determine_proof_file_kind_and_base_file(File_Atom, ProofFileKind, BaseFile_Atom):- atom_chars(File_Atom, File_CharList), determine_proof_file_kind_and_base_file_x(File_CharList, ProofFileKind, BaseFile__CharList), atom_chars(BaseFile_Atom, BaseFile__CharList), !. %------------------------------------------------------------------------------- % If deadpath switch is off. % Without extension, verification_conditions is the default. determine_proof_file_kind_and_base_file_x(File__BaseFile__CharList, verification_conditions, File__BaseFile__CharList):- get_switch_deadpaths(off), \+ member('.', File__BaseFile__CharList), !. % With explicit vcg extension is verification_conditions. determine_proof_file_kind_and_base_file_x(File_CharList, verification_conditions, BaseFile_CharList):- get_switch_deadpaths(off), append(BaseFile_CharList, ".vcg", File_CharList), !. % If dead path switch is on % Without extension deadpath_search. determine_proof_file_kind_and_base_file_x(File__BaseFile__CharList, zombie_scope, File__BaseFile__CharList):- get_switch_deadpaths(on), \+ member('.', File__BaseFile__CharList), !. % With explicit dpc extension determine_proof_file_kind_and_base_file_x(File_CharList, zombie_scope, BaseFile_CharList):- get_switch_deadpaths(on), append(BaseFile_CharList, ".dpc", File_CharList), !. % With explicit pfs extension is path_functions. determine_proof_file_kind_and_base_file_x(File_CharList, path_functions, BaseFile_CharList):- append(BaseFile_CharList, ".pfs", File_CharList), !. % Any other extension is treated as an error. determine_proof_file_kind_and_base_file_x(File_CharList, _ProofFileKind, _BaseFile_CharList):- atom_chars(File_Atom, File_CharList), show_error('Provided input file ~a should have no extension or have a .vcg or .pfs extension.', [File_Atom]). %=============================================================================== % retrieve_parent_directory(+AbsoluteDirectory_Atom, % -ParentDirectory_Atom). %------------------------------------------------------------------------------- % Given an absolute directory (with trailing '/') determine the parent % directory name. % % For example: % AbsoluteDirectory_Atom=U:/one/two/three/ % ParentDirectory_Atom=three %=============================================================================== retrieve_parent_directory(AbsoluteDirectory_Atom, ParentDirectory_Atom):- %Retrieve the directory parts and file part. explode_separator_content_as_list('/', AbsoluteDirectory_Atom, Item_AtomList), append(_LeadingItem_AtomList, [ParentDirectory_Atom, _TopDirectory_Atom], Item_AtomList), !. :- set_prolog_flag(double_quotes, codes). %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/data__proved_conc.pro0000644000175000017500000000735711753202337020774 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. 
The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Manage (store, retreive and delete) database for 'proved conclusions'. %############################################################################### %############################################################################### % MODULE %############################################################################### :- module(data__proved_conc, [add_proved_conc/1, get_proved_conc/1, prune_proved_conc/1, prune_all_proved_concs/0]). %############################################################################### % DEPENDENCIES %############################################################################### :- use_module('data__formats.pro', [add_state/2, add_type/2]). %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### :- add_state(get_proved_conc, get_proved_conc('Id_Int')). :- dynamic(get_proved_conc/1). %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % add_proved_conc(+Id_Int) % % Insert the proved conclusion Id_Int into the database. %=============================================================================== add_proved_conc(Id_Int):- assertz(get_proved_conc(Id_Int)). %=============================================================================== %=============================================================================== % prune_proved_conc(+Id_Int) % % Delete Id_Int from the database. %=============================================================================== prune_proved_conc(Id_Int):- retract(get_proved_conc(Id_Int)). %=============================================================================== %=============================================================================== % prune_all_proved_concs % % Delete all proved conclusions from the database. %=============================================================================== prune_all_proved_concs:- retractall(get_proved_conc(_)). 
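% Editor's sketch (added for illustration; not part of the original module):
% a round trip over the operations defined above. It assumes Id_Int is not
% already recorded as a proved conclusion; the predicate name
% proved_conc_example/1 is hypothetical.
proved_conc_example(Id_Int):-
    add_proved_conc(Id_Int),
    get_proved_conc(Id_Int),
    prune_proved_conc(Id_Int),
    \+ get_proved_conc(Id_Int).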
%=============================================================================== %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/data__subst_hyp.pro0000644000175000017500000000733611753202337020504 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Manage (store, retrieve and delete) database for 'substituted hypotheses'. %############################################################################### %############################################################################### % MODULE %############################################################################### :- module(data__subst_hyp, [add_subst_hyp/3, get_subst_hyp/3, prune_all_subst_hyp/3]). %############################################################################### % DEPENDENCIES %############################################################################### :- use_module('data__formats.pro', [add_state/2, add_type/2]). :- use_module('data__hyp.pro', [get_hyp/3]). %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### :- add_state(get_subst_hyp, get_hyp('Hypothesis_Term', 'HypothesisType', 'Id_Int')). :- dynamic(get_subst_hyp/3). %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % add_subst_hyp(+Hyp_Term, +HypType, +Id_Int) % % Insert into the subst_hyp database if it is not a hypothesis and not already % in the database. %=============================================================================== add_subst_hyp(Hyp_Term, HypType, Id_Int):- get_hyp(Hyp_Term, HypType, Id_Int), !. add_subst_hyp(Hyp_Term, HypType, Id_Int):- get_subst_hyp(Hyp_Term, HypType, Id_Int), !. add_subst_hyp(Hyp_Term, HypType, Id_Int):- assertz(get_subst_hyp(Hyp_Term, HypType, Id_Int)), !.
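% Editor's illustration (not part of the original source): the three clauses
% above make add_subst_hyp/3 idempotent. For example (the terms used here are
% hypothetical):
%
%   | ?- add_subst_hyp(x = 1, ss, 7), add_subst_hyp(x = 1, ss, 7).
%   % at most one get_subst_hyp(x = 1, ss, 7) fact is stored, and nothing is
%   % stored at all if get_hyp(x = 1, ss, 7) already holds.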
%=============================================================================== %=============================================================================== % prune_all_subst_hyp(+Hyp_Term, +HypType, +Id_Int) % % Delete all matching substituted hypotheses from the database. %=============================================================================== prune_all_subst_hyp(Hyp_Term, HypType, Id_Int):- retractall(get_subst_hyp(Hyp_Term, HypType, Id_Int)). %=============================================================================== %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/load__declarations.pro0000644000175000017500000012264511753202337021147 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Establish the declarations for this session. Most of the declarations are % retrieved from a provided declaration file. %############################################################################### %############################################################################### % MODULE %############################################################################### :- module(load__declarations, [load_declarations/0, save_used_identifier/2]). %############################################################################### % DEPENDENCIES %############################################################################### :- use_module('data__formats.pro', [add_type/2]). :- use_module('data__declarations.pro', [add_declarations_constant/2, add_declarations_function/3, add_declarations_record_function/6, add_declarations_type/2, add_declarations_unbounded_function/3, add_declarations_used_identifier/1, add_declarations_variable/2, get_declarations_type/2, get_declarations_used_identifier/1, type_alias/2, pre_calculate_legacy_fdl/0]). :- use_module('ioutilities.pro', [read_lines_from_file_as_char_list/3, read_line_from_stream/2, throw_error/2]). :- use_module('newutilities.pro', [flatten_list/2, implode_separator_content_list/3, trim_atom/3]). :- use_module('simplifier_ioutilities.pro', [retrieve_declaration_file/1, convert_file_for_display/2]). :- use_module('parseutilities.pro', [atom_to_lower_case/2, parse_all_to_nothing/2, parse_atom/5, parse_atom_silent/4, parse_char_sep_atom_list/6, parse_nothing_to_all/2, parse_number/3, parse_possibly_signed_atom/4]). :- use_module('data__provenance.pro', [get_provenance_framework/1]). :- set_prolog_flag(double_quotes, chars).
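% Editor's note (illustration only; not part of the original source): with
% double_quotes set to chars, a double-quoted literal in this file denotes a
% list of character atoms, which is what the DCG rules below match against.
% For example:
%
%   | ?- set_prolog_flag(double_quotes, chars), X = "var".
%   % binds X to [v,a,r]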
%############################################################################### % TYPES %############################################################################### :- add_type('FDL', [comment, title('Title'), packed_variable('TypeId_Atom', 'VarId_AtomList'), variable('TypeId_Atom', 'VarId_Atom'), constant('TypeId_Atom', 'ConstId_Atom'), type('TypeId_Atom', 'TypeStructure'), function('TypeId_Atom', 'Function_Atom', 'ArgTypeId_AtomList'), end]). :- add_type('Title', [procedure('Atom'), function('Atom'), anonymous('Atom')]). :- add_type('TypeStructure', [pending, range('Lower_Int', 'Upper_Int'), array('IndexTypeId_AtomList', 'ElementTypeId_Atom'), enumeration('EnumId_AtomList'), record('FieldList'), sequence('ElementTypeId_Atom'), set('ElementTypeId_Atom'), alias('AliasTypeId_Atom')]). :- add_type('Field', [packed_field('TypeId_Atom', 'FieldId_AtomList'), field('TypeId_Atom', 'FieldId_Atom')]). %############################################################################### % DATA %############################################################################### %############################################################################### % PREDICATES %############################################################################### :- dynamic(current_record_field_number/1). %=============================================================================== % load_declarations. %------------------------------------------------------------------------------- % Loads all declarations from the known declaration file. %=============================================================================== load_declarations :- nl, retrieve_declaration_file(DeclarationFile_Atom), convert_file_for_display(DeclarationFile_Atom, DisplayDeclarationFile_Atom), write('Reading '), write(DisplayDeclarationFile_Atom), write(' (for inherited FDL type declarations)'), nl, assertz(current_record_field_number(1)), load_declarations_static, load_declarations_file, pre_calculate_legacy_fdl, !. %------------------------------------------------------------------------------- load_declarations_file:- retrieve_declaration_file(File_Atom), read_lines_from_file_as_char_list(File_Atom, everyLine, CharList), load_declarations_from_char_list(CharList), !. %------------------------------------------------------------------------------- load_declarations_static:- implode_separator_content_list('\n', ['function bit__and(integer, integer) : integer;', 'function bit__or(integer, integer) : integer;', 'function bit__xor(integer, integer) : integer;'], Content_Atom), atom_chars(Content_Atom, Content_CharList), load_declarations_from_char_list(Content_CharList), !. %=============================================================================== % load_declarations_from_char_list(+CharList). %------------------------------------------------------------------------------- % Load all declarations from the large character list (CharList). %=============================================================================== load_declarations_from_char_list(CharList):- % Retrieve all declarations. retrieve_declarations_items(CharList, FDLList), % Flatten all declarations. unpack_declarations_items(FDLList, Unpacked_FDLList), % Standardise all declarations. standardise_declarations_items(Unpacked_FDLList, Standardised_FDLList), % Process all declarations. process_declarations_items(Standardised_FDLList), !. %=============================================================================== % retrieve_declarations_items(+CharList, -FDLList). 
%------------------------------------------------------------------------------- % Retrieve all declarations items as (FDLList) from the provided character % list (CharList). The declarations items are encoded to closely reflect % their original presentation form. %=============================================================================== % Make the parse_declarations call visible to the spxref tool. :- public parse_declarations/3. retrieve_declarations_items(CharList, FDLList):- phrase(parse_declarations(FDLList), CharList), !. retrieve_declarations_items(_CharList, _FDLList):- throw_error('Error in parsing declarations.\n', []). %------------------------------------------------------------------------------- parse_declarations([FDL | FDLList]) --> parse_atom_silent([space, newline], zeroormore), parse_declarations_item(FDL), parse_declarations(FDLList). parse_declarations([]) --> !. %------------------------------------------------------------------------------- % Comment. % { ... } parse_declarations_item(comment) --> "{", parse_atom_silent([space, newline], zeroormore), parse_declarations_comment_contents, parse_atom_silent([space, newline], zeroormore), "}", parse_atom_silent([space, newline], zeroormore), !. % Title. % title procedure addsuccessfulentry; parse_declarations_item(title(procedure(Subprogram_Atom))) --> "title", parse_atom_silent([space, newline], oneormore), "procedure", parse_atom_silent([space, newline], oneormore), parse_atom([alpha_numeric, under_score], oneormore, Subprogram_Atom), parse_atom_silent([space, newline], zeroormore), ";", parse_atom_silent([space, newline], zeroormore), !. parse_declarations_item(title(function(Subprogram_Atom))) --> "title", parse_atom_silent([space, newline], oneormore), "function", parse_atom_silent([space, newline], oneormore), parse_atom([alpha_numeric, under_score], oneormore, Subprogram_Atom), parse_atom_silent([space, newline], zeroormore), ";", parse_atom_silent([space, newline], zeroormore), !. parse_declarations_item(title(anonymous(Description_Atom))) --> "title", parse_atom_silent([space, newline], oneormore), parse_atom([not(semicolon)], oneormore, Description_Atom), parse_atom_silent([space, newline], zeroormore), ";", parse_atom_silent([space, newline], zeroormore), !. % Variable. % var a : integer; parse_declarations_item(packed_variable(TypeId_Atom, VarId_AtomList)) --> parse_optional_proof, parse_atom_silent([space, newline], zeroormore), "var", parse_atom_silent([space, newline], oneormore), parse_char_sep_atom_list([alpha_numeric, under_score], [space, newline], ',', VarId_AtomList), parse_atom_silent([space, newline], zeroormore), ":", parse_atom_silent([space, newline], zeroormore), parse_atom([alpha_numeric, under_score], oneormore, TypeId_Atom), parse_atom_silent([space, newline], zeroormore), ";", parse_atom_silent([space, newline], zeroormore), !. % Constant. 
% const basictypes__unsigned32t__size : integer = pending; parse_declarations_item(constant(ConstId_Atom, TypeId_Atom)) --> parse_optional_proof, parse_atom_silent([space, newline], zeroormore), "const", parse_atom_silent([space, newline], oneormore), parse_atom([alpha_numeric, under_score], oneormore, ConstId_Atom), parse_atom_silent([space, newline], zeroormore), ":", parse_atom_silent([space, newline], zeroormore), parse_atom([alpha_numeric, under_score], oneormore, TypeId_Atom), parse_atom_silent([space, newline], zeroormore), "=", parse_atom_silent([space, newline], zeroormore), parse_atom_silent([not(semicolon)], oneormore), parse_atom_silent([space, newline], zeroormore), ";", parse_atom_silent([space, newline], zeroormore), !. % Type pending. parse_declarations_item(type(TypeId_Atom, pending)) --> parse_optional_proof, parse_atom_silent([space, newline], zeroormore), "type", parse_atom_silent([space, newline], oneormore), parse_atom([alpha_numeric, under_score], oneormore, TypeId_Atom), parse_atom_silent([space, newline], zeroormore), "=", parse_atom_silent([space, newline], zeroormore), "pending", parse_atom_silent([space, newline], zeroormore), ";", parse_atom_silent([space, newline], zeroormore), !. % Type range. parse_declarations_item(type(TypeId_Atom, range(Lower_Term, Upper_Term))) --> parse_optional_proof, parse_atom_silent([space, newline], zeroormore), "type", parse_atom_silent([space, newline], oneormore), parse_atom([alpha_numeric, under_score], oneormore, TypeId_Atom), parse_atom_silent([space, newline], zeroormore), "=", parse_atom_silent([space, newline], zeroormore), parse_possibly_signed_atom([alpha_numeric, under_score], Lower_Term), parse_atom_silent([space, newline], zeroormore), "..", parse_atom_silent([space, newline], zeroormore), parse_possibly_signed_atom([alpha_numeric, under_score], Upper_Term), parse_atom_silent([space, newline], zeroormore), ";", parse_atom_silent([space, newline], zeroormore), !. % Type array. % type issystemt = array [keystore__interface__returnvaluet] of boolean; parse_declarations_item(type(TypeId_Atom, array(IndexTypeId_AtomList, ElementTypeId_Atom))) --> parse_optional_proof, parse_atom_silent([space, newline], zeroormore), "type", parse_atom_silent([space, newline], oneormore), parse_atom([alpha_numeric, under_score], oneormore, TypeId_Atom), parse_atom_silent([space, newline], zeroormore), "=", parse_atom_silent([space, newline], zeroormore), "array", parse_atom_silent([space, newline], zeroormore), "[", parse_atom_silent([space, newline], zeroormore), parse_char_sep_atom_list([alpha_numeric, under_score], [space, newline], ',', IndexTypeId_AtomList), parse_atom_silent([space, newline], zeroormore), "]", parse_atom_silent([space, newline], zeroormore), "of", parse_atom_silent([space, newline], oneormore), parse_atom([alpha_numeric, under_score], oneormore, ElementTypeId_Atom), parse_atom_silent([space, newline], zeroormore), ";", parse_atom_silent([space, newline], zeroormore), !. % Type enumeration. 
% type p__colour = (p__red, p__green, p__blue, p__purple); parse_declarations_item(type(TypeId_Atom, enumeration(EnumId_AtomList))) --> parse_optional_proof, parse_atom_silent([space, newline], zeroormore), "type", parse_atom_silent([space, newline], oneormore), parse_atom([alpha_numeric, under_score], oneormore, TypeId_Atom), parse_atom_silent([space, newline], zeroormore), "=", parse_atom_silent([space, newline], zeroormore), "(", parse_atom_silent([space, newline], zeroormore), parse_char_sep_atom_list([alpha_numeric, under_score], [space, newline], ',', EnumId_AtomList), parse_atom_silent([space, newline], zeroormore), ")", parse_atom_silent([space, newline], zeroormore), ";", parse_atom_silent([space, newline], zeroormore), !. % Type record. % type cert__id__contentst = record % inherit : cert__contentst % end; % type p__r = record % f1 : integer; % f2 : p__colour % end; parse_declarations_item(type(TypeId_Atom, record(FieldList))) --> parse_optional_proof, parse_atom_silent([space, newline], zeroormore), "type", parse_atom_silent([space, newline], oneormore), parse_atom([alpha_numeric, under_score], oneormore, TypeId_Atom), parse_atom_silent([space, newline], zeroormore), "=", parse_atom_silent([space, newline], zeroormore), "record", parse_atom_silent([space, newline], oneormore), parse_record_fields(FieldList), parse_atom_silent([space, newline], zeroormore), "end;", parse_atom_silent([space, newline], zeroormore), !. % Type sequence. parse_declarations_item(type(TypeId_Atom, sequence(ElementTypeId_Atom))) --> parse_optional_proof, parse_atom_silent([space, newline], zeroormore), "type", parse_atom_silent([space, newline], oneormore), parse_atom([alpha_numeric, under_score], oneormore, TypeId_Atom), parse_atom_silent([space, newline], zeroormore), "=", parse_atom_silent([space, newline], zeroormore), "sequence", parse_atom_silent([space, newline], oneormore), "of", parse_atom_silent([space, newline], oneormore), parse_atom([alpha_numeric, under_score], oneormore, ElementTypeId_Atom), parse_atom_silent([space, newline], zeroormore), ";", parse_atom_silent([space, newline], zeroormore), !. % Type set. parse_declarations_item(type(TypeId_Atom, set(ElementTypeId_Atom))) --> parse_optional_proof, parse_atom_silent([space, newline], zeroormore), "type", parse_atom_silent([space, newline], oneormore), parse_atom([alpha_numeric, under_score], oneormore, TypeId_Atom), parse_atom_silent([space, newline], zeroormore), "=", parse_atom_silent([space, newline], zeroormore), "set", parse_atom_silent([space, newline], oneormore), "of", parse_atom_silent([space, newline], oneormore), parse_atom([alpha_numeric, under_score], oneormore, ElementTypeId_Atom), parse_atom_silent([space, newline], zeroormore), ";", parse_atom_silent([space, newline], zeroormore), !. % Type alias. (type declared to be as other type). % type ada__real_time__time_span = integer; parse_declarations_item(type(TypeId_Atom, alias(AliasTypeId_Atom))) --> parse_optional_proof, parse_atom_silent([space, newline], zeroormore), "type", parse_atom_silent([space, newline], oneormore), parse_atom([alpha_numeric, under_score], oneormore, TypeId_Atom), parse_atom_silent([space, newline], zeroormore), "=", parse_atom_silent([space, newline], zeroormore), parse_atom([alpha_numeric, under_score], oneormore, AliasTypeId_Atom), parse_atom_silent([space, newline], zeroormore), ";", parse_atom_silent([space, newline], zeroormore), !. % Function. 
(with arguments) % function round__(real) : integer; parse_declarations_item(function(TypeId_Atom, Function_Atom, ArgTypeId_AtomList)) --> parse_optional_proof, parse_atom_silent([space, newline], zeroormore), "function", parse_atom_silent([space, newline], oneormore), parse_atom([alpha_numeric, under_score], oneormore, Function_Atom), parse_atom_silent([space, newline], zeroormore), "(", parse_atom_silent([space, newline], zeroormore), parse_char_sep_atom_list([alpha_numeric, under_score], [space, newline], ',', ArgTypeId_AtomList), parse_atom_silent([space, newline], zeroormore), ")", parse_atom_silent([space, newline], zeroormore), ":", parse_atom_silent([space, newline], zeroormore), parse_atom([alpha_numeric, under_score], oneormore, TypeId_Atom), parse_atom_silent([space, newline], zeroormore), ";", parse_atom_silent([space, newline], zeroormore), !. % Function. (without arguments) parse_declarations_item(function(TypeId_Atom, Function_Atom, [])) --> parse_optional_proof, parse_atom_silent([space, newline], zeroormore), "function", parse_atom_silent([space, newline], oneormore), parse_atom([alpha_numeric, under_score], oneormore, Function_Atom), parse_atom_silent([space, newline], zeroormore), ":", parse_atom_silent([space, newline], zeroormore), parse_atom([alpha_numeric, under_score], oneormore, TypeId_Atom), parse_atom_silent([space, newline], zeroormore), ";", parse_atom_silent([space, newline], zeroormore), !. % End. parse_declarations_item(end) --> "end", parse_atom_silent([space, newline], zeroormore), ";", parse_atom_silent([space, newline], zeroormore), !. % 'finish' is accepted, with any arguments following. parse_declarations_item(end) --> parse_other_terminal, parse_nothing_to_all, ";", parse_atom_silent([space, newline], zeroormore), %Stop parsing the file now. parse_all_to_nothing, !. % 'start' is accepted as the token at the end of a line just like ';'. parse_declarations_item(end) --> parse_other_terminal, parse_nothing_to_all, "start", parse_atom_silent([space, newline], zeroormore), parse_all_to_nothing, !. % End of file is accepted as the token at the end of a line. parse_declarations_item(end) --> parse_other_terminal, parse_all_to_nothing, parse_atom_silent([space, newline], zeroormore), parse_all_to_nothing, !. parse_declarations_item(ignored) --> parse_atom_silent([space, newline], zeroormore), parse_atom_silent([not(semicolon)], oneormore), parse_atom_silent([space, newline], zeroormore), ";", parse_atom_silent([space, newline], zeroormore), !. %------------------------------------------------------------------------------- parse_other_terminal --> "start". parse_other_terminal --> "finish". parse_other_terminal --> "end". parse_other_terminal --> "enddeclarations". %------------------------------------------------------------------------------- parse_optional_proof --> "proof", parse_atom_silent([space, newline], oneormore), !. parse_optional_proof --> !. %------------------------------------------------------------------------------- parse_declarations_comment_contents --> [Char], {\+ Char='}'}, parse_declarations_comment_contents. parse_declarations_comment_contents --> !. %------------------------------------------------------------------------------- % Continuation. 
parse_record_fields([packed_field(TypeId_Atom, FieldId_AtomList) | T_FieldList]) --> parse_char_sep_atom_list([alpha_numeric, under_score], [space, newline], ',', FieldId_AtomList), parse_atom_silent([space, newline], zeroormore), ":", parse_atom_silent([space, newline], zeroormore), parse_atom([alpha_numeric, under_score], oneormore, TypeId_Atom), parse_atom_silent([space, newline], zeroormore), ";", parse_atom_silent([space, newline], zeroormore), !, parse_record_fields(T_FieldList), !. % Final. parse_record_fields([packed_field(TypeId_Atom, FieldId_AtomList)]) --> parse_char_sep_atom_list([alpha_numeric, under_score], [space, newline], ',', FieldId_AtomList), parse_atom_silent([space, newline], zeroormore), ":", parse_atom_silent([space, newline], zeroormore), parse_atom([alpha_numeric, under_score], oneormore, TypeId_Atom), parse_atom_silent([space, newline], zeroormore), !. %=============================================================================== % unpack_declarations_items(+Packed_FDLList, -Unpacked_FDLList). %------------------------------------------------------------------------------- % The declarations items are transformed from closely reflecting their original % potentially packed presentation form (Packed_FDLList) into a more % flattened unpacked form (Unpacked_FDLList). %=============================================================================== unpack_declarations_items(Packed_FDLList, Unpacked_FDLList):- unpack_declarations_items_x(Packed_FDLList, Unpacked_FDLListList), flatten_list(Unpacked_FDLListList, Unpacked_FDLList), !. %------------------------------------------------------------------------------- unpack_declarations_items_x([], []):- !. unpack_declarations_items_x([packed_variable(TypeId_Atom, VarId_AtomList) | T_Packed_FDLList], [H_Unpacked_FDLList | T_Unpacked_FDLListList]):- unpack_variables(TypeId_Atom, VarId_AtomList, H_Unpacked_FDLList), unpack_declarations_items_x(T_Packed_FDLList, T_Unpacked_FDLListList). unpack_declarations_items_x([type(TypeId_Atom, record(FieldList)) | T_Packed_FDLList], [type(TypeId_Atom, record(Unpacked_FieldList)) | T_Unpacked_FDLListList]):- unpack_record_fields(FieldList, Unpacked_FieldList), unpack_declarations_items_x(T_Packed_FDLList, T_Unpacked_FDLListList). % Just copy over all other forms. unpack_declarations_items_x([H_Packed__Unpacked__FDL | T_Packed_FDLList], [H_Packed__Unpacked__FDL | T_Unpacked_FDLListList]):- unpack_declarations_items_x(T_Packed_FDLList, T_Unpacked_FDLListList). %------------------------------------------------------------------------------- unpack_variables(_TypeId_Atom, [], []):- !. unpack_variables(TypeId_Atom, [H_VarId_Atom | T_VarId_AtomList], [variable(TypeId_Atom, H_VarId_Atom) | T_Unpacked_FDLList]):- unpack_variables(TypeId_Atom, T_VarId_AtomList, T_Unpacked_FDLList). %------------------------------------------------------------------------------- unpack_record_fields(Packed_FieldList, Unpacked_FieldList):- unpack_record_fields_x(Packed_FieldList, Unpacked_FieldListList), flatten_list(Unpacked_FieldListList, Unpacked_FieldList), !. %------------------------------------------------------------------------------- unpack_record_fields_x([], []):- !. unpack_record_fields_x([packed_field(TypeId_Atom, FieldId_AtomList) | T_Packed_FieldList], [H_Unpacked_FieldList | T_Unpacked_FieldListList]):- unpack_record_fields_xx(TypeId_Atom, FieldId_AtomList, H_Unpacked_FieldList), unpack_record_fields_x(T_Packed_FieldList, T_Unpacked_FieldListList). 
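%-------------------------------------------------------------------------------
% Illustrative sketch (hypothetical input, not part of the original source): the
% intended effect of unpack_declarations_items/2, assuming flatten_list/2
% flattens one level of list nesting. A packed variable declaration and a packed
% record field are unpacked into one item per identifier:
%
%   ?- unpack_declarations_items(
%          [packed_variable(integer, [x, y]),
%           type(r, record([packed_field(integer, [f1, f2])]))],
%          Unpacked).
%   Unpacked = [variable(integer, x),
%               variable(integer, y),
%               type(r, record([field(integer, f1), field(integer, f2)]))]
%-------------------------------------------------------------------------------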
%------------------------------------------------------------------------------- unpack_record_fields_xx(_TypeId_Atom, [], []):- !. unpack_record_fields_xx(TypeId_Atom, [H_FieldId_Atom | T_FieldId_AtomList], [field(TypeId_Atom, H_FieldId_Atom)| T_Unpacked_FieldList]):- unpack_record_fields_xx(TypeId_Atom, T_FieldId_AtomList, T_Unpacked_FieldList). %=============================================================================== % standardise_declarations_items(+FDLList, -Standardised_FDLList). %------------------------------------------------------------------------------- % The provided declarations items (FDLList) are transformed into a standard % form. %=============================================================================== standardise_declarations_items([], []):- !. standardise_declarations_items([H_FDL | T_FDLList], [H_Standardised_FDL | T_Standardised_FDLList]):- standardise_declarations_item(H_FDL, H_Standardised_FDL), standardise_declarations_items(T_FDLList, T_Standardised_FDLList). %------------------------------------------------------------------------------- standardise_declarations_item(Atom, Standard_Atom):- atomic(Atom), standardise_atom(Atom, Standard_Atom), !. standardise_declarations_item(FunctorN, StandardFunctorN):- FunctorN =.. [Functor_Atom | Args_FunctorNList], standardise_atom(Functor_Atom, StandardFunctor_Atom), standardise_declarations_item_x(Args_FunctorNList, StandardArgs_FunctorNList), StandardFunctorN =.. [StandardFunctor_Atom | StandardArgs_FunctorNList], !. %------------------------------------------------------------------------------- standardise_declarations_item_x([], []):- !. standardise_declarations_item_x([H_Args_FunctorN | T_Args_FunctorNList], [H_StandardArgs_FunctorN | T_StandardArgs_FunctorNList]):- standardise_declarations_item(H_Args_FunctorN , H_StandardArgs_FunctorN), standardise_declarations_item_x(T_Args_FunctorNList, T_StandardArgs_FunctorNList). %------------------------------------------------------------------------------- standardise_atom(Atom, Atom):- integer(Atom), !. standardise_atom(Standard_Atom, Standard_Atom):- get_provenance_framework(spark), !. standardise_atom(Atom, Standard_Atom):- get_provenance_framework(pascal), atom_to_lower_case(Atom, LowerCase_Atom), trim_atom(LowerCase_Atom, 24, Standard_Atom), !. %=============================================================================== % process_declarations_items(+FDLList). %------------------------------------------------------------------------------- % The provided declarations items (FDLList) are processed, storing entries % into the database. %=============================================================================== process_declarations_items([]):- !. process_declarations_items([H_FDL | T_FDLList]):- process_declarations_item(H_FDL), process_declarations_items(T_FDLList). %------------------------------------------------------------------------------- process_declarations_item(comment):- !. process_declarations_item(title(_Title)):- !. process_declarations_item(end):- !. process_declarations_item(ignored):- !. process_declarations_item(variable(TypeId_Atom, VarId_Atom)):- find_root_type(TypeId_Atom, CoreTypeId_Atom), add_declarations_variable(CoreTypeId_Atom, VarId_Atom), process_identifier(VarId_Atom), !. process_declarations_item(constant(ConstId_Atom, TypeId_Atom)):- find_root_type(TypeId_Atom, CoreTypeId_Atom), add_declarations_constant(CoreTypeId_Atom, ConstId_Atom), process_identifier(ConstId_Atom), !. 
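%-------------------------------------------------------------------------------
% Illustrative note (hypothetical declarations, not from the original source):
% variables and constants are stored against the core of their declared type,
% where find_root_type/2 (defined further below) resolves a recorded alias.
% For example, if t2 has already been stored as alias(integer), then processing
%
%   var v : t2;
%
% i.e. the item variable(t2, v), would be expected to result in
% add_declarations_variable(integer, v), with the identifier v recorded as used.
%-------------------------------------------------------------------------------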
process_declarations_item(type(TypeId_Atom, pending)):- add_declarations_type(TypeId_Atom, abstract), process_identifier(TypeId_Atom), !. process_declarations_item(type(TypeId_Atom, range(Lower_Int, Upper_Int))):- user:checktype(Lower_Int, RangeTypeId_Atom), user:checktype(Upper_Int, RangeTypeId_Atom), !, add_declarations_type(TypeId_Atom, alias(RangeTypeId_Atom)), process_identifier(TypeId_Atom), !. process_declarations_item(type(TypeId_Atom, array(IndexTypeId_AtomList, ElementTypeId_Atom))):- findall(CoreIndexTypeId_Atom, (member(IndexTypeId_Atom, IndexTypeId_AtomList), find_root_type(IndexTypeId_Atom, CoreIndexTypeId_Atom)), CoreIndexTypeId_AtomList), find_root_type(ElementTypeId_Atom, CoreElementTypeId_Atom), get_declarations_type(OtherTypeId_Atom, array(CoreIndexTypeId_AtomList, CoreElementTypeId_Atom)), \+ OtherTypeId_Atom = TypeId_Atom, add_declarations_type(TypeId_Atom, alias(OtherTypeId_Atom)), process_identifier(TypeId_Atom), atom_concat('mk__', TypeId_Atom, Function_Atom), add_declarations_unbounded_function(TypeId_Atom, Function_Atom, mk_array), !. process_declarations_item(type(TypeId_Atom, array(IndexTypeId_AtomList, ElementTypeId_Atom))):- findall(CoreIndexTypeId_Atom, (member(IndexTypeId_Atom, IndexTypeId_AtomList), find_root_type(IndexTypeId_Atom, CoreIndexTypeId_Atom)), CoreIndexTypeId_AtomList), find_root_type(ElementTypeId_Atom, CoreElementTypeId_Atom), add_declarations_type(TypeId_Atom, array(CoreIndexTypeId_AtomList, CoreElementTypeId_Atom)), process_identifier(TypeId_Atom), atom_concat('mk__', TypeId_Atom, Function_Atom), add_declarations_unbounded_function(TypeId_Atom, Function_Atom, mk_array), !. process_declarations_item(type(TypeId_Atom, enumeration(EnumId_AtomList))):- process_identifier(TypeId_Atom), process_declarations_enumerations(TypeId_Atom, EnumId_AtomList), add_declarations_type(TypeId_Atom, enumeration(EnumId_AtomList)), !. process_declarations_item(type(TypeId_Atom, record(FieldList))):- findall(field(CoreFieldTypeId_Atom, FieldId_Atom), (member(field(FieldTypeId_Atom, FieldId_Atom), FieldList), find_root_type(FieldTypeId_Atom, CoreFieldTypeId_Atom)), CoreFieldList), add_declarations_type(TypeId_Atom, record(CoreFieldList)), process_identifier(TypeId_Atom), process_declarations_record_fields(TypeId_Atom, FieldList), atom_concat('mk__', TypeId_Atom, Function_Atom), add_declarations_unbounded_function(TypeId_Atom, Function_Atom, mk_record), !. process_declarations_item(type(TypeId_Atom, sequence(ElementTypeId_Atom))):- find_root_type(ElementTypeId_Atom, CoreElementTypeId_Atom), get_declarations_type(OtherTypeId_Atom, sequence(CoreElementTypeId_Atom)), \+ OtherTypeId_Atom = TypeId_Atom, add_declarations_type(TypeId_Atom, alias(OtherTypeId_Atom)), process_identifier(TypeId_Atom), op(20,fy,TypeId_Atom), !. process_declarations_item(type(TypeId_Atom, sequence(ElementTypeId_Atom))):- find_root_type(ElementTypeId_Atom, CoreElementTypeId_Atom), add_declarations_type(TypeId_Atom, sequence(CoreElementTypeId_Atom)), process_identifier(TypeId_Atom), op(20,fy,TypeId_Atom), !. process_declarations_item(type(TypeId_Atom, set(ElementTypeId_Atom))):- find_root_type(ElementTypeId_Atom, CoreElementTypeId_Atom), get_declarations_type(OtherTypeId_Atom, set(CoreElementTypeId_Atom)), \+ OtherTypeId_Atom = TypeId_Atom, add_declarations_type(TypeId_Atom, alias(OtherTypeId_Atom)), process_identifier(TypeId_Atom), op(20,fy,TypeId_Atom), !. 
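%-------------------------------------------------------------------------------
% Illustrative sketch (hypothetical declarations, not from the original source):
% when a second composite type has exactly the same core structure as one
% already stored, the clauses above record it as an alias of the earlier type
% rather than as a new type. For instance, given the FDL declarations
%
%   type t1 = array [indext] of integer;
%   type t2 = array [indext] of integer;
%
% processing t1 stores add_declarations_type(t1, array([indext], integer)) and
% registers the constructor mk__t1, while processing t2 only stores
% add_declarations_type(t2, alias(t1)) together with its own mk__t2 constructor.
%-------------------------------------------------------------------------------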
process_declarations_item(type(TypeId_Atom, set(ElementTypeId_Atom))):- find_root_type(ElementTypeId_Atom, CoreElementTypeId_Atom), add_declarations_type(TypeId_Atom, set(CoreElementTypeId_Atom)), process_identifier(TypeId_Atom), op(20,fy,TypeId_Atom), !. process_declarations_item(type(TypeId_Atom, alias(AliasTypeId_Atom))):- add_declarations_type(TypeId_Atom, alias(AliasTypeId_Atom)), process_identifier(TypeId_Atom), !. process_declarations_item(function(ReturnTypeId_Atom, Function_Atom, [])):- find_root_type(ReturnTypeId_Atom, CoreReturnTypeId_Atom), add_declarations_constant(CoreReturnTypeId_Atom, Function_Atom), process_identifier(Function_Atom), !. process_declarations_item(function(ReturnTypeId_Atom, Function_Atom, ArgTypeId_AtomList)):- findall(CoreArgTypeId_Atom, (member(ArgTypeId_Atom, ArgTypeId_AtomList), find_root_type(ArgTypeId_Atom, CoreArgTypeId_Atom)), CoreArgTypeId_AtomList), find_root_type(ReturnTypeId_Atom, CoreReturnTypeId_Atom), add_declarations_function(CoreReturnTypeId_Atom, Function_Atom, CoreArgTypeId_AtomList), process_identifier(Function_Atom), !. process_declarations_item(Standardised_FDL):- throw_error('Unexpected declarations construct: ~w\n', Standardised_FDL), !. %------------------------------------------------------------------------------- process_declarations_enumerations(_TypeId_Atom, []):- !. process_declarations_enumerations(TypeId_Atom, [H_EnumId_Atom | T_EnumId_AtomList]):- add_declarations_constant(TypeId_Atom, H_EnumId_Atom), process_identifier(H_EnumId_Atom), process_declarations_enumerations(TypeId_Atom, T_EnumId_AtomList). %------------------------------------------------------------------------------- process_declarations_record_fields(_RecordTypeId_Atom, []):- !. process_declarations_record_fields(RecordTypeId_Atom, [field(FieldTypeId_Atom, FieldId_Atom)]):- atom_concat('upf_', FieldId_Atom, UpdateFieldFunction_Atom), atom_concat('fld_', FieldId_Atom, AccessFieldFunction_Atom), standardise_declarations_items([UpdateFieldFunction_Atom, AccessFieldFunction_Atom], [StandardisedUpdateFieldFunction_Atom, StandardisedAccessFieldFunction_Atom]), process_identifier_records(StandardisedUpdateFieldFunction_Atom), process_identifier_records(StandardisedAccessFieldFunction_Atom), find_root_type(FieldTypeId_Atom, CoreFieldTypeId_Atom), add_declarations_function(RecordTypeId_Atom, StandardisedUpdateFieldFunction_Atom, [RecordTypeId_Atom, CoreFieldTypeId_Atom]), add_declarations_function(CoreFieldTypeId_Atom, StandardisedAccessFieldFunction_Atom, [RecordTypeId_Atom]), current_record_field_number(UniqueFieldId_Int), UninstantiatedUpdate_FunctorN=..[StandardisedUpdateFieldFunction_Atom, U1_Var, U2_Var], add_declarations_record_function(UniqueFieldId_Int, UninstantiatedUpdate_FunctorN, update, FieldId_Atom, [U1_Var, U2_Var], RecordTypeId_Atom), UninstantiatedAccess_FunctorN=..[StandardisedAccessFieldFunction_Atom, A1_Var], add_declarations_record_function(UniqueFieldId_Int, UninstantiatedAccess_FunctorN, access, FieldId_Atom, [A1_Var], RecordTypeId_Atom), !. 
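%-------------------------------------------------------------------------------
% Illustrative sketch (hypothetical record, not from the original source): for a
% declaration such as
%
%   type p__r = record
%      f1 : integer;
%      f2 : p__colour
%   end;
%
% process_declarations_record_fields/2 registers, for each field, an update
% function and an access function, e.g. for f1:
%
%   add_declarations_function(p__r,    upf_f1, [p__r, integer])
%   add_declarations_function(integer, fld_f1, [p__r])
%
% together with record_function entries that let later clauses recognise
% fld_f1(...) and upf_f1(...) terms without second-order pattern matching.
%-------------------------------------------------------------------------------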
process_declarations_record_fields(RecordTypeId_Atom, [field(FieldTypeId_Atom, FieldId_Atom) | T_FieldList]):- atom_concat('upf_', FieldId_Atom, UpdateFieldFunction_Atom), atom_concat('fld_', FieldId_Atom, AccessFieldFunction_Atom), standardise_declarations_items([UpdateFieldFunction_Atom, AccessFieldFunction_Atom], [StandardisedUpdateFieldFunction_Atom, StandardisedAccessFieldFunction_Atom]), process_identifier_records(StandardisedUpdateFieldFunction_Atom), process_identifier_records(StandardisedAccessFieldFunction_Atom), find_root_type(FieldTypeId_Atom, CoreFieldTypeId_Atom), add_declarations_function(RecordTypeId_Atom, StandardisedUpdateFieldFunction_Atom, [RecordTypeId_Atom, CoreFieldTypeId_Atom]), add_declarations_function(CoreFieldTypeId_Atom, StandardisedAccessFieldFunction_Atom, [RecordTypeId_Atom]), current_record_field_number(UniqueFieldId_Int), UninstantiatedUpdate_FunctorN=..[StandardisedUpdateFieldFunction_Atom, U1_Var, U2_Var], add_declarations_record_function(UniqueFieldId_Int, UninstantiatedUpdate_FunctorN, update, FieldId_Atom, [U1_Var, U2_Var], RecordTypeId_Atom), UninstantiatedAccess_FunctorN=..[StandardisedAccessFieldFunction_Atom, A1_Var], add_declarations_record_function(UniqueFieldId_Int, UninstantiatedAccess_FunctorN, access, FieldId_Atom, [A1_Var], RecordTypeId_Atom), increment_current_record_field_number(_UniqueFieldId_Int), process_declarations_record_fields(RecordTypeId_Atom, T_FieldList). %------------------------------------------------------------------------------- increment_current_record_field_number(M) :- retract(current_record_field_number(N)), M is N+1, asserta(current_record_field_number(M)), !. %------------------------------------------------------------------------------- % Return core alias or self in none. find_root_type(TypeId_Atom, AliasTypeId_Atom) :- get_declarations_type(TypeId_Atom, alias(AliasTypeId_Atom)), !. find_root_type(TypeId_Atom, TypeId_Atom) :- !. %=============================================================================== % process_identifier(+Identifier_Atom). % process_identifier_records(+Identifier_Atom). %=============================================================================== process_identifier(Identifier_Atom):- process_identifier_x(Identifier_Atom, not_record), !. process_identifier_records(Identifier_Atom):- process_identifier_x(Identifier_Atom, is_record), !. save_used_identifier(NV, _Whatever):- process_identifier_x(NV, not_record), !. %------------------------------------------------------------------------------- % Check to see if identifier has already been seen. process_identifier_x(Identifier_Atom, not_record):- get_declarations_used_identifier(Identifier_Atom), !, implode_separator_content_list('', ['Identifier declared multiple times - ', Identifier_Atom, '\n', '\n*** ERROR - ', 'CANNOT CONTINUE: Simplification terminated.\n'], Content_Atom), throw_error(Content_Atom, []). process_identifier_x(Identifier_Atom, _Any):- built_in_ident(Identifier_Atom), !, implode_separator_content_list('', ['Identifier reserved or already predeclared - ', Identifier_Atom, '\n', '\n*** ERROR - ', 'CANNOT CONTINUE: Simplification terminated.\n'], Content_Atom), throw_error(Content_Atom, []). process_identifier_x(Identifier_Atom, _Any):- add_declarations_used_identifier(Identifier_Atom), !. %------------------------------------------------------------------------------- built_in_ident(update). built_in_ident(element). built_in_ident(set). built_in_ident(succ). built_in_ident(pred). built_in_ident(first). 
built_in_ident(last). built_in_ident(nonfirst). built_in_ident(nonlast). built_in_ident(abs). built_in_ident(sqr). built_in_ident(odd). built_in_ident(div). built_in_ident(mod). built_in_ident(subset_of). built_in_ident(strict_subset_of). built_in_ident(true). built_in_ident(false). built_in_ident(integer). built_in_ident(boolean). built_in_ident(real). built_in_ident(in). built_in_ident(not_in). built_in_ident(and). built_in_ident(or). built_in_ident(not). built_in_ident(xor). built_in_ident(rem). :- set_prolog_flag(double_quotes, codes). %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/inferenc2.pro0000644000175000017500000020207311753202337017206 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Provide inference facilities. %############################################################################### %=============================================================================== % infer(+Formula, -HypList). %------------------------------------------------------------------------------- % Try to infer Formula. If it succeeds, HypList is instantiated to the % list of hypotheses which were used to perform the inference. The % inference is achieved through a collection of heuristic strategies. %=============================================================================== infer(A, Hs) :- retractall(inference_depth_limit(_,_)), asserta(inference_depth_limit(main, 5)), /* limit on recursion */ asserta(inference_depth_limit(infrule, 3)), /* for a fresh goal */ retractall(used(_)), retractall(buffered_used_fact(_,_,_)), !, ( infer_subgoal(A, Hs) ; allow_new_strategies, is_inequality(A, Left, Right, _Op), !, find_mutual_types(Left, Right, Type), try_new_numeric_strategies(A, Type, Hs) ), !. %------------------------------------------------------------------------------- infer_subgoal(A, _) :- var(A), !, fail. infer_subgoal(A, Hs) :- ( simplification_is_on, simplify(A, B), !, ( see_if_can_infer(B, Hs) ; standardise_in_infer(on), norm_typed_expr(A, boolean, C), see_if_can_infer(C, Hs) ) ; see_if_can_infer(A, Hs) ; standardise_in_infer(on), norm_typed_expr(A, boolean, B), A \= B, see_if_can_infer(B, Hs) ), !. %------------------------------------------------------------------------------- see_if_can_infer(X, Hs) :- could_infer(X, Hs), !. see_if_can_infer(X, _) :- could_not_infer(X), !, fail. 
see_if_can_infer(X, Hs) :- do_infer(X, Hs), assertz(could_infer(X, Hs)), !. see_if_can_infer(X, _) :- assertz(could_not_infer(X)), !, fail. %------------------------------------------------------------------------------- do_infer(A=A, []) :- !. do_infer(A<>A, _) :- !, fail. do_infer([H1|T1]=[H2|T2], Hs) :- do_infer(H1=H2, Hx), do_infer(T1=T2, Hy), !, merge_sort(Hx, Hy, Hs), !. do_infer([H1|_]<>[H2|_], Hs) :- do_infer(H1<>H2, Hs), !. do_infer([_|T1]<>[_|T2], Hs) :- do_infer(T1<>T2, Hs), !. do_infer([]<>[_|_], []) :- !. do_infer([_|_]<>[], []) :- !. do_infer((set [])<>(set [_|_]), []) :- !. do_infer((set [_|_])<>(set []), []) :- !. do_infer(true, []) :- !. do_infer(not false, []) :- !. do_infer(A, [N]) :- get_hyp(A, _, N), !. do_infer(A and B, Hs) :- do_infer(A, Ha), do_infer(B, Hb), !, merge_sort(Ha, Hb, Hs), !. do_infer(A or B, Hs) :- ( do_infer(A, Hs) ; do_infer(B, Hs) ), !. do_infer(A -> B, Hs) :- ( do_infer(B, Hs) ; do_infer(not A, Hs) ; new_strategies_are_allowed, try_new_logic_strategies(A -> B, Hs) ), !. do_infer(A <-> B, Hs) :- do_infer(A -> B, Ha), do_infer(B -> A, Hb), !, merge_sort(Ha, Hb, Hs), !. do_infer(A <-> B, Hs) :- new_strategies_are_allowed, try_new_logic_strategies(A <-> B, Hs). do_infer(not not A, Hs) :- do_infer(A, Hs), !. do_infer(not A, Hs) :- neg(A, B), B\=(not A), do_infer(B, Hs), !. do_infer(Inequality, Hs) :- is_inequality(Inequality, A, B, Op), find_mutual_types(A, B, T), try_to_infer(Op, A, B, T, Hs), !. do_infer(E in (set [X|Y]), Hs) :- ( do_infer(E=X, Hs) ; do_infer(E in (set Y), Hs) ), !. do_infer(E in X \/ Y, Hs) :- ( do_infer(E in X, Hs) ; do_infer(E in Y, Hs) ), !. do_infer(E in X /\ Y, Hs) :- do_infer(E in X, Ha), do_infer(E in Y, Hb), !, merge_sort(Ha, Hb, Hs), !. do_infer(E in X \ Y, Hs) :- do_infer(E in X, Ha), do_infer(E not_in Y, Hb), !, merge_sort(Ha, Hb, Hs), !. do_infer(_E not_in (set []), []) :- !. do_infer(E not_in (set [X|Y]), Hs) :- do_infer(E<>X, Ha), do_infer(E not_in (set Y), Hb), !, merge_sort(Ha, Hb, Hs), !. do_infer(E not_in X \/ Y, Hs) :- do_infer(E not_in X, Ha), do_infer(E not_in Y, Hb), !, merge_sort(Ha, Hb, Hs), !. do_infer(E not_in X /\ Y, Hs) :- ( do_infer(E not_in X, Hs) ; do_infer(E not_in Y, Hs) ), !. do_infer(E not_in X \ Y, Hs) :- ( do_infer(E not_in X, Hs) ; do_infer(E in Y, Hs) ), !. do_infer(X subset_of Y, Hs) :- do_infer(X = Y, Hs), !. do_infer((set []) subset_of _X, []) :- !. do_infer(X \ _Y subset_of Z, Hs) :- do_infer(X subset_of Z, Hs), !. do_infer(X \ Y subset_of X \ Z, Hs) :- ( do_infer(Z subset_of Y, Hs) ; do_infer(X /\ Z subset_of X /\ Y, Hs) ), !. do_infer(X \/ Y subset_of X \/ Z, Hs) :- do_infer(Y subset_of Z, Hs), !. do_infer(Y \/ X subset_of Z \/ X, Hs) :- do_infer(Y subset_of Z, Hs), !. do_infer(Y \/ X subset_of X \/ Z, Hs) :- do_infer(Y subset_of Z, Hs), !. do_infer(X \/ Y subset_of Z \/ X, Hs) :- do_infer(Y subset_of Z, Hs), !. do_infer(X /\ Y subset_of X /\ Z, Hs) :- do_infer(Y subset_of Z, Hs), !. do_infer(Y /\ X subset_of Z /\ X, Hs) :- do_infer(Y subset_of Z, Hs), !. do_infer(Y /\ X subset_of X /\ Z, Hs) :- do_infer(Y subset_of Z, Hs), !. do_infer(X /\ Y subset_of Z /\ X, Hs) :- do_infer(Y subset_of Z, Hs), !. do_infer(X /\ Y subset_of X \/ Y, []) :- !. do_infer(Y /\ X subset_of X \/ Y, []) :- !. do_infer(X subset_of Y \/ Z, Hs) :- ( do_infer(X subset_of Y, Hs) ; do_infer(X subset_of Z, Hs) ), !. do_infer(X subset_of Y /\ Z, Hs) :- do_infer(X subset_of Y, Ha), do_infer(X subset_of Z, Hb), !, merge_sort(Ha, Hb, Hs), !. 
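%-------------------------------------------------------------------------------
% Illustrative note (hypothetical hypothesis, not from the original source): in
% all of these clauses Hs is the sorted list of hypothesis numbers used, and
% compound goals combine the lists returned for their subgoals. For example,
% with hypothesis H2: s subset_of t, one would expect
%
%   ?- do_infer(s subset_of (t \/ u), Hs).
%   Hs = [2]
%
% since the goal is reduced to s subset_of t, which is discharged directly by
% hypothesis 2 via get_hyp/3.
%-------------------------------------------------------------------------------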
do_infer(X /\ Y subset_of Z, Hs) :- ( do_infer(X subset_of Z, Hs) ; do_infer(Y subset_of Z, Hs) ), !. do_infer((set X) subset_of (set Y), []) :- is_subset_of(X, Y), !. do_infer((set []) strict_subset_of X, Hs) :- set_infrule(_E in X, Hs), !. do_infer(X \ Y strict_subset_of Z, Hs) :- ( do_infer(X strict_subset_of Z, Hs) ; do_infer(X subset_of Z, Ha), set_infer(Y /\ Z <> (set []), Hb), !, merge_sort(Ha, Hb, Hs) ), !. do_infer(X /\ Y strict_subset_of Z, Hs) :- ( do_infer(X strict_subset_of Z, Hs) ; do_infer(Y strict_subset_of Z, Hs) ), !. do_infer(X strict_subset_of Y \/ Z, Hs) :- ( do_infer(X strict_subset_of Y, Hs) ; do_infer(X strict_subset_of Z, Hs) ), !. do_infer((set X) strict_subset_of (set Y), []) :- is_strict_subset_of(X, Y), !. do_infer(first([H|_T]) = X, Hs) :- do_infer(H=X, Hs), !. do_infer(first([H|_T] @ _Z) = X, Hs) :- do_infer(H=X, Hs), !. do_infer(last([H|T]) = X, Hs) :- last([H|T],L), do_infer(L=X, Hs), !. do_infer(last(_F @ [H|T]) = X, Hs) :- last([H|T],L), do_infer(L=X, Hs), !. % Added to improve the handling of structured objects. % The fact get_forall_hyp(Formula, Side_Conditions, N) is a hypothesis % of the form (forall Side_Conditions, Formula). % The Simplifier needs to ensure, before trying to infer the side conditions, % that the formula is not one of the side conditions; Otherwise, the % Simplifier will goto into an infinite recursion. % All the clauses below involving a universal quantified hypothesis needs % to first check that Formula is not a member of Side_Conditions. do_infer(Formula, Hs) :- get_forall_hyp(Formula, Conditions, N), \+ member(Formula, Conditions), do_infer_side_conditions(Conditions, HCL), !, merge_sort([N], HCL, Hs). do_infer(X <= Y, Hs) :- get_forall_hyp(Y >= X, Conditions, N), \+ member(Y >= X, Conditions), \+ member(X <= Y, Conditions), do_infer_side_conditions(Conditions, HCL), !, merge_sort([N], HCL, Hs). do_infer(X >= Y, Hs) :- get_forall_hyp(Y <= X, Conditions, N), \+ member(Y <= X, Conditions), \+ member(X >= Y, Conditions), do_infer_side_conditions(Conditions, HCL), !, merge_sort([N], HCL, Hs). % Added to cope with cases of the form: % H: for_all(i, el(a, [i]) <= 100 % C: el(a, [n]) <= 2147483647 do_infer(X <= Y, Hs) :- int_or_enum_lit(Y, Which), ( % Know X <= Z (if Conditions hold) get_forall_hyp(X <= Z, Conditions, N) ; get_forall_hyp(Z >= X, Conditions, N) ), \+ member(X <= Z, Conditions), \+ member(Z >= X, Conditions), int_or_enum_lit(Z, Which), simplify(Z <= Y, true), /* and Z<=Y, therefore X<=Y */ do_infer_side_conditions(Conditions, HCL), !, merge_sort([N], HCL, Hs). do_infer(Y >= X, Hs) :- int_or_enum_lit(Y, Which), ( get_forall_hyp(X <= Z, Conditions, N) /* so know X<=Z (if Conditions hold) */ ; get_forall_hyp(Z >= X, Conditions, N) ), \+ member(X <= Z, Conditions), \+ member(Z >= X, Conditions), int_or_enum_lit(Z, Which), simplify(Z <= Y, true), /* and Z<=Y, therefore X<=Y, i.e. Y>=X */ do_infer_side_conditions(Conditions, HCL), !, merge_sort([N], HCL, Hs). do_infer(X <= Y, Hs) :- int_or_enum_lit(X, Which), ( get_forall_hyp(Z <= Y, Conditions, N) ; get_forall_hyp(Y >= Z, Conditions, N) ), \+ member(Z <= Y, Conditions), \+ member(Y >= Z, Conditions), int_or_enum_lit(Z, Which), simplify(X <= Z, true), /* and X<=Z, therefore X<=Y */ do_infer_side_conditions(Conditions, HCL), !, merge_sort([N], HCL, Hs). 
do_infer(Y >= X, Hs) :- int_or_enum_lit(X, Which), ( get_forall_hyp(Z <= Y, Conditions, N) ; get_forall_hyp(Y >= Z, Conditions, N) ), \+ member(Z <= Y, Conditions), \+ member(Y >= Z, Conditions), int_or_enum_lit(Z, Which), simplify(X <= Z, true), /* and X<=Z, therefore X<=Y, i.e. Y>=X */ do_infer_side_conditions(Conditions, HCL), !, merge_sort([N], HCL, Hs). % Added to cope with cases of the form: % H: for_all(i, el(a, [i]) <= 359 % C: el(a, [n]) * 10 <= 3599 do_infer(XtimesK <= Y, Hs) :- ( XtimesK = X * K ; XtimesK = K * X ), int(K), simplify(K > 0, true), /* Strictly positive K only */ int(Y), ( get_forall_hyp(X <= Z, Conditions, N) ; get_forall_hyp(Z >= X, Conditions, N) ), \+ member(X <= Z, Conditions), \+ member(Z >= X, Conditions), int(Z), simplify(Z * K <= Y, true), /* and Z*K<=Y, therefore X*K<=Y */ do_infer_side_conditions(Conditions, HCL), !, merge_sort([N], HCL, Hs). do_infer(Y >= XtimesK, Hs) :- ( XtimesK = X * K ; XtimesK = K * X ), int(K), simplify(K > 0, true), /* Strictly positive K only */ int(Y), ( get_forall_hyp(X <= Z, Conditions, N) ; get_forall_hyp(Z >= X, Conditions, N) ), \+ member(X <= Z, Conditions), \+ member(Z >= X, Conditions), int(Z), simplify(Z * K <= Y, true), /* and Z*K<=Y, therefore X*K<=Y */ do_infer_side_conditions(Conditions, HCL), !, merge_sort([N], HCL, Hs). do_infer(X <= YtimesK, Hs) :- ( YtimesK = Y * K ; YtimesK = K * Y ), int(K), simplify(K > 0, true), /* Strictly positive K only */ int(X), ( get_forall_hyp(Z <= Y, Conditions, N) ; get_forall_hyp(Y >= Z, Conditions, N) ), \+ member(Z <= Y, Conditions), \+ member(Y >= Z, Conditions), int(Z), simplify(X <= Z * K, true), /* and X<=Z*K, therefore X<=Y*K */ do_infer_side_conditions(Conditions, HCL), !, merge_sort([N], HCL, Hs). do_infer(YtimesK >= X, Hs) :- ( YtimesK = Y * K ; YtimesK = K * Y ), int(K), simplify(K > 0, true), /* Strictly positive K only */ int(X), ( get_forall_hyp(Z <= Y, Conditions, N) ; get_forall_hyp(Y >= Z, Conditions, N) ), \+ member(Z <= Y, Conditions), \+ member(Y >= Z, Conditions), int(Z), simplify(X <= Z * K, true), /* and X<=Z*K, therefore X<=Y*K */ do_infer_side_conditions(Conditions, HCL), !, merge_sort([N], HCL, Hs). %=============================================================================== % TN K616-030 - additional reasoning for inequality arising % from looping over unconstrained arrays. % % The following inference rule essentially embodies the proof: % % H1: x + y <= z . % H2: z <= c . % H3: c2 + y >= c . % -> % C1: x <= c2 . % % for x, y, z, c, and c2 all integers % % Proof of this rule is established in the Proof Checker with: % infer x + y <= c2 + y using transitivity. % replace h # 4 : x + y <= c2 + y by x <= c2 using standardisation. % yes % done % % Such problems arise when iterating over unconstrained arrays in SPARK. %=============================================================================== do_infer(X <= C2, Hs) :- intexp(C2), get_hyp(X + Y <= Z, _, H1), intexp(Y), get_hyp(Z <= C, _, H2), intexp(C), simplify(C2 + Y >= C, true), merge_sort([H1], [H2], Hs). %=============================================================================== % TN K616-030 - additional reasoning for inequality arising % from looping over unconstrained arrays. % % and the "reverse" case for decrementing the loop index... %=============================================================================== do_infer(X >= C2, Hs) :- intexp(C2), get_hyp(X - Y >= Z, _, H1), intexp(Y), get_hyp(Z >= C, _, H2), intexp(C), simplify(C2 - Y <= C, true), merge_sort([H1], [H2], Hs). 
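%-------------------------------------------------------------------------------
% Illustrative instance of the K616-030 rule above (hypothetical hypotheses, not
% from the original source). With
%
%   H1: i + 1 <= top
%   H2: top <= 100
%
% the conclusion  C: i <= 99  is discharged, since 99 + 1 >= 100 simplifies to
% true, and the hypotheses cited are [H1, H2]. The "reverse" clause handles the
% analogous decrementing-loop case  i - 1 >= bottom.
%-------------------------------------------------------------------------------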
% % Discharge conclusions of the form % my_scalar_type__max(A, B) >= A and % A <= my_scalar_type__max(A, B) and % my_scalar_type__max(A, B) >= B and % B <= my_scalar_type__max(A, B) % do_infer(Arg1 <= Max_Function, []):- is_min_max_function(Max_Function, max, Arg1, _Arg2), !. do_infer(Arg2 <= Max_Function, []):- is_min_max_function(Max_Function, max, _Arg1, Arg2), !. do_infer(Max_Function >= Arg1, []):- is_min_max_function(Max_Function, max, Arg1, _Arg2), !. do_infer(Max_Function >= Arg2, []):- is_min_max_function(Max_Function, max, _Arg1, Arg2), !. % % Discharge conclusions of the form % my_scalar_type__min(A, B) <= A and % A >= my_scalar_type__min(A, B) and % my_scalar_type__min(A, B) <= B and % B >= my_scalar_type__min(A, B). % do_infer(Arg1 >= Min_Function, []):- is_min_max_function(Min_Function, min, Arg1, _Arg2), !. do_infer(Arg2 >= Min_Function, []):- is_min_max_function(Min_Function, min, _Arg1, Arg2), !. do_infer(Min_Function <= Arg1, []):- is_min_max_function(Min_Function, min, Arg1, _Arg2), !. do_infer(Min_Function <= Arg2, []):- is_min_max_function(Min_Function, min, _Arg1, Arg2), !. % % If the two arguments to a min/max function have the same upper and lower bound, % then infer that the min/max function share the upper and lower bounds. % do_infer(LowerBound <= Function, Hs):- is_min_max_function(Function, _MinOrMax, Arg1, Arg2), do_infer(LowerBound <= Arg1, H1), do_infer(LowerBound <= Arg2, H2), !, merge_sort(H1, H2, Hs), !. do_infer(Function >= LowerBound, Hs):- is_min_max_function(Function, _MinOrMax, _Arg1, _Arg2), do_infer(LowerBound <= Function, Hs), !. do_infer(UpperBound >= Function, Hs):- is_min_max_function(Function, _MinOrMax, Arg1, Arg2), do_infer(UpperBound >= Arg1, H1), do_infer(UpperBound >= Arg2, H2), !, merge_sort(H1, H2, Hs), !. do_infer(Function <= UpperBound, Hs):- is_min_max_function(Function, _MinOrMax, _Arg1, _Arg2), do_infer(UpperBound >= Function, Hs), !. % Added to cope with equalities which need to make use of quantified facts. do_infer(X = K, Hs) :- get_forall_hyp(X <= K, Conditions, N), % So X <= K via a for_all fact ... \+ member(X <= K, Conditions), \+ member(K >= X, Conditions), do_infer_side_conditions(Conditions, HCL), /* ...whose side-conditions hold... */ find_mutual_types(X, K, T), try_to_infer((>=), X, K, T, HL), /* ...and X>=K too, so X=K. */ append([N], HL, Hrest), !, merge_sort(Hrest, HCL, Hs). do_infer(X = K, Hs) :- get_forall_hyp(K >= X, Conditions, N), % So X <= K via a for_all fact. \+ member(X <= K, Conditions), \+ member(K >= X, Conditions), do_infer_side_conditions(Conditions, HCL), /* ...whose side-conditions hold... */ find_mutual_types(X, K, T), try_to_infer((>=), X, K, T, HL), /* ...and X>=K too, so X=K. */ append([N], HL, Hrest), !, merge_sort(Hrest, HCL, Hs). do_infer(X = K, Hs) :- get_forall_hyp(K <= X, Conditions, N), % So K <= X via a for_all fact. \+ member(K <= X, Conditions), \+ member(X >= K, Conditions), do_infer_side_conditions(Conditions, HCL), /* ...whose side-conditions hold... */ find_mutual_types(X, K, T), try_to_infer((>=), K, X, T, HL), /* ...and K>=X too, so X=K. */ append([N], HL, Hrest), !, merge_sort(Hrest, HCL, Hs). do_infer(X = K, Hs) :- get_forall_hyp(X >= K, Conditions, N), % So K <= X via a for_all fact \+ member(K <= X, Conditions), \+ member(X >= K, Conditions), do_infer_side_conditions(Conditions, HCL), /* ...whose side-conditions hold... */ find_mutual_types(X, K, T), try_to_infer((>=), K, X, T, HL), /* ...and K>=X too, so X=K. */ append([N], HL, Hrest), !, merge_sort(Hrest, HCL, Hs). 
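%-------------------------------------------------------------------------------
% Illustrative instance of the equality clauses above (hypothetical hypotheses,
% not from the original source). Equality is obtained by antisymmetry: with
%
%   H2: for_all(i : indext, element(a, [i]) <= 0)
%   H5: element(a, [j]) >= 0
%
% one would expect the conclusion  C: element(a, [j]) = 0  to be discharged,
% citing H2 for the "<=" half (after checking its side conditions) and H5 for
% the ">=" half.
%-------------------------------------------------------------------------------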
% Additional predicates added to handle universally quantified terms. do_infer(element(ArrayStatement, [Index]) >= Bound_Val, H) :- Bound_Val \= element(_,_), do_infer(Bound_Val <= element(ArrayStatement, [Index]), H). do_infer(Bound_Val <= element(ArrayStatement, [Index]), H) :- Bound_Val \= element(_,_), element_update_infer(ArrayStatement, >=, Bound_Val, Index, Status, BaseArray, Ha), ( Status = found, /** The element is updated to satisfy the condition **/ H = Ha ; Status = base, /** The element is not updated so prove using a uq hypothesis **/ fact(for_all(I : Type, LWB <= element(BaseArray, [I]) and element(BaseArray, [I]) <= _UPB), H1), checktype(Index, Type), infer_subgoal(Bound_Val <= LWB, H2), testused(LWB >= Bound_Val), !, merge_sort(H1, H2, H) ). do_infer(Bound_Val >= element(ArrayStatement, [Index]), H):- Bound_Val \= element(_,_), do_infer(element(ArrayStatement, [Index]) <= Bound_Val, H). do_infer(element(ArrayStatement, [Index]) <= Bound_Val, H) :- Bound_Val \= element(_,_), element_update_infer(ArrayStatement, <=, Bound_Val, Index, Status, BaseArray, Ha), ( Status = found, H = Ha ; Status = base, fact(for_all(I : Type, _LWB <= element(BaseArray, [I]) and element(BaseArray, [I]) <= UPB), H1), checktype(Index, Type), infer_subgoal(UPB <= Bound_Val, H2), testused(Bound_Val >= UPB), !, merge_sort(H1, H2, H) ). % Adds special-case to handle conclusions of the form: % fld_F(element(A, [I}) >= Bound_Val % % Such expressions are common in SPARK VCs since they arise from the need % to show that fields of a 1-dimensional array of records are in their % subtypes. % % Note that we cannot directly pattern-match for fld_F, since this is % second-order, so we look first for a record_function() that might match, % then we look for a quantified hypothesis that matches. % Lower Bound case. do_infer(R >= Bound_Val, H) :- Bound_Val \= element(_,_), record_function(_, R, access, _, [element(_ArrayStatement, [_Index])], _), do_infer(Bound_Val <= R, H). do_infer(Bound_Val <= R, H) :- Bound_Val \= element(_,_), record_function(_, R, access, _, [element(ArrayStatement, [Index])], _), fact(for_all(I : Type, ILB <= I and I <= IUB -> LWB <= E and E <= _UPB), H1), functor(R, F, 1), E =.. [F, element(ArrayStatement, [I])], checktype(Index, Type), infer_subgoal(ILB <= Index, H2), infer_subgoal(Index <= IUB, H3), infer_subgoal(Bound_Val <= LWB, H4), testused(LWB >= Bound_Val), !, merge_sort(H1, H2, H5), merge_sort(H3, H4, H6), merge_sort(H5, H6, H). % Upper Bound case. do_infer(Bound_Val >= R, H) :- Bound_Val \= element(_,_), record_function(_, R, access, _, [element(_ArrayStatement, [_Index])], _), do_infer(R <= Bound_Val, H). do_infer(R <= Bound_Val, H) :- Bound_Val \= element(_,_), record_function(_, R, access, _, [element(ArrayStatement, [Index])], _), fact(for_all(I : Type, ILB <= I and I <= IUB -> _LWB <= E and E <= UPB), H1), functor(R, F, 1), E =.. [F, element(ArrayStatement, [I])], checktype(Index, Type), infer_subgoal(ILB <= Index, H2), infer_subgoal(Index <= IUB, H3), infer_subgoal(UPB <= Bound_Val, H4), testused(Bound_Val >= UPB), !, merge_sort(H1, H2, H5), merge_sort(H3, H4, H6), merge_sort(H5, H6, H). do_infer(Bound_Val <> element(ArrayStatement, [Index]), H):- Bound_Val \= element(_,_), do_infer(element(ArrayStatement, [Index]) <> Bound_Val, H). 
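%-------------------------------------------------------------------------------
% Illustrative instance of the fld_F special case above (hypothetical names, not
% from the original source). With a record access function fld_f1 registered via
% a record_function entry and a hypothesis
%
%   H1: for_all(i : indext, 1 <= i and i <= 10 ->
%                 0 <= fld_f1(element(a, [i])) and fld_f1(element(a, [i])) <= 99)
%
% a conclusion such as  fld_f1(element(a, [j])) >= 0  is discharged by checking
% that j is of type indext, that 1 <= j and j <= 10 can themselves be inferred,
% and that the required bound 0 <= 0 holds.
%-------------------------------------------------------------------------------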
do_infer(element(ArrayStatement, [Index]) <> Bound_Val, H) :- Bound_Val \= element(_,_), element_update_infer(ArrayStatement, <>, Bound_Val, Index, Status, BaseArray, Ha), ( Status = found, H = Ha ; Status = base, fact(for_all(I : Type, LWB <= element(BaseArray, [I]) and element(BaseArray, [I]) <= UPB), H1), checktype(Index, Type), ( infer_subgoal(UPB < Bound_Val, H2) ; infer_subgoal(Bound_Val < LWB, H2) ), !, merge_sort(H1, H2, H) ). do_infer(Bound_Val = element(ArrayStatement, [Index]), H):- Bound_Val \= element(_,_), do_infer(element(ArrayStatement, [Index]) = Bound_Val, H). do_infer(element(ArrayStatement, [Index]) = Bound_Val, H) :- Bound_Val \= element(_,_), element_update_infer(ArrayStatement, =, Bound_Val, Index, Status, BaseArray, Ha), ( Status = found, H = Ha ; Status = base, /* check to see if there is a range of one that is equal to Bound_Val */ fact(for_all(I : Type, Bound <= element(BaseArray, [I]) and element(BaseArray, [I]) <= Bound), H1), infer_subgoal(Bound = Bound_Val, H2), checktype(Index, Type), !, merge_sort(H1, H2, H) ). % The 'element(....) OP Val' conclusion cannot be proved by % element_update_infer or from simple type bounded universally qualified % hypotheses. Therefore look for an implication bounded universally % qualified hypothesis. do_infer(element(Array, [Index]) <= Val, H) :- Val \= element(_,_), infer_by_uq_imp_hyp(element(Array, [Index]) <= Val, H), !. do_infer(Val <= element(Array, [Index]), H) :- Val \= element(_,_), infer_by_uq_imp_hyp(Val <= element(Array, [Index]), H), !. do_infer(element(Array, [Index]) <> Val, H):- Val \= element(_,_), infer_by_uq_imp_hyp(Val <> element(Array, [Index]), H), !. do_infer(Val <> element(Array, [Index]), H) :- Val \= element(_,_), infer_by_uq_imp_hyp(Val <> element(Array, [Index]), H), !. do_infer(Val = element(Array, [Index]), H) :- Val \= element(_,_), infer_by_uq_imp_hyp(Val = element(Array, [Index]), H), !. do_infer(element(Array, [Index]) = Val, H) :- Val \= element(_,_), infer_by_uq_imp_hyp(Val = element(Array, [Index]), H), !. % Prove conclusions involving universal quantifiers. do_infer(for_all(Var : T, X and Y), H) :- uq_infer(Var : T, X, H1), uq_infer(Var : T, Y, H2), !, merge_sort(H1, H2, H). do_infer(for_all(Var : T, X or Y), H) :- !, ( uq_infer(Var : T, X, H) ; uq_infer(Var : T, Y, H) ). do_infer(for_all(Var : T, X -> A and B), H) :- do_infer(for_all(Var : T, X -> A), H1), do_infer(for_all(Var : T, X -> B), H2), !, merge_sort(H1, H2, H). do_infer(for_all(Var : T, X -> A or B), H) :- ( do_infer(for_all(Var : T, X -> A), H) ; do_infer(for_all(Var : T, X -> B), H) ), !. % Upper bound on an array element. % A common class of VCs from a loop updating an array is that the update to an array element is % within range. % The tactic to discharge the VC is: % 1) show that it is true for all I, I_LWB <= I <= I_UPB and % 2) show that it is true when I = I_UPB+1. do_infer(for_all(I : T, I_LWB <= I and I <= I_UPB + 1 -> element(update(Array, [I_UPB+1], Val_Update), [I]) <= Val), H) :- !, % Prove it is true for all I, I_LWB <= I <= I_UPB - see next clause. do_infer(for_all(I : T, I_LWB <= I and I <= I_UPB -> element(Array, [I]) <= Val), H1), % Prove it is true for I = I_UPB + 1. do_infer(Val_Update <= Val, H2), merge_sort(H1, H2, H), !. % Handles the case when the index is an enumerated type. % The VC can be re-expressed by replacing succ(I_UPB) with I_UPB + 1. 
do_infer(for_all(I : T, I_LWB <= I and I <= succ(I_UPB) -> element(update(Array, [succ(I_UPB)], Val_Update), [I]) <= Val), H) :- !, % The tactic to discharge this type of VC is the same as the case when % the last element in an array indexed by an integer is updated - that is, % the Simplifier first tries to infer that the VC is true % for all I, I_LWB <= I <= I_UPB and then tries to show the VC is true % for the updated array element. % The Simplifier invokes the clause above by re-expressing succ(I_UPB) % to I_UPB+1 only to pattern match the above clause. Note % that I_UPB+1 is never evaluated. do_infer(for_all(I : T, I_LWB <= I and I <= I_UPB + 1 -> element(update(Array, [I_UPB + 1], Val_Update), [I]) <= Val), H), !. do_infer(for_all(I : T, I_LWB <= I and I <= I_UPB -> element(Array, [I]) <= Val), H) :- !, update_chain_infer(<=, Array, I_LWB, I_UPB, Val, Base_Array, H1), ( /* See if we can determine both a lower and upper bound on Base_Array(I2) */ fact(for_all(I2 : T, I2_LWB <= I2 and I2 <= I2_UPB -> (Val_LWB <= element(Base_Array, [I2]) and element(Base_Array, [I2]) <= Val_UPB)), H2) ; fact(for_all(I2 : T, I2_LWB <= I2 and I2 <= I2_UPB -> (element(Base_Array, [I2]) <= Val_UPB and Val_LWB <= element(Base_Array, [I2]))), H2) ) , infer_subgoal(I_UPB <= I2_UPB, H3), infer_subgoal(I2_LWB <= I_LWB, H4), infer_subgoal(Val_UPB <= Val, H5), !, merge_sort(H1, H2, Ha), merge_sort(H3, H4, Hb), merge_sort(H5, Ha, Hc), merge_sort(Hc, Hb, H). % Lower bound on an array element. % Similar to uppoer bound case. do_infer(for_all(I : T, I_LWB <= I and I <= I_UPB + 1 -> Val <= element(update(Array, [I_UPB+1], Val_Update), [I])), H) :- !, % Prove it is true for all I, I_LWB <= I <= I_UPB - see next clause. do_infer(for_all(I : T, I_LWB <= I and I <= I_UPB -> Val <= element(Array, [I])), H1), % Prove it is true for I = I_UPB + 1. do_infer(Val <= Val_Update, H2), merge_sort(H1, H2, H), !. do_infer(for_all(I : T, I_LWB <= I and I <= succ(I_UPB) -> Val <= element(update(Array, [succ(I_UPB)], Val_Update), [I])), H) :- !, % See comment for upper bound case for discharging VC updating an array element % indexed by an enumerated type. do_infer(for_all(I : T, I_LWB <= I and I <= I_UPB + 1 -> Val <= element(update(Array, [I_UPB+1], Val_Update), [I])), H), !. do_infer(for_all(I : T, I_LWB <= I and I <= I_UPB -> Val <= element(Array, [I])), H) :- !, update_chain_infer(>=, Array, I_LWB, I_UPB, Val, Base_Array, H1), ( /* See if we can determine both a lower and upper bound on Base_Array(I2) */ fact(for_all(I2 : T, I2_LWB <= I2 and I2 <= I2_UPB -> (Val_LWB <= element(Base_Array, [I2]) and element(Base_Array, [I2]) <= Val_UPB)), H2) ; fact(for_all(I2 : T, I2_LWB <= I2 and I2 <= I2_UPB -> (element(Base_Array, [I2]) <= Val_UPB and Val_LWB <= element(Base_Array, [I2]))), H2) ) , infer_subgoal(I_UPB <= I2_UPB, H3), infer_subgoal(I2_LWB <= I_LWB, H4), infer_subgoal(Val <= Val_LWB, H5), !, merge_sort(H1, H2, Ha), merge_sort(H3, H4, Hb), merge_sort(H5, Ha, Hc), merge_sort(Hc, Hb, H). % Unit-range case for universal quantifier do_infer(for_all(I : _T, LWB <= I and I <= UPB -> P), H) :- do_infer(LWB = UPB, H1), subst_vbl(I, LWB, P, New_P), do_infer(New_P, H2), !, merge_sort(H1, H2, H). % The universal quantifier is trivially true as the range is empty. do_infer(for_all(I : _T, LWB <= I and I <= UPB -> _X), H) :- do_infer(UPB < LWB, H), !. 
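%-------------------------------------------------------------------------------
% Illustrative instance of the array-update tactic above (hypothetical VC, not
% from the original source). To discharge an invariant such as
%
%   for_all(i : indext, 1 <= i and i <= n + 1 ->
%             element(update(a, [n + 1], 5), [i]) <= 10)
%
% the Simplifier first establishes the bound over the old range, i.e.
% for_all(i : indext, 1 <= i and i <= n -> element(a, [i]) <= 10), and then
% checks the newly written element separately, here 5 <= 10.
%-------------------------------------------------------------------------------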
% A common class of VCs arising from a loop updating an array is an invariant of the form
% for_all(I : T, I_LWB <= I and I <= I_UPB + 1 -> element(update(Array, [I_UPB+1], Val_Update), [I]) = Val)
% The tactic to discharge this type of VC is:
% 1) show that it is true for all I, I_LWB <= I <= I_UPB, and
% 2) show that it is true when I = I_UPB+1.
do_infer(for_all(I : T, I_LWB <= I and I <= I_UPB + 1 ->
         element(update(Array, [I_UPB+1], Val_Update), [I]) = Val), H) :-
    % Prove the invariant is true for all I, I_LWB <= I <= I_UPB.
    fact(for_all(I : T, I_LWB <= I and I <= I_UPB -> element(Array, [I]) = Val), H),
    % We now prove that the invariant is true for I = I_UPB + 1.
    % Val is an arbitrary function of I (array index) - the function can be as simple as
    % a constant, the identity function or something more complicated such as 2 * I + 100.
    % We replace all 'I's in Val with 'I_UPB+1' as we are proving some
    % property on the I_UPB+1 element and then prove that the invariant
    % still holds for I_UPB+1.
    subst_vbl(I, I_UPB+1, Val, Val_Update_X),
    simplify(Val_Update_X = Val_Update, true),
    !.
% Handle the case when the index to the array is an enumerated type. The Simplifier
% re-expresses the VC by replacing succ(I_UPB) with I_UPB + 1.
do_infer(for_all(I : T, I_LWB <= I and I <= succ(I_UPB) ->
         element(update(Array, [succ(I_UPB)], Val_Update), [I]) = Val), H) :-
    % See comment for upper bound case for discharging VC updating an array element
    % indexed by an enumerated type.
    do_infer(for_all(I : T, I_LWB <= I and I <= I_UPB + 1 ->
             element(update(Array, [I_UPB+1], Val_Update), [I]) = Val), H),
    !.
do_infer(for_all(I : T, I_LWB <= I and I <= I_UPB + 1 -> X), H) :-
    fact(for_all(I : T, I_LWB <= I and I <= I_UPB -> X), H1),
    subst_vbl(I, I_UPB+1, X, X_New),
    fact(X_New, H2),
    merge_sort(H1, H2, H),
    !.
% Handling of inequality in universal quantifiers.
% The universal quantifier reduces to a VC about a single element of the array as
% the lower and upper bound are the same.
do_infer(for_all(I : _T, Bound <= I and I <= Bound -> element(Array, [I]) <> Val), H) :-
    subst_vbl(I, Bound, Val, Val_X),
    fact(element(Array, [Bound]) <> Val_X, H),
    !.
% The universal quantifier is trivially true as the range is empty.
do_infer(for_all(I : _T, LWB <= I and I <= UPB -> element(_Array, [I]) <> _Val), H) :-
    do_infer(UPB < LWB, H),
    !.
do_infer(for_all(Var : T, X), H) :-
    !,
    uq_infer(Var : T, X, H).
%-------------------------------------------------------------------------------
new_strategies_are_allowed :-
    allow_new_strategies,
    !.
%-------------------------------------------------------------------------------
find_mutual_types(A, B, T) :-
    checktype(A, T),
    checktype(B, T),
    !.
%-------------------------------------------------------------------------------
do_infer_side_conditions(List, Hs) :-
    inhibit_new_strategies(NeedToRestore),
    !,
    (
        safe_infer_side_conditions(List, Hs),
        Success = true
    ;
        Success = fail
    ),
    !,
    restore_new_strategies(NeedToRestore),
    !,
    call(Success).
%-------------------------------------------------------------------------------
safe_infer_side_conditions([C|CL], Hs) :-
    infer_subgoal(C, H),
    !,
    safe_infer_side_conditions(CL, HL),
    !,
    merge_sort(H, HL, Hs).
safe_infer_side_conditions([], []).
%-------------------------------------------------------------------------------
% This enforces the limit on the depth of recursive calls, which is reset by the
% top-level infer/2 predicate when it is invoked.
try_to_infer(RO, A, B, TYPE, Hs) :-
    decrement_inference_depth_remaining(main),
    !,
    (
        do_try_to_infer(RO, A, B, TYPE, Hs),
        Success = true
    ;
        Success = fail
    ),
    !,
    increment_inference_depth_remaining(main),
    !,
    call(Success).
%-------------------------------------------------------------------------------
% is_min_max_function(+Function, -MinOrMax, -Arg1, -Arg2)
%
% Function is a compound term and the predicate succeeds if the
% function is a min or max function with exactly two arguments.
% The parameter MinOrMax returns either 'min' or 'max' and
% the parameters Arg1 and Arg2 return the arguments of the function.
%-------------------------------------------------------------------------------
is_min_max_function(Function, MinOrMax, Arg1, Arg2):-
    compound(Function),
    functor(Function, Function_Name, 2),
    arg(1, Function, Arg1),
    arg(2, Function, Arg2),
    is_min_max_function_name(Function_Name, MinOrMax),
    !.
is_min_max_function_name(Function_Name, min):-
    atom_concat(_Prefix_Str, '__min', Function_Name).
is_min_max_function_name(Function_Name, max):-
    atom_concat(_Prefix_Str, '__max', Function_Name),
    !.
%-------------------------------------------------------------------------------
% Inference depth limit utility: for preventing non-termination.
decrement_inference_depth_remaining(Category) :-
    inference_depth_limit(Category, Depth),
    Depth > 0,   /* Fail if already reached 0 */
    !,
    retractall(inference_depth_limit(Category, _)),
    !,
    NewDepth is Depth - 1,
    asserta(inference_depth_limit(Category, NewDepth)).
%-------------------------------------------------------------------------------
% Inference depth limit utility: for preventing non-termination.
increment_inference_depth_remaining(Category) :-
    retract(inference_depth_limit(Category, Depth)),
    NewDepth is Depth + 1,
    !,
    asserta(inference_depth_limit(Category, NewDepth)).
%-------------------------------------------------------------------------------
do_try_to_infer(RO, A, B, TYPE, Hs) :-
    GOAL =.. [RO, A, B],
    (
        type(TYPE, set(_)),
        !,
        set_infer(GOAL, Hs)
    ;
        type(TYPE, sequence(_)),
        !,
        sequence_infer(GOAL, Hs)
    ;
        type(TYPE, enumerated),
        enumeration(TYPE, ENUMLIST),
        handle_nested_used_facts(main),
        enumerated_infer(GOAL, ENUMLIST, Hs),
        restore_nested_used_facts(main)
        /* If this branch fails for enumerated types, then try */
        /* using apply_deduction/3, so no cut here... */
    ;
        handle_nested_used_facts(main),
        apply_deduction(GOAL, TYPE, Hs),
        restore_nested_used_facts(main)
    ),
    !.
%-------------------------------------------------------------------------------
apply_deduction(GOAL, TYPE, Hs) :-
    deduce_formula(GOAL, TYPE, Hs),   /* No need to try anything else */
    !.
apply_deduction(GOAL, TYPE, Hs) :-
    allow_new_strategies,   /* Guard: only use the new-strategies sledgehammer */
    retractall(used(_)),    /* to crack the remaining harder nuts! */
    !,
    try_new_deduction_strategies(GOAL, TYPE, Hs),
    !.
%-------------------------------------------------------------------------------
sequence_infer(X = [], Hs) :- do_infer(length(X)=0, Hs), !.
sequence_infer([] = X, Hs) :- do_infer(length(X)=0, Hs), !.
sequence_infer(X @ Y = [], Hs) :- sequence_infer(X=[], H1), sequence_infer(Y=[], H2), !, merge_sort(H1, H2, Hs), !.
sequence_infer([] = X @ Y, Hs) :- sequence_infer(X=[], H1), sequence_infer(Y=[], H2), !, merge_sort(H1, H2, Hs), !.
sequence_infer(X @ [] = X, []) :- !.
sequence_infer([] @ X = X, []) :- !.
sequence_infer(X = X @ [], []) :- !.
sequence_infer(X = [] @ X, []) :- !.
sequence_infer([X|XT]=[Y|YT], Hs) :- do_infer(X=Y, H1), sequence_infer(XT=YT, H2), !, merge_sort(H1, H2, Hs), !.
sequence_infer(first(X) @ nonfirst(X) = X, []) :- !. sequence_infer(X = first(X) @ nonfirst(X), []) :- !. sequence_infer(nonlast(X) @ last(X) = X, []) :- !. sequence_infer(X = nonlast(X) @ last(X), []) :- !. sequence_infer(nonfirst([_H|T]) = X, Hs) :- do_infer(T=X, Hs), !. sequence_infer(nonlast([H|T]) = X, Hs) :- append(N, [_L], [H|T]), do_infer(N=X, Hs), !. sequence_infer(X1 @ Y1 = X2 @ Y2, Hs) :- do_infer(X1=X2, H1), do_infer(Y1=Y2, H2), !, merge_sort(H1, H2, Hs), !. sequence_infer(X=Y, Hs) :- ( X=Y, Hs=[] ; sequence_infrule(X=Y, Hs) ), !. sequence_infer([_|_] <> [], []) :- !. sequence_infer([] <> [_|_], []) :- !. sequence_infer([X|_XT] <> [Y|_YT], Hs) :- do_infer(X<>Y, Hs), !. sequence_infer([_X|XT] <> [_Y|YT], Hs) :- sequence_infer(XT<>YT, Hs), !. sequence_infer(X @ Y <> [], Hs) :- ( sequence_infer(X <> [], Hs) ; sequence_infer(Y <> [], Hs) ), !. sequence_infer(X @ Y <> X, Hs) :- sequence_infer(Y <> [], Hs), !. sequence_infer(X @ Y <> Y, Hs) :- sequence_infer(X <> [], Hs), !. sequence_infer(X @ Y <> X @ Z, Hs) :- sequence_infer(Y <> Z, Hs), !. sequence_infer(X @ Y <> Z @ Y, Hs) :- sequence_infer(X <> Z, Hs), !. sequence_infer(X <> Y, Hs) :- sequence_infrule(X <> Y, Hs), !. %------------------------------------------------------------------------------- sequence_infrule(X, Hs) :- fact(X, Hs). sequence_infrule(X=Y, Hs) :- fact(X=Z, H1), testused(X=Z), sequence_infrule(Y=Z, H2), merge_sort(H1, H2, Hs). sequence_infrule(X<>Y, Hs) :- fact(X=Z, H1), testused(X=Z), sequence_infrule(Z<>Y, H2), merge_sort(H1, H2, Hs). sequence_infrule(X<>Y, Hs) :- fact(X<>Z, H1), sequence_infrule(Z=Y, H2), merge_sort(H1, H2, Hs). sequence_infrule(X<>Y, Hs) :- fact(Y<>Z, H1), sequence_infrule(X=Z, H2), merge_sort(H1, H2, Hs). %------------------------------------------------------------------------------- enumerated_infer(X, L, Hs) :- length(L, Depth), enum_infrule(X, L, Hs, Depth), !. %=============================================================================== % int_or_enum_lit(+X, -Which). %------------------------------------------------------------------------------- % Succeed if X is a signed integer or an enumeration literal, with Which % set to integer or enum respectively. %=============================================================================== int_or_enum_lit(X, integer) :- signed_integer(X), !. int_or_enum_lit(X, enum) :- atom(X), !, var_const(X, T, c), type(T, enumerated), enumeration(T, E), !, is_in(X, E), !. %=============================================================================== % handle_nested_used_facts(+Category). %------------------------------------------------------------------------------- % With recursive calls, we want to buffer the used(_) facts at the point of % call and restore them to what they were afterwards. This will either be % achieved on backtracking (in the event that the inference fails), or by % explicit call to the restore predicate (if the inference succeeds). %=============================================================================== handle_nested_used_facts(Category) :- save_nested_used_facts(Category). handle_nested_used_facts(Category) :- restore_nested_used_facts(Category), fail. /* want to fail if we get here, since inference failed */ %------------------------------------------------------------------------------- save_nested_used_facts(Category) :- inference_depth_limit(Category, D), retractall(buffered_used_fact(Category, D, _)), !, buffer_the_used_facts(Category, D). 
/* always succeeds exactly once */ %------------------------------------------------------------------------------- buffer_the_used_facts(Category, D) :- used(Fact), assertz(buffered_used_fact(Category, D, Fact)), fail. buffer_the_used_facts(_, _) :- retractall(used(_)). %------------------------------------------------------------------------------- restore_nested_used_facts(Category) :- retractall(used(_)), inference_depth_limit(Category, D), !, unbuffer_the_used_facts(Category, D). /* always succeeds exactly once */ %------------------------------------------------------------------------------- unbuffer_the_used_facts(Category, D) :- buffered_used_fact(Category, D, Fact), assertz(used(Fact)), fail. unbuffer_the_used_facts(_, _). /* when finished */ %=============================================================================== % enum_infrule(+Expression, +EnumerationLiterals, -Hypotheses, +RemainingDepth). %------------------------------------------------------------------------------- % Expression is the expression of an enumeration type to be inferred. % EnumerationLiterals is an ordered list of the type's enumeration % literals. Hypotheses are the hypotheses used in the inference. % RemainingDepth is further depth of recursive calls to enum_infrule that % can be made in attempting to infer the Expression. When the % RemainingDepth is 0 enum_infrule will fail. The initial value of % RemainingDepth should be set to the length of EnumerationLiterals. %=============================================================================== enum_infrule(X, _L, Hs, _) :- fact(X, Hs),!. % RemainingDepth is 0 - no further recursion: terminate and fail. enum_infrule(_, _, _, 0) :- !, fail. enum_infrule(pred(X)=Y, L, Hs, RemDepth) :- !, X \= Y, prove_not_first(X, L, H1), prove_not_last(Y, L, Ha), enumerated_simp(succ(Y), L, NEWY), NEWY \= succ(Y), !, % Simplified succ: do not decrement remaining recursion depth. enum_infrule(X=NEWY, L, Hb, RemDepth), merge_sort(Ha, Hb, H2), merge_sort(H1, H2, Hs). enum_infrule(succ(X)=Y, L, Hs, RemDepth) :- !, X \= Y, prove_not_last(X, L, H1), prove_not_first(Y, L, Ha), enumerated_simp(pred(Y), L, NEWY), NEWY \= pred(Y), !, % Simplified pred: do not decrement remaining recursion depth. enum_infrule(X=NEWY, L, Hb, RemDepth), merge_sort(Ha, Hb, H2), merge_sort(H1, H2, Hs). enum_infrule(X=succ(Y), L, Hs, Depth) :- enum_infrule(succ(Y)=X, L, Hs, Depth),!. enum_infrule(X=pred(Y), L, Hs, Depth) :- enum_infrule(pred(Y)=X, L, Hs, Depth),!. enum_infrule(X=Y, L, Hs, RemDepth) :- ( is_in(X, L), is_in(Y, L), !, X = Y ; fact(X=Z, H1), testused(X=Z), % Protected by testused - no need to decrement Depth. enum_infrule(Z=Y, L, H2, RemDepth), merge_sort(H1, H2, Hs) ; fact(Y=Z, H1), testused(Y=Z), % Protected by testused - no need to decrement Depth. enum_infrule(X=Z, L, H2, RemDepth), merge_sort(H1, H2, Hs) ; equality_by_elimination(X=Y, L, Hs) ). enum_infrule(pred(X)<=Y, L, Hs, RemDepth) :- !, prove_not_first(X, L, H1), ( X = Y, H2 = [] ; prove_not_last(Y, L, Ha), enumerated_simp(succ(Y), L, NEWY), NEWY \= succ(Y), !, % Simplified succ: do not decrement remaining recursion depth. enum_infrule(X<=NEWY, L, Hb, RemDepth), merge_sort(Ha, Hb, H2) ; NewRemDepth is RemDepth - 1, enum_infrule(X<=Y, L, H2, NewRemDepth) ), merge_sort(H1, H2, Hs). enum_infrule(X<=succ(Y), L, Hs, RemDepth) :- !, prove_not_last(Y, L, H1), ( X = Y, H2 = [] ; prove_not_first(X, L, Ha), enumerated_simp(pred(X), L, NEWX), NEWX \= pred(X), !, % Simplified pred: do not decrement remaining recursion depth. 
enum_infrule(NEWX<=Y, L, Hb, RemDepth), merge_sort(Ha, Hb, H2) ; NewRemDepth is RemDepth - 1, enum_infrule(X<=Y, L, H2, NewRemDepth) ), merge_sort(H1, H2, Hs). enum_infrule(X<=pred(Y), L, Hs, RemDepth) :- !, ( X = Y, fail ) ; prove_not_first(Y, L, H1), NewRemDepth is RemDepth - 1, enum_infrule(X=X) ; fact(X=Z, H1), Z \= Y, testused(X=Z) ; fact(X=Z) ; fact(Z=Y, H1), Z \= X, testused(Z=Y) ; fact(Z 2, strict_sublist([NEWX, X], L), enum_infrule(NEWX 2, strict_sublist([Y, NEWY], L), enum_infrule(X=Y, L, Hs, Depth) :- enum_infrule(Y<=X, L, Hs, Depth). enum_infrule(pred(X) Y, Hs), !. enum_infrule(X 2, fact(X 2, fact(Z 2, fact(X<=Z, H1), testused(Z>=X), % Protected by testused - no need to decrement Depth. enum_infrule(Z 2, fact(Z<=Y, H1), testused(Y>=Z), % Protected by testused - no need to decrement Depth. enum_infrule(XY, L, Hs, Depth) :- enum_infrule(YY, L, Hs, RemDepth) :- ( X = Y, !, fail ; is_in(X, L), is_in(Y, L), !, X\=Y, Hs=[] ; enumerated_infer(XY, L, H2, RemDepth), merge_sort(H1, H2, Hs) ; fact(Y=Z, H1), testused(Y=Z), % Protected by testused - no need to decrement depth. enum_infrule(X<>Z, L, H2, RemDepth), merge_sort(H1, H2, Hs) ), !. %------------------------------------------------------------------------------- enum_lt(X, Y, L) :- enum_gt(Y, X, L). enum_gt(X, Y, L) :- append(_, [Y|T], L), is_in(X, T). enum_lte(X, X, _). enum_lte(X, Y, L) :- enum_lt(X, Y, L). %------------------------------------------------------------------------------- equality_by_elimination(X=Y, L, Hs) :- ( is_in(X, L), append(Before, [X| After], L), append(Before, After, L2), not_any(Y, L2, [], Hs) ; is_in(Y, L), append(Before, [Y| After], L), append(Before, After, L2), not_any(X, L2, [], Hs) ). not_any(_X, [], Hs, Hs) :- !. not_any(X, [L|Ls], Hsin, Hsout) :- fact(not X = L, H),!, not_any(X, Ls, [H|Hsin], Hsout). %=============================================================================== % is_inequality(+Exp, -LHS, -RHS, -Op). %------------------------------------------------------------------------------- % Split an inequality into its constituent sides and operator. %=============================================================================== is_inequality(A=B, A, B, (=)). is_inequality(A<>B, A, B, (<>)). is_inequality(A<=B, A, B, (<=)). is_inequality(A>=B, A, B, (>=)). is_inequality(AB, A, B, (>)). %=============================================================================== % infer_by_uq_imp_hyp(+Exp, -Hyp). %------------------------------------------------------------------------------- % Infer conclusion from universal quantified implication hypotheses. %=============================================================================== infer_by_uq_imp_hyp(element(Array, [Index]) >= Val, H) :- infer_by_uq_imp_hyp(Val <= element(Array, [Index]), H). infer_by_uq_imp_hyp(Val <= element(Array, [Index]), H) :- fact(for_all(I__1 : IType, I_LWB <= I__1 and I__1 <= I_UPB -> E_LWB <= element(Array, [I__1]) and element(Array, [I__1]) <= _E_UPB), H1), checktype(Index, IType), infer_subgoal(Val <= E_LWB, H2), infer_subgoal(Index <= I_UPB, H3), infer_subgoal(I_LWB <= Index, H4), merge_sort(H1, H2, Ha), merge_sort(H3, H4, Hb), merge_sort(Ha, Hb, H). infer_by_uq_imp_hyp(Val >= element(Array, [Index]), H) :- infer_by_uq_imp_hyp(element(Array, [Index]) <= Val, H). 
infer_by_uq_imp_hyp(element(Array, [Index]) <= Val, H) :- fact(for_all(I__1 : IType, I_LWB <= I__1 and I__1 <= I_UPB -> _E_LWB <= element(Array, [I__1]) and element(Array, [I__1]) <= E_UPB), H1), checktype(Index, IType), infer_subgoal(E_UPB <= Val, H2), infer_subgoal(Index <= I_UPB, H3), infer_subgoal(I_LWB <= Index, H4), merge_sort(H1, H2, Ha), merge_sort(H3, H4, Hb), merge_sort(Ha, Hb, H). infer_by_uq_imp_hyp(Val <> element(Array, [Index]), H) :- infer_by_uq_imp_hyp(element(Array, [Index]) <> Val, H). infer_by_uq_imp_hyp(element(Array, [Index]) <> Val, H) :- fact(for_all(I__1 : IType, I_LWB <= I__1 and I__1 <= I_UPB -> E_LWB <= element(Array, [I__1]) and element(Array, [I__1]) <= E_UPB), H1), checktype(Index, IType), infer_subgoal(Index <= I_UPB, H3), infer_subgoal(I_LWB <= Index, H4), ( infer_subgoal(E_UPB < Val, H2) ; infer_subgoal(Val < E_LWB, H2) ), merge_sort(H1, H2, Ha), merge_sort(H3, H4, Hb), merge_sort(Ha, Hb, H). infer_by_uq_imp_hyp(Val = element(Array, [Index]), H) :- infer_by_uq_imp_hyp(element(Array, [Index]) = Val, H). infer_by_uq_imp_hyp(element(Array, [Index]) = Val, H) :- ( fact(for_all(I__1 : IType, I_LWB <= I__1 and I__1 <= I_UPB -> Bound <= element(Array, [I__1]) and element(Array, [I__1]) <= Bound), H1) ; fact(for_all(I__1 : IType, I_LWB <= I__1 and I__1 <= I_UPB -> element(Array, [I__1]) = Bound ), H1) ), checktype(Index, IType), infer_subgoal(Bound = Val, H2), infer_subgoal(Index <= I_UPB, H3), infer_subgoal(I_LWB <= Index, H4), merge_sort(H1, H2, Ha), merge_sort(H3, H4, Hb), merge_sort(Ha, Hb, H). %=============================================================================== % update_chain_infer(+Op, +Exp, +I_LWB, +I_UPB, +Val, -Base_Array, -H). %------------------------------------------------------------------------------- % Update chain inference. %=============================================================================== % Check that all updates to elements of an array, whose indexes are within % a range, are bounded by a value. update_chain_infer(Op, update(Array, [I], UVal), I_LWB, I_UPB, Val, Base_Array, H) :- infer_subgoal(I_LWB <= I, H1), infer_subgoal(I <= I_UPB, H2), GOAL =.. [Op, UVal, Val], infer_subgoal(GOAL, H3), !, update_chain_infer(Op, Array, I_LWB, I_UPB, Val, Base_Array, HRest), merge_sort(H1, H2, Ha), merge_sort(H3, HRest, Hb), merge_sort(Ha, Hb, H). % From above, the updated element is outside the bounded region of the % array. update_chain_infer(OP, update(Array, [I], _UVal), I_LWB, I_UPB, Val, Base_Array, H) :- ( infer_subgoal(I < I_LWB, H1) ; infer_subgoal(I_UPB < I, H1) ), !, update_chain_infer(OP, Array, I_LWB, I_UPB, Val, Base_Array, H2), !, merge_sort(H1, H2, H). % This is the base array. If there are no more updates, then we succeed and % return Array. If there are more updates that the preceding clauses have % not been able to strip away, then we must fail. update_chain_infer(_OP, Array, _I_LWB, _I_UPB, _Val, Array, []) :- Array \= update(_, [_], _), !. %=============================================================================== % element_update_infer(+Exp, +OP, +Bound_Val, +Idx2, -Status, -Base_Array, -H). %------------------------------------------------------------------------------- % Searches an update chain for the 'outermost'(i.e. last) update of the % element I. It then infers that the update meets the inequality % condition. This is similar to the update_chain_infer predicate, but for % only a single element, rather than all elements in a portion of an array. 
% % If the element is not updated, then the base array of the updates is % returned. % % If the element is updated but the test fails then the predicate fails. % % If the element is updated and the test succeeds then the predicate % succeeds. %=============================================================================== % Check the outermost element update, Idx = Idx2, then check the update % value against the inequality bound. The base array is unified with the % empty list. There is no real need as the variable should not be used but % is included just in case. By using the empty list we avoid any possible % name clash. element_update_infer(update(_Array, [Idx], Upval), <=, Bound_Val, Idx2, found, [], H) :- infer_subgoal(Idx = Idx2, H1), !, infer_subgoal(Upval <= Bound_Val, H2), merge_sort(H1, H2, H). element_update_infer(update(_Array, [Idx], Upval), >=, Bound_Val, Idx2, found, [], H) :- infer_subgoal(Idx = Idx2, H1), !, infer_subgoal(Bound_Val <= Upval, H2), merge_sort(H1, H2, H). element_update_infer(update(_Array, [Idx], Upval), <>, Bound_Val, Idx2, found, [], H) :- infer_subgoal(Idx = Idx2, H1), !, infer_subgoal(Bound_Val <> Upval, H2), merge_sort(H1, H2, H). element_update_infer(update(_Array, [Idx], Upval), =, Bound_Val, Idx2, found, [], H) :- infer_subgoal(Idx = Idx2, H1), !, infer_subgoal(Bound_Val = Upval, H2), merge_sort(H1, H2, H). % The previous clause failed at the Idx = Idx2 stage, therefore move down % the update chain. % Note that it is not safe to assume here that Idx <> Idx2, so we must % check this explicitly. element_update_infer(update(Array, [Idx], _Upval), OP, Bound_Val, Idx2, Status, Base_Array, H) :- infer_subgoal(Idx <> Idx2, H1), !, element_update_infer(Array, OP, Bound_Val, Idx2, Status, Base_Array, H2), merge_sort(H1, H2, H). % If there are no more updates, then return the Array unchanged. If there % are any remaining updates, then fail. element_update_infer(Array, _OP, _Bound_Val, _Idx2, base, Array, []) :- Array \= update(_, [_], _), !. %=============================================================================== % uq_infer(+Var : +Exp, -H). %------------------------------------------------------------------------------- % Infer conclusions containing universal quantifiers. %=============================================================================== uq_infer(Var : T, element(update(A, [I], Val), [Var]) >= X, H) :- uq_infer(Var : T, X <= element(update(A, [I], Val), [Var]), H). uq_infer(Var : T, X <= element(update(A, [_I], Val), [Var]), H) :- infer_subgoal(X <= Val, H1), !, uq_infer(Var : T, X <= element(A, [Var]), H2),!, merge_sort(H1, H2, H). uq_infer(Var : T, X >= element(update(A, [I], Val), [Var]), H) :- uq_infer(Var : T, element(update(A, [I], Val), [Var]) <= X, H). uq_infer(Var : T, element(update(A, [_I], Val), [Var]) <= X, H) :- infer_subgoal(Val <= X, H1),!, uq_infer(Var : T, element(A, [Var]) <= X, H2), merge_sort(H1, H2, H). uq_infer(Var : T, X >= element(A, [Var]), H) :- uq_infer(Var : T, element(A, [Var]) <= X, H). uq_infer(Var : T, element(A, [Var]) <= X, H) :- ( fact(for_all(Var : T, element(A, [Var]) <= UPB and LWB <= element(A, [Var])), H1) ; fact(for_all(Var : T, LWB <= element(A, [Var]) and element(A, [Var]) <= UPB), H1) ), infer_subgoal(UPB <= X, H2), !, merge_sort(H1, H2, H). uq_infer(Var : T, element(A, [Var]) >= X, H) :- uq_infer(Var : T, X <= element(A, [Var]), H). 
uq_infer(Var : T, X <= element(A, [Var]), H) :- ( fact(for_all(Var : T, element(A, [Var]) <= UPB and LWB <= element(A, [Var])), H1) ; fact(for_all(Var : T, LWB <= element(A, [Var]) and element(A, [Var]) <= UPB), H1) ), infer_subgoal(X <= LWB, H2), !, merge_sort(H1, H2, H). %=============================================================================== % intexp(+A). %------------------------------------------------------------------------------- % Succeeds if E is a nice integers-only expression. %=============================================================================== intexp(A) :- var(A), !, fail. intexp(A) :- integer(A), !. intexp(-A) :- intexp(A), !. intexp(A+B) :- intexp(A), intexp(B), !. intexp(A-B) :- intexp(A), intexp(B), !. intexp(A*B) :- intexp(A), intexp(B), !. intexp(A div B) :- intexp(A), intexp(B), !. %=============================================================================== % set_infer(+Exp, -Hs). %------------------------------------------------------------------------------- % Infer set Exp, reporting used hypotheses as Hs. %=============================================================================== set_infer(_X /\ (set []) = (set []), []) :- !. set_infer((set []) = _X /\ (set []), []) :- !. set_infer((set []) /\ _X = (set []), []) :- !. set_infer((set []) = (set []) /\ _X, []) :- !. set_infer(X /\ X = X, []) :- !. set_infer(X = X /\ X, []) :- !. set_infer(X /\ Y = Y /\ X, []) :- !. set_infer(X \/ (set []) = X, []) :- !. set_infer(X = X \/ (set []), []) :- !. set_infer((set []) \/ X = X, []) :- !. set_infer(X = (set []) \/ X, []) :- !. set_infer(X \/ X = X, []) :- !. set_infer(X = X \/ X, []) :- !. set_infer(X \/ Y = Y \/ X, []) :- !. set_infer(X \ (set []) = X, []) :- !. set_infer(X = X \ (set []), []) :- !. set_infer((set []) \ _X = (set []), []) :- !. set_infer((set []) = (set []) \ _X, []) :- !. set_infer(X \ X = (set []), []) :- !. set_infer(X=Y, Hs) :- ( X=Y, Hs=[] ; set_infrule(X=Y, Hs) ; set_infrule(X subset_of Y, Ha), set_infrule(Y subset_of X, Hb), !, merge_sort(Ha, Hb, Hs) ), !. set_infer((set [_X|_Y]) <> (set []), []) :- !. set_infer((set []) <> (set [_X|_Y]), []) :- !. set_infer(X <> (set []), Hs) :- set_infrule(_E in X, Hs), !. set_infer((set []) <> X, Hs) :- set_infrule(_E in X, Hs), !. set_infer(X <> Y, Hs) :- set_infrule(X <> Y, Hs), !. %------------------------------------------------------------------------------- set_infrule(X, Hs) :- fact(X, Hs). set_infrule(X=Y, Hs) :- fact(X=Z, H1), testused(X=Z), set_infrule(Y=Z, H2), merge_sort(H1, H2, Hs). set_infrule(X<>Y, Hs) :- fact(X=Z, H1), testused(X=Z), set_infrule(Z<>Y, H2), merge_sort(H1, H2, Hs). set_infrule(X<>Y, Hs) :- fact(X<>Z, H1), set_infrule(Z=Y, H2), merge_sort(H1, H2, Hs). set_infrule(X<>Y, Hs) :- fact(Y<>Z, H1), set_infrule(X=Z, H2), merge_sort(H1, H2, Hs). set_infrule(X in Y, Hs) :- ( fact(not (X not_in Y), Hs) ; fact(X=Z, H1), testused(X=Z), set_infrule(Z in Y, H2), merge_sort(H1, H2, Hs) ; fact(Y=Z, H1), testused(Y=Z), set_infrule(X in Z, H2), merge_sort(H1, H2, Hs) ). set_infrule(X not_in Y, Hs) :- ( fact(not (X in Y), Hs) ; fact(X=Z, H1), testused(X=Z), set_infrule(Z not_in Y, H2), merge_sort(H1, H2, Hs) ; fact(Y=Z, H1), testused(Y=Z), set_infrule(X not_in Z, H2), merge_sort(H1, H2, Hs) ). 
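%-------------------------------------------------------------------------------
% Illustrative sketch only - not part of the Simplifier sources.  It shows, in
% a self-contained form, the depth-limit bracket that try_to_infer/5 above
% builds from decrement_inference_depth_remaining/1 and
% increment_inference_depth_remaining/1.  All demo_* names are hypothetical.
% The shape to note: check and decrement the remaining budget, run the goal
% while capturing whether it succeeded, restore the budget unconditionally,
% and only then replay the captured outcome, so the counter is left intact on
% both success and failure.
%-------------------------------------------------------------------------------
:- dynamic(demo_depth_remaining/1).

demo_depth_remaining(3).                      /* initial budget for the demo */

demo_bounded_call(Goal) :-
    demo_depth_remaining(Depth),
    Depth > 0,                                /* fail once the budget is spent */
    !,
    retractall(demo_depth_remaining(_)),
    NewDepth is Depth - 1,
    asserta(demo_depth_remaining(NewDepth)),
    (
        call(Goal),
        Outcome = true                        /* remember that the goal succeeded */
    ;
        Outcome = fail                        /* ... or that it failed */
    ),
    !,
    retractall(demo_depth_remaining(_)),
    asserta(demo_depth_remaining(Depth)),     /* restore the budget */
    call(Outcome).                            /* replay the remembered outcome */

/* Example query (first solution only, as with try_to_infer/5):              */
/*   ?- demo_bounded_call(member(X, [a, b])).   binds X = a.                  */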
%############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/load__pfs.pro0000644000175000017500000004441211753202337017262 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Loads path functions for this session. %############################################################################### %############################################################################### % MODULE %############################################################################### :- module(load__pfs, [load_pfs/0]). %############################################################################### % DEPENDENCIES %############################################################################### :- use_module('data__formats.pro', [add_state/2, add_type/2]). :- use_module('data__data_files.pro', [get_datafiles_pfs/1]). :- use_module('data__pfs.pro', [add_pfs_action/2, add_pfs_pf/4, add_pfs_statement/2, add_pfs_successor_statement/3, add_pfs_traversal_condition/3]). :- use_module('ioutilities.pro', [read_line_from_stream/2, read_up_to_number_of_chars_from_stream/3, throw_error/2]). :- use_module('newutilities.pro', [unique_atom/2]). :- use_module('parseutilities.pro', [parse_atom_silent/4, parse_natural_int/3]). %############################################################################### % TYPES %############################################################################### :- add_type('ParseStatus', [end_of_file, notfinished, finished]). %############################################################################### % DATA %############################################################################### :- add_state(get_last_statement_id, get_last_statement_id('StatementId_Atom')). :- dynamic(get_last_statement_id/1). :- add_state(get_last_successor_statement_id, get_last_successor_statement_id('SuccessorStatementId_Atom')). :- dynamic(get_last_successor_statement_id/1). :- add_state(get_order, get_order('Int')). :- dynamic(get_order/1). %############################################################################### % PREDICATES %############################################################################### :- set_prolog_flag(double_quotes, chars). %=============================================================================== % load_pfs. %------------------------------------------------------------------------------- % Loads information from the known pfs file. 
%=============================================================================== load_pfs:- get_datafiles_pfs(PfsFile_Atom), open(PfsFile_Atom, read, Stream), process_path_functions(Stream), close(Stream), !. %=============================================================================== % process_path_functions(+Stream). %=============================================================================== process_path_functions(Stream):- initialise_order, repeat, read_line_from_stream(Stream, ReadText), process_path_functions_line(Stream, ReadText), process_path_functions_x(ReadText), !. %------------------------------------------------------------------------------- % Finish if at end of file. process_path_functions_x(end_of_file):- !. % From above, not at end of file. process_path_functions_x(_ReadLine):- fail. %=============================================================================== % process_path_functions_line(Stream, ReadText). %=============================================================================== % Do nothing at end of file. process_path_functions_line(_Stream, end_of_file):- !. % Check for statement line. process_path_functions_line(_Stream, CharList):- scan_for_statement(CharList), !. % Check for successor statement line. process_path_functions_line(_Stream, CharList):- scan_for_successor_statement(CharList), !. % Check for path function. process_path_functions_line(Stream, CharList):- scan_for_path_header(CharList, Number_Int), % Record this path function, associating with last found successor % statement. retrieve_and_increment_order(Order_Int), unique_atom('pf', PFId_Atom), must_get_last_successor_statement_id(SuccessorStatementId_Atom), add_pfs_pf(PFId_Atom, Order_Int, Number_Int, SuccessorStatementId_Atom), % The components of this path function are now retrieved. process_single_path_function(Stream, PFId_Atom), !. % From above, nothing found at this line. process_path_functions_line(_Stream, _CharList):- !. %=============================================================================== % scan_for_statement(+CharList). %=============================================================================== % Make this call visible to the spxref tool. :- public load__pfs:parse_statement/3. scan_for_statement(CharList):- phrase(parse_statement(PFTraceStatement), CharList), unique_atom('stmt', StatementId_Atom), add_pfs_statement(StatementId_Atom, PFTraceStatement), replace_last_statement_id(StatementId_Atom), !. %------------------------------------------------------------------------------- % Statement: XXX YYY parse_statement(statement(PFPositionFrom, Successors_Int)) --> "Statement:", parse_atom_silent([space, newline], zeroormore), parse_statement_position_from(PFPositionFrom), parse_atom_silent([space, newline], oneormore), parse_statement_successors(Successors_Int), parse_atom_silent([space, newline], zeroormore), !. %------------------------------------------------------------------------------- % start parse_statement_position_from(start) --> "start", !. % line 48 parse_statement_position_from(line(Int)) --> "line", parse_atom_silent([space, newline], oneormore), parse_natural_int(Int), !. %------------------------------------------------------------------------------- % 2 successor(s) parse_statement_successors(Int) --> parse_natural_int(Int), parse_atom_silent([space, newline], oneormore), "successor(s)", !. %=============================================================================== % scan_for_successor_statement(+CharList). 
%=============================================================================== % Make this call visible to the spxref tool. :- public load__pfs:parse_successor_statement/3. scan_for_successor_statement(CharList):- phrase(parse_successor_statement(PFTraceSuccessorStatement), CharList), must_get_last_statement_id(ParentStatementId_Atom), unique_atom('suc_stmt', SuccessorStatementId_Atom), add_pfs_successor_statement(SuccessorStatementId_Atom, PFTraceSuccessorStatement, ParentStatementId_Atom), replace_last_successor_statement_id(SuccessorStatementId_Atom), !. %------------------------------------------------------------------------------- % Successor statement: XXX. parse_successor_statement(successor_statement(PFPositionTo)) --> parse_atom_silent([space, newline], zeroormore), "Successor statement:", parse_atom_silent([space, newline], zeroormore), parse_statement_position_to(PFPositionTo), parse_atom_silent([space, newline], zeroormore), ".", !. %------------------------------------------------------------------------------- parse_statement_position_to(finish) --> "finish", !. parse_statement_position_to(line(Int)) --> "line", parse_atom_silent([space, newline], oneormore), parse_natural_int(Int), !. %=============================================================================== % scan_for_path_header(+CharList, -Number_Int). %=============================================================================== % Make this call visible to the spxref tool. :- public load__pfs:parse_path_header/3. scan_for_path_header(CharList, Number_Int):- phrase(parse_path_header(Number_Int), CharList), !. %------------------------------------------------------------------------------- % Path 1 parse_path_header(Number_Int) --> parse_atom_silent([space, newline], zeroormore), "Path", parse_atom_silent([space, newline], oneormore), parse_natural_int(Number_Int), !. %=============================================================================== % process_single_path_function(+Stream, +PFId_Atom). %=============================================================================== process_single_path_function(Stream, PFId_Atom):- % Retrieve traversal condition tag. read_line_from_stream(Stream, ReadText), confirm_traversal_condition(ReadText), % Retrieve traversal conditions (and action line). retrieve_traversal_conditions(Stream, PFId_Atom), % Retrieve action. retrieve_action(Stream, PFId_Atom), !. %=============================================================================== % confirm_traversal_condition(+ReadText). %=============================================================================== % Make this call visible to the spxref tool. :- public load__pfs:parse_traversal_condition/2. confirm_traversal_condition(end_of_file):- throw_error('Unexpected end of file in parsing pfs file.', []). confirm_traversal_condition(CharList):- phrase(parse_traversal_condition, CharList), !. confirm_traversal_condition(CharList):- throw_error('Badly formed traversal condition line: ~w', [CharList]). %------------------------------------------------------------------------------- % Traversal condition: parse_traversal_condition --> parse_atom_silent([space, newline], zeroormore), "Traversal condition:", parse_atom_silent([space, newline], zeroormore), !. %=============================================================================== % retrieve_traversal_conditions(+Stream, +PFId_Atom). 
%=============================================================================== retrieve_traversal_conditions(Stream, PFId_Atom):- repeat, read_up_to_number_of_chars_from_stream(Stream, 6, ReadText), process_single_path_function_leader(Stream, ReadText, PFId_Atom, ParseStatus), retrieve_traversal_conditions_x(ParseStatus), !. %------------------------------------------------------------------------------- % Is finished. retrieve_traversal_conditions_x(finished):- !. % Not finished. Fail and look at more lines. retrieve_traversal_conditions_x(notfinished):- fail. % Error if at end of file. retrieve_traversal_conditions_x(end_of_file):- throw_error('Unexpected end of file in parsing pfs file.', []). %=============================================================================== % process_single_path_function_leader(+Stream, +ReadText, +ParentPFId_Atom, % -ParseStatus). %=============================================================================== % At end_of_file only report end_of_file. process_single_path_function_leader(_Stream, end_of_file, _ParentPFId_Atom, end_of_file):- !. % Search for path formula. process_single_path_function_leader(Stream, CharList, ParentPFId_Atom, notfinished):- scan_for_path_formula(Stream, CharList, ParentPFId_Atom), !. % Search for action line. process_single_path_function_leader(Stream, CharList, _ParentPFId_Atom, finished):- scan_for_action_line(Stream, CharList), !. % None of the above is an error. process_single_path_function_leader(_Stream, CharList, _ParentPFId_Atom, _ParseStatus):- throw_error('Badly formed line in processing verification condition, starting: ~w', [CharList]). %=============================================================================== % scan_for_path_formula(+Stream, +CharList, +ParentVCId_Atom). %=============================================================================== % Make this call visible to the spxref tool. :- public load__pfs:parse_path_formula/3. scan_for_path_formula(Stream, CharList, ParentPFId_Atom):- phrase(parse_path_formula(Number_Int), CharList), % Retrieve the term from the stream. read_term(Stream, TravCond_Term, []), % Consume rest of line following the term. read_line_from_stream(Stream, _ReadText), add_pfs_traversal_condition(Number_Int, TravCond_Term, ParentPFId_Atom), !. %------------------------------------------------------------------------------- % 1: parse_path_formula(Number_Int) --> parse_atom_silent([space, newline], zeroormore), parse_natural_int(Number_Int), ":", parse_atom_silent([space, newline], zeroormore), !. %=============================================================================== % scan_for_action_line(+Stream, +CharList). %=============================================================================== % Make this call visible to the spxref tool. :- public load__pfs:parse_before_action_line/2. scan_for_action_line(Stream, CharList):- phrase(parse_before_action_line, CharList), % Retrieve the rest of the line, and check that the action line is as % expected. read_line_from_stream(Stream, ReadText), confirm_valid_action_line(ReadText), !. %------------------------------------------------------------------------------- % " " (i.e. six spaces) parse_before_action_line --> " ", !. %------------------------------------------------------------------------------- % Make this call visible to the spxref tool. :- public load__pfs:parse_action_line/2. confirm_valid_action_line(end_of_file):- throw_error('Unexpected end of file in parsing pfs file.', []). 
confirm_valid_action_line(CharList):- phrase(parse_action_line, CharList), !. confirm_valid_action_line(CharList):- throw_error('Badly formed action line between traversal conditions and action: ~w', [CharList]). %------------------------------------------------------------------------------- % Action: parse_action_line --> parse_atom_silent([space, newline], zeroormore), "Action:", parse_atom_silent([space, newline], zeroormore), !. %=============================================================================== % retrieve_action(Stream, PFId_Atom). %=============================================================================== retrieve_action(Stream, ParentPFId_Atom):- % Retrieve the term from the stream. read_term(Stream, Action_Term, []), % Consume rest of line following the term. read_line_from_stream(Stream, _ReadText), add_pfs_action(Action_Term, ParentPFId_Atom), !. %=============================================================================== % replace_last_statement_id(+StatementId_Atom). %=============================================================================== % Replace existing trace. replace_last_statement_id(StatementId_Atom):- retract(get_last_statement_id(_StatementId_Atom)), assert(get_last_statement_id(StatementId_Atom)), !. % Set initial trace. replace_last_statement_id(StatementId_Atom):- assert(get_last_statement_id(StatementId_Atom)), !. %=============================================================================== % must_get_last_statement_id(+StatementId_Atom). %=============================================================================== must_get_last_statement_id(StatementId_Atom):- get_last_statement_id(StatementId_Atom), !. must_get_last_statement_id(_StatementId_Atom):- throw_error('An expected statement line has not been found.', []). %=============================================================================== % replace_last_successor_statement_id(SuccessorStatementId_Atom). %=============================================================================== % Replace existing trace. replace_last_successor_statement_id(SuccessorStatementId_Atom):- retract(get_last_successor_statement_id(_SuccessorStatementId_Atom)), assert(get_last_successor_statement_id(SuccessorStatementId_Atom)), !. % Set initial trace. replace_last_successor_statement_id(SuccessorStatementId_Atom):- assert(get_last_successor_statement_id(SuccessorStatementId_Atom)), !. %=============================================================================== % must_get_last_successor_statement_id(SuccessorStatementId_Atom). %=============================================================================== must_get_last_successor_statement_id(SuccessorStatementId_Atom):- get_last_successor_statement_id(SuccessorStatementId_Atom), !. must_get_last_successor_statement_id(_SuccessorStatementId_Atom):- throw_error('An expected successor statement line has not been found.', []). %=============================================================================== % initialise_order. %=============================================================================== initialise_order:- retractall(get_order(_Int)), assert(get_order(1)), !. %=============================================================================== % retrieve_and_increment_order(+Int). %=============================================================================== retrieve_and_increment_order(Int):- retract(get_order(Int)), NextInt is Int+1, assert(get_order(NextInt)), !. :- set_prolog_flag(double_quotes, codes). 
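%-------------------------------------------------------------------------------
% Illustrative sketch only - not part of the Simplifier sources.  It shows the
% double_quotes/chars plus phrase/2 idiom that scan_for_path_header/2 above
% relies on, with a much simplified grammar (a single space and a bare digit
% run instead of parse_atom_silent and parse_natural_int).  All demo_* names
% are hypothetical.
%-------------------------------------------------------------------------------
:- set_prolog_flag(double_quotes, chars).

demo_scan_for_path_header(CharList, Number_Int):-
    phrase(demo_parse_path_header(Number_Int), CharList),
    !.

demo_parse_path_header(Number_Int) -->
    "Path ",
    demo_digits(Digit_CharList),
    { number_chars(Number_Int, Digit_CharList) }.

demo_digits([Char|Rest]) -->
    [Char],
    { Char @>= '0', Char @=< '9' },
    demo_more_digits(Rest).

demo_more_digits([Char|Rest]) -->
    [Char],
    { Char @>= '0', Char @=< '9' },
    demo_more_digits(Rest).
demo_more_digits([]) --> [].

/* Example query:                                                             */
/*   ?- atom_chars('Path 12', Cs), demo_scan_for_path_header(Cs, N).          */
/* binds N = 12.                                                              */

:- set_prolog_flag(double_quotes, codes).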
%############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/load__system.pro0000644000175000017500000000575011753202337020020 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Set information that is specific to the particular build or running % process. %############################################################################### %############################################################################### %MODULE %############################################################################### :- module(load__system, [load_system/0]). %############################################################################### %DEPENDENCIES %############################################################################### :- use_module('newutilities.pro', [fetch_date_and_time/2]). :- use_module('data__system.pro', [add_system_start_date_time/2]). %############################################################################### %TYPES %############################################################################### %############################################################################### %DYNAMICS %############################################################################### %############################################################################### %PREDICATES %############################################################################### %=============================================================================== % load_system. %------------------------------------------------------------------------------- % Sets configuration items that are specific to this particular process. %=============================================================================== load_system:- %Determine and store the time the system was started. This should be %the only point that the system clock is inspected. fetch_date_and_time(Date_Atom, Time_Atom), add_system_start_date_time(Date_Atom, Time_Atom), !. 
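%-------------------------------------------------------------------------------
% Illustrative sketch only - not part of the Simplifier sources.  It shows the
% "sample the clock once, then only read the stored fact" pattern that
% load_system/0 above uses for the start date and time.  All demo_* names are
% hypothetical, and the fixed date and time below are stand-ins for a real
% clock read.
%-------------------------------------------------------------------------------
:- dynamic(demo_start_date_time/2).

demo_fetch_date_and_time('01-JAN-2012', '12:00:00').

demo_load_system:-
    demo_fetch_date_and_time(Date_Atom, Time_Atom),
    assert(demo_start_date_time(Date_Atom, Time_Atom)),
    !.

/* Later consumers read the stored fact rather than the clock:                */
/*   ?- demo_load_system, demo_start_date_time(Date_Atom, Time_Atom).         */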
%############################################################################### %END-OF-FILE spark-2012.0.deb/simplifier/schedulesimplification.pro0000644000175000017500000013253011753202337022062 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Top level predicates to perform the simplification and outputting of % vcs. This ties together code for the purposes of refactoring and is a % strong candidate for post-refactoring work. %############################################################################### %############################################################################### % MODULE %############################################################################### %############################################################################### % DEPENDENCIES %############################################################################### %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % perform_simplification. %------------------------------------------------------------------------------- % VCG loading is interleaved with simplification, echoing of output, and final % output. This new predicate tries to reorganise these. %=============================================================================== % There is only a small amount of shared code between vcs and path % functions. Just consider these separately. perform_simplification(SimplifiedVcgFile_Stream) :- get_provenance_proof_file_kind(verification_conditions), perform_simplification_verification_conditions(SimplifiedVcgFile_Stream), !. % A dpc file contains a set of VCs where each VC has one "false" conclusion. % The Simplifier searches for dead paths by trying to prove each VC, and a % dead path exists if the proof is successful - the only way a VC can be % true when the conclusion is "false" is if the hypothesis infers "false". 
perform_simplification(SummaryDpcFile_Stream) :- get_provenance_proof_file_kind(deadpath_search), % Change this to corresponding siv's dpc equivalent (if any). perform_simplification_verification_conditions(SummaryDpcFile_Stream), !. perform_simplification(SimplifiedVcgFile_Stream) :- get_provenance_proof_file_kind(path_functions), perform_simplification_path_functions(SimplifiedVcgFile_Stream), !. %=============================================================================== % perform_simplification_verification_conditions(+SimplifiedVcgFile_Stream). %------------------------------------------------------------------------------- % Simplify verification conditions. %=============================================================================== perform_simplification_verification_conditions(SimplifiedVcgFile_Stream):- repeat, perform_simplification_verification_conditions_x(SimplifiedVcgFile_Stream), !. perform_simplification_verification_conditions_x(SimplifiedVcgFile_Stream):- % Retrieve the next vc to process. retrieve_next_vcg, % Get details for this vc. get_vcg_vc(VCId_Atom, Order_Int, _Name_Atom, _Number_Int, _ParentVCTraceId_Atom), % Record that at least one VC has been successfully recorded. record_vc_retrieved, tidy_up_vc_state, retractall(current_vc_number(_N)), assert(current_vc_number(Order_Int)), % Perform simplification. simplify_verification_conditions(SimplifiedVcgFile_Stream, VCId_Atom, Order_Int), ( path_functions ; nl(SimplifiedVcgFile_Stream) ), !, fail. % From above, no more VCs available. % It is an error if zero VCs were encountered. perform_simplification_verification_conditions_x(_SimplifiedVcgFile_Stream):- complain_if_zero_vcs_retrieved, !. %------------------------------------------------------------------------------- record_vc_retrieved:- \+ get_vcg_seen_vc, add_vcg_seen_vc, !. record_vc_retrieved:- !. %------------------------------------------------------------------------------- complain_if_zero_vcs_retrieved:- get_vcg_seen_vc, !. % From above, no vcs were retrieved. complain_if_zero_vcs_retrieved:- show_error('No VCs found in parsing vcg file.\n', []). %------------------------------------------------------------------------------- simplify_verification_conditions(_SimplifiedVcgFile_Stream, VCId_Atom, Order_Int):- get_switch_typecheck_only(on), write('Type checking VC: '), write(Order_Int), hoist_vc_hypotheses(VCId_Atom, Order_Int), hoist_vc_conclusions(VCId_Atom, Order_Int), write(' - OK'), nl, flush_output, !. simplify_verification_conditions(SimplifiedVcgFile_Stream, VCId_Atom, Order_Int):- get_switch_typecheck_only(off), refactor_write_vcs_top(SimplifiedVcgFile_Stream, Order_Int, VCId_Atom), hoist_vc_hypotheses(VCId_Atom, Order_Int), hoist_vc_conclusions(VCId_Atom, Order_Int), issue_message('', []), issue_message('Trying to simplify: ', VCId_Atom), simplify_vc, refactor_write_vcs(SimplifiedVcgFile_Stream, Order_Int, VCId_Atom), report_to_user(VCId_Atom), !. %------------------------------------------------------------------------------- hoist_vc_hypotheses(VCId_Atom, VC_Num):- get_vcg_hypothesis(Number_Int, Hyp_Term, VCId_Atom), prune_vcg_hypothesis(Number_Int, Hyp_Term, VCId_Atom), process_component(Hyp_Term, Number_Int, VC_Num, hyp), fail. hoist_vc_hypotheses(_VCId_Atom, _VC_Num):- !. %------------------------------------------------------------------------------- hoist_vc_conclusions(VCId_Atom, VC_Num):- retract(get_vcg_conclusion(Number_Int, Conc_Term, VCId_Atom)), process_component(Conc_Term, Number_Int, VC_Num, conc), fail. 
hoist_vc_conclusions(_VCId_Atom, _VC_Num):- !. %=============================================================================== % perform_simplification_path_functions(+SimplifiedVcgFile_Stream). %------------------------------------------------------------------------------- % Simplify path functions. %=============================================================================== perform_simplification_path_functions(SimplifiedVcgFile_Stream):- generate_integer(Order_Int), perform_simplification_path_functions_x(SimplifiedVcgFile_Stream, Order_Int), !. perform_simplification_path_functions_x(SimplifiedVcgFile_Stream, Order_Int):- get_pfs_pf(PFId_Atom, Order_Int, _Number_Int, _ParentSuccessorStatementId_Atom), tidy_up_vc_state, retractall(current_vc_number(_N)), assert(current_vc_number(Order_Int)), simplify_path_functions(SimplifiedVcgFile_Stream, PFId_Atom, Order_Int), ( path_functions ; nl(SimplifiedVcgFile_Stream) ), !, fail. perform_simplification_path_functions_x(_SimplifiedVcgFile_Stream, _Order_Int):- !. %------------------------------------------------------------------------------- simplify_path_functions(_SimplifiedVcgFile_Stream, PFId_Atom, Order_Int):- get_switch_typecheck_only(on), write('Type checking path function: '), write(Order_Int), hoist_pf_traversal_conditions(PFId_Atom), spacer(8), hoist_pf_actions(PFId_Atom), write(' - OK'), flush_output, !. simplify_path_functions(SimplifiedVcgFile_Stream, PFId_Atom, Order_Int):- get_switch_typecheck_only(off), refactor_write_vcs_top(SimplifiedVcgFile_Stream, Order_Int, PFId_Atom), hoist_pf_traversal_conditions(PFId_Atom), spacer(8), hoist_pf_actions(PFId_Atom), simplify_vc, refactor_write_vcs(SimplifiedVcgFile_Stream, Order_Int, PFId_Atom), report_to_user(PFId_Atom), !. %------------------------------------------------------------------------------- hoist_pf_traversal_conditions(PFId_Atom):- get_pfs_traversal_condition(_Number_Int, TravCond_Term, PFId_Atom), hoist_pf_traversal_conditions_x(TravCond_Term), fail. hoist_pf_traversal_conditions(_PFId_Atom):- !. %------------------------------------------------------------------------------- hoist_pf_traversal_conditions_x(TravCond_Term):- restructure_formula(TravCond_Term, INTERMEDIATE), checkvalidtraversalconditiontype(INTERMEDIATE, SIMPLIFIED_CONDITION), !, ( \+ get_hyp(SIMPLIFIED_CONDITION, _, _), !, add_hyp(SIMPLIFIED_CONDITION, x, HYP_NO), log_simplification(hyp,HYP_NO,INTERMEDIATE,SIMPLIFIED_CONDITION) ; get_hyp(SIMPLIFIED_CONDITION, _, N), assert_log_fact(duplicate_hyp, [N, SIMPLIFIED_CONDITION]) ), !. %------------------------------------------------------------------------------- checkvalidtraversalconditiontype(X, Y) :- checktype(X, boolean), !, ( simplification_is_on, simplify(X, Y) ; Y = X ), !. checkvalidtraversalconditiontype(_, _) :- stopwith('Above traversal condition does not typecheck correctly.'), !. %------------------------------------------------------------------------------- hoist_pf_actions(PFId_Atom):- get_pfs_action(Action_Term, PFId_Atom), hoist_pf_actions_x(Action_Term), fail. hoist_pf_actions(_VCId_Atom):- !. %------------------------------------------------------------------------------- hoist_pf_actions_x(Action_Term):- retractall(simplified_action_part), checkvalidactiontype(Action_Term, SIMPLIFIED_ACTION), !, ( simplified_action_part, assert_log_fact(simplified, [conc,1,Action_Term,SIMPLIFIED_ACTION]) ; true ), !, add_conc(SIMPLIFIED_ACTION, x, _). 
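%-------------------------------------------------------------------------------
% Illustrative sketch only - not part of the Simplifier sources.  It shows the
% failure-driven loop shape used by the hoist_* predicates above (and by
% perform_simplification_verification_conditions/1): each solution of the
% generator is processed for its side effects, fail forces backtracking into
% the next solution, and a final catch-all clause makes the driver succeed
% exactly once when the generator is exhausted.  All demo_* names are
% hypothetical.
%-------------------------------------------------------------------------------
demo_item(a).
demo_item(b).
demo_item(c).

demo_process(Item):-
    write('processed: '),
    write(Item),
    nl.

demo_process_all:-
    demo_item(Item),            /* generator: one solution per item           */
    demo_process(Item),         /* side-effecting work on the current item    */
    fail.                       /* force backtracking into the next item      */
demo_process_all:-
    !.                          /* succeed once every item has been processed */

/* ?- demo_process_all.   prints the three items and then succeeds.           */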
%------------------------------------------------------------------------------- checkvalidactiontype(X & Y, A & B) :- checkvalidactiontype(X, A), !, checkvalidactiontype(Y, B), !. checkvalidactiontype(X := Y, X := Z) :- atom(X), !, ( checktype(X, T) ; stopwith3('Undeclared identifier ', X, ' on l.h.s. of ":=" in above action.') ), !, ( restructure_formula(Y, Z1), checktype(Z1, T) ; stopwith3('Type of r.h.s. of ":=" does not match that of l.h.s. [', X, '] in above') ), !, ( simplification_is_on, simplify(Z1, Z), ( simplified_action_part ; Z \= Z1, assertz(simplified_action_part) ; true ) ; Z = Z1 ), !. checkvalidactiontype(unit(function),[]) :- !. checkvalidactiontype(null,[]) :- !. :- dynamic(have_seen/1). :- dynamic(pfs_format/1). %=============================================================================== % refactor_write_vcs_top(+SimplifiedVcgFile_Stream, +Order_Int, +Id_Atom). %=============================================================================== % Display the header part of verification conditions or path function. %=============================================================================== refactor_write_vcs_top(SimplifiedVcgFile_Stream, Order_Int, Id_Atom):- get_provenance_proof_file_kind(verification_conditions), write_before_vc_banner(SimplifiedVcgFile_Stream, Order_Int, Id_Atom), !. refactor_write_vcs_top(SummaryDpcFile_Stream, Order_Int, Id_Atom):- get_provenance_proof_file_kind(deadpath_search), write_before_vc_banner(SummaryDpcFile_Stream, Order_Int, Id_Atom), !. % DELETE_PATH_FUNCTIONS refactor_write_vcs_top(SimplifiedVcgFile_Stream, Order_Int, Id_Atom):- get_provenance_proof_file_kind(path_functions), write_before_pf_banner(SimplifiedVcgFile_Stream, Order_Int, Id_Atom), !. % END_DELETE_PATH_FUNCTIONS %------------------------------------------------------------------------------- write_before_vc_banner(SimplifiedVcgFile_Stream, Order_Int, VCId_Atom):- get_vcg_vc(VCId_Atom, Order_Int, Name_Atom, _Number_Int, ParentVCTraceId_Atom), get_vcg_trace(ParentVCTraceId_Atom, VCTrace), maybe_echo_vc_trace(SimplifiedVcgFile_Stream, ParentVCTraceId_Atom, VCTrace), write(SimplifiedVcgFile_Stream, Name_Atom), write(SimplifiedVcgFile_Stream, '.'), nl(SimplifiedVcgFile_Stream), !. %------------------------------------------------------------------------------- maybe_echo_vc_trace(_SimplifiedVcgFile_Stream, ParentVCTraceId_Atom, _VCTrace):- have_seen(ParentVCTraceId_Atom), !. maybe_echo_vc_trace(SimplifiedVcgFile_Stream, ParentVCTraceId_Atom, VCTrace):- echo_vc_trace(SimplifiedVcgFile_Stream, VCTrace), nl(SimplifiedVcgFile_Stream), nl(SimplifiedVcgFile_Stream), assert(have_seen(ParentVCTraceId_Atom)), !. %------------------------------------------------------------------------------- echo_vc_trace(SimplifiedVcgFile_Stream, fudge(Line)):- write(SimplifiedVcgFile_Stream, Line), !. echo_vc_trace(SimplifiedVcgFile_Stream, checkRefinementIntegrity):- write(SimplifiedVcgFile_Stream, 'For checks of refinement integrity: '), !. echo_vc_trace(SimplifiedVcgFile_Stream, subclassInheritanceIntegrity):- write(SimplifiedVcgFile_Stream, 'For checks of subclass inheritance integrity: '), !. echo_vc_trace(SimplifiedVcgFile_Stream, traverseCutpoints(VCCutpointFrom, VCCutpointTo)):- write(SimplifiedVcgFile_Stream, 'For path(s) from '), echo_vc_trace_vccutpointfrom(SimplifiedVcgFile_Stream, VCCutpointFrom), write(SimplifiedVcgFile_Stream, ' to '), echo_vc_trace_vccutpointto(SimplifiedVcgFile_Stream, VCCutpointTo), write(SimplifiedVcgFile_Stream, ':'), !. 
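%-------------------------------------------------------------------------------
% Illustrative sketch only - not part of the Simplifier sources.  It shows the
% "write a banner at most once" idiom that maybe_echo_vc_trace/3 above builds
% on the dynamic have_seen/1 fact: the first clause succeeds silently when the
% key has already been recorded, otherwise the second clause writes the banner
% (here to the current output rather than a named stream) and records the key.
% All demo_* names are hypothetical.
%-------------------------------------------------------------------------------
:- dynamic(demo_have_seen/1).

demo_echo_once(Key, _Banner_Atom):-
    demo_have_seen(Key),
    !.
demo_echo_once(Key, Banner_Atom):-
    write(Banner_Atom),
    nl,
    assert(demo_have_seen(Key)),
    !.

/* ?- demo_echo_once(trace_1, 'For checks of refinement integrity: '),        */
/*    demo_echo_once(trace_1, 'For checks of refinement integrity: ').        */
/* writes the banner exactly once.                                            */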
%------------------------------------------------------------------------------- echo_vc_trace_vccutpointfrom(SimplifiedVcgFile_Stream, start):- write(SimplifiedVcgFile_Stream, 'start'), !. echo_vc_trace_vccutpointfrom(SimplifiedVcgFile_Stream, assertion(AssertionKind, Line_Int)):- echo_vc_trace_assertionkind(SimplifiedVcgFile_Stream, AssertionKind), write(SimplifiedVcgFile_Stream, 'assertion of line '), write(SimplifiedVcgFile_Stream, Line_Int), !. %------------------------------------------------------------------------------- echo_vc_trace_assertionkind(SimplifiedVcgFile_Stream, userprovided):- write(SimplifiedVcgFile_Stream, ''), !. echo_vc_trace_assertionkind(SimplifiedVcgFile_Stream, default):- write(SimplifiedVcgFile_Stream, 'default '), !. %------------------------------------------------------------------------------- echo_vc_trace_vccutpointto(SimplifiedVcgFile_Stream, finish):- write(SimplifiedVcgFile_Stream, 'finish'), !. echo_vc_trace_vccutpointto(SimplifiedVcgFile_Stream, assertion(AssertionKind, Line_Int)):- echo_vc_trace_vccutpointfrom(SimplifiedVcgFile_Stream, assertion(AssertionKind, Line_Int)), !. echo_vc_trace_vccutpointto(SimplifiedVcgFile_Stream, check(CheckKind, Line_Int)):- echo_vc_trace_checkkind(SimplifiedVcgFile_Stream, CheckKind), write(SimplifiedVcgFile_Stream, 'check associated with statement of line '), write(SimplifiedVcgFile_Stream, Line_Int), !. %------------------------------------------------------------------------------- echo_vc_trace_checkkind(_SimplifiedVcgFile_Stream, userprovided):- !. echo_vc_trace_checkkind(SimplifiedVcgFile_Stream, runtime):- write(SimplifiedVcgFile_Stream, 'run-time '), !. echo_vc_trace_checkkind(SimplifiedVcgFile_Stream, precondition):- write(SimplifiedVcgFile_Stream, 'precondition '), !. %------------------------------------------------------------------------------- write_before_pf_banner(SimplifiedVcgFile_Stream, Order_Int, PFId_Atom):- get_pfs_pf(PFId_Atom, Order_Int, Number_Int, ParentSuccessorStatementId_Atom), get_pfs_successor_statement(ParentSuccessorStatementId_Atom, PFTraceSuccessorStatement, ParentStatementId_Atom), get_pfs_statement(ParentStatementId_Atom, PFTraceStatement), maybe_echo_pf_trace_statement(SimplifiedVcgFile_Stream, ParentStatementId_Atom, PFTraceStatement), maybe_echo_pf_trace_successor_statement(SimplifiedVcgFile_Stream, ParentSuccessorStatementId_Atom, PFTraceSuccessorStatement), do_echo_path(SimplifiedVcgFile_Stream, Number_Int), !. %------------------------------------------------------------------------------- maybe_echo_pf_trace_statement(_SimplifiedVcgFile_Stream, ParentStatementId_Atom, _PFTraceStatement):- have_seen(ParentStatementId_Atom), !. maybe_echo_pf_trace_statement(SimplifiedVcgFile_Stream, ParentStatementId_Atom, PFTraceStatement):- maybe_echo_pf_trace_statement_x(PFTraceStatement, AtomListList), flatten_list(AtomListList, AtomList), implode_separator_content_list('', AtomList, Atom), consider_fudge_line(SimplifiedVcgFile_Stream), assert(pfs_format(extraline)), write(SimplifiedVcgFile_Stream, Atom),nl(SimplifiedVcgFile_Stream), assert(stmt_line(Atom)), assert(have_seen(ParentStatementId_Atom)), !. %------------------------------------------------------------------------------- consider_fudge_line(SimplifiedVcgFile_Stream):- pfs_format(extraline), nl(SimplifiedVcgFile_Stream), !. consider_fudge_line(_SimplifiedVcgFile_Stream):- !. 
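%-------------------------------------------------------------------------------
% Illustrative sketch only - not part of the Simplifier sources.  It shows the
% clause-per-constructor rendering style used by echo_vc_trace_vccutpointfrom/2
% and its companions above, here building an atom rather than writing to a
% stream, and covering only two of the constructors.  All demo_* names are
% hypothetical.
%-------------------------------------------------------------------------------
demo_render_cutpoint(start, 'start'):-
    !.
demo_render_cutpoint(assertion(default, Line_Int), Out_Atom):-
    number_codes(Line_Int, Line_Codes),
    atom_codes(Line_Atom, Line_Codes),
    atom_concat('default assertion of line ', Line_Atom, Out_Atom),
    !.

/* ?- demo_render_cutpoint(assertion(default, 48), Out_Atom).                 */
/* binds Out_Atom = 'default assertion of line 48'.                           */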
%------------------------------------------------------------------------------- maybe_echo_pf_trace_statement_x(statement(PFPositionFrom, Successors_Int), ['Statement: ',OutPFPositionFrom,OutSuccessors]):- maybe_echo_pf_trace_statement_pfpositionfrom(PFPositionFrom, OutPFPositionFrom), maybe_echo_pf_trace_statement_pfsuccessors_int(Successors_Int, OutSuccessors), !. %------------------------------------------------------------------------------- maybe_echo_pf_trace_statement_pfpositionfrom(start, ['start ']):- !. maybe_echo_pf_trace_statement_pfpositionfrom(line(Int), ['line ', IntAtom]):- integer_to_atom(Int, IntAtom), !. %------------------------------------------------------------------------------- maybe_echo_pf_trace_statement_pfsuccessors_int(Int, [' ', IntAtom,' successor(s)']):- integer_to_atom(Int, IntAtom), !. %------------------------------------------------------------------------------- maybe_echo_pf_trace_successor_statement(_SimplifiedVcgFile_Stream, ParentStatementId_Atom, _PFTraceStatement):- have_seen(ParentStatementId_Atom), !. maybe_echo_pf_trace_successor_statement(SimplifiedVcgFile_Stream, ParentSuccessorStatementId_Atom, PFTraceSuccessorStatement):- maybe_echo_pf_trace_successor_statement_x(PFTraceSuccessorStatement, AtomListList), flatten_list(AtomListList, AtomList), implode_separator_content_list('', AtomList, Atom), write(SimplifiedVcgFile_Stream, Atom),nl(SimplifiedVcgFile_Stream), assert(succ_line(Atom)), assert(have_seen(ParentSuccessorStatementId_Atom)), !. %------------------------------------------------------------------------------- maybe_echo_pf_trace_successor_statement_x(successor_statement(PFPositionTo), [' Successor statement: ', PFPositionToOut, '.']):- maybe_echo_pf_trace_pfpositionto(PFPositionTo, PFPositionToOut), !. %------------------------------------------------------------------------------- maybe_echo_pf_trace_pfpositionto(finish, [' finish']):- !. maybe_echo_pf_trace_pfpositionto(line(Int), ['line ', IntAtom]):- integer_to_atom(Int, IntAtom), !. %------------------------------------------------------------------------------- do_echo_path(SimplifiedVcgFile_Stream, Int):- Int < 10, integer_to_atom(Int, IntAtom), implode_separator_content_list('', [' Path ', IntAtom], Atom), write(SimplifiedVcgFile_Stream, Atom),nl(SimplifiedVcgFile_Stream), assert(path_line(Atom)), !. % Only one space separator if at 10 or more. do_echo_path(SimplifiedVcgFile_Stream, Int):- integer_to_atom(Int, IntAtom), implode_separator_content_list('', [' Path ', IntAtom], Atom), write(SimplifiedVcgFile_Stream, Atom),nl(SimplifiedVcgFile_Stream), assert(path_line(Atom)), !. %=============================================================================== % refactor_write_vcs(+SimplifiedVcgFile_Stream, +Order_Int, +Id_Atom). %=============================================================================== % Display the content part of verification conditions, dead path search % or path function. %=============================================================================== % Keep vc/path functions separate. refactor_write_vcs(SimplifiedVcgFile_Stream, Order_Int, Id_Atom):- get_provenance_proof_file_kind(verification_conditions), setup_vc_name(Order_Int, Id_Atom), perform_write_verification_conditions(SimplifiedVcgFile_Stream, Id_Atom), !. refactor_write_vcs(SummaryDpcFile_Stream, Order_Int, Id_Atom):- get_provenance_proof_file_kind(deadpath_search), setup_vc_name(Order_Int, Id_Atom), perform_write_verification_conditions(SummaryDpcFile_Stream, Id_Atom), !. 
refactor_write_vcs(SimplifiedVcgFile_Stream, Order_Int, Id_Atom):- get_provenance_proof_file_kind(path_functions), perform_write_path_functions(SimplifiedVcgFile_Stream, Order_Int, Id_Atom), !. %------------------------------------------------------------------------------- perform_write_verification_conditions(SimplifiedVcgFile_Stream, Id_Atom):- write_vc(SimplifiedVcgFile_Stream, Id_Atom), write_log_facts, !. %------------------------------------------------------------------------------- perform_write_path_functions(SimplifiedVcgFile_Stream, _Order_Int, PFId_Atom):- write_vc(SimplifiedVcgFile_Stream, PFId_Atom), flush_output, write_log_facts, !. %------------------------------------------------------------------------------- setup_vc_name(Order_Int, Id_Atom):- retractall(vc_name(_)), get_vcg_vc(Id_Atom, Order_Int, Name_Atom, _Number_Int, _ParentVCTraceId_Atom), atom_concat(Name_Atom, '.', UseName_Atom), assert(vc_name(UseName_Atom)), !. %=============================================================================== % tidy_up_vc_state. %=============================================================================== % Clear up the vc state. %=============================================================================== tidy_up_vc_state :- prune_all_hyps(_,_,_), reset_next_hyp_id, prune_all_concs(_, _, _), retractall(found_contradiction), retractall(allow_new_strategies), retractall(join_hyp(_,_,_,_)), prune_all_subst_hyp(_, _, _), retractall(could_infer(_,_)), retractall(could_not_infer(_)), retractall(used(_)), retractall(inference_depth_limit(_,_)), retractall(raw_hyp_already_read(_,_)), !. %=============================================================================== % report_to_user. %=============================================================================== % Report progress to user. %=============================================================================== % Switch for dead path is off. report_to_user(_VC_Id) :- get_switch_deadpaths(off), get_switch_verbose(on), !, ( proved_all_conclusions, issue_message('All conclusions proved', []) ; found_contradiction, issue_message(' - Proved by contradiction within hypotheses', []) ; max_written_conc_no(MCN), issue_message('Number of conclusions remain unproven: ', MCN) ), issue_message('\n', []), !. report_to_user(VC_Id) :- % Report search for dead paths by ZombieScope. get_switch_deadpaths(on), !, report_to_user_deadpaths(VC_Id). % DELETE_PATH_FUNCTIONS report_to_user(_VC_Id) :- path_functions, /* output will be suppressed if echo is off. Churn out current path number instead */ !, current_vc_number(N), telling(OLDF), tell(user), write('Simplified path function: '), write(N), nl, flush_output, /*** Flush so that SPARKSimp gets the output promptly ***/ tell(OLDF). % END_DELETE_PATH_FUNCTIONS report_to_user(_VC_Id) :- /* output will be suppressed if echo is off. Churn out current vc number instead */ !, current_vc_number(N), telling(OLDF), tell(user), write('Simplified VC: '), write(N), ( proved_all_conclusions, write(' - All conclusions proved') ; found_contradiction, write(' - Proved by contradiction within hypotheses') ; max_written_conc_no(MCN), write(' - '), ( MCN == 1, write('1 conclusion remains unproven') ; write(MCN), write(' conclusions remain unproven') ), retractall(max_written_conc_no(_)) ), nl, flush_output, /*** Flush so that SPARKSimp gets the output promptly ***/ tell(OLDF). % Found dead path. 
report_to_user_deadpaths(VC_Id) :- current_vc_number(N), telling(OLDF), tell(user), write('VC: '), write(N), report_to_user_deadpaths_x(VC_Id), tell(OLDF), !. % ZombieScope detects a dead path if it proves the conclusion "false" by % inference or by contradiction. report_to_user_deadpaths_x(_VC_Id) :- proved_all_conclusions, !, write_deadpath_message. report_to_user_deadpaths_x(_VC_Id) :- found_contradiction, !, write_deadpath_message. % Not a dead path. report_to_user_deadpaths_x(VC_Id) :- \+ get_zombiescope_no_check_required(VC_Id), !, write(' - No dead path detected \n'), !. report_to_user_deadpaths_x(_VC_Id) :- % get_zombiescope_no_check_required(N), write(' - DPC not required for intermediate check\n'), !. % Write dead path detected message. write_deadpath_message :- write(' - Dead path detected \n'). % clauses used for reporting errors in a particular VC hyp or conc write_identity_of_predicate(N, VC_Num, hyp) :- write('VC '), write(VC_Num), write(' H'), write(N). write_identity_of_predicate(N, VC_Num, conc) :- write('VC '), write(VC_Num), write(' C'), write(N). /*** note use print/1 here to get FDL layout ***/ write_offending_predicate(FORMULA) :- write('Offending predicate is: '), print(FORMULA), nl. %=============================================================================== % process_component(+Term, +Number_Int, +VC_Num, +HypOrConc). %=============================================================================== % Adds Term as number Number_Int as a hypothesis or conclusion, depending % on HypOrConc. VC_Num is the current VC number, which is used in reporting % errors. %=============================================================================== process_component(FORM, N, VC_Num, Any) :- /* Trap formulae with Prolog variables in first */ \+ novars(FORM), !, write_error_preamble, write_identity_of_predicate(N, VC_Num, Any), write(' - Prolog variables occur in predicate.'), nl, /* we don't write the offending predicate out here, since the PROLOG */ /* variables get reported as _XXXXX where XXXXX varies between platforms */ /* This causes spurious regression test failures. */ set_exit_status, close_all_streams, halt, !. process_component(true, N, _VC_Num, hyp) :- /* Ignore true-already hypotheses */ add_hyp_with_id(true, x, N), !. process_component(FORM, N, _VC_Num, hyp) :- /* Next look for hypotheses already read in */ raw_hyp_already_read(M, FORM), !, assert_log_fact(repeat_hyp, [N, M]), add_hyp_with_id(true, x, N), !. process_component(FORM, N, VC_Num, F) :- /* Otherwise, process normally */ ( F = conc ; assertz(raw_hyp_already_read(N, FORM)) ), process_formula(FORM, FORMULA, N, VC_Num, F), process_component_x(F, FORMULA, x, N), log_simplification(F, N, FORM, FORMULA), !. process_component_x(hyp, FORMULA, Type, Id):- add_hyp_with_id(FORMULA, Type, Id), !. process_component_x(conc, FORMULA, Type, Id):- add_conc_with_id(FORMULA, Type, Id), !. process_component_x(HorC, _FORMULA, _Type, _Id):- show_error('HorC is not hyp or conc in predicate process_component_x but is: ~a', [HorC]), !. 
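%-------------------------------------------------------------------------------
% Illustrative sketch only (not part of the toolset): process_component/4 above
% first rejects any formula that still contains unbound Prolog variables (via
% the local novars/1 check). In standard Prolog the same guard can be written
% with the ground/1 builtin; the sketch_* name below is hypothetical.
sketch_component_is_ground(Form):-
    (   ground(Form)
    ->  true
    ;   format('warning: unbound Prolog variables in ~q~n', [Form]),
        fail
    ).
% e.g. sketch_component_is_ground(x > 0) succeeds, while
% sketch_component_is_ground(x > _Y) prints a warning and fails.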
/*** process_formula(OLD,NEW,N,VC_Num,F) -- filter & simplify OLD to get NEW ***/ /*** VC_Num, N, and F are used to report errors ***/ process_formula(OLD_FORMULA, NEW_FORMULA, N, VC_Num, F) :- ( restructure_formula(OLD_FORMULA, INTERMEDIATE) ; write_error_preamble, write_identity_of_predicate(N, VC_Num, F), write(' - Restructuring failed.'), nl, write_offending_predicate(OLD_FORMULA), set_exit_status, close_all_streams, halt ), !, ( typechecking_during_load(on), ( checktype(INTERMEDIATE, boolean) ; write_error_preamble, write_identity_of_predicate(N, VC_Num, F), write(' did not typecheck as boolean.'), nl, write_offending_predicate(OLD_FORMULA), !, set_exit_status, close_all_streams, halt ) ; true ), !, ( simplification_is_on, ( simplify(INTERMEDIATE, NEW_FORMULA) ; nl, write('!!! WARNING: '), write_identity_of_predicate(N, VC_Num, F), write(' - Simplification failed.'), nl, write_offending_predicate(OLD_FORMULA), NEW_FORMULA=INTERMEDIATE ) ; NEW_FORMULA=INTERMEDIATE ), !. %------------------------------------------------------------------------------- /*** restructure_formula(OLD,NEW) -- no ~, set & seq prefix changes ***/ restructure_formula(for_all(V:T, P), for_all(V:CT, NewP)) :- find_core_type(T, CT), ( % Required quantified variable already exists. % The variable is reused. var_const(V, CT, _), !, restructure_formula(P, NewP) ; % Required quantified variable does not already exist. It is % introduced, to support restructuring, and removed afterwards. asserta(var_const(V, CT, temp)), ( restructure_formula(P, NewP), retract(var_const(V, CT, temp)) ; retract(var_const(V, CT, temp)), !, fail ) ), !. restructure_formula(for_some(V:T, P), for_some(V:CT, NewP)) :- find_core_type(T, CT), ( % Required quantified variable already exists. % The variable is reused. var_const(V, CT, _), !, restructure_formula(P, NewP) ; % Required quantified variable does not already exist. It is % introduced, to support restructuring, and removed afterwards. asserta(var_const(V, CT, temp)), ( restructure_formula(P, NewP), retract(var_const(V, CT, temp)) ; retract(var_const(V, CT, temp)), !, fail ) ), !. restructure_formula(X+Y, NEW) :- restructure_formula(Y, NEWY), !, ( checktype(NEWY, T), type(T, set(_)), restructure_set(X, NEWX), NEW = (NEWX \/ NEWY) ; restructure_nonset(X, NEWX), NEW = NEWX + NEWY ), !. restructure_formula(X*Y, NEW) :- restructure_formula(Y, NEWY), !, ( checktype(NEWY, T), type(T, set(_)), restructure_set(X, NEWX), NEW = (NEWX /\ NEWY) ; restructure_nonset(X, NEWX), NEW = NEWX * NEWY ), !. restructure_formula(X-Y, NEW) :- restructure_formula(Y, NEWY), !, ( checktype(NEWY, T), type(T, set(_)), restructure_set(X, NEWX), NEW = (NEWX \ NEWY) ; restructure_nonset(X, NEWX), NEW = NEWX - NEWY ), !. restructure_formula(X/Y, NEW) :- restructure_formula(X, NEWX), restructure_formula(Y, NEWY), !, checktype(NEWX, TX), checktype(NEWY, TY), !, ( TX = integer, TY = integer, NEW iss (NEWX div NEWY), /*1.4*/ /* SEPR 627 */ int(NEW), /* for safety: don't want to invoke =:= evaluation */ int(NEWX), /* unless we're certain both sides can be evaluated */ int(NEWY), /* to an integer. */ NEWX =:= NEW * NEWY /* only if Y divides X */ /*1.4*/ ; NEW = (NEWX / NEWY) ), !. restructure_formula(X rem Y, NEWX rem NEWY) :- restructure_formula(X, NEWX), restructure_formula(Y, NEWY), !. restructure_formula(X<=Y, NEW) :- restructure_formula(Y, NEWY), !, ( checktype(NEWY, T), type(T, set(_)), restructure_formula(X, NEWX), NEW = (NEWX subset_of NEWY) ; restructure_formula(X, NEWX), NEW = (NEWX <= NEWY) ), !. 
restructure_formula(X>=Y, NEW) :- restructure_formula(Y, NEWY), !, ( checktype(NEWY, T), type(T, set(_)), restructure_set(X, NEWX), NEW = (NEWY subset_of NEWX) ; restructure_nonset(X, NEWX), NEW = (NEWX >= NEWY) ), !. restructure_formula(XY, NEW) :- restructure_formula(Y, NEWY), !, ( checktype(NEWY, T), type(T, set(_)), restructure_set(X, NEWX), NEW = (NEWY strict_subset_of NEWX) ; restructure_nonset(X, NEWX), NEW = (NEWX > NEWY) ), !. restructure_formula(element(A, I), element(NEWA, NEWI)) :- restructure_formula(A, NEWA), !, restructure_formula_list(I, NEWI), !. restructure_formula(update(A, I, X), update(NEWA, NEWI, NEWX)) :- restructure_formula(A, NEWA), !, restructure_formula_list(I, NEWI), !, restructure_formula(X, NEWX), !. restructure_formula(first(X), first(NEWX)) :- restructure_formula(X, NEWX), !. restructure_formula(last(X), last(NEWX)) :- restructure_formula(X, NEWX), !. restructure_formula(nonfirst(X), nonfirst(NEWX)) :- restructure_formula(X, NEWX), !. restructure_formula(nonlast(X), nonlast(NEWX)) :- restructure_formula(X, NEWX), !. restructure_formula(X @ Y, NEWX @ NEWY) :- restructure_formula(X, NEWX), restructure_formula(Y, NEWY), !. restructure_formula(X xor Y, (NEWX or NEWY) and not (NEWX and NEWY)) :- restructure_formula(X, NEWX), restructure_formula(Y, NEWY), !. restructure_formula(X or Y, NEWX or NEWY) :- restructure_formula(X, NEWX), restructure_formula(Y, NEWY), !. restructure_formula(X and Y, NEWX and NEWY) :- restructure_formula(X, NEWX), restructure_formula(Y, NEWY), !. restructure_formula(not X, not NEWX) :- restructure_formula(X, NEWX), !. restructure_formula(succ(X), succ(NEWX)) :- restructure_formula(X, NEWX), !. restructure_formula(pred(X), pred(NEWX)) :- restructure_formula(X, NEWX), !. restructure_formula(-X, -NEWX) :- restructure_formula(X, NEWX), !. restructure_formula(+X, NEWX) :- restructure_formula(X, NEWX), !. restructure_formula(abs(X), abs(NEWX)) :- restructure_nonset(X, NEWX), !. restructure_formula(sqr(X), sqr(NEWX)) :- restructure_nonset(X, NEWX), !. restructure_formula(odd(X), odd(NEWX)) :- restructure_nonset(X, NEWX), !. restructure_formula(X ** Y, NEWX ** NEWY) :- restructure_formula(X, NEWX), restructure_formula(Y, NEWY), !. restructure_formula((X~), NV) :- twiddles_conversion(X, NV), !. restructure_formula((X~), NV) :- path_functions, /* in which case it's an X' */ !, atom(X), name(X,XL), append(XL,[39],NVL), /* N.B. [39] = "'" */ name(NV,NVL), var_const(X,TYPE,v), assertz(var_const(NV,TYPE,c)), save_used_identifier(NV, var_const), assertz(twiddles_conversion(X, NV)), !. restructure_formula((X~), NV) :- atom(X), name(X,XL), append(XL,"~",NVL), name(NV,NVL), var_const(X,TYPE,v), assertz(var_const(NV,TYPE,c)), save_used_identifier(NV, var_const), assertz(twiddles_conversion(X, NV)), !. restructure_formula(+X, Y) :- restructure_formula(X, Y), !. restructure_formula(X, X) :- atomic(X), !. restructure_formula(X, X) :- float(X), !. restructure_formula(X, NEWX) :- nonvar(X), X =.. [F|ARGS], get_provenance_framework(spark), ( F = mk__array, !, restructure_array_aggregate(ARGS, NEWARGS) ; F = mk__record, !, restructure_record_aggregate(ARGS, NEWARGS) ; mk__function_name(F, _, array), !, restructure_array_aggregate(ARGS, NEWARGS) ; mk__function_name(F, _, record), !, restructure_record_aggregate(ARGS, NEWARGS) ), !, NEWX =.. [F|NEWARGS]. restructure_formula(X, Y) :- function_template(X, XL, F), !, restructure_formula_list(XL, YL), function_template(Y, YL, F), !. 
restructure_formula(X, Y) :- record_function(K, X, _, F, XL, _), !, restructure_formula_list(XL, YL), record_function(K, Y, _, F, YL, _), !. restructure_formula(X, Y) :- nonvar(X), X=..[OP|XARGS], ( type(OP,set(_)), XARGS=[XL], restructure_formula_list(XL, YL), Y=(set YL) ; type(OP,sequence(_)), XARGS=[XL], restructure_formula_list(XL, Y) ; XARGS = [XARG], ( ( OP = 'INTEGER' ; OP = 'SHORT_INTEGER' ; OP = 'LONG_INTEGER' ), TYPE = integer ; TYPE = OP ), restructure_formula(XARG, YARG), !, ( checktype(YARG, TYPE), Y = YARG ; Y =.. [OP,YARG] ) ; name(OP, OPL), append("mk__", TL, OPL), name(TYP, TL), ( type(TYP, array(_,_)), AF = mk__array ; type(TYP, record(_)), AF = mk__record ), X1 =.. [AF|XARGS], !, restructure_formula(X1, Y) ; restructure_formula_list(XARGS, YARGS), Y=..[OP|YARGS] ), !. %------------------------------------------------------------------------------- restructure_formula_list([X], [Y]) :- restructure_formula(X, Y), !. restructure_formula_list([X|XL], [Y|YL]) :- restructure_formula(X, Y), !, restructure_formula_list(XL, YL), !. restructure_formula_list([], []) :- !. %------------------------------------------------------------------------------- /*** restructure_set(OLD,NEW) -- no ~, set & seq prefix changes ***/ restructure_set(X+Y, NEWX \/ NEWY) :- restructure_set(X, NEWX), restructure_set(Y, NEWY), !. restructure_set(X*Y, NEWX /\ NEWY) :- restructure_set(X, NEWX), restructure_set(Y, NEWY), !. restructure_set(X-Y, NEWX \ NEWY) :- restructure_set(X, NEWX), restructure_set(Y, NEWY), !. restructure_set(X, Y) :- !, restructure_formula(X, Y), !. %------------------------------------------------------------------------------- /*** restructure_nonset(OLD,NEW) -- no ~, set & seq prefix changes ***/ restructure_nonset(X+Y, NEWX+NEWY) :- restructure_nonset(X, NEWX), restructure_nonset(Y, NEWY), !. restructure_nonset(X*Y, NEWX*NEWY) :- restructure_nonset(X, NEWX), restructure_nonset(Y, NEWY), !. restructure_nonset(X-Y, NEWX-NEWY) :- restructure_nonset(X, NEWX), restructure_nonset(Y, NEWY), !. restructure_nonset(X, Y) :- !, restructure_formula(X, Y), !. %------------------------------------------------------------------------------- restructure_array_aggregate([X|XL], [Y|YL]) :- ( X = (IND := EXPR), !, restructure_formula(EXPR, NEWEXPR), !, ( IND = (IND1 & IND2), restructure_indices(IND1, NEWIND1), restructure_indices(IND2, NEWIND2), !, NEWIND = (NEWIND1 & NEWIND2) ; IND = [LO .. HI], restructure_formula(LO, NEWLO), restructure_formula(HI, NEWHI), !, NEWIND = [NEWLO .. NEWHI] ; IND = [I], restructure_formula(I, NEWI), !, NEWIND = [NEWI] ), !, Y = (NEWIND := NEWEXPR) ; restructure_formula(X, Y) ), !, restructure_array_aggregate(XL, YL), !. restructure_array_aggregate([], []) :- !. %------------------------------------------------------------------------------- restructure_indices(X & Y, NEWX & NEWY) :- restructure_indices(X, NEWX), restructure_indices(Y, NEWY), !. restructure_indices([X .. Y], [NEWX .. NEWY]) :- restructure_formula(X, NEWX), restructure_formula(Y, NEWY), !. restructure_indices([I], NEWIND) :- restructure_formula(I, NEWI), !, NEWIND = [NEWI], !. %------------------------------------------------------------------------------- restructure_record_aggregate([F := EXP | REST], [F := NEWEXP | NEWREST]) :- !, restructure_formula(EXP, NEWEXP), !, restructure_record_aggregate(REST, NEWREST), !. restructure_record_aggregate([], []) :- !. %=============================================================================== % log_simplification(+HorC, +N, +OLD, +NEW). 
%=============================================================================== log_simplification(_HorC, _N, OLD, OLD) :- !. log_simplification(HorC, N, OLD, NEW) :- assert_log_fact(simplified, [HorC, N, OLD, NEW]), !. %=============================================================================== % do_wrap_lines. %------------------------------------------------------------------------------- % Wrap lines in logfile. %=============================================================================== do_wrap_lines :- nowrap_output_files, !. do_wrap_lines :- retrieve_simplified_proof_file(File_Atom), name(File_Atom, NEWVCL1), !, NEWVCL2 = NEWVCL1, !, do_wrap_lines_of(NEWVCL1, NEWVCL2), !, ( no_log_file ; logfile_name(LOGFILE), name(LOGFILE, LOGFL), do_wrap_lines_of(LOGFL, LOGFL) ), !. %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/data__rules.pro0000644000175000017500000001643311753202337017620 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Provides access to all information related to rules. This information % will be retrieved from the available rule files. %############################################################################### %############################################################################### % MODULE %############################################################################### :- module(data__rules, [get_rule/8, add_rule/8, prune_rule/8, replace_rule/3, nonground_replace_rule/4, inference_rule/3, user_inference_rule/3, user_rewrite_rule/4]). %############################################################################### % DEPENDENCIES %############################################################################### :- use_module('data__formats.pro', [add_state/2, add_type/2]). %############################################################################### % TYPES %############################################################################### :- add_type('RuleId', ['Atom', 'Functor1']). :- add_type('RuleSource', [rls, rlu]). % Logical descriptions, affecting the legal applicability of rules. :- add_type('RuleLogic', [implication, equation]). % Heuristics, used to guide the application of rules. :- add_type('RuleHeuristic', [hint_direct_introduce, hint_conditional_introduce, hint_rewrite_one_direction, hint_rewrite_both_directions]). 
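%-------------------------------------------------------------------------------
% Illustrative sketch only (not part of the toolset): the add_type/2
% declarations above (and the add_state/2 declaration in the DATA section
% below) register the value sets and state formats this module uses with
% data__formats. A minimal standalone registry in the same spirit, using
% assertz/1 and memberchk/2 from the lists library (sketch_* names are
% hypothetical), could be written as:
:- dynamic(sketch_type/2).

sketch_add_type(TypeName, Values):-
    assertz(sketch_type(TypeName, Values)).

sketch_is_value_of(TypeName, Value):-
    sketch_type(TypeName, Values),
    memberchk(Value, Values).
% After sketch_add_type('RuleLogic', [implication, equation]), the goal
% sketch_is_value_of('RuleLogic', equation) succeeds.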
%############################################################################### % DATA %############################################################################### :- add_state(get_rule, get_rule('RuleFile_Atom', 'RuleId', 'RuleSource', 'RuleHeuristic', 'RuleLogic', 'LHS_ExpListList', 'RHS_ExpListList', 'Condition_ExpList')). :- dynamic(get_rule/8). %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % Add. %=============================================================================== add_rule(RuleFile_Atom, RuleId, RuleSource, RuleHeuristic, RuleLogic, LHS_ExpList, RHS_ExpList, Condition_ExpList):- % The exact behaviour of the Simplifier is dependent on rule ordering. % Thus, the use of assertz here (rather than, say, asserta), is needed % to perserve regression testing. assertz(get_rule(RuleFile_Atom, RuleId, RuleSource, RuleHeuristic, RuleLogic, LHS_ExpList, RHS_ExpList, Condition_ExpList)), !. %=============================================================================== %=============================================================================== % Prune. %=============================================================================== prune_rule(RuleFile_Atom, RuleId, RuleSource, RuleHeuristic, RuleLogic, LHS_ExpList, RHS_ExpList, Condition_ExpList):- retract(get_rule(RuleFile_Atom, RuleId, RuleSource, RuleHeuristic, RuleLogic, LHS_ExpList, RHS_ExpList, Condition_ExpList)), !. %=============================================================================== %=============================================================================== % Refactor. %=============================================================================== replace_rule(RuleId, LHS_Exp, RHS_Exp):- get_rule(_RuleFile_Atom, RuleId, rls, _RuleHeuristic, equation, [LHS_Exp], [RHS_Exp], _Condition_ExpList), ground(LHS_Exp), ground(RHS_Exp). nonground_replace_rule(RuleFile_Atom:RuleId, LHS_Exp, RHS_Exp, Condition_ExpList):- get_rule(RuleFile_Atom, RuleId, rls, _RuleHeuristic, equation, [LHS_Exp], [RHS_Exp], Condition_ExpList), \+ ground((LHS_Exp,RHS_Exp)). inference_rule(RuleFile_Atom:RuleId, RHS_Exp, LHS_ExpList):- get_rule(RuleFile_Atom, RuleId, rls, _RuleHeuristic, implication, LHS_ExpList, [RHS_Exp], []). user_inference_rule(RuleFile_Atom:RuleId, RHS_Exp, LHS_ExpList):- get_rule(RuleFile_Atom, RuleId, rlu, _RuleHeuristic, implication, LHS_ExpList, [RHS_Exp], []). user_rewrite_rule(RuleFile_Atom:RuleId, FinalLHS_Exp, FinalRHS_Exp, Condition_ExpList):- get_rule(RuleFile_Atom, RuleId, rlu, RuleHeuristic, equation, [LHS_Exp], [RHS_Exp], Condition_ExpList), switch_bindings(RuleHeuristic, FinalLHS_Exp, FinalRHS_Exp, LHS_Exp,RHS_Exp). switch_bindings(hint_rewrite_one_direction, FinalLHS_Exp, FinalRHS_Exp, FinalLHS_Exp,FinalRHS_Exp). switch_bindings(hint_rewrite_both_directions, FinalLHS_Exp, FinalRHS_Exp, FinalLHS_Exp,FinalRHS_Exp). switch_bindings(hint_rewrite_both_directions, FinalLHS_Exp, FinalRHS_Exp, FinalRHS_Exp, FinalLHS_Exp). 
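%-------------------------------------------------------------------------------
% Illustrative sketch only (not part of the toolset): switch_bindings/5 above
% makes a user rewrite rule usable in one or both orientations, depending on
% its heuristic hint. The same idea in a standalone form (sketch_* names are
% hypothetical):
sketch_orientations(hint_rewrite_one_direction, LHS, RHS,
                    [rewrite(LHS, RHS)]).
sketch_orientations(hint_rewrite_both_directions, LHS, RHS,
                    [rewrite(LHS, RHS), rewrite(RHS, LHS)]).
% e.g. sketch_orientations(hint_rewrite_both_directions, x+0, x, Rs) gives
% Rs = [rewrite(x+0, x), rewrite(x, x+0)].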
%=============================================================================== %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/typecheck5.pro0000644000175000017500000006105411753202337017401 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % The main export, checktype(Expression, Type), takes a ground % (fully-instantiated) Expression and a Type which is either an atom or % uninstantiated. In the former case, the call will succeed if it is % possible to interpret Expression as being of the given Type; in the latter % case, the call will succeed if it is at all possible to type-check % Expression validly, and Type will end up instantiated to an appropriate % atomic type name as a result. %############################################################################### %############################################################################### % MODULE %############################################################################### %############################################################################### % DEPENDENCIES %############################################################################### %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### %############################################################################### % PREDICATES %############################################################################### checktype(E, T) :- nonvar(T), !, find_core_type(T, CT), !, ( checkhastype(E, CT) ; CT = real, checkhastype(E, integer) ), !. checktype(E, T) :- checkhastype(E, TYPE), !, ( T = TYPE ; TYPE = integer, !, T = real ; E =.. [mk__record|_], !, compatible_record_type(TYPE, T), checkhastype(E, T) ; E =.. [mk__array|_], !, compatible_array_type(TYPE, T), checkhastype(E, T) ). %------------------------------------------------------------------------------- checktypes([E|EL], [T|TL]) :- checktype(E, T), checktypes(EL, TL). checktypes([], []). %------------------------------------------------------------------------------- % BOOL1 checkhastype(true, boolean) :- !. % BOOL2 checkhastype(false, boolean) :- !. 
% BOOL3 checkhastype(for_all(V:AT, FORMULA), boolean) :- !, atom(V), find_core_type(AT, T), ( type(T, _) ; T = integer ; T = real ; T = boolean ), !, ( var_const(V, T, v), !, checkhastype(FORMULA, boolean) ; asserta(var_const(V, T, v)), checkhastype(FORMULA, boolean), retract(var_const(V, T, v)), ! ; retract(var_const(V, T, v)), fail ), !. % BOOL4 checkhastype(for_some(V:AT, FORMULA), boolean) :- !, atom(V), find_core_type(AT, T), ( type(T, _) ; T = integer ; T = real ; T = boolean ), !, ( var_const(V, T, v), !, checkhastype(FORMULA, boolean) ; asserta(var_const(V, T, v)), checkhastype(FORMULA, boolean), retract(var_const(V, T, v)), ! ; retract(var_const(V, T, v)), fail ), !. % I/R1 checkhastype(X+Y, IR) :- checkhastype(X, IRX), !, checkhastype(Y, IRY), !, ( ( IRX=real ; IRY=real ), !, IR=real ; IRX=integer, IRY=integer, (IR=integer ; IR=real) ), !. % I/R2 checkhastype(X-Y, IR) :- checkhastype(X, IRX), !, checkhastype(Y, IRY), !, ( ( IRX=real ; IRY=real ), !, IR=real ; IRX=integer, IRY=integer, (IR=integer ; IR=real) ), !. % I/R3 checkhastype(X*Y, IR) :- checkhastype(X, IRX), !, checkhastype(Y, IRY), !, ( ( IRX=real ; IRY=real ), !, IR=real ; IRX=integer, IRY=integer, (IR=integer ; IR=real) ), !. % I/R4 checkhastype(-X, IR) :- checkhastype(X, IRX), !, ( IRX=real, IR=real ; IRX=integer, (IR=integer ; IR=real) ), !. % INT1 checkhastype(X div Y, integer) :- checkhastype(X, integer), !, checkhastype(Y, integer), !. % INT2 checkhastype(X mod Y, integer) :- checkhastype(X, integer), !, checkhastype(Y, integer), !. % I/R5 checkhastype(abs(X), IR) :- checkhastype(X, IRX), !, ( IRX=real, IR=real ; IRX=integer, (IR=integer ; IR=real) ), !. % I/R6 checkhastype(sqr(X), IR) :- checkhastype(X, IRX), !, ( IRX=real, IR=real ; IRX=integer, (IR=integer ; IR=real) ), !. % I/R7 checkhastype(X ** Y, IR) :- checkhastype(Y, integer), checkhastype(X, IR), ( IR = integer ; IR = real ), !. % I/R8 checkhastype(+X, IR) :- checkhastype(X, IR), !, ( IR=real ; IR=integer ), !. % REA1 checkhastype(X/Y, real) :- checkhastype(X, real), !, checkhastype(Y, real), !. % REL1 checkhastype(X=Y, boolean) :- checkhastype(X, TX), ( ( TX=integer ; TX=real ), !, checkhastype(Y, TY), ( TY=integer ; TY=real ) ; checkhastype(Y, TX) ), !. % REL2 checkhastype(X<>Y, boolean) :- checkhastype(X, TX), ( ( TX=integer ; TX=real ), !, checkhastype(Y, TY), ( TY=integer ; TY=real ) ; checkhastype(Y, TX) ), !. % REL3 checkhastype(X>Y, boolean) :- checkhastype(X, TX), ( ( TX=integer ; TX=real ), !, checkhastype(Y, TY), ( TY=integer ; TY=real ) ; type(TX, enumerated), checkhastype(Y, TX) ), !. % REL4 checkhastype(X=Y, boolean) :- checkhastype(X, TX), ( ( TX=integer ; TX=real ), !, checkhastype(Y, TY), ( TY=integer ; TY=real ) ; type(TX, enumerated), checkhastype(Y, TX) ), !. % REL6 checkhastype(X<=Y, boolean) :- checkhastype(X, TX), ( ( TX=integer ; TX=real ), !, checkhastype(Y, TY), ( TY=integer ; TY=real ) ; type(TX, enumerated), checkhastype(Y, TX) ), !. % ARR1 checkhastype(update(A, IL, E), T) :- checkhastype(A, T), ( type(T, array(ITL, ET)) ; find_core_type(T, TN), type(TN, array(ITL, ET)) ), checkhastypelist(IL, ITL), checkhastype(E, ET), !. % ARR2 checkhastype(element(A, IL), ET) :- checkhastype(A, T), ( type(T, array(ITL, ET)) ; find_core_type(T, TN), type(TN, array(ITL, ET)) ), checkhastypelist(IL, ITL), !. % BOOL5 checkhastype((not X), boolean) :- checkhastype(X, boolean), !. % BOOL6 checkhastype(X and Y, boolean) :- checkhastype(X, boolean), !, checkhastype(Y, boolean), !. 
% BOOL7 checkhastype(X or Y, boolean) :- checkhastype(X, boolean), !, checkhastype(Y, boolean), !. % BOOL8 checkhastype(X -> Y, boolean) :- checkhastype(X, boolean), !, checkhastype(Y, boolean), !. % BOOL9 checkhastype(X <-> Y, boolean) :- checkhastype(X, boolean), !, checkhastype(Y, boolean), !. % INT3 checkhastype(A, integer) :- integer(A), !. % TYP1 checkhastype(A, T) :- atomic(A), var_const(A, T, _), !. % ODD checkhastype(odd(X), boolean) :- checkhastype(X, integer), !. % ORD1 checkhastype(pred(X), T) :- checkhastype(X, T), !, (type(T,enumerated) ; T=integer), !. % ORD2 checkhastype(succ(X), T) :- checkhastype(X, T), !, (type(T,enumerated) ; T=integer), !. % SEQ1 checkhastype(length(S), integer) :- checkhastype(S, ST), !, type(ST, sequence(_)), !. % SEQ2 checkhastype(first(S), ET) :- checkhastype(S, ST), type(ST, sequence(ET)). % SEQ3 checkhastype(last(S), ET) :- checkhastype(S, ST), type(ST, sequence(ET)). % SEQ4 checkhastype(nonfirst(S), ST) :- checkhastype(S, ST), type(ST, sequence(_)). % SEQ5 checkhastype(nonlast(S), ST) :- checkhastype(S, ST), type(ST, sequence(_)). % SEQ6 checkhastype(X @ Y, ST) :- checkhastype(X, ST), type(ST, sequence(_)), checkhastype(Y, ST). % SET1 checkhastype(X \/ Y, ST) :- checkhastype(X, ST), type(ST, set(_)), checkhastype(Y, ST). % SET2 checkhastype(X \ Y, ST) :- checkhastype(X, ST), type(ST, set(_)), checkhastype(Y, ST). % SET3 checkhastype(X /\ Y, ST) :- checkhastype(X, ST), type(ST, set(_)), checkhastype(Y, ST). % SET4 checkhastype(X in Y, boolean) :- checkhastype(Y, ST), type(ST, set(ET)), checkhastype(X, ET), !. % SET5 checkhastype(X not_in Y, boolean) :- checkhastype(Y, ST), type(ST, set(ET)), checkhastype(X, ET), !. % SET6 checkhastype(X subset_of Y, boolean) :- checkhastype(X, ST), type(ST, set(_)), checkhastype(Y, ST), !. % SET7 checkhastype(X strict_subset_of Y, boolean) :- checkhastype(X, ST), type(ST, set(_)), checkhastype(Y, ST), !. % SEQ7 checkhastype([], T) :- type(T, sequence(_)). % SEQ8 checkhastype([E|EL], T) :- checkhastype(E, ET), type(T, sequence(ET)), checkhastype(EL, T). % SET8 checkhastype(set [], T) :- type(T, set(_)). % SET9 checkhastype(set [E|EL], T) :- checkhastype(E, ET), type(T, set(ET)), checkhastype(set EL, T). % ARR3 checkhastype(mk__array(VALUE), T) :- ( type(T, array([IT], VT)) ; find_core_type(T, TN), type(TN, array([IT], VT)) ), VALUE \= (_ := _), checktype(VALUE, VT). % ARR4 checkhastype(AGG, T) :- \+ atomic(AGG), AGG =.. [F|ARGS], get_provenance_framework(spark), ( F = mk__array, !, collect_indices_and_values( ARGS, [I|INDS], [V|VALS]), !, checktype(I, IT), checkrestoflist(INDS, IT), checktype(V, VT), checkrestoflist(VALS, VT), type(T, array([IT], VT)) ; F = mk__record, !, collect_fields_and_values( ARGS, FIELDS, VALUES), type(T, record(RECFIELDS)), permutation_of_fields( FIELDS, RECFIELDS, TYPES), checktypelist(VALUES, TYPES) ). % ARR5 checkhastype(E, T) :- \+ atomic(E), E =.. [F,Arg], mk__function_name(F, TN, array), Arg \= (_ := _), !, ( type(TN, array([IT], VT)), T = TN ; find_core_type(TN, T), type(T, array([IT], VT)) ), checktype(Arg, VT). % ARR6 checkhastype(AGG, T) :- \+ atomic(AGG), AGG =.. 
[F|ARGS], get_provenance_framework(spark), ( mk__function_name(F , TN, array), !, collect_indices_and_values( ARGS, [I|INDS], [V|VALS]), !, checktype(I, IT), checkrestoflist(INDS, IT), checktype(V, VT), checkrestoflist(VALS, VT), ( type(TN, array([IT], VT)), T = TN ; find_core_type(TN, T), type(T, array([IT], VT)) ) ; mk__function_name(F, T, record), !, collect_fields_and_values( ARGS, FIELDS, VALUES), type(T, record(RECFIELDS)), permutation_of_fields( FIELDS, RECFIELDS, TYPES), checktypelist(VALUES, TYPES) ). % FUNC checkhastype(A, T) :- \+ atomic(A), function_template(A, AL, F), checktypelist(AL, ATL), ( function(F, ATLX, T) ; function(F, ATLX, XT), compatible_type_lists([XT], [T]) ), compatible_type_lists(ATL, ATLX), !. % REC1 checkhastype(A, T) :- \+ atomic(A), record_function(_N, A, access, _FIELD, [REC], _), checkhastype(REC, RTYPE), A =.. [F|_], ( function(F, [RTYPE], T) ; function(F, [RTYPE], XT), compatible_type_lists([XT], [T]) ), !. % REC2 checkhastype(A, RTYPE) :- \+ atomic(A), record_function(_N, A, update, _FIELD, [R,V], _), checkhastype(R, RTYPE), A =.. [F|_], function(F, [RTYPE,VTYPE], RTYPE), checkhastype(V, VTYPE), !. % UNIV checkhastype(A, T) :- \+ atomic(A), \+ function_template(A, _, _), \+ record_function(_, A, _, _, _, _), A=..[F|ARGS], check_is_an_ok_arity_function(F, ARGS), checktypelist(ARGS, ATL), ( function(F, ATLX, T) ; function(F, ATLX, XT), compatible_type_lists([XT], [T]) ), compatible_type_lists(ATL, ATLX), !. % REA2 checkhastype(X, real) :- checkhastype(X, integer), !. % CTL1 checktypelist([E], [T]) :- !, checkhastype(E, T), !. % CTL2 checktypelist([E|EL], [T|TL]) :- checkhastype(E, T), checktypelist(EL, TL), !. %------------------------------------------------------------------------------- % CHL1 checkhastypelist([E], [T]) :- !, checkhastype(E, T), !. % CHL2 checkhastypelist([E|EL], [T|TL]) :- checkhastype(E, T), !, checkhastypelist(EL, TL), !. % CHL3 checkhastypelist([E], T) :- !, checkhastype(E, T), !. % CHL4 checkhastypelist([E|EL], T) :- checkhastype(E, T), !, checkhastypelist(EL, T), !. %------------------------------------------------------------------------------- compatible_type_lists([A], [A]) :- !. compatible_type_lists([integer], [real]) :- !. compatible_type_lists([A|AL], [A|RL]) :- compatible_type_lists(AL, RL), !. compatible_type_lists([integer|AL],[real|RL]) :- compatible_type_lists(AL,RL), !. compatible_type_lists([T1|AL],[T2|RL]) :- compatible_set_or_seq_types(T1, T2), !, compatible_type_lists(AL, RL), !. compatible_type_lists([], []) :- !. %------------------------------------------------------------------------------- compatible_set_or_seq_types(T1, T2) :- type(T1, set(ET1)), type(T2, set(ET2)), !, compatible_type_lists([ET1],[ET2]), !. compatible_set_or_seq_types(T1, T2) :- type(T1, sequence(ET1)), type(T2, sequence(ET2)), !, compatible_type_lists([ET1],[ET2]), !. %------------------------------------------------------------------------------- check_is_an_ok_arity_function(F, ARGS) :- function(F, ARGL, _), length(ARGL, LEN), length(ARGS, LEN), !. %------------------------------------------------------------------------------- compatible_record_type(T1, T2) :- type(T1, record(F1)), type(T2, record(F2)), T1 \= T2, same_record_field_names(F1, F2). %------------------------------------------------------------------------------- same_record_field_names(Fs, Gs) :- collect_record_field_names(Fs, Fn), collect_record_field_names(Gs, Gn), sort(Fn, S), sort(Gn, S), !. 
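%-------------------------------------------------------------------------------
% Illustrative sketch only (not part of the toolset): compatible_record_type/2
% and same_record_field_names/2 above treat two distinct record types as
% compatible when they declare the same set of field names. A standalone check
% over two field-name lists, using the standard msort/2 builtin (the sketch_*
% name is hypothetical; the toolset uses sort/2, which also removes
% duplicates):
sketch_same_fields(Fields1, Fields2):-
    msort(Fields1, Sorted),
    msort(Fields2, Sorted).
% e.g. sketch_same_fields([year, month, day], [day, month, year]) succeeds,
% while sketch_same_fields([year, month], [year, day]) fails.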
%------------------------------------------------------------------------------- collect_record_field_names([[F,_]|FTs], [F|Fs]) :- !, collect_record_field_names(FTs, Fs). collect_record_field_names([], []) :- !. %------------------------------------------------------------------------------- compatible_array_type(T1, T2) :- type(T1, array(I1, R1)), type(T2, array(I2, R2)), T1 \= T2, compatible_array_result_types(R1, R2), compatible_array_indices(I1, I2). %------------------------------------------------------------------------------- compatible_array_result_types(T, T) :- !. compatible_array_result_types(integer, real) :- !. compatible_array_result_types(real, integer) :- !. compatible_array_result_types(T1, T2) :- compatible_record_type(T1, T2), !. %------------------------------------------------------------------------------- compatible_array_indices([I|Is], [I|Js]) :- !, compatible_array_indices(Is, Js). compatible_array_indices([], []) :- !. %------------------------------------------------------------------------------- collect_indices_and_values([A|ARGS], INDS, [V|VALS]) :- ( A = (LHS := V), collect_indices(LHS, I), collect_indices_and_values(ARGS, RESTINDS, VALS), !, append(I, RESTINDS, INDS) ; V = A, !, collect_indices_and_values(ARGS, INDS, VALS) ), !. collect_indices_and_values([], [], []) :- !. %------------------------------------------------------------------------------- collect_indices(X & Y, I) :- collect_indices(X, XL), collect_indices(Y, YL), !, append(XL, YL, I), !. collect_indices([X .. Y], [X,Y]) :- !. collect_indices([X], [X]) :- !. %------------------------------------------------------------------------------- collect_fields_and_values([(F := V)|ARGS], [F|Fs], [V|Vs]) :- !, collect_fields_and_values(ARGS, Fs, Vs), !. collect_fields_and_values([], [], []) :- !. %------------------------------------------------------------------------------- permutation_of_fields([F|FIELDS], FTL, [T|TYPES]) :- gen_append(LHS, [[F,T]|RHS], FTL), !, append(LHS, RHS, NEWFTL), !, permutation_of_fields(FIELDS, NEWFTL, TYPES), !. permutation_of_fields([], [], []) :- !. %------------------------------------------------------------------------------- checkrestoflist([X|XL], T) :- checktype(X, T), checkrestoflist(XL, T). checkrestoflist([], _). %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/load__vcg_dpc.pro0000644000175000017500000006204011753202337020074 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. 
% % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Loads all verification conditions from the provided input file. %############################################################################### %############################################################################### % MODULE %############################################################################### :- module(load__vcg_dpc, [load_vcg/0, load_dpc/0, retrieve_next_vcg/0]). %############################################################################### % DEPENDENCIES %############################################################################### :- use_module('data__formats.pro', [add_state/2, add_type/2]). :- use_module('data__data_files.pro', [get_datafiles_vcg/1, get_datafiles_dpc/1]). :- use_module('data__vcg.pro', [add_vcg_file_handle/1, get_vcg_file_handle/1, add_vcg_conclusion/3, add_vcg_hypothesis/3, add_vcg_trace/2, add_vcg_vc/5, get_vcg_vc/5, add_zombiescope_no_check_required/1, get_zombiescope_no_check_required/1, prune_vcg_file_handle/0, prune_all_vcg_vc/0, prune_all_vcg_hypothesis/0, prune_all_vcg_conclusion/0, prune_all_zombiescope_no_check_required/0]). :- use_module('ioutilities.pro', [read_line_from_stream/2, read_up_to_number_of_chars_from_stream/3, show_error/2]). :- use_module('newutilities.pro', [unique_atom/2, implode_separator_content_list/3]). :- use_module('parseutilities.pro', [parse_atom/5, parse_atom_silent/4, parse_natural_int/3, parse_all_to_nothing/2, parse_nothing_to_all/2]). :- use_module('data__switches.pro', [get_switch_deadpaths/1, get_switch_hyp_limit/1]). %############################################################################### % TYPES %############################################################################### :- add_type('ParseStatus', [end_of_file, notfinished, finished]). %############################################################################### % DATA %############################################################################### % Traceability lines are placed before a collection of VCs. As each VC is % processed, it is correctly associated with the last traceability line % encountered via this state. :- add_state(get_last_trace_id, get_last_trace_id('TraceId_Atom')). :- dynamic(get_last_trace_id/1). :- add_state(get_order, get_order('Int')). :- dynamic(get_order/1). %############################################################################### % PREDICATES %############################################################################### :- set_prolog_flag(double_quotes, chars). %=============================================================================== % load_vcg. %------------------------------------------------------------------------------- % Initialise for loading the vcs. %=============================================================================== load_vcg:- get_datafiles_vcg(VcgFile_Atom), open(VcgFile_Atom, read, Stream), add_vcg_file_handle(Stream), % Initialise the internal ordering. initialise_order, !. % Loading of dpc files is identical to loading vcgs. % It is assumed that all VCs contain a single "false" conclusion. load_dpc:- get_datafiles_dpc(DpcFile_Atom), open(DpcFile_Atom, read, Stream), add_vcg_file_handle(Stream), % Initialise the internal ordering. initialise_order, !. 
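%-------------------------------------------------------------------------------
% Illustrative sketch only (not part of the toolset): load_vcg/0 and load_dpc/0
% above open the input file and initialise the internal VC ordering counter
% (initialise_order and retrieve_and_increment_order, defined later in this
% file). That counter follows a common retract/assert idiom; a minimal
% standalone version (sketch_* names are hypothetical) looks like this:
:- dynamic(sketch_order/1).

sketch_initialise_order:-
    retractall(sketch_order(_)),
    assertz(sketch_order(1)).

sketch_next_order(Int):-
    retract(sketch_order(Int)),
    NextInt is Int+1,
    assertz(sketch_order(NextInt)).
% After sketch_initialise_order, successive calls to sketch_next_order(N)
% bind N to 1, 2, 3, ...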
%=============================================================================== % retrieve_next_vcg. %------------------------------------------------------------------------------- % Replace any currently loaded VC with the next VC from the file. If no % more VCs remain, then the predicate fails, and no VC data is stored. %=============================================================================== retrieve_next_vcg:- % Clear any previous VC information. clear_previous_vcg, % Get the next VC, if available. Fail otherwise. retrieve_next_vcg_x, !. %------------------------------------------------------------------------------- retrieve_next_vcg_x:- % If file handle is not present, have parsed whole file. get_vcg_file_handle(Stream), % If there is no next vc, have simplified all vcs. load_up_to_next_vc_start(Stream), % Collect the attributes of this vc. process_single_verification_condition(Stream), !. %------------------------------------------------------------------------------- clear_previous_vcg:- prune_all_vcg_vc, prune_all_vcg_hypothesis, prune_all_vcg_conclusion, prune_all_zombiescope_no_check_required, !. %=============================================================================== % load_up_to_next_vc_start(+VcgFile_Stream). %------------------------------------------------------------------------------- % Retrieve information from verification condition file stream, up to and % including the next VC start line. %=============================================================================== load_up_to_next_vc_start(VcgFile_Stream):- repeat, read_line_from_stream(VcgFile_Stream, ReadText), process_verification_conditions_line(ReadText), load_up_to_next_vc_start_x(ReadText), !. %------------------------------------------------------------------------------- % Do nothing at end of file. process_verification_conditions_line(end_of_file):- !. % Check for traceability line. process_verification_conditions_line(CharList):- scan_for_traceability(CharList), !. % Check for vc. process_verification_conditions_line(CharList):- scan_for_vc_header(CharList, Name_Atom, Number_Int), % Record this vc, associating with last found trace. retrieve_and_increment_order(Order_Int), unique_atom('vc', VCId_Atom), must_get_last_trace_id(TraceId_Atom), add_vcg_vc(VCId_Atom, Order_Int, Name_Atom, Number_Int, TraceId_Atom), !. % From above, nothing found at this line. process_verification_conditions_line(_CharList):- !. %------------------------------------------------------------------------------- %Reached start of a VC. Done. load_up_to_next_vc_start_x(_ReadText):- get_vcg_vc(_VCId_Atom, _Order_Int, _Name_Atom, _Number_Int, _TraceId_Atom), !. %Reached end of file. No more VCs to process. load_up_to_next_vc_start_x(end_of_file):- % Close the input stream. get_vcg_file_handle(VcgFile_Stream), close(VcgFile_Stream), % Remove the closed input stream. prune_vcg_file_handle, !. %=============================================================================== % process_single_verification_condition(+Stream, +ParentVCId_Atom). %------------------------------------------------------------------------------- % Retrieve a single VC from the verification condition file stream. Is is % expected that on entry the file stream is just after the VC header line. %=============================================================================== process_single_verification_condition(Stream):- % Get the VC id. 
get_vcg_vc(ParentVCId_Atom, _Order_Int, _Name_Atom, _Number_Int, _TraceId_Atom), repeat, read_up_to_number_of_chars_from_stream(Stream, 6, ReadText), process_single_verification_condition_leader(Stream, ReadText, ParentVCId_Atom, ParseStatus), process_single_verification_condition_x(ParseStatus), !. %------------------------------------------------------------------------------- % At end_of_file only report end_of_file. process_single_verification_condition_leader(_Stream, end_of_file, _ParentVCId_Atom, end_of_file):- !. % Search for finished line. process_single_verification_condition_leader(_Stream, CharList, _ParentVCId_Atom, finished):- scan_for_end_of_vc(CharList), !. % Search for vc that has been fully proved true. process_single_verification_condition_leader(Stream, CharList, ParentVCId_Atom, finished):- scan_for_proved_true_vc(Stream, CharList, ParentVCId_Atom), !. % Search for vc that has been fully proved false. process_single_verification_condition_leader(Stream, CharList, ParentVCId_Atom, finished):- scan_for_proved_false_vc(Stream, CharList, ParentVCId_Atom), !. % Search for vc hypothesis. process_single_verification_condition_leader(Stream, CharList, ParentVCId_Atom, notfinished):- scan_for_vc_hypothesis(Stream, CharList, ParentVCId_Atom), !. % Search for vc conclusion. process_single_verification_condition_leader(Stream, CharList, ParentVCId_Atom, notfinished):- scan_for_vc_conclusion(Stream, CharList, ParentVCId_Atom), !. % Search for implication between hypotheses and conclusions. process_single_verification_condition_leader(Stream, CharList, _ParentVCId_Atom, notfinished):- scan_for_vc_implication(Stream, CharList), !. % None of the above is an error. process_single_verification_condition_leader(_Stream, CharList, _ParentVCId_Atom, _ParseStatus):- show_error('Badly formed line in processing verification condition, starting: ~p\n', [CharList]). %------------------------------------------------------------------------------- % Is finished. process_single_verification_condition_x(finished):- !. % Not finished. Fail and look at more lines. process_single_verification_condition_x(notfinished):- fail. % Error if at end of file. process_single_verification_condition_x(end_of_file):- show_error('Unexpected end of file in parsing vcg file.\n', []). %=============================================================================== % scan_for_end_of_vc(CharList). %------------------------------------------------------------------------------- % Successful where the end of a vc is detected. %=============================================================================== %Make this call visible to the spxref tool. :- public load__vcg_dpc:parse_end_of_vc/3. scan_for_end_of_vc(CharList):- phrase(parse_end_of_vc(_Form), CharList), !. %------------------------------------------------------------------------------- % space newline parse_end_of_vc(normal)--> " ", !. % newline parse_end_of_vc(special)--> !. %=============================================================================== % scan_for_proved_true_vc(Stream, +CharList, +ParentVCId_Atom). %------------------------------------------------------------------------------- % Successful where a condensed true VC vc is detected. The VC details are % added accordingly. %=============================================================================== % Make this call visible to the spxref tool. :- public load__vcg_dpc:parse_proved_true_vc/2. 
scan_for_proved_true_vc(Stream, CharList, ParentVCId_Atom):- phrase(parse_proved_true_vc, CharList), % Consume the rest of the line from the input. There is an assumption % here that these true/false VCs will have more than six characters. % This is valid as per the examiner. read_line_from_stream(Stream, _ReadText), add_vcg_hypothesis(1, 'true', ParentVCId_Atom), scan_for_proved_true_vc_x(ParentVCId_Atom), !. scan_for_proved_true_vc_x(ParentVCId_Atom):- get_switch_deadpaths(off), add_vcg_conclusion(1, 'true', ParentVCId_Atom), !. % "Trivially true" VCs are DPCs that do not require ZombieScope % to check for dead paths. % ZombieScope model these VCs with a true hypothesis % and a false conclusion - that is, it is not a dead path. % Mark the DPC so that ZombieScope does not look for dead paths for this VC. scan_for_proved_true_vc_x(ParentVCId_Atom):- get_switch_deadpaths(on), add_vcg_conclusion(1, 'false', ParentVCId_Atom), add_zombiescope_no_check_required(ParentVCId_Atom), !. %------------------------------------------------------------------------------- % *** true . /* trivially true VC removed by Examiner */ parse_proved_true_vc --> "*", parse_all_to_nothing, !. %=============================================================================== %=============================================================================== % scan_for_proved_false_vc(+Stream, +CharList). %------------------------------------------------------------------------------- % Successful where a condensed false VC vc is detected. The VC details are % added accordingly. %=============================================================================== % Make this call visible to the spxref tool. :- public load__vcg_dpc:parse_proved_false_vc/2. scan_for_proved_false_vc(Stream, CharList, ParentVCId_Atom):- phrase(parse_proved_false_vc, CharList), % Consume the rest of the line from the input. There is an assumption % here that these true/false VCs will have more than six characters. % This is valid as per the examiner. read_line_from_stream(Stream, _ReadText), % Display a warning. format('!!! WARNING: UNPROVEABLE VC! Suggest you take corrective action.\n', []), % Insert a false vc. add_vcg_conclusion(1, 'false', ParentVCId_Atom), !. %------------------------------------------------------------------------------- % !!! false. /* WARNING: formula is false */ parse_proved_false_vc --> "!", parse_all_to_nothing, !. %=============================================================================== % scan_for_vc_hypothesis(+Stream, +CharList). %------------------------------------------------------------------------------- % Successful where a vc hypothesis is detected. The hypothesis is added to % the VC details accordingly. %=============================================================================== % Make this call visible to the spxref tool. :- public load__vcg_dpc:parse_vc_hypothesis/3. scan_for_vc_hypothesis(Stream, CharList, ParentVCId_Atom):- phrase(parse_vc_hypothesis(Number_Int), CharList), % Retrieve the term from the stream. read_term(Stream, Hyp_Term, []), % Consume rest of line following the term. % Accept any text up to the next newline. read_line_from_stream(Stream, _ReadText), add_vcg_hypothesis(Number_Int, Hyp_Term, ParentVCId_Atom), !. %------------------------------------------------------------------------------- parse_vc_hypothesis(Number_Int) --> "H", parse_natural_int(Number_Int), ":", parse_atom_silent([space, newline], zeroormore), !. 
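%-------------------------------------------------------------------------------
% Illustrative sketch only (not part of the toolset): parse_vc_hypothesis//1
% above recognises a hypothesis leader such as "H12:" before the hypothesis
% term itself is read from the stream. A standalone DCG in the same style
% (sketch_* names are hypothetical; double_quotes is chars here, as set earlier
% in this file, and memberchk/2 comes from the lists library):
sketch_hyp_leader(Number_Int) -->
    "H", sketch_digits(Digits), ":",
    { number_chars(Number_Int, Digits) }.

sketch_digits([D|Ds]) --> [D], { sketch_is_digit(D) }, sketch_more_digits(Ds).
sketch_more_digits([D|Ds]) --> [D], { sketch_is_digit(D) }, sketch_more_digits(Ds).
sketch_more_digits([]) --> [].

sketch_is_digit(D):- memberchk(D, ['0','1','2','3','4','5','6','7','8','9']).
% e.g. phrase(sketch_hyp_leader(N), "H12:") binds N to 12.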
%=============================================================================== % scan_for_vc_conclusion(Stream, CharList, ParentVCId_Atom). %------------------------------------------------------------------------------- % Successful where a vc conclusion is detected. The conclusion is added to % the VC details accordingly. %=============================================================================== % Make this call visible to the spxref tool. :- public load__vcg_dpc:parse_vc_conclusion/3. scan_for_vc_conclusion(Stream, CharList, ParentVCId_Atom):- phrase(parse_vc_conclusion(Number_Int), CharList), % Retrieve the term from the stream. read_term(Stream, Conc_Term, []), % Consume rest of line following the term. % Accept any text up to the next newline. read_line_from_stream(Stream, _ReadText), add_vcg_conclusion(Number_Int, Conc_Term, ParentVCId_Atom), !. %------------------------------------------------------------------------------- parse_vc_conclusion(Number_Int) --> "C", parse_natural_int(Number_Int), ":", parse_atom_silent([space, newline], zeroormore), !. %=============================================================================== % scan_for_vc_implication(+Stream, +CharList). %------------------------------------------------------------------------------- % Successful where the VC implication divider is detected. %=============================================================================== % Make this call visible to the spxref tool. :- public load__vcg_dpc:parse_vc_before_implication/2. scan_for_vc_implication(Stream, CharList):- phrase(parse_vc_before_implication, CharList), % Retrieve the rest of the line, and check that the implication is as % expected. read_line_from_stream(Stream, ReadText), confirm_valid_implication(ReadText), !. %------------------------------------------------------------------------------- % With six spaces the implication should follow. parse_vc_before_implication --> " ", !. %------------------------------------------------------------------------------- % Make this call visible to the spxref tool. :- public load__vcg_dpc:parse_vc_implication/2. confirm_valid_implication(end_of_file):- show_error('Unexpected end of file in parsing vcg file.\n', []). confirm_valid_implication(CharList):- phrase(parse_vc_implication, CharList), !. confirm_valid_implication(CharList):- show_error('Badly formed implication between hypotheses and conclusions: ~w\n', [CharList]). %------------------------------------------------------------------------------- parse_vc_implication --> parse_atom_silent([space, newline], zeroormore), "->", parse_atom_silent([space, newline], zeroormore), !. %=============================================================================== % scan_for_traceability(CharList). %------------------------------------------------------------------------------- % Successful where a traceability line is parsed. %=============================================================================== % Make this call visible to the spxref tool. :- public load__vcg_dpc:parse_traceability/3. scan_for_traceability(CharList):- phrase(parse_traceability(VCTrace), CharList), unique_atom('trace', TraceId_Atom), add_vcg_trace(TraceId_Atom, VCTrace), replace_last_trace_id(TraceId_Atom), !. 
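%-------------------------------------------------------------------------------
% Illustrative sketch only (not part of the toolset): scan_for_traceability/1
% above records each traceability line and remembers it as the last trace seen,
% so that subsequently parsed VCs can be associated with it. A minimal
% standalone version of that last-seen state (sketch_* names are hypothetical):
:- dynamic(sketch_last_trace/1).

sketch_set_last_trace(TraceId_Atom):-
    retractall(sketch_last_trace(_)),
    assertz(sketch_last_trace(TraceId_Atom)).

sketch_current_trace(TraceId_Atom):-
    sketch_last_trace(TraceId_Atom),
    !.
% e.g. after sketch_set_last_trace(trace_1) and sketch_set_last_trace(trace_2),
% sketch_current_trace(T) binds T to trace_2 only.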
%------------------------------------------------------------------------------- % For path(s) from XXX to YYY: parse_traceability(traverseCutpoints(VCCutpointFrom, VCCutpointTo)) --> "For path(s) from", parse_atom_silent([space, newline], oneormore), parse_cutpoint_from(VCCutpointFrom), parse_atom_silent([space, newline], oneormore), "to", parse_atom_silent([space, newline], oneormore), parse_cutpoint_to(VCCutpointTo), parse_atom_silent([space, newline], zeroormore), ":", !. parse_traceability(checkRefinementIntegrity) --> "For checks of refinement integrity: ", !. parse_traceability(subclassInheritanceIntegrity) --> "For checks of subclass inheritance integrity: ", !. parse_traceability(fudge(Line)) --> "For", parse_atom([not(colon)], oneormore, Atom), ":", {implode_separator_content_list('', ['For', Atom, ':'], Line)}, !. %------------------------------------------------------------------------------- % start parse_cutpoint_from(start) --> "start", !. % assertion of line NN parse_cutpoint_from(assertion(userprovided, Line_Int)) --> "assertion of line", parse_atom_silent([space, newline], oneormore), parse_natural_int(Line_Int), !. % default assertion of line NN parse_cutpoint_from(assertion(default, Line_Int)) --> "default assertion of line", parse_atom_silent([space, newline], oneormore), parse_natural_int(Line_Int), !. %------------------------------------------------------------------------------- % finish parse_cutpoint_to(finish) --> "finish", !. % assertion of line NN parse_cutpoint_to(assertion(userprovided, Line_Int)) --> "assertion of line", parse_atom_silent([space, newline], oneormore), parse_natural_int(Line_Int), !. % default assertion of line NN parse_cutpoint_to(assertion(default, Line_Int)) --> "default assertion of line", parse_atom_silent([space, newline], oneormore), parse_natural_int(Line_Int), !. % check associated with statement of line NN parse_cutpoint_to(check(userprovided, Line_Int)) --> "check associated with statement of line", parse_atom_silent([space, newline], oneormore), parse_natural_int(Line_Int), !. % run-time check associated with statement of line NN parse_cutpoint_to(check(runtime, Line_Int)) --> "run-time check associated with statement of line", parse_atom_silent([space, newline], oneormore), parse_natural_int(Line_Int), !. % precondition check associated with statement of line NN parse_cutpoint_to(check(precondition, Line_Int)) --> "precondition check associated with statement of line", parse_atom_silent([space, newline], oneormore), parse_natural_int(Line_Int), !. %=============================================================================== % scan_for_vc_header(+CharList, -Name_Atom, -Number_Int). %------------------------------------------------------------------------------- % Successful where a vc header line is parsed. %=============================================================================== % Make these calls visible to the spxref tool. :- public load__vcg_dpc:parse_vc_header_name/3. :- public load__vcg_dpc:parse_vc_header_number/3. scan_for_vc_header(CharList, Name_Atom, Number_Int):- phrase(parse_vc_header_name(Name_Atom), CharList), phrase(parse_vc_header_number(Number_Int), CharList), !. %------------------------------------------------------------------------------- % procedure_constrain_42. % Name_Atom is procedure_constrain_42 parse_vc_header_name(Name_Atom) --> parse_atom([alpha_numeric, under_score], oneormore, Name_Atom), ".", !. % procedure_constrain_42. 
% Number_Int is procedure_constrain_42 parse_vc_header_number(Number_Int) --> parse_nothing_to_all, parse_natural_int(Number_Int), ".", !. %=============================================================================== % replace_last_trace_id(+TraceId_Atom). %------------------------------------------------------------------------------- % Allow updating of the last traceability id as a new traceability line is % encountered. %=============================================================================== % Replace existing trace. replace_last_trace_id(TraceId_Atom):- retract(get_last_trace_id(_TraceId_Atom)), assert(get_last_trace_id(TraceId_Atom)), !. % Set initial trace. replace_last_trace_id(TraceId_Atom):- assert(get_last_trace_id(TraceId_Atom)), !. %=============================================================================== % must_get_last_trace_id(+TraceId_Atom). %------------------------------------------------------------------------------- % Raise an error if a trace id is requested, but one has not yet been seen. % This is likely a symptom of a malformed vcg file. %=============================================================================== must_get_last_trace_id(TraceId_Atom):- get_last_trace_id(TraceId_Atom), !. must_get_last_trace_id(_TraceId_Atom):- show_error('An expected traceability line has not been found.\n', []). %=============================================================================== % initialise_order. %------------------------------------------------------------------------------- % Initialise the VC ordering information. %=============================================================================== initialise_order:- retractall(get_order(_Int)), assert(get_order(1)), !. %=============================================================================== % retrieve_and_increment_order(+Int). %------------------------------------------------------------------------------- % Access and update the order information. %=============================================================================== retrieve_and_increment_order(Int):- retract(get_order(Int)), NextInt is Int+1, assert(get_order(NextInt)), !. :- set_prolog_flag(double_quotes, codes). %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/writevc.pro0000644000175000017500000004311311753202337017014 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. 
% % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Writes a verification condition or path function to the simplifier output % file. %############################################################################### %############################################################################### % MODULE %############################################################################### %############################################################################### % DEPENDENCIES %############################################################################### %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### dead_path_found_message('*** Dead path detected.'). dead_path_not_found_message('*** No dead path detected.'). dead_path_not_checked_message('*** DPC not required for intermediate check.'). %############################################################################### % PREDICATES %############################################################################### write_vc(SDPFile_Stream, VC_Id) :- get_switch_deadpaths(on), !, write_dead_path(SDPFile_Stream, VC_Id), !. write_vc(SimplifiedVcgFile_Stream, _VC_Id) :- write_pre_hyp_part(SimplifiedVcgFile_Stream), write_hypotheses(SimplifiedVcgFile_Stream), write_pre_conc_part(SimplifiedVcgFile_Stream), find_max_conc_no, write_conclusions(SimplifiedVcgFile_Stream), !. %------------------------------------------------------------------------------- %=============================================================================== % write_dead_path(+SDPFile_Stream). %------------------------------------------------------------------------------- % Write to SDPFile_Stream the result of the dead path search. %=============================================================================== write_dead_path(SDPFile_Stream, _VC_Id) :- % A dead path is found when a contradiction has been found. found_contradiction, !, write_dead_path_found(SDPFile_Stream). write_dead_path(SDPFile_Stream, _VC_Id) :- % A dead path is found when all conclusions (assume that the dpc file % contains a single conclusion of false) have been proved. proved_all_conclusions, !, write_dead_path_found(SDPFile_Stream). write_dead_path(SDPFile_Stream, VC_Id) :- \+ get_zombiescope_no_check_required(VC_Id), !, dead_path_not_found_message(DeadPathNotFound_CharList), write(SDPFile_Stream, DeadPathNotFound_CharList), nl(SDPFile_Stream), nl(SDPFile_Stream). write_dead_path(SDPFile_Stream, VC_Id) :- get_zombiescope_no_check_required(VC_Id), !, dead_path_not_checked_message(DeadPathNotChecked_CharList), write(SDPFile_Stream, DeadPathNotChecked_CharList), nl(SDPFile_Stream), nl(SDPFile_Stream). %=============================================================================== % write_dead_path_found(+SDPFile_Stream). %------------------------------------------------------------------------------- % Write to SDPFile_Stream a dead path that has been found. 
%=============================================================================== write_dead_path_found(SDPFile_Stream) :- dead_path_found_message(DeadPathFound_CharList), write(SDPFile_Stream, DeadPathFound_CharList), nl(SDPFile_Stream), nl(SDPFile_Stream). %------------------------------------------------------------------------------- write_pre_hyp_part(_SimplifiedVcgFile_Stream) :- \+ path_functions, !. write_pre_hyp_part(_SimplifiedVcgFile_Stream) :- found_contradiction, !. write_pre_hyp_part(SimplifiedVcgFile_Stream) :- !, write(SimplifiedVcgFile_Stream, ' Traversal condition:'), nl(SimplifiedVcgFile_Stream). %------------------------------------------------------------------------------- write_hypotheses(SimplifiedVcgFile_Stream) :- found_contradiction, !, ( path_functions, write(SimplifiedVcgFile_Stream, ' Path eliminated. (Contradictory traversal condition)') ; write(SimplifiedVcgFile_Stream, '*** true . /* contradiction within hypotheses. */'), nl(SimplifiedVcgFile_Stream), nl(SimplifiedVcgFile_Stream) ), !, nl(SimplifiedVcgFile_Stream). write_hypotheses(SimplifiedVcgFile_Stream) :- \+ path_functions, proved_all_conclusions, proved_by_user_rules, write(SimplifiedVcgFile_Stream, '*** true . /* proved using user-defined proof rules. */'), nl(SimplifiedVcgFile_Stream), !, nl(SimplifiedVcgFile_Stream). write_hypotheses(_SimplifiedVcgFile_Stream) :- \+ path_functions, proved_all_conclusions, !. write_hypotheses(SimplifiedVcgFile_Stream) :- retractall(hn(_)), retractall(nhn(_)), asserta(hn(0)), asserta(nhn(0)), !, repeat, get_next_hypothesis(H), process_next_hypothesis(SimplifiedVcgFile_Stream, H), % until H = '$DONE', !. %------------------------------------------------------------------------------- get_next_hypothesis(H) :- get_next_hn(N), fetch_hypothesis(N, H), !. %------------------------------------------------------------------------------- get_next_hn(N) :- retract(hn(K)), N is K+1, assertz(hn(N)), !. %------------------------------------------------------------------------------- fetch_hypothesis(N, _H) :- know_eliminated(N), !, fail. fetch_hypothesis(N, H) :- fetch_complexities(hyp, N, X, S, SS), fetch_minimum_complexity(hyp, N, X, S, SS, H), !. fetch_hypothesis(N, H) :- % Default if no complexity facts for hypotheses added later on. get_hyp(H, x, N), !. fetch_hypothesis(N, '$DONE') :- max_hyp_no(MAX), N > MAX, !. %------------------------------------------------------------------------------- fetch_complexities(HorC, N, X, S, SS) :- complexity_fact(HorC, N, x, X), ( complexity_fact(HorC, N, s, S) ; S = [] ), !, ( complexity_fact(HorC, N, ss, SS) ; SS = [] ), !. %------------------------------------------------------------------------------- fetch_minimum_complexity(hyp, N, X, S, SS, HYP) :- minimum_choice(X, S, SS, CHOICE), !, get_hyp(HYP, CHOICE, N), !. fetch_minimum_complexity(conc, N, X, S, SS, HYP) :- minimum_choice(X, S, SS, CHOICE), !, get_conc(HYP, CHOICE, N), !. fetch_minimum_complexity(HorC, _N, _X, _S, _SS, _HYP) :- show_error('HorC is not hyp or conc but is ~a in fetch_minimum_complexity.', [HorC]). %------------------------------------------------------------------------------- minimum_choice(_X, [], [], x) :- !. minimum_choice(X, S, [], C) :- !, ( X =< S + 1, C = x ; C = [s, _] ), !. minimum_choice(X, [], SS, C) :- !, ( X =< SS + 1, C = x ; C = ss ), !. minimum_choice(X, S, SS, C) :- ( X =< SS + 1, ( X =< S + 1, C = x ; C = [s, _] ) ; ( SS =< S, C = ss ; C = [s, _] ) ), !. 
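%-------------------------------------------------------------------------------
% Illustrative examples for minimum_choice/4 above: X is the complexity of the
% 'x' form and S and SS, where present, are the complexities of the alternative
% 's' and 'ss' forms; a missing form is passed as []. The clauses select the
% cheapest form to print, with a bias of one towards the 'x' form:
%
%   ?- minimum_choice(3, [], [], C).    % only the 'x' form is available
%   C = x
%
%   ?- minimum_choice(3, 7, [], C).     % 3 =< 7+1, so keep the 'x' form
%   C = x
%
%   ?- minimum_choice(9, 2, [], C).     % 9 > 2+1, so prefer the 's' form
%   C = [s, _]
%-------------------------------------------------------------------------------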
%------------------------------------------------------------------------------- %=============================================================================== % process_next_hypothesis(+Stream, +Hyp_Term) %------------------------------------------------------------------------------- % Write the hypothesis Hyp_Term to the output stream. %=============================================================================== process_next_hypothesis(SimplifiedVcgFile_Stream, '$DONE') :- nhn(0), !, write_unit_hyp_part(SimplifiedVcgFile_Stream). process_next_hypothesis(_SimplifiedVcgFile_Stream, '$DONE') :- !. process_next_hypothesis(_SimplifiedVcgFile_Stream, true) :- !. process_next_hypothesis(SimplifiedVcgFile_Stream, H) :- get_next_nhn(N), get_switch_renum(OnOrOff), process_next_hypothesis_x(SimplifiedVcgFile_Stream, N, H, OnOrOff), !. %=============================================================================== % process_next_hypothesis_x(+Stream, +HypNum_Int, +Hyp_Term, +Renum_OnOrOff) %------------------------------------------------------------------------------- % Use the actual hypothesis number (saved in hn) if Renum_OnOrOFf is 'off'; % Otherwise, use the hypothesis number HypNum_Int . %=============================================================================== process_next_hypothesis_x(SimplifiedVcgFile_Stream, N, H, on) :- write_next_hypothesis(SimplifiedVcgFile_Stream, N, H), !. process_next_hypothesis_x(SimplifiedVcgFile_Stream, _N1, H, off) :- hn(N), write_next_hypothesis(SimplifiedVcgFile_Stream, N, H), !. %------------------------------------------------------------------------------- get_next_nhn(N) :- retract(nhn(K)), N is K+1, assertz(nhn(N)), !. %------------------------------------------------------------------------------- write_unit_hyp_part(SimplifiedVcgFile_Stream) :- path_functions, !, write(SimplifiedVcgFile_Stream, ' true . {path is always traversed.}'), nl(SimplifiedVcgFile_Stream). write_unit_hyp_part(SimplifiedVcgFile_Stream) :- write(SimplifiedVcgFile_Stream, 'H1: true .'), nl(SimplifiedVcgFile_Stream). %------------------------------------------------------------------------------- write_next_hypothesis(SimplifiedVcgFile_Stream, N, H) :- path_functions, !, out_number_rj(SimplifiedVcgFile_Stream, N), print(SimplifiedVcgFile_Stream, H), write(SimplifiedVcgFile_Stream, ' .'), nl(SimplifiedVcgFile_Stream). write_next_hypothesis(SimplifiedVcgFile_Stream, N, H) :- write(SimplifiedVcgFile_Stream, 'H'), out_number_lj(SimplifiedVcgFile_Stream, N), print(SimplifiedVcgFile_Stream, H), write(SimplifiedVcgFile_Stream, ' .'), nl(SimplifiedVcgFile_Stream). %------------------------------------------------------------------------------- write_pre_conc_part(_SimplifiedVcgFile_Stream) :- found_contradiction, !. write_pre_conc_part(SimplifiedVcgFile_Stream) :- path_functions, !, write(SimplifiedVcgFile_Stream, ' Action:'), nl(SimplifiedVcgFile_Stream). write_pre_conc_part(_SimplifiedVcgFile_Stream) :- proved_all_conclusions, !. write_pre_conc_part(SimplifiedVcgFile_Stream) :- write(SimplifiedVcgFile_Stream, ' ->'), nl(SimplifiedVcgFile_Stream). %------------------------------------------------------------------------------- out_number_rj(SimplifiedVcgFile_Stream, N) :- size(N, DIGITS), ( SPACES = 5-DIGITS, SPACES >= 0 ; SPACES = 0 ), spacer(SimplifiedVcgFile_Stream, SPACES), !, write(SimplifiedVcgFile_Stream, N), write(SimplifiedVcgFile_Stream, ': '), !. 
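%-------------------------------------------------------------------------------
% Worked example for process_next_hypothesis_x above (a sketch; the hypothesis
% text itself is immaterial). Suppose hypotheses H1..H4 were read in and H2 and
% H3 were eliminated during simplification. The hn/1 counter tracks the
% original numbers while nhn/1 counts only the hypotheses actually written, so
% with the renumbering switch 'on' the survivors appear consecutively as
%
%   H1:    ... .
%   H2:    ... .
%
% whereas with the switch 'off' they keep their original numbers:
%
%   H1:    ... .
%   H4:    ... .
%-------------------------------------------------------------------------------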
%------------------------------------------------------------------------------- out_number_lj(SimplifiedVcgFile_Stream, N) :- size(N, DIGITS), ( SPACES is 4-DIGITS, SPACES >= 0 ; SPACES = 0 ), write(SimplifiedVcgFile_Stream, N), write(SimplifiedVcgFile_Stream, ': '), !, spacer(SimplifiedVcgFile_Stream, SPACES), !. %------------------------------------------------------------------------------- size(N, 1) :- N < 10, !. size(N, 2) :- N < 100, !. size(N, 3) :- N < 1000, !. size(N, 4) :- N < 10000, !. size(_N, 5) :- !. %------------------------------------------------------------------------------- write_conclusions(_SimplifiedVcgFile_Stream) :- found_contradiction, !. write_conclusions(_SimplifiedVcgFile_Stream) :- proved_all_conclusions, proved_by_user_rules, !. write_conclusions(SimplifiedVcgFile_Stream) :- path_functions, !, get_conc(X, _, 1), write_path_action(SimplifiedVcgFile_Stream, X), write(SimplifiedVcgFile_Stream, ' .'), nl(SimplifiedVcgFile_Stream), !. write_conclusions(SimplifiedVcgFile_Stream) :- retractall(hn(_)), retractall(nhn(_)), asserta(hn(0)), asserta(nhn(0)), !, repeat, get_next_conclusion(C), process_next_conclusion(SimplifiedVcgFile_Stream, C), % until C = '$DONE', !. %------------------------------------------------------------------------------- write_path_action(SimplifiedVcgFile_Stream, X & Y) :- !, write_path_action(SimplifiedVcgFile_Stream, X), !, write(SimplifiedVcgFile_Stream, ' &'), nl(SimplifiedVcgFile_Stream), !, write_path_action(SimplifiedVcgFile_Stream, Y), !. write_path_action(SimplifiedVcgFile_Stream, X := Y) :- spacer(SimplifiedVcgFile_Stream, 8), print(SimplifiedVcgFile_Stream, X), write(SimplifiedVcgFile_Stream, ' := '), ( simplify(Y, Z) ; Z = Y ), !, print(SimplifiedVcgFile_Stream, Z), !. write_path_action(SimplifiedVcgFile_Stream, []) :- write(SimplifiedVcgFile_Stream, ' unit function'), !. %------------------------------------------------------------------------------- get_next_conclusion(C) :- get_next_hn(N), fetch_conclusion(N, C), !. %------------------------------------------------------------------------------- fetch_conclusion(N, _C) :- get_proved_conc(N), !, fail. fetch_conclusion(N, C) :- fetch_complexities(conc, N, X, S, SS), fetch_minimum_complexity(conc, N, X, S, SS, C), !. fetch_conclusion(N, C) :- get_conc(C, x, N), % Default if no complexity facts, !. % for conclusions added later on. fetch_conclusion(N, '$DONE') :- max_conc_no(MAX), N > MAX, !. %------------------------------------------------------------------------------- process_next_conclusion(SimplifiedVcgFile_Stream, '$DONE') :- nhn(0), !, write_unit_conc_part(SimplifiedVcgFile_Stream). process_next_conclusion(SimplifiedVcgFile_Stream, '$DONE') :- nl(SimplifiedVcgFile_Stream), !. % Do not renumber conclusions. process_next_conclusion(SimplifiedVcgFile_Stream, C) :- renumber_conclusions(off), get_next_nhn(_DUMMY), hn(N), write_next_conclusion(SimplifiedVcgFile_Stream, N, C), !. % Do renumber conclusions. (This is the default) process_next_conclusion(SimplifiedVcgFile_Stream, C) :- renumber_conclusions(on), get_next_nhn(N), write_next_conclusion(SimplifiedVcgFile_Stream, N, C), !. %------------------------------------------------------------------------------- write_unit_conc_part(SimplifiedVcgFile_Stream) :- path_functions, !, write(SimplifiedVcgFile_Stream, ' (unit action: no variables affected)'), nl(SimplifiedVcgFile_Stream). write_unit_conc_part(SimplifiedVcgFile_Stream) :- write(SimplifiedVcgFile_Stream, '*** true . 
/* all conclusions proved */'), nl(SimplifiedVcgFile_Stream), nl(SimplifiedVcgFile_Stream). %------------------------------------------------------------------------------- write_next_conclusion(SimplifiedVcgFile_Stream, N, C) :- write(SimplifiedVcgFile_Stream, 'C'), out_number_lj(SimplifiedVcgFile_Stream, N), nhn(UndischargedConcs_Int), asserta(max_written_conc_no(UndischargedConcs_Int)), print(SimplifiedVcgFile_Stream, C), write(SimplifiedVcgFile_Stream, ' .'), nl(SimplifiedVcgFile_Stream). %------------------------------------------------------------------------------- proved_all_conclusions :- \+ (get_conc(_, _, N), \+ get_proved_conc(N)), maybe_issue_proved_vc_message, !. %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/data__data_files.pro0000644000175000017500000002514311753202337020557 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Provides access to all datafiles names used by the system. %############################################################################### :- module(data__data_files, [get_datafiles_vcg/1, add_datafiles_vcg/1, get_datafiles_dpc/1, add_datafiles_dpc/1, get_datafiles_simplified_vcg/1, add_datafiles_simplified_vcg/1, get_datafiles_summary_dpc/1, add_datafiles_summary_dpc/1, get_datafiles_fdl/1, add_datafiles_fdl/1, get_datafiles_local_user_rule/1, add_datafiles_local_user_rule/1, get_datafiles_global_user_rule/1, add_datafiles_global_user_rule/1, get_datafiles_rule/1, add_datafiles_rule/1, get_datafiles_pfs/1, add_datafiles_pfs/1, get_datafiles_simplified_pfs/1, add_datafiles_simplified_pfs/1, get_datafiles_dec/1, add_datafiles_dec/1, get_datafiles_log/1, add_datafiles_log/1, get_datafiles_debug/2, add_datafiles_debug/2, must_get_datafiles_debug/2, no_log_file/0, logfile_name/1, save_data__data_files/0]). %############################################################################### % DEPENDENCIES %############################################################################### :- use_module('data__formats.pro', [add_state/2, add_type/2]). :- use_module('ioutilities.pro', [write_terms_to_file/2, throw_error/2]). %############################################################################### % TYPES %############################################################################### :- add_type('ProofFileKind', [verification_conditions, deadpath_search, path_functions]). 
:- add_type('DebugFileKind', [data__prf, data__vcg, data__data_files]). %############################################################################### % DATA %############################################################################### % The existence of a file name in a datafiles predicate indicates that the % file pertinent to this session and (unless relevant portions of the file % system are being externally modified during execution) is available for % use. % Files used for verification conditions. :- add_state(get_datafiles_vcg, get_datafiles_vcg('VcgFile_Atom')). :- dynamic(get_datafiles_vcg/1). :- add_state(get_datafiles_dpc, get_datafiles_dpc('DpcFile_Atom')). :- dynamic(get_datafiles_dpc/1). :- add_state(get_datafiles_simplified_vcg, get_datafiles_simplified_vcg('SimplifiedVcgFile_Atom')). :- dynamic(get_datafiles_simplified_vcg/1). :- add_state(get_datafiles_summary_dpc, get_datafiles_summary_dpc('SummaryDpcFile_Atom')). :- dynamic(get_datafiles_summary_dpc/1). :- add_state(get_datafiles_fdl, get_datafiles_fdl('FdlFile_Atom')). :- dynamic(get_datafiles_fdl/1). :- add_state(get_datafiles_local_user_rule, get_datafiles_local_user_rule('LocalUserRuleFile_Atom')). :- dynamic(get_datafiles_local_user_rule/1). :- add_state(get_datafiles_global_user_rule, get_datafiles_global_user_rule('GlobalUserRuleFile_Atom')). :- dynamic(get_datafiles_global_user_rule/1). :- add_state(get_datafiles_rule, get_datafiles_rule('RuleFile_Atom')). :- dynamic(get_datafiles_rule/1). % Files used for path functions. :- add_state(get_datafiles_pfs, get_datafiles_pfs('PfsFile_Atom')). :- dynamic(get_datafiles_pfs/1). :- add_state(get_datafiles_simplified_pfs, get_datafiles_simplified_pfs('SimplifiedPfsFile_Atom')). :- dynamic(get_datafiles_simplified_pfs/1). :- add_state(get_datafiles_dec, get_datafiles_dec('DecFile_Atom')). :- dynamic(get_datafiles_dec/1). % Files used for both path functions and verification conditions. :- add_state(get_datafiles_log, get_datafiles_log('LogFile_Atom')). :- dynamic(get_datafiles_log/1). % Files used for debugging. % As several debugging datafiles may be used, it is convenient to store many % datafiles with the same predicate each with a unique reference. :- add_state(get_datafiles_debug, get_datafiles_debug('DebugFileKind', 'DebugFile_Atom')). :- dynamic(get_datafiles_debug/2). %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % Add. %=============================================================================== % Files used for verification conditions. add_datafiles_vcg(VcgFile_Atom):- assert(get_datafiles_vcg(VcgFile_Atom)), !. add_datafiles_dpc(DpcFile_Atom):- assert(get_datafiles_dpc(DpcFile_Atom)), !. add_datafiles_simplified_vcg(SimplifiedVcgFile_Atom):- assert(get_datafiles_simplified_vcg(SimplifiedVcgFile_Atom)), !. add_datafiles_summary_dpc(SummaryDpcFile_Atom):- assert(get_datafiles_summary_dpc(SummaryDpcFile_Atom)), !. add_datafiles_fdl(FdlFile_Atom):- assert(get_datafiles_fdl(FdlFile_Atom)), !. add_datafiles_rule(RuleFile_Atom):- assert(get_datafiles_rule(RuleFile_Atom)), !. add_datafiles_local_user_rule(LocalUserRuleFile_Atom):- assert(get_datafiles_local_user_rule(LocalUserRuleFile_Atom)), !. add_datafiles_global_user_rule(GlobalUserRuleFile_Atom):- assert(get_datafiles_global_user_rule(GlobalUserRuleFile_Atom)), !. % Files used for path functions. 
add_datafiles_pfs(PfsFile_Atom):- assert(get_datafiles_pfs(PfsFile_Atom)), !. add_datafiles_simplified_pfs(SimplifiedPfsFile_Atom):- assert(get_datafiles_simplified_pfs(SimplifiedPfsFile_Atom)), !. add_datafiles_dec(DecFile_Atom):- assert(get_datafiles_dec(DecFile_Atom)), !. % Files used for both path functions and verification conditions. add_datafiles_log(LogFile_Atom):- assert(get_datafiles_log(LogFile_Atom)), !. % Files used for debugging. add_datafiles_debug(DebugFileKind, DebugFile_Atom):- assert(get_datafiles_debug(DebugFileKind, DebugFile_Atom)), !. %=============================================================================== %=============================================================================== % must_get_datafiles_debug(+DebugFileKind, DebugFile_Atom). %------------------------------------------------------------------------------- % Alternative access to get_datafiles_debug, raising an error if the requested % debug datafiles can not be found. %=============================================================================== must_get_datafiles_debug(DebugFileKind, _DebugFile_Atom):- \+ atom(DebugFileKind), throw_error('Expected atomic name for debug datafiles to find, but got: ~k', [DebugFileKind]). % Return the first found datafiles. must_get_datafiles_debug(DebugFileKind, DebugFile_Atom):- get_datafiles_debug(DebugFileKind, DebugFile_Atom), !. % From above, no datafiles found. must_get_datafiles_debug(DebugFileKind, _DebugFile_Atom):- throw_error('Could not find debug datafiles for: Expected atomic name for: ~k', [DebugFileKind]). %=============================================================================== %=============================================================================== % Refactor. %=============================================================================== no_log_file:- \+ get_datafiles_log(_FILE). logfile_name(FILE):- get_datafiles_log(FILE). %=============================================================================== %=============================================================================== % save_data__data_files. %=============================================================================== save_data__data_files:- must_get_datafiles_debug(data__data_files, DebugFile_Atom), write_terms_to_file(DebugFile_Atom, [data__data_files:get_datafiles_vcg/1, data__data_files:get_datafiles_simplified_vcg/1, data__data_files:get_datafiles_summary_dpc/1, data__data_files:get_datafiles_fdl/1, data__data_files:get_datafiles_local_user_rule/1, data__data_files:get_datafiles_global_user_rule/1, data__data_files:get_datafiles_rule/1, data__data_files:get_datafiles_pfs/1, data__data_files:get_datafiles_simplified_pfs/1, data__data_files:get_datafiles_dec/1, data__data_files:get_datafiles_log/1, data__data_files:get_datafiles_debug/2]), !. %=============================================================================== %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/Makefile0000644000175000017500000001046311753202337016251 0ustar eugeneugen# ----------------------------------------------------------------------------- # (C) Altran Praxis Limited # ----------------------------------------------------------------------------- # # The SPARK toolset is free software; you can redistribute it and/or modify it # under terms of the GNU General Public License as published by the Free # Software Foundation; either version 3, or (at your option) any later # version. 
The SPARK toolset is distributed in the hope that it will be # useful, but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the GNU # General Public License distributed with the SPARK toolset; see file # COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of # the license. # # ============================================================================= ################################################################################ # PURPOSE #------------------------------------------------------------------------------- # Makefile for the Simplifier # For Linux, Windows, Solaris or Mac OS X (aka Darwin). ################################################################################ .NOTPARALLEL: ################################################################################ # BUILD CONFIGURATION ################################################################################ # Name of the output program. OUTPUTNAME_SIMPLIFIER:=spadesimp OUTPUTNAME_ZOMBIESCOPE:=zombiescope # Location of root. ROOT:=.. # Location of common. COMMON:=${ROOT}/common include ${COMMON}/Makefile.inc ################################################################################ # PLATFORM SPECIFIC CONFIGURATION ################################################################################ # Darwin (Mac OS X). ifeq (${TARGET},Darwin) # Modify PATH to pick up Apple's gcc before GNAT Pro. PATH:=/usr/bin:${PATH} endif ################################################################################ # TARGETS ################################################################################ all: ${OUTPUTNAME_SIMPLIFIER}${EXE_EXTN} ${OUTPUTNAME_ZOMBIESCOPE}${EXE_EXTN} build_manifest # Note that the build assumes PATH is set to include SICStus binaries and (on # Windows) MS VC Tools and LIB and INCLUDE are set to point at MS VC libraries # and includes respectively. ${OUTPUTNAME_SIMPLIFIER}${EXE_EXTN}: *.pro sicstus ${SICSTUS_SWITCHES} --goal "compile('simplifier.pro'), add_system_toolname('Simplifier'), buildsav_simplifier." spld ${SPLD_CONF} -o $@ --static --resources=${OUTPUTNAME_SIMPLIFIER}${SAV_EXTN}=/${OUTPUTNAME_SIMPLIFIER}${SAV_EXTN},${SICSTUS_LIBS} ${OUTPUTNAME_ZOMBIESCOPE}${EXE_EXTN}: *.pro sicstus ${SICSTUS_SWITCHES} --goal "compile('simplifier.pro'), add_system_toolname('ZombieScope'), buildsav_zombiescope." spld ${SPLD_CONF} -o $@ --static --resources=${OUTPUTNAME_ZOMBIESCOPE}${SAV_EXTN}=/${OUTPUTNAME_ZOMBIESCOPE}${SAV_EXTN},${SICSTUS_LIBS} # Manifest only required on Windows. ifeq ($(findstring ${TARGET},Windows),${TARGET}) build_manifest: # Copy over the correct manifest file for this windows build. cp ${OUTPUTNAME_SIMPLIFIER}.windows.manifest ${OUTPUTNAME_SIMPLIFIER}${EXE_EXTN}.manifest cp ${OUTPUTNAME_ZOMBIESCOPE}.windows.manifest ${OUTPUTNAME_ZOMBIESCOPE}${EXE_EXTN}.manifest else build_manifest: endif spxref: spxref -R -i spxref.pro simplifier.pro -w spxref_warning.txt -x spxref_cross.txt -m spxref_ported.txt -u spxref_undefined.txt spxrefplain: spxref # Normalize anonymous prolog variables. 
sed -e 's/_[0-9][0-9]*/_/g' spxref_undefined.txt > spxref_undefined.txt.tmp mv spxref_undefined.txt.tmp spxref_undefined.txt # Cleaning code base #=================== clean: rm -f spadesimp.sav spadesimp.exp spadesimp.ilk spadesimp.lib spadesimp.pdb rm -f zombiescope.sav zombiescope.exp zombiescope.ilk zombiescope.lib zombiescope.pdb reallyclean: clean rm -f ${OUTPUTNAME_SIMPLIFIER}${EXE_EXTN} ${OUTPUTNAME_SIMPLIFIER}${EXE_EXTN}.manifest rm -f ${OUTPUTNAME_ZOMBIESCOPE}${EXE_EXTN} ${OUTPUTNAME_ZOMBIESCOPE}${EXE_EXTN}.manifest rm -f spxref_warning.txt spxref_cross.txt spxref_ported.txt spxref_undefined.txt ################################################################################ # END-OF-FILE spark-2012.0.deb/simplifier/defectreporting.pro0000644000175000017500000001113711753202337020516 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Support file to offer improved reporting of warnings and errors detected % at compilation. %############################################################################### %############################################################################### % MODULE %############################################################################### %############################################################################### % DEPENDENCIES %############################################################################### %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### :- dynamic(defect_counter/1). :- assert(defect_counter(0)). %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % message_hook(+Severity, +Message, +Lines). %------------------------------------------------------------------------------- % Provide an alternative implementation for the message_hook. This enables % us abort with an error code, preventing compilation, if any defects are % detected. Further, it also supports slightly cleaner defect reports, to % assist in debugging. 
%=============================================================================== % Report errors and warnings, and count these as defects. message_hook(Severity, _Message, Lines):- member(Severity,[error, warning]), increase_defect_counter, % Present defect message, preceded by separating blank line. nl(user_error), print_message_lines(user_error, Severity, Lines), flush_output, !. % Report all other messages, but do not separate these with blank lines nor % count these as defects. message_hook(Severity, _Message, Lines):- print_message_lines(user_error, Severity, Lines), flush_output, !. %------------------------------------------------------------------------------- increase_defect_counter:- retract(defect_counter(At_Int)), Next_Int is At_Int+1, assert(defect_counter(Next_Int)), !. %=============================================================================== %=============================================================================== % zero_defects_reported. %------------------------------------------------------------------------------- % Succeeds where zero defects have been reported. %=============================================================================== zero_defects_reported:- defect_counter(0), !. %=============================================================================== %=============================================================================== % report_total_defects. %------------------------------------------------------------------------------- % Report the total number of defects reported. %=============================================================================== report_total_defects:- defect_counter(At_Int), nl, write(At_Int), write(' defects detected.'), nl, nl, !. %=============================================================================== %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/simplifier.pro0000644000175000017500000002002511753202337017471 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Top level file, to build the Simplifier. 
%############################################################################### %############################################################################### % MODULE %############################################################################### %############################################################################### % DEPENDENCIES %############################################################################### %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % LOAD ENHANCED DEFECT REPORTING %=============================================================================== :- use_module('defectreporting.pro'). %=============================================================================== %=============================================================================== % LOAD SICSTUS LIBRARIES BEFORE DYNAMIC CHANGES %=============================================================================== :- use_module('librarypredicates.pro'). :- load_files(library(system), [when(compile_time), imports([environ/2])]). %=============================================================================== %=============================================================================== % DYNAMIC FEATURES PRE-LOAD %=============================================================================== :- use_module('settings.pro', [declare_settings/0]). :- declare_settings. :- use_module('opdeclar.pro', [declare_operators/0, hide_operators/0]). :- declare_operators. % Conditional compilation - only compile usage utilities is the user % has installed TCLTK libraries and wants usage profiling. :- if(environ('USAGE_PROFILING', 'ON')). :- use_module('usage_utilities.pro', [save_usage_data/0, view_usage/0]). :- endif. %=============================================================================== %=============================================================================== % FIXED SYSTEM CONFIGURATION %------------------------------------------------------------------------------- % This information will be the same for every execution of the tool. %=============================================================================== :- use_module('data__system.pro', [add_system_toolname/1]). :- use_module('load__switches.pro'). %=============================================================================== %=============================================================================== % LOAD SYSTEM %=============================================================================== :- include('main_simplifier.pro'). %=============================================================================== %=============================================================================== % runtime_entry(start). %------------------------------------------------------------------------------- % This is a special Sicstus predicate, which will be automatically invoked % when restoring a saved state. It is the top level, or main, predicate. 
%=============================================================================== runtime_entry(start):- simplifier_main, save_usage_profile, close_all_streams, halt(0), !. %=============================================================================== % save_usage_profile %------------------------------------------------------------------------------- % Predicate to save usage data if usage switch specifies a file. % Usage is only possible when the Simplifier is compiled using the % prolog flag profiledcode (refer to makefile) and when it is run in % Sicstus interactive mode (limitation of Sicstus) - see usage_profile script. %=============================================================================== :- if(environ('USAGE_PROFILING', 'ON')). save_usage_profile:- \+ get_switch_usage(no_usage_file), save_usage_data, !. :- endif. save_usage_profile:- !. %=============================================================================== % show_usage %------------------------------------------------------------------------------- % Show collected usage data. %=============================================================================== :- if(environ('USAGE_PROFILING', 'ON')). show_usage:- load_switches, view_usage, halt(0). :- endif. %=============================================================================== %=============================================================================== % DYNAMIC FEATURES POST-LOAD %=============================================================================== % _After_ all dynamic predicates have been introduced, we need to disable % "dynamic" as an operator in case a user has an fdl entity called % "dynamic". Same goes for the other predefined prolog operators that might % look like fdl identifiers. See the sicstus manual section "Standard % Operators" :- hide_operators. %=============================================================================== %=============================================================================== % BUILD SAV AND HALT %=============================================================================== buildsav_simplifier:- zero_defects_reported, save_program('spadesimp.sav'), close_all_streams, halt(0). buildsav_simplifier:- report_total_defects, close_all_streams, halt(1). buildsav_zombiescope:- zero_defects_reported, save_program('zombiescope.sav'), close_all_streams, halt(0). buildsav_zombiescope:- report_total_defects, close_all_streams, halt(1). buildsav_usage_profiled:- zero_defects_reported, save_program('spadesimp_usage_profiled.sav'), halt(0). buildsav_usage_profiled:- report_total_defects, halt(1). % The following are for building with SWI swisav_simplifier:- report_total_defects, qsave_program('spadesimp', [autoload(true), goal(simplifier_main), stand_alone(true)]), halt(0). swisav_simplifier:- report_total_defects, halt(1). swisav_zombiescope:- report_total_defects, qsave_program('zombiescope', [autoload(true), goal(simplifier_main), stand_alone(true)]), halt(0). swisav_zombiescope:- report_total_defects, halt(1). 
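%-------------------------------------------------------------------------------
% Usage sketch: the buildsav_* goals above are driven from simplifier/Makefile,
% broadly as follows (SICStus switches omitted):
%
%   sicstus --goal "compile('simplifier.pro'), add_system_toolname('Simplifier'), buildsav_simplifier."
%   sicstus --goal "compile('simplifier.pro'), add_system_toolname('ZombieScope'), buildsav_zombiescope."
%
% Each goal saves the corresponding .sav image only when zero_defects_reported
% holds; otherwise it reports the defect count and halts with a non-zero status.
%-------------------------------------------------------------------------------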
%=============================================================================== %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/parseutilities.pro0000644000175000017500000005215111753202337020401 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Provides utilities that support parsing lists of characters. %############################################################################### %############################################################################### % MODULE %############################################################################### :- module(parseutilities, [parse_all_to_nothing/2, parse_nothing_to_all/2, parse_atom/5, parse_atom_silent/4, parse_line/3, parse_char_sep_atom_list/6, parse_number/3, parse_natural_int/3, parse_possibly_signed_atom/4, inside_selected_character_class/2, atom_to_lower_case/2]). %############################################################################### % DEPENDENCIES %############################################################################### :- use_module('data__formats.pro', [add_type/2]). :- use_module('newutilities.pro', [atom_to_integer/2]). :- use_module('ioutilities.pro', [throw_error/2]). %############################################################################### % TYPES %############################################################################### :- add_type('SelectCharClass', [not('CharClass'), 'CharClass']). :- add_type('CharClass', [% Internal character classes. lower_case_char('Index_Int'), upper_case_char('Index_Int'), % Low level character classes. numeric, under_score, asterisk, space, newline, semicolon, colon, period, forwardslash, backwardslash, % Higher level character classes (entirely based on character % classes above). lower_case_char, upper_case_char, alpha, alpha_numeric]). :- add_type('ParseRequest', [% Must parse one character. one, % Must parse at least one character. oneormore, % Need not parse any characters. zeroormore]). %############################################################################### % DATA %############################################################################### %############################################################################### % PREDICATES %############################################################################### :- set_prolog_flag(double_quotes, chars). 
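%-------------------------------------------------------------------------------
% With double_quotes set to chars, a literal such as "abc" in the DCG bodies
% below denotes the character list [a, b, c], which is the representation the
% character/2 table expects. Illustrative queries (assuming the same flag is
% in effect when they are typed):
%
%   ?- phrase(parse_natural_int(N), "42").
%   N = 42
%
%   ?- atom_chars('Hello_World', Cs),
%      phrase(parse_atom([alpha, under_score], oneormore, A), Cs).
%   A = 'Hello_World'
%-------------------------------------------------------------------------------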
%=============================================================================== % character(?CharClass, ?Char). %------------------------------------------------------------------------------- % The intention is that this character predicate will never be accessed % outside this module. Instead, this module has a small collection of % general predicates for performing character related operations. %=============================================================================== character(numeric, '1'). character(numeric, '2'). character(numeric, '3'). character(numeric, '4'). character(numeric, '5'). character(numeric, '6'). character(numeric, '7'). character(numeric, '8'). character(numeric, '9'). character(numeric, '0'). character(lower_case_char(1), 'a'). character(lower_case_char(2), 'b'). character(lower_case_char(3), 'c'). character(lower_case_char(4), 'd'). character(lower_case_char(5), 'e'). character(lower_case_char(6), 'f'). character(lower_case_char(7), 'g'). character(lower_case_char(8), 'h'). character(lower_case_char(9), 'i'). character(lower_case_char(10), 'j'). character(lower_case_char(11), 'k'). character(lower_case_char(12), 'l'). character(lower_case_char(13), 'm'). character(lower_case_char(14), 'n'). character(lower_case_char(15), 'o'). character(lower_case_char(16), 'p'). character(lower_case_char(17), 'q'). character(lower_case_char(18), 'r'). character(lower_case_char(19), 's'). character(lower_case_char(20), 't'). character(lower_case_char(21), 'u'). character(lower_case_char(22), 'v'). character(lower_case_char(23), 'w'). character(lower_case_char(24), 'x'). character(lower_case_char(25), 'y'). character(lower_case_char(26), 'z'). character(upper_case_char(1), 'A'). character(upper_case_char(2), 'B'). character(upper_case_char(3), 'C'). character(upper_case_char(4), 'D'). character(upper_case_char(5), 'E'). character(upper_case_char(6), 'F'). character(upper_case_char(7), 'G'). character(upper_case_char(8), 'H'). character(upper_case_char(9), 'I'). character(upper_case_char(10), 'J'). character(upper_case_char(11), 'K'). character(upper_case_char(12), 'L'). character(upper_case_char(13), 'M'). character(upper_case_char(14), 'N'). character(upper_case_char(15), 'O'). character(upper_case_char(16), 'P'). character(upper_case_char(17), 'Q'). character(upper_case_char(18), 'R'). character(upper_case_char(19), 'S'). character(upper_case_char(20), 'T'). character(upper_case_char(21), 'U'). character(upper_case_char(22), 'V'). character(upper_case_char(23), 'W'). character(upper_case_char(24), 'X'). character(upper_case_char(25), 'Y'). character(upper_case_char(26), 'Z'). character(under_score, '_'). character(asterisk, '*'). character(space, ' '). character(newline, '\n'). character(semicolon, ';'). character(colon, ':'). character(period, '.'). character(forwardslash, '/'). character(backwardslash, '\\'). character(hyphen, '-'). %=============================================================================== % inside_selected_character_class(+SelectCharClassList, +Char). %------------------------------------------------------------------------------- % Is successful where the provided character (Char) is within one of the % selected character classes in (SelectCharClassList). Raises an error if % an unaccepted character class is provided. 
%=============================================================================== inside_selected_character_class(SelectCharClassList, Char):- member(SelectCharClass, SelectCharClassList), inside_selected_character_class_x(SelectCharClass, Char), !. %------------------------------------------------------------------------------- %Consider negated character class selection. inside_selected_character_class_x(not(CharClass), Char):- \+ inside_character_class(CharClass, Char), !. %Otherwise, consider as regular character class selection. inside_selected_character_class_x(CharClass, Char):- \+ CharClass=not(_CharClass), inside_character_class(CharClass, Char), !. %------------------------------------------------------------------------------- % numeric. inside_character_class(numeric, Char):- !, character(numeric, Char), !. % under_score. inside_character_class(under_score, Char):- !, character(under_score, Char), !. % asterisk. inside_character_class(asterisk, Char):- !, character(asterisk, Char), !. % space. inside_character_class(space, Char):- !, character(space, Char), !. % newline. inside_character_class(newline, Char):- !, character(newline, Char), !. % semicolon. inside_character_class(semicolon, Char):- !, character(semicolon, Char), !. % colon. inside_character_class(colon, Char):- !, character(colon, Char), !. % period. inside_character_class(period, Char):- !, character(period, Char), !. % hyphen inside_character_class(hyphen, Char):- !, character(hyphen, Char), !. % forwardslash. inside_character_class(forwardslash, Char):- !, character(forwardslash, Char), !. % backwardslash. inside_character_class(backwardslash, Char):- !, character(backwardslash, Char), !. % lower_case_char. inside_character_class(lower_case_char, Char):- !, character(lower_case_char(_Index_Int), Char), !. % upper_case_char. inside_character_class(upper_case_char, Char):- !, character(upper_case_char(_Index_Int), Char), !. % alpha. inside_character_class(alpha, Char):- !, inside_character_class_alpha(Char), !. % alpha_numeric. inside_character_class(alpha_numeric, Char):- !, inside_character_class_alpha_numeric(Char), !. %From above, provided character class is not accepted. Raise an error. inside_character_class(CharClass, _Char):- throw_error('Attempted to parse with an unaccepted character class: ~k', [CharClass]). %------------------------------------------------------------------------------- inside_character_class_alpha(Char):- character(lower_case_char(_Index_Int), Char), !. inside_character_class_alpha(Char):- character(upper_case_char(_Index_Int), Char), !. %------------------------------------------------------------------------------- inside_character_class_alpha_numeric(Char):- character(lower_case_char(_Index_Int), Char), !. inside_character_class_alpha_numeric(Char):- character(upper_case_char(_Index_Int), Char), !. inside_character_class_alpha_numeric(Char):- character(numeric, Char), !. %=============================================================================== % parse_all_to_nothing. %------------------------------------------------------------------------------- % Initially parses all characters, then increasingly parses one less % character. %=============================================================================== % Collect everything. parse_all_to_nothing --> [_Char], parse_all_to_nothing. % End of string. parse_all_to_nothing --> []. %=============================================================================== % parse_nothing_to_all. 
%-------------------------------------------------------------------------------
% Initially parses zero characters, then increasingly parses an additional
% character.
%===============================================================================
parse_nothing_to_all --> [].
parse_nothing_to_all --> [_Char], parse_nothing_to_all.
%===============================================================================
% parse_atom(+SelectCharClassList, +ParseRequest, -Atom).
%-------------------------------------------------------------------------------
% Parses the following characters that conform to at least one of the character
% classes in the provided list (SelectCharClassList) as an atom (Atom), taking
% into account the parse request (ParseRequest). If ParseRequest is 'one'
% then the predicate seeks to parse one character. If ParseRequest is
% 'oneormore' then the predicate seeks to parse as many characters as
% possible, but at least one. If ParseRequest is 'zeroormore' then the
% predicate seeks to parse as many characters as possible,
% but is still successful if zero are parsed.
%===============================================================================
% Parse one character.
parse_atom(SelectCharClassList, one, Atom) -->
    [H_Char],
    {inside_selected_character_class(SelectCharClassList, H_Char)},
    {atom_chars(Atom, [H_Char])},
    !.
% Parse many (at least one) characters.
parse_atom(SelectCharClassList, oneormore, Atom) -->
    parse_atom_one_or_more(SelectCharClassList, CharList),
    {atom_chars(Atom, CharList)},
    !.
% Parse many (even zero) characters.
parse_atom(SelectCharClassList, zeroormore, Atom) -->
    parse_atom_zero_or_more(SelectCharClassList, CharList),
    {atom_chars(Atom, CharList)},
    !.
%-------------------------------------------------------------------------------
% First character.
parse_atom_one_or_more(SelectCharClassList, [H_Char | T_CharList]) -->
    [H_Char],
    {inside_selected_character_class(SelectCharClassList, H_Char)},
    parse_atom_zero_or_more(SelectCharClassList, T_CharList),
    !.
%-------------------------------------------------------------------------------
% Subsequent characters.
parse_atom_zero_or_more(SelectCharClassList, [H_Char | T_CharList]) -->
    [H_Char],
    {inside_selected_character_class(SelectCharClassList, H_Char)},
    parse_atom_zero_or_more(SelectCharClassList, T_CharList),
    !.
% From above, is end of atom.
parse_atom_zero_or_more(_CharClassList, []) --> !.
%===============================================================================
% parse_atom_silent(+SelectCharClassList, +ParseRequest).
%-------------------------------------------------------------------------------
% Exactly the same behaviour as parse_atom, except that the parsed result
% is silently discarded.
%===============================================================================
parse_atom_silent(SelectCharClassList, ParseRequest) -->
    parse_atom(SelectCharClassList, ParseRequest, _Atom),
    !.
%===============================================================================
%parse_char_sep_atom_list(+Item_SelectCharClassList,
%                         +Between_SelectCharClassList,
%                         +Char,
%                         -AtomList).
%-------------------------------------------------------------------------------
%Parses character separated atoms. Each atom must contain at least one
%character from the provided character class list
%(Item_SelectCharClassList). Between the atoms and provided character,
%atoms of the provided character class list (Between_SelectCharClassList)
%may be present.
The character separated atoms parsed are returned as an %atom list (AtomList). At least one atom must be parsed for the predicate %to be successful. %=============================================================================== % Continuation. parse_char_sep_atom_list(Item_SelectCharClassList, Between_SelectCharClassList, Char, [H_Atom | T_AtomList]) --> parse_atom_silent(Between_SelectCharClassList, zeroormore), parse_atom(Item_SelectCharClassList, oneormore, H_Atom), parse_atom_silent(Between_SelectCharClassList, zeroormore), [Char], parse_atom_silent(Between_SelectCharClassList, zeroormore), !, parse_char_sep_atom_list(Item_SelectCharClassList, Between_SelectCharClassList, Char, T_AtomList), !. % Final. parse_char_sep_atom_list(Item_SelectCharClassList, Between_SelectCharClassList, _Char, [H_Atom]) --> parse_atom_silent(Between_SelectCharClassList, zeroormore), parse_atom(Item_SelectCharClassList, oneormore, H_Atom), parse_atom_silent(Between_SelectCharClassList, zeroormore), !. %=============================================================================== % parse_line(Line_Atom). %------------------------------------------------------------------------------- %Parse and return a line as any number of any characters up to a newline. The %newline is removed from the input stream. %=============================================================================== parse_line(Line_Atom) --> parse_atom([not(newline)], zeroormore, Line_Atom), parse_atom_silent([newline], one), !. %=============================================================================== % parse_number(-Int). %------------------------------------------------------------------------------- %Parse an integer number, accepting both a negative and positive prefix. %Where successful, the parsed number is returned as (Int). %=============================================================================== % Negative number. parse_number(Int) --> parse_atom_silent([space, newline], zeroormore), "-", parse_atom_silent([space, newline], zeroormore), parse_natural_int(Natural_Int), parse_atom_silent([space, newline], zeroormore), {Int is -(Natural_Int)}, !. % Positive number (by default). parse_number(Int) --> parse_atom_silent([space, newline], zeroormore), parse_natural_int(Natural_Int), parse_atom_silent([space, newline], zeroormore), {Int is Natural_Int}, !. % Positive number. parse_number(Int) --> parse_atom_silent([space, newline], zeroormore), "+", parse_atom_silent([space, newline], zeroormore), parse_natural_int(Natural_Int), parse_atom_silent([space, newline], zeroormore), {Int is Natural_Int}, !. %=============================================================================== % parse_natural_int(-Natural_Int). %------------------------------------------------------------------------------- % Parse an natural integer. Where successful, the number is returned as % (Natural_Int). %=============================================================================== %Make this call visible to the spxref tool. :- public newutilities:atom_to_integer/2. parse_natural_int(Natural_Int) --> parse_atom([numeric], oneormore, Atom), {atom_to_integer(Atom, Natural_Int)}, !. %=============================================================================== % parse_possibly_signed_atom(CharClassList, Term). %------------------------------------------------------------------------------- % Parse an atom, possibly prefixed by '-' or '+' signs. Where the '-' sign % is present, the atom is returned in the term structure: -(X). 
If % possible, the atom is converted to an integer. %=============================================================================== % Negative number. parse_possibly_signed_atom(CharClassList, -(Atom)) --> parse_atom_silent([space, newline], zeroormore), "-", parse_atom_silent([space, newline], zeroormore), parse_atom(CharClassList, oneormore, Signed_Atom), parse_atom_silent([space, newline], zeroormore), {modify_type(Signed_Atom, Atom)}, !. % Positive number (by default). parse_possibly_signed_atom(CharClassList, Atom) --> parse_atom_silent([space, newline], zeroormore), parse_atom(CharClassList, oneormore, Signed_Atom), parse_atom_silent([space, newline], zeroormore), {modify_type(Signed_Atom, Atom)}, !. % Positive number. parse_possibly_signed_atom(CharClassList, Atom) --> parse_atom_silent([space, newline], zeroormore), "+", parse_atom_silent([space, newline], zeroormore), parse_atom(CharClassList, oneormore, Signed_Atom), parse_atom_silent([space, newline], zeroormore), {modify_type(Signed_Atom, Atom)}, !. %------------------------------------------------------------------------------- modify_type(Signed_Atom, Term):- name(Signed_Atom,Signed_CodeList), name(Term,Signed_CodeList). %=============================================================================== % atom_to_lower_case(+MixedCase_Atom, -LowerCase_Atom). %------------------------------------------------------------------------------- %Convert all upper chase characters in the input atom (MixedCase_Atom) into %their lower case equivalent in (LowerCase_Atom). %=============================================================================== atom_to_lower_case(MixedCase_Atom, LowerCase_Atom):- atom_chars(MixedCase_Atom, MixedCase_CharList), atom_to_lower_case_x(MixedCase_CharList, LowerCase_CharList), atom_chars(LowerCase_Atom, LowerCase_CharList), !. %------------------------------------------------------------------------------- atom_to_lower_case_x([], []):- !. % Convert uppercase char to lowercase equivalent. atom_to_lower_case_x([H_MixedCase_Char | T_MixedCase_CharList], [H_LowerCase_Char | T_LowerCase_CharList]):- character(upper_case_char(Index_Int), H_MixedCase_Char), character(lower_case_char(Index_Int), H_LowerCase_Char), atom_to_lower_case_x(T_MixedCase_CharList, T_LowerCase_CharList). % Copy over all other characters. atom_to_lower_case_x([H_MixedCase__LowerCase__Char | T_MixedCase_CharList], [H_MixedCase__LowerCase__Char | T_LowerCase_CharList]):- atom_to_lower_case_x(T_MixedCase_CharList, T_LowerCase_CharList). :- set_prolog_flag(double_quotes, codes). %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/data__vcg.pro0000644000175000017500000002324211753202337017241 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. 
You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Provides access to all information related to vcs. This information will % be retrieved from the provided vcg file. %############################################################################### %############################################################################### % MODULE %############################################################################### :- module(data__vcg, [get_vcg_file_handle/1, add_vcg_file_handle/1, get_vcg_seen_vc/0, add_vcg_seen_vc/0, get_vcg_trace/2, add_vcg_trace/2, get_vcg_vc/5, add_vcg_vc/5, get_vcg_hypothesis/3, add_vcg_hypothesis/3, get_vcg_conclusion/3, add_vcg_conclusion/3, get_zombiescope_no_check_required/1, add_zombiescope_no_check_required/1, prune_vcg_file_handle/0, prune_all_vcg_vc/0, prune_all_vcg_hypothesis/0, prune_all_vcg_conclusion/0, prune_all_zombiescope_no_check_required/0, prune_vcg_hypothesis/3, save_data__vcg/0]). %############################################################################### % DEPENDENCIES %############################################################################### :- use_module('data__formats.pro', [add_state/2, add_type/2]). :- use_module('ioutilities.pro', [write_terms_to_file/2]). :- use_module('data__data_files.pro', [must_get_datafiles_debug/2]). :- use_module('data__switches', [get_switch_deadpaths/1, get_switch_hyp_limit/1]). %############################################################################### % TYPES %############################################################################### % For path(s) from XXX to YYY: % For checks of refinement integrity: :- add_type('VCTrace', [traverseCutpoints('VCCutpointFrom', 'VCCutpointTo'), checkRefinementIntegrity, subclassInheritanceIntegrity]). % start % assertion of line NN % ZZZ assertion of line NN :- add_type('VCCutpointFrom', [start, assertion('AssertionKind', 'Line_Int')]). % finish % assertion of line NN % default assertion of line NN % check associated with statement of line NN % run-time check associated with statement of line NN % precondition check associated with statement of line NN :- add_type('VCCutpointTo', [finish, assertion('AssertionKind', 'Line_Int'), check('CheckKind', 'Line_Int')]). :- add_type('AssertionKind', [userprovided, default]). :- add_type('CheckKind', [userprovided, runtime, precondition]). %############################################################################### % DATA %############################################################################### % VCs are loaded one-at-a-time. Thus, need a location to store the VCG file % handle between each VC load. :- add_state(get_vcg_file_handle, get_vcg_file_handle('VcgFile_Stream')). :- dynamic(get_vcg_file_handle/1). % Record that a vc has been seen. :- add_state(get_vcg_seen_vc, get_vcg_seen_vc). :- dynamic(get_vcg_seen_vc/0). % A vcg file has a number of trace lines. % Each trace line has a number of VCs. % Each VC has a number of hypotheses and a number of conclusions. :- add_state(get_vcg_trace, get_vcg_trace('TraceId_Atom', 'VCTrace')). :- dynamic(get_vcg_trace/2). 
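%-------------------------------------------------------------------------------
% Illustrative sketch only (not taken from a real VCG file): for a
% hypothetical trace 'p_trace_1' carrying a single VC, the stored facts
% might take the following shape. All identifiers and terms below are
% invented for illustration; only the arities and the 'VCTrace' structure
% follow the declarations in this section.
%
%   get_vcg_trace('p_trace_1', traverseCutpoints(start, assertion(userprovided, 12))).
%   get_vcg_vc('p_vc_1', 1, 'procedure_p_1', 1, 'p_trace_1').
%   get_vcg_hypothesis(1, x >= 0, 'p_vc_1').
%   get_vcg_conclusion(1, x + 1 >= 1, 'p_vc_1').
%-------------------------------------------------------------------------------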
:- add_state(get_vcg_vc, get_vcg_vc('VCId_Atom', 'Order_Int', 'Name_Atom', 'Number_Int', 'ParentVCTraceId_Atom')). :- dynamic(get_vcg_vc/5). :- add_state(get_vcg_hypothesis, get_vcg_hypothesis('Number_Int', 'Hyp_Term', 'ParentVCId_Atom')). :- dynamic(get_vcg_hypothesis/3). :- add_state(get_vcg_conclusion, get_vcg_conclusion('Number_Int', 'Conc_Term', 'ParentVCId_Atom')). :- dynamic(get_vcg_conclusion/3). :- add_state(get_zombiescope_no_check_required, get_zombiescope_no_check_required('ParentVCId_Atom')). :- dynamic(get_zombiescope_no_check_required/1). %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % Add. %=============================================================================== add_vcg_file_handle(VcgFile_Stream):- assert(get_vcg_file_handle(VcgFile_Stream)), !. add_vcg_seen_vc:- assert(get_vcg_seen_vc), !. add_vcg_trace(TraceId_Atom, VCTrace):- assert(get_vcg_trace(TraceId_Atom, VCTrace)), !. add_vcg_vc(VCId_Atom, Order_Int, Name_Atom, Number_Int, ParentVCTraceId_Atom):- assert(get_vcg_vc(VCId_Atom, Order_Int, Name_Atom, Number_Int, ParentVCTraceId_Atom)), !. add_vcg_hypothesis(Number_Int, Hyp_Term, ParentVCId_Atom):- get_switch_deadpaths(off), assert(get_vcg_hypothesis(Number_Int, Hyp_Term, ParentVCId_Atom)), !. % Loading hyps for ZombieScope. add_vcg_hypothesis(Number_Int, Hyp_Term, ParentVCId_Atom):- get_switch_deadpaths(on), get_switch_hyp_limit(HypLimit_Int), add_dpc_hypothesis(HypLimit_Int, Number_Int, Hyp_Term, ParentVCId_Atom), !. % Always load hyp if the limit is set to 0. add_dpc_hypothesis(0, Number_Int, Hyp_Term, ParentVCId_Atom):- assert(get_vcg_hypothesis(Number_Int, Hyp_Term, ParentVCId_Atom)), !. % Only load if limit has not been exceeded. add_dpc_hypothesis(HypLimit_Int, Number_Int, Hyp_Term, ParentVCId_Atom):- Number_Int =< HypLimit_Int, assert(get_vcg_hypothesis(Number_Int, Hyp_Term, ParentVCId_Atom)), !. % Remove the DPC if the the limit has been exceeded. add_dpc_hypothesis(_HypLimit_Int, _Number_Int, _Hyp_Term, ParentVCId_Atom):- prune_all_vcg_hypothesis, add_vcg_hypothesis(1, 'true', ParentVCId_Atom), add_dpc_hypothesis_x(ParentVCId_Atom), !. add_dpc_hypothesis_x(VCId_Atom):- user:log_fact(zombiescope_exceed_limit, [VCId_Atom]), !. add_dpc_hypothesis_x(VCId_Atom):- retractall(user:log_fact(_, _)), user:assert_log_fact(zombiescope_exceed_limit, [VCId_Atom]), !. add_vcg_conclusion(Number_Int, Conc_Term, ParentVCId_Atom):- assert(get_vcg_conclusion(Number_Int, Conc_Term, ParentVCId_Atom)), !. add_zombiescope_no_check_required(ParentVCId_Atom):- assert(get_zombiescope_no_check_required(ParentVCId_Atom)), !. %=============================================================================== %=============================================================================== % Prune. %=============================================================================== prune_vcg_file_handle:- retractall(get_vcg_file_handle(_VcgFile_Stream)), !. prune_all_vcg_vc:- retractall(get_vcg_vc(_VCId_Atom, _Order_Int, _Name_Atom, _Number_Int, _ParentVCTraceId_Atom)), !. prune_all_vcg_hypothesis:- retractall(get_vcg_hypothesis(_Number_Int, _Hyp_Term, _ParentVCId_Atom)), !. prune_all_vcg_conclusion:- retractall(get_vcg_conclusion(_Number_Int, _Conc_Term, _ParentVCId_Atom)), !. prune_all_zombiescope_no_check_required:- retractall(get_zombiescope_no_check_required(_VCId_Atom)), !. 
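%-------------------------------------------------------------------------------
% Worked example (illustrative only) of the hypothesis limit handling
% implemented by add_vcg_hypothesis/3 and add_dpc_hypothesis/4 above, for an
% assumed ZombieScope run with get_switch_deadpaths(on) and
% get_switch_hyp_limit(2). H1, H2 and H3 stand for arbitrary hypothesis terms.
%
%   add_vcg_hypothesis(1, H1, 'vc_1').   % stored: 1 =< 2
%   add_vcg_hypothesis(2, H2, 'vc_1').   % stored: 2 =< 2
%   add_vcg_hypothesis(3, H3, 'vc_1').   % limit exceeded: the stored
%                                        % hypotheses are pruned, a single
%                                        % 'true' hypothesis is asserted for
%                                        % 'vc_1', and a
%                                        % zombiescope_exceed_limit log fact
%                                        % is recorded.
%
% With get_switch_hyp_limit(0) every hypothesis is stored unconditionally.
%-------------------------------------------------------------------------------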
prune_vcg_hypothesis(Number_Int, Hyp_Term, ParentVCId_Atom):- retract(get_vcg_hypothesis(Number_Int, Hyp_Term, ParentVCId_Atom)), !. %=============================================================================== %=============================================================================== % save_data__vcg. %=============================================================================== save_data__vcg:- must_get_datafiles_debug(data__vcg, DebugFile_Atom), write_terms_to_file(DebugFile_Atom, [data__vcg:get_vcg_file_handle/1, data__vcg:get_vcg_seen_vc/0, data__vcg:get_vcg_trace/2, data__vcg:get_vcg_vc/5, data__vcg:get_vcg_hypothesis/3, data__vcg:get_vcg_conclusion/3]), !. %=============================================================================== %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/simp.pro0000644000175000017500000013167511753202337016314 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % defines the (large) predicate simplify(Old, New) which attempts to % simplify any expression given to it. %############################################################################### %############################################################################### % DEPENDENCIES %############################################################################### %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % simplify(+In, -Out). %------------------------------------------------------------------------------- % Simplify expression In to get Out. %=============================================================================== % Simplify expressions of the form 'not(A)' where possible %========================================================= % NOT1 simplify(not(true),false) :- !. % NOT2 simplify(not(false),true) :- !. % NOT3 simplify(not(not(X)),A) :- simplify(X,A), !. 
% NOT4 simplify((not X),B) :- simplify(X,A), ( A=true, B=false ; A=false, B=true ; A=(not B) /* i.e. if A is "not Something", return the Something */ ; B=(not A) ), !. % Simplify expressions of the form 'A and B' where possible %========================================================== % AND1 simplify(false and _X,false) :- !. % AND2 simplify(_X and false,false) :- !. % AND3 simplify(X and (not X),false) :- !. % AND4 simplify((not X) and X,false) :- !. % AND5 simplify(true and X,Y) :- simplify(X,Y), !. % AND6 simplify(X and true,Y) :- simplify(X,Y), !. % AND7 simplify(X and X,Y) :- simplify(X,Y), !. % AND8 simplify(X and Y,Z) :- simplify(X,A), ( A=true, simplify(Y,Z) ; A=false, Z=A ; simplify(Y,B), ( ( B=true, Z=A ; B=false, Z=B ) ; ( A=(not B) ; A=(not Y) ; B=(not A) ; B=(not X) ), Z=false ; A=B, Z=A ; Z=(A and B) ) ), !. % Simplify expressions of the form 'A or B' where possible %========================================================= % OR1 simplify(true or _X,true) :- !. % OR2 simplify(_X or true,true) :- !. % OR3 simplify(X or (not X),true) :- !. % OR4 simplify((not X) or X,true) :- !. % OR5 simplify(false or X,Y) :- simplify(X,Y), !. % OR6 simplify(X or false,Y) :- simplify(X,Y), !. % OR7 simplify(X or X,Y) :- simplify(X,Y), !. % OR8 simplify(X or Y,Z) :- simplify(X,A), ( A=true, Z=A ; A=false, simplify(Y,Z) ; simplify(Y,B), ( B=true, Z=B ; B=false, Z=A ; ( A=(not B) ; A=(not Y) ; B=(not A) ; B=(not X) ), Z=true ; A=B, Z=A ; Z=(A or B) ) ), !. % Simplify expressions of the form 'A -> B' where possible %========================================================= % IMP1 simplify(false->_X,true) :- !. % IMP2 simplify(_X->true,true) :- !. % IMP3 simplify(X->X,true) :- !. % IMP4 simplify(true->X,Y) :- simplify(X,Y), !. % IMP5 simplify(X->false,A) :- simplify((not X),A), !. % IMP6 simplify(X->(not X),Y) :- simplify((not X),Y), !. % IMP7 simplify((not X)->X,Y) :- simplify(X,Y), !. % IMP8 simplify(X -> (Y -> Z),A) :- simplify((X and Y) -> Z,A), !. % IMP9 simplify(X->Y,Z) :- simplify(X,A), ( A=true, simplify(Y,Z) ; A=false, Z=true ; simplify(Y,B), ( B=true, Z=B ; B=false, simplify((not X),Z) ; ( A=(not B) ; A=(not Y) ; B=(not A) ; B=(not X) ), Z=B ; A=B, Z=true ; Z=(A->B) ) ), !. % Simplify expressions of the form 'A <-> B' where possible %========================================================== % EQV1 simplify(X<->X,true) :- !. % EQV2 simplify(X<->(not X),false) :- !. % EQV3 simplify((not X)<->X,false) :- !. % EQV4 simplify(X<->true,Y) :- simplify(X,Y), !. % EQV5 simplify(true<->X,Y) :- simplify(X,Y), !. % EQV6 simplify(X<->false,Y) :- simplify((not X),Y), !. % EQV7 simplify(false<->X,Y) :- simplify((not X),Y), !. % EQV8 simplify(X <-> (Y <-> Z),B) :- simplify(X <-> Y,A), simplify(A <-> Z,B), !. % EQV9 simplify(X <-> Y <-> Z,B) :- simplify(Y <-> Z,A), (Y <-> Z)\=A, simplify(X <-> A,B), !. % EQV10 simplify(X <-> Y <-> Z,B) :- simplify(X <-> Z,A), (X <-> Z)\=A, simplify(A <-> Y,B), !. % EQV11 simplify(X<->Y,Z) :- simplify(X,A), ( A=true, simplify(Y,Z) ; A=false, simplify((not Y),Z) ; simplify(Y,B), ( B=true, Z=A ; B=false, simplify((not X),Z) ; ( A=(not B) ; A=(not Y) ; B=(not A) ; B=(not X) ), Z=false ; A=B, Z=true ; Z=(A<->B) ) ), !. % Simplify 'for_all( )' exprs. 
where possible %============================================ % ALL3 simplify(for_all(V:T,X),NEW) :- find_core_type(T, CT), ( var_const(V, CT, _), STATE = dont_retract ; asserta(var_const(V, CT, temp)), STATE = retract ), simplify(X,Y), ( Y = true, NEW = true ; Y = false, NEW = false ; uq_normalise(V, Y, NEWY), NEW = (for_all(V:T, NEWY)) ), !, ( STATE = dont_retract ; retract(var_const(V, CT, temp)) ), !. % Simplify 'for_some( )' exprs. where possible %============================================= % EXI3 simplify(for_some(V:T,X),NEW) :- find_core_type(T, CT), ( var_const(V, CT, _), STATE = dont_retract ; asserta(var_const(V, CT, temp)), STATE = retract ), simplify(X,Y), ( Y = true, NEW = true ; Y = false, NEW = false ; NEW = (for_some(V:T, Y)) ), !, ( STATE = dont_retract ; retract(var_const(V, CT, temp)) ), !. % Simplify set-type expressions where possible %============================================= % SET1 simplify(A \/ B, S) :- !, set_simplify(A \/ B, S), !. % SET2 simplify(A \ B, S) :- !, set_simplify(A \ B, S), !. % SET3 simplify(A /\ B, S) :- !, set_simplify(A /\ B, S), !. % SET4 simplify((set A), S) :- !, set_simplify((set A), S), !. % SET5 simplify(A subset_of B, S) :- !, set_simplify(A subset_of B, S), !. % SET6 simplify(A strict_subset_of B, S) :- !, set_simplify(A strict_subset_of B, S), !. % SET7 simplify(A in B, S) :- !, set_simplify(A in B, S), !. % SET8 simplify(A not_in B, S) :- !, set_simplify(A not_in B, S), !. % Simplify atomic formulae where possible %======================================== % REL1 simplify(X=Y,Z) :- checktype(X,T), ( type(T, set(_)), !, set_simplify(X=Y,Z) ; type(T,enumerated), !, enumerated_simplify(X=Y,Z) ; simplify(X,A), simplify(Y,B), ( A=B, Z=true ; base_rational(A), ( base_rational(B), A\=B, Z=false ; B=P+Q, ( base_rational(P), simplify(A-P,R), Z=(Q=R) ; base_rational(Q), simplify(A-Q,R), Z=(P=R) ) ; B=P-Q, ( base_rational(P), simplify(P-A,R), Z=(Q=R) ; base_rational(Q), simplify(A+Q,R), Z=(P=R) ) ) ; base_rational(B), ( A=P+Q, ( base_rational(P), simplify(B-P,R), Z=(Q=R) ; base_rational(Q), simplify(B-Q,R), Z=(P=R) ) ; A=P-Q, ( base_rational(P), simplify(P-B,R), Z=(Q=R) ; base_rational(Q), simplify(B+Q,R), Z=(P=R) ) ) ; /* handle A,B boolean */ A=true, Z=B ; B=true, Z=A ; A=false, simplify(not B, Z) ; B=false, simplify(not A, Z) ; T=boolean, Z=(A<->B) ; Z=(A=B) ) ), !. % REL2 simplify(X>Y,Z) :- checktype(X,T), ( type(T,enumerated), !, enumerated_simplify(X>Y,Z) ; simplify(X,A), simplify(Y,B), ( base_rational(A), ( base_rational(B), ( B less_than A, Z=true ; ( A=B ; A less_than B ), Z=false ) ; B=P+Q, ( base_rational(P), simplify(A-P,R), Z=(QR) ; base_rational(Q), simplify(A+Q,R), Z=(PR) ; base_rational(Q), simplify(B-Q,R), Z=(P>R) ) ; A=P-Q, ( base_rational(P), simplify(P-B,R), Z=(QR) ) ) ; Z=(A>B) ) ), !. 
% REL3 simplify(XR) ; base_rational(Q), simplify(A-Q,R), Z=(P>R) ) ; B=P-Q, ( base_rational(P), simplify(P-A,R), Z=(QR) ) ) ; base_rational(B), ( A=P+Q, ( base_rational(P), simplify(B-P,R), Z=(QR) ; base_rational(Q), simplify(B+Q,R), Z=(PY,Z) :- checktype(X,T), ( type(T, set(_)), !, set_simplify(X<>Y,Z) ; type(T,enumerated), !, enumerated_simplify(X<>Y,Z) ; simplify(X,A), simplify(Y,B), ( A=B, Z=false ; base_rational(A), ( base_rational(B), ( A\=B, Z=true ; A=B, Z=true ) ; B=P+Q, ( base_rational(P), simplify(A-P,R), Z=(Q<>R) ; base_rational(Q), simplify(A-Q,R), Z=(P<>R) ) ; B=P-Q, ( base_rational(P), simplify(P-A,R), Z=(Q<>R) ; base_rational(Q), simplify(A+Q,R), Z=(P<>R) ) ) ; base_rational(B), ( A=P+Q, ( base_rational(P), simplify(B-P,R), Z=(Q<>R) ; base_rational(Q), simplify(B-Q,R), Z=(P<>R) ) ; A=P-Q, ( base_rational(P), simplify(P-B,R), Z=(Q<>R) ; base_rational(Q), simplify(B+Q,R), Z=(P<>R) ) ) ; /* handle A,B boolean */ A=true, simplify(not B, Z) ; B=true, simplify(not A, Z) ; A=false, Z=B ; B=false, Z=A ; T=boolean, Z=(not (A<->B)) ; Z=(A<>B) ) ), !. % REL5 simplify(X<=Y,Z) :- checktype(X,T), ( type(T,enumerated), !, enumerated_simplify(X<=Y,Z) ; simplify(X,A), simplify(Y,B), ( A=B, Z=true ; base_rational(A), ( base_rational(B), ( A less_than B, Z=true ; B less_than A, Z=false ) ; B=P+Q, ( base_rational(P), simplify(A-P,R), Z=(R<=Q) ; base_rational(Q), simplify(A-Q,R), Z=(R<=P) ) ; B=P-Q, ( base_rational(P), simplify(P-A,R), Z=(Q<=R) ; base_rational(Q), simplify(A+Q,R), Z=(R<=P) ) ) ; base_rational(B), ( A=P+Q, ( base_rational(P), simplify(B-P,R), Z=(Q<=R) ; base_rational(Q), simplify(B-Q,R), Z=(P<=R) ) ; A=P-Q, ( base_rational(P), simplify(P-B,R), Z=(R<=Q) ; base_rational(Q), simplify(B+Q,R), Z=(P<=R) ) ) ; Z=(A<=B) ) ), !. % REL6 simplify(X>=Y,Z) :- checktype(X,T), ( type(T,enumerated), !, enumerated_simplify(X>=Y,Z) ; simplify(X,A), simplify(Y,B), ( A=B, Z=true ; base_rational(A), ( base_rational(B), ( B less_than A, Z=true ; A less_than B, Z=false ) ; B=P+Q, ( base_rational(P), simplify(A-P,R), Z=(Q<=R) ; base_rational(Q), simplify(A-Q,R), Z=(P<=R) ) ; B=P-Q, ( base_rational(P), simplify(P-A,R), Z=(Q>=R) ; base_rational(Q), simplify(A+Q,R), Z=(P<=R) ) ) ; base_rational(B), ( A=P+Q, ( base_rational(P), simplify(B-P,R), Z=(Q>=R) ; base_rational(Q), simplify(B-Q,R), Z=(P>=R) ) ; A=P-Q, ( base_rational(P), simplify(P-B,R), Z=(Q<=R) ; base_rational(Q), simplify(B+Q,R), Z=(P>=R) ) ) ; Z=(A>=B) ) ), !. % ODD simplify(odd(X), ODD) :- simplify(X, NEWX), ( signed_integer(NEWX), X1 is NEWX*NEWX, ( X2 iss (X1 div 2)*2, X1 =:= X2, ODD=false ; X2 iss (X1 div 2)*2, X1 =\= X2, ODD=true ) ; NEWX=A*B, simplify(odd(A), TFA), simplify(odd(B), TFB), ( ( TFA = false ; TFB = false ), ODD = false ; TFA = true, ODD = TFB ; TFB = true, ODD = TFA ) ; ( NEWX=A+B ; NEWX=A-B ), simplify(odd(A), TFA), simplify(odd(B), TFB), ( TFA = true, ( TFB = true, ODD = false ; TFB = false, ODD = true ; ODD = (not TFB) ) ; TFA = false, ODD = TFB ; TFB = true, ODD = (not TFA) ; TFB = false, ODD = TFA ) ; NEWX = (-A), simplify(odd(A), ODD) ; ODD = odd(NEWX) ), !. % SQR1 simplify(sqr(abs(X)), SQR) :- simplify(X*X, SQR), !. % SQR2 simplify(sqr(X), SQR) :- simplify(X*X, SQR), !. % ABS simplify(abs(X), ABS) :- simplify(X, NEWX), ( signed_integer(NEWX), ( NEWX >= 0, ABS = NEWX ; NEWX < 0, ABS is -NEWX ) ; NEWX = A*A, ABS = NEWX ; NEWX = A*B, simplify(abs(A), ABSA), simplify(abs(B), ABSB), ABS = ABSA*ABSB ; NEWX = abs(_EXPR), ABS = NEWX ; ABS = abs(NEWX) ), !. 
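%-------------------------------------------------------------------------------
% Illustrative queries only, showing the intended effect of the rules above.
% They assume the rest of the Simplifier (e.g. aritheval.pro for
% base_rational/1) is loaded as in a normal run; 'x' stands for an arbitrary
% term.
%
%   ?- simplify(not(true), R).       % expected: R = false   (NOT1)
%   ?- simplify(x and false, R).     % expected: R = false   (AND2)
%   ?- simplify(false -> x, R).      % expected: R = true    (IMP1)
%   ?- simplify(x <-> x, R).         % expected: R = true    (EQV1)
%   ?- simplify(abs(4), R).          % expected: R = 4       (ABS)
%-------------------------------------------------------------------------------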
% Simplify bitwise functions (which support SPARK modular types) where possible %============================================================================== % BIT__AND simplify(bit__and(X, Y), NEW) :- simplify(X, NEWX), simplify(Y, NEWY), ( NEWX = 0, /* 0 /\ Y ==> 0 */ NEW = 0 ; NEWY = 0, /* X /\ 0 ==> 0 */ NEW = 0 ; NEWX = NEWY, /* X /\ X ==> X, provided X >= 0 */ infer(NEWX >= 0), NEW = NEWX ; signed_integer(NEWY), /* X /\ 2**N-1 ==> ... */ POW2 iss NEWY+1, is_a_power_of_2(POW2), /* so Y = 2**N-1 */ infer(NEWX >= 0), ( infer(NEWX <= NEWY), /* ... ==> X, provided 0 <= X <= 2**N-1 */ NEW = NEWX ; signed_integer(NEWX), /* ... ==> X mod 2**N, provided X >= 0 */ NEW iss NEWX mod POW2 ; NEW = NEWX mod POW2 ) ; signed_integer(NEWX), /* 2**N-1 /\ Y ==> ... */ POW2 iss NEWX+1, is_a_power_of_2(POW2), /* so X = 2**N-1 */ infer(NEWY >= 0), ( infer(NEWY <= NEWX), /* ... ==> Y, provided 0 <= Y <= 2**N-1 */ NEW = NEWY ; signed_integer(NEWY), /* ... ==> Y mod 2**N, provided Y >= 0 */ NEW iss NEWY mod POW2 ; NEW = NEWY mod POW2 ) ; ( NEWY = POW2 - NEWX - 1, /* X /\ 2**N-X-1 ==> 0, provided 0 <= X <= 2**N-1*/ signed_integer(POW2), P2M1 iss POW2 - 1 ; NEWY = P2M1 - NEWX, /* X /\ 2**N-1-X ==> 0, provided 0 <= X <= 2**N-1 */ signed_integer(P2M1), POW2 iss P2M1 + 1 ), is_a_power_of_2(POW2), /* i.e., and of X with its negation */ infer(NEWX >= 0), infer(NEWX <= P2M1), NEW = 0 ; ( NEWX = POW2 - NEWY - 1, /* 2**N-Y-1 /\ Y ==> 0, provided 0 <= Y <= 2**N-1 */ signed_integer(POW2), P2M1 iss POW2 - 1 ; NEWX = P2M1 - NEWY, /* 2**N-1-Y /\ Y ==> 0, provided 0 <= Y <= 2**N-1 */ signed_integer(P2M1), POW2 iss P2M1 + 1 ), is_a_power_of_2(POW2), /* i.e., and of Y with its negation */ infer(NEWY >= 0), infer(NEWY <= P2M1), NEW = 0 ; ( NEWY = V * POW2 ; NEWY = POW2 * V ), signed_integer(POW2), /* X /\ Y * 2**N ==> 0, provided 0 <= X < 2**N */ is_a_power_of_2(POW2), infer(NEWX >= 0), infer(V >= 0), infer(NEWX < POW2), NEW = 0 ; ( NEWX = V * POW2 ; NEWX = POW2 * V ), signed_integer(POW2), /* X * 2**N /\ Y ==> 0, provided 0 <= Y < 2**N */ is_a_power_of_2(POW2), infer(NEWY >= 0), infer(V >= 0), infer(NEWY < POW2), NEW = 0 ; signed_integer(NEWX), /* evaluate, if both arguments are integers */ signed_integer(NEWY), NEWX >= 0, NEWY >= 0, evaluate_bit_and(NEWX, NEWY, NEW) ; NEW = bit__and(NEWX, NEWY) /* otherwise */ ), !. 
% BIT__OR simplify(bit__or(X, Y), NEW) :- simplify(X, NEWX), simplify(Y, NEWY), ( NEWX = 0, /* 0 \/ Y ==> Y, provided Y >= 0 */ infer(NEWY >= 0), NEW = NEWY ; NEWY = 0, /* X \/ 0 ==> X, provided X >= 0 */ infer(NEWX >= 0), NEW = NEWX ; NEWX = NEWY, /* X \/ X ==> X, provided X >= 0 */ infer(NEWX >= 0), NEW = NEWX ; ( NEWY = POW2 - NEWX - 1, /* X \/ 2**N-X-1 ==> 2**N-1, provided 0 <= X <= 2**N-1*/ signed_integer(POW2), P2M1 iss POW2 - 1 ; NEWY = P2M1 - NEWX, /* X /\ 2**N-1-X ==> 2**N-1, provided 0 <= X <= 2**N-1 */ signed_integer(P2M1), POW2 iss P2M1 + 1 ), is_a_power_of_2(POW2), /* i.e., or of X with its negation */ infer(NEWX >= 0), infer(NEWX <= P2M1), NEW = P2M1 ; ( NEWX = POW2 - NEWY - 1, /* 2**N-Y-1 \/ Y ==> 2**N-1, provided 0 <= Y <= 2**N-1 */ signed_integer(POW2), P2M1 iss POW2 - 1 ; NEWX = P2M1 - NEWY, /* 2**N-1-Y \/ Y ==> 2**N-1, provided 0 <= Y <= 2**N-1 */ signed_integer(P2M1), POW2 iss P2M1 + 1 ), is_a_power_of_2(POW2), /* i.e., or of Y with its negation */ infer(NEWY >= 0), infer(NEWY <= P2M1), NEW = P2M1 ; ( NEWY = V * POW2 ; NEWY = POW2 * V ), signed_integer(POW2), /* X \/ Y*2**N ==> X+Y*2**N, provided 0 <= X < 2**N */ is_a_power_of_2(POW2), infer(NEWX >= 0), infer(V >= 0), infer(NEWX < POW2), simplify(NEWX + POW2 * NEWY, NEW) ; ( NEWX = V * POW2 ; NEWX = POW2 * V ), signed_integer(POW2), /* X*2**N \/ Y ==> Y+X*2**N, provided 0 <= Y < 2**N */ is_a_power_of_2(POW2), infer(NEWY >= 0), infer(V >= 0), infer(NEWY < POW2), simplify(NEWY + POW2 * NEWX, NEW) ; signed_integer(NEWX), /* evaluate, if both arguments are integers */ signed_integer(NEWY), NEWX >= 0, NEWY >= 0, evaluate_bit_or(NEWX, NEWY, NEW) ; NEW = bit__or(NEWX, NEWY) /* otherwise */ ), !. % BIT__XOR simplify(bit__xor(X, Y), NEW) :- simplify(X, NEWX), simplify(Y, NEWY), ( NEWX = 0, /* 0 xor Y ==> Y, provided Y >= 0 */ infer(NEWY >= 0), NEW = NEWY ; NEWY = 0, /* X xor 0 ==> X, provided X >= 0 */ infer(NEWX >= 0), NEW = NEWX ; NEWX = NEWY, /* X xor X ==> 0, provided X >= 0 */ infer(NEWX >= 0), NEW = 0 ; ( NEWY = POW2 - NEWX - 1, /* X xor 2**N-X-1 ==> 2**N-1, provided 0 <= X <= 2**N-1*/ signed_integer(POW2), P2M1 iss POW2 - 1 ; NEWY = P2M1 - NEWX, /* X xor 2**N-1-X ==> 2**N-1, provided 0 <= X <= 2**N-1 */ signed_integer(P2M1), POW2 iss P2M1 + 1 ), is_a_power_of_2(POW2), /* i.e., xor of X with its negation */ infer(NEWX >= 0), infer(NEWX <= P2M1), NEW = P2M1 ; ( NEWX = POW2 - NEWY - 1, /* 2**N-Y-1 xor Y ==> 2**N-1, provided 0 <= Y <= 2**N-1 */ signed_integer(POW2), P2M1 iss POW2 - 1 ; NEWX = P2M1 - NEWY, /* 2**N-1-Y xor Y ==> 2**N-1, provided 0 <= Y <= 2**N-1 */ signed_integer(P2M1), POW2 iss P2M1 + 1 ), is_a_power_of_2(POW2), /* i.e., xor of Y with its negation */ infer(NEWY >= 0), infer(NEWY <= P2M1), NEW = P2M1 ; ( NEWY = V * POW2 ; NEWY = POW2 * V ), signed_integer(POW2), /* X xor Y*2**N ==> X+Y*2**N, provided 0 <= X < 2**N */ is_a_power_of_2(POW2), infer(NEWX >= 0), infer(V >= 0), infer(NEWX < POW2), simplify(NEWX + POW2 * NEWY, NEW) ; ( NEWX = V * POW2 ; NEWX = POW2 * V ), signed_integer(POW2), /* X*2**N xor Y ==> Y+X*2**N, provided 0 <= Y < 2**N */ is_a_power_of_2(POW2), infer(NEWY >= 0), infer(V >= 0), infer(NEWY < POW2), simplify(NEWY + POW2 * NEWX, NEW) ; signed_integer(NEWX), /* evaluate, if both arguments are integers */ signed_integer(NEWY), NEWX >= 0, NEWY >= 0, evaluate_bit_xor(NEWX, NEWY, NEW) ; NEW = bit__xor(NEWX, NEWY) /* otherwise */ ), !. 
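%-------------------------------------------------------------------------------
% Illustrative ground evaluations only, assuming iss/2 arithmetic behaves as
% standard integer arithmetic. With both arguments known non-negative
% integers, the bitwise clauses above fall through to evaluate_bit_and/3,
% evaluate_bit_or/3 and evaluate_bit_xor/3 (defined later in this file):
%
%   ?- simplify(bit__and(12, 10), R).   % expected: R = 8
%   ?- simplify(bit__or(12, 10), R).    % expected: R = 14   (12 + 10 - 8)
%   ?- simplify(bit__xor(12, 10), R).   % expected: R = 6    (12 + 10 - 2*8)
%
% For symbolic arguments the clauses rely on infer/1: for example, when the
% hypotheses establish 0 <= X <= 255, bit__and(X, 255) reduces to X.
%-------------------------------------------------------------------------------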
% Simplify array, record & sequence type objects if possible %=========================================================== % ARR simplify(X, Y) :- array_simplify(X, Y), !. % REC simplify(X, Y) :- record_simplify(X, Y), !. % SEQ simplify(X, Y) :- sequence_simplify(X, Y), !. % ENU1 simplify(succ(X), Y) :- enumerated_simplify(succ(X), Y), !. % ENU2 simplify(pred(X), Y) :- enumerated_simplify(pred(X), Y), !. % Final catch-all %================ % EVAL simplify(X,Y) :- evaluate(X,Y), !. %=============================================================================== % evaluate(+In, -Out). %------------------------------------------------------------------------------- % Evaluate (non-boolean) In to get Out. %=============================================================================== % EVAL_BASE1 evaluate(X,X) :- ( base_rational(X) ; X=true ; X=false ), !. % EVAL_BASE2 evaluate(X,Y) :- integer(X), X<0, Y iss X, !. % EVAL_BASE3 evaluate(X,Y) :- rational_expression(X), evaluate_rational_expression(X, Y), !. % UMIN1 evaluate(-(-X),A) :- simplify(X,A), !. % UMIN2 evaluate(-X,A) :- simplify(X,B), ( base_rational(B), evaluate_rational_expression(-B, A) ; A=(-B) ), !. % UPLUS evaluate(+X, A) :- simplify(X, A), !. %SPECIAL evaluate(X+N-N,Y) :- simplify(X,Y), !. evaluate(X-N+N,Y) :- simplify(X,Y), !. % PLUS (Special Case) % Where we see a repeated increment in a VC, such as a + 1 + 1 This gets % parsed as (a + 1) + 1, but we would prefer to evaluate and simplify this % as a + (1 + 1) and so on by collecting and evaluating the like terms on % the right. This case can crop up from VCs generated from SPARK code that % has repeated increments on a single path, such as: % A := A + 1; % S; % A := A + 1; % and so on... evaluate((A + B) + C, D) :- integer(B), /* If B and C are both integer literals */ integer(C), E iss B + C, simplify(A + E, D). % PLUS (General Case) evaluate(X+Y,Z) :- simplify(X,A), simplify(Y,B), ( base_rational(A), base_rational(B), evaluate_rational_expression(A+B, Z) ; A=0, Z=B ; B=0, Z=A ; Z=A+B ), !. % MINUS evaluate(X-Y,Z) :- simplify(X,A), simplify(Y,B), ( base_rational(A), base_rational(B), evaluate_rational_expression(A-B, Z) ; B=0, Z=A ; A=0, simplify(-Y,Z) ; A=B, Z=0 ; Z=A-B ), !. % MULT evaluate(X*Y,Z) :- simplify(X,A), simplify(Y,B), ( base_rational(A), base_rational(B), evaluate_rational_expression(A*B, Z) ; ( A=0 ; B=0 ), Z=0 ; A=1, Z=B ; B=1, Z=A ; Z=A*B ), !. % DIV evaluate(X div Y,Z) :- simplify(X,A), simplify(Y,B), ( signed_integer(A), signed_integer(B), B\=0, Z iss A div B ; B=1, Z=A ; Z=A div B ), !. % / evaluate(X / Y,Z) :- simplify(X,A), simplify(Y,B), ( base_rational(A), base_rational(B), B\=0, evaluate_rational_expression(A/B, Z) ; B=1, Z=A ; Z=A / B ), !. % MOD evaluate(X mod Y, Z) :- simplify(X,A), simplify(Y,B), ( signed_integer(A), signed_integer(B), B\=0, Z iss A mod B ; B=1, Z=0 ; ( A = K*B ; A = B*K ), Z=0 ; ( A = K*C ; A = C*K ), signed_integer(K), simplify(K mod B = 0, true), Z = 0 ; Z=(A mod B) ), !. % EXP evaluate(X**Y,Z) :- simplify(X,A), simplify(Y,B), ( base_rational(A), signed_integer(B), evaluate_rational_expression(A**B, Z) ; B=0, Z=1 ; B=1, Z=A ; B=2, Z=A*A ; Z=A**B ), !. % EVAL_VAL evaluate(X,Y) :- val(X,Y), !. % FUNC_SPLIT evaluate(X,Z) :- (\+ atomic(X)), X=..[H|T], eval_list(T,U), Z=..[H|U], !. %=============================================================================== % eval_list(+InList, -OutList). %------------------------------------------------------------------------------- % Simplify each element of InList to get OutList. 
%=============================================================================== % EVL1 eval_list([],[]) :- !. % EVL2 eval_list([H1|T1],[H2|T2]) :- simplify(H1,H2), eval_list(T1,T2), !. %=============================================================================== % val(+In, -Out). %------------------------------------------------------------------------------- % Hook for additional simplification rules. %=============================================================================== val(X, X) :- atomic(X), !. %=============================================================================== % signed_integer(+I). %------------------------------------------------------------------------------- % Succeeds if is I an integer (with an optional -) %=============================================================================== signed_integer(I) :- ( integer(I), I>=0 ; I=(-I1), integer(I1), I1>0 ). %=============================================================================== % less_than(+X, +Y). %------------------------------------------------------------------------------- % Compare two signed_integers or rationals. %=============================================================================== -X less_than -Y :- integer(X), integer(Y), !, X>0, Y>0, Y less_than X. -X less_than Y :- integer(X), integer(Y), !, X>0, Y>=0, !. X less_than Y :- integer(X), integer(Y), !, X>=0, Y>X. X less_than Y :- base_rational(X), base_rational(Y), split_rational(X, Xn, Xd), split_rational(Y, Yn, Yd), Xn * Yd < Yn * Xd. %=============================================================================== % is_a_power_of_2(+X). %------------------------------------------------------------------------------- % Succeeds if X is a power of 2 (2, 4, 8, 16, 32, etc.). Fails otherwise. %=============================================================================== is_a_power_of_2( 2). /* 2**1 */ is_a_power_of_2( 4). /* 2**2 */ is_a_power_of_2( 8). /* 2**3 */ is_a_power_of_2( 16). /* 2**4 */ is_a_power_of_2( 32). /* 2**5 */ is_a_power_of_2( 64). /* 2**6 */ is_a_power_of_2( 128). /* 2**7 */ is_a_power_of_2( 256). /* 2**8 */ is_a_power_of_2( 512). /* 2**9 */ is_a_power_of_2( 1024). /* 2**10 */ is_a_power_of_2( 2048). /* 2**11 */ is_a_power_of_2( 4096). /* 2**12 */ is_a_power_of_2( 8192). /* 2**13 */ is_a_power_of_2( 16384). /* 2**14 */ is_a_power_of_2( 32768). /* 2**15 */ is_a_power_of_2( 65536). /* 2**16 */ is_a_power_of_2( 131072). /* 2**17 */ is_a_power_of_2( 262144). /* 2**18 */ is_a_power_of_2( 524288). /* 2**19 */ is_a_power_of_2( 1048576). /* 2**20 */ is_a_power_of_2( 2097152). /* 2**21 */ is_a_power_of_2( 4194304). /* 2**22 */ is_a_power_of_2( 8388608). /* 2**23 */ is_a_power_of_2( 16777216). /* 2**24 */ is_a_power_of_2( 33554432). /* 2**25 */ is_a_power_of_2( 67108864). /* 2**26 */ is_a_power_of_2( 134217728). /* 2**27 */ is_a_power_of_2( 268435456). /* 2**28 */ is_a_power_of_2( 536870912). /* 2**29 */ is_a_power_of_2(1073741824). /* 2**30 */ is_a_power_of_2(2147483648). /* 2**31 */ is_a_power_of_2(4294967296). /* 2**32 */ /* [I805-018] identified a limitation in generating powers of two */ /* above 2**32, so we now add ground entries here for 2**33 thru 2**64 */ is_a_power_of_2( 8589934592). /* 2**33 */ is_a_power_of_2( 17179869184). /* 2**34 */ is_a_power_of_2( 34359738368). /* 2**35 */ is_a_power_of_2( 68719476736). /* 2**36 */ is_a_power_of_2( 137438953472). /* 2**37 */ is_a_power_of_2( 274877906944). /* 2**38 */ is_a_power_of_2( 549755813888). /* 2**39 */ is_a_power_of_2( 1099511627776). 
/* 2**40 */ is_a_power_of_2( 2199023255552). /* 2**41 */ is_a_power_of_2( 4398046511104). /* 2**42 */ is_a_power_of_2( 8796093022208). /* 2**43 */ is_a_power_of_2( 17592186044416). /* 2**44 */ is_a_power_of_2( 35184372088832). /* 2**45 */ is_a_power_of_2( 70368744177664). /* 2**46 */ is_a_power_of_2( 140737488355328). /* 2**47 */ is_a_power_of_2( 281474976710656). /* 2**48 */ is_a_power_of_2( 562949953421312). /* 2**49 */ is_a_power_of_2( 1125899906842624). /* 2**50 */ is_a_power_of_2( 2251799813685248). /* 2**51 */ is_a_power_of_2( 4503599627370496). /* 2**52 */ is_a_power_of_2( 9007199254740992). /* 2**53 */ is_a_power_of_2( 18014398509481984). /* 2**54 */ is_a_power_of_2( 36028797018963968). /* 2**55 */ is_a_power_of_2( 72057594037927936). /* 2**56 */ is_a_power_of_2( 144115188075855872). /* 2**57 */ is_a_power_of_2( 288230376151711744). /* 2**58 */ is_a_power_of_2( 576460752303423488). /* 2**59 */ is_a_power_of_2( 1152921504606846976). /* 2**60 */ is_a_power_of_2( 2305843009213693952). /* 2**61 */ is_a_power_of_2( 4611686018427387904). /* 2**62 */ is_a_power_of_2( 9223372036854775808). /* 2**63 */ is_a_power_of_2(18446744073709551616). /* 2**64 */ is_a_power_of_2(X) :- \+ integer(X), !, fail. /* trap these */ is_a_power_of_2(X) :- X > 4294967296, /* 2**32 */ !, Y iss X div 4294967296, X iss Y * 4294967296, /* so it's an exact multiple of 2**32 */ !, is_a_power_of_2(Y). %=============================================================================== % next_two_to_N_minus_1_above(X, P2M1). %------------------------------------------------------------------------------- % Fetch next 2**N-1 above integer X. %=============================================================================== next_two_to_N_minus_1_above(X, _) :- /* used in DEDUCTION module */ \+ integer(X), !, fail. next_two_to_N_minus_1_above(X, P2M1) :- is_a_power_of_2(POW2), POW2 > X, !, P2M1 is POW2 - 1. %=============================================================================== % evaluate_bit_and(+X, +Y, -Result). %------------------------------------------------------------------------------- % Evaluate 'and' of two integers. %=============================================================================== evaluate_bit_and(NEWX, NEWY, NEW) :- form_bit_string(NEWX, XList), form_bit_string(NEWY, YList), and_bit_strings(XList, YList, ZList), form_number(ZList, NEW). %------------------------------------------------------------------------------- and_bit_strings([0|XL], [_|YL], [0|Rest]) :- !, and_bit_strings(XL, YL, Rest). and_bit_strings([1|XL], [Y|YL], [Y|Rest]) :- !, and_bit_strings(XL, YL, Rest). and_bit_strings(_XL, [], []) :- !. /* no more digits in one string or the other */ and_bit_strings([], _YL, []) :- !. %=============================================================================== % evaluate_bit_or(+X, +Y, -Result). %------------------------------------------------------------------------------- % Evaluate 'or' of two integers. %=============================================================================== evaluate_bit_or(NEWX, NEWY, NEW) :- evaluate_bit_and(NEWX, NEWY, BIT_AND), !, NEW iss NEWX + NEWY - BIT_AND. %=============================================================================== % evaluate_bit_xor(+X, +Y, -Result). %------------------------------------------------------------------------------- % Evaluate 'xor' of two integers. 
%=============================================================================== evaluate_bit_xor(NEWX, NEWY, NEW) :- evaluate_bit_and(NEWX, NEWY, BIT_AND), !, NEW iss NEWX + NEWY - 2 * BIT_AND. %=============================================================================== % form_bit_string(+N, -L). %------------------------------------------------------------------------------- % Turn number N into a (reverse order) list, e.g. 6 ==> [0, 1, 1] etc. %=============================================================================== form_bit_string(0, [0]) :- !. form_bit_string(1, [1]) :- !. form_bit_string(N, [LSbit|Rest]) :- LSbit iss N mod 2, NewN iss N div 2, !, form_bit_string(NewN, Rest). %=============================================================================== % form_number(+L, -N). %------------------------------------------------------------------------------- % Turn a bit string back into a number. %=============================================================================== form_number(List, Result) :- form_number_from_base(1, List, Result). %------------------------------------------------------------------------------- form_number_from_base(N, [B], Result) :- !, Result is N * B. form_number_from_base(N, [LSbit|Rest], Result) :- NewBase is N * 2, form_number_from_base(NewBase, Rest, MSresult), !, Result is MSresult + N * LSbit. form_number_from_base(_N, [], 0) :- !. /* catchall */ %=============================================================================== % uq_normalise(+I, +Statements, -NewStatements). %------------------------------------------------------------------------------- % Normalises the form of a universally quantified hypothesis such that all % parts are expressed in terms of <=. % % Also where there is a lower and upper bound to variable, order those % inequalities lower-upper. i.e. LOWER <= X and X <= UPPER %=============================================================================== uq_normalise(I, Statements, NewStatements):- statement_norm(I, Statements, NewStatements). %------------------------------------------------------------------------------- statement_norm(I, A -> B, NEWA -> NEWB):- statement_norm(I, A, NEWA), statement_norm(I, B, NEWB). statement_norm(I, A and B, NEWA and NEWB):- statement_norm(I, A, A2), statement_norm(I, B, B2), lower_upper(I, A2, B2, NEWA, NEWB). statement_norm(I, A or B, NEWA or NEWB) :- statement_norm(I, A, NEWA), statement_norm(I, B, NEWB). statement_norm(I, A >= B, NEWB <= NEWA):- statement_norm(I, A, NEWA), statement_norm(I, B, NEWB). statement_norm(_I, X, X):- !. %------------------------------------------------------------------------------- lower_upper(I, I <= UPPER, LOWER <= I, LOWER <= I, I <= UPPER). lower_upper(I, LOWER <= I, I <= UPPER, LOWER <= I, I <= UPPER). lower_upper(_I, A, B, A, B). %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/main_simplifier.pro0000644000175000017500000002161611753202337020504 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. 
The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Loads the common core of modules as required by the Simplifier. Note that % this code does not reside inside a module. It is included by one of the % top level build files. %############################################################################### %############################################################################### % DEPENDENCIES %############################################################################### %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % LOAD MODULES %=============================================================================== :- use_module('data__switches.pro'). :- use_module('data__data_files.pro'). :- use_module('data__system.pro'). :- use_module('data__rules.pro'). :- use_module('data__declarations.pro'). :- use_module('data__vcg.pro'). :- use_module('data__pfs.pro'). :- use_module('data__provenance.pro'). :- use_module('data__hyp.pro'). :- use_module('data__conc.pro'). :- use_module('data__proved_conc.pro'). :- use_module('data__subst_hyp.pro'). :- use_module('ioutilities.pro'). :- use_module('newutilities.pro'). :- use_module('load__switches.pro'). :- use_module('load__data_files.pro'). :- use_module('load__rules.pro'). :- use_module('load__system.pro'). :- use_module('load__declarations.pro'). :- use_module('load__provenance.pro'). :- use_module('load__vcg_dpc.pro'). :- use_module('load__pfs.pro'). :- use_module('processrules.pro'). :- use_module('simplifier_ioutilities.pro'). :- use_module('../common/versioning/version.pro'). %=============================================================================== % INCLUDE CODE %=============================================================================== :- use_module('librarypredicates.pro', [file_exists/1, close_all_streams/0, rename_file/2, last/2, list_to_set/2, reverse/2, process_create/3, process_wait/2, datime/1]). :- include('utilities.pro'). :- include('dynamics.pro'). :- include('portray.pro'). :- include('deduction.pro'). :- include('newded.pro'). :- include('inferenc2.pro'). :- include('schedulesimplification.pro'). :- include('records2.pro'). :- include('simp.pro'). :- include('simpvc.pro'). :- include('quant.pro'). :- include('standard.pro'). :- include('typecheck5.pro'). :- include('userules.pro'). :- include('aritheval.pro'). :- include('writevc.pro'). 
:- include('makelog.pro'). %=============================================================================== % simplifier_main. %------------------------------------------------------------------------------- % This predicate sequences all of the actions of the Simplifier. %=============================================================================== simplifier_main :- %Initialise system information %---------------------------- load_system, %Load the switches provided by the user %-------------------------------------- load_switches, %Process help or version request %------------------------------- handle_information_request, %Load the input files %-------------------- load_data_files, %Begin system operation %---------------------- %Display the header to the screen. display_header(user_output), %Load the provenance for this proof problem. load_provenance, %Load the declarations for this proof problem. load_declarations, %Load available rules. load_rules, presimplify, format('\n\n\n', []), process_action, format('\n', []), halt, !. %------------------------------------------------------------------------------- simplifier_main :- format('\n\n', []), throw_error('The Simplifier failed.\n', []). %------------------------------------------------------------------------------- presimplify:- retrieve_proof_file(ProofFile_Atom), convert_file_for_display(ProofFile_Atom, DisplayProofFile_Atom), write('Processing '), write(DisplayProofFile_Atom), write(' ...'), nl, do_rule_substitutions0, write_log_file_banner, write_rules_read, % First load the vcs. new_load_vcg, retrieve_simplified_proof_file(SimplifiedProofFile_Atom), open(SimplifiedProofFile_Atom, write, SimplifiedProofFile_Stream), display_banner(SimplifiedProofFile_Stream), % Then simplify the vcs. perform_simplification(SimplifiedProofFile_Stream), close(SimplifiedProofFile_Stream), write_overall_rule_summary, close_log_file, ( typecheck_only(on) ; typecheck_only(off), do_wrap_lines ), maybe_issue_syntax_reminder, !. new_load_vcg:- get_provenance_proof_file_kind(verification_conditions), load_vcg, !. new_load_vcg:- get_provenance_proof_file_kind(deadpath_search), load_dpc, !. new_load_vcg:- get_provenance_proof_file_kind(path_functions), load_pfs, !. new_load_vcg:- throw_error('The Simplifier failed to determine proof file kind.\n', []). %------------------------------------------------------------------------------- process_action:- get_switch_typecheck_only(on), format('Typechecking only - no simplification has been performed', []), !. process_action:- get_switch_deadpaths(off), !, get_switch_typecheck_only(off), retrieve_simplified_proof_file(SimplifiedProofFile_Atom), convert_file_for_display(SimplifiedProofFile_Atom, DisplaySimplifiedProofFile_Atom), format('Automatic simplification completed.\n\n', []), format('Simplified output sent to ~w.',[DisplaySimplifiedProofFile_Atom]), !. process_action:- get_switch_deadpaths(on), retrieve_simplified_proof_file(SummaryDPCFile_Atom), convert_file_for_display(SummaryDPCFile_Atom, DisplaySummaryDPCFile_Atom), format('ZombieScope completed.\n\n', []), format('ZombieScope output sent to ~w.',[DisplaySummaryDPCFile_Atom]), !. %=============================================================================== % handle_information_request. %------------------------------------------------------------------------------- % If the user has requested information, rather than simplification, then % report the information and exit. 
%=============================================================================== % Explicitly requested help. handle_information_request:- get_switch_help(on), display_help(user_output), close_all_streams, halt(0), !. % Provided zero arguments, so implicitly requested help. handle_information_request:- get_switch_empty(on), display_help(user_output), close_all_streams, halt(0), !. % Requested version. handle_information_request:- get_switch_version(on), display_header(user_output), close_all_streams, halt(0), !. % None of the above. Continue as normal. handle_information_request:- !. %=============================================================================== var_const(VarId_Atom, TypeId_Atom, v):- get_declarations_variable(TypeId_Atom, VarId_Atom). var_const(ConstId_Atom, TypeId_Atom, c):- get_declarations_constant(TypeId_Atom, ConstId_Atom). %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/data__system.pro0000644000175000017500000000776111753202337020016 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Provides access to all system information. This is information that is % specific to the particular build or running process. %############################################################################### %############################################################################### % MODULE %############################################################################### :- module(data__system, [get_system_start_date_time/2, add_system_start_date_time/2, get_system_toolname/1, add_system_toolname/1, save_data__system/0]). %############################################################################### % DEPENDENCIES %############################################################################### :- use_module('data__formats.pro', [add_state/2]). :- use_module('ioutilities.pro', [write_terms_to_file/2]). :- use_module('data__data_files.pro', [must_get_datafiles_debug/2]). %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### :- add_state(get_system_start_date_time, get_system_start_date_time('NowDate_Atom', 'NowTime_Atom')). 
:- dynamic(get_system_start_date_time/2). :- add_state(get_system_toolname, get_system_toolname('ToolName_Atom')). :- dynamic(get_system_toolname/1). %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % Add. %=============================================================================== add_system_start_date_time(NowDate_Atom, NowTime_Atom):- assert(get_system_start_date_time(NowDate_Atom, NowTime_Atom)), !. add_system_toolname(ToolName_Atom):- assert(get_system_toolname(ToolName_Atom)), !. %=============================================================================== %=============================================================================== % save_data__system. %=============================================================================== save_data__system:- must_get_datafiles_debug(data__system, DebugFile_Atom), write_terms_to_file(DebugFile_Atom, [data__system:get_system_start_date_time/2, data__system:get_system_toolname/1]), !. %=============================================================================== %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/quant.pro0000644000175000017500000001733011753202337016463 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Support predicates for working with quantifiers. %############################################################################### %############################################################################### % DEPENDENCIES %############################################################################### %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % form_instantiable_hyp_facts. 
%------------------------------------------------------------------------------- % This predicate sets up a collection of predicates which can be % pattern-matched against to automate use of the universally-quantified % hypothesis formulae in discharging conclusions. % % Each for_all will typically be of the form (or worse!): % for_all(i:ordtype, lwb <= i and i <= upb -> (element(a, [i]) >= x and % element(a, [i]) <= y)) % % This predicate creates a set of facts of the form: % get_forall_hyp(STROB >= LWB, CONDS, N) % % Where N is the hypothesis number from which the fact was derived, STROB % is typically the structured object access, and CONDS is a list of % conditions that must be true to allow the hypothesis to be used. For % instance, the above, if it were H3, would give us: % get_forall_hyp(element(a, [I]) >= x, [lwb <= I, I <= upb], 3). % get_forall_hyp(element(a, [I]) <= y, [lwb <= I, I <= upb], 3). % % In these facts, bound variables are replaced by Prolog variables, so % standard Prolog pattern-matching may be used to find matches directly. % Furthermore, these facts are set up to eliminate all nested for_alls as % far as possible, so a formula: % for_all(i:ordt1, l1 <= i and i <= u1 -> for_all(j:ordt2, l2 <= j and % j <= u2 -> (element(element(a, [i]), [j]) >= lwb and % element(element( a, [i]), [j]) <= upb))) % Should yield a collection of facts such as: % get_forall_hyp(element(element(a, [I]), [J]) >= lwb, [l1 <= I, I <= u1, % l2 <= J, J <= u2], 4). % etc. %=============================================================================== form_instantiable_hyp_facts :- prune_all_forall_hyp, fail. form_instantiable_hyp_facts :- get_hyp(for_all(X:T, P), x, N), save_skolemisation_of(N, for_all(X:T, P)), fail. /* force backtracking, to do all relevant hypotheses */ form_instantiable_hyp_facts. %------------------------------------------------------------------------------- save_skolemisation_of(N, Formula) :- skolemise(Formula, Skolemisation, Conditions), !, save_the_skolemisations(N, Skolemisation, Conditions), !. %------------------------------------------------------------------------------- save_the_skolemisations(N, VAR, C) :- var(VAR), !, add_forall_hyp(VAR, C, N). save_the_skolemisations(N, X and Y, C) :- add_forall_hyp(X and Y, C, N), !, save_the_skolemisations(N, X, C), !, save_the_skolemisations(N, Y, C), !. save_the_skolemisations(N, X, C) :- add_forall_hyp(X, C, N), !. %=============================================================================== % skolemise(+Formula, -Goal_Part, -Conditions_List). %------------------------------------------------------------------------------- % Unwraps for_alls, splits implications up into Conditions (LHS) and Goal % parts (RHS). Unwrapping involves replacing the bound variable % consistently throughout the formula with a new Prolog variable (the "X" % in the relevant clauses below), but ensuring this variable does not % become instantiated or "entangled" (to use the quantum terminology!) % before the relevant expressions-with-Prolog-variables get returned ready % to be asserted by save_the_skolemisations in the calling environment. %=============================================================================== skolemise(VAR, VAR, []) :- /* Need? */ var(VAR), !. skolemise(for_all(_V:_T, VAR), VAR, []) :- var(VAR), !.
skolemise(for_all(V:_T, LHS -> RHS), Formula, Conditions) :- introduce_prolog_variable(_X, V, LHS -> RHS, L -> R), /* X: new uncaptured variable */ !, skolemise(R, Formula, C2), form_conditions(L, C1), !, append(C1, C2, Conditions). skolemise(for_all(V:_T, RHS), Formula, Conditions) :- /* No implication */ introduce_prolog_variable(_X, V, RHS, R), !, skolemise(R, Formula, Conditions). skolemise(LHS -> RHS, Formula, Conditions) :- !, skolemise(RHS, Formula, C2), form_conditions(LHS, C1), !, append(C1, C2, Conditions). skolemise(Formula, Formula, []). %------------------------------------------------------------------------------- /* introduce_prolog_variable(NewVar, {for} Atom, OldFormula, NewFormula) */ introduce_prolog_variable(_X, _V, Old, Old) :- var(Old), !. introduce_prolog_variable(X, V, V, X) :- !. introduce_prolog_variable(_X, _V, Old, Old) :- atomic(Old), /* and not equal to V */ !. introduce_prolog_variable(X, V, [H|T], New) :- !, introduce_prolog_variable_in_list(X, V, [H|T], New). introduce_prolog_variable(_X, _V, [], []) :- !. introduce_prolog_variable(X, V, Old, New) :- /* Old must be composite */ Old =.. [F|OldArgs], introduce_prolog_variable_in_list(X, V, OldArgs, NewArgs), !, New =.. [F|NewArgs]. %------------------------------------------------------------------------------- introduce_prolog_variable_in_list(X, V, [OldH|OldT], [NewH|NewT]) :- introduce_prolog_variable(X, V, OldH, NewH), !, introduce_prolog_variable_in_list(X, V, OldT, NewT). introduce_prolog_variable_in_list(_X, _V, [], []) :- !. %------------------------------------------------------------------------------- form_conditions(VAR, [VAR]) :- var(VAR), !. form_conditions(L and R, List) :- form_conditions(L, L1), !, form_conditions(R, L2), !, append(L1, L2, List). form_conditions(L, [L]) :- !. %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/spadesimp.windows.manifest0000755000175000017500000000056611753202337022025 0ustar eugeneugen spark-2012.0.deb/simplifier/spxref.pro0000644000175000017500000000475711753202337016653 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % This code is not part of the core simplifier, it exists for analysis % purposes only. The small file loads all dynamic features of the % simplifier environment, enabling the use of the sicstus spxref analysis % tool. 
%############################################################################### %############################################################################### % MODULE %############################################################################### %############################################################################### % DEPENDENCIES %############################################################################### %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### %############################################################################### % PREDICATES %############################################################################### %Initialisation file for spxref. :- use_module('settings.pro'). :- declare_settings. :- use_module('opdeclar.pro'). :- declare_operators. %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/userules.pro0000644000175000017500000015571411753202337017213 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Support facilities for applying any user defined proof rules to discharge % VCs. %############################################################################### %############################################################################### % MODULE %############################################################################### %:- module(userules, % [apply_user_defined_proof_rules/0]). %############################################################################### % DEPENDENCIES %############################################################################### :- use_module('data__formats.pro', [add_state/2, add_type/2]). %############################################################################### % TYPES %############################################################################### :- add_type('OperationType', [conc, rule_phase_inference, fact('FactId_Atom'), hyp('HypId_Atom')]). :- add_type('RuleNameType', ['Filename_CharList : Rulename_CharList(Int)']). :- add_type('RuleMatchType', [ rewrite('Expr', 'ProvedConditionType_List'), inference('ProvedConditionType_List') ]). 
:- add_type('ProvedConditionType', proved('Expr', 'HypId_Int_List', 'FactId_Int_List')). :- add_type('RuleSortType', [user_rewrite_rule, user_inference_rule]). %############################################################################### % DATA %############################################################################### :- add_state(get_fact_from_rule, get_fact_from_rule('Expr', 'Int', 'OperationType', 'RuleNameType', 'RuleMatchType', 'RuleSort')). :- add_state(get_candidate_fact, get_candidate_fact('Int', 'Expr')). :- add_state(used_unique_reference, used_unique_reference('Atom', 'Int')). %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % apply_user_defined_proof_rules % % Three phases. %=============================================================================== apply_user_defined_proof_rules :- i_am_using_rule(udr0), prune_all_fact_from_rule, prune_all_unique_references, retractall(proved_by_user_rules), fail. %------------------------------------------------------------------------------- % Phase 1 %------------------------------------------------------------------------------- apply_user_defined_proof_rules :- i_am_using_rule(udr1), get_conc(Goal, x, N), \+ get_proved_conc(N), try_to_prove_via_inference_rule(Goal), fail. apply_user_defined_proof_rules :- record_any_proved_conclusions, proved_all_conclusions, !. apply_user_defined_proof_rules :- i_am_using_rule(udr2), get_conc(Goal, x, N), \+ get_proved_conc(N), try_to_prove_via_rewrite_rule(Goal), fail. apply_user_defined_proof_rules :- record_any_proved_conclusions, proved_all_conclusions, !. apply_user_defined_proof_rules :- i_am_using_rule(udr3), create_new_facts_from_hypotheses, fail. apply_user_defined_proof_rules :- record_any_proved_conclusions, proved_all_conclusions, !. %------------------------------------------------------------------------------- % Phase 2 %------------------------------------------------------------------------------- apply_user_defined_proof_rules :- i_am_using_rule(udr4), get_conc(Goal, x, N), \+ get_proved_conc(N), try_to_prove_via_inference_rule(Goal), fail. apply_user_defined_proof_rules :- record_any_proved_conclusions, proved_all_conclusions, !. apply_user_defined_proof_rules :- i_am_using_rule(udr5), get_conc(Goal, x, N), \+ get_proved_conc(N), try_to_prove_via_rewrite_rule(Goal), fail. apply_user_defined_proof_rules :- record_any_proved_conclusions, proved_all_conclusions, !. apply_user_defined_proof_rules :- i_am_using_rule(udr6), create_new_facts_from_existing_facts, fail. apply_user_defined_proof_rules :- record_any_proved_conclusions, proved_all_conclusions, !. %------------------------------------------------------------------------------- % Phase 3 %------------------------------------------------------------------------------- apply_user_defined_proof_rules :- i_am_using_rule(udr7), get_conc(Goal, x, N), \+ get_proved_conc(N), try_to_prove_via_inference_rule(Goal), fail. apply_user_defined_proof_rules :- record_any_proved_conclusions, proved_all_conclusions, !. apply_user_defined_proof_rules :- i_am_using_rule(udr8), get_conc(Goal, x, N), \+ get_proved_conc(N), try_to_prove_via_rewrite_rule(Goal), fail. apply_user_defined_proof_rules :- record_any_proved_conclusions, proved_all_conclusions, !. % always succeeds (eventually) apply_user_defined_proof_rules:- !. 
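%===============================================================================
% Illustrative sketch only -- not called anywhere in the Simplifier. It shows
% the failure-driven loop idiom used by apply_user_defined_proof_rules above:
% each clause enumerates candidates, performs its side effect and then fails
% to force backtracking, and a final catch-all clause makes the predicate
% succeed once every candidate has been visited. All udr_example_* names are
% hypothetical.
%===============================================================================
:- dynamic(udr_example_candidate/1).

udr_example_visit_all :-
    udr_example_candidate(Candidate),               % enumerate on backtracking
    format('visiting candidate ~w~n', [Candidate]),
    fail.                                           % force the next solution
udr_example_visit_all.                              % succeeds once all visited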
%------------------------------------------------------------------------------- % record_any_proved_conclusions % % Determines which (if any) conclusions have been proved by the previous % application of user rules. Records the conclusions as proved and promotes any % rules used to prove side conditions of rules to hypotheses. % This clause should be invoked after the completion of each attempt % in proving a Goal thus avoiding the unnecessary application of rules % to conclusions which are already proved. %------------------------------------------------------------------------------- record_any_proved_conclusions:- i_am_using_rule(udr9), get_conc(Goal, x, N), \+ get_proved_conc(N), is_a_fact_from_rule(_Ref, From, Goal, Name, RuleMatchDetails, RuleSort), record_conclusion_proved_by_rules(N, From, Goal, Name, RuleMatchDetails, RuleSort), fail. record_any_proved_conclusions:- !. %------------------------------------------------------------------------------- % record_conclusion_proved_by_rules(+Id_Int, +From, +Goal, +Name, % +RuleMatchDetails, +RuleSort) %------------------------------------------------------------------------------- record_conclusion_proved_by_rules(N, From, Goal, Name, RuleMatchDetails, RuleSort) :- issue_message('Applied rule ', Name), add_proved_conc(N), issue_proved_message(N), determine_facts_and_promote_to_hypotheses(From, RuleMatchDetails, NewRuleMatchDetails), assert_log_fact(rule_proved_conc, [N, Goal, Name, NewRuleMatchDetails, RuleSort]), mark_whether_proved_by_user_rules(RuleSort), !. %------------------------------------------------------------------------------- % mark_whether_proved_by_user_rules(+Rule_Type) %------------------------------------------------------------------------------- mark_whether_proved_by_user_rules(user_inference_rule) :- !, add_fact(proved_by_user_rules). mark_whether_proved_by_user_rules(user_rewrite_rule) :- !, add_fact(proved_by_user_rules). mark_whether_proved_by_user_rules(_) :- !. %############################################################################### % INFERENCE RULES. %############################################################################### %=============================================================================== % try_to_prove_via_inference_rule(+Goal) % % Attempt to prove goal through inference rules. % Process all inference rules where all the variables in Conditions are ground % first and then (in second clause) where some of the variables in % Conditions are not ground. %=============================================================================== try_to_prove_via_inference_rule(Goal) :- % Goal is ground. inference_rule_match(Name, Goal, Conditions, RuleSort), var_free(Conditions), fulfil(Conditions, ProvedConditions, []), var_free(ProvedConditions), % success: don't backtrack and retry !, record_rule_success(conc, Goal, Name, RuleSort, inference(ProvedConditions), _). try_to_prove_via_inference_rule(Goal) :- % Goal is ground. inference_rule_match(Name, Goal, Conditions, RuleSort), \+ var_free(Conditions), fulfil(Conditions, ProvedConditions, []), var_free(ProvedConditions), % success: don't backtrack and retry !, record_rule_success(conc, Goal, Name, RuleSort, inference(ProvedConditions), _). %############################################################################### % REWRITE RULES. 
%############################################################################### %=============================================================================== % try_to_prove_via_rewrite_rule(+Goal) % % Attempt to prove goal through rewrite rules. %=============================================================================== try_to_prove_via_rewrite_rule(Goal):- % Goal is ground. rewrite_rule_match(Name, Goal, NewGoal, Conditions, RuleSort), var_free(Conditions), fulfil([NewGoal|Conditions], ProvedConditions, []), % once the side-conditions have been met novars(NewGoal), var_free(ProvedConditions), % success: don't backtrack and retry */ !, record_rule_success(conc, Goal, Name, RuleSort, rewrite(NewGoal, ProvedConditions), _). try_to_prove_via_rewrite_rule(Goal):- % Goal is ground. rewrite_rule_match(Name, Goal, NewGoal, Conditions, RuleSort), \+ var_free(Conditions), fulfil([NewGoal|Conditions], ProvedConditions, []), % once the side-conditions have been met novars(NewGoal), var_free(ProvedConditions), % success: don't backtrack and retry */ !, record_rule_success(conc, Goal, Name, RuleSort, rewrite(NewGoal, ProvedConditions), _). %=============================================================================== % create_new_facts_from_hypotheses %=============================================================================== create_new_facts_from_hypotheses:- get_hyp(Formula, x, N), \+ know_eliminated(N), create_new_fact(hyp(N), Formula). %=============================================================================== % create_new_facts_from_existing_facts %=============================================================================== create_new_facts_from_existing_facts:- copy_existing_facts, get_candidate_fact(N, Formula), create_new_fact(fact(N), Formula). %=============================================================================== % copy_existing_facts %=============================================================================== copy_existing_facts:- prune_all_candidate_fact, fail. copy_existing_facts:- get_fact_from_rule(Fact, N, _, _, _, _), Fact \= true, add_candidate_fact(N, Fact), fail. copy_existing_facts :- !. %=============================================================================== % create_new_fact(+HypOrFact, +Formula) %=============================================================================== create_new_fact(HypOrFact, Formula):- rewrite_rule_match(Name, Formula, NewFormula, Conditions, RuleSort), fulfil(Conditions, ProvedConditions, []), nonvar(NewFormula), dont_already_know(NewFormula), var_free(ProvedConditions), record_rule_success(HypOrFact, NewFormula, Name, RuleSort, rewrite(Formula, ProvedConditions), _). %=============================================================================== % inference_rule_match(+Name, +Goal, -Conditions, -RuleSort) % % Find a user-defined inference rule and provide its generalised form. %=============================================================================== inference_rule_match(Name, Goal, Conditions, RuleSort) :- nonvar(Goal), functor(Goal, Op_Atom, Args_Int), !, functor(Goal1, Op_Atom, Args_Int), inference_rule_match_x(Name, Goal1, Conditions1, RuleSort), add_conds(Args_Int, Goal, Goal1, Conditions1, Conditions). 
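%===============================================================================
% Illustrative sketch only -- not used by the Simplifier. inference_rule_match
% above generalises a ground goal before matching: functor/3 builds a fresh
% skeleton with the same name and arity, the rule is matched against that
% skeleton, and add_conds/5 then emits a residual Arg = Arg1 condition for
% every argument position that did not unify directly, leaving fulfil/3 to
% discharge them. The predicate name below is hypothetical.
%===============================================================================
udr_example_goal_skeleton(Goal, Skeleton) :-
    nonvar(Goal),
    functor(Goal, Op_Atom, Arity_Int),     % e.g. (>=)/2 for element(a, [3]) >= 0
    functor(Skeleton, Op_Atom, Arity_Int). % Skeleton is then _X >= _Y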
%=============================================================================== % inference_rule_match_x(+Name, +Goal, -Conditions, -RuleSort) % %=============================================================================== inference_rule_match_x(Name, Goal, Conditions, inference_rule):- inference_rule(Name, Goal, Conditions). inference_rule_match_x(Name, Goal, Conditions, user_inference_rule):- user_inference_rule(Name, Goal, Conditions). %=============================================================================== % add_conds(+N, +A_Goal, +B_GOAL, +OldList, -NewList) % % Unify lists arguments in A_Goal and B_Goal creating NewList from OldList %=============================================================================== add_conds(0, _Goal, _Goal1, Conditions_List, Conditions_List):- !. add_conds(N, Goal, Goal1, OldConditions_List, NewConditions_List):- arg(N, Goal, Arg), arg(N, Goal1, Arg), !, M is N-1, add_conds(M, Goal, Goal1, OldConditions_List, NewConditions_List). add_conds(N, Goal, Goal1, OldConditions_List, [Arg=Arg1 | NewConditions_List]):- arg(N, Goal, Arg), arg(N, Goal1, Arg1), !, M is N-1, add_conds(M, Goal, Goal1, OldConditions_List, NewConditions_List). %=============================================================================== % record_rule_success(+From, +Goal, +Name, +RuleMatchDetails, -Ref_Int) % % From, -- Operation for which the rule was applied % Goal, -- Goal satisfied % Name, -- Name of applied rule % RuleMatchDetails, -- How the rule was applied % Ref -- The fact number corresponding to % the proof of the Goal %=============================================================================== record_rule_success(_, Goal, _Name, _RuleSort, _RuleMatchDetails, FactNumber) :- % already known: don't add again get_fact_from_rule(Goal, FactNumber, _, _, _, _), !. record_rule_success(From, Goal, Name, RuleSort, RuleMatchDetails, Ref) :- get_unique_reference(fact, Ref), add_fact_from_rule(Goal, Ref, From, Name, RuleMatchDetails, RuleSort), !. %=============================================================================== % rewrite_rule_match(Name, +Goal, -NewGoal, -NewConditions, -RuleSort) % % Apply the rewrite rule Name to Goal: % - NewGoal: is result of using the rewrite rule on Goal, % - NewConditions: list of conditions of the rule % - RuleSort: type of rule. % % Find a user-defined rewrite rule that matches and provide its generalised % form. % % User-defined rewrite rules OR non-ground rewrite rules coming % from the RLS file are both applicable here. %=============================================================================== rewrite_rule_match(Name, Goal, NewGoal, NewConditions, RuleSort):- nonvar(Goal), rewrite_rule_match_x(Name, Goal, NewGoal, NewConditions, RuleSort). rewrite_rule_match_x(Name, Goal, NewGoal, NewConditions, RuleSort):- rewrite_rule_match_y(Name, OldExp, NewExp, Conditions, RuleSort), pattern_match_rule(Goal, OldExp, NewExp, NewGoal, Conditions, NewConditions). rewrite_rule_match_x(Name, Goal, NewGoal, NewConditions, RuleSort):- rewrite_rule_match_y(Name, NewExp, OldExp, Conditions, RuleSort), nonvar(OldExp), pattern_match_rule(Goal, OldExp, NewExp, NewGoal, Conditions, NewConditions). rewrite_rule_match_y(Name, OldExp, NewExp, Conditions, user_rewrite_rule):- user_rewrite_rule(Name, OldExp, NewExp, Conditions). rewrite_rule_match_y(Name, OldExp, NewExp, Conditions, nonground_replace_rule):- nonground_replace_rule(Name, OldExp, NewExp, Conditions). 
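%===============================================================================
% Illustrative sketch only -- a hypothetical example of the rule shape that
% rewrite_rule_match above consumes (assuming user_rewrite_rule/4 facts of the
% form Name, OldExp, NewExp, Conditions, as used by rewrite_rule_match_y,
% which tries each rule in both directions). The fact is stored under a
% udr_example_* name so it is never picked up as a real rule; it would allow
% abs(E) to be rewritten to E whenever E >= 0 can be discharged as a side
% condition.
%===============================================================================
udr_example_rewrite_rule('my_rules : abs_nonneg(1)',   % Name: file : rulename(N)
                         abs(X),                       % OldExp
                         X,                            % NewExp
                         [X >= 0]).                    % Conditions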
%=============================================================================== % pattern_match_rule(Goal, OldExp, NewExp, NewGoal, Conditions, Conditions) % %=============================================================================== pattern_match_rule(Goal, Goal, NewExpr, NewExpr, Conditions, Conditions). pattern_match_rule(Goal, OldExp, NewExp, NewGoal, Conditions, NewConditions) :- ground(Goal), \+ atomic(Goal), % % The use of =.. in creating term NewGoal is justified as the code % needs to copy the arguments and the functor from Goal. % Goal =.. [Op|Args], pattern_match_rule_list(Args, OldExp, NewExp, NewArgs, Conditions, NewConditions), NewGoal =.. [Op|NewArgs]. %=============================================================================== % pattern_match_rule_list([OldArg|Rest], OldExp, NewExp, % [NewArg|Rest], Conditions, NewConditions) % %=============================================================================== pattern_match_rule_list([OldArg|Rest], OldExp, NewExp, [NewArg|Rest], Conditions, NewConditions) :- find_pattern_match(OldArg, OldExp, NewExp, NewArg, [], ExtraConditions), append(ExtraConditions, Conditions, NewConditions). pattern_match_rule_list([Arg|OldRest], OldExp, NewExp, [Arg|NewRest], Conditions, NewConditions) :- pattern_match_rule_list(OldRest, OldExp, NewExp, NewRest, Conditions, NewConditions). %=============================================================================== % find_pattern_match(OldArg, OldExp, NewExp, NewArg, Conditions, NewConditions) % %=============================================================================== find_pattern_match(OldArg, OldExp, NewExp, NewArg, Conditions, NewConditions) :- pattern_match_rule(OldArg, OldExp, NewExp, NewArg, Conditions, NewConditions). find_pattern_match(OldArg, OldExp, _NewExp, OldArg, Conditions, Conditions) :- ground(OldArg), functor(OldArg, Op, _), nonvar(OldExp), functor(OldExp, Op, _). %=============================================================================== % Solve a rule match's side conditions (handling ground conditions first). % % Inference rule strategy (based on that in the proof checker): % F may_be_deduced_from GOALS. % % 1. Split GOALS into fully-instantiated-goals (i.e. primary goals) and % partially-instantiated-goals (i.e. secondary goals). % % 2. Attempt to satisfy all primary goals. Either: % % a. All were satisfied. Then attempt to satisfy secondary goals. % As soon as one becomes satisfied, split the remainder into primaries % and secondaries and attempt to satisfy them in the same way (i.e. % recursively). % % b. At least one can be shown to be false. This rule-match will never % succeed in its current instantiation, so cut and fail. % % c. Some were not satisfied. Leave secondary goals and backtrack. %=============================================================================== %=============================================================================== % fulfil(Goals, ProvedGoals, UnsatisfiedGoals) %=============================================================================== fulfil([], [], []):- !. fulfil(Goals, ProvedGoals, UnsatisfiedGoals):- split(Goals, Primaries, Secondaries), try_to_satisfy(Primaries, ProvedPrimaries, UnsatisfiedPrimaries), fulfil_x(UnsatisfiedPrimaries, Secondaries, ProvedSecondaries, UnsatisfiedSecondaries), % All conditions must be ground. var_free(UnsatisfiedSecondaries), append(ProvedPrimaries, ProvedSecondaries, ProvedGoals), append(UnsatisfiedPrimaries, UnsatisfiedSecondaries, UnsatisfiedGoals). 
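%===============================================================================
% Illustrative sketch only -- the primary/secondary split that fulfil/3 above
% relies on. Fully instantiated conditions (primaries) are attempted first;
% conditions still containing Prolog variables (secondaries) are deferred in
% the hope that solving the primaries instantiates them. ground/1 is used here
% in place of the Simplifier's novars/1 (compare split/3 below); the predicate
% name is hypothetical.
%===============================================================================
udr_example_split([], [], []).
udr_example_split([G|Gs], [G|Primaries], Secondaries) :-
    ground(G),                                     % fully instantiated: primary
    !,
    udr_example_split(Gs, Primaries, Secondaries).
udr_example_split([G|Gs], Primaries, [G|Secondaries]) :-
    udr_example_split(Gs, Primaries, Secondaries). % contains variables: secondary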
%=============================================================================== %fulfil_x(+UnsatisfiedPrimaries, +Secondaries, +ProvedSecondaries, % +UnsatisfiedSecondaries). %=============================================================================== fulfil_x([], Secondaries, ProvedSecondaries, UnsatisfiedSecondaries):- match_up(Secondaries, ProvedSecondaries, UnsatisfiedSecondaries). fulfil_x(UnsatisfiedPrimaries, Secondaries, [], Secondaries):- UnsatisfiedPrimaries \= []. %=============================================================================== % split(Goal_List, Primary_List, Secondary_List) %=============================================================================== split([], [], []):- !. split([G|Gs], [G|Ps], Ss) :- novars(G), !, split(Gs, Ps, Ss), !. split([G|Gs], Ps, [G|Ss]) :- split(Gs, Ps, Ss), !. %=============================================================================== % try_to_satisfy(+Primary_GoalList, +ProvedPrimary_GoalList, % +UnsatisifiedPrimary_GoalList). %=============================================================================== try_to_satisfy([], [], []):- !. try_to_satisfy([G|Gs], [proved(G,[],[])|Ps], Us) :- nonvar(G), G=goal(D), !, evaluate_immediate_condition(D), !, try_to_satisfy(Gs, Ps, Us), !. try_to_satisfy([G|_], _, _) :- % stop immediately if so does_not_typecheck_as_boolean(G), !, fail. try_to_satisfy([G|Gs], [proved(G,Hs,Fs)|Ps], Us) :- rule_phase_infer(G, Hs, Fs), !, try_to_satisfy(Gs, Ps, Us), !. try_to_satisfy([G|_], _, _) :- simplify(not G, NotG), % if a ground goal is deducibly false, stop now infer(NotG, _), !, fail. try_to_satisfy([G|Gs], Ps, [G|Us]) :- try_to_satisfy(Gs, Ps, Us), !. %=============================================================================== % match_up(Goals, ProvedGoals, UnsatisfiedGoals) %=============================================================================== match_up([], [], []):- !. match_up(Goals, ProvedGoals, UnsatisfiedGoals):- seek_solutions(Goals, Proved, Unsatisfied), % ensure list of unsatisfied goals is shrinking Goals \= Unsatisfied, split(Unsatisfied, Primaries, Secondaries), try_to_satisfy(Primaries, ProvedPrimaries, UnsatisfiedPrimaries), append(Proved, ProvedPrimaries, ProvedSoFar), match_up(Secondaries, ProvedSecondaries, UnsatisfiedSecondaries), append(ProvedSoFar, ProvedSecondaries, ProvedGoals), append(UnsatisfiedPrimaries, UnsatisfiedSecondaries, UnsatisfiedGoals). %=============================================================================== % seek_solutions(+Goal_ExpList, % -ProvedGaols_ExpList, % -UnsatisfiedGoal_ExpList) % % Try to prove all the goals in Goal_ExpList % * ProvedGoals_ExpList contains the list of all proved goals; % * UnsatisfiedGoal_ExpList contains the list of all unproven goals. %=============================================================================== seek_solutions([], [], []):- !. % Case when H_Goal_Exp is provable. seek_solutions([H_Goal_Exp|T_Goal_ExpList], [proved(H_Goal_Exp, Hs, Fs)|ProvedGoal_ExpList], UnsatisfiedGoal_ExpList):- do_satisfy_goal(H_Goal_Exp, Hs, Fs), seek_solutions(T_Goal_ExpList, ProvedGoal_ExpList, UnsatisfiedGoal_ExpList). % Case when H_Goal_Exp is not provable. seek_solutions([H_Goal_Exp|T_Goal_ExpList], ProvedGoal_ExpList, [H_Goal_Exp|UnsatisfiedGoal_ExpList]) :- seek_solutions(T_Goal_ExpList, ProvedGoal_ExpList, UnsatisfiedGoal_ExpList). 
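%===============================================================================
% Illustrative sketch only -- a hypothetical example of the proved/3 terms
% that try_to_satisfy and seek_solutions above accumulate (see the
% 'ProvedConditionType' declaration earlier in this file). Each term records a
% discharged side condition together with the hypothesis numbers and
% fact-from-rule numbers used to discharge it.
%===============================================================================
udr_example_proved_conditions([proved(x >= 0,    [3, 7], []),
                               proved(y = x + 1, [],     [2])]).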
%=============================================================================== % do_satisfy_goal(+Goal_Exp, +HypId_IntList, +FactId_IntList) %=============================================================================== do_satisfy_goal(G, [], []) :- nonvar(G), G = goal(Pred), !, evaluate_immediate_condition(Pred), !. do_satisfy_goal(Goal_Exp, Hs, Fs) :- % Save Goal in the database. retractall(current_sat_goal(_)), asserta(current_sat_goal(Goal_Exp)), !, try_satisfy_goal(Goal_Exp, [], Hs, Fs). %=============================================================================== % try_satisfy_goal(+Goal_Exp, Instance_ExpList, HypId_IntList, FactId_IntList) %=============================================================================== try_satisfy_goal(G, InstanceList, H, F) :- current_sat_goal(Goal), % Find a solution to satisfy Goal which may ground some of the variables % in Goal. satisfy_goal(Goal, Hs, Fs), % Ensure we have not gone down this path before. \+ is_in(Goal, InstanceList), !, try_satisfy_goal_x(G, Goal, InstanceList, H, F, Hs, Fs). %=============================================================================== % try_satisfy_goal_x(+A_Goal_Exp, +B_Goal_Exp, +Instance_ExpList, % +A_HypId_IntList, -B_HypId_IntList, % +A_FactId_IntList, -B_FactId_IntList) %=============================================================================== % Found a solution. try_satisfy_goal_x(Goal, Goal, _InstanceList, H, F, H, F). % On backtracking from try_satisfy_goal, find another possible solution to % satisfy goal. try_satisfy_goal_x(G, Goal, InstanceList, H, F, _Hs, _Fs):- try_satisfy_goal(G, [Goal|InstanceList], H, F). %=============================================================================== % satisfy_goal_x(+Goal, -Hs, -Fs) %=============================================================================== satisfy_goal(A=B, Hs, Fs) :- novars(A), simplify(A,X), A\=X, !, satisfy_goal(X=B, Hs, Fs). satisfy_goal(A=B, Hs, Fs) :- novars(B), simplify(B,X), B\=X, !, satisfy_goal(A=X, Hs, Fs). satisfy_goal(A<>B, Hs, Fs) :- novars(A), simplify(A,X), A\=X, !, satisfy_goal(X<>B, Hs, Fs). satisfy_goal(A<>B, Hs, Fs) :- novars(B), simplify(B,X), B\=X, !, satisfy_goal(A<>X, Hs, Fs). satisfy_goal(A>=B, Hs, Fs) :- novars(A), simplify(A,X), A\=X, !, satisfy_goal(X>=B, Hs, Fs). satisfy_goal(A>=B, Hs, Fs) :- novars(B), simplify(B,X), B\=X, !, satisfy_goal(A>=X, Hs, Fs). satisfy_goal(A<=B, Hs, Fs) :- novars(A), simplify(A,X), A\=X, !, satisfy_goal(X<=B, Hs, Fs). satisfy_goal(A<=B, Hs, Fs) :- novars(B), simplify(B,X), B\=X, !, satisfy_goal(A<=X, Hs, Fs). satisfy_goal(A>B, Hs, Fs) :- novars(A), simplify(A,X), A\=X, !, satisfy_goal(X>B, Hs, Fs). satisfy_goal(A>B, Hs, Fs) :- novars(B), simplify(B,X), B\=X, !, satisfy_goal(A>X, Hs, Fs). satisfy_goal(A B %------------------------------------------------------------------------------- satisfy_goal(A -> B, Hs, Fs) :- novars(A), !, satisfy_goal_A_implies_B_novars_A(A -> B, Hs, Fs). satisfy_goal(A -> B, Hs, Fs) :- novars(B), !, satisfy_goal_A_implies_B_novars_B(A -> B, Hs, Fs). satisfy_goal(A -> B, Hs, Fs) :- satisfy_goal_A_implies_B(A -> B, Hs, Fs). %------------------------------------------------------------------------------- % A <-> B %------------------------------------------------------------------------------- satisfy_goal(A <-> B, [], []) :- satisfy_goal_A_iff_B(A, B). satisfy_goal(A <-> B, Hs, Fs) :- satisfy_goal(A -> B, H1, F1), satisfy_goal(B -> A, H2, F2), merge_sort(H1, H2, Hs), merge_sort(F1, F2, Fs). 
%------------------------------------------------------------------------------- % A=B %------------------------------------------------------------------------------- satisfy_goal(A=B, [], []) :- novars(B), satisfy_goal_A_equals_B(A, B). satisfy_goal(A=B, [], []) :- novars(A), satisfy_goal_A_equals_B(B, A). %------------------------------------------------------------------------------- % A<>B %------------------------------------------------------------------------------- satisfy_goal(A<>B, Hs, []) :- fact(A>B, Hs). satisfy_goal(A<>B, Hs, []) :- fact(AB, [], [F]) :- get_fact_from_rule(A>B, F, _, _, _, _). satisfy_goal(A<>B, [], [F]) :- get_fact_from_rule(BB, [], [F]) :- get_fact_from_rule(AB, [], [F]) :- get_fact_from_rule(B>A, F, _, _, _, _). %------------------------------------------------------------------------------- % A>B %------------------------------------------------------------------------------- satisfy_goal(A>B, Hs, Fs) :- satisfy_goal(A>=B, H1, F1), novars(A<>B), rule_phase_infer(A<>B, H2, F2), merge_sort(H1, H2, Hs), merge_sort(F1, F2, Fs). %------------------------------------------------------------------------------- % AB), rule_phase_infer(A<>B, H2, F2), merge_sort(H1, H2, Hs), merge_sort(F1, F2, Fs). %------------------------------------------------------------------------------- % A>=B %------------------------------------------------------------------------------- satisfy_goal(A>=B, Hs, []) :- fact(A>B, Hs). satisfy_goal(A>=B, Hs, []) :- fact(B=B, [], [F]) :- get_fact_from_rule(A>B, F, _, _, _, _). satisfy_goal(A>=B, [], [F]) :- get_fact_from_rule(B=B, Hs, Fs) :- satisfy_goal(A=B, Hs, Fs). %------------------------------------------------------------------------------- % A<=B %------------------------------------------------------------------------------- satisfy_goal(A<=B, Hs, []) :- fact(AA, Hs). satisfy_goal(A<=B, [], [F]) :- get_fact_from_rule(AA, F, _, _, _, _). satisfy_goal(A<=B, Hs, Fs) :- satisfy_goal(A=B, Hs, Fs). %------------------------------------------------------------------------------- % X=A+B %------------------------------------------------------------------------------- satisfy_goal(X=A+B, [], []) :- novars(X), satisfy_goal_a_plus_b(X=A+B). %------------------------------------------------------------------------------- % X=A-B %------------------------------------------------------------------------------- satisfy_goal(X=A-B, [], []) :- novars(X), satisfy_goal_a_minus_b(X=A-B). %------------------------------------------------------------------------------- % Support predicate for X=A+B %------------------------------------------------------------------------------- satisfy_goal_a_plus_b(X=A+B) :- novars(B), A=X-B. satisfy_goal_a_plus_b(X=A+B) :- novars(A), B=X-A. %------------------------------------------------------------------------------- % Support predicate for X=A-B %------------------------------------------------------------------------------- satisfy_goal_a_minus_b(X=A-B) :- novars(B), A=X+B. satisfy_goal_a_minus_b(X=A-B) :- novars(A), B=A-X. %------------------------------------------------------------------------------- % Support predicate for var(G) % satisfy_goal_var_goal(+Goal, +HypId_List, +FactId_List) satisfy_goal_var_goal(Goal, [Hyp_Id], []):- get_hyp(Goal, x, Hyp_Id), integer(Hyp_Id). satisfy_goal_var_goal(Goal, [], [Fact_Id]):- get_fact_from_rule(Goal, Fact_Id, _, _, _, _), Goal \= true. %------------------------------------------------------------------------------- % satisfy_goal_not(not +Goal, -NotGoal). 
%------------------------------------------------------------------------------- satisfy_goal_not(not not Goal, Goal). satisfy_goal_not(not A=B, A<>B). satisfy_goal_not(not A<>B, A=B). satisfy_goal_not(not A>B, A<=B). satisfy_goal_not(not A<B, A>=B). satisfy_goal_not(not A>=B, A<B). satisfy_goal_not(not A<=B, A>B). %------------------------------------------------------------------------------- % satisfy_goal_A_or_B(A or B, Hs, Fs). %------------------------------------------------------------------------------- satisfy_goal_A_or_B(A, _B, Hs, Fs):- rule_phase_infer(A, Hs, Fs). satisfy_goal_A_or_B(_A, B, Hs, Fs):- satisfy_goal(B, Hs, Fs). %------------------------------------------------------------------------------- % satisfy_goal_A_implies_B_novars_A(A -> B, Hs, Fs). %------------------------------------------------------------------------------- satisfy_goal_A_implies_B_novars_A(A -> _B, Hs, Fs):- simplify(not A, NotA), rule_phase_infer(NotA, Hs, Fs). satisfy_goal_A_implies_B_novars_A(_A -> B, Hs, Fs):- satisfy_goal(B, Hs, Fs). %------------------------------------------------------------------------------- % satisfy_goal_A_implies_B_novars_B(A -> B, Hs, Fs). %------------------------------------------------------------------------------- satisfy_goal_A_implies_B_novars_B(_A -> B, Hs, Fs):- rule_phase_infer(B, Hs, Fs). satisfy_goal_A_implies_B_novars_B(A -> _B, Hs, Fs):- satisfy_goal(not A, Hs, Fs). %------------------------------------------------------------------------------- % satisfy_goal_A_implies_B(A -> B, Hs, Fs). %------------------------------------------------------------------------------- satisfy_goal_A_implies_B(A -> _B, Hs, Fs):- satisfy_goal(not A, Hs, Fs). satisfy_goal_A_implies_B(_A -> B, Hs, Fs):- satisfy_goal(B, Hs, Fs). %------------------------------------------------------------------------------- % satisfy_goal_A_iff_B(A <-> B, Hs, Fs). %------------------------------------------------------------------------------- satisfy_goal_A_iff_B(A, B):- novars(B), var(A), simplify(B, B1), A=B1. satisfy_goal_A_iff_B(A, B):- novars(A), var(B), simplify(A, A1), B=A1. %------------------------------------------------------------------------------- % satisfy_goal_A_equals_B(A, B). %------------------------------------------------------------------------------- satisfy_goal_A_equals_B(A, B):- var(A), rational_expression(B), \+ base_rational(B), evaluate_rational_expression(B, A). satisfy_goal_A_equals_B(A, B):- A=B. %=============================================================================== % rule_phase_infer(Goal, Hs, Fact_List) % % If Goal is ground and can be directly inferred from the % (also ground) Condition of an existing inference rule % then try to infer that Condition directly % % In particular, if we're trying to prove a conclusion % % s__tail(s) >= 0 % % and we have a hypothesis % % word__always_valid(s) % % and 2 rules % X >= 0 may_be_deduced_from [ word__always_valid(X) ]. % word__always_valid(s__tail(s)) may_be_deduced_from [word__always_valid(s)]. % % Then this special case allows the relevant reasoning to % be established. % % This is only a partial solution, though. In future, % a more general solution that allows multiple __tail()'s % to be "stripped away" should be sought.
%=============================================================================== rule_phase_infer(Goal, Hs, [Fact]) :- ground(Goal), inference_rule(Name, Goal, [Condition]), ground(Condition), infer(Condition, Hs), record_rule_success(rule_phase_inference, Goal, Name, inference_rule, inference([proved(Condition, Hs, [])]), Fact), !. rule_phase_infer(Goal, Hs, [Fact]) :- ground(Goal), user_inference_rule(Name, Goal, [Condition]), ground(Condition), infer(Condition, Hs), record_rule_success(rule_phase_inference, Goal, Name, user_inference_rule, inference([proved(Condition, Hs, [])]), Fact), !. % More generally. rule_phase_infer(Goal, Hs, []) :- infer(Goal, Hs), !. rule_phase_infer(Goal, [], [F]) :- is_a_fact_from_rule(F, _, Goal, _, _, _), !. %=============================================================================== % is_a_fact_from_rule(+FactRef, +From, +Goal, +Name, +Details, +RuleSort) % % FactRef: a unique reference (a fact number) for the fact. % From: Operation for which the rule was applied. May be: % conc % rule_phase_inference % fact(N) (a fact number) % hyp(N) (a hyp number) % % Goal: The goal satisfied. These are logically hypotheses terms % that are known to be true. % % Name: Always hyp(HypNo) - the hypnumber will contain what used to be % stored in 'Goal'. % % Details: Rule match details - always empty % % RuleSort: As above %=============================================================================== is_a_fact_from_rule(F, From, Goal, Name, Details, RuleSort) :- get_fact_from_rule(Goal, F, From, Name, Details, RuleSort). is_a_fact_from_rule(F, From, X=Y, Name, Details, RuleSort) :- get_fact_from_rule(Y=X, F, From, Name, Details, RuleSort). is_a_fact_from_rule(F, From, X<=Y, Name, Details, RuleSort) :- get_fact_from_rule(X=X, F, From, Name, Details, RuleSort). is_a_fact_from_rule(F, From, X<=Y, Name, Details, RuleSort) :- get_fact_from_rule(Y>X, F, From, Name, Details, RuleSort). is_a_fact_from_rule(F, From, X>=Y, Name, Details, RuleSort) :- get_fact_from_rule(X>Y, F, From, Name, Details, RuleSort). is_a_fact_from_rule(F, From, X>=Y, Name, Details, RuleSort) :- get_fact_from_rule(Y<=X, F, From, Name, Details, RuleSort). is_a_fact_from_rule(F, From, X>=Y, Name, Details, RuleSort) :- get_fact_from_rule(YX, F, From, Name, Details, RuleSort). is_a_fact_from_rule(F, From, X>Y, Name, Details, RuleSort) :- get_fact_from_rule(YY, Name, Details, RuleSort) :- get_fact_from_rule(Y<>X, F, From, Name, Details, RuleSort). is_a_fact_from_rule(F, From, X<>Y, Name, Details, RuleSort) :- get_fact_from_rule(XY, Name, Details, RuleSort) :- get_fact_from_rule(X>Y, F, From, Name, Details, RuleSort). is_a_fact_from_rule(F, From, X<>Y, Name, Details, RuleSort) :- get_fact_from_rule(YY, Name, Details, RuleSort) :- get_fact_from_rule(Y>X, F, From, Name, Details, RuleSort). %------------------------------------------------------------------------------- % dont_already_know(+Formula_Exp) %------------------------------------------------------------------------------- dont_already_know(Formula) :- % discard as a candidate new fact, if so rule_phase_infer(Formula, _, _), !, fail. dont_already_know(_Formula) :- % otherwise succeed !. %------------------------------------------------------------------------------- % does_not_typecheck_as_boolean(+Boolean_Exp) % % Fail if G is an boolean expression; Otherwise succeeds. %------------------------------------------------------------------------------- does_not_typecheck_as_boolean(G) :- checkhastype(G, boolean), !, fail. does_not_typecheck_as_boolean(_). 
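%===============================================================================
% Illustrative sketch only -- the cut-fail negation idiom used above by
% dont_already_know and does_not_typecheck_as_boolean: the first clause proves
% the positive property and then deliberately fails the whole call, while the
% second clause succeeds in every other case. The predicate name is
% hypothetical.
%===============================================================================
udr_example_is_not_even(N) :-
    0 is N mod 2,    % the positive check succeeds ...
    !,
    fail.            % ... so the negated query must fail
udr_example_is_not_even(_).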
%------------------------------------------------------------------------------- % evaluate_immediate_condition(+Cond) % % Succeed if the condition can be satisfied, fail otherwise. %------------------------------------------------------------------------------- evaluate_immediate_condition(X):- var(X), !, fail. evaluate_immediate_condition((X,Y)) :- % and evaluate_immediate_condition(X), !, evaluate_immediate_condition(Y). evaluate_immediate_condition((X;_)) :- % or: lhs evaluate_immediate_condition(X). evaluate_immediate_condition((_;Y)) :- % or: rhs evaluate_immediate_condition(Y). evaluate_immediate_condition(integer(N)) :- novars(N), signed_integer(N). evaluate_immediate_condition(integer(N)) :- integer(N). evaluate_immediate_condition(intexp(N)) :- novars(N), intexp(N). evaluate_immediate_condition(checktype(E,T)) :- novars(E), evaluate_immediate_condition_checktype(T), !, checktype(E,T). evaluate_immediate_condition(simplify(E,V)) :- novars(E), evaluate_immediate_condition_simplify(V), !, simplify(E,V). %------------------------------------------------------------------------------- % evaluate_immediate_condition_checktype(+E,+T) %------------------------------------------------------------------------------- evaluate_immediate_condition_checktype(T) :- var(T). evaluate_immediate_condition_checktype(T) :- atom(T). %------------------------------------------------------------------------------- % evaluate_immediate_condition_simplify(+V) %------------------------------------------------------------------------------- evaluate_immediate_condition_simplify(V) :- var(V). evaluate_immediate_condition_simplify(V) :- novars(V). %------------------------------------------------------------------------------- % determine_facts_and_promote_to_hypotheses(From, RuleMatchDetails, % NewRuleMatchDetails) :- % % Promote facts into the hypotheses and convert references to them. %------------------------------------------------------------------------------- determine_facts_and_promote_to_hypotheses(From, RuleMatchDetails, NewRuleMatchDetails) :- determine_facts_and_promote_to_hypotheses_x(RuleMatchDetails, Conds, NewRuleMatchDetails, NewConds), !, gather_facts_list([from(From)|Conds], FactNos), sort(FactNos, SortedFactNos), promote_to_hypotheses(SortedFactNos), convert_conditions_list(Conds, NewConds). %------------------------------------------------------------------------------- % determine_facts_and_promote_to_hypotheses_x(+RuleMatchDetails, +Conds, % -NewRuleMatchDetails, -NewConds). %------------------------------------------------------------------------------- determine_facts_and_promote_to_hypotheses_x(inference(Conds), Conds, inference(NewConds), NewConds). determine_facts_and_promote_to_hypotheses_x(rewrite(Goal, Conds), Conds, rewrite(Goal, NewConds), NewConds). %------------------------------------------------------------------------------- % gather_facts_list([H|T], FactsId_IntList) %------------------------------------------------------------------------------- gather_facts_list([], []). gather_facts_list([H|T], Facts) :- gather_facts(H, F1), !, gather_facts_list(T, F2), !, append(F1, F2, Facts). %------------------------------------------------------------------------------- % gather_facts(A, Id_IntList). %------------------------------------------------------------------------------- gather_facts(from(fact(N)), [N]). gather_facts(from(_), []). gather_facts(proved(_, _, F), F). gather_facts(_, []). 
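%-------------------------------------------------------------------------------
% Worked example (hypothetical values), assuming the clauses above behave as
% written: gather_facts_list([from(fact(5)), proved(x >= 0, [3], [2, 4])], Fs)
% binds Fs to [5, 2, 4]; determine_facts_and_promote_to_hypotheses then sorts
% this list and promotes facts 2, 4 and 5 to hypotheses.
%-------------------------------------------------------------------------------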
%------------------------------------------------------------------------------- % promote_to_hypotheses(Id_IntList) %------------------------------------------------------------------------------- promote_to_hypotheses([]). promote_to_hypotheses([H_Id_Int|T_Id_IntList]) :- promote_fact_to_hypothesis(H_Id_Int), !, promote_to_hypotheses(T_Id_IntList). %------------------------------------------------------------------------------- % promote_to_hypotheses(IntList) %------------------------------------------------------------------------------- %Add hyp for fact not seen before. promote_fact_to_hypothesis(Id_Int) :- get_fact_from_rule(Fact, Id_Int, From, Name, RuleMatchDetails, RuleSort), promote_fact_to_hypothesis_x(RuleMatchDetails, Conds, NewRuleMatchDetails, NewConds), !, convert_conditions_list(Conds, NewConds), add_hyp(Fact, x, HypNo), issue_message('Applied rule ', Name), assert_log_fact(applied_rule, [HypNo, Fact, Name, NewRuleMatchDetails, RuleSort]), !, %Replace the processed fact, with a form that records the mapping. The %mapping form has empty RuleMatchDetails, and thus will not trigger the %adding of a hypothesis again. prune_fact_from_rule(Fact, Id_Int, From, Name, RuleMatchDetails, RuleSort), add_fact_from_rule(true, Id_Int, From, hyp(HypNo), [], RuleSort), % record the mapping !. %Silently skip over a fact that has been seen before. promote_fact_to_hypothesis(Id_Int) :- get_fact_from_rule(true, Id_Int, _From, hyp(_HypNo), [], _RuleSort), !. %------------------------------------------------------------------------------- % promote_fact_to_hypothesis_x(+RuleMatchDetails, Conds, % -NewRuleMatchDetails, -NewConds) %------------------------------------------------------------------------------- promote_fact_to_hypothesis_x(inference(Conds), Conds, inference(NewConds), NewConds). promote_fact_to_hypothesis_x(rewrite(Goal, Conds), Conds, rewrite(Goal, NewConds), NewConds). %=============================================================================== %############################################################################### % UTILITY PREDICATES %############################################################################### %=============================================================================== % convert_conditions_list(+Cond_List, -Cond_List) %=============================================================================== convert_conditions_list([], []). convert_conditions_list([Cond|Conds], [NewCond|NewConds]) :- convert_condition(Cond, NewCond), !, convert_conditions_list(Conds, NewConds). %=============================================================================== % convert_condition(proved(+Formula_Exp, +Id_IntList, +Fs), % proved(+Formula_Exp, -NewId_IntList, [])) :- %=============================================================================== convert_condition(proved(Formula_Exp, A_HypId_IntList, F_HypId_IntList), proved(Formula_Exp, NewHypId_IntList, [])) :- lookup_fact_mappings(F_HypId_IntList, B_HypId_IntList), append(A_HypId_IntList, B_HypId_IntList, A_B_HypId_IntList), !, sort(A_B_HypId_IntList, NewHypId_IntList). %=============================================================================== % lookup_fact_mappings(+ExprList, -Id_IntList) %=============================================================================== lookup_fact_mappings([], []). lookup_fact_mappings([H_Exp|T_ExpList], [H_Int|T_IntList]) :- get_fact_from_rule(true, H_Exp, _, hyp(H_Int), [], _), !, lookup_fact_mappings(T_ExpList, T_IntList). 
lookup_fact_mappings([H_Exp|T_ExpList], [H_Int|T_IntList]) :- get_fact_from_rule(Fact_Exp, H_Exp, _, _, _, _), get_hyp(Fact_Exp, _, H_Int), !, lookup_fact_mappings(T_ExpList, T_IntList). %=============================================================================== %=============================================================================== % add_fact(+Fact_Atom) % % Add fact if not already known. %=============================================================================== add_fact(Fact_Atom) :- call(Fact_Atom), !. add_fact(Fact_Atom) :- assertz(Fact_Atom), !. %=============================================================================== %############################################################################### % Predicates for managing temporary state. %############################################################################### %=============================================================================== % Temporary state: used_unique_reference/2 %=============================================================================== :- dynamic(used_unique_reference/2). %=============================================================================== % get_unique_reference(+Label_Atom, -Id_Int) % % Returns unique reference for given label by incrementing the last % reference by one if a reference for the label exists; otherwise, 1. %=============================================================================== get_unique_reference(Label, N) :- retract(used_unique_reference(Label, M)), !, N is M+1, asserta(used_unique_reference(Label, N)), !. get_unique_reference(Label, 1) :- asserta(used_unique_reference(Label, 1)), !. %=============================================================================== % prune_all_unique_references % % Retract all used_unique_reference from the database. %=============================================================================== prune_all_unique_references:- retractall(used_unique_reference(_, _)), !. %=============================================================================== %=============================================================================== % Temporary state: get_fact_from_rule %=============================================================================== :- dynamic get_fact_from_rule/6. %=============================================================================== % Schema for get_fact_from_rule % get_fact_from_rule(Goal_Exp, RefId_Int, % From, Name_CharList, RuleMatchDetails, RuleSort) % % Goal_Exp - The goal satisfied. It seems that these are logically hypotheses % terms that are known to be true. % % RefId_Int - a unique reference number for the fact. % % From - Operation for which the rule was applied. May be: % conc % rule_phase_inference % fact(N) (a fact number) % hyp(N) (a hyp number) % % % Name_CharList - Name of applied rule of the form: file:rulename. % % RuleMatchDetails - How the rule was applied of the form: % inference(ProvedConditions) % rewrite(Formula, ProvedConditions) % % Where: ProvedConditions takes the form: % [proved(Condition, HypNumberList, FactNumberList), ...] % % RuleSort - the rule classification. May be: % user_inference_rule % inference_rule % %=============================================================================== %=============================================================================== % add_fact_from_rule(+Goal, +Ref, +From, +Name, +RuleMatchDetails, +RuleSort) % % Record derivation of Goal from user defined rules. 
%=============================================================================== add_fact_from_rule(Goal, Ref, From, Name, RuleMatchDetails, RuleSort):- assertz(get_fact_from_rule(Goal, Ref, From, Name, RuleMatchDetails, RuleSort)). %=============================================================================== % prune_fact_from_rule(?Goal, ?Ref, ?From, ?Name, ?RuleMatchDetails, ?RuleSort) % % Retract matching get_fact_from_rule. %=============================================================================== prune_fact_from_rule(Goal, Ref, From, Name, RuleMatchDetails, RuleSort):- retract(get_fact_from_rule(Goal, Ref, From, Name, RuleMatchDetails, RuleSort)). %=============================================================================== % prune_all_fact_from_rule % % Retract all get_fact_from_rule. %=============================================================================== prune_all_fact_from_rule :- retractall(get_fact_from_rule(_,_,_,_,_,_)). %=============================================================================== %=============================================================================== % Temporary state: get_candidate_fact %=============================================================================== :- dynamic get_candidate_fact/2. %=============================================================================== % add_candidate_fact(+N, +Formula) %=============================================================================== add_candidate_fact(N, Formula):- assertz(get_candidate_fact(N, Formula)). %=============================================================================== % prune_all_candidate_fact %=============================================================================== prune_all_candidate_fact:- retractall(get_candidate_fact(_, _)). %=============================================================================== %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/data__provenance.pro0000644000175000017500000001352311753202337020623 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Provides access to all information related to the provenance of the target % proof problem. 
%############################################################################### %############################################################################### % MODULE %############################################################################### :- module(data__provenance, [get_provenance_framework/1, add_provenance_framework/1, get_provenance_proof_file_kind/1, add_provenance_proof_file_kind/1, get_provenance_date_time/2, add_provenance_date_time/2, get_provenance_banner/1, add_provenance_banner/1, get_provenance_subprogram_identifier/1, add_provenance_subprogram_identifier/1, path_functions/0, save_data__provenance/0]). %############################################################################### % DEPENDENCIES %############################################################################### :- use_module('data__formats.pro', [add_state/2, add_type/2]). :- use_module('ioutilities.pro', [write_terms_to_file/2]). :- use_module('data__data_files.pro', [must_get_datafiles_debug/2]). %############################################################################### % TYPES %############################################################################### :- add_type('Framework', [spark, pascal]). %############################################################################### % DATA %############################################################################### :- add_state(get_provenance_framework, get_provenance_framework('Framework')). :- dynamic(get_provenance_framework/1). :- add_state(get_provenance_proof_file_kind, get_provenance_proof_file_kind('ProofFileKind')). :- dynamic(get_provenance_proof_file_kind/1). :- add_state(get_provenance_date_time, get_provenance_date_time('Date_Atom', 'Time_Atom')). :- dynamic(get_provenance_date_time/2). :- add_state(get_provenance_banner, get_provenance_banner('Line_AtomList')). :- dynamic(get_provenance_banner/1). :- add_state(get_provenance_subprogram_identifier, get_provenance_subprogram_identifier('SubprogramIdentifier_Atom')). :- dynamic(get_provenance_subprogram_identifier/1). %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % Add. %=============================================================================== add_provenance_framework(Framework):- assert(get_provenance_framework(Framework)), !. add_provenance_proof_file_kind(ProofFileKind):- assert(get_provenance_proof_file_kind(ProofFileKind)), !. add_provenance_date_time(Date_Atom, Time_Atom):- assert(get_provenance_date_time(Date_Atom, Time_Atom)), !. add_provenance_banner(Line_AtomList):- assert(get_provenance_banner(Line_AtomList)), !. add_provenance_subprogram_identifier(SubprogramIdentifier_Atom):- assert(get_provenance_subprogram_identifier(SubprogramIdentifier_Atom)), !. %=============================================================================== %=============================================================================== % save_data__provenance. 
%=============================================================================== save_data__provenance:- must_get_datafiles_debug(data__provenance, DebugFile_Atom), write_terms_to_file(DebugFile_Atom, [data__provenance:get_provenance_framework/1, data__provenance:get_provenance_proof_file_kind/1, data__provenance:get_provenance_date_time/2, data__provenance:get_provenance_banner/1, data__provenance:get_provenance_subprogram_identifier/1]), !. %=============================================================================== %=============================================================================== % Refactor. %=============================================================================== path_functions:- get_provenance_proof_file_kind(path_functions), !. %=============================================================================== %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/data__switches.pro0000644000175000017500000004255211753202337020320 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Provides access to all configurable switches. These may be configured by % the user on the command line.
%############################################################################### %############################################################################### % MODULE %############################################################################### :- module(data__switches, [get_source_of_switch/2, add_source_of_switch/2, get_switch_input_file/1, add_switch_input_file/1, get_switch_log/1, add_switch_log/1, get_switch_wrap/1, add_switch_wrap/1, get_switch_verbose/1, add_switch_verbose/1, get_switch_user_rules/1, add_switch_user_rules/1, get_switch_plain/1, add_switch_plain/1, get_switch_typecheck_only/1, add_switch_typecheck_only/1, get_switch_renum/1, add_switch_renum/1, get_switch_simplification/1, add_switch_simplification/1, get_switch_standardisation/1, add_switch_standardisation/1, get_switch_rule_substitution/1, add_switch_rule_substitution/1, get_switch_nocontradiction_hunt/1, add_switch_nocontradiction_hunt/1, get_switch_substitution_elimination/1, add_switch_substitution_elimination/1, get_switch_expression_reduction/1, add_switch_expression_reduction/1, get_switch_complexity_limit/1, add_switch_complexity_limit/1, get_switch_depth_limit/1, add_switch_depth_limit/1, get_switch_inference_limit/1, add_switch_inference_limit/1, get_switch_help/1, add_switch_help/1, get_switch_version/1, add_switch_version/1, get_switch_empty/1, add_switch_empty/1, get_switch_usage/1, add_switch_usage/1, get_switch_deadpaths/1, add_switch_deadpaths/1, get_switch_hyp_limit/1, add_switch_hyp_limit/1, prune_source_of_switch/1, prune_switch_log/0, prune_switch_wrap/0, prune_switch_verbose/0, prune_switch_user_rules/0, prune_switch_plain/0, prune_switch_typecheck_only/0, prune_switch_renum/0, prune_switch_simplification/0, prune_switch_standardisation/0, prune_switch_rule_substitution/0, prune_switch_nocontradiction_hunt/0, prune_switch_substitution_elimination/0, prune_switch_expression_reduction/0, prune_switch_complexity_limit/0, prune_switch_depth_limit/0, prune_switch_inference_limit/0, prune_switch_help/0, prune_switch_version/0, prune_switch_empty/0, prune_switch_usage/0, prune_switch_deadpaths/0, prune_switch_hyp_limit/0, plain/1, complexity_limit/1, inference_limit/1, depth_limit/1, substitution_elimination/1, substitution_elimination/2, simplification/1, simplification/2, contradiction_hunt/1, contradiction_hunt/2, expression_reduction/1, expression_reduction/2, standardisation/1, standardisation/2, rule_substitution/1, rule_substitution/2, renumber_conclusions/1, typecheck_only/1, nowrap_output_files/0, standardise_in_infer/1, typechecking_during_load/1]). %############################################################################### % DEPENDENCIES %############################################################################### :- use_module('data__formats.pro', [add_state/2, add_type/2]). %############################################################################### % TYPES %############################################################################### :- add_type('SwitchSource', [auto_set, user_set]). :- add_type('OnOrOff', [on, off]). :- add_type('LogFile', [no_log_file, yes_log_file, provided_log_file('LogFile_Atom')]). :- add_type('SelectVCs', [all, none, exclude('IntList')]). :- add_type('UsageFile', [no_usage_file, yes_usage_file, provided_usage_file('UsageFile_Atom')]). 
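% Illustrative note (added commentary, not part of the original source): for
% the 'SelectVCs' type above, all selects a phase for every VC, while
% exclude(IntList) switches it off for just the listed VCs. For example, with
% get_switch_simplification(exclude([3, 5])) recorded, the query
% simplification(3, off) succeeds and simplification(4, off) fails (see
% simplification/1 and simplification/2 further below).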
:- add_type('Switch', [switch_input_file, switch_log, switch_wrap, switch_verbose, switch_user_rules, switch_plain, switch_typecheck_only, switch_renum, switch_simplification, switch_standardisation, switch_rule_substitution, switch_nocontradiction_hunt, switch_substitution_elimination, switch_expression_reduction, switch_complexity_limit, switch_depth_limit, switch_inference_limit, switch_help, switch_version, switch_usage, switch_empty, switch_deadpaths]). %############################################################################### % DATA %############################################################################### :- add_state(get_source_of_switch, get_source_of_switch('Switch', 'SwitchSource')). :- dynamic(get_source_of_switch/2). :- add_state(get_switch_input_file, get_switch_input_file('InputFile_Atom')). :- dynamic(get_switch_input_file/1). :- add_state(get_switch_log, get_switch_log('LogFile')). :- dynamic(get_switch_log/1). :- add_state(get_switch_wrap, get_switch_wrap('OnOrOff')). :- dynamic(get_switch_wrap/1). :- add_state(get_switch_verbose, get_switch_verbose('OnOrOff')). :- dynamic(get_switch_verbose/1). :- add_state(get_switch_user_rules, get_switch_user_rules('OnOrOff')). :- dynamic(get_switch_user_rules/1). :- add_state(get_switch_plain, get_switch_plain('OnOrOff')). :- dynamic(get_switch_plain/1). :- add_state(get_switch_typecheck_only, get_switch_typecheck_only('OnOrOff')). :- dynamic(get_switch_typecheck_only/1). :- add_state(get_switch_renum, get_switch_renum('OnOrOff')). :- dynamic(get_switch_renum/1). :- add_state(get_switch_simplification, get_switch_simplification('SelectVCs')). :- dynamic(get_switch_simplification/1). :- add_state(get_switch_standardisation, get_switch_standardisation('SelectVCs')). :- dynamic(get_switch_standardisation/1). :- add_state(get_switch_rule_substitution, get_switch_rule_substitution('SelectVCs')). :- dynamic(get_switch_rule_substitution/1). :- add_state(get_switch_nocontradiction_hunt, get_switch_nocontradiction_hunt('SelectVCs')). :- dynamic(get_switch_nocontradiction_hunt/1). :- add_state(get_switch_substitution_elimination, get_switch_substitution_elimination('SelectVCs')). :- dynamic(get_switch_substitution_elimination/1). :- add_state(get_switch_expression_reduction, get_switch_expression_reduction('SelectVCs')). :- dynamic(get_switch_expression_reduction/1). :- add_state(get_switch_complexity_limit, get_switch_complexity_limit('Int')). :- dynamic(get_switch_complexity_limit/1). :- add_state(get_switch_depth_limit, get_switch_depth_limit('Int')). :- dynamic(get_switch_depth_limit/1). :- add_state(get_switch_inference_limit, get_switch_inference_limit('Int')). :- dynamic(get_switch_inference_limit/1). :- add_state(get_switch_help, get_switch_help('OnOrOff')). :- dynamic(get_switch_help/1). :- add_state(get_switch_version, get_switch_version('OnOrOff')). :- dynamic(get_switch_version/1). :- add_state(get_switch_empty, get_switch_empty('OnOrOff')). :- dynamic(get_switch_empty/1). :- add_state(get_switch_usage, get_switch_usage('UsageFile')). :- dynamic(get_switch_usage/1). :- add_state(get_switch_deadpaths, get_switch_deadpaths('OnOrOff')). :- dynamic(get_switch_deadpaths/1). :- add_state(get_switch_hyp_limit, get_switch_hyp_limit('Int')). :- dynamic(get_switch_hyp_limit/1). 
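% Illustrative note (added commentary, not part of the original source): every
% switch follows the same add/get/prune pattern over a dynamic predicate. For
% example, after the goal add_switch_verbose(on) has been called, the query
% get_switch_verbose(X) binds X = on, and prune_switch_verbose retracts that
% record again.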
%############################################################################### % PREDICATES %############################################################################### %=============================================================================== % Add. %=============================================================================== add_source_of_switch(Switch, SwitchSource):- assert(get_source_of_switch(Switch, SwitchSource)), !. add_switch_input_file(InputFile_Atom):- assert(get_switch_input_file(InputFile_Atom)), !. add_switch_log(LogFile):- assert(get_switch_log(LogFile)), !. add_switch_wrap(OnOrOff):- assert(get_switch_wrap(OnOrOff)), !. add_switch_verbose(OnOrOff):- assert(get_switch_verbose(OnOrOff)), !. add_switch_user_rules(OnOrOff):- assert(get_switch_user_rules(OnOrOff)), !. add_switch_plain(OnOrOff):- assert(get_switch_plain(OnOrOff)), !. add_switch_typecheck_only(OnOrOff):- assert(get_switch_typecheck_only(OnOrOff)), !. add_switch_renum(OnOrOff):- assert(get_switch_renum(OnOrOff)), !. add_switch_simplification(SelectVCs):- assert(get_switch_simplification(SelectVCs)), !. add_switch_standardisation(SelectVCs):- assert(get_switch_standardisation(SelectVCs)), !. add_switch_rule_substitution(SelectVCs):- assert(get_switch_rule_substitution(SelectVCs)), !. add_switch_nocontradiction_hunt(SelectVCs):- assert(get_switch_nocontradiction_hunt(SelectVCs)), !. add_switch_substitution_elimination(SelectVCs):- assert(get_switch_substitution_elimination(SelectVCs)), !. add_switch_expression_reduction(SelectVCs):- assert(get_switch_expression_reduction(SelectVCs)), !. add_switch_complexity_limit(Int):- assert(get_switch_complexity_limit(Int)), !. add_switch_depth_limit(Int):- assert(get_switch_depth_limit(Int)), !. add_switch_inference_limit(Int):- assert(get_switch_inference_limit(Int)), !. add_switch_help(OnOrOff):- assert(get_switch_help(OnOrOff)), !. add_switch_version(OnOrOff):- assert(get_switch_version(OnOrOff)), !. add_switch_empty(OnOrOff):- assert(get_switch_empty(OnOrOff)), !. add_switch_usage(UsageFile):- assert(get_switch_usage(UsageFile)), !. add_switch_deadpaths(OnOrOff):- assert(get_switch_deadpaths(OnOrOff)), !. add_switch_hyp_limit(Limit_Int):- assert(get_switch_hyp_limit(Limit_Int)), !. %=============================================================================== %=============================================================================== % Prune. %=============================================================================== prune_source_of_switch(Switch):- retract(get_source_of_switch(Switch, _SwitchSource)), !. prune_switch_log:- retract(get_switch_log(_LogFile)), !. prune_switch_wrap:- retract(get_switch_wrap(_OnOrOff)), !. prune_switch_verbose:- retract(get_switch_verbose(_OnOrOff)), !. prune_switch_user_rules:- retract(get_switch_user_rules(_OnOrOff)), !. prune_switch_plain:- retract(get_switch_plain(_OnOrOff)), !. prune_switch_typecheck_only:- retract(get_switch_typecheck_only(_OnOrOff)), !. prune_switch_renum:- retract(get_switch_renum(_OnOrOff)), !. prune_switch_simplification:- retract(get_switch_simplification(_SelectVCs)), !. prune_switch_standardisation:- retract(get_switch_standardisation(_SelectVCs)), !. prune_switch_rule_substitution:- retract(get_switch_rule_substitution(_SelectVCs)), !. prune_switch_nocontradiction_hunt:- retract(get_switch_nocontradiction_hunt(_SelectVCs)), !. prune_switch_substitution_elimination:- retract(get_switch_substitution_elimination(_SelectVCs)), !. 
prune_switch_expression_reduction:- retract(get_switch_expression_reduction(_SelectVCs)), !. prune_switch_complexity_limit:- retract(get_switch_complexity_limit(_Int)), !. prune_switch_depth_limit:- retract(get_switch_depth_limit(_Int)), !. prune_switch_inference_limit:- retract(get_switch_inference_limit(_Int)), !. prune_switch_help:- retract(get_switch_help(_OnOrOff)), !. prune_switch_version:- retract(get_switch_version(_OnOrOff)), !. prune_switch_empty:- retract(get_switch_empty(_OnOrOff)), !. prune_switch_usage:- retract(get_switch_usage(_SwitchFile)), !. prune_switch_deadpaths:- retract(get_switch_deadpaths(_OnOrOff)), !. prune_switch_hyp_limit:- retract(get_switch_hyp_limit(_Limit)), !. %=============================================================================== %=============================================================================== % Refactor. %=============================================================================== plain(STATUS):- get_switch_plain(STATUS). complexity_limit(VALUE):- get_switch_complexity_limit(VALUE). inference_limit(VALUE):- get_switch_inference_limit(VALUE). depth_limit(VALUE):- get_switch_depth_limit(VALUE). substitution_elimination(on):- get_switch_substitution_elimination(all). substitution_elimination(VC, off):- get_switch_substitution_elimination(exclude(IntList)), member(VC,IntList). simplification(on):- get_switch_simplification(all). simplification(VC, off):- get_switch_simplification(exclude(IntList)), member(VC,IntList). contradiction_hunt(off):- get_switch_nocontradiction_hunt(all). contradiction_hunt(VC, off):- get_switch_nocontradiction_hunt(exclude(IntList)), member(VC,IntList). expression_reduction(on):- get_switch_expression_reduction(all). expression_reduction(VC, off):- get_switch_expression_reduction(exclude(IntList)), member(VC,IntList). standardisation(on):- get_switch_standardisation(all). standardisation(VC, off):- get_switch_standardisation(exclude(IntList)), member(VC,IntList). rule_substitution(on):- get_switch_rule_substitution(all). rule_substitution(VC, off):- get_switch_rule_substitution(exclude(IntList)), member(VC,IntList). renumber_conclusions(STATUS):- get_switch_renum(STATUS). typecheck_only(STATUS):- get_switch_typecheck_only(STATUS). nowrap_output_files:- get_switch_wrap(off). standardise_in_infer(off). typechecking_during_load(on). %=============================================================================== %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/load__rules.pro0000644000175000017500000006541311753202337017630 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. 
% % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Loads all rules for this session. %############################################################################### %############################################################################### %MODULE %############################################################################### :- module(load__rules, [load_rules/0]). %############################################################################### %DEPENDENCIES %############################################################################### :- use_module('data__declarations.pro', [find_core_type/2]). :- use_module('data__data_files.pro', [get_datafiles_rule/1, get_datafiles_global_user_rule/1, get_datafiles_local_user_rule/1]). :- use_module('data__rules.pro', [add_rule/8, get_rule/8, prune_rule/8]). :- use_module('data__switches.pro', [get_switch_user_rules/1]). :- use_module('parseutilities.pro', [inside_selected_character_class/2, parse_atom/5, parse_atom_silent/4]). :- use_module('simplifier_ioutilities.pro', [convert_file_for_display/2]). :- use_module('data__formats.pro', [add_type/2]). :- use_module('ioutilities.pro', [show_error/2]). %############################################################################### %TYPES %############################################################################### :- add_type('RuleStatus', [accepted, rejected]). %############################################################################### %DATA %############################################################################### %############################################################################### %PREDICATES %############################################################################### %=============================================================================== % load_rules. %------------------------------------------------------------------------------- % Load the rules for this session. The rules are retrieved from the % available rule files. %=============================================================================== load_rules :- process_standard_rulefile, fail. load_rules :- get_switch_user_rules(on), user_rulefile_name(RuleFile_Atom), convert_file_for_display(RuleFile_Atom, DisplayRuleFile_Atom), write('Reading '), write(DisplayRuleFile_Atom), write(' (for user-defined proof rules)'), nl, user:assert_log_fact(read_in_user_rule_file, DisplayRuleFile_Atom), process_rulefile(rlu, RuleFile_Atom), fail. load_rules :- restructure_rules, !. %------------------------------------------------------------------------------- user_rulefile_name(FILE):- get_datafiles_global_user_rule(FILE). user_rulefile_name(FILE):- get_datafiles_local_user_rule(FILE). %------------------------------------------------------------------------------- process_standard_rulefile :- get_datafiles_rule(RuleFile_Atom), !, see(RuleFile_Atom), repeat, read(RULE), process_rule(RULE, RuleFile_Atom), /* UNTIL */ RULE = end_of_file, !, seen. %------------------------------------------------------------------------------- process_rule(RuleName_Atom: LHS_Exp may_be_replaced_by RHS_Exp, RuleFile_Atom) :- ground(LHS_Exp), ground(RHS_Exp), user:simplify(RHS_Exp, NewRHS_Exp), add_rule(RuleFile_Atom, RuleName_Atom, rls, hint_rewrite_one_direction, equation, [LHS_Exp], [NewRHS_Exp], []), !. 
process_rule(RuleName_Atom: LHS_Exp may_be_replaced_by RHS_Exp if Condition_ExpList, RuleFile_Atom) :- add_rule(RuleFile_Atom, RuleName_Atom, rls, hint_rewrite_one_direction, equation, [LHS_Exp], [RHS_Exp], Condition_ExpList), !. process_rule(RuleName_Atom: LHS_Exp may_be_replaced_by RHS_Exp, RuleFile_Atom) :- add_rule(RuleFile_Atom, RuleName_Atom, rls, hint_rewrite_one_direction, equation, [LHS_Exp], [RHS_Exp], []), !. process_rule(RuleName_Atom: RHS_Exp may_be_deduced, RuleFile_Atom) :- nonvar(RHS_Exp), add_rule(RuleFile_Atom, RuleName_Atom, rls, hint_direct_introduce, implication, [], [RHS_Exp], []), !. process_rule(RuleName_Atom: RHS_Exp may_be_deduced_from LHS_ExpList, RuleFile_Atom) :- add_rule(RuleFile_Atom, RuleName_Atom, rls, hint_conditional_introduce, implication, LHS_ExpList, [RHS_Exp], []), !. process_rule(_X, _File) :- !. %=============================================================================== %process_rulefile(+RuleSource, +RuleFile_Atom). %------------------------------------------------------------------------------- % Retrieve rules from a rule file. %=============================================================================== process_rulefile(RuleSource, RuleFile_Atom) :- open(RuleFile_Atom, read, RuleFile_Stream), process_rulefile_x(RuleSource, RuleFile_Atom, RuleFile_Stream), close(RuleFile_Stream), !. %------------------------------------------------------------------------------- process_rulefile_x(RuleSource, RuleFile_Atom, RuleFile_Stream):- repeat, on_exception(Exception, read(RuleFile_Stream, CandidateRule_Any), user_rule_syntax_error(Exception, RuleFile_Atom)), process_candidate_rule(RuleSource, RuleFile_Atom, CandidateRule_Any), !. %------------------------------------------------------------------------------- % Finish at end_of_file. process_candidate_rule(_RuleSource, _RuleFile_Atom, CandidateRule_Any) :- CandidateRule_Any == end_of_file, !. % Process and backtrack if not end of file. process_candidate_rule(RuleSource, RuleFile_Atom, CandidateRule_Any) :- process_candidate_rule_x(RuleSource, RuleFile_Atom, CandidateRule_Any), fail. %------------------------------------------------------------------------------- process_candidate_rule_x(RuleSource, RuleFile_Atom, CandidateRule_Any) :- check_candidate_rule(RuleFile_Atom, CandidateRule_Any), store_rule(RuleSource, RuleFile_Atom, CandidateRule_Any), !. %------------------------------------------------------------------------------- store_rule(RuleSource, RuleFile_Atom, (RuleId: Rule_Exp)):- % Extract rule components. extract_items_from_rule_exp(Rule_Exp, RuleHeuristic, RuleLogic, LHS_ExpList, RHS_ExpList, Condition_ExpList), add_rule(RuleFile_Atom, RuleId, RuleSource, RuleHeuristic, RuleLogic, LHS_ExpList, RHS_ExpList, Condition_ExpList), !. %------------------------------------------------------------------------------- user_rule_syntax_error(Exception, File) :- Exception = error(_, syntax_error(_,LineNumber,Message,_,_)), atom_concat(Message, ' on line ', Message2), number_codes(LineNumber, LineNumberCodes), atom_codes(LineNumberAtom, LineNumberCodes), atom_concat(Message2, LineNumberAtom, Message3), rulefile_complain('Prolog syntax error', Message3, File), fail. %=============================================================================== %check_candidate_rule(+RuleFile_Atom, +WholeRule_Any). %------------------------------------------------------------------------------- % Check purely for legal rule syntax (not semantics). % Checks are: % - Rule is not just a variable. 
% - Rule is of the form "Name: Rule", % - The Name is either an atom, or of the form Atom(Integer), % - The atomic rulename starts with a lowercase letter, and consists of % lowercase letters, digits and underscores, % - The integer argument in the name, if present, is non-negative, % - The Rule part is one of: % * Predicate may_be_deduced[_from Conditions] % * OldExpr may_be_replaced_by NewExpr [if Conditions] % * OldExpr & NewExpr are_interchangeable [if Conditions], % - The Conditions part, if present, is a well-formed list, % - Any goal(Immediate) elements of the Conditions list has a Immediate part % which is one of: % * (X,Y) ["and"] % * (X;Y) ["or"] % * integer(X) % * intexp(X) % * checktype(X,Y) % * simplify(X,Y), % - The rule Name is unique within the given File. % % If any of these conditions do not hold, the predicate fails. As errors % are encountered they are logged and displayed on screen. %=============================================================================== check_candidate_rule(RuleFile_Atom, WholeRule_Any) :- check_rule_structure(RuleFile_Atom, WholeRule_Any, RuleStatus), check_candidate_rule_x(RuleStatus), !. %------------------------------------------------------------------------------- % Succeed if rule was accepted. check_candidate_rule_x(accepted):- !. % Fail if rule was not accepted. check_candidate_rule_x(rejected):- !, fail. %------------------------------------------------------------------------------- % It is an error to only have a prolog variable. check_rule_structure(RuleFile_Atom, WholeRule_Any, rejected):- var(WholeRule_Any), rulefile_complain('Invalid rule (Prolog wildcard) in rulefile', [], RuleFile_Atom), !. % Rule is to be of form: Name: Rule check_rule_structure(RuleFile_Atom, (RuleId: Rule_Exp), RuleStatus):- check_rule_id(RuleFile_Atom, RuleId, One_RuleStatus), check_rule_exp(RuleFile_Atom, RuleId, Rule_Exp, Two_RuleStatus), determine_overall_rule_status([One_RuleStatus, Two_RuleStatus], RuleStatus), !. % From above, rule is off the wrong structure. check_rule_structure(RuleFile_Atom, WholeRule_Any, rejected):- rulefile_complain('Illegal rule syntax in rulefile', WholeRule_Any, RuleFile_Atom), !. %=============================================================================== %check_rule_id(+RuleFile_Atom, +RuleId, -RuleStatus). %------------------------------------------------------------------------------- % Checks the rule identifier, storing and reporting any errors, and setting % RuleStatus accordingly. %=============================================================================== % It is an error to only have a prolog variable. check_rule_id(RuleFile_Atom, RuleId, rejected):- var(RuleId), rulefile_complain('Invalid rulename (Prolog wildcard) in rule', [], RuleFile_Atom), !. % Rule name is of form: name. check_rule_id(RuleFile_Atom, RuleId, RuleStatus):- functor(RuleId, Base_Atom, 0), check_base(RuleFile_Atom, Base_Atom, One_RuleStatus), check_unique(RuleFile_Atom, Base_Atom, Two_RuleStatus), determine_overall_rule_status([One_RuleStatus, Two_RuleStatus], RuleStatus), !. % Rule name is of form: name(10). check_rule_id(RuleFile_Atom, RuleId, RuleStatus):- functor(RuleId, Base_Atom, 1), arg(1, RuleId, Sub_Int), check_base(RuleFile_Atom, Base_Atom, One_RuleStatus), check_sub(RuleFile_Atom, Base_Atom, Sub_Int, Two_RuleStatus), check_unique(RuleFile_Atom, RuleId, Three_RuleStatus), determine_overall_rule_status([One_RuleStatus, Two_RuleStatus, Three_RuleStatus], RuleStatus), !. % From above, ruleid is off the wrong structure. 
check_rule_id(RuleFile_Atom, RuleId, rejected):- rulefile_complain('Illegal rulename syntax in rulefile', RuleId, RuleFile_Atom), !. %=============================================================================== % check_base(+RuleFile_Atom, +Base_Atom, -RuleStatus). %------------------------------------------------------------------------------- % Checks the rule base name, storing and reporting any errors, and setting % RuleStatus accordingly. %=============================================================================== % Make the parse_legal_base_name call visible to the spxref tool. :- public parse_legal_base_name/2. % Must be atom and in particular form. check_base(_RuleFile_Atom, Base_Atom, accepted):- atom(Base_Atom), atom_chars(Base_Atom, BaseName_CharList), phrase(parse_legal_base_name, BaseName_CharList), !. % From above, base rule name is flawed. % Specific error message for not starting with a lower case character. check_base(RuleFile_Atom, Base_Atom, rejected):- atom(Base_Atom), atom_chars(Base_Atom, [BaseName_CharList | _T_BaseName_CharList]), \+ inside_selected_character_class([lower_case_char], BaseName_CharList), rulefile_complain('Illegal rulename (does not start with a lowercase letter)', Base_Atom, RuleFile_Atom), !. % From above, base rule name is flawed. % Use general error message. check_base(RuleFile_Atom, Base_Atom, rejected):- rulefile_complain('Illegal rulename (only lowercase letters, digits and underscores allowed)', Base_Atom, RuleFile_Atom), !. %------------------------------------------------------------------------------- parse_legal_base_name --> % Must lead with a lower case character. parse_atom_silent([lower_case_char], oneormore), % May follow with lower case characters, numbers or underscores. parse_atom_silent([lower_case_char, numeric, under_score], zeroormore), !. %=============================================================================== % check_sub(+RuleFile_Atom, +Base_Atom, +Sub_Int, -RuleStatus). %------------------------------------------------------------------------------- % Checks the rule sub-part, storing and reporting any errors, and setting % RuleStatus accordingly. %=============================================================================== % Must be an integer, greater than or equal to 0. check_sub(_RuleFile_Atom, _Base_Atom, Sub_Int, accepted):- integer(Sub_Int), Sub_Int>= 0, !. % From above, is error. check_sub(RuleFile_Atom, Base_Atom, Sub_Int, rejected):- functor(RuleId, Base_Atom, 1), arg(1, RuleId, Sub_Int), rulefile_complain('Illegal rule number in rule family', RuleId, RuleFile_Atom), !. %=============================================================================== % check_unique(+RuleFile_Atom, +RuleId, -RuleStatus). %------------------------------------------------------------------------------- % Checks that the rule is unique, storing and reporting any errors, and % setting RuleStatus accordingly. %=============================================================================== check_unique(RuleFile_Atom, RuleId, accepted):- \+ get_rule(RuleFile_Atom, RuleId, _RuleSource, _RuleHeuristic, _RuleLogic, _LHS_ExpList, _RHS_ExpList, _Condition_ExpList), !. check_unique(RuleFile_Atom, RuleId, rejected):- rulefile_complain('Illegal redeclaration of rule within rulefile', RuleId, RuleFile_Atom), !. %=============================================================================== % check_rule_exp(+RuleFile_Atom, +Rule_Exp, -RuleStatus). 
%------------------------------------------------------------------------------- % Checks the rule expression is well formed, storing and reporting any % errors, and setting RuleStatus accordingly. %=============================================================================== % It is an error to only have a prolog variable. check_rule_exp(RuleFile_Atom, RuleId, Rule_Exp, rejected):- var(Rule_Exp), rulefile_complain('Illegal rule body (Prolog wildcard)', RuleId, RuleFile_Atom), !. check_rule_exp(RuleFile_Atom, RuleId, Rule_Exp, RuleStatus):- extract_items_from_rule_exp(Rule_Exp, _RuleHeuristic, RuleLogic, LHS_ExpList, _RHS_ExpList, Condition_ExpList), check_condition_list(RuleFile_Atom, RuleId, Condition_ExpList, One_RuleStatus), check_implication_conditions(RuleFile_Atom, RuleId, RuleLogic, LHS_ExpList, Two_RuleStatus), determine_overall_rule_status([One_RuleStatus, Two_RuleStatus], RuleStatus), !. % From above, error encountered. check_rule_exp(RuleFile_Atom, RuleId, _Rule_Exp, rejected) :- rulefile_complain('Illegal rule body (malformed expression) in rulefile', RuleId, RuleFile_Atom), !. %------------------------------------------------------------------------------- % If implication, do check left hand side as if conditions. check_implication_conditions(RuleFile_Atom, RuleId, implication, LHS_ExpList, RuleStatus):- check_condition_list(RuleFile_Atom, RuleId, LHS_ExpList, RuleStatus), !. % From above is not implication. check_implication_conditions(_RuleFile_Atom, _RuleId, _RuleLogic, _LHS_ExpList, accepted):- !. %------------------------------------------------------------------------------- check_condition_list(_RuleFile_Atom, _RuleId, Condition_ExpList, accepted):- % Must not be just a variable. \+ var(Condition_ExpList), legal_conditions(Condition_ExpList), !. % From above, is in error. check_condition_list(RuleFile_Atom, RuleId, _Condition_ExpList, rejected):- rulefile_complain('Illegal conditions list in rule', RuleId, RuleFile_Atom), !. %------------------------------------------------------------------------------- % All conditions valid. legal_conditions([]):- !. legal_conditions([H_Condition_Exp | T_Condition_ExpList]):- legal_condition(H_Condition_Exp), legal_conditions(T_Condition_ExpList). %------------------------------------------------------------------------------- % Accept if just a variable. legal_condition(Condition_Exp) :- var(Condition_Exp), !. % If an executable goal, must take one of the legal forms. legal_condition(goal(Goal_Exp)) :- !, check_legal_goal_exp(Goal_Exp), !. % If none of the above, is taken to be a valid expression. legal_condition(_):- !. %------------------------------------------------------------------------------- % Must not be just a variable. check_legal_goal_exp(Goal_Exp):- var(Goal_Exp), !, fail. % Legal terminal forms. check_legal_goal_exp(integer(_Any)). check_legal_goal_exp(intexp(_Any)). check_legal_goal_exp(checktype(_X_Any, _Y_Any)). check_legal_goal_exp(simplify(_X_Any,_Y_Any)). % Legal composite forms. check_legal_goal_exp((LHS_Exp,RHS_Exp)) :- !, check_legal_goal_exp(LHS_Exp), check_legal_goal_exp(RHS_Exp). check_legal_goal_exp((LHS_Exp;RHS_Exp)) :- !, check_legal_goal_exp(LHS_Exp), check_legal_goal_exp(RHS_Exp). %=============================================================================== % determine_overall_rule_status(+RuleStatus_List, -RuleStatus). %------------------------------------------------------------------------------- % Combines a collection of rule status indicators as a single status. 
This % mechanism allows for the rule checking to be proceed without many % confusing failure points and potential backtracking. %=============================================================================== % No occurrence of rejected as accepted. determine_overall_rule_status(RuleStatus_List, accepted):- \+ member(rejected, RuleStatus_List), !. % From above, rejected is present at least once. determine_overall_rule_status(_RuleStatus_List, rejected):- !. %=============================================================================== % rulefile_complain(+Complaint, +Argument, +File). %------------------------------------------------------------------------------- % Report an error in a rulefile. %=============================================================================== rulefile_complain(Complaint, Argument, File) :- convert_file_for_display(File, DisplayFile), print('!!! Erroneous entry in rulefile '), print(DisplayFile), nl, print('!!! '), print(Complaint), ( Argument = [] ; nl, print('!!! Involving: '), print(Argument) ), nl, nl, /* Record detection of syntax error in log file. */ user:assert_log_fact(rule_syntax_error(Complaint, Argument, File), []), !. %=============================================================================== % extract_items_from_rule_exp(+Rule_Exp, % -RuleHeuristic, % -RuleLogic, % -LHS_ExpList, % -RHS_ExpList, % -Condition_ExpList). %------------------------------------------------------------------------------- % Given a rule expression Rule_Exp, extract its various attributes. Will % fail if the rue expression is flawed. %=============================================================================== extract_items_from_rule_exp(RHS_Exp may_be_deduced, hint_direct_introduce, implication, [], [RHS_Exp], []):- !. extract_items_from_rule_exp(RHS_Exp may_be_deduced_from LHS_ExpList, hint_conditional_introduce, implication, LHS_ExpList, [RHS_Exp], []):- !. extract_items_from_rule_exp(LHS_Exp may_be_replaced_by RHS_Exp, hint_rewrite_one_direction, equation, [LHS_Exp], [RHS_Exp], []):- !. extract_items_from_rule_exp(LHS_Exp may_be_replaced_by RHS_Exp if Condition_ExpList, hint_rewrite_one_direction, equation, [LHS_Exp], [RHS_Exp], Condition_ExpList):- !. extract_items_from_rule_exp(LHS_Exp & RHS_Exp are_interchangeable, hint_rewrite_both_directions, equation, [LHS_Exp], [RHS_Exp], []):- !. extract_items_from_rule_exp(LHS_Exp & RHS_Exp are_interchangeable if Condition_ExpList, hint_rewrite_both_directions, equation, [LHS_Exp], [RHS_Exp], Condition_ExpList):- !. %=============================================================================== % restructure_rules. %------------------------------------------------------------------------------- % To ease the applicability of rules, they are restructured into a % standardised form. %=============================================================================== restructure_rules :- get_rule(RuleFile_Atom, RuleId, RuleSource, RuleHeuristic, RuleLogic, LHS_ExpList, RHS_ExpList, Condition_ExpList), prune_rule(RuleFile_Atom, RuleId, RuleSource, RuleHeuristic, RuleLogic, LHS_ExpList, RHS_ExpList, Condition_ExpList), restructure_rule_exp_list(LHS_ExpList, RestructuredLHS_ExpList), restructure_rule_exp_list(RHS_ExpList, RestructuredRHS_ExpList), restructure_rule_exp_list(Condition_ExpList, RestructuredCondition_ExpList), add_rule(RuleFile_Atom, RuleId, RuleSource, RuleHeuristic, RuleLogic, RestructuredLHS_ExpList, RestructuredRHS_ExpList, RestructuredCondition_ExpList), fail. restructure_rules :- !. 
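% Illustrative note (added commentary, not part of the original source):
% restructure_rule_exp/2 below rewrites the types of quantified variables to
% their core types. For example, assuming the hypothetical declaration
% find_core_type(my_subtype, integer) holds, the goal
%   restructure_rule_exp(for_all(X : my_subtype, p(X)), R)
% would bind R = for_all(X : integer, p(X)), leaving the body untouched.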
%------------------------------------------------------------------------------- restructure_rule_exp_list([], []):- !. restructure_rule_exp_list([H_In_Exp | T_In_ExpList], [H_Out_Exp | T_Out_ExpList]):- restructure_rule_exp(H_In_Exp, H_Out_Exp), restructure_rule_exp_list(T_In_ExpList, T_Out_ExpList). %------------------------------------------------------------------------------- % Preserve meta variables. restructure_rule_exp(In_m_Out_m_Exp, In_m_Out_m_Exp):- var(In_m_Out_m_Exp), !. % Adopt core type for quantified variables. As VCs are loaded, they are % processed by restructure_formula/2, replacing quantified variable types % with their core type. Here, the same normalization is applied to rules, % to increase the likelihood of the two forms matching. % Adopt core type for universally quantified variables. restructure_rule_exp(for_all(V:T, P), for_all(V:CT, NewP)) :- !, find_core_type(T, CT), restructure_rule_exp(P, NewP), !. % Adopt core type for extensionally quantified variables. restructure_rule_exp(for_some(V:T, P), for_some(V:CT, NewP)) :- !, find_core_type(T, CT), restructure_rule_exp(P, NewP), !. % From above, no changes. Recursive over arguments. restructure_rule_exp(In_Exp, Out_Exp):- In_Exp=..[Functor_Atom | Args_ExpList], restructure_rule_exp_list(Args_ExpList, NewArgs_ExpList), Out_Exp=..[Functor_Atom | NewArgs_ExpList], !. %############################################################################### %END-OF-FILE spark-2012.0.deb/simplifier/opdeclar.pro0000644000175000017500000001322011753202337017116 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Support to declare operator precedences and fixivity. %############################################################################### %############################################################################### % MODULE %############################################################################### :- module(opdeclar, [declare_operators/0, hide_operators/0]). 
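% Illustrative note (added commentary, not part of the original source): once
% declare_operators/0 below has been called, a rule-file entry such as
%   example_rule(1): X + 0 may_be_replaced_by X.
% parses as ':'(example_rule(1), may_be_replaced_by(X + 0, X)), which is the
% shape expected by the rule-loading predicates in load__rules.pro.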
%############################################################################### % DEPENDENCIES %############################################################################### %############################################################################### % TYPES %############################################################################### %############################################################################### % DYNAMICS %############################################################################### %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % declare_operators. %------------------------------------------------------------------------------- % Declare operator precedence and fixivity as used by the spark tools. %=============================================================================== declare_operators :- %-------------------- op(350, xfx, ** ), % INITIAL VALUE POSTFIX op(350, xf, ~), % Attributes op(350, yfx, #), %-------------------- %-------------------- op(375, fx, + ), op(375, fx, - ), %-------------------- %-------------------- op(400, yfx, * ), op(400, yfx, / ), op(400, yfx, div), op(400, yfx, mod), op(400, yfx, rem), op(400, yfx, @ ), op(400, yfx, /\ ), %-------------------- %-------------------- op(500, yfx, + ), op(500, yfx, - ), op(500, yfx, \/ ), op(500, yfx, \ ), %-------------------- %-------------------- op(700, yfx, <> ), op(700, yfx, <= ), op(700, yfx, in ), op(700, yfx, not_in ), op(700, yfx, subset_of ), op(700, yfx, strict_subset_of ), % Signed 'is' and special case 'exp_iss' for ** operator op(700, xfx, iss), op(700, xfx, exp_iss), % Arithmetic op(700, yfx, less_than), % Sets op(700, fy, set), %-------------------- %-------------------- op(900, fy, not ), %-------------------- %-------------------- op(925, yfx, and ), %-------------------- %-------------------- op(950, yfx, or ), % Guesstimated precedence! op(950, yfx, xor ), %-------------------- %-------------------- op(975, yfx, -> ), op(975, yfx, <-> ), %-------------------- %-------------------- % for range notation op(990, yfx, '..'), % Used for "unit function" op(990, fx, unit), %-------------------- %-------------------- op(992, yfx, requires), %-------------------- %-------------------- % Used in array aggregates op(995, yfx, ':='), %-------------------- %-------------------- % Used in array aggregates op(997, yfx, &), op(997, fx, rule_family), %-------------------- %-------------------- op(998, yfx, may_be_deduced_from), op(998, yfx, may_be_replaced_by), op(998, xf, are_interchangeable), op(998, yfx, if), op(998, xf, may_be_deduced), %-------------------- %-------------------- % Used in quantification op(999, xfy, :), %-------------------- !. %=============================================================================== % hide_operators. %------------------------------------------------------------------------------- % After all dynamic predicates have been introduced, we need to disable % "dynamic" as an operator in case a user has an fdl entity called % "dynamic". Same goes for the other predefined prolog operators that might % look like fdl identifiers. See the sicstus manual section "Standard % Operators". 
%=============================================================================== hide_operators:- op(0, fx, mode), op(0, fx, public), op(0, fx, dynamic), op(0, fx, multifile), op(0, fx, volatile), op(0, fx, block), op(0, fx, meta_predicate), op(0, fx, discontiguous), op(0, fx, initialization), op(0, fx, spy), op(0, fx, nospy), !. %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/data__conc.pro0000644000175000017500000001527611753202337017414 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Stores, retrieves and replaces conclusions into the database. %############################################################################### %############################################################################### % MODULE %############################################################################### :- module(data__conc, [add_conc/3, add_conc_min_id/4, add_conc_with_id/3, get_conc/3, replace_conc/4, prune_conc/3, prune_all_concs/3]). %############################################################################### % DEPENDENCIES %############################################################################### :- use_module('data__formats.pro', [add_state/2, add_type/2]). :- use_module('ioutilities.pro', [show_error/2]). %############################################################################### % TYPES %############################################################################### :- add_type('ConclusionType', [x, ss, [s, 'TYPE']]). %############################################################################### % DATA %############################################################################### :- add_state(get_conc, get_conc('Conclusion_Term', 'ConclusionType', 'Id_Int')). :- dynamic(get_conc/3). %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % add_conc(+Conc_Term, +ConcType, -Id_Int) % % The conclusion Conc_Term of type ConcType is added to the conclusion % database and Id_Int is bound to the conclusion's identifier. %=============================================================================== add_conc(Conc_Term, ConcType, Id_Int) :- next_conc_id(Id_Int), assertz(get_conc(Conc_Term, ConcType, Id_Int)), !. 
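% Illustrative note (added commentary, not part of the original source):
% starting from an empty conclusion database, the conjunction
%   add_conc(conc_a, x, Id1), add_conc(conc_b, x, Id2)
% binds Id1 = 1 and Id2 = 2, since next_conc_id/1 below allocates the lowest
% identifier not yet present. The terms conc_a and conc_b are placeholders for
% real conclusion terms.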
%=============================================================================== %=============================================================================== % add_conc_with_id(+Conc_Term, +ConcType, +Id_Int) % % Adds a conclusion into the database where all arguments are bound. %=============================================================================== add_conc_with_id(Conc_Term, ConcType, Id_Int) :- assertz(get_conc(Conc_Term, ConcType, Id_Int)). %=============================================================================== %=============================================================================== % replace_conc(+OldConc_Term, +ConcType, +ConcId_Int, +NewConc_Term) % % Replaces a conclusion in the database with a new conclusion. % % The predicate fails if OldConc_Term, ConcType, ConcId_Int is not % in the database. %=============================================================================== replace_conc(OldConc_Term, ConcType, ConcId_Int, NewConc_Term):- prune_conc(OldConc_Term, ConcType, ConcId_Int), add_conc_with_id(NewConc_Term, ConcType, ConcId_Int). %=============================================================================== %=============================================================================== % prune_conc(?Conc_Term, ?ConcType, ?ConcId_Int) % % Predicate retracts a conclusion from the database and fails if no conclusion % is deleted. %=============================================================================== prune_conc(Conc_Term, ConcType, ConcId_Int):- retract(get_conc(Conc_Term, ConcType, ConcId_Int)). %=============================================================================== %=============================================================================== % prune_all_concs(?Conc_Term, ?ConcType, ?ConcId_Int) % % Retracts all conclusions from the database that match the % bound arguments. % % The predicate always succeeds, independent of whether anything is deleted. %=============================================================================== prune_all_concs(Conc_Term, ConcType, ConcId_Int):- retractall(get_conc(Conc_Term, ConcType, ConcId_Int)). %=============================================================================== %=============================================================================== % next_conc_id(?FreeId_Int) : private % % Finds the next free conclusion identifier. %=============================================================================== next_conc_id(FreeId_Int):- next_conc_id_with_min_x(1, FreeId_Int), !. %=============================================================================== % next_conc_id_with_min_x(+MinId_Int, ?FreeId_Int) : private % % Secondary predicate to next_conc_id_with_min and next_conc_id. % The predicate assumes that its minimum identifier is greater than or equal % to 1. %=============================================================================== % % If there are no conclusions in the database and MinId_Int = 1 then % FreeId_Int is 1, that is: % % next_conc_id_with_min_x(1, 1):- % !. % next_conc_id_with_min_x(MinId_Int, FreeId_Int) :- get_conc(_, _, MinId_Int), !, NextId_Int is MinId_Int + 1, next_conc_id_with_min_x(NextId_Int, FreeId_Int), !. next_conc_id_with_min_x(FreeId_Int, FreeId_Int) :- % No conc with identifier FreeId_Int - see above. !.
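% Illustrative note (not part of the original source): next_conc_id_with_min_x/2
% scans upwards from identifier 1 and stops at the first identifier for which no
% get_conc/3 fact exists.  For example, with conclusions 1, 2 and 3 in the
% database the next free identifier is 4; if conclusion 2 has been pruned,
% leaving only 1 and 3, the next free identifier is 2.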
%=============================================================================== %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/data__pfs.pro0000644000175000017500000001503011753202337017246 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Provides access to all information related to pfs. This information will % be retrieved from the provided pfs file. %############################################################################### %############################################################################### % MODULE %############################################################################### :- module(data__pfs, [get_pfs_statement/2, add_pfs_statement/2, get_pfs_successor_statement/3, add_pfs_successor_statement/3, get_pfs_pf/4, add_pfs_pf/4, get_pfs_traversal_condition/3, add_pfs_traversal_condition/3, get_pfs_action/2, add_pfs_action/2, save_data__pfs/0]). %############################################################################### % DEPENDENCIES %############################################################################### :- use_module('data__formats.pro', [add_state/2, add_type/2]). :- use_module('ioutilities.pro', [write_terms_to_file/2]). :- use_module('data__data_files.pro', [must_get_datafiles_debug/2]). %############################################################################### % TYPES %############################################################################### % Statement: start 2 successor(s) % Statement: line 48 2 successor(s) :- add_type('PFTraceStatement', [statement('PFPositionFrom', 'Successors_Int')]). :- add_type('PFPositionFrom', [start, line('Int')]). % Successor statement: line 32. % Successor statement: finish. :- add_type('PFTraceSuccessorStatement', [successor_statement('PFPositionTo')]). :- add_type('PFPositionTo', [finish, line('Int')]). %############################################################################### % DATA %############################################################################### % A pfs file has a number of statement lines. % Each statement has a number of successors. % Each successor has a number of path functions. % Each path function has a number of traversal conditions and an action. :- add_state(get_pfs_statement, get_pfs_statement('StatementId_Atom', 'PFTraceStatement')). :- dynamic(get_pfs_statement/2). 
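% Illustrative sketch (not part of the original source; the identifier atoms and
% terms below are invented).  A pfs fragment describing a statement at line 48
% with one successor at line 52, reached by a single path function, might be
% stored as facts of the following shape:
%
%    get_pfs_statement(s_1, statement(line(48), 1)).
%    get_pfs_successor_statement(succ_1, successor_statement(line(52)), s_1).
%    get_pfs_pf(pf_1, 1, 1, succ_1).
%    get_pfs_traversal_condition(1, x > 0, pf_1).
%    get_pfs_action(x := x + 1, pf_1).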
:- add_state(get_pfs_successor_statement, get_pfs_successor_statement('SuccessorStatementId_Atom', 'PFTraceSuccessorStatement', 'ParentStatementId_Atom')). :- dynamic(get_pfs_successor_statement/3). :- add_state(get_pfs_pf, get_pfs_pf('PFId_Atom', 'Order_Int', 'Number_Int', 'ParentSuccessorStatementId_Atom')). :- dynamic(get_pfs_pf/4). :- add_state(get_pfs_traversal_condition, get_pfs_traversal_condition('Number_Int', 'TravCond_Term', 'ParentPFId_Atom')). :- dynamic(get_pfs_traversal_condition/3). :- add_state(get_pfs_action, get_pfs_action('Action_Term', 'ParentPFId_Atom')). :- dynamic(get_pfs_action/2). %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % Add. %=============================================================================== add_pfs_statement(StatementId_Atom, PFTraceStatement):- assert(get_pfs_statement(StatementId_Atom, PFTraceStatement)), !. add_pfs_successor_statement(SuccessorStatementId_Atom, PFTraceSuccessorStatement, ParentStatementId_Atom):- assert(get_pfs_successor_statement(SuccessorStatementId_Atom, PFTraceSuccessorStatement, ParentStatementId_Atom)), !. add_pfs_pf(PFId_Atom, Order_Int, Number_Int, ParentSuccessorStatementId_Atom):- assert(get_pfs_pf(PFId_Atom, Order_Int, Number_Int, ParentSuccessorStatementId_Atom)), !. add_pfs_traversal_condition(Number_Int, TravCond_Term, ParentPFId_Atom):- assert(get_pfs_traversal_condition(Number_Int, TravCond_Term, ParentPFId_Atom)), !. add_pfs_action(Action_Term, ParentPFId_Atom):- assert(get_pfs_action(Action_Term, ParentPFId_Atom)), !. %=============================================================================== %=============================================================================== % save_data_prf. %=============================================================================== save_data__prf:- must_get_datafiles_debug(data__pfs, DebugFile_Atom), write_terms_to_file(DebugFile_Atom, [data__pfs:get_pfs_statement/2, data__pfs:get_pfs_successor_statement/3, data__pfs:get_pfs_pf/4, data__pfs:get_pfs_traversal_condition/3, data__pfs:get_pfs_action/2]), !. %=============================================================================== %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/simplifier_ioutilities.pro0000644000175000017500000005217111753202337022123 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. 
% % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Provides input and output utilities that are specific to the Simplifier. %############################################################################### %############################################################################### % MODULE %############################################################################### :- module(simplifier_ioutilities, [display_header/1, display_help/1, display_banner/1, retrieve_declaration_file/1, retrieve_simplified_proof_file/1, retrieve_proof_file/1, retrieve_proof_file_kind/1, convert_file_for_display/2, convert_file_to_base_name/2]). %############################################################################### % DEPENDENCIES %############################################################################### :- use_module('ioutilities.pro', [show_error/2, display_header_full/1, display_header_plain/1, throw_error/2]). :- use_module('newutilities.pro', [implode_separator_content_list/3, explode_separator_content_as_list/3]). :- use_module('data__system.pro', [get_system_start_date_time/2]). :- use_module('data__provenance.pro', [get_provenance_proof_file_kind/1, get_provenance_framework/1, get_provenance_banner/1, get_provenance_date_time/2, get_provenance_subprogram_identifier/1]). :- use_module('data__switches.pro', [get_switch_plain/1, get_switch_deadpaths/1]). :- use_module('../common/versioning/version.pro', [toolset_support_line1/1, toolset_support_line2/1, toolset_support_line3/1, toolset_support_line4/1]). :- use_module('data__data_files.pro', [get_datafiles_vcg/1, get_datafiles_dpc/1, get_datafiles_pfs/1, get_datafiles_fdl/1, get_datafiles_dec/1, get_datafiles_simplified_pfs/1, get_datafiles_simplified_vcg/1, get_datafiles_summary_dpc/1]). %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % display_header(+Stream). %------------------------------------------------------------------------------- % Display the header for the Simplifier. For correct processing, the % command line arguments must have already been processed. %=============================================================================== % Is not in plain mode. display_header(Stream):- get_switch_plain(off), display_header_full(Stream), !. % Is in plain mode. display_header(Stream):- get_switch_plain(on), display_header_plain(Stream), !. %None of the above is an error. display_header(_Stream):- show_error('Could not calculate banner. Unexpected system configuration.', []). %=============================================================================== % display_help(Stream). %------------------------------------------------------------------------------- % Display help text. 
%=============================================================================== % Help for Simplifier display_help(Stream):- get_switch_deadpaths(off), !, display_header(Stream), % Get common support information. toolset_support_line1(SupportLine1_Atom), toolset_support_line2(SupportLine2_Atom), toolset_support_line3(SupportLine3_Atom), toolset_support_line4(SupportLine4_Atom), format(Stream, '~n',[]), format(Stream, 'Usage: spadesimp [options] Target_File~n', []), format(Stream, '~n', []), format(Stream, 'Target_File - Either a .vcg or .pfs file~n', []), format(Stream, '~n', []), format(Stream, 'All options may be abbreviated to the shortest unique prefix.~n', []), format(Stream, '~n', []), format(Stream, 'Standard options~n', []), format(Stream, '----------------~n', []), format(Stream, '-help - Display this help information.~n', []), format(Stream, '-version - Display version information.~n', []), format(Stream, '-nolog - Do not generate a simplification log file.~n', []), format(Stream, '-log=Log_File - Specify filename for the simplification log file.~n', []), format(Stream, '-nowrap - Do not line wrap output files.~n', []), format(Stream, '-verbose - Display attempted simplification strategies.~n', []), format(Stream, '-nouserrules - Do not use user rules.~n', []), format(Stream, '-plain - Adopt a plain output style (e.g. no dates or version numbers).~n', []), format(Stream, '-typecheck - Only typecheck the input files.~n', []), format(Stream, '-norenum - Do not renumber hypotheses and conclusions in siv files.~n', []), format(Stream, '~n', []), format(Stream, 'Adjust strategy options~n', []), format(Stream, '-----------------------~n', []), format(Stream, '-nosimplification=SELECT~n', []), format(Stream, '-nostandardisation=SELECT~n', []), format(Stream, '-norule_substitution=SELECT~n', []), format(Stream, '-nocontradiction_hunt=SELECT~n', []), format(Stream, '-nosubstitution_elimination=SELECT~n', []), format(Stream, '-noexpression_reduction=SELECT~n', []), format(Stream, 'SELECT=(RANGE {, RANGE})~n', []), format(Stream, 'RANGE=Vc_Number or From_Vc_Number-To_Vc_Number~n', []), format(Stream, '~n', []), format(Stream, 'Adjust limit options~n', []), format(Stream, '--------------------~n', []), format(Stream, '-complexity_limit=Limit (Limit in range 10 .. 200)~n', []), format(Stream, '-depth_limit=Limit (Limit in range 1 .. 10)~n', []), format(Stream, '-inference_limit=Limit (Limit in range 10 .. 400)~n', []), format(Stream, '~n', []), format(Stream, '~a~n', [SupportLine1_Atom]), format(Stream, '~a~n', [SupportLine2_Atom]), format(Stream, '~a~n', [SupportLine3_Atom]), format(Stream, '~a~n', [SupportLine4_Atom]), !. % Help for ZombieScope display_help(Stream):- get_switch_deadpaths(on), !, display_header(Stream), % Get common support information. 
toolset_support_line1(SupportLine1_Atom), toolset_support_line2(SupportLine2_Atom), toolset_support_line3(SupportLine3_Atom), toolset_support_line4(SupportLine4_Atom), format(Stream, '~n',[]), format(Stream, 'Usage: zombiescope [options] Target_File~n', []), format(Stream, '~n', []), format(Stream, 'Target_File - a .dpc file~n', []), format(Stream, '~n', []), format(Stream, 'All options may be abbreviated to the shortest unique prefix.~n', []), format(Stream, '~n', []), format(Stream, 'Standard options~n', []), format(Stream, '----------------~n', []), format(Stream, '-help - Display this help information.~n', []), format(Stream, '-version - Display version information.~n', []), format(Stream, '-nolog - Do not generate a ZombieScope log file.~n', []), format(Stream, '-log=Log_File - Specify filename for the ZombieScope log file.~n', []), format(Stream, '-nowrap - Do not line wrap output files.~n', []), format(Stream, '-plain - Adopt a plain output style (e.g. no dates or version numbers).~n', []), format(Stream, '-norenum - Do not renumber hypotheses and conclusions in sdp files.~n', []), format(Stream, '-hyp_limit=Limit - Specify the maximum number of hypotheses that will be analysed.~n', []), format(Stream, '~n', []), format(Stream, '~a~n', [SupportLine1_Atom]), format(Stream, '~a~n', [SupportLine2_Atom]), format(Stream, '~a~n', [SupportLine3_Atom]), format(Stream, '~a~n', [SupportLine4_Atom]), !. %=============================================================================== % display_banner(Stream). %------------------------------------------------------------------------------- % Display the banner for the Simplifier output. The banner is affected by % the previously collected system and provenance information. %=============================================================================== display_banner(Stream):- %Leading stars for banner. format(Stream, '*****************************************************************************~n', []), %Retrive and write out the leading banner. get_provenance_banner(Line_AtomList), implode_separator_content_list('\n', Line_AtomList, Line_Atom), format(Stream, Line_Atom, []), format(Stream, '~n', []), %Closing stars for banner. format(Stream, '*****************************************************************************~n', []), %Display some spaces. format(Stream, '~n', []), format(Stream, '~n', []), % Note that POGS expects the date and time details, if present, to % appear before the subprogram name. %Consider displaying time details. display_time_details(Stream), %Embed the standard header into the file. format(Stream, '\n', []), display_header(Stream), format(Stream, '\n', []), %Display subprogram identifier. display_subprogram_identifier(Stream), %Display some spaces. format(Stream, '\n', []), format(Stream, '\n', []), format(Stream, '\n', []), format(Stream, '\n', []), !. %------------------------------------------------------------------------------- % Is in plain mode. display_time_details(_Stream):- get_switch_plain(on), !. % Is not in plain mode. % But did not retrieve a date time from the provenance file. display_time_details(_Stream):- get_switch_plain(off), \+ get_provenance_date_time(_Date_Atom, _Time_Atom), !. display_time_details(Stream):- get_switch_plain(off), get_provenance_date_time(Date_Atom, Time_Atom), % Note POGS collects both the CREATED and SIMPLIFIED dates, and expects % the following format. % % Once all trailing and leading space has been deleted: % % The CREATED date must start on column: 9, and be 11 chars wide. 
% The CREATED time must start on column: 22, and be 8 chars wide. % % The SIMPLIFIED date must start on column: 43, and be 11 chars wide. % The SIMPLIFIED time must start on column: 56, and be 8 chars wide. % % The date format is like: 09-JAN-1980 % The time format is like: 01:59:01 % % Currently POGS compares the text of the CREATED date and time with % relevant portions from the VCG file. The SIMPLIFIED date and time is % semantically compared against the date embedded into the PLG file. get_system_start_date_time(NowDate_Atom, NowTime_Atom), display_time_details_x(Stream, Date_Atom, Time_Atom, NowDate_Atom, NowTime_Atom), !. %None of the above is an error. display_time_details(_Stream):- show_error('Could not determine date and time details for the simplified file banner.', []). display_time_details_x(Stream, Date_Atom, Time_Atom, NowDate_Atom, NowTime_Atom):- get_switch_deadpaths(off), format(Stream, 'CREATED ~a, ~a SIMPLIFIED ~a, ~a\n', [Date_Atom, Time_Atom, NowDate_Atom, NowTime_Atom]). display_time_details_x(Stream, Date_Atom, Time_Atom, NowDate_Atom, NowTime_Atom):- get_switch_deadpaths(on), format(Stream, 'CREATED ~a, ~a ZombieScope ~a, ~a\n', [Date_Atom, Time_Atom, NowDate_Atom, NowTime_Atom]). %------------------------------------------------------------------------------- % The subprogram identifier is displayed in all modes. display_subprogram_identifier(Stream):- get_provenance_subprogram_identifier(SubprogramIdentifier_Atom), format(Stream, '~a\n', [SubprogramIdentifier_Atom]), !. % If in pascal mode, it is acceptable to have no subprogram identifier. In % this case a blank line is written out. display_subprogram_identifier(Stream):- get_provenance_framework(pascal), format(Stream, '\n', []), !. % From above, expected subprogram identifier is not present. display_subprogram_identifier(_Stream):- show_error('Expected subprogram identifier is not present.\n', []). %=============================================================================== % retrieve_proof_file(-ProofFile_Atom). %------------------------------------------------------------------------------- % Return the overloaded proof file name (either .vcg or .pfs). Raise an % error if this can not be done. %=============================================================================== retrieve_proof_file(ProofFile_Atom):- get_provenance_proof_file_kind(ProofFileKind), retrieve_proof_file_x(ProofFileKind, ProofFile_Atom), !. %------------------------------------------------------------------------------- retrieve_proof_file_x(verification_conditions, VcgFile_Atom):- get_datafiles_vcg(VcgFile_Atom), !. retrieve_proof_file_x(deadpath_search, DpcFile_Atom):- get_datafiles_dpc(DpcFile_Atom), !. % DELETE_PATH_FUNCTIONS retrieve_proof_file_x(path_functions, PfsFile_Atom):- get_datafiles_pfs(PfsFile_Atom), !. retrieve_proof_file_x(ProofFileKind, _File_Atom):- throw_error('Unable to retrieve proof file for proof file kind: ~k', [ProofFileKind]). %=============================================================================== % retrieve_simplified_proof_file(-SimplifiedProofFile_Atom). %------------------------------------------------------------------------------- % Return the overloaded simplified proof file name (either .siv, .sdp or .sip). % Raise an error if this can not be done. 
%=============================================================================== retrieve_simplified_proof_file(SimplifiedProofFile_Atom):- get_provenance_proof_file_kind(ProofFileKind), retrieve_simplified_proof_file_x(ProofFileKind, SimplifiedProofFile_Atom), !. %------------------------------------------------------------------------------- retrieve_simplified_proof_file_x(verification_conditions, SimplifiedVcgFile_Atom):- get_datafiles_simplified_vcg(SimplifiedVcgFile_Atom), !. retrieve_simplified_proof_file_x(deadpath_search, SummaryDeadpathFile_Atom):- get_datafiles_summary_dpc(SummaryDeadpathFile_Atom), !. retrieve_simplified_proof_file_x(path_functions, SimplifiedPfsFile_Atom):- get_datafiles_simplified_pfs(SimplifiedPfsFile_Atom), !. retrieve_simplified_proof_file_x(ProofFileKind, _File_Atom):- throw_error('Unable to retrieve simplified proof file for proof file kind: ~k', [ProofFileKind]). %=============================================================================== % retrieve_declaration_file(-DeclarationFile_Atom). %------------------------------------------------------------------------------- % Return the overloaded proof declaration file name (either .fdl or .dec). % Raise an error if this can not be done. %=============================================================================== retrieve_declaration_file(DeclarationFile_Atom):- get_provenance_proof_file_kind(ProofFileKind), retrieve_declaration_file_x(ProofFileKind, DeclarationFile_Atom), !. %------------------------------------------------------------------------------- retrieve_declaration_file_x(verification_conditions, FdlFile_Atom):- get_datafiles_fdl(FdlFile_Atom), !. retrieve_declaration_file_x(deadpath_search, FdlFile_Atom):- get_datafiles_fdl(FdlFile_Atom), !. retrieve_declaration_file_x(path_functions, DecFile_Atom):- get_datafiles_dec(DecFile_Atom), !. retrieve_declaration_file_x(ProofFileKind, _File_Atom):- throw_error('Unable to retrieve declaration file for proof file kind: ~k', [ProofFileKind]). %=============================================================================== % retrieve_proof_file_kind(-ProofFileKind). %------------------------------------------------------------------------------- % Based on which main proof file is available, return the proof file kind. %=============================================================================== % If both main proof files are available, then raise an error. retrieve_proof_file_kind(_ProofFileKind):- get_datafiles_vcg(VcgFile_Atom), convert_file_for_display(VcgFile_Atom, DisplayVcgFile_Atom), get_datafiles_pfs(PfsFile_Atom), convert_file_for_display(PfsFile_Atom, DisplayPfsFile_Atom), throw_error('Unexpected availability of two distinct proof files: ~a and ~a', [DisplayVcgFile_Atom, DisplayPfsFile_Atom]). % If dead path is off then proof kind is verification conditions. retrieve_proof_file_kind(verification_conditions):- get_switch_deadpaths(off), get_datafiles_vcg(_VcgFile_Atom), !. % If dead path is on then proof kind is searching for dead paths. retrieve_proof_file_kind(deadpath_search):- get_switch_deadpaths(on), get_datafiles_dpc(_DpcFile_Atom), !. % Error if neither vcg or dpc files have been loaded. retrieve_proof_file_kind(_ProofFileKind):- get_switch_deadpaths(off), throw_error('Unexpected no vcg files loaded', []). % Could not locate dpc file for dead path search. retrieve_proof_file_kind(_ProofFileKind):- get_switch_deadpaths(on), throw_error('Unexpected no dpc files loaded', []). % If pfs is available then is path_functions. 
retrieve_proof_file_kind(path_functions):- get_datafiles_pfs(_PfsFile_Atom), !. % If neither are available then raise an error. retrieve_proof_file_kind(_ProofFileKind):- throw_error('Unexpected no proof files available', []). %=============================================================================== % convert_file_for_display(+File_Atom, -DisplayFile_Atom). %------------------------------------------------------------------------------- % Plain mode exists to minimise differences across platforms. This extends % to the portrayal of files. This routine converts an internal file % (File_Atom) into an appropriate form for display (DisplayFile_Atom). %=============================================================================== % Is not in plain mode. % Display the whole file. convert_file_for_display(File_m_DisplayFile_m_Atom, File_m_DisplayFile_m_Atom):- get_switch_plain(off), !. % Is in plain mode. % Strip off the directory part. convert_file_for_display(File_Atom, DisplayFile_Atom):- get_switch_plain(on), convert_file_to_base_name(File_Atom, DisplayFile_Atom), !. convert_file_for_display(File_Atom, _DisplayFile_Atom):- throw_error('Error in converting file ~k for display', [File_Atom]). %=============================================================================== % convert_file_to_base_name(+File_Atom, -BaseName_Atom). %------------------------------------------------------------------------------- % Given a file (File_Atom), strip off its directory part and return the % reminder as (BaseName_Atom). %=============================================================================== convert_file_to_base_name(File_Atom, DisplayFile_Atom):- %Retrieve the directory parts and file part. explode_separator_content_as_list('/', File_Atom, Item_AtomList), %The last part is the file part. append(_Directory_AtomList, [DisplayFile_Atom], Item_AtomList), !. %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/records2.pro0000644000175000017500000007523711753202337017070 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Various facilities for record manipulations and simplifications. 
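% Illustrative sketch (not part of the original source): for a record type with
% fields x and y, whose FDL access and update functions are fld_x/fld_y and
% upf_x/upf_y (the exact names and field numbering come from the
% record_function/6 facts generated from the declarations), the predicates below
% behave roughly as follows:
%
%    upf_x(R, fld_x(R))               simplifies to R, since the field is
%                                     updated with its own current value;
%    upf_y(upf_x(upf_x(R, 1), 2), 3)  is reordered into a canonical nesting,
%                                     e.g. upf_x(upf_y(R, 3), 2), with the
%                                     superseded inner update of x discarded,
%
% so that syntactically different update chains with the same meaning compare
% equal.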
%############################################################################### %############################################################################### % DEPENDENCIES %############################################################################### %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### %############################################################################### % PREDICATES %############################################################################### record_simplify(OLD, NEW) :- ( record_function(N,OLD,update,F,[REC,VAL],T), ( record_function(_,VAL,access,F,[REC],T), simplify(REC, NEW) ; record_simp_of_type(T, REC, REC2), simplify(VAL, VAL2), record_function(N, NEWREC, update, F, [REC2, VAL2], T), !, order_updates(NEWREC, NEW) ) ; record_function(_,OLD,access,F,[RECORD],_), /* OLD is a fld_xxx(RECORD) */ !, /* so commit to this branch */ checktype(RECORD, T), /* find the record type, */ record_function(N,OLD,access,F,[RECORD],T), /* find the N for type T */ record_access(N, T, RECORD, NEW) /* and call record_access */ ), !. %------------------------------------------------------------------------------- order_updates(OLD, NEW) :- checktype(OLD, TYPE), make_update_list(OLD, TYPE, OLDLIST, CORE), keysort(OLDLIST, NEWLIST), reconstruct_record(NEW, TYPE, NEWLIST, CORE), !. %------------------------------------------------------------------------------- make_update_list(OLD, TYPE, LIST, CORE) :- record_function(N,OLD,update,_F,[RECORD,VALUE],TYPE), !, make_update_list(RECORD, TYPE, SO_FAR, CORE), !, ( is_in((N-_),SO_FAR), !, append(FRONT, [(N-_)|TAIL], SO_FAR), append(FRONT, [(N-VALUE)|TAIL], LIST) ; LIST=[(N-VALUE)|SO_FAR] ), !. make_update_list(CORE, _, [], CORE) :- !. %------------------------------------------------------------------------------- reconstruct_record(NEW, TYPE, [(N-V)|REST], CORE) :- reconstruct_record(SO_FAR, TYPE, REST, CORE), !, simplify(V, VALUE), record_function(N,NEW,update,_F,[SO_FAR,VALUE],TYPE), !. reconstruct_record(CORE, _, [], CORE) :- !. %------------------------------------------------------------------------------- record_access(N, T, RECORD, VALUE) :- record_function(M,RECORD,update,_F,[R,V],T), !, ( M=N, !, simplify(V,VALUE) ; record_access(N, T, R, VALUE) ), !. record_access(N, T, RECORD, VALUE) :- \+ atomic(RECORD), RECORD =.. [mk__record|COMPONENTS], get_provenance_framework(spark), !, record_function(N, _, access, FIELD, _, T), is_in(FIELD := V, COMPONENTS), simplify(V, VALUE), !. record_access(N, T, RECORD, VALUE) :- \+ atomic(RECORD), RECORD =.. [F|COMPONENTS], mk__function_name(F, T, record), get_provenance_framework(spark), !, record_function(N, _, access, FIELD, _, T), is_in(FIELD := V, COMPONENTS), simplify(V, VALUE), !. record_access(N, T, RECORD, VALUE) :- ( record_simp_of_type(T, RECORD, NEWREC) ; RECORD = element(_,_), array_simplify(RECORD, NEWREC) ; RECORD = update(_,_,_), array_simplify(RECORD, NEWREC) ; NEWREC=RECORD /* otherwise */ ), !, ( record_function(N,NEWREC,update,F,[R,VALUE],T) ; record_function(M,NEWREC,update,_G,[R,V],T), M \= N, !, record_access(N, T, R, VALUE) ; NEWREC =.. 
[F|COMPONENTS], mk__function_name(F, T, record), get_provenance_framework(spark), !, record_function(N, _, access, FIELD, _, T), is_in(FIELD := V, COMPONENTS), !, simplify(V, VALUE) ; record_function(N,VALUE,access,F,[NEWREC],T) ), !. %------------------------------------------------------------------------------- record_simp_of_type(T, OLD, NEW) :- ( record_function(N, OLD, update, F, [REC,VAL], T), ( record_function(_, VAL, access, F, [REC], T), simplify(REC, NEW) ; order_updates(OLD, NEW) ) ; record_function(_, OLD, access, F, [RECORD], _), !, checktype(RECORD, T2), record_function(N, OLD, access, F, [RECORD], T2), record_access(N, T2, RECORD, NEW) ), !. record_simp_of_type(_T, element(A,I), NEW) :- !, array_simplify(element(A,I), NEW). record_simp_of_type(_T, update(A,I,X), NEW) :- !, array_simplify(update(A,I,X), NEW). %------------------------------------------------------------------------------- array_simplify(update(A,I,X), NEW) :- !, remove_update_duplicates(update(A,I,X), SO_FAR), !, do_array_simplify(SO_FAR, NEW), !. array_simplify(element(A,I), NEW) :- !, do_array_simplify(element(A,I), NEW), !. %------------------------------------------------------------------------------- do_array_simplify(update(A, I, X), NEW) :- do_array_simplify(A, NEWA), simplify(X, NEWX), eval_list(I, NEWI), !, ( ( NEWX=element(A, J) ; A\=NEWA, NEWX=element(NEWA, J) ; X\=NEWX, ( X=element(A, J) ; A\=NEWA, X=element(NEWA, J) ) ), ( infer(NEWI=J) ; I\=NEWI, infer(I=J) ), !, NEW=NEWA ; NEWA=update(AA, J, _Y), ( infer(NEWI=J) ; I\=NEWI, infer(I=J) ), !, NEW=update(AA, NEWI, NEWX) ; NEW=update(NEWA, NEWI, NEWX) ), !. do_array_simplify(element(A, I), NEW) :- do_array_simplify(A, NEWA), eval_list(I, NEWI), find_element(NEWA, NEWI, NEW), !. do_array_simplify(X,Y) :- !, simplify(X,Y), !. %------------------------------------------------------------------------------- find_element(update(A, I, _X), J, E) :- infer(I<>J), !, find_element(A, J, E), !. find_element(update(_A, I, X), J, X) :- infer(I=J), !. find_element(A, [J], X) :- \+ atomic(A), A =.. [mk__array|COMPONENTS], get_provenance_framework(spark), reverse(COMPONENTS, REV_COMPS), find_array_component(REV_COMPS, J, X). find_element(A, [J], X) :- \+ atomic(A), A =.. [F|COMPONENTS], mk__function_name(F, _, array), get_provenance_framework(spark), reverse(COMPONENTS, REV_COMPS), find_array_component(REV_COMPS, J, X). find_element(A, J, element(A, J)) :- !. %------------------------------------------------------------------------------- remove_update_duplicates(update(A,I,X), update(NEW,I,X)) :- remove_updates(A,I,NEWA), !, remove_update_duplicates(NEWA, NEW), !. remove_update_duplicates(X, X) :- !. %------------------------------------------------------------------------------- remove_updates(update(A,I,X), J, NEW) :- ( infer(I=J), !, remove_updates(A, J, NEW) ; remove_updates(A, J, NEWA), NEW=update(NEWA, I, X) ), !. remove_updates(X, _, X) :- !. %------------------------------------------------------------------------------- find_array_component([H := V|T], J, X) :- ( satisfies_index_constraint(H, J), !, simplify(V, X) ; does_not_satisfy_index_constraint(H, J), !, find_array_component(T, J, X) ), !. find_array_component([V], _J, X) :- V \= (_ := _), simplify(V, X), checktype(X, _), !. %------------------------------------------------------------------------------- satisfies_index_constraint(I1 & I2, J) :- ( satisfies_index_constraint(I1, J) ; satisfies_index_constraint(I2, J) ), !. satisfies_index_constraint([I1 .. 
I2], J) :- infer(I1 <= J), infer(J <= I2), !. satisfies_index_constraint([I], J) :- infer(I=J), !. %------------------------------------------------------------------------------- does_not_satisfy_index_constraint(I1 & I2, J) :- does_not_satisfy_index_constraint(I1, J), does_not_satisfy_index_constraint(I2, J), !. does_not_satisfy_index_constraint([I1 .. I2], J) :- !, ( infer(J < I1) ; infer(J > I2) ), !. does_not_satisfy_index_constraint([I], J) :- infer(I <> J), !. %------------------------------------------------------------------------------- set_simplify(A \/ B, NEW) :- set_simplify(A, NEWA), set_simplify(B, NEWB), ( NEWA=NEWB, !, NEW=NEWA ; NEWA = (set ASET), NEWB = (set BSET), append(ASET, BSET, USET), simp_set_list(USET, U), sort(U, UNION), NEW = (set UNION) ; infer(NEWA subset_of NEWB), !, NEW=NEWB ; infer(NEWB subset_of NEWA), !, NEW=NEWA ; NEW = (NEWA \/ NEWB) ), !. set_simplify(A /\ B, NEW) :- set_simplify(A, NEWA), set_simplify(B, NEWB), ( NEWA=NEWB, !, NEW=NEWA ; NEWA = (set ASET), NEWB = (set BSET), make_intersection(ASET, BSET, NEW) ; infer(NEWA subset_of NEWB), !, NEW=NEWA ; infer(NEWB subset_of NEWA), !, NEW=NEWB ; NEW = (NEWA /\ NEWB) ), !. set_simplify(A \ B, NEW) :- set_simplify(A, NEWA), set_simplify(B, NEWB), ( NEWA=NEWB, !, NEW=(set []) ; NEWB=(set []), NEW=NEWA ; NEWA = (set ASET), NEWB = (set BSET), make_difference(ASET, BSET, NEW) ; infer(NEWA subset_of NEWB), !, NEW=(set []) ; NEW = (NEWA \ NEWB) ), !. set_simplify(A subset_of B, NEW) :- set_simplify(A, NEWA), set_simplify(B, NEWB), ( ( NEWA=NEWB ; NEWA=(set []) ; NEWB = (B1 \/ B2), ( set_simplify(NEWA subset_of B1, true) ; set_simplify(NEWA subset_of B2, true) ) ; NEWA = (A1 \ A2), set_simplify(A1 subset_of NEWB, true) ; NEWA = (A1 /\ A2), ( set_simplify(A1 subset_of NEWB, true) ; set_simplify(A2 subset_of NEWB, true) ) ; NEWA = (set ASET), NEWB = (set BSET), is_subset_of(ASET, BSET) ), NEW=true ; NEW = (NEWA subset_of NEWB) ), !. set_simplify(A strict_subset_of B, NEW) :- set_simplify(A, NEWA), set_simplify(B, NEWB), ( NEWA=(set ASET), NEWB=(set BSET), is_strict_subset_of(ASET, BSET), NEW=true ; NEW = (NEWA strict_subset_of NEWB) ), !. set_simplify(A in B, NEW) :- simplify(A, NEWA), set_simplify(B, NEWB), ( NEWB=(set _), ( infer(NEWA in NEWB), NEW=true ; infer(NEWA not_in NEWB), NEW=false ) ; NEW = (NEWA in NEWB) ), !. set_simplify(A not_in B, NEW) :- simplify(A, NEWA), set_simplify(B, NEWB), ( NEWB=(set _), ( infer(NEWA not_in NEWB), NEW=true ; infer(NEWA in NEWB), NEW=false ) ; NEW = (NEWA not_in NEWB) ), !. set_simplify(A=B, NEW) :- set_simplify(A, NEWA), set_simplify(B, NEWB), ( NEWA=NEWB, NEW=true ; NEW=(NEWA=NEWB) ), !. set_simplify(A<>B, NEW) :- set_simplify(A=B, EQ), ( EQ=true, NEW=false ; EQ=(A1=B1), NEW=(A1<>B1) ; NEW=(not EQ) ), !. set_simplify((set L), (set M)) :- simp_set_list(L, N), sort(N, M), !. set_simplify(SOMETHING, ELSE) :- ( atom(SOMETHING), ELSE=SOMETHING ; SOMETHING =.. [F|SARGS], eval_list(SARGS, EARGS), ELSE =.. [F|EARGS] ), !. %------------------------------------------------------------------------------- simp_set_list([H|T], LIST) :- simp_set_list(T, TAIL), simplify(H, NEWH), ( in_set_list(NEWH, TAIL), LIST=TAIL ; LIST=[NEWH|TAIL] ), !. simp_set_list([], []) :- !. %------------------------------------------------------------------------------- in_set_list(E, [H|_T]) :- infer(E=H), !. in_set_list(E, [_|T]) :- in_set_list(E, T), !. %------------------------------------------------------------------------------- make_intersection([_H|_T], [], (set [])). 
make_intersection([], [_H|_T], (set [])). make_intersection(S1, S2, (set S)) :- mk_intersect(S1, S2, SET), sort(SET, S), !. %------------------------------------------------------------------------------- mk_intersect([], _L, []) :- !. mk_intersect([H|T], L, S) :- mk_intersect(T, L, I), ( in_set_list(H, L), ( not_in_set_list(H, I), S=[H|I] ; in_set_list(H, I), S=I ; S=[H|I] ) ; not_in_set_list(H, L), S=I ), !. %------------------------------------------------------------------------------- make_difference([], _, (set [])) :- !. make_difference(L, [], (set L)) :- !. make_difference(A, B, (set S)) :- mk_diff(A, B, SET), sort(SET, S), !. %------------------------------------------------------------------------------- mk_diff([], _, []) :- !. mk_diff([H|T], L, S) :- mk_diff(T, L, D), ( in_set_list(H, L), S=D ; not_in_set_list(H, L), S=[H|D] ), !. %------------------------------------------------------------------------------- not_in_set_list(_E, []) :- !. not_in_set_list(E, [H|T]) :- infer(E<>H), not_in_set_list(E, T), !. %------------------------------------------------------------------------------- is_subset_of([], _) :- !. is_subset_of([H|T], L) :- in_set_list(H, L), is_subset_of(T, L), !. is_strict_subset_of(A, B) :- is_subset_of(A, B), mk_diff(B, A, S), S=[_|_], !. %------------------------------------------------------------------------------- sequence_simplify(L1 @ L2, LIST) :- ( sequence_simplify(L1, LL1) ; simplify(L1, LL1) ), ( sequence_simplify(L2, LL2) ; simplify(L2, LL2) ), !, ( LL1 = [], LIST = LL2 ; LL2 = [], LIST = LL1 ; LL1 = [_|_], LL2 = [_|_], append(LL1, LL2, LIST) ; LIST = (LL1 @ LL2) ), !. sequence_simplify(first(SEQ), NEW) :- sequence_simplify(SEQ, NEWSEQ), ( NEWSEQ=[H|_], NEW=H ; NEW=first(NEWSEQ) ), !. sequence_simplify(last(SEQ), NEW) :- sequence_simplify(SEQ, NEWSEQ), ( NEWSEQ=[_|_], last(NEWSEQ, NEW) ; NEW=last(NEWSEQ) ), !. sequence_simplify(nonfirst(SEQ), NEW) :- sequence_simplify(SEQ, NEWSEQ), ( NEWSEQ=[_|T], NEW=T ; NEW=nonfirst(NEWSEQ) ), !. sequence_simplify(nonlast(SEQ), NEW) :- sequence_simplify(SEQ, NEWSEQ), ( NEWSEQ=[_|_], append(NEW, [_LAST], NEWSEQ) ; NEW=nonlast(NEWSEQ) ), !. sequence_simplify([H|T], NEW) :- eval_list([H|T], NEW), !. sequence_simplify([], []) :- !. %------------------------------------------------------------------------------- enumerated_simplify(succ(X), NEW) :- checktype(X, T), enumeration(T, L), enumerated_simp(succ(X), L, NEW), !. enumerated_simplify(pred(X), NEW) :- checktype(X, T), enumeration(T, L), enumerated_simp(pred(X), L, NEW), !. enumerated_simplify(X=Y, NEW) :- checktype(X, T), enumeration(T, L), enumerated_simp(X, L, NEWX), enumerated_simp(Y, L, NEWY), ( NEWX=NEWY, !, NEW=true ; is_in(NEWX, L), is_in(NEWY, L), !, NEW=false ; ( is_in(NEWX, L) ; is_in(NEWY, L) ), enumerated_eq_simp(NEWX=NEWY, NEW, L) ; NEW=(NEWX=NEWY) ), !. enumerated_simplify(X<>Y, NEW) :- checktype(X, T), enumeration(T, L), enumerated_simp(X, L, NEWX), enumerated_simp(Y, L, NEWY), ( NEWX=NEWY, !, NEW=false ; is_in(NEWX, L), is_in(NEWY, L), !, NEW=true ; enumerated_dis_simp(NEWX<>NEWY, NEW, L) ; NEW=(NEWX<>NEWY) ), !. enumerated_simplify(XY, NEW) :- enumerated_simplify(Y=Y, NEW) :- enumerated_simplify(Y<=X, NEW), !. enumerated_simplify(X, NEW) :- checktype(X, T), enumeration(T, L), enumerated_simp(X, L, NEW), !. 
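% Illustrative sketch (not part of the original source): assuming an FDL
% enumerated type declared as  type colour = (red, green, blue);  and hence a
% fact of the form  enumeration(colour, [red, green, blue]),  the clauses above
% simplify, for example:
%
%    succ(red)     to  green
%    pred(blue)    to  green
%    red = blue    to  false
%    red <> green  to  true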
%------------------------------------------------------------------------------- enumerated_simp(succ(X), L, NEW) :- nonvar(X), !, enumerated_simp(X, L, NEWX), ( ( strict_sublist([NEWX,NEW], L) ) ; ( NEWX=pred(NEW), L=[H|_], infer(NEW<>H) ) ; NEW=succ(NEWX) ), !. enumerated_simp(pred(X), L, NEW) :- nonvar(X), !, enumerated_simp(X, L, NEWX), ( ( strict_sublist([NEW,NEWX], L) ) ; NEWX=succ(NEW), last(L, H), infer(NEW<>H) ; NEW=pred(X) ), !. enumerated_simp(X, _, NEWX) :- simplify(X, NEWX), !. %------------------------------------------------------------------------------- rebalance(succ(X), Y, NEWX, NEWY, Enumeration) :- strict_sublist([PredofY, Y], Enumeration), /* Get pred(Y) */ rebalance(X, PredofY, NEWX, NEWY, Enumeration). rebalance(pred(X), Y, NEWX, NEWY, Enumeration) :- strict_sublist([Y, SuccofY], Enumeration), rebalance(X, SuccofY, NEWX, NEWY, Enumeration). rebalance(X, Y, X, Y, _) :- X \= pred(_), X \= succ(_), !. %------------------------------------------------------------------------------- prove_not_last(X, Enumeration) :- last(Enumeration, Last), rebalance(X, Last, RX, RLast, Enumeration), enumerated_infer(RXY, NEW, [First|Rest]) :- Y \= pred(_), Y \= succ(_), prove_not_first(X, [First|Rest]), !, ( strict_sublist([Y, NEWY], [First|Rest]), !, enumerated_dis_simp(X<>NEWY, NEW, [First|Rest]) ; NEW = (pred(X)<>Y) ). enumerated_dis_simp(succ(X)<>Y, NEW, L) :- Y \= pred(_), Y \= succ(_), prove_not_last(X, L), !, ( strict_sublist([NEWY, Y], L), !, enumerated_dis_simp(X<>NEWY, NEW, L) ; NEW = (succ(X)<>Y) ). enumerated_dis_simp(pred(X)<>pred(Y), NEW, [First|Rest]) :- prove_not_first(X, [First|Rest]), prove_not_first(Y, [First|Rest]), !, enumerated_dis_simp(X<>Y, NEW, [First|Rest]). enumerated_dis_simp(succ(X)<>succ(Y), NEW, L) :- prove_not_last(X, L), prove_not_last(Y, L), !, enumerated_dis_simp(X<>Y, NEW, L). enumerated_dis_simp(X<>succ(Y), NEW, L) :- X \= succ(_), X \= pred(_), !, enumerated_dis_simp(succ(Y)<>X, NEW, L). enumerated_dis_simp(X<>pred(Y), NEW, L) :- X \= succ(_), X \= pred(_), !, enumerated_dis_simp(pred(Y)<>X, NEW, L). enumerated_dis_simp(X<>Y, X<>Y, _) :- !. %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/aritheval.pro0000644000175000017500000003746711753202337017327 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. 
% % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % This module defines the X iss Expr predicate, which takes a ground % expression Expr and attempts to evaluate it, unifying X with the result % if this is possible. It is thus rather like the built-in is predicate of % Prolog, but with three important differences. Firstly, it will prevent % evaluations which would lead to an execution error, such as X is 1/0. % Secondly, it does not evaluate mod expressions at presence, since no % definition of mod is built into the Simplifier. (This is for historical % reasons, because Pascals mod differs from that of Ada, yet both languages % are in principle supported by the Simplifier.) Finally, it returns a % structure like those accepted by signed_integer, where the result is % negative, i.e. returning a negated, rather than a negative, literal. %############################################################################### %=============================================================================== % exp_iss(-Value, +Expression). %------------------------------------------------------------------------------- % See below for iss/2 % % Special case for ** operator %=============================================================================== X exp_iss A**B :- P iss A, Q iss B, !, ( Q = 0, X = 1 ; integer(Q), Q > 0, Q1 is Q - 1, !, P1 exp_iss P**Q1, !, X iss P * P1 ), !. %=============================================================================== % iss(-Value, +Expression). %------------------------------------------------------------------------------- % The Value iss Expression predicate, takes a ground expression as % Expression and attempts to evaluate it, unifying Value with the result % where possible. It is thus rather like the built-in 'is' predicate of % Prolog, but with the following important differences: % +It will prevent evaluations which would lead to an execution error, such % as X is 1/0. % +It returns negated positive literals (-(42)), rather that negative % literals (-42). %=============================================================================== % Variable. _X iss V :- var(V), !, fail. % Positive integer. A iss A :- integer(A), A>=0, !. % Negative integer. X iss A :- integer(A), A<0, !, A1 is -A, X=(-A1), !. % Double negation. X iss -(-A) :- X iss A, !. % Unary minus. X iss -(A) :- P iss A, !, ( P=0, X=0 ; integer(P), P>0, X=(-P) ; P=(-X), X>0 ), !. % Addition. X iss A+B :- P iss A, Q iss B, !, X1 is P+Q, ( X1>=0, X=X1 ; X1<0, X2 is -X1, X=(-X2) ), !. % Subtraction. X iss A-B :- X iss A+(-B), !. % Multiplication. X iss A*B :- P iss A, Q iss B, !, X1 is P*Q, ( X1>=0, X=X1 ; X1<0, X2 is -X1, X=(-X2) ), !. % Division. X iss A div B :- P iss A, Q iss B, !, Q\=0, eval_div(P, Q, X1), ( X1>=0, X=X1 ; X1<0, X2 is -X1, X=(-X2) ), !. % Modulus (SPARK definition). X iss A mod B :- get_provenance_framework(spark), !, P iss A, Q iss B, !, Q \= 0, eval_div(P, Q, X1), Remainder iss P - (X1 * Q), ( Remainder = 0, X = 0 ; signed_integer(P), signed_integer(Q), ( ( P >= 0, Q > 0 ; P =< 0, Q < 0 ), X = Remainder ; ( P >= 0, Q < 0 ; P =< 0, Q > 0 ), X iss Remainder + Q ) ; X = (P mod Q) ), !. % Modulus (Pascal definition). 
X iss A mod B :- get_provenance_framework(pascal), !, P iss A, Q iss B, !, Q \= 0, eval_div(P, Q, X1), Remainder iss P - (X1 * Q), ( Remainder = 0, X = 0 ; Q = 1, X = 0 ; X = (P mod Q) ), !. % Exponentiation (raise to the power of). % guard abs of both arguments to prevent % attempt to evaluate gigantic values that would % take too long or just run out of memory X iss A**B :- P iss abs(A), Q iss abs(B), simplify(P <= 1024, true), simplify(Q <= 1024, true), X exp_iss A**B. % Absolute value. X iss abs(A) :- P iss A, !, ( integer(P), P >= 0, X = P ; signed_integer(P), P < 0, X iss -P ), !. %=============================================================================== %=============================================================================== % rational_expression(+Expression). %------------------------------------------------------------------------------- % Succeeds if Expression is a nice rationals-only expression. %=============================================================================== rational_expression(A) :- var(A), !, fail. rational_expression(A) :- integer(A), !. rational_expression(-A) :- !, rational_expression(A). rational_expression(abs(A)) :- !, rational_expression(A). rational_expression(A+B) :- !, rational_expression(A), !, rational_expression(B). rational_expression(A-B) :- !, rational_expression(A), !, rational_expression(B). rational_expression(A*B) :- !, rational_expression(A), !, rational_expression(B). rational_expression(A/B) :- !, rational_expression(A), !, rational_expression(B). rational_expression(A**B) :- !, rational_expression(A), !, intexp(B). %=============================================================================== %=============================================================================== % evaluate_rational_expression(+RationalExpression, -Value). %------------------------------------------------------------------------------- % Given a RationalExpression, evaluate it and return its Value, as a % rational literal. % % Note that the legal values returned are defined by base_rational(V), % which succeeds if V has been reduced to its 'simplest' base form. So, for % instance, base_rational(3) and base_rational(4/3) succeed whereas % base_rational(8/2) and base_rational(-(- 1)) both fail. %=============================================================================== % Absolute value. evaluate_rational_expression(abs(R), Value) :- evaluate_rational_expression(R, Rvalue), ( positive_rational(Rvalue), Value = Rvalue ; negative_rational(Rvalue), Rvalue = (-Value) ), !. % Addition. evaluate_rational_expression(Rx + Ry, Value) :- evaluate_rational_expression(Rx, Xvalue), evaluate_rational_expression(Ry, Yvalue), split_rational(Xvalue, Xn, Xd), split_rational(Yvalue, Yn, Yd), Top iss Xn * Yd + Yn * Xd, Bottom iss Xd * Yd, !, make_base_rational(Top, Bottom, Value). % Subtraction. evaluate_rational_expression(Rx - Ry, Value) :- evaluate_rational_expression(Rx, Xvalue), evaluate_rational_expression(Ry, Yvalue), split_rational(Xvalue, Xn, Xd), split_rational(Yvalue, Yn, Yd), Top iss Xn * Yd - Yn * Xd, Bottom iss Xd * Yd, !, make_base_rational(Top, Bottom, Value). % Multiplication. evaluate_rational_expression(Rx * Ry, Value) :- evaluate_rational_expression(Rx, Xvalue), evaluate_rational_expression(Ry, Yvalue), split_rational(Xvalue, Xn, Xd), split_rational(Yvalue, Yn, Yd), Top iss Xn * Yn, Bottom iss Xd * Yd, !, make_base_rational(Top, Bottom, Value). % Exponentiation (raise to the power of). 
evaluate_rational_expression(R ** I, Value) :- evaluate_rational_expression(R, Rvalue), intexp(I), Ivalue iss I, ( Ivalue = 0, Value = 1 ; Ivalue = 1, Value = Rvalue ; Ivalue < 0, R \= 0, split_rational_alt_sign(Rvalue, Rn, Rd), make_base_rational(Rd, Rn, OneOverR), Ineg is -Ivalue, !, evaluate_rational_expression(OneOverR ** Ineg, Value) ; Idiv2 iss I div 2, Imod2 iss Ivalue - 2 * Idiv2, evaluate_rational_expression(Rvalue * Rvalue, Rsquared), evaluate_rational_expression(Rsquared ** Idiv2, SqValue), ( Imod2 = 0, Value = SqValue ; evaluate_rational_expression(Rvalue * SqValue, Value) ) ), !. % Base case. evaluate_rational_expression(R, R) :- base_rational(R), !. % Division. evaluate_rational_expression(Rx / Ry, Value) :- evaluate_rational_expression(Rx, Xvalue), evaluate_rational_expression(Ry, Yvalue), split_rational(Xvalue, Xn, Xd), split_rational_alt_sign(Yvalue, Yn, Yd), Top iss Xn * Yd, Bottom iss Xd * Yn, !, make_base_rational(Top, Bottom, Value). % Unary minus. evaluate_rational_expression(-R, Value) :- evaluate_rational_expression(R, Rvalue), ( positive_rational(Rvalue), ( Rvalue = 0, Value = 0 ; Value = (-Rvalue) ) ; negative_rational(Rvalue), Rvalue = (-Value) ), !. %=============================================================================== %=============================================================================== % base_rational(+Expression). %------------------------------------------------------------------------------- % Is successful where Expression is of the form: [-](I[/J]), % where I,J are integers, I>=0, J>0 and the greatest common divisor of I % and J is 1. % % For example, accepted cases: % -(1), 0, 42, 42/11, -(7/3). % % Rejected cases: % -(0) [should be 0 instead] % 7/1 [should be 7 instead] % (-7)/2 [should be -(7/2) instead] % /4 [should be 5/2 instead] % 3/(-(1)) [should be -(3/1) instead] % 4/(3/2) [should be 8/3 instead]. %=============================================================================== base_rational(-R) :- positive_rational(R), R \= 0. base_rational(R) :- positive_rational(R). positive_rational(I) :- integer(I), I>=0. positive_rational(I/J) :- integer(I), I>=0, integer(J), J>1, gcd(I, J, 1). negative_rational(-R) :- positive_rational(R), R \= 0. %=============================================================================== %=============================================================================== % strict_rational(+Expression). %------------------------------------------------------------------------------- % Succeed where Expression is a rational literal that isn't a signed % integer. %=============================================================================== strict_rational(R) :- base_rational(R), \+ signed_integer(R). %=============================================================================== %=============================================================================== % split_rational(+RationalLiteral, -NumeratorPart, -DenominatorPart). %------------------------------------------------------------------------------- % Split a RationalLiteral into a NumeratorPart and DenominatorPart. % % Note that the numerator carries the sign information, while the % denominator is always strictly +ve. %=============================================================================== split_rational(-(I/J), -I, J) :- integer(I), I>0, integer(J), J>0. split_rational(I/J, I, J) :- integer(I), I>0, integer(J), J>0. split_rational(I, I, 1) :- signed_integer(I). 
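% Illustrative examples (not part of the original source): the numerator carries
% the sign and the denominator is always strictly positive, so
%
%    split_rational(3/4, N, D)     gives  N = 3,    D = 4
%    split_rational(-(3/4), N, D)  gives  N = -(3), D = 4
%    split_rational(7, N, D)       gives  N = 7,    D = 1
%
% which is the form expected by the rational arithmetic clauses above.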
%=============================================================================== %=============================================================================== % split_rational_alt_sign(+RationalLiteral, -NumeratorPart, -DenominatorPart). %------------------------------------------------------------------------------- % Split a RationalLiteral into a NumeratorPart and DenominatorPart. % % Note that, unlike split_rational/3, the denominator carries the sign % information, while the numerator is always strictly +ve. % % The converse operation is needed for inverting. For example, to turn: -(3/4) % upside down. split_rational_alt_sign(-(3/4), N, D) gives: % N=3, D=(-(4)) % From which we can form: % -(4/3), % Which will yield numerator (-(4)) and denominator 3 via split_rational/3. %=============================================================================== split_rational_alt_sign(-(I/J), I, -J) :- integer(I), I>0, integer(J), J>0. split_rational_alt_sign(I/J, I, J) :- integer(I), I>0, integer(J), J>0. split_rational_alt_sign(I, I, 1) :- integer(I), I>0. split_rational_alt_sign(-I, I, (- 1)) :- integer(I), I>0. %=============================================================================== %=============================================================================== % make_base_rational(+Top, +Bottom, -Value). %------------------------------------------------------------------------------- % Given a signed integer Top and a (strictly positive) integer Bottom, form % the (unique) rational literal whose Value is that of the expression % Top/Bottom. %=============================================================================== make_base_rational(Top, Bottom, Value) :- signed_integer(Top), AbsTop is abs(Top), integer(Bottom), Bottom > 0, ( Bottom = 1, Value = Top ; gcd(AbsTop, Bottom, GCD), GCD > 0, ( GCD = 1, ( Top = AbsTop, !, Value = Top / Bottom ; Value = - (AbsTop / Bottom) ) ; ReducedAbsTop iss AbsTop div GCD, ReducedBottom iss Bottom div GCD, ( ReducedBottom = 1, !, ( Top = AbsTop, !, Value = ReducedAbsTop ; Value = (- ReducedAbsTop) ) ; Top = AbsTop, !, Value = ReducedAbsTop / ReducedBottom ; Value = - (ReducedAbsTop / ReducedBottom) ) ) ), !. %=============================================================================== %############################################################################### % TESTS %############################################################################### :- evaluate_rational_expression((1/2) ** 0, 1). :- evaluate_rational_expression((1/2) ** 1, (1/2)). :- evaluate_rational_expression((1/2) ** 2, (1/4)). :- evaluate_rational_expression((1/2) ** 3, (1/8)). :- evaluate_rational_expression((1/2) ** 4, (1/16)). :- evaluate_rational_expression((1/2) ** 5, (1/32)). :- evaluate_rational_expression((1/2) ** -1, 2). :- evaluate_rational_expression((1/2) ** -2, 4). %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/gauge_simplifier.pro0000644000175000017500000007102211753202337020644 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. 
The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Modified Sicstus gauge.pl for viewing profile data collected over % multiple Simplifier executions. % % All changes to gauge.pl are clearly marked. %############################################################################### /* Copyright (C) 1995, 1998, Swedish Institute of Computer Science. */ % File : gauge.pl % Author : Stefan Andersson % Updated : 10 May 2000 % Purpose : Visualizer for the profiler :- module(gauge, [view/1]). :- meta_predicate view(:). :- dynamic tcl_variable/2. :- discontiguous tcl_proc/1. :- use_module(library(types), [ must_be/4 ]). :- use_module(library(lists), [ nth1/3 ]). :- use_module(library(tcltk), [ tcl_delete/1, tcl_eval/3, tk_new/2, tk_main_window/2, tk_destroy_window/1, tk_next_event/2 ]). :- load_files(library(context), [when(compile_time), if(changed)]). %% Defines context expansion keys :- ctxt_items([system,interp,header,canvas,value_info,help]). %% These are the default settings for some TCL variables. We want to %% store them as Prolog facts since we use the same settings as default %% the next time view/1 is called. To add persistency to a Tcl variable, %% just add a tcl_variable/2 fact for it. Note that these facts are %% updated when the Gauge window is closed. %% tcl_variable(TclVarName,Default). %% Settings tcl_variable('specvar','calls'). tcl_variable('resvar','predicate'). tcl_variable('sortvar','descending_values'). tcl_variable('scalevar','lin'). %% [PM] 3.11.1 'all' is rather pointless %% tcl_variable('showvar','all'). tcl_variable('showvar','nonzero'). tcl_variable('viewfont','Helvetica 10'). %% Printing tcl_variable('PrintCmd','lpr -Pprinter'). tcl_variable('SaveFile','0'). tcl_variable('SaveFileName','histogram.ps'). tcl_variable('ColorMode','color'). %%----------------------------------------------------------------------- view(System) :- must_be(System, pred_spec_tree, view(System), 1), ctxt(Ctxt, [system-System, interp-Interp]), Extra = [], % Extra = [top_level_events], % [PM] 4.0 tk_new([name('Gauge')|Extra], Interp), set_tcl_vars(Ctxt), % Update Tcl variables create_window(Ctxt), create_procs(Ctxt), tk_next_event(Interp, Event), on_event(Event, Ctxt, barval([],0)). %%----------------------------------------------------------------------- create_window(Ctxt) :- LabelLook = [min(relief),flat], FrameLook = [min(relief),sunken,min(borderwidth),2], %% This is to do proper event handling when the user closes %% the window using window manager functions. 
my_eval([wm,protocol,.,'WM_DELETE_WINDOW',br([prolog_event,quit])],Ctxt), %% Needed under some Unix environments since Tk has a bug that %% makes the background color of some widgets (notably labels) %% to be wrong my_eval([option,add,dq('*background'),sqb([.,cget,min(bg)])],Ctxt), Panel = +panel, frame(Panel, [], Ctxt), Settings = +settings, create_settings_panel(Settings, Ctxt, FrameLook, LabelLook), ValueInfo = Panel+value_info, create_value_info(ValueInfo, Ctxt, FrameLook, LabelLook), Buttons = Panel+button, create_button_panel(Buttons, Ctxt, FrameLook), pack([ValueInfo,Buttons], [min(side),top], Ctxt), MainView = +main_view, create_main_view(MainView, Ctxt, FrameLook, LabelLook), pack([MainView], [min(padx),2,min(pady),2,min(side),left,min(fill),both,min(expand),true], Ctxt), pack([Settings,ValueInfo,Panel], [min(padx),2,min(pady),2,min(side),top,min(fill),x], Ctxt). create_settings_panel(Path, Ctxt, _, _) :- PackOptions = [min(side),top,min(anchor),w,min(fill),x,min(padx),2,min(pady),2], frame(Path, [min(relief),groove,min(borderwidth),4], Ctxt), conc_path(Path, heading, Heading), label(Heading,'Settings',[],Ctxt), %% Specifications SpecVar = specvar, conc_path(Path, spec, Spec), optionmenu(Spec,'Specification',SpecVar, ['Calls', 'Instructions','Choice Points'], [calls,instructions,choice_points], Ctxt), %% Predicate/Clause ResVar = resvar, conc_path(Path, res, Res), optionmenu(Res,'Resolution',ResVar, ['Predicate','Clause'], [predicate,clause], Ctxt), %% Sort Order SortVar = sortvar, conc_path(Path, sort, Sort), optionmenu(Sort,'Sort Order',SortVar, ['Alphabetic','Descending Values','Ascending Values', 'Top 40'], [alphabetic,descending_values,ascending_values, desc40], Ctxt), %% Scale ScaleVar= scalevar, conc_path(Path, scale, Scale), optionmenu(Scale,'Scale',ScaleVar, ['Linear','Logarithmic'], [lin,log], Ctxt), %% Show ShowVar= showvar, conc_path(Path, show, Show), optionmenu(Show,'Show',ShowVar, ['All','Nonzero only'], [all,nonzero], Ctxt), %% Font FontVar= viewfont, conc_path(Path, font, Font), optionmenu(Font,'Font',FontVar, ['Helvetica 8', 'Helvetica 10', 'Helvetica 12', 'Helvetica 14', 'Times 8', 'Times 10', 'Times 12', 'Times 14'], ['Helvetica 8', 'Helvetica 10', 'Helvetica 12', 'Helvetica 14', 'Times 8', 'Times 10', 'Times 12', 'Times 14'], Ctxt), pack([Heading,Spec,Res,Sort,Scale,Show,Font],PackOptions,Ctxt). create_value_info(Path, Ctxt, FrameLook, _) :- frame(Path, [FrameLook], Ctxt), conc_path(Path, name, ValueInfoName), conc_path(Path, value, ValueInfoValue), conc_path(Path, relative, ValueInfoRel), path_to_widget(ValueInfoName, ValueInfoNameW), path_to_widget(ValueInfoValue, ValueInfoValueW), path_to_widget(ValueInfoRel, ValueInfoRelW), ctxt(Ctxt, [value_info-vi(ValueInfoNameW,ValueInfoValueW, ValueInfoRelW)]), label(ValueInfoName, '', [min(width),24], Ctxt), label(ValueInfoValue, '', [min(width),24], Ctxt), label(ValueInfoRel, '', [min(width),24], Ctxt), pack([ValueInfoName,ValueInfoValue,ValueInfoRel],[], Ctxt). 
create_button_panel(Path, Ctxt, _FrameLook) :- ButtonLook = [], frame(Path, [], Ctxt), conc_path(Path, button_left, ButtonBox), frame(ButtonBox, [], Ctxt), conc_path(ButtonBox, calc, Calc), conc_path(ButtonBox, reset, Reset), conc_path(ButtonBox, print, Print), conc_path(ButtonBox, help, Help), conc_path(ButtonBox, quit, Quit), button(Calc, 'Calculate', br([prolog_event,'calc($specvar,$resvar,$sortvar,$scalevar)']), ButtonLook, Ctxt), button(Reset, 'Reset', br([prolog_event,reset]), ButtonLook, Ctxt), button(Print, 'Print', br([prolog_event,print]), ButtonLook, Ctxt), button(Help, 'Help', br([prolog_event,help]), ButtonLook, Ctxt), button(Quit, 'Quit', br([prolog_event,quit]), ButtonLook, Ctxt), pack([ButtonBox], [min(side),left,min(fill),x], Ctxt), pack([Calc,Reset,Print,Help,Quit], [min(side),top,min(anchor),w,min(fill),x], Ctxt). create_main_view(Path, Ctxt, FrameLook, LabelLook) :- frame(Path, [], Ctxt), conc_path(Path, scview, ScrollView), frame(ScrollView, [], Ctxt), conc_path(ScrollView, view, View), conc_path(ScrollView, scrollbar, ScrollBar), my_eval([set,xsize,450], Ctxt), my_eval([set,ysize,'15c'], Ctxt), path_to_widget(View, ViewW), path_to_widget(ScrollBar, ScrollBarW), my_eval([canvas,ViewW, min(width),'$xsize', min(height),'$ysize', min(yscrollcommand),dq([ScrollBarW,set]), min(background),white, min(relief),sunken, min(borderwidth),2], Ctxt), my_eval([scrollbar,ScrollBarW,min(command),dq([ViewW,yview])], Ctxt), pack([View], [min(side),left,min(fill),both,min(expand),true], Ctxt), pack([ScrollBar], [min(side),right,min(fill),y], Ctxt), conc_path(Path, header, Header), conc_path(Header, header_label, HeaderLabel), frame(Header, FrameLook, Ctxt), label(HeaderLabel, '', LabelLook, Ctxt), pack([Header], [min(side),top,min(fill),x], Ctxt), pack([ScrollView], [min(side),top,min(fill),both,min(expand),true], Ctxt), pack([HeaderLabel], [], Ctxt), path_to_widget(HeaderLabel, HeaderLabelW), ctxt(Ctxt, [canvas-ViewW, header-HeaderLabelW]). %%----------------------------------------------------------------------- %% Collection of widgets button(Path, Text, Command, Look, Ctxt) :- path_to_widget(Path, Widget), my_eval([button,Widget,min(command),Command,min(text),dq([Text])|Look], Ctxt). radiobutton(Path, Text, Var, Look, Ctxt) :- last_item(Path, Value), path_to_widget(Path, Widget), my_eval([radiobutton,Widget, min(variable),Var, min(value),Value, min(text),dq([Text])|Look], Ctxt). label(Path, Text, Look, Ctxt) :- path_to_widget(Path, Widget), my_eval([label,Widget,min(text),dq([Text])|Look], Ctxt). %% Shows the contents of a variable label_var(Path, Var, Ctxt) :- path_to_widget(Path, Widget), my_eval([label,Widget,min(textvariable),Var], Ctxt). frame(Path, Look, Ctxt) :- path_to_widget(Path, Widget), my_eval([frame,Widget|Look], Ctxt). 
optionmenu(Path,Title,Var,Labels,Values,Ctxt) :- path_to_widget(Path,Widget), my_eval([frame,Widget,min(relief),groove,min(borderwidth),2],Ctxt), conc_path(Path,menubutton,MenuButtonPath), conc_path(MenuButtonPath,menu,MenuPath), conc_path(Path,label, LabelPath), path_to_widget(MenuButtonPath,MenuButton), path_to_widget(MenuPath,Menu), path_to_widget(LabelPath,Label), my_eval([menubutton,MenuButton,min(text),dq(Title), min(indicatoron),true, min(menu),Menu, min(relief),raised, min(borderwidth),1], Ctxt), my_eval([menu,Menu],Ctxt), atom_codes(Var, VarCodes), append(VarCodes, "_label", LabelVarCodes), my_eval([label,Label,min(textvariable),codes(LabelVarCodes)], Ctxt), pack([MenuButtonPath],[min(side),top,min(anchor),w],Ctxt), pack([LabelPath],[min(side),top,min(anchor),e],Ctxt), optionmenu_addentries(Menu,VarCodes,LabelVarCodes,Labels,Values,Ctxt). %% [MC] 3.8.6: made determinate optionmenu_addentries(_,_,_,[],[],_) :- !. optionmenu_addentries(Menu,VarCodes,LabelVarCodes, [Label|Labels],[Value|Values],Ctxt) :- my_eval([Menu,add,radiobutton, min(variable),codes(VarCodes), min(label),dq(Label), min(value),dq(Value), min(command),br([set,codes(LabelVarCodes),dq(Label)])], Ctxt), %% Set the initial value in the label my_eval([if,br([codes([0'$|VarCodes]),==,br(Value)]),br([set,codes(LabelVarCodes),dq(Label)])], Ctxt), optionmenu_addentries(Menu,VarCodes,LabelVarCodes,Labels,Values,Ctxt). %%----------------------------------------------------------------------- %% Widget id's (paths) are represented as +a+b+c (for .a.b.c). conc_path((+), Item, Path) :- !, Path = +Item. conc_path(Path, Item, Path+Item). pack(Paths, Options, Ctxt) :- wrap(Paths, Widgets, Options), my_eval([pack|Widgets], Ctxt). wrap([]) --> []. wrap([P|Ps]) --> [Widget], {path_to_widget(P,Widget)}, wrap(Ps). path_to_widget(+, .). path_to_widget(+Item, dot([Item])). path_to_widget(Path+Item, dot(List)) :- path_to_widget(Path, List, [Item]). path_to_widget(+Item) --> [Item]. path_to_widget(Path+Item) --> path_to_widget(Path), [Item]. last_item(+Item, Item). last_item(_+Item, Item). my_eval(Msg, Ctxt) :- ctxt(Ctxt, [interp-Interp]), tcl_eval(Interp, Msg, _). %%----------------------------------------------------------------------- %% Event handling on_event(quit, Ctxt, _) :- !, ctxt(Ctxt, [interp-Interp]), get_tcl_vars(Ctxt), % Store the new values of Tcl variables tk_main_window(Interp, Window), tk_destroy_window(Window), tcl_delete(Interp). on_event(Event, Ctxt, BarVal0) :- on_event(Event, Ctxt, BarVal0, BarVal), ctxt(Ctxt, [interp-Interp]), tk_next_event(Interp, NextEvent), on_event(NextEvent, Ctxt, BarVal). on_event(calc(Spec,Res,SortOrder,Scale), Ctxt, _, BarVal) :- !, ctxt(Ctxt, [system-System, header-HeaderW, interp-Interp]), %####################################################################### % BEGIN MODIFICATION %####################################################################### user:usage_data(Spec, Res, UnsortedBars), %####################################################################### % END MODIFICATION %####################################################################### %% If the user has chosen so, remove zero-values ( tcl_eval(Interp,[set,showvar],ShowValue), ShowValue == "nonzero" -> remove_zero_values(UnsortedBars,FilteredBars) ; FilteredBars = UnsortedBars ), sort_bars(SortOrder, FilteredBars, Bars), Txt = format('~a/~a. ~a. ~a. 
System is ~w', [Spec,Res,SortOrder,Scale,System]), my_eval([HeaderW,configure,min(text),br(Txt)], Ctxt), draw(Bars, Scale, TotalValue, Ctxt), BarVal = barval(Bars,TotalValue). on_event(reset, Ctxt, BarVal0, BarVal) :- !, BarVal0 = BarVal, ctxt(Ctxt, [system-System]), profile_reset(System). on_event(show_value(BarNr), Ctxt, BarVal0, BarVal) :- !, BarVal0 = BarVal, show_value(BarNr, BarVal, Ctxt). on_event(help, Ctxt, BarVal0, BarVal) :- !, BarVal0 = BarVal, help(Ctxt). on_event(print, Ctxt, BarVal0, BarVal) :- !, BarVal0 = BarVal, print_chart(Ctxt). on_event(_, _, BarVal, BarVal). show_value(0, _, Ctxt) :- !, my_eval(bell, Ctxt). show_value(BarNr, BarVal, Ctxt) :- BarVal = barval(Bars,TotalValue), ctxt(Ctxt, [value_info-vi(NameW,ValW,RelW)]), nth1(BarNr, Bars, Pred-Value), ( TotalValue=:=0 -> RelValue=1.0 ; RelValue is Value/TotalValue ), my_eval([NameW,configure,min(text), br(write(Pred))], Ctxt), my_eval([ValW,configure,min(text), dq(['Value: ',Value])], Ctxt), my_eval([RelW,configure,min(text), dq(format('Relative Value: ~2g',[RelValue]))], Ctxt). draw(Bars, Scale, TotalValue, Ctxt) :- length(Bars, NoBars), open_null_stream(Stream), max_values(Bars, 0, 0, 0, MaxValue, MaxLabelWidth, TotalValue, Stream, Ctxt), close(Stream), ctxt(Ctxt, [canvas-CanvasW]), my_eval([draw_init,Scale,CanvasW,MaxValue,MaxLabelWidth,NoBars], Ctxt), draw_bars(Scale, Bars, Ctxt). %% [PM] 3.10.2 SPRM 7251 %% A note on syntax of quoted atoms. ISO does not allow unbackslashed newline in quoted atoms. %% \n is a newline in ISO and SICStus mode %% \ is ignored in ISO and SICStus mode %% \n\ embeds a single newline in the atom in a portable way tcl_proc( 'proc draw_init {scale bar_view max_val max_labelw no_bars} { \n\ global view \n\ global viewfont \n\ global bar_start \n\ global y \n\ global yinc \n\ global ymax \n\ global xsize \n\ global xscale \n\ global tk_version \n\ if {$scale == "log" && $max_val > 0} { \n\ set max_val [expr log($max_val)] \n\ } \n\ set view $bar_view \n\ bind $bar_view {show_bar_value %y} \n\ set bar_start [expr $max_labelw + 10] \n\ set y 0 \n\ set yinc 24 \n\ set ymax [expr $no_bars*$yinc] \n\ $bar_view configure -scrollregion "0 0 $xsize $ymax" \n\ set xsize [winfo width $view] \n\ if {$max_val == 0} { \n\ set xscale 0 \n\ } else { \n\ set xscale [expr double($xsize-10-$bar_start)/$max_val] \n\ } \n\ $view addtag dead all \n\ $view delete dead \n\ }'). draw_bars(lin, Bars, Ctxt) :- draw_bars_lin(Bars, Ctxt). draw_bars(log, Bars, Ctxt) :- draw_bars_log(Bars, Ctxt). draw_bars_lin([], _). draw_bars_lin([Pred-Val|Rest], Ctxt) :- my_eval([draw_bar_lin,br(write(Pred)),Val], Ctxt), draw_bars_lin(Rest, Ctxt). tcl_proc( 'proc draw_bar_lin {label value} { \n\ global view \n\ global viewfont \n\ global bar_start \n\ global y \n\ global yinc \n\ global xscale \n\ set y [expr $y+$yinc] \n\ $view create text 5 [expr $y-10] -text $label -anchor w -font $viewfont \n\ $view create rectangle \ $bar_start [expr $y-20] \ [expr $bar_start+$xscale*$value] $y \ -outline black -fill yellow \n\ }'). draw_bars_log([], _). draw_bars_log([Pred-Val|Rest], Ctxt) :- my_eval([draw_bar_log,br(write(Pred)),Val], Ctxt), draw_bars_log(Rest, Ctxt). 
tcl_proc( 'proc draw_bar_log {label value} { \n\ global view \n\ global viewfont \n\ global bar_start \n\ global y \n\ global yinc \n\ global xscale \n\ set y [expr $y+$yinc] \n\ if {$value > 0} { \n\ set value [expr log($value)] \n\ } \n\ $view create text 5 [expr $y-10] -text $label -anchor w -font $viewfont \n\ $view create rectangle \n\ $bar_start [expr $y-20] \n\ [expr $bar_start+$xscale*$value] $y \n\ -outline black -fill pink \n\ }'). %% Determine max value and max length of predicate/clause label max_values([], MaxVal, MaxLab, Total, MaxVal, MaxLab, Total, _, _). max_values([Pred-Val|Rest], MaxVal0, MaxLab0, Total0, MaxVal, MaxLab, Total, Stream, Ctxt) :- MaxVal1 is max(Val, MaxVal0), Total1 is Total0 + Val, labelwidth(Pred, Width, Ctxt), MaxLab1 is max(Width, MaxLab0), max_values(Rest, MaxVal1, MaxLab1, Total1, MaxVal, MaxLab, Total, Stream, Ctxt). labelwidth(Term,Width,Ctxt) :- ctxt(Ctxt, [interp-Interp]), tcl_eval(Interp,[font,measure,dq('$viewfont'),br(write(Term))], WidthString), number_codes(Width,WidthString). %% Print the histogram on a postscript printer or on file. print_chart(Ctxt) :- %% Open the dialog ctxt(Ctxt, [canvas-CanvasW]), my_eval(['PrintDialog',CanvasW],Ctxt). %% Update the Tcl variables with the stored settings set_tcl_vars(Ctxt) :- tcl_variable(TclVar,TclValue), my_eval([set,TclVar,dq(TclValue)],Ctxt), fail. set_tcl_vars(_). %% Update the tcl_variable/2 facts get_tcl_vars(Ctxt) :- ctxt(Ctxt, [interp-Interp]), findall(V,tcl_variable(V,_),VarNames), member(VarName,VarNames), tcl_eval(Interp, [set,VarName], Value), atom_codes(ValueAtom,Value), retract(tcl_variable(VarName,_)), assert(tcl_variable(VarName,ValueAtom)), fail. get_tcl_vars(_). %% Variables tcl_proc( ' \n\ # Set the temporary directory. \n\ set tmplist "[lindex [array get env TMP] 1] [lindex [array get env TEMP] 1] [lindex [array get env TMPPATH] 1]" \n\ if {$tcl_platform(platform) == "unix"} { \n\ # Unix \n\ set dflt {/tmp} \n\ } else { \n\ # Windows \n\ set dflt {c:/temp} \n\ # Backslash to slash, needed under Windows \n\ regsub -all {\\\\} $tmplist "/" tmplist; # We have to use \\\\ instead \n\ #of \\ because of SICStus quoting and Tcl each wants quoting \n\ } \n\ set TempPrintFile [format "%s/gaugetmp.ps" [lindex "$tmplist $dflt" 0]] \n\ #puts $TempPrintFile \n\ '). 
%% The main printing dialog tcl_proc( ' \n\ # The main printing command \n\ proc PrintDialog {Canvas} { \n\ global PrintCmd ColorMode SaveFile SaveFileName \n\ \n\ toplevel .print \n\ wm title .print "Postscript print:" \n\ \n\ # Printing command \n\ frame .print.cmd \n\ label .print.cmd.printl -text "Print command:" \n\ if {$SaveFile} { \n\ entry .print.cmd.printe -textvariable PrintCmd \ -state disabled -bg gray \n\ } else { \n\ entry .print.cmd.printe -textvariable PrintCmd \ -state normal -bg white \n\ } \n\ pack .print.cmd.printl .print.cmd.printe -side left -padx 4 -pady 4 \n\ \n\ # Save to file \n\ frame .print.savefile \n\ checkbutton .print.savefile.sfcb -text "Save to file" \ -variable SaveFile \ -command {if {$SaveFile} \ {.print.savefile.filee configure -state normal -bg white;\ .print.cmd.printe configure -state disabled -bg gray;\ .print.buttons.print configure -text Save} \ else {.print.savefile.filee configure -state disabled -bg gray; \ .print.cmd.printe configure -state normal -bg white;\ .print.buttons.print configure -text Print}} \n\ label .print.savefile.filel -text "File name:" \n\ if {$SaveFile} { \n\ entry .print.savefile.filee -bg white -textvariable SaveFileName \ -state normal \n\ } else { \n\ entry .print.savefile.filee -textvariable SaveFileName \ -state disabled -bg gray \n\ } \n\ pack .print.savefile.sfcb .print.savefile.filel \ .print.savefile.filee -side left -padx 4 -pady 4 \n\ \n\ # Color mode \n\ frame .print.col \n\ radiobutton .print.col.colorrb -variable ColorMode -value color -text "Color" \n\ radiobutton .print.col.grayrb -variable ColorMode -value gray -text "Gray" \n\ radiobutton .print.col.monorb -variable ColorMode -value mono -text "Mono" \n\ pack .print.col.colorrb .print.col.grayrb .print.col.monorb \ -side left -padx 4 -pady 4 \n\ \n\ # Buttons \n\ frame .print.buttons \n\ button .print.buttons.print -text "Print" -width 8 \ -command "PrintChart $Canvas;destroy .print" \n\ button .print.buttons.cancel -text "Cancel" -width 8 \ -command "destroy .print" \n\ pack .print.buttons.print .print.buttons.cancel \ -side left -anchor c -padx 4 -pady 4 -expand true \n\ \n\ # Pack all frames \n\ pack .print.cmd .print.savefile .print.col .print.buttons \ -side top -anchor w -pady 4 -padx 4 -fill x \n\ }'). %% The actual printing procedure tcl_proc( ' \n\ proc PrintChart { Canvas } { \n\ global PrintCmd ColorMode SaveFile SaveFileName TempPrintFile \n\ \n\ if {$SaveFile} { \n\ $Canvas postscript -colormode $ColorMode -file $SaveFileName \n\ } else { \n\ $Canvas postscript -colormode $ColorMode -file $TempPrintFile \n\ eval exec $PrintCmd $TempPrintFile \n\ file delete $TempPrintFile \n\ } \n\ }'). %%----------------------------------------------------------------------- tcl_proc( 'proc show_bar_value {yhit} { \n\ global yinc \n\ global view \n\ global ymax \n\ set viewy [expr round([$view canvasy $yhit])] \n\ if {$viewy < 0} { \n\ set viewy 0 \n\ } else {if {$viewy >= $ymax} { \n\ set viewy [expr $ymax-1] \n\ }} \n\ set bar_nr [expr $viewy/$yinc + 1] \n\ prolog_event "show_value($bar_nr)" \n\ }'). %%----------------------------------------------------------------------- %% Install the Tcl procedures create_procs(Ctxt) :- tcl_proc(ProcAtom), ctxt(Ctxt, [interp-Interp]), tcl_eval(Interp, ProcAtom, _), fail. create_procs(_). %% Remove all entries where the value is 0. %% [MC] 3.8.6: made determinate remove_zero_values([],[]). remove_zero_values([Key-Value|Org],[Key-Value|New]) :- Value =\= 0, !, remove_zero_values(Org,New). 
remove_zero_values([_|Org],New) :- remove_zero_values(Org,New). %%----------------------------------------------------------------------- %% Sort and select using built-in keysort sort_bars(alphabetic, Bars0, Bars) :- sort(Bars0, Bars). sort_bars(descending_values, Bars0, Bars) :- swap(Bars0, Bars1), keysort(Bars1, Bars2), swap_rev(Bars2, [], Bars). sort_bars(ascending_values, Bars0, Bars) :- swap(Bars0, Bars1), keysort(Bars1, Bars2), swap(Bars2, Bars). sort_bars(desc40, Bars0, Bars) :- swap(Bars0, Bars1), keysort(Bars1, Bars2), swap_rev(Bars2, [], Bars3), firstn(40, Bars3, Bars). swap([], []). swap([K-V|T], [V-K|ST]) :- swap(T, ST). swap_rev([], L, L). swap_rev([K-V|T], T0, ST) :- swap_rev(T, [V-K|T0], ST). firstn(0, _, []) :- !. firstn(N, L, L1) :- firstn1(L, N, L1). firstn1([], _, []). firstn1([H|T], N, [H|T1]) :- N1 is N-1, firstn(N1, T, T1). %% [PM] 3.11.1 Merge two key-sorted lists by adding the values when a key occurs in both lists %% A key must not occur more than once in each list keyed_add([], KV2s, KVs) :- KVs = KV2s. keyed_add([K-V|KV1s], KV2s, KVs) :- keyed_add1(KV2s, KV1s, K,V, KVs). keyed_add1([], KV1s, K1,V1, KVs) :- KVs = [K1-V1|KV1s]. keyed_add1([K2-V2|KV2s], KV1s, K1,V1, KVs) :- ( K2 @< K1 -> KVs = [K2-V2|KVs1], keyed_add1(KV2s, KV1s, K1,V1, KVs1) ; K1 @< K2 -> KVs = [K1-V1|KVs1], keyed_add1(KV1s, KV2s, K2,V2, KVs1) ; % K1 == K2 -> V is V1+V2, KVs = [K1-V|KVs1], keyed_add(KV1s, KV2s, KVs1) ). %%----------------------------------------------------------------------- %% Simplified help, puts up a window with a text widget and reads a %% file in it. No hyper text yet. tcl_proc( 'proc help_window {} { \n\ toplevel .help \n\ text .help.text -yscrollcommand ".help.scroll set" \n\ scrollbar .help.scroll -command ".help.text yview" \n\ button .help.close -text "Close" -command "destroy .help" \n\ pack .help.scroll -side right -fill y \n\ pack .help.text -side top \n\ pack .help.close -side bottom -fill x \n\ }'). tcl_proc( 'proc help_file file { \n\ .help.text delete 1.0 end \n\ set f [open $file] \n\ while {![eof $f]} { \n\ .help.text insert end [read $f 1024] \n\ } \n\ close $f \n\ }'). help(Ctxt) :- ctxt(Ctxt, [interp-Interp]), tcl_eval(Interp,'winfo exists .help',Exists), ( Exists == "0" -> tcl_eval(Interp, help_window, _), absolute_file_name(library('gauge.txt'), File), tcl_eval(Interp, list([help_file,File]), _) ; tcl_eval(Interp, 'raise .help', _) ). spark-2012.0.deb/simplifier/data__declarations.pro0000644000175000017500000003620511753202337021135 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. 
% % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Provides access to all information related to declarations. Most of this % information will be retrieved from the provided declarations file. %############################################################################### :- module(data__declarations, [get_declarations_used_identifier/1, add_declarations_used_identifier/1, get_declarations_variable/2, add_declarations_variable/2, get_declarations_constant/2, add_declarations_constant/2, get_declarations_unbounded_function/3, add_declarations_unbounded_function/3, get_declarations_function/3, add_declarations_function/3, get_declarations_type/2, add_declarations_type/2, get_declarations_record_function/6, add_declarations_record_function/6, pre_calculate_legacy_fdl/0, type/2, find_core_type/2, function/3, record_function/6, type_alias/2, enumeration/2, mk__function_name/3, function_template/3]). %############################################################################### % DEPENDENCIES %############################################################################### :- use_module('data__formats.pro', [add_state/2, add_type/2]). :- set_prolog_flag(double_quotes, chars). %############################################################################### % TYPES %############################################################################### :- add_type('TypeAttributes', [abstract, alias('AliasTypeId_Atom'), array('IndexTypeId_AtomList', 'ElementTypeId_Atom'), enumeration('EnumId_AtomList'), record('FieldList'), sequence('ElementTypeId_Atom'), set('ElementTypeId_Atom')]). :- add_type('UnboundedClass', [mk_array, mk_record]). :- add_type('Field', [field('TypeId_Atom', 'FieldId_Atom')]). :- add_type('Mode', [update, access]). :- add_type('InternalUnboundedClass', [array, record]). :- add_type('InternalTypeAttributes', [abstract, array('IndexTypeId_AtomList', 'ElementTypeId_Atom'), enumerated, record('FieldIdFieldTypeId_ListList'), sequence('ElementTypeId_Atom'), set('ElementTypeId_Atom')]). %############################################################################### % DATA %############################################################################### :- add_state(get_declarations_used_identifier, get_declarations_used_identifier('Id_Atom')). :- dynamic(get_declarations_used_identifier/1). :- add_state(get_declarations_variable, get_declarations_variable('TypeId_Atom', 'VarId_Atom')). :- dynamic(get_declarations_variable/2). :- add_state(get_declarations_constant, get_declarations_constant('TypeId_Atom', 'ConstId_Atom')). :- dynamic(get_declarations_constant/2). :- add_state(get_declarations_unbounded_function, get_declarations_unbounded_function('TypeId_Atom', 'Function_Atom', 'UnboundedClass')). :- dynamic(get_declarations_unbounded_function/3). :- add_state(get_declarations_function, get_declarations_function('ReturnTypeId_Atom', 'Function_Atom', 'ArgTypeId_AtomList')). :- dynamic(get_declarations_function/3). :- add_state(get_declarations_type, get_declarations_type('TypeId_Atom', 'TypeAttributes')). :- dynamic(get_declarations_type/2). :- add_state(get_declarations_record_function, get_declarations_record_function('UniqueFieldId_Int', 'Uninstantiated_FunctorN', 'Mode', 'FieldId_Atom', 'Args_VarList', 'TypeId_Atom')). :- dynamic(get_declarations_record_function/6). 
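
% Illustration (editor's sketch; 'point' is a hypothetical type identifier):
% a record type captured from the declarations file as
%
%   get_declarations_type(point, record([field(integer, x), field(integer, y)]))
%
% is later reshaped by pre_calculate_legacy_fdl/0 (see calculate_type below)
% into the legacy form
%
%   type(point, record([[x, integer], [y, integer]])).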
:- add_state(function, [function('Function_Atom', 'ArgTypeId_AtomList', 'ReturnTypeId_Atom')]). :- dynamic(function/3). :- add_state(record_function, record_function('UniqueFieldId_Int', 'Uninstantiated_FunctorN', 'Mode', 'FieldId_Atom', 'Args_VarList', 'TypeId_Atom')). :- dynamic(record_function/6). :- add_state(mk__function_name, mk__function_name('Function_Atom', 'TypeId_Atom', 'InternalUnboundedClass')). :- dynamic(mk__function_name/3). :- add_state(type_alias, type_alias('TypeId_Atom', 'AliasTypeId_Atom')). :- dynamic(type_alias/2). :- add_state(type, type('TypeId_Atom', 'InternalTypeAttributes')). :- dynamic(type/2). :- add_state(enumeration, enumeration('TypeId_Atom', 'EnumId_AtomList')). :- dynamic(enumeration/2). :- add_state(function_template, function_template('Function_Pred', 'VarList', 'Function_Atom')). :- dynamic(function_template/3). %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % Add. %=============================================================================== add_declarations_used_identifier(Id_Atom):- assertz(get_declarations_used_identifier(Id_Atom)), !. add_declarations_variable(TypeId_Atom, VarId_Atom):- assertz(get_declarations_variable(TypeId_Atom, VarId_Atom)), !. add_declarations_constant(TypeId_Atom, ConstId_Atom):- assertz(get_declarations_constant(TypeId_Atom, ConstId_Atom)), !. add_declarations_unbounded_function(TypeId_Atom, Function_Atom, UnboundedClass):- assertz(get_declarations_unbounded_function(TypeId_Atom, Function_Atom, UnboundedClass)), !. add_declarations_function(ReturnTypeId_Atom, Function_Atom, ArgTypeId_AtomList):- assertz(get_declarations_function(ReturnTypeId_Atom, Function_Atom, ArgTypeId_AtomList)), !. add_declarations_type(TypeId_Atom, TypeAttributes):- assertz(get_declarations_type(TypeId_Atom, TypeAttributes)), !. add_declarations_record_function(UniqueFieldId_Int, Uninstantiated_FunctorN, Mode, FieldId_Atom, Args_VarList, TypeId_Atom):- assertz(get_declarations_record_function(UniqueFieldId_Int, Uninstantiated_FunctorN, Mode, FieldId_Atom, Args_VarList, TypeId_Atom)), !. %=============================================================================== %=============================================================================== % Refactor. %=============================================================================== pre_calculate_legacy_fdl:- calculate_function, calculate_record_function, calculate_mk__function_name, calculate_type_alias, calculate_type, calculate_enumeration, calculate_function_template, !. %------------------------------------------------------------------------------- calculate_function:- get_declarations_function(ReturnTypeId_Atom, Function_Atom, ArgTypeId_AtomList), assertz(function(Function_Atom, ArgTypeId_AtomList, ReturnTypeId_Atom)), fail. calculate_function:- !. %------------------------------------------------------------------------------- calculate_record_function:- get_declarations_record_function(UniqueFieldId_Int, Uninstantiated_FunctorN, Mode, FieldId_Atom, Args_VarList, TypeId_Atom), assertz(record_function(UniqueFieldId_Int, Uninstantiated_FunctorN, Mode, FieldId_Atom, Args_VarList, TypeId_Atom)), fail. calculate_record_function:- !. 
%------------------------------------------------------------------------------- calculate_mk__function_name:- get_declarations_unbounded_function(TypeId_Atom, Function_Atom, mk_array), assertz(mk__function_name(Function_Atom, TypeId_Atom, array)), fail. calculate_mk__function_name:- get_declarations_unbounded_function(TypeId_Atom, Function_Atom, mk_record), assertz(mk__function_name(Function_Atom, TypeId_Atom, record)), fail. calculate_mk__function_name:- !. %------------------------------------------------------------------------------- calculate_type_alias:- get_declarations_type(TypeId_Atom, alias(AliasTypeId_Atom)), assert(type_alias(TypeId_Atom, AliasTypeId_Atom)), fail. calculate_type_alias:- !. %------------------------------------------------------------------------------- calculate_type:- get_declarations_type(TypeId_Atom, record(FieldList)), findall([FieldId_Atom, FieldTypeId_Atom], member(field(FieldTypeId_Atom, FieldId_Atom), FieldList), FIELD_LIST), assert(type(TypeId_Atom, record(FIELD_LIST))), fail. calculate_type:- get_declarations_type(TypeId_Atom, abstract), assert(type(TypeId_Atom, abstract)), fail. calculate_type:- get_declarations_type(TypeId_Atom, array(IndexTypeId_AtomList, ElementTypeId_Atom)), assert(type(TypeId_Atom, array(IndexTypeId_AtomList, ElementTypeId_Atom))), fail. calculate_type:- get_declarations_type(TypeId_Atom, enumeration(_EnumId_AtomList)), assert(type(TypeId_Atom, enumerated)), fail. calculate_type:- get_declarations_type(TypeId_Atom, sequence(ElementTypeId_Atom)), assert(type(TypeId_Atom, sequence(ElementTypeId_Atom))), fail. calculate_type:- get_declarations_type(TypeId_Atom, set(ElementTypeId_Atom)), assert(type(TypeId_Atom, set(ElementTypeId_Atom))), fail. calculate_type:- !. %------------------------------------------------------------------------------- calculate_enumeration:- get_declarations_type(TypeId_Atom, enumeration(EnumId_AtomList)), assert(enumeration(TypeId_Atom, EnumId_AtomList)), fail. calculate_enumeration:- !. %------------------------------------------------------------------------------- calculate_function_template:- function(Function_Atom,ArgTypeId_AtomList,_ReturnTypeId_Atom), atom_chars(Function_Atom, Function_CharList), \+ append("upf_", _, Function_CharList), \+ append("fld_", _, Function_CharList), save_function_template(Function_Atom, ArgTypeId_AtomList), fail. calculate_function_template:- !. save_function_template(FUNCTION, ARG_TYPES) :- length(ARG_TYPES, LENGTH), form_function_var_list(LENGTH, VAR_LIST), FUNCTION_CALL =.. [FUNCTION|VAR_LIST], assertz(function_template(FUNCTION_CALL, VAR_LIST, FUNCTION)), !. form_function_var_list(1, [_]) :- !. form_function_var_list(2, [_,_]) :- !. form_function_var_list(3, [_,_,_]) :- !. form_function_var_list(4, [_,_,_,_]) :- !. form_function_var_list(5, [_,_,_,_,_]) :- !. form_function_var_list(6, [_,_,_,_,_,_]) :- !. form_function_var_list(7, [_,_,_,_,_,_,_]) :- !. form_function_var_list(8, [_,_,_,_,_,_,_,_]) :- !. form_function_var_list(9, [_,_,_,_,_,_,_,_,_]) :- !. form_function_var_list(10, [_,_,_,_,_,_,_,_,_,_]) :- !. form_function_var_list(N, [_,_,_,_,_|X]) :- N>10, N1 is N-5, !, form_function_var_list(N1, X), !. % Should not get here. form_function_var_list(0, []) :- !. %------------------------------------------------------------------------------- %=============================================================================== % find_core_type(+TYPE, -CORE_TYPE). 
%------------------------------------------------------------------------------- % Return the core type of TYPE as CORE_TYPE, or if TYPE does not have a % CORE_TYPE, return TYPE. %=============================================================================== find_core_type(TYPE, CORE_TYPE) :- type_alias(TYPE, CORE_TYPE), !. find_core_type(TYPE, TYPE) :- !. %=============================================================================== :- set_prolog_flag(double_quotes, codes). %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/newded.pro0000644000175000017500000001071211753202337016576 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % A HypList-free interface to various predicates such as infer(Formula, % HypList). (In the Proof Checker, infer(Formula) does not return a list % of hypotheses used. The Simplifier code was derived from the Checker % code, but with the additional argument in a number of predicates, to aid % construction of a simplification log recording its reasoning. The % HypList-free variants are still used by other bits of legacy code % inherited from the Checker, however, hence this module.) %############################################################################### %############################################################################### % DEPENDENCIES %############################################################################### %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### %############################################################################### % PREDICATES %############################################################################### deduce_formula(F, T, Hs) :- deduce(F, T, HL), sort(HL, Hs), !. %------------------------------------------------------------------------------- /* infer: interface to the inference engine for the expression simplifier. This checks to see if there is already an inference_depth_limit in place, and uses it if so, or imposes the default one if not. */ infer(Goal) :- inference_depth_limit(main, _), /* it exists */ !, infer_subgoal(Goal, _). infer(Goal) :- /* otherwise */ infer(Goal, _). 
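
% For example (editor's note, paraphrasing the module purpose above): a call
% such as infer(X > 0) from legacy Checker-derived code simply succeeds or
% fails, whereas the two-argument form infer(X > 0, Hs) additionally returns
% in Hs the hypotheses used, for inclusion in the simplification log.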
%------------------------------------------------------------------------------- simplification_is_on :- simplification(on), !, current_vc_number(N), !, \+ simplification(N, off), !. %------------------------------------------------------------------------------- standardisation_is_on :- standardisation(on), !, current_vc_number(N), !, \+ standardisation(N, off), !. %------------------------------------------------------------------------------- contradiction_hunt_is_on :- \+ contradiction_hunt(off), !, current_vc_number(N), !, \+ contradiction_hunt(N, off), !. %------------------------------------------------------------------------------- expression_reduction_is_on :- expression_reduction(on), !, current_vc_number(N), !, \+ expression_reduction(N, off), !. %------------------------------------------------------------------------------- substitution_elimination_is_on :- substitution_elimination(on), !, current_vc_number(N), !, \+ substitution_elimination(N, off), !. %------------------------------------------------------------------------------- rule_substitution_is_on :- rule_substitution(on), !, current_vc_number(N), !, \+ rule_substitution(N, off), !. %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/load__provenance.pro0000644000175000017500000004316411753202337020635 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Loads the provenance for this proof problem. This key % information is embedded into the main proof file. %############################################################################### %############################################################################### % MODULE %############################################################################### :- module(load__provenance, [load_provenance/0]). %############################################################################### % DEPENDENCIES %############################################################################### :- use_module('data__provenance.pro', [get_provenance_framework/1, add_provenance_banner/1, add_provenance_date_time/2, add_provenance_framework/1, add_provenance_proof_file_kind/1, add_provenance_subprogram_identifier/1]). :- use_module('ioutilities.pro', [throw_error/2, read_lines_from_file_as_char_list/3]). :- use_module('newutilities.pro', [implode_separator_content_list/3]). 
:- use_module('simplifier_ioutilities.pro', [retrieve_proof_file/1, convert_file_for_display/2, retrieve_proof_file_kind/1]). :- use_module('parseutilities.pro', [parse_atom/5, parse_atom_silent/4, parse_line/3, parse_char_sep_atom_list/6, parse_all_to_nothing/2, parse_nothing_to_all/2]). :- use_module('data__data_files.pro', [get_datafiles_vcg/1, get_datafiles_pfs/1]). %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### %############################################################################### % PREDICATES %############################################################################### :- set_prolog_flag(double_quotes, chars). %=============================================================================== % load_provenance. %------------------------------------------------------------------------------- % The provided proof file is investigated to establish the required provenance % information as described below. % % The kind of proof file is determined from which data files are available. % vcg data files => verification_conditions % pfs data files => path_functions % % Only the first 15 lines of the proof file are queried. This should, very % comfortably, enclose every banner ever generated for processing by the % Simplifier. % % The banner of the proof file must take the form: % 1st line of the file contains: ** % 2ed line of the file contains: anything. % 3rd line of the file contains: anything. % 4th line of the file contains: anything. % 5th line of the file contains: ** % % An error is raised if the banner is not present. % % An error is raised if the 1st line and 5th line of a present banner do not % exactly match. % % If the third line of the banner contains the word "Examiner" then the Simplifier % will operate in SPARK mode. Otherwise, the Simplifier will operate in % pascal mode. If the Simplifier is operating in pascal mode, a message will % be issued to that effect. (pascal mode is unusual - we don't want our % users to enter this mode accidentally). % % The remaining portion of the 15 lines are scanned for the following % components. These components may occur in any order. % % +A subprogram identifier must be present in the format below: % procedure P.Read % function Q.Read3.SafeRead % procedure PumpSwitch.PumpSwitchPT.OnInterruptHandler % task_type U_C.TT % % No subprogram identifier will lead to an error being raised. % A duplicate subprogram identifier will lead to an error being raised. % % NOTE: If operating in Pascal mode, the absence of a subprogram identifier % is not treated as an error. % % +A date may or may not be present in the file in the format below: % DATE : 7-JUN-1999 TIME : 15:41:20.73 % DATE : 12-APR-2006 15:37:05.57 % % A duplicate date will raise an error. % % Note that any other text in the remaining portion of the 15 lines will be % silently ignored. %=============================================================================== load_provenance:- % Detect and record the proof file kind. retrieve_proof_file_kind(ProofFileKind), add_provenance_proof_file_kind(ProofFileKind), % Retrieve the relevant proof file. retrieve_proof_file(File_Atom), % Retrieve the head of the file as (at most) 15 lines. 
read_lines_from_file_as_char_list(File_Atom, upToLine(15), CharList), % Process the provenance. process_provenance(CharList), !. %=============================================================================== % process_provenance(+Stream). %------------------------------------------------------------------------------- % The proof file provided as (Stream) is parsed to establish the required % provenance information. %=============================================================================== process_provenance(CharList):- % Process the banner. process_banner(CharList, Remaining_CharList), % Process the subprogram identifier. process_subprogram_identifier(Remaining_CharList), % Process datestamp, if present. process_datestamp(Remaining_CharList), !. %=============================================================================== % process_banner(+CharList, -Remaining_CharList). %------------------------------------------------------------------------------- % Retrieve and store the banner as provenance information, returning the remaining % characters. Raise errors accordingly. %=============================================================================== % Make this call visible to the spxref tool. :- public parse_banner/3. process_banner(CharList, Remaining_CharList):- % Get the banner. phrase(parse_banner([FirstAsteriskLine_Atom, SecondLine_Atom, ThirdLine_Atom, FourthLine_Atom, LastAsteriskLine_Atom]), CharList, Remaining_CharList), % Check that the asterisk lines match. matching_asterisk_lines(FirstAsteriskLine_Atom, LastAsteriskLine_Atom), % Store the well-formed banner. Do not store the asterisk lines. add_provenance_banner([SecondLine_Atom, ThirdLine_Atom, FourthLine_Atom]), % Determine framework from third line of banner. scan_for_framework(ThirdLine_Atom), !. % From above, banner could not be parsed. File is malformed. process_banner(_CharList, _Remaining_CharList):- retrieve_proof_file(File_Atom), convert_file_for_display(File_Atom, DisplayFile_Atom), throw_error('Malformed banner in file: ~a.', [DisplayFile_Atom]). %------------------------------------------------------------------------------- parse_banner([FirstAsteriskLine_Atom, SecondLine_Atom, ThirdLine_Atom, FourthLine_Atom, LastAsteriskLine_Atom]) --> % 1st line must contain an asterisk line. parse_asterisk_banner_line(FirstAsteriskLine_Atom), % 2ed line may contain anything. parse_line(SecondLine_Atom), % 3rd line may contain anything. parse_line(ThirdLine_Atom), % 4th line may contain anything. parse_line(FourthLine_Atom), % 5th line must contain an asterisk line. parse_asterisk_banner_line(LastAsteriskLine_Atom), !. %------------------------------------------------------------------------------- parse_asterisk_banner_line(AsteriskLine_Atom) --> parse_atom([space], zeroormore, LeadingWhiteSpace_Atom), parse_atom([asterisk], oneormore, Asterisks_Atom), parse_atom([space], zeroormore, ClosingWhiteSpace_Atom), parse_atom_silent([newline], one), {implode_separator_content_list('', [LeadingWhiteSpace_Atom, Asterisks_Atom, ClosingWhiteSpace_Atom], AsteriskLine_Atom)}, !. %------------------------------------------------------------------------------- % Asterisk lines match. matching_asterisk_lines(FirstAsteriskLine_m_LastAsteriskLine_m_Atom, FirstAsteriskLine_m_LastAsteriskLine_m_Atom):- !. % From above, asterisk lines do not match. 
matching_asterisk_lines(_FirstAsteriskLine_Atom, _LastAsteriskLine_Atom):- retrieve_proof_file(File_Atom), convert_file_for_display(File_Atom, DisplayFile_Atom), throw_error('Malformed banner in file: ~a. The bounding asterisk lines are different.', [DisplayFile_Atom]). %------------------------------------------------------------------------------- scan_for_framework(ThirdLine_Atom):- atom_chars(ThirdLine_Atom, ThirdLine_CharList), scan_for_framework_x(ThirdLine_CharList), !. %------------------------------------------------------------------------------- % Make this call visible to the spxref tool. :- public parse_spark_identifier/2. % Search for SPARK identifier. scan_for_framework_x(CharList):- phrase(parse_spark_identifier, CharList), add_provenance_framework(spark), !. % From above, failed to find spark identifier. This is pascal. scan_for_framework_x(_CharList):- add_provenance_framework(pascal), % SEPR:2307: Standardise warning and error messages. format('The Simplifier is operating in Pascal mode.\n', []), !. %------------------------------------------------------------------------------- parse_spark_identifier --> parse_nothing_to_all, "Examiner", parse_all_to_nothing, !. %=============================================================================== % process_subprogram_identifier(+Remaining_CharList). %------------------------------------------------------------------------------- % Retrieve and store the subprogram identifier. %=============================================================================== % Make this call visible to the spxref tool. :- public parse_subprogram_identifier/3. process_subprogram_identifier(Remaining_CharList):- phrase(parse_subprogram_identifier(SubprogramIdentifier_AtomList), Remaining_CharList), process_subprogram_identifier_x(SubprogramIdentifier_AtomList), !. %------------------------------------------------------------------------------- % One subprogram identifier located. % This is the correct situation. process_subprogram_identifier_x([SubprogramIdentifier_Atom]):- add_provenance_subprogram_identifier(SubprogramIdentifier_Atom), !. % Zero subprogram identifiers located. % Operating in Pascal mode. % This is not an error. process_subprogram_identifier_x([]):- get_provenance_framework(pascal), !. % Zero subprogram identifiers located. % This is an error. process_subprogram_identifier_x([]):- retrieve_proof_file(File_Atom), convert_file_for_display(File_Atom, DisplayFile_Atom), throw_error('Failed to locate a subprogram identifier in: ~p', [DisplayFile_Atom]). % From above, multiple subprogram identifiers located. % This is an error. process_subprogram_identifier_x(SubprogramIdentifier_AtomList):- retrieve_proof_file(File_Atom), convert_file_for_display(File_Atom, DisplayFile_Atom), throw_error('File: ~a contains multiple subprogram identifiers: ~p', [DisplayFile_Atom, SubprogramIdentifier_AtomList]). %------------------------------------------------------------------------------- parse_subprogram_identifier([H_SubprogramIdentifier_Atom | T_SubprogramIdentifier_AtomList]) --> parse_atom_silent([space], zeroormore), parse_subprogram_kind(SubprogramKind_Atom), % Newline is allowed below, as it could occur in some siv % files. This is a consequence of applying the wrap utility over the % whole output file. 
        parse_atom_silent([space, newline], oneormore),
        parse_char_sep_atom_list([alpha_numeric, under_score], [], '.', DottedName_AtomList),
        {implode_separator_content_list('.', DottedName_AtomList, SubprogramLocation_Atom)},
        parse_atom_silent([space], zeroormore),
        parse_atom_silent([newline], one),
        % Create the subprogram identifier.
        {implode_separator_content_list('', [SubprogramKind_Atom, ' ', SubprogramLocation_Atom], H_SubprogramIdentifier_Atom)},
        % Continue.
        parse_subprogram_identifier(T_SubprogramIdentifier_AtomList),
        !.

% From above, could not parse a subprogram identifier on this line.
% Try the next line.
parse_subprogram_identifier(SubprogramIdentifier_AtomList) -->
        parse_line(_Line_Atom),
        !,
        parse_subprogram_identifier(SubprogramIdentifier_AtomList).

% From above, could not parse another line. Reached the last line.
parse_subprogram_identifier([]) --> !.

%-------------------------------------------------------------------------------

parse_subprogram_kind(procedure) -->
        "procedure", !.

parse_subprogram_kind(function) -->
        "function", !.

parse_subprogram_kind('task_type') -->
        "task_type", !.

%===============================================================================
% process_datestamp(+Remaining_CharList).
%-------------------------------------------------------------------------------
% Retrieve and store the datestamp, if present.
%===============================================================================

% Make this call visible to the spxref tool.
:- public parse_datestamp/3.

process_datestamp(Remaining_CharList):-
        phrase(parse_datestamp(DateStamp_Tuple2List), Remaining_CharList),
        process_datestamp_x(DateStamp_Tuple2List),
        !.

%-------------------------------------------------------------------------------

% One datestamp located.
% This is acceptable.
process_datestamp_x([(Date_Atom, Time_Atom)]):-
        add_provenance_date_time(Date_Atom, Time_Atom),
        !.

% Zero datestamps located.
% This is acceptable.
process_datestamp_x([]):-
        !.

% From above, multiple datestamps located.
% This is an error.
process_datestamp_x(DateStamp_Tuple2List):-
        retrieve_proof_file(File_Atom),
        convert_file_for_display(File_Atom, DisplayFile_Atom),
        throw_error('File: ~a contains multiple datestamps: ~k', [DisplayFile_Atom, DateStamp_Tuple2List]).

%-------------------------------------------------------------------------------

parse_datestamp([(Date_Atom, Time_Atom) | T_DateStamp_Tuple2List]) -->
        parse_atom_silent([space], zeroormore),
        "DATE : ",
        parse_atom_silent([space], zeroormore),
        [Day1_Char, Day2_Char, '-', Month1_Char, Month2_Char, Month3_Char, '-', Year1_Char, Year2_Char, Year3_Char, Year4_Char],
        parse_time_leader,
        [Hour1_Char, Hour2_Char, ':', Min1_Char, Min2_Char, ':', Sec1_Char, Sec2_Char, '.', _HundSec1_Char, _HundSec2_Char],
        {atom_chars(Date_Atom, [Day1_Char, Day2_Char, '-', Month1_Char, Month2_Char, Month3_Char, '-', Year1_Char, Year2_Char, Year3_Char, Year4_Char])},
        {atom_chars(Time_Atom, [Hour1_Char, Hour2_Char, ':', Min1_Char, Min2_Char, ':', Sec1_Char, Sec2_Char])},
        % Continue.
        parse_datestamp(T_DateStamp_Tuple2List),
        !.

%-------------------------------------------------------------------------------

% From above, could not parse a datestamp on this line.
% Try the next line.
parse_datestamp(DateStamp_Tuple2List) -->
        parse_line(_Line_Atom),
        !,
        parse_datestamp(DateStamp_Tuple2List).

% From above, could not parse another line. Reached the last line.
parse_datestamp([]) --> !.
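
%-------------------------------------------------------------------------------
% Illustration (editor's sketch; the banner text, subprogram name and date are
% invented): the parsers above accept a proof-file head shaped like
%
%    *******************************************
%          SPARK Verification Conditions
%      Generated by the Examiner, Version X.Y
%              (illustrative banner)
%    *******************************************
%
%    procedure P.Read
%
%    DATE : 7-JUN-1999 TIME : 15:41:20.73
%
% The two asterisk lines must match exactly, the third banner line containing
% "Examiner" selects SPARK (rather than Pascal) mode, and the subprogram and
% DATE lines may appear in any order in the remainder of the first fifteen
% lines of the file.
%-------------------------------------------------------------------------------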
%------------------------------------------------------------------------------- parse_time_leader --> " ". parse_time_leader --> " TIME : ". :- set_prolog_flag(double_quotes, codes). %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/load__switches.pro0000644000175000017500000011522711753202337020326 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Configures system switches, taking into account the contents of the % command line. %############################################################################### %############################################################################### % MODULE %############################################################################### :- module(load__switches, [load_switches/0]). %############################################################################### % DEPENDENCIES %############################################################################### :- use_module('data__formats.pro', [add_type/2]). :- use_module('data__switches.pro', [add_source_of_switch/2, add_switch_complexity_limit/1, add_switch_nocontradiction_hunt/1, add_switch_depth_limit/1, add_switch_verbose/1, add_switch_expression_reduction/1, add_switch_inference_limit/1, add_switch_input_file/1, get_switch_input_file/1, add_switch_log/1, add_switch_plain/1, add_switch_renum/1, add_switch_rule_substitution/1, add_switch_simplification/1, add_switch_standardisation/1, add_switch_substitution_elimination/1, add_switch_typecheck_only/1, add_switch_user_rules/1, add_switch_wrap/1, add_switch_help/1, add_switch_version/1, add_switch_empty/1, add_switch_usage/1, add_switch_hyp_limit/1, get_switch_hyp_limit/1, get_source_of_switch/2, get_switch_log/1, get_switch_deadpaths/1, add_switch_deadpaths/1, prune_source_of_switch/1, prune_switch_complexity_limit/0, prune_switch_nocontradiction_hunt/0, prune_switch_depth_limit/0, prune_switch_verbose/0, prune_switch_expression_reduction/0, prune_switch_inference_limit/0, prune_switch_log/0, prune_switch_plain/0, prune_switch_renum/0, prune_switch_rule_substitution/0, prune_switch_simplification/0, prune_switch_standardisation/0, prune_switch_substitution_elimination/0, prune_switch_typecheck_only/0, prune_switch_user_rules/0, prune_switch_help/0, prune_switch_version/0, prune_switch_empty/0, prune_switch_usage/0, prune_switch_wrap/0, prune_switch_deadpaths/0, prune_switch_hyp_limit/0]). 
:- use_module('newutilities.pro', [contains_no_dups/1, flatten_list/2, generate_int_list/3, implode_separator_content_list/3]). :- use_module('ioutilities.pro', [command_line_error/2, throw_error/2]). :- use_module('parseutilities.pro', [parse_number/3, parse_atom/5, parse_atom_silent/4]). :- use_module('data__system', [get_system_toolname/1]). %############################################################################### % TYPES %############################################################################### :- add_type('CMDLine', [empty, filename('InputFile_Atom'), simple_qualifier('SimpleQualifier'), log_qualifier('LogFile_Atom'), choices_qualifier('ChoicesName', 'SelectRange'), limit_qualifier('LimitName', 'Int'), usage_qualifier('UsageFile_Atom')]). :- add_type('SimpleQualifier', [nolog, nowrap, verbose, nouserrules, plain, typecheck, norenum, help, version]). :- add_type('ChoicesName', [nosimplification, nostandardisation, norule_substitution, nocontradiction_hunt, nosubstitution_elimination, noexpression_reduction]). :- add_type('SelectRange', [all, none, specific('Range_List')]). :- add_type('Range', [value('Int'), range('Lower_Int', 'Upper_Int')]). :- add_type('LimitName', [complexity_limit, depth_limit, inference_limit]). :- add_type('ProofFileKind', [verification_conditions, path_functions]). %############################################################################### % DATA %############################################################################### default_switch_hyp_limit(0). %############################################################################### % PREDICATES %############################################################################### :- set_prolog_flag(double_quotes, chars). %=============================================================================== % load_switches. %------------------------------------------------------------------------------- % Establish the value of all switches, taking into account the contents of % the command line. %=============================================================================== load_switches:- % Establish automatic settings. set_auto_settings, % Retrieve arguments. current_prolog_flag(argv, Arguments_AtomList), % Parse arguments. parse_arguments(Arguments_AtomList, CMDLine_List), % Process parsed arguments. process_arguments(CMDLine_List), !. %=============================================================================== % set_auto_settings. %------------------------------------------------------------------------------- % Initialise all switches to their default values. 
%=============================================================================== set_auto_settings :- add_source_of_switch(switch_input_file, auto_set), add_switch_empty(off), add_source_of_switch(switch_empty, auto_set), add_switch_log(yes_log_file), add_source_of_switch(switch_log, auto_set), add_switch_wrap(on), add_source_of_switch(switch_wrap, auto_set), add_switch_verbose(off), add_source_of_switch(switch_verbose, auto_set), set_user_rules_switch, add_source_of_switch(switch_user_rules, auto_set), add_switch_plain(off), add_source_of_switch(switch_plain, auto_set), add_switch_typecheck_only(off), add_source_of_switch(switch_typecheck_only, auto_set), add_switch_renum(on), add_source_of_switch(switch_renum, auto_set), add_switch_simplification(all), add_source_of_switch(switch_simplification, auto_set), add_switch_standardisation(all), add_source_of_switch(switch_standardisation, auto_set), add_switch_rule_substitution(all), add_source_of_switch(switch_rule_substitution, auto_set), add_switch_nocontradiction_hunt(none), add_source_of_switch(switch_nocontradiction_hunt, auto_set), add_switch_substitution_elimination(all), add_source_of_switch(switch_substitution_elimination, auto_set), add_switch_expression_reduction(all), add_source_of_switch(switch_expression_reduction, auto_set), add_switch_complexity_limit(20), add_source_of_switch(switch_complexity_limit, auto_set), add_switch_depth_limit(5), add_source_of_switch(switch_depth_limit, auto_set), add_switch_inference_limit(40), add_source_of_switch(switch_inference_limit, auto_set), add_switch_help(off), add_source_of_switch(switch_help, auto_set), add_switch_version(off), add_source_of_switch(switch_version, auto_set), add_switch_usage(no_usage_file), add_source_of_switch(switch_usage, auto_set), set_deadpath_switch, add_source_of_switch(switch_deadpaths, auto_set), default_switch_hyp_limit(Hyp_Limit_Default), add_switch_hyp_limit(Hyp_Limit_Default), add_source_of_switch(switch_hyp_limit, auto_set), !. %=============================================================================== % set_user_rules_switch. %------------------------------------------------------------------------------- % Only permit the use of user rules for the Simplifier. %=============================================================================== set_user_rules_switch:- get_system_toolname('Simplifier'), !, add_switch_user_rules(on). set_user_rules_switch:- get_system_toolname('ZombieScope'), !, add_switch_user_rules(off). %=============================================================================== % set_deadpath_switch. %------------------------------------------------------------------------------- % Set the deadpath switch based on the toolname %=============================================================================== set_deadpath_switch:- get_system_toolname('Simplifier'), !, add_switch_deadpaths(off). set_deadpath_switch:- get_system_toolname('ZombieScope'), !, add_switch_deadpaths(on). %=============================================================================== % parse_arguments(+Arguments_AtomList, -CMDLine_List). %------------------------------------------------------------------------------- % Parses the command line into an easily inspected structure. This is only % concerned with the syntax. Syntactically correct arguments may be % semantically wrong. %=============================================================================== % The empty qualifier is detected when no other qualifiers are detected. parse_arguments([], [empty]). 
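% Illustrative example (editor's addition, not part of the original source):
% for a hypothetical command line such as
%   my_unit.vcg -nowrap -complexity_limit=64
% the arguments would be expected to parse into the CMDLine list
%   [filename('my_unit.vcg'), simple_qualifier(nowrap),
%    limit_qualifier(complexity_limit, 64)]
% which process_arguments/1 then applies to the switch database.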
parse_arguments(Arguments_AtomList, CMDLine_List):- parse_arguments_x(Arguments_AtomList, CMDLine_List), !. %------------------------------------------------------------------------------- % Make the parse_legal_base_name call visible to the spxref tool. :- public parse_simplifier_qualifier_part/3. :- public parse_zombiescope_qualifier_part/3. % From above, some qualifiers detected. parse_arguments_x([], []). % More arguments to consider. % Process Simplifier arguments parse_arguments_x([H_Arguments_Atom | T_Arguments_AtomList], [H_CMDLine | T_CMDLine_List]):- get_switch_deadpaths(off), % Parse the qualifier. atom_chars(H_Arguments_Atom, H_Arguments_CharList), phrase(parse_simplifier_qualifier_part(H_CMDLine), H_Arguments_CharList), parse_arguments_x(T_Arguments_AtomList, T_CMDLine_List). % Process ZombieScope arguments parse_arguments_x([H_Arguments_Atom | T_Arguments_AtomList], [H_CMDLine | T_CMDLine_List]):- get_switch_deadpaths(on), % Parse the qualifier. atom_chars(H_Arguments_Atom, H_Arguments_CharList), phrase(parse_zombiescope_qualifier_part(H_CMDLine), H_Arguments_CharList), parse_arguments_x(T_Arguments_AtomList, T_CMDLine_List). % Failure to achieve the above is an error. parse_arguments_x([H_Arguments_Atom | _T_Arguments_AtomList], _CMDLine_List):- throw_error('Error in analysing command argument: ~a', [H_Arguments_Atom]). %------------------------------------------------------------------------------- % Processing of Simplifier flags. %------------------------------------------------------------------------------- parse_simplifier_qualifier_part(CMDLine) --> parse_simplifier_simple_qualifier(CMDLine), !. parse_simplifier_qualifier_part(CMDLine) --> parse_simplifier_log_qualifier(CMDLine), !. parse_simplifier_qualifier_part(CMDLine) --> parse_simplifier_usage_qualifier(CMDLine), !. parse_simplifier_qualifier_part(CMDLine) --> parse_simplifier_choices_qualifier(CMDLine), !. parse_simplifier_qualifier_part(CMDLine) --> parse_simplifier_limit_qualifier(CMDLine), !. parse_simplifier_qualifier_part(CMDLine) --> parse_file_name(CMDLine), !. %------------------------------------------------------------------------------- parse_simplifier_simple_qualifier(simple_qualifier(SimpleQualifier)) --> parse_qualifier_prefix, parse_simplifier_simple_name(SimpleQualifier), !. parse_simplifier_simple_name(help) --> parse_mandatory_and_optional("h", "elp"), !. parse_simplifier_simple_name(nolog) --> parse_mandatory_and_optional("nol", "og"), !. parse_simplifier_simple_name(nowrap) --> parse_mandatory_and_optional("now", "rap"), !. parse_simplifier_simple_name(verbose) --> parse_mandatory_and_optional("verb", "ose"), !. parse_simplifier_simple_name(nouserrules) --> parse_mandatory_and_optional("nou", "serrules"), !. parse_simplifier_simple_name(plain) --> parse_mandatory_and_optional("p", "lain"), !. parse_simplifier_simple_name(typecheck) --> parse_mandatory_and_optional("t", "ypecheck"), !. parse_simplifier_simple_name(norenum) --> parse_mandatory_and_optional("nore", "num"), !. parse_simplifier_simple_name(version) --> parse_mandatory_and_optional("vers", "ion"), !. %------------------------------------------------------------------------------- parse_simplifier_log_qualifier(log_qualifier(LogFile_Atom)) --> parse_qualifier_prefix, parse_mandatory_and_optional("l", "og"), "=", parse_atom([alpha_numeric, under_score, period], oneormore, LogFile_Atom), !. 
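%-------------------------------------------------------------------------------
% Illustrative sketch (editor's addition, not part of the original source):
% each argument is parsed independently, in the same way as
% parse_arguments_x/2 does, by converting the atom to characters and applying
% the qualifier grammar. The helper name and the queries are hypothetical.
demo_parse_simplifier_argument(Argument_Atom, CMDLine):-
    atom_chars(Argument_Atom, Argument_CharList),
    phrase(parse_simplifier_qualifier_part(CMDLine), Argument_CharList),
    !.
% Expected results (illustrative):
%   | ?- demo_parse_simplifier_argument('-nowrap', Q).
%   Q = simple_qualifier(nowrap)
%   | ?- demo_parse_simplifier_argument('-nol', Q).
%   Q = simple_qualifier(nolog)        % only the mandatory prefix is needed
%   | ?- demo_parse_simplifier_argument('-log=my_run.slg', Q).
%   Q = log_qualifier('my_run.slg')
%   | ?- demo_parse_simplifier_argument('my_unit.vcg', Q).
%   Q = filename('my_unit.vcg')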
%------------------------------------------------------------------------------- parse_simplifier_usage_qualifier(usage_qualifier(UsageFile_Atom)) --> parse_qualifier_prefix, parse_mandatory_and_optional("us", "age"), "=", parse_atom([alpha_numeric, under_score, period], oneormore, UsageFile_Atom), !. %------------------------------------------------------------------------------- parse_simplifier_choices_qualifier(choices_qualifier(ChoicesName, SelectRange)) --> parse_qualifier_prefix, parse_simplifier_choices_name(ChoicesName), parse_chosen_units(SelectRange), !. %------------------------------------------------------------------------------- parse_simplifier_choices_name(nosimplification) --> parse_mandatory_and_optional("nosi", "mplification"), !. parse_simplifier_choices_name(nostandardisation) --> parse_mandatory_and_optional("nost", "andardisation"), !. parse_simplifier_choices_name(norule_substitution) --> parse_mandatory_and_optional("noru", "le_substitution"), !. parse_simplifier_choices_name(nocontradiction_hunt) --> parse_mandatory_and_optional("noc", "ontradiction_hunt"), !. parse_simplifier_choices_name(nosubstitution_elimination) --> parse_mandatory_and_optional("nosu", "bstitution_elimination"), !. parse_simplifier_choices_name(noexpression_reduction) --> parse_mandatory_and_optional("noe", "xpression_reduction"), !. %------------------------------------------------------------------------------- parse_simplifier_limit_qualifier(limit_qualifier(LimitName, Int)) --> parse_qualifier_prefix, parse_simplifier_limit_name(LimitName), "=", parse_number(Int), !. %------------------------------------------------------------------------------- parse_simplifier_limit_name(complexity_limit) --> parse_mandatory_and_optional("c", "omplexity_limit"), !. parse_simplifier_limit_name(depth_limit) --> parse_mandatory_and_optional("d", "epth_limit"), !. parse_simplifier_limit_name(inference_limit) --> parse_mandatory_and_optional("i", "nference_limit"), !. %------------------------------------------------------------------------------- % Processing of ZombieScope flags. %------------------------------------------------------------------------------- parse_zombiescope_qualifier_part(CMDLine) --> parse_zombiescope_simple_qualifier(CMDLine), !. parse_zombiescope_qualifier_part(CMDLine) --> parse_zombiescope_log_qualifier(CMDLine), !. parse_zombiescope_qualifier_part(CMDLine) --> parse_zombiescope_limit_qualifier(CMDLine), !. parse_zombiescope_qualifier_part(CMDLine) --> parse_file_name(CMDLine), !. %------------------------------------------------------------------------------- parse_zombiescope_simple_qualifier(simple_qualifier(SimpleQualifier)) --> parse_qualifier_prefix, parse_zombiescope_simple_name(SimpleQualifier), !. parse_zombiescope_simple_name(help) --> parse_mandatory_and_optional("he", "lp"), !. parse_zombiescope_simple_name(nolog) --> parse_mandatory_and_optional("nol", "og"), !. parse_zombiescope_simple_name(nowrap) --> parse_mandatory_and_optional("now", "rap"), !. parse_zombiescope_simple_name(plain) --> parse_mandatory_and_optional("p", "lain"), !. parse_zombiescope_simple_name(norenum) --> parse_mandatory_and_optional("nor", "enum"), !. parse_zombiescope_simple_name(version) --> parse_mandatory_and_optional("v", "ersion"), !. 
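%-------------------------------------------------------------------------------
% Illustrative sketch (editor's addition, not part of the original source):
% the ZombieScope grammar accepts a smaller set of qualifiers, plus its own
% -hyp_limit=... limit. The helper name and the query are hypothetical.
demo_parse_zombiescope_argument(Argument_Atom, CMDLine):-
    atom_chars(Argument_Atom, Argument_CharList),
    phrase(parse_zombiescope_qualifier_part(CMDLine), Argument_CharList),
    !.
% Expected result (illustrative):
%   | ?- demo_parse_zombiescope_argument('-hyp_limit=50', Q).
%   Q = limit_qualifier(hyp_limit, 50)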
%------------------------------------------------------------------------------- parse_zombiescope_log_qualifier(log_qualifier(LogFile_Atom)) --> parse_qualifier_prefix, parse_mandatory_and_optional("l", "og"), "=", parse_atom([alpha_numeric, under_score, period], oneormore, LogFile_Atom), !. %------------------------------------------------------------------------------- parse_zombiescope_limit_qualifier(limit_qualifier(LimitName, Int)) --> parse_qualifier_prefix, parse_zombiescope_limit_name(LimitName), "=", parse_number(Int), !. %------------------------------------------------------------------------------- parse_zombiescope_limit_name(hyp_limit) --> parse_mandatory_and_optional("hy", "p_limit"), !. %------------------------------------------------------------------------------- % Constraint is imposed. parse_chosen_units(specific(Range_List)) --> "=", parse_content_chosen_units(Range_List), !. % No constraint imposed, so applies to all VCs. parse_chosen_units(all) --> !. %------------------------------------------------------------------------------- % Non bracketed form for single ranges is acceptable. parse_content_chosen_units([Range]) --> parse_range(Range), !. % Otherwise brackets are required. parse_content_chosen_units(Range_List) --> "(", parse_range_collection(Range_List), ")", !. %------------------------------------------------------------------------------- % First range is mandatory. parse_range_collection([H_Range | T_Range_List]) --> parse_range(H_Range), parse_range_collection_x(T_Range_List), !. % Additional ranges. parse_range_collection_x([H_Range | T_Range_List]) --> ",", parse_range(H_Range), parse_range_collection_x(T_Range_List), !. % No more. parse_range_collection_x([]) --> !. %------------------------------------------------------------------------------- parse_range(range(Lower_Int, Upper_Int)) --> parse_number(Lower_Int), "-", parse_number(Upper_Int), !. parse_range(value(Int)) --> parse_number(Int), !. %------------------------------------------------------------------------------- %------------------------------------------------------------------------------- % File name is a sequence of alpha numeric characters, '_' and '.'. Note % that '/' and '\' are accepted, to specify files outside the current % directory on both unix and windows. Further, note that ':' is accepted to % allow for windows rooted drive names. Significantly, the file name does % not contain white space. parse_file_name(filename(InputFile_Atom)) --> parse_atom([alpha_numeric, under_score, hyphen, period, forwardslash, backwardslash, colon], oneormore, InputFile_Atom), !. %------------------------------------------------------------------------------- parse_mandatory_and_optional(Mandatory_CharList, Optional_CharList) --> Mandatory_CharList, parse_optional(Optional_CharList), !. %------------------------------------------------------------------------------- parse_optional([H_Char | T_CharList]) --> [H_Char], parse_optional(T_CharList). parse_optional(_T_CharList) --> "". %------------------------------------------------------------------------------- % SEPR:2369: Should the simplifier support Unix style switches (-switch)? % If so, should the interface to be simplifier be platform dependent, or % should both switch styles be allowed? parse_qualifier_prefix --> {qualifier_prefix(Char)}, [Char], !. %=============================================================================== % qualifier_prefix(+Char). 
%------------------------------------------------------------------------------- % The legal qualifier prefixes. %=============================================================================== qualifier_prefix('/'). qualifier_prefix('-'). %=============================================================================== % process_arguments(+CMDLine_List). %------------------------------------------------------------------------------- % Processes the parsed arguments. %=============================================================================== process_arguments([]):- !. process_arguments([H_CMDLine | T_CMDLine_List]):- process_argument(H_CMDLine), process_arguments(T_CMDLine_List). %------------------------------------------------------------------------------- % empty %------ process_argument(empty):- get_source_of_switch(switch_empty, auto_set), prune_source_of_switch(switch_empty), add_switch_empty(on), add_source_of_switch(switch_empty, user_set), !. process_argument(empty):- get_source_of_switch(switch_input_file, user_set), command_line_error('Unexpected multiple instances of empty command-line detected.\n', []). % filename %--------- process_argument(filename(InputFile_Atom)):- get_source_of_switch(switch_input_file, auto_set), prune_source_of_switch(switch_input_file), add_switch_input_file(InputFile_Atom), add_source_of_switch(switch_input_file, user_set), !. process_argument(filename(SecondInputFile_Atom)):- get_source_of_switch(switch_input_file, user_set), get_switch_input_file(InputFile_Atom), command_line_error('Multiple file names specified on the command-line (~a and ~a)\n', [InputFile_Atom, SecondInputFile_Atom]). % log %---- % First occurrence of a log switch (here /nolog). process_argument(simple_qualifier(nolog)):- get_source_of_switch(switch_log, auto_set), prune_switch_log, prune_source_of_switch(switch_log), add_switch_log(no_log_file), add_source_of_switch(switch_log, user_set), !. % First occurrence of a log switch (here /log=...). process_argument(log_qualifier(LogFile_Atom)):- get_source_of_switch(switch_log, auto_set), prune_switch_log, prune_source_of_switch(switch_log), add_switch_log(provided_log_file(LogFile_Atom)), add_source_of_switch(switch_log, user_set), !. % Duplicate occurrence of a log switch (order: /nolog /nolog). process_argument(simple_qualifier(nolog)):- get_source_of_switch(switch_log, user_set), get_switch_log(no_log_file), command_line_error('Illegal multiple uses of /nolog qualifier\n', []). % Duplicate occurrence of a log switch (order: /nolog /log=...). process_argument(simple_qualifier(nolog)):- get_source_of_switch(switch_log, user_set), get_switch_log(provided_log_file(_File_Atom)), command_line_error('Inconsistent use of /log=... and -nolog qualifiers\n', []). % Duplicate occurrence of a log switch (order: -log=... -nolog). process_argument(log_qualifier(_LogFile_Atom)):- get_source_of_switch(switch_log, user_set), get_switch_log(no_log_file), command_line_error('Inconsistent use of -log=... and -nolog qualifiers\n', []). % Duplicate occurrence of a log switch (order: -log=... -log=...). process_argument(log_qualifier(_OneLogFile_Atom)):- get_source_of_switch(switch_log, user_set), get_switch_log(provided_log_file(_TwoLogFile_Atom)), command_line_error('Illegal multiple uses of -log=... qualifier\n', []). % wrap %----- process_argument(simple_qualifier(nowrap)):- get_source_of_switch(switch_wrap, auto_set), prune_switch_wrap, prune_source_of_switch(switch_wrap), add_switch_wrap(off), add_source_of_switch(switch_wrap, user_set), !. 
process_argument(simple_qualifier(nowrap)):- get_source_of_switch(switch_wrap, user_set), command_line_error('-nowrap qualifier appears more than once on command-line\n', []). % verbose %-------- process_argument(simple_qualifier(verbose)):- get_source_of_switch(switch_verbose, auto_set), prune_switch_verbose, prune_source_of_switch(switch_verbose), add_switch_verbose(on), add_source_of_switch(switch_verbose, user_set), !. process_argument(simple_qualifier(verbose)):- get_source_of_switch(switch_verbose, user_set), command_line_error('Illegal multiple uses of -verbose qualifier\n', []). % user_rules %----------- process_argument(simple_qualifier(nouserrules)):- get_source_of_switch(switch_user_rules, auto_set), prune_switch_user_rules, prune_source_of_switch(switch_user_rules), add_switch_user_rules(off), add_source_of_switch(switch_user_rules, user_set), !. process_argument(simple_qualifier(nouserrules)):- get_source_of_switch(switch_user_rules, user_set), command_line_error('Illegal multiple uses of -nouserrules qualifier\n', []). % plain %------ process_argument(simple_qualifier(plain)):- get_source_of_switch(switch_plain, auto_set), prune_switch_plain, prune_source_of_switch(switch_plain), add_switch_plain(on), add_source_of_switch(switch_plain, user_set), !. process_argument(simple_qualifier(plain)):- get_source_of_switch(switch_plain, user_set), command_line_error('Illegal multiple uses of -plain qualifier\n', []). % typecheck_only %--------------- process_argument(simple_qualifier(typecheck)):- get_source_of_switch(switch_typecheck_only, auto_set), prune_switch_typecheck_only, prune_source_of_switch(switch_typecheck_only), add_switch_typecheck_only(on), add_source_of_switch(switch_typecheck_only, user_set), !. process_argument(simple_qualifier(typecheck)):- get_source_of_switch(switch_typecheck_only, user_set), command_line_error('Illegal multiple uses of -typecheck qualifier\n', []). % renum %------ process_argument(simple_qualifier(norenum)):- get_source_of_switch(switch_renum, auto_set), prune_switch_renum, prune_source_of_switch(switch_renum), add_switch_renum(off), add_source_of_switch(switch_renum, user_set), !. process_argument(simple_qualifier(norenum)):- get_source_of_switch(switch_renum, user_set), command_line_error('Illegal multiple uses of -norenum qualifier\n', []). % simplification %--------------- process_argument(choices_qualifier(nosimplification, SelectRange)):- get_source_of_switch(switch_simplification, auto_set), process_selection(SelectRange, SelectVCs), prune_switch_simplification, prune_source_of_switch(switch_simplification), add_switch_simplification(SelectVCs), add_source_of_switch(switch_simplification, user_set), !. process_argument(choices_qualifier(nosimplification, _SelectRange)):- get_source_of_switch(switch_simplification, user_set), command_line_error('Illegal multiple occurrences of -nosimplification qualifier\n', []). % standardisation %---------------- process_argument(choices_qualifier(nostandardisation, SelectRange)):- get_source_of_switch(switch_standardisation, auto_set), process_selection(SelectRange, SelectVCs), prune_switch_standardisation, prune_source_of_switch(switch_standardisation), add_switch_standardisation(SelectVCs), add_source_of_switch(switch_standardisation, user_set), !. process_argument(choices_qualifier(nostandardisation, _SelectRange)):- get_source_of_switch(switch_standardisation, user_set), command_line_error('Illegal multiple occurrences of -nostandardisation qualifier\n', []). 
% rule_substitution %------------------ process_argument(choices_qualifier(norule_substitution, SelectRange)):- get_source_of_switch(switch_rule_substitution, auto_set), process_selection(SelectRange, SelectVCs), prune_switch_rule_substitution, prune_source_of_switch(switch_rule_substitution), add_switch_rule_substitution(SelectVCs), add_source_of_switch(switch_rule_substitution, user_set), !. process_argument(choices_qualifier(norule_substitution, _SelectRange)):- get_source_of_switch(switch_rule_substitution, user_set), command_line_error('Illegal multiple occurrences of -norule_substitution qualifier\n', []). % contradiction_hunt %------------------- process_argument(choices_qualifier(nocontradiction_hunt, SelectRange)):- get_source_of_switch(switch_nocontradiction_hunt, auto_set), process_selection(SelectRange, SelectVCs), prune_switch_nocontradiction_hunt, prune_source_of_switch(switch_nocontradiction_hunt), add_switch_nocontradiction_hunt(SelectVCs), add_source_of_switch(switch_nocontradiction_hunt, user_set), !. process_argument(choices_qualifier(nocontradiction_hunt, _SelectRange)):- get_source_of_switch(switch_nocontradiction_hunt, user_set), command_line_error('Illegal multiple occurrences of -nocontradiction_hunt qualifier\n', []). % substitution_elimination %------------------------- process_argument(choices_qualifier(nosubstitution_elimination, SelectRange)):- get_source_of_switch(switch_substitution_elimination, auto_set), process_selection(SelectRange, SelectVCs), prune_switch_substitution_elimination, prune_source_of_switch(switch_substitution_elimination), add_switch_substitution_elimination(SelectVCs), add_source_of_switch(switch_substitution_elimination, user_set), !. process_argument(choices_qualifier(nosubstitution_elimination, _SelectRange)):- get_source_of_switch(switch_substitution_elimination, user_set), command_line_error('Illegal multiple occurrences of -nosubstitution_elimination qualifier\n', []). % expression_reduction %--------------------- process_argument(choices_qualifier(noexpression_reduction, SelectRange)):- get_source_of_switch(switch_expression_reduction, auto_set), process_selection(SelectRange, SelectVCs), prune_switch_expression_reduction, prune_source_of_switch(switch_expression_reduction), add_switch_expression_reduction(SelectVCs), add_source_of_switch(switch_expression_reduction, user_set), !. process_argument(choices_qualifier(noexpression_reduction, _SelectRange)):- get_source_of_switch(switch_expression_reduction, user_set), command_line_error('Illegal multiple occurrences of -noexpression_reduction qualifier\n', []). % complexity_limit %----------------- process_argument(limit_qualifier(complexity_limit, Int)):- get_source_of_switch(switch_complexity_limit, auto_set), check_limit_is_valid(Int, complexity_limit, 10, 200), prune_switch_complexity_limit, prune_source_of_switch(switch_complexity_limit), add_switch_complexity_limit(Int), add_source_of_switch(switch_complexity_limit, user_set), !. process_argument(limit_qualifier(complexity_limit, _Int)):- get_source_of_switch(switch_complexity_limit, user_set), command_line_error('Illegal multiple occurrences of -complexity_limit=... qualifier\n', []). % depth_limit %------------ process_argument(limit_qualifier(depth_limit, Int)):- get_source_of_switch(switch_depth_limit, auto_set), check_limit_is_valid(Int, depth_limit, 1, 10), prune_switch_depth_limit, prune_source_of_switch(switch_depth_limit), add_switch_depth_limit(Int), add_source_of_switch(switch_depth_limit, user_set), !. 
process_argument(limit_qualifier(depth_limit, _Int)):- get_source_of_switch(switch_depth_limit, user_set), command_line_error('Illegal multiple occurrences of -depth_limit=... qualifier\n', []). % inference_limit %---------------- process_argument(limit_qualifier(inference_limit, Int)):- get_source_of_switch(switch_inference_limit, auto_set), check_limit_is_valid(Int, inference_limit, 10, 400), prune_switch_inference_limit, prune_source_of_switch(switch_inference_limit), add_switch_inference_limit(Int), add_source_of_switch(switch_inference_limit, user_set), !. process_argument(limit_qualifier(inference_limit, _Int)):- get_source_of_switch(switch_inference_limit, user_set), command_line_error('Illegal multiple occurrences of -inference_limit=... qualifier\n', []). % help %----- process_argument(simple_qualifier(help)):- get_source_of_switch(switch_help, auto_set), prune_switch_help, prune_source_of_switch(switch_help), add_switch_help(on), add_source_of_switch(switch_help, user_set), !. process_argument(simple_qualifier(help)):- get_source_of_switch(switch_help, user_set), command_line_error('Illegal multiple uses of -help qualifier\n', []). % version %-------- process_argument(simple_qualifier(version)):- get_source_of_switch(switch_version, auto_set), prune_switch_version, prune_source_of_switch(switch_version), add_switch_version(on), add_source_of_switch(switch_version, user_set), !. process_argument(simple_qualifier(version)):- get_source_of_switch(switch_version, user_set), command_line_error('Illegal multiple uses of -version qualifier\n', []). % Usage %-------- % First occurrence of a usage switch (here -usage=...). process_argument(usage_qualifier(UsageFile_Atom)):- get_source_of_switch(switch_usage, auto_set), prune_switch_usage, prune_source_of_switch(switch_usage), add_switch_usage(provided_usage_file(UsageFile_Atom)), add_source_of_switch(switch_usage, user_set), !. % Hypothesis limit for ZombieScope %------------------- process_argument(limit_qualifier(hyp_limit, Limit)):- integer(Limit), process_argument_hyp_limit(Limit), !. process_argument(limit_qualifier(hyp_limit, none)):- process_argument_hyp_limit(none), !. process_argument(limit_qualifier(hyp_limit, _Limit)):- get_source_of_switch(switch_hyp_limit, user_set), command_line_error('Illegal multiple occurrences of -hyp_limit=... qualifier\n', []). % unexpected %----------- process_argument(CMDLine):- throw_error('Unexpected parsed command line argument: ~w\n', [CMDLine]). % Set the hyp limit switch. %----------- process_argument_hyp_limit(Limit):- get_source_of_switch(switch_hyp_limit, auto_set), prune_switch_hyp_limit, prune_source_of_switch(switch_hyp_limit), add_switch_hyp_limit(Limit), add_source_of_switch(switch_hyp_limit, user_set), !. %=============================================================================== %=============================================================================== % process_selection(+SelectRange, -SelectVCs). %------------------------------------------------------------------------------- % Transform a range selection (SelectRange) into a more accessible vc % selection (SelectVCs), with some error checking for bad ranges. %=============================================================================== % Applies to all. process_selection(all, all):- !. % Applies to none. process_selection(none, none):- !. % Applies to those in range. process_selection(specific(Range_List), exclude(IntList)):- convert_range_list_as_int_list(Range_List, IntList), !.
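%-------------------------------------------------------------------------------
% Illustrative sketch (editor's addition, not part of the original source):
% tracing one range qualifier end to end. The helper name and the expected
% binding are hypothetical; the final list assumes generate_int_list/3
% enumerates its bounds inclusively.
demo_selection_of(Qualifier_Atom, SelectVCs):-
    atom_chars(Qualifier_Atom, Qualifier_CharList),
    phrase(parse_simplifier_choices_qualifier(
               choices_qualifier(_ChoicesName, SelectRange)),
           Qualifier_CharList),
    process_selection(SelectRange, SelectVCs),
    !.
% Expected result (illustrative):
%   | ?- demo_selection_of('-nosimplification=(1-3,7)', SelectVCs).
%   SelectVCs = exclude([1,2,3,7])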
%------------------------------------------------------------------------------- convert_range_list_as_int_list(Range_List, IntList):- range_to_list(Range_List, IntListList), flatten_list(IntListList, IntList), convert_range_list_as_int_list_x(IntList), !. %------------------------------------------------------------------------------- convert_range_list_as_int_list_x(IntList):- contains_no_dups(IntList), !. convert_range_list_as_int_list_x(_IntList):- command_line_error('Illegal duplication in list of argument numbers\n', []). %------------------------------------------------------------------------------- range_to_list([], []):- !. range_to_list([H_Range | T_Range_List], [H_IntList | T_IntListList]):- convert_one_range_to_list(H_Range, H_IntList), range_to_list(T_Range_List, T_IntListList), !. %------------------------------------------------------------------------------- % Specific value. convert_one_range_to_list(value(Int), [Int]):- integer(Int), Int > 0, !. convert_one_range_to_list(value(_Int), _IntList):- command_line_error('Illegal entry in list of numbers argument\n', []). % Range of values. convert_one_range_to_list(range(Lower_Int, Upper_Int), IntList):- integer(Lower_Int), integer(Upper_Int), Lower_Int > 0, Lower_Int < Upper_Int, generate_int_list(Lower_Int, Upper_Int, IntList), !. convert_one_range_to_list(range(_Lower_Int, _Upper_Int), _IntList):- command_line_error('Illegal entry in list of numbers argument\n', []). %=============================================================================== % check_limit_is_valid(+Int, +Limit_Atom, +LowerLegal_Int, +UpperLegal_Int). %------------------------------------------------------------------------------- % Check that provided limit is valid, aborting with error if not. %=============================================================================== check_limit_is_valid(Int, _Limit_Atom, LowerLegal_Int, UpperLegal_Int):- integer(Int), Int >= LowerLegal_Int, Int =< UpperLegal_Int, !. check_limit_is_valid(Int, Limit_Atom, LowerLegal_Int, UpperLegal_Int):- command_line_error('Value ~d given for -~a is out of permitted range: ~d to ~d\n', [Int, Limit_Atom, LowerLegal_Int, UpperLegal_Int]). :- set_prolog_flag(double_quotes, codes). %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/utilities.pro0000644000175000017500000002761611753202337017356 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. 
% % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Various legacy utility predicates. Where these are generic, they should % be moved to newutilities. Where these are not generic, they should be % moved beside their caller, or into a specialised utility module. At this % stage, this file may be deleted, and newutilities renamed as utilities. %############################################################################### %############################################################################### % MODULE %############################################################################### %############################################################################### % DEPENDENCIES %############################################################################### %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### %############################################################################### % PREDICATES %############################################################################### % IS_IN is_in(X,[X|_]) :- !. is_in(X,[_|Y]) :- is_in(X,Y), !. %------------------------------------------------------------------------------- % GEN_APPEND(L1,L2,LL) - general (i.e. nondeterministic) append gen_append([],L,L). gen_append([H|T],L,[H|V]) :- gen_append(T,L,V). %------------------------------------------------------------------------------- % NEGIN(F,NewF) - move the nots in F in as far as possible to get NewF negin((not(P)),P1) :- !, neg(P,P1). negin(for_all(X,P),for_all(X,P1)) :- !, negin(P,P1). negin(for_some(X,P),for_some(X,P1)) :- !, negin(P,P1). negin((P and Q),(P1 and Q1)) :- !, negin(P,P1), negin(Q,Q1). negin((P or Q),(P1 or Q1)) :- !, negin(P,P1), negin(Q,Q1). negin(P,P). %------------------------------------------------------------------------------- % NEG(F,NF) - return NF equivalent to "not F" but with nots moved in neg((not(P)),P1) :- !, negin(P,P1). neg(for_all(X,P),for_some(X,P1)) :- !, neg(P,P1). neg(for_some(X,P),for_all(X,P1)) :- !, neg(P,P1). neg((P and Q),(P1 or Q1)) :- !, neg(P,P1), neg(Q,Q1). neg((P or Q),(P1 and Q1)) :- !, neg(P,P1), neg(Q,Q1). neg(A>B,A<=B) :- !. neg(A=B,A<>B) :- !. neg(A<>B,A=B) :- !. neg(P,(not(P))) :- !. %------------------------------------------------------------------------------- % VAR_FREE(J) - check no Prolog vars (or "goals") in justifications J var_free([]) :- !. var_free([K|K1]) :- novars(K), var_free(K1), !. %------------------------------------------------------------------------------- % NO_VARS(F) - check no vars in (non-list) structure F novars(K) :- atomic(K), !. novars(K) :- nonvar(K), K=..[_OP|Args], var_free(Args), !. %================================================================================ % exp_contains_exp(+Exp, +ContainsExp). %------------------------------------------------------------------------------- % Succeeds where Exp contains ContainsExp.
%================================================================================ exp_contains_exp(Exp, ContainsExp):- % Replace all occurrences of ContainsExp in Exp with a totally unique % identifier. subst_vbl(ContainsExp, '$$uniq$$', Exp, NewExp), !, % If the replacement makes a difference, then ContainsExp must be in % Exp. \+ Exp=NewExp, !. %================================================================================ % safe_subst_vbl(+V, +X, +OLD, -NEW). %------------------------------------------------------------------------------- % Replace all V by X in OLD to give NEW. Unlike subst_vbl/4, continues to % work where V occurs in X. %================================================================================ safe_subst_vbl(V, X, OLD, NEW):- % Protect any V in X. subst_vbl(V, '$$uniq$$', X, ProtectedX), % Perform replacement. subst_vbl(V, ProtectedX, OLD, ProtectedNew), % Undo protection. subst_vbl('$$uniq$$', V, ProtectedNew, NEW), !. %------------------------------------------------------------------------------- % SUBST_VBL(V,X,OLD,NEW) - substitute all V in OLD by X to get NEW subst_vbl(_,_,Y,Y) :- var(Y), !. % leave Prolog variables unchanged. subst_vbl(V,X,V,X) :- !. subst_vbl(_V,_X,Y,Y) :- atomic(Y), !. %------------------------------------------------------------------------------- subst_vbl(V, X, F := E, F := E2) :- % record field names are always atomic. atomic(F), !, subst_vbl(V, X, E, E2), !. subst_vbl(V,X,F,F1) :- F=..[OP|Args], subst_vbl_list(V,X,Args,Args1), F1=..[OP|Args1], !. %------------------------------------------------------------------------------- % SUBST_VBL_LIST(V,X,OL,NL) - substitute all V in OL by X to get NL subst_vbl_list(V,X,[A],[A1]) :- subst_vbl(V,X,A,A1), !. subst_vbl_list(V,X,[A|Args],[A1|Args1]) :- subst_vbl(V,X,A,A1), !, subst_vbl_list(V,X,Args,Args1), !. subst_vbl_list(_V,_X,[],[]) :- !. %------------------------------------------------------------------------------- % INSERT(NUMBER, LIST, NEWLIST) insert(NUMBER, [HEAD|TAIL], [HEAD|INSERTED_LIST]) :- NUMBER > HEAD, !, insert(NUMBER, TAIL, INSERTED_LIST). insert(NUMBER, [NUMBER|_], _) :- !, fail. insert(NUMBER, LIST, [NUMBER|LIST]) :- !. %------------------------------------------------------------------------------- % merge_sort(L1, L2, LIST) -- merge lists L1 & L2 and sort into order merge_sort([], LIST, LIST) :- !. merge_sort(LIST, [], LIST) :- !. merge_sort(L1, L2, LIST) :- append(L1, L2, SO_FAR), !, sort(SO_FAR, LIST), !. %------------------------------------------------------------------------------- % strict_sublist(SUB, LIST) -- SUB is a sublist of LIST strict_sublist(SUB, LIST) :- append(SUB, _, LIST). strict_sublist(SUB, [_|LIST]) :- strict_sublist(SUB, LIST). %------------------------------------------------------------------------------- % write_error_preamble % -------------------- % Writes the preamble used for all critical error messages produced by the % Simplifier. This string is searched for by SPARKSimp to recognise % Simplifier failures, so this string must match that defined in % sparksimp/sparksimp.apb. SPARKSimp also expects to see this string at the % start of a line, so a nl is produced first. write_error_preamble :- nl, write('*** ERROR - '). %------------------------------------------------------------------------------- % Mathematics eval_div(X, Y, Z) :- Z is X // Y. %------------------------------------------------------------------------------- set_exit_status :- !.
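%-------------------------------------------------------------------------------
% Illustrative examples (editor's addition, not part of the original source),
% showing the intent of the substitution utilities above. The goals and the
% expected bindings are hypothetical worked examples only.
%   | ?- subst_vbl(x, 3, x + y > x * 2, New).
%   New = 3+y>3*2
%   | ?- safe_subst_vbl(x, x + 1, f(x, y), New).
%   New = f(x+1,y)
%   | ?- exp_contains_exp(element(a, [i]) + 1, element(a, [i])).
%   yes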
%------------------------------------------------------------------------------- do_wrap_lines_of(OLD, NEW) :- atom_codes(OLD_Atom, OLD), atom_codes(NEW_Atom, NEW), absolute_file_name(path('wrap_utility'), RunCMD, [extensions(['','.exe']), access(exist)]), process_create(RunCMD, [OLD_Atom, NEW_Atom], [process(Proc)]), process_wait(Proc, _ExitStatus), !. %------------------------------------------------------------------------------- maybe_issue_syntax_reminder :- syntax_error_in_file(_), !, % There is at least 1 user rule file with a syntax error. build_list_of_errant_files(ErrantFiles), write_error_preamble, write('Syntax error in a user rule file. Refer to log (slg) file.'), nl, issue_message('The following user rule files contain a syntax error: ', ErrantFiles), issue_message('Scroll back the screen log or consult the log (slg) file for details.', []). maybe_issue_syntax_reminder :- !. %------------------------------------------------------------------------------- build_list_of_errant_files([File|Rest]) :- retract(syntax_error_in_file(File)), !, build_list_of_errant_files(Rest). build_list_of_errant_files([]). %------------------------------------------------------------------------------- % Confirm the existence of a rule (but not its details). user_rule_exists(Name, File) :- ( user_inference_rule(File:Name, _, _) ; inference_rule(File:Name, _, _) ; user_rewrite_rule(File:Name, _, _, _) ; % Non-Ground replacement rules coming from the RLS file % are applied by the same code as for user-defined rules, % so we also check for one of those. nonground_replace_rule(File:Name, _, _, _) ), !. %------------------------------------------------------------------------------- %=============================================================================== % matching_records(+A_Record_Term, +CoreType, +Class, +B_Record_Term) % % Predicate succeeds if A_Record_Term and B_Record_Term have identical % structure of record field accesses, except that the innermost argument of % A_Record_Term differs from the innermost argument of B_Record_Term, and % both innermost arguments are var_const entries of the given CoreType and % Class. % %=============================================================================== % Must match at least one record field. matching_records(A_Record_Term, CoreType, Class, B_Record_Term) :- matching_record_and_field(A_Record_Term, B_Record_Term, A_Arg_Term, B_Arg_Term), matching_records_x(A_Arg_Term, CoreType, Class, B_Arg_Term), !. %------------------------------------------------------------------------------- % Terminate if a different variable or constant on each side. matching_records_x(A_Arg_Term, CoreType, Class, B_Arg_Term):- var_const(A_Arg_Term, CoreType, Class), var_const(B_Arg_Term, CoreType, Class), A_Arg_Term \= B_Arg_Term, !. % From above, do not have different variable or constant on each side. % Seek additional field access. matching_records_x(A_Record_Term, CoreType, Class, B_Record_Term):- matching_record_and_field(A_Record_Term, B_Record_Term, A_Arg_Term, B_Arg_Term), matching_records_x(A_Arg_Term, CoreType, Class, B_Arg_Term), !. matching_record_and_field(A_Record_Term, B_Record_Term, A_Arg_Term, B_Arg_Term) :- functor(A_Record_Term, FieldName_Atom, 1), functor(B_Record_Term, FieldName_Atom, 1), atom_concat('fld_', _FieldName_Atom, FieldName_Atom), arg(1, A_Record_Term, A_Arg_Term), arg(1, B_Record_Term, B_Arg_Term).
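%-------------------------------------------------------------------------------
% Illustrative example (editor's addition, not part of the original source):
% given hypothetical facts
%   var_const(x, integer, c).
%   var_const(y, integer, c).
% the goal
%   | ?- matching_records(fld_count(fld_inner(x)), integer, c,
%                         fld_count(fld_inner(y))).
% would be expected to succeed, since both terms apply the same chain of
% fld_ accessors and differ only in their innermost var_const arguments.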
%=============================================================================== %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/librarypredicates.pro0000644000175000017500000000763211753202337021047 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % It is possible to change the meaning of sicstus source code, by changing % global properties, such as operator precedence or prolog settings. % % The sicstus library modules are typically available in both source % code and pre-compiled units. Thus, in a standard sicstus install, the % source code of library modules should not affect their behaviour, as the % pre-compiled units will always take precedence. % % Thus, it is possible to load a library in a context where it should not % work (due to bad global settings) but it actually does work (because the % code is ignored due to a pre-compiled unit taking precedence). This % behaviour is subtle and confusing. % % To mitigate against this subtle and confusing behaviour all library % modules are deliberately declared up front, before changing any global % settings. The intention is that, should the pre-compiled units be deleted % for some reason, the system should build correctly. % % Note that the spxref code analysis tool only considers the source code of % library modules. However, spxref must be executed with a fixed set of % global settings. Thus, spxref must either consider the library modules % with the wrong settings (as is done just now), or the whole system code % with the wrong settings. %############################################################################### :- module(librarypredicates, [%file_systems. close_all_streams/0, file_exists/1, rename_file/2, %lists. last/2, list_to_set/2, reverse/2, %process. process_create/3, process_wait/2, %system. datime/1]). %############################################################################### % DEPENDENCIES %############################################################################### :- use_module(library(file_systems)). :- use_module(library(lists)). :- use_module(library(process)). :- use_module(library(system)). 
%############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### %############################################################################### % PREDICATES %############################################################################### %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/dynamics.pro0000644000175000017500000000710211753202337017136 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Declares dynamic predicates that are visible in the user module. % %############################################################################### %############################################################################### % DEPENDENCIES %############################################################################### %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### %############################################################################### % PREDICATES %############################################################################### :- dynamic(allow_new_strategies/0). :- dynamic(already_know_indentation/2). :- dynamic(buffered_used_fact/3). :- dynamic(candidate_lower/4). :- dynamic(candidate_upper/4). :- dynamic(complexity_fact/4). :- dynamic(could_infer/2). :- dynamic(could_not_infer/1). :- dynamic(current_root/2). :- dynamic(current_sat_goal/1). :- dynamic(current_stack_depth/1). :- dynamic(current_vc_number/1). :- dynamic(found_contradiction/0). :- dynamic(forward_inferences/1). :- dynamic(hn/1). :- dynamic(inference_depth_limit/2). :- dynamic(issued_contradiction_message/0). :- dynamic(issued_vc_proved_message/0). :- dynamic(join_hyp/4). :- dynamic(know_eliminated/1). :- dynamic(know_eliminated_in_subgoaling/2). :- dynamic(know_norm_expr/2). :- dynamic(know_substituted/1). :- dynamic(know_term_breakdown/2). :- dynamic(known_lower_numeric_limit/4). 
:- dynamic(known_upper_numeric_limit/4). :- dynamic(log_fact/2). :- dynamic(max_conc_no/1). :- dynamic(max_hyp_no/1). :- dynamic(max_written_conc_no/1). :- dynamic(moved_log_fact/3). :- dynamic(nhn/1). :- dynamic(overall_rule_summary/2). :- dynamic(pairing_depth/1). :- dynamic(path_line/1). :- dynamic(pfn/1). :- dynamic(potential_subst_fact/2). :- dynamic(proved_by_user_rules/0). :- dynamic(raw_hyp_already_read/2). :- dynamic(reduction_hyp/2). :- dynamic(rule_summary/2). :- dynamic(simplified_action_part/0). :- dynamic(stack/2). :- dynamic(stmt_line/1). :- dynamic(succ_line/1). :- dynamic(syntax_error_in_file/1). :- dynamic(twiddles_conversion/2). :- dynamic(used/1). :- dynamic(var_const/3). :- dynamic(vc_name/1). %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/processrules.pro0000644000175000017500000001122411753202337020060 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Provides support for processing rules after they have been loaded. %############################################################################### %############################################################################### %MODULE %############################################################################### :- module(processrules, [do_rule_substitutions0/0]). :- use_module('data__rules.pro'). :- use_module('newutilities.pro'). %############################################################################### %DEPENDENCIES %############################################################################### %############################################################################### %TYPES %############################################################################### %############################################################################### %DATA %############################################################################### %############################################################################### %PREDICATES %############################################################################### %=============================================================================== %do_rule_substitutions0. %------------------------------------------------------------------------------- % Seeks to apply the loaded rules to transform the loaded rules. 
The aim % here is to eliminate constants in the rules up-front, to reduce the % number of rule applications seen in rewriting the vcs. %=============================================================================== do_rule_substitutions0 :- scalar_replacement_rule(Name, X, Y), apply_rule_to_existing_rules(Name, X, Y). do_rule_substitutions0 :- !. %------------------------------------------------------------------------------- scalar_replacement_rule(Name, X, Y) :- replace_rule(Name, X, Y), atom(X), user:int_enum_lit_or_const(Y, _T). %------------------------------------------------------------------------------- apply_rule_to_existing_rules(ApplyRuleName_Atom, ApplyLHS_Exp, ApplyRHS_Exp) :- get_rule(TargetRuleFile_Atom, TargetRuleName_Atom, RuleSource, Target_RuleHeuristic, Target_RuleLogic, TargetLHS_ExpList, TargetRHS_ExpList, TargetCondition_ExpList), /* prevent self-application */ TargetRuleName_Atom\= ApplyRuleName_Atom, user:subst_vbl_list(ApplyLHS_Exp, ApplyRHS_Exp, TargetLHS_ExpList, NewTargetLHS_ExpList), user:subst_vbl_list(ApplyLHS_Exp, ApplyRHS_Exp, TargetRHS_ExpList, NewTargetRHS_ExpList), user:subst_vbl_list(ApplyLHS_Exp, ApplyRHS_Exp, TargetCondition_ExpList, NewTargetCondition_ExpList), /* so they do differ */ \+ (TargetLHS_ExpList == NewTargetLHS_ExpList, TargetRHS_ExpList == NewTargetRHS_ExpList, TargetCondition_ExpList == NewTargetCondition_ExpList), prune_rule(TargetRuleFile_Atom, TargetRuleName_Atom, RuleSource, Target_RuleHeuristic, Target_RuleLogic, TargetLHS_ExpList, TargetRHS_ExpList, TargetCondition_ExpList), add_rule(TargetRuleFile_Atom, TargetRuleName_Atom, RuleSource, Target_RuleHeuristic, Target_RuleLogic, NewTargetLHS_ExpList, NewTargetRHS_ExpList, NewTargetCondition_ExpList), fail. %############################################################################### %END-OF-FILE spark-2012.0.deb/simplifier/data__formats.pro0000644000175000017500000000674111753202337020142 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % This module offers no system functionality. It provides a location for % modules to store their type and data descriptions. By providing a uniform % interface, rather than embedding this information into comments, the % expectation is that it will be more consistent, better maintained and % easier to read. %############################################################################### :- module(data__formats, [get_state/2, add_state/2, get_type/2, add_type/2]). 
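%-------------------------------------------------------------------------------
% Editorial illustration (not part of the original source): a client module
% registers a description of one of its types with add_type/2 and can later
% retrieve it with get_type/2.  The registration shown is the one made by
% data__hyp.pro; the query and binding are illustrative only.
%
%   :- use_module('data__formats.pro', [add_type/2, get_type/2]).
%   :- add_type('HypothesisType', [x, ss, [s, 'TYPE']]).
%
%   ?- get_type('HypothesisType', Attributes).
%   Attributes = [x, ss, [s, 'TYPE']]
%-------------------------------------------------------------------------------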
%############################################################################### % DEPENDENCIES %############################################################################### %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### % As this module manages types and data, it is unable to initially manage % its own types and data until the whole module has been parsed. For this % reason the types and data declarations are contained in a predicated and % dynamically invoked at the end of the module. declare_data_formats:- add_state(get_state, get_state('Data_Atom', 'DataAttributes_Any')), add_state(get_type, get_type('Type_Atom', 'TypeAttributes_AnyList')). :- dynamic(get_state/2). :- dynamic(get_type/2). %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % Add. %=============================================================================== add_state(Data_Atom, DataAttributes_Any):- assert(get_state(Data_Atom, DataAttributes_Any)), !. add_type(Type_Atom, TypeAttributes_AnyList):- assert(get_type(Type_Atom, TypeAttributes_AnyList)), !. %=============================================================================== % Delayed setting of types and data for this module. :- declare_data_formats. %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/data__hyp.pro0000644000175000017500000002547011753202337017267 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Stores, retrieves and replaces hypothesis into the database. %############################################################################### %############################################################################### % MODULE %############################################################################### :- module(data__hyp, [add_hyp/3, add_hyp_with_id/3, get_hyp/3, get_next_hyp_id/1, replace_hyp/4, prune_hyp/3, prune_all_hyps/3, reset_next_hyp_id/0, add_forall_hyp/3, get_forall_hyp/3, prune_all_forall_hyp/0]). 
%############################################################################### % DEPENDENCIES %############################################################################### :- use_module('data__formats.pro', [add_state/2, add_type/2]). :- use_module('data__provenance.pro', [path_functions/0]). :- use_module('ioutilities.pro', [show_error/2]). :- use_module('opdeclar.pro', [declare_operators/0, hide_operators/0]). :- use_module(library(sets), [list_to_set/2]). :- declare_operators.
%############################################################################### % TYPES %############################################################################### :- add_type('HypothesisType', [x, ss, [s, 'TYPE']]).
%############################################################################### % DATA %############################################################################### :- add_state(get_hyp, get_hyp('Hypothesis_Term', 'HypothesisType', 'Id_Int')). :- add_state(get_next_hyp_id, get_next_hyp_id('Id_Int')). :- add_state(get_forall_hyp, get_forall_hyp('Hypothesis_Term', 'Condition_TermList', 'Id_Int')). :- dynamic(get_hyp/3). :- dynamic(get_next_hyp_id/1). :- dynamic(get_forall_hyp/3).
%############################################################################### % PREDICATES %###############################################################################
%=============================================================================== % add_hyp(+Hyp_Term, +HypType, -Id_Int) % % The predicate is determinate and always succeeds. The identifier of the % inserted hypothesis is bound to Id_Int; % % If the hypothesis is a duplicate then it is not inserted into the % database and its identifier is bound to Id_Int; % % No hypothesis is added to the database if the hyp is "true" and % it is not a path function. %=============================================================================== add_hyp(true,_,0) :- \+ path_functions, !. % % Adding a duplicate hypothesis. % add_hyp(Hyp_Term, HypType, Id_Int) :- get_hyp(Hyp_Term, HypType, Id_Int), user:assert_log_fact(duplicate_hyp, [Id_Int, Hyp_Term]), !. % % Adding a hypothesis that is not a duplicate. % add_hyp(Hyp_Term, HypType, Id_Int) :- next_hyp_id(Id_Int), assertz(get_hyp(Hyp_Term, HypType, Id_Int)), retractall(user:could_not_infer(_)), !. %===============================================================================
%=============================================================================== % add_hyp_with_id(+Hyp_Term, +HypType, +Id_Int) % % Adds a hypothesis into the database with a specific hypothesis identifier. % % There are two cases: % HypType = x : typically used when loading hypotheses from VCG files % where the identifiers are specified. In this case, % update the database to record that this identifier is % used. % % HypType = ss or [s, Type] : adding a hypothesis that is related to (a % variant of) a hypothesis with the same hypothesis id. %=============================================================================== add_hyp_with_id(Hyp_Term, x, Id_Int) :- assertz(get_hyp(Hyp_Term, x, Id_Int)), set_next_hyp_id(Id_Int), !. add_hyp_with_id(Hyp_Term, HypType, Id_Int) :- assertz(get_hyp(Hyp_Term, HypType, Id_Int)), !. %===============================================================================
%=============================================================================== % replace_hyp(+OldHyp_Term, +HypType, +HypId_Int, +NewHyp_Term) % % Replace a hypothesis in the database with a new hypothesis.
% % The predicate fails if OldHyp_Term, HypType, HypId_Int is not % in the database. %=============================================================================== replace_hyp(OldHyp_Term, HypType, HypId_Int, NewHyp_Term):- prune_hyp(OldHyp_Term, HypType, HypId_Int), assertz(get_hyp(NewHyp_Term, HypType, HypId_Int)). %===============================================================================
%=============================================================================== % prune_hyp(?Hyp_Term, ?HypType, ?HypId_Int) % % Predicate retracts a hypothesis from the database. % % Predicate fails if nothing is deleted from the database. %=============================================================================== prune_hyp(Hyp_Term, HypType, HypId_Int):- retract(get_hyp(Hyp_Term, HypType, HypId_Int)). %===============================================================================
%=============================================================================== % prune_all_hyps(?Hyp_Term, ?HypType, ?HypId_Int) % % Retracts all hypotheses from the database that match the % arguments. % % Predicate always succeeds independent of whether anything is deleted % from the database. %=============================================================================== prune_all_hyps(Hyp_Term, HypType, HypId_Int):- retractall(get_hyp(Hyp_Term, HypType, HypId_Int)). %===============================================================================
%=============================================================================== % next_hyp_id(-NextId_Int) %------------------------------------------------------------------------------- % Return the next hypothesis identifier and record its use in the database. %=============================================================================== next_hyp_id(HypId_Int):- retract(get_next_hyp_id(HypId_Int)), HypId1_Int is HypId_Int + 1, assert(get_next_hyp_id(HypId1_Int)), !. % % If no hypotheses have been inserted into the database, then the hypothesis % identifier is 1 and the next free identifier is 2. % next_hyp_id(1):- assert(get_next_hyp_id(2)), !. %===============================================================================
%=============================================================================== % set_next_hyp_id(+HypId_Int) %------------------------------------------------------------------------------- % Record that the hypothesis identifier has been used and increment % get_next_hyp_id if the identifier is greater than get_next_hyp_id. %=============================================================================== set_next_hyp_id(HypId_Int):- HypId1_Int is HypId_Int + 1, set_next_hyp_id_x(HypId1_Int). %=============================================================================== % set_next_hyp_id_x(+HypId1_Int) %=============================================================================== set_next_hyp_id_x(HypId1_Int):- retract(get_next_hyp_id(MaxHypId_Int)), set_next_hyp_id_y(HypId1_Int, MaxHypId_Int), !. set_next_hyp_id_x(HypId1_Int):- set_next_hyp_id_y(HypId1_Int, 0), !. %=============================================================================== % set_next_hyp_id_y(+HypId_Int, +MaxHypId_Int) %=============================================================================== set_next_hyp_id_y(HypId_Int, MaxHypId_Int):- MaxHypId_Int =< HypId_Int, assert(get_next_hyp_id(HypId_Int)), !. set_next_hyp_id_y(_HypId_Int, MaxHypId_Int):- assert(get_next_hyp_id(MaxHypId_Int)), !.
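%-------------------------------------------------------------------------------
% Editorial illustration (not part of the original source): loading a
% hypothesis with an explicit identifier via add_hyp_with_id/3 advances the
% counter, so a subsequently added hypothesis receives the next free number.
% The hypothesis terms shown are illustrative only.
%
%   ?- add_hyp_with_id(x >= 1, x, 5).   % e.g. as read from a VCG file
%   ?- add_hyp(x <= 10, x, Id).
%   Id = 6
%-------------------------------------------------------------------------------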
%=============================================================================== %=============================================================================== % reset_next_hyp_id %------------------------------------------------------------------------------- % Reset the counter for hypothesis identifiers by removing get_next_hyp_id/1. %=============================================================================== reset_next_hyp_id:- retractall(get_next_hyp_id(_)), !. %=============================================================================== % add_forall_hyp(+Hypothesis_Term, +Condition_TermList, +HypId_Int) %------------------------------------------------------------------------------- % Insert a universally quantified hypothesis into the database. %=============================================================================== add_forall_hyp(Hypothesis_Term, Condition_TermList, HypId_Int) :- assertz(get_forall_hyp(Hypothesis_Term, Condition_TermList, HypId_Int)), !. %===============================================================================
%=============================================================================== % prune_all_forall_hyp %------------------------------------------------------------------------------- % Delete all universally quantified hypotheses from the database. %=============================================================================== prune_all_forall_hyp :- retractall(get_forall_hyp(_Hypothesis_Term, _Condition_TermList, _HypId_Int)), !. %===============================================================================
%############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/ioutilities.pro0000644000175000017500000005416611753202337017702 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Provides general input and output utilities.
%############################################################################### %############################################################################### % MODULE %############################################################################### :- module(ioutilities, [read_line_from_stream/2, read_up_to_number_of_chars_from_stream/3, read_lines_from_file_as_char_list/3, write_terms_to_file/2, throw_error/2, show_error/2, show_error_long/4, show_warning/2, show_warning_long/4, command_line_error/2, display_header_full/1, display_header_plain/1, stopwith/1, stopwith3/3]). %############################################################################### % DEPENDENCIES %############################################################################### :- use_module('data__formats.pro', [add_type/2]). :- use_module('newutilities.pro', [implode_separator_content_list/3]). :- use_module('data__system.pro', [get_system_toolname/1]). :- use_module('../common/versioning/version.pro', [toolset_version/1, toolset_copyright/1, toolset_banner_line/1, toolset_distribution/1]). :- use_module(library(file_systems), [close_all_streams/0]). %############################################################################### % TYPES %############################################################################### :- add_type('ReadText', ['CharList', end_of_file]). :- add_type('ModuleFunctorArity', ['Module_Atom' : ('Functor_Atom'/ 'Arity_Int')]). :- add_type('Lines', [everyLine, upToLine('Int')]). :- add_type('Problem', [error, warning]). %############################################################################### % DATA %############################################################################### %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % write_atom_list_to_stream(+Stream, +AtomList). %------------------------------------------------------------------------------- % Writes out a list of atoms, each on a separate line, to the provided % output stream (Stream). %=============================================================================== %All done. write_atom_list_to_stream(_Stream, []):- !. %From above, more to write. write_atom_list_to_stream(Stream, [H_Atom | T_AtomList]):- format(Stream, '~a\n', [H_Atom]), write_atom_list_to_stream(Stream, T_AtomList). %=============================================================================== % read_line_from_stream(+Stream, -ReadText). %------------------------------------------------------------------------------- % Retrieves a list of characters or end_of_file (ReadText) from the % provided input stream (Stream). The line is all characters up to the next % new_line or end_of_file. The new_line character is never returned. % Returns end_of_file if it is not preceded by any characters. (Thus, it is % not possible to distinguish between chars|new_line|end_of_file and % chars|end_of_file). %=============================================================================== read_line_from_stream(Stream, ReadText):- read_line(Stream, Line_Any), read_line_from_stream_x(Line_Any, ReadText), !. %------------------------------------------------------------------------------- % Is end of file. read_line_from_stream_x(end_of_file, end_of_file):- !. % Is not end of file. 
read_line_from_stream_x(CodeList, CharList):- atom_codes(Atom, CodeList), atom_chars(Atom, CharList), !. %=============================================================================== % read_up_to_number_of_chars_from_stream(+Stream, +Int, -ReadText). %------------------------------------------------------------------------------- % Retrieves a list of characters or end_of_file (ReadText) from the % provided input stream (Stream). The line is the next (Int) characters, % or all characters up to the next new_line, or end_of_file. The new_line % character is never returned. Returns end_of_file if it is encountered % before (Int) chars or end_of_line -- with any 'pending' characters being % silently thrown away. %=============================================================================== read_up_to_number_of_chars_from_stream(Stream, Int, ReadText):- read_number_of_chars_from_stream_x(Stream, Int, CharList), convert_charlist_to_readtext(CharList, ReadText), !. %------------------------------------------------------------------------------- % Reached end. read_number_of_chars_from_stream_x(_Stream, 0, []):- !. read_number_of_chars_from_stream_x(Stream, Int, CharList):- get_char(Stream, Char), read_number_of_chars_from_stream_xx(Stream, Int, Char, CharList), !. % Is end_of_file. Record this, and stop. read_number_of_chars_from_stream_xx(_Stream, _Int, end_of_file, [end_of_file]):- !. % Is newline. Do not record this, and stop. read_number_of_chars_from_stream_xx(_Stream, _Int, '\n', []):- !. % Is character. Record this and continue. read_number_of_chars_from_stream_xx(Stream, Int, H_Char, [H_Char | T_CharList]):- Next_Int is Int-1, read_number_of_chars_from_stream_x(Stream, Next_Int, T_CharList), !. %------------------------------------------------------------------------------- % If encountered end_of_file, then only return end_of_file. convert_charlist_to_readtext(CharList, end_of_file):- member(end_of_file, CharList), !. % From above, did not encounter end_of_file. convert_charlist_to_readtext(CharList, CharList):- !. %=============================================================================== % read_lines_from_file_as_char_list(+File_Atom, +Lines, -CharList). %------------------------------------------------------------------------------- % Retrieves a specified number of lines (Lines) from file (File_Atom) as a % list of characters (CharList). If Lines is everyLine then every line of % the file will be returned. If Lines is upToLine(Int) then Int lines will % be returned - or if the number of lines specified is greater than the % number contained in the file, no error is raised, and all of the lines in % the file will be returned. The list of characters is unfiltered, and so % may contain instances of new line. %=============================================================================== read_lines_from_file_as_char_list(File_Atom, Lines, CharList):- open(File_Atom, read, Stream), read_lines_from_file_as_char_list_x(Stream, Lines, 0, CharList), close(Stream), !. %------------------------------------------------------------------------------- %Reached line limit. read_lines_from_file_as_char_list_x(_Stream, upToLine(LineAt_Int), LineAt_Int, []):- !. %From above, not reached line limit. read_lines_from_file_as_char_list_x(Stream, Lines, LineAt_Int, CharList):- get_char(Stream, NextChar_Any), read_lines_from_file_as_char_list_xx(Stream, Lines, LineAt_Int, NextChar_Any, CharList), !. 
%------------------------------------------------------------------------------- %Reached the end of the file. read_lines_from_file_as_char_list_xx(_Stream, _Lines, _LineAt, end_of_file, []):- !. %Reached a newline. read_lines_from_file_as_char_list_xx(Stream, Lines, LineAt_Int, '\n', ['\n' | T_CharList]):- NextLineAt_Int is LineAt_Int + 1, read_lines_from_file_as_char_list_x(Stream, Lines, NextLineAt_Int, T_CharList), !. %From above, neither the end of the file nor newline. read_lines_from_file_as_char_list_xx(Stream, Lines, LineAt_Int, H_Char, [H_Char | T_CharList]):- read_lines_from_file_as_char_list_x(Stream, Lines, LineAt_Int, T_CharList), !. %=============================================================================== % write_terms_to_file(+File_Atom, +ModuleFunctorArityList). %------------------------------------------------------------------------------- % Writes out to file (File_Atom) the contents of every predicate described % by (ModuleFunctorArityList). %=============================================================================== write_terms_to_file(File_Atom, ModuleFunctorArityList):- open(File_Atom, write, Stream), write_terms_to_file_x(Stream, ModuleFunctorArityList), close(Stream), !. %------------------------------------------------------------------------------- write_terms_to_file_x(_Stream, []):- !. write_terms_to_file_x(Stream, [Module_Atom : (Functor_Atom / Arity_Int) | T_ModuleFunctorArityList]):- write_terms_to_file_xx(Stream, Module_Atom, Functor_Atom, Arity_Int), write_terms_to_file_x(Stream, T_ModuleFunctorArityList). %------------------------------------------------------------------------------- write_terms_to_file_xx(Stream, Module_Atom, Functor_Atom, Arity_Int):- % Construct the predicate to call. functor(Predicate, Functor_Atom, Arity_Int), % Call and instantiate the predicate. Module_Atom:Predicate, format(Stream, '~q\n', Predicate), fail. write_terms_to_file_xx(_Stream, _Module_Atom, _Functor_Atom, _Arity_Int):- !. %=============================================================================== % command_line_error(+FormattedText_Atom, +Arguments_AnyList). %------------------------------------------------------------------------------- % Raises an error. %=============================================================================== command_line_error(FormattedText_Atom, Arguments_AnyList):- % Append common leading text. atom_concat('ERROR IN COMMAND LINE SYNTAX\n!!! Involving: ', FormattedText_Atom, LeadingFormattedText_Atom), show_error(LeadingFormattedText_Atom, Arguments_AnyList). %=============================================================================== % show_error(+LineFormattedText_Atom, +LineArguments_AnyList). %------------------------------------------------------------------------------- % Raises an error (one line description). % % The line formatted text (LineFormattedText_Atom) should be on a single % line, and thus never contain a newline. A single trailing newline is % added automatically when displayed. %=============================================================================== throw_error(LineFormattedText_Atom, LineArguments_AnyList):- show_error(LineFormattedText_Atom, LineArguments_AnyList), !. show_error(LineFormattedText_Atom, LineArguments_AnyList):- highlight_problem(error, user_output, LineFormattedText_Atom, LineArguments_AnyList, [], []), !. 
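%-------------------------------------------------------------------------------
% Editorial illustration (not part of the original source): a typical call of
% show_error/2, with a hypothetical message and file name.  Via
% highlight_problem/6 this prints an error banner to user_output and halts
% with exit code 1.
%
%   ?- show_error('Could not open file ~a.', ['example.vcg']).
%-------------------------------------------------------------------------------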
%=============================================================================== %show_error_long(+LineFormattedText_Atom, % +LineArguments_AnyList, % +ParagraphFormattedText_Atom, % +ParagraphArguments_AnyList). %------------------------------------------------------------------------------- % Raises an error (one line plus longer description). % % The line formatted text (LineFormattedText_Atom) should be on a single % line, and thus never contain a newline. A single trailing newline is % added automatically when displayed. The paragraph formatted text % (ParagraphFormattedText_Atom) may contain as many newlines as desired. No % trailing newlines are automatically displayed. %=============================================================================== show_error_long(LineFormattedText_Atom, LineArguments_AnyList, ParagraphFormattedText_Atom, ParagraphArguments_AnyList):- highlight_problem(error, user_output, LineFormattedText_Atom, LineArguments_AnyList, ParagraphFormattedText_Atom, ParagraphArguments_AnyList), !.
%=============================================================================== % show_warning(+LineFormattedText_Atom, +LineArguments_AnyList). %------------------------------------------------------------------------------- % Raises a warning (one line description). % % The line formatted text (LineFormattedText_Atom) should be on a single % line, and thus never contain a newline. A single trailing newline is % added automatically when displayed. %=============================================================================== show_warning(LineFormattedText_Atom, LineArguments_AnyList):- highlight_problem(warning, user_output, LineFormattedText_Atom, LineArguments_AnyList, [], []), !.
%=============================================================================== %show_warning_long(+LineFormattedText_Atom, % +LineArguments_AnyList, % +ParagraphFormattedText_Atom, % +ParagraphArguments_AnyList). %------------------------------------------------------------------------------- % Raises a warning (one line plus longer description). % % The line formatted text (LineFormattedText_Atom) should be on a single % line, and thus never contain a newline. A single trailing newline is % added automatically when displayed. The paragraph formatted text % (ParagraphFormattedText_Atom) may contain as many newlines as desired. No % trailing newlines are automatically displayed. %=============================================================================== show_warning_long(LineFormattedText_Atom, LineArguments_AnyList, ParagraphFormattedText_Atom, ParagraphArguments_AnyList):- highlight_problem(warning, user_output, LineFormattedText_Atom, LineArguments_AnyList, ParagraphFormattedText_Atom, ParagraphArguments_AnyList), !.
%=============================================================================== %highlight_problem(+Problem, % +Stream, % +LineFormattedText_Atom, % +LineArguments_AnyList, % +ParagraphFormattedText_Atom, % +ParagraphArguments_AnyList). %------------------------------------------------------------------------------- % Support predicate, to centrally manage all problem messages. The form of % formatted text and arguments should conform to the standard format % predicate. Note that the description line (LineFormattedText_Atom) % should never contain embedded new lines, as this is handled here. The % extended description (ParagraphFormattedText_Atom) may contain as many % embedded newlines as desired and will not have a trailing newline % automatically inserted.
%=============================================================================== % On error: display problem and halt with error code 1. highlight_problem(error, Stream, LineFormattedText_Atom, LineArguments_AnyList, ParagraphFormattedText_Atom, ParagraphArguments_AnyList):- % Get the tool name. get_system_toolname(ToolName_Atom), % Double line break. format(Stream, '~n~n', []), % Make it clear which system encountered the problem. format(Stream, '*****************************************************************************~n', []), format(Stream, '* An error has occurred in the ~a~n', [ToolName_Atom]), % Write out the error string that is detected by sparksimp. format(Stream, '*** ERROR - ', []), % Display the one-line problem description. format(Stream, LineFormattedText_Atom, LineArguments_AnyList), format(Stream, '~n', []), % Display any additional problem description parts. format(Stream, ParagraphFormattedText_Atom, ParagraphArguments_AnyList), % Close block. format(Stream, '*****************************************************************************~n', []), % Double line break. format(Stream, '~n~n', []), close_all_streams, halt(1), !. % On warning: display problem and continue. highlight_problem(warning, Stream, LineFormattedText_Atom, LineArguments_AnyList, ParagraphFormattedText_Atom, ParagraphArguments_AnyList):- % Get the tool name. get_system_toolname(ToolName_Atom), % Double line break. format(Stream, '~n~n', []), % Make it clear which system encountered the problem. format(Stream, '*****************************************************************************~n', []), format(Stream, '* A warning was encountered in the ~a~n', [ToolName_Atom]), % Write out a warning string. This is not processed by any external % tool. format(Stream, '*** WARNING - ', []), % Display the one-line problem description. format(Stream, LineFormattedText_Atom, LineArguments_AnyList), format(Stream, '~n', []), % Display any additional problem description parts. format(Stream, ParagraphFormattedText_Atom, ParagraphArguments_AnyList), % Close block. format(Stream, '*****************************************************************************~n', []), % Force double line break. format(Stream, '~n~n', []), !. %=============================================================================== % stopwith(+Text_Atom). %------------------------------------------------------------------------------- % Raise an error. %=============================================================================== stopwith(Text_Atom) :- implode_separator_content_list('', [Text_Atom, '\n', '%PRESIMP-F-ABORT, Presimplification terminated.', '\n'], Content_Atom), throw_error(Content_Atom, []). %=============================================================================== % stopwith3(+X, +Y, +Z). %------------------------------------------------------------------------------- % Raise an error. %=============================================================================== stopwith3(X, Y, Z) :- name(X, XL), name(Y, YL), name(Z, ZL), append(YL, ZL, IL), !, append(XL, IL, RL), name(R, RL), !, stopwith(R). %=============================================================================== % display_header_full(+Stream). %------------------------------------------------------------------------------- % Display the header in full. %=============================================================================== display_header_full(Stream):- % Grammar for the toolset wide standard banner lines (not in plain mode): % % BANNER := "SPARK . 
" " . % TOOLNAME . " " . BANNERLINE . . % COPYRIGHT . "" % % TOOLNAME := Value of get_system_toolname. % BANNERLINE := Value of toolset_banner_line from versioning. % COPYRIGHT := Value of toolset_copyright from versioning. get_system_toolname(ToolName_Atom), toolset_banner_line(BannerLine_Atom), toolset_copyright(Copyright_Atom), format(Stream, 'SPARK ~a ~a~n~a~n', [ToolName_Atom, % SPARK ~a BannerLine_Atom, % ~a Copyright_Atom % ~n~a ]), % ~n !. %=============================================================================== % display_header_plain(+Stream). %------------------------------------------------------------------------------- % Display the plain header. %=============================================================================== display_header_plain(Stream):- % Grammar for the toolset wide standard banner lines (in plain mode): % % BANNER := "SPARK . " " . % TOOLNAME . " " . DISTRIBUTION . " Edition, " . "" . % "" % % TOOLNAME := Value of get_system_toolname. % DISTRIBUTION := Value of toolset_distribution from versioning. get_system_toolname(ToolName_Atom), toolset_distribution(Distribution_Atom), format(Stream, 'SPARK ~a ~a Edition~n~n', [ToolName_Atom, % SPARK ~a Distribution_Atom % ~a ]), % Edition~n~n !. %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/simpvc.pro0000644000175000017500000034072311753202337016641 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Top level simplification, ordering calls to the various simplification % routines declared elsewhere. 
%############################################################################### %############################################################################### % MODULE %############################################################################### %############################################################################### % DEPENDENCIES %############################################################################### %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### %############################################################################### % PREDICATES %############################################################################### % A. CONTRADICTION-HUNTER % % Store traversal conditions (rather like hypotheses). % % Each has % get_hyp(FORMULA, x, N) -- after "simplification" % get_hyp(STD_FORM, [s,T], N) -- full standard form (with mods.) % get_hyp(SS_FORM, ss, N) -- semi-standard form (XrY --> X'rY'). % % On reading hypotheses from file, negin, and-splitting and simplification should % be performed automatically before proceeding further. % % Look for contradictions thus: % % [Assume: know(X) :- hyp ; sf ; ssf ; inferred(X). ] % % (1) See if know "false". % % (2) For each known P, see if (not P)', sf(not P), ssf(not P) known. % % (3) See if can infer empty-range, e.g. for all X in A..B, that A>B (in other % words, a "contradictory pair", e.g. x>3 and x<0). % % (4) Perform forward-inferences and standardise to form additional sf facts % [must keep pointers to hypotheses used]. % % (5) Perform equality (and equivalence) substitutions to generate new facts and % try standardising these to see if a contradiction can be established. % % % B. EXPRESSION-REDUCTION % % [Only necessary if contradiction-hunt proves fruitless.] % % (1) If any hyp's or sf's are true, corresponding hypotheses can be eliminated. % % (2) If any range-subsumptions (e.g. x>0, x>1: latter superfluous), subsumed % hypothesis can be eliminated. % % (3) If any "complementary pair" disjunctions (e.g. x>0 or x<3), these can be % eliminated. % % (4) If any implication hypotheses, try satisfying l.h.s.: success allows % hypothesis to be reduced to its consequent. % % (5) If any equivalences, work on satisfy each side in turn: if either is % satisfiable, replace equivalence by other side; if either reduces (via % simple strategy) to false, replace by negated other side. % % (6) Form joins to depth 2 (?) of all relational expressions with a standard % form. For each in turn, see if its negation further joined to a depth of % two gives a contradiction; if so, the hypothesis itself is redundant and % may be eliminated. (If any joins of actual hypotheses yield "false", % contradiction has been established subsequent to contradiction-hunt.) simplify_vc :- do_simplify_vc, !. simplify_vc :- !. %------------------------------------------------------------------------------- do_simplify_vc :- clear_up_old_facts, % see if all conclusions proved. try_to_prove_concs, proved_all_conclusions, !. do_simplify_vc :- issue_message('Rule substitutions phase 1', []), do_rule_substitutions1, % Look for things in Concs that need substituting try_to_prove_concs, % see if all conclusions proved proved_all_conclusions, !. 
do_simplify_vc :- restructure_vc(toplevel), % move negation inwards, etc. issue_message('Rule substitutions phase 2', []), do_rule_substitutions2, % e.g. if rule C = 10, replace C by 10 form_instantiable_hyp_facts, issue_message('Standardise hypotheses', []), setup_hypotheses, % create standard & semi-standard forms issue_message('Standardise conclusions', []), setup_conclusions, % create standard & semi-standard forms try_to_prove_concs, % see if any conclusions proved proved_all_conclusions, % and stop if all proved !. do_simplify_vc :- contradiction_hunt, % look for contradictions issue_message('Expression reduction', []), expression_reduction, % get rid of redundant hypotheses form_instantiable_hyp_facts, % expression_reduction might have changed % hypotheses, so re-generate instantiable hyp facts asserta(allow_new_strategies), % use extra rules in inference engine try_to_prove_concs, % see if any more conclusions proved proved_all_conclusions, % and stop if all proved !. do_simplify_vc :- found_contradiction, % stop if so !. do_simplify_vc :- restructure_vc(toplevel), % Restructure prior to proof-framing extended_simplify, % eliminate redundant modulus operators etc. try_to_prove_concs, % eliminate any obvious ones, post-restructuring proved_all_conclusions, % and stop if all proved !. do_simplify_vc :- issue_message('Adding hypotheses from ground inference rules', []), find_max_hyp_no(_MAX), add_hypotheses_from_ground_inference_rules, try_to_prove_concs, % See if all conclusions have been proved. proved_all_conclusions, !. do_simplify_vc :- issue_message('Proof framing', []), try_proof_framing, % Try unwrapping, implication, proof by cases. fail. do_simplify_vc :- user_rule_exists(_,_), % If so, try using them issue_message('Applying proof rules', []), apply_user_defined_proof_rules, !. do_simplify_vc :- !. % otherwise, succeed anyway %------------------------------------------------------------------------------- clear_up_old_facts :- retractall(complexity_fact(_, _, _, _)), prune_all_hyps(_, [s|_], _), prune_all_hyps(_, ss, _), prune_all_concs(_, [s|_], _), prune_all_concs(_, ss, _), retractall(join_hyp(_, _, _, _)), prune_all_subst_hyp(_, _, _), prune_all_forall_hyp, retractall(found_contradiction), retractall(know_eliminated(_)), retractall(know_eliminated_in_subgoaling(_,_)), prune_all_processed_hyp_with_field_op, prune_all_processed_hyp_with_field_op_in_subgoal, retractall(know_substituted(_)), prune_all_proved_concs, retractall(know_norm_expr(_, _)), retractall(issued_contradiction_message), retractall(issued_vc_proved_message), retractall(allow_new_strategies), retractall(known_upper_numeric_limit(_,_,_,_)), retractall(known_lower_numeric_limit(_,_,_,_)), retractall(candidate_upper(_,_,_,_)), retractall(candidate_lower(_,_,_,_)), retractall(proved_by_user_rules), fail. clear_up_old_facts :- !. %------------------------------------------------------------------------------- add_hypotheses_from_ground_inference_rules :- inference_rule(_File:_Name, Rule, []), ground(Rule), simplify(Rule, SimpRule), maybe_add_new_hyp(SimpRule), fail. add_hypotheses_from_ground_inference_rules :- !. %------------------------------------------------------------------------------- % If a new hyp simplifies to 'true', then ignore it maybe_add_new_hyp(true) :- !. % Otherwise, add it and a log fact maybe_add_new_hyp(Rule) :- add_hyp(Rule, x, NewNo), assert_log_fact(new_hyp, [NewNo, Rule, toplevel]). 
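%-------------------------------------------------------------------------------
% Editorial illustration (not part of the original source):
% add_hypotheses_from_ground_inference_rules simplifies each ground rule
% first; a rule whose simplified form is 'true' is ignored by
% maybe_add_new_hyp/1, while anything else is added as a fresh hypothesis
% and logged.  The rule terms shown are illustrative only.
%
%   ?- maybe_add_new_hyp(true).       % silently ignored, nothing asserted
%   ?- maybe_add_new_hyp(c >= 0).     % added via add_hyp/3 and logged as new_hyp
%-------------------------------------------------------------------------------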
%------------------------------------------------------------------------------- setup_hypotheses :- \+ get_hyp(_,_,_), asserta(max_hyp_no(0)), find_max_conc_no, !. setup_hypotheses :- ( get_hyp(A<=B, _, M) ; get_hyp(B>=A, _, M) ), ( get_hyp(A>=B, _, N) ; get_hyp(B<=A, _, N) ), do_replace_hyps(M, N, A=B), fail. setup_hypotheses :- ( get_hyp(A<=B, _, M) ; get_hyp(B>=A, _, M) ), ( get_hyp(A<>B, _, N) ; get_hyp(B<>A, _, N) ), do_replace_hyps(M, N, A MAX), !, asserta(max_hyp_no(MAX)), !. find_max_hyp_no(_MAX) :- asserta(max_hyp_no(0)), !. find_max_conc_no :- retractall(max_conc_no(_)), get_conc(_, x, MAX), \+ (get_conc(_, x, X), X > MAX), !, asserta(max_conc_no(MAX)), !. find_max_conc_no :- asserta(max_conc_no(0)), !. %------------------------------------------------------------------------------- fetch_next_hn(N) :- hn(N), !, retractall(hn(_)), NEXT_N is N + 1, asserta(hn(NEXT_N)), !. %------------------------------------------------------------------------------- stan_and_semi_stan(N) :- complexity_limit(LIM), get_hyp(FORMULA, x, N), ( simplification_is_on, simplify(FORMULA, HYP), ( FORMULA \= HYP, replace_hyp(_Old_Hyp, x, N, HYP), assert_log_fact(further_simplified, [hyp, N, FORMULA, HYP]) ; true ) ; HYP = FORMULA ), !, complexity(HYP, N, hyp, x, CX), !, ( CX < LIM, stan(HYP, N, hyp), maybe_semi_stan(HYP, N, hyp) ; true ), !. %------------------------------------------------------------------------------- % Don't bother standardising conclusions already proved. stan_and_semi_stan_conc(N) :- get_proved_conc(N), !. stan_and_semi_stan_conc(N) :- complexity_limit(LIM), get_conc(FORMULA, x, N), ( simplification_is_on, simplify(FORMULA, CONC), ( FORMULA \= CONC, replace_conc(_, x, N, CONC), assert_log_fact(further_simplified, [conc, N, FORMULA, CONC]) ; true ) ; CONC = FORMULA ), !, complexity(CONC, N, conc, x, CX), !, ( CX < LIM, stan(CONC, N, conc), maybe_semi_stan(CONC, N, conc) ; true ), !. %------------------------------------------------------------------------------- complexity(FORMULA, N, HorC, XXX, COMPLEXITY) :- has_complexity(FORMULA, COMPLEXITY), save_complexity(HorC, N, XXX, COMPLEXITY), !. %------------------------------------------------------------------------------- has_complexity(ATOM, 0) :- atomic(ATOM), !. has_complexity(F_ARGS, N) :- nonvar(F_ARGS), !, F_ARGS =.. [_F|ARGS], have_complexity(ARGS, C), N is C + 1. have_complexity([X], N) :- !, has_complexity(X, N). have_complexity([X|REST], N) :- has_complexity(X, XC), !, have_complexity(REST, RC), !, N is XC + RC. %------------------------------------------------------------------------------- stan(FORMULA, N, HorC) :- standardisation_is_on, !, norm_typed_expr(FORMULA, boolean, NEWFORM), !, complexity(NEWFORM, N, HorC, s, _), save_stan_fact(NEWFORM, N, HorC), !. stan(_FORMULA, _N, _HorC) :- !. %------------------------------------------------------------------------------- maybe_semi_stan(FORMULA, N, HorC) :- standardisation_is_on, !, is_relational_expression(FORMULA, RELOP, X, Y, TYPE_OF_ARGS), !, norm_typed_expr(X, TYPE_OF_ARGS, X1), !, norm_typed_expr(Y, TYPE_OF_ARGS, Y1), !, NEWFORM =.. [RELOP, X1, Y1], !, ( NEWFORM = FORMULA ; complexity(NEWFORM, N, HorC, ss, _), save_semi_stan_fact(NEWFORM, N, HorC) ), !. %------------------------------------------------------------------------------- is_relational_expression(X=Y, =, X, Y, TYPE) :- checktype(X, TYPE), checktype(Y, TYPE), !. is_relational_expression(X<>Y, <>, X, Y, TYPE) :- checktype(X, TYPE), checktype(Y, TYPE), !. 
is_relational_expression(X<=Y, <=, X, Y, TYPE) :- checktype(X, TYPE), checktype(Y, TYPE), !. is_relational_expression(X>=Y, >=, X, Y, TYPE) :- checktype(X, TYPE), checktype(Y, TYPE), !. is_relational_expression(X<Y, <, X, Y, TYPE) :- checktype(X, TYPE), checktype(Y, TYPE), !. is_relational_expression(X>Y, >, X, Y, TYPE) :- checktype(X, TYPE), checktype(Y, TYPE), !.
%------------------------------------------------------------------------------- save_complexity(HorC, N, XXX, COMPLEXITY) :- retractall(complexity_fact(HorC, N, XXX, _)), !, assertz(complexity_fact(HorC, N, XXX, COMPLEXITY)).
%------------------------------------------------------------------------------- complexity_token([s,_], s) :- !. complexity_token(X, X).
%------------------------------------------------------------------------------- save_stan_fact(NEWFORM, N, hyp) :- prune_all_hyps(PLACE_HOLDER, [s, TYPE], N), !, save_stan_fact_x(NEWFORM, TYPE), !, PLACE_HOLDER = NEWFORM, add_hyp_with_id(PLACE_HOLDER, [s, TYPE], N), !. save_stan_fact(NEWFORM, N, conc) :- prune_all_concs(PLACE_HOLDER, [s, TYPE], N), !, save_stan_fact_x(NEWFORM, TYPE), !, PLACE_HOLDER = NEWFORM, add_conc_with_id(PLACE_HOLDER, [s, TYPE], N), !. save_stan_fact(_NEWFORM, _N, HorC):- show_error('HorC is not hyp or conc but is ~a in save_stan_fact.', [HorC]).
%------------------------------------------------------------------------------- save_stan_fact_x(NEWFORM, TYPE):- is_relational_expression(NEWFORM, _, _, _, TYPE), !. save_stan_fact_x(_NEWFORM, '@'):- !.
%------------------------------------------------------------------------------- save_semi_stan_fact(NEWFORM, N, hyp) :- prune_all_hyps(PLACE_HOLDER, ss, N), !, PLACE_HOLDER = NEWFORM, add_hyp_with_id(PLACE_HOLDER, ss, N), !. save_semi_stan_fact(NEWFORM, N, conc) :- prune_all_concs(PLACE_HOLDER, ss, N), !, PLACE_HOLDER = NEWFORM, add_conc_with_id(PLACE_HOLDER, ss, N), !. save_semi_stan_fact(_NEWFORM, _N, HorC) :- show_error('HorC is not hyp or conc but is ~a in save_semi_stan_fact.', [HorC]).
%############################################################################### % C O N T R A D I C T I O N H U N T E R %############################################################################### % (1) See if know "false". % (2) For each known P, see if (not P)', sf(not P), ssf(not P) known. % (3) See if can infer empty-range, e.g. for all X in A..B, that A>B (in other % words, a "contradictory pair", e.g. x>3 and x<0). % (4) Perform forward-inferences and standardise to form additional sf facts % [must keep pointers to hypotheses used]. % (5) Perform equality (and equivalence) substitutions to generate new facts and % try standardising these to see if a contradiction can be established. contradiction_hunt :- proved_all_conclusions, !. contradiction_hunt :- contradiction_hunt_is_on, ( ( % (1) issue_message('Contradiction hunt phase 1', []), see_if_know_false(Hs), K='false-hypothesis' ; % (2) issue_message('Contradiction hunt phase 2', []), see_if_know_P_and_not_P(Hs), K='P-and-not-P' ; % (3) issue_message('Contradiction hunt phase 3', []), see_if_can_infer_empty_range(Hs), K='empty-range' ), assert_log_fact(contradiction, [K, Hs]) ; standardisation_is_on, % (4) issue_message('Contradiction hunt phase 4', []), perform_forward_inferences, % (5) issue_message('Contradiction hunt phase 5', []), see_if_contradiction_through_substitutions ), assertz(found_contradiction), issue_found_contradiction_message, !. contradiction_hunt :- !.
%------------------------------------------------------------------------------- % (1) see_if_know_false. see_if_know_false([N]) :- get_hyp(false, _, N), !.
%------------------------------------------------------------------------------- % (2): see_if_know_P_and_not_P. see_if_know_P_and_not_P(Hs) :- get_hyp(P, XXX, N), form_negation(P, NotP), ( infer(NotP, M) ; XXX = x, sufficiently_low_complexity(hyp, N), standardisation_is_on, ( try_infer_standard_form_of(NotP, M) ; try_infer_semi_standard_form_of(NotP, M) ) ), !, merge_sort([N], M, Hs), !. %------------------------------------------------------------------------------- sufficiently_low_complexity(HorC, N) :- complexity_limit(LIM), complexity_fact(HorC, N, _, C), C < LIM, !. %------------------------------------------------------------------------------- form_negation( X = Y , X <> Y ) :- !. form_negation( X <> Y, X = Y ) :- !. form_negation( X < Y, X >= Y ) :- !. form_negation( X > Y, X <= Y ) :- !. form_negation( X <= Y, X > Y ) :- !. form_negation( X >= Y, X < Y ) :- !. form_negation( not P , P ) :- !. form_negation( P, not P ) :- !. %------------------------------------------------------------------------------- try_infer_standard_form_of(P, Hs) :- norm_typed_expr(P, boolean, NormP), !, infer(NormP, Hs), !. %------------------------------------------------------------------------------- try_infer_semi_standard_form_of(P, Hs) :- is_relational_expression(P, RELOP, X, Y, TYPE_OF_ARGS), !, norm_typed_expr(X, TYPE_OF_ARGS, X1), !, norm_typed_expr(Y, TYPE_OF_ARGS, Y1), !, SemiStanP =.. [RELOP, X1, Y1], !, infer(SemiStanP, Hs), !. %------------------------------------------------------------------------------- % (3): see_if_can_infer_empty_range(Hyps) % N.B. We consider an expression E: % % ------------>------<------------ % E <= A...... ......E >= B . % % A=B, (E=A | E<=A, E>A) are covered by P_and_not_P search. % That leaves A=B and EA, and % A=B), (E<=A, E>B), (E<=A, E>=B). % Cover EA. % Cover three A=B and inferring A A, H2), merge_sort(H1, H2, Hs) ; infrule(E <= A, H1), % Part of predicate see_if_know_P_and_not_P/1 involves seeking % hyps of the form A=B, and trying to infer A<>B. Thus, it is a % waste of resources to consider again an inequality which may % be directly derived from a hypothesis equality. To guard for % this, we only continue if there is not a hyp of the form E=A % or A=E. \+ (get_hyp(E=A,_,_) ; get_hyp(A=E,_,_)), retractall(used(_)), infrule(E >= B, H2), infer(A < B, H3), merge_sort(H1, H2, Hx), merge_sort(Hx, H3, Hs) ), !. %------------------------------------------------------------------------------- % (4): perform_forward_inferences. perform_forward_inferences :- initialise_counter(forward_inferences), retractall(pairing_depth(_)), assertz(pairing_depth(-1)), repeat, increment_counter(pairing_depth), pairing_depth(DEPTH), form_new_pairing(DEPTH), increment_counter(forward_inferences), % until sufficient_forward_inferences_or_too_deep, !. %------------------------------------------------------------------------------- initialise_counter(X) :- F =.. [X, Y], retractall(F), Y = 0, asserta(F), !. increment_counter(X) :- F =.. [X, Yold], G =.. [X, Ynew], retract(F), Ynew is Yold + 1, asserta(G), !. %------------------------------------------------------------------------------- form_new_pairing(0) :- get_hyp(X, [s, Tx], Nx), get_hyp(Y, [s, Ty], Ny), Nx < Ny, join(X, Tx, Nx, Y, Ty, [Ny], 1, normal). form_new_pairing(D) :- D > 0, D1 is D + 1, get_hyp(X, [s, Tx], Nx), join_hyp(D, Y, Ty, Ny), join(X, Tx, Nx, Y, Ty, Ny, D1, normal). % ensure can get out! form_new_pairing(D) :- D > 5, !. 
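%-------------------------------------------------------------------------------
% Editorial illustration (not part of the original source): a worked example
% of the pairing performed by join/8 below, assuming standard-form hypotheses
%   H1:  x - y = 3      H2:  y > 2
% The common term y is cancelled with multipliers M1 = 1, M2 = 1, giving
%   1*(x - y) + 1*y > 1*3 + 1*2,   i.e.   x > 5
% which is either tested for an immediate contradiction or stored as a
% depth-1 join_hyp fact for further pairing.
%-------------------------------------------------------------------------------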
%------------------------------------------------------------------------------- % join(X, Tx, Nx, Y, Ty, Ny, D, SAVE). join( X1 = I1, T, N1, X2 = I2, T, N2, D, SAVE ) :- (T = integer ; T = real), insert(N1, N2, N), find_multipliers(X1, X2, M1, M2, []), norm_typed_expr(M1 * X1 + M2 * X2, T, X), I iss M1 * I1 + M2 * I2, save_join_hyp(D, X = I, T, N, SAVE). join( X1 = I1, T, N1, X2 > I2, T, N2, D, SAVE ) :- (T = integer ; T = real), insert(N1, N2, N), find_multipliers(X1, X2, M1, M2, [2]), % 2nd. positive norm_typed_expr(M1 * X1 + M2 * X2, T, X), I iss M1 * I1 + M2 * I2, save_join_hyp(D, X > I, T, N, SAVE). join( X1 = I1, T, N1, X2 <> I2, T, N2, D, SAVE ) :- (T = integer ; T = real), insert(N1, N2, N), find_multipliers(X1, X2, M1, M2, []), norm_typed_expr(M1 * X1 + M2 * X2, T, X), I iss M1 * I1 + M2 * I2, save_join_hyp(D, X <> I, T, N, SAVE). join( X1 > I1, T, N1, X2 > I2, T, N2, D, SAVE ) :- ( T = integer, IM = 1 ; T = real, IM = 0 ), insert(N1, N2, N), find_multipliers(X1, X2, M1, M2, [1, 2]), % Both positive norm_typed_expr(M1 * X1 + M2 * X2, T, X), I iss M1 * I1 + M2 * I2 + IM * (M1 + M2 - 1), save_join_hyp(D, X > I, T, N, SAVE). join( X1 > I1, T, N1, X2 = I2, T, N2, D, SAVE ) :- (T = integer ; T = real), insert(N1, N2, N), find_multipliers(X1, X2, M1, M2, [1]), % 1st. positive norm_typed_expr(M1 * X1 + M2 * X2, T, X), I iss M1 * I1 + M2 * I2, save_join_hyp(D, X > I, T, N, SAVE). join( X1 <> I1, T, N1, X2 = I2, T, N2, D, SAVE ) :- (T = integer ; T = real), insert(N1, N2, N), find_multipliers(X1, X2, M1, M2, []), norm_typed_expr(M1 * X1 + M2 * X2, T, X), I iss M1 * I1 + M2 * I2, save_join_hyp(D, X <> I, T, N, SAVE). % The following should appear for reals only: join( X1 = I1, T, N1, X2 >= I2, real, N2, D, SAVE ) :- (T = integer ; T = real), insert(N1, N2, N), find_multipliers(X1, X2, M1, M2, [2]), % 2nd. positive norm_typed_expr(M1 * X1 + M2 * X2, T, X), I iss M1 * I1 + M2 * I2, save_join_hyp(D, X >= I, real, N, SAVE). join( X1 >= I1, real, N1, X2 = I2, T, N2, D, SAVE ) :- (T = integer ; T = real), insert(N1, N2, N), find_multipliers(X1, X2, M1, M2, [1]), % 1st. positive norm_typed_expr(M1 * X1 + M2 * X2, T, X), I iss M1 * I1 + M2 * I2, save_join_hyp(D, X >= I, real, N, SAVE). join( X1 >= I1, real, N1, X2 >= I2, real, N2, D, SAVE ) :- (T = integer ; T = real), insert(N1, N2, N), find_multipliers(X1, X2, M1, M2, [1, 2]), % Both positive norm_typed_expr(M1 * X1 + M2 * X2, T, X), I iss M1 * I1 + M2 * I2, save_join_hyp(D, X >= I, real, N, SAVE). join( X1 > I1, T, N1, X2 >= I2, real, N2, D, SAVE ) :- (T = integer ; T = real), insert(N1, N2, N), find_multipliers(X1, X2, M1, M2, [1, 2]), % i.e. both positive norm_typed_expr(M1 * X1 + M2 * X2, T, X), I iss M1 * I1 + M2 * I2, save_join_hyp(D, X > I, real, N, SAVE). join( X1 >= I1, real, N1, X2 > I2, T, N2, D, SAVE ) :- (T = integer ; T = real), insert(N1, N2, N), find_multipliers(X1, X2, M1, M2, [1, 2]), % i.e. both positive norm_typed_expr(M1 * X1 + M2 * X2, T, X), I iss M1 * I1 + M2 * I2, save_join_hyp(D, X > I, real, N, SAVE). find_multipliers(X1, X2, M1, M2, []) :- find_common_multipliers(X1, X2, M1, M2). find_multipliers(X1, X2, M1, M2, [1]) :- find_common_multipliers(X1, X2, MX1, MX2), ( MX1 > 0, M1=MX1, M2=MX2 ; MX1 < 0, M1 iss -MX1, M2 iss -MX2 ). find_multipliers(X1, X2, M1, M2, [2]) :- find_common_multipliers(X1, X2, MX1, MX2), ( MX2 > 0, M1=MX1, M2=MX2 ; MX2 < 0, M1 iss -MX1, M2 iss -MX2 ). 
find_multipliers(X1, X2, M1, M2, [1, 2]) :- find_common_multipliers(X1, X2, MX1, MX2), ( MX1 > 0, MX2 > 0, M1=MX1, M2=MX2 ; MX1 < 0, MX2 < 0, M1 iss -MX1, M2 iss -MX2 ). %------------------------------------------------------------------------------- find_common_multipliers(X1, X2, M1, M2) :- know_term_breakdown(X1, TL1), know_term_breakdown(X2, TL2), !, find_cancellation(TL1, TL2, M1, M2). %------------------------------------------------------------------------------- find_cancellation(TL1, TL2, M1, M2) :- gen_is_in([X, I1], TL1), is_in([X, I2], TL2), int_and_sign(I1, PI1, SI1), int_and_sign(I2, PI2, SI2), lcm(PI1, PI2, LCM), M1 iss SI1 * (LCM div PI1), M2 iss - SI2 * (LCM div PI2). %------------------------------------------------------------------------------- gen_is_in(X, [X|_]). gen_is_in(X, [_|Y]) :- gen_is_in(X, Y). %------------------------------------------------------------------------------- int_and_sign(I, I, 1) :- integer(I), I > 0, !. int_and_sign(0, _, _) :- !, fail. int_and_sign(-(0), _, _) :- !, fail. int_and_sign(I, II, - 1) :- s_integer(I), I < 0, !, II iss - I. %------------------------------------------------------------------------------- lcm(X, Y, LCM) :- gcd(X, Y, GCD), LCM iss (X * Y) div GCD, !. %------------------------------------------------------------------------------- gcd(X, X, X) :- !. gcd(X, 0, X) :- !. gcd(X, Y, G) :- X > Y, Y > 0, Z is X mod Y, !, gcd(Y, Z, G). % this causes gcd(50,50,1) to fail, rather than loop via the next clause. gcd(X, X, G) :- !, G=X. gcd(X, Y, G) :- Y > 0, !, gcd(Y, X, G). %------------------------------------------------------------------------------- save_join_hyp(_D, X, _T, N, normal) :- test_if_contradiction(X, N). save_join_hyp(_D, X, _T, _N, _) :- get_hyp(X, _, _), !. save_join_hyp(_D, X, _T, _N, _) :- join_hyp(_, X, _, _), !. save_join_hyp(D, X, T, N, normal) :- assertz(join_hyp(D, X, T, N)), !. save_join_hyp(_D, X, _T, _N, reduction) :- reduction_hyp(X, _), !. save_join_hyp(_D, X, _T, N, reduction) :- simplify(X, XX), assertz(reduction_hyp(XX, N)), !, ( XX = false ; true ), !. %------------------------------------------------------------------------------- test_if_contradiction(X, Hs) :- X =.. [_OP, A, _B], int(A), % and we know int(B), so. simplify(X, false), assertz(found_contradiction), issue_found_contradiction_message, assert_log_fact(contradiction, ['contradictory-combination', Hs]), fail. %------------------------------------------------------------------------------- sufficient_forward_inferences_or_too_deep :- inference_limit(INF), forward_inferences(N), N > INF, !. sufficient_forward_inferences_or_too_deep :- depth_limit(DEPTH), pairing_depth(D), D >= DEPTH, !. %------------------------------------------------------------------------------- % (5): see_if_contradiction_through_substitutions(Hyps) see_if_contradiction_through_substitutions :- complexity_limit(L), Lim is 3 * L, % allow some expansion leeway infrule(X = Y, N), not_too_complex(N, Lim), try_replacement(X, Y, N), % until found_contradiction. see_if_contradiction_through_substitutions :- get_hyp(X, x, N), var_const(X, boolean, _), try_replacement(X, true, N), % until found_contradiction. see_if_contradiction_through_substitutions :- get_hyp(not X, x, N), var_const(X, boolean, _), try_replacement(X, false, N), % until found_contradiction. see_if_contradiction_through_substitutions :- found_contradiction, !. 
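%-------------------------------------------------------------------------------
% Worked example, illustrative only (not in the original source): given the
% hypotheses x - y = 3 and y > 2, find_multipliers/5 returns M1 = 1 and
% M2 = 1 (the y terms cancel), so the second join/8 clause above records
% x > 5, assuming norm_typed_expr reduces 1*(x - y) + 1*y to x and
% 1*3 + 1*2 evaluates to 5.  Similarly, if a shared term t has coefficient 3
% in one hypothesis and -2 in the other, find_cancellation/4 yields M1 = 2
% and M2 = 3, because lcm(3, 2) = 6 and the signs are opposed, so
% 2*(3*t) + 3*(-2*t) = 0.  The predicate below is an invented self-test of
% the gcd/3 and lcm/3 helpers defined above:
example_gcd_lcm_ok :-
   gcd(50, 30, G),
   G = 10,
   lcm(4, 6, L),
   L = 12.
%-------------------------------------------------------------------------------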
%------------------------------------------------------------------------------- not_too_complex([H|T], Lim) :- Lim > 0, complexity_fact(hyp, H, _, C), % So C is largest complexity for H \+ ( complexity_fact(hyp, H, _, N), N>C ), NewLim is Lim - C, !, not_too_complex(T, NewLim). not_too_complex([], Lim) :- !, Lim >= 0. %------------------------------------------------------------------------------- try_replacement(I, _Y, _N) :- s_integer(I), !, fail. try_replacement(X, Y, N) :- get_subst_hyp(H, L, Hs), has_complexity(H, C), complexity_limit(Lim), % allow some expansion leeway C =< 3 * Lim, \+ is_in([X,Y], L), \+ is_in([Y,X], L), subst_vbl(X, Y, H, S), H \= S, merge_sort([N], Hs, NewHs), maybe_stan_and_store_hyp(S, [[X,Y]|L], NewHs). try_replacement(X, Y, N) :- get_hyp(H, x, K), complexity_fact(hyp, K, _, C), \+ ( complexity_fact(hyp, K, _, W), W>C ), % So C is largest complexity for Hk complexity_limit(L), C =< 3 * L, % allow some expansion leeway subst_vbl(X, Y, H, S), H \= S, K \= N, maybe_stan_and_store_hyp(S, [[X,Y]], [K,N]). %------------------------------------------------------------------------------- maybe_stan_and_store_hyp(S, L, Hs) :- complexity_limit(LIM), has_complexity(S, C), C < LIM, norm_typed_expr(S, boolean, S1), add_subst_hyp(S1, L, Hs), ( S1 = false, assertz(found_contradiction), issue_found_contradiction_message, assert_log_fact(contradiction, ['contradiction-through-substitutions', Hs]) ; true ), !. maybe_stan_and_store_hyp(S, L, Hs) :- add_subst_hyp(S, L, Hs), !. %############################################################################### % E X P R E S S I O N R E D U C T I O N %############################################################################### expression_reduction :- found_contradiction, !. expression_reduction :- proved_all_conclusions, !. expression_reduction :- expression_reduction_is_on, expression_reduction1. expression_reduction :- \+ expression_reduction_is_on, !. %------------------------------------------------------------------------------- expression_reduction1 :- eliminate_true_hypotheses. expression_reduction1 :- try_reducing_disjunction_options. expression_reduction1 :- eliminate_duplicates. expression_reduction1 :- eliminate_complementary_pair_disjunctions. expression_reduction1 :- try_simplifying_implications_and_equivalences. expression_reduction1 :- try_join_negations_to_get_contradiction. expression_reduction1 :- try_obvious_substitutions. expression_reduction1 :- eliminate_true_hypotheses. expression_reduction1 :- reduce_conclusions. expression_reduction1 :- see_if_know_false(Hs), % last attempt. assert_log_fact(contradiction, ['false-hypothesis', Hs]), assertz(found_contradiction), issue_found_contradiction_message. expression_reduction1 :- !. %------------------------------------------------------------------------------- eliminate_true_hypotheses :- get_hyp(true, _, N), save_eliminate(N, 'true-hypothesis', []), fail. %------------------------------------------------------------------------------- reduce_conclusions :- get_conc(A or B, _X, N), \+ get_proved_conc(N), try_reduce_disjunction_conclusion(N, A or B, _C), fail. reduce_conclusions :- get_conc(X, _, M), get_conc(X, _, N), M < N, \+ get_proved_conc(N), assert_log_fact(eliminated_conc, [N, M]), add_proved_conc(N), fail. 
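%-------------------------------------------------------------------------------
% Illustrative sketch only -- not the Simplifier's subst_vbl/4 used by
% try_replacement/3 above, whose definition lives elsewhere.  The invented
% predicates example_subst/4 and example_subst_list/4 show the kind of
% syntactic replacement the substitution search performs, e.g.
%   ?- example_subst(x, y + 1, f(x) > x, S).
%   S = (f(y + 1) > y + 1)
example_subst(Old, New, Old, New) :- !.
example_subst(Old, _New, Term, Term) :-
   atomic(Term),
   Term \= Old,
   !.
example_subst(Old, New, Term, NewTerm) :-
   Term =.. [Op|Args],
   example_subst_list(Old, New, Args, NewArgs),
   NewTerm =.. [Op|NewArgs].

example_subst_list(_Old, _New, [], []).
example_subst_list(Old, New, [H|T], [NewH|NewT]) :-
   example_subst(Old, New, H, NewH),
   example_subst_list(Old, New, T, NewT).
%-------------------------------------------------------------------------------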
%------------------------------------------------------------------------------- try_reduce_disjunction_conclusion(N, A or B, C) :- reduce_disjunction(A or B, C, Hs), C \= (A or B), !, replace_conc(A or B, X, N, C), !, ( C=true, assert_log_fact(proved, [N, A or B, Hs, A or B]), add_proved_conc(N), issue_proved_message(N) ; X = x, assert_log_fact(further_simplified, [conc, N, A or B, C, Hs]), ( infer(C, H2), merge_sort(Hs, H2, Hyps), !, assert_log_fact(proved, [N, C, Hyps, C]), add_proved_conc(N), issue_proved_message(N) ; true ) ; true ), !. %------------------------------------------------------------------------------- % "try_reducing_disjunction_options" always fails eventually try_reducing_disjunction_options :- repeat, % until reduced_all_disjunctions, !, fail. %------------------------------------------------------------------------------- reduced_all_disjunctions :- get_hyp(A or B, x, N), try_reducing_disjunction(A or B, x, N), !, fail. % succeed when none left to reduce reduced_all_disjunctions. %------------------------------------------------------------------------------- try_reducing_disjunction(A or B, X, N) :- reduce_disjunction(A or B, C, Hs), C \= (A or B), !, replace_hyp(_Old_Hyp, X, N, C), !, ( X = x, ( C=true, save_eliminate(N, 'true-disjunction', []) ; assert_log_fact(further_simplified, [hyp, N, A or B, C, Hs]) ) ; true ), !. %------------------------------------------------------------------------------- reduce_disjunction(A or _B, true, Hs) :- infer(A, Hs), !. reduce_disjunction(_A or B, true, Hs) :- infer(B, Hs), !. reduce_disjunction(A or B, AA, Hs) :- infer(not B, H1), !, reduce_disjunction(A, AA, H2), !, merge_sort(H1, H2, Hs), !. reduce_disjunction(A or B, BB, Hs) :- infer(not A, H1), !, reduce_disjunction(B, BB, H2), !, merge_sort(H1, H2, Hs), !. reduce_disjunction(A or B, C, Hs) :- reduce_disjunction(A, AA, H1), reduce_disjunction(B, BB, H2), !, ( ( AA = true ; BB = true ), C = true ; C = (AA or BB) ), !, merge_sort(H1, H2, Hs), !. reduce_disjunction(A, A, []) :- !. %------------------------------------------------------------------------------- eliminate_duplicates :- get_hyp(X, _, M), get_hyp(X, _, N), M < N, name(M, ML), append("duplicate of H", ML, MessL), name(Message, MessL), save_eliminate(N, Message, []), fail. %------------------------------------------------------------------------------- eliminate_complementary_pair_disjunctions :- get_hyp(X or Y, _, N), redundant_disjunction(X or Y), save_eliminate(N, 'P-or-not-P disjunction', []), fail. %------------------------------------------------------------------------------- % REDUNDANT_DISJUNCTION(F) - checks if F covers all integers for some subexpression % or if F is basically "X or not X" for some formula X. redundant_disjunction(A or (not A)) :- !. redundant_disjunction((not A) or A) :- !. redundant_disjunction(A or B) :- standardisation_is_on, norm_typed_expr((not A)<->B,boolean,true), !. redundant_disjunction(F) :- covers_interval(F,E,L,U), ( L=[] ; infer(E>=L) ), ( U=[] ; infer(E<=U) ), !. %------------------------------------------------------------------------------- % COVERS_INTERVAL(F,E,L,U) - expression E ranges over L..U in F. covers_interval(F1 and F2,E,L,U) :- find_range(F1 and F2,E,L,U). covers_interval(E=N,E,N,N). covers_interval(N=E,E,N,N). covers_interval(E<N,E,[],N-1). covers_interval(N>E,E,[],N-1). covers_interval(E<=N,E,[],N). covers_interval(N>=E,E,[],N). covers_interval(E>N,E,N+1,[]). covers_interval(N<E,E,N+1,[]). covers_interval(E>=N,E,N,[]). covers_interval(N<=E,E,N,[]).
covers_interval(A or B,E,L,U) :- covers_interval(A,E,L1,U1), covers_interval(B,E,L2,U2), combine_intervals(L1,U1,L2,U2,L,U), !. %------------------------------------------------------------------------------- % FIND_RANGE(CONJ,E,L,U) - find E in CONJ covering L..U in CONJ. find_range(F1 and F2,E,L,U) :- covers_interval(F1,E,L1,U1), covers_interval(F2,E,L2,U2), find_max(L1,L2,L), find_min(U1,U2,U), infer(L<=U). %------------------------------------------------------------------------------- % FIND_MAX(N1,N2,N) - find N in {N1,N2} s.t. N>=N1 and N>=N2. find_max(N1,N2,N) :- ( intexp(N1), ( intexp(N2), ( N1>=N2, N is N1 ; N2>N1, N is N2 ) ; V is N1, ( infer(V>=N2), N=V ; infer(N2>=V), N=N2 ) ) ; intexp(N2), V is N2, ( infer(N1>=V), N=N1 ; infer(V>=N1), N=V ) ; infer(N1>=N2), N=N1 ; infer(N2>=N1), N=N2 ), !. %------------------------------------------------------------------------------- % FIND_MIN(N1,N2,N) - find N in {N1,N2} s.t. N<=N1 and N<=N2. find_min(N1,N2,N) :- ( intexp(N1), ( intexp(N2), ( N1>=N2, N is N2 ; N2>N1, N is N1 ) ; V is N1, ( infer(V>=N2), N=N2 ; infer(N2>=V), N=V ) ) ; intexp(N2), V is N2, ( infer(N1>=V), N=V ; infer(V>=N1), N=N1 ) ; infer(N1>=N2), N=N2 ; infer(N2>=N1), N=N1 ), !. %------------------------------------------------------------------------------- % COMBINE_INTERVALS(L1,L2,U1,U2,LRes,URes) - combine L1..L2 & L2..U2. combine_intervals(L1,U1,U1,U2,L1,U2) :- U1\=[]. combine_intervals(U1,U2,L1,U1,L1,U2) :- U1\=[]. combine_intervals(L1,L2-1,L2,U2,L1,U2). combine_intervals(L2,U2,L1,L2-1,L1,U2). combine_intervals(L1,L2,L2+1,U2,L1,U2). combine_intervals(L2+1,U2,L1,L2,L1,U2). combine_intervals(L1,U1,L2,U2,L2,U1) :- U2\=[], L1\=[], ( standardisation_is_on, norm_typed_expr(U2+1>=L1,boolean,true) ; infer(U2+1>=L1) ; infer(U2>=L1-1) ; infer(L1<=U2) ). combine_intervals(L1,U1,L2,U2,L1,U2) :- U1\=[], L2\=[], ( standardisation_is_on, norm_typed_expr(U1+1>=L2,boolean,true) ; infer(U1+1>=L2) ; infer(U1>=L2-1) ; infer(L2<=U1) ). combine_intervals(L1,_U1,L2,U2,L2,U2) :- L1\=[], L2\=[], ( standardisation_is_on, norm_typed_expr(L2<=L1,boolean,true) ; infer(L2<=L1) ). %------------------------------------------------------------------------------- try_simplifying_implications_and_equivalences :- get_hyp(A -> B, x, N), \+ log_fact(forwardchain2, [N, _, B]), infer(A, Hs), update_implication(N), assert_log_fact(forwardchain2, [N, Hs, B]), fail. try_simplifying_implications_and_equivalences :- get_hyp(A <-> B, x, N), \+ log_fact(forwardchain2, [N, _, B]), infer(A, Hs), \+ is_in(N, Hs), update_equivalence(x, N, A), assert_log_fact(forwardchain2, [N, Hs, B]), fail. try_simplifying_implications_and_equivalences :- get_hyp(A <-> B, x, N), \+ log_fact(forwardchain2, [N, _, A]), infer(B, Hs), \+ is_in(N, Hs), update_equivalence(x, N, B), assert_log_fact(forwardchain2, [N, Hs, A]), fail. %------------------------------------------------------------------------------- update_implication(N) :- update_implication(x, N), fail. update_implication(N) :- update_implication(ss, N), fail. update_implication(N) :- update_implication([s,_], N), fail. update_implication(_N) :- !. %------------------------------------------------------------------------------- update_implication(X, N) :- replace_hyp(_A -> B, X, N, B), complexity_token(X, XX), retract(complexity_fact(hyp, N, XX, _)), complexity(B, N, hyp, XX, _), !. 
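%-------------------------------------------------------------------------------
% Worked example, illustrative only (not in the original source), for the
% interval machinery above.  For an integer expression e, the hypothesis
% e <= 9 or e >= 10 gives covers_interval(e<=9, e, [], 9) and
% covers_interval(e>=10, e, 10, []); combine_intervals([], 9, 10, [], [], [])
% then succeeds because the Simplifier can establish 9+1 >= 10, and the
% resulting bounds [] and [] leave nothing for redundant_disjunction/1 to
% check, so the hypothesis is eliminated as a P-or-not-P disjunction.  The
% invented predicate below mirrors just the U1+1 >= L2 overlap test of
% combine_intervals/6, restricted to ground integer bounds:
example_intervals_meet(_L1, U1, L2, _U2) :-
   integer(U1),
   integer(L2),
   U1 + 1 >= L2.
%-------------------------------------------------------------------------------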
%------------------------------------------------------------------------------- update_equivalence(X, N, A) :- replace_hyp(A <-> B, X, N, B), complexity_token(X, XX), retract(complexity_fact(hyp, N, XX, _)), complexity(B, N, hyp, XX, _), !. update_equivalence(X, N, A) :- replace_hyp(B <-> A, X, N, B), complexity_token(X, XX), retract(complexity_fact(hyp, N, XX, _)), complexity(B, N, hyp, XX, _), !. %------------------------------------------------------------------------------- try_join_negations_to_get_contradiction :- standardisation_is_on, get_hyp(X, [s, Tx], Nx), retractall(reduction_hyp(_,_)), form_negation(X, NotX), norm_typed_expr(NotX, boolean, NormNotX), try_eliminating(NormNotX, Tx, Nx), fail. %------------------------------------------------------------------------------- try_eliminating(X, Tx, Nx) :- get_hyp(Y, [s, Ty], Ny), Nx \= Ny, \+ know_eliminated(Ny), join(X, Tx, Nx, Y, Ty, [Ny], 1, reduction), fail. try_eliminating(X, Tx, Nx) :- join_hyp(_, Y, Ty, Ny), \+ is_in(Nx, Ny), \+ exists_eliminated_in_list(Ny), join(X, Tx, Nx, Y, Ty, Ny, 1, reduction), fail. try_eliminating(_X, _Tx, Nx) :- reduction_hyp(false, Hs), save_eliminate(Nx,'redundant',Hs), !. %------------------------------------------------------------------------------- exists_eliminated_in_list([Ny|_]) :- know_eliminated(Ny), !. exists_eliminated_in_list([_|Rest]) :- exists_eliminated_in_list(Rest), !. %------------------------------------------------------------------------------- save_eliminate(N, _, _) :- know_eliminated(N), !. save_eliminate(N, Message, Hs) :- assertz(know_eliminated(N)), issue_elimination_message(N), eliminate_N_from_HL(N, Hs, Hlist), assert_log_fact(eliminated_hyp, [N, Message, Hlist]), !. %------------------------------------------------------------------------------- eliminate_N_from_HL(N, [N|Rest], HRest) :- !, eliminate_N_from_HL(N, Rest, HRest). eliminate_N_from_HL(N, [H|Rest], [H|HRest]) :- !, eliminate_N_from_HL(N, Rest, HRest). eliminate_N_from_HL(_N, [], []) :- !. %------------------------------------------------------------------------------- try_obvious_substitutions :- % always fails do_obvious_substitutions_at_depth(toplevel). %############################################################################### % P R O O F O F C O N C L U S I O N S %############################################################################### try_to_prove_concs :- % Stop if all conclusions have been proved. proved_all_conclusions, !. try_to_prove_concs :- % Stop if contradictions have been found. found_contradiction, !. try_to_prove_concs :- get_conc(REAL_C, x, N), ( C = REAL_C ; get_conc(C, [s|_], N), C \= REAL_C ; get_conc(C, ss, N), C \= REAL_C ), \+ get_proved_conc(N), try_to_prove_conc(C, N, REAL_C). try_to_prove_concs :- !. %------------------------------------------------------------------------------- try_to_prove_conc(C, N, X) :- clear_up_could_and_could_not_infer_facts, infer(C, Hs), add_proved_conc(N), issue_proved_message(N), assert_log_fact(proved, [N, C, Hs, X]), !, fail. try_to_prove_conc(C, N, X) :- join_hyp(_, C, _, Hs), add_proved_conc(N), issue_proved_message(N), assert_log_fact(proved, [N, C, Hs, X]), !, fail. %------------------------------------------------------------------------------- clear_up_could_and_could_not_infer_facts :- retractall(could_infer(_, _)), retractall(could_not_infer(_)), retractall(known_upper_numeric_limit(_,_,_,_)), retractall(known_lower_numeric_limit(_,_,_,_)), !. 
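%-------------------------------------------------------------------------------
% Illustrative only (invented predicate name): eliminate_N_from_HL/3 above
% just removes the eliminated hypothesis' own number from its justification
% list before the elimination is logged, e.g.
%   ?- eliminate_N_from_HL(2, [1, 2, 3, 2], HL).
%   HL = [1, 3]
example_eliminate_n_ok :-
   eliminate_N_from_HL(2, [1, 2, 3, 2], [1, 3]).
%-------------------------------------------------------------------------------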
%------------------------------------------------------------------------------- issue_found_contradiction_message :- issued_contradiction_message, !. issue_found_contradiction_message :- path_functions, issue_message('Found contradiction in path traversal condition',[]), issue_message('Path eliminated',[]), assert_log_fact(proved_all, []), assertz(issued_contradiction_message), !. issue_found_contradiction_message :- issue_message('Found contradiction within hypotheses',[]), issue_message('VC is therefore true',[]), assert_log_fact(proved_all, []), assertz(issued_contradiction_message), !. %------------------------------------------------------------------------------- issue_elimination_message(_N) :- path_functions, !, issue_message('Eliminated a traversal condition', []), !. issue_elimination_message(_N) :- issue_message('Eliminated a redundant hypothesis', []), !. %------------------------------------------------------------------------------- issue_substitution_message(N) :- % Eliminated an X=Y hypothesis integer(N), !, issue_message('Eliminated equality hypothesis H', N), !. issue_substitution_message(N) :- issue_message('Applied substitution rule ', N), !. %------------------------------------------------------------------------------- issue_proved_message(N) :- issue_message('PROVED C',N), !. %------------------------------------------------------------------------------- maybe_issue_proved_vc_message :- issued_vc_proved_message, !. maybe_issue_proved_vc_message :- issue_message('Proved all conclusions - VC eliminated', []), assertz(issued_vc_proved_message), assert_log_fact(proved_all, []), !. %------------------------------------------------------------------------------- issue_message(_X, _Y) :- % if verbose mode off, do nothing. get_switch_verbose(off), !. issue_message(X, Y) :- % if verbose mode on, dump output to screen. get_switch_verbose(on), telling(F), tell(user), write('*** '), ( % if plain mode is off, then timestamp each message get_switch_plain(off), fetch_date_and_time(_DATE, TIME), write(TIME), write(' ') ; % if plain mode is on, then no timestamp get_switch_plain(on) ), write(X), ( Y = [] ; write(Y) ), !, nl, tell(F), !. %---------------------------------------------------------------------------- do_rule_substitutions1 :- \+ rule_substitution_is_on, !. do_rule_substitutions1 :- rule_substitution_is_on, find_max_conc_no, max_conc_no(MAX), retractall(hn(_)), asserta(hn(1)), repeat, fetch_next_hn(N), replace_conc_rule(N), % until N = MAX, !. %---------------------------------------------------------------------------- replace_conc_rule(N) :- get_proved_conc(N), !. replace_conc_rule(N) :- get_conc(C, x, N), ( replace_conc_rule1(C) ; % Where Conclusion N has constants that can be replaced on BOTH the % Left and Right hand sides, then we have to try again. % The first replace_conc_rule1(C) above might have changed conc/3 % so we have to match again to pick it up as C2 here. get_conc(C2, x, N), replace_conc_rule1(C2) ). % No more conclusions left, then just succeed. replace_conc_rule(_N) :- !. %---------------------------------------------------------------------------- replace_conc_rule1(C) :- C = (X <= _Y), % Find a conclusion of the form X <= Y var_const(X, _T, c), % and X denotes an FDL constant replace_rule(R, X, V), % and there's a replacement rule for it. !, substitute_and_eliminate(subst_and_elim, user_rule_no, toplevel, R, X, V), % go ahead and replace it. fail. 
replace_conc_rule1(C) :- C = (_X <= Y), % Find a conclusion of the form X <= Y var_const(Y, _T, c), % and Y denotes an FDL constant replace_rule(R, Y, V), % and there's a replacement rule for it. !, substitute_and_eliminate(subst_and_elim, user_rule_no, toplevel, R, Y, V), % go ahead and replace it. fail. replace_conc_rule1(C) :- C = (X >= _Y), % or X >= Y var_const(X, _T, c), % and X denotes an FDL constant replace_rule(R, X, V), % and there's a replacement rule for it. !, substitute_and_eliminate(subst_and_elim, user_rule_no, toplevel, R, X, V), % go ahead and replace it. fail. replace_conc_rule1(C) :- C = (_X >= Y), % or X >= Y var_const(Y, _T, c), % and Y denotes an FDL constant replace_rule(R, Y, V), % and there's a replacement rule for it. !, substitute_and_eliminate(subst_and_elim, user_rule_no, toplevel, R, Y, V), % go ahead and replace it. fail. %---------------------------------------------------------------------------- do_rule_substitutions2 :- \+ rule_substitution_is_on, !. do_rule_substitutions2 :- rule_substitution_is_on, replace_rule(Name, Old, New), substitute_and_eliminate(subst_and_elim, user_rule_no, toplevel, Name, Old, New), fail. % At this point, we can also exploit ground condition-less % user-defined replacement rules where the left-hand side % denotes an FDL constant do_rule_substitutions2 :- rule_substitution_is_on, user_rewrite_rule(File:Name, Old, New, []), ground(Old), ground(New), var_const(Old, _T, c), substitute_and_eliminate(subst_and_elim, user_rule_yes(File:Name), toplevel, Name, Old, New), fail. do_rule_substitutions2 :- !. %--------------------------------------------------------------------------- extended_simplify :- get_hyp(Expr, x, N), \+ know_eliminated(N), try_further_hyp_simplification(Expr, N), fail. extended_simplify :- get_conc(Expr, x, N), \+ get_proved_conc(N), try_further_conc_simplification(Expr, N), fail. extended_simplify. % succeed when none left to reduce %--------------------------------------------------------------------------- try_further_hyp_simplification(Expr, N) :- eliminate_redundant_moduluses(Expr, New, Hs), simplify(New, NewExpr), !, replace_hyp(_Old_Hyp, x, N, NewExpr), !, ( NewExpr=true, save_eliminate(N, 'redundant', []) ; assert_log_fact(further_simplified, [hyp, N, Expr, NewExpr, Hs]) ), !. %--------------------------------------------------------------------------- try_further_conc_simplification(Expr, N) :- eliminate_redundant_moduluses(Expr, New, Hs), simplify(New, NewExpr), !, replace_conc(_, x, N, NewExpr), assert_log_fact(further_simplified, [conc, N, Expr, NewExpr, Hs]), !, ( NewExpr=true, assert_log_fact(proved, [N, NewExpr, [], NewExpr]), add_proved_conc(N), issue_proved_message(N) ; true ), !. %--------------------------------------------------------------------------- eliminate_redundant_moduluses(Expr, NewExpr, Hs) :- do_eliminate_redundant_moduluses(Expr, NewExpr, Hs), !, Expr \= NewExpr. 
% only succeed if result is different %--------------------------------------------------------------------------- do_eliminate_redundant_moduluses(Expr mod N, Result, Hs) :- int(N), N > 0, !, do_eliminate_redundant_moduluses(Expr, NewExpr, H1), !, ( ( find_lower_numeric_limit_for(NewExpr, XL, integer, H2), simplify(XL >= 0, true) ; safe_deduce(NewExpr >= 0, integer, H2) ), find_upper_numeric_limit_for(NewExpr, XU, integer, H3), simplify(XU >= 0, true), simplify(XU <= N-1, true), Result = NewExpr, append(H1, H2, Hrest), append(H3, Hrest, HL), sort(HL, Hs) ; NewExpr \= Expr, Result = NewExpr mod N, Hs = H1 ; Result = Expr mod N, Hs = [] ), !. do_eliminate_redundant_moduluses(Atomic, Atomic, []) :- atomic(Atomic), !. do_eliminate_redundant_moduluses([H|T], Result, Hs) :- !, do_eliminate_redundant_moduluses_in_list([H|T], Result, Hs). do_eliminate_redundant_moduluses(Expr, NewExpr, Hs) :- Expr =.. [Op|Args], do_eliminate_redundant_moduluses_in_list(Args, NewArgs, Hs), !, NewExpr =.. [Op|NewArgs]. %--------------------------------------------------------------------------- do_eliminate_redundant_moduluses_in_list([], [], []). do_eliminate_redundant_moduluses_in_list([H|T], [NewH|NewT], Hs) :- do_eliminate_redundant_moduluses(H, NewH, Hh), !, do_eliminate_redundant_moduluses_in_list(T, NewT, Ht), !, append(Hh, Ht, HL), sort(HL, Hs). %############################################################################### % P R O O F S T R A T E G I E S %############################################################################### %=============================================================================== % try_proof_framing. %------------------------------------------------------------------------------- % The predicate try_proof_framing is called by do_simplify_vc if there are % still conclusions left to prove after all other armoury has been tried, % so it is the last resort. % % The key characteristic of proof framing, over other strategies in the % Simplifier, is the creation and investigation of subgoals. These subgoals % typically contain additional hypotheses which logically can not, or % simply are not desired, at the top goal level. % % The predicate works by calling try_toplevel_proof_strategy/2 for each % conclusion in turn, with a "depth" which determines how many "proof % frames" may be entered in the search for a proof (to bound the attempt). % The maximum value for this depth, which is what it starts as (and counts % down as successive proof frames are entered) is defined by the predicate % max_proof_framing_depth(15). %=============================================================================== try_proof_framing :- % do it for the main VC to a limited depth. max_proof_framing_depth(MaxDepth), % Find how many conclusions post-restructuring. find_max_conc_no, max_conc_no(MaxConcNumber), retractall(pfn(_)), asserta(pfn(1)), % Repeat for each conclusion. repeat, clear_up_stack_facts, fetch_next_pfn(ConcNumber), try_toplevel_proof_strategy(MaxDepth, ConcNumber), % until ConcNumber = MaxConcNumber, !. %--------------------------------------------------------------------------- % Stop searching at MaxDepth, in trying to prove conclusion ConcNumber. try_toplevel_proof_strategy(MaxDepth, ConcNumber) :- do_try_toplevel_proof_strategy(MaxDepth, ConcNumber), !. % catchall: always succeed try_toplevel_proof_strategy(_, _) :- !. %--------------------------------------------------------------------------- % Search depth value is hard-coded. 
Experimentation and experience suggests % this is a good value. max_proof_framing_depth(15). %--------------------------------------------------------------------------- % Fetch next conclusion number to be investigated. fetch_next_pfn(ConcNumber) :- pfn(ConcNumber), !, retractall(pfn(_)), NextConcNumber is ConcNumber + 1, asserta(pfn(NextConcNumber)), !. %=============================================================================== % do_try_toplevel_proof_strategy(+Depth, +ConcNumber). %------------------------------------------------------------------------------- % This predicate invokes proof framing (also known as subgoaling) depending % on the form of top level conclusions. This top level phase quickly % targets those conclusions that are more likely to be susceptible to % proof framing, and, where successful, records a matching log entry. %=============================================================================== % No further work needed as conclusion is proved. do_try_toplevel_proof_strategy(_Depth, ConcNumber) :- get_proved_conc(ConcNumber), !. % In describing changed composites, the examiner generates a goal of the % form: % H1: constraint(oldvar) % H2: newvar = update_construct(oldvar except for changed part) % H3: constraint(changed part of newvar) % C1: property(in terms of of newvar) % % Proof of this goal involves considering the unchanged and the changed % case. The existing proof by cases strategies perform this directly, if % newvar is replaced with its updated construct, enabling the proof by % cases to trigger through syntax matching. Thus, this entry predicate % applies such rewrites. do_try_toplevel_proof_strategy(Depth, ConcNumber) :- get_conc(Conc, x, ConcNumber), % Modify conclusion, by introducing external updates. Only successful if % a change is made. Collect relevant hyp numbers for log. rewrite_oldvars_with_newvars(Conc, NewConc, HypNumberList), !, post_rewrite_do_proof_or_fail(Depth, ConcNumber, Conc, NewConc, HypNumberList), % Success. assert_log_fact(proved_by_framing, [ConcNumber, Conc, 'replacing variables with update structures']), issue_proved_message(ConcNumber), !. % Quantifier unwrapping, targeting conclusions of form: for_all(Var:Type, Prop) . do_try_toplevel_proof_strategy(Depth, ConcNumber) :- get_conc(for_all(Var:Type, Prop), x, ConcNumber), !, do_quantification_proof_or_fail(Depth, ConcNumber, Var, Type, Prop), % Success. assert_log_fact(proved_by_framing, [ConcNumber, for_all(Var:Type, Prop), 'unwrapping a universally-quantified formula']), issue_proved_message(ConcNumber), !. % Proof by implication, targeting conclusions of form: Antecedent -> Consequent. % Note that the strategy is not recursively attempted on subgoals via % try_to_discharge_goals_to_depth/1, because this situation is already % considered by prove_subgoal/3. do_try_toplevel_proof_strategy(Depth, ConcNumber) :- % Implication conclusion. get_conc(Antecedent -> Consequent, x, ConcNumber), !, do_implication_proof_or_fail(Depth, ConcNumber, Antecedent, Consequent), % Success. assert_log_fact(proved_by_framing, [ConcNumber, Antecedent -> Consequent, implication]), issue_proved_message(ConcNumber), !. % Performs a targeted proof by cases, seeking universally quantified % subexpressions of the form: % element(update(A, [I], X), [J]) or update(update(A, [I], X), [J], Y) % Where the Simplifier cannot establish the truth of I=J. % % If triggered, a proof by cases is attempted, trying the two cases I=J and % I<>J in turn. 
If the conclusion can be proved for each case, then it is % true and we have completed an automated proof by cases much like a human % prover would carry out with the Proof Checker for such a conclusion. do_try_toplevel_proof_strategy(Depth, ConcNumber) :- get_conc(Conc, x, ConcNumber), is_inequality_with_updates(Conc, CasesList), !, do_cases_proof_or_fail(Depth, by_cases, ConcNumber, Conc, CasesList), % Success. assert_log_fact(proved_by_framing, [ConcNumber, Conc, 'cases on index values, given the update(A, [I], X) subexpressions']), issue_proved_message(ConcNumber), !. % Performs a targeted proof by cases, seeking goals of the the form: % Hn: A1 or A2 ... Ai % -> % Cm: B1 or B2 ... Bj % % If reaching this stage, the simplifier has been unable to prove Cm given % Hn. The assumption is that Hn is not strong enough to prove Cm. However, % each disjunct of Hn may be strong enough to prove one disjunct of Cm, % enabling the proof to be completed. This is attempted through a proof by % cases on the disjuncts on Hn. Note that, to minimise the search space, % the strategy is not recursively attempted on subgoals. Thus, if multiple % disjoined hypotheses need to be considered (a proof by cases, within a % proof by cases) the proof will not be found. If this becomes an issue, % the strategy might be made recursive and its performance impact % investigated accordingly. do_try_toplevel_proof_strategy(Depth, ConcNumber) :- get_conc(ConcExp, x, ConcNumber), is_targeted_disjunction(ConcExp, _CasesList), !, get_hyp(HypExp, _, HypNumber), is_targeted_disjunction(HypExp, CasesList), !, do_cases_proof_or_fail(Depth, by_cases_hyp, ConcNumber, ConcExp, CasesList), % Success. assert_log_fact(proved_by_framing_hyp, [ConcNumber, HypNumber, ConcExp, 'cases on disjunction']), issue_proved_message(ConcNumber), !. %================================================================================ % rewrite_oldvars_with_newvars(+Conc, -NewConc, -HypNumberList). %------------------------------------------------------------------------------- % Succeeds where conclusion is modifed via composite updates. %================================================================================ rewrite_oldvars_with_newvars(Conc, NewConc, HypNumberList):- % Retrieve all composite updates. findall(HypNumber-(From, To), is_update_hyp(From, To, HypNumber), UnsortedUpdateKeyList), % Each composite update hypothesis refers to a variable constrained in % earlier hypotheses. To apply these updates, unravelling the nesting %accordingly, they are reversed. keysort(UnsortedUpdateKeyList, SortedUpdateKeyList), reverse(SortedUpdateKeyList, RevSortedUpdateKeyList), % Apply relevant updates. apply_relevant_updates(Conc, RevSortedUpdateKeyList, NewConc, HypNumberList), !, % Updates were found if some hypotheses rewrites were performed. \+ HypNumberList=[], !. %------------------------------------------------------------------------------- % is_update_hyp(+From, +To, -HypNumber). is_update_hyp(From, To, HypNumber):- % Consider all standard hyps. get_hyp(From = To, x, HypNumber), % Only select those that transform variables. var_const(From, _, v), % Only select those that appear to be describing a portion of a % composite update. is_update_composite_hyp(From, To). %------------------------------------------------------------------------------- % is_update_composite_hyp(+From, +To). % Seek array update at top level of right hand side. 
is_update_composite_hyp(From, update(_Array, [_Index], NewValue)):- % Check that the updated form is expressed in terms of the old % variable. exp_contains_exp(NewValue, From), !. % Seek field update at top level of right hand side. is_update_composite_hyp(From, To):- % Check is field update. functor(To, Functor, 2), atom_concat('upf_', _FieldName, Functor), % Retrieve the new value. arg(2, To, NewValue), % Check that the updated form is expressed in terms of the old % variable. exp_contains_exp(NewValue, From), !. %------------------------------------------------------------------------------- % apply_relevant_updates(+Conc, +RevSortedUpdateKeyList, +NewConc, -HypNumberList). apply_relevant_updates(Conc_m_NewConc, [], Conc_m_NewConc, []):- !. % Apply composite updates whose variable is referenced in the conclusion. apply_relevant_updates(Conc, [HypNumber-(From, To) | T_UpdateKeyList], NewConc, [HypNumber | T_HypNumberList]):- exp_contains_exp(Conc, From), safe_subst_vbl(From, To, Conc, UpdatedConc), apply_relevant_updates(UpdatedConc, T_UpdateKeyList, NewConc, T_HypNumberList). % From above, variable not referenced in conclusion. Dismiss the composite % update. apply_relevant_updates(Conc, [_H_UpdateKey | T_UpdateKeyList], NewConc, HypNumberList):- apply_relevant_updates(Conc, T_UpdateKeyList, NewConc, HypNumberList). %=============================================================================== % split_conjunction(+Exp, -ConjunctList). %------------------------------------------------------------------------------- % Split a conjunction Exp into its conjuncts ConjunctList. %=============================================================================== split_conjunction(A and B, ConjunctList) :- split_conjunction(A, As), !, split_conjunction(B, Bs), !, append(As, Bs, ConjunctList). split_conjunction(F, [F]). %=============================================================================== % split_disjunction(+Exp, -DisjunctList). %------------------------------------------------------------------------------- % Split a disjunction Exp into its disjuncts DisjunctList. %=============================================================================== split_disjunction(A or B, DisjunctList) :- split_disjunction(A, As), !, split_disjunction(B, Bs), !, append(As, Bs, DisjunctList). split_disjunction(F, [F]). %============================================================================== %=============================================================================== % do_simplify_implications_and_equivalences. %------------------------------------------------------------------------------- % Simplify implications and equivalences at the current goal. %=============================================================================== do_simplify_implications_and_equivalences :- get_hyp(A -> B, x, N), \+ log_fact(forwardchain2, [N, _, B]), infer(A, Hs), update_implication(N), assert_log_fact(forwardchain2, [N, Hs, B]), fail. do_simplify_implications_and_equivalences :- get_hyp(A <-> B, x, N), \+ log_fact(forwardchain2, [N, _, B]), infer(A, Hs), \+ is_in(N, Hs), update_equivalence(x, N, A), assert_log_fact(forwardchain2, [N, Hs, B]), fail. do_simplify_implications_and_equivalences :- get_hyp(A <-> B, x, N), \+ log_fact(forwardchain2, [N, _, A]), infer(B, Hs), \+ is_in(N, Hs), update_equivalence(x, N, B), assert_log_fact(forwardchain2, [N, Hs, A]), fail. do_simplify_implications_and_equivalences:- !. 
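%-------------------------------------------------------------------------------
% Illustrative only (invented predicate name): the splitters above flatten
% nested conjunctions and disjunctions into lists of operands, which later
% strategies treat as hypotheses to add or cases to try, e.g.
%   ?- split_conjunction(a and (b and c), L).     L = [a, b, c]
%   ?- split_disjunction((a or b) or c, L).       L = [a, b, c]
example_split_ok :-
   split_conjunction(a and (b and c), [a, b, c]),
   split_disjunction((a or b) or c, [a, b, c]).
%-------------------------------------------------------------------------------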
%=============================================================================== % try_to_discharge_goals_to_depth(+Depth). %------------------------------------------------------------------------------- % Try to prove the goal at depth Depth. %=============================================================================== try_to_discharge_goals_to_depth(Depth) :- % Always succeeds. try_to_prove_subgoal_concs(Depth), % Only succeeds if all conclusions have been proved. proved_all_subgoal_conclusions, !. try_to_discharge_goals_to_depth(Depth) :- % reached depth limit, so stop (by failing) Depth =< 0, !, fail. try_to_discharge_goals_to_depth(Depth) :- % Explore subgoals at a deeper depth. NewDepth is Depth-1, ( get_conc(for_all(Var:Type, Prop), x, ConcNumber), \+ get_proved_conc(ConcNumber), !, do_quantification_proof_or_fail(NewDepth, ConcNumber, Var, Type, Prop) ; % Inspect the conclusion, looking for an inequality that suggests a % case split which is likely to advance proof. get_conc(Conc, x, ConcNumber), \+ get_proved_conc(ConcNumber), is_inequality_with_updates(Conc, CasesList), !, do_cases_proof_or_fail(NewDepth, by_cases, ConcNumber, Conc, CasesList) ), !, try_to_discharge_goals_to_depth(Depth), !. %---------------------------------------------------------------------------- % try_to_prove_subgoal_concs(Depth) -- always succeeds. try_to_prove_subgoal_concs(Depth) :- clear_up_could_and_could_not_infer_facts, get_conc(ActualConc, x, ConcNumber), ( % Consider the actual conclusion. ConsiderConc = ActualConc ; % Consider standardised form of the actual conclusion, if different. get_conc(ConsiderConc, [s|_], ConcNumber), ConsiderConc \= ActualConc ; % Consider semi-standardised form of the actual conclusion, if % different. get_conc(ConsiderConc, ss, ConcNumber), ConsiderConc \= ActualConc ), \+ get_proved_conc(ConcNumber), try_to_prove_subgoal_conc(Depth, ConsiderConc, ConcNumber, ActualConc). try_to_prove_subgoal_concs(_Depth) :- !. %---------------------------------------------------------------------------- % The following always fails, but updates state if conclusion is proved. try_to_prove_subgoal_conc(Depth, ConsiderConc, ConcNumber, ActualConc) :- ( % A direct contradiction has been found. get_hyp(false, _, H), Hs = [H] ; % Conclusion can be inferred. infer(ConsiderConc, Hs) ), % Log the proof and stop. add_proved_conc(ConcNumber), assert_log_fact(proved_subgoal, [ConcNumber, ConsiderConc, Hs, ActualConc, Depth]), !, fail. %---------------------------------------------------------------------------- % Succeeds where all conclusions have been proved. proved_all_subgoal_conclusions :- \+ (get_conc(_, _, ConcNumber), \+ get_proved_conc(ConcNumber)), !. %=============================================================================== % do_quantification_proof_or_fail(+Depth, +ConcNumber, +Var, +Type, +Prop). %------------------------------------------------------------------------------- % Unwrap a universally quantified expression, and create a new subgoal. %=============================================================================== % Predicate succeeds if and only if the proof is achieved. do_quantification_proof_or_fail(Depth, ConcNumber, Var, Type, Prop) :- push_vc_state, assert_log_fact(unwrapping, [ConcNumber, Depth]), try_proof_by_unwrapping(Depth, ConcNumber, Var, Type, Prop), !, % Success. pop_vc_state(success), add_proved_conc(ConcNumber). do_quantification_proof_or_fail(_Depth, _ConcNumber, _Var, _Type, _Prop) :- % From above, failure. Restore state. 
pop_vc_state(failure), !, fail. %---------------------------------------------------------------------------- try_proof_by_unwrapping(Depth, ConcNumber, Var, Type, Prop) :- make_new_uvar(Var, Type, UniqVar), subst_vbl(Var, UniqVar, Prop, NewProp), prove_subgoal(Depth, ConcNumber, NewProp), !. %---------------------------------------------------------------------------- % Introduce a new variable as UniqVar, with its name derived from Var and % Type and the presence of previously introduced variables. For example: % Var=myvariable, Type=integer, UniqVar=int_myvariable_1 % Var=a, Type=t, UniqVar=t_a_1 make_new_uvar(Var, Type, UniqVar) :- curtailType(Type, CurtailType), % Assemble the prefix for the unique name. atom_concat(CurtailType, '_', CurtailTypeUnderBar_Atom), atom_concat(CurtailTypeUnderBar_Atom, Var, CurtailTypeUnderBarVar_Atom), atom_concat(CurtailTypeUnderBarVar_Atom, '_', Prexix_Atom), !, % Seek a uniq number for this prefix. repeat, nextnumber(Prexix_Atom, NumberInt), integer_to_atom(NumberInt, NumberAtom), atom_concat(Prexix_Atom, NumberAtom, UniqVar), nondeclared(UniqVar), % Found uniq name. Externally record this new item alongside its new % core type. find_core_type(Type, CoreType), assertz(var_const(UniqVar, CoreType, p)), !. %---------------------------------------------------------------------------- curtailType(Type, CurtailType):- atom_chars(Type, Type_CharList), curtailType_x(Type_CharList, CurtailType_CharList), atom_chars(CurtailType, CurtailType_CharList), !. %---------------------------------------------------------------------------- curtailType_x([], []). curtailType_x([AChar], [AChar]). curtailType_x([AChar, BChar], [AChar, BChar]). curtailType_x([AChar, BChar, CChar | _Rest_CharList], [AChar, BChar, CChar]). %---------------------------------------------------------------------------- % Generate increasing numbers for namespace Atom, starting at 1. nextnumber(Atom, NextInt) :- retract(current_root(Atom,PrevInt)), NextInt is PrevInt+1, asserta(current_root(Atom,NextInt)), !. nextnumber(Atom, 1) :- asserta(current_root(Atom,1)), !. %---------------------------------------------------------------------------- % Check Atom has not been declared already. nondeclared(Atom) :- var_const(Atom, _, _), !, fail. nondeclared(_Atom) :- !. %=============================================================================== % post_rewrite_do_proof_or_fail(+Depth, +ConcNumber, +Conc, +NewConc, +HypNumberList). %------------------------------------------------------------------------------- % Describe a rewrite step in the logs, at the correct stack depth, and continue % proof. %=============================================================================== % Predicate succeeds if and only if the proof is achieved. post_rewrite_do_proof_or_fail(Depth, ConcNumber, Conc, NewConc, HypNumberList) :- push_vc_state, assert_log_fact(composite_rewrite, [ConcNumber, Conc, HypNumberList]), prove_subgoal(Depth, ConcNumber, NewConc), !, % Success. pop_vc_state(success), add_proved_conc(ConcNumber). post_rewrite_do_proof_or_fail(_Depth, _ConcNumber, _Conc, _NewConc, _HypNumberList) :- % From above, failure. Restore state. pop_vc_state(failure), !, fail. %=============================================================================== % do_implication_proof_or_fail(+Depth, +ConcNumber, +Antecedent, +Consequent). %------------------------------------------------------------------------------- % Forward chain an implication, and create a new subgoal. 
%=============================================================================== % Predicate succeeds if and only if the proof is achieved. do_implication_proof_or_fail(Depth, ConcNumber, Antecedent, Consequent) :- push_vc_state, assert_log_fact(implies_conc, [ConcNumber, Antecedent, Consequent, Depth]), try_proof_by_forwardchain(Depth, ConcNumber, Antecedent, Consequent), !, % Success. pop_vc_state(success), add_proved_conc(ConcNumber). do_implication_proof_or_fail(_Depth, _ConcNumber, _Antecedent, _Consequent) :- % From above, failure. Restore state. pop_vc_state(failure), !, fail. %---------------------------------------------------------------------------- try_proof_by_forwardchain(Depth, ConcNumber, Antecedent, Consequent) :- prove_subgoal(Depth, ConcNumber, Antecedent -> Consequent), !. %=============================================================================== % do_cases_proof_or_fail(+Depth, +Trigger, +ConcNumber, +Conc, +CasesList). %------------------------------------------------------------------------------- % Perform a proof by cases over CasesList, creating a new subgoal for each case. %=============================================================================== % Predicate succeeds if and only if the proof is achieved. do_cases_proof_or_fail(Depth, Trigger, ConcNumber, Conc, CasesList) :- push_vc_state, assert_log_fact(Trigger, [ConcNumber, CasesList, Depth]), try_proof_by_cases(1, Depth, ConcNumber, Conc, CasesList), !, % Success. clear_up_could_and_could_not_infer_facts, pop_vc_state(success), add_proved_conc(ConcNumber). do_cases_proof_or_fail(_Depth, _Trigger, _ConcNumber, _Conc, _CasesList) :- % From above, failure. Restore state. clear_up_could_and_could_not_infer_facts, pop_vc_state(failure), !, fail. %---------------------------------------------------------------------------- try_proof_by_cases(_CaseNumber, _Depth, _ConcNumber, _Conc, []) :- !. % Do not need to reset the vc state for the first case, as the state is % initially clean. try_proof_by_cases(1, Depth, ConcNumber, Conc, [Case|CasesList]) :- !, noisily_add_new_hyp_list([Case], by_cases(1), Depth), handle_cases_proof(1, Depth, ConcNumber, Conc, Case, CasesList). % Do need to reset the vc state for all subsequent cases. try_proof_by_cases(CaseNumber, Depth, ConcNumber, Conc, [Case|CasesList]) :- !, % Reset the vc state, so ready to start subsequent case. pop_vc_state(cases), push_vc_state, noisily_add_new_hyp_list([Case], by_cases(CaseNumber), Depth), handle_cases_proof(CaseNumber, Depth, ConcNumber, Conc, Case, CasesList). %---------------------------------------------------------------------------- % Two routes for each case: either the hypotheses now contain a contradiction % arising from the case, or the conclusion must be proved under that case. handle_cases_proof(CaseNumber, Depth, ConcNumber, Conc, Case, CasesList) :- clear_up_could_and_could_not_infer_facts, contradiction_in_hypotheses_from_cases(Case, ConcNumber), !, NextCaseNumber is CaseNumber+1, try_proof_by_cases(NextCaseNumber, Depth, ConcNumber, Conc, CasesList), !. handle_cases_proof(CaseNumber, Depth, ConcNumber, Conc, _Case, CasesList) :- clear_up_could_and_could_not_infer_facts, simplify(Conc, NewConc), !, prove_subgoal(Depth, ConcNumber, NewConc), !, NextCaseNumber is CaseNumber+1, try_proof_by_cases(NextCaseNumber, Depth, ConcNumber, Conc, CasesList), !. %---------------------------------------------------------------------------- % Conduct targeted contradiction searches, based on the form of cases that % are expected to be considered.
contradiction_in_hypotheses_from_cases(X = Y, ConcNumber) :- infer(X <> Y, Hs), !, ( get_hyp(X = Y, _, HypNumber), merge_sort([HypNumber], Hs, HL) ; HL = Hs ), !, assert_log_fact(contradiction, ['case-exclusion', HL]), !, add_proved_conc(ConcNumber), !. contradiction_in_hypotheses_from_cases(X <> Y, ConcNumber) :- ( find_empty_range(X <> Y, Hs) ; find_empty_range(Y <> X, Hs) ), !, assert_log_fact(contradiction, ['empty-range', Hs]), !, add_proved_conc(ConcNumber), !. %---------------------------------------------------------------------------- find_empty_range(X <> Y, [Ha, Hb|Hs]) :- ( get_hyp(L <= Y, _, Ha) ; get_hyp(Y >= L, _, Ha) ), ( get_hyp(Y <= U, _, Hb) ; get_hyp(U >= Y, _, Hb) ), ( is_an_empty_range(L, U), !, Hs = [] ; is_non_empty_range(L, U), all_excluded(Y, X, L, U, Hs) ). %---------------------------------------------------------------------------- is_an_empty_range(L, U) :- checktype(L, T), ( T = integer, signed_integer(L), signed_integer(U), simplify(L>U, true) ; enumeration(T, EL), is_in(L, EL), is_in(U, EL), enumerated_simplify(L>U, true) ), !. %---------------------------------------------------------------------------- % only succeed if at most 16 elements is_non_empty_range(L, U) :- checktype(L, T), ( T = integer, signed_integer(L), signed_integer(U), simplify(L<=U, true), simplify(U-L<16, true) ; enumeration(T, EL), is_in_with_pos(L, EL, PL), is_in_with_pos(U, EL, PU), PU - PL < 16, enumerated_simplify(L<=U, true) ), !. %---------------------------------------------------------------------------- all_excluded(Y, X, L, U, Hs) :- is_excluded(Y, X, L, Ha), ( L = U, !, Hs = Ha ; next_value_to_try(L, L1), all_excluded(Y, X, L1, U, Hb), merge_sort(Ha, Hb, Hs) ). %---------------------------------------------------------------------------- is_excluded(Y, _X, L, Hs) :- infer(Y <> L, Hs), !. is_excluded(_Y, L, L, _Hs) :- !. %---------------------------------------------------------------------------- next_value_to_try(L, L1) :- checktype(L, T), ( signed_integer(L), L1 iss L + 1 ; enumeration(T, EL), enumerated_simplify(succ(L), L1), is_in(L1, EL) ), !. %---------------------------------------------------------------------------- is_in_with_pos(Literal, Elements, Position) :- !, is_in_with_pos_from(0, Literal, Elements, Position). %---------------------------------------------------------------------------- is_in_with_pos_from(Position, Literal, [Literal|_], Position) :- !. is_in_with_pos_from(Start, Literal, [_|Elements], Position) :- New_Start is Start + 1, !, is_in_with_pos_from(New_Start, Literal, Elements, Position). %=============================================================================== % prove_subgoal(+Depth, +ConcNumber, +ConcExp). %------------------------------------------------------------------------------- % Working at depth Depth, on a goal related to top level conclusion number % ConcExp, and trying to prove ConcExp. %=============================================================================== % Recursively move antecedents as hypotheses in place, without creating % new subgoals. prove_subgoal(Depth, ConcNumber, A -> B) :- !, assert_log_fact(add_imp_hyps, [Depth]), split_conjunction(A, As), noisily_add_new_hyp_list(As, ordinary, Depth), do_simplify_implications_and_equivalences, restructure_vc(Depth), simplify(B, NEW_B), ( B = NEW_B ; assert_log_fact(simplified_conc, [ConcNumber, B, NEW_B, Depth]) ), !, prove_subgoal(Depth, ConcNumber, NEW_B). 
%---------------------------------------------------------------------------- % From above, ConcExp is not an implication. Create a subgoal to % investigate this conclusion. prove_subgoal(Depth, _ConcNumber, ConcExp) :- add_conc(ConcExp, x, M), assert_log_fact(new_goal, [M, ConcExp, Depth]), restructure_vc(Depth), !, impose_obvious_substitutions_at_depth(Depth), !, try_to_discharge_goals_to_depth(Depth). %=============================================================================== % impose_obvious_substitutions_at_depth(+Depth). %------------------------------------------------------------------------------- % Working on subgoal at depth Depth, perform a collection of standard % substitutions. %=============================================================================== impose_obvious_substitutions_at_depth(Depth) :- retractall(know_eliminated_in_subgoaling(Depth, _)), do_obvious_substitutions_at_depth(Depth). % always succeeds impose_obvious_substitutions_at_depth(_Depth). %------------------------------------------------------------------------------- do_obvious_substitutions_at_depth(Depth) :- repeat, done_all_obvious_substitutions(Depth), !, fail. %------------------------------------------------------------------------------- % This predicate succeeds when no more substitutions are identified. % Simplifier's proof strategy first performs all appropriate substitute and % eliminate of the form n = ... done_all_obvious_substitutions(Depth) :- substitution_elimination_is_on, equivalence_hyp(VAR, EXPRESSION, _X, N), \+ know_eliminated(N), \+ know_substituted(N), \+ know_eliminated_in_subgoaling(_, N), var_const(VAR, _, v), not_occurs_in(VAR, EXPRESSION), !, substitute_and_eliminate(subst_and_elim, user_rule_no, Depth, N, VAR, EXPRESSION). % The simplifier then performs all appropriate substitute and eliminate of % the form fld_(fld_(....fld_(VAR_1))) = % fld_(fld_(....fld_(VAR_1))) % where VAR_1 is not in VAR_2 and VAR_1 is a var_const. done_all_obvious_substitutions(Depth) :- substitution_elimination_is_on, equivalence_hyp(A_FieldRead_Term, B_FieldRead_Term, _X, N), \+ know_eliminated(N), \+ know_eliminated_in_subgoaling(_, N), \+ get_processed_hyp_with_field_op(N), \+ get_processed_hyp_with_field_op_in_subgoal(_, N), \+ know_substituted(N), matching_records(A_FieldRead_Term, _, v, B_FieldRead_Term), !, substitute_and_eliminate(subst_fld, user_rule_no, Depth, N, A_FieldRead_Term, B_FieldRead_Term). done_all_obvious_substitutions(Depth) :- substitution_elimination_is_on, ( get_hyp((not VAR) <-> EXP, X, N) ; get_hyp(EXP <-> (not VAR), X, N) ), var_const(VAR, _, v), \+ know_eliminated(N), \+ know_substituted(N), \+ know_eliminated_in_subgoaling(_, N), not_occurs_in(VAR, EXP), simplify(not EXP, EXPRESSION), !, substitute_and_eliminate(subst_and_elim, user_rule_no, Depth, N, VAR, EXPRESSION). done_all_obvious_substitutions(Depth) :- substitution_elimination_is_on, get_hyp(BOOL_ATOM, _X, N), var_const(BOOL_ATOM, boolean, v), \+ know_eliminated(N), \+ know_substituted(N), \+ know_eliminated_in_subgoaling(_, N), !, substitute_and_eliminate(subst_and_elim, user_rule_no, Depth, N, BOOL_ATOM, true). done_all_obvious_substitutions(Depth) :- substitution_elimination_is_on, get_hyp(not BOOL_ATOM, _X, N), var_const(BOOL_ATOM, boolean, v), \+ know_eliminated(N), \+ know_substituted(N), \+ know_eliminated_in_subgoaling(_, N), !, substitute_and_eliminate(subst_and_elim, user_rule_no, Depth, N, BOOL_ATOM, false). done_all_obvious_substitutions(_Depth) :- % true when code reaches here. !. 
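%-------------------------------------------------------------------------------
% Illustrative only (invented predicate name): the substitute-and-eliminate
% clauses above are guarded by the syntactic occurs check not_occurs_in/2
% (defined just below).  For example, a hypothesis x = y + 1 qualifies for
% substitution because x does not occur in y + 1, whereas x = x + 1 does not:
example_not_occurs_ok :-
   not_occurs_in(x, y + 1),
   \+ not_occurs_in(x, x + 1).
%-------------------------------------------------------------------------------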
%------------------------------------------------------------------------------- equivalence_hyp(A_Term, B_Term, X, N) :- get_hyp(A_Term = B_Term, X, N). equivalence_hyp(A_Term, B_Term, X, N) :- get_hyp(B_Term = A_Term, X, N). equivalence_hyp(A_Term, B_Term, X, N) :- get_hyp(A_Term <-> B_Term, X, N). equivalence_hyp(A_Term, B_Term, X, N) :- get_hyp(B_Term <-> A_Term, X, N). %------------------------------------------------------------------------------- not_occurs_in(VAR, VAR) :- !, fail. not_occurs_in(VAR, ATOM) :- atom(ATOM), !, ATOM \= VAR, !. not_occurs_in(VAR, EXPR) :- nonvar(EXPR), EXPR =.. [_OP|ARGS], !, not_occurs_in_list(VAR, ARGS), !. %------------------------------------------------------------------------------- not_occurs_in_list(VAR, [VAR|_]) :- !, fail. not_occurs_in_list(VAR, [HEAD|TAIL]) :- not_occurs_in(VAR, HEAD), !, not_occurs_in_list(VAR, TAIL), !. not_occurs_in_list(_, []) :- !. %=============================================================================== % is_inequality_with_updates(+Exp, -CasesList). %------------------------------------------------------------------------------- % The input expression Exp is investigated, seeking to identify a list of % cases for a proof by cases, that is likely to be successful. The % heuristic triggers on inequalities that reference updated structures, % suggesting corresponding cases, enabling the updates to be simplified in % each case. %=============================================================================== is_inequality_with_updates(LExp and _RExp, CasesList) :- % It is assumed that the cases that would be suggested by 'LExp' are % the same as the cases that would be suggested by 'RExp'. This is % typically valid, due to the structures generated by the examiner. The % assumption may not hold for structures introduced by user proof % assertions - having the acceptable consequence of suggesting poor % case selections that will not ease proof. is_inequality_with_updates(LExp, CasesList). is_inequality_with_updates(LExp <= RExp, CasesList) :- find_update_cases(LExp, RExp, CasesList), !. is_inequality_with_updates(LExp >= RExp, CasesList) :- find_update_cases(LExp, RExp, CasesList), !. is_inequality_with_updates(LExp < RExp, CasesList) :- find_update_cases(LExp, RExp, CasesList), !. is_inequality_with_updates(LExp > RExp, CasesList) :- find_update_cases(LExp, RExp, CasesList), !. is_inequality_with_updates(LExp = RExp, CasesList) :- find_update_cases(LExp, RExp, CasesList), !. is_inequality_with_updates(LExp <> RExp, CasesList) :- find_update_cases(LExp, RExp, CasesList), !. %---------------------------------------------------------------------------- % Look for update cases suggested by either the left or right hand side of % the deconstructed inequality expression. find_update_cases(LExp, _RExp, CasesList) :- find_an_update_case(LExp, CasesList). find_update_cases(_LExp, RExp, CasesList) :- find_an_update_case(RExp, CasesList). %---------------------------------------------------------------------------- % Disregard atomic structures early, for performance. find_an_update_case(X, _) :- atomic(X), !, fail. % Target one step updated array element. Cases are only suggested where % these are not directly provable in the current goal context. find_an_update_case(element(update(_, [I], _), [J]), [I=J, I<>J]) :- cant_show_equal_or_not(I, J). % Target two step updated array element. Cases are only suggested where % these are not directly provable in the current goal context. 
find_an_update_case(update(update(_, [I], _), [J], _), [I=J, I<>J]) :- cant_show_equal_or_not(I, J). % Search deeper within expressions. find_an_update_case(X, CASES) :- X =..[_OP|ARGS], find_an_update_case_in_list(ARGS, CASES). %---------------------------------------------------------------------------- find_an_update_case_in_list([H|_], CASES) :- find_an_update_case(H, CASES). find_an_update_case_in_list([_|T], CASES) :- find_an_update_case_in_list(T, CASES). %---------------------------------------------------------------------------- cant_show_equal_or_not(I, J) :- infer(I=J), !, fail. cant_show_equal_or_not(I, J) :- infer(I<>J), !, fail. cant_show_equal_or_not(_I, _J) :- !. %=============================================================================== % is_targeted_disjunction(+Exp, -DisjunctList). %------------------------------------------------------------------------------- % The input expression Exp is investigated, seeking to identify a list of % cases from a disjunction. %=============================================================================== is_targeted_disjunction(Exp, DisjunctList):- split_disjunction(Exp, DisjunctList), length(DisjunctList, CountInt), % Must be at least two. CountInt >= 2, % Must be less than (or exactly) the max . max_disjuncts(MaxDisjuncts), CountInt =< MaxDisjuncts, !. %--------------------------------------------------------------------------- % Maximum number disjuncts considered is hard-coded. Experimentation suggests % this is a good value. max_disjuncts(16). %=============================================================================== %=============================================================================== % restructure_vc(+Depth). %------------------------------------------------------------------------------- % Restructure and normalise the VC at depth Depth. The restructuring is % targeted, based on the strategies considered for proof framing. %=============================================================================== restructure_vc(Depth) :- movenots(Depth). restructure_vc(Depth) :- split_hyps(Depth). restructure_vc(Depth) :- split_concs(Depth). restructure_vc(Depth) :- do_implication(Depth). restructure_vc(Depth) :- equivalence(Depth). restructure_vc(_Depth) :- !. %------------------------------------------------------------------------------- % MOVENOTS - move nots as far into expressions as possible. movenots(Depth) :- retractall(hn(_)), assertz(hn(1)), repeat, hn(N), ( gethyp(N,H), negin(H,H1), force_simplify(H1, H2), add_hyp_with_id(H2, x, N), log_negation_simplification(N, H, H2,Depth) ; true ), M is N+1, retract(hn(N)), assertz(hn(M)), bigger_than_all_hyps(M), !, fail. %------------------------------------------------------------------------------- bigger_than_all_hyps(M) :- get_hyp(_, _, N), N>=M, !, fail. bigger_than_all_hyps(_) :- !. %------------------------------------------------------------------------------- force_simplify(X, Y) :- simplification_is_on, simplify(X, Y), !. force_simplify(X, X) :- !. %------------------------------------------------------------------------------- % GETHYP(N,H) - instantiates H to the Nth hypothesis & retracts it. gethyp(N,H) :- get_hyp(H, x, N), prune_hyp(H, x, N), !. %------------------------------------------------------------------------------- log_negation_simplification(_N, H, H, _D) :- !. log_negation_simplification(N, _OLD, NEW, Depth) :- assert_log_fact(restructured, [N, NEW, Depth]), !. 
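%-------------------------------------------------------------------------------
% Illustrative sketch of movenots (hypothesis number invented): a hypothesis
% such as
%    H3:  not (a > 0 and b > 0)
% is rewritten by negin/2 with the negation pushed inwards, typically giving
% a <= 0 or b <= 0 after simplification, and the change is recorded via a
% restructured log fact for hypothesis 3.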
%------------------------------------------------------------------------------- % SPLIT_HYPS - split conjunction hypotheses into two or more hypotheses. split_hyps(Depth) :- prune_hyp(A and B, x, N), add_hyp(A, x, Na), add_hyp(B, x, Nb), assert_log_fact(hyp_split, [N, [Na, A], [Nb, B], Depth]), !, % Try again in case either or A or B are also a conjunction split_hyps(Depth). %------------------------------------------------------------------------------- % SPLIT_CONCS - split conjunction conclusions into separate conclusions. split_concs(Depth) :- prune_conc(A and B, x, N), prune_all_concs(_, _, N), add_conc(A, x, Na), add_conc(B, x, Nb), assert_log_fact(conc_split, [N, [Na, A], [Nb, B], Depth]), !, % Try again in case either or A or B are also a conjunction split_concs(Depth). %------------------------------------------------------------------------------- % DO_IMPLICATION - given "A" and "A -> B", add "B" as a hypothesis; % given "not B" and "A -> B", add "not A" as a hypothesis. do_implication(Depth) :- get_hyp(A -> B, x, Nab), get_hyp(A, x, Na), add_hyp(B, x, Nb), assert_log_fact(forwardchain, [Nab, Na, Nb, B, Depth]), fail. do_implication(Depth) :- get_hyp(A -> B, x, Nab), get_hyp((not B), x, Nb), negin((not A),C), add_hyp(C, x, Na), assert_log_fact(backchain, [Nab, Nb, Na, C, Depth]), fail. %------------------------------------------------------------------------------- % EQUIVALENCE - given "A" and "A <-> B" or "B <-> A", add "B"; % given "not A" and "A <-> B" or "B <-> A", add "not B" equivalence(Depth) :- get_hyp(A <-> B, x, Nab), get_hyp(A, x, Na), add_hyp(B, x, Nb), assert_log_fact(forwardchain, [Nab, Na, Nb, B, Depth]), fail. equivalence(Depth) :- get_hyp(A <-> B, x, Nab), get_hyp(B, x, Nb), add_hyp(A, x, Na), assert_log_fact(forwardchain, [Nab, Nb, Na, A, Depth]), fail. equivalence(Depth) :- get_hyp(A <-> B, x, Nab), get_hyp(not(A), x, Na), negin(not(B),C), add_hyp(C, x, Nb), assert_log_fact(backchain, [Nab, Na, Nb, C, Depth]), fail. equivalence(Depth) :- get_hyp(A <-> B, x, Nab), get_hyp(not(B), x, Nb), negin(not(A),C), add_hyp(C, x, Na), assert_log_fact(backchain, [Nab, Nb, Na, C, Depth]), fail. %=============================================================================== % noisily_add_new_hyp_list(+HypList, +Means, +Depth). %------------------------------------------------------------------------------- % Add new hypotheses HypList, justified by Means, to the goal at depth % Depth. %=============================================================================== % Do not add true as a hypothesis. noisily_add_new_hyp_list([true|Hs], Means, Depth) :- !, noisily_add_new_hyp_list(Hs, Means, Depth). % Do not add a duplicate hypothesis. noisily_add_new_hyp_list([H|Hs], Means, Depth) :- get_hyp(H,_,_), !, noisily_add_new_hyp_list(Hs, Means, Depth). % Add a regular hypothesis. noisily_add_new_hyp_list([H|Hs], ordinary, Depth) :- add_hyp(H, x, N), assert_log_fact(new_hyp, [N, H, Depth]), !, noisily_add_new_hyp_list(Hs, ordinary, Depth). % Add a hypothesis, as part of a proof by cases. noisily_add_new_hyp_list([H|Hs], by_cases(C), Depth) :- add_hyp(H, x, N), assert_log_fact(new_hyp_for_case, [N, H, C, Depth]), !, noisily_add_new_hyp_list(Hs, by_cases(C), Depth). noisily_add_new_hyp_list([], _Means, _Depth). 
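%-------------------------------------------------------------------------------
% Illustrative sketch of the chaining rules above (hypothesis numbers
% invented): from
%    H1:  a = 1 -> b = 2     and     H2:  a = 1
% do_implication adds b = 2 as a new hypothesis and logs a forwardchain fact,
% while from
%    H3:  c = 3 -> d = 4     and     H4:  not (d = 4)
% it adds the negation of c = 3 (pushed inwards by negin/2) and logs a
% backchain fact.  The equivalence rules behave analogously for <->.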
%------------------------------------------------------------------------------- % "The VC State Stack" % -------------------- % The following facts are stored on the stack, as proof framing is used: % hyp(X, N, H) -- hypotheses % conc(X, N, X) -- conclusions % proved_conclusion(N) -- to retain which ones have already been proved % log_fact(X, Y) -- log facts built up so far % % On entering a new proof-frame, these are all copied to the stack via a % call to push_vc_state, which fetches the current stack depth % (incrementing it as it does so) and asserts a set of stack(Depth, Fact) % facts. % % The conclusions, proved_conclusion and log_fact facts are also all % retracted, so they don't interfere with proof in the new proof frame. % Only the hypotheses are retained. % % If the proof-frame proof attempt fails, we restore the VC state simply by % calling pop_vc_state(failure), which does the following: % - fetches the current stack depth, decrementing it as it does so; % - retract all the current hypotheses, conclusions and proved_conclusion % facts; % - moves the log_facts built up within the proof-frame into temporary % storage (via some moved_log_fact facts); % - retracts each fact at the current depth from the stack in turn, % asserting it back into the database as it does so (to restore the % hypotheses, conclusions, proved_conclusions and log_facts from the % enclosing frame). % % If the proof-frame proof attempt succeeds, we do the same as the failure % case above, but via pop_vc_state(success), which also does the following: % - adds the new log facts from the proof frame we've just exited on to the % end of the log_fact facts, so these can get propagated back to the % enclosing proof frame into which we are now returning. % % Finally, in a proof-by-cases, after a successful proof of a case we call % pop_vc_state(cases), which behaves like the success case, but buffers the % case log-facts instead of restoring them immediately. Where the % subsequent case is successful, the buffered (previous case) log-facts are % moved to this case and appended to the state of the VC's proof. Where a % case fails, all buffered and moved log-facts are discarded. Since this % failure will ripple upwards in any proof tree, this discarding behaviour % also propagates upwards correctly. %------------------------------------------------------------------------------- %=============================================================================== % clear_up_stack_facts. %------------------------------------------------------------------------------- % Clear up the stack, to begin proving a new subgoal. %=============================================================================== clear_up_stack_facts :- % do for each conclusion attempt. retractall(stack(_,_)), retractall(current_stack_depth(_)), retractall(moved_log_fact(_,_,_)), clear_up_could_and_could_not_infer_facts, !. %=============================================================================== % push_vc_state. %------------------------------------------------------------------------------- % Place the current goal onto the stack, and leave a fresh subgoal. %=============================================================================== push_vc_state :- fetch_and_increment_stack_depth(SD), do_push_vc(SD), !. %------------------------------------------------------------------------------- % Store the hypotheses, conclusions and log_facts on the stack. Also remove % conclusions and log_facts, creating a fresh subgoal. 
do_push_vc(SD) :- get_hyp(H, X, N), assertz(stack(SD, get_hyp(H, X, N))), fail. do_push_vc(SD) :- get_next_hyp_id(NextHypId_Int), assertz(stack(SD, get_next_hyp_id(NextHypId_Int))), fail. do_push_vc(SD) :- prune_conc(C, X, N), assertz(stack(SD, get_conc(C, X, N))), fail. do_push_vc(SD) :- prune_proved_conc(N), assertz(stack(SD, get_proved_conc(N))), fail. do_push_vc(SD) :- retract(log_fact(X, Y)), assertz(stack(SD, log_fact(X, Y))), fail. do_push_vc(_) :- !. %------------------------------------------------------------------------------- fetch_and_increment_stack_depth(SD) :- retract(current_stack_depth(N)), SD is N+1, asserta(current_stack_depth(SD)), !. fetch_and_increment_stack_depth(1) :- asserta(current_stack_depth(1)), !. %=============================================================================== % pop_vc_state(+PopKind). %------------------------------------------------------------------------------- % Pop off the top goal from the stack. %=============================================================================== pop_vc_state(_Any) :- fetch_and_decrement_stack_depth(SD), prune_all_hyps(_, _, _), reset_next_hyp_id, prune_all_concs(_, _, _), prune_all_proved_concs, move_logfacts(SD), retract(stack(SD, FACT)), assertz(FACT), % Force backtracking. fail. pop_vc_state(success) :- current_stack_depth(SD), promote_moved_logfacts(SD), fail. pop_vc_state(failure) :- current_stack_depth(SD), PrevSD is SD + 1, % discard buffered dead-end log-facts retractall(moved_log_fact(PrevSD,_,_)), fail. pop_vc_state(_Any) :- % including the 'cases' case !. %---------------------------------------------------------------------------- move_logfacts(SD) :- retract(log_fact(X, Y)), assertz(moved_log_fact(SD, X, Y)), fail. move_logfacts(_SD) :- !. %---------------------------------------------------------------------------- promote_moved_logfacts(SD) :- PrevSD is SD + 1, retract(moved_log_fact(PrevSD, X, Y)), assert_log_fact(X, Y), fail. promote_moved_logfacts(_SD) :- % when none left to move at depth PrevSD !. %---------------------------------------------------------------------------- fetch_and_decrement_stack_depth(SD) :- retract(current_stack_depth(SD)), N is SD-1, !, asserta(current_stack_depth(N)), !. %=============================================================================== % substitute_and_eliminate(+Strategy, +UserRule, +Depth, +N, +V, +E). %------------------------------------------------------------------------------- % Perform substitute and eliminate, seeking to simplify the goal without % undermining its overall provability. % Parameter UserRule (user_rules_no, user_rules_yes(File:RuleId) specifies whether the % substitution is based on a user defined rule. %=============================================================================== substitute_and_eliminate(_Strategy, _, _, _, _, _) :- retractall(potential_subst_fact(_, _)), fail. substitute_and_eliminate(_Strategy, UserRules, Depth, N, V, E) :- get_hyp(H, X, K), K \= N, \+ know_eliminated(K), \+ know_eliminated_in_subgoaling(_, K), \+ get_processed_hyp_with_field_op(K), \+ get_processed_hyp_with_field_op_in_subgoal(_, K), subst_vbl(V, E, H, S), H \= S, simplify_if_allowed(S, S1), H \= S1, update_substituted_hyp(Depth, X, K, S1), substitute_and_eliminate_mark_user_rules(UserRules, hyp(K)), fail. 
substitute_and_eliminate(_Strategy, UserRules, Depth, _N, V, E) :- get_conc(C, X, K), \+ get_proved_conc(K), do_subst_and_simplify_conc(V, E, C, S), C \= S, update_substituted_conc(Depth, X, K, S), substitute_and_eliminate_mark_user_rules(UserRules, conc(K)), fail. substitute_and_eliminate(_Strategy, _UserRules, Depth, N, V, E) :- path_functions, !, assertz(know_substituted(N)), potential_subst_fact(_, _), !, % There is no need to write different log records for % "substitute and eliminate hypothesis a var/const" and "substitute % an unchanged record field" as the log messages for path_functions % is appropriate for both cases - unlike case when \+path_functions. assert_log_fact(substituted, [Depth, N, V, E]), copy_subst_facts_to_log, fail. substitute_and_eliminate(Strategy, UserRules, Depth, N, V, E) :- potential_subst_fact(_, _), !, substitute_and_eliminate_potential_subst_fact(Strategy, Depth, N, V, E), substitute_and_eliminate_mark_user_rules(UserRules, N), !, fail. substitute_and_eliminate(Strategy, UserRules, Depth, N, V, _E) :- % Ignore redundant rule applications. integer(N), !, substitute_and_eliminate_integer(Strategy, Depth, N, V), substitute_and_eliminate_mark_user_rules(UserRules, N), !, fail. %------------------------------------------------------------------------------- substitute_and_eliminate_mark_user_rules(user_rule_yes(File:RuleId), ConcOrHyp_N):- is_conc_or_hyp(ConcOrHyp_N), mark_whether_proved_by_user_rules(user_rewrite_rule), add_to_rule_summary(File:RuleId, ConcOrHyp_N), !. substitute_and_eliminate_mark_user_rules(_, _):- !. is_conc_or_hyp(hyp(_N)). is_conc_or_hyp(conc(_N)). %------------------------------------------------------------------------------- substitute_and_eliminate_potential_subst_fact(subst_and_elim, Depth, N, V, E) :- substitute_and_eliminate_x(Depth, N), assert_log_fact(substituted, [Depth, N, V, E]), copy_subst_facts_to_log. substitute_and_eliminate_potential_subst_fact(subst_fld, Depth, N, V, E) :- substitute_and_eliminate_y(Depth, N), assert_log_fact(substituted_fld, [Depth, N, V, E]). %------------------------------------------------------------------------------- substitute_and_eliminate_integer(subst_and_elim, Depth, N, V) :- substitute_and_eliminate_x(Depth, N), assert_log_fact(subst_elim_hyp, [Depth, N, V]). substitute_and_eliminate_integer(subst_fld, Depth, N, V) :- substitute_and_eliminate_y(Depth, N), assert_log_fact(subst_fld, [Depth, N, V]). %------------------------------------------------------------------------------- substitute_and_eliminate_x(Depth, N) :- Depth=toplevel, assertz(know_eliminated(N)), issue_substitution_message(N). substitute_and_eliminate_x(Depth, N) :- assertz(know_eliminated_in_subgoaling(Depth, N)). %------------------------------------------------------------------------------- substitute_and_eliminate_y(Depth, N) :- Depth=toplevel, add_processed_hyp_with_field_op(N), issue_substitution_message(N). substitute_and_eliminate_y(Depth, N) :- add_processed_hyp_with_field_op_in_subgoal(Depth, N). %------------------------------------------------------------------------------- do_subst_and_simplify_conc(V, E, C1 & C2, S1 & S2) :- !, do_subst_and_simplify_conc(V, E, C1, S1), !, do_subst_and_simplify_conc(V, E, C2, S2), !. do_subst_and_simplify_conc(V, E, X := Y, X := Z) :- !, do_subst_and_simplify_conc(V, E, Y, Z), !. do_subst_and_simplify_conc(V, E, C, S) :- !, subst_vbl(V, E, C, S1), !, ( simplification_is_on, % subst_vbl has done something, so simplify. 
C \= S1, simplify(S1, S) ; simplification_is_on, % subst_vbl no effect, so no need to simplify. C = S1, S = S1 ; \+ simplification_is_on, S = S1 ), !. %------------------------------------------------------------------------------- update_substituted_hyp(Depth, X, K, S1) :- replace_hyp(_Old_Hyp, X, K, S1), complexity_token(X, XX), retractall(complexity_fact(hyp, K, XX, _)), complexity(S1, K, hyp, XX, _), ( X = x, assertz(potential_subst_fact(subst_hyp, [Depth, K, S1])) ; true ), !. %------------------------------------------------------------------------------- update_substituted_conc(Depth, X, K, S) :- prune_conc(_, X, K), ( X = [s,T], XX = s, norm_typed_expr(S, boolean, SS) ; X = ss, XX = ss, is_relational_expression(S, Rop, A, B, T), norm_typed_expr(A, T, AA), norm_typed_expr(B, T, BB), SS =.. [Rop, AA, BB] ; XX = X, SS = S ), !, add_conc_with_id(SS, X, K), ( X = x, assertz(potential_subst_fact(subst_conc, [Depth, K, SS])) ; true ), retractall(complexity_fact(conc, K, XX, _)), complexity(SS, K, conc, XX, _), !. %------------------------------------------------------------------------------- simplify_if_allowed(X, Y) :- simplification_is_on, !, simplify(X, Y), !. simplify_if_allowed(X, X) :- !. %------------------------------------------------------------------------------- copy_subst_facts_to_log :- retract(potential_subst_fact(X, Y)), assert_log_fact(X, Y), fail. copy_subst_facts_to_log :- !. %############################################################################### % State private to module. %############################################################################### %=============================================================================== % Private state: % get_processed_hyp_with_field_op('HypId_Int')). % get_processed_hyp_with_field_op_in_subgoal('Level_Int', 'HypId_Int')). %------------------------------------------------------------------------------- % These state record which hypothesis containing equality between % the same fields (nested to arbitrary depth) of two variables has % been processed by the Simplifier to ensure termination. The use of these % state mirrors the state know_eliminated and know_eliminated_in_subgoal % when processing hypothesis containing equality containing a variable or % constant. %=============================================================================== :- dynamic(get_processed_hyp_with_field_op/1). :- dynamic(get_processed_hyp_with_field_op_in_subgoal/2). %=============================================================================== % add_processed_hyp_with_field_op(+HypId_Int) %------------------------------------------------------------------------------- % Add HypId_Int into the database to record that the simplifier has processed % the hypothesis where the hypothesis defines properties on variable or % constant that are nested inside functions - e.g. f1(x) = f2(y) and % f1(g1(x)) = f2(g2(y)). %=============================================================================== add_processed_hyp_with_field_op(HypId_Int) :- assert(get_processed_hyp_with_field_op(HypId_Int)). %=============================================================================== % prune_all_processed_hyp_with_field_op %------------------------------------------------------------------------------- % Retracts all hypothesis identifiers in get_processed_hyp_with_field_op. %=============================================================================== prune_all_processed_hyp_with_field_op :- retractall(get_processed_hyp_with_field_op(_)). 
%############################################################################### % get_processed_hyp_with_field_op_in_subgoal %############################################################################### %=============================================================================== % add_processed_hyp_with_field_op_in_subgoal(+Level_Int, +HypId_Int) %------------------------------------------------------------------------------- % Add hypothesis identifier and level into % get_processed_hyp_with_field_op_in_subgoal. % Similar to get_processed_hyp_with_field_op, the hypothesis defines properties on % variables and constants nested inside functions. %=============================================================================== add_processed_hyp_with_field_op_in_subgoal(Level_Int, HypId_Int) :- assert(get_processed_hyp_with_field_op_in_subgoal(Level_Int, HypId_Int)). %=============================================================================== % prune_all_processed_hyp_with_field_op_in_subgoal %------------------------------------------------------------------------------- % Retracts all hypothesis identifiers from % get_processed_hyp_with_field_op_in_subgoal. %=============================================================================== prune_all_processed_hyp_with_field_op_in_subgoal:- retractall(get_processed_hyp_with_field_op_in_subgoal(_, _)). %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/portray.pro0000644000175000017500000001672011753202337017035 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Clauses to control formatting of vc output. These must appear in the user % module. 
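%
% Illustrative note (the exact rendering depends on the operator table in
% force): with these clauses loaded, print/1 and write_term/2 with the
% portrayed(true) option display terms in infix/prefix operator syntax with
% minimal parenthesisation, so that, for example, a + b * c is shown as
% "a + b * c" while (a + b) * c is shown as "(a + b) * c".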
%############################################################################### %############################################################################### % DEPENDENCIES %############################################################################### %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### /* Portray a BINARY operator */ portray(Term):- nonvar(Term), functor(Term, Name_Atom, 2), arg(1,Term, A), arg(2,Term, B), /* Find a BINARY operator that matches */ current_op(Pred,F,Name_Atom), ( F = xfx ; F = yfx ; F = xfy ), /* Find the precedences of sub-terms A and B */ term_pri(A, AP), term_pri(B, BP), ( /* We parenthesize A in two cases: */ /* 1) For xfx operators, we parenthesize if A's functor is */ /* equal or lower in precedence to that of Term/ */ /* 2) If A's principal functor is lower precedence than that of Term, then */ /* we need to parenthesize A. */ /* Note that in ISO PROLOG, operator precedence numbers are SMALLER for */ /* more tightly binding operators, so we need the ">" operator to test */ /* for "lower precedence" here. */ ( F = xfx, /* Case 1 */ AP >= Pred ; F \= xfx, /* Case 2 */ AP > Pred ), write('('), write_term(A, [priority(Pred), portrayed(true), numbervars(true)]), write(')') ; ( F = xfx, AP < Pred /* not Case 1 */ ; F \= xfx, AP =< Pred /* not case 2 */ ), write_term(A, [priority(Pred), portrayed(true), numbervars(true)]) ), write(' '), write(Name_Atom), write(' '), ( /* If B's principal functor is lower or equal precedence than that of Term, then */ /* we need to parenthesize B */ /* This is to re-produce the POPLOG behaviour where multi-term expressions */ /* with equal-precedence operators are printed with the RHS parenthesised */ /* For example, we want "A + (B + C)" NOT "A + B + C" */ BP >= Pred, write('('), write_term(B, [priority(Pred), portrayed(true), numbervars(true)]), write(')') ; BP < Pred, write_term(B, [priority(Pred), portrayed(true), numbervars(true)]) ), !. /* Portray an UNARY operator of type fx or fy */ portray(Term):- nonvar(Term), functor(Term, Name_Atom, 1), arg(1,Term, A), /* Find a UNARY operator that matches */ ( current_op(Pred,fx,Name_Atom) ; current_op(Pred,fy,Name_Atom) ), /* Find the precedences of sub-term A */ term_pri(A, AP), write(Name_Atom), write(' '), ( /* If A's principal functor is lower precedence than that of Term, then */ /* we need to parenthesize A */ AP > Pred, write('('), write_term(A, [priority(Pred), portrayed(true), numbervars(true)]), write(')') ; AP =< Pred, write_term(A, [priority(Pred), portrayed(true), numbervars(true)]) ), !. /* Portray an UNARY operator of type xf or yf */ portray(Term):- nonvar(Term), functor(Term, Name_Atom, 1), arg(1,Term, A), /* Find a UNARY operator that matches */ ( current_op(Pred,xf,Name_Atom) ; current_op(Pred,yf,Name_Atom) ), /* Find the precedences of sub-term B */ term_pri(A, AP), ( /* If A's principal functor is lower precedence than that of Term, then */ /* we need to parenthesize A */ AP > Pred, write('('), write_term(A, [priority(Pred), portrayed(true), numbervars(true)]), write(')') ; AP =< Pred, write_term(A, [priority(Pred), portrayed(true), numbervars(true)]) ), write(' '), write(Name_Atom), !. 
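/* Worked example (illustrative; assumes the usual precedences, 500 for "+"  */
/* and 400 for "*"): when portraying (a + b) * c, the sub-term a + b has     */
/* precedence 500, which exceeds that of "*", so the binary clause above     */
/* parenthesises it, giving "(a + b) * c".  For a + (b + c) the right-hand   */
/* sub-term has equal precedence, so it too is parenthesised, giving         */
/* "a + (b + c)", as noted in the POPLOG remark above.                       */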
/* Portray clauses for built-in PROLOG operators */ portray(List) :- (List = [] ; List = [_|_]), !, print_list(List). /* Catch all - functor F, which is not an operator, with non-zero */ /* number of arguments - print as F(Args) with commas */ /* between the arguments. */ portray(X) :- X =.. [F|Args], atomic(F), Args \== [], !, write(F), write('('), print_list1(Args), write(')'), !. /* print_list/1 - prints a list enclosed in [ ] with */ /* comma and space between each element */ print_list(List) :- write('['), print_list1(List), write(']'). print_list1([X]) :- !, print(X). print_list1([X|Xs]) :- !, print(X), write(', '), print_list1(Xs). print_list1([]). /* term_pri/2 - returns the precedence of the principal functor of Term */ term_pri(Term, Prio) :- /* Careful here to only look for BINARY operators */ nonvar(Term), functor(Term, Name_Atom, 2), ( current_op(Prio, xfx, Name_Atom) ; current_op(Prio, yfx, Name_Atom) ; current_op(Prio, xfy, Name_Atom) ). term_pri(Term, Prio) :- /* Careful here to only look for UNARY operators */ nonvar(Term), functor(Term, Name_Atom, 1), ( current_op(Prio, fx, Name_Atom) ; current_op(Prio, fy, Name_Atom) ; current_op(Prio, xf, Name_Atom) ; current_op(Prio, yf, Name_Atom) ). /* We don't want to parenthesise atoms, literals and so on, so we pretend */ /* that these have a very high precedence */ term_pri(_Term, Prio) :- Prio = 1, !. %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/standard.pro0000644000175000017500000010501111753202337017125 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % The main export of this module is norm_typed_expr(Old, Type, New) which, % given a ground expression Old of type Type, is converted into its % standard form, New. It works in a number of passes: moving everything to % one side of a relational operator, multiplying out, collecting together % like terms, reordering lexicographically, etc. 
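%
% Illustrative examples (hypothetical queries, assuming x and y are known
% integer var_consts; the exact shape of the result depends on the term
% ordering and simplification settings in force):
%    ?- norm_typed_expr(2*x + 3*x, integer, N).   % N collects like terms, e.g. 5*x
%    ?- norm_typed_expr(x >= y, boolean, N).      % N is a strict form such as x - y + 1 > 0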
%############################################################################### %############################################################################### % MODULE %############################################################################### %############################################################################### % DEPENDENCIES %############################################################################### %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### %############################################################################### % PREDICATES %############################################################################### %############################################################################### % Integer and Boolean Expression Standardisation Module %############################################################################### % MAIN PROCEDURE: Normalise an expression of given type norm_typed_expr(OLD, _TYPE, NEW) :- know_norm_expr(OLD, NEW), !. norm_typed_expr(OLD, TYPE, NEW) :- do_norm_typed_expr(OLD, TYPE, SO_FAR), ( simplify(SO_FAR, NEW) ; NEW = SO_FAR ), save_term_breakdown(NEW, TYPE), save_norm_expr(OLD, NEW), !. %------------------------------------------------------------------------------- do_norm_typed_expr(element(A,I),_T,element(NEWA, NEWI)) :- checktype(A, AT), !, ( find_core_type(AT, ArrT), type(ArrT, array(INDTYPES, _ELEMTYPE)), !, ( do_norm_typed_expr(A, ArrT, NEWA) ; NEWA = A ), !, ( do_norm_typed_exprs(I, INDTYPES, NEWI) ; NEWI = I ) ; NEWA = A, NEWI = I ), !. do_norm_typed_expr(update(A, I, X),T,update(NEWA, NEWI, NEWX)) :- ( find_core_type(T, AT), type(AT, array(INDTYPES, ELEMTYPE)), !, ( do_norm_typed_expr(A, T, NEWA) ; NEWA = A ), !, ( do_norm_typed_exprs(I, INDTYPES, NEWI) ; NEWI = I ), !, ( do_norm_typed_expr(X, ELEMTYPE, NEWX) ; NEWX = X ) ; NEWA = A, NEWI = I, NEWX = X ), !. do_norm_typed_expr(X,INT,X1) :- ( INT=integer ; INT=real ), !, apply(X,X1), !. do_norm_typed_expr(X,boolean,X1) :- !, do_norm_expr(X,X1),!. do_norm_typed_expr(E,_,E) :- atomic(E), !. do_norm_typed_expr(E1,_,E2) :- (\+ atomic(E1)), E1=..[F|Args1], checktypes(Args1,Types), do_norm_typed_exprs(Args1,Types,Args2), EE=..[F|Args2], ( simplify(EE, E2) ; E2=EE ), !. %------------------------------------------------------------------------------- % Deal with list of objects and list of their types. do_norm_typed_exprs([],[],[]) :- !. do_norm_typed_exprs([A|A1],[T|T1],[B|B1]) :- do_norm_typed_expr(A,T,B), do_norm_typed_exprs(A1,T1,B1), !. %############################################################################### % FUNCTIONS % ========= % DEFINED_FUNCT succeeds if 'Structure' has the same 'Name' % and 'Arity' as a function defined in the database. The % corresponding 'Arg_types' and 'Result_type' are noted. %############################################################################### defined_funct(Structure,Arg_types,Result_type) :- functor(Structure,Name,Arity), ( function(Name,Arg_types,Result_type) ; built_in(Name,Arg_types,Result_type) ), length(Arg_types,Arity), !. %------------------------------------------------------------------------------- % If the arguments of a function are of type 'integer' % or 'boolean' they may be standardised. 
% Note care in filtering errors when reforming function. simp_funct(X,Arg_types,Y):- X=..[Name|Arg_list], % simplify arguments. simp_args(Arg_list,Arg_types,Args1), Y=..[Name|Args1], !. %------------------------------------------------------------------------------- % SIMP_ARGS(A,L,A1) : A is a list of arguments, % L is a list of types, % A1 becomes a list of simplified arguments simp_args([X],[Type],[X1]) :- is_a_valid_type(Type), !, norm_typed_expr(X,Type,X1), !. simp_args([X],[_],[X1]) :- checktype(X, Type), !, norm_typed_expr(X,Type,X1), !. simp_args([X|Y],[Type|T],[X1|Y1]) :- is_a_valid_type(Type), !, norm_typed_expr(X,Type,X1), !, simp_args(Y,T,Y1), !. simp_args([X|Y],[_|T],[X1|Y1]) :- checktype(X, Type), !, norm_typed_expr(X,Type,X1), !, simp_args(Y,T,Y1), !. %------------------------ END of functions --------------------------- %############################################################################### % *** S T A N D A R D F O R M S *** % ===================================== % Puts arithmetic expressions in standard form. %############################################################################### % Integer expression standardisation - main predicates apply(A,L) :- cv(A,B), sp(B,C), leftint(C,D), oneint(D,E), createlist(E,F), sortlist(F,G), busort_prods(G,H), compress(H,I), nozeros(I,J), form_expr(J,K), tidy(K,L). % To produce standard form with first product_term positive % and sign outside. eg. + or -(a - b + etc.) standard(A,M) :- cv(A,B), sp(B,C), leftint(C,D), oneint(D,E), createlist(E,F), sortlist(F,G), busort_prods(G,H), compress(H,I), nozeros(I,J), form_expr(J,K), sign(K,L), tidy(L,M). %------------------------------------------------------------------------------- % 'Types' % S_ATOMIC(X) - succeeds if X is an atom or a signed integer s_atomic(X) :- ( atom(X) ; (integer(X), X>=0) ), !. s_atomic(-X) :- integer(X), X>=0, !. %------------------------------------------------------------------------------- % S_INTEGER(X) - succeeds if X is a signed integer s_integer(X) :- integer(X), X>=0, !. s_integer(-X) :- integer(X), X>=0, !. %------------------------------------------------------------------------------- % TERM(X) - define a 'Term' term(update(_,_,_)) :- !. term(element(_,_)) :- !. term(_X div _Y) :- !. term(_X / _Y) :- !. term(_X mod _Y) :- !. term(_X rem _Y) :- !. term(odd(_X)) :- !. term(abs(_X)) :- !. term(sqr(_X)) :- !. term(first(_X)) :- !. term(last(_X)) :- !. term(nonfirst(_X)) :- !. term(nonlast(_X)) :- !. term(length(_X)) :- !. term(_X @ __Y) :- !. term(pred(_X)) :- !. term(last(_X)) :- !. term(_X \/ _Y) :- !. term(_X /\ _Y) :- !. term(_X \ _Y) :- !. term(_X in _Y) :- !. term(_X not_in _Y) :- !. term(_X subset_of _Y) :- !. term(_X strict_subset_of _Y) :- !. term(set _X) :- !. term([_X|_Y]) :- !. term(X) :- s_atomic(X), !. term(X) :- record_function(_, X, _, _, _, _), !. term(X) :- function_template(X, _, _), !. term(X) :- X=..[N|_], function(N,_,_), !. %------------------------------------------------------------------------------- % PRODUCT(X) - define a 'Product' product(X*Y) :- !, product(X), product(Y), !. product(X) :- term(X), !. %------------------------------------------------------------------------------- % STAGE 1 */ % Multiplies out expression to give sum of products form % Simplifies all occurrences of X div Y & X mod Y & % functions. Also checks for illegal expressions cv(X,_) :- var(X), !, fail. cv(X*Y,A) :- cv(X,X1), cv(Y,Y1), multiply_out(X1,Y1,A), !. cv(X+Y,X1+Y1) :- cv(X,X1), cv(Y,Y1), !. cv(X-Y,X1+Y1) :- cv(X,X1), cv(Y*(-(1)),Y1), !. 
cv(X div Y,A) :- standard(X,X1), standard(Y,Y1), simp_num(X1 div Y1,A), !. cv(X / Y,A) :- standard(X,X1), standard(Y,Y1), simp_num(X1 / Y1,A), !. cv(X mod Y,A) :- standard(X,X1), standard(Y,Y1), simp_num(X1 mod Y1,A), !. cv(X rem Y,A) :- standard(X,X1), standard(Y,Y1), simp_num(X1 rem Y1,A), !. cv(element(A,X),Y) :- checktype(element(A,X),T), !, do_norm_typed_expr(element(A,X),T,Y), !. cv(X,X) :- s_atomic(X), !. cv(INT,-NEGINT) :- integer(INT), INT<0, NEGINT is -INT, !. cv(-X,Y) :- cv(X*(-(1)),Y), !. cv(X,Y) :- defined_funct(X,Arg_types,_), simp_funct(X,Arg_types,Y), !. cv(X,X). %------------------------------------------------------------------------------- % Multiply out two expressions to form a sum of products % Note the input expressions are in sum of products form multiply_out(X1+X2,Y1+Y2,A1+A2+A3+A4):- multiply_out(X1,Y1,A1), multiply_out(X1,Y2,A2), multiply_out(X2,Y1,A3), multiply_out(X2,Y2,A4), !. multiply_out(X,Y1+Y2,A1+A2) :- product(X), multiply_out(X,Y1,A1), multiply_out(X,Y2,A2), !. multiply_out(X1+X2,Y,A1+A2) :- product(Y), multiply_out(X1,Y,A1), multiply_out(X2,Y,A2), !. multiply_out(X,Y,X*Y) :- product(X), product(Y), !. %------------------------------------------------------------------------------- % SIMP_NUM(X div Y,Z) - simplify term if possible; X & Y are in std form simp_num(X div Y,Z) :- s_integer(X), s_integer(Y), Z iss X div Y, !. simp_num(_X div 0,_) :- !, fail. simp_num(X div 1,Y):- simp_num(X,Y), !. simp_num((X div Y) div Z,B) :- standard(Y*Z,A), simp_num(X div A,B), !. simp_num((-X) div (-Y),Z) :- simp_num(X div Y,Z), !. simp_num((-X) div Y,A*(-(1))) :- simp_num(X div Y,A), !. simp_num(X div (-Y),A*(-(1))) :- simp_num(X div Y,A), !. simp_num(X / Y,Z) :- s_integer(X), s_integer(Y), Z iss X / Y, !. simp_num(_X / 0,_) :- !, fail. simp_num(X / 1,Y):- simp_num(X,Y), !. simp_num((X / Y) / Z,B) :- standard(Y*Z,A), simp_num(X / A,B), !. simp_num((-X) / (-Y),Z) :- simp_num(X / Y,Z), !. simp_num((-X) / Y,A*(-(1))) :- simp_num(X / Y,A), !. simp_num(X / (-Y),A*(-(1))) :- simp_num(X / Y,A), !. simp_num(X,X) :- !. %------------------------------------------------------------------------------- % STAGE 2 % SP(OLD,NEW) - remove redundant brackets sp(X*(Y*Z),A) :- sp(X*Y*Z,A), !. sp(X*Y,Z*Y) :- term(Y), sp(X,Z), !. sp(X+(Y+Z),A) :- sp(X+Y+Z,A), !. sp(X+Y,X1+Y1) :- product(Y), sp(Y,Y1), sp(X,X1), !. sp(X,X) :- term(X), !. %------------------------------------------------------------------------------- % STAGE 3 % LEFTINT(OLD,NEW) - for each product move all integers to the left leftint(X*Y,A) :- s_integer(Y), !, ( term(X), A=Y*X ; leftint(X,B), sp(Y*B,A) ), !. leftint(X*Y,Z*Y) :- % implicit: (\+ s_integer(Y)) leftint(X,Z), !. leftint(X+Y,X1+Y1) :- leftint(X,X1), leftint(Y,Y1), !. leftint(X,X) :- term(X), !. %------------------------------------------------------------------------------- % STAGE 4 % ONEINT(OLD,NEW) - evaluate integer part of product oneint(X*Y,A) :- s_integer(Y), A iss X*Y, !. oneint(X*Nonint,Z*Nonint) :- oneint(X,Z), !. oneint(X+Y,X1+Y1) :- oneint(X,X1), oneint(Y,Y1), !. oneint(X,X) :- term(X), !. %------------------------------------------------------------------------------- % STAGE 5 % CREATELIST(OLD,NEW) - form a list of product terms createlist(X+Y,[Y|Z]) :- createlist(X,Z), !. createlist(X,[X]) :- product(X), !. %------------------------------------------------------------------------------- % STAGE 6 % SORTLIST(OLD,NEW) - for each product, order the list of terms sortlist([X1|Y1],[X2|Y2]) :- sortprod(X1,X2), sortlist(Y1,Y2), !. sortlist([],[]) :- !. 
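%-------------------------------------------------------------------------------
% Illustrative trace of the stages so far (the final ordering is settled by
% the later sorting passes): starting from (x + 1) * (y + 2), cv/2 and
% multiply_out/3 produce the sum of products x*y + x*2 + 1*y + 1*2, then
% leftint/2 and oneint/2 move and evaluate the integer factors, so the later
% stages can collect and order the result into something of the form
% x*y + 2*x + y + 2.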
%------------------------------------------------------------------------------- % SORTPROD(OLD,NEW) - sort a product term sortprod(X,Y) :- % Put in list form*/ list_terms(X,Z), % Bubble sort the terms*/ busort_terms(Z,W), % Reform product list_terms(Y,W), !. %------------------------------------------------------------------------------- % LIST_TERMS(X,XLIST) - form a list of terms from a product term list_terms(X*Y,[Y|Z]) :- list_terms(X,Z), !. list_terms(X,[X]) :- term(X), !. %------------------------------------------------------------------------------- % BUSORT_TERMS(OLD,NEW) - bubblesort the list of terms busort_terms(L,S) :- gen_append(X,[A,B|Y],L), order_terms(B,A), gen_append(X,[B,A|Y],M), busort_terms(M,S), !. busort_terms(L,L) :- !. %------------------------------------------------------------------------------- % ORDER_TERMS(OLD,NEW) - succeeds if arguments are in required order % ORDER 'div' EXPRNS order_terms(A div B,C div B) :- !, order_exprs(A,C), !. order_terms(_A div B,_C div D) :- !, order_exprs(B,D), !. order_terms(_A div _B,_C mod _D) :- !. order_terms(_A div _B,_C rem _D) :- !. % ORDER '/' EXPRNS order_terms(A / B,C / B) :- !, order_exprs(A,C), !. order_terms(_A / B,_C / D) :- !, order_exprs(B,D), !. order_terms(_A / _B,_C mod _D) :- !. order_terms(_A / _B,_C rem _D) :- !. % ORDER rem EXPRNS order_terms(_A rem _B,_C div _D) :- !, fail. % ORDER rem EXPRNS order_terms(_A rem _B,_C / _D) :- !, fail. order_terms(A rem B,C rem B) :- !, order_exprs(A,C), !. order_terms(_A rem B,_C rem D) :- !, order_exprs(B,D), !. % ORDER mod EXPRNS order_terms(_A mod _B,_C div _D) :- !, fail. % ORDER mod EXPRNS order_terms(_A mod _B,_C / _D) :- !, fail. order_terms(A mod B,C mod B) :- !, order_exprs(A,C), !. order_terms(_A mod B,_C mod D) :- !, order_exprs(B,D), !. order_terms(A,B) :- s_atomic(A), !, ( s_atomic(B), !, less(A,B) ; true ), !. order_terms(_A,B) :- % implicit: (\+ s_atomic(A)) s_atomic(B), !, fail. order_terms(A,B) :- ( defined_funct(A,_,_) ; A = element(_,_) ; A = update(_,_,_) ), \+ ( defined_funct(B,_,_) ; B = element(_,_) ; B = update(_,_,_) ), !. order_terms(A,B) :- !, % put functions in list form A=..Function1, B=..Function2, order_functs(Function1,Function2), !. %------------------------------------------------------------------------------- % less(A,B) succeeds if the s_atomic A is ordered before B % - integers first, then atoms in alphabetic order - % Note : if A=B , the predicate fails. less(Y,X) :- s_integer(Y), !, \+ s_integer(X). less(_Y,X) :- s_integer(X), !, fail. less(Y,X) :- Y @< X, !. %------------------------------------------------------------------------------- % ORDER_EXPRS(A,B) - succeeds if expressions A & B are in right order order_exprs(X,Y) :- listexp(X,X1), % write X as a list of products listexp(Y,Y1), % write Y as a list of products orderlist(X1,Y1), % order by the products in the lists !. %------------------------------------------------------------------------------- % LISTEXP(E,LP) - write expression E as a list LP of products listexp(A,I) :- cv(A,B), sp(B,C), leftint(C,D), oneint(D,E), createlist(E,F), sortlist(F,G), busort_prods(G,H), compress(H,I), !. %------------------------------------------------------------------------------- % ORDERLIST(A,B) - order expressions by the products in them orderlist(_,[]) :- !, fail. orderlist([],_) :- !. orderlist([H|T1],[H|T2]) :- !, orderlist(T1,T2), !. orderlist([H1|_T1],[H2|_T2]) :- list_terms(H1,L1), % implicit: H1\=H2 list_terms(H2,L2), !, order(L1,L2), !. 
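%-------------------------------------------------------------------------------
% Illustrative example of the ordering above: less/2 places integer literals
% before atoms and otherwise compares atoms alphabetically, so less(3, a) and
% less(a, b) both hold; as a result a product entered as b * a * 2 is
% standardised (after the later reorder stage) to 2 * a * b.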
%------------------------------------------------------------------------------- % ORDER_FUNCTS - Functions are ordered first by the predicate name % then by the arguments. Here the functions are in % list form : predicate followed by arguments. order_functs([Name|Arg_list1],[Name|Arg_list2]) :- !, orderargs(Arg_list1,Arg_list2), !. order_functs([Name1|_],[Name2|_]) :- less(Name1,Name2), !. %------------------------------------------------------------------------------- % Equal functions are ordered by the arguments they contain. % Only expressions of type 'integer' can be compared, orderargs([Arg|A],[Arg|B]) :- !, orderargs(A,B), !. orderargs([Arg1|_],[Arg2|_]) :- checktype(Arg1, integer), checktype(Arg2, integer), !, order_exprs(Arg1,Arg2), !. orderargs([Arg1|_Rest1], [Arg2|_Rest2]) :- !, Arg1 @< Arg2. %------------------------------------------------------------------------------- % STAGE 7 % BUSORT_PRODS(OLD,NEW) - order the list of products itself busort_prods(L,S) :- gen_append(X,[A,B|Y],L), order_prods(B,A), gen_append(X,[B,A|Y],M), busort_prods(M,S), !. busort_prods(L,L) :- !. %------------------------------------------------------------------------------- % ORDER_PRODS(A,B) - succeeds if product terms A & B are in right order order_prods(A,B) :- essence(A,A1,_), list_terms(A1,L1), essence(B,B1,_), list_terms(B1,L2), order(L1,L2), !. %------------------------------------------------------------------------------- % ORDER(OLD,NEW) - lists of terms are ordered by the terms they contain order(_,[]) :- !, fail. order(_,[[]]) :- !, fail. order([],_) :- !. order([[]],_) :- !. order([H|T1],[H|T2]) :- !, order(T1,T2), !. order([I1|_T1],[I2|_T2]) :- s_integer(I1), s_integer(I2), !, I1 < I2. order([H1|_T1],[H2|_T2]) :- order_terms(H1,H2), !. %------------------------------------------------------------------------------- % STAGE 8 % COMPRESS(OLD,NEW) - add integers together & add similar product terms compress([X,Y|Z],R) :- s_integer(X), s_integer(Y), T iss X+Y, compress([T|Z],R), !. compress([X,Y|Z],R) :- essence(X,A,M), essence(Y,A,N), Sum iss M+N, compress([A*Sum|Z],R), !. compress([X,Y|Z],R) :- compress([Y|Z],S), compress([X],[C]), R= [C|S], !. compress([X*1],[X]) :- !. compress([X],[X]) :- !. compress([],[]) :- !. %------------------------------------------------------------------------------- % ESSENCE(XI,X,I) - gives integer part I & remainder X of expression essence(X*Y,X,Y) :- s_integer(Y), !. essence(X*Y,X*Y,1) :- % implicit: (\+ s_integer(Y)) term(Y), !. essence(X,[],X) :- s_integer(X), !. essence(X,X,1) :- % implicit: (\+ s_integer(X)) term(X), !. %------------------------------------------------------------------------------- % STAGE 9 % NOZEROS - Remove all zero product terms % Note: (a div b)*0 can NOT be removed as b may be zero nozeros([X*Y*A|T],Z) :- zero(A), s_atomic(Y), nozeros([X*A|T],Z), !. nozeros([X*A|T],Z) :- zero(A), s_atomic(X), nozeros(T,Z), !. nozeros([A|T],Z) :- zero(A), !, nozeros(T,Z), !. nozeros([X|T],[X|Z]) :- % implicit: (\+ zero(X)) nozeros(T,Z), !. nozeros([],[]) :- !. %------------------------------------------------------------------------------- % ZERO(X) - tests to see if X is zero, i.e. either "0" or "-0" zero(0). zero(-(0)). %------------------------------------------------------------------------------- % STAGE 10 % FORM_EXPR(L,E) - reform expression E from list L form_expr([X],Y) :- reorder(X,Y), !. form_expr([H|T],X+Y) :- reorder(H,X), s_integer(X), !, form_expr(T,Y), !. form_expr([H|T],Z) :- reorder(H,X), form_expr(T,Y), sp1(X+Y,Z), !. 
form_expr([],0) :- !. %------------------------------------------------------------------------------- % REORDER(X,Y) - rewrite product X the right way round to get Y reorder(X*1,Y) :- reorder(X,Y), !. reorder(X*(-Y),-Z) :- integer(Y), reorder(X*Y,Z), !. reorder(X*Y,Z) :- reorder(X,A), sp(Y*A,Z), !. reorder(X,X) :- term(X), !. %------------------------------------------------------------------------------- % SP1(X,Y) - use associativity to get rid of brackets in X giving Y sp1(X+(-Y),X1+(-Y)) :- product(Y), sp1(X,X1), !. sp1(X+Y,X1+Y) :- product(Y), sp1(X,X1), !. sp1(X+(Y+Z),A) :- sp1(X+Y+Z,A), !. sp1(-X,-X) :- product(X). sp1(X,X) :- product(X). %------------------------------------------------------------------------------- % SIGN - If the sign of the leftmost product term is minus then % change the sign of every product term and enclose the % whole expression with the unary minus operator. sign(X+Y,-(X1+Y1)) :- sign(X,-X1), changesign(Y,Y1), !. sign(X+Y,X+Y):- !. sign(X,X) :- product(X),!. sign(-X,-X) :- product(X),!. %------------------------------------------------------------------------------- % CHANGESIGN(X,MINUSX) - change the sign of X to get MINUSX changesign(-X,X) :- !. changesign(X,-X) :- !. %------------------------------------------------------------------------------- % TIDY - converts +(-product_term) to -product_term. tidy(X+(-Y),Z-Y1) :- tidy(X,Z), !, tidy(Y, Y1), !. tidy(X+Y,Z+Y1) :- tidy(X,Z), !, tidy(Y, Y1), !. tidy(-X,-Y) :- tidy(X,Y), !. tidy(X,X) :- product(X), !. % ------- END of integer expression standardiser ------------- %############################################################################### % STANDARDISE BOOLEAN EXPRESSIONS %############################################################################### % Reduce relational exprs. in boolean exprs. to standard form, % and simplify exprs. such as 'true or false' to 'true'. % also rewrite exprs such as % 'a and (b and c)' to 'a and b and c'. % Standardise the arguments of quantifiers & boolean functions. do_norm_expr(E,_):- var(E), nl, write('<<< ERROR: illegal variable >>>'), nl, !, fail. do_norm_expr(true,true):- !. do_norm_expr(false,false):- !. do_norm_expr(E,E) :- % boolean variable atomic(E), !. % Write all integer relational expressions in the form: % expr = 0 ; expr <> 0 ; expr > 0. do_norm_expr(A=B,N) :- checktype(A,TYPE), ( TYPE=integer ; TYPE=real ), !, % just in case standard(A-B,E), % write +;- N[A-B] => E , % where 1st term of N[A-B] is without - simp_rel(E=0,N), !. % simplify where possible do_norm_expr(A=B,Z) :- checktype(A,T), do_norm_typed_expr(A,T,X), do_norm_typed_expr(B,T,Y), ( (X=Y, Z=true) ; Z=(X=Y) ), !. do_norm_expr(A<>B,N):- checktype(A,TYPE), ( TYPE=integer ; TYPE=real ), !, % just in case standard(A-B,E), % write in form +;- N[A-B] % where 1st term of N[A-B] is without - simp_rel(E<>0,N), !. % simplify where possible do_norm_expr(A<>B,Z) :- checktype(A,T), do_norm_typed_expr(A,T,X), do_norm_typed_expr(B,T,Y), ( (X=Y, Z=false) ; Z=(X<>Y) ), !. do_norm_expr(A>=B,N):- checktype(A,integer), checktype(B,integer), !, apply(A-B+1,E), % write N[A-B+1] as E simp_rel(E>0,N), !. % and simplify E>0. do_norm_expr(A>=B,N) :- do_norm_expr(B<=A,N), !. do_norm_expr(A<=B,N):- checktype(A,integer), checktype(B,integer), !, apply(B-A+1,E), % write N[B-A+1] as E simp_rel(E>0,N), !. % and simplify E>0. do_norm_expr(A<=B,N):- checktype(A,real), checktype(B,real), !, apply(B-A,E), % write N[A-B+1] as E simp_rel(E>=0,N), !. % and simplify E>0. 
do_norm_expr(A<=B,N) :- checktype(A,T), do_norm_typed_expr(A,T,X), do_norm_typed_expr(B,T,Y), ( X=Y, N=true ; N=(Y>=X) ), !. do_norm_expr(A0,N), !. % and simplify E>0 do_norm_expr(AX) ), !. do_norm_expr(A>B,N) :- do_norm_expr(BB,N), !. do_norm_expr(not(A<>B),N) :- do_norm_expr(A=B,N), !. do_norm_expr(not(A=B,N), !. do_norm_expr(not(A>B),N) :- do_norm_expr(A<=B,N), !. do_norm_expr(not(A<=B),N) :- do_norm_expr(A>B,N), !. do_norm_expr(not(A>=B),N) :- do_norm_expr(A B,A1 -> B1):- do_norm_expr(A,A1), do_norm_expr(B,B1), !. do_norm_expr(A <-> B,A1 <-> B1) :- do_norm_expr(A,A1), do_norm_expr(B,B1), !. % Quantifiers: reduce boolean expressions to normal form % and simplify whole expression where possible, % e.g. 'for_all(x:integer,true)' is equivalent to 'true' do_norm_expr(for_all(X:T, Exp), for_all(X:T, E1)) :- find_core_type(T, CT), !, ( var_const(X, CT, _), !, do_norm_expr(Exp, E1) ; asserta(var_const(X, CT, p)), do_norm_expr(Exp, E1), retract(var_const(X, CT, p)) ; retract(var_const(X, CT, p)), fail ), !. do_norm_expr(for_some(X:T, Exp),for_some(X:T, E1)) :- find_core_type(T, CT), !, ( var_const(X, CT, _), !, do_norm_expr(Exp, E1) ; asserta(var_const(X, CT, p)), do_norm_expr(Exp, E1), retract(var_const(X, CT, p)) ; retract(var_const(X, CT, p)), fail ), !. do_norm_expr(update(A,I,X),Y) :- checktype(A,T), !, do_norm_typed_expr(update(A,I,X),T,Y), !. do_norm_expr(A,B) :- defined_funct(A,Arg_types,boolean), simp_funct(A,Arg_types,B), !. %------------------------------------------------------------------------------- % SIMP_REL(E=0,S) - simplify equality expressions where possible simp_rel(0=0,true) :- !. simp_rel(A=0,false) :- s_integer(A), \+ (0 is A), !. simp_rel(-A=0,X) :- simp_rel(A=0,X), !. simp_rel(A=0,X=Y) :- % catch all esplint(A, X, Y), !. % SIMP_REL(E<>0,S) - simplify inequality expressions where possible simp_rel(0<>0,false) :- !. simp_rel(A<>0,true) :- s_integer(A), \+ (0 is A), !. simp_rel(-A<>0,X) :- simp_rel(A<>0,X), !. simp_rel(A<>0,X<>Y) :- % catch all esplint(A, X, Y), !. % SIMP_REL(E>0,S) - simplify greater-than expressions where possible simp_rel(-A>0,false):- integer(A), A>=0, !. simp_rel(0>0,false) :- !. simp_rel(A>0,true) :- integer(A), A\=0, !. simp_rel(A>0,X>Y) :- % catch all splint(A, X, Y), !. % SIMP_REL(E<=0,S) - simplify >= expressions (reals) where possible simp_rel(0>=0,true) :- !. simp_rel(A>=0,TRUTH) :- intexp(A), VAL iss A, ( VAL >= 0, TRUTH = true ; VAL < 0, TRUTH = false ), !. simp_rel(A>=0,X>=Y) :- % catch all splint(A, X, Y), !. %------------------------------------------------------------------------------- esplint(I+X, X, II) :- s_integer(I), II iss -I, !. esplint(I-X, X, I) :- s_integer(I), !. esplint(X, X, 0) :- !. splint(I+X, X, II) :- s_integer(I), II iss -I, !. splint(I-X, -X, II) :- s_integer(I), II iss -I, !. splint(X, X, 0) :- !. %------------------------------------------------------------------------------- is_a_valid_type(integer). is_a_valid_type(boolean). is_a_valid_type(real). is_a_valid_type(X) :- type(X, _). %------------------------------------------------------------------------------- built_in((+), [integer, integer], integer). built_in((-), [integer, integer], integer). built_in((*), [integer, integer], integer). built_in((div), [integer, integer], integer). built_in((/), [integer, integer], integer). built_in((mod), [integer, integer], integer). built_in((rem), [integer, integer], integer). built_in((-), [integer], integer). built_in((/), [real, real], real ). built_in(abs, [integer], integer). built_in(sqr, [integer], integer). 
built_in((=), ['ANY', 'ANY'], boolean).
built_in((<>), ['ANY', 'ANY'], boolean).
built_in((>), ['ANY', 'ANY'], boolean).
built_in((<), ['ANY', 'ANY'], boolean).
built_in((>=), ['ANY', 'ANY'], boolean).
built_in((<=), ['ANY', 'ANY'], boolean).
built_in((not), [boolean], boolean).
built_in((and), [boolean, boolean], boolean).
built_in((or), [boolean, boolean], boolean).
built_in((->), [boolean, boolean], boolean).
built_in((<->), [boolean, boolean], boolean).
built_in(odd, [integer], boolean).
built_in(pred, ['ANY'], 'ANY' ).
built_in(succ, ['ANY'], 'ANY' ).
built_in(length, ['ANY'], integer).
built_in(first, ['ANY'], 'ANY' ).
built_in(last, ['ANY'], 'ANY' ).
built_in(nonfirst, ['ANY'], 'ANY' ).
built_in(nonlast, ['ANY'], 'ANY' ).
built_in((@), ['ANY', 'ANY'], 'ANY' ).
built_in((\/), ['ANY', 'ANY'], 'ANY' ).
built_in((/\), ['ANY', 'ANY'], 'ANY' ).
built_in((/), ['ANY', 'ANY'], 'ANY' ).
built_in((in), ['ANY', 'ANY'], boolean).
built_in((not_in), ['ANY', 'ANY'], boolean).
built_in((subset_of), ['ANY', 'ANY'], boolean).
built_in((strict_subset_of), ['ANY', 'ANY'], boolean).
built_in('.', ['ANY', 'ANY'], 'ANY' ).
%-------------------------------------------------------------------------------
save_term_breakdown(NEW, _) :- is_relational_expression(NEW, _Rop, X, _, _), know_term_breakdown(X, _), !.
save_term_breakdown(NEW, _) :- is_relational_expression(NEW, _Rop, X, _, T), ( T = integer ; T = real ), !, make_breakdown_list(X, LIST), assertz(know_term_breakdown(X, LIST)), !.
save_term_breakdown(NEW, _) :- know_term_breakdown(NEW, _), !.
save_term_breakdown(NEW, T) :- ( T = integer ; T = real ), !, make_breakdown_list(NEW, LIST), assertz(know_term_breakdown(NEW, LIST)), !.
save_term_breakdown(_, _) :- % otherwise
   !.
%-------------------------------------------------------------------------------
make_breakdown_list(X=Y, Z=Y) :- !, make_breakdown_list(X, Z).
make_breakdown_list(X<>Y, Z<>Y) :- !, make_breakdown_list(X, Z).
make_breakdown_list(X<=Y, Z<=Y) :- !, make_breakdown_list(X, Z).
make_breakdown_list(X>=Y, Z>=Y) :- !, make_breakdown_list(X, Z).
make_breakdown_list(X<Y, Z<Y) :- !, make_breakdown_list(X, Z).
make_breakdown_list(X>Y, Z>Y) :- !, make_breakdown_list(X, Z).
make_breakdown_list(X+Y, [[T,I]|REST]) :- breakdown_term(Y, T, I), !, make_breakdown_list(X, REST).
make_breakdown_list(X-Y, [[T,I]|REST]) :- breakdown_term(-Y, T, I), !, make_breakdown_list(X, REST).
make_breakdown_list(X, [[T,I]]) :- breakdown_term(X, T, I), !.
%-------------------------------------------------------------------------------
breakdown_term(-X, T, I) :- breakdown_term(X, T, II), I iss -II, !.
breakdown_term(X*Y, Y, X) :- s_integer(X), !.
breakdown_term(X*Y, T*Y, I) :- breakdown_term(X, T, I), !.
breakdown_term(X, X, 1) :- !.
%-------------------------------------------------------------------------------
% Here, calculated normalised forms are cached, to avoid their
% recalculation.
save_norm_expr(OLD, NEW) :- assertz(know_norm_expr(OLD, NEW)), !.
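%-------------------------------------------------------------------------------
% Illustrative note on the cache: once norm_typed_expr(a + a, integer, N) has
% succeeded, a know_norm_expr(a + a, ...) fact exists, so a later call on the
% identically-written expression is answered by the first norm_typed_expr
% clause without re-running the standardisation passes.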
%############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/zombiescope.windows.manifest0000755000175000017500000000056611753202337022357 0ustar eugeneugen spark-2012.0.deb/simplifier/deduction.pro0000644000175000017500000115450711753202337017322 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Facilities to prove that a relational expression is true, given the % current hypotheses of the VC. %############################################################################### %############################################################################### % MODULE %############################################################################### %############################################################################### % DEPENDENCIES %############################################################################### %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % int(+Exp). %------------------------------------------------------------------------------- % Integer test. %=============================================================================== int(X) :- (integer(X) ; (X=(-Y), integer(Y))), !. %=============================================================================== % testused(+Exp). %------------------------------------------------------------------------------- % Simple inference. %=============================================================================== testused(X=Y) :- ( used(X=Y) ; used(Y=X) ), !, fail. testused(X=Y) :- advance_used_facts(X=Y). testused(X=Y) :- retreat_used_facts(X=Y). testused(X>=Y) :- used(X>=Y), !, fail. testused(X>=Y) :- advance_used_facts(X>=Y). testused(X>=Y) :- retreat_used_facts(X>=Y). testused(X>Y) :- used(X>Y), !, fail. testused(X>Y) :- advance_used_facts(X>Y). testused(X>Y) :- retreat_used_facts(X>Y). 
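% Illustrative sketch (added for clarity; not part of the original source).
% The testused/1 clauses above follow a mark-on-entry / unmark-on-backtracking
% pattern: a fact is rejected if it is already recorded as used, recorded and
% accepted otherwise, and the record is retracted again if the caller
% backtracks past it. A minimal, self-contained version of the same pattern,
% using the hypothetical predicates demo_once_per_fact/1 and demo_used/1:

:- dynamic demo_used/1.

demo_once_per_fact(F) :- demo_used(F), !, fail.           % already consumed: reject
demo_once_per_fact(F) :- assertz(demo_used(F)).           % record the fact and succeed
demo_once_per_fact(F) :- retract(demo_used(F)), !, fail.  % undo the record on backtracking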
%------------------------------------------------------------------------------- advance_used_facts(X=Y) :- (\+ X=Y), (\+ used(X=Y)), (\+ used(Y=X)), assertz(used(X=Y)), !. advance_used_facts(X>=Y) :- (\+ used(X>=Y)), assertz(used(X>=Y)), !. advance_used_facts(X>Y) :- (\+ used(X>Y)), assertz(used(X>Y)), !. %------------------------------------------------------------------------------- retreat_used_facts(X=Y) :- retract(used(X=Y)), !, fail. retreat_used_facts(X=Y) :- retract(used(Y=X)), !, fail. retreat_used_facts(X>=Y) :- retract(used(X>=Y)), !, fail. retreat_used_facts(X>Y) :- retract(used(X>Y)), !, fail. %=============================================================================== % fact(?Hyp_Term, ?HypId_List) %------------------------------------------------------------------------------- % Hyp_Term is a hypothesis or is equivalent to a hypothesis. %=============================================================================== fact(Hyp_Term, [HypId_Int]) :- get_hyp(Hyp_Term, x, HypId_Int). fact(X=Y, [HypId_Int]) :- ( get_hyp(not X<>Y, x, HypId_Int) ; get_hyp(Y=X, x, HypId_Int) ; get_hyp(not Y<>X, x, HypId_Int) ). fact(X<>Y, [HypId_Int]) :- ( get_hyp(not X=Y, x, HypId_Int) ; get_hyp(Y<>X, x, HypId_Int) ; get_hyp(not Y=X, x, HypId_Int) ). fact(X>Y, [HypId_Int]) :- ( get_hyp(not X<=Y, x, HypId_Int) ; get_hyp(Y=X, x, HypId_Int) ). fact(X=Y, x, HypId_Int) ; get_hyp(Y>X, x, HypId_Int) ; get_hyp(not Y<=X, x, HypId_Int) ). fact(X>=Y, [HypId_Int]) :- ( get_hyp(not XX, x, HypId_Int) ). fact(X<=Y, [HypId_Int]) :- ( get_hyp(not X>Y, x, HypId_Int) ; get_hyp(Y>=X, x, HypId_Int) ; get_hyp(not Y=Y,H1), fact(X<=Y,H2), append(H1, H2, Hs). standard_infrule(X<>Y,Hs) :- fact(X>Y,Hs) ; fact(XY,Hs) :- fact(X>=Y,H1), fact(X<>Y,H2), append(H1, H2, Hs). standard_infrule(XY,H2), append(H1, H2, Hs). standard_infrule(X>=Y,Hs) :- (fact(X=Y, Hs), testused(X=Y)) ; fact(X>Y,Hs). standard_infrule(X<=Y,Hs) :- (fact(X=Y,Hs), testused(X=Y)) ; fact(X=v holds given a hypothesis of % the form for_all(i:t, element(a,[i])>=v and element(a,[i])<=u) (plus % other variants involving range constraints on the left of implications % within the quantified formulae, use of record functions, etc.). In each % case, one side of the inequality (<= or >= only) must be either an array % access or a record access. extended_infrule(X<=Y, Hs) :- novars(X), ( X = element(_,_) ; record_function(_, X, access, _, _, _) ), ( get_forall_hyp(X <= Y, Conditions, N) ; get_forall_hyp(Y >= X, Conditions, N) ), novars(Y), testused(Y>=X), var_free(Conditions), decrement_inference_depth_remaining(infrule), ( do_infer_side_conditions(Conditions, HCL), Success = true ; Success = fail ), !, increment_inference_depth_remaining(infrule), call(Success), !, merge_sort([N], HCL, Hs). extended_infrule(X<=Y, Hs) :- novars(Y), ( Y = element(_,_) ; record_function(_, Y, access, _, _, _) ), ( get_forall_hyp(X <= Y, Conditions, N) ; get_forall_hyp(Y >= X, Conditions, N) ), novars(X), testused(Y>=X), var_free(Conditions), decrement_inference_depth_remaining(infrule), ( do_infer_side_conditions(Conditions, HCL), Success = true ; Success = fail ), !, increment_inference_depth_remaining(infrule), call(Success), !, merge_sort([N], HCL, Hs). 
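% Illustrative note on fact/2 and standard_infrule/2 defined earlier in this
% section (added for clarity; not part of the original source; the hypothesis
% number and the atoms a, b are hypothetical). Roughly: fact/2 treats
% syntactic variants of a hypothesis as interchangeable, so a hypothesis
% stored as not (a <= b) (and, it appears, as b < a) can back the goal a > b.
% Hence, if hypothesis 3 is a > b, then standard_infrule(a >= b, Hs) should
% succeed with Hs = [3], via the "strictly greater implies greater-or-equal"
% branch.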
extended_infrule(X>=Y, Hs) :- novars(X), ( X = element(_,_) ; record_function(_, X, access, _, _, _) ), ( get_forall_hyp(X >= Y, Conditions, N) ; get_forall_hyp(Y <= X, Conditions, N) ), novars(Y), testused(X>=Y), var_free(Conditions), decrement_inference_depth_remaining(infrule), ( do_infer_side_conditions(Conditions, HCL), Success = true ; Success = fail ), !, increment_inference_depth_remaining(infrule), call(Success), !, merge_sort([N], HCL, Hs). extended_infrule(X>=Y, Hs) :- novars(Y), ( Y = element(_,_) ; record_function(_, Y, access, _, _, _) ), ( get_forall_hyp(X >= Y, Conditions, N) ; get_forall_hyp(Y <= X, Conditions, N) ), novars(X), testused(X>=Y), var_free(Conditions), decrement_inference_depth_remaining(infrule), ( do_infer_side_conditions(Conditions, HCL), Success = true ; Success = fail ), !, increment_inference_depth_remaining(infrule), call(Success), !, merge_sort([N], HCL, Hs). %=============================================================================== % deduce(+R,+T,-Hs). %------------------------------------------------------------------------------- % Deduce relational expression R (whose lhs and rhs are of type T) from VC, % giving Hs instantiated to list of hypotheses used. %=============================================================================== deduce(true,_,[]). deduce(X,_,Hs) :- infrule(X,Hs). % Numeric upper bounds %================================================== % These rules cater for an integer bound N in the goal formula, and search % for corresponding facts (via infrule) which give an integer bound % sufficient to prove that N is a bound also -- e.g. if X<=K and K<=N, then % X<=N. % Less than or equal to %---------------------- deduce(X<=N, T, Hs) :- i_am_using_rule(le_trans_1), int(N), ( infrule(X < K, Hs), int(K), T=integer, % For integers: X<=M, where M=K-1 M=K-1 ; infrule(X <= K, Hs), testused(K >= X), int(K), M=K ), simplify(M<=N, true), !. deduce(N<=X, T, Hs) :- i_am_using_rule(le_trans_2), int(N), ( infrule(K < X, Hs), int(K), T=integer, % For integers: M<=X where M=K+1 M=K+1 ; infrule(K <= X, Hs), testused(X >= K), int(K), M=K ), simplify(N<=M, true), !. % Support reasoning of the form: % (X < K and K <= (N+1)) -> (X <= N) % Where N is an integer literal. This pattern can crop up when traversing % an array in SPARK using a "for" loop. deduce(X<=N, T, Hs) :- i_am_using_rule(le_trans_3), int(N), T=integer, M iss N+1, infrule(X (X <= N) % where N is an integer literal. % % This is really a generalisation of le_trans_3 where terms like (N + 1) + % 1 get evaluated earlier to N + 2, and so no longer match le_trans_3 % above. A full generalisation of this rule to handle: % X + C < K and % K <= N + C + 1 % -> % X <= N % where C = 0 (le_trans_3 above) % C = 1 (this rule) % C = 2, 3, 4 ... (could be considered in future) deduce(X<=N, T, Hs) :- i_am_using_rule(le_trans_3b), int(N), T=integer, M iss N+2, infrule(X+1 % C: X <= S(EXP) % % Where EXP is an expression and S is an operation (or series of operations) % on that expression. This situation occurs where reasoning about loop % iteration. For example, where reasoning about a for-loop, a variable I % will be incremented (or decremented) on each iteration, leading to loop % iteration VCs of the form: I<=EXP -> I <= EXP+1. % % (Such goals conform to the Rippling heuristic. However, this would require % a significant re-engineering of the Simplifier to implement. Instead the % specific case identified here is directly targeted with a hard coded % transitivity rule.) 
% % Essentially, the game is to exploit (fertilise with) hypothesis H, % enabling the conclusion to be simplified, in the hope that the simplified % conclusion is now more tractable. Transitivity enables 'C' to be broken % into two conjuncts. It can be arranged so that one of the conjuncts match % with 'H'. Hopefully, the second conjunct (the proof residue) should be % simpler. % Note that a single transitivity rule is hard coded into this strategy. % Other rules have the potential to be useful. However, as the examiner % pushes variables to the left and their constraints to the right (Var <= % Constraint), other transitive rules will cost the same, but have less % impact. deduce(I<=K, _T, Hs) :- i_am_using_rule(le_trans_4), % The checker gives us: % transitivity(10): K>=I may_be_deduced_from [ I<=J, J<=K ]. % Which can be expressed as: % ((I<=J) and (J<=K)) -> I<=K % So: % Find a hypothesis of the form: I<=J (making this conjunct trivially true) % Which will instantiate: J<=K (if this is true, can do the deduction) infrule((I<=J), Hs), %Choose to restrict the J to non-integers and non-atomic items. (As %these cases are handled quite well elsewhere.) \+ int(J), \+ atomic(J), % Must be able to prove the residue! Ideally, this should be undertaken % as a subgoal, enabling the reuse of other strategies and exploiting of % hypotheses. However, this is quite difficult to implement. Instead look % for the term being internally true (no hypotheses needed) through a % combination of normalisation and simplification. This has the capacity % to be effective on more simple problems. norm_typed_expr((J<=K), boolean, Normalised), simplify(Normalised, true), !. % Greater than or equal to %------------------------- deduce(N>=X, T, Hs) :- i_am_using_rule(ge_trans_1), int(N), ( infrule(X < K, Hs), int(K), T=integer, % For integers: X<=M, where M=K-1 M=K-1 ; infrule(X <= K, Hs), testused(K >= X), int(K), M=K ), simplify(M<=N, true), !. deduce(X>=N, T, Hs) :- i_am_using_rule(ge_trans_2), int(N), ( infrule(K < X, Hs), int(K), T=integer, % For integers: M<=X where M=K+1 M=K+1 ; infrule(K <= X, Hs), testused(X >= K), int(K), M=K ), simplify(N<=M, true), !. % Allow reasoning of the form: % (X > K and K >= (N-1)) -> (X >= N) % where N is an integer literal. This pattern can crop up when traversing % an array in SPARK using a "reverse for" loop. deduce(X>=N, T, Hs) :- i_am_using_rule(ge_trans_3), int(N), T=integer, M iss N-1, infrule(X>K, H1), infrule(K>=M, H2), !, merge_sort(H1, H2, Hs). % Less than %---------- deduce(X= X), int(K), simplify(K= K), int(K), simplify(NX, _, Hs) :- i_am_using_rule(gt_trans_1), int(N), ( infrule(X < K, Hs), int(K), simplify(K<=N, true) ; infrule(X <= K, Hs), testused(K >= X), int(K), simplify(KN, _, Hs) :- i_am_using_rule(gt_trans_2), int(N), ( infrule(K < X, Hs), int(K), simplify(N<=K, true) ; infrule(K <= X, Hs), testused(X >= K), int(K), simplify(NX-B,T,Hs) :- deduce(A<>B,T,Hs). deduce(X+A<>X+B,T,Hs) :- deduce(A<>B,T,Hs). % Alternative representations of N+X<>Y, where N is an integer. Each is % converted into N+X<>Y form and the code for X+Y=\Z on the next page used % to attempt the proof. The conditions used prevent infinite looping. deduce(X+N<>Y,T,Hs) :- int(N), (\+ int(X)), deduce(N+X<>Y,T,Hs). deduce(X-N<>Y,T,Hs) :- int(N), evaluate(-N,M), deduce(M+X<>Y,T,Hs). deduce(Y<>X-N,T,Hs) :- int(N), evaluate(-N,M), deduce(M+X<>Y,T,Hs). deduce(N-X<>Y,T,Hs) :- int(N), evaluate(-X,Z), deduce(N+Z<>Y,T,Hs). deduce(Y<>N+X,T,Hs) :- int(N), Y\=_+_, Y\=_-_, deduce(N+X<>Y,T,Hs). 
deduce(Y<>X+N,T,Hs) :- int(N), Y\=_+_, Y\=_-_, deduce(N+X<>Y,T,Hs). deduce(Y<>N-X,T,Hs) :- int(N), Y\=_+_, Y\=_-_, evaluate(-X,Z), deduce(N+Z<>Y,T,Hs). deduce(X+Y<>N,T,Hs) :- int(N), \+ int(X), \+ int(Y), evaluate(-X,Z), deduce(N+Z<>Y,T,Hs). deduce(X-Y<>N,T,Hs) :- int(N), \+ int(X), \+ int(Y), deduce(N+Y<>X,T,Hs). deduce(X+Y<>Z,T,Hs) :- i_am_using_rule(ineq_1), ( infrule(Y+X<>Z,Hs) ; infrule(Y<>Z-X,Hs) ; evaluate(-X,W), infrule(Y<>W+Z,Hs) ; int(X), /* If X is an integer - */ ( simplify(X=0,true), /* - and =0, prove Y<>Z or fail */ ( deduce(Y<>Z,T,Hs) ; !, fail ) ; deduce(Y=Z,T,Hs) /* - and <>0, prove Y=Z */ ; int(Y), /* - and Y is an integer, sum */ ( /* them & prove sum=Z or fail */ evaluate(Y+X,W), deduce(Z<>W,T,Hs) ; !, fail ) ; int(Z), /* - and Z is an integer subtract */ ( /* & prove diff<>Y or fail */ evaluate(Z-X,W), deduce(Y<>W,T,Hs) ; !, fail ) ) ; (\+ int(Y)), /* If neither Y nor Z is an */ (\+ int(Z)), /* integer, try finding a */ ( /* substitution for Y and */ nonvar(Y), infrule(Y=W,H1), /* proving resulting inequality. */ testused(Y=W), deduce(X+W<>Z,T,H2), append(H1, H2, Hs) ; !, fail ) ). % Greater than %------------- deduce(X+Y>Z,T,Hs) :- i_am_using_rule(gt_1), ( infrule(Y+X>Z,Hs) ; infrule(Y>Z-X,Hs) ; evaluate(-X,W), infrule(Y>W+Z,Hs) ; int(X), /* If X is an integer - */ ( simplify(X=0,true), /* - and X=0, prove Y>Z or fail */ ( deduce(Y>Z,T,Hs) ; !, fail ) ; int(Y), /* - and Y is an integer, sum and */ ( /* prove sum>Z or fail */ evaluate(Y+X,W), deduce(ZW,T,Hs) ; !, fail ) ; nonvar(Y), /* - and Y & Z are the same, show */ nonvar(Z), /* X>0 or fail */ Y=Z, ( simplify(X>0,true), Hs=[] ; !, fail ) ; simplify(X>0,true), /* - and X>0, prove Y>=Z */ deduce(Y>=Z,T,Hs) ; T=integer, evaluate(X-1,W), /* - subtract 1 to give more */ ( /* equivalent forms to search */ infrule(W+Y>=Z,Hs) /* for which use the operator */ ; /* >= instead of >. */ infrule(Y+W>=Z,Hs) ; infrule(Y>=Z-W,Hs) ; evaluate(-W,V), infrule(Y>=V+Z,Hs) ) ) ; (\+ int(Y)), /* If neither Y nor Z are */ (\+ int(Z)), /* integers, try finding a */ ( /* substitution for Y and prove */ ( /* the resulting expression. Two */ nonvar(Y), infrule(Y>W,H1), /* >= followed by >. */ deduce(X+W>=Z,T,H2), append(H1, H2, Hs) ; !, fail ) ; ( nonvar(Y), infrule(Y>=W,H1), testused(Y>=W), deduce(X+W>Z,T,H2), append(H1, H2, Hs) ; !, fail ) ) ). % Alternate representations of N+X>Y, where N is an integer. All are % converted to a N+X>Y form and the above code used to attempt the proof. % The conditions before altering expressions are included to prevent % infinite loops. deduce(X+N>Y,T,Hs) :- int(N), (\+ int(X)), deduce(N+X>Y,T,Hs). deduce(X-N>Y,T,Hs) :- int(N), evaluate(-N,M), deduce(M+X>Y,T,Hs). deduce(YY,T,Hs). deduce(N-X>Y,T,Hs) :- int(N), evaluate(-X,Z), deduce(N+Z>Y,T,Hs). deduce(X+N>Y,T,Hs) :- i_am_using_rule(new_gt_1), int(N), simplify(N>0, true), deduce(Y<=X, T, Hs). deduce(N+X>Y,T,Hs) :- i_am_using_rule(new_gt_2), int(N), simplify(N>0, true), deduce(Y<=X, T, Hs). deduce(X>Y-N,T,Hs) :- i_am_using_rule(new_gt_3), int(N), simplify(N>0, true), deduce(Y<=X, T, Hs). deduce(X-N>Y,T,Hs) :- i_am_using_rule(new_gt_4), int(N), simplify(N<0, true), deduce(Y<=X, T, Hs). deduce(X>Y+N,T,Hs) :- i_am_using_rule(new_gt_5), int(N), simplify(N<0, true), deduce(Y<=X, T, Hs). deduce(X>N+Y,T,Hs) :- i_am_using_rule(new_gt_6), int(N), simplify(N<0, true), deduce(Y<=X, T, Hs). deduce(YY,T,Hs). deduce(YY,T,Hs). deduce(YY,T,Hs). deduce(X+YY,T,Hs). deduce(X-YX,T,Hs). 
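% Illustrative worked example for the new_gt_* rules above (added for
% clarity; not part of the original source; i and j are hypothetical).
% new_gt_2 rewrites a goal of the form N + X > Y, with N a positive integer
% literal, into Y <= X; so the goal 1 + i > j is discharged directly from a
% hypothesis j <= i.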
% Less than %---------- deduce(X+YW,T,Hs) ; !, fail ) ; int(Z), /* - and Z is an integer, take X */ ( /* away & prove diff>Y or fail */ evaluate(Z-X,W), deduce(YX,true), Hs=[] ; !, fail ) ; simplify(0>X,true), /* - and X<0, prove Y<=Z */ deduce(Y<=Z,T,Hs) ; T=integer, evaluate(X+1,W), /* - add 1 to X to give more */ ( /* equivalent forms to search */ infrule(W+Y<=Z,Hs) /* for which use the operator */ ; /* <= instead of <. */ infrule(Y+W<=Z,Hs) ; infrule(Y<=Z-W,Hs) ; evaluate(-W,V), infrule(Y<=V+Z,Hs) ) ) ; (\+ int(Y)), /* If neither Y nor Z are */ (\+ int(Z)), /* integers, try finding a */ ( /* substitution for Y and prove */ ( /* the resulting expression. Two */ nonvar(Y), infrule(Y=Y), deduce(X+W>Z,T,H2), append(H1, H2, Hs) ; !, fail ) ) ). % Alternate representations of N+XX-N,T,Hs) :- int(N), evaluate(-N,M), deduce(M+X0, true), deduce(Y<=X, T, Hs). deduce(Y0, true), deduce(Y<=X, T, Hs). deduce(Y-N0, true), deduce(Y<=X, T, Hs). deduce(YN+X,T,Hs) :- int(N), Y\=_+_, Y\=_-_, deduce(N+XX+N,T,Hs) :- int(N), Y\=_+_, Y\=_-_, deduce(N+XN-X,T,Hs) :- int(N), Y\=_+_, Y\=_-_, evaluate(-X,Z), deduce(N+ZN,T,Hs) :- int(N), (\+ int(X)), (\+ int(Y)), evaluate(-X,Z), deduce(N+ZN,T,Hs) :- int(N), (\+ int(X)), (\+ int(Y)), deduce(N+Y=Z where Z is an integer literal - a common case for % Range_Check in SPARK. deduce(X+Y>=Z,_,Hs) :- i_am_using_rule(ge_1), checktype(X,integer), /* If X and Y are both integer typed... */ checktype(Y,integer), int(Z), /* ...and Z is an integer literal */ ( /* ... then find lower bounds on X and Y */ infrule(X > XLminus1, H1), int(XLminus1), XL=XLminus1+1 ; infrule(X >= XL, H1), int(XL) ), ( infrule(Y > YLminus1, H2), int(YLminus1), YL=YLminus1+1 ; infrule(Y >= YL, H2), int(YL) ), /* unnecessary cuts removed, ION, 25/03/04 */ evaluate(XL + YL, LB), simplify(LB >= Z, true), /* ...and compare those with Z */ !, append(H1, H2, Hs). % Special case for X+Y<=Z where Z is an integer literal - a common case for % Range_Check in SPARK. deduce(X+Y<=Z,_,Hs) :- i_am_using_rule(le_1), checktype(X,integer), /* If X and Y are both integer typed... */ checktype(Y,integer), int(Z), /* ...and Z is an integer literal */ ( /* ... then find upper bounds on X and Y */ infrule(X < XUplus1, H1), int(XUplus1), XU=XUplus1-1 ; infrule(X <= XU, H1), int(XU) ), ( infrule(Y < YUplus1, H2), int(YUplus1), YU=YUplus1-1 ; infrule(Y <= YU, H2), int(YU) ), /* unnecessary cuts removed, ION, 25/03/04 */ evaluate(XU + YU, UB), simplify(UB <= Z, true), /* ...and compare those with Z */ !, append(H1, H2, Hs). 
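% Illustrative sketch of the interval-addition idea behind le_1/ge_1 above
% (added for clarity; not part of the original source). If integer upper
% bounds XU and YU for X and Y are available from the hypotheses (either as
% X <= XU or as X < XU + 1), then X + Y <= Z is discharged whenever
% XU + YU <= Z holds. A minimal, self-contained version using hypothetical
% predicate names and the ISO =< operator rather than the Simplifier's <=:

demo_upper_bound(_ =< U, U) :- integer(U).
demo_upper_bound(_ < V, U)  :- integer(V), U is V - 1.

demo_sum_le(BoundOnX, BoundOnY, Z) :-
    demo_upper_bound(BoundOnX, XU),
    demo_upper_bound(BoundOnY, YU),
    XU + YU =< Z.

% e.g. demo_sum_le(x =< 60, y < 41, 100) holds, mirroring how hypotheses
% x <= 60 and y <= 40 discharge the goal x + y <= 100.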
deduce(X+Y>=Z,T,Hs) :- i_am_using_rule(ge_2), ( infrule(Y+X>=Z,Hs) ; infrule(Y>=Z-X,Hs) ; infrule(X>=Z-Y,Hs) ; evaluate(-X,W), infrule(Y>=W+Z,Hs) ; int(X), /* If X is an integer literal */ ( simplify(X=0,true), /* and X=0, prove Y>=Z or fail */ ( deduce(Y>=Z,T,Hs) ; !, fail ) ; int(Y), /* - and Y is an integer literal , sum and */ ( /* prove sum>=Z or fail */ evaluate(Y+X,W), deduce(Z<=W,T,Hs) ; !, fail ) ; int(Z), /* - and Z is an integer, take */ ( /* away X & show diff<=Y or fail*/ evaluate(Z-X,W), deduce(Y>=W,T,Hs) ; !, fail ) ; nonvar(Y), nonvar(Z), /* - and Y & Z are the same, show */ Y=Z, /* X>=0 or fail */ ( simplify(X>=0,true), Hs=[] ; !, fail ) ; simplify(X>=0,true),deduce(Y>=Z,T,Hs) /* - and X>=0, prove Y>=Z */ ; T=integer, evaluate(X+1,W), /* - add 1 to X giving more */ ( /* equivalent forms using the */ infrule(W+Y>Z,Hs) /* operator > instead of >= */ ; infrule(Y+W>Z,Hs) ; infrule(Y>Z-W,Hs) ; evaluate(-W,V), infrule(Y>V+Z,Hs) ) ; ( infrule(W+Y>=Z,Hs) /* - and W+Y>=Z, */ ; infrule(Y+W>=Z,Hs) ; infrule(Y>=Z-W,Hs) ), int(W), /* and W is an integer literal, */ simplify(X>=W,true) /* and X>=W, then X+Y>=W+Y>=Z */ ) ; int(Z), ( infrule(X >= A, H1), /* If X>=A and */ int(A), simplify(Z-A, B), deduce(Y >= B, T, H2) /* Y>=Z-A, then X+Y>=Z */ ; infrule(Y >= B, H1), /* Otherwise, if Y>=B and */ int(B), simplify(Z-B, A), deduce(X >= A, T, H2) /* X>=Z-B, then X+Y>=Z */ ), append(H1, H2, Hs) ; (\+ int(Y)), (\+ int(Z)), /* If neither Y nor Z are */ ( /* integers, try finding a */ nonvar(Y), infrule(Y>=W,H1), testused(Y>=W), deduce(X+W>=Z,T,H2), append(H1, H2, Hs) ; !, fail ) ). % Alternative representations of N+X>=Y where N is an integer. All are % converted into N+X>=Y form and code above is used to attempt the proof. % Conditions before altering expressions are included to prevent infinite % looping. deduce(X+N>=Y,T,Hs) :- int(N), (\+ int(X)), deduce(N+X>=Y,T,Hs). deduce(X-N>=Y,T,Hs) :- int(N), evaluate(-N,M), deduce(M+X>=Y,T,Hs). deduce(Y<=X-N,T,Hs) :- int(N), evaluate(-N,M), deduce(M+X>=Y,T,Hs). deduce(N-X>=Y,T,Hs) :- int(N), evaluate(-X,Z), deduce(N+Z>=Y,T,Hs). deduce(N-X>=M,T,Hs) :- int(N), int(M), evaluate(N-M,L), deduce(X<=L,T,Hs). deduce(Y<=N+X,T,Hs) :- int(N), Y\=_+_, Y\=_-_, deduce(N+X>=Y,T,Hs). deduce(Y<=X+N,T,Hs) :- int(N), Y\=_+_, Y\=_-_, deduce(N+X>=Y,T,Hs). deduce(X+N>=Y,T,Hs) :- i_am_using_rule(new_ge_1), int(N), simplify(N>=0, true), deduce(Y<=X, T, Hs). deduce(N+X>=Y,T,Hs) :- i_am_using_rule(new_ge_2), int(N), simplify(N>=0, true), deduce(Y<=X, T, Hs). deduce(X>=Y-N,T,Hs) :- i_am_using_rule(new_ge_3), int(N), simplify(N>=0, true), deduce(Y<=X, T, Hs). deduce(X-N>=Y,T,Hs) :- i_am_using_rule(new_ge_4), int(N), simplify(N<=0, true), deduce(Y<=X, T, Hs). deduce(X>=Y+N,T,Hs) :- i_am_using_rule(new_ge_5), int(N), simplify(N<=0, true), deduce(Y<=X, T, Hs). deduce(X>=N+Y,T,Hs) :- i_am_using_rule(new_ge_6), int(N), simplify(N<=0, true), deduce(Y<=X, T, Hs). deduce(Y<=N-X,T,Hs) :- int(N), Y\=_+_, Y\=_-_, evaluate(-X,Z), deduce(N+Z>=Y,T,Hs). deduce(X+Y<=N,T,Hs) :- int(N), (\+ int(X)), (\+ int(Y)), evaluate(-X,Z), deduce(N+Z>=Y,T,Hs). deduce(X-Y<=N,T,Hs) :- int(N), (\+ int(X)), (\+ int(Y)), deduce(N+Y>=X,T,Hs). % Less-than-or-equals %-------------------- % Alternative representations of N+X>=Y where N is an integer. All are % converted into N+X>=Y form and code above is used to attempt the proof. % Conditions before altering expressions are included to prevent infinite % looping. deduce(X+N<=Y,T,Hs) :- int(N), (\+ int(X)), deduce(N+X<=Y,T,Hs). 
deduce(X-N<=Y,T,Hs) :- int(N), evaluate(-N,M), deduce(M+X<=Y,T,Hs). deduce(Y>=X-N,T,Hs) :- int(N), evaluate(-N,M), deduce(M+X<=Y,T,Hs). deduce(N-X<=Y,T,Hs) :- int(N), evaluate(-X,Z), deduce(N+Z<=Y,T,Hs). deduce(Y<=X+N,T,Hs) :- i_am_using_rule(new_le_1), int(N), simplify(N>=0, true), deduce(Y<=X, T, Hs). deduce(Y<=N+X,T,Hs) :- i_am_using_rule(new_le_2), int(N), simplify(N>=0, true), deduce(Y<=X, T, Hs). deduce(Y-N<=X,T,Hs) :- i_am_using_rule(new_le_3), int(N), simplify(N>=0, true), deduce(Y<=X, T, Hs). deduce(Y<=X-N,T,Hs) :- i_am_using_rule(new_le_4), int(N), simplify(N<=0, true), deduce(Y<=X, T, Hs). deduce(Y+N<=X,T,Hs) :- i_am_using_rule(new_le_5), int(N), simplify(N<=0, true), deduce(Y<=X, T, Hs). deduce(N+Y<=X,T,Hs) :- i_am_using_rule(new_le_6), int(N), simplify(N<=0, true), deduce(Y<=X, T, Hs). deduce(Y>=N+X,T,Hs) :- int(N), Y\=_+_, Y\=_-_, deduce(N+X<=Y,T,Hs). deduce(Y>=X+N,T,Hs) :- int(N), Y\=_+_, Y\=_-_, deduce(N+X<=Y,T,Hs). deduce(Y>=N-X,T,Hs) :- int(N), Y\=_+_, Y\=_-_, evaluate(-X,Z), deduce(N+Z<=Y,T,Hs). deduce(X+Y>=N,T,Hs) :- int(N), (\+ int(X)), (\+ int(Y)), evaluate(-X,Z), deduce(N+Z<=Y,T,Hs). deduce(X-Y>=N,T,Hs) :- int(N), (\+ int(X)), (\+ int(Y)), deduce(N+Y<=X,T,Hs). deduce(X+Y<=Z,T,Hs) :- i_am_using_rule(le_2), ( infrule(Y+X<=Z,Hs) ; infrule(Y<=Z-X,Hs) ; infrule(X<=Z-Y,Hs) ; evaluate(-X,W), infrule(Y<=W+Z,Hs) ; int(X), /* If X is an integer - */ ( /* - and X=0, prove Y>=Z or fail */ simplify(X=0,true), ( deduce(Y<=Z,T,Hs) ; !, fail ) ; int(Y), /* - and Y is an integer, sum and */ ( /* prove sum>=Z or fail */ evaluate(Y+X,W), deduce(Z>=W,T,Hs) ; !, fail ) ; int(Z), /* - and Z is an integer, take */ ( /* away X & show diff<=Y or fail*/ evaluate(Z-X,W), deduce(Y<=W,T,Hs) ; !, fail ) ; nonvar(Y), nonvar(Z), /* - and Y & Z are the same, show */ Y=Z, /* X<=0 or fail */ ( simplify(0>=X,true), Hs=[] ; !, fail ) ; simplify(0>=X,true), /* - and X<=0, prove Y>=Z */ deduce(Y<=Z,T,Hs) ; T=integer, evaluate(X-1,W), /* - add 1 to X giving more */ ( /* equivalent forms using the */ infrule(W+Y instead of >= */ ; infrule(Y+W=Y), deduce(X+W<=Z,T,H2), append(H1, H2, Hs) ; !, fail ) ). % Multiplication %================================================== % Equality %--------- deduce(X*Y=Z,T,Hs) :- i_am_using_rule(eq_2), ( infrule(Y*X=Z,Hs) ; int(X), /* If X is an integer - */ ( simplify(X=0,true), /* - and X=0, prove Z=0 or fail */ ( deduce(Z=0,T,Hs) ; !, fail ) ; simplify(X=1,true), /* - and X=1, prove Y=Z or fail */ ( deduce(Y=Z,T,Hs) ; !, fail ) ; simplify(X=(-1),true), /* - and X=-1, prove -Y=Z or fail */ ( evaluate(-Y,W), deduce(W=Z,T,Hs) ; !, fail ) ; int(Y), /* - and Y is an integer, multiply*/ ( /* by X & prove prod=Z or fail */ evaluate(X*Y,W), deduce(W=Z,T,Hs) ; !, fail ) ; infrule(Y=W,H1), /* - try substituting for Y */ testused(Y=W), deduce(X*W=Z,T,H2), append(H1, H2, Hs) ) ; (\+ int(X)), (\+ int(Y)), int(Z), simplify(Z=0,true), /* If only Z is an integer & Z=0, */ ( /* try to prove either X=0 or */ nonvar(X), deduce(X=0,T,Hs) /* Y=0. */ ; nonvar(Y), deduce(Y=0,T,Hs) ) ). % Alternative forms of N*X=Y - converted to this form to use above code, % avoiding looping. deduce(X*N=Y,T,Hs) :- int(N), deduce(N*X=Y,T,Hs). deduce(Y=N*X,T,Hs) :- int(N), Y\=_*_, deduce(N*X=Y,T,Hs). deduce(Y=X*N,T,Hs) :- int(N), Y\=_*_, deduce(N*X=Y,T,Hs). 
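% Illustrative worked example for the multiplication-equality rule eq_2 above
% (added for clarity; not part of the original source; x is hypothetical).
% Roughly: for the goal 2*x = 6 with a hypothesis x = 3, the substitution
% branch rewrites the goal to 2*3 = 6, which evaluates to 6 = 6 and closes
% under the general equality rule. The trailing clauses flip alternative
% forms such as 6 = 2*x into 2*x = 6 first.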
% Inequality %----------- deduce(X*Y<>Z,T,Hs) :- i_am_using_rule(ineq_2), ( infrule(Y*X<>Z,Hs) ; int(X), /* If X is an integer - */ ( simplify(X=0,true), /* - and X=0, prove Z<>0 or fail */ ( deduce(Z<>0,T,Hs) ; !, fail ) ; simplify(X=1,true), /* - and X=1, prove Y<>Z or fail */ ( deduce(Y<>Z,T,Hs) ; !, fail ) ; deduce(Y=Z,T,H1), /* - and X<>1, prove Y=Z<>0 */ ( infrule(Y<>0,H2) ; infrule(Z<>0,H2) ), append(H1, H2, Hs) ; simplify(X=(-1),true), /* - and X=-1, prove -Y<>Z or fail */ ( evaluate(-Y,W), deduce(W<>Z,T,Hs) ; !, fail ) ; int(Y), /* - and Y is an integer, multiply*/ ( /* by X & prove prod<>Z or fail */ evaluate(X*Y,W), deduce(W<>Z,T,Hs) ; !, fail ) ; infrule(Y=W,H1), testused(Y=W), /* - try substituting for Y */ deduce(X*W<>Z,T,H2), append(H1, H2, Hs) ) ; (\+ int(X)), (\+ int(Y)), int(Z), /* If only Z is an integer - */ ( simplify(Z=0,true), /* - and Z=0, prove neither X nor */ deduce(X<>0,T,H1), deduce(Y<>0,T,H2), /* Y is zero */ append(H1, H2, Hs) ; simplify(Z>0,true), /* - and Z>0, prove X*Y<0, i.e. */ nonvar(X), nonvar(Y), ( /* one is greater than zero and */ deduce(X>0,T,H1), deduce(Y<0,T,H2) /* the other is less than zero */ ; deduce(X<0,T,H1), deduce(Y>0,T,H2) ), append(H1, H2, Hs) ; simplify(0>Z,true), /* - and Z<0, prove X*Y>0, i.e. */ nonvar(X), nonvar(Y), ( /* both X and Y have the same */ deduce(X>0,T,H1), deduce(Y>0,T,H2) /* sign. */ ; deduce(X<0,T,H1), deduce(Y<0,T,H2) ), append(H1, H2, Hs) ) ). % Alternative forms of N*X<>Y - converted to this form to use above. deduce(X*N<>Y,T,Hs) :- int(N), deduce(N*X<>Y,T,Hs). deduce(Y<>N*X,T,Hs) :- int(N), Y\=_*_, deduce(N*X<>Y,T,Hs). deduce(Y<>X*N,T,Hs) :- int(N), Y\=_*_, deduce(N*X<>Y,T,Hs). % Greater than %------------- deduce(X*Y>Z,T,Hs) :- i_am_using_rule(gt_2), ( infrule(Y*X>Z,Hs) ; int(X), /* If X is an integer - */ ( simplify(X=0,true), /* - and X=0, prove Z<0 or fail */ ( deduce(Z<0,T,Hs) ; !, fail ) ; simplify(X=1,true), /* - and X=1, prove Y>Z or fail */ ( deduce(Y>Z,T,Hs) ; !, fail ) ; simplify(X=(-1),true), /* - and X=-1, prove -Y>Z or fail */ ( evaluate(-Y,W), deduce(W>Z,T,Hs) ; !, fail ) ; int(Y), /* - and Y is an integer, multiply*/ ( /* & prove prod>Z or fail */ evaluate(X*Y,W), deduce(W>Z,T,Hs) ; !, fail ) ; simplify(X>0,true), /* - try substituting for Y, */ ( /* depending on sign of X. For */ infrule(Y>=W,H1), testused(Y>=W),/* each sign of X, there are two*/ deduce(X*W>Z,T,H2) /* cases: >= then > and > then */ ; /* >= for X>0, or <= then > and */ infrule(Y>W,H1), deduce(X*W>=Z,T,H2) /* < then >= for X<0. */ ), append(H1, H2, Hs) ; simplify(0>X,true), ( infrule(Y<=W,H1), testused(W>=Y), deduce(X*W>Z,T,H2) ; infrule(Y=Z,T,H2) ), append(H1, H2, Hs) ) ; (\+ int(X)), (\+ int(Y)), int(Z), simplify(0>=Z,true), /* If only Z is an integer, and Z */ nonvar(X), nonvar(Y), ( /* is less than or equal to zero, */ deduce(X>0,T,H1), deduce(Y>0,T,H2) /* try proving X*Y product>0, by */ ; /* showing X & Y have same sign. */ deduce(X<0,T,H1), deduce(Y<0,T,H2) ), append(H1, H2, Hs) ). % Alternative forms of N*X>Y, converted to this form to use above. deduce(X*N>Y,T,Hs) :- int(N), deduce(N*X>Y,T,Hs). deduce(YY,T,Hs). deduce(YY,T,Hs). % Less-than %---------- deduce(X*Y0 or fail */ ( deduce(Z>0,T,Hs) ; !, fail ) ; simplify(X=1,true), /* - and X=1, prove Y0,true), /* - try substituting for Y, */ ( /* depending on sign of X. For */ infrule(Y<=W,H1), testused(W>=Y), /* each sign of X, there are two*/ deduce(X*W0, or >= then < and */ infrule(Y then <= for X<0. 
*/ ), append(H1, H2, Hs) ; simplify(0>X,true), ( infrule(Y>=W,H1), testused(Y>=W), deduce(X*WW,H1), deduce(X*W<=Z,T,H2) ), append(H1, H2, Hs) ) ; (\+ int(X)), (\+ int(Y)), /* If only Z is an integer, and Z */ int(Z), simplify(Z>=0,true), /* is greater than or equal to 0, */ nonvar(X), nonvar(Y), ( /* try proving X*Y product<0, by */ deduce(X>0,T,H1), deduce(Y<0,T,H2)/* showing X & Y have diff signs. */ ; deduce(X<0,T,H1), deduce(Y>0,T,H2) ), append(H1, H2, Hs) ). % Alternative forms of N*XN*X,T,Hs) :- int(N), Y\=_*_, deduce(N*XX*N,T,Hs) :- int(N), Y\=_*_, deduce(N*X=Z,T,Hs) :- i_am_using_rule(ge_3), ( infrule(Y*X>=Z,Hs) ; int(X), /* If X is an integer - */ ( simplify(X=0,true), /* - and X=0, prove Z<=0 or fail */ ( deduce(Z<=0,T,Hs) ; !, fail ) ; simplify(X=1,true), /* - and X=1, prove Y>=Z or fail */ ( deduce(Y>=Z,T,Hs) ; !, fail ) ; simplify(X=(-1),true), /* - and X=-1, prove -Y>=Z or fail*/ ( evaluate(-Y,W), deduce(W>=Z,T,Hs) ; !, fail ) ; int(Y), /* - and Y is an integer, multiply*/ ( /* and prove prod>=Z or fail */ evaluate(X*Y,W), deduce(W>=Z,T,Hs) ; !, fail ) ; simplify(X>0,true), /* - try finding substitution for */ ( /* Y depending on sign of X and */ infrule(Y>=W,H1), /* prove resulting expression. */ testused(Y>=W), deduce(X*W>=Z,T,H2) ; T = integer, /* - find W such that Y>W, i.e. */ infrule(Y > W, H1), /* Y >= W-1 for integer T. If */ simplify(W-1, Wminus1), /* X*(W-1)>=Z, then X*Y>=Z too. */ deduce(X*Wminus1 >= Z, T, H2) ), append(H1, H2, Hs) ; simplify(0>X,true), /* prove resulting expression. */ infrule(Y<=W,H1), testused(W>=Y), deduce(X*W>=Z,T,H2), append(H1, H2, Hs) ) ; (\+ int(X)), (\+ int(Y)), /* If only Z is an integer, less */ int(Z), simplify(0>=Z,true), /* than or equal to 0, prove X*Y */ nonvar(X), nonvar(Y), ( /* product greater than or equal */ deduce(X>=0,T,H1), deduce(Y>=0,T,H2) /* to 0. */ ; deduce(X<=0,T,H1), deduce(Y<=0,T,H2) ), append(H1, H2, Hs) ). % Alternative forms of N*X>=Y - converted to this form to use above code, % avoiding looping. deduce(X*N>=Y,T,Hs) :- int(N), deduce(N*X>=Y,T,Hs). deduce(Y<=N*X,T,Hs) :- int(N), Y\=_*_, deduce(N*X>=Y,T,Hs). deduce(Y<=X*N,T,Hs) :- int(N), Y\=_*_, deduce(N*X>=Y,T,Hs). % Special case for squaring X - see TN [K601-017] % Note this is OK for integers or reals. deduce(X*X>=0,_T,[]). % Less-than-or-equals %-------------------- deduce(X*Y<=Z,T,Hs) :- i_am_using_rule(le_3), ( infrule(Y*X<=Z,Hs) ; int(X), /* If X is an integer - */ ( simplify(X=0,true), /* - and X=0, prove Z>=0 or fail */ ( deduce(Z>=0,T,Hs) ; !, fail ) ; simplify(X=1,true), /* - and X=1, prove Y<=Z or fail */ ( deduce(Y<=Z,T,Hs) ; !, fail ) ; simplify(X=(-1),true), /* - and X=-1, prove -Y<=Z or fail*/ ( evaluate(-Y,W), deduce(W<=Z,T,Hs) ; !, fail ) ; int(Y), /* - and Y is an integer, multiply*/ ( /* and prove prod<=Z or fail */ evaluate(X*Y,W), deduce(W<=Z,T,Hs) ; !, fail ) ; simplify(X>0,true), /* - try finding substitution for */ ( /* Y depending on sign of X and */ infrule(Y<=W,H1), /* prove resulting expression. */ testused(W>=Y), deduce(X*W<=Z,T,H2) ; T = integer, /* - find W such that YX,true), infrule(Y>=W,H1), testused(Y>=W), deduce(X*W<=Z,T,H2), append(H1, H2, Hs) ) ; (\+ int(X)), (\+ int(Y)), /* If only Z is an integer greater*/ int(Z), simplify(Z>=0,true), /* than or equal to 0, prove X*Y */ nonvar(X), nonvar(Y), ( /* product less than or equal to */ deduce(X>=0,T,H1), /* 0. */ deduce(Y<=0,T,H2) ; deduce(X<=0,T,H1), deduce(Y>=0,T,H2) ), append(H1, H2, Hs) ). 
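% Illustrative worked example for the multiplication bound rules ge_3/le_3
% above (added for clarity; not part of the original source; y is
% hypothetical, and the trace is informal). The goal 2*y >= 10, given a
% hypothesis y >= 5, is closed by the positive-multiplier branch of ge_3,
% which substitutes the bound to leave 2*5 >= 10 and evaluates it. The
% squaring special case above also accepts x*x >= 0 outright, for integers
% or reals.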
% Alternative forms of N*X<=Y - converted to this form to use above code, % avoiding looping. deduce(X*N<=Y,T,Hs) :- int(N), deduce(N*X<=Y,T,Hs). deduce(Y>=N*X,T,Hs) :- int(N), Y\=_*_, deduce(N*X<=Y,T,Hs). deduce(Y>=X*N,T,Hs) :- int(N), Y\=_*_, deduce(N*X<=Y,T,Hs). % General rules %================================================== % Equality %--------- deduce(X=Y,T,Hs) :- i_am_using_rule(eq_gen), ( X=Y, Hs=[] /* Proved if same expression. */ ; int(X), /* If both ints, equal or fail. */ int(Y), ( simplify(X=Y,true), Hs=[] ; !, fail ) ; infrule(X=Z,H1), /* Try transitive chain. */ testused(X=Z), deduce(Z=Y,T,H2), append(H1, H2, Hs) ). % Inequality %----------- % Add this special case to deal with: % X > I -> X <> I % Where I is an integer literal. Covers the common case of: % X > 0 -> X <> 0 % Which crops up in SPARK. deduce(X<>I,T,Hs) :- i_am_using_rule(ineq_gen_special), checktype(X, integer), int(I), deduce(X > I, T, Hs). % Add this special case to deal with: % X < I -> X <> I % Where I is an integer literal. Covers the common case of: % X < 0 -> X <> 0 % Which crops up in SPARK. deduce(X<>I,T,Hs) :- i_am_using_rule(ineq_gen_special2), checktype(X, integer), int(I), deduce(X < I, T, Hs). % Add this special case to deal with: % abs(X) > 0 -> X <> 0 % This is OK for integers or reals. deduce(X<>0,T,Hs) :- i_am_using_rule(ineq_gen_special3), % use safe_deduce/3 here to guard against unbounded recursion safe_deduce(abs(X) > 0, T, Hs). % General case. deduce(X<>Y,T,Hs) :- i_am_using_rule(ineq_gen), ( int(X), /* If X is an integer - */ ( int(Y), /* - and Y is an integer, prove */ ( /* not equal or fail */ (\+ simplify(X=Y,true)), Hs=[] ; !, fail ) ; deduce(Y<>X,T,Hs) /* - swap sides and prove */ ) ; ( /* Equivalent forms of the goal */ infrule(Z+X=Y,H1) /* giving subgoals */ ; infrule(X+Z=Y,H1) ; infrule(X-Z=Y,H1) ), ( int(Z), (\+ simplify(Z=0,true)), H2=[] ; infrule(Z<>0,H2) ), append(H1, H2, Hs) ; infrule(X<>Z,H1), deduce(Z=Y,T,H2), append(H1, H2, Hs) ; infrule(X=Z,H1), testused(X=Z), deduce(Z<>Y,T,H2), append(H1, H2, Hs) ). % Greater than %------------- deduce(X>Y,T,Hs) :- i_am_using_rule(gt_gen), ( int(X), /* If X is an integer - */ ( int(Y), /* - and Y is an integer, prove */ ( simplify(X>Y,true) ; !, fail ), /* X greater than Y or fail */ Hs=[] ; deduce(Y=Y,H1) /* Equivalent forms of the */ ; /* expression using different */ infrule(X+Z>=Y,H1) /* operator and integer addition */ ), /* or subtraction. */ ( int(Z), simplify(0>Z,true), H2=[] ; infrule(Z<0,H2) ), append(H1, H2, Hs) ; infrule(X-Z>=Y,H1), ( int(Z), simplify(Z>0,true), H2=[] ; infrule(Z>0,H2) ), append(H1, H2, Hs) ; infrule(X>=Z,H1), testused(X>=Z), /* Two possible intermediate */ deduce(Z>Y,T,H2), /* steps of deduction. */ append(H1, H2, Hs) ; infrule(X>Z,H1), testused(X>Z), deduce(Z>=Y,T,H2), append(H1, H2, Hs) ). % Less-than %---------- deduce(XX,true) ; !, fail ), /* X is less than Y or fail */ Hs=[] ; deduce(Y>X,T,Hs) /* - swap sides then prove. */ ) ; ( infrule(Z+X<=Y,H1) /* Equivalent forms of the */ ; /* expression using different */ infrule(X+Z<=Y,H1) /* operator and integer addition */ ), /* or subtraction. */ ( int(Z), simplify(Z>0,true), H2=[] ; infrule(Z>0,H2) ), append(H1, H2, Hs) ; infrule(X-Z<=Y,H1), ( int(Z), simplify(0>Z,true), H2=[] ; infrule(Z<0,H2) ), append(H1, H2, Hs) ; infrule(X<=Z,H1), testused(Z>=X), /* Two possible intermediate */ deduce(ZX), deduce(Z<=Y,T,H2), append(H1, H2, Hs) ). 
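% Illustrative worked example for the general inequality rules above (added
% for clarity; not part of the original source; x is hypothetical).
% ineq_gen_special reduces a goal x <> 0, for integer-typed x, to x > 0, so
% a hypothesis x > 0 discharges it; ineq_gen_special2 covers the mirror case
% via x < 0, and ineq_gen_special3 accepts the goal whenever abs(x) > 0 can
% be deduced.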
% Greater than or equals %----------------------- deduce(X>=Y,T,Hs) :- i_am_using_rule(ge_gen), ( int(X), /* If X is an integer - */ ( int(Y), /* - and Y is an integer, prove */ ( /* X>=Y or fail */ simplify(X>=Y,true), Hs=[] ; !, fail ) ; deduce(Y<=X,T,Hs) /* - swap sides then prove. */ ) ; nonvar(X), nonvar(Y), X=Y, Hs=[] /* Proved if X & Y are the same. */ ; ( infrule(Z+X>=Y,H1) /* Equivalent forms of the */ ; infrule(X+Z>=Y,H1) ), ( /* expression using different */ int(Z), /* operator and integer addition */ simplify(0>=Z,true), H2=[] /* or subtraction. */ ; infrule(Z<=0,H2) ), append(H1, H2, Hs) ; infrule(X-Z>=Y,H1), ( int(Z), simplify(Z>=0,true), H2=[] ; infrule(Z>=0,H2) ), append(H1, H2, Hs) ; infrule(X>=Z,H1), /* Try an intermediate step. */ testused(X>=Z), deduce(Z>=Y,T,H2), append(H1, H2, Hs) ). % Less-than-or-equals %-------------------- deduce(X<=Y,T,Hs) :- i_am_using_rule(le_gen), ( int(X), /* If X is an integer - */ ( int(Y), /* - and Y is an integer, prove */ ( /* Y>=X or fail */ simplify(Y>=X,true), Hs=[] ; !, fail ) ; deduce(Y>=X,T,Hs) /* - swap sides then prove. */ ) ; nonvar(X), nonvar(Y), /* Proved if X & Y are the same. */ X=Y, Hs=[] ; ( /* Equivalent forms of the */ infrule(Z+X<=Y,H1) /* expression using different */ ; /* operator and integer addition */ infrule(X+Z<=Y,H1) /* or subtraction. */ ), ( int(Z), simplify(Z>=0,true), H2=[] ; infrule(Z>=0,H2) ), append(H1, H2, Hs) ; infrule(X-Z<=Y,H1), ( int(Z), simplify(0>=Z,true), H2=[] ; infrule(Z<=0,H2) ), append(H1, H2, Hs) ; infrule(X<=Z,H1), /* Try an intermediate step. */ testused(Z>=X), deduce(Z<=Y,T,H2), append(H1, H2, Hs) ). % Bitwise logical operator rules %================================================== deduce(X, T, Hs) :- bitwise_deduce(X, T, Hs). /* try normal way round */ % Other bitwise bound permutations (but without introducing % non-termination). deduce(bit__and(X,Y) >= Z, T, Hs) :- Z \= bit__and(_,_), Z \= bit__or(_,_), Z \= bit__xor(_,_), !, bitwise_deduce(Z <= bit__and(X,Y), T, Hs). deduce(Z >= bit__and(X,Y), T, Hs) :- Z \= bit__and(_,_), Z \= bit__or(_,_), Z \= bit__xor(_,_), !, bitwise_deduce(bit__and(X,Y) <= Z, T, Hs). deduce(bit__or(X,Y) >= Z, T, Hs) :- Z \= bit__and(_,_), Z \= bit__or(_,_), Z \= bit__xor(_,_), !, bitwise_deduce(Z <= bit__or(X,Y), T, Hs). deduce(Z >= bit__or(X,Y), T, Hs) :- Z \= bit__and(_,_), Z \= bit__or(_,_), Z \= bit__xor(_,_), !, bitwise_deduce(bit__or(X,Y) <= Z, T, Hs). deduce(bit__xor(X,Y) >= Z, T, Hs) :- Z \= bit__and(_,_), Z \= bit__or(_,_), Z \= bit__xor(_,_), !, bitwise_deduce(Z <= bit__xor(X,Y), T, Hs). deduce(Z >= bit__xor(X,Y), T, Hs) :- Z \= bit__and(_,_), Z \= bit__or(_,_), Z \= bit__xor(_,_), !, bitwise_deduce(bit__xor(X,Y) <= Z, T, Hs). % Modulus bounds rules %================================================== % Rules for SPARK modulus (+, - include 0 for X, exclude for Y): % % -------------------------------------------------------------------------- % X|Y| X MOD Y | Example % -|-|----------------|----------------------------------------------------- % +|+| >= 0, <= Y-1 | 0 <= X mod 5 <= 4, for X>=0 e.g. 7 mod 5 = 2 % -|+| >= 0, <= Y-1 | 0 <= X mod 3 <= 2, for X<=0 e.g. -8 mod 3 = 1 % +|-| >= Y+1, <= 0 | -5 <= X mod -6 <= 0, for X>=0 e.g. 13 mod -6 = -5 % -|-| >= Y+1, <= 0 | -3 <= X mod -4 <= 0, for X<=0, e.g. 
-7 mod -4 = -3 % -------------------------------------------------------------------------- % Basic rules %------------ deduce(N <= _X mod Y, T, Hs) :- /* X+ Y+ lwb */ get_provenance_framework(spark), /* X- Y+ lwb */ ( int(N), !, simplify(N <= 0, true), H1 = [] ; infrule(N <= 0, H1) ), ( deduce(Y >= 1, T, H2) /* must be integer */ ; deduce(Y > 0, T, H2) ), append(H1, H2, HL), sort(HL, Hs), !. deduce(_ mod Y >= N, T, Hs) :- /* X+ Y+ lwb */ get_provenance_framework(spark), /* X- Y+ lwb */ ( int(N), !, simplify(N <= 0, true), H1 = [] ; infrule(N <= 0, H1) ), ( deduce(Y >= 1, T, H2) /* must be integer */ ; deduce(Y > 0, T, H2) ), append(H1, H2, HL), sort(HL, Hs), !. deduce(Z <= _X mod Y, T, Hs) :- /* X- Y- lwb */ get_provenance_framework(spark), /* X+ Y- lwb */ ( deduce(Y <= - 1, T, H1) /* must be integer */ ; deduce(Y < 0, T, H1) ), ( deduce(Z <= Y + 1, T, H2) ; deduce(Z < Y + 2, T, H2) ), !, append(H1, H2, Hs). deduce(_ mod Y >= Z, T, Hs) :- /* X- Y- lwb */ get_provenance_framework(spark), /* X+ Y- lwb */ ( deduce(Y <= - 1, T, H1) /* must be integer */ ; deduce(Y < 0, T, H1) ), ( deduce(Z <= Y + 1, T, H2) ; deduce(Z < Y + 2, T, H2) ), !, append(H1, H2, Hs). deduce(_ mod Y <= 0, T, Hs) :- /* X- Y- upb */ get_provenance_framework(spark), /* X+ Y- upb */ ( deduce(Y <= - 1, T, Hs) /* must be integer */ ; deduce(Y < 0, T, Hs) ), !. deduce(0 >= _X mod Y, T, Hs) :- /* X- Y- upb */ get_provenance_framework(spark), /* X+ Y- upb */ ( deduce(Y <= - 1, T, Hs) /* must be integer */ ; deduce(Y < 0, T, Hs) ), !. deduce(_ mod Y <= Z, T, Hs) :- /* X+ Y+ upb */ get_provenance_framework(spark), /* X- Y+ upb */ ( deduce(Y >= 1, T, H1) /* must be integer */ ; deduce(Y > 0, T, H1) ), ( deduce(Y <= Z + 1, T, H2) ; deduce(Y - 1 <= Z, T, H2) ; deduce(Y < Z + 2, T, H2) ; deduce(Y - 2 < Z, T, H2) ), !, append(H1, H2, Hs). deduce(Z >= _X mod Y, T, Hs) :- /* X+ Y+ upb */ get_provenance_framework(spark), /* X- Y+ upb */ ( deduce(Y >= 1, T, H1) /* must be integer */ ; deduce(Y > 0, T, H1) ), ( deduce(Y <= Z + 1, T, H2) ; deduce(Y - 1 <= Z, T, H2) ; deduce(Y < Z + 2, T, H2) ; deduce(Y - 2 < Z, T, H2) ), !, append(H1, H2, Hs). % Extra rules %------------ deduce(X mod Y < Z, T, Hs) :- get_provenance_framework(spark), ( int(Z), Z1 iss Z - 1 ; \+ int(Z), Z1 = Z - 1 ), deduce(X mod Y <= Z1, T, Hs). deduce(Z > X mod Y, T, Hs) :- get_provenance_framework(spark), ( int(Z), Z1 iss Z - 1 ; \+ int(Z), Z1 = Z - 1 ), deduce(X mod Y <= Z1, T, Hs). deduce(Z < X mod Y, T, Hs) :- get_provenance_framework(spark), ( int(Z), Z1 iss Z + 1 ; \+ int(Z), Z1 = Z + 1 ), deduce(Z1 <= X mod Y, T, Hs). deduce(X mod Y > Z, T, Hs) :- get_provenance_framework(spark), ( int(Z), Z1 iss Z + 1 ; \+ int(Z), Z1 = Z + 1 ), deduce(Z1 <= X mod Y, T, Hs). %=============================================================================== % bitwise_deduce(+R,+T,-Hs). %------------------------------------------------------------------------------- % Deduce expression R, related to bitwise operations, whose lhs and rhs are % of type T, giving Hs instantiated to list of hypotheses used. %=============================================================================== % bit__and: lower bound %---------------------- bitwise_deduce(Z <= bit__and(X, X), T, Hs) :- /* bit__and(X, X) = X: proceed from there */ !, deduce(Z <= X, T, Hs). bitwise_deduce(0 <= bit__and(X, Y), T, Hs) :- /* 0 <= bit__and(X, Y) if 0<=X and 0<=Y */ deduce_once(X >= 0, T, H1), deduce_once(Y >= 0, T, H2), !, append(H1, H2, Hs). 
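% Illustrative worked examples for the SPARK mod bound clauses above (added
% for clarity; not part of the original source; x is hypothetical, and the
% traces are informal):
%   x mod 8 >= 0   should be accepted: the lower-bound rule needs 0 <= 0 and
%                  the divisor bound 8 >= 1, both of which simplify to true.
%   x mod 8 <= 10  should be accepted: the divisor is positive (8 >= 1) and
%                  one of the alternative checks, Y <= Z + 1, i.e. 8 <= 11,
%                  succeeds.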
bitwise_deduce(Z <= bit__and(X, Y), T, Hs) :- /* Z <= bit__and(X, Y) if 0<=X and 0<=Y and Z<=0 */ deduce_once(X >= 0, T, H1), deduce_once(Y >= 0, T, H2), deduce_once(Z <= 0, T, H3), !, append(H2, H3, H23), append(H1, H23, Hs), !. % bit__and: upper bound %---------------------- bitwise_deduce(bit__and(X, X) <= Z, T, Hs) :- /* bit__and(X, X) = X: proceed from there */ !, deduce(X <= Z, T, Hs). bitwise_deduce(bit__and(X, Y) <= X, T, Hs) :- /* bit__and(X, Y) <= X if 0<=X and 0<=Y */ deduce_once(X >= 0, T, H1), deduce_once(Y >= 0, T, H2), !, append(H1, H2, Hs). bitwise_deduce(bit__and(X, Y) <= Y, T, Hs) :- /* bit__and(X, Y) <= Y if 0<=X and 0<=Y */ deduce_once(X >= 0, T, H1), deduce_once(Y >= 0, T, H2), !, append(H1, H2, Hs). bitwise_deduce(bit__and(X, Y) <= Z, T, Hs) :- /* bit__and(X, Y) <= Z if 0<=X<=Z or 0<=Y<=Z */ deduce_once(X >= 0, T, H1), deduce_once(Y >= 0, T, H2), ( ( Z = X+Y ; Z = Y+X ), /* bit__and(X, Y) <= X+Y, because both X<=X+Y and Y<=X+Y */ H3 = [] ; deduce_once(X <= Z, T, H3) ; deduce_once(Y <= Z, T, H3) ; deduce(X + Y <= Z, T, H3) ), !, append(H2, H3, H23), append(H1, H23, Hs), !. % bit__or: lower bound %--------------------- bitwise_deduce(Z <= bit__or(X, X), T, Hs) :- /* bit__or(X, X) = X: proceed from there */ !, deduce(Z <= X, T, Hs). bitwise_deduce(X <= bit__or(X, Y), T, Hs) :- /* X <= bit__or(X, Y) if 0<=X and 0<=Y */ deduce_once(X >= 0, T, H1), deduce_once(Y >= 0, T, H2), !, append(H1, H2, Hs). bitwise_deduce(Y <= bit__or(X, Y), T, Hs) :- /* Y <= bit__or(X, Y) if 0<=X and 0<=Y */ deduce_once(X >= 0, T, H1), deduce_once(Y >= 0, T, H2), !, append(H1, H2, Hs). bitwise_deduce(Z <= bit__or(X, Y), T, Hs) :- /* Z <= bit__or(X, Y) if Z<=X and Z<=Y and 0<=X and 0<=Y */ deduce_once(X >= 0, T, H1), deduce_once(Y >= 0, T, H2), ( deduce_once(Z <= 0, T, H3), H4 = [] ; deduce_once(Z <= X, T, H3), deduce_once(Z <= Y, T, H4) ), !, append(H3, H4, H34), append(H2, H34, H234), append(H1, H234, Hs), !. % bit__or: upper bound %--------------------- bitwise_deduce(bit__or(X, X) <= Z, T, Hs) :- /* bit__or(X, X) = X: proceed from there */ !, deduce(X <= Z, T, Hs). bitwise_deduce(bit__or(X, Y) <= X+Y, T, Hs) :- /* bit__or(X, Y) <= X+Y if 0<=X and 0<=Y */ deduce_once(X >= 0, T, H1), deduce_once(Y >= 0, T, H2), !, append(H1, H2, Hs). bitwise_deduce(bit__or(X, Y) <= Z, T, Hs) :- /* bit__or(X, Y) <= Z if 0<=X and 0<=Y and one of... */ deduce_once(X >= 0, T, H1), deduce_once(Y >= 0, T, H2), ( int(Z), Z >= 0, next_two_to_N_minus_1_above(Z, Z), /* ... Z = 2**N-1 for some N, and */ deduce(X <= Z, T, H3), /* ... both X and Y are <= Z */ deduce(Y <= Z, T, H4), H5 = [] ; deduce(X+Y <= Z, T, H3), /* ... X+Y <= Z, or ... */ H4 = [], H5 = [] ; infrule_int_rhs(X <= Xint, H3), next_two_to_N_minus_1_above(Xint, XUpper2toNm1), /* ... X <= 2**M-1 and ... */ infrule_int_rhs(Y <= Yint, H4), next_two_to_N_minus_1_above(Yint, YUpper2toNm1), /* ... Y <= 2**N-1 and ... */ ( XUpper2toNm1 >= YUpper2toNm1, ZL = XUpper2toNm1 ; XUpper2toNm1 < YUpper2toNm1, ZL = YUpper2toNm1 ), deduce(ZL <= Z, T, H5) /* ... max(2**M-1, 2**N-1) <= Z */ ), !, append(H4, H5, H45), append(H3, H45, H345), append(H2, H345, H2345), append(H1, H2345, Hs), !. % bit__xor: lower bound %---------------------- bitwise_deduce(Z <= bit__xor(X, X), T, Hs) :- /* bit__xor(X, X) = 0: proceed from there */ !, deduce(Z <= 0, T, Hs). bitwise_deduce(0 <= bit__xor(X, Y), T, Hs) :- /* 0 <= bit__xor(X, Y) if 0<=X and 0<=Y */ deduce_once(X >= 0, T, H1), deduce_once(Y >= 0, T, H2), !, append(H1, H2, Hs). 
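% Illustrative worked examples for the bit__and and bit__or bound clauses
% above (added for clarity; not part of the original source; x and y are
% hypothetical): with hypotheses x >= 0 and y >= 0, both bit__and(x, y) <= x
% and 0 <= bit__or(x, y) follow directly. For byte-range operands, i.e.
% hypotheses x <= 255 and y <= 255 with 255 = 2**8 - 1, the goal
% bit__or(x, y) <= 255 should be accepted through the branch that recognises
% upper bounds of the form 2**N - 1.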
bitwise_deduce(Z <= bit__xor(X, Y), T, Hs) :- /* Z <= bit__xor(X, Y) if 0<=X and 0<=Y and Z<=0 */ deduce_once(X >= 0, T, H1), deduce_once(Y >= 0, T, H2), ( deduce_once(Z <= 0, T, H3) ; deduce(Z <= X - Y, T, H3) /* abs(X-Y) <= bit__xor(X, Y), since bit__and(X, Y) >= X and >= Y */ ; /* and bit__xor = X+Y - 2 * bit__and */ deduce(Z <= Y - X, T, H3) ), !, append(H2, H3, H23), append(H1, H23, Hs), !. % bit__xor: upper bound %---------------------- bitwise_deduce(bit__xor(X, X) <= Z, T, Hs) :- /* bit__xor(X, X) = 0: proceed from there */ !, deduce(0 <= Z, T, Hs). bitwise_deduce(bit__xor(X, Y) <= X+Y, T, Hs) :- /* bit__xor(X, Y) <= X+Y if 0<=X and 0<=Y */ deduce_once(X >= 0, T, H1), deduce_once(Y >= 0, T, H2), !, append(H1, H2, Hs). bitwise_deduce(bit__xor(X, Y) <= Z, T, Hs) :- /* bit__xor(X, Y) <= Z if 0<=X and 0<=Y and one of... */ deduce_once(X >= 0, T, H1), deduce_once(Y >= 0, T, H2), ( int(Z), Z >= 0, next_two_to_N_minus_1_above(Z, Z), /* ... Z = 2**N-1 for some N and */ safe_deduce(X <= Z, T, H3), /* ... both X and Y are <= Z, or ... */ safe_deduce(Y <= Z, T, H4), H5 = [] ; safe_deduce(X+Y <= Z, T, H3), /* ... X+Y <= Z, or ... */ H4 = [], H5 = [] ; infrule_int_rhs(X <= Xint, H3), next_two_to_N_minus_1_above(Xint, XUpper2toNm1), /* ... X <= 2**M-1 and ... */ infrule_int_rhs(Y <= Yint, H4), next_two_to_N_minus_1_above(Yint, YUpper2toNm1), /* ... Y <= 2**N-1 and ... */ ( XUpper2toNm1 >= YUpper2toNm1, ZL = XUpper2toNm1 ; XUpper2toNm1 < YUpper2toNm1, ZL = YUpper2toNm1 ), deduce(ZL <= Z, T, H5) /* ... max(2**M-1, 2**N-1) <= Z */ ), !, append(H4, H5, H45), append(H3, H45, H345), append(H2, H345, H2345), append(H1, H2345, Hs), !. %=============================================================================== % deduce_once(+GOAL, +Type, -HypList). %------------------------------------------------------------------------------- % Succeed once, or fail. %=============================================================================== deduce_once(X >= N, T, H) :- ( int(X), simplify(X >= N, true), H = [] ; deduce(X >= N, T, H) ; infrule(X >= XL, Ha), testused(X >= XL), deduce(XL >= N, T, Hb), append(Ha, Hb, H) ), !. deduce_once(Z <= N, T, H) :- ( int(Z), simplify(Z <= N, true), H = [] ; deduce(Z <= N, T, H) ; infrule(Z <= ZL, Ha), testused(ZL >= Z), deduce(ZL <= N, T, Hb), append(Ha, Hb, H) ), !. %=============================================================================== % infrule_int_rhs(+GOAL, -HypList). %------------------------------------------------------------------------------- % Solve with integer r.h.s. %=============================================================================== infrule_int_rhs(X <= Xint, H) :- ( infrule(X <= Xint, H) ; infrule(X <= XU, Ha), testused(XU >= X), infrule(XU <= Xint, Hb), append(Ha, Hb, H) ), int(Xint). %=============================================================================== % try_new_deduction_strategies(+GOAL, +Type, -HypList). %------------------------------------------------------------------------------- % Deduce expression GOAL, of type T, giving Hs instantiated to list of % hypotheses used. These new deduction strategies cater for for abs, div, % mod and exponentiation (to the power of) rules. %=============================================================================== % Inference rules %---------------- try_new_deduction_strategies(Goal, _T, Hs) :- inference_rule(_Name, Goal, Conditions), ground(Conditions), safe_infer_side_conditions(Conditions, Hs), !. 
try_new_deduction_strategies(X <= Atomic, _T, Hs) :- int_enum_lit_or_const(Atomic, Which), ( inference_rule(_Name, X <= OtherAtomic, Conditions) ; inference_rule(_Name, OtherAtomic >= X, Conditions) ), int_enum_lit_or_const(OtherAtomic, Which), simplify(OtherAtomic <= Atomic, true), ground(Conditions), safe_infer_side_conditions(Conditions, Hs), !. try_new_deduction_strategies(Atomic >= X, _T, Hs) :- int_enum_lit_or_const(Atomic, Which), ( inference_rule(_Name, X <= OtherAtomic, Conditions) ; inference_rule(_Name, OtherAtomic >= X, Conditions) ), int_enum_lit_or_const(OtherAtomic, Which), simplify(OtherAtomic <= Atomic, true), ground(Conditions), safe_infer_side_conditions(Conditions, Hs), !. try_new_deduction_strategies(X >= Atomic, _T, Hs) :- int_enum_lit_or_const(Atomic, Which), ( inference_rule(_Name, X >= OtherAtomic, Conditions) ; inference_rule(_Name, OtherAtomic <= X, Conditions) ), int_enum_lit_or_const(OtherAtomic, Which), simplify(OtherAtomic >= Atomic, true), ground(Conditions), safe_infer_side_conditions(Conditions, Hs), !. try_new_deduction_strategies(Atomic <= X, _T, Hs) :- int_enum_lit_or_const(Atomic, Which), ( inference_rule(_Name, X >= OtherAtomic, Conditions) ; inference_rule(_Name, OtherAtomic <= X, Conditions) ), int_enum_lit_or_const(OtherAtomic, Which), simplify(OtherAtomic >= Atomic, true), ground(Conditions), safe_infer_side_conditions(Conditions, Hs), !. % Additional rules for inequalities using transitivity. try_new_deduction_strategies(X <= Atomic, _T, Hs) :- int_or_enum_lit(Atomic, Which), ( inference_rule(_Name, Ident <= OtherAtomic, []) ; inference_rule(_Name, OtherAtomic >= Ident, []) ), int_or_enum_lit(OtherAtomic, Which), simplify(OtherAtomic <= Atomic, true), safe_infer_side_conditions([X <= Ident], Hs), !. try_new_deduction_strategies(Atomic >= X, _T, Hs) :- int_or_enum_lit(Atomic, Which), ( inference_rule(_Name, Ident <= OtherAtomic, []) ; inference_rule(_Name, OtherAtomic >= Ident, []) ), int_or_enum_lit(OtherAtomic, Which), simplify(OtherAtomic <= Atomic, true), safe_infer_side_conditions([X <= Ident], Hs), !. try_new_deduction_strategies(X >= Atomic, _T, Hs) :- int_or_enum_lit(Atomic, Which), ( inference_rule(_Name, Ident >= OtherAtomic, []) ; inference_rule(_Name, OtherAtomic <= Ident, []) ), int_or_enum_lit(OtherAtomic, Which), simplify(OtherAtomic >= Atomic, true), safe_infer_side_conditions([X >= Ident], Hs), !. try_new_deduction_strategies(Atomic <= X, _T, Hs) :- int_or_enum_lit(Atomic, Which), ( inference_rule(_Name, Ident >= OtherAtomic, []) ; inference_rule(_Name, OtherAtomic <= Ident, []) ), int_or_enum_lit(OtherAtomic, Which), simplify(OtherAtomic >= Atomic, true), safe_infer_side_conditions([X >= Ident], Hs), !. try_new_deduction_strategies(X <= Atomic, T, Hs) :- T \= real, int_or_enum_lit(Atomic, Which), ( Which = integer, simplify(Atomic + 1, Next) ; Which = enum, simplify(succ(Atomic), Next) ), int_or_enum_lit(Next, Which), ( inference_rule(_Name, Ident <= Next, []) ; inference_rule(_Name, Next >= Ident, []) ), safe_infer_side_conditions([X < Ident], Hs), !. try_new_deduction_strategies(Atomic >= X, T, Hs) :- T \= real, int_or_enum_lit(Atomic, Which), ( Which = integer, simplify(Atomic + 1, Next) ; Which = enum, simplify(succ(Atomic), Next) ), int_or_enum_lit(Next, Which), ( inference_rule(_Name, Ident <= Next, []) ; inference_rule(_Name, Next >= Ident, []) ), safe_infer_side_conditions([X < Ident], Hs), !. 
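% Illustrative worked example for the inference_rule-based strategies above
% (added for clarity; not part of the original source; the rule name r1 and
% the variable x are hypothetical, and inference_rule/3 is taken to be the
% stored rule database, whatever its provenance). If a fact
% inference_rule(r1, x <= 100, []) is present, then a goal x <= 200 should be
% discharged by the transitivity clauses above: 100 <= 200 simplifies to true
% and there are no side conditions left to infer.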
try_new_deduction_strategies(X >= Atomic, T, Hs) :- T \= real, int_or_enum_lit(Atomic, Which), ( Which = integer, simplify(Atomic - 1, Prev) ; Which = enum, simplify(pred(Atomic), Prev) ), int_or_enum_lit(Prev, Which), ( inference_rule(_Name, Ident >= Prev, []) ; inference_rule(_Name, Prev <= Ident, []) ), safe_infer_side_conditions([X > Ident], Hs), !. try_new_deduction_strategies(Atomic <= X, T, Hs) :- T \= real, int_or_enum_lit(Atomic, Which), ( Which = integer, simplify(Atomic - 1, Prev) ; Which = enum, simplify(pred(Atomic), Prev) ), int_or_enum_lit(Prev, Which), ( inference_rule(_Name, Ident >= Prev, []) ; inference_rule(_Name, Prev <= Ident, []) ), safe_infer_side_conditions([X > Ident], Hs), !. % Rules for common enumeration-type bounds reasoning in loops. try_new_deduction_strategies(LWB <= succ(X), _T, Hs) :- int_or_enum_lit(LWB, enum), standard_infrule(LWB <= X, H1), standard_infrule(X < C, H2), atom(C), ( inference_rule(_Name, C <= UPB, []) ; inference_rule(_Name, UPB >= C, []) ), int_or_enum_lit(UPB, enum), safe_infer_side_conditions([LWB <= X], H3), !, append(H1, H2, H12), merge_sort(H12, H3, Hs). try_new_deduction_strategies(succ(X) >= LWB, _T, Hs) :- int_or_enum_lit(LWB, enum), standard_infrule(LWB <= X, H1), standard_infrule(X < C, H2), atom(C), ( inference_rule(_Name, C <= UPB, []) ; inference_rule(_Name, UPB >= C, []) ), int_or_enum_lit(UPB, enum), safe_infer_side_conditions([LWB <= X], H3), !, append(H1, H2, H12), merge_sort(H12, H3, Hs). try_new_deduction_strategies(C <= succ(X), _T, Hs) :- \+ int_or_enum_lit(C, _), var_const(C, _T, c), standard_infrule(C <= X, H1), standard_infrule(X < D, H2), atom(D), ( inference_rule(_Name, D <= UPB, []) ; inference_rule(_Name, UPB >= D, []) ), int_or_enum_lit(UPB, enum), !, merge_sort(H1, H2, Hs). try_new_deduction_strategies(succ(X) >= C, _T, Hs) :- \+ int_or_enum_lit(C, _), var_const(C, _T, c), standard_infrule(C <= X, H1), standard_infrule(X < D, H2), atom(D), ( inference_rule(_Name, D <= UPB, []) ; inference_rule(_Name, UPB >= D, []) ), int_or_enum_lit(UPB, enum), !, merge_sort(H1, H2, Hs). try_new_deduction_strategies(succ(X) <= UPB, _T, Hs) :- int_or_enum_lit(UPB, enum), standard_infrule(X < C, Hs), atom(C), ( inference_rule(_Name, C <= E, []) ; inference_rule(_Name, E >= C, []) ), int_or_enum_lit(E, enum), simplify(E <= UPB, true), !. try_new_deduction_strategies(UPB >= succ(X), _T, Hs) :- int_or_enum_lit(UPB, enum), standard_infrule(X < C, Hs), atom(C), ( inference_rule(_Name, C <= E, []) ; inference_rule(_Name, E >= C, []) ), int_or_enum_lit(E, enum), simplify(E <= UPB, true), !. try_new_deduction_strategies(succ(X) <= C, _T, Hs) :- \+ int_or_enum_lit(C, _), var_const(C, _T, c), standard_infrule(X < C, Hs), atom(C), ( inference_rule(_Name, C <= E, []) ; inference_rule(_Name, E >= C, []) ), int_or_enum_lit(E, enum), !. try_new_deduction_strategies(C >= succ(X), _T, Hs) :- \+ int_or_enum_lit(C, _), var_const(C, _T, c), standard_infrule(X < C, Hs), atom(C), ( inference_rule(_Name, C <= E, []) ; inference_rule(_Name, E >= C, []) ), int_or_enum_lit(E, enum), !. try_new_deduction_strategies(pred(X) <= UPB, _T, Hs) :- int_or_enum_lit(UPB, enum), standard_infrule(X <= UPB, H1), standard_infrule(X > C, H2), atom(C), ( inference_rule(_Name, C >= LWB, []) ; inference_rule(_Name, LWB <= C, []) ), int_or_enum_lit(LWB, enum), safe_infer_side_conditions([UPB >= X], H3), !, append(H1, H2, H12), merge_sort(H12, H3, Hs). 
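% Illustrative worked example for the succ/pred enumeration-bound strategies
% above (added for clarity; not part of the original source; the identifiers
% i, max_colour and blue are hypothetical, and the trace is informal). These
% clauses target the common loop pattern where a counter's successor must
% stay within an enumeration subtype: given a hypothesis i < max_colour (a
% constant), and a stored rule such as inference_rule(r2, max_colour <= blue, [])
% with blue an enumeration literal no greater than the goal's upper bound, a
% goal of the form succ(i) <= blue should be accepted.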
try_new_deduction_strategies(UPB >= pred(X), _T, Hs) :- int_or_enum_lit(UPB, enum), standard_infrule(X <= UPB, H1), standard_infrule(X > C, H2), atom(C), ( inference_rule(_Name, C >= LWB, []) ; inference_rule(_Name, LWB <= C, []) ), int_or_enum_lit(LWB, enum), safe_infer_side_conditions([UPB >= X], H3), !, append(H1, H2, H12), merge_sort(H12, H3, Hs). try_new_deduction_strategies(pred(X) <= C, _T, Hs) :- \+ int_or_enum_lit(C, _), var_const(C, _T, c), standard_infrule(X <= C, H1), standard_infrule(X > D, H2), atom(D), ( inference_rule(_Name, D >= LWB, []) ; inference_rule(_Name, LWB <= D, []) ), int_or_enum_lit(LWB, enum), !, merge_sort(H1, H2, Hs). try_new_deduction_strategies(C >= pred(X), _T, Hs) :- \+ int_or_enum_lit(C, _), var_const(C, _T, c), standard_infrule(X <= C, H1), standard_infrule(X > D, H2), atom(D), ( inference_rule(_Name, D >= LWB, []) ; inference_rule(_Name, LWB <= D, []) ), int_or_enum_lit(LWB, enum), !, merge_sort(H1, H2, Hs). try_new_deduction_strategies(pred(X) >= LWB, _T, Hs) :- int_or_enum_lit(LWB, enum), standard_infrule(X > C, Hs), atom(C), ( inference_rule(_Name, C >= E, []) ; inference_rule(_Name, E <= C, []) ), int_or_enum_lit(E, enum), simplify(E >= LWB, true), !. try_new_deduction_strategies(LWB <= pred(X), _T, Hs) :- int_or_enum_lit(LWB, enum), standard_infrule(X > C, Hs), atom(C), ( inference_rule(_Name, C >= E, []) ; inference_rule(_Name, E <= C, []) ), int_or_enum_lit(E, enum), simplify(E >= LWB, true), !. try_new_deduction_strategies(pred(X) >= C, _T, Hs) :- \+ int_or_enum_lit(C, _), var_const(C, _T, c), standard_infrule(X > C, Hs), atom(C), ( inference_rule(_Name, C >= E, []) ; inference_rule(_Name, E <= C, []) ), int_or_enum_lit(E, enum), !. try_new_deduction_strategies(C <= pred(X), _T, Hs) :- \+ int_or_enum_lit(C, _), var_const(C, _T, c), standard_infrule(X > C, Hs), atom(C), ( inference_rule(_Name, C >= E, []) ; inference_rule(_Name, E <= C, []) ), int_or_enum_lit(E, enum), !. % Absolute value rules. % Abs(1): abs(X) >= 0 may_be_deduced. try_new_deduction_strategies(abs(_) >= 0, _, []). try_new_deduction_strategies(0 <= abs(_), _, []). % Abs(2): abs(X) >= N may_be_deduced_from [N < 0]. try_new_deduction_strategies(abs(_) >= N, T, Hs) :- safe_deduce(N < 0, T, HL), sort(HL, Hs). try_new_deduction_strategies(N <= abs(_), T, Hs) :- safe_deduce(N < 0, T, HL), sort(HL, Hs). % Abs(3): abs(X) <= N may_be_deduced_from [(1) 0 <= X, (2) X <= N. try_new_deduction_strategies(abs(X) <= N, T, Hs) :- i_am_using_rule(abs_3a), safe_deduce(0 <= X, T, H1), /* (1) */ safe_deduce(X <= N, T, H2), /* (2) */ append(H1, H2, HL), sort(HL, Hs). try_new_deduction_strategies(N >= abs(X), T, Hs) :- i_am_using_rule(abs_3b), safe_deduce(0 <= X, T, H1), /* (1) */ safe_deduce(X <= N, T, H2), /* (2) */ append(H1, H2, HL), sort(HL, Hs). %Abs(4): abs(X) <= N may_be_deduced_from [(1) A <= X, % (2) X <= B, % (3) A >= 0, % (4) B <= N]. try_new_deduction_strategies(abs(X) <= N, T, Hs) :- i_am_using_rule(abs_4a), infrule(A <= X, H1), /* (1): generates candidate A */ infrule(X <= B, H2), /* (2): generates candidate B */ safe_deduce(A >= 0, T, H3), /* (3) */ safe_deduce(B <= N, T, H4), /* (4) */ append(H3, H4, Htail), append(H2, Htail, Hrest), append(H1, Hrest, HL), sort(HL, Hs). 
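% Illustrative aside (not part of the Simplifier): Abs(3) above concludes
% abs(X) <= N from 0 <= X and X <= N.  The standalone check below confirms the
% underlying arithmetic fact on a small range, using Prolog's abs/1 and =< for
% FDL's abs and <=.  demo_abs3_holds/0 is an invented name; between/3 and
% forall/2 are assumed, as in SWI-Prolog.
demo_abs3_holds :-
    forall( ( between(0, 30, X), between(-5, 30, N), X =< N ),  % 0 <= X <= N
            abs(X) =< N ).                                       % so abs(X) <= N
% Example: ?- demo_abs3_holds.  succeeds.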
try_new_deduction_strategies(N >= abs(X), T, Hs) :- i_am_using_rule(abs_4b), infrule(A <= X, H1), /* (1): generates candidate A */ infrule(X <= B, H2), /* (2): generates candidate B */ safe_deduce(A >= 0, T, H3), /* (3) */ safe_deduce(B <= N, T, H4), /* (4) */ append(H3, H4, Htail), append(H2, Htail, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Abs(5): abs(X) <= N may_be_deduced_from [(1) A <= X, % (2) X <= B, % (3) B <= 0, % (4) -A <= N]. try_new_deduction_strategies(abs(X) <= N, T, Hs) :- i_am_using_rule(abs_5a), infrule(A <= X, H1), /* (1): generates candidate A */ infrule(X <= B, H2), /* (2): generates candidate B */ safe_deduce(B <= 0, T, H3), /* (3) */ simplify(-A, MinusA), safe_deduce(MinusA <= N, T, H4), /* (4) */ append(H3, H4, Htail), append(H2, Htail, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(N >= abs(X), T, Hs) :- i_am_using_rule(abs_5b), infrule(A <= X, H1), /* (1): generates candidate A */ infrule(X <= B, H2), /* (2): generates candidate B */ safe_deduce(B <= 0, T, H3), /* (3) */ simplify(-A, MinusA), safe_deduce(MinusA <= N, T, H4), /* (4) */ append(H3, H4, Htail), append(H2, Htail, Hrest), append(H1, Hrest, HL), sort(HL, Hs). /* Abs(6): abs(X) <= N may_be_deduced_from [(1) A <= X, (2) X <= B, (3) A <= 0, (4) 0 <= B, (5) N >= -A, (6) N >= B]. */ try_new_deduction_strategies(abs(X) <= N, T, Hs) :- i_am_using_rule(abs_6a), infrule(A <= X, H1), /* (1): generates candidate A */ infrule(X <= B, H2), /* (2): generates candidate B */ safe_deduce(A <= 0, T, H3), /* (3) */ simplify(-A, MinusA), safe_deduce(MinusA <= N, T, H5), /* (5) */ safe_deduce(0 <= B, T, H4), /* (4) */ safe_deduce(B <= N, T, H6), /* (6) */ append(H5, H6, H5to6), append(H4, H5to6, H4to6), append(H3, H4to6, H3to6), append(H2, H3to6, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(N >= abs(X), T, Hs) :- i_am_using_rule(abs_6b), infrule(A <= X, H1), /* (1): generates candidate A */ infrule(X <= B, H2), /* (2): generates candidate B */ safe_deduce(A <= 0, T, H3), /* (3) */ simplify(-A, MinusA), safe_deduce(MinusA <= N, T, H5), /* (5) */ safe_deduce(0 <= B, T, H4), /* (4) */ safe_deduce(B <= N, T, H6), /* (6) */ append(H5, H6, H5to6), append(H4, H5to6, H4to6), append(H3, H4to6, H3to6), append(H2, H3to6, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Abs(7): abs(X) <= N may_be_deduced_from [(1) N > 1, % (2) -N <= X, % (3) X < N]. try_new_deduction_strategies(abs(X) <= N, T, Hs) :- i_am_using_rule(abs_7a), ( safe_deduce(N > 1, T, H1) /* (1) */ ; safe_deduce(N >= 2, T, H1) ), simplify(-N, MinusN), safe_deduce(MinusN <= X, T, H2), /* (2) */ ( safe_deduce(X < N, T, H3) /* (3) */ ; T = integer, /* in which case X<N is equivalent to X<=N-1 */ simplify(N-1, Nminus1), safe_deduce(X <= Nminus1, T, H3) ), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(N >= abs(X), T, Hs) :- i_am_using_rule(abs_7b), ( safe_deduce(N > 1, T, H1) /* (1) */ ; safe_deduce(N >= 2, T, H1) ), simplify(-N, MinusN), safe_deduce(MinusN <= X, T, H2), /* (2) */ ( safe_deduce(X < N, T, H3) /* (3) */ ; T = integer, /* in which case X<N is equivalent to X<=N-1 */ simplify(N-1, Nminus1), safe_deduce(X <= Nminus1, T, H3) ), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Abs(8): abs(X - Y) <= N may_be_deduced_from [(1) 0 <= X, % (2) X <= N, % (3) 0 <= Y, % (4) Y <= N]. 
*/ try_new_deduction_strategies(abs(X-Y) <= N, T, Hs) :- i_am_using_rule(abs_8a), safe_deduce(0 <= X, T, H1), /* (1) */ safe_deduce(X <= N, T, H2), /* (2) */ safe_deduce(0 <= Y, T, H3), /* (3) */ safe_deduce(Y <= N, T, H4), /* (4) */ append(H3, H4, Htail), append(H2, Htail, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(N >= abs(X-Y), T, Hs) :- i_am_using_rule(abs_8b), safe_deduce(0 <= X, T, H1), /* (1) */ safe_deduce(X <= N, T, H2), /* (2) */ safe_deduce(0 <= Y, T, H3), /* (3) */ safe_deduce(Y <= N, T, H4), /* (4) */ append(H3, H4, Htail), append(H2, Htail, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Abs(9): abs(X - Y) >= 1 may_be_deduced_from... {one of} [X > Y] % [X < Y] % [X <> Y]. {all for integers} */ try_new_deduction_strategies(abs(X-Y) >= 1, integer, Hs) :- i_am_using_rule(abs_9a), safe_deduce(X > Y, integer, Hs). try_new_deduction_strategies(1 <= abs(X-Y), integer, Hs) :- i_am_using_rule(abs_9b), safe_deduce(X > Y, integer, Hs). try_new_deduction_strategies(abs(X-Y) >= 1, integer, Hs) :- i_am_using_rule(abs_9c), safe_deduce(Y > X, integer, Hs). try_new_deduction_strategies(1 <= abs(X-Y), integer, Hs) :- i_am_using_rule(abs_9d), safe_deduce(Y > X, integer, Hs). try_new_deduction_strategies(abs(X-Y) >= 1, integer, Hs) :- i_am_using_rule(abs_9e), safe_deduce(X <> Y, integer, Hs). try_new_deduction_strategies(1 <= abs(X-Y), integer, Hs) :- i_am_using_rule(abs_9f), safe_deduce(X <> Y, integer, Hs). % Integer division rules %----------------------- % Div(1): (X+Y) div 2 < Y may_be_deduced_from [(1) 0 <= X, % (2) X < Y]. {for integers} try_new_deduction_strategies(XplusY div 2 < Y, integer, Hs) :- i_am_using_rule(div_1a), ( XplusY = X + Y ; XplusY = Y + X ), safe_deduce(0 <= X, integer, H1), /* (1) */ ( safe_deduce(X < Y, integer, H2) /* (2) */ ; simplify(Y-1, Yminus1), safe_deduce(X <= Yminus1, integer, H2) ), append(H1, H2, HL), sort(HL, Hs). try_new_deduction_strategies(Y > XplusY div 2, integer, Hs) :- i_am_using_rule(div_1b), ( XplusY = X + Y ; XplusY = Y + X ), safe_deduce(0 <= X, integer, H1), /* (1) */ ( safe_deduce(X < Y, integer, H2) /* (2) */ ; simplify(Y-1, Yminus1), safe_deduce(X <= Yminus1, integer, H2) ), append(H1, H2, HL), sort(HL, Hs). try_new_deduction_strategies(XplusY div 2 <= Y - 1, integer, Hs) :- i_am_using_rule(div_1c), ( XplusY = X + Y ; XplusY = Y + X ), safe_deduce(0 <= X, integer, H1), /* (1) */ ( safe_deduce(X < Y, integer, H2) /* (2) */ ; simplify(Y-1, Yminus1), safe_deduce(X <= Yminus1, integer, H2) ), append(H1, H2, HL), sort(HL, Hs). try_new_deduction_strategies(Y - 1 >= XplusY div 2, integer, Hs) :- i_am_using_rule(div_1d), ( XplusY = X + Y ; XplusY = Y + X ), safe_deduce(0 <= X, integer, H1), /* (1) */ ( safe_deduce(X < Y, integer, H2) /* (2) */ ; simplify(Y-1, Yminus1), safe_deduce(X <= Yminus1, integer, H2) ), append(H1, H2, HL), sort(HL, Hs). % Div(2): (X+Y) div 2 >= X may_be_deduced_from [(1) X <= Y]. {for integers} try_new_deduction_strategies(XplusY div 2 >= X, integer, Hs) :- i_am_using_rule(div_2a), ( XplusY = X + Y ; XplusY = Y + X ), safe_deduce(X <= Y, integer, Hs). /* (1) */ try_new_deduction_strategies(X <= XplusY div 2, integer, Hs) :- i_am_using_rule(div_2b), ( XplusY = X + Y ; XplusY = Y + X ), safe_deduce(X <= Y, integer, Hs). /* (1) */ % Div(3): (X+Y) div 2 <= Y may_be_deduced_from [(1) X <= Y]. {for integers} try_new_deduction_strategies(XplusY div 2 <= Y, integer, Hs) :- i_am_using_rule(div_3a), ( XplusY = X + Y ; XplusY = Y + X ), safe_deduce(X <= Y, integer, Hs). 
/* (1) */ try_new_deduction_strategies(Y >= XplusY div 2, integer, Hs) :- i_am_using_rule(div_3b), ( XplusY = X + Y ; XplusY = Y + X ), safe_deduce(X <= Y, integer, Hs). /* (1) */ % Div(4): (X+Y) div 2 >= N may_be_deduced_from [(1) X >= N, % (2) Y >= N]. try_new_deduction_strategies(XplusY div 2 >= N, integer, Hs) :- i_am_using_rule(div_4a), ( XplusY = X + Y ; XplusY = Y + X ), safe_deduce(X >= N, integer, H1), /* (1) */ safe_deduce(Y >= N, integer, H2), /* (2) */ append(H1, H2, HL), sort(HL, Hs). try_new_deduction_strategies(N <= XplusY div 2, integer, Hs) :- i_am_using_rule(div_4b), ( XplusY = X + Y ; XplusY = Y + X ), safe_deduce(X >= N, integer, H1), /* (1) */ safe_deduce(Y >= N, integer, H2), /* (2) */ append(H1, H2, HL), sort(HL, Hs). % Div(5): (X+Y) div 2 <= N may_be_deduced_from [(1) X <= N, % (2) Y <= N]. try_new_deduction_strategies(XplusY div 2 <= N, integer, Hs) :- i_am_using_rule(div_5a), ( XplusY = X + Y ; XplusY = Y + X ), safe_deduce(X <= N, integer, H1), /* (1) */ safe_deduce(Y <= N, integer, H2), /* (2) */ append(H1, H2, HL), sort(HL, Hs). try_new_deduction_strategies(N >= XplusY div 2, integer, Hs) :- i_am_using_rule(div_5b), ( XplusY = X + Y ; XplusY = Y + X ), safe_deduce(X <= N, integer, H1), /* (1) */ safe_deduce(Y <= N, integer, H2), /* (2) */ append(H1, H2, HL), sort(HL, Hs). % Div(6): X < (X+Y) div 2 may_be_deduced_from [(1) 0 <= X, % (2) X < Y, % (3) one of {X+1 <> Y | X+1 < Y | X+2 <= Y}]. {for integers} try_new_deduction_strategies(X < XplusY div 2, integer, Hs) :- i_am_using_rule(div_6a), ( XplusY = X + Y ; XplusY = Y + X ), safe_deduce(0 <= X, integer, H1), /* (1) */ ( safe_deduce(X < Y, integer, H2) /* (2) */ ; simplify(Y-1, Yminus1), safe_deduce(X <= Yminus1, integer, H2) ), ( simplify(X+1, Xplus1), ( safe_deduce(Xplus1 <> Y, integer, H3) /* (3) */ ; safe_deduce(Xplus1 < Y, integer, H3) ) ; simplify(X+2, Xplus2), safe_deduce(Xplus2 <= Y, integer, H3) ), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(XplusY div 2 > X, integer, Hs) :- i_am_using_rule(div_6b), ( XplusY = X + Y ; XplusY = Y + X ), safe_deduce(0 <= X, integer, H1), /* (1) */ ( safe_deduce(X < Y, integer, H2) /* (2) */ ; simplify(Y-1, Yminus1), safe_deduce(X <= Yminus1, integer, H2) ), ( simplify(X+1, Xplus1), ( safe_deduce(Xplus1 <> Y, integer, H3) /* (3) */ ; safe_deduce(Xplus1 < Y, integer, H3) ) ; simplify(X+2, Xplus2), safe_deduce(Xplus2 <= Y, integer, H3) ), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(X + 1 <= XplusY div 2, integer, Hs) :- i_am_using_rule(div_6c), ( XplusY = X + Y ; XplusY = Y + X ), safe_deduce(0 <= X, integer, H1), /* (1) */ ( safe_deduce(X < Y, integer, H2) /* (2) */ ; simplify(Y-1, Yminus1), safe_deduce(X <= Yminus1, integer, H2) ), ( simplify(X+1, Xplus1), ( safe_deduce(Xplus1 <> Y, integer, H3) /* (3) */ ; safe_deduce(Xplus1 < Y, integer, H3) ) ; simplify(X+2, Xplus2), safe_deduce(Xplus2 <= Y, integer, H3) ), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). 
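% Illustrative aside (not part of the Simplifier): Div(1) and Div(2) above
% capture the binary-search midpoint facts: for integers with 0 <= X and X < Y,
% the midpoint (X+Y) div 2 satisfies X <= Mid and Mid < Y.  The standalone
% check below confirms this on a small range; FDL's div is written with
% Prolog's //, and both operands are non-negative here, so the rounding mode
% does not matter.  demo_midpoint_bounds/0 is an invented name; between/3 and
% forall/2 are assumed, as in SWI-Prolog.
demo_midpoint_bounds :-
    forall( ( between(0, 60, X), between(0, 60, Y), X < Y ),
            ( Mid is (X + Y) // 2,
              X =< Mid,              % Div(2): (X+Y) div 2 >= X
              Mid < Y ) ).           % Div(1): (X+Y) div 2 < Y
% Example: ?- demo_midpoint_bounds.  succeeds.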
try_new_deduction_strategies(XplusY div 2 >= X + 1, integer, Hs) :- i_am_using_rule(div_6d), ( XplusY = X + Y ; XplusY = Y + X ), safe_deduce(0 <= X, integer, H1), /* (1) */ ( safe_deduce(X < Y, integer, H2) /* (2) */ ; simplify(Y-1, Yminus1), safe_deduce(X <= Yminus1, integer, H2) ), ( simplify(X+1, Xplus1), ( safe_deduce(Xplus1 <> Y, integer, H3) /* (3) */ ; safe_deduce(Xplus1 < Y, integer, H3) ) ; simplify(X+2, Xplus2), safe_deduce(Xplus2 <= Y, integer, H3) ), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Div(7): X div Y >= M may_be_deduced_from [(1) M <= 0, % (2) X >= 0, % (3) Y > 0]. try_new_deduction_strategies(X div Y >= M, integer, Hs) :- i_am_using_rule(div_7a), safe_deduce(M <= 0, integer, H1), /* (1) */ safe_deduce(X >= 0, integer, H2), /* (2) */ ( safe_deduce(Y > 0, integer, H3) /* (3) */ ; safe_deduce(Y >= 1, integer, H3) ), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(M <= X div Y, integer, Hs) :- i_am_using_rule(div_7b), safe_deduce(M <= 0, integer, H1), /* (1) */ safe_deduce(X >= 0, integer, H2), /* (2) */ ( safe_deduce(Y > 0, integer, H3) /* (3) */ ; safe_deduce(Y >= 1, integer, H3) ), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Div(8): X div Y >= M may_be_deduced_from [(1) M <= 0, % (2) M <= X, % (3) Y > 0]. try_new_deduction_strategies(X div Y >= M, integer, Hs) :- i_am_using_rule(div_8a), safe_deduce(M <= 0, integer, H1), /* (1) */ safe_deduce(M <= X, integer, H2), /* (2) */ ( safe_deduce(Y > 0, integer, H3) /* (3) */ ; safe_deduce(Y >= 1, integer, H3) ), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(M <= X div Y, integer, Hs) :- i_am_using_rule(div_8b), safe_deduce(M <= 0, integer, H1), /* (1) */ safe_deduce(M <= X, integer, H2), /* (2) */ ( safe_deduce(Y > 0, integer, H3) /* (3) */ ; safe_deduce(Y >= 1, integer, H3) ), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Div(9): X div Y <= N may_be_deduced_from [(1) X <= N, % (2) 0 <= N, % (3) Y > 0]. try_new_deduction_strategies(X div Y <= N, integer, Hs) :- i_am_using_rule(div_9a), safe_deduce(0 <= N, integer, H2), /* (2) */ safe_deduce(X <= N, integer, H1), /* (1) */ ( safe_deduce(Y > 0, integer, H3) /* (3) */ ; safe_deduce(Y >= 1, integer, H3) ), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(N >= X div Y, integer, Hs) :- i_am_using_rule(div_9b), safe_deduce(0 <= N, integer, H2), /* (2) */ safe_deduce(X <= N, integer, H1), /* (1) */ ( safe_deduce(Y > 0, integer, H3) /* (3) */ ; safe_deduce(Y >= 1, integer, H3) ), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Div(10): X div Y <= N may_be_deduced_from [(1) X <= Y * N, % (2) Y > 0]. try_new_deduction_strategies(X div Y <= N, integer, Hs) :- i_am_using_rule(div_10a), simplify(Y*N, YtimesN), safe_deduce(X <= YtimesN, integer, H1), /* (1) */ ( safe_deduce(Y > 0, integer, H2) /* (2) */ ; safe_deduce(Y >= 1, integer, H2) ), append(H1, H2, HL), sort(HL, Hs). try_new_deduction_strategies(N >= X div Y, integer, Hs) :- i_am_using_rule(div_10b), simplify(Y*N, YtimesN), safe_deduce(X <= YtimesN, integer, H1), /* (1) */ ( safe_deduce(Y > 0, integer, H2) /* (2) */ ; safe_deduce(Y >= 1, integer, H2) ), append(H1, H2, HL), sort(HL, Hs). % Div(11): X div Y <= N may_be_deduced_from [(1) X <= Y * N + Y - 1, % (2) N >= 0, % (3) Y > 0]. 
try_new_deduction_strategies(X div Y <= N, integer, Hs) :- i_am_using_rule(div_11a), simplify(Y*(N+1)-1, Product), safe_deduce(X <= Product, integer, H1), /* (1) */ safe_deduce(N >= 0, integer, H2), /* (2) */ ( safe_deduce(Y > 0, integer, H3) /* (3) */ ; safe_deduce(Y >= 1, integer, H3) ), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(N >= X div Y, integer, Hs) :- i_am_using_rule(div_11b), simplify(Y*(N+1)-1, Product), safe_deduce(X <= Product, integer, H1), /* (1) */ safe_deduce(N >= 0, integer, H2), /* (2) */ ( safe_deduce(Y > 0, integer, H3) /* (3) */ ; safe_deduce(Y >= 1, integer, H3) ), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Div(12): N <= X div Y may_be_deduced_from [(1) Y * N <= X, % (2) Y > 0]. try_new_deduction_strategies(N <= X div Y, integer, Hs) :- i_am_using_rule(div_12a), simplify(Y*N, YtimesN), safe_deduce(YtimesN <= X, integer, H1), /* (1) */ ( safe_deduce(Y > 0, integer, H2) /* (2) */ ; safe_deduce(Y >= 1, integer, H2) ), append(H1, H2, HL), sort(HL, Hs). try_new_deduction_strategies(X div Y >= N, integer, Hs) :- i_am_using_rule(div_12b), simplify(Y*N, YtimesN), safe_deduce(YtimesN <= X, integer, H1), /* (1) */ ( safe_deduce(Y > 0, integer, H2) /* (2) */ ; safe_deduce(Y >= 1, integer, H2) ), append(H1, H2, HL), sort(HL, Hs). % Div(13): X div Y * Y >= N may_be_deduced_from [(1) X >= 0, % (2) Y > 0, % (3) {N <= 0 | N <= X-Y+1} one of]. try_new_deduction_strategies(XdivYtimesY >= N, integer, Hs) :- i_am_using_rule(div_13a), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(X >= 0, integer, H1), /* (1) */ ( safe_deduce(Y > 0, integer, H2) /* (2) */ ; safe_deduce(Y >= 1, integer, H2) ), ( safe_deduce(N <= 0, integer, H3) /* (3) */ ; safe_deduce(N <= X-Y+1, integer, H3) ), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(N <= XdivYtimesY, integer, Hs) :- i_am_using_rule(div_13b), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(X >= 0, integer, H1), /* (1) */ ( safe_deduce(Y > 0, integer, H2) /* (2) */ ; safe_deduce(Y >= 1, integer, H2) ), ( safe_deduce(N <= 0, integer, H3) /* (3) */ ; safe_deduce(N <= X-Y+1, integer, H3) ), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Div(14): X div Y * Y <= N may_be_deduced_from [(1) X >= 0, % (2) Y > 0, % (3) X <= N]. try_new_deduction_strategies(XdivYtimesY <= N, integer, Hs) :- i_am_using_rule(div_14a), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(X >= 0, integer, H1), /* (1) */ ( safe_deduce(Y > 0, integer, H2) /* (2) */ ; safe_deduce(Y >= 1, integer, H2) ), safe_deduce(X <= N, integer, H3), /* (3) */ append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(N >= XdivYtimesY, integer, Hs) :- i_am_using_rule(div_14b), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(X >= 0, integer, H1), /* (1) */ ( safe_deduce(Y > 0, integer, H2) /* (2) */ ; safe_deduce(Y >= 1, integer, H2) ), safe_deduce(X <= N, integer, H3), /* (3) */ append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Div(15): X div Y * Y = 0 may_be_deduced_from [(1) X >= 0, % (2) Y > X]. try_new_deduction_strategies(XdivYtimesY = 0, integer, Hs) :- i_am_using_rule(div_15a), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(X >= 0, integer, H1), /* (1) */ ( safe_deduce(Y > X, integer, H2) /* (2) */ ; safe_deduce(Y >= X + 1, integer, H2) ), append(H1, H2, HL), sort(HL, Hs). 
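% Illustrative aside (not part of the Simplifier): Div(10) and Div(12) above
% bound an integer quotient via a product: with Y > 0, X <= Y*N gives
% X div Y <= N, and Y*N <= X gives N <= X div Y.  The standalone check below
% confirms both on a small range.  FDL's div truncates towards zero (as in
% Ada); Prolog's // is assumed to do the same (SWI-Prolog's default rounding
% is toward_zero), and both facts also hold for floor division.
% demo_div_quotient_bounds/0 is an invented name; between/3 and forall/2 are
% assumed, as in SWI-Prolog.
demo_div_quotient_bounds :-
    forall( ( between(-40, 40, X), between(1, 8, Y), between(-10, 10, N) ),
            ( ( X =< Y * N -> X // Y =< N ; true ),     % Div(10)
              ( Y * N =< X -> N =< X // Y ; true ) ) ). % Div(12)
% Example: ?- demo_div_quotient_bounds.  succeeds.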
try_new_deduction_strategies(0 = XdivYtimesY, integer, Hs) :- i_am_using_rule(div_15b), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(X >= 0, integer, H1), /* (1) */ ( safe_deduce(Y > X, integer, H2) /* (2) */ ; safe_deduce(Y >= X + 1, integer, H2) ), append(H1, H2, HL), sort(HL, Hs). try_new_deduction_strategies(X - XdivYtimesY = X, integer, Hs) :- i_am_using_rule(div_15c), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(X >= 0, integer, H1), /* (1) */ ( safe_deduce(Y > X, integer, H2) /* (2) */ ; safe_deduce(Y >= X + 1, integer, H2) ), append(H1, H2, HL), sort(HL, Hs). try_new_deduction_strategies(X = X - XdivYtimesY, integer, Hs) :- i_am_using_rule(div_15d), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(X >= 0, integer, H1), /* (1) */ ( safe_deduce(Y > X, integer, H2) /* (2) */ ; safe_deduce(Y >= X + 1, integer, H2) ), append(H1, H2, HL), sort(HL, Hs). % Div(16): X div Y * Y >= N may_be_deduced_from [(1) X <= 0, % (2) Y > 0, % (3) X >= N]. try_new_deduction_strategies(XdivYtimesY >= N, integer, Hs) :- i_am_using_rule(div_16a), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(X <= 0, integer, H1), /* (1) */ ( safe_deduce(Y > 0, integer, H2) /* (2) */ ; safe_deduce(Y >= 1, integer, H2) ), safe_deduce(X >= N, integer, H3), /* (3) */ append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(N <= XdivYtimesY, integer, Hs) :- i_am_using_rule(div_16b), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(X <= 0, integer, H1), /* (1) */ ( safe_deduce(Y > 0, integer, H2) /* (2) */ ; safe_deduce(Y >= 1, integer, H2) ), safe_deduce(X >= N, integer, H3), /* (3) */ append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Div(17): X div Y * Y <= N may_be_deduced_from [(1) X <= 0, % (2) Y > 0, % (3) X + Y - 1 <= N]. try_new_deduction_strategies(X div Y * Y <= N, integer, Hs) :- i_am_using_rule(div_17a), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(X <= 0, integer, H1), /* (1) */ ( safe_deduce(Y > 0, integer, H2) /* (2) */ ; safe_deduce(Y >= 1, integer, H2) ), safe_deduce(X + Y - 1 <= N, integer, H3), /* (3) */ append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(N >= XdivYtimesY, integer, Hs) :- i_am_using_rule(div_17b), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(X <= 0, integer, H1), /* (1) */ ( safe_deduce(Y > 0, integer, H2) /* (2) */ ; safe_deduce(Y >= 1, integer, H2) ), safe_deduce(X + Y - 1 <= N, integer, H3), /* (3) */ append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Div(18): X div Y * Y = 0 may_be_deduced_from [(1) X <= 0, % (2) Y > -X]. try_new_deduction_strategies(XdivYtimesY = 0, integer, Hs) :- i_am_using_rule(div_18a), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(X <= 0, integer, H1), /* (1) */ ( safe_deduce(Y > -X, integer, H2) /* (2) */ ; safe_deduce(Y - 1 >= -X, integer, H2) ), append(H1, H2, HL), sort(HL, Hs). try_new_deduction_strategies(0 = XdivYtimesY, integer, Hs) :- i_am_using_rule(div_18b), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(X <= 0, integer, H1), /* (1) */ ( safe_deduce(Y > -X, integer, H2) /* (2) */ ; safe_deduce(Y - 1 >= -X, integer, H2) ), append(H1, H2, HL), sort(HL, Hs). 
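% Illustrative aside (not part of the Simplifier): Div(15) above rests on the
% fact that for 0 <= X < Y the quotient X div Y is 0, so X div Y * Y = 0 and
% X - X div Y * Y = X.  The standalone check below confirms this on a small
% range (// for FDL's div; operands are non-negative, so rounding is
% immaterial).  demo_small_dividend_quotient_zero/0 is an invented name;
% between/3 and forall/2 are assumed, as in SWI-Prolog.
demo_small_dividend_quotient_zero :-
    forall( ( between(0, 40, X), between(1, 41, Y), X < Y ),
            ( X // Y * Y =:= 0,            % Div(15): X div Y * Y = 0
              X - X // Y * Y =:= X ) ).    % Div(15c/d): X - X div Y * Y = X
% Example: ?- demo_small_dividend_quotient_zero.  succeeds.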
try_new_deduction_strategies(X - XdivYtimesY = X, integer, Hs) :- i_am_using_rule(div_18c), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(X <= 0, integer, H1), /* (1) */ ( safe_deduce(Y > -X, integer, H2) /* (2) */ ; safe_deduce(Y - 1 >= -X, integer, H2) ), append(H1, H2, HL), sort(HL, Hs). try_new_deduction_strategies(X = X - XdivYtimesY, integer, Hs) :- i_am_using_rule(div_18d), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(X <= 0, integer, H1), /* (1) */ ( safe_deduce(Y > -X, integer, H2) /* (2) */ ; safe_deduce(Y - 1 >= -X, integer, H2) ), append(H1, H2, HL), sort(HL, Hs). % Div(19): X div Y * Y >= -N may_be_deduced_from [(1) N > 1, % (2) -N <= X, % (3) X <= N-1, % (4) Y >= 1]. try_new_deduction_strategies(XdivYtimesY >= MinusN, integer, Hs) :- i_am_using_rule(div_19a), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(MinusN <= X, integer, H2), /* (2) */ simplify(-MinusN, N), ( safe_deduce(N > 1, integer, H1) /* (1) */ ; safe_deduce(N >= 2, integer, H1) ), simplify(N-1, Nminus1), safe_deduce(X <= Nminus1, integer, H3), /* (3) */ ( safe_deduce(Y > 0, integer, H4) /* (4) */ ; safe_deduce(Y >= 1, integer, H4) ), append(H3, H4, Htail), append(H2, Htail, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(MinusN <= XdivYtimesY, integer, Hs) :- i_am_using_rule(div_19b), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(MinusN <= X, integer, H2), /* (2) */ simplify(-MinusN, N), ( safe_deduce(N > 1, integer, H1) /* (1) */ ; safe_deduce(N >= 2, integer, H1) ), simplify(N-1, Nminus1), safe_deduce(X <= Nminus1, integer, H3), /* (3) */ ( safe_deduce(Y > 0, integer, H4) /* (4) */ ; safe_deduce(Y >= 1, integer, H4) ), append(H3, H4, Htail), append(H2, Htail, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Div(20): X div Y * Y <= M may_be_deduced_from [(1) M > 0, % (2) -(M+1) <= X, % (3) X <= M, % (4) Y >= 1]. try_new_deduction_strategies(XdivYtimesY <= M, integer, Hs) :- i_am_using_rule(div_20a), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), ( safe_deduce(M > 0, integer, H1) /* (1) */ ; safe_deduce(M >= 1, integer, H1) ), simplify(-(M+1), MinusMplus1), safe_deduce(MinusMplus1 <= X, integer, H2), /* (2) */ safe_deduce(X <= M, integer, H3), /* (3) */ ( safe_deduce(Y > 0, integer, H4) /* (4) */ ; safe_deduce(Y >= 1, integer, H4) ), append(H3, H4, Htail), append(H2, Htail, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(M >= XdivYtimesY, integer, Hs) :- i_am_using_rule(div_20b), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), ( safe_deduce(M > 0, integer, H1) /* (1) */ ; safe_deduce(M >= 1, integer, H1) ), simplify(-(M+1), MinusMplus1), safe_deduce(MinusMplus1 <= X, integer, H2), /* (2) */ safe_deduce(X <= M, integer, H3), /* (3) */ ( safe_deduce(Y > 0, integer, H4) /* (4) */ ; safe_deduce(Y >= 1, integer, H4) ), append(H3, H4, Htail), append(H2, Htail, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Div(21): X div Y * Y = 0 may_be_deduced_from [(1) M > 0, % (2) -(M+1) <= X, % (3) X <= M, % (4) Y >= M+2 ]. 
try_new_deduction_strategies(XdivYtimesY = 0, integer, Hs) :- i_am_using_rule(div_21a), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), infrule(X <= M, H3), /* (3): generates candidate M */ ( int(M), H1 = [], M > 0 /* (1) */ ; safe_deduce(M > 0, integer, H1) ; safe_deduce(M >= 1, integer, H1) ), simplify(-(M+1), MinusMPlus1), safe_deduce(MinusMPlus1 <= X, integer, H2), /* (2) */ ( simplify(M+2, MPlusI), safe_deduce(Y >= MPlusI, integer, H4) /* (4) */ ; simplify(M+1, MPlusI), safe_deduce(Y > MPlusI, integer, H4) ), append(H3, H4, H3to4), append(H2, H3to4, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(0 = XdivYtimesY, integer, Hs) :- i_am_using_rule(div_21b), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), infrule(X <= M, H3), /* (3): generates candidate M */ ( int(M), H1 = [], M > 0 /* (1) */ ; safe_deduce(M > 0, integer, H1) ; safe_deduce(M >= 1, integer, H1) ), simplify(-(M+1), MinusMPlus1), safe_deduce(MinusMPlus1 <= X, integer, H2), /* (2) */ ( simplify(M+2, MPlusI), safe_deduce(Y >= MPlusI, integer, H4) /* (4) */ ; simplify(M+1, MPlusI), safe_deduce(Y > MPlusI, integer, H4) ), append(H3, H4, H3to4), append(H2, H3to4, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(X - XdivYtimesY = X, integer, Hs) :- i_am_using_rule(div_21c), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), infrule(X <= M, H3), /* (3): generates candidate M */ ( int(M), H1 = [], M > 0 /* (1) */ ; safe_deduce(M > 0, integer, H1) ; safe_deduce(M >= 1, integer, H1) ), simplify(-(M+1), MinusMPlus1), safe_deduce(MinusMPlus1 <= X, integer, H2), /* (2) */ ( simplify(M+2, MPlusI), safe_deduce(Y >= MPlusI, integer, H4) /* (4) */ ; simplify(M+1, MPlusI), safe_deduce(Y > MPlusI, integer, H4) ), append(H3, H4, H3to4), append(H2, H3to4, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(X = X - XdivYtimesY, integer, Hs) :- i_am_using_rule(div_21d), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), infrule(X <= M, H3), /* (3): generates candidate M */ ( int(M), H1 = [], M > 0 /* (1) */ ; safe_deduce(M > 0, integer, H1) ; safe_deduce(M >= 1, integer, H1) ), simplify(-(M+1), MinusMPlus1), safe_deduce(MinusMPlus1 <= X, integer, H2), /* (2) */ ( simplify(M+2, MPlusI), safe_deduce(Y >= MPlusI, integer, H4) /* (4) */ ; simplify(M+1, MPlusI), safe_deduce(Y > MPlusI, integer, H4) ), append(H3, H4, H3to4), append(H2, H3to4, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Div(22): X - X div Y * Y <= N may_be_deduced_from [(1) X >= 0, % (2) Y > 0, % (3) {X <= N | Y - 1 <= N} one of]. try_new_deduction_strategies(X - XdivYtimesY <= N, integer, Hs) :- i_am_using_rule(div_22a), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(X >= 0, integer, H1), /* (1) */ ( safe_deduce(Y > 0, integer, H2) /* (2) */ ; safe_deduce(Y >= 1, integer, H2) ), ( safe_deduce(X <= N, integer, H3) /* (3) */ ; safe_deduce(Y - 1 <= N, integer, H3) ), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(N >= X - XdivYtimesY, integer, Hs) :- i_am_using_rule(div_22b), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(X >= 0, integer, H1), /* (1) */ ( safe_deduce(Y > 0, integer, H2) /* (2) */ ; safe_deduce(Y >= 1, integer, H2) ), ( safe_deduce(X <= N, integer, H3) /* (3) */ ; safe_deduce(Y - 1 <= N, integer, H3) ), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Div(23): X - X div Y * Y >= N may_be_deduced_from [(1) N <= 0, % (2) 0 <= X, % (3) Y > 0]. 
try_new_deduction_strategies(X - XdivYtimesY >= N, integer, Hs) :- i_am_using_rule(div_23a), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(0 <= X, integer, H2), /* (2) */ ( safe_deduce(Y > 0, integer, H3) /* (3) */ ; safe_deduce(Y >= 1, integer, H3) ), safe_deduce(N <= 0, integer, H1), /* (1) */ append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(N <= X - XdivYtimesY, integer, Hs) :- i_am_using_rule(div_23b), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(0 <= X, integer, H2), /* (2) */ ( safe_deduce(Y > 0, integer, H3) /* (3) */ ; safe_deduce(Y >= 1, integer, H3) ), safe_deduce(N <= 0, integer, H1), /* (1) */ append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Div(24): X - X div Y * Y <= N may_be_deduced_from [(1) X <= 0, % (2) Y > 0, % (3) 0 <= N]. try_new_deduction_strategies(X - XdivYtimesY <= N, integer, Hs) :- i_am_using_rule(div_24a), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(X <= 0, integer, H1), /* (1) */ ( safe_deduce(Y > 0, integer, H2) /* (2) */ ; safe_deduce(Y >= 1, integer, H2) ), safe_deduce(0 <= N, integer, H3), /* (3) */ append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(N >= X - XdivYtimesY, integer, Hs) :- i_am_using_rule(div_24b), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(X <= 0, integer, H1), /* (1) */ ( safe_deduce(Y > 0, integer, H2) /* (2) */ ; safe_deduce(Y >= 1, integer, H2) ), safe_deduce(0 <= N, integer, H3), /* (3) */ append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Div(25): X - X div Y * Y >= N may_be_deduced_from [(1) X <= 0, % (2) Y > 0, % (3) N <= 1 - Y]. try_new_deduction_strategies(X - XdivYtimesY >= N, integer, Hs) :- i_am_using_rule(div_25a), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(X <= 0, integer, H1), /* (1) */ ( safe_deduce(Y > 0, integer, H2) /* (2) */ ; safe_deduce(Y >= 1, integer, H2) ), safe_deduce(N <= 1 - Y, integer, H3), /* (3) */ append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(N <= X - XdivYtimesY, integer, Hs) :- i_am_using_rule(div_25b), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(X <= 0, integer, H1), /* (1) */ ( safe_deduce(Y > 0, integer, H2) /* (2) */ ; safe_deduce(Y >= 1, integer, H2) ), safe_deduce(N <= 1 - Y, integer, H3), /* (3) */ append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Div(26): X - X div Y * Y >= -N may_be_deduced_from [(1) N > 1, % (2) X >= 0, % (3) X <= N-1, % (4) Y >= 1 ]. try_new_deduction_strategies(X - XdivYtimesY >= MinusN, integer, Hs) :- i_am_using_rule(div_26a), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(X >= 0, integer, H2), /* (2) */ simplify(-MinusN, N), ( safe_deduce(N > 1, integer, H1) /* (1) */ ; safe_deduce(N >= 2, integer, H1) ), simplify(N-1, Nminus1), safe_deduce(X <= Nminus1, integer, H3), /* (3) */ ( safe_deduce(Y > 0, integer, H4) /* (4) */ ; safe_deduce(Y >= 1, integer, H4) ), append(H3, H4, Htail), append(H2, Htail, Hrest), append(H1, Hrest, HL), sort(HL, Hs). 
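% Illustrative aside (not part of the Simplifier): Div(22) and Div(23) above
% bound the remainder expression X - X div Y * Y.  For X >= 0 and Y > 0 it
% lies between 0 and min(X, Y-1), which is exactly what those two rules
% exploit.  The standalone check below confirms this on a small range (// for
% FDL's div; non-negative operands, so rounding is immaterial).
% demo_remainder_bounds/0 is an invented name; between/3 and forall/2 are
% assumed, as in SWI-Prolog.
demo_remainder_bounds :-
    forall( ( between(0, 50, X), between(1, 12, Y) ),
            ( R is X - X // Y * Y,
              R >= 0,           % Div(23): X - X div Y * Y >= N for any N <= 0
              R =< Y - 1,       % Div(22), second disjunct of condition (3)
              R =< X ) ).       % Div(22), first disjunct of condition (3)
% Example: ?- demo_remainder_bounds.  succeeds.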
try_new_deduction_strategies(MinusN <= X - XdivYtimesY, integer, Hs) :- i_am_using_rule(div_26b), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(X >= 0, integer, H2), /* (2) */ simplify(-MinusN, N), ( safe_deduce(N > 1, integer, H1) /* (1) */ ; safe_deduce(N >= 2, integer, H1) ), simplify(N-1, Nminus1), safe_deduce(X <= Nminus1, integer, H3), /* (3) */ ( safe_deduce(Y > 0, integer, H4) /* (4) */ ; safe_deduce(Y >= 1, integer, H4) ), append(H3, H4, Htail), append(H2, Htail, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Div(27): X - X div Y * Y >= -N may_be_deduced_from [(1) N > 1, % (2) -N <= X, % (3) X <= N-1, % (4) Y >= 1, % (5) Y <= N+1 ]. try_new_deduction_strategies(X - XdivYtimesY >= MinusN, integer, Hs) :- i_am_using_rule(div_27a), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(MinusN <= X, integer, H2), /* (2) */ simplify(-MinusN, N), ( safe_deduce(N > 1, integer, H1) /* (1) */ ; safe_deduce(N >= 2, integer, H1) ), simplify(N-1, Nminus1), safe_deduce(X <= Nminus1, integer, H3), /* (3) */ ( safe_deduce(Y > 0, integer, H4) /* (4) */ ; safe_deduce(Y >= 1, integer, H4) ), ( simplify(N+1, Nplus1), safe_deduce(Y <= Nplus1, integer, H5) /* (5) */ ; simplify(N+2, Nplus2), safe_deduce(Y < Nplus2, integer, H5) ), append(H4, H5, H4to5), append(H3, H4to5, Htail), append(H2, Htail, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(MinusN <= X - XdivYtimesY, integer, Hs) :- i_am_using_rule(div_27b), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(MinusN <= X, integer, H2), /* (2) */ simplify(-MinusN, N), ( safe_deduce(N > 1, integer, H1) /* (1) */ ; safe_deduce(N >= 2, integer, H1) ), simplify(N-1, Nminus1), safe_deduce(X <= Nminus1, integer, H3), /* (3) */ ( safe_deduce(Y > 0, integer, H4) /* (4) */ ; safe_deduce(Y >= 1, integer, H4) ), ( simplify(N+1, Nplus1), safe_deduce(Y <= Nplus1, integer, H5) /* (5) */ ; simplify(N+2, Nplus2), safe_deduce(Y < Nplus2, integer, H5) ), append(H4, H5, H4to5), append(H3, H4to5, Htail), append(H2, Htail, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Div(28): X - X div Y * Y <= M may_be_deduced_from [(1) M > 0, % (2) X >= 0, % (3) X <= M, % (4) Y >= 1]. try_new_deduction_strategies(X - XdivYtimesY <= M, integer, Hs) :- i_am_using_rule(div_28a), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(X >= 0, integer, H2), /* (2) */ ( safe_deduce(M > 0, integer, H1) /* (1) */ ; safe_deduce(M >= 1, integer, H1) ), safe_deduce(X <= M, integer, H3), /* (3) */ ( safe_deduce(Y > 0, integer, H4) /* (4) */ ; safe_deduce(Y >= 1, integer, H4) ), append(H3, H4, Htail), append(H2, Htail, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(M >= X - XdivYtimesY, integer, Hs) :- i_am_using_rule(div_28b), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), safe_deduce(X >= 0, integer, H2), /* (2) */ ( safe_deduce(M > 0, integer, H1) /* (1) */ ; safe_deduce(M >= 1, integer, H1) ), safe_deduce(X <= M, integer, H3), /* (3) */ ( safe_deduce(Y > 0, integer, H4) /* (4) */ ; safe_deduce(Y >= 1, integer, H4) ), append(H3, H4, Htail), append(H2, Htail, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Div(29): X - X div Y * Y <= M may_be_deduced_from [(1) M > 0, % (2) -(M+1) <= X, % (3) X <= M, % (4) Y >= 1, % (5) Y <= M+2]. 
try_new_deduction_strategies(X - XdivYtimesY <= M, integer, Hs) :- i_am_using_rule(div_29a), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), simplify(-(M+1), MinusMplus1), safe_deduce(MinusMplus1 <= X, integer, H2), /* (2) */ ( safe_deduce(M > 0, integer, H1) /* (1) */ ; safe_deduce(M >= 1, integer, H1) ), safe_deduce(X <= M, integer, H3), /* (3) */ ( safe_deduce(Y > 0, integer, H4) /* (4) */ ; safe_deduce(Y >= 1, integer, H4) ), ( simplify(M+2, Mplus2), safe_deduce(Y <= Mplus2, integer, H5) /* (5) */ ; simplify(M+3, Mplus3), safe_deduce(Y < Mplus3, integer, H5) ), append(H4, H5, H4to5), append(H3, H4to5, Htail), append(H2, Htail, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(M >= X - XdivYtimesY, integer, Hs) :- i_am_using_rule(div_29b), ( XdivYtimesY = X div Y * Y ; XdivYtimesY = Y * (X div Y) ), simplify(-(M+1), MinusMplus1), safe_deduce(MinusMplus1 <= X, integer, H2), /* (2) */ ( safe_deduce(M > 0, integer, H1) /* (1) */ ; safe_deduce(M >= 1, integer, H1) ), safe_deduce(X <= M, integer, H3), /* (3) */ ( safe_deduce(Y > 0, integer, H4) /* (4) */ ; safe_deduce(Y >= 1, integer, H4) ), ( simplify(M+2, Mplus2), safe_deduce(Y <= Mplus2, integer, H5) /* (5) */ ; simplify(M+3, Mplus3), safe_deduce(Y < Mplus3, integer, H5) ), append(H4, H5, H4to5), append(H3, H4to5, Htail), append(H2, Htail, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Div(30): X <= Y may_be_deduced_from [(1) N > 0, % (2) M div N <= Y, % (3) X * N <= M]. try_new_deduction_strategies(X <= Y, integer, Hs) :- i_am_using_rule(div_30a), infrule(X * N <= M, H3), /* (3): generates candidate M, N */ ( safe_deduce(N > 0, integer, H1) /* (1) */ ; safe_deduce(N >= 1, integer, H1) ), safe_deduce(M div N <= Y, integer, H2), /* (2) */ append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(Y >= X, integer, Hs) :- i_am_using_rule(div_30b), infrule(X * N <= M, H3), /* (3): generates candidate M, N */ ( safe_deduce(N > 0, integer, H1) /* (1) */ ; safe_deduce(N >= 1, integer, H1) ), safe_deduce(M div N <= Y, integer, H2), /* (2) */ append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Div(31): X >= N may_be_deduced_from [(1) N <> 0, % (2) X >= 0, % (3) X div N <> 0]. try_new_deduction_strategies(X >= N, integer, Hs) :- i_am_using_rule(div_31a), safe_deduce(N <> 0, integer, H1), /* (1) */ safe_deduce(X div N <> 0, integer, H3), /* (3) */ safe_deduce(X >= 0, integer, H2), /* (2) */ append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(N <= X, integer, Hs) :- i_am_using_rule(div_31b), safe_deduce(N <> 0, integer, H1), /* (1) */ safe_deduce(X div N <> 0, integer, H3), /* (3) */ safe_deduce(X >= 0, integer, H2), /* (2) */ append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Div(32): X div Y >= -(M+1) may_be_deduced_from [(1) M > 0, % (2) -(M+1) <= X, % (3) X <= M, % (4) Y <> 0]. try_new_deduction_strategies(X div Y >= MinusMplus1, integer, Hs) :- i_am_using_rule(div_32a), int(MinusMplus1), simplify(-MinusMplus1-1, M), simplify(M > 0, true), /* (1) */ safe_deduce(MinusMplus1 <= X, integer, H2), /* (2) */ safe_deduce(X <= M, integer, H3), /* (3) */ ( safe_deduce(Y <> 0, integer, H4) /* (4) */ ; ( safe_deduce(Y > 0, integer, H4) ; safe_deduce(Y >= 1, integer, H4) ; safe_deduce(Y < 0, integer, H4) ; safe_deduce(Y <= - 1, integer, H4) ) ), append(H3, H4, Hrest), append(H2, Hrest, HL), /* N.B. There is no H1 */ sort(HL, Hs). 
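% Illustrative aside (not part of the Simplifier): the key step behind Div(30)
% above is that X * N <= M with N > 0 entails X <= M div N; the rule then
% chains M div N <= Y to conclude X <= Y.  The standalone check below confirms
% that key step on a small range (// for FDL's div; the fact holds for both
% truncating and floor division).  demo_div30_core/0 is an invented name;
% between/3 and forall/2 are assumed, as in SWI-Prolog.
demo_div30_core :-
    forall( ( between(-30, 30, X), between(1, 6, N), between(-30, 30, M),
              X * N =< M ),          % hypothesis (3): X * N <= M, with N > 0
            X =< M // N ).           % hence X <= M div N
% Example: ?- demo_div30_core.  succeeds.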
try_new_deduction_strategies(MinusMplus1 <= X div Y, integer, Hs) :- i_am_using_rule(div_32b), int(MinusMplus1), simplify(-MinusMplus1-1, M), simplify(M > 0, true), /* (1) */ safe_deduce(MinusMplus1 <= X, integer, H2), /* (2) */ safe_deduce(X <= M, integer, H3), /* (3) */ ( safe_deduce(Y <> 0, integer, H4) /* (4) */ ; ( safe_deduce(Y > 0, integer, H4) ; safe_deduce(Y >= 1, integer, H4) ; safe_deduce(Y < 0, integer, H4) ; safe_deduce(Y <= - 1, integer, H4) ) ), append(H3, H4, Hrest), append(H2, Hrest, HL), /* N.B. There is no H1 */ sort(HL, Hs). % Div(33): X div Y >= -(M+1) may_be_deduced_from [(1) M > 0, % (2) -M <= X, % (3) X <= M, % (4) Y <> 0]. try_new_deduction_strategies(X div Y >= MinusMplus1, integer, Hs) :- i_am_using_rule(div_33a), int(MinusMplus1), simplify(-MinusMplus1-1, M), simplify(M > 0, true), /* (1) */ simplify(-M, MinusM), safe_deduce(MinusM <= X, integer, H2), /* (2) */ safe_deduce(X <= M, integer, H3), /* (3) */ ( safe_deduce(Y <> 0, integer, H4) /* (4) */ ; ( safe_deduce(Y > 0, integer, H4) ; safe_deduce(Y >= 1, integer, H4) ; safe_deduce(Y < 0, integer, H4) ; safe_deduce(Y <= - 1, integer, H4) ) ), append(H3, H4, Hrest), append(H2, Hrest, HL), /* N.B. There is no H1 */ sort(HL, Hs). try_new_deduction_strategies(MinusMplus1 <= X div Y, integer, Hs) :- i_am_using_rule(div_33b), int(MinusMplus1), simplify(-MinusMplus1-1, M), simplify(M > 0, true), /* (1) */ simplify(-M, MinusM), safe_deduce(MinusM <= X, integer, H2), /* (2) */ safe_deduce(X <= M, integer, H3), /* (3) */ ( safe_deduce(Y <> 0, integer, H4) /* (4) */ ; ( safe_deduce(Y > 0, integer, H4) ; safe_deduce(Y >= 1, integer, H4) ; safe_deduce(Y < 0, integer, H4) ; safe_deduce(Y <= - 1, integer, H4) ) ), append(H3, H4, Hrest), append(H2, Hrest, HL), /* N.B. There is no H1 */ sort(HL, Hs). % Div(34): X div Y <= M may_be_deduced_from [(1) M > 0, % (2) -(M+1) <= X, % (3) X <= M, % (4) Y <> 0, % (5) Y <> -1]. try_new_deduction_strategies(X div Y <= M, integer, Hs) :- i_am_using_rule(div_34a), int(M), simplify(M > 0, true), /* (1) */ safe_deduce(X <= M, integer, H3), /* (3) */ simplify(-(M+1), MinusMplus1), safe_deduce(MinusMplus1 <= X, integer, H2), /* (2) */ ( safe_deduce(Y <> 0, integer, H4), /* (4) */ safe_deduce(Y <> - 1, integer, H5) /* (5) */ ; ( safe_deduce(Y > 0, integer, H4) ; safe_deduce(Y >= 1, integer, H4) ; safe_deduce(Y < - 1, integer, H4) ; safe_deduce(Y <= - 2, integer, H4) ), H5 = [] ), append(H4, H5, H4to5), append(H3, H4to5, Hrest), append(H2, Hrest, HL), /* N.B. There is no H1 */ sort(HL, Hs). try_new_deduction_strategies(M >= X div Y, integer, Hs) :- i_am_using_rule(div_34b), int(M), simplify(M > 0, true), /* (1) */ safe_deduce(X <= M, integer, H3), /* (3) */ simplify(-(M+1), MinusMplus1), safe_deduce(MinusMplus1 <= X, integer, H2), /* (2) */ ( safe_deduce(Y <> 0, integer, H4), /* (4) */ safe_deduce(Y <> - 1, integer, H5) /* (5) */ ; ( safe_deduce(Y > 0, integer, H4) ; safe_deduce(Y >= 1, integer, H4) ; safe_deduce(Y < - 1, integer, H4) ; safe_deduce(Y <= - 2, integer, H4) ), H5 = [] ), append(H4, H5, H4to5), append(H3, H4to5, Hrest), append(H2, Hrest, HL), /* N.B. There is no H1 */ sort(HL, Hs). % Div(35): X div Y <= M may_be_deduced_from [(1) M > 0, % (2) -M <= X, % (3) X <= M, % (4) Y <> 0]. 
try_new_deduction_strategies(X div Y <= M, integer, Hs) :- i_am_using_rule(div_35a), int(M), simplify(M > 0, true), /* (1) */ safe_deduce(X <= M, integer, H3), /* (3) */ simplify(-M, MinusM), safe_deduce(MinusM <= X, integer, H2), /* (2) */ ( safe_deduce(Y <> 0, integer, H4) /* (4) */ ; safe_deduce(Y > 0, integer, H4) ; safe_deduce(Y >= 1, integer, H4) ; safe_deduce(Y < 0, integer, H4) ; safe_deduce(Y <= - 1, integer, H4) ), append(H3, H4, Hrest), append(H2, Hrest, HL), /* N.B. There is no H1 */ sort(HL, Hs). try_new_deduction_strategies(M >= X div Y, integer, Hs) :- i_am_using_rule(div_35b), int(M), simplify(M > 0, true), /* (1) */ safe_deduce(X <= M, integer, H3), /* (3) */ simplify(-M, MinusM), safe_deduce(MinusM <= X, integer, H2), /* (2) */ ( safe_deduce(Y <> 0, integer, H4) /* (4) */ ; safe_deduce(Y > 0, integer, H4) ; safe_deduce(Y >= 1, integer, H4) ; safe_deduce(Y < 0, integer, H4) ; safe_deduce(Y <= - 1, integer, H4) ), append(H3, H4, Hrest), append(H2, Hrest, HL), /* N.B. There is no H1 */ sort(HL, Hs). % Extra modulus rules %-------------------- % Mod(1): X > 0 may_be_deduced_from [(1) X >= 0, % (2) N > 0, % (3) X mod N <> 0]. try_new_deduction_strategies(X > 0, integer, Hs) :- i_am_using_rule(mod_1a), get_provenance_framework(spark), infrule(X mod N <> 0, H3), /* (3): generates candidate N */ safe_deduce(N > 0, integer, H2), /* (2) */ safe_deduce(X >= 0, integer, H1), /* (1) */ append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(0 < X, integer, Hs) :- i_am_using_rule(mod_1b), get_provenance_framework(spark), infrule(X mod N <> 0, H3), /* (3): generates candidate N */ safe_deduce(N > 0, integer, H2), /* (2) */ safe_deduce(X >= 0, integer, H1), /* (1) */ append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Mod(2): X >= X mod N may_be_deduced_from [(1) X >= 0, % (2) N > 0]. try_new_deduction_strategies(X >= X mod N, integer, Hs) :- i_am_using_rule(mod_2a), get_provenance_framework(spark), safe_deduce(X >= 0, integer, H1), /* (1) */ safe_deduce(N > 0, integer, H2), /* (2) */ append(H1, H2, HL), sort(HL, Hs). try_new_deduction_strategies(X mod N <= X, integer, Hs) :- i_am_using_rule(mod_2b), get_provenance_framework(spark), safe_deduce(X >= 0, integer, H1), /* (1) */ safe_deduce(N > 0, integer, H2), /* (2) */ append(H1, H2, HL), sort(HL, Hs). % Mod(3): X div M mod N <= K may_be_deduced_from [(1) 0 <= X, % (2) X <= U, % (3) 0 < M, % (4) 0 < N, % (5) U div M = L, % (6) L < N {so L mod N = L}, % (7) L <= K]. try_new_deduction_strategies(X div M mod N <= K, integer, Hs) :- i_am_using_rule(mod_3a), get_provenance_framework(spark), safe_deduce(0 <= X, integer, H1), /* (1) */ find_upper_numeric_limit_for(X, U, integer, H2), /* (2) */ simplify(U>=0, true), /* sanity check */ safe_deduce(0 < M, integer, H3), /* (3) */ safe_deduce(0 < N, integer, H4), /* (4) */ simplify(U div M, L), /* (5) */ safe_deduce(L < N, integer, H6), /* (6) */ safe_deduce(L <= K, integer, H7), /* (7) */ append(H6, H7, H67), append(H4, H67, H467), /* N.B. no H5 */ append(H3, H467, H3467), append(H2, H3467, H23467), append(H1, H23467, HL), sort(HL, Hs). 
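% Illustrative aside (not part of the Simplifier): Mod(1) and Mod(2) above use
% the facts that, for X >= 0 and N > 0, X mod N never exceeds X, and that a
% non-zero remainder forces X > 0.  The standalone check below confirms both on
% a small range; for a non-negative left operand and positive right operand
% Prolog's mod and FDL's mod agree, so mod is used directly.
% demo_mod_basic_facts/0 is an invented name; between/3 and forall/2 are
% assumed, as in SWI-Prolog.
demo_mod_basic_facts :-
    forall( ( between(0, 60, X), between(1, 12, N) ),
            ( X mod N =< X,                             % Mod(2)
              ( X mod N =:= 0 -> true ; X > 0 ) ) ).    % Mod(1)
% Example: ?- demo_mod_basic_facts.  succeeds.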
try_new_deduction_strategies(K >= X div M mod N, integer, Hs) :- i_am_using_rule(mod_3b), get_provenance_framework(spark), safe_deduce(0 <= X, integer, H1), /* (1) */ find_upper_numeric_limit_for(X, U, integer, H2), /* (2) */ simplify(U>=0, true), /* sanity check */ safe_deduce(0 < M, integer, H3), /* (3) */ safe_deduce(0 < N, integer, H4), /* (4) */ simplify(U div M, L), /* (5) */ safe_deduce(L < N, integer, H6), /* (6) */ safe_deduce(L <= K, integer, H7), /* (7) */ append(H6, H7, H67), append(H4, H67, H467), /* N.B. no H5 */ append(H3, H467, H3467), append(H2, H3467, H23467), append(H1, H23467, HL), sort(HL, Hs). % Mod(4): X mod N <= K may_be_deduced_from [(1) 0 <= X, % (2) X <= K, % (3) K <= N, % (4) 0 < N]. try_new_deduction_strategies(X mod N <= K, integer, Hs) :- i_am_using_rule(mod_4a), get_provenance_framework(spark), safe_deduce(N > 0, integer, H4), /* (4) */ safe_deduce(0 <= X, integer, H1), /* (1) */ safe_deduce(X <= K, integer, H2), /* (2) */ safe_deduce(K <= N, integer, H3), /* (3) */ append(H3, H4, H34), append(H2, H34, H234), append(H1, H234, HL), sort(HL, Hs). try_new_deduction_strategies(K >= X mod N, integer, Hs) :- i_am_using_rule(mod_4b), get_provenance_framework(spark), safe_deduce(N > 0, integer, H4), /* (4) */ safe_deduce(0 <= X, integer, H1), /* (1) */ safe_deduce(X <= K, integer, H2), /* (2) */ safe_deduce(K <= N, integer, H3), /* (3) */ append(H3, H4, H34), append(H2, H34, H234), append(H1, H234, HL), sort(HL, Hs). % Mod(5): Y + (X mod N) <= K may_be_deduced_from [(1) 0 <= X, % (2) X <= A, % (3) A <= N, % (4) 0 < N, % (5) Y <= B, % (6) A+B <= K]. */ try_new_deduction_strategies(Y + (X mod N) <= K, integer, Hs) :- i_am_using_rule(mod_5a), get_provenance_framework(spark), safe_deduce(N > 0, integer, H4), /* (4) */ safe_deduce(0 <= X, integer, H1), /* (1) */ infrule(X <= A, H2), /* (2) */ safe_deduce(A <= N, integer, H3), /* (3) */ infrule(Y <= B, H5), /* (5) */ safe_deduce(A + B <= K, integer, H6), /* (6) */ append(H5, H6, H56), append(H4, H56, H456), append(H3, H456, H3456), append(H2, H3456, H23456), append(H1, H23456, HL), sort(HL, Hs). try_new_deduction_strategies((X mod N) + Y <= K, integer, Hs) :- i_am_using_rule(mod_5b), get_provenance_framework(spark), safe_deduce(N > 0, integer, H4), /* (4) */ safe_deduce(0 <= X, integer, H1), /* (1) */ infrule(X <= A, H2), /* (2) */ safe_deduce(A <= N, integer, H3), /* (3) */ infrule(Y <= B, H5), /* (5) */ safe_deduce(A + B <= K, integer, H6), /* (6) */ append(H5, H6, H56), append(H4, H56, H456), append(H3, H456, H3456), append(H2, H3456, H23456), append(H1, H23456, HL), sort(HL, Hs). try_new_deduction_strategies(K >= Y + (X mod N), integer, Hs) :- i_am_using_rule(mod_5c), get_provenance_framework(spark), safe_deduce(N > 0, integer, H4), /* (4) */ safe_deduce(0 <= X, integer, H1), /* (1) */ infrule(X <= A, H2), /* (2) */ safe_deduce(A <= N, integer, H3), /* (3) */ infrule(Y <= B, H5), /* (5) */ safe_deduce(A + B <= K, integer, H6), /* (6) */ append(H5, H6, H56), append(H4, H56, H456), append(H3, H456, H3456), append(H2, H3456, H23456), append(H1, H23456, HL), sort(HL, Hs). 
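% Illustrative aside (not part of the Simplifier): Mod(4) above concludes
% X mod N <= K from 0 <= X <= K <= N with N > 0: either X < N and the
% remainder is X itself, or X = K = N and the remainder is 0.  The standalone
% check below confirms this on a small range (Prolog's mod agrees with FDL's
% mod for these signs).  demo_mod4_holds/0 is an invented name; between/3 and
% forall/2 are assumed, as in SWI-Prolog.
demo_mod4_holds :-
    forall( ( between(1, 15, N), between(0, 15, K), K =< N,
              between(0, 15, X), X =< K ),     % 0 <= X <= K <= N, N > 0
            X mod N =< K ).                    % so X mod N <= K
% Example: ?- demo_mod4_holds.  succeeds.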
try_new_deduction_strategies(K >= (X mod N) + Y, integer, Hs) :- i_am_using_rule(mod_5d), get_provenance_framework(spark), safe_deduce(N > 0, integer, H4), /* (4) */ safe_deduce(0 <= X, integer, H1), /* (1) */ infrule(X <= A, H2), /* (2) */ infrule(Y <= B, H5), /* (5) */ safe_deduce(A <= N, integer, H3), /* (3) */ safe_deduce(A + B <= K, integer, H6), /* (6) */ append(H5, H6, H56), append(H4, H56, H456), append(H3, H456, H3456), append(H2, H3456, H23456), append(H1, H23456, HL), sort(HL, Hs). % Mod(6): Y - X mod N <= K may_be_deduced_from [(1) X >= A, % (2) A >= 0, % (3) X <= N-1, % (4) 0 < N, % (5) Y <= B, % (6) B-A <= K]. try_new_deduction_strategies(Y - X mod N <= K, integer, Hs) :- i_am_using_rule(mod_6a), get_provenance_framework(spark), safe_deduce(N > 0, integer, H4), /* (4) */ infrule(X >= A, H1), /* (1) */ safe_deduce(A >= 0, integer, H2), /* (2) */ infrule(Y <= B, H5), /* (5) */ safe_deduce(B - A <= K, integer, H6), /* (6) */ ( safe_deduce(X <= N-1, integer, H3) /* (3) */ ; safe_deduce(X < N, integer, H3) ), append(H5, H6, H56), append(H4, H56, H456), append(H3, H456, H3456), append(H2, H3456, H23456), append(H1, H23456, HL), sort(HL, Hs). try_new_deduction_strategies(K >= Y - X mod N, integer, Hs) :- i_am_using_rule(mod_6b), get_provenance_framework(spark), safe_deduce(N > 0, integer, H4), /* (4) */ infrule(X >= A, H1), /* (1) */ safe_deduce(A >= 0, integer, H2), /* (2) */ infrule(Y <= B, H5), /* (5) */ safe_deduce(B - A <= K, integer, H6), /* (6) */ ( safe_deduce(X <= N-1, integer, H3) /* (3) */ ; safe_deduce(X < N, integer, H3) ), append(H5, H6, H56), append(H4, H56, H456), append(H3, H456, H3456), append(H2, H3456, H23456), append(H1, H23456, HL), sort(HL, Hs). % Exponentiation rules %--------------------- % Pow(1): X ** Y >= 0 may_be_deduced_from [(1) X >= 0, (2) Y >= 0]. try_new_deduction_strategies(X ** Y >= 0, T, Hs) :- i_am_using_rule(pow_1a), get_provenance_framework(spark), safe_deduce(Y >= 0, integer, H2), /* (2) */ safe_deduce(X >= 0, T, H1), /* (1) */ append(H1, H2, HL), sort(HL, Hs). try_new_deduction_strategies(0 <= X ** Y, T, Hs) :- i_am_using_rule(pow_1b), get_provenance_framework(spark), safe_deduce(Y >= 0, integer, H2), /* (2) */ safe_deduce(X >= 0, T, H1), /* (1) */ append(H1, H2, HL), sort(HL, Hs). % Pow(2): X ** Y <= N may_be_deduced_from [(1) 0 <= X, % (2) X <= A, % (3) Y >= 1, % (4) Y <= B, % (5) A ** B <= N]. try_new_deduction_strategies(X ** Y <= N, T, Hs) :- i_am_using_rule(pow_2a), get_provenance_framework(spark), safe_deduce(Y >= 1, integer, H3), /* (3) */ safe_deduce(X >= 0, integer, H1), /* (1) */ infrule(X <= A, H2), /* (2): generates candidate A */ infrule(Y <= B, H4), /* (4): generates candidate B */ simplify(A ** B, AtotheB), safe_deduce(AtotheB <= N, T, H5), /* (5) */ append(H4, H5, H4to5), append(H3, H4to5, H3to5), append(H2, H3to5, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(N >= X ** Y, T, Hs) :- i_am_using_rule(pow_2b), get_provenance_framework(spark), safe_deduce(Y >= 1, integer, H3), /* (3) */ safe_deduce(X >= 0, integer, H1), /* (1) */ infrule(X <= A, H2), /* (2): generates candidate A */ infrule(Y <= B, H4), /* (4): generates candidate B */ simplify(A ** B, AtotheB), safe_deduce(AtotheB <= N, T, H5), /* (5) */ append(H4, H5, H4to5), append(H3, H4to5, H3to5), append(H2, H3to5, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Pow(3): X ** N = X ** (N-1) * X may_be_deduced_from [(1) N > 0]. 
try_new_deduction_strategies(X ** N = X ** Nminus1 * X, _, Hs) :- i_am_using_rule(pow_3a), get_provenance_framework(spark), ( safe_deduce(N > 0, integer, H1) /* (1) */ ; safe_deduce(N >= 1, integer, H1) ), simplify(Nminus1 = N-1, PowerEquality), safe_deduce(PowerEquality, integer, H2), append(H1, H2, HL), sort(HL, Hs). try_new_deduction_strategies(X ** Nminus1 * X = X ** N, _, Hs) :- i_am_using_rule(pow_3b), get_provenance_framework(spark), ( safe_deduce(N > 0, integer, H1) /* (1) */ ; safe_deduce(N >= 1, integer, H1) ), simplify(Nminus1 = N-1, PowerEquality), safe_deduce(PowerEquality, integer, H2), append(H1, H2, HL), sort(HL, Hs). % Pow(4): X ** Y * Z <= N may_be_deduced_from [(1) Z >= 0, % (2) 0 <= X, % (3) X <= A, % (4) Y >= 1, % (5) Y <= B, % (6) A ** B * Z <= N]. try_new_deduction_strategies(XtotheYtimesZ <= N, T, Hs) :- i_am_using_rule(pow_4a), get_provenance_framework(spark), ( XtotheYtimesZ = (X ** Y) * Z ; XtotheYtimesZ = Z * (X ** Y) ), safe_deduce(Z >= 0, integer, H1), /* (1) */ safe_deduce(Y >= 1, integer, H4), /* (4) */ safe_deduce(X >= 0, integer, H2), /* (2) */ ( int(X), A=X, H3=[] /* (3): generates candidate A */ ; infrule(X <= A, H3) ), ( int(Y), B=Y, H5=[] /* (5): generates candidate B */ ; infrule(Y <= B, H5) ), simplify(A ** B, AtotheB), simplify(AtotheB * Z, AtotheBtimesZ), safe_deduce(AtotheBtimesZ <= N, T, H6), /* (6) */ append(H5, H6, H5to6), append(H4, H5to6, H4to6), append(H3, H4to6, H3to6), append(H2, H3to6, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(N >= XtotheYtimesZ, T, Hs) :- i_am_using_rule(pow_4b), get_provenance_framework(spark), ( XtotheYtimesZ = (X ** Y) * Z ; XtotheYtimesZ = Z * (X ** Y) ), safe_deduce(Z >= 0, integer, H1), /* (1) */ safe_deduce(Y >= 1, integer, H4), /* (4) */ safe_deduce(X >= 0, integer, H2), /* (2) */ ( int(X), A=X, H3=[] /* (3): generates candidate A */ ; infrule(X <= A, H3) ), ( int(Y), B=Y, H5=[] /* (5): generates candidate B */ ; infrule(Y <= B, H5) ), simplify(A ** B, AtotheB), simplify(AtotheB * Z, AtotheBtimesZ), safe_deduce(AtotheBtimesZ <= N, T, H6), /* (6) */ append(H5, H6, H5to6), append(H4, H5to6, H4to6), append(H3, H4to6, H3to6), append(H2, H3to6, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Arith(1): X <= N may_be_deduced_from [(1) X+A<=Y+A, % (2) Y<=N]. try_new_deduction_strategies(X <= N, T, Hs) :- i_am_using_rule(arith_1), ( infrule(X + A <= Y + A, H1) /* (1) */ ; infrule(X + A <= A + Y, H1) ; infrule(A + X <= Y + A, H1) ; infrule(A + X <= A + Y, H1) ; infrule(X - A <= Y - A, H1) /* special case */ ), safe_deduce(Y <= N, T, H2), /* (2) */ append(H1, H2, HL), sort(HL, Hs). try_new_deduction_strategies(N >= X, T, Hs) :- i_am_using_rule(arith_1), ( infrule(X + A <= Y + A, H1) /* (1) */ ; infrule(X + A <= A + Y, H1) ; infrule(A + X <= Y + A, H1) ; infrule(A + X <= A + Y, H1) ; infrule(X - A <= Y - A, H1) /* special case */ ), safe_deduce(Y <= N, T, H2), /* (2) */ append(H1, H2, HL), sort(HL, Hs). % Arith(2): X <= N may_be_deduced_from [(1) X*Y + K <= Z, % (2) Y >= 1, % (3) Z <= M, % (4) (M-K) div Y <= N]. 
{K,M,Y integer literals} try_new_deduction_strategies(X <= N, integer, Hs) :- i_am_using_rule(arith_2a), ( XtimesY = X * Y ; XtimesY = Y * X ), ( Sum = XtimesY + K ; Sum = K + XtimesY ), infrule(Sum <= Z, H1), /* (1) */ int(Y), simplify(Y >= 1, true), /* (2): H2=[], effectively */ int(K), ( int(Z), M = Z, H3 = [] ; infrule(Z <= M, H3), /* (3) */ int(M) ), simplify((M - K) div Y, Bound), safe_deduce(Bound <= N, integer, H4), /* (4) */ append(H3, H4, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(N >= X, integer, Hs) :- i_am_using_rule(arith_2b), ( XtimesY = X * Y ; XtimesY = Y * X ), ( Sum = XtimesY + K ; Sum = K + XtimesY ), infrule(Sum <= Z, H1), /* (1) */ int(Y), simplify(Y >= 1, true), /* (2): H2=[], effectively */ int(K), ( int(Z), M = Z, H3 = [] ; infrule(Z <= M, H3), /* (3) */ int(M) ), simplify((M - K) div Y, Bound), safe_deduce(Bound <= N, integer, H4), /* (4) */ append(H3, H4, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Arith(3): X >= N may_be_deduced_from [(1) X >= 0, % (2) X mod Y >= N, % (3) Y <> 0]. {restrict to literal N} try_new_deduction_strategies(X >= N, integer, Hs) :- get_provenance_framework(spark), i_am_using_rule(arith_3a), int(N), simplify(N <> 0, true), /* no need to use this rule if N=0 */ safe_deduce(X >= 0, integer, H1), /* (1) */ ( infrule(X mod Y >= N, H2) /* (2) */ ; infrule(X mod Y >= K, H2), int(K), simplify(K >= N, true) ), ( safe_deduce(Y <> 0, integer, H3) /* (3) */ ; safe_deduce(Y >= 1, integer, H3) ; safe_deduce(Y <= - 1, integer, H3) ), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). try_new_deduction_strategies(N <= X, integer, Hs) :- get_provenance_framework(spark), i_am_using_rule(arith_3b), int(N), simplify(N <> 0, true), /* no need to use this rule if N=0 */ safe_deduce(X >= 0, integer, H1), /* (1) */ ( infrule(X mod Y >= N, H2) /* (2) */ ; infrule(X mod Y >= K, H2), int(K), simplify(K >= N, true) ), ( safe_deduce(Y <> 0, integer, H3) /* (3) */ ; safe_deduce(Y >= 1, integer, H3) ; safe_deduce(Y <= - 1, integer, H3) ), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % Unification strategy: for a functor F, we can prove that F(X1,X2,...,Xn) % = F(Y1,Y2,...,Yn) if we can show that the lists of arguments are provably % equal, i.e. X1=Y1, X2=Y2, ..., Xn=Yn. The following clause therefore % attempts to prove this (by invoking deduce_equal_arguments/3, which tests % the argument lists for equality). try_new_deduction_strategies(X = Y, _, Hs) :- \+ atomic(X), \+ atomic(Y), X =.. [F|Xargs], Y =.. [F|Yargs], /* so goal is F(X1...Xn)=F(Y1...Yn) */ !, deduce_equal_arguments(Xargs, Yargs, Hs). /* show each Xi=Yi */ %=============================================================================== % int_enum_lit_or_const(+Identifier, -Which). %------------------------------------------------------------------------------- % Check identifier is an integer or enumeration literal, a constant, or a rational literal. %=============================================================================== int_enum_lit_or_const(Identifier, Which) :- ( int_or_enum_lit(Identifier, Which) ; var_const(Identifier, _Type, c) ; base_rational(Identifier) /* allow rational literals, too */ ), !. %=============================================================================== % deduce_equal_arguments(+List1, +List2, -HypsUsed). %------------------------------------------------------------------------------- % Successful where the two lists are provably equal, returning the hypotheses used as % HypsUsed. 
%=============================================================================== deduce_equal_arguments([X], [X], []) :- !. deduce_equal_arguments([X], [Y], Hs) :- infer_subgoal(X=Y, Hs), !. deduce_equal_arguments([X|Xs], [X|Ys], Hs) :- !, deduce_equal_arguments(Xs, Ys, Hs), !. deduce_equal_arguments([X|Xs], [Y|Ys], Hyps) :- infer_subgoal(X=Y, H1), !, deduce_equal_arguments(Xs, Ys, Hs), !, append(H1, Hs, HL), sort(HL, Hyps), !. %=============================================================================== % try_new_numeric_strategies(+GOAL, +Type, -HypList). %------------------------------------------------------------------------------- % Deduce numeric expression GOAL, of type T, giving Hs instantiated to list % of hypotheses used. These new numeric strategies cater for numeric bounds % & repeated terms. %=============================================================================== % Numeric literal on either side of an inequality (ignore =, <>) %--------------------------------------------------------------- try_new_numeric_strategies(X <= N, TYPE, Hs) :- /*[[NewNum_1]]*/ base_rational(N), find_upper_numeric_limit_for(X, U, TYPE, Hs), simplify(U <= N, true), !. try_new_numeric_strategies(N <= X, TYPE, Hs) :- /*[[NewNum_2]]*/ base_rational(N), find_lower_numeric_limit_for(X, L, TYPE, Hs), simplify(L >= N, true), !. try_new_numeric_strategies(X >= N, TYPE, Hs) :- /*[[NewNum_3]]*/ base_rational(N), find_lower_numeric_limit_for(X, L, TYPE, Hs), simplify(L >= N, true), !. try_new_numeric_strategies(N >= X, TYPE, Hs) :- /*[[NewNum_4]]*/ base_rational(N), find_upper_numeric_limit_for(X, U, TYPE, Hs), simplify(U <= N, true), !. try_new_numeric_strategies(X < N, TYPE, Hs) :- /*[[NewNum_5]]*/ base_rational(N), find_upper_numeric_limit_for(X, U, TYPE, Hs), simplify(U < N, true), !. try_new_numeric_strategies(N < X, TYPE, Hs) :- /*[[NewNum_6]]*/ base_rational(N), find_lower_numeric_limit_for(X, L, TYPE, Hs), simplify(L > N, true), !. try_new_numeric_strategies(X > N, TYPE, Hs) :- /*[[NewNum_7]]*/ base_rational(N), find_lower_numeric_limit_for(X, L, TYPE, Hs), simplify(L > N, true), !. try_new_numeric_strategies(N > X, TYPE, Hs) :- /*[[NewNum_8]]*/ base_rational(N), find_upper_numeric_limit_for(X, U, TYPE, Hs), simplify(U < N, true), !. % Numeric literal on either side of a non-equality (<>) %------------------------------------------------------ try_new_numeric_strategies(X <> N, TYPE, Hs) :- /*[[NewNum_9]]*/ base_rational(N), ( find_lower_numeric_limit_for(X, L, TYPE, Hs), /* X >= L, and */ simplify(L > N, true) /* L > N, so X > N */ ; find_upper_numeric_limit_for(X, U, TYPE, Hs), /* X <= U, and */ simplify(U < N, true) /* U < N, so X < N */ ), !. try_new_numeric_strategies(N <> X, TYPE, Hs) :- /*[[NewNum_10]]*/ base_rational(N), ( find_lower_numeric_limit_for(X, L, TYPE, Hs), /* X >= L, and */ simplify(L > N, true) /* L > N, so X > N */ ; find_upper_numeric_limit_for(X, U, TYPE, Hs), /* X <= U, and */ simplify(U < N, true) /* U < N, so X < N */ ), !. % Extra inequalities rules with numeric literal sub-terms %-------------------------------------------------------- % Integer term in upper-bound. try_new_numeric_strategies(X + Y <= Z + N, TYPE, Hs) :- /*[[NewNum_11]]*/ base_rational(N), find_upper_numeric_limit_for(X, XU, TYPE, H1), /* X <= XU */ evaluate_rational_expression(N - XU, A), safe_deduce(Y <= Z + A, TYPE, H2), /* Y <= Z + (N - XU) */ append(H1, H2, HL), sort(HL, Hs). 
/* So: X + Y <= Z + N */ try_new_numeric_strategies(X + Y <= N + Z, TYPE, Hs) :- /*[[NewNum_12]]*/ base_rational(N), find_upper_numeric_limit_for(X, XU, TYPE, H1), /* X <= XU */ evaluate_rational_expression(N - XU, A), safe_deduce(Y <= Z + A, TYPE, H2), /* Y <= Z + (N - XU) */ append(H1, H2, HL), sort(HL, Hs). /* So: X + Y <= Z + N */ try_new_numeric_strategies(Y + X <= Z + N, TYPE, Hs) :- /*[[NewNum_13]]*/ base_rational(N), find_upper_numeric_limit_for(X, XU, TYPE, H1), /* X <= XU */ evaluate_rational_expression(N - XU, A), safe_deduce(Y <= Z + A, TYPE, H2), /* Y <= Z + (N - XU) */ append(H1, H2, HL), sort(HL, Hs). /* So: X + Y <= Z + N */ try_new_numeric_strategies(Y + X <= N + Z, TYPE, Hs) :- /*[[NewNum_14]]*/ base_rational(N), find_upper_numeric_limit_for(X, XU, TYPE, H1), /* X <= XU */ evaluate_rational_expression(N - XU, A), safe_deduce(Y <= Z + A, TYPE, H2), /* Y <= Z + (N - XU) */ append(H1, H2, HL), sort(HL, Hs). /* So: X + Y <= Z + N */ try_new_numeric_strategies(Z + N >= X + Y, TYPE, Hs) :- /*[[NewNum_15]]*/ base_rational(N), find_upper_numeric_limit_for(X, XU, TYPE, H1), /* X <= XU */ evaluate_rational_expression(N - XU, A), safe_deduce(Y <= Z + A, TYPE, H2), /* Y <= Z + (N - XU) */ append(H1, H2, HL), sort(HL, Hs). /* So: X + Y <= Z + N */ try_new_numeric_strategies(N + Z >= X + Y, TYPE, Hs) :- /*[[NewNum_16]]*/ base_rational(N), find_upper_numeric_limit_for(X, XU, TYPE, H1), /* X <= XU */ evaluate_rational_expression(N - XU, A), safe_deduce(Y <= Z + A, TYPE, H2), /* Y <= Z + (N - XU) */ append(H1, H2, HL), sort(HL, Hs). /* So: X + Y <= Z + N */ try_new_numeric_strategies(Z + N >= Y + X, TYPE, Hs) :- /*[[NewNum_17]]*/ base_rational(N), find_upper_numeric_limit_for(X, XU, TYPE, H1), /* X <= XU */ evaluate_rational_expression(N - XU, A), safe_deduce(Y <= Z + A, TYPE, H2), /* Y <= Z + (N - XU) */ append(H1, H2, HL), sort(HL, Hs). /* So: X + Y <= Z + N */ try_new_numeric_strategies(N + Z >= Y + X, TYPE, Hs) :- /*[[NewNum_18]]*/ base_rational(N), find_upper_numeric_limit_for(X, XU, TYPE, H1), /* X <= XU */ evaluate_rational_expression(N - XU, A), safe_deduce(Y <= Z + A, TYPE, H2), /* Y <= Z + (N - XU) */ append(H1, H2, HL), sort(HL, Hs). /* So: X + Y <= Z + N */ % Integer term in lower-bound. try_new_numeric_strategies(Z + N <= X + Y, TYPE, Hs) :- /*[[NewNum_19]]*/ base_rational(N), find_lower_numeric_limit_for(X, XL, TYPE, H1), /* XL <= X */ evaluate_rational_expression(N - XL, A), safe_deduce(Z + A <= Y, TYPE, H2), /* Z + (N - XL) <= Y */ append(H1, H2, HL), sort(HL, Hs). /* So: Z + N <= X + Y */ try_new_numeric_strategies(N + Z <= X + Y, TYPE, Hs) :- /*[[NewNum_20]]*/ base_rational(N), find_lower_numeric_limit_for(X, XL, TYPE, H1), /* XL <= X */ evaluate_rational_expression(N - XL, A), safe_deduce(Z + A <= Y, TYPE, H2), /* Z + (N - XL) <= Y */ append(H1, H2, HL), sort(HL, Hs). /* So: Z + N <= X + Y */ try_new_numeric_strategies(Z + N <= Y + X, TYPE, Hs) :- /*[[NewNum_21]]*/ base_rational(N), find_lower_numeric_limit_for(X, XL, TYPE, H1), /* XL <= X */ evaluate_rational_expression(N - XL, A), safe_deduce(Z + A <= Y, TYPE, H2), /* Z + (N - XL) <= Y */ append(H1, H2, HL), sort(HL, Hs). /* So: Z + N <= X + Y */ try_new_numeric_strategies(N + Z <= Y + X, TYPE, Hs) :- /*[[NewNum_22]]*/ base_rational(N), find_lower_numeric_limit_for(X, XL, TYPE, H1), /* XL <= X */ evaluate_rational_expression(N - XL, A), safe_deduce(Z + A <= Y, TYPE, H2), /* Z + (N - XL) <= Y */ append(H1, H2, HL), sort(HL, Hs). 
/* So: Z + N <= X + Y */ try_new_numeric_strategies(X + Y >= Z + N, TYPE, Hs) :- /*[[NewNum_23]]*/ base_rational(N), find_lower_numeric_limit_for(X, XL, TYPE, H1), /* XL <= X */ evaluate_rational_expression(N - XL, A), safe_deduce(Z + A <= Y, TYPE, H2), /* Z + (N - XL) <= Y */ append(H1, H2, HL), sort(HL, Hs). /* So: Z + N <= X + Y */ try_new_numeric_strategies(X + Y >= N + Z, TYPE, Hs) :- /*[[NewNum_24]]*/ base_rational(N), find_lower_numeric_limit_for(X, XL, TYPE, H1), /* XL <= X */ evaluate_rational_expression(N - XL, A), safe_deduce(Z + A <= Y, TYPE, H2), /* Z + (N - XL) <= Y */ append(H1, H2, HL), sort(HL, Hs). /* So: Z + N <= X + Y */ try_new_numeric_strategies(Y + X >= Z + N, TYPE, Hs) :- /*[[NewNum_25]]*/ base_rational(N), find_lower_numeric_limit_for(X, XL, TYPE, H1), /* XL <= X */ evaluate_rational_expression(N - XL, A), safe_deduce(Z + A <= Y, TYPE, H2), /* Z + (N - XL) <= Y */ append(H1, H2, HL), sort(HL, Hs). /* So: Z + N <= X + Y */ try_new_numeric_strategies(Y + X >= N + Z, TYPE, Hs) :- /*[[NewNum_26]]*/ base_rational(N), find_lower_numeric_limit_for(X, XL, TYPE, H1), /* XL <= X */ evaluate_rational_expression(N - XL, A), safe_deduce(Z + A <= Y, TYPE, H2), /* Z + (N - XL) <= Y */ append(H1, H2, HL), sort(HL, Hs). /* So: Z + N <= X + Y */ % Eliminate common terms on either side of an inequality %------------------------------------------------------- % Addition. try_new_numeric_strategies(X + A = Y + A, TYPE, Hs) :- /*[[NewNum_27]]*/ try_to_infer((=), X, Y, TYPE, Hs). try_new_numeric_strategies(X + A = A + Y, TYPE, Hs) :- /*[[NewNum_28]]*/ try_to_infer((=), X, Y, TYPE, Hs). try_new_numeric_strategies(A + X = Y + A, TYPE, Hs) :- /*[[NewNum_29]]*/ try_to_infer((=), X, Y, TYPE, Hs). try_new_numeric_strategies(A + X = A + Y, TYPE, Hs) :- /*[[NewNum_30]]*/ try_to_infer((=), X, Y, TYPE, Hs). try_new_numeric_strategies(X + A <> Y + A, TYPE, Hs) :- /*[[NewNum_31]]*/ try_to_infer((<>), X, Y, TYPE, Hs). try_new_numeric_strategies(X + A <> A + Y, TYPE, Hs) :- /*[[NewNum_32]]*/ try_to_infer((<>), X, Y, TYPE, Hs). try_new_numeric_strategies(A + X <> Y + A, TYPE, Hs) :- /*[[NewNum_33]]*/ try_to_infer((<>), X, Y, TYPE, Hs). try_new_numeric_strategies(A + X <> A + Y, TYPE, Hs) :- /*[[NewNum_34]]*/ try_to_infer((<>), X, Y, TYPE, Hs). try_new_numeric_strategies(X + A <= Y + A, TYPE, Hs) :- /*[[NewNum_35]]*/ try_to_infer((<=), X, Y, TYPE, Hs). try_new_numeric_strategies(X + A <= A + Y, TYPE, Hs) :- /*[[NewNum_36]]*/ try_to_infer((<=), X, Y, TYPE, Hs). try_new_numeric_strategies(A + X <= Y + A, TYPE, Hs) :- /*[[NewNum_37]]*/ try_to_infer((<=), X, Y, TYPE, Hs). try_new_numeric_strategies(A + X <= A + Y, TYPE, Hs) :- /*[[NewNum_38]]*/ try_to_infer((<=), X, Y, TYPE, Hs). try_new_numeric_strategies(X + A >= Y + A, TYPE, Hs) :- /*[[NewNum_39]]*/ try_to_infer((>=), X, Y, TYPE, Hs). try_new_numeric_strategies(X + A >= A + Y, TYPE, Hs) :- /*[[NewNum_40]]*/ try_to_infer((>=), X, Y, TYPE, Hs). try_new_numeric_strategies(A + X >= Y + A, TYPE, Hs) :- /*[[NewNum_41]]*/ try_to_infer((>=), X, Y, TYPE, Hs). try_new_numeric_strategies(A + X >= A + Y, TYPE, Hs) :- /*[[NewNum_42]]*/ try_to_infer((>=), X, Y, TYPE, Hs). try_new_numeric_strategies(X + A < Y + A, TYPE, Hs) :- /*[[NewNum_43]]*/ try_to_infer((<), X, Y, TYPE, Hs). try_new_numeric_strategies(X + A < A + Y, TYPE, Hs) :- /*[[NewNum_44]]*/ try_to_infer((<), X, Y, TYPE, Hs). try_new_numeric_strategies(A + X < Y + A, TYPE, Hs) :- /*[[NewNum_45]]*/ try_to_infer((<), X, Y, TYPE, Hs). 
try_new_numeric_strategies(A + X < A + Y, TYPE, Hs) :- /*[[NewNum_46]]*/ try_to_infer((<), X, Y, TYPE, Hs). try_new_numeric_strategies(X + A > Y + A, TYPE, Hs) :- /*[[NewNum_47]]*/ try_to_infer((>), X, Y, TYPE, Hs). try_new_numeric_strategies(X + A > A + Y, TYPE, Hs) :- /*[[NewNum_48]]*/ try_to_infer((>), X, Y, TYPE, Hs). try_new_numeric_strategies(A + X > Y + A, TYPE, Hs) :- /*[[NewNum_49]]*/ try_to_infer((>), X, Y, TYPE, Hs). try_new_numeric_strategies(A + X > A + Y, TYPE, Hs) :- /*[[NewNum_50]]*/ try_to_infer((>), X, Y, TYPE, Hs). % Subtraction try_new_numeric_strategies(X - A = Y - A, TYPE, Hs) :- /*[[NewNum_51]]*/ try_to_infer((=), X, Y, TYPE, Hs). try_new_numeric_strategies(A - X = A - Y, TYPE, Hs) :- /*[[NewNum_52]]*/ try_to_infer((=), Y, X, TYPE, Hs). /* order reversal deliberate */ try_new_numeric_strategies(X - A <> Y - A, TYPE, Hs) :- /*[[NewNum_53]]*/ try_to_infer((<>), X, Y, TYPE, Hs). try_new_numeric_strategies(A - X <> A - Y, TYPE, Hs) :- /*[[NewNum_54]]*/ try_to_infer((<>), Y, X, TYPE, Hs). /* order reversal deliberate */ try_new_numeric_strategies(X - A <= Y - A, TYPE, Hs) :- /*[[NewNum_55]]*/ try_to_infer((<=), X, Y, TYPE, Hs). try_new_numeric_strategies(A - X <= A - Y, TYPE, Hs) :- /*[[NewNum_56]]*/ try_to_infer((<=), Y, X, TYPE, Hs). /* order reversal deliberate */ try_new_numeric_strategies(X - A >= Y - A, TYPE, Hs) :- /*[[NewNum_57]]*/ try_to_infer((>=), X, Y, TYPE, Hs). try_new_numeric_strategies(A - X >= A - Y, TYPE, Hs) :- /*[[NewNum_58]]*/ try_to_infer((>=), Y, X, TYPE, Hs). /* order reversal deliberate */ try_new_numeric_strategies(X - A < Y - A, TYPE, Hs) :- /*[[NewNum_59]]*/ try_to_infer((<), X, Y, TYPE, Hs). try_new_numeric_strategies(A - X < A - Y, TYPE, Hs) :- /*[[NewNum_60]]*/ try_to_infer((<), Y, X, TYPE, Hs). /* order reversal deliberate */ try_new_numeric_strategies(X - A > Y - A, TYPE, Hs) :- /*[[NewNum_61]]*/ try_to_infer((>), X, Y, TYPE, Hs). try_new_numeric_strategies(A - X > A - Y, TYPE, Hs) :- /*[[NewNum_62]]*/ try_to_infer((>), Y, X, TYPE, Hs). /* order reversal deliberate */ % Multiplication by a constant try_new_numeric_strategies(X * N = Y * N, TYPE, Hs) :- /*[[NewNum_63]]*/ base_rational(N), !, ( N = 0, !, Hs = [] ; simplify(N<>0, true), !, try_to_infer((=), X, Y, TYPE, Hs) ), !. try_new_numeric_strategies(X * N = N * Y, TYPE, Hs) :- /*[[NewNum_64]]*/ base_rational(N), !, ( N = 0, !, Hs = [] ; simplify(N<>0, true), !, try_to_infer((=), X, Y, TYPE, Hs) ), !. try_new_numeric_strategies(N * X = Y * N, TYPE, Hs) :- /*[[NewNum_65]]*/ base_rational(N), !, ( N = 0, !, Hs = [] ; simplify(N<>0, true), !, try_to_infer((=), X, Y, TYPE, Hs) ), !. try_new_numeric_strategies(N * X = N * Y, TYPE, Hs) :- /*[[NewNum_66]]*/ base_rational(N), !, ( N = 0, !, Hs = [] ; simplify(N<>0, true), !, try_to_infer((=), X, Y, TYPE, Hs) ), !. try_new_numeric_strategies(X = N * X, TYPE, Hs) :- /*[[NewNum_67]]*/ base_rational(N), ( simplify(N = 1, true), Hs = [] ; try_to_infer((=), X, 0, TYPE, Hs) ), !. try_new_numeric_strategies(N * X = X, TYPE, Hs) :- /*[[NewNum_68]]*/ base_rational(N), ( simplify(N = 1, true), Hs = [] ; try_to_infer((=), X, 0, TYPE, Hs) ), !. try_new_numeric_strategies(X = X * N, TYPE, Hs) :- /*[[NewNum_69]]*/ base_rational(N), ( simplify(N = 1, true), Hs = [] ; try_to_infer((=), X, 0, TYPE, Hs) ), !. try_new_numeric_strategies(X * N = X, TYPE, Hs) :- /*[[NewNum_70]]*/ base_rational(N), ( simplify(N = 1, true), Hs = [] ; try_to_infer((=), X, 0, TYPE, Hs) ), !. 
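% -----------------------------------------------------------------------------
% Illustrative sketch (not part of the Simplifier; kept in a comment so the
% clauses of try_new_numeric_strategies/3 stay contiguous): the "eliminate
% common terms" rules above all reduce a goal about two compound sides to a
% goal about the differing sub-terms, which try_to_infer/5 then attempts.
% The mapping below is hypothetical and uses standard =< in place of FDL's <=.
%
%   cancel_common_term(X + A =< Y + A, X =< Y).    % as in NewNum_35
%   cancel_common_term(A - X =< A - Y, Y =< X).    % order reversal, as in NewNum_56
%
%   ?- cancel_common_term(p + c =< q + c, Reduced).   % Reduced = (p =< q)
% -----------------------------------------------------------------------------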
try_new_numeric_strategies(X * N <> Y * N, TYPE, Hs) :- /*[[NewNum_71]]*/ base_rational(N), !, ( N = 0, !, fail ; simplify(N<>0, true), !, try_to_infer((<>), X, Y, TYPE, Hs) ), !. try_new_numeric_strategies(X * N <> N * Y, TYPE, Hs) :- /*[[NewNum_72]]*/ base_rational(N), !, ( N = 0, !, fail ; simplify(N<>0, true), !, try_to_infer((<>), X, Y, TYPE, Hs) ), !. try_new_numeric_strategies(N * X <> Y * N, TYPE, Hs) :- /*[[NewNum_73]]*/ base_rational(N), !, ( N = 0, !, fail ; simplify(N<>0, true), !, try_to_infer((<>), X, Y, TYPE, Hs) ), !. try_new_numeric_strategies(N * X <> N * Y, TYPE, Hs) :- /*[[NewNum_74]]*/ base_rational(N), !, ( N = 0, !, fail ; simplify(N<>0, true), !, try_to_infer((<>), X, Y, TYPE, Hs) ), !. try_new_numeric_strategies(X * N <= Y * N, TYPE, Hs) :- /*[[NewNum_75]]*/ base_rational(N), !, ( N = 0, !, Hs = [] ; simplify(N > 0, true), !, try_to_infer((<=), X, Y, TYPE, Hs) ; simplify(N < 0, true), !, try_to_infer((<=), Y, X, TYPE, Hs) /* order reversal deliberate */ ), !. try_new_numeric_strategies(X * N <= N * Y, TYPE, Hs) :- /*[[NewNum_76]]*/ base_rational(N), !, ( N = 0, !, Hs = [] ; simplify(N > 0, true), !, try_to_infer((<=), X, Y, TYPE, Hs) ; simplify(N < 0, true), !, try_to_infer((<=), Y, X, TYPE, Hs) /* order reversal deliberate */ ), !. try_new_numeric_strategies(N * X <= Y * N, TYPE, Hs) :- /*[[NewNum_77]]*/ base_rational(N), !, ( N = 0, !, Hs = [] ; simplify(N > 0, true), !, try_to_infer((<=), X, Y, TYPE, Hs) ; simplify(N < 0, true), !, try_to_infer((<=), Y, X, TYPE, Hs) /* order reversal deliberate */ ), !. try_new_numeric_strategies(N * X <= N * Y, TYPE, Hs) :- /*[[NewNum_78]]*/ base_rational(N), !, ( N = 0, !, Hs = [] ; simplify(N > 0, true), !, try_to_infer((<=), X, Y, TYPE, Hs) ; simplify(N < 0, true), !, try_to_infer((<=), Y, X, TYPE, Hs) /* order reversal deliberate */ ), !. try_new_numeric_strategies(X * N >= Y * N, TYPE, Hs) :- /*[[NewNum_79]]*/ base_rational(N), !, ( N = 0, !, Hs = [] ; simplify(N > 0, true), !, try_to_infer((>=), X, Y, TYPE, Hs) ; simplify(N < 0, true), !, try_to_infer((>=), Y, X, TYPE, Hs) /* order reversal deliberate */ ), !. try_new_numeric_strategies(X * N >= N * Y, TYPE, Hs) :- /*[[NewNum_80]]*/ base_rational(N), !, ( N = 0, !, Hs = [] ; simplify(N > 0, true), !, try_to_infer((>=), X, Y, TYPE, Hs) ; simplify(N < 0, true), !, try_to_infer((>=), Y, X, TYPE, Hs) /* order reversal deliberate */ ), !. try_new_numeric_strategies(N * X >= Y * N, TYPE, Hs) :- /*[[NewNum_81]]*/ base_rational(N), !, ( N = 0, !, Hs = [] ; simplify(N > 0, true), !, try_to_infer((>=), X, Y, TYPE, Hs) ; simplify(N < 0, true), !, try_to_infer((>=), Y, X, TYPE, Hs) /* order reversal deliberate */ ), !. try_new_numeric_strategies(N * X >= N * Y, TYPE, Hs) :- /*[[NewNum_82]]*/ base_rational(N), !, ( N = 0, !, Hs = [] ; simplify(N > 0, true), !, try_to_infer((>=), X, Y, TYPE, Hs) ; simplify(N < 0, true), !, try_to_infer((>=), Y, X, TYPE, Hs) /* order reversal deliberate */ ), !. try_new_numeric_strategies(X * N < Y * N, TYPE, Hs) :- /*[[NewNum_83]]*/ base_rational(N), !, ( N = 0, !, fail ; simplify(N > 0, true), !, try_to_infer((<), X, Y, TYPE, Hs) ; simplify(N < 0, true), !, try_to_infer((<), Y, X, TYPE, Hs) /* order reversal deliberate */ ), !. try_new_numeric_strategies(X * N < N * Y, TYPE, Hs) :- /*[[NewNum_84]]*/ base_rational(N), !, ( N = 0, !, fail ; simplify(N > 0, true), !, try_to_infer((<), X, Y, TYPE, Hs) ; simplify(N < 0, true), !, try_to_infer((<), Y, X, TYPE, Hs) /* order reversal deliberate */ ), !. 
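% -----------------------------------------------------------------------------
% Illustrative sketch (not the Simplifier's code; kept in a comment so the
% clauses of try_new_numeric_strategies/3 stay contiguous): for a literal
% multiplier N, the NewNum_75..NewNum_90 rules above choose the reduced goal
% for X*N <= Y*N according to the sign of N.  Hypothetical predicate, standard
% is/2 sign tests in place of simplify/2, and =< in place of FDL's <=.
%
%   reduced_goal_for_le(N, _X, _Y, true)   :- N =:= 0, !.   % 0 <= 0 always holds
%   reduced_goal_for_le(N,  X,  Y, X =< Y) :- N > 0, !.
%   reduced_goal_for_le(N,  X,  Y, Y =< X) :- N < 0.        % order reversal
%
%   ?- reduced_goal_for_le(-3, p, q, G).   % G = (q =< p)
% -----------------------------------------------------------------------------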
try_new_numeric_strategies(N * X < Y * N, TYPE, Hs) :- /*[[NewNum_85]]*/ base_rational(N), !, ( N = 0, !, fail ; simplify(N > 0, true), !, try_to_infer((<), X, Y, TYPE, Hs) ; simplify(N < 0, true), !, try_to_infer((<), Y, X, TYPE, Hs) /* order reversal deliberate */ ), !. try_new_numeric_strategies(N * X < N * Y, TYPE, Hs) :- /*[[NewNum_86]]*/ base_rational(N), !, ( N = 0, !, fail ; simplify(N > 0, true), !, try_to_infer((<), X, Y, TYPE, Hs) ; simplify(N < 0, true), !, try_to_infer((<), Y, X, TYPE, Hs) /* order reversal deliberate */ ), !. try_new_numeric_strategies(X * N > Y * N, TYPE, Hs) :- /*[[NewNum_87]]*/ base_rational(N), !, ( N = 0, !, fail ; simplify(N > 0, true), !, try_to_infer((>), X, Y, TYPE, Hs) ; simplify(N < 0, true), !, try_to_infer((>), Y, X, TYPE, Hs) /* order reversal deliberate */ ), !. try_new_numeric_strategies(X * N > N * Y, TYPE, Hs) :- /*[[NewNum_88]]*/ base_rational(N), !, ( N = 0, !, fail ; simplify(N > 0, true), !, try_to_infer((>), X, Y, TYPE, Hs) ; simplify(N < 0, true), !, try_to_infer((>), Y, X, TYPE, Hs) /* order reversal deliberate */ ), !. try_new_numeric_strategies(N * X > Y * N, TYPE, Hs) :- /*[[NewNum_89]]*/ base_rational(N), !, ( N = 0, !, fail ; simplify(N > 0, true), !, try_to_infer((>), X, Y, TYPE, Hs) ; simplify(N < 0, true), !, try_to_infer((>), Y, X, TYPE, Hs) /* order reversal deliberate */ ), !. try_new_numeric_strategies(N * X > N * Y, TYPE, Hs) :- /*[[NewNum_90]]*/ base_rational(N), !, ( N = 0, !, fail ; simplify(N > 0, true), !, try_to_infer((>), X, Y, TYPE, Hs) ; simplify(N < 0, true), !, try_to_infer((>), Y, X, TYPE, Hs) /* order reversal deliberate */ ), !. % Multiplication. try_new_numeric_strategies(X * A = Y * A, TYPE, Hs) :- /*[[NewNum_91]]*/ ( safe_deduce(A = 0, TYPE, Hs) ; try_to_infer((=), X, Y, TYPE, Hs) ), !. try_new_numeric_strategies(X * A = A * Y, TYPE, Hs) :- /*[[NewNum_92]]*/ ( safe_deduce(A = 0, TYPE, Hs) ; try_to_infer((=), X, Y, TYPE, Hs) ), !. try_new_numeric_strategies(A * X = Y * A, TYPE, Hs) :- /*[[NewNum_93]]*/ ( safe_deduce(A = 0, TYPE, Hs) ; try_to_infer((=), X, Y, TYPE, Hs) ), !. try_new_numeric_strategies(A * X = A * Y, TYPE, Hs) :- /*[[NewNum_94]]*/ ( safe_deduce(A = 0, TYPE, Hs) ; try_to_infer((=), X, Y, TYPE, Hs) ), !. try_new_numeric_strategies(X * A <> Y * A, TYPE, Hs) :- /*[[NewNum_95]]*/ ( safe_deduce(A <> 0, TYPE, H1), !, try_to_infer((<>), X, Y, TYPE, H2) ; safe_deduce(A = 0, TYPE, Hs), !, fail ), append(H1, H2, HL), sort(HL, Hs), !. try_new_numeric_strategies(X * A <> A * Y, TYPE, Hs) :- /*[[NewNum_96]]*/ ( safe_deduce(A <> 0, TYPE, H1), !, try_to_infer((<>), X, Y, TYPE, H2) ; safe_deduce(A = 0, TYPE, Hs), !, fail ), append(H1, H2, HL), sort(HL, Hs), !. try_new_numeric_strategies(A * X <> Y * A, TYPE, Hs) :- /*[[NewNum_97]]*/ ( safe_deduce(A <> 0, TYPE, H1), !, try_to_infer((<>), X, Y, TYPE, H2) ; safe_deduce(A = 0, TYPE, Hs), !, fail ), append(H1, H2, HL), sort(HL, Hs), !. try_new_numeric_strategies(A * X <> A * Y, TYPE, Hs) :- /*[[NewNum_98]]*/ ( safe_deduce(A <> 0, TYPE, H1), !, try_to_infer((<>), X, Y, TYPE, H2) ; safe_deduce(A = 0, TYPE, Hs), !, fail ), append(H1, H2, HL), sort(HL, Hs), !. try_new_numeric_strategies(X * A <= Y * A, TYPE, Hs) :- /*[[NewNum_99]]*/ ( safe_deduce(A = 0, TYPE, H2), !, H1 = [] ; safe_deduce(A >= 0, TYPE, H1), !, try_to_infer((<=), X, Y, TYPE, H2) ; safe_deduce(A <= 0, TYPE, H1), !, try_to_infer((<=), Y, X, TYPE, H2) /* order reversal deliberate */ ), append(H1, H2, HL), sort(HL, Hs), !. 
try_new_numeric_strategies(X * A <= A * Y, TYPE, Hs) :- /*[[NewNum_100]]*/ ( safe_deduce(A = 0, TYPE, H2), !, H1 = [] ; safe_deduce(A >= 0, TYPE, H1), !, try_to_infer((<=), X, Y, TYPE, H2) ; safe_deduce(A <= 0, TYPE, H1), !, try_to_infer((<=), Y, X, TYPE, H2) /* order reversal deliberate */ ), append(H1, H2, HL), sort(HL, Hs), !. try_new_numeric_strategies(A * X <= Y * A, TYPE, Hs) :- /*[[NewNum_101]]*/ ( safe_deduce(A = 0, TYPE, H2), !, H1 = [] ; safe_deduce(A >= 0, TYPE, H1), !, try_to_infer((<=), X, Y, TYPE, H2) ; safe_deduce(A <= 0, TYPE, H1), !, try_to_infer((<=), Y, X, TYPE, H2) /* order reversal deliberate */ ), append(H1, H2, HL), sort(HL, Hs), !. try_new_numeric_strategies(A * X <= A * Y, TYPE, Hs) :- /*[[NewNum_102]]*/ ( safe_deduce(A = 0, TYPE, H2), !, H1 = [] ; safe_deduce(A >= 0, TYPE, H1), !, try_to_infer((<=), X, Y, TYPE, H2) ; safe_deduce(A <= 0, TYPE, H1), !, try_to_infer((<=), Y, X, TYPE, H2) /* order reversal deliberate */ ), append(H1, H2, HL), sort(HL, Hs), !. try_new_numeric_strategies(X * A >= Y * A, TYPE, Hs) :- /*[[NewNum_103]]*/ ( safe_deduce(A = 0, TYPE, H2), !, H1 = [] ; safe_deduce(A >= 0, TYPE, H1), !, try_to_infer((>=), X, Y, TYPE, H2) ; safe_deduce(A <= 0, TYPE, H1), !, try_to_infer((>=), Y, X, TYPE, H2) /* order reversal deliberate */ ), append(H1, H2, HL), sort(HL, Hs), !. try_new_numeric_strategies(X * A >= A * Y, TYPE, Hs) :- /*[[NewNum_104]]*/ ( safe_deduce(A = 0, TYPE, H2), !, H1 = [] ; safe_deduce(A >= 0, TYPE, H1), !, try_to_infer((>=), X, Y, TYPE, H2) ; safe_deduce(A <= 0, TYPE, H1), !, try_to_infer((>=), Y, X, TYPE, H2) /* order reversal deliberate */ ), append(H1, H2, HL), sort(HL, Hs), !. try_new_numeric_strategies(A * X >= Y * A, TYPE, Hs) :- /*[[NewNum_105]]*/ ( safe_deduce(A = 0, TYPE, H2), !, H1 = [] ; safe_deduce(A >= 0, TYPE, H1), !, try_to_infer((>=), X, Y, TYPE, H2) ; safe_deduce(A <= 0, TYPE, H1), !, try_to_infer((>=), Y, X, TYPE, H2) /* order reversal deliberate */ ), append(H1, H2, HL), sort(HL, Hs), !. try_new_numeric_strategies(A * X >= A * Y, TYPE, Hs) :- /*[[NewNum_106]]*/ ( safe_deduce(A = 0, TYPE, H2), !, H1 = [] ; safe_deduce(A >= 0, TYPE, H1), !, try_to_infer((>=), X, Y, TYPE, H2) ; safe_deduce(A <= 0, TYPE, H1), !, try_to_infer((>=), Y, X, TYPE, H2) /* order reversal deliberate */ ), append(H1, H2, HL), sort(HL, Hs), !. try_new_numeric_strategies(X * A < Y * A, TYPE, Hs) :- /*[[NewNum_107]]*/ ( safe_deduce(A = 0, TYPE, H2), !, fail ; safe_deduce(A > 0, TYPE, H1), !, try_to_infer((<), X, Y, TYPE, H2) ; safe_deduce(A < 0, TYPE, H1), !, try_to_infer((<), Y, X, TYPE, H2) /* order reversal deliberate */ ), append(H1, H2, HL), sort(HL, Hs), !. try_new_numeric_strategies(X * A < A * Y, TYPE, Hs) :- /*[[NewNum_108]]*/ ( safe_deduce(A = 0, TYPE, H2), !, fail ; safe_deduce(A > 0, TYPE, H1), !, try_to_infer((<), X, Y, TYPE, H2) ; safe_deduce(A < 0, TYPE, H1), !, try_to_infer((<), Y, X, TYPE, H2) /* order reversal deliberate */ ), append(H1, H2, HL), sort(HL, Hs), !. try_new_numeric_strategies(A * X < Y * A, TYPE, Hs) :- /*[[NewNum_109]]*/ ( safe_deduce(A = 0, TYPE, H2), !, fail ; safe_deduce(A > 0, TYPE, H1), !, try_to_infer((<), X, Y, TYPE, H2) ; safe_deduce(A < 0, TYPE, H1), !, try_to_infer((<), Y, X, TYPE, H2) /* order reversal deliberate */ ), append(H1, H2, HL), sort(HL, Hs), !. 
try_new_numeric_strategies(A * X < A * Y, TYPE, Hs) :- /*[[NewNum_110]]*/ ( safe_deduce(A = 0, TYPE, H2), !, fail ; safe_deduce(A > 0, TYPE, H1), !, try_to_infer((<), X, Y, TYPE, H2) ; safe_deduce(A < 0, TYPE, H1), !, try_to_infer((<), Y, X, TYPE, H2) /* order reversal deliberate */ ), append(H1, H2, HL), sort(HL, Hs), !. try_new_numeric_strategies(X * A > Y * A, TYPE, Hs) :- /*[[NewNum_111]]*/ ( safe_deduce(A = 0, TYPE, H2), !, fail ; safe_deduce(A > 0, TYPE, H1), !, try_to_infer((>), X, Y, TYPE, H2) ; safe_deduce(A < 0, TYPE, H1), !, try_to_infer((>), Y, X, TYPE, H2) /* order reversal deliberate */ ), append(H1, H2, HL), sort(HL, Hs), !. try_new_numeric_strategies(X * A > A * Y, TYPE, Hs) :- /*[[NewNum_112]]*/ ( safe_deduce(A = 0, TYPE, H2), !, fail ; safe_deduce(A > 0, TYPE, H1), !, try_to_infer((>), X, Y, TYPE, H2) ; safe_deduce(A < 0, TYPE, H1), !, try_to_infer((>), Y, X, TYPE, H2) /* order reversal deliberate */ ), append(H1, H2, HL), sort(HL, Hs), !. try_new_numeric_strategies(A * X > Y * A, TYPE, Hs) :- /*[[NewNum_113]]*/ ( safe_deduce(A = 0, TYPE, H2), !, fail ; safe_deduce(A > 0, TYPE, H1), !, try_to_infer((>), X, Y, TYPE, H2) ; safe_deduce(A < 0, TYPE, H1), !, try_to_infer((>), Y, X, TYPE, H2) /* order reversal deliberate */ ), append(H1, H2, HL), sort(HL, Hs), !. try_new_numeric_strategies(A * X > A * Y, TYPE, Hs) :- /*[[NewNum_114]]*/ ( safe_deduce(A = 0, TYPE, H2), !, fail ; safe_deduce(A > 0, TYPE, H1), !, try_to_infer((>), X, Y, TYPE, H2) ; safe_deduce(A < 0, TYPE, H1), !, try_to_infer((>), Y, X, TYPE, H2) /* order reversal deliberate */ ), append(H1, H2, HL), sort(HL, Hs), !. % Integer division. try_new_numeric_strategies(X div N = Y div N, integer, Hs) :- /*[[NewNum_124]]*/ int(N), simplify(N=0, false), !, safe_deduce(X = Y, integer, Hs). try_new_numeric_strategies(X div N <= Y div N, integer, Hs) :- /*[[NewNum_125]]*/ int(N), simplify(N>0, true), !, safe_deduce(X <= Y, integer, Hs). try_new_numeric_strategies(X div N <= Y div N, integer, Hs) :- /*[[NewNum_126]]*/ int(N), simplify(N<0, true), !, safe_deduce(X >= Y, integer, Hs). try_new_numeric_strategies(X div N >= Y div N, integer, Hs) :- /*[[NewNum_127]]*/ int(N), simplify(N>0, true), !, safe_deduce(X >= Y, integer, Hs). try_new_numeric_strategies(X div N >= Y div N, integer, Hs) :- /*[[NewNum_128]]*/ int(N), simplify(N<0, true), !, safe_deduce(X <= Y, integer, Hs). % Extra equality rules for integers %---------------------------------- try_new_numeric_strategies(X = N, integer, Hs) :- /*[[NewNum_115]]*/ int(N), !, ( strict_deduce(X >= N, integer, H1) ; M1 iss N-1, strict_deduce(X > M1, integer, H1) ), ( strict_deduce(X <= N, integer, H2) ; M2 iss N+1, strict_deduce(X < M2, integer, H2) ), !, append(H1, H2, HL), sort(HL, Hs). try_new_numeric_strategies(N = X, integer, Hs) :- /*[[NewNum_116]]*/ int(N), !, ( strict_deduce(X >= N, integer, H1) ; M1 iss N-1, strict_deduce(X > M1, integer, H1) ), ( strict_deduce(X <= N, integer, H2) ; M2 iss N+1, strict_deduce(X < M2, integer, H2) ), !, append(H1, H2, HL), sort(HL, Hs). try_new_numeric_strategies(X+1 < Y, integer, Hs) :- /*[[NewNum_118]*/ ( strict_deduce(X < Y, integer, H1) ; strict_deduce(X+1 <= Y, integer, H1) ), ( strict_deduce(X+1 <> Y, integer, H2) ; strict_deduce(X <> Y-1, integer, H2) ), !, append(H1, H2, HL), sort(HL, Hs). 
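% -----------------------------------------------------------------------------
% Illustrative sketch of the NewNum_115/NewNum_116 idea above (not the
% Simplifier's code; kept in a comment so the clauses of
% try_new_numeric_strategies/3 stay contiguous): over the integers, X = N
% follows once both non-strict bounds have been established.  Hypothetical
% predicate, standard arithmetic comparisons.
%
%   equals_by_squeeze(X, N) :- X >= N, X =< N.
%
%   ?- equals_by_squeeze(7, 7).   % succeeds
%   ?- equals_by_squeeze(7, 8).   % fails
% -----------------------------------------------------------------------------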
try_new_numeric_strategies(X < Y-1, integer, Hs) :- /*[[NewNum_119]*/ ( strict_deduce(X < Y, integer, H1) ; strict_deduce(X <= Y-1, integer, H1) ), ( strict_deduce(X <> Y-1, integer, H2) ; strict_deduce(X+1 <> Y, integer, H2) ), !, append(H1, H2, HL), sort(HL, Hs). try_new_numeric_strategies(Y > X+1, integer, Hs) :- /*[[NewNum_120]*/ ( strict_deduce(X < Y, integer, H1) ; strict_deduce(X+1 <= Y, integer, H1) ), ( strict_deduce(X+1 <> Y, integer, H2) ; strict_deduce(X <> Y-1, integer, H2) ), !, append(H1, H2, HL), sort(HL, Hs). try_new_numeric_strategies(Y-1 > X, integer, Hs) :- /*[[NewNum_121]*/ ( strict_deduce(X < Y, integer, H1) ; strict_deduce(X <= Y-1, integer, H1) ), ( strict_deduce(X <> Y-1, integer, H2) ; strict_deduce(X+1 <> Y, integer, H2) ), !, append(H1, H2, HL), sort(HL, Hs). % Extra inequality rules for common quantifier bound proofs %---------------------------------------------------------- try_new_numeric_strategies(X <= Y, integer, Hs) :- /*[[NewNum_117]]*/ \+ int(X), \+ int(Y), ( strict_deduce(X <= Y+1, integer, H1) ; strict_deduce(X-1 <= Y, integer, H1) ; strict_deduce(X < Y+2, integer, H1) ; strict_deduce(X-2 < Y, integer, H1) ), !, ( strict_deduce(X <> Y+1, integer, H2) ; strict_deduce(X-1 <> Y, integer, H2) ), !, append(H1, H2, HL), sort(HL, Hs). try_new_numeric_strategies(X <= Y, integer, Hs) :- /*[[NewNum_122]]*/ \+ int(X), int(Y), simplify(Y+1, Yplus1), ( strict_deduce(X <= Yplus1, integer, H1) ; simplify(Y+2, Yplus2), strict_deduce(X < Yplus2, integer, H1) ), !, strict_deduce(X <> Yplus1, integer, H2), !, append(H1, H2, HL), sort(HL, Hs). try_new_numeric_strategies(X <= Y, integer, Hs) :- /*[[NewNum_123]]*/ int(X), \+ int(Y), simplify(X-1, Xminus1), ( strict_deduce(Xminus1 <= Y, integer, H1) ; simplify(X-2, Xminus2), strict_deduce(Xminus2 < Y, integer, H1) ), !, strict_deduce(Xminus1 <> Y, integer, H2), !, append(H1, H2, HL), sort(HL, Hs). %=============================================================================== % find_upper_numeric_limit_for(+EXPRESSION, -NUM, +TYPE, -HYPS_USED). %------------------------------------------------------------------------------- % Determine an upper numeric limit for EXPRESSION of provided type TYPE as % NUM, reporting hypotheses used as HYPS_USED. %=============================================================================== find_upper_numeric_limit_for(X, U, Type, Hs) :- known_upper_numeric_limit(X, U, Type, Hs), /* first, look to see if we've already found it */ !. find_upper_numeric_limit_for(X, U, Type, Hs) :- /* if not, and it's an atom, calculate it */ atom(X), !, calculate_known_upper_limit_for(X, U, Type, Hs). find_upper_numeric_limit_for(N, N, integer, []) :- /* if not, and it's an integer, look no further */ int(N), /*formerly [[UppLim_1]]*/ !. find_upper_numeric_limit_for(N, N, real, []) :- /* if not, and it's a numeric literal, look no further */ base_rational(N), /*formerly [[UppLim_1]]*/ !. find_upper_numeric_limit_for(N, U, integer, []) :- /* if it's a numeric literal that isn't an integer, */ base_rational(N), /* e.g. 1/2, find the largest integer below it. */ !, find_largest_integer_literal_below(N, U). /* otherwise, (1) apply strategies, (2) use hypotheses, and (3) combine the results to get the least upper bound */ find_upper_numeric_limit_for(X, U, Type, Hs) :- do_find_upper_numeric_limit_for(X, U1, Type, H1s), assertz(candidate_upper(X, U1, Type, H1s)), calculate_known_upper_limit_for(X, U, Type, Hs), !. 
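% -----------------------------------------------------------------------------
% Illustrative sketch of the final "combine the results" step of
% find_upper_numeric_limit_for/4 above (not the Simplifier's code): of all
% candidate upper bounds recorded for an expression, the least one is the
% tightest.  tightest_upper/2 is hypothetical and works on a plain list rather
% than on the candidate_upper/4 database.
%
%   tightest_upper([U], U).
%   tightest_upper([U|Us], Best) :- tightest_upper(Us, Rest), Best is min(U, Rest).
%
%   ?- tightest_upper([10, 7, 12], U).   % U = 7
% -----------------------------------------------------------------------------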
%=============================================================================== % do_find_upper_numeric_limit_for(+EXPRESSION, -NUM, +TYPE, -HYPS_USED). %------------------------------------------------------------------------------- % Determine an upper numeric limit for EXPRESSION of provided type TYPE as % NUM, reporting hypotheses used as HYPS_USED. %=============================================================================== % EXPRESSION is a numeric literal %-------------------------------- % This case has already been considered by the parent clause above. % EXPRESSION is -X (unary minus) %------------------------------- do_find_upper_numeric_limit_for(-X, U, TYPE, Hs) :- /*[[UppLim_2]]*/ find_lower_numeric_limit_for(X, L, TYPE, Hs), /* X >= L, so -X <= -L */ evaluate_rational_expression(-L, U). /* [Works for int and real] */ % EXPRESSION is X+Y (addition) %----------------------------- do_find_upper_numeric_limit_for(X+N, U, integer, Hs) :- /*[[UppLim_3(int)]]*/ int(N), !, find_upper_numeric_limit_for(X, XU, integer, Hs), U iss XU+N. do_find_upper_numeric_limit_for(X+N, U, real, Hs) :- /*[[UppLim_3(real)]]*/ base_rational(N), !, find_upper_numeric_limit_for(X, XU, real, Hs), evaluate_rational_expression(XU+N, U). do_find_upper_numeric_limit_for(N+X, U, integer, Hs) :- /*[[UppLim_4(int)]]*/ int(N), !, find_upper_numeric_limit_for(X, XU, integer, Hs), U iss XU+N. do_find_upper_numeric_limit_for(N+X, U, real, Hs) :- /*[[UppLim_4(real)]]*/ base_rational(N), !, find_upper_numeric_limit_for(X, XU, real, Hs), evaluate_rational_expression(XU+N, U). do_find_upper_numeric_limit_for(X+Y, U, TYPE, Hs) :- /*[[UppLim_5]]*/ find_upper_numeric_limit_for(X, XU, TYPE, H1), /* X <= XU, and */ find_upper_numeric_limit_for(Y, YU, TYPE, H2), /* Y <= YU, so */ evaluate_rational_expression(XU+YU, U), /* X+Y <= XU+YU */ append(H1, H2, HL), /* [Works for int and real] */ sort(HL, Hs). % EXPRESSION is X - X div Y [special case] %----------------------------------------- do_find_upper_numeric_limit_for(X - X div N, U, integer, Hs) :- /*[[UppLim_6]]*/ int(N), simplify(N <> 0, true), !, find_upper_numeric_limit_for(X, XU, integer, Hs), U iss XU - XU div N. do_find_upper_numeric_limit_for(X - X div Y, U, integer, Hs) :- /*[[UppLim_7]]*/ ( find_lower_numeric_limit_for(Y, YL, integer, H1), simplify(YL > 0, true) ; safe_deduce(Y > 0, integer, H1) ), ( find_lower_numeric_limit_for(X, XL, integer, H2), simplify(XL >= 0, true) ; safe_deduce(X >= 0, integer, H2) ), find_upper_numeric_limit_for(X, XU, integer, H3), find_upper_numeric_limit_for(Y, YU, integer, H4), /* use biggest Y to get smallest X div Y */ U iss XU - XU div YU, append(H3, H4, H3to4), append(H2, H3to4, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % EXPRESSION is Y - (X - X div Y * Y) [special case] %--------------------------------------------------- do_find_upper_numeric_limit_for(Y - (X - X div Y * Y), U, integer, Hs) :- /*[[UppLim_28a]]*/ ( /* X +ve, Y +ve case only. */ find_lower_numeric_limit_for(X, XL, integer, H1), /* In this case, the upper limit */ simplify(XL >= 0, true) /* is YU */ ; safe_deduce(X >= 0, integer, H1) ), ( find_lower_numeric_limit_for(Y, YL, integer, H2), simplify(YL > 0, true) ; safe_deduce(Y >= 1, integer, H2) ; safe_deduce(Y > 0, integer, H2) ), find_upper_numeric_limit_for(Y, U, integer, H3), simplify(U > 0, true), /* sanity check */ append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). 
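% -----------------------------------------------------------------------------
% Illustrative interval arithmetic behind UppLim_2 and UppLim_5 above (not the
% Simplifier's code; kept in a comment so the UppLim_28a/28b clauses stay
% together): if X lies in XL..XU and Y in YL..YU, then -X <= -XL and
% X+Y <= XU+YU.  Hypothetical predicates, standard is/2 in place of iss and
% evaluate_rational_expression/2.
%
%   upper_of_negation(XL, U) :- U is -XL.
%   upper_of_sum(XU, YU, U)  :- U is XU + YU.
%
%   ?- upper_of_sum(3, 9, U).   % U = 12
% -----------------------------------------------------------------------------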
do_find_upper_numeric_limit_for(Y - (X - Y * (X div Y)), U, integer, Hs) :- /*[[UppLim_28b]]*/ ( find_lower_numeric_limit_for(X, XL, integer, H1), simplify(XL >= 0, true) ; safe_deduce(X >= 0, integer, H1) ), ( find_lower_numeric_limit_for(Y, YL, integer, H2), simplify(YL > 0, true) ; safe_deduce(Y >= 1, integer, H2) ; safe_deduce(Y > 0, integer, H2) ), find_upper_numeric_limit_for(Y, U, integer, H3), simplify(U > 0, true), /* sanity check */ append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % EXPRESSION is X - X div Y * Y [special case] %--------------------------------------------- do_find_upper_numeric_limit_for(X - X div Y * Y, U, integer, Hs) :- /*[[UppLim_29a]]*/ ( /* X +ve, Y +ve case. */ find_lower_numeric_limit_for(X, XL, integer, H1), /* In this case, the upper limit */ simplify(XL >= 0, true) /* is YU-1 */ ; safe_deduce(X >= 0, integer, H1) ), ( find_lower_numeric_limit_for(Y, YL, integer, H2), simplify(YL > 0, true) ; safe_deduce(Y >= 1, integer, H2) ; safe_deduce(Y > 0, integer, H2) ), find_upper_numeric_limit_for(Y, YU, integer, H3), simplify(YU > 0, true), /* sanity check */ U iss YU - 1, append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). do_find_upper_numeric_limit_for(X - Y * (X div Y), U, integer, Hs) :- /*[[UppLim_29b]]*/ ( find_lower_numeric_limit_for(X, XL, integer, H1), simplify(XL >= 0, true) ; safe_deduce(X >= 0, integer, H1) ), ( find_lower_numeric_limit_for(Y, YL, integer, H2), simplify(YL > 0, true) ; safe_deduce(Y >= 1, integer, H2) ; safe_deduce(Y > 0, integer, H2) ), find_upper_numeric_limit_for(Y, YU, integer, H3), simplify(YU > 0, true), /* sanity check */ U iss YU - 1, append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). do_find_upper_numeric_limit_for(X - X div Y * Y, U, integer, Hs) :- /*[[UppLim_30a]]*/ ( /* X +ve, Y -ve case. */ find_lower_numeric_limit_for(X, XL, integer, H1), /* In this case, the upper limit */ simplify(XL >= 0, true) /* is -YL-1, i.e. -(YL+1). */ ; safe_deduce(X >= 0, integer, H1) ), ( find_upper_numeric_limit_for(Y, YU, integer, H2), simplify(YU < 0, true) ; safe_deduce(Y <= (- 1), integer, H2) ; safe_deduce(Y < 0, integer, H2) ), find_lower_numeric_limit_for(Y, YL, integer, H3), simplify(YL < 0, true), /* sanity check */ U iss -(YL + 1), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). do_find_upper_numeric_limit_for(X - Y* (X div Y), U, integer, Hs) :- /*[[UppLim_30b]]*/ ( find_lower_numeric_limit_for(X, XL, integer, H1), simplify(XL >= 0, true) ; safe_deduce(X >= 0, integer, H1) ), ( find_upper_numeric_limit_for(Y, YU, integer, H2), simplify(YU < 0, true) ; safe_deduce(Y <= (- 1), integer, H2) ; safe_deduce(Y < 0, integer, H2) ), find_lower_numeric_limit_for(Y, YL, integer, H3), simplify(YL < 0, true), /* sanity check */ U iss -(YL + 1), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). do_find_upper_numeric_limit_for(X - X div Y * Y, 0, integer, Hs) :- /*[[UppLim_31a]]*/ ( /* X +ve case (Y's sign irrelevant). */ find_upper_numeric_limit_for(X, XU, integer, H1), /* In this case, the upper limit */ simplify(XU < 0, true) /* is zero, regardless of Y. */ ; safe_deduce(X < 0, integer, H1) ), ( safe_deduce(Y <> 0, integer, H2) ; find_lower_numeric_limit_for(Y, YL, integer, H2), simplify(YL > 0, true) ; find_upper_numeric_limit_for(Y, YU, integer, H2), simplify(YU < 0, true) ), append(H1, H2, HL), sort(HL, Hs). 
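% -----------------------------------------------------------------------------
% Illustrative check of the UppLim_29 bound (not the Simplifier's code; kept in
% a comment so the clauses of do_find_upper_numeric_limit_for/4 stay
% contiguous): for X >= 0 and Y > 0, the remainder X - (X div Y)*Y never
% exceeds Y - 1.  Hypothetical predicate; ISO // stands in for FDL's div
% (both truncate, and they agree for non-negative operands).
%
%   remainder_within_bound(X, Y) :-
%       X >= 0, Y > 0,
%       R is X - (X // Y) * Y,
%       R =< Y - 1.
%
%   ?- remainder_within_bound(17, 5).   % 17 - 3*5 = 2 =< 4
% -----------------------------------------------------------------------------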
do_find_upper_numeric_limit_for(X - Y * (X div Y), 0, integer, Hs) :- /*[[UppLim_31b]]*/ ( find_upper_numeric_limit_for(X, XU, integer, H1), simplify(XU < 0, true) ; safe_deduce(X < 0, integer, H1) ), ( safe_deduce(Y <> 0, integer, H2) ; find_lower_numeric_limit_for(Y, YL, integer, H2), simplify(YL > 0, true) ; find_upper_numeric_limit_for(Y, YU, integer, H2), simplify(YU < 0, true) ), append(H1, H2, HL), sort(HL, Hs). % EXPRESSION is X-Y (subtraction) %-------------------------------- do_find_upper_numeric_limit_for(X-N, U, integer, Hs) :- /*[[UppLim_8(int)]]*/ int(N), !, find_upper_numeric_limit_for(X, XU, integer, Hs), U iss XU-N. do_find_upper_numeric_limit_for(X-N, U, real, Hs) :- /*[[UppLim_8(real)]]*/ base_rational(N), !, find_upper_numeric_limit_for(X, XU, real, Hs), evaluate_rational_expression(XU-N, U). do_find_upper_numeric_limit_for(N-Y, U, integer, Hs) :- /*[[UppLim_9(int)]]*/ int(N), !, find_lower_numeric_limit_for(Y, YL, integer, Hs), /* Y>=YL, so -Y <= -YL, so N-Y<=N-YL */ U iss N-YL. do_find_upper_numeric_limit_for(N-Y, U, real, Hs) :- /*[[UppLim_9(real)]]*/ base_rational(N), !, find_lower_numeric_limit_for(Y, YL, real, Hs), /* Y>=YL, so -Y <= -YL, so N-Y<=N-YL */ evaluate_rational_expression(N-YL, U). do_find_upper_numeric_limit_for(X-Y, U, TYPE, Hs) :- /*[[UppLim_10]]*/ find_upper_numeric_limit_for(X, XU, TYPE, H1), /* X <= XU, and */ find_lower_numeric_limit_for(Y, YL, TYPE, H2), /* Y >= YL, i.e. -Y <= -YL, so */ evaluate_rational_expression(XU-YL, U), /* X-Y <= XU-YL */ append(H1, H2, HL), /* [Works for int and real] */ sort(HL, Hs). % EXPRESSION is X*Y (multiplication) %----------------------------------- do_find_upper_numeric_limit_for(X*N, U, integer, Hs) :- /*[[UppLim_11(int)]]*/ int(N), !, ( simplify(N>0, true), !, find_upper_numeric_limit_for(X, XU, integer, Hs), U iss XU*N ; N = 0, !, U = 0, Hs = [] ; simplify(N<0, true), !, find_lower_numeric_limit_for(X, XL, integer, Hs), U iss XL*N ). do_find_upper_numeric_limit_for(X*N, U, real, Hs) :- /*[[UppLim_11(real)]]*/ base_rational(N), !, ( simplify(N>0, true), !, find_upper_numeric_limit_for(X, XU, real, Hs), evaluate_rational_expression(XU*N, U) ; N = 0, !, U = 0, Hs = [] ; simplify(N<0, true), !, find_lower_numeric_limit_for(X, XL, real, Hs), evaluate_rational_expression(XL*N, U) ). do_find_upper_numeric_limit_for(N*X, U, integer, Hs) :- /*[[UppLim_12(int)]]*/ int(N), !, ( simplify(N>0, true), !, find_upper_numeric_limit_for(X, XU, integer, Hs), U iss XU*N ; N = 0, !, U = 0, Hs = [] ; simplify(N<0, true), !, find_lower_numeric_limit_for(X, XL, integer, Hs), U iss XL*N ). do_find_upper_numeric_limit_for(N*X, U, real, Hs) :- /*[[UppLim_12(real)]]*/ base_rational(N), !, ( simplify(N>0, true), !, find_upper_numeric_limit_for(X, XU, real, Hs), evaluate_rational_expression(XU*N, U) ; N = 0, !, U = 0, Hs = [] ; simplify(N<0, true), !, find_lower_numeric_limit_for(X, XL, real, Hs), evaluate_rational_expression(XL*N, U) ). do_find_upper_numeric_limit_for(X*X, U, TYPE, Hs) :- /*[[UppLim_28]] -- special case */ find_lower_numeric_limit_for(X, XL, TYPE, Hl), /* [Works for both int and real] */ simplify(XL>=0, true), find_upper_numeric_limit_for(X, XU, TYPE, Hu), base_rational(XU), simplify(XU>=XL, true), evaluate_rational_expression(XU*XU, U), append(Hl, Hu, H), sort(H, Hs). 
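% -----------------------------------------------------------------------------
% Illustrative sketch of UppLim_11/UppLim_12 above (not the Simplifier's code;
% kept in a comment so the clauses of do_find_upper_numeric_limit_for/4 stay
% contiguous): with X in XL..XU and a literal multiplier N, the upper bound of
% X*N is XU*N for positive N, 0 for N = 0, and XL*N for negative N.
% Hypothetical predicate, standard is/2.
%
%   upper_of_scaled(_XL,  XU, N, U) :- N > 0, !, U is XU * N.
%   upper_of_scaled(_XL, _XU, N, 0) :- N =:= 0, !.
%   upper_of_scaled( XL, _XU, N, U) :- N < 0, U is XL * N.
%
%   ?- upper_of_scaled(-2, 5, -3, U).   % U = 6, from XL*N = (-2)*(-3)
% -----------------------------------------------------------------------------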
do_find_upper_numeric_limit_for(X*Y, U, TYPE, Hs) :- /*[[UppLim_13]]*/ find_lower_numeric_limit_for(X, XL, TYPE, Hxl), /* find X in XL..XU, Y in YL..YU, */ find_upper_numeric_limit_for(X, XU, TYPE, Hxu), /* calculate the four products */ find_lower_numeric_limit_for(Y, YL, TYPE, Hyl), /* XL*YL, XL*YU, XU*YL and XU*YU */ find_upper_numeric_limit_for(Y, YU, TYPE, Hyu), /* and determine the limits */ calc_product_bounds(XL, XU, YL, YU, TYPE, Hxl, Hxu, Hyl, Hyu, U, Hs, _, _). % EXPRESSION is X div Y (integer division) %----------------------------------------- do_find_upper_numeric_limit_for(X div N, U, integer, Hs) :- /*[[UppLim_14]]*/ int(N), !, ( simplify(N>0, true), find_upper_numeric_limit_for(X, XU, integer, Hs), U iss XU div N ; simplify(N<0, true), find_lower_numeric_limit_for(X, XL, integer, Hs), U iss XL div N ; search_for_upper_numeric_limit(X div N, U, integer, Hs) ). do_find_upper_numeric_limit_for(0 div Y, 0, integer, Hs) :- /*[[UppLim_15]]*/ !, safe_deduce(Y <> 0, integer, HL), sort(HL, Hs). do_find_upper_numeric_limit_for(N div Y, U, integer, Hs) :- /*[[UppLim_16]]*/ int(N), simplify(N>=0, true), !, find_lower_numeric_limit_for(Y, YL, integer, H1), ( simplify(YL > 0, true), /* so Y must definitely be +ve */ H2 = [] ; ( /* otherwise, check it's -ve */ safe_deduce(Y < 0, integer, H2) ; safe_deduce(Y <= - 1, integer, H2) ), simplify(YL < 0, true) ), !, U iss N div YL, append(H1, H2, HL), sort(HL, Hs). do_find_upper_numeric_limit_for(N div Y, U, integer, Hs) :- /*[[UppLim_17]]*/ int(N), simplify(N<0, true), !, find_upper_numeric_limit_for(Y, YU, integer, H1), ( simplify(YU < 0, true), /* so Y must definitely be -ve */ H2 = [] ; ( safe_deduce(Y > 0, integer, H2) ; safe_deduce(Y >= 1, integer, H2) ), simplify(YU > 0, true) ), !, U iss N div YU, append(H1, H2, HL), sort(HL, Hs). do_find_upper_numeric_limit_for(X div Y, U, integer, Hs) :- /*[[UppLim_18]]*/ find_lower_numeric_limit_for(Y, YL, integer, H1), /* For integer division, allow */ simplify(YL>0, true), /* only +ve or -ve divisors, i.e. */ find_lower_numeric_limit_for(X, XL, integer, H2), /* ignore cases where Y straddles */ simplify(XL>=0, true), /* and insist X +ve */ find_upper_numeric_limit_for(X, XU, integer, H3), simplify(XU>=0, true), /* (sanity check) */ U iss XU div YL, append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). do_find_upper_numeric_limit_for(X div Y, U, integer, Hs) :- /*[[UppLim_19]]*/ ( find_upper_numeric_limit_for(Y, YU, integer, H1), simplify(YU < 0, true) ; safe_deduce(Y < 0, integer, H1) ; safe_deduce(Y <= - 1, integer, H1) ), find_lower_numeric_limit_for(Y, YL, integer, H2), simplify(YL<0, true), find_lower_numeric_limit_for(X, XL, integer, H3), simplify(XL>=0, true), U iss XL div YL, append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % EXPRESSION is X mod Y (integer modulus) %---------------------------------------- do_find_upper_numeric_limit_for(X mod N, U, integer, Hs) :- /*[[UppLim_20]]*/ get_provenance_framework(spark), int(N), ( int(X), N \= 0, !, Hs = [], U iss X mod N ; simplify(N>0, true), !, ( ( find_lower_numeric_limit_for(X, XL, integer, H1), simplify(XL>=0, true) ; safe_deduce(X >= 0, integer, H1) ), find_upper_numeric_limit_for(X, XU, integer, H2), XU < N-1, /* 0 <= X <= XU <= N-1, so X mod N <= XU too */ !, append(H1, H2, HL), sort(HL, Hs), U = XU ; Hs = [], U iss N-1 /* X mod N <= N-1, for +ve N */ ) ; simplify(N<0, true), !, Hs = [], U = 0 /* X mod N <= 0, for -ve N */ ), !. 
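% -----------------------------------------------------------------------------
% Illustrative sketch (not part of the Simplifier; kept in this comment so the
% clauses of do_find_upper_numeric_limit_for/4 stay contiguous) of the
% four-corner computation that calc_product_bounds/13 performs for UppLim_13:
% with X in XL..XU and Y in YL..YU, X*Y is bounded above by the largest corner
% product.  The predicate name and the use of standard is/2 (rather than iss)
% are assumptions of the sketch.
%
%   upper_of_product(XL, XU, YL, YU, U) :-
%       P1 is XL*YL, P2 is XL*YU, P3 is XU*YL, P4 is XU*YU,
%       U is max(max(P1, P2), max(P3, P4)).
%
%   ?- upper_of_product(-2, 3, -4, 5, U).   % corners 8, -10, -12, 15, so U = 15
% -----------------------------------------------------------------------------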
do_find_upper_numeric_limit_for(_ mod Y, U, integer, Hs) :- /*[[UppLim_21]]*/ get_provenance_framework(spark), ( find_lower_numeric_limit_for(Y, YL, integer, H1), simplify(YL > 0, true) /* so Y must be +ve */ ; safe_deduce(Y > 0, integer, H1) ; safe_deduce(Y >= 1, integer, H1) ), !, find_upper_numeric_limit_for(Y, YU, integer, H2), U iss YU - 1, append(H1, H2, HL), sort(HL, Hs). do_find_upper_numeric_limit_for(_ mod Y, 0, integer, Hs) :- /*[[UppLim_22]]*/ get_provenance_framework(spark), ( find_upper_numeric_limit_for(Y, YU, integer, Hs), simplify(YU < 0, true) /* so Y must be -ve */ ; safe_deduce(Y < 0, integer, Hs) ; safe_deduce(Y <= - 1, integer, Hs) ), !. % EXPRESSION is abs(X) (absolute value function) %----------------------------------------------- do_find_upper_numeric_limit_for(abs(N), U, integer, []) :- /*[[UppLim_23(int)]]*/ int(N), !, ( simplify(N>=0, true), !, U = N ; simplify(N<0, true), !, U iss -N ), !. do_find_upper_numeric_limit_for(abs(N), U, real, []) :- /*[[UppLim_23(real)]]*/ base_rational(N), !, ( simplify(N>=0, true), !, U = N ; simplify(N<0, true), !, evaluate_rational_expression(-N, U) ), !. do_find_upper_numeric_limit_for(abs(X), U, TYPE, Hs) :- /*[[UppLim_24]]*/ find_lower_numeric_limit_for(X, XL, TYPE, H1), /* [Works for int and real] */ find_upper_numeric_limit_for(X, XU, TYPE, H2), ( simplify(XL >= 0, true), simplify(XU >= XL, true), U = XU ; simplify(XU >= 0, true), simplify(0 >= XL, true), evaluate_rational_expression(-XL, MinusXL), choose_max([MinusXL, XU], U) ; simplify(0 >= XU, true), simplify(XU >= XL, true), evaluate_rational_expression(-XL, U) ), append(H1, H2, HL), sort(HL, Hs). % EXPRESSION is X**Y (exponentiation) %------------------------------------ do_find_upper_numeric_limit_for(X**N, U, integer, Hs) :- /*[[UppLim_25(int)]]*/ int(N), !, ( N = 0, U = 1, Hs = [] ; N = 1, !, find_upper_numeric_limit_for(X, U, integer, Hs) ; N < 0, !, fail /* rule out -ve powers */ ; int(X), U iss X**N, int(U), !, Hs = [] ; safe_deduce(X >= 0, integer, H1), find_upper_numeric_limit_for(X, XU, integer, H2), U iss XU**N, append(H1, H2, HL), sort(HL, Hs) ; simplify(N mod 2, 0), /* so even power */ find_upper_numeric_limit_for(abs(X), XU, integer, Hs), U iss XU**N ; safe_deduce(X < 0, integer, H1), simplify(N mod 2, 1), /* so odd power & -ve number */ find_upper_numeric_limit_for(X, XU, integer, H2), simplify(XU < 0, true), U iss XU**N, append(H1, H2, HL), sort(HL, Hs) ). do_find_upper_numeric_limit_for(X**N, U, real, Hs) :- /*[[UppLim_25(real)]]*/ int(N), !, ( N = 0, U = 1, Hs = [] ; N = 1, !, find_upper_numeric_limit_for(X, U, real, Hs) ; N < 0, !, fail /* rule out -ve powers */ ; base_rational(X), evaluate_rational_expression(X**N, U), base_rational(U), /* sanity check */ !, Hs = [] ; safe_deduce(X >= 0, real, H1), find_upper_numeric_limit_for(X, XU, real, H2), simplify(XU >= 0, true), /* sanity check */ evaluate_rational_expression(XU**N, U), append(H1, H2, HL), sort(HL, Hs) ; simplify(N mod 2, 0), /* so even power */ find_upper_numeric_limit_for(abs(X), XU, real, Hs), evaluate_rational_expression(XU**N, U) ; safe_deduce(X < 0, real, H1), simplify(N mod 2, 1), /* so odd power & -ve number */ find_upper_numeric_limit_for(X, XU, real, H2), simplify(XU < 0, true), evaluate_rational_expression(XU**N, U), append(H1, H2, HL), sort(HL, Hs) ). 
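% -----------------------------------------------------------------------------
% Illustrative sketch of the net effect of UppLim_24 above (not the
% Simplifier's code; kept in a comment so the clauses of
% do_find_upper_numeric_limit_for/4 stay contiguous): with X in XL..XU, all
% three sign cases give abs(X) <= max(-XL, XU).  Hypothetical predicate,
% standard is/2.
%
%   upper_of_abs(XL, XU, U) :- U is max(-XL, XU).
%
%   ?- upper_of_abs(-7, 3, U).   % U = 7
% -----------------------------------------------------------------------------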
do_find_upper_numeric_limit_for(X**Y, U, integer, Hs) :- /*[[UppLim_26(int)]]*/ ( find_lower_numeric_limit_for(X, XL, integer, H1), /* Ignore real powers for now */ simplify(XL >= 0, true) ; safe_deduce(X >= 0, integer, H1) ), ( find_lower_numeric_limit_for(Y, YL, integer, H2), simplify(YL >= 0, true) ; safe_deduce(Y >= 0, integer, H2) ), find_upper_numeric_limit_for(X, XU, integer, H3), find_upper_numeric_limit_for(Y, YU, integer, H4), simplify(XU >= 1, true), simplify(YU >= 1, true), U iss XU ** YU, append(H3, H4, H3to4), append(H2, H3to4, Hrest), append(H1, Hrest, HL), sort(HL, Hs). do_find_upper_numeric_limit_for(X**Y, U, real, Hs) :- /*[[UppLim_26(real)]]*/ ( find_lower_numeric_limit_for(X, XL, real, H1), /* Ignore real powers for now */ simplify(XL >= 0, true) ; safe_deduce(X >= 0, real, H1) ), ( find_lower_numeric_limit_for(Y, YL, integer, H2), simplify(YL >= 0, true) ; safe_deduce(Y >= 0, integer, H2) ), find_upper_numeric_limit_for(X, XU, real, H3), find_upper_numeric_limit_for(Y, YU, integer, H4), simplify(XU >= 1, true), simplify(YU >= 1, true), evaluate_rational_expression(XU ** YU, U), append(H3, H4, H3to4), append(H2, H3to4, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % EXPRESSION is X/Y (real division) %---------------------------------- do_find_upper_numeric_limit_for(X/N, U, real, Hs) :- /*[[UppLim_32]]*/ base_rational(N), !, ( simplify(N>0, true), find_upper_numeric_limit_for(X, XU, real, Hs), evaluate_rational_expression(XU/N, U) ; simplify(N<0, true), find_lower_numeric_limit_for(X, XL, real, Hs), evaluate_rational_expression(XL/N, U) ; search_for_upper_numeric_limit(X/N, U, real, Hs) ). do_find_upper_numeric_limit_for(0/Y, 0, real, Hs) :- /*[[UppLim_33]]*/ !, safe_deduce(Y <> 0, real, HL), sort(HL, Hs). do_find_upper_numeric_limit_for(N/Y, U, real, Hs) :- /*[[UppLim_34]]*/ base_rational(N), simplify(N>=0, true), !, find_lower_numeric_limit_for(Y, YL, real, H1), ( simplify(YL > 0, true), /* so Y must definitely be +ve */ H2 = [] ; safe_deduce(Y < 0, real, H2), /* otherwise, check it's -ve */ simplify(YL < 0, true) ), !, evaluate_rational_expression(N/YL, U), append(H1, H2, HL), sort(HL, Hs). do_find_upper_numeric_limit_for(N/Y, U, real, Hs) :- /*[[UppLim_35]]*/ base_rational(N), simplify(N<0, true), !, find_upper_numeric_limit_for(Y, YU, real, H1), ( simplify(YU < 0, true), /* so Y must definitely be -ve */ H2 = [] ; safe_deduce(Y > 0, real, H2), /* otherwise, check it's +ve */ simplify(YU > 0, true) ), !, evaluate_rational_expression(N/YU, U), append(H1, H2, HL), sort(HL, Hs). do_find_upper_numeric_limit_for(X/Y, U, real, Hs) :- /*[[UppLim_36]]*/ find_lower_numeric_limit_for(Y, YL, real, H1), /* For real division, allow only */ simplify(YL>0, true), /* +ve or -ve divisors, i.e. */ find_lower_numeric_limit_for(X, XL, real, H2), /* ignore cases where Y straddles */ simplify(XL>=0, true), /* and insist X +ve */ find_upper_numeric_limit_for(X, XU, real, H3), simplify(XU>=0, true), /* (sanity check) */ evaluate_rational_expression(XU/YL, U), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). do_find_upper_numeric_limit_for(X/Y, U, real, Hs) :- /*[[UppLim_37]]*/ ( find_upper_numeric_limit_for(Y, YU, real, H1), simplify(YU < 0, true) ; safe_deduce(Y < 0, real, H1) ), find_lower_numeric_limit_for(Y, YL, real, H2), simplify(YL<0, true), find_lower_numeric_limit_for(X, XL, real, H3), simplify(XL>=0, true), evaluate_rational_expression(XL/YL, U), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). 
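% -----------------------------------------------------------------------------
% Illustrative sketch of UppLim_36 above (not the Simplifier's code; kept in a
% comment so the clauses of do_find_upper_numeric_limit_for/4 stay contiguous):
% for a non-negative numerator X <= XU and a strictly positive divisor
% Y >= YL > 0, the quotient is largest when the numerator is largest and the
% divisor smallest, so X/Y <= XU/YL.  Hypothetical predicate; floating-point
% /2 stands in for the Simplifier's rational arithmetic.
%
%   upper_of_quotient(XU, YL, U) :- XU >= 0, YL > 0, U is XU / YL.
%
%   ?- upper_of_quotient(9, 2, U).   % U = 4.5
% -----------------------------------------------------------------------------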
% EXPRESSION is anything else (catch-all)
%----------------------------------------
do_find_upper_numeric_limit_for(X, U, Type, Hs) :-   /*[[UppLim_27]]*/
   calculate_known_upper_limit_for(X, U, Type, Hs).   /* Catch-all case */
%===============================================================================
% find_lower_numeric_limit_for(+EXPRESSION, -NUM, +TYPE, -HYPS_USED).
%-------------------------------------------------------------------------------
% Determine a lower numeric limit for EXPRESSION of provided type TYPE as
% NUM, reporting hypotheses used as HYPS_USED.
%===============================================================================
find_lower_numeric_limit_for(X, L, Type, Hs) :-
   known_lower_numeric_limit(X, L, Type, Hs),   /* first, look to see if we've already found it */
   !.
find_lower_numeric_limit_for(X, L, Type, Hs) :-   /* if not, and it's an atom, calculate it */
   atom(X),
   !,
   calculate_known_lower_limit_for(X, L, Type, Hs).
find_lower_numeric_limit_for(N, N, integer, []) :-   /* if not, and it's a numeric literal, look no further */
   int(N),   /*formerly [[LowLim_1]]*/
   !.
find_lower_numeric_limit_for(N, N, real, []) :-   /* if not, and it's a numeric literal, look no further */
   base_rational(N),   /*formerly [[LowLim_1]]*/
   !.
find_lower_numeric_limit_for(N, L, integer, []) :-   /* if it's a numeric literal that isn't an integer, */
   base_rational(N),   /* e.g. 1/2, find the smallest integer above it. */
   !,
   find_smallest_integer_literal_above(N, L).
/* otherwise, (1) apply strategies, (2) use hypotheses, and
   (3) combine the results to get the greatest lower bound */
find_lower_numeric_limit_for(X, L, Type, Hs) :-
   do_find_lower_numeric_limit_for(X, L1, Type, H1s),
   assertz(candidate_lower(X, L1, Type, H1s)),
   calculate_known_lower_limit_for(X, L, Type, Hs),
   !.
%===============================================================================
% do_find_lower_numeric_limit_for(+EXPRESSION, -NUM, +TYPE, -HYPS_USED).
%-------------------------------------------------------------------------------
% Determine a lower numeric limit for EXPRESSION of provided type TYPE as
% NUM, reporting hypotheses used as HYPS_USED.
%===============================================================================
% EXPRESSION is a numeric literal
%--------------------------------
% This case has already been considered by the parent clause above.

% EXPRESSION is -X (unary minus)
%-------------------------------
do_find_lower_numeric_limit_for(-X, L, TYPE, Hs) :-   /*[[LowLim_2]]*/
   find_upper_numeric_limit_for(X, U, TYPE, Hs),   /* X <= U, so -X >= -U */
   evaluate_rational_expression(-U, L).

% EXPRESSION is X+Y (addition)
%-----------------------------
do_find_lower_numeric_limit_for(X+N, L, integer, Hs) :-   /*[[LowLim_3(int)]]*/
   int(N),
   !,
   find_lower_numeric_limit_for(X, XL, integer, Hs),
   L iss XL+N.
do_find_lower_numeric_limit_for(X+N, L, real, Hs) :-   /*[[LowLim_3(real)]]*/
   base_rational(N),
   !,
   find_lower_numeric_limit_for(X, XL, real, Hs),
   evaluate_rational_expression(XL+N, L).
do_find_lower_numeric_limit_for(N+X, L, integer, Hs) :-   /*[[LowLim_4(int)]]*/
   int(N),
   !,
   find_lower_numeric_limit_for(X, XL, integer, Hs),
   L iss XL+N.
do_find_lower_numeric_limit_for(N+X, L, real, Hs) :-   /*[[LowLim_4(real)]]*/
   base_rational(N),
   !,
   find_lower_numeric_limit_for(X, XL, real, Hs),
   evaluate_rational_expression(XL+N, L).
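% -----------------------------------------------------------------------------
% Illustrative note on the duality used by LowLim_2 above (not the Simplifier's
% code; kept in a comment so the clauses of do_find_lower_numeric_limit_for/4
% stay contiguous): a lower bound for an expression is the negation of an
% upper bound for its negation, which is why each UppLim_n case has a LowLim_n
% mirror.  Hypothetical predicate, standard is/2.
%
%   lower_from_upper_of_negation(UofMinusX, L) :- L is -UofMinusX.
%
%   ?- lower_from_upper_of_negation(5, L).   % if -X <= 5 then X >= -5
% -----------------------------------------------------------------------------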
do_find_lower_numeric_limit_for(X+Y, L, TYPE, Hs) :- /*[[LowLim_5]]*/ find_lower_numeric_limit_for(X, XL, TYPE, H1), /* X >= XL, and */ find_lower_numeric_limit_for(Y, YL, TYPE, H2), /* Y >= YL, so */ evaluate_rational_expression(XL+YL, L), /* X+Y >= XL+YL */ append(H1, H2, HL), /* [Works for int and real] */ sort(HL, Hs). % EXPRESSION is X - X div Y [special case] %----------------------------------------- do_find_lower_numeric_limit_for(X - X div N, L, integer, Hs) :- /*[[LowLim_6]]*/ int(N), simplify(N <> 0, true), !, find_lower_numeric_limit_for(X, XL, integer, Hs), L iss XL - XL div N. do_find_lower_numeric_limit_for(X - X div Y, L, integer, Hs) :- /*[[LowLim_7]]*/ find_lower_numeric_limit_for(Y, YL, integer, H1), /* use biggest Y to get smallest X div Y */ simplify(YL > 0, true), /* and check it's strictly positive */ find_lower_numeric_limit_for(X, XL, integer, H2), ( simplify(XL >= 0, true), /* so X is >= 0, too */ H3 = [] ; safe_deduce(X >= 0, integer, H3) ), L iss XL - XL div YL, append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % EXPRESSION is Y - (X - X div Y * Y) [special case] %--------------------------------------------------- do_find_lower_numeric_limit_for(Y - (X - X div Y * Y), 1, integer, Hs) :- /*[[LowLim_28a]]*/ ( /* X +ve, Y +ve case only. */ find_lower_numeric_limit_for(X, XL, integer, H1), /* In this case, the lower limit */ simplify(XL >= 0, true) /* is one. */ ; safe_deduce(X >= 0, integer, H1) ), ( find_lower_numeric_limit_for(Y, YL, integer, H2), simplify(YL > 0, true) ; safe_deduce(Y > 0, integer, H2) ), append(H1, H2, HL), sort(HL, Hs). do_find_lower_numeric_limit_for(Y - (X - Y * (X div Y)), 1, integer, Hs) :- /*[[LowLim_28a]]*/ ( find_lower_numeric_limit_for(X, XL, integer, H1), simplify(XL >= 0, true) ; safe_deduce(X >= 0, integer, H1) ), ( find_lower_numeric_limit_for(Y, YL, integer, H2), simplify(YL > 0, true) ; safe_deduce(Y > 0, integer, H2) ), append(H1, H2, HL), sort(HL, Hs). % EXPRESSION is X - X div Y * Y [special case] %--------------------------------------------- do_find_lower_numeric_limit_for(X - X div Y * Y, 0, integer, Hs) :- /*[[LowLim_29a]]*/ ( /* X +ve case (Y's sign irrelevant). */ find_lower_numeric_limit_for(X, XL, integer, H1), /* In this case, the lower limit */ simplify(XL >= 0, true) /* is zero, regardless of Y. */ ; safe_deduce(X >= 0, integer, H1) ), ( safe_deduce(Y <> 0, integer, H2) ; find_lower_numeric_limit_for(Y, YL, integer, H2), simplify(YL > 0, true) ; find_upper_numeric_limit_for(Y, YU, integer, H2), simplify(YU < 0, true) ), append(H1, H2, HL), sort(HL, Hs). do_find_lower_numeric_limit_for(X - Y * (X div Y), 0, integer, Hs) :- /*[[LowLim_29b]]*/ ( find_lower_numeric_limit_for(X, XL, integer, H1), simplify(XL >= 0, true) ; safe_deduce(X >= 0, integer, H1) ), ( safe_deduce(Y <> 0, integer, H2) ; find_lower_numeric_limit_for(Y, YL, integer, H2), simplify(YL > 0, true) ; find_upper_numeric_limit_for(Y, YU, integer, H2), simplify(YU < 0, true) ), append(H1, H2, HL), sort(HL, Hs). do_find_lower_numeric_limit_for(X - X div Y * Y, L, integer, Hs) :- /*[[LowLim_30a]]*/ ( /* X -ve, Y +ve case. */ find_upper_numeric_limit_for(X, XU, integer, H1), /* In this case, the lower limit */ simplify(XU < 0, true) /* is -(YU-1). 
*/ ; safe_deduce(X < 0, integer, H1) ), ( find_lower_numeric_limit_for(Y, YL, integer, H2), simplify(YL > 0, true) ; safe_deduce(Y >= 1, integer, H2) ; safe_deduce(Y > 0, integer, H2) ), find_upper_numeric_limit_for(Y, YU, integer, H3), simplify(YU > 0, true), /* sanity check */ L iss -(YU - 1), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). do_find_lower_numeric_limit_for(X - Y * (X div Y), L, integer, Hs) :- /*[[LowLim_30b]]*/ ( find_upper_numeric_limit_for(X, XU, integer, H1), simplify(XU < 0, true) ; safe_deduce(X < 0, integer, H1) ), ( find_lower_numeric_limit_for(Y, YL, integer, H2), simplify(YL > 0, true) ; safe_deduce(Y >= 1, integer, H2) ; safe_deduce(Y > 0, integer, H2) ), find_upper_numeric_limit_for(Y, YU, integer, H3), simplify(YU > 0, true), /* sanity check */ L iss -(YU - 1), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). do_find_lower_numeric_limit_for(X - X div Y * Y, L, integer, Hs) :- /*[[LowLim_31a]]*/ ( /* X -ve, Y -ve case. */ find_upper_numeric_limit_for(X, XU, integer, H1), /* In this case, the lower limit */ simplify(XU < 0, true) /* is YL+1. */ ; safe_deduce(X < 0, integer, H1) ), ( find_upper_numeric_limit_for(Y, YU, integer, H2), simplify(YU < 0, true) ; safe_deduce(Y <= (- 1), integer, H2) ; safe_deduce(Y < 0, integer, H2) ), find_lower_numeric_limit_for(Y, YL, integer, H3), simplify(YL < 0, true), /* sanity check */ L iss YL + 1, append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). do_find_lower_numeric_limit_for(X - Y * (X div Y), L, integer, Hs) :- /*[[LowLim_31b]]*/ ( find_upper_numeric_limit_for(X, XU, integer, H1), simplify(XU < 0, true) ; safe_deduce(X < 0, integer, H1) ), ( find_upper_numeric_limit_for(Y, YU, integer, H2), simplify(YU < 0, true) ; safe_deduce(Y <= (- 1), integer, H2) ; safe_deduce(Y < 0, integer, H2) ), find_lower_numeric_limit_for(Y, YL, integer, H3), simplify(YL < 0, true), /* sanity check */ L iss YL + 1, append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % EXPRESSION is X-Y (subtraction) %-------------------------------- do_find_lower_numeric_limit_for(X-N, L, integer, Hs) :- /*[[LowLim_8(int)]]*/ int(N), !, find_lower_numeric_limit_for(X, XL, integer, Hs), L iss XL-N. do_find_lower_numeric_limit_for(X-N, L, real, Hs) :- /*[[LowLim_8(real)]]*/ base_rational(N), !, find_lower_numeric_limit_for(X, XL, real, Hs), evaluate_rational_expression(XL-N, L). do_find_lower_numeric_limit_for(N-Y, L, integer, Hs) :- /*[[LowLim_9(int)]]*/ int(N), !, find_upper_numeric_limit_for(Y, YU, integer, Hs), L iss N-YU. do_find_lower_numeric_limit_for(N-Y, L, real, Hs) :- /*[[LowLim_9(real)]]*/ base_rational(N), !, find_upper_numeric_limit_for(Y, YU, real, Hs), evaluate_rational_expression(N-YU, L). do_find_lower_numeric_limit_for(X-Y, L, TYPE, Hs) :- /*[[LowLim_10]]*/ find_lower_numeric_limit_for(X, XL, TYPE, H1), /* X >= XL, and */ find_upper_numeric_limit_for(Y, YU, TYPE, H2), /* Y <= YU, i.e. -Y >= -YU, so */ evaluate_rational_expression(XL-YU, L), /* X-Y >= XL-YU */ append(H1, H2, HL), /* [Works for int and real] */ sort(HL, Hs). % EXPRESSION is X*Y (multiplication) %----------------------------------- do_find_lower_numeric_limit_for(X*N, L, integer, Hs) :- /*[[LowLim_11(int)]]*/ int(N), !, ( simplify(N>0, true), !, find_lower_numeric_limit_for(X, XL, integer, Hs), L iss XL*N ; 0 iss N, !, L = 0, Hs = [] ; simplify(N<0, true), !, find_upper_numeric_limit_for(X, XU, integer, Hs), L iss XU*N ). 
do_find_lower_numeric_limit_for(X*N, L, real, Hs) :- /*[[LowLim_11(real)]]*/ base_rational(N), !, ( simplify(N>0, true), !, find_lower_numeric_limit_for(X, XL, real, Hs), evaluate_rational_expression(XL*N, L) ; N = 0, !, L = 0, Hs = [] ; simplify(N<0, true), !, find_upper_numeric_limit_for(X, XU, real, Hs), evaluate_rational_expression(XU*N, L) ). do_find_lower_numeric_limit_for(N*X, L, integer, Hs) :- /*[[LowLim_12(int)]]*/ int(N), !, ( simplify(N>0, true), !, find_lower_numeric_limit_for(X, XL, integer, Hs), L iss XL*N ; N = 0, !, L = 0, Hs = [] ; simplify(N<0, true), !, find_upper_numeric_limit_for(X, XU, integer, Hs), L iss XU*N ). do_find_lower_numeric_limit_for(N*X, L, real, Hs) :- /*[[LowLim_12(real)]]*/ base_rational(N), !, ( simplify(N>0, true), !, find_lower_numeric_limit_for(X, XL, real, Hs), evaluate_rational_expression(XL*N, L) ; N = 0, !, L = 0, Hs = [] ; simplify(N<0, true), !, find_upper_numeric_limit_for(X, XU, real, Hs), evaluate_rational_expression(XU*N, L) ). do_find_lower_numeric_limit_for(X*X, L, TYPE, Hs) :- /*[[LowLim_28]] -- special case */ find_lower_numeric_limit_for(X, XL, TYPE, Hs), /* [Works for int and real] */ base_rational(XL), simplify(XL>=0, true), evaluate_rational_expression(XL*XL, L). do_find_lower_numeric_limit_for(X*Y, L, TYPE, Hs) :- /*[[LowLim_13]]*/ find_lower_numeric_limit_for(X, XL, TYPE, Hxl), /* find X in XL..XU, Y in YL..YU, */ find_upper_numeric_limit_for(X, XU, TYPE, Hxu), /* calculate the four products */ find_lower_numeric_limit_for(Y, YL, TYPE, Hyl), /* XL*YL, XL*YU, XU*YL and XU*YU */ find_upper_numeric_limit_for(Y, YU, TYPE, Hyu), /* and determine the limits */ calc_product_bounds(XL, XU, YL, YU, TYPE, Hxl, Hxu, Hyl, Hyu, _, _, L, Hs). % EXPRESSION is X div Y (integer division) %----------------------------------------- do_find_lower_numeric_limit_for(X div N, L, integer, Hs) :- /*[[LowLim_14]]*/ int(N), !, ( simplify(N>0, true), find_lower_numeric_limit_for(X, XL, integer, Hs), L iss XL div N ; simplify(N<0, true), find_upper_numeric_limit_for(X, XU, integer, Hs), L iss XU div N ; search_for_lower_numeric_limit(X div N, L, integer, Hs) ). do_find_lower_numeric_limit_for(0 div Y, 0, integer, Hs) :- /*[[LowLim_15]]*/ !, ( safe_deduce(Y <> 0, integer, HL) ; find_lower_numeric_limit_for(Y, YL, integer, HL), simplify(YL > 0, true) ; find_upper_numeric_limit_for(Y, YU, integer, HL), simplify(YU < 0, true) ), sort(HL, Hs). do_find_lower_numeric_limit_for(N div Y, L, integer, Hs) :- /*[[LowLim_16]]*/ int(N), simplify(N>=0, true), !, find_upper_numeric_limit_for(Y, YU, integer, H1), ( simplify(YU < 0, true), /* so Y must definitely be -ve, too */ H2 = [] ; ( find_lower_numeric_limit_for(Y, YL, integer, H2), simplify(YL > 0, true) ; safe_deduce(Y > 0, integer, H2) ; safe_deduce(Y >= 1, integer, H2) ), simplify(YU > 0, true) ), !, L iss N div YU, append(H1, H2, HL), sort(HL, Hs). do_find_lower_numeric_limit_for(N div Y, L, integer, Hs) :- /*[[LowLim_17]]*/ int(N), simplify(N<0, true), !, find_lower_numeric_limit_for(Y, YL, integer, H1), ( simplify(YL > 0, true), /* so Y must be definitely +ve, too */ H2 = [] ; ( find_upper_numeric_limit_for(Y, YU, integer, H2), simplify(YU < 0, true) ; safe_deduce(Y < 0, integer, H2) ; safe_deduce(Y <= - 1, integer, H2) ), simplify(YL < 0, true) ), !, L iss N div YL, append(H1, H2, HL), sort(HL, Hs). do_find_lower_numeric_limit_for(X div Y, L, integer, Hs) :- /*[[LowLim_18]]*/ ( /* For integer division, allow */ find_lower_numeric_limit_for(Y, YL, integer, H1), /* only +ve or -ve divisors, i.e. 
*/ simplify(YL > 0, true) /* ignore cases where Y straddles */ ; /* zero (unlikely anyway) and */ safe_deduce(Y > 0, integer, H1) /* insist X +ve. */ ; safe_deduce(Y >= 1, integer, H1) ), find_upper_numeric_limit_for(Y, YU, integer, H2), simplify(YU>0, true), find_lower_numeric_limit_for(X, XL, integer, H3), simplify(XL>=0, true), /* So X>=0 also, given X>=XL */ L iss XL div YU, append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). do_find_lower_numeric_limit_for(X div Y, L, integer, Hs) :- /*[[LowLim_19]]*/ find_upper_numeric_limit_for(Y, YU, integer, H1), /* so Y must be -ve, too */ simplify(YU<0, true), find_upper_numeric_limit_for(X, XU, integer, H2), simplify(XU>=0, true), L iss XU div YU, append(H1, H2, HL), sort(HL, Hs). % EXPRESSION is X mod Y (integer modulus) %---------------------------------------- do_find_lower_numeric_limit_for(X mod N, L, integer, Hs) :- /*[[LowLim_20]]*/ get_provenance_framework(spark), int(N), !, ( int(X), N \= 0, !, Hs = [], L iss X mod N ; simplify(N > 0, true), !, ( find_lower_numeric_limit_for(X, XL, integer, H1), simplify(XL > 0, true), find_upper_numeric_limit_for(X, XU, integer, H2), simplify(XU < N, true), /* so 0=XL */ simplify(XL<=XU, true), /* extra sanity check */ append(H1, H2, HL), sort(HL, Hs), L = XL ; Hs = [], L = 0 ) ; simplify(N < 0, true), Hs = [], L iss N+1 ), !. do_find_lower_numeric_limit_for(_ mod Y, 0, integer, Hs) :- /*[[LowLim_21]]*/ get_provenance_framework(spark), ( find_lower_numeric_limit_for(Y, YL, integer, Hs), simplify(YL > 0, true) ; safe_deduce(Y > 0, integer, Hs) ; safe_deduce(Y >= 1, integer, Hs) ), !. do_find_lower_numeric_limit_for(_ mod Y, L, integer, Hs) :- /*[[LowLim_22]]*/ get_provenance_framework(spark), ( find_upper_numeric_limit_for(Y, YU, integer, H1), simplify(YU < 0, true) ; safe_deduce(Y < 0, integer, H1) ; safe_deduce(Y <= - 1, integer, H1) ), !, find_lower_numeric_limit_for(Y, YL, integer, H2), L iss YL+1, append(H1, H2, HL), sort(HL, Hs). % EXPRESSION is abs(X) (absolute value function) %----------------------------------------------- do_find_lower_numeric_limit_for(abs(N), L, integer, []) :- /*[[LowLim_23(int)]]*/ int(N), !, ( simplify(N>=0, true), !, L = N ; simplify(N<0, true), !, L iss -N ), !. do_find_lower_numeric_limit_for(abs(N), L, real, []) :- /*[[LowLim_23(real)]]*/ base_rational(N), !, ( simplify(N>=0, true), !, L = N ; simplify(N<0, true), !, evaluate_rational_expression(-N, L) ), !. do_find_lower_numeric_limit_for(abs(X), L, TYPE, Hs) :- /*[[LowLim_24]]*/ find_lower_numeric_limit_for(X, XL, TYPE, H1), /* [Works for int and real] */ find_upper_numeric_limit_for(X, XU, TYPE, H2), ( simplify(XU>=XL, true), simplify(XL>=0, true), L = XL ; simplify(XU>=0, true), simplify(0>=XL, true), L = 0 ; simplify(0>=XU, true), simplify(XU>=XL, true), evaluate_rational_expression(-XU, L) ), append(H1, H2, HL), sort(HL, Hs). 
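% Illustrative note (not part of the original source): a worked instance of
% the [[LowLim_24]] interval reasoning above, assuming the bounds of X have
% already been established from the hypotheses.
%   - X in  2 ..  9  =>  abs(X) >= 2   (first branch:  L = XL)
%   - X in -3 ..  7  =>  abs(X) >= 0   (second branch: the interval straddles zero)
%   - X in -9 .. -2  =>  abs(X) >= 2   (third branch:  L = -XU)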
% EXPRESSION is X**Y (exponentiation) %------------------------------------ do_find_lower_numeric_limit_for(X**N, L, integer, Hs) :- /*[[LowLim_25(int)]]*/ int(N), !, ( N = 0, L = 1, Hs = [] ; N = 1, !, find_lower_numeric_limit_for(X, L, integer, Hs) ; N =< 0, !, fail /* rule out non +ve powers */ ; int(X), L iss X**N, int(L), !, Hs = [] ; safe_deduce(X >= 0, integer, H1), find_lower_numeric_limit_for(X, XL, integer, H2), simplify(XL >= 0, true), L iss XL**N, append(H1, H2, HL), sort(HL, Hs) ; simplify(N mod 2, 0), /* so even power */ find_lower_numeric_limit_for(abs(X), XL, integer, Hs), simplify(XL >= 0, true), /* sanity check */ L iss XL**N ; safe_deduce(X < 0, integer, H1), simplify(N mod 2, 1), find_lower_numeric_limit_for(X, XL, integer, H2), simplify(XL < 0, true), L iss XL**N, append(H1, H2, HL), sort(HL, Hs) ). do_find_lower_numeric_limit_for(X**N, L, real, Hs) :- /*[[LowLim_25(real)]]*/ int(N), !, ( N = 0, L = 1, Hs = [] ; N = 1, !, find_lower_numeric_limit_for(X, L, real, Hs) ; N =< 0, !, fail /* rule out non +ve powers */ ; base_rational(X), evaluate_rational_expression(X**N, L), base_rational(L), /* sanity check */ !, Hs = [] ; safe_deduce(X >= 0, real, H1), find_lower_numeric_limit_for(X, XL, real, H2), simplify(XL >= 0, true), /* sanity check */ evaluate_rational_expression(XL**N, L), append(H1, H2, HL), sort(HL, Hs) ; simplify(N mod 2, 0), /* so even power */ find_lower_numeric_limit_for(abs(X), XL, real, Hs), simplify(XL >= 0, true), /* sanity check */ evaluate_rational_expression(XL**N, L) ; safe_deduce(X < 0, real, H1), simplify(N mod 2, 1), find_lower_numeric_limit_for(X, XL, real, H2), simplify(XL < 0, true), evaluate_rational_expression(XL**N, L), append(H1, H2, HL), sort(HL, Hs) ). do_find_lower_numeric_limit_for(X**Y, L, integer, Hs) :- /*[[LowLim_26(int)]]*/ safe_deduce(X >= 0, integer, H1), /* Ignore real powers for now */ safe_deduce(Y >= 0, integer, H2), find_lower_numeric_limit_for(X, XL, integer, H3), /* Given integers, XL**YL is the lower limit. */ find_lower_numeric_limit_for(Y, YL, integer, H4), /* (This is NOT true for reals, e.g. X=1/2.) */ simplify(XL >= 0, true), simplify(YL >= 0, true), ( simplify(XL >= 1, true), L iss XL ** YL ; XL = 0, /* Rule out XL<0 case: too obscure. */ L = 0 ), append(H3, H4, H3to4), append(H2, H3to4, Hrest), append(H1, Hrest, HL), sort(HL, Hs). do_find_lower_numeric_limit_for(X**Y, L, real, Hs) :- /*[[LowLim_26(real)]]*/ safe_deduce(X >= 0, real, H1), /* Ignore real powers for now */ safe_deduce(Y >= 0, integer, H2), find_lower_numeric_limit_for(X, XL, real, H3), /* Given integers, XL**YL is the lower limit. */ find_lower_numeric_limit_for(Y, YL, integer, H4), /* (This is NOT true for reals, e.g. X=1/2.) */ simplify(XL >= 0, true), simplify(YL >= 0, true), ( simplify(XL > 0, true), evaluate_rational_expression(XL ** YL, L) ; XL = 0, /* Rule out XL<0 case: too obscure. */ L = 0 ), append(H3, H4, H3to4), append(H2, H3to4, Hrest), append(H1, Hrest, HL), sort(HL, Hs). % EXPRESSION is X/Y (real division) %---------------------------------- do_find_lower_numeric_limit_for(X/N, L, real, Hs) :- /*[[LowLim_32]*/ base_rational(N), !, ( simplify(N>0, true), find_lower_numeric_limit_for(X, XL, real, Hs), evaluate_rational_expression(XL/N, L) ; simplify(N<0, true), find_upper_numeric_limit_for(X, XU, real, Hs), evaluate_rational_expression(XU/N, L) ; search_for_lower_numeric_limit(X/N, L, real, Hs) ). 
do_find_lower_numeric_limit_for(0/Y, 0, real, Hs) :- /*[[LowLim_33]]*/ !, ( safe_deduce(Y <> 0, real, HL) ; find_lower_numeric_limit_for(Y, YL, real, HL), simplify(YL > 0, true) ; find_upper_numeric_limit_for(Y, YU, real, HL), simplify(YU < 0, true) ), sort(HL, Hs). do_find_lower_numeric_limit_for(N/Y, L, real, Hs) :- /*[[LowLim_34]]*/ base_rational(N), simplify(N>=0, true), !, find_upper_numeric_limit_for(Y, YU, real, H1), ( simplify(YU < 0, true), /* so Y must definitely be -ve, too */ H2 = [] ; ( find_lower_numeric_limit_for(Y, YL, real, H2), simplify(YL > 0, true) ; safe_deduce(Y > 0, real, H2) ), simplify(YU > 0, true) ), !, evaluate_rational_expression(N/YU, L), append(H1, H2, HL), sort(HL, Hs). do_find_lower_numeric_limit_for(N/Y, L, real, Hs) :- /*[[LowLim_35]]*/ base_rational(N), simplify(N<0, true), !, find_lower_numeric_limit_for(Y, YL, real, H1), ( simplify(YL > 0, true), /* so Y must be definitely +ve, too */ H2 = [] ; ( find_upper_numeric_limit_for(Y, YU, real, H2), simplify(YU < 0, true) ; safe_deduce(Y < 0, real, H2) ), simplify(YL < 0, true) ), !, evaluate_rational_expression(N/YL, L), append(H1, H2, HL), sort(HL, Hs). do_find_lower_numeric_limit_for(X/Y, L, real, Hs) :- /*[[LowLim_36]]*/ ( /* For real division, allow only */ find_lower_numeric_limit_for(Y, YL, real, H1), /* +ve or -ve divisors, i.e. */ simplify(YL > 0, true) /* ignore cases where Y straddles */ ; /* zero (unlikely anyway) and */ safe_deduce(Y > 0, real, H1) /* insist X +ve. */ ), find_upper_numeric_limit_for(Y, YU, real, H2), simplify(YU>0, true), find_lower_numeric_limit_for(X, XL, real, H3), simplify(XL>=0, true), /* So X>=0 also, given X>=XL */ evaluate_rational_expression(XL/YU, L), append(H2, H3, Hrest), append(H1, Hrest, HL), sort(HL, Hs). do_find_lower_numeric_limit_for(X/Y, L, real, Hs) :- /*[[LowLim_37]]*/ find_upper_numeric_limit_for(Y, YU, real, H1), /* so Y must be -ve, too */ simplify(YU<0, true), find_upper_numeric_limit_for(X, XU, real, H2), simplify(XU>=0, true), evaluate_rational_expression(XU/YU, L), append(H1, H2, HL), sort(HL, Hs). % EXPRESSION is anything else (catch-all) %---------------------------------------- do_find_lower_numeric_limit_for(X, L, Type, Hs) :- /*[[LowLim_27]]*/ calculate_known_lower_limit_for(X, L, Type, Hs). /* Catch-all case */ %=============================================================================== % calculate_known_upper_limit_for(+EXPRESSION, -NUM, +TYPE, -HYPS_USED). %------------------------------------------------------------------------------- % Determine a upper numeric limit for EXPRESSION of provided type TYPE as % NUM, reporting hypotheses used as HYPS_USED. Here, the limit is primarily % calcuated through deeper reasoning and exploiting additional information % (hypotheses). %=============================================================================== calculate_known_upper_limit_for(X, _, Type, _) :- search_for_upper_numeric_limit(X, U, Type, Hs), assertz(candidate_upper(X, U, Type, Hs)), fail. calculate_known_upper_limit_for(X, U, Type, Hs) :- candidate_upper(X, U, Type, Hs), \+ (( candidate_upper(X, N, Type, _), simplify(N X<=XU-1 */ append(H1, H2, Hs). search_for_upper_numeric_limit(X, U, integer, Hs) :- /*[[SrchUpp_6]]*/ limited_extended_infrule(X<=A, H1, 1, Lim), \+ int(A), limited_extended_infrule(A X<=XU-1 */ append(H1, H2, Hs). /* Some other common searches for bounds... 
*/ search_for_upper_numeric_limit(X, U, Type, Hs) :- /*[[SrchUpp_7]]*/ limited_extended_infrule(X<=A+I, H1, 1, Lim), /* X<=A+I, and */ \+ base_rational(A), ( int(I) ; Type = real, strict_rational(I) ), limited_extended_infrule(A<=J, H2, Lim, _), /* A<=J, so X<=I+J */ ( int(J) ; Type = real, strict_rational(J) ), evaluate_rational_expression(I+J, U), append(H1, H2, Hs). search_for_upper_numeric_limit(X, U, Type, Hs) :- /*[[SrchUpp_8]]*/ limited_extended_infrule(X<=I+A, H1, 1, Lim), /* X<=A+I, and */ \+ base_rational(A), ( int(I) ; Type = real, strict_rational(I) ), limited_extended_infrule(A<=J, H2, Lim, _), /* A<=J, so X<=I+J */ ( int(J) ; Type = real, strict_rational(J) ), evaluate_rational_expression(I+J, U), append(H1, H2, Hs). search_for_upper_numeric_limit(X, U, Type, Hs) :- /*[[SrchUpp_9]]*/ limited_extended_infrule(X<=A-I, H1, 1, Lim), /* X<=A-I, and */ \+ base_rational(A), ( int(I) ; Type = real, strict_rational(I) ), limited_extended_infrule(A<=J, H2, Lim, _), /* A<=J, so X<=J-I */ ( int(J) ; Type = real, strict_rational(J) ), evaluate_rational_expression(J-I, U), append(H1, H2, Hs). search_for_upper_numeric_limit(X, U, integer, Hs) :- /*[[SrchUpp_10(int)]]*/ infrule(X*I<=J, Hs), /* X*I<=J (I,J integers), and */ int(I), int(J), simplify(I>0, true), /* I > 0, so */ U iss J div I. /* X <= J div I */ search_for_upper_numeric_limit(X, U, integer, Hs) :- /*[[SrchUpp_11(int)]]*/ infrule(I*X<=J, Hs), /* I*X<=J (I,J integers), and */ int(I), int(J), simplify(I>0, true), /* I > 0, so */ U iss J div I. /* X <= J div I */ search_for_upper_numeric_limit(X, U, integer, Hs) :- /*[[SrchUpp_12(int)]]*/ infrule((X+N)*I<=J, Hs), /* (X+N)*I<=J (I,J,N integers), and */ int(I), int(J), int(N), simplify(I>0, true), /* I > 0, so */ U iss J div I - N. /* X <= J div I - N */ search_for_upper_numeric_limit(X, U, integer, Hs) :- /*[[SrchUpp_13(int)]]*/ infrule(I*(X+N)<=J, Hs), /* I*(X+N)<=J (I,J,N integers), and */ int(I), int(J), int(N), simplify(I>0, true), /* I > 0, so */ U iss J div I - N. /* X <= J div I - N */ search_for_upper_numeric_limit(X, U, integer, Hs) :- /*[[SrchUpp_14(int)]]*/ infrule((N+X)*I<=J, Hs), /* (N+X)*I<=J (I,J,N integers), and */ int(I), int(J), int(N), simplify(I>0, true), /* I > 0, so */ U iss J div I - N. /* X <= J div I - N */ search_for_upper_numeric_limit(X, U, integer, Hs) :- /*[[SrchUpp_15(int)]]*/ infrule(I*(N+X)<=J, Hs), /* I*(N+X)<=J (I,J,N integers), and */ int(I), int(J), int(N), simplify(I>0, true), /* I > 0, so */ U iss J div I - N. /* X <= J div I - N */ search_for_upper_numeric_limit(X, U, integer, Hs) :- /*[[SrchUpp_16(int)]]*/ infrule((X-N)*I<=J, Hs), /* (X-N)*I<=J (I,J,N integers), and */ int(I), int(J), int(N), simplify(I>0, true), /* I > 0, so */ U iss J div I + N. /* X <= J div I + N */ search_for_upper_numeric_limit(X, U, integer, Hs) :- /*[[SrchUpp_17(int)]]*/ infrule(I*(X-N)<=J, Hs), /* I*(X-N)<=J (I,J,N integers), and */ int(I), int(J), int(N), simplify(I>0, true), /* I > 0, so */ U iss J div I + N. /* X <= J div I + N */ search_for_upper_numeric_limit(X, U, real, Hs) :- /*[[SrchUpp_10(real)]]*/ infrule(X*I<=J, Hs), /* X*I<=J (I,J literals), and */ base_rational(I), base_rational(J), simplify(I>0, true), /* I > 0, so */ evaluate_rational_expression(J/I, U). /* X <= J/I */ search_for_upper_numeric_limit(X, U, real, Hs) :- /*[[SrchUpp_11(real)]]*/ infrule(I*X<=J, Hs), /* I*X<=J (I,J literals), and */ base_rational(I), base_rational(J), simplify(I>0, true), /* I > 0, so */ evaluate_rational_expression(J/I, U). 
/* X <= J/I */ search_for_upper_numeric_limit(X, U, real, Hs) :- /*[[SrchUpp_12(real)]]*/ infrule((X+N)*I<=J, Hs), /* (X+N)*I<=J (I,J,N literals), and */ base_rational(I), base_rational(J), base_rational(N), simplify(I>0, true), /* I > 0, so */ evaluate_rational_expression(J/I - N, U). /* X <= J/I - N */ search_for_upper_numeric_limit(X, U, real, Hs) :- /*[[SrchUpp_13(real)]]*/ infrule(I*(X+N)<=J, Hs), /* I*(X+N)<=J (I,J,N literals), and */ base_rational(I), base_rational(J), base_rational(N), simplify(I>0, true), /* I > 0, so */ evaluate_rational_expression(J/I - N, U). /* X <= J/I - N */ search_for_upper_numeric_limit(X, U, real, Hs) :- /*[[SrchUpp_14(real)]]*/ infrule((N+X)*I<=J, Hs), /* (N+X)*I<=J (I,J,N literals), and */ base_rational(I), base_rational(J), base_rational(N), simplify(I>0, true), /* I > 0, so */ evaluate_rational_expression(J/I - N, U). /* X <= J/I - N */ search_for_upper_numeric_limit(X, U, real, Hs) :- /*[[SrchUpp_15(real)]]*/ infrule(I*(N+X)<=J, Hs), /* I*(N+X)<=J (I,J,N literals), and */ base_rational(I), base_rational(J), base_rational(N), simplify(I>0, true), /* I > 0, so */ evaluate_rational_expression(J/I - N, U). /* X <= J/I - N */ search_for_upper_numeric_limit(X, U, real, Hs) :- /*[[SrchUpp_16(real)]]*/ infrule((X-N)*I<=J, Hs), /* (X-N)*I<=J (I,J,N literals), and */ base_rational(I), base_rational(J), base_rational(N), simplify(I>0, true), /* I > 0, so */ evaluate_rational_expression(J/I + N, U). /* X <= J/I + N */ search_for_upper_numeric_limit(X, U, real, Hs) :- /*[[SrchUpp_17(real)]]*/ infrule(I*(X-N)<=J, Hs), /* I*(X-N)<=J (I,J,N literals), and */ base_rational(I), base_rational(J), base_rational(N), simplify(I>0, true), /* I > 0, so */ evaluate_rational_expression(J/I + N, U). /* X <= J/I + N */ %=============================================================================== % calculate_known_lower_limit_for(+EXPRESSION, -NUM, +TYPE, -HYPS_USED). %------------------------------------------------------------------------------- % Determine a lower numeric limit for EXPRESSION of provided type TYPE as % NUM, reporting hypotheses used as HYPS_USED. Here, the limit is primarily % calcuated through deeper reasoning and exploiting additional information % (hypotheses). %=============================================================================== calculate_known_lower_limit_for(X, _, Type, _) :- search_for_lower_numeric_limit(X, L, Type, Hs), assertz(candidate_lower(X, L, Type, Hs)), fail. calculate_known_lower_limit_for(X, L, Type, Hs) :- candidate_lower(X, L, Type, Hs), \+ (( candidate_lower(X, N, Type, _), simplify(N>L, true) )), /* so there's no *larger* lower */ !, assertz(known_lower_numeric_limit(X, L, Type, Hs)), retractall(candidate_lower(X, _, Type, _)), !. %------------------------------------------------------------------------------- search_for_lower_numeric_limit(X, L, Type, Hs) :- /*[[SrchLow_1]]*/ infrule(X>=L, Hs), ( int(L) ; Type = real, strict_rational(L) ). search_for_lower_numeric_limit(X, L, Type, Hs) :- /*[[SrchLow_2]]*/ limited_extended_infrule(X>=A, H1, 1, Lim), \+ base_rational(A), limited_extended_infrule(A>=L, H2, Lim, _), ( int(L) ; Type = real, strict_rational(L) ), append(H1, H2, Hs). search_for_lower_numeric_limit(X, L, Type, Hs) :- /*[[SrchLow_3]]*/ limited_extended_infrule(X>=A, H1, 1, Lim1), /* Go at most three deep, transitively. 
*/ \+ base_rational(A), limited_extended_infrule(A>=B, H2, Lim1, Lim2), \+ base_rational(B), limited_extended_infrule(B>=L, H3, Lim2, _), ( int(L) ; Type = real, strict_rational(L) ), append(H2, H3, Hrest), append(H1, Hrest, Hs). search_for_lower_numeric_limit(X, L, integer, Hs) :- /*[[SrchLow_4]]*/ infrule(X>XL, Hs), int(XL), L iss XL+1. /* since if X>XL, then X>=XL+1 */ search_for_lower_numeric_limit(X, L, integer, Hs) :- /*[[SrchLow_5]]*/ limited_extended_infrule(X>A, H1, 1, Lim), \+ int(A), limited_extended_infrule(A>=XL, H2, Lim, _), int(XL), L iss XL+1, /* since X>A>=XL -> X>=XL+1 */ append(H1, H2, Hs). search_for_lower_numeric_limit(X, L, integer, Hs) :- /*[[SrchLow_6]]*/ limited_extended_infrule(X>=A, H1, 1, Lim), \+ int(A), limited_extended_infrule(A>XL, H2, Lim, _), int(XL), L iss XL+1, /* since X>=A>XL -> X>=XL+1 */ append(H1, H2, Hs). /* Some other common searches for bounds... */ search_for_lower_numeric_limit(X, L, Type, Hs) :- /*[[SrchLow_7]]*/ limited_extended_infrule(X>=A+I, H1, 1, Lim), /* X>=A+I, and */ \+ base_rational(A), ( int(I) ; Type = real, strict_rational(I) ), limited_extended_infrule(A>=J, H2, Lim, _), /* A>=J, so X>=I+J */ ( int(J) ; Type = real, strict_rational(J) ), evaluate_rational_expression(I+J, L), append(H1, H2, Hs). search_for_lower_numeric_limit(X, L, Type, Hs) :- /*[[SrchLow_8]]*/ limited_extended_infrule(X>=I+A, H1, 1, Lim), /* X>=A+I, and */ \+ base_rational(A), ( int(I) ; Type = real, strict_rational(I) ), limited_extended_infrule(A>=J, H2, Lim, _), /* A>=J, so X>=I+J */ ( int(J) ; Type = real, strict_rational(J) ), evaluate_rational_expression(I+J, L), append(H1, H2, Hs). search_for_lower_numeric_limit(X, L, Type, Hs) :- /*[[SrchLow_9]]*/ limited_extended_infrule(X>=A-I, H1, 1, Lim), /* X>=A-I, and */ \+ base_rational(A), ( int(I) ; Type = real, strict_rational(I) ), limited_extended_infrule(A>=J, H2, Lim, _), /* A>=J, so X>=J-I */ ( int(J) ; Type = real, strict_rational(J) ), evaluate_rational_expression(J-I, L), append(H1, H2, Hs). search_for_lower_numeric_limit(X, L, integer, Hs) :- /*[[SrchLow_10(int)]]*/ infrule(X*I>=J, H1), /* X*I>=J (I,J integers), and */ safe_deduce(X >= 1, integer, H2), /* X >= 1, and */ int(I), int(J), simplify(I>0, true), /* I > 0, so */ L iss (J-1) div I + 1, /* X >= (J-1) div I + 1 */ append(H1, H2, HL), sort(HL, Hs). search_for_lower_numeric_limit(X, L, integer, Hs) :- /*[[SrchLow_11(int)]]*/ infrule(I*X>=J, H1), /* X*I>=J (I,J integers), and */ safe_deduce(X >= 1, integer, H2), /* X >= 1, and */ int(I), int(J), simplify(I>0, true), /* I > 0, so */ L iss (J-1) div I + 1, /* X >= (J-1) div I + 1 */ append(H1, H2, HL), sort(HL, Hs). search_for_lower_numeric_limit(X, L, integer, Hs) :- /*[[SrchLow_12(int)]]*/ infrule((X+N)*I>=J, H1), /* (X+N)*I>=J (I,J,N integers), and */ int(N), OneMinusN iss 1-N, safe_deduce(X >= OneMinusN, integer, H2), /* X >= 1-N, so X+N>=1, and */ int(I), int(J), simplify(I>0, true), /* I > 0, so */ L iss (J-1) div I - N + 1, /* X >= (J-1) div I - N + 1 */ append(H1, H2, HL), sort(HL, Hs). search_for_lower_numeric_limit(X, L, integer, Hs) :- /*[[SrchLow_13(int)]]*/ infrule(I*(X+N)>=J, H1), /* I*(X+N)>=J (I,J,N integers), and */ int(N), OneMinusN iss 1-N, safe_deduce(X >= OneMinusN, integer, H2), /* X >= 1-N, so X+N>=1, and */ int(I), int(J), simplify(I>0, true), /* I > 0, so */ L iss (J-1) div I - N + 1, /* X >= (J-1) div I - N + 1 */ append(H1, H2, HL), sort(HL, Hs). 
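% Illustrative note (not part of the original source): a worked instance of
% the [[SrchLow_12(int)]] pattern above, with all literals positive.
% Given the hypothesis (X+2)*3 >= 10 together with X >= -1 (so X+2 >= 1),
% the clause computes L = (J-1) div I - N + 1 = (10-1) div 3 - 2 + 1 = 2,
% i.e. X >= 2, which matches X+2 >= ceiling(10/3) = 4.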
search_for_lower_numeric_limit(X, L, integer, Hs) :- /*[[SrchLow_14(int)]]*/ infrule((N+X)*I>=J, H1), /* (N+X)*I>=J (I,J,N integers), and */ int(N), OneMinusN iss 1-N, safe_deduce(X >= OneMinusN, integer, H2), /* X >= 1-N, so X+N>=1, and */ int(I), int(J), simplify(I>0, true), /* I > 0, so */ L iss (J-1) div I - N + 1, /* X >= (J-1) div I - N + 1 */ append(H1, H2, HL), sort(HL, Hs). search_for_lower_numeric_limit(X, L, integer, Hs) :- /*[[SrchLow_15(int)]]*/ infrule(I*(N+X)>=J, H1), /* I*(N+X)>=J (I,J,N integers), and */ int(N), OneMinusN iss 1-N, safe_deduce(X >= OneMinusN, integer, H2), /* X >= 1-N, so X+N>=1, and */ int(I), int(J), simplify(I>0, true), /* I > 0, so */ L iss (J-1) div I - N + 1, /* X >= (J-1) div I - N + 1 */ append(H1, H2, HL), sort(HL, Hs). search_for_lower_numeric_limit(X, L, integer, Hs) :- /*[[SrchLow_16(int)]]*/ infrule((X-N)*I>=J, H1), /* (X-N)*I>=J (I,J,N integers), and */ int(N), Nplus1 iss N+1, safe_deduce(X >= Nplus1, integer, H2), /* X >= N+1, so X-N>=1, and */ int(I), int(J), simplify(I>0, true), /* I > 0, so */ L iss (J-1) div I + N + 1, /* X >= (J-1) div I + N + 1 */ append(H1, H2, HL), sort(HL, Hs). search_for_lower_numeric_limit(X, L, integer, Hs) :- /*[[SrchLow_17(int)]]*/ infrule(I*(X-N)>=J, H1), /* I*(X-N)>=J (I,J,N integers), and */ int(N), Nplus1 iss N+1, safe_deduce(X >= Nplus1, integer, H2), /* X >= N+1, so X-N>=1, and */ int(I), int(J), simplify(I>0, true), /* I > 0, so */ L iss (J-1) div I + N + 1, /* X <= I div J + N */ append(H1, H2, HL), sort(HL, Hs). search_for_lower_numeric_limit(X, L, integer, Hs) :- /*[[SrchLow_18(int)]]*/ infrule(X*I>=J, H1), /* X*I>=J (I,J integers), and */ safe_deduce(X <= 0, integer, H2), /* X <= 0, and */ int(I), int(J), simplify(I>0, true), /* I > 0, so */ L iss (J-I) div I + 1, /* X >= (J-I) div I + 1 */ append(H1, H2, HL), sort(HL, Hs). search_for_lower_numeric_limit(X, L, integer, Hs) :- /*[[SrchLow_19(int)]]*/ infrule(I*X>=J, H1), /* X*I>=J (I,J integers), and */ safe_deduce(X <= 0, integer, H2), /* X <= 0, and */ int(I), int(J), simplify(I>0, true), /* I > 0, so */ L iss (J-I) div I + 1, /* X >= (J-I) div I + 1 */ append(H1, H2, HL), sort(HL, Hs). search_for_lower_numeric_limit(X, L, integer, Hs) :- /*[[SrchLow_20(int)]]*/ infrule((X+N)*I>=J, H1), /* (X+N)*I>=J (I,J,N integers), and */ int(N), MinusN iss -N, safe_deduce(X <= MinusN, integer, H2), /* X <= -N, so X+N<=0, and */ int(I), int(J), simplify(I>0, true), /* I > 0, so */ L iss (J-I) div I - N + 1, /* X >= (J-I) div I - N + 1 */ append(H1, H2, HL), sort(HL, Hs). search_for_lower_numeric_limit(X, L, integer, Hs) :- /*[[SrchLow_21(int)]]*/ infrule(I*(X+N)>=J, H1), /* I*(X+N)>=J (I,J,N integers), and */ int(N), MinusN iss -N, safe_deduce(X <= MinusN, integer, H2), /* X <= -N, so X+N<=0, and */ int(I), int(J), simplify(I>0, true), /* I > 0, so */ L iss (J-I) div I - N + 1, /* X >= (J-1) div I - N + 1 */ append(H1, H2, HL), sort(HL, Hs). search_for_lower_numeric_limit(X, L, integer, Hs) :- /*[[SrchLow_22(int)]]*/ infrule((N+X)*I>=J, H1), /* (N+X)*I>=J (I,J,N integers), and */ int(N), MinusN iss -N, safe_deduce(X <= MinusN, integer, H2), /* X <= -N, so X+N<=0, and */ int(I), int(J), simplify(I>0, true), /* I > 0, so */ L iss (J-I) div I - N + 1, /* X >= (J-1) div I - N + 1 */ append(H1, H2, HL), sort(HL, Hs). 
search_for_lower_numeric_limit(X, L, integer, Hs) :- /*[[SrchLow_23(int)]]*/ infrule(I*(N+X)>=J, H1), /* I*(N+X)>=J (I,J,N integers), and */ int(N), MinusN iss -N, safe_deduce(X <= MinusN, integer, H2), /* X <= -N, so X+N<=0, and */ int(I), int(J), simplify(I>0, true), /* I > 0, so */ L iss (J-I) div I - N + 1, /* X >= (J-1) div I - N + 1 */ append(H1, H2, HL), sort(HL, Hs). search_for_lower_numeric_limit(X, L, integer, Hs) :- /*[[SrchLow_24(int)]]*/ infrule((X-N)*I>=J, H1), /* (X-N)*I>=J (I,J,N integers), and */ int(N), safe_deduce(X <= N, integer, H2), /* X <= N, so X-N<=0, and */ int(I), int(J), simplify(I>0, true), /* I > 0, so */ L iss (J-I) div I + N + 1, /* X >= (J-1) div I + N + 1 */ append(H1, H2, HL), sort(HL, Hs). search_for_lower_numeric_limit(X, L, integer, Hs) :- /*[[SrchLow_25(int)]]*/ infrule(I*(X-N)>=J, H1), /* I*(X-N)>=J (I,J,N integers), and */ int(N), safe_deduce(X <= N, integer, H2), /* X <= N, so X-N<=0, and */ int(I), int(J), simplify(I>0, true), /* I > 0, so */ L iss (J-I) div I + N + 1, /* X <= I div J + N */ append(H1, H2, HL), sort(HL, Hs). search_for_lower_numeric_limit(X, L, real, Hs) :- /*[[SrchLow_10(real)]]*/ infrule(X*I>=J, H1), /* X*I>=J (I,J literals), and */ base_rational(I), base_rational(J), simplify(I>0, true), /* I > 0, so */ evaluate_rational_expression(J/I, L), /* X >= J/I */ sort(H1, Hs). search_for_lower_numeric_limit(X, L, real, Hs) :- /*[[SrchLow_11(real)]]*/ infrule(I*X>=J, H1), /* X*I>=J (I,J literals), and */ base_rational(I), base_rational(J), simplify(I>0, true), /* I > 0, so */ evaluate_rational_expression(J/I, L), /* X >= J/I */ sort(H1, Hs). search_for_lower_numeric_limit(X, L, real, Hs) :- /*[[SrchLow_12(real)]]*/ infrule((X+N)*I>=J, H1), /* (X+N)*I>=J (I,J,N literals), and */ base_rational(I), base_rational(J), base_rational(N), simplify(I>0, true), /* I > 0, so */ evaluate_rational_expression(J/I - N, L), /* X >= J/I - N */ sort(H1, Hs). search_for_lower_numeric_limit(X, L, real, Hs) :- /*[[SrchLow_13(real)]]*/ infrule(I*(X+N)>=J, H1), /* I*(X+N)>=J (I,J,N literals), and */ base_rational(I), base_rational(J), base_rational(N), simplify(I>0, true), /* I > 0, so */ evaluate_rational_expression(J/I - N, L), /* X >= J/I - N */ sort(H1, Hs). search_for_lower_numeric_limit(X, L, real, Hs) :- /*[[SrchLow_14(real)]]*/ infrule((N+X)*I>=J, H1), /* (N+X)*I>=J (I,J,N literals), and */ base_rational(I), base_rational(J), base_rational(N), simplify(I>0, true), /* I > 0, so */ evaluate_rational_expression(J/I - N, L), /* X >= J/I - N */ sort(H1, Hs). search_for_lower_numeric_limit(X, L, real, Hs) :- /*[[SrchLow_15(real)]]*/ infrule(I*(N+X)>=J, H1), /* I*(N+X)>=J (I,J,N literals), and */ base_rational(I), base_rational(J), base_rational(N), simplify(I>0, true), /* I > 0, so */ evaluate_rational_expression(J/I - N, L), /* X >= J/I - N */ sort(H1, Hs). search_for_lower_numeric_limit(X, L, real, Hs) :- /*[[SrchLow_16(real)]]*/ infrule((X-N)*I>=J, H1), /* (X-N)*I>=J (I,J,N literals), and */ base_rational(I), base_rational(J), base_rational(N), simplify(I>0, true), /* I > 0, so */ evaluate_rational_expression(J/I + N, L), /* X >= J/I + N */ sort(H1, Hs). search_for_lower_numeric_limit(X, L, real, Hs) :- /*[[SrchLow_17(real)]]*/ infrule(I*(X-N)>=J, H1), /* I*(X-N)>=J (I,J,N literals), and */ base_rational(I), base_rational(J), base_rational(N), simplify(I>0, true), /* I > 0, so */ evaluate_rational_expression(J/I + N, L), /* X >= J/I + N */ sort(H1, Hs). 
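% -----------------------------------------------------------------------------
% Illustrative sketch (not part of the original source): the transitive
% bound-chasing idea behind [[SrchLow_1]]..[[SrchLow_3]] above, reduced to a
% stand-alone toy.  The names example_hyp/1 and example_lower_bound/2 are
% hypothetical and exist only for this sketch; the real search works through
% infrule/2 and limited_extended_infrule/4 and limits the depth of the chain.
% -----------------------------------------------------------------------------

example_hyp(x >= y).                       % toy hypotheses: x >= y >= z >= 3
example_hyp(y >= z).
example_hyp(z >= 3).

example_lower_bound(X, L) :-               % X >= L, with L an integer literal
   example_hyp(X >= L),
   integer(L).
example_lower_bound(X, L) :-               % X >= A and, recursively, A >= L
   example_hyp(X >= A),
   \+ integer(A),
   example_lower_bound(A, L).

% ?- example_lower_bound(x, L).            % L = 3, via the chain x >= y >= z >= 3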
%=============================================================================== % limited_extended_infrule(+Goal, -HypsUsed, +StartLim, -EndLim). %------------------------------------------------------------------------------- % StartLim must be a non-negative integer. If it it zero, we've "used up" % our allowance of uses of extended_infrules (which use quantified % hypotheses) and may only use standard_infrules. If StartLim is strictly % positive, we may use extended_infrules too. The value of EndLim that is % returned by the predicate on succeeding is either StartLim (if no % extended_infrule was used) or StartLim-1 (if we did make use of an % extended_infrule). This returned limit value is used for the next call % of limited_extended_infrule within a chain of reasoning, so we can call % three infrules overall, and ensure that at most one of them is an % extended_infrule. This reduces the number of such matches, both for % efficiency of search and for termination within reasonable space and time % bounds. %=============================================================================== limited_extended_infrule(Goal, Hyps, StartLim, EndLim) :- StartLim = 0, EndLim = 0, !, standard_infrule(Goal, Hyps). limited_extended_infrule(Goal, Hyps, StartLim, EndLim) :- StartLim > 0, ( standard_infrule(Goal, Hyps), EndLim = StartLim ; extended_infrule(Goal, Hyps), EndLim is StartLim - 1 ). %=============================================================================== % calc_product_bounds(XL, XU, YL, YU, Type, Hxl, Hxu, Hyl, Hyu, U, HU, L, HL). %------------------------------------------------------------------------------- % Find the lower (L) and upper (U) limits on a product, and the hypotheses % used to deduce this. %=============================================================================== calc_product_bounds(XL, XU, YL, YU, integer, Hxl, Hxu, Hyl, Hyu, U, HU, L, HL) :- Bound1 iss XL * YL, Bound2 iss XL * YU, Bound3 iss XU * YL, Bound4 iss XU * YU, choose_max([Bound1, Bound2, Bound3, Bound4], U), choose_min([Bound1, Bound2, Bound3, Bound4], L), ( U = Bound1, append(Hxl, Hyl, HU) ; U = Bound2, append(Hxl, Hyu, HU) ; U = Bound3, append(Hxu, Hyl, HU) ; append(Hxu, Hyu, HU) ), !, ( L = Bound1, append(Hxl, Hyl, HL) ; L = Bound2, append(Hxl, Hyu, HL) ; L = Bound3, append(Hxu, Hyl, HL) ; append(Hxu, Hyu, HL) ), !. calc_product_bounds(XL, XU, YL, YU, real, Hxl, Hxu, Hyl, Hyu, U, HU, L, HL) :- evaluate_rational_expression(XL * YL, Bound1), evaluate_rational_expression(XL * YU, Bound2), evaluate_rational_expression(XU * YL, Bound3), evaluate_rational_expression(XU * YU, Bound4), choose_max([Bound1, Bound2, Bound3, Bound4], U), choose_min([Bound1, Bound2, Bound3, Bound4], L), ( U = Bound1, append(Hxl, Hyl, HU) ; U = Bound2, append(Hxl, Hyu, HU) ; U = Bound3, append(Hxu, Hyl, HU) ; append(Hxu, Hyu, HU) ), !, ( L = Bound1, append(Hxl, Hyl, HL) ; L = Bound2, append(Hxl, Hyu, HL) ; L = Bound3, append(Hxu, Hyl, HL) ; append(Hxu, Hyu, HL) ), !. %=============================================================================== % choose_max(+List, -Max). %------------------------------------------------------------------------------- % Return maximum number from List (of at least 2 elements). %=============================================================================== choose_max([A, B], Max) :- simplify(A >= B, true), !, Max = A. choose_max([A, B], Max) :- simplify(A < B, true), !, Max = B. choose_max([H|T], Max) :- choose_max(T, MaxT), !, ( simplify(H > MaxT, true), Max = H ; Max = MaxT ), !. 
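% -----------------------------------------------------------------------------
% Illustrative sketch (not part of the original source): the four-corner rule
% implemented by calc_product_bounds above, written against plain integer
% arithmetic so that it can be run on its own.  The name
% example_product_bounds/6 is hypothetical.
%
% If X lies in XL..XU and Y lies in YL..YU, then X*Y is bounded below and
% above by the minimum and maximum of the four corner products.
% -----------------------------------------------------------------------------

example_product_bounds(XL, XU, YL, YU, L, U) :-
   B1 is XL * YL,
   B2 is XL * YU,
   B3 is XU * YL,
   B4 is XU * YU,
   L is min(min(B1, B2), min(B3, B4)),     % greatest lower bound of the product
   U is max(max(B1, B2), max(B3, B4)).     % least upper bound of the product

% ?- example_product_bounds(-3, 2, 4, 5, L, U).   % L = -15, U = 10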
%===============================================================================
% choose_min(+List, -Min).
%-------------------------------------------------------------------------------
% Return minimum number from List (of at least 2 elements).
%===============================================================================

choose_min([A, B], Min) :-
   simplify(A >= B, true),
   !,
   Min = B.
choose_min([A, B], Min) :-
   simplify(A < B, true),
   !,
   Min = A.
choose_min([H|T], Min) :-
   choose_min(T, MinT),
   !,
   (
      simplify(H < MinT, true),
      Min = H
   ;
      Min = MinT
   ),
   !.

%===============================================================================
% find_largest_integer_literal_below(+Rational, -Integer).
%-------------------------------------------------------------------------------
% Return the largest integer below Rational [N.B. This is only ever called
% when Rational is a strict rational number, not an integer literal.]
%===============================================================================

find_largest_integer_literal_below(A/B, N) :-
   int(A),
   A >= 0,
   int(B),
   B > 0,
   !,
   N iss A div B.
find_largest_integer_literal_below(-(A/B), N) :-
   int(A),
   A > 0,
   int(B),
   B > 0,
   !,
   N iss -(A div B) - 1.

%===============================================================================
% find_smallest_integer_literal_above(+Rational, -Integer).
%-------------------------------------------------------------------------------
% Return the smallest integer above Rational [N.B. This is only ever called
% when Rational is a strict rational number, not an integer literal.]
%===============================================================================

find_smallest_integer_literal_above(R, N) :-
   find_largest_integer_literal_below(R, M),
   !,
   N iss M + 1.

%===============================================================================
% safe_deduce(+Goal, +Type, -Hyps).
%-------------------------------------------------------------------------------
% Call deduce, but via try_to_infer. [This is safe, because no infinite
% cycling is possible: try_to_infer imposes a recursive-call depth limit,
% and buffers and restores the used(_) facts as it goes.]
%===============================================================================

safe_deduce(Goal, _, Hyps) :-
   infer_subgoal(Goal, Hyps),
   !.

%===============================================================================
% strict_deduce(+Goal, +Type, -Hyps).
%-------------------------------------------------------------------------------
% Call infer_subgoal, but temporarily turn off the new (deduction and
% numeric) strategies, to prevent unbounded recursion.
%===============================================================================

strict_deduce(Goal, _, Hyps) :-
   inhibit_new_strategies(NeedToRestore),
   !,
   (
      infer_subgoal(Goal, Hyps),
      Success = true
   ;
      Success = fail
   ),
   !,
   restore_new_strategies(NeedToRestore),
   !,
   call(Success).

%-------------------------------------------------------------------------------

inhibit_new_strategies(true) :-
   retract(allow_new_strategies).
inhibit_new_strategies(false).

%-------------------------------------------------------------------------------

restore_new_strategies(true) :-
   asserta(allow_new_strategies).
restore_new_strategies(false).

%===============================================================================
% i_am_using_rule(RuleName).
%-------------------------------------------------------------------------------
% Always succeeds once. Allows the 'name' (e.g.
gt_trans_1) of the rule % being used to be observed when Simplifier is run in trace mode. Could % also be used if more information on the strategy were to be recorded, % e.g. for inclusion in the proof log -- this is not done at present. %=============================================================================== i_am_using_rule(_). %=============================================================================== % try_new_logic_strategies(+Formula, -Hs). %------------------------------------------------------------------------------- % Sucessfull where boolean typed Formula can be proved, reporting used % hypotheses as Hs. %=============================================================================== % Implication %------------ try_new_logic_strategies(Left -> Right, Hs) :- /*[[Logic_1]]*/ fetch_conjunction_list(Left, Ls), fetch_conjunction_list(Right, Rs), get_hyp(HLeft -> HRight, _, N), fetch_conjunction_list(HLeft, HLs), fetch_conjunction_list(HRight, HRs), establish_implies(Ls, HLs, H1), establish_implies(HRs, Rs, H2), !, append(H1, [N|H2], Hyps), sort(Hyps, Hs). % Equivalence %------------ try_new_logic_strategies(Left <-> Right, Hs) :- /*[[Logic_2]]*/ ( /* Transitivity of <-> and equivalence */ get_hyp(Left <-> Int, x, N) /* of A <-> B and (not A) <-> (not B). */ ; get_hyp(Int <-> Left, x, N) ; get_hyp((not Left) <-> NegIntL, x, N), simplify(not NegIntL, Int) ; get_hyp(NegIntL <-> (not Left), x, N), simplify(not NegIntL, Int) ), ( get_hyp(Right <-> Int, x, M) ; get_hyp(Int <-> Right, x, M) ; get_hyp((not Right) <-> NegIntR, x, N), simplify(not NegIntR, Int) ; get_hyp(NegIntR <-> (not Right), x, N), simplify(not NegIntR, Int) ), !, sort([M,N], Hs). try_new_logic_strategies(Left <-> Right, Hs) :- /*[[Logic_3]]*/ fetch_conjunction_list(Left, Ls), fetch_conjunction_list(Right, Rs), establish_implies(Ls, Rs, H1), /* so the Lefts imply the Rights... */ establish_implies(Rs, Ls, H2), /* ...and the Rights imply the Lefts */ !, append(H1, H2, Hyps), sort(Hyps, Hs). %=============================================================================== % fetch_conjunction_list(+Formula, List). %------------------------------------------------------------------------------- % Turn a Formula of the form: % A and B and (C1 or C2) and (D and E) % Into a list: % [A, B, (C1 or C2), D, E]. % (Order is not important.) %=============================================================================== fetch_conjunction_list(A and B, List) :- fetch_conjunction_list(A, As), fetch_conjunction_list(B, Bs), !, append(As, Bs, List). fetch_conjunction_list(not (A or B), List) :- /* N.B. This is (not A) and (not B) */ fetch_conjunction_list(not A, As), fetch_conjunction_list(not B, Bs), !, append(As, Bs, List). fetch_conjunction_list(A, [A]). %=============================================================================== % establish_implies(+LHS, +RHS, -Hs). %------------------------------------------------------------------------------- % Succeeds if the goals in list RHS all follow from the formulae in LHS, % together with the existing hypotheses. (Any hypotheses needed to % establish elements of RHS are included within the list of hypothesis % numbers Hs which is also returned.) %=============================================================================== establish_implies(List, [Goal|Goals], Hs) :- establish_atomic_implies(List, Goal, H1), !, establish_implies(List, Goals, H2), !, append(H1, H2, Hs). establish_implies(_, [], []). 
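% Illustrative note (not part of the original source): the effect of
% fetch_conjunction_list/2 above, using the simplifier's own operator syntax
% (hedged example queries, not executable outside the simplifier):
%
%   ?- fetch_conjunction_list(a and b and (c1 or c2) and (d and e), L).
%   L = [a, b, c1 or c2, d, e]
%
%   ?- fetch_conjunction_list(not (p or q), L).
%   L = [not p, not q]
%
% establish_implies/3 then checks each conjunct of the right-hand list
% against the left-hand list (plus the existing hypotheses) one at a time.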
%=============================================================================== % establish_atomic_implies(+List, +Goal, -Hs). %------------------------------------------------------------------------------- % Succeeds if the single goal Goal follows from the formulae in list List, % together with the existing hypotheses. (The numbers of any hypotheses % which are used to infer Goal are included in the returned value of list % Hs.) %=============================================================================== establish_atomic_implies(List, Goal, []) :- is_in(Goal, List), /* If Goal is in List, it follows from it */ !. establish_atomic_implies(_, Goal, Hs) :- safe_deduce_in_logical_strategies(Goal, Hs), /* If Goal holds anyway, it follows */ !. establish_atomic_implies(List, _, Hs) :- find_false_element_in(List, Hs), !. /* If List contains a contradiction, Goal follows */ establish_atomic_implies(List, Goal, []) :- is_relational_expression(Goal, Type), /* Type: integer, real or enum */ find_rel_exp_in(List, Exp, Type), /* Find something that may imply it */ establish_implication(Exp, Goal, Type), /* and check via appropriate mechanism */ !. establish_atomic_implies(List, B1 or B2, Hs) :- /* Disjunction with two elements */ is_in(A1 or A2, List), /* and similar disjunction in List */ ( establish_atomic_implies(A1, B1, H1), /* If (A1->B1) and (A2->B2), then */ establish_atomic_implies(A2, B2, H2) /* (A1 or A2) -> (B1 or B2). */ ; establish_atomic_implies(A1, B2, H1), /* If (A1->B2) and (A2->B1), then */ establish_atomic_implies(A2, B1, H2) /* (A1 or A2) -> (B1 or B2). */ ), !, append(H1, H2, Hs). establish_atomic_implies(List, B1 or B2 or B3, Hs) :- /* 3 elements [N.B. ignore 4+] */ is_in(A1 or A2 or A3, List), /* and 3-element disjunction in List */ ( establish_atomic_implies(A1, B1, H1), /* N.B. 1,2,3 and 2,1,3 patterns */ establish_atomic_implies(A2, B3, H2), /* are already covered by the 2- */ establish_atomic_implies(A3, B2, H3) /* element rule above in effect. */ ; establish_atomic_implies(A1, B2, H1), establish_atomic_implies(A2, B3, H2), establish_atomic_implies(A3, B1, H3) ; establish_atomic_implies(A1, B3, H1), ( establish_atomic_implies(A2, B2, H2), establish_atomic_implies(A3, B1, H3) ; establish_atomic_implies(A2, B1, H2), establish_atomic_implies(A3, B2, H3) ) ), !, append(H2, H3, HL), append(H1, HL, Hs). %=============================================================================== % is_relational_expression(+XopY, +Type). %------------------------------------------------------------------------------- % Succeed if XopY is a relational expression of type Type. %=============================================================================== is_relational_expression(X=Y, Type) :- find_mutual_types(X, Y, Type). is_relational_expression(X<>Y, Type) :- find_mutual_types(X, Y, Type). is_relational_expression(X>=Y, Type) :- find_mutual_types(X, Y, Type). is_relational_expression(X<=Y, Type) :- find_mutual_types(X, Y, Type). is_relational_expression(X>Y, Type) :- find_mutual_types(X, Y, Type). is_relational_expression(X Y=X */ establish_implication(X<>Y, Y<>X, _). /* (b) X<>Y -> Y<>X */ establish_implication(X>=Y, Y<=X, _). /* (c) X>=Y -> Y<=X */ establish_implication(X<=Y, Y>=X, _). /* (d) X<=Y -> Y>=X */ establish_implication(X>Y, YY -> YX, _). /* (f) X Y>X */ % (2): simple 'weakening' cases %------------------------------ establish_implication(X=Y, X>=Y, _). /* (a) X=Y -> X>=Y */ establish_implication(X=Y, X<=Y, _). /* (b) X=Y -> X<=Y */ establish_implication(X=Y, Y>=X, _). 
/* (c) X=Y -> Y>=X */ establish_implication(X=Y, Y<=X, _). /* (d) X=Y -> Y<=X */ establish_implication(X>Y, X>=Y, _). /* (e) X>Y -> X>=Y */ establish_implication(X>Y, Y<=X, _). /* (f) X>Y -> Y<=X */ establish_implication(X>Y, X<>Y, _). /* (g) X>Y -> X<>Y */ establish_implication(X>Y, Y<>X, _). /* (h) X>Y -> Y<>X */ establish_implication(X X<=Y */ establish_implication(X=X, _). /* (j) X Y>=X */ establish_implication(XY, _). /* (k) X X<>Y */ establish_implication(XX, _). /* (l) X Y<>X */ % (3): X=E1 and E1 relop E2 %-------------------------- % Cases: % (3)(a)[1] (X=E1 and E1=E2) -> X=E2 : covered by: unit & (1)(a) % (3)(a)[2] (X=E1 and E1=E2) -> X>=E2 : covered by: (2)(a)..(2)(d) % (3)(a)[3] (X=E1 and E1=E2) -> X<=E2 : covered by: (2)(a)..(2)(d) % (3)(b) (X=E1 and E1>=E2) -> X>=E2 : implemented below % (3)(c) (X=E1 and E1<=E2) -> X<=E2 : implemented below % (3)(d)[1] (X=E1 and E1>E2) -> X>E2 : implemented below % (3)(d)[2] (X=E1 and E1>E2) -> X>=E2 : subsumed by (3)(b) % (3)(d)[3] (X=E1 and E1>E2) -> X<>E2 : subsumed by (3)(f) % (3)(e)[1] (X=E1 and E1 X X<=E2 : subsumed by (3)(c) % (3)(e)[3] (X=E1 and E1 X<>E2 : subsumed by (3)(f) % (3)(f) (X=E1 and E1<>E2) -> X<>E2 : implemented below. % (3)(a): cases omitted, as above. % (3)(b): (X=E1 and E1>=E2) -> X>=E2 establish_implication(X=E1, X>=E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>=E2, true). establish_implication(X=E1, E2<=X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>=E2, true). establish_implication(E1=X, X>=E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>=E2, true). establish_implication(E1=X, E2<=X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>=E2, true). % (3)(c): (X=E1 and E1<=E2) -> X<=E2 establish_implication(X=E1, X<=E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<=E2, true). establish_implication(X=E1, E2>=X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<=E2, true). establish_implication(E1=X, X<=E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<=E2, true). establish_implication(E1=X, E2>=X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<=E2, true). % (3)(d)[1]: (X=E1 and E1>E2) -> X>E2 establish_implication(X=E1, X>E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>E2, true). establish_implication(X=E1, E2E2, true). establish_implication(E1=X, X>E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>E2, true). establish_implication(E1=X, E2E2, true). % (3)(d)[2]: (X=E1 and E1>E2) -> X>=E2: subsumed by (3)(b) % (3)(d)[3]: (X=E1 and E1>E2) -> X<>E2: subsumed by (3)(f) % (3)(e)[1]: (X=E1 and E1 XX, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1 X<=E2: subsumed by (3)(c) % (3)(e)[3]: (X=E1 and E1 X<>E2: subsumed by (3)(f) % (3)(f): (X=E1 and E1<>E2) -> X<>E2 establish_implication(X=E1, X<>E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<>E2, true). establish_implication(X=E1, E2<>X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<>E2, true). establish_implication(E1=X, X<>E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<>E2, true). establish_implication(E1=X, E2<>X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<>E2, true). 
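% Illustrative note (not part of the original source): a concrete instance of
% the case (3) clauses above.  With a hypothesis X = 5 in scope and the goal
% X >= 3, clause (3)(b) applies with E1 = 5 and E2 = 3: both are integer
% literals and simplify(5 >= 3, true) holds, so the implication is accepted.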
% (4): X>=E1 and E1 relop E2
%---------------------------
% Cases:
% (4)(a) (X>=E1 and E1=E2) -> X>=E2 : covered by: unit & (1)(c)..(1)(d)
% (4)(b) (X>=E1 and E1>=E2) -> X>=E2 : implemented below
% (4)(c) (X>=E1 and E1<=E2) -> ----- : doesn't imply anything
% (4)(d)[1] (X>=E1 and E1>E2) -> X>E2 : implemented below
% (4)(d)[2] (X>=E1 and E1>E2) -> X>=E2 : subsumed by (4)(b)
% (4)(d)[3] (X>=E1 and E1>E2) -> X<>E2 : implemented below
% (4)(e) (X>=E1 and E1<E2) -> ----- : doesn't imply anything
% (4)(f) (X>=E1 and E1<>E2) -> ----- : doesn't imply anything.

% (4)(a): cases omitted, as above.

% (4)(b): (X>=E1 and E1>=E2) -> X>=E2
establish_implication(X>=E1, X>=E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>=E2, true).
establish_implication(X>=E1, E2<=X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>=E2, true).
establish_implication(E1<=X, X>=E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>=E2, true).
establish_implication(E1<=X, E2<=X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>=E2, true).

% (4)(c): (X>=E1 and E1<=E2): omitted

% (4)(d)[1]: (X>=E1 and E1>E2) -> X>E2
establish_implication(X>=E1, X>E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>E2, true).
establish_implication(X>=E1, E2<X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>E2, true).
establish_implication(E1<=X, X>E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>E2, true).
establish_implication(E1<=X, E2<X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>E2, true).

% (4)(d)[2]: (X>=E1 and E1>E2) -> X>=E2: subsumed by (4)(b)

% (4)(d)[3]: (X>=E1 and E1>E2) -> X<>E2
establish_implication(X>=E1, X<>E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>E2, true).
establish_implication(X>=E1, E2<>X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>E2, true).
establish_implication(E1<=X, X<>E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>E2, true).
establish_implication(E1<=X, E2<>X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>E2, true).

% (4)(e): (X>=E1 and E1<E2): omitted
% (4)(f): (X>=E1 and E1<>E2): omitted

% (5): X<=E1 and E1 relop E2
%---------------------------
% Cases:
% (5)(a) (X<=E1 and E1=E2) -> X<=E2 : covered by: unit & (1)(c)..(1)(d)
% (5)(b) (X<=E1 and E1>=E2) -> ----- : doesn't imply anything
% (5)(c) (X<=E1 and E1<=E2) -> X<=E2 : implemented below
% (5)(d) (X<=E1 and E1>E2) -> ----- : doesn't imply anything
% (5)(e)[1] (X<=E1 and E1<E2) -> X<E2 : implemented below
% (5)(e)[2] (X<=E1 and E1<E2) -> X<=E2 : subsumed by (5)(c)
% (5)(e)[3] (X<=E1 and E1<E2) -> X<>E2 : implemented below
% (5)(f) (X<=E1 and E1<>E2) -> ----- : doesn't imply anything.

% (5)(a): cases omitted, as above.
% (5)(b): (X<=E1 and E1>=E2): omitted

% (5)(c): (X<=E1 and E1<=E2) -> X<=E2
establish_implication(X<=E1, X<=E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<=E2, true).
establish_implication(X<=E1, E2>=X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<=E2, true).
establish_implication(E1>=X, X<=E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<=E2, true).
establish_implication(E1>=X, E2>=X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<=E2, true).

% (5)(d): (X<=E1 and E1>E2): omitted

% (5)(e)[1]: (X<=E1 and E1<E2) -> X<E2
establish_implication(X<=E1, X<E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<E2, true).
establish_implication(X<=E1, E2>X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<E2, true).
establish_implication(E1>=X, X<E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<E2, true).
establish_implication(E1>=X, E2>X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<E2, true).

% (5)(e)[2]: (X<=E1 and E1<E2) -> X<=E2: subsumed by (5)(c)

% (5)(e)[3]: (X<=E1 and E1<E2) -> X<>E2
establish_implication(X<=E1, X<>E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<E2, true).
establish_implication(X<=E1, E2<>X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<E2, true).
establish_implication(E1>=X, X<>E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<E2, true).
establish_implication(E1>=X, E2<>X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<E2, true).

% (5)(f): (X<=E1 and E1<>E2): omitted

% (6): X>E1 and E1 relop E2
%--------------------------
% Cases:
% (6)(a)[1] (X>E1 and E1=E2) -> X>E2 : covered by: unit & (1)(e)..(1)(f)
% (6)(a)[2] (X>E1 and E1=E2) -> X>=E2 : covered by: (2)(e)..(2)(f) & (2)(i)..(2)(j)
% (6)(a)[3] (X>E1 and E1=E2) -> X<>E2 : covered by: (2)(g)..(2)(h) & (2)(k)..(2)(l)
% (6)(b)[1] (X>E1 and E1>=E2) -> X>E2 : implemented below
% (6)(b)[2] (X>E1 and E1>=E2) -> X>=E2 : implemented below
% (6)(b)[3] (X>E1 and E1>=E2) -> X<>E2 : implemented below
% (6)(c) (X>E1 and E1<=E2) -> ----- : doesn't imply anything
% (6)(d)[1] (X>E1 and E1>E2) -> X>E2 : subsumed by (6)(b)(1)
% (6)(d)[2] (X>E1 and E1>E2) -> X>=E2 : subsumed by (6)(b)(2)
% (6)(d)[3] (X>E1 and E1>E2) -> X<>E2 : subsumed by (6)(b)(3)
% (6)(e) (X>E1 and E1<E2) -> ----- : doesn't imply anything
% (6)(f) (X>E1 and E1<>E2) -> ----- : doesn't imply anything.

% (6)(a): cases omitted, as above.

% (6)(b)[1]: (X>E1 and E1>=E2) -> X>E2
establish_implication(X>E1, X>E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>=E2, true).
establish_implication(X>E1, E2<X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>=E2, true).
establish_implication(E1<X, X>E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>=E2, true).
establish_implication(E1<X, E2<X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>=E2, true).

% (6)(b)[2]: (X>E1 and E1>=E2) -> X>=E2
establish_implication(X>E1, X>=E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>=E2, true).
establish_implication(X>E1, E2<=X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>=E2, true).
establish_implication(E1<X, X>=E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>=E2, true).
establish_implication(E1<X, E2<=X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>=E2, true).

/* (6)(b)[3] (X>E1 and E1>=E2) -> X<>E2 */
establish_implication(X>E1, X<>E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>=E2, true).
establish_implication(X>E1, E2<>X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>=E2, true).
establish_implication(E1<X, X<>E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>=E2, true).
establish_implication(E1<X, E2<>X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1>=E2, true).

%(6)(c): (X>E1 and E1<=E2): omitted
%(6)(d)[1]: (X>E1 and E1>E2) -> X>E2: subsumed by (6)(b)(1)
%(6)(d)[2]: (X>E1 and E1>E2) -> X>=E2: subsumed by (6)(b)(2)
%(6)(d)[3]: (X>E1 and E1>E2) -> X<>E2: subsumed by (6)(b)(3)
%(6)(e): (X>E1 and E1<E2): omitted
%(6)(f): (X>E1 and E1<>E2): omitted

% (7): X<E1 and E1 relop E2
%--------------------------
% Cases:
% (7)(a)[1] (X<E1 and E1=E2) -> X<E2 : covered by: unit & (1)(e)..(1)(f)
% (7)(a)[2] (X<E1 and E1=E2) -> X<=E2 : covered by: (2)(e)..(2)(f) & (2)(i)..(2)(j)
% (7)(a)[3] (X<E1 and E1=E2) -> X<>E2 : covered by: (2)(e)..(2)(f) & (2)(i)..(2)(j)
% (7)(b) (X<E1 and E1>=E2) -> ----- : doesn't imply anything
% (7)(c)[1] (X<E1 and E1<=E2) -> X<E2 : implemented below
% (7)(c)[2] (X<E1 and E1<=E2) -> X<=E2 : implemented below
% (7)(c)[3] (X<E1 and E1<=E2) -> X<>E2 : implemented below
% (7)(d) (X<E1 and E1>E2) -> ----- : doesn't imply anything
% (7)(e)[1] (X<E1 and E1<E2) -> X<E2 : subsumed by (7)(c)(1)
% (7)(e)[2] (X<E1 and E1<E2) -> X<=E2 : subsumed by (7)(c)(2)
% (7)(e)[3] (X<E1 and E1<E2) -> X<>E2 : subsumed by (7)(c)(3)
% (7)(f) (X<E1 and E1<>E2) -> ----- : doesn't imply anything.

% (7)(c)[1]: (X<E1 and E1<=E2) -> X<E2
establish_implication(X<E1, X<E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<=E2, true).
establish_implication(X<E1, E2>X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<=E2, true).
establish_implication(E1>X, X<E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<=E2, true).
establish_implication(E1>X, E2>X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<=E2, true).

% (7)(c)[2]: (X<E1 and E1<=E2) -> X<=E2
establish_implication(X<E1, X<=E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<=E2, true).
establish_implication(X<E1, E2>=X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<=E2, true).
establish_implication(E1>X, X<=E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<=E2, true).
establish_implication(E1>X, E2>=X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<=E2, true).

% (7)(c)[3]: (X<E1 and E1<=E2) -> X<>E2
establish_implication(X<E1, X<>E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<=E2, true).
establish_implication(X<E1, E2<>X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<=E2, true).
establish_implication(E1>X, X<>E2, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<=E2, true).
establish_implication(E1>X, E2<>X, _) :- int_or_enum_lit(E1, T), int_or_enum_lit(E2, T), !, simplify(E1<=E2, true).

% (7)(d): (X<E1 and E1>E2): omitted
% (7)(e)[1]: (X<E1 and E1<E2) -> X<E2: subsumed by (7)(c)(1)
% (7)(e)[2]: (X<E1 and E1<E2) -> X<=E2: subsumed by (7)(c)(2)
% (7)(e)[3]: (X<E1 and E1<E2) -> X<>E2: subsumed by (7)(c)(3)
% (7)(f): (X<E1 and E1<>E2): omitted

% (8): X<>E1 and E1 relop E2
%---------------------------
% Cases:
% (8)(a) (X<>E1 and E1=E2) -> X<>E2 : covered by: unit & (1)(b)
% (8)(b) (X<>E1 and E1>=E2) -> ----- : doesn't imply anything
% (8)(c) (X<>E1 and E1<=E2) -> ----- : doesn't imply anything
% (8)(d) (X<>E1 and E1>E2) -> ----- : doesn't imply anything
% (8)(e) (X<>E1 and E1<E2) -> ----- : doesn't imply anything
% (8)(f) (X<>E1 and E1<>E2) -> ----- : doesn't imply anything.
% (So no clauses at all for these combinations.)

%###############################################################################
% END-OF-FILE

spark-2012.0.deb/simplifier/makelog.pro0000644000175000017500000015462011753202337016756 0ustar eugeneugen
% -----------------------------------------------------------------------------
% (C) Altran Praxis Limited
% -----------------------------------------------------------------------------
%
% The SPARK toolset is free software; you can redistribute it and/or modify it
% under terms of the GNU General Public License as published by the Free
% Software Foundation; either version 3, or (at your option) any later
% version. The SPARK toolset is distributed in the hope that it will be
% useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
% Public License for more details. You should have received a copy of the GNU
% General Public License distributed with the SPARK toolset; see file
% COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of
% the license.
%
% =============================================================================

%###############################################################################
% PURPOSE
%-------------------------------------------------------------------------------
% Provides support for outputting the simplifier log file.
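%
% Illustrative note (not part of the original source): a simplification step
% is recorded with assert_log_fact/2 and only rendered when the stored facts
% are later processed for the current VC. For example, asserting
%
%   assert_log_fact(proved_all, [])
%
% leaves a log_fact(proved_all, []) fact in the database (unless logging has
% been suppressed); when the VC's facts are written out,
% write_vc_entry(proved_all, []) prints the line "*** PROVED VC.", the
% "*** " marker coming from prefix(proof).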
%############################################################################### %############################################################################### % DEPENDENCIES %############################################################################### %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % assert_log_fact(+LogAction, +LogArgument). %------------------------------------------------------------------------------- % If a log file has been requested then store the log fact. These facts are % implicitly associated with the current cerification condition. %=============================================================================== assert_log_fact(_R, _F) :- no_log_file, /* don't assert anything if /nolog has been given */ !. assert_log_fact(R, F) :- assertz(log_fact(R, F)). %=============================================================================== % write_log_file_banner. %------------------------------------------------------------------------------- % Write out the log file banner. %=============================================================================== write_log_file_banner :- no_log_file, !. write_log_file_banner :- typecheck_only(on), !. write_log_file_banner :- typecheck_only(off), logfile_name(LOGFILE), telling(OLDOUT), tell(LOGFILE), current_output(Stream), display_banner(Stream), tell(OLDOUT), !. %=============================================================================== % close_log_file. %------------------------------------------------------------------------------- % Close the log file. %=============================================================================== close_log_file :- no_log_file, !. close_log_file :- typecheck_only(on), !. close_log_file :- typecheck_only(off), telling(OLDOUT), logfile_name(LOGFILE), tell(LOGFILE), told, tell(OLDOUT), !. %=============================================================================== % write_rules_read. %------------------------------------------------------------------------------- % Describe the user loaded rules. %=============================================================================== write_rules_read :- /* There is nothing to do if there are no user rule files. */ \+ log_fact(read_in_user_rule_file, _), !. write_rules_read :- \+ no_log_file, typecheck_only(off), /* There is at least 1 user defined rule file used */ logfile_name(LOGFILE), telling(OLDOUT), tell(LOGFILE), nl, nl, prefix(rules_read_sec), print('The following user defined rule files have been read:'), nl, write_the_rule_files, write_rule_syntax_errors, prefix(semantic_sec), print('No semantic checks are performed on the rules.'), nl, tell(OLDOUT), !. %------------------------------------------------------------------------------- write_the_rule_files :- retract(log_fact(read_in_user_rule_file, Name)), prefix(rulefile_read), print(Name), nl, fail. write_the_rule_files :- !. 
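%-------------------------------------------------------------------------------
% Illustrative sketch (not part of the original source): write_the_rule_files
% above uses the classic failure-driven loop - retract one log_fact/2 entry,
% print it, then fail so that backtracking retries the retract until no
% matching fact remains.  The hypothetical predicate below shows the same
% idiom in isolation; it is not called anywhere.

example_drain_rule_file_names :-
    retract(log_fact(read_in_user_rule_file, Name)),   % take one recorded file
    print(Name),
    nl,
    fail.                                               % backtrack for the next one
example_drain_rule_file_names :-
    !.                                                  % succeed once none are left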
%------------------------------------------------------------------------------- write_rule_syntax_errors :- /* There is nothing to do if there are no syntax errors - succeed. */ \+ log_fact(rule_syntax_error(_, _, _), _), !. write_rule_syntax_errors :- /* There is at least 1 syntax error in a rule file - need a header. */ prefix(syntax_error_sec), print('The rule files contain the following syntax errors:'), nl, write_syntax_errors, !. %------------------------------------------------------------------------------- write_syntax_errors :- retract(log_fact(rule_syntax_error(Complaint, Argument, File), _)), maybe_write_filename(File), print(' '), print(Complaint), nl, ( Argument = [] ; Argument \= [], print(' Involving: '), print(Argument), nl ), fail. write_syntax_errors :- !. %------------------------------------------------------------------------------- maybe_write_filename(Name) :- \+ syntax_error_in_file(Name), !, convert_file_for_display(Name, DisplayFile), prefix(syntax_err_rep), print(DisplayFile), nl, assertz(syntax_error_in_file(Name)). maybe_write_filename(_) :- !. %=============================================================================== % write_rules_read. %------------------------------------------------------------------------------- % Describe the facts recorded in processing a path function or verification % condition. %=============================================================================== write_log_facts :- no_log_file, retractall(log_fact(_, _)), !. write_log_facts :- typecheck_only(on), retractall(log_fact(_, _)), !. write_log_facts :- typecheck_only(off), logfile_name(LOGFILE), telling(OLDOUT), tell(LOGFILE), nl, nl, write_vc_or_pf_header, process_log_facts, tell(OLDOUT), !. %------------------------------------------------------------------------------- write_overall_rule_summary :- overall_rule_summary(_, _), !, /* At least 1 user rule has been used in the proof of this subprogram */ \+ no_log_file, typecheck_only(off), /* There is at least 1 user defined rule file used */ logfile_name(LOGFILE), telling(OLDOUT), tell(LOGFILE), nl, nl, prefix(overall_summary), print('Overall summary of VCs using user rules.'), nl, write_the_overall_summary, tell(OLDOUT). write_overall_rule_summary :- !. %------------------------------------------------------------------------------- write_the_overall_summary :- overall_rule_summary(File:_, _), !, convert_file_to_base_name(File, DisplayFile), prefix(rulefile), print(DisplayFile), nl, overall_summary_of_file(File), write_the_overall_summary. write_the_overall_summary :- !. %------------------------------------------------------------------------------- overall_summary_of_file(File) :- overall_rule_summary(File:Rule, _), !, prefix(rule), print(Rule), nl, overall_summary_of_rule(File:Rule), overall_summary_of_file(File). overall_summary_of_file(_) :- !. %------------------------------------------------------------------------------- overall_summary_of_rule(RuleId) :- retract(overall_rule_summary(RuleId, VCn)), !, /* At least one VC has been proved using this rule */ prefix(vcs), print(VCn), overall_summary_of_rule_rep(RuleId), nl. overall_summary_of_rule(_) :- !. %------------------------------------------------------------------------------- overall_summary_of_rule_rep(File:Rule) :- retract(overall_rule_summary(File:Rule, VCn)), print(', '), print(VCn), fail. overall_summary_of_rule_rep(_) :- !. 
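%-------------------------------------------------------------------------------
% Illustrative sketch (not part of the original source): maybe_write_filename
% above, and add_to_overall_summary / add_to_rule_summary later in this file,
% all rely on an "assert once" guard - output is produced and a marker fact
% asserted only if the marker is not already present.  A minimal standalone
% form of the idiom, using a hypothetical example_seen/1 marker:

:- dynamic example_seen/1.

example_note_once(Item) :-
    \+ example_seen(Item),          % not recorded yet ...
    !,
    assertz(example_seen(Item)).    % ... so record it now
example_note_once(_) :-
    !.                              % already recorded: succeed silently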
%------------------------------------------------------------------------------- write_vc_or_pf_header :- path_functions, write_path_functions_header, !. write_vc_or_pf_header :- print('@@@@@@@@@@ VC: '), vc_name(X), print(X), print(' @@@@@@@@@@'), nl, !. %------------------------------------------------------------------------------- /*** write_path_functions_header ***/ write_path_functions_header :- retract(stmt_line(L)), print(L), nl, fail. write_path_functions_header :- retract(succ_line(L)), print(L), nl, fail. write_path_functions_header :- retract(path_line(L)), print(L), nl, fail. write_path_functions_header :- !. %------------------------------------------------------------------------------- process_log_facts :- \+ log_fact(_, _), !, state_no_simplification_performed. process_log_facts :- retract(log_fact(ACTION, ARGUMENTS)), write_log_entry(ACTION, ARGUMENTS), nl, fail. process_log_facts :- !, summarise_user_rule_use. %------------------------------------------------------------------------------- state_no_simplification_performed :- print(' NO SIMPLIFICATION ACTIONS PERFORMED.'), nl, !. %------------------------------------------------------------------------------- write_log_entry(ACTION, ARGUMENTS) :- path_functions, !, write_pf_entry(ACTION, ARGUMENTS). write_log_entry(ACTION, ARGUMENTS) :- write_vc_entry(ACTION, ARGUMENTS). %------------------------------------------------------------------------------- summarise_user_rule_use :- rule_summary(_, _), !, nl, prefix(vc_number), current_vc_number(N), print(N), print(': Summary of user rule application.'), nl, summarise_user_rule_use_rep. summarise_user_rule_use :- !. %------------------------------------------------------------------------------- summarise_user_rule_use_rep :- rule_summary(File:_, _), !, convert_file_to_base_name(File, DisplayFile), prefix(rulefile), print(DisplayFile), nl, summary_of_file(File), summarise_user_rule_use_rep. summarise_user_rule_use_rep :- !. %------------------------------------------------------------------------------- summary_of_file(File) :- rule_summary(File:Rule, _), !, prefix(rule), print(Rule), nl, summary_of_rule(File:Rule), summary_of_file(File). summary_of_file(_) :- !. %------------------------------------------------------------------------------- summary_of_rule(RuleId) :- summary_of_conclusions(RuleId), summary_of_hypotheses(RuleId), add_to_overall_summary(RuleId), !. %------------------------------------------------------------------------------- add_to_overall_summary(RuleId) :- current_vc_number(VCn), \+ overall_rule_summary(RuleId, VCn), !, assertz(overall_rule_summary(RuleId, VCn)). add_to_overall_summary(_) :- !. %------------------------------------------------------------------------------- summary_of_conclusions(RuleId) :- retract(rule_summary(RuleId, conc(N))), !, /* At least one conclusion has been proved using this rule */ prefix(conclusion), print(N), summary_of_conclusions_rep(RuleId), nl. summary_of_conclusions(_) :- !. %------------------------------------------------------------------------------- summary_of_conclusions_rep(File:Rule) :- retract(rule_summary(File:Rule, conc(N))), print(', '), print(N), fail. summary_of_conclusions_rep(_) :- !. %------------------------------------------------------------------------------- summary_of_hypotheses(RuleId) :- retract(rule_summary(RuleId, hyp(N))), !, /* At least one hypothesis has been proved using this rule */ prefix(hypotheses), print(N), summary_of_hypotheses_rep(RuleId), nl. summary_of_hypotheses(_) :- !. 
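%-------------------------------------------------------------------------------
% Illustrative note (not part of the original source): for a VC in which one
% user rule discharged conclusions 1 and 3, the per-VC summary produced by the
% predicates above and below takes roughly this shape in the log file (the
% rule-file and rule names here are hypothetical):
%
%   VCN 2: Summary of user rule application.
%   FIL my_rules
%   RUL my_rule(1)
%   CON 1, 3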
%------------------------------------------------------------------------------- summary_of_hypotheses_rep(File:Rule) :- retract(rule_summary(File:Rule, hyp(N))), print(', '), print(N), fail. summary_of_hypotheses_rep(_) :- !. %=============================================================================== % write_vc_entry(+ACTION, +ARGUMENTS). %------------------------------------------------------------------------------- % Writes an entry for facts logged in proving verification conditions. %=============================================================================== /* 1 */ write_vc_entry(hyp_split, [N, [Na, A], [Nb, B], D]) :- get_indentation(D, Indent), prefix(restructuring), print('Hypothesis H'), print(N), print(' has now been split into two, giving:'), nl, print(Indent), indent5, prefix(restructuring), print('H'), print(Na), print(': '), print(A), nl, print(Indent), indent5, prefix(restructuring), print('H'), print(Nb), print(': '), print(B), !. /* 2 */ write_vc_entry(forwardchain, [Nab, Na, Nb, B, D]) :- get_indentation(D, Indent), prefix(addition), print('Using "A->B, A |- B" on hypotheses H'), print(Nab), print(' & H'), print(Na), print(' yields a new hypothesis:'), nl, print(Indent), indent5, prefix(addition), print('H'), print(Nb), print(': '), print(B), !. /* 3 */ write_vc_entry(backchain, [Nab, Nb, Na, A, D]) :- get_indentation(D, Indent), prefix(addition), print('Using "A->B, not B |- not A" on hypotheses H'), print(Nab), print(' & H'), print(Nb), print(' yields a new hypothesis:'), nl, print(Indent), indent5, prefix(addition), print('H'), print(Na), print(': '), print(A), !. /* 4 */ write_vc_entry(restructured, [N, F, D]) :- get_indentation(D, Indent), prefix(restructuring), print('Restructured hypothesis H'), print(N), print(' into:'), nl, print(Indent), indent5, prefix(restructuring), print('H'), print(N), print(': '), print(F), !. /* 5 */ write_vc_entry(simplified, [HorC, N, _OLD, NEW]) :- ( HorC = hyp, HC = 'H' ; HorC = conc, HC = 'C' ), prefix(simplification), print('Simplified '), print(HC), print(N), print(' on reading formula in, to give:'), nl, indent5, prefix(simplification), print(HC), print(N), print(': '), print(NEW), !. /* 5a */ write_vc_entry(simplified_conc, [N, _OLD, NEW, D]) :- get_indentation(D, Indent), prefix(simplification), print('Simplified C'), print(N), print(' further, to give:'), nl, print(Indent), indent5, prefix(simplification), print('C'), print(N), print(': '), print(NEW), !. /* 6 */ write_vc_entry(conc_split, [N, [Na, A], [Nb, B], D]) :- get_indentation(D, Indent), prefix(restructuring), print('Conclusion C'), print(N), print(' has now been split into two, giving:'), nl, print(Indent), indent5, prefix(restructuring), print('C'), print(Na), print(': '), print(A), nl, print(Indent), indent5, prefix(restructuring), print('C'), print(Nb), print(': '), print(B), !. /* 7 */ write_vc_entry(duplicate_hyp, [N, F]) :- prefix(elimination), print('Attempted addition of new hypothesis:'), nl, indent5, indent5, print(F), nl, indent5, print('eliminated: this already exists (as H'), print(N), print(').'), !. /* 7a */ write_vc_entry(repeat_hyp, [N, M]) :- prefix(elimination), print('Hypothesis H'), print(N), print(' has been replaced by "true". (It is already present, as H'), print(M), print(').'), !. 
/* 8 */ write_vc_entry(combined_hyps, [Na, Nb, N, Formula]) :- /* SEPR 1077 */ prefix(simplification), /* (generalised) */ print('Hypotheses H'), print(Na), print(' & H'), print(Nb), print(' together imply that'), nl, indent5, indent5, print(Formula), print('.'), nl, indent5, print('H'), print(Na), print(' & H'), print(Nb), print(' have therefore been deleted and a new H'), print(N), print(' added to this effect.'), !. /* 9 */ write_vc_entry(proved, [N, C, Hs, CC]) :- prefix(proof), print('Proved C'), print(N), print(': '), print(CC), ( CC = C ; nl, indent5, print('via its standard form, which is:'), nl, indent5, print('Std.Fm C'), print(N), print(': '), print(C) ), !, ( Hs=[] ; nl, indent5, ( Hs=[H], print('using hypothesis H'), print(H), print('.') ; print('using hypotheses H'), write_hyp_numbers_list(Hs, 'H'), print('.') ) ), !. /* 9a */ write_vc_entry(proved_subgoal, [N, C, Hs, CC, D]) :- get_indentation(D, Indent), prefix(proof), print('Proved subgoal C'), print(N), ( CC = C ; print(' via its simplified form, which is:'), nl, print(Indent), indent5, print('Std.Fm C'), print(N), print(': '), print(C) ), !, ( Hs=[] ; nl, print(Indent), indent5, ( Hs=[H], print('using hypothesis H'), print(H), print('.') ; print('using hypotheses H'), write_hyp_numbers_list(Hs, 'H'), print('.') ) ), !. /* 9b */ write_vc_entry(proved_by_framing, [N, C, Method]) :- prefix(proof), print('Proved C'), print(N), print(': '), print(C), nl, indent5, print('by '), print(Method), print('.'), !. /* 9c */ write_vc_entry(proved_by_framing_hyp, [ConcNumber, HypNumber, ConcExp, Method]) :- prefix(proof), print('Proved C'), print(ConcNumber), print(': '), print(ConcExp), nl, indent5, print('by '), print(Method), print(' using hypothesis H'), print(HypNumber), print('.'), !. /* 10 */ write_vc_entry(contradiction, [K, Hs]) :- prefix(contradiction), print('Established a contradiction ['), print(K), ( Hs=[], print('].') ; Hs=[H], print('] using hypothesis H'), print(H), print('.') ; print('] among the following hypotheses:'), nl, indent5, indent5, print('H'), write_hyp_numbers_list(Hs, 'H'), print('.') ), !. /* 11 */ write_vc_entry(forwardchain2, [N, Hs, B]) :- prefix(restructuring), print('Using "A->B, A |- B" on H'), print(N), ( Hs=[], print(', given that "A" is obvious, we simplify this to:') ; ( Hs=[H], print(', given H'), print(H) ; print(', given H'), write_hyp_numbers_list(Hs, 'H') ), print(', we simplify the former to:') ), nl, indent5, prefix(restructuring), print('H'), print(N), print(': '), print(B), !. /* 12 */ write_vc_entry(eliminated_hyp, [N, Message, Hs]) :- prefix(elimination), print('Eliminated hypothesis H'), print(N), print(' ('), print(Message), ( Hs=[] ; print(', given H'), ( Hs=[H], print(H) ; write_hyp_numbers_list(Hs, 'H') ) ), print(').'), !. /* 13 */ write_vc_entry(substituted, [D, N, V, E]) :- integer(N), get_indentation(D, Indent), prefix(substitution), print('Eliminated hypothesis H'), print(N), print('.'), nl, print(Indent), indent5, print('This was achieved by replacing all occurrences of '), print(V), print(' by:'), nl, print(Indent), indent5, indent5, print(E), print('.'), !. /* 13a */ write_vc_entry(substituted, [D, N, V, E]) :- \+ integer(N), get_indentation(D, Indent), prefix(substitution), print('Applied substitution rule '), print(N), print('.'), nl, print(Indent), indent5, print('This was achieved by replacing all occurrences of '), print(V), print(' by:'), nl, print(Indent), indent5, indent5, print(E), print('.'), !. 
/* 13b */ write_vc_entry(substituted_fld, [D, N, V, E]) :- integer(N), get_indentation(D, Indent), prefix(substitution), print('Substituted hypothesis H'), print(N), print('.'), nl, print(Indent), indent5, print('This was achieved by replacing all occurrences of '), print(V), print(' by:'), nl, print(Indent), indent5, indent5, print(E), print('.'), !. /* 13c */ write_vc_entry(substituted_fld, [D, N, V, E]) :- \+ integer(N), get_indentation(D, Indent), prefix(substitution), print('Applied substitution rule '), print(N), print('.'), nl, print(Indent), indent5, print('This was achieved by replacing all occurrences of '), print(V), print(' by:'), nl, print(Indent), indent5, indent5, print(E), print('.'), !. /* 14 */ write_vc_entry(proved_all, []) :- prefix(proof), print('PROVED VC.'), !. /* 15 */ write_vc_entry(subst_hyp, [D, N, F]) :- get_indentation(D, _Indent), prefix(substituted), print('New H'), print(N), print(': '), print(F), !. /* 16 */ write_vc_entry(subst_conc, [D, N, F]) :- get_indentation(D, _Indent), prefix(substituted), print('New C'), print(N), print(': '), print(F), !. /* 17 */ write_vc_entry(further_simplified, [HorC, N, _OLD, NEW | HYPS]) :- ( HorC = hyp, HC = 'H' ; HorC = conc, HC = 'C' ), prefix(simplification), print('Simplified '), print(HC), print(N), print(' further'), ( HYPS = [Hypotheses], Hypotheses \= [], print(' (given H'), ( Hypotheses = [SingleHyp], print(SingleHyp) ; write_hyp_numbers_list(Hypotheses, 'H') ), print(')') ; true ), print(', to give:'), nl, indent5, prefix(simplification), print(HC), print(N), print(': '), print(NEW), !. /* 18 */ write_vc_entry(subst_elim_hyp, [D, N, VAR]) :- integer(N), /* fail in other case */ get_indentation(D, Indent), prefix(elimination), print('Eliminated hypothesis H'), print(N), print(', which only specifies a value for '), print(VAR), print('.'), nl, print(Indent), indent5, print('This is not referred to anywhere else in the VC.'), !. /* 18a */ write_vc_entry(subst_fld, [D, N, VAR]) :- integer(N), /* fail in other case */ get_indentation(D, _Indent), prefix(elimination), print('Substituted hypothesis H'), print(N), print(', which specifies an equivalence for '), print(VAR), print('.'), !. /* 19 */ write_vc_entry(eliminated_conc, [N, M]) :- prefix(elimination), print('Eliminated conclusion C'), print(N), print(', which is a duplicate of C'), print(M), print('.'), !. /* 20 */ write_vc_entry(unwrapping, [N, D]) :- get_indentation(D, _Indent), prefix(restructuring), print('Attempting to prove quantified formula C'), print(N), print(' by "unwrapping" it.'), !. /* 21 */ write_vc_entry(implies_conc, [N, _P, _Q, D]) :- get_indentation(D, Indent), prefix(restructuring), print('Conclusion C'), print(N), print(' is an implication formula [P->Q].'), nl, print(Indent), indent5, print('Attempting to prove this by proving Q while adding P to the hypotheses.'), !. /* 22 */ write_vc_entry(by_cases, [N, [Case1, Case2], D]) :- get_indentation(D, Indent), prefix(restructuring), print('Attempting to prove C'), print(N), print(' by cases, depending on whether'), nl, write_out_cases(Indent, [Case1, Case2]), print(Indent), indent5, print('given the need to simplify update(A, [I], X) accesses in C'), print(N), print('.'), !. /* 22a */ write_vc_entry(by_cases_hyp, [N, CaseList, D]) :- get_indentation(D, Indent), prefix(restructuring), print('Attempting to prove C'), print(N), print(' by showing that each of the following cases'), nl, write_out_cases(Indent, CaseList), print(Indent), indent5, print('are provable.'), !. 
/* 23 */ write_vc_entry(add_imp_hyps, [D]) :- /* Ignore hypothesis list */ get_indentation(D, _Indent), prefix(addition), print('Added new hypotheses (in proving an implication formula).'), !. /* 24 */ write_vc_entry(new_hyp, [N, H, D]) :- get_indentation(D, _Indent), indent5, prefix(addition), print('New H'), print(N), print(': '), print(H), !. /* 25 */ write_vc_entry(new_hyp_for_case, [N, H, C, D]) :- get_indentation(D, _Indent), prefix(addition), print('Case '), print(C), print(' - New H'), print(N), print(': '), print(H), !. /* 26 */ write_vc_entry(new_goal, [N, C, D]) :- get_indentation(D, _Indent), prefix(addition), print('New subgoal C'), print(N), print(': '), print(C), !. /* 27a */ write_vc_entry(rule_proved_conc, [N, C, File:Rule, rewrite(NewC, Conditions), RuleSort]) :- prefix(proof), print('Proved C'), print(N), print(': '), print(C), nl, indent5, print('This was achieved by applying the rewrite rule '), print(Rule), nl, indent5, maybe_write_rulefile(File:Rule, RuleSort, conc(N)), print('to rewrite this conclusion to:'), nl, prefix(substituted), print('C'), print(N), print(': '), print(NewC), write_conditions_list(Conditions), !. /* 27b */ write_vc_entry(rule_proved_conc, [N, C, File:Rule, inference([]), RuleSort]) :- prefix(proof), print('Proved C'), print(N), print(': '), print(C), nl, indent5, print('This was achieved by applying the inference rule '), print(Rule), nl, indent5, maybe_write_rulefile(File:Rule, RuleSort, conc(N)), print('to infer this conclusion directly (rule has no side-conditions).'), !. /* 27c */ write_vc_entry(rule_proved_conc, [N, C, File:Rule, inference(Conditions), RuleSort]) :- prefix(proof), print('Proved C'), print(N), print(': '), print(C), nl, indent5, print('This was achieved by applying the inference rule '), print(Rule), nl, indent5, maybe_write_rulefile(File:Rule, RuleSort, conc(N)), print('to infer this conclusion from its side-conditions,'), nl, indent5, print('which were established as follows:'), write_conditions_list_items(Conditions), !. /* 28 */ write_vc_entry(applied_rule, [N, H, File:Rule, rewrite(OldH, Conditions), RuleSort]) :- prefix(addition), print('New H'), print(N), print(': '), print(H), nl, indent5, print('This was achieved by applying the rewrite rule '), print(Rule), nl, indent5, maybe_write_rulefile(File:Rule, RuleSort, hyp(N)), print('to rewrite existing hypothesis H'), ( get_hyp(OldH, _, M) ; M = '?' /* catch-all case: should never be used */ ), print(M), print(' to give the above formula.'), write_conditions_list(Conditions), !. /* 28b */ write_vc_entry(applied_rule, [N, H, File:Rule, inference([]), RuleSort]) :- prefix(addition), print('New H'), print(N), print(': '), print(H), nl, indent5, print('This was achieved by applying the inference rule '), print(Rule), nl, indent5, maybe_write_rulefile(File:Rule, RuleSort, hyp(N)), print('to infer this hypothesis directly (rule has no side-conditions).'), !. /* 28c */ write_vc_entry(applied_rule, [N, H, File:Rule, inference(Conditions), RuleSort]) :- prefix(addition), print('New H'), print(N), print(': '), print(H), nl, indent5, print('This was achieved by applying the inference rule '), print(Rule), nl, indent5, maybe_write_rulefile(File:Rule, RuleSort, hyp(N)), print('to infer this hypothesis from its side-conditions,'), nl, indent5, print('which were established as follows:'), write_conditions_list_items(Conditions), !. 
/* 30 */ write_vc_entry(composite_rewrite, [ConcNumber, Conc, HypNumberList]) :- prefix(restructuring), print('Conclusion C'), print(ConcNumber), print(': '), print(Conc), nl, indent5, print('is transformed by replacing variables with update structures via:'), nl, indent5, indent5, print('H'), write_hyp_numbers_list(HypNumberList, 'H'), print('.'), !. /* 31 */ write_vc_entry(zombiescope_exceed_limit, [_VCNumber]) :- prefix(hyp_limit), nl, print('Number of hyps in DPC exceeds limit specified by -hyp-limit.'), nl, print('Skipping all hypotheses in DPC.'), nl, print('No dead path detection performed for this DPC.'), nl, !. /* ?? */ write_vc_entry(UNEXPECTED, ARGS) :- print('??? '), print(UNEXPECTED), nl, indent5, indent5, print(ARGS), !. %------------------------------------------------------------------------------- write_out_cases(Indent, CaseList):- write_out_cases_x(Indent, 1, CaseList), !. write_out_cases_x(Indent, CaseNumber, [Case]):- print(Indent), indent5, print('('), print(CaseNumber), print(') '), print(Case), print(','), nl, !. write_out_cases_x(Indent, CaseNumber, [Case | CaseList]):- NextCaseNumber is CaseNumber+1, print(Indent), indent5, print('('), print(CaseNumber), print(') '), print(Case), print(', or'), nl, write_out_cases_x(Indent, NextCaseNumber, CaseList). %------------------------------------------------------------------------------- maybe_write_rulefile(File:Rule, user_inference_rule, Action) :- !, convert_file_to_base_name(File, DisplayFile), print('[from rulefile '), print(DisplayFile), print('] '), add_to_rule_summary(File:Rule, Action). maybe_write_rulefile(File:Rule, user_rewrite_rule, Action) :- !, convert_file_to_base_name(File, DisplayFile), print('[from rulefile '), print(DisplayFile), print('] '), add_to_rule_summary(File:Rule, Action). maybe_write_rulefile(_, _, _) :- !. %------------------------------------------------------------------------------- write_conditions_list([]) :- nl, indent5, print('This rule has an empty list of side-conditions.'), !. write_conditions_list([One]) :- nl, indent5, print('This rule could be applied because its side-condition holds, as follows:'), write_conditions_list_items([One]), !. write_conditions_list(List) :- nl, indent5, print('This rule could be applied because its side-conditions hold, as follows:'), write_conditions_list_items(List), !. %------------------------------------------------------------------------------- write_conditions_list_items([]) :- !. write_conditions_list_items([proved(goal(Condition), [], []) | Rest]) :- nl, indent5, prefix(sidecondition), print('Immediate condition '), print(Condition), write(' evaluated successfully'), !, write_conditions_list_items(Rest). write_conditions_list_items([proved(Condition, [], []) | Rest]) :- nl, indent5, prefix(sidecondition), print('By simple reasoning, proved: '), print(Condition), !, write_conditions_list_items(Rest). write_conditions_list_items([proved(Condition, Hyps, []) | Rest]) :- nl, indent5, prefix(sidecondition), print('From H'), write_hyp_numbers_list(Hyps, 'H'), print(', proved: '), print(Condition), !, write_conditions_list_items(Rest). %------------------------------------------------------------------------------- add_to_rule_summary(RuleId, Use) :- \+ rule_summary(RuleId, Use), !, assertz(rule_summary(RuleId, Use)). add_to_rule_summary(_, _) :- !. %=============================================================================== % write_pf_entry(+ACTION, +ARGUMENTS). 
%------------------------------------------------------------------------------- % Writes an entry for facts logged in proving path functions. %=============================================================================== /* 1 */ write_pf_entry(hyp_split, [N, [Na, A], [Nb, B]|_]) :- prefix(restructuring), print('Traversal condition TC#'), print(N), print(' has now been split into two, giving:'), nl, indent5, prefix(restructuring), print('TC#'), print(Na), print(': '), print(A), nl, indent5, prefix(restructuring), print('TC#'), print(Nb), print(': '), print(B), !. /* 2 */ write_pf_entry(forwardchain, [Nab, Na, Nb, B|_]) :- prefix(addition), print('Using "A->B, A |- B" on traversal condition TC#'), print(Nab), print(' & TC#'), print(Na), print(' yields a new traversal condition:'), nl, indent5, prefix(addition), print('TC#'), print(Nb), print(': '), print(B), !. /* 3 */ write_pf_entry(backchain, [Nab, Nb, Na, A|_]) :- prefix(addition), print('Using "A->B, not B |- not A" on traversal condition TC#'), print(Nab), print(' & TC#'), print(Nb), print(' yields a new traversal condition:'), nl, indent5, prefix(addition), print('TC#'), print(Na), print(': '), print(A), !. /* 4 */ write_pf_entry(restructured, [N, F|_]) :- prefix(restructuring), print('Restructured traversal condition TC#'), print(N), print(' into:'), nl, indent5, prefix(restructuring), print('TC#'), print(N), print(': '), print(F), !. /* 5a */ write_pf_entry(simplified, [hyp, N, _OLD, NEW]) :- prefix(simplification), print('Simplified TC#'), print(N), print(' on reading in, to give:'), nl, indent5, prefix(simplification), print('TC#'), print(N), print(': '), print(NEW), !. /* 5b */ write_pf_entry(simplified, [conc, _N, _OLD, NEW]) :- prefix(simplification), print('Simplified path action part on reading in, to give:'), nl, write_new_action_part(NEW), !. /* 6 */ /* conc_split should not occur with path functions! */ /* 7 */ write_pf_entry(duplicate_hyp, [N, F]) :- prefix(elimination), print('Attempted addition of new traversal condition:'), nl, indent5, indent5, print(F), nl, indent5, print('eliminated: this already exists (as TC#'), print(N), print(').'), !. /* 8 */ write_pf_entry(combined_hyps, [Na, Nb, N, A=B]) :- prefix(simplification), print('Traversal conditions TC#'), print(Na), print(' & TC#'), print(Nb), print(' together imply that'), nl, indent5, indent5, print(A=B), print('.'), nl, indent5, print('TC#'), print(Na), print(' & TC#'), print(Nb), print(' have therefore been deleted and a new TC#'), print(N), print(' added to this effect.'), !. /* 9 */ /* proved should not occur with path functions! */ /* 10 */ write_pf_entry(contradiction, [K, Hs]) :- prefix(contradiction), print('Established a contradiction ['), print(K), ( Hs=[], print('].') ; Hs=[H], print('] using traversal condition TC#'), print(H), print('.') ; print('] among the following traversal conditions:'), nl, indent5, indent5, print('TC#'), write_hyp_numbers_list(Hs, 'TC#'), print('.') ), !. /* 11 */ write_pf_entry(forwardchain2, [N, Hs, B]) :- prefix(restructuring), print('Using "A->B, A |- B" on TC#'), print(N), ( Hs=[], print(', given that "A" is obvious, we simplify this to:') ; print(', given TC#'), ( Hs=[H], print(H) ; write_hyp_numbers_list(Hs, 'TC#') ), print(', we simplify the former to:') ), nl, indent5, prefix(restructuring), print('TC#'), print(N), print(': '), print(B), !. 
/* 12 */ write_pf_entry(eliminated_hyp, [N, Message, Hs]) :- prefix(elimination), print('Eliminated traversal condition TC#'), print(N), print(' ('), print(Message), ( Hs=[] ; print(', given TC#'), ( Hs=[H], print(H) ; write_hyp_numbers_list(Hs, 'TC#') ) ), print(').'), !. /* 13 */ write_pf_entry(substituted, [_D, N, V, E]) :- /* ignore depth D */ prefix(substitution), print('Used traversal condition TC#'), print(N), print(' to replace all occurrences of '), print(V), print(' by:'), nl, indent5, indent5, print(E), print('.'), !. /* 14 */ write_pf_entry(proved_all, []) :- prefix(proof), print('PATH ELIMINATED.'), !. /* 15 */ write_pf_entry(subst_hyp, [_D, N, F]) :- /* ignore depth D */ prefix(substituted), print('New TC#'), print(N), print(': '), print(F), !. /* 16 */ write_pf_entry(subst_conc, [_D, 1, F]) :- /* ignore depth D */ prefix(substituted), print('New action part is:'), nl, write_new_action_part(F), !. /* 17a*/ write_pf_entry(further_simplified, [hyp, N, _OLD, NEW | HYPS]) :- prefix(simplification), print('Simplified TC#'), print(N), print(' further'), ( HYPS= [Hypotheses], Hypotheses \= [], print(' (given TC#'), ( Hypotheses = [SingleHyp], print(SingleHyp) ; write_hyp_numbers_list(Hypotheses, 'TC#') ), print(')') ; true ), print(', to give:'), nl, indent5, prefix(simplification), print('TC#'), print(N), print(': '), print(NEW), !. /* 17b*/ write_pf_entry(further_simplified, [conc, _N, _OLD, NEW | HYPS]) :- prefix(simplification), print('Simplified path action part'), ( HYPS = [Hypotheses], Hypotheses \= [], print(' (given TC#'), ( Hypotheses = [SingleHyp], print(SingleHyp) ; write_hyp_numbers_list(Hypotheses, 'TC#') ), print(')') ; true ), print(' to give:'), nl, write_new_action_part(NEW), !. /* ?? */ write_pf_entry(UNEXPECTED, ARGS) :- print('??? '), print(UNEXPECTED), nl, indent5, indent5, print(ARGS), !. %------------------------------------------------------------------------------- write_hyp_numbers_list([H1,H2], PREFIX) :- print(H1), print(' & '), print(PREFIX), print(H2), !. write_hyp_numbers_list([H1], _PREFIX) :- print(H1), !. write_hyp_numbers_list([H1|Hs], PREFIX) :- print(H1), print(', '), print(PREFIX), !, write_hyp_numbers_list(Hs, PREFIX), !. %------------------------------------------------------------------------------- write_new_action_part(X & Y) :- write_new_action_part(X), print(' &'), nl, write_new_action_part(Y), !. write_new_action_part(X := Y) :- indent5, indent5, print(X), print('\' := '), print(Y), !. %=============================================================================== % write_vc_entry(+ACTION). %------------------------------------------------------------------------------- % Write out an appropiate identifier for the action class. %=============================================================================== prefix( proof ) :- print('*** '), !. prefix( elimination ) :- print('--- '), !. prefix( restructuring ) :- print('>>> '), !. prefix( simplification ) :- print('%%% '), !. prefix( addition ) :- print('+++ '), !. prefix( contradiction ) :- print('### '), !. prefix( substitution ) :- print('-S- '), !. prefix( substituted ) :- print(' '), !. prefix( sidecondition ) :- print('<<< '), !. prefix( rules_read_sec ) :- print('RRS '), !. prefix( rulefile_read ) :- print('&&& '), !. prefix(syntax_error_sec) :- print('STX '), !. prefix( syntax_err_rep ) :- print('!!! '), !. prefix( semantic_sec ) :- print('SEM '), !. prefix( vc_number ) :- print('VCN '), !. prefix( rulefile ) :- print('FIL '), !. prefix( rule ) :- print('RUL '), !. 
prefix( conclusion ) :- print('CON '), !. prefix( hypotheses ) :- print('HYP '), !. prefix( overall_summary) :- print('OVR '), !. prefix( vcs ) :- print('VCS '), !. prefix( hyp_limit ) :- print('HYP LIMIT EXCEEDED '), !. %=============================================================================== % get_indentation(+D, -Indent). %------------------------------------------------------------------------------- % Calculate an indent as Indent given the current depth description as D. %=============================================================================== get_indentation(toplevel, '') :- !. get_indentation(D, Indent) :- already_know_indentation(D, Indent), !, print(Indent). get_indentation(D, Indent) :- max_proof_framing_depth(M), Num5spaces is M-D, create_spaces(Num5spaces, Indent), !, assertz(already_know_indentation(D, Indent)), print(Indent). %------------------------------------------------------------------------------- create_spaces(1, ' '). create_spaces(2, ' '). create_spaces(3, ' '). create_spaces(4, ' '). create_spaces(5, ' '). create_spaces(N, Indent) :- N > 5, Rest is N-5, create_spaces(Rest, R), name(R, L), append(L, " ", I), name(Indent, I), !. create_spaces(_, ''). /* Otherwise -- shouldn't get here! */ %=============================================================================== % indent5. %------------------------------------------------------------------------------- % Write a five space indent. %=============================================================================== indent5 :- print(' '), !. %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/usage_utilities.pro0000644000175000017500000001502111753202337020525 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Predicate to support saving, merging and viewing of usage data. %############################################################################### %############################################################################### % MODULE %############################################################################### %############################################################################### % DEPENDENCIES %############################################################################### :- use_module(gauge_simplifier, [view/1]). :- use_module(data__switches, [get_switch_usage/1]). :- use_module(library(file_systems)). 
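
%
% Illustrative note (not part of the original source): the merging implemented
% later in this file (merge_pair_lists/3 with process_duplicate_keys/2) sums
% the counts of entries that share a key.  For example, the hypothetical query
%
%   ?- merge_pair_lists([foo/1-2], [bar/0-1, foo/1-3], Merged).
%   Merged = [bar/0-1, foo/1-5]
%
% holds because keysort/2 brings equal keys together and adjacent duplicates
% are then collapsed by summing their values.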
%############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### % % Predicate used to save all the usage data across across multiple % executions of the Simplifier. % :- dynamic usage_data/3. % % Values for selection and resolution used in predicate profile_data. % selection([calls, choice_points, instructions]). resolution([predicate, clause]). %############################################################################### % PREDICATES %############################################################################### %############################################################################### % save_usage_data %------------------------------------------------------------------------------- % Save usage data. If usage data already exists then load in the data, merge % and then save it. %############################################################################### save_usage_data :- get_switch_usage(provided_usage_file(Filename)), save_usage_data_x(Filename), !. % File exists. save_usage_data_x(Filename):- file_exists(Filename), !, load_files(Filename), process_all_usage_data, save_predicates([usage_data/3], Filename). % File does not exist. save_usage_data_x(Filename):- !, process_all_usage_data, save_predicates([usage_data/3], Filename). %############################################################################### % process_all_usage_data %------------------------------------------------------------------------------- % Collect new usage data and merge it with saved existing usage data if % they exist. %############################################################################### process_all_usage_data :- selection(Sel_List), resolution(Res_List), process_usage_data_sel_list_res_list(Sel_List, Res_List). process_usage_data_sel_list_res_list(Sel_List, Res_List):- member(Selection, Sel_List), member(Resolution, Res_List), process_usage_data_sel_res(Selection, Resolution), fail. process_usage_data_sel_list_res_list(_Sel_List, _Res_List):- !. process_usage_data_sel_res(Selection, Resolution):- usage_data(Selection, Resolution, ExistingData), !, profile_data([_], Selection, Resolution, Data), retract(usage_data(Selection, Resolution, ExistingData)), merge_pair_lists(Data, ExistingData, NewData), assert(usage_data(Selection, Resolution, NewData)). process_usage_data_sel_res(Selection, Resolution):- \+ usage_data(Selection, Resolution, _), !, profile_data([_], Selection, Resolution, Data), assert(usage_data(Selection, Resolution, Data)). %############################################################################### % merge_pair_lists(+List1, +List2, -ListMerged) %------------------------------------------------------------------------------- % Merge two lists containing usage data together. Note that keysort % does not remove duplicates so no data is lost. %############################################################################### merge_pair_lists(List1, List2, MergedList):- append(List1, List2, TmpList), keysort(TmpList, SortedList), process_duplicate_keys(SortedList, MergedList), !. %############################################################################### % process_duplicate_keys(+SortedList, -MergedSortedList[]). 
%------------------------------------------------------------------------------- % Merge two lists containing usage data. Note that keysort (see above) % does not remove duplicates so no data is lost. %############################################################################### process_duplicate_keys([], []) :- !. process_duplicate_keys([H], [H]) :- !. % If the two elements at the head of the list have the same key then merge them. process_duplicate_keys([H1-V1,H1-V2|T], [H1-V_sum|L]) :- V_sum is V1 + V2, process_duplicate_keys(T, L), !. % Case when two head elements in the list have different keys. process_duplicate_keys([H1-V1,H2-V2|T], [H1-V1|L]) :- process_duplicate_keys([H2-V2|T], L), !. %############################################################################### % view_usage. % % Predicate for viewing collected usage data through predicates provided % by the modified gauge_simplifier. %############################################################################### view_usage:- get_switch_usage(provided_usage_file(Filename)), file_exists(Filename), load_files(Filename), view([_]). %############################################################################### % END-OF-FILE spark-2012.0.deb/simplifier/newutilities.pro0000644000175000017500000005016111753202337020057 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Generic utility predicates. %############################################################################### %############################################################################### % MODULE %############################################################################### :- module(newutilities, [atom_to_integer/2, integer_to_atom/2, spacer/1, spacer/2, implode_separator_content_list/3, explode_separator_content_as_list/3, pad_number_as_atom/3, trim_atom/3, generate_int_list/3, flatten_list/2, contains_no_dups/1, month_numeric_to_name/2, generate_integer/1, unique_atom/2, fetch_date_and_time/2]). %############################################################################### % DEPENDENCIES %############################################################################### :- use_module(library(lists)). :- use_module(library(system)). :- use_module('data__formats.pro', [add_state/2]). :- use_module('ioutilities.pro', [throw_error/2]). :- set_prolog_flag(double_quotes, chars). 
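%
% Illustrative examples (not part of the original source) of some of the
% exported utilities defined below, shown as hypothetical queries:
%
%   ?- implode_separator_content_list('_', [spark, '2012'], A).
%   A = spark_2012
%
%   ?- explode_separator_content_as_list(',', 'a,b,c', L).
%   L = [a, b, c]
%
%   ?- pad_number_as_atom(7, 3, P).
%   P = '007'
%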
%############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### :- add_state(get_base_unique_id, get_base_unique_id('Base_Atom', 'Int')). :- dynamic(get_base_unique_id/2). %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % atom_to_integer(+Atom, -Int). %------------------------------------------------------------------------------- % Convert an atom to an integer. Will raise an exception if the atom can % not be transformed into an integer. %=============================================================================== atom_to_integer(Atom, Int):- atom_chars(Atom, CharList), number_chars(Int, CharList), !. %=============================================================================== % integer_to_atom(+Int, -Atom). %------------------------------------------------------------------------------- % Convert an integer to an atom. This is always possible. %=============================================================================== integer_to_atom(Int, Atom):- number_chars(Int, CharList), atom_chars(Atom, CharList), !. %=============================================================================== % spacer(+Int). %------------------------------------------------------------------------------- % Writes a number of spaces to the current output stream. %=============================================================================== spacer(0):- !. spacer(Int):- write(' '), Next_Int is Int-1, spacer(Next_Int). %------------------------------------------------------------------------------- spacer(_Stream, 0):- !. spacer(Stream, Int):- write(Stream, ' '), Next_Int is Int-1, spacer(Stream, Next_Int). %=============================================================================== % implode_separator_content_list(+Separator_Atom, % +Item_AtomList, % -Content_Atom). %------------------------------------------------------------------------------- % Given a separator atom (Separator_Atom) and a list of atoms % (Item_AtomList) generates (Content_Atom) by joining together all of the % atoms in the list, placing the separators in-between. Note that the % separator atom may be empty '' -- thus this predicate may also be used to % join a list of atoms. %=============================================================================== % Empty case. implode_separator_content_list(_Separator_Atom, [], ''):- !. % Single case. implode_separator_content_list(_Separator_Atom, [Item_Atom], Item_Atom):- !. % More than one. implode_separator_content_list(Separator_Atom, [H_Item_Atom | T_Item_AtomList], Content_Atom):- implode_separator_content_list_x(Separator_Atom, T_Item_AtomList, H_Item_Atom, Content_Atom), !. %------------------------------------------------------------------------------- % Last item. implode_separator_content_list_x(_Separator_Atom, [], SoFar__Final__Atom, SoFar__Final__Atom):- !. % Place separator between all non last items. 
implode_separator_content_list_x(Separator_Atom, [H_Item_Atom | T_Item_AtomList], SoFar_Atom, Final_Atom):- atom_concat(SoFar_Atom, Separator_Atom, SoFarSeparator_Atom), atom_concat(SoFarSeparator_Atom, H_Item_Atom, NewSoFar_Atom), implode_separator_content_list_x(Separator_Atom, T_Item_AtomList, NewSoFar_Atom, Final_Atom). %=============================================================================== %explode_separator_content_as_list(+Separator_Atom, % +Content_Atom, % -Item_AtomList). %------------------------------------------------------------------------------- % Given a separator atom (Separator_Atom) and content (Content_Atom) a list % of atoms (Item_AtomList) is generated by splitting the content at each % occurrence of the separator. The separator is not included in the item % list. If the separator is empty ('') then the content will be split % between every character. %=============================================================================== % Note that a simplistic two step solution is presented here, rather than % trying to cleverly (and confusingly) mix the identification of separators % and collection of items. explode_separator_content_as_list(Separator_Atom, Content_Atom, Item_AtomList):- atom_chars(Separator_Atom, Separator_CharList), atom_chars(Content_Atom, Content_CharList), % Replace all occurrences of separator in content with the single atom % 'split'. replace_separator_with_split(Separator_CharList, Content_CharList, SplitContent_CharList), % Split off atoms at each occurrence of atom 'split'. split_off_atoms(SplitContent_CharList, [], Item_AtomList), !. %------------------------------------------------------------------------------- %No more content to consider. replace_separator_with_split(_Separator_CharList, [], []):- !. %If separator is empty ('') at last character, then do not add an extra %split. replace_separator_with_split([], [H_Content_Char], [H_Content_Char]):- !. %If separator is empty ('') then split between every character. replace_separator_with_split([], [H_Content_Char | T_Content_CharList], [H_Content_Char, split | T_SplitContent_CharList]):- replace_separator_with_split([], T_Content_CharList, T_SplitContent_CharList). %Next block of context matches with separator. Make a split. replace_separator_with_split(Separator_CharList, Content_CharList, [split | T_SplitContent_CharList]):- append(Separator_CharList, RemainingContent_CharList, Content_CharList), replace_separator_with_split(Separator_CharList, RemainingContent_CharList, T_SplitContent_CharList). %From above, do not make a split here. replace_separator_with_split(Separator_CharList, [H_Content_Char | T_Content_CharList], [H_Content_Char | T_SplitContent_CharList]):- replace_separator_with_split(Separator_CharList, T_Content_CharList, T_SplitContent_CharList). %------------------------------------------------------------------------------- %No more content to consider. split_off_atoms([], Collected_CharList, [Collected_Atom]):- atom_chars(Collected_Atom, Collected_CharList), !. %Is a split point. split_off_atoms([split | T_SplitContent_CharList], Collected_CharList, [H_Item_Atom | T_Item_AtomList]):- atom_chars(H_Item_Atom, Collected_CharList), split_off_atoms(T_SplitContent_CharList, [], T_Item_AtomList). %From above is not a split point. 
split_off_atoms([H_SplitContent_Char | T_SplitContent_CharList], Collected_CharList, T_Item_AtomList):- append(Collected_CharList, [H_SplitContent_Char], NewCollected_CharList), split_off_atoms(T_SplitContent_CharList, NewCollected_CharList, T_Item_AtomList). %=============================================================================== % pad_number_as_atom(Input_Int, PadToLength_Int, Padded_Atom). %------------------------------------------------------------------------------- %Given an integer (Input_Int) and a length (PadToLength_Int) the integer is %converted to an atom and prefixed with zeros until the atom is the %requested length. %=============================================================================== pad_number_as_atom(Input_Int, PadToLength_Int, Padded_Atom):- number_chars(Input_Int, Input_CharList), length(Input_CharList, InputLength_Int), pad_number_as_atom_x(Input_Int, Input_CharList, InputLength_Int, PadToLength_Int, Padded_Atom), !. %------------------------------------------------------------------------------- % Number too big is an error. pad_number_as_atom_x(Input_Int, _Input_CharList, InputLength_Int, PadToLength_Int, _Padded_Atom):- InputLength_Int>PadToLength_Int, throw_error('Can not pad number: ~k to size: ~k as number is too big.\n', [Input_Int, PadToLength_Int]). pad_number_as_atom_x(_Input_Int, Input_CharList, InputLength_Int, PadToLength_Int, Padded_Atom):- retrieve_padding(InputLength_Int, PadToLength_Int, Padding_CharList), append(Padding_CharList, Input_CharList, Padded_CharList), atom_chars(Padded_Atom, Padded_CharList), !. %------------------------------------------------------------------------------- retrieve_padding(Start__Stop__Int, Start__Stop__Int, []):- !. retrieve_padding(Start_Int, Stop_Int, ['0' | T_Padding_CharList]):- NextStart_Int is Start_Int+1, retrieve_padding(NextStart_Int, Stop_Int, T_Padding_CharList). %=============================================================================== % month_numeric_to_name(?Month_Atom, ?MonthName_Atom). %=============================================================================== month_numeric_to_name('01', 'JAN'). month_numeric_to_name('02', 'FEB'). month_numeric_to_name('03', 'MAR'). month_numeric_to_name('04', 'APR'). month_numeric_to_name('05', 'MAY'). month_numeric_to_name('06', 'JUN'). month_numeric_to_name('07', 'JUL'). month_numeric_to_name('08', 'AUG'). month_numeric_to_name('09', 'SEP'). month_numeric_to_name('10', 'OCT'). month_numeric_to_name('11', 'NOV'). month_numeric_to_name('12', 'DEC'). %=============================================================================== % trim_atom(Atom, Trim_Int, Trimmed_Atom). %------------------------------------------------------------------------------- %Given an atom (Atom) and a length (Trimmed_Atom) any characters beyond the %length are trimmed and the resulting atom is returned as (Trimmed_Atom). %=============================================================================== trim_atom(Full_Atom, Trim_Int, Trimmed_Atom):- atom_chars(Full_Atom, Full_CharList), trim_atom_x(1, Trim_Int, Full_CharList, Trimmed_CharList), atom_chars(Trimmed_Atom, Trimmed_CharList), !. %------------------------------------------------------------------------------- % Halt at end of list. trim_atom_x(_At_Int, _Stop_Int, [], []):- !. % Halt when exceeding trim report. trim_atom_x(At_Int, Trim_Int, _Full_CharList, []):- At_Int>Trim_Int, !. % Continue otherwise. 
trim_atom_x(At_Int, Trim_Int, [H_Full__Trimmed__Char | T_Full_CharList], [H_Full__Trimmed__Char | T_Trimmed_CharList]):- NextAt_Int is At_Int+1, trim_atom_x(NextAt_Int, Trim_Int, T_Full_CharList, T_Trimmed_CharList). %=============================================================================== % generate_int_list(+Lower_Int, +Upper_Int, -IntList). %------------------------------------------------------------------------------- % Given a lower and upper integer, generate a list that includes every % value (inclusive) between these values. %=============================================================================== % Reached upper value. generate_int_list(Lower_m_Upper_m_Int, Lower_m_Upper_m_Int, [Lower_m_Upper_m_Int]) :- !. generate_int_list(Lower_Int, Upper_Int, [Lower_Int | T_IntList]) :- Lower_Int < Upper_Int, NextLower_Int is Lower_Int+1, !, generate_int_list(NextLower_Int, Upper_Int, T_IntList), !. %=============================================================================== % flatten_list(+In_AnyList, -Out_AnyList). %------------------------------------------------------------------------------- % Given a list of any number of nested lists (In_AnyList), this retrieves % every element as a single flat list (Out_AnyList). %=============================================================================== flatten_list(In_AnyList, Out_AnyList):- flatten_list_x(In_AnyList, FirstOut_AnyList), !, Out_AnyList=FirstOut_AnyList, !. %------------------------------------------------------------------------------- flatten_list_x([], []). flatten_list_x([H_In_Any | T_In_AnyList], Out_AnyList):- flatten_list_x(H_In_Any, FlatH_AnyList), flatten_list_x(T_In_AnyList, FlatT_AnyList), append(FlatH_AnyList, FlatT_AnyList, Out_AnyList). % From above, is not a list. Return item in flat list. flatten_list_x(In_m_Out_m_Any, [In_m_Out_m_Any]). %=============================================================================== % contains_no_dups(+In_AnyList). %------------------------------------------------------------------------------- % Is successful where the provided list (In_AnyList) does not contain any % duplicates. %=============================================================================== contains_no_dups(In_AnyList):- list_to_set(In_AnyList, NoDup_AnyList), length(In_AnyList, LengthIn_Int), length(NoDup_AnyList, LengthNoDup_Int), LengthIn_Int=LengthNoDup_Int, !. %=============================================================================== % generate_integer(Number_Int). %------------------------------------------------------------------------------- % Through backtracking, returns (Number_Int) as incremental additions from % one. %=============================================================================== generate_integer(1). generate_integer(NowNumber_Int):- generate_integer(Number_Int), NowNumber_Int is Number_Int+1. %=============================================================================== % unique_atom(+Base_Atom, -Unique_Atom). %------------------------------------------------------------------------------- % On successive invocations generates unique atoms as (Unique_Atom). The % base portion of the unique atoms is specified as (Base_Atom). %=============================================================================== unique_atom(Base_Atom, Unique_Atom):- retrieve_next_id(Base_Atom, Id_Int), integer_to_atom(Id_Int, Id_Atom), implode_separator_content_list('_', [Base_Atom, Id_Atom], Unique_Atom), !. 
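%===============================================================================
% Worked examples for the utility predicates above (illustrative only).
%-------------------------------------------------------------------------------
% A minimal sketch, added for clarity, of how the utility predicates defined
% earlier in this file are expected to behave, as read from their definitions.
% The predicate name example_utility_goals is hypothetical: it is not part of
% the SPARK toolset and is not referenced anywhere else. Calling it from the
% SICStus top level should simply succeed.
%===============================================================================
example_utility_goals:-
    % The separator is dropped from the resulting items.
    explode_separator_content_as_list(',', 'a,b,c', [a, b, c]),
    % An empty separator ('') splits between every character.
    explode_separator_content_as_list('', 'abc', [a, b, c]),
    % Numbers are zero padded on the left to the requested width.
    pad_number_as_atom(7, 3, '007'),
    % Characters beyond the requested length are discarded.
    trim_atom(hello, 3, hel),
    % Both bounds are included in the generated list.
    generate_int_list(2, 5, [2, 3, 4, 5]),
    % Arbitrarily nested lists are flattened to a single level.
    flatten_list([1, [2, [3]], 4], [1, 2, 3, 4]),
    % Succeeds as the list contains no duplicate elements.
    contains_no_dups([a, b, c]).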
%------------------------------------------------------------------------------- % Previous id is available. retrieve_next_id(Base_Atom, Next_Int):- retract(get_base_unique_id(Base_Atom, Current_Int)), Next_Int is Current_Int+1, assert(get_base_unique_id(Base_Atom, Next_Int)), !. % No previous id. Always start at 1. retrieve_next_id(Base_Atom, 1):- assert(get_base_unique_id(Base_Atom, 1)), !. %=============================================================================== % fetch_date_and_time(-Date_Atom, -Time_Atom). %=============================================================================== % Date is of the form: 09-JAN-1980 % Time is of the form: 01:59:01 %=============================================================================== fetch_date_and_time(Date_Atom, Time_Atom):- datime(datime(Year_Int, Month_Int, Day_Int, Hour_Int, Min_Int, Sec_Int)), pad_number_as_atom(Year_Int, 4, Year_Atom), pad_number_as_atom(Month_Int, 2, Month_Atom), pad_number_as_atom(Day_Int, 2, Day_Atom), pad_number_as_atom(Hour_Int, 2, Hour_Atom), pad_number_as_atom(Min_Int, 2, Min_Atom), pad_number_as_atom(Sec_Int, 2, Sec_Atom), month_numeric_to_name(Month_Atom, MonthName_Atom), implode_separator_content_list('-', [Day_Atom, MonthName_Atom, Year_Atom], Date_Atom), implode_separator_content_list(':', [Hour_Atom, Min_Atom, Sec_Atom], Time_Atom), !. :- set_prolog_flag(double_quotes, codes). %############################################################################### % END-OF-FILE spark-2012.0.deb/README0000644000175000017500000001166611753202341013327 0ustar eugeneugenSPARK Toolset - Source Distribution =================================== This is the SPARK Toolset source distribution. It contains source code and makefiles suitable for building the SPARK tools from source. Unpacking the Source Distribution ================================= The source distribution of the SPARK Toolset is supplied as a "compressed tarball" file. It is created using the Cygwin "tar" and "gzip" commands on Windows. To unpack the distribution: On Solaris, use "gzip -d" followed by "gtar xf". Note that the standard Solaris "tar" command is not recommended or guaranteed to work, owing to the length of some of the filenames in the source distribution. On Linux or OS X machines: use "gzip -d" followed by "tar xf". On Windows machines: use the cygwin "gzip -d" and "tar xf" commands. do NOT use the WinZip utility to unpack the distribution - this does not currently work owing to the length of the filenames in the Examiner sources. Contents ======== Source code ----------- /examiner - Examiner (SPARK). /sparkformat - SparkFormat (SPARK). /sparkmake - Sparkmake (SPARK). /sparksimp - Sparksimp (Ada). /pogs - Pogs (SPARK). /simplifier - Simplifier and ZombieScope (SICStus-Prolog). /checker - Checker (SICStus-Prolog). /common/versioning - Version information. (SPARK and SICStus-Prolog). Data files ---------- /lib/spark - Data files needed by the Examiner. /lib/checker - Data files needed by the Checker. Toolset ------- /bin - Initially empty. Used to store a built toolset. /dlls - Potentially required Microsoft Windows redistributable DLL files. Static analysis --------------- /analyse/referenceanalysis - Reference static analysis results. /analyse/generatedanalysis - Initially empty. Used in analysing a built toolset. Building -------- /makefile - Makefile for both building and analysing the SPARK toolset. Support ------- /COPYING3 - Your copy of the GNU GENERAL PUBLIC LICENSE, version 3. 
/COPYING.RUNTIME - Your copy of the GCC Runtime Library Exception /README - This file. Platforms ========= The SPARK Toolset source distribution is known to work on the following platforms: Red Hat Enterprise Linux 5.5 (32-bit) Red Hat Enterprise Linux 5.5 (64-bit) Mac OS X Leopard (10.5.8) Mac OS X Snow Leopard (10.6.2 or later) Microsoft Windows (XP, 32-bit Vista, 64-bit Vista, 32-bit Windows 7, 64-bit Windows 7, Server 2003) SPARC/Solaris 8 Other Linux distributions may also work. Prerequisites ============= To build the SPARK toolset, the following tools must be available. The versions listed here are known to work. Later versions may also work. GNAT Ada Compiler ----------------- Version: GNAT Pro 6.3.2, GNAT GPL 2010 or later Website: http://www.adacore.com/ or https://libre.adacore.com/ SICStus Prolog -------------- Version: SICStus 4.1.1 or later Website: http://www.sics.se/sicstus/ Note that the SICStus Prolog compiler requires a pre-installed C compiler and linker. This may be installed for each platform as detailed below. Linux and Solaris: The GNAT Ada Compiler is sufficient. Mac OS X: Apple's XCode tools are required. These tools are distributed with Mac OS X, but are not installed by default. The "makefile" supplied with the SPARK toolset assumes that Apple's version of gcc is installed in /usr/bin Microsoft Windows: Microsoft's compiler suite is required (C compiler, linker, libraries, etc.). The Microsoft Visual C++ 2005 Express Edition (with service pack 1 and the platform SDK) is known to work: http://www.microsoft.com/express/2005/download Unix tools ---------- The following standard Unix commands are used in the build process: make, cp, mv, rm, uname, chmod, diff, sed, gunzip, tar. These may be installed for each platform as detailed below. Linux, Mac OS X and SunOS: These tools should be available as standard. Microsoft Windows: These tools may be installed as part of the Cygwin package: http://www.cygwin.com/ How to build ============ With the prerequisites installed and on your PATH, change directory to the root of the SPARK Toolset source distribution and type: make This will build the tools, copy them to the toolset directory (./bin), and clean up the source tree. How to analyse ============== With the prerequisites installed and on your PATH, the toolset built and also on your PATH, change directory to the root of the SPARK Toolset source distribution and type: make analysetest The message 'Matched reference analysis results.' is reported to the screen if the expected results are generated. Otherwise, the message 'Did not match reference analysis results!' is reported to the screen, alongside the differences detected. Contacting us ============= While we do not offer a formal support service for users of the SPARK Toolset source distribution, we are pleased to hear from its users, either to report problems with the toolset or its documentation, or to suggest new ideas. 
SPARK team can be contacted at: sparkinfo@altran-praxis.com spark-2012.0.deb/sparkformat/0000755000175000017500000000000011753203756015001 5ustar eugeneugenspark-2012.0.deb/sparkformat/sparkprogram-annotations.adb0000644000175000017500000001665211753202340022512 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (SPARKProgram) package body Annotations is procedure Initialize (This : out Anno_Type; Anno_Intro : in String; Anno_Succ : in String; Anno_Indent : in Natural) is Start_Str : Anno_Start_Type; Loc_Succ : E_Strings.T; begin Start_Str := Anno_Start_Type'(1 | 2 => '-', 3 => CommandLineData.Content.Anno_Char); if Anno_Succ = "" then Loc_Succ := E_Strings.Empty_String; else Loc_Succ := E_Strings.Copy_String (Str => Anno_Succ); end if; This := Anno_Type' (Anno_Start => Start_Str, Anno_Intro => E_Strings.Copy_String (Str => Anno_Intro), Anno_Succ => Loc_Succ, Anno_Indent => Anno_Indent); end Initialize; function Intro (This : Anno_Type) return E_Strings.T is begin return This.Anno_Intro; end Intro; function Indent (This : Anno_Type) return Natural is begin return This.Anno_Indent; end Indent; procedure Is_Anno_Start (This : in Anno_Type; Input_Line : in E_Strings.T; Index : in out E_Strings.Lengths; OK : out Boolean) is begin White_Space.Skip (Input_Line => Input_Line, Index => Index); if E_Strings.Get_Length (E_Str => Input_Line) >= Index + 2 and then E_Strings.Get_Element (E_Str => Input_Line, Pos => Index) = This.Anno_Start (1) and then E_Strings.Get_Element (E_Str => Input_Line, Pos => Index + 1) = This.Anno_Start (2) and then E_Strings.Get_Element (E_Str => Input_Line, Pos => Index + 2) = This.Anno_Start (3) then Index := Index + 3; OK := True; else OK := False; end if; end Is_Anno_Start; function Starts_With (Start_Str : E_Strings.T; Input_Line : E_Strings.T; Initial_Index : E_Strings.Positions) return Boolean is Index : E_Strings.Positions; Loc_OK : Boolean; begin Index := Initial_Index; White_Space.Skip (Input_Line => Input_Line, Index => Index); Loc_OK := E_Strings.Get_Length (E_Str => Input_Line) > 0 and then ((E_Strings.Get_Length (E_Str => Input_Line) - Index) + 1) >= E_Strings.Get_Length (E_Str => Start_Str); if Loc_OK then for I in E_Strings.Lengths range 1 .. 
E_Strings.Get_Length (E_Str => Start_Str) loop Loc_OK := Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Start_Str, Pos => I)) = Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Input_Line, Pos => (I + Index) - 1)); exit when not Loc_OK; end loop; end if; return Loc_OK; end Starts_With; function Is_Start_Of (This : Anno_Type; Input_Line : E_Strings.T) return Boolean is Index : E_Strings.Positions; Result : Boolean; begin Index := 1; Is_Anno_Start (This => This, Input_Line => Input_Line, Index => Index, OK => Result); Result := Result and then Starts_With (Start_Str => This.Anno_Intro, Input_Line => Input_Line, Initial_Index => Index); return Result; end Is_Start_Of; function Is_End_Of (This : Anno_Type; Input_Line : E_Strings.T) return Boolean is Index : E_Strings.Positions; Loc_OK : Boolean; Result : Boolean; begin Index := 1; Is_Anno_Start (This => This, Input_Line => Input_Line, Index => Index, OK => Loc_OK); if not Loc_OK then -- No longer in an annotation and therfore ended Result := True; else -- Otherwise only ended if this is the start of a successive annotation if E_Strings.Get_Length (E_Str => This.Anno_Succ) > 0 then Result := Starts_With (Start_Str => This.Anno_Succ, Input_Line => Input_Line, Initial_Index => Index); else Result := False; end if; end if; return Result; end Is_End_Of; function Proper_Start_Col (Start_Col : E_Strings.Positions) return E_Strings.Positions is begin return (Start_Col + Anno_Start_Type'Length) + 1; end Proper_Start_Col; procedure Write (This : in Anno_Type; Output : in SPARK_IO.File_Type; Start_Col : in E_Strings.Positions) is begin SPARK_IO.Set_Col (Output, Start_Col); SPARK_IO.Put_String (Output, This.Anno_Start, 0); end Write; procedure Write_Intro (This : in Anno_Type; Output : in SPARK_IO.File_Type; Start_Col : in E_Strings.Positions) is begin Write (This => This, Output => Output, Start_Col => Start_Col); SPARK_IO.Set_Col (Output, Proper_Start_Col (Start_Col => Start_Col)); E_Strings.Put_String (File => Output, E_Str => This.Anno_Intro); -- If Indent is not Inline, then don't put out a trailing space, -- since this will be rejected by GNAT -gnatyb style check mode. if This.Anno_Indent = SparkFormatCommandLineData.Inline then SPARK_IO.Put_String (Output, " ", 0); end if; end Write_Intro; function Name1_Start_Col (This : in Anno_Type; Start_Col : in E_Strings.Positions) return E_Strings.Positions is Name_Column : E_Strings.Positions; begin if This.Anno_Indent /= SparkFormatCommandLineData.Inline then -- Start_Col "is --# " but indent value is given from "--#" Name_Column := Proper_Start_Col (Start_Col => Start_Col) + (This.Anno_Indent - 1); else Name_Column := Proper_Start_Col (Start_Col => Start_Col) + (E_Strings.Get_Length (E_Str => This.Anno_Intro) + 1); end if; return Name_Column; end Name1_Start_Col; end Annotations; spark-2012.0.deb/sparkformat/sparkprogram.adb0000644000175000017500000077320511753202340020163 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Characters.Handling; with Ada.Characters.Latin_1; with CommandLineData; with E_Strings; with Heap; with LexTokenManager; with LexTokenManager.Relation_Algebra; with LexTokenManager.Relation_Algebra.String; with LexTokenManager.Seq_Algebra; with SeqAlgebra; with SparkFormatCommandLineData; with SparkLex; with SP_Symbols; use type LexTokenManager.Str_Comp_Result; use type SeqAlgebra.MemberOfSeq; use type SparkFormatCommandLineData.Expand_Or_Compress; use type SP_Symbols.SP_Terminal; package body SPARKProgram is --# inherit Ada.Characters.Latin_1, --# E_Strings; package White_Space is function Is_White_Space (Char : Character) return Boolean; procedure Skip (Input_Line : in E_Strings.T; Index : in out E_Strings.Lengths); --# derives Index from *, --# Input_Line; end White_Space; --# inherit CommandLineData, --# Dictionary, --# ErrorHandler, --# E_Strings, --# LexTokenManager, --# SparkLex, --# SPARK_IO, --# SP_Symbols, --# White_Space; package File_IO is procedure Create (Temporary_File : in out SPARK_IO.File_Type); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys, --# Temporary_File from SPARK_IO.File_Sys, --# Temporary_File; procedure Reset (Temporary_File : in out SPARK_IO.File_Type); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys, --# Temporary_File from *, --# Temporary_File; procedure Delete (Temporary_File : in out SPARK_IO.File_Type); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys, --# Temporary_File from *, --# Temporary_File; procedure Lex (Temporary_File : in SPARK_IO.File_Type; Token : out SP_Symbols.SP_Terminal; Lex_Val : out LexTokenManager.Lex_Value); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lex_Val, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Temporary_File; end File_IO; --# inherit Heap, --# LexTokenManager, --# LexTokenManager.Seq_Algebra, --# SeqAlgebra, --# White_Space; package Iteration is type Iterator is private; procedure Initialise (The_Heap : in Heap.HeapRecord; The_Seq : in LexTokenManager.Seq_Algebra.Seq; The_Iterator : out Iterator); --# global in LexTokenManager.State; --# derives The_Iterator from LexTokenManager.State, --# The_Heap, --# The_Seq; procedure Next (The_Heap : in Heap.HeapRecord; The_Iterator : in out Iterator); --# global in LexTokenManager.State; --# derives The_Iterator from *, --# LexTokenManager.State, --# The_Heap; function Complete (The_Iterator : Iterator) return Boolean; function Current_String (The_Iterator : Iterator) return LexTokenManager.Lex_String; function Current_Member (The_Iterator : Iterator) return LexTokenManager.Seq_Algebra.Member_Of_Seq; 
private type Iterator is record First_Member : LexTokenManager.Seq_Algebra.Member_Of_Seq; Current_Member : LexTokenManager.Seq_Algebra.Member_Of_Seq; Placeholder : LexTokenManager.Lex_String; Complete : Boolean; end record; end Iteration; --# inherit Ada.Characters.Handling, --# CommandLineData, --# E_Strings, --# SparkFormatCommandLineData, --# SPARK_IO, --# White_Space; package Annotations is type Anno_Type is private; procedure Initialize (This : out Anno_Type; Anno_Intro : in String; Anno_Succ : in String; Anno_Indent : in Natural); --# global in CommandLineData.Content; --# derives This from Anno_Indent, --# Anno_Intro, --# Anno_Succ, --# CommandLineData.Content; function Intro (This : Anno_Type) return E_Strings.T; function Indent (This : Anno_Type) return Natural; procedure Is_Anno_Start (This : in Anno_Type; Input_Line : in E_Strings.T; Index : in out E_Strings.Lengths; OK : out Boolean); --# derives Index, --# OK from Index, --# Input_Line, --# This; function Is_Start_Of (This : Anno_Type; Input_Line : E_Strings.T) return Boolean; function Is_End_Of (This : Anno_Type; Input_Line : E_Strings.T) return Boolean; function Proper_Start_Col (Start_Col : E_Strings.Positions) return E_Strings.Positions; procedure Write (This : in Anno_Type; Output : in SPARK_IO.File_Type; Start_Col : in E_Strings.Positions); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Output, --# Start_Col, --# This; procedure Write_Intro (This : in Anno_Type; Output : in SPARK_IO.File_Type; Start_Col : in E_Strings.Positions); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Output, --# Start_Col, --# This; function Name1_Start_Col (This : in Anno_Type; Start_Col : in E_Strings.Positions) return E_Strings.Positions; private subtype Anno_Start_Bounds is Positive range 1 .. 
3; subtype Anno_Start_Type is String (Anno_Start_Bounds); type Anno_Type is record Anno_Start : Anno_Start_Type; Anno_Intro : E_Strings.T; Anno_Succ : E_Strings.T; Anno_Indent : Natural; end record; end Annotations; --# inherit Ada.Characters.Handling, --# Ada.Characters.Latin_1, --# Annotations, --# CommandLineData, --# Dictionary, --# ErrorHandler, --# E_Strings, --# File_IO, --# Heap, --# Iteration, --# LexTokenManager, --# LexTokenManager.Relation_Algebra, --# LexTokenManager.Relation_Algebra.String, --# LexTokenManager.Seq_Algebra, --# RelationAlgebra, --# SeqAlgebra, --# SparkFormatCommandLineData, --# SparkLex, --# SPARKProgram, --# SPARK_IO, --# SP_Symbols, --# Statistics, --# White_Space; package Reformatter is Allow_Dotted_Names_Const : constant Boolean := True; Disallow_Dotted_Names_Const : constant Boolean := False; type State is private; procedure Initialise (Anno : in Annotations.Anno_Type; Dotted_Names : in Boolean; The_Heap : in out Heap.HeapRecord; This : out State); --# global in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# The_Heap from *, --# The_Heap & --# This from Anno, --# Dotted_Names, --# The_Heap; procedure Parse (This : in out State; The_Heap : in out Heap.HeapRecord; Temporary_File : in out SPARK_IO.File_Type); --# global in CommandLineData.Content; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives LexTokenManager.State, --# The_Heap from *, --# CommandLineData.Content, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Temporary_File, --# This & --# SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# Temporary_File, --# This & --# Statistics.TableUsage, --# This from *, --# CommandLineData.Content, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Temporary_File, --# The_Heap, --# This & --# Temporary_File from *; procedure Reformat (This : in State; The_Heap : in out Heap.HeapRecord; Temporary_File : in out SPARK_IO.File_Type; Output : in SPARK_IO.File_Type; Success : out Boolean); --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives SPARK_IO.File_Sys from *, --# CommandLineData.Content, --# LexTokenManager.State, --# Output, --# SparkFormatCommandLineData.Content, --# Temporary_File, --# The_Heap, --# This & --# Statistics.TableUsage, --# The_Heap from *, --# LexTokenManager.State, --# SparkFormatCommandLineData.Content, --# The_Heap, --# This & --# Success from This & --# Temporary_File from *, --# This; procedure Finalise (This : in out State; The_Heap : in out Heap.HeapRecord); --# derives The_Heap, --# This from *, --# This; private type Relation_Type is record Modifier_Rel : LexTokenManager.Relation_Algebra.Relation; Type_Rel : LexTokenManager.Relation_Algebra.String.Relation; Property_Rel : LexTokenManager.Relation_Algebra.String.Relation; end record; type Variable_Modifier is ( Unmoded, In_Mode, Out_Mode, In_Out_Mode, Protected_Unmoded, Protected_In, Protected_Out, Task_Modifier); type Modifier_Use is array (Variable_Modifier) of Boolean; type Statistics_Type is record Start_Col : E_Strings.Positions; Max_Modifier_Length : E_Strings.Lengths; Max_Primary_Id_Length : E_Strings.Lengths; Max_Type_Mark_Length : E_Strings.Lengths; Modifiers_Present : Modifier_Use; Type_Mark_Present : Boolean; Property_List_Present : Boolean; end record; type State is record Anno : Annotations.Anno_Type; Relations : Relation_Type; Parse_Stats : 
Statistics_Type; Allow_Dotted_Names : Boolean; Success : Boolean; end record; end Reformatter; package body White_Space is function Is_White_Space (Char : Character) return Boolean is begin --Note that: Ada.Characters.Latin_1.HT is a tab. return Char = Ada.Characters.Latin_1.Space or Char = Ada.Characters.Latin_1.HT; end Is_White_Space; procedure Skip (Input_Line : in E_Strings.T; Index : in out E_Strings.Lengths) is begin loop exit when Index >= E_Strings.Get_Length (E_Str => Input_Line) or else not Is_White_Space (Char => E_Strings.Get_Element (E_Str => Input_Line, Pos => Index)); Index := Index + 1; end loop; end Skip; end White_Space; package body File_IO is procedure Create (Temporary_File : in out SPARK_IO.File_Type) is Unused : SPARK_IO.File_Status; begin --# accept Flow_Message, 10, Unused, "The variable Unused is not required" & --# Flow_Message, 33, Unused, "The variable Unused is not required"; SPARK_IO.Create (Temporary_File, 0, "", "", Unused); -- Not sure what we can do if we can't create a temporary file end Create; procedure Reset (Temporary_File : in out SPARK_IO.File_Type) is Unused : SPARK_IO.File_Status; begin --# accept Flow_Message, 10, Unused, "The variable Unused is not required" & --# Flow_Message, 33, Unused, "The variable Unused is not required"; SPARK_IO.Reset (Temporary_File, SPARK_IO.In_File, Unused); -- not sure what we can do if we can't reset the file end Reset; procedure Delete (Temporary_File : in out SPARK_IO.File_Type) is Unused : SPARK_IO.File_Status; begin --# accept Flow_Message, 10, Unused, "The variable Unused is not required" & --# Flow_Message, 33, Unused, "The variable Unused is not required"; SPARK_IO.Delete (Temporary_File, Unused); -- not sure what we can do if we can't delete the file end Delete; procedure Lex (Temporary_File : in SPARK_IO.File_Type; Token : out SP_Symbols.SP_Terminal; Lex_Val : out LexTokenManager.Lex_Value) is Unused : Boolean; begin --# accept Flow_Message, 10, Unused, "The variable Unused is not required" & --# Flow_Message, 33, Unused, "The variable Unused is not required"; SparkLex.SPARK_Format_Lex (Prog_Text => Temporary_File, Token => Token, Lex_Val => Lex_Val, Punct_Token => Unused); end Lex; end File_IO; procedure Copy (Input, Output : in SPARK_IO.File_Type) is Input_Line : E_Strings.T; begin while not SPARK_IO.End_Of_File (Input) loop E_Strings.Get_Line (File => Input, E_Str => Input_Line); E_Strings.Put_Line (File => Output, E_Str => Input_Line); end loop; end Copy; package body Iteration is separate; package body Annotations is separate; package body Reformatter is separate; procedure Reformat_Annotations (Input, Output : in SPARK_IO.File_Type) is type State_Type is ( Parsing_SPARK_Code, Parsing_Globals_List, Parsing_Derives_List, Parsing_Inherit_Clause, Parsing_Own_Variable_Clause, Parsing_Refinement_Definition, Parsing_Initialization_Specification); --# inherit Ada.Characters.Handling, --# CommandLineData, --# E_Strings, --# SPARK_IO, --# White_Space; package Annotation --# own Start_String; is procedure Initialize; --# global in CommandLineData.Content; --# out Start_String; --# derives Start_String from CommandLineData.Content; procedure Is_Start (Input_Line : in E_Strings.T; Index : in out E_Strings.Lengths; OK : out Boolean); --# global in Start_String; --# derives Index, --# OK from Index, --# Input_Line, --# Start_String; procedure Write (Output : in SPARK_IO.File_Type; Start_Col : in E_Strings.Positions); --# global in Start_String; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# 
Output, --# Start_Col, --# Start_String; end Annotation; --# inherit Ada.Characters.Handling, --# Annotation, --# E_Strings, --# White_Space; package SPARK_Keyword_Matching is function Match (Input_Line : E_Strings.T; Index : E_Strings.Lengths; Match_Keyword : String) return Boolean; function Match_At_Start (Input_Line : E_Strings.T; Match_Keyword : String) return Boolean; --# global in Annotation.Start_String; end SPARK_Keyword_Matching; --# inherit Annotation, --# E_Strings, --# SPARK_Keyword_Matching, --# White_Space; package Accept_List is function Is_Start (Input_Line : E_Strings.T) return Boolean; --# global in Annotation.Start_String; end Accept_List; --# inherit Annotation, --# E_Strings, --# SPARK_Keyword_Matching, --# White_Space; package End_List is function Is_Start (Input_Line : E_Strings.T) return Boolean; --# global in Annotation.Start_String; end End_List; --# inherit Annotation, --# E_Strings, --# SPARK_Keyword_Matching, --# White_Space; package For_List is function Is_Start (Input_Line : E_Strings.T) return Boolean; --# global in Annotation.Start_String; end For_List; --# inherit Annotation, --# E_Strings, --# SPARK_Keyword_Matching, --# White_Space; package Function_List is function Is_Start (Input_Line : E_Strings.T) return Boolean; --# global in Annotation.Start_String; end Function_List; --# inherit Annotation, --# E_Strings, --# SPARK_Keyword_Matching, --# White_Space; package Type_List is function Is_Start (Input_Line : E_Strings.T) return Boolean; --# global in Annotation.Start_String; end Type_List; --# inherit Annotation, --# E_Strings, --# SPARK_Keyword_Matching, --# White_Space; package Assert_List is function Is_Start (Input_Line : E_Strings.T) return Boolean; --# global in Annotation.Start_String; end Assert_List; --# inherit Ada.Characters.Handling, --# Annotation, --# CommandLineData, --# Dictionary, --# ErrorHandler, --# E_Strings, --# File_IO, --# Heap, --# LexTokenManager, --# LexTokenManager.Relation_Algebra, --# LexTokenManager.Relation_Algebra.String, --# LexTokenManager.Seq_Algebra, --# RelationAlgebra, --# SeqAlgebra, --# SparkFormatCommandLineData, --# SparkLex, --# SPARKProgram, --# SPARK_IO, --# SPARK_Keyword_Matching, --# SP_Symbols, --# Statistics, --# White_Space; package Derives_List --# own State; is procedure Initialize; --# global in out LexTokenManager.State; --# out State; --# derives LexTokenManager.State, --# State from LexTokenManager.State; function Is_Start (Input_Line : E_Strings.T) return Boolean; --# global in Annotation.Start_String; function Is_End (Input_Line : E_Strings.T) return Boolean; procedure Parse (Temporary_File : in out SPARK_IO.File_Type; Start_Col : out E_Strings.Positions; The_Heap : in out Heap.HeapRecord; Imports : out LexTokenManager.Seq_Algebra.Seq; Exports : out LexTokenManager.Seq_Algebra.Seq; Dependencies : out LexTokenManager.Relation_Algebra.String.Relation; OK : out Boolean); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in SparkFormatCommandLineData.Content; --# in State; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# out SparkLex.Curr_Line; --# derives Dependencies, --# Exports, --# Imports from The_Heap & --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# OK, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Heap from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkFormatCommandLineData.Content, --# 
SPARK_IO.File_Sys, --# State, --# Temporary_File, --# The_Heap & --# Start_Col from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Temporary_File & --# Statistics.TableUsage from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkFormatCommandLineData.Content, --# SPARK_IO.File_Sys, --# State, --# Temporary_File, --# The_Heap & --# Temporary_File from *; procedure Reformat (OK : in Boolean; Temporary_File : in out SPARK_IO.File_Type; Output : in SPARK_IO.File_Type; Start_Col : in E_Strings.Positions; The_Heap : in out Heap.HeapRecord; Exports : in LexTokenManager.Seq_Algebra.Seq; Dependencies : in LexTokenManager.Relation_Algebra.String.Relation); --# global in Annotation.Start_String; --# in LexTokenManager.State; --# in SparkFormatCommandLineData.Content; --# in State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives SPARK_IO.File_Sys from *, --# Annotation.Start_String, --# Dependencies, --# Exports, --# LexTokenManager.State, --# OK, --# Output, --# SparkFormatCommandLineData.Content, --# Start_Col, --# State, --# Temporary_File, --# The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# Dependencies, --# Exports, --# LexTokenManager.State, --# OK, --# SparkFormatCommandLineData.Content, --# State, --# The_Heap & --# Temporary_File from *, --# OK; end Derives_List; package Subprogram is type Subprogram_Type is (Is_Function, Is_Procedure, Is_Unknown); end Subprogram; --# inherit Ada.Characters.Handling, --# Annotation, --# CommandLineData, --# Derives_List, --# Dictionary, --# ErrorHandler, --# E_Strings, --# File_IO, --# Heap, --# LexTokenManager, --# RelationAlgebra, --# SeqAlgebra, --# SparkFormatCommandLineData, --# SparkLex, --# SPARKProgram, --# SPARK_IO, --# SPARK_Keyword_Matching, --# SP_Symbols, --# Statistics, --# Subprogram, --# White_Space; package Declare_List is function Is_Start (Input_Line : E_Strings.T) return Boolean; --# global in Annotation.Start_String; function Is_End (Input_Line : E_Strings.T) return Boolean; pragma Unreferenced (Is_End); -- for future use end Declare_List; --# inherit Ada.Characters.Handling, --# Annotation, --# CommandLineData, --# Derives_List, --# Dictionary, --# ErrorHandler, --# E_Strings, --# File_IO, --# Heap, --# LexTokenManager, --# RelationAlgebra, --# SeqAlgebra, --# SparkFormatCommandLineData, --# SparkLex, --# SPARKProgram, --# SPARK_IO, --# SPARK_Keyword_Matching, --# SP_Symbols, --# Statistics, --# Subprogram, --# White_Space; package Proof_List is function Is_Start (Input_Line : E_Strings.T) return Boolean; --# global in Annotation.Start_String; function Is_End (Input_Line : E_Strings.T) return Boolean; pragma Unreferenced (Is_End); -- for future use end Proof_List; --# inherit Accept_List, --# Ada.Characters.Handling, --# Annotation, --# Assert_List, --# CommandLineData, --# Declare_List, --# Derives_List, --# Dictionary, --# End_List, --# ErrorHandler, --# E_Strings, --# File_IO, --# For_List, --# Function_List, --# Heap, --# LexTokenManager, --# LexTokenManager.Seq_Algebra, --# Proof_List, --# RelationAlgebra, --# SeqAlgebra, --# SparkFormatCommandLineData, --# SparkLex, --# SPARKProgram, --# SPARK_IO, --# SPARK_Keyword_Matching, --# SP_Symbols, --# Statistics, --# Subprogram, --# Type_List, --# White_Space; package Globals_List --# own Consecutive_Empty_Lines; --# initializes Consecutive_Empty_Lines; is type Modes is (Unmoded, In_Mode, 
In_Out_Mode, Out_Mode); type Global_Variables_Type is array (Modes) of LexTokenManager.Seq_Algebra.Seq; function Is_Start (Input_Line : E_Strings.T) return Boolean; --# global in Annotation.Start_String; function Is_End (Input_Line : E_Strings.T) return Boolean; --# global in Annotation.Start_String; procedure Increment_Consecutive_Empty_Lines; --# global in out Consecutive_Empty_Lines; --# derives Consecutive_Empty_Lines from *; procedure Reset_Consecutive_Empty_Lines; --# global out Consecutive_Empty_Lines; --# derives Consecutive_Empty_Lines from ; function No_Empty_Lines return Boolean; --# global in Consecutive_Empty_Lines; procedure Parse (Temporary_File : in out SPARK_IO.File_Type; Start_Col : out E_Strings.Positions; The_Heap : in out Heap.HeapRecord; Global_Variables : out Global_Variables_Type; OK : out Boolean); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# out SparkLex.Curr_Line; --# derives ErrorHandler.Error_Context, --# LexTokenManager.State, --# OK, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Start_Col from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Temporary_File & --# Global_Variables from The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Temporary_File, --# The_Heap & --# Temporary_File from *; procedure Reformat (OK : in Boolean; Temporary_File : in out SPARK_IO.File_Type; Output : in SPARK_IO.File_Type; Start_Col : in E_Strings.Positions; The_Heap : in out Heap.HeapRecord; Function_Or_Procedure : in Subprogram.Subprogram_Type; Global_Variables : in Global_Variables_Type; Imports : in LexTokenManager.Seq_Algebra.Seq; Exports : in LexTokenManager.Seq_Algebra.Seq); --# global in Annotation.Start_String; --# in LexTokenManager.State; --# in SparkFormatCommandLineData.Content; --# in out Consecutive_Empty_Lines; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Consecutive_Empty_Lines from & --# SPARK_IO.File_Sys from *, --# Annotation.Start_String, --# Consecutive_Empty_Lines, --# Exports, --# Function_Or_Procedure, --# Global_Variables, --# Imports, --# LexTokenManager.State, --# OK, --# Output, --# SparkFormatCommandLineData.Content, --# Start_Col, --# Temporary_File, --# The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# Exports, --# Function_Or_Procedure, --# Global_Variables, --# Imports, --# LexTokenManager.State, --# OK, --# SparkFormatCommandLineData.Content, --# The_Heap & --# Temporary_File from *, --# OK; end Globals_List; Function_Or_Procedure : Subprogram.Subprogram_Type; State : State_Type; Input_Line : E_Strings.T; The_Heap : Heap.HeapRecord; Globals_List_File : SPARK_IO.File_Type; Derives_List_File : SPARK_IO.File_Type; Inherit_Clause_File : SPARK_IO.File_Type; Initialize_Spec_File : SPARK_IO.File_Type; Own_Var_Clause_File : SPARK_IO.File_Type; Global_Variables : Globals_List.Global_Variables_Type; Imports : LexTokenManager.Seq_Algebra.Seq; Exports : LexTokenManager.Seq_Algebra.Seq; Dependencies : LexTokenManager.Relation_Algebra.String.Relation; Start_Col : E_Strings.Positions; Globals_List_OK : Boolean; Derives_List_OK : Boolean; Inherit_Anno : Annotations.Anno_Type; Initializes_Anno : Annotations.Anno_Type; Own_Var_Anno : 
Annotations.Anno_Type; Inherit_Clause : Reformatter.State; Initialization_Spec : Reformatter.State; Own_Variable_Clause : Reformatter.State; File_IO_Required : Boolean; Reformat_Successful : Boolean; package body Annotation is Start_String : E_Strings.T; procedure Initialize is begin Start_String := E_Strings.Copy_String (Str => "--"); E_Strings.Append_Char (E_Str => Start_String, Ch => CommandLineData.Content.Anno_Char); end Initialize; procedure Is_Start (Input_Line : in E_Strings.T; Index : in out E_Strings.Lengths; OK : out Boolean) is begin White_Space.Skip (Input_Line => Input_Line, Index => Index); if E_Strings.Get_Length (E_Str => Input_Line) - Index >= 2 then if E_Strings.Get_Element (E_Str => Input_Line, Pos => Index) = E_Strings.Get_Element (E_Str => Start_String, Pos => 1) and then E_Strings.Get_Element (E_Str => Input_Line, Pos => Index + 1) = E_Strings.Get_Element (E_Str => Start_String, Pos => 2) and then E_Strings.Get_Element (E_Str => Input_Line, Pos => Index + 2) = E_Strings.Get_Element (E_Str => Start_String, Pos => 3) then Index := Index + 3; OK := True; else OK := False; end if; else OK := False; end if; end Is_Start; procedure Write (Output : in SPARK_IO.File_Type; Start_Col : in E_Strings.Positions) is begin SPARK_IO.Set_Col (Output, Start_Col); E_Strings.Put_String (File => Output, E_Str => Start_String); end Write; end Annotation; package body SPARK_Keyword_Matching is -- Input_Line - A line from the spark input file. -- Index - An index position, expected to inside Input_Line. -- Match_Keyword - A keyword to match. -- -- The function returns true where Input_Line, at position index, contains -- the keyword Match_Keyword. For a match, the keyword must be terminated -- by a newline or white space. function Match (Input_Line : E_Strings.T; Index : E_Strings.Lengths; Match_Keyword : String) return Boolean is Is_Match : Boolean; Char_From_Line : Character; Char_From_Keyword : Character; begin -- E_Strings.Lengths'First is zero. This is used to describe empty strings. -- Can not find a match against an empty string. if Index = E_Strings.Lengths'First then Is_Match := False; else Is_Match := True; for I in E_Strings.Lengths range 1 .. Match_Keyword'Length loop if (Index - 1) + I >= (E_Strings.Get_Length (E_Str => Input_Line) + 1) then -- Reached or exceeded the end of line. Can not be a match. Is_Match := False; exit; end if; --# assert Match_Keyword=Match_Keyword% and --# Index > E_Strings.Lengths'First and --# Index <= E_Strings.Lengths'Last and --# I>=1 and I<=Match_Keyword'Length and --# (((Index - 1) + I) >= E_Strings.Lengths'First) and --# (((Index - 1) + I) <= E_Strings.Lengths'Last); Char_From_Line := Ada.Characters.Handling.To_Lower (E_Strings.Get_Element (E_Str => Input_Line, Pos => (Index - 1) + I)); Char_From_Keyword := Ada.Characters.Handling.To_Lower (Match_Keyword (I)); if not (Char_From_Line = Char_From_Keyword) then -- Are not the same at this index. Can not be a match. Is_Match := False; exit; end if; end loop; -- If detected a keyword match, then investigate what appears after -- the keyword. if Is_Match then -- If not terminated by line feed, is only a match if followed by -- white space. if ((Index + Match_Keyword'Length) <= E_Strings.Get_Length (E_Str => Input_Line)) then Is_Match := White_Space.Is_White_Space (Char => E_Strings.Get_Element (E_Str => Input_Line, Pos => Index + Match_Keyword'Length)); end if; end if; end if; return Is_Match; end Match; -- Input_Line - A line from the spark input file. -- Match_Keyword - A sparkkwyword to match. 
-- -- The function returns true where Input_Line is an annotation line, starting -- with the keyword Match_Keyword. function Match_At_Start (Input_Line : E_Strings.T; Match_Keyword : String) return Boolean is Index : E_Strings.Lengths; OK : Boolean; begin if E_Strings.Get_Length (E_Str => Input_Line) > 0 then Index := 1; Annotation.Is_Start (Input_Line => Input_Line, Index => Index, OK => OK); if OK then White_Space.Skip (Input_Line => Input_Line, Index => Index); OK := Match (Input_Line, Index, Match_Keyword); end if; else OK := False; end if; return OK; end Match_At_Start; end SPARK_Keyword_Matching; package body Accept_List is Keyword : constant String := "accept"; -- checks it's at the start of a line up to "--# accept" function Is_Start (Input_Line : E_Strings.T) return Boolean is begin return SPARK_Keyword_Matching.Match_At_Start (Input_Line => Input_Line, Match_Keyword => Keyword); end Is_Start; end Accept_List; package body End_List is Keyword : constant String := "end"; -- checks it's at the start of a line up to "--# end" function Is_Start (Input_Line : E_Strings.T) return Boolean is begin return SPARK_Keyword_Matching.Match_At_Start (Input_Line => Input_Line, Match_Keyword => Keyword); end Is_Start; end End_List; package body For_List is Keyword : constant String := "for"; -- checks it's at the start of a line up to "--# for" function Is_Start (Input_Line : E_Strings.T) return Boolean is begin return SPARK_Keyword_Matching.Match_At_Start (Input_Line => Input_Line, Match_Keyword => Keyword); end Is_Start; end For_List; package body Function_List is Keyword : constant String := "function"; -- checks it's at the start of a line up to "--# function" function Is_Start (Input_Line : E_Strings.T) return Boolean is begin return SPARK_Keyword_Matching.Match_At_Start (Input_Line => Input_Line, Match_Keyword => Keyword); end Is_Start; end Function_List; package body Type_List is Keyword : constant String := "type"; -- checks it's at the start of a line up to "--# type" function Is_Start (Input_Line : E_Strings.T) return Boolean is begin return SPARK_Keyword_Matching.Match_At_Start (Input_Line => Input_Line, Match_Keyword => Keyword); end Is_Start; end Type_List; package body Assert_List is Keyword : constant String := "assert"; -- checks it's at the start of a line up to "--# assert" function Is_Start (Input_Line : E_Strings.T) return Boolean is begin return SPARK_Keyword_Matching.Match_At_Start (Input_Line => Input_Line, Match_Keyword => Keyword); end Is_Start; end Assert_List; package body Derives_List --# own State is Multiply_Token, --# RW_Null_Token; is Keyword : constant String := "derives"; Multiply_Token, RW_Null_Token : LexTokenManager.Lex_String; procedure Initialize --# global in out LexTokenManager.State; --# out Multiply_Token; --# out RW_Null_Token; --# derives LexTokenManager.State, --# Multiply_Token, --# RW_Null_Token from LexTokenManager.State; is begin LexTokenManager.Insert_Examiner_String (Str => E_Strings.Copy_String (Str => "*"), Lex_Str => Multiply_Token); LexTokenManager.Insert_Examiner_String (Str => E_Strings.Copy_String (Str => "null"), Lex_Str => RW_Null_Token); end Initialize; -- checks it's at the start of a line up to "--# derives" function Is_Start (Input_Line : E_Strings.T) return Boolean is begin return SPARK_Keyword_Matching.Match_At_Start (Input_Line => Input_Line, Match_Keyword => Keyword); end Is_Start; -- checks for ";" at end of line function Is_End (Input_Line : E_Strings.T) return Boolean is Result : Boolean; begin Result := False; for I in 
E_Strings.Lengths range E_Strings.Positions'First .. E_Strings.Get_Length (E_Str => Input_Line) loop if E_Strings.Get_Element (E_Str => Input_Line, Pos => I) = ';' then Result := True; exit; end if; end loop; return Result; end Is_End; procedure Parse (Temporary_File : in out SPARK_IO.File_Type; Start_Col : out E_Strings.Positions; The_Heap : in out Heap.HeapRecord; Imports : out LexTokenManager.Seq_Algebra.Seq; Exports : out LexTokenManager.Seq_Algebra.Seq; Dependencies : out LexTokenManager.Relation_Algebra.String.Relation; OK : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in Multiply_Token; --# in RW_Null_Token; --# in SparkFormatCommandLineData.Content; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# out SparkLex.Curr_Line; --# derives Dependencies, --# Exports, --# Imports from The_Heap & --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# OK, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Heap from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Multiply_Token, --# RW_Null_Token, --# SparkFormatCommandLineData.Content, --# SPARK_IO.File_Sys, --# Temporary_File, --# The_Heap & --# Start_Col from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Temporary_File & --# Statistics.TableUsage from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Multiply_Token, --# RW_Null_Token, --# SparkFormatCommandLineData.Content, --# SPARK_IO.File_Sys, --# Temporary_File, --# The_Heap & --# Temporary_File from *; is Token : SP_Symbols.SP_Terminal; Lex_Val : LexTokenManager.Lex_Value; procedure Parse_Dependency_Clauses (Temporary_File : in SPARK_IO.File_Type; Token : in out SP_Symbols.SP_Terminal; Lex_Val : in LexTokenManager.Lex_Value; The_Heap : in out Heap.HeapRecord; Exports : in LexTokenManager.Seq_Algebra.Seq; Imports : in LexTokenManager.Seq_Algebra.Seq; Dependencies : in LexTokenManager.Relation_Algebra.String.Relation; OK : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in Multiply_Token; --# in RW_Null_Token; --# in SparkFormatCommandLineData.Content; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Statistics.TableUsage, --# The_Heap, --# Token from *, --# CommandLineData.Content, --# Dependencies, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Exports, --# Imports, --# LexTokenManager.State, --# Lex_Val, --# Multiply_Token, --# RW_Null_Token, --# SparkFormatCommandLineData.Content, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Temporary_File, --# The_Heap, --# Token & --# OK from CommandLineData.Content, --# Dependencies, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Exports, --# Imports, --# LexTokenManager.State, --# Lex_Val, --# Multiply_Token, --# RW_Null_Token, --# SparkFormatCommandLineData.Content, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Temporary_File, --# The_Heap, --# Token; is Similar_Exports : LexTokenManager.Seq_Algebra.Seq; Imports_For_This_Export : LexTokenManager.Seq_Algebra.Seq; Next_Lex_Val : LexTokenManager.Lex_Value; 
Some_Exports : Boolean; procedure Parse_Export_List (Temporary_File : in SPARK_IO.File_Type; Token : in out SP_Symbols.SP_Terminal; Lex_Val : in out LexTokenManager.Lex_Value; The_Heap : in out Heap.HeapRecord; Exports : in LexTokenManager.Seq_Algebra.Seq; Similar_Exports : in LexTokenManager.Seq_Algebra.Seq; OK : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in RW_Null_Token; --# in SparkFormatCommandLineData.Content; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lex_Val, --# OK, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lex_Val, --# SparkFormatCommandLineData.Content, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Temporary_File, --# Token & --# Statistics.TableUsage, --# The_Heap from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Exports, --# LexTokenManager.State, --# Lex_Val, --# RW_Null_Token, --# Similar_Exports, --# SparkFormatCommandLineData.Content, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Temporary_File, --# The_Heap, --# Token; is Dotted_Simple_Name : E_Strings.T; Export : LexTokenManager.Lex_String; No_Exports_Excluded : Boolean; begin OK := True; -- CFR1753: Check to see if an export has been excluded. No_Exports_Excluded := E_Strings.Is_Empty (E_Str => SparkFormatCommandLineData.Content.Exclude_Export); case Token is when SP_Symbols.identifier => loop Dotted_Simple_Name := LexTokenManager.Lex_String_To_String (Lex_Str => Lex_Val.Token_Str); File_IO.Lex (Temporary_File => Temporary_File, Token => Token, Lex_Val => Lex_Val); loop exit when Token /= SP_Symbols.point; E_Strings.Append_String (E_Str => Dotted_Simple_Name, Str => "."); File_IO.Lex (Temporary_File => Temporary_File, Token => Token, Lex_Val => Lex_Val); if Token = SP_Symbols.identifier then E_Strings.Append_Examiner_String (E_Str1 => Dotted_Simple_Name, E_Str2 => LexTokenManager.Lex_String_To_String (Lex_Str => Lex_Val.Token_Str)); File_IO.Lex (Temporary_File => Temporary_File, Token => Token, Lex_Val => Lex_Val); else OK := False; end if; exit when not OK; end loop; exit when not OK; -- CFR 1753: Check to see if this export has to be -- excluded. 
if No_Exports_Excluded or else not E_Strings.Eq_String (E_Str1 => Dotted_Simple_Name, E_Str2 => SparkFormatCommandLineData.Content.Exclude_Export) then LexTokenManager.Insert_Examiner_String (Str => Dotted_Simple_Name, Lex_Str => Export); LexTokenManager.Seq_Algebra.Add_Member (The_Heap => The_Heap, S => Exports, Given_Value => Export); LexTokenManager.Seq_Algebra.Add_Member (The_Heap => The_Heap, S => Similar_Exports, Given_Value => Export); end if; exit when Token = SP_Symbols.RWfrom; if Token = SP_Symbols.comma then File_IO.Lex (Temporary_File => Temporary_File, Token => Token, Lex_Val => Lex_Val); if Token /= SP_Symbols.identifier then OK := False; end if; else OK := False; end if; exit when not OK; end loop; when SP_Symbols.RWnull => LexTokenManager.Seq_Algebra.Add_Member (The_Heap => The_Heap, S => Exports, Given_Value => RW_Null_Token); LexTokenManager.Seq_Algebra.Add_Member (The_Heap => The_Heap, S => Similar_Exports, Given_Value => RW_Null_Token); File_IO.Lex (Temporary_File => Temporary_File, Token => Token, Lex_Val => Lex_Val); when SP_Symbols.semicolon => null; when others => OK := False; end case; end Parse_Export_List; procedure Parse_Import_List (Temporary_File : in SPARK_IO.File_Type; Token : in out SP_Symbols.SP_Terminal; Lex_Val : in out LexTokenManager.Lex_Value; The_Heap : in out Heap.HeapRecord; Similar_Exports : in LexTokenManager.Seq_Algebra.Seq; Imports : in LexTokenManager.Seq_Algebra.Seq; Imports_For_This_Export : in LexTokenManager.Seq_Algebra.Seq; OK : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in Multiply_Token; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lex_Val, --# OK, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lex_Val, --# Similar_Exports, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Temporary_File, --# The_Heap, --# Token & --# Statistics.TableUsage, --# The_Heap from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Imports, --# Imports_For_This_Export, --# LexTokenManager.State, --# Lex_Val, --# Multiply_Token, --# Similar_Exports, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Temporary_File, --# The_Heap, --# Token; is Dotted_Simple_Name : E_Strings.T; Export : LexTokenManager.Seq_Algebra.Member_Of_Seq; Import_Name : LexTokenManager.Lex_String; Some_Exports : Boolean; begin OK := True; -- CFR1753: There may be no exports in the export list if an -- excluded export was the only variable in the list. 
Some_Exports := not LexTokenManager.Seq_Algebra.Is_Empty_Seq (The_Heap => The_Heap, S => Similar_Exports); loop exit when Token = SP_Symbols.semicolon or Token = SP_Symbols.ampersand; case Token is when SP_Symbols.identifier => Dotted_Simple_Name := LexTokenManager.Lex_String_To_String (Lex_Str => Lex_Val.Token_Str); File_IO.Lex (Temporary_File => Temporary_File, Token => Token, Lex_Val => Lex_Val); loop exit when Token /= SP_Symbols.point; E_Strings.Append_String (E_Str => Dotted_Simple_Name, Str => "."); File_IO.Lex (Temporary_File => Temporary_File, Token => Token, Lex_Val => Lex_Val); if Token = SP_Symbols.identifier then E_Strings.Append_Examiner_String (E_Str1 => Dotted_Simple_Name, E_Str2 => LexTokenManager.Lex_String_To_String (Lex_Str => Lex_Val.Token_Str)); File_IO.Lex (Temporary_File => Temporary_File, Token => Token, Lex_Val => Lex_Val); else OK := False; end if; exit when not OK; end loop; -- CFR1753: Ensure that the export list is non-empty -- Otherwise do not add imports to the import sets. if OK and Some_Exports then LexTokenManager.Insert_Examiner_String (Str => Dotted_Simple_Name, Lex_Str => Import_Name); LexTokenManager.Seq_Algebra.Add_Member (The_Heap => The_Heap, S => Imports, Given_Value => Import_Name); LexTokenManager.Seq_Algebra.Add_Member (The_Heap => The_Heap, S => Imports_For_This_Export, Given_Value => Import_Name); end if; when SP_Symbols.multiply => -- CFR1753: Ensure that the export list is non-empty -- Otherwise do not add imports to the import sets. --# accept F, 41, "Used to simplify loop structure"; if Some_Exports then Export := LexTokenManager.Seq_Algebra.First_Member (The_Heap => The_Heap, S => Similar_Exports); loop exit when LexTokenManager.Seq_Algebra.Is_Null_Member (M => Export); LexTokenManager.Seq_Algebra.Add_Member (The_Heap => The_Heap, S => Imports, Given_Value => LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => Export)); Export := LexTokenManager.Seq_Algebra.Next_Member (The_Heap => The_Heap, M => Export); end loop; LexTokenManager.Seq_Algebra.Add_Member (The_Heap => The_Heap, S => Imports_For_This_Export, Given_Value => Multiply_Token); -- else -- SPARK_IO.Put_Line (SPARK_IO.Standard_Output, -- "ParseImporList: No exports discarding *", -- 0); end if; --# end accept; File_IO.Lex (Temporary_File => Temporary_File, Token => Token, Lex_Val => Lex_Val); when SP_Symbols.comma => null; when others => OK := False; end case; exit when not OK or Token = SP_Symbols.semicolon or Token = SP_Symbols.ampersand; if Token = SP_Symbols.comma then File_IO.Lex (Temporary_File => Temporary_File, Token => Token, Lex_Val => Lex_Val); else OK := False; end if; end loop; end Parse_Import_List; procedure Store_Dependencies (The_Heap : in out Heap.HeapRecord; Exports : in LexTokenManager.Seq_Algebra.Seq; Imports : in LexTokenManager.Seq_Algebra.Seq; Dependencies : in LexTokenManager.Relation_Algebra.String.Relation) --# global in LexTokenManager.State; --# in Multiply_Token; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# The_Heap from *, --# Dependencies, --# Exports, --# Imports, --# LexTokenManager.State, --# Multiply_Token, --# The_Heap; is Export, Import : LexTokenManager.Seq_Algebra.Member_Of_Seq; Export_Val, Import_Val : LexTokenManager.Lex_String; begin Export := LexTokenManager.Seq_Algebra.First_Member (The_Heap => The_Heap, S => Exports); loop exit when LexTokenManager.Seq_Algebra.Is_Null_Member (M => Export); Export_Val := LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => Export); 
Import := LexTokenManager.Seq_Algebra.First_Member (The_Heap => The_Heap, S => Imports); loop exit when LexTokenManager.Seq_Algebra.Is_Null_Member (M => Import); Import_Val := LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => Import); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Export_Val, Lex_Str2 => Import_Val) = LexTokenManager.Str_Eq then Import_Val := Multiply_Token; end if; LexTokenManager.Relation_Algebra.String.Insert_Pair (The_Heap => The_Heap, R => Dependencies, I => Export_Val, J => Import_Val); Import := LexTokenManager.Seq_Algebra.Next_Member (The_Heap => The_Heap, M => Import); end loop; Export := LexTokenManager.Seq_Algebra.Next_Member (The_Heap => The_Heap, M => Export); end loop; end Store_Dependencies; begin OK := True; Next_Lex_Val := Lex_Val; loop exit when Token = SP_Symbols.semicolon; LexTokenManager.Seq_Algebra.Create_Seq (The_Heap => The_Heap, S => Similar_Exports); Parse_Export_List (Temporary_File => Temporary_File, Token => Token, Lex_Val => Next_Lex_Val, The_Heap => The_Heap, Exports => Exports, Similar_Exports => Similar_Exports, OK => OK); if OK then -- CFR1753: There may be no exports in the export list if an -- excluded export was the only variable in the list, -- however, we still need to parse the input list. Some_Exports := not LexTokenManager.Seq_Algebra.Is_Empty_Seq (The_Heap => The_Heap, S => Similar_Exports); if Token = SP_Symbols.RWfrom then File_IO.Lex (Temporary_File => Temporary_File, Token => Token, Lex_Val => Next_Lex_Val); LexTokenManager.Seq_Algebra.Create_Seq (The_Heap => The_Heap, S => Imports_For_This_Export); Parse_Import_List (Temporary_File => Temporary_File, Token => Token, Lex_Val => Next_Lex_Val, The_Heap => The_Heap, Similar_Exports => Similar_Exports, Imports => Imports, Imports_For_This_Export => Imports_For_This_Export, OK => OK); -- CFR1753: Only add imports and store the dependencies -- if there are some exported variables associated -- this import list. 
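            -- Here Some_Exports is False only when every export of the clause
            -- matched the exclusion setting
            -- (SparkFormatCommandLineData.Content.Exclude_Export); in that
            -- case the whole clause, imports included, is dropped rather than
            -- being added to the Imports sequence and Dependencies relation.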
if Some_Exports then LexTokenManager.Seq_Algebra.Augment_Seq (The_Heap => The_Heap, A => Imports, B => Imports_For_This_Export); Store_Dependencies (The_Heap => The_Heap, Exports => Similar_Exports, Imports => Imports_For_This_Export, Dependencies => Dependencies); end if; LexTokenManager.Seq_Algebra.Dispose_Of_Seq (The_Heap => The_Heap, S => Imports_For_This_Export); if OK then case Token is when SP_Symbols.ampersand => File_IO.Lex (Temporary_File => Temporary_File, Token => Token, Lex_Val => Next_Lex_Val); when SP_Symbols.semicolon => null; when others => OK := False; end case; end if; else OK := False; end if; end if; LexTokenManager.Seq_Algebra.Dispose_Of_Seq (The_Heap => The_Heap, S => Similar_Exports); exit when not OK; end loop; end Parse_Dependency_Clauses; begin File_IO.Reset (Temporary_File => Temporary_File); SparkLex.Clear_Line_Context; LexTokenManager.Seq_Algebra.Create_Seq (The_Heap => The_Heap, S => Exports); LexTokenManager.Seq_Algebra.Create_Seq (The_Heap => The_Heap, S => Imports); LexTokenManager.Relation_Algebra.String.Create_Relation (The_Heap => The_Heap, R => Dependencies); File_IO.Lex (Temporary_File => Temporary_File, Token => Token, Lex_Val => Lex_Val); if Token = SP_Symbols.annotation_start then Start_Col := Lex_Val.Position.Start_Pos; -- we know Start_Col can't be zero because the line can't be empty --# accept Flow_Message, 10, Lex_Val, "The next symbol should be 'derives', so we don't need the Lex_Val"; File_IO.Lex (Temporary_File => Temporary_File, Token => Token, Lex_Val => Lex_Val); --# end accept; if Token = SP_Symbols.RWderives then File_IO.Lex (Temporary_File => Temporary_File, Token => Token, Lex_Val => Lex_Val); Parse_Dependency_Clauses (Temporary_File => Temporary_File, Token => Token, Lex_Val => Lex_Val, The_Heap => The_Heap, Exports => Exports, Imports => Imports, Dependencies => Dependencies, OK => OK); OK := OK and Token = SP_Symbols.semicolon; else OK := False; end if; else OK := False; end if; --# accept Flow_Message, 602, Start_Col, Start_Col, "Start_Col may not be set if not OK"; end Parse; procedure Reformat (OK : in Boolean; Temporary_File : in out SPARK_IO.File_Type; Output : in SPARK_IO.File_Type; Start_Col : in E_Strings.Positions; The_Heap : in out Heap.HeapRecord; Exports : in LexTokenManager.Seq_Algebra.Seq; Dependencies : in LexTokenManager.Relation_Algebra.String.Relation) --# global in Annotation.Start_String; --# in LexTokenManager.State; --# in Multiply_Token; --# in RW_Null_Token; --# in SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives SPARK_IO.File_Sys from *, --# Annotation.Start_String, --# Dependencies, --# Exports, --# LexTokenManager.State, --# Multiply_Token, --# OK, --# Output, --# RW_Null_Token, --# SparkFormatCommandLineData.Content, --# Start_Col, --# Temporary_File, --# The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# Dependencies, --# Exports, --# LexTokenManager.State, --# Multiply_Token, --# OK, --# RW_Null_Token, --# SparkFormatCommandLineData.Content, --# The_Heap & --# Temporary_File from *, --# OK; is First_Export : Boolean; Export_Width : E_Strings.Lengths; function Derives_Col (Start_Col : E_Strings.Positions) return E_Strings.Positions is begin return Start_Col + 4; -- length of "--# " end Derives_Col; -- writes "derives " in correct position procedure Write_Derives (Output : in SPARK_IO.File_Type; Start_Col : in E_Strings.Positions) --# global in SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives 
SPARK_IO.File_Sys from *, --# Output, --# SparkFormatCommandLineData.Content, --# Start_Col; is begin SPARK_IO.Set_Col (Output, Derives_Col (Start_Col => Start_Col)); SPARK_IO.Put_String (Output, "derives", 0); -- If ExportIndent is not Inline, then don't put out a trailing space, -- since this will be rejected by GNAT -gnatyb style check mode. if SparkFormatCommandLineData.Content.Export_Indent = SparkFormatCommandLineData.Inline then SPARK_IO.Put_String (Output, " ", 0); end if; end Write_Derives; function Export_Col (Start_Col : E_Strings.Positions) return E_Strings.Positions --# global in SparkFormatCommandLineData.Content; is Export_Column : E_Strings.Positions; begin if SparkFormatCommandLineData.Content.Export_Indent = SparkFormatCommandLineData.Inline then Export_Column := Derives_Col (Start_Col => Start_Col) + 8; -- length of "derives " else Export_Column := Derives_Col (Start_Col => Start_Col) + (SparkFormatCommandLineData.Content.Export_Indent - 1); end if; return Export_Column; end Export_Col; -- only called when there is a value for separator indent, returns starting column function Separator_Col (Start_Col : E_Strings.Positions) return E_Strings.Positions --# global in SparkFormatCommandLineData.Content; is begin -- Start_Col "is --# " but indent value is given from "--#" return Derives_Col (Start_Col => Start_Col) + (SparkFormatCommandLineData.Content.Separator_Indent - 1); end Separator_Col; procedure Write_Export (First_Export : in out Boolean; Output : in SPARK_IO.File_Type; Start_Col : in E_Strings.Positions; The_Heap : in Heap.HeapRecord; Export : in LexTokenManager.Seq_Algebra.Member_Of_Seq) --# global in Annotation.Start_String; --# in LexTokenManager.State; --# in SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives First_Export from * & --# SPARK_IO.File_Sys from *, --# Annotation.Start_String, --# Export, --# First_Export, --# LexTokenManager.State, --# Output, --# SparkFormatCommandLineData.Content, --# Start_Col, --# The_Heap; is begin if First_Export then First_Export := False; if SparkFormatCommandLineData.Content.Export_Indent /= SparkFormatCommandLineData.Inline then Annotation.Write (Output => Output, Start_Col => Start_Col); end if; else if SparkFormatCommandLineData.Content.Separator_Indent = SparkFormatCommandLineData.Inline then SPARK_IO.Put_Line (Output, " &", 0); else Annotation.Write (Output => Output, Start_Col => Start_Col); SPARK_IO.Set_Col (Output, Separator_Col (Start_Col => Start_Col)); SPARK_IO.Put_Line (Output, "&", 0); end if; Annotation.Write (Output => Output, Start_Col => Start_Col); end if; SPARK_IO.Set_Col (Output, Export_Col (Start_Col => Start_Col)); E_Strings.Put_String (File => Output, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => Export))); end Write_Export; function Longest_Export (The_Heap : Heap.HeapRecord; Exports : LexTokenManager.Seq_Algebra.Seq) return E_Strings.Lengths --# global in LexTokenManager.State; is Max_Length : Natural; Export : LexTokenManager.Seq_Algebra.Member_Of_Seq; function Max (A, B : Natural) return Natural is Result : Natural; begin if A >= B then Result := A; else Result := B; end if; return Result; end Max; begin Max_Length := 0; Export := LexTokenManager.Seq_Algebra.First_Member (The_Heap => The_Heap, S => Exports); loop exit when LexTokenManager.Seq_Algebra.Is_Null_Member (M => Export); Max_Length := Max (A => Max_Length, B => E_Strings.Get_Length (E_Str => LexTokenManager.Lex_String_To_String 
(Lex_Str => LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => Export)))); Export := LexTokenManager.Seq_Algebra.Next_Member (The_Heap => The_Heap, M => Export); end loop; return Max_Length; end Longest_Export; function From_Col (Start_Col : E_Strings.Positions; Export_Width : E_Strings.Lengths) return E_Strings.Positions --# global in SparkFormatCommandLineData.Content; is From_Column : E_Strings.Positions; begin if SparkFormatCommandLineData.Content.Separator_Indent = SparkFormatCommandLineData.Inline then From_Column := (Export_Col (Start_Col => Start_Col) + Export_Width) + 1; else From_Column := Separator_Col (Start_Col => Start_Col); end if; return From_Column; end From_Col; procedure Write_From (Output : in SPARK_IO.File_Type; Start_Col : in E_Strings.Positions; Export_Width : in E_Strings.Lengths) --# global in Annotation.Start_String; --# in SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Annotation.Start_String, --# Export_Width, --# Output, --# SparkFormatCommandLineData.Content, --# Start_Col; is begin if SparkFormatCommandLineData.Content.Separator_Indent /= SparkFormatCommandLineData.Inline then Annotation.Write (Output => Output, Start_Col => Start_Col); end if; SPARK_IO.Set_Col (Output, From_Col (Start_Col => Start_Col, Export_Width => Export_Width)); SPARK_IO.Put_String (Output, "from", 0); -- If ImportIndent is not Inline, then don't put out a trailing space, -- since this will be rejected by GNAT -gnatyb style check mode. if SparkFormatCommandLineData.Content.Import_Indent = SparkFormatCommandLineData.Inline then SPARK_IO.Put_String (Output, " ", 0); end if; end Write_From; procedure Write_Import (First_Import : in out Boolean; Output : in SPARK_IO.File_Type; Start_Col : in E_Strings.Positions; Export_Width : in E_Strings.Lengths; The_Heap : in Heap.HeapRecord; Import : in LexTokenManager.Seq_Algebra.Member_Of_Seq) --# global in Annotation.Start_String; --# in LexTokenManager.State; --# in SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives First_Import from * & --# SPARK_IO.File_Sys from *, --# Annotation.Start_String, --# Export_Width, --# First_Import, --# Import, --# LexTokenManager.State, --# Output, --# SparkFormatCommandLineData.Content, --# Start_Col, --# The_Heap; is function Import_Col (Start_Col : E_Strings.Positions; Export_Width : E_Strings.Lengths) return E_Strings.Positions --# global in SparkFormatCommandLineData.Content; is Import_Column : E_Strings.Positions; begin if SparkFormatCommandLineData.Content.Import_Indent = SparkFormatCommandLineData.Inline then Import_Column := From_Col (Start_Col => Start_Col, Export_Width => Export_Width) + 5; -- length of "from " else Import_Column := Derives_Col (Start_Col => Start_Col) + (SparkFormatCommandLineData.Content.Import_Indent - 1); end if; return Import_Column; end Import_Col; begin if First_Import then First_Import := False; if SparkFormatCommandLineData.Content.Import_Indent /= SparkFormatCommandLineData.Inline then Annotation.Write (Output => Output, Start_Col => Start_Col); end if; else SPARK_IO.Put_Line (Output, ",", 0); Annotation.Write (Output => Output, Start_Col => Start_Col); end if; SPARK_IO.Set_Col (Output, Import_Col (Start_Col => Start_Col, Export_Width => Export_Width)); E_Strings.Put_String (File => Output, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => Import))); end Write_Import; procedure Write_Imports 
(Output : in SPARK_IO.File_Type; Start_Col : in E_Strings.Positions; Export_Width : in E_Strings.Lengths; The_Heap : in out Heap.HeapRecord; Imports : in LexTokenManager.Seq_Algebra.Seq) --# global in Annotation.Start_String; --# in LexTokenManager.State; --# in Multiply_Token; --# in SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Annotation.Start_String, --# Export_Width, --# Imports, --# LexTokenManager.State, --# Multiply_Token, --# Output, --# SparkFormatCommandLineData.Content, --# Start_Col, --# The_Heap & --# The_Heap from *, --# Imports, --# LexTokenManager.State, --# Multiply_Token; is Import : LexTokenManager.Seq_Algebra.Member_Of_Seq; First_Import : Boolean; The_Iterator : SPARKProgram.Iteration.Iterator; begin Import := LexTokenManager.Seq_Algebra.First_Member (The_Heap => The_Heap, S => Imports); First_Import := True; -- Imports might contain a "*", but it might not be the first -- member of the sequence. SPARK _does_ require the "*" to appear -- first in the import list, so we scan for it alone on the first pass, -- write it out, then remove it from the sequence. -- Note that the sequence is disposed of after writing imports so it is -- OK to modify it here. loop exit when LexTokenManager.Seq_Algebra.Is_Null_Member (M => Import); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => Import), Lex_Str2 => Multiply_Token) = LexTokenManager.Str_Eq then Write_Import (First_Import => First_Import, Output => Output, Start_Col => Start_Col, Export_Width => Export_Width, The_Heap => The_Heap, Import => Import); LexTokenManager.Seq_Algebra.Remove_Member (The_Heap => The_Heap, S => Imports, Given_Value => Multiply_Token); exit; end if; Import := LexTokenManager.Seq_Algebra.Next_Member (The_Heap => The_Heap, M => Import); end loop; -- If '*' was the only import in the seq then the seq is now empty so we don't -- need to write any more imports. Need to guard against this case as the -- Initialise routine has an implicit precondition that the sequence is not -- empty. 
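      -- Illustrative example (hypothetical imports): if the sequence held
      -- A, '*' and B, the '*' has been written first and removed by the loop
      -- above; the iteration below then writes the remaining imports A and B
      -- after it.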
if not LexTokenManager.Seq_Algebra.Is_Empty_Seq (The_Heap => The_Heap, S => Imports) then SPARKProgram.Iteration.Initialise (The_Heap => The_Heap, The_Seq => Imports, The_Iterator => The_Iterator); Write_Import (First_Import => First_Import, Output => Output, Start_Col => Start_Col, Export_Width => Export_Width, The_Heap => The_Heap, Import => SPARKProgram.Iteration.Current_Member (The_Iterator => The_Iterator)); loop SPARKProgram.Iteration.Next (The_Heap => The_Heap, The_Iterator => The_Iterator); exit when SPARKProgram.Iteration.Complete (The_Iterator => The_Iterator); Write_Import (First_Import => First_Import, Output => Output, Start_Col => Start_Col, Export_Width => Export_Width, The_Heap => The_Heap, Import => SPARKProgram.Iteration.Current_Member (The_Iterator => The_Iterator)); end loop; end if; end Write_Imports; procedure Substitute_Self_For_Multiply (The_Heap : in out Heap.HeapRecord; Exports : in LexTokenManager.Seq_Algebra.Seq; Dependencies_With_Multiply : in LexTokenManager.Relation_Algebra.String.Relation; Dependencies_Without_Multiply : in LexTokenManager.Relation_Algebra.String.Relation) --# global in LexTokenManager.State; --# in Multiply_Token; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# The_Heap from *, --# Dependencies_Without_Multiply, --# Dependencies_With_Multiply, --# Exports, --# LexTokenManager.State, --# Multiply_Token, --# The_Heap; is Imports : LexTokenManager.Seq_Algebra.Seq; Export, Import : LexTokenManager.Seq_Algebra.Member_Of_Seq; Export_Val, Import_Val : LexTokenManager.Lex_String; begin Export := LexTokenManager.Seq_Algebra.First_Member (The_Heap => The_Heap, S => Exports); loop exit when LexTokenManager.Seq_Algebra.Is_Null_Member (M => Export); Export_Val := LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => Export); LexTokenManager.Relation_Algebra.String.Row_Extraction (The_Heap => The_Heap, R => Dependencies_With_Multiply, Given_Index => Export_Val, S => Imports); Import := LexTokenManager.Seq_Algebra.First_Member (The_Heap => The_Heap, S => Imports); loop exit when LexTokenManager.Seq_Algebra.Is_Null_Member (M => Import); Import_Val := LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => Import); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Import_Val, Lex_Str2 => Multiply_Token) = LexTokenManager.Str_Eq then Import_Val := Export_Val; end if; LexTokenManager.Relation_Algebra.String.Insert_Pair (The_Heap => The_Heap, R => Dependencies_Without_Multiply, I => Export_Val, J => Import_Val); Import := LexTokenManager.Seq_Algebra.Next_Member (The_Heap => The_Heap, M => Import); end loop; LexTokenManager.Seq_Algebra.Dispose_Of_Seq (The_Heap => The_Heap, S => Imports); Export := LexTokenManager.Seq_Algebra.Next_Member (The_Heap => The_Heap, M => Export); end loop; end Substitute_Self_For_Multiply; procedure Write_Similar_Clauses (Output : in SPARK_IO.File_Type; Start_Col : in E_Strings.Positions; Export_Width : in E_Strings.Lengths; First_Export : in out Boolean; The_Heap : in out Heap.HeapRecord; Exports : in LexTokenManager.Seq_Algebra.Seq; Dependencies : in LexTokenManager.Relation_Algebra.String.Relation) --# global in Annotation.Start_String; --# in LexTokenManager.State; --# in Multiply_Token; --# in RW_Null_Token; --# in SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives First_Export, --# Statistics.TableUsage, --# The_Heap from *, --# Dependencies, --# Exports, --# LexTokenManager.State, --# 
Multiply_Token, --# RW_Null_Token, --# The_Heap & --# SPARK_IO.File_Sys from *, --# Annotation.Start_String, --# Dependencies, --# Exports, --# Export_Width, --# First_Export, --# LexTokenManager.State, --# Multiply_Token, --# Output, --# RW_Null_Token, --# SparkFormatCommandLineData.Content, --# Start_Col, --# The_Heap; is Compressed_Exports : LexTokenManager.Relation_Algebra.Relation; Compressed_Imports : LexTokenManager.Relation_Algebra.Relation; Compressed_Index : Natural; procedure Find_Similar_Exports (The_Heap : in out Heap.HeapRecord; Export : in LexTokenManager.Seq_Algebra.Member_Of_Seq; Dependencies : in LexTokenManager.Relation_Algebra.String.Relation; Similar_Exports : in LexTokenManager.Seq_Algebra.Seq; Imports : in LexTokenManager.Seq_Algebra.Seq) --# global in LexTokenManager.State; --# in RW_Null_Token; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# The_Heap from *, --# Dependencies, --# Export, --# Imports, --# LexTokenManager.State, --# RW_Null_Token, --# Similar_Exports, --# The_Heap; is Other_Imports : LexTokenManager.Seq_Algebra.Seq; Other_Export : LexTokenManager.Seq_Algebra.Member_Of_Seq; Other_Export_Val : LexTokenManager.Lex_String; function Identical_Sequences (The_Heap : Heap.HeapRecord; S : LexTokenManager.Seq_Algebra.Seq; T : LexTokenManager.Seq_Algebra.Seq) return Boolean --# global in LexTokenManager.State; is M, N : LexTokenManager.Seq_Algebra.Member_Of_Seq; Identical : Boolean; begin M := LexTokenManager.Seq_Algebra.First_Member (The_Heap => The_Heap, S => S); N := LexTokenManager.Seq_Algebra.First_Member (The_Heap => The_Heap, S => T); Identical := True; loop exit when LexTokenManager.Seq_Algebra.Is_Null_Member (M => M) or else LexTokenManager.Seq_Algebra.Is_Null_Member (M => N); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => M), Lex_Str2 => LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => N)) /= LexTokenManager.Str_Eq then Identical := False; exit; end if; M := LexTokenManager.Seq_Algebra.Next_Member (The_Heap => The_Heap, M => M); N := LexTokenManager.Seq_Algebra.Next_Member (The_Heap => The_Heap, M => N); end loop; return Identical and then LexTokenManager.Seq_Algebra.Is_Null_Member (M => M) and then LexTokenManager.Seq_Algebra.Is_Null_Member (M => N); end Identical_Sequences; begin Other_Export := LexTokenManager.Seq_Algebra.Next_Member (The_Heap => The_Heap, M => Export); loop exit when LexTokenManager.Seq_Algebra.Is_Null_Member (M => Other_Export); Other_Export_Val := LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => Other_Export); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Other_Export_Val, Lex_Str2 => RW_Null_Token) /= LexTokenManager.Str_Eq then LexTokenManager.Relation_Algebra.String.Row_Extraction (The_Heap => The_Heap, R => Dependencies, Given_Index => Other_Export_Val, S => Other_Imports); if Identical_Sequences (The_Heap => The_Heap, S => Imports, T => Other_Imports) then LexTokenManager.Seq_Algebra.Add_Member (The_Heap => The_Heap, S => Similar_Exports, Given_Value => Other_Export_Val); end if; LexTokenManager.Seq_Algebra.Dispose_Of_Seq (The_Heap => The_Heap, S => Other_Imports); end if; Other_Export := LexTokenManager.Seq_Algebra.Next_Member (The_Heap => The_Heap, M => Other_Export); end loop; end Find_Similar_Exports; procedure Swap_Sequence (A, B : in out LexTokenManager.Seq_Algebra.Seq) --# derives A from B & --# B from A; is Temp : 
LexTokenManager.Seq_Algebra.Seq; begin Temp := A; A := B; B := Temp; end Swap_Sequence; function Sequence_Length (The_Heap : Heap.HeapRecord; S : LexTokenManager.Seq_Algebra.Seq) return Natural is M : LexTokenManager.Seq_Algebra.Member_Of_Seq; Length : Natural; begin M := LexTokenManager.Seq_Algebra.First_Member (The_Heap => The_Heap, S => S); Length := 0; loop exit when LexTokenManager.Seq_Algebra.Is_Null_Member (M => M); Length := Length + 1; M := LexTokenManager.Seq_Algebra.Next_Member (The_Heap => The_Heap, M => M); end loop; return Length; end Sequence_Length; procedure Build_Compressed_Dependencies (The_Heap : in out Heap.HeapRecord; Dependencies : in LexTokenManager.Relation_Algebra.String.Relation; Exports : in LexTokenManager.Seq_Algebra.Seq; Compressed_Exports : out LexTokenManager.Relation_Algebra.Relation; Compressed_Imports : out LexTokenManager.Relation_Algebra.Relation; Compressed_Index : out Natural) --# global in LexTokenManager.State; --# in Multiply_Token; --# in RW_Null_Token; --# in out Statistics.TableUsage; --# derives Compressed_Exports, --# Compressed_Imports from The_Heap & --# Compressed_Index, --# The_Heap from Dependencies, --# Exports, --# LexTokenManager.State, --# Multiply_Token, --# RW_Null_Token, --# The_Heap & --# Statistics.TableUsage from *, --# Dependencies, --# Exports, --# LexTokenManager.State, --# Multiply_Token, --# RW_Null_Token, --# The_Heap; is Export_Val : LexTokenManager.Lex_String; Export : LexTokenManager.Seq_Algebra.Member_Of_Seq; Next_Export : LexTokenManager.Seq_Algebra.Member_Of_Seq; Similar_Exports : LexTokenManager.Seq_Algebra.Seq; Imports : LexTokenManager.Seq_Algebra.Seq; procedure Try_Adding_Self_Dependencies (The_Heap : in out Heap.HeapRecord; Export : in LexTokenManager.Seq_Algebra.Member_Of_Seq; Dependencies : in LexTokenManager.Relation_Algebra.String.Relation; Similar_Exports : in out LexTokenManager.Seq_Algebra.Seq; Imports : in out LexTokenManager.Seq_Algebra.Seq) --# global in LexTokenManager.State; --# in Multiply_Token; --# in RW_Null_Token; --# in out Statistics.TableUsage; --# derives Imports, --# Similar_Exports, --# Statistics.TableUsage, --# The_Heap from *, --# Dependencies, --# Export, --# Imports, --# LexTokenManager.State, --# Multiply_Token, --# RW_Null_Token, --# Similar_Exports, --# The_Heap; is Other_Export : LexTokenManager.Seq_Algebra.Member_Of_Seq; procedure Try_Adding_Self_Dependency (The_Heap : in out Heap.HeapRecord; Export : in LexTokenManager.Seq_Algebra.Member_Of_Seq; Other_Export : in LexTokenManager.Seq_Algebra.Member_Of_Seq; Dependencies : in LexTokenManager.Relation_Algebra.String.Relation; Similar_Exports : in out LexTokenManager.Seq_Algebra.Seq; Imports : in out LexTokenManager.Seq_Algebra.Seq) --# global in LexTokenManager.State; --# in RW_Null_Token; --# in out Statistics.TableUsage; --# derives Imports, --# Similar_Exports, --# Statistics.TableUsage from *, --# Dependencies, --# Export, --# LexTokenManager.State, --# Other_Export, --# RW_Null_Token, --# Similar_Exports, --# The_Heap & --# The_Heap from *, --# Dependencies, --# Export, --# Imports, --# LexTokenManager.State, --# Other_Export, --# RW_Null_Token, --# Similar_Exports; is Export_Val, Other_Export_Val : LexTokenManager.Lex_String; Trial_Similar_Exports : LexTokenManager.Seq_Algebra.Seq; Trial_Imports : LexTokenManager.Seq_Algebra.Seq; TrialDependencies : LexTokenManager.Relation_Algebra.String.Relation; begin Export_Val := LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => Export); Other_Export_Val := 
LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => Other_Export); LexTokenManager.Relation_Algebra.String.Create_Relation (The_Heap => The_Heap, R => TrialDependencies); LexTokenManager.Relation_Algebra.String.Augment_Relation (The_Heap => The_Heap, A => TrialDependencies, B => Dependencies); LexTokenManager.Relation_Algebra.String.Insert_Pair (The_Heap => The_Heap, R => TrialDependencies, I => Other_Export_Val, J => Other_Export_Val); LexTokenManager.Seq_Algebra.Create_Seq (The_Heap => The_Heap, S => Trial_Similar_Exports); LexTokenManager.Relation_Algebra.String.Row_Extraction (The_Heap => The_Heap, R => TrialDependencies, Given_Index => Export_Val, S => Trial_Imports); Find_Similar_Exports (The_Heap => The_Heap, Export => Export, Dependencies => TrialDependencies, Similar_Exports => Trial_Similar_Exports, Imports => Trial_Imports); if Sequence_Length (The_Heap => The_Heap, S => Trial_Similar_Exports) > Sequence_Length (The_Heap => The_Heap, S => Similar_Exports) then Swap_Sequence (A => Similar_Exports, B => Trial_Similar_Exports); Swap_Sequence (A => Imports, B => Trial_Imports); LexTokenManager.Relation_Algebra.String.Insert_Pair (The_Heap => The_Heap, R => Dependencies, I => Other_Export_Val, J => Other_Export_Val); end if; LexTokenManager.Seq_Algebra.Dispose_Of_Seq (The_Heap => The_Heap, S => Trial_Similar_Exports); LexTokenManager.Seq_Algebra.Dispose_Of_Seq (The_Heap => The_Heap, S => Trial_Imports); LexTokenManager.Relation_Algebra.String.Dispose_Of_Relation (The_Heap => The_Heap, R => TrialDependencies); end Try_Adding_Self_Dependency; begin if LexTokenManager.Seq_Algebra.Is_Member (The_Heap => The_Heap, S => Imports, Given_Value => Multiply_Token) then Try_Adding_Self_Dependency (The_Heap => The_Heap, Export => Export, Other_Export => Export, Dependencies => Dependencies, Similar_Exports => Similar_Exports, Imports => Imports); Other_Export := LexTokenManager.Seq_Algebra.Next_Member (The_Heap => The_Heap, M => Export); loop exit when LexTokenManager.Seq_Algebra.Is_Null_Member (M => Other_Export); Try_Adding_Self_Dependency (The_Heap => The_Heap, Export => Export, Other_Export => Other_Export, Dependencies => Dependencies, Similar_Exports => Similar_Exports, Imports => Imports); Other_Export := LexTokenManager.Seq_Algebra.Next_Member (The_Heap => The_Heap, M => Other_Export); end loop; end if; end Try_Adding_Self_Dependencies; procedure Try_Without_Multiply (The_Heap : in out Heap.HeapRecord; Export : in LexTokenManager.Seq_Algebra.Member_Of_Seq; Exports : in LexTokenManager.Seq_Algebra.Seq; Dependencies : in LexTokenManager.Relation_Algebra.String.Relation; Similar_Exports : in out LexTokenManager.Seq_Algebra.Seq; Imports : in out LexTokenManager.Seq_Algebra.Seq) --# global in LexTokenManager.State; --# in Multiply_Token; --# in RW_Null_Token; --# in out Statistics.TableUsage; --# derives Imports, --# Similar_Exports from *, --# Dependencies, --# Export, --# Exports, --# LexTokenManager.State, --# Multiply_Token, --# RW_Null_Token, --# Similar_Exports, --# The_Heap & --# Statistics.TableUsage from *, --# Dependencies, --# Export, --# Exports, --# LexTokenManager.State, --# Multiply_Token, --# RW_Null_Token, --# The_Heap & --# The_Heap from *, --# Dependencies, --# Export, --# Exports, --# Imports, --# LexTokenManager.State, --# Multiply_Token, --# RW_Null_Token, --# Similar_Exports; is Trial_Similar_Exports, Trial_Imports : LexTokenManager.Seq_Algebra.Seq; Export_Val : LexTokenManager.Lex_String; Dependencies_Without_Multiply : 
LexTokenManager.Relation_Algebra.String.Relation; begin Export_Val := LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => Export); LexTokenManager.Relation_Algebra.String.Create_Relation (The_Heap => The_Heap, R => Dependencies_Without_Multiply); Substitute_Self_For_Multiply (The_Heap => The_Heap, Exports => Exports, Dependencies_With_Multiply => Dependencies, Dependencies_Without_Multiply => Dependencies_Without_Multiply); LexTokenManager.Seq_Algebra.Create_Seq (The_Heap => The_Heap, S => Trial_Similar_Exports); LexTokenManager.Relation_Algebra.String.Row_Extraction (The_Heap => The_Heap, R => Dependencies_Without_Multiply, Given_Index => Export_Val, S => Trial_Imports); Find_Similar_Exports (The_Heap => The_Heap, Export => Export, Dependencies => Dependencies_Without_Multiply, Similar_Exports => Trial_Similar_Exports, Imports => Trial_Imports); if Sequence_Length (The_Heap => The_Heap, S => Trial_Similar_Exports) > Sequence_Length (The_Heap => The_Heap, S => Similar_Exports) then Swap_Sequence (A => Similar_Exports, B => Trial_Similar_Exports); Swap_Sequence (A => Imports, B => Trial_Imports); end if; LexTokenManager.Seq_Algebra.Dispose_Of_Seq (The_Heap => The_Heap, S => Trial_Similar_Exports); LexTokenManager.Seq_Algebra.Dispose_Of_Seq (The_Heap => The_Heap, S => Trial_Imports); LexTokenManager.Relation_Algebra.String.Dispose_Of_Relation (The_Heap => The_Heap, R => Dependencies_Without_Multiply); end Try_Without_Multiply; procedure Remove_Members (The_Heap : in out Heap.HeapRecord; A, B : in LexTokenManager.Seq_Algebra.Seq) --# global in LexTokenManager.State; --# derives The_Heap from *, --# A, --# B, --# LexTokenManager.State; is M, Next : LexTokenManager.Seq_Algebra.Member_Of_Seq; Value_Of_M : LexTokenManager.Lex_String; begin M := LexTokenManager.Seq_Algebra.First_Member (The_Heap => The_Heap, S => A); loop exit when LexTokenManager.Seq_Algebra.Is_Null_Member (M => M); Value_Of_M := LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => M); if LexTokenManager.Seq_Algebra.Is_Member (The_Heap => The_Heap, S => B, Given_Value => Value_Of_M) then Next := LexTokenManager.Seq_Algebra.Next_Member (The_Heap => The_Heap, M => M); LexTokenManager.Seq_Algebra.Remove_Member (The_Heap => The_Heap, S => A, Given_Value => Value_Of_M); M := Next; else M := LexTokenManager.Seq_Algebra.Next_Member (The_Heap => The_Heap, M => M); end if; end loop; end Remove_Members; begin -- Create relations to store sequences of exports and their imports. -- These two relations will be indexed by Compressed_Index, so for any given value -- of Compressed_Index one relation gives the sequence of exports and the other -- provides the sequence of imports from which they are derived. LexTokenManager.Relation_Algebra.Create_Relation (The_Heap => The_Heap, R => Compressed_Exports); LexTokenManager.Relation_Algebra.Create_Relation (The_Heap => The_Heap, R => Compressed_Imports); Compressed_Index := Natural'First; Export := LexTokenManager.Seq_Algebra.First_Member (The_Heap => The_Heap, S => Exports); -- Loop condition avoids potential RTE. Compressed_Index can never exceed bounds of -- Natural in practice because it will never exceed the number of exports. while Compressed_Index < Natural'Last loop exit when LexTokenManager.Seq_Algebra.Is_Null_Member (M => Export); -- Look for other exports with the same dependency list, and store them -- in the sequence Similar_Exports. 
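         -- Compression sketch (the names X, Y, Z, A and B are hypothetical).
         -- Given the uncompressed clauses
         --
         --    --# derives X from A, B &
         --    --#         Y from A, B &
         --    --#         Z from Z, A;
         --
         -- X and Y share an import list and are grouped into one compressed
         -- entry ("X, Y from A, B"), while Z keeps its own entry and is
         -- written with a self-dependency as "Z from *, A".
         -- Try_Adding_Self_Dependencies and Try_Without_Multiply probe
         -- whether adding self-dependencies or dropping the '*' would yield
         -- a larger group of similar exports.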
Export_Val := LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => Export); if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Export_Val, Lex_Str2 => RW_Null_Token) /= LexTokenManager.Str_Eq then -- null derives comes last so ignore it here LexTokenManager.Seq_Algebra.Create_Seq (The_Heap => The_Heap, S => Similar_Exports); LexTokenManager.Relation_Algebra.String.Row_Extraction (The_Heap => The_Heap, R => Dependencies, Given_Index => Export_Val, S => Imports); Find_Similar_Exports (The_Heap => The_Heap, Export => Export, Dependencies => Dependencies, Similar_Exports => Similar_Exports, Imports => Imports); Try_Adding_Self_Dependencies (The_Heap => The_Heap, Export => Export, Dependencies => Dependencies, Similar_Exports => Similar_Exports, Imports => Imports); Try_Without_Multiply (The_Heap => The_Heap, Export => Export, Exports => Exports, Dependencies => Dependencies, Similar_Exports => Similar_Exports, Imports => Imports); -- Rather than write the dependencies out directly here (in whatever order they -- happen to be in) we build a new, compressed, dependency relation to be written -- out (in a given order) once it is complete. -- -- Note that if an export has no similar exports it *is* added to the compressed -- sequence but it is *not* removed from the sequence of exports. (The implementations -- of Find_Similar_Exports, Try_Adding_Self_Dependencies and Try_Without_Multiply depend on -- this being the case.) Consequently, the sequence of exports should not be used -- after calling this subprogram. Compressed_Index := Compressed_Index + 1; LexTokenManager.Relation_Algebra.Insert_Pair (The_Heap => The_Heap, R => Compressed_Exports, I => Compressed_Index, J => Export_Val); LexTokenManager.Relation_Algebra.Add_Row (The_Heap => The_Heap, R => Compressed_Imports, I => Compressed_Index, S => Imports); if LexTokenManager.Seq_Algebra.Is_Empty_Seq (The_Heap => The_Heap, S => Similar_Exports) then Export := LexTokenManager.Seq_Algebra.Next_Member (The_Heap => The_Heap, M => Export); else LexTokenManager.Relation_Algebra.Add_Row (The_Heap => The_Heap, R => Compressed_Exports, I => Compressed_Index, S => Similar_Exports); Remove_Members (The_Heap => The_Heap, A => Exports, B => Similar_Exports); Next_Export := LexTokenManager.Seq_Algebra.Next_Member (The_Heap => The_Heap, M => Export); LexTokenManager.Seq_Algebra.Remove_Member (The_Heap => The_Heap, S => Exports, Given_Value => Export_Val); Export := Next_Export; end if; LexTokenManager.Seq_Algebra.Dispose_Of_Seq (The_Heap => The_Heap, S => Similar_Exports); LexTokenManager.Seq_Algebra.Dispose_Of_Seq (The_Heap => The_Heap, S => Imports); else -- if null token move on to next item Export := LexTokenManager.Seq_Algebra.Next_Member (The_Heap => The_Heap, M => Export); end if; end loop; end Build_Compressed_Dependencies; procedure Write_Dependencies_Alphabetic (The_Heap : in out Heap.HeapRecord; Output : in SPARK_IO.File_Type; Start_Col : in E_Strings.Positions; Export_Width : in E_Strings.Lengths; First_Export : in out Boolean; Compressed_Exports : in LexTokenManager.Relation_Algebra.Relation; Compressed_Imports : in LexTokenManager.Relation_Algebra.Relation; Compressed_Index : in Natural) --# global in Annotation.Start_String; --# in LexTokenManager.State; --# in Multiply_Token; --# in SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives First_Export, --# Statistics.TableUsage, --# The_Heap from *, --# Compressed_Exports, --# Compressed_Imports, --# 
Compressed_Index, --# LexTokenManager.State, --# Multiply_Token, --# The_Heap & --# SPARK_IO.File_Sys from *, --# Annotation.Start_String, --# Compressed_Exports, --# Compressed_Imports, --# Compressed_Index, --# Export_Width, --# First_Export, --# LexTokenManager.State, --# Multiply_Token, --# Output, --# SparkFormatCommandLineData.Content, --# Start_Col, --# The_Heap; is Relations_Written : SeqAlgebra.Seq; Local_Index : Natural; Relation_Count : Natural; First_Alpha_Index : Natural; First_Alpha_Export : LexTokenManager.Lex_String; Exports_To_Write : LexTokenManager.Seq_Algebra.Seq; Imports_To_Write : LexTokenManager.Seq_Algebra.Seq; The_Iterator : SPARKProgram.Iteration.Iterator; Current_String : LexTokenManager.Lex_String; procedure Write_Dependencies (Output : in SPARK_IO.File_Type; Start_Col : in E_Strings.Positions; Export_Width : in E_Strings.Lengths; First_Export : in out Boolean; The_Heap : in out Heap.HeapRecord; Exports : in LexTokenManager.Seq_Algebra.Seq; Imports : in LexTokenManager.Seq_Algebra.Seq) --# global in Annotation.Start_String; --# in LexTokenManager.State; --# in Multiply_Token; --# in SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives First_Export from * & --# SPARK_IO.File_Sys from *, --# Annotation.Start_String, --# Exports, --# Export_Width, --# First_Export, --# Imports, --# LexTokenManager.State, --# Multiply_Token, --# Output, --# SparkFormatCommandLineData.Content, --# Start_Col, --# The_Heap & --# The_Heap from *, --# Imports, --# LexTokenManager.State, --# Multiply_Token; is The_Iterator : SPARKProgram.Iteration.Iterator; procedure Write_Similar_Export (Output : in SPARK_IO.File_Type; Start_Col : in E_Strings.Positions; The_Heap : in Heap.HeapRecord; Export : in LexTokenManager.Seq_Algebra.Member_Of_Seq) --# global in Annotation.Start_String; --# in LexTokenManager.State; --# in SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Annotation.Start_String, --# Export, --# LexTokenManager.State, --# Output, --# SparkFormatCommandLineData.Content, --# Start_Col, --# The_Heap; is begin SPARK_IO.Put_Line (Output, ",", 0); Annotation.Write (Output => Output, Start_Col => Start_Col); SPARK_IO.Set_Col (Output, Export_Col (Start_Col => Start_Col)); E_Strings.Put_String (File => Output, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => Export))); end Write_Similar_Export; begin -- A dependency clause consists of <export list> from <import list>, so start with -- the exports. 
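            -- Schematically, for a compressed entry with exports {X, Y} and
            -- imports {A, B} (hypothetical names), the clause emitted here is
            -- "X, Y from A, B", with line breaks and columns governed by the
            -- Export_Indent, Separator_Indent and Import_Indent settings in
            -- SparkFormatCommandLineData.Content.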
SPARKProgram.Iteration.Initialise (The_Heap => The_Heap, The_Seq => Exports, The_Iterator => The_Iterator); Write_Export (First_Export => First_Export, Output => Output, Start_Col => Start_Col, The_Heap => The_Heap, Export => SPARKProgram.Iteration.Current_Member (The_Iterator => The_Iterator)); SPARKProgram.Iteration.Next (The_Heap => The_Heap, The_Iterator => The_Iterator); -- if there are more exports derived from the same imports, write them out while not SPARKProgram.Iteration.Complete (The_Iterator => The_Iterator) loop Write_Similar_Export (Output => Output, Start_Col => Start_Col, The_Heap => The_Heap, Export => SPARKProgram.Iteration.Current_Member (The_Iterator => The_Iterator)); SPARKProgram.Iteration.Next (The_Heap => The_Heap, The_Iterator => The_Iterator); end loop; -- from Write_From (Output => Output, Start_Col => Start_Col, Export_Width => Export_Width); -- the imports Write_Imports (Output => Output, Start_Col => Start_Col, Export_Width => Export_Width, The_Heap => The_Heap, Imports => Imports); end Write_Dependencies; begin -- Need to find the alphabetical first member out of all the export sequences in -- the Exports_To_Write relation. The corresponding dependency can then be written -- out, after which it can be disposed of. Repeat until all relations have been -- written. SeqAlgebra.CreateSeq (TheHeap => The_Heap, S => Relations_Written); Relation_Count := Compressed_Index; -- Keep a record of the total number of relations loop -- until all exports have been written First_Alpha_Index := Natural'First; Local_Index := Relation_Count; First_Alpha_Export := LexTokenManager.Null_String; -- Go through all the export sequences (skipping those we have already written) looking -- for the first export in alphabetical order. while Local_Index > Natural'First loop if not SeqAlgebra.IsMember (TheHeap => The_Heap, S => Relations_Written, GivenValue => Local_Index) then LexTokenManager.Relation_Algebra.Row_Extraction (The_Heap => The_Heap, R => Compressed_Exports, Given_Index => Local_Index, S => Exports_To_Write); -- Find the first member of the current export sequence. -- In this case we use initialise to find us the first member but we never go on to iterate -- over the list. SPARKProgram.Iteration.Initialise (The_Heap => The_Heap, The_Seq => Exports_To_Write, The_Iterator => The_Iterator); -- If this is the first iteration then the item just returned must be the best match so far. -- Otherwise, if it is better than our current best match then this export sequence becomes the one to --write Current_String := SPARKProgram.Iteration.Current_String (The_Iterator => The_Iterator); if First_Alpha_Index = Natural'First or else -- always do it on first iteration LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Current_String, Lex_Str2 => First_Alpha_Export) = LexTokenManager.Str_First then First_Alpha_Export := Current_String; First_Alpha_Index := Local_Index; end if; end if; Local_Index := Local_Index - 1; end loop; if First_Alpha_Index = Natural'First then -- defensive - should always be false exit; end if; -- First_Alpha_Index gives us the relation index for the next dependency to be written -- Write it out, then add it to the sequence of relations that have been written. 
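         -- For example, if the entries still to be written start with the
         -- hypothetical exports "Speed" and "Altitude", the comparison above
         -- selects the entry starting with "Altitude" for this pass, since it
         -- sorts first case-insensitively.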
LexTokenManager.Relation_Algebra.Row_Extraction (The_Heap => The_Heap, R => Compressed_Exports, Given_Index => First_Alpha_Index, S => Exports_To_Write); LexTokenManager.Relation_Algebra.Row_Extraction (The_Heap => The_Heap, R => Compressed_Imports, Given_Index => First_Alpha_Index, S => Imports_To_Write); Write_Dependencies (Output => Output, Start_Col => Start_Col, Export_Width => Export_Width, First_Export => First_Export, The_Heap => The_Heap, Exports => Exports_To_Write, Imports => Imports_To_Write); LexTokenManager.Seq_Algebra.Dispose_Of_Seq (The_Heap => The_Heap, S => Exports_To_Write); LexTokenManager.Seq_Algebra.Dispose_Of_Seq (The_Heap => The_Heap, S => Imports_To_Write); SeqAlgebra.AddMember (TheHeap => The_Heap, S => Relations_Written, GivenValue => First_Alpha_Index); -- Exit if all relations have now been added to Relations_Written exit when SeqAlgebra.Length (The_Heap, Relations_Written) >= Relation_Count; end loop; -- until all relations written SeqAlgebra.DisposeOfSeq (The_Heap, Relations_Written); end Write_Dependencies_Alphabetic; begin -- Write_Similar_Clauses -- Compress the dependencies if possible. Store the results in Compressed_Exports -- and Compressed_Imports, indexed by Compressed_Index. Build_Compressed_Dependencies (The_Heap => The_Heap, Dependencies => Dependencies, Exports => Exports, Compressed_Exports => Compressed_Exports, Compressed_Imports => Compressed_Imports, Compressed_Index => Compressed_Index); -- Extract the compressed dependencies and write them out. Write_Dependencies_Alphabetic (The_Heap => The_Heap, Output => Output, Start_Col => Start_Col, Export_Width => Export_Width, First_Export => First_Export, Compressed_Exports => Compressed_Exports, Compressed_Imports => Compressed_Imports, Compressed_Index => Compressed_Index); end Write_Similar_Clauses; procedure Write_Clauses (Output : in SPARK_IO.File_Type; Start_Col : in E_Strings.Positions; Export_Width : in E_Strings.Lengths; First_Export : in out Boolean; The_Heap : in out Heap.HeapRecord; Exports : in LexTokenManager.Seq_Algebra.Seq; Dependencies : in LexTokenManager.Relation_Algebra.String.Relation) --# global in Annotation.Start_String; --# in LexTokenManager.State; --# in Multiply_Token; --# in RW_Null_Token; --# in SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives First_Export, --# Statistics.TableUsage, --# The_Heap from *, --# Dependencies, --# Exports, --# LexTokenManager.State, --# Multiply_Token, --# RW_Null_Token, --# The_Heap & --# SPARK_IO.File_Sys from *, --# Annotation.Start_String, --# Dependencies, --# Exports, --# Export_Width, --# First_Export, --# LexTokenManager.State, --# Multiply_Token, --# Output, --# RW_Null_Token, --# SparkFormatCommandLineData.Content, --# Start_Col, --# The_Heap; is Imports : LexTokenManager.Seq_Algebra.Seq; Export_Val : LexTokenManager.Lex_String; Dependencies_Without_Multiply : LexTokenManager.Relation_Algebra.String.Relation; The_Iterator : SPARKProgram.Iteration.Iterator; begin -- This writes out the clauses in uncompressed form, ie no lists of exports sharing the -- same list of imports, and no '*'. 
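      -- Expansion sketch (hypothetical clause): a compressed dependency such
      -- as "X, Y from *, A" is written back out as one clause per export,
      -- with the '*' replaced by the export's own name, so X is derived from
      -- A and X, and Y from A and Y; any null dependency clause is appended
      -- afterwards by Write_Null_Import_List.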
LexTokenManager.Relation_Algebra.String.Create_Relation (The_Heap => The_Heap, R => Dependencies_Without_Multiply); Substitute_Self_For_Multiply (The_Heap => The_Heap, Exports => Exports, Dependencies_With_Multiply => Dependencies, Dependencies_Without_Multiply => Dependencies_Without_Multiply); SPARKProgram.Iteration.Initialise (The_Heap => The_Heap, The_Seq => Exports, The_Iterator => The_Iterator); while not SPARKProgram.Iteration.Complete (The_Iterator => The_Iterator) loop Export_Val := LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => SPARKProgram.Iteration.Current_Member (The_Iterator => The_Iterator)); -- Null dependency clause must come last so don't write it here if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Export_Val, Lex_Str2 => RW_Null_Token) /= LexTokenManager.Str_Eq then -- export Write_Export (First_Export => First_Export, Output => Output, Start_Col => Start_Col, The_Heap => The_Heap, Export => SPARKProgram.Iteration.Current_Member (The_Iterator => The_Iterator)); Write_From (Output => Output, Start_Col => Start_Col, Export_Width => Export_Width); --from -- imports LexTokenManager.Relation_Algebra.String.Row_Extraction (The_Heap => The_Heap, R => Dependencies_Without_Multiply, Given_Index => Export_Val, S => Imports); Write_Imports (Output => Output, Start_Col => Start_Col, Export_Width => Export_Width, The_Heap => The_Heap, Imports => Imports); LexTokenManager.Seq_Algebra.Dispose_Of_Seq (The_Heap => The_Heap, S => Imports); end if; SPARKProgram.Iteration.Next (The_Heap => The_Heap, The_Iterator => The_Iterator); end loop; LexTokenManager.Relation_Algebra.String.Dispose_Of_Relation (The_Heap => The_Heap, R => Dependencies_Without_Multiply); end Write_Clauses; procedure Write_Null_Import_List (Output : in SPARK_IO.File_Type; Start_Col : in E_Strings.Positions; Export_Width : in E_Strings.Lengths; First_Export : in Boolean; The_Heap : in out Heap.HeapRecord; Dependencies : in LexTokenManager.Relation_Algebra.String.Relation) --# global in Annotation.Start_String; --# in LexTokenManager.State; --# in RW_Null_Token; --# in SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives SPARK_IO.File_Sys from *, --# Annotation.Start_String, --# Dependencies, --# Export_Width, --# First_Export, --# LexTokenManager.State, --# Output, --# RW_Null_Token, --# SparkFormatCommandLineData.Content, --# Start_Col, --# The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# Dependencies, --# LexTokenManager.State, --# RW_Null_Token, --# The_Heap; is Imports : LexTokenManager.Seq_Algebra.Seq; Import : LexTokenManager.Seq_Algebra.Member_Of_Seq; First_Import : Boolean; The_Iterator : SPARKProgram.Iteration.Iterator; procedure Write_Null (First_Export : in Boolean; Output : in SPARK_IO.File_Type; Start_Col : in E_Strings.Positions) --# global in Annotation.Start_String; --# in SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Annotation.Start_String, --# First_Export, --# Output, --# SparkFormatCommandLineData.Content, --# Start_Col; is begin if not First_Export then SPARK_IO.Put_Line (Output, " &", 0); Annotation.Write (Output => Output, Start_Col => Start_Col); elsif SparkFormatCommandLineData.Content.Export_Indent /= SparkFormatCommandLineData.Inline then Annotation.Write (Output => Output, Start_Col => Start_Col); end if; SPARK_IO.Set_Col (Output, Export_Col (Start_Col => Start_Col)); SPARK_IO.Put_String (Output, "null", 0); end 
Write_Null; begin LexTokenManager.Relation_Algebra.String.Row_Extraction (The_Heap => The_Heap, R => Dependencies, Given_Index => RW_Null_Token, S => Imports); -- This procedure can be called even if there is no null import list. If the -- sequence is null then don't write it out. Import := LexTokenManager.Seq_Algebra.First_Member (The_Heap => The_Heap, S => Imports); if not LexTokenManager.Seq_Algebra.Is_Null_Member (M => Import) then SPARKProgram.Iteration.Initialise (The_Heap => The_Heap, The_Seq => Imports, The_Iterator => The_Iterator); Write_Null (First_Export => First_Export, Output => Output, Start_Col => Start_Col); -- null Write_From (Output => Output, Start_Col => Start_Col, Export_Width => Export_Width); -- from -- Now write out the list of imports in the requested order First_Import := True; Write_Import (First_Import => First_Import, Output => Output, Start_Col => Start_Col, Export_Width => Export_Width, The_Heap => The_Heap, Import => SPARKProgram.Iteration.Current_Member (The_Iterator => The_Iterator)); SPARKProgram.Iteration.Next (The_Heap => The_Heap, The_Iterator => The_Iterator); while not SPARKProgram.Iteration.Complete (The_Iterator => The_Iterator) loop Write_Import (First_Import => First_Import, Output => Output, Start_Col => Start_Col, Export_Width => Export_Width, The_Heap => The_Heap, Import => SPARKProgram.Iteration.Current_Member (The_Iterator => The_Iterator)); SPARKProgram.Iteration.Next (The_Heap => The_Heap, The_Iterator => The_Iterator); end loop; end if; LexTokenManager.Seq_Algebra.Dispose_Of_Seq (The_Heap => The_Heap, S => Imports); end Write_Null_Import_List; begin if OK then Annotation.Write (Output => Output, Start_Col => Start_Col); Write_Derives (Output => Output, Start_Col => Start_Col); First_Export := True; Export_Width := Longest_Export (The_Heap => The_Heap, Exports => Exports); case SparkFormatCommandLineData.Content.Operation is when SparkFormatCommandLineData.Compress => Write_Similar_Clauses (Output => Output, Start_Col => Start_Col, Export_Width => Export_Width, First_Export => First_Export, The_Heap => The_Heap, Exports => Exports, Dependencies => Dependencies); when SparkFormatCommandLineData.Expand => Write_Clauses (Output => Output, Start_Col => Start_Col, Export_Width => Export_Width, First_Export => First_Export, The_Heap => The_Heap, Exports => Exports, Dependencies => Dependencies); end case; Write_Null_Import_List (Output => Output, Start_Col => Start_Col, Export_Width => Export_Width, First_Export => First_Export, The_Heap => The_Heap, Dependencies => Dependencies); SPARK_IO.Put_Line (Output, ";", 0); else File_IO.Reset (Temporary_File => Temporary_File); SPARKProgram.Copy (Input => Temporary_File, Output => Output); end if; LexTokenManager.Seq_Algebra.Dispose_Of_Seq (The_Heap => The_Heap, S => Exports); LexTokenManager.Relation_Algebra.String.Dispose_Of_Relation (The_Heap => The_Heap, R => Dependencies); File_IO.Delete (Temporary_File => Temporary_File); end Reformat; end Derives_List; package body Declare_List is Keyword : constant String := "declare"; -- checks it's at the start of a line up to "--# declare" function Is_Start (Input_Line : E_Strings.T) return Boolean is begin return SPARK_Keyword_Matching.Match_At_Start (Input_Line => Input_Line, Match_Keyword => Keyword); end Is_Start; -- checks for ";" at end of line function Is_End (Input_Line : E_Strings.T) return Boolean is Result : Boolean; begin Result := False; for I in E_Strings.Lengths range E_Strings.Positions'First .. 
E_Strings.Get_Length (E_Str => Input_Line) loop if E_Strings.Get_Element (E_Str => Input_Line, Pos => I) = ';' then Result := True; exit; end if; end loop; return Result; end Is_End; end Declare_List; package body Proof_List is Keyword_Pre : constant String := "pre"; Keyword_Post : constant String := "post"; Keyword_Return : constant String := "return"; -- checks it's at the start of a line up to "--# pre" or "--# post" or "--# return" function Is_Start (Input_Line : E_Strings.T) return Boolean is begin return SPARK_Keyword_Matching.Match_At_Start (Input_Line => Input_Line, Match_Keyword => Keyword_Pre) or SPARK_Keyword_Matching.Match_At_Start (Input_Line => Input_Line, Match_Keyword => Keyword_Post) or SPARK_Keyword_Matching.Match_At_Start (Input_Line => Input_Line, Match_Keyword => Keyword_Return); end Is_Start; -- The 'Is_End' for proof annotations below is not called anywhere, and thus has -- not been tested. -- checks for ";" at end of line function Is_End (Input_Line : E_Strings.T) return Boolean is Result : Boolean; begin Result := False; for I in E_Strings.Lengths range E_Strings.Positions'First .. E_Strings.Get_Length (E_Str => Input_Line) loop if E_Strings.Get_Element (E_Str => Input_Line, Pos => I) = ';' then Result := True; exit; end if; end loop; return Result; end Is_End; end Proof_List; package body Globals_List is Keyword : constant String := "global"; Consecutive_Empty_Lines : Natural := 0; -- checks it's at the start of a line up to "--# global" function Is_Start (Input_Line : E_Strings.T) return Boolean is begin return SPARK_Keyword_Matching.Match_At_Start (Input_Line => Input_Line, Match_Keyword => Keyword); end Is_Start; --Here the end of a list of global declarations is detected. --Unfortunately, the SPARK syntax for globals does not have a unique terminator. --The ';' is used as a separator between different modes or as a terminator. Thus, --The end of a list of globals is detected by the end of a SPARK comment block, or --the start of a new comment item. Every possible following SPARK annotation item --needs to be considered, otherwise the real end of a global declaration may not be --detected. 
See SEPR: 2674 function Is_End (Input_Line : E_Strings.T) return Boolean is Result : Boolean; Is_Annotation_Start : Boolean; Index : E_Strings.Lengths; begin if Derives_List.Is_Start (Input_Line => Input_Line) or Declare_List.Is_Start (Input_Line => Input_Line) or Proof_List.Is_Start (Input_Line => Input_Line) or Accept_List.Is_Start (Input_Line => Input_Line) or End_List.Is_Start (Input_Line => Input_Line) or For_List.Is_Start (Input_Line => Input_Line) or Function_List.Is_Start (Input_Line => Input_Line) or Type_List.Is_Start (Input_Line => Input_Line) or Assert_List.Is_Start (Input_Line => Input_Line) then Result := True; else Index := 1; --# accept Flow_Message, 10, Index, "Final value of Index is not required"; Annotation.Is_Start (Input_Line => Input_Line, Index => Index, OK => Is_Annotation_Start); --# end accept; Result := not (Is_Annotation_Start or E_Strings.Is_Empty (E_Str => E_Strings.Trim (Input_Line))); end if; return Result; end Is_End; procedure Increment_Consecutive_Empty_Lines is begin Consecutive_Empty_Lines := Consecutive_Empty_Lines + 1; end Increment_Consecutive_Empty_Lines; procedure Reset_Consecutive_Empty_Lines is begin Consecutive_Empty_Lines := 0; end Reset_Consecutive_Empty_Lines; function No_Empty_Lines return Boolean is begin return Consecutive_Empty_Lines = 0; end No_Empty_Lines; procedure Parse (Temporary_File : in out SPARK_IO.File_Type; Start_Col : out E_Strings.Positions; The_Heap : in out Heap.HeapRecord; Global_Variables : out Global_Variables_Type; OK : out Boolean) is Token : SP_Symbols.SP_Terminal; Lex_Val : LexTokenManager.Lex_Value; procedure Create (The_Heap : in out Heap.HeapRecord; Global_Variables : out Global_Variables_Type) --# global in out Statistics.TableUsage; --# derives Global_Variables, --# The_Heap from The_Heap & --# Statistics.TableUsage from *, --# The_Heap; is Unmoded_Global_Variables, In_Global_Variables, In_Out_Global_Variables, Out_Global_Variables : LexTokenManager.Seq_Algebra.Seq; begin LexTokenManager.Seq_Algebra.Create_Seq (The_Heap => The_Heap, S => Unmoded_Global_Variables); LexTokenManager.Seq_Algebra.Create_Seq (The_Heap => The_Heap, S => In_Global_Variables); LexTokenManager.Seq_Algebra.Create_Seq (The_Heap => The_Heap, S => In_Out_Global_Variables); LexTokenManager.Seq_Algebra.Create_Seq (The_Heap => The_Heap, S => Out_Global_Variables); Global_Variables := Global_Variables_Type' (Unmoded => Unmoded_Global_Variables, In_Mode => In_Global_Variables, In_Out_Mode => In_Out_Global_Variables, Out_Mode => Out_Global_Variables); end Create; procedure Parse_Global_Variable_Clauses (Temporary_File : in SPARK_IO.File_Type; Token : in SP_Symbols.SP_Terminal; Lex_Val : in LexTokenManager.Lex_Value; The_Heap : in out Heap.HeapRecord; Global_Variables : in Global_Variables_Type; OK : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives ErrorHandler.Error_Context, --# LexTokenManager.State, --# OK, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lex_Val, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Temporary_File, --# Token & --# Statistics.TableUsage, --# The_Heap from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Global_Variables, --# LexTokenManager.State, --# Lex_Val, --# 
SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Temporary_File, --# The_Heap, --# Token; is Next_Token : SP_Symbols.SP_Terminal; Next_Lex_Val : LexTokenManager.Lex_Value; procedure Parse_Global_Variable_List (Temporary_File : in SPARK_IO.File_Type; Token : in out SP_Symbols.SP_Terminal; Lex_Val : in out LexTokenManager.Lex_Value; The_Heap : in out Heap.HeapRecord; Global_Variables : in Global_Variables_Type; OK : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lex_Val, --# OK, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# Lex_Val, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Temporary_File, --# Token & --# Statistics.TableUsage, --# The_Heap from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Global_Variables, --# LexTokenManager.State, --# Lex_Val, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Temporary_File, --# The_Heap, --# Token; is Mode : Modes; Dotted_Simple_Name : E_Strings.T; Global_Variable : LexTokenManager.Lex_String; begin OK := True; Mode := Unmoded; case Token is when SP_Symbols.RWin => File_IO.Lex (Temporary_File => Temporary_File, Token => Token, Lex_Val => Lex_Val); case Token is when SP_Symbols.RWout => Mode := In_Out_Mode; File_IO.Lex (Temporary_File => Temporary_File, Token => Token, Lex_Val => Lex_Val); when SP_Symbols.identifier => Mode := In_Mode; when others => OK := False; end case; when SP_Symbols.RWout => File_IO.Lex (Temporary_File => Temporary_File, Token => Token, Lex_Val => Lex_Val); Mode := Out_Mode; when SP_Symbols.identifier => Mode := Unmoded; when others => OK := False; end case; loop exit when not OK; case Token is when SP_Symbols.identifier => Dotted_Simple_Name := LexTokenManager.Lex_String_To_String (Lex_Str => Lex_Val.Token_Str); File_IO.Lex (Temporary_File => Temporary_File, Token => Token, Lex_Val => Lex_Val); loop exit when Token /= SP_Symbols.point; E_Strings.Append_String (E_Str => Dotted_Simple_Name, Str => "."); File_IO.Lex (Temporary_File => Temporary_File, Token => Token, Lex_Val => Lex_Val); if Token = SP_Symbols.identifier then E_Strings.Append_Examiner_String (E_Str1 => Dotted_Simple_Name, E_Str2 => LexTokenManager.Lex_String_To_String (Lex_Str => Lex_Val.Token_Str)); File_IO.Lex (Temporary_File => Temporary_File, Token => Token, Lex_Val => Lex_Val); else OK := False; end if; exit when not OK; end loop; if OK then LexTokenManager.Insert_Examiner_String (Str => Dotted_Simple_Name, Lex_Str => Global_Variable); LexTokenManager.Seq_Algebra.Add_Member (The_Heap => The_Heap, S => Global_Variables (Mode), Given_Value => Global_Variable); end if; when SP_Symbols.comma => null; when others => OK := False; end case; exit when not OK or Token = SP_Symbols.semicolon; if Token = SP_Symbols.comma then File_IO.Lex (Temporary_File => Temporary_File, Token => Token, Lex_Val => Lex_Val); else OK := False; end if; end loop; end Parse_Global_Variable_List; begin OK := True; Next_Token := Token; Next_Lex_Val := Lex_Val; loop exit when Next_Token = SP_Symbols.annotation_end; Parse_Global_Variable_List (Temporary_File => Temporary_File, Token => Next_Token, Lex_Val => Next_Lex_Val, The_Heap => The_Heap, Global_Variables => 
Global_Variables, OK => OK); if OK then File_IO.Lex (Temporary_File => Temporary_File, Token => Next_Token, Lex_Val => Next_Lex_Val); case Next_Token is when SP_Symbols.annotation_end | SP_Symbols.RWin | SP_Symbols.RWout | SP_Symbols.identifier => null; when others => OK := False; end case; end if; exit when not OK; end loop; end Parse_Global_Variable_Clauses; begin File_IO.Reset (Temporary_File => Temporary_File); SparkLex.Clear_Line_Context; Create (The_Heap => The_Heap, Global_Variables => Global_Variables); File_IO.Lex (Temporary_File => Temporary_File, Token => Token, Lex_Val => Lex_Val); if Token = SP_Symbols.annotation_start then Start_Col := Lex_Val.Position.Start_Pos; -- we know Start_Col can't be zero because the line can't be empty --# accept Flow_Message, 10, Lex_Val, "The next symbol should be 'global', so we don't need the Lex_Val"; File_IO.Lex (Temporary_File => Temporary_File, Token => Token, Lex_Val => Lex_Val); --# end accept; if Token = SP_Symbols.RWglobal then File_IO.Lex (Temporary_File => Temporary_File, Token => Token, Lex_Val => Lex_Val); Parse_Global_Variable_Clauses (Temporary_File => Temporary_File, Token => Token, Lex_Val => Lex_Val, The_Heap => The_Heap, Global_Variables => Global_Variables, OK => OK); else OK := False; end if; else OK := False; end if; --# accept Flow_Message, 602, Start_Col, Start_Col, "Start_Col may not be set if not OK"; end Parse; procedure Reformat (OK : in Boolean; Temporary_File : in out SPARK_IO.File_Type; Output : in SPARK_IO.File_Type; Start_Col : in E_Strings.Positions; The_Heap : in out Heap.HeapRecord; Function_Or_Procedure : in Subprogram.Subprogram_Type; Global_Variables : in Global_Variables_Type; Imports : in LexTokenManager.Seq_Algebra.Seq; Exports : in LexTokenManager.Seq_Algebra.Seq) is Revised_Global_Variables : Global_Variables_Type; First_Global_Variable : Boolean; An_Export_Is_Excluded : Boolean; function Global_Col (Start_Col : E_Strings.Positions) return E_Strings.Positions is begin return Start_Col + 4; -- length of "--# " end Global_Col; procedure Write_Global (Output : in SPARK_IO.File_Type; Start_Col : in E_Strings.Positions) --# global in SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Output, --# SparkFormatCommandLineData.Content, --# Start_Col; is begin SPARK_IO.Set_Col (Output, Global_Col (Start_Col => Start_Col)); SPARK_IO.Put_String (Output, "global", 0); -- If GlobalIndent is not Inline, then don't put out a trailing space, -- since this will be rejected by GNAT -gnatyb style check mode. if SparkFormatCommandLineData.Content.Global_Indent = SparkFormatCommandLineData.Inline then SPARK_IO.Put_String (Output, " ", 0); end if; end Write_Global; procedure Remove_Excluded_Export (The_Heap : in out Heap.HeapRecord; Imports : in LexTokenManager.Seq_Algebra.Seq; Exports : in LexTokenManager.Seq_Algebra.Seq; Global_Variables : in Global_Variables_Type) --# global in LexTokenManager.State; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# The_Heap from *, --# Exports, --# Global_Variables, --# Imports, --# LexTokenManager.State, --# The_Heap; is Global_Variable : LexTokenManager.Seq_Algebra.Member_Of_Seq; Global_Variable_Val : LexTokenManager.Lex_String; begin -- Iterate through all global variable modes to remove -- excluded export and all imported variables exclusively -- related to the excluded exported variable. 
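-- Illustrative sketch (added commentary; the names X, Y and Z are
-- hypothetical): starting from a subprogram annotated with
--   --# global in X; in out Y; out Z;
-- where Z is the export named by the exclude_export option, the loops
-- below would remove Z from the out-mode set, keep Y in the in-out set
-- while it is still both imported and exported, and keep X while it is
-- still imported, so the revised sets correspond to
--   --# global in X; in out Y;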
-- An out mode global which is no longer exported should be removed Global_Variable := LexTokenManager.Seq_Algebra.First_Member (The_Heap => The_Heap, S => Global_Variables (Out_Mode)); while not LexTokenManager.Seq_Algebra.Is_Null_Member (M => Global_Variable) loop Global_Variable_Val := LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => Global_Variable); Global_Variable := LexTokenManager.Seq_Algebra.Next_Member (The_Heap => The_Heap, M => Global_Variable); if not LexTokenManager.Seq_Algebra.Is_Member (The_Heap => The_Heap, S => Exports, Given_Value => Global_Variable_Val) then LexTokenManager.Seq_Algebra.Remove_Member (The_Heap => The_Heap, S => Global_Variables (Out_Mode), Given_Value => Global_Variable_Val); end if; end loop; -- An in out mode global which is no longer exported should -- be removed from the in out mode globals and, if it is still -- an imported variable, it should be added to the in mode globals Global_Variable := LexTokenManager.Seq_Algebra.First_Member (The_Heap => The_Heap, S => Global_Variables (In_Out_Mode)); while not LexTokenManager.Seq_Algebra.Is_Null_Member (M => Global_Variable) loop Global_Variable_Val := LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => Global_Variable); Global_Variable := LexTokenManager.Seq_Algebra.Next_Member (The_Heap => The_Heap, M => Global_Variable); if not LexTokenManager.Seq_Algebra.Is_Member (The_Heap => The_Heap, S => Exports, Given_Value => Global_Variable_Val) then LexTokenManager.Seq_Algebra.Remove_Member (The_Heap => The_Heap, S => Global_Variables (In_Out_Mode), Given_Value => Global_Variable_Val); if LexTokenManager.Seq_Algebra.Is_Member (The_Heap => The_Heap, S => Imports, Given_Value => Global_Variable_Val) then LexTokenManager.Seq_Algebra.Add_Member (The_Heap => The_Heap, S => Global_Variables (In_Mode), Given_Value => Global_Variable_Val); end if; end if; end loop; -- An in mode global which is no longer imported should -- be removed from the in mode globals. Global_Variable := LexTokenManager.Seq_Algebra.First_Member (The_Heap => The_Heap, S => Global_Variables (In_Mode)); while not LexTokenManager.Seq_Algebra.Is_Null_Member (M => Global_Variable) loop Global_Variable_Val := LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => Global_Variable); Global_Variable := LexTokenManager.Seq_Algebra.Next_Member (The_Heap => The_Heap, M => Global_Variable); if not LexTokenManager.Seq_Algebra.Is_Member (The_Heap => The_Heap, S => Imports, Given_Value => Global_Variable_Val) then LexTokenManager.Seq_Algebra.Remove_Member (The_Heap => The_Heap, S => Global_Variables (In_Mode), Given_Value => Global_Variable_Val); end if; end loop; -- An unmoded global which is no longer exported or imported should -- be removed from the unmoded globals. 
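-- For example (hypothetical name S): an unmoded global S that, after the
-- exclusion, appears in neither Imports nor Exports is dropped by the
-- loop below, since it could no longer carry any information in the
-- reformatted annotation.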
Global_Variable := LexTokenManager.Seq_Algebra.First_Member (The_Heap => The_Heap, S => Global_Variables (Unmoded)); while not LexTokenManager.Seq_Algebra.Is_Null_Member (M => Global_Variable) loop Global_Variable_Val := LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => Global_Variable); Global_Variable := LexTokenManager.Seq_Algebra.Next_Member (The_Heap => The_Heap, M => Global_Variable); if not LexTokenManager.Seq_Algebra.Is_Member (The_Heap => The_Heap, S => Exports, Given_Value => Global_Variable_Val) and then not LexTokenManager.Seq_Algebra.Is_Member (The_Heap => The_Heap, S => Imports, Given_Value => Global_Variable_Val) then LexTokenManager.Seq_Algebra.Remove_Member (The_Heap => The_Heap, S => Global_Variables (Unmoded), Given_Value => Global_Variable_Val); end if; end loop; end Remove_Excluded_Export; procedure Add_Modes (The_Heap : in out Heap.HeapRecord; Function_Or_Procedure : in Subprogram.Subprogram_Type; Imports : in LexTokenManager.Seq_Algebra.Seq; Exports : in LexTokenManager.Seq_Algebra.Seq; Global_Variables : in out Global_Variables_Type) --# global in LexTokenManager.State; --# in SparkFormatCommandLineData.Content; --# in out Statistics.TableUsage; --# derives Global_Variables from *, --# Function_Or_Procedure, --# SparkFormatCommandLineData.Content, --# The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# Exports, --# Function_Or_Procedure, --# Global_Variables, --# Imports, --# LexTokenManager.State, --# SparkFormatCommandLineData.Content, --# The_Heap; is procedure Add_Procedure_Modes (The_Heap : in out Heap.HeapRecord; Imports : in LexTokenManager.Seq_Algebra.Seq; Exports : in LexTokenManager.Seq_Algebra.Seq; Global_Variables : in Global_Variables_Type) --# global in LexTokenManager.State; --# in out Statistics.TableUsage; --# derives Statistics.TableUsage, --# The_Heap from *, --# Exports, --# Global_Variables, --# Imports, --# LexTokenManager.State, --# The_Heap; is Global_Variable : LexTokenManager.Seq_Algebra.Member_Of_Seq; Global_Variable_Val : LexTokenManager.Lex_String; Revised_Mode : Modes; begin Global_Variable := LexTokenManager.Seq_Algebra.First_Member (The_Heap => The_Heap, S => Global_Variables (Unmoded)); loop Global_Variable_Val := LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => Global_Variable); if LexTokenManager.Seq_Algebra.Is_Member (The_Heap => The_Heap, S => Imports, Given_Value => Global_Variable_Val) then if LexTokenManager.Seq_Algebra.Is_Member (The_Heap => The_Heap, S => Exports, Given_Value => Global_Variable_Val) then Revised_Mode := In_Out_Mode; else Revised_Mode := In_Mode; end if; elsif LexTokenManager.Seq_Algebra.Is_Member (The_Heap => The_Heap, S => Exports, Given_Value => Global_Variable_Val) then Revised_Mode := Out_Mode; else Revised_Mode := Unmoded; end if; Global_Variable := LexTokenManager.Seq_Algebra.Next_Member (The_Heap => The_Heap, M => Global_Variable); if Revised_Mode /= Unmoded then LexTokenManager.Seq_Algebra.Remove_Member (The_Heap => The_Heap, S => Global_Variables (Unmoded), Given_Value => Global_Variable_Val); LexTokenManager.Seq_Algebra.Add_Member (The_Heap => The_Heap, S => Global_Variables (Revised_Mode), Given_Value => Global_Variable_Val); end if; exit when LexTokenManager.Seq_Algebra.Is_Null_Member (M => Global_Variable); end loop; end Add_Procedure_Modes; procedure Add_Function_Modes (The_Heap : in out Heap.HeapRecord; Global_Variables : in out Global_Variables_Type) --# global in LexTokenManager.State; --# in SparkFormatCommandLineData.Content; --# in 
out Statistics.TableUsage; --# derives Global_Variables from *, --# SparkFormatCommandLineData.Content, --# The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# Global_Variables, --# LexTokenManager.State, --# SparkFormatCommandLineData.Content, --# The_Heap; is procedure Copy (The_Heap : in out Heap.HeapRecord; Global_Variables : in out Global_Variables_Type; From_Mode : in Modes; To_Mode : in Modes) --# global in LexTokenManager.State; --# in out Statistics.TableUsage; --# derives Global_Variables from *, --# From_Mode, --# The_Heap, --# To_Mode & --# Statistics.TableUsage, --# The_Heap from *, --# From_Mode, --# Global_Variables, --# LexTokenManager.State, --# The_Heap, --# To_Mode; is From_List : LexTokenManager.Seq_Algebra.Seq; To_List : LexTokenManager.Seq_Algebra.Seq; Revised_List : LexTokenManager.Seq_Algebra.Seq; begin From_List := Global_Variables (From_Mode); To_List := Global_Variables (To_Mode); LexTokenManager.Seq_Algebra.Union (The_Heap => The_Heap, A => From_List, B => To_List, C => Revised_List); LexTokenManager.Seq_Algebra.Dispose_Of_Seq (The_Heap => The_Heap, S => To_List); LexTokenManager.Seq_Algebra.Reduction (The_Heap => The_Heap, A => From_List, B => Revised_List); Global_Variables (From_Mode) := From_List; Global_Variables (To_Mode) := Revised_List; end Copy; begin case SparkFormatCommandLineData.Content.Default_Function_Mode is when SparkFormatCommandLineData.Unmoded => Copy (The_Heap => The_Heap, Global_Variables => Global_Variables, From_Mode => In_Mode, To_Mode => Unmoded); when SparkFormatCommandLineData.In_Mode => Copy (The_Heap => The_Heap, Global_Variables => Global_Variables, From_Mode => Unmoded, To_Mode => In_Mode); end case; end Add_Function_Modes; begin case Function_Or_Procedure is when Subprogram.Is_Unknown | Subprogram.Is_Procedure => Add_Procedure_Modes (The_Heap => The_Heap, Imports => Imports, Exports => Exports, Global_Variables => Global_Variables); when Subprogram.Is_Function => Add_Function_Modes (The_Heap => The_Heap, Global_Variables => Global_Variables); end case; end Add_Modes; procedure Write_Global_Variable_Clauses (Output : in SPARK_IO.File_Type; Start_Col : in E_Strings.Positions; First_Global_Variable : in out Boolean; The_Heap : in Heap.HeapRecord; Global_Variables : in Global_Variables_Type; Mode : in Modes) --# global in Annotation.Start_String; --# in LexTokenManager.State; --# in SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives First_Global_Variable from *, --# Global_Variables, --# LexTokenManager.State, --# Mode, --# The_Heap & --# SPARK_IO.File_Sys from *, --# Annotation.Start_String, --# First_Global_Variable, --# Global_Variables, --# LexTokenManager.State, --# Mode, --# Output, --# SparkFormatCommandLineData.Content, --# Start_Col, --# The_Heap; is Global_Variable : LexTokenManager.Seq_Algebra.Member_Of_Seq; The_Iterator : SPARKProgram.Iteration.Iterator; procedure Write_Global_Variable (Output : in SPARK_IO.File_Type; Start_Col : in E_Strings.Positions; First_Global_Variable : in out Boolean; The_Heap : in Heap.HeapRecord; Global_Variable : in LexTokenManager.Seq_Algebra.Member_Of_Seq; Global_Variables : in Global_Variables_Type; Mode : in Modes) --# global in Annotation.Start_String; --# in LexTokenManager.State; --# in SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives First_Global_Variable from * & --# SPARK_IO.File_Sys from *, --# Annotation.Start_String, --# First_Global_Variable, --# Global_Variable, --# Global_Variables, --# LexTokenManager.State, --# 
Mode, --# Output, --# SparkFormatCommandLineData.Content, --# Start_Col, --# The_Heap; is procedure Write_Mode (Output : in SPARK_IO.File_Type; Start_Col : in E_Strings.Positions; The_Heap : in Heap.HeapRecord; Global_Variables : in Global_Variables_Type; Mode : in Modes) --# global in SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Global_Variables, --# Mode, --# Output, --# SparkFormatCommandLineData.Content, --# Start_Col, --# The_Heap; is function Mode_Col (Start_Col : E_Strings.Positions) return E_Strings.Positions --# global in SparkFormatCommandLineData.Content; is Mode_Column : E_Strings.Positions; begin if SparkFormatCommandLineData.Content.Global_Indent >= 1 then -- Start_Col "is --# " but indent value is given from "--#" Mode_Column := Global_Col (Start_Col => Start_Col) + (SparkFormatCommandLineData.Content.Global_Indent - 1); else Mode_Column := Global_Col (Start_Col => Start_Col) + 7; -- length of "global " end if; return Mode_Column; end Mode_Col; begin SPARK_IO.Set_Col (Output, Mode_Col (Start_Col => Start_Col)); case Mode is when In_Mode => SPARK_IO.Put_String (Output, "in ", 0); if not (LexTokenManager.Seq_Algebra.Is_Empty_Seq (The_Heap => The_Heap, S => Global_Variables (In_Out_Mode)) and then LexTokenManager.Seq_Algebra.Is_Empty_Seq (The_Heap => The_Heap, S => Global_Variables (Out_Mode))) then SPARK_IO.Put_String (Output, " ", 0); end if; when Out_Mode => if not (LexTokenManager.Seq_Algebra.Is_Empty_Seq (The_Heap => The_Heap, S => Global_Variables (In_Mode)) and then LexTokenManager.Seq_Algebra.Is_Empty_Seq (The_Heap => The_Heap, S => Global_Variables (In_Out_Mode))) then SPARK_IO.Put_String (Output, " ", 0); end if; SPARK_IO.Put_String (Output, "out ", 0); when In_Out_Mode => SPARK_IO.Put_String (Output, "in out ", 0); when Unmoded => null; end case; end Write_Mode; begin -- Write_Global_Variable if First_Global_Variable then First_Global_Variable := False; if SparkFormatCommandLineData.Content.Global_Indent >= 1 then Annotation.Write (Output => Output, Start_Col => Start_Col); end if; else case Mode is when Unmoded => SPARK_IO.Put_Line (Output, ",", 0); when In_Mode | Out_Mode | In_Out_Mode => SPARK_IO.Put_Line (Output, ";", 0); end case; Annotation.Write (Output => Output, Start_Col => Start_Col); end if; Write_Mode (Output => Output, Start_Col => Start_Col, The_Heap => The_Heap, Global_Variables => Global_Variables, Mode => Mode); E_Strings.Put_String (File => Output, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => Global_Variable))); end Write_Global_Variable; begin -- Write_Global_Variable_Clauses -- This subprogram is called for each mode in turn. -- Each sequence of global variables could be null, in which case we don't write anything -- for that mode. 
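-- Illustrative sketch (added commentary; the variable names are
-- hypothetical and the exact columns depend on the -global_indent
-- setting handled in Mode_Col above): over successive calls, one per
-- mode, the clauses written here build up output of roughly this shape:
--   --# global in     A;
--   --#        in     B;
--   --#        in out C;
--   --#        out    D;
-- with the closing semicolon supplied by the caller in Reformat.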
Global_Variable := LexTokenManager.Seq_Algebra.First_Member (The_Heap => The_Heap, S => Global_Variables (Mode)); if not LexTokenManager.Seq_Algebra.Is_Null_Member (M => Global_Variable) then SPARKProgram.Iteration.Initialise (The_Heap => The_Heap, The_Seq => Global_Variables (Mode), The_Iterator => The_Iterator); Write_Global_Variable (Output => Output, Start_Col => Start_Col, First_Global_Variable => First_Global_Variable, The_Heap => The_Heap, Global_Variable => SPARKProgram.Iteration.Current_Member (The_Iterator => The_Iterator), Global_Variables => Global_Variables, Mode => Mode); SPARKProgram.Iteration.Next (The_Heap => The_Heap, The_Iterator => The_Iterator); while not SPARKProgram.Iteration.Complete (The_Iterator => The_Iterator) loop Write_Global_Variable (Output => Output, Start_Col => Start_Col, First_Global_Variable => First_Global_Variable, The_Heap => The_Heap, Global_Variable => SPARKProgram.Iteration.Current_Member (The_Iterator => The_Iterator), Global_Variables => Global_Variables, Mode => Mode); SPARKProgram.Iteration.Next (The_Heap => The_Heap, The_Iterator => The_Iterator); end loop; end if; end Write_Global_Variable_Clauses; procedure Dispose (The_Heap : in out Heap.HeapRecord; Global_Variables : in Global_Variables_Type) --# derives The_Heap from *, --# Global_Variables; is begin for Mode in Modes loop LexTokenManager.Seq_Algebra.Dispose_Of_Seq (The_Heap => The_Heap, S => Global_Variables (Mode)); end loop; end Dispose; begin -- Globals_List.Reformat Revised_Global_Variables := Global_Variables; if OK then -- CFR1753: Check to see if an export has been excluded. An_Export_Is_Excluded := not E_Strings.Is_Empty (E_Str => SparkFormatCommandLineData.Content.Exclude_Export); case Function_Or_Procedure is when Subprogram.Is_Procedure | Subprogram.Is_Unknown => if An_Export_Is_Excluded then Remove_Excluded_Export (The_Heap => The_Heap, Imports => Imports, Exports => Exports, Global_Variables => Revised_Global_Variables); end if; when Subprogram.Is_Function => null; end case; if not An_Export_Is_Excluded or else not LexTokenManager.Seq_Algebra.Is_Empty_Seq (The_Heap => The_Heap, S => Revised_Global_Variables (Out_Mode)) or else not LexTokenManager.Seq_Algebra.Is_Empty_Seq (The_Heap => The_Heap, S => Revised_Global_Variables (In_Out_Mode)) or else not LexTokenManager.Seq_Algebra.Is_Empty_Seq (The_Heap => The_Heap, S => Revised_Global_Variables (In_Mode)) or else not LexTokenManager.Seq_Algebra.Is_Empty_Seq (The_Heap => The_Heap, S => Revised_Global_Variables (Unmoded)) then Annotation.Write (Output => Output, Start_Col => Start_Col); Write_Global (Output => Output, Start_Col => Start_Col); if SparkFormatCommandLineData.Content.Add_Modes then Add_Modes (The_Heap => The_Heap, Function_Or_Procedure => Function_Or_Procedure, Imports => Imports, Exports => Exports, Global_Variables => Revised_Global_Variables); end if; First_Global_Variable := True; for Mode in Modes loop Write_Global_Variable_Clauses (Output => Output, Start_Col => Start_Col, First_Global_Variable => First_Global_Variable, The_Heap => The_Heap, Global_Variables => Revised_Global_Variables, Mode => Mode); end loop; SPARK_IO.Put_Line (Output, ";", 0); end if; else File_IO.Reset (Temporary_File => Temporary_File); SPARKProgram.Copy (Input => Temporary_File, Output => Output); end if; Dispose (The_Heap => The_Heap, Global_Variables => Revised_Global_Variables); File_IO.Delete (Temporary_File => Temporary_File); if not No_Empty_Lines then SPARK_IO.New_Line (Output, Consecutive_Empty_Lines); end if; 
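-- The blank lines counted while the globals list was being buffered have
-- just been re-emitted (if any), preserving the blank-line spacing of the
-- original source; the counter is now cleared before the next annotation
-- is processed.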
Reset_Consecutive_Empty_Lines; end Reformat; end Globals_List; function Contains_Function_Specification (Input_Line : E_Strings.T) return Boolean is Index : E_Strings.Positions; begin Index := E_Strings.Positions'First; White_Space.Skip (Input_Line => Input_Line, Index => Index); return E_Strings.Get_Element (E_Str => Input_Line, Pos => Index) = 'f' and then E_Strings.Get_Element (E_Str => Input_Line, Pos => Index + 1) = 'u' and then E_Strings.Get_Element (E_Str => Input_Line, Pos => Index + 2) = 'n' and then E_Strings.Get_Element (E_Str => Input_Line, Pos => Index + 3) = 'c' and then E_Strings.Get_Element (E_Str => Input_Line, Pos => Index + 4) = 't' and then E_Strings.Get_Element (E_Str => Input_Line, Pos => Index + 5) = 'i' and then E_Strings.Get_Element (E_Str => Input_Line, Pos => Index + 6) = 'o' and then E_Strings.Get_Element (E_Str => Input_Line, Pos => Index + 7) = 'n'; end Contains_Function_Specification; function Contains_Procedure_Specification (Input_Line : E_Strings.T) return Boolean is Index : E_Strings.Positions; begin Index := E_Strings.Positions'First; White_Space.Skip (Input_Line => Input_Line, Index => Index); return E_Strings.Get_Element (E_Str => Input_Line, Pos => Index) = 'p' and then E_Strings.Get_Element (E_Str => Input_Line, Pos => Index + 1) = 'r' and then E_Strings.Get_Element (E_Str => Input_Line, Pos => Index + 2) = 'o' and then E_Strings.Get_Element (E_Str => Input_Line, Pos => Index + 3) = 'c' and then E_Strings.Get_Element (E_Str => Input_Line, Pos => Index + 4) = 'e' and then E_Strings.Get_Element (E_Str => Input_Line, Pos => Index + 5) = 'd' and then E_Strings.Get_Element (E_Str => Input_Line, Pos => Index + 6) = 'u' and then E_Strings.Get_Element (E_Str => Input_Line, Pos => Index + 7) = 'r' and then E_Strings.Get_Element (E_Str => Input_Line, Pos => Index + 8) = 'e'; end Contains_Procedure_Specification; begin -- Reformat_Annotations State := Parsing_SPARK_Code; Globals_List_File := SPARK_IO.Null_File; Inherit_Clause_File := SPARK_IO.Null_File; Initialize_Spec_File := SPARK_IO.Null_File; Own_Var_Clause_File := SPARK_IO.Null_File; Derives_List_File := SPARK_IO.Null_File; Function_Or_Procedure := Subprogram.Is_Unknown; Heap.Initialize (The_Heap); SparkLex.Clear_Line_Context; Annotation.Initialize; Derives_List.Initialize; Annotations.Initialize (This => Inherit_Anno, Anno_Intro => "inherit", Anno_Succ => "main_program", Anno_Indent => SparkFormatCommandLineData.Content.Inherit_Indent); Annotations.Initialize (This => Initializes_Anno, Anno_Intro => "initializes", Anno_Succ => "", Anno_Indent => SparkFormatCommandLineData.Content.Initialization_Indent); Annotations.Initialize (This => Own_Var_Anno, Anno_Intro => "own", Anno_Succ => "initializes", Anno_Indent => SparkFormatCommandLineData.Content.Own_Indent); if not SPARK_IO.End_Of_File (Input) then E_Strings.Get_Line (File => Input, E_Str => Input_Line); File_IO_Required := False; while not (File_IO_Required and then SPARK_IO.End_Of_File (Input)) loop if File_IO_Required then E_Strings.Get_Line (File => Input, E_Str => Input_Line); else File_IO_Required := True; end if; case State is when Parsing_SPARK_Code => if Contains_Function_Specification (Input_Line => Input_Line) then E_Strings.Put_Line (File => Output, E_Str => Input_Line); Function_Or_Procedure := Subprogram.Is_Function; elsif Contains_Procedure_Specification (Input_Line => Input_Line) then E_Strings.Put_Line (File => Output, E_Str => Input_Line); Function_Or_Procedure := Subprogram.Is_Procedure; elsif Globals_List.Is_Start (Input_Line => 
Input_Line) then File_IO.Create (Temporary_File => Globals_List_File); E_Strings.Put_Line (File => Globals_List_File, E_Str => Input_Line); if SPARK_IO.End_Of_File (Input) then -- This route only taken when global alone at end of file Globals_List.Parse (Temporary_File => Globals_List_File, Start_Col => Start_Col, The_Heap => The_Heap, Global_Variables => Global_Variables, OK => Globals_List_OK); LexTokenManager.Seq_Algebra.Create_Seq (The_Heap => The_Heap, S => Imports); LexTokenManager.Seq_Algebra.Create_Seq (The_Heap => The_Heap, S => Exports); Globals_List.Reformat (OK => Globals_List_OK, Temporary_File => Globals_List_File, Output => Output, Start_Col => Start_Col, The_Heap => The_Heap, Function_Or_Procedure => Function_Or_Procedure, Global_Variables => Global_Variables, Imports => Imports, Exports => Exports); Function_Or_Procedure := Subprogram.Is_Unknown; else -- Normal route State := Parsing_Globals_List; end if; elsif Derives_List.Is_Start (Input_Line => Input_Line) then File_IO.Create (Temporary_File => Derives_List_File); E_Strings.Put_Line (File => Derives_List_File, E_Str => Input_Line); if Derives_List.Is_End (Input_Line => Input_Line) then --# accept Flow_Message, 10, Imports, "There is no globals list, so no need for the list of imports."; Derives_List.Parse (Temporary_File => Derives_List_File, Start_Col => Start_Col, The_Heap => The_Heap, Imports => Imports, Exports => Exports, Dependencies => Dependencies, OK => Derives_List_OK); --# end accept; Derives_List.Reformat (OK => Derives_List_OK, Temporary_File => Derives_List_File, Output => Output, Start_Col => Start_Col, The_Heap => The_Heap, Exports => Exports, Dependencies => Dependencies); Function_Or_Procedure := Subprogram.Is_Unknown; elsif SPARK_IO.End_Of_File (Input) then File_IO.Reset (Temporary_File => Derives_List_File); Copy (Input => Derives_List_File, Output => Output); Function_Or_Procedure := Subprogram.Is_Unknown; else State := Parsing_Derives_List; end if; elsif Annotations.Is_Start_Of (This => Inherit_Anno, Input_Line => Input_Line) then File_IO.Create (Temporary_File => Inherit_Clause_File); State := Parsing_Inherit_Clause; File_IO_Required := False; elsif Annotations.Is_Start_Of (This => Initializes_Anno, Input_Line => Input_Line) then File_IO.Create (Temporary_File => Initialize_Spec_File); State := Parsing_Initialization_Specification; File_IO_Required := False; elsif Annotations.Is_Start_Of (This => Own_Var_Anno, Input_Line => Input_Line) then File_IO.Create (Temporary_File => Own_Var_Clause_File); State := Parsing_Own_Variable_Clause; File_IO_Required := False; else E_Strings.Put_Line (File => Output, E_Str => Input_Line); end if; when Parsing_Globals_List => if Globals_List.Is_End (Input_Line => Input_Line) then if Derives_List.Is_Start (Input_Line => Input_Line) then if Globals_List.No_Empty_Lines then File_IO.Create (Temporary_File => Derives_List_File); E_Strings.Put_Line (File => Derives_List_File, E_Str => Input_Line); if Derives_List.Is_End (Input_Line => Input_Line) then --# accept F, 10, SparkLex.Curr_Line, --# "Not used when both global and derives lists present" & --# F, 10, Start_Col, --# "If both globals and derives lists present, Start_Col value from derives list is used"; Globals_List.Parse (Temporary_File => Globals_List_File, Start_Col => Start_Col, The_Heap => The_Heap, Global_Variables => Global_Variables, OK => Globals_List_OK); --# end accept; Derives_List.Parse (Temporary_File => Derives_List_File, Start_Col => Start_Col, The_Heap => The_Heap, Imports => Imports, Exports => 
Exports, Dependencies => Dependencies, OK => Derives_List_OK); Globals_List.Reformat (OK => Globals_List_OK, Temporary_File => Globals_List_File, Output => Output, Start_Col => Start_Col, The_Heap => The_Heap, Function_Or_Procedure => Function_Or_Procedure, Global_Variables => Global_Variables, Imports => Imports, Exports => Exports); Derives_List.Reformat (OK => Derives_List_OK, Temporary_File => Derives_List_File, Output => Output, Start_Col => Start_Col, The_Heap => The_Heap, Exports => Exports, Dependencies => Dependencies); Function_Or_Procedure := Subprogram.Is_Unknown; State := Parsing_SPARK_Code; else State := Parsing_Derives_List; end if; else Globals_List.Parse (Temporary_File => Globals_List_File, Start_Col => Start_Col, The_Heap => The_Heap, Global_Variables => Global_Variables, OK => Globals_List_OK); LexTokenManager.Seq_Algebra.Create_Seq (The_Heap => The_Heap, S => Imports); LexTokenManager.Seq_Algebra.Create_Seq (The_Heap => The_Heap, S => Exports); Globals_List.Reformat (OK => Globals_List_OK, Temporary_File => Globals_List_File, Output => Output, Start_Col => Start_Col, The_Heap => The_Heap, Function_Or_Procedure => Function_Or_Procedure, Global_Variables => Global_Variables, Imports => Imports, Exports => Exports); State := Parsing_SPARK_Code; File_IO_Required := False; end if; else Globals_List.Parse (Temporary_File => Globals_List_File, Start_Col => Start_Col, The_Heap => The_Heap, Global_Variables => Global_Variables, OK => Globals_List_OK); LexTokenManager.Seq_Algebra.Create_Seq (The_Heap => The_Heap, S => Imports); LexTokenManager.Seq_Algebra.Create_Seq (The_Heap => The_Heap, S => Exports); Globals_List.Reformat (OK => Globals_List_OK, Temporary_File => Globals_List_File, Output => Output, Start_Col => Start_Col, The_Heap => The_Heap, Function_Or_Procedure => Function_Or_Procedure, Global_Variables => Global_Variables, Imports => Imports, Exports => Exports); Function_Or_Procedure := Subprogram.Is_Unknown; State := Parsing_SPARK_Code; File_IO_Required := False; -- Have another look at line in case is a function end if; elsif SPARK_IO.End_Of_File (Input) then E_Strings.Put_Line (File => Globals_List_File, E_Str => Input_Line); Globals_List.Parse (Temporary_File => Globals_List_File, Start_Col => Start_Col, The_Heap => The_Heap, Global_Variables => Global_Variables, OK => Globals_List_OK); LexTokenManager.Seq_Algebra.Create_Seq (The_Heap => The_Heap, S => Imports); LexTokenManager.Seq_Algebra.Create_Seq (The_Heap => The_Heap, S => Exports); Globals_List.Reformat (OK => Globals_List_OK, Temporary_File => Globals_List_File, Output => Output, Start_Col => Start_Col, The_Heap => The_Heap, Function_Or_Procedure => Function_Or_Procedure, Global_Variables => Global_Variables, Imports => Imports, Exports => Exports); Function_Or_Procedure := Subprogram.Is_Unknown; State := Parsing_SPARK_Code; else E_Strings.Put_Line (File => Globals_List_File, E_Str => Input_Line); if E_Strings.Is_Empty (E_Str => E_Strings.Trim (Input_Line)) then Globals_List.Increment_Consecutive_Empty_Lines; else Globals_List.Reset_Consecutive_Empty_Lines; end if; end if; when Parsing_Derives_List => E_Strings.Put_Line (File => Derives_List_File, E_Str => Input_Line); if Derives_List.Is_End (Input_Line => Input_Line) then Derives_List.Parse (Temporary_File => Derives_List_File, Start_Col => Start_Col, The_Heap => The_Heap, Imports => Imports, Exports => Exports, Dependencies => Dependencies, OK => Derives_List_OK); if SPARK_IO.Is_Open (Globals_List_File) then Globals_List.Parse (Temporary_File => 
Globals_List_File, Start_Col => Start_Col, The_Heap => The_Heap, Global_Variables => Global_Variables, OK => Globals_List_OK); Globals_List.Reformat (OK => Globals_List_OK, Temporary_File => Globals_List_File, Output => Output, Start_Col => Start_Col, The_Heap => The_Heap, Function_Or_Procedure => Function_Or_Procedure, Global_Variables => Global_Variables, Imports => Imports, Exports => Exports); end if; Derives_List.Reformat (OK => Derives_List_OK, Temporary_File => Derives_List_File, Output => Output, Start_Col => Start_Col, The_Heap => The_Heap, Exports => Exports, Dependencies => Dependencies); Function_Or_Procedure := Subprogram.Is_Unknown; State := Parsing_SPARK_Code; elsif SPARK_IO.End_Of_File (Input) then if SPARK_IO.Is_Open (Globals_List_File) then Globals_List.Parse (Temporary_File => Globals_List_File, Start_Col => Start_Col, The_Heap => The_Heap, Global_Variables => Global_Variables, OK => Globals_List_OK); LexTokenManager.Seq_Algebra.Create_Seq (The_Heap => The_Heap, S => Imports); LexTokenManager.Seq_Algebra.Create_Seq (The_Heap => The_Heap, S => Exports); Globals_List.Reformat (OK => Globals_List_OK, Temporary_File => Globals_List_File, Output => Output, Start_Col => Start_Col, The_Heap => The_Heap, Function_Or_Procedure => Function_Or_Procedure, Global_Variables => Global_Variables, Imports => Imports, Exports => Exports); end if; File_IO.Reset (Temporary_File => Derives_List_File); Copy (Input => Derives_List_File, Output => Output); Function_Or_Procedure := Subprogram.Is_Unknown; State := Parsing_SPARK_Code; end if; when Parsing_Inherit_Clause => E_Strings.Put_Line (File => Inherit_Clause_File, E_Str => Input_Line); if Annotations.Is_End_Of (This => Inherit_Anno, Input_Line => Input_Line) or else SPARK_IO.End_Of_File (Input) then Reformatter.Initialise (Anno => Inherit_Anno, Dotted_Names => Reformatter.Allow_Dotted_Names_Const, The_Heap => The_Heap, This => Inherit_Clause); Reformatter.Parse (This => Inherit_Clause, The_Heap => The_Heap, Temporary_File => Inherit_Clause_File); Reformatter.Reformat (This => Inherit_Clause, The_Heap => The_Heap, Temporary_File => Inherit_Clause_File, Output => Output, Success => Reformat_Successful); --# accept F, 10, Inherit_Clause, --# "The state variable has been 'disposed' and should not be reused without initialisation"; Reformatter.Finalise (This => Inherit_Clause, The_Heap => The_Heap); --# end accept; File_IO.Delete (Temporary_File => Inherit_Clause_File); State := Parsing_SPARK_Code; File_IO_Required := not Reformat_Successful; end if; when Parsing_Initialization_Specification => E_Strings.Put_Line (File => Initialize_Spec_File, E_Str => Input_Line); if Annotations.Is_End_Of (This => Initializes_Anno, Input_Line => Input_Line) or else SPARK_IO.End_Of_File (Input) then Reformatter.Initialise (Anno => Initializes_Anno, Dotted_Names => Reformatter.Disallow_Dotted_Names_Const, The_Heap => The_Heap, This => Initialization_Spec); Reformatter.Parse (This => Initialization_Spec, The_Heap => The_Heap, Temporary_File => Initialize_Spec_File); Reformatter.Reformat (This => Initialization_Spec, The_Heap => The_Heap, Temporary_File => Initialize_Spec_File, Output => Output, Success => Reformat_Successful); --# accept F, 10, Initialization_Spec, --# "The state variable has been 'disposed' and should not be reused without initialisation"; Reformatter.Finalise (This => Initialization_Spec, The_Heap => The_Heap); --# end accept; File_IO.Delete (Temporary_File => Initialize_Spec_File); State := Parsing_SPARK_Code; File_IO_Required := not 
Reformat_Successful; end if; when Parsing_Own_Variable_Clause => E_Strings.Put_Line (File => Own_Var_Clause_File, E_Str => Input_Line); if Annotations.Is_End_Of (This => Own_Var_Anno, Input_Line => Input_Line) or else SPARK_IO.End_Of_File (Input) then Reformatter.Initialise (Anno => Own_Var_Anno, Dotted_Names => Reformatter.Disallow_Dotted_Names_Const, The_Heap => The_Heap, This => Own_Variable_Clause); Reformatter.Parse (This => Own_Variable_Clause, The_Heap => The_Heap, Temporary_File => Own_Var_Clause_File); Reformatter.Reformat (This => Own_Variable_Clause, The_Heap => The_Heap, Temporary_File => Own_Var_Clause_File, Output => Output, Success => Reformat_Successful); --# accept F, 10, Own_Variable_Clause, --# "The state variable has been 'disposed' and should not be reused without initialisation"; Reformatter.Finalise (This => Own_Variable_Clause, The_Heap => The_Heap); --# end accept; File_IO.Delete (Temporary_File => Own_Var_Clause_File); State := Parsing_SPARK_Code; File_IO_Required := not Reformat_Successful; end if; when Parsing_Refinement_Definition => -- To be completed State := Parsing_SPARK_Code; end case; end loop; end if; end Reformat_Annotations; end SPARKProgram; spark-2012.0.deb/sparkformat/all.wrn0000644000175000017500000000021111753202340016257 0ustar eugeneugen-- Warning control file for SPARKFormat default_loop_assertions direct hidden_parts notes pragma all rep static_expressions with_clauses spark-2012.0.deb/sparkformat/sparkformat.adb0000644000175000017500000004531111753202340017772 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Ada.Exceptions; with Ada.Command_Line; with CommandLineData; with Dictionary; with ErrorHandler; with E_Strings; with GNAT.Traceback.Symbolic; with ScreenEcho; with SparkFormatCommandLineData; with SparkFormatCommandLineHandler; with SparkLex; with SPARKProgram; with SPARK_IO; with Version; use type SPARK_IO.File_Status; --# inherit CommandLineData, --# Dictionary, --# ErrorHandler, --# E_Strings, --# LexTokenManager, --# SparkFormatCommandLineData, --# SparkFormatCommandLineHandler, --# SparkLex, --# SPARKProgram, --# SPARK_IO, --# Statistics, --# Version; --# main_program procedure SPARKFormat --# global in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# out CommandLineData.Content; --# out Dictionary.Dict; --# out SparkFormatCommandLineData.Content; --# out SparkLex.Curr_Line; --# derives CommandLineData.Content, --# Dictionary.Dict, --# SparkFormatCommandLineData.Content from LexTokenManager.State, --# SPARK_IO.File_Sys & --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Statistics.TableUsage from *, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# SparkLex.Curr_Line from ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys; is Input : SPARK_IO.File_Type; Output : SPARK_IO.File_Type; Temporary_File : SPARK_IO.File_Type; Status : SPARK_IO.File_Status; Filename : E_Strings.T; procedure Print_Help --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *; -- -- Outputs the usage to the user. is --# hide Print_Help; begin -- Exclude is a hidden option do not include it in help text. 
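-- Illustrative invocations (added commentary; the option names are taken
-- from the help text below, but the value syntax and file names are
-- assumptions):
--   sparkformat -add_modes -default_function_modes=in_mode stacks.adb
--   sparkformat -global_indent=3 -expand stacks.ads
-- As noted below, options may be abbreviated to the shortest unique
-- prefix.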
SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Usage: sparkformat {option} [argument-list]", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Options - all may be abbreviated to the shortest unique prefix", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "option = modes_option | indent_option | defaultfunction_option | help_option |", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " annotation_option | expansion_option | order_option | noswitch_option", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "modes_option = add_modes_option | noadd_modes_option", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " add_modes_option = " & "-" & "add_modes - add modes to unmoded global variables.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " - Default: off", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " noadd_modes_option = " & "-" & "noadd_modes - do not add modes to unmoded global", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " variables.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " - Default: on", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "indent_option = export_option | import_option | global_option", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " | inherit_option | own_option | refinement_option", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " | constituent_option | initialization_option", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " | separator_option | properties_option", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " export_option = " & "-" & "export_indent - specifies the degree of indentation of", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " the export variables from --# or keeps ", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " them inline.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " - Default: inline", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " import_option = " & "-" & "import_indent - specifies the degree of indentation of", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " the import variables from --# or keeps ", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " them inline.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " - Default: inline", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " global_option = " & "-" & "global_indent - specifies the degree of indentation of", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " the global variables from --# or keeps ", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " them inline.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " - Default: inline", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " inherit_option = " & "-" & "inherit_indent - specifies the degree of indentation of", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " the package names from --# or keeps ", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " them inline.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " - Default: inline", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " own_option = " & "-" & "own_indent - specifies the degree of indentation of", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " the own variables from --# or keeps ", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " them inline.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " - Default: inline", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " 
refinement_option = " & "-" & "refinement_indent", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " - specifies the degree of indentation of", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " the own variables from --# or keeps ", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " them inline.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " - Default: inline", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " constituent_option = " & "-" & "constituent_indent", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " - specifies the degree of indentation of", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " the constituents from --# or keeps ", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " them inline.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " - Default: inline", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " initialization_option = " & "-" & "initialization_indent", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " - specifies the degree of indentation of", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " the own variables from --# or keeps ", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " them inline.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " - Default: inline", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " separator_option = " & "-" & "separator_indent - specifies the degree of indentation of", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " the separators from and & from --# or ", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " keeps them inline.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " - Default: inline", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " properties_option = " & "-" & "properties_indent - specifies the degree of indentation", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " of own variable properties from --# or ", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " keeps them inline.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " - Default: inline", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "defaultfunction_option = " & "-" & "default_function_modes = in_mode | unmoded ", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " - when used in conjunction with /add_modes, force ", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " global variables of functions to the specified ", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " default function mode. 
", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " - Default: unmoded", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "help_option = help_option | version_option", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " help_option = " & "-" & "help - print off help information.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " version_option = " & "-" & "version - print off version information.", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "annotation_option = " & "-" & "annotation_character - specify annotation character.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " - Default: #", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "expansion_option = expand_option | compress_option", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " expand_option = " & "-" & "expand - expands the dependency relations.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " - Default: off", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " compress_option = " & "-" & "compress - compresses the dependency relations.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " - Default: on", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "noswitch_option = -noswitch - ignore spark.sw file", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "argument-list = file-spec { separator file-spec } - File(s) to reformat", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Version.Toolset_Support_Line1, 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Version.Toolset_Support_Line2, 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Version.Toolset_Support_Line3, 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Version.Toolset_Support_Line4, 0); end Print_Help; procedure Print_Version --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *; -- -- Outputs the usage to the user. is --# hide Print_Version; begin SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "SPARKFormat " & Version.Toolset_Banner_Line, 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Version.Toolset_Copyright, 0); end Print_Version; begin CommandLineData.Initialize; -- Allow SPARK95 mode --# accept w, 169, CommandLineData.Content.Language_Profile, "Direct update OK here."; CommandLineData.Content.Language_Profile := CommandLineData.SPARK95; --# end accept; -- Always allow FDL reserved words as identifiers. Leave it to the Examiner -- to reject them later if required. --# accept w, 169, CommandLineData.Content.FDL_Reserved, "Direct update OK here."; CommandLineData.Content.FDL_Reserved := False; --# end accept; Dictionary.Initialize (Write_To_File => False); SparkFormatCommandLineData.Initialize; SparkFormatCommandLineHandler.Process; ErrorHandler.Spark_Make_Init; SparkLex.Clear_Line_Context; if SparkFormatCommandLineData.Content.Valid then if SparkFormatCommandLineData.Content.Help then Print_Help; elsif SparkFormatCommandLineData.Content.Version then Print_Version; elsif SparkFormatCommandLineData.Content.Number_Source > 0 then for SourceFile in SparkFormatCommandLineData.Source_File_Counts range 1 .. 
SparkFormatCommandLineData.Content.Number_Source loop Filename := SparkFormatCommandLineData.Content.Source_File_List (SourceFile).Source_Filename; Input := SPARK_IO.Null_File; E_Strings.Open (File => Input, Mode_Of_File => SPARK_IO.In_File, Name_Of_File => Filename, Form_Of_File => "", Status => Status); if Status /= SPARK_IO.Ok then SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Can't open ", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Filename); else Temporary_File := SPARK_IO.Null_File; --# accept Flow_Message, 10, Status, "Status is ignored"; SPARK_IO.Create (Temporary_File, 0, "", "", Status); -- don't know what we can do if we can't create the temporary file --# end accept; SPARKProgram.Reformat_Annotations (Input => Input, Output => Temporary_File); --# accept Flow_Message, 10, Status, "Status is ignored" & --# Flow_Message, 10, Input, "File 'Input' is closed"; SPARK_IO.Close (Input, Status); -- don't know what we can do if we can't close the input file --# end accept; --# accept Flow_Message, 10, Status, "Status is ignored"; SPARK_IO.Reset (Temporary_File, SPARK_IO.In_File, Status); -- don't know what we can do if we can't reset the temporary file --# end accept; Output := SPARK_IO.Null_File; E_Strings.Create (File => Output, Name_Of_File => SparkFormatCommandLineData.Content.Source_File_List (SourceFile).Source_Filename, Form_Of_File => "", Status => Status); if Status /= SPARK_IO.Ok then SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Can't write to ", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Filename); else SPARKProgram.Copy (Temporary_File, Output); end if; end if; end loop; else SPARKProgram.Reformat_Annotations (Input => SPARK_IO.Standard_Input, Output => SPARK_IO.Standard_Output); end if; end if; exception --# hide SPARKFormat; when E : others => ScreenEcho.New_Line (1); ScreenEcho.Put_Line ("Unexpected internal error in SPARKFormat"); ScreenEcho.New_Line (1); ScreenEcho.Put_Line (Version.Toolset_Support_Line1); ScreenEcho.Put_Line (Version.Toolset_Support_Line2); ScreenEcho.Put_Line (Version.Toolset_Support_Line3); ScreenEcho.Put_Line (Version.Toolset_Support_Line4); ScreenEcho.New_Line (1); ScreenEcho.Put_Line ("Exception information:"); ScreenEcho.Put_Line (Ada.Exceptions.Exception_Information (E)); ScreenEcho.Put_Line ("Traceback:"); ScreenEcho.Put_Line (GNAT.Traceback.Symbolic.Symbolic_Traceback (E)); Ada.Command_Line.Set_Exit_Status (9); end SPARKFormat; spark-2012.0.deb/sparkformat/Makefile0000644000175000017500000000664311753202340016436 0ustar eugeneugen# ----------------------------------------------------------------------------- # (C) Altran Praxis Limited # ----------------------------------------------------------------------------- # # The SPARK toolset is free software; you can redistribute it and/or modify it # under terms of the GNU General Public License as published by the Free # Software Foundation; either version 3, or (at your option) any later # version. The SPARK toolset is distributed in the hope that it will be # useful, but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the GNU # General Public License distributed with the SPARK toolset; see file # COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of # the license. 
# # ============================================================================= ################################################################################ # PURPOSE #------------------------------------------------------------------------------- # Makefile for SPARKFormat # For Linux, Windows, Solaris or Mac OS X (aka Darwin). ################################################################################ ################################################################################ # BUILD CONFIGURATION ################################################################################ # Name of the output program. OUTPUT_NAME:=sparkformat # Location of root. ROOT:=.. # Location of common. COMMON:=${ROOT}/common include ${COMMON}/Makefile.inc ################################################################################ # PLATFORM SPECIFIC CONFIGURATION ################################################################################ # Windows. ifeq (${TARGET},Windows) # The --stack option is Windows specific, and supports setting the # 'StackCommitSize' and 'StackReserveSize'. The interpretation of # these values is not treated consistently across different # Windows versions. Experience and testing on various Windows # versions has led to the current stack size being set at: # 0x10000000,0x100000 Do not change this value without considering # its full ramifications. LINK_ARGS:=-Xlinker --stack=0x10000000,0x100000 endif # Solaris. ifeq (${TARGET},SunOS) LINK_ARGS:= endif # Linux. ifeq (${TARGET},Linux) LINK_ARGS:= endif # Darwin (Mac OS X 10.5 or 10.6, 64-bit). ifeq (${TARGET},Darwin) LINK_ARGS:= endif ################################################################################ # TARGETS ################################################################################ all: ${OUTPUT_NAME}${EXE_EXTN} ${OUTPUT_NAME}${EXE_EXTN}: preamble prep parser gnatmake -j${SPARKCPUS} ${GNATMAKE_OPTS} ${OUTPUT_NAME} -o $@ -bargs ${BIND_OPTS} -largs ${LINK_ARGS} self-analysis: preamble prep parser -spark -plain @${OUTPUT_NAME}.smf # Initialisations # =============== preamble: $(MAKE) -C ${ROOT}/examiner clean # Platform specific prepping # ========================== prep: $(MAKE) -C ${ROOT}/examiner prep parser: $(MAKE) -C ${ROOT}/examiner parser # Cleaning code base # ================== clean: standardclean reallyclean: clean targetclean vcclean preamble $(MAKE) -C ${ROOT}/examiner reallyclean ################################################################################ # END-OF-FILE spark-2012.0.deb/sparkformat/spark.sw0000644000175000017500000000022311753202340016455 0ustar eugeneugen-sparklib -output_directory=vcg -config_file=../common/gnat.cfg -listing_extension=ls_ -casing -index_file=sparkformat.idx -report=sparkformat.rep spark-2012.0.deb/sparkformat/sparkprogram.ads0000644000175000017500000000663711753202340020202 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SPARK_IO; --# inherit Ada.Characters.Handling, --# Ada.Characters.Latin_1, --# CommandLineData, --# Dictionary, --# ErrorHandler, --# E_Strings, --# Heap, --# LexTokenManager, --# LexTokenManager.Relation_Algebra, --# LexTokenManager.Relation_Algebra.String, --# LexTokenManager.Seq_Algebra, --# RelationAlgebra, --# SeqAlgebra, --# SparkFormatCommandLineData, --# SparkLex, --# SPARK_IO, --# SP_Symbols, --# Statistics; package SPARKProgram is procedure Reformat_Annotations (Input, Output : in SPARK_IO.File_Type); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in SparkFormatCommandLineData.Content; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# out SparkLex.Curr_Line; --# derives ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# Statistics.TableUsage from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Input, --# LexTokenManager.State, --# Output, --# SparkFormatCommandLineData.Content, --# SPARK_IO.File_Sys & --# SparkLex.Curr_Line from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Input, --# LexTokenManager.State, --# Output, --# SparkFormatCommandLineData.Content, --# SPARK_IO.File_Sys; procedure Copy (Input, Output : in SPARK_IO.File_Type); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Input, --# Output; end SPARKProgram; spark-2012.0.deb/sparkformat/sparkformatcommandlinehandler.ads0000644000175000017500000000342311753202340023556 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= --# inherit Ada.Characters.Handling, --# CommandLineData, --# CommandLineHandler, --# ExaminerConstants, --# E_Strings, --# FileSystem, --# ScreenEcho, --# SparkFormatCommandLineData, --# SPARK_IO; package SparkFormatCommandLineHandler is procedure Process; --# global in out CommandLineData.Content; --# in out SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SparkFormatCommandLineData.Content, --# SPARK_IO.File_Sys from CommandLineData.Content, --# SparkFormatCommandLineData.Content, --# SPARK_IO.File_Sys; end SparkFormatCommandLineHandler; spark-2012.0.deb/sparkformat/sparkprogram-reformatter-simplelex.adb0000644000175000017500000004624211753202340024505 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (SPARKProgram.Reformatter) package body SimpleLex is procedure Initialise (Input_File : in SPARK_IO.File_Type; Anno : in Annotations.Anno_Type; Lex_State : out State) is begin Lex_State := State' (File => Input_File, Anno => Anno, Line => E_Strings.Empty_String, Index => E_Strings.Positions'First, In_Annotation => False); end Initialise; procedure Next (This : in out State; Token_Rec : out Token_Record) is Index : E_Strings.Positions; Input_File : SPARK_IO.File_Type; Anno : Annotations.Anno_Type; Now_In_Annotation : Boolean; function Is_Alphanumeric (Ch : Character) return Boolean is begin return Ada.Characters.Handling.Is_Letter (Ch) or else Ada.Characters.Handling.Is_Digit (Ch); end Is_Alphanumeric; procedure Get_Identifier (Input_Line : in E_Strings.T; Index : in out E_Strings.Positions; Token_Rec : out Token_Record) --# derives Index, --# Token_Rec from Index, --# Input_Line; is Start_Pos : E_Strings.Positions; Searching : Boolean; begin Start_Pos := Index; Searching := True; Index := Index + 1; -- The first character is alphanumeric while Searching and Index <= E_Strings.Get_Length (E_Str => Input_Line) loop if Is_Alphanumeric (Ch => E_Strings.Get_Element (E_Str => Input_Line, Pos => Index)) then Index := Index + 1; elsif E_Strings.Get_Element (E_Str => Input_Line, Pos => Index) = '_' then if Index < E_Strings.Get_Length (E_Str => Input_Line) and then Is_Alphanumeric (Ch => E_Strings.Get_Element (E_Str => Input_Line, Pos => Index + 1)) then Index := Index + 2; else Searching := False; end if; else Searching := False; end if; end loop; Token_Rec.Token_Value := E_Strings.Empty_String; for I in E_Strings.Positions range Start_Pos ..
Index - 1 loop E_Strings.Append_Char (E_Str => Token_Rec.Token_Value, Ch => E_Strings.Get_Element (E_Str => Input_Line, Pos => I)); end loop; Token_Rec.Token := Identifier; end Get_Identifier; procedure Get_Property_List (Input_File : in SPARK_IO.File_Type; Anno : in Annotations.Anno_Type; Input_Line : in out E_Strings.T; Index : in out E_Strings.Positions; Token_Rec : out Token_Record) --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives Index, --# Input_Line, --# SPARK_IO.File_Sys from Anno, --# Index, --# Input_File, --# Input_Line, --# SPARK_IO.File_Sys & --# Token_Rec from Anno, --# CommandLineData.Content, --# Index, --# Input_File, --# Input_Line, --# SPARK_IO.File_Sys; is subtype String_1_Range is Integer range 1 .. 1; subtype String_1 is String (String_1_Range); Anno_Continuation : Boolean; Start_String : E_Strings.T; begin Token_Rec.Token := Property_List; Token_Rec.Token_Value := E_Strings.Copy_String (Str => String_1'(1 => E_Strings.Get_Element (E_Str => Input_Line, Pos => Index))); Anno_Continuation := True; Index := Index + 1; loop if E_Strings.Get_Length (E_Str => Input_Line) < Index then E_Strings.Get_Line (File => Input_File, E_Str => Input_Line); Index := 1; Annotations.Is_Anno_Start (This => Anno, Input_Line => Input_Line, Index => Index, OK => Anno_Continuation); if Anno_Continuation then Start_String := E_Strings.Copy_String (Str => "--"); E_Strings.Append_Char (E_Str => Start_String, Ch => CommandLineData.Content.Anno_Char); E_Strings.Append_Char (E_Str => Start_String, Ch => ' '); -- Add symbol to value string to denote continuation of property_list E_Strings.Append_Examiner_String (E_Str1 => Token_Rec.Token_Value, E_Str2 => Start_String); White_Space.Skip (Input_Line => Input_Line, Index => Index); end if; end if; exit when not Anno_Continuation or else E_Strings.Get_Length (E_Str => Input_Line) < Index or else E_Strings.Get_Element (E_Str => Input_Line, Pos => Index) = ';'; E_Strings.Append_Char (E_Str => Token_Rec.Token_Value, Ch => E_Strings.Get_Element (E_Str => Input_Line, Pos => Index)); Index := Index + 1; end loop; end Get_Property_List; procedure Extended_Skip_White_Space (Input_File : in SPARK_IO.File_Type; Anno : in Annotations.Anno_Type; In_Annotation : in out Boolean; Input_Line : in out E_Strings.T; Index : in out E_Strings.Positions) --# global in out SPARK_IO.File_Sys; --# derives Index, --# Input_Line, --# In_Annotation, --# SPARK_IO.File_Sys from Anno, --# Index, --# Input_File, --# Input_Line, --# In_Annotation, --# SPARK_IO.File_Sys; is In_White_Space : Boolean; Is_Annotation_Start : Boolean; procedure Skip_White_Space (Input_Line : in E_Strings.T; Index : in out E_Strings.Positions; Still_White : out Boolean) --# derives Index, --# Still_White from Index, --# Input_Line; is In_White_Space : Boolean; function Is_White_Space (Char : Character) return Boolean is begin return Char = Ada.Characters.Latin_1.Space or Char = Ada.Characters.Latin_1.HT; end Is_White_Space; begin In_White_Space := True; while In_White_Space and Index <= E_Strings.Get_Length (E_Str => Input_Line) loop if Is_White_Space (Char => E_Strings.Get_Element (E_Str => Input_Line, Pos => Index)) then Index := Index + 1; else In_White_Space := False; end if; end loop; Still_White := In_White_Space; end Skip_White_Space; begin In_White_Space := True; while In_White_Space loop if Index > E_Strings.Get_Length (E_Str => Input_Line) then if SPARK_IO.End_Of_File (Input_File) then In_White_Space := False; In_Annotation := False; else E_Strings.Get_Line (File =>
Input_File, E_Str => Input_Line); Index := 1; -- Expect an annotation start Annotations.Is_Anno_Start (This => Anno, Input_Line => Input_Line, Index => Index, OK => Is_Annotation_Start); if In_Annotation and not Is_Annotation_Start then In_White_Space := False; In_Annotation := False; elsif not In_Annotation and Is_Annotation_Start then In_White_Space := False; In_Annotation := True; else -- Treat as whitespace null; end if; end if; end if; if In_White_Space then Skip_White_Space (Input_Line => Input_Line, Index => Index, Still_White => In_White_Space); end if; end loop; if not In_Annotation then Index := 1; Input_Line := E_Strings.Empty_String; end if; end Extended_Skip_White_Space; procedure Check_For_A_Reserved_Word (Tok_Rec : in out Token_Record) --# derives Tok_Rec from *; is procedure Check_For_RW (RWord : in String; Symbol : in Token_Type; Tok_Rec : in out Token_Record) --# derives Tok_Rec from *, --# RWord, --# Symbol; is begin if E_Strings.Eq1_String (E_Str => Tok_Rec.Token_Value, Str => RWord) then Tok_Rec.Token := Symbol; Tok_Rec.Token_Value := E_Strings.Copy_String (Str => RWord); end if; end Check_For_RW; begin case E_Strings.Get_Element (E_Str => Tok_Rec.Token_Value, Pos => 1) is when 'd' | 'D' => Check_For_RW (RWord => "derives", Symbol => RW_Derives, Tok_Rec => Tok_Rec); when 'f' | 'F' => Check_For_RW (RWord => "from", Symbol => RW_From, Tok_Rec => Tok_Rec); when 'g' | 'G' => Check_For_RW (RWord => "global", Symbol => RW_Global, Tok_Rec => Tok_Rec); when 'i' | 'I' => if E_Strings.Get_Length (E_Str => Tok_Rec.Token_Value) > 1 then case E_Strings.Get_Element (E_Str => Tok_Rec.Token_Value, Pos => 2) is when 'n' | 'N' => if E_Strings.Get_Length (E_Str => Tok_Rec.Token_Value) > 2 then case E_Strings.Get_Element (E_Str => Tok_Rec.Token_Value, Pos => 3) is when 'h' | 'H' => Check_For_RW (RWord => "inherit", Symbol => RW_Inherit, Tok_Rec => Tok_Rec); when 'i' | 'I' => Check_For_RW (RWord => "initializes", Symbol => RW_Initializes, Tok_Rec => Tok_Rec); when others => null; end case; else Check_For_RW (RWord => "in", Symbol => RW_In, Tok_Rec => Tok_Rec); end if; when 's' | 'S' => Check_For_RW (RWord => "is", Symbol => RW_Is, Tok_Rec => Tok_Rec); when others => null; end case; end if; when 'm' | 'M' => Check_For_RW (RWord => "main_program", Symbol => RW_Main_Program, Tok_Rec => Tok_Rec); when 'o' | 'O' => if E_Strings.Get_Length (E_Str => Tok_Rec.Token_Value) > 1 then case E_Strings.Get_Element (E_Str => Tok_Rec.Token_Value, Pos => 2) is when 'u' | 'U' => Check_For_RW (RWord => "out", Symbol => RW_Out, Tok_Rec => Tok_Rec); when 'w' | 'W' => Check_For_RW (RWord => "own", Symbol => RW_Own, Tok_Rec => Tok_Rec); when others => null; end case; end if; when 'p' | 'P' => Check_For_RW (RWord => "protected", Symbol => RW_Protected, Tok_Rec => Tok_Rec); when 't' | 'T' => Check_For_RW (RWord => "task", Symbol => RW_Task, Tok_Rec => Tok_Rec); when others => null; end case; end Check_For_A_Reserved_Word; procedure Get_Punctuation (Input_Line : in E_Strings.T; Index : in out E_Strings.Positions; Token : in Token_Type; Token_Rec : out Token_Record) --# derives Index from * & --# Token_Rec from Index, --# Input_Line, --# Token; is subtype String_1_Range is Integer range 1 .. 
1; subtype String_1 is String (String_1_Range); begin Token_Rec.Token := Token; Token_Rec.Token_Value := E_Strings.Copy_String (Str => String_1'(1 => E_Strings.Get_Element (E_Str => Input_Line, Pos => Index))); Index := Index + 1; end Get_Punctuation; begin Index := This.Index; Input_File := This.File; Anno := This.Anno; Now_In_Annotation := This.In_Annotation; Extended_Skip_White_Space (Input_File => Input_File, Anno => Anno, In_Annotation => Now_In_Annotation, Input_Line => This.Line, Index => Index); if not Now_In_Annotation then This.In_Annotation := False; Token_Rec := Token_Record'(Token => Annotation_End, Token_Value => E_Strings.Empty_String); elsif not This.In_Annotation then This.In_Annotation := True; Token_Rec := Token_Record'(Token => Annotation_Start, Token_Value => E_Strings.Empty_String); else if Is_Alphanumeric (Ch => E_Strings.Get_Element (E_Str => This.Line, Pos => Index)) then Get_Identifier (Input_Line => This.Line, Index => Index, Token_Rec => Token_Rec); Check_For_A_Reserved_Word (Tok_Rec => Token_Rec); else case E_Strings.Get_Element (E_Str => This.Line, Pos => Index) is when ':' => Get_Punctuation (Input_Line => This.Line, Index => Index, Token => Colon, Token_Rec => Token_Rec); when ',' => Get_Punctuation (Input_Line => This.Line, Index => Index, Token => Comma, Token_Rec => Token_Rec); when '(' => Get_Property_List (Input_File => Input_File, Anno => Anno, Input_Line => This.Line, Index => Index, Token_Rec => Token_Rec); when '.' => Get_Punctuation (Input_Line => This.Line, Index => Index, Token => Point, Token_Rec => Token_Rec); when ';' => Get_Punctuation (Input_Line => This.Line, Index => Index, Token => Semicolon, Token_Rec => Token_Rec); when others => Get_Punctuation (Input_Line => This.Line, Index => Index, Token => Other_Punct, Token_Rec => Token_Rec); end case; end if; end if; This.Index := Index; end Next; function Get_Col_No (This : State) return E_Strings.Positions is begin return This.Index; end Get_Col_No; end SimpleLex; spark-2012.0.deb/sparkformat/sparkformat.smf0000644000175000017500000000035011753202340020023 0ustar eugeneugensparkformatcommandlinedata.adb -vcg sparkformatcommandlinehandler.adb sparkprogram.adb sparkprogram-iteration.adb -vcg sparkprogram-annotations.adb sparkprogram-reformatter.adb sparkprogram-reformatter-simplelex.adb sparkformat.adb spark-2012.0.deb/sparkformat/sparkformatcommandlinehandler.adb0000644000175000017500000015436111753202340023545 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with CommandLineData; with CommandLineHandler; with ExaminerConstants; with E_Strings; with FileSystem; with ScreenEcho; with SparkFormatCommandLineData; use type CommandLineHandler.S_Typs; package body SparkFormatCommandLineHandler is procedure Process is type Command_Line_Errors is ( ES_InvalidOption, ES_NoComma, ES_Source, EW_TooMany, ES_Anno, ES_Contradict, ES_Duplicate, ES_Indent, ES_Default, ES_InvExclude); Command_String : CommandLineHandler.Command_Strings; procedure Read_Command_Line (Command_String : out CommandLineHandler.Command_Strings) --# global in out SPARK_IO.File_Sys; --# derives Command_String, --# SPARK_IO.File_Sys from SPARK_IO.File_Sys; is Cmd_Line_Found : Boolean; Command_String_Content : E_Strings.T; begin --# accept F, 10, Cmd_Line_Found, "We don't care if we found anything or not"; FileSystem.Read_Cmd_Line (Cmd_Line_Found => Cmd_Line_Found, Cmd_Line => Command_String_Content); --# end accept; Command_String := CommandLineHandler.Command_Strings'(Current_Position => 1, Contents => Command_String_Content); --# accept F, 33, Cmd_Line_Found, "And we don't tell anyone about it either"; end Read_Command_Line; procedure Output_Error (E : in Command_Line_Errors) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# E; is begin case E is when ES_InvalidOption => ScreenEcho.Put_String ("Invalid command line option"); when ES_Source => ScreenEcho.Put_String ("Source file incorrectly specified"); when ES_NoComma => ScreenEcho.Put_String ("Comma missing in line"); when EW_TooMany => ScreenEcho.Put_String ("Too many source files on command line "); when ES_Anno => ScreenEcho.Put_String ("Annotation character option incorrect"); when ES_Contradict => ScreenEcho.Put_String ("Command line option duplicated or contradictory options specified"); when ES_Duplicate => ScreenEcho.Put_String ("Command line options may only be specified once"); when ES_Indent => ScreenEcho.Put_String ("Indentation option incorrect"); when ES_Default => ScreenEcho.Put_String ("Default function mode option incorrect"); when ES_InvExclude => ScreenEcho.Put_String ("Invalid syntax for excluded export"); end case; end Output_Error; procedure Possible_Error (E : in Command_Line_Errors) --# global in SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# E, --# SparkFormatCommandLineData.Content; is begin if not SparkFormatCommandLineData.Content.Valid then Output_Error (E => E); ScreenEcho.New_Line (1); end if; end Possible_Error; procedure Possible_Error2 (E : in Command_Line_Errors; F : in E_Strings.T) --# global in SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# E, --# F, --# SparkFormatCommandLineData.Content; is begin if not SparkFormatCommandLineData.Content.Valid then Output_Error (E => E); ScreenEcho.Put_Char (' '); ScreenEcho.Put_ExaminerLine (F); end if; end Possible_Error2; procedure Parse_Command_Line (Command_String : in CommandLineHandler.Command_Strings) --# global in out CommandLineData.Content; --# in out SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# SparkFormatCommandLineData.Content, --# SPARK_IO.File_Sys from *, --# Command_String, --# SparkFormatCommandLineData.Content; is Next_Symbol : CommandLineHandler.Symbols; Local_Command_String : CommandLineHandler.Command_Strings; procedure Get_Next_Symbol (Command_String 
: in out CommandLineHandler.Command_Strings; Next_Symbol : out CommandLineHandler.Symbols) --# derives Command_String, --# Next_Symbol from Command_String; is begin -- This procedure is intended to return Next_Symbol; however, if the -- symbol is not a string then the string field is not set. Although -- it is not used in these circumstances, its lack of definition -- causes so many flow errors that it is better to use an aggregate to -- initialize Next_Symbol here, and then assign the final value -- to Next_Symbol (for compatibility with the Ada83 "out parameter" rule). Next_Symbol := CommandLineHandler.Symbols'(Typ => CommandLineHandler.S_Empty, The_String => E_Strings.Empty_String); CommandLineHandler.Skip_Spaces (Command_String => Command_String); if Command_String.Current_Position <= E_Strings.Get_Length (E_Str => Command_String.Contents) then case E_Strings.Get_Element (E_Str => Command_String.Contents, Pos => Command_String.Current_Position) is when '=' => Next_Symbol.Typ := CommandLineHandler.S_Equal; Command_String.Current_Position := Command_String.Current_Position + 1; when ',' => --this condition is invariant for any particular system, we are actually --simulating conditional compilation for different target platforms. --Intended behaviour is correct despite flow error that will result. --# accept Flow_Message, 22, "Simulation of conditional compilation"; if FileSystem.Use_Unix_Command_Line then --# end accept; Next_Symbol.Typ := CommandLineHandler.S_String; CommandLineHandler.Read_The_String (Command_String => Command_String, Next_Symbol => Next_Symbol); else -- FileSystem.UseWindowsCommandLine assumed Next_Symbol.Typ := CommandLineHandler.S_Comma; Command_String.Current_Position := Command_String.Current_Position + 1; end if; when '-' => -- We use '-' as a switch character on all platforms Next_Symbol.Typ := CommandLineHandler.S_Switch_Character; Command_String.Current_Position := Command_String.Current_Position + 1; when others => Next_Symbol.Typ := CommandLineHandler.S_String; CommandLineHandler.Read_The_String (Command_String => Command_String, Next_Symbol => Next_Symbol); end case; else -- Exceeded maximum command line length Next_Symbol.Typ := CommandLineHandler.S_Empty; end if; end Get_Next_Symbol; procedure Parse_Command_Options (Command_String : in out CommandLineHandler.Command_Strings; Next_Symbol : in out CommandLineHandler.Symbols) --# global in out CommandLineData.Content; --# in out SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# Command_String, --# Next_Symbol, --# SparkFormatCommandLineData.Content, --# SPARK_IO.File_Sys from *, --# Command_String, --# Next_Symbol, --# SparkFormatCommandLineData.Content; is Annotation_Character_Found : Boolean; Compress_Found : Boolean; Expand_Found : Boolean; Add_Modes_Found : Boolean; Global_Indent_Found : Boolean; Export_Indent_Found : Boolean; Import_Indent_Found : Boolean; Inherit_Indent_Found : Boolean; Own_Indent_Found : Boolean; Refinement_Indent_Found : Boolean; Constituent_Indent_Found : Boolean; Initialization_Indent_Found : Boolean; Separator_Indent_Found : Boolean; Properties_Indent_Found : Boolean; Alphabetic_Ordering_Found : Boolean; Help_Found : Boolean; Version_Found : Boolean; Default_Function_Mode_Found : Boolean; Exclude_Export_Found : Boolean; Opt_Name : E_Strings.T; Opt_Val : E_Strings.T; Opt_Name_OK : Boolean; Opt_Val_OK : Boolean; OK : Boolean; Indent_Val : Integer; -- reads in the option name and the value assigned to it procedure Read_Option
(Opt_Name : out E_Strings.T; Opt_Name_OK : out Boolean; Opt_Val : out E_Strings.T; Opt_Val_OK : out Boolean; Command_String : in out CommandLineHandler.Command_Strings; Next_Symbol : out CommandLineHandler.Symbols) --# derives Command_String, --# Next_Symbol, --# Opt_Name, --# Opt_Name_OK, --# Opt_Val, --# Opt_Val_OK from Command_String; -- pre Next_Symbol.Typ = CommandLineHandler.SSlash; is begin Opt_Val := E_Strings.Empty_String; Get_Next_Symbol (Command_String => Command_String, Next_Symbol => Next_Symbol); Opt_Name_OK := Next_Symbol.Typ = CommandLineHandler.S_String; Opt_Name := Next_Symbol.The_String; Get_Next_Symbol (Command_String => Command_String, Next_Symbol => Next_Symbol); if Opt_Name_OK and Next_Symbol.Typ = CommandLineHandler.S_Equal then Get_Next_Symbol (Command_String => Command_String, Next_Symbol => Next_Symbol); Opt_Val_OK := Next_Symbol.Typ = CommandLineHandler.S_String; Opt_Val := Next_Symbol.The_String; Get_Next_Symbol (Command_String => Command_String, Next_Symbol => Next_Symbol); else Opt_Val_OK := False; end if; end Read_Option; -- converts an ExaminerString to a Natural. S.Content(1) is -- assumes to be a member of '0' .. '9' procedure Examiner_String_To_Natural (S : in E_Strings.T; I : out Natural; OK : out Boolean) --# derives I, --# OK from S; is Stop : Natural; begin if E_Strings.Get_Length (E_Str => S) >= 1 then E_Strings.Get_Int_From_String (Source => S, Item => I, Start_Pt => 1, Stop => Stop); if Stop = E_Strings.Get_Length (E_Str => S) then OK := True; else I := 0; OK := False; end if; else OK := False; I := 0; end if; end Examiner_String_To_Natural; procedure Parse_Indent_Option (Opt_Value : in E_Strings.T; Opt_Value_OK : in Boolean; Indent_Value : out Natural; OK : out Boolean) --# derives Indent_Value, --# OK from Opt_Value, --# Opt_Value_OK; is begin if Opt_Value_OK and then E_Strings.Get_Length (E_Str => Opt_Value) >= 1 then case E_Strings.Get_Element (E_Str => Opt_Value, Pos => 1) is -- if option is set to "inline" when 'i' => OK := CommandLineHandler.Check_Option_Name (Opt_Name => Opt_Value, Str => "inline"); Indent_Value := SparkFormatCommandLineData.Inline; -- check if valid number when '1' .. '9' => Examiner_String_To_Natural (S => Opt_Value, I => Indent_Value, OK => OK); when others => OK := False; -- assign default value Indent_Value := SparkFormatCommandLineData.Inline; end case; else Indent_Value := SparkFormatCommandLineData.Inline; OK := False; end if; end Parse_Indent_Option; -- CFR 1753: A crude syntax check for a dotted simple name as -- required by the /exclude_export. -- The name must include at least one dot. function Check_For_Dotted_Name (S : E_Strings.T) return Boolean is type Symb_T is (Alpha, Digit, Dot, Under); Last_Symb : Symb_T; Dot_Found : Boolean; Error : Boolean; begin Dot_Found := False; Error := False; if E_Strings.Get_Length (E_Str => S) > 2 then -- We require at least 3 chars, i.e., X.Y if E_Strings.Get_Element (E_Str => S, Pos => 1) in 'A' .. 'Z' or else E_Strings.Get_Element (E_Str => S, Pos => 1) in 'a' .. 'z' then Last_Symb := Alpha; for I in E_Strings.Positions range 2 .. E_Strings.Get_Length (E_Str => S) loop case E_Strings.Get_Element (E_Str => S, Pos => I) is when '.' => if Last_Symb /= Dot and Last_Symb /= Under and E_Strings.Get_Length (E_Str => S) > I then Dot_Found := True; Last_Symb := Dot; else Error := True; end if; when 'A' .. 'Z' | 'a' .. 'z' => Last_Symb := Alpha; when '0' .. 
'9' => if Last_Symb /= Dot and Last_Symb /= Under then Last_Symb := Digit; else Error := True; end if; when '_' => if Last_Symb /= Dot and Last_Symb /= Under and E_Strings.Get_Length (E_Str => S) > I then Last_Symb := Under; else Error := True; end if; when others => Error := True; end case; exit when Error; end loop; end if; end if; return Dot_Found and not Error; end Check_For_Dotted_Name; begin -- Parse_Command_Options Annotation_Character_Found := False; Compress_Found := False; Expand_Found := False; Add_Modes_Found := False; Global_Indent_Found := False; Export_Indent_Found := False; Import_Indent_Found := False; Inherit_Indent_Found := False; Own_Indent_Found := False; Refinement_Indent_Found := False; Constituent_Indent_Found := False; Initialization_Indent_Found := False; Separator_Indent_Found := False; Properties_Indent_Found := False; Alphabetic_Ordering_Found := False; Help_Found := False; Version_Found := False; Default_Function_Mode_Found := False; Exclude_Export_Found := False; loop exit when Next_Symbol.Typ /= CommandLineHandler.S_Switch_Character; Read_Option (Opt_Name => Opt_Name, Opt_Name_OK => Opt_Name_OK, Opt_Val => Opt_Val, Opt_Val_OK => Opt_Val_OK, Command_String => Command_String, Next_Symbol => Next_Symbol); if Opt_Name_OK then case E_Strings.Get_Element (E_Str => Opt_Name, Pos => 1) is when 'a' | 'A' => case E_Strings.Get_Element (E_Str => Opt_Name, Pos => 2) is when 'd' | 'D' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt_Name, Str => "add_modes") then SparkFormatCommandLineData.Content.Valid := not Add_Modes_Found; Possible_Error (E => ES_Contradict); Add_Modes_Found := True; SparkFormatCommandLineData.Content.Add_Modes := True; else SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'n' | 'N' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt_Name, Str => "annotation_character") then SparkFormatCommandLineData.Content.Valid := Opt_Val_OK and then E_Strings.Get_Length (E_Str => Opt_Val) = 1 and then not Annotation_Character_Found; Possible_Error (E => ES_Anno); Annotation_Character_Found := True; CommandLineData.Content.Anno_Char := E_Strings.Get_Element (E_Str => Opt_Val, Pos => 1); else SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when others => SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; when 'c' | 'C' => case E_Strings.Get_Element (E_Str => Opt_Name, Pos => 3) is when 'm' | 'M' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt_Name, Str => "compress") then SparkFormatCommandLineData.Content.Valid := not Compress_Found; Possible_Error (E => ES_Duplicate); Compress_Found := True; SparkFormatCommandLineData.Content.Operation := SparkFormatCommandLineData.Compress; else SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'n' | 'N' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt_Name, Str => "constituent_indent") then Parse_Indent_Option (Opt_Value => Opt_Val, Opt_Value_OK => Opt_Val_OK, Indent_Value => Indent_Val, OK => OK); if OK and then not Constituent_Indent_Found then SparkFormatCommandLineData.Content.Valid := True; SparkFormatCommandLineData.Content.Constituent_Indent := Indent_Val; else SparkFormatCommandLineData.Content.Valid := False; end if; Possible_Error (E => ES_Indent); Constituent_Indent_Found := True; else 
SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when others => SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; when 'd' | 'D' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt_Name, Str => "default_function_modes") then SparkFormatCommandLineData.Content.Valid := Opt_Val_OK and then not Default_Function_Mode_Found; Possible_Error (E => ES_Default); if SparkFormatCommandLineData.Content.Valid then -- go on to check selection Default_Function_Mode_Found := True; case E_Strings.Get_Element (E_Str => Opt_Val, Pos => 1) is when 'i' | 'I' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt_Val, Str => "in_mode") then SparkFormatCommandLineData.Content.Default_Function_Mode := SparkFormatCommandLineData.In_Mode; else SparkFormatCommandLineData.Content.Valid := False; Possible_Error (E => ES_Default); end if; when 'u' | 'U' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt_Val, Str => "unmoded") then SparkFormatCommandLineData.Content.Default_Function_Mode := SparkFormatCommandLineData.Unmoded; else SparkFormatCommandLineData.Content.Valid := False; Possible_Error (E => ES_Default); end if; when others => SparkFormatCommandLineData.Content.Valid := False; Possible_Error (E => ES_Default); end case; end if; else SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'e' | 'E' => case E_Strings.Get_Element (E_Str => Opt_Name, Pos => 4) is when 'a' | 'A' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt_Name, Str => "expand") then SparkFormatCommandLineData.Content.Valid := not Expand_Found; Possible_Error (E => ES_Duplicate); Expand_Found := True; SparkFormatCommandLineData.Content.Operation := SparkFormatCommandLineData.Expand; else SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'l' | 'L' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt_Name, Str => "exclude_export") then if not Exclude_Export_Found then if Opt_Val_OK and then Check_For_Dotted_Name (S => Opt_Val) then SparkFormatCommandLineData.Content.Valid := True; SparkFormatCommandLineData.Content.Exclude_Export := Opt_Val; else SparkFormatCommandLineData.Content.Valid := False; Possible_Error (E => ES_InvExclude); end if; else SparkFormatCommandLineData.Content.Valid := False; Possible_Error (E => ES_Duplicate); end if; Exclude_Export_Found := True; else SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'o' | 'O' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt_Name, Str => "export_indent") then Parse_Indent_Option (Opt_Value => Opt_Val, Opt_Value_OK => Opt_Val_OK, Indent_Value => Indent_Val, OK => OK); if OK and then not Export_Indent_Found then SparkFormatCommandLineData.Content.Valid := True; SparkFormatCommandLineData.Content.Export_Indent := Indent_Val; else SparkFormatCommandLineData.Content.Valid := False; end if; Possible_Error (E => ES_Indent); Export_Indent_Found := True; else SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when others => SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; when 'g' | 'G' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt_Name, Str => "global_indent") then 
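-- Illustrative example (assumed invocation; the command name and switch spellings are taken from this
-- file's own option names, e.g. "sparkformat -global_indent=4 -own_indent=inline"). Each *_indent option
-- below follows the same pattern: Parse_Indent_Option yields either a positive column number or the
-- Inline sentinel, the value is recorded on first use, and ES_Indent is reported when the value is
-- malformed or the option is repeated.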
Parse_Indent_Option (Opt_Value => Opt_Val, Opt_Value_OK => Opt_Val_OK, Indent_Value => Indent_Val, OK => OK); if OK and then not Global_Indent_Found then SparkFormatCommandLineData.Content.Valid := True; SparkFormatCommandLineData.Content.Global_Indent := Indent_Val; else SparkFormatCommandLineData.Content.Valid := False; end if; Possible_Error (E => ES_Indent); Global_Indent_Found := True; else SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'h' | 'H' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt_Name, Str => "help") then SparkFormatCommandLineData.Content.Valid := not Help_Found; Possible_Error (E => ES_Duplicate); Help_Found := True; SparkFormatCommandLineData.Content.Help := True; else SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'i' | 'I' => case E_Strings.Get_Element (E_Str => Opt_Name, Pos => 3) is when 'h' | 'H' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt_Name, Str => "inherit_indent") then Parse_Indent_Option (Opt_Value => Opt_Val, Opt_Value_OK => Opt_Val_OK, Indent_Value => Indent_Val, OK => OK); if OK and then not Inherit_Indent_Found then SparkFormatCommandLineData.Content.Valid := True; SparkFormatCommandLineData.Content.Inherit_Indent := Indent_Val; else SparkFormatCommandLineData.Content.Valid := False; end if; Possible_Error (E => ES_Indent); Inherit_Indent_Found := True; else SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'i' | 'I' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt_Name, Str => "initialization_indent") then Parse_Indent_Option (Opt_Value => Opt_Val, Opt_Value_OK => Opt_Val_OK, Indent_Value => Indent_Val, OK => OK); if OK and then not Initialization_Indent_Found then SparkFormatCommandLineData.Content.Valid := True; SparkFormatCommandLineData.Content.Initialization_Indent := Indent_Val; else SparkFormatCommandLineData.Content.Valid := False; end if; Possible_Error (E => ES_Indent); Initialization_Indent_Found := True; else SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'p' | 'P' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt_Name, Str => "import_indent") then Parse_Indent_Option (Opt_Value => Opt_Val, Opt_Value_OK => Opt_Val_OK, Indent_Value => Indent_Val, OK => OK); if OK and then not Import_Indent_Found then SparkFormatCommandLineData.Content.Valid := True; SparkFormatCommandLineData.Content.Import_Indent := Indent_Val; else SparkFormatCommandLineData.Content.Valid := False; end if; Possible_Error (E => ES_Indent); Import_Indent_Found := True; else SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when others => SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; when 'n' | 'N' => case E_Strings.Get_Element (E_Str => Opt_Name, Pos => 3) is when 'a' | 'A' => -- 3rd letter if CommandLineHandler.Check_Option_Name (Opt_Name => Opt_Name, Str => "noadd_modes") then SparkFormatCommandLineData.Content.Valid := not Add_Modes_Found; Possible_Error (E => ES_Contradict); Add_Modes_Found := True; SparkFormatCommandLineData.Content.Add_Modes := False; else SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 's' | 'S' => -- 3rd letter if
CommandLineHandler.Check_Option_Name (Opt_Name => Opt_Name, Str => "noswitch") then null; -- already dealt with, so ignore here else SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when others => SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; when 'o' | 'O' => case E_Strings.Get_Element (E_Str => Opt_Name, Pos => 2) is when 'r' | 'R' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt_Name, Str => "order") then if Opt_Val_OK then case E_Strings.Get_Element (E_Str => Opt_Val, Pos => 1) is when 'a' | 'A' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt_Val, Str => "alphabetic") then SparkFormatCommandLineData.Content.Valid := not Alphabetic_Ordering_Found; Possible_Error (E => ES_Contradict); Alphabetic_Ordering_Found := True; else SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Val); end if; when others => SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Val); end case; else SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Val); end if; else SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'w' | 'W' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt_Name, Str => "own_indent") then Parse_Indent_Option (Opt_Value => Opt_Val, Opt_Value_OK => Opt_Val_OK, Indent_Value => Indent_Val, OK => OK); if OK and then not Own_Indent_Found then SparkFormatCommandLineData.Content.Valid := True; SparkFormatCommandLineData.Content.Own_Indent := Indent_Val; else SparkFormatCommandLineData.Content.Valid := False; end if; Possible_Error (E => ES_Indent); Own_Indent_Found := True; else SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when others => SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; when 'p' | 'P' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt_Name, Str => "properties_indent") then Parse_Indent_Option (Opt_Value => Opt_Val, Opt_Value_OK => Opt_Val_OK, Indent_Value => Indent_Val, OK => OK); if OK and then not Properties_Indent_Found then SparkFormatCommandLineData.Content.Valid := True; SparkFormatCommandLineData.Content.Properties_Indent := Indent_Val; else SparkFormatCommandLineData.Content.Valid := False; end if; Possible_Error (E => ES_Indent); Properties_Indent_Found := True; else SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'r' | 'R' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt_Name, Str => "refinement_indent") then Parse_Indent_Option (Opt_Value => Opt_Val, Opt_Value_OK => Opt_Val_OK, Indent_Value => Indent_Val, OK => OK); if OK and then not Refinement_Indent_Found then SparkFormatCommandLineData.Content.Valid := True; SparkFormatCommandLineData.Content.Refinement_Indent := Indent_Val; else SparkFormatCommandLineData.Content.Valid := False; end if; Possible_Error (E => ES_Indent); Refinement_Indent_Found := True; else SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 's' | 'S' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt_Name, Str => "separator_indent") then Parse_Indent_Option (Opt_Value =>
Opt_Val, Opt_Value_OK => Opt_Val_OK, Indent_Value => Indent_Val, OK => OK); if OK and then not Separator_Indent_Found then SparkFormatCommandLineData.Content.Valid := True; SparkFormatCommandLineData.Content.Separator_Indent := Indent_Val; else SparkFormatCommandLineData.Content.Valid := False; end if; Possible_Error (E => ES_Indent); Separator_Indent_Found := True; else SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when 'v' | 'V' => if CommandLineHandler.Check_Option_Name (Opt_Name => Opt_Name, Str => "version") then SparkFormatCommandLineData.Content.Valid := not Version_Found; Possible_Error (E => ES_Duplicate); Version_Found := True; SparkFormatCommandLineData.Content.Version := True; else SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; when others => SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end case; else SparkFormatCommandLineData.Content.Valid := False; Possible_Error2 (E => ES_InvalidOption, F => Opt_Name); end if; exit when not SparkFormatCommandLineData.Content.Valid; end loop; end Parse_Command_Options; procedure ParseArguments (Command_String : in CommandLineHandler.Command_Strings; Next_Symbol : in CommandLineHandler.Symbols) --# global in out SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SparkFormatCommandLineData.Content, --# SPARK_IO.File_Sys from *, --# Command_String, --# Next_Symbol, --# SparkFormatCommandLineData.Content; -- pre Next_Symbol.Typ in {SString, SEmpty}; is Local_Next_Symbol : CommandLineHandler.Symbols; Local_Command_String : CommandLineHandler.Command_Strings; procedure ParseFileEntry (Command_String : in out CommandLineHandler.Command_Strings; Next_Symbol : in out CommandLineHandler.Symbols) --# global in out SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives Command_String, --# SparkFormatCommandLineData.Content from *, --# Next_Symbol & --# Next_Symbol from *, --# Command_String & --# SPARK_IO.File_Sys from *, --# Next_Symbol, --# SparkFormatCommandLineData.Content; -- pre Next_Symbol.Typ = CommandLineHandler.SString; -- post Next_Symbol.Typ in {CommandLineHandler.SComma, CommandLineHandler.SEmpty}; is FileName : E_Strings.T; begin case Next_Symbol.Typ is when CommandLineHandler.S_String => SparkFormatCommandLineData.Content.Number_Source := SparkFormatCommandLineData.Content.Number_Source + 1; FileName := Next_Symbol.The_String; FileSystem.Check_Extension (Fn => FileName, Ext => SparkFormatCommandLineData.Content.Source_Extension); SparkFormatCommandLineData.Content.Source_File_List (SparkFormatCommandLineData.Content.Number_Source). 
Source_Filename := FileName; Get_Next_Symbol (Command_String => Command_String, Next_Symbol => Next_Symbol); -- end if; when CommandLineHandler.S_Empty => null; when others => SparkFormatCommandLineData.Content.Valid := False; Possible_Error (E => ES_Source); end case; end ParseFileEntry; begin Local_Next_Symbol := Next_Symbol; Local_Command_String := Command_String; SparkFormatCommandLineData.Content.Number_Source := 0; loop ParseFileEntry (Local_Command_String, Local_Next_Symbol); exit when Local_Next_Symbol.Typ = CommandLineHandler.S_Empty or not SparkFormatCommandLineData.Content.Valid; if SparkFormatCommandLineData.Content.Number_Source = ExaminerConstants.MaxFilesOnCommandLine then SparkFormatCommandLineData.Content.Valid := False; Output_Error (E => EW_TooMany); exit; end if; if FileSystem.Use_Windows_Command_Line and Local_Next_Symbol.Typ = CommandLineHandler.S_Comma then -- CFR 1824: Allow comma or space as separator on Windows Get_Next_Symbol (Command_String => Local_Command_String, Next_Symbol => Local_Next_Symbol); end if; end loop; end ParseArguments; begin -- Parse_Command_Line; Local_Command_String := Command_String; Get_Next_Symbol (Command_String => Local_Command_String, Next_Symbol => Next_Symbol); Parse_Command_Options (Command_String => Local_Command_String, Next_Symbol => Next_Symbol); if SparkFormatCommandLineData.Content.Valid then ParseArguments (Local_Command_String, Next_Symbol); end if; end Parse_Command_Line; begin -- Process CommandLineHandler.Process_Defaults_From_Switch_File; Read_Command_Line (Command_String => Command_String); Parse_Command_Line (Command_String => Command_String); end Process; end SparkFormatCommandLineHandler; spark-2012.0.deb/sparkformat/vcg/0000755000175000017500000000000011753203756015560 5ustar eugeneugenspark-2012.0.deb/sparkformat/sparkformatcommandlinedata.adb0000644000175000017500000000417011753202340023031 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= package body SparkFormatCommandLineData is Ada_Extension : constant String := "ada"; procedure Initialize is begin Content := Command_Line_Contents' (Valid => True, Source_Extension => E_Strings.Copy_String (Str => Ada_Extension), Exclude_Export => E_Strings.Empty_String, Number_Source => 0, Source_File_List => Source_File_Lists'(others => Source_File_Entry'(Source_Filename => E_Strings.Empty_String)), Operation => Compress, Add_Modes => False, Global_Indent => Inline, Export_Indent => Inline, Import_Indent => Inline, Inherit_Indent => Inline, Own_Indent => Inline, Refinement_Indent => Inline, Constituent_Indent => Inline, Initialization_Indent => Inline, Separator_Indent => Inline, Properties_Indent => Inline, Help => False, Version => False, Default_Function_Mode => Unmoded); end Initialize; end SparkFormatCommandLineData; spark-2012.0.deb/sparkformat/sparkformat.idx0000644000175000017500000000131611753202340020025 0ustar eugeneugensuperindex is in ../examiner/spark.idx sparkformatcommandlinedata specification is in sparkformatcommandlinedata.ads sparkformatcommandlinedata body is in sparkformatcommandlinedata.adb sparkformatcommandlinehandler specification is in sparkformatcommandlinehandler.ads sparkformatcommandlinehandler body is in sparkformatcommandlinehandler.adb sparkprogram specification is in sparkprogram.ads sparkprogram body is in sparkprogram.adb sparkprogram.iteration subunit is in sparkprogram-iteration.adb sparkprogram.annotations subunit is in subprogram-annotations.adb sparkprogram.reformatter subunit is in subprogram-reformatter.adb sparkprogram.reformatter.simplelex subunit is in subprogram-reformatter-simplelex.adb spark-2012.0.deb/sparkformat/sparkprogram-reformatter.adb0000644000175000017500000012520711753202340022504 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with CommandLineData; with E_Strings; with SparkFormatCommandLineData; separate (SPARKProgram) package body Reformatter is Unmoded_Str : constant String := ""; In_Mode_Str : constant String := "in"; Out_Mode_Str : constant String := "out"; In_Out_Mode_Str : constant String := "in out"; Protected_Unmoded_Str : constant String := "protected"; Protected_In_Str : constant String := "protected in"; Protected_Out_Str : constant String := "protected out"; Task_Modifier_Str : constant String := "task"; type Format_Info is record Start_Col : E_Strings.Positions; Modifier_Col : E_Strings.Positions; Primary_Id_Col : E_Strings.Positions; Type_Mark_Col : E_Strings.Positions; Properties_Col : E_Strings.Positions; Modifiers_Present : Modifier_Use; Type_Mark_Present : Boolean; Property_List_Present : Boolean; end record; -- A simple lexer is required so that the capitalisation of -- names - particularly predefined type_marks such as Natural -- are not converted to "NATURAL". Also simplifies parsing of -- property lists. -- The lexer assumes that the input file contains only annotations -- and thus the annotation start and continuation symbols can be treated -- as whitespace. --# inherit Ada.Characters.Handling, --# Ada.Characters.Latin_1, --# Annotations, --# CommandLineData, --# E_Strings, --# SPARK_IO, --# White_Space; package SimpleLex is type State is private; type Token_Type is ( RW_Derives, RW_Global, RW_Inherit, RW_Initializes, RW_Main_Program, RW_Own, RW_From, RW_In, RW_Is, RW_Out, RW_Protected, RW_Task, Colon, Comma, Point, Semicolon, Other_Punct, -- for simple annotations we do not need to know any others Annotation_Start, Annotation_End, Identifier, Property_List); subtype Annotation_Signifiers is Token_Type range RW_Derives .. 
RW_Own; type Token_Record is record Token : Token_Type; Token_Value : E_Strings.T; end record; procedure Initialise (Input_File : in SPARK_IO.File_Type; Anno : in Annotations.Anno_Type; Lex_State : out State); --# derives Lex_State from Anno, --# Input_File; procedure Next (This : in out State; Token_Rec : out Token_Record); --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys, --# This from SPARK_IO.File_Sys, --# This & --# Token_Rec from CommandLineData.Content, --# SPARK_IO.File_Sys, --# This; function Get_Col_No (This : State) return E_Strings.Positions; private type State is record File : SPARK_IO.File_Type; Anno : Annotations.Anno_Type; Line : E_Strings.T; Index : E_Strings.Positions; In_Annotation : Boolean; end record; end SimpleLex; function "=" (Left, Right : SimpleLex.Token_Type) return Boolean renames SimpleLex."="; package body SimpleLex is separate; procedure Initialise (Anno : in Annotations.Anno_Type; Dotted_Names : in Boolean; The_Heap : in out Heap.HeapRecord; This : out State) is Loc_Modifier_Rel : LexTokenManager.Relation_Algebra.Relation; Loc_Type_Rel : LexTokenManager.Relation_Algebra.String.Relation; Loc_Property_Rel : LexTokenManager.Relation_Algebra.String.Relation; begin LexTokenManager.Relation_Algebra.Create_Relation (The_Heap => The_Heap, R => Loc_Modifier_Rel); LexTokenManager.Relation_Algebra.String.Create_Relation (The_Heap => The_Heap, R => Loc_Type_Rel); LexTokenManager.Relation_Algebra.String.Create_Relation (The_Heap => The_Heap, R => Loc_Property_Rel); This := State' (Anno => Anno, Relations => Relation_Type'(Modifier_Rel => Loc_Modifier_Rel, Type_Rel => Loc_Type_Rel, Property_Rel => Loc_Property_Rel), Parse_Stats => Statistics_Type'(Start_Col => E_Strings.Positions'First, Max_Modifier_Length => 0, Max_Primary_Id_Length => 0, Max_Type_Mark_Length => 0, Modifiers_Present => Modifier_Use'(others => False), Type_Mark_Present => False, Property_List_Present => False), Allow_Dotted_Names => Dotted_Names, Success => True); end Initialise; function Modifier_Length (Modifier : Variable_Modifier) return Natural is Result : Natural; begin case Modifier is when Unmoded => Result := Unmoded_Str'Length; when In_Mode => Result := In_Mode_Str'Length; when Out_Mode => Result := Out_Mode_Str'Length; when In_Out_Mode => Result := In_Out_Mode_Str'Length; when Protected_Unmoded => Result := Protected_Unmoded_Str'Length; when Protected_In => Result := Protected_In_Str'Length; when Protected_Out => Result := Protected_Out_Str'Length; when Task_Modifier => Result := Task_Modifier_Str'Length; end case; return Result; end Modifier_Length; function Modifier_String (Modifier : Variable_Modifier) return E_Strings.T is Result : E_Strings.T; begin case Modifier is when Unmoded => Result := E_Strings.Copy_String (Str => Unmoded_Str); when In_Mode => Result := E_Strings.Copy_String (Str => In_Mode_Str); when Out_Mode => Result := E_Strings.Copy_String (Str => Out_Mode_Str); when In_Out_Mode => Result := E_Strings.Copy_String (Str => In_Out_Mode_Str); when Protected_Unmoded => Result := E_Strings.Copy_String (Str => Protected_Unmoded_Str); when Protected_In => Result := E_Strings.Copy_String (Str => Protected_In_Str); when Protected_Out => Result := E_Strings.Copy_String (Str => Protected_Out_Str); when Task_Modifier => Result := E_Strings.Copy_String (Str => Task_Modifier_Str); end case; return Result; end Modifier_String; procedure Parse_Modifiers (Lex : in out SimpleLex.State; Token : in out SimpleLex.Token_Record; Modifier_Type : out 
Variable_Modifier) --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives Lex, --# Modifier_Type, --# SPARK_IO.File_Sys, --# Token from CommandLineData.Content, --# Lex, --# SPARK_IO.File_Sys, --# Token; is begin case Token.Token is when SimpleLex.RW_In => Modifier_Type := In_Mode; SimpleLex.Next (This => Lex, Token_Rec => Token); when SimpleLex.RW_Out => Modifier_Type := Out_Mode; SimpleLex.Next (This => Lex, Token_Rec => Token); when SimpleLex.RW_Protected => SimpleLex.Next (This => Lex, Token_Rec => Token); case Token.Token is when SimpleLex.RW_In => Modifier_Type := Protected_In; SimpleLex.Next (This => Lex, Token_Rec => Token); when SimpleLex.RW_Out => Modifier_Type := Protected_Out; SimpleLex.Next (This => Lex, Token_Rec => Token); when others => Modifier_Type := Protected_Unmoded; -- The current token is still required in this case end case; when SimpleLex.RW_Task => Modifier_Type := Task_Modifier; SimpleLex.Next (This => Lex, Token_Rec => Token); when others => Modifier_Type := Unmoded; -- The current token is still required in this case end case; end Parse_Modifiers; procedure Parse_Name (Lex : in out SimpleLex.State; Allow_Dotted_Names : in Boolean; Token : in out SimpleLex.Token_Record; Name : out E_Strings.T; OK : out Boolean) --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives Lex, --# OK, --# SPARK_IO.File_Sys, --# Token from Allow_Dotted_Names, --# CommandLineData.Content, --# Lex, --# SPARK_IO.File_Sys & --# Name from Allow_Dotted_Names, --# CommandLineData.Content, --# Lex, --# SPARK_IO.File_Sys, --# Token; is Loc_Name : E_Strings.T; Loc_OK : Boolean; begin Loc_OK := True; Loc_Name := Token.Token_Value; SimpleLex.Next (This => Lex, Token_Rec => Token); if Allow_Dotted_Names then loop exit when (not Loc_OK) or Token.Token /= SimpleLex.Point; E_Strings.Append_String (E_Str => Loc_Name, Str => "."); SimpleLex.Next (This => Lex, Token_Rec => Token); if Token.Token = SimpleLex.Identifier then E_Strings.Append_Examiner_String (E_Str1 => Loc_Name, E_Str2 => Token.Token_Value); SimpleLex.Next (This => Lex, Token_Rec => Token); else Loc_OK := False; end if; end loop; end if; Name := Loc_Name; OK := Loc_OK; end Parse_Name; procedure Parse_Name_List (Lex : in out SimpleLex.State; Allow_Dotted_Names : in Boolean; Token : in out SimpleLex.Token_Record; The_Heap : in out Heap.HeapRecord; The_Seq : in LexTokenManager.Seq_Algebra.Seq; Max_Name_Length : in out E_Strings.Lengths; OK : out Boolean) --# global in CommandLineData.Content; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives Lex, --# LexTokenManager.State, --# Max_Name_Length, --# SPARK_IO.File_Sys, --# Token from *, --# Allow_Dotted_Names, --# CommandLineData.Content, --# Lex, --# SPARK_IO.File_Sys, --# Token & --# OK from Allow_Dotted_Names, --# CommandLineData.Content, --# Lex, --# SPARK_IO.File_Sys, --# Token & --# Statistics.TableUsage, --# The_Heap from *, --# Allow_Dotted_Names, --# CommandLineData.Content, --# Lex, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# The_Heap, --# The_Seq, --# Token; is Loc_OK : Boolean; Name : E_Strings.T; Lex_Name : LexTokenManager.Lex_String; begin Loc_OK := Token.Token = SimpleLex.Identifier; while Loc_OK and Token.Token = SimpleLex.Identifier loop Parse_Name (Lex => Lex, Allow_Dotted_Names => Allow_Dotted_Names, Token => Token, Name => Name, OK => Loc_OK); if Loc_OK then LexTokenManager.Insert_Examiner_String (Str => Name, Lex_Str => Lex_Name); 
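-- The name just parsed has been interned as a Lex_String; it is now added to the caller's
-- sequence, and the running maximum name length is updated (feeding, for example,
-- Max_Primary_Id_Length in Statistics_Type) so that the reformatter can align columns later.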
LexTokenManager.Seq_Algebra.Add_Member (The_Heap => The_Heap, S => The_Seq, Given_Value => Lex_Name); if E_Strings.Get_Length (E_Str => Name) > Max_Name_Length then Max_Name_Length := E_Strings.Get_Length (E_Str => Name); end if; case Token.Token is when SimpleLex.Comma => SimpleLex.Next (This => Lex, Token_Rec => Token); when SimpleLex.Identifier => -- Two successive identifiers are not syntactically correct Loc_OK := False; when others => -- loop will terminate at current token null; end case; end if; end loop; OK := Loc_OK; end Parse_Name_List; procedure Parse_Type_Mark (Lex : in out SimpleLex.State; Token : in out SimpleLex.Token_Record; Type_Mark : out E_Strings.T; OK : in out Boolean) --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives Lex, --# OK, --# SPARK_IO.File_Sys, --# Token, --# Type_Mark from CommandLineData.Content, --# Lex, --# OK, --# SPARK_IO.File_Sys, --# Token; is Type_Mark_Str : E_Strings.T; begin if OK and Token.Token = SimpleLex.Colon then SimpleLex.Next (This => Lex, Token_Rec => Token); if Token.Token = SimpleLex.Identifier then Parse_Name (Lex => Lex, Allow_Dotted_Names => Allow_Dotted_Names_Const, Token => Token, Name => Type_Mark_Str, OK => OK); else OK := False; Type_Mark_Str := E_Strings.Empty_String; end if; else Type_Mark_Str := E_Strings.Empty_String; end if; Type_Mark := Type_Mark_Str; end Parse_Type_Mark; procedure Parse_Properties (Lex : in out SimpleLex.State; Token : in out SimpleLex.Token_Record; Property_List : out E_Strings.T; OK : in Boolean) --# global in CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives Lex, --# SPARK_IO.File_Sys from Lex, --# OK, --# SPARK_IO.File_Sys, --# Token & --# Property_List from OK, --# Token & --# Token from *, --# CommandLineData.Content, --# Lex, --# OK, --# SPARK_IO.File_Sys; is begin if OK and Token.Token = SimpleLex.Property_List then Property_List := Token.Token_Value; SimpleLex.Next (This => Lex, Token_Rec => Token); else Property_List := E_Strings.Empty_String; end if; end Parse_Properties; procedure Parse_The_Annotation (Lex : in SimpleLex.State; Token : in SimpleLex.Token_Record; Allow_Dotted_Names : in Boolean; The_Heap : in out Heap.HeapRecord; Relations : in Relation_Type; Parse_Stats : in out Statistics_Type; OK : out Boolean) --# global in CommandLineData.Content; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives LexTokenManager.State, --# SPARK_IO.File_Sys from *, --# Allow_Dotted_Names, --# CommandLineData.Content, --# Lex, --# SPARK_IO.File_Sys, --# Token & --# OK from Allow_Dotted_Names, --# CommandLineData.Content, --# Lex, --# SPARK_IO.File_Sys, --# Token & --# Parse_Stats, --# Statistics.TableUsage, --# The_Heap from *, --# Allow_Dotted_Names, --# CommandLineData.Content, --# Lex, --# LexTokenManager.State, --# Relations, --# SPARK_IO.File_Sys, --# The_Heap, --# Token; is Loc_OK : Boolean; Loc_Lex : SimpleLex.State; Next_Token : SimpleLex.Token_Record; Name_List : LexTokenManager.Seq_Algebra.Seq; First_Name : LexTokenManager.Seq_Algebra.Member_Of_Seq; Type_Mark : E_Strings.T; Property_List : E_Strings.T; First_Name_Lex : LexTokenManager.Lex_String; Type_Mark_Lex : LexTokenManager.Lex_String; Property_List_Lex : LexTokenManager.Lex_String; Modifier : Variable_Modifier; begin -- Parse_The_Annotation Loc_OK := True; Next_Token := Token; Loc_Lex := Lex; while Loc_OK and then not (Next_Token.Token in SimpleLex.Annotation_Signifiers or else Next_Token.Token = SimpleLex.Annotation_End) loop 
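-- Worked example (editorial note, not part of the original source): each pass of
-- this loop handles one clause of the annotation.  For a hypothetical clause
-- "protected in A, B, C : T (Integrity => 1);" the modifier "protected in" is
-- recorded against the first name only (A), B and C fall back to Unmoded, while
-- the type mark T and the property list are related to all three names, as the
-- code below makes explicit.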
LexTokenManager.Seq_Algebra.Create_Seq (The_Heap => The_Heap, S => Name_List); Parse_Modifiers (Lex => Loc_Lex, Token => Next_Token, Modifier_Type => Modifier); Parse_Name_List (Lex => Loc_Lex, Allow_Dotted_Names => Allow_Dotted_Names, Token => Next_Token, The_Heap => The_Heap, The_Seq => Name_List, Max_Name_Length => Parse_Stats.Max_Primary_Id_Length, OK => Loc_OK); Parse_Type_Mark (Lex => Loc_Lex, Token => Next_Token, Type_Mark => Type_Mark, OK => Loc_OK); Parse_Properties (Lex => Loc_Lex, Token => Next_Token, Property_List => Property_List, OK => Loc_OK); Loc_OK := Loc_OK and Next_Token.Token = SimpleLex.Semicolon; if Loc_OK then if not E_Strings.Is_Empty (E_Str => Type_Mark) then -- A type_mark applies to all names in the list LexTokenManager.Insert_Examiner_String (Str => Type_Mark, Lex_Str => Type_Mark_Lex); LexTokenManager.Relation_Algebra.String.Add_Col (The_Heap => The_Heap, R => Relations.Type_Rel, J => Type_Mark_Lex, S => Name_List); Parse_Stats.Type_Mark_Present := True; if E_Strings.Get_Length (E_Str => Type_Mark) > Parse_Stats.Max_Type_Mark_Length then Parse_Stats.Max_Type_Mark_Length := E_Strings.Get_Length (E_Str => Type_Mark); end if; end if; if not E_Strings.Is_Empty (E_Str => Property_List) then -- A property_list applies to all names in the list LexTokenManager.Insert_Examiner_String (Str => Property_List, Lex_Str => Property_List_Lex); LexTokenManager.Relation_Algebra.String.Add_Col (The_Heap => The_Heap, R => Relations.Property_Rel, J => Property_List_Lex, S => Name_List); Parse_Stats.Property_List_Present := True; end if; Parse_Stats.Modifiers_Present (Modifier) := True; if Modifier_Length (Modifier => Modifier) > Parse_Stats.Max_Modifier_Length then Parse_Stats.Max_Modifier_Length := Modifier_Length (Modifier => Modifier); end if; if Modifier /= Unmoded then -- A modifier only applies to the first name in the list First_Name := LexTokenManager.Seq_Algebra.First_Member (The_Heap => The_Heap, S => Name_List); First_Name_Lex := LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => First_Name); LexTokenManager.Seq_Algebra.Remove_Member (The_Heap => The_Heap, S => Name_List, Given_Value => First_Name_Lex); LexTokenManager.Relation_Algebra.Insert_Pair (The_Heap => The_Heap, R => Relations.Modifier_Rel, I => Variable_Modifier'Pos (Modifier), J => First_Name_Lex); end if; -- The remaining names in the list are unmoded if not LexTokenManager.Seq_Algebra.Is_Empty_Seq (The_Heap => The_Heap, S => Name_List) then LexTokenManager.Relation_Algebra.Add_Row (The_Heap => The_Heap, R => Relations.Modifier_Rel, I => Variable_Modifier'Pos (Unmoded), S => Name_List); Parse_Stats.Modifiers_Present (Unmoded) := True; end if; SimpleLex.Next (This => Loc_Lex, Token_Rec => Next_Token); end if; LexTokenManager.Seq_Algebra.Dispose_Of_Seq (The_Heap => The_Heap, S => Name_List); end loop; OK := Loc_OK; end Parse_The_Annotation; procedure Parse (This : in out State; The_Heap : in out Heap.HeapRecord; Temporary_File : in out SPARK_IO.File_Type) is Loc_OK : Boolean; Loc_Start_Col : E_Strings.Positions; Token : SimpleLex.Token_Record; Lex : SimpleLex.State; Parse_Stats : Statistics_Type; begin Parse_Stats := This.Parse_Stats; File_IO.Reset (Temporary_File); SimpleLex.Initialise (Input_File => Temporary_File, Anno => This.Anno, Lex_State => Lex); SimpleLex.Next (This => Lex, Token_Rec => Token); if Token.Token = SimpleLex.Annotation_Start then -- The start of an annotation has been located -- Take account of the length of the annotation_start symbol Loc_Start_Col := 
SimpleLex.Get_Col_No (This => Lex); if Loc_Start_Col >= 3 then Loc_Start_Col := Loc_Start_Col - 3; else Loc_Start_Col := 1; end if; SimpleLex.Next (This => Lex, Token_Rec => Token); if E_Strings.Eq_String (E_Str1 => Token.Token_Value, E_Str2 => Annotations.Intro (This.Anno)) then SimpleLex.Next (This => Lex, Token_Rec => Token); Parse_The_Annotation (Lex => Lex, Token => Token, Allow_Dotted_Names => This.Allow_Dotted_Names, The_Heap => The_Heap, Relations => This.Relations, Parse_Stats => Parse_Stats, OK => Loc_OK); else Loc_OK := False; end if; else Loc_OK := False; Loc_Start_Col := 1; -- Assume a Start_Col of 0 if not start of anno end if; This.Success := Loc_OK; Parse_Stats.Start_Col := Loc_Start_Col; This.Parse_Stats := Parse_Stats; end Parse; procedure Get_Related (The_Heap : in out Heap.HeapRecord; Relation : in LexTokenManager.Relation_Algebra.String.Relation; Var_Name : in LexTokenManager.Seq_Algebra.Member_Of_Seq; Related_Str : out E_Strings.T) --# global in LexTokenManager.State; --# in out Statistics.TableUsage; --# derives Related_Str, --# The_Heap from LexTokenManager.State, --# Relation, --# The_Heap, --# Var_Name & --# Statistics.TableUsage from *, --# LexTokenManager.State, --# Relation, --# The_Heap, --# Var_Name; is Seq : LexTokenManager.Seq_Algebra.Seq; First_Member : LexTokenManager.Seq_Algebra.Member_Of_Seq; Result : E_Strings.T; begin LexTokenManager.Relation_Algebra.String.Row_Extraction (The_Heap => The_Heap, R => Relation, Given_Index => LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => Var_Name), S => Seq); -- There should be only zero or one related string with a variable First_Member := LexTokenManager.Seq_Algebra.First_Member (The_Heap => The_Heap, S => Seq); if LexTokenManager.Seq_Algebra.Is_Null_Member (M => First_Member) then Result := E_Strings.Empty_String; else Result := LexTokenManager.Lex_String_To_String (Lex_Str => LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => First_Member)); end if; LexTokenManager.Seq_Algebra.Dispose_Of_Seq (The_Heap => The_Heap, S => Seq); Related_Str := Result; end Get_Related; procedure Write_Property_List (Anno : in Annotations.Anno_Type; Output : in SPARK_IO.File_Type; Formatting : in Format_Info; Properties : in E_Strings.T) --# global in CommandLineData.Content; --# in SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Anno, --# CommandLineData.Content, --# Formatting, --# Output, --# Properties, --# SparkFormatCommandLineData.Content; is I : E_Strings.Positions; begin I := 1; if SparkFormatCommandLineData.Content.Properties_Indent /= SparkFormatCommandLineData.Inline then SPARK_IO.New_Line (Output, 1); Annotations.Write (Anno, Output, Formatting.Start_Col); end if; SPARK_IO.Set_Col (Output, Formatting.Properties_Col); while I <= E_Strings.Get_Length (E_Str => Properties) loop if I + 2 <= E_Strings.Get_Length (E_Str => Properties) and then E_Strings.Get_Element (E_Str => Properties, Pos => I) = '-' and then E_Strings.Get_Element (E_Str => Properties, Pos => I + 1) = '-' and then E_Strings.Get_Element (E_Str => Properties, Pos => I + 2) = CommandLineData.Content.Anno_Char then SPARK_IO.New_Line (Output, 1); Annotations.Write (Anno, Output, Formatting.Start_Col); SPARK_IO.Set_Col (Output, Formatting.Properties_Col); I := I + 3; else SPARK_IO.Put_Char (Output, E_Strings.Get_Element (E_Str => Properties, Pos => I)); I := I + 1; end if; end loop; end Write_Property_List; procedure Write_Name (Anno : in Annotations.Anno_Type; 
Output : in SPARK_IO.File_Type; Formatting : in Format_Info; First_Name : in out Boolean; May_Use_Comma : in out Boolean; The_Heap : in out Heap.HeapRecord; Name : in LexTokenManager.Seq_Algebra.Member_Of_Seq; Modifier : in Variable_Modifier; Relations : in Relation_Type) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives First_Name from * & --# May_Use_Comma from Formatting, --# LexTokenManager.State, --# Modifier, --# Name, --# Relations, --# The_Heap & --# SPARK_IO.File_Sys from *, --# Anno, --# CommandLineData.Content, --# First_Name, --# Formatting, --# LexTokenManager.State, --# May_Use_Comma, --# Modifier, --# Name, --# Output, --# Relations, --# SparkFormatCommandLineData.Content, --# The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# Formatting, --# LexTokenManager.State, --# Name, --# Relations, --# The_Heap; is Type_Mark : E_Strings.T; Property_List : E_Strings.T; Simple_Name_List : Boolean; begin if Formatting.Type_Mark_Present then Get_Related (The_Heap => The_Heap, Relation => Relations.Type_Rel, Var_Name => Name, Related_Str => Type_Mark); else Type_Mark := E_Strings.Empty_String; end if; if Formatting.Property_List_Present then Get_Related (The_Heap => The_Heap, Relation => Relations.Property_Rel, Var_Name => Name, Related_Str => Property_List); else Property_List := E_Strings.Empty_String; end if; Simple_Name_List := Modifier = Unmoded and then E_Strings.Is_Empty (E_Str => Type_Mark) and then E_Strings.Is_Empty (E_Str => Property_List); if First_Name then First_Name := False; if Annotations.Indent (Anno) /= SparkFormatCommandLineData.Inline then SPARK_IO.New_Line (Output, 1); Annotations.Write (Anno, Output, Formatting.Start_Col); end if; else if May_Use_Comma and Simple_Name_List then SPARK_IO.Put_Line (Output, ",", 0); else SPARK_IO.Put_Line (Output, ";", 0); end if; Annotations.Write (Anno, Output, Formatting.Start_Col); end if; --# assert True; if Modifier /= Unmoded then SPARK_IO.Set_Col (Output, Formatting.Modifier_Col); E_Strings.Put_String (File => Output, E_Str => Modifier_String (Modifier => Modifier)); end if; SPARK_IO.Set_Col (Output, Formatting.Primary_Id_Col); E_Strings.Put_String (File => Output, E_Str => LexTokenManager.Lex_String_To_String (Lex_Str => LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => The_Heap, M => Name))); if not E_Strings.Is_Empty (E_Str => Type_Mark) then SPARK_IO.Set_Col (Output, Formatting.Type_Mark_Col); SPARK_IO.Put_String (Output, ": ", 0); E_Strings.Put_String (File => Output, E_Str => Type_Mark); end if; if not E_Strings.Is_Empty (E_Str => Property_List) then Write_Property_List (Anno => Anno, Output => Output, Formatting => Formatting, Properties => Property_List); end if; May_Use_Comma := Simple_Name_List; end Write_Name; procedure Write_Name_List (Anno : in Annotations.Anno_Type; Output : in SPARK_IO.File_Type; Formatting : in Format_Info; The_Heap : in out Heap.HeapRecord; Relations : in Relation_Type) --# global in CommandLineData.Content; --# in LexTokenManager.State; --# in SparkFormatCommandLineData.Content; --# in out SPARK_IO.File_Sys; --# in out Statistics.TableUsage; --# derives SPARK_IO.File_Sys from *, --# Anno, --# CommandLineData.Content, --# Formatting, --# LexTokenManager.State, --# Output, --# Relations, --# SparkFormatCommandLineData.Content, --# The_Heap & --# Statistics.TableUsage, --# The_Heap from *, --# Formatting, --# LexTokenManager.State, --# 
Relations, --# The_Heap; is The_Seq : LexTokenManager.Seq_Algebra.Seq; First_Name : Boolean; May_Use_Comma : Boolean; The_Iterator : Iteration.Iterator; begin First_Name := True; May_Use_Comma := False; for Modifier in Variable_Modifier loop if Formatting.Modifiers_Present (Modifier) then -- extract by modifier LexTokenManager.Relation_Algebra.Row_Extraction (The_Heap => The_Heap, R => Relations.Modifier_Rel, Given_Index => Variable_Modifier'Pos (Modifier), S => The_Seq); Iteration.Initialise (The_Heap => The_Heap, The_Seq => The_Seq, The_Iterator => The_Iterator); while not Iteration.Complete (The_Iterator) loop Write_Name (Anno => Anno, Output => Output, Formatting => Formatting, First_Name => First_Name, May_Use_Comma => May_Use_Comma, The_Heap => The_Heap, Name => Iteration.Current_Member (The_Iterator), Modifier => Modifier, Relations => Relations); Iteration.Next (The_Heap, The_Iterator); end loop; LexTokenManager.Seq_Algebra.Dispose_Of_Seq (The_Heap => The_Heap, S => The_Seq); end if; end loop; end Write_Name_List; procedure Reformat (This : in State; The_Heap : in out Heap.HeapRecord; Temporary_File : in out SPARK_IO.File_Type; Output : in SPARK_IO.File_Type; Success : out Boolean) is Formatting : Format_Info; Modifier_Col : E_Strings.Positions; Primary_Id_Col : E_Strings.Positions; Type_Mark_Col : E_Strings.Positions; Properties_Col : E_Strings.Positions; begin Modifier_Col := Annotations.Name1_Start_Col (This => This.Anno, Start_Col => This.Parse_Stats.Start_Col); if This.Parse_Stats.Max_Modifier_Length > 0 then Primary_Id_Col := (Modifier_Col + This.Parse_Stats.Max_Modifier_Length) + 1; else Primary_Id_Col := Modifier_Col; end if; Type_Mark_Col := (Primary_Id_Col + This.Parse_Stats.Max_Primary_Id_Length) + 1; if SparkFormatCommandLineData.Content.Properties_Indent /= SparkFormatCommandLineData.Inline then Properties_Col := SparkFormatCommandLineData.Content.Properties_Indent + 4; else -- Extra 2 columns are for the ": " preceding the Type_Mark Properties_Col := (Type_Mark_Col + This.Parse_Stats.Max_Type_Mark_Length) + 3; end if; Formatting := Format_Info' (Start_Col => This.Parse_Stats.Start_Col, Modifier_Col => Modifier_Col, Primary_Id_Col => Primary_Id_Col, Type_Mark_Col => Type_Mark_Col, Properties_Col => Properties_Col, Modifiers_Present => This.Parse_Stats.Modifiers_Present, Type_Mark_Present => This.Parse_Stats.Type_Mark_Present, Property_List_Present => This.Parse_Stats.Property_List_Present); if This.Success then Annotations.Write_Intro (This => This.Anno, Output => Output, Start_Col => Formatting.Start_Col); -- Writes inherits and own variables but not globals and derives Write_Name_List (Anno => This.Anno, Output => Output, Formatting => Formatting, The_Heap => The_Heap, Relations => This.Relations); SPARK_IO.Put_Line (Output, ";", 0); else File_IO.Reset (Temporary_File); SPARKProgram.Copy (Temporary_File, Output); end if; Success := This.Success; end Reformat; procedure Finalise (This : in out State; The_Heap : in out Heap.HeapRecord) is Loc_Relations : Relation_Type; begin Loc_Relations := This.Relations; LexTokenManager.Relation_Algebra.Dispose_Of_Relation (The_Heap => The_Heap, R => This.Relations.Modifier_Rel); LexTokenManager.Relation_Algebra.String.Dispose_Of_Relation (The_Heap => The_Heap, R => This.Relations.Type_Rel); LexTokenManager.Relation_Algebra.String.Dispose_Of_Relation (The_Heap => The_Heap, R => This.Relations.Property_Rel); This.Relations := Loc_Relations; end Finalise; end Reformatter; 
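-- Editorial sketch (not part of the original source): a minimal, standalone
-- illustration of the column arithmetic performed in Reformatter.Reformat above,
-- using hypothetical maximum lengths.  The procedure name and the literal widths
-- are invented for the example; only the formulae mirror the code.
with Ada.Text_IO;
procedure Column_Layout_Sketch is
   Modifier_Col          : constant Positive := 7;   -- hypothetical start column
   Max_Modifier_Length   : constant Natural  := 6;   -- e.g. "in out"
   Max_Primary_Id_Length : constant Natural  := 8;   -- longest variable name
   Max_Type_Mark_Length  : constant Natural  := 7;   -- longest type mark
   Primary_Id_Col        : constant Positive := (Modifier_Col + Max_Modifier_Length) + 1;
   Type_Mark_Col         : constant Positive := (Primary_Id_Col + Max_Primary_Id_Length) + 1;
   -- The extra columns allow for the ": " preceding the type mark.
   Properties_Col        : constant Positive := (Type_Mark_Col + Max_Type_Mark_Length) + 3;
begin
   Ada.Text_IO.Put_Line ("Primary_Id_Col =" & Positive'Image (Primary_Id_Col));   -- 14
   Ada.Text_IO.Put_Line ("Type_Mark_Col  =" & Positive'Image (Type_Mark_Col));    -- 23
   Ada.Text_IO.Put_Line ("Properties_Col =" & Positive'Image (Properties_Col));   -- 33
end Column_Layout_Sketch;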
spark-2012.0.deb/sparkformat/sparkformatcommandlinedata.ads0000644000175000017500000000546411753202340023061 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with ExaminerConstants; with E_Strings; --# inherit ExaminerConstants, --# E_Strings, --# FileSystem; package SparkFormatCommandLineData --# own Content : Command_Line_Contents; is Meta_File_Extension : constant String := "smf"; type Source_File_Entry is record Source_Filename : E_Strings.T; end record; subtype Source_File_Counts is Integer range 0 .. ExaminerConstants.MaxFilesOnCommandLine; subtype Source_File_Positions is Integer range 0 .. ExaminerConstants.MaxFilesOnCommandLine; type Source_File_Lists is array (Source_File_Positions) of Source_File_Entry; type Expand_Or_Compress is (Expand, Compress); type Function_Mode_Type is (In_Mode, Unmoded); type Command_Line_Contents is record Valid : Boolean; Source_Extension : E_Strings.T; Exclude_Export : E_Strings.T; Number_Source : Source_File_Counts; Source_File_List : Source_File_Lists; Operation : Expand_Or_Compress; Add_Modes : Boolean; Global_Indent : Natural; Export_Indent : Natural; Import_Indent : Natural; Inherit_Indent : Natural; Own_Indent : Natural; Refinement_Indent : Natural; Constituent_Indent : Natural; Initialization_Indent : Natural; Properties_Indent : Natural; Separator_Indent : Natural; Help : Boolean; Version : Boolean; Default_Function_Mode : Function_Mode_Type; end record; Content : Command_Line_Contents; -- 0 is used as a sentinel value to mean "inline" output -- of globals, imports, exports and separators. Inline : constant Natural := 0; procedure Initialize; --# global out Content; --# derives Content from ; end SparkFormatCommandLineData; spark-2012.0.deb/sparkformat/sparkprogram-iteration.adb0000644000175000017500000002043311753202340022143 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= separate (SPARKProgram) package body Iteration is procedure FindNextAlphabetic (TheHeap : in Heap.HeapRecord; TheIterator : in out Iterator) --# global in LexTokenManager.State; --# derives TheIterator from *, --# LexTokenManager.State, --# TheHeap; is FirstMember : LexTokenManager.Seq_Algebra.Member_Of_Seq; Placeholder : LexTokenManager.Lex_String; SeqComplete : Boolean; NextItem : LexTokenManager.Seq_Algebra.Member_Of_Seq; ThisMember : LexTokenManager.Seq_Algebra.Member_Of_Seq; ThisLexString : LexTokenManager.Lex_String; NextItemLex : LexTokenManager.Lex_String; begin --------------------------------------------------------------------------------------- -- We have a sequence of (lex) strings in no particular order. To return them -- in alphabetical order we go through the whole sequence looking for the first -- item in alphabetical order, return it, then start again looking for the next -- item and so on. To do this we need to use a placeholder to tell us what the last -- thing we returned was so that the state of the search is preserved between calls. -- -- Each time this routine is called it loops over the whole sequence, comparing each -- item with the placeholder to try and find the next best match. -- (Note that it may be possible, and more efficient, to do this by deleting items -- from the sequence once they have been returned, but need to be sure that sequences -- are never re-used, eg when several exports have the same set of imports.) -- Consider doing this later if performance is an issue. -- -- We know we have finished when we have traversed the whole sequence without finding -- a better match. -- -- Note: -- The sequence is very likely to be in alphabetical order already. If it is then -- we can just write it straight out. If SPARKFormat needs to be made faster then -- this subprogram could check whether the sequence is already sorted on the first -- pass through (easy to check). If it is then it could just be written out in the -- order in which items occur in the sequence. --------------------------------------------------------------------------------------- FirstMember := TheIterator.First_Member; Placeholder := TheIterator.Placeholder; ThisMember := FirstMember; SeqComplete := True; -- If this is the first call then initialize NextItemLex to first item in sequence. -- Otherwise, the best match so far is the last thing that was written. if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => Placeholder, Lex_Str2 => LexTokenManager.Null_String) = LexTokenManager.Str_Eq then NextItemLex := LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => TheHeap, M => ThisMember); NextItem := ThisMember; else NextItemLex := Placeholder; NextItem := ThisMember; end if; loop exit when LexTokenManager.Seq_Algebra.Is_Null_Member (M => ThisMember); ThisLexString := LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => TheHeap, M => ThisMember); -- For this to be the next item to write it has to come strictly after the last item that was written -- (Note that this test will fail in the case of MultiplyToken so we don't need a separate test to avoid -- writing it out in the middle of a list.) 
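-- Worked example (editorial note, not part of the original source) of the scheme
-- described above: for a sequence holding "Gamma", "alpha", "Beta" (in that stored
-- order) successive calls behave as follows, all comparisons being case insensitive:
--   call 1: Placeholder is the null string, so every member qualifies and "alpha"
--           is selected; it becomes the new Placeholder.
--   call 2: only members strictly after "alpha" qualify, so "Beta" is selected.
--   call 3: "Gamma" is selected.
--   call 4: no member follows "Gamma"; SeqComplete remains True and the iterator
--           reports completion.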
if LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => ThisLexString, Lex_Str2 => Placeholder) = LexTokenManager.Str_Second then -- If NextItemLex = Placeholder it indicates that we haven't updated NextItemLex on this -- pass, so NextItemLex becomes the current item (provided current item is after Placeholder). -- Or, if this item is before (or equal to) the current best match then it becomes the new best match. if (LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => NextItemLex, Lex_Str2 => Placeholder) = LexTokenManager.Str_Eq) or else (LexTokenManager.Lex_String_Case_Insensitive_Compare (Lex_Str1 => NextItemLex, Lex_Str2 => ThisLexString) /= LexTokenManager.Str_First) then NextItemLex := ThisLexString; NextItem := ThisMember; SeqComplete := False; end if; end if; ThisMember := LexTokenManager.Seq_Algebra.Next_Member (The_Heap => TheHeap, M => ThisMember); end loop; TheIterator.Placeholder := LexTokenManager.Seq_Algebra.Value_Of_Member (The_Heap => TheHeap, M => NextItem); TheIterator.Current_Member := NextItem; TheIterator.Complete := SeqComplete; end FindNextAlphabetic; procedure Initialise (The_Heap : in Heap.HeapRecord; The_Seq : in LexTokenManager.Seq_Algebra.Seq; The_Iterator : out Iterator) is begin The_Iterator := Iterator' (First_Member => LexTokenManager.Seq_Algebra.First_Member (The_Heap => The_Heap, S => The_Seq), Current_Member => LexTokenManager.Seq_Algebra.First_Member (The_Heap => The_Heap, S => The_Seq), Placeholder => LexTokenManager.Null_String, Complete => LexTokenManager.Seq_Algebra.Is_Empty_Seq (The_Heap => The_Heap, S => The_Seq)); FindNextAlphabetic (TheHeap => The_Heap, TheIterator => The_Iterator); end Initialise; procedure Next (The_Heap : in Heap.HeapRecord; The_Iterator : in out Iterator) is begin if not LexTokenManager.Seq_Algebra.Is_Null_Member (M => The_Iterator.Current_Member) then FindNextAlphabetic (TheHeap => The_Heap, TheIterator => The_Iterator); else -- This indicates that CurrentMember has not changed. The_Iterator.Complete := True; end if; end Next; function Complete (The_Iterator : Iterator) return Boolean is begin return The_Iterator.Complete; end Complete; function Current_String (The_Iterator : Iterator) return LexTokenManager.Lex_String is begin return The_Iterator.Placeholder; end Current_String; function Current_Member (The_Iterator : Iterator) return LexTokenManager.Seq_Algebra.Member_Of_Seq is begin return The_Iterator.Current_Member; end Current_Member; end Iteration; spark-2012.0.deb/sparkmake/0000755000175000017500000000000011753203757014427 5ustar eugeneugenspark-2012.0.deb/sparkmake/unit.ads0000644000175000017500000001142111753202340016061 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. 
If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with E_Strings; with StringList; --# inherit CommandLineData, --# Dictionary, --# ErrorHandler, --# E_Strings, --# LexTokenManager, --# SparkLex, --# SparkMakeDebug, --# SparkMakeErrors, --# SPARK_IO, --# SP_Symbols, --# StringList, --# TokenManager; package Unit is type Kind is ( Package_Specification_Unit, Public_Child_Package_Specification_Unit, Private_Child_Package_Specification_Unit, Main_Program_Unit, Package_Body_Unit, Separate_Body_Unit); subtype Specification_Unit is Kind range Package_Specification_Unit .. Private_Child_Package_Specification_Unit; subtype Child_Specification_Unit is Kind range Public_Child_Package_Specification_Unit .. Private_Child_Package_Specification_Unit; type Id is record The_Name : E_Strings.T; The_Kind : Kind; end record; Null_Id : constant Id := Id'(The_Name => E_Strings.Empty_String, The_Kind => Kind'First); type Object is record The_Id : Id; The_File : E_Strings.T; The_Withed_Units : StringList.Object; The_Inherited_Units : StringList.Object; end record; Null_Object : constant Object := Object' (The_Id => Null_Id, The_File => E_Strings.Empty_String, The_Withed_Units => StringList.Null_Object, The_Inherited_Units => StringList.Null_Object); ----------------------------------------------------------------------------- function Kind_To_String (The_Unit : Kind) return E_Strings.T; ----------------------------------------------------------------------------- function Are_Equal (L, R : Id) return Boolean; ----------------------------------------------------------------------------- function Less_Than (L, R : Id) return Boolean; ----------------------------------------------------------------------------- function Prefix (E_Str : E_Strings.T) return E_Strings.T; ----------------------------------------------------------------------------- function Construct_Spec_Unit_Id (The_Name : E_Strings.T; Is_Private : Boolean) return Id; ----------------------------------------------------------------------------- procedure Get_Unit (In_File : in E_Strings.T; The_Unit : out Object); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# out SparkLex.Curr_Line; --# derives ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Unit from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# In_File, --# LexTokenManager.State, --# SPARK_IO.File_Sys; -- -- Extracts the Unit information from the given file. 
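-- Worked examples (editorial note, not part of the original source), using the
-- hypothetical unit name "A.B.C":
--   Prefix ("A.B.C") returns "A.B", and Prefix ("A") returns the empty string,
--     since "A" contains no dot;
--   Construct_Spec_Unit_Id ("A",     Is_Private => False) classifies the unit as
--     Package_Specification_Unit (no prefix, so not a child);
--   Construct_Spec_Unit_Id ("A.B.C", Is_Private => False) yields
--     Public_Child_Package_Specification_Unit;
--   Construct_Spec_Unit_Id ("A.B.C", Is_Private => True)  yields
--     Private_Child_Package_Specification_Unit.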
----------------------------------------------------------------------------- procedure Output_Object (The_Unit : in Object); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# The_Unit; -- -- For debug only procedure Output_Id (The_Unit : in Id); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# The_Unit; -- -- For debug only end Unit; spark-2012.0.deb/sparkmake/units.adb0000644000175000017500000001206311753202340016226 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SparkMakeErrors; package body Units is ---------------------------------------------------------- -- This package body is NOT SPARK, and should never be -- -- presented to the Examiner -- ---------------------------------------------------------- ---------------------------------------------------------- -- Sorting ---------------------------------------------------------- procedure Sort (TheUnits : in out Stack) is Result : Stack; Id : Unit.Id; function Insert (TheItem : Unit.Id; InStack : Stack) return Stack -- Inserts TheItem into InStack according to the Unit.LessThan -- function. is begin if InStack = NullStack then -- The stack is empty so return a one entry stack return new Node'(TheItem => TheItem, Next => null); elsif Unit.Less_Than (L => TheItem, R => InStack.all.TheItem) then -- TheItem belongs before the head. 
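-- Worked example (editorial note, not part of the original source): inserting C
-- into the sorted stack A -> B -> D recurses past A and B (both Less_Than C),
-- then hits this branch at D and yields A -> B -> C -> D; Sort builds its Result
-- stack by repeating this insertion for every popped unit.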
return new Node'(TheItem => TheItem, Next => InStack); else -- TheItem comes somewhere after the head return new Node'(TheItem => InStack.all.TheItem, Next => Insert (TheItem, InStack.all.Next)); end if; end Insert; begin Result := NullStack; while TheUnits /= NullStack loop Pop (TheStack => TheUnits, TheUnit => Id); Result := Insert (TheItem => Id, InStack => Result); end loop; TheUnits := Result; exception when others => SparkMakeErrors.Fatal ("Exception raised in Units.Sort"); end Sort; ---------------------------------------------------------- -- Stack operations ---------------------------------------------------------- function InStack (TheUnit : in Unit.Id; TheStack : in Stack) return Boolean is begin if TheStack = NullStack then return False; elsif Unit.Are_Equal (L => TheUnit, R => TheStack.all.TheItem) then return True; else return InStack (TheUnit => TheUnit, TheStack => TheStack.all.Next); end if; exception when others => SparkMakeErrors.Fatal ("Exception raised in Units.InStack"); return False; end InStack; procedure Push (TheStack : in out Stack; TheUnit : in Unit.Id) is begin TheStack := new Node'(TheItem => TheUnit, Next => TheStack); exception when others => SparkMakeErrors.Fatal ("Exception raised in Units.Push"); end Push; procedure Pop (TheStack : in out Stack; TheUnit : out Unit.Id) is begin TheUnit := TheStack.all.TheItem; TheStack := TheStack.all.Next; exception when others => SparkMakeErrors.Fatal ("Exception raised in Units.Pop"); end Pop; function IsEmpty (TheStack : Stack) return Boolean is begin return TheStack = NullStack; exception when others => SparkMakeErrors.Fatal ("Exception raised in Units.IsEmpty"); return False; end IsEmpty; ---------------------------------------------------------- -- Iteration ---------------------------------------------------------- procedure Init_Iterator (TheStack : in Stack; TheIterator : out Iterator) is begin TheIterator := Iterator (TheStack); end Init_Iterator; function Iterated (TheIterator : in Iterator) return Boolean is begin return TheIterator = Iterator (NullStack); end Iterated; procedure Iterate (TheIterator : in out Iterator; TheUnit : out Unit.Id) is begin TheUnit := TheIterator.all.TheItem; TheIterator := Iterator (TheIterator.all.Next); exception when others => SparkMakeErrors.Fatal ("Exception raised in Units.Iterate"); end Iterate; end Units; spark-2012.0.deb/sparkmake/unit.adb0000644000175000017500000012302411753202340016043 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with CommandLineData; with E_Strings.Not_SPARK; with SparkLex; with SPARK_IO; with SP_Symbols; with SparkMakeDebug; with SparkMakeErrors; with TokenManager; use type SP_Symbols.SP_Terminal; use type SPARK_IO.File_Status; use type TokenManager.Token; use type CommandLineData.Language_Profiles; package body Unit is function Kind_To_String (The_Unit : Kind) return E_Strings.T is Result : E_Strings.T; begin case The_Unit is when Specification_Unit => Result := E_Strings.Copy_String (Str => "specification"); when Package_Body_Unit => Result := E_Strings.Copy_String (Str => "body"); when Separate_Body_Unit | Main_Program_Unit => Result := E_Strings.Copy_String (Str => "subunit"); end case; return Result; end Kind_To_String; ----------------------------------------------------------------------------- function Are_Equal (L, R : Id) return Boolean is begin return E_Strings.Eq_String (E_Str1 => L.The_Name, E_Str2 => R.The_Name) and then L.The_Kind = R.The_Kind; end Are_Equal; ------------------------------------------------------------------------------ function Less_Than (L, R : Id) return Boolean is Result : Boolean; The_Order : E_Strings.Order_Types; begin The_Order := E_Strings.Lex_Order (First_Name => L.The_Name, Second_Name => R.The_Name); case The_Order is when E_Strings.First_One_First => Result := True; when E_Strings.Second_One_First => Result := False; when E_Strings.Neither_First => Result := L.The_Kind < R.The_Kind; end case; return Result; end Less_Than; ------------------------------------------------------------------------------ procedure Open (The_Filename : in E_Strings.T; The_File_Id : out SPARK_IO.File_Type; Success : out Boolean) --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys, --# Success, --# The_File_Id from SPARK_IO.File_Sys, --# The_Filename; is Status : SPARK_IO.File_Status; begin The_File_Id := SPARK_IO.Null_File; E_Strings.Open (File => The_File_Id, Mode_Of_File => SPARK_IO.In_File, Name_Of_File => The_Filename, Form_Of_File => "", Status => Status); Success := Status = SPARK_IO.Ok; end Open; ----------------------------------------------------------------------------- function Prefix (E_Str : E_Strings.T) return E_Strings.T -- Returns the text up to but not including the final dot. -- E.g. Prefix (A.B.C.D) returns A.B.C is Char_Found : Boolean; Char_Pos : E_Strings.Positions; Last_Pos : E_Strings.Lengths := 0; Result : E_Strings.T; begin -- Look for the last dot. loop E_Strings.Find_Char_After (E_Str => E_Str, Search_Start => Last_Pos + 1, Search_Char => '.', Char_Found => Char_Found, Char_Pos => Char_Pos); exit when not Char_Found; if Char_Found then Last_Pos := Char_Pos; end if; end loop; -- Last Pos contains the position of the last dot. if Last_Pos = 0 then -- There was not dot so return empty string. Result := E_Strings.Empty_String; else -- Return the text up to but not including the last dot. 
Result := E_Strings.Section (E_Str => E_Str, Start_Pos => 1, Length => Last_Pos - 1); end if; return Result; end Prefix; ----------------------------------------------------------------------------- function Construct_Spec_Unit_Id (The_Name : E_Strings.T; Is_Private : Boolean) return Id is The_Result : Id; begin if E_Strings.Is_Empty (E_Str => Prefix (The_Name)) then The_Result := Id'(The_Name => The_Name, The_Kind => Package_Specification_Unit); elsif Is_Private then The_Result := Id'(The_Name => The_Name, The_Kind => Private_Child_Package_Specification_Unit); else The_Result := Id'(The_Name => The_Name, The_Kind => Public_Child_Package_Specification_Unit); end if; return The_Result; end Construct_Spec_Unit_Id; ----------------------------------------------------------------------------- procedure Get_Unit (In_File : in E_Strings.T; The_Unit : out Object) is Token_It : TokenManager.Iterator; Current_Token : TokenManager.Token; The_File_Id : SPARK_IO.File_Type; Status : SPARK_IO.File_Status; Success : Boolean; Done : Boolean := False; ------------------------------------------------------------------ procedure Output_Unexpected_Token (Expected : in SP_Symbols.SP_Symbol; Found : in TokenManager.Token) --# derives null from Expected, --# Found; is --# hide Output_Unexpected_Token; E_Str : E_Strings.T; begin E_Str := TokenManager.To_String (Tok => Found); SparkMakeDebug.Report_Cond_Text (Cond => Expected /= SP_Symbols.SPEND, True_Text => "Unexpected token. " & "Expecting " & SP_Symbols.SP_Symbol'Image (Expected) & ", " & "but found " & E_Strings.Not_SPARK.Get_String (E_Str => E_Str), False_Text => "Unexpected token. " & "Found " & E_Strings.Not_SPARK.Get_String (E_Str => E_Str)); end Output_Unexpected_Token; ------------------------------------------------------------------ procedure Assert_Next_Token (Is_Symbol : in SP_Symbols.SP_Symbol) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# in out Token_It; --# out Success; --# derives ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token_It from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token_It & --# Success from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Is_Symbol, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token_It; is Current_Token : TokenManager.Token; begin -- Get the next token. TokenManager.Next (It => Token_It); Current_Token := TokenManager.Current (Token_It); -- Is it what we were expecting? 
Success := Current_Token.Kind = Is_Symbol; if not Success then Output_Unexpected_Token (Expected => Is_Symbol, Found => Current_Token); end if; end Assert_Next_Token; ------------------------------------------------------------------ procedure Get_And_Assert_Next_Token (Is_Symbol : in SP_Symbols.SP_Symbol; Current_Token : out TokenManager.Token) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# in out Token_It; --# out Success; --# derives Current_Token, --# Success from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Is_Symbol, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token_It & --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token_It from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token_It; is begin Assert_Next_Token (Is_Symbol => Is_Symbol); if Success then Current_Token := TokenManager.Current (Token_It); else Current_Token := TokenManager.Null_Token; end if; end Get_And_Assert_Next_Token; ------------------------------------------------------------------ procedure Process_With_Clause --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# in out The_Unit; --# in out Token_It; --# out Success; --# derives ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Unit, --# Token_It from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token_It & --# Success from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token_It; is Current_Token : TokenManager.Token; begin SparkMakeDebug.Report_Text (Text => "found with clause"); Get_And_Assert_Next_Token (Is_Symbol => SP_Symbols.identifier, Current_Token => Current_Token); while Success loop -- Add this unit to the list of withed units StringList.Add_To_Front (To_List => The_Unit.The_Withed_Units, The_Item => Current_Token.Value); -- What's next? TokenManager.Next (It => Token_It); Current_Token := TokenManager.Current (Token_It); -- semicolon marks the end of the with clause. exit when Current_Token.Kind = SP_Symbols.semicolon; -- otherwise we must have a comma. 
Success := Current_Token.Kind = SP_Symbols.comma; if Success then Get_And_Assert_Next_Token (Is_Symbol => SP_Symbols.identifier, Current_Token => Current_Token); else Output_Unexpected_Token (Expected => SP_Symbols.comma, Found => Current_Token); end if; end loop; SparkMakeDebug.Report_List (Text => "The withed units are: ", List => The_Unit.The_Withed_Units); end Process_With_Clause; ------------------------------------------------------------------ procedure Process_Inherit_Clause --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# in out The_Unit; --# in out Token_It; --# out Success; --# derives ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Unit, --# Token_It from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token_It & --# Success from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token_It; is Current_Token : TokenManager.Token; begin SparkMakeDebug.Report_Text (Text => "found inherit clause"); Get_And_Assert_Next_Token (Is_Symbol => SP_Symbols.identifier, Current_Token => Current_Token); while Success loop -- Add this unit to the list of inherited units StringList.Add_To_Front (To_List => The_Unit.The_Inherited_Units, The_Item => Current_Token.Value); -- What's next? TokenManager.Next (It => Token_It); Current_Token := TokenManager.Current (Token_It); -- semicolon marks the end of the inherit clause. if Current_Token.Kind = SP_Symbols.semicolon then Assert_Next_Token (Is_Symbol => SP_Symbols.annotation_end); exit; end if; -- otherwise we must have a comma. 
Success := Current_Token.Kind = SP_Symbols.comma; if Success then Get_And_Assert_Next_Token (Is_Symbol => SP_Symbols.identifier, Current_Token => Current_Token); else Output_Unexpected_Token (Expected => SP_Symbols.comma, Found => Current_Token); end if; end loop; SparkMakeDebug.Report_List (Text => "The inherited units are: ", List => The_Unit.The_Inherited_Units); end Process_Inherit_Clause; ------------------------------------------------------------------ procedure Process_Package_Specification (Is_Private : in Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# in out The_Unit; --# in out Token_It; --# out Done; --# out Success; --# derives Done from & --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Success, --# Token_It from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token_It & --# The_Unit from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# Is_Private, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token_It; is Current_Token : TokenManager.Token; begin SparkMakeDebug.Report_Cond_Text (Cond => Is_Private, True_Text => "found private package specification", False_Text => "found package specification"); -- Get the package name Get_And_Assert_Next_Token (Is_Symbol => SP_Symbols.identifier, Current_Token => Current_Token); if Success then SparkMakeDebug.Report_Text_E_Text (Text => "with name ", E_Text => Current_Token.Value); The_Unit.The_Id := Construct_Spec_Unit_Id (The_Name => Current_Token.Value, Is_Private => Is_Private); end if; Done := True; end Process_Package_Specification; ------------------------------------------------------------------ procedure Process_Package_Body --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# in out The_Unit; --# in out Token_It; --# out Done; --# out Success; --# derives Done from & --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Unit, --# Token_It from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token_It & --# Success from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token_It; is Current_Token : TokenManager.Token; begin SparkMakeDebug.Report_Text (Text => "found package body"); -- Get the package name. 
Get_And_Assert_Next_Token (Is_Symbol => SP_Symbols.identifier, Current_Token => Current_Token); if Success then SparkMakeDebug.Report_Text_E_Text (Text => "with name ", E_Text => Current_Token.Value); The_Unit.The_Id := Id'(The_Name => Current_Token.Value, The_Kind => Package_Body_Unit); end if; Done := True; end Process_Package_Body; ------------------------------------------------------------------ procedure Process_Package --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# in out The_Unit; --# in out Token_It; --# out Done; --# out Success; --# derives Done, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Success, --# Token_It from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token_It & --# The_Unit from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token_It; is Current_Token : TokenManager.Token; begin TokenManager.Look_Ahead (It => Token_It); Current_Token := TokenManager.Current (Token_It); -- Is this a package spec or body? if Current_Token.Kind = SP_Symbols.RWbody then -- Read over the reserved word body TokenManager.Next (It => Token_It); Process_Package_Body; else Process_Package_Specification (Is_Private => False); end if; end Process_Package; ------------------------------------------------------------------ procedure Process_Private_Child_Specification --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out Done; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# in out The_Unit; --# in out Token_It; --# out Success; --# derives Done, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Unit, --# Token_It from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token_It & --# Success from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token_It; is begin SparkMakeDebug.Report_Text (Text => "found reserved word private"); -- Must be reserved word package Assert_Next_Token (Is_Symbol => SP_Symbols.RWpackage); if Success then Process_Package_Specification (Is_Private => True); end if; end Process_Private_Child_Specification; ------------------------------------------------------------------ procedure Process_Separate --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# in out The_Unit; --# in out Token_It; --# out Done; --# out Success; --# derives Done from & --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Unit, --# Token_It from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token_It & --# Success from CommandLineData.Content, --# 
Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token_It; is Current_Token : TokenManager.Token; begin SparkMakeDebug.Report_Text (Text => "found separate"); Assert_Next_Token (Is_Symbol => SP_Symbols.left_paren); if Success then -- Get the parent name. Get_And_Assert_Next_Token (Is_Symbol => SP_Symbols.identifier, Current_Token => Current_Token); if Success then SparkMakeDebug.Report_Text_E_Text (Text => "of enclosing unit ", E_Text => Current_Token.Value); The_Unit.The_Id.The_Kind := Separate_Body_Unit; -- The parent The_Unit.The_Id.The_Name := Current_Token.Value; Assert_Next_Token (Is_Symbol => SP_Symbols.right_paren); if Success then TokenManager.Next (It => Token_It); Current_Token := TokenManager.Current (Token_It); -- If language is SPARK 2005 then allow for an optional overriding_indicator if CommandLineData.Content.Language_Profile = CommandLineData.SPARK2005 then if Current_Token.Kind = SP_Symbols.RWnot then Assert_Next_Token (Is_Symbol => SP_Symbols.RWoverriding); TokenManager.Next (Token_It); Current_Token := TokenManager.Current (Token_It); elsif Current_Token.Kind = SP_Symbols.RWoverriding then TokenManager.Next (Token_It); Current_Token := TokenManager.Current (Token_It); end if; end if; -- Is this a procedure, function, package or task? if Current_Token.Kind = SP_Symbols.RWpackage or Current_Token.Kind = SP_Symbols.RWtask then Assert_Next_Token (Is_Symbol => SP_Symbols.RWbody); elsif Current_Token.Kind /= SP_Symbols.RWprocedure and Current_Token.Kind /= SP_Symbols.RWfunction then Success := False; Output_Unexpected_Token (Expected => SP_Symbols.SPEND, Found => Current_Token); end if; if Success then -- Get the unit name. Get_And_Assert_Next_Token (Is_Symbol => SP_Symbols.identifier, Current_Token => Current_Token); if Success then E_Strings.Append_String (E_Str => The_Unit.The_Id.The_Name, Str => "."); E_Strings.Append_Examiner_String (E_Str1 => The_Unit.The_Id.The_Name, E_Str2 => Current_Token.Value); SparkMakeDebug.Report_Text_E_Text (Text => "Full unit name is ", E_Text => The_Unit.The_Id.The_Name); end if; end if; end if; end if; end if; Done := True; end Process_Separate; ------------------------------------------------------------------ procedure Process_Main_Program --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# in out The_Unit; --# in out Token_It; --# out Done; --# out Success; --# derives Done from & --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# The_Unit, --# Token_It from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token_It & --# Success from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token_It; is Current_Token : TokenManager.Token; begin SparkMakeDebug.Report_Text (Text => "found main program"); loop TokenManager.Next (It => Token_It); Current_Token := TokenManager.Current (Token_It); exit when Current_Token.Kind = SP_Symbols.SPEND or Current_Token.Kind = SP_Symbols.RWprocedure; end loop; if Current_Token.Kind = SP_Symbols.RWprocedure then -- Get the main program name. 
Get_And_Assert_Next_Token (Is_Symbol => SP_Symbols.identifier, Current_Token => Current_Token); if Success then SparkMakeDebug.Report_Text_E_Text (Text => "main program is ", E_Text => Current_Token.Value); The_Unit.The_Id := Id'(The_Name => Current_Token.Value, The_Kind => Main_Program_Unit); end if; else Success := False; Output_Unexpected_Token (Expected => SP_Symbols.RWprocedure, Found => Current_Token); end if; Done := True; end Process_Main_Program; ------------------------------------------------------------------ procedure Process_Annotation --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out Done; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# in out Success; --# in out The_Unit; --# in out Token_It; --# derives Done, --# Success, --# The_Unit from *, --# CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token_It & --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Token_It from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Success, --# Token_It; is Current_Token : TokenManager.Token; begin SparkMakeDebug.Report_Text (Text => "found annotation"); TokenManager.Next (It => Token_It); Current_Token := TokenManager.Current (Token_It); -- Is this the main program if Current_Token.Kind = SP_Symbols.RWmain_program then Process_Main_Program; elsif Current_Token.Kind = SP_Symbols.RWinherit then Process_Inherit_Clause; else -- read until annotation end while Success loop TokenManager.Next (It => Token_It); Current_Token := TokenManager.Current (Token_It); exit when Current_Token.Kind = SP_Symbols.annotation_end; if Current_Token.Kind = SP_Symbols.SPEND then Success := False; end if; end loop; end if; end Process_Annotation; ------------------------------------------------------------------ begin SparkLex.Clear_Line_Context; The_Unit := Null_Object; Open (The_Filename => In_File, The_File_Id => The_File_Id, Success => Success); if Success then SparkMakeDebug.Report_Text_E_Text (Text => "Parsing file ", E_Text => In_File); TokenManager.Get_First_Token (File_Id => The_File_Id, It => Token_It); if TokenManager.Is_Null (It => Token_It) then Success := False; else while Success and then not Done loop Current_Token := TokenManager.Current (Token_It); case Current_Token.Kind is when SP_Symbols.RWwith => Process_With_Clause; when SP_Symbols.RWprivate => Process_Private_Child_Specification; when SP_Symbols.RWpackage => Process_Package; when SP_Symbols.RWseparate => Process_Separate; when SP_Symbols.annotation_start => Process_Annotation; when others => -- ignore these tokens null; end case; if TokenManager.Is_Null (It => Token_It) then Success := False; else TokenManager.Next (It => Token_It); end if; end loop; end if; --# accept Flow, 10, The_File_Id, "Closed file handle not used"; SPARK_IO.Close (File => The_File_Id, Status => Status); --# end accept; if Status /= SPARK_IO.Ok then SparkMakeErrors.Fatal ("Cannot close file"); end if; else E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => In_File); SparkMakeErrors.Fatal ("Cannot open file"); end if; if Success then The_Unit.The_File := In_File; else The_Unit := Null_Object; end if; end Get_Unit; 
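-- Worked examples (editorial note, not part of the original source) of the
-- classification performed by Get_Unit for hypothetical source files:
--   "package P is ..."                     yields Package_Specification_Unit "P";
--   "private package P.Q is ..."           yields Private_Child_Package_Specification_Unit "P.Q";
--   "package body P is ..."                yields Package_Body_Unit "P";
--   "separate (P) procedure Op is ..."     yields Separate_Body_Unit "P.Op";
--   "--# main_program; procedure Main ..." yields Main_Program_Unit "Main";
-- with clauses and --# inherit annotations encountered on the way populate
-- The_Withed_Units and The_Inherited_Units respectively.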
----------------------------------------------------------------------------- procedure Output_Object (The_Unit : in Object) is --# hide Output_Object; begin SparkMakeDebug.Report_Text (Text => "Unit debug:"); SparkMakeDebug.Report_Cond_Text (Cond => The_Unit = Null_Object, True_Text => "Object is null", False_Text => "Object details below"); if The_Unit /= Null_Object then SparkMakeDebug.Report_Text_E_Text (Text => "File: ", E_Text => The_Unit.The_File); SparkMakeDebug.Report_Text_E_Text (Text => "Name: ", E_Text => The_Unit.The_Id.The_Name); SparkMakeDebug.Report_Text (Text => "Kind: " & Kind'Image (The_Unit.The_Id.The_Kind)); SparkMakeDebug.Report_List (Text => "The withed units: ", List => The_Unit.The_Withed_Units); SparkMakeDebug.Report_List (Text => "The inherited units: ", List => The_Unit.The_Inherited_Units); end if; end Output_Object; ----------------------------------------------------------------------------- procedure Output_Id (The_Unit : in Id) is --# hide Output_Id; begin SparkMakeDebug.Report_Text (Text => "Unit debug:"); SparkMakeDebug.Report_Cond_Text (Cond => The_Unit = Null_Id, True_Text => "Id is null", False_Text => "Id details below"); if The_Unit /= Null_Id then SparkMakeDebug.Report_Text_E_Text (Text => "Name: ", E_Text => The_Unit.The_Name); SparkMakeDebug.Report_Text (Text => "Kind: " & Kind'Image (The_Unit.The_Kind)); end if; end Output_Id; end Unit; spark-2012.0.deb/sparkmake/unitmanager.ads0000644000175000017500000001040611753202340017416 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with E_Strings; with StringList; with Unit; with Units; use type Unit.Object; use type Unit.Id; use type Unit.Kind; --# inherit CommandLineData, --# Dictionary, --# Directory_Operations, --# ErrorHandler, --# E_Strings, --# LexTokenManager, --# RegularExpression, --# SparkLex, --# SparkMakeErrors, --# SPARK_IO, --# StringList, --# Unit, --# Units; package UnitManager --# own State; --# initializes State; is procedure Initialise (The_Directories : in StringList.Object; Include : in StringList.Object; Exclude : in StringList.Object; Root_File : in E_Strings.T; Duplicates : in Boolean; Success : out Boolean); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out State; --# out SparkLex.Curr_Line; --# derives ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# State, --# Success from CommandLineData.Content, --# Dictionary.Dict, --# Duplicates, --# ErrorHandler.Error_Context, --# Exclude, --# Include, --# LexTokenManager.State, --# Root_File, --# SPARK_IO.File_Sys, --# State, --# The_Directories; ------------------------------------------------------------- function Get_All_Units return Units.Stack; --# global in State; procedure Get_Unit (In_File : in E_Strings.T; The_Unit : out Unit.Id; Found : out Boolean); --# global in State; --# derives Found, --# The_Unit from In_File, --# State; procedure Get_File (For_Unit : in Unit.Id; The_File : out E_Strings.T; Found : out Boolean); --# global in State; --# derives Found, --# The_File from For_Unit, --# State; function Get (The_Unit : Unit.Id) return Unit.Object; --# global in State; function Package_Body (For_Unit : Unit.Id) return Unit.Id; --# global in State; function Parent (Of_Unit : Unit.Id) return Unit.Id; --# global in State; function Separate_Units (For_Unit : Unit.Id) return Units.Stack; --# global in State; function Required_Units (For_Unit : Unit.Id) return Units.Stack; --# global in State; function Components (For_Unit : Unit.Id) return Units.Stack; --# global in State; -- Search all units in UnitManager and return a stack of root -- units, ie units that are not required by other units. function Find_Roots return Units.Stack; --# global in State; end UnitManager; spark-2012.0.deb/sparkmake/unitmanager.adb0000644000175000017500000006264111753202340017405 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with SparkLex; with RegularExpression; with SPARK_IO; with Directory_Operations; with UnitManager.UnitStore; with SparkMakeErrors; package body UnitManager --# own State is UnitManager.UnitStore.State; is procedure Initialise (The_Directories : in StringList.Object; Include : in StringList.Object; Exclude : in StringList.Object; Root_File : in E_Strings.T; Duplicates : in Boolean; Success : out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out UnitStore.State; --# out SparkLex.Curr_Line; --# derives ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# Success, --# UnitStore.State from CommandLineData.Content, --# Dictionary.Dict, --# Duplicates, --# ErrorHandler.Error_Context, --# Exclude, --# Include, --# LexTokenManager.State, --# Root_File, --# SPARK_IO.File_Sys, --# The_Directories, --# UnitStore.State; is Directory_It : StringList.Iterator; File_It : StringList.Iterator; Include_It : StringList.Iterator; Exclude_It : StringList.Iterator; Current_Dir : E_Strings.T; Current_File : E_Strings.T; Add_This_File : Boolean; The_Reg_Exp : RegularExpression.Object; -------------------------------------------------------------------------- procedure Add_File (Current_File : in E_Strings.T; Duplicates : in Boolean; Success : in out Boolean) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out UnitStore.State; --# out SparkLex.Curr_Line; --# derives ErrorHandler.Error_Context, --# LexTokenManager.State, --# UnitStore.State from *, --# CommandLineData.Content, --# Current_File, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# SparkLex.Curr_Line from CommandLineData.Content, --# Current_File, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys & --# SPARK_IO.File_Sys, --# Success from *, --# CommandLineData.Content, --# Current_File, --# Dictionary.Dict, --# Duplicates, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# UnitStore.State; is Current_Unit : Unit.Object; begin -- Extract the unit Unit.Get_Unit (In_File => Current_File, The_Unit => Current_Unit); if Current_Unit = Unit.Null_Object then -- This will be reported as warning and the unit ignored. SparkMakeErrors.Report (The_Fault => SparkMakeErrors.Invalid_Unit, E_Str1 => Current_File, E_Str2 => E_Strings.Empty_String, E_Str3 => E_Strings.Empty_String); else UnitStore.Add (The_Unit => Current_Unit, Added => Success); if not Success then -- check to see if the filenames are different. Okay if unit appears -- twice in same file, but fail if filenames are different, i.e. -- the same unit appears in more than one file. 
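-- For example (illustrative file names): if package Stack has already been
-- added from stack.ads and is now seen again in old/stack.ads, the code
-- below reports the duplicate; with the duplicates-are-errors switch set
-- this fails the make (Success becomes False), otherwise the duplicate is
-- reported and the first file continues to be used.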
if E_Strings.Eq_String (E_Str1 => Current_File, E_Str2 => UnitStore.Get (The_Unit => Current_Unit.The_Id).The_File) then Success := True; else -- check to see whether duplicates are errors switch is set -- report errors/warnings as appropriate if Duplicates then SparkMakeErrors.Report (The_Fault => SparkMakeErrors.Duplicate_Errors, E_Str1 => Current_Unit.The_Id.The_Name, E_Str2 => Current_File, E_Str3 => UnitStore.Get (The_Unit => Current_Unit.The_Id).The_File); Success := False; else SparkMakeErrors.Report (The_Fault => SparkMakeErrors.Duplicate_Okay, E_Str1 => Current_Unit.The_Id.The_Name, E_Str2 => Current_File, E_Str3 => UnitStore.Get (The_Unit => Current_Unit.The_Id).The_File); Success := True; end if; end if; end if; end if; end Add_File; -------------------------------------------------------------------------- begin Success := True; SparkLex.Clear_Line_Context; --ensure root file is added, even if not in current directory if not E_Strings.Is_Empty (E_Str => Root_File) then Add_File (Current_File => Root_File, Duplicates => Duplicates, Success => Success); end if; -- For all the directories -- Directory_It := StringList.Get_First (In_List => The_Directories); while Success and not StringList.Is_Null (It => Directory_It) loop Current_Dir := StringList.Value (Directory_It); SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Processing directory ", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Current_Dir); -- For all the include file regular expressions -- Include_It := StringList.Get_First (In_List => Include); while Success and not StringList.Is_Null (It => Include_It) loop -- For all the files matching this regular expression -- The_Reg_Exp := RegularExpression.Create (StringList.Value (Include_It)); File_It := StringList.Get_First (In_List => Directory_Operations.Find_Files (Matching => The_Reg_Exp, In_Directory => Current_Dir, Recursively => True)); while Success and not StringList.Is_Null (It => File_It) loop Add_This_File := True; Current_File := StringList.Value (File_It); -- don't exclude the root file if not E_Strings.Eq_String (E_Str1 => Current_File, E_Str2 => Root_File) then -- For all the exclude file regular expressions -- Exclude_It := StringList.Get_First (In_List => Exclude); while not StringList.Is_Null (It => Exclude_It) loop if RegularExpression.Matches (E_Str => Current_File, The_Reg_Exp => RegularExpression.Create (StringList.Value (Exclude_It))) then Add_This_File := False; exit; end if; Exclude_It := StringList.Next (Exclude_It); end loop; end if; if Add_This_File then Add_File (Current_File => Current_File, Duplicates => Duplicates, Success => Success); end if; File_It := StringList.Next (File_It); end loop; Include_It := StringList.Next (Include_It); end loop; Directory_It := StringList.Next (Directory_It); end loop; end Initialise; ------------------------------------------------------------- function Get_All_Units return Units.Stack --# global in UnitStore.State; is begin return UnitStore.Get_All_Units; end Get_All_Units; ------------------------------------------------------------- procedure Get_File (For_Unit : in Unit.Id; The_File : out E_Strings.T; Found : out Boolean) --# global in UnitStore.State; --# derives Found, --# The_File from For_Unit, --# UnitStore.State; is begin The_File := UnitStore.Get (The_Unit => For_Unit).The_File; Found := not E_Strings.Is_Empty (E_Str => The_File); end Get_File; ------------------------------------------------------------- procedure Get_Unit (In_File : in E_Strings.T; The_Unit : out Unit.Id; Found 
: out Boolean) --# global in UnitStore.State; --# derives Found, --# The_Unit from In_File, --# UnitStore.State; is Current_Unit : Unit.Object; Id : Unit.Id; All_Units : Units.Stack; begin Found := False; The_Unit := Unit.Null_Id; All_Units := UnitStore.Get_All_Units; while not Units.IsEmpty (All_Units) loop Units.Pop (TheStack => All_Units, TheUnit => Id); Current_Unit := UnitStore.Get (The_Unit => Id); if E_Strings.Eq_String (E_Str1 => In_File, E_Str2 => Current_Unit.The_File) then Found := True; The_Unit := Current_Unit.The_Id; exit; end if; end loop; end Get_Unit; ------------------------------------------------------------- function Get (The_Unit : Unit.Id) return Unit.Object --# global in UnitStore.State; is begin return UnitStore.Get (The_Unit => The_Unit); end Get; ------------------------------------------------------------- function Parent (Of_Unit : Unit.Id) return Unit.Id --# global in UnitStore.State; is Parent_Unit : Unit.Id; begin case Of_Unit.The_Kind is when Unit.Package_Specification_Unit | Unit.Main_Program_Unit | Unit.Package_Body_Unit => -- These units do not have parents. Parent_Unit := Unit.Null_Id; when Unit.Separate_Body_Unit => Parent_Unit := UnitStore.Get_Body_Unit (With_Name => Unit.Prefix (Of_Unit.The_Name)).The_Id; when Unit.Child_Specification_Unit => Parent_Unit := UnitStore.Get_Specification_Unit (With_Name => Unit.Prefix (Of_Unit.The_Name)).The_Id; end case; return Parent_Unit; end Parent; ------------------------------------------------------------- function Package_Body (For_Unit : Unit.Id) return Unit.Id --# global in UnitStore.State; is Result : Unit.Id; begin case For_Unit.The_Kind is when Unit.Specification_Unit => Result := Unit.Id'(The_Name => For_Unit.The_Name, The_Kind => Unit.Package_Body_Unit); when Unit.Package_Body_Unit => Result := For_Unit; when Unit.Separate_Body_Unit => Result := For_Unit; while Result.The_Kind /= Unit.Package_Body_Unit loop Result := Parent (Of_Unit => Result); end loop; when Unit.Main_Program_Unit => Result := Unit.Null_Id; end case; return Result; end Package_Body; ------------------------------------------------------------- function Is_A_Component (This_Unit : Unit.Id; Of_This_Unit : Unit.Id) return Boolean --# global in UnitStore.State; -- -- Returns True is This_Unit is a component Of_This_Unit according to -- the rules given below is Result : Boolean := False; Parent_Unit : Unit.Id; begin case This_Unit.The_Kind is when Unit.Private_Child_Package_Specification_Unit => -- If This_Unit is a private child then it is a component -- Of_This_Unit if Of_This_Unit is its immediate parent. Result := E_Strings.Eq_String (E_Str1 => Of_This_Unit.The_Name, E_Str2 => Parent (Of_Unit => This_Unit).The_Name); when Unit.Public_Child_Package_Specification_Unit => -- If This_Unit is a public child then it is a component -- Of_This_Unit if there is exactly one private parent -- between it and Of_This_Unit and this private parent is an -- immediate child Of_This_Unit. 
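-- For example (illustrative unit names): given packages P, P.Priv (a
-- private child of P) and P.Priv.Q (a public child of P.Priv), P.Priv is
-- a component of P because P is its immediate parent, and P.Priv.Q is a
-- component of P because exactly one private parent (P.Priv) lies between
-- it and P, and that private parent is an immediate child of P.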
Parent_Unit := Parent (Of_Unit => This_Unit); while Parent_Unit /= Unit.Null_Id loop if Parent_Unit.The_Kind = Unit.Private_Child_Package_Specification_Unit then Result := E_Strings.Eq_String (E_Str1 => Of_This_Unit.The_Name, E_Str2 => Parent (Of_Unit => Parent_Unit).The_Name); exit; end if; Parent_Unit := Parent (Of_Unit => Parent_Unit); end loop; when others => null; end case; return Result; end Is_A_Component; ------------------------------------------------------------- function Inherited_Units (For_Unit : Unit.Id) return Units.Stack --# global in UnitStore.State; is Result : Units.Stack; The_Unit : Unit.Object; Inherited_Unit : Unit.Object; It : StringList.Iterator; begin Result := Units.NullStack; The_Unit := UnitStore.Get (The_Unit => For_Unit); if The_Unit /= Unit.Null_Object then It := StringList.Get_First (In_List => The_Unit.The_Inherited_Units); while not StringList.Is_Null (It => It) loop Inherited_Unit := UnitStore.Get_Specification_Unit (With_Name => StringList.Value (It)); if Inherited_Unit /= Unit.Null_Object then Units.Push (TheStack => Result, TheUnit => Inherited_Unit.The_Id); end if; It := StringList.Next (It); end loop; end if; return Result; end Inherited_Units; ------------------------------------------------------------- function Withed_Units (For_Unit : Unit.Id) return Units.Stack --# global in UnitStore.State; is Result : Units.Stack; The_Unit : Unit.Object; Withed_Unit : Unit.Object; It : StringList.Iterator; begin Result := Units.NullStack; The_Unit := UnitStore.Get (The_Unit => For_Unit); if The_Unit /= Unit.Null_Object then It := StringList.Get_First (In_List => The_Unit.The_Withed_Units); while not StringList.Is_Null (It => It) loop Withed_Unit := UnitStore.Get_Specification_Unit (With_Name => StringList.Value (It)); if Withed_Unit /= Unit.Null_Object then Units.Push (TheStack => Result, TheUnit => Withed_Unit.The_Id); end if; It := StringList.Next (It); end loop; end if; return Result; end Withed_Units; ------------------------------------------------------------- function Withed_Components (For_Unit : Unit.Id) return Units.Stack --# global in UnitStore.State; is The_Withed_Units : Units.Stack; Result : Units.Stack; Id : Unit.Id; Parent_Unit : Unit.Id; begin Result := Units.NullStack; The_Withed_Units := Withed_Units (For_Unit => For_Unit); while not Units.IsEmpty (The_Withed_Units) loop Units.Pop (TheStack => The_Withed_Units, TheUnit => Id); Parent_Unit := For_Unit; while Parent_Unit /= Unit.Null_Id loop if Is_A_Component (This_Unit => Id, Of_This_Unit => Parent_Unit) then Units.Push (TheStack => Result, TheUnit => Id); end if; Parent_Unit := Parent (Of_Unit => Parent_Unit); end loop; end loop; return Result; end Withed_Components; ------------------------------------------------------------- function Separate_Units (For_Unit : Unit.Id) return Units.Stack --# global in UnitStore.State; is All_Units : Units.Stack; Id : Unit.Id; Result : Units.Stack; Current_Unit : Unit.Object; begin Result := Units.NullStack; All_Units := UnitStore.Get_All_Units; while not Units.IsEmpty (All_Units) loop Units.Pop (TheStack => All_Units, TheUnit => Id); Current_Unit := UnitStore.Get (The_Unit => Id); if Current_Unit.The_Id.The_Kind = Unit.Separate_Body_Unit then if Unit.Are_Equal (L => Parent (Of_Unit => Current_Unit.The_Id), R => For_Unit) then Units.Push (TheStack => Result, TheUnit => Current_Unit.The_Id); end if; end if; end loop; return Result; end Separate_Units; ------------------------------------------------------------- function Required_Units (For_Unit : Unit.Id) 
return Units.Stack --# global in UnitStore.State; is Result : Units.Stack; The_Withed_Components : Units.Stack; Id : Unit.Id; begin -- We'll need all the inherited units Result := Inherited_Units (For_Unit => For_Unit); if For_Unit.The_Kind = Unit.Separate_Body_Unit then -- .. we need any withed components as no inherit is required The_Withed_Components := Withed_Components (For_Unit => For_Unit); while not Units.IsEmpty (The_Withed_Components) loop Units.Pop (TheStack => The_Withed_Components, TheUnit => Id); Units.Push (TheStack => Result, TheUnit => Id); end loop; -- We'll also need the body if it's a separate ... Units.Push (TheStack => Result, TheUnit => Parent (Of_Unit => For_Unit)); elsif For_Unit.The_Kind = Unit.Package_Body_Unit then -- .. we need any withed components as no inherit is required The_Withed_Components := Withed_Components (For_Unit => For_Unit); while not Units.IsEmpty (The_Withed_Components) loop Units.Pop (TheStack => The_Withed_Components, TheUnit => Id); Units.Push (TheStack => Result, TheUnit => Id); end loop; -- ... and the spec ... Units.Push (TheStack => Result, TheUnit => UnitStore.Get_Specification_Unit (With_Name => For_Unit.The_Name).The_Id); elsif For_Unit.The_Kind in Unit.Child_Specification_Unit then -- ... or the parent if it's a child package specification Units.Push (TheStack => Result, TheUnit => Parent (Of_Unit => For_Unit)); end if; return Result; end Required_Units; ------------------------------------------------------------- function Components (For_Unit : Unit.Id) return Units.Stack --# global in UnitStore.State; is Result : Units.Stack; Id : Unit.Id; All_Units : Units.Stack; begin Result := Units.NullStack; All_Units := UnitStore.Get_All_Units; while not Units.IsEmpty (All_Units) loop Units.Pop (TheStack => All_Units, TheUnit => Id); if Is_A_Component (This_Unit => Id, Of_This_Unit => For_Unit) then Units.Push (TheStack => Result, TheUnit => Id); end if; end loop; return Result; end Components; ------------------------------------------------------------- -- Find the root units. Slightly trickier than it may seem at -- first because we are not simply dealing with relationships -- between packages (eg package P requires package Q) but have -- to consider relationships between all compilation units. -- For example, q.adb requires q.ads. If q.adb isn't required -- by anything else (ie no separates) then it would seem to be -- a 'root' but we must not treat it as one because any -- package bodies are automatically added to the meta file -- after their corresponding specs. So the roots we want to -- find are: -- - any main programs; -- - any package specifications that are not required by -- other packages or main programs. function Find_Roots return Units.Stack --# global in UnitStore.State; is All_Units : Units.Stack; The_Unit : Unit.Id; Result : Units.Stack; -- Return value indicates whether there are any other units -- that require the given unit. function Is_Required (This_Unit : Unit.Id) return Boolean --# global in UnitStore.State; is Other_Units : Units.Stack; Req_Units : Units.Stack; The_Req_Unit : Unit.Id; The_Other_Unit : Unit.Id; Found : Boolean := False; begin Other_Units := Get_All_Units; -- Check whether each other unit requires this unit. while not Found and not Units.IsEmpty (Other_Units) loop Units.Pop (TheStack => Other_Units, TheUnit => The_Other_Unit); -- Get the required units for the other unit Req_Units := Required_Units (For_Unit => The_Other_Unit); -- Check each other unit to see if it requires this unit. 
-- If any other unit requires it then it can't be a root. while not Found and not Units.IsEmpty (Req_Units) loop Units.Pop (TheStack => Req_Units, TheUnit => The_Req_Unit); -- Don't count a spec as 'required' if the only thing that needs it -- is its own body, otherwise the presence of a body will prevent a -- spec from being counted as a root. if The_Req_Unit = This_Unit and not (Package_Body (This_Unit) = The_Other_Unit) then Found := True; end if; end loop; end loop; return Found; end Is_Required; begin -- Find_Roots All_Units := Get_All_Units; Result := Units.NullStack; -- Check all the units we know about... while not Units.IsEmpty (All_Units) loop Units.Pop (TheStack => All_Units, TheUnit => The_Unit); -- If it's a main program, or a package spec that isn't -- required by any other units, then it must be a root. -- (Bodies always depend on their specs, and separates -- always depend on bodies, so they can never be roots.) if The_Unit.The_Kind = Unit.Main_Program_Unit or else ((The_Unit.The_Kind in Unit.Specification_Unit) and not Is_Required (This_Unit => The_Unit)) then Units.Push (TheStack => Result, TheUnit => The_Unit); end if; end loop; return Result; end Find_Roots; end UnitManager; spark-2012.0.deb/sparkmake/tokenmanager.ads0000644000175000017500000001316611753202340017565 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with E_Strings; with SPARK_IO; with SP_Symbols; --# inherit CommandLineData, --# Dictionary, --# ErrorHandler, --# E_Strings, --# LexTokenManager, --# SparkLex, --# SparkMakeErrors, --# SPARK_IO, --# SP_Symbols; package TokenManager is type Token is record Kind : SP_Symbols.SP_Terminal; Value : E_Strings.T; end record; Null_Token : constant Token := Token'(Kind => SP_Symbols.SPEND, Value => E_Strings.Empty_String); type Iterator is private; Null_Iterator : constant Iterator; procedure Get_First_Token (File_Id : in SPARK_IO.File_Type; It : out Iterator); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# out SparkLex.Curr_Line; --# derives ErrorHandler.Error_Context, --# It, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# File_Id, --# LexTokenManager.State, --# SPARK_IO.File_Sys; procedure Next (It : in out Iterator); --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# It, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# It, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys; --# pre SparkLex.Curr_Line_Invariant (SparkLex.Curr_Line); --# post SparkLex.Curr_Line_Invariant (SparkLex.Curr_Line); procedure Look_Ahead (It : in out Iterator); -- -- Allows the client to look at the next symbol after the last returned -- by Next or NextIdentifier. Only one lookahead is allowed. Multiple, -- consecutive calls to Look_Ahead will thus return the same symbol. -- Subsequent calls to Next and NextIdentifier function as normal (i.e. as if -- the lookahead had not been made). -- --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# It, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# It, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys; --# pre SparkLex.Curr_Line_Invariant (SparkLex.Curr_Line); --# post SparkLex.Curr_Line_Invariant (SparkLex.Curr_Line); function Is_Null (It : Iterator) return Boolean; function Current (It : Iterator) return Token; function To_String (Tok : Token) return E_Strings.T; private type Iterator is record File : SPARK_IO.File_Type; -- The token we expect Current_Token : Token; -- Sometimes it is necessary to read a token too far. -- If this is the case then Next_Token stores what the token is so it can -- be returned on the next call to Next. Next_Token : Token; -- Allows the user to lookahead. 
Is_Look_Ahead : Boolean; end record; Null_Iterator : constant Iterator := Iterator'(File => SPARK_IO.Null_File, Current_Token => Null_Token, Next_Token => Null_Token, Is_Look_Ahead => False); end TokenManager; spark-2012.0.deb/sparkmake/sparkmake.smf0000644000175000017500000000016411753202340017100 0ustar eugeneugenunit.adb unitmanager.adb -vcg tokenmanager.adb -vcg sparkmakecommandline.adb sparkmakeerrors.adb -vcg sparkmake.adb spark-2012.0.deb/sparkmake/sparkmake.adb0000644000175000017500000010502511753202340017043 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- -- -- Specification -- -- -- -- INPUT OPTIONS -- -- ------------- -- -- -- -- Command Line -- -- 1. Invoking SPARKMake with no arguments will output an error message -- -- 2. All switches can be cut down to their smallest unique value -- -- -- -- /directory option -- -- 1. The default is the current directory -- -- 2. Any other directories, specified by this option are also searched -- -- 3. This switch may be repeated and the union of the default and all -- -- explicitly stated directories are searched -- -- 4. An error is reported if the specified directory does not exist -- -- 5. An error will be reported if there is no argument -- -- 6. A relative path may be used -- -- -- -- /include option -- -- 1. The default will find all *.ads and *.adb files (GNAT naming conv.) -- -- 2. A specified include option overrides the default -- -- 3. This switch may be duplicated and the union will be taken -- -- 4. An error will be reported if there is no argument -- -- 5. An error will be reported if the argument is not a regular expression -- -- 6. An error is output if the /inc option includes a file with an invalid -- -- compilation unit. -- -- -- -- /exclude -- -- 1. The default excludes no files -- -- 2. When specified, files matching this expression are excluded -- -- 3. This switch may be duplicated and the union will be taken -- -- 4. An error will be reported if there is no argument -- -- 5. An error will be reported if the argument is not a regular expression -- -- -- -- Argument -- -- 1. The argument consitutes the root file -- -- 2. It may appear anywhere in the command line -- -- 3. An error is reported if the root file cannot be found -- -- 4. The root file is always included regardless of the /inc, /exc switches -- -- 5. If no argument is provided then sparkmake will produce an index and -- -- meta file for the analysis of all files in the current directory (and -- -- any subdirectories). 
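--
-- Example invocation (illustrative file and directory names; switch syntax
-- as implemented by the CommandLine package elsewhere in this archive:
-- switches start with '-' and take their argument after '='):
--
--    sparkmake -directory=src -index=main.idx main.adb
--
-- This searches the current directory and src for source files, writes the
-- index to main.idx, uses main.adb as the root of the make, and (by
-- default) writes the meta file to main.smf.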
--
-- OUTPUT OPTIONS
-- --------------
--
-- /index
-- 1. The default is the root file name (or spark.idx if no root file given)
-- 2. If the root file name has an extension it is replaced with idx
-- 3. If the root file has no extension then idx is used
-- 4. When specified the argument constitutes the index file
-- 5. If the file does not exist it is created
-- 6. If the file exists then it will be overwritten
-- 7. The index file contains entries for:
--    a. All files specified by the /include switch
--    b. That are not excluded by the /exclude switch
--    c. The root file
-- 8. The index file always specifies full pathnames
-- 9. The format of the index files matches that described in the Examiner
--    user manual.
-- 10. An error is output if the same unit is duplicated in more than one file
--
-- /meta
-- 1. The default is the root file name (or spark.smf if no root file given)
-- 2. If the root file name has an extension it is replaced with smf
-- 3. If the root file has no extension then smf is used
-- 4. When specified the argument constitutes the meta file
-- 5. If the file does not exist it is created
-- 6. If the file exists then it will be overwritten
-- 7. The meta file always specifies full path names
-- 8. The root of the make is the root file specified as an argument on the
--    command line
-- 9. The make process will attempt to make an entire closure for the root
--    file, i.e. it will make all the dependent units AND their bodies
--    and separates.
-- 10. All files involved in the make will have an entry in the index file
-- 11. If a unit cannot be found it is ignored.
--
-- /noindexfile
-- 1. Default is False.
-- 2. If True then the index file will not be generated.
--
-- /nometafile
-- 1. Default is False.
-- 2. If True then the meta file will not be generated.
--
-- /path
-- 1. Default is "full"
-- 2. User can select "full" or "relative"
--
-- BEHAVIOUR OPTIONS
-- -----------------
--
-- /duplicates_are_errors
-- 1. Duplicate units are treated as an error
--
-- /annotation_character
-- 1. The annotation character can be specified
-- 2. If it is not a character it will not be accepted
-- 3.
If not specified '#' will be used -- -------------------------------------------------------------------------------- with Ada.Exceptions; with GNAT.Traceback.Symbolic; with CommandLineData; with E_Strings; with ScreenEcho; with SPARK_IO; with ErrorHandler; with LexTokenManager; with Dictionary; with Unit; with Units; with UnitManager; with SparkLex; with SparkMakeCommandLine; with SparkMakeErrors; with SparkMakeDebug; with Directory_Operations; use type Unit.Kind; use type Unit.Object; use type Unit.Id; use type SPARK_IO.File_Status; use type SPARK_IO.File_Type; use type SparkMakeCommandLine.Path_Type; --# inherit CommandLine, --# CommandLineData, --# Dictionary, --# Directory_Operations, --# ErrorHandler, --# E_Strings, --# LexTokenManager, --# SparkLex, --# SparkMakeCommandLine, --# SparkMakeDebug, --# SparkMakeErrors, --# SPARK_IO, --# Unit, --# UnitManager, --# Units; --# main_program procedure Sparkmake --# global in CommandLine.State; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SPARK_IO.File_Sys; --# in out UnitManager.State; --# out CommandLineData.Content; --# out Dictionary.Dict; --# out SparkLex.Curr_Line; --# out SparkMakeCommandLine.State; --# derives CommandLineData.Content, --# SparkMakeCommandLine.State from CommandLine.State & --# Dictionary.Dict from LexTokenManager.State, --# SPARK_IO.File_Sys & --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys, --# UnitManager.State from CommandLine.State, --# ErrorHandler.Error_Context, --# LexTokenManager.State, --# SPARK_IO.File_Sys, --# UnitManager.State; is -- The entry type signifiers must all be the same length Main_Program_Sig : constant String := "main_program is in "; Specification_Sig : constant String := "specification is in "; Body_Sig : constant String := "body is in "; Subunit_Sig : constant String := "subunit is in "; Component_Sig : constant String := "components are "; -- This must equal the length of the above signifiers Sig_Length : constant E_Strings.Lengths := 20; -- The files output by this program The_Meta_File : SPARK_IO.File_Type; Success : Boolean; Help_Or_Ver_Found : Boolean; -------------------------------------------------------------------------- procedure Open_Or_Create_File (File : in E_Strings.T; Mode : in SPARK_IO.File_Mode; Id : in out SPARK_IO.File_Type; Success : out Boolean) --# global in out SPARK_IO.File_Sys; --# derives Id, --# SPARK_IO.File_Sys, --# Success from File, --# Id, --# Mode, --# SPARK_IO.File_Sys; is Status : SPARK_IO.File_Status; begin E_Strings.Open (File => Id, Mode_Of_File => Mode, Name_Of_File => File, Form_Of_File => "", Status => Status); if Status = SPARK_IO.Name_Error then E_Strings.Create (File => Id, Name_Of_File => File, Form_Of_File => "", Status => Status); end if; Success := Status = SPARK_IO.Ok; if not Success then SparkMakeErrors.Report (The_Fault => SparkMakeErrors.Cannot_Open_File, E_Str1 => File, E_Str2 => E_Strings.Empty_String, E_Str3 => E_Strings.Empty_String); end if; end Open_Or_Create_File; -------------------------------------------------------------------------- procedure Close_File (File : in E_Strings.T; File_Id : in out SPARK_IO.File_Type) --# global in out SPARK_IO.File_Sys; --# derives File_Id from * & --# SPARK_IO.File_Sys from *, --# File, --# File_Id; is Status : SPARK_IO.File_Status; begin if File_Id /= SPARK_IO.Null_File then SPARK_IO.Close (File => File_Id, Status => Status); if Status /= SPARK_IO.Ok then SparkMakeErrors.Report (The_Fault => 
SparkMakeErrors.Cannot_Close_File, E_Str1 => File, E_Str2 => E_Strings.Empty_String, E_Str3 => E_Strings.Empty_String); end if; end if; end Close_File; -------------------------------------------------------------------------- procedure Output_Debug (The_File : in E_Strings.T; Indent : in Integer) --# derives null from Indent, --# The_File; is E_Str : E_Strings.T; begin E_Str := E_Strings.Empty_String; for I in Integer range 1 .. Indent loop E_Strings.Append_String (E_Str => E_Str, Str => " "); end loop; E_Strings.Append_Examiner_String (E_Str1 => E_Str, E_Str2 => The_File); SparkMakeDebug.Report_Text_E_Text (Text => "Make: ", E_Text => E_Str); end Output_Debug; -------------------------------------------------------------------------- function Quote_If_Needed (F : in E_Strings.T) return E_Strings.T is Result : E_Strings.T; function Contains_Space return Boolean --# global in F; is Result : Boolean := False; begin for I in E_Strings.Positions range 1 .. E_Strings.Get_Length (E_Str => F) loop if E_Strings.Get_Element (E_Str => F, Pos => I) = ' ' then Result := True; exit; end if; end loop; return Result; end Contains_Space; begin if Contains_Space then Result := E_Strings.Empty_String; E_Strings.Append_String (E_Str => Result, Str => """"); E_Strings.Append_Examiner_String (E_Str1 => Result, E_Str2 => F); E_Strings.Append_String (E_Str => Result, Str => """"); else Result := F; end if; return Result; end Quote_If_Needed; -------------------------------------------------------------------------- procedure Make (The_File : in E_Strings.T) --# global in SparkMakeCommandLine.State; --# in The_Meta_File; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# SparkMakeCommandLine.State, --# The_File, --# The_Meta_File; --# is --# hide Make; The_Unit : Unit.Id; Found : Boolean; Main_Program_Found : Boolean := False; Main_Program_Name : E_Strings.T := E_Strings.Empty_String; -- Stores which units have been placed in the meta file. The_Made_Stack : Units.Stack; -- Root units for the make Root_Stack : Units.Stack; The_Root_Unit : Unit.Id; -- For debug purposes Indent : Integer := 0; -------------------------------------------------------------------------- -- Forward declaration. -- Make_Unit and Make_Units are mutually recursive. procedure Make_Units (The_Units : in Units.Stack); -------------------------------------------------------------------------- procedure Make_Unit (The_Unit : in Unit.Id) is Found : Boolean; The_Units : Units.Stack; The_File : E_Strings.T; begin -- don't make something twice or we'll recurse forever! -- if not Units.InStack (TheUnit => The_Unit, TheStack => The_Made_Stack) then -- Check we know about this unit UnitManager.Get_File (For_Unit => The_Unit, The_File => The_File, Found => Found); if Found then -- Record the fact we've made this unit. -- Units.Push (The_Made_Stack, The_Unit); if SparkMakeCommandLine.Debug_On then Output_Debug (The_File => The_File, Indent => Indent); end if; -- Get the required units -- The_Units := UnitManager.Required_Units (For_Unit => The_Unit); if not Units.IsEmpty (The_Units) then Indent := Indent + 3; -- Make the required units first -- Make_Units (The_Units => The_Units); Indent := Indent - 3; end if; -- Write this filename for this unit to the meta file. 
-- if SparkMakeCommandLine.Path_Required = SparkMakeCommandLine.Full then E_Strings.Put_Line (File => The_Meta_File, E_Str => Quote_If_Needed (F => The_File)); else E_Strings.Put_Line (File => The_Meta_File, E_Str => Quote_If_Needed (F => Directory_Operations.Relative_Name (Of_This_File_Or_Dir => The_File, To_This_Dir => Directory_Operations.Current_Directory))); end if; -- If we find multiple main programs in the make then warn the user -- because the resulting meta file will not Examiner cleanly. if The_Unit.The_Kind = Unit.Main_Program_Unit then if Main_Program_Found then -- Don't warn for the first one we find! SparkMakeErrors.Report (The_Fault => SparkMakeErrors.Multiple_Main_Programs, E_Str1 => Main_Program_Name, E_Str2 => The_File, E_Str3 => E_Strings.Empty_String); else Main_Program_Name := The_File; -- only update this the first time round end if; Main_Program_Found := True; end if; if The_Unit.The_Kind in Unit.Specification_Unit then -- This was a spec so make the body Make_Unit (The_Unit => UnitManager.Package_Body (For_Unit => The_Unit)); else -- This was a body so make any separates Make_Units (The_Units => UnitManager.Separate_Units (For_Unit => The_Unit)); end if; end if; end if; end Make_Unit; -------------------------------------------------------------------------- procedure Make_Units (The_Units : in Units.Stack) is The_Unit : Unit.Id; Local_Units : Units.Stack; begin Local_Units := The_Units; while not Units.IsEmpty (Local_Units) loop Units.Pop (TheStack => Local_Units, TheUnit => The_Unit); Make_Unit (The_Unit => The_Unit); end loop; end Make_Units; -------------------------------------------------------------------------- procedure Warn_Of_Units_Not_In_Make --# global The_Made_Stack; is All_Units : Units.Stack; The_Unit : Unit.Id; Found : Boolean; The_File : E_Strings.T; Printed_Warning_Header : Boolean := False; begin All_Units := UnitManager.Get_All_Units; -- If any unit is not in the made stack then it wasn't written to the -- meta file. Warn the user that this is the case. while not Units.IsEmpty (All_Units) loop Units.Pop (TheStack => All_Units, TheUnit => The_Unit); if not Units.InStack (TheUnit => The_Unit, TheStack => The_Made_Stack) then -- Check we know about this unit UnitManager.Get_File (For_Unit => The_Unit, The_File => The_File, Found => Found); if Found then if not Printed_Warning_Header then SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "The following units were not included in the meta file:", 0); Printed_Warning_Header := True; end if; E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => The_File); end if; end if; end loop; end Warn_Of_Units_Not_In_Make; -------------------------------------------------------------------------- begin -- If no root filename specified then do it for the whole directory if E_Strings.Is_Empty (E_Str => The_File) then Root_Stack := UnitManager.Find_Roots; -- Get all the root units for this make -- Nothing made so far. The_Made_Stack := Units.NullStack; -- Iterate through all the roots, making the metafile entries -- for each one in turn. while not Units.IsEmpty (Root_Stack) loop Units.Pop (TheStack => Root_Stack, TheUnit => The_Root_Unit); -- kick off the recursive make for this root. Make_Unit (The_Unit => The_Root_Unit); end loop; else -- A filename was supplied so use that as the root for the make -- Nothing made so far. 
The_Made_Stack := Units.NullStack; -- Get the unit in this file UnitManager.Get_Unit (In_File => The_File, The_Unit => The_Unit, Found => Found); if Found then -- kick off the recursive make Make_Unit (The_Unit => The_Unit); else SparkMakeErrors.Report (The_Fault => SparkMakeErrors.Cannot_Find_File, E_Str1 => The_File, E_Str2 => E_Strings.Empty_String, E_Str3 => E_Strings.Empty_String); end if; end if; -- The user may be expecting *all* files to be included in the make -- so warn about any that were not put in the meta file (ie those -- that are present in All_Units but not in The_Made_Stack). Warn_Of_Units_Not_In_Make; end Make; -------------------------------------------------------------------------- function Max_Unit_Name_Length (The_Stack : in Units.Stack) return E_Strings.Lengths is Iterator : Units.Iterator; Next_Unit : Unit.Id; Max_Id : E_Strings.Lengths; begin Units.Init_Iterator (The_Stack, Iterator); Max_Id := 0; while not Units.Iterated (Iterator) loop Units.Iterate (Iterator, Next_Unit); if E_Strings.Get_Length (E_Str => Next_Unit.The_Name) > Max_Id then Max_Id := E_Strings.Get_Length (E_Str => Next_Unit.The_Name); end if; end loop; return Max_Id; end Max_Unit_Name_Length; -------------------------------------------------------------------------- procedure Tab_To_Position (E_Str : in out E_Strings.T; Tab_Position : in E_Strings.Lengths) --# derives E_Str from *, --# Tab_Position; is begin for I in E_Strings.Lengths range E_Strings.Get_Length (E_Str => E_Str) .. Tab_Position loop E_Strings.Append_Char (E_Str => E_Str, Ch => ' '); end loop; end Tab_To_Position; -------------------------------------------------------------------------- function Line (For_Unit : Unit.Object; Tab_Position : E_Strings.Lengths) return E_Strings.T --# global in SparkMakeCommandLine.State; -- -- Returns is in is Temp : E_Strings.T; begin Temp := E_Strings.Empty_String; E_Strings.Append_Examiner_String (E_Str1 => Temp, E_Str2 => For_Unit.The_Id.The_Name); Tab_To_Position (E_Str => Temp, Tab_Position => Tab_Position); case For_Unit.The_Id.The_Kind is when Unit.Main_Program_Unit => E_Strings.Append_String (E_Str => Temp, Str => Main_Program_Sig); when Unit.Specification_Unit => E_Strings.Append_String (E_Str => Temp, Str => Specification_Sig); when Unit.Package_Body_Unit => E_Strings.Append_String (E_Str => Temp, Str => Body_Sig); when Unit.Separate_Body_Unit => E_Strings.Append_String (E_Str => Temp, Str => Subunit_Sig); end case; if SparkMakeCommandLine.Path_Required = SparkMakeCommandLine.Full then E_Strings.Append_Examiner_String (E_Str1 => Temp, E_Str2 => Quote_If_Needed (F => For_Unit.The_File)); else E_Strings.Append_Examiner_String (E_Str1 => Temp, E_Str2 => Quote_If_Needed (F => Directory_Operations.Relative_Name (Of_This_File_Or_Dir => For_Unit.The_File, To_This_Dir => Directory_Operations.Current_Directory))); end if; return Temp; end Line; -------------------------------------------------------------------------- procedure Build_Index_File (The_Name : in E_Strings.T; Success : out Boolean) --# global in SparkMakeCommandLine.State; --# in UnitManager.State; --# in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# SparkMakeCommandLine.State, --# The_Name, --# UnitManager.State & --# Success from SPARK_IO.File_Sys, --# The_Name; is Current_Component : Unit.Id; The_Components : Units.Stack; All_Units : Units.Stack; Current_Unit : Unit.Object; Id : Unit.Id; First_Component : Boolean; The_Index_File : SPARK_IO.File_Type := SPARK_IO.Null_File; Tab_Position : E_Strings.Lengths; 
Component_Line : E_Strings.T; begin SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Building index file ", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => The_Name); Open_Or_Create_File (File => The_Name, Mode => SPARK_IO.Out_File, Id => The_Index_File, Success => Success); if Success then All_Units := UnitManager.Get_All_Units; Units.Sort (All_Units); Tab_Position := Max_Unit_Name_Length (The_Stack => All_Units); while not Units.IsEmpty (All_Units) loop Units.Pop (TheStack => All_Units, TheUnit => Id); Current_Unit := UnitManager.Get (The_Unit => Id); Unit.Output_Object (The_Unit => Current_Unit); -- Write the file -> unit mapping to the index file E_Strings.Put_Line (File => The_Index_File, E_Str => Line (For_Unit => Current_Unit, Tab_Position => Tab_Position)); -- if this is a specification -- Write any component information to the index file if Current_Unit.The_Id.The_Kind in Unit.Specification_Unit then The_Components := UnitManager.Components (For_Unit => Current_Unit.The_Id); First_Component := True; Component_Line := Current_Unit.The_Id.The_Name; while not Units.IsEmpty (The_Components) loop Units.Pop (TheStack => The_Components, TheUnit => Current_Component); if First_Component then -- Create the components are First_Component := False; Tab_To_Position (E_Str => Component_Line, Tab_Position => Tab_Position); E_Strings.Append_String (E_Str => Component_Line, Str => Component_Sig); else -- Add comma to continue list on new line E_Strings.Append_String (E_Str => Component_Line, Str => ","); E_Strings.Put_Line (File => The_Index_File, E_Str => Component_Line); -- Align component names Component_Line := E_Strings.Empty_String; Tab_To_Position (E_Str => Component_Line, Tab_Position => Tab_Position + Sig_Length); end if; -- Append a component name E_Strings.Append_Examiner_String (E_Str1 => Component_Line, E_Str2 => Current_Component.The_Name); end loop; if not First_Component then E_Strings.Put_Line (File => The_Index_File, E_Str => Component_Line); end if; end if; end loop; end if; --# accept Flow, 10, The_Index_File, "Ineffective assignment here OK"; Close_File (File => The_Name, File_Id => The_Index_File); end Build_Index_File; ---------------------------------------------------------------------------------- procedure Build_Meta_File (The_Name : in E_Strings.T; From_Root : in E_Strings.T) --# global in SparkMakeCommandLine.State; --# in out SPARK_IO.File_Sys; --# out The_Meta_File; --# derives SPARK_IO.File_Sys from *, --# From_Root, --# SparkMakeCommandLine.State, --# The_Name & --# The_Meta_File from SPARK_IO.File_Sys, --# The_Name; is Success : Boolean; begin The_Meta_File := SPARK_IO.Null_File; SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Building meta file ", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => The_Name); Open_Or_Create_File (File => The_Name, Mode => SPARK_IO.Out_File, Id => The_Meta_File, Success => Success); if Success then Make (The_File => From_Root); Close_File (File => The_Name, File_Id => The_Meta_File); end if; end Build_Meta_File; begin -- Set Up CommandLineData.Initialize; LexTokenManager.Initialise_String_Table; Dictionary.Initialize (False); ErrorHandler.Spark_Make_Init; SparkLex.Clear_Line_Context; -- Read the command line SparkMakeCommandLine.Process (Success, Help_Or_Ver_Found); if Success and not Help_Or_Ver_Found then UnitManager.Initialise (The_Directories => SparkMakeCommandLine.The_Directory_Names, Include => SparkMakeCommandLine.The_Inc_File_Reg_Exps, Exclude => 
SparkMakeCommandLine.The_Exc_File_Reg_Exps, Root_File => SparkMakeCommandLine.Root_Filename, Duplicates => SparkMakeCommandLine.Duplicates_Error, Success => Success); if Success then if not SparkMakeCommandLine.No_Index_File then Build_Index_File (The_Name => SparkMakeCommandLine.Index_Filename, Success => Success); end if; if Success then if not SparkMakeCommandLine.No_Meta_File then --# accept Flow, 10, The_Meta_File, "Ineffective assignment here OK"; Build_Meta_File (The_Name => SparkMakeCommandLine.Meta_Filename, From_Root => SparkMakeCommandLine.Root_Filename); --# end accept; end if; end if; end if; end if; --# accept Flow, 33, The_Meta_File, "Metafile not referenced here"; exception --# hide Sparkmake; when E : others => ScreenEcho.New_Line (1); ScreenEcho.Put_Line ("Unhandled Exception in SPARKMake"); ScreenEcho.Put_Line ("Exception information:"); ScreenEcho.Put_Line (Ada.Exceptions.Exception_Information (E)); ScreenEcho.Put_Line ("Traceback:"); ScreenEcho.Put_Line (GNAT.Traceback.Symbolic.Symbolic_Traceback (E)); end Sparkmake; spark-2012.0.deb/sparkmake/commandline.adb0000644000175000017500000001006711753202340017354 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Characters.Latin_1; with GNAT.Command_Line; package body CommandLine is --# hide CommandLine; -- remainder is not SPARK. -- Switch_Delimiter is '-' on all platforms. This package is now *only* used -- by SPARKMake so no other tools are affected by this. Switch_Delimiter : constant Character := '-'; Switch_Arg_Separator : constant String := "="; -------------------------------------------------------------------------- procedure Setup is begin GNAT.Command_Line.Initialize_Option_Scan (Switch_Char => Switch_Delimiter); end Setup; -------------------------------------------------------------------------- procedure Read (Switch : out E_Strings.T; Argument : out E_Strings.T; Success : out Boolean) is The_Switch_String : E_Strings.T; LC_Switch_String : E_Strings.T; Arg_Found : Boolean; Arg_Pos : E_Strings.Positions; Char : Character; begin -- set up default out parameters Switch := E_Strings.Empty_String; Argument := E_Strings.Empty_String; Success := False; -- Read the command line Char := GNAT.Command_Line.Getopt ("* *" & Switch_Arg_Separator (1)); if Char /= Ada.Characters.Latin_1.NUL then -- Get the current switch The_Switch_String := E_Strings.Copy_String (Str => GNAT.Command_Line.Full_Switch); -- Convert to lower case LC_Switch_String := E_Strings.Lower_Case (E_Str => The_Switch_String); -- Is this a switch? 
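-- (That is: does the token start with the '-' switch delimiter?  A token
-- without the delimiter is treated as a plain, non-switch argument instead,
-- which for sparkmake is the root file name.)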
if E_Strings.Get_Element (E_Str => The_Switch_String, Pos => E_Strings.Positions'First) = Switch_Delimiter then -- Yes, does it have an argument? E_Strings.Find_Sub_String (E_Str => LC_Switch_String, Search_String => Switch_Arg_Separator, String_Found => Arg_Found, String_Start => Arg_Pos); if Arg_Found then -- Yes it has an argument Success := True; Switch := E_Strings.Section (E_Str => LC_Switch_String, Start_Pos => 2, Length => Arg_Pos - 2); Argument := E_Strings.Section (E_Str => The_Switch_String, Start_Pos => Arg_Pos + 1, Length => E_Strings.Get_Length (E_Str => The_Switch_String) - Arg_Pos); else -- No argument Success := True; Switch := E_Strings.Section (E_Str => LC_Switch_String, Start_Pos => 2, Length => E_Strings.Get_Length (E_Str => LC_Switch_String) - 1); end if; else -- must be an argument Argument := E_Strings.Copy_String (Str => GNAT.Command_Line.Full_Switch); Success := not E_Strings.Is_Empty (E_Str => Argument); end if; end if; end Read; end CommandLine; spark-2012.0.deb/sparkmake/sparkmakeerrors.adb0000644000175000017500000001334411753202340020302 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with SPARK_IO; with SparkMakeDebug; with SystemErrors; package body SparkMakeErrors is subtype Error is Fault range Fault'First .. Cannot_Close_File; subtype Warning is Fault range Fault'Succ (Error'Last) .. Fault'Last; procedure Report (The_Fault : in Fault; E_Str1 : in E_Strings.T; E_Str2 : in E_Strings.T; E_Str3 : in E_Strings.T) is begin case The_Fault is when Error => SPARK_IO.Put_String (SPARK_IO.Standard_Output, "!!! 
Sparkmake error: ", 0); when Warning => SPARK_IO.Put_String (SPARK_IO.Standard_Output, ">>> Sparkmake warning: ", 0); end case; case The_Fault is -- COMMAND LINE FAULTS when Invalid_Switch => E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => E_Str1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " is not a recognised switch", 0); when Invalid_Argument => E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => E_Str1); SPARK_IO.Put_String (SPARK_IO.Standard_Output, " is not a valid argument for switch ", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => E_Str2); when Duplicate_Switch => SPARK_IO.Put_String (SPARK_IO.Standard_Output, "The switch ", 0); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => E_Str1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " cannot be duplicated", 0); when Cannot_Find_File => SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Could not find the file ", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => E_Str1); -- INDEX FILE FAULTS when Duplicate_Errors => SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Unit ", 0); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => E_Str1); SPARK_IO.Put_String (SPARK_IO.Standard_Output, " is duplicated in files ", 0); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => E_Str2); SPARK_IO.Put_String (SPARK_IO.Standard_Output, " and ", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => E_Str3); when Duplicate_Okay => SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Unit ", 0); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => E_Str1); SPARK_IO.Put_String (SPARK_IO.Standard_Output, " in file ", 0); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => E_Str2); SPARK_IO.Put_String (SPARK_IO.Standard_Output, " is already seen in ", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => E_Str3); when Invalid_Unit => SPARK_IO.Put_String (SPARK_IO.Standard_Output, "File ", 0); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => E_Str1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " does not contain a valid unit", 0); when Multiple_Main_Programs => SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Files ", 0); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => E_Str1); SPARK_IO.Put_String (SPARK_IO.Standard_Output, " and ", 0); E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => E_Str2); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " both contain main programs", 0); -- GENERAL FILE HANDLING FAULTS when Cannot_Open_File => SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Cannot open ", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => E_Str1); when Cannot_Close_File => SPARK_IO.Put_String (SPARK_IO.Standard_Output, "Cannot close ", 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => E_Str1); end case; end Report; procedure Fatal (Text : in String) is --# hide Fatal; begin SparkMakeDebug.Report_Text (Text => "Fatal error: " & Text); SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Internal error. 
"); end Fatal; end SparkMakeErrors; spark-2012.0.deb/sparkmake/directory_operations.adb0000644000175000017500000003076011753202340021337 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with E_Strings.Not_SPARK; with GNAT.Directory_Operations; with GNAT.OS_Lib; with SystemErrors; package body Directory_Operations is function Prefix (E_Str : E_Strings.T; Separator : Character; Include_Separator : Boolean) return E_Strings.T -- Returns the text up to the last occurrance of the separator. -- If Include_Separator is true then the string returned ends in the separator -- otherwise it does not. is Result : E_Strings.T := E_Strings.Empty_String; Last : Integer := 0; begin for I in reverse Integer range 1 .. E_Strings.Get_Length (E_Str => E_Str) loop if I /= E_Strings.Get_Length (E_Str => E_Str) and then E_Strings.Get_Element (E_Str => E_Str, Pos => I) = Separator then Last := I; exit; end if; end loop; if Last /= 0 then if Include_Separator then Result := E_Strings.Section (E_Str => E_Str, Start_Pos => 1, Length => Last); else Result := E_Strings.Section (E_Str => E_Str, Start_Pos => 1, Length => Last - 1); end if; end if; return Result; end Prefix; -------------------------------------------------------------------------------- function Directory_Separator return Character is begin return GNAT.OS_Lib.Directory_Separator; end Directory_Separator; -------------------------------------------------------------------------------- function Current_Directory return E_Strings.T is begin return E_Strings.Copy_String (Str => GNAT.Directory_Operations.Get_Current_Dir); end Current_Directory; -------------------------------------------------------------------------------- function Find_Files (Matching : RegularExpression.Object; In_Directory : E_Strings.T; Recursively : Boolean) return StringList.Object is The_Result : StringList.Object; procedure Scan_Directory (Dir : in GNAT.Directory_Operations.Dir_Name_Str) is D : GNAT.Directory_Operations.Dir_Type; Str : String (1 .. 1024); Last : Natural; begin GNAT.Directory_Operations.Open (D, Dir); loop GNAT.Directory_Operations.Read (D, Str, Last); exit when Last = 0; declare F : constant String := Dir & Str (1 .. Last); E_Str : E_Strings.T; begin if GNAT.OS_Lib.Is_Directory (F) then -- Ignore "." and ".." if ((Last = 1) and then (Str (1) = '.')) or ((Last = 2) and then (Str (1) = '.' and Str (2) = '.')) then null; elsif Recursively then -- Recurse here Scan_Directory (F & GNAT.OS_Lib.Directory_Separator); end if; else -- Does this file match the regular expression? 
E_Str := E_Strings.Copy_String (Str => F); if RegularExpression.Matches (E_Str => E_Str, The_Reg_Exp => Matching) then StringList.Add_In_Lex_Order (To_List => The_Result, The_Item => E_Str); end if; end if; end; end loop; GNAT.Directory_Operations.Close (D); exception when others => GNAT.Directory_Operations.Close (D); raise; end Scan_Directory; begin The_Result := StringList.Null_Object; Scan_Directory (Dir => E_Strings.Not_SPARK.Get_String (E_Str => In_Directory)); return The_Result; end Find_Files; -------------------------------------------------------------------------------- function File_Extension (Path : E_Strings.T) return E_Strings.T is begin return E_Strings.Copy_String (Str => GNAT.Directory_Operations.File_Extension (Path => E_Strings.Not_SPARK.Get_String (E_Str => Path))); end File_Extension; -------------------------------------------------------------------------------- procedure Set_Extension (Path : in out E_Strings.T; Ext : in E_Strings.T) is begin if not E_Strings.Is_Empty (E_Str => File_Extension (Path => Path)) then -- Has an extension so remove it Path := Prefix (E_Str => Path, Separator => '.', Include_Separator => False); end if; -- Add the given extension. E_Strings.Append_String (E_Str => Path, Str => "."); E_Strings.Append_Examiner_String (E_Str1 => Path, E_Str2 => Ext); end Set_Extension; -------------------------------------------------------------------------------- function Filename (Path : E_Strings.T) return E_Strings.T is begin return E_Strings.Copy_String (Str => GNAT.Directory_Operations.File_Name (Path => E_Strings.Not_SPARK.Get_String (E_Str => Path))); end Filename; -------------------------------------------------------------------------------- function Is_Directory (Path : E_Strings.T) return Boolean is begin return GNAT.OS_Lib.Is_Directory (Name => E_Strings.Not_SPARK.Get_String (E_Str => Path)); end Is_Directory; -------------------------------------------------------------------------------- function Is_File (Path : E_Strings.T) return Boolean is begin return GNAT.OS_Lib.Is_Regular_File (Name => E_Strings.Not_SPARK.Get_String (E_Str => Path)); end Is_File; -------------------------------------------------------------------------------- function Normalize_Path_Name (Name : E_Strings.T; Directory : E_Strings.T) return E_Strings.T is begin return E_Strings.Copy_String (Str => GNAT.OS_Lib.Normalize_Pathname (Name => E_Strings.Not_SPARK.Get_String (E_Str => Name), Directory => E_Strings.Not_SPARK.Get_String (E_Str => Directory), Resolve_Links => False)); end Normalize_Path_Name; -------------------------------------------------------------------------------- procedure Normalise_Dir (D : in out E_Strings.T) is begin if not Is_Directory (Path => D) then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Internal error."); elsif E_Strings.Get_Element (E_Str => D, Pos => E_Strings.Get_Length (E_Str => D)) /= Directory_Separator then E_Strings.Append_Char (E_Str => D, Ch => Directory_Separator); end if; end Normalise_Dir; -------------------------------------------------------------------------------- function Up_Dir (D : E_Strings.T) return E_Strings.T is Result : E_Strings.T := E_Strings.Empty_String; begin if not Is_Directory (Path => D) then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Internal error."); else Result := Prefix (E_Str => D, Separator => Directory_Separator, Include_Separator => True); end if; return Result; end Up_Dir; 
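   -- Worked example for Relative_Name below (the paths are hypothetical and
   -- purely illustrative):
   --    Of_This_File_Or_Dir = "/home/user/proj/src/pack.adb"
   --    To_This_Dir         = "/home/user/proj/vcg/"
   -- Common_Path gives "/home/user/proj/", which is neither empty nor equal to
   -- To_This_Dir, so the final branch climbs the target directory with Up_Dir,
   -- appending "../" once per level, and then adds the part of the source path
   -- beyond the common prefix: the result is "../src/pack.adb".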
-------------------------------------------------------------------------------- function Relative_Name (Of_This_File_Or_Dir : E_Strings.T; To_This_Dir : E_Strings.T) return E_Strings.T is The_Common_Bit : E_Strings.T; Of_This : E_Strings.T; To_This : E_Strings.T; Result : E_Strings.T; function Common_Path (P1, P2 : E_Strings.T) return E_Strings.T is -- -- Returns the common directory. -- The last char is always the directory separator Result : E_Strings.T := E_Strings.Empty_String; begin for I in Integer range 1 .. E_Strings.Get_Length (E_Str => P1) loop exit when I > E_Strings.Get_Length (E_Str => P2); exit when E_Strings.Get_Element (E_Str => P1, Pos => I) /= E_Strings.Get_Element (E_Str => P2, Pos => I); E_Strings.Append_Char (E_Str => Result, Ch => E_Strings.Get_Element (E_Str => P1, Pos => I)); end loop; if E_Strings.Get_Element (E_Str => Result, Pos => E_Strings.Get_Length (E_Str => Result)) /= Directory_Separator then Result := Prefix (E_Str => Result, Separator => Directory_Separator, Include_Separator => True); end if; return Result; end Common_Path; begin Result := E_Strings.Empty_String; To_This := To_This_Dir; Of_This := Of_This_File_Or_Dir; -- Check the input parameters make sense. if Is_File (Path => Of_This) and not Is_Directory (Path => Of_This) then Of_This := Prefix (E_Str => Of_This, Separator => Directory_Separator, Include_Separator => True); end if; if Is_Directory (Path => To_This) then Normalise_Dir (D => To_This); end if; if not Is_Directory (Path => Of_This) or not Is_Directory (Path => To_This) then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Internal error."); end if; The_Common_Bit := Common_Path (P1 => Of_This, P2 => To_This); if E_Strings.Is_Empty (E_Str => The_Common_Bit) then Result := Of_This_File_Or_Dir; elsif E_Strings.Eq_String (E_Str1 => The_Common_Bit, E_Str2 => To_This) then Result := E_Strings.Section (E_Str => Of_This_File_Or_Dir, Start_Pos => E_Strings.Get_Length (E_Str => The_Common_Bit) + 1, Length => E_Strings.Get_Length (E_Str => Of_This_File_Or_Dir) - E_Strings.Get_Length (E_Str => The_Common_Bit)); else loop To_This := Up_Dir (D => To_This); E_Strings.Append_String (E_Str => Result, Str => ".."); E_Strings.Append_Char (E_Str => Result, Ch => Directory_Separator); exit when E_Strings.Eq_String (E_Str1 => To_This, E_Str2 => The_Common_Bit); end loop; E_Strings.Append_Examiner_String (E_Str1 => Result, E_Str2 => E_Strings.Section (E_Str => Of_This_File_Or_Dir, Start_Pos => E_Strings.Get_Length (E_Str => The_Common_Bit) + 1, Length => E_Strings.Get_Length (E_Str => Of_This_File_Or_Dir) - E_Strings.Get_Length (E_Str => The_Common_Bit))); end if; return Result; end Relative_Name; end Directory_Operations; spark-2012.0.deb/sparkmake/all.wrn0000644000175000017500000000022511753202340015711 0ustar eugeneugen-- Warning control file for SPARKMake default_loop_assertions direct handler_parts hidden_parts notes pragma all rep static_expressions with_clauses spark-2012.0.deb/sparkmake/Makefile0000644000175000017500000000475211753202340016062 0ustar eugeneugen# ----------------------------------------------------------------------------- # (C) Altran Praxis Limited # ----------------------------------------------------------------------------- # # The SPARK toolset is free software; you can redistribute it and/or modify it # under terms of the GNU General Public License as published by the Free # Software Foundation; either version 3, or (at your option) any later # version. 
The SPARK toolset is distributed in the hope that it will be # useful, but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the GNU # General Public License distributed with the SPARK toolset; see file # COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of # the license. # # ============================================================================= ################################################################################ # PURPOSE #------------------------------------------------------------------------------- # Makefile for SPARKMake # For Linux, Windows, Solaris or Mac OS X (aka Darwin). ################################################################################ ################################################################################ # BUILD CONFIGURATION ################################################################################ # Name of the output program. OUTPUT_NAME:=sparkmake # Location of root. ROOT:=.. # Location of common. COMMON:=${ROOT}/common include ${COMMON}/Makefile.inc ################################################################################ # TARGETS ################################################################################ all: ${OUTPUT_NAME}${EXE_EXTN} ${OUTPUT_NAME}${EXE_EXTN}: preamble prep parser gnatmake -j${SPARKCPUS} ${GNATMAKE_OPTS} ${OUTPUT_NAME} -o $@ -bargs ${BIND_OPTS} self-analysis: preamble prep parser -spark -plain @${OUTPUT_NAME}.smf # Initialisations # =============== preamble: $(MAKE) -C ${ROOT}/examiner clean # Platform specific prepping # ========================== prep: $(MAKE) -C ${ROOT}/examiner prep parser: $(MAKE) -C ${ROOT}/examiner parser # Cleaning code base # ================== clean: standardclean reallyclean: clean targetclean vcclean preamble $(MAKE) -C ${ROOT}/examiner reallyclean ################################################################################ # END-OF-FILE spark-2012.0.deb/sparkmake/spark.sw0000644000175000017500000000021711753202340016105 0ustar eugeneugen-sparklib -output_directory=vcg -config_file=../common/gnat.cfg -listing_extension=ls_ -casing -index_file=sparkmake.idx -report=sparkmake.rep spark-2012.0.deb/sparkmake/sparkmakeerrors.ads0000644000175000017500000000431511753202340020321 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with E_Strings; --# inherit E_Strings, --# SparkMakeDebug, --# SPARK_IO, --# SystemErrors, --# Version; package SparkMakeErrors is type Fault is ( Duplicate_Errors, Invalid_Switch, Duplicate_Switch, Invalid_Argument, Cannot_Find_File, Cannot_Open_File, Cannot_Close_File, -- Items above this line are errors, items below are warnings. -- See subtypes in package body. Multiple_Main_Programs, Duplicate_Okay, Invalid_Unit); procedure Report (The_Fault : in Fault; E_Str1 : in E_Strings.T; E_Str2 : in E_Strings.T; E_Str3 : in E_Strings.T); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# E_Str1, --# E_Str2, --# E_Str3, --# The_Fault; procedure Fatal (Text : in String); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# Text; end SparkMakeErrors; spark-2012.0.deb/sparkmake/directory_operations.ads0000644000175000017500000000733111753202340021356 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -------------------------------------------------------------------------------- -- Directory_Operations -- -- Purpose: -- Provides a SPARK-friendly binding onto the facilities of -- GNAT.Directory_Operations -- -- Clients: -- SPARKMake - used to scan directories and match files against -- regular expressions. -------------------------------------------------------------------------------- with E_Strings; with RegularExpression; with StringList; --# inherit E_Strings, --# RegularExpression, --# StringList; package Directory_Operations is function Directory_Separator return Character; function Current_Directory return E_Strings.T; -- Looks for files InDirectory Matching the filter. -- Subdirectories are searched in Recursively is true. -- Setting Matching to TrueFilter will return all files -- -- The StringList returned is in lexicographic order - the -- files are sorted according to the ordering defined by -- Standard."<=" for type String. This ensures predictable -- behaviour on platforms where the order returned by -- GNAT.Directory_Operations.Read is not predictable. function Find_Files (Matching : RegularExpression.Object; In_Directory : E_Strings.T; Recursively : Boolean) return StringList.Object; function File_Extension (Path : E_Strings.T) return E_Strings.T; -- -- Return the file extension. This is the string after the last dot -- character in File_Name (Path). It returns the empty string if no -- extension is found. The returned value does contains the file -- extension separator (dot character). 
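   -- For example (illustrative file names): for a path ending in "pack.adb"
   -- the result is ".adb", and for a path with no dot, such as "README", the
   -- result is the empty string; as noted above, the leading dot is kept.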
procedure Set_Extension (Path : in out E_Strings.T; Ext : in E_Strings.T); --# derives Path from *, --# Ext; -- -- Sets the file extension to Ext. If the file has and extension then it is replaced. -- If the file has no extension then it is added. function Filename (Path : E_Strings.T) return E_Strings.T; -- Returns the file name and the file extension if present. It removes all -- path information. function Is_File (Path : E_Strings.T) return Boolean; -- -- Returns true if the given path is a unique file function Is_Directory (Path : E_Strings.T) return Boolean; -- -- Returns true if the given path is a unique directory function Normalize_Path_Name (Name : E_Strings.T; Directory : E_Strings.T) return E_Strings.T; -- -- Resolves relative files names relative to the given directory. function Relative_Name (Of_This_File_Or_Dir : E_Strings.T; To_This_Dir : E_Strings.T) return E_Strings.T; -- -- Returns the relative file or directory name. end Directory_Operations; spark-2012.0.deb/sparkmake/sparkmakecommandline.adb0000644000175000017500000015435611753202340021265 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with CommandLine; with SPARK_IO; with Directory_Operations; with RegularExpression; with SparkMakeDebug; with SparkMakeErrors; with SystemErrors; with CommandLineData; with FileSystem; with Version; package body SparkMakeCommandLine --# own State is Anno_Char_Specified, --# Debug, --# Duplicates, --# Index_File, --# Language_Found, --# Meta_File, --# No_Index, --# No_Meta, --# Path, --# Root_File, --# The_Directories, --# The_Exc_Reg_Exps, --# The_Inc_Reg_Exps; is Path : Path_Type; Root_File : E_Strings.T; Meta_File : E_Strings.T; Index_File : E_Strings.T; Debug : Boolean; Duplicates : Boolean; No_Meta : Boolean; No_Index : Boolean; Anno_Char_Specified : Boolean; Language_Found : Boolean; The_Directories : StringList.Object; The_Inc_Reg_Exps : StringList.Object; The_Exc_Reg_Exps : StringList.Object; ------------------------------------------------------------------------------ function Normalise_Root_Filename (N : E_Strings.T) return E_Strings.T -- -- Ensures any relative path is fully expanded. is Result : E_Strings.T; begin Result := Directory_Operations.Normalize_Path_Name (Name => N, Directory => Directory_Operations.Current_Directory); return Result; end Normalise_Root_Filename; ------------------------------------------------------------------------------ function Normalise_Dir_Name (N : E_Strings.T) return E_Strings.T -- -- Ensures any relative path is fully expanded and that -- there is a directory separator at the end. 
is Result : E_Strings.T; begin Result := Normalise_Root_Filename (N => N); if E_Strings.Get_Length (E_Str => Result) > 0 and then E_Strings.Get_Element (E_Str => Result, Pos => E_Strings.Get_Length (E_Str => Result)) /= Directory_Operations.Directory_Separator then E_Strings.Append_Char (E_Str => Result, Ch => Directory_Operations.Directory_Separator); else Result := E_Strings.Empty_String; end if; return Result; end Normalise_Dir_Name; ------------------------------------------------------------------------------ function Reg_Exp (For_File : in E_Strings.T) return E_Strings.T -- -- Returns a regulat expression that will exactly match the given file. is Result : E_Strings.T; Char : Character; Escape : constant Character := '\'; begin Result := E_Strings.Empty_String; for I in E_Strings.Positions range 1 .. E_Strings.Get_Length (E_Str => For_File) loop Char := E_Strings.Get_Element (E_Str => For_File, Pos => I); if Char = '\' or else Char = '(' or else Char = ')' or else Char = '[' or else Char = ']' or else Char = '.' or else Char = '*' or else Char = '+' or else Char = '?' or else Char = '^' then -- We must escape these characters E_Strings.Append_Char (E_Str => Result, Ch => Escape); end if; E_Strings.Append_Char (E_Str => Result, Ch => Char); end loop; return Result; end Reg_Exp; ------------------------------------------------------------------------------ procedure Report_Usage --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *; -- -- Outputs the usage to the user. is --# hide Report_Usage; begin SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Usage: sparkmake [option] [rootfile]", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Options - all may be abbreviated to the shortest unique prefix", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "Options - all input options may be repeated as necessary", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "option = input_option | output_option | behaviour_option | help_option", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "input_option = dir_option | include_option | exclude_option", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "dir_option = " & "-" & "directory=dirname - Look in and under dirname as well as cwd.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " Default: none", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "include_option = " & "-" & "include=reg_exp - Only include files if full path matches.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " Default: *\.ad[bs]", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "exclude_option = " & "-" & "exclude=reg_exp - Exclude files if full path matches.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " Default: none", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "reg_exp = term", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "term = char", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "term = elmt elmt ... -- concatenation", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "term = * -- any string of 0 or more characters", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "term = ? -- matches any character", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "term = [char char ...] 
-- matches any character listed", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "term = [char - char] -- matches any character in given range", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "elmt = nchr -- matches given character", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "elmt = [nchr nchr ...] -- matches any character listed", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "elmt = [^ nchr nchr ...] -- matches any character not listed", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "elmt = [char - char] -- matches chars in given range", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "elmt = . -- matches any single character", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "elmt = ( regexp ) -- parens used for grouping", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "char = any character, including special characters", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "nchr = any character except \()[].*+?^ or \char to match char", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "The characters '{' and '}' are NOT allowed to appear in any regular expression", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "behaviour_option = duplicates_option | annotation_option | language_option", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "duplicates_option = " & "-" & "duplicates_are_errors - Fail if duplicate units are found.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " Default: false", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "annotation_option = " & "-" & "annotation_character - Specify annotation character.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " Default: #", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "language_option = " & "-" & "language=83 | 95 | 2005 - Specify language profile.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " Default: 95", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "output_option = index_option | meta_option | noindex_option | nometa_option |", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " path_option", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "index_option = " & "-" & "index=file-spec - The index file.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " Default rootfile.idx", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "meta_option = " & "-" & "meta=file-spec - The meta file.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " Default rootfile.smf", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "noindex_option = " & "-" & "noindexfile - Suppress generation of index file.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " Default: false", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "nometa_option = " & "-" & "nometafile - Suppress generation of meta file.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " Default: false", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "path_option = " & "-" & "path=full | relative - Produce relative or full pathnames.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " Default: full", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "help_option = helper_option | version_option", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "helper_option = " & "-" & "help - print off help information.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "version_option = " & "-" & "version - print off version information.", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line 
(SPARK_IO.Standard_Output, "rootfile = file-spec - The file to make.", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " Default: Produce index and metafile", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " for analysis of all files in and", 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, " under current directory.", 0); SPARK_IO.New_Line (SPARK_IO.Standard_Output, 1); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Version.Toolset_Support_Line1, 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Version.Toolset_Support_Line2, 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Version.Toolset_Support_Line3, 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Version.Toolset_Support_Line4, 0); end Report_Usage; ------------------------------------------------------------------------------ procedure Report_Version --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *; -- -- Outputs the usage to the user. is --# hide Report_Version; begin SPARK_IO.Put_Line (SPARK_IO.Standard_Output, "SPARKMake " & Version.Toolset_Banner_Line, 0); SPARK_IO.Put_Line (SPARK_IO.Standard_Output, Version.Toolset_Copyright, 0); end Report_Version; ------------------------------------------------------------------------------ procedure Process_Root_File_Argument (Arg : in E_Strings.T; Success : out Boolean) --# global in Debug; --# in out SPARK_IO.File_Sys; --# out Root_File; --# derives Root_File, --# Success from Arg & --# SPARK_IO.File_Sys from *, --# Arg & --# null from Debug; is begin -- Expand the root file to its full path Root_File := Normalise_Root_Filename (N => Arg); if Debug then SparkMakeDebug.Report_Text_E_Text (Text => "Found root file argument: ", E_Text => Arg); SparkMakeDebug.Report_Text_E_Text (Text => "Expanded to: ", E_Text => Root_File); end if; -- Does the file exist? 
if Directory_Operations.Is_File (Path => Root_File) then Success := True; else Success := False; SparkMakeErrors.Report (The_Fault => SparkMakeErrors.Cannot_Find_File, E_Str1 => Root_File, E_Str2 => E_Strings.Empty_String, E_Str3 => E_Strings.Empty_String); end if; end Process_Root_File_Argument; ------------------------------------------------------------------------------ procedure Process_Include_Switch (Switch : in E_Strings.T; Arg : in E_Strings.T; Success : out Boolean) --# global in Debug; --# in out SPARK_IO.File_Sys; --# in out The_Inc_Reg_Exps; --# derives SPARK_IO.File_Sys from *, --# Arg, --# Switch & --# Success from Arg & --# The_Inc_Reg_Exps from *, --# Arg & --# null from Debug; is begin if Debug then SparkMakeDebug.Report_Text_E_Text (Text => "Found include switch: ", E_Text => Arg); end if; if E_Strings.Is_Empty (E_Str => Arg) or else RegularExpression.Is_Null (O => RegularExpression.Create (Arg)) then Success := False; SparkMakeErrors.Report (The_Fault => SparkMakeErrors.Invalid_Argument, E_Str1 => Arg, E_Str2 => Switch, E_Str3 => E_Strings.Empty_String); else Success := True; StringList.Add_To_Front (To_List => The_Inc_Reg_Exps, The_Item => Arg); end if; end Process_Include_Switch; ------------------------------------------------------------------------------ procedure Process_Exclude_Switch (Switch : in E_Strings.T; Arg : in E_Strings.T; Success : out Boolean) --# global in Debug; --# in out SPARK_IO.File_Sys; --# in out The_Exc_Reg_Exps; --# derives SPARK_IO.File_Sys from *, --# Arg, --# Switch & --# Success from Arg & --# The_Exc_Reg_Exps from *, --# Arg & --# null from Debug; is begin if Debug then SparkMakeDebug.Report_Text_E_Text (Text => "Found exclude switch: ", E_Text => Arg); end if; if E_Strings.Is_Empty (E_Str => Arg) or else RegularExpression.Is_Null (O => RegularExpression.Create (Arg)) then Success := False; SparkMakeErrors.Report (The_Fault => SparkMakeErrors.Invalid_Argument, E_Str1 => Arg, E_Str2 => Switch, E_Str3 => E_Strings.Empty_String); else Success := True; StringList.Add_To_Front (To_List => The_Exc_Reg_Exps, The_Item => Arg); end if; end Process_Exclude_Switch; ------------------------------------------------------------------------------ procedure Process_Directory_Switch (Switch : in E_Strings.T; Arg : in E_Strings.T; Success : out Boolean) --# global in Debug; --# in out SPARK_IO.File_Sys; --# in out The_Directories; --# derives SPARK_IO.File_Sys from *, --# Arg, --# Switch & --# Success from Arg & --# The_Directories from *, --# Arg & --# null from Debug; is The_Dir : E_Strings.T; begin The_Dir := Normalise_Dir_Name (N => Arg); if Debug then SparkMakeDebug.Report_Text_E_Text (Text => "Found directory switch: ", E_Text => Arg); SparkMakeDebug.Report_Text_E_Text (Text => "Normalised to: ", E_Text => The_Dir); end if; if Directory_Operations.Is_Directory (Path => The_Dir) then Success := True; -- the current directory is used by default so don't add it again if not E_Strings.Eq_String (E_Str1 => The_Dir, E_Str2 => Directory_Operations.Current_Directory) then StringList.Add_To_Back (To_List => The_Directories, The_Item => The_Dir); end if; else Success := False; SparkMakeErrors.Report (The_Fault => SparkMakeErrors.Invalid_Argument, E_Str1 => The_Dir, E_Str2 => Switch, E_Str3 => E_Strings.Empty_String); end if; end Process_Directory_Switch; ------------------------------------------------------------------------------ procedure Process_Index_Switch (Switch : in E_Strings.T; Arg : in E_Strings.T; Success : out Boolean) --# global in Debug; --# in 
out Index_File; --# in out SPARK_IO.File_Sys; --# derives Index_File from *, --# Arg & --# SPARK_IO.File_Sys from *, --# Index_File, --# Switch & --# Success from Index_File & --# null from Debug; is begin if Debug then SparkMakeDebug.Report_Text_E_Text (Text => "Found index switch: ", E_Text => Arg); end if; if E_Strings.Is_Empty (E_Str => Index_File) then Success := True; Index_File := Arg; FileSystem.Check_Extension (Fn => Index_File, Ext => E_Strings.Copy_String (Str => CommandLineData.Default_Index_Extension)); else Success := False; SparkMakeErrors.Report (The_Fault => SparkMakeErrors.Duplicate_Switch, E_Str1 => Switch, E_Str2 => E_Strings.Empty_String, E_Str3 => E_Strings.Empty_String); end if; end Process_Index_Switch; ------------------------------------------------------------------------------ procedure Process_Meta_Switch (Switch : in E_Strings.T; Arg : in E_Strings.T; Success : out Boolean) --# global in Debug; --# in out Meta_File; --# in out SPARK_IO.File_Sys; --# derives Meta_File from *, --# Arg & --# SPARK_IO.File_Sys from *, --# Meta_File, --# Switch & --# Success from Meta_File & --# null from Debug; is begin if Debug then SparkMakeDebug.Report_Text_E_Text (Text => "Found meta switch: ", E_Text => Arg); end if; if E_Strings.Is_Empty (E_Str => Meta_File) then Success := True; Meta_File := Arg; FileSystem.Check_Extension (Fn => Meta_File, Ext => E_Strings.Copy_String (CommandLineData.Meta_File_Extension)); else -- duplicate meta file switch Success := False; SparkMakeErrors.Report (The_Fault => SparkMakeErrors.Duplicate_Switch, E_Str1 => Switch, E_Str2 => E_Strings.Empty_String, E_Str3 => E_Strings.Empty_String); end if; end Process_Meta_Switch; ------------------------------------------------------------------------------ procedure Process_Path_Switch (Switch : in E_Strings.T; Arg : in E_Strings.T; Success : out Boolean) --# global in Debug; --# in out Path; --# in out SPARK_IO.File_Sys; --# derives Path, --# Success from Arg, --# Path & --# SPARK_IO.File_Sys from *, --# Arg, --# Path, --# Switch & --# null from Debug; is begin if Debug then SparkMakeDebug.Report_Text_E_Text (Text => "Found path switch: ", E_Text => Arg); end if; if Path = Undefined then if E_Strings.Eq1_String (E_Str => Arg, Str => "full") or else E_Strings.Eq1_String (E_Str => Arg, Str => "ful") or else E_Strings.Eq1_String (E_Str => Arg, Str => "fu") or else E_Strings.Eq1_String (E_Str => Arg, Str => "f") then Success := True; Path := Full; elsif E_Strings.Eq1_String (E_Str => Arg, Str => "relative") or else E_Strings.Eq1_String (E_Str => Arg, Str => "relativ") or else E_Strings.Eq1_String (E_Str => Arg, Str => "relati") or else E_Strings.Eq1_String (E_Str => Arg, Str => "relat") or else E_Strings.Eq1_String (E_Str => Arg, Str => "rela") or else E_Strings.Eq1_String (E_Str => Arg, Str => "rel") or else E_Strings.Eq1_String (E_Str => Arg, Str => "re") or else E_Strings.Eq1_String (E_Str => Arg, Str => "r") then Success := True; Path := Relative; else Success := False; SparkMakeErrors.Report (The_Fault => SparkMakeErrors.Invalid_Argument, E_Str1 => Arg, E_Str2 => Switch, E_Str3 => E_Strings.Empty_String); end if; else -- duplicate path switch Success := False; SparkMakeErrors.Report (The_Fault => SparkMakeErrors.Duplicate_Switch, E_Str1 => Switch, E_Str2 => E_Strings.Empty_String, E_Str3 => E_Strings.Empty_String); end if; end Process_Path_Switch; ------------------------------------------------------------------------------ procedure Process_Language_Switch (Switch : in E_Strings.T; Arg : in 
E_Strings.T; Success : out Boolean) --# global in Debug; --# in out CommandLineData.Content; --# in out Language_Found; --# in out SPARK_IO.File_Sys; --# derives CommandLineData.Content, --# Language_Found from *, --# Arg, --# Language_Found & --# SPARK_IO.File_Sys from *, --# Arg, --# Language_Found, --# Switch & --# Success from Arg, --# Language_Found & --# null from Debug; is begin if Debug then SparkMakeDebug.Report_Text_E_Text (Text => "Found language switch: ", E_Text => Arg); end if; if not Language_Found then if E_Strings.Eq1_String (E_Str => Arg, Str => "83") or else E_Strings.Eq1_String (E_Str => Arg, Str => "8") then Success := True; Language_Found := True; CommandLineData.Content.Language_Profile := CommandLineData.SPARK83; elsif E_Strings.Eq1_String (E_Str => Arg, Str => "95") or else E_Strings.Eq1_String (E_Str => Arg, Str => "9") then Success := True; Language_Found := True; CommandLineData.Content.Language_Profile := CommandLineData.SPARK95; elsif E_Strings.Eq1_String (E_Str => Arg, Str => "2005") or else E_Strings.Eq1_String (E_Str => Arg, Str => "200") or else E_Strings.Eq1_String (E_Str => Arg, Str => "20") or else E_Strings.Eq1_String (E_Str => Arg, Str => "2") then Success := True; Language_Found := True; CommandLineData.Content.Language_Profile := CommandLineData.SPARK2005; else Success := False; SparkMakeErrors.Report (The_Fault => SparkMakeErrors.Invalid_Argument, E_Str1 => Arg, E_Str2 => Switch, E_Str3 => E_Strings.Empty_String); end if; else -- duplicate language switch Success := False; SparkMakeErrors.Report (The_Fault => SparkMakeErrors.Duplicate_Switch, E_Str1 => Switch, E_Str2 => E_Strings.Empty_String, E_Str3 => E_Strings.Empty_String); end if; end Process_Language_Switch; ------------------------------------------------------------------------------ procedure Process_Anno_Char (Switch : in E_Strings.T; Arg : in E_Strings.T; Success : out Boolean) --# global in Debug; --# in out Anno_Char_Specified; --# in out CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# derives Anno_Char_Specified, --# CommandLineData.Content from *, --# Anno_Char_Specified, --# Arg & --# SPARK_IO.File_Sys from *, --# Anno_Char_Specified, --# Arg, --# Switch & --# Success from Anno_Char_Specified, --# Arg & --# null from Debug; is begin if Debug then SparkMakeDebug.Report_Text_E_Text (Text => "Found annotation character argument: ", E_Text => Arg); end if; if not Anno_Char_Specified then if E_Strings.Get_Length (E_Str => Arg) = 1 then Success := True; Anno_Char_Specified := True; -- set AnnoChar in the Examiner CommandLineData.Content.Anno_Char := E_Strings.Get_Element (E_Str => Arg, Pos => 1); -- expect warning here else Success := False; SparkMakeErrors.Report (The_Fault => SparkMakeErrors.Invalid_Argument, E_Str1 => Arg, E_Str2 => Switch, E_Str3 => E_Strings.Empty_String); end if; else -- duplicate annochar switch Success := False; SparkMakeErrors.Report (The_Fault => SparkMakeErrors.Duplicate_Switch, E_Str1 => Switch, E_Str2 => E_Strings.Empty_String, E_Str3 => E_Strings.Empty_String); end if; end Process_Anno_Char; ------------------------------------------------------------------------------ procedure Process (Success : out Boolean; Help_Or_Ver_Found : out Boolean) --# global in CommandLine.State; --# in out CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# out Anno_Char_Specified; --# out Debug; --# out Duplicates; --# out Index_File; --# out Language_Found; --# out Meta_File; --# out No_Index; --# out No_Meta; --# out Path; --# out Root_File; --# out 
The_Directories; --# out The_Exc_Reg_Exps; --# out The_Inc_Reg_Exps; --# derives Anno_Char_Specified, --# Debug, --# Duplicates, --# Help_Or_Ver_Found, --# Index_File, --# Language_Found, --# Meta_File, --# No_Index, --# No_Meta, --# Path, --# Root_File, --# Success, --# The_Directories, --# The_Exc_Reg_Exps, --# The_Inc_Reg_Exps from CommandLine.State & --# CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# CommandLine.State; is Switch : E_Strings.T; Argument : E_Strings.T; The_Reg_Exp : E_Strings.T; Switch_Or_Arg_Found : Boolean; Done : Boolean; Option_OK : Boolean; begin Path := Undefined; Root_File := E_Strings.Empty_String; Meta_File := E_Strings.Empty_String; Index_File := E_Strings.Empty_String; Anno_Char_Specified := False; Debug := False; Duplicates := False; The_Inc_Reg_Exps := StringList.Null_Object; The_Exc_Reg_Exps := StringList.Null_Object; Success := True; Done := False; Help_Or_Ver_Found := False; No_Index := False; No_Meta := False; -- Default language profile is SPARK95 Language_Found := False; CommandLineData.Content.Language_Profile := CommandLineData.SPARK95; -- Always allow FDL reserved words as identifiers. Leave it to the Examiner -- to reject them later if required. CommandLineData.Content.FDL_Reserved := False; -- The current directory is always assumed The_Directories := StringList.Null_Object; StringList.Add_To_Front (To_List => The_Directories, The_Item => Directory_Operations.Current_Directory); -- Setup the command line CommandLine.Setup; -- Read options while not Done and not Help_Or_Ver_Found loop CommandLine.Read (Switch => Switch, Argument => Argument, Success => Switch_Or_Arg_Found); if Switch_Or_Arg_Found then if E_Strings.Is_Empty (E_Str => Switch) then -- ARGUMENT: root file Process_Root_File_Argument (Arg => Argument, Success => Option_OK); elsif E_Strings.Eq1_String (E_Str => Switch, Str => "help") or else E_Strings.Eq1_String (E_Str => Switch, Str => "hel") or else E_Strings.Eq1_String (E_Str => Switch, Str => "he") or else E_Strings.Eq1_String (E_Str => Switch, Str => "h") then -- SWITCH: help Report_Usage; Help_Or_Ver_Found := True; Option_OK := True; elsif E_Strings.Eq1_String (E_Str => Switch, Str => "version") or else E_Strings.Eq1_String (E_Str => Switch, Str => "versio") or else E_Strings.Eq1_String (E_Str => Switch, Str => "versi") or else E_Strings.Eq1_String (E_Str => Switch, Str => "vers") or else E_Strings.Eq1_String (E_Str => Switch, Str => "ver") or else E_Strings.Eq1_String (E_Str => Switch, Str => "ve") or else E_Strings.Eq1_String (E_Str => Switch, Str => "v") then -- SWITCH: version Report_Version; Help_Or_Ver_Found := True; Option_OK := True; elsif E_Strings.Eq1_String (E_Str => Switch, Str => "language") or else E_Strings.Eq1_String (E_Str => Switch, Str => "languag") or else E_Strings.Eq1_String (E_Str => Switch, Str => "langua") or else E_Strings.Eq1_String (E_Str => Switch, Str => "langu") or else E_Strings.Eq1_String (E_Str => Switch, Str => "lang") or else E_Strings.Eq1_String (E_Str => Switch, Str => "lan") or else E_Strings.Eq1_String (E_Str => Switch, Str => "la") or else E_Strings.Eq1_String (E_Str => Switch, Str => "l") then -- SWITCH: language Process_Language_Switch (Switch => Switch, Arg => Argument, Success => Option_OK); elsif E_Strings.Eq1_String (E_Str => Switch, Str => "path") or else E_Strings.Eq1_String (E_Str => Switch, Str => "pat") or else E_Strings.Eq1_String (E_Str => Switch, Str => "pa") or else E_Strings.Eq1_String (E_Str => Switch, Str => "p") then -- SWITCH: path Process_Path_Switch (Switch 
=> Switch, Arg => Argument, Success => Option_OK); elsif E_Strings.Eq1_String (E_Str => Switch, Str => "directory") or else E_Strings.Eq1_String (E_Str => Switch, Str => "director") or else E_Strings.Eq1_String (E_Str => Switch, Str => "directo") or else E_Strings.Eq1_String (E_Str => Switch, Str => "direct") or else E_Strings.Eq1_String (E_Str => Switch, Str => "direc") or else E_Strings.Eq1_String (E_Str => Switch, Str => "dire") or else E_Strings.Eq1_String (E_Str => Switch, Str => "dir") or else E_Strings.Eq1_String (E_Str => Switch, Str => "di") then -- SWITCH: directory Process_Directory_Switch (Switch => Switch, Arg => Argument, Success => Option_OK); elsif E_Strings.Eq1_String (E_Str => Switch, Str => "include") or else E_Strings.Eq1_String (E_Str => Switch, Str => "includ") or else E_Strings.Eq1_String (E_Str => Switch, Str => "inclu") or else E_Strings.Eq1_String (E_Str => Switch, Str => "incl") or else E_Strings.Eq1_String (E_Str => Switch, Str => "inc") then -- SWITCH: include Process_Include_Switch (Switch => Switch, Arg => Argument, Success => Option_OK); elsif E_Strings.Eq1_String (E_Str => Switch, Str => "exclude") or else E_Strings.Eq1_String (E_Str => Switch, Str => "exclud") or else E_Strings.Eq1_String (E_Str => Switch, Str => "exclu") or else E_Strings.Eq1_String (E_Str => Switch, Str => "excl") or else E_Strings.Eq1_String (E_Str => Switch, Str => "exc") or else E_Strings.Eq1_String (E_Str => Switch, Str => "ex") or else E_Strings.Eq1_String (E_Str => Switch, Str => "e") then -- SWITCH: exclude Process_Exclude_Switch (Switch => Switch, Arg => Argument, Success => Option_OK); elsif E_Strings.Eq1_String (E_Str => Switch, Str => "meta") or else E_Strings.Eq1_String (E_Str => Switch, Str => "met") or else E_Strings.Eq1_String (E_Str => Switch, Str => "me") or else E_Strings.Eq1_String (E_Str => Switch, Str => "m") then -- SWITCH: meta Process_Meta_Switch (Switch => Switch, Arg => Argument, Success => Option_OK); elsif E_Strings.Eq1_String (E_Str => Switch, Str => "index") or else E_Strings.Eq1_String (E_Str => Switch, Str => "inde") or else E_Strings.Eq1_String (E_Str => Switch, Str => "ind") then -- SWITCH: index Process_Index_Switch (Switch => Switch, Arg => Argument, Success => Option_OK); elsif E_Strings.Eq1_String (E_Str => Switch, Str => "noindexfile") or else E_Strings.Eq1_String (E_Str => Switch, Str => "noindexfil") or else E_Strings.Eq1_String (E_Str => Switch, Str => "noindexfi") or else E_Strings.Eq1_String (E_Str => Switch, Str => "noindexf") or else E_Strings.Eq1_String (E_Str => Switch, Str => "noindex") or else E_Strings.Eq1_String (E_Str => Switch, Str => "noinde") or else E_Strings.Eq1_String (E_Str => Switch, Str => "noind") or else E_Strings.Eq1_String (E_Str => Switch, Str => "noin") or else E_Strings.Eq1_String (E_Str => Switch, Str => "noi") then -- SWITCH: noindexfile No_Index := True; Option_OK := True; SparkMakeDebug.Report_Text (Text => "Found noindexfile switch"); elsif E_Strings.Eq1_String (E_Str => Switch, Str => "nometafile") or else E_Strings.Eq1_String (E_Str => Switch, Str => "nometafil") or else E_Strings.Eq1_String (E_Str => Switch, Str => "nometafi") or else E_Strings.Eq1_String (E_Str => Switch, Str => "nometaf") or else E_Strings.Eq1_String (E_Str => Switch, Str => "nometa") or else E_Strings.Eq1_String (E_Str => Switch, Str => "nomet") or else E_Strings.Eq1_String (E_Str => Switch, Str => "nome") or else E_Strings.Eq1_String (E_Str => Switch, Str => "nom") then -- SWITCH: nometafile No_Meta := True; Option_OK := True; 
SparkMakeDebug.Report_Text (Text => "Found nometafile switch"); elsif E_Strings.Eq1_String (E_Str => Switch, Str => "debug") then -- SWITCH: debug Debug := True; Option_OK := True; SparkMakeDebug.Report_Text (Text => "Found debug switch"); elsif E_Strings.Eq1_String (E_Str => Switch, Str => "duplicates_are_errors") or else E_Strings.Eq1_String (E_Str => Switch, Str => "duplicates_are_error") or else E_Strings.Eq1_String (E_Str => Switch, Str => "duplicates_are_erro") or else E_Strings.Eq1_String (E_Str => Switch, Str => "duplicates_are_err") or else E_Strings.Eq1_String (E_Str => Switch, Str => "duplicates_are_er") or else E_Strings.Eq1_String (E_Str => Switch, Str => "duplicates_are_e") or else E_Strings.Eq1_String (E_Str => Switch, Str => "duplicates_are_") or else E_Strings.Eq1_String (E_Str => Switch, Str => "duplicates_are") or else E_Strings.Eq1_String (E_Str => Switch, Str => "duplicates_ar") or else E_Strings.Eq1_String (E_Str => Switch, Str => "duplicates_a") or else E_Strings.Eq1_String (E_Str => Switch, Str => "duplicates_") or else E_Strings.Eq1_String (E_Str => Switch, Str => "duplicates") or else E_Strings.Eq1_String (E_Str => Switch, Str => "duplicate") or else E_Strings.Eq1_String (E_Str => Switch, Str => "duplicat") or else E_Strings.Eq1_String (E_Str => Switch, Str => "duplica") or else E_Strings.Eq1_String (E_Str => Switch, Str => "duplic") or else E_Strings.Eq1_String (E_Str => Switch, Str => "dupli") or else E_Strings.Eq1_String (E_Str => Switch, Str => "dupl") or else E_Strings.Eq1_String (E_Str => Switch, Str => "dup") or else E_Strings.Eq1_String (E_Str => Switch, Str => "du") then -- SWITCH: duplicates Duplicates := True; Option_OK := True; SparkMakeDebug.Report_Text (Text => "Found duplicates switch"); elsif E_Strings.Eq1_String (E_Str => Switch, Str => "annotation_character") or else E_Strings.Eq1_String (E_Str => Switch, Str => "annotation_characte") or else E_Strings.Eq1_String (E_Str => Switch, Str => "annotation_charact") or else E_Strings.Eq1_String (E_Str => Switch, Str => "annotation_charac") or else E_Strings.Eq1_String (E_Str => Switch, Str => "annotation_chara") or else E_Strings.Eq1_String (E_Str => Switch, Str => "annotation_char") or else E_Strings.Eq1_String (E_Str => Switch, Str => "annotation_cha") or else E_Strings.Eq1_String (E_Str => Switch, Str => "annotation_ch") or else E_Strings.Eq1_String (E_Str => Switch, Str => "annotation_c") or else E_Strings.Eq1_String (E_Str => Switch, Str => "annotation_") or else E_Strings.Eq1_String (E_Str => Switch, Str => "annotation") or else E_Strings.Eq1_String (E_Str => Switch, Str => "annotatio") or else E_Strings.Eq1_String (E_Str => Switch, Str => "annotati") or else E_Strings.Eq1_String (E_Str => Switch, Str => "annotat") or else E_Strings.Eq1_String (E_Str => Switch, Str => "annota") or else E_Strings.Eq1_String (E_Str => Switch, Str => "annot") or else E_Strings.Eq1_String (E_Str => Switch, Str => "anno") or else E_Strings.Eq1_String (E_Str => Switch, Str => "ann") or else E_Strings.Eq1_String (E_Str => Switch, Str => "an") or else E_Strings.Eq1_String (E_Str => Switch, Str => "a") then Process_Anno_Char (Switch => Switch, Arg => Argument, Success => Option_OK); else -- unrecognised switch Option_OK := False; SparkMakeErrors.Report (The_Fault => SparkMakeErrors.Invalid_Switch, E_Str1 => Switch, E_Str2 => E_Strings.Empty_String, E_Str3 => E_Strings.Empty_String); end if; Success := Success and Option_OK; else -- nothing more on the command line. 
Done := True; end if; end loop; -- If usage or version info requested then don't continue with any other processing if Success and not Help_Or_Ver_Found then -- The command line has parsed OK -- Set non specified switches to default values. if Path = Undefined then Path := Full; end if; if E_Strings.Is_Empty (E_Str => Index_File) then -- Index file not specified so index file is .idx if E_Strings.Is_Empty (E_Str => Root_File) then Index_File := E_Strings.Copy_String (Str => "spark"); else Index_File := Root_File; end if; Directory_Operations.Set_Extension (Path => Index_File, Ext => E_Strings.Copy_String (CommandLineData.Default_Index_Extension)); if Debug then SparkMakeDebug.Report_Text_E_Text (Text => "Using default index file: ", E_Text => Index_File); end if; end if; if E_Strings.Is_Empty (E_Str => Meta_File) then -- Meta file not specified so meta file is .smf if E_Strings.Is_Empty (E_Str => Root_File) then Meta_File := E_Strings.Copy_String (Str => "spark"); else Meta_File := Root_File; end if; Directory_Operations.Set_Extension (Path => Meta_File, Ext => E_Strings.Copy_String (CommandLineData.Meta_File_Extension)); if Debug then SparkMakeDebug.Report_Text_E_Text (Text => "Using default meta file: ", E_Text => Meta_File); end if; end if; if StringList.Is_Null (It => StringList.Get_First (In_List => The_Inc_Reg_Exps)) then -- No include was specified so use the default (GNAT file naming convention). StringList.Add_To_Front (To_List => The_Inc_Reg_Exps, The_Item => E_Strings.Copy_String (Str => "*\.ad[bs]")); if Debug then SparkMakeDebug.Report_Text (Text => "No include switch. Will use the GNAT naming convention"); end if; end if; if not E_Strings.Is_Empty (E_Str => Root_File) then -- Make sure the root file will be included. The_Reg_Exp := Reg_Exp (For_File => Root_File); if E_Strings.Is_Empty (E_Str => The_Reg_Exp) then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Cannot generate regular expression for root file."); else SparkMakeDebug.Report_Text_E_Text (Text => "Root file will be included by ", E_Text => The_Reg_Exp); StringList.Add_To_Front (To_List => The_Inc_Reg_Exps, The_Item => The_Reg_Exp); end if; end if; end if; end Process; function Path_Required return Path_Type --# global in Path; is begin return Path; end Path_Required; function Meta_Filename return E_Strings.T --# global in Meta_File; is begin return Meta_File; end Meta_Filename; function Index_Filename return E_Strings.T --# global in Index_File; is begin return Index_File; end Index_Filename; function Root_Filename return E_Strings.T --# global in Root_File; is begin return Root_File; end Root_Filename; function Duplicates_Error return Boolean --# global in Duplicates; is begin return Duplicates; end Duplicates_Error; function Debug_On return Boolean --# global in Debug; is begin return Debug; end Debug_On; function No_Index_File return Boolean --# global in No_Index; is begin return No_Index; end No_Index_File; function No_Meta_File return Boolean --# global in No_Meta; is begin return No_Meta; end No_Meta_File; function The_Directory_Names return StringList.Object --# global in The_Directories; is begin return The_Directories; end The_Directory_Names; function The_Inc_File_Reg_Exps return StringList.Object --# global in The_Inc_Reg_Exps; is begin return The_Inc_Reg_Exps; end The_Inc_File_Reg_Exps; function The_Exc_File_Reg_Exps return StringList.Object --# global in The_Exc_Reg_Exps; is begin return The_Exc_Reg_Exps; end The_Exc_File_Reg_Exps; end SparkMakeCommandLine; 
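-- A minimal sketch of how this package is driven from the Sparkmake main
-- program (the subprogram names are from this package; the local names OK and
-- Help_Requested are illustrative only):
--
--    SparkMakeCommandLine.Process
--      (Success => OK, Help_Or_Ver_Found => Help_Requested);
--    if OK and then not Help_Requested then
--       --  Build the index file unless No_Index_File, and the meta file
--       --  unless No_Meta_File, using Root_Filename, Index_Filename and
--       --  Meta_Filename.
--       null;
--    end if;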
spark-2012.0.deb/sparkmake/units.ads0000644000175000017500000000462011753202340016247 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Unit; --# inherit Unit; package Units is type Stack is private; NullStack : constant Stack; type Iterator is private; ---------------------------------------------------------- -- Id Stack ---------------------------------------------------------- function InStack (TheUnit : in Unit.Id; TheStack : in Stack) return Boolean; procedure Push (TheStack : in out Stack; TheUnit : in Unit.Id); --# derives TheStack from *, --# TheUnit; procedure Pop (TheStack : in out Stack; TheUnit : out Unit.Id); --# derives TheStack, --# TheUnit from TheStack; function IsEmpty (TheStack : Stack) return Boolean; procedure Sort (TheUnits : in out Stack); --# derives TheUnits from *; procedure Init_Iterator (TheStack : in Stack; TheIterator : out Iterator); --# derives TheIterator from TheStack; function Iterated (TheIterator : in Iterator) return Boolean; procedure Iterate (TheIterator : in out Iterator; TheUnit : out Unit.Id); --# derives TheIterator, --# TheUnit from TheIterator; private --# hide Units; type Node; type Stack is access Node; NullStack : constant Stack := null; type Node is record TheItem : Unit.Id; Next : Stack; end record; type Iterator is new Stack; end Units; spark-2012.0.deb/sparkmake/sparkmake.idx0000644000175000017500000000132511753202340017077 0ustar eugeneugensuperindex is in ../examiner/spark.idx regularexpression specification is in regularexpression.ads sparkmakecommandline specification is in sparkmakecommandline.ads sparkmakedebug specification is in sparkmakedebug.ads sparkmakeerrors specification is in sparkmakeerrors.ads stringlist specification is in stringlist.ads tokenmanager specification is in tokenmanager.ads unit specification is in unit.ads unitmanager specification is in unitmanager.ads unitmanager components are unitmanager.unitstore unitmanager.unitstore specification is in unitmanager-unitstore.ads units specification is in units.ads directory_operations specification is in directory_operations.ads commandline specification is in commandline.ads spark-2012.0.deb/sparkmake/tokenmanager.adb0000644000175000017500000001664011753202340017544 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software 
Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with LexTokenManager; with SparkLex; with SP_Symbols; with SparkMakeErrors; use type SP_Symbols.SP_Terminal; package body TokenManager is procedure Next (It : in out Iterator) is Unused : Boolean; Token_Kind : SP_Symbols.SP_Terminal; Lex_Value : LexTokenManager.Lex_Value; procedure Get_Identifier (It : in out Iterator) --# global in CommandLineData.Content; --# in Dictionary.Dict; --# in out ErrorHandler.Error_Context; --# in out LexTokenManager.State; --# in out SparkLex.Curr_Line; --# in out SPARK_IO.File_Sys; --# derives ErrorHandler.Error_Context, --# It, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys from CommandLineData.Content, --# Dictionary.Dict, --# ErrorHandler.Error_Context, --# It, --# LexTokenManager.State, --# SparkLex.Curr_Line, --# SPARK_IO.File_Sys; --# pre SparkLex.Curr_Line_Invariant (SparkLex.Curr_Line); --# post SparkLex.Curr_Line_Invariant (SparkLex.Curr_Line); is Unused : Boolean; Token_Kind : SP_Symbols.SP_Terminal; Lex_Value : LexTokenManager.Lex_Value; begin while It /= Null_Iterator loop --# assert SparkLex.Curr_Line_Invariant (SparkLex.Curr_Line); -- is there more dotted notation? --# accept Flow, 10, Unused, "Ineffective assignment OK"; SparkLex.Examiner_Lex (Prog_Text => It.File, Token => Token_Kind, Lex_Val => Lex_Value, Punct_Token => Unused); --# end accept; -- No, this is the end of the identifier and we have read a token too far. 
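            --  (Added note, for clarity) For a dotted name such as
            --  "Pkg.Child.Item" the loop above alternates between point and
            --  identifier tokens, appending each part to
            --  It.Current_Token.Value; the first token that is not a point
            --  terminates the name and is saved in It.Next_Token so that a
            --  subsequent call to Next can return it without lexing again.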
if Token_Kind /= SP_Symbols.point then It.Next_Token := Token'(Kind => Token_Kind, Value => LexTokenManager.Lex_String_To_String (Lex_Str => Lex_Value.Token_Str)); exit; end if; -- Read the next part of the dotted identifier --# accept Flow, 10, Unused, "Ineffective assignment OK"; SparkLex.Examiner_Lex (Prog_Text => It.File, Token => Token_Kind, Lex_Val => Lex_Value, Punct_Token => Unused); --# end accept; if Token_Kind /= SP_Symbols.identifier then -- Invlaid syntax return a null iterator It := Null_Iterator; else E_Strings.Append_String (E_Str => It.Current_Token.Value, Str => "."); E_Strings.Append_Examiner_String (E_Str1 => It.Current_Token.Value, E_Str2 => LexTokenManager.Lex_String_To_String (Lex_Str => Lex_Value.Token_Str)); end if; end loop; --# accept Flow, 33, Unused, "Unused not references OK"; end Get_Identifier; begin if It = Null_Iterator then SparkMakeErrors.Fatal ("calling TokenManager.Next with a null iterator."); elsif It.Is_Look_Ahead then It.Is_Look_Ahead := False; elsif It.Next_Token /= Null_Token then It.Current_Token := It.Next_Token; It.Next_Token := Null_Token; else --# accept Flow, 10, Unused, "Ineffective assignment OK"; SparkLex.Examiner_Lex (Prog_Text => It.File, Token => Token_Kind, Lex_Val => Lex_Value, Punct_Token => Unused); --# end accept; if Token_Kind = SP_Symbols.SPEND then It := Null_Iterator; else It.Current_Token := Token'(Kind => Token_Kind, Value => LexTokenManager.Lex_String_To_String (Lex_Str => Lex_Value.Token_Str)); if Token_Kind = SP_Symbols.identifier then Get_Identifier (It => It); end if; end if; end if; --# accept Flow, 33, Unused, "Unused not references OK"; end Next; ----------------------------------------------------------------------------- procedure Look_Ahead (It : in out Iterator) is begin if It = Null_Iterator then SparkMakeErrors.Fatal ("calling TokenManager.Look_Ahead with a null iterator."); elsif not It.Is_Look_Ahead then Next (It => It); It.Is_Look_Ahead := True; end if; end Look_Ahead; ----------------------------------------------------------------------------- procedure Get_First_Token (File_Id : in SPARK_IO.File_Type; It : out Iterator) is New_It : Iterator; begin SparkLex.Clear_Line_Context; New_It := Iterator'(File => File_Id, Current_Token => Null_Token, Next_Token => Null_Token, Is_Look_Ahead => False); Next (It => New_It); It := New_It; end Get_First_Token; ----------------------------------------------------------------------------- function Is_Null (It : Iterator) return Boolean is begin return It = Null_Iterator; end Is_Null; ----------------------------------------------------------------------------- function Current (It : Iterator) return Token is Result : Token; begin if It = Null_Iterator then Result := Null_Token; else Result := It.Current_Token; end if; return Result; end Current; ----------------------------------------------------------------------------- function To_String (Tok : Token) return E_Strings.T is --# hide To_String; Result : E_Strings.T := E_Strings.Empty_String; begin E_Strings.Append_String (E_Str => Result, Str => SP_Symbols.SP_Terminal'Image (Tok.Kind) & ": "); E_Strings.Append_Examiner_String (E_Str1 => Result, E_Str2 => Tok.Value); return Result; end To_String; end TokenManager; spark-2012.0.deb/sparkmake/unitmanager-unitstore.adb0000644000175000017500000001330211753202340021425 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- 
-- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Characters.Handling; with E_Strings; with E_Strings.Not_SPARK; with GNAT.HTable; package body UnitManager.UnitStore is --------------------------------------------------------------------------- -- This package body is NOT SPARK, and should not be -- -- presented to the Examiner -- --------------------------------------------------------------------------- Max_Units : constant := 1000; type Index is range 1 .. Max_Units; function Hash (Id : Unit.Id) return Index; package Unit_Table is new GNAT.HTable.Simple_HTable ( Header_Num => Index, Element => Unit.Object, No_Element => Unit.Null_Object, Key => Unit.Id, Hash => Hash, Equal => Unit.Are_Equal); --------------------------------------------------------------------------- function Key (From_Id : Unit.Id) return E_Strings.T -- -- Returns a unique key (string) for the unit. is Result : E_Strings.T := E_Strings.Empty_String; begin E_Strings.Append_Examiner_String (E_Str1 => Result, E_Str2 => From_Id.The_Name); E_Strings.Append_String (E_Str => Result, Str => Unit.Kind'Image (From_Id.The_Kind)); return Result; end Key; -------------------------------------------------------------------------------- function Hash (Id : Unit.Id) return Index is The_Key : E_Strings.T; function Raw_Hash (Key : String) return Index is type Uns is mod 2 ** 32; -- GNAT-Specific Import here function Rotate_Left (Value : Uns; Amount : Natural) return Uns; pragma Import (Intrinsic, Rotate_Left); Tmp : Uns := 0; begin for J in Key'Range loop Tmp := Rotate_Left (Value => Tmp, Amount => 1) + Character'Pos (Ada.Characters.Handling.To_Upper (Key (J))); end loop; return Index'First + Index'Base (Tmp mod Index'Range_Length); -- also GNAT-defined attrib end Raw_Hash; begin The_Key := Key (From_Id => Id); return Raw_Hash (Key => E_Strings.Not_SPARK.Get_String (E_Str => The_Key)); end Hash; -------------------------------------------------------------------------------- procedure Add (The_Unit : in Unit.Object; Added : out Boolean) is begin if Get (The_Unit.The_Id) = Unit.Null_Object then Added := True; Unit_Table.Set (K => The_Unit.The_Id, E => The_Unit); else Added := False; end if; end Add; -------------------------------------------------------------------------------- function Get (The_Unit : Unit.Id) return Unit.Object is begin return Unit_Table.Get (K => The_Unit); end Get; -------------------------------------------------------------------------------- function Get_Body_Unit (With_Name : in E_Strings.T) return Unit.Object is The_Unit : Unit.Object; begin for Kind in Unit.Kind range Unit.Main_Program_Unit .. 
Unit.Separate_Body_Unit loop The_Unit := Get (The_Unit => Unit.Id'(The_Name => With_Name, The_Kind => Kind)); exit when The_Unit /= Unit.Null_Object; end loop; return The_Unit; end Get_Body_Unit; -------------------------------------------------------------------------------- function Get_Specification_Unit (With_Name : in E_Strings.T) return Unit.Object is The_Unit : Unit.Object; begin for Kind in Unit.Specification_Unit loop The_Unit := Get (The_Unit => Unit.Id'(The_Name => With_Name, The_Kind => Kind)); exit when The_Unit /= Unit.Null_Object; end loop; return The_Unit; end Get_Specification_Unit; -------------------------------------------------------------------------------- function Get_All_Units return Units.Stack is CurrentUnit : Unit.Object; The_Units : Units.Stack := Units.NullStack; begin CurrentUnit := Unit_Table.Get_First; while CurrentUnit /= Unit.Null_Object loop Units.Push (TheStack => The_Units, TheUnit => CurrentUnit.The_Id); CurrentUnit := Unit_Table.Get_Next; end loop; return The_Units; end Get_All_Units; begin Unit_Table.Reset; end UnitManager.UnitStore; spark-2012.0.deb/sparkmake/regularexpression.ads0000644000175000017500000000355211753202340020671 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with E_Strings; --# inherit E_Strings; package RegularExpression is type Object is private; function Create (E_Str : E_Strings.T) return Object; -- -- Creates a regular expression from E_Str. -- If E_Str does not represent a valid regular expression a null object is returned. ------------------------------------------------------ -- Accessors ------------------------------------------------------ function Is_Null (O : Object) return Boolean; function Matches (E_Str : E_Strings.T; The_Reg_Exp : Object) return Boolean; -- -- Returns True is E_Str matches TheRegularExp private type Object is record The_Reg_Exp : E_Strings.T; Is_Null_Exp : Boolean; end record; Null_Object : constant Object := Object'(The_Reg_Exp => E_Strings.Empty_String, Is_Null_Exp => True); end RegularExpression; spark-2012.0.deb/sparkmake/unitmanager-unitstore.ads0000644000175000017500000000334411753202340021453 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Unit; with Units; --# inherit E_Strings, --# Unit, --# UnitManager, --# Units; private package UnitManager.UnitStore --# own State; --# initializes State; is procedure Add (The_Unit : in Unit.Object; Added : out Boolean); --# global in out State; --# derives Added, --# State from State, --# The_Unit; function Get (The_Unit : Unit.Id) return Unit.Object; --# global in State; function Get_Body_Unit (With_Name : in E_Strings.T) return Unit.Object; --# global in State; function Get_Specification_Unit (With_Name : in E_Strings.T) return Unit.Object; --# global in State; function Get_All_Units return Units.Stack; --# global in State; end UnitManager.UnitStore; spark-2012.0.deb/sparkmake/stringlist.ads0000644000175000017500000000665611753202340017322 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with E_Strings; --# inherit E_Strings, --# SPARK_IO; package StringList is type Object is private; Null_Object : constant Object; type Iterator is private; ------------------------------------------------------------------------ -- Constructors ------------------------------------------------------------------------ procedure Add_To_Front (To_List : in out Object; The_Item : in E_Strings.T); --# derives To_List from *, --# The_Item; -- -- Adds the TheItem to the front of ToList. -- No change if TheItem is already present. procedure Add_To_Back (To_List : in out Object; The_Item : in E_Strings.T); --# derives To_List from *, --# The_Item; -- -- Adds the TheItem to the back of ToList. -- Assuming ToList is already sorted, then adds TheItem -- to the list so it remains sorted. Here, "sorted" is -- defined by the "<=" operator for Standard.String procedure Add_In_Lex_Order (To_List : in out Object; The_Item : in E_Strings.T); --# derives To_List from *, --# The_Item; function Get_First (In_List : Object) return Iterator; -- -- Returns an iterator for the first item in the list function Next (It : Iterator) return Iterator; -- -- Returns the next iterator. 
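   --  Illustrative usage sketch (added; not part of the original source),
   --  using only operations declared in this package and the E_Strings
   --  operations already used elsewhere in sparkmake:
   --
   --     Files : Object := Null_Object;
   --     It    : Iterator;
   --     ...
   --     Add_To_Back (To_List  => Files,
   --                  The_Item => E_Strings.Copy_String (Str => "a.adb"));
   --     It := Get_First (In_List => Files);
   --     while not Is_Null (It => It) loop
   --        ... use Value (It => It) ...
   --        It := Next (It => It);
   --     end loop;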
------------------------------------------------------------------------ -- Accessors ------------------------------------------------------------------------ function Is_Null (It : Iterator) return Boolean; function Value (It : Iterator) return E_Strings.T; ------------------------------------------------------------------------ -- Debug support ------------------------------------------------------------------------ type Orientation is (Vertical, Horizontal); procedure Output (The_List : in Object; How : in Orientation); --# global in out SPARK_IO.File_Sys; --# derives SPARK_IO.File_Sys from *, --# How, --# The_List; private --# hide StringList; type Node; type Object is access Node; Null_Object : constant Object := null; type Node is record The_Item : E_Strings.T; Next : Object; end record; type Iterator is new Object; Null_Iterator : constant Iterator := Iterator (Null_Object); end StringList; spark-2012.0.deb/sparkmake/commandline.ads0000644000175000017500000000267211753202340017400 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with E_Strings; --# inherit E_Strings; package CommandLine --# own in State; is procedure Setup; --# derives ; -- Set up so that Read reads command lines of the form: -- command /sw1=arg1 /sw2=arg2 /sw3 arg1 arg2 procedure Read (Switch : out E_Strings.T; Argument : out E_Strings.T; Success : out Boolean); --# global in State; --# derives Argument, --# Success, --# Switch from State; end CommandLine; spark-2012.0.deb/sparkmake/vcg/0000755000175000017500000000000011753203757015206 5ustar eugeneugenspark-2012.0.deb/sparkmake/sparkmakedebug.ads0000644000175000017500000000337311753202340020076 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with E_Strings; with StringList; --# inherit E_Strings, --# StringList; package SparkMakeDebug is procedure Report_Text (Text : in String); --# derives null from Text; procedure Report_Text_E_Text (Text : in String; E_Text : in E_Strings.T); --# derives null from E_Text, --# Text; procedure Report_List (Text : in String; List : in StringList.Object); --# derives null from List, --# Text; procedure Report_Cond_Text (Cond : in Boolean; True_Text : in String; False_Text : in String); --# derives null from Cond, --# False_Text, --# True_Text; end SparkMakeDebug; spark-2012.0.deb/sparkmake/regularexpression.adb0000644000175000017500000001010211753202340020635 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with E_Strings.Not_SPARK; with GNAT.Regexp; with SparkMakeDebug; with SystemErrors; package body RegularExpression is ------------------------------------------------------ -- Constructor ------------------------------------------------------ function Create (E_Str : E_Strings.T) return Object -- -- This function compiles the regular expression and then doesn't use it! -- This is to avoid having to with GNAT.Regexp in the specification and -- thus keeping the spec within the SPARK boundary. -- This function checks that a regular expression *can* be compiled for the -- given string. If it cannot then a null Object is returned that will -- cause a fatal error when passed to Matches. -- The Matches operation must then recompile the expression - but of course -- it knows it will succeed. -- This body is NOT within the SPARK boundary. is R : GNAT.Regexp.Regexp; Valid : Boolean := True; Result : Object; pragma Unreferenced (R); begin -- prohibit the {} characters for I in 1 .. 
E_Strings.Get_Length (E_Str => E_Str) loop if E_Strings.Get_Element (E_Str => E_Str, Pos => I) = '{' or else E_Strings.Get_Element (E_Str => E_Str, Pos => I) = '}' then Valid := False; exit; end if; end loop; if Valid then R := GNAT.Regexp.Compile (Pattern => E_Strings.Not_SPARK.Get_String (E_Str => E_Str), Glob => True, Case_Sensitive => False); Result := Object'(The_Reg_Exp => E_Str, Is_Null_Exp => False); else SparkMakeDebug.Report_Text_E_Text (Text => "Invalid regular expression", E_Text => E_Str); Result := Null_Object; end if; return Result; exception when GNAT.Regexp.Error_In_Regexp => SparkMakeDebug.Report_Text_E_Text (Text => "Invalid regular expression", E_Text => E_Str); return Null_Object; end Create; ------------------------------------------------------ -- Accessors ------------------------------------------------------ function Is_Null (O : Object) return Boolean is begin return O.Is_Null_Exp; end Is_Null; function Matches (E_Str : E_Strings.T; The_Reg_Exp : Object) return Boolean is begin if Is_Null (O => The_Reg_Exp) then SystemErrors.Fatal_Error (Sys_Err => SystemErrors.Other_Internal_Error, Msg => "Illegal use of null regular expression"); return False; else return GNAT.Regexp.Match (S => E_Strings.Not_SPARK.Get_String (E_Str => E_Str), R => GNAT.Regexp.Compile (Pattern => E_Strings.Not_SPARK.Get_String (E_Str => The_Reg_Exp.The_Reg_Exp), Glob => True, Case_Sensitive => False)); end if; end Matches; end RegularExpression; spark-2012.0.deb/sparkmake/sparkmakecommandline.ads0000644000175000017500000001206411753202340021273 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with E_Strings; with StringList; --# inherit CommandLine, --# CommandLineData, --# Directory_Operations, --# E_Strings, --# FileSystem, --# RegularExpression, --# SparkMakeDebug, --# SparkMakeErrors, --# SPARK_IO, --# StringList, --# SystemErrors, --# Version; package SparkMakeCommandLine --# own State; is -- Switches: -- -meta=filename filename is the name of the output meta file -- Default: rootfilename.smf -- -- -nometafile Do not produce a meta file -- Default: False -- -- -index=filename filename is the name of the index file output -- Default: rootfilename.idx -- -- -noindexfile Do not produce an index file -- Default: False -- -- -path=Full | Relative Full or Relative path names in output files -- Default: Full -- -- -language=83 | 95 | 2005 Select language. Default is 95. 
-- -- -debug outputs additional debug info as the make progresses -- Default: Full -- -- -duplicates_are_errors Fails if duplicate files are found -- Default : False -- -- -directory=dirname Tells make to look in and under these directories -- Current directory is always searched and is the default. -- -- -include=regexp include these files -- Default: *.ad[bs] -- -- -exclude=regexp exclude these files -- Default: exclude no files -- -- -annotation_character=char -- Recognise this character as annotation character -- Default: # -- -- -help prints off help information -- -- -version prints off version information -- -- Arguments -- rootfilename Filename is the root of the make. If not specified -- then index and meta files for analysis of all files -- in the directory (and subdirectories) will be generated -- named spark.idx and spark.smf. -- ---------------------------------------------------------------------- -- Constructor ---------------------------------------------------------------------- procedure Process (Success : out Boolean; Help_Or_Ver_Found : out Boolean); --# global in CommandLine.State; --# in out CommandLineData.Content; --# in out SPARK_IO.File_Sys; --# out State; --# derives CommandLineData.Content, --# SPARK_IO.File_Sys from *, --# CommandLine.State & --# Help_Or_Ver_Found, --# State, --# Success from CommandLine.State; ---------------------------------------------------------------------- -- Accessors ---------------------------------------------------------------------- type Path_Type is (Undefined, Full, Relative); function Path_Required return Path_Type; --# global in State; function Meta_Filename return E_Strings.T; --# global in State; function Index_Filename return E_Strings.T; --# global in State; function Root_Filename return E_Strings.T; --# global in State; function Duplicates_Error return Boolean; --# global in State; function The_Directory_Names return StringList.Object; --# global in State; function The_Inc_File_Reg_Exps return StringList.Object; --# global in State; function The_Exc_File_Reg_Exps return StringList.Object; --# global in State; function Debug_On return Boolean; --# global in State; function No_Meta_File return Boolean; --# global in State; function No_Index_File return Boolean; --# global in State; end SparkMakeCommandLine; spark-2012.0.deb/sparkmake/sparkmakedebug.adb0000644000175000017500000000727511753202340020062 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with SPARK_IO; with SparkMakeCommandLine; package body SparkMakeDebug is --------------------------------------------------------------------------- -- This package body is NOT SPARK --------------------------------------------------------------------------- procedure Start_Debug_Line is begin SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => ">>> Debug: ", Stop => 0); end Start_Debug_Line; ----------------------------------------------------------------------------------- procedure Report_Text (Text : in String) is begin if SparkMakeCommandLine.Debug_On then Start_Debug_Line; SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => Text, Stop => 0); end if; end Report_Text; ----------------------------------------------------------------------------------- procedure Report_Text_E_Text (Text : in String; E_Text : in E_Strings.T) is begin if SparkMakeCommandLine.Debug_On then Start_Debug_Line; SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => Text, Stop => 0); E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => E_Text); end if; end Report_Text_E_Text; ----------------------------------------------------------------------------------- procedure Report_List (Text : in String; List : in StringList.Object) is begin if SparkMakeCommandLine.Debug_On then Start_Debug_Line; SPARK_IO.Put_String (File => SPARK_IO.Standard_Output, Item => Text, Stop => 0); StringList.Output (The_List => List, How => StringList.Horizontal); end if; end Report_List; ----------------------------------------------------------------------------------- procedure Report_Cond_Text (Cond : in Boolean; True_Text : in String; False_Text : in String) is begin if SparkMakeCommandLine.Debug_On then Start_Debug_Line; if Cond then SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => True_Text, Stop => 0); else SPARK_IO.Put_Line (File => SPARK_IO.Standard_Output, Item => False_Text, Stop => 0); end if; end if; end Report_Cond_Text; end SparkMakeDebug; spark-2012.0.deb/sparkmake/stringlist.adb0000644000175000017500000001522711753202340017273 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with E_Strings.Not_SPARK; with SPARK_IO; with SparkMakeErrors; package body StringList is ------------------------------------------------------------------------ -- This package body is NOT SPARK ------------------------------------------------------------------------ function "<=" (Left, Right : in E_Strings.T) return Boolean is begin -- uses Standard."<=" for type String return E_Strings.Not_SPARK.Get_String (E_Str => Left) <= E_Strings.Not_SPARK.Get_String (E_Str => Right); end "<="; ------------------------------------------------------------------------ -- Constructors ------------------------------------------------------------------------ procedure Add_To_Front (To_List : in out Object; The_Item : in E_Strings.T) is begin To_List := new Node'(The_Item => The_Item, Next => To_List); exception when others => SparkMakeErrors.Fatal ("Exception raised in StringList.Add_To_Front"); end Add_To_Front; procedure Add_To_Back (To_List : in out Object; The_Item : in E_Strings.T) is Current_Node : Object; New_Node : Object; begin -- Create new node New_Node := new Node'(The_Item => The_Item, Next => Null_Object); if To_List = Null_Object then To_List := New_Node; else Current_Node := To_List; -- Find the final node in the list while Current_Node.Next /= Null_Object loop Current_Node := Current_Node.Next; end loop; -- Link in the new node Current_Node.Next := New_Node; end if; exception when others => SparkMakeErrors.Fatal ("Exception raised in StringList.Add_To_Back"); end Add_To_Back; procedure Add_In_Lex_Order (To_List : in out Object; The_Item : in E_Strings.T) is Current_Node : Object; Prev_Node : Object; New_Node : Object; begin -- Create new node New_Node := new Node'(The_Item => The_Item, Next => Null_Object); if To_List = Null_Object then -- To_List is empty, so To_List := New_Node; else Current_Node := To_List; Prev_Node := Null_Object; -- Find spot between Prev_Node and Current_Node where New_Node -- needs to be inserted. Note uses "<=" operator for -- EString.T defined above. 
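         --  (Added note, for clarity) For example, inserting "m.adb" into the
         --  list ("a.adb", "z.adb"): the loop below leaves Prev_Node on
         --  "a.adb" and Current_Node on "z.adb", so the new node is linked
         --  between them; when Prev_Node is still Null_Object the new node
         --  becomes the head of To_List.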
while Current_Node.The_Item <= The_Item loop Prev_Node := Current_Node; Current_Node := Current_Node.Next; exit when Current_Node = Null_Object; end loop; -- Link New_Node in before Current_Node New_Node.Next := Current_Node; if Prev_Node = Null_Object then -- No previous node - New_Node must be the new head of To_List To_List := New_Node; else -- Link previous node to New_Node Prev_Node.Next := New_Node; end if; end if; exception when others => SparkMakeErrors.Fatal ("Exception raised in StringList.Add_In_Lex_Order"); end Add_In_Lex_Order; function Get_First (In_List : Object) return Iterator is begin return Iterator (In_List); exception when others => SparkMakeErrors.Fatal ("Exception raised in StringList.Get_First"); return Null_Iterator; end Get_First; function Next (It : Iterator) return Iterator is begin return Iterator (It.all.Next); exception when others => SparkMakeErrors.Fatal ("Exception raised in StringList.Next"); return Null_Iterator; end Next; ------------------------------------------------------------------------ -- Accessors ------------------------------------------------------------------------ function Is_Null (It : Iterator) return Boolean is begin return It = Null_Iterator; exception when others => SparkMakeErrors.Fatal ("Exception raised in StringList.Is_Null"); return False; end Is_Null; function Value (It : Iterator) return E_Strings.T is begin return It.all.The_Item; exception when others => SparkMakeErrors.Fatal ("Exception raised in StringList.Value"); return E_Strings.Empty_String; end Value; ------------------------------------------------------------------------ -- Debug support ------------------------------------------------------------------------ procedure Output (The_List : in Object; How : in Orientation) is It : Iterator; begin It := Get_First (In_List => The_List); if Is_Null (It => It) then SPARK_IO.Put_String (SPARK_IO.Standard_Output, "<>", 0); else loop if How = Vertical then -- expect stable expression E_Strings.Put_Line (File => SPARK_IO.Standard_Output, E_Str => Value (It)); else E_Strings.Put_String (File => SPARK_IO.Standard_Output, E_Str => Value (It)); end if; It := Next (It); exit when Is_Null (It => It); if How = Horizontal then SPARK_IO.Put_String (SPARK_IO.Standard_Output, ", ", 0); end if; end loop; end if; if How = Horizontal then SPARK_IO.New_Line (File => SPARK_IO.Standard_Output, Spacing => 1); end if; exception when others => SparkMakeErrors.Fatal ("Exception raised in StringList.Output"); end Output; end StringList; spark-2012.0.deb/common/0000755000175000017500000000000011753202341013725 5ustar eugeneugenspark-2012.0.deb/common/versioning/0000755000175000017500000000000011753202341016110 5ustar eugeneugenspark-2012.0.deb/common/versioning/version.ads0000644000175000017500000000331411753202341020267 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= package Version is type Distribution_Sort is (Pro, GPL); Toolset_Distribution_Sort : constant Distribution_Sort := GPL; --# accept W, 3, "Pragma to suppress compiler warnings for this constant"; pragma Warnings (Off, Toolset_Distribution_Sort); Toolset_Version : constant String := "2012"; Toolset_Copyright : constant String := "Copyright (C) 2012 Altran Praxis Limited, Bath, U.K."; Toolset_Distribution : constant String := "GPL"; Toolset_Banner_Line : constant String := "GPL 2012"; Toolset_Support_Line1 : constant String := ""; Toolset_Support_Line2 : constant String := "Report bugs to: spark@adacore.com"; Toolset_Support_Line3 : constant String := ""; Toolset_Support_Line4 : constant String := ""; end Version; spark-2012.0.deb/common/versioning/version.pro0000644000175000017500000000732011753202341020321 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Describes version related information for all Sicstus systems. % % Note that, from the perspective of the Sicstus system, this file is static. % For each build, the file version.pro is regenerated to contain current % information. %############################################################################### %############################################################################### %MODULE %############################################################################### :- module(version, [toolset_version/1, toolset_copyright/1, toolset_banner_line/1, toolset_distribution/1, toolset_support_line1/1, toolset_support_line2/1, toolset_support_line3/1, toolset_support_line4/1]). 
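%-------------------------------------------------------------------------------
% Illustrative example (added; not part of the original source): a typical
% query against the facts defined below, e.g. from the SICStus top level:
%
%     | ?- toolset_version(V), toolset_distribution(D).
%     V = '2012',
%     D = 'GPL'
%-------------------------------------------------------------------------------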
%############################################################################### %DEPENDENCIES %############################################################################### %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % toolset_version(-Atom). % toolset_copyright(-Atom). % toolset_distribution(-Atom). % toolset_support_line1(-Atom). % toolset_support_line2(-Atom). % toolset_support_line3(-Atom). % toolset_support_line4(-Atom). %------------------------------------------------------------------------------- % Various predicates to return system version related information. % % Note that the INS items are placeholders, and will be replaced with their % actual values during a build. %=============================================================================== toolset_version('2012'). toolset_copyright('Copyright (C) 2012 Altran Praxis Limited, Bath, U.K.'). toolset_banner_line('GPL 2012'). toolset_distribution('GPL'). toolset_support_line1(''). toolset_support_line2('Report bugs to: spark@adacore.com'). toolset_support_line3(''). toolset_support_line4(''). %=============================================================================== %############################################################################### % END-OF-FILE spark-2012.0.deb/common/gnat.cfg0000644000175000017500000000055411753202341015343 0ustar eugeneugenpackage Standard is type Short_Short_Integer is range -128 .. 127; type Short_Integer is range -2**15 .. 2**15 - 1; type Integer is range -2**31 .. 2**31 - 1; type Long_Long_Integer is range -2**63 .. 2**63 - 1; end Standard; package System is Min_Int : constant := -2 ** 63; Max_Int : constant := 2 ** 63 - 1; end System; spark-2012.0.deb/common/Makefile.inc0000644000175000017500000001501311753202341016135 0ustar eugeneugen# ----------------------------------------------------------------------------- # (C) Altran Praxis Limited # ----------------------------------------------------------------------------- # # The SPARK toolset is free software; you can redistribute it and/or modify it # under terms of the GNU General Public License as published by the Free # Software Foundation; either version 3, or (at your option) any later # version. The SPARK toolset is distributed in the hope that it will be # useful, but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the GNU # General Public License distributed with the SPARK toolset; see file # COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of # the license. # # ============================================================================= ################################################################################ # PURPOSE #------------------------------------------------------------------------------- # Common Makefile # For Linux, Windows, Solaris or Mac OS X (aka Darwin). 
################################################################################ .SUFFIXES: ################################################################################ # BUILD CONFIGURATION ################################################################################ # The available configuration variables are as follows. # # DEBUG # ----- # yes (Build with debugging support) # no (Build without debugging support) # # COVER (COVERage) # ---------------- # yes (Build with coverage support) # no (Build without debugging support) ################################################################################ # DETERMINE HOST INFORMATION ################################################################################ # Determine which platform this Makefile is being run on. TARGET:=$(shell uname -s) GCC_TARGET:=$(shell gcc -dumpmachine) # Canonicalize the target string. ifeq ($(findstring CYGWIN_NT,${TARGET}),CYGWIN_NT) TARGET:=Windows endif ################################################################################ # DYNAMIC DEPENDENCIES ################################################################################ # By default, do not build with debug. ifeq (${DEBUG},) DEBUG:=no endif # By default, do not build with coverage. ifeq (${COVER},) COVER:=no endif # Check to make sure that that debug and coverage have not been simultaneously # requested. ifeq (${DEBUG},yes) ifeq (${COVER},yes) $(error Can not build with debug and coverage simultaneously) endif endif ################################################################################ # PLATFORM INDEPENDENT CONFIGURATION ################################################################################ # Location of lib CUSTOMER_LIB:=${ROOT}/lib/spark/current # Common compiler options. COMMON_OPTS:=-g -gnatwae -gnat05 -gnatwl -gnaty3abefhiklnprt -I${CUSTOMER_LIB} -I${COMMON}/versioning -I${ROOT}/examiner -gnatf -k GNATMAKE_OPTS:=${COMMON_OPTS} -O2 -fno-strict-aliasing BIND_OPTS:=-E # Override compiler options for debug. No optimization, plus full # validity and overflow checks. ifeq (${DEBUG},yes) GNATMAKE_OPTS:=${COMMON_OPTS} -O0 -gnatVa -gnato -fno-eliminate-unused-debug-types endif # Override compiler options for coverage. No optimization, plus # additional options for coverage analysis. ifeq (${COVER},yes) GNATMAKE_OPTS:=${COMMON_OPTS} -O0 -fprofile-arcs -ftest-coverage endif # Options to submit to gnatprep. # DEBUG: The Examiner raises a fatal error during self-analysis with # Source_Reference pragmas in place. Thus, the handy "-r" option # is removed until the error is fixed. PREP_OPTS:=-c # Common compiler options. SICSTUS_LIBS:=codesio SICSTUS_SWITCHES:=-f --nologo --noinfo SAV_EXTN:=.sav ################################################################################ # PLATFORM SPECIFIC CONFIGURATION ################################################################################ BUILD_VICTOR:=false # Linux. ifeq (${TARGET},Linux) EXE_EXTN:= TAR:=tar PREP_TARGET:=Intel_Linux ifeq (${GCC_TARGET},x86_64-pc-linux-gnu) ADDRESS_SIZE:=64 else ADDRESS_SIZE:=32 endif BUILD_VICTOR:=true SPLD_CONF:= endif # Windows. ifeq (${TARGET},Windows) EXE_EXTN:=.exe TAR:=tar PREP_TARGET:=Intel_WinNT ifeq (${GCC_TARGET},i686-pc-mingw32) ADDRESS_SIZE:=32 BUILD_VICTOR:=true else ADDRESS_SIZE:=64 endif # Do not embedded a manifest (the auto-generated manifest has false dependencies). SPLD_CONF:=--no-embed-manifest endif # Solaris. 
ifeq (${TARGET},SunOS) EXE_EXTN:= TAR:=gtar PREP_TARGET:=SPARC_Solaris ADDRESS_SIZE:=32 SPLD_CONF:= endif # Darwin (Mac OS X 10.5 or 10.6, 64-bit). ifeq (${TARGET},Darwin) EXE_EXTN:= TAR:=tar PREP_TARGET:=Darwin ADDRESS_SIZE:=64 BUILD_VICTOR:=true SPLD_CONF:= # The full-blown implementation of symbolic tracebacks is not # working on GNAT Pro at the moment, so # force local compilation of dummy GNAT.Traceback.Symbolic in # examiner/g-trasym.adb GNATMAKE_OPTS:=-a ${GNATMAKE_OPTS} endif # If SPARKCPUS is not explicitly set, default to 1. ifeq (${SPARKCPUS},) SPARKCPUS:=1 endif ################################################################################ # TARGETS ################################################################################ all: standardclean: rm -f *.o *.ali if [ -d vcg ]; then rm -f vcg/*.dic; fi targetclean: rm -f ${OUTPUT_NAME}${EXE_EXTN} rm -f b~*.ad[sb] vcclean: -find vcg -type f -name "*.vcg" -delete -find vcg -type f -name "*.fdl" -delete -find vcg -type f -name "*.rls" -delete -find vcg -type f -name "*.siv" -delete -find vcg -type f -name "*.slg" -delete -find vcg -type f -name "*.sli" -delete -find vcg -type f -name "*.lsb" -delete -find vcg -type f -name "*.log" -delete -find vcg -type f -name "*.dpc" -delete -find vcg -type f -name "*.sdp" -delete -find vcg -type f -name "*.zlg" -delete -find vcg -type f -name "*.zsl" -delete -find vcg -type f -name "*.vct" -delete -find vcg -type f -name "*.vlg" -delete -find vcg -type f -name "*.vsm" -delete rm -f vcg/*.rep vcg/mainunits95.xml -find vcg -depth -mindepth 1 -type d | grep -v -e '/\.svn' | xargs rmdir --ignore-fail-on-non-empty ################################################################################ # END-OF-FILE spark-2012.0.deb/victor/0000755000175000017500000000000011773666430013762 5ustar eugeneugenspark-2012.0.deb/victor/Makefile0000644000175000017500000000532311753202341015406 0ustar eugeneugen#------------------------------------------------------------------------------- # (C) Altran Praxis Limited #=============================================================================== ################################################################################ # PURPOSE #------------------------------------------------------------------------------- # Makefile for Victor # For Linux, Windows, Solaris or Mac OS X (aka Darwin). ################################################################################ ################################################################################ # BUILD CONFIGURATION ################################################################################ OUTPUT_NAME:=vct ALT_ERGO:=alt-ergo VICTOR:=vct # Location of root. ROOT:=.. # Location of common. COMMON:=${ROOT}/common include ${COMMON}/Makefile.inc ################################################################################ # PLATFORM SPECIFIC CONFIGURATION ################################################################################ CPPFLAGS= LDFLAGS= # Windows. 
ifeq (${TARGET},Windows) ifeq (${GCC_TARGET},i686-pc-mingw32) # on windows, let the compiler know the gmplib location CPPFLAGS="-I /gmp/include" LDFLAGS="-L /gmp/lib" endif endif ################################################################################ # TARGETS ################################################################################ ALT_ERGO_DIR=$(shell ${TAR} ztf ${ALT_ERGO}-*.tar.gz | head -1) # Target to build vct and alt-ergo ifeq (${BUILD_VICTOR},true) all: ${VICTOR}-target ${ALT_ERGO}-target else all: endif ${VICTOR}-target: # Don't build Victor in parallel (-j). Missing dependencies in the Makefile. ifeq (${TARGET},Darwin) $(MAKE) -C ${VICTOR}/src CPPFLAGS=${CPPFLAGS} LDFLAGS=${LDFLAGS} STATIC_GMP_MAC=1 else $(MAKE) -C ${VICTOR}/src CPPFLAGS=${CPPFLAGS} LDFLAGS=${LDFLAGS} STATIC_GMP=1 endif ${ALT_ERGO}-target: ${ALT_ERGO} ifeq (${GCC_TARGET},i686-pc-mingw32) # on windows we mimick the build by copying the mingw binary provided # by the Alt-Ergo team to where a source built binary would have # appeared cp alt-ergo-*-mingw.exe ${ALT_ERGO}/alt-ergo.opt else # by default, we build Alt-Ergo from the sources # Don't build Alt-Ergo in parallel (-j). Missing dependencies in the Makefile. cd ${ALT_ERGO} && ./configure $(MAKE) -C ${ALT_ERGO} endif ${ALT_ERGO}: ${TAR} zxvf ${ALT_ERGO}-*.tar.gz mv ${ALT_ERGO_DIR} ${ALT_ERGO} # Cleaning code base # ================== clean: rm -Rf ${ALT_ERGO} -$(MAKE) -C ${VICTOR}/src clean rm -f ${VICTOR}/bin/${OUTPUT_NAME}${EXE_EXTN} reallyclean: clean ################################################################################ # END-OF-FILE spark-2012.0.deb/victor/vct/0000755000175000017500000000000011753202341014537 5ustar eugeneugenspark-2012.0.deb/victor/vct/src/0000755000175000017500000000000011753203760015334 5ustar eugeneugenspark-2012.0.deb/victor/vct/src/build/0000755000175000017500000000000011753202341016425 5ustar eugeneugenspark-2012.0.deb/victor/vct/src/lexer.hh0000644000175000017500000000315411753202341016771 0ustar eugeneugen//========================================================================== //========================================================================== // LEXER.HH //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ #ifndef LEXER_HH #define LEXER_HH #include "parser.tab.hh" #include "pdriver.hh" // Announce to Flex the prototype we want for lexing function, ... // // Use of driver arg here is not strictly necessary. Lexer never // accesses driver object. 
(In calc++ example, the lexer reported error // messages to the driver) // # define YY_DECL \ yy::parser::token_type \ yylex (yy::parser::semantic_type* yylval, \ pdriver& driver) // Declare lexing function for use in parser YY_DECL; #endif // ! LEXER_HH spark-2012.0.deb/victor/vct/src/cvc-driver.cc0000644000175000017500000006523311753202341017712 0ustar eugeneugen//========================================================================== //========================================================================== // CVC-DRIVER.CC //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ //========================================================================== // CVC3 specific code. //-------------------------------------------------------------------------- // C++ headers //-------------------------------------------------------------------------- #include #include using std::map; using std::pair; #include using std::vector; #include using std::cout; using std::endl; #include using std::ostringstream; #include //-------------------------------------------------------------------------- // My header files //-------------------------------------------------------------------------- #include "cvc-driver.hh" #include "utility.hh" #include "formatter.hh" //-------------------------------------------------------------------------- // CVC header files. //-------------------------------------------------------------------------- #include "vc.h" using CVC3::ValidityChecker; using CVC3::Type; using CVC3::Expr; using CVC3::Op; using CVC3::QueryResult; #include "theory_arith.h" // for arith kinds and expressions #include "command_line_flags.h" using CVC3::CLFlags; #include "exception.h" #include "expr_map.h" using CVC3::ExprMap; #include "theory_core.h" #include "vcl.h" // ValidityChecker Implementation using namespace z; //========================================================================== // CVCState class and member functions //========================================================================== // Allocate storage for flags in case create(flag) doesn't make its own // copy, but continually refers back. (Seems like a sensible behaviour for // it to have) class CVCState { private: CLFlags flags; ValidityChecker* vc; map varMap; map typeMap; map funMap; vector > bVarAlist; int bVarUId; // Unique ID number for bound variables. 
public: CVCState(string fileRoot); ~CVCState() { varMap.clear(); typeMap.clear(); funMap.clear(); bVarAlist.clear(); delete vc; } Expr pushBinding(Node* decl); // Returns bound var Expr void popBinding(); bool isBound(const string& s); Expr lookupBinding(const string& s); // Finds bound var nearest in scope Expr translateExpr(Node* n); Type translateType(Node* n); // Main functions for external use void processDecl(Node* decl); void assertFormula(Node* n); void outputCounterExample(); QueryResult doCheck(); bool outOfResources(); bool timeLimitReached(); bool incomplete(); bool incomplete(vector& reasons); }; //========================================================================== // CVCDriver Class //========================================================================== class CVCDriver : public SMTDriver { private: CVCState* state; Status status; public: CVCDriver() {} // virtual void initSession(); protected: virtual bool onlineInterface() {return true; }; virtual Node* translateUnit(Node* unit); virtual void initGoal(const string& unitName, int goalNum, int ConclNum); virtual void addDecl(Node* n); virtual void addHyp(Node* h, const string& hId, string& remarks); virtual void addConcl(Node* n, string& format); // virtual void finishSetup(); virtual bool checkGoal(string& format); virtual Status getResults(string& remarks); virtual void finaliseGoal(); // virtual void finaliseSession() }; SMTDriver* newCVCDriver() { return new CVCDriver(); } //========================================================================== // CVCState constructor and virtual functions //========================================================================== //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // CVCState() //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ CVCState::CVCState(string fileRoot) : flags(ValidityChecker::createFlags()) { // flags = ValidityChecker::createFlags(); if (option("cvc-category")) flags.setFlag("category", optionVal("cvc-category")); string echoFileSuffix = option("cvc-echo-suffix") ? optionVal("cvc-echo-suffix") : "cvc"; string outputFileSuffix = option("cvc-echo-suffix") ? optionVal("cvc-echo-suffix") : "clog"; if (option("cvc-loginput")) flags.setFlag("dump-log", fileRoot + "." + echoFileSuffix); if (option("cvc-logoutput")) flags.setFlag("dump-trace", fileRoot + "." 
+ outputFileSuffix); if (option("cvc-translate")) flags.setFlag("translate", true); if (option("cvc-echo-lang")) flags.setFlag("output-lang", optionVal("cvc-echo-lang")); if (option("cvc-echo-expected")) flags.setFlag("expResult", optionVal("cvc-echo-expected")); if (option("cvc-indent")) flags.setFlag("indent", optionVal("cvc-indent")); if (option("cnewarith")) flags.setFlag("arith-new", true); if (option("cvc-old-quant-inst")) flags.setFlag("quant-new", false); vc = ValidityChecker::create(flags); if (option("resourcelimit")) vc->setResourceLimit(intOptionVal("resourcelimit")); if (option("timeout")) vc->setTimeLimit(intOptionVal("timeout")); bVarUId = 0; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // pushBinding //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ Expr CVCState::pushBinding(Node* decl) { // Returns bound var Expr Type ty = translateType(decl->child(0)); Expr bv = vc->boundVarExpr(decl->id, intToString(bVarUId), ty); bVarUId++; bVarAlist.push_back(pair(decl->id, bv)); return bv; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // popBinding //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ void CVCState::popBinding() { bVarAlist.pop_back(); } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // isBound //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ bool CVCState::isBound(const string& s) { for (int i = bVarAlist.size(); i > 0;) { i--; if (s == bVarAlist.at(i).first) return true; } return false; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // lookupBinding //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ Expr CVCState::lookupBinding(const string& s) { // Finds bound var nearest in scope for (int i = bVarAlist.size(); i > 0;) { i--; if (s == bVarAlist.at(i).first) return bVarAlist.at(i).second; } throw std::runtime_error ("CVCState::lookupBinding: binding not found for: " + s); } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // translateType //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ Type CVCState::translateType(Node* n) { switch(n->kind) { case INT_TY: return vc->intType(); case REAL_TY: return vc->realType(); case BOOL_TY: return vc->boolType(); case BITVEC_TY: return vc->bitvecType(stringToInt(n->id)); case BIT_TY: return typeMap.find("bit__ty")->second; case SUBRANGE_TY: { return vc->subrangeType(translateExpr(n->child(0)), translateExpr(n->child(1)) ); } // RECORD_TY decl+ case RECORD_TY: { vector ids; vector tys; for (int i = 0; i != n->arity(); i++) { Node* decl = n->child(i); ids.push_back(decl->id); tys.push_back(translateType(decl->child(0))); } return vc->recordType(ids, tys); } // ARRAY_TY (SEQ ts) t case ARRAY_TY: { if (n->child(0)->arity() == 1) { return vc->arrayType(translateType(n->child(0)->child(0)), translateType(n->child(1)) ); } else { return vc->arrayType(translateType(n->child(0)), translateType(n->child(1)) ); } } case SEQ: { vector tys; for (int i = 0; i != n->arity(); i++) { tys.push_back(translateType(n->child(i))); } return vc->tupleType(tys); } case TYPE_ID: { if (typeMap.count(n->id) > 0) return typeMap.find(n->id)->second; else { throw std::runtime_error ("CVCState::translateType: unexpected TYPE_ID: " + (n->id)); } } case ENUM_TY: default: { throw std::runtime_error("CVCState::translateType: unrecognised kind " + kindString(n->kind)); } } // END switch } 
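// The binding helpers above (pushBinding, popBinding, isBound, lookupBinding)
// implement a plain association-list scope stack: entering a quantifier pushes
// a (source-name, CVC3 Expr) pair, lookups scan from the back so the innermost
// binding of a shadowed name wins, and leaving the quantifier pops it; the
// separate bVarUId counter merely keeps the CVC3-side bound variables distinct
// when source names repeat.  A minimal stand-alone sketch of the same idiom
// follows; ScopeStack and its member names are illustrative only, not part of
// the Victor sources.

#include <stdexcept>
#include <string>
#include <utility>
#include <vector>

template <typename ExprT>
class ScopeStack {
public:
    void push(const std::string& name, const ExprT& e) {
        bindings.push_back(std::make_pair(name, e));
    }
    void pop() { bindings.pop_back(); }
    bool isBound(const std::string& name) const {
        for (std::size_t i = bindings.size(); i > 0; --i)
            if (bindings[i - 1].first == name) return true;
        return false;
    }
    // Returns the innermost binding, mirroring CVCState::lookupBinding,
    // which likewise throws when no binding is in scope.
    const ExprT& lookup(const std::string& name) const {
        for (std::size_t i = bindings.size(); i > 0; --i)
            if (bindings[i - 1].first == name) return bindings[i - 1].second;
        throw std::runtime_error("ScopeStack::lookup: unbound name " + name);
    }
private:
    std::vector<std::pair<std::string, ExprT> > bindings;
};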
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // translateExpr //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ Expr CVCState::translateExpr(Node* n) { // Special cases first. if (n->kind == FORALL || n->kind == EXISTS) { vector bvars; for ( int i = 0; i != n->child(0)->arity(); i++) { Node* decl = n->child(0)->child(i); Expr bvar = pushBinding(decl); bvars.push_back(bvar); } Expr eChild = translateExpr(n->child(1)); for ( int i = 0; i != n->child(0)->arity(); i++) { popBinding(); } if (n->kind == FORALL) return vc->forallExpr(bvars, eChild); else return vc->existsExpr(bvars, eChild); } // RCD_ELEMENT{rcd-id} exp [ TYPE_PARAM{t-id} ] if (n->kind == RCD_ELEMENT) return vc->recSelectExpr( translateExpr(n->child(0)), n->id); // RCD_UPDATE{rcd-id} exp exp [ TYPE_PARAM{t-id} ] if (n->kind == RCD_UPDATE) return vc->recUpdateExpr(translateExpr(n->child(0)), n->id, translateExpr(n->child(1)) ); // MK_RECORD{rcd-id} a1 ... an where ai = ASSIGN{fi} ei if (n->kind == MK_RECORD) { vector es; vector fnames; for (int i = 0; i != n->arity(); i++) { es.push_back( translateExpr(n->child(i)->child(0)) ); fnames.push_back(n->child(i)->id); } return vc->recordExpr(fnames, es); } // Uniform cases now follow // Translate children nodes. vector es; for (int i = 0; i != n->arity(); i++) { es.push_back( translateExpr(n->child(i)) ); } switch(n->kind) { case IFF: return vc->iffExpr(es.at(0), es.at(1)); case IMPLIES: return vc->impliesExpr(es.at(0), es.at(1)); case AND: { if (n->arity() == 2) return vc->andExpr(es.at(0), es.at(1)); else return vc->andExpr(es); } case OR: { if (n->arity() == 2) return vc->orExpr(es.at(0), es.at(1)); else return vc->orExpr(es); } case NOT: return vc->notExpr(es.at(0)); case EQ: return vc->eqExpr(es.at(0), es.at(1)); case NE: return vc->notExpr(vc->eqExpr(es.at(0), es.at(1))); case I_LT : case R_LT : return vc->ltExpr(es.at(0), es.at(1)); case I_LE: case R_LE: return vc->leExpr(es.at(0), es.at(1)); case I_UMINUS: case R_UMINUS: return vc->uminusExpr(es.at(0)); case I_PLUS: case R_PLUS: return vc->plusExpr(es.at(0),es.at(1)); case I_MINUS: case R_MINUS: return vc->minusExpr(es.at(0),es.at(1)); case I_TIMES: case R_TIMES: return vc->multExpr(es.at(0),es.at(1)); case RDIV: return vc->divideExpr(es.at(0),es.at(1)); case TO_REAL: return es.at(0); case TRUE: return vc->trueExpr(); case FALSE: return vc->falseExpr(); case TERM_TRUE: return vc->newBVConstExpr(string("1")); case TERM_FALSE: return vc->newBVConstExpr(string("0")); case TERM_AND: return vc->newBVAndExpr(es.at(0), es.at(1)); case TERM_OR: return vc->newBVOrExpr(es.at(0), es.at(1)); case TERM_NOT: return vc->newBVNegExpr(es.at(0)); case TERM_I_LT: return vc->iteExpr(vc->ltExpr(es.at(0),es.at(1)), vc->newBVConstExpr(string("1")), vc->newBVConstExpr(string("0"))); case TERM_I_LE: return vc->iteExpr(vc->leExpr(es.at(0),es.at(1)), vc->newBVConstExpr(string("1")), vc->newBVConstExpr(string("0"))); case TERM_EQ: return vc->iteExpr(vc->eqExpr(es.at(0),es.at(1)), vc->newBVConstExpr(string("1")), vc->newBVConstExpr(string("0"))); case TERM_NE: return vc->iteExpr(vc->eqExpr(es.at(0),es.at(1)), vc->newBVConstExpr(string("0")), vc->newBVConstExpr(string("1"))); case VAR: { if (isBound(n->id)) return lookupBinding(n->id); else throw std::runtime_error ("CVCState::translateExpr: unexpected VAR: " + (n->id)); } case CONST: { if (varMap.count(n->id) > 0) return varMap.find(n->id)->second; else { throw std::runtime_error ("CVCState::translateExpr: unexpected CONST: " + (n->id)); } } case 
NATNUM: return vc->ratExpr(n->id); case I_EXP: case R_EXP: return vc->powExpr(es.at(0),es.at(1)); case ARR_ELEMENT: { if (n->child(1)->arity() == 1) {// Catch 1D array special case return vc->readExpr(es.at(0), es.at(1).getKids().at(0)); } else { return vc->readExpr(es.at(0), es.at(1)); } } case ARR_UPDATE: { if (n->child(1)->arity() == 1) {// Catch 1D array special case return vc->writeExpr(es.at(0), es.at(1).getKids().at(0), es.at(2)); } else { return vc->writeExpr(es.at(0), es.at(1), es.at(2)); } } case SEQ: // Only expect this to be used to tuple up index components // for ARR_ELEMENT and ARR_UPDATE case TUPLE: // Currently unused return vc->tupleExpr(es); // FUN_AP{funid} es --> // FUN_AP{integer__succ} e1 --> // FUN_AP{integer__pred} e1 --> case PRED_AP: case FUN_AP: { if (funMap.count(n->id) > 0) { return vc->funExpr(funMap.find(n->id)->second, es); } if (es.size() == 0 && varMap.count(n->id) > 0) { // Nullary PRED_APs. return varMap.find(n->id)->second; } else { throw std::runtime_error ("CVCState::translateExpr: unbound FUN_AP/PRED_AP id: " + (n->id)); } } // END case FUN_AP default: { throw std::runtime_error("CVCState::translateExpr: unrecognised kind " + kindString(n->kind)); } } // END switch } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // outputCounterExample //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Only call when CVC query returns false. void CVCState::outputCounterExample() { if (! option("counterex")) return; ExprMap m; vc->getConcreteModel(m); ostringstream oss; oss << "Counter Example" << endl; for (ExprMap::iterator i = m.begin(); i != m.end(); i++ ) { vc->printExpr(i->first, oss); oss << " --> "; vc->printExpr(i->second, oss); oss << endl; } printMessage(INFOm, oss.str()); return; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // processDecl //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ void CVCState::processDecl(Node* decl) { string dId = decl->id; switch (decl->kind) { case DEF_TYPE: { // DEF_TYPE {id} type // DEF_TYPE {id} Type t; if (decl->arity() > 0) { t = vc->createType(dId, translateType(decl->child(0))); } else { t = vc->createType(dId); } typeMap.insert(pair(dId, t)); return; } case DEF_CONST: { // DEF_CONST {id} type exp // DEF_CONST {id} type Expr c; Type t (translateType(decl->child(0))); if (decl->arity() == 2) { c = vc->varExpr(dId, t, translateExpr(decl->child(1))); } else { c = vc->varExpr(dId, t); } varMap.insert(pair(dId, c)); return; } case DECL_VAR: { // DECL_VAR {id} type Expr v = vc->varExpr(dId, translateType(decl->child(0))); varMap.insert(pair(dId, v)); return; } case DECL_FUN: { // DECL_FUN {id} (SEQ type+) type int domArity = decl->child(0)->arity(); vector domTys; for (int i = 0; i != domArity; i++) { domTys.push_back(translateType(decl->child(0)->child(i))); } Type rangeTy = translateType(decl->child(1)); Type funTy = vc->funType(domTys, rangeTy); Op op = vc->createOp(dId, funTy); funMap.insert(pair(dId, op)); return; } default: { throw std::runtime_error("CVCState::processDecl: unrecognised kind " + kindString(decl->kind)); } } // END switch } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // assertFormula //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ void CVCState::assertFormula(Node* n) { vc->assertFormula(translateExpr(n)); return; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // doCheck 
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ QueryResult CVCState::doCheck() { Expr falseExp = translateExpr(new Node(FALSE)); return vc->query(falseExp); } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // outOfResources //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // CVC3 implementation dependency here. bool CVCState::outOfResources() { return ((CVC3::VCL*) vc)->core()->outOfResources(); } // This doesn't work because timeLimitReached is private bool CVCState::timeLimitReached() { // return ((CVC3::VCL*) vc)->core()->timeLimitReached(); return false; } bool CVCState::incomplete() { return vc->incomplete(); } bool CVCState::incomplete(vector& reasons) { return vc->incomplete(reasons); } //========================================================================== // CVCDriver Virtual Functions //========================================================================== //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // translateUnit() //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Only do decls. Node* CVCDriver::translateUnit(Node* unit) { Node* newDecls = new Node(DECLS); Node* bitTy = new Node (BITVEC_TY, "1"); // Old alternative. No longer useful since need bit operations. // option("cvc-use-bitvec-for-bit-ty") // : new Node (SUBRANGE_TY, // new Node(NATNUM, "0"), // new Node(NATNUM, "1") // ); // DEF_TYPE{bit__ty} (SUBRANGE_TY NATNUM{0} NATNUM{1}) newDecls->addChild( new Node (DEF_TYPE, "bit__ty", bitTy)); newDecls->appendChildren(unit->child(0)); Node* newUnit = new Node(z::UNIT, "", newDecls, unit->child(1), unit->child(2)); return newUnit; } void CVCDriver::initGoal(const string& unitName, int goalNum, int conclNum) { string fullGoalFileRoot = getFullGoalFileRoot("cvc3", unitName, goalNum, conclNum); state = new CVCState(fullGoalFileRoot); } void CVCDriver::addDecl(Node* decl) { try { state->processDecl(decl); } catch (CVC3::Exception& e) { throw std::runtime_error ("CVCDriver::addDecl: caught CVC3::Exception" + ENDLs + e.toString()); } } void CVCDriver::addHyp(Node* h, const string& hId, string& remarks) { try { state->assertFormula(h); } catch (CVC3::Exception& e) { throw std::runtime_error ("CVCDriver::addHyp: caught CVC3::Exception" + ENDLs + e.toString()); } } void CVCDriver::addConcl(Node* n, string& format) { try { state->assertFormula(new Node(NOT, n)); } catch (CVC3::Exception& e) { throw std::runtime_error ("CVCDriver::addConcl: caught CVC3::Exception" + ENDLs + e.toString()); } } // virtual void finishSetup(); bool CVCDriver::checkGoal(string& remarks) { QueryResult queryResult; try { queryResult = state->doCheck(); } catch (CVC3::Exception& e) { appendCommaString(remarks, "check error"); printMessage (ERRORm, "CVCDriver::checkGoal: caught CVC3::Exception" + ENDLs + e.toString()); status = ERROR; return true; } switch (queryResult) { case CVC3::VALID: printMessage(FINEm, "concl true"); status = TRUE; return false; case CVC3::INVALID: appendCommaString(remarks, "definitely false"); printMessage(FINEm, "concl false"); state->outputCounterExample(); status = UNPROVEN; return false; case CVC3::UNKNOWN: { string reasonsString; vector reasons; state->incomplete(reasons); bool resourceLimitReached = false; for (int i = 0; i != (int) reasons.size(); i++) { appendCommaString(reasonsString, reasons.at(i)); if (reasons.at(i) == "Exhausted user-specified resource") { appendCommaString(remarks, "out of resources"); resourceLimitReached = 
true; } else if (reasons.at(i) == "Exhausted user-specified time limit") { appendCommaString(remarks, "timeout"); resourceLimitReached = true; } else if (reasons.at(i) == "Non-linear arithmetic equalities") { appendCommaString(remarks, "non-lin eqs"); } else if (reasons.at(i) == "Non-linear arithmetic inequalities") { appendCommaString(remarks, "non-lin ineqs"); } else if (reasons.at(i) == "Quantifier instantiation") { appendCommaString(remarks, "quant inst"); } else { appendCommaString(remarks, reasons.at(i)); } } printMessage(INFOm, "CVCDriver::checkGoal: got UNKNOWN query result" + ENDLs + reasonsString); state->outputCounterExample(); status = resourceLimitReached ? RESOURCE_LIMIT : UNPROVEN; return false; } case CVC3::ABORT: { appendCommaString(remarks, "query aborted"); bool returnVal = true; string reasonsString; vector reasons; state->incomplete(reasons); for (int i = 0; i != (int) reasons.size(); i++) { appendCommaString(reasonsString, reasons.at(i)); } printMessage(WARNINGm, "CVCDriver::checkGoal: got ABORT query result" + ENDLs + reasonsString); status = UNPROVEN; return returnVal; /* if ( state->outOfResources()) { appendCommaString(remarks, "out of resources"); printMessage(WARNINGm, "CVCDriver::checkGoal: CVC3 resource limit reached"); status = UNPROVEN; return false; } else if ( state->timeLimitReached()) { appendCommaString(remarks, "timeout"); printMessage(WARNINGm, "CVCDriver::checkGoal: CVC3 time limit reached"); status = UNPROVEN; return false; } else { appendCommaString(remarks, "query aborted"); printMessage(WARNINGm, "CVCDriver::checkGoal: got ABORT query result"); return true; } */ } default: assert(false); } // END switch assert(false); return true; } SMTDriver::Status CVCDriver::getResults(string& remarks) { return status; } void CVCDriver::finaliseGoal() { try { delete state; } catch (CVC3::Exception& e) { // Remarks not defined. // appendCommaString(remarks, "finalise goal error"); printMessage (ERRORm, "CVCDriver::finaliseGoal: caught CVC3::Exception" + ENDLs + e.toString()); // Not wonderful idea since smt-driver doesn't yet handle this! throw std::runtime_error ("CVCDriver::finaliseGoal: caught CVC3::Exception" + ENDLs + e.toString()); } return; } spark-2012.0.deb/victor/vct/src/rule-filter.hh0000644000175000017500000000410511753202341020101 0ustar eugeneugen//========================================================================== //========================================================================== // RULE-FILTER.HH //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ //========================================================================== // Figure out which user-supplied rules are not needed by SMT solver. 
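// The maps declared below (unrequiredUnitRules, unrequiredDirRules) record,
// per unit and per rule directory, which user-supplied rules were never
// needed by the solver.  This header does not show how those sets are
// maintained; one plausible reading, sketched purely for illustration, is
// that the running set for a unit is intersected with the unused-rule set of
// each successive goal, so only rules needed by no goal survive.
// recordUnusedRules and its parameter names are invented for this sketch and
// are not part of RuleFilter.

#include <algorithm>
#include <iterator>
#include <map>
#include <set>
#include <string>

static void recordUnusedRules(
    std::map<std::string, std::set<std::string> >& unrequired,
    const std::string& unitName,
    const std::set<std::string>& unusedInThisGoal,
    bool firstGoalOfUnit)
{
    if (firstGoalOfUnit) {
        unrequired[unitName] = unusedInThisGoal;   // seed from the first goal
        return;
    }
    std::set<std::string>& running = unrequired[unitName];
    std::set<std::string> kept;
    // Keep only rules that were also unused in this goal.
    std::set_intersection(running.begin(), running.end(),
                          unusedInThisGoal.begin(), unusedInThisGoal.end(),
                          std::inserter(kept, kept.begin()));
    running.swap(kept);
}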
#ifndef RULE_FILTER_HH #define RULE_FILTER_HH #include "smt-driver.hh" #include "utility.hh" #include using std::map; class RuleFilter : public SMTDriver { private: map > unrequiredUnitRules; map > unrequiredDirRules; map > unitRLURuleNames; map > dirRLURuleNames; protected: virtual vector driveQuerySet(UnitInfo* unitInfo, Node* unit, set excludedRules, int startQuery, int endQuery); void saveExclusionInfo(UnitInfo* unitInfo, set exclRules); void saveRuleNames(UnitInfo* unitInfo, Node* unit); virtual void finaliseSession(); }; #endif // ! RULE_FILTER_HH spark-2012.0.deb/victor/vct/src/smtlib-driver.cc0000644000175000017500000012605411753202341020430 0ustar eugeneugen//========================================================================== //========================================================================== // SMTLIB-DRIVER.CC //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ //========================================================================== #include using std::endl; using std::cerr; #include // for exit() and system() #include #include using std::vector; #include "smtlib-driver.hh" #include "node-utils.hh" #include "formatter.hh" #include "pprinter.hh" using namespace z; //========================================================================== // Pretty print SMTLib abstract syntax //========================================================================== class SMTLibFormatter : public Formatter { public: virtual Box& addSyntax(z::Kind k, const string& id, vector& bs); static Formatter* getFormatter(); private: static Formatter* instance; SMTLibFormatter(); static set reservedWords; string standardiseId(const string& s); }; Box& SMTLibFormatter::addSyntax(z::Kind k, const std::string& id, vector& bs) { switch (k) { // Top level case(BENCHMARK): return makeStringAp("benchmark " + id, bs); case(LOGIC): return box(":logic " + id); case(EXTRASORTS): return PP::makeHVSeq(":extrasorts ", "(", "", ")", bs); case(EXTRAFUNS): return PP::makeHVSeq(":extrafuns ", "(", "", ")", bs); case(EXTRAPREDS): return PP::makeHVSeq(":extrapreds ", "(", "", ")", bs); case(ASSUMPTION): return box(":assumption") / *(bs.at(0)); case(FORMULA): return box(":formula") / *(bs.at(0)); case(STATUS): return box(":status " + id); // Declarations case(DECL_FUN): return makeStringAp(standardiseId(id), bs); case(DECL_PRED): return makeStringAp(standardiseId(id), bs); // Sorts case(TYPE_ID): case(TCONST): return box(standardiseId(id)); case(INT_TY): return box("Int"); // Formulas case(OR): return makeStringAp("or", bs); case(AND): return makeStringAp("and", bs); case(NOT): return makeStringAp("not", bs); case(IMPLIES): return makeStringAp("implies", bs); case(IFF): return 
makeStringAp("iff", bs); case(TRUE): return box("true"); case(FALSE): return box("false"); case(FORALL): return makeStringAp("forall", bs); case(EXISTS): return makeStringAp("exists", bs); case(PAT): return PP::makeHVSeq(":pat {", "", "", "}", bs); case(PRED_AP): { if (bs.size() == 0) return box(standardiseId(id)); else return makeStringAp(standardiseId(id), bs); } case(EQ): return makeStringAp("=", bs); case(DISTINCT): return makeStringAp("distinct", bs); // SEQ is not quite prettiest: will indent vertical sequence // extra amount. case(TUPLE): case(SEQ): return PP::makeHVSeq("", " ", "", "", bs); case(DECL): return makeStringAp("?" + id, bs); // Terms case(FUN_AP): return makeStringAp(standardiseId(id), bs); case(ITE): return makeStringAp("ite", bs); case(IDIV): return makeStringAp("div", bs); case(MOD): return makeStringAp("mod", bs); case(CONST): return box(standardiseId(id)); case(VAR): return box("?" + id); case(NATNUM): return box(id); case(REALNUM): return box(id + ".0"); default: printMessage(ERRORm, "SMTLibFormatter::addSyntax: " + ENDLs + "Encountered unsupported kind " + kindString(k)); return PP::makeHVSeq("***" + kindString(k) + "{" + id + "}***", "[", ";", "]", bs ); } } SMTLibFormatter::SMTLibFormatter() { // Generic reserved words reservedWords.insert("and"); reservedWords.insert("benchmark"); reservedWords.insert("distinct"); reservedWords.insert("exists"); reservedWords.insert("false"); reservedWords.insert("flet"); reservedWords.insert("forall"); reservedWords.insert("if_then_else"); reservedWords.insert("iff"); reservedWords.insert("implies"); reservedWords.insert("ite"); reservedWords.insert("let"); reservedWords.insert("logic"); reservedWords.insert("not"); reservedWords.insert("or"); reservedWords.insert("sat"); reservedWords.insert("theory"); reservedWords.insert("true"); reservedWords.insert("unknown"); reservedWords.insert("unsat"); reservedWords.insert("xor"); // Symbols used in AUFLIA that we should avoid clashing with. reservedWords.insert("Int"); reservedWords.insert("select"); reservedWords.insert("store"); // Keywords used in SMTLIB syntax // Yices and CVC3 are fussy about these being used as identifiers. reservedWords.insert("assumption"); reservedWords.insert("axioms"); reservedWords.insert("definition"); reservedWords.insert("extensions"); reservedWords.insert("formula"); reservedWords.insert("funs"); reservedWords.insert("extrafuns"); reservedWords.insert("extrasorts"); reservedWords.insert("extrapreds"); reservedWords.insert("language"); // Logic already covered previously // reservedWords.insert("logic"); reservedWords.insert("notes"); reservedWords.insert("preds"); reservedWords.insert("sorts"); reservedWords.insert("status"); reservedWords.insert("theory"); // Extra keywords accepted by Z3 in SMTLIB reading mode reservedWords.insert("div"); reservedWords.insert("mod"); reservedWords.insert("rem"); // Extra keywords recognised by Yices in SMTLIB reading mode reservedWords.insert("real"); reservedWords.insert("number"); }; // Differentiate id string from any of reserved words string SMTLibFormatter::standardiseId(const string& s) { if (reservedWords.find(s) != reservedWords.end()) return s + "'"; else return s; } Formatter* SMTLibFormatter::getFormatter() { if (instance == 0) instance = new SMTLibFormatter; return instance; } // Storage declarations for static class members. 
Formatter* SMTLibFormatter::instance = 0; set SMTLibFormatter::reservedWords; //========================================================================== // Alternate Simplify-format printer //========================================================================== class AltSimpFormatter : public Formatter { public: virtual Box& addSyntax(z::Kind k, const string& id, vector& bs); static Formatter* getFormatter(); private: static Formatter* instance; AltSimpFormatter(); static set reservedWords; string standardiseId(const string& s); }; Box& AltSimpFormatter::addSyntax(z::Kind k, const std::string& id, vector& bs) { switch (k) { // Top level case(BENCHMARK): return PP::makeHVSeq("", "", "", "", bs); case(LOGIC): return box(""); case(EXTRAPREDS): return PP::makeHVSeq("", "", "", "", bs); case(ASSUMPTION): return makeStringAp("BG_PUSH",bs); case(FORMULA): return *(bs.at(0)); case(STATUS): return box(""); // Declarations case(DECL_FUN): return box(""); case(DECL_PRED): return PP::makeHVSeq("(DEFPRED (|" + standardiseId(id) + "|", " ", " ", "))", bs); // Sorts case(TCONST): return box(""); case(TYPE_ID): return box(""); case(INT_TY): return box("Int"); // Formulas case(OR): return makeStringAp("OR", bs); case(AND): return makeStringAp("AND", bs); case(NOT): return makeStringAp("NOT", bs); case(IMPLIES): return makeStringAp("IMPLIES", bs); case(IFF): return makeStringAp("IFF", bs); case(TRUE): return box("TRUE"); case(FALSE): return box("FALSE"); case(FORALL): return makeStringAp("FORALL", bs); case(EXISTS): return makeStringAp("EXISTS", bs); case(PAT): return makeStringAp("PATS", bs); case(PRED_AP): return makeStringAp("|" + standardiseId(id) + "|", bs); case(EQ): return makeStringAp("EQ", bs); case(DISTINCT): return makeStringAp("DISTINCT", bs); // SEQ is not quite prettiest: will indent vertical sequence // extra amount. case(SEQ): return PP::makeHVSeq("(", " ", " ", ")", bs); case(DECL): return box("|?" + id + "|"); // Terms case(FUN_AP): return makeStringAp("|" + standardiseId(id) + "|", bs); case(ITE): return makeStringAp("ite", bs); case(CONST): return box("|" + standardiseId(id) + "|"); case(VAR): return box("|?" + id + "|"); case(NATNUM): return box(id); default: printMessage(ERRORm, "AltSimpFormatter::addSyntax: " + ENDLs + "Encountered unsupported kind " + kindString(k)); return PP::makeHVSeq("***" + kindString(k) + "{" + id + "}***", "[", ";", "]", bs ); } } AltSimpFormatter::AltSimpFormatter() { reservedWords.insert("select"); reservedWords.insert("store"); reservedWords.insert("DISTINCT"); reservedWords.insert("OR"); reservedWords.insert("AND"); reservedWords.insert("NOT"); reservedWords.insert("IMPLIES"); reservedWords.insert("IFF"); reservedWords.insert("TRUE"); reservedWords.insert("FALSE"); reservedWords.insert("FORALL"); reservedWords.insert("EXISTS"); reservedWords.insert("EQ"); reservedWords.insert("PATS"); reservedWords.insert("ite"); }; // Differentiate id string from any of reserved words string AltSimpFormatter::standardiseId(const string& s) { if (reservedWords.find(s) != reservedWords.end()) return s + "'"; else return s; } Formatter* AltSimpFormatter::getFormatter() { if (instance == 0) instance = new AltSimpFormatter; return instance; } // Storage declarations for static class members. 
Formatter* AltSimpFormatter::instance = 0; set AltSimpFormatter::reservedWords; /* ============================================================================ New SMTLIB Translator class ============================================================================ Cut down from old one. So far, no fixes for Alt Simplify translation. */ class NewSMTLibTranslator : public Translator { public: NewSMTLibTranslator() : Translator("NewSMTLib") {}; protected: virtual Node* translateAux (Node* oldN); }; Node* NewSMTLibTranslator::translateAux (Node* oldN) { /* ---------------------------------------------------------------------------- Top-level translation ---------------------------------------------------------------------------- Basic bottom up translation needs a little care to ensure some Node children serving as extra parameters (e.g. types on RCD_* operators) are not altered. */ //- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // top level for SMTLIB translation //- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - if (oldN->kind == UNIT && optionVal("interface-mode") == "smtlib") { Node* decls = oldN->child(0); // Node* rules = oldN->child(1); // Node* goals = oldN->child(2); Node* extrafuns = new Node(EXTRAFUNS); Node* extrapreds = new Node(EXTRAPREDS); Node* extrasorts = new Node(EXTRASORTS); for (int i = 0; i != decls->arity(); i++) { Node* decl = decls->child(i); // decl == DECL_FUN{id} (SEQ t1 ... tn) t if (decl->kind == DECL_FUN) { if (decl->child(1)->kind == BOOL_TY) { // DECL_FUN{id} (SEQ t1 ... tn) BOOL_TY // --> DECL_PRED{id} t1 .. tn // Build result from SEQ node Node* newDecl = decl->child(0); newDecl->kind = DECL_PRED; newDecl->id = decl->id; extrapreds->addChild(newDecl); } else { // DECL_FUN{id} (SEQ t1 ... tn) t {t != BOOL_TY} // --> DECL_FUN{id} t1 .. tn t // Build result from SEQ node Node* newDecl = decl->child(0); newDecl->kind = DECL_FUN; newDecl->id = decl->id; newDecl->addChild(decl->child(1)); extrafuns->addChild(newDecl); } } else if (decl->kind == DEF_CONST || decl->kind == DECL_VAR) { // Assume constant value never specified. // True of Examiner output. // Flag value specified as error. if (decl->arity() > 1) { error("encountered DEF_CONST with value"); decl->popChild(); } if (decl->child(0)->kind == BOOL_TY) { // DEF_CONST{id} BOOL_TY --> DECL_PRED{id'} // DECL_VAR{id} BOOL_TY --> DECL_PRED{id'} decl->kind = DECL_PRED; decl->popChild(); extrapreds->addChild(decl); } else { // DEF_CONST{id} t --> DECL_FUN{id} t // DECL_VAR{id} t --> DECL_FUN{id'} t // t != BOOL_TY decl->kind = DECL_FUN; extrafuns->addChild(decl); } } else if (decl->kind == DEF_TYPE) { // DEF_TYPE{id} --> TCONST{id} // DEF_TYPE{id} T --> // If T in {REAL_TY, INT_TY, TYPE_ID} just ignore // otherwise flag as error. if (decl->arity() == 0) { extrasorts->addChild(new Node(TCONST, decl->id)); } else { // decl->arity() == 1 Node* type = decl->child(0); if (! (type->kind == TYPE_ID || type->kind == INT_TY || type->kind == REAL_TY)) { error("Unexpected type " + type->toShortString() + " in type definition"); } } } } // END for loop over decls // Add in new declarations to decls decls->clearChildren(); if (extrasorts->arity() > 0) decls->addChild(extrasorts); if (extrafuns->arity() > 0) decls->addChild(extrafuns); if (extrapreds->arity() > 0) decls->addChild(extrapreds); // Continue now with translation of lower levels. 
} // END if kind == UNIT && optionVal(interface-mode) is SMTLIB //- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // top level for Simplify translation //- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - else if (oldN->kind == UNIT) { // optionVal("interface-mode") == "simplify" Node* decls = oldN->child(0); // Node* rules = oldN->child(1); // Node* goals = oldN->child(2); Node* extrapreds = new Node(EXTRAPREDS); for (int i = 0; i != decls->arity(); i++) { Node* decl = decls->child(i); if (decl->kind == DECL_FUN && decl->child(1)->kind == BOOL_TY) { // DECL_FUN{id} (SEQ t1 ... tn) BOOL_TY // --> DECL_PRED{id} "v"1 ... "v"n // (Simplify prints DECL_PRED using DEFPRED) Node* pred = new Node(DECL_PRED, decl->id); int domainArity = decl->child(0)->arity(); for (int i = 0; i != domainArity; i++) { pred->addChild(new Node(CONST, "v" + intToString(i))); } extrapreds->addChild(pred); } else if (decl->kind == DEF_CONST || decl->kind == DECL_VAR) { // Assume constant value never specified. // True of Examiner output. // Flag value specified as error. if (decl->arity() > 1) { error("encountered DEF_CONST with value"); decl->popChild(); } if (decl->child(0)->kind == BOOL_TY) { // DEF_CONST{id} BOOL_TY --> DECL_PRED{id'} // DECL_VAR{id} BOOL_TY --> DECL_PRED{id'} decl->kind = DECL_PRED; decl->popChild(); extrapreds->addChild(decl); } } } // End for loop over decls // Add in new declarations to decls decls->clearChildren(); if (extrapreds->arity() > 0) decls->addChild(extrapreds); // Continue now with translation of lower levels. } // End if UNIT and simplify interface mode /* ---------------------------------------------------------------------------- Translate subtrees of current node. ---------------------------------------------------------------------------- */ for (int i = 0; i != oldN->arity(); i++) { oldN->child(i) = translateAux(oldN->child(i)); } /* ---------------------------------------------------------------------------- Translate current node ---------------------------------------------------------------------------- */ switch (oldN->kind) { /* ---------------------------------------------------------------------------- Formulas ---------------------------------------------------------------------------- */ case FORALL: case EXISTS: { if (optionVal("interface-mode") == "simplify" && oldN->arity() == 3) { Node* decls = oldN->child(0); Node* fmla = oldN->child(1); Node* pat = oldN->child(2); oldN->child(0) = decls; oldN->child(1) = pat; oldN->child(2) = fmla; } return oldN; } case PAT: case IMPLIES: case AND: case OR: case NOT: case IFF: case EQ: case TRUE: case FALSE: case DISTINCT: case PRED_AP: return oldN; case NE: { oldN->kind = EQ; return new Node(NOT, oldN); } case I_LT: return oldN->updateKindAndId(PRED_AP, "<"); case I_LE: return oldN->updateKindAndId(PRED_AP, "<="); /* ---------------------------------------------------------------------------- Terms ---------------------------------------------------------------------------- */ case I_PLUS: return oldN->updateKindAndId(FUN_AP, "+"); case I_MINUS: return oldN->updateKindAndId(FUN_AP, "-"); case I_TIMES: return oldN->updateKindAndId(FUN_AP, "*"); case I_UMINUS: { if (optionVal("interface-mode") == "simplify") return new Node(FUN_AP, "-", new Node(NATNUM, "0"), oldN->child(0)); else return oldN->updateKindAndId(FUN_AP, "~"); } // IDIV and MOD are treated specially by pretty-printing case IDIV: return oldN; case MOD: return oldN; /* { // TO_REAL NATNUM{n} --> REALNUM{n} // TO_REAL (I_UMINUS 
NATNUM{n}) --> R_UMINUS REALNUM{n} // TO_REAL e --> FUN_AP{i.to_real} e otherwise Node* intNode = oldN->child(0); bool isNegated = false; if (intNode->kind == NATNUM) { intNode->kind = REALNUM; return intNode; } else if (intNode->kind == I_UMINUS && intNode->child(0)->kind == NATNUM) { intNode->kind = R_UMINUS; intNode->child(0)->kind = REALNUM; return intNode; } else { return oldN->updateKindAndId(FUN_AP, "i.to_real"); } } */ case CONST: case VAR: case NATNUM: return oldN; case DECL: return oldN; // Assume only use of DECL is in quantifiers // FUN_AP{} es --> FUN_AP{} es case FUN_AP: { return oldN; } case ITE: { return oldN; } /* ---------------------------------------------------------------------------- Multipurpose ---------------------------------------------------------------------------- */ case TUPLE: return oldN; case SEQ: return oldN; /* ---------------------------------------------------------------------------- Types ---------------------------------------------------------------------------- */ case TYPE_ID: case INT_TY: return oldN; case REAL_TY: return new Node(TYPE_ID, "real"); /* ---------------------------------------------------------------------------- Top level structure with no further changes ---------------------------------------------------------------------------- */ case UNIT: case DECLS: case EXTRAFUNS: case DECL_FUN: case EXTRAPREDS: case DECL_PRED: case EXTRASORTS: case TCONST: case RULES: case RULE: case GOALS: case GOAL: case HYPS: case CONCLS: return oldN; /* ---------------------------------------------------------------------------- Constructors without translation ---------------------------------------------------------------------------- */ case ENUM_TY: case SUBRANGE_TY: case RECORD_TY: case ARRAY_TY: case BIT_TY: case BOOL_TY: case LE: case GE: default: { error ("unrecognised kind " + kindString(oldN->kind) ); return oldN; } } // end switch(oldN->kind) } //======================================================================== // Virtual functions for SMTLIBDriver class //======================================================================== Node* SMTLibDriver::translateUnit(Node* unit) { NewSMTLibTranslator t; return t.translate(unit); } // Create a valid SMTLib identifier from argument. // Replaces all unacceptable characters with '.'s. // Not concerned here with ensuring function is injective. bool isSMTLibIdChar(char c) { return ('A' <= c && c <= 'Z') || ('a' <= c && c <= 'z') || ('0' <= c && c <= '9') || c == '.' || c == '\'' || c == '_'; } string mkSMTLibId(const string& s) { string result; for (int i = 0; i != (int) s.size(); i++) { char c = s.at(i); if (isSMTLibIdChar(c)) result.append(1,c); else result.append(1,'.'); } return result; } void SMTLibDriver::initGoal(const string& unitName, int goalNum, int conclNum) { string fullGoalFileRoot = getFullGoalFileRoot("smtsolver", unitName, goalNum, conclNum); if (optionVal("interface-mode") == "simplify") { solverInputFileName = fullGoalFileRoot + ".smp"; solverOutputFileName = fullGoalFileRoot + ".outsmp"; solverErrorFileName = fullGoalFileRoot + ".errsmp"; } else { solverInputFileName = fullGoalFileRoot + ".smt"; solverOutputFileName = fullGoalFileRoot + ".out"; solverErrorFileName = fullGoalFileRoot + ".err"; } string benchname = mkSMTLibId(unitName) + "." + intToString(goalNum); if (conclNum != 0) benchname += "." + intToString(conclNum); string logic(option("logic") ? 
optionVal("logic") : "AUFLIA"); benchmark = new Node(BENCHMARK, benchname, new Node(LOGIC,logic)); formula = new Node(SEQ); return; } void SMTLibDriver::addDecl(Node* decl) { benchmark->addChild(decl); } void SMTLibDriver::addRule(Node* hyp, const string& hId, string& remarks) { benchmark->addChild(new Node(ASSUMPTION,hyp)); } void SMTLibDriver::addHyp(Node* hyp, const string& hId, string& remarks) { if (option("smtlib-hyps-as-assums")) benchmark->addChild(new Node(ASSUMPTION,hyp)); else formula->addChild(hyp); } void SMTLibDriver::addConcl(Node* concl, string& remarks) { if (optionVal("interface-mode") == "simplify") { formula->addChild(concl); } else { formula->addChild(new Node(NOT, concl)); } } void SMTLibDriver::finishSetup() { Node* goal; if (formula->arity() == 0) { goal = new Node(z::TRUE); } else if (formula->arity() == 1) { goal = formula->child(0); } else { goal = formula; goal->kind = AND; } benchmark->addChild(new Node(FORMULA,goal)); benchmark->addChild(new Node(STATUS,"unknown")); ofstream solverInput; solverInput.open(solverInputFileName.c_str()); if (solverInput.fail()) { cerr << endl << "Error on trying to open file " << solverInputFileName << endl; exit(1); } if (optionVal("interface-mode") == "simplify") Formatter::setFormatter(AltSimpFormatter::getFormatter()); else Formatter::setFormatter(SMTLibFormatter::getFormatter()); solverInput << *benchmark << endl; solverInput.close(); } //--------------------------------------------------------------------------- // checkGoal //--------------------------------------------------------------------------- bool SMTLibDriver::checkGoal(string& remarks) { string cmd; if (! (option("prover") || option("prover-command") )) { return false; } if (option("prover-command")) { cmd = optionVal("prover-command") + " "; } // Must be case now that prover option set else if (optionVal("prover") == "yices") { cmd = "yices -smt "; // timeout value is timeout in sec. if (option("timeout")) cmd += "--timeout=" + optionVal("timeout") + " "; } else if (optionVal("prover") == "z3") { if (optionVal("interface-mode") == "simplify") cmd = "z3 -s "; else cmd = "z3 -smt "; if (option("z3-fourier-motzkin")) cmd += "FOURIER_MOTZKIN_ELIM=true "; if (option("timeout")) // Was not supported in Z3 v1.3 Linux. cmd += "SOFT_TIMEOUT=" + optionVal("timeout") + " "; } else if (optionVal("prover") == "cvc3") { cmd = "cvc3 -lang smt "; if (option("timeout")) cmd += "-timeout " + optionVal("timeout") + " "; if (option("resourcelimit")) cmd += "-resource " + optionVal("resourcelimit") + " "; } else if (optionVal("prover") == "simplify") { // -nosc = No satisfying conjunctions (for counterexamples) // Causes simplify to return just "valid" or "invalid". // See hpl/simplify/src/Simplify.1.html cmd = "simplify -nosc "; } else { printMessage(ERRORm, "Unrecognised prover option: " + optionVal("prover") + ENDLs); return false; } cmd = withTimeoutAndIO(cmd, solverInputFileName, solverOutputFileName, solverErrorFileName); if (option("doublerun")) cmd = cmd + " ; " + cmd; printMessage(INFOm, "Running command" + ENDLs + cmd + ENDLs); exitStatus = std::system(cmd.c_str()); // For SMT mode, exit status is not reliable guide for something going // wrong. z3 -smt returns non-zero status when result is unknown. 
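  // The body of withTimeoutAndIO is not shown in this file.  Judging from how
  // solverInputFileName, solverOutputFileName and solverErrorFileName are
  // written here and re-read in getResults() below, the wrapper presumably
  // yields a shell command of roughly this shape (illustrative sketch only,
  // not the actual implementation; resource limits may instead be applied via
  // ulimit or the timeout.sh script mentioned in getResults):
  //
  //   std::string withTimeoutAndIO(const std::string& cmd, const std::string& in,
  //                                const std::string& out, const std::string& err)
  //   {
  //       return cmd + " < " + in + " > " + out + " 2> " + err;
  //   }
  //
  // This is also why getResults() parses both captured output files rather
  // than trusting the exit status of std::system alone.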
printMessage(INFOm, "Exit status is " + intToString(exitStatus)); return false; } //--------------------------------------------------------------------------- // getResults //--------------------------------------------------------------------------- SMTLibDriver::Status SMTLibDriver::getResults(string& remarks) { // Do not check output files if none were generated in first place if (! (option("prover") || option("prover-command") )) { return UNPROVEN; } // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Read in output and error files from solver // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ifstream solverOut (solverOutputFileName.c_str() ); ifstream solverErr (solverErrorFileName.c_str() ); if (!solverOut) { printMessage(ERRORm, "Cannot open output file " + solverOutputFileName); appendCommaString(remarks, "Outfile not found"); return ERROR; } if (!solverErr) { printMessage(ERRORm, "Cannot open error output file " + solverErrorFileName); appendCommaString(remarks, "Errorfile not found"); return ERROR; } vector solverOutput; vector solverErrOutput; { string line; while (getline(solverOut, line)) solverOutput.push_back(line); solverOut.close(); while (getline(solverErr, line)) solverErrOutput.push_back(line); solverErr.close(); } // flags for processing of standard error file bool seenTimeout = false; bool seenWarning = false; bool seenUnexpectedErrOutput = false; // flags for processing of standard output file bool seenSatOutput = false; bool seenUnsatOutput = false; bool seenUnknownOutput = false; bool seenUnexpectedOutput = false; if (! (optionVal("interface-mode") == "simplify")) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // interface-mode == smtlib // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // // First inspect error output. Check for // - warning messages than can be logged // - Timeout termination messages // for (int i = 0; i != (int) solverErrOutput.size(); i++) { string s = solverErrOutput.at(i); if (tokeniseString(s).size() == 0) { continue; } // Detect message produced by ulimit -t killing process // See below for alternate method of detecting this timeout. // Approach here works on Scientific Linux 5.3 but not // Ubuntu 10.4.2. if (hasPrefix(s, "sh: line 1:") && hasSubstring(s, "Killed")) { // appendCommaString(remarks, "timeout (ulim)"); // printMessage(INFOm, // "SMTLib solver reached ulimit time limit"); // seenTimeout = true; continue; } // Detect message produced by ulimit -v killing process if (hasPrefix(s, "Fatal error: out of memory.")) { appendCommaString(remarks, "stacklimit (ulim)"); printMessage(INFOm, "SMTLib solver reached ulimit stackspace limit"); // We treat this the same as a "normal" timeout seenTimeout = true; continue; } // Detect message from timeout.sh script // Script outputs // ./timeout.sh: line 37: 22246 Terminated sleep $timeout // When command executed has error return value. // Seems z3 has error return value on unsat! if (hasPrefix(s, "./timeout.sh: line 37") && hasSubstring(s, "Terminated")) { continue; } // cvc3 outputs on kill (because of shell timeout) // Interrupted by signal 15. /home/pbj/smt/cvc3/r2.1/bin/cvc3 is aborting. if (hasPrefix(s, "Interrupted by signal 15.")) { // Just ignore this. Should also see "timeout" on stdout continue; } // cvc3 outputs on stderr on self timeout // Interrupted by signal 14 (self-timeout). /home/pbj/smt/cvc3/r2.2/bin/cvc3 is aborting. 
// sh: line 1: 23491 Aborted /home/pbj/smt/cvc3/r2.2/bin/cvc3 -lang smt -timeout 1 /tmp/smtsolver-vulcan.inf.ed.ac.uk-23485.smt if (hasPrefix(s, "Interrupted by signal 14 (self-timeout).")) { appendCommaString(remarks, "timeout (cvc3)"); printMessage(INFOm, "SMTLib solver had self timeout"); seenTimeout = true; continue; } if (hasPrefix(s, "sh: line 1:") && hasSubstring(s, "Aborted")) { continue; } // z3 at least uses this. if (hasPrefix(s,"WARNING:")) { seenWarning = true; printMessage(WARNINGm, "Warning message from SMTLib solver" + ENDLs + s); continue; } seenUnexpectedErrOutput = true; } if (seenWarning) appendCommaString(remarks, "warning(s)"); // Allow for multiple lines: Alt-Ergo sometimes outputs // multiple lines of unsats when unsat discovered on // assumption assertion, it seeems for (int i = 0; i != (int) solverOutput.size(); i++) { string s = solverOutput.at(i); if (tokeniseString(s).size() == 0) { continue; } if (s == "unsat") { seenUnsatOutput = true; } else if (s == "sat") { seenSatOutput = true; appendCommaString(remarks, "sat"); } // "unknown (sat)" is output occasionally by Alt-Ergo else if (s == "unknown" || s == "unknown (sat)") { appendCommaString(remarks, "unknown"); seenUnknownOutput = true; } // Timeout message from timeout.sh else if (s == "timeout") { appendCommaString(remarks, "timeout (sh2)"); printMessage(INFOm, "SMTLib solver reached timeout.sh time limit"); seenTimeout = true; } // Proof steps limit message from Alt-Ergo else if (hasPrefix(s, "Steps limit reached")) { appendCommaString(remarks, "steps limit reached"); printMessage(INFOm, "Alt-Ergo proof steps limit reached"); seenTimeout = true; } else { seenUnexpectedOutput = true; } // Who will write out this? Is this in competition? // else if (solverOutput.at(0) == "timeout") { // appendCommaString(remarks, "timeout"); // status = UNPROVEN; // } } } else { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // interface-mode == simplify // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Sometimes see // - "Error" // - "ASSERT failed" // - "Bad input:" on stdout // - "Warning:" // - "Counterexample:" on stdout (with Z3 -s) // When it's clear which output stream these are seen on, can add // tests for them. for (int i = 0; i != (int) solverErrOutput.size(); i++) { string s = solverErrOutput.at(i); // Detect message produced by ulimit -t killing process if (hasPrefix(s, "sh: line 1:") && hasSubstring(s, "Killed")) { appendCommaString(remarks, "timeout"); printMessage(INFOm, "Solver reached ulimit time limit"); seenTimeout = true; continue; } if (hasSubstring(s, "ASSERT failed")) { appendCommaString(remarks, "ASSERT failed"); printMessage(INFOm, "Simplify ASSERT failed" + ENDLs); } if (tokeniseString(s).size() > 0) { seenUnexpectedErrOutput = true; } } for (int i = 0; i != (int) solverOutput.size(); i++) { string s = solverOutput.at(i); // Expect : Valid. if (hasSubstring(s,"Valid.")) { seenUnsatOutput = true; } else if (hasSubstring(s,"Invalid.")) { seenSatOutput = true; } else if (hasSubstring(s,"Counterexample:")) { continue; } else if (tokeniseString(s).size() > 0) { seenUnexpectedOutput = true; } } } // Detecting termination signals, not on windows platform: #ifndef _WIN32 // Code here discovered by trial and error and reading man pages. // Man page for getrlimit makes clear that a process is terminated using // a KILL signal when the time limit is reached. // // system(3) man page says that return status of system call is in format // specified on wait(2) man page. 
// wait(2) man page suggests using WIFSIGNALED(exitStatus) to // detect if process terminated by signal, and WTERMSIG to extract // this signal. // This doesn't work. Instead, even on exit because of signal, // are seeing WIFEXITED() returning 1 (true). Get this even without // running in sub-process with (). (see above). // With timeout, are seeing exitStatus == 35072 and // WEXITSTATUS(exitStatus) == 137. (As wait(2) man page remarks, // WEXITSTATUS picks out bits 15-8 of argument) // Some web pages remark on exit codes of form 128 + signumber being used // to flag processes terminated by signals. SIGKILL has value 9, hence // this 137. // Can see this 137 by e.g. doing: // ulimit -t 1 ; z3 -smt x.smt ; echo $? /* printMessageWithHeader ("DEBUG", "exitStatus = " + intToString(exitStatus) + ENDLs + "WIFSIGNALED = " + intToString(WIFSIGNALED(exitStatus)) + ENDLs + "WTERMSIG = " + intToString(WTERMSIG(exitStatus)) + ENDLs + "WIFEXITED = " + intToString(WIFEXITED(exitStatus)) + ENDLs + "WEXITSTATUS = " + intToString(WEXITSTATUS(exitStatus)) ); */ if (WIFEXITED(exitStatus)) { int exitCode = WEXITSTATUS(exitStatus); if (exitCode == 128 + SIGKILL) { appendCommaString(remarks, "timeout (exit code 137)"); printMessage(INFOm, "Solver killed. Assume ulimit time limit reached."); seenTimeout = true; } else if (exitCode != EXIT_SUCCESS) { // EXIT_SUCCESS == 0 usually appendCommaString(remarks, "exit code " + intToString(exitCode)); } } else if (WIFSIGNALED(exitStatus)) { printMessage(WARNINGm, "Subprocess termination on signal " + intToString(WTERMSIG(exitStatus))); } else { printMessage(WARNINGm, "Unexpected subprocess exit status " + intToString(exitStatus)); } #endif // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Report on output and decide return status // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - if (seenUnexpectedErrOutput || seenUnexpectedOutput) { string outMessage = concatStrings(solverOutput, ENDLs); string errMessage = concatStrings(solverErrOutput, ENDLs); printMessage(ERRORm, "Error(s) on prover output." + ENDLs + "On STDOUT:" + ENDLs + outMessage + ENDLs + "On STDERR: " + ENDLs + errMessage + ENDLs + "END of output"); return ERROR; } // All output expected. if (seenTimeout) return RESOURCE_LIMIT; if (seenSatOutput || seenUnknownOutput) return UNPROVEN; if (seenUnsatOutput) return TRUE; // Both outputs empty. printMessage(ERRORm, "Prover standard output and error output both empty"); return ERROR; } //--------------------------------------------------------------------------- // finaliseGoal() //--------------------------------------------------------------------------- void SMTLibDriver::finaliseGoal() { if (option("delete-working-files")) { tryRemoveFile(solverInputFileName); tryRemoveFile(solverOutputFileName); tryRemoveFile(solverErrorFileName); } return; } //========================================================================= // END OF FILE //========================================================================= spark-2012.0.deb/victor/vct/src/csvisect.cc0000644000175000017500000000401711753202341017462 0ustar eugeneugen/* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. 
Victor is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
PARTICULAR PURPOSE. See the GNU General Public License for more details.

A copy of the GNU General Public License V3 can be found in file LICENSE.txt
and online at http://www.gnu.org/licenses/.

*/

// USAGE
// csvisect filename1 filename2
//
// Only return list of those records in filename1 that also occur in filename2.

#include <iostream>
#include <set>
#include <cstdlib>

using std::cout;
using std::cin;
using std::cerr;
using std::endl;
using std::istream;

#include <fstream>
using std::ifstream;

#include "utility.hh"

int main (int argc, char *argv[]) {

    vector<string> args = processCommandArgs(argc, argv);

    ifstream ifs1;
    ifstream ifs2;

    if (args.size() < 2) {
        cerr << "Usage" << endl
             << "" << endl
             << "csvisect filename1 filename2" << endl
             << "" << endl
             << "Only return list of those records in filename1 that also " << endl
             << "occur in filename2" << endl;
        exit(1);
    }

    string fileName1(args[0]);
    string fileName2(args[1]);

    ifs1.open( fileName1.c_str() );
    if (!ifs1) {
        cerr << "Unable to open file 1: " << fileName1 << endl;
        exit(1);
    }

    ifs2.open( fileName2.c_str() );
    if (!ifs2) {
        cerr << "Unable to open file 2: " << fileName2 << endl;
        exit(1);
    }

    string line1;
    string line2;
    set<string> set1;

    while (getline(ifs1, line1)) {
        set1.insert(line1);
    }

    while (getline(ifs2, line2)) {
        if (set1.find(line2) != set1.end()) cout << line2 << endl;
    }

    return 0;
}
spark-2012.0.deb/victor/vct/src/csvproj.cc0000644000175000017500000000554711753202341017336 0ustar eugeneugen/* This file is part of Victor: a SPARK VC Translator and Prover Driver.

Copyright (C) 2009, 2010 University of Edinburgh
Author(s): Paul Jackson

Victor is free software: you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
Foundation, either version 3 of the License, or (at your option) any later
version.

Victor is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
PARTICULAR PURPOSE. See the GNU General Public License for more details.

A copy of the GNU General Public License V3 can be found in file LICENSE.txt
and online at http://www.gnu.org/licenses/.

*/

// USAGE
// csvproj n1 ... nk [filename]
// n1 ... nk are field numbers (1-based)
// filename is input filename to use.
// If filename not provided, cin is used instead.

#include <iostream>
#include <cstdlib>

using std::cout;
using std::cin;
using std::cerr;
using std::endl;
using std::istream;

#include <fstream>
using std::ifstream;

#include "utility.hh"

int main (int argc, char *argv[]) {

    vector<string> nonOptions = processCommandArgs(argc, argv);

    if (nonOptions.size() < 1) {
        cerr << "Usage" << endl << endl
             << " csvproj n1 ... nk [filename] " << endl << endl
             << "Project out selected fields of input records." << endl
             << "n1 ... nk are the (1-based) numbers of the fields to select." << endl
             << "filename is input filename to use." << endl
             << "If filename is not provided, stdin is used instead." << endl
             << "The result is written to stdout." << endl;
        return 0;
    }

    istream* isp = &cin;
    ifstream ifs;

    if (!
isIntString(nonOptions.back()) ) { string fileName (nonOptions.back()); nonOptions.pop_back(); ifs.open( fileName.c_str() ); if (!ifs) { cerr << "Unable to open file " << fileName << endl; exit(1); } isp = &ifs; } string line; while (getline(*isp, line)) { vector vs = csvDigest(line); vector ws; for (int i = 0; i != nonOptions.size(); i++) { int pos = stringToInt(nonOptions[i]); if (pos > vs.size()) { cerr << "Position " << pos << " out of range for line" << endl; cerr << line << endl; continue; } ws.push_back(vs[pos-1]); } cout << csvConcat(ws) << endl; } return 0; } /* string s1("abc,d \"e,f\", ghi "); string s2(" 123, 4 \"56"); vector v1 = csvDigest(s1); vector v2 = csvDigest(s2); for (int i = 0; i != v1.size(); i++) { cout << "#" << v1[i] << "#" << endl; } cout << endl; for (int i = 0; i != v2.size(); i++) { cout << "#" << v2[i] << "#" << endl; } */ spark-2012.0.deb/victor/vct/src/refine-types.cc0000644000175000017500000014630711753202341020262 0ustar eugeneugen//======================================================================== //======================================================================== // REFINE-TYPES.CC //======================================================================== //======================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ //======================================================================== //======================================================================== // Type Refinement //======================================================================== //======================================================================== /* Type refinement =============== Phases A Ensure all formulas fully typed. Add notion of `type name'. Applicable to TYPE_IDs and interpreted types such as BOOL_TY (QFOL), BIT_TY (FOL), INT_TY and REAL_TY. B Build map from named types to type representation structures. C Make pass over all rules and goal formulas Do uniformly - no specific code for particular types or operators. o Relativise quantifiers, as needed o Fix equalities, as needed D Make pass over all function, constant and predicate declarations Do uniformly - no specific code for particular types or operators. o update declarations - for now, the types need no updating since not renamed - Also declarations themselves not renamed o Add new rules - constant, function subtyping, if non-trivial - function, arity>0 predicate functionality, if non-trivial o update existing rules? E Make pass over all named types. This is proxy for map over type decls. Is better since includes also the interpreted types for which there are no declarations. Changes for optimisation purposes are flagged with `might'. 
For each named type T: o update associated declaration/definition - For now, reuse typename - For now, only need to change for Subrange type o add new context declarations + - (array) might add any_element decl. - (bit) add type definition - (ALL) add decls for mem_T and equiv_T if non-trivial o update existing rules - (array) might modify rule for const_A operator [how is this to be located?] - (array) might modify subtyping rule for const_A operator [But what if hasn't been even generated yet?] - (bit) might modify axioms for bitops - (bit) might modify axioms for all bit-valued relations (both user defined and built-ins) - (bit) might modify subtyping o add new rules - (array) might add subtyping and functionality axioms for built-ins select and update - (array) might add axiom for subtyping of any_element. - (bit) add defs for TERM_TRUE and TERM_FALSE - (ALL) add definitions for mem_T and equiv_T if non-trivial F Make pass over all rules and goal formulas o (bit) Eliminate b2p operator (only *after* equalities fixed) [This really doesn't belong to body of type refinement] */ //======================================================================== // TypeRep Class //======================================================================== // Class for type representations class TypeRep { private: string typeName; // Name of type being represented bool memTrivial; bool equivTrivial; Node* newDecls; // New decls to be added at *start* of declaration list Node* newRules; public: //-------------------------------------------------------------------- // Constructors and updaters //-------------------------------------------------------------------- TypeRep(const string& TyName, bool memTriv, bool equivTriv) : typeName(TyName), memTrivial(memTriv), equivTrivial(equivTriv) { newDecls = new Node(SEQ); newRules = new Node(SEQ); } void addNewDecl(Node* d) { newDecls->addChild(d); } void addNewRule(Node* r) { newRules->addChild(r); } //-------------------------------------------------------------------- // Accessors //-------------------------------------------------------------------- // Node* getBaseType(); string getTypeName() { return typeName;} bool hasMemTrivial() { return memTrivial;} bool hasEquivTrivial() { return equivTrivial;} Node* getNewRules() { return newRules; } Node* getNewDecls() {return newDecls; } //-------------------------------------------------------------------- // General instance methods //-------------------------------------------------------------------- Node* mkMemPred(Node* arg); Node* mkMemPredDecl(); Node* mkMemPredAxiom(Node* rhs); // Make instance of equivalence relation Node* mkEquivPred(Node* arg1, Node* arg2); Node* mkEquivPredDecl(); Node* mkEquivPredAxiom(Node* rhs); // Make application of bit-valued equivalence function Node* mkBitEquivFun(Node* arg1, Node* arg2); Node* mkBitEquivFunDecl(); Node* mkBitEquivFunAxiom(); //-------------------------------------------------------------------- // Static members //-------------------------------------------------------------------- static bool FOL; static TypeRep* boolTypeRep; }; // END TypeRep //-------------------------------------------------------------------- // Static data member definitions //-------------------------------------------------------------------- TypeRep* TypeRep::boolTypeRep; bool TypeRep::FOL = false; //-------------------------------------------------------------------- // Instance method definitions //-------------------------------------------------------------------- // Let = 
nameToType() Node* TypeRep::mkMemPred(Node* arg) { if (memTrivial) { return new Node(TRUE); } else { Kind k = FOL ? PRED_AP : FUN_AP; return new Node(k, typeName + "___member", arg); } } // Make DECL_FUN{___member} (SEQ ) BOOL_TY Node* TypeRep::mkMemPredDecl() { return new Node(DECL_FUN, typeName + "___member", new Node (SEQ, nameToType(typeName)), new Node (BOOL_TY) ); } // Make: All x:. ___member(x) <=> Node* TypeRep::mkMemPredAxiom(Node* rhs) { Node* lhs = mkMemPred(new Node (VAR, "x")); return new Node(FORALL, new Node (SEQ, new Node (DECL, "x", nameToType(typeName))), boolTypeRep->mkEquivPred(lhs,rhs) ); } Node* TypeRep::mkEquivPred(Node* arg1, Node* arg2) { if (FOL && typeName == string("boolean")) { // Useful special case, given how, in FOL case, BOOL_TY is used in // range type position of function declaration to signify a predicate // is being declared. return new Node(IFF, arg1, arg2); } else if (equivTrivial) { return new Node(EQ, "", arg1, arg2, nameToType(typeName)); } else { Kind k = (FOL) ? PRED_AP : FUN_AP; return new Node(k, typeName + "___equiv", arg1, arg2); } } // Make DECL_FUN{___equiv} (SEQ ) BOOL_TY Node* TypeRep::mkEquivPredDecl() { return new Node(DECL_FUN, typeName + "___equiv", new Node (SEQ, nameToType(typeName), nameToType(typeName)), new Node (BOOL_TY) ); } // Make: All x,y:. ___equiv(x,y) <=> Node* TypeRep::mkEquivPredAxiom(Node* rhs) { Node* lhs = mkEquivPred(new Node (VAR, "x"), new Node (VAR, "y")); return new Node(FORALL, new Node (SEQ, new Node (DECL, "x", nameToType(typeName)), new Node (DECL, "y", nameToType(typeName)) ), boolTypeRep->mkEquivPred(lhs,rhs) ); } Node* TypeRep::mkBitEquivFun(Node* arg1, Node* arg2) { if (equivTrivial) { return new Node(TERM_EQ, "", arg1, arg2, nameToType(typeName)); } else { return new Node(FUN_AP, typeName + "___bit_equiv", arg1, arg2); } } Node* TypeRep::mkBitEquivFunDecl() { return new Node(DECL_FUN, typeName + "___bit_equiv", new Node (SEQ, nameToType(typeName), nameToType(typeName)), new Node (BIT_TY) ); } Node* TypeRep::mkBitEquivFunAxiom() { Node* lhs = new Node(TO_PROP, mkBitEquivFun(new Node (VAR, "x"), new Node (VAR, "y")) ); Node* rhs = mkEquivPred(new Node (VAR, "x"), new Node (VAR, "y")); return new Node(FORALL, new Node (SEQ, new Node (DECL, "x", nameToType(typeName)), new Node (DECL, "y", nameToType(typeName)) ), boolTypeRep->mkEquivPred(lhs,rhs) ); } //+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // RefineTypes class //+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Class to hold state and methods for phases of refinement. 
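//
// As a hedged illustration of what the refinement below produces (the type
// name "percent" is hypothetical): for a subrange definition
// DEF_TYPE{percent} (SUBRANGE_TY 0 100), initialiseTypeRepMap() registers a
// subtype TypeRep and emits
//
//   DECL_FUN{percent___member} (SEQ percent) BOOL_TY
//   All x:percent. percent___member(x) <=> 0 <= x and x <= 100
//
// and refineFormulas() then relativises quantifiers over percent:
//
//   FORALL (SEQ (DECL{i} percent)) P(i)
//     -->  FORALL (SEQ (DECL{i} percent)) percent___member(i) => P(i)
//
// A minimal sketch of driving TypeRep directly, assuming TypeRep::boolTypeRep
// has already been set up as initialiseTypeRepMap() does:
//
//   TypeRep percentRep("percent", false, true);   // membership non-trivial
//   Node* rhs = new Node(AND,
//       new Node(I_LE, new Node(NATNUM, "0"), new Node(VAR, "x")),
//       new Node(I_LE, new Node(VAR, "x"), new Node(NATNUM, "100")));
//   Node* memberAxiom = percentRep.mkMemPredAxiom(rhs);
//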
class RefineTypes { private: Node* unit; FDLContext* ctxt; map typeReps; public: RefineTypes(FDLContext* c, Node* n) { ctxt = c; unit = n; TypeRep::FOL = ctxt->hasFeature("FOL"); } TypeRep* insertTypeRep(const string& typeName, bool memTriv, bool EquivTriv) { map::iterator i = typeReps.insert(make_pair(typeName, TypeRep(typeName, memTriv, EquivTriv) )).first; return &(i->second); } TypeRep* insertTrivialTypeRep(const string& typeName) { return insertTypeRep(typeName, true, true); } TypeRep* insertSubtypeTypeRep(const string& typeName) { return insertTypeRep(typeName, false, true); } TypeRep* insertQuotientTypeRep(const string& typeName) { return insertTypeRep(typeName, true, false); } TypeRep* insertGeneralTypeRep(const string& typeName) { return insertTypeRep(typeName, false, false); } TypeRep* lookupTypeRep(const string& typeName); bool allSubnodesMemTrivial(Node* typeTuple); bool allSubnodesEquivTrivial(Node* typeTuple); void initialiseTypeRepMap(); void refineFormulas(); void addSubtypingAndFunctionalityRules(); void addSpecialDeclsAndRules(); }; // END RefineTypes //+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // RefineTypes methods //+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ TypeRep* RefineTypes::lookupTypeRep(const string& typeName) { map::iterator i = typeReps.find(typeName); if (i == typeReps.end()) { printMessage(ERRORm, "RefineTypes::lookupTypeRep: lookup failed for typeName " + typeName ); return new TypeRep(typeName, true, true); } else { return & (i->second); } } //------------------------------------------------------------------------- // allSubnodesMemTrivial //------------------------------------------------------------------------- // Assumes typeTuple is of one of forms // TUPLE_TY T1 ... Tn // SEQ T1 ... Tn // RECORD_TY D1 ... Dn where Di = DECL{} Ti // // and Ti are all named types. // Would be nice to code up in HO Lang... bool RefineTypes::allSubnodesMemTrivial(Node* typeTuple) { for (int i = 0; i != typeTuple->arity(); i++) { Node* childType = (typeTuple->kind == RECORD_TY) ? typeTuple->child(i)->child(0) : typeTuple->child(i); if (! lookupTypeRep(typeToName(childType))->hasMemTrivial()) return false; } return true; } //------------------------------------------------------------------------- // allSubnodesEquivTrivial //------------------------------------------------------------------------- bool RefineTypes::allSubnodesEquivTrivial(Node* typeTuple) { for (int i = 0; i != typeTuple->arity(); i++) { Node* childType = (typeTuple->kind == RECORD_TY) ? typeTuple->child(i)->child(0) : typeTuple->child(i); if (! lookupTypeRep(typeToName(childType))->hasEquivTrivial()) return false; } return true; } //------------------------------------------------------------------------- // initialiseTypeRepMap //------------------------------------------------------------------------- void RefineTypes::initialiseTypeRepMap() { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Update static state of TypeRep class // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Rather ugly. But this saves passing in this info as an argument // to all the TypeRep constructors. 
TypeRep::FOL = ctxt->hasFeature("FOL"); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add special cases: int, real, bool // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - TypeRep::boolTypeRep = insertTrivialTypeRep("boolean"); insertTrivialTypeRep("integer"); insertTrivialTypeRep("real"); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add special case: bit // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - if (option("refine-bit-type-as-int-subtype")) { ctxt->addFeature("prim bit type has int interp"); TypeRep* t = insertSubtypeTypeRep("bit___type"); // Add DECL_FUN{bit___type___member} (SEQ BIT_TY) BOOL_TY t->addNewDecl(t->mkMemPredDecl()); // Add rule: // All x:bit. bit___type___member(x) <=> 0 <= x and x <= 1 Node* rhs = new Node(AND, new Node(I_LE, new Node(NATNUM, "0"), new Node(VAR, "x")), new Node(I_LE, new Node(VAR, "x"), new Node(NATNUM, "1"))); t->addNewRule(t->mkMemPredAxiom(rhs)); } else if (option("refine-bit-type-as-int-quotient")) { ctxt->addFeature("prim bit type has int interp"); TypeRep* t = insertQuotientTypeRep("bit___type"); // Add DECL_FUN{bit___type___equiv} (SEQ BIT_TY BIT_TY) BOOL_TY t->addNewDecl(t->mkEquivPredDecl()); // Add rule: // All x,y:bit. bit___type___equiv(x,y) <=> (b2p(x) <=> b2p(y)) Node* rhs = new Node(IFF, new Node(TO_PROP, new Node(VAR, "x")), new Node(TO_PROP, new Node(VAR, "y")) ); t->addNewRule(t->mkEquivPredAxiom(rhs)); } else { insertTrivialTypeRep("bit___type"); } // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add entry for each type decl // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - for (int i = 0; i != ctxt->typeSeq.arity(); i++) { Node* typeDecl = ctxt->typeSeq.child(i); if (typeDecl->arity() == 0) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // DEF_TYPE{id} - uninterpreted type // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - if (option("refine-uninterpreted-types")) { TypeRep* t = insertSubtypeTypeRep(typeDecl->id); // Add DECL_FUN{id} (SEQ TYPE_ID{id}) BOOL_TY t->addNewDecl(t->mkMemPredDecl()); } else { insertTrivialTypeRep(typeDecl->id); } } else // Know typeDecl->arity() > 0 if (typeDecl->child(0)->kind == TYPE_ID || typeDecl->child(0)->kind == INT_TY || typeDecl->child(0)->kind == REAL_TY || typeDecl->child(0)->kind == BIT_TY || typeDecl->child(0)->kind == BOOL_TY ) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Def type is defining alias. // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // DEF_TYPE{new-id} TYPE_ID{old-id} // DEF_TYPE{new-id} INT_TY // DEF_TYPE{new-id} REAL_TY // DEF_TYPE{new-id} BIT_TY // DEF_TYPE{new-id} BOOL_TY // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - string newTypeId = typeDecl->id; string oldTypeName = typeToName(typeDecl->child(0)); TypeRep* oldTypeIdRep = lookupTypeRep(oldTypeName); TypeRep* newTypeIdRep = insertTypeRep(newTypeId, oldTypeIdRep->hasMemTrivial(), oldTypeIdRep->hasEquivTrivial()); if (!oldTypeIdRep->hasMemTrivial()) { // Add DECL_FUN{___member} (SEQ TYPE_ID{new-id}) BOOL_TY newTypeIdRep->addNewDecl(newTypeIdRep->mkMemPredDecl()); // Add rule: // All x:. 
___member(x) <=> ___member(x) Node* rhs = oldTypeIdRep->mkMemPred(new Node(VAR, "x")); newTypeIdRep->addNewRule(newTypeIdRep->mkMemPredAxiom(rhs)); } if (!oldTypeIdRep->hasEquivTrivial()) { // Add DECL_FUN{___equiv} (SEQ TYPE_ID{new-id} // TYPE_ID{new-id} // ) BOOL_TY newTypeIdRep->addNewDecl(newTypeIdRep->mkEquivPredDecl()); // Add rule: // All x,y:bit. ___equiv(x,y) <=> ___equiv(x) Node* rhs = oldTypeIdRep->mkEquivPred(new Node(VAR, "x"), new Node(VAR, "y")); newTypeIdRep->addNewRule(newTypeIdRep->mkEquivPredAxiom(rhs)); } } else if (typeDecl->child(0)->kind == SUBRANGE_TY) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // DEF_TYPE{id} (SUBRANGE_TY{t-id} ) // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - if (option("refine-int-subrange-type")) { TypeRep* t = insertSubtypeTypeRep(typeDecl->id); // Add DECL_FUN{___member} (SEQ TYPE_ID{t-id}) BOOL_TY t->addNewDecl(t->mkMemPredDecl()); // Add rule: // All x:. ___member(x) <=> <= x /\ x <= Node* lower = typeDecl->child(0)->child(0); Node* upper = typeDecl->child(0)->child(1); Node* rhs = new Node(AND, new Node(I_LE, lower, new Node(VAR, "x")), new Node(I_LE, new Node(VAR, "x"), upper)); t->addNewRule(t->mkMemPredAxiom(rhs)); } else { insertTrivialTypeRep(typeDecl->id); } } else if (typeDecl->child(0)->kind == ENUM_TY) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // DEF_TYPE{id} (ENUM_TY{type-id} ... ) // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Expecting that usually enumerated types are abstracted or // eliminated by this stage. insertTrivialTypeRep(typeDecl->id); } else if (typeDecl->child(0)->kind == ARRAY_TY) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // DEF_TYPE{} (ARRAY_TY{} (TUPLE_TY S1 ... Sn) T) // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Node* indexTypes = typeDecl->child(0)->child(0); Node* elementType = typeDecl->child(0)->child(1); TypeRep* elementTypeRep = lookupTypeRep(typeToName(elementType)); bool allIndexTypesMemTrivial = allSubnodesMemTrivial(indexTypes); bool allIndexTypesEquivTrivial = allSubnodesEquivTrivial(indexTypes); if (! allIndexTypesEquivTrivial) { printMessage(ERRORm, "Type refinement cannot handle array type " + (typeDecl->id) + "with non-trivial equivalence" + ENDLs + "on one or more index types" ); insertTrivialTypeRep(typeDecl->id); continue; // Go on to next type decl } // Index types all have trivial equiv rel from now on bool arrayEquivTrivial; bool arrayMemTrivial; if (option("refine-array-types-with-quotient")) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Out of bounds elements unconstrained // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - arrayMemTrivial = elementTypeRep->hasMemTrivial(); arrayEquivTrivial = allIndexTypesMemTrivial && elementTypeRep->hasEquivTrivial(); TypeRep* arrayTypeRep = insertTypeRep(typeDecl->id, arrayMemTrivial, arrayEquivTrivial); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add declaration and defining rule for mem predicate // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - if (!arrayMemTrivial) { // Add DECL_FUN{___member} (SEQ ) BOOL_TY arrayTypeRep->addNewDecl(arrayTypeRep->mkMemPredDecl()); // RHS of def is // // FORALL (SEQ (DECL{s1} S1) ... (DECL{sn} Sn)) // mem_S1(s1) /\ ... /\ mem_Sn(sn) // => // mem_T( select_A(x, (SEQ s1 ... 
sn)) // Node* forallDecls = new Node(SEQ); Node* hyps = new Node(AND); Node* indexes = new Node(SEQ); for (int i = 0; i != indexTypes->arity(); i++) { Node* indexType = indexTypes->child(i); TypeRep* t = lookupTypeRep(typeToName(indexType)); string varName = "s" + intToString(i); forallDecls->addChild(new Node (DECL, varName, indexTypes->child(i)->copy())); indexes->addChild(new Node(VAR, varName)); if (!t->hasMemTrivial()) { Node* memPred = t->mkMemPred(new Node(VAR, varName)); hyps->addChild(memPred); } } Node* concl = elementTypeRep->mkMemPred(new Node (ARR_ELEMENT, typeDecl->id, new Node(VAR, "x"), indexes) ); Node* rhs = new Node(FORALL, forallDecls, mkGeneralImplies(hyps, concl)); arrayTypeRep->addNewRule(arrayTypeRep->mkMemPredAxiom(rhs)); } // END IF !arrayMemTrival // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add declaration and defining rule for equiv predicate // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - if (!arrayEquivTrivial) { // Add DECL_FUN{___equiv} (SEQ ) BOOL_TY arrayTypeRep->addNewDecl(arrayTypeRep->mkEquivPredDecl()); // RHS of def is // // FORALL (SEQ (DECL{s1} S1) ... (DECL{sn} Sn)) // mem_S1(s1) /\ ... /\ mem_Sn(sn) // => // equiv_T(select_A(x, (SEQ s1 ... sn)), // select_A(y, (SEQ s1 ... sn))) // Node* forallDecls = new Node(SEQ); Node* hyps = new Node(AND); Node* indexes = new Node(SEQ); for (int i = 0; i != indexTypes->arity(); i++) { Node* indexType = indexTypes->child(i); TypeRep* t = lookupTypeRep(typeToName(indexType)); string varName = "s" + intToString(i); forallDecls->addChild(new Node (DECL, varName, indexTypes->child(i)->copy())); indexes->addChild(new Node(VAR, varName)); if (!t->hasMemTrivial()) { Node* memPred = t->mkMemPred(new Node(VAR, varName)); hyps->addChild(memPred); } } Node* concl = elementTypeRep->mkEquivPred(new Node (ARR_ELEMENT, typeDecl->id, new Node(VAR, "x"), indexes), new Node (ARR_ELEMENT, typeDecl->id, new Node(VAR, "y"), indexes->copy()) ); Node* rhs = new Node(FORALL, forallDecls, mkGeneralImplies(hyps, concl)); arrayTypeRep->addNewRule(arrayTypeRep->mkEquivPredAxiom(rhs)); } // END IF !arrayEquivTrivial } else { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Out of bounds elements constrained // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - arrayMemTrivial = allIndexTypesMemTrivial && elementTypeRep->hasMemTrivial(); arrayEquivTrivial = elementTypeRep->hasEquivTrivial(); TypeRep* arrayTypeRep = insertTypeRep(typeDecl->id, arrayMemTrivial, arrayEquivTrivial); string defaultElementName = typeDecl->id + "___default_arr_element"; Node* defaultElementDecl = new Node (DEF_CONST, defaultElementName, elementType->copy()); arrayTypeRep->addNewDecl(defaultElementDecl); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add declaration and defining rule for mem predicate // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - if (!arrayMemTrivial) { // Add DECL_FUN{___member} (SEQ ) BOOL_TY arrayTypeRep->addNewDecl(arrayTypeRep->mkMemPredDecl()); // RHS of def is // // FORALL (SEQ (DECL{s1} S1) ... (DECL{sn} Sn)) // mem_S1(s1) /\ ... /\ mem_Sn(sn) // => // mem_T( select_A(x, (SEQ s1 ... sn)) // AND // NOT (mem_S1(s1) /\ ... /\ mem_Sn(sn)) // => // equiv_T( select_A(x, (SEQ s1 ... sn), default_el_A) // // //We could use EQ over T rather than equiv_T. //May be easier to reason with??? 
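// (For concreteness: with option refine-array-types-with-weak-extension-constraint
//  the extension constraint built below is
//    equiv_T(select_A(x, (SEQ s1 ... sn)), A___default_arr_element)
//  and otherwise it is plain equality at the element type:
//    select_A(x, (SEQ s1 ... sn)) = A___default_arr_element )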
// Allow for either Node* forallDecls = new Node(SEQ); Node* hyps = new Node(AND); Node* indexes = new Node(SEQ); for (int i = 0; i != indexTypes->arity(); i++) { Node* indexType = indexTypes->child(i); TypeRep* t = lookupTypeRep(typeToName(indexType)); string varName = "s" + intToString(i); forallDecls->addChild(new Node (DECL, varName, indexTypes->child(i)->copy())); indexes->addChild(new Node(VAR, varName)); if (!t->hasMemTrivial()) { Node* memPred = t->mkMemPred(new Node(VAR, varName)); hyps->addChild(memPred); } } Node* selectExp = new Node(ARR_ELEMENT, typeDecl->id, new Node(VAR, "x"), indexes); Node* defaultElement = new Node(CONST, defaultElementName); Node* extConstraint = option("refine-array-types-with-weak-extension-constraint") ? elementTypeRep->mkEquivPred(selectExp->copy(), defaultElement) : new Node(EQ, "", selectExp->copy(), defaultElement, elementType->copy() ); Node* concl = new Node(AND, mkGeneralImplies(hyps, elementTypeRep ->mkMemPred(selectExp) ), mkGeneralImplies(new Node (NOT, hyps->copy()), extConstraint )); Node* rhs = new Node(FORALL, forallDecls, concl); arrayTypeRep->addNewRule(arrayTypeRep->mkMemPredAxiom(rhs)); } // END IF !arrayMemTrival // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add declaration and defining rule for equiv predicate // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - if (!arrayEquivTrivial) { // Add DECL_FUN{___equiv} (SEQ ) BOOL_TY arrayTypeRep->addNewDecl(arrayTypeRep->mkEquivPredDecl()); // RHS of def is // // FORALL (SEQ (DECL{s1} S1) ... (DECL{sn} Sn)) // equiv_T(select_A(x, (SEQ s1 ... sn)), // select_A(y, (SEQ s1 ... sn))) // Node* forallDecls = new Node(SEQ); Node* indexes = new Node(SEQ); for (int i = 0; i != indexTypes->arity(); i++) { // Node* indexType = indexTypes->child(i); string varName = "s" + intToString(i); forallDecls->addChild(new Node (DECL, varName, indexTypes->child(i)->copy())); indexes->addChild(new Node(VAR, varName)); } Node* concl = elementTypeRep->mkEquivPred(new Node (ARR_ELEMENT, typeDecl->id, new Node(VAR, "x"), indexes), new Node (ARR_ELEMENT, typeDecl->id, new Node(VAR, "y"), indexes->copy()) ); Node* rhs = new Node(FORALL, forallDecls, concl); arrayTypeRep->addNewRule(arrayTypeRep->mkEquivPredAxiom(rhs)); } // END IF !arrayEquivTrivial } } else if (typeDecl->child(0)->kind == RECORD_TY) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // DEF_TYPE{R} (RECORD_TY{R} D1 ... Dn) // where Di = DECL{fi} Ti // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - bool recordMemTrivial = allSubnodesMemTrivial(typeDecl->child(0)); bool recordEquivTrivial = allSubnodesEquivTrivial(typeDecl->child(0)); TypeRep* recordTypeRep = insertTypeRep(typeDecl->id, recordMemTrivial, recordEquivTrivial ); Node* recordTy = typeDecl->child(0); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add declaration and defining rule for mem predicate // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - if (!recordMemTrivial) { // Add DECL_FUN{___member} (SEQ ) BOOL_TY recordTypeRep->addNewDecl(recordTypeRep->mkMemPredDecl()); // RHS of def is // // mem_T1(RCD_ELEMENT{f1} x R) // /\ ... 
/\ mem_Tn(RCD_ELEMENT{fn} x R) Node* rhs = new Node(AND); for (int i = 0; i != recordTy->arity(); i++) { Node* decl = recordTy->child(i); Node* fieldTy = decl->child(0); TypeRep* t = lookupTypeRep(typeToName(fieldTy)); Node* conjunct = t->mkMemPred(new Node(RCD_ELEMENT, decl->id, new Node(VAR, "x"), new Node(TYPE_PARAM, recordTy->id) )); rhs->addChild(conjunct); } if (rhs->arity() == 1) rhs = rhs->child(0); recordTypeRep->addNewRule(recordTypeRep->mkMemPredAxiom(rhs)); } // END IF !recordMemTrivial // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add declaration and defining rule for equiv predicate // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - if (!recordEquivTrivial) { // Add DECL_FUN{___equiv} (SEQ ) BOOL_TY recordTypeRep->addNewDecl(recordTypeRep->mkEquivPredDecl()); // RHS of def is // // equiv_T1 (RCD_ELEMENT{f1} x R) (RCD_ELEMENT{f1} y R) // /\ ... /\ ... // equiv_Tn (RCD_ELEMENT{fn} x R) (RCD_ELEMENT{fn} y R) Node* rhs = new Node(AND); for (int i = 0; i != recordTy->arity(); i++) { Node* decl = recordTy->child(i); Node* fieldTy = decl->child(0); TypeRep* t = lookupTypeRep(typeToName(fieldTy)); Node* conjunct = t->mkEquivPred(new Node(RCD_ELEMENT, decl->id, new Node(VAR, "x"), new Node(TYPE_PARAM, recordTy->id) ), new Node(RCD_ELEMENT, decl->id, new Node(VAR, "y"), new Node(TYPE_PARAM, recordTy->id) ) ); rhs->addChild(conjunct); } if (rhs->arity() == 1) rhs = rhs->child(0); recordTypeRep->addNewRule(recordTypeRep->mkEquivPredAxiom(rhs)); } } else { printMessage(ERRORm, "RefineTypes::initialiseTypeRepMap: unrecognised type " + kindString(typeDecl->child(0)->kind) ); } // END switch *** } // END for loop over type defs return; } //------------------------------------------------------------------------- // refineFormulas //------------------------------------------------------------------------- // Relativise quantifiers // Fix equalities, both bit-valued and propositional // Local class for closure. Need to map function over formula tree // that carries with it a pointer to the RefineTypes object containing // the typeReps map. class RefineFormulaClosure { private: RefineTypes* refineTypesState; public: RefineFormulaClosure(RefineTypes* r) : refineTypesState(r) {} Node* operator () (Node* n); }; // FORALL (SEQ d1 .. dn) B --> // FORALL (SEQ d1 .. dn) (Mem_T1(x1) /\ ... /\ Mem_T2(xn)) => B // EXISTS (SEQ d1 .. dn) B --> // EXISTS (SEQ d1 .. dn) Mem_T1(x1) /\ ... /\ Mem_T2(xn) /\ B // // where di = DECL{xi} Ti // // Mem_Ti omitted if it is trivial. // // Node* RefineFormulaClosure::operator() (Node* n) { if (n->kind == FORALL || n->kind == EXISTS) { Node* decls = n->child(0); Node* conditions = new Node(AND); for (int i = 0; i != decls->arity(); i++) { Node* decl = decls->child(i); Node* ty = decl->child(0); TypeRep* t = refineTypesState->lookupTypeRep(typeToName(ty)); if (!t->hasMemTrivial()) { Node* memPred = t->mkMemPred(new Node(VAR, decl->id)); conditions->addChild(memPred); } } if (conditions->arity() > 0) { printOnOption("trace-refine-types-quant-relativisation", "Relativising " + kindString(n->kind) + " over " + n->child(0)->toString() ); if (n->kind == FORALL) { Node* hyps = (conditions->arity() >= 2) ? 
conditions : conditions->child(0); n->child(1) = new Node(IMPLIES, hyps, n->child(1)); } else { // n->kind == EXISTS conditions->addChild(n->child(1)); n->child(1) = conditions; } } } // EQ t1 t2 T else if (n->kind == EQ) { TypeRep* t = refineTypesState->lookupTypeRep(typeToName(n->child(2))); if (!t->hasEquivTrivial()) { Node* newEquivPredAp = t->mkEquivPred(n->child(0), n->child(1)); printOnOption("trace-refine-types-eq-refinement", "Refining equality over " + t->getTypeName()); return newEquivPredAp; } } else if (n->kind == TERM_EQ) { TypeRep* t = refineTypesState->lookupTypeRep(typeToName(n->child(2))); if (!t->hasEquivTrivial()) { printOnOption("trace-refine-types-bit-eq-refinement", "Refining bit-valued equality over " + t->getTypeName()); return t->mkBitEquivFun(n->child(0), n->child(1)); } } return n; } void RefineTypes::refineFormulas() { RefineFormulaClosure c(this); unit->mapOver1(c); } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // addSubtypingAndFunctionalityRules() //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ void RefineTypes::addSubtypingAndFunctionalityRules() { Node* rules = unit->child(1); //------------------------------------------------------------------------ // Add rules for constant subtyping //------------------------------------------------------------------------ map::iterator i; for (i = ctxt->constMap.begin(); i != ctxt->constMap.end(); i++) { Node* decl = i->second; // DEF_CONST{id} type [exp] TypeRep* t = lookupTypeRep(typeToName(decl->child(0))); if (!t->hasMemTrivial()) { rules->addChild(t->mkMemPred(new Node(CONST, decl->id))); } } //------------------------------------------------------------------------ // Add rules for FDL global variable subtyping //------------------------------------------------------------------------ for (i = ctxt->varMap.begin(); i != ctxt->varMap.end(); i++) { Node* decl = i->second; // DECL_VAR{id} TypeRep* t = lookupTypeRep(typeToName(decl->child(0))); if (!t->hasMemTrivial()) { rules->addChild(t->mkMemPred(new Node(CONST, decl->id))); } } //------------------------------------------------------------------------ // Add rules for function subtyping and functionality, and relation // functionality //------------------------------------------------------------------------ for (i = ctxt->funMap.begin(); i != ctxt->funMap.end(); i++) { Node* decl = i->second; // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Process function declaration DECL_FUN{f} (SEQ S1 ... Sn) T // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - string declId = decl->id; Node* domTypes = decl->child(0); Node* rangeType = decl->child(1); TypeRep* rangeTypeRep = lookupTypeRep(typeToName(rangeType)); if (!option("no-subtyping-axioms") && !rangeTypeRep->hasMemTrivial()) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add subtyping axiom // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // FORALL (SEQ (DECL{x1} S1) ... (DECL{xn} Sn)) // Mem_S1(x1) /\ ... /\ Mem_Sn(xn) => MemT(f(x1,...,xn)) // // or // // FORALL (SEQ (DECL{x1} S1) ... 
(DECL{xn} Sn)) MemT(f(x1,...,xn)) Node* forallDecls = new Node(SEQ); Node* hyps = new Node(AND); Node* funAp = new Node(FUN_AP, declId); for (int i = 0; i != domTypes->arity(); i++) { string varName = "x" + intToString(i); forallDecls->addChild(new Node (DECL, varName, domTypes->child(i)->copy())); funAp->addChild(new Node(VAR, varName)); if (!option("strong-subtyping-axioms")) { Node* ty = domTypes->child(i); TypeRep* t = lookupTypeRep(typeToName(ty)); if (!t->hasMemTrivial()) { Node* memPred = t->mkMemPred(new Node(VAR, varName)); hyps->addChild(memPred); } } } Node* memPredFunAp = rangeTypeRep->mkMemPred(funAp); Node* body = mkGeneralImplies(hyps, memPredFunAp); Node* subtypingAxiom = new Node(FORALL, forallDecls, body); rules->addChild(subtypingAxiom); } // Functionality lemma. if (!option("no-functionality-axioms") && !allSubnodesEquivTrivial(domTypes)) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add functionality axiom // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // FORALL (SEQ (DECL{x1} S1) (DECL{y1} S1) ... // (DECL{xn} Sn) (DECL{yn} Sn) // ) // Mem_S1(x1) /\ Mem_S1(y1) /\ Equiv_S1(x1, y1) // /\ ... // /\ Mem_Sn(x1) /\ Mem_Sn(y1) /\ xn =_Sn yn // => Equiv_T(f(x1,...,xn), f(y1,...,yn)) // Node* forallDecls = new Node(SEQ); Node* hyps = new Node(AND); Kind apKind; if (rangeType->kind == BOOL_TY && ctxt->hasFeature("FOL")) apKind = PRED_AP; else apKind = FUN_AP; Node* funApX = new Node(apKind, decl->id); Node* funApY = new Node(apKind, decl->id); for (int i = 0; i != domTypes->arity(); i++) { string varNameXi = "x" + intToString(i); string varNameYi = "y" + intToString(i); forallDecls->addChild(new Node (DECL, varNameXi, domTypes->child(i)->copy())); forallDecls->addChild(new Node (DECL, varNameYi, domTypes->child(i)->copy())); funApX->addChild(new Node(VAR, varNameXi)); funApY->addChild(new Node(VAR, varNameYi)); Node* ty = domTypes->child(i); TypeRep* t = lookupTypeRep(typeToName(ty)); if (!t->hasMemTrivial()) { Node* memPredXi = t->mkMemPred(new Node(VAR, varNameXi)); Node* memPredYi = t->mkMemPred(new Node(VAR, varNameYi)); hyps->addChild(memPredXi); hyps->addChild(memPredYi); } Node* equivPredXYi = t->mkEquivPred(new Node(VAR, varNameXi), new Node(VAR, varNameYi)); hyps->addChild(equivPredXYi); } Node* equivPredFunAps = rangeTypeRep->mkEquivPred(funApX, funApY); Node* functionalityAxiom = new Node(FORALL, forallDecls, mkGeneralImplies(hyps, equivPredFunAps) ); rules->addChild(functionalityAxiom); } // END add functionality axiom } // END iteration over FUN_DECLs } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // addSpecialDeclsAndRules(); //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ void RefineTypes::addSpecialDeclsAndRules() { Node* rules = unit->child(1); for (map::iterator i = typeReps.begin(); i != typeReps.end(); i++ ) { TypeRep* t = &(i->second); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add new entries to context // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Node* decls = t->getNewDecls(); for (int i = 0; i != decls->arity(); i++) { ctxt->insert(decls->child(i)); } // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add new rules // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - if (!option("disable-refinement-rules")) { rules->appendChildren(t->getNewRules()); } if (option("refine-bit-eq-equiv") && !t->hasEquivTrivial()) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // 
Add decl for bit-valued equiv rel // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ctxt->insert(t->mkBitEquivFunDecl()); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add defining axiom for bit-valued equiv rel // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - rules->addChild(t->mkBitEquivFunAxiom()); } } return; } void abstractSubrangeTypeDecl (Node* n) { if (n->kind == DEF_TYPE && n->arity() == 1 && n->child(0)->kind == SUBRANGE_TY) { n->child(0) = new Node(INT_TY); } } //======================================================================== // Master function for invoking type refinement //======================================================================== void refineTypes (FDLContext* ctxt, Node* unit) { RefineTypes state(ctxt, unit); state.initialiseTypeRepMap(); state.refineFormulas(); if (!option("disable-subtyping-functionality-rules")) state.addSubtypingAndFunctionalityRules(); state.addSpecialDeclsAndRules(); if (option("refine-int-subrange-type")) { ctxt->typeSeq.mapOver(abstractSubrangeTypeDecl); } return; } spark-2012.0.deb/victor/vct/src/smt-driver.cc0000644000175000017500000007330011753202341017734 0ustar eugeneugen//========================================================================== //========================================================================== // SMT-DRIVER.CC //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ //========================================================================== #include using std::endl; using std::cout; #include #include #include "smt-driver.hh" #include "utility.hh" #include "formatter.hh" //========================================================================== // Drive Goal //========================================================================== void SMTDriver::driveGoal(Node* decls, Node* rules, Node* hyps, Node* concl, UnitInfo* unitInfo, int goalNum, int currentConcl) { // --------------------------------------------------------------- // Initialise for solver invocation // --------------------------------------------------------------- printMessage(FINEm, "Considering concl"); string remarks; // for insertion into CSV report file. 
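// remarks is accumulated with appendCommaString() from utility.hh throughout
// this driver. A rough sketch of the assumed behaviour (the real definition
// lives in utility.cc and may differ in detail): append the new remark,
// inserting a comma separator when some remark text is already present, so
// each CSV record carries a single comma-separated remarks field:
//
//   void appendCommaString(string& s, const string& remark) {
//       if (s.empty()) s = remark;
//       else           s += ", " + remark;
//   }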
if (concl->kind == z::TRUE) { printMessage(FINEm, "Input concl is trivial"); remarks = "trivial concl"; } initGoal(unitInfo->getUnitName(), goalNum, currentConcl); // --------------------------------------------------------------- // Push decls, hyp and concl // --------------------------------------------------------------- try { // ----------------------------------------------------------- // Push decls // ----------------------------------------------------------- for ( int i = 0; i != decls->arity(); i++) { Node* decl = decls->child(i); Formatter::setFormatter(VanillaFormatter::getFormatter()); printMessage(FINESTm, "pushing decl to solver" + ENDLs + decl->toString()); addDecl(decl); } // ----------------------------------------------------------- // Push rules as hyps // ----------------------------------------------------------- for ( int currentRule = 1; currentRule <= rules->arity(); currentRule++) { Node* rule = rules->child(currentRule - 1); string currentRuleStr; if (rule->kind == z::RULE) { currentRuleStr = rule->id; rule = rule->child(0); } else { currentRuleStr = "R" + intToString(currentRule); } Formatter::setFormatter(VanillaFormatter::getFormatter()); printMessage(FINESTm, "pushing rule to solver" + ENDLs + rule->toString()); addRule(rule, currentRuleStr, remarks); } // ----------------------------------------------------------- // Push hyps // ----------------------------------------------------------- if (! option("skip-hyps") ) { for (int currentHyp = 1; currentHyp <= hyps->arity(); currentHyp++) { string currentHypStr ("H" + intToString(currentHyp)); Node* hyp = hyps->child(currentHyp-1); Formatter::setFormatter (VanillaFormatter::getFormatter()); printMessage(FINESTm, "Writing " + currentHypStr + ENDLs + hyp->toString() ); addHyp(hyp, currentHypStr, remarks); } // END: for each hyp in hyps } // ----------------------------------------------------------- // Push concl // ----------------------------------------------------------- if (! option("skip-concls") ) { Formatter::setFormatter(VanillaFormatter::getFormatter()); printMessage(FINESTm,"Pushing concl: " + ENDLs + concl->toString()); addConcl(concl, remarks); } } catch (std::runtime_error e) { printMessage(ERRORm, string("Push of decl, rule, hyp or concl failed\n") + e.what() ); goalSliceTime = "0"; appendCommaString(remarks, "exception in setup"); printCSVRecord("error", remarks); errorConcls++; finishSetup(); finaliseGoal(); return; } finishSetup(); printMessage(INFOm, "setup time: " + goalTimer.toString() + " s"); // --------------------------------------------------------------- // Invoke solver // --------------------------------------------------------------- if (! option("gstime-inc-setup")) goalTimer.restart(); int checkRepeats = option("check-goal-repeats") ? intOptionVal("check-goal-repeats") : 1; string remarksAtCheckStart(remarks); bool checkError = true; for (int i = 0; i < checkRepeats; i++) { remarks = remarksAtCheckStart; checkError = checkGoal(remarks); } goalSliceTime = goalTimer.toString(); if (checkError) { printMessage(ERRORm, "Error flagged on check of goal\n"); // Let results processing have chance to give further // info on error. 
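//
// For reference, the mapping applied below from the Status returned by
// getResults() to the CSV outcome column and the summary counters is:
//
//   TRUE            -> "true"       trueConcls++
//   UNPROVEN        -> "unproven"   unprovenConcls++
//   RESOURCE_LIMIT  -> "unproven"   unprovenConcls++, timeoutConcls++
//   ERROR           -> "error"      errorConcls++
//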
/* printCSVRecord("error", remarks); errorConcls++; finaliseGoal(); continue; */ } // --------------------------------------------------------------- // Process results // --------------------------------------------------------------- Status s = getResults(remarks); if (s == TRUE) { printCSVRecord("true", remarks); trueConcls++; } else if (s == UNPROVEN) { printCSVRecord("unproven", remarks); unprovenConcls++; } else if (s == RESOURCE_LIMIT) { printCSVRecord("unproven", remarks); unprovenConcls++; timeoutConcls++; } else {// s == ERROR printCSVRecord("error", remarks); errorConcls++; } finaliseGoal(); } //========================================================================== // Drive Unit //========================================================================== void SMTDriver::driveUnit(Node* unit, UnitInfo* unitInfo) { if (option("use-alt-solver-driver")) { return altDriveUnit(unit, unitInfo); } // --------------------------------------------------------------------- // Translate unit into solver-specific abstract syntax // --------------------------------------------------------------------- Node* solverUnit; try { solverUnit = translateUnit(unit); } catch (std::runtime_error e) { printMessage(ERRORm, "Exception in solver-specific translation" + ENDLs + e.what()); printCSVRecord("error", "solver translation failed"); return; } Formatter::setFormatter(VanillaFormatter::getFormatter()); printMessage(FINESTm, "Solver-specific translated unit:" + ENDLs + solverUnit->toString()); // --------------------------------------------------------------------- // Break out unit components // --------------------------------------------------------------------- Node* decls = solverUnit->child(0); Node* rules = solverUnit->child(1); Node* goals = solverUnit->child(2); // ------------------------------------------------------------------- // Loop for each goal in goals // ------------------------------------------------------------------- for (int goalNum = 1; goalNum <= goals->arity(); goalNum++) { if (option("goal") && intOptionVal("goal") != goalNum) { continue; } if (option("gtick")) { if (option("longtick")) { cout << " " << goalNum; } else { cout << ";"; } cout.flush(); } Node* goal = goals->child(goalNum-1); // Extract out components of goal->id and store in global vars // used for output messages updateCurrentGoalInfo(goal->id); currentConcl = 0; if (goal->arity() < 2) { // "*** true" goals printMessage(FINEm, "Input goal is trivial"); if (option("count-trivial-goals")) { printCSVRecord("true", "trivial goal"); trueConcls++; } continue; } Node* hyps = goal->child(0); Node* concls = goal->child(1); if (option("fuse-concls") && (option("fuse-unary-concls") || concls->arity() > 1) ) { concls->kind = z::AND; concls = new Node(z::CONCLS, concls); } if (option("hkinds")) currentHypsKinds = gatherKinds(hyps); // ------------------------------------------------------------------- // Loop for each concl in concls of goal // ------------------------------------------------------------------- for ( currentConcl = 1; currentConcl <= concls->arity(); currentConcl++) { if (option("concl") && intOptionVal("concl") != currentConcl) { continue; } if (option("ctick")) { if (option("longtick")) { cout << "." << currentConcl; } else { cout << "."; } cout.flush(); } Node* concl = concls->child(currentConcl-1); if (option("ckinds")) currentConclKinds = gatherKinds(concl); if (! 
unitInfo->include(goalNum, currentConcl)) { goalSliceTime = "0"; printCSVRecord("unproven", "excluded"); unprovenConcls++; continue; } int driveGoalRepeats = option("drive-goal-repeats") ? intOptionVal("drive-goal-repeats") : 1; goalTimer.restart(); for (int i = 0; i < driveGoalRepeats; i++) { driveGoal(decls, rules, hyps, concl, unitInfo, goalNum, currentConcl); } } // END for each concl in concls } // END for each goal in goals return; } //========================================================================== // Default definitions for solver API //========================================================================== Node* SMTDriver::translateUnit(Node* unit) { return unit; } bool SMTDriver::checkGoal(string& remarks) { return false; } SMTDriver::Status SMTDriver::getResults(string& remarks) { return UNPROVEN; } // Default implementation for online (API) solver interface SMTDriver::Status SMTDriver::check(string& remarks) { bool checkError = checkGoal(remarks); if (checkError) { printMessage(ERRORm, "Error flagged on check of goal\n"); } return getResults(remarks); } //========================================================================== // Alternative Driver // ========================================================================== // Idea is to allow for exploiting incrementality of solvers. Hope is // to get some performance improvement, partly because of // incrementality, partly because of reduced number of solver runs and // overhead of starting solver. // To add: if goal has multiple concls and are not fusing concls, // should only add hyps once. //========================================================================== // Drive Query Set //========================================================================== // Drive queries queryRecords[startQuery..endQuery-1] vector SMTDriver::driveQuerySet(UnitInfo* unitInfo, Node* unit, set excludedRules, int startQuery, int endQuery) { string remarks(queryRecords.at(startQuery).remarks); { int startGoalNum = queryRecords.at(startQuery).goalNum; int lastGoalNum = queryRecords.at(endQuery-1).goalNum; // Set globals used in messages currentGoalNumStr = intToString(startGoalNum); currentConcl = queryRecords.at(startQuery).conclNum; // Set up header of query set initQuerySet(unitInfo->getUnitName(), startGoalNum, currentConcl); if (option("gtick")) { if (option("longtick")) { if (startQuery + 1 == endQuery) { cout << " " << startGoalNum; } else { cout << " " << startGoalNum << "-" << lastGoalNum; } } else { cout << ";"; } cout.flush(); } } // --------------------------------------------------------------------- // Break out unit components // --------------------------------------------------------------------- Node* decls = unit->child(0); Node* rules = unit->child(1); Node* goals = unit->child(2); vector results; Timer queryTimer; // --------------------------------------------------------------------- // Start block for capturing online interface exceptions // --------------------------------------------------------------------- try { // ----------------------------------------------------------- // Push decls // ----------------------------------------------------------- for ( int i = 0; i != decls->arity(); i++) { Node* decl = decls->child(i); Formatter::setFormatter(VanillaFormatter::getFormatter()); printMessage(FINESTm, "pushing decl to solver" + ENDLs + decl->toString()); addDecl(decl); } // ----------------------------------------------------------- // Push rules // 
----------------------------------------------------------- for ( int r = 0; r < rules->arity(); r++) { if (setMember(r, excludedRules)) continue; Node* rule = rules->child(r); string currentRuleStr; if (rule->kind == z::RULE) { currentRuleStr = rule->id; rule = rule->child(0); } else { currentRuleStr = "R" + intToString(r+1); } Formatter::setFormatter(VanillaFormatter::getFormatter()); printMessage(FINESTm, "pushing rule to solver" + ENDLs + rule->toString()); addRule(rule, currentRuleStr, remarks); } // --------------------------------------------------------------------- // Push hyps and concls incrementally // --------------------------------------------------------------------- string declsRulesRemarks(remarks); // --------------------------------------------------------------------- // Loop over queries of queryset // --------------------------------------------------------------------- for (int query = startQuery; query < endQuery; query++) { remarks = declsRulesRemarks; int goalNum = queryRecords.at(query).goalNum; Node* goal = goals->child(goalNum-1); // Set global for message reporting currentGoalNumStr = intToString(goalNum); Node* hyps = goal->child(0); Node* concls = goal->child(1); int conclNum = queryRecords.at(query).conclNum; // Set global for message reporting currentConcl = conclNum; Node* queryConcl; if (conclNum == 0) { if (concls->arity() > 1) { queryConcl = new Node(z::AND); // Assume n-ary AND // supported by solver queryConcl->appendChildren(concls); } else { queryConcl = concls->child(0); } } else { queryConcl = concls->child(conclNum-1); } // ----------------------------------------------------------- // Push new empty assertion set onto assertion set stack // ----------------------------------------------------------- if (option("exploit-solver-incrementality")) push(); // ----------------------------------------------------------- // Push hyps // ----------------------------------------------------------- if (! option("skip-hyps") ) { for (int currentHyp = 1; currentHyp <= hyps->arity(); currentHyp++) { string currentHypStr ("H" + intToString(currentHyp)); Node* hyp = hyps->child(currentHyp-1); Formatter::setFormatter (VanillaFormatter::getFormatter()); printMessage(FINESTm, "Writing " + currentHypStr + ENDLs + hyp->toString() ); addHyp(hyp, currentHypStr, remarks); } // END: for each hyp in hyps } // END: if !skip-hyps option // ----------------------------------------------------------- // Push concl // ----------------------------------------------------------- if (! option("skip-concls") ) { Formatter::setFormatter(VanillaFormatter::getFormatter()); printMessage(FINESTm,"Pushing concl: " + ENDLs + queryConcl->toString()); addConcl(queryConcl, remarks); } // ----------------------------------------------------------- // Check // ----------------------------------------------------------- if (onlineInterface() && !option("gstime-inc-setup") && query > startQuery) { queryTimer.restart(); } Status status = check(remarks); string queryTime; if (onlineInterface()) queryTime = queryTimer.toString(); // ----------------------------------------------------------- // Pop assertion set stack // ----------------------------------------------------------- if (option("exploit-solver-incrementality")) pop(); // ----------------------------------------------------------- // Record status if online interface // ----------------------------------------------------------- // Status ignored if have offline interface. 
// Status recording delayed to here to allow exception handler // to catch pop exception and record an alternate status. if (onlineInterface()) { results.push_back(QueryStatus(status,remarks,queryTime)); } } // END for each query // --------------------------------------------------------------------- // END of loop over queries of queryset // --------------------------------------------------------------------- // --------------------------------------------------------------------- // Handle online interface exceptions // --------------------------------------------------------------------- } catch (std::runtime_error e) { printMessage(ERRORm, string("Exception in driving query set\n") + e.what() ); appendCommaString(remarks, "exception in driving query set"); finaliseQuerySet(); results.push_back(QueryStatus(ERROR,remarks,"")); return results; } // --------------------------------------------------------------------- // Run query set offline on solver // --------------------------------------------------------------------- if (!onlineInterface()) { // With file level interface, write file. outputQuerySet(); queryTimer.restart(); bool runError = runQuerySet(remarks); string queryTime = queryTimer.toString(); if (runError) { printMessage(ERRORm, "Error flagged on run of solver\n"); } results = getRunResults(endQuery - startQuery); if (results.size() > 0) results.at(0).time = queryTime; } finaliseQuerySet(); return results; } //========================================================================== // Alternative Drive Unit //========================================================================== void SMTDriver::altDriveUnit(Node* unit, UnitInfo* unitInfo) { // --------------------------------------------------------------------- // Translate unit into solver-specific abstract syntax // --------------------------------------------------------------------- Node* solverUnit; try { solverUnit = translateUnit(unit); } catch (std::runtime_error e) { printMessage(ERRORm, "Exception in solver-specific translation" + ENDLs + e.what()); printCSVRecord("error", "solver translation failed"); return; } Formatter::setFormatter(VanillaFormatter::getFormatter()); printMessage(FINESTm, "Solver-specific translated unit:" + ENDLs + solverUnit->toString()); //------------------------------------------------------------------------ // Initialise query and result tables //------------------------------------------------------------------------ queryRecords.clear(); resultRecords.clear(); Node* goals = solverUnit->child(2); for (int goalNum = 1; goalNum <= goals->arity(); goalNum++) { if (option("goal") && intOptionVal("goal") != goalNum) { continue; } Node* goal = goals->child(goalNum-1); // Set up result record for goal / goal slices // Initialise all fields except queryNum. 
ResultRecord rRcd; rRcd.goalNum = goalNum; string goalNumStr; extractGoalInfo(goal->id, rRcd.unitKind, goalNumStr, rRcd.origins); if (intToString(goalNum) != goalNumStr) { // Set currentGoalNumStr and currentConcl globals used by // printMessage currentGoalNumStr = goalNumStr; currentConcl = 0; printMessage(WARNINGm, "Mismatch between goal position " + intToString(goalNum) + " and numbering " + goalNumStr); } if (goal->arity() < 2) { // "*** true" goals if (option("count-trivial-goals")) { rRcd.queryNum = -1; resultRecords.push_back(rRcd); } continue; } // goal non-trivial // Set up query record for goal / goal slices QueryRecord qRcd; qRcd.goalNum = goalNum; qRcd.status = UNCHECKED; // Customise query and result records for goal / goal slices and save Node* concls = goal->child(1); int fromConcl = option("fuse-concls") ? 0 : 1; int toConcl = option("fuse-concls") ? 0 : concls->arity(); for (int conclNum = fromConcl; conclNum <= toConcl; conclNum++) { qRcd.conclNum = conclNum; queryRecords.push_back(qRcd); rRcd.queryNum = (int) queryRecords.size() - 1; resultRecords.push_back(rRcd); } } //------------------------------------------------------------------------ // Drive unit queries to solver //------------------------------------------------------------------------ // Allow for results from running a querySet to possibly be // shorter than number of queries in querySet. This can occur // because // - Processing a query throws an exception // - Processing a querySet hits a resource limit, e.g. a timeout, for // some particular query or for a prefix of the querySet. // Only references query table, not results table. int startQuery = 0; while (startQuery < (int) queryRecords.size()) { // --------------------------------------------------------------------- // Set range of queries to drive // --------------------------------------------------------------------- // Range is [startQuery,endQuery-1] // If don't want incrementality, just do 1 query. int endQuery; if (option("exploit-solver-incrementality")) { endQuery = queryRecords.size(); } else { endQuery = startQuery + 1; } // --------------------------------------------------------------------- // Drive queries and collect results // --------------------------------------------------------------------- vector queryResults = driveQuerySet(unitInfo, unit, unitInfo->getExcludedRules(), startQuery, endQuery); assert((int) queryResults.size() <= endQuery - startQuery); // Copy current result into query table for (int qr = 0; qr != (int) queryResults.size(); qr++) { int currentQuery = startQuery + qr; queryRecords.at(currentQuery).status = queryResults.at(qr).status; appendCommaString(queryRecords.at(currentQuery).remarks, queryResults.at(qr).remarks); queryRecords.at(currentQuery).time = queryResults.at(qr).time; } startQuery = startQuery + queryResults.size(); // Move on one query if get back no results. if (queryResults.size() == 0) { startQuery++; } // Redo last query if it didn't get use of whole of resource // allowance. else if (resourceLimitsForQuerySet() && queryResults.size() > 1 && queryResults.back().status == RESOURCE_LIMIT) { startQuery--; } } //------------------------------------------------------------------------ // Write results from query and results tables to VCT file. 
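// Each resultRecords entry refers back to a queryRecords entry through its
// queryNum field (queryNum == -1 marks a trivially true "*** true" goal that
// never produced a query).  The loop below maps the recorded solver Status of
// that query onto the "true" / "unproven" / "error" strings written to the
// CSV/VCT output and updates the per-run conclusion counters.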
//------------------------------------------------------------------------ for (int i = 0; i != (int) resultRecords.size(); i++) { int queryNum = resultRecords.at(i).queryNum; string resultStatus; string time; string remarks; // currentGoalNumStr and currentConcl are globals from utility.cc currentGoalNumStr = intToString(resultRecords.at(i).goalNum); if (queryNum == -1) { resultStatus = "true"; currentConcl = 0; time = "0"; remarks = "trivial goal"; trueConcls++; } else { switch (queryRecords.at(queryNum).status) { case(TRUE): resultStatus = "true"; trueConcls++; break; case(UNPROVEN): resultStatus = "unproven"; unprovenConcls++; break; case(RESOURCE_LIMIT): resultStatus = "unproven"; unprovenConcls++; timeoutConcls++; break; case(ERROR): resultStatus = "error"; unprovenConcls++; timeoutConcls++; break; case(UNCHECKED): resultStatus = "error"; unprovenConcls++; printMessage(ERRORm, "Found unchecked query"); break; } currentConcl = queryRecords.at(queryNum).conclNum; time = queryRecords.at(queryNum).time; remarks = queryRecords.at(queryNum).remarks; } printCSVRecordAux(resultRecords.at(i).unitKind, resultRecords.at(i).origins, intToString(resultRecords.at(i).goalNum), currentConcl, resultStatus, time, remarks); } // END for return; } spark-2012.0.deb/victor/vct/src/csvmerge.cc0000644000175000017500000001142011753202341017446 0ustar eugeneugen/* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ // USAGE // csvmerge filename1 m1 ... mj filename2 n1 ... nk // // Create new csv record on stdout using // fields m1 .. mj from filename1 and fields n1 .. nk from filename2. // If j = 0 use all fields from filename1. // If k = 0 use all fields from filename2. // filename1 and filename2 must be same length. #include #include using std::cout; using std::cin; using std::cerr; using std::endl; using std::istream; #include using std::ifstream; #include "utility.hh" int main (int argc, char *argv[]) { vector args = processCommandArgs(argc, argv); if (args.size() < 2) { cerr << "Usage" << endl << "" << endl << " csvmerge filename1 m1 ... mj filename2 n1 ... nk" << endl << "" << endl << "Create new csv records on stdout using" << endl << "fields m1 .. mj of records from filename1 and" << endl << "fields n1 .. nk of recordsfrom filename2." << endl << "If j = 0 use all fields from filename1." << endl << "If k = 0 use all fields from filename2." << endl << "" << endl << "filename1 and filename2 must be same length." 
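// Hypothetical worked example of the merge (file names and contents made up,
// assuming csvDigest/csvConcat split and join fields on commas):
//   csvmerge a.csv 1 2 3 b.csv 3 4
//   a.csv line:   proc_p,1,true,0.01
//   b.csv line:   proc_p,1,unproven,2.30
//   output line:  proc_p,1,true,unproven,2.30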
<< endl; return 0; } ifstream ifs1; ifstream ifs2; bool readingSecondFileInfo = false; string fileName1(args[0]); string fileName2; vector cols1; vector cols2; for (int i = 1; i != args.size(); i++) { if (isIntString(args[i])) { int p = stringToInt(args[i]); if (readingSecondFileInfo) cols2.push_back(p); else cols1.push_back(p); } else { readingSecondFileInfo = true; fileName2 = args[i]; } } ifs1.open( fileName1.c_str() ); if (!ifs1) { cerr << "Unable to open file 1: " << fileName1 << endl; exit(1); } ifs2.open( fileName2.c_str() ); if (!ifs2) { cerr << "Unable to open file 2: " << fileName2 << endl; exit(1); } string line1; string line2; int numCols1(-1); int numCols2(-1); while (true) { bool notEOF1 = getline(ifs1, line1); bool notEOF2 = getline(ifs2, line2); if (notEOF1 != notEOF2) { cerr << "Files of differing lengths" << endl; exit(1); } if (!notEOF1) break; // cerr << "Read line pair " << endl; vector vs1 = csvDigest(line1); vector vs2 = csvDigest(line2); // cerr << "vs1 size " << vs1.size() << endl; // cerr << "vs2 size " << vs2.size() << endl; if (numCols1 < 0) numCols1 = vs1.size(); else if (numCols1 != vs1.size()) { cerr << "File 1 number of cols changed from " << numCols1 << " to " << vs1.size() << " at line " << endl; cerr << line1 << endl; exit(1); } if (numCols2 < 0) numCols2 = vs2.size(); else if (numCols2 != vs2.size()) { cerr << "File 2 number of cols changed from " << numCols2 << " to " << vs2.size() << " at line " << endl; cerr << line2 << endl; exit(1); } vector ws; if (cols1.size() == 0) { ws = vs1; } for (int i = 0; i != cols1.size(); i++) { if (cols1[i] < 1 || cols1[i] > vs1.size()) { cerr << "Position " << cols1[i] << " out of range for line from file 1" << endl; cerr << line1 << endl; continue; } ws.push_back(vs1[cols1[i] - 1]); } if (cols2.size() == 0) { ws.insert(ws.end(), vs2.begin(), vs2.end()); } for (int i = 0; i != cols2.size(); i++) { if (cols2[i] < 1 || cols2[i] > vs2.size()) { cerr << "Position " << cols2[i] << " out of range for line from file 2" << endl; cerr << line2 << endl; continue; } ws.push_back(vs2[cols2[i] - 1]); } cout << csvConcat(ws) << endl; } // while (true) return 0; } spark-2012.0.deb/victor/vct/src/rule-filter.cc0000644000175000017500000002437511753202341020102 0ustar eugeneugen//========================================================================== //========================================================================== // RULE-FILTER.CC //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. 
*/ //========================================================================== #include "rule-filter.hh" #include using std::make_pair; using std::pair; #include using std::cout; using std::endl; //========================================================================== // Auxiliary functions //========================================================================== template void isectSetIntoMapVal(map >& m, const K& key, set& vals) { // C++ parser requires the typename here. Without it, the parser // doesn't recognise map >::iterator as a type. typedef typename map >::iterator iter; pair p = m.insert(make_pair(key,vals)); // pair >::iterator, bool> p = m.insert(make_pair(key,vals)); bool insertSuccess = p.second; if (insertSuccess) return; set& mapVals = (p.first)->second; set newVals; setIsect(mapVals, vals, newVals); mapVals = newVals; return; } //========================================================================== // Drive single query //========================================================================== vector RuleFilter::driveQuerySet(UnitInfo* unitInfo, Node* unit, set excludedRules, int startQuery, int endQuery) { vector normalResult = SMTDriver::driveQuerySet(unitInfo, unit, excludedRules, startQuery, endQuery); if (! (option("find-redundant-rules") && startQuery + 1 == endQuery && unitInfo->unitRLURulesEnd > 0 && normalResult.size() > 0 && normalResult.at(0).status == TRUE) ) { return normalResult; } printMessage(INFOm,"Starting rule filtering on goal"); string queryTime(normalResult.at(0).time); // Record translation from unit and directory rlu rule numbers to names. saveRuleNames(unitInfo, unit); // Try excluding single rules in range [0 .. unitRLUFileEnd - 1] // that are not already excluded because of unbound ids. set newExclRules(excludedRules); for (int i = 0; i != unitInfo->unitRLURulesEnd; i++) { if (setMember(i, excludedRules)) continue; set exclRules1Extra(newExclRules); exclRules1Extra.insert(i); vector result = SMTDriver::driveQuerySet(unitInfo, unit, exclRules1Extra, startQuery, startQuery+1); if (result.size() > 0 && result.at(0).status == TRUE) { newExclRules.insert(i); queryTime = result.at(0).time; } } saveExclusionInfo(unitInfo, newExclRules); if (option("report-excluded-rlu-rules-per-goal")) { Node* rules = unit->child(1); string report("RuleFilter: excluded RLU rules for goal"); report.append(ENDLs); report.append("(** = undecl ids, s, -- = absence doesn't falsify goal)"); report.append(ENDLs); for (int i = 0; i != unitInfo->unitRLURulesEnd; i++) { string ruleName(rules->child(i)->id); if (setMember(i,excludedRules)) report.append("** "); else if (setMember(i,newExclRules)) report.append("-- "); else report.append(" "); report.append(ruleName); report.append(ENDLs); } printMessage(INFOm, report); } vector result; result.push_back(QueryStatus(TRUE,"",queryTime)); return result; } void RuleFilter::saveExclusionInfo(UnitInfo* unitInfo, set exclRules) { // Partition excluded rules into sets for dir and unit rules files. set dirExclRules; set unitExclRules; for (set::iterator i = exclRules.begin(); i != exclRules.end(); i++) { if (*i >= unitInfo->dirRLURulesEnd) { // Adjust rule # so have index relative to start of dirRLU // file start unitExclRules.insert(*i - unitInfo->dirRLURulesEnd); } else { dirExclRules.insert(*i); } } // Save info. Index dir rules by unit path + dir name, // unit rules by unit name. 
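// isectSetIntoMapVal either records the set (first goal seen for this key) or
// intersects it with what is already recorded, so a rule index survives only
// if it was found unrequired for every goal drawn from that RLU file.  For
// example (hypothetical values): an existing entry {0, 2, 4} intersected with
// {2, 4, 5} leaves {2, 4}.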
// if (dirExclRules.size() > 0) if (unitInfo->dirRLURulesEnd > 0) { isectSetIntoMapVal(unrequiredDirRules, joinPaths(unitInfo->getUnitPath(), unitInfo->getUnitDirName()), dirExclRules); } // if (unitExclRules.size() > 0) if (unitInfo->unitRLURulesEnd > unitInfo->dirRLURulesEnd) { isectSetIntoMapVal(unrequiredUnitRules, unitInfo->getUnitName(), unitExclRules); } return; } void RuleFilter::saveRuleNames(UnitInfo* unitInfo, Node* unit) { Node* rules = unit->child(1); if (unitInfo->dirRLURulesEnd > 0) { string dirRLUFileIndex(joinPaths(unitInfo->getUnitPath(), unitInfo->getUnitDirName())); if (dirRLURuleNames.count(dirRLUFileIndex) == 0) { vector names; for (int i = 0; i < unitInfo->dirRLURulesEnd; i++) { names.push_back(rules->child(i)->id); } dirRLURuleNames.insert(make_pair(dirRLUFileIndex, names)); } } if (unitInfo->unitRLURulesEnd > unitInfo->dirRLURulesEnd) { string unitRLUFileIndex = unitInfo->getUnitName(); if (unitRLURuleNames.count(unitRLUFileIndex) == 0) { vector names; for (int i = unitInfo->dirRLURulesEnd; i < unitInfo->unitRLURulesEnd; i++) { names.push_back(rules->child(i)->id); } unitRLURuleNames.insert(make_pair(unitRLUFileIndex, names)); } } return; } void RuleFilter::finaliseSession() { // Write results to log file. // For each package RLU file, report which rules not needed // For each unit RLU file, report which rules not needed if (!option("find-redundant-rules")) return; int totalDirRLURules = 0; int totalUnrequiredDirRLURules = 0; int totalUnitRLURules = 0; int totalUnrequiredUnitRLURules = 0; for (map >::iterator i = unrequiredDirRules.begin(); i != unrequiredDirRules.end(); i++) { string dirRLURuleIndex = i->first; set unreqDirRules = i->second; vector& dirRuleNameTable = dirRLURuleNames.find(dirRLURuleIndex)->second; totalDirRLURules += dirRuleNameTable.size(); totalUnrequiredDirRLURules += unreqDirRules.size(); logStream << endl << endl; logStream << "Unrequired rules in package RLU file " << endl << dirRLURuleIndex << ".rlu" << "(" << dirRuleNameTable.size() << " rules):" << endl; for (set::iterator j = unreqDirRules.begin(); j != unreqDirRules.end(); j++) { int unreqRuleNum = *j; logStream << " "; if (unreqRuleNum < (int) dirRuleNameTable.size()) logStream << dirRuleNameTable.at(unreqRuleNum); else logStream << "Out of range rule number: " << unreqRuleNum; logStream << endl; } logStream << endl; } for (map >::iterator i = unrequiredUnitRules.begin(); i != unrequiredUnitRules.end(); i++) { string unitRLURuleIndex = i->first; set unreqUnitRules = i->second; vector& unitRuleNameTable = unitRLURuleNames.find(unitRLURuleIndex)->second; totalUnitRLURules += unitRuleNameTable.size(); totalUnrequiredUnitRLURules += unreqUnitRules.size(); logStream << endl << endl; logStream << "Unrequired rules in program unit RLU file " << endl << unitRLURuleIndex << ".rlu" << " (" << unitRuleNameTable.size() << " rules):" << endl; for (set::iterator j = unreqUnitRules.begin(); j != unreqUnitRules.end(); j++) { int unreqRuleNum = *j; logStream << " "; if (unreqRuleNum < (int) unitRuleNameTable.size()) logStream << unitRuleNameTable.at(unreqRuleNum); else logStream << "Out of range rule number: " << unreqRuleNum; logStream << endl; } logStream << endl; } logStream << "Package rules (Unrequired/Total): " << totalUnrequiredDirRLURules << "/" << totalDirRLURules << endl; logStream << "Program unit rules (Unrequired/Total): " << totalUnrequiredUnitRLURules << "/" << totalUnitRLURules << endl; return; } spark-2012.0.deb/victor/vct/src/main.cc0000644000175000017500000004101211753202341016557 
0ustar eugeneugen//========================================================================== //========================================================================== // MAIN.CC //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ //========================================================================== // TOP LEVEL CODE #include #include #include #include #include using std::cout; using std::cerr; using std::endl; #include using std::ifstream; #include #include "utility.hh" #include "node.hh" #include "pdriver.hh" #include "formatter.hh" #include "processor.hh" #include "smt-driver.hh" #include "smtlib-driver.hh" #include "smtlib2-driver.hh" #ifdef LINK_YICES #include "yices-driver.hh" #endif #ifdef LINK_CVC3 #include "cvc-driver.hh" #endif #include "isab-driver.hh" using namespace z; //========================================================================== // Local utility functions //========================================================================== //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // readUnitList //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ /* Todo: Strip trailing whitespaces. Skip blank lines Add trailing "/" if missing. */ vector readUnitList(const string& filename) { vector result; ifstream ifs (filename.c_str() ); if (ifs.fail()) { cerr << endl << "Error on trying to open file " << filename << endl; exit(1); } string unitPath; while (getline(ifs, unitPath)) { if (unitPath[0] == '#') // Allow comment lines in listing. continue; else if (tokeniseString(unitPath).size() == 0) // Allow blank lines continue; else result.push_back(UnitInfo(unitPath)); } ifs.close(); return result; } // Needs updating to handle UnitInfo objects /* void printUnitList(vector v) { cout << "Units to be examined:" << endl; for (vector::iterator i = v.begin(); i != v.end(); i++) { cout << "#" << *i << "#" << endl; } cout << endl << endl; } */ /* rulefamily ::= RULE_FAMILY (SEQ typeassum+) (SEQ rule+) rlsfile ::= RLS_FILE rulefamily+ */ int countRules(Node* ruleFileAST) { int count = 0; for (int i = 0; i != ruleFileAST->arity(); i++) { Node* ruleFamily = ruleFileAST->child(i); count += ruleFamily->child(1)->arity(); } return count; } Node* readRuleFile(pdriver& driver, const string& ruleFile) { printMessage(FINEm, "Reading rule file " + ruleFile); if (driver.parseRuleFile(ruleFile) ) { printMessage(ERRORm, "Parse of rule file " + ruleFile + " failed"); exit(1); } return driver.result; } //========================================================================== // Parse unit. //========================================================================== // Parse triple of FDL, RLS and VCG files. 
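// A minimal standalone sketch (not part of the original source; the helper
// name unitInputFiles is hypothetical) of the file-name conventions parseUnit
// below relies on: for a unit P1/.../Dk/U it reads U.fdl, U.rls and U.vcg
// (or U.siv when the siv option is set); optional user rule files (Dk.rlu and
// U.rlu) are picked up separately when the corresponding options are enabled.

#include <string>
#include <vector>

static std::vector<std::string> unitInputFiles(const std::string& fullUnitName,
                                               bool useSiv)
{
    std::vector<std::string> files;
    files.push_back(fullUnitName + ".fdl");                     // declarations
    files.push_back(fullUnitName + ".rls");                     // generated rules
    files.push_back(fullUnitName + (useSiv ? ".siv" : ".vcg")); // VCs
    return files;
}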
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // parseUnit //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ Node* parseUnit(UnitInfo* unitInfo) { // fullUnitName == P1/.../Pn/D1/.../Dk/U string fullUnitName (unitInfo->getFullUnitName()); pdriver driver; Node* unitAST = new Node(UNIT); if (option("scantrace")) driver.trace_scanning = true; if (option("parsetrace")) driver.trace_parsing = true; // ------------------------------------------------------------------- // Read in and parse declarations files // ------------------------------------------------------------------- // Assemble list of files to read vector declFiles; if (! option("skip-unit-decls")) { if (option("read-all-decl-files-in-dir")) { // unitDir == P1/.../Pn/D1/.../Dk string unitDir(joinPaths(unitInfo->getUnitPathPrefix(), unitInfo->getUnitDirName())); vector dirContents = listDir(unitDir); for (int i = 0; i != (int) dirContents.size(); i++) { string dirItem = dirContents.at(i); if (hasSuffix(dirItem, ".fdl")) { string fullFDLFileName = unitDir + "/" + dirItem; declFiles.push_back(fullFDLFileName); } } } else { // Add P1/.../Pn/D1/.../Dk/U.fdl declFiles.push_back(fullUnitName + ".fdl"); } } if (option("decls")) { vector extraDeclFiles = optionVals("decls"); // This causes a glibc error message: // corrupted double-linked list!! // // copy(extraDeclFiles.begin(), extraDeclFiles.end(), declFiles.end()); declFiles.insert(declFiles.end(), extraDeclFiles.begin(), extraDeclFiles.end()); } vector unitDeclFiles(unitInfo->getDeclFiles()); declFiles.insert(declFiles.end(), unitDeclFiles.begin(), unitDeclFiles.end() ); // Read files in Node* decls = new Node(FDL_FILE); for (int i = 0; i != (int) declFiles.size(); i++) { string declFile = declFiles.at(i); printMessage(FINEm, "Reading declarations file " + declFile); if (driver.parseFDLFile(declFile) ) { printMessage(ERRORm, "Parse of declarations file " + declFile + " failed"); return 0; } Node* newDecls = driver.result; decls->appendChildren(newDecls); } unitAST->addChild(decls); // ------------------------------------------------------------------- // Read in and parse rules files // ------------------------------------------------------------------- Node* rules = new Node(RULE_FILE); if (option("read-directory-rlu-files")) { string dirRLUFile; if (unitInfo->getUnitDirName() != "" ) { dirRLUFile // == P1/.../Pn/D1/.../Dk/Dk.rlu if k>0 // == P1/.../Pn/Pn.rlu if n>0 // == .rlu o/w = joinPaths(unitInfo->getUnitPathPrefix(), joinPaths(unitInfo->getUnitPath(), unitInfo->getUnitDirName())) + ".rlu"; } else { // We don't have a directory in our unit name if the unit is // in the current directory. In this case the name of the // global user rule can be worked out from the current // directory. 
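// For example (hypothetical path): when the tool is invoked from
// /home/user/stack, basename() of the working directory gives "stack", so the
// directory-level user rule file looked for below is "stack.rlu" in the
// current directory.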
char cwd[MAXPATHLEN]; getcwd(cwd, MAXPATHLEN); dirRLUFile = basename(cwd); dirRLUFile += ".rlu"; } if (readableFileExists(dirRLUFile)) { Node* ruleFile = readRuleFile(driver, dirRLUFile); unitInfo->dirRLURulesEnd = countRules(ruleFile); unitInfo->unitRLURulesEnd = unitInfo->dirRLURulesEnd; rules->appendChildren(ruleFile); } } if (option("read-unit-rlu-files")) { string unitRLUFile // == P1/.../Pn/D1/.../Dk/U.rlu = fullUnitName + ".rlu"; if (readableFileExists(unitRLUFile)) { Node* ruleFile = readRuleFile(driver, unitRLUFile); unitInfo->unitRLURulesEnd = unitInfo->dirRLURulesEnd + countRules(ruleFile); rules->appendChildren(ruleFile); } } vector ruleFiles; ruleFiles.push_back(fullUnitName + ".rls"); if (option("rules")) { vector extraRuleFiles = optionVals("rules"); ruleFiles.insert(ruleFiles.end(), extraRuleFiles.begin(), extraRuleFiles.end()); } // Assemble list of rest of rules files to read vector unitRuleFiles(unitInfo->getRuleFiles()); ruleFiles.insert(ruleFiles.end(), unitRuleFiles.begin(), unitRuleFiles.end() ); // Do read of files for (int i = 0; i != (int) ruleFiles.size(); i++) { string ruleFile = ruleFiles.at(i); rules->appendChildren(readRuleFile(driver, ruleFile)); } unitAST->addChild(rules); // ------------------------------------------------------------------- // Read in and parse vcg or siv file // ------------------------------------------------------------------- string vcFileExt(".vcg"); if (option("siv")) vcFileExt = ".siv"; printMessage(FINEm, "Reading VCG file"); if (driver.parseVCGFile(fullUnitName + vcFileExt)) { printMessage(ERRORm, "Parse of VC file failed"); return 0; } unitAST->addChild(driver.result); return unitAST; } //========================================================================== // Process unit. //========================================================================== void processUnit(UnitInfo* unitInfo, SMTDriver* smtDriver) { if (! unitInfo->includeUnit()) return; // Set globals used in formatting message headers and in CSV reports initCurrentUnitInfo(unitInfo); if (option("utick")) { if (option("longtick")) { cout << endl << unitInfo->getUnitName(); } else { cout << "*"; } cout.flush(); } printMessage(INFOm, "Starting unit processing"); Node* unitAST = parseUnit(unitInfo); if (unitAST == 0) { printCSVRecord("error", "parsing failed"); return; } Formatter::setFormatter(VanillaFormatter::getFormatter()); string status = elaborateUnit(unitAST, unitInfo); if (status != "good") { printCSVRecord("error", status); return; } smtDriver->driveUnit(unitAST, unitInfo); if (!option("no-mm")) { Node::deletePool(); } return; } //========================================================================== // TOP LEVEL FUNCTION //========================================================================== //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Main //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // See usage.txt for usage. int main (int argc, char *argv[]) { // In order to guarantee consistent timestamps no matter the // system used, we explicitly set the time locale to POSIX. 
setlocale(LC_TIME, "POSIX"); // ------------------------------------------------------------------- // Read in command line arguments // ------------------------------------------------------------------- string unitName; vector nonOptions = processCommandArgs(argc, argv); if (nonOptions.size() > 0) { unitName = nonOptions.front(); } // --------------------------------------------------------------------- // Set tracing level // --------------------------------------------------------------------- // Make default level to just report warnings and errors. if (!option("level")) { messageThreshold = WARNINGm; } else { string s = optionVal("level"); if (s == "6" || s == "error") messageThreshold = 6; else if (s == "5" || s == "warning") messageThreshold = 5; else if (s == "4" || s == "info") messageThreshold = 4; else if (s == "3" || s == "fine") messageThreshold = 3; else if (s == "2" || s == "finer") messageThreshold = 2; else if (s == "1" || s == "finest") messageThreshold = 1; else messageThreshold = WARNINGm; } // --------------------------------------------------------------------- // Resolve Units to be processed. // --------------------------------------------------------------------- vector unitList; if (unitName != "") { unitList.push_back(UnitInfo(unitName)); } else if (option("units")) { unitList = readUnitList(optionVal("units")); } else { cerr << "No units specified. "<< endl; exit(1); } /* if (option("print-unit-list")) { printUnitList(unitList); } */ // --------------------------------------------------------------------- // Open report files // --------------------------------------------------------------------- openReportFiles(); // --------------------------------------------------------------------- // Initialise summary of results // --------------------------------------------------------------------- trueConcls = 0; unprovenConcls = 0; errorConcls = 0; // --------------------------------------------------------------------- // Get solver driver // --------------------------------------------------------------------- SMTDriver* smtDriver = 0; if (option("interface-mode")) { if (optionVal("interface-mode") == "api" && option("prover") && optionVal("prover") == "cvc3" ) { #ifdef LINK_CVC3 smtDriver = newCVCDriver(); #else cerr << "Cannot run CVC3 via API: it was not linked in" << endl; exit(1); #endif } else if (optionVal("interface-mode") == "api" && option("prover") && optionVal("prover") == "yices" ) { #ifdef LINK_YICES smtDriver = new YicesDriver(); #else cerr << "Cannot run Yices: it was not linked in"<< endl; exit(1); #endif } /* else if (optionVal("interface-mode") == "simplify-old" && option("prover") && optionVal("prover") == "simplify" ) { cout << "Init for Simplify" << endl; smtDriver = new SimplifyDriver(); } else if (optionVal("interface-mode") == "simplify-old" && option("prover") && optionVal("prover") == "z3" ) { smtDriver = new Z3Driver(); } */ else if (optionVal("interface-mode") == "smtlib" || optionVal("interface-mode") == "simplify") { smtDriver = new SMTLibDriver(); } else if (optionVal("interface-mode") == "smtlib2") { smtDriver = new SMTLib2Driver(); } else if (optionVal("interface-mode") == "isabelle") { smtDriver = new IsabDriver(); } else if (optionVal("interface-mode") == "dummy") { smtDriver = new SMTDriver(); } else { if (option("prover")) cerr << "Unrecognised interface & prover options: " << optionVal("interface-mode") << " & " << optionVal("prover") << endl; else cerr << "Unrecognised interface option: " << optionVal("interface-mode") << 
endl; exit(1); } } else { smtDriver = new SMTDriver(); } smtDriver->initSession(); // --------------------------------------------------------------------- // Process Units // --------------------------------------------------------------------- for (vector::iterator i = unitList.begin(); i != unitList.end(); i++) { processUnit(&(*i), smtDriver); } // --------------------------------------------------------------------- // Report summary of results // --------------------------------------------------------------------- printStats(); // --------------------------------------------------------------------- // Tidy up // --------------------------------------------------------------------- smtDriver->finaliseSession(); closeReportFiles(); if (option("utick") || option("gtick") || option("ctick")) cout << endl; return 0; } spark-2012.0.deb/victor/vct/src/yices-driver.cc0000644000175000017500000005331211753202341020246 0ustar eugeneugen//========================================================================== //========================================================================== // YICES-DRIVER.CC //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ //========================================================================== // Yices specific code. #include using std::ostringstream; #include extern "C" { #include "yicesl_c.h" } #include "yices-driver.hh" #include "formatter.hh" #include "pprinter.hh" #include "node-utils.hh" #include "bignum.hh" using namespace z; //========================================================================== // Pretty print Yices abstract syntax //========================================================================== /* Most expressions have 1. lead string 2. list of subterms Always want "(" added front and ")" added matching at end. For vertical layout, use indent 2, with lead string on separate line For horizontal layout, single space after opening delimiter and after DECL Use infix "::". Append to end of prefix header. Indent rest. SEQ H or Vertical: For V, line up all at same indent. 
*/ class YicesFormatter : public Formatter { public: virtual Box& addSyntax(z::Kind k, const string& id, vector& bs); static Formatter* getFormatter(); private: static Formatter* instance; YicesFormatter(); }; Box& YicesFormatter::addSyntax(z::Kind k, const std::string& id, vector& bs) { switch (k) { // TOP LEVEL case(DEF_TYPE): return makeStringAp("define-type " + id, bs); case(DEF_CONST): return PP::makeHVSeq("(define " + id, "::", "", ")", bs); case(ASSERT): return makeStringAp("assert", bs); case(CHECK): return box("(check)"); case(PUSH): return box("(push)"); case(POP): return box("(pop)"); // TYPES case(FUN_TY): return makeStringAp("->", bs); case(TUPLE_TY): return makeStringAp("tuple", bs); case(ENUM_TY): return makeStringAp("scalar", bs); case(SUBRANGE_TY): return makeStringAp("subrange", bs); case(REAL_TY): return box("real"); case(INT_TY): return box("int"); case(BOOL_TY): return box("bool"); case(RECORD_TY): return makeStringAp("record", bs); // EXPRS case(OR): return makeStringAp("or", bs); case(AND): return makeStringAp("and", bs); case(NOT): return makeStringAp("not", bs); case(IMPLIES): return makeStringAp("=>", bs); case(EQ): return makeStringAp("=", bs); case(NE): return makeStringAp("/=", bs); case(FORALL): return makeStringAp("forall", bs); case(EXISTS): return makeStringAp("exists", bs); case(LAMBDA): return makeStringAp("lambda", bs); case(TUPLE): case(SEQ): return PP::makeHVSeq("", "(", "", ")", bs); case(ASSIGN): case(DECL): return PP::makeHVSeq(id + "::", "", "", "", bs); case(I_LT): return makeStringAp("<", bs); case(I_LE): return makeStringAp("<=", bs); case(R_LT): return makeStringAp("<", bs); case(R_LE): return makeStringAp("<=", bs); case(I_PLUS): return makeStringAp("+", bs); case(I_MINUS): return makeStringAp("-", bs); case(I_TIMES): return makeStringAp("*", bs); case(I_UMINUS): return makeStringAp("-", bs); case(R_PLUS): return makeStringAp("+", bs); case(R_MINUS): return makeStringAp("-", bs); case(R_TIMES): return makeStringAp("*", bs); case(RDIV): return makeStringAp("/", bs); case(R_UMINUS): return makeStringAp("-", bs); // Yices 1.0.3 does not recognise "div". So introduce new uifunction // case(IDIV): return makeStringAp("i__div", bs); case(IDIV): return makeStringAp("div", bs); // Yices 1.0.9 does. case(MOD): return makeStringAp("mod", bs); case(APPLY): return PP::makeHVSeq("", "(", "", ")", bs); case(CONST): return box(id); case(VAR): return box(id); case(TYPE_ID): return box(id); case(TRUE): return box("true"); case(FALSE): return box("false"); case(NATNUM): return box(id); case(PENDING): return box("pending"); case(UPDATE): return makeStringAp("update", bs); case(SELECT): return makeStringAp("select", bs); case(MK_RECORD): return makeStringAp("mk-record", bs); case(GT): case(GE): case(LT): case(LE): default: std::cerr << "YicesFormatter::addSyntax: " << "Encountered unrecognised kind " << kindString(k) << endl; return PP::makeHVSeq("***" + kindString(k) + "{" + id + "}***", "[", ";", "]", bs ); } } YicesFormatter::YicesFormatter() {}; Formatter* YicesFormatter::instance = 0; Formatter* YicesFormatter::getFormatter() { if (instance == 0) instance = new YicesFormatter; return instance; } //========================================================================== // Translate FDL abstract syntax to Yices abstract syntax //========================================================================== // Translation non destructive. 
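// A minimal standalone sketch of the non-destructive rewrite style used by
// YicesTranslator below: the input tree is left untouched and a renamed copy
// is built instead.  The TNode type and copyWithSuffix name are hypothetical;
// the real code works on Node via translateAux.

#include <cstddef>
#include <string>
#include <vector>

struct TNode {
    std::string id;
    std::vector<const TNode*> children;
};

static TNode* copyWithSuffix(const TNode* n, const std::string& suffix)
{
    TNode* copy = new TNode;
    copy->id = n->id + suffix;   // cf. the "." and "'" suffixes described below
    for (std::size_t i = 0; i != n->children.size(); ++i)
        copy->children.push_back(copyWithSuffix(n->children[i], suffix));
    return copy;
}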
// Yices parser disallows use of its keywords as identifiers, type identifiers // and record fieldnames, so we have to make these all distinct: // ' suffix added to type ids // . suffix added to ids (vars and consts) and field selectors. class YicesTranslator : public Translator { public: YicesTranslator() : Translator("yices") {}; protected: virtual Node* translateAux (Node* oldN); }; Node* YicesTranslator::translateAux (Node* oldN) { /* ---------------------------------------------------------------------------- Main translation ---------------------------------------------------------------------------- */ // default value of new kind. Node* newN = new Node(oldN->kind, oldN->id); for (int i = 0; i != oldN->arity(); i++) { newN->addChild( translateAux(oldN->child(i)) ); } switch (newN->kind) { /* ---------------------------------------------------------------------------- Top level of units ---------------------------------------------------------------------------- */ case UNIT: case RULES: case RULE: case GOALS: case GOAL: case CONCLS: case HYPS: case DECLS: return newN; /* ---------------------------------------------------------------------------- fdl declarations ---------------------------------------------------------------------------- */ case DEF_TYPE: { newN->id.append("'"); return newN; } case DEF_CONST: { newN->id.append("."); return newN; } case DECL_VAR: { newN->kind = DEF_CONST; newN->id.append("."); return newN; } // DECL_FUN {id} (SEQ ts) t --> DEF_CONST {id} (FUN_TY ts' t') case DECL_FUN: { newN->id.append("."); newN->child(0)->addChild(newN->child(1)); newN->popChild(); newN->child(0)->kind = FUN_TY; newN->kind = DEF_CONST; return newN; } /* ---------------------------------------------------------------------------- types ---------------------------------------------------------------------------- */ // Unchanged case INT_TY: case REAL_TY: case BOOL_TY: case ENUM_TY: case SUBRANGE_TY: case RECORD_TY: return newN; // ARRAY_TY (SEQ ts) t --> FUN_TY ts' t' case ARRAY_TY: { newN->child(0)->addChild(newN->child(1)); newN->child(0)->kind = FUN_TY; return newN->child(0); } /* ---------------------------------------------------------------------------- expressions ---------------------------------------------------------------------------- */ // Unchanged case FORALL: case EXISTS: case IMPLIES: case AND: case OR: case NOT: case EQ: case NE: case I_LT: case I_LE: // case UMINUS: case I_PLUS: case I_TIMES: case I_MINUS: case IDIV: case MOD: case R_PLUS: case R_TIMES: case R_MINUS: case RDIV: case R_LT: case R_LE: case TRUE: case FALSE: case NATNUM: return newN; case TO_REAL: return newN->child(0); case VAR: { newN->id.append("."); return newN; } case CONST: { newN->id.append("."); return newN; } case TYPE_ID: { newN->id.append("'"); return newN; } // Work around Yices 1.0.3 bug where UMINUS is identity! 
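// The workaround rewrites unary minus as subtraction from zero: -e is emitted
// as (- 0 e), hence the NATNUM "0" added as a left child in the two cases below.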
case I_UMINUS: newN->kind = I_MINUS; newN->addLeftChild(new Node(NATNUM, "0")); return newN; case R_UMINUS: newN->kind = R_MINUS; newN->addLeftChild(new Node(NATNUM, "0")); return newN; // ARR_ELEMENT{ty-id} e1 (SEQ es) --> APPLY e1' es' case ARR_ELEMENT: { newN->child(1)->addLeftChild(newN->child(0)); newN->child(1)->kind = APPLY; return newN->child(1); } // ARR_UPDATE{ty-id} e1 (SEQ es) e3 --> UPDATE e1' (SEQ es') e3' case ARR_UPDATE: { newN->kind = UPDATE; newN->id.clear(); return newN; } // RCD_ELEMENT{rcd-id} e t-id --> SELECT e' CONST{rcd-id.} case RCD_ELEMENT: { if (newN->arity() == 2) newN->popChild(); newN->addChild(new Node(CONST, newN->id + ".")); newN->kind = SELECT; newN->id.clear(); return newN; } // RCD_UPDATE{rcd-id} e1 e2 t-id --> UPDATE e1' CONST{rcd-id.} e2' case RCD_UPDATE: { if (newN->arity() == 3) newN->popChild(); Node* e2 = newN->child(1); newN->popChild(); newN->addChild(new Node(CONST, newN->id + ".")); newN->addChild(e2); newN->id.clear(); newN->kind = UPDATE; return newN; } // These only occur as children of RCD_UPDATE and RCD_ELEMENT. // Will be ignored when those nodes are translated. case TYPE_PARAM: return newN; case ASSIGN: { newN->id.append("."); } case MK_RECORD: { return newN; } // IFF e1 e2 --> (EQ e1 e2) case IFF: { newN->kind = EQ; return newN; } // FUN_AP{funid} es --> APPLY CONST{funid} es case FUN_AP: { // Taken care of in prelude.rul newN->addLeftChild(new Node(CONST, newN->id + ".")); newN->id.clear(); newN->kind = APPLY; return newN; } /* ---------------------------------------------------------------------------- Multipurpose ---------------------------------------------------------------------------- */ case DECL: { newN->id.append("."); } case TUPLE: return newN; case SEQ: return newN; /* ---------------------------------------------------------------------------- Constructors without translation ---------------------------------------------------------------------------- */ case LE: case GE: default: { error ("unrecognised kind " + kindString(newN->kind) ); return newN; } } // end switch(newN->kind) } //========================================================================== // Calling Yices //========================================================================== // Virtual function definitions for YicesDriver class //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // yicesRead //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Pass expression represented by node tree to Yices // Returns true and sets yicesOutput string if are problems in read. bool YicesDriver::push(yicesl_context ctx, Node* n, string& yicesInput, string& yicesOutput) { Formatter::setFormatter(YicesFormatter::getFormatter()); ostringstream oss; oss << *n; const string nStr(oss.str()); // const string needed for c_str() call printMessage(FINESTm, "Yices reading" + ENDLs + nStr); if (yicesl_read(ctx, (char*) nStr.c_str()) == 0) { // cast away constness. 
// for yicesl_read yicesInput = oss.str(); yicesOutput = yicesl_get_last_error_message(); return true; } else return false; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // formatErrorString() //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ string YicesDriver::formatErrorString(const string& yicesInput, const string& yicesErrorOutput) { ostringstream s; s << "Yices read error" << endl; s << "input to Yices:" << endl << endl << yicesInput << endl << endl; s << "output from Yices:" << endl << endl << yicesErrorOutput << endl; return s.str(); } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // ignoreErrorMessage() //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ bool YicesDriver::ignoreErrorMessage(const string& s) { return (s == "feature not supported: non linear problem."); } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // translateUnit() //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ Node* YicesDriver::translateUnit(Node* unit) { YicesTranslator t; Formatter::setFormatter(YicesFormatter::getFormatter()); return t.translate(unit); } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // initSession() //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ void YicesDriver::initSession() { // ------------------------------------------------------------------- // Process command line options // ------------------------------------------------------------------- if (option("yverb")) yicesl_set_verbosity(intOptionVal("yverb")); if (option ("ynotc")) yicesl_enable_type_checker(false); else yicesl_enable_type_checker(true); } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // initGoal() //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ void YicesDriver::initGoal(const string& unitName, int goalNum, int conclNum) { string fullGoalFileRoot = getFullGoalFileRoot("yices", unitName, goalNum, conclNum); ctx = yicesl_mk_context(); // Log commands passed to Yices. if (option("yices-loginput")) { string saveFile = fullGoalFileRoot + ".yices"; yicesl_enable_log_file((char*) saveFile.c_str()); } // Redirect output messages from Yices (e.g. "unsat"). // With 1.0.9 this used to be set on a session basis. // With 1.0.16 seems that have to set for each new context. // Always set to something, since default is to send to standard output // which is just clutter. if (option("yices-logoutput")) { string outputFile = fullGoalFileRoot + ".ylog"; yicesl_set_output_file((char*) outputFile.c_str()); } else { yicesl_set_output_file((char*) "/dev/null"); } if (option("counterex")) { yicesl_read(ctx, (char*) "(set-evidence! true)"); } // --------------------------------------------------------------- // Add declarations of functions introduced by translation // --------------------------------------------------------------- // yicesl_read(ctx, "(define int__times :: (-> int int int))"); // yicesl_read(ctx, "(define real__times :: (-> int int int))"); // --------------------------------------------------------------- // Add declarations missing from API Lite context // --------------------------------------------------------------- // Yices executable is compiled to include these declarations, but // they are not included when calling Yices via API. // NB: For -ve dividend these declarations specify different values // for div and mod as expected for Ada/FDL. 
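// Worked example, derived from the constraints below: for x = -7, y = 2 the
// div subtype forces (>= -7 (* 2 r)) and (< -7 (* 2 (+ r 1))), so r = -4
// (since 2*-4 = -8 <= -7 < -6 = 2*-3); the mod constraint
// (= (+ r (* (div x y) y)) x) then gives mod(-7, 2) = -7 - (-4 * 2) = 1.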
yicesl_read (ctx, (char*) "(define div:: \ (-> x::int y::int \ (subtype (r::int) (if (> y 0) \ (and (>= x (* y r)) \ (< x (* y (+ r 1)))) \ (and (<= x (* y r)) \ (> x (* y (+ r 1)))) \ ) \ )))" ); yicesl_read (ctx, (char*) "(define mod::(-> x::int y::int \ (subtype (r::int) (= (+ r (* (div x y) y)) x))))" ); return; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // addDecl() //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ void YicesDriver::addDecl(Node* decl) { string yicesInput; string yicesOutput; if (push(ctx, decl, yicesInput, yicesOutput)) { throw std::runtime_error(formatErrorString(yicesInput, yicesOutput)); } return; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // addHyp() //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ void YicesDriver::addHyp(Node* hyp, const string& hypId, string& remarks) { string yicesInput; string yicesOutput; if (push(ctx, new Node(z::ASSERT, hyp), yicesInput, yicesOutput)) { string errorMessage(formatErrorString(yicesInput, yicesOutput)); if( ignoreErrorMessage(yicesOutput) ) { printMessage(WARNINGm, "Yices rejected " + hypId + ":" + ENDLs + errorMessage); appendCommaString(remarks, "-" + hypId); return; } throw std::runtime_error(errorMessage); } return; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // addConcl() //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ void YicesDriver::addConcl(Node* concl, string& remarks) { addHyp(new Node (z::NOT, concl), "C", remarks); return; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // checkGoal() //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ bool YicesDriver::checkGoal(string& remarks) { string yicesInput; string yicesOutput; if (push(ctx, new Node(z::CHECK), yicesInput, yicesOutput)) { // Detect whether problem due to non-linearity. // (non-linearity arises during checking // on instantiation of quantified formulas) if(ignoreErrorMessage(yicesOutput)) { printMessage(WARNINGm, "Yices error during check" + ENDLs + yicesOutput); appendCommaString(remarks, "non-linearity in check"); status = UNPROVEN; return false; } else { printMessage(ERRORm, "Yices error during check" + ENDLs + yicesOutput); // Remarks update taken care of by driveUnit status = ERROR; return true; } } if (yicesl_inconsistent(ctx)) status = TRUE; else status = UNPROVEN; return false; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // getResults() //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ SMTDriver::Status YicesDriver::getResults(string& remarks) { return status; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // finaliseGoal() //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ void YicesDriver::finaliseGoal() { yicesl_del_context(ctx); return; } spark-2012.0.deb/victor/vct/src/node.cc0000644000175000017500000004205611753202341016571 0ustar eugeneugen//========================================================================== //========================================================================== // NODE.CC //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. 
Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ #include "node.hh" using namespace z; // For Kind type, kindString function using std::set; #include "pprinter.hh" // For printMessage. #include "utility.hh" // Tedious having to name Kinds twice. What's best way of avoiding this? std::string z::kindString(z::Kind k) { std::string s; switch (k) { #define c(id) case id: s = #id ; break c(FDL_FILE); c(DEF_TYPE); c(DECL_TYPE); // Isabelle c(DEF_RECORD); // Isabelle c(OUTER_DECL); // Isabelle c(DEF_CONST); c(DECL_CONST); // Isabelle c(DECL_VAR); c(DECL_FUN); c(RULE_FILE); c(RULE_FAMILY); c(REQUIRES); c(MAY_BE_REPLACED_BY); c(MAY_BE_DEDUCED); c(ARE_INTERCHANGEABLE); c(VCG_FILE); c(GOAL); c(ASSERT); // Yices extra c(CHECK); c(PUSH); c(POP); c(SCRIPT); c(DECLS); // Introduced by processing c(RULES); c(RULE); c(GOALS); c(HYPS); c(ASSUMPTION); c(CONCLS); c(CONCL); // Isabelle c(UNIT); c(THEORY); // Isabelle // Types c(INT_TY); c(REAL_TY); c(BOOL_TY); c(BIT_TY); c(BITVEC_TY); c(SUBRANGE_TY); c(ENUM_TY); c(ARRAY_TY); c(RECORD_TY); c(FUN_TY); // Yices, HOLs c(FUN_ARG_TY); // Isabelle/HOL c(TUPLE_TY); c(UNKNOWN); // Used when no constraints known on a type c(TYPE_UNIV); // Type of types c(INT_OR_REAL_TY); // Int or Real c(INT_REAL_OR_ENUM_TY); // Int or Real or Enum c(NO_TY); // Set of no types. // Expressions c(FORALL); c(EXISTS); c(PAT); c(IMPLIES); c(IFF); c(AND); c(OR); c(NOT); c(EQ); c(NE); c(LT); c(GT); c(LE); c(I_LT); c(I_LE); c(R_LT); c(R_LE); c(GE); c(TO_REAL); c(UMINUS); c(SUCC); c(PRED); c(PLUS); c(MINUS); c(TIMES); c(LIN_TIMES); c(NL_TIMES); c(I_UMINUS); c(I_SUCC); c(I_PRED); c(I_PLUS); c(I_MINUS); c(I_TIMES); c(I_LIN_TIMES); c(I_NL_TIMES); c(R_UMINUS); c(R_PLUS); c(R_MINUS); c(R_TIMES); c(R_LIN_TIMES); c(R_NL_TIMES); c(RDIV); c(IDIV); c(IDIV_E); // Euclidean IDIV c(IDIVM); // IDIV compatible with MOD c(MOD); c(MOD_E); // Euclidean MOD c(EXP); c(I_EXP); c(R_EXP); c(I_EXP_N);// Isabelle/HOL c(R_EXP_N);// Isabelle/HOL c(I_TO_NAT);// Isabelle/HOL c(ODD); c(ABS); c(I_ABS); c(R_ABS); c(SQR); c(I_SQR); c(R_SQR); c(TUPLE); c(ARR_ELEMENT); c(ARR_UPDATE); c(ARR_BOX_UPDATE); c(MK_ARRAY); c(RCD_ELEMENT); c(RCD_UPDATE); c(MK_RECORD); c(ASSIGN); c(INDEX_AND); c(FUN_AP); c(SUBRANGE); c(ID); c(TYPE_ID); c(TYPE_PARAM); c(NATNUM); c(REALNUM); c(TRUE); c(FALSE); c(TERM_TRUE); c(TERM_FALSE); c(TERM_AND); c(TERM_OR); c(TERM_IFF); c(TERM_NOT); c(TERM_EQ); c(TERM_NE); c(TERM_I_LT); c(TERM_I_LE); c(TO_PROP); c(TO_BIT); // prop to bit c(APPLY); c(LAMBDA); c(MK_TUPLE); c(UPDATE); c(SELECT); // Builders, misc c(SEQ); c(DECL); c(PENDING); // SMTLIB c(BENCHMARK); c(LOGIC); c(EXTRASORTS); c(EXTRAFUNS); c(EXTRAPREDS); c(FORMULA); c(STATUS); c(DECL_PRED); c(TCONST); c(PRED_AP); c(DISTINCT); c(ITE); c(CONST); c(VAR); // SMTLIB2 c(SET_OPTION); c(TO_INT); c(IS_INT); c(SET_INFO); c(INFO_STR); #undef c } return s; } // Memory management definitions. 
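// A minimal standalone sketch (the Pooled name is hypothetical) of the managed
// allocation idiom the definitions below implement for Node: MANAGED nodes
// register themselves in a static pool and are reclaimed in one sweep by
// deletePool() rather than being freed individually.

#include <vector>

class Pooled {
public:
    Pooled() { pool_.push_back(this); }
    static void deletePool()
    {
        for (std::vector<Pooled*>::iterator i = pool_.begin();
             i != pool_.end(); ++i)
            delete *i;
        pool_.clear();
    }
private:
    static std::vector<Pooled*> pool_;
};

std::vector<Pooled*> Pooled::pool_;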
Nodes Node::pool; int Node::poolAllocCount = 0; void Node::deletePool() { for ( vector::iterator i = pool.begin(); i != pool.end(); i++) { delete (*i); } pool.clear(); return; } Node::Node(z::Kind k, Storage st) { kind = k; if (st == MANAGED) addToPool(); } /* Node::Node(Node* n1, Storage st) { kind = SEQ; addChild(n1); if (st == MANAGED) addToPool(); } */ Node::Node(Kind k, Node* n1, Storage st) { kind = k; addChild(n1); if (st == MANAGED) addToPool(); } Node::Node(Kind k, Node* n1, Node* n2, Storage st) { kind = k; addChild(n1); addChild(n2); if (st == MANAGED) addToPool(); } Node::Node(Kind k, Nodes& ns, Storage st) { kind = k; children = ns; if (st == MANAGED) addToPool(); } Node::Node(Kind k, const std::string& s, Storage st) { kind = k; id = s; if (st == MANAGED) addToPool(); } Node::Node(Kind k, const char* s, Storage st) { kind = k; id = string(s); if (st == MANAGED) addToPool(); } Node::Node(Kind k, const std::string& s, Node* n1, Storage st) { kind = k; id = s; addChild(n1); if (st == MANAGED) addToPool(); } Node::Node(Kind k, const char* s, Node* n1, Storage st) { kind = k; id = string(s); addChild(n1); if (st == MANAGED) addToPool(); } Node::Node(Kind k, const std::string& s, Node* n1, Node* n2, Storage st) { kind = k; id = s; addChild(n1); addChild(n2); if (st == MANAGED) addToPool(); } Node::Node(Kind k, const char* s, Node* n1, Node* n2, Storage st) { kind = k; id = string(s); addChild(n1); addChild(n2); if (st == MANAGED) addToPool(); } Node::Node(Kind k, const std::string& s, Node* n1, Node* n2, Node* n3, Storage st) { kind = k; id = s; addChild(n1); addChild(n2); addChild(n3); if (st == MANAGED) addToPool(); } Node::Node(Kind k, const char* s, Node* n1, Node* n2, Node* n3, Storage st) { kind = k; id = string(s); addChild(n1); addChild(n2); addChild(n3); if (st == MANAGED) addToPool(); } Node::Node(Kind k, const std::string& s, Node* n1, Node* n2, Node* n3, Node* n4, Storage st) { kind = k; id = s; addChild(n1); addChild(n2); addChild(n3); addChild(n4); if (st == MANAGED) addToPool(); } Node::Node(Kind k, const char* s, Node* n1, Node* n2, Node* n3, Node* n4, Storage st) { kind = k; id = string(s); addChild(n1); addChild(n2); addChild(n3); addChild(n4); if (st == MANAGED) addToPool(); } Node::Node(Kind k, const std::string& s, Nodes& ns, Storage st) { kind = k; id = s; children = ns; if (st == MANAGED) addToPool(); } Node::Node(Kind k, const char* s, Nodes& ns, Storage st) { kind = k; id = string(s); children = ns; if (st == MANAGED) addToPool(); } bool Node::operator==(const Node& n) const { if (kind == n.kind && id == n.id && arity() == n.arity()) { for (int i = 0; i != arity(); i++) { if ( ! 
( * child(i) == * (n.child(i)) ) ) return false; } return true; } return false; } std::set Node::getIds(z::Kind k) { std::set result; if (kind == k && id.size() > 0) result.insert(id); for (int i = 0; i != arity(); i++) { std::set childIds = child(i)->getIds(k); result.insert(childIds.begin(), childIds.end()); } return result; } std::string Node::toShortString() { return kindString(kind) + "{" + id + "}[" + intToString(arity()) + "]"; } Node* Node::copy() { Node* result = new Node(kind, id); for (int i = 0; i != arity(); i++) { result->addChild(child(i)->copy()); } return result; } Node* Node::int_ty = new Node (z::INT_TY, UNMANAGED); Node* Node::bool_ty = new Node (z::BOOL_TY, UNMANAGED); Node* Node::bit_ty = new Node (z::BIT_TY, UNMANAGED); Node* Node::real_ty = new Node (z::REAL_TY, UNMANAGED); Node* Node::type_univ = new Node (z::TYPE_UNIV, UNMANAGED); Node* Node::unknown = new Node (z::UNKNOWN, UNMANAGED); Node* Node::int_or_real_ty = new Node(z::INT_OR_REAL_TY, UNMANAGED); Node* Node::int_real_or_enum_ty = new Node(z::INT_REAL_OR_ENUM_TY, UNMANAGED); Node* Node::no_ty = new Node(z::NO_TY, UNMANAGED); bool isDivOrMod(Node* n) { return n->kind == z::IDIV || n->kind == z::MOD; } // bool isCompoundProp(Node* n) { int k = n->kind; return k == z::UNIT || k == z::RULES || k == z::RULE || k == z::GOALS || k == z::GOAL || k == z::HYPS || k == z::CONCLS || k == z::AND || k == z::OR || k == z::NOT || k == z::FORALL || k == z::EXISTS || k == z::IFF || k == z::IMPLIES; } // NB: isAtomicProp() is not complete. It will flag nodes that // are definitely atomic propositions, but will miss those which might // or might not be: e.g. boolean-valued array or record elements, // boolean-valued user-defined function applications, and the // boolean-valued order relations on enumeration types. bool isAtomicProp(Node* n) { int k = n->kind; return k == z::TRUE || k == z::FALSE || k == z::EQ || k == z::NE || k == z::LT || k == z::I_LT || k == z::R_LT || k == z::GT || k == z::LE || k == z::I_LE || k == z::R_LE || k == z::GE; } bool isProp(Node* n) {return isAtomicProp(n) || isCompoundProp(n); } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // getSubNodes //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Use `sub-nodes' to refer to `logical' children. Usually are the // same as children, except for operators built using more than one // node. // Need vector since might need to destructively update. void appendChildPtrsToVector(Node* n, vector& v) { for (int i = 0; i != n->arity(); i++) { v.push_back(&(n->child(i))); } return; } vector Node::getSubNodes() { vector result; switch(kind) { case FORALL: case EXISTS: { // FORALL (SEQ d1 ... nk) body [pat] // EXISTS (SEQ d1 ... nk) body [pat] // where di = DECL{id} type // Ignore pattern subterms. for (int i = 0; i != child(0)->arity(); i++) { Node* decl = child(0)->child(i); result.push_back(&(decl->child(0))); } result.push_back(&child(1)); break; } case ARR_ELEMENT: { // ARR_ELEMENT arr (SEQ i1 ... in), n >= 1 result.push_back(&(child(0))); appendChildPtrsToVector(child(1), result); break; } case ARR_UPDATE: { // ARR_UPDATE arr (SEQ i1 ... in) val, n >= 1 result.push_back(&(child(0))); appendChildPtrsToVector(child(1), result); result.push_back(&(child(2))); break; } case ARR_BOX_UPDATE: { // ARR_BOX_UPDATE arr (SEQ r1 ... 
rn) val, n >= 1 // where rj = SUBRANGE i1 i2 result.push_back(&(child(0))); Node* rangeTuple = child(1); for (int j = 0; j != rangeTuple->arity(); j++) { Node* range = rangeTuple->child(j); appendChildPtrsToVector(range, result); } result.push_back(&(child(2))); break; } case MK_ARRAY: { // MK_ARRAY{arrname} default a1 ... an, n >= 0 // MK_ARRAY{arrname} a1 ... an, n >= 1 // where ai = ASSIGN (SEQ i1 ... im) val, m >= 1 // ij = e | SUBRANGE e1 e2 for (int i = 0; i != arity(); i++) { Node* c = child(i); if (c->kind != ASSIGN) { result.push_back(&child(i)); continue; } Node* indexList = c->child(0); for (int j = 0; j != indexList->arity(); j++) { if (indexList->child(j)->kind == SUBRANGE) { result.push_back(& indexList->child(j)->child(0)); result.push_back(& indexList->child(j)->child(1)); } else { result.push_back(& indexList->child(j)); } } result.push_back(& c->child(1)); } break; } case MK_RECORD: { // MK_RECORD{rcdname} a1 ... an, n >= 1 // where ai = ASSIGN{fldname} val for (int i = 0; i < arity(); i++) { Node* assign = child(i); result.push_back(&(assign->child(0))); } break; } case RECORD_TY: { // RECORD_TY{rcdname} d1 ... dn, n >= 1 // where di = DECL{fldname} Ti for (int i = 0; i < arity(); i++) { Node* decl = child(i); result.push_back(&(decl->child(0))); } break; } default: { appendChildPtrsToVector(this, result); break; } } return result; } //======================================================================== // Term analysis. //======================================================================== class GatherOps { public: set OpSet; void operator() (Node* n); }; void GatherOps::operator() (Node* n) { string s (kindString(n->kind)); if (s != "ID" && s != "NATNUM" && n->id.size() != 0) { s.append("{"); s.append(n->id); s.append("}"); } OpSet.insert(s); return; } string gatherKinds(Node* n) { if (n == 0) return ""; GatherOps gFun; n->mapOver(gFun); set kinds = gFun.OpSet; kinds.erase("HYPS"); string result; for (set::iterator i = kinds.begin(); i!= kinds.end(); i++ ) { result.append(" "); result.append(*i); } return result; } // Collect names of bound variables class GatherBVs { public: set vSet; void operator() (Node* n); }; void GatherBVs::operator() (Node* n) { if (n->kind == FORALL || n->kind == EXISTS) { Node* decls = n->child(0); for (int i = 0; i != decls->arity(); i++) { Node* decl = decls->child(i); vSet.insert(decl->id); } } return; } set gatherBoundVars(Node* n) { GatherBVs gFun; n->mapOver(gFun); return gFun.vSet; } Node* nameToType(const std::string& s) { if (s == "integer") return new Node(INT_TY); if (s == "real") return new Node(REAL_TY); if (s == "boolean") return new Node(BOOL_TY); if (s == "bit___type") return new Node(BIT_TY); else return new Node(TYPE_ID, s); } std::string typeToName(Node* n) { if (n->kind == INT_TY) return std::string("integer"); if (n->kind == REAL_TY) return std::string("real"); if (n->kind == BOOL_TY) return std::string("boolean"); if (n->kind == BIT_TY) return std::string("bit___type"); if (n->kind == TYPE_ID) return n->id; else { printMessage(ERRORm, "typeToName: bad arg " + kindString(n->kind)); return std::string("unknown___type"); } } // Expects hyps to be AND(h1 ... hk) or NOT AND(h1 ... hk) where k >= 0. // If k = 0 returns concl // If k = 1 return h1 => concl // If k > 1 return AND(h1 ... 
hk) => concl Node* mkGeneralImplies(Node* hyps, Node* concl) { if (hyps->kind == NOT) { Node* conj = hyps->child(0); if (conj->arity() == 0) { return new Node(TRUE); } else if (conj->arity() == 1) { return new Node(IMPLIES, new Node(NOT, conj->child(0)), concl); } else { return new Node(IMPLIES, hyps, concl); } } else { if (hyps->arity() == 0) { return concl; } else if (hyps->arity() == 1) { return new Node(IMPLIES, hyps->child(0), concl); } else { return new Node(IMPLIES, hyps, concl); } } } // Polymorphic nodes used in FDL. Expectation is that all polymorphism is // resolved in normalisation to Standard Form. bool isPolymorphicNode(Node* n) { switch(n->kind) { case LT : case GT: case LE: case GE: case UMINUS: case SUCC: case PRED: case PLUS: case MINUS: case TIMES: case EXP: case SQR: case ABS: return true; case ARR_ELEMENT: case ARR_UPDATE: return n->id.size() == 0; case RCD_ELEMENT: // RCD_ELEMENT{rcd-id} exp // RCD_ELEMENT{rcd-id} exp (TYPE_PARAM{type-id}) return n->arity() == 1; case TERM_EQ: case TERM_NE: case EQ: case NE: case RCD_UPDATE: // RCD_UPDATE{rcd-id} exp val // RCD_UPDATE{rcd-id} exp val (TYPE_PARAM{type-id}) return n->arity() == 2; case ITE: return n->arity() == 3; default: return false; } } spark-2012.0.deb/victor/vct/src/Makefile0000644000175000017500000002012111753202341016762 0ustar eugeneugen#========================================================================== # Makefile for Victor sources and CSV-file tools #========================================================================== # This file is part of Victor: a SPARK VC Translator and Prover Driver. # Copyright (C) 2009, 2010 University of Edinburgh # Author: Paul Jackson # Victor is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or (at # your option) any later version. # Victor is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # A copy of the GNU General Public License V3 can be found in file # LICENSE.txt and online at http://www.gnu.org/licenses/. #========================================================================== # Tested with GNU Make v3.81. # References below to a GNU Make manual are to the manual for this version. # Tools used: # g++/gcc V4.4.5 # bison V2.4.1 # flex V2.5.35 # External libraries used: # gmp V4.3.1 #========================================================================== # Use #========================================================================== # Normal build # make vct # Build with alternate optimisation level # # make vct OPT= # # should be one of "none", "1", "2" or "3". # Default level is 2. # Build with debugging version of CVC3 # make DEBUG=on vct #========================================================================== # User customisation #========================================================================== # Linking with Yices # ------------------ # If linking with Yices, ensure following line is uncommented and value # is set appropriately. If not linking with Yices, ensure line is commented # out. # # The named directory is expected to contain lib/ and include/ # directories, as set up by the Yices installation. 
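# Alternatively (assuming GNU make's usual behaviour that command-line
# variable assignments take effect just like an assignment made here), the
# directory can be supplied when invoking make, without editing this file,
# for example:
#
#   make vct YICESDIR=$HOME/smt/yices/yices-1.0.29
#
# The path above is only an illustration; use the location of your own
# Yices installation.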
# YICESDIR=/home/pbj/smt/yices/yices-1.0.29 # Linking with CVC3 # ----------------- # If linking with CVC3, ensure following line is uncommented and value # is set appropriately. If not linking with CVC3, ensure line is commented # out. # The named directory is expected to contain lib/ and include/ # directories, as set up by the cvc3 installation. # If also a cvc3 library is created with debug options turned on, the named # directory should also contain at least the directory debug/lib/ # CVC3DIR=/home/pbj/smt/cvc3/r2.2 # CVC3DIR=/home/pbj/smt/cvc3/r2011-08-24 #========================================================================== # Main make code #========================================================================== # Nothing here should need modifying by user. VCTEXEC=vct # Name of Victor executable DEBUG=false # -Wall rather noisy. E.g. size() method returns unsigned int and # then get complaint about signed/unsigned comparisons. # Fixed for now but just adding (int) coercions to code. # # Wanting from flags: # detecting control reaching end of functions with implicit return # Is allowed by compiler when have T* return types. # unused vars. # Expected values of OPT set on make command line are "none", "1", # "2" or "3". OPT=2 ifeq ($(OPT),none) CXXFLAGS= -g -Wall else CXXFLAGS= -g -Wall -O$(OPT) endif CPPFLAGS= LDFLAGS= -Wall ifdef STATIC_GMP_MAC LDLIBS= /usr/local/lib/libgmpxx.a /usr/local/lib/libgmp.a else ifdef STATIC_GMP LDLIBS= -Xlinker -Bstatic -lgmpxx -lgmp -Xlinker -Bdynamic -static-libstdc++ -s else LDLIBS= -lgmpxx -lgmp endif endif OBJS= \ node.o \ node-utils.o \ lex.yy.o \ pdriver.o \ box.o \ formatter.o \ pprinter.o \ typesort.o \ utility.o \ bignum.o \ context.o \ normalisation.o \ processor.o \ translation.o \ main.o \ smt-driver.o \ rule-filter.o \ smtlib-driver.o \ smtlib2-driver.o \ isab-driver.o ifdef YICESDIR CPPFLAGS+= -I $(YICESDIR)/include -D LINK_YICES LDFLAGS+= -L $(YICESDIR)/lib # Yices documentation says to also use -lstdc++ and -lgmp with gcc. # However here # - assume with g++ that stdc++ is always linked in # - assume that since gmp is already statically linked in we don't need -lgmp LDLIBS+= -lyices OBJS+= yices-driver.o endif ifdef CVC3DIR ifeq ($(DEBUG),false) CPPFLAGS+= -I $(CVC3DIR)/include/cvc3 -D LINK_CVC3 LDFLAGS+= -L $(CVC3DIR)/lib else CPPFLAGS+= -I $(CVC3DIR)/debug/include/cvc3 -D LINK_CVC3 -D_CVC3_DEBUG_MODE LDFLAGS+= -L $(CVC3DIR)/debug/lib endif # CVC3 needs to be linked with gmp library. This library is already linked # in by default, so no need to include it here. LDLIBS+= -lcvc3 OBJS+= cvc-driver.o endif # Putting parser.tab.o last in OBJS list ensures that `include' # directive below will force running of bison before trying to extract # dependency information for .cc files that include .hh files # generated by bison: if these auto generated files are missing, the # g++ -MM dependency extraction doesn't work properly. Couldn't find # any explanation in Make manual as to why Make tries to regenerate # last included makefile first. OBJS+= parser.tab.o # All sources SRCS= $(OBJS:.o=.cc) # All dependency files DFILES= $(SRCS:.cc=.d) # $^ = all prerequisites # $@ = rule target vct: $(OBJS) $(CXX) -o ../bin/$(VCTEXEC) $(LDFLAGS) $^ $(LDLIBS) lex.yy.cc: lexer.ll flex -olex.yy.cc lexer.ll # Option "-r state" writes file parser.output describing parser. # Good for debugging parser. parser.tab.hh parser.tab.cc: parser.yy bison -r state parser.yy # Rules for compiling individual object files. 
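# As a concrete illustration of the dependency scheme described below
# (the file names here are only an example): if node.cc includes node.hh
# and utility.hh, the generated node.d contains the single commandless rule
#
#   node.o node.d : node.cc node.hh utility.hh
#
# which the `include' directive further down pulls back into this Makefile,
# so both the object file and the dependency file are remade whenever one
# of those headers changes.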
# # For each .cc, the rule below creates a .d file containing # a single commandless rule of form # # .o .d : .cc .hh ... .hh # # where .hh ... .hh are all the header files included by .cc. # # Such rules provide additional prerequisites to the implicit rules for # generating .o and .d files. # # See the Make manual # # Writing Rules/Generating Prerequisites Automatically, p40 # Using Implicit Rules, p101, # # for details. # The "@" suppresses echoing of the commands. See # # Writing Commands / Command Echoing, p51. # Once consequence of this approach is that make can force bison/lex to # run when checking dependencies, even if one tries make -n! # This is problem if building on platform with wrong version of bison/flex # and only supplying .cc and .hh files. %.d: %.cc @echo Making dependency file $@ @set -e; rm -f $@; \ $(CXX) -MM $(CPPFLAGS) $< > $@.$$$$; \ sed 's,\($*\)\.o[ :]*,\1.o $@ : ,g' < $@.$$$$ > $@; \ rm -f $@.$$$$ # The include directive not only includes the named .d files, but also # attempts to generate missing .d files using the above rule. include $(DFILES) # Implicit rule (from make -p) used for compiling .cc to .o is: # # %.o: %.cc # $(COMPILE.cc) $(OUTPUT_OPTION) $< # where # COMPILE.cc = $(CXX) $(CXXFLAGS) $(CPPFLAGS) $(TARGET_ARCH) -c # OUTPUT_OPTION = -o $@ # CXX = g++ .PHONY: clean save-build test-bignum basicclean: -rm -f $(OBJS) *.d clean: -rm -f $(OBJS) \ lex.yy.cc \ parser.tab.cc parser.tab.hh \ location.hh position.hh stack.hh parser.output \ *.d save-build: -mkdir build cp parser.tab.cc parser.tab.hh location.hh position.hh stack.hh \ lex.yy.cc parser.output ../bin/* build/ test-bignum: test-bignum.cc bignum.o utility.o node.o \ pprinter.o box.o formatter.o g++ -g -o test-bignum -lgmpxx -lgmp $^ # Utility functions for manipulating CSV files. csvproj: csvproj.cc utility.o g++ -o ../bin/csvproj $^ csvfilt: csvfilt.cc utility.o g++ -o ../bin/csvfilt $^ csvmerge: csvmerge.cc utility.o g++ -o ../bin/csvmerge $^ csvisect: csvisect.cc utility.o g++ -o ../bin/csvisect $^ .PHONY: csvutils csvutils: csvproj csvfilt csvmerge csvisect # Run command for solver, with watchdog killing solver if no activity on # stdout for a specified period. watchdog: watchdogrun.c gcc -o ../run/watchdogrun $^ # END OF FILE spark-2012.0.deb/victor/vct/src/node-utils.cc0000644000175000017500000000454111753202341017724 0ustar eugeneugen//========================================================================== //========================================================================== // NODE-UTILS.CC //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. 
*/ #include using std::string; #include using std::endl; #include "node-utils.hh" #include "utility.hh" #include "formatter.hh" using namespace z; //======================================================================== // Translator base class //======================================================================== // // Provides error reporting and handling for Node->Node translation functions. Node* Translator::translate (Node* oldN) { errorFlag = false; errorMessages << name + " translator error(s): " << endl; Node* result = translateAux(oldN); if (errorFlag) { Formatter::setFormatter(VanillaFormatter::getFormatter()); // Printing whole parse trees is too verbose. Was OK at one // point when decls, rules, hyps or concls were translated one // at a time, but now whole units translated at once. /* errorMessages << "translation input: " << endl; errorMessages << *oldN << endl; errorMessages << "translation result: " << endl; errorMessages << *result << endl; */ throw std::runtime_error(errorMessages.str()); } return result; } void Translator::error (const string& message) { errorFlag = true; errorMessages << message << endl; return; } spark-2012.0.deb/victor/vct/src/smtlib2-driver.cc0000644000175000017500000010607511753202341020513 0ustar eugeneugen//========================================================================== //========================================================================== // SMTLIB-DRIVER.CC //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. 
*/ //========================================================================== #include using std::endl; using std::cerr; #include // for exit() and system() #include #include using std::vector; #include "smtlib2-driver.hh" #include "node-utils.hh" #include "formatter.hh" #include "pprinter.hh" #include // For set operations using namespace z; //========================================================================== // Pretty print SMTLib2 abstract syntax //========================================================================== class SMTLib2Formatter : public Formatter { public: virtual Box& addSyntax(z::Kind k, const string& id, vector& bs); static Formatter* getFormatter(); private: static Formatter* instance; SMTLib2Formatter() {}; }; Box& SMTLib2Formatter::addSyntax(z::Kind k, const std::string& id, vector& bs) { switch (k) { // Top level case(SCRIPT): return PP::simpleVSeq(bs); case(LOGIC): return box("(set-logic " + id + ")"); case(SET_OPTION): return makeStringAp("set-option :" + id, bs); case(ASSERT): return makeStringAp("assert", bs); case(CHECK): return makeStringAp("check-sat", bs); case(PUSH): return box("(push 1)"); case(POP): return box("(pop 1)"); // Declarations case(DECL_TYPE): return box("(declare-sort " + id + " 0)"); case(DEF_TYPE): return makeStringAp("define-sort " + id + " ()", bs); case(DECL_FUN): return makeStringAp("declare-fun " + id, bs); case(DECL_CONST): return makeStringAp("declare-fun " + id + " ()", bs); // Sorts case(TYPE_ID): return box(id); case(INT_TY): return box("Int"); case(REAL_TY): return box("Real"); case(BOOL_TY): return box("Bool"); // Core logic terms and consts case(OR): return makeStringAp("or", bs); case(AND): return makeStringAp("and", bs); case(NOT): return makeStringAp("not", bs); case(IMPLIES): return makeStringAp("=>", bs); case(IFF): return makeStringAp("=", bs); case(TRUE): return box("true"); case(FALSE): return box("false"); case(FORALL): return makeStringAp("forall", bs); case(EXISTS): return makeStringAp("exists", bs); case(EQ): return makeStringAp("=", bs); case(DISTINCT): return makeStringAp("distinct", bs); case(ITE): return makeStringAp("ite", bs); // SEQ is not quite prettiest: will indent vertical sequence // extra amount. 
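// For orientation, a sketch of the kind of script that the top-level
// cases above print out (a made-up goal, not taken from a real VCG file):
//
//   (set-info :spark-source "p/foo")
//   (set-option :print-success false)
//   (set-logic AUFNIRA)
//   (declare-fun x () Int)
//   (assert (<= 0 x))
//   (assert (not (< x 100)))
//   (check-sat)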
case(SEQ): return PP::makeHVSeq("", "(", "", ")", bs); case(DECL): return makeStringAp(id, bs); case(FUN_AP): return makeStringAp(id, bs); case(CONST): return box(id); case(VAR): return box(id); // Arithmetic operators and constants case(I_UMINUS): return makeStringAp("-", bs); case(I_MINUS): return makeStringAp("-", bs); case(I_PLUS): return makeStringAp("+", bs); case(I_TIMES): return makeStringAp("*", bs); case(IDIV_E): return makeStringAp("div", bs); case(MOD_E): return makeStringAp("mod", bs); case(I_ABS): return makeStringAp("abs", bs); case(I_LE): return makeStringAp("<=", bs); case(I_LT): return makeStringAp("<", bs); case(NATNUM): return box(id); case(R_UMINUS): return makeStringAp("-", bs); case(R_MINUS): return makeStringAp("-", bs); case(R_PLUS): return makeStringAp("+", bs); case(R_TIMES): return makeStringAp("*", bs); case(RDIV): return makeStringAp("/", bs); case(R_LE): return makeStringAp("<=", bs); case(R_LT): return makeStringAp("<", bs); case(TO_REAL): return makeStringAp("to_real", bs); case(TO_INT): return makeStringAp("to_int", bs); case(IS_INT): return makeStringAp("is_int", bs); case(SET_INFO): return makeStringAp("set-info :" + id, bs); case(INFO_STR): return box('"' + id + '"'); default: printMessage(ERRORm, "SMTLibFormatter::addSyntax: " + ENDLs + "Encountered unsupported kind " + kindString(k)); return PP::makeHVSeq("***" + kindString(k) + "{" + id + "}***", "[", ";", "]", bs ); } } Formatter* SMTLib2Formatter::getFormatter() { if (instance == 0) instance = new SMTLib2Formatter; return instance; } // Storage declarations for static class member. Formatter* SMTLib2Formatter::instance = 0; //======================================================================== // Standardise identifiers //======================================================================== /* 1. Type renaming. If type-id clashes with SMTLIB2 reserved word or type defined in one of SMTLIB2 standard theories, add "~" suffix. 2. Function renaming If function id clashes with reserved word or (possibly nullary) function defined in one of SMTLIB2 standard theories, add "~" suffix. 3. Constant renaming (Including bound variables in notion of constant here.) If constant id clashes with 1. reserved word, or (possibly nullary) function defined in one of SMTLIB2 standard theories, or 2. Any of function ids declared in unit, But not 1 & 2, add "~" suffix. If 1 & 2, add "~~" suffix. */ // funIdsInUnit: functions declared in unit // constIdsInUnit: constants declared in unit at top level and // in quantifier bindings class StandardiseIdsFun { public: StandardiseIdsFun(Node* unit); // Sets for use in standardising ids. 
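// For illustration (a made-up unit, not taken from a real VCG file):
// a constant declared as "abs" clashes with the SMTLIB2 standard function
// "abs" and so is renamed "abs~"; if the unit also declared a function
// "abs", the function would be renamed "abs~" and the constant "abs~~",
// keeping the two distinct after renaming.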
set typeIdsToRename; set funIdsToRename; set constIdsToRename1; set constIdsToRename2; void operator() (Node* n); }; /* #include using std::endl; using std::cout; using std::cerr; */ StandardiseIdsFun::StandardiseIdsFun(Node* unit) { set typeIdsInUnit; set funIdsInUnit; set constIdsInUnit; Node* decls = unit->child(0); for (int i = 0; i != decls->arity(); i++) { Node* decl = decls->child(i); if (decl->kind == DECL_FUN) funIdsInUnit.insert(decl->id); else if (decl->kind == DEF_CONST) constIdsInUnit.insert(decl->id); else if (decl->kind == DEF_TYPE) typeIdsInUnit.insert(decl->id); } set boundVars = gatherBoundVars(unit); constIdsInUnit.insert(boundVars.begin(), boundVars.end()); // Initialise reservedWords and stdFunIds set reservedWords; set& rw = reservedWords; rw.insert("par"); rw.insert("NUMERAL"); rw.insert("DECIMAL"); rw.insert("STRING"); rw.insert("_"); rw.insert("as"); rw.insert("let"); rw.insert("forall"); rw.insert("exists"); rw.insert("set"); rw.insert("push"); rw.insert("pop"); rw.insert("assert"); rw.insert("exit"); set stdFunIds; set& sfi = stdFunIds; sfi.insert("distinct"); sfi.insert("true"); sfi.insert("false"); sfi.insert("not"); sfi.insert("and"); sfi.insert("or"); sfi.insert("xor"); sfi.insert("ite"); sfi.insert("div"); sfi.insert("mod"); sfi.insert("abs"); sfi.insert("to_real"); sfi.insert("to_int"); sfi.insert("is_int"); sfi.insert("select"); sfi.insert("store"); // typeIdsToRename = typeIdsInUnit intersect (reservedWords U stdTypeIds) set s0; s0.insert(reservedWords.begin(), reservedWords.end()); s0.insert("Bool"); s0.insert("Int"); s0.insert("Real"); setIsect(typeIdsInUnit, s0, typeIdsToRename); // s1 = reservedWords U stdFunIds set s1; setUnion(reservedWords, stdFunIds, s1); // funIdsToRename = funIdsInUnit intersect (reservedWords U stdFunIds) // = funIdsInUnit intersect s1 setIsect(funIdsInUnit, s1, funIdsToRename); // s2 = symdiff funIdsInUnit (reservedWords U stdFunIds) // = symdiff funIdsInUnit s1 set s2; setSymDiff(funIdsInUnit, s1, s2); // constIdsToRename1 // = constIdsInUnit // intersect (symdiff (reservedWords U StdFunIds) // funIdsInUnit) // = constIdsInUnit intersect s2 setIsect(constIdsInUnit, s2, constIdsToRename1); // constIdsToRename2 = // constIdsInUnit intersect funIdsToRename setIsect(constIdsInUnit, funIdsToRename, constIdsToRename2); /* cout << endl; cout << "typeIdsToRename = " << typeIdsToRename.size() << endl; cout << "funIdsToRename = " << funIdsToRename.size() << endl; cout << "constIdsToRename1 = " << constIdsToRename1.size() << endl; cout << "constIdsToRename2 = " << constIdsToRename2.size() << endl; */ return; } void StandardiseIdsFun::operator() (Node* n) { switch(n->kind) { case(TYPE_ID): case(DEF_TYPE): case(DECL_TYPE): { if (typeIdsToRename.find(n->id) != typeIdsToRename.end()) n->id.append("~"); return; } case(FUN_AP): case(DECL_FUN): { if (funIdsToRename.find(n->id) != funIdsToRename.end()) n->id.append("~"); return; } case(CONST): case(VAR): case(DECL): // In quantifiers case(DECL_CONST): case(DEF_CONST): { if (constIdsToRename1.find(n->id) != constIdsToRename1.end()) n->id.append("~"); else if (constIdsToRename2.find(n->id) != constIdsToRename2.end()) n->id.append("~~"); return; } default: return; } return; } //======================================================================== // Virtual functions for SMTLIB2Driver class //======================================================================== Node* SMTLib2Driver::translateUnit(Node* unit) { // standardise identifiers StandardiseIdsFun f(unit); unit->mapOver(f); // Update 
declarations // DEF_TYPE{} --> DECL_TYPE{} // DEF_CONST{} --> DECL_CONST{} // DEF_TYPE{} Untouched // DECL_FUN{} (SEQ ) Untouched Node* decls = unit->child(0); for (int i = 0; i != decls->arity(); i++) { Node* decl = decls->child(i); if (decl->kind == DEF_TYPE && decl->arity() == 0) { decl->kind = DECL_TYPE; } else if (decl->kind == DEF_CONST && decl->arity() == 1) { decl->kind = DECL_CONST; } } // CVC3 2.4 is not recognising primitive SMTLIB2 to_real operator. if (option("smtlib2-add-to_real-decl")) { decls->addChild(new Node(DECL_FUN,"to_real", new Node (SEQ, new Node(INT_TY)), new Node (REAL_TY))); } return unit; } // If watchdog timer used for timeouts, then time limit is for each query. bool SMTLib2Driver::resourceLimitsForQuerySet() { return !option("watchdog-timeout"); } void SMTLib2Driver::initQuerySet(const string& unitName, int goalNum, int conclNum) { string fullGoalFileRoot = getFullGoalFileRoot("smtsolver", unitName, goalNum, conclNum); solverInputFileName = fullGoalFileRoot + ".smt2"; solverOutputFileName = fullGoalFileRoot + ".out"; solverErrorFileName = fullGoalFileRoot + ".err"; string logic(option("logic") ? optionVal("logic") : "AUFNIRA"); script = new Node(SCRIPT); script->addChild(new Node(SET_INFO, "spark-source", new Node(INFO_STR, unitName))); if (!option("smtlib2-omit-set-option-command")) { script->addChild(new Node(SET_OPTION, "print-success", new Node(FALSE))); } if (option("smtlib2-soft-timeout")) { script->addChild(new Node(SET_OPTION, "soft-timeout", new Node(NATNUM, optionVal("smtlib2-soft-timeout"))) ); } script->addChild(new Node(LOGIC,logic)); return; } void SMTLib2Driver::initGoal(const string& unitName, int goalNum, int conclNum) { initQuerySet(unitName, goalNum, conclNum); } void SMTLib2Driver::addDecl(Node* decl) { script->addChild(decl); } void SMTLib2Driver::addRule(Node* rule, const string& hId, string& remarks) { script->addChild(new Node(ASSERT,rule)); } void SMTLib2Driver::addHyp(Node* hyp, const string& hId, string& remarks) { script->addChild(new Node(ASSERT,hyp)); } void SMTLib2Driver::addConcl(Node* concl, string& remarks) { script->addChild(new Node(ASSERT, new Node(NOT, concl))); } void SMTLib2Driver::push() { script->addChild(new Node(PUSH)); } SMTLib2Driver::Status SMTLib2Driver::check(string& remarks) { script->addChild(new Node(CHECK)); return UNCHECKED; } void SMTLib2Driver::pop() { script->addChild(new Node(POP)); } //--------------------------------------------------------------------------- // check //--------------------------------------------------------------------------- void SMTLib2Driver::finishSetup() { script->addChild(new Node(CHECK)); outputQuerySet(); return; } void SMTLib2Driver::outputQuerySet() { ofstream solverInput; solverInput.open(solverInputFileName.c_str()); if (solverInput.fail()) { cerr << endl << "Error on trying to open file " << solverInputFileName << endl; exit(1); } Formatter::setFormatter(SMTLib2Formatter::getFormatter()); solverInput << *script << endl; solverInput.close(); } //--------------------------------------------------------------------------- // check //--------------------------------------------------------------------------- bool SMTLib2Driver::checkGoal(string& remarks) { return runQuerySet(remarks); } bool SMTLib2Driver::runQuerySet(string& remarks) { string cmd; string cmdOptions; if (! 
(option("prover") || option("prover-command") )) { return false; } if (option("prover-command")) { cmd = optionVal("prover-command") + " "; } // Must be case now that prover option set else if (optionVal("prover") == "z3") { cmd = "z3"; if (option("z3-fourier-motzkin")) cmdOptions += "FOURIER_MOTZKIN_ELIM=true "; if (option("timeout")) // Was not supported in Z3 v1.3 Linux. cmdOptions += "SOFT_TIMEOUT=" + optionVal("timeout") + " "; } else if (optionVal("prover") == "cvc3") { cmd = "cvc3"; cmdOptions = "-lang smt2 "; if (option("timeout")) cmdOptions += "-timeout " + optionVal("timeout") + " "; if (option("resourcelimit")) cmdOptions += "-resource " + optionVal("resourcelimit") + " "; } else { printMessage(ERRORm, "Unsupported prover option: " + optionVal("prover") + ENDLs); return false; } // Modify cmd for I/O files and detecting timeouts. if (option("shell-timeout")) { // Use shell-level timeout utility // This will accept integer or fixed point time in sec. cmd = "./timeout.sh " + optionVal("shell-timeout") + " " + cmd + " " + cmdOptions + " 1> " + solverOutputFileName + " 2> " + solverErrorFileName; } else if (option("ulimit-timeout")) { // Use bash built-in timeout facility // This accepts integer time in sec. // cmd = "ulimit -t " + optionVal("ulimit-timeout") + " ; " + cmd; // Run in a subshell (y using enclosing ()) so we can catch output // to stderr on termination and avoid this output polluting // output from running vct. cmd = "( ulimit -t " + optionVal("ulimit-timeout") + " ; " + cmd + " " + cmdOptions + solverInputFileName + " )" + " 1> " + solverOutputFileName + " 2> " + solverErrorFileName; } else if (option("watchdog-timeout")) { // Use watchdogrun timeout facility: // Usage: // watchdogrun outfile errfile timeout cmd arg1 ... argn // timeout is read as floating-point number. cmd = "./watchdogrun " + solverOutputFileName + " " + solverErrorFileName + " " + optionVal("watchdog-timeout") + " " + cmd + " " + cmdOptions + " " + solverInputFileName; } else { cmd += " " + solverInputFileName + " 1> " + solverOutputFileName + " 2> " + solverErrorFileName; } if (option("doublerun")) cmd = cmd + " ; " + cmd; printMessage(INFOm, "Running command" + ENDLs + cmd + ENDLs); // Should rename this to terminationStatus. exitStatus is just // part of the terminationStatus. exitStatus = std::system(cmd.c_str()); // For SMT mode, exit status is not reliable guide for something going // wrong. E.g. z3 -smt returns exit status 103. printMessage(INFOm, "Exit status is " + intToString(exitStatus)); return false; } //--------------------------------------------------------------------------- // analyse Exit Status of solver //--------------------------------------------------------------------------- // Returns true iff exit code indicates that resource limit reached. bool SMTLib2Driver::analyseExitStatus(int exitStatus, string& remarks) { bool resourceLimitReached = false; // Detecting termination signals, not on windows platform: #ifndef _WIN32 /* Code here discovered by trial and error and reading man pages. Man page for getrlimit makes clear that a process is terminated using a KILL signal when the time limit is reached. system(3) man page says that return status of system call is in format specified on wait(2) man page. There is says: WIFEXITED(status) returns true if the child terminated normally, that is, by call- ing exit(3) or _exit(2), or by returning from main(). WEXITSTATUS(status) returns the exit status of the child. 
This consists of the least significant 8 bits of the status argument that the child specified in a call to exit(3) or _exit(2) or as the argument for a return statement in main(). This macro should only be employed if WIFEXITED returned true. WIFSIGNALED(status) returns true if the child process was terminated by a signal. WTERMSIG(status) returns the number of the signal that caused the child process to terminate. This macro should only be employed if WIFSIGNALED returned true. For z3 3.0 on Scientific Linux 6, are seeing: on unsat: exitStatus = 0 WIFSIGNALED = 0 WTERMSIG = 0 WIFEXITED = 1 WEXITSTATUS = 0 on ulimit timeout: exitStatus = 35072 (= 137 * 256) WIFSIGNALED = 0 WTERMSIG = 0 WIFEXITED = 1 WEXITSTATUS = 137 (bits 15-8 of exitStatus, as remarked on wait(2) page) WIFSIGNALED is *not* being set on a kill, and WTERMSIG is not pulling out any signal number. Some web pages remark on exit codes of form 128 + signumber being used to flag processes terminated by signals. SIGKILL has value 9, hence this 137. Can see this 137 by e.g. doing: ulimit -t 1 ; z3 -smt x.smt ; echo $? Watchdogrun routine uses SIGTERM (15) to kill z3. This results in z3 returning 128+15 = 143. */ /* printMessageWithHeader ("DEBUG", "exitStatus = " + intToString(exitStatus) + ENDLs + "WIFSIGNALED = " + intToString(WIFSIGNALED(exitStatus)) + ENDLs + "WTERMSIG = " + intToString(WTERMSIG(exitStatus)) + ENDLs + "WIFEXITED = " + intToString(WIFEXITED(exitStatus)) + ENDLs + "WEXITSTATUS = " + intToString(WEXITSTATUS(exitStatus)) ); */ if (WIFEXITED(exitStatus)) { int exitCode = WEXITSTATUS(exitStatus); if (exitCode == 128 + SIGKILL) { appendCommaString(remarks, "timeout (exit code 137)"); printMessage(INFOm, "Solver killed. Assume ulimit time limit reached."); resourceLimitReached = true; } else if (exitCode == 128 + SIGTERM) { appendCommaString(remarks, "timeout (exit code 143)"); printMessage(INFOm, "Solver killed. Assume by SIGTERM from watchdogrun."); resourceLimitReached = true; } else if (exitCode != EXIT_SUCCESS) { // EXIT_SUCCESS == 0 usually appendCommaString(remarks, "exit code " + intToString(exitCode)); } } else if (WIFSIGNALED(exitStatus)) { printMessage(WARNINGm, "Subprocess termination on signal " + intToString(WTERMSIG(exitStatus))); } else { printMessage(WARNINGm, "Unexpected subprocess exit status " + intToString(exitStatus)); } #endif return resourceLimitReached; } //--------------------------------------------------------------------------- // getResults //--------------------------------------------------------------------------- SMTLib2Driver::Status SMTLib2Driver::getResults(string& remarks) { // Do not check output files if none were generated in first place if (! 
(option("prover") || option("prover-command") )) { return UNPROVEN; } // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Read in output and error files from solver // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ifstream solverOut (solverOutputFileName.c_str() ); ifstream solverErr (solverErrorFileName.c_str() ); if (!solverOut) { printMessage(ERRORm, "Cannot open output file " + solverOutputFileName); appendCommaString(remarks, "Outfile not found"); return ERROR; } if (!solverErr) { printMessage(ERRORm, "Cannot open error output file " + solverErrorFileName); appendCommaString(remarks, "Errorfile not found"); return ERROR; } vector solverOutput; vector solverErrOutput; { string line; while (getline(solverOut, line)) solverOutput.push_back(line); solverOut.close(); while (getline(solverErr, line)) solverErrOutput.push_back(line); solverErr.close(); } // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Initialise flags for tracking run status // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // flags for processing of standard error file bool seenTimeout = false; bool seenWarning = false; bool seenUnexpectedErrOutput = false; // flags for processing of standard output file bool seenSatOutput = false; bool seenUnsatOutput = false; bool seenUnknownOutput = false; bool seenUnexpectedOutput = false; // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Check over stderr output from solver // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // First inspect error output. Check for // - warning messages than can be logged // - Timeout termination messages // // First cut, just echo output to log file. Minimally process. for (int i = 0; i != (int) solverErrOutput.size(); i++) { string s = solverErrOutput.at(i); if (tokeniseString(s).size() == 0) { continue; } // Detect message produced by ulimit -t killing process // See below for alternate method of detecting this timeout. // Approach here works on Scientific Linux 5.3 but not // Ubuntu 10.4.2. if (hasPrefix(s, "sh: line 1:") && hasSubstring(s, "Killed")) { // appendCommaString(remarks, "timeout (ulim)"); // printMessage(INFOm, // "SMTLib solver reached ulimit time limit"); // seenTimeout = true; continue; } // z3 at least uses this. 
if (hasPrefix(s,"WARNING:")) { if (option("log-smtsolver-warnings")) { seenWarning = true; printMessage(WARNINGm, "Warning message from SMTLib solver" + ENDLs + s); } continue; } seenUnexpectedErrOutput = true; } if (seenWarning) appendCommaString(remarks, "warning(s)"); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Check over stdout output from solver // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - for (int i = 0; i != (int) solverOutput.size(); i++) { string s = solverOutput.at(i); vector line = tokeniseString(s); if (line.size() == 0) { continue; } if (line.size() == 1 && line.at(0) == "unsat") { seenUnsatOutput = true; } else if (line.size() == 1 && line.at(0) == "sat") { seenSatOutput = true; } else if (line.size() == 1 && line.at(0) == "unknown") { seenUnknownOutput = true; } else { seenUnexpectedOutput = true; } } if (analyseExitStatus(exitStatus, remarks)) { seenTimeout = true; } // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Report on output and decide return status // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - if (seenUnexpectedErrOutput || seenUnexpectedOutput) { string outMessage = concatStrings(solverOutput, ENDLs); string errMessage = concatStrings(solverErrOutput, ENDLs); printMessage(ERRORm, "Error(s) on prover output." + ENDLs + "On STDOUT:" + ENDLs + outMessage + ENDLs + "On STDERR: " + ENDLs + errMessage + ENDLs + "END of output"); return ERROR; } // All output expected. if (seenTimeout) return RESOURCE_LIMIT; if (seenSatOutput || seenUnknownOutput) return UNPROVEN; if (seenUnsatOutput) return TRUE; // Both outputs empty. printMessage(ERRORm, "Prover standard output and error output both empty"); return ERROR; } //--------------------------------------------------------------------------- // getRunResults //--------------------------------------------------------------------------- vector SMTLib2Driver::getRunResults(int numQueries) { vector results; // Do not check output files if none were generated in first place if (! 
(option("prover") || option("prover-command") )) { results.push_back(QueryStatus(UNPROVEN,"prover not run","")); return results; } // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Read in output and error files from solver // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ifstream solverOut (solverOutputFileName.c_str() ); ifstream solverErr (solverErrorFileName.c_str() ); if (!solverOut) { printMessage(ERRORm, "Cannot open output file " + solverOutputFileName); results.push_back(QueryStatus(ERROR, ".out file not found","")); return results; } if (!solverErr) { printMessage(ERRORm, "Cannot open error output file " + solverErrorFileName); results.push_back(QueryStatus(ERROR, ".err file not found","")); return results; } vector solverOutput; vector solverErrOutput; { string line; while (getline(solverOut, line)) solverOutput.push_back(line); solverOut.close(); while (getline(solverErr, line)) solverErrOutput.push_back(line); solverErr.close(); } // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Initialise flags for tracking run status // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // flags for processing of standard error file bool seenTimeout = false; bool seenWarning = false; bool seenUnexpectedErrOutput = false; // flags for processing of standard output file bool seenUnexpectedOutput = false; // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Check over stderr output from solver // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // First inspect error output. Check for // - warning messages than can be logged // - Timeout termination messages // // First cut, just echo output to log file. Minimally process. for (int i = 0; i != (int) solverErrOutput.size(); i++) { string s = solverErrOutput.at(i); if (tokeniseString(s).size() == 0) { continue; } // Detect message produced by ulimit -t killing process // See below for alternate method of detecting this timeout. // Approach here works on Scientific Linux 5.3 but not // Ubuntu 10.4.2. if (hasPrefix(s, "sh: line 1:") && hasSubstring(s, "Killed")) { // appendCommaString(remarks, "timeout (ulim)"); // printMessage(INFOm, // "SMTLib solver reached ulimit time limit"); // seenTimeout = true; continue; } // z3 at least uses this. if (hasPrefix(s,"WARNING:")) { if (option("log-smtsolver-warnings")) { seenWarning = true; printMessage(WARNINGm, "Warning message from SMTLib solver" + ENDLs + s); } continue; } seenUnexpectedErrOutput = true; } // If there are multiple goals, we really don't know which one the // warning messages are associated with. If we were to do // anything here, we would be conservative and record warning // message with all the goals. 
// if (seenWarning) appendCommaString(remarks, "warning(s)"); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Check over stdout output from solver // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - for (int i = 0; i != (int) solverOutput.size(); i++) { string s = solverOutput.at(i); vector line = tokeniseString(s); if (line.size() == 0) { continue; } if (line.size() == 1 && line.at(0) == "unsat") { results.push_back(QueryStatus(TRUE,"","")); } else if (line.size() == 1 && line.at(0) == "sat") { results.push_back(QueryStatus(UNPROVEN,"","")); } else if (line.size() == 1 && line.at(0) == "unknown") { results.push_back(QueryStatus(UNPROVEN,"","")); } else { seenUnexpectedOutput = true; } } // Make the assumption that the solver outputs "unknown" on stdout if // it is killed because of resource limit. This seems to be true of z3 // If not true, then an extra QueryStatus should be added to results. // NOT TRUE string exitStatusRemarks; if (analyseExitStatus(exitStatus, exitStatusRemarks)) { seenTimeout = true; } /* if (seenTimeout) { if (results.back().status == TRUE) { printMessage(ERRORm, "Resource limit detected on true goal. Something has gone wrong"); } results.back().status = RESOURCE_LIMIT; results.back().remarks = exitStatusRemarks; } */ if (seenTimeout) { results.push_back(QueryStatus(RESOURCE_LIMIT,exitStatusRemarks,"")); } // Ensure at least 1 result reported if (results.size() == 0) { results.push_back(QueryStatus(ERROR,"","")); } // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Report on output and decide return status // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - if (seenUnexpectedErrOutput || seenUnexpectedOutput) { string outMessage = concatStrings(solverOutput, ENDLs); string errMessage = concatStrings(solverErrOutput, ENDLs); printMessage(ERRORm, "Error(s) on prover output." + ENDLs + "On STDOUT:" + ENDLs + outMessage + ENDLs + "On STDERR: " + ENDLs + errMessage + ENDLs + "END of output"); // It's hard to know which of results an error is associated with. // Just pick to report it with the first. 
results.front().status = ERROR; } if ((int) results.size() > numQueries) { printMessage(ERRORm, "getRunResults is reporting more results than queries"); } return results; } //--------------------------------------------------------------------------- // finaliseQuerySet //--------------------------------------------------------------------------- void SMTLib2Driver::finaliseQuerySet() { if (option("delete-working-files")) { tryRemoveFile(solverInputFileName); tryRemoveFile(solverOutputFileName); tryRemoveFile(solverErrorFileName); } return; } //--------------------------------------------------------------------------- // finaliseGoal //--------------------------------------------------------------------------- void SMTLib2Driver::finaliseGoal() { finaliseQuerySet(); } //========================================================================= // END OF FILE //========================================================================= spark-2012.0.deb/victor/vct/src/lexer.ll0000644000175000017500000001502011753202341016774 0ustar eugeneugen /* ============================================================================= ============================================================================= LEXER.LL ============================================================================= ============================================================================= This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ /* Lexer for language used in fdl, rls and vcg files. */ /* Structure influenced by calc++ example distributed with Bison. */ %{ #include #include #include "parser.tab.hh" /* For token type */ #include "lexer.hh" /* For YY_DECL macro giving lexing function type */ // #include "pdriver.hh" #define yyterminate() return tok::FILE_END typedef yy::parser::token tok; %} %option noyywrap nounput batch debug DIGIT [0-9] ID [A-Za-z][A-Za-z0-9_]*[~]? PROLOGBLOCKCOMMENT "/*"([^*]+|[*]+[^*/])*"*"+"/" PROLOGLINECOMMENT "%".*$ FDLCOMMENT "{"[^}]*"}" WHITESPACE [ \t\n\r]+ VCGHEADER (.*\n){13} %x VCGInitial PrologBody FDLBody GoalOrigins %% %{ // Code for start of yylex to inject alternate start tokens into token // stream for different file types if (driver.at_start) { driver.at_start = false; if (driver.currentFileType == pdriver::FDL) return tok::START_FDL_FILE; if (driver.currentFileType == pdriver::RULE) return tok::START_RULE_FILE; else // driver.currentFileType == pdriver::VCG return tok::START_VCG_FILE; } %} <*>{WHITESPACE} /* eat up whitespace */ . 
/* ignore non-trigger characters in VCG file header */ ^"For" { BEGIN(GoalOrigins); return tok::FOR; } [^:]+ { BEGIN(PrologBody); yylval->sval = new std::string(yytext); return tok::GOAL_ORIGINS; } ^rule_family/" " { return tok::RULE_FAMILY; } { ":" { return tok::COLON; } "[" { return tok::LSB; } "]" { return tok::RSB; } "(" { return tok::LPAREN; } ")" { return tok::RPAREN; } "," { return tok::COMMA; } "&" { return tok::AMPERSAND; } ";" { return tok::SEMIC; } "=" { return tok::EQ; } "." { return tok::DOT; } ".." { return tok::DOTDOT; } requires { return tok::REQUIRES; } may_be_replaced_by { return tok::MAY_BE_REPLACED_BY; } may_be_deduced { return tok::MAY_BE_DEDUCED; } may_be_deduced_from { return tok::MAY_BE_DEDUCED_FROM; } are_interchangeable { return tok::ARE_INTERCHANGEABLE; } if {return tok::IF; } end {return tok::END; } task_type { // task_type can also be a valid identifier, so we need to // have a useful sval. yylval->sval = new std::string(yytext); return tok::TASK_TYPE; } title { // Like task_type, both a keyword and an identifier yylval->sval = new std::string(yytext); return tok::TITLE; } function { return tok::FUNCTION; } procedure { return tok::PROCEDURE; } type { return tok::TYPE; } var { return tok::VAR; } const { return tok::CONST; } array { return tok::ARRAY; } record { return tok::RECORD; } ":=" { return tok::ASSIGN; } of { return tok::OF; } "**" { return tok::STARSTAR; } "*" { return tok::STAR; } "/" { return tok::SLASH; } div { return tok::DIV; } mod { return tok::MOD; } "+" { return tok::PLUS; } "-" { return tok::MINUS; } "<>" { return tok::NE; } "<" { return tok::LT; } ">" { return tok::GT; } "<=" { return tok::LE; } ">=" { return tok::GE; } "<->" { return tok::IFF; } "->" { return tok::IMPLIES; } not { return tok::NOT; } and { return tok::AND; } or { return tok::OR; } for_some { return tok::FOR_SOME; } for_all { return tok::FOR_ALL; } "***" { return tok::TRIPLESTAR; } "!!!" { return tok::TRIPLEBANG; } "task_type_"{ID} | "function_"{ID} | "procedure_"{ID} { yylval->sval = new std::string(yytext); return tok::SUBPROG_ID; } "C"{DIGIT}+ { yylval->sval = new std::string(yytext); return tok::CONCL_ID; } "H"{DIGIT}+ { yylval->sval = new std::string(yytext); return tok::HYP_ID; } {ID} { yylval->sval = new std::string(yytext); return tok::ID; } {DIGIT}+ { yylval->sval = new std::string(yytext); return tok::NATNUM; } } { {PROLOGBLOCKCOMMENT} {} /* Skip comment */ {PROLOGLINECOMMENT} {} /* Skip comment */ } {FDLCOMMENT} {} /* Skip comment */ %% void pdriver::scan_begin () { yy_flex_debug = trace_scanning; if (!(yyin = fopen (file.c_str (), "r"))) error (std::string ("cannot open ") + file); // flex doesn't reset start condition on 2nd and subsequent scans, // so here we reset it explicitly. // Set start condition appropriate for file. 
// Doing this also addresses issue that flex doesn't reset start // condition on 2nd and subsequent scans, if (currentFileType == FDL) { BEGIN(FDLBody); } else if (currentFileType == RULE) { BEGIN(PrologBody); } else { // currentFileType == VCG BEGIN(VCGInitial); } // Set flag for triggering return of file-type-specific start token at_start = true; } void pdriver::scan_end () { fclose (yyin); } spark-2012.0.deb/victor/vct/src/isab-driver.cc0000644000175000017500000006064711753202341020061 0ustar eugeneugen//========================================================================== //========================================================================== // ISAB-DRIVER.CC //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ // TO FIX: // * standardising apart of function, constant, type and record fieldname ids // each other and Isabelle's reserved words. //========================================================================== #include using std::endl; using std::cerr; #include // for exit() and system() #include #include using std::vector; #include "isab-driver.hh" #include "node-utils.hh" #include "formatter.hh" #include "pprinter.hh" using namespace z; //========================================================================== // Pretty print Isabelle abstract syntax //========================================================================== class IsabFormatter : public Formatter { public: virtual Box& addSyntax(z::Kind k, const string& id, vector& bs); static Formatter* getFormatter(); private: static Formatter* instance; IsabFormatter(); static set reservedWords; string standardiseId(const string& s); }; Box& IsabFormatter::addSyntax(z::Kind k, const std::string& id, vector& bs) { switch (k) { // Top level //---------- case(THEORY): { Box* decls = bs.at(0); Box* hyps = bs.at(1); Box* concls = bs.at(2); return box("theory " + id) / box("imports Real") / box("") / box("begin") / box("") / *decls / box("") / box("lemma L:") / box("") / *hyps / box("") / *concls / box("") / box(option("proof-command") ? optionVal("proof-command") : "oops") / box("") / box("end"); } case(DECLS): return PP::simpleVSeq(bs); case(HYPS): return PP::simpleVSeq(bs); case(ASSUMPTION): return PP::makeHVSeq("assumes " + id + ": ", "\"", "", "\"", bs); case(CONCLS): return PP::makeHVSeq("", "shows", " and", "", bs); case(CONCL): return PP::makeHVSeq("", "\"", "", "\" (is \"?" 
+ id + "\")", bs); // Declarations //------------- case(DEF_TYPE): return PP::makeHVSeq("types " + id + " = ", "\"", "", "\"", bs); case(DECL_TYPE): return box("typedecl " + id); case(DEF_RECORD): return PP::makeHVSeq("record " + id + " = ", "", "", "", bs); case(OUTER_DECL): return PP::makeHVSeq(id + " :: ", "\"", "", "\"", bs); case(DECL_CONST): return PP::makeHVSeq("consts " + id + " :: ", "\"", "", "\"", bs); // Types // ----- case(INT_TY): return box("int"); case(REAL_TY): return box("real"); case(BOOL_TY): return box("bool"); case(FUN_TY): return PP::makeHVSeq("", "", "=>", "", bs); case(FUN_ARG_TY): return PP::makeHVSeq("", "[", ",", "]", bs); case(TUPLE_TY): return PP::makeHVSeq("", "", "*", "", bs); case(TYPE_ID): return box(id); // Logic // ----- case(IMPLIES): return PP::makeHVSeq("", "", "-->", "", bs); case(IFF): return PP::makeHVSeq("", "", "<->", "", bs); case(OR): return PP::makeHVSeq("", "", "|", "", bs); case(AND): return PP::makeHVSeq("", "", "&", "", bs); case(NOT): return PP::makeHVSeq("", "~", "", "", bs); case(TRUE): return box("True"); case(FALSE): return box("False"); case(FORALL): return PP::makeHVSeq("", "ALL", ". ", "", bs); case(EXISTS): return PP::makeHVSeq("", "EX", ". ", "", bs); case(SEQ): return PP::makeHVSeq("", "", "", "", bs); case(DECL): return PP::makeHVSeq(id + " ::" , "", "", "", bs); // Polymorphic terms // ----------------- case(VAR): return box(id); case(CONST): return box(id); case(FUN_AP): return PP::makeHVSeq(id, "", "", "", bs); case(APPLY): return PP::makeHVSeq("", "", "", "", bs); case(EQ): return PP::makeHVSeq("", "", "=", "", bs); // pairs and tuples // ---------------- case(TUPLE): return PP::makeHVSeq("", "(", ",", ")", bs); // integers // -------- case(I_LE): return PP::makeHVSeq("", "", "<=", "", bs); case(I_LT): return PP::makeHVSeq("", "", "<", "", bs); case(I_PLUS): return PP::makeHVSeq("", "", "+", "", bs); case(I_MINUS): return PP::makeHVSeq("", "", "-", "", bs); case(I_TIMES): return PP::makeHVSeq("", "", "*", "", bs); case(IDIVM): return PP::makeHVSeq("", "", "div", "", bs); case(MOD): return PP::makeHVSeq("", "", "mod", "", bs); case(I_UMINUS): return PP::makeHVSeq("", "-", "", "", bs); case(I_EXP_N): return PP::makeHVSeq("", "", "^", "", bs); case(I_TO_NAT): return PP::makeHVSeq("", "nat", "", "", bs); case(NATNUM): return box("(" + id + " :: int)"); // reals // ----- case(TO_REAL): return PP::makeHVSeq("(of_int(", "", "", ") :: real)", bs); case(R_LE): return PP::makeHVSeq("", "", "<=", "", bs); case(R_LT): return PP::makeHVSeq("", "", "<", "", bs); case(R_PLUS): return PP::makeHVSeq("", "", "+", "", bs); case(R_MINUS): return PP::makeHVSeq("", "", "-", "", bs); case(R_TIMES): return PP::makeHVSeq("", "", "*", "", bs); case(RDIV): return PP::makeHVSeq("", "", "/", "", bs); case(R_UMINUS): return PP::makeHVSeq("", "-", "", "", bs); case(R_EXP_N): return PP::makeHVSeq("", "", "^", "", bs); // arrays // ------- // UPDATE formatting: // If doesn't fit on one line, go for // [b0] // ( [b1] // := [b2] // ) case(UPDATE): { std::vector newBs; newBs.push_back( bs[0] ); newBs.push_back( &(box("( ") + *(bs[1]))); newBs.push_back( &(box(" := ") + *(bs[2]))); newBs.push_back( & box(")") ); return PP::simpleHVSeq(newBs); } // records // ------- case(RCD_ELEMENT): return PP::makeHVSeq(id, "", "", "", bs); // RCD_UPDATE // If doesn't fit on one line, go for // [b0] // (| // := [b2] // |) case(RCD_UPDATE): { std::vector newBs; newBs.push_back( bs[0] ); newBs.push_back( &(box("(| " + id) ) ); newBs.push_back( &(box(" := ") + *(bs[1]))); 
newBs.push_back( & box("|)") ); return PP::simpleHVSeq(newBs); } // Otherwise // --------- default: printMessage(ERRORm, "IsabFormatter::addSyntax: " + ENDLs + "Encountered unsupported kind " + kindString(k)); return PP::makeHVSeq("***" + kindString(k) + "{" + id + "}***", "[", ";", "]", bs ); } } IsabFormatter::IsabFormatter() { addOpInfo(FUN_TY, 0, "LE"); // Varying arity expected addOpInfo(TUPLE_TY, 0, "LE"); // Varying arity expected addOpInfo(IMPLIES, 25, "LE"); addOpInfo(IFF, 25, "LE"); addOpInfo(OR, 30, "LE"); // Varying arity expected addOpInfo(AND, 35, "LE"); // Varying arity expected addOpInfo(NOT, 40, "E"); addOpInfo(EXISTS, 10, "*E"); addOpInfo(FORALL, 10, "*E"); addOpInfo(SEQ, 0, "L"); // Varying arity expected addOpInfo(DECL, 0, "E"); addOpInfo(FUN_AP, 1000, "L"); // Varying arity expected addOpInfo(APPLY, 1000, "L"); // Varying arity expected addOpInfo(EQ, 50, "LL"); // integers addOpInfo(I_LE, 50, "LL"); addOpInfo(I_LT, 50, "LL"); addOpInfo(I_PLUS, 65, "EL"); addOpInfo(I_MINUS, 65, "EL"); addOpInfo(I_TIMES, 70, "EL"); addOpInfo(IDIVM, 70, "EL"); addOpInfo(MOD, 70, "EL"); addOpInfo(I_UMINUS, 80, "L"); addOpInfo(I_EXP_N, 80, "LE"); addOpInfo(I_TO_NAT, 1000, "L"); // reals addOpInfo(R_LE, 50, "LL"); addOpInfo(R_LT, 50, "LL"); addOpInfo(R_PLUS, 65, "EL"); addOpInfo(R_MINUS, 65, "EL"); addOpInfo(R_TIMES, 70, "EL"); addOpInfo(RDIV, 70, "EL"); addOpInfo(MOD, 70, "EL"); addOpInfo(R_UMINUS, 80, "L"); addOpInfo(R_EXP_N, 80, "LE"); // Arrays addOpInfo(UPDATE, 1000, "L**"); // Records addOpInfo(RCD_ELEMENT, 1000, "L"); addOpInfo(RCD_UPDATE, 900, "E**"); return; }; Formatter* IsabFormatter::getFormatter() { if (instance == 0) instance = new IsabFormatter; return instance; } // Storage declarations for static class members. Formatter* IsabFormatter::instance = 0; /* ============================================================================ FDL to Isabelle/HOL abstract syntax translation ============================================================================ */ class IsabTranslator : public Translator { public: IsabTranslator() : Translator("Isabelle/HOL") {}; protected: virtual Node* translateAux (Node* oldN); private: string fixFunId(const string& s); string fixConstId(const string& s); string fixRcdFldId(const string& id, const string& typeId); string fixTypeId(const string& id); }; string IsabTranslator::fixFunId(const string& s) { return s + "'"; } string IsabTranslator::fixConstId(const string& s) { return s + "''"; } string IsabTranslator::fixRcdFldId(const string& id, const string& typeId) { return id + "'" + typeId ; } string IsabTranslator::fixTypeId(const string& id) { return id + "'"; } Node* IsabTranslator::translateAux (Node* oldN) { /* ---------------------------------------------------------------------------- Translate subtrees of current node. 
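   The recursion is bottom-up: each child is translated before the node
   itself, so the cases below can assume that their children are already
   in Isabelle/HOL form.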
---------------------------------------------------------------------------- */ for (int i = 0; i != oldN->arity(); i++) { oldN->child(i) = translateAux(oldN->child(i)); } /* ---------------------------------------------------------------------------- Translate current node ---------------------------------------------------------------------------- */ switch (oldN->kind) { /* ---------------------------------------------------------------------------- Top-level structure of unit ---------------------------------------------------------------------------- */ case UNIT: case DECLS: case RULES: case GOALS: case GOAL: case HYPS: case CONCLS: return oldN; /* ---------------------------------------------------------------------------- Declarations and definitions ---------------------------------------------------------------------------- */ case DEF_TYPE: { Node* newN; if (oldN->arity() == 0) { newN = oldN->updateKind(DECL_TYPE); } else if (oldN->child(0)->kind == RECORD_TY) { newN = oldN->child(0); newN->kind = DEF_RECORD; for (int i = 0; i != newN->arity(); i++) { Node* decl = newN->child(i); decl->kind = OUTER_DECL; decl->id = fixRcdFldId(decl->id,oldN->id); } } else { newN = oldN; } return newN->updateId(fixTypeId(oldN->id)); } case DEF_CONST: { oldN->id = fixConstId(oldN->id); if (oldN->arity() == 1) return oldN->updateKind(DECL_CONST); else return oldN; } case DECL_FUN: { Node* arg_tys = oldN->child(0); if (arg_tys->arity() == 1) { arg_tys = arg_tys->child(0); } else { arg_tys->kind = FUN_ARG_TY; } Node* range_ty = oldN->child(1); oldN->kind = DECL_CONST; oldN->id = fixFunId(oldN->id); oldN->popChild(); oldN->child(0) = new Node(FUN_TY, arg_tys, range_ty); return oldN; } /* ---------------------------------------------------------------------------- Types ---------------------------------------------------------------------------- */ case ARRAY_TY: { oldN->kind = FUN_TY; oldN->id = ""; if (oldN->child(0)->arity() == 1) { oldN->child(0) = oldN->child(0)->child(0); } else { oldN->child(0)->kind = TUPLE_TY; } return oldN; } case INT_TY: case REAL_TY: case BOOL_TY: return oldN; case TYPE_ID: return oldN->updateId(fixTypeId(oldN->id)); case TYPE_PARAM: // Used for type args of record operators return oldN; // Don't fix. // Node kinds eliminated later by translation of parent node case RECORD_TY: return oldN; // Not expecting ENUM_TY, SUBRANGE_TY, BIT_TY /* ---------------------------------------------------------------------------- Formulas ---------------------------------------------------------------------------- */ case IMPLIES: case IFF: case OR: case AND: case NOT: case TRUE: case FALSE: return oldN; case FORALL: case EXISTS: { Node* decls = oldN->child(0); for (int i = 0; i != decls->arity(); i++) { Node* decl = decls->child(i); decl->id = fixConstId(decl->id); } } case SEQ: case DECL: return oldN; case CONST: case VAR: return oldN->updateId(fixConstId(oldN->id)); case FUN_AP: return oldN->updateId(fixFunId(oldN->id)); case EQ: return oldN; /* ---------------------------------------------------------------------------- Integers ---------------------------------------------------------------------------- */ case I_LT: case I_LE: case I_PLUS: case I_MINUS: case I_TIMES: return oldN; case IDIV: return oldN->updateKind(IDIVM); // FIXME: Unsound!! 
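// Note: the unsoundness flagged above is presumably the rounding mismatch:
// FDL/Ada integer division truncates towards zero, whereas Isabelle/HOL's
// "div" on int rounds towards minus infinity, so the two disagree whenever
// exactly one operand is negative (e.g. (-7) div 2 is -3 in FDL but -4 in
// Isabelle).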
case MOD: case I_UMINUS: return oldN; case I_EXP: { oldN->child(1) = new Node(I_TO_NAT, oldN->child(1)); return oldN->updateKind(I_EXP_N); } case NATNUM: return oldN; /* ---------------------------------------------------------------------------- reals ---------------------------------------------------------------------------- */ case TO_REAL: case R_LE: case R_LT: case R_PLUS: case R_MINUS: case R_TIMES: case RDIV: case R_UMINUS: return oldN; case R_EXP: { oldN->child(1) = new Node(I_TO_NAT, oldN->child(1)); return oldN->updateKind(R_EXP_N); } /* ---------------------------------------------------------------------------- Arrays ---------------------------------------------------------------------------- */ case ARR_ELEMENT: { if (oldN->child(1)->arity() > 1) { oldN->child(1)->kind = TUPLE; } else { oldN->child(1) = oldN->child(1)->child(0); } return oldN->updateKindAndId(APPLY, ""); } case ARR_UPDATE: { if (oldN->child(1)->arity() > 1) { oldN->child(1)->kind = TUPLE; } else { oldN->child(1) = oldN->child(1)->child(0); } return oldN->updateKindAndId(UPDATE, ""); } /* ---------------------------------------------------------------------------- Records ---------------------------------------------------------------------------- */ // Remove type argument. Fix fieldname. case RCD_ELEMENT: { string typeId = oldN->child(1)->id; oldN->id = fixRcdFldId(oldN->id, typeId); oldN->popChild(); return oldN; } case RCD_UPDATE: { string typeId = oldN->child(2)->id; oldN->id = fixRcdFldId(oldN->id, typeId); oldN->popChild(); return oldN; } /* ---------------------------------------------------------------------------- Otherwise ---------------------------------------------------------------------------- */ default: { error ("unrecognised kind " + kindString(oldN->kind) ); return oldN; } } // end switch(oldN->kind) } //======================================================================== // Virtual functions for IsabDriver class //======================================================================== Node* IsabDriver::translateUnit(Node* unit) { IsabTranslator t; return t.translate(unit); } // Create a valid Isabelle/HOL identifier from argument. // Replaces all unacceptable characters with 's (forward single quotes) // Not concerned here with ensuring function is injective. 
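// For example (illustrative), mkIsabId("p_1.vcg") yields "p_1'vcg": letters,
// digits, '_' and single quotes are kept, and the '.' is replaced by a quote.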
bool isIsabIdChar(char c) { return ('A' <= c && c <= 'Z') || ('a' <= c && c <= 'z') || ('0' <= c && c <= '9') || c == '\'' || c == '_'; } string mkIsabId(const string& s) { string result; for (int i = 0; i != (int) s.size(); i++) { char c = s.at(i); if (isIsabIdChar(c)) result.append(1,c); else result.append(1,'\''); } return result; } void IsabDriver::initGoal(const string& unitName, int goalNum, int conclNum) { string fullGoalFileRoot = getFullGoalFileRoot("vc", unitName, goalNum, conclNum); proverInputFileName = fullGoalFileRoot + ".thy"; proverOutputFileName = fullGoalFileRoot + ".out"; proverErrorFileName = fullGoalFileRoot + ".err"; string theoryname = mkIsabId(unitName) + "'" + intToString(goalNum); if (conclNum != 0) theoryname += "'" + intToString(conclNum); theory = new Node(THEORY, theoryname, new Node (DECLS), new Node (HYPS), new Node (CONCLS) ); return; } void IsabDriver::addDecl(Node* decl) { theory->child(0)->addChild(decl); return; } void IsabDriver::addRule(Node* hyp, const string& hId, string& remarks) { addHyp(hyp,hId,remarks); return; } void IsabDriver::addHyp(Node* hyp, const string& hId, string& remarks) { theory->child(1)->addChild(new Node(ASSUMPTION,hId,hyp)); return; } void IsabDriver::addConcl(Node* concl, string& remarks) { Node* concls = theory->child(2); if (concl->kind == AND) { for (int i = 0; i != concl->arity(); i++) { concls->addChild(new Node (CONCL, "C" + intToString(i + 1), concl->child(i) )); } } else { concls->addChild(new Node (CONCL, "C1", concl )); } return; } void IsabDriver::finishSetup() { // Handle case when skip-concls option asserted if (theory->child(2)->arity() == 0) { theory->child(2)->addChild(new Node (CONCL, "C1", new Node(FALSE))); } ofstream proverInput; proverInput.open(proverInputFileName.c_str()); if (proverInput.fail()) { cerr << endl << "Error on trying to open file " << proverInputFileName << endl; exit(1); } Formatter::setFormatter(IsabFormatter::getFormatter()); proverInput << *theory << endl; proverInput.close(); } //--------------------------------------------------------------------------- // checkGoal //--------------------------------------------------------------------------- bool IsabDriver::checkGoal(string& remarks) { string cmd; if (option("prover-command")) { cmd = optionVal("prover-command") + " "; } else { return false; } cmd = withTimeoutAndIO(cmd, proverInputFileName, proverOutputFileName, proverErrorFileName); printMessage(INFOm, "Running command" + ENDLs + cmd + ENDLs); int exitStatus = std::system(cmd.c_str()); printMessage(INFOm, "Exit status is " + intToString(exitStatus)); return false; } IsabDriver::Status IsabDriver::getResults(string& remarks) { // Do not check output files if none were generated in first place if (! 
(option("prover") || option("prover-command") )) { return UNPROVEN; } // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Read in output and error files from solver // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ifstream proverOut (proverOutputFileName.c_str() ); ifstream proverErr (proverErrorFileName.c_str() ); if (!proverOut) { printMessage(ERRORm, "Cannot open output file " + proverOutputFileName); appendCommaString(remarks, "Outfile not found"); return ERROR; } if (!proverErr) { printMessage(ERRORm, "Cannot open error output file " + proverErrorFileName); appendCommaString(remarks, "Errorfile not found"); return ERROR; } vector proverOutput; vector proverErrOutput; { string line; while (getline(proverOut, line)) proverOutput.push_back(line); proverOut.close(); while (getline(proverErr, line)) proverErrOutput.push_back(line); proverErr.close(); } // flags for processing of standard error file bool seenStdErrNonEmpty = proverErrOutput.size() > 0; // flags for processing of standard output file bool seenWarningOnStdOut = false; vector stdOutWarnings; for (int i = 0; i != (int) proverOutput.size(); i++) { string s = proverOutput.at(i); if (hasPrefix(s, "***")) { seenWarningOnStdOut = true; stdOutWarnings.push_back(intToString(i) + ": " + s); } } // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Report on output and decide return status // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - if (seenWarningOnStdOut || seenStdErrNonEmpty) { string outMessage = concatStrings(stdOutWarnings, ENDLs); string errMessage = concatStrings(proverErrOutput, ENDLs); printMessage(ERRORm, "Error(s) on prover output." + ENDLs + "On STDOUT:" + ENDLs + outMessage + ENDLs + "On STDERR: " + ENDLs + errMessage + ENDLs + "END of output"); return ERROR; } // All output expected. return UNPROVEN; } //========================================================================= // END OF FILE //========================================================================= spark-2012.0.deb/victor/vct/src/elim-bit-type.cc0000644000175000017500000005275311753202341020332 0ustar eugeneugen//======================================================================== //======================================================================== // ELIM-BIT-TYPE.CC //======================================================================== //======================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ //======================================================================== //======================================================================== // Eliminating/abstracting bit type, ops, consts and rels. 
//======================================================================== //======================================================================== // Translations in groups. Ordering of groups should be followed, since // earlier groups use primitives that are removed by later groups. // Group 1 // * Bit ops: // // Add decl + axiom for each. Map away. // Add decl + axiom. Map away // // * TERM_EQ: Add decl + axiom. Map away TERM_EQ first to id. Must // do for every declared and primitive type. // But TERM_EQ on each type is abstracted by refinement if eq // non-triv. Could consider adding axiom at end of refinement, only if // it is not abstracted. // // Simpler for now to add all definitions. Later a much more effective // optimisation is to only add decl + axiom for those instances which occur. // * TERM_I_LE: Add decl + axiom. Map away TERM_I_LE first to id. // Group 2 // // *. TO_PROP and TO_BIT elimination. // // Group 3. // // * Eliminating BIT_TY primitive by using interpreted def. // // Replace all occurrences with TYPE_ID{bit___type} // Add type def: // If has std 2 element interp, then SUBRANGE{0,1} // If has int interp, then INT_TY // // *. Abstract Bit type. Switch to uninterpreted type. // // Bit consts: Go with type. // // If intro type def as subrange 0,1 or int // then add decl for each, and axiom for = 0 and =1 // // If abstract Bit type. Then just 2 axioms // All x:bit. x = false or x = true // and false != true. // // // // Could tidy up much of this code with some more elegant support. // e.g. one fun abstract_def def_lhs def_rhs. Do if/when port to OCAML. // //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Abstract Bit operations //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // operations are TERM_AND, TERM_OR, TERM_NOT, TERM_IFF // assumes bit type still declared. //--------------------------------------------------------------------------- // abstractBitOp //--------------------------------------------------------------------------- Node* abstractBitOp(Node* n) { if (n->kind == TERM_AND) { return n->updateKindAndId(FUN_AP, "bit___and"); } else if (n->kind == TERM_OR) { return n->updateKindAndId(FUN_AP, "bit___or"); } else if (n->kind == TERM_IFF) { return n->updateKindAndId(FUN_AP, "bit___iff"); } else if (n->kind == TERM_NOT) { return n->updateKindAndId(FUN_AP, "bit___not"); } else { return n; } assert(false); } //--------------------------------------------------------------------------- // abstractBitOps //--------------------------------------------------------------------------- void abstractBitOps(FDLContext* ctxt, Node* unit) { Node* rules = unit->child(1); Node* Bit = Node::bit_ty; // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add declarations for new operations // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Node* andDecl = new Node(DECL_FUN, "bit___and", new Node(SEQ, Bit, Bit), Bit); Node* orDecl = new Node(DECL_FUN, "bit___or", new Node(SEQ, Bit, Bit), Bit); Node* iffDecl = new Node(DECL_FUN, "bit___iff", new Node(SEQ, Bit, Bit), Bit); Node* notDecl = new Node(DECL_FUN, "bit___not", new Node(SEQ, Bit), Bit); ctxt->insert(andDecl); ctxt->insert(orDecl); ctxt->insert(iffDecl); ctxt->insert(notDecl); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add axioms for abstract operations // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Use primitive operations for convenience. 
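// The axioms are stated with the primitive operators (TERM_AND etc.); the
// mapOver1 call at the end of this function rewrites those occurrences as
// well, so each axiom ends up with (roughly) the shape
//   forall x, y : bit .  to_prop(bit___and(x, y)) <-> (to_prop(x) and to_prop(y))
// with the bit___* application as its trigger pattern.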
Node* andAxiom = new Node(FORALL,"", new Node(SEQ, new Node(DECL,"x",Bit), new Node(DECL,"y",Bit)), new Node(IFF, new Node(TO_PROP, new Node(TERM_AND, new Node(VAR,"x"), new Node(VAR,"y") ) ), new Node(AND, new Node(TO_PROP, new Node(VAR,"x")), new Node(TO_PROP, new Node(VAR,"y")) ) ), new Node(PAT, new Node(TERM_AND, new Node(VAR,"x"), new Node(VAR,"y") ) ) ); Node* orAxiom = new Node(FORALL,"", new Node(SEQ, new Node(DECL,"x",Bit), new Node(DECL,"y",Bit)), new Node(IFF, new Node(TO_PROP, new Node(TERM_OR, new Node(VAR,"x"), new Node(VAR,"y") ) ), new Node(OR, new Node(TO_PROP, new Node(VAR,"x")), new Node(TO_PROP, new Node(VAR,"y")) ) ), new Node(PAT, new Node(TERM_OR, new Node(VAR,"x"), new Node(VAR,"y") ) ) ); Node* iffAxiom = new Node(FORALL,"", new Node(SEQ, new Node(DECL,"x",Bit), new Node(DECL,"y",Bit)), new Node(IFF, new Node(TO_PROP, new Node(TERM_IFF, new Node(VAR,"x"), new Node(VAR,"y") ) ), new Node(IFF, new Node(TO_PROP, new Node(VAR,"x")), new Node(TO_PROP, new Node(VAR,"y")) ) ), new Node(PAT, new Node(TERM_IFF, new Node(VAR,"x"), new Node(VAR,"y") ) ) ); Node* notAxiom = new Node(FORALL,"", new Node(SEQ, new Node(DECL,"x",Bit)), new Node(IFF, new Node(TO_PROP, new Node(TERM_NOT, new Node(VAR,"x") ) ), new Node(NOT, new Node(TO_PROP, new Node(VAR,"x")) ) ), new Node(PAT, new Node(TERM_NOT, new Node(VAR,"x") ) ) ); rules->addChild(andAxiom); rules->addChild(orAxiom); rules->addChild(iffAxiom); rules->addChild(notAxiom); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Abstract away operation occurrences // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - unit->mapOver1(abstractBitOp); } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // AbstractBit-valued equality //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ //--------------------------------------------------------------------------- // abstractBitValuedEq //--------------------------------------------------------------------------- // TERM_EQ x y T --> FUN_AP{___bit_eq} x y // where = typeToName(T); Node* abstractBitValuedEq(Node* n) { if (n->kind == TERM_EQ && n->arity() == 3) { string typeName = typeToName(n->child(2)); n->popChild(); return n->updateKindAndId(FUN_AP, typeName + "___bit_eq"); } else { return n; } } //--------------------------------------------------------------------------- // mkBitValuedEqDecl //--------------------------------------------------------------------------- // DECL_FUN{___bit_eq} (SEQ T T) BIT_TY Node* mkBitValuedEqDecl(const string& typeName) { return new Node(DECL_FUN, typeName + "___bit_eq", new Node(SEQ, nameToType(typeName), nameToType(typeName)), new Node(BIT_TY) ); } //--------------------------------------------------------------------------- // mkBitValuedEqAxiom //--------------------------------------------------------------------------- Node* mkBitValuedEqAxiom(const string& typeName) { string opName = typeName + "___bit_eq"; return new Node(FORALL,"", new Node(SEQ, new Node(DECL,"x",nameToType(typeName)), new Node(DECL,"y",nameToType(typeName))), new Node(IFF, new Node(TO_PROP, new Node(FUN_AP, opName, new Node(VAR,"x"), new Node(VAR,"y") ) ), new Node(EQ,"", new Node(VAR,"x"), new Node(VAR,"y"), nameToType(typeName) ) ), new Node(PAT, new Node(FUN_AP, opName, new Node(VAR,"x"), new Node(VAR,"y") ) ) ); } //--------------------------------------------------------------------------- // abstractBitValuedEqs 
//--------------------------------------------------------------------------- void abstractBitValuedEqs(FDLContext* ctxt, Node* unit) { Node* rules = unit->child(1); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add declarations for primitive types // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ctxt->insert(mkBitValuedEqDecl("integer")); ctxt->insert(mkBitValuedEqDecl("real")); ctxt->insert(mkBitValuedEqDecl("bit___type")); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add axioms for primitive types // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - rules->addChild(mkBitValuedEqAxiom("integer")); rules->addChild(mkBitValuedEqAxiom("real")); rules->addChild(mkBitValuedEqAxiom("bit___type")); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Loop over declared types // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - for (map::iterator i = ctxt->typeMap.begin(); i != ctxt->typeMap.end(); i++ ) { Node* typeDecl = i->second; // DEF_TYPE {id} type // | DEF_TYPE {id} string typeId = typeDecl->id; // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add declaration for declared type // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ctxt->insert(mkBitValuedEqDecl(typeId)); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add axiom for declared type // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - rules->addChild(mkBitValuedEqAxiom(typeId)); } // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Abstract away operation occurrences // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - unit->mapOver1(abstractBitValuedEq); } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Abstract Bit-valued integer LE //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ //--------------------------------------------------------------------------- // abstractBitValuedIntLE //--------------------------------------------------------------------------- Node* abstractBitValuedIntLE(Node* n) { if (n->kind == TERM_I_LE) { return n->updateKindAndId(FUN_AP, "integer___bit_le"); } else { return n; } } //--------------------------------------------------------------------------- // abstractBitValuedIntLEs //--------------------------------------------------------------------------- void abstractBitValuedIntLEs(FDLContext* ctxt, Node* unit) { Node* rules = unit->child(1); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add declaration // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Node* decl = new Node(DECL_FUN, "integer___bit_le", new Node(SEQ, Node::int_ty, Node::int_ty), Node::bit_ty ); ctxt->insert(decl); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add axiom // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Node* axiom = new Node(FORALL,"", new Node(SEQ, new Node(DECL,"x",Node::int_ty), new Node(DECL,"y",Node::int_ty)), new Node(IFF, new Node(TO_PROP, new Node(TERM_I_LE, new Node(VAR,"x"), new Node(VAR,"y") ) ), new Node(I_LE, new Node(VAR,"x"), new Node(VAR,"y") ) ), new Node(PAT, new Node(TERM_I_LE, new Node(VAR,"x"), new Node(VAR,"y") ) ) ); rules->addChild(axiom); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Abstract 
away occurrences // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - unit->mapOver1(abstractBitValuedIntLE); } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Expand coercions between prop(bool formulas) and bit. //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // TO_PROP TERM_TRUE --> TRUE // TO_PROP TERM_FALSE --> FALSE // TO_PROP b --> EQ b TERM_TRUE BIT_TY // // TO_BIT TRUE --> TERM_TRUE // TO_BIT FALSE --> TERM_FALSE // TO_BIT p --> ITE p TERM_TRUE TERM_FALSE BIT_TY Node* elimBitPropCoercion(Node* n) { if (n->kind == TO_PROP) { if (n->child(0)->kind == TERM_TRUE) { n->kind = TRUE; n->popChild(); } else if (n->child(0)->kind == TERM_FALSE) { n->kind = FALSE; n->popChild(); } else { n->kind = EQ; n->addChild(new Node(TERM_TRUE)); n->addChild(new Node(BIT_TY)); } } else if (n->kind == TO_BIT) { if (n->child(0)->kind == TRUE) { n->kind = TERM_TRUE; n->popChild(); } else if (n->child(0)->kind == FALSE) { n->kind = TERM_FALSE; n->popChild(); } else { n->kind = ITE; n->addChild(new Node(TERM_TRUE)); n->addChild(new Node(TERM_FALSE)); n->addChild(new Node(BIT_TY)); } } return n; } void elimBitPropCoercions(Node* unit) { unit->mapOver1(elimBitPropCoercion); } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Elim Bit type and constants //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Make them a defined type and constants // // Node* abstractBitTypeTrueFalse(Node* n) { if (n->kind == BIT_TY) { return new Node(TYPE_ID, "bit___type"); } else if (n->kind == TERM_TRUE) { return new Node(CONST, "bit___true"); } else if (n->kind == TERM_FALSE) { return new Node(CONST, "bit___false"); } else return n; } void elimBitTypeAndConsts(FDLContext* ctxt, Node* unit) { Node* rules = unit->child(1); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add definition for new named bit type // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Node* defType; if (ctxt->hasFeature("prim bit type has int interp")) { defType = new Node(DEF_TYPE, "bit___type", Node::int_ty); } else { defType = new Node(DEF_TYPE, "bit___type", new Node(SUBRANGE_TY, new Node (NATNUM, "0"), new Node (NATNUM, "1")) ); } ctxt->insert(defType); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add declarations for consts // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Node* trueDecl = new Node(DEF_CONST, "bit___true", Node::bit_ty); Node* falseDecl = new Node(DEF_CONST, "bit___false", Node::bit_ty); ctxt->insert(trueDecl); ctxt->insert(falseDecl); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add axioms for consts // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Node* falseAxiom = new Node(EQ,"", new Node(CONST, "bit___false"), new Node(NATNUM, "0"), new Node(BIT_TY)); Node* trueAxiom = new Node(EQ,"", new Node(CONST, "bit___true"), new Node(NATNUM, "1"), new Node(BIT_TY)); rules->addChild(falseAxiom); rules->addChild(trueAxiom); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Abstract away occurrences of type and constants // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Map over all declarations to catch BIT_TY uses there. 
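// (abstractBitTypeTrueFalse rewrites BIT_TY to TYPE_ID{bit___type}, TERM_TRUE
// to CONST{bit___true} and TERM_FALSE to CONST{bit___false}.)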
ctxt->typeSeq.mapOver1(abstractBitTypeTrueFalse); ctxt->termSeq.mapOver1(abstractBitTypeTrueFalse); unit->mapOver1(abstractBitTypeTrueFalse); } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Abstract Bit type and constants //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // To implement. void abstractBitTypeAndConsts(FDLContext* ctxt, Node* unit) { assert(false); } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Master function for Bit abstraction and elimination //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ void abstractBit(FDLContext* ctxt, Node* unit) { if (option("abstract-bit-ops")) abstractBitOps(ctxt,unit); if (option("abstract-bit-valued-eqs")) abstractBitValuedEqs(ctxt,unit); if (option("abstract-bit-valued-int-le")) abstractBitValuedIntLEs(ctxt,unit); if (! option("no-elim-bit-prop-coercions")) elimBitPropCoercions(unit); if (option("elim-bit-type-and-consts")) elimBitTypeAndConsts(ctxt, unit); else if (option("abstract-bit-type-and-consts")) abstractBitTypeAndConsts(ctxt, unit); } spark-2012.0.deb/victor/vct/src/bignum.cc0000644000175000017500000002256411753202341017127 0ustar eugeneugen//========================================================================== //========================================================================== // BIGNUM.CC //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ #include "bignum.hh" using std::string; using namespace z; #include #include using std::endl; #include "utility.hh" //======================================================================== // MyInt functions //======================================================================== // GMP documentation is vague about how storage for mpz_class objects is // managed. Assume some kind of reference counting going on. // Assume n satisfies isInt(). // id of NATNUM{id} is guaranteed string of digits by lexer, so no need // to check this. // MyInt::MyInt(const std::string& s) { num = mpz_class(s); } MyInt::MyInt(Node* n) { Node* nat = (n->kind == I_UMINUS) ? 
n->child(0) : n; num = mpz_class(nat->id); if (n->kind == I_UMINUS) num = num * -1; } bool MyInt::isInt(Node* n) { return (n->kind == NATNUM || (n->kind == I_UMINUS && n->child(0)->kind == NATNUM) ); } Node* MyInt::toNode() { if (num < 0) { return new Node(I_UMINUS, new Node(NATNUM, mpz_class(num * (-1)).get_str())); } else { return new Node(NATNUM, num.get_str()); } } bool MyInt::operator==(const MyInt& a) const { return num == a.num; } bool MyInt::operator<(const MyInt& a) const { return num < a.num; } std::string MyInt::toString() const { return num.get_str(); } MyInt MyInt::uminus(const MyInt& a) { return MyInt(a.num * (-1)); } MyInt MyInt::plus(const MyInt& a, const MyInt& b) { return MyInt(a.num + b.num); } MyInt MyInt::minus(const MyInt& a, const MyInt& b) { return MyInt(a.num - b.num); } MyInt MyInt::times(const MyInt& a, const MyInt& b) { return MyInt(a.num * b.num); } // Named gmp functions used for clarity. MyInt MyInt::div(const MyInt& a, const MyInt& b) { if (b.num == 0) throw std::runtime_error("MyInt::div: divide by zero"); mpz_class q; // tdiv_q returns quotient when rounding (t = truncating) towards zero. mpz_tdiv_q (q.get_mpz_t(), a.num.get_mpz_t(), b.num.get_mpz_t()); return MyInt(q); } // Sign of modulus here follows that of 1st arg. MyInt MyInt::rem(const MyInt& a, const MyInt& b) { if (b.num == 0) throw std::runtime_error("MyInt::rem: second argument zero"); mpz_class r; // tdiv_r returns remainder when division is rounding (t = // truncating) towards zero. mpz_tdiv_r (r.get_mpz_t(), a.num.get_mpz_t(), b.num.get_mpz_t()); return MyInt(r); } // mpz_mod is *not* used here. It always ignores sign of divisor and // always returns a +ve result. // Mimic Ada functionality where sign of modulus follows that of 2nd arg. MyInt MyInt::mod(const MyInt& a, const MyInt& b) { if (b.num == 0) throw std::runtime_error("MyInt::mod: second argument zero"); mpz_class r; // tdiv_r returns remainder when division is rounding (t = // truncating) towards zero. mpz_tdiv_r (r.get_mpz_t(), a.num.get_mpz_t(), b.num.get_mpz_t()); if (r != 0 && ((a.num > 0 && b.num < 0) || (a.num < 0 && b.num > 0))) r = r + b.num; return MyInt(r); } // Returns 1 if b <= 0; MyInt MyInt::exp(const MyInt& a, const MyInt& b) { if (b.num <= 0) return MyInt(mpz_class(1)); if (b.num > mpz_class(256)) // Exact value unimportant. Make smaller? 
throw std::runtime_error("MyInt::exp: too large exponent " + b.num.get_str()); mpz_class val; mpz_pow_ui(val.get_mpz_t(), a.num.get_mpz_t(), b.num.get_ui()); return MyInt(val); } //======================================================================== // Elimination of ground integer arithmetic expressions //======================================================================== void groundEvalTop(Node* n) { int k = n->kind; switch (k) { case I_PLUS: case I_MINUS: case I_TIMES: case IDIV: case MOD: case I_EXP: { if (MyInt::isInt(n->child(0)) && MyInt::isInt(n->child(1)) ) { MyInt a(n->child(0)); MyInt b(n->child(1)); MyInt res; if (k == I_PLUS) res = MyInt::plus(a,b); else if (k == I_MINUS) res = MyInt::minus(a,b); else if (k == I_TIMES) res = MyInt::times(a,b); else if (k == IDIV) res = MyInt::div(a,b); else if (k == MOD) res = MyInt::mod(a,b); else res = MyInt::exp(a,b); *n = * res.toNode(); } break; } case I_UMINUS: { if (MyInt::isInt(n->child(0))) { MyInt a(n->child(0)); MyInt res = MyInt::uminus(a); *n = * res.toNode(); } break; } default: break; } return; } void groundEval(Node* n) { n->mapOver(groundEvalTop); } //======================================================================== // Evaluation of constant arith exps and mult and div by constants //======================================================================== // // Rules: apply 1st that matches. // // e * k --> k * e (lin-times-1) // // k * (k' * e) --> kk' * e (lin-times-2) // (k * e) * k' --> kk' * e // // // (k * e) * (k' * e') --> kk' * (e * e') (lin-times-3) // e * (k * e') --> k * (e * e') (lin-times-4) // (k * e) * e' --> k * (e * e') (lin-times-5) // // // // (k * e) div k' --> (k div k') * e if k' divides k. (lin-times-div) // // Intent is that rules are applied in bottom up bool isIntNode(Node* n) { return MyInt::isInt(n); } // Require here that linear multiplication be always of form "k * e". bool isLinTimesNode(Node* n) { return (n->kind == I_TIMES && isIntNode(n->child(0))); } void constArithEvalTop(Node* n) { groundEvalTop(n); // Applying this rule first reduces need for 2 versions of next rule // e * k --> k * e if (n->kind == I_TIMES && isIntNode(n->child(1)) ) { Node* e = n->child(0); Node* k = n->child(1); printMessage(FINEm, "Applying arith-eval rule lin-times-1"); n->child(0) = k; n->child(1) = e; } // NB: No else here. 
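// (A node rewritten by lin-times-1 into the k * e form may immediately match
// lin-times-2 below, which is why the first two rules are not chained with an
// else.)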
// k1 * (k2 * e) --> k1k2 * e if (isLinTimesNode(n) && isLinTimesNode(n->child(1))) { printMessage(FINEm, "Applying arith-eval rule lin-times-2"); MyInt k1(n->child(0)); MyInt k2(n->child(1)->child(0)); Node* e = n->child(1)->child(1); n->child(0) = MyInt::times(k1,k2).toNode(); n->child(1) = e; } // (k * e) * (k' * e') --> kk' * (e * e') else if (n->kind == I_TIMES && isLinTimesNode(n->child(0)) && isLinTimesNode(n->child(1)) ) { printMessage(FINEm, "Applying arith-eval rule lin-times-3"); MyInt k1(n->child(0)->child(0)); MyInt k2(n->child(1)->child(0)); Node* e = n->child(0)->child(1); n->child(0) = MyInt::times(k1,k2).toNode(); n->child(1)->child(0) = e; } // e * (k * e') --> k * (e * e') else if (n->kind == I_TIMES && isLinTimesNode(n->child(1)) ) { printMessage(FINEm, "Applying arith-eval rule lin-times-4"); Node* e = n->child(0); Node* k = n->child(1)->child(0); n->child(0) = k; n->child(1)->child(0) = e; } // (k * e1) * e2 --> k * (e1 * e2) else if (n->kind == I_TIMES && isLinTimesNode(n->child(0)) ) { Node* k = n->child(0)->child(0); Node* e1 = n->child(0)->child(1); Node* e2 = n->child(1); printMessage(FINEm, "Applying arith-eval rule lin-times-5"); n->child(1) = n->child(0); n->child(0) = k; n->child(1)->child(0) = e1; n->child(1)->child(1) = e2; } // (k1 * e) div k2 --> (k1 div k2) * e if k2 divides k1 else if (n->kind == IDIV && isLinTimesNode(n->child(0)) && isIntNode(n->child(1)) ) { printMessage(FINEm, "Applying arith-eval rule lin-times-div"); MyInt k1(n->child(0)->child(0)); MyInt k2(n->child(1)); if (MyInt::rem(k1, k2) == MyInt(new Node(NATNUM,"0")) ) { n->child(1) = n->child(0)->child(1); n->child(0) = MyInt::div(k1,k2).toNode(); n->kind = I_TIMES; } } return; } void constArithEval(Node* n) { n->mapOver(constArithEvalTop); } spark-2012.0.deb/victor/vct/src/pprinter.hh0000644000175000017500000000656211753202341017523 0ustar eugeneugen//========================================================================== //========================================================================== // PPRINTER.HH //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. 
*/ #ifndef PPRINTER_HH #define PPRINTER_HH #include "box.hh" // Pretty printer class class PP { public: static int basicIndent; static int linearLimit; static string padLeft(const string& s, int width); static string padRight(const string& s, int width); static Box& makeHSeq(const string& prefix, const string& infix, const string& suffix, vector& bs); static Box& makeVSeq(const string& topPrefix, const string& leftPrefix, const string& infix, const string& suffix, vector& bs) ; static Box& makeHVSeq(const string& topPrefix, const string& leftPrefix, const string& infix, const string& suffix, vector& bs) ; static Box& simpleHSeq(vector& bs); static Box& simpleVSeq(vector& bs); static Box& simpleHVSeq(vector& bs); // Keep infix argument. Allows setting of indentation, even if // leftPrefix == "" static Box& makeHVSingleton(const string& topPrefix, const string& leftPrefix, const string& infix, const string& suffix, Box& b) { vector bs(1); bs[0] = &b; return makeHVSeq(topPrefix, leftPrefix, infix, suffix, bs); } static Box& makeHVPair(const string& topPrefix, const string& leftPrefix, const string& infix, const string& suffix, Box& b1, Box& b2) { vector bs(2); bs[0] = &b1; bs[1] = &b2; return makeHVSeq(topPrefix, leftPrefix, infix, suffix, bs); } /* static Box* makeRInfixSeq(string infixSym, vector* bs, PPMode m); static Box* makeLLLSeq(string startD, string sep, string endD, Box* b, PPMode m); static Box* addDelimsTB(string startD, string endD, Box* b, PPMode m); static Box* addDelimsLL(string startD, string endD, Box* b, PPMode m); */ }; #endif // ! defined PPRINTER_HH spark-2012.0.deb/victor/vct/src/formatter.hh0000644000175000017500000001133211753202341017652 0ustar eugeneugen//========================================================================== //========================================================================== // FORMATTER.HH //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ #ifndef FORMATTER_HH #define FORMATTER_HH #include using std::vector; #include using std::string; #include using std::map; #include using std::ostream; using std::endl; #include "box.hh" #include "node.hh" /* A formatter specifies how to create a box tree from a node tree. It provides to other formatting code: 1. a parenthesisation function Input: kind of current node kind of parent of current node number indicating which subterm of the parent the current node is a formatted box for the current node Output: a box for the formatted node, possibly with parentheses added 2. A Box constructor, taking as input: a node kind a node id boxes for each of children and returns a box for term. To initialise a formatter, one provides 1. 
For each node kind used for an operator (a term with open left and/or right sides) a) a precedence number b) a list of subnode paren control constants, one for each of subterms. Options are: L Suppress parens if parent prec < subnode prec E Suppress parens if parent prec <= subnode prec * Suppress parens always E.g. a left associative operator + would use "EL", allowing (a + b) + c to be displayed as a + b + c. List provided simply as string: e.g. "L*E" This provides the information for controlling the parenthesisation function. In special cases, the control constant for the subterm i is not the character i in the list: - List is "EL" (left assoc operator) & operator arity n > 2: subterm gets E if i = 0. o/w gets L - List is "LE" (right assoc operator) & operator arity n > 2: subterm gets E if i for last child. o/w gets L - i >= length of list: subterm gets last constant in list. The idea of the first 2 is to treat an n-ary version of a normally binary operator as an iterated version of the binary operator, associated the standard parenthesis-avoiding way for the operator. 2. The box constructor function. Parenthesisation controlled entirely by first map. Formatters provided for a) Vanilla node tree syntax */ class Formatter { public: // functions for formatting code Box& addParens(z::Kind currentK, z::Kind parentK, int childNum, int parentArity, Box& currentBox); virtual Box& addSyntax(z::Kind k, const string& id, vector& bs) = 0; virtual ~Formatter() {} protected: // For use in constructors of concrete subclasses. void addOpInfo(z::Kind k, int prec, const string& subParens); private: map precMap; map subParensMap; // Static members private: static Formatter* formatter; public: static void setFormatter(Formatter* f); static Box& format(const Node& n); protected: static Box& makeStringAp(const std::string& id, std::vector& bs); }; ostream& operator<<(ostream& os, const Node& n); // These classes included here to provide access to getFormatter() methods. class VanillaFormatter : public Formatter { public: virtual Box& addSyntax(z::Kind k, const string& id, vector& bs); static Formatter* getFormatter(); private: VanillaFormatter(); static Formatter* instance; }; class TestFormatter : public Formatter { public: virtual Box& addSyntax(z::Kind k, const string& id, vector& bs); static Formatter* getFormatter(); private: static Formatter* instance; TestFormatter(); }; #endif // ! defined FORMATTER_HH spark-2012.0.deb/victor/vct/src/typesort.hh0000644000175000017500000000325211753202341017542 0ustar eugeneugen//========================================================================== //========================================================================== // TYPESORT.HH //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ #ifndef TYPESORT_HH #define TYPESORT_HH // Class for doing topological sort on type declarations. #include "node.hh" #include using std::map; class TypeSort { public: static Node* sort(Node* typeDecls); private: enum Colour {WHITE, GREY, BLACK}; class Wrapper { public: Colour colour; Node* node; std::vector children; Wrapper(Node* n) { colour = WHITE; node = n; }; }; static map wmap; static Node* outNodes; static void dfs(std::vector vs); }; #endif // ! defined TYPESORT_HH spark-2012.0.deb/victor/vct/src/cvc-driver.hh0000644000175000017500000000241511753202341017715 0ustar eugeneugen//========================================================================== //========================================================================== // CVC-DRIVER.HH //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ //========================================================================== #ifndef CVC_DRIVER_HH #define CVC_DRIVER_HH #include "smt-driver.hh" // Interface for CVC3 SMTDriver* newCVCDriver(); #endif // ! CVC_DRIVER_HH spark-2012.0.deb/victor/vct/src/pprinter.cc0000644000175000017500000001336511753202341017510 0ustar eugeneugen//========================================================================== //========================================================================== // PPRINTER.CC //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ #include "pprinter.hh" #include // for max function. 
using std::max; int PP::basicIndent = 2; int PP::linearLimit = 60; string PP::padLeft(const string& s, int width) { if ((int) s.size() < width) return string(width - s.size(), ' ') + s; else return s; } string PP::padRight(const string& s, int width) { if ((int) s.size() < width) return s + string(width - s.size(), ' '); else return s; } /* Glue boxes horizontally P Box0 I Box1 I ... I Boxn S Expect that all Boxes height 1, though 1 with greater height should be OK. Whitespace insertion is on safe side here. Could pass in additional spaces as extra parameters with default values for convenience. */ Box& PP::makeHSeq(const string& prefix, const string& infix, const string& suffix, vector& bs) { Box* accum; string paddedInfix( infix.size() == 0 ? " " : " " + infix + " " ); if (bs.size() == 0) { return box(prefix + suffix); // Assume no whitespace needed } else { accum = bs[0]; for (vector::iterator i = ++ bs.begin(); i != bs.end(); i++) { accum = & (*accum + paddedInfix + **i ); } } if (prefix.size() != 0) accum = & (prefix + " " + *accum) ; if (suffix.size() != 0) accum = & (*accum + " " + suffix) ; return *accum; } /* Make vertical sequence of form T L Box0 I Box1 ... I Boxn S As needed, L, I are padded on right to make them equal length, to ensure that at least 1 space after each, and to ensure indent is at least basicIndent. Line for T only added if T not "" Line for S only added if S not "" If both T and L are "", then Box0 is not indented at all. */ Box& PP::makeVSeq(const string& topPrefix, const string& leftPrefix, const string& infix, const string& suffix, vector& bs) { if (bs.size() == 0) { return box(topPrefix + leftPrefix) / box(suffix); } // bs.size != 0 from now on // Indent at least basicIndent and ensure at least one space after // leftPrefix and infix tokens. int leaderWidth = max((int) max(leftPrefix.size() + 1, infix.size() + 1), PP::basicIndent); // leading strings on lines string firstLeader = padRight(leftPrefix, leaderWidth); string restLeader = padRight(infix, leaderWidth); // Special case. If no prefix, first box must start right away if (topPrefix.size() == 0 && leftPrefix.size() == 0) firstLeader = ""; Box* accum = & (firstLeader + *bs[0] ); for (vector::iterator i = ++bs.begin(); i != bs.end(); i++) { accum = & ( *accum / (restLeader + **i) ); } if (topPrefix.size() != 0) accum = & (box(topPrefix) / *accum ); if (suffix.size() != 0) accum = & (*accum / box(suffix) ); return *accum; } Box& PP::makeHVSeq(const string& topPrefix, const string& leftPrefix, const string& infix, const string& suffix, vector& bs) { if (Box::allHeightOne(bs) ) { string prefix (topPrefix + leftPrefix); // Assume no space needed if // Both not "". Box& hbox = makeHSeq(prefix, infix, suffix, bs); if (hbox.getWidth() <= linearLimit) return hbox; } // If get here, only vertical layout is acceptable. return makeVSeq(topPrefix, leftPrefix, infix, suffix, bs); } Box& PP::simpleHSeq(vector& bs) { Box* accum; if (bs.size() == 0) { return box(""); } else { accum = bs[0]; for (vector::iterator i = ++ bs.begin(); i != bs.end(); i++) { accum = & (*accum + box(" ") + **i ); } } return *accum; } /* Make vertical sequence of form Box0 Box1 ... 
Boxn */ Box& PP::simpleVSeq(vector& bs) { if (bs.size() == 0) { return box(""); } // bs.size != 0 from now on Box* accum = bs[0]; for (vector::iterator i = ++bs.begin(); i != bs.end(); i++) { accum = & ( *accum / (**i) ); } return *accum; } Box& PP::simpleHVSeq(vector& bs) { if (Box::allHeightOne(bs) ) { Box& hbox = simpleHSeq(bs); if (hbox.getWidth() <= linearLimit) return hbox; } // If get here, only vertical layout is acceptable. return simpleVSeq(bs); } /* Box* PP::makeLLLSeq(string startD, string sep, string endD, Box* b, PPMode m); Box* PP::addDelimsTB(string startD, string endD, Box* b, PPMode m); Box* PP::addDelimsLL(string startD, string endD, Box* b, PPMode m); */ spark-2012.0.deb/victor/vct/src/csvfilt.cc0000644000175000017500000000555611753202341017322 0ustar eugeneugen/* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ // USAGE: // csvfilt [-v] n str [filename] // Filter csv records, returning just those with str a substring of field // n (1-based). // If -v provided, then return those records without str a substring of field // n. #include #include using std::cout; using std::cin; using std::cerr; using std::endl; using std::istream; #include "utility.hh" #include using std::ifstream; int main (int argc, char *argv[]) { vector nonOptions = processCommandArgs(argc, argv); if (nonOptions.size() < 2) { cerr << "Usage" << endl << endl << " csvfilt [-v] n str [filename] " << endl << endl << "Filter csv (comma-separated-value) records on stdin, returning" << endl << "just those with str a substring of field n (1-based)." << endl << endl << "Options: " << endl << " -v Select records without str a substring of field n." 
<< endl << " filename Read input from file filename rather than stdin" << endl; return 0; } istream* isp = &cin; ifstream ifs; if (nonOptions.size() == 3) { string fileName (nonOptions[2]); ifs.open( fileName.c_str() ); if (!ifs) { cerr << "Unable to open file " << fileName << endl; exit(1); } isp = &ifs; } int pos = stringToInt(nonOptions[0]); string tag = nonOptions[1]; string line; while (getline(*isp, line)) { vector vs = csvDigest(line); if (pos > vs.size()) { cerr << "Position " << pos << " out of range for line" << endl; cerr << line << endl; continue; } bool tagfound = vs[pos-1].find(tag) != string::npos; if (tagfound ^ option("v")) { cout << line << endl; } } return 0; } /* string s1("abc,d \"e,f\", ghi "); string s2(" 123, 4 \"56"); vector v1 = csvDigest(s1); vector v2 = csvDigest(s2); for (int i = 0; i != v1.size(); i++) { cout << "#" << v1[i] << "#" << endl; } cout << endl; for (int i = 0; i != v2.size(); i++) { cout << "#" << v2[i] << "#" << endl; } */ spark-2012.0.deb/victor/vct/src/elim-enums.cc0000644000175000017500000006234211753202341017717 0ustar eugeneugen//======================================================================== //======================================================================== // ELIM-ENUMS.cc //======================================================================== //======================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. 
*/ //======================================================================== // Enumeration type abstraction //======================================================================== void abstractEnums(FDLContext* ctxt, Node* unit) { // Add the enum const declarations to the const declarations for ( map::iterator i = ctxt->enumConstMap.begin(); i != ctxt->enumConstMap.end(); i++ ) { ctxt->constMap.insert(*i); } for (int i = 0; i != ctxt->typeSeq.arity(); i++) { Node* typeDecl = ctxt->typeSeq.child(i); if ( typeDecl->kind == DEF_TYPE && typeDecl->arity() == 1 && typeDecl->child(0)->kind == ENUM_TY) { // Make type declaration abstract typeDecl->clearChildren(); } } } //======================================================================== //======================================================================== // Replace enumeration types with subranges //======================================================================== //======================================================================== //-------------------------------------------------------------------- // isEnumRule //-------------------------------------------------------------------- bool isPosOrValEnumOp (Node* n) { if (n->kind != FUN_AP) return false; const string& s = n->id; return s != "character__pos" && s != "character__val" && (hasSuffix(s, "__pos") || hasSuffix(s, "__val")); } bool isEnumRule(Node* rule) { // return rule->orOver(&isPosOrValOp); return rule->orOver(isPosOrValEnumOp); } //-------------------------------------------------------------------- // fixEnumRel //-------------------------------------------------------------------- // E__LE, E__LT // FUN_AP{__LE}(X,Y) --> I_LE(X, Y) // FUN_AP{__LT}(X,Y) --> I_LT(X, Y) // // ( EQ X Y will get fixed automatically by redefinition of ) void fixEnumRel(Node* n) { if (n->kind != FUN_AP) return; if (hasSuffix(n->id, "__LE")) { n->id = ""; n->kind = I_LE; } if (hasSuffix(n->id, "__LT")) { n->id = ""; n->kind = I_LT; } return; } //-------------------------------------------------------------------- // enumsToIntSubranges //-------------------------------------------------------------------- void enumsToIntSubranges(FDLContext* ctxt, Node* unit) { Node* rules = unit->child(1); // - - - - - - - - - - - - - - - - - - - - - - - - - - // Add the enum const declarations to the const declarations // - - - - - - - - - - - - - - - - - - - - - - - - - - for ( map::iterator i = ctxt->enumConstMap.begin(); i != ctxt->enumConstMap.end(); i++ ) { Node* decl = i->second; ctxt->insert(decl); // ctxt->constMap.insert(*i); } int expectedNumRules = 0; // - - - - - - - - - - - - - - - - - - - - - - - - - - // Save existing rules. 
Get ready to build new rule list // - - - - - - - - - - - - - - - - - - - - - - - - - - Node* oldRules = new Node(*rules); // Use C++-defined copy constructor rules->clearChildren(); // - - - - - - - - - - - - - - - - - - - - - - - - - - // Bring over all existing rules except those for enumeration types // - - - - - - - - - - - - - - - - - - - - - - - - - - int erules = 0; // number of rules eliminated for (int i = 0; i!=oldRules->arity(); i++) { Node* rule = oldRules->child(i); if ( isEnumRule(rule) ) { erules++; } else { rules->addChild(rule); } } // - - - - - - - - - - - - - - - - - - - - - - - - - - // Iterate over each enum type declaration // - - - - - - - - - - - - - - - - - - - - - - - - - - - for (int i = 0; i != ctxt->typeSeq.arity(); i++) { Node* typeDecl = ctxt->typeSeq.child(i); if ( typeDecl->kind == DEF_TYPE && typeDecl->arity() == 1 && typeDecl->child(0)->kind == ENUM_TY) { // - - - - - - - - - - - - - - - - - - - - - - - - - - // Add rules defining E__pos and E__val as identity functions // - - - - - - - - - - - - - - - - - - - - - - - - - - string enumSizeStr = intToString(typeDecl->child(0)->arity()); string enumName = typeDecl->id; rules->addChild ( new Node ( FORALL, new Node(SEQ, new Node(DECL, "i", nameToType(enumName))), new Node(EQ, "", new Node(FUN_AP, enumName + "__pos", new Node(VAR, "i") ), new Node(VAR, "i"), Node::int_ty ) ) // END FORALL ); rules->addChild ( new Node ( FORALL, new Node(SEQ, new Node(DECL, "i", nameToType(enumName))), new Node(EQ, "", new Node(FUN_AP, enumName + "__val", new Node(VAR, "i") ), new Node(VAR, "i"), Node::int_ty ) ) // END FORALL ); // - - - - - - - - - - - - - - - - - - - - - - - - - - // Add rules defining E__succ and E__pred // - - - - - - - - - - - - - - - - - - - - - - - - - - string enumSizeMinusOneStr = intToString(typeDecl->child(0)->arity() - 1); rules->addChild ( new Node ( FORALL, new Node(SEQ, new Node(DECL, "i", nameToType(enumName))), new Node(IMPLIES, new Node(I_LT, new Node (VAR, "i"), new Node (NATNUM, enumSizeMinusOneStr) ), new Node(EQ,"", new Node(FUN_AP, enumName + "__succ", new Node(VAR, "i") ), new Node(I_SUCC, new Node(VAR, "i") ), Node::int_ty ) ) // END IMPLIES ) // END FORALL ); rules->addChild ( new Node ( FORALL, new Node(SEQ, new Node(DECL, "i", nameToType(enumName))), new Node(IMPLIES, new Node(I_LT, new Node (NATNUM, "0"), new Node (VAR, "i") ), new Node(EQ,"", new Node(FUN_AP, enumName + "__pred", new Node(VAR, "i") ), new Node(I_PRED, new Node(VAR, "i") ), Node::int_ty ) ) // END IMPLIES ) // END FORALL ); // - - - - - - - - - - - - - - - - - - - - - - - - - - // For each enum const, add a rule defining int value // - - - - - - - - - - - - - - - - - - - - - - - - - - Node* enumConstNames = typeDecl->child(0); for (int j = 0; j != enumConstNames->arity(); j++) { string enumConstId = enumConstNames->child(j)->id; rules->addChild( new Node (EQ,"", // MAY_BE_REPLACED_BY, new Node (CONST, enumConstId), new Node (NATNUM, intToString(j)), Node::int_ty ) ); } // - - - - - - - - - - - - - - - - - - - - - - - - - - // Fix enumeration type decl to use int or integer subrange // - - - - - - - - - - - - - - - - - - - - - - - - - - int typeSize = typeDecl->child(0)->arity(); Node* newEnumTy = new Node(SUBRANGE_TY, new Node (NATNUM, "0"), new Node (NATNUM, intToString(typeSize - 1)) ); // Use new type in enum type decl typeDecl->child(0) = newEnumTy; // - - - - - - - - - - - - - - - - - - - - - - - - - - // Compute expected number of rules // - - - - - - - - - - - - - - - - - - - - - - - - - - expectedNumRules 
+= 14 + 2*typeSize; } // END if is enum type def } // END for loop over FDL decls // With read-all-decl-files-in-dir option, Victor reads in enum type // declarations without corresponding rule sets, making the // expectedNumRules calculation incorrect, and this check pointless. if (!option("read-all-decl-files-in-dir") && erules != expectedNumRules) { printMessage(WARNINGm, "elim-enums: found " + intToString(erules) + " rules, expected " + intToString(expectedNumRules) + " rules" ); } // - - - - - - - - - - - - - - - - - - - - - - - - - - - // Replace E__LT, E__LE with integer versions // - - - - - - - - - - - - - - - - - - - - - - - - - - - // FUN_AP{__LE}(X,Y) --> I_LE(X, Y) // FUN_AP{__LT}(X,Y) --> I_LT(X, Y) unit->mapOver(fixEnumRel); return; } //======================================================================== //======================================================================== // Add in axioms for isomorphism between abstract type and integer subrange //======================================================================== //======================================================================== // Uses node constructor macros from node.hh. void axiomatiseEnums(FDLContext* ctxt, Node* unit) { Node* rules = unit->child(1); // - - - - - - - - - - - - - - - - - - - - - - - - - - // Add the enum const declarations to the const declarations // - - - - - - - - - - - - - - - - - - - - - - - - - - for ( map::iterator i = ctxt->enumConstMap.begin(); i != ctxt->enumConstMap.end(); i++ ) { Node* decl = i->second; ctxt->insert(decl); // ctxt->constMap.insert(*i); } int expectedNumRules = 0; // - - - - - - - - - - - - - - - - - - - - - - - - - - // Save existing rules. Get ready to build new rule list // - - - - - - - - - - - - - - - - - - - - - - - - - - Node* oldRules = new Node(*rules); // Use C++-defined copy constructor rules->clearChildren(); // - - - - - - - - - - - - - - - - - - - - - - - - - - // Bring over all existing rules except those for enumeration types // - - - - - - - - - - - - - - - - - - - - - - - - - - int numEliminatedRules = 0; for (int i = 0; i!=oldRules->arity(); i++) { Node* oldRule = oldRules->child(i); if ( isEnumRule(oldRule) ) { numEliminatedRules++; } else { rules->addChild(oldRule); } } // - - - - - - - - - - - - - - - - - - - - - - - - - - // Iterate over each enum type declaration // - - - - - - - - - - - - - - - - - - - - - - - - - - - for (int i = 0; i != ctxt->typeSeq.arity(); i++) { Node* typeDecl = ctxt->typeSeq.child(i); if ( typeDecl->kind == DEF_TYPE && typeDecl->arity() == 1 && typeDecl->child(0)->kind == ENUM_TY) { Node* enumType = typeDecl->child(0); int enumSize = enumType->arity(); // Also referred to as k below string enumSizeStr = intToString(enumSize); string enumSizeMinusOneStr = intToString(enumSize-1); string enumName = typeDecl->id; // Type introduces constants econst{0} ... econst{k-1} // - - - - - - - - - - - - - - - - - - - - - - - - - - // Rules for E__val of positions and E__pos of econsts // - - - - - - - - - - - - - - - - - - - - - - - - - - // E__val(i) == econst{i} i = 0 .. k-1 // E__pos(econst{i}) == i i = 0 .. 
k-1 for (int i = 0; i != enumSize; i++) { rules->addChild( nEQ( nFUNAP1( enumName + "__val", nNATNUM( intToString(i))), nCONST( enumType->child(i)->id), nTYPE_ID(enumName)) ); rules->addChild( nEQ( nFUNAP1( enumName + "__pos", nCONST( enumType->child(i)->id)), nNATNUM( intToString(i)), nINT_TY) ); } // - - - - - - - - - - - - - - - - - - - - - - - - - - // Rules for E__pos of E__first and E__last // - - - - - - - - - - - - - - - - - - - - - - - - - - // E__pos(E__first) == 0 // E__pos(E__last) == k-1 rules->addChild ( nEQ( nFUNAP1( enumName + "__pos", nCONST( enumName + "__first")), nNATNUM( "0"), nINT_TY) ); rules->addChild ( nEQ( nFUNAP1( enumName + "__pos", nCONST( enumName + "__last")), nNATNUM( enumSizeMinusOneStr), nINT_TY) ); // - - - - - - - - - - - - - - - - - - - - - - - - - - // Rules for E__pos of E__succ and E__pred // - - - - - - - - - - - - - - - - - - - - - - - - - - // All X:E. X != const(k-1) // ==> E__pos(E__succ(X)) == E__pos(X) + 1 rules->addChild( nFORALL1( "X", nTYPE_ID(enumName), nIMPLIES( nNE( nVAR("X"), nCONST( enumName + "__last"), nTYPE_ID(enumName)), nEQ( nFUNAP1( enumName + "__pos", nFUNAP1( enumName + "__succ", nVAR("X"))), nI_PLUS( nFUNAP1( enumName + "__pos", nVAR("X")), nNATNUM("1")), nINT_TY))) ); // All X:E. X != const(0) ==> E__pos(E__pred(X)) == E__pos(X) - 1 rules->addChild( nFORALL1( "X", nTYPE_ID(enumName), nIMPLIES( nNE( nVAR("X"), nCONST( enumName + "__first"), nTYPE_ID(enumName)), nEQ( nFUNAP1( enumName + "__pos", nFUNAP1( enumName + "__pred", nVAR("X"))), nI_MINUS( nFUNAP1( enumName + "__pos", nVAR("X")), nNATNUM("1")), nINT_TY))) ); // - - - - - - - - - - - - - - - - - - - - - - - - - - // Rules for E__pos bounds // - - - - - - - - - - - - - - - - - - - - - - - - - - // All X:E. 0 <= E__pos(X) // All X:E. E__pos(X) <= k-1 rules->addChild( nFORALL1("X", nTYPE_ID(enumName), nI_LE( nNATNUM("0"), nFUNAP1( enumName + "__pos", nVAR("X")))) ); rules->addChild( nFORALL1("X", nTYPE_ID(enumName), nI_LE( nFUNAP1( enumName + "__pos", nVAR("X")), nNATNUM(enumSizeMinusOneStr))) ); // - - - - - - - - - - - - - - - - - - - - - - - - - - // Rules for E__succ and E__pred of E__val // - - - - - - - - - - - - - - - - - - - - - - - - - - // All I:Int. 0 <= I & I < k-1 // ==> E__succ(E__val(I)) == E__val(I+1) rules->addChild( nFORALL1( "I", nINT_TY, nIMPLIES( nAND( nI_LE( nNATNUM("0"), nVAR("I")), nI_LT( nVAR("I"), nNATNUM( enumSizeMinusOneStr))), nEQ( nFUNAP1( enumName + "__succ", nFUNAP1( enumName + "__val", nVAR("I"))), nFUNAP1( enumName + "__val", nI_PLUS( nVAR("I"), nNATNUM("1"))), nTYPE_ID(enumName)))) ); // All I:Int. 0 < I & I <= k-1 // ==> E__pred(E__val(I)) == E__val(I-1) rules->addChild( nFORALL1( "I", nINT_TY, nIMPLIES( nAND( nI_LT( nNATNUM("0"), nVAR("I")), nI_LE( nVAR("I"), nNATNUM( enumSizeMinusOneStr))), nEQ( nFUNAP1( enumName + "__pred", nFUNAP1( enumName + "__val", nVAR("I"))), nFUNAP1( enumName + "__val", nI_MINUS( nVAR("I"), nNATNUM("1"))), nTYPE_ID(enumName)))) ); // - - - - - - - - - - - - - - - - - - - - - - - - - - // Rules for isomorphism // - - - - - - - - - - - - - - - - - - - - - - - - - - // All I:Int. 0 <= I & I <= k-1 ==> E__pos(E__val(I)) == I rules->addChild( nFORALL1( "I", nINT_TY, nIMPLIES( nAND( nI_LE( nNATNUM("0"), nVAR("I")), nI_LE( nVAR("I"), nNATNUM( enumSizeMinusOneStr))), nEQ( nFUNAP1( enumName + "__pos", nFUNAP1( enumName + "__val", nVAR("I"))), nVAR("I"), nINT_TY))) ); // All X:E. 
E__val(E__pos(X)) == X rules->addChild( nFORALL1( "X", nTYPE_ID(enumName), nEQ( nFUNAP1( enumName + "__val", nFUNAP1( enumName + "__pos", nVAR("X"))), nVAR("X"), nTYPE_ID(enumName)) ) // END FORALL ); // - - - - - - - - - - - - - - - - - - - - - - - - - - // Rules for inequalities // - - - - - - - - - - - - - - - - - - - - - - - - - - // All X,Y:E. (X <= Y) <==> (E__pos(X) <= E__pos(Y)) rules->addChild( nFORALL2( "X", nTYPE_ID(enumName),"Y", nTYPE_ID(enumName), nIFF( nFUNAP2(enumName + "__LE", nVAR("X"), nVAR("Y")), nI_LE( nFUNAP1( enumName + "__pos", nVAR("X")), nFUNAP1( enumName + "__pos", nVAR("Y"))))) ); // All X,Y:E. (X < Y) <==> (E__pos(X) < E__pos(Y)) rules->addChild( nFORALL2( "X", nTYPE_ID(enumName), "Y", nTYPE_ID(enumName), nIFF( nFUNAP2( enumName + "__LT", nVAR("X"), nVAR("Y")), nI_LT( nFUNAP1( enumName + "__pos", nVAR("X")), nFUNAP1( enumName + "__pos", nVAR("Y"))))) ); // All I,J:Int. 0 <= I & I <= k-1 & 0 <= J & J <= k-1 // ==> I <= J <==> E__val(I) <= E__val(J) rules->addChild( nFORALL2( "I", nINT_TY,"J", nINT_TY, nIMPLIES( nAND( nAND( nI_LE( nNATNUM("0"), nVAR("I")), nI_LE( nVAR("I"), nNATNUM( enumSizeMinusOneStr))), nAND( nI_LE( nNATNUM("0"), nVAR("J")), nI_LE( nVAR("J"), nNATNUM( enumSizeMinusOneStr)))), nIFF( nI_LE( nVAR("I"), nVAR("J")), nFUNAP2( enumName + "__LE", nFUNAP1( enumName + "__val", nVAR("I")), nFUNAP1( enumName + "__val", nVAR("J")))))) ); // All I,J:Int. 0 <= I & I <= k-1 & 0 <= J & J <= k-1 // ==> I < J <==> E__val(I) < E__val(J) rules->addChild( nFORALL2( "I", nINT_TY,"J", nINT_TY, nIMPLIES( nAND( nAND( nI_LE( nNATNUM("0"), nVAR("I")), nI_LE( nVAR("I"), nNATNUM( enumSizeMinusOneStr))), nAND( nI_LE( nNATNUM("0"), nVAR("J")), nI_LE( nVAR("J"), nNATNUM( enumSizeMinusOneStr)))), nIFF( nI_LT( nVAR("I"), nVAR("J")), nFUNAP2( enumName + "__LT", nFUNAP1( enumName + "__val", nVAR("I")), nFUNAP1( enumName + "__val", nVAR("J")))))) ); // - - - - - - - - - - - - - - - - - - - - - - - - - - // Make type declaration abstract // - - - - - - - - - - - - - - - - - - - - - - - - - - typeDecl->clearChildren(); // - - - - - - - - - - - - - - - - - - - - - - - - - - // Update expected number of old enum type rules // - - - - - - - - - - - - - - - - - - - - - - - - - - expectedNumRules += 14 + 2*enumSize; } // END if is enum type def } // END for loop over FDL decls // With read-all-decl-files-in-dir option, Victor reads in enum type // declarations without corresponding rule sets, making the // expectedNumRules calculation incorrect, and this check pointless. if (!option("read-all-decl-files-in-dir") && numEliminatedRules != expectedNumRules) { printMessage(WARNINGm, "elim-enums: found " + intToString(numEliminatedRules) + " rules, expected " + intToString(expectedNumRules) + " rules" ); } return; } spark-2012.0.deb/victor/vct/src/processor.hh0000644000175000017500000000254211753202341017671 0ustar eugeneugen//======================================================================== //======================================================================== // PROCESSOR.HH //======================================================================== //======================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. 
Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ //======================================================================== // Intermediate solver-independent processing of FDL unit components. #ifndef PROCESSOR_HH #define PROCESSOR_HH #include "node.hh" #include "utility.hh" std::string elaborateUnit(Node* unit, UnitInfo* unitInfo); #endif // ! defined PROCESSOR_HH spark-2012.0.deb/victor/vct/src/context.hh0000644000175000017500000002072311753202341017337 0ustar eugeneugen//======================================================================== //======================================================================== // CONTEXT.HH //======================================================================== //======================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ //======================================================================== // Class for unit declarations. /* Context includes a notion of Feature. Currently supported features are: SI = standard interpretation QFOL Logic is quasi-first order. Formulas are just terms of type Bool. FOL Logic is properly first order. Formulas distinct from individual type of Bits which replaces Bool type. ( Predicates declarations are distinct from formula declarations. Hoping for this, but not yet implemented) term_ITE ITE operator supported in term language Features in use: A = added, Q = queried AQ "prim bit type has int interp" AQ "FOL" A "Error" Proposed features: ctxt->addFeature("prim bit type has int interp"); "Bit type has Bool interp" "Bit type has Bool interp" "Bit ops have std Bool interp" (bit_and, bit_or, bit_not, bit_iff) "Bit consts have std Bool interp" (bit_true, bit_false) "Bit rels have std Bool interp" (bit_eq[T] bit_le[real] bit_le[int]) Bit refinement changes these... 
A */ #ifndef CONTEXT_HH #define CONTEXT_HH #include using std::set; #include using std::map; using std::make_pair; using std::pair; #include using std::vector; #include "node.hh" using std::string; using namespace z; //============================================================================ // Context class //============================================================================ /* Split declarations and definitions from FDL file into distinct maps for: o types o constants o enumeration constants from enumeration types o vars o functions. Maps are from strings to syntax trees for declarations and definitions. Range for enumeration constant map is constant declarations that have the type field set to a type id using the declared name for the enumeration type. Generally, the assumption is made that there is no overlap between constant, enumeration constant and variable names and that a particular enumeration constant never gets used in more than one enumeration type. SPARK Examiner tool seems to add prefixes to names that probably help in ensuring this. No checks yet done to confirm this assumption. */ // Class is only partially abstract. // Have need to access copies of several components and access iterators. // A hassle to provide these all via interface so just allow public member // access. class FDLContext { public: Node typeSeq; // Mirrors typeMap, but maintains types in // definition before use order Node termSeq; // All const, var, and fun declarations (not enum consts) map typeMap; map constMap; map enumConstMap; // Constants introduced for enum types map varMap; map funMap; map recordFieldMap; // Map from fieldnames to record types. vector pathKinds; vector pathAddr; Node bindings; bool isFormula; // bool isEmbeddedFormula; // bool strictTyping; // Affects behaviour of type inference // If true, expect all polymorphism to be resolved // so type of any operator immediately resolvable // without consulting wider context. // Flags for monitoring progress of resolution of polymorphic operators // and of types of free variables in rules // See normalisation.cc for use of these. bool typeResolutionMadeProgress; bool typeResolutionIncomplete; int typeResolutionPhase; FDLContext(Node* FDL_AST); void insert(Node* decl, bool atEnd = true); void insertTypeAtStart(Node* decl) { insert(decl, false); } void insertTypeAtEnd(Node* decl) { insert(decl, true); } Node* lookupType(const string& s); Node* lookupConst(const string& s); Node* lookupVar(const string& s); Node* lookupFun(const string& s); Node* lookupEnumConst(const string& s); Node* lookupRecordField(const string& s); // Returns TYPE_ID{rectypename} // or UNKNOWN() Node* normaliseType(Node* n); // Expand all top-level type defs Node* canoniseType(Node* n); // Expand top-level type defs down to // TYPE_ID, REAL_TY, INT_TY, BOOL_TY, // or BIT_TY void pushBinding(Node* decl); // Binding from FORALL or EXISTS expression. void popBinding(); // Pop most recently pushed. 
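/* Usage sketch (informal; the declaration names shown are hypothetical):
   the bindings member behaves as a stack mirroring quantifier nesting.
   For   FORALL (SEQ (DECL{x} INT_TY) (DECL{y} REAL_TY)) body   a traversal
   does
       pushBinding(declX); pushBinding(declY);
       ... visit body: lookupBinding("x") / lookupBinding("y") now succeed ...
       popBinding(); popBinding();
   mapOverWithContextAux below follows exactly this push/pop discipline. */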
void pushPathStep(z::Kind k, int i); // Step from node kind k to child i void popPathStep(); string getPathString(); Node* lookupBinding(const string& s); Node* lookupId(const string& s); // Id = bvar, const, var, enum const Node* lookupConstId(const string& s); // Id = const, var, enum const Node* getType(Node* n); Node* getSubNodeTypes (Node* n); Node* extractDecls(); set features; void addFeature(const string& s); void removeFeature(const string& s); bool hasFeature(const string& s); }; //============================================================================ // Template functions on contexts //============================================================================ //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // mapOverWithContext //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Expect typing for f: // // Node* f(FDLContext* c, Node* n) ; // // Applies f in single bottom up pass over n, updating context with // bindings when descending through expr arg of quantifiers. // If f doesn't change node, it should just return it. // - isEmbeddedFormulaCtxt coding is probably incomplete // - // template Node* mapOverWithContextAux(BinFun& f, FDLContext* c, Node* n) { bool isFormulaCtxt = c->isFormula; bool isEmbeddedFormulaCtxt = c->isEmbeddedFormula; // [FORALL|EXISTS] (SEQ decl+) exp where decl ::= DECL{id} type if (n->kind == FORALL || n->kind == EXISTS) { c->isFormula = false; c->pushPathStep(n->kind, 0); n->child(0) = mapOverWithContextAux(f, c, n->child(0)); c->popPathStep(); c->isFormula = isFormulaCtxt; for ( int i = 0; i != n->child(0)->arity(); i++) { Node* decl = n->child(0)->child(i); c->pushBinding(decl); } c->pushPathStep(n->kind, 1); n->child(1) = mapOverWithContextAux(f, c, n->child(1)); c->popPathStep(); for ( int i = 0; i != n->child(0)->arity(); i++) { c->popBinding(); } } else { if (! isCompoundProp(n)) c->isFormula = false; for (int i = 0; i != n->arity(); i++) { c->pushPathStep(n->kind, i); n->child(i) = mapOverWithContextAux(f, c, n->child(i)); c->popPathStep(); } } // Restore formula status of n. c->isFormula = isFormulaCtxt; c->isEmbeddedFormula = isEmbeddedFormulaCtxt; return f(c,n); } template Node* mapOverWithContext(BinFun& f, FDLContext* c, Node* n) { // if (n->kind == UNIT || n->kind == RULES || n->kind == GOALS) { // c->isFormula = true; // } c->isFormula = true; return mapOverWithContextAux(f, c, n); } #endif // ! defined CONTEXT_HH spark-2012.0.deb/victor/vct/src/normalisation.hh0000644000175000017500000000255011753202341020530 0ustar eugeneugen//======================================================================== //======================================================================== // NORMALISATION.HH //======================================================================== //======================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ //======================================================================== // Put FDL into Standard Form #ifndef NORMALISATION_HH #define NORMALISATION_HH #include "node.hh" #include "context.hh" #include "utility.hh" FDLContext* putUnitInStandardForm(Node* unit, UnitInfo* unitInfo); #endif // ! defined NORMALISATION_HH spark-2012.0.deb/victor/vct/src/normalisation.cc0000644000175000017500000013150511753202341020521 0ustar eugeneugen//======================================================================== //======================================================================== // NORMALISATION.CC //======================================================================== //======================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ //======================================================================== // Put FDL into Standard Form #include using std::map; using std::make_pair; using std::pair; #include #include // For swap. using std::swap; #include using std::set; #include "normalisation.hh" #include "utility.hh" #include "node-utils.hh" #include "formatter.hh" using std::string; using namespace z; //======================================================================== // Regularise Unit Structure //======================================================================== // Split off declarations into maps for each decl kind. // Turn rules into formulas. // Update top-level unit structure. //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // normaliseRule //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ /* Generate equivalent formula for rule. MAY_BE_REPLACED_BY e1 e2 --> EQ e1 e2 MAY_BE_REPLACED_BY e1 e2 (SEQ es) --> IMPLIES (AND es) (EQ e1 e2) ARE_INTERCHANGEABLE e1 e2 --> EQ e1 e2 ARE_INTERCHANGEABLE e1 e2 (SEQ es) --> IMPLIES (AND es) (EQ e1 e2) MAY_BE_DEDUCED e --> e MAY_BE_DEDUCED e (SEQ es) --> IMPLIES (AND es) e If any of preconditions are FUN_AP{goal} (FUN_AP{checktype} ID{a} ID{t} ) then strip out as DECl{a} nameToType(t). (nameToType converts most ids to TYPE_ID, except for for built-in types integer, real, boolean, bit___type, where it returns the special operator) If any are FUN_AP{goal} anything else, then strip out completely and flag error. If stripped out decls are non-empty, add them on to final formula in a universal quantifier. */ Node* normaliseRule(Node* rule) { // Target structure is // FORALL decls (IMPLIES (AND antecedents) consequent) // where FORALL, IMPLIES and AND are omitted when not needed. 
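/* Worked example (informal; the rule contents are hypothetical):
       MAY_BE_REPLACED_BY e1 e2 (SEQ goal(checktype(X, integer)) p)
   is normalised to
       FORALL (SEQ (DECL{X} nameToType(integer))) (IMPLIES p (EQ e1 e2))
   The Prolog "goal" precondition is stripped into a declaration, the single
   remaining precondition p becomes the antecedent (no AND wrapper is added
   for a lone antecedent), and the consequent is obtained by re-kinding the
   MAY_BE_REPLACED_BY node to EQ. */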
// Extract out rule preconditions and consequent Node* rulePreconditions; Node* consequent; switch(rule->kind) { case MAY_BE_REPLACED_BY: case ARE_INTERCHANGEABLE: { if (rule->arity() == 2) { rulePreconditions = new Node(SEQ); } else { // arity must be 3. rulePreconditions = rule->child(2); rule->popChild(); } consequent = rule->updateKind(EQ); break; } case MAY_BE_DEDUCED: { if (rule->arity() == 1) { rulePreconditions = new Node(SEQ); } else { // arity must be 2 rulePreconditions = rule->child(1); } consequent = rule->child(0); break; } default: assert(false); return 0; } // END switch // Process preconditions Node* decls = new Node(SEQ); Node* antecedents = new Node(AND); for (int i = 0; i != rulePreconditions->arity(); i++) { Node* p = rulePreconditions->child(i); if ( p->kind == FUN_AP && p->id == "goal") { Node* prologCmd = p->child(0); if ( prologCmd->kind == FUN_AP && prologCmd->id == "checktype" && prologCmd->child(0)->kind == ID && prologCmd->child(1)->kind == ID) { string id = prologCmd->child(0)->id; Node* type = nameToType(prologCmd->child(1)->id); decls->addChild(new Node (DECL, id, type)); } else { printMessage(ERRORm, "normaliseRule: unexpected Prolog precondition" + ENDLs + prologCmd->toString() ); } } else { antecedents->addChild(p); } } // Build rule formula Node* body = antecedents->arity() == 0 ? consequent : antecedents->arity() == 1 ? new Node (IMPLIES, antecedents->child(0), consequent) : // antecedents->arity() > 1 new Node (IMPLIES, antecedents, consequent) ; return decls->arity() == 0 ? body : new Node (FORALL, decls, body) ; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // flattenRules //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Flatten grouping of rules into rule families. // o Ignore type assumption information in each family. // o Replace each special rule form by appropriate logical expression. void flattenRules(Node* RLS_AST) { // ------------------------------------------------------------------- // Process RLS file // ------------------------------------------------------------------- Node* result = new Node(RULES); // Loop over rule families for (int i = 0; i != RLS_AST->arity(); i++) { Node* rFam = RLS_AST->child(i); Node* rules = rFam->child(1); // Loop over rules in family for (int j = 0; j != rules->arity(); j++) { Node* rule = rules->child(j); // rule = RULE{} rule->child(0) = normaliseRule(rule->child(0)); result->addChild(rule); } } // Destructively modify initial top Node object. * RLS_AST = * result; return; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // RegulariseUnitStructure //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // void fixFDLVar(Node* n) { if (n->kind == ID) { if (hasSuffix(n->id,"~")) { n->id = fixSuffix(n->id, "~", // old suffix "", // new prefix "___init"); } else if (hasSuffix(n->id,"%")) { n->id = fixSuffix(n->id, "%", // old suffix "", // new prefix "___loopinit"); } } return; } FDLContext* RegulariseUnitStructure(Node* unit) { FDLContext* ctxt = new FDLContext(unit->child(0)); unit->child(0)->clearChildren(); // Delete context part of unit. 
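/* Illustrative note (informal; identifier x is hypothetical): the
   mapOver(fixFDLVar) call at the end of this function renames the decorated
   FDL identifiers, e.g.  x~  becomes  x___init  and  x%  becomes
   x___loopinit , so the '~' and '%' decorations do not reach later passes. */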
flattenRules(unit->child(1)); unit->child(0)->kind = DECLS; unit->child(2)->kind = GOALS; unit->mapOver(fixFDLVar); return ctxt; } //======================================================================== // Augment const and var declarations //======================================================================== void augmentConstDecls(FDLContext* ctxt, Node* unit) { // ------------------------------------------------------------------- // Add X__base__first and X__base__last constants if not declared. // ------------------------------------------------------------------- map oldConstMap(ctxt->constMap);// Make reference copy of // constMap for (map::iterator i = oldConstMap.begin(); i != oldConstMap.end(); i++) { const char* sufs[] = {"__first", "__last"}; for (int j = 0 ; j != 2; j++) { string suf(sufs[j]); if (hasSuffix(i->first, suf) && ! hasSuffix(i->first, "__base" + suf)) { string newId = i->first; newId.erase(newId.size() - suf.size()); newId.append("__base"); newId.append(suf); if (oldConstMap.count(newId) == 0) { printMessage(FINEm, "Adding missing declaration for " + newId); Node* newDef = new Node(DEF_CONST, newId, i->second->children); ctxt->insert(newDef); } } } } //-------------------------------------------------------------------- // Add fun type decls for enumeration types //-------------------------------------------------------------------- //-------------------------------------------------------------------- // Add declarations for record size constants //-------------------------------------------------------------------- for (int i = 0; i != ctxt->typeSeq.arity(); i++) { Node* typeDecl = ctxt->typeSeq.child(i); if ( typeDecl->kind == DEF_TYPE && typeDecl->arity() == 1) { Node* type = typeDecl->child(0); if (type->kind == ENUM_TY) { string enumName = typeDecl->id; Node* enumTy = new Node(TYPE_ID, enumName); // E__pos : E -> int ctxt->insert(new Node(DECL_FUN, enumName + "__pos", new Node(SEQ, enumTy), Node::int_ty)); // E__val : int -> E ctxt->insert(new Node(DECL_FUN, enumName + "__val", new Node(SEQ, Node::int_ty), enumTy->copy())); // E__succ : E -> E ctxt->insert(new Node(DECL_FUN, enumName + "__succ", new Node(SEQ, enumTy->copy()), enumTy->copy())); // E__pred : E -> E ctxt->insert(new Node(DECL_FUN, enumName + "__pred", new Node(SEQ, enumTy->copy()), enumTy->copy())); // E__LE : E * E -> bool ctxt->insert(new Node(DECL_FUN, enumName + "__LE", new Node(SEQ, enumTy->copy(), enumTy->copy()), Node::bool_ty)); // E__LT : E * E -> bool ctxt->insert(new Node(DECL_FUN, enumName + "__LT", new Node(SEQ, enumTy->copy(), enumTy->copy()), Node::bool_ty)); } // END if type decl of enum type // V8.1.4 seems to require these declarations else if (type->kind == RECORD_TY) { string recordName = typeDecl->id; ctxt->insert(new Node(DEF_CONST, recordName + "__size", Node::int_ty)); } } // END if type is definition } // END for loop over typeSeq return; } //======================================================================== // Find undeclared constants and functions //======================================================================== class declCheck { public: set constIds; set funIds; Node* operator() (FDLContext* c, Node* n); }; Node* declCheck::operator() (FDLContext* c, Node* n) { if (n->kind == ID && !hasUpperCaseStart(n->id) && c->lookupId(n->id) == 0) { constIds.insert(n->id); } else if (n->kind == FUN_AP && c->lookupFun(n->id) == 0) { funIds.insert(n->id); } return n; } // return true if success void findUndeclaredIds(FDLContext* ctxt, Node* unit, string& 
undeclaredIds, string& undeclaredFuns) { declCheck c; mapOverWithContext(c, ctxt, unit); for (set::iterator i = c.constIds.begin() ; i != c.constIds.end() ; i++ ) { undeclaredIds += ENDLs + (*i); } for (set::iterator i = c.funIds.begin() ; i != c.funIds.end() ; i++ ) { undeclaredFuns += ENDLs + (*i); } return; } bool checkForUndeclaredIds(FDLContext* ctxt, Node* unit) { string idStr; string funStr; findUndeclaredIds(ctxt, unit, idStr, funStr); if (idStr.size() > 0) { printMessage(ERRORm, "Found undeclared identifiers:" + idStr); } if (funStr.size() > 0) { printMessage(ERRORm, "Found undeclared functions:" + funStr); } return idStr.size() + funStr.size() > 0 ; } // Rules with undeclared ids replaced with constant true. // Positions of deleted rules recorded in unitInfo->excludedRules. void deleteRulesWithUndeclaredIds(FDLContext* ctxt, Node* unit, UnitInfo* unitInfo) { Node* rules = unit->child(1); for (int ruleNum = 1; ruleNum <= rules->arity(); ruleNum++) { Node* rule = rules->child(ruleNum-1); string undeclaredIds; string undeclaredFuns; findUndeclaredIds(ctxt, rule, undeclaredIds, undeclaredFuns); if (undeclaredIds.size() + undeclaredFuns.size() > 0) { // rule = RULE{} string ruleName = rule->id; rule->child(0) = nTRUE; printMessage(INFOm, "Deleting rule " + ruleName + "because of " + ENDLs + "undeclared identifiers:" + undeclaredIds + ENDLs + "and undeclared functions:" + undeclaredFuns); unitInfo->addExcludedRule(ruleNum-1); } } // End for } //======================================================================== // normaliseIneqs //======================================================================== void normaliseIneqs(Node* n) { if (n->kind == GE) { n->kind = LE; swap(n->child(0), n->child(1)); } if (n->kind == GT) { n->kind = LT; swap(n->child(0), n->child(1)); } return; } //======================================================================== // Simplify array constructors //======================================================================== // exp ::= // MK_ARRAY{arr-name} a1 ... an // | MK_ARRAY{arr-name} exp a1 ... an // // a ::= ASSIGN indexset val // // indexset ::= // i // | INDEX_AND i1 ... ik, k > 1 // // i ::= SEQ ie1 ... iem // // ie ::= exp | SUBRANGE exp exp // 1. Eliminate use of ASSIGN for multiple index tuples // // (INDEX_AND i1 ... ik) ASSIGN e ---> (i1 ASSIGN e) ... (ik ASSIGN e) // 2. If one index in an index set uses a SUBRANGE expression, make sure all // of them do. Node* simpMkArray(Node* n) { if (n->kind != MK_ARRAY) return n; // Eliminate multiple index ASSIGNs Node* newN = new Node(MK_ARRAY, n->id); for (int i = 0; i != n->arity(); i++) { Node* c = n->child(i); if (c->kind != ASSIGN || c->child(0)->kind != INDEX_AND) { newN->addChild(c); } else { // c = ASSIGN (INDEX_AND i1 ... 
ik) e Node* indexSet = c->child(0); Node* e = c->child(1); // Reuse existing nodes for 1st of assignments c->child(0) = indexSet->child(0); newN->addChild(c); // Create new nodes for rest of assignments for (int j = 1; j < indexSet->arity(); j++) { newN->addChild(new Node (ASSIGN, indexSet->child(j), e->copy()) ); } } } // Make use of SUBRANGE uniform for (int i = 0; i != newN->arity(); i++) { if (newN->child(i)->kind != ASSIGN) continue; Node* indexSet = newN->child(i)->child(0); bool existsSubrangeExp = false; for (int j = 0; j != indexSet->arity(); j++) { if (indexSet->child(j)->kind == SUBRANGE) { existsSubrangeExp = true; break; } } if (existsSubrangeExp) { for (int j = 0; j != indexSet->arity(); j++) { if (indexSet->child(j)->kind != SUBRANGE) { indexSet->child(j) = new Node(SUBRANGE, indexSet->child(j), indexSet->child(j)->copy() ); } } } } return newN; } //======================================================================== // Close rules. //======================================================================== // Infer types for free variables in rules and universally quantify them. /* Strategy: - Consider formula f. - need to infer type for each free Prolog var. - Then replace f with universal closure of f, closing over these free vars. Input f. Output closure of f. Need to map over f, maintaining map of freevars to types. To allow for binders already in f, map over f function should maintain context. mapped function operation: - consider each subnode n of f in turn, maintaining context of bindings - consider each ith child n' of n in turn if n' is an unbound Upper-case var then Let Tyn' = best guess at type that node n expects n' to have if (n' is unbound in typing map) then create entry in typing map n' -> Tyn' else merge new guess with existing guess in map for n'. - At end, - Form closure using resolved bindings. - Flag if there any unresolved bindings - If, in phase 3 of type resolution, an unresolved binding is for ir or ire type, choose to use i. Hopefully this guess is right some of the time, maybe more often than other guesses. */ class VarTypingFun { public: map vMap; VarTypingFun() {}; Node* mergeVarTypings(Node* ty1, Node* ty2, FDLContext* c); Node* operator() (FDLContext* c, Node* n); }; Node* VarTypingFun::mergeVarTypings(Node* ty1, Node* ty2, FDLContext* c) { // Hopefully common case first. if (ty1->kind == ty2->kind) { return ty1; } Kind k1 = c->canoniseType(ty1)->kind; Kind k2 = c->canoniseType(ty2)->kind; if (k1 == UNKNOWN) { return ty2; } if (k2 == UNKNOWN) { return ty1; } if (k1 == INT_REAL_OR_ENUM_TY) { // Expect ty2 is INT_TY, REAL_TY, ENUM_TY, INT_OR_REAL_TY or // INT_REAL_OR_ENUM_TY. return ty2; } if (k2 == INT_REAL_OR_ENUM_TY) { // Expect ty1 is INT_TY, REAL_TY, ENUM_TY or INT_OR_REAL_TY return ty1; } if (k1 == INT_OR_REAL_TY) { // Expect ty2 is INT_TY, REAL_TY or INT_OR_REAL_TY return ty2; } if (k2 == INT_OR_REAL_TY) { // Expect ty2 is INT_TY or REAL_TY return ty1; } if (k1 == INT_TY && k2 == REAL_TY) { return ty1; } if (k1 == REAL_TY && k2 == INT_TY) { return ty2; } if (k1 == k2) { return ty1; } return Node::no_ty; // Types are incompatible. Hopefully never see this! 
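/* Summary sketch (informal): the merge above acts like a meet over the
   typing guesses
       UNKNOWN  >  INT_REAL_OR_ENUM_TY  >  INT_OR_REAL_TY  >  concrete types
   e.g. merging INT_OR_REAL_TY with REAL_TY yields REAL_TY, while merging
   INT_TY with REAL_TY keeps the INT_TY guess (an integer can still be
   coerced to real later by insertRealCoercion). */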
} Node* VarTypingFun::operator() (FDLContext* c, Node* n) { Formatter::setFormatter(VanillaFormatter::getFormatter()); Node* subNodeTypes = c->getSubNodeTypes(n); vector subNodes = n->getSubNodes(); for ( int i = 0; i != (int) subNodes.size(); i++) { Node* subNode = *(subNodes.at(i)); string subNodeId = subNode->id; if ( subNode->kind == ID && hasUpperCaseStart(subNodeId) && c->lookupId(subNodeId) == 0 ) { Node* subNodeTy; if (subNodeTypes != 0) { subNodeTy = subNodeTypes->child(i); } else { subNodeTy = Node::unknown; } pair< map::iterator,bool > p = vMap.insert(make_pair(subNodeId, subNodeTy)); bool insertSuccess = p.second; if (!insertSuccess) { Node** typeInMap = & ((p.first)->second); *typeInMap = mergeVarTypings(*typeInMap, subNodeTy, c); } } } return n; } Node* closeExpr(FDLContext* c, const string& ruleName, Node* expr) { VarTypingFun varTypingFun; mapOverWithContext(varTypingFun, c, expr); if (varTypingFun.vMap.size() == 0) { return expr; } Formatter::setFormatter(VanillaFormatter::getFormatter()); printMessage(FINEm, "closeExpr: vars found in rule " + ruleName + ENDLs + expr->toString()); Node* result; Node* decls; if (expr->kind == FORALL) { decls = expr->child(0); result = expr; } else { decls = new Node(SEQ); result = new Node(FORALL, decls, expr); } for (map::iterator i = varTypingFun.vMap.begin(); i != varTypingFun.vMap.end(); i++) { string id = i->first; Node* type = i->second; Kind tyKind = type->kind; if ( c->typeResolutionPhase == 3 && (tyKind == INT_OR_REAL_TY || tyKind == INT_REAL_OR_ENUM_TY || tyKind == UNKNOWN) ) { decls->addChild(new Node(DECL, id, new Node(INT_TY))); c->typeResolutionMadeProgress = true; if (tyKind == INT_OR_REAL_TY) { printMessage(INFOm, "closeExpr: Free variable " + id + " in rule " + ruleName + ENDLs + " is constrained to have integer or real type." + ENDLs + "Speculatively assigning it to have integer type "); } else if (tyKind == INT_REAL_OR_ENUM_TY) { printMessage(INFOm, "closeExpr: Free variable " + id + " in rule " + ruleName + ENDLs + " is constrained to have integer, real or enumeration type." + ENDLs + "Speculatively assigning it to have integer type "); } else { printMessage(INFOm, "closeExpr: Free variable " + id + " in rule " + ruleName + ENDLs + " has no constraints on its typing. " + ENDLs + "Speculatively assigning it to have integer type "); } } else if (tyKind == UNKNOWN || tyKind == INT_OR_REAL_TY || tyKind == INT_REAL_OR_ENUM_TY || tyKind == NO_TY) { c->typeResolutionIncomplete = true; } else { decls->addChild(new Node(DECL, id, type->copy())); c->typeResolutionMadeProgress = true; } } return result; } void closeRules(FDLContext* ctxt, Node* unit) { Node* rules = unit->child(1); for (int i = 0; i != rules->arity(); i++) { Node* rule = rules->child(i); // rule = RULE{} rule->child(0) = closeExpr(ctxt, rule->id, rule->child(0)); } return; } //======================================================================== // Resolve overloading. //======================================================================== //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // resolveIneqs //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Resolve overloading of LT and LE by looking at types of children. 
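/* Worked example (informal; "colour" is a hypothetical enumeration type):
       LE x y   with x, y : integer   -->  I_LE x y
       LE x y   with x, y : real      -->  R_LE x y
       LE x y   with x, y : colour    -->  FUN_AP{colour__LE} x y
   and likewise LT maps to I_LT / R_LT / colour__LT.  When only one side
   resolves to an integer type, the I_ form is chosen speculatively in the
   later type-resolution phases. */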
Node* resolveIneqs (FDLContext* ctxt, Node* n) { string enumSuffix; Kind intRelKind; Kind realRelKind; if (n->kind == LE) { enumSuffix = "__LE"; intRelKind = I_LE; realRelKind = R_LE; } else if (n->kind == LT) { enumSuffix = "__LT"; intRelKind = I_LT; realRelKind = R_LT; } else { return n; } Node* child0BaseTy = ctxt->normaliseType(ctxt->getType(n->child(0)))->expandSubranges(); Node* child1BaseTy = ctxt->normaliseType(ctxt->getType(n->child(1)))->expandSubranges(); if (child0BaseTy->kind == INT_TY && child1BaseTy->kind == INT_TY) { n->kind = intRelKind; ctxt->typeResolutionMadeProgress = true; } else if (ctxt->typeResolutionPhase >= 2 && (child0BaseTy->kind == INT_TY || child1BaseTy->kind == INT_TY) ) { n->kind = intRelKind; ctxt->typeResolutionMadeProgress = true; printMessage(INFOm, "resolveIneqs: Speculatively inserting " + kindString(intRelKind) + " node at position " + ctxt->getPathString()); } else if (child0BaseTy->kind == REAL_TY || child1BaseTy->kind == REAL_TY) { n->kind = realRelKind; ctxt->typeResolutionMadeProgress = true; } else if (child0BaseTy->kind == ENUM_TY) { n->kind = FUN_AP; n->id = (child0BaseTy->id) + enumSuffix; ctxt->typeResolutionMadeProgress = true; } else if (child1BaseTy->kind == ENUM_TY) { n->kind = FUN_AP; n->id = (child1BaseTy->id) + enumSuffix; ctxt->typeResolutionMadeProgress = true; } else { ctxt->typeResolutionIncomplete = true; } return n; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // resolveSuccPred //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ Node* resolveSuccPred(FDLContext* ctxt, Node* n) { if (! (n->kind == SUCC || n->kind == PRED) ) return n; Node* childTy = ctxt->getType(n->child(0)); Node* baseChildTy = ctxt->normaliseType(childTy)->expandSubranges(); if (baseChildTy->kind == INT_TY) { if (n->kind == SUCC) { n->kind = I_SUCC; } else { n->kind = I_PRED; } ctxt->typeResolutionMadeProgress = true; return n; } if ( baseChildTy->kind == ENUM_TY) { if (n->kind == SUCC) { n->id = (baseChildTy->id) + "__succ"; } else { n->id = (baseChildTy->id) + "__pred"; } n->kind = FUN_AP; ctxt->typeResolutionMadeProgress = true; return n; } ctxt->typeResolutionIncomplete = true; return n; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // resolveEq //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Add type of equality or disequality as 3rd child. // Is there a coercion from ty1 to ty2? bool existsCoercion(Node* ty1, Node* ty2) { return ty1->kind == INT_TY && ty2->kind == REAL_TY; } bool hasSupertype(Node* ty) { return ty->kind == INT_TY; } Node* resolveEq(FDLContext* ctxt, Node* n) { if (! ((n->kind == EQ || n->kind == NE) && n->arity() == 2) ) return n; Node* child0Ty = ctxt->getType(n->child(0))->expandSubranges(); Node* child1Ty = ctxt->getType(n->child(1))->expandSubranges(); // Pick up an easy common case first if (child0Ty->equals(child1Ty) && child0Ty->kind != UNKNOWN) { n->addChild(child0Ty->copy()); ctxt->typeResolutionMadeProgress = true; return n; } Node* child0BaseTy = ctxt->canoniseType(child0Ty)->expandSubranges(); Node* child1BaseTy = ctxt->canoniseType(child1Ty)->expandSubranges(); /* Cases U = unknown type T = known type with no supertype S = known type with supertype U,T & T,U Use T T,T Use T S,T & T,S Use T S,S Use S (NB: no cases in FDL when have 2 distinct Ss) U,S & S,U Fail in Phase 1. Use S in Phases 2 & 3. U,U Fail */ if (child0Ty->kind != UNKNOWN && ! 
hasSupertype(child0Ty)) { // T,U T,S T,T n->addChild(child0BaseTy->copy()); ctxt->typeResolutionMadeProgress = true; return n; } if (child1Ty->kind != UNKNOWN && ! hasSupertype(child1Ty)) { // U,T S,T (T,T) n->addChild(child1BaseTy->copy()); ctxt->typeResolutionMadeProgress = true; return n; } if (child0BaseTy->equals(child1BaseTy) && child0Ty->kind != UNKNOWN) { // S,S (T,T) n->addChild(child0BaseTy->copy()); ctxt->typeResolutionMadeProgress = true; return n; } if (ctxt->typeResolutionPhase >= 2) { if (child0Ty->kind == UNKNOWN && child1Ty->kind != UNKNOWN) { // U,S (U,T) n->addChild(child1Ty->copy()); printMessage(INFOm, "resolveEq: Speculatively adding type " + child1Ty->toString() + " to " + kindString(n->kind) + " node at position " + ctxt->getPathString()); ctxt->typeResolutionMadeProgress = true; return n; } if (child0Ty->kind != UNKNOWN && child1Ty->kind == UNKNOWN) { // S,U (T,U) n->addChild(child0Ty->copy()); printMessage(INFOm, "resolveEq: Speculatively adding type " + child0Ty->toString() + " to " + kindString(n->kind) + " node at position " + ctxt->getPathString()); ctxt->typeResolutionMadeProgress = true; return n; } } ctxt->typeResolutionIncomplete = true; return n; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Resolve arithmetic Real/Int overloading //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ Node* resolveUnaryArithNode(FDLContext* c, Node* n, Kind iKind, Kind rKind) { Node* ty0 = c->normaliseType(c->getType(n->child(0)))->expandSubranges(); if (ty0->kind == INT_TY) { c->typeResolutionMadeProgress = true; return n->updateKind(iKind); } else if (ty0->kind == REAL_TY) { c->typeResolutionMadeProgress = true; return n->updateKind(rKind); } else { c->typeResolutionIncomplete = true; return n; } } Node* resolveBinaryArithNode(FDLContext* c, Node* n, Kind iKind, Kind rKind) { Node* ty0 = c->normaliseType(c->getType(n->child(0)))->expandSubranges(); Node* ty1 = c->normaliseType(c->getType(n->child(1)))->expandSubranges(); if (ty0->kind == REAL_TY || ty1->kind == REAL_TY) { // R,R I,R R,I U,R R,U c->typeResolutionMadeProgress = true; return n->updateKind(rKind); } else if (ty0->kind == INT_TY && ty1->kind == INT_TY) { // I,I c->typeResolutionMadeProgress = true; return n->updateKind(iKind); } else if (c->typeResolutionPhase >= 2 && (ty0->kind == INT_TY || ty1->kind == INT_TY) ) { // I,U U,I Phases 2 & 3. c->typeResolutionMadeProgress = true; printMessage(INFOm, "resolveBinaryArithNode: Speculatively inserting " + kindString(iKind) + " at position " + c->getPathString()); return n->updateKind(iKind); } else { c->typeResolutionIncomplete = true; return n; } } Node* resolveArithOps(FDLContext* c, Node* n) { switch (n->kind) { case(PLUS): return resolveBinaryArithNode(c, n, I_PLUS, R_PLUS); case(TIMES): return resolveBinaryArithNode(c, n, I_TIMES, R_TIMES); case(MINUS): return resolveBinaryArithNode(c, n, I_MINUS, R_MINUS); case(EXP): return resolveBinaryArithNode(c, n, I_EXP, R_EXP); case(UMINUS): return resolveUnaryArithNode(c, n, I_UMINUS, R_UMINUS); case(ABS): return resolveUnaryArithNode(c, n, I_ABS, R_ABS); case(SQR): return resolveUnaryArithNode(c, n, I_SQR, R_SQR); default: return n; } } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Augmentation of array operators with array name information. 
//++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ /* ARR_ELEMENT arr index-tup --> ARR_ELEMENT{type-id} arr index-tup ARR_UPDATE arr index-tup val --> ARR_UPDATE{type-id} arr index-tup val where t-id is the type-id for the type of argument rcd. */ Node* addTypeToArrayOp (FDLContext* c, Node* n) { if ((n->kind == ARR_ELEMENT && n->id.size() == 0) || (n->kind == ARR_UPDATE && n->id.size() == 0)) { Node* arrTy = c->getType(n->child(0)); Node* normArrTy = c->normaliseType(arrTy); if (normArrTy->kind == ARRAY_TY && normArrTy->id.size() > 0) { n->id = normArrTy->id; c->typeResolutionMadeProgress = true; } else { c->typeResolutionIncomplete = true; } } return n; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Augmentation of record operators with record name information. //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ /* RCD_ELEMENT{rcd-id} rcd --> RCD_ELEMENT{rcd-id} rcd t-id RCD_UPDATE{rcd-id} rcd val --> RCD_UPDATE{rcd-id} rcd val t-id where t-id is the type-id for the type of argument rcd. Two strategies are used: 1. Look at record type declarations in FDL file and see if only one record type uses the rcd-id fieldname. If so, use that type. 2. Try to infer type of rcd argument. */ Node* addTypeToRecordOp (FDLContext* c, Node* n) { if ((n->kind == RCD_ELEMENT && n->arity() == 1) || (n->kind == RCD_UPDATE && n->arity() == 2)) { Node* rcdTyFromDecls = c->lookupRecordField(n->id); if (rcdTyFromDecls->kind != UNKNOWN) { n->addChild(rcdTyFromDecls->copy()); c->typeResolutionMadeProgress = true; return n; } Node* rcdTy = c->getType(n->child(0)); Node* normRcdTy = c->normaliseType(rcdTy); if (normRcdTy->kind == RECORD_TY) { n->addChild(new Node(TYPE_PARAM, normRcdTy->id)); c->typeResolutionMadeProgress = true; } else { c->typeResolutionIncomplete = true; } } return n; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Add int to real coercions. //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Try to insert coercion. If see problem, just continue. Typechecking later // will catch it. Node* insertRealCoercion(FDLContext* c, Node* n) { // Skip n if is internal node of compound operator if (n->kind == SEQ || n->kind == ASSIGN || n->kind == SUBRANGE || n->kind == DECL) return n; vector subNodes = n->getSubNodes(); Node* subNodeTypes = c->getSubNodeTypes(n); // Expected types of subnodes for (int i = 0; i != (int) subNodes.size(); i++) { Node** subNode = subNodes.at(i); Node* subNodeTy = c->normaliseType(c->getType(*subNode))->expandSubranges(); if (subNodeTy->kind != INT_TY) continue; // Put test here rather than at top so we only report error // when coercion might be needed. 
if (subNodeTypes == 0) { printMessage(FINEm, "insertRealCoercion: can't find child types of " + kindString(n->kind)); return n; } Node* expectedSubNodeTy = c->normaliseType(subNodeTypes->child(i))->expandSubranges(); if (expectedSubNodeTy->kind == INT_TY) { continue; } else if (expectedSubNodeTy->kind == REAL_TY) { *subNode = new Node(TO_REAL, *subNode); } else { printMessage(INFOm, "insertRealCoercion: trying to coerce INT_TY to:" + kindString(expectedSubNodeTy->kind) + ", the reported type of subNode " + intToString(i+1) + " of " + kindString(n->kind) + " node"); } } return n; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Assembly of main procedure for resolving overloading //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ Node* resolveOverloadedOp(FDLContext* c, Node* n) { n = resolveSuccPred(c,n); n = resolveArithOps(c,n); n = resolveEq(c,n); n = resolveIneqs(c,n); n = addTypeToArrayOp(c,n); n = addTypeToRecordOp(c,n); return n; } void resolveOverloading(FDLContext* ctxt, Node* unit) { //-------------------------------------------------------------------- // Resolve overloaded / polymorphic operators //-------------------------------------------------------------------- // - succ and pred operators over int & enum types. // - int/real overloading of operators and relations // - equality and disequalities // - < and <= inequalities.over int, real and enum types. // - polymorphic array operators // - polymorphic record operators // Useful here that mapOver is bottom up, as resolution often depends // on examining typing of arguments mapOverWithContext(resolveOverloadedOp, ctxt, unit); } //======================================================================== // Type resolution //======================================================================== // Resolve all polymorphism / overloading and figure out types // of free variables. void resolveTyping(FDLContext* ctxt, Node* unit) { //-------------------------------------------------------------------- // Ensure type inference set up to be eager //-------------------------------------------------------------------- ctxt->strictTyping = false; //-------------------------------------------------------------------- // Iteratively close rules and resolve overloading //-------------------------------------------------------------------- // Victor 0.9.1 and before used a single call of closeRules() followed // resolveOverloading(). // Examples have been seen in the 2nd Tokeneer release of user-defined // rules for which this is not sufficient. // The typing resolutions in Phase 1 are always safe. Those in // Phases 2 and 3 are increasingly speculative. for (int phase = 1; phase <= 3; phase++) { ctxt->typeResolutionPhase = phase; const int maxLoopIt = 10; for (int i = 1 ; i <= maxLoopIt; i++) { ctxt->typeResolutionMadeProgress = false; ctxt->typeResolutionIncomplete = false; closeRules(ctxt, unit); resolveOverloading(ctxt, unit); if (! ctxt->typeResolutionMadeProgress || ! ctxt->typeResolutionIncomplete) break; if (i == maxLoopIt) { printMessage(WARNINGm, "resolveTyping: in phase " + intToString(phase) + ", reached loop iteration " + intToString(i) + ENDLs + "Could be due to Victor bug"); } } // End for i if (! 
ctxt->typeResolutionIncomplete) break; } // End for phase //-------------------------------------------------------------------- // Insert coercions //-------------------------------------------------------------------- mapOverWithContext(insertRealCoercion, ctxt, unit); return; } //======================================================================== // Eliminate standard FDL functions. //======================================================================== // Eliminate occurrences of ABS, SQR, EXP functions, ODD predicate. // SUCC and PRED taken care of separately. Node* elimStdFDLFun(Node* n) { switch (n->kind) { case I_ABS: return n->updateKindAndId(FUN_AP,"int___abs"); case R_ABS: return n->updateKindAndId(FUN_AP,"real___abs"); case I_SQR: { n->kind = I_EXP; n->addChild(new Node(NATNUM, "2")); return n; } case R_SQR: { n->kind = R_EXP; n->addChild(new Node(NATNUM, "2")); return n; } case ODD: return n->updateKindAndId(FUN_AP,"int___odd"); default: return n; } } void elimStdFDLFuns(Node* unit) { unit->mapOver1(elimStdFDLFun); } //======================================================================== // Fix IDs //======================================================================== // Replace IDs by CONST or VAR, as appropriate. // VAR only used for bound variables, *not* FDL vars. FDL vars // are mapped to CONSTs. // Distinction needed for SMTLIB format. Node* resolveIDs(FDLContext* c, Node* n) { if (n->kind == ID) { // Check bound vars before consts, in case bindings shadow a const. if (c->lookupBinding(n->id) != 0) return n->updateKind(VAR); if (c->lookupVar(n->id) != 0 || c->lookupConst(n->id) != 0 || c->lookupEnumConst(n->id) != 0 ) { return n->updateKind(CONST); } else { printMessage(INFOm, "resolveIDs: encountered unexpected id: " + n->id); return n; } } else return n; } //======================================================================== //======================================================================== // Main processing function //======================================================================== //======================================================================== FDLContext* putUnitInStandardForm(Node* unit, UnitInfo* unitInfo) { //-------------------------------------------------------------------- // Split off decls and turn rules into formula sequence //-------------------------------------------------------------------- FDLContext* ctxt = RegulariseUnitStructure(unit); // Now: unit == UNIT(DECLS(), RULES(...), GOALS(...)) // ctxt has maps for const, vars, funs, types and boundvars // ------------------------------------------------------------------- // Augment constant and var decls // ------------------------------------------------------------------- // Add decls for init state vars // Add missing X__base__first and X__base__last decls // Add decls for enum type functions augmentConstDecls(ctxt, unit); //-------------------------------------------------------------------- // Delete rules with undeclared functions and constants //-------------------------------------------------------------------- // Updates unitInfo with indexes of rules to exclude. 
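/* Illustrative note (informal; the function name is hypothetical): a rule
   whose body mentions an undeclared function such as foo___undefd is not
   removed from the RULES sequence; deleteRulesWithUndeclaredIds replaces its
   body with the constant true and records its 0-based position via
   unitInfo->addExcludedRule, so the numbering of the remaining rules is
   unchanged. */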
if (option("delete-rules-with-undeclared-ids")) { deleteRulesWithUndeclaredIds(ctxt, unit, unitInfo); } //-------------------------------------------------------------------- // Check all function and constant identifiers have bindings //-------------------------------------------------------------------- // FIX: needs to be sensitive to unitInfo->excludedRules if (checkForUndeclaredIds(ctxt, unit)) return 0; //-------------------------------------------------------------------- // Simplify array constructors //-------------------------------------------------------------------- unit->mapOver1(simpMkArray); //-------------------------------------------------------------------- // Invert > and >= relations. //-------------------------------------------------------------------- unit->mapOver(normaliseIneqs); //-------------------------------------------------------------------- // Resolve typing //-------------------------------------------------------------------- // Resolve typing of overloaded & polymorphic operators and of // free vars in rules. resolveTyping(ctxt, unit); //-------------------------------------------------------------------- // Eliminate special operators for FDL standard functions //-------------------------------------------------------------------- elimStdFDLFuns(unit); //-------------------------------------------------------------------- // Resolve uses of IDs //-------------------------------------------------------------------- // Replace with VAR or CONST mapOverWithContext(resolveIDs, ctxt, unit); return ctxt; } spark-2012.0.deb/victor/vct/src/processor.cc0000644000175000017500000005400511753202341017660 0ustar eugeneugen//======================================================================== //======================================================================== // PROCESSOR.CC //======================================================================== //======================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ //======================================================================== // // Intermediate solver-independent processing of FDL unit components. // #include using std::map; using std::make_pair; using std::pair; #include #include // For swap. 
using std::swap; #include using std::set; #include "processor.hh" #include "context.hh" #include "normalisation.hh" #include "translation.hh" #include "utility.hh" #include "node-utils.hh" #include "bignum.hh" #include "formatter.hh" using std::string; using namespace z; /* ============================================================================ New options ============================================================================ Options defined here and in translation.cc Option sections below presented in order of processing Enumerated type abstraction --------------------------- -abstract-enums # Use RLS file axioms -elim-enums # map to integer subranges Early array and record abstraction ---------------------------------- -abstract-arrays-records-early # Enable abstraction at this point # See "Late array and record abstraction" section below for rest of options QFOL->FOL conversion -------------------- -bit-type # Do conversion -bit-type-bool-eq-to-iff -bit-type-with-ite -bit-type-prefer-bit-vals -bit-type-prefer-props # If neither then uses bit-fun if instance in # individ position. O/w uses bool-fun. # Tracing options -trace-prop-to-bit-insertion -trace-intro-bit-ops-and-rels type refinement --------------- -refine-types # Master control -refine-bit-eq-equiv # Add in definition for bit-valued non-triv equiv rels # Use when -bit-type-with-ite not included -refine-int-subrange-type # types to refine -refine-bit-type-as-int-subtype -refine-bit-type-as-int-quotient -refine-array-types-with-quotient -refine-array-types-with-weak-extension-constraint -refine-uninterpreted-types # use if want to ensure infinite models # Options for axiom generation -no-subtyping-axioms -strong-subtyping-axioms -no-functionality-axioms # Tracing options -trace-refine-types-quant-relativisation -trace-refine-types-eq-refinement Late array and record abstraction ---------------------------------- -abstract-arrays-records-late # Enable abstraction at this point # Eliminate redundant operators -elim-array-constructors -elim-record-constructors -abstract-record-updates # Add axioms defining types axiomatically -add-array-select-update-axioms -add-array-extensionality-axioms -add-record-select-constructor-axioms -add-record-constructor-extensionality-axioms -add-record-select-update-axioms -add-record-eq-elements-extensionality-axioms # Introduce aliases for equalities to help with matching ext axioms -use-array-eq-aliases -use-record-eq-aliases # Abstract operators and types -abstract-array-select-updates -abstract-array-types -abstract-record-selects-constructors -abstract-record-selects-updates -abstract-record-types bit abstraction ---------------- -abstract-bit-ops -abstract-bit-valued-eqs -abstract-bit-valued-int-le -elim-bit-type-and-consts -abstract-bit-type-and-consts Arithmetic simplification ------------------------- -abstract-real-div -elim-consts -ground-eval-exp -ground-eval -expand-exp-const -arith-eval -sym-prefix -sym-consts Arithmetic abstraction ---------------------- -abstract-nonlin-times -abstract-exp -abstract-divmod -abstract-real-div -abstract-reals Miscellaneous ------------- -switch-types-to-int -lift-quants -strip-quantifier-patterns -elim-type-aliases */ /* =================================================================== Some old documentation =================================================================== (Some of this could be out of date) Terminology: - A `declaration' of a type|constant|variable|function introduces the name, along with a type in the latter 3 cases, but 
does not give a value. - A `definition' of a type or constant also provides a value. - An `array|record|enumeration type definition' is a type definition with an array|record|enumeration type on the rhs. - An `array|record|enumeration type name' is the type name introduced by an array|record|enumeration type definition. FDL syntax seems to ensure that every array|record|enumeration type gets a name. - An enumeration type definition declares one or more `enumeration constants'. */ //======================================================================== // Arithmetic translations //======================================================================== #include "arith.cc" //======================================================================== // Well-formedness check //======================================================================== // Check that only see expected Node kinds and check subnode typing // is as expected. Node* checkNodeWF(FDLContext* ctxt, Node* n) { switch (n->kind) { default: break; } return n; } void checkUnitWellFormedness(FDLContext* ctxt, Node* unit) { mapOverWithContext(checkNodeWF, ctxt, unit); } //======================================================================== // Printing of quantifier type info //======================================================================== class Fun { private: FDLContext* ctxt; public: Fun (FDLContext* c) : ctxt(c) {}; void operator() (Node* n) { // [FORALL|EXISTS] (SEQ decl+) exp where decl ::= DECL{id} type if (n->kind == FORALL || n->kind == EXISTS) { bool printFmla = false; for ( int i = 0; i != n->child(0)->arity(); i++) { Node* decl = n->child(0)->child(i); Node* normTy = ctxt->normaliseType(decl->child(0)); if (normTy->kind != INT_TY && normTy->kind != PENDING) printFmla = true; printMessage(INFOm, "quant types (" + intToString(i) + " " + decl->id + "): " + kindString(normTy->kind)); } if (printFmla) printMessage(FINEm, "Interesting quantification" + ENDLs + (n->toString())); } } }; //======================================================================== // Lift quantifiers //======================================================================== /* Provide rules such as: 1. IMPLIES e1 (FORALL (SEQ decls) e2) --> FORALL (SEQ decls) (IMPLIES e1 e2) provided decls don't capture any free variables in e1. 2. FORALL (SEQ decls1) (FORALL (SEQ decls2) e) --> FORALL (SEQ decls1 decls2) e */ set* getFreeVarsAux (Node* n) { if (n->arity() == 0) { set* s = new set(); if (n->kind == VAR) s->insert(n->id); return s; } else if (n->kind == FORALL || n->kind == EXISTS) { Node* decls = n->child(0); set* s = getFreeVarsAux(n->child(1)); for (int i = 0; i != (decls->arity()); i++) { s->erase(decls->child(i)->id); } return s; } else { set* s = getFreeVarsAux(n->child(0)); for (int i = 1; i != n->arity(); i++) { set* ss = getFreeVarsAux(n->child(i)); s->insert(ss->begin(), ss->end()); delete ss; } return s; } } set getFreeVars (Node* n) { set* s = getFreeVarsAux(n); set ss = *s; delete s; return ss; } // Apply rule(s). 
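// Editorial illustration (not part of the original source): a worked example
// of the two rules above, written in FDL surface syntax with hypothetical
// names x, f and p. Rule 1 rewrites
//     x > 0 -> for_all(i : integer, f(i) >= x)
// to
//     for_all(i : integer, x > 0 -> f(i) >= x)
// which is sound only because i does not occur free in the antecedent x > 0;
// liftQuants below uses getFreeVars for exactly this capture check and emits
// a warning instead of lifting when the check fails. Rule 2 then flattens
// nested universals, so
//     for_all(i : integer, for_all(j : integer, p(i, j)))
// becomes a single FORALL node whose declaration sequence contains both i
// and j.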
Node* liftQuants(FDLContext* c, Node* n) { if (n->kind == IMPLIES && n->child(1)->kind == FORALL) { Node* e1 = n->child(0); Node* decls = n->child(1)->child(0); set e1FreeVars = getFreeVars(e1); for (int i = 0; i != decls->arity(); i++) { if ( e1FreeVars.count(decls->child(i)->id) != 0 ) { printMessage(WARNINGm, "FORALL lift of " + (decls->child(i)->id) + " inhibited to avoid capture"); return n; } } n->kind = FORALL; n->child(0) = decls; n->child(1)->kind = IMPLIES; n->child(1)->child(0) = e1; } else if (n->kind == FORALL && n->child(1)->kind == FORALL) { Node* decls1 = n->child(0); Node* decls2 = n->child(1)->child(0); Node* e = n->child(1)->child(1); decls1->appendChildren(decls2); n->child(1) = e; } return n; } //======================================================================== // Use tuples for array function indexes //======================================================================== // Not used. Keep in case used in future. Node* useTupleForArrayIndex(Node* n) { // ARR_ELEMENT a i1 ... ik --> ARR_ELEMENT a (SEQ i1 ... ik) if (n->kind == ARR_ELEMENT) { Node* array = n->child(0); n->popLeftChild(); n->kind = SEQ; return new Node(ARR_ELEMENT, array, n); } // ARR_UPDATE a i1 ... ik v --> ARR_UPDATE a (SEQ i1 ... ik) v else if (n->kind == ARR_UPDATE) { Node* array = n->child(0); Node* newValue = n->lastChild(); n->popChild(); n->popLeftChild(); n->kind = SEQ; return new Node(ARR_UPDATE, "", array, n, newValue); } else return n; } void useTuplesForArrayIndexes(Node* unit) { unit->mapOver1(useTupleForArrayIndex); } //======================================================================== // Remove EQ, NE and ITE type arguments //======================================================================== Node* removeTypeArg(Node* n) { if ( ( (n->kind == EQ || n->kind == NE || n->kind == TERM_EQ) && n->arity() == 3) || (n->kind == ITE && n->arity() == 4) ) { n->popChild(); } return n; } void removeTypeArgs(Node* unit) { unit->mapOver1(removeTypeArg); } //======================================================================== // Remove Patterns from Quantifiers //======================================================================== // Patterns are used in the axioms characterising operations on the bit type. // May be used elsewhere too. Node* removeQuantifierPattern(Node* n) { if ( (n->kind == FORALL || n->kind == EXISTS) && n->arity() == 3) { n->popChild(); } return n; } void removeQuantifierPatterns(Node* unit) { unit->mapOver1(removeQuantifierPattern); } //======================================================================== // Use integer type for all types //======================================================================== // Replace all uses of type ids by integer type. // Check that all defined types are abstract or aliases for defined types // or int type. 
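// Editorial illustration (not part of the original source): with the
// hypothetical FDL declarations
//     type index = integer;
//     type pair = record a, b : integer end;
// switchAllTypesToInt below rewrites every TYPE_ID occurrence (such as uses
// of index) to the integer type, while checkTypesAbstract reports an error
// for pair, since a record definition cannot soundly be collapsed to
// integer. This pass is only applied for the Simplify translation, under
// option -switch-types-to-int.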
Node* typeIdToIntTy(Node* n) { switch(n->kind) { case TYPE_ID: return Node::int_ty; default: return n; } } void checkTypesAbstract(Node* n) { if (n->kind == DEF_TYPE) { if (n->arity() == 0 || n->child(0)->kind == TYPE_ID || n->child(0)->kind == INT_TY) return; printMessage(ERRORm, "Found non-abstract type def for " + (n->id)); } return; } void switchAllTypesToInt(FDLContext* ctxt, Node* unit) { unit->mapOver1(typeIdToIntTy); ctxt->termSeq.mapOver1(typeIdToIntTy); ctxt->typeSeq.mapOver(checkTypesAbstract); return; } //======================================================================== // Strip universal hyps and rules //======================================================================== void stripUnivHyps(Node* unit) { Node* oldRules = unit->child(1); Node* newRules = new Node(SEQ); for (int i = 0; i != oldRules->arity(); i++) { if (oldRules->child(i)->kind != FORALL) newRules->addChild(oldRules->child(i)); } unit->child(1)->children = newRules->children; Node* goals = unit->child(2); for (int i = 0; i != goals->arity(); i++) { Node* goal = goals->child(i); if (goal->arity() < 2) continue; Node* oldHyps = goal->child(0); Node* newHyps = new Node(SEQ); for (int j = 0; j != oldHyps->arity(); j++) { if (oldHyps->child(j)->kind != FORALL) newHyps->addChild(oldHyps->child(j)); } goal->child(0)->children = newHyps->children; } } //======================================================================== // Gather concls //======================================================================== Node* gatherConcls(Node* goals) { Node* result = new Node(AND); for (int i = 0; i != goals->arity(); i++) { if (goals->child(i)->arity() == 2) { // If goal is non-trivial result->addChild(goals->child(i)->child(1)); } } return result; } //======================================================================== // Tracing properties of units //======================================================================== Node* identifyEqAtCompoundTypes(FDLContext* ctxt, Node* n) { if (n->kind == EQ) { Node* baseType = ctxt->normaliseType(n->child(2)); if (baseType->kind == ARRAY_TY) { printMessageWithHeader("TRACE", "Found equality at array type " + typeToName(n->child(2)) + ENDLs + "Path: " + ctxt->getPathString() + ENDLs + n->toString()); } else if (baseType->kind == RECORD_TY) { printMessageWithHeader("TRACE", "Found equality at record type " + typeToName(n->child(2)) + ENDLs + "Path: " + ctxt->getPathString() + ENDLs + n->toString()); } } return n; } void identifyEqsAtCompoundTypes(FDLContext* ctxt, Node* unit) { if (option("analyse-only-concls")) { Node* concls = gatherConcls(unit->child(2)); mapOverWithContext(identifyEqAtCompoundTypes, ctxt, concls); } else { mapOverWithContext(identifyEqAtCompoundTypes, ctxt, unit); } return; } //======================================================================== // Theory translation //======================================================================== string translateUnit(FDLContext* ctxt, Node* unit) { printMessage(FINESTm, "Standard Form context:" + ENDLs + ctxt->extractDecls()->toString() ); printMessage(FINESTm, "Standard Form rules and goals:" + ENDLs + unit->toString()); //-------------------------------------------------------------------- // Check unit well typed //-------------------------------------------------------------------- printMessage(FINEm, "Standard Form type check starting"); if (!typeCheckUnit("Pre-translation", ctxt, unit)) return "Pre-translation typecheck failed"; printMessage(FINEm, "Standard Form type check ended"); 
//-------------------------------------------------------------------- // Analysis of unit //-------------------------------------------------------------------- // So far just this one query. Could imagine many more. if (option("identify-eqs-at-arr-rec-types")) { identifyEqsAtCompoundTypes(ctxt, unit); } //-------------------------------------------------------------------- // Eliminating enumeration type primitives //-------------------------------------------------------------------- if (option("abstract-enums")) abstractEnums(ctxt, unit); else if (option("elim-enums")) enumsToIntSubranges(ctxt, unit); else if (option("axiomatise-enums")) axiomatiseEnums(ctxt, unit); //-------------------------------------------------------------------- // Early array and record abstraction //-------------------------------------------------------------------- if (option("abstract-arrays-records-early")) abstractArraysRecords(ctxt, unit); //-------------------------------------------------------------------- // QFOL->FOL conversion //-------------------------------------------------------------------- if (option("bit-type")) introBitType(ctxt, unit); //-------------------------------------------------------------------- // Refining types //-------------------------------------------------------------------- if (option("refine-types")) refineTypes(ctxt, unit); //-------------------------------------------------------------------- // Abstracting Bit type and associated relations and operations //-------------------------------------------------------------------- abstractBit(ctxt, unit); //-------------------------------------------------------------------- // Late array and record abstraction //-------------------------------------------------------------------- if (option("abstract-arrays-records-late")) abstractArraysRecords(ctxt, unit); //-------------------------------------------------------------------- // Do arithmetic simplifications //-------------------------------------------------------------------- // Mostly these strive to make formulas more linear and ease // div/mod reasoning. arithSimp(ctxt, unit); //-------------------------------------------------------------------- // Abstract non-linear and real arithmetic primitives //-------------------------------------------------------------------- arithAbstract(ctxt, unit); //-------------------------------------------------------------------- // Miscellaneous normalisation steps //-------------------------------------------------------------------- // If quantifiers under implications, matching routines of solvers don't // always pick them up. E.g. with Simplify format. if (option("lift-quants")) mapOverWithContext(liftQuants, ctxt, unit); // Only SMTLib driver is expecting patterns if (option("strip-quantifier-patterns")) stripQuantPats(unit); // Needed for new SMTLib format translation. if (option("elim-type-aliases")) elimTypeAliases(ctxt, unit); // Needed for Simplify translation if (option("switch-types-to-int")) switchAllTypesToInt(ctxt, unit); // - Convert I_SUCC(x) to x + 1, I_PRED(x) to x - 1. 
elimIntSuccPreds(unit); //-------------------------------------------------------------------- // Check unit well typed //-------------------------------------------------------------------- printMessage(FINESTm, "Primary Translation End - context:" + ENDLs + ctxt->extractDecls()->toString() ); printMessage(FINESTm, "Primary Translation End - rules and goals:" + ENDLs + unit->toString()); Formatter::setFormatter(TestFormatter::getFormatter()); printOnOption("print-translation-end", "Primary Translation End - context:" + ENDLs + ctxt->extractDecls()->toString()); printOnOption("print-translation-end", "Primary Translation End - rules and goals:" + ENDLs + unit->toString()); Formatter::setFormatter(VanillaFormatter::getFormatter()); printMessage(FINEm, "Primary Translation End - type check starting"); if (!typeCheckUnit("Post-translation", ctxt, unit)) return "Post-translation typecheck failed"; printMessage(FINEm, "Primary Translation End - type check ended"); //-------------------------------------------------------------------- // Minor final formatting in preparation for passing to drivers //-------------------------------------------------------------------- // - Remove type argument from EQ, NE, TERM_EQ and ITE nodes. removeTypeArgs(unit); if (!option("include-patterns-in-quantifiers")) { removeQuantifierPatterns(unit); } //-------------------------------------------------------------------- // Insert modified declarations back into unit AST. //-------------------------------------------------------------------- unit->child(0) = ctxt->extractDecls(); delete ctxt; return "good"; } //======================================================================== // Exported top level of processing //======================================================================== string elaborateUnit(Node* unit, UnitInfo* unitInfo) { printMessage(FINESTm, "Unit parse tree:" + ENDLs + unit->toString()); // Modifies both unit and unitInfo FDLContext* ctxt = putUnitInStandardForm(unit, unitInfo); if (ctxt == 0) return "FDL normalisation failed"; return translateUnit(ctxt, unit); } spark-2012.0.deb/victor/vct/src/qfol-to-fol.cc0000644000175000017500000003367511753202341020012 0ustar eugeneugen//======================================================================== //======================================================================== // QFOL-TO-FOL.CC //======================================================================== //======================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. 
*/ //======================================================================== //======================================================================== // QFOL to FOL conversion //======================================================================== //======================================================================== // Introduce bit type for Boolean values at individual level. // Check if proposition in non-propositional context. // If there is a term valued alternative, fix operator kind. // Report if encounter unfixable boolean value/operator. //+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // boolEqToIff //+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ Node* boolEqToIff(FDLContext* c, Node* n) { if (n->kind == EQ) { assert(n->arity() == 3); // Types should be added by this stage Node* baseType = c->normaliseType(n->child(2)); if (baseType->kind == BOOL_TY) { n->kind = IFF; n->popChild(); } } return n; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // boolToBit //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ Node* boolToBit(Node* n) { if (n->kind == BOOL_TY) { return Node::bit_ty; } else { return n; } } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // boolTypeParamsToBit //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // 1. Convert all uses of Bool as component types of arrays and records to // Bit. // 2. Ensure that all aliases for Bool become aliases for Bit. // 3. Convert all uses of Bool in quantifier types to Bit // 4. Convert all equalities over Bool to equalities over Bit // (both in EQ and TERM_EQ) // // void boolTypeParamsToBit(FDLContext* ctxt, Node* unit) { for (map::iterator i = ctxt->typeMap.begin(); i != ctxt->typeMap.end(); i++ ) { Node* typeDecl = i->second; // DEF_TYPE {id} type // | DEF_TYPE {id} if (typeDecl->arity() == 0) continue; Node* type = typeDecl->child(0); if (type->kind == BOOL_TY) { typeDecl->child(0) = Node::bit_ty; } // ARRAY_TY (SEQ type+) type // RECORD_TY (DECL{id} type)+ else if (type->kind == ARRAY_TY || type->kind == RECORD_TY) { typeDecl->mapOver1(boolToBit); } } // Convert any uses of BOOL_TY in quantifiers and equalities. unit->mapOver1(boolToBit); } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // introBitOpsAndRels //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ Node* introBitOpsAndRel(FDLContext* ctxt, Node* n) { if (! 
ctxt->isFormula) { switch(n->kind) { case TRUE: { n->kind = TERM_TRUE; break; } case FALSE: { n->kind = TERM_FALSE; break; } case AND: { n->kind = TERM_AND; break; } case OR: { n->kind = TERM_OR; break; } case NOT: { n->kind = TERM_NOT; break; } case IMPLIES: { n->kind = TERM_OR; n->child(0) = new Node(TERM_NOT, n->child(0)); break; } case IFF: { n->kind = TERM_EQ; n->addChild(new Node(BOOL_TY)); break; } case EQ: { n->kind = TERM_EQ; break; } case I_LT: { // x < y <=> not (y <= x) Node* x = n->child(0); Node* y = n->child(1); n->kind = TERM_NOT; n->popChild(); n->child(0) = new Node(TERM_I_LE, y, x); break; } case I_LE: { n->kind = TERM_I_LE; break; } default: { if (isProp(n)) { printMessage(ERRORm, "Found " + kindString(n->kind) + "at individual position"); ctxt->addFeature("Error"); } return n; } } // END switch // Should only reach here if progress made if (n->kind == TERM_EQ) { printOnOption("trace-intro-bit-ops-and-rels", "Introducing TERM_EQ over " + typeToName(n->child(2))); } else { printOnOption("trace-intro-bit-ops-and-rels", "Introducing " + kindString(n->kind)); } } return n; } //+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // IntroBitTypeFindIndividualIds class //+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Class for function-object closure. // // // Find ids for consts, FDL vars and functions used in individual positions class IntroBitTypeFindIndividualIds { public: set indConstIds; set indFunIds; bool isIndConstId(string id) { return indConstIds.find(id) != indConstIds.end(); } bool isIndFunId(string id) { return indFunIds.find(id) != indFunIds.end(); } Node* operator() (FDLContext* c, Node* n); }; Node* IntroBitTypeFindIndividualIds::operator() (FDLContext* c, Node* n) { if (c->isFormula) return n; // In individual context if ( n->kind == CONST && c->lookupEnumConst(n->id) == 0) { // CONST should be for FDL var or const if (c->lookupId(n->id) == 0) { printMessage(ERRORm, "Undeclared CONST encountered: " + (n->id) ); } else { indConstIds.insert(n->id); } } if (n->kind == FUN_AP) { indFunIds.insert(n->id); } return n; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // introBitFixFunConstDecls() //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ void introBitFixFunConstDecls(FDLContext* ctxt, Node* unit) { bool preferBitVals = option("bit-type-prefer-bit-vals"); bool preferProps = option("bit-type-prefer-props"); if ( (preferBitVals && preferProps) || (preferProps && !option("bit-type-with-ite")) ) { printMessage(ERRORm, "Invalid combination of bit-type options"); } //---------------------------------------------------------------------- // Gather ids for CONSTs and FUN_APs in individual positions //---------------------------------------------------------------------- IntroBitTypeFindIndividualIds indIds; mapOverWithContext(indIds, ctxt, unit); //---------------------------------------------------------------------- // Change Bool types to bit types in declarations //---------------------------------------------------------------------- // Resolve use of Bool (or aliases of Bool) for constant, FDL // variable, and function range type to either Bool or Bit type. // If preferBitVals, always use Bit type. // If preferProps, always use Bool // Else use Bit just when const/var/fun is used in an individual position. 
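// Editorial illustration (not part of the original source), assuming the
// default setting where neither -bit-type-prefer-bit-vals nor
// -bit-type-prefer-props is given: a Bool-valued constant that only ever
// occurs as an atomic formula keeps the Bool range type and later becomes a
// nullary PRED_AP in fixAtomicFmlas, whereas one that also occurs in an
// individual position (for instance under TERM_EQ or as a record component)
// has its declaration switched to the Bit type here, with TO_PROP (and, when
// -bit-type-with-ite is given, TO_BIT) coercions inserted afterwards by
// addBitToProp / addPropToBit.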
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Work over constant decls and defs // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - for (map::iterator i = ctxt->constMap.begin(); i != ctxt->constMap.end(); i++) { // DEF_CONST {id} type exp // | DEF_CONST {id} type Node* constDecl = i->second; Node* baseType = ctxt->normaliseType(constDecl->child(0)); if (baseType->kind == BOOL_TY) { if ( (preferBitVals || indIds.isIndConstId(constDecl->id)) && !preferProps) { constDecl->child(0) = Node::bit_ty; } else { constDecl->child(0) = Node::bool_ty; } } } // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Work over FDL variable decls // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - for (map::iterator i = ctxt->varMap.begin(); i != ctxt->varMap.end(); i++) { // DECL_VAR {id} type Node* varDecl = i->second; Node* baseType = ctxt->normaliseType(varDecl->child(0)); if (baseType->kind == BOOL_TY) { if ( (preferBitVals || indIds.isIndConstId(varDecl->id)) && !preferProps) { varDecl->child(0) = Node::bit_ty; } else { varDecl->child(0) = Node::bool_ty; } } } // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Work over function decls // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - for (map::iterator i = ctxt->funMap.begin(); i != ctxt->funMap.end(); i++) { Node* funDecl = i->second; // DECL_FUN {id} (SEQ type+) type // Convert all uses of Bool as arg type to Bit. // (Any aliases for Bool are converted later by fixing alias typedefs) funDecl->child(0)->mapOver1(boolToBit); Node* baseRangeType = ctxt->normaliseType(funDecl->child(1)); if (baseRangeType->kind == BOOL_TY) { if ( (preferBitVals || indIds.isIndFunId(funDecl->id)) && !preferProps) { funDecl->child(1) = Node::bit_ty; } else { funDecl->child(1) = Node::bool_ty; } } } } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // fixAtomicFmlas //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // // 1. Change FUN_APs of bool-valued functions to PRED_APs // 2. Change bool-valued CONSTS to nullary PRED_APs. 
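// Editorial illustration (not part of the original source): given a
// hypothetical declaration `function valid(integer) : boolean` and a Bool
// constant `flag`, the atomic formulas in `valid(x) and flag` are fixed up so
// that valid(x) becomes a PRED_AP and flag a nullary PRED_AP. Only
// identifiers whose resolved type is still Bool are touched; those whose
// declarations were switched to Bit by introBitFixFunConstDecls above keep
// their FUN_AP / CONST kinds and are handled through the Bit machinery
// instead.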
// Node* fixAtomicFmlas(FDLContext* c, Node* n) { if (n->kind == FUN_AP || n->kind == CONST) { Node* nType = c->getType(n); if (nType->kind == BOOL_TY) { n->kind = PRED_AP; } } return n; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // addBitToProp //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ Node* addBitToProp(FDLContext* c, Node* n) { vector subNodes = n->getSubNodes(); Node* subNodeTypes = c->getSubNodeTypes(n); for (int i = 0; i != (int) subNodes.size(); i++) { Node** subNode = subNodes.at(i); Node* actualType = c->normaliseType(c->getType(*subNode)); Node* expectedType = c->normaliseType(subNodeTypes->child(i)); if (expectedType->kind == BOOL_TY && actualType->kind == BIT_TY) { printMessage(FINERm, "Adding Bit to Prop coercion for" + ENDLs + (*subNode)->toString()); *subNode = new Node(TO_PROP, *subNode); } } return n; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // addPropToBit //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ Node* addPropToBit(FDLContext* c, Node* n) { vector subNodes = n->getSubNodes(); Node* subNodeTypes = c->getSubNodeTypes(n); for (int i = 0; i != (int) subNodes.size(); i++) { Node** subNode = subNodes.at(i); Node* actualType = c->normaliseType(c->getType(*subNode)); Node* expectedType = c->normaliseType(subNodeTypes->child(i)); if (expectedType->kind == BIT_TY && actualType->kind == BOOL_TY) { // Only trace non-trivial coercions. // Trivial ones are expanded away when TO_PROP expanded. if ( ! ( (*subNode)->kind == TRUE || (*subNode)->kind == FALSE ) ) { printOnOption("trace-prop-to-bit-insertion", "Adding Prop to Bit coercion for" + ENDLs + (*subNode)->toString()); printMessage(FINERm, "Adding Prop to Bit coercion for" + ENDLs + (*subNode)->toString()); } *subNode = new Node(TO_BIT, *subNode); } } return n; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // introBitType //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Main function void introBitType (FDLContext* ctxt, Node* unit) { if (option("bit-type-bool-eq-to-iff")) { mapOverWithContext(boolEqToIff, ctxt, unit); } if (!option("bit-type-with-ite")) { mapOverWithContext(introBitOpsAndRel, ctxt, unit); } boolTypeParamsToBit(ctxt, unit); introBitFixFunConstDecls(ctxt, unit); mapOverWithContext(fixAtomicFmlas, ctxt, unit); mapOverWithContext(addBitToProp, ctxt, unit); if (option("bit-type-with-ite")) { mapOverWithContext(addPropToBit, ctxt, unit); } ctxt->removeFeature("QFOL"); ctxt->addFeature("FOL"); } spark-2012.0.deb/victor/vct/src/isab-driver.hh0000644000175000017500000000411211753202341020054 0ustar eugeneugen//========================================================================== //========================================================================== // ISAB-DRIVER.HH //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. 
Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ //========================================================================== #ifndef ISAB_DRIVER_HH #define ISAB_DRIVER_HH #include using std::ifstream; using std::ofstream; #include "smt-driver.hh" // Interface for Isabelle format file-level solver interface. class IsabDriver : public SMTDriver { public: IsabDriver() {}; private: string proverInputFileName; string proverOutputFileName; string proverErrorFileName; Node* theory; protected: virtual Node* translateUnit(Node* n); virtual void initGoal(const string& unitName, int goalNum, int ConclNum); virtual void addDecl(Node* n); virtual void addRule(Node* h, const string& hId, string& remarks); virtual void addHyp(Node* h, const string& hId, string& remarks); virtual void addConcl(Node* n, string& remarks); virtual void finishSetup(); virtual bool checkGoal(string& remarks); virtual Status getResults(string& remarks); }; #endif // ! ISAB_DRIVER_HH spark-2012.0.deb/victor/vct/src/node-utils.hh0000644000175000017500000000513411753202341017735 0ustar eugeneugen//========================================================================== //========================================================================== // NODE-UTILS.HH //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ #ifndef NODE_UTILS_HH #define NODE_UTILS_HH #include #include "node.hh" //======================================================================== // Translator base class //======================================================================== // // To create Node to Node translator for application X: // // 1. Subclass Translator with XTranslator. // 2. Define virtual function translateAux in subclass. // As needed, this can call the error function to log errors. // 3. Define constructor for XTranslator that sets the name string to "X". // 4. Define function xTranslate which constructs an XTranslator // object and calls the translate virtual function on it. // // translate will accumulate errors and throw an exception if // any errors have been logged. // // Doesn't matter whether xTranslate is a static function of XTranslator // or a file level function. // [ Used to be that error message always included printouts of // before and after translation trees. Then it was required that // xTranslate should set Formatter appropriately for application X. 
// Now error messages do not include these printouts, so setting // formatter is no longer necessary ] class Translator { public: Node* translate (Node* oldN); virtual ~Translator() {}; private: bool errorFlag; std::ostringstream errorMessages; protected: std::string name; Translator(string s) : name(s) {}; virtual Node* translateAux (Node* oldN) = 0; void error(const std::string& s); }; #endif // ! NODE_UTILS_HH spark-2012.0.deb/victor/vct/src/box.hh0000644000175000017500000000667211753202341016452 0ustar eugeneugen//========================================================================== //========================================================================== // BOX.HH //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ #ifndef BOX_HH #define BOX_HH #include using std::ostream; #include using std::string; #include using std::vector; // For holding formatted text // Box are rectangles except that last line might be shorter. // Box printing: // Precond: pt is at current indent. Indent supplied as arg // printing routine adds indent of spaces after each linebreak // pt left at rh side of last line output. // Constructors defined as static members to abstract away from // implementation choices. class Box { private: static vector allocList; static int allocCount; protected: // Assume subclasses set values directly. int height; int width; int lastWidth; // Width of last line (maybe shorter). Box(); virtual ~Box(); // Needed so delete on Box* pointers correctly frees // storage used by objects from subclasses. // See Stroustrup, p422. public: /* constructors */ Box* hAppendT(Box* b); // T = Top align. this->height == 1 Box* hAppendB(Box* b); // B = Bottom align. b.height == 1 Box* hAppend(Box* b); Box* vAppend(Box* b); /* accessors */ int getWidth() {return width;} int getHeight() {return height;} int getLastWidth() {return lastWidth;} static int getAllocCount() {return allocCount;} /* utilities */ static bool allHeightOne(vector& bs) { for (vector::iterator i = bs.begin(); i != bs.end(); i++) { if ( (*i)->getHeight() > 1 ) return false; } return true; } static int sumWidths(vector& bs) { int sum = 0; for (vector::iterator i = bs.begin(); i != bs.end(); i++) { sum += (*i)->getWidth(); } return sum; } static void deleteAll(); // Not for public use. However compiler doesn't permit // recursive invocations in subclasses if protected. virtual void indentPrint(ostream& os, int indent) = 0; }; Box& box(const string& s); Box& box(const char* cstr); // Appending strings, cstrings and boxes. 
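// Editorial usage sketch (not part of the original header), kept inside a
// comment so that the interface itself is unchanged. It assumes, based on
// the member functions above, that operator+ below performs the horizontal
// append and operator/ the vertical append:
//
//     Box& b = "x := " + box("1") / box("2");
//     std::cout << b;   // printed via operator<< declared below
//
// Since / binds tighter than + in C++, this places the two-line box
// ("1" above "2") to the right of the text "x := ".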
Box& operator+ (Box& b1, Box& b2); Box& operator+ (const string& s1, Box& b2); Box& operator+ (Box& b1, const string& s2); Box& operator+ (const char* cs1, Box& b2); Box& operator+ (Box& b1, const char* cs2); Box& operator/ (Box& b1, Box& b2); // Printing boxes ostream& operator<< (ostream& os, Box& b); #endif // ! BOX_HH spark-2012.0.deb/victor/vct/src/parser.yy0000644000175000017500000004421611753202341017214 0ustar eugeneugen/* -*- C++ -*- */ //========================================================================== //========================================================================== // PARSER.YY //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ // Bison parser for SPARK FDL. // This version works with bison 2.4. To produce a version working with // bison 2.3, comment out lines with comments "**bison 2.4**" and uncomment // lines with comments "**bison 2.3**". %skeleton "lalr1.cc" %defines /* Enable writing of "parser.hh" file with token defs */ %require "2.4.1" // **bison 2.4** // %code requires: Code region for bison 2.4 for type definitions used // in code generated from %union, %lex-param and %parse-param // directives. // %{ // **bison 2.3** %code requires { // **bison 2.4** /* First part of user declarations. Inserted near parser.tab.hh start */ #include /* C++ strings */ #include "node.hh" #include "pdriver.hh" using namespace std; } // **bison 2.4** // %} // **bison 2.3** %parse-param { pdriver& driver } %lex-param { pdriver& driver } %debug /* Enable compilation of trace facilities */ %error-verbose %union { int ival; string* sval; /* C++ strings */ Node* nval ; z::Kind kval; } // %{ // **bison 2.3** %code { // **bison 2.4** /* First user implementation prologue. Inserted at start of parser.tab.cc */ #include "lexer.hh" } // **bison 2.4** // %} // **bison 2.3** %token TITLE %token FOR RULE_FAMILY %token GOAL_ORIGINS %token COLON LSB RSB LPAREN RPAREN COMMA AMPERSAND SEMIC DOT DOTDOT %token REQUIRES %token MAY_BE_REPLACED_BY MAY_BE_DEDUCED MAY_BE_DEDUCED_FROM %token ARE_INTERCHANGEABLE %token IF END FUNCTION PROCEDURE TYPE VAR CONST %token FOR_SOME FOR_ALL %token ARRAY RECORD ASSIGN OF %token SUBPROG_ID CONCL_ID HYP_ID TASK_TYPE ID NATNUM %token TRIPLESTAR %token TRIPLEBANG %token START_FDL_FILE START_RULE_FILE START_VCG_FILE; %token FILE_END 0 %nonassoc ASSIGN %nonassoc AMPERSAND %nonassoc DOTDOT /* Operator precedences follow those listed in GenVCs manual. "Operator precedence" section (4.4.1.4 in Issue 8.12). 
On precedence of **: Gen VCs manual, (section 4.4.1 "Operators") says: the SPARK exponentiation operator ** is supported by the SPADE Automatic Simplifier and SPADE Proof Checker as though it were an extension of FDL's expression syntax; it has a precedence which is compatible with that used in Ada, relative to the other FDL operator precedences. Ada 95 Standard gives ** strongest precedence, so we do that here. On precedence of unary minus. GenVCs manual places it stronger than * / div mod, but doesn't say where it fits in relative to **. Following the above suggestion on ** being compatible with Ada where ** is *stronger* than unary minus, we also make ** stronger than unary minus. The precedence ordering we implement here agrees with that listed in the SPARK book, p294. NB: The SPARK Grammar in GenVCs manual, start of Section 4.4 "Expressions" places unary minus `on same level' with binary +/-. I assume this is a mistake. */ %nonassoc IFF IMPLIES %left OR %left AND %nonassoc NOT %nonassoc EQ NE GT GE LT LE %left PLUS MINUS %left STAR SLASH DIV MOD %nonassoc UMINUS %left STARSTAR %type top file %type fdl_file fdl_decl %type fdl_decls types %type rule_file rule_family typeassum rule rule_condition rule_body %type rule_families typeassums rules %type vcg_file hyp concl goal %type goalsets goalset goals hyps concls %type id typedecl expseq %type ids typedecls multidecl expseq1 %type id_str %type type type_id %type recordtypedecls %type exp exp_id aexp %type indexset aexpseq %start top %% /* ========================================================================== RULES ========================================================================== */ /* ========================================================================== Top level ========================================================================== */ // top: file FILE_END {driver.result = $1;} top: file {driver.result = $1;} ; file: START_FDL_FILE fdl_file { $$ = $2; } | START_RULE_FILE rule_file { $$ = $2; } | START_VCG_FILE vcg_file { $$ = $2; } ; /* ========================================================================== FDL files ========================================================================== */ fdl_file: TITLE program_kind id SEMIC fdl_decls END SEMIC { // Drop recording of $2 and $3 $$ = $5; $$->kind = z::FDL_FILE; } ; program_kind: TASK_TYPE | FUNCTION | PROCEDURE ; fdl_decls: /* empty */ { $$ = new Node(z::SEQ); } | fdl_decls fdl_decl { $$ = $1; $$->addChild($2);} ; fdl_decl: TYPE id_str EQ type SEMIC { if ($4->kind == z::PENDING) $$ = new Node(z::DEF_TYPE, * $2); else $$ = new Node(z::DEF_TYPE, * $2, $4); delete $2; } | CONST id_str COLON type EQ exp SEMIC { if ($6->kind == z::PENDING) $$ = new Node(z::DEF_CONST, * $2, $4); else $$ = new Node(z::DEF_CONST, * $2, $4, $6); delete $2; } | VAR id_str COLON type SEMIC // FIXME: allow multiple IDs { $$ = new Node(z::DECL_VAR, * $2, $4); delete $2; } | FUNCTION id_str LPAREN types RPAREN COLON type SEMIC { $$ = new Node(z::DECL_FUN, * $2, $4, $7); delete $2; } | FUNCTION id_str COLON type SEMIC { $$ = new Node(z::DEF_CONST, * $2, $4); delete $2; } ; types: type { $$ = new Node(z::SEQ,$1); } | types COMMA type { $$ = $1; $$->addChild($3); } ; /* ========================================================================== Rule files (RLS, RUL, RLU suffix) ========================================================================== Grammar allows for initial sequence of rules without an explicit RULE_FAMILY header. This is observed in RUL and RLU files. 
*/ rule_file: rules rule_families { $$ = $2; $$->addLeftChild(new Node(z::RULE_FAMILY, "implicit", new Node(z::SEQ), $1)); $$->kind = z::RULE_FILE; } ; rule_families: /* empty */ { $$ = new Node(z::SEQ); } | rule_families rule_family { $$ = $1; $$->addChild($2); } ; rule_family: RULE_FAMILY id_str COLON typeassums DOT rules { $$ = new Node(z::RULE_FAMILY, *$2, $4, $6); } | RULE_FAMILY id_str COLON DOT rules { $$ = new Node(z::RULE_FAMILY, *$2, new Node(z::SEQ), $5); } ; typeassums: typeassum { $$ = new Node(z::SEQ,$1); } | typeassums AMPERSAND typeassum { $$ = $1; $$->addChild($3); } ; typeassum: exp REQUIRES LSB typedecls RSB { $$ = new Node(z::SEQ); } // Placeholder ; rules: /* empty */ { $$ = new Node(z::SEQ); } | rules rule { $$ = $1; $$->addChild($2); } ; rule: id_str LPAREN NATNUM RPAREN COLON rule_body DOT { $$ = new Node(z::RULE, (*$1) + "(" + (*$3) + ")", $6); delete $1; delete $3;} rule_body: exp MAY_BE_REPLACED_BY exp rule_condition { $$ = new Node(z::MAY_BE_REPLACED_BY, $1, $3); if ($4->kind != z::TRUE) $$->addChild($4); } | exp MAY_BE_DEDUCED { $$ = new Node(z::MAY_BE_DEDUCED, $1); } | exp MAY_BE_DEDUCED_FROM expseq { $$ = new Node(z::MAY_BE_DEDUCED, $1, $3); } | exp AMPERSAND exp ARE_INTERCHANGEABLE rule_condition { $$ = new Node(z::ARE_INTERCHANGEABLE, $1, $3); if ($5->kind != z::TRUE) $$->addChild($5); } ; rule_condition: /* empty */ { $$ = new Node(z::TRUE); } | IF expseq { $$ = $2; } ; /* ========================================================================== VCG files ========================================================================== */ vcg_file: goalsets { $$ = $1; $$->kind = z::VCG_FILE; } ; /* Run all goalsets together into one vector of goals */ goalsets: goalset { $$ = $1; } | goalsets goalset { $$ = $1; $$->appendChildren($2); } ; goalset: FOR GOAL_ORIGINS COLON goals { $$ = $4; for (int i = 0; i != $$->arity(); i++) { Node* c = $$->child(i); (c->id).append(" " + (* $2)); } delete $2; } ; goals: goal {$$ = new Node(z::SEQ,$1); } | goals goal {$$ = $1; $$->addChild($2); } ; goal: SUBPROG_ID DOT hyps IMPLIES concls { $3->kind = z::HYPS; $5->kind = z::CONCLS; $$ = new Node(z::GOAL, * $1, $3, $5); delete $1; } | SUBPROG_ID DOT TRIPLESTAR id_str DOT { $$ = new Node (z::GOAL, * $1); delete $1; } ; hyps: /* empty */ {$$ = new Node(z::SEQ); } | hyps hyp {$$ = $1; $$->addChild($2); } ; hyp: HYP_ID COLON exp DOT {$$ = $3; delete $1;} ; concls: /* empty */ {$$ = new Node(z::SEQ); } | concls concl {$$ = $1; $$->addChild($2); } ; concl: CONCL_ID COLON exp DOT {$$ = $3; delete $1;} ; /* ========================================================================== Multi-use non-terminals ========================================================================== IDs, type declarations and sequence expressions. */ // Note that the tokens "task_type" and "title" can also be a valid identifiers. 
id_str: ID { $$ = $1;} | HYP_ID { $$ = $1;} | CONCL_ID { $$ = $1;} | SUBPROG_ID { $$ = $1;} | TASK_TYPE { $$ = $1;} | TITLE { $$ = $1;} ; id: id_str { $$ = new Node(z::ID, * $1); delete $1;} ids: id { $$ = new Node(z::SEQ,$1); } | ids COMMA id { $$ = $1; $$->addChild($3); } ; typedecls: typedecl { $$ = new Node(z::SEQ,$1); } | typedecls COMMA typedecl { $$ = $1; $$->addChild($3); } ; typedecl: id_str COLON type { $$ = new Node(z::DECL, * $1, $3); delete $1; } ; multidecl: ids COLON type { $$ = new Node(z::SEQ); for (int i = 0; i != $1->arity(); i++) { $$->addChild(new Node(z::DECL, $1->child(i)->id, $3) ); } } ; expseq: LSB expseq1 RSB { $$ = $2; } ; expseq1: exp { $$ = new Node(z::SEQ, $1); } | expseq1 COMMA exp { $$ = $1; $$->addChild($3); } ; /* ========================================================================== Types ========================================================================== */ type: type_id { $$ = $1; } | LPAREN ids RPAREN { $$ = $2; $$->kind = z::ENUM_TY; } | ARRAY LSB types RSB OF type { $$ = new Node(z::ARRAY_TY, $3, $6); } | RECORD recordtypedecls END { $$ = $2; $$->kind = z::RECORD_TY; } ; recordtypedecls: multidecl { $$ = $1; } | recordtypedecls SEMIC multidecl { $$ = $1; $$->appendChildren($3); } ; type_id: id { const string& idstr = $1->id; if (idstr == "pending") $$ = new Node(z::PENDING); else if (idstr == "integer") $$ = Node::int_ty; else if (idstr == "real") $$ = Node::real_ty; else if (idstr == "boolean") $$ = Node::bool_ty; else { $1->kind = z::TYPE_ID; $$ = $1; } } ; /* ========================================================================== Value expressions ========================================================================== */ exp: FOR_ALL LPAREN multidecl COMMA exp RPAREN { $$ = new Node(z::FORALL, $3, $5); } | FOR_SOME LPAREN multidecl COMMA exp RPAREN { $$ = new Node(z::EXISTS, $3, $5); } | exp IMPLIES exp {$$ = new Node(z::IMPLIES, $1, $3); } | exp IFF exp {$$ = new Node(z::IFF, $1, $3); } | exp AND exp {$$ = new Node(z::AND, $1, $3); } | exp OR exp {$$ = new Node(z::OR, $1, $3); } | NOT exp {$$ = new Node(z::NOT, $2); } | exp EQ exp {$$ = new Node(z::EQ, $1, $3); } | exp NE exp {$$ = new Node(z::NOT, new Node(z::EQ, $1, $3)); } | exp LT exp {$$ = new Node(z::LT, $1, $3); } | exp GT exp {$$ = new Node(z::GT, $1, $3); } | exp LE exp {$$ = new Node(z::LE, $1, $3); } | exp GE exp {$$ = new Node(z::GE, $1, $3); } | PLUS exp %prec UMINUS {$$ = $2; } | MINUS exp %prec UMINUS {$$ = new Node(z::UMINUS, $2); } | exp PLUS exp {$$ = new Node(z::PLUS, $1, $3); } | exp MINUS exp {$$ = new Node(z::MINUS, $1, $3); } | exp STAR exp {$$ = new Node(z::TIMES, $1, $3); } | exp SLASH exp {$$ = new Node(z::RDIV, $1, $3); } | exp MOD exp {$$ = new Node(z::MOD, $1, $3); } | exp DIV exp {$$ = new Node(z::IDIV, $1, $3); } | exp STARSTAR exp{$$ = new Node(z::EXP, $1, $3); } | exp DOTDOT exp {$$ = new Node(z::SUBRANGE, $1, $3); } | LPAREN aexp RPAREN {$$ = $2; } // exp should be adequate here, but have // seen output of Examiner that parenthesise // aexps. 
| NATNUM {$$ = new Node(z::NATNUM, * $1); delete $1; } | expseq {$$ = $1;} | exp_id {$$ = $1;} | id_str LPAREN aexpseq RPAREN { string& idstr = * $1; int nargs = $3->arity(); // nargs > 0 by def of aexpseq if (idstr == "element" && nargs == 2) { $$ = $3; $$->kind = z::ARR_ELEMENT; } else if (idstr == "update" && nargs == 3) { $$ = $3; $$->kind = z::ARR_UPDATE; } else if (idstr.size() >= 4 && string(idstr, 0, 4) == "mk__") { // array or record constructor if ($3->child(0)->kind == z::ASSIGN && $3->child(0)->arity() == 1) { $$ = $3; $$->kind = z::MK_RECORD; $$->id = idstr.erase(0,4); } else { $$ = $3; $$->kind = z::MK_ARRAY; $$->id = idstr.erase(0,4); } } else if (idstr.size() >= 4 && string(idstr, 0, 4) == "fld_" && nargs == 1) { $$ = $3; $$->kind = z::RCD_ELEMENT; $$->id = idstr.erase(0,4); } else if (idstr.size() >= 4 && string(idstr, 0, 4) == "upf_" && nargs == 2) { $$ = $3; $$->kind = z::RCD_UPDATE; $$->id = idstr.erase(0,4); } else if (idstr == "succ") { $$ = $3; $$->kind = z::SUCC; } else if (idstr == "pred") { $$ = $3; $$->kind = z::PRED; } else if (idstr == "odd") { $$ = $3; $$->kind = z::ODD; } else if (idstr == "abs") { $$ = $3; $$->kind = z::ABS; } else if (idstr == "sqr") { $$ = $3; $$->kind = z::SQR; } else { $$ = $3; $$->kind = z::FUN_AP; $$->id = idstr; } delete $1; } ; aexpseq: aexp { $$ = new Node(z::SEQ,$1); } | aexpseq COMMA aexp { $$ = $1; $$->addChild($3); } ; aexp: exp {$$ = $1; } | indexset ASSIGN exp { if ($1->arity() == 1) { if ($1->child(0)->kind == z::ID) $$ = new Node(z::ASSIGN, $1->child(0)->id, $3); else $$ = new Node(z::ASSIGN, $1->child(0), $3); } else $$ = new Node(z::ASSIGN, $1, $3); } ; indexset: exp {$$ = new Node(z::INDEX_AND,$1); } | indexset AMPERSAND exp // Used only on lhs of array assignments { $$ = $1; $$->addChild($3); } ; exp_id: id { const string& idstr = $1->id; if (idstr == "pending") $$ = new Node(z::PENDING); else if (idstr == "true") $$ = new Node(z::TRUE); else if (idstr == "false") $$ = new Node(z::FALSE); else $$ = $1; } ; %% /* ========================================================================== Additional Code ========================================================================== */ void yy::parser::error (const yy::parser::location_type& l, const std::string& m) { driver.error (m); // Ignore location, since not maintained in lexer } spark-2012.0.deb/victor/vct/src/smtlib-driver.hh0000644000175000017500000000424211753202341020434 0ustar eugeneugen//========================================================================== //========================================================================== // SMTLIB-DRIVER.HH //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. 
*/ //========================================================================== #ifndef SMTLIB_DRIVER_HH #define SMTLIB_DRIVER_HH #include using std::ifstream; using std::ofstream; #include "smt-driver.hh" // Interface for SMTLib format file-level solver interface. class SMTLibDriver : public SMTDriver { public: SMTLibDriver() {}; private: string solverInputFileName; string solverOutputFileName; string solverErrorFileName; int exitStatus; Node* benchmark; Node* formula; protected: virtual Node* translateUnit(Node* n); virtual void initGoal(const string& unitName, int goalNum, int ConclNum); virtual void addDecl(Node* n); virtual void addRule(Node* h, const string& hId, string& remarks); virtual void addHyp(Node* h, const string& hId, string& remarks); virtual void addConcl(Node* n, string& remarks); virtual void finishSetup(); virtual bool checkGoal(string& remarks); virtual Status getResults(string& remarks); virtual void finaliseGoal(); }; #endif // ! SMTLIB_DRIVER_HH spark-2012.0.deb/victor/vct/src/context.cc0000644000175000017500000007603611753202341017335 0ustar eugeneugen//======================================================================== //======================================================================== // CONTEXT.CC //======================================================================== //======================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ #include using std::map; using std::make_pair; using std::pair; #include #include using std::set; #include "context.hh" #include "utility.hh" #include "typesort.hh" //============================================================================ // Context functions //============================================================================ FDLContext::FDLContext(Node* FDL_AST) : typeSeq(SEQ, UNMANAGED), // Initialisers to make sure Nodes are outside pool termSeq(SEQ, UNMANAGED), bindings(SEQ, UNMANAGED) { for (int i = 0; i != FDL_AST->arity(); i++) { insert(FDL_AST->child(i)); } // Do topological sort on decls to ensure def before use. try { Node* sortResult = TypeSort::sort(&typeSeq); typeSeq = * (sortResult->child(0)); // Make sure new declarations find their way into the type map. for (int i = 0; i != sortResult->child(1)->arity(); i++) { Node* newDecl = sortResult->child(1)->child(i); typeMap.insert(make_pair(newDecl->id, newDecl)); } } catch (std::runtime_error e) { printMessage(ERRORm, string("Topological sort of type decls failed\n") + e.what() ); } } // NB: If ever modify this to merge new and existing bindings, should then // check the effect of the return val on callers. 
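// Illustrative sketch, not part of the original source: mapInsertWithCheck
// below relies on the std::map::insert idiom, which returns an
// (iterator, bool) pair whose bool is false when the key is already present,
// in which case the map keeps its original value:
//
//   map<string, int> m;
//   bool a = m.insert(make_pair(string("x"), 1)).second;  // true  -- inserted
//   bool b = m.insert(make_pair(string("x"), 2)).second;  // false -- "x" keeps 1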
bool mapInsertWithCheck(const char* mapName, map& m, // Use & to modify arg, not copy const string& key, Node* val) { bool success = m.insert(make_pair(key, val)).second; if (!success) { Node* mapVal = m.find(key)->second; if (val->equals(mapVal)) { printMessage(INFOm, "Rejected repeated entry for " + key + " in map " + mapName); } else { printMessage(ERRORm, "New value for " + key + " in map " + mapName + + "conflicts with existing value." + ENDLs + "Existing value: " + ENDLs + mapVal->toString() + ENDLs + "New value: " + ENDLs + val->toString() + ENDLs ); } } return success; } void FDLContext::insert(Node* decl, bool atEnd) { switch (decl->kind) { case DEF_TYPE: { if (decl->arity() != 0) { Node* ty = decl->child(0); if (ty->kind == ENUM_TY || ty->kind == ARRAY_TY || ty->kind == RECORD_TY) { ty->id = decl->id; } } bool success = mapInsertWithCheck("typeMap", typeMap, decl->id, decl); if (!success) break; if (atEnd) { typeSeq.addChild(decl); } else { typeSeq.addLeftChild(decl); } if (decl->arity() == 0) break; Node* ty = decl->child(0); if (ty->kind == ENUM_TY) { Node* enumTy = ty; for ( int i = 0; i!= enumTy->arity(); i++) { Node* enumId = enumTy->child(i); // Ensure new type id constructed each time round Node* namedEnumTy = new Node(TYPE_ID, decl->id); Node* enumConstDecl = new Node (DEF_CONST, enumId->id, namedEnumTy); enumConstMap.insert(make_pair(enumId->id, enumConstDecl)); } } else if (ty->kind == RECORD_TY) { Node* recordTy = ty; Node* recordTyId = new Node(TYPE_ID, decl->id); for ( int i = 0; i!= recordTy->arity(); i++) { Node* fieldDecl = recordTy->child(i); // DECL{fname}(type) // If fname already in map, need to update val to UNKNOWN() // Try adding fname, recType pair to record field map pair < map::iterator, bool > res = recordFieldMap.insert(make_pair(fieldDecl->id,recordTyId)); if (!res.second) { // If already entry in map for fieldDecl->id, // update corresponding value to UNKNOWN() to flag // that type cannot be unambiguously figured out // from fieldname. (res.first)->second = Node::unknown; } } // End for } // End if break; } case DEF_CONST: { bool success = mapInsertWithCheck("constMap", constMap, decl->id, decl); if (success) { termSeq.addChild(decl); } break; } case DECL_VAR: { decl->kind = DEF_CONST; Node* declI = decl->copy(); // Initial value variant of var declI->id = decl->id + "___init"; Node* declL = decl->copy(); // loop start value variant of var declL->id = decl->id + "___loopinit"; bool success1 = mapInsertWithCheck("constMap", constMap, decl->id, decl); bool success2 = mapInsertWithCheck("constMap", constMap, declI->id, declI); bool success3 = mapInsertWithCheck("constMap", constMap, declL->id, declL); // varMap. 
insert(make_pair(decl->id, decl)); if (success1) termSeq.addChild(decl); if (success2) termSeq.addChild(declI); if (success3) termSeq.addChild(declL); break; } case DECL_FUN: { bool success = mapInsertWithCheck("funMap", funMap, decl->id, decl); if (success) termSeq.addChild(decl); break; } default: assert(false); } return; } Node* FDLContext::lookupType(const string& s) { map::iterator i = typeMap.find(s); if (i == typeMap.end()) return 0; else return i->second; } Node* FDLContext::lookupConst(const string& s) { map::iterator i = constMap.find(s); if (i == constMap.end()) return 0; else return i->second; } Node* FDLContext::lookupVar(const string& s) { map::iterator i = varMap.find(s); if (i == varMap.end()) return 0; else return i->second; } Node* FDLContext::lookupFun(const string& s) { map::iterator i = funMap.find(s); if (i == funMap.end()) return 0; else return i->second; } Node* FDLContext::lookupEnumConst(const string& s) { map::iterator i = enumConstMap.find(s); if (i == enumConstMap.end()) return 0; else return i->second; } Node* FDLContext::lookupRecordField(const string& s) { map::iterator i = recordFieldMap.find(s); if (i == recordFieldMap.end()) return Node::unknown; else return i->second; } // Normalise top level of type. // Returns PENDING when type declaration missing. // [Maybe push this error-case behaviour back into lookupType??] Node* FDLContext::normaliseType(Node* n) { if (n->kind == TYPE_ID || n->kind == TYPE_PARAM) { Node* typeDecl = lookupType(n->id); if (typeDecl == 0) { printMessage(ERRORm, "normaliseType: encountered undeclared type: " + n->id); return new Node(PENDING); } else if (typeDecl->arity() == 0) { if (n->kind == TYPE_ID) return n; else // n->kind == TYPE_PARAM return new Node(TYPE_ID, n->id); } else { return normaliseType(typeDecl->child(0)); } } return n; // If not TYPE_ID } // Put type into canonical (unique) form. Could be TYPE_ID if type is // named type for ENUM type, ARRAY or RECORD type. Node* FDLContext::canoniseType(Node* n) { if (n->kind == TYPE_ID || n->kind == TYPE_PARAM) { Node* typeDecl = lookupType(n->id); if (typeDecl == 0) { printMessage(ERRORm, "canoniseType: encountered undeclared type: " + n->id); return new Node(PENDING); } else if (typeDecl->arity() == 0 // typeDecl->arity() == 1 if we get to these tests || typeDecl->child(0)->kind == ARRAY_TY || typeDecl->child(0)->kind == RECORD_TY || typeDecl->child(0)->kind == ENUM_TY) { if (n->kind == TYPE_ID) return n; else // n->kind == TYPE_PARAM return new Node(TYPE_ID, n->id); } else { return canoniseType(typeDecl->child(0)); } } return n; // If not TYPE_ID } void FDLContext::pushBinding(Node* decl) {bindings.addChild(decl);} void FDLContext::popBinding() {bindings.popChild();} // Lookup starting from end. Node* FDLContext::lookupBinding(const string& s) { for (int i = bindings.arity(); i != 0; ) { i--; Node* decl = bindings.child(i); if (s == decl->id) return decl; } return 0; } void FDLContext::pushPathStep(z::Kind k, int i) { pathKinds.push_back(k); pathAddr.push_back(i); return; } void FDLContext::popPathStep() { pathAddr.pop_back(); pathKinds.pop_back(); return; } string FDLContext::getPathString() { string s; for (int i = 0; i != (int) pathAddr.size(); i++) { s += kindString(pathKinds.at(i)) + "." + intToString(pathAddr.at(i)) + "."; } return s; } // Id = const, enum const, var or bound var. 
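// Illustrative sketch, not part of the original source: the bindings member
// above behaves as a stack of quantifier-style declarations, and
// lookupBinding searches it from the end, so the innermost declaration of a
// name shadows outer ones.  Using the Node(kind, id, child) constructor seen
// elsewhere in this file (ctxt is a hypothetical FDLContext*):
//
//   ctxt->pushBinding(new Node(DECL, string("i"), Node::int_ty));
//   ctxt->pushBinding(new Node(DECL, string("i"), Node::bool_ty));
//   ctxt->lookupBinding("i")->child(0);   // Node::bool_ty (innermost wins)
//   ctxt->popBinding();
//   ctxt->lookupBinding("i")->child(0);   // Node::int_ty
//   ctxt->popBinding();
//
// lookupId below consults this binding stack before the constant, enum
// constant and variable maps.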
Node* FDLContext::lookupId(const string& s) { Node* n; n = lookupBinding(s); if (n != 0) return n; n = lookupConst(s); if (n != 0) return n; n = lookupEnumConst(s); if (n != 0) return n; return lookupVar(s); } Node* FDLContext::lookupConstId(const string& s) { Node* n; n = lookupConst(s); if (n != 0) return n; n = lookupEnumConst(s); if (n != 0) return n; return lookupVar(s); } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // getType //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Returns Node(UNKNOWN) if fails. Never returns 0. Node* FDLContext::getType (Node* n) { switch(n->kind) { case TYPE_PARAM: case TYPE_ID: case INT_TY: case REAL_TY: case BOOL_TY: case BIT_TY: case BITVEC_TY: case SUBRANGE_TY: case ENUM_TY: case ARRAY_TY: case RECORD_TY: case FUN_TY: // Yices only case TUPLE_TY: return Node::type_univ; case UNIT: case DECLS: case RULES: case RULE: case GOALS: case GOAL: case HYPS: case CONCLS: case FORALL: case EXISTS: case IFF: case IMPLIES: case AND: case OR: case NOT: case TRUE: case FALSE: case EQ: case NE: case LT : case GT: case LE: case I_LT: case I_LE: case R_LT: case R_LE: case GE: case PRED_AP: case TO_PROP: return Node::bool_ty; case TO_BIT: case TERM_TRUE: case TERM_FALSE: case TERM_AND: case TERM_OR: case TERM_NOT: case TERM_EQ: case TERM_NE: case TERM_I_LT: case TERM_I_LE: return Node::bit_ty; case UMINUS: case PLUS: case MINUS: case TIMES: case EXP: return Node::unknown; case I_UMINUS: case I_SUCC: case I_PRED: case I_PLUS: case I_MINUS: case I_TIMES: case IDIV: case MOD: case IDIV_E: case MOD_E: case I_EXP: case I_SQR: case I_ABS: return Node::int_ty; case NATNUM: return new Node(SUBRANGE_TY, n->copy(), n->copy()); case TO_REAL: case R_UMINUS: case R_PLUS: case R_MINUS: case R_TIMES: case R_EXP: case R_SQR: case R_ABS: case RDIV: return Node::real_ty; case ID: { string idName = n->id; Node* decl = lookupId(idName); if (decl != 0) { // DECL{id} type // DEF_CONST {id} type [exp] // DECL_VAR {id} type // DEF_CONST {id} enumType return decl->child(0); } // Unbound upper-case ids are expected in some rules // Unbound lower-case ids are not expected. if (! 
('A' <= idName[0] && idName[0] <= 'Z')) { printMessage(INFOm, "getType: encountered unexpected lower-case id: " + idName); } return Node::unknown; } case CONST: { string idName = n->id; Node* decl = lookupConstId(idName); if (decl != 0) { // DEF_CONST {id} type [exp] // DECL_VAR {id} type // DEF_CONST {id} enumType return decl->child(0); } else { printMessage(INFOm, "getType: encountered undeclared constant: " + idName); return Node::unknown; } } case VAR: { string idName = n->id; Node* decl = lookupBinding(idName); if (decl != 0) { // DECL{id} type return decl->child(0); } else { printMessage(INFOm, "getType: encountered unbound variable: " + idName); return Node::unknown; } } case TUPLE: { return Node::unknown; /* Node* type = new Node(TUPLE_TY); for (int i = 0; i != n->arity(); i++) { Node* childType = getType(n->child(i)); } */ } case FUN_AP: { string funName = n->id; Node* funDecl = lookupFun(funName); if (funDecl != 0) { // DECL_FUN {id} (SEQ type+) type return funDecl->child(1); } else { printMessage(INFOm, "getType: Encountered unexpected function: " + funName); return Node::unknown; } } case ARR_ELEMENT: { Node* arrTy; if (n->id.size() > 0) { arrTy = lookupType(n->id)->child(0); } else if (strictTyping) { arrTy = 0; } else { arrTy = normaliseType(getType(n->child(0))); } if (arrTy != 0) { if (arrTy->kind == ARRAY_TY) return arrTy->child(1); else { printMessage(INFOm, "getType: array has unexpected type " + kindString(arrTy->kind)); return Node::unknown; } } else { return Node::unknown; } } case ARR_UPDATE: { if (n->id.size() > 0) { return new Node(TYPE_ID, n->id); } else if (strictTyping) { return Node::unknown; } else { return getType (n->child(0)); } } case RCD_ELEMENT: { // RCD_ELEMENT{rcd-id} exp // RCD_ELEMENT{rcd-id} exp (TYPE_PARAM{type-id}) Node* rcdTy; if (n->arity() == 2) { rcdTy = lookupType(n->child(1)->id)->child(0); } else if (strictTyping) { rcdTy = 0; } else { rcdTy = normaliseType(getType(n->child(0))); } if (rcdTy == 0) return Node::unknown; if (rcdTy->kind != RECORD_TY) { printMessage(INFOm, "getType: record has unexpected type " + kindString(rcdTy->kind)); return Node::unknown; } string fieldName = n->id; for (int i = 0; i != rcdTy->arity(); i++ ) { Node* decl = rcdTy->child(i); if ( decl->id == fieldName) { return decl->child(0); } } printMessage(INFOm, "getType: attempt to access field " + fieldName + " of record " + (rcdTy->id)); return Node::unknown; } case RCD_UPDATE: { // RCD_UPDATE{rcd-id} exp val // RCD_UPDATE{rcd-id} exp val (TYPE_PARAM{type-id}) if (n->arity() == 3) { return new Node(TYPE_ID, n->child(2)->id); } else if (strictTyping) { return Node::unknown; } else { return getType (n->child(0)); } } case MK_ARRAY: case MK_RECORD: return new Node(TYPE_ID, n->id); case ITE: return n->child(3); default: return Node::unknown; } } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // getSubNodeTypes //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Infer types of subexpressions. // Always returns a SEQ node with arity same as argument node. If // inference of child type fails placeholder UNKNOWN Nodes is returned // as child's type. Node* FDLContext::getSubNodeTypes (Node* n) { Node* result = new Node(SEQ); switch(n->kind) { case FORALL: case EXISTS: { // FORALL (SEQ d1 ... nk) body // EXISTS (SEQ d1 ... 
nk) body // where di = DECL{id} type for (int i = 0; i != n->child(0)->arity(); i++) { result->addChild(Node::type_univ); } result->addChild(Node::bool_ty); return result; } case NOT: case TO_BIT: { result->addChild(Node::bool_ty); return result; } case IFF: case IMPLIES: { result->addChild(Node::bool_ty); result->addChild(Node::bool_ty); return result; } case UNIT: case RULES: case RULE: case GOALS: case GOAL: case HYPS: case CONCLS: case AND: case OR: { for (int i = 0; i != n->arity(); i++) { result->addChild(Node::bool_ty); } return result; } case TO_PROP: case TERM_AND: case TERM_OR: case TERM_NOT: { for (int i = 0; i != n->arity(); i++) { result->addChild(Node::bit_ty); } return result; } case TERM_EQ: case TERM_NE: case EQ: case NE: { if (n->arity() == 3) { result->addChild(n->child(2)); result->addChild(n->child(2)); result->addChild(Node::type_univ); return result; } else { result->addChild(Node::unknown); result->addChild(Node::unknown); return result; } } case TRUE: case FALSE: case NATNUM: case ID: return result; case LE: case LT: { result->addChild(Node::int_real_or_enum_ty); result->addChild(Node::int_real_or_enum_ty); return result; } case SUCC: case PRED: { result->addChild(Node::int_real_or_enum_ty); return result; } case UMINUS: { result->addChild(Node::int_or_real_ty); return result; } case PLUS: case MINUS: case TIMES: { result->addChild(Node::int_or_real_ty); result->addChild(Node::int_or_real_ty); return result; } case TERM_I_LT: case TERM_I_LE: case I_LT: case I_LE: case I_PLUS: case I_MINUS: case I_TIMES: case IDIV: case MOD: case IDIV_E: case MOD_E: case I_EXP: { result->addChild(Node::int_ty); result->addChild(Node::int_ty); return result; } case TO_REAL: case I_UMINUS: case I_SUCC: case I_PRED: case ODD: case I_ABS: case I_SQR: { result->addChild(Node::int_ty); return result; } case R_LT: case R_LE: case R_PLUS: case R_MINUS: case R_TIMES: case RDIV: { result->addChild(Node::real_ty); result->addChild(Node::real_ty); return result; } case R_EXP: { result->addChild(Node::real_ty); result->addChild(Node::int_ty); return result; } case R_ABS: case R_SQR: case R_UMINUS: { result->addChild(Node::real_ty); return result; } case EXP: { result->addChild(Node::int_or_real_ty); result->addChild(Node::int_ty); return result; } case PRED_AP: case FUN_AP: { if (n->arity() == 0) { // propositional variable return result; } string funName = n->id; Node* funDecl = lookupFun(funName); if (funDecl != 0) { // DECL_FUN {id} (SEQ type+) type return funDecl->child(0); } printMessage(INFOm, "getSubNodeTypes: Encountered unexpected function: " + funName ); break; } case ARR_ELEMENT: // ARR_ELEMENT{?} array (SEQ i1 ... 
ik) { Node* arrayTy = 0; if (n->id != "") { arrayTy = nameToType(n->id); } else if (strictTyping) break; else { arrayTy = getType(n->child(0)); } if (arrayTy->kind == UNKNOWN) break; // normalisedArrayTy = ARRAY_TY (SEQ T1 ...Tn) T Node* normalisedArrayTy = normaliseType(arrayTy); result->addChild(arrayTy); result->appendChildren(normalisedArrayTy->child(0)); return result; } case ARR_UPDATE: // ARR_UPDATE{?} array (SEQ i1 ...ik) value { Node* arrayTy = 0; if (n->id != "") { arrayTy = nameToType(n->id); } else if (strictTyping) break; else { arrayTy = getType(n->child(0)); } if (arrayTy->kind == UNKNOWN) break; // normalisedArrayTy = ARRAY_TY (SEQ T1 ...Tn) T Node* normalisedArrayTy = normaliseType(arrayTy); result->addChild(arrayTy); result->appendChildren(normalisedArrayTy->child(0)); result->addChild(normalisedArrayTy->child(1)); return result; } case ARR_BOX_UPDATE: // ARR_BOX_UPDATE{?} array (SEQ r1 ...rk) value // rj = SUBRANGE i1 i2 { Node* arrayTy = 0; if (n->id != "") { arrayTy = nameToType(n->id); } else if (strictTyping) break; else { arrayTy = getType(n->child(0)); } if (arrayTy->kind == UNKNOWN) break; // normalisedArrayTy = ARRAY_TY (SEQ T1 ...Tn) T Node* normalisedArrayTy = normaliseType(arrayTy); Node* indexTys = normalisedArrayTy->child(0); Node* valTy = normalisedArrayTy->child(1); result->addChild(arrayTy); for (int j = 0; j != indexTys->arity(); j++) { result->addChild(indexTys->child(j)); result->addChild(indexTys->child(j)); } result->addChild(valTy); return result; } case MK_ARRAY: { // MK_ARRAY{arrname} default a1 ... an, n >= 0 // MK_ARRAY{arrname} a1 ... an, n >= 1 // where ai = ASSIGN (SEQ i1 ... im) val, m >= 1 // i = e | SUBRANGE e1 e2 Node* typeDecl = lookupType(n->id); Node* arrTy = typeDecl->child(0); Node* indexTys = arrTy->child(0); Node* valTy = arrTy->child(1); for (int i = 0; i != n->arity(); i++) { Node* c = n->child(i); if (c->kind != ASSIGN) { result->addChild(valTy); continue; } Node* indexList = c->child(0); for (int j = 0; j != indexList->arity(); j++) { Node* indexTy = indexTys->child(j); if (indexList->child(j)->kind == SUBRANGE) { result->addChild(indexTy); result->addChild(indexTy); } else { result->addChild(indexTy); } } result->addChild(valTy); } return result; } case RCD_ELEMENT: // RCD_ELEMENT{field} record type? { Node* recordTy; if (n->arity() == 2) recordTy = n->child(1); else if (strictTyping) break; else recordTy = getType(n->child(0)); result->addChild(recordTy); if (n->arity() == 2) result->addChild(Node::type_univ); return result; } case RCD_UPDATE: // RCD_UPDATE{field} record value type? { Node* recordTy; if (n->arity() == 3) recordTy = n->child(2); else if (strictTyping) break; else recordTy = getType(n->child(0)); result->addChild(recordTy); Node* normRecordTy = normaliseType(recordTy); Node* fieldType = 0; string fieldName = n->id; if (normRecordTy->kind != RECORD_TY) { printMessage(INFOm, "getSubNodeTypes: record has unexpected type " + kindString(normRecordTy->kind)); fieldType = Node::unknown; } else { for (int i = 0; i != normRecordTy->arity(); i++ ) { Node* decl = normRecordTy->child(i); if ( decl->id == fieldName) { fieldType = decl->child(0); } } if (fieldType == 0) { printMessage(INFOm, "getSubnodeTypes: attempt to access field " + fieldName + " of record " + (normRecordTy->id)); fieldType = Node::unknown; } } result->addChild(fieldType); if (n->arity() == 3) result->addChild(Node::type_univ); return result; } case MK_RECORD: { // MK_RECORD{rcdname} a1 ... 
an, n >= 1 // where ai = ASSIGN{fldname} val string rcdName = n->id; Node* typeDecl = lookupType(rcdName); Node* rcdTy = typeDecl->child(0); for (int i = 0; i != n->arity(); i++) { Node* assign = n->child(i); string fldName = assign->id; // Search for corresponding DECL in type Node* fldTy = 0; for (int j = 0; j != rcdTy->arity(); j++) { if (rcdTy->child(j)->id == fldName) { fldTy = rcdTy->child(j)->child(0); break; } } if (fldTy == 0) { printMessage(INFOm, "getSubNodeTypes: can't find type of " + fldName + " field of " + rcdName + " record type"); fldTy = Node::unknown; } result->addChild(fldTy); } return result; } case ITE: { result->addChild(Node::bool_ty); result->addChild(n->child(3)); result->addChild(n->child(3)); result->addChild(Node::type_univ); return result; } default: break; } // Case for errors and unknowns int numSubNodes = n->getSubNodes().size(); for (int i = 0; i != numSubNodes; i++) { result->addChild(Node::unknown); } return result; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // extractDecls //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Form list of all declarations from contents of tables // Ensure that types are added in the stored order which respects // dependencies. // consts, vars and funs are ordered alphabetically. Node* FDLContext::extractDecls() { Node* decls = new Node(DECLS); for (int j = 0; j != typeSeq.arity(); j++) { decls->addChild(typeSeq.child(j)); } map::iterator i; for (i = constMap.begin(); i != constMap.end(); i++) decls->addChild(i->second); for (i = varMap.begin(); i != varMap.end(); i++) decls->addChild(i->second); for (i = funMap.begin(); i != funMap.end(); i++) decls->addChild(i->second); return decls; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Feature methods //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ void FDLContext::addFeature(const string& s) { features.insert(s); return; } void FDLContext::removeFeature(const string& s) { features.erase(s); return; } bool FDLContext::hasFeature(const string& s) { return features.find(s) != features.end(); } spark-2012.0.deb/victor/vct/src/smt-driver.hh0000644000175000017500000001663711753202341017760 0ustar eugeneugen//========================================================================== //========================================================================== // SMT-DRIVER.HH //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ //========================================================================== // Define generic interface for SMT solvers and a uniform procedure // for processing a unit using this interface. 
// Terminology: // Query: Either a `goal' (Verification Condition) comprising // Decls + rules + hyps of VC + concls of VC // or a `goal slice' // Decls + rules + hyps + 1 of concls of VC // // QuerySet: Set of related queries, all from the same Unit, so sharing // Decls and rules. // // Session: Collection of all solver interactions during one invocation of // Victor. // // Solver drivers inheriting from SMTDriver should only define those // virtual functions they need to override. #ifndef SMT_DRIVER_HH #define SMT_DRIVER_HH #include "node.hh" #include "utility.hh" // For UnitInfo decl // FILE: prefix for comment about use with offline file interface to solver // API: prefix for comment about use with online API interface class SMTDriver { public: enum Status { TRUE, UNPROVEN, ERROR, RESOURCE_LIMIT, UNCHECKED }; class QueryStatus { public: Status status; string remarks; string time; QueryStatus(Status s, const string& r, const string& t) : status(s), remarks(r), time(t) {}; }; class QueryRecord { public: int goalNum; // 1-based int conclNum; // 1-based. 0 if concls fused Status status; string time; string remarks; }; class ResultRecord { public: string unitKind; string origins; int goalNum; // Used for trivial goals. o/w copy of queryNum.goalNum int queryNum; // -1 for trivial goal ("*** true") }; vector queryRecords; vector resultRecords; void driveUnit(Node* unit, UnitInfo* unitInfo); // new alternative driver. void altDriveUnit(Node* unit, UnitInfo* unitInfo); // Auxiliary function used by altDriveUnit. // Overridden in RuleFilter child class. virtual vector driveQuerySet(UnitInfo* unitInfo, Node* unit, set excludedRules, int startQuery, int endQuery); // Virtual functions for solver-specific interfaces virtual ~SMTDriver() {}; virtual void initSession() {}; private: Timer goalTimer; void driveGoal(Node* decls, Node* rules, Node* hyps, Node* concl, UnitInfo* unitInfo, int goalNum, int currentConcl); protected: virtual Node* translateUnit(Node* n); virtual bool onlineInterface() {return false; }; // FILE: return false // API: return true virtual bool resourceLimitsForQuerySet() {return false; }; // Return true if resource limits can be set only for // processing of whole query set, not individual queries. // E.g. this is the case for FILE level interfaces where // shell-level ulimit command is used to set limits. virtual void initGoal(const string& unitName, int goalNum, int conclNum) {}; // Called at start of goal / goal slice // goalNum and conclNum misleading if // multiple queries. Need to fix. // OLD // Eventually replace initGoal with initQuerySet, since more accurate name virtual void initQuerySet(const string& unitName, int goalNum, int conclNum) { initGoal(unitName, goalNum, conclNum); return; } // add* methods should throw std::runtime_error if problems are // sufficiently severe that processing of current QuerySet needs // to be immedidately aborted. The thrown exception object's // string should include an explanation for output to log file. // virtual void addDecl(Node* n) {}; virtual void addHyp(Node* h, const string& hId, string& remarks) {}; virtual void addRule(Node* h, const string& hId, string& remarks) { addHyp(h, hId, remarks); }; virtual void addConcl(Node* n, string& remarks) {}; // Only called once virtual void push() {}; // FILE: add push command // API: do push virtual Status check(string& remarks); // FILE: add check command. Return dummy Status. 
// API: run check command virtual void pop() {}; // FILE: add pop command // API: o pop virtual void finishSetup() {}; // FILE only // Called after addConcl or on throw of // exception by any of add methods. // : Write QuerySet input file for solver // OLD virtual void outputQuerySet() {}; // FILE only virtual bool checkGoal(string& remarks); // Should return true if problems. Any exceptions // are uncaught Method must write log file with // explanation. // API: run solver // OLD virtual bool runQuerySet(string& remarks) {return true;}; // FILE: Run solver on query set // API: Unused virtual Status getResults(string& remarks); // OLD. virtual vector getRunResults(int numQueries) { return vector(); } // FILE: Read solver output files // API: Unused virtual void finaliseQuerySet() { finaliseGoal(); }; // Called at end of query, both in normal and // error cases. Replaces finaliseGoal() virtual void finaliseGoal() {}; // Called at end of goal / goal slice // both in normal and error cases. // OLD public: virtual void finaliseSession() {}; }; #endif // ! SMT_DRIVER_HH spark-2012.0.deb/victor/vct/src/bignum.hh0000644000175000017500000000522011753202341017127 0ustar eugeneugen//========================================================================== //========================================================================== // BIGNUM.HH //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ #ifndef BIGNUM_HH #define BIGNUM_HH #include "node.hh" #include // Put string decls into global namespace. // Already in std namespace because // included in node.hh. Apparently need this in // some cases #include //======================================================================== // MyInt //======================================================================== // Wrapper for GMP multiple precision integers. // class MyInt { private: mpz_class num; public: MyInt(Node* n); // Assume n satisfies isInt(). 
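    // Illustrative note, not part of the original source: as exercised by
    // the assertions in test-bignum.cc further down this package, rem
    // truncates towards zero (the result takes the sign of the dividend)
    // while mod is floor-style (the result takes the sign of the divisor),
    // e.g. with i8 = 8, j8 = -8, i3 = 3, j3 = -3:
    //
    //   MyInt::rem(i8, j3) ==  2      MyInt::mod(i8, j3) == -1
    //   MyInt::rem(j8, i3) == -2      MyInt::mod(j8, i3) ==  1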
MyInt(const std::string& s); MyInt(const mpz_class& z) : num(z) {}; MyInt() : num(mpz_class(0)) {}; Node* toNode(); bool operator==(const MyInt& a) const; bool operator<(const MyInt& a) const; std::string toString() const; static bool isInt(Node* n); static MyInt uminus(const MyInt& a); static MyInt plus(const MyInt& a, const MyInt& b); static MyInt minus(const MyInt& a, const MyInt& b); static MyInt times(const MyInt& a, const MyInt& b); static MyInt div(const MyInt& a, const MyInt& b); static MyInt rem(const MyInt& a, const MyInt& b); static MyInt mod(const MyInt& a, const MyInt& b); static MyInt exp(const MyInt& a, const MyInt& b); }; //======================================================================== // MyInt-related utility functions //======================================================================== bool isIntNode(Node* n); void groundEval(Node* n); void constArithEval(Node* n); #endif // ! BIGNUM_HH spark-2012.0.deb/victor/vct/src/smtlib2-driver.hh0000644000175000017500000000547311753202341020525 0ustar eugeneugen//========================================================================== //========================================================================== // SMTLIB2-DRIVER.HH //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ //========================================================================== #ifndef SMTLIB2_DRIVER_HH #define SMTLIB2_DRIVER_HH #include using std::ifstream; using std::ofstream; #include "rule-filter.hh" // Interface for SMTLib format file-level solver interface. 
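// Illustrative sketch, not part of the original source: drivers such as the
// one below override only the SMTDriver / RuleFilter hooks they need.  A
// minimal skeleton against the virtual signatures declared in smt-driver.hh
// (the class name and its solver are invented for illustration):
//
//   class MySolverDriver : public SMTDriver {
//   protected:
//       virtual void initGoal(const string& unitName, int goalNum, int conclNum)
//                                                  { /* start a fresh query */ }
//       virtual void addDecl(Node* n)              { /* record declaration */ }
//       virtual void addHyp(Node* h, const string& hId, string& remarks)
//                                                  { /* record hypothesis */ }
//       virtual void addConcl(Node* n, string& remarks)
//                                                  { /* record conclusion */ }
//       virtual bool checkGoal(string& remarks)    { /* run the solver */ return false; }
//       virtual Status getResults(string& remarks) { /* read its output */ return UNPROVEN; }
//       virtual void finaliseGoal()                { /* clean up */ }
//   };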
class SMTLib2Driver : public RuleFilter { // RuleFilter is a subclass // of SMTDriver public: SMTLib2Driver() {}; private: string solverInputFileName; string solverOutputFileName; string solverErrorFileName; int exitStatus; Node* script; Node* formula; bool analyseExitStatus(int exitStatus, string& remarks); protected: virtual Node* translateUnit(Node* n); virtual bool resourceLimitsForQuerySet(); virtual void initQuerySet(const string& unitName, int goalNum, int ConclNum); // For old smt driver virtual void initGoal(const string& unitName, int goalNum, int ConclNum); virtual void addDecl(Node* n); virtual void addRule(Node* h, const string& hId, string& remarks); virtual void addHyp(Node* h, const string& hId, string& remarks); virtual void addConcl(Node* n, string& remarks); virtual void push(); virtual Status check(string& remarks); virtual void pop(); virtual void finishSetup(); virtual void outputQuerySet(); // For old smt driver virtual bool checkGoal(string& remarks); virtual bool runQuerySet(string& remarks); virtual Status getResults(string& remarks); virtual vector getRunResults(int numQueries); virtual void finaliseQuerySet(); virtual void finaliseGoal(); }; #endif // ! SMTLIB2_DRIVER_HH spark-2012.0.deb/victor/vct/src/translation.hh0000644000175000017500000000342411753202341020210 0ustar eugeneugen//======================================================================== //======================================================================== // TRANSLATION.HH //======================================================================== //======================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ //======================================================================== // // New theory translations #ifndef TRANSLATION_HH #define TRANSLATION_HH #include "node.hh" #include "context.hh" void refineTypes(FDLContext* ctxt, Node* unit); void introBitType(FDLContext* ctxt, Node* unit); void abstractBit(FDLContext* ctxt, Node* unit); bool typeCheckUnit(const string& tcKind, FDLContext* c, Node* n); void abstractEnums(FDLContext* ctxt, Node* unit); void enumsToIntSubranges(FDLContext* ctxt, Node* unit); void axiomatiseEnums(FDLContext* ctxt, Node* unit); void abstractArraysRecords(FDLContext* ctxt, Node* unit); void stripQuantPats(Node* unit); void elimTypeAliases(FDLContext* ctxt, Node* unit); #endif // ! defined TRANSLATION_HH spark-2012.0.deb/victor/vct/src/test-bignum.cc0000644000175000017500000001043211753202341020073 0ustar eugeneugen/* This file is part of Victor: a SPARK VC Translator and Prover Driver. 
Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ #include using std::cout; using std::cerr; using std::endl; #include using std::string; #include #include #include "bignum.hh" #include "formatter.hh" // 2^30 = 1073741824 // 2^31 = 2147483648 // 2^32 = 4294967296 using namespace z; int main (int argc, char *argv[]) { Node* n0 = new Node(NATNUM, string("0")); Node* n1 = new Node(NATNUM, string("1")); Node* n2 = new Node(NATNUM, string("2")); Node* n3 = new Node(NATNUM, string("3")); Node* n4 = new Node(NATNUM, string("4")); Node* n6 = new Node(NATNUM, string("6")); Node* n8 = new Node(NATNUM, string("8")); Node* m1 = new Node(UMINUS, n1); Node* m2 = new Node(UMINUS, n2); Node* m3 = new Node(UMINUS, n3); Node* m6 = new Node(UMINUS, n6); Node* m8 = new Node(UMINUS, n8); MyInt i0(n0); MyInt i1(n1); MyInt i2(n2); MyInt i3(n3); MyInt i4(n4); MyInt i6(n6); MyInt i8(n8); MyInt j1(m1); MyInt j2(m2); MyInt j3(m3); MyInt j6(m6); MyInt j8(m8); assert(MyInt::uminus(i1) == j1); assert(MyInt::plus(i1,i2) == i3); assert(MyInt::plus(i3,j2) == i1); assert(MyInt::plus(j2,i3) == i1); assert(MyInt::minus(i1,i2) == j1); assert(MyInt::minus(i3,i1) == i2); assert(MyInt::times(i2,j3) == j6); assert(MyInt::div(i8,i3) == i2); assert(MyInt::div(i8,j6) == j1); assert(MyInt::rem(i8,i3) == i2); assert(MyInt::rem(i8,j3) == i2); assert(MyInt::rem(j8,i3) == j2); assert(MyInt::rem(j8,j3) == j2); assert(MyInt::mod(i8,i3) == i2); assert(MyInt::mod(i8,j3) == j1); assert(MyInt::mod(j8,i3) == i1); assert(MyInt::mod(j8,j3) == j2); assert(MyInt::mod(i6,i3) == i0); assert(MyInt::mod(i6,j3) == i0); assert(MyInt::mod(j6,i3) == i0); assert(MyInt::mod(j6,j3) == i0); assert(MyInt::exp(j2,j3) == i1); assert(MyInt::exp(j2,i3) == j8); assert(MyInt::exp(j2,j3) == i1); Node* na; Node* nb; Node* nc; Node* nd; na = new Node(PLUS, new Node(TIMES, m2, m3), n2); assert(! (* na == * n8) ); groundEval(na); assert(* na == * n8); assert(! (* na == * n6) ); Node* idW = new Node(ID, new string("w")); Node* idX = new Node(ID, new string("x")); Node* idY = new Node(ID, new string("y")); Node* idZ = new Node(ID, new string("z")); // [(x * 3) * 4 ] div 6 --> 2 * x na = new Node(IDIV, new Node(TIMES, new Node (TIMES, idX, n3), n4), n6 ); constArithEval(na); Formatter::setFormatter(VanillaFormatter::getFormatter()); // cout << "Node na = " << * na << endl; nb = new Node(TIMES, n2, idX); // cout << "Node nb = " << * nn << endl; assert(* na == * nb); // [(2 * x) * y ] * [z * (w * 3)] --> 6 * [(x * y) * (z * w)] nc= new Node (TIMES, new Node (TIMES, new Node (TIMES, n2, idX), idY), new Node (TIMES, idZ, new Node (TIMES, idW, n3) ) ); nd= new Node (TIMES, n6, new Node (TIMES, new Node (TIMES, idX, idY), new Node (TIMES, idZ, idW) ) ); assert ( ! 
(* nc == * nd)); constArithEval(nc); assert (* nc == * nd); cout << "All tests passed" << endl; return 0; } spark-2012.0.deb/victor/vct/src/box.cc0000644000175000017500000001356311753202341016435 0ustar eugeneugen//========================================================================== //========================================================================== // BOX.CC //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ #include #include // for max function. #include "box.hh" using std::endl; using std::ostream; using std::string; /* Implementation of box class */ /* Design notes: * Disjoint union implemented using subclasses * Expect users to use explicit constructor methods and functions in main namespace. Considered defining constructors for Box(string) and Box(char*) and using main class Box for string boxes. However - implicit conversions using such constructors sometimes will use stack allocation which is wrong. Will almost invariably want lifetime of object to outspan the lexical scope of the point where the implicit conversion is inferred. - explicit ones verbose (need Box::new) - explicit ones have wrong return type for use with references (want ref types to allow use of overloaded * and + operators Memory management. - Expect all boxes to be allocated on heap. - Reference to each box kept. Boxes reclaimed en masse by explicit function call. 
*/ // ======================================================================== // Box sub-classes // ======================================================================== class StrBox : public Box { private: string str; public: StrBox(const string& s) : str(s) { height = 1; width = s.size(); lastWidth = width; } virtual void indentPrint(ostream& os, int indent) { os << str; return; } }; Box& box(const string& s) { return * new StrBox(s); } Box& box(const char* cstr) { string s(cstr); return box(s); } class HBoxTAlign : public Box { public: Box* lbox; // Always height 1 Box* rbox; HBoxTAlign(Box* b1, Box* b2) : lbox(b1), rbox(b2) { height = rbox->getHeight(); width = lbox->getWidth() + rbox->getWidth(); lastWidth = lbox->getWidth() + rbox->getLastWidth(); } virtual void indentPrint(ostream& os, int indent) { lbox->indentPrint(os, indent); // Argument irrelevant rbox->indentPrint(os, indent + lbox->getWidth()); return; } }; class HBoxBAlign : public Box { public: Box* lbox; Box* rbox; // Always height 1 HBoxBAlign(Box* b1, Box* b2) : lbox(b1), rbox(b2) { height = lbox->getHeight(); lastWidth = lbox->getLastWidth() + rbox->getWidth(); width = std::max(lastWidth, lbox->getWidth()); } virtual void indentPrint(ostream& os, int indent) { lbox->indentPrint(os, indent); rbox->indentPrint(os, indent); return; } }; class VBox : public Box { public: Box* tbox; Box* bbox; VBox(Box* tb, Box* bb) : tbox(tb), bbox(bb) { height = tbox->getHeight() + bbox->getHeight(); width = std::max(tbox->getWidth(), bbox->getWidth()); lastWidth = bbox->getLastWidth(); } virtual void indentPrint(ostream& os, int indent) { tbox->indentPrint(os, indent); os << endl; for (int i = 0; i < indent; i++) os << " "; bbox->indentPrint(os, indent); return; } }; // ======================================================================== // Box functions and static field defs. // ======================================================================== vector Box::allocList; // Static field definition. int Box::allocCount = 0; Box::Box() { allocList.push_back(this); allocCount++;} Box::~Box () {} void Box::deleteAll() { for ( vector::iterator i = allocList.begin(); i != allocList.end(); i++) { delete (*i); } allocList.clear(); return; } // Ideally constructors of subboxes should not be in main namespace. // If made private, could probably use friend declaration to allow their // visibility in Box. Box* Box::hAppendT(Box* b) { assert(this->getHeight() == 1); return new HBoxTAlign(this,b); } Box* Box::hAppendB(Box* b) { assert(b->getHeight() == 1); return new HBoxBAlign(this,b); } Box* Box::hAppend(Box* b) { if (this->getHeight() == 1) return hAppendT(b); else return hAppendB(b); } Box* Box::vAppend(Box* b) {return new VBox(this,b);} // Overloaded operators for appending strings, cstrings and boxes. // const declarations here are *necessary*. 
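// Illustrative sketch, not part of the original source: typical use of the
// combinators above, assuming the overloaded operators defined below are
// declared in box.hh.  "+" appends horizontally and "/" stacks vertically,
// with the lower box indented to the column at which it is placed:
//
//   Box& b = box("if ") + (box("A") / box("B"));
//   std::cout << b << std::endl;
//   // prints:
//   //   if A
//   //      B
//   Box::deleteAll();   // boxes are reclaimed en masse, as noted above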
Box& operator+ (Box& b1, Box& b2) {return *(b1.hAppend(&b2));} Box& operator+ (const string& s1, Box& b2) { Box& b1 = box(s1); return *(b1.hAppend(&b2)); } Box& operator+ (Box& b1, const string& s2) { Box& b2 = box(s2); return *(b1.hAppend(&b2)); } Box& operator+ (const char* cs1, Box& b2) { Box& b1 = box(cs1); return *(b1.hAppend(&b2)); } Box& operator+ (Box& b1, const char* cs2) { Box& b2 = box(cs2); return *(b1.hAppend(&b2)); } Box& operator/ (Box& b1, Box& b2) {return *(b1.vAppend(&b2));} std::ostream& operator<< (std::ostream& os, Box& b) { b.indentPrint(os, 0); return os; } spark-2012.0.deb/victor/vct/src/yices-driver.hh0000644000175000017500000000443211753202341020257 0ustar eugeneugen//========================================================================== //========================================================================== // YICES-DRIVER.HH //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ //========================================================================== #ifndef YICES_DRIVER_HH #define YICES_DRIVER_HH extern "C" { #include "yicesl_c.h" } #include "smt-driver.hh" // Interface for Yices class YicesDriver : public SMTDriver { private: yicesl_context ctx; Status status; bool push(yicesl_context ctx, Node* n, string& input, string& output); string formatErrorString(const string& input, const string& output); bool ignoreErrorMessage(const string& s); public: YicesDriver() {}; virtual void initSession(); protected: virtual bool onlineInterface() {return true; }; virtual Node* translateUnit(Node* unit); virtual void initGoal(const string& unitName, int goalNum, int ConclNum); virtual void addDecl(Node* n); virtual void addHyp(Node* h, const string& hId, string& remarks); virtual void addConcl(Node* n, string& format); // virtual void finishSetup(); virtual bool checkGoal(string& format); virtual Status getResults(string& remarks); virtual void finaliseGoal(); // virtual void finaliseSession() }; #endif // ! YICES_DRIVER_HH spark-2012.0.deb/victor/vct/src/pdriver.hh0000644000175000017500000000430611753202341017325 0ustar eugeneugen//========================================================================== //========================================================================== // PDRIVER.HH //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. 
Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ // /* Driver for parser, updated for Bison 2.3. Needs updating for Bison 2.4... This file presents public interface of parser. Unlike calc++ example from bison distribution, the function prototype for yylex is moved to a separate file lexer.hh. Not only does this keep yylex private, but also prevents leak of YY_DECL macro definition used by lexer implementation into public space. */ #ifndef PDRIVER_HH #define PDRIVER_HH #include #include "node.hh" class pdriver { public: pdriver (); virtual ~pdriver (); Node* result; // Handling the lexer void scan_begin (); void scan_end (); bool trace_scanning; bool at_start; enum FileType {FDL, RULE, VCG}; FileType currentFileType; // Handling the parser. std::string file; bool trace_parsing; int parse (FileType ty, const std::string& f); int parseFDLFile(const std::string &f) {return parse(FDL,f); } int parseRuleFile(const std::string &f) {return parse(RULE,f); } int parseVCGFile(const std::string &f) {return parse(VCG,f); } // Error handling. void error (const std::string& m); bool errorFlag; }; #endif // ! PDRIVER_HH spark-2012.0.deb/victor/vct/src/pdriver.cc0000644000175000017500000000317011753202341017311 0ustar eugeneugen//========================================================================== //========================================================================== // PDRIVER.CC //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. 
*/ #include "pdriver.hh" #include "parser.tab.hh" using namespace std; pdriver::pdriver () : trace_scanning (false), trace_parsing (false) { } pdriver::~pdriver () { } int pdriver::parse (FileType ty, const string &f) { currentFileType = ty; file = f; errorFlag = false; scan_begin (); if (errorFlag) return 1; yy::parser parser (*this); // Construct a new parser object parser.set_debug_level (trace_parsing); int pval = parser.parse (); scan_end (); return pval; } void pdriver::error (const std::string& m) { std::cerr << m << std::endl; errorFlag = true; } spark-2012.0.deb/victor/vct/src/utility.cc0000644000175000017500000012661611753202341017354 0ustar eugeneugen//========================================================================== //========================================================================== // UTILITY.CC //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson, Altran Praxis, AdaCore Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ #include "utility.hh" // Moved to utility.hh // #include using std::endl; using std::cout; using std::cerr; #include using std::ostringstream; using std::istringstream; // Moved to utility.hh // #include using std::ofstream; #include using std::map; using std::make_pair; using std::pair; #include using std::setw; using std::setprecision; #include using std::fixed; #include // for find. #include using std::isdigit; #include // For atoi, getenv #include // for runtime_error extern "C" { #include // For mkdir #include // For access, unlink, gethostname, getpid #include // For opendir, readdir, closedir #include // For errno static var and error codes #include // For strerror - printing error codes #ifdef _WIN32 #include #else #include // For getrusage() resource name argument #include // For getrusage() #endif } //======================================================================== // String functions and constants //======================================================================== // Utility function for debugging. Normal C++ STL always inserts // conversion automatically from c strings to string type. string cStringToString(char* c) { return string(c); } bool isIntString(const string& s) { if (s.size() == 0) return false; int start = 0; if (s[0] == '+' || s[0] == '-') start = 1; for (int i = start; i != (int) s.size(); i++) { if (! isdigit(s[i])) return false; } return true; } string intToString(int i) { ostringstream oss; oss << i; return oss.str(); } string uLongToString(unsigned long i) { ostringstream oss; oss << i; return oss.str(); } // c_str returns (const char* const) value. // atoi expects a const char* argument. i.e. it promises not to change it. 
int stringToInt(const string& s) { const char* cs = s.c_str(); int i = atoi(cs); return i; } // Assume string is non-empty series of digits. unsigned long stringToULong(const string& s) { if (s.size() > 10 || (s.size() == 10 && s.compare("4294967296") >= 0)) { string msg = "stringToULong: number too large: "; msg.append(s); throw std::runtime_error(msg); } unsigned long i; istringstream iss(s); iss >> i; return i; } string boolToString(bool b) { if (b) return string("true"); else return string("false"); } // Is there platform sensitive way of doing this? Is endl platform sensitive? const string ENDLs ("\n"); bool hasSuffix(const string& s, const string& t) { string::size_type i = s.rfind(t, s.size()); return i != string::npos && (int) i == (int) s.size() - (int) t.size(); // (int) casts essential, since t might be 1 longer than s. // also need explicit comparison with npos, since otherwise npos gets // cast to -1. } bool hasPrefix(const string& s, const string& t) { string::size_type i = s.find(t, 0); return i != string::npos && i == 0; } bool hasSubstring(const string& s, const string& t) { string::size_type i = s.find(t, 0); return i != string::npos; } void appendCommaString(string& s, const string& t) { if (s.size() > 0) s.append(", "); s.append(t); return; } // if q == "/..." or p == "" then q else p + "/" + q string joinPaths(const string& p, const string& q) { if ((q.size() > 0 && q[0] == '/') || p.size() == 0) return q; else return p + "/" + q; } bool hasUpperCaseStart(const string& s) { return s.size() >= 0 && 'A' <= s[0] && s[0] <= 'Z'; } bool member(char c, const string& s) { string::size_type cpos = s.find(c); return cpos != string::npos; } // returns substring before first occurrence of c. // If no occurrence, returns whole string. string substringBefore(char c, const string& s) { string result(s); string::size_type cpos = s.find(c); if (cpos != string::npos) { // x ... x c y ... y // s[0] s[cpos] s[size-1] result.erase(cpos, s.size() - cpos); } return result; } // returns substring after first occurrence of c. // If no occurrence, returns empty string. string substringAfter(char c, const string& s) { string result(s); string::size_type cpos = s.find(c); if (cpos != string::npos) { // x ... x c y ... y // s[0] s[cpos] s[size-1] result.erase(0, cpos+1); return result; } return ""; } // Pull out words in string delimited by whitespace. 
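// Illustrative sketch, not part of the original source: behaviour of the
// affix and path helpers defined above, e.g.
//
//   hasSuffix("unit.vcg", ".vcg");     // true
//   hasSuffix("cg", "unit.vcg");       // false (hence the (int) casts above)
//   hasPrefix("procedure_p", "proc");  // true
//   joinPaths("out", "unit.vcg");      // "out/unit.vcg"
//   joinPaths("out", "/tmp/x.vcg");    // "/tmp/x.vcg"  (absolute path wins)
//   substringBefore('.', "unit.vcg");  // "unit"
//   substringAfter('.', "unit.vcg");   // "vcg"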
bool isWhitespaceChar(const char& c) { return c == ' ' || c == '\t' || c == '\n' || c == '\r';; } vector tokeniseString(const string& s) { vector toks; string tok; bool inTok = false; for (int i = 0; i != (int) s.size(); i++) { if (inTok) { if (isWhitespaceChar(s[i])) { toks.push_back(tok); tok.clear(); inTok = false; } else { tok.push_back(s[i]); } } else { // !inTok if (isWhitespaceChar(s[i])) { continue; } else { tok.push_back(s[i]); inTok = true; } } // END if inTok } // END for i if (inTok) toks.push_back(tok); return toks; } // Strip front and back whitespace from string string stripWhitespaceEnds(const string& s) { string result(s); string::size_type start = result.find_first_not_of(" \n\t\r"); if (start != string::npos && start != 0) { result.erase(0, start); } string::size_type end = result.find_last_not_of(" \n\t\r"); if (end != string::npos && end + 1 != result.size()) { result.erase(end + 1, result.size() - (end + 1)); } return result; } // Split s into components delimited by string delimiter // If delimiter at start (end), returns empty string at start (end) // Length of result always num delimiters + 1 vector splitString(const string& s, const string& delimiter) { vector result; size_t left = 0; while (true) { size_t right = s.find_first_of(delimiter, left); result.push_back(s.substr(left, right - left)); if (right == string::npos) break; left = right + delimiter.size(); } return result; } // Join together strs, separating them with delimiter. string concatStrings (const vector& strs, const string& delimiter) { if (strs.size() == 0) return ""; string result = strs.at(0); for (int i = 1; i != (int) strs.size(); i++ ) { result += delimiter; result += strs.at(i); } return result; } // If s has oldSuffix, strip it and add on instead new prefix and suffix. string fixSuffix(const string& s, const string& oldSuffix, const string& newPrefix, const string& newSuffix) { string::size_type pos = s.rfind(oldSuffix, s.size()); if (pos == string::npos || pos + oldSuffix.size() != s.size()) return s; string result(s); result.erase(pos); // Take off old suffix return newPrefix + result + newSuffix; return s; } //======================================================================== // Command line option processing //======================================================================== map > commandLineOptions; vector commandLineOptionsList; vector plainCommandLineOptionsList; // Same as above, but for // showing the supplied // options in -plain mode. vector nonOptionArgs; /* processCommandArgs initialises the commandLineOptions map: Option Effect - "" added to end of list of map entries for -= | -=none | -=default | -=empty Option map entry for is erased -= added to end of list of map entries for If an entry list exists for a given key, it will always contain at least one entry. */ vector processCommandArgs (int argc, char *argv[]) { for (int i = 1; i != argc; i++) { string arg(argv[i]); if (arg[0] == '-') { string key(arg.replace(0,1,"")); commandLineOptionsList.push_back(key); string val; string::size_type eqpos = key.find('='); bool valSupplied = eqpos != string::npos; if (valSupplied) { val = key; // k ... k = v ... 
v // c[0] c[eqpos] c[size-1] key.erase(eqpos, key.size() - eqpos); val.erase(0, eqpos + 1); } // cout << "key: #" << key <<"# val: #" << val <<"#" << endl; if (valSupplied && (val == "" || val == "false" || val == "none" || val == "default")) { commandLineOptions.erase(key); continue; } pair >::iterator, bool> insResult = commandLineOptions.insert ( make_pair(key, vector(1,val)) ); // Check if the value is looking like a path; if so don't // show it in -plain mode. bool is_path = (val.find("/") != string::npos) || (val.find("\\") != string::npos); if (is_path) { plainCommandLineOptionsList.push_back(key + "=[SUPRESSED IN PLAIN MODE]"); } else { plainCommandLineOptionsList.push_back(arg); } if (! insResult.second) { // If already one or more entries for key in map, // insertion did not take place. // Add new value to vector of existing values. map >::iterator oldMapEntry = insResult.first; oldMapEntry->second.push_back(val); } } else { nonOptionArgs.push_back(arg); } } return nonOptionArgs; } // Return true just when option set and value is not // "false", "none" or "default". bool option(const string& s) { map >::iterator i = commandLineOptions.find(s); return i != commandLineOptions.end(); } string optionVal(const string& s) { map >::iterator i = commandLineOptions.find(s); if (i != commandLineOptions.end()) { if (i->second.size() == 0) { printMessage(ERRORm, "optionVal: Found empty val list for option " + s); return string(""); } return i ->second.back(); } else { printMessage(ERRORm, "optionVal: Erroneous lookup of value of option " + s); return string(""); } } vector optionVals(const string& s) { map >::iterator i = commandLineOptions.find(s); if (i != commandLineOptions.end()) { return i ->second; } else { return vector(); } } int intOptionVal(const string& s) { map >::iterator i = commandLineOptions.find(s); if (i != commandLineOptions.end()) { if (i->second.size() == 0) { printMessage(ERRORm, "intOptionVal: Found empty val list for option " + s); return 0; } return stringToInt(i ->second.back()); } else { printMessage(ERRORm, "intOptionVal: Erroneous lookup of value of option " + s); return 0; } } //======================================================================== // External executable and timeout //======================================================================== // On Windows ulimit is not usable, the way to support timeout on this // platform is to set the timeout-driver option to some executable that // will monitor the executable and kill it when necessary. The timeout is // the first parameter and is specificed with the shell-timeout parameter. string withTimeoutAndIO(const string &cmd, const string &inputFileName, const string &outputFileName, const string &errorFileName) { string new_cmd = cmd; string timeout_driver = "./timeout.sh"; if (option("timeout-driver")) { timeout_driver = optionVal("timeout-driver"); } if (option("shell-timeout")) { // Use shell-level timeout utility // This will accept integer or fixed point time in sec. new_cmd = timeout_driver + " " + optionVal("shell-timeout") + " " + new_cmd; } if (option("ulimit-timeout") || option("ulimit-memory")) { #ifdef _WIN32 cerr << endl << "Error ulimit not supported on Windows " << endl; exit(1); #else // Use bash built-in timeout facility // This accepts integer time in sec and integer memory limit in kbytes. 
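// Example (illustrative): with -ulimit-timeout=60 -ulimit-memory=1000000 the
// command assembled below has roughly the shape
//   ( ulimit -t 60 -v 1000000; <solver-cmd> <input-file> ) 1> <out> 2> <err>
// so the resource limits apply to the subshell in which the prover runs.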
// Need to run in a subshell so we can catch output to stderr on // termination string tmp = "( ulimit"; if (option("ulimit-timeout")) { tmp += " -t " + optionVal("ulimit-timeout"); } if (option("ulimit-memory")) { tmp += " -v " + optionVal("ulimit-memory"); } tmp += "; " + new_cmd + inputFileName + " )"; new_cmd = tmp; #endif } else { new_cmd += inputFileName ; } new_cmd += " 1> " + outputFileName + " 2> " + errorFileName; return new_cmd; } //======================================================================== // Handling information on each unit. //======================================================================== // token Selected Set entry Meaning // . , Select goal concl // ,0 Select goal all concls. // ::= * // ::= // ? // | // // = // // | . // | .fdl // | .rlu // | .rul // // Allocate storage for static fields of class. UnitInfo::Status UnitInfo::status = UnitInfo::BEFORE_RANGE; UnitInfo::UnitInfo(const string& unitInfoStr) : dirRLURulesEnd(0), unitRLURulesEnd(0) { vector toks(tokeniseString(unitInfoStr)); unitName = toks[0]; { if (option("prefix")) unitPathPrefix = optionVal("prefix"); // else unitPathPrefix == "" by default vector unitNameParts = splitString(unitName,"/"); unitFileName = unitNameParts.back(); unitNameParts.pop_back(); unitPath = concatStrings(unitNameParts, "/"); if (unitNameParts.size() > 0) { unitDirName = unitNameParts.back(); } else if (unitPathPrefix.size() > 0) { vector prefixParts = splitString(unitPathPrefix, "/"); unitDirName = prefixParts.back(); } // else unitDirName == "" by default } for (int i = 1; i != (int) toks.size(); i++ ) { string token(toks[i]); string goalConcl; vector splitToken = splitString(token, "?"); if (splitToken.size() > 1) { // tag has been specified // Process option if tag in active tags // Otherwise skip option string tag (splitToken.at(0)); vector activeTags; if (option("active-unit-tags")) { activeTags = splitString(optionVal("active-unit-tags"), ":"); } if (find(activeTags.begin(), activeTags.end(), tag) != activeTags.end()) { goalConcl = splitToken.at(1); } else { continue; } } else { // No tag specified for unit option goalConcl = token; } string goal; string concl; if (goalConcl.find(".",0) != string::npos) { goal = substringBefore('.', goalConcl); concl = substringAfter('.', goalConcl); } else { goal = goalConcl; concl = "0"; } if (isIntString(goal) && isIntString(concl)) selectedSet.insert(make_pair(stringToInt(goal), stringToInt(concl))); else if (concl == "fdl") declFiles.push_back(goalConcl); else if (concl == "rul" || concl == "rlu") ruleFiles.push_back(goalConcl); else printMessage(ERRORm, "Unrecognised unit listing info: " + goalConcl); } bool fromUnitGiven = option("from-unit"); bool atFromLimit = fromUnitGiven ? optionVal("from-unit") == unitName : false; bool toUnitGiven = option("to-unit"); bool atToLimit = toUnitGiven ? optionVal("to-unit") == unitName : false; // Logic here is to have unitIncluded true in range between from-unit // and to-unit inclusive, with range extended to end when respective // limit is missing. if (status == BEFORE_RANGE) { if (!fromUnitGiven || atFromLimit) { unitIncluded = true; if (toUnitGiven && atToLimit) { status = AFTER_RANGE; } else { status = IN_RANGE; } } else { unitIncluded = false; } } else if (status == IN_RANGE) { unitIncluded = true; if (toUnitGiven && atToLimit) status = AFTER_RANGE; } else { // status == AFTER_RANGE unitIncluded = false; } startGoal = (option("from-goal") && atFromLimit) ? 
intOptionVal("from-goal") : 1; } bool UnitInfo::include(int goal, int concl) { bool goalSliceSelected = selectedSet.find(make_pair(goal, concl)) != selectedSet.end() || selectedSet.find(make_pair(goal, 0)) != selectedSet.end(); if (option("include-selected-goals")) { return unitIncluded && goal >= startGoal && goalSliceSelected; } else if (option("exclude-selected-goals")) { return unitIncluded && goal >= startGoal && ! goalSliceSelected; } else { return unitIncluded && goal >= startGoal; } } bool UnitInfo::includeUnit() { return unitIncluded; } //======================================================================== // Goal origins formatting //======================================================================== string currentUnit; // [/] string currentUnitPath; // string currentUnitFile; // string currentUnitKind; // procedure | function | task_type string currentGoalNumStr; // string currentGoalOrigins; // Info about where in program goal comes from /* updateGoalInfo expects string of form: [function_|procedure_|task_type_]_ where ::= path(s) from to | checks of refinement integrity ::= start | from assertion of line 49 | from default assertion of line 49 ::= finish | run-time check associated with [\n][ ]+ statement of line 201 | assertion of line 49 | default assertion of line 49 | precondition check associated with [\n][ ]+ statement of line xx: ; Current version just captures minimum. Could also do abbreviations and line numbers. POGS .sum files use From::= start | | <> (empty) (in case of refinement) ; To::= rtc check @ | assert @ finish | assert @ | refinement | pre check @ 1514 ; POGS doesn't seem to distinguish between assertions and default assertions. */ // Called at start of processUnit in main.cc void initCurrentUnitInfo(UnitInfo* unitInfo) { currentUnit = unitInfo->getUnitName(); currentUnitPath = unitInfo->getUnitPath(); currentUnitFile = unitInfo->getUnitFileName(); currentGoalNumStr = ""; currentGoalOrigins = ","; // "," since two fields in report currentUnitKind = ""; currentConcl = 0; return; } void updateCurrentGoalInfo(const string& s) { extractGoalInfo(s, currentUnitKind, currentGoalNumStr, currentGoalOrigins); return; } void extractGoalInfo(const string& s, string& unitKind, string& goalNum, string& goalOrigins) { vector ss = tokeniseString(s); string currentGoalName = ss[0]; // __ vector goalNameParts = splitString(currentGoalName, "_"); // Could grab unit kind instead from VCG/SIV file header. 
Cleaner unitKind = goalNameParts.front(); if (unitKind == "task") unitKind = "task_type"; goalNum = goalNameParts.back(); string from; string to; if (ss[1] == "path(s)") { vector::iterator i = std::find(ss.begin(), ss.end(), "to"); if (i == ss.end()) { from = "*** path missing destination ***"; } else { i--; from = *i; i++;i++; if (*i == "finish") to = "assert @ finish"; else if (*i == "run-time") to = "rtc check @ " + ss.back(); else if (*i == "assertion" || *i == "default") to = "assert @ " + ss.back(); else if (*i == "precondition") to = "pre check @ " + ss.back(); else to = "*** unrecognised path destination ***"; } } else if (ss[1] == "checks" && ss[3] == "refinement") { to = "refinement"; } else { to = "*** Unrecognised goal description ***"; } goalOrigins = from + "," + to; return; } //======================================================================== // Report file management //======================================================================== ofstream csvStream; ofstream logStream; ofstream sumStream; void openReportFiles() { string reportName("report"); if (option("report")) reportName = optionVal("report"); if (option("report-dir")) { ensureDirPathExists(optionVal("report-dir")); reportName = optionVal("report-dir") + "/" + reportName; } string csvFile(reportName + ".vct"); string logFile(reportName + ".vlg"); string sumFile(reportName + ".vsm"); csvStream.open(csvFile.c_str()); if (csvStream.fail()) { cerr << endl << "Error on trying to open file " << csvFile << endl; exit(1); } logStream.open(logFile.c_str()); if (logStream.fail()) { cerr << endl << "Error on trying to open file " << logFile << endl; exit(1); } sumStream.open(sumFile.c_str()); if (sumStream.fail()) { cerr << endl << "Error on trying to open file " << sumFile << endl; exit(1); } // Write a prelude to log file identifying run. if (! option("plain")) { std::time_t rawtime; struct std::tm * timeinfo; std::time (&rawtime); timeinfo = std::localtime(&rawtime); char the_time[100]; std::strftime(the_time, sizeof (the_time), "%d-%b-%Y, %H:%M:%S", timeinfo); logStream << "Date: " << the_time << endl; } else { logStream << endl; } char hostname[100]; #ifdef _WIN32 DWORD len = 100; GetComputerName(hostname, &len); #else gethostname(hostname, 100); #endif if (! option("plain")) { logStream << "Host: " << hostname << endl; } logStream << "Non-option args: " << endl; for (int i = 0; i != (int) nonOptionArgs.size(); i++) { logStream << " " << nonOptionArgs.at(i) << endl; } logStream << "Option args: " << endl; if (option("plain")) { for (int i = 0; i != (int) plainCommandLineOptionsList.size(); i++) { logStream << " -" << plainCommandLineOptionsList.at(i) << " \\" << endl; } } else { for (int i = 0; i != (int) commandLineOptionsList.size(); i++) { logStream << " -" << commandLineOptionsList.at(i) << " \\" << endl; } } logStream << endl; return; } void closeReportFiles() { csvStream.close(); logStream.close(); sumStream.close(); return; } //======================================================================== // Message reporting //======================================================================== // See utility.hh for documentation. 
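// Example (illustrative) of the intended use of the severity levels defined
// below, assuming messageThreshold has been initialised by the caller:
//
//   printMessage(INFOm,  "Processing unit " + currentUnit);
//   printMessage(FINERm, "Goal " + currentGoalNumStr
//                        + " concl " + intToString(currentConcl));
//   printMessage(ERRORm, "Cannot open report file");
//
// Only messages at or above messageThreshold reach the log; ERROR messages
// are additionally echoed to stderr by printMessageWithHeader.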
const int ERRORm = 6; const int WARNINGm = 5; const int INFOm = 4; const int FINEm = 3; const int FINERm = 2; const int FINESTm = 1; int messageThreshold; int currentConcl; string currentHypsKinds; string currentConclKinds; string goalSliceTime; Timer messageTimer; //+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // printMessageWithHeader //+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ void printMessageWithHeader(const string& header, const string& message) { string s(header); // Message origins line s += ": " + messageTimer.toString() + "s "; s += "unit: " + currentUnit; if (currentGoalNumStr.size() > 0) { s += " goal: " + currentGoalNumStr; } if (currentConcl > 0 && !option("fuse-concls") ) { s += " concl: " + intToString(currentConcl); } if (header == "ERROR") cerr << endl << s << endl; s += ENDLs; // Message contents line + extra blank line separator after. s += message + ENDLs + ENDLs; logStream << s; return; } //+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // printMessageAux //+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ int printMessageAux1(int messageLevel, const string& message) { string header; if (messageLevel == FINESTm) header = "FINEST"; else if (messageLevel == FINERm) header = "FINER"; else if (messageLevel == FINEm) header = "FINE"; else if (messageLevel == INFOm) header = "INFO"; else if (messageLevel == WARNINGm) header = "WARNING"; else header = "ERROR"; printMessageAux2(messageLevel); printMessageWithHeader(header, message); return 0; } int numWarningMessages = 0; int numErrorMessages = 0; int printMessageAux2(int messageLevel) { if (messageLevel == WARNINGm) numWarningMessages++; if (messageLevel == ERRORm) numErrorMessages++; return 0; } //+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // printMessageOnOptionAux //+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ int printMessageOnOptionAux(const string& opt, const string& message) { string header("OPTION(" + opt + ")"); printMessageWithHeader(header, message); return 0; } //======================================================================== // CSV formatting //======================================================================== // For CSV format, see http://en.wikipedia.org/wiki/Comma-separated_values //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // csvDigest //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Break up CSV string into its components // Basic. Should also: // - handle quoted " chars // - strip head and tail whitespace from each value. 
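// Illustrative sketch of the csvConcat / csvDigest round trip (declared in
// utility.hh). Guarded out of the normal build; the VCT_UTILITY_EXAMPLES
// guard name is an assumption for illustration only.
#ifdef VCT_UTILITY_EXAMPLES
#include <cassert>
static void csvExample() {
    vector<string> fields;
    fields.push_back("stack/push");
    fields.push_back("rtc check @ 12, path from start");  // embedded comma
    fields.push_back("true");
    string record = csvConcat(fields);        // quotes the second field
    vector<string> back = csvDigest(record);  // strips the quotes again
    assert(back.size() == 3 && back[1] == fields[1]);
}
#endif // VCT_UTILITY_EXAMPLES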
vector csvDigest(const string& s) { vector toks; string tok; bool inQuote = false; for (int i = 0; i != (int) s.size(); i++) { char c = s[i]; if (inQuote) { if (c == '"') { inQuote = false; } else { tok.push_back(c); } } else { // !inQuote if (c == '"') { inQuote = true; } else if (c == ',') { toks.push_back(stripWhitespaceEnds(tok)); tok.clear(); } else { tok.push_back(c); } } // END if inQuote } // END for i toks.push_back(stripWhitespaceEnds(tok)); return toks; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // csvConcat //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Concatenate together strings to form CSV record string csvConcat(const vector& ss) { string result; for (int i = 0; i != (int) ss.size(); i++) { if (ss[i].find(',') != string::npos) { result.push_back('"'); result.append(ss[i]); result.push_back('"'); } else { result.append(ss[i]); } if (i != (int) ss.size() - 1) { result.append(","); } } return result; } //======================================================================== // CSV file reporting //======================================================================== //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // printCSVRecord //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Print a comma separated value record. // Quote remarks field, since might have embedded ","s. void printCSVRecord(const string& status, const string& remarks) { printCSVRecordAux(currentUnitKind, currentGoalOrigins, currentGoalNumStr, currentConcl, status, goalSliceTime, remarks); return; } void printCSVRecordAux(const string& unitKind, const string& goalOrigins, const string& goalNumString, int conclNum, const string& status, const string& queryTime, const string& remarks) { if (! option("gstime")) goalSliceTime = ""; string kindString(option("csv-reports-include-unit-kind") ? unitKind : ""); string originsString(option("csv-reports-include-goal-origins") ? goalOrigins : ","); string conclString(conclNum > 0 && !option("fuse-concls") ? intToString(conclNum) : ""); string quotedRemarks(remarks.size() > 0 ? "\"" + remarks + "\"" : remarks); // Fields csvStream << currentUnitPath << "," // 1 << currentUnitFile << "," // 2 << kindString << "," // 3 << originsString << "," // 4, 5 << goalNumString << "," // 6 << conclString << "," // 7 << status << "," // 8 << queryTime << "," // 9 << quotedRemarks << "," // 10 << currentHypsKinds << "," // 11 << currentConclKinds // 12 << endl; return; } //======================================================================== // Reporting statistics //======================================================================== // Solver interface code responsible for updating globals. 
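// The .vsm summary record written by printStats below has the columns:
//   report name, ERROR count, WARNING count, total concls,
//   true, unproven, timeout, error (as counts), the same four as
//   percentages, and finally the total time (omitted with -plain).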
int trueConcls; int unprovenConcls; int errorConcls; int timeoutConcls; // Count also included in unproven Concls Timer totalTime; void printStats() { bool plain_mode = option("plain"); string reportName("report"); if (option("report")) reportName = optionVal("report"); ostringstream outStream; outStream << endl << endl << "Total ERROR messages: " << numErrorMessages << endl << "Total WARNING messages: " << numWarningMessages << endl << endl; int total = trueConcls + unprovenConcls + errorConcls; float ftotal = total; float fTrueConcls = trueConcls * 100 / ftotal; float fUnprovenConcls = unprovenConcls * 100 / ftotal; float fTimeoutConcls = timeoutConcls * 100 / ftotal; float fErrorConcls = errorConcls * 100 / ftotal; outStream << setprecision(1); // 1 decimal place for floats. outStream << fixed; outStream << "Summary Stats: " << endl; outStream << " true: " << setw(4) << trueConcls << " (" << setw(4) << fTrueConcls << "%)" << endl; outStream << "unproven: " << setw(4) << unprovenConcls << " (" << setw(4) << fUnprovenConcls << "%) "; // << endl; if (! plain_mode) { outStream << "[timeout: " << setw(4) << timeoutConcls << " (" << setw(4) << fTimeoutConcls << "%) ]" << endl; } else { outStream << endl; } outStream << " error: " << setw(4) << errorConcls << " (" << setw(4) << fErrorConcls << "%)" << endl; outStream << " total: " << setw(4) << total << endl << endl; if (! plain_mode ) { outStream << "Time: " << totalTime.toLongString() << endl; } logStream << outStream.str(); if (option("echo-final-stats")) { cout << outStream.str() << endl; // Add on root of report files. string reportName("report"); if (option("report")) reportName = optionVal("report"); if (option("report-dir")) { reportName = optionVal("report-dir") + "/" + reportName; } cout << "Report files: " << reportName << ".*" << endl; } sumStream << reportName << ","; sumStream << numErrorMessages << "," << numWarningMessages << ","; sumStream << setprecision(1); sumStream << fixed; sumStream << total << "," ; sumStream << trueConcls << "," ; sumStream << unprovenConcls << "," ; sumStream << timeoutConcls << "," ; sumStream << errorConcls << "," ; sumStream << setw(4) << fTrueConcls << "," ; sumStream << setw(4) << fUnprovenConcls << "," ; sumStream << setw(4) << fTimeoutConcls << "," ; sumStream << setw(4) << fErrorConcls << "," ; if (!option("plain")) { sumStream << totalTime.toString(); } sumStream << endl; } //======================================================================== // Timing //======================================================================== /* Previous versions ----------------- 1. Using clock (3). This returned a clock_t value (32 bit unsigned on 32-bit system, 64 bit unsigned on 64-bit system it seems from experiments. For time in sec had to divide by CLOCKS_PER_SEC = 1000000, with disadvantage that time would wrap in under an hour on 32 bit systems. clock() also doesn't give breakdown of time into user and system time, and doesn't give time spent in child processes. 2. Using times (2). Returns a struct tms { clock_t tms_utime; // user time clock_t tms_stime; // system time clock_t tms_cutime; // user time of children clock_t tms_cstime; // system time of children }; where must divide by sysconf(_SC_CLK_TCK) to get time in seconds. With SL5 and SL6, this is 100, giving a resolution of 0.01sec. This is rather coarse, given some SMT solver run times are of this order. Current version --------------- Uses getrusage (2). 
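   (getrusage reports user and system time for the process and for its
   waited-for children as struct timeval values, i.e. seconds plus
   microseconds; getOSTimes below folds each of these into a single
   microsecond count.)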
time (7) remarks this has a clock resolution of HZ which can be 0.01, 0.001 sec a couple of values inbetween, depending on kernel configuration. The SL6 appears to use 0.001, so better than times(). A local class Time is defined, similar to struct tms, except that it definitely has 64 bit values for the fields. struct rusage { struct timeval ru_utime; // user time used struct timeval ru_stime; // system time used ... } struct timeval { __time_t tv_sec; // Seconds. __suseconds_t tv_usec; // Microseconds. }; */ void Timer::getOSTimes (Timer::Time *tOS) { #ifdef _WIN32 FILETIME CreationTime; FILETIME ExitTime; FILETIME KernelTime; FILETIME UserTime; ULARGE_INTEGER conv; GetProcessTimes (GetCurrentProcess(), &CreationTime, &ExitTime, &KernelTime, &UserTime); conv.u.LowPart = UserTime.dwLowDateTime; conv.u.HighPart = UserTime.dwHighDateTime; tOS->uTime = (long long) conv.QuadPart; conv.u.LowPart = KernelTime.dwLowDateTime; conv.u.HighPart = KernelTime.dwHighDateTime; tOS->sTime = (long long) conv.QuadPart; // There is no notion of child process on windows, set this to 0 tOS->cuTime = 0ULL; tOS->csTime = 0ULL; #else // To use times() again. /* struct tms tms_time; times(&tms_time); tOS->sTime = tms_time->tms_stime; tOS->uTime = tms_time->tms_utime; tOS->csTime = tms_time->tms_cstime; tOS->cuTime = tms_time->tms_cutime; */ struct rusage selfRUsage; struct rusage childRUsage; getrusage(RUSAGE_SELF, &selfRUsage); getrusage(RUSAGE_CHILDREN, &childRUsage); timeval sT = selfRUsage.ru_stime; timeval uT = selfRUsage.ru_utime; timeval csT = childRUsage.ru_stime; timeval cuT = childRUsage.ru_utime; tOS->sTime = ((unsigned long long int) sT.tv_sec) * 1000000 + sT.tv_usec; tOS->uTime = ((unsigned long long int) uT.tv_sec) * 1000000 + uT.tv_usec; tOS->csTime = ((unsigned long long int) csT.tv_sec) * 1000000 + csT.tv_usec; tOS->cuTime = ((unsigned long long int) cuT.tv_sec) * 1000000 + cuT.tv_usec; #endif } Timer::Timer() { #ifdef _WIN32 // On Windows the granularity of a FILETIME is 100-nanosecond. 
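  // i.e. 10^7 FILETIME ticks per second, which is the value assigned below.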
  ticksPerSec = 10000000;
#else
  // If using times()
  // ticksPerSec = sysconf(_SC_CLK_TCK);

  // If using getrusage()
  ticksPerSec = 1000000;  // This is the resolution of Time values,
                          // *not* the timer resolution
#endif
  getOSTimes(&startTimeTuple);
}

void Timer::restart() {
    getOSTimes(&startTimeTuple);
}

void Timer::grabTimes() {
    Time endTimeTuple;
    getOSTimes(&endTimeTuple);

    uTime  = ((double) (endTimeTuple.uTime  - startTimeTuple.uTime))  / ticksPerSec;
    sTime  = ((double) (endTimeTuple.sTime  - startTimeTuple.sTime))  / ticksPerSec;
    cuTime = ((double) (endTimeTuple.cuTime - startTimeTuple.cuTime)) / ticksPerSec;
    csTime = ((double) (endTimeTuple.csTime - startTimeTuple.csTime)) / ticksPerSec;
}

string Timer::toString() {
    grabTimes();
    ostringstream oss;
    oss << setprecision(3) << fixed;
    oss << (uTime + sTime + cuTime + csTime);
    return oss.str();
}

string Timer::toLongString() {
    grabTimes();
    double totalTime = uTime + sTime + cuTime + csTime;
    double childTime = cuTime + csTime;
    double sysTime = sTime + csTime;
    double percentChild = childTime * 100 / totalTime;
    double percentSys = sysTime * 100 / totalTime;

    ostringstream oss;
    oss << setprecision(2) << fixed
        << totalTime << "s (u: " << uTime << "s, s: " << sTime
        << "s, cu: " << cuTime << "s, cs: " << csTime << "s, "
        << setprecision(1)
        << percentChild << "% ch, " << percentSys << "% sys)" << endl;
    return oss.str();
}

//==========================================================================
// File system access & manipulating names for working files
//==========================================================================

vector<string> listDir (const string& dir) {
    DIR *dp;
    struct dirent *ep;
    vector<string> contents;

    dp = opendir (dir.c_str());
    if (dp == NULL) {
        cerr << "Error on trying to list directory " << dir << endl;
        return contents;
    }
    while ( (ep = readdir (dp)) ) {
        contents.push_back(ep->d_name);
    }
    (void) closedir (dp);
    return contents;
}

// Returns 0 if remove successful.
// Returns -1 on failure and sets errNumber and errorMessage.
int removeFile (const string& file, int& errNumber, string& errorMessage) {
    int failure = unlink(file.c_str());
    if (failure != 0) {
        errNumber = errno;  // Record the error code so callers can test it.
        errorMessage = string( strerror(errno));
    }
    return failure;
}

// Attempt to remove file.  Silent if file doesn't exist.
void tryRemoveFile (const string& file) {
    int errNumber;
    string errMessage;
    int failure = removeFile(file, errNumber, errMessage);
    if (failure != 0 && errNumber != ENOENT) {
        printMessage(ERRORm,
                     "Attempt at removing file " + file
                     + " failed with error: " + errMessage);
    }
    return;
}

bool readableFileExists(const string& s) {
    return (access(s.c_str(), R_OK) == 0);
}

void ensureDirPathExists(const string& s) {
    // Ensure dirPath != ""
    string dirPath(s.size() == 0 ? "." : s);

    vector<string> parts = splitString(dirPath, "/");
    bool absolute = parts.at(0).size() == 0;
    if (absolute) parts.erase(parts.begin());

    string prefix = absolute ? "/" : "";
    for (int i = 0; i < (int) parts.size(); i++) {
        prefix.append(parts.at(i));
        if (access(prefix.c_str(), F_OK) != 0) {
            // prefix does not exist
#ifdef _WIN32
            int status = mkdir (prefix.c_str());
#else
            int status = mkdir (prefix.c_str(), S_IRWXU);
#endif
            if (status != 0) {
                // throw std::runtime_error("Failed to make directory " + prefix);
            }
        }
        prefix.append("/");
    }
}

// Return variation on input s containing no '/' characters.
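// e.g. flattenPathName("lib/stacks/stack") yields "lib-stacks-stack".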
string flattenPathName(const string& s) { return concatStrings( splitString(s, "/"), "-"); } string getFullGoalFileRoot(const string& defaultFileRoot, const string& unitName, // = dir1/.../dirn/unitFileRoot int goalNum, int conclNum) { string goalRootDir = "/tmp"; if (option("working-dir")) goalRootDir = optionVal("working-dir"); if (goalRootDir.size() == 0) goalRootDir = "."; string goalName; if (option("hier-working-files") || option("flat-working-files") ) { goalName = unitName + "-" + intToString(goalNum); if (!option("fuse-concls")) { goalName += "-" + intToString(conclNum); } if (option("flat-working-files")) { goalName = flattenPathName(goalName); } } else if (option("unique-working-files")) { char hostname[100]; #ifdef _WIN32 DWORD len = 100; GetComputerName(hostname, &len); #else gethostname(hostname, 100); #endif goalName = defaultFileRoot + "-" + hostname + "-" + intToString(getpid()); } else { goalName = defaultFileRoot; } // Join working-dir to goalName string fullGoalName = goalRootDir + "/" + goalName; // Ensure existence of directories mentioned in fullGoalName size_t lastSlash = fullGoalName.find_last_of('/'); string goalDirPath = fullGoalName.substr(0, lastSlash); ensureDirPathExists(goalDirPath); return fullGoalName; } spark-2012.0.deb/victor/vct/src/utility.hh0000644000175000017500000003155511753202341017363 0ustar eugeneugen//========================================================================== //========================================================================== // UTILITY.HH //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ #ifndef UTILITY_HH #define UTILITY_HH #include using std::string; #include using std::vector; #include using std::set; #include using std::pair; #include #include #include #include #include #include //======================================================================== // String functions and constants //======================================================================== bool isIntString(const string& s); string intToString(int i); string uLongToString(unsigned long i); int stringToInt(const string& s); unsigned long stringToULong(const string& s); string boolToString(bool b); extern const string ENDLs; bool hasSuffix(const string& s, const string& t); // s has suffix t bool hasPrefix(const string& s, const string& t); // s has prefix t bool hasSubstring(const string& s, const string& t); // s has substring t void appendCommaString(string& s, const string& t); // s=="" ? t : "s, t" // joinPaths: if q == "/..." 
or p == "" then q else p + "/" + q string joinPaths(const string& p, const string& q); bool hasUpperCaseStart(const string& s); bool member(char c, const string& s); // c is char of s // returns substring before first occurrence of c. // If no occurrence, returns whole string. string substringBefore(char c, const string& s); string substringAfter(char c, const string& s); vector tokeniseString(const string& s); string stripWhitespaceEnds(const string& s); vector splitString(const string& s, const string& delimiter); string concatStrings (const vector& strs, const string& delimiter); string fixSuffix(const string& s, const string& oldSuffix, const string& newPrefix, const string& newSuffix); //======================================================================== // Set operations // ======================================================================== // Wrap the set operations from to make their invocation // more compact As needed, could make these more generic. template void setUnion(set& a, set& b, set& c) { set_union(a.begin(), a.end(), b.begin(), b.end(), inserter(c,c.begin())); } template void setSymDiff(set& a, set& b, set& c) { set_symmetric_difference(a.begin(), a.end(), b.begin(), b.end(), inserter(c,c.begin())); } template void setIsect(set& a, set& b, set& c) { set_intersection(a.begin(), a.end(), b.begin(), b.end(), inserter(c,c.begin())); } template bool setMember(T& x, set& a) { return a.find(x) != a.end(); } //======================================================================== // Command line option processing //======================================================================== // Expect options of form: // - // -= // Same key can be given multiple times. // Records command line options and returns non-option arguments. vector processCommandArgs (int argc, char *argv[]); bool option(const string& s); // Has option s been set? // returns false both when option never seen, and when last value is // one of "false", "default" or "none". string optionVal(const string& s);// Get last string value of option s vector optionVals(const string& s); // Get all values provided for s int intOptionVal(const string& s);// Get int value of option s //======================================================================== // External executable and timeout //======================================================================== string withTimeoutAndIO(const string &cmd, const string &inputFileName, const string &outputFileName, const string &errorFileName); // Returns new command with timeout support //======================================================================== // Handling information on each unit. //======================================================================== // class UnitInfo { private: enum Status {BEFORE_RANGE, IN_RANGE, AFTER_RANGE}; static Status status; // unitName = D1/.../Dk/U (as supplied on command line or .lis file) // program unit name U is w/o .fdl .rls .vcg suffix. 
// unitPathPrefix = string unitPathPrefix;// P1/.../Pn (as supplied with -prefix option) string unitName; // D1/.../Dk/U string unitPath; // D1/.../Dk ("" if k = 0) string unitDirName; // if k>0 then Dk else if n>0 then Pn else "" string unitFileName; // U set excludedRules; set > selectedSet; vector declFiles; vector ruleFiles; bool unitIncluded; int startGoal; public: int dirRLURulesEnd; int unitRLURulesEnd; UnitInfo(const string& s); string getUnitPathPrefix() {return unitPathPrefix;} string getUnitName() { return unitName;} string getUnitPath() {return unitPath;} string getUnitDirName() {return unitDirName;} string getUnitFileName() {return unitFileName;} string getFullUnitName() { // P1/.../Pn/D1/.../Dk/U return joinPaths(unitPathPrefix, unitName); } set getExcludedRules() {return excludedRules;} void addExcludedRule(int rNum) {excludedRules.insert(rNum);} vector getDeclFiles() {return declFiles; } vector getRuleFiles() {return ruleFiles; } bool includeUnit(); bool include(int goal, int concl); }; //======================================================================== // Goal origins formatting //======================================================================== void formatGoalOrigins(const string& s, string& goalNumber, string& origins); //======================================================================== // Report file management //======================================================================== extern std::ofstream logStream; void openReportFiles(); void closeReportFiles(); //======================================================================== // Message reporting //======================================================================== // Defines: // // printMessage(int messageLevel, const string& message) // // Levels are severity levels, the higher the level, the more severe the // message. // Only messages with priority level greater than or equal to set // threshold are output // Levels are: extern const int ERRORm; // Highest extern const int WARNINGm; extern const int INFOm; extern const int FINEm; extern const int FINERm; extern const int FINESTm; // Lowest // Guide to usage of levels: // 1 message per unit INFO // tracing phases of unit 4 messages / unit FINE // 1 brief message per goal/concl FINER // details of every transaction. FINEST // Message reporting relies on users initialising and maintaining the // following global variables: extern int messageThreshold; extern string currentGoalNumStr; extern int currentConcl; extern string currentHypsKinds; extern string currentConclKinds; extern string goalSliceTime; // Functions updating state of global variables used by message reporting void initCurrentUnitInfo(UnitInfo* unitInfo); void updateCurrentGoalInfo(const string& s); // Functional version of updateCurrentGoalInfo void extractGoalInfo(const string& s, string& unitKind, string& goalNum, string& goalOrigins); // Primitive used by printMessage, but also callable stand-alone. void printMessageWithHeader(const string& header, const string& message); // Macro provided to avoid expense of Node printing when not needed. // Use of `conditional' expression is a hack so that printMessage can be // used as expression in expression statement: // // printMessage(.., ..) ; // // Condition expressions don't allow void values of 2nd and 3rd args, so // use dummy int values. #define printMessage(messageLevel, message) \ ( (messageThreshold <= (messageLevel)) \ ? 
printMessageAux1((messageLevel), (message)) \ : printMessageAux2((messageLevel)) ) int printMessageAux1(int messageLevel, const string& message); int printMessageAux2(int messageLevel); // Variation where printing is enabled by user-supplied option. // By convention, use option names starting with P to make them stand // out and avoid accidental clashes. #define printOnOption(opt, message) \ (option(opt) \ ? printMessageOnOptionAux(opt, message) \ : 0 ) int printMessageOnOptionAux(const string& opt, const string& message); //======================================================================== // CSV formatting //======================================================================== vector csvDigest(const string& s); string csvConcat(const vector& ss); //======================================================================== // CSV file management //======================================================================== void printCSVRecordAux(const string& unitKind, const string& goalOrigins, const string& goalNumString, int conclNum, const string& status, const string& queryTime, const string& remarks); void printCSVRecord(const string& status, const string& remarks); //======================================================================== // Reporting statistics //======================================================================== // Main routine responsible for initialising globals. // Solver interface code responsible for updating globals. extern int trueConcls; extern int unprovenConcls; extern int errorConcls; extern int timeoutConcls; void printStats(); //======================================================================== // Timing //======================================================================== // Timer set running when created. class Timer { private: class Time { public: // user, sys, child user and child sys times // Times as multiples of (1/ticksPerSec) unsigned long long int uTime; unsigned long long int sTime; unsigned long long int cuTime; unsigned long long int csTime; }; Time startTimeTuple; void getOSTimes(Time *tOS); unsigned int ticksPerSec; void grabTimes(); // Times in sec since start time double uTime; double sTime; double cuTime; double csTime; public: Timer(); void restart(); string toString(); string toLongString(); }; extern Timer totalTime; //========================================================================== // Exception handling //========================================================================== // class VCTException { private: string message; public: VCTException() {} VCTException(string m) : message(m) {} string toString() {return message;} }; //========================================================================== // File system access & manipulating names for working files //========================================================================== vector listDir (const string& dir); void tryRemoveFile (const string& file); bool readableFileExists(const string& s); void ensureDirPathExists(const string& dirPath); string getFullGoalFileRoot(const string& defaultFileRoot, const string& unitName, int goalNum, int conclNum); //========================================================================== // Polymorphic map and reduce functions for vectors //========================================================================== #endif // ! 
defined UTILITY_HH spark-2012.0.deb/victor/vct/src/translation.cc0000644000175000017500000003107111753202341020175 0ustar eugeneugen//======================================================================== //======================================================================== // TRANSLATION.CC //======================================================================== //======================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ //======================================================================== /* Relevant command-line options ============================= [] for legacy * for new one, not yet supported */ //-------------------------------------------------------------------------- // C++ headers //-------------------------------------------------------------------------- #include #include using std::map; using std::pair; //-------------------------------------------------------------------------- // My header files //-------------------------------------------------------------------------- #include "translation.hh" #include "utility.hh" //-------------------------------------------------------------------------- // Major translation components //-------------------------------------------------------------------------- #include "refine-types.cc" #include "qfol-to-fol.cc" #include "elim-bit-type.cc" #include "elim-enums.cc" #include "elim-arrays-records.cc" //============================================================================ //============================================================================ // Typechecking //============================================================================ //============================================================================ class nodeTypeChecker { public: string messages; void addMessage(const string& m); Node* operator() (FDLContext* c, Node* n); }; void nodeTypeChecker::addMessage(const string& m) { if (messages.size() > 0) messages += ENDLs; messages += m; return; } Node* nodeTypeChecker::operator() (FDLContext* c, Node* n) { // Don't attempt to check interior nodes if (n->kind == SEQ || n->kind == ASSIGN || n->kind == SUBRANGE || n->kind == DECL) { return n; } // Flag polymorphic nodes if (isPolymorphicNode(n)) { addMessage( "Found unexpected polymorphic node " + n->toShortString() + ENDLs + " at " + c->getPathString() + ENDLs ); return n; } // Flag unbound identifiers if (n->kind == ID) { addMessage( "Found unexpected " + n->toShortString() + ENDLs + " at " + c->getPathString() + ENDLs ); return n; } else if (n->kind == VAR && c->lookupBinding(n->id) == 0) { addMessage( "Found unbound " + n->toShortString() + ENDLs + " at " + c->getPathString() + ENDLs ); return n; } else if (n->kind == CONST && c->lookupVar(n->id) == 0 && c->lookupConst(n->id) == 0 && c->lookupEnumConst(n->id) == 0) 
{ addMessage( "Found undeclared " + n->toShortString() + ENDLs + " at " + c->getPathString() + ENDLs ); return n; } vector subNodes = n->getSubNodes(); Node* subNodeTypes = c->getSubNodeTypes(n); if (subNodeTypes->arity() != (int) subNodes.size()) { addMessage( "For node " + n->toShortString() + ENDLs + " at " + c->getPathString() + ENDLs + "Have " + intToString((int) subNodes.size()) + " subnodes and " + intToString(subNodeTypes->arity()) + " subnode types" + ENDLs ); return n; // No point trying to continue typechecking of subnodes } for (int i = 0; i != (int) subNodes.size(); i++) { Node** subNode = subNodes.at(i); Node* actualType = c->normaliseType(c->getType(*subNode)); Node* expectedType = c->normaliseType(subNodeTypes->child(i)); printMessage(FINESTm, "Typechecking child " + intToString(i) + " of node " + n->toShortString() + ENDLs + "Expected type:" + expectedType->toShortString() + ENDLs + "Actual type:" + ENDLs + actualType->toShortString()); if (c->hasFeature("prim bit type has int interp")) { if (actualType->kind == BIT_TY) { actualType = actualType->copy()->updateKind(INT_TY); } if (expectedType->kind == BIT_TY) { expectedType = expectedType->copy()->updateKind(INT_TY); } } // Pickout good cases one by one. if (actualType->kind != UNKNOWN && actualType->equals(expectedType)) { continue; } if (actualType->kind == SUBRANGE_TY) { if (expectedType->kind == INT_TY) continue; if (expectedType->kind == SUBRANGE_TY && actualType->child(0)->kind == NATNUM && actualType->child(1)->kind == NATNUM && expectedType->child(0)->kind == NATNUM && expectedType->child(1)->kind == NATNUM ) { int actualL = stringToInt(actualType->child(0)->id); int actualH = stringToInt(actualType->child(1)->id); int expectedL = stringToInt(expectedType->child(0)->id); int expectedH = stringToInt(expectedType->child(1)->id); if (expectedL <= actualL && actualH <= expectedH) continue; } } printMessage(FINESTm, "type check failed of sub node " + (intToString(i)) + " of " + ENDLs + n->toString() + ENDLs + "Expected type:" + ENDLs + expectedType->toString() + ENDLs + "Actual type:" + ENDLs + actualType->toString() ); addMessage( "For node " + (*subNode)->toShortString() + ENDLs + " at " + c->getPathString() + kindString(n->kind) + "." + intToString(i) + "." + ENDLs + + "Expected type:" + expectedType->toShortString() + ENDLs + "Actual type:" + actualType->toShortString() + ENDLs ); /* addMessage( "type check failed of " + (*subNode)->toShortString() + ENDLs + "which is sub node " + (intToString(i)) + " of " + n->toShortString() + ENDLs + "Expected type:" + expectedType->toShortString() + ENDLs + "Actual type:" + actualType->toShortString() ); */ } // END for loop over children return n; } string typeCheck(FDLContext* c, Node* node, Node* expectedType) { nodeTypeChecker nTC; mapOverWithContext(nTC, c, node); Node* actualType = c->getType(node); if (! 
actualType->equals(expectedType)) { nTC.addMessage( "At top node " + node->toShortString() + ENDLs + "Expected type:" + expectedType->toShortString() + ENDLs + "Actual type:" + actualType->toShortString() + ENDLs ); } return nTC.messages; } // Return true if type check succeeds bool typeCheckFmla(const string& fmlaName, FDLContext* c, Node* node) { c->strictTyping = true; string messages = typeCheck(c,node,new Node(BOOL_TY)); if (messages.size() == 0) { return true; } else { printMessage (ERRORm, "Type check failed for " + fmlaName + ":" + ENDLs + node->toString() + ENDLs + ENDLs + messages + ENDLs ); return false; } } bool typeCheckUnit(const string& tcKind, FDLContext* ctxt, Node* unit) { Node* rules = unit->child(1); Node* goals = unit->child(2); bool typeCheckGood = true; for ( int ruleNum = 1; ruleNum <= rules->arity(); ruleNum++) { string ruleStr (tcKind + ", Rule " + intToString(ruleNum)); Node* rule = rules->child(ruleNum - 1); typeCheckGood = typeCheckGood & typeCheckFmla(ruleStr, ctxt, rule); } for (int goalNum = 1; goalNum <= goals->arity(); goalNum++) { Node* goal = goals->child(goalNum-1); if (goal->arity() < 2) { // "*** true" goals continue; } string goalStr (tcKind + ", Goal " + intToString(goalNum)); Node* hyps = goal->child(0); Node* concls = goal->child(1); for ( int hypNum = 1; hypNum <= hyps->arity(); hypNum++) { Node* hyp = hyps->child(hypNum-1); string hypStr (goalStr + ", Hyp " + intToString(hypNum)); typeCheckGood = typeCheckGood & typeCheckFmla(hypStr, ctxt, hyp); } for ( int conclNum = 1; conclNum <= concls->arity(); conclNum++) { Node* concl = concls->child(conclNum-1); string conclStr (goalStr + ", Concl " + intToString(conclNum)); typeCheckGood = typeCheckGood & typeCheckFmla(conclStr, ctxt, concl); } } // END for goalNum = ... return typeCheckGood; } //======================================================================== //======================================================================== // Miscellaneous //======================================================================== //======================================================================== //------------------------------------------------------------------------ // Strip quantifier patterns //------------------------------------------------------------------------ void stripQuantPat(Node* n) { if ((n->kind == FORALL || n->kind == EXISTS) && n->arity() == 3) { n->popChild(); } } void stripQuantPats(Node* unit) { unit->mapOver(stripQuantPat); } //------------------------------------------------------------------------ // Normalise all type expressions //------------------------------------------------------------------------ // Expand any type aliases. // Can ignore TYPE_PARAMs since they already are normalised. Node* expandTypeAlias(FDLContext* ctxt, Node* n) { if (n->kind != TYPE_ID) return n; Node* normType = ctxt->normaliseType(n); if (normType->kind == RECORD_TY || normType->kind == ENUM_TY || normType->kind == ARRAY_TY ) { return new Node(TYPE_ID, normType->id); } else { // Expect here normType->kind = BOOL_TY, INT_TY, REAL_TY or BIT_TY, // SUBRANGE_TY, TYPE_ID return normType->copy(); } } void elimTypeAliases(FDLContext* ctxt, Node* unit) { mapOverWithContext(expandTypeAlias, ctxt, &(ctxt->typeSeq)); mapOverWithContext(expandTypeAlias, ctxt, &(ctxt->termSeq)); mapOverWithContext(expandTypeAlias, ctxt, unit); // Turn all alias type definitions into abstract type declarations. // Eventually could do work of completely removing the unused // alias type declarations. 
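// The loop below demotes each alias definition whose right-hand side is not
// a record, enum or array type to an abstract type declaration by dropping
// the defining child.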
for (int i = 0; i != ctxt->typeSeq.arity(); i++) { Node* typeDecl = ctxt->typeSeq.child(i); if (typeDecl->arity() == 1) { Node* declRHS = typeDecl->child(0); Kind k = declRHS->kind; if (! (k == RECORD_TY || k == ENUM_TY || k == ARRAY_TY) ) { typeDecl->popChild(); } } } return; } spark-2012.0.deb/victor/vct/src/node.hh0000644000175000017500000003241011753202341016574 0ustar eugeneugen//========================================================================== //========================================================================== // NODE.HH //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ #ifndef NODE_HH #define NODE_HH #include /* C++ strings */ #include /* C++ dynamic arrays */ #include #include /* Definitions of node kinds for use in AST */ /* These are put in a separate namespace to avoid clashes with tokens of the same name */ namespace z { enum Kind { // Top level FDL_FILE, DEF_TYPE, DECL_TYPE, // Isabelle, SMTLIB2 DEF_RECORD, // Isabelle OUTER_DECL, // Isabelle DEF_CONST, DECL_CONST, // Isabelle, SMTLIB2 DECL_VAR, DECL_FUN, RULE_FILE, RULE_FAMILY, REQUIRES, MAY_BE_REPLACED_BY, MAY_BE_DEDUCED, ARE_INTERCHANGEABLE, VCG_FILE, GOAL, ASSERT, // Yices & SMTLib2 CHECK, PUSH, POP, SCRIPT, DECLS, // Introduced by processing RULES, RULE, GOALS, HYPS, ASSUMPTION, // Isabelle, SMTLib CONCLS, CONCL, // Isabelle UNIT, THEORY, // Isabelle // Types INT_TY, REAL_TY, BOOL_TY, BIT_TY, BITVEC_TY, SUBRANGE_TY, ENUM_TY, ARRAY_TY, RECORD_TY, FUN_TY, // Yices, HOLs FUN_ARG_TY, // Isabelle/HOL TUPLE_TY, UNKNOWN, TYPE_UNIV, INT_OR_REAL_TY, // Int or Real INT_REAL_OR_ENUM_TY, // Int or Real or Enum NO_TY, // Set of no types // Expressions FORALL, EXISTS, PAT, IMPLIES, IFF, AND, OR, NOT, EQ, NE, LT, GT, LE, I_LT, I_LE, R_LT, R_LE, GE, TO_REAL, UMINUS, SUCC, PRED, PLUS, MINUS, TIMES, LIN_TIMES, // Linear times: k * e NL_TIMES, // Nonlinear times I_UMINUS, I_SUCC, I_PRED, I_PLUS, I_MINUS, I_TIMES, I_LIN_TIMES, // Linear times: k * e I_NL_TIMES, // Nonlinear times R_UMINUS, R_PLUS, R_MINUS, R_TIMES, R_LIN_TIMES, // Linear times: k * e R_NL_TIMES, // Nonlinear times RDIV, IDIV, IDIV_E, // Euclidian IDIV IDIVM, // IDIV compatible with MOD MOD, MOD_E, // Euclidian MOD EXP, ABS, I_ABS, R_ABS, SQR, I_SQR, R_SQR, I_EXP, R_EXP, I_EXP_N, // Isabelle/HOL R_EXP_N, // Isabelle/HOL I_TO_NAT, // Isabelle/HOL ODD, TUPLE, ARR_ELEMENT, ARR_UPDATE, ARR_BOX_UPDATE, MK_ARRAY, RCD_ELEMENT, RCD_UPDATE, MK_RECORD, ASSIGN, INDEX_AND, FUN_AP, SUBRANGE, ID, TYPE_ID, TYPE_PARAM, NATNUM, REALNUM, TRUE, FALSE, TERM_TRUE, TERM_FALSE, TERM_AND, TERM_OR, TERM_IFF, TERM_NOT, TERM_EQ, TERM_NE, TERM_I_LT, TERM_I_LE, TO_PROP, // Bit to prop TO_BIT, // prop to bit APPLY, // Yices, HOLs LAMBDA, MK_TUPLE, UPDATE, // for arrays 
or functions SELECT, // Builders, misc SEQ, DECL, PENDING, // SMTLIB specific BENCHMARK, LOGIC, EXTRASORTS, EXTRAFUNS, EXTRAPREDS, FORMULA, STATUS, DECL_PRED, TCONST, PRED_AP, DISTINCT, ITE, CONST, VAR, // SMTLIB2 specific SET_OPTION, TO_INT, IS_INT, SET_INFO, INFO_STR }; std::string kindString(z::Kind k); } // End namespace z class Node; typedef std::vector< Node* > Nodes; // By default pointers to Node objects are stored in a pool, and all // current objects in the pool can be deleted at once by calling the // deletePool() method. // When node objects are statically or heap allocated, or // are intended to outlast pool deletions, they should be allocated outside // the pool by setting the optional storage argument to constructors to // UNMANAGED // (Was using bool type, but got nasty type bugs because Node* args were // getting coerced to bool. Now using enum type.) enum Storage { MANAGED, UNMANAGED }; class Node { // Memory management fields and methods private: static std::vector pool; static int poolAllocCount; void addToPool() { pool.push_back(this); poolAllocCount++; return; } public: static void deletePool(); static int getPoolAllocCount() {return poolAllocCount;} // Instance fields z::Kind kind; std::string id; Nodes children; // Constructors explicit Node(z::Kind k, Storage st = MANAGED) ; Node(z::Kind k, Node* n1, Storage st = MANAGED) ; Node(z::Kind k, Node* n1, Node* n2, Storage st = MANAGED) ; Node(z::Kind k, Nodes& ns, Storage st = MANAGED) ; // Constructors with char* needed to avoid compiler wrongly // inserting conversions. Node(z::Kind k, const std::string& s, Storage st = MANAGED) ; Node(z::Kind k, const char* s, Storage st = MANAGED) ; Node(z::Kind k, const std::string& s, Node* n1, Storage st = MANAGED) ; Node(z::Kind k, const char* s, Node* n1, Storage st = MANAGED) ; Node(z::Kind k, const std::string& s, Node* n1, Node* n2, Storage st = MANAGED) ; Node(z::Kind k, const char* s, Node* n1, Node* n2, Storage st = MANAGED) ; Node(z::Kind k, const std::string& s, Node* n1, Node* n2, Node* n3, Storage st = MANAGED) ; Node(z::Kind k, const char* s, Node* n1, Node* n2, Node* n3, Storage st = MANAGED) ; Node(z::Kind k, const std::string& s, Node* n1, Node* n2, Node* n3, Node* n4, Storage st = MANAGED) ; Node(z::Kind k, const char* s, Node* n1, Node* n2, Node* n3, Node* n4, Storage st = MANAGED) ; Node(z::Kind k, const std::string& s, Nodes& ns, Storage st = MANAGED) ; Node(z::Kind k, const char* s, Nodes& ns, Storage st = MANAGED) ; Node*& child(int i) { return children.at(i); } Node*& lastChild() { return children.at(children.size() - 1); } // This definition needed for use in correctly typed def of ==. Node* const & child(int i) const { return children.at(i); } // Add n as new (rightmost) child void addChild(Node* n) { children.push_back(n); } void addLeftChild(Node* n) { children.insert(children.begin(), n); } void popChild() { children.pop_back(); } void popLeftChild() { children.erase(children.begin()); } void clearChildren() { children.clear(); } // Add all children of n onto right of this's children. 
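// (The appended pointers are shared with n; n's own child list is left
// unchanged.)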
void appendChildren(Node* n) { children.insert(children.end(), n->children.begin(), n->children.end()); } int arity() const {return children.size(); } Node* updateKind(z::Kind k) { kind = k; return this; } Node* updateId(const std::string& s) { id = s; return this; } Node* updateKindAndId(z::Kind k, const std::string& s) { kind = k; id = s; return this; } bool operator==(const Node& n) const; bool equals(Node* n) { return * this == * n;} std::string toString(); std::string toShortString(); std::set getIds(z::Kind k); // Deep copy of node tree. Node* copy(); // Map f over Node tree in single bottom up pass. // f can be pointer to function or unary function object. template void mapOver(UnaryFun& f); template Node* mapOver1(UnaryFun& f); // Compute Or of result of f applied to each node of Node tree. // template bool orOver(UnaryBoolFun& f); template bool orOver(UnaryBoolFun f); // Get `logical' children. Usually are the same as children, // except for operators built using more than one node. std::vector getSubNodes(); // Node* expandSubranges() { if (kind == z::SUBRANGE_TY) return Node::int_ty; else return this; } // Some common atomic nodes. static Node* int_ty; static Node* bool_ty; static Node* bit_ty; static Node* real_ty; static Node* int_or_real_ty; static Node* int_real_or_enum_ty; static Node* type_univ; static Node* unknown; static Node* no_ty; }; // g++ doesn't support the "export" keyword which allows // this definition to be pushed down into node.cc. template void Node::mapOver(UnaryFun& f) { for (int i = 0; i != arity(); i++) { child(i)->mapOver(f); } f(this); } // Variation on mapOver, if want to destructively modify // node tree structure. template Node* Node::mapOver1(UnaryFun& f) { for (int i = 0; i != arity(); i++) { child(i) = child(i)->mapOver1(f); } return f(this); } // A function/function object adaptor. // Permits application of bool-valued operator a number of times. // reports if exist application that returns true. // UnaryBoolFun expected to be either // 1. Type of functions T->bool // 2. A class which overloads operator() with function returning bool on arg // of type T. // Version with references is not compiling properly. Not sure why. template class IteratedOr { private: bool result; // UnaryBoolFun& fun; UnaryBoolFun fun; public: // IteratedOr(UnaryBoolFun& f) { IteratedOr(UnaryBoolFun f) { result = false; fun = f; } void operator() (T t) { if ( fun(t) ) result = true; return; } bool value() {return result;} }; template bool Node::orOver(UnaryBoolFun f) { IteratedOr itOr(f); mapOver(itOr); return itOr.value(); // return f(this); } // Used in both yices.cc and cvc.cc. But perhaps here is too generic a place. //======================================================================== // Node tree analysis. //======================================================================== bool isDivOrMod(Node* n); bool isAtomicProp(Node* n); bool isCompoundProp(Node* n); bool isProp(Node* n); // Is a propositional node. Counts top level // constructors UNIT, RULES and GOALS as propositional // (effectively ANDs) std::string gatherKinds(Node* n); std::set gatherBoundVars(Node* n); // Expects hyps to be AND(h1 ... hk) where k >= 0. // If k = 0 returns concl // If k = 1 return h1 => concl // If k > 1 return AND(h1 ... 
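//------------------------------------------------------------------------
// A minimal traversal sketch for mapOver/orOver above.  mapOver takes its
// visitor by reference, so the function object must be an lvalue; orOver
// takes its predicate by value and works directly with functions such as
// isDivOrMod declared above:
//
//   struct KindCounter {
//       z::Kind target;
//       int count;
//       explicit KindCounter(z::Kind k) : target(k), count(0) {}
//       void operator()(Node* n) { if (n->kind == target) count++; }
//   };
//
//   KindCounter vars(z::VAR);
//   unit->mapOver(vars);                    // bottom-up visit of every node
//   bool hasDivMod = unit->orOver(isDivOrMod);
//
// where `unit` stands for any previously built Node tree.
//------------------------------------------------------------------------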
hk) => concl Node* mkGeneralImplies(Node* hyps, Node* concl); Node* nameToType(const std::string& s); std::string typeToName(Node* n); bool isPolymorphicNode(Node* n); //======================================================================== // Node constructor Macros //======================================================================== #define nDECL(id,T) new Node(DECL,(id),(T)) #define nSEQ1(x) new Node(SEQ,(x)) #define nSEQ2(x,y) new Node(SEQ,(x),(y)) #define nSEQ3(x,y,z) new Node(SEQ,"",(x),(y),(z)) #define nFUNAP1(f,a1) new Node(FUN_AP,(f),(a1)) #define nFUNAP2(f,a1,a2) new Node(FUN_AP,(f),(a1),(a2)) #define nFUNAP3(f,a1,a2,a3) new Node(FUN_AP,(f),(a1),(a2)) #define nNATNUM(n) new Node(NATNUM,(n)) #define nCONST(c) new Node(CONST,(c)) #define nVAR(v) new Node(VAR,(v)) #define nINT_TY new Node(INT_TY) #define nTYPE_ID(id) new Node(TYPE_ID,(id)) #define nFORALL(ds,p) new Node(FORALL, (ds),(p)) #define nFORALL1(v,T,p) nFORALL(nSEQ1(nDECL((v),(T))), (p)) #define nFORALL2(v1,T1,v2,T2,p) \ nFORALL( nSEQ2( nDECL((v1),(T1)), nDECL((v2),(T2))), (p)) #define nIMPLIES(p,q) new Node(IMPLIES,(p),(q)) #define nNOT(p) new Node(NOT,(p)) #define nAND(p,q) new Node(AND,(p),(q)) #define nIFF(p,q) new Node(IFF,(p),(q)) #define nTRUE new Node(TRUE); #define nFALSE new Node(FALSE); #define nI_PLUS(i,j) new Node(I_PLUS,(i),(j)) #define nI_MINUS(i,j) new Node(I_MINUS,(i),(j)) #define nIDIV(i,j) new Node(IDIV,(i),(j)) #define nIDIV_E(i,j) new Node(IDIV_E,(i),(j)) #define nMOD(i,j) new Node(MOD,(i),(j)) #define nMOD_E(i,j) new Node(MOD_E,(i),(j)) #define nI_LE(i,j) new Node(I_LE,(i),(j)) #define nI_LT(i,j) new Node(I_LT,(i),(j)) #define nI_GE(i,j) new Node(I_LE,(j),(i)) #define nI_GT(i,j) new Node(I_LT,(j),(i)) #define nEQ(a,b,T) new Node(EQ,"",(a),(b),(T)) #define nI_EQ(a,b) new Node(EQ,"",(a),(b),nINT_TY) #define nNE(a,b,T) nNOT(nEQ((a),(b),(T))) #endif // ! NODE_HH spark-2012.0.deb/victor/vct/src/formatter.cc0000644000175000017500000002364211753202341017647 0ustar eugeneugen//========================================================================== //========================================================================== // FORMATTER.CC //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. 
*/ #include #include "formatter.hh" #include "pprinter.hh" #include using std::ostringstream; Box& Formatter::addParens(z::Kind kind, z::Kind parentKind, int childNum, int parentArity, Box& b) { // Decide whether to parenthesise // Suppress parentheses if either node or parent is not an operator if (subParensMap.count(parentKind) == 0 || subParensMap.count(kind) == 0) return b; string& s = subParensMap.find(parentKind)->second; // Decide format character, as discussed in formatter.hh char formatChar; if (s == "EL" && parentArity > 2) formatChar = (childNum == 0 ) ? 'E' : 'L'; else if (s == "LE" && parentArity > 2) formatChar = (childNum == parentArity - 1) ? 'E' : 'L'; else if (childNum >= (int) s.size()) formatChar = s.at(s.size() - 1); else formatChar = s.at(childNum); int prec = precMap.find(kind)->second; int parentPrec = precMap.find(parentKind)->second; if ( formatChar == '*' || (formatChar == 'E' && parentPrec <= prec) || (formatChar == 'L' && parentPrec < prec) ) { return b; // return if parentheses around operator should be suppressed } // Do parenthesisation return PP::makeHVSingleton("", "(", "", ")",b); }; // STATIC members // See below Vanilla Formatter for initialisation of formatter field. // Formatter* //Formatter::formatter = VanillaFormatter::getFormatter(); // Works??? Box& Formatter::format(const Node& node) { vector subBoxes; for (int i = 0; i != node.arity(); i++) { Node* subNode = node.child(i); Box& wrappedSubBox = formatter->addParens(subNode->kind, node.kind, i, node.arity(), format(*subNode)); subBoxes.push_back(&wrappedSubBox); } return formatter->addSyntax(node.kind, node.id, subBoxes); } void Formatter::addOpInfo(z::Kind k, int prec, const std::string& subParens) { precMap.insert(std::make_pair(k,prec)); subParensMap.insert(std::make_pair(k,subParens)); } Box& Formatter::makeStringAp(const string& id, std::vector& bs) { return PP::makeHVSeq("(" + id, "", "", ")", bs); } //========================================================================== // VANILLA FORMATTER //========================================================================== Box& VanillaFormatter::addSyntax(z::Kind k, const std::string& id, std::vector& bs) { string header = z::kindString(k) + (id != "" ? 
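//--------------------------------------------------------------------------
// Worked example of the addParens decisions above, using the precedences
// and format strings that TestFormatter registers further below via
// addOpInfo (I_PLUS/I_MINUS at 300 with "EL"/"LL", I_TIMES at 320 with "EL"):
//
//   I_TIMES(I_PLUS(a,b), c)  -- child 0 of "EL" is 'E'; 320 <= 300 fails,
//                               so the sum is parenthesised:  (a + b) * c
//   I_PLUS(a, I_TIMES(b,c))  -- child 1 of "EL" is 'L'; 300 < 320 holds,
//                               so parentheses are dropped:   a + b * c
//   I_MINUS(a, I_PLUS(b,c))  -- child 1 of "LL" is 'L'; 300 < 300 fails,
//                               so parentheses are kept:      a - (b + c)
//
// Leaf kinds such as ID are absent from subParensMap and are never wrapped.
//--------------------------------------------------------------------------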
"{" + id + "}" : ""); if (bs.size() == 0) return box(header); else return PP::makeHVSeq(header, "[", ";", "]", bs ); } Formatter* VanillaFormatter::getFormatter() { if (instance == 0) instance = new VanillaFormatter; return instance; } VanillaFormatter::VanillaFormatter() {}; Formatter* VanillaFormatter::instance = 0; Formatter* Formatter::formatter = VanillaFormatter::getFormatter(); //========================================================================== // TEST FORMATTER //========================================================================== using namespace z; Box& TestFormatter::addSyntax(z::Kind k, const std::string& id, std::vector& bs) { switch (k) { case(VCG_FILE): return PP::makeVSeq("vcgfile", " ", " ", "", bs); // indent all goals case(GOAL): return PP::makeVSeq("goal", " ", "->", "", bs); case(SEQ): return PP::makeHVSeq("", "", ",", "", bs); case(DECL): return PP::makeHVSeq("", id + ":", "", "", bs); // TYPES case(INT_TY): return box("INT"); case(REAL_TY): return box("REAL"); case(BOOL_TY): return box("BOOL"); case(SUBRANGE_TY): return PP::makeHVSeq("", "{", "..", "}", bs); case(ENUM_TY): return PP::makeHVSeq("", id + "{", ",", "}", bs); case(ARRAY_TY): return PP::makeHVSeq("array{" + id + "}", "[", "] of ", "", bs); case(RECORD_TY): return PP::makeHVSeq("record{" + id + "}", "(", ",", ")", bs); case(TYPE_ID): case(TYPE_PARAM): return box(id); // EXPRESSIONS case(FORALL): return PP::makeHVSeq("", "All", ". ", "", bs); case(EXISTS): return PP::makeHVSeq("", "Exists", ". ", "", bs); case(IMPLIES): return PP::makeHVSeq("", "", "=>", "", bs); case(IFF): return PP::makeHVSeq("", "", "<=>", "", bs); case(OR): return PP::makeHVSeq("", "", "\\/", "", bs); case(AND): return PP::makeHVSeq("", "", "/\\", "", bs); case(NOT): return PP::makeHVSeq("!", "", "", "", bs); case(TERM_IFF): return PP::makeHVSeq("", "", "<=>b", "", bs); case(TERM_OR): return PP::makeHVSeq("", "", "\\/b", "", bs); case(TERM_AND): return PP::makeHVSeq("", "", "/\\b", "", bs); case(TERM_NOT): return PP::makeHVSeq("!b", "", "", "", bs); case(I_LE): return PP::makeHVSeq("", "", "<=", "", bs); case(I_LT): return PP::makeHVSeq("", "", "<", "", bs); case(EQ): { std::vector newBs; newBs.push_back(bs[0]); newBs.push_back( &(box("= ") + *(bs[1]))); if (bs.size() == 3) { newBs.push_back(&(box("in ") + *(bs[2]))); } return PP::simpleHVSeq(newBs); } case(I_PLUS): return PP::makeHVSeq("", "", "+", "", bs); case(I_MINUS): return PP::makeHVSeq("", "", "-", "", bs); case(I_TIMES): return PP::makeHVSeq("", "", "*", "", bs); case(IDIV): return PP::makeHVSeq("", "", "div", "", bs); case(MOD): return PP::makeHVSeq("", "", "mod", "", bs); case(EXP): return PP::makeHVSeq("", "", "**", "", bs); case(I_UMINUS): return PP::makeHVSeq("", "-", "", "", bs); case(ID): return box(id); case(VAR): return box("?" 
+ id); case(CONST): return box(id); case(PENDING): return box("pending"); case(NATNUM): return box(id); case(FUN_AP): return PP::makeHVSeq(id + "(", "", ",", ")", bs); case(PRED_AP): return PP::makeHVSeq(id + "[", "", ",", "]", bs); default: { if (id.size() == 0) return PP::makeHVSeq(kindString(k), "[", ";", "]", bs ); else return PP::makeHVSeq(kindString(k) + "{" + id + "}", "[", ";", "]", bs ); } } } TestFormatter::TestFormatter() { addOpInfo(EXISTS, 100, "*L"); addOpInfo(FORALL, 100, "*L"); addOpInfo(IMPLIES, 110, "LL"); addOpInfo(IFF, 120, "LL"); addOpInfo(OR, 130, "LLL"); // Ensure n-ary operators easily visible addOpInfo(AND, 140, "LLL"); addOpInfo(NOT, 150, "E"); addOpInfo(TERM_IFF, 120, "LL"); addOpInfo(TERM_OR, 130, "LL"); addOpInfo(TERM_AND, 140, "LL"); addOpInfo(TERM_NOT, 150, "E"); addOpInfo(I_LE, 200, "LL"); addOpInfo(I_LT, 200, "LL"); addOpInfo(I_MINUS, 300, "LL"); addOpInfo(I_PLUS, 300, "EL"); addOpInfo(I_TIMES, 320, "EL"); addOpInfo(IDIV, 320, "EL"); addOpInfo(MOD, 340, "EL"); addOpInfo(EXP, 350, "EL"); addOpInfo(I_UMINUS, 360, "E"); return; }; Formatter* TestFormatter::instance = 0; Formatter* TestFormatter::getFormatter() { if (instance == 0) instance = new TestFormatter; return instance; } //========================================================================== // PRINTING NODES //========================================================================== void Formatter::setFormatter(Formatter* f) { formatter = f; } std::ostream& operator<<(std::ostream& os, const Node& n) { os << Formatter::format(n); Box::deleteAll(); return os; } std::string Node::toString() { ostringstream oss; oss << *this; return oss.str(); } // End of file spark-2012.0.deb/victor/vct/src/elim-arrays-records.cc0000644000175000017500000022067311753202341021533 0ustar eugeneugen//======================================================================== //======================================================================== // ELIM-ARRAYS-RECORDS.CC //======================================================================== //======================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. 
*/ //======================================================================== //======================================================================== // Abstracting interpreted arrays //======================================================================== //======================================================================== //======================================================================== // Eliminate subrange expressions in array indexes //======================================================================== Node* elimSubrangeExp(Node* n) { if (n->kind == SUBRANGE) { printMessage(WARNINGm, "Unsoundly eliminating array subrange expression"); return n->child(0); } else return n; } void elimSubranges(FDLContext* ctxt, Node* unit) { unit->mapOver1(elimSubrangeExp); return; } //======================================================================== // Eliminate array constructors //======================================================================== Node* elimMkArray(Node* oldN, Node* defaultArray, string constArray) { // 1. MK_ARRAY{arr-typeid} e0 ASSIGN(index<1>, e1) ... // ASSIGN(index, en) // --> node // // 2. MK_ARRAY{arr-typeid} ASSIGN(index<1>, e1) ... ASSIGN(index, en) // --> node // // where // // index is an index expression: a tuple of either all index values // or all index ranges. // // node<0> = FUN_AP{constArray} e0 (case 1) // node<0> = defaultArray (case 2) // node = ARR_UPDATE{arr-typeid}(node, index, e) // | ARR_BOX_UPDATE{arr-typeid}(node, index, e) // i = 1..n // // ARR_BOX_UPDATE used when index is tuple of index ranges. // // constArray should name a function which maps expressions to // constant arrays with value that expression: // Typically constArray(x) = \lambda y . x Node* newN; string arrTypeId = oldN->id; int start; if (oldN->child(0)->kind == z::ASSIGN) { newN = defaultArray->copy(); start = 0; } else { newN = new Node(FUN_AP, constArray, oldN->child(0)); start = 1; } for (int i = start; i != oldN->arity(); i++) { Node* assignN = oldN->child(i); Kind updateKind = (assignN->child(0)->child(0)->kind == SUBRANGE) ? z::ARR_BOX_UPDATE : z::ARR_UPDATE; newN = new Node(updateKind, arrTypeId, newN, assignN->child(0), assignN->child(1) ); } return newN; } Node* elimArrayConstructors(Node* n) { if (n->kind == MK_ARRAY) { string arrayName = n->id; string defaultArrName = arrayName + "___default_arr"; string constArrName = arrayName + "___mk_const_arr"; Node* defaultArray = new Node(CONST, defaultArrName); return elimMkArray(n, defaultArray, constArrName); } return n; } void elimMkArrays(FDLContext* ctxt, Node* unit) { Node* rules = unit->child(1); // - - - - - - - - - - - - - - - - - - - - - - - - - - // Iterate over each array type declaration // - - - - - - - - - - - - - - - - - - - - - - - - - - - for (int i = 0; i != ctxt->typeSeq.arity(); i++) { Node* typeDecl = ctxt->typeSeq.child(i); if ( typeDecl->kind == DEF_TYPE && typeDecl->arity() == 1 && typeDecl->child(0)->kind == ARRAY_TY) { // For array type ArrTy = Array(IndexTy1,...,IndexTyn, ValTy), // Add declarations // default_arr : ArrTy // mk_const_arr : (ValTy)ArrTy // and add rule // forall i1:IndexTy1. // ... // forall in:IndexTyn. // forall v:ValTy. 
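// Worked example of elimMkArray for an array type A:
//
//   MK_ARRAY{A} e0 ASSIGN((SEQ 1), x) ASSIGN((SEQ (SUBRANGE 2 4)), y)
//
//   -->
//
//   ARR_BOX_UPDATE{A}(
//       ARR_UPDATE{A}( FUN_AP{A___mk_const_arr}(e0), (SEQ 1), x ),
//       (SEQ (SUBRANGE 2 4)), y )
//
// i.e. a constant array holding e0 everywhere, overwritten pointwise at
// index 1 and box-wise over indexes 2..4.  When no default value e0 is
// supplied, the chain starts from the constant A___default_arr instead.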
mk_const_arr(v)[i1...in] = v string arrayName = typeDecl->id; Node* IndexTySeq = typeDecl->child(0)->child(0); int arrayDim = IndexTySeq->arity(); Node* ValTy = typeDecl->child(0)->child(1); string defaultArrName = arrayName + "___default_arr"; string constArrName = arrayName + "___mk_const_arr"; ctxt->insert(new Node( DEF_CONST, defaultArrName, new Node (TYPE_ID,arrayName))); ctxt->insert(new Node( DECL_FUN, constArrName, new Node (SEQ,ValTy->copy()), new Node (TYPE_ID,arrayName))); Node* qDecls = new Node(SEQ); Node* arrIndex = new Node(SEQ); for (int j = 1; j <= arrayDim; j++) { qDecls->addChild(new Node (DECL, "i" + intToString(j), IndexTySeq->child(j-1)->copy())); arrIndex->addChild(new Node(VAR, "i" + intToString(j))); } qDecls->addChild(new Node (DECL, "v", ValTy->copy())); rules->addChild (new Node (FORALL, qDecls, new Node (EQ,"", new Node(ARR_ELEMENT, arrayName, new Node(FUN_AP, constArrName, new Node (VAR, "v")), arrIndex), new Node(VAR, "v"), ValTy->copy() ))); } // END if is array type decl } // END for loop over type decl seq. // - - - - - - - - - - - - - - - - - - - - - - - - - - // Eliminate all occurrences of constructors // - - - - - - - - - - - - - - - - - - - - - - - - - - - unit->mapOver1(elimArrayConstructors); } //======================================================================== // Eliminate record constructors //======================================================================== Node* elimMkRecord(Node* oldN, Node* defaultRecord) { // MK_RECORD{rcd-typeid} (ASSIGN{i1}, e1) ... (ASSIGN{in}, en) // --> node // // where // // node<0> = defaultRecord // node = RCD_UPDATE{i1}(node, e1, TYPE_PARAM{rcd-typeid}) i = 1..n Node* newN = defaultRecord->copy(); for (int i = 0; i != oldN->arity(); i++) { Node* assignN = oldN->child(i); newN = new Node(z::RCD_UPDATE, assignN->id, newN, assignN->child(0), new Node(z::TYPE_PARAM, oldN->id) ); } return newN; } Node* elimRecordConstructors(Node* n) { if (n->kind == MK_RECORD) { string recordName = n->id; string defaultRecordName = recordName + "___default_rcd"; Node* defaultRecord = new Node(CONST, defaultRecordName); return elimMkRecord(n, defaultRecord); } return n; } void elimMkRecords(FDLContext* ctxt, Node* unit) { // - - - - - - - - - - - - - - - - - - - - - - - - - - // Iterate over each record type declaration // - - - - - - - - - - - - - - - - - - - - - - - - - - - for (int i = 0; i != ctxt->typeSeq.arity(); i++) { Node* typeDecl = ctxt->typeSeq.child(i); if ( typeDecl->kind == DEF_TYPE && typeDecl->arity() == 1 && typeDecl->child(0)->kind == RECORD_TY) { // Add declaration // default_rcd : RecordTy string recordName = typeDecl->id; string defaultRecordName = recordName + "___default_rcd"; ctxt->insert(new Node( DEF_CONST, defaultRecordName, new Node (TYPE_ID,recordName))); } // END if is record type decl } // END for loop over type decl seq. // - - - - - - - - - - - - - - - - - - - - - - - - - - // Eliminate all occurrences of constructors // - - - - - - - - - - - - - - - - - - - - - - - - - - - unit->mapOver1(elimRecordConstructors); } //========================================================================== // Add array axioms //========================================================================== //------------------------------------------------------------------------- // mkEqAliasRel //------------------------------------------------------------------------- Node* mkEqAliasRel(const string& typeName, Node* lhs, Node* rhs) { Kind k = (option("bit-type")) ? 
PRED_AP : FUN_AP; return new Node(k, typeName + "___eq", lhs, rhs); } //------------------------------------------------------------------------- // addArrayElementUpdateAxioms //------------------------------------------------------------------------- void addArrayElementUpdateAxioms(FDLContext* ctxt, Node* unit) { Node* rules = unit->child(1); for (map::iterator i = ctxt->typeMap.begin(); i != ctxt->typeMap.end(); i++ ) { Node* typeDecl = i->second; // DEF_TYPE {id} type // | DEF_TYPE {id} if (typeDecl->arity() == 0) continue; Node* type = typeDecl->child(0); if (type->kind != ARRAY_TY) continue; // typeDecl is // DEF_TYPE {A} (ARRAY_TY{A} (SEQ S1 ... Sn) T) string arrName = typeDecl->id; Node* indexTypes = type->child(0); Node* elementType = type->child(1); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add axioms // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Build iterated components of axioms. Node* sDecls = new Node(SEQ); // s1:S1, ... , sn:Sn, Node* spDecls = new Node(SEQ); // s1p:S1, ... , snp:Sn, Node* sIndexes = new Node(SEQ); // s1, ..., sn Node* spIndexes = new Node(SEQ); // s1p, ..., snp Node* eqIndexes = new Node(AND); // s1 =_S1 s1p /\ ... /\ sn = Sn snp for (int i = 0; i != indexTypes->arity(); i++) { string s = "s"+ intToString(i); string sp = s + "p"; Node* indexType = indexTypes->child(i); sDecls->addChild(new Node (DECL, s, indexType->copy())); spDecls->addChild(new Node (DECL, sp, indexType->copy())); sIndexes->addChild(new Node (VAR, s)); spIndexes->addChild(new Node (VAR, sp)); eqIndexes->addChild(new Node (EQ,"", new Node(VAR, s), new Node(VAR, sp), indexType->copy() )); } if (eqIndexes->arity() == 1) { eqIndexes = eqIndexes->child(0); } // RWEq axiom: // // All a:A, s1:S1, ... , sn:Sn, t:T. // ARR_ELEMENT{A} (ARR_UPDATE{A}(a, (SEQ s1 ... sn), t) // (SEQ s1 ... sn) // =_T t // Node* rWEqDecls = new Node(SEQ, new Node(DECL, "a", nameToType(arrName))); rWEqDecls->appendChildren(sDecls); rWEqDecls->addChild(new Node (DECL, "t", elementType->copy())); Node* rWEqAxiom = new Node(FORALL, rWEqDecls, new Node(EQ,"", new Node(ARR_ELEMENT, arrName, new Node(ARR_UPDATE, arrName, new Node(VAR, "a"), sIndexes, new Node(VAR, "t")), sIndexes->copy() ), new Node(VAR, "t"), elementType->copy() )); rules->addChild(rWEqAxiom); // RWNE axiom: // // All a:A, s1:S1, ... , sn:Sn, s1':S1, ... , sn':Sn, t:T. // NOT (s1 =_S1 s1p /\ ... /\ sn = Sn snp) // IMPLIES // ARR_ELEMENT{A} (ARR_UPDATE{A}(a, (SEQ s1 ... sn), t) // (SEQ s1' ... sn') // =_T ARR_ELEMENT{A} a (SEQ s1' ... sn') // // using OR Node* rWNEDecls = new Node(SEQ, new Node(DECL, "a", nameToType(arrName))); rWNEDecls->appendChildren(sDecls->copy()); rWNEDecls->appendChildren(spDecls); rWNEDecls->addChild(new Node (DECL, "t", elementType->copy())); Node* rWNEAxiom = new Node(FORALL, rWNEDecls, new Node(OR, eqIndexes, new Node(EQ,"", new Node(ARR_ELEMENT, arrName, new Node(ARR_UPDATE, arrName, new Node(VAR, "a"), sIndexes, new Node(VAR, "t")), spIndexes ), new Node(ARR_ELEMENT, arrName, new Node(VAR, "a"), spIndexes->copy() ), elementType->copy() ) )); rules->addChild(rWNEAxiom); } // END for loop over types in context return; } //------------------------------------------------------------------------- // addArrayElementBoxUpdateAxioms //------------------------------------------------------------------------- // normT expected to be normalised type, // either ENUM_TY{type-id} k1 .. kn, TYPE_ID{type-id} or INT_TY. 
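// For example, for a hypothetical enumeration type colour:
//   mkLE(e1, e2, ENUM_TY{colour})  yields  FUN_AP{colour__LE}(e1, e2)
//   mkLE(e1, e2, INT_TY)           yields  I_LE(e1, e2)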
// TYPE_ID{type-id} is assumed to be an abstract enumeration type. Node* mkLE(Node* e1, Node* e2, Node* normT) { if (normT->kind == ENUM_TY || normT->kind == TYPE_ID) return new Node (FUN_AP, normT->id + "__LE", e1, e2); else if (normT->kind == INT_TY || normT->kind == SUBRANGE_TY) return new Node (I_LE, e1, e2); else { printMessage(ERRORm, "Trying to make LE expression for type kind " + kindString(normT->kind)); return new Node(UNKNOWN); } } void addArrayElementBoxUpdateAxioms(FDLContext* ctxt, Node* unit) { Node* rules = unit->child(1); for (map::iterator i = ctxt->typeMap.begin(); i != ctxt->typeMap.end(); i++ ) { Node* typeDecl = i->second; // DEF_TYPE {id} type // | DEF_TYPE {id} if (typeDecl->arity() == 0) continue; Node* type = typeDecl->child(0); if (type->kind != ARRAY_TY) continue; // typeDecl is // DEF_TYPE {A} (ARRAY_TY{A} (SEQ S1 ... Sn) T) string arrName = typeDecl->id; Node* indexTypes = type->child(0); Node* elementType = type->child(1); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add axioms // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Build iterated components of axioms. Node* sDecls = new Node(SEQ); // s1:S1, ... , sn:Sn, Node* spDecls = new Node(SEQ); // s1p:S1, ... , snp:Sn, Node* sqDecls = new Node(SEQ); // s1q:S1, ... , snq:Sn, Node* sIndexes = new Node(SEQ); // s1, ..., sn Node* rIndexes = new Node(SEQ); // SUBRANGE(s1p,s1q), ..., // SUBRANGE(snp,snq) Node* indexBoxPred = new Node(AND); // s1p <=_S1 s1 /\ s1 <=_S1 s1q // /\ ... // /\ snp <=_Sn sn /\ sn <=_Sn snq for (int i = 0; i != indexTypes->arity(); i++) { string s = "s"+ intToString(i); string sp = s + "p"; string sq = s + "q"; Node* indexType = indexTypes->child(i); Node* normIndexType = ctxt->normaliseType(indexType); sDecls->addChild(new Node (DECL, s, indexType->copy())); spDecls->addChild(new Node (DECL, sp, indexType->copy())); sqDecls->addChild(new Node (DECL, sq, indexType->copy())); sIndexes->addChild(new Node (VAR, s)); rIndexes->addChild(new Node (SUBRANGE, new Node (VAR, sp), new Node (VAR, sq))); indexBoxPred->addChild(mkLE(new Node(VAR, sp), new Node(VAR, s), normIndexType)); indexBoxPred->addChild(mkLE(new Node(VAR, s), new Node(VAR, sq), normIndexType)); } // RInW axiom (Read inside of write box) // // All a:A, // s1:S1, ... , sn:Sn, // s1p:S1, ... , snp:Sn, // s1q:S1, ... , snq:Sn, // t:T. // s1p <=_S1 s1 /\ s1 <=_S1 s1q // /\ ... /\ snp <=_Sn sn /\ sn <=_Sn snq // IMPLIES // ARR_ELEMENT{A} (ARR_BOX_UPDATE{A}(a, (SEQ (r1 ... rn), t) // (SEQ s1 ... sn) // =_T t // // where rj = SUBRANGE sjp sjq // // using OR Node* rInWDecls = new Node(SEQ, new Node(DECL, "a", nameToType(arrName))); rInWDecls->appendChildren(sDecls); rInWDecls->appendChildren(spDecls); rInWDecls->appendChildren(sqDecls); rInWDecls->addChild(new Node (DECL, "t", elementType->copy())); Node* rInWAxiom = new Node(FORALL, rInWDecls, new Node(IMPLIES, indexBoxPred, new Node(EQ,"", new Node(ARR_ELEMENT, arrName, new Node(ARR_BOX_UPDATE, arrName, new Node(VAR, "a"), rIndexes, new Node(VAR, "t")), sIndexes ), new Node(VAR, "t"), elementType->copy() ) ) ); rules->addChild(rInWAxiom); // ROutW axiom (Read outside of write box) // // All a:A, // s1:S1, ... , sn:Sn, // s1p:S1, ... , snp:Sn, // s1q:S1, ... , snq:Sn, // t:T. // NOT (s1p <=_S1 s1 /\ s1 <=_S1 s1q // /\ ... /\ snp <=_Sn sn /\ sn <=_Sn snq // IMPLIES // ARR_ELEMENT{A} (ARR_BOX_UPDATE{A}(a, (SEQ (r1 ... rn), t) // (SEQ s1 ... sn) // =_T ARR_ELEMENT{A} a (SEQ s1 ... 
sn) // // where rj = SUBRANGE sjp sjq // // using OR Node* rOutWDecls = rInWDecls->copy(); Node* rOutWAxiom = new Node(FORALL, rOutWDecls, new Node(OR, indexBoxPred->copy(), new Node(EQ,"", new Node(ARR_ELEMENT, arrName, new Node(ARR_BOX_UPDATE, arrName, new Node(VAR, "a"), rIndexes->copy(), new Node(VAR, "t")), sIndexes->copy() ), new Node(ARR_ELEMENT, arrName, new Node(VAR, "a"), sIndexes->copy() ), elementType->copy() ) )); rules->addChild(rOutWAxiom); } // END for loop over types in context return; } //------------------------------------------------------------------------- // addArrayExtensionalityAxioms //------------------------------------------------------------------------- void addArrayExtensionalityAxioms(FDLContext* ctxt, Node* unit) { Node* rules = unit->child(1); for (map::iterator i = ctxt->typeMap.begin(); i != ctxt->typeMap.end(); i++ ) { Node* typeDecl = i->second; // DEF_TYPE {id} type // | DEF_TYPE {id} if (typeDecl->arity() == 0) continue; Node* type = typeDecl->child(0); if (type->kind != ARRAY_TY) continue; // typeDecl is // DEF_TYPE {A} (ARRAY_TY{A} (SEQ S1 ... Sn) T) string arrName = typeDecl->id; Node* indexTypes = type->child(0); Node* elementType = type->child(1); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add axioms // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Build iterated components of axioms. Node* sDecls = new Node(SEQ); // s1:S1, ... , sn:Sn, Node* sIndexes = new Node(SEQ); // s1, ..., sn for (int i = 0; i != indexTypes->arity(); i++) { string s = "s"+ intToString(i); string sp = s + "p"; Node* indexType = indexTypes->child(i); sDecls->addChild(new Node (DECL, s, indexType->copy())); sIndexes->addChild(new Node (VAR, s)); } // Extensionality axiom: // // All a,a':A // ( All s1:S1, ... , sn:Sn. // ARR_ELEMENT{A} a (SEQ s1 ... sn) // =_T // ARR_ELEMENT{A} a' (SEQ s1 ... sn) // ) // => a =_A a' Node* extAxiom = new Node (FORALL, new Node (SEQ, new Node(DECL, "a", nameToType(arrName)), new Node(DECL, "ap", nameToType(arrName)) ), new Node(IMPLIES, new Node(FORALL, sDecls, new Node(EQ,"", new Node(ARR_ELEMENT, arrName, new Node(VAR, "a"), sIndexes ), new Node(ARR_ELEMENT, arrName, new Node(VAR, "ap"), sIndexes->copy() ), elementType->copy() ) ), new Node(EQ,"", new Node(VAR,"a"), new Node(VAR,"ap"), nameToType(arrName)) ) ); rules->addChild(extAxiom); } // END for loop over types in context return; } //========================================================================== // Abstract array select and update operators //========================================================================== //------------------------------------------------------------------------- // abstractArrayOp //------------------------------------------------------------------------- Node* abstractArrayOp(Node* n) { if (n->kind == ARR_ELEMENT) { // ARR_ELEMENT{A} a (SEQ i1 ... in) // --> FUN_AP{___arr_element} a i1 ... in n->kind = FUN_AP; n->id = n->id + "___arr_element"; Node* indexes = n->child(1); n->popChild(); n->appendChildren(indexes); } else if (n->kind == ARR_UPDATE) { // ARR_UPDATE{A} a (SEQ i1 ... in) v // --> FUN_AP{___arr_update} a i1 ... 
in v n->kind = FUN_AP; n->id = n->id + "___arr_update"; Node* indexes = n->child(1); Node* val = n->child(2); n->popChild(); n->popChild(); n->appendChildren(indexes); n->addChild(val); } return n; } //------------------------------------------------------------------------- // abstractArraySelectUpdateOps //------------------------------------------------------------------------- void abstractArraySelectUpdateOps(FDLContext* ctxt, Node* unit) { for (map::iterator i = ctxt->typeMap.begin(); i != ctxt->typeMap.end(); i++ ) { Node* typeDecl = i->second; // DEF_TYPE {id} type // | DEF_TYPE {id} if (typeDecl->arity() == 0) continue; Node* type = typeDecl->child(0); if (type->kind != ARRAY_TY) continue; // typeDecl is // DEF_TYPE {A} (ARRAY_TY{A} (SEQ S1 ... Sn) T) string arrName = typeDecl->id; Node* indexTypes = type->child(0); Node* elementType = type->child(1); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add declarations for element and update functions // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // DECL_FUN{___arr_element} (SEQ A S1 ... Sn) T Node* argTypes = new Node(SEQ, nameToType(arrName)); argTypes->appendChildren(indexTypes->copy()); Node* elementDecl = new Node(DECL_FUN, arrName + "___arr_element", argTypes, elementType->copy() ); ctxt->insert(elementDecl); // DECL_FUN{___arr_update} (A S1 ... Sn T) A argTypes = argTypes->copy(); argTypes->addChild(elementType->copy()); Node* updateDecl = new Node(DECL_FUN, arrName + "___arr_update", argTypes, nameToType(arrName) ); ctxt->insert(updateDecl); } // END of for loop over type decls // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Abstract array operators in unit // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Done after axiom addition, since axioms use array operators unit->mapOver(abstractArrayOp); return; } //------------------------------------------------------------------------- // abstractArrayBoxUpdate //------------------------------------------------------------------------- Node* abstractArrayBoxUpdate(Node* n) { if (n->kind == ARR_BOX_UPDATE) { // ARR_BOX_UPDATE{A} a (SEQ r1 ... rn) v where rj = SUBRANGE ijp ijq // --> FUN_AP{___arr_box_update} a i1p i1q ... inp inq v n->kind = FUN_AP; n->id = n->id + "___arr_box_update"; Node* ranges = n->child(1); Node* val = n->child(2); n->popChild(); n->popChild(); for (int j = 0; j != ranges->arity(); j++) { n->appendChildren(ranges->child(j)); } n->addChild(val); } return n; } //------------------------------------------------------------------------- // abstractArrayBoxUpdates //------------------------------------------------------------------------- void abstractArrayBoxUpdates(FDLContext* ctxt, Node* unit) { for (map::iterator i = ctxt->typeMap.begin(); i != ctxt->typeMap.end(); i++ ) { Node* typeDecl = i->second; // DEF_TYPE {id} type // | DEF_TYPE {id} if (typeDecl->arity() == 0) continue; Node* type = typeDecl->child(0); if (type->kind != ARRAY_TY) continue; // typeDecl is // DEF_TYPE {A} (ARRAY_TY{A} (SEQ S1 ... Sn) T) string arrName = typeDecl->id; Node* indexTypes = type->child(0); Node* elementType = type->child(1); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add declaration for box update function // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // DECL_FUN{___arr_box_update} (A S1 S1 ... 
Sn Sn T) A Node* argTypes = new Node(SEQ, nameToType(arrName)); for (int j = 0; j != indexTypes->arity(); j++) { argTypes->addChild(indexTypes->child(j)->copy()); argTypes->addChild(indexTypes->child(j)->copy()); } argTypes->addChild(elementType->copy()); Node* boxUpdateDecl = new Node(DECL_FUN, arrName + "___arr_box_update", argTypes, nameToType(arrName) ); ctxt->insert(boxUpdateDecl); } // END of for loop over type decls // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Abstract array operators in unit // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Done after axiom addition, since axioms use array operators unit->mapOver(abstractArrayBoxUpdate); return; } //======================================================================== // Array extensionality treatment //======================================================================== /* We introduce a user-defined relation to alias equality over arrays to help along the typical quantifier instantiation algorithms which will not match against raw equalities. (At least this seems to be the case with Z3, indeed a remark in the Z3 FAQ talks about this). Keep option just to add an extensionality axiom as above. But also allow for the following: For each array type A = ARRAY(S1, ... Sn) T 1. Add strong extensionality axiom for relation All a,a':A ( All s1:S1, ... , sn:Sn. ARR_ELEMENT{A} a (SEQ s1 ... sn) =_T ARR_ELEMENT{A} a' (SEQ s1 ... sn) ) => ___ext_eq(a,a') Does it matter whether we use <=> or =>? - (For <=>): It might get converted into a boolean equality. This might be handled more efficiently. But then again, have predicate logic structure on one side, so need anyway to drop down to logic reasoning. - (For =>): More spartan. Not stating a redundant fact. Go for => for now. (Allow <=> as option?) [Done above in addArrayAxioms() function] 2a Everywhere replace a =_A a' with ___ext_eq(a,a') OR 2b Only do replacement in contexts where equality occurs +vely. (e.g. in concl under even number of negations) 3. Declare equality relation: ___ext_eq : (A,A) Bool 4. Add defining axiom for relation All a,a':A. ___ext_eq(a,a') <=> a =_A a' Ordering constraints: * Important that 4 is after 2a! * 2a is simplest to do by local map if the array type has not first been abstracted. So need to also fix order of standard abstraction. * Do we do 2a *after* introducing array read over write axioms? Seems like yes, so those axioms can be also updated as necessary (if for example we have nested arrays and records). This strategy is realised together for both arrays and records below. */ //======================================================================== //======================================================================== // Abstracting interpreted records //======================================================================== //======================================================================== // // THIS NEEDS UPDATING!!!! // // // 1. Eliminate constructor ops using updates // Add abstract default record constant [DONE] // // 2. Axiomatise and abstract update ops // // 3. Axiomatise type with select and update ops // Abstract type, select and update. // [PRECOND: 1 done] // 4. Axiomatise type with select and constructor ops // Abstract type, select and constructor // [PRECOND: 2 done] // // Common functionality: // // A: Adding declarations // B: mapping over unit, updating it // // 1 already written. // // Can we save effort by building multipurpose utilities for 2,3,4? 
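//
// Concretely (illustrative sketch), the equality-aliasing strategy above is
// realised further below by introArrayEqAlias / introRecordEqAlias and
// addDeclsAxiomsForArrRecEqAliases, with the alias relation named
// <type>___eq rather than ___ext_eq.  For an array or record type A, a
// typed equality node
//
//   EQ("", a, b, TYPE_ID{A})
//
// is rewritten to
//
//   FUN_AP{A___eq}(a, b)    (PRED_AP{A___eq} when option "bit-type" is set)
//
// and the defining rule  forall x,y:A. A___eq(x,y) <-> x = y  is added.
//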
// // B: // // Notation: // // Type: // R = RECORD_TY{R}(D1 ... Dn) where Di = DECL{fi} Ti // // Operators: // MK_RECORD{R} A1 ... An where Ai = ASSIGN{fi} ti // RCD_ELEMENT{fi} r TYPE_PARAM{R} // RCD_UPDATE{fi} r t TYPE_PARAM{R} //======================================================================== // Utilities for record abstraction and elimination //======================================================================== // // Assuming assignments in proper order. Should check in each instance // // For i = 1 .. n // RCD_ELEMENT{fi} r TYPE_PARAM{R} --> FUN_AP{______rcd_element r} r // RCD_UPDATE{fi} r ti TYPE_PARAM{R} // --> FUN_AP{______rcd_update r} r ti // // Declarations for abstract operators: // // For i = 1 .. n // FUN_DECL{______rcd_element} (SEQ R) Ti // FUN_DECL{______rcd_update} (SEQ R Ti) R // FUN_DECL{___mk_rcd} (SEQ T1 ... Tn) R // //---------------------------------------------------------------------------- // abstractMkRecord //---------------------------------------------------------------------------- // MK_RECORD{R} A1 ... An --> FUN_AP{___mk_rcd} t1 ... tn // with check that assignments in expected order. Node* abstractMkRecord(FDLContext* c, Node* n) { if (n->kind != MK_RECORD) return n; string rcdName = n->id; // rcdType := RECORD_TY{R}(D1 ... Dn) where Di = DECL{fi} Ti Node* rcdType = c->lookupType(rcdName)->child(0); if (n->arity() != rcdType->arity()) { printMessage(ERRORm, "Constructor for record type " + rcdName + " found with bad arity " + intToString(n->arity())); return n; } for (int i = 0; i != rcdType->arity(); i++) { if (n->child(i)->id != rcdType->child(i)->id) { printMessage(ERRORm, "Constructor for record type " + rcdName + " found with field " + n->child(i)->id + " out of order"); return n; } n->child(i) = n->child(i)->child(0); } n->kind = FUN_AP; n->id = rcdName + "___mk_rcd"; return n; } //---------------------------------------------------------------------------- // abstractRcdElement //---------------------------------------------------------------------------- // RCD_ELEMENT{fi} r TYPE_PARAM{R} --> FUN_AP{______rcd_element r} r Node* abstractRcdElement(Node* n) { if (n->kind != RCD_ELEMENT) return n; string rcdName = n->child(1)->id; string fieldName = n->id; n->popChild(); n->kind = FUN_AP; n->id = rcdName + "___" + fieldName + "___rcd_element"; return n; } //---------------------------------------------------------------------------- // abstractRcdUpdate //---------------------------------------------------------------------------- // RCD_UPDATE{fi} r ti TYPE_PARAM{R} // --> // FUN_AP{______rcd_update r} r ti Node* abstractRcdUpdate(Node* n) { if (n->kind != RCD_UPDATE) return n; string rcdName = n->child(2)->id; string fieldName = n->id; n->popChild(); n->kind = FUN_AP; n->id = rcdName + "___" + fieldName + "___rcd_update"; return n; } //---------------------------------------------------------------------------- // abstractRcdUpdates //---------------------------------------------------------------------------- // For record type R = RECORD_TY(D1 ... 
Dn) where Di = DECL{fi} Ti // // Add n axioms, where declaration i is // and add a declaration: // // FUN_DECL{______rcd_update} (SEQ R Ti) R // // and axiom i is // // FORALL r:R, t:Ti // // RCD_UPDATE{fi} r t TYPE_PARAM{R} // =_R // MK_RECORD{R} // (ASSIGN{f1} (RCD_ELEMENT{f1} r TYPE_PARAM{R}) ) // (ASSIGN{f(i-1)} (RCD_ELEMENT{f(i-1)} r TYPE_PARAM{R}) ) // (ASSIGN{fi} t // (ASSIGN{f(i+1)} (RCD_ELEMENT{f(i+1)} r TYPE_PARAM{R}) ) // (ASSIGN{fn} (RCD_ELEMENT{fn} r TYPE_PARAM{R}) ) // // // Then abstract the RCD_UPDATE expression: // // RCD_UPDATE{fi} r t TYPE_PARAM{R} --> ______rcd_update // // void abstractRcdUpdates(FDLContext* ctxt, Node* unit) { Node* rules = unit->child(1); // - - - - - - - - - - - - - - - - - - - - - - - - - - // Iterate over each record type declaration // - - - - - - - - - - - - - - - - - - - - - - - - - - - for (int j = 0; j != ctxt->typeSeq.arity(); j++) { Node* typeDecl = ctxt->typeSeq.child(j); if (!( typeDecl->kind == DEF_TYPE && typeDecl->arity() == 1 && typeDecl->child(0)->kind == RECORD_TY)) { continue; } // rcdType = RECORD_TY{R}(D1 ... Dn) where Di = DECL{fi} Ti Node* rcdType = typeDecl->child(0); string typeName = rcdType->id; for (int i = 0; i != rcdType->arity(); i++) { string fieldName_i = rcdType->child(i)->id; Node* fieldType_i = rcdType->child(i)->child(0); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add declaration // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // DECL_FUN{______rcd_update} (SEQ R Ti) R Node* decl = new Node(DECL_FUN, typeName + "___" + fieldName_i + "___rcd_update", new Node(SEQ, nameToType(typeName), fieldType_i), nameToType(typeName) ); ctxt->insert(decl); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add axiom // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Node* mkRecordExp = new Node(MK_RECORD, typeName); for (int k = 0; k != rcdType->arity(); k++) { string fieldName_k = rcdType->child(k)->id; Node* assignVal = (k == i) ? new Node(VAR, "t") : new Node(RCD_ELEMENT, fieldName_k, new Node(VAR, "r"), new Node(TYPE_PARAM, typeName)); mkRecordExp->addChild(new Node(ASSIGN,fieldName_k,assignVal)); } Node* updateElimAxiom = new Node(FORALL, new Node(SEQ, new Node(DECL, "r", nameToType(typeName)), new Node(DECL, "t", fieldType_i->copy()) ), new Node (EQ,"", new Node(RCD_UPDATE, fieldName_i, new Node(VAR, "r"), new Node(VAR, "t"), new Node(TYPE_PARAM, typeName) ), mkRecordExp, nameToType(typeName) ) ); rules->addChild(updateElimAxiom); } // END For i over record components } // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Abstract record update operators // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - unit->mapOver1(abstractRcdUpdate); return; } //---------------------------------------------------------------------------- // addRcdElementUpdateAxioms //---------------------------------------------------------------------------- // // For all i = 1 .. n // For all k = 1 .. n // Add axiom: // // All r:R. 
All t:Tk // RCD_ELEMENT{fi} (RCD_UPDATE{fk} r t R) R =_Ti t if i = k // RCD_ELEMENT{fi} (RCD_UPDATE{fk} r t R) R // =_Ti RCD_ELEMENT{fi} r R if i != k // void addRcdElementUpdateAxioms(FDLContext* ctxt, Node* unit) { Node* rules = unit->child(1); // - - - - - - - - - - - - - - - - - - - - - - - - - - // Iterate over each record type declaration // - - - - - - - - - - - - - - - - - - - - - - - - - - - for (int j = 0; j != ctxt->typeSeq.arity(); j++) { Node* typeDecl = ctxt->typeSeq.child(j); if (!( typeDecl->kind == DEF_TYPE && typeDecl->arity() == 1 && typeDecl->child(0)->kind == RECORD_TY)) { continue; } // rcdType = RECORD_TY{R}(D1 ... Dn) where Di = DECL{fi} Ti Node* rcdType = typeDecl->child(0); string typeName = rcdType->id; int rcdSize = rcdType->arity(); printMessage(INFOm, "Generating " + intToString(rcdSize * rcdSize) + " element-update axioms for record type " + typeName + " with " + intToString(rcdSize) + " fields"); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Loop i over fields // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - for (int i = 0; i != rcdType->arity(); i++) { string fieldName_i = rcdType->child(i)->id; Node* fieldType_i = rcdType->child(i)->child(0); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Loop k over fields // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - for (int k = 0; k != rcdType->arity(); k++) { string fieldName_k = rcdType->child(k)->id; Node* fieldType_k = rcdType->child(k)->child(0); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add element update axiom // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Node* lhs = new Node(RCD_ELEMENT, fieldName_i, new Node(RCD_UPDATE, fieldName_k, new Node(VAR, "r"), new Node(VAR, "t"), new Node(TYPE_PARAM, typeName) ), new Node(TYPE_PARAM, typeName) ); Node* rhs = (i == k) ? new Node(VAR, "t") : new Node(RCD_ELEMENT, fieldName_i, new Node(VAR, "r"), new Node(TYPE_PARAM, typeName) ); Node* elementUpdateAxiom = new Node(FORALL, new Node(SEQ, new Node(DECL, "r", nameToType(typeName)), new Node(DECL, "t", fieldType_k->copy()) ), new Node(EQ,"", lhs, rhs, fieldType_i->copy()) ); rules->addChild(elementUpdateAxiom); } // END For k over fields } // END For i over fields } // END For j over type definitions return; } //---------------------------------------------------------------------------- // abstractRcdElementsUpdates //---------------------------------------------------------------------------- // Assumes MK_RECORD operators already eliminated // // // For all i = 1 .. n // add declarations: // // DECL_FUN{______rcd_element} (SEQ R) Ti // DECL_FUN{______rcd_update} (SEQ R Ti) R // // // Abstract all occurrences of element and update operators void abstractRcdElementsUpdates(FDLContext* ctxt, Node* unit) { // - - - - - - - - - - - - - - - - - - - - - - - - - - // Iterate over each record type declaration // - - - - - - - - - - - - - - - - - - - - - - - - - - - for (int j = 0; j != ctxt->typeSeq.arity(); j++) { Node* typeDecl = ctxt->typeSeq.child(j); if (!( typeDecl->kind == DEF_TYPE && typeDecl->arity() == 1 && typeDecl->child(0)->kind == RECORD_TY)) { continue; } // rcdType = RECORD_TY{R}(D1 ... 
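// Worked example (illustrative): for a two-field record type
//   R = record x : integer; y : boolean end
// the element-update axioms above, once the element/update operators have
// been abstracted, read:
//
//   forall r:R, t:integer.
//     R___x___rcd_element(R___x___rcd_update(r, t)) = t
//   forall r:R, t:integer.
//     R___y___rcd_element(R___x___rcd_update(r, t)) = R___y___rcd_element(r)
//   forall r:R, t:boolean.
//     R___x___rcd_element(R___y___rcd_update(r, t)) = R___x___rcd_element(r)
//   forall r:R, t:boolean.
//     R___y___rcd_element(R___y___rcd_update(r, t)) = t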
Dn) where Di = DECL{fi} Ti Node* rcdType = typeDecl->child(0); string typeName = rcdType->id; // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Loop i over fields // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - for (int i = 0; i != rcdType->arity(); i++) { string fieldName_i = rcdType->child(i)->id; Node* fieldType_i = rcdType->child(i)->child(0); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add declarations // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // DECL_FUN{______rcd_element} (SEQ R) Ti // DECL_FUN{______rcd_update} (SEQ R Ti) R Node* declE = new Node(DECL_FUN, typeName + "___" + fieldName_i + "___rcd_element", new Node(SEQ, nameToType(typeName) ), fieldType_i ); ctxt->insert(declE); Node* declU = new Node(DECL_FUN, typeName + "___" + fieldName_i + "___rcd_update", new Node(SEQ, nameToType(typeName), fieldType_i ), nameToType(typeName) ); ctxt->insert(declU); } // END For i over fields } // END For j over type definitions // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Abstract record element and update operators // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - unit->mapOver1(abstractRcdElement); unit->mapOver1(abstractRcdUpdate); return; } //---------------------------------------------------------------------------- // addRcdEqElementsExtAxioms //---------------------------------------------------------------------------- // // For each record type // // Add axiom // All r,r': R. // (RCD_ELEMENT{f1} r R =_T1 RCD_ELEMENT{f1} r' R // /\ ... /\ ... // RCD_ELEMENT{fn} r R =_Tn RCD_ELEMENT{fn} r' R // ) // IMPLIES r =_R r' // void addRcdEqElementsExtAxioms(FDLContext* ctxt, Node* unit) { Node* rules = unit->child(1); // - - - - - - - - - - - - - - - - - - - - - - - - - - // Iterate over each record type declaration // - - - - - - - - - - - - - - - - - - - - - - - - - - - for (int j = 0; j != ctxt->typeSeq.arity(); j++) { Node* typeDecl = ctxt->typeSeq.child(j); if (!( typeDecl->kind == DEF_TYPE && typeDecl->arity() == 1 && typeDecl->child(0)->kind == RECORD_TY)) { continue; } // rcdType = RECORD_TY{R}(D1 ... 
Dn) where Di = DECL{fi} Ti Node* rcdType = typeDecl->child(0); string typeName = rcdType->id; // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Initialise core part of extensionality axiom // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Node* extAxiomHyps = new Node(AND); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Loop i over fields // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - for (int i = 0; i != rcdType->arity(); i++) { string fieldName_i = rcdType->child(i)->id; Node* fieldType_i = rcdType->child(i)->child(0); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Augment core of extensionality axiom // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Node* elementEq = new Node(EQ,"", new Node(RCD_ELEMENT, fieldName_i, new Node(VAR, "r"), new Node(TYPE_PARAM, typeName) ), new Node(RCD_ELEMENT, fieldName_i, new Node(VAR, "rp"), new Node(TYPE_PARAM, typeName) ), fieldType_i->copy() ); extAxiomHyps->addChild(elementEq); } // END For i over fields // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add extensionality axiom // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Node* extAxiom = new Node(FORALL, new Node(SEQ, new Node (DECL, "r", nameToType(typeName)), new Node (DECL, "rp", nameToType(typeName)) ), mkGeneralImplies( extAxiomHyps, new Node(EQ,"", new Node (VAR,"r"), new Node (VAR,"rp"), nameToType(typeName) ) ) ); rules->addChild(extAxiom); } // END For j over type definitions return; } //---------------------------------------------------------------------------- // addRcdElementMkRcdAxioms //---------------------------------------------------------------------------- // // For i = 1 .. n // // add axiom // // All t1:T1, ..., tn:Tn. // RCD_ELEMENT{fi} (MK_RECORD{R} ASSIGN{f1} t1 ... ASSIGN{fn} tn) R // =_Ti // ti void addRcdElementMkRcdAxioms(FDLContext* ctxt, Node* unit) { Node* rules = unit->child(1); // - - - - - - - - - - - - - - - - - - - - - - - - - - // Iterate over each record type declaration // - - - - - - - - - - - - - - - - - - - - - - - - - - - for (int j = 0; j != ctxt->typeSeq.arity(); j++) { Node* typeDecl = ctxt->typeSeq.child(j); if (!( typeDecl->kind == DEF_TYPE && typeDecl->arity() == 1 && typeDecl->child(0)->kind == RECORD_TY)) { continue; } // rcdType = RECORD_TY{R}(D1 ... 
Dn) where Di = DECL{fi} Ti Node* rcdType = typeDecl->child(0); string typeName = rcdType->id; for (int i = 0; i != rcdType->arity(); i++) { string fieldName_i = rcdType->child(i)->id; Node* fieldType_i = rcdType->child(i)->child(0); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add element constructor axiom // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Node* axiomMkRcd = new Node(MK_RECORD, typeName); Node* axiomDecls = new Node(SEQ); for (int k = 0; k != rcdType->arity(); k++) { string fieldName_k = rcdType->child(k)->id; Node* fieldType_k = rcdType->child(k)->child(0); string var_k = "t" + intToString(k); axiomDecls->addChild(new Node(DECL, var_k, fieldType_k->copy() ) ); axiomMkRcd->addChild(new Node(ASSIGN, fieldName_k, new Node(VAR, var_k) ) ); } Node* elementConstructorAxiom = new Node(FORALL, axiomDecls, new Node (EQ,"", new Node(RCD_ELEMENT, fieldName_i, axiomMkRcd, nameToType(typeName) ), new Node(VAR, "t" + intToString(i)), fieldType_i->copy() ) ); rules->addChild(elementConstructorAxiom); } // END For i over record components } // END For j over type defs return; } //---------------------------------------------------------------------------- // abstractRcdElementsMkRcds //---------------------------------------------------------------------------- // // Add declaration // DECL_FUN{___mk_rcd} (SEQ T1 ... Tn) R // // // For i = 1 .. n // // add declaration // // DECL_FUN{______rcd_element} (SEQ R) Ti // // Abstract all occurrences of the element and mk_rcd operators void abstractRcdElementsMkRcds(FDLContext* ctxt, Node* unit) { // - - - - - - - - - - - - - - - - - - - - - - - - - - // Iterate over each record type declaration // - - - - - - - - - - - - - - - - - - - - - - - - - - - for (int j = 0; j != ctxt->typeSeq.arity(); j++) { Node* typeDecl = ctxt->typeSeq.child(j); if (!( typeDecl->kind == DEF_TYPE && typeDecl->arity() == 1 && typeDecl->child(0)->kind == RECORD_TY)) { continue; } // rcdType = RECORD_TY{R}(D1 ... 
Dn) where Di = DECL{fi} Ti Node* rcdType = typeDecl->child(0); string typeName = rcdType->id; // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Initialise argTypes for constructor declaration // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Node* mkRcdArgTypes = new Node(SEQ); for (int i = 0; i != rcdType->arity(); i++) { string fieldName_i = rcdType->child(i)->id; Node* fieldType_i = rcdType->child(i)->child(0); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add declaration for element operator // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // DECL_FUN{______rcd_element} (SEQ R) Ti Node* decl = new Node(DECL_FUN, typeName + "___" + fieldName_i + "___rcd_element", new Node(SEQ, nameToType(typeName)), fieldType_i ); ctxt->insert(decl); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Augment argTypes for constructor declaration // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - mkRcdArgTypes->addChild(fieldType_i->copy()); } // END For i over record components // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add declaration for record constructor // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Node* mkRcdDecl = new Node(DECL_FUN, typeName + "___mk_rcd", mkRcdArgTypes, nameToType(typeName) ); ctxt->insert(mkRcdDecl); } // END For j over type defs // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Abstract record element and constructor operators // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - unit->mapOver1(abstractRcdElement); mapOverWithContext(abstractMkRecord, ctxt, unit); return; } //---------------------------------------------------------------------------- // addMkRcdElementExtAxioms //---------------------------------------------------------------------------- // For each record type // // Add extensionality axiom // // All r:R. // MK_RECORD{R}( // ASSIGN{f1} (RCD_ELEMENT{f1} r R), // ... // ASSIGN{fn} (RCD_ELEMENT{fn} r R) // ) // =_R r // void addMkRcdElementExtAxioms(FDLContext* ctxt, Node* unit) { Node* rules = unit->child(1); // - - - - - - - - - - - - - - - - - - - - - - - - - - // Iterate over each record type declaration // - - - - - - - - - - - - - - - - - - - - - - - - - - - for (int j = 0; j != ctxt->typeSeq.arity(); j++) { Node* typeDecl = ctxt->typeSeq.child(j); if (!( typeDecl->kind == DEF_TYPE && typeDecl->arity() == 1 && typeDecl->child(0)->kind == RECORD_TY)) { continue; } // rcdType = RECORD_TY{R}(D1 ... 
Dn) where Di = DECL{fi} Ti Node* rcdType = typeDecl->child(0); string typeName = rcdType->id; // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Initialise ext axiom core // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Node* extAxiomLHS = new Node(MK_RECORD, typeName); for (int i = 0; i != rcdType->arity(); i++) { string fieldName_i = rcdType->child(i)->id; // Node* fieldType_i = rcdType->child(i)->child(0); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Augment extensionality axiom core // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - extAxiomLHS->addChild(new Node(ASSIGN, fieldName_i, new Node(RCD_ELEMENT, fieldName_i, new Node(VAR, "r"), nameToType(typeName) ) ) ); } // END For i over record components // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add extensionality axiom // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Node* extAxiom = new Node(FORALL, new Node(SEQ, new Node(DECL, "r", nameToType(typeName)) ), new Node(EQ,"", extAxiomLHS, new Node (VAR,"r"), nameToType(typeName) ) ); rules->addChild(extAxiom); } // END For j over type defs return; } //======================================================================== // Abstracting arrays and records: functions common to both //======================================================================== //------------------------------------------------------------------------ // Introduce aliases for array and record equalities //------------------------------------------------------------------------ Node* introArrayEqAlias(FDLContext* ctxt, Node* n) { if (n->kind != EQ) return n; // n->kind == EQ Node* eqType = ctxt->normaliseType(n->child(2)); if (eqType->kind == ARRAY_TY) { return mkEqAliasRel(eqType->id, n->child(0), n->child(1)); } else { return n; } } //------------------------------------------------------------------------ // Introduce aliases for array and record equalities //------------------------------------------------------------------------ Node* introRecordEqAlias(FDLContext* ctxt, Node* n) { if (n->kind != EQ) return n; // n->kind == EQ Node* eqType = ctxt->normaliseType(n->child(2)); if (eqType->kind == RECORD_TY) { return mkEqAliasRel(eqType->id, n->child(0), n->child(1)); } else { return n; } } //--------------------------------------------------------------------------- // Add declarations and defining axioms for array and record equality aliases //--------------------------------------------------------------------------- void addDeclsAxiomsForArrRecEqAliases(FDLContext* ctxt, Node* unit) { Node* rules = unit->child(1); for (map::iterator i = ctxt->typeMap.begin(); i != ctxt->typeMap.end(); i++ ) { Node* typeDecl = i->second; // DEF_TYPE {id} type // | DEF_TYPE {id} if (typeDecl->arity() == 0) continue; Node* type = typeDecl->child(0); if (! ( (type->kind == ARRAY_TY && option("use-array-eq-aliases")) || (type->kind == RECORD_TY && option("use-record-eq-aliases")) ) ) continue; // typeDecl is // DEF_TYPE {A} (ARRAY_TY{A} (SEQ S1 ... Sn) T) // DEF_TYPE {A} (RECORD_TY{R} D1 ... 
Dn where Di = DECL{fi} Ti string typeName = typeDecl->id; // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Declare eq alias function/relation // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Node* eqAliasDecl = new Node(DECL_FUN, typeName + "___eq", new Node(SEQ, nameToType(typeName), nameToType(typeName)), Node::bool_ty ); ctxt->insert(eqAliasDecl); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add eq alias defining axiom // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Node* axiom = new Node(FORALL, new Node (SEQ, new Node (DECL, "x", nameToType(typeName)), new Node (DECL, "y", nameToType(typeName)) ), new Node(IFF, mkEqAliasRel(typeName, new Node(VAR, "x"), new Node(VAR, "y")), new Node(EQ,"", new Node(VAR, "x"), new Node(VAR, "y"), nameToType(typeName)) ) ); rules->addChild(axiom); } // END for loop return; } //------------------------------------------------------------------------ // abstractArrayTypeDecls //------------------------------------------------------------------------ void abstractArrayTypeDecls(FDLContext* ctxt) { for (map::iterator i = ctxt->typeMap.begin(); i != ctxt->typeMap.end(); i++ ) { Node* typeDecl = i->second; // DEF_TYPE {id} type // | DEF_TYPE {id} if (typeDecl->arity() != 0 && typeDecl->child(0)->kind == ARRAY_TY) { typeDecl->popChild(); } } } //------------------------------------------------------------------------ // abstractRecordTypeDecls //------------------------------------------------------------------------ void abstractRecordTypeDecls(FDLContext* ctxt) { for (map::iterator i = ctxt->typeMap.begin(); i != ctxt->typeMap.end(); i++ ) { Node* typeDecl = i->second; // DEF_TYPE {id} type // | DEF_TYPE {id} if (typeDecl->arity() != 0 && typeDecl->child(0)->kind == RECORD_TY) { typeDecl->popChild(); } } } //========================================================================== // Array and record abtraction master function //========================================================================== void abstractArraysRecords(FDLContext* ctxt, Node* unit) { //----------------------------------------------------------------------- // Remove redundant array and record operators //----------------------------------------------------------------------- // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Eliminate array constructors. // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // And add decls for default array and const array constructors. // and add axiom for const array constructor. 
if (option("elim-array-constructors")) elimMkArrays(ctxt, unit); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Eliminate record constructors // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // And add default array decls if (option("elim-record-constructors")) elimMkRecords(ctxt, unit); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Abstract record updates // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - if (option("abstract-record-updates")) abstractRcdUpdates(ctxt, unit); //----------------------------------------------------------------------- // Add array axioms //----------------------------------------------------------------------- // Axioms for axiomatically defining type and axioms for box updates // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add array select update axioms // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - if (option("add-array-select-update-axioms")) addArrayElementUpdateAxioms(ctxt, unit); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add array extensionality axioms // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - if (option("add-array-extensionality-axioms")) addArrayExtensionalityAxioms(ctxt, unit); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Add array box update axioms // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - if (option("add-array-select-box-update-axioms")) addArrayElementBoxUpdateAxioms(ctxt, unit); //----------------------------------------------------------------------- // Add record axioms //----------------------------------------------------------------------- if (option("add-record-select-constructor-axioms")) { addRcdElementMkRcdAxioms(ctxt, unit); } if (option("add-record-constructor-extensionality-axioms")) { addMkRcdElementExtAxioms(ctxt, unit); } if (option("add-record-select-update-axioms")) { addRcdElementUpdateAxioms(ctxt, unit); } if (option("add-record-eq-elements-extensionality-axioms")) { addRcdEqElementsExtAxioms(ctxt, unit); } //----------------------------------------------------------------------- // Introduce aliases for array and record equalities //----------------------------------------------------------------------- if (option("use-array-eq-aliases")) { mapOverWithContext(introArrayEqAlias, ctxt, unit); } if (option("use-record-eq-aliases")) { mapOverWithContext(introRecordEqAlias, ctxt, unit); } if (option("use-array-eq-aliases") || option("use-record-eq-aliases")) { addDeclsAxiomsForArrRecEqAliases(ctxt, unit); } //-------------------------------------------------------------------- // Abstract array operators and types //-------------------------------------------------------------------- if (option("abstract-array-select-updates")) { abstractArraySelectUpdateOps(ctxt, unit); } if (option("abstract-array-box-updates")) { abstractArrayBoxUpdates(ctxt, unit); } if (option("abstract-array-types")) abstractArrayTypeDecls(ctxt); //-------------------------------------------------------------------- // Abstract record operator and types //-------------------------------------------------------------------- if (option("abstract-record-selects-constructors")) { abstractRcdElementsMkRcds(ctxt, unit); } if (option("abstract-record-selects-updates")) { abstractRcdElementsUpdates(ctxt, unit); } if (option("abstract-record-types")) abstractRecordTypeDecls(ctxt); } 
spark-2012.0.deb/victor/vct/src/arith.cc0000644000175000017500000005511611753202341016754 0ustar eugeneugen//======================================================================== //======================================================================== // ARITH.CC //======================================================================== //======================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ //======================================================================== /* Some older documentation on these arithmetic translations - If "elim-consts" option: o Harvest constant and variable definitions of form EQ ID NATNUM{n} or EQ ID (UMINUS NATNUM{n}) from rules and hyps. o Apply substitutions implied by definitions to all rules and goals, except when constant is shadowed by bound variable. This can make syntactically non-linear expressions linear in some cases. - If "ground-eval" option: Evaluate out integer arithmetic expressions involving integer literals and operators PLUS, MINUS, TIMES, IDIV, EXP and UMINUS. Exception thrown if number magnitude exceeds 2^32 - 1. If option not given, just evaluate out EXP occurrences. - If "arith-eval" option: Try hard to bring together integer literals in expressions involving TIMES and IDIV so as to make expression linear. - If "sym-const" option with natural number value k, o Add a new abstract constant declaration for each value occurring in rules or goals above k. o Replace each of these values with the corresponding abstract constant. o Add new rules asserting ordering of abstract constants. If new constants are c1 ... cn, then rules assert that k < c1 < c2 < ... < cn. o New constant for value is named where prefix is either supplied by "sym-prefix" option or defaults to "k@". 
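   As a small worked illustration (the identifiers and numbers here are
   invented for this note, not taken from any particular VC): suppose the
   rules contain the definition  power___max = 65536, a goal mentions both
   power___max and the literal 1000000, and "sym-const" is given the
   threshold 10000.  With "elim-consts", occurrences of power___max are
   first rewritten to 65536; the literals 65536 and 1000000 both exceed
   the threshold, so "sym-const" then replaces them with symbolic
   constants and adds the ordering rules  10000 < k@65536 < k@1000000.
   (The code below actually defaults the prefix to "k___" rather than
   "k@" when "sym-prefix" is not supplied.)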
*/ //======================================================================== // Elimination of arithmetic constant definitions //======================================================================== bool isConstDef(Node* n) { return n->kind == EQ && n->child(0)->kind == CONST && (n->child(1)->kind == NATNUM || ( n->child(1)->kind == UMINUS && n->child(1)->child(0)->kind == NATNUM ) ); } class ApplyConstSubst { private: map constMap; public: ApplyConstSubst(map m) : constMap(m) {} Node* operator() (FDLContext* c, Node* n); }; Node* ApplyConstSubst::operator() (FDLContext* c, Node* n) { if (n->kind != CONST) return n; string id = n->id; map::iterator i = constMap.find(id); if (i != constMap.end()) { return i->second->copy(); } else { return n; } } void elimConsts(FDLContext* ctxt, Node* unit) { map constMap; Node* rules = unit->child(1); for (int i = 0; i != rules->arity(); i++) { Node* rule = rules->child(i); if (isConstDef(rule)) { constMap.insert(make_pair(rule->child(0)->id, rule->child(1))); } } Node* goals = unit->child(2); for (int i = 0; i != goals->arity(); i++) { Node* goal = goals->child(i); if (goal->arity() != 2) continue; Node* hyps = goal->child(0); for (int j = 0; j != hyps->arity(); j++) { Node* hyp = hyps->child(j); if (isConstDef(hyp)) { constMap.insert(make_pair(hyp->child(0)->id, hyp->child(1))); } } } ApplyConstSubst f(constMap); mapOverWithContext(f, ctxt, unit); return; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // evalExp //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Evaluate special case of exponent with natnum arguments. // Superceded by groundEval, but kept for back comparison purposes. void evalExp(Node* n) { if (n->kind == I_EXP && n->child(0)->kind == NATNUM && n->child(1)->kind == NATNUM) { MyInt a1(n->child(0)); MyInt a2(n->child(1)); printMessage(FINEm, "evalExp: evaluating " + a1.toString() + " to power " + a2.toString() ); n->kind = NATNUM; n->id = MyInt::exp(a1,a2).toString(); n->clearChildren(); } return; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // expandExpConst //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // Expand e ** k where k >= 0 is a constant integer. Node* expandExpConst(Node* n) { if ((n->kind == I_EXP || n->kind == R_EXP) && n->child(1)->kind == NATNUM) { Kind timesKind = (n->kind == I_EXP) ? I_TIMES : R_TIMES; Node* result; int k = stringToInt(n->child(1)->id); Node* e = n->child(0); printMessage(FINEm, "expandExpConst: expanding " + kindString(e->kind) + " to power " + intToString(k) ); if (k == 0) { result = n; result->kind = NATNUM; result->id = "1"; result->clearChildren(); if (n->kind == R_EXP) { result = new Node(TO_REAL, result); } } else if (k == 1) { result = e; } else { // k >= 2 result = n; result->kind = timesKind; // child(0) already set to e. result->child(1) = e->copy(); for (int i = 3; i <= k; i++) { // add on ith copy of e result = new Node(timesKind, e->copy(), result); } } return result; } return n; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // abstractNonLinMult //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ bool isRealConst(Node* n) { if (n->kind == R_UMINUS) return isRealConst(n->child(0)); else if (n->kind == RDIV && ! 
option("abstract-real-div")) return isRealConst(n->child(0)) && isRealConst(n->child(1)); else if (n->kind == TO_REAL) return isIntNode(n->child(0)); else return false; } void abstractNonLinMult(Node* n) { if (n->kind == I_TIMES) { if (! (isIntNode(n->child(0)) || isIntNode(n->child(1)))) { n->kind = FUN_AP; n->id = "int___times"; } } else if (n->kind == R_TIMES) { if (! (isRealConst(n->child(0)) || isRealConst(n->child(1)))) { n->kind = FUN_AP; n->id = "real___times"; } } return; } //======================================================================== // Creation of symbolic constants //======================================================================== class BigConsts { private: set bigConsts; MyInt threshold; string prefix; public: void setPrefix(string s) {prefix = s;} vector getNewIds(); BigConsts(string thresholdStr); void operator() (Node* n); // To map over expression Node* getRules(); }; BigConsts::BigConsts(string thresholdStr) { threshold = MyInt(thresholdStr); } void BigConsts::operator() (Node* n) { if (n->kind != NATNUM) return; MyInt val(n); if (threshold < val) { bigConsts.insert(val); n->kind = CONST; n->id = prefix + (n->id); } return; } vector BigConsts::getNewIds() { vector result; for (set::iterator i = bigConsts.begin(); i != bigConsts.end(); i++) { result.push_back(prefix + (*i).toString()); } return result; } Node* BigConsts::getRules() { Node* rules = new Node(SEQ); Node* prev = threshold.toNode(); for (set::iterator i = bigConsts.begin(); i != bigConsts.end(); i++) { Node* current = new Node(CONST, prefix + (*i).toString()); rules->addChild(new Node(I_LT, prev, current)); prev = current; } return rules; } //======================================================================== // Introduce symbolic constants //======================================================================== void introSymbolicConsts(FDLContext* ctxt, Node* unit) { string thresholdStr = optionVal("sym-consts"); BigConsts* constSet = new BigConsts(thresholdStr); string prefix; if (option("sym-prefix")) prefix = optionVal("sym-prefix"); else prefix = "k___"; constSet->setPrefix(prefix); unit->mapOver(* constSet); // Add new decls to existing decls vector newConsts(constSet->getNewIds()); for (int i = 0; i != (int) newConsts.size(); i++) { ctxt->insert(new Node( DEF_CONST, newConsts.at(i), Node::int_ty ) ); } // Add new rules to existing rules Node* newRules = constSet->getRules(); Node* currentRules = unit->child(1); currentRules->appendChildren(newRules); } //======================================================================== // Eliminate I_SUCC and I_PRED //======================================================================== Node* elimIntSuccPred(Node* n) { if (n->kind == I_SUCC) { n->kind = I_PLUS; n->addChild(new Node(NATNUM,"1")); } else if (n->kind == I_PRED) { n->kind = I_MINUS; n->addChild(new Node(NATNUM,"1")); } return n; } void elimIntSuccPreds(Node* unit) { unit->mapOver(elimIntSuccPred); } //======================================================================== // Abstract I_EXP and R_EXP //======================================================================== Node* abstractExp(Node* n) { switch(n->kind) { case I_EXP: return n->updateKindAndId(FUN_AP,"int___exp"); case R_EXP: return n->updateKindAndId(FUN_AP,"real___exp"); default: return n; } } void abstractExps(Node* unit) { unit->mapOver1(abstractExp); } //======================================================================== // Abstract IDIV and MOD 
//======================================================================== Node* abstractDivMod(Node* n) { switch(n->kind) { case IDIV: return n->updateKindAndId(FUN_AP,"int___div"); case MOD: return n->updateKindAndId(FUN_AP,"int___mod"); default: return n; } } void abstractDivMods(Node* unit) { unit->mapOver1(abstractDivMod); } //======================================================================== // Introduce axioms relating IDIV and MOD to IDIV_E and MOD_E //======================================================================== // IDIV_E and MOD_E are the primitives supported by SMTLIB2. /* In email to SMTLIB mail list (11/8/08), John Harrison argued for SMTLIB2 to use the Euclidean definitions of IDIV and MOD, as discussed in Raymond Boute: "The Euclidean definition of the functions div and mod". ACM TOPLAS vol. 14 (1992), pp127-144 He used this definition for HOL Light. Following Boute paper, we introduce explicit Kinds for each of the 3 kinds of div and mod. IDIV_T, MOD_T: Division truncates, rounds towards zero. IDIV_F, MOD_F: Division takes integer floor, rounds towards -infinity. IDIV_E, MOD_E: Euclidean division. Floor if divisor +ve Ceiling if divisor -ve Ensures that 0 <= MOD_E x y < |y|. All 3 obey remainder law: x = y * (x div y) + x mod y Coming from FDL, IDIV is IDIV_T, MOD is MOD_F. */ // i | j =def j mod_e i == 0 #define nDIVIDES(i,j) nEQ(nMOD_E((j),(i)), nNATNUM("0"), nINT_TY) // Axioms in terms of primitive IDIV and MOD, so abstraction of these // should occur after. void addEuclideanIdivModAxioms(Node* unit) { Node* rules = unit->child(1); // Axioms phrased so more awkward cases are as restricted as // possible, they have stronger preconditions. rules->addChild( // y > 0 => // MOD x y = MOD_E x y nFORALL2("X", nINT_TY, "Y", nINT_TY, nIMPLIES(nI_GT(nVAR("Y"),nNATNUM("0")), nI_EQ(nMOD(nVAR("X"), nVAR("Y")), nMOD_E(nVAR("X"), nVAR("Y"))))) ); // y < 0 & y | x => // MOD x y = MOD_E x y rules->addChild( nFORALL2("X", nINT_TY, "Y", nINT_TY, nIMPLIES(nAND(nI_LT(nVAR("Y"),nNATNUM("0")), nDIVIDES(nVAR("Y"),nVAR("X"))), nI_EQ(nMOD(nVAR("X"), nVAR("Y")), nMOD_E(nVAR("X"), nVAR("Y"))))) ); // y < 0 & ~(y | x) => // MOD x y = (MOD_E x y) + y rules->addChild( nFORALL2("X", nINT_TY, "Y", nINT_TY, nIMPLIES(nAND(nI_LT(nVAR("Y"),nNATNUM("0")), nNOT(nDIVIDES(nVAR("Y"),nVAR("X")))), nI_EQ(nMOD(nVAR("X"), nVAR("Y")), nI_PLUS(nMOD_E(nVAR("X"), nVAR("Y")), nVAR("Y"))))) ); // x >= 0 & y > 0 => // IDIV x y = IDIV_E x y Both rounding down rules->addChild( nFORALL2("X", nINT_TY, "Y", nINT_TY, nIMPLIES(nAND(nI_GE(nVAR("X"),nNATNUM("0")), nI_GT(nVAR("Y"),nNATNUM("0"))), nI_EQ(nIDIV(nVAR("X"), nVAR("Y")), nIDIV_E(nVAR("X"), nVAR("Y"))))) ); // x < 0 & y > 0 & y | x => // IDIV x y = IDIV_E x y rules->addChild( nFORALL2("X", nINT_TY, "Y", nINT_TY, nIMPLIES(nAND(nI_LT(nVAR("X"),nNATNUM("0")), nAND(nI_GT(nVAR("Y"),nNATNUM("0")), nDIVIDES(nVAR("Y"), nVAR("X")))), nI_EQ(nIDIV(nVAR("X"), nVAR("Y")), nIDIV_E(nVAR("X"), nVAR("Y"))))) ); // x < 0 & y > 0 & ~(y | x) => // IDIV x y = IDIV_E x y + 1 // // IDIV -ve so rounding up, IDIV_E rounding down rules->addChild( nFORALL2("X", nINT_TY, "Y", nINT_TY, nIMPLIES(nAND(nI_LT(nVAR("X"),nNATNUM("0")), nAND(nI_GT(nVAR("Y"),nNATNUM("0")), nNOT(nDIVIDES(nVAR("Y"), nVAR("X"))))), nI_EQ(nIDIV(nVAR("X"), nVAR("Y")), nI_PLUS(nIDIV_E(nVAR("X"), nVAR("Y")), nNATNUM("1"))))) ); // x >= 0 & y < 0 => // IDIV x y = IDIV_E x y Both rounding up rules->addChild( nFORALL2("X", nINT_TY, "Y", nINT_TY, nIMPLIES(nAND(nI_GE(nVAR("X"),nNATNUM("0")), 
nI_LT(nVAR("Y"),nNATNUM("0"))), nI_EQ(nIDIV(nVAR("X"), nVAR("Y")), nIDIV_E(nVAR("X"), nVAR("Y"))))) ); // x < 0 & y < 0 & y | x => // IDIV x y = IDIV_E x y rules->addChild( nFORALL2("X", nINT_TY, "Y", nINT_TY, nIMPLIES(nAND(nI_LT(nVAR("X"),nNATNUM("0")), nAND(nI_LT(nVAR("Y"),nNATNUM("0")), nDIVIDES(nVAR("Y"), nVAR("X")))), nI_EQ(nIDIV(nVAR("X"), nVAR("Y")), nIDIV_E(nVAR("X"), nVAR("Y"))))) ); // x < 0 & y < 0 & ~(y | x) => // IDIV x y = IDIV_E x y - 1 // // IDIV rounding down, IDIV_E rounding up rules->addChild( nFORALL2("X", nINT_TY, "Y", nINT_TY, nIMPLIES(nAND(nI_LT(nVAR("X"),nNATNUM("0")), nAND(nI_LT(nVAR("Y"),nNATNUM("0")), nNOT(nDIVIDES(nVAR("Y"), nVAR("X"))))), nI_EQ(nIDIV(nVAR("X"), nVAR("Y")), nI_MINUS(nIDIV_E(nVAR("X"), nVAR("Y")), nNATNUM("1"))))) ); return; } //======================================================================== // Abstract RDIV //======================================================================== Node* abstractRealDiv(Node* n) { switch(n->kind) { case RDIV: return n->updateKindAndId(FUN_AP,"real___div"); default: return n; } } void abstractRealDivs(Node* unit) { unit->mapOver1(abstractRealDiv); } //======================================================================== // Abstract Reals //======================================================================== // Abstract all real operators and real type // Assume declarations added already in prelude.fdl Node* abstractRealOpRelType(Node* n) { switch(n->kind) { case TO_REAL: return n->updateKindAndId(FUN_AP,"int___to_real"); case R_UMINUS: return n->updateKindAndId(FUN_AP,"real___uminus"); case R_PLUS: return n->updateKindAndId(FUN_AP,"real___plus"); case R_MINUS: return n->updateKindAndId(FUN_AP,"real___minus"); case R_TIMES: return n->updateKindAndId(FUN_AP,"real___times"); case RDIV: return n->updateKindAndId(FUN_AP,"real___div"); case R_LT: return n->updateKindAndId(FUN_AP,"real___lt"); case R_LE: return n->updateKindAndId(FUN_AP,"real___le"); case REAL_TY: return new Node(TYPE_ID, "real___type"); default: return n; } } Node* abstractRealType(Node* n) { switch(n->kind) { case REAL_TY: return new Node(TYPE_ID, "real___type"); default: return n; } } void abstractRealOpsRelsType(FDLContext* ctxt, Node* unit) { unit->mapOver1(abstractRealOpRelType); ctxt->termSeq.mapOver1(abstractRealType); ctxt->typeSeq.mapOver1(abstractRealType); ctxt->insert(new Node(DEF_TYPE, "real___type")); ctxt->insert(new Node(DECL_FUN, "real___le", new Node(SEQ, new Node(TYPE_ID, "real___type"), new Node(TYPE_ID, "real___type")), Node::bool_ty)); ctxt->insert(new Node(DECL_FUN, "real___lt", new Node(SEQ, new Node(TYPE_ID, "real___type"), new Node(TYPE_ID, "real___type")), Node::bool_ty)); return; } //======================================================================== // Define compound arithmetic transformation functions //======================================================================== //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // arithSimp //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ void arithSimp(FDLContext* ctxt, Node* unit) { //-------------------------------------------------------------------- // Eliminate constants that have values supplied in rules //-------------------------------------------------------------------- if (option("elim-consts")) { elimConsts(ctxt, unit); } //-------------------------------------------------------------------- // Evaluate ground integer expressions. 
//-------------------------------------------------------------------- if (option("ground-eval-exp")) { unit->mapOver(evalExp); } if (option("ground-eval")) { groundEval(unit); // Eval all constant arith expressions, inc EXP. } //-------------------------------------------------------------------- // Expand constant powers. //-------------------------------------------------------------------- // Only expect this to be useful when prover can handle non-linear // arithmetic. if (option("expand-exp-const")) { unit->mapOver1(expandExpConst); } //-------------------------------------------------------------------- // Make arithmetic expressions more linear //-------------------------------------------------------------------- // Normalises expressions involving div and multiplication by constants. if (option("arith-eval")) constArithEval(unit); return; } //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ // arithAbstract //++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ void arithAbstract(FDLContext* ctxt, Node* unit) { //-------------------------------------------------------------------- // Introduce symbolic constants for large integers //-------------------------------------------------------------------- if (option("sym-consts")) introSymbolicConsts(ctxt, unit); //-------------------------------------------------------------------- // Abstract non-linear multiplications (R_TIMES and I_TIMES) to UIFs //-------------------------------------------------------------------- if (option("abstract-nonlin-times")) unit->mapOver(abstractNonLinMult); //-------------------------------------------------------------------- // Replace exponent functions (I_EXP and R_EXP) by UIFs //-------------------------------------------------------------------- if (option("abstract-exp")) abstractExps(unit); //-------------------------------------------------------------------- // Relate (IDIV) and modulus (MOD) to Euclidean definitions //-------------------------------------------------------------------- if (option("use-euclidean-divmod")) addEuclideanIdivModAxioms(unit); //-------------------------------------------------------------------- // Replace integer division (IDIV) and modulus (MOD) by UIFs //-------------------------------------------------------------------- if (option("abstract-divmod")) abstractDivMods(unit); //-------------------------------------------------------------------- // Replace real division (RDIV) by UIF //-------------------------------------------------------------------- if (option("abstract-real-div")) abstractRealDivs(unit); //-------------------------------------------------------------------- // Abstract real operators, relations and type //-------------------------------------------------------------------- if (option("abstract-reals")) abstractRealOpsRelsType(ctxt, unit); return; } spark-2012.0.deb/victor/vct/src/typesort.cc0000644000175000017500000000747711753202341017545 0ustar eugeneugen//========================================================================== //========================================================================== // TYPESORT.CC //========================================================================== //========================================================================== /* This file is part of Victor: a SPARK VC Translator and Prover Driver. 
Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ #include #include #include "typesort.hh" using std::vector; using std::string; using std::set; #include "utility.hh" // For string manipulation functions #include void TypeSort::dfs(vector vs) { for (vector::iterator i = vs.begin(); i != vs.end(); i++ ) { Wrapper* v = *i; if (v->colour == GREY) throw std::runtime_error("Circular dependency in type decls"); else if (v->colour == BLACK) continue; else { v->colour = GREY; dfs(v->children); v->colour = BLACK; outNodes->addChild(v->node); } } } map TypeSort::wmap; Node* TypeSort::outNodes; Node* TypeSort::sort(Node* typeDecls) { vector inVertices; Node* newDecls = new Node(z::SEQ); // Initialise wmap.clear(); outNodes = new Node(z::SEQ); for (int i = 0; i != typeDecls->arity(); i++) { Node* typeDecl = typeDecls->child(i); Wrapper* w = new Wrapper(typeDecl); inVertices.push_back(w); wmap.insert(std::make_pair(typeDecl->id, w)); } // Build dependency graph // Iterate over wrapped initial declarations, adding child info // Further abstract declarations are added when missing for (int i = 0; i != typeDecls->arity(); i++) { Wrapper* w = inVertices.at(i); set typeIds = w->node->getIds(z::TYPE_ID); for (std::set::const_iterator j = typeIds.begin(); j != typeIds.end(); j++) { string id = *j; // Create declaration for undeclared type ids if ( wmap.count(id) == 0) { printMessage(WARNINGm, "Creating abstract type decl for type id: " + id); Node* newDecl = new Node(z::DEF_TYPE, id); newDecls->addChild(newDecl); Wrapper* newW = new Wrapper(newDecl); wmap.insert(std::make_pair(id, newW)); inVertices.push_back(newW); } Wrapper* wrappedIdDecl = wmap.find(id)->second; w->children.push_back(wrappedIdDecl); } } // Do sort. 
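// For example (type names invented for this comment): if the input
// declarations are  DEF_TYPE{pair}(RECORD_TY ... TYPE_ID elem)  and
// DEF_TYPE{stack}(ARRAY_TY ... TYPE_ID pair), and "elem" has no declaration
// of its own, then an abstract DEF_TYPE{elem} is created (with a warning)
// and the depth-first search emits the declarations in dependency order:
// elem, pair, stack.  A genuine cycle raises the
// "Circular dependency in type decls" exception; the catch block below only
// frees the wrappers before re-throwing it.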
try { dfs(inVertices); } catch (std::runtime_error r) { for (vector::iterator i = inVertices.begin(); i != inVertices.end(); i++ ) { delete *i; } throw; } // Cleanup for (vector::iterator i = inVertices.begin(); i != inVertices.end(); i++ ) { delete *i; } return new Node(z::SEQ, outNodes, newDecls); } spark-2012.0.deb/victor/vct/src/lex.yy_sexp.cc0000644000175000017500000015055411753202341020136 0ustar eugeneugen#line 2 "lex.yy_sexp.cc" #line 4 "lex.yy_sexp.cc" #define YY_INT_ALIGNED short int /* A lexical scanner generated by flex */ /* %not-for-header */ /* %if-c-only */ /* %if-not-reentrant */ #define yy_create_buffer yy_sexp_create_buffer #define yy_delete_buffer yy_sexp_delete_buffer #define yy_flex_debug yy_sexp_flex_debug #define yy_init_buffer yy_sexp_init_buffer #define yy_flush_buffer yy_sexp_flush_buffer #define yy_load_buffer_state yy_sexp_load_buffer_state #define yy_switch_to_buffer yy_sexp_switch_to_buffer #define yyin yy_sexpin #define yyleng yy_sexpleng #define yylex yy_sexplex #define yylineno yy_sexplineno #define yyout yy_sexpout #define yyrestart yy_sexprestart #define yytext yy_sexptext #define yywrap yy_sexpwrap #define yyalloc yy_sexpalloc #define yyrealloc yy_sexprealloc #define yyfree yy_sexpfree /* %endif */ /* %endif */ /* %ok-for-header */ #define FLEX_SCANNER #define YY_FLEX_MAJOR_VERSION 2 #define YY_FLEX_MINOR_VERSION 5 #define YY_FLEX_SUBMINOR_VERSION 35 #if YY_FLEX_SUBMINOR_VERSION > 0 #define FLEX_BETA #endif /* %if-c++-only */ /* %endif */ /* %if-c-only */ /* %endif */ /* %if-c-only */ /* %endif */ /* First, we deal with platform-specific or compiler-specific issues. */ /* begin standard C headers. */ /* %if-c-only */ #include #include #include #include /* %endif */ /* %if-tables-serialization */ /* %endif */ /* end standard C headers. */ /* %if-c-or-c++ */ /* flex integer type definitions */ #ifndef FLEXINT_H #define FLEXINT_H /* C99 systems have . Non-C99 systems may or may not. */ #if defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L /* C99 says to define __STDC_LIMIT_MACROS before including stdint.h, * if you want the limit (max/min) macros for int types. */ #ifndef __STDC_LIMIT_MACROS #define __STDC_LIMIT_MACROS 1 #endif #include typedef int8_t flex_int8_t; typedef uint8_t flex_uint8_t; typedef int16_t flex_int16_t; typedef uint16_t flex_uint16_t; typedef int32_t flex_int32_t; typedef uint32_t flex_uint32_t; #else typedef signed char flex_int8_t; typedef short int flex_int16_t; typedef int flex_int32_t; typedef unsigned char flex_uint8_t; typedef unsigned short int flex_uint16_t; typedef unsigned int flex_uint32_t; #endif /* ! C99 */ /* Limits of integral types. */ #ifndef INT8_MIN #define INT8_MIN (-128) #endif #ifndef INT16_MIN #define INT16_MIN (-32767-1) #endif #ifndef INT32_MIN #define INT32_MIN (-2147483647-1) #endif #ifndef INT8_MAX #define INT8_MAX (127) #endif #ifndef INT16_MAX #define INT16_MAX (32767) #endif #ifndef INT32_MAX #define INT32_MAX (2147483647) #endif #ifndef UINT8_MAX #define UINT8_MAX (255U) #endif #ifndef UINT16_MAX #define UINT16_MAX (65535U) #endif #ifndef UINT32_MAX #define UINT32_MAX (4294967295U) #endif #endif /* ! FLEXINT_H */ /* %endif */ /* %if-c++-only */ /* %endif */ #ifdef __cplusplus /* The "const" storage-class-modifier is valid. */ #define YY_USE_CONST #else /* ! __cplusplus */ /* C99 requires __STDC__ to be defined as 1. */ #if defined (__STDC__) #define YY_USE_CONST #endif /* defined (__STDC__) */ #endif /* ! 
__cplusplus */ #ifdef YY_USE_CONST #define yyconst const #else #define yyconst #endif /* %not-for-header */ /* Returned upon end-of-file. */ #define YY_NULL 0 /* %ok-for-header */ /* %not-for-header */ /* Promotes a possibly negative, possibly signed char to an unsigned * integer for use as an array index. If the signed char is negative, * we want to instead treat it as an 8-bit unsigned char, hence the * double cast. */ #define YY_SC_TO_UI(c) ((unsigned int) (unsigned char) c) /* %ok-for-header */ /* %if-reentrant */ /* %endif */ /* %if-not-reentrant */ /* %endif */ /* Enter a start condition. This macro really ought to take a parameter, * but we do it the disgusting crufty way forced on us by the ()-less * definition of BEGIN. */ #define BEGIN (yy_start) = 1 + 2 * /* Translate the current start state into a value that can be later handed * to BEGIN to return to the state. The YYSTATE alias is for lex * compatibility. */ #define YY_START (((yy_start) - 1) / 2) #define YYSTATE YY_START /* Action number for EOF rule of a given start state. */ #define YY_STATE_EOF(state) (YY_END_OF_BUFFER + state + 1) /* Special action meaning "start processing a new file". */ #define YY_NEW_FILE yy_sexprestart(yy_sexpin ) #define YY_END_OF_BUFFER_CHAR 0 /* Size of default input buffer. */ #ifndef YY_BUF_SIZE #define YY_BUF_SIZE 16384 #endif /* The state buf must be large enough to hold one state per character in the main buffer. */ #define YY_STATE_BUF_SIZE ((YY_BUF_SIZE + 2) * sizeof(yy_state_type)) #ifndef YY_TYPEDEF_YY_BUFFER_STATE #define YY_TYPEDEF_YY_BUFFER_STATE typedef struct yy_buffer_state *YY_BUFFER_STATE; #endif /* %if-not-reentrant */ extern int yy_sexpleng; /* %endif */ /* %if-c-only */ /* %if-not-reentrant */ extern FILE *yy_sexpin, *yy_sexpout; /* %endif */ /* %endif */ #define EOB_ACT_CONTINUE_SCAN 0 #define EOB_ACT_END_OF_FILE 1 #define EOB_ACT_LAST_MATCH 2 #define YY_LESS_LINENO(n) /* Return all but the first "n" matched characters back to the input stream. */ #define yyless(n) \ do \ { \ /* Undo effects of setting up yy_sexptext. */ \ int yyless_macro_arg = (n); \ YY_LESS_LINENO(yyless_macro_arg);\ *yy_cp = (yy_hold_char); \ YY_RESTORE_YY_MORE_OFFSET \ (yy_c_buf_p) = yy_cp = yy_bp + yyless_macro_arg - YY_MORE_ADJ; \ YY_DO_BEFORE_ACTION; /* set up yy_sexptext again */ \ } \ while ( 0 ) #define unput(c) yyunput( c, (yytext_ptr) ) #ifndef YY_TYPEDEF_YY_SIZE_T #define YY_TYPEDEF_YY_SIZE_T typedef size_t yy_size_t; #endif #ifndef YY_STRUCT_YY_BUFFER_STATE #define YY_STRUCT_YY_BUFFER_STATE struct yy_buffer_state { /* %if-c-only */ FILE *yy_input_file; /* %endif */ /* %if-c++-only */ /* %endif */ char *yy_ch_buf; /* input buffer */ char *yy_buf_pos; /* current position in input buffer */ /* Size of input buffer in bytes, not including room for EOB * characters. */ yy_size_t yy_buf_size; /* Number of characters read into yy_ch_buf, not including EOB * characters. */ int yy_n_chars; /* Whether we "own" the buffer - i.e., we know we created it, * and can realloc() it to grow it, and should free() it to * delete it. */ int yy_is_our_buffer; /* Whether this is an "interactive" input source; if so, and * if we're using stdio for input, then we want to use getc() * instead of fread(), to make sure we stop fetching input after * each newline. */ int yy_is_interactive; /* Whether we're considered to be at the beginning of a line. * If so, '^' rules will be active on the next match, otherwise * not. */ int yy_at_bol; int yy_bs_lineno; /**< The line count. */ int yy_bs_column; /**< The column count. 
*/ /* Whether to try to fill the input buffer when we reach the * end of it. */ int yy_fill_buffer; int yy_buffer_status; #define YY_BUFFER_NEW 0 #define YY_BUFFER_NORMAL 1 /* When an EOF's been seen but there's still some text to process * then we mark the buffer as YY_EOF_PENDING, to indicate that we * shouldn't try reading from the input source any more. We might * still have a bunch of tokens to match, though, because of * possible backing-up. * * When we actually see the EOF, we change the status to "new" * (via yy_sexprestart()), so that the user can continue scanning by * just pointing yy_sexpin at a new input file. */ #define YY_BUFFER_EOF_PENDING 2 }; #endif /* !YY_STRUCT_YY_BUFFER_STATE */ /* %if-c-only Standard (non-C++) definition */ /* %not-for-header */ /* %if-not-reentrant */ /* Stack of input buffers. */ static size_t yy_buffer_stack_top = 0; /**< index of top of stack. */ static size_t yy_buffer_stack_max = 0; /**< capacity of stack. */ static YY_BUFFER_STATE * yy_buffer_stack = 0; /**< Stack as an array. */ /* %endif */ /* %ok-for-header */ /* %endif */ /* We provide macros for accessing buffer states in case in the * future we want to put the buffer states in a more general * "scanner state". * * Returns the top of the stack, or NULL. */ #define YY_CURRENT_BUFFER ( (yy_buffer_stack) \ ? (yy_buffer_stack)[(yy_buffer_stack_top)] \ : NULL) /* Same as previous macro, but useful when we know that the buffer stack is not * NULL or when we need an lvalue. For internal use only. */ #define YY_CURRENT_BUFFER_LVALUE (yy_buffer_stack)[(yy_buffer_stack_top)] /* %if-c-only Standard (non-C++) definition */ /* %if-not-reentrant */ /* %not-for-header */ /* yy_hold_char holds the character lost when yy_sexptext is formed. */ static char yy_hold_char; static int yy_n_chars; /* number of characters read into yy_ch_buf */ int yy_sexpleng; /* Points to current character in buffer. */ static char *yy_c_buf_p = (char *) 0; static int yy_init = 0; /* whether we need to initialize */ static int yy_start = 0; /* start state number */ /* Flag which is used to allow yy_sexpwrap()'s to do buffer switches * instead of setting up a fresh yy_sexpin. A bit of a hack ... */ static int yy_did_buffer_switch_on_eof; /* %ok-for-header */ /* %endif */ void yy_sexprestart (FILE *input_file ); void yy_sexp_switch_to_buffer (YY_BUFFER_STATE new_buffer ); YY_BUFFER_STATE yy_sexp_create_buffer (FILE *file,int size ); void yy_sexp_delete_buffer (YY_BUFFER_STATE b ); void yy_sexp_flush_buffer (YY_BUFFER_STATE b ); void yy_sexppush_buffer_state (YY_BUFFER_STATE new_buffer ); void yy_sexppop_buffer_state (void ); static void yy_sexpensure_buffer_stack (void ); static void yy_sexp_load_buffer_state (void ); static void yy_sexp_init_buffer (YY_BUFFER_STATE b,FILE *file ); #define YY_FLUSH_BUFFER yy_sexp_flush_buffer(YY_CURRENT_BUFFER ) YY_BUFFER_STATE yy_sexp_scan_buffer (char *base,yy_size_t size ); YY_BUFFER_STATE yy_sexp_scan_string (yyconst char *yy_str ); YY_BUFFER_STATE yy_sexp_scan_bytes (yyconst char *bytes,int len ); /* %endif */ void *yy_sexpalloc (yy_size_t ); void *yy_sexprealloc (void *,yy_size_t ); void yy_sexpfree (void * ); #define yy_new_buffer yy_sexp_create_buffer #define yy_set_interactive(is_interactive) \ { \ if ( ! YY_CURRENT_BUFFER ){ \ yy_sexpensure_buffer_stack (); \ YY_CURRENT_BUFFER_LVALUE = \ yy_sexp_create_buffer(yy_sexpin,YY_BUF_SIZE ); \ } \ YY_CURRENT_BUFFER_LVALUE->yy_is_interactive = is_interactive; \ } #define yy_set_bol(at_bol) \ { \ if ( ! 
YY_CURRENT_BUFFER ){\ yy_sexpensure_buffer_stack (); \ YY_CURRENT_BUFFER_LVALUE = \ yy_sexp_create_buffer(yy_sexpin,YY_BUF_SIZE ); \ } \ YY_CURRENT_BUFFER_LVALUE->yy_at_bol = at_bol; \ } #define YY_AT_BOL() (YY_CURRENT_BUFFER_LVALUE->yy_at_bol) /* %% [1.0] yy_sexptext/yy_sexpin/yy_sexpout/yy_state_type/yy_sexplineno etc. def's & init go here */ /* Begin user sect3 */ #define yy_sexpwrap(n) 1 #define YY_SKIP_YYWRAP #define FLEX_DEBUG typedef unsigned char YY_CHAR; FILE *yy_sexpin = (FILE *) 0, *yy_sexpout = (FILE *) 0; typedef int yy_state_type; extern int yy_sexplineno; int yy_sexplineno = 1; extern char *yy_sexptext; #define yytext_ptr yy_sexptext /* %if-c-only Standard (non-C++) definition */ static yy_state_type yy_get_previous_state (void ); static yy_state_type yy_try_NUL_trans (yy_state_type current_state ); static int yy_get_next_buffer (void ); static void yy_fatal_error (yyconst char msg[] ); /* %endif */ /* Done after the current pattern has been matched and before the * corresponding action - sets up yy_sexptext. */ #define YY_DO_BEFORE_ACTION \ (yytext_ptr) = yy_bp; \ /* %% [2.0] code to fiddle yy_sexptext and yy_sexpleng for yymore() goes here \ */\ yy_sexpleng = (size_t) (yy_cp - yy_bp); \ (yy_hold_char) = *yy_cp; \ *yy_cp = '\0'; \ /* %% [3.0] code to copy yytext_ptr to yy_sexptext[] goes here, if %array \ */\ (yy_c_buf_p) = yy_cp; /* %% [4.0] data tables for the DFA and the user's section 1 definitions go here */ #define YY_NUM_RULES 7 #define YY_END_OF_BUFFER 8 /* This struct is not used in this scanner, but its presence is necessary. */ struct yy_trans_info { flex_int32_t yy_verify; flex_int32_t yy_nxt; }; static yyconst flex_int16_t yy_accept[22] = { 0, 0, 0, 0, 0, 0, 0, 8, 7, 1, 4, 2, 3, 5, 5, 7, 1, 4, 5, 0, 6, 0 } ; static yyconst flex_int32_t yy_ec[256] = { 0, 1, 1, 1, 1, 1, 1, 1, 1, 2, 3, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 4, 1, 1, 4, 4, 4, 1, 5, 6, 4, 4, 1, 4, 4, 4, 7, 8, 8, 8, 8, 8, 8, 8, 8, 8, 1, 9, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 1, 1, 1, 4, 1, 1, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 1, 1, 1, 4, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 } ; static yyconst flex_int32_t yy_meta[10] = { 0, 1, 2, 2, 3, 1, 1, 4, 4, 1 } ; static yyconst flex_int16_t yy_base[27] = { 0, 0, 0, 0, 0, 0, 0, 24, 25, 0, 0, 25, 25, 25, 0, 20, 0, 0, 0, 19, 25, 25, 9, 19, 11, 16, 15 } ; static yyconst flex_int16_t yy_def[27] = { 0, 21, 1, 22, 22, 22, 22, 21, 21, 23, 24, 21, 21, 21, 25, 26, 23, 24, 25, 26, 21, 0, 21, 21, 21, 21, 21 } ; static yyconst flex_int16_t yy_nxt[35] = { 0, 8, 9, 9, 10, 11, 12, 13, 14, 15, 8, 8, 8, 8, 17, 17, 19, 19, 19, 19, 18, 16, 20, 20, 21, 7, 21, 21, 21, 21, 21, 21, 21, 21, 21 } ; static yyconst flex_int16_t yy_chk[35] = { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 22, 22, 22, 22, 24, 24, 26, 26, 26, 26, 25, 23, 19, 15, 7, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21 } ; static yy_state_type yy_last_accepting_state; static char *yy_last_accepting_cpos; extern int yy_sexp_flex_debug; int yy_sexp_flex_debug = 1; static yyconst flex_int16_t yy_rule_linenum[7] = { 0, 75, 78, 79, 82, 87, 91 } ; /* The intent behind this 
definition is that it'll catch * any uses of REJECT which flex missed. */ #define REJECT reject_used_but_not_detected #define yymore() yymore_used_but_not_detected #define YY_MORE_ADJ 0 #define YY_RESTORE_YY_MORE_OFFSET char *yy_sexptext; #line 1 "sexp-lexer.ll" /* ============================================================================= ============================================================================= LEXER.LL ============================================================================= ============================================================================= This file is part of Victor: a SPARK VC Translator and Prover Driver. Copyright (C) 2009, 2010 University of Edinburgh Author(s): Paul Jackson Victor is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Victor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. */ /* Lexer for S-expressions Assume flex run with option -Pyy_sexp to change yy prefix to yy_sexp. Main visible effect is that yy_sexplex is now called yy_explex */ /* Structure influenced by calc++ example distributed with Bison. */ #line 50 "sexp-lexer.ll" #include #include #include "sexp-parser.tab.hh" /* For token type */ #include "sexp-lexer.hh" /* For YY_DECL definition */ #define yyterminate() return tok::FILE_END typedef yy_sexp::parser::token tok; #line 621 "lex.yy_sexp.cc" #define INITIAL 0 #define body 1 #define goalorigins 2 #ifndef YY_NO_UNISTD_H /* Special case for "unistd.h", since it is non-ANSI. We include it way * down here because we want the user's section 1 to have been scanned first. * The user has a chance to override it with an option. */ /* %if-c-only */ #include /* %endif */ /* %if-c++-only */ /* %endif */ #endif #ifndef YY_EXTRA_TYPE #define YY_EXTRA_TYPE void * #endif /* %if-c-only Reentrant structure and macros (non-C++). */ /* %if-reentrant */ /* %if-c-only */ static int yy_init_globals (void ); /* %endif */ /* %if-reentrant */ /* %endif */ /* %endif End reentrant structures and macros. */ /* Accessor methods to globals. These are made visible to non-reentrant scanners for convenience. */ int yy_sexplex_destroy (void ); int yy_sexpget_debug (void ); void yy_sexpset_debug (int debug_flag ); YY_EXTRA_TYPE yy_sexpget_extra (void ); void yy_sexpset_extra (YY_EXTRA_TYPE user_defined ); FILE *yy_sexpget_in (void ); void yy_sexpset_in (FILE * in_str ); FILE *yy_sexpget_out (void ); void yy_sexpset_out (FILE * out_str ); int yy_sexpget_leng (void ); char *yy_sexpget_text (void ); int yy_sexpget_lineno (void ); void yy_sexpset_lineno (int line_number ); /* %if-bison-bridge */ /* %endif */ /* Macros after this point can all be overridden by user definitions in * section 1. 
*/ #ifndef YY_SKIP_YYWRAP #ifdef __cplusplus extern "C" int yy_sexpwrap (void ); #else extern int yy_sexpwrap (void ); #endif #endif /* %not-for-header */ /* %ok-for-header */ /* %endif */ #ifndef yytext_ptr static void yy_flex_strncpy (char *,yyconst char *,int ); #endif #ifdef YY_NEED_STRLEN static int yy_flex_strlen (yyconst char * ); #endif #ifndef YY_NO_INPUT /* %if-c-only Standard (non-C++) definition */ /* %not-for-header */ #ifdef __cplusplus static int yyinput (void ); #else static int input (void ); #endif /* %ok-for-header */ /* %endif */ #endif /* %if-c-only */ /* %endif */ /* Amount of stuff to slurp up with each read. */ #ifndef YY_READ_BUF_SIZE #define YY_READ_BUF_SIZE 8192 #endif /* Copy whatever the last rule matched to the standard output. */ #ifndef ECHO /* %if-c-only Standard (non-C++) definition */ /* This used to be an fputs(), but since the string might contain NUL's, * we now use fwrite(). */ #define ECHO do { if (fwrite( yy_sexptext, yy_sexpleng, 1, yy_sexpout )) {} } while (0) /* %endif */ /* %if-c++-only C++ definition */ /* %endif */ #endif /* Gets input and stuffs it into "buf". number of characters read, or YY_NULL, * is returned in "result". */ #ifndef YY_INPUT #define YY_INPUT(buf,result,max_size) \ /* %% [5.0] fread()/read() definition of YY_INPUT goes here unless we're doing C++ \ */\ if ( YY_CURRENT_BUFFER_LVALUE->yy_is_interactive ) \ { \ int c = '*'; \ unsigned n; \ for ( n = 0; n < max_size && \ (c = getc( yy_sexpin )) != EOF && c != '\n'; ++n ) \ buf[n] = (char) c; \ if ( c == '\n' ) \ buf[n++] = (char) c; \ if ( c == EOF && ferror( yy_sexpin ) ) \ YY_FATAL_ERROR( "input in flex scanner failed" ); \ result = n; \ } \ else \ { \ errno=0; \ while ( (result = fread(buf, 1, max_size, yy_sexpin))==0 && ferror(yy_sexpin)) \ { \ if( errno != EINTR) \ { \ YY_FATAL_ERROR( "input in flex scanner failed" ); \ break; \ } \ errno=0; \ clearerr(yy_sexpin); \ } \ }\ \ /* %if-c++-only C++ definition \ */\ /* %endif */ #endif /* No semi-colon after return; correct usage is to write "yyterminate();" - * we don't want an extra ';' after the "return" because that will cause * some compilers to complain about unreachable statements. */ #ifndef yyterminate #define yyterminate() return YY_NULL #endif /* Number of entries by which start-condition stack grows. */ #ifndef YY_START_STACK_INCR #define YY_START_STACK_INCR 25 #endif /* Report a fatal error. */ #ifndef YY_FATAL_ERROR /* %if-c-only */ #define YY_FATAL_ERROR(msg) yy_fatal_error( msg ) /* %endif */ /* %if-c++-only */ /* %endif */ #endif /* %if-tables-serialization structures and prototypes */ /* %not-for-header */ /* %ok-for-header */ /* %not-for-header */ /* %tables-yydmap generated elements */ /* %endif */ /* end tables serialization structures and prototypes */ /* %ok-for-header */ /* Default declaration of generated scanner - a define so the user can * easily add parameters. */ #ifndef YY_DECL #define YY_DECL_IS_OURS 1 /* %if-c-only Standard (non-C++) definition */ extern int yy_sexplex (void); #define YY_DECL int yy_sexplex (void) /* %endif */ /* %if-c++-only C++ definition */ /* %endif */ #endif /* !YY_DECL */ /* Code executed at the beginning of each rule, after yy_sexptext and yy_sexpleng * have been set up. */ #ifndef YY_USER_ACTION #define YY_USER_ACTION #endif /* Code executed at the end of each rule. 
*/ #ifndef YY_BREAK #define YY_BREAK break; #endif /* %% [6.0] YY_RULE_SETUP definition goes here */ #define YY_RULE_SETUP \ YY_USER_ACTION /* %not-for-header */ /** The main scanner function which does all the work. */ YY_DECL { register yy_state_type yy_current_state; register char *yy_cp, *yy_bp; register int yy_act; /* %% [7.0] user's declarations go here */ #line 72 "sexp-lexer.ll" #line 867 "lex.yy_sexp.cc" if ( !(yy_init) ) { (yy_init) = 1; #ifdef YY_USER_INIT YY_USER_INIT; #endif if ( ! (yy_start) ) (yy_start) = 1; /* first start state */ if ( ! yy_sexpin ) /* %if-c-only */ yy_sexpin = stdin; /* %endif */ /* %if-c++-only */ /* %endif */ if ( ! yy_sexpout ) /* %if-c-only */ yy_sexpout = stdout; /* %endif */ /* %if-c++-only */ /* %endif */ if ( ! YY_CURRENT_BUFFER ) { yy_sexpensure_buffer_stack (); YY_CURRENT_BUFFER_LVALUE = yy_sexp_create_buffer(yy_sexpin,YY_BUF_SIZE ); } yy_sexp_load_buffer_state( ); } while ( 1 ) /* loops until end-of-file is reached */ { /* %% [8.0] yymore()-related code goes here */ yy_cp = (yy_c_buf_p); /* Support of yy_sexptext. */ *yy_cp = (yy_hold_char); /* yy_bp points to the position in yy_ch_buf of the start of * the current run. */ yy_bp = yy_cp; /* %% [9.0] code to set up and find next match goes here */ yy_current_state = (yy_start); yy_match: do { register YY_CHAR yy_c = yy_ec[YY_SC_TO_UI(*yy_cp)]; if ( yy_accept[yy_current_state] ) { (yy_last_accepting_state) = yy_current_state; (yy_last_accepting_cpos) = yy_cp; } while ( yy_chk[yy_base[yy_current_state] + yy_c] != yy_current_state ) { yy_current_state = (int) yy_def[yy_current_state]; if ( yy_current_state >= 22 ) yy_c = yy_meta[(unsigned int) yy_c]; } yy_current_state = yy_nxt[yy_base[yy_current_state] + (unsigned int) yy_c]; ++yy_cp; } while ( yy_current_state != 21 ); yy_cp = (yy_last_accepting_cpos); yy_current_state = (yy_last_accepting_state); yy_find_action: /* %% [10.0] code to find the action number goes here */ yy_act = yy_accept[yy_current_state]; YY_DO_BEFORE_ACTION; /* %% [11.0] code for yy_sexplineno update goes here */ do_action: /* This label is used only to access EOF actions. 
*/ /* %% [12.0] debug code goes here */ if ( yy_sexp_flex_debug ) { if ( yy_act == 0 ) fprintf( stderr, "--scanner backing up\n" ); else if ( yy_act < 7 ) fprintf( stderr, "--accepting rule at line %ld (\"%s\")\n", (long)yy_rule_linenum[yy_act], yy_sexptext ); else if ( yy_act == 7 ) fprintf( stderr, "--accepting default rule (\"%s\")\n", yy_sexptext ); else if ( yy_act == 8 ) fprintf( stderr, "--(end of buffer or a NUL)\n" ); else fprintf( stderr, "--EOF (start condition %d)\n", YY_START ); } switch ( yy_act ) { /* beginning of action switch */ /* %% [13.0] actions go here */ case 0: /* must back up */ /* undo the effects of YY_DO_BEFORE_ACTION */ *yy_cp = (yy_hold_char); yy_cp = (yy_last_accepting_cpos); yy_current_state = (yy_last_accepting_state); goto yy_find_action; case 1: /* rule 1 can match eol */ YY_RULE_SETUP #line 75 "sexp-lexer.ll" /* eat up whitespace */ YY_BREAK case 2: YY_RULE_SETUP #line 78 "sexp-lexer.ll" { return tok::LPAREN; } YY_BREAK case 3: YY_RULE_SETUP #line 79 "sexp-lexer.ll" { return tok::RPAREN; } YY_BREAK /* don't bother with special || yet */ case 4: YY_RULE_SETUP #line 82 "sexp-lexer.ll" { yy_sexplval->sval = new std::string(yy_sexptext); return tok::SYMBOL; } YY_BREAK case 5: YY_RULE_SETUP #line 87 "sexp-lexer.ll" { yy_sexplval->sval = new std::string(yy_sexptext); return tok::NUMERAL; } YY_BREAK case 6: *yy_cp = (yy_hold_char); /* undo effects of setting up yy_sexptext */ (yy_c_buf_p) = yy_cp -= 1; YY_DO_BEFORE_ACTION; /* set up yy_sexptext again */ YY_RULE_SETUP #line 91 "sexp-lexer.ll" {} /* Eat up any lisp-style comments */ YY_BREAK case 7: YY_RULE_SETUP #line 95 "sexp-lexer.ll" ECHO; YY_BREAK #line 1022 "lex.yy_sexp.cc" case YY_STATE_EOF(INITIAL): case YY_STATE_EOF(body): case YY_STATE_EOF(goalorigins): yyterminate(); case YY_END_OF_BUFFER: { /* Amount of text matched not including the EOB char. */ int yy_amount_of_matched_text = (int) (yy_cp - (yytext_ptr)) - 1; /* Undo the effects of YY_DO_BEFORE_ACTION. */ *yy_cp = (yy_hold_char); YY_RESTORE_YY_MORE_OFFSET if ( YY_CURRENT_BUFFER_LVALUE->yy_buffer_status == YY_BUFFER_NEW ) { /* We're scanning a new file or input source. It's * possible that this happened because the user * just pointed yy_sexpin at a new source and called * yy_sexplex(). If so, then we have to assure * consistency between YY_CURRENT_BUFFER and our * globals. Here is the right place to do so, because * this is the first action (other than possibly a * back-up) that will match for the new input source. */ (yy_n_chars) = YY_CURRENT_BUFFER_LVALUE->yy_n_chars; YY_CURRENT_BUFFER_LVALUE->yy_input_file = yy_sexpin; YY_CURRENT_BUFFER_LVALUE->yy_buffer_status = YY_BUFFER_NORMAL; } /* Note that here we test for yy_c_buf_p "<=" to the position * of the first EOB in the buffer, since yy_c_buf_p will * already have been incremented past the NUL character * (since all states make transitions on EOB to the * end-of-buffer state). Contrast this with the test * in input(). */ if ( (yy_c_buf_p) <= &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[(yy_n_chars)] ) { /* This was really a NUL. */ yy_state_type yy_next_state; (yy_c_buf_p) = (yytext_ptr) + yy_amount_of_matched_text; yy_current_state = yy_get_previous_state( ); /* Okay, we're now positioned to make the NUL * transition. We couldn't have * yy_get_previous_state() go ahead and do it * for us because it doesn't know how to deal * with the possibility of jamming (and we don't * want to build jamming into it because then it * will run more slowly). 
*/ yy_next_state = yy_try_NUL_trans( yy_current_state ); yy_bp = (yytext_ptr) + YY_MORE_ADJ; if ( yy_next_state ) { /* Consume the NUL. */ yy_cp = ++(yy_c_buf_p); yy_current_state = yy_next_state; goto yy_match; } else { /* %% [14.0] code to do back-up for compressed tables and set up yy_cp goes here */ yy_cp = (yy_last_accepting_cpos); yy_current_state = (yy_last_accepting_state); goto yy_find_action; } } else switch ( yy_get_next_buffer( ) ) { case EOB_ACT_END_OF_FILE: { (yy_did_buffer_switch_on_eof) = 0; if ( yy_sexpwrap( ) ) { /* Note: because we've taken care in * yy_get_next_buffer() to have set up * yy_sexptext, we can now set up * yy_c_buf_p so that if some total * hoser (like flex itself) wants to * call the scanner after we return the * YY_NULL, it'll still work - another * YY_NULL will get returned. */ (yy_c_buf_p) = (yytext_ptr) + YY_MORE_ADJ; yy_act = YY_STATE_EOF(YY_START); goto do_action; } else { if ( ! (yy_did_buffer_switch_on_eof) ) YY_NEW_FILE; } break; } case EOB_ACT_CONTINUE_SCAN: (yy_c_buf_p) = (yytext_ptr) + yy_amount_of_matched_text; yy_current_state = yy_get_previous_state( ); yy_cp = (yy_c_buf_p); yy_bp = (yytext_ptr) + YY_MORE_ADJ; goto yy_match; case EOB_ACT_LAST_MATCH: (yy_c_buf_p) = &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[(yy_n_chars)]; yy_current_state = yy_get_previous_state( ); yy_cp = (yy_c_buf_p); yy_bp = (yytext_ptr) + YY_MORE_ADJ; goto yy_find_action; } break; } default: YY_FATAL_ERROR( "fatal flex scanner internal error--no action found" ); } /* end of action switch */ } /* end of scanning one token */ } /* end of yy_sexplex */ /* %ok-for-header */ /* %if-c++-only */ /* %not-for-header */ /* %ok-for-header */ /* %endif */ /* yy_get_next_buffer - try to read in a new buffer * * Returns a code representing an action: * EOB_ACT_LAST_MATCH - * EOB_ACT_CONTINUE_SCAN - continue scanning from current position * EOB_ACT_END_OF_FILE - end of file */ /* %if-c-only */ static int yy_get_next_buffer (void) /* %endif */ /* %if-c++-only */ /* %endif */ { register char *dest = YY_CURRENT_BUFFER_LVALUE->yy_ch_buf; register char *source = (yytext_ptr); register int number_to_move, i; int ret_val; if ( (yy_c_buf_p) > &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[(yy_n_chars) + 1] ) YY_FATAL_ERROR( "fatal flex scanner internal error--end of buffer missed" ); if ( YY_CURRENT_BUFFER_LVALUE->yy_fill_buffer == 0 ) { /* Don't try to fill the buffer, so this is an EOF. */ if ( (yy_c_buf_p) - (yytext_ptr) - YY_MORE_ADJ == 1 ) { /* We matched a single character, the EOB, so * treat this as a final EOF. */ return EOB_ACT_END_OF_FILE; } else { /* We matched some text prior to the EOB, first * process it. */ return EOB_ACT_LAST_MATCH; } } /* Try to read more data. */ /* First move last chars to start of buffer. */ number_to_move = (int) ((yy_c_buf_p) - (yytext_ptr)) - 1; for ( i = 0; i < number_to_move; ++i ) *(dest++) = *(source++); if ( YY_CURRENT_BUFFER_LVALUE->yy_buffer_status == YY_BUFFER_EOF_PENDING ) /* don't do the read, it's not guaranteed to return an EOF, * just force an EOF */ YY_CURRENT_BUFFER_LVALUE->yy_n_chars = (yy_n_chars) = 0; else { int num_to_read = YY_CURRENT_BUFFER_LVALUE->yy_buf_size - number_to_move - 1; while ( num_to_read <= 0 ) { /* Not enough room in the buffer - grow it. 
*/ /* just a shorter name for the current buffer */ YY_BUFFER_STATE b = YY_CURRENT_BUFFER; int yy_c_buf_p_offset = (int) ((yy_c_buf_p) - b->yy_ch_buf); if ( b->yy_is_our_buffer ) { int new_size = b->yy_buf_size * 2; if ( new_size <= 0 ) b->yy_buf_size += b->yy_buf_size / 8; else b->yy_buf_size *= 2; b->yy_ch_buf = (char *) /* Include room in for 2 EOB chars. */ yy_sexprealloc((void *) b->yy_ch_buf,b->yy_buf_size + 2 ); } else /* Can't grow it, we don't own it. */ b->yy_ch_buf = 0; if ( ! b->yy_ch_buf ) YY_FATAL_ERROR( "fatal error - scanner input buffer overflow" ); (yy_c_buf_p) = &b->yy_ch_buf[yy_c_buf_p_offset]; num_to_read = YY_CURRENT_BUFFER_LVALUE->yy_buf_size - number_to_move - 1; } if ( num_to_read > YY_READ_BUF_SIZE ) num_to_read = YY_READ_BUF_SIZE; /* Read in more data. */ YY_INPUT( (&YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[number_to_move]), (yy_n_chars), (size_t) num_to_read ); YY_CURRENT_BUFFER_LVALUE->yy_n_chars = (yy_n_chars); } if ( (yy_n_chars) == 0 ) { if ( number_to_move == YY_MORE_ADJ ) { ret_val = EOB_ACT_END_OF_FILE; yy_sexprestart(yy_sexpin ); } else { ret_val = EOB_ACT_LAST_MATCH; YY_CURRENT_BUFFER_LVALUE->yy_buffer_status = YY_BUFFER_EOF_PENDING; } } else ret_val = EOB_ACT_CONTINUE_SCAN; if ((yy_size_t) ((yy_n_chars) + number_to_move) > YY_CURRENT_BUFFER_LVALUE->yy_buf_size) { /* Extend the array by 50%, plus the number we really need. */ yy_size_t new_size = (yy_n_chars) + number_to_move + ((yy_n_chars) >> 1); YY_CURRENT_BUFFER_LVALUE->yy_ch_buf = (char *) yy_sexprealloc((void *) YY_CURRENT_BUFFER_LVALUE->yy_ch_buf,new_size ); if ( ! YY_CURRENT_BUFFER_LVALUE->yy_ch_buf ) YY_FATAL_ERROR( "out of dynamic memory in yy_get_next_buffer()" ); } (yy_n_chars) += number_to_move; YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[(yy_n_chars)] = YY_END_OF_BUFFER_CHAR; YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[(yy_n_chars) + 1] = YY_END_OF_BUFFER_CHAR; (yytext_ptr) = &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[0]; return ret_val; } /* yy_get_previous_state - get the state just before the EOB char was reached */ /* %if-c-only */ /* %not-for-header */ static yy_state_type yy_get_previous_state (void) /* %endif */ /* %if-c++-only */ /* %endif */ { register yy_state_type yy_current_state; register char *yy_cp; /* %% [15.0] code to get the start state into yy_current_state goes here */ yy_current_state = (yy_start); for ( yy_cp = (yytext_ptr) + YY_MORE_ADJ; yy_cp < (yy_c_buf_p); ++yy_cp ) { /* %% [16.0] code to find the next state goes here */ register YY_CHAR yy_c = (*yy_cp ? 
yy_ec[YY_SC_TO_UI(*yy_cp)] : 1); if ( yy_accept[yy_current_state] ) { (yy_last_accepting_state) = yy_current_state; (yy_last_accepting_cpos) = yy_cp; } while ( yy_chk[yy_base[yy_current_state] + yy_c] != yy_current_state ) { yy_current_state = (int) yy_def[yy_current_state]; if ( yy_current_state >= 22 ) yy_c = yy_meta[(unsigned int) yy_c]; } yy_current_state = yy_nxt[yy_base[yy_current_state] + (unsigned int) yy_c]; } return yy_current_state; } /* yy_try_NUL_trans - try to make a transition on the NUL character * * synopsis * next_state = yy_try_NUL_trans( current_state ); */ /* %if-c-only */ static yy_state_type yy_try_NUL_trans (yy_state_type yy_current_state ) /* %endif */ /* %if-c++-only */ /* %endif */ { register int yy_is_jam; /* %% [17.0] code to find the next state, and perhaps do backing up, goes here */ register char *yy_cp = (yy_c_buf_p); register YY_CHAR yy_c = 1; if ( yy_accept[yy_current_state] ) { (yy_last_accepting_state) = yy_current_state; (yy_last_accepting_cpos) = yy_cp; } while ( yy_chk[yy_base[yy_current_state] + yy_c] != yy_current_state ) { yy_current_state = (int) yy_def[yy_current_state]; if ( yy_current_state >= 22 ) yy_c = yy_meta[(unsigned int) yy_c]; } yy_current_state = yy_nxt[yy_base[yy_current_state] + (unsigned int) yy_c]; yy_is_jam = (yy_current_state == 21); return yy_is_jam ? 0 : yy_current_state; } /* %if-c-only */ /* %endif */ /* %if-c-only */ #ifndef YY_NO_INPUT #ifdef __cplusplus static int yyinput (void) #else static int input (void) #endif /* %endif */ /* %if-c++-only */ /* %endif */ { int c; *(yy_c_buf_p) = (yy_hold_char); if ( *(yy_c_buf_p) == YY_END_OF_BUFFER_CHAR ) { /* yy_c_buf_p now points to the character we want to return. * If this occurs *before* the EOB characters, then it's a * valid NUL; if not, then we've hit the end of the buffer. */ if ( (yy_c_buf_p) < &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[(yy_n_chars)] ) /* This was really a NUL. */ *(yy_c_buf_p) = '\0'; else { /* need more input */ int offset = (yy_c_buf_p) - (yytext_ptr); ++(yy_c_buf_p); switch ( yy_get_next_buffer( ) ) { case EOB_ACT_LAST_MATCH: /* This happens because yy_g_n_b() * sees that we've accumulated a * token and flags that we need to * try matching the token before * proceeding. But for input(), * there's no matching to consider. * So convert the EOB_ACT_LAST_MATCH * to EOB_ACT_END_OF_FILE. */ /* Reset buffer status. */ yy_sexprestart(yy_sexpin ); /*FALLTHROUGH*/ case EOB_ACT_END_OF_FILE: { if ( yy_sexpwrap( ) ) return EOF; if ( ! (yy_did_buffer_switch_on_eof) ) YY_NEW_FILE; #ifdef __cplusplus return yyinput(); #else return input(); #endif } case EOB_ACT_CONTINUE_SCAN: (yy_c_buf_p) = (yytext_ptr) + offset; break; } } } c = *(unsigned char *) (yy_c_buf_p); /* cast for 8-bit char's */ *(yy_c_buf_p) = '\0'; /* preserve yy_sexptext */ (yy_hold_char) = *++(yy_c_buf_p); /* %% [19.0] update BOL and yy_sexplineno */ return c; } /* %if-c-only */ #endif /* ifndef YY_NO_INPUT */ /* %endif */ /** Immediately switch to a different input stream. * @param input_file A readable stream. * * @note This function does not reset the start condition to @c INITIAL . */ /* %if-c-only */ void yy_sexprestart (FILE * input_file ) /* %endif */ /* %if-c++-only */ /* %endif */ { if ( ! YY_CURRENT_BUFFER ){ yy_sexpensure_buffer_stack (); YY_CURRENT_BUFFER_LVALUE = yy_sexp_create_buffer(yy_sexpin,YY_BUF_SIZE ); } yy_sexp_init_buffer(YY_CURRENT_BUFFER,input_file ); yy_sexp_load_buffer_state( ); } /** Switch to a different input buffer. * @param new_buffer The new input buffer. 
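 * @note The buffer being switched away from is saved, not deleted; the caller
 * remains responsible for it (see yy_sexp_delete_buffer()).  For nested input
 * sources it is usually simpler to use yy_sexppush_buffer_state() and
 * yy_sexppop_buffer_state(), which maintain a stack of buffers.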
* */ /* %if-c-only */ void yy_sexp_switch_to_buffer (YY_BUFFER_STATE new_buffer ) /* %endif */ /* %if-c++-only */ /* %endif */ { /* TODO. We should be able to replace this entire function body * with * yy_sexppop_buffer_state(); * yy_sexppush_buffer_state(new_buffer); */ yy_sexpensure_buffer_stack (); if ( YY_CURRENT_BUFFER == new_buffer ) return; if ( YY_CURRENT_BUFFER ) { /* Flush out information for old buffer. */ *(yy_c_buf_p) = (yy_hold_char); YY_CURRENT_BUFFER_LVALUE->yy_buf_pos = (yy_c_buf_p); YY_CURRENT_BUFFER_LVALUE->yy_n_chars = (yy_n_chars); } YY_CURRENT_BUFFER_LVALUE = new_buffer; yy_sexp_load_buffer_state( ); /* We don't actually know whether we did this switch during * EOF (yy_sexpwrap()) processing, but the only time this flag * is looked at is after yy_sexpwrap() is called, so it's safe * to go ahead and always set it. */ (yy_did_buffer_switch_on_eof) = 1; } /* %if-c-only */ static void yy_sexp_load_buffer_state (void) /* %endif */ /* %if-c++-only */ /* %endif */ { (yy_n_chars) = YY_CURRENT_BUFFER_LVALUE->yy_n_chars; (yytext_ptr) = (yy_c_buf_p) = YY_CURRENT_BUFFER_LVALUE->yy_buf_pos; yy_sexpin = YY_CURRENT_BUFFER_LVALUE->yy_input_file; (yy_hold_char) = *(yy_c_buf_p); } /** Allocate and initialize an input buffer state. * @param file A readable stream. * @param size The character buffer size in bytes. When in doubt, use @c YY_BUF_SIZE. * * @return the allocated buffer state. */ /* %if-c-only */ YY_BUFFER_STATE yy_sexp_create_buffer (FILE * file, int size ) /* %endif */ /* %if-c++-only */ /* %endif */ { YY_BUFFER_STATE b; b = (YY_BUFFER_STATE) yy_sexpalloc(sizeof( struct yy_buffer_state ) ); if ( ! b ) YY_FATAL_ERROR( "out of dynamic memory in yy_sexp_create_buffer()" ); b->yy_buf_size = size; /* yy_ch_buf has to be 2 characters longer than the size given because * we need to put in 2 end-of-buffer characters. */ b->yy_ch_buf = (char *) yy_sexpalloc(b->yy_buf_size + 2 ); if ( ! b->yy_ch_buf ) YY_FATAL_ERROR( "out of dynamic memory in yy_sexp_create_buffer()" ); b->yy_is_our_buffer = 1; yy_sexp_init_buffer(b,file ); return b; } /** Destroy the buffer. * @param b a buffer created with yy_sexp_create_buffer() * */ /* %if-c-only */ void yy_sexp_delete_buffer (YY_BUFFER_STATE b ) /* %endif */ /* %if-c++-only */ /* %endif */ { if ( ! b ) return; if ( b == YY_CURRENT_BUFFER ) /* Not sure if we should pop here. */ YY_CURRENT_BUFFER_LVALUE = (YY_BUFFER_STATE) 0; if ( b->yy_is_our_buffer ) yy_sexpfree((void *) b->yy_ch_buf ); yy_sexpfree((void *) b ); } /* %if-c-only */ #ifndef __cplusplus extern int isatty (int ); #endif /* __cplusplus */ /* %endif */ /* %if-c++-only */ /* %endif */ /* Initializes or reinitializes a buffer. * This function is sometimes called more than once on the same buffer, * such as during a yy_sexprestart() or at EOF. */ /* %if-c-only */ static void yy_sexp_init_buffer (YY_BUFFER_STATE b, FILE * file ) /* %endif */ /* %if-c++-only */ /* %endif */ { int oerrno = errno; yy_sexp_flush_buffer(b ); b->yy_input_file = file; b->yy_fill_buffer = 1; /* If b is the current buffer, then yy_sexp_init_buffer was _probably_ * called from yy_sexprestart() or through yy_get_next_buffer. * In that case, we don't want to reset the lineno or column. */ if (b != YY_CURRENT_BUFFER){ b->yy_bs_lineno = 1; b->yy_bs_column = 0; } /* %if-c-only */ b->yy_is_interactive = file ? (isatty( fileno(file) ) > 0) : 0; /* %endif */ /* %if-c++-only */ /* %endif */ errno = oerrno; } /** Discard all buffered characters. On the next scan, YY_INPUT will be called. 
* @param b the buffer state to be flushed, usually @c YY_CURRENT_BUFFER. * */ /* %if-c-only */ void yy_sexp_flush_buffer (YY_BUFFER_STATE b ) /* %endif */ /* %if-c++-only */ /* %endif */ { if ( ! b ) return; b->yy_n_chars = 0; /* We always need two end-of-buffer characters. The first causes * a transition to the end-of-buffer state. The second causes * a jam in that state. */ b->yy_ch_buf[0] = YY_END_OF_BUFFER_CHAR; b->yy_ch_buf[1] = YY_END_OF_BUFFER_CHAR; b->yy_buf_pos = &b->yy_ch_buf[0]; b->yy_at_bol = 1; b->yy_buffer_status = YY_BUFFER_NEW; if ( b == YY_CURRENT_BUFFER ) yy_sexp_load_buffer_state( ); } /* %if-c-or-c++ */ /** Pushes the new state onto the stack. The new state becomes * the current state. This function will allocate the stack * if necessary. * @param new_buffer The new state. * */ /* %if-c-only */ void yy_sexppush_buffer_state (YY_BUFFER_STATE new_buffer ) /* %endif */ /* %if-c++-only */ /* %endif */ { if (new_buffer == NULL) return; yy_sexpensure_buffer_stack(); /* This block is copied from yy_sexp_switch_to_buffer. */ if ( YY_CURRENT_BUFFER ) { /* Flush out information for old buffer. */ *(yy_c_buf_p) = (yy_hold_char); YY_CURRENT_BUFFER_LVALUE->yy_buf_pos = (yy_c_buf_p); YY_CURRENT_BUFFER_LVALUE->yy_n_chars = (yy_n_chars); } /* Only push if top exists. Otherwise, replace top. */ if (YY_CURRENT_BUFFER) (yy_buffer_stack_top)++; YY_CURRENT_BUFFER_LVALUE = new_buffer; /* copied from yy_sexp_switch_to_buffer. */ yy_sexp_load_buffer_state( ); (yy_did_buffer_switch_on_eof) = 1; } /* %endif */ /* %if-c-or-c++ */ /** Removes and deletes the top of the stack, if present. * The next element becomes the new top. * */ /* %if-c-only */ void yy_sexppop_buffer_state (void) /* %endif */ /* %if-c++-only */ /* %endif */ { if (!YY_CURRENT_BUFFER) return; yy_sexp_delete_buffer(YY_CURRENT_BUFFER ); YY_CURRENT_BUFFER_LVALUE = NULL; if ((yy_buffer_stack_top) > 0) --(yy_buffer_stack_top); if (YY_CURRENT_BUFFER) { yy_sexp_load_buffer_state( ); (yy_did_buffer_switch_on_eof) = 1; } } /* %endif */ /* %if-c-or-c++ */ /* Allocates the stack if it does not exist. * Guarantees space for at least one push. */ /* %if-c-only */ static void yy_sexpensure_buffer_stack (void) /* %endif */ /* %if-c++-only */ /* %endif */ { int num_to_alloc; if (!(yy_buffer_stack)) { /* First allocation is just for 2 elements, since we don't know if this * scanner will even need a stack. We use 2 instead of 1 to avoid an * immediate realloc on the next call. */ num_to_alloc = 1; (yy_buffer_stack) = (struct yy_buffer_state**)yy_sexpalloc (num_to_alloc * sizeof(struct yy_buffer_state*) ); if ( ! (yy_buffer_stack) ) YY_FATAL_ERROR( "out of dynamic memory in yy_sexpensure_buffer_stack()" ); memset((yy_buffer_stack), 0, num_to_alloc * sizeof(struct yy_buffer_state*)); (yy_buffer_stack_max) = num_to_alloc; (yy_buffer_stack_top) = 0; return; } if ((yy_buffer_stack_top) >= ((yy_buffer_stack_max)) - 1){ /* Increase the buffer to prepare for a possible push. */ int grow_size = 8 /* arbitrary grow size */; num_to_alloc = (yy_buffer_stack_max) + grow_size; (yy_buffer_stack) = (struct yy_buffer_state**)yy_sexprealloc ((yy_buffer_stack), num_to_alloc * sizeof(struct yy_buffer_state*) ); if ( ! 
(yy_buffer_stack) ) YY_FATAL_ERROR( "out of dynamic memory in yy_sexpensure_buffer_stack()" ); /* zero only the new slots.*/ memset((yy_buffer_stack) + (yy_buffer_stack_max), 0, grow_size * sizeof(struct yy_buffer_state*)); (yy_buffer_stack_max) = num_to_alloc; } } /* %endif */ /* %if-c-only */ /** Setup the input buffer state to scan directly from a user-specified character buffer. * @param base the character buffer * @param size the size in bytes of the character buffer * * @return the newly allocated buffer state object. */ YY_BUFFER_STATE yy_sexp_scan_buffer (char * base, yy_size_t size ) { YY_BUFFER_STATE b; if ( size < 2 || base[size-2] != YY_END_OF_BUFFER_CHAR || base[size-1] != YY_END_OF_BUFFER_CHAR ) /* They forgot to leave room for the EOB's. */ return 0; b = (YY_BUFFER_STATE) yy_sexpalloc(sizeof( struct yy_buffer_state ) ); if ( ! b ) YY_FATAL_ERROR( "out of dynamic memory in yy_sexp_scan_buffer()" ); b->yy_buf_size = size - 2; /* "- 2" to take care of EOB's */ b->yy_buf_pos = b->yy_ch_buf = base; b->yy_is_our_buffer = 0; b->yy_input_file = 0; b->yy_n_chars = b->yy_buf_size; b->yy_is_interactive = 0; b->yy_at_bol = 1; b->yy_fill_buffer = 0; b->yy_buffer_status = YY_BUFFER_NEW; yy_sexp_switch_to_buffer(b ); return b; } /* %endif */ /* %if-c-only */ /** Setup the input buffer state to scan a string. The next call to yy_sexplex() will * scan from a @e copy of @a str. * @param yystr a NUL-terminated string to scan * * @return the newly allocated buffer state object. * @note If you want to scan bytes that may contain NUL values, then use * yy_sexp_scan_bytes() instead. */ YY_BUFFER_STATE yy_sexp_scan_string (yyconst char * yystr ) { return yy_sexp_scan_bytes(yystr,strlen(yystr) ); } /* %endif */ /* %if-c-only */ /** Setup the input buffer state to scan the given bytes. The next call to yy_sexplex() will * scan from a @e copy of @a bytes. * @param bytes the byte buffer to scan * @param len the number of bytes in the buffer pointed to by @a bytes. * * @return the newly allocated buffer state object. */ YY_BUFFER_STATE yy_sexp_scan_bytes (yyconst char * yybytes, int _yybytes_len ) { YY_BUFFER_STATE b; char *buf; yy_size_t n; int i; /* Get memory for full buffer, including space for trailing EOB's. */ n = _yybytes_len + 2; buf = (char *) yy_sexpalloc(n ); if ( ! buf ) YY_FATAL_ERROR( "out of dynamic memory in yy_sexp_scan_bytes()" ); for ( i = 0; i < _yybytes_len; ++i ) buf[i] = yybytes[i]; buf[_yybytes_len] = buf[_yybytes_len+1] = YY_END_OF_BUFFER_CHAR; b = yy_sexp_scan_buffer(buf,n ); if ( ! b ) YY_FATAL_ERROR( "bad buffer in yy_sexp_scan_bytes()" ); /* It's okay to grow etc. this buffer, and we should throw it * away when we're done. */ b->yy_is_our_buffer = 1; return b; } /* %endif */ #ifndef YY_EXIT_FAILURE #define YY_EXIT_FAILURE 2 #endif /* %if-c-only */ static void yy_fatal_error (yyconst char* msg ) { (void) fprintf( stderr, "%s\n", msg ); exit( YY_EXIT_FAILURE ); } /* %endif */ /* %if-c++-only */ /* %endif */ /* Redefine yyless() so it works in section 3 code. */ #undef yyless #define yyless(n) \ do \ { \ /* Undo effects of setting up yy_sexptext. */ \ int yyless_macro_arg = (n); \ YY_LESS_LINENO(yyless_macro_arg);\ yy_sexptext[yy_sexpleng] = (yy_hold_char); \ (yy_c_buf_p) = yy_sexptext + yyless_macro_arg; \ (yy_hold_char) = *(yy_c_buf_p); \ *(yy_c_buf_p) = '\0'; \ yy_sexpleng = yyless_macro_arg; \ } \ while ( 0 ) /* Accessor methods (get/set functions) to struct members. */ /* %if-c-only */ /* %if-reentrant */ /* %endif */ /** Get the current line number. 
* */ int yy_sexpget_lineno (void) { return yy_sexplineno; } /** Get the input stream. * */ FILE *yy_sexpget_in (void) { return yy_sexpin; } /** Get the output stream. * */ FILE *yy_sexpget_out (void) { return yy_sexpout; } /** Get the length of the current token. * */ int yy_sexpget_leng (void) { return yy_sexpleng; } /** Get the current token. * */ char *yy_sexpget_text (void) { return yy_sexptext; } /* %if-reentrant */ /* %endif */ /** Set the current line number. * @param line_number * */ void yy_sexpset_lineno (int line_number ) { yy_sexplineno = line_number; } /** Set the input stream. This does not discard the current * input buffer. * @param in_str A readable stream. * * @see yy_sexp_switch_to_buffer */ void yy_sexpset_in (FILE * in_str ) { yy_sexpin = in_str ; } void yy_sexpset_out (FILE * out_str ) { yy_sexpout = out_str ; } int yy_sexpget_debug (void) { return yy_sexp_flex_debug; } void yy_sexpset_debug (int bdebug ) { yy_sexp_flex_debug = bdebug ; } /* %endif */ /* %if-reentrant */ /* %if-bison-bridge */ /* %endif */ /* %endif if-c-only */ /* %if-c-only */ static int yy_init_globals (void) { /* Initialization is the same as for the non-reentrant scanner. * This function is called from yy_sexplex_destroy(), so don't allocate here. */ (yy_buffer_stack) = 0; (yy_buffer_stack_top) = 0; (yy_buffer_stack_max) = 0; (yy_c_buf_p) = (char *) 0; (yy_init) = 0; (yy_start) = 0; /* Defined in main.c */ #ifdef YY_STDINIT yy_sexpin = stdin; yy_sexpout = stdout; #else yy_sexpin = (FILE *) 0; yy_sexpout = (FILE *) 0; #endif /* For future reference: Set errno on error, since we are called by * yy_sexplex_init() */ return 0; } /* %endif */ /* %if-c-only SNIP! this currently causes conflicts with the c++ scanner */ /* yy_sexplex_destroy is for both reentrant and non-reentrant scanners. */ int yy_sexplex_destroy (void) { /* Pop the buffer stack, destroying each element. */ while(YY_CURRENT_BUFFER){ yy_sexp_delete_buffer(YY_CURRENT_BUFFER ); YY_CURRENT_BUFFER_LVALUE = NULL; yy_sexppop_buffer_state(); } /* Destroy the stack itself. */ yy_sexpfree((yy_buffer_stack) ); (yy_buffer_stack) = NULL; /* Reset the globals. This is important in a non-reentrant scanner so the next time * yy_sexplex() is called, initialization will occur. */ yy_init_globals( ); /* %if-reentrant */ /* %endif */ return 0; } /* %endif */ /* * Internal utility routines. */ #ifndef yytext_ptr static void yy_flex_strncpy (char* s1, yyconst char * s2, int n ) { register int i; for ( i = 0; i < n; ++i ) s1[i] = s2[i]; } #endif #ifdef YY_NEED_STRLEN static int yy_flex_strlen (yyconst char * s ) { register int n; for ( n = 0; s[n]; ++n ) ; return n; } #endif void *yy_sexpalloc (yy_size_t size ) { return (void *) malloc( size ); } void *yy_sexprealloc (void * ptr, yy_size_t size ) { /* The cast to (char *) in the following accommodates both * implementations that use char* generic pointers, and those * that use void* generic pointers. It works with the latter * because both ANSI C and C++ allow castless assignment from * any pointer type to void*, and deal with argument conversions * as though doing an assignment. */ return (void *) realloc( (char *) ptr, size ); } void yy_sexpfree (void * ptr ) { free( (char *) ptr ); /* see yy_sexprealloc() for (char *) cast */ } /* %if-tables-serialization definitions */ /* %define-yytables The name for this specific scanner's tables. 
*/ #define YYTABLES_NAME "yytables" /* %endif */ /* %ok-for-header */ #line 95 "sexp-lexer.ll" void sexp_pdriver::scan_begin () { yy_sexp_flex_debug = trace_scanning; if (!(yy_sexpin = fopen (file.c_str (), "r"))) error (std::string ("cannot open ") + file); } void sexp_pdriver::scan_end () { fclose (yy_sexpin); } spark-2012.0.deb/victor/vct/README.txt0000644000175000017500000000150411753202341016235 0ustar eugeneugenVictor: A SPARK Verification Condition Translator and Prover Driver. Version 0.9.1 15th December 2010 Paul Jackson pbj@inf.ed.ac.uk See section "Installation and Testing" of the User Manual doc/vct-man.pdf for information on how to install and run Victor. Distribution organisation: README.txt # This file CHANGES.txt # Changes made in this and earlier releases. COPYING.txt # copyright and licensing information LICENSE.txt # GNU GPL V3 license doc/vct-man.pdf # User manual. src/ # Source code for Victor and auxiliary tools build/ # Example intermediate source files and executables run/ # Makefile for running Victor and various input files out-ref/ # Example report files. vc/ # Example VC sets bin/ # Default location for executables built from sources. Empty in distribution. spark-2012.0.deb/victor/vct/LICENSE.txt0000644000175000017500000007724611753202341016402 0ustar eugeneugen GNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007 Copyright (C) 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The GNU General Public License is a free, copyleft license for software and other kinds of works. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. 
For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. "This License" refers to version 3 of the GNU General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. 
A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 
No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. 
You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. 
"Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. 
Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. 
Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. 
If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. "Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. 
The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. spark-2012.0.deb/victor/vct/bin/0000755000175000017500000000000011753202734015315 5ustar eugeneugenspark-2012.0.deb/victor/vct/COPYING.txt0000644000175000017500000000151411753202341016411 0ustar eugeneugenVictor: A SPARK Verification Condition Translator and Prover Driver. 
Copyright (C) 2009, 2010 University of Edinburgh Authors: Paul Jackson, Altran Praxis, AdaCore This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. A copy of the GNU General Public License V3 can be found in file LICENSE.txt and online at http://www.gnu.org/licenses/. Paul Jackson School of Informatics, University of Edinburgh pbj@inf.ed.ac.uk spark-2012.0.deb/victor/vct/run/0000755000175000017500000000000011753202341015343 5ustar eugeneugenspark-2012.0.deb/victor/vct/run/divmod.rul0000644000175000017500000000423711753202341017357 0ustar eugeneugen/* Rules for div, mod and rem in .rls file format X div Y is rational value X/Y rounded towards zero to the nearest integer X mod Y, X int___rem Y are characterised to have the same behaviour */ rule_family divmod: X * Y requires [ X:i, Y:i ] & X mod Y requires [ X:i, Y:i ] & X div Y requires [ X:i, Y:i ] & X <= Y requires [ X:i, Y:i ] & X < Y requires [ X:i, Y:i ] & X >= Y requires [ X:i, Y:i ] & X > Y requires [ X:i, Y:i ]. divmod(1): 0 <= X mod Y may_be_deduced_from [0 < Y] . divmod(2): X mod Y < Y may_be_deduced_from [0 < Y] . divmod(3): X mod Y <= 0 may_be_deduced_from [Y < 0] . divmod(4): Y < X mod Y may_be_deduced_from [Y < 0] . /* X/Y - 1 < X div Y <= X/Y if X >= 0, Y > 0 */ divmod(5): X - Y < Y * (X div Y) may_be_deduced_from [0 <= X, 0 < Y] . divmod(6): Y * (X div Y) <= X may_be_deduced_from [0 <= X, 0 < Y] . /* X/Y <= X div Y < X/Y + 1 if X <= 0, Y > 0 */ divmod(7): X <= Y * (X div Y) may_be_deduced_from [X <= 0, 0 < Y] . divmod(8): Y * (X div Y) < X + Y may_be_deduced_from [X <= 0, 0 < Y] . /* X/Y <= X div Y < X/Y + 1 if X >= 0, Y < 0 */ divmod(9): X >= Y * (X div Y) may_be_deduced_from [0 <= X, Y < 0] . divmod(10): Y * (X div Y) > X + Y may_be_deduced_from [0 <= X, Y < 0] . /* X/Y - 1 < X div Y <= X/Y if X <= 0, Y < 0 */ divmod(11): X - Y > Y * (X div Y) may_be_deduced_from [X <= 0, Y < 0] . divmod(12): Y * (X div Y) >= X may_be_deduced_from [X <= 0, Y < 0] . divmod(13): Y * (X div Y) + int___rem(X,Y) = X may_be_deduced . divmod(14): X mod Y = 0 may_be_deduced_from [int___rem(X,Y) = 0] . divmod(15): X mod Y = int___rem(X,Y) may_be_deduced_from [0 <= X, 0 < Y] . divmod(16): X mod Y = int___rem(X,Y) + Y may_be_deduced_from [X <= 0, 0 < Y, int___rem(X,Y) <> 0] . divmod(17): X mod Y = int___rem(X,Y) + Y may_be_deduced_from [0 <= X, Y < 0, int___rem(X,Y) <> 0] . divmod(18): X mod Y = int___rem(X,Y) may_be_deduced_from [X <= 0, Y < 0] . /* divmod(20) Is deducible by Z3, but its performance is much better with this explicit. */ divmod(20): X mod Y = X may_be_deduced_from [0 <= X, X < Y] . /* End of File */ spark-2012.0.deb/victor/vct/run/Makefile-prelude.mk0000644000175000017500000003634011753202341021055 0ustar eugeneugen#============================================================================= #============================================================================= # Makefile-prelude.mk #============================================================================= #============================================================================= # This file is part of Victor: a SPARK VC Translator and Prover Driver. 
# Copyright (C) 2009, 2010 University of Edinburgh # Author(s): Paul Jackson # Victor is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or (at # your option) any later version. # Victor is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # A copy of the GNU General Public License V3 can be found in file # LICENSE.txt and online at http://www.gnu.org/licenses/. # #============================================================================= #============================================================================= # Victor executable #============================================================================= VCT=vct #============================================================================= # Overridable options #============================================================================= # "ifdef " used for bool variables. is considered `true' just when # it is defined. # # #----------------------------------------------------------------------------- # Enabling timeouts #------------------ # 14/12/09: timeout using shell timout.sh is currently flakey. # Use with caution! # timeout_option = -ulimit-timeout=$(T) ifdef CT # CVC3 timeout for API & file-level interfaces. Units of 0.1sec. timeout_sfx = -ct$(CT) timeout_option = -timeout=$(CT) cvc3_timeout_flag = -timeout $(CT) # Option for cvc3 command else ifdef T # Ulimit timeout (integer sec): applies to any file-level solver timeout_sfx = -t$(T) else ifdef ST # Shell timeout (fixed pt sec): applies to any file-level solver timeout_sfx = -st$(ST) timeout_option = -shell-timeout=$(ST) else ifdef WT # Watchdog timeout (fixed pt sec): # applies to any file-level solver timeout_sfx = -w$(WT) timeout_option = -watchdog-timeout=$(WT) else ifdef ZT # Z3 soft timeout. Units of ms. timeout_sfx = -zt$(ZT) timeout_option = -smtlib2-soft-timeout=$(ZT) endif T=10# Delay setting default T to here so don't get suffix for default time. 
# (Important here that no trailing spaces in value, so can use CT=$(T)0 ) #----------------------------------------------------------------------------- # Fusing conclusions #------------------- ifdef U fuse_c_pfx = u fuse_concl_options = # empty else fuse_c_pfx = f fuse_concl_options = -fuse-concls endif #----------------------------------------------------------------------------- # Output directory #----------------- OUTDIR=out #----------------------------------------------------------------------------- # Arithmetic options #------------------- ifdef L lin_sfx=-lin lin_opt=-abstract-nonlin-times smtlib_logic=AUFLIA else ifdef LEC lin_sfx=-lin-ec lin_opt=-abstract-nonlin-times \ -elim-consts smtlib_logic=AUFLIA else ifdef LAE lin_sfx=-lin-ae lin_opt=-abstract-nonlin-times \ -elim-consts \ -ground-eval \ -arith-eval smtlib_logic=AUFLIA else ifdef EC lin_sfx=-ec lin_opt=-elim-consts smtlib_logic=AUFNIRA else ifdef AE lin_sfx=-ae lin_opt=-elim-consts \ -ground-eval \ -arith-eval smtlib_logic=AUFNIRA else lin_sfx= # empty lin_opt= # empty smtlib_logic=AUFNIRA endif #----------------------------------------------------------------------------- # Selecting VCG or SIV file for VCs #---------------------------------- ifdef SIV siv_flag=-siv siv_sfx = -siv endif #----------------------------------------------------------------------------- # Processing trivial goals #------------------------- ifdef TG tg_flag=-count-trivial-goals tg_sfx = -tg endif #----------------------------------------------------------------------------- # Solver call iteration #---------------------- # RD only for use with api interface mode # RC only for use with smtlib/simplify interface mode # # api interface modes must also assert -gstime-inc-setup in order # for cumulative times to be reported correctly # (Victor eventually should take care of these dependencies internally) ifdef RD # Repeat Drive goal / goal slice repeat_sfx=-rd$(RD) repeat_option = -drive-goal-repeats=$(RD) else ifdef RC # Repeat Check goal / goal slice repeat_sfx=-rc$(RC) repeat_option = -check-goal-repeats=$(RC) else repeat_sfx=# Empty repeat_option=# Empty endif #----------------------------------------------------------------------------- # Reading standard user-defined rules files #------------------------------------------ ifdef RLU std_rlu_sfx=-rlu std_rlu_option= -read-directory-rlu-files -read-unit-rlu-files else ifdef RLUA std_rlu_sfx=-rlua std_rlu_option= -read-directory-rlu-files -read-unit-rlu-files \ -read-all-decl-files-in-dir else ifdef RLUD std_rlu_sfx=-rlud std_rlu_option= -read-directory-rlu-files -read-unit-rlu-files \ -delete-rules-with-undeclared-ids endif #----------------------------------------------------------------------------- # Enum type options #------------------ ifdef AXE enum_sfx = -axe enum_option = -axiomatise-enums else ifdef ABE enum_sfx = -abe enum_option = -abstract-enums else enum_option = -elim-enums endif #----------------------------------------------------------------------------- # Focussing on single unit and goal #---------------------------------- ifdef UNIT unit_option = $(UNIT) endif ifdef GOAL goal_option = -goal=$(GOAL) -ctick endif #----------------------------------------------------------------------------- # Set threshold for symbolic numeric constants #--------------------------------------------- SYM_CONSTS=100000 #----------------------------------------------------------------------------- # Translation options for SMTLIB, SMTLIB2 and Simplify # 
---------------------------------------------------- # # Possible values: # # A = 0,1 # B = 0,1 # C = 0,1,2 # D = 0,1 # E = 0,1 # F = 0,1 # G = 0,1,2,3 # First record whether any options provided or not. ifneq ($(A)$(B)$(C)$(D)$(E)$(F)$(G),) smtlib_options_provided = yes else smtlib_options_provided = no endif # A. Whether to treat bit type as subtype 0,1 of integers or quotient of # integers where 1 is true and all other values false. # SMTLIB and Simplify # A = 0 ifeq ($(A),0) A_opt = -refine-bit-type-as-int-subtype endif ifeq ($(A),1) A_opt = -refine-bit-type-as-int-quotient endif # B. Whether to introduce term-level bit ops and rels or use # prop-to-bit coercions # SMTLIB and Simplify # B = 0 ifeq ($(B),0) B_opt = -refine-bit-eq-equiv # -trace-intro-bit-ops-and-rels endif ifeq ($(B),1) B_opt = -bit-type-with-ite # -trace-prop-to-bit-insertion endif # C. Heuristics for how one chooses a bit-valued or prop-valued version for # each QFOL uninterpreted function to bool. # C = 2 is OK only if B = 1. # SMTLIB and Simplify C = 0 ifeq ($(C),0) C_opt = # (no option) use bit-valued ops iff is instance in term position endif ifeq ($(C),1) C_opt = -bit-type-prefer-bit-vals endif ifeq ($(C),2) C_opt = -bit-type-prefer-props endif # D. Approach to handling refinement of array types. Whether to # constrain array element values on extended indexes or not. # (Assert option to not constrain) # D = 0 ifeq ($(D),0) D_opt = # (no option) out of bounds element values constrained endif ifeq ($(D),1) D_opt = -refine-array-types-with-quotient endif # E. Whether to axiomatise arrays and records before type refinement or # after # E = 0 ifeq ($(E),0) E_opt = -abstract-arrays-records-late endif ifeq ($(E),1) E_opt = -abstract-arrays-records-early endif # F. Strategy for axiomatising records # Whether to use axiomisation that uses updates or constructors # F = 0 ifeq ($(F),0) F_opt = -elim-record-constructors \ -add-record-select-update-axioms \ -abstract-record-selects-updates endif ifeq ($(F),1) F_opt = -abstract-record-updates \ -add-record-select-constructor-axioms \ -abstract-record-selects-constructors endif # G. Approach for handling extensionality. Whether axioms are added and # if they are added, whether aliases are used for array and record # equalities to help with quantifier instantiation algorithms. # # NB: using the record constructor extensionality axioms (G = 2) only make # sense if option F = 1. # G = 0 ifeq ($(G),0) G_opt = # (no option) endif ifeq ($(G),1) G_opt = -add-array-extensionality-axioms \ -add-record-eq-elements-extensionality-axioms \ -identify-eqs-at-arr-rec-types endif ifeq ($(G),2) G_opt = -add-array-extensionality-axioms \ -add-record-constructor-extensionality-axioms \ -identify-eqs-at-arr-rec-types endif ifeq ($(G),3) G_opt = -add-array-extensionality-axioms \ -add-record-eq-elements-extensionality-axioms \ -use-array-eq-aliases \ -use-record-eq-aliases \ -identify-eqs-at-arr-rec-types endif ifeq ($(smtlib_options_provided),yes) smtlib_option_suffix =-$(A)$(B)$(C)$(D)$(E)$(F)$(G) else smtlib_option_suffix =# (empty string) endif #============================================================================= # Assembly of option lists #============================================================================= #SMTLIB2: will need to address alternate divmod support and abs abstraction # By using -expand-exp-const, are assuming solver can handle non-lin arith. 
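# Illustrative example (not part of the original file): report_root below
# concatenates the prefix, the make target name and the option suffixes, so
# an invocation such as
#
#   make autop-sc SIV=true CT=100 A=1 B=0 C=2
#
# would be expected to write its reports under names like
# f-autop-sc-siv-ct100-102.vct / .vsm / .vlg in $(OUTDIR).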
report_root = $(fuse_c_pfx)-$@$(siv_sfx)$(tg_sfx)$(std_rlu_sfx)$(lin_sfx)$(enum_sfx)$(timeout_sfx)$(repeat_sfx)$(smtlib_option_suffix)$(SFX) std_options = \ $(unit_option) \ $(goal_option) \ $(fuse_concl_options)\ $(timeout_option)\ $(siv_flag)\ $(tg_flag)\ $(lin_opt)\ $(enum_option)\ $(repeat_option)\ $(std_rlu_option)\ -units=$($*_units)\ -report=$(report_root)\ -report-dir=$(OUTDIR)\ -prefix=$($*_prefix)\ -decls=prelude.fdl\ -unique-working-files\ -delete-working-files\ -rules=divmod.rul\ -rules=prelude.rul\ -ground-eval-exp\ -expand-exp-const\ -abstract-exp\ -abstract-divmod\ -gstime\ -utick\ -gtick\ -longtick\ -echo-final-stats\ -level=warning #---------------------------------------------------------------------------- # API CVC3 options #---------------------------------------------------------------------------- # CVC3 r2.2 segfaults on a few tokeneer goals, so we need to exclude them. # The relevant goals are tagged with "cvc3?" prefix in tokeneer-units.lis # and api_cvc3_options = \ -exclude-selected-goals\ -active-unit-tags=cvc3\ -ground-eval-exp=false\ -abstract-exp=false\ -bit-type \ -bit-type-bool-eq-to-iff\ -abstract-bit-valued-int-le\ -abstract-arrays-records-late\ -elim-array-constructors \ -add-array-select-box-update-axioms\ -abstract-array-box-updates\ -gstime-inc-setup \ -interface-mode=api \ -strip-quantifier-patterns\ -prover=cvc3\ $(EXTRAS) #---------------------------------------------------------------------------- # API Yices options #---------------------------------------------------------------------------- api_yices_options = \ -abstract-arrays-records-late\ -elim-record-constructors \ -elim-array-constructors \ -add-array-select-box-update-axioms\ -abstract-array-box-updates\ -strip-quantifier-patterns\ \ -gstime-inc-setup \ -interface-mode=api \ -prover=yices\ $(EXTRAS) #---------------------------------------------------------------------------- # SMTLIB Interface options #---------------------------------------------------------------------------- smtlib_base_options = \ -bit-type \ -bit-type-bool-eq-to-iff\ \ -refine-types\ -refine-int-subrange-type \ \ -elim-array-constructors\ -add-array-select-box-update-axioms\ -abstract-array-box-updates\ -add-array-select-update-axioms\ -abstract-array-select-updates\ -abstract-array-types\ \ -abstract-record-types\ \ -abstract-bit-ops\ -abstract-bit-valued-eqs\ -abstract-bit-valued-int-le\ -elim-bit-type-and-consts\ \ -abstract-reals\ \ -lift-quants \ -strip-quantifier-patterns\ -elim-type-aliases\ \ -interface-mode=smtlib \ -smtlib-hyps-as-assums smtlib_options = \ $(smtlib_base_options)\ $(A_opt)\ $(B_opt)\ $(C_opt)\ $(D_opt)\ $(E_opt)\ $(F_opt)\ $(G_opt)\ -logic=$(smtlib_logic)\ $(EXTRAS) #---------------------------------------------------------------------------- # SMTLIB2 Interface options #---------------------------------------------------------------------------- smtlib2_base_options = \ -refine-types\ -refine-int-subrange-type \ \ -elim-array-constructors\ -add-array-select-box-update-axioms\ -abstract-array-box-updates\ -add-array-select-update-axioms\ -abstract-array-select-updates\ -abstract-array-types\ \ -abstract-record-types\ \ -lift-quants \ -strip-quantifier-patterns\ \ -interface-mode=smtlib2 smtlib2_options = \ $(smtlib2_base_options)\ $(D_opt)\ $(E_opt)\ $(F_opt)\ $(G_opt)\ -logic=$(smtlib_logic)\ $(EXTRAS) #---------------------------------------------------------------------------- # Simplify interface options #---------------------------------------------------------------------------- # 
Setting of symbolic constant threshold omitted. Should be added later when # needed simplify_interface_options= \ $(smtlib_options) \ -refine-uninterpreted-types \ -switch-types-to-int \ -interface-mode=simplify \ $(EXTRAS) #---------------------------------------------------------------------------- # Isabelle options #---------------------------------------------------------------------------- # # These include a revised set of standard options. # # std_options removed: # $(lin_opt)\ # $(repeat_option)\ # -ground-eval-exp\ # -abstract-exp\ # -abstract-divmod\ # # # std options modified: # -rules=divmod.rul\ # -rules=prelude.rul\ # # # isabelle_options=\ $(unit_option) \ $(goal_option) \ $(fuse_concl_options)\ -fuse-unary-concls \ $(timeout_option)\ $(siv_flag)\ $(tg_flag)\ -units=$($*_units)\ -report=$(report_root)\ -report-dir=$(OUTDIR)\ -prefix=$($*_prefix)\ -decls=prelude.fdl\ -unique-working-files\ -rules=none\ -rules=prelude.rul\ -elim-enums\ -gstime\ -utick\ -gtick\ -longtick\ -echo-final-stats\ -level=warning\ \ -abstract-arrays-records-late\ -elim-record-constructors\ -elim-array-constructors\ -add-array-select-box-update-axioms\ -abstract-array-box-updates\ -refine-types\ -refine-int-subrange-type\ -interface-mode=isabelle\ # End of file spark-2012.0.deb/victor/vct/run/vsm-file-header.txt0000644000175000017500000000015711753202341021057 0ustar eugeneugenrun,ERRORs,WARNINGs,total,true,unproven,timeout,error,"true (%)","unproven (%)","timeout (%)","error (%)",time spark-2012.0.deb/victor/vct/run/prelude.rul0000644000175000017500000000646511753202341017542 0ustar eugeneugen/* VCT Prelude rules ================= Rules give properties or definitions for standard FDL functions. Variable typing convention: X,Y,Z : int. U,V,W : real */ rule_family prelude: . /* bit operator rules, drawn from bitwise.rul */ /*** Simplification of bitwise operators ***/ bitwise(1): bit__and(X,X) may_be_replaced_by X if [ 0 <= X ]. bitwise(2): bit__or(X,X) may_be_replaced_by X if [ 0 <= X ]. bitwise(3): bit__xor(X,X) may_be_replaced_by 0 if [ 0 <= X ]. /*** Properties of zero ***/ bitwise(11): bit__and(X,0) may_be_replaced_by 0 if [ 0 <= X ]. bitwise(12): bit__or(X,0) may_be_replaced_by X if [ 0 <= X ]. bitwise(13): bit__xor(X,0) may_be_replaced_by X if [ 0 <= X ]. /*** Lower bounds ***/ bitwise(51): 0 <= bit__and(X,Y) may_be_deduced_from [ 0 <= X, 0 <= Y ]. bitwise(52): 0 <= bit__or(X,Y) may_be_deduced_from [ 0 <= X, 0 <= Y ]. bitwise(53): 0 <= bit__xor(X,Y) may_be_deduced_from [ 0 <= X, 0 <= Y ]. bitwise(54): X <= bit__or(X,Y) may_be_deduced_from [ 0 <= X, 0 <= Y ]. bitwise(55): Y <= bit__or(X,Y) may_be_deduced_from [ 0 <= X, 0 <= Y ]. bitwise(56): X - Y <= bit__xor(X, Y) may_be_deduced_from [ 0 <= X, 0 <= Y ]. bitwise(57): Y - X <= bit__xor(X, Y) may_be_deduced_from [ 0 <= X, 0 <= Y ]. /*** Upper bounds ***/ bitwise(61): bit__and(X,Y) <= X may_be_deduced_from [ 0 <= X, 0 <= Y ]. bitwise(62): bit__and(X,Y) <= Y may_be_deduced_from [ 0 <= X, 0 <= Y ]. bitwise(63): bit__or(X,Y) <= X + Y may_be_deduced_from [ 0 <= X, 0 <= Y ]. bitwise(64): bit__xor(X,Y) <= X + Y may_be_deduced_from [ 0 <= X, 0 <= Y ]. /* bitwise rules 66 & 67 are likely not to be instantiated with the default Victor options. To make use of them, add the option -ground-eval-exp=false after the std_options and make sure there are instances of the arith(3?) rules below for the values of N of interest. These arith rules help with triggering instantiation of these bitwise rules. 
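   Worked example (illustrative): with N = 8, arith(30) below rewrites
   2**8 to 256, after which bitwise(66) lets a prover deduce
   bit__or(X,Y) <= 255 from the hypotheses 0 <= X, 0 <= Y, X <= 255 and
   Y <= 255; bitwise(67) gives the analogous bound for bit__xor.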
*/ bitwise(66): bit__or(X,Y) <= 2**N - 1 may_be_deduced_from [ 0 <= X, 0 <= Y, 0 <= N, X <= 2**N - 1, Y <= 2**N - 1]. bitwise(67): bit__xor(X,Y) <= 2**N - 1 may_be_deduced_from [ 0 <= X, 0 <= Y, 0 <= N, X <= 2**N - 1, Y <= 2**N - 1]. /*** Comparison ***/ bitwise(81): bit__and(X,Y) <= bit__or(X,Y) may_be_deduced_from [ 0 <= X, 0 <= Y]. bitwise(82): bit__xor(X,Y) <= bit__or(X,Y) may_be_deduced_from [ 0 <= X, 0 <= Y]. arith(10): int___abs(X) may_be_replaced_by X if [X >= 0]. arith(11): int___abs(X) may_be_replaced_by - X if [X < 0]. arith(12): real___abs(U) may_be_replaced_by U if [U >= 0]. arith(13): real___abs(U) may_be_replaced_by - U if [U < 0]. arith(20): int___odd(X) may_be_replaced_by int___abs(X) mod 2 = 1. /* Cases of abstract exponentiation, useful for instantiating bitwise rules 66 & 67. See above. */ arith(30): 2**8 may_be_replaced_by 256. arith(31): 2**16 may_be_replaced_by 65536. arith(32): 2**32 may_be_replaced_by 4294967296. arith(33): 2**64 may_be_replaced_by 18446744073709551616. spark-2012.0.deb/victor/vct/run/Makefile0000644000175000017500000003661311753202341017014 0ustar eugeneugen#============================================================================= # Make rules for running Victor #============================================================================= # This file is part of Victor: a SPARK VC Translator and Prover Driver. # Copyright (C) 2009, 2010 University of Edinburgh # Author(s): Paul Jackson # Victor is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or (at # your option) any later version. # Victor is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # A copy of the GNU General Public License V3 can be found in file # LICENSE.txt and online at http://www.gnu.org/licenses/. #========================================================================== #============================================================================= # User customisation #============================================================================= # Stand-alone executables for SMT solvers # ---------------------------------------- # The default assumption is that the solver executables are on the current # path. As desired, absolute paths can be set up for each here. Z3=z3 YICES=yices CVC3=cvc3 SIMPLIFY=simplify ALT_ERGO=alt-ergo # Locations of SPARK VC files # --------------------------- # Targets are set up below that can easily be used for different sets # of VC units. For a set of VC units named , the targets # assume the definition here of: # # 1. A Make variable "_prefix" for the root of the directory tree # for the set. # 2. A Make variable "_units" that names the .lis file listing # the unit names and, for each unit, specifies options. See the # user manual "Unit listing input file" section for details on how to # format this file. # # 3. A phony target "vcset-" against which to check matches for # the pattern rules below for running Victor. # Make variables for VC sets distributed with Victor. 
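# As an illustration (not in the original Makefile), a local VC set named
# "myproj" would be declared with the same three pieces of information,
# alongside the distributed sets below:
#
#   myproj_prefix = /path/to/myproj/code            # hypothetical location
#   myproj_units  = /path/to/myproj/myproj-units.lis
#   .PHONY: vcset-myproj
#
# after which, for example, "make myproj-sz" would run Z3 on that set.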
VCT_VCSETS=../vc autop_prefix = $(VCT_VCSETS)/autop/code autop_units = $(VCT_VCSETS)/autop/autop-units.lis .PHONY: vcset-autop hilton_prefix = $(VCT_VCSETS)/hilton/code hilton_units = $(VCT_VCSETS)/hilton/hilton-units.lis .PHONY: vcset-hilton #============================================================================= # Make features used #============================================================================= # # Automatic variables commonly used in commands are: # # $@ : Set to whole rule target # # $* : Set to the stem of the target (the stem is the string that the # % matches in the target) # Rules are made generic with respect to the VC set by using GNU Make # pattern rules. (rules with % in target) # Use of phony rather than real targets ensures that rules always # fires when make called. #============================================================================= # Variables settable in make calls #============================================================================= # # The behaviour of Victor runs can be customised by setting make variables # when invoking make. # # # Syntax for setting variable names is: # # make = .. = # # When make variables are set in this way, any definition in the makefile # is overridden. Variable descriptions are given below in rough order # of likely interest to users. # # # Format of documentation # # # <Varname> [=<value> : <type>] [<default>] <suffix> # # <Description> # # where # <value> : suggestive name for value. # <type> : One of `int' or `string' or `fixed'. or `bool'. # `fixed' means fixed point. E.g. 1.5 0.03 # `bool' values are `true' and `false'. # # <suffix> : Suffix added to report name. Usually includes <value> if # significant. #---------------------------------------------------------------------------- # Timeout for file-level interface # # T=<time> : int (10) -t<time> # # Set timeout in seconds for solver with file-level interface. # Uses process ulimit facility to set timeout. #---------------------------------------------------------------------------- # CVC3 timeout # # CT=<time> : int (100) -ct<time> # # Set timeout in units of 0.1 seconds for CVC3. Works for both API and SMTLIB # file-level interfaces. Uses CVC3's internal timeout capabilities. # If CT is set, T is ignored. #---------------------------------------------------------------------------- # Use unfused conclusions # # U=<val> : bool (false) u- | f- # # If <val> is true, run solvers on each goal slice at a time. If false, # fuse conclusions together so there is one solver run per goal. # If conclusions are unfused, add a u- prefix to report name. # If conclusions are fused, add an f- prefix to report name. # # Prefix rather than suffix added to report file name so that summary # files for each kind of run are separated out when files listed in # order. #---------------------------------------------------------------------------- # Directory for reports # # OUTDIR=<dirname> : string (out) # # Directory for .csv .log .sum report files #---------------------------------------------------------------------------- # Arithmetic abstraction and simplification # # L= <val> : bool (false) -lin # LEC=<val> : bool (false) -lin-ec # LAE=<val> : bool (false) -lin-ae # EC=<val> : bool (false) -ec # AE=<val> : bool (false) -ae # Various options for coping with solvers that don't support non-linear # arithmetic (e.g. Yices). At most one should be selected. 
# * EC substitutes out symbolic integer constants, constants introduced # with definition c = k where c is a constant, k an integer literal. # * AE Applies EC first and then does some partial evaluation and # simplification, trying to make more multiplications linear. # * L abstracts non-linear multiplications. Applied after EC or AE in LEC # and LAE. #---------------------------------------------------------------------------- # siv file # # SIV=<val> : bool (false) -siv # # If <val> is true, read goals from .siv files output by Simplifier # rather than .vcg files output by Examiner, and add -siv suffix to report # names. #---------------------------------------------------------------------------- # Track trivial goals # # TG=<val> : bool (false) -tg # # `Trivial goals' are goals in .vcg or .siv files with no hypotheses or # conclusions and listed as simply "*** true". # # If <val> is true, Victor includes a line in the .csv report file for each # of these goals. #---------------------------------------------------------------------------- # Report file suffix # # SFX=<suffix> : string "" # # Add an extra suffix to report file name. Use this option when some # parameter affecting behaviour is changed in a way that doesn't # otherwise add a suffix. #---------------------------------------------------------------------------- # Iterate calls of solver to increase precision of runtime measurements. # # RD=<count> : int (1) -rd<count> # RC=<count> : int (1) -rc<count> # # RD ("Repeat Drive") for use with API interface mode, RC ("Repeat # Check") for use with smtlib/simplify interface mode. #---------------------------------------------------------------------------- # Focus on single unit and possibly single goal # # UNIT=<unitname> : string ("") # GOAL=<goalnumber> : int (0) # # Setting UNIT overrides any units file listing of units to run on. # A default GOAL number of 0 indicates to try all goals. #---------------------------------------------------------------------------- # Set threshold for symbolic numeric constants # # SYM_CONSTS=<threshold> : int (100000) # # Use with Simplify solver to attempt to avoid overflows. #---------------------------------------------------------------------------- # Options for SMTLIB and Simplify translations # # A=<Aoption> : int (0) # B=<Boption> : int (0) # C=<Coption> : int (0) # D=<Doption> : int (0) # E=<Eoption> : int (0) # F=<Foption> : int (0) # G=<Goption> : int (0) # # Options A-F select translation alternatives and should not affect # completeness. Option G affects how array and record extensionality is # dealt with. The default for G is not to add in any support for # extensionality. # # See Makefile-prelude.mk for details on these options. #============================================================================= # Include prelude #============================================================================= # The prelude defines make variables that assemble together sets of # Victor options for different solvers and different customisation # options. 
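# As an illustration (not part of the original Makefile) of how the
# variables documented above are passed on the command line:
#
#   make autop-sz SIV=true OUTDIR=results SFX=-run1
#
# would run Z3 via the SMT-LIB file interface on the autop .siv VCs,
# writing its reports into ./results with an extra "-run1" suffix, while
#
#   make hilton-sc CT=50
#
# would run CVC3 on the hilton set with a 5 second internal timeout
# (CT is in units of 0.1 seconds).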
include Makefile-prelude.mk #============================================================================ # Rules for running Victor #============================================================================ # Naming conventions # Make targets: # <unit-set>-<interface-m><prover> # # Output files: # <prefix><unit-set>-<interface-m><prover><suffix>.[vlg|vct|vsm] # <prefix> = # f-: conclusions fused # u-: conclusions unfused # <interface-m> = # a: API # s: SMT-Lib and file-level # t: SMT-Lib2 and file-level # p: Simplify and file-level # i: Isabelle/HOL and file-level # d: dummy # <prover>= # a: Alt-Ergo # c: cvc3 # y: yices # p: simplify # z: z3 # n: none # <suffix> = suffices added by setting make variables that customise # Victor behaviour # e.g. # # make autop-ac # # Runs CVC3 via API on autop VC set and creates report files # # f-autop-ac.vlg # f-autop-ac.vct # f-autop-ac.vsm # # in OUTDIR directory (by default ./out). # Also below we use <prefix><unit-set>-xx when analysing Simplifier # behaviour. #---------------------------------------------------------------------------- # API interface mode #------------------- %-ac: vcset-% $(VCT) \ $(std_options) \ $(api_cvc3_options)\ %-ay: vcset-% $(VCT) \ $(std_options)\ $(api_yices_options) \ #---------------------------------------------------------------------------- # SMTLIB file interface mode #--------------------------- %-sa: vcset-% $(VCT) \ $(std_options)\ $(smtlib_options) \ -prover-command=$(ALT_ERGO) \ %-sc: vcset-% $(VCT) \ $(std_options)\ $(smtlib_options) \ -prover-command='$(CVC3) -lang smt $(cvc3_timeout_flag)'\ %-sy: vcset-% $(VCT) \ $(std_options)\ $(smtlib_options) \ -abstract-nonlin-times\ -logic=AUFLIA\ -prover-command='$(YICES) -smt'\ %-sz: vcset-% $(VCT) \ $(std_options)\ $(smtlib_options) \ -prover-command='$(Z3) -smt' \ #---------------------------------------------------------------------------- # SMTLIB2 file interface mode #---------------------------- # Observed incompletenesses of SMTLIB2 support noted below. # (Partial) workaround options provided for some of these. # Alt-Ergo. 0.93 # - define-type not supported. # - to_real function not recognised. # - Instances of quantified Bool-typed variables not recognised. %-ta: vcset-% $(VCT) \ $(std_options)\ $(smtlib2_options) \ -elim-type-aliases\ -smtlib2-add-to_real-decl\ -prover-command=$(ALT_ERGO) \ # CVC3 2.4 (SMT-COMP 2011 version) # - define-type not supported. # - to_real function not recognised. # - (set-option :print-success false) flagged as syntax error. # - Bool cannot be used as function argument type. %-tc: vcset-% $(VCT) \ $(std_options)\ $(smtlib2_options) \ -elim-type-aliases\ -smtlib2-omit-set-option-command\ -smtlib2-add-to_real-decl\ -prover-command='$(CVC3) -lang smt2 $(cvc3_timeout_flag)'\ # Z3 2.20/3.0 preview (As submitted to SMT-COMP 2011). # No significant incompletenesses observed yet. 
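# e.g. (illustrative, not part of the original Makefile):
# "make hilton-tz SIV=true" would run Z3 over the hilton .siv VCs through
# the SMT-LIB2 file interface, and "make hilton-tc CT=100" would do the
# same with CVC3 under a 10 second internal timeout.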
%-tz: vcset-% $(VCT) \ $(std_options)\ $(smtlib2_options) \ -prover-command=$(Z3) \ #---------------------------------------------------------------------------- # Simplify file interface mode #----------------------------- %-pp: vcset-% $(VCT) \ $(std_options)\ $(simplify_interface_options) \ -sym-consts=$(SYM_CONSTS) \ -prover-command='$(SIMPLIFY) -nosc' \ # As %-pp, but use z3 %-pz: vcset-% $(VCT) \ $(std_options)\ $(simplify_interface_options) \ -sym-consts=$(SYM_CONSTS) \ -prover-command='$(Z3) -s' \ # As %-pz but no threshold %-pz-nt: vcset-% $(VCT) \ $(std_options)\ $(simplify_interface_options) \ -prover-command='$(Z3) -s' \ #---------------------------------------------------------------------------- # Isabelle file interface mode #----------------------------- %-in: vcset-% $(VCT) \ $(isabelle_options)\ -prover=none\ -working-dir=iwork \ -flat-working-files\ %-ii: vcset-% $(VCT) \ $(isabelle_options)\ -prover-command='isabelle tty <' \ -working-dir=iwork \ -flat-working-files\ #---------------------------------------------------------------------------- # Dummy interface mode #--------------------- # Uses include checking Victor's internal runtime and generating # reports on Praxis Simplifier performance %-dn: vcset-% $(VCT) \ $(std_options)\ -interface-mode=dummy \ -prover=none\ %-sn: vcset-% $(VCT) \ $(std_options)\ $(smtlib_options) \ -prover=none\ %-se: vcset-% $(VCT) \ $(std_options)\ $(smtlib_options) \ -prover-command='./echo.sh' \ #============================================================================ # Simplifier analysis #============================================================================ # Create .vct report for Simplifier results, excluding trivial goals. # For now, .vsm reports must be generated by hand. %-xx: vcset-% make $*-dn TG=true make $*-dn SIV=true TG=true csvmerge $(OUTDIR)/f-$*-dn-siv-tg.vct 1 2 3 4 5 6 7 8 \ $(OUTDIR)/f-$*-dn-tg.vct 8 | \ csvfilt 9 unproven | csvproj 1 2 3 4 5 6 7 8 > $(OUTDIR)/f-$*-xx.vct #============================================================================ # Gathering summary files #============================================================================ sum: cat vsm-file-header.txt $(OUTDIR)/*.vsm > $(OUTDIR)/all-sum.csv #============================================================================ # Sorting VCT file entries by run-time #============================================================================ %-sorted.vct: %.vct sort -s -n -t ',' -k 9,9 $*.vct | csvproj 9 8 1 6 > $@ #============================================================================ # Sets of runs #============================================================================ # e.g. make autop-all T=1 %-all: make $*-ac CT=$(T)0 make $*-ay EC=true make $*-sa make $*-sc make $*-sy LAE=true make $*-sz make $*-pp all-all: make autop-all make hilton-all #============================================================================ # Development Make rules #============================================================================ # include Makefile-dev.mk # End of file. 
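# Illustrative end-to-end session (not part of the original Makefile),
# assuming the solver executables named at the top of this file are on PATH:
#
#   make autop-sz                        # run Z3 on the autop set via SMT-LIB
#   make sum                             # gather .vsm summaries into out/all-sum.csv
#   make out/f-autop-sz-sorted.vct       # sort that report by per-goal run time
#
# The exact report names depend on the option suffixes described in
# Makefile-prelude.mk.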
���������������������������������������������������������������������������������������������������������������������spark-2012.0.deb/victor/vct/run/prelude.fdl���������������������������������������������������������0000644�0001750�0001750�00000002365�11753202341�017500� 0����������������������������������������������������������������������������������������������������ustar �eugen���������������������������eugen������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������ {*******************************************************} {FDL Declarations} {Prelude for standard functions} {*******************************************************} {DATE : 1-JUN-2009} title procedure any; function character__pos(integer) : integer; function character__val(integer) : integer; function bit__or(integer, integer) : integer; function bit__and(integer, integer) : integer; function bit__xor(integer, integer) : integer; function bit__not(integer, integer) : integer; function int___times(integer, integer) : integer; function int___div(integer, integer) : integer; function int___mod(integer, integer) : integer; function int___rem(integer, integer) : integer; function real___div(real, real) : real; function real___times(real, real) : real; function real___plus(real, real) : real; function real___minus(real, real) : real; function real___uminus(real) : real; function int___to_real(integer) : real; function int___abs(integer) : integer; function real___abs(real) : real; function int___exp(integer, integer) : integer; function real___exp(real, integer) : real; function int___odd(integer) : boolean; end; ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������spark-2012.0.deb/victor/vct/run/timeout.sh����������������������������������������������������������0000755�0001750�0001750�00000002525�11753202341�017374� 0����������������������������������������������������������������������������������������������������ustar �eugen���������������������������eugen������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������# Execute a command with a timeout # Author: # http://www.pixelbeat.org/ # Notes: # If the timeout occurs the exit status is 128. # There is an asynchronous (and buggy) equivalent of this # script packaged with bash (under /usr/share/doc/ in my distro), # which I only noticed after writing this. # I noticed later again that there is a C equivalent of this packaged # with satan by Wietse Venema, and copied to forensics by Dan Farmer. # Changes: # V1.0, Nov 3 2006, Initial release # V1.1, Nov 20 2007, Brad Greenlee <brad@footle.org> # Make more portable by using the 'CHLD' # signal spec rather than 17. 
if [ "$#" -lt "2" ]; then echo "Usage: `basename $0` timeout_in_seconds command" >&2 echo "Example: `basename $0` 2 sleep 3 || echo timeout" >&2 exit 1 fi cleanup() { kill %1 &>/dev/null #kill sleep $timeout if running # kill %2 2>/dev/null && exit 128 #kill monitored job if running kill %2 2>/dev/null && echo timeout && exit 128 } set -m #enable job control trap "cleanup" CHLD #cleanup after timeout or command timeout=$1 && shift #first param is timeout in seconds sleep $timeout& #start the timeout # sleep $timeout &> /dev/null & #start the timeout, swallow any output on kill "$@" #start the job ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������spark-2012.0.deb/victor/vct/run/echo.sh�������������������������������������������������������������0000755�0001750�0001750�00000000053�11753202341�016616� 0����������������������������������������������������������������������������������������������������ustar �eugen���������������������������eugen������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������# Dummy prover echo unknown # End of file �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������spark-2012.0.deb/victor/vct/doc/��������������������������������������������������������������������0000755�0001750�0001750�00000000000�11753202341�015304� 5����������������������������������������������������������������������������������������������������ustar �eugen���������������������������eugen������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������spark-2012.0.deb/victor/vct/doc/vct-man.pdf���������������������������������������������������������0000444�0001750�0001750�00000627666�11753202341�017371� 0����������������������������������������������������������������������������������������������������ustar �eugen���������������������������eugen������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������%PDF-1.4 %ÐÔÅØ 5 0 obj << /S /GoTo /D (section.1) >> endobj 8 0 obj (Supported provers and prover languages) endobj 9 0 obj << /S /GoTo /D (subsection.1.1) >> endobj 12 0 obj (Simplify language) endobj 13 0 obj << /S /GoTo /D (subsection.1.2) >> endobj 16 0 obj (SMT-LIB language) endobj 17 0 obj << /S /GoTo /D (subsection.1.3) >> endobj 20 0 obj (Alt-Ergo) endobj 21 0 obj << /S /GoTo /D (subsection.1.4) >> endobj 24 0 obj (CVC3) endobj 25 0 obj << /S /GoTo /D (subsection.1.5) >> endobj 28 0 obj (Simplify) endobj 29 0 obj << /S /GoTo /D (subsection.1.6) >> endobj 32 0 obj (Yices) endobj 33 0 obj << /S /GoTo /D (subsection.1.7) >> endobj 36 0 obj (Z3) endobj 37 0 obj << /S /GoTo /D (section.2) >> endobj 40 0 obj (Installation 
and Testing)
[The remainder of vct-man.pdf is binary PDF data (compressed page streams, link annotations and cross-reference objects) and cannot be reproduced as text. The information recoverable from the document outline is the manual's table of contents: 1 Supported provers and prover languages (Simplify language; SMT-LIB language; Alt-Ergo; CVC3; Simplify; Yices; Z3); 2 Installation and Testing; 3 Operation (Terminology; Basic operation; Input and output files: Unit listing input file, VCT output file, VSM output file, VLG output file; Invocation of Victor; Examples; Performance tips); 4 Command line options (Input options; Translation options; Prover and prover interface selection; Prover driving options; Output options: Screen output options, General report file options, VCT file options, Log file options; Debugging options; CVC3 options; Simplify options; Yices options; Z3 options); 5 Translation (Standard Form translation; Type checking; Enumerated type abstraction; Early array and record abstraction; Separation of formulas and terms; Type refinement; Late array and record abstraction; Bit abstraction; Arithmetic simplification; Arithmetic abstraction; Final translation steps); 6 CSV utilities (Filter CSV records; Merge two CSV files; Project out fields of CSV records); 7 Future developments.]
>²TxÚ¥Û4ÀÂSÕ´u{|^ÃÖ¢rŽUÀkÿM€ “ǨÕʃéNbCô+„a'ß±Îýy¡ ì¾­–�©˜ îàB¢3K=˜7dÔÔÁýØnC¼¼Ê}°ÝÔ¯qß± <>{ŸÎ�C¬m•‚rex½Ö�}]åô¾N+pV¢<€mœÕ´Êø•àgÒå‡Û}]S¹õÐR^U¦Hg4uíÉ^ð�Ûq¤Î7ôŒ^¡Æ;×¹ýzé^Ù66Œó¦îØfõŸz' ¯/T”Ú6+fUǦ…\€ùx\-u½ o•ŠP+>Ãû5³K%‚Éׯ4š{ÆUkG"˜5ƒ¤˜”v{pÅc…�AM~¬rã?Œaf¢9/~3þ�_»ìÄc´¹ {â(ïBg4:-ÄØ–…zçõ&4cBŠúíúìFró¥+xDwú$'y#í¢¥ ˆ[æ¨OùñV,¥Nýv½FAv™.?D_=½-©¿€]£ÔøÞ±âƒòEh<B£çlxd;ÓàÁvœôÌISLçÔŸM¢mÍ §+ÌC¦1‘󸹪¨d®#-O´‚1ËæÀã„ñU¾¹ªÕ9|b" È¿ …y\.r“ÍÑž«rM^ ôïåS5]$2˜ÅZnØ)é‹Hk"RŽ)BÙñ!ëAqà£ÐkWÍ«Éâa<ðÞ !Úk\+dÁ5\["é]y´Hœ¤:Bc‡ž1ö­íåjíò¥Ì¦KÒñr(ã׺ê‡åÞ-녱ž²>è\OW¥oZßO¯ÌÏû—¼¼¨DÛ^5\êCEš5ÝŽÌê,-Ê>G Åæ^¬¸Ù·ËTÍÓꢧ(ÿôDr\]p“•‘ÁòºÕZ¦«Î]8´+æðÁ¥Ã¹1®®åü_¯:|ø¨u‚«‹¡O¨«f¼QwÅÞØaÑ3߀ŽÕç*L^•ˆaˤ1…+ŽïcO½A`h‹åÝöøûéáî¿Vš¤Ú &8 ÎOw_ñ7>!; |7O™ Õ$dÝzs÷OþÏT¼‰°=m¡NÅöí›öç ¼ûœlÀò‘ŠØ“è½wZDÒÞþt&ÈêšÜjN¦øÍéâ8kø-sa?Û{x½¹ $owÈ¡i?£+˜]Ô»<4WJ‘·V.ê±C ¶Úwvÿ-“ ‚Tz+óµV'ŸÎÐÇì{æ€Éi¾çò„~"B­_FÇ|¿Ј|LùàúÚÿ¾Æßá¥}‘jÔ¡a)# ^¬²~ÞŒ G endstream endobj 314 0 obj << /Type /Page /Contents 315 0 R /Resources 313 0 R /MediaBox [0 0 595.276 841.89] /Parent 332 0 R /Annots [ 311 0 R 312 0 R ] >> endobj 311 0 obj << /Type /Annot /Border[0 0 0]/H/I/C[1 0 0] /Rect [385.803 497.936 402.753 510.555] /Subtype /Link /A << /S /GoTo /D (subsection.3.5) >> >> endobj 312 0 obj << /Type /Annot /Border[0 0 0]/H/I/C[1 0 0] /Rect [296.478 149.405 303.203 163.197] /Subtype /Link /A << /S /GoTo /D (Hfootnote.1) >> >> endobj 316 0 obj << /D [314 0 R /XYZ 63.78 803.424 null] >> endobj 317 0 obj << /D [314 0 R /XYZ 63.78 781.755 null] >> endobj 318 0 obj << /D [314 0 R /XYZ 63.78 753.073 null] >> endobj 319 0 obj << /D [314 0 R /XYZ 63.78 731.077 null] >> endobj 320 0 obj << /D [314 0 R /XYZ 63.78 712.07 null] >> endobj 321 0 obj << /D [314 0 R /XYZ 63.78 609.464 null] >> endobj 322 0 obj << /D [314 0 R /XYZ 63.78 585.475 null] >> endobj 323 0 obj << /D [314 0 R /XYZ 63.78 532.263 null] >> endobj 42 0 obj << /D [314 0 R /XYZ 63.78 481.133 null] >> endobj 46 0 obj << /D [314 0 R /XYZ 63.78 434.797 null] >> endobj 50 0 obj << /D [314 0 R /XYZ 63.78 286.754 null] >> endobj 325 0 obj << /D [314 0 R /XYZ 63.78 208.053 null] >> endobj 326 0 obj << /D [314 0 R /XYZ 63.78 184.064 null] >> endobj 329 0 obj << /D [314 0 R /XYZ 81.712 143.648 null] >> endobj 313 0 obj << /Font << /F18 244 0 R /F29 245 0 R /F49 281 0 R /F30 246 0 R /F52 324 0 R /F32 327 0 R /F7 328 0 R /F8 330 0 R /F56 331 0 R >> /ProcSet [ /PDF /Text ] >> endobj 335 0 obj << /Length 1791 /Filter /FlateDecode >> stream xÚ½XI³œ6¾ûWÌ‘©2²V$\•J%Uq*¹ä—\l0ÃÌ30añòïÓRKbàñüì8΄Z½÷×¢»ÓŽî~~BWïïž<{ÁÌŽ1’+ÅwwÇ6„½ÓYNÌîî°{™²}÷kܳ{™Êœ&¿´{n’w{®“î­}VûTð,)ìK'×¾‹ÿ«u-þ«ŠÒNŸqöÔÿßãÿS·g Nêdhê²Zq@w)—DÒ”› þD¦ñ:öç³<¿•ReDé–;ä]éW-u‡dT‡UO·i”œé —M:†(ía‹çDg|¦Ôœ¢D#ß}uݧÖ�ýˆ|E™lªàVAwL!3·5e†ÉÍ.•á,WìSƸƒZ]¥B+Ç¢t¨@7ö´·¸{A4—Hóîì}áϺ»>˪¬-íZûã¬Ë¸ã¬„ãfQpqÑïòÇ®iÀ¾õ£÷·ò.˜’ SÁ¢ „yQÿhk/TScÝžð£nWÂ~ެ–ƒ×º,šæ£Ý«½Ôvl’zÀw?µ8p1�‹.Eëvâ,DË©/.økîü¶b »l ¤ÊäÉž2ëÆn"—«à#Êšë(ë†óe <cvcX¹é|9aó*¯¢ &¬…KJ„àkž2ó •µ/åÉØá»n °±ò³g?‚Û#¾‡s75~wÙµC}¨zP„ÕÝ…mVs—¢Çcç.É—dà«©Û*œo›§¨H&ð¢6œ6{|Ì.`mîR(3ªÍ\5¦·?ß A7L¢H®µËY¹OYŽŸ KIŒ~þü»-j\•‰[û¶ÅÅSzq.'‹ùëø0k~EwkpÐ J‚££O¶(¥,Ä@®±rjÉî“|DÔ”QJ“—›>M‰aÑóÆbÓ‰¤µ<fÐï·ÎÌ [ǯ7O"|^ñŠókFäQ/Ùâßת¢I ¶T{ê—.âé¯/¥¹*4”äYc}‹ØBM¥% h›A%Ò°/fƒköÛ%‹’µˆÞFއí³5xEn¾íÙýôg«otv3m¹XÊ8#š*ðEŒ¸úý¹²ERq– OŽiò¦/J[Eíü+ªèË×ðdø]Q»ÁoÂ@µ¨Ënk»6«þRÃÌðæ4G�@ÕÛ*gׯ of C Ë"ò9nBÎ Ú䮵žM1EÎ cÙÒð‘[›ŒU„F”™NQ|!ÊTÉ¥CP¡’±¾TVH7>‡ÉHo˨ p˜fÛ9x•4Tc¾ÂKj€!ïç¶UéEÎy:BÈW�w¤zÈÕî±5sUMÍõŒÀdòº–T 2ÿB×—ª¯-ŒÓ2ƾ¶Æ99š�æàjR‹Ok’A q¤ÍĵÒ"RÈÇt�!Äõ¹kIO�èal©à»-Æ© ˆývê¸Øçt©~°XL"^ª¢õ0R&ÝqV€Ó!W”H½TaÙ]<vo+wÂèA…Ûm!/~zàb¡/ÉþŸað-Ž=Ð:™vºKÔûLEÑ«M+Û0¨eŸåäK ð‘%èôêª/zÄc)ã¬#v#sì=duM@H,rYê¡Kå*±ß¯ûÌ6 o7Íœ>·LÃôƃm›i>­´”ø`bÙm~¡ÚhÎn9âoñéØB]˜ç`Õ “YøãÚïíZÛ}ô„ÊÑÃS¿è艜«HÍwbáèfèp4L×kS‡£ŠHÞïìŽÞ¯Q†E DòÌvþÊæ“ɵpÌ ™ 
Q±.}ÆFcBJ÷Uj=!uÊÙ*Ô1ƒ­ØvÁ!]Ð`R=LßêS@ÿ·¿U' êô] Ë3"Í*çÍ-hó~ï¢Ð¢}ïé³²ñ(ï_ãPŸL‘Ù-|}(EJ¨7Ùÿ�WqÈ Æª¦*!È¥äɵèǺœÛÂÙo{!4àГ0gGïƒ0ÚOÛÎÙÎïìÀº ˜Y½9KpZïðh>‡B™ÝKÈ7.Ã£Ë ›­ƒVGGiÒdžUŠbU‡eØrZhhž=v¥iÆMiõáËÈG PFÌ$¡0*®VÁÚwþêíŒUäöÚ“ÃØ‚šóùàƒ p.ì<îº¹Ë ×zúÖb,ùöññïàýç\ØÝG[û/ýòÔ˜o¦uSýGÔfoo,âÅØÍ-Y†×+0ÙtÅá5Ûúi¸×³ùözŽ-Ü=‰¥”KÜÀ| dKÉir¨JÈ¥[€,ø¤þbo˜÷Ð<|(�_YE9\oóC ^òH¯ò³@t,s!£¡ËÒùª ÷>Nm9±·™®¢ôÃt‡© ¡Q·!.°f´^fq•Í)#YØÑ� ­žY¬ùéîÉ? õë¡ endstream endobj 334 0 obj << /Type /Page /Contents 335 0 R /Resources 333 0 R /MediaBox [0 0 595.276 841.89] /Parent 332 0 R >> endobj 336 0 obj << /D [334 0 R /XYZ 63.78 803.424 null] >> endobj 337 0 obj << /D [334 0 R /XYZ 63.78 781.755 null] >> endobj 338 0 obj << /D [334 0 R /XYZ 63.78 762.494 null] >> endobj 54 0 obj << /D [334 0 R /XYZ 63.78 726.006 null] >> endobj 58 0 obj << /D [334 0 R /XYZ 63.78 646.396 null] >> endobj 333 0 obj << /Font << /F18 244 0 R /F29 245 0 R /F30 246 0 R /F52 324 0 R /F37 289 0 R /F49 281 0 R >> /ProcSet [ /PDF /Text ] >> endobj 341 0 obj << /Length 1343 /Filter /FlateDecode >> stream xÚ­WMoã6½çWè!rÑ¢(êcºénÑÚ´‰»—dŠDÛ‚õáRRvìïCIfV5A/MÑ33oÞŒ}gçøÎ¯¾y¾Ý\¬ßÓÄ¡”¤œÎfëDŒÄ‰G)IœMáÜ»×m]‹fÄn¿òXÄݪlD‡ËL ³¨ªVù¬>Dñfåq¸ý^tÂ:¹eS”+êæY/ Üy\‰ûŒëlõióaý>HO!qDRb è<cæ>á°cŽä Æ>“YÞ ‰�ÊŸ€ > egî”·ÕP7`óÀý¹êZåÃñ‚$&!8ðhHÂ0BãUÖàgA4…¼áaï$ „‚hK>ØŠˆï34qÓ˜Ã:^™úxƾEsGˆ‘GcUѯ‡¦ìG×]_6;|«îR“eg[’Cc6öæDÍšB›˜0>_Æž„jq’FÑo!‡*ÁÜU£¡ë5Y-pçòG²-ªKÜÿŠ{(Ì9iuëÊB¢>Ö_õ~´^Ãw—xä'|¨ûvöáºKi÷–ð•ö…! x´&tE)EÞ·Ò„¼ÝZ!îDoïT鹯= tG óO±@ŽÃ€ÄŒ" F V¥Œº×°s·úãÐãÚ$náf`A)!ZÛìÍ©Ð.øQ0ÒþinÞ/™ 8¡þtÐâK“WC1Ò¸™©¨«m+ñÈrÅé=~ÛµYe%ñ9îÄnW•¹�r„ŒkäSìDUtvtm£³W`þ‹¡$<¶sM‰}èÞ SßýS™Ìzc+‹g5– Ü<—å#ªˆfâpR,¨}YÙ˜r‹Üc¦æ°â!°H+:…*wP ?EÁ"È¿§ZVåse¡ì÷í`ö3£R¨Ø}±ÒqBo 19…#ÄÄ@ÜÆ~ 0¹zà*ÞDn’.Hà‚VSXÇ”ŽD;Ê6Å )Î9aQ<½Z2Ê'áßMÞ—pÙ%[ŒÐYÿ!DKØBЊÉ_Ÿu}ëw›‹.b¨f‡…œ@ÚÂ4",aN^_ÜòÞˆ–&Îg}²v‰Rõ«Ê¹»ø ;©íŽ… ´RªmÑÄÔqÿ|\Œd#ò'ü/Òá)„ Ê×bU¸˜´»9ÈÜpËLQ«!2ê5:<Bvv2«_ðÊèÝ9¾ðE׿hIà½Ét–þÃÿ$'èçÊÖS±=è>£Ž–ÆJ3Jøq*n)*=Jœ«¿hîGU ×/ÅLWûP«ÏGÝ5„<ŠxÑö5„¡UÂÚM‘xð¹?»Ñ [ôu^“åÜ÷à6ë‡nöØž”+ª†PkÀ_PAxÒS[c¶…” ½ÔèÎ…5]ÖÞ±ÛŽËzj�ÜŸØ'òoí{žÙ7…ÁnÉË[…±Û‘u&FÑ3Œ QQ[5@˜&”‚´^[Ne¡d^¨™rv^ÐérÛ¹?Gì$2ëGJ+MO#ËóAÊI©Çie¯§À£™TÔäÜ¥� þ ùÌÙ…ÇK`Í”ÌS§œ»ß_3á°y¡Kq`U‹n€9UOi¾?,êÅ>ëp!¾ô¢™á¾ût75õ©«¿Gyáz‘û[ô”�Þ3üJña¦ØÏ•ÚJ¯S³º"ïI·Üj0XOq«æŽ«Ç9È(ð;ñ#ûÆ4)Yù5íë;ÆŸ[ah$ûÓ±u6È+Æ•?”<%³<ò¾Ôµ)í»ÛÛ›Û¥ÈÃ<ÍÃk-ºNM*v—ªÚu;"~L¢ƒnÀ+n‚—Zg ¹ÿ Ú¹§ endstream endobj 340 0 obj << /Type /Page /Contents 341 0 R /Resources 339 0 R /MediaBox [0 0 595.276 841.89] /Parent 332 0 R >> endobj 342 0 obj << /D [340 0 R /XYZ 63.78 803.424 null] >> endobj 62 0 obj << /D [340 0 R /XYZ 63.78 638.367 null] >> endobj 343 0 obj << /D [340 0 R /XYZ 63.78 562.103 null] >> endobj 344 0 obj << /D [340 0 R /XYZ 63.78 537.695 null] >> endobj 345 0 obj << /D [340 0 R /XYZ 63.78 513.286 null] >> endobj 346 0 obj << /D [340 0 R /XYZ 63.78 488.546 null] >> endobj 347 0 obj << /D [340 0 R /XYZ 63.78 464.469 null] >> endobj 348 0 obj << /D [340 0 R /XYZ 63.78 440.061 null] >> endobj 349 0 obj << /D [340 0 R /XYZ 63.78 415.653 null] >> endobj 350 0 obj << /D [340 0 R /XYZ 63.78 390.58 null] >> endobj 351 0 obj << /D [340 0 R /XYZ 63.78 366.171 null] >> endobj 352 0 obj << /D [340 0 R /XYZ 63.78 341.763 null] >> endobj 353 0 obj << /D [340 0 R /XYZ 63.78 318.019 null] >> endobj 354 0 obj << /D [340 0 R /XYZ 63.78 293.61 null] >> endobj 66 0 obj << /D [340 0 R /XYZ 63.78 257.454 null] >> endobj 355 0 obj << /D [340 0 R /XYZ 63.78 169.069 null] >> endobj 356 0 obj << /D [340 0 R /XYZ 63.78 142.336 null] >> endobj 339 0 obj << /Font << /F18 244 0 R /F29 245 0 R /F30 246 0 R /F49 281 0 R >> /ProcSet [ /PDF /Text ] >> endobj 359 0 obj << /Length 1133 /Filter /FlateDecode >> stream 
xÚÅWK¤6¾Ï¯àÒâ±á¸‰²£¢Q”´f“=8´»-˜ž™Ÿòó’¢ÝKL¹\﫪ÆÁ1ÀÁÍ^­¿ì®®?‘, åœÓ`wD†p&‘æ( vûà>LPôu÷ût&¸YŽÃÛ¡ŽhþÅT„ª‹âÖö`D¯?Ñ|¡’¡„8lÕ}ùø×íçÛ'¸¼›¥(Où(X«¾—GÕÕiX6=º0a•ZšJÍhS†Î"¶éÀ.ÊhØjY9uMÊObáSj|²[Y]GÄ®n§¯Êb4ïܵölôj¿º{{|@ù»jÕÛËÇ›ÓÙÍ"|(õÉ‚8 &Éâi6FSwƒÚŠ9åˆN¹éµÔC¿Š)Å)Ê’lÓôgø04ç.â8l/ªÙò…Cò}Ù™„¢Œ-ó!Þ¤5X¹˜Õ¼ê¡䜤v/eãfa[9 {wDzé²Ví Ÿ‘OAö3R :—n+þ F‚±—ãO(ºL@¾éÉŸÆÕÊFKo°qË“ôyO ´eX¼…iþ #r”C[¤ƒàãEÆPšþ¯Œà(OVïvÆs"›s\àϽ\–Mí9‘~ç„ØäDú6N�º~|^çDò '8x¬2¼¥k©GU ºl›ï12§‚8CŒC™3¹…Žj5~™œÎúæºO' "x¢ï¥¯ãCY©ø¤ä^uH?ê-ÿLÇåbrp,‹åÞÆî“n!n©ÊF™'m]˸WgÙImsm?÷Ú=ÙìÀjn/›£Wvh;÷ Oª÷®É |,õ@{÷4zP‹˜aˆ·ü{d‚æ ”ÛõµÔï¶Mö6ÎfóY·GætîûC €ÔØé$\Ý–¿Ý—ÍÞmÂq×¥T#øÍ{ÙxM¹O£ÖUŸ×þÜþþ¤”žf‘c'käÒ•àyºà,£H�ßýl— Å„PÞýqçeçA»ç9Z–‰M�A}w'/ŘHãt*@—êøLÉÅÉD§õ„ç[fQ SKê$ ‰œxœûÔo—._E:U´Ý~Y�loö÷XT6^ÀƒÕˆž É|«<K3ãyƵn½+ Ýv^œû¶kÓ%â<”]ÙÓ¤ (¯¥¹ðƒïͦ|É®¸ÉB(=­ŸSÇqùe¶«Š)K·`±á( c †¢ø£ynêºÁZ…¹&вâÌáÆØ¹œ¡{wʆ$îŒÄ¯^÷º]Aë™yXOXöRæ=‡ÃÖÊZùùÍVøf†yøÓÁR /Ö ÿ/(ÉÇ@Œ¸ÿÜD kàÇ$ÓßWTÃ…ƒ[]~ßÈ›M$A÷{r=H>º/‡1óøÍgzÄ•—X¢\×eœæ0Àð%'.Åfñ¦0Œñi"ºßju ¢|âïHƒ­>'˜þº}uo*„9‰áÙ�SêFÖ›†eH™Îpä2…>—!ž°q[ˆü¶»újºÌ endstream endobj 358 0 obj << /Type /Page /Contents 359 0 R /Resources 357 0 R /MediaBox [0 0 595.276 841.89] /Parent 332 0 R >> endobj 360 0 obj << /D [358 0 R /XYZ 63.78 803.424 null] >> endobj 361 0 obj << /D [358 0 R /XYZ 63.78 781.755 null] >> endobj 362 0 obj << /D [358 0 R /XYZ 63.78 762.494 null] >> endobj 363 0 obj << /D [358 0 R /XYZ 63.78 737.421 null] >> endobj 364 0 obj << /D [358 0 R /XYZ 63.78 713.013 null] >> endobj 365 0 obj << /D [358 0 R /XYZ 63.78 688.604 null] >> endobj 366 0 obj << /D [358 0 R /XYZ 63.78 664.196 null] >> endobj 367 0 obj << /D [358 0 R /XYZ 63.78 639.787 null] >> endobj 368 0 obj << /D [358 0 R /XYZ 63.78 615.379 null] >> endobj 369 0 obj << /D [358 0 R /XYZ 63.78 590.97 null] >> endobj 370 0 obj << /D [358 0 R /XYZ 63.78 566.562 null] >> endobj 371 0 obj << /D [358 0 R /XYZ 63.78 542.153 null] >> endobj 70 0 obj << /D [358 0 R /XYZ 63.78 415.802 null] >> endobj 372 0 obj << /D [358 0 R /XYZ 63.78 339.538 null] >> endobj 373 0 obj << /D [358 0 R /XYZ 63.78 315.13 null] >> endobj 374 0 obj << /D [358 0 R /XYZ 63.78 290.721 null] >> endobj 74 0 obj << /D [358 0 R /XYZ 63.78 254.565 null] >> endobj 357 0 obj << /Font << /F18 244 0 R /F29 245 0 R /F52 324 0 R /F30 246 0 R /F49 281 0 R >> /ProcSet [ /PDF /Text ] >> endobj 377 0 obj << /Length 2304 /Filter /FlateDecode >> stream xÚ¥XK“ã¶¾ï¯ÐÍTÕ K Hcgrª¶*eO샓—„$f)R!¨™Ÿntƒ µN%' @¿¾þ€xwÚÅ»¿¼‹ùûýÓ»?Ê|'¥(Ò4Ù=wF‰,ße¦ùî©Þý=íþŸOýð#Œ/æ%ZÄ&Uü¤[׌]yá™ëÓD™…™åpº]l·O²hÜTb¢ÆÑ÷æl-èy¤æ‰Íñ•$%}\ÓZK“qk’ö}_ÎM…¿©K«eÑpãñ_›jìgÝ2#LVì‰q¬Xå}žDøW!ù/hÐ_ÔîYvÁmníØ\ýi@‚§qï÷gQñ'él˜+&J(YüWFT"/Ø6(»š÷w|¦s£ÝÃHýÍ#È´™Î–Gp[û´2Ð1»ƒÔBkCÓûëØ«3»ëþ�[Ý;-8*‹ÚÆàAvrw½±ÿKÝZrLÌ.Ih§ŸŽ4å3,žGýxKo„¦.D\¤ÿChz#Ϋ®-¥‘Ê?6”ÒÂÌy±4P9pÄ^‡~J<Cx×ï9âx'É„6éÿ±)%W5§®0jŽ{¥¤ˆa•UàO1.U4–_0À¬£®6ˆº¡¿±„¶Á`WÒD—ÒÏxå1t—ÔœŠ¹OEè’ `¼º pË¿t¶²Î•Ãë>—‘€ Rqô‹µô#Ú‡g}©ålåÕä`Y†åÑë"/‹j몡YX¥?~ÃÑRÆÂ$2XóSùÅ›öQVJðÌñ\r {gÞà nn„è*84ÝžaSÆŽNÐú*Þ&JÊ:‰p¬�ž´‰dDV"ݤLÒèã×òг1úXd‰ºƒñ;ZÁøsµµ’„½cæÔÍ`1J8¹Ï%G˜»}ö‰Æ› úä ×_8,œô¿ G,=“E¿bÔüà8Xu,s«<g^…àcKK�¹\ÍJ~øùãŸþüé£!–6ôÍk³)Ó§ãol5p>u GÊUÙT`–ã³MžÅ�oÄÙ]¥yèOÇ´qÌ$Jl¹Å$KŒq$)éó‰Óšåe/ÇSƒÕ® óIohÀö¡5t&„€¶××Hv�âÊ?P›] ­¦£ïÖÚV÷ 3ò¯Oâô›±˜AƽÕê5@` Z¯ã$(ƒÍ[õ©w-ÇÑOƒâôinB©µTÐð5h€5Gª’gÛÒ5ˆaØ.[×S‹rÁN»×þø`eò¥‹…s¶sË î;–¡/uf»q‡üòD~)f¯æ8L²òzm†þ:4åhßnZõ€ôúÐ6ÝÂXFP&ûʉ Tß Ú–NÈâM7PÑGAØ Û^-jŽü;W+ß>—¥á×0øt ³8ç°Ù#ÀÆ1ì€ dÍÙö™JšØÒe+c‘ËE*~øVAÈ2@Üô tÔv,›6 A‘‹T¥k~ó³=Z¨ˆzBWµW":=¦s†7Gþ3¬Øù‚N‚ãÐ_¨…q´GxÚTŠ‚Œ'0¼€2›à’‹ÌL³|ê< 8Ðã´žìŽ"Î’S¥×”U{«-Ÿ²¡z ªK‘›;69§ó„Ç0qépóa°Ç­Ã› 8ýä3Ÿ yô÷®ùºÉë !³Eu;n.™�§TSMðɯ²˜é¨¥_`2I\$t ±þó…DL ÕÙ—öõp² ùOr<ØwàYÁë¹*Ñk³‘©Ó"àp¹òÂ-_œ—CÅÄGÅÞ�­¤RâÙËHêÁÀ‹™cnòäBj"E‡pýmö ¨¯fîᙪ™¬ˆgèIÂ!ļRL­ €ëãÏqNa ß@À|º$ƒ áY¾"¡deìxJ1Oi¡°ô¥i[jÍî†ÎLX}ן6Îùç8½R©Èó #*÷ 
‡ýצ} \Í&CÞÆ¦…1Ç^l8ØKPÏFVÊáf£îít¶ó=˶ÌE'ZЖ`QOt˜˜ µÖ7½“epš¸— +•,ö°?‘¾p3¤h_¬4UËõ­¿*ÛMZßóaÃ|øo{%ÁßHÍJÂÒ,…u®>}¦W7œVöÄ Š³æ5¥Xý²ûý ‹8úÍ{]Üs>ùê—Ë&±LRQÌ´Îõ\Ž|õK4¢J×cíMVLÃ%œú²…�MtŠ{%aÇ:Ž`7ß#Šãƒz#D(•ß-ÛKïx—¦«=„{¥m_i�#d¹>ŒÒ\q~E~I‘Ben±¢c¶Ä’J¡}Öà(æ3~O=å]Hæk8|ýã þ[^HD\[Tl°õ¥ñüæa~û @Pðžzüv -ËY6kÙ’€è‘ÛªùÁä:6srcgeÿÎó¿ðpô£ç±qœ z<Êù ¯µ<´ï/MGXOR2cÛç&Î!”"Q9’ìÔ³¤ßÒae 4fdžU>fàãÎ�q˜¾Øñóñöéy*r¨}¨ļð/p�#v‡²ÆŒôq‚][ß*ž9fãxÁ6ïÃχ©§£k‡ÇÖ ¡oÇØPG /E<BOû8†õåSÄ­oâäÇ»¤Æs)Þ+á¼1g~"îÀbçüË$z7æôm¢ƒ‡d1 .` ¥È±´”áð(¢üÆÝ6`4ÜëÂ%ÆGClYÕ\ØlWOÏ„Ë=(Ú@0™rªmœù¬Ášò£yUýûV2@ûBÂʯMA&¡Ó$úþ•„µ=–·vD÷+5_1”¿9þ‘>[§Á‡ªG;¯¶¥6Ó`•„òû6©L)³¤VoئÙL÷êæùÒ×~Ýd›)ÐùÕ#âæSïòMá:Xd4V̤0ók¯{eò -b],™ñƒˆ4óÅäþ⬎~âm˜ãC«*q!UD%Q�À’i‘J½¾´3‡ÐÒL^Ñ2%ŠŠÂÕ#¢ªâË#Î:–n¤™Õñba’žÀøJâç–å„%~¯¯ôþNK ö|Ëvf$ªçKöšyÕîžîfT‘p¿ËÐ.±ñwâr5éãÓ»ÿ�£¦‡ endstream endobj 376 0 obj << /Type /Page /Contents 377 0 R /Resources 375 0 R /MediaBox [0 0 595.276 841.89] /Parent 332 0 R >> endobj 378 0 obj << /D [376 0 R /XYZ 63.78 803.424 null] >> endobj 78 0 obj << /D [376 0 R /XYZ 63.78 643.847 null] >> endobj 82 0 obj << /D [376 0 R /XYZ 63.78 370.331 null] >> endobj 379 0 obj << /D [376 0 R /XYZ 63.78 332.655 null] >> endobj 380 0 obj << /D [376 0 R /XYZ 63.78 250.928 null] >> endobj 375 0 obj << /Font << /F18 244 0 R /F52 324 0 R /F29 245 0 R /F30 246 0 R /F49 281 0 R >> /ProcSet [ /PDF /Text ] >> endobj 383 0 obj << /Length 2151 /Filter /FlateDecode >> stream xÚíYK“ÛD¾çWø(WE³š—F:pP¼*,\€ƒ°Çk,¹ôH6ÿžîé‘dÙãu[œ8dÝ3Íôãë¯{”dõ°JV_½Hüïç÷/îÞÈdÅ \«Õýn•Jf²•Is–­î·«_#µŽ9:zÝE½]ÇÒè¨*kKRsì˦îÖ¿ßs÷†g+ÎY®µÀ­’¬`™´Ñ[¿&UT´þý…‰ì¦ü-áÊnéñû²ßûçêµÈ¢¾x¤#D~z7K’ŽrGÄ´F/ÔHÁ¯¨‹ƒ é*9Ká5¿ªiƒ‡)–eòŸœµÜGÆ3>®úìæ>ïŠj+-™Òf\ÆÖ±6¹w39ïÝZëßîh¼)jòïätš¥AC+ kà,ˆ]"YªÔ*»•Jéß„lRLÏ ÷mX_‘1#äM'k19gWT]Øô„™t\Jñ—`‹ÊÁçýÐTÈ1ѰæÑos‹¾PÚYbÙ *šj9ÁJ‰ \0™¨SèT°Ëýv}[Ö„EeyôªöO)<ÁCs8ÿ–E>9§ìèàÒy£·í±µ½Ë3˜ì÷ÖÛäŒ'c°5½Úá±iTà&ðº³Æ×­Èsð ö$¹©ÐÕR{%!ò5„ç�tõ4y öSƒÜç1˜»ôy"¤­ÐL$é³›}ÿ)˹˜ wAT:ºÕ«X¤ÀŠ’ž}½Å%…Ú >ª±ˆ~!»aèì†ß‡ònýôaÌ¢ª/•µ/Ö/÷\ Ò9\ÚÖ’ãpR")â LQ™ÎŠ cxPp9+‰™2°}»¼+›¡]ê×ï‹Þœ,¹‰6Ρ:]OÒt8 ÝðÛ‘…€˜T‰èífStà‡¢ª>Ð H ë'ÓO÷ô›Q)aèìÖox¿·Ÿ…ÏVˆª÷XÔœº-öñX•›²wú�ì¨J6#$=›aj[5øø=wt8ðå÷,Q|€F1{"‰dÚÌL„`ýì6 _†¶J™Ì³åNWA–šOÞ®nêðnàNs«¤K&S¹Üokw<´e&X.'Ê ôÕÝêð‚N…©ª$›S¨ËjÎŒËPŸ¦Ätçê‡ó£>q §ö¡?x\Õœ×ÙÞ×4lì“*»1•B›7¶uŒûÆîëúè`t»™ƒÖP(zÝ\¤rzÉ 6 !´m*šx¿·­_ø bþµ[^ § �¶4Øáƒæð ¸obιÀ·£ßbx,«²h?Ðpk7UÑ'º8_¡Ð²O Í&wbóYAf@ÅöN5v$þ‚¼öŽö™QòÑ)]êBØüå_? 
^g+»™v4_±­‹ÓÔŒŸc=žÁÈ{<ê²ïÆnQ,š/É’Ÿ.®Ê®Ge–,Cú dÓÂ#?cEœª(KS˜Û©6ÁFeÙ5M.24aBtPû´ObƒÜ€�:'Ë1è]ùöMŽW›ñŒc»ÖIdñ¨Ç¾ÉA^ºæç‘µ–'érúÊ ”©˜˜û[ì«”L b%rÈžÃýŽS`mâFŽp‰#œAç’Tc.A9fAí¸frÉ¥zgýGÎ’l2æ.´ã¢A-®õ¡Z²LO°ìöÍPmIcGyXåÉøñ‘zÄ‹?º¦z̦$¡Þ~[ uÌó&Í‹~_Sg“pO¢0My%1É„4’ý!¼‰ì‚ó…ß¶mÚâ@Ϝ˹k3a³/|pë\gK *ó{ÕMˆsæÀºDò55Oc œê3è#í} +l-~SyÛ?™Þ¬3áÜ/¤ˆ,yÞQ¯ä³“ð: É[#¿;R‡3W‡Špk3ñ�Ûm«àíÑ09×ó3º‡3ƨ¢ž©P¼ÂTpqÈsÝ;¼MŒ8½i$*ú~l ¿4ï’Ÿ_˜qGO tÿžû5š |q€˜ŠÉ¢ 90ƒKs:902BÑ|Zç ÷-ÔiÅR%Ï`‡¡‹ÜYñ®㲎·eû$´F‘%‘Š »§Ñœ}0 ÂCÉß8@Ú ®O ïzÅ:äÌutìîàÏÝSÜ•C¬çNÔApF©¬é·OóÅw$œÀ’@!rðy®®€Ú1·™àõ‚N¡sdivž3/O&iªª™òtÙ»vN€‹SMgþ9à %GX–dg [œ› qs§©qVbà sÝ¿ÐëÒ+˜Õœƒºs}o² BÆÍÒk÷ûÒ» ›³ã¬Ú8  € [Kÿ Ò?‡z3]tqÎá'TüÀääzá@>…%n«´C§lr¶•¡ý9S)_nïz¤'w^I`/¿Í@_íîçc‰ÃŽ ³-ä#ªv,s®õ%qf"9Hê<äÔMÐ2tÖo¹¸œ¤“Îåý“œ/_4ãÏæ5mY"³åç9 >´ŠS>4?[K™‰€ÛnL¢§…îâŽSuãg(Ï]¡¶^21}Ö¼0Âõ…߯r^`êÓ¾èòÃVð»£fùIí¨loc‡›|=C•Û »]êÌ¥åFM‘:cÜ侦ä «(ú¼WIÓ“Š‚£¹U%ª((QEA©¾ZNtÀæ_•aÆr‚'9®Jý¥Ñ)v–É8G™L5%ÓÌ$&TS‚ gЇçúÂ?>©»ü_ÿYõ~ï!õÓëŒG¯~üÖËñò™C3hY7ÝŽ©Ç溞N¬3^m÷%:#ü&FËü—˜`ân$R†ÀtIÿãèÙq4ÑÙ0q¥Tÿ!š¦O57 ´J3¦دUýçr.K¾¼ñ7€�eP endstream endobj 382 0 obj << /Type /Page /Contents 383 0 R /Resources 381 0 R /MediaBox [0 0 595.276 841.89] /Parent 332 0 R >> endobj 384 0 obj << /D [382 0 R /XYZ 63.78 803.424 null] >> endobj 86 0 obj << /D [382 0 R /XYZ 63.78 781.755 null] >> endobj 90 0 obj << /D [382 0 R /XYZ 63.78 588.67 null] >> endobj 381 0 obj << /Font << /F30 246 0 R /F18 244 0 R /F29 245 0 R /F58 385 0 R /F52 324 0 R >> /ProcSet [ /PDF /Text ] >> endobj 389 0 obj << /Length 2111 /Filter /FlateDecode >> stream xÚXI³¤6¾÷¯¨#/¨áÃÌ´Ûá‰Y"Üoìƒ=TUŒ)¨`iûýûÉEb©ÇÛúR•JRR*óËEЧCtøá]tóÿ×ûwï?ÆùAJ‘'I|¸?ŒivHM.²Ã}uø5û©±Ãwwÿ½ÿûû ³’ÕJDFÁJ$Hr¿ER7…çMÞ”ÙzV(a‹<‡-´6<ùã]]ª, ì—»8 lÿ€C\ûîÔþ6µõø yô¶¨X¢nùc1ýY7uá'’FüÅéE춸؊Ù0qç\*–Bæò‰ƒÝžGåBE³Ä]hdüsjÆúêwäÉÎ>„ÊDBçz{þ²À#Ä&ø|ÆY`y0\a–(k\”î4Ôí É4¸Ü°ßŠtÛzS)¢,óʱ/÷¡#‘ÉùÝu¬»v7~Ü®Æ Ìd$„Q¹Û¢²-ïþQçpj+[6EoïdP…u5< 8±ñÖIxÑÿ ¶‡…Ñ-Y!W³sôæ¥[ˆ¡»Ø±¾xþŶˆ*<Ø£Æs12U“á¯k†üá8µ%…‡EëÔ(»6$¿F[‡cÁûÁ­s·Rh"ÚÎqÈ<`œφ Ä3Or¢?üƒ c¤/´"ÿah0õ™aC0Á1DqJ; ¶ˆf‘ þÕõ—¢iö´ÿ¹.G\=‰ ¨>ÃaÁh2˜±^¶ )þc’HœÁàÄóÛ–ÝÄF±ýàå6*–t€zy¿�tXf<×Þ’ŒÒoðˆ©3U<½ŽoÉÖˆ•U›ÍI¾x vpÌh±ï/-s‹Ðcí‚LÖÆ-4ìãú˜¾Bp@ˆJ„Çã¥nêî÷T&9ÐrI3φ¦Ì ФMŸÉPøÕ¡ç´ü‰“¬2s®Ün5 7鬖ÛâÑÊ™0±ñRC}¹6KJƒµFCþé%šaÐMãu™þŒBL“‘ø´¬Õ3§/àcïÅŠ]­ã,yÏj)OOª=îû?‹KÝÚÞ¥q01‰mÜøsÄó9^J¢hóÛzêŠf·â*#r9ƒàô|M…½6‰ôßmƒŒcÌ[C]‘�°¸!ó)d¦ þº,âà¨ôŽZ “è&· ©dUµÒÁ=Æ2mäãEq¼¯vÙâ²â/”ãðÓ4Xæp^A–ORëúÉ-Ù9¡‚½%5X#»i:0{5®Ôú¾ö‡ Ùµ<(öÜ #(—fÆ3Î ±«Ø³BbDÍ.+úÓä+oÌ•ˆ¶ÛCj.…Z€Š;íÖo¥…1ñ¶~ó²l™48a¶‚΢}JÆÑæ‘�—²yEX¾¢õ˶^`LjhšLj)Úzª¹øù·(‰¨øyhêÜ„Y"™5c7óm¦öôƘL—¶¨Üõ 7[wuYè…ëw›>¨æy[�ã.Ò@ € Šî"ŠÍÖÕf…êÔùÎxT‡jD%÷‡7�~K®™‹Âïõ•Ýûо,ÙjýÅLœÆÁµf-Þã1ØÁ5«…eÎè&S*ÇÉÛrDÃ!» ®ÞèGL2sé…½õQ´v{ž¸/áßjÞ—'œmsÅ`§‘@M¸°çzÄãsn“|9É|ª hÝ×±Ÿ,S¾�œ´²<èŽü‘zSÜ©%¬€”òÁ³ÜJg·Ð™Êß�:àvÑdçÔ÷|5�Îׯt¿1Îý@8÷õHyò‘IÉ2 úâ �Òãš8d¬ €Ë>†Bâ¡`Ðß2ðh048ë€Çh@ý^@&Î-b3£Wí–‹EœÂhp×9¼À9(`]ûü|b¹eÎÖÇÿW6Ǿ»„˜vS·š˜ÊÝ*öt W™ÐÑm…ªÞ•²-õÊŸ8ÚúÓIžÝ.qD6h7§vuåö:cD2W¡ðÉ: ÷Ü4Óë¤Æ [?º‹5txûØ­üZÂ|c3Nëj[®ÕKO5q*tžnz£$ø`Üå·eÛ£‘Ôc’t£ØböÃKi:zƒd<Ùgn‘qz"ÜÁÄ`̯!ørˆ<ü»ðàf ù•�A% ܃6h‘ô¶Àïˆ7r\|"ù~á0åÃóÑâ+"¯W3wZ q†oh|ŸuŽ6`µG9y잎Ú$j¹q½2jc#do}ôiì A+èM*ÌÉÎ8ö1$á/rOÈX…r”¯L˜r(ƒDq„ÕFoØ*à &KÓ×vÎféœk§EÈp ºšk­b”ªݹãÊ•ÐéMÛ Y—³;­Åùj’cº¤>gà¦F–X’ÒÛ{.|E9Ö_,¹8‹Óþ몔«¡Üó>Î$œr›˜ÜÔ°8‰ Ñ„¶8gª'йÄ7´Àf»>ò‡‚LôÀ620Ólª×Àp‘ ,4$-¯U(BA¼Kñ»{5¡¥±Oò)‚;åOÖ²è'KOntjÄAtèO&~úáÝmÝVB ¹±ÐáWòÒÑ·Ó—®·ë>ÚÐӒ˾«[4Z›ï¹›šjSÍYd°×¢/f,̯@—]Ã/…ô D{K%1¸k¾C~»ŸHd2_êÐü/ÝݲTd¹ºA[xeC¹!çÉ·v{¿ðEc—•²„ö­ç±ï‹´{Âw,¦æ¾hzW#æ’D`tô³ðZ£U0áõ…ƒÚmÊ¡Ää>Å ÖpÀXà ¡ù´ní’í³æú*õ¿i=:ߤ½¢ÓÛºâ óL¤Šm$#y;©62ßß¿û?K„{ endstream endobj 388 0 obj << /Type /Page /Contents 389 0 R /Resources 387 0 R /MediaBox [0 0 595.276 841.89] /Parent 392 0 R /Annots [ 386 0 R ] >> endobj 386 0 obj << /Type /Annot /Border[0 0 0]/H/I/C[1 0 0] /Rect [92.048 168.283 118.103 182.23] /Subtype /Link /A << /S /GoTo /D (subsubsection.3.3.1) 
>> >> endobj 390 0 obj << /D [388 0 R /XYZ 63.78 803.424 null] >> endobj 387 0 obj << /Font << /F29 245 0 R /F52 324 0 R /F18 244 0 R /F34 391 0 R >> /ProcSet [ /PDF /Text ] >> endobj 396 0 obj << /Length 1698 /Filter /FlateDecode >> stream xÚíXK“œ6¾ï¯àÈT «rpR±Ë9¹â±sprÀ ™%f`Âcmç×§¥n˜eöåò-—™Ö«»Õ¯[„ÞÞ ½×WáâÿçíÕóW"õ8g©RÂÛî¼8b:ñtœ²ÄÛÞG?0_ój(LЙÊä½)‚}“UÝæ¯íoŸç¯xrÎD¤LÄÒ ¸dRÆÈçkSo‚(NýcÖöe>TY‹cädâ7gSHuU™›é¬54Iªàh7žÊÉP—=N”$±¿¦ƒ§¥ªìú²ÞãàÏËÊ<ƒNý¢±wƒ{ÍÔ¯{PÄ~;X¦Bûʼw’a²©ñ¿¿n:ƒËã­ì*í¢[Á"ÞŠYAÏ_E¡’";Û£ñdÌ''H&6çBùÛM*ý6«»*ëK'V+¿9ZºCvsWÀ5"p©Èèqê rwÞº‘îÐk÷¿¿¾ò>ºMjæe7ÉÉܰšáß±5¸nÕ*Ál³Û¸Ý`úŽD’–ÖÌ"ò»²Îiö|Cê}ވħùO›�¸š®Ç!HY•õL…¢ìò¡ëè>Kס*1Epl¬Œ«±i³ª¢Õ™QaíØ6N>8jÝSáÒGùè-œŒ8H€Ó¢‹²º@âØn¸{n£Þ´»Ì™¦0ÊG'rmž° …ÐŒË1]AíÓþ„ Ö³­"‹+8áv‚"*ôíæ;s9à)Ð|nÒwNµ…Y­ÁN†¥Å׊¶Äy¶ d¤ü›¼W•n»Ù( ÃÁŒ³[Ó_ð˜¥R¯_`©wF‰Óq/@Ç‹Å5#=»¦!ãid &Ã0è;­dñQJ~“GwK¡ýÖ¨Rè' éÊñ*wß~¸ oy´”£5ïB1­$yƒbû¥ð,8ceQÇB–„“ûë¦6kb�k5P£û²Yr7–â#átwÄQ^Ú‚a ˆe-µ¿½.;Ü0þÙ r(w¤fM\þ, Yê‹Íˆ ¡“Nö$wo�%j€§ÞéíiÈT¼ÈB—pzžpe}(1±ª!v]t–5{‘ o�®U0I&O^<åb;ʼX Çàq<¿Ò{‹ðÂ!|×›¬ÀEp…bÍ×)gQ"Nª\„„l8á–#bÜàÿÉÃ_nÆ|…dË˾¢qF¯ î•AóW8Oë”xòt8Ÿm×á\Ê£ÐÁ@¡V„¦ñ¹›jvê–[ÿ2¡vÔ6\V°¸ë÷[$¡\öÍaM0u3Ÿ’ÁNÞN†±ôZ,HÎëöY\C@—p¿=ިюv«É%WóD0®øý¾4×a´ôµ•0J5ÿ %A µ/¤WƒÿéOš£[¸>§î*øX§³DɹkïT6árT¶oVoýE¨ï4Éb€KÚuOêó0ai*¹_ÖÔ…‡¦0ÈýCc3¾0Ïö·˜@§î¦6eû]…,J×.r ;8 ÓäíDŒ`3+`,Ù±\S…ƒõÕc/ðúï»;Ì÷òíhM!p”çæØgŸ*²äHÊõ\Zëess+ävJȦ½?v§¾â–!xã ÇÏ3 ´Ü­³– ãB>�û¡uäâRb,œiÉ4<ƒ¿«M;ôUùé¢*üÜ�$Ön¯œñ„…ç<ƒ L¥�ÛM&ÀZ‡ 2K¤áØà�‹]dU®/±³2€•B¤ð@ùjò†.`l®ÁÛí,‚°’…)K’yn_Š*ƒ¢òÞ šáس5FÐß+ù°˜zTx®7¤ðôaü„ØTÍgTÐ݃ÑVø †T1S:™s;¶5)ÐA†"WÄûò Ö,Ôò)y°öZ¹³ö=Ÿ'BIá¿£“S,ÛÉ)–#ÉÉD0;‹e;qŠåAP ä)Š¡X`¤Ë,„ï‡Hç !ﺫŠçúÿ8|Zj(y ÿž8,†ÃaÕ1€°ú<c•` Ê(ñ»æà( ]Á.ª§sü2TЮþ:ëqסéÜKÁÎ }¾ÂaÝ@Ã[ïí›1Ný7õxߌБgýiS€¾,º¸DõfaÙõ¥ûh¥„ý”Ú¹lP‚( >ØÐÿiÊëK•ž«d3b²ìÿ•}±†| ¦öØŠ™}'Sô¥ï\¾ûòI)–Ÿäöß¶ÝZ̓¤’ò¬ °Ïª~µH(ÆÃi#jìWÍžÞ;Cû1¼xG•ô6šÞ]“£OÏ ytzi úYP AÑ„nÄålÓ¯Û«ÿ�µrØc endstream endobj 395 0 obj << /Type /Page /Contents 396 0 R /Resources 394 0 R /MediaBox [0 0 595.276 841.89] /Parent 392 0 R /Annots [ 393 0 R ] >> endobj 393 0 obj << /Type /Annot /Border[0 0 0]/H/I/C[1 0 0] /Rect [124.458 657.645 132.303 670.265] /Subtype /Link /A << /S /GoTo /D (section.5) >> >> endobj 397 0 obj << /D [395 0 R /XYZ 63.78 803.424 null] >> endobj 94 0 obj << /D [395 0 R /XYZ 63.78 721.855 null] >> endobj 98 0 obj << /D [395 0 R /XYZ 63.78 627.467 null] >> endobj 394 0 obj << /Font << /F29 245 0 R /F18 244 0 R /F30 246 0 R /F52 324 0 R /F37 289 0 R /F49 281 0 R >> /ProcSet [ /PDF /Text ] >> endobj 400 0 obj << /Length 2063 /Filter /FlateDecode >> stream xÚ¥XMã6½Ï¯ðme â”(Rr˜lvYäC'{Ø,µEÛBdÉ+Qé¿EIK¶ìé™�6Uü*¾ªWU$Ý6tóÏwÔÿ~ÿôîýÇŒnXN²¼à›§ý¦ÈˆTY”DmžêÍ’œäÛ”1.’Ÿ‡~›±äÅþÓÃ6ͤHê¡yiº~ôgÓôݸýïÓ¿âï?òrÃ)…pëÓMÊ%a¹_=ÝO£Nw}·k¯ç15ŸÇK‹|“‚ªy^à䟷\&Õ8Âî<KúNcãÐoYRµøQÿ‹?¦9ùQ¦÷¿G'àɨ[½3ºFñ «¿Øz Û4ÏòäûWZë}5µ~å_›é? ´Ñ·ú 2°EÛì´=¦…a~Ô²@-¥Ó’<„1 xpX…–Ì?ûá°FZ7Ãwvúû0z6K–¤”tÃg£BŸ J$c^c“õÚœ®Ø®_ã" ƒ+‹U �.’ôZ&ýۆ¶@¿â§´ÆVðey«ý"àU5¶öÎ6°³¨ ÅΦ;O›Uç'ô“¡µ8/“3WªÙ•ûUÃa:éήçÍçTßßñ´K“Š”YNKV! ~Ä߬ω¤Ñ'ªxõNŸMõÜjï&=n×tu³«ŒÆ/ïÐ2ÙMàaÒECQñëV±„à&? 
C~…~MÿŒQ¢xÔ{oNçµ3€CT-޹Is˜+X ø•$çÒ»Pæ²[–Ô“Eysƒ�O‚N2A„6·Ã!ÒÀl°qßÿµfÿŽD…_�dcuòK¡‡ ´©ŸT ¾ß; ˆÐi@¤½»¼®±öÒé`[† ” Sç™íÜËö¼X/ô“ä,z¸Áoa½¸bý±ÑC¤þ¾±øl$·l†óZÿMÓí,⪘Q º<&E¢ÁAó#Šoh²¦ó@�›€Ì*ïlz犮Á°a¨ºƒÆu›Ð‹ŸöXÕ€[{·ØéyýSWëa-]Yé3AH(ˆX|á¯÷ñW’ˆ\^á¿o+óµø3ü¹ÊføƒRüm—Ãß6.øÛ1×^gG\á/ÿ;ø®Àw-˜§õm¿ãIŸì:Ïn =x)0tôL¤dê‘ÿ‹@Ï @*z}(¦®ùߤÿ"îÿnÌѹeÎí(O B饑ŸÐ~~c[ÒF a”i>Ê€(FB”X1)ñŠØœA”¯$&î,èEs úíŸ'ãZØúVƒxQ;ÕX›eerìGÓ¡Ž9õÌ0)ºå1C—ÛxZ;NÙ,Šæ>ŠZUržœª?Ü °á!Ђ b!K¬ÕüGØ¡+¸‰Œ.S=CBÄjÓg@Ÿ¹À.û©ÅþÆ'c_‡ÉŸ®\ÄlxÉtn챺X�{`�è ú:$ 7Vî'?í×Ð4”VÃñK‚wpãªNóWÝø·ÈUú¿o¦ÆÖ>h|ãv®’Àzˆ‡mñsõÏb{£ýtçÃÊùðêbòž JSÛœ“ÚºÔY­mKEò,Vž®ÎXÔ ‡ÅÇÁe88Ù4:Þp(ù5DVAÜûn±íDˆÓÖ-v7¾à²·¤}µÓ(=!µã ”½Cfˉä'Dlú´`)TË2XDd-¶fQYŒ­K‚Û]p¢Ó´kZøˆ1ø—3¶DÅ’„—¹¿;ÙVoýÁ6n’ÈbòYš}Ä^ô •ó1e ί xãþ¡…QÃÕ±]±žClÐk…«„Ú“fŸ]X¸ä†Q/[º¶“?ðx짶ƶŠþ8øÓUf …"†.ÏB-At Ø°ªÜ*\*…ÓÚ‘kv«,bÙßõfªŠxƒ•h…{…çýì«ÂÔxÔmû˜pó“åð/#ÀžþåEä4#ÿr¼^Ù_$L^¬ñ/G7‘| Z’¯àž|óµˆMm#¥•¸3ûænhÎfõVdëžlá¶€k>–†Ñ7üÊD,[PoYQÞZdë–µ©uÄÙ(Vzk£&H¸ÁÉ[ŠÁø.Å8eñŠ•4_¡˜=Ö®ê°ùåÎÒÍmôOH2ૺŠß¡\¨.ð“®Ó3>(`tõém…8÷7*¨&ONJتÊX|\QUFªo¤êUÀ$±*š]šÿn/õFûjƒ­Ÿš[¡$²m§µõóo)¶(« öýó4ú6^Ê2Tµ4–øK#ó‚B‰]ý*)¯¼£'-ð¨ÓIw–hhKQY”×õëhtUM°jûC³[ R9"¦‚¶ß ÿîq¤b‘¥ºT¹ä!RÙ¦ ÔÈ—Ï>’H~<™´mž×ÀÊÌÁZ1»A bÃUÀLsÙÙ5€RÂSÊ~öû+Í–à±RzyûÖÁ¶ª°[Ä#TÆ Ü@ü&öZðàø`\A�òzPà¤(b$I¡Z=…ÇÑYA+ãÕͧ¼ ûÞ#v—+èc‚¯À§28WDïÃ/úñÃj=f/Æìw](&HÉÙuö=€+=¾žÇ´²ãtúò[‚õÙLÒà³™dw}6š”oñYð 2»ã³v¯¹ÏfŠYVëÁàæ³|mûõìbqz.€IVíñëŽÆ¤‰oRyÕÁð•’R�].Q‹×]›W‚ó7™ç°Ùûo[ýcÎùrÈÅ"êñ­³)†ï¥%ƒÓe7 \:8 #JAUpFÿ:ÅÄbÌ?žÞý“d™ endstream endobj 399 0 obj << /Type /Page /Contents 400 0 R /Resources 398 0 R /MediaBox [0 0 595.276 841.89] /Parent 392 0 R >> endobj 401 0 obj << /D [399 0 R /XYZ 63.78 803.424 null] >> endobj 102 0 obj << /D [399 0 R /XYZ 63.78 781.755 null] >> endobj 398 0 obj << /Font << /F30 246 0 R /F29 245 0 R /F18 244 0 R /F52 324 0 R /F49 281 0 R >> /ProcSet [ /PDF /Text ] >> endobj 404 0 obj << /Length 1440 /Filter /FlateDecode >> stream xÚíXM“Û6 ½çWø(טּâ—>¦ÓC›v3饙ÆMmŠLۚؒ‡’¶Ý€ dÉ–·»I/éÉ$Dà{�äh±]D‹7¯¢³ßïW¯îîE¶àœeZ‹Åj³ˆ%KÒEg,]¬Ö‹ßƒpmËnë|Zs4yÛ|»üsõÓÝ=¼0z‘ É"™‚_÷Z±ÔQPwU‹k‡ ïîy:~)Ì`»D-B®˜R1½ú‹9.C‘°þâ XÂ|G³£­qö°i`,ÚÒ ¬pö€öÚ½[ämYW³q꘥Q6çyx†*é—¶åÁ4l*ž«]ÙP8ýoט5ÚÚ?© kòÆôa›¢l0(7­7£ÓøÐá4€‘¦,ÑñÛU¸ƒX†RÄÁ|wÖ Ù²þµƒ¹uMYmq˜¹7}÷î- JzÇØM^vFÑT¡È˜ˆ"¡X¤zQ;S|ú_ÿiQ别½ ÷†vÛ_ÈݽŒ°·TñT*ŠÅ£˜^†œ üܵǨ’‰ê#âÞœ¼œÀŒ¡¶…ÞgÜ{x@¹°Ñü·«¢·"fYÜ˵kËâÓ“¢ëO3F÷õ0�+ÜSÈÓ iójÛ5YûÐðINGœÆÂ%œiÐËÍœ¤YyÊ:·yÑ:%ù÷ßî  ëmN3œ4‚hŠ%LÄs$‘[Z6º.°¸«ÊD+³,x»™ Z¨ˆiÀÃÇîëjÛcxq_e  ¥É¼|ßÔ¤¥Æì œe}Kº;Zº˜-M˪iM¾¦¥î€N§ =¯òÃe¨í×R®Eæ(×2QŽÖžrÏS®%K¹èÑûf5.YœÅWHwžû„õ6$MD:d6`Ü]l|x¢g˜£j)}…ëÇ ÓQê‘ÐÏ \tƒ¤¿ŽiŒÏ3k»Æþ‘r½ýâ!%jyQš¾–x^¢xŒpHCðdžxÅYœð6K<gR«+Ä;Ïí¢á¶£ ‰Çß â¥ð%1EâqVÔU±ï¨¸Œ_Í}=FBOQðbQòšVà|iÄŸ•P#qöR‘œÖ ™äKtÂeĤÔgBÿ´VÎPzo|dùG—rGÅú9™ê<SìêpSVÐKÓmóüxô¥v¹&íò‰va6hW8h9ÿÊÝ?°âæeˆ4ô°Ý9%Â(·†–”ÈÄÚx—ù¹;Sõ›mΞXß¾ÕÖõTX ÇˆRÂæ:„3%˜îOŒ}‚ð}ÂSëÎ�ÍÛGr·3ç/ïÀ!øšmnƇ n±g„ݱs´mè÷}²ÏåŒù‰_3·—,;Ý»«{]ÜA51GbãÈßJ ëG´`Ð*lhHîª,ÐÛ’mS[ò¯¦ùN$,K‡ àˆ ˜íšgC=¼s•°X ÍaÖ ¶š§ºj›qŸôÒ{Úog=i¦äœÆêtØ×k™?'ïö-QÂÔãwÍeS¥,ŽÄ”Ë®çò¬·Š˜ñho).>4@‡|ZKž(…1S¸ÿ$³ïp]ÚY'Ð¥ˆ¡Áª§åªàÖéì¬lRJ‘g‚ÁCH“²¢_Ørmg?Âå´i<åú&W¬8«±àÇ>z uÅÆS–ªöš¿!ÛÝR~*½­ô‹ð# j”s�zöGKzê¹Náƒ_ROÅ«³ÖŒ;‹)^–ñ¤Ïx–pä׫'Áø¶¶|(¡�aм¸ þ¶LE`ËÃÈýÝÀtnG˜]]3…¹\ÂhšÓçä ÉpÐÉ��¸Æ4åö:õÀh.+*}ðÀ·›`t5 ,›%–‹Ãl;k6HíææÒ5ÛÍ&Ûi=�éÄ;Þz,>`8¯ßÐÄßÛ Ð¨¤Œ%ôá?¾È '‹~\½ú …í endstream endobj 403 0 obj << /Type /Page /Contents 404 0 R /Resources 402 0 R /MediaBox [0 0 595.276 841.89] /Parent 392 0 R >> endobj 405 0 obj << /D [403 0 R /XYZ 63.78 803.424 null] >> endobj 106 0 obj << /D [403 0 R /XYZ 63.78 669.101 null] >> endobj 110 0 obj << /D [403 0 R /XYZ 63.78 631.426 null] >> endobj 114 0 obj << /D [403 0 R /XYZ 63.78 338.498 null] >> endobj 118 0 obj << /D [403 0 R /XYZ 63.78 184.829 null] >> endobj 402 0 obj << /Font << /F29 245 0 R /F52 324 0 R /F18 244 0 R /F30 246 0 R /F49 281 0 R >> /ProcSet [ /PDF /Text ] >> 
endobj 408 0 obj << /Length 1615 /Filter /FlateDecode >> stream xÚ­XKsÛ6¾çWèHÏ„ñ"ÈC}$w:“Lí¶Ói{ )Hâ„ AÙÍ¿ï.¤D™±#§c¹X`ß>ådµ]%«÷¯’°þpûêÍ;ž­8g¹Öbu»Yå’%J­Lš³lu»^ý ;ëìU,ˆŠ>À#bÛµ#rßwWÂD÷W"‹lK¼;ä|>;óöߢ©ZÛ³«XÃçu½¹þˆ¿®jë^ÃW&_ Þ ún†'Æ\1¥RzÈdLªÃ i:sÅ£ž]&qS5ûºB£Èd-¢Û`ˆ*¸uè+눱+ŽfÇ ÅpphÑ›w"?[r@™ 0”î–Äæ>Ë¥ª–®õÕk22#^цo´!ˆ¥Ê˜Éò9e×4Áð%³tÂTÂVU÷UQÃ˹ò€/™h 3BÏL鉉 Š'ŒLâð.R r¦„¡#¿y@ú»B$…ŠºýPu-q‹ºk·D>TÃn&°ðn4ËÄY¼98—][Öî Ö§ùôÞŽôœ9Ò›fI±GÁ³Êî@8:únŠ¡ÄïÝx¢sÁBK¦¹™;bÓw Aî6ÑÇïoFôtò¢:n·!Þ‡»ºÚ#.&º94 ÄnÑWÎöp€{§jù”ïç0Å"g"Õ°‚MIÈ÷x÷©j×îìàYD†s§oùÕîIi?5†¾äÑ@ÆØùD Ò˜ßäñ4"ÅþðB¥σ‹¼Õì5¬ÊÔätðœˆÊa1¡4ãÉ”vTož*SLpÊRå…HéK‘ò!K2ŸPæxÄ“ð$‚%æ«á‘Á£Sÿ¼9<[7Týö@ðéMÈ¡øJ¼óTÔžµ€>äUwÜÃKKë„mŒ¹œF®®Ê ¤ëÏ7«p3ÀóÉ[ xËÔ0)^ŽÏ$®J–:Ë\<®Ú2vv8ì/†þÂí°”Î"ºÂ“ÞžBp%UÐ3™@?ÄèýòwÎA¯86,¤Ö¶/×Eï‹6•CO>øA`¡™?ª!6ÔjèiŸ;<:8¼ðXOªírÈÜÁ¥0Âlàl×Û)ßÆÙ¦ƒ¤ß×ŸÎæ§«ÁÙzÃ&›OzÜ5Æsž{<p-öpÁ¾¯ŠÁÛ‡Þ  äïÆ#!R;Ÿ@•E]“I^4ì?² xÐǃæpôû×D`¯ùÂß!| Á|†þÍ‹Á mNNÁ,K¨cø‡Ú~:x %ÒÛ+2}æ G»ëŽVïmƒSu[ÌJäŸfµ‡�™ØqímiatÙ"ªªÚM×7>ÐÂHŒž½ñ˜ó€)0å<DÆ`ûMá«OÈ¿°Epq2Ñ ¼ ü-^9öäô¤uÜâ°>ôÕ¢¾Šv´dN.åÍf,mÓÀ1ÔkbºnŒæ°†j»À(Æt›ÅðIŠUc)¡k^2ˆ”î>îíú€‹Ãu1Vä¸ë«mÕºo(o>¥§( ­cÑ÷œ 'ÈÓ:@à*Z|ÕAB‡“Ë-8O™ÑêkZBÊd¢Î[Bl¤‰~²›âPÁ¨`†÷‹¢~Gs?„t¢³ùûg>í #§”ø<uh«!Æéån’2›»IJrnàýDy%~¯ {Gá—¤å >Q 3¹úº6-dþÈ'*Í>ñµ”+’ºYwò.?Ÿ4e‰”箚ר7ïd²è%˜‘œ)¦$çBG¿tØŒk§é“{ÒÃP0„a\¾­í½­¿#@âDRAˆ¥r^îI_Ç\Ôù3Ó^s@z¨ëÅvjíA¢ ¿ÖQøŽÏªac¨:,’~8\0]$ЗeöÈöG>Œói†CoK?ÿI ó:ëq¤l×cut¡1aa ̶hì(ØÏÇb€œ¦!Ÿftú¤£€4I:ÚfûJûÓðÓÙL¢/PñPô-¶Ó‹•¨ ”`à_®A^ aƒÿï¹Xƒ¸Pà |Á/TáËüƒ&#ç‘t»³‹i XªõóY`X¦ù±Djõ!Ô”ÒÏ&l3EO4¥¥¥‚?(ü¶ƒ°IÌø/73¥O†é¹Nd(ƒð_p™ 9¼õTzMHH˜Ýkú¬ÜÒ£c- ÕÒy:‰íó×û‘kÀ,q¹6,W€¶–,OÂeÜÌ„ÞÞ¾ú„\ endstream endobj 407 0 obj << /Type /Page /Contents 408 0 R /Resources 406 0 R /MediaBox [0 0 595.276 841.89] /Parent 392 0 R >> endobj 409 0 obj << /D [407 0 R /XYZ 63.78 803.424 null] >> endobj 122 0 obj << /D [407 0 R /XYZ 63.78 351.078 null] >> endobj 406 0 obj << /Font << /F18 244 0 R /F29 245 0 R /F49 281 0 R /F30 246 0 R /F52 324 0 R >> /ProcSet [ /PDF /Text ] >> endobj 412 0 obj << /Length 1694 /Filter /FlateDecode >> stream xÚ­XK“œ6¾ûWLNÖT âƒãØ)çàJÅëää Í e6f¼ÿ>ÝjÁ cìµ]>ìÒ4’úõõCoö›xóë“8<¾yòìUo„â‰Êäæf·ÉžëMž\onªÍßLñl !Sö‹½÷ûºÝo£$OYw?Ô]ë¶ÿÞü6øì•,6Bð"Mýiñ&’9*œ¹Ò´CoJ{µKèË]²à2S›ÔR*£­mµd}=X.sÖØ¶G2cÕ…ZðZ·»®¿3¨1†Ž¶¸Á´•é+âvãp?ŸU>šô G\îMïlÿ]Œ £¾×qœ•W—bŽebùâÏm"Ø‹ä£P^›L�RÑ ïÚÆ:»â‚uÃÁö§z+0C£bÍÜýbÎlYÿ e«-$ƒµÎ†}Aš1}Ø×µÍ±zÛØã6M™i·pÒ@ÜÓÁ¶´²öì#þëޣǼÚj;° \!A}¯ö X-Y¹f LxšˆiaBÂŽµ ²† çóß_sÜî(4×y"›Ò¾›ƒ¥ˆÜ™º¥H™àš`)¼“¥°ÄÆ®k`AÓ¡!'ÇK¤$åE€•ÝØ¶·¾ƒAÓ?,¨ëR£%à4ïj<ÑÜÝCŒIU�aÐÕ4nR¿ àÛ÷-)0úßhûÚ~d �2(×¹@@=L&D•ÒÁœ®v€öVxT§S„^o£4)ØÁ-RšµÝ@,ÄÛ~ì·i X„±!Gˆx°8t'"0©ðYÕç]å°†'‰ˆ|OKm?ÂÓ¥ %?’% £(Fg§‡ÅhRiË‚F6èãH Ížo#•'ìÔõï#Ó‡+b¢L•KÀ²=g]$~©ÝrOx “ëá>¦E”¿´Á€ÊÆK-!Ûƒox€G9'eT˨n¡€5Ý~Í•:åi>/'±XN’ „ ªåËP^çÀ"‚jeÓµv-¤Bç\§âKC*/K ÈÃeÌmvP­y¾ÌÀ½m­•NW´Ké7)Gz¯I€ù|u‰D"xúa·†úâ3Ôw²ç§5±)Ô×lÎ\ùÙJIpD,—Žxk½Žšzyô‚UJ3((/Wq}q…çØRe²ÛÑ3扂9[vm¾RàqÞ°–á ¿¡c "³ŒÝRUÅtA%ŽXû ÑÐ;‚m0ûK�4W‘,³ `i®’”½sÖ­é¬1ŸÄ—ª|ÉS7M¡ê·¦!ŽïÉñè?uˆÐ2 Ð<¡@ •’<=–ó&=ä;!ôÑæ•.=ó²¥dƒ`Aµ`,¨wè¼RÈC,H<ŽŒÒ4#Ò[­¡mëE!ãiúõ%¾¡¬£È0ô:å`¤d‚Á¤s‘\úMÍShôS•ÄâFؤVÉs.u¾,’„-ÃĤ˜óšAçp˜{Rä8Wá`‚<_8‘Gú«>òÜÐõ¶"žÇ12?e‚ïh=›p¨¡ãLvìêÆ®ŽšÚƒ:ï ­åúl óC6c>Ú5fxüh@¬‚xåK4] ¢Ôúð µÃ'Eß5Ä�ARÃ|ÛÓ;%1~襪ÝP·å@o¡K‡ 4â<6l¥ŒbÆ¢üxäìU=o?˜yÜ ç¸q­u]”úa59ϲbò‡\…4\õâI HðkÈ(‘>Û§f€8zñö!á'1$z £ üð¯¤AÓ„¾#uª'Þ-zz±!£éQÓO%(èRxiîì*LÀ‹\æÙÒA`xò)ë”1g>*žë‹ð(¥ÃT¤°8RD°?áHçë~ð„Ä}ou7º°¾ÅäÏ‹ï .,9û^ȇ@8s>»"VÝ®ÞpR uòõýê© åGZpÚÂCËëå4F�Ù˜v?š}¸¹Îw�¸Ï1GÑß èw< r(4ºÌÂ×ÕF¬q´ø¦ì;ZÑÔç…á &ØÜP©í&š ¸(Ó¥jGp‡QLôŒ´xeÐÚä\ª°§U}}´=/W5ÕZs(ùAÎnìñ M²BÜSÅ ^©›0U²P¦�A™(ØL2dû¤Â~�"µ]Ø‚õjY6€IŠõó²Æ WSê 2ž%W7n ýƒ?ƒºýõÏ ñõ :üÀð¶†ëE½{øÚÞhœkóåÕ¹ûÞR}ö¿x ‘ßRð[3‡xž*àæ¦ycŒc®eNb„^¬zyóäiÑõ endstream endobj 411 0 obj << /Type /Page /Contents 412 0 R /Resources 410 0 R /MediaBox [0 0 595.276 841.89] /Parent 392 0 R >> endobj 413 0 obj << /D [411 0 R /XYZ 63.78 803.424 
null] >> endobj 126 0 obj << /D [411 0 R /XYZ 63.78 781.755 null] >> endobj 130 0 obj << /D [411 0 R /XYZ 63.78 670.298 null] >> endobj 134 0 obj << /D [411 0 R /XYZ 63.78 230.893 null] >> endobj 410 0 obj << /Font << /F30 246 0 R /F29 245 0 R /F18 244 0 R /F49 281 0 R /F52 324 0 R /F58 385 0 R >> /ProcSet [ /PDF /Text ] >> endobj 416 0 obj << /Length 2036 /Filter /FlateDecode >> stream xÚ¥XK“ã6¾Ï¯ðžF®ZqDJ¢¤CI*³•gMí8©JvsPÛt[YrD¹=ý^õlR¹Ø$’�|�m7Ñæ_¯"ùÿb÷êÍÛ8ÚèDʼn5›Ýqcc•å›Ì*ßì›ÿ‰*¶¡Ö& ~®öÎoÃ8KƒöÒWmã·¿î¾yóVç­U‘¦t@´ c8Ã$¼ýǦvwÁmrÝ­òާþ² M¸}õßH'îðO&ÓÀ1\C“²¨MýÌ£ÎÕîi›ÂZ³…“z¦ÞN®áQEä'üiÛj˜?ò  ŸªR˜{¡|þîk…šfzóÖ÷*æ0Ãg<-¬ÛǪ¹\û»­Kë˜B›lB°x’XÞÿUS>Ô ›!clMœZ’ í‘ÿA(Ø`_ÖµgÚ±ídW¹GOÌ!ÊáBwmxз¼Vò ^;µ “È?z¹,Ï/ºT7ÖV™<}XÝ[Ûýò…‡ª[s€Ìª4Nn~ÂQàãFåº+¤Ìè>ˆs;ÁK9zl˜¡L0Ð"2+Ï4ß·;0­lL|A åÊêQ‰SåºQ“cNº¦‹6‘JŠqÞ±r´1*±£Æá±.ûÿ4¸šV©M—¾0y¼ÍéÑb[{0 xEßµ5ÀÃ1”˜‹·è5è7ž*ßW;ç d<4«ç9ǰ]½“ÝäL¸69’ÁƒÀMR«ƒ·¤ _YߥğgÒcb®Ç©#«ˆ!ì‡5û¥™ŠòÑÈŠÂiÍd±U…žøÀ{M|}äó'…M1z ¸ÏIadkÚžûýê|O°c P¦®™4öÌr«˜ŒT<qlŸ–i&R€ì}D VÔ ‹X…YŒ”~Yu›)›™AuDJÍ_*9ÖqÐN¦ïn]Õ3êÁÇ .{ªÚ«þ‘ Õ6Ø ô‘Åv—4 |‰ŽÈgx­jx‰Pç5;C(â.4ô=xjÙá6�"†JfA]‚€:¸–Ž ìÑ„u Ðþyàx">‹hæyÃx˜å˜a Ã•è-ß–5¨Ë÷Áí¯=bï=ÜCJ4 äÁLh•D€V1b(ó»ðJsØÀ|:æOÕ¾'¤.ò v½çH¶M#ŠHSAG“ÿ9üc'ÎÁW›°®¢•rÀ…'Ã)”`;DïùZ—Ž©!ÿyÚ)4qL�²kÀ%ä�|>üg!_¯sÝâ9”:‹YêH’ÑaÌ\‚8Ž‚sõxêyáT> ‘ߦ€×d±ÆzŸÛ'’õ ϼxO(€ô3¦ñÑróSYÊò T’lE™ÁWÒhîãÄnoɾ®«Ÿ?]„iª"Laà'èw…@{íÿT%.sà{ÌI\ŒÉǤzÃ[ó¡DÄwÇÿ pF@¬²Lîo•eqðÖ¾ä„<?e~„œÆwÀñ˜gp2ì´eÕ]]㺲§ ³áéæU ή@Öî#|ñ}IȬ1ðï˳ãÑ,ý0�¸U[ÄàÕ,]@)k²1M\µÝ?”)&+æùrß{Éct-+š<S&²K;Lî…xçWK«T™$Ÿ2H³šø"•Ù¹qJyNžÕfÂâZJwHí0N`³2qÆÃ¦‹ž«ç¡\ûÈŒ6RÑTíHÞzAp %ŽžYð“!•@™§ö.¦öíµé¡üð—£i¨«IEÇžÕv’¥´¦,7Pû�¡ûPž/æ3GÌÁÕ~ž\²1‘†šÓ`�ÆOê8ȹP±¯ÎŽüŒ-¸L%€3Pq DÎOÚ!4Vé(^:"ÁJœç”áOîÃI\8µv•(Ë,r°êŸÏDŽä<&g ¡#©´È)­IÙ…³)Å {Ã:&wQ©, ;î!í=¬ùSb”µScÎ){Á$‚ôåë`?>—èžÇÏ*„×«Ñ M¯ÒSaµ&YŠ[,»ÅŒË;4Úc >@[¥Ç.[GÒfÿÿé;æÍ?´kVÕ9”+ƒ@¿¬“«\E8ðV(§g-÷P÷Á`‰ËØ‚>ËÂõ2We1¤wp(Yç²B­Ég ˜™þ= ¥BŒ§ ýcqgrq¾0&x¬ž@Kv “¥*‹ôÒöWè1ÏìA—n^YØq>åê­êO/õ VÙxt(8ð 0g8c-Öš—½ ’ŽÆ¯÷p4`gª©a%þT<j·-’ +_sîÁ£2•'²sGáœ!öù}WIgðY&ŸaW:L+h&*4ø»°œ¸›‡µJ(Pœ&:ž>t•“sÙ˜Y.™+Cª!µÀpk åð»�“ÝiµÀ»»ŸsïØEÀÓyA„kspï™{B?b<…q£¶H[+®#]~‡*eçSØ<08tåQ6_JqËn èã$ƒòd ‡w\ŽÊU0|ÿn  ðù¿¿…iR?á PÅ ¥‘–LE±¼ ¿8¯†�J£Ágñ‚ïw<ðm n¹þ=$•Íí¼%+PX@XVõ”sÝ9b x]�O ­36D BÎ=øªçÌ Â˘ɢÈ,¿"&i6ÿŠ˜¤9¶u\Ò$؅ηP‘K î"[ Ïp°/;p¹3P*DjÕÈ ÝŠÞó~5yæÉ¸£âaü�€‹U³æŒÂSó§™eq°1ñ Tµ9<œ¨ª‹ÓW»W�µ“ endstream endobj 415 0 obj << /Type /Page /Contents 416 0 R /Resources 414 0 R /MediaBox [0 0 595.276 841.89] /Parent 418 0 R >> endobj 417 0 obj << /D [415 0 R /XYZ 63.78 803.424 null] >> endobj 138 0 obj << /D [415 0 R /XYZ 63.78 781.755 null] >> endobj 142 0 obj << /D [415 0 R /XYZ 63.78 372.105 null] >> endobj 146 0 obj << /D [415 0 R /XYZ 63.78 276.098 null] >> endobj 414 0 obj << /Font << /F30 246 0 R /F18 244 0 R /F29 245 0 R /F52 324 0 R /F49 281 0 R >> /ProcSet [ /PDF /Text ] >> endobj 421 0 obj << /Length 2711 /Filter /FlateDecode >> stream xÚÕZKo举ûWô%€¼™ÖŠoÈ%ÁÎ Á¹ììAV«m!jµ-u¯g.ùí©b‘z´éîñŒ7@.nŠb±X¯Š.Vw«bõç«"üþéæêÇ¢X1™ ©ùêf»Ò"7ve´Ëíêf³ú9S9»^3ÆUöñPv›²ß\¯…QÙ‡k'³}¿£¯C_vC[š}wýËÍ_üÀ슱Ü)å©«µ�Â\Í¿í‡Ãõšëb±:X6ê‡ÆšÐõ¯¦:\3Ø‹>˾¦ñªìû¦ÞPïþ(¡";vÍa ±&t•q‹xìÜÂòëµd"ûKØðpÛ5ÃÉl8žd%¥¦sléü\geÛbÃdÛcç¹… ÔËiN_ÓAú¼/¯aôWüS"´�8<†®y�V`Ö»Ðw_÷a¸ {t{úÞO$ûv_nšî.Œs'ÆoîO“=^R‰—AìSÞÇr¯CØëáx F4ޤ¹ 3`æ}Î:‰¿HäÀÖ¨ˆ …f®/ÌäRHP ]Ai€Us²h©dдFüìûQ&«@â°= Ù”…Íž¸b?Ø•»:tîáØ[jú{õj ”ç;8ž ¡ã•gçýÍÕãƒ>°£‡L±•V:·F¬ªÝÕÏ¿« Â%äÂÙÕ“Ÿº[‰\1­võñêŸdŠÜÍwãNåZ OK› …Ûr¨Ÿo+x‘;#Þd[ÁAí\n»mPK–-€GÁ£DÀFSbS97æŒÔ„©iõ6ì[àÈ译šd 5kÞd[‰~,¶m˴и Gyx†ºxЙ¡þÝ¢iyõÜûõ` XE‹˜»ŸM]µ°zƒî ü3úIï$ÍÊÍf ÖÔÚÏ‹¾;·4ÌûX¡aÜS÷h*Ö› v=Ý×µÀí|¡EåCpHeØ náÇ¢7Ã]3 àÞ¥'Q\du~‡ì ‰ô±‹/ 6©N ìJ¹sV3\‚æ.˜}Ÿr‘[«=-cù%s¹f#käþÄxQt¾O…*h-\èl­æ¹dlq¬Sò6·n4$TOžÜ’Žà%õ焪°”™7‘ˆdà¡k!‘haÏTRŒ,ÃÑñ¢¢ `[SûÒͯ…„Ëp|©L p�_܃À¿þžÓ²BÀi­§…¾ÿŒ?aÜ‚3´o³­�—(ÝrÛ³j§ùRítT3sç‚î<§õZc�(ì·D]Äž=¹´!pÉLÆIÛM›Ô•Ëɪ˜´>4+àΣ 
lßzDDmï2°ñþs¹kºº§/ò¨Ð.ˆ>6 /èÉn'2 mæÆ©ÈÑû„UY˜Š­”u9ãæë/|N‚#S¯'‘2Lkc °‰‡ |ØIi‹Ü£´ÑSö&™•8±·„ ÐHx3e@_ã^æ$ $!öõ$RÆ\6’’ÑAýZ¦µŽÏý“Gï¢Ë‘±µÛuc¦ÿòî긫ûÅ`ç¶§|=:aºuʚ˅±&b%`³¬j¯Ö ¯6aÏf÷Ð6Ush›cÐñ¤—‚d@zUY:Ò{°êP÷)n”¹YœÏÇo…¶?„¬bij> Š �“:¤MwÁ–³‹ÞHº½ÑKŽFxm “~½VØkÊ[¬÷8ÜÊdBCR³©}ðö~‚ȆzUSAò÷…Fýe`c(w5M,‡¤GÔBÄ«]ä1ò±ì=ª¢õð 6 b_Ȉ‘ƒëåGUÞùáo ÇîR,*™‰K,JïµÒ²L)¾„l<ÓçM�ЊÏõ^: iÀ-Y^ÙÝ‘a*Ê5¥ô¹3ŽÏ„ï¡ 3A½}À²®~§M*ëåÇ�¥s¶©±™ÔØhÂÒØ \2XÁ†Ÿzi‘Ú–0ã?)±�=ËÒUFé¯ßéwé `Óä)Ý�^¸0—uC#žUÔqR$ßuVt×ÿ7¢[îä•Ó`åg[%‰]s€™‚'¶àßzF†ÔÆ*ØjÊ” qÖ”fJ#ð*CÁæ¡ß{Ï]Õ›#ùåY•ª¥±ýC¬ÿ@’DzXàéë!x}ˆ“ThßgyvÁ•kíï‚–ÿ¨ªcß×]U£PB•)‰9!à’]ö°nŠþûp4p±q°ŒÇäaúðØc*÷Ù'5 â@È„zÇØ×mYQôÀ°Hl†5‘BN×»VLe?ü€¿2ãÉ,@çŽ- ÷å’Í®Ä}Ë•üÔ ‡ËkÎìRz%Yû8 ƒ^ñl¶9úóã ªD°€c°gVž…nˆ&‡û¦ˆhŸm*ÐùûwÁD§ÆæŠË)©(xiq禈“ .qŠ…@¦x‰Œ¥ºÅ±+û/¨Išk ¨™ŸuÎwsâ·M¤š$ìXûµ™÷5"BCAÒC©ïj¼ì!s‡w]¶ƒ³lê/;ú­?ª¤¯jD_Ž'×¶¦mg—;ù{÷nŽC'Wc5,‚ø¬Ž} ¨ëãâ\•æ¾FÓB/»¤÷¤Áo¢õ ]‹¢¾± Œ"ž6(<¸oÇO­eÛbŽñœì¼®¿ ïewç ¡íãNënÏ“•Ó7†áâ³ß+>Ï4Ä£\Š/Ji·Î`g'õeè!­{_Ÿiy;œwë P¹^ºuà÷… WñÂU¼på/ŠZdnÓê´øŽò/±�ç¥OèéeùÔë§Ë%Ýh(åçf¿;“yM4al¨Ë ÚÛgG³œå½áÕç·Ö•?Ætr~<CÇ›?JÍg&~_Ò XÄèW!ƒÈÿ Ï;Ô)íQ¤Ò#½ýfsV}^¤ÁG(eßã®è!LXs'Jñ*Œaÿ¨-Öm±v(ô‘@­}I \r_m¿h’Z½Æ"­ÑÛmsHß8 1È¡¹µ9Wê|ÁH{bÏŠo‚åÊ~‰TñMà+—ô´ ¥ã{Øy]�Í™Jž`ÌšóìïûŵP’`xî¬K%£KåÿâãÿáÄöødLMåú©"ê_¢+€ÿã~k ©`,À…‰Š‘P[vwÇò1äI¼lñJÿf\€™‡C©F‡”2m1ýXÿX%dð9ebbÌ,OŸrfÕÜ—|)Ïç):àKt€Ÿã#… 'òœ”öbj™A2©ç(æ?$3H$žp©Xš åŸ ./"XPvãÔ �«'h�{¨”/u:¼â€Wr)_Ì9×EŸ×*øé …˜ŽÙtX%£¿·à’uÐÀqÃå˜æÒ¾Žï6³‡ÐØ‹µÕ`ú¡Xm)h›Þ}hŒµÚ¹ÌÿÖ°FÎÞkð‹žwúÈ›ô®‰­ym¿1—ˆõW>„©Ù:*äô.Ž„º­Ox|pö¦Jáf”[xüó¶ÃÔ¸u‘Í´]ùïk¢WÑ„Çcx\÷<WÓ‹¼ó¬ð–—LùRz\ºŠÿ7å|Ε໫ëQ%Æ'Œù¿+kB“Ï\¤W:{ºoæÐ/ tö?u¿-«ÓäaÅœÍ ˜ÕÚA8ßæyâÅbÄôÿ Ás endstream endobj 420 0 obj << /Type /Page /Contents 421 0 R /Resources 419 0 R /MediaBox [0 0 595.276 841.89] /Parent 418 0 R >> endobj 422 0 obj << /D [420 0 R /XYZ 63.78 803.424 null] >> endobj 150 0 obj << /D [420 0 R /XYZ 63.78 781.755 null] >> endobj 419 0 obj << /Font << /F30 246 0 R /F18 244 0 R /F37 289 0 R /F52 324 0 R /F29 245 0 R /F34 391 0 R /F49 281 0 R >> /ProcSet [ /PDF /Text ] >> endobj 426 0 obj << /Length 1949 /Filter /FlateDecode >> stream xÚ½XKsÛ6¾ûWð(Í„ñ"È;M:íLm<½$=Ðd³¦H•¤ìøßw HE«IÓéE„–Ëž¿²ä>É’o2ÿüþöæí{™%\1©r‘Üî’\2S$&/Y‘Ün“+ÍÄ:å\èÕíZòÕËaÂïѫ ’¬{<ÖíýúÛŸß¾çEÂ9+µv³$• T(’÷{½»>—z5®Eá$Â%J…qáñ8Û±­Ç^W»Ñ†ûªšj¬»–u‹âÆŽþ cÕn«~Kÿv]¿÷ZOšˆªš†^^ÈF{Ð*4ܤTNf<T¨áÓÚkë;0ì¬Å/E¾ª‡¦¶[F.‰ ²TÎ?¹Xz¿kѓǽí«ÑnÉÇãZŠÕ 0¬"×Wwh»qªÂ§˜¾}/Ê™û…a\ù½Òð]jÛã~˜}GN”Lä*¶û7{hª è J±[Á䳺H‘äb Έö\´ »/ñ¾AbéÂh #oa3$QJ·káwu·›®Åˆ»Oý®ÕbàZÈ„ ÃeñGo–ûü€—kNtzÆ}Š�yn3Ò";<ÿçºÛl*È…Û;Dtú¬ê=ñÒäí’êwnàÑ©è )ó¹Ú×­]óUO,�ä JªÐÛ¶¯KÔXý4Ò<ímã œ»cë²k ¿Ã‘Ò&äÛ·ï!Q& #‹Œ S‚ÞNßw.»ÞÝÞüuÃ%¨B2Åe’gšAÂ'›ýÍÇ?²d /Á\&Ë"yv¬ûD2ͬšäÃͯ‘Ãra¾^„ëvqq¨’3.K¥X)¼›Ÿ =ú˜àL•&؆i²àTñ’éRÄ[pÏ$ì¬]f,/þxf ¡_-aÁ œg¬È¤Ð'ݰèÁ¤.¦NpÍnSµÔÛO{b5øfHCÕµ+>|‡•�<`Cúz ÝŽx]‚#Á·õ¨8HÀ©—8þ¨—°«=1xS²€ÎAÊBg´M½ÿÒ®¨_銒CW¬B¥¸“‚#Xá³ÆçBEδŒ*i®‡ ÊD`¡‹Â],¸ ]\kïÒm8ÞÄÝÛ×Õ0Ñ:g9?©±[RC³ÒœÊ"[2ưå9¾$Ë =òðïqisŽ–§fò)˲E/pÐP.¾$H³âìÊÅ¥dælåóƒíƒcL®y`{\”* òYý×X¹ö9 fûœÀÄöcCO/\öçeÖ�q†qHªÛ%E…ÐÎÔÕì)™ÌOaœòê»i”@—Æ…Kcü7]$ž‚ú$ŠÇÜT€¯DÉc7¸. 
Ï,t$áïÔT`RÛn‰‰š‹¦ŽAËûú ÇM·¦áPœ@Ÿí@xZpUÍÑ"Ãx÷ƒmìh½°Æ«`?×ÃxæŸFø«IÿÉX¦.0H'l7aÌ4Á XÔ-=[û|É>kRÇLàñý b/L üF·žñÛ9y¿ …âWà›—0ÄšpS±L‰[e�ùõ`Kd: L¥„#¯£÷p-ØL&Wì0—‹ù"çLCiþ'öçše2í¸yÁ”ÎÏõ~dž(<>–áä�Û~ó١ГöâþÍû(Ô— $wË«(‰1-c”Ä+¿ .©£ÚþÐ[š~ç{¿¡ƒ‡«i÷íÖs…F⹄©y¤Ü€í¥Œ{Ȥ¼yr8ìþ°¦“¬I£bª‘c™–yNã<ë¡Ûwýá¡Þx±]ꃉ~ÈÏsÀ\BOsåý\Ê/�}ÿóSf6ɽþš‰¢¸ þ°,ÎíA_©’‚ t¶òº>ßôôA9/Š�‰3ÐGÒ+ /3XóýIH‹Láá”ÄBVc®¡F!¯‘Üûª%\‡Ã—‚¹!òY(x4$¼~¸DÎÓ"[<ý:¦pÞÅu(\û#oïEµžý€Ù¹k™zëY+zìŽM³„‘ÑÄ_ÜÐdR’¨ý)2@ó3€›­/Îó«ž44ÂøÂG… Ÿªo^ü…NßWî†ÍÿmýýOo7]¿ý.}p“—!%™Cj+44øÚVþ®­îëg‰‰Rt0™cpbíçòR%”M^²,`3?øÁz±ìÙNBÏþ>¡Åo?Þ$| [1©ïßøk\B<“È»x„Eo‡1>nvš•®ÜÖ•!xÚmÈˆŽ“eüe#Þã5®AOƒ =}?\»%õ(þ“/qŸÓ3ç!f·]žt`°§ÞìtÁÜ%]¤ñ·N°øóèœ × –Ù ÍÅx ¤ï©@»ÆV-Ôy±ú…Ó 1|õ¹ €Þ ìí_Ç:lI€„‹¾ÚÖè9w�§Œ«~=tBB˜rÖ™!a±äœ šî¡N@ ¶nx÷ɇïî Z>¯Ã,²’+ ï !|Xð#ç�´)Ô‘R±:XpdhȾ_5ô OØô¾ìe!…çç|ÍAb"K8Mh®Žs–Á)?¾;wW.M3„"ó%ƒf¤}rf7D›Fmqèçœñ³*Üáqj Ü �³Z {`È‚xå09„k4Á#˜£ÿc$ endstream endobj 425 0 obj << /Type /Page /Contents 426 0 R /Resources 424 0 R /MediaBox [0 0 595.276 841.89] /Parent 418 0 R /Annots [ 423 0 R ] >> endobj 423 0 obj << /Type /Annot /Border[0 0 0]/H/I/C[1 0 0] /Rect [124.565 267.118 141.515 279.737] /Subtype /Link /A << /S /GoTo /D (subsection.5.7) >> >> endobj 427 0 obj << /D [425 0 R /XYZ 63.78 803.424 null] >> endobj 154 0 obj << /D [425 0 R /XYZ 63.78 781.755 null] >> endobj 158 0 obj << /D [425 0 R /XYZ 63.78 702.577 null] >> endobj 162 0 obj << /D [425 0 R /XYZ 63.78 378.152 null] >> endobj 166 0 obj << /D [425 0 R /XYZ 63.78 251.385 null] >> endobj 424 0 obj << /Font << /F30 246 0 R /F18 244 0 R /F29 245 0 R /F52 324 0 R /F34 391 0 R /F37 289 0 R /F49 281 0 R >> /ProcSet [ /PDF /Text ] >> endobj 430 0 obj << /Length 1980 /Filter /FlateDecode >> stream xÚ­ÙnÛFðÝ_Á·P@¸å¼úÖ�m‘-ÐÆhڥɕĆ"’Jê¿ïÌÎ,-Ê´SG~÷˜ûXÅÁ.ˆƒ¯â³ï›ë«o~PE ¥(’D×Û Õ"˃,-D\×ÁatÓLÑtwk7]ÿ4Ÿü晟S…P© "i„1)ü¾+oZ»‰´JÃÑÞ–C95}'Îð,¯<"ˆâ⌄è¦ïÛÈ~Œ¦>j¶ÛÿOQBˆÞvÍÔ”m{GDU}·QYø ì0ÑbéÖÂ~<–-œ±#γ°ÜÈp¢á›~Á§o-œpÀ“;xë–-M=mýÝl=öšvúÎS[Y¾/’ÌçfÚGÍô|-½ßÛβ€ô¢‘ƒ<ìDZõ½"ã$l@*y8 ½Û« ‡ yŒcا²«œˆ`Úoé;í-¡Þ#˜t‘mGûŠXJ`ý@K0Ü{ ¦½“)b±Ã!Bž€ŸÙ-ÓÜ’™ l¤G,Þ|Y?6h%Ÿhû]S1è)5^Ó¤(ÐØÔ<à`[gÃÏRÕ¹E·ƒÝÚÁÍ?•íøl}Ĥ:ÜÛãÐŒ“£ ¦['H°Uƒ®ZÐRÛt;Zÿ¼·$aœÌ\Áx抷Kƒi(»¶lÍ󞿨'ÿœäkÆIŒ ­©êÓ&1aÙm Pr™\…ÛcWñ8EÚMŽh¤Æ)ê‡$Ä*rðýp@®Žm¾„ðÑ¡­DE¬Ã÷à ´3í¾`Öë5 *˜J™e=!ÒYÂÀÁŒ8Øú%Ì�íó+l ëY™R-,A*¶ –�¼Ü[�Ü[LfK$IŒp̳ôÆ�` cÀyÏßÙœ3®×› ðÏ– òøTÈ8EÚUV¬[‚ƒw–{K0ÞðЩ%³%à© ®uÆ€“GŒ“ĉ©‘º‘‡Q„8XDŽëfâ®)„Qlo!>)™‡m8ðÉŒ —û[DI« @pn^Ræ€-¼_Ž/ç&%“LRú³þÒѶ¶CÐWºàÔv[Û‰€n(XîK—;›þè¨+ÏŒÈUºfㄒ̉ð8ZZ(ésêyävÉl4ûç8rf³å5ãñôKÝ?,zB9 ¡:‚:8U¹Ð>£ãó“ÉækR4xNe›cÙ‚ CÆ„r9|®ÛÿfÙ ]µ’¤˜—á›a6£ôWt%³À`TÔ¹3ASõœ+’}B–ò¢Úé°`ˆ)˜$ /ñ5 ¦ùZÊø+>¶È¸÷žÔxªëç×¥¬›â“4„Ö2y.ŠD_(ïB‘¼ Íò†…YÞ…òò.DE·vbÖˆh$Dg…FÇÊ ºð)€Ë5 N9;p®5¬æßÚ"‘­O$ oŽM‹¶Wz  6Kö¶fY+ïËú±ºwöß•2Xpl`ÙêÄmˆ¬_Þ|‹a†ÜÏÅ(³Ð°QB¹Qïn7)ÆÖx& ·@5õÍZxKcQèÔmËnw,w–nÛ—# þyEÞJ^ H¹\‹·FŠ,óHÑmК¾¿¾úx…4d�ò9´OZkT‡«?þŠƒ6A+B;Ÿè!Ð"-ðX¼»úõ…VBÇÙóQ¬ôrZ'¢(-rí]oXMx«ñ̽^ã?*WO³ŸAóx!ûY.ÔK±ŸK‘Ÿ³uý ÿJ‚ÎÆ6-Pš=-cnu‰Œ¼žÊIÀ‰µÁ¿˜�\›ÌNƒô}×{;®{g,ò\{èm½ê‡‘1™È¤\Æ£óvKAø˜Êj›U2w›vgiʆ+‡Ýñ`©¦õKØ©!Ì`§ãÐÑbC 3Ãj„RÆUˆR"Í@�´A@s‹Á‡ª ‡Õà:r‚pDÒ:îKí¯¢£ÕD«Çnlv.ãì¦éÊáŽÆóí0vqÿ‰ÛåU¹úü“ìùàâ„=®¤ábW=>vçj›¶h¡M-4*‡R¼^J$˜@møÝ¿MðºùÀÈGî©C©½ˆª¼e1 ãGíB>;Xêd`ƒëŽÌü pýv‡™éÄÒN:_ÖCRl& yºÛaŸ€‹^%®ÞܸÌ_;[,ñ–ßp¸\LÃß›Šž/ÜU¾ {†R-¤ž]%*oF,S&_ ¬:Ž?´`Ð5³Ð”A¦Çiò$i¼Ñ¸:áÏn-·Ûä“‘ Ëû47J!ªÆÉ×D6hÑ„”—¡ÈµHŒ~>Š•à¨òL(Mä¤ñ¥é£§ùbu óôöµRerŠTÄ&y j]ˆL§K >–aµÃý… i*~ÚMœ §.‘±H’ QQäù‹HÑ@æM³b)Åÿ›¤Ó"å$}ïê” zŒ™#ÕßX™é$Y6aß>àK+¥ sÎÁ…= ¬†Ø‘–ÿÀDM"w/°qƒÑèŽÆs¤„1½¬â`à›j|Œé0Gâl;ô‡{RVK\d…Yé ÜñÇJ>uþð·ö,ƒM¡Mê0@¶1PKî5‘Bÿ!ÔH-]Ãä(Ã"ƒZ.L–ÿ2xª•ñíDHã_»m:ëÞržß±þ\Ž“ÿo¸—àS<îAʲ,™â~L©8ÝdsŸö endstream endobj 429 0 obj << /Type /Page /Contents 430 0 R /Resources 428 0 R /MediaBox [0 0 595.276 841.89] /Parent 418 0 R >> endobj 431 0 obj << /D [429 0 R /XYZ 63.78 803.424 null] >> endobj 170 0 obj << /D [429 0 R 
/XYZ 63.78 213.66 null] >> endobj 428 0 obj << /Font << /F29 245 0 R /F18 244 0 R /F49 281 0 R /F30 246 0 R >> /ProcSet [ /PDF /Text ] >> endobj 434 0 obj << /Length 1146 /Filter /FlateDecode >> stream xÚÅWÁŽã6 ½ÏWøè9Ø;vl»m·@-ÐΞÚœ˜I„:rV’“Éß—%Çö3›Ù F%RäÓ#©,‚]°~yXÌÆO>¥U$q•çið´ V˸(ƒbUÅeðÔ…‘‚­­…‰à þõâôøÏÓ¯ƒ‘Ÿ’rl!­ât•Q’ÅY¶b#?4Íc”&e($EØÀß‹$“ˆNòʶS¼Dó,¬Û–ÉNFF‰“¨[°vÈ °PA[“A?F˪ hÀ|Þƒ$·çñ.«<^æ FÂÑÒéær„è,Ì>Xibäµ&AvGŽf™®ÐaCExTpÝcöº½°HC M<ƒqêXäqLñˆÅì2„4‘îת–;°îNLùÿEË2.óüåUÚkí-ôðM3Ùëf¾ô™ Í]vj¥ê‹µ¤u4„vÌÛ†ò7 ¡FCÿFðl@j¼œhƒì0ª3˯rØò£WEBåIˆìË-E5Ï»-x¹¤ZZ††µlÜ yÁ„̘ Øxå^ ¹ãÏ#EØi-ÖDÚ:e?Yº²ã³ €Ñ,c>út`FòX°¨….¡#©!w/tr&O¶Ìß`[÷­ae¡y4+ôX€žÔ­0dë2Þ² 7W¸ì{ð:Crëxp»Š³´`·—­3U­�ÍÄÖÅÁYv#•“$“"Rùz·ˆb·’;]¬â*Yy3>ô!£¿2}±<`‰ðù[M}ê%24ľÝ]SÿàJ âuÁ‰îÔ…§t€½`‰³/Š)æí}ḠQÙY`5[ ^hãi¬y @áüÊñùˆgñìg?[’ 䋞¾:1ƒ{5¬ÖöúxáYhã6:k¼–ç–~8ž÷bCíÊ-œÞr—N™ÅŒ¢MMW•…«ôRÁù*šÄä Ç`Y´eŠîà¸-]7t¢ÁÜÓ<¹dgKùåˆÅ$ªŸEw¸ŸUöGôW;Òï@‚Š–%D}˟ξÝf{·u¼_G5[ÐHvT«, Ó ¸„î\Bo{¹±]Û%½lxÝš‘Õï镇·M5êòÿ@2vatùWTPŸ€ùJTFoœûÁ‚Ü!5¾™$Ÿ})¿†áº¯ !•Û®7ó†@™¦Ç]iÜœÜCªvýÁö8ÖzOZà¡ð•—û�v-|°(žà< ]û®rg víZŸÉìÛ98(±[—æg<Ÿ¡T|° (!ï#lˆ‚4@Mû.ß:ºù¾Wã ð)oç„íîyÝ =Ë9ô,s¡£`…çiÜu zƒØÏÙ#·ž.dY{)w¼†Å¶1 ¸æéä•Dòùo„WžHß T÷I¨$þÀV [-°(€­s`+zoò ¶£CÔºµ–e3¤q÷i\·H[ééʵ`2ÍÓ1Ò7@}q0¿Ý]PÊñO¶q9›^@Te\dôû*YÆeºdÃér²ë秇ÿ�áY�F endstream endobj 433 0 obj << /Type /Page /Contents 434 0 R /Resources 432 0 R /MediaBox [0 0 595.276 841.89] /Parent 418 0 R >> endobj 435 0 obj << /D [433 0 R /XYZ 63.78 803.424 null] >> endobj 432 0 obj << /Font << /F29 245 0 R /F18 244 0 R >> /ProcSet [ /PDF /Text ] >> endobj 438 0 obj << /Length 1036 /Filter /FlateDecode >> stream xÚÅWM“Û6 ½ï¯ÐQ>H±d}›NÒi§§Žom\‰»RF¦¼$•ÿ}’-ÙÙ‰Õíôb’0�µõž½­÷ËÖÇû‡Ÿw[/JÂ]’ÅÞþÉËva^xyV†…·¯½?ý4Ì7AÅ©ÿ»°rìòÔZ‹Í.òO¼T5M´¬zÍsñh¬•m{µù{ÿÛtæ‡ÏqéEQX¦©;pë;83Nè¸`Üà!'MtÂn"_.lEÅ¥­¸ ã,ñˆ'I22øI‰Çý޳™O È}&-MmÓÒ:nX÷­Ú ˜d-o€íÊ€+`!|3¬`ô%_¶Œe »ö@U¯À™¡²½6÷vZE÷qu¹×;׫jÐZªJr@ý«áµÁÿ'Z¾áÁÇB·ó3q uèx¸Ì{ƒ)—¬ã€†c Ý˯DM¾×Cå2ˆö­í@›Š–Õ&.üFà‰R·ÆñÉÐ_O½¦É9™r NÖœ€ ¤.`'s…LXäÀ:Û‚h0ßœÃ)9<»9Â_1ª¸ Aþk%²«IÉÈNº=á°ëš“€Ì0Ú€çõ‡ûaÿɘá02Ä6‚sš_áó±6.¾â³£Ô†³þ‘@–œür$j½&÷ÏQËoV*W.ºÖž lˆº]†=Ž®@a‘&v˜¥ ߢ§ǵwÃðœgÙ‚†˜Œ((ˆ†HC{&X ‚5ÿXaû½È–׈\\±ƒ¬Î^‡LÍñÏ.²] ¶‚(Õw_[õüÝrusÙe’å.É\Mãz8ËùµœSgʼ[|'Ö¬J»bäGöiÇ1ËhZò •5ïLޏÈo’å#9pn,”z¤.-œ1Z‘ïÔ ¨[’m¾`S‰¿ ¤âÞ,ø÷kƒO"Å›`Þ±¦£®ëÉ¢{<¹¹|ù º5©69±—�æž&™¾Ñ$ËØíÁbÇ}¤€"ÚŽ8Š¡öÅk–9§«ºŽÿ¸R“ê”5²;’äµµ É AWh¥¡¬Å³ñNƒYßäkÿnX‹ê™£wÃúÖÛ#-/`…Á “¬¨eiœ2æÖ5O®ñ¬ç0…qÄÕzS˜½b#w°Âb+ž} kúÀ:ÿ”˜•<̆ûþ¹ê™Q=j¨J¶ŠBˆ-ðLJ Ô•›ñ;ä«+†yÞVÛåëŽpG3 õ ¨ÏÀ[ï¨%”F¾ÝAU7éꘫÊçC{:®ÀíyìD%o~…\å-Œœ’øâP÷EœW›þU\³~hðEvõÇÌDü\Pdñ 8és»¿õ&XöÏñá¸�«[\�1ráÆµ{QY„yyAº 󈼎“™Î§ýÃ?ŒñV endstream endobj 437 0 obj << /Type /Page /Contents 438 0 R /Resources 436 0 R /MediaBox [0 0 595.276 841.89] /Parent 418 0 R >> endobj 439 0 obj << /D [437 0 R /XYZ 63.78 803.424 null] >> endobj 174 0 obj << /D [437 0 R /XYZ 63.78 781.755 null] >> endobj 436 0 obj << /Font << /F30 246 0 R /F29 245 0 R /F18 244 0 R >> /ProcSet [ /PDF /Text ] >> endobj 442 0 obj << /Length 1494 /Filter /FlateDecode >> stream xÚµXKsã6 ¾çWè(O#®H½w¦‡ngÓi\vº=0m³•%/%åñï ”lÉJv“L/1 ðø�%ôv^èýv.~?Ý^}¸…Ç9+’Dx·[/X–{YZ°Ü»­¼¿ü@Þu½‘eU¶¦ :U«²ï‚áXm¸/{Õmþ¾ýc²ùá†ççEÁD{Y§dó×½lvjDYâ>è^ßoDî;Dë¯!UmbŸŽnMGïds©”øxuÜ`®çhc‹‚ÌWFžvëf¢¾u»|Îý^™£Q½ª0(hæ÷vhÊ^·MÇ!Ï1 Ƙì ŸÃ±:¾º?Õ±–%†%ÒPzÿäâìHö û=iRxÙYxVÜ£lÚd£úp…œÅé<˜8e‚äEÂòMÀ¹HüOºw—áÂx^ ãñ,wºÚã{‰Ã³lÊl6E…vgq’æEJ€Ìá«5¼@<%�=R²r %nÚK H™`–C ¹«;Ýì}ù¨ÛC÷ŠT*Ö0»—õ ª@}û »ß$±o 5õmµ&@I«¥j›# Eø&0qQ­ß Ú÷ ˜¿€¢nú Vï�’ó Aä€L& A6b²Sf,°ÙE–‚Ìe)¬.IËŠOÀ®ÀH0#n•cŽM\�›°poÁVÕú`qE– àÌ úw¤¨�_ ‹¢;ä\,J\�%PøRÁ aÆõ ÷ÕÐ/Œ£Âlæ_k‘‚ïÔ HgCŠ,5Bg%>×ȘˆŠQããÇ5#Ëó|Ták®�'Ѩ±[³X&“#Ýpg¨å^|Å›ø•rÏM… €‰DaD=¿`w(RHÂ=d†"Tñy…a÷²£ww$Un«Qد›‰P¤©õh¦5$lÚžm‚8Ë_æ(­Ò;÷mºIªŽÕ:ØÚ£°šA¹„Kc+ëεر…fp™T6£ÿv™í²píòöª×%uÍN޵F<JùêÖi‹ìuõöꆤ”Š"ŸÈ~P*0IœÁU<€ûn‡¬kZ�Ü<ÔÒ½‡Àrv3-dKgG“4òÛíÙî•*‰Šˆ‰|Jàǵç!Ë`—SùyÍ 
?Yù÷+ù© §ÊŸ™ b‘³0Îæ>®×]‘EKŸfµË‹O,ò5 Wiî¬È_tFôdòû*Q­‰8&ßÃ1bTÑ2‚€D'Ýx–ƒ"eE:a¾­ê5ƒ"bÜ£Òy*¡iªr<‚f £å]my¨ )`í!‡±»ü0Sô™>YÓñÖ•á€59 ÷B§­‘yDèßî51’&‹Ób~ûj¬ {Ïé5‡aðx”FQA‘¤{¢èq<.I»i› †ŽÑýÅXvG{P$Y­ {L \Ñ .k(;ûiÎ ÿ÷~ôtGÜ‚'P±¦~"v:µjG#5~Ñåô)±×4'УQÿàGà¤oÁt €œº—½£Ë££eihç)ðùˆJsÅδ  Ƶ@=¿O{É‚öÆq–˜l­{e9¸3nJô„n?:ä›éBQ<NXó‘žÒÌFjYq‘f¶Ï˜Ý€ƒ•šèõ]@¼ Ÿ¬Ñz>@žõ,Q€ ~aÕu4¼£Œ6Ùø¶¾·ô⟮×犔g— ¸ %ÁòôL+ ‡Fš'繋ß((„·¼Z=އ;ÏÏŽÏ L'½¦Â£„‰E ÍÆv¸óJßkDâš/ÞZúg¸7@^/f K¸ TËŸKµ·$ ˜ÄÑ~hLx}¦XÀ(É”ÛÁÄž¹Mëܦ5*¸JÁ×ö¿9øÁ'(‚oZÎæ Ü4™7 ëuœÇü:Û׺“ŒÃt�@càI[„+”×9þ)5ø^Êq®ëá¼þ Æ®tF{袂 %…ù˜¹ pÔãEβ˜{AV°4u— ’™ÒçÛ«ÿ�¼õ„ endstream endobj 441 0 obj << /Type /Page /Contents 442 0 R /Resources 440 0 R /MediaBox [0 0 595.276 841.89] /Parent 444 0 R >> endobj 443 0 obj << /D [441 0 R /XYZ 63.78 803.424 null] >> endobj 178 0 obj << /D [441 0 R /XYZ 63.78 683 null] >> endobj 182 0 obj << /D [441 0 R /XYZ 63.78 408.72 null] >> endobj 440 0 obj << /Font << /F29 245 0 R /F18 244 0 R /F30 246 0 R /F37 289 0 R /F34 391 0 R /F49 281 0 R >> /ProcSet [ /PDF /Text ] >> endobj 447 0 obj << /Length 2431 /Filter /FlateDecode >> stream xÚíYIw¤È¾ëWp3õ^C“ I2Ë¡ýìör±gFs𛙢²TŒ)P³H­„‚RªÔjÉvŸ/ª$ b/"SIp$ÁŸ.’“ßß_^¼}Ïó€±8OS\î%âL™Êc\nƒŸÂ¨èªa™Û¢Þürù×ùÛ·ï™^~Èó˜+DLÆR*úöÝÍM}¿‰Wá°7¸ÈÂÎÜCC»ÝX›Ù¾}/äJ ó„×±d’¸ýÓfKÂ<f2…,ÅÏI‰h­-Tiâ“)c\$kaà‹8Ïf•ŠeT¹pæ%>…ÒX¦,ˆdG­ÖòÖZŸâÀŒ«…Þli!Âo}섈S½Ð?Òkã°ü4;økÙ1¦ã\bzd±éÓ§xuõ0ê¯à_° R1ž°G²GûÜ~’ª<ÎR½ò»7Ú,•}Ñ"Bt¹‹®\9|-GÇÉ1GŸåË7)«pò ½(Ôé-YœæêÓÓ›ñéý›AÐÓЬ¢Ò'‹j.OO$~)Xûâ8<Z‡â³2ìõÜ둱4‰À䪞ƒ$¯–¶¯ÿlüt<ÞR¯“\üu’K½ ¹–e’®ûÒo–ÿ=ú¹SÇÿ3mÎ4K•ÿO �ò?>ßZh{Ì[\ÄêyÞú|ò—õ²W*àe1l"©v^6IœiÎEG}Æ:«<Cëf²mu[mýgt‘ÃÙ|=}厹†'‡ù˽»8UC7Eu ­v7ßôÓ­ÁXoXzاªw-=›º:�ƒ£¯š~(šÒô8úB%p”àSè$B:[ÜÞl"”ÝCÛÅ'w#ëK•HÀX#rH{.!˜îb¥¿?De ŠöO_¬¤ë‹•ïÍM]”ÖVš¢Ü€"{zÚVýP5å@OM1Œ]Qホ ×áx@â+§;QÕÕ`fªë΀Ý›a_4Óª3ý¾­·äÐmé@™áµ‘óÌàsžŽu6é]58u §§¹#éàˆ U¤©r†;HŨ¢þhžÊâٺEß›nÚüXµ‡žÖÀ®4]S5×ô¼oñë;z˜ò –$Ÿ> :G–„EçˆÚnk`ÍÂíW›(å ‹qbtdƒÊ'ëpÍL3Ë”ë<Ĥçy¿%ø»· ⾕A¯P®¯Öt½tnÂ¥£á«~–@ýF °Ê•›wÚVq³Á‚ Ìb*£5=MÒ…�x“­4þö£�%ToвŒü…ÖN¡:c.`GªÄ1µ ouø þd^Û9ÏØÓÆK@L¾Ôíëµ¹Kž�¯\ó§yb`ä’ç7¤®—' `×’'Žžë ¹Ÿ¯iÓ2Ábžæ¾˜ŒBLè&Õ7æü{ÔâŒ#ÒóŽx4™²%ϘpÆ­c0©õÔ\*›T)€ôPµ ­§½*–ÐN°”i;‰ýíî×ÙŽ´voï;ø°3FÓ”Ž þW#µ·çì]CU:Z¤¸ÝXüý9aéI<—Œù¡:ÜÔÕî~£9ØeR…u�)$ÏÃÀ,’"±Šàïü}²f!åŒâ'ä,IÞ@°¡>eІ¶ãõžÞÜYEGú“ö äÑÆÖ§ìÒ@°üh£kßú«•:ø\8ŠºE�¾ss{ :&OôÙ©Wb›…WÇ>{Ó™]õñ[_ïÊD¬ù 07Ý&MB:ËgÛr$qŒ×'a2é òˆ…µ`×v´íÚœ A§cnkL û7vhµî 1F& ÿ`vÅX$b)Paª[K×î0€écuÛ1ñâ—.p¨IH È2H˜í4̹åáâ§_’` /!Êv„¹³¤‡�¦ò?«ƒ.¾[²ÂþëæE,2¨õÏ`aÿŸµ“” fO²Håj�C &S©Ü–eq*‰¤Š“Ì €‡ `ãiøÎ–2ձȠ´¯ú¡+J 0¼‚¢�Wd\,^É2Ð P 2h³¢ÐšnWØ¡U2Eà ¾¼é–åÞÞnR èr?ow‡IJYãHÔÓƒ­r丰ÀîGX0‹{"/‹ÆWÞ0nkƒ(¤š…3””Üa-r‘0‰×õ„kö]chq7&çÑí²plfG@BfK¯wccÝÜ#>ÁDFj·}_]áÿ-¿íg(º}{pG ‚æÅš®¢™Ë¢ŽBÞ8pÄÚw$yà ˦hšÖÕ9V©rÌö<FaZdP†:MŽˆš¶©«&ªƒé?ý?­.MßÍîÂʼn`‘[רïO˜†Ü¢Þ‡k˜?ᡦ‡ƒ=#ÔCÝ£,¨y"“±©MßÓºpbj\ÝÒÆW¸TÌx–¸»fž}­B_¸,Šaà-ŽÍª;$ Åÿ"ÓqeŒñÌp1 ¼k|¨LOÕ ÷I,J|"ÎþÓŒ‚ÏçãÌt-H¹Å¡ËéÞ<Ì­0Ù$g& Cµž¯¦ž2ÿgÝ|„šš˜àèä’>N¹g²“뫞ë³J¨8×3…Á°±H­Oüîu»ˆ‰§Ï©×âô–Lò'ýžÁËØÊïpt¸+Ûa¹ëÚ­ìÁÕž°–9ËŒ¡Ña,ç‚uÕr®@ñYðÐF6=âI˜Ï*[ó‚è•((%„mwÆ…Žœäë 8Æ£5¡…‹3l d‹„‰=�»¡/Àî=‘» ɹ½ÂÁ¬�ÄVB†ß?xAOäL±@n;ñLÀþ¯£-vXM¹¬^‡CÃt­ão+áXŸfs ñŽ@%ÿ}±´ÓÿI½9βƒZmo—”ÌæŠ:=`JóðõÙTC¢ÂYà±ɇHÔ™_M9ôôPA;†#ÿmµm¶ÂÖÅÞ»n„· •£ÅÚDnxýPäc܇AÐ"ÁX×QÚ®ëm¶ÇÖWlŒn"h¦Àt–"t ˆmD-Ã?/âx‹¸n:lrx o·.={z,ÊÒܸWk1Ü'†+ïðat†ÓŽD<ç`åÒ~¼ È8M9øî®ªk\Ùã mMò¹uÿ|/SMâqßž¼ÈôÙäem¥ +˜l`s€ÊLmQo|J[·è4,Ç®s-Ë–“Æa¢ÜÙ&¶l´²ýÊ6S‚M]�V„çêˆ:6\×[á˜.ìQ©Ü…¬z<Á9锈 8sÍ­$ ¶â—sô<˜î% ¢4‰S÷1W+8 ü «%9 endstream endobj 446 0 obj << /Type /Page /Contents 447 0 R /Resources 445 0 R /MediaBox [0 0 595.276 841.89] /Parent 444 0 R >> endobj 448 0 obj << /D [446 0 R /XYZ 63.78 803.424 null] >> endobj 186 0 obj << /D [446 0 R /XYZ 63.78 420.786 null] >> endobj 445 0 obj << /Font << /F29 245 0 R /F18 244 0 R /F34 391 0 R /F37 289 0 R /F38 449 0 R /F52 324 0 R /F32 327 0 R /F35 450 0 R /F30 246 0 R >> /ProcSet [ /PDF /Text ] 
Ë]néñ¦Ñ¬}d Yó Çӈìˆ32ÄeŸÿ‘fýhÌÁo¡½®ÜÈÛæ‰GYÇiÛEZ i꣣uƒù ž«öáuÏÚÅ:-Fa†]¯Eú^ÐJTn#=CÞ´ƒ‚õÎ Úi +RG‹2§Ž¾MMYOËì"6˜TÂëAPzÁ˘<Åè‘;êbÒ^±‚¿Ôë€î”U¸&†$þbûŘ̋ڎG [Ã…×>/±c;ôĸ­¤*‚õ¯^|Ä~C=;ø$D„Ú}ßúÕåïã—è·–ò{"¿ÅÌŸ‘L^Í$Ãeóe¸ej¡®/˾À^ëŠÜd mkâÖ›_~—2OðÉ#äQ¹/Qfÿ*ÉDc {ÎèQÒòAòjvÄÍ;¥¢.lbkyxòAÕÕ£¹ñXGhy” ·JkŒâàCã¥=v”­G¤èÍzì2›6VGÚÿ ú²zD„°þvÄu„†ÃN6A¬ˆ/¬Öœ,@SvxÐZÑÃ'8hM] ±ÄÂUxçB3©·ÓˆL5Œ½çNž³ $ 4´V©ŒTØô+e jQ4‹ ·Ø!úIa÷=2³‰o"^n>45#¥Ø¶!C ¡LÂG¼z89 ðŸÞÆjã8R®UOx¥x°£ŽpeèmºÄƃ|vŒ&ˆR4Ĭ4nDÚÛ)[µ×$Õî¬Ùì€Wp ïxi PãoZº‘Ž…À, âÁì‹Ýï03›ÕóÅ¿èëÐoáiº` =\ €)ß]ۧĬ;ÔÙú¸áŽ[j˜«Á0æ%$/vc¯¨ö?#ŽPFÒÅnã@úzЯ€M—ÃúŒ¼iª›€ö(‘¦r¹#žeŸq¶)…ôÞÝ„䎚àÍQöTg©OyÕ–ÿ¦Á‹ñZœ ËqN–¬ð—Pá”Á,%æîÒ¾± áóôœ€¡Bþ>‘«éFsºw@ŽüUsr‰HmX¯Ø¿äø‹7ÒC¡„7žg-ެ¼Ÿëh $QGጃwL^v¥·înž³Œlµ=Ü)”Þ:?ĬñðÑÔÛ^(¼éMbJ}¢6þAq0ªñÇ}a—˜C7YK›òE{¼0XD4eÚ÷\~‰œeÒ/ÊEø­×˜ÓÃíßbNúZñ6œààÏ�4#ÖOÄïÝ0¦°èÌýt•²¸ !iæšU88ÃÌ ,‡t̆,½!Žþò!/~ý6¹\u{;QY¶i`æí\û Øûüó2§ËL½7&|éŒãÚ¬®ŒÕ‰÷`â—•AÞ)»y›Ã9—ˆïWiávpHgQVßÖÉ6…B)hÀ'>a<c÷ù÷Õi>f¥·‹x,~‰ûïm-MÁ”§ «7ú¾:âŸ×èâ}¾µ\!:ý*ñ=†L¢¶n¡¾ `.‡¸ÃÍÌtFÓßÊôÓY}ç?î¢ëÄrOÖðU K#RåýedjRèõñËrÌrOùÐçæÜÐEgœKÃn~¹v?e¹áxÛ¥Ä`J{®1 擲 Bž9ŸRƒbÔÝ||Ôt“Á„,~çô …¦C oV TÚÙi"|^Aß¡G²×|ÜHõñ]¦ÆÊeÒbͽrÉteÚs š¾°Æ ¹ƒ ˆp Y&/-.ª qŸ™³æ­¡¦ÅÞÔ²Uþ¦þñ2Ö,TEÜz0ú®hi—°0é·vŸàp¢Ð.K*T"âÞhbé(Í[§ÕqC}ª^ïÜùæ}„OfSŸŽ“²D �û39’šz”¹20žkk‘¸¯û¹­ 3›CPA}|2Ѧ½&'âôYˆ‹Pܹ| QMXi>îÔ9d'mf¶{ax°–Ÿ¤#M"¡iùVvì&‰¾7×,-8hœÅß®¶¸³v/Æî(ÅÇ]<Ÿ¢¡gñÍ÷ª½ºq^ÞÞA—ùJîuÒ_¬PêvÖB¥GNJ˜{tÖÔç-Êõ}󢯙Àf=KéèdÊJÊáû1ÓÝäƒCìãX–ý™ÅLtÒÃOÙqßÄ_ŠÚs?8‰{)§S'øa8fó±¿ÃÉuke¼×ghO!Ý©+¾ªšÀ¼n²àM¿¬MeÑYIÙ#Kô~–%š¥ b«øNXÞØ$²á®’úF2mß s­KzÌÝ:õQ‘ÙDÒ§@%§Ra!ä÷¸^ŸžÁ"]þ†‡fÁKôú*[ ­c;ï 1š¥“ò¥ì²g1QùÚJ ňSèƒç1ó{c|[éϺPATÒƒïOv|ј“Îò®ðÑÝ’÷®&‚Tsíëèœèʨ¾¶Ì93þp)‚dªÄ§¶Ç}‰˜«|G¡9Ø‹äÜI5YCÛQì,b¹bå=üEü#´¢Ç" ŽFZö¬¢/;ˆv#.|¡G³<ñX1[D`W‡&'è7_î/K™Ñ¹ÆÖ™jõ–×ýaÊìnEäƒFqÖzA^…Cóñ{y6¯ào”Û´9Û’®È8ø°cN®àñV܆•…,ñ3¿ Wý˜^_óŒØãÕQüd“ûöÄ*¬|;¾'¢yyЧ˜Gûú C¿­An¬öÉFø·•å×+ún’Hï*GÐ@ö[ÁŠô󱦾Z ·È›+ÃO…O|¡£GÊï™R ¯,öE++7ptr~é¸<—EƦ*”LŠuX>lÎ ’ÀäÜÃÑ+c&­[c@ìMR°jý<§’ýµb9Ð3lK Žì†×žÍP˜lmGTÆP ¬\7]SsOa‹ë6½î¯[l),•kt$’`̬ñ¹Uf«Wse ¿èŸ<…ÆÀ#‹B[…›á”júIÃе#¥,¦¬-˜ÊO–±› 4-yB–U»öI)’ßã„}G ”:»íÕÕ¦²ä•òèYßn¿ÚŒÈâÁ/B] @œ …T­{‹5Õ'T W_#u­Ÿýjq—Ë”áFîË6ýµí "‚.Ë™Ýa>ø~z©cÐÿͬrŒÈÞþájã6¨–h€¿³Š·‚•ïLC¤Á/–€ñõõ/Cpû8Gâo‡Å˜õÀt˜ëÃÌHP)醒Œoê5-ÈÅÑXõjÿŽ—*’1L­Ç•ÁvðÊò×üãNCƒºË,y$"_‚9äb•±‚ú8ω!…•5Oþ†p)Ne2g¬.g¤>ºL…*3÷cÒÊãt33Ü®âR~îwìPr~¦¸½¨Á‚ËŸçØ÷I·’mï¾;`\÷qòm’ÆOw3c¸©üL/3=wÎÁ3è ØòöÇ¢…æ-†T‘ÿ<ë–ØœP–ýüÐüŽQ0T\D`VMj̈HÃf„FŽ¥¾^±á˜g›žÜxƒÚ*&hÿAbçe !uË·?Šøîó×Z(ý:ÞSbÇ Ôg7µê{!FC×åD%áàmÙ=.•NÚ:%bì8¦t]êîþ¡ìZ8îòñ 'Ý'jzlsM°WIôoõžëtþ6$àªÒ.N5ÞÄFËêeÍTç÷ÜK°ºänÉ#ë»Ö,-\NüX–ë@ÝäžïÅ z'ÆêÓñÙ»ðü2‡KƒQɇ·ñYdïŒð+ÏcÉp.—0 ·;lz“*p³(L´¾ê~ÜÆ<LŒ¬4ä¶äè.ŽKnإ눥ØáÖ·éƒÚ$œ;’Ž\ªi1ÚM'^îÿøJ²§_€µØˆóE7ß´–ÔìÓ:™©e­šÌ Ü½{|„¥ÎæFˆK!Ý9f/qÒŸ›!U_ÀŸ”±ôП47·vfdî›Dæ™n}B ¹fòg¸ì}ê0}X5¾PßëÌlÅdxwŒó¤õ—1†fóŸ¡ÐçH?:L¥iXvfé|àôÛ¸ï¤E7û¸aàjļÛ&>9O<zqâT]=Mt³Gi·Ù¿¾qy8GL¸ª5ÊÊ)b”˜ZÚÞ&áPÂßú’Ö‘”ëZ�5øÓ¤ù‚Ìä»! ˜LÇ~Ú?’¯¤¹QîDD‹ y/4Øiç qäjçŽ ÑÞhxH…»mñ\+ü8~OUnC«‰EZ{|7ÊgYsTéÍ™Fô¡4‹$×ÜþŒq½ÆíD0BÇ2¯f<5Cüýhv÷ý©çÔWú¯‹›z&ãŽzËKKjhæZоïñn5?>ÛÓ ƒ"EFMR¡çï´ø/‰ˆÅE0¥õ‰ÍÒãÕf‚ù"<û P…„óCÛàTŸâÔ¼¡Ûš,X*‡[ç&oO ]¿›!Rm!4„®çÄ¤Òæ(||pemdY&?KÙ+O(QºÓ:lÔqý¯®sI=ÝK Q4xïjÉ–[’ˆ¢îÛÞ|ûb… j‰CU»Ã 5 [pvÖ ¬OåíÑ’ÿ¦"yÃDÙ÷YØP¡8Pvˆaú4ˆÐÇ“ZJ–G–£ÀÈx—@Cv˜æ|²ãÍ©¨3ÐF³ôn¬°ªF7ÛN®Ì¿¨óï š­‘*¶i+áñ‘=¸J©»ëƒ:hjòÓÄÛË~ºÛyV^5òr'ñì•8¸®3óMl™:8‘K{‘䩨´9Ê÷iƒxÇ¢ Ïþ¬ÂØÄ o(eŽa¦2ã$(~Ò0(“}¹ÄQGèQí(:¾êÐ,ƒaŽ»øí‡ƒ CâåœÛ®ÿŒâ¨ÊeÃôŽì'4ì2Cu „~mhS]%DZ)ë Z ¬}›ïºQªÍDƒÐ‡_£¼? 
¶H0´rÂúsÙéQVákU+&œ Ýobõùk_\z¸²WZtôT–`,c+2ß{žhêÛÙ%³W6õŽ×ÃòÍ„Ùü÷N»žå›ô‚;'.·Q™‘\ýªb*$´ºªŒtžãÃ#ƒŠlÍ…¼m©Úêo~TÒë -!ÈøžÊ©Êð\?™ilâæ™y°­ä*òÜ�¯=?îO»x]M0Eæ˜a¿å¹ß6\»5Áï½±½Àag—ÂL¦ƒF)æw¦ø]ô^¹÷³ièdŒTWý'¼9—äå©«Ü9ÿ@˜c;LåGãÆI¹PÊ |.º|¤„r×வ ÔÏ* ¬ø›Köœæ€{ñ£‡‘oˆA_·Q>¼]HgCÊ/oüX³ÃÉä�Õú}ïË7OªŸhá%”V]ÓÝŽ5?ë£Ïë6ñœµ ò’“b`º×\U`gÚ.+çÕÈ¢Àú©«‹>+»Æefûß(‡cå•®‚—Û°iÞBfÍHBìÚuñ¨q™(ošÕÙ<Bð‹…P}8ÏÜòú¥'S°Ìà‹Î*§ŒcŽÎ«Ä=F™Ý/N/òëí½�#Ñl 7¾Þ\ð6 Å™eÁBê½6aÔÞÎw1Å‹nJæ,#C9¸:sGÕ Bgv€vÚ…ãÒ·›lgýùÁ=¦IN‚º¸m=v‘$ˆ#Ù .­‚.«æ#Î5jbQðþž}éGBQưÊÔò0ôsØŸX}Š8¥Ãá.†)XŽv+h凜Øq]rMÇü"~][`Ö!ê;}ÍΨQy Öö ¤è¿æ·I.p¢><³u+Þ˜ž–´®\éE)É®}lÆ=rÞ¨gfCä:wô­Ïius•zŠ|É[/I£J8sKû½Ö+²>jaaÕ Ò˜‹ý[¾ÝMåYë­—0¬ŸßD|¥DL4õL;~ó®׌ìãqfAÏØ3Ñf󣇿Ëßo†‘Xà¨Óf»z4î7}ÀJom!c5™d†á·/åã½(h”6¯s”=˜™–³¯×¦Üu~®9ƒ\‰Ô¤ýÖ;>‡9Ù¿S9_Êbk¿0ƒ6¼£“c°­O@±±qªÍóä/5F÷TšÐ<#ÇïòTÑëþæ“Äk«˜=|þ¾§Ä¨æÍ¥yОÚ߀pÂǾjqÑÓ(¦À’wª'!µ®0Ø¿zn®º© ê§<Ï›% ZpmZ®‹TÐ,õèÀ¤v»ú` É4«7?Ùƒ§ÎÁø  Æ$v~›²€B½Õ«&ÒÓs+ÛAç =1‚xæsg=/]Æ©?ºÀÝæ`zuGäÁ² Ó(HsRRÄçáý ñÖXjÏ–©í &:“±ˆš£á|tÅß ':|%V܈çXx3r²I2#K{=`~£¤æf1Ûxf ÂêØ¢÷ât¢½ŒïÁ¦)Æâ½üÙFäšO!íЍö4dý«_º/èA10dfÕRÊ Ÿ«KÕ>vÚÈhbQÜûÚ¹qoȰÎàô³ójqVÛr[uÐÍ<Ï.¢ÕŸñ2Ï2…‘»‡a‹Ï+7ùËÔÑDr#½­…®”ïÜ€Þæ Ý_ËCSù3¨s3=oŽªM×&~xD)‘[êhƒµ?ÙË`ÐóÅY*íÙb-î7lLŸ<ÁˆÆ<®gðgeöË‚r_Ƥ%Þ~Þr*jˆŠ#¤Oo8»e…O{¤ ®;gºXOJô3Óf¼1C8Ʀ•¤¶ÔË(‘ïŠvïç·Šx¤Ùlð¼G¼ÀtÏ EhÒ\r‘·Š<ýþâkµ8e”ÑiLþ²¾ðç*}VrŒ€ŽÉ;úžbBzË wXŸ5Tz¾Û1ºù¾”ä ØÁ5—ž”å T Ð7’eÓ`gaï}êoWÇ]ÊÑ{ÎZ;Íuý±~ÍQ àZÌ´\" ÂØ° ®Øì¿G3]õ2ÆŒww®ýàM:²wèë:!V”6a(«Ûå]¾ óRÒ§¦CÞË‹Xjq¥†:™Ž|Þ®tÎ!¡Êͪs0G„9Â%a…­è=þÉ<rŒR€.›Ô^=r_ÆaMÊѬf¯­G‹üDYZ=FÆ!iÇùd÷–Œe£ƒØNguÝ´Ê9ÊHúXË>–Bò•é÷|ö`•$=« …*4͆»øi$߬'�m�ÅV˜Ê¤BFÐ(ÊSj—A›¸ó5¡V"JFT°`5ÜgX ø´’ðÞÆIÎßÕ}bã\5›…Ïú¡ØE"“ÌÍ÷æJO ‹§ ©D°Æ^õÈü@ÎŽ’Œ×sÛ¦tŠÓ×óŒ+ŸÈï~4¡è Z®ÒEµðÓ¨0ÌE_»uœŽDùÄÛýN¢(ÙGÏÎDE”R$K³›œØniM’j‹ˆiªgµ´ò0’\=Ž]>2 0–5Ìe9ë@±i¼µó®ÌÀef}| Œá Ü«°5rÁ‰&š–_(Oà|ËIb?¦ŽÕ$ÿS„aG]¸Œ!�ÍÐÇL2L¢m‘¾ÆÎ;@Æ’ <«oth~1,V^Â%~P³ÿcD²^° ÿnÈ¢òþ›M€ÁÇÊ®¦Øæ÷iM÷㚈Q°ð?ĉ×Õ/–ªåöÄ«o&ÎWá%j‹¶Öï}Å'`(Î:àˆ3–…‚>ÉõJ%wÝMCº!ÑP¶F¬‰ G•ÄÈþ›Žåíà�D¿”šmÌþŠ®Ô ÀGª Ф±°ºü¥‚Œåï ½v¾TH!VÑrSX£/6™E“k¿c9kÁ~Ì$O¬êä#ìí¡­\Ôc¤_Ò ó�Dcð¼ãR\ÙJ~š_H™>HvÛÂ?¶sN5y¿ â 9h¸|Mö3|›‚準¸Ž ùüž6ëÙÉž£c2þ&¹8#RgGŽÞ¯}Ûì†0ò‰½ T"³iaJ¼÷þÑ‚€ßKî¨KÂÁÆÜÈðR˜“æFJÅœ[[ñKŒ!©Ž?ÚCO-8ÜZi“Švpã- Wg#5ùõgW.òO ¦/°UF_.am×͇BÒß¶ˆºë¤d¬ úUpt-q«ÝE‹pÃÞdOÌ>½ ÐÀ7Eå£ÅW2šfÓ8˜˜¢…Ú|„Ø.¯ù¢§ãdN+gðÖ# ÓS©½ìøIQÈ;—÷FíÄ6fíWpá]‹4 9‘¥ª¬Úuc«¨˜Ë2ÿ\Þzq[xeêܹMRt+.ƒuöÁõ#zQÐÏËfDƒ<NS„3Ëh¬ó²OL/Ö$Ù3 £’›n;Úª™‚ªÈN/~•° Ÿ#ßY—ÂF­Q)ä¾0]ö–¶†ËgjS^!ø1?©fåŠáz®$ ’n&�‰çÆ©2s¹Ý³?1óe«ÒŽYc£ð¼9nò—²²¥ â·%N›ÑHD ˜¤0¬uÊmøæ‡¤ÄÆæïˆâLýîbØÞH/ÃŒ9¼ïr¨³¶—ÃözdÙÿ¾"lˆ>d~ÙŠ„qúã0#¦q\y 5H70FMª˜@Ø”r\„Â=$XET³÷§Pìñ®ÚI®™2ýØ-Òã-vŸ£-7Ø*eõ¹mǃñP¿U{ü)¯×J⬢(/¾ÝÚyÊ¥JuÀLd$aTì‡kÉè@·)eá.ö6-P-¢)’º[ŒXû8ÞWØ»²÷s¾So0…ó]Ä‚_ 'Â:ª/ÿ¼(¢:ñsoã‚?¢¾u’\7p朣“¥X WN¤†±­ï)4ŸÙKW‘Ü={F^û{Ôi¼û ¯L~‚¶xTgÕÊY¤ïKßæÑÔvêÅ·Pm.Æ_醂[k1äG¼Ý=5O#Dµ'Ú1çT˜$Ï–ßÓM¸îEKÉœõ pôû§QœØ`,ÐyÁó3²7Ë{ù+îè%œò ‡$a—Ñ|ÉÁ±œÓïÕôGòs¬[ƒÀ#¼Ò„_¿{¯LúØÎév±‡¯a‘Ãtäúê ñXéh¼Ÿ"„DoݰIÎHNŽ+â-d%Ù±Ú· K}—…zÑ”âfÔokáç3ÏÝ@ÔðzP»õBÑu׉²ºÝ ÌŽŸÁÕkCוa’ÞÐãQâþ?.…¸ò«:tÎéÑÓ è‹ª¦¦Â32œÇJRI2ü¡I:y”–Ÿër.Î |s ·5)"A‡j2:–JÚÎr•ý4;Q—ÊŠêÊåàø[™Šžòv ‘è~º+†\–¯¦°±JÝ µD•Y¢<hìe°ÚÁ—ÛÀª·ÚW³'KÖ¦MºKŒo>:1¤ <A´Põ«ŸèÞE,µd§&Âþ\ cµ–ØÝ)ìì<M-†ƒÈÄXó¥}ËK—|Ž º^¿ÄéB#’Ò6Ñš3 jè !;÷@øLy”O7Adªèú¢ œ77Á@ŠèÖ Ö`ÁËÊS&ÓA¹-Õ*,qRº?a°N'áx|â~WC  ˆÉ:Ãbéʤ¾ÈÁƧ¯>t~¦£–Áüá÷l±Ù}ÅâÕÑN R±·1®Û¾"›üå—7ÃÂN ¥SC4g/¾à²s‡N1éÐÝ>Úr¬l*ÂíœG¿Z»Óû´ªC8|÷¼àAÓ€áKîV¤ëúmvc¤Ò�¦Â•ÓãWΧé‚!‰ÇüÈvKÁ/Ý&€Š¯±ÇffÀ�"W ·ô¢´:œaí†Áô~ ‘ {sd>LHi²9®³>¨˜½u›Ä^ßΞçÓ×â<E =Ã[£HoBhàm.›­&£ÝÊ‚î?å­>½ J•8ík¤†seú~GX½7¸ìOmƒÍVo¢HæàhsŸ:›sÁüdéNâRóöµTc“r¸•&&°¬Ëj*d¼ñ¥î:%“¨�:§äI–ï–£éžFWER\þ—ð‹zÊ7þŽ#ì ¶§¶‰8ÑëÕ\å¿9coC¬.Ï�йÛª¿Žîû’?l•nóqEêÒÕ|Hèm:ÙO¼F³¸BÄ É_dôÍ•éCYãçºV´Žׂ¤ž/Сš/¬»Èõ~ .æáÑå èˆÑØÛ☠¡Ñä¾o6›ü}¥Ñ‚ŒË1f3óqûø)µ7~Oß}r+zíàí×yñÍåÁöâaÛuYj ‚ÅŽrGRå l––ŒÛ±œ,únjÓ«c­Û›óil¸T-=9¾œÇÐóÓ®uX’ãl'uGÚÆx‡ ÁŒZŸ¨ŽÃ×,’·ÖONÛºó“�ÎÛµ³ë­“ÆIúÛòŒZëG¦¾=°íæˆõ|¹þpäˆ<ª-xdu#D™Úg²¨§k† ƒ•ÇÔÒLj£çË¡0mÒ–bO›èv§H=Éã ™G‡ƒ“õ(­¾å}‚8™<‘ÑÛ°õ³ä3zþÂ1¼A ýî\jQdu¿h³ 
h\Ÿ¶ù­m"ñR )Ó‚“[ٜܔRý®…r÷ƒ=çï—¤ˆUFí¸¼*HM£ð§~–*tÙGlGÝÕN~1÷VñîŒÌ¬Ì?ƒêŽ˜Fô¶Å6޵W èŒš‘ ¿kâá6ë‹]IBg˜úÊ$Ògù…Æjt¯|ßôLÂwl¨÷ é¥ÙgF·c£>½µÚ‚±Þ=Iêý¿õP›÷¡ùn-¼ü$XCŒLøõÆ÷V6¦:,£Î[ã¡FSSÏ 1Qí + –µ5W¶åeÌ ~·Á‘MFoÕÝmÑŠ8¡ä®±m¡M)°î€Ýȯ(`d•µ0r\äU­> ~@•h}Åi×,,œ•xk¶qꟷHf«¢^[¾mQ/ÛŽ/"}ì²–ÇÒl¡Ê>,uìÍ7Î Ú½]„̉+Œ¤^‡,­èk¥`ô·Ò·þÈì8VŠ*”< zëjûëfį’¼âc#áïŸ ®C"ÏXoñ=übâ~‘¯¥äSL$3‹‰è%yHfx‰ÂÂÀ²sÅ+°k?ëû¥µÇj(WI~‘6té ¤6 itX´³ëÜM á¦íEcN˜¡Ô£»˜I™ˆ ÉhXA¤€aL>“ÊßÞ/,V"Å žÜé ø¸ë< ÑÒOʇªÿ¢Öœ²°¬^™¡u”ã%µ~&Ž5É">}øS-ÇËÄçŽñ½ö‘e ¸fΚÁ ºõxQ¯_UÒ%âˆí%”N¨¬MʦÂI»Ö¯Æñ¨tp<—Ž“Iì•ûÃÛ<b‹©M[ ˆe®‡îùSñ•N¢è‘\…öb܆geCÄ?¶5ªj×±f3¼®d;ỈŸO@ØžHª¦BkÃY€$»aŸýKÍ4t!JßiöK¼Ts†¶NVBeÁンœ²LbR¬„´¦õh¸–¸¬³Øšäýù§£ç»É/v~Í—®´YØ—Ò¯ô g…AÑf–ϺL$–a]”Ö§u" kDÀ—Á¬]·â‘˜L¼±~ôÆM‚Ÿx°\o^Êà¾gí j|†i@ô*±ii_¼ÌïD—s§ÖŠüjµdxò9A’Iã0Å\U^+§ÊY5Kí“(Xráià\JZv'>¡gIÿMb·ƒÖøÅ'&¹zÜ3ˆ÷#eÚå¬(®žS»g¯¥ÔÕ l¾™–CFËOm^‘I‡w ÒÄïoi¶'*Bt>þÐ B1Ƹ×è;ܵ˜K%¯fþ½R*fxîî2»n$6U¹`N¹„¾¯üÜÊ?ÐOHn3ËÛLEM¦êÕQ¸É «ë×É©A,Â|j®è&œõõãšÃŸàކ*ÌiÇÌ™ì[ÉÇ‹v×4~ãÃ(oØ)(éeQ¨®3‹ *¾q{1BËN‹L2¦î Öˆlk%$s=}úPSQ¾»ÂsmkF{Ìák9Ëëcš²Ø[m´IŒCÆD\[Ü b(Lj§þ>c¸ónþAÑn)Õcš§ ÐÝr‡Ô Ã~ÙÆàðFžIb–|[ƒÍqòrä·û=„õ›ÃÄŽ11 :ƒåø×£ÑûÐŒ¬yR‘ª–{².¾¯H€k!µ˜ŽÂ«é¸¥¶3M@doîR–àëO.V¤‚ÙØÖ L]JĸlOT?@nÚpÉ[‡öN«–ûb pJ,ÁÀíÎ`BsÚßâQåjŒ¼•—Õý]5D>'=cóo082~ëúw1š13Eu‰ó©ó0s캜KÆ™ÃV�¶¬9öA–a6Ço"Q=ÞØ¬ˆé–_f•õÅa’A Õ¿N…r%™ÄéA]Ža+\Lݘ!FnðùèØ³ ÜÊðiX÷áí°ÀN ÉXö¨Ê…¢»§A‡+z±6üN„ˆ1ªõ2gün=ÏlNÑXÛÑT)rŸ ÿ/åË7 wsä]V<ÔN±¨€ËbK„¬U-þQ|á>úæ°4#2´ög©ŽÓÿŽ[?æ endstream endobj 503 0 obj << /Type /FontDescriptor /FontName /BWJTEN+CMTI12 /Flags 4 /FontBBox [-36 -251 1103 750] /Ascent 694 /CapHeight 683 /Descent -194 /ItalicAngle -14 /StemV 63 /XHeight 431 /CharSet (/A/C/E/I/K/M/P/R/S/T/V/Y/a/b/c/comma/d/e/f/fi/g/h/hyphen/i/j/k/l/m/n/o/one/p/period/quoteright/r/s/slash/t/three/two/u/v/w/x/y) /FontFile 502 0 R >> endobj 504 0 obj << /Length1 819 /Length2 2379 /Length3 0 /Length 2941 /Filter /FlateDecode >> stream xÚ­’y<T}Æ­aŠ}©c+mÆNˆ±“%*4$fÎ0–™13–!dßJ¶²$ÊšPöÈ–j*’•’È2Ö=²öNžõíý÷ýœóǹîûúÝ¿ï纜”½£’ï šáqd%¸2\0¶9u àÊ0"'gL=ÈX<Îă êpUÀˆ@T58LW]•ñBä�c<BÄzy“yã£?MZ€‘?HÄ¢<p€ÙôgÌ@yøŽx$S”#??Àáç à�’@bˆV†Àá�‹"ž QùÉd‰Ãà­?Êè@Â_­ Hb@ò»˜G$ó£�hQ±Å3n,ÿ¬_‡›úùÙzøÿ¿›Ôÿô=ü±~”?xB $6x4HÄýjuÿ€³ÑØ@ÿ_»–d?,Êçå°?JX’6DÛcÉ(o€L wË ý+#¹]só³FÆÎ .u·iïÅ‘OQOýéÞÕð4#"6@ÂùÂFÆó×—Û/—™âPx4ç¨jhD¢Âø}JƒX À°Š2Of™„<òs£::€ êgiWÁaj€Š×¿¤ ‚ý[ªk*Æñèp†ƒô/© ¨íÊÿÀ‡„)©Jªj€–ІÁÂÿˆ $Ay÷wcÄú—Æ`;�ÁÆ£ŽÅúd7Ä—G˜T°C™^i¶õ^?Ü3’Îìwû™u�ôCóFeÞž/ì_o‰“’Û.œ|a¶–CÛùtþK^h³¸ó×<ÊAz-†¾¿µahu^ƒÙnðµ<Ë¥¤÷Æòcz‘½¼É©iŽ÷RLÈ û×c5µœÍòüN'ÄÕV“ràÍ (OTO æÍÊ`ë‰ðIàlRx·‚o äß(æ,ÌÜqgZGíI‰VEŒ4öûè'ä)‰j¹šx­]r˜êow¨Èy^¿Û™{7e¨7Úºö¶cà“cKm'…ihk‚ËúBtºï„<Ç€Éi£ÒŒÛ®kÚXeÃëKb­Ž±ˆƒ©Þ1]Î:~ÝnTˆ9[’}¤ì>_èIë—1ð6Ä _hÇF7b�ªÞx—bÕ˜iæW“NÛyž³¬zd2¾K0·!œvøkÔV;æÒ9ðõg Ø ÏnõtNþ&_D—›ÔÛ<(ô(Bþ¹Æß!|Ãæ|öÅŽ¡Ž8^-e­˜åüôDIûæ+¨Ô 6—R)[že®\Äs7˜Ë‹«›¯bÐ 6Š,•$B6ñŽW]î,¬gˆç˜Þ|.Ç熴‰jÔGõ9÷§OQºíÖ·d(T¾›‘íåbûÕõÐ@ûA…”/ÛÖó½‹Ü]4̲¬¶©¼4unÙ`v®LçÊÜ£QhdsHgû;«ðÍRüpM<®iÇüáʳÍ0Ü»»Οî¤ù²²"¥ôQ•œ| ÖÍòaÒ¡Ê¥läóBÜýdª´t §ZîSýéQ ’ÕÛ·¡ž|CV4ƒb¤ÓÔÀƒ­½"À¤ªô»Ô-ÚkÁõ˜]EGy¼­óvu%ÓÁ®7ð“N\É0«”¡ÂªÜ‹‘;ž;ÕÚÆK—L?0}–ÄìþOC•)-ä—G2oGDÝ-.žÌÌÖö%±½Ï0? 
q0Œ098½^_6™o3J>‰�fš¡×xëpEwøù_f‡ˆíypI,ÂìÓ@fÆüÈñ™9S3¦”tz©´Ï©Mø¦dºfûfÃJÄåY\† ˦èr×!ˆå¡«’1ÙJóo e9üË®™yø{¤Ò9K{lÑØŠdoÒ଄ @%yÕ¶©Þ<ý’%Ð'�…>ކA¿E\5ìñ}ëSÅå•ðmœÎ"Q]v~[®nJ†Å%ìÇ~þ7]'D¼9àÁ,—&‚¶àú>¼"9¯n·BFhhÜ¢o†qLzÎ9—°ØÿQ³IôiOU÷æ‹–0k•‚Zo ÈŒÔþ®Ç’ÅÛ"®àGý&ç¶µsWUÅà§"öÅzGã+%“-Š W|u‹>h°Œí¤—:¾£~£º}êaÌçž8VèaìëãJ)#5çšÛÂ>]9×Xbø˜4,'(ÙÆ¿¬VDä鈃Mó߀¤U}Š£;×Êk¤×/?Í]"Öu5!ÏÞðQBŒ[ Ÿä°ŠlZ¬ŽõN\“åu)¿nçXît&GiñÉË{ÅË‹Àèzì~F·»YB,´5‰ÝÏÝ‹¬†\fáN"F"ùÍ' ê"â°9xƒ¡Áëoã_�'y>Mµ{R¬¡™´Ž/Þˆ¯8a¯wáÖ€sCaÆhk5¢áø€f]í„ÒŒTþèÁddríî¹(—â¡Êu³Ó+6bK÷é‡ú÷!tã{/´Ì¬WÚ¨Âfù&' tdC]ë×»C¿[Îö.{ ”øôæI~æ¦A‰²¼c¡|©¼ÓÖ,ãÊZûV™m^[)ÓÅ(v&EÓMc›ä]àT‰¬+}}æå&bgÄ[Y-š—7TÇÜŨƒ†®ÌbS((ÒÛ43õM_dmo·AP¿±gœüµˆFîÏöÇl)?ó¼DÝ.»fžz¼7{!Qݒܯ¶MFIÓ cì²­ˆ%¹¯¸BÇ‚ilõ>øoŠb"g1í]óQ-—yy?—S ÖÊÙVÇ#™í¬ßByzŠuG> þþJ…3­}ÅN}°\¶±­§é~Ë´H]r9ò˜ì±™™%— ©¡@}Ö‘¤r¡~Ñ—UÈô\½C¶¢`mÓ¦&^ÓK×ß–Éýµ¶è9öŽS†¢Vivï|Ó^EyŸ!pi^ÉM°ZŽ¢º“Ý0ÏÓ–ëb µ™e Žd‡ó¿‹“nÒ}K¢oIT×q¤GàR KÑÇ„IlfÃiÌO}qIÚÍzfwº‘êÝ[•Yveïí¦õÇKvV]ÂÛïj‚C`ú5ë÷”'w©VMð0•V5á€ÈD> ÙÄ_“ !³Ÿ§Öwó†–…xìEG­bºK}zt ¹Ëäë‹ËSÆ…6Ó£êSñÉ/å&¾€áÄ¤Ï s#·ËÈ“täùaæîÞóÊÊ6:2¹d&g²†äÓJîá#Ä$îÙú¢é Ïõau†VlG±ºK7g½†~¨+ R/¦Ǫ́oëVtÝÄeã×òoé¤Y£Iv¾¶b⤭‹ Eú£¬KûåV^èc*„?Wô¾>’ÕŸTuË›©'¸CXRn 9øÃ^¨ä(*ÓY±sªeÀâ~1Žªí¸Ï‘ ‰=ÿmÑír^VTñZ[éÌòQ]¾uko°·¾ÊçþA~z¯—dR{ñ.…Í"çEø@ä¸ÝŸ}Û÷h¥µ2;ÇöTÕ^ÔÃïyósjqVš‘ŠBOWûkó»[VKŠ'º ŸÖI´Œ4ÃòI„Þdy¿ã$#c8RdËJ ÈDZ¥òæ[V CåY o¿[DòÒZ³¨´ªPô†r„ó7ÖØ NSÔÇh—çæ²#%¬uÒkk®¢Œ0¨\ ³¦ñF¯Á’·"y´fÁÈÊÕ:~¼@íí{ÕGĤ䕂_a|Qƒ÷"³l„r×í·[i~²r9óžÜZÚ"‹¯™‡¾W>pœ™ˆ()à¶ áæ¤f ñçM³åê·û¡K£'… Å'K„µ²låß=ó¬* Øã!£w(våðÙBñWõ˵ï)èÔ4i …_¿¶i4Ø {‡´P\(~‚egf"ì™uoŽûmð„aWžCúYýGTêW'Þäým®ç•¶Y3$g©˜ K=H9´]âú›ÕVÚêiVÒ-Ñ>¤¡Zi»Ûv/ò†'â?ÃP—* endstream endobj 505 0 obj << /Type /FontDescriptor /FontName /GGZACX+CMTT10 /Flags 4 /FontBBox [-4 -235 731 800] /Ascent 611 /CapHeight 611 /Descent -222 /ItalicAngle 0 /StemV 69 /XHeight 431 /CharSet (/c/g/i/period/s/v) /FontFile 504 0 R >> endobj 506 0 obj << /Length1 1937 /Length2 11977 /Length3 0 /Length 13053 /Filter /FlateDecode >> stream xÚ­weTm²%îîÎÁÝ‚»»»ÜÝ Ü Npw Ü5h°àœàn™óußÛdzþÎ⻞ªÚ»ä-Td*êL¢Nf@)'Gw&6f6~€¸¢†;€™ŠJÜhênãä(aêä°ññ±D=¬�ì¬�6n~N.~Nn*€¸“³«•µ;€Vœî'€¨ÐÕÆÜÔ hên t�å07µ¨;™Û�Ý}˜¢öö�µ"Ü�j@7 «'Ђ `acî0ZÙ8"°ü£HÖÑÒ Àóo³…‡óÿ>y]Ý@¢�´ ‘t�D 'G{€ÐEÉ Ä)ùÿ!ê¿“KyØÛ+™:ü“þ_]úÞMlì}þÇÃÉÁÙÃè Pt²�º:þ·«6ðßâ6ÿý*ënjoc.êhe°þÛdã&eã ´P±q7·¸»z�ÿe:Zü·Pßþ¥€EOUETR›áú¯GSGw çÿdýÇû_˜íƒÚãjã Ðgefee9‚~þ÷7Ãÿ"“t4w²°qm7ÀÔÕÕÔ´ ÄðcØ8Z�½@o`fG'wP�Ô“�€¥“+Â?óäæ°ˆþcú7â°ˆ¿#^�‹Ä;â°Hþñ°X¤Þ€Eú±XdÞ€EöqXäÞ7€EáØßˆ]騕ÿƒxAì*ïħöŽ@|êïħñŽ@Õj¾#»Ö;±k¿#ŸîhuYLÿƒØØA¦næ66î6öÀÿØ9Ùÿ1ƒÎÆÍî½m ¦îï™@U™½#P„™©¹›½©›õ_ù9ÿ1»¾»Ä˜ÿqR˜;Ùƒ¾¾ÿ°‚úañã]÷?ÈÅô¡½;€x-ß‚:fiãùÁõϳ“‡ë_ «÷Œ w«îðoè÷8A¶öq¶þ­d³ù ‚ºhûõØî/ªòoÅ 8¼C6PÁ@¡Ž6Žïp€˜=Ìþ¹RV)�8§w œNE±¾Qç÷g‡³©+ÐÑhù>@N¶ÿ±þû´ý‡$ÁèjtüË•û_6§÷Ñp‚úèlïáö'ÈâòÞÙÐퟋÿž›ï£Ó?‹õ7%¨ã € Tô{b.PÐÁæÿÞ®|€žM… ”Ä tkþ£TÈ­#¨ŽwZ.PÜ­]­ ¨Qî^N€rxüA£ôü ‚”yýµì hï¿ (½Ï_4ßwq L¾@×Sý¿GWLÌÉÛ‰ ÀÄÎÁ ZbNÐÍâ ø¿ÜÌ=\A£sÿן3Ðáþ_liºò@ 7Ðá碓¹@˜mz˧Š@É¢™Jhzp1«ÖD¥¦Þ]ˆ¡ËIàö¥ãò.ô:OUY˜(ûÐû¤^/„nÑþª“RçÁ.‰ oûž&ûY¾ßu®²}öœÉïCOP;ZæoN¹À•ç6&*Ru‹‡r/†O Th%4`×ÈÀúô=›û²Ã¸yt¤²ì5#©9ÈÔÐ2\*"9#Ö½ÐR“¡Ö§Cm#àÚV/:=0žŠàòSÞŒ!-&µLÞ0ÿ€w°rd'QF‰õ_,/dSÖÝ.º„ÕÚ}ÞŒM£•­pzív©H&t³ŸPLDŸt¤ÕS*t©²¨3ÛÓ§EÄõã£åÅÌÅ·%7±öiç·R ¾òìŠPîOng=@F³›äsÅ-4¬ªë° WùnÐMǵÃ'/TH¹n>b»]Çe¿ho›î14cqŒ’›þ òÉ ¸ƒL1¤Çóý|_hˆ•ÐÔßRÉn—ÁÈ>ܨcA²Ô ÆV´¡ÇÃVæs\_m°?T@7ˆ¨ÉðÑz“gÌß'õ’ÍöQˆãO " ) ×ržß¦ÌYšo7RÜ•âDOf„4Äo*« ÿ¦4ïïáë/,’Cjíi¢:8F³N§ƒ»ôƒ?šN>J­­óŒ±l^^» ¶éTU}Ô_ì1ŠÙ˜†o§ç¥#–,î3¾LrÚ9•~…G?É1½ý¡Ù½9¶Š«.íÉ?tÑðcîO3ä@öà•œ]{  &uõ3?+‘ÖS}î…Ÿ!TŒ´µ›ÌÆ7'áõ)­õßùð&“ëÔŸSM?)¢·ó&Å!b8ɸ ‚3n8E:4NåËCuˆoÍäÛP¹Ã}[µŽ`+}¼hx} Úör¿ä{¸ŠuK,&ìKÚ…˜¨ëBF·#Íè%¼!˜t]‰ÕzÈçÑÚGºfºÓþsÃã'~WtÍ–ôxwíÔ´Â…µ=7Á²ïÛä5Ü…3Kúg‘º­ ýäpFóôrFX[6dW‡�/žÃ=CÀÙæ~9Öœ›Ê«EËî3õì–áø™ôý.·LžÂäÞÍöºÑUëç˰S}1¯ùœ;ž¢ªJeR{y'¬.ê tokzý? 
I\ÂãÞvÓ7_¹Üá(|{ÝRcÍ(Kº4D»ýûý?]ÏNàÀ»ú}½œïýZYÅQ…¸¶ÈÊ™ùñÙVż–Û«ûAuf)¯uAÓbn¼öcɃñ]Ê íöПE'"s,3ÚJ~+¼¬…W¹lhÜÀõüܬ«\âÏëxÚFßý%æ‡u0a«¯W ¬:0NáÇì[‹ZRI…® åž[„ÈÝKëÌ=QûÈÚn>øÕõ9´|˜°£Fx†»øPêæ[ôú=6Õ4únƒÌ+#þà †‰u¶—ìÛã‘v¬ßrÈŒã{¦ƒ‹ªÞë qáÜ™bm oµX"ìéMRñW#‡ŒÁ2¸öÌDÜÔÄ/aÛ—ntVÞ3bjÌ2Ä£hÚñîM£®R$rö\ÎÓ>ã—‰u©wØ:ñ4†¡‹·ž"0¥JÔ?m$¬6òlnĺ7C{V~'sÝ«®ösì7PæôjÓ Ÿ%÷jß´îíB®Ô³3¡´Ç¦v6¸Ð0¼ëùTÖ1‘o_¦¦MeáifÇT¢²dIIóîö‡ %ï‡V’"2>²* áo}ížOì§Õä} Ĥí§ÓOúù:ˆN‡…é;uVZA{v:¿#yÁˆ!œ§MÖÀ1w­uTzô“Äìø´(:`D,¬ÚÑsÍÅ<Ódùû„u¹ÒJCOÆè¥/å "ýŒë†më3õ÷ÉP¤škf“ДÕ׬ÃçðÍnÑu'͸ËË«|5W�ÒÃmr»CHZêçe\÷Šºé“à”JöÛó”¤S+Ø&x}I›þ߀ËÖÄnF¿Þø‘ühWþÞ¶(Ož9Ö‘èÚ4K5Õ$÷™½– ‘Ö} …Ø[©£à,5c“¶×ý"lc±¡k¡¹¤|ø¨ $o¤"XÜ„½åUça&†‹ÔÒt_K}ÒO%T]ì-й}¦Ü}Y•Ö·•IyØBbè&°^–0(§™‰íÜ’ýkŒdTGõˆ:±ï}p zk)š²Ó®µ³ÀÌ-!¸?BíàÙ à1ò9£xF¶ ÷æBû”¿V]ò;/<oáÕXCT~¹'¡HÑ5[ë«!3²š®bO#„»}åGâ269R\¤ñ¼`ë›®|NÛœa/¶÷ðÃ4ûÚÉï1+ Ç¹ˆemDO°¶ìŸpI@؆·YëìFmY¢¥Ù \,à!ÃêãV¡‚S GôH_ÿް]²£ô÷[ðïe»Ž!Ÿ“‡r”6âa_L%Õ—ÓwqSL¬ÔœX‹I×ÖçÍAˆ›„Mt½¸ß‹ [$Tž›ûõOÁ²r½8â«eo¶ˆËеöç V72Ês&f‡IÛ䙦Þ_ëóÕ¥(õÀ¼îzS¢„T‘ÑK2Úå£Э“~½1_§'L„-Q*=ö?-–ÑyJÇèTë< ô¾-QÜæry„öÁn{6X)Îãê)¥Ö«ÀÎàlŠ~ëî.­í3ë0|ÚÅx2…â*—*ÓpêȰoF± žÕÚØ0Dn ¾(Jÿt•õÖúAjVdz×êPÝJoñ‚¤XºÖV]ŽJ î& f£ "±7ß æ«RwB·â´Cùû®NÇ÷úâabëâKó5¯¿Ð¤ç|Îiqû=î™ë§ÍAøý f(­bcë½@þ¬Ú&¶äN¤t†:]N¼;½›>¡®�g¸É+Ìç5Ã?P©ž×ÍŒNDmmÈ<à6FMö¿3™-F×V;©Ï´1§¿æÃ`’VßðpÞO~Ó Åz*qydHâ iû±šÎÐ<R>œdZ¬ õgÑ”Ô4»ôž­J|©ùQXBs©°•úÞëY9F‘»^z-WIàN¨ôgì¾¢"ºÁªòÈçÆ?åäé6žË!hÎI ß×âòu¯/¶£Û…¿\ïkϧ9\¦ZAØ;×Õ¡’I°æî€ô¢ljóc³Zâ§ÿ”:«¼À¹ÉO¤_{ëî'%À(UÈÕK¡÷ËçbŸÆ03÷¸³8ñiƒoÍñQ‘ˆEC-¦ï+J#Vß`~Bx,KL.½§À›äo$Ú>ã¥Ûédÿ²aœòd²Ã)!"†…åiÜþ‡BmašŒ'¨4I÷ç©Ï¬…vLñŒÛÕ8-Át Ÿö¥¨†´ý–˨9œ[Àd-Mʘ²ÁF«¤=—dx»qj8W'z æ—xªn)d,*R(ty¿À ÿ (ðI´¹þVUõ(Næ86Àiœ”y±Õ„oµÈѳ§@\6sfC Gô39¢÷ä•¿Ã\´G¾çÕÀ–Œ‚z<êT¤‘‚MFïÌb`¢Öª)9!·ý-#†Õ&øÖû0Ce¿ˆ’7ºUíÀ>yŠâ.¿máÐüºLÅáK…“`F >ÚÛ¡#¿â6Û‡  úl-�c,}œô•äãK(‚*÷*‰*iP½Ói*…aWHÆ—Ÿœ¼6‰ôç\Î L%½¿†šj‰Ä›µ;­~„âP“ªA*he'5H…BëÁK§i×sˆ5{v©{µþ½Ø¢µœ-ɧv6 }Öhwþ:Ã/„ÈÎ îhö‹ÞE€rÜÁzúмÈ{Ù¦X¹ gIC´ùgèx,-¼5EÿAM¶rÞ:ßö¡jÅ.M}™;®›U‰a¯§õy÷ÚÑêÇv"€]+¯pÿ‰ZÀø°FúËÑÇNÃaÚL¢]4¦ûõ¯Rý›F Ú\-˜bÛB ÀÌ£¡W1}tátš¢àèŠÊDíƒ;¶ îÍèJIO,ÎÝ¡�Ï=zz8Óîp~;Çõç+´ Å:û !“C‚m¬päJÉ”´›4"þ𵢓8ìÁŒÅÈ£ƒ‡(MG¬Þæ†e!†–"]éÌ€öKº`D¢éµòÆ•@ŒVŠÈlîMÕï&¤_kÔÜ;ùL&0g ®Ô:T,ýÄ%〷¸nx¤Ï` |[Íþ‹Þêýu§•>㌅gw‡•‡¨÷yŒËóÊ0Vå±UB%}ެý€±Ž™R%IuŠX"Ž·,RÛd°«.V'¤&5@ >T¸¶›ü˜ð3ôbø°ö5ʇ(Ó‹Ì!ropš¤qÜw…¢’ØVÁÂÝ®&5¸Ç}?ÎÚænf¶YÖ·³¼Æ„a +P¼Ë”e$ >xA[ßwÓ€‰lu7QhŠhÝÅç<æÝÒræ¢k”áeJÕ5àï\Ù>WÀ�=, é|aÖT†QÚ ‡"ªè"ÂXHèÛr1¤~Àwh¹÷2Æ*­£/wÖž2»Q"êwéYn ° j³èbsc¢Ï”˜·âÂéSæ¨ ôhaî0\]8É£ÃbzS`õ°­#Ó}jÑWÞ÷éV|Â?õ_ýÊþ|ð´²ô&ÉPïs”&Ug¢®J3f3\¿%Wóñ²‡¤•Lô5OjÜÓý˜Ú¤äØÒÁÎé·ªæ%┺øßáîè¶dœö‰…/}d)NÞEÅx›kÒÙD0ÀÁ¾o„xUªNAP#K¼Ã °e}ÃÝ¿—r¸ì‡5°Òí…«”úIKt¨äδÛà“?ŠcóvTX•v’ûÕ4óz »×ç'uŒFn<Ø/žâFÁþJD¦0„"_/cµ6Ão¬t%óÜ҈QPpE›Ùrõ韈ğñ&ý&üƒÌªó—£Cvä œŽ¦ ÇKB<™Ÿ¥Â&ì7‚:Äi¿c"C »É]ÆáAQÖ½xÙz†€uñ¿–’£ã7Û½‚ꈕEçÛ$ë¹æA×KFW û8lÒØÓ‘œ¹k U<MYÅ×GH{!T>ºÁÏÝárnw2£0—êãEû"úÁáQV÷“ÿõ­0V®"qT�™DòÕ¬‡ [§îb'·è·$™Êýo¤½#1|†}¤ÙÏr|µÓ®¦”¬EúRÒ `,‡¼÷KjWlðn$!,ó5/òl&’r>û +Ì}ý¿ŠEZ|ÉTÙ½9²W"½¸å¯74CZ’Ž:°IG�E’2”l´ƒ×¹JE»Ÿ1ºZJkŠúÂUî´S:a¶Ù}ç¡/Ü,óäÿ_>Šô-`uIHo8Ÿ»LV¿ÌœTÓ’(X00MÎ!ßndüpj¥æ:ö¹º¥¥‚…y¾B4\#GjLº=Q)|ãU! §0WÀé‚©‡ó:•1U"âh ÐÖ? 
äcó |!éc€lNæ5:UªÃÈÉ4qyL{ZRÓÝ|Âx²ñ^ ʶXâ”zÊvF{,o[xžòï[G©ÞzOL(­©bÁ!ä®»š=‘óïBÀSú«.ýDê"A™¾ÕÇ ½ù7áä{§{¢Ø{_|b Ì >É:¦ò°<É’øc8;Á¡§ýBë2–L(3LŸ²8±15î¾Ç€QÁFšž[.$-ƺá™rÄ_­¿•ì¾ÚÚÛ·V½µŒ´Ý,fC£ú]ø$´ÄÖéä.´j À‰û~Rs2Õ-˜å•0‹Ú»¨ 2ÀŒŸ¤tqi˜þNÂI1#œ!qOÊá1¿A†ðr[>«û6­o«‰È‰©æj|êÊPåçqXʃ.î]œý’]äUPy4‚Á|ªÒ´)ë<Œ±#ùÅ>3Ä‚|!?3 Íø¬`¾pœMíèù}À/wQ´�º+E—Üü‚©îBõÐ#¶3Âó7üᎷ-yâ`/¹À528ò‡§M½Y°¤x—¾èL˜éͱÚeHMS‚™kð-ÝÀÎÑÌÂnøR˜|«NžuÔ»îÇêsv>¿Dûë‹5ço7P WýJcgvc ¥ß¢+ïÜ…Ö<|‘oh ~œÁÄ0æO,$“Ë•RGÄwˆ¼Rè}¼C¯ÈyøÑ_nèXò!ÂÁZ¾x^Ñã Z ¨Ó�”dùÀÒkC({¬.Ê](F #h±<냀„P.ŒêȪMþÌÖî¤GYĬº¸NŠGHÏä…ùÑ6@´‹¡Šþ¡±œªp /tÒõΛróÉ?à¢ñ«U-‰ö†¥ù©Rwx^’úÕPküÀ˜±ó@ã!ÍåœÃm^Û‡¾”u±©¥ ÞÑ%fNW:3|¡»»;Ô—,ÇÛÂIü%äBÒë8O×·šÚda¼?)šô5ùHz8Úà&{gV«é°lg¾±q©G©ëÛrv³~éÃÇ;0"¹•q»øIJ¹>( ²|1{�‚Þ¥$Ç?¸ö„>ŒüâÈŒû˜&ÁÀ36½,a=y%ã‰[eØQ˜c†$„Ùza±ôp·ÞŠFˆÆŽ)& ÿ£9†]£´Ì^êY`.îÈeõöѹJ•RÕdJOóŠrXu¡´H-ÕbãSÓL)Õê4ò®T®yš­$¥ F/)æ0à}¹q@â'U§t'ÿ+Ý)—)Ùâ2à^þ‰ÞfQK!Ê‹zø²Z~8Õñ$é/A/ýòy­{ÆÁnÒFh}g%CaX솨¦ä¯Àõü.Qäw/ã±ç8☿œ®'íÆà£ Ørr3¨¢1 Ì*Q~Ž‚[š)ïNÅjìÞó™•Σ±¬eÈÑfU°Çl¨×¯J”äð&®ˆó w·0X§Ê¿ &l TÖ,ÂFbýðÙ+•Ï’×æg¶H]'r¼IÍ}+ڑ݉\: KoS‹g>o•k¦Þ¬¡ÁÈøð )Hp€OÏt a}A®SæãªÙBÔÎk¼—G˜’©Î5#Íîló þÆþž}@ θ,Ël$0ýYP¬ ÜPº)0Þï@@l;ž4)° Ñ‹üœ4Í3%F‹ÖÛQw÷±M9Sd‚TO1NL¤&Bê>ˆ¤¡ŠõíªºØÂ˜wnð mƒÓ¾}{‡eµ¾16ðÌŽä³\±«ñƒH†Š¿ûLÁ%–o¤÷ƒD…gé¶ ¯9A(é–Öo4“[nCÿÅð߸£î ߌâ¼ic¼bùF‘c²£ ÷úNЦVNù™vwÙYQÑ7p.ž[¬]·-²^‚x2~|fÚ¹Lóºgû‚p‹ÓRŽà@.Aó ŠÁèì¹K^Ð0DëØLu7®BiÎÉš@ y²ÉÖ{?ybBo@¥à?ì}weÂ'Ó=aÔ³—â<ëQòE­Šð¼tͺù2B×nNé«tbów¥óñ¡ËE•†“‡¼j[`4Zâ5u¾ˆ–O”%å•vñÒú9ý¶»QاÆÍ0F¤+f6ùsI0E7k¡P?—%¦”ù{S[¬Æ“cÉ/,<†C‘Œí–»Þ0Õ+ã®…]»$ªÍì̺Äͯí‡In©ýkZH ÓŽ$U•l8Tú¨9î„PéVÔUŒL™y á\Pý½¬é2›dmΠ!ºéŠf¥àÜ|œ*Ä•‘‹Z7›òÉY|{‘[¡°®NeUˆ©ï#T ÍLøÚC¨äúÑRlJ1‰{bVŒþGŸEÎPœutÀ둯 ãêݪ{yð²’»Œ‡­€âirЃãRØ ¯&Ox@A‘¯rã:„È]Vg_tdµ*jÑNvxQ逦†èÁsù‚þ"|û¶‰J»:Ð?Y˜ ìò+áŽ[×Â3ÈVµœ·j¹|$"VÀ˜<ÄGäøG(sím´tÛŒ-̺ÍaTȲaG¨8|rÊ©¶˜špã#;¼²LW|•ÅÝ/h*ª|夅çGÕ'éé†w¤œp ´p”IÀÒ®<zã °PÏ ˜ûÊ|ÓúÙkýÌ:ŠÄ\[ö¬‘Ï=Maç×Ä‹i³†4ZöËììu^+ÃÛþ?ÈáÔ8òg¡Ü.­´ÌxÅ Fͤg·ÌËÑ¥ÍðçûÍ`vêßmžI²wÌ¿/ì¦á÷”W ²éì˜]m“qf–D`ôÜÞøUNãz©ÏÕD7mÄãÚ„BÔJÖEçtmôE²ôo6»d¯Íÿ2‚A ô³�È&úø9LA€;SCä‰ÈÔ ‰Š'Á¸•ã-�7`ø’ÜJö)â£vsÙp,XÎ4>-S²óA:¥ý~²¶@²ÈÚ0í ½ƒl¥˜ëÆ„ÆHâŸÕùº‚¯y¾w®å2Âxöé¼Õ4qa»Çë5Û½‹‹4°ÉK áuÞäª_BýXᵅǨð"&œ·0«Þú¯<r¨ûë[‰2åiĉè¢þð¬ÁÒó׋/—’?n†¤£àHˆK4x›¼n[µ%×* ^·+sN¤!|\͉¯ªYqq‡~‡ÔéE2ý2f„Š¡ÅsÞ5&ß²žë:Q&‚È’¸ ?Âì¨èɃa×¾ˆÆR ~ˆü~üÂý+’ÂQ$%‰î£3î‚<”؃¢> 7Sš•3z6ÎrÙˆwèo\žaš¼HóΑí#ÇwÕÞ3iæ$®&ç0œH‘Ël- Të‹QÓK¼§\r-©×ÇDzv óÁk>²˜AwF£ARK“�ô\"Ádé!rQH¼ßâ,w£ƒÜ?׌n‹.Ä-„íRÔ—®ôÍ•_1üf-  |Žó’+î^³hö¢WJH;üôy”åMà•ñNÜOC&± 2M–IçE­²:„å( Á¦ü@É(õ>Å "·º&ñëÀ ÎZ ŸŠ¡VmD_iÍ՗ܯÙóàIͨ^Éħ—Õþ–¿Ãjš‰#›¥¥wɲ‰3¸ý‡%›RæÚÇ­[^ž—ÇÔ=* NÝ^nb§è¢u€µð£P ¯Âi ï¿ÈД¼�¶_µårï×á’¼ÂÚMíÖe0oO§y­k .Ç7-r›¿ˆéž#ã^:}V*ÙIÇc»³¡¢N‡*xâp´E‚]þ\D qó¹ÒÂß ÷5æèˆŒ°+‰^måµO9ê²|²'Q7kÌj' úø‹­1ÕÅ\+øTþó¯ÙߨwP²Û6GãÅŠÅPm›Àë¿fŸª>Q(·He!ZÕ%)×<á¯W3‚M¯™àÃ%ûò“lb  Áimî±Z%¥n!P˜âƒkp�åÉå/Ú®æ+áë3Å×â£|«ï)e<ùÔP›ß ½ÇŸ‘×Çÿâ¨vÔÏîa–Ñpôå%L#IÜvAiŸhfF{cE)šÌsÍðj"“€YÞ~†à…}ꑎÝ(·úIi“©lGø4X‘O6×úký/¸:ÓxÍåæß(¥‚Ë^!c²«jˆ’¦’?^ДÊñ|Yó½,që£רKYÏóáªt:9j¢7Ý¿TÞöœI½î_‘ñzX L6ÌO¹Âæ™#qiR~͹ ·*v-ë¼z…Š<GiüòJ#·µ†:øÊ6ËAh>½¢hÉÆŠ—£M´²+ÔÏHz>¿êÊÒf£®Y¤­§#R´ƒP™ª.”pÖ¾²*ZÌT²K8àÉcнéEAÅeÁâ:l;ù.ôà›˜„Çîö‰o†ðU–gdè<–“Ö³9Ý€Ó¸Ä6·›q¡=Y´lÌtÿèÏͳþÔðõ§µµD¯Úä§êÇí�#d{T}U!^aÒIJWeq(=È’™P¶\®Ö$Æ·Oº,U= ƒMâð°3DÀôV« tèpÖÖ²[r sΦWíò•åošÙ“—ûÉhÎÌD¢äç{!Ðõ¦ÄôæÛ0ÌN«‚O®÷Ë‹«qŽe ½Fh(¦§ž,ŦŒg†ô¸ÂÈ”‘­±/ÔË]­&gF=à7å–K_ÜXÔ‰¹ú:ôñ{FFEðÛàêØ /ýnËçྡã%k@ÃÃQS—¥ð ;wûůÁ'¾búÉW­å¾g”ûgÆMÇ÷vm²\šÇGäPm—™¯TÄsKUÌÝØ²'p«7Ï´¡‘álä±ÝëNèo¾Ç -‡#¥0Glp?ÒÑ^Gòf‹ˆñó\à ‚“½|Î'Y2>+%Ò Çxç'° zH·U­Tn!öß’ôÊé1Û 1o]kcþ­Ùû„eI‡ÎèÇÕÉ^àdI•þm=‰GAÌnd.žêO›œôX‡Æ 2“8ù¸œ;Ši[*ðFN�{SRâI>×O¦Ûj¬ òên¡7Y˺²í×hR›Ræl2À:̶-øóÚ9"&4—¼f0Yá°r½<_ûQŸ¨wÝc¦Æ‚ÜñË¥™Üf‚ÝÝiúíº.ôÞÀ½æH̯«ZTËà_èQ¼àXñšè‡rzqë.h/ (ª":l=ù°9ù Ô»å·C~%×ð´S§¡4~.ÉÆûóј¸;þ­˜;PBCµ¨kº3 âË÷X ­Œ”Fθ ÊÕ"Apa?YM Pé²ÂþhG,…h6;×f1Î!¾+óZ}¯Ê*J‡d-2ÓR™×¬ˆQHêA"iŠG‰Ç$ûz•1«˜åqÒÚ®Mn—øIâ˜=qgÚÄBj.W!ƒO¯~xsw´›ñšïãjEj°aØ‘ÓÐ…çyØÆtçìZÝCKc6ÿ‹äYÕÌ×iN–»"ml^Dh¯êLñrxÖAŸ) •SnºŒ¯FÃêÆ"W1+»Ç*šé/âP9[ÈW>¯hyòíSÛG}·G#u 
ñþäîñ)ªÊõ¥H8î­V4<F—ö’Ð>gÐ(ˆš(¤Õ}8ižÿ&ôîùîôf£ã‹\ë­7.]õ/Ú âà#äw¨ºøÜ*6ŽÇ©½ 04ÖƒŒ3*¤;ŽÞ7ø¢ËºÙãÈÈç[š¾Á´Ö ?žâ¢l¼¬®'ê7(l›¶m|§v”i@ã׉[ÂÚ,Œ_îºýù³fGþyÐ,rÀÜÙÿýRô ööbÚµ'¦YöI°ùAˆwÅ lq9÷w†e·b9âùB�ò9V9ÙyëVê • €6蔌YûZÅ, í'ÏíEP>Ï7X'ð6žK®möp-ô§Çm"L€þ“Åi‘•»WTí-ÊÛ*dÙ ¶2úÙdS£^}qŒiìž¶'Åàó!A3Ýx%ÎÒO1i]zþá­ó¸1NÞ¤w‘ÇWUêD.IŸJÕJ_Ša6lz9XªùYi‘>šJ¾½ ¨Àp.WU0pžÈshJbh…Cš½Q²¿¼]Ø~Ǥ®„»a¡T½žN<P Î+´°‹Ì[ß®ÇÜ)Ü…kòªÜ(nKÆÅ ÌËøfXÛ‡qZñEX¹ri2 –¯¼ßlÝÙ‡°3EÔ“èt7ª*úÆ’ºà‹¾²°^)$7;=XYÇ ±ºáúwÝ®ÞúóFl>&}„Fxãáð~ù¾è« Ù[àQ<?¦b5ò†•Ã8»z}´m$»–»8²]pá¦ÊñµõƒI»¬äÍüàžL{õäQŠñÝpŠí \u*,}a2 e ·àb|ï’qÜðůçÊŸ‚ ~~×q«w"¥óéxä?,fpP7’©÷ØÞ'úüº¢Ü‡ô,e#Æ# ã®æøÅ~tk#í;žÐ€¤ÖêJ™´Ym¿*°&±ž<•7orkH£˜Eßz×'©×Ã{0Ç2</�×z,²c.ÎÖb¬¿º Üù·X˜ÁÞ(©z†lʃhVQ žòïX-`0xäL·~bP LªLž ®¹àĸ<Rºö\C‹Ï­•¤b‡É’õ©ó‘©Rkªpu¿|JŽ?ÜtцN·cØ{¡ìÓ®uÃõë Í¢39¡zØE™ÎiT>íÖ':–S~U®þ2Þ|‹–H ¡Œ~¦h'+uÈ·£€¹q’Hj¢HXU·CÁ5‹êLžÄ ×÷k/-¬¶9aò¸øLEoZÄøeêÆfËÈÙ†ÀUµc\° ÷§hA "¾¶•hõ¦¡‹¼åÔgF›—ÉÏ\Ü,üb²-¹0#Ñx¡®¤*Ïjýy…l›_÷g à«Ü¸Ý2y%mn°ßfOehr®Ÿ áü3ø ÇÚýݱîX(Är*îáßÕìÆK,$‰£1±œiJ“ö‡,¿ä|¯?ÍàL]Ã$¿r?ìCZ:ħ¥îòûóJæ n°ï²’û“¿I°µ÷É—Yéгkî4ß´&WÛN¨ŸªÍWUüÆÒ]Õ¿øJ¼_çZÐ íQ—Ô)¸É§Òk ¶éJ(DvEDÒmÛÒ~u¡ÌñŽ!…dXT‹Fê+áÓ6ô™ø‘/Rg¾j%ñiPaíV˜þ)G|÷îøõJ ÷¯¬¦#@ªß^ô1Ñ>ÈãëÖ ƒú?}ù<¦¨èFªL¡w:• îóû}šúr\Æ�èrj“¤È(Ú'ÒöG·Ók`*&"ËúšÓl¿(*´hÍÛË‘lün˜7‰“…¸ƒ¤yT„mÜ»÷Y‡>…$k¾ÆQïþ&Z N”¬aá3 Ùt6¤¤*x= ²©1#cYÞ ‘7M–Fa(·¥£Q ù}Ì:NÅИfW—ÝÊWF $2ùÐ9Xß`kåxu¯>pÌt…À×1?ßÄïøUÛZñDåÂRx:† š Á|²n×,¸îÓkƒ]T\GK§„ðW1ý€Lm¦RŒ´'ûxo×ðC5§Olñ´ê]³™\å̶l¥8&NÞÈä ––Ϥõ^ÝU±9PGœE`xª®Q!O/?²`Û§ýÒߢ!±»½ÑyCË|»iUS×Y&O™úb7‘ˆ6pëŠY~l—™ÕÉM[™í ®K)a Hkùf@•^ªP÷ýÉ/:„::Ö¬¿>š{‡qºâ±(�HÒðjyms’˜¯I~öö‡Ò¹öƒSÓ‡‚(¸:„¦<ƒ¥§œ•Ξ-q± ³t:s›æk¼W^1Þú’‹ú­ƒE0±ŸEÔÄnåźÇÌþK ¼b¾TÅårTD˜—vÝXÏÛ)è¤ia¡¬a…*òâ/k‹ƒP뉋ÓZ!†Wèâ‡a•Ïé z'?¼ÆQ!öü/qÏË:~Á‚¯%ŸË¦Â‘Å’ ó—xN,Ì¿9¯:ä'ÑÃNDsT¡Ì)ù€zx}Éÿa†®jä10‹2D"ro«KöE¥©&Š~Ä·IïÊñÚt“9”“¢ëƒÙáËk4ƒÑôB?·ä6‰3bø\ ®ƒ~Ëa›ýL|Þ wü€9+¼<þª[ò›®Žhß:†MûÑûõÅ£Ñ{÷y Ì—ý4Î~´…Y Œ½* I_Ú¼@nGB «Ëø—Ä KÈÖpvëéTΨ¸F(ùÇqyœI‰P„ÛfŸ‰ûPW°×7óÜ.±æD«auzV"Òò'þ”J ;oKfÅMÉ0ß®®ï`?HÞ|û®ÄU?2§{ŸePTþlÝ!&½ÍjFü–’6À’ ùüüm“ ½fáhÛliF¾ðѰcõñw²^{–U£dÌËÇb]kì|ƒåØâöéÑWXaT¾Í Ô…üîk2«øö¤´C÷Jè¯ íO" [£¹³s‰q牼DW;iAüMZ\»ò²eÔNýÄñuUƒJª•š<–`¿mÊÈn_Ü×eø4Ζ-dçÞ¼”»¢xÝ(‹¬È0:¶•ì½£¡¦Ýg^äX žQLØõí3~§qR\ ôÖQ ø—l¼Íã°A— Z*•·e&ö9¡ágè]É`c–`Õ[ÀÓ#›u·å&ÑÏ9‹­•¶žÏP™ßÝÛGÛðdmìYoXºéç²sÇçÖ€;dÛ ¿YŸŠ™Nà“ܪ6òéÙ6ÀFM—E¨Øïð”‚Û¹D#nË‹×æeÇž§=ž¨šfYóUWrÆoÖÚðS�àќўduί÷{õžM?lþ|h”Ó@µ²mñöØBä Wa‡H¼mbk´×Rej´úªÛ3°~s(mÛü»¶%É›©•seK4é‰^Ôk3ª`¾~D‰¦ïé°~YPKý]€>Ƨ³%!§oŽwU“D„ º¸™/îÀ ­þÌïÏnRØ$˜øæÚ|ñÑËjU£ú>O‹¡U°ÉHÆ¿F„o)pFX¸+Ý Ÿ-tÄ¡c™Ãê 7ÏüA1ëµ-9N#’ç ¿ª—il¾Vïû#t~Säý ­Æ¬A¯TòŸ«XÍ=kZˆ›ÌXãC»‹Úeï3Šõ"¶`º°¹W_ 7’–hãm¤nÂ2ÅñJFˤ~l•–;ůQ]NѦ üâÜf&|s®bë±Äf@Zçç‚ã pÛÈœ+ƒCÏB^îÊé{Ì#¢A‚pßÜ—«]¹„œ¯†s³àŠ#4Þsµ?-W˜¬‘¸yÅß<!‡+P¾| †!!›ñ‡w{ºZ¸åZÕ]ÆF@}(!dÓ"ŸëN¥äÔDÅ+JCþYƒß¡T"²ƒ|Öh¶;gV¾""5@˜WÞP¦·…íÖê@{2ØÕ8&‘§Ì6jèþ}£Ü»ªñ5@÷r[í™óÛO°×2äIÎË^Ó6ü LÚÉ£.ßÓ$2Æåç� ]ÛÒ\$æ-Ò*h’ZÄ9³€ A£~:—Üì< J2™ËÕ˜‹“‹[Ï'3u'hTñ`HRx›t‹ÇçÄ` Ô–Þ Q•Å£u^toª…;ŽôBâ¤}Boõê·ZÔ‰½¢ižLÁa‚JâÜo?’øê3&æeúÁЧWæoË”ý€Ï·àž/ÍÝ»È_—¼éK#æ }1G¯¹¯¹dBuÜq,ýR¨Í,1_æ¬Gëz\κÄm"N;|¿ñI/ ™ä‰uq`¾…â4í+³0·âœƒ&¸’Ò>½¡ÿÅ—úcñ3ý´e w {”’Âeæîm9|V¶Ú’;ÆÆø`¥T®Gb­35¾y×ÍÙÂúF©‰§”Ìý9­KØÔ¯²:%8ä`{_Ÿâ ©¡*rÀå§Î®ë µP:„œ85OƒyX¹Ï¯Õ¸JÂóˆ<_šà®­ƒæT)²ÏOIµ’Õ^ÿ¡{Ò΢R¸»9¸WÉíàW ¥ÞTŸ£6ô„E^z!íáXóOdM§éïãÆÕëp1EŽ•¶íVŠô ž!ó†[JÆù‰k}–ô )ëŠ?;fE1Öʺê"¢Öš€Ô¾M]skÊœ `/þjN¸Q5ļŹÂY<%‘Yr^„ 0Ì‹ýQº½×§3ÏrúÜó½òùôTùgy}à+Ø2¢í+ãCC}[+ÿŠ*T_>"g5ı}éöMf™ºú¬®¾Ò•‘Måg~¨*” ÞZßXDµG×o“M—¢ÑMøê2‘qw’ âÔUs”»Z¿Eï³_›Ê×|±5ð,Éfç1=S A›e¹ÑrNš- _C\©J ‘">@Ça{.ˆ“aàE!GÿñurQÎ9xã‘D"°nÆt06ðÏ |‹ñIÓ™GOmTÕK\Û” ššžidG|RïýF,ŽƒGfXò¡²ëê­™BÄ_%3W®ä*EüÁ Þ§×ûU„]‹z!:-Kf¹8h+÷•[ÅfûsHbüí¸è ¿Rä¨ç"/‚áÀhBÑh¢s»@?ê“ê]®¾aŸ©¥×Å/P ½BÈ|®aL_±^rÉY2Hj¯ÀŠ3ÀOe/6“‚8¶ãKˆ;?3„Øý û?1,ÍY endstream endobj 507 0 obj << /Type /FontDescriptor /FontName /ZQPAEW+CMTT12 /Flags 4 /FontBBox [-1 -234 524 695] /Ascent 611 /CapHeight 611 /Descent -222 /ItalicAngle 0 /StemV 65 /XHeight 431 /CharSet 
(/A/C/D/E/F/G/H/I/J/L/M/N/O/P/R/S/T/U/V/W/Y/a/asciitilde/asterisk/at/b/backslash/bar/c/colon/d/e/equal/f/five/four/g/greater/h/hyphen/i/j/k/l/m/n/nine/numbersign/o/one/p/parenleft/parenright/percent/period/plus/q/question/quoteright/r/s/semicolon/seven/six/slash/t/three/two/u/v/w/x/y/z/zero) /FontFile 506 0 R >> endobj 246 0 obj << /Type /Font /Subtype /Type1 /BaseFont /GVXNBV+CMBX12 /FontDescriptor 479 0 R /FirstChar 12 /LastChar 121 /Widths 474 0 R >> endobj 281 0 obj << /Type /Font /Subtype /Type1 /BaseFont /PKFRMM+CMCSC10 /FontDescriptor 481 0 R /FirstChar 45 /LastChar 118 /Widths 473 0 R >> endobj 385 0 obj << /Type /Font /Subtype /Type1 /BaseFont /MJYNQL+CMITT10 /FontDescriptor 483 0 R /FirstChar 45 /LastChar 118 /Widths 466 0 R >> endobj 391 0 obj << /Type /Font /Subtype /Type1 /BaseFont /CQTGRC+CMMI12 /FontDescriptor 485 0 R /FirstChar 58 /LastChar 120 /Widths 465 0 R >> endobj 450 0 obj << /Type /Font /Subtype /Type1 /BaseFont /GGQRTC+CMMI8 /FontDescriptor 487 0 R /FirstChar 110 /LastChar 110 /Widths 463 0 R >> endobj 330 0 obj << /Type /Font /Subtype /Type1 /BaseFont /SIHFPR+CMR10 /FontDescriptor 489 0 R /FirstChar 12 /LastChar 121 /Widths 468 0 R >> endobj 244 0 obj << /Type /Font /Subtype /Type1 /BaseFont /MZWQBB+CMR12 /FontDescriptor 491 0 R /FirstChar 11 /LastChar 121 /Widths 476 0 R >> endobj 243 0 obj << /Type /Font /Subtype /Type1 /BaseFont /HDSLOI+CMR17 /FontDescriptor 493 0 R /FirstChar 12 /LastChar 120 /Widths 477 0 R >> endobj 328 0 obj << /Type /Font /Subtype /Type1 /BaseFont /EJASTL+CMR7 /FontDescriptor 495 0 R /FirstChar 49 /LastChar 49 /Widths 469 0 R >> endobj 327 0 obj << /Type /Font /Subtype /Type1 /BaseFont /PXOHER+CMR8 /FontDescriptor 497 0 R /FirstChar 49 /LastChar 50 /Widths 470 0 R >> endobj 289 0 obj << /Type /Font /Subtype /Type1 /BaseFont /UEUMRH+CMSY10 /FontDescriptor 499 0 R /FirstChar 0 /LastChar 106 /Widths 472 0 R >> endobj 449 0 obj << /Type /Font /Subtype /Type1 /BaseFont /WQDNZG+CMSY8 /FontDescriptor 501 0 R /FirstChar 0 /LastChar 48 /Widths 464 0 R >> endobj 324 0 obj << /Type /Font /Subtype /Type1 /BaseFont /BWJTEN+CMTI12 /FontDescriptor 503 0 R /FirstChar 12 /LastChar 121 /Widths 471 0 R >> endobj 331 0 obj << /Type /Font /Subtype /Type1 /BaseFont /GGZACX+CMTT10 /FontDescriptor 505 0 R /FirstChar 46 /LastChar 118 /Widths 467 0 R >> endobj 245 0 obj << /Type /Font /Subtype /Type1 /BaseFont /ZQPAEW+CMTT12 /FontDescriptor 507 0 R /FirstChar 35 /LastChar 126 /Widths 475 0 R >> endobj 248 0 obj << /Type /Pages /Count 6 /Parent 508 0 R /Kids [214 0 R 278 0 R 286 0 R 294 0 R 300 0 R 305 0 R] >> endobj 332 0 obj << /Type /Pages /Count 6 /Parent 508 0 R /Kids [314 0 R 334 0 R 340 0 R 358 0 R 376 0 R 382 0 R] >> endobj 392 0 obj << /Type /Pages /Count 6 /Parent 508 0 R /Kids [388 0 R 395 0 R 399 0 R 403 0 R 407 0 R 411 0 R] >> endobj 418 0 obj << /Type /Pages /Count 6 /Parent 508 0 R /Kids [415 0 R 420 0 R 425 0 R 429 0 R 433 0 R 437 0 R] >> endobj 444 0 obj << /Type /Pages /Count 5 /Parent 508 0 R /Kids [441 0 R 446 0 R 452 0 R 456 0 R 460 0 R] >> endobj 508 0 obj << /Type /Pages /Count 29 /Kids [248 0 R 332 0 R 392 0 R 418 0 R 444 0 R] >> endobj 509 0 obj << /Type /Outlines /First 7 0 R /Last 211 0 R /Count 7 >> endobj 211 0 obj << /Title 212 0 R /A 209 0 R /Parent 509 0 R /Prev 195 0 R >> endobj 207 0 obj << /Title 208 0 R /A 205 0 R /Parent 195 0 R /Prev 203 0 R >> endobj 203 0 obj << /Title 204 0 R /A 201 0 R /Parent 195 0 R /Prev 199 0 R /Next 207 0 R >> endobj 199 0 obj << /Title 200 0 R /A 197 0 R /Parent 195 0 R /Next 203 0 R >> endobj 195 0 
obj << /Title 196 0 R /A 193 0 R /Parent 509 0 R /Prev 147 0 R /Next 211 0 R /First 199 0 R /Last 207 0 R /Count -3 >> endobj 191 0 obj << /Title 192 0 R /A 189 0 R /Parent 147 0 R /Prev 187 0 R >> endobj 187 0 obj << /Title 188 0 R /A 185 0 R /Parent 147 0 R /Prev 183 0 R /Next 191 0 R >> endobj 183 0 obj << /Title 184 0 R /A 181 0 R /Parent 147 0 R /Prev 179 0 R /Next 187 0 R >> endobj 179 0 obj << /Title 180 0 R /A 177 0 R /Parent 147 0 R /Prev 175 0 R /Next 183 0 R >> endobj 175 0 obj << /Title 176 0 R /A 173 0 R /Parent 147 0 R /Prev 171 0 R /Next 179 0 R >> endobj 171 0 obj << /Title 172 0 R /A 169 0 R /Parent 147 0 R /Prev 167 0 R /Next 175 0 R >> endobj 167 0 obj << /Title 168 0 R /A 165 0 R /Parent 147 0 R /Prev 163 0 R /Next 171 0 R >> endobj 163 0 obj << /Title 164 0 R /A 161 0 R /Parent 147 0 R /Prev 159 0 R /Next 167 0 R >> endobj 159 0 obj << /Title 160 0 R /A 157 0 R /Parent 147 0 R /Prev 155 0 R /Next 163 0 R >> endobj 155 0 obj << /Title 156 0 R /A 153 0 R /Parent 147 0 R /Prev 151 0 R /Next 159 0 R >> endobj 151 0 obj << /Title 152 0 R /A 149 0 R /Parent 147 0 R /Next 155 0 R >> endobj 147 0 obj << /Title 148 0 R /A 145 0 R /Parent 509 0 R /Prev 87 0 R /Next 195 0 R /First 151 0 R /Last 191 0 R /Count -11 >> endobj 143 0 obj << /Title 144 0 R /A 141 0 R /Parent 87 0 R /Prev 139 0 R >> endobj 139 0 obj << /Title 140 0 R /A 137 0 R /Parent 87 0 R /Prev 135 0 R /Next 143 0 R >> endobj 135 0 obj << /Title 136 0 R /A 133 0 R /Parent 87 0 R /Prev 131 0 R /Next 139 0 R >> endobj 131 0 obj << /Title 132 0 R /A 129 0 R /Parent 87 0 R /Prev 127 0 R /Next 135 0 R >> endobj 127 0 obj << /Title 128 0 R /A 125 0 R /Parent 87 0 R /Prev 107 0 R /Next 131 0 R >> endobj 123 0 obj << /Title 124 0 R /A 121 0 R /Parent 107 0 R /Prev 119 0 R >> endobj 119 0 obj << /Title 120 0 R /A 117 0 R /Parent 107 0 R /Prev 115 0 R /Next 123 0 R >> endobj 115 0 obj << /Title 116 0 R /A 113 0 R /Parent 107 0 R /Prev 111 0 R /Next 119 0 R >> endobj 111 0 obj << /Title 112 0 R /A 109 0 R /Parent 107 0 R /Next 115 0 R >> endobj 107 0 obj << /Title 108 0 R /A 105 0 R /Parent 87 0 R /Prev 103 0 R /Next 127 0 R /First 111 0 R /Last 123 0 R /Count -4 >> endobj 103 0 obj << /Title 104 0 R /A 101 0 R /Parent 87 0 R /Prev 99 0 R /Next 107 0 R >> endobj 99 0 obj << /Title 100 0 R /A 97 0 R /Parent 87 0 R /Prev 95 0 R /Next 103 0 R >> endobj 95 0 obj << /Title 96 0 R /A 93 0 R /Parent 87 0 R /Prev 91 0 R /Next 99 0 R >> endobj 91 0 obj << /Title 92 0 R /A 89 0 R /Parent 87 0 R /Next 95 0 R >> endobj 87 0 obj << /Title 88 0 R /A 85 0 R /Parent 509 0 R /Prev 43 0 R /Next 147 0 R /First 91 0 R /Last 143 0 R /Count -10 >> endobj 83 0 obj << /Title 84 0 R /A 81 0 R /Parent 43 0 R /Prev 79 0 R >> endobj 79 0 obj << /Title 80 0 R /A 77 0 R /Parent 43 0 R /Prev 75 0 R /Next 83 0 R >> endobj 75 0 obj << /Title 76 0 R /A 73 0 R /Parent 43 0 R /Prev 55 0 R /Next 79 0 R >> endobj 71 0 obj << /Title 72 0 R /A 69 0 R /Parent 55 0 R /Prev 67 0 R >> endobj 67 0 obj << /Title 68 0 R /A 65 0 R /Parent 55 0 R /Prev 63 0 R /Next 71 0 R >> endobj 63 0 obj << /Title 64 0 R /A 61 0 R /Parent 55 0 R /Prev 59 0 R /Next 67 0 R >> endobj 59 0 obj << /Title 60 0 R /A 57 0 R /Parent 55 0 R /Next 63 0 R >> endobj 55 0 obj << /Title 56 0 R /A 53 0 R /Parent 43 0 R /Prev 51 0 R /Next 75 0 R /First 59 0 R /Last 71 0 R /Count -4 >> endobj 51 0 obj << /Title 52 0 R /A 49 0 R /Parent 43 0 R /Prev 47 0 R /Next 55 0 R >> endobj 47 0 obj << /Title 48 0 R /A 45 0 R /Parent 43 0 R /Next 51 0 R >> endobj 43 0 obj << /Title 44 0 R /A 41 0 R /Parent 509 0 R 
/Prev 39 0 R /Next 87 0 R /First 47 0 R /Last 83 0 R /Count -6 >> endobj 39 0 obj << /Title 40 0 R /A 37 0 R /Parent 509 0 R /Prev 7 0 R /Next 43 0 R >> endobj 35 0 obj << /Title 36 0 R /A 33 0 R /Parent 7 0 R /Prev 31 0 R >> endobj 31 0 obj << /Title 32 0 R /A 29 0 R /Parent 7 0 R /Prev 27 0 R /Next 35 0 R >> endobj 27 0 obj << /Title 28 0 R /A 25 0 R /Parent 7 0 R /Prev 23 0 R /Next 31 0 R >> endobj 23 0 obj << /Title 24 0 R /A 21 0 R /Parent 7 0 R /Prev 19 0 R /Next 27 0 R >> endobj 19 0 obj << /Title 20 0 R /A 17 0 R /Parent 7 0 R /Prev 15 0 R /Next 23 0 R >> endobj 15 0 obj << /Title 16 0 R /A 13 0 R /Parent 7 0 R /Prev 11 0 R /Next 19 0 R >> endobj 11 0 obj << /Title 12 0 R /A 9 0 R /Parent 7 0 R /Next 15 0 R >> endobj 7 0 obj << /Title 8 0 R /A 5 0 R /Parent 509 0 R /Next 39 0 R /First 11 0 R /Last 35 0 R /Count -7 >> endobj 510 0 obj << /Names [(Doc-Start) 242 0 R (Hfootnote.1) 329 0 R (Item.1) 308 0 R (Item.10) 323 0 R (Item.11) 325 0 R (Item.12) 326 0 R] /Limits [(Doc-Start) (Item.12)] >> endobj 511 0 obj << /Names [(Item.13) 337 0 R (Item.14) 338 0 R (Item.15) 343 0 R (Item.16) 344 0 R (Item.17) 345 0 R (Item.18) 346 0 R] /Limits [(Item.13) (Item.18)] >> endobj 512 0 obj << /Names [(Item.19) 347 0 R (Item.2) 309 0 R (Item.20) 348 0 R (Item.21) 349 0 R (Item.22) 350 0 R (Item.23) 351 0 R] /Limits [(Item.19) (Item.23)] >> endobj 513 0 obj << /Names [(Item.24) 352 0 R (Item.25) 353 0 R (Item.26) 354 0 R (Item.27) 355 0 R (Item.28) 356 0 R (Item.29) 361 0 R] /Limits [(Item.24) (Item.29)] >> endobj 514 0 obj << /Names [(Item.3) 310 0 R (Item.30) 362 0 R (Item.31) 363 0 R (Item.32) 364 0 R (Item.33) 365 0 R (Item.34) 366 0 R] /Limits [(Item.3) (Item.34)] >> endobj 515 0 obj << /Names [(Item.35) 367 0 R (Item.36) 368 0 R (Item.37) 369 0 R (Item.38) 370 0 R (Item.39) 371 0 R (Item.4) 317 0 R] /Limits [(Item.35) (Item.4)] >> endobj 516 0 obj << /Names [(Item.40) 372 0 R (Item.41) 373 0 R (Item.42) 374 0 R (Item.43) 379 0 R (Item.44) 380 0 R (Item.5) 318 0 R] /Limits [(Item.40) (Item.5)] >> endobj 517 0 obj << /Names [(Item.6) 319 0 R (Item.7) 320 0 R (Item.8) 321 0 R (Item.9) 322 0 R (page.1) 241 0 R (page.10) 360 0 R] /Limits [(Item.6) (page.10)] >> endobj 518 0 obj << /Names [(page.11) 378 0 R (page.12) 384 0 R (page.13) 390 0 R (page.14) 397 0 R (page.15) 401 0 R (page.16) 405 0 R] /Limits [(page.11) (page.16)] >> endobj 519 0 obj << /Names [(page.17) 409 0 R (page.18) 413 0 R (page.19) 417 0 R (page.2) 280 0 R (page.20) 422 0 R (page.21) 427 0 R] /Limits [(page.17) (page.21)] >> endobj 520 0 obj << /Names [(page.22) 431 0 R (page.23) 435 0 R (page.24) 439 0 R (page.25) 443 0 R (page.26) 448 0 R (page.27) 454 0 R] /Limits [(page.22) (page.27)] >> endobj 521 0 obj << /Names [(page.28) 458 0 R (page.29) 462 0 R (page.3) 288 0 R (page.4) 296 0 R (page.5) 302 0 R (page.6) 307 0 R] /Limits [(page.28) (page.6)] >> endobj 522 0 obj << /Names [(page.7) 316 0 R (page.8) 336 0 R (page.9) 342 0 R (section*.1) 247 0 R (section.1) 6 0 R (section.2) 38 0 R] /Limits [(page.7) (section.2)] >> endobj 523 0 obj << /Names [(section.3) 42 0 R (section.4) 86 0 R (section.5) 146 0 R (section.6) 194 0 R (section.7) 210 0 R (subsection.1.1) 10 0 R] /Limits [(section.3) (subsection.1.1)] >> endobj 524 0 obj << /Names [(subsection.1.2) 14 0 R (subsection.1.3) 18 0 R (subsection.1.4) 22 0 R (subsection.1.5) 26 0 R (subsection.1.6) 30 0 R (subsection.1.7) 34 0 R] /Limits [(subsection.1.2) (subsection.1.7)] >> endobj 525 0 obj << /Names [(subsection.3.1) 46 0 R (subsection.3.2) 50 0 R (subsection.3.3) 54 0 R 
(subsection.3.4) 74 0 R (subsection.3.5) 78 0 R (subsection.3.6) 82 0 R] /Limits [(subsection.3.1) (subsection.3.6)] >> endobj 526 0 obj << /Names [(subsection.4.1) 90 0 R (subsection.4.10) 142 0 R (subsection.4.2) 94 0 R (subsection.4.3) 98 0 R (subsection.4.4) 102 0 R (subsection.4.5) 106 0 R] /Limits [(subsection.4.1) (subsection.4.5)] >> endobj 527 0 obj << /Names [(subsection.4.6) 126 0 R (subsection.4.7) 130 0 R (subsection.4.8) 134 0 R (subsection.4.9) 138 0 R (subsection.5.1) 150 0 R (subsection.5.10) 186 0 R] /Limits [(subsection.4.6) (subsection.5.10)] >> endobj 528 0 obj << /Names [(subsection.5.11) 190 0 R (subsection.5.2) 154 0 R (subsection.5.3) 158 0 R (subsection.5.4) 162 0 R (subsection.5.5) 166 0 R (subsection.5.6) 170 0 R] /Limits [(subsection.5.11) (subsection.5.6)] >> endobj 529 0 obj << /Names [(subsection.5.7) 174 0 R (subsection.5.8) 178 0 R (subsection.5.9) 182 0 R (subsection.6.1) 198 0 R (subsection.6.2) 202 0 R (subsection.6.3) 206 0 R] /Limits [(subsection.5.7) (subsection.6.3)] >> endobj 530 0 obj << /Names [(subsubsection.3.3.1) 58 0 R (subsubsection.3.3.2) 62 0 R (subsubsection.3.3.3) 66 0 R (subsubsection.3.3.4) 70 0 R (subsubsection.4.5.1) 110 0 R (subsubsection.4.5.2) 114 0 R] /Limits [(subsubsection.3.3.1) (subsubsection.4.5.2)] >> endobj 531 0 obj << /Names [(subsubsection.4.5.3) 118 0 R (subsubsection.4.5.4) 122 0 R] /Limits [(subsubsection.4.5.3) (subsubsection.4.5.4)] >> endobj 532 0 obj << /Kids [510 0 R 511 0 R 512 0 R 513 0 R 514 0 R 515 0 R] /Limits [(Doc-Start) (Item.4)] >> endobj 533 0 obj << /Kids [516 0 R 517 0 R 518 0 R 519 0 R 520 0 R 521 0 R] /Limits [(Item.40) (page.6)] >> endobj 534 0 obj << /Kids [522 0 R 523 0 R 524 0 R 525 0 R 526 0 R 527 0 R] /Limits [(page.7) (subsection.5.10)] >> endobj 535 0 obj << /Kids [528 0 R 529 0 R 530 0 R 531 0 R] /Limits [(subsection.5.11) (subsubsection.4.5.4)] >> endobj 536 0 obj << /Kids [532 0 R 533 0 R 534 0 R 535 0 R] /Limits [(Doc-Start) (subsubsection.4.5.4)] >> endobj 537 0 obj << /Dests 536 0 R >> endobj 538 0 obj << /Type /Catalog /Pages 508 0 R /Outlines 509 0 R /Names 537 0 R /PageMode/UseOutlines /OpenAction 213 0 R >> endobj 539 0 obj << /Author()/Title()/Subject()/Creator(LaTeX with hyperref package)/Producer(pdfTeX-1.40.3)/Keywords() /CreationDate (D:20110722120813+01'00') /ModDate (D:20110722120813+01'00') /Trapped /False /PTEX.Fullbanner (This is pdfTeX using libpoppler, Version 3.141592-1.40.3-2.2 (Web2C 7.5.6) kpathsea version 3.5.6) >> endobj xref 0 540 0000000001 65535 f 0000000002 00000 f 0000000003 00000 f 0000000004 00000 f 0000000000 00000 f 0000000015 00000 n 0000016314 00000 n 0000192553 00000 n 0000000060 00000 n 0000000116 00000 n 0000019100 00000 n 0000192481 00000 n 0000000166 00000 n 0000000202 00000 n 0000019159 00000 n 0000192395 00000 n 0000000253 00000 n 0000000288 00000 n 0000019218 00000 n 0000192309 00000 n 0000000339 00000 n 0000000366 00000 n 0000019276 00000 n 0000192223 00000 n 0000000417 00000 n 0000000440 00000 n 0000022852 00000 n 0000192137 00000 n 0000000491 00000 n 0000000518 00000 n 0000022911 00000 n 0000192051 00000 n 0000000569 00000 n 0000000593 00000 n 0000025620 00000 n 0000191978 00000 n 0000000644 00000 n 0000000665 00000 n 0000025679 00000 n 0000191891 00000 n 0000000711 00000 n 0000000754 00000 n 0000032409 00000 n 0000191766 00000 n 0000000800 00000 n 0000000828 00000 n 0000032468 00000 n 0000191692 00000 n 0000000879 00000 n 0000000909 00000 n 0000032527 00000 n 0000191605 00000 n 0000000960 00000 n 0000000994 00000 n 0000035112 00000 n 
0000191481 00000 n 0000001045 00000 n 0000001086 00000 n 0000035171 00000 n 0000191407 00000 n 0000001142 00000 n 0000001184 00000 n 0000036970 00000 n 0000191320 00000 n 0000001240 00000 n 0000001274 00000 n 0000037747 00000 n 0000191233 00000 n 0000001330 00000 n 0000001364 00000 n 0000040089 00000 n 0000191159 00000 n 0000001420 00000 n 0000001454 00000 n 0000040327 00000 n 0000191072 00000 n 0000001505 00000 n 0000001544 00000 n 0000043074 00000 n 0000190985 00000 n 0000001595 00000 n 0000001622 00000 n 0000043133 00000 n 0000190911 00000 n 0000001673 00000 n 0000001708 00000 n 0000045847 00000 n 0000190783 00000 n 0000001754 00000 n 0000001793 00000 n 0000045906 00000 n 0000190709 00000 n 0000001844 00000 n 0000001876 00000 n 0000050880 00000 n 0000190622 00000 n 0000001927 00000 n 0000001965 00000 n 0000050939 00000 n 0000190533 00000 n 0000002016 00000 n 0000002073 00000 n 0000053458 00000 n 0000190442 00000 n 0000002125 00000 n 0000002167 00000 n 0000055342 00000 n 0000190311 00000 n 0000002219 00000 n 0000002253 00000 n 0000055402 00000 n 0000190232 00000 n 0000002310 00000 n 0000002351 00000 n 0000055462 00000 n 0000190139 00000 n 0000002408 00000 n 0000002455 00000 n 0000055522 00000 n 0000190046 00000 n 0000002512 00000 n 0000002548 00000 n 0000057581 00000 n 0000189967 00000 n 0000002605 00000 n 0000002641 00000 n 0000059719 00000 n 0000189875 00000 n 0000002693 00000 n 0000002730 00000 n 0000059779 00000 n 0000189783 00000 n 0000002782 00000 n 0000002814 00000 n 0000059839 00000 n 0000189691 00000 n 0000002866 00000 n 0000002902 00000 n 0000062332 00000 n 0000189599 00000 n 0000002954 00000 n 0000002987 00000 n 0000062392 00000 n 0000189521 00000 n 0000003040 00000 n 0000003070 00000 n 0000062452 00000 n 0000189389 00000 n 0000003117 00000 n 0000003148 00000 n 0000065606 00000 n 0000189310 00000 n 0000003200 00000 n 0000003245 00000 n 0000068202 00000 n 0000189217 00000 n 0000003297 00000 n 0000003330 00000 n 0000068262 00000 n 0000189124 00000 n 0000003382 00000 n 0000003429 00000 n 0000068322 00000 n 0000189031 00000 n 0000003481 00000 n 0000003535 00000 n 0000068382 00000 n 0000188938 00000 n 0000003587 00000 n 0000003639 00000 n 0000070832 00000 n 0000188845 00000 n 0000003691 00000 n 0000003726 00000 n 0000073789 00000 n 0000188752 00000 n 0000003778 00000 n 0000003831 00000 n 0000075701 00000 n 0000188659 00000 n 0000003883 00000 n 0000003918 00000 n 0000075757 00000 n 0000188566 00000 n 0000003970 00000 n 0000004015 00000 n 0000078644 00000 n 0000188473 00000 n 0000004068 00000 n 0000004110 00000 n 0000081301 00000 n 0000188394 00000 n 0000004163 00000 n 0000004206 00000 n 0000083270 00000 n 0000188262 00000 n 0000004253 00000 n 0000004286 00000 n 0000083330 00000 n 0000188183 00000 n 0000004338 00000 n 0000004376 00000 n 0000083390 00000 n 0000188090 00000 n 0000004428 00000 n 0000004467 00000 n 0000085717 00000 n 0000188011 00000 n 0000004519 00000 n 0000004572 00000 n 0000085777 00000 n 0000187932 00000 n 0000004619 00000 n 0000004658 00000 n 0000005815 00000 n 0000006106 00000 n 0000006257 00000 n 0000006413 00000 n 0000006569 00000 n 0000006725 00000 n 0000006880 00000 n 0000007035 00000 n 0000007191 00000 n 0000007346 00000 n 0000007497 00000 n 0000007647 00000 n 0000007803 00000 n 0000007959 00000 n 0000008115 00000 n 0000008277 00000 n 0000008438 00000 n 0000008600 00000 n 0000008762 00000 n 0000008918 00000 n 0000009074 00000 n 0000011269 00000 n 0000011420 00000 n 0000011575 00000 n 0000011731 00000 n 0000009410 00000 n 0000004710 00000 n 0000009230 00000 n 
0000009290 00000 n 0000186045 00000 n 0000185902 00000 n 0000187042 00000 n 0000185037 00000 n 0000009350 00000 n 0000187186 00000 n 0000011886 00000 n 0000012042 00000 n 0000012197 00000 n 0000012357 00000 n 0000012519 00000 n 0000012681 00000 n 0000012843 00000 n 0000012998 00000 n 0000013154 00000 n 0000013310 00000 n 0000013465 00000 n 0000013622 00000 n 0000013772 00000 n 0000013928 00000 n 0000014084 00000 n 0000014240 00000 n 0000014395 00000 n 0000014551 00000 n 0000014705 00000 n 0000014861 00000 n 0000015017 00000 n 0000015172 00000 n 0000015329 00000 n 0000015486 00000 n 0000015637 00000 n 0000015793 00000 n 0000015949 00000 n 0000016105 00000 n 0000016372 00000 n 0000010882 00000 n 0000009521 00000 n 0000016254 00000 n 0000185181 00000 n 0000018519 00000 n 0000018688 00000 n 0000018859 00000 n 0000019335 00000 n 0000018364 00000 n 0000016470 00000 n 0000019040 00000 n 0000186470 00000 n 0000022027 00000 n 0000022421 00000 n 0000022619 00000 n 0000022970 00000 n 0000021864 00000 n 0000019459 00000 n 0000022792 00000 n 0000022224 00000 n 0000025149 00000 n 0000025738 00000 n 0000025002 00000 n 0000023094 00000 n 0000025560 00000 n 0000025356 00000 n 0000028846 00000 n 0000028487 00000 n 0000025862 00000 n 0000028606 00000 n 0000028666 00000 n 0000028726 00000 n 0000028786 00000 n 0000031619 00000 n 0000031776 00000 n 0000032767 00000 n 0000031472 00000 n 0000028970 00000 n 0000031930 00000 n 0000031990 00000 n 0000032050 00000 n 0000032110 00000 n 0000032170 00000 n 0000032229 00000 n 0000032289 00000 n 0000032349 00000 n 0000186754 00000 n 0000032586 00000 n 0000032646 00000 n 0000186329 00000 n 0000186188 00000 n 0000032706 00000 n 0000185759 00000 n 0000186898 00000 n 0000187303 00000 n 0000035230 00000 n 0000034813 00000 n 0000032941 00000 n 0000034932 00000 n 0000034992 00000 n 0000035052 00000 n 0000037926 00000 n 0000036791 00000 n 0000035367 00000 n 0000036910 00000 n 0000037029 00000 n 0000037089 00000 n 0000037149 00000 n 0000037209 00000 n 0000037269 00000 n 0000037329 00000 n 0000037389 00000 n 0000037449 00000 n 0000037508 00000 n 0000037568 00000 n 0000037628 00000 n 0000037688 00000 n 0000037806 00000 n 0000037866 00000 n 0000040386 00000 n 0000039251 00000 n 0000038037 00000 n 0000039370 00000 n 0000039430 00000 n 0000039490 00000 n 0000039550 00000 n 0000039610 00000 n 0000039670 00000 n 0000039730 00000 n 0000039790 00000 n 0000039850 00000 n 0000039910 00000 n 0000039969 00000 n 0000040029 00000 n 0000040148 00000 n 0000040208 00000 n 0000040267 00000 n 0000043312 00000 n 0000042895 00000 n 0000040510 00000 n 0000043014 00000 n 0000043192 00000 n 0000043252 00000 n 0000045964 00000 n 0000045668 00000 n 0000043436 00000 n 0000045787 00000 n 0000185326 00000 n 0000048419 00000 n 0000048639 00000 n 0000048280 00000 n 0000046088 00000 n 0000048579 00000 n 0000185471 00000 n 0000187420 00000 n 0000050668 00000 n 0000050998 00000 n 0000050529 00000 n 0000048750 00000 n 0000050820 00000 n 0000053518 00000 n 0000053279 00000 n 0000051135 00000 n 0000053398 00000 n 0000055582 00000 n 0000055163 00000 n 0000053642 00000 n 0000055282 00000 n 0000057641 00000 n 0000057402 00000 n 0000055706 00000 n 0000057521 00000 n 0000059899 00000 n 0000059540 00000 n 0000057765 00000 n 0000059659 00000 n 0000062512 00000 n 0000062153 00000 n 0000060036 00000 n 0000062272 00000 n 0000187537 00000 n 0000065666 00000 n 0000065427 00000 n 0000062636 00000 n 0000065546 00000 n 0000067985 00000 n 0000068442 00000 n 0000067846 00000 n 0000065816 00000 n 0000068142 00000 n 0000070891 00000 n 
spark-2012.0.deb/victor/vct/doc/vct-man.tex0000644000175000017500000022532311753202341017402 0ustar eugeneugen\documentclass[12pt,fleqn]{article}

%\usepackage[leftbars,dvips]{changebar}
%\usepackage[leftbars,pdflatex]{changebar}

\title{Victor: a SPARK VC Translator and Prover Driver \\[4ex]
  \Large User Manual for release 0.9.1 \\
  and subsequent experimental modifications \\[1ex]
%  \normalsize
%  \cbstart
%  \emph{Changebars indicate changes from User Manual for release 0.6 }\\[1ex]
%  \cbend
%  \hbox{ }
}
\author{Paul Jackson \\ \texttt{pbj@inf.ed.ac.uk}}
\date{20th July 2011}

%\usepackage{vpage}
%\usepackage{vmargin}
%\setpapersize{Afour}
% margins l t r b, headheight, headsep, footheight, footskip
%\setmarginsrb{1.2in}{1.2in}{1.2in}{0.6in}{0.3in}{0.2inm}{0.3in}{0.3in}

\usepackage[a4paper,DIV14]{typearea} % LaTeX Companion, 2nd Ed, pp203-6
\usepackage{url}

\setlength{\parindent}{0pt}

%My macros

%----------------------------------------------------------------------------
% Abbreviations
%----------------------------------------------------------------------------
\usepackage{amssymb}

\newcommand{\integer}{\ensuremath{\mathbf{Z}}}
\newcommand{\subrange}[2]{\{{#1}\:..\:{#2}\}}
\newcommand{\fixme}[1]{\textbf{FIXME: {#1}}}

\newcommand{\spark}{\textsc{Spark}}
\newcommand{\sparkb}{S{\footnotesize{}PARK}} % for use in bold environments.
\newcommand{\ada}{Ada}
\newcommand{\sparkada}{SPARK-Ada}
\newcommand{\cvcthree}{\textsc{Cvc}3}
\newcommand{\zthree}{\textsc{Z}3}
\newcommand{\yices}{Yices}
\newcommand{\altergo}{Alt-Ergo}
\newcommand{\blast}{\textsc{Blast}}
\newcommand{\slam}{\textsc{Slam}}
\newcommand{\sat}{\textsc{Sat}}
\newcommand{\smt}{\textsc{Smt}}
\newcommand{\smtb}{S{\footnotesize{}MT}}
\newcommand{\sal}{\textsc{Sal}}
\newcommand{\etb}{\textsc{Etb}}
\newcommand{\sri}{\textsc{Sri}}

\newcommand{\smtlib}{\textsc{smt-lib}}
\newcommand{\fdl}{\textsc{fdl}}
\newcommand{\rls}{\textsc{rls}}
\newcommand{\vcg}{\textsc{vcg}}
\newcommand{\siv}{\textsc{siv}}
%NB the extensions here changed in V0.9, for SPARK tool compatibility
\newcommand{\logfile}{\textsc{vlg}}
\newcommand{\csv}{\textsc{vct}}
\newcommand{\sumfile}{\textsc{vsm}}

%\renewcommand{\And}{\ensuremath{\wedge}}
\newcommand{\And}{\ensuremath{\wedge}}
\newcommand{\Not}{\ensuremath{\neg}}
\newcommand{\Or}{\ensuremath{\vee}}
\newcommand{\Iff}{\ensuremath{\Leftrightarrow}}
\newcommand{\Implies}{\ensuremath{\Rightarrow}}

\newcommand{\tttilde}{{\tt \char`\~}}

% Boolean option with short one-line description
\newcommand{\optionbs}[1]{\item[\texttt{-{#1}}]}
% Boolean option with long description
\newcommand{\optionb}[1]{\item[\texttt{-{#1}}]\ \\}
% Value option
\newcommand{\optionv}[2]{\item[\texttt{-{#1}=}\mdseries\textit{#2}]\ \\}

%----------------------------------------------------------------------------
% Draft support
%----------------------------------------------------------------------------
\usepackage{comment}

\specialcomment{outline}%
  {\begingroup\bfseries\slshape\begin{itemize}}%
  {\end{itemize}\endgroup}
\specialcomment{question}%
  {\begin{itemize}\item[\Huge \textbf{?}]\itshape}%
  {\end{itemize}}
\specialcomment{remark}%
  {\begin{itemize}\item[\Huge \textbf{!}]\itshape}%
  {\end{itemize}}

% \excludecomment{outline}
% \excludecomment{question}
% \excludecomment{remark}

\newcommand{\todo}[1]{\textbf{ToDo:} \emph{#1}}
%\newcommand{\todo}[1]{\emph{\bfseries{#1}}}

\newenvironment{todoenv}{\begingroup\itshape}{\endgroup}

\usepackage[colorlinks=true]{hyperref}

\begin{document}
\maketitle

%\begin{abstract}
%This is a user guide for V0.8.
%\end{abstract}

\tableofcontents

\setlength{\parskip}{0.8\baselineskip}

%============================================================================
%\section{Introduction}
%============================================================================

%============================================================================
\section{Supported provers and prover languages}
%============================================================================
Victor has API interfaces to the \cvcthree{} and \yices{} provers, and can
drive any prover that accepts Simplify or \smtlib{} format input files.

%----------------------------------------------------------------------------
\subsection{Simplify language}
%----------------------------------------------------------------------------
%---------------------------------------------------------------------------- \subsection{Simplify language} %---------------------------------------------------------------------------- The Simplify language is supported by the Simplify and \zthree{} provers. %---------------------------------------------------------------------------- \subsection{SMT-LIB language} %---------------------------------------------------------------------------- The \smtlib{} initiative (\url{http://www.smtlib.org}) defines a standard language for formatting input to \smt{} solvers and collects benchmarks in this format. Provers taking \smtlib{} input must support at least one of the \smtlib{} sub-logics \begin{itemize} \item \textsc{auflia}: quantifier formulas involving arrays, uninterpreted functions, linear integer arithmetic. \item \textsc{auflira}: quantifier formulas involving arrays, uninterpreted functions, linear integer and linear real arithmetic. \item \textsc{aufnira}: quantifier formulas involving arrays, uninterpreted functions, non-linear integer and non-linear real arithmetic. \item \textsc{ufnia}: Non-linear integer arithmetic with uninterpreted sort, function, and predicate symbols. \end{itemize} Such provers include Alt-Ergo, \cvcthree, \yices{} and \zthree. Victor currently makes no use of the support for arrays or the reals. %---------------------------------------------------------------------------- \subsection{Alt-Ergo} %---------------------------------------------------------------------------- Alt-Ergo is an open-source \smt{} solver from LRI (Laboratoire de Recherche en Informatique) at Universit\'e Paris-Sud. It is available from \url{http://alt-ergo.lri.fr/}. Victor has been tested most recently with the V0.92.2 release using Alt-Ergo's \smtlib{} file-level interface. If the standalone Alt-Ergo executable is downloaded rather than built from the Alt-Ergo source distribution, it is also necessary to copy the file \texttt{smt\_prelude.mlw} from the source distribution to the \texttt{run/} directory. %---------------------------------------------------------------------------- \subsection{CVC3} %---------------------------------------------------------------------------- \cvcthree{} is an open-source \smt{} solver jointly developed at New York University and the University of Iowa. It is available from \url{http://www.cs.nyu.edu/acsys/cvc3/}. Victor can link to a \cvcthree{} library and can then drive \cvcthree{} via its API. Alternatively Victor can invoke a \cvcthree{} stand-alone executable on \smtlib{} format files. Victor has been tested with the latest release, V2.2, dating from November 2009. % % The \cvcthree{} developers also make available daily % development releases. Victor has not yet been tested with these. % \cvcthree{} is significantly slower than \yices{} or \zthree{} (maybe 5-10$\times$), especially when VCs are unprovable. % It has some basic support for non-linear arithmetic. When driven via its API, this version of \cvcthree{} throws exceptions and has some segmentation faults on a few of the \spark{} VCs from the tokeneer set, % The exceptions are caught and reported by Victor, but % the segmentation faults cause Victor to halt. To enable Victor runs in the presence of these faulting problems, it is possible to tell Victor to ignore trying to run \cvcthree{} on certain VCs. 
%----------------------------------------------------------------------------
\subsection{Simplify}
%----------------------------------------------------------------------------
Simplify is a legacy prover, used most notably in the ESC/Java tool.
The Modula-3 sources and some documentation are available from HP Labs.
Visit \url{http://www.hpl.hp.com/downloads/crl/jtk/index.html} and follow
the ``Download Simplify here'' link.
Executables for Linux and other platforms can be pulled out of the ESC/Java2
distribution: visit \url{http://secure.ucd.ie/products/opensource/ESCJava2/}.
In October 2007, the executables for V1.5.4 were found in a file
\texttt{Simplify-1.5.5-13-06-07-binary.zip}.
Simplify has good performance, but is unsound and sometimes crashes because
it uses fixed-precision integer arithmetic.
Victor interfaces to Simplify using temporary files and by invoking the
Simplify executable in a sub-process.
Unlike the case with \cvcthree{}, Victor can tolerate Simplify crashing.
Victor provides notifications of Simplify crashes in its output files.
%----------------------------------------------------------------------------
\subsection{Yices}
%----------------------------------------------------------------------------
\yices{} is a state-of-the-art \smt{} solver available from \sri{} at
\url{http://yices.csl.sri.com/}.
Victor links with a \yices{} library provided with the \yices{} distribution.
Victor has been tested with the latest public release, V1.0.24.
This version, bug fixes apart, dates from summer 2007 and essentially is the
version that led the field in the 2007 \smt{} competition.
% \yices{} 1 is no longer under development: \sri{} is
% currently working on a re-implementation, \yices{} 2, which had a
% preliminary showing at the 2008 \smt{} competition. \yices{} 2 is not
% currently available, and
\yices{} is fussy about VCs containing non-linear arithmetic expressions.
Victor currently just has \yices{} ignore any hypotheses or conclusions
containing such expressions, and, not infrequently, VCs are provable despite
these ignored VC clauses.
%
\yices{} will accept universally-quantified hypotheses with non-linear
arithmetic expressions, and sometimes can make use of linear instantiations
of these.
%
Unfortunately, the current behaviour on finding a non-linear instantiation is
to abandon the proof attempt rather than simply ignore the instantiation.
No crashes have been observed with recent versions of \yices. However, on a
few VCs (not in the test sets provided with the distribution), \yices{} just
keeps running indefinitely.
No mechanism for timing out on such cases has yet been implemented; the only
way to deal with them is to request that Victor ignore them.
Victor can also drive \yices{} using \smtlib{} format files.
%----------------------------------------------------------------------------
\subsection{Z3}
%----------------------------------------------------------------------------
\zthree{} is a state-of-the-art \smt{} solver developed at Microsoft.
See \url{http://research.microsoft.com/en-us/um/redmond/projects/z3/}.
Victor has been tested most recently with the Linux version of release 2.13.
No problems have been observed with this version.
% This website only provides a Windows version of a Z3 executable.
% Microsoft research staff have verbally advertised the existence of a
% Linux version. Victor has been tested with such a version (V1.3),
% obtained from Leonardo de Moura, \texttt{leonardo@microsoft.com}, one
% of \zthree's developers.
\zthree{} has good performance and better VC coverage than other solvers
tried. In particular, it has the best support for non-linear arithmetic.
Victor interfaces to \zthree{} using temporary files and by invoking the
\zthree{} executable in a sub-process. The temporary files can be in either
\smtlib{} or Simplify format.
% Despite the similarity of the format, \zthree's performance can be radically
% different, depending on the format: it seems that \zthree{} has a soft
% timeout of under 0.1s when taking Simplify format input, but no such timeout
% on \smtlib{} format input.
%============================================================================
\section{Installation and Testing}
%============================================================================
Victor is written in C++ and currently only runs on Linux. The current
distribution includes some preliminary code to allow it to compile and run on
Windows. However, this code has not yet been fully tested.
At Edinburgh, Victor is currently compiled and run on a Red Hat Fedora 13
machine. It makes use of the following tools:
\begin{itemize}
\item \texttt{make} V3.81
\item \texttt{gcc/g++} V4.4.4
\item \texttt{bison} V2.4.1
\item \texttt{flex} V2.5.35
\end{itemize}
% For SL5:
% \begin{itemize}
% \item \texttt{make} V3.81
% \item \texttt{gcc/g++} V4.1.2
% \item \texttt{bison} V2.3
% \item \texttt{flex} V2.5.4
% \end{itemize}
The main external library it uses is
\begin{itemize}
\item \texttt{gmp} V4.3.1
%\item \texttt{gmp} V4.1.4
\end{itemize}
Its precise dependencies on these versions are largely unknown.
%
One observation is that some tweaks to the \texttt{bison} code in
\texttt{parser.yy} were necessary when shifting from \texttt{bison} V2.3 to
\texttt{bison} V2.4. Comments in \texttt{parser.yy} indicate what needs to be
changed for compilation with V2.3.

By default, the \texttt{gmp} library is dynamically linked in. If running a
single executable on several different Linux platforms, this can cause
problems and it might be desirable to use static linking instead. To achieve
this, use \texttt{STATIC\_GMP=true} on the \texttt{make} command line when
building Victor.

To install:
\begin{enumerate}
\item Untar the distribution. E.g.
\begin{verbatim}
tar xzf vct-0.9.0.tgz
\end{verbatim}
This should generate a top level directory \texttt{vct-0.9.0} including
subdirectories \texttt{src}, \texttt{bin}, \texttt{run}, \texttt{vc} and
\texttt{doc}. The \texttt{doc} directory includes a copy of this manual.
Other directories are described below.
\item Configure Victor for each of the provers you wish to use it with.
\begin{description}
\item[\cvcthree:] To enable the API driver, uncomment the definition of
variable \texttt{CVC3DIR} in file \texttt{src/Makefile} and edit its value to
be that of your \cvcthree{} installation.
To use the \smtlib{} format file interface, ensure that an executable
\texttt{cvc3} is on your current path.
\item[Simplify:] Ensure an executable called \texttt{simplify} is on your
current path.
\item[\yices:] To enable the API driver, uncomment the definition of variable
\texttt{YICESDIR} in file \texttt{src/Makefile}, and edit its value to be
that of your \yices{} installation.
To use the \smtlib{} format file interface, ensure that an executable
\texttt{yices} is on your current path.
\item[\zthree:] Ensure an executable called \texttt{z3} is on your current
path.
\item[\altergo:] Ensure an executable called \texttt{alt-ergo} is on your
current path.
\end{description} Alternate names, and optional paths can be specified for each executable at the top of the \texttt{Makefile} in the \texttt{run} directory. Victor can be run without driving any prover. This is useful for testing if Victor's parser can handle certain VCs and for gathering information on VCs. This mode can be used for compiling reports on the coverage obtained with the Simplifier prover provided with Praxis's \spark{} toolkit. \item Build a Victor executable by \texttt{cd}ing to the \texttt{src} directory and typing \texttt{make}. This does a variety of things, including \begin{enumerate} \item Creating \texttt{.d} files recording \texttt{make} rules that capture dependencies between source files. \item Running the \texttt{bison} parser generator and the \texttt{flex} lexer generator. \item Compiling various \texttt{.o} files. \item Linking the \texttt{.o} files together, along with prover libraries and the \texttt{gmp} library, and installing the resulting executable named \texttt{vct} in the \texttt{bin} directory. \end{enumerate} For convenience, a sub-directory \texttt{build} contains copies of files created during a build of Victor where it was configured for running with the Simplify and \zthree{} provers. For example, if you do not have the correct version of \texttt{bison}, you could copy over the \texttt{bison} output files to the \texttt{src} directory. \item Add the \texttt{vct/bin} directory to your \texttt{PATH}. % if you % wish to run Victor without specifying the path to this \texttt{bin} % directory. \item Build utility tools for analysing the \csv{} and \logfile{} comma-separated-value output files created by Victor. Enter \texttt{make csvutils}. This causes the executables \texttt{csvproj}, \texttt{csvfilt}, \texttt{csvmerge} and \texttt{csvisect} to be added to the \texttt{bin} directory. \item Try running Victor on the example VCs provided with the distribution. Check the output files match the provided output files. See Section \ref{sec:examples} for details. \end{enumerate} %============================================================================ \section{Operation} %============================================================================ %---------------------------------------------------------------------------- \subsection{Terminology} %---------------------------------------------------------------------------- We refer to \spark{} VCs as \emph{goals} and use the term \emph{goal slice} to refer to a proof obligation build from a VC by considering just one of the goal's conclusions and ignoring the others. A \emph{unit name} is the hierarchical name of a program unit. The unit name with a \texttt{.fdl}, \texttt{.rls}, \texttt{.vcg} or \texttt{.siv} suffix gives a pathname for the corresponding VC file relative to the root directory of all the VC files for a \spark{} program. %---------------------------------------------------------------------------- \subsection{Basic operation} %---------------------------------------------------------------------------- The basic operation of Victor is to \begin{enumerate} \item Read in a list of names of \spark{} program units. \item Read in the VCs described in the \texttt{.fdl}, \texttt{.rls}, \texttt{.vcg} file triples output by the \spark{} Examiner for the named program units.\footnote{ Optionally it can read in the simplified \texttt{.siv} files output by the \spark{} Simplifier instead of the \texttt{.vcg} files.} \item Invoke a prover on each goal or goal slice. 
\item Output \texttt{.vct}, \texttt{.vsm} and \texttt{.vlg} report files. \end{enumerate} %---------------------------------------------------------------------------- \subsection{Input and output files} %---------------------------------------------------------------------------- The Victor-specific input and output files are as follows. % - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \subsubsection{Unit listing input file} % - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \label{sec:unit-listing-file} Typically Victor is run on many program units at once. An input \emph{unit listing} \texttt{.lis} file is used to indicate the units it should consider. % The grammar for each line in a unit listing file is given by \noindent \begin{tabular}{lll} \textit{line} & ::= & \textit{unitname} \{\textit{option}\} \\[2ex] % \textit{option} & ::= & [\textit{tag}\texttt{?}]\textit{val} \\[2ex] % \textit{val} & ::= & \textit{goal} \\ & $|$ & \textit{goal}\texttt{.}\textit{concl} \\ & $|$ & \textit{filename}\texttt{.fdl} \\ & $|$ & \textit{filename}\texttt{.rul} \\ & $|$ & \textit{filename}\texttt{.rlu} \end{tabular} \noindent where square braces ([]) enclose optional non-terminals, curly braces (\{\}) enclose non-terminals repeated 0 or more times, % the terminals \textit{unitname}, \textit{tag} and \textit{filename} are alphanumeric strings, and the terminals \textit{goal} and \textit{concl} are natural numbers. % The meaning of the components of a line are as follows. \begin{itemize} \item \textit{unitname} is the hierarchical name of a unit (\spark{} subprogram). \item \textit{tag} tags an option. A tagged option is only active if the tag is also supplied as one of the values of the \texttt{-active-unit-tags} Victor command-line option. Untagged options are always active. \item \textit{goal} and \textit{goal}\texttt{.}\textit{concl} select particular goals and goal slices in the unit. The Victor command-line options \texttt{-include-selected-goals} and \texttt{-exclude-selected-goals} control how Victor treats these selected goals and goal slices. \item \textit{filename}\texttt{.rul} and \textit{filename}\texttt{.rlu} are auxiliary rules files to load \item \textit{filename}\texttt{.fdl} is an auxiliary declarations file. For example, this can declare constants and functions introduced in an auxiliary rules file. \end{itemize} Comment lines are allowed: these are indicated by a \texttt{\#} character in the first column. Also blank lines are allowed. One way to prepare a unit listing file is to run the command \begin{verbatim} find . -name '*.fdl' | sed -r 's/\.\/|\.fdl//g' > units.lis \end{verbatim} in the root directory of a set of VC files. % - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \subsubsection{VCT output file} % - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - The \csv{} file includes one line for each goal or goal slice. The fields of each line are: \begin{enumerate} \item Path to unit. This describes the containing packages \item Unit name, without a prefix for the containing packages. \item Unit kind. One of \texttt{procedure}, \texttt{function} or \texttt{task\_type}. 
\item Source of path in subprogram for VC \item Destination of path for VC, or VC kind if not path related \item VC goal number \item Conclusion (goal slice) number \item Status (one of true, unproven, error) \item Proof time (in sec) \item Brief remarks about goal and solver interactions \item Operator kinds occurring in hypotheses \item Operator kinds occurring in conclusion \end{enumerate} % - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \subsubsection{VSM output file} % - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - The run summary file has extension \texttt{.vsm}. It is a 1 line comma-separated-value file with the fields \begin{enumerate} \item Report file name. \item Number of \texttt{ERROR} messages in log file \item Number of \texttt{WARNING} messages in log file \item Total number of goal/goal slices processed. \item Number of goal/goal slices with \emph{true} status \item Number of goal/goal slices with \emph{unproven} status \item Number of unproven goal/goal slices that involved a timeout. \item Number of goal/goal slices with \emph{error} status \item Percent of goal/goal slices with \emph{true} status \item Percent of goal/goal slices with \emph{unproven} status \item Percent of unproven goal/goal slices that involved a timeout. \item Percent of goal/goal slices with \emph{error} status \item Total execution time \end{enumerate} A file \texttt{vsm-file-header.txt} provides a 1 line comma-separated list of headings for these summary files. Summary files can be concatenated together with a header file and then viewed in any spreadsheet program. % - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \subsubsection{VLG output file} % - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - The \logfile{} log file includes \begin{enumerate} \item a record of the command line options passed to Victor, \item various information, warning and error messages, \item statistics on the run, including numbers of VCs proven and unproven, and time taken. \end{enumerate} %---------------------------------------------------------------------------- \subsection{Invocation of Victor} %---------------------------------------------------------------------------- The command line syntax for invoking Victor is \begin{quote} \texttt{vct} [\textit{options}] [\textit{unitname}] \end{quote} The \textit{unitname} argument is used to identify a single unit on which to run Victor. To run Victor on multiple units, omit the \texttt{unitname} argument and use instead the \texttt{units} option to specify a unit listing input file. If both a \textit{unitname} and a \texttt{units} option are provided, the \texttt{units} option is ignored. Victor takes numerous options, many of which are currently necessary. See the next section for a description of a \texttt{Makefile} that provides standard option sets. %---------------------------------------------------------------------------- \subsection{Examples} %---------------------------------------------------------------------------- \label{sec:examples} The \texttt{vc} directory has subdirectories for some example sets of VCs. % that % Victor has so far been tested on. % \begingroup % \renewcommand{\descriptionlabel}[1]{\texttt{#1}} % \begin{description} % \item[autop] The autopilot example distributed with the % \spark{} book: ``High Integrity Software: The \spark{} Approach to % Safety and Security'' by John Barnes. 
% \item[hilton] Adrian Hilton's SPARK Missile Guidance Simulator.
% \item[tokeneer] The Tokeneer ID station case study prepared by Praxis
% for the NSA.
% \end{description}
% \endgroup
See the file \texttt{vc/README.txt} for further information on these sets.
% sources of these VC sets.
The \texttt{run} directory provides a Makefile with rules for running Victor
on the VC sets in the \texttt{vc} directory.
%
These rules use Make patterns in their targets, and can easily also be used
for running Victor on users' own VC sets.
%
The rules set appropriate Victor command-line options and so allow new Victor
users to avoid having to figure these out for themselves.
% See \texttt{run/Makefile} for details.
Reference report files obtained from running \texttt{make} on some of these
targets are included in directory \texttt{run/out-ref}.
%
Unix \texttt{diff} can be used to check that newly-generated report files are
the same as the reference files.
%
If the command line option \texttt{-gstime} is used to include times of
prover runs in report files, it will be necessary to use the \texttt{csvproj}
utility to remove the field for these times in order to get files that are
expected to be identical.
%----------------------------------------------------------------------------
\subsection{Performance tips}
%----------------------------------------------------------------------------
\begin{enumerate}
\item When \smt{} solvers cannot prove a goal, they often keep trying almost
indefinitely rather than halting, so it is good to run them with some kind of
time-out.
%
When several VCs cannot be proven, Victor's total run-time can be dominated
by the runs that go to time-out. Setting a shorter time-out can therefore
sometimes radically reduce Victor's run-time, often with little or no drop in
the number of goals proven.
\item \smt{} solver performance on goals they can prove is often dependent on
the number of quantified axioms. By default, Victor uses a number of
quantified axioms from the rules files \texttt{divmod.rul} and
\texttt{prelude.rul} in the \texttt{run/} directory.
%
In some cases, not all these axioms are necessary, and faster run-times are
achievable with alternate rules files that prune down these axiom sets.
\end{enumerate}
%============================================================================
\section{Command line options}
%============================================================================
Options are specified with syntax \texttt{-\emph{name}} or
\texttt{-\emph{name}=\emph{value}}.
%
Option values can be boolean (\texttt{true} or \texttt{false}), natural
numbers (e.g. \texttt{42}) or strings.
%
An option \texttt{-\emph{name}} is interpreted the same as the option
\texttt{-\emph{name}=true}. An unset boolean option is interpreted as
\texttt{-\emph{name}=false}.
If the same option is given multiple times with different values, the usual
behaviour is that the last value is taken. Occasionally all the supplied
values are used. These cases are always explicitly pointed out below.
A later option \texttt{-name=}, \texttt{-name=false}, \texttt{-name=none} or
\texttt{-name=default} clears all earlier values given for the option and
makes it unset.
%----------------------------------------------------------------------------
\subsection{Input options}
%----------------------------------------------------------------------------
These options control where VCs are read from, provision of auxiliary
declarations and rules, and filtering of VCs before invoking the selected
prover.
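Before the individual options are listed, here is a purely illustrative
invocation that combines several of them with options from later subsections;
the file and directory names are hypothetical, and the full option set
appropriate for a given prover is best copied from the rules in
\texttt{run/Makefile}.
\begin{verbatim}
vct -units=units.lis -prefix=myproject/vcs -siv \
    -prover=z3 -interface-mode=smtlib -ulimit-timeout=10 \
    -report=z3run -report-dir=reports -gstime
\end{verbatim}
This reads the \texttt{.siv} files for the listed units, drives \zthree{} on
\smtlib{}-format files with a 10 second time-out per invocation, and writes
\texttt{z3run} report files into the \texttt{reports} directory.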
\begin{description} \optionv{units}{unit-listing} Run on units named in \textit{unit-listing} file. \optionv{prefix}{prefix} Use \textit{prefix} as a common prefix for all unit names. \textit{prefix}\texttt{/}\textit{unitname} should give an absolute or relative pathname for the VC file set of a program unit. Default is that no prefix is used. \optionv{decls}{declfile} For every program unit, read auxiliary \texttt{.fdl} declarations file named in \textit{declfile}. Multiple files can be specified using multiple \texttt{-decls} options. % For example, the Examiner does not provide declarations for built-in % bit operations such as \texttt{bit\_\_or} and \texttt{bit\_\_not}. \optionb{read-all-decl-files-in-dir} For each program unit with full name \textit{dirs/dir/unitname}, read in all FDL declaration files in directory \textit{dirs/dir/} rather than just the FDL declaration file \textit{dirs/dir/unitname.fdl}. This option is useful in conjunction with \texttt{-read-directory-rlu-files} and \texttt{-read-unit-rlu-files} when the user-defined rules files for a unit (see options \texttt{-read-directory-rlu-files} and \texttt{-read-unit-rlu-files}) refer to identifiers that are not declared in \textit{dirs/dir/unitname.fdl}. An alternative option is \texttt{-delete-rules-with-undeclared-ids}. \optionb{read-directory-rlu-files} For each program unit with full name \textit{dirs/dir/unitname}, read in the user-defined rules file \textit{dirs/dir/dir.rlu}. The SPARK Simplifier reads in such rules files by default. \optionb{read-unit-rlu-files} For each program unit with full name \textit{dirs/dir/unitname}, read in the user-defined rules file \textit{dirs/dir/unitname.rlu}. The SPARK Simplifier reads in such rules files by default. \optionv{rules}{rulesfile} For every program unit, read in auxiliary rules file named in \textit{rulesfile}. Multiple files can be specified using multiple \texttt{-rules} options. \optionb{delete-rules-with-undeclared-ids} User-defined rules files can sometimes mention rules that involve functions and constants that are not declared in the FDL file for a unit being processed. Normally Victor abandons processing a unit when it encounters undeclared ids. With this option, the rules involving the undeclared ids are deleted. An alternative option is \texttt{read-all-decl-files-in-dir}. \optionb{siv} Read in \texttt{.siv} simplified VC files output by the Simplifier rather than \texttt{.vcg} Examiner VC files. % See smt-driver.cc \optionv{goal}{g} Only consider goal number $g$. % Skipped over goals are not reported in \csv{} output file. % This option is intended for use when Victor is run on a single unit, when a \textit{unit-name} argument and no \texttt{units} option is given. \optionv{concl}{c} Only consider conclusions (goal slices) numbered $c$. This option is intended for use when Victor is run on a single unit. \optionb{skip-concls} Do not pass conclusion formulae to the selected prover. This option is good for helping to identify goals or goal slices true because of an inconsistency in the hypotheses or rules. \optionb{skip-hyps} Do not pass hypothesis formulae to the selected prover. This option is good for helping to identify goals or goal slices true because of an inconsistency in the rules. % See utility.cc \optionv{from-unit}{unit-name} Only drive to selected prover the units listed in the \texttt{-units} option starting with \textit{unit-name}. Default is to start with first. 
\optionv{from-goal}{g}
In the first unit to be passed to the prover, start driving goals / goal
slices to the prover at goal $g$.
\optionv{to-unit}{unit-name}
Stop driving units to the selected prover after \textit{unit-name} is
encountered. Default is to continue until the last listed unit.
\optionv{active-unit-tags}{tags}
Identify which tagged options (if any) in the unit listing file to make
active. See Section \ref{sec:unit-listing-file} for more on this.
% Multiple tags should be separated by colons (\texttt{:}).
\optionb{include-selected-goals}
When particular goals or goal slices are selected for a unit in the unit
listing file, run Victor on just those goals or goal slices.
\optionb{exclude-selected-goals}
When particular goals or goal slices are selected for a unit in the unit
listing file, do not run Victor on those goals or goal slices.
\end{description}
%----------------------------------------------------------------------------
\subsection{Translation options}
%----------------------------------------------------------------------------
See Section \ref{sec:translation} for a presentation of these options, since
they make best sense in a discussion of the overall translation process.
%----------------------------------------------------------------------------
\subsection{Prover and prover interface selection}
%----------------------------------------------------------------------------
\begin{description}
\optionv{prover}{prover}
Select the prover to drive. Valid values of \emph{prover} are:
\begin{itemize}
\item \texttt{cvc3}
\item \texttt{simplify}
\item \texttt{yices}
\item \texttt{z3}
\end{itemize}
A value of \texttt{none} can also be specified. This is useful if one just
wants to generate prover input files.
\optionv{prover-command}{prover-command}
Use this instead of the \texttt{prover} option to specify explicitly a
shell-level command for invoking the prover. This allows alternate provers or
custom prover options to be specified.
Selecting neither this option nor the \texttt{prover} option is equivalent to
setting the value of \texttt{prover} to \texttt{none}.
\optionv{interface-mode}{mode}
Select the prover interface mode. Valid values of \emph{mode} are:
\begin{itemize}
\item \texttt{api}: Use prover API.
Acceptable with \texttt{cvc3} or \texttt{yices} value for \texttt{prover}.
\item \texttt{smtlib}: Use \smtlib{}-format files and stand-alone prover
executable.
Acceptable with \texttt{cvc3}, \texttt{yices} or \texttt{z3} value for
\texttt{prover}, and with \texttt{prover-command} option.
\item \texttt{simplify}: Use Simplify-format files and stand-alone prover
executable.
Acceptable with \texttt{simplify} or \texttt{z3} value for \texttt{prover},
and with \texttt{prover-command} option.
% \item \texttt{alt-simplify}: Use alternate Simplify-format files and
% stand-alone prover executable.
% Acceptable with \texttt{simplify} or \texttt{z3} value for
% \texttt{prover},
% and with \texttt{prover-command} option.
% These alternate Simplify-format files are generated by a customisation
% of the code generating \smtlib{} interface.
\item \texttt{dummy}: Use some default code that mostly does nothing.
In this case, Victor still parses the VC files, does a prover independent
translation of the goals, and generates \csv{} and log output files. This is
the default option.
\end{itemize}
\end{description}
% \begingroup
% \newcommand{\tick}{$\bullet$}
% \begin{tabular}{l|ccccc}
% Mode & \multicolumn{5}{c}{Prover} \\
%      & cvc3 & simplify & yices & z3 & \textit{\rmfamily custom}\\
% \hline
% api      & \tick &       & \tick &      &       \\
% simplify &       & \tick &       &\tick & \tick \\
% smtlib   & \tick &       & \tick &\tick & \tick
% \end{tabular}
% \endgroup
%----------------------------------------------------------------------------
\subsection{Prover driving options}
%----------------------------------------------------------------------------
\begin{description}
\optionb{fuse-concls}
Pass one goal at a time to the selected prover. By default Victor passes one
goal slice at a time.
\optionv{working-dir}{working-dir}
Use \textit{working-dir} as root of directory tree of files used for prover
input and output. An argument of `\texttt{.}' is acceptable to indicate the
current directory. Defaults to \texttt{/tmp}.
% Option is currently only relevant for Simplify and \zthree.
Unless one of the next three options is used, the same file names are used
for every prover run and every Victor run.
\optionb{hier-working-files}
Use distinct files for each prover invocation and arrange in a hierarchical
tree under \emph{working-dir}.
\optionb{flat-working-files}
Use distinct files for each prover invocation and arrange all as members of
\emph{working-dir}.
\optionb{unique-working-files}
Within a given Victor run, use the same file names for each prover
invocation, but, by including hostname and process number in file names, make
the names unique to the Victor run.
% This option is useful if one wants to have simultaneous Victor runs.
\optionb{delete-working-files}
Delete the files used for prover input and output after each prover
invocation.
\optionv{ulimit-timeout}{time}
If using either of the file-level interface modes, use the Linux
\emph{ulimit} process limit facility to time out prover invocations after
\emph{time} seconds. The \texttt{time} value should be a natural number.
The default is not to time out prover invocations.
\optionv{shell-timeout}{time}
If using either of the file-level interface modes, use the provided shell
script \texttt{timeout.sh} to time out prover invocations after \emph{time}
seconds. The \texttt{time} value can be an integer or a fixed-point number
(e.g. 0.1). The default is not to time out prover invocations.
Currently this option is not that robust and use of \texttt{-ulimit-timeout}
is recommended instead.
\optionv{logic}{logic}
If using the \smtlib{} interface mode, set the value of the \texttt{:logic}
attribute in the \smtlib{}-format files to \emph{logic}. The default is
\texttt{AUFLIA}.
\optionb{smtlib-hyps-as-assums}
If using the \smtlib{} interface mode, insert each hypothesis into the
\smtlib{}-format file as the value of a distinct \texttt{:assumption}
attribute.
% smt-driver.cc
\optionv{drive-goal-repeats}{count}
Repeat each prover invocation \textit{count} times. This is used to increase
precision of prover runtime measurements when using an API interface.
\optionv{check-goal-repeats}{count}
Repeat each prover invocation \textit{count} times. This is used to increase
precision of prover runtime measurements when using a file-level interface.
\end{description}
%----------------------------------------------------------------------------
\subsection{Output options}
%----------------------------------------------------------------------------
%- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
\subsubsection{Screen output options}
%- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
\begin{description}
\optionb{utick}
Print to standard output a \texttt{*} character at the start of processing
each unit. If \texttt{-longtick} also selected, print instead the unit name.
\optionb{gtick}
Print to standard output a \texttt{;} character at the start of processing
each goal. If \texttt{-longtick} also selected, print instead the goal
number.
\optionb{ctick}
Print to standard output a \texttt{.} character at the start of processing
each conclusion of a goal. If \texttt{-longtick} also selected, print also
the conclusion number.
\optionb{longtick}
See above.
\optionb{echo-final-stats}
Print to standard output the final statistics that are included at the end of
the report file.
\end{description}
%- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
\subsubsection{General report file options}
%- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
\begin{description}
\optionv{report}{report-file}
Use \textit{report-file} as body of filenames for \csv{}, \sumfile{} and
\logfile{} report files. Default is to use \texttt{report}.
\optionv{report-dir}{dir}
Put report files in directory \textit{dir}. If directory does not exist, it
is created. Default is to use current directory.
\end{description}
%- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
\subsubsection{VCT file options}
%- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
\begin{description}
\optionb{count-trivial-goals}
Write an entry to the \csv{} file for each input goal of form
\texttt{*** true}. In VCG files, these are the goals proven by the Examiner.
In SIV files, these are the goals proven by the Examiner or the Simplifier.
These entries have status \texttt{true} in field 8 and the comment
\texttt{trivial goal} in field 10.
Use this option along with option \texttt{-fuse-concls} to have the goal
counts match those from the POGS (Proof Obligation Summariser) tool.
\optionb{hkinds}
Report the list of hypothesis kinds in field 11 of the \csv{} file.
\optionb{ckinds}
Report the list of conclusion kinds in field 12 of the \csv{} file.
\optionb{gstime}
Report time taken by prover to process a goal slice or goal in field 9 of the
\csv{} file.
\optionb{gstime-inc-setup}
Include setup time in gstime. This setup time is time to send declarations,
rules, hypotheses and conclusions to the prover before invoking the prover
itself.
It is appropriate to include this time when calling the prover via an API
(\yices{} and \cvcthree{} cases) since the provers do incremental processing
on receiving this information.
When the prover interface is via files, this setup time is the time to write
an input file for the prover, so it is not as appropriate to include it.
\optionb{csv-reports-include-goal-origins}
Include information on goal origins in fields 4 and 5 of the \csv{} file.
Default is not to include this information.
\optionb{csv-reports-include-unit-kind}
Include information on unit kind in field 3 of the \csv{} file.
Default is not to include this information.
\end{description} %- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \subsubsection{Log file options} %- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \begin{description} \optionv{level}{level} Report all messages at or above priority \textit{level}. The levels and associated names are \begin{enumerate} \item [6] \texttt{error} \item [5] \texttt{warning} \item [4] \texttt{info} \item [3] \texttt{fine} \item [2] \texttt{finer} \item [1] \texttt{finest} \end{enumerate} The \textit{level} value can either be a number or the associated name. The default level is \emph{warning}. \end{description} %---------------------------------------------------------------------------- \subsection{Debugging options} %---------------------------------------------------------------------------- \begin{description} \optionb{scantrace} Write lexer debugging information to standard output \optionb{parsertrace} Write parser debugging information to standard output \end{description} %---------------------------------------------------------------------------- \subsection{CVC3 options} %---------------------------------------------------------------------------- Unless otherwise specified, these options are only relevant when invoking \cvcthree{} via its API. % The main options are as follows. \begin{description} \optionb{counterex} Report counterexamples for false and unknown queries. \begin{remark} I have not figured out yet how to direct \cvcthree{} to write counter-examples to files. A work-around to view counter-examples is to run with this option and the \texttt{-cvc-inputlog} option, and then run the standalone \cvcthree{} executable on the generated \cvcthree{} input file. \end{remark} % \optionv{resourcelimit}{limit} % Set resource limit for \cvcthree{} runs. This provides a soft timeout % option. % A value of 25,000 gives run time limits of 7-10sec on a 1.86GHz Intel % Core 2 processor. Default is 0, no limit. % This option also works when invoking \cvcthree{} on \smtlib{}-format input % files. \optionv{timeout}{time} Set a timeout period in units of 0.1 seconds for runs of \cvcthree{}, both via API and via executable. Uses \cvcthree{}'s internal support for timing out. \optionb{cvc-loginput} Enable echoing of API calls for each \cvcthree{} run to a file. Use the \texttt{-working-dir} option to set where the file is stored and the \texttt{-hier-working-files} and \texttt{-flat-working-files} options to control whether and how distinct files are used for each run. % Files have suffix \texttt{.cvc}. % If distinct files are not requested, all runs will be echoed to a file named \texttt{cvc3.cvc}, each run overwriting the previous one. % These files are saved in \cvcthree{}'s standard input language and can be used as input to a \cvcthree{} stand-alone executable. % \optionv{cvc-echo-suffix}{suffix} % Set suffix to use for file logging \cvcthree{} calls. Defaults to % \texttt{.clog}. \end{description} See the file \texttt{cvc-driver.cc} for further available options. Not all of these have been tried out yet. %---------------------------------------------------------------------------- \subsection{Simplify options} %---------------------------------------------------------------------------- No options are currently available. 
%----------------------------------------------------------------------------
\subsection{Yices options}
%----------------------------------------------------------------------------
Unless otherwise specified, these options are only relevant when invoking
\yices{} via its API.
\begin{description}
\optionb{yices-loginput}
Enable echoing of API calls for each \yices{} run to a file. Use the
\texttt{-working-dir} option to set where the file is stored and the
\texttt{-hier-working-files} and \texttt{-flat-working-files} options to
control whether and how distinct files are used for each run.
% Files always have suffix \texttt{.yices}.
If distinct files are not requested, all runs will be echoed to the file
\texttt{yices.yices}, each run overwriting the previous one.
%
These files are saved in \yices{}'s standard input language and can be used
as input to a \yices{} stand-alone executable.
\begin{remark}
% The \yices{} executable preloads declarations of functions \texttt{div}
% and \texttt{mod}. The \yices{} input files output by Victor might
% need editing to remove declarations of these functions before the input
% files will execute properly.
%
Victor lets \yices{} reject non-linear parts of formulae -- see the warnings
in Victor's log file. These formulae might have to be removed by hand for
\yices{} to load these input files properly.
\end{remark}
\optionb{yices-logoutput}
Set the file for output of each run of \yices{}. The location of the file and
whether distinct files are generated for each run are specified in the same
way as with \texttt{-yices-loginput}. The suffix of these files is
\texttt{.ylog}. If distinct files are not requested, all runs are written to
\texttt{yices.ylog}.
% \optionv{yverb}{n}
% Set verbosity of \yices's output to \textit{n}.
\optionb{counterex}
Enable reporting of counter-example models to the output log file.
% -ynotc Disable Yices type checking (enabled by default).
% -abstract-nonlin-divmod
% Abstract nonlinear divmod exps to uninterpreted functions.
% Default is to abstract all divmod exps.
% -abstract-nonlin-times
% Abstract nonlinear times exps to uninterpreted functions.
% Default is to abstract no times exps.
\optionv{timeout}{time}
Set a timeout period in seconds for runs of the \yices{} executable on
\smtlib{}-format input files. Uses \yices{}'s \texttt{--timeout} option.
\end{description}
%\fixme{Do anything about non-linear arithmetic options?}
%----------------------------------------------------------------------------
\subsection{Z3 options}
%----------------------------------------------------------------------------
No \zthree{} options are specifically supported by Victor. \zthree{} options
can be specified by giving a custom prover command with the
\texttt{prover-command} option.
% \begin{description}
% \optionb{z3-fourier-motzkin}
% Use Fourier-Motzkin elimination to eliminate all quantifiers over linear
% arithmetic expressions.
% \end{description}
%============================================================================
\section{Translation}
%============================================================================
\label{sec:translation}
% As defined in (p:) processor.cc and (t:) translation.cc
The description of the translation process here is rather brief and not
self-contained.
%
The process is best understood by first reading the draft paper
\emph{Proving SPARK Verification Conditions with SMT solvers}, available from
the author's website.
Unless otherwise stated, translation steps are carried out in the order they
are described below.
%----------------------------------------------------------------------------
\subsection{Standard Form translation}
%----------------------------------------------------------------------------
Most translation steps in Victor are carried out on units in a standard form.
In this standard form, all functions and relations have a unique type: there
is no overloading.
The first translation step is to put units into this standard form.
% p;putUnitInStandardForm
\begin{itemize}
% p:augmentConstDecls
\item Some constants with names of form $\mathit{c}\mathtt{\_base\_first}$ or
$\mathit{c}\mathtt{\_base\_last}$ are used but not declared. Victor adds
declarations for such constants when they appear to be missing, for example
when the constant $\mathit{c}\mathtt{\_first}$ is declared (that is, $c$
without the $\mathtt{\_base}$ suffix) and the constant
$\mathit{c}\mathtt{\_base\_first}$ is not.
\item The \fdl{} files output by the Examiner are missing declarations of the
$\mathit{E}\_\_\mathtt{pos}$ and $\mathit{E}\_\_\mathtt{val}$ functions used
by each enumeration type $E$, including the implicitly declared
\texttt{character} type. These declarations are added in.
\item \fdl{} variables are considered as semantically the same as \fdl{}
constants. Each declaration of an \fdl{} variable $x$ is changed to a
constant declaration, and new declarations are added for names
\textit{x}\tttilde{} and \textit{x}\texttt{\%}. \fdl{} units use the names
\textit{x}\tttilde{} and \textit{x}\texttt{\%} to refer to the value of $x$
at procedure and loop starts respectively.
\item Occurrences of the \fdl{} operator \texttt{sqr(x)} are replaced by
\texttt{x ** 2}.
\item Distinct operators are introduced for the standard arithmetic
operations $+$, $\times$, $-$(unary), $-$(binary) over the integers and
reals, and an explicit coercion operator is introduced for converting
integers to reals.
\item Distinct relations are introduced for the inequality relations over
integers, reals, and enumeration types.
\item Distinct versions of the \fdl{} operator \texttt{abs(x)} are introduced
for the real and integer types. Defining axioms are added to the set of rules
for each unit.
\item A defining axiom is added for the \fdl{} predicate \texttt{odd(x)}.
\item Some characterising axioms are added for the \fdl{} operator
\texttt{bit\_\_or(x)}. No axioms are added yet for other bit-wise arithmetic
operators.
\item The \fdl{} language overloads the functions \texttt{succ} and
\texttt{pred} and inequality relations such as $<$ and $\leq$.
%
Distinct versions are introduced for the \fdl{} \texttt{integer} type and
each enumeration type, and declarations are added for each of these versions.
\item The Examiner outputs rules with implicitly quantified variables. Victor
infers the types of these variables and makes the quantifications explicit.
The explicit quantification is needed by all the provers to which Victor
interfaces.
\end{itemize}
%----------------------------------------------------------------------------
\subsection{Type checking}
%----------------------------------------------------------------------------
Victor type checks units after translation into standard form and after all
translation steps have been applied.
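As a small, hypothetical illustration of the standard form that is checked at
this point: an Examiner rule asserting \texttt{sqr(x) >= 0} for an implicitly
quantified \texttt{x} of \fdl{} type \texttt{integer} would, after the steps
above, carry an explicit quantifier, have \texttt{sqr} expanded, and use the
integer-specific version of the ordering relation, roughly
\[
\forall x : \mathit{integer}.\; x \;\texttt{**}\; 2 \;\geq_{\mathit{int}}\; 0
\]
where the subscript on $\geq$ is illustrative notation for the distinct
integer inequality relation introduced above, not Victor's actual internal
name.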
%----------------------------------------------------------------------------
\subsection{Enumerated type abstraction}
%----------------------------------------------------------------------------
\begin{description}
\optionb{abstract-enums}
Replace enumerated types with abstract types, introduce all enumeration
constants as normal constants, and keep all enumerated type axioms.
%
These axioms are introduced by the Examiner to characterise
enumerated-type-related functions such as $\mathit{E}\mathtt{\_\_val}$ and
$\mathit{E}\mathtt{\_\_pos}$ and can serve as a partial axiomatisation of the
introduced abstract types.
\optionb{elim-enums}
Replace each enumeration type $E$ with an integer subrange type
$\{0 \ldots k - 1 \}$ where $k$ is the number of enumeration constants in
$E$.
%
Declare each enumeration constant as a normal constant, and add an axiom
giving its integer value.
%
Delete all existing enumerated type axioms and add in new axioms
characterising enumerated-type-related functions such as
$\mathit{E}\mathtt{\_\_val}$ and $\mathit{E}\mathtt{\_\_pos}$.
\optionb{axiomatise-enums}
Replace each enumeration type $E$ with an uninterpreted type, and add axioms
characterising the uninterpreted type as isomorphic to the integer subrange
$\{0 \ldots k - 1 \}$ where $k$ is the number of enumeration constants in
$E$.
%
The added axioms replace the enumerated-type-related axioms introduced by the
Examiner and provide a full axiomatisation of the enumerated types.
\end{description}
%----------------------------------------------------------------------------
\subsection{Early array and record abstraction}
%----------------------------------------------------------------------------
\begin{description}
\optionb{abstract-arrays-records-early}
Enable abstraction at this point in the translation.
\end{description}
See Section~\ref{sec:late-arr-rec-abs} below for the rest of the options.
%----------------------------------------------------------------------------
\subsection{Separation of formulas and terms}
%----------------------------------------------------------------------------
In \fdl{}, formulas are just terms of type Boolean. Many provers require the
traditional first-order logic distinction between formulas and terms.
% The options here control the introduction of this distinction.
Victor calls the term-level Booleans \emph{bits}.
\begin{description}
\optionb{bit-type}
Enable separation.
\optionb{bit-type-bool-eq-to-iff}
Initially convert any equalities at Boolean type to `if and only if's.
\optionb{bit-type-with-ite}
Whenever possible, introduce instances of the `if-then-else' operator rather
than term-level versions of propositional logic operators and atomic
relations.
\optionb{bit-type-prefer-bit-vals}
A heuristic for controlling whether atomic relations are translated to
term-level (bit-valued) functions or first-order-logic formula-valued
relations.
%
With this heuristic, bit-valued functions are preferred.
\optionb{bit-type-prefer-props}
Another heuristic for controlling whether atomic relations are translated to
term-level (bit-valued) functions or first-order-logic formula-valued
relations. With this heuristic, formula (propositional) relations are
preferred.
If neither this option nor \texttt{-bit-type-prefer-bit-vals} is selected,
the default behaviour is to use a bit-valued function just when there are one
or more occurrences at the term level.
\optionb{trace-prop-to-bit-insertion}
Report in log file when a proposition-to-bit coercion (encoded using the
`if then else' operator) is added.
\optionb{trace-intro-bit-ops-and-rels}
Report in log file when a term-level function is introduced for a function
(either user-defined or built-in) that initially had Boolean value type.
\end{description}
NB: the \spark{} \fdl{} language has `bit' operators \texttt{bit\_\_or},
\texttt{bit\_\_and} and \texttt{bit\_\_xor}.
%
These \fdl{} operators take integers as arguments and return integers as
results. Their result values correspond to the correct unsigned binary result
for the respective operations on unsigned binary versions of the arguments.
Axioms on these operators capture the arithmetic properties of Boolean
operations on finite-length binary words.
%
If the Victor option \texttt{-abstract-bit-ops} is used, Victor introduces
operators \texttt{bit\_\_\_or}, \texttt{bit\_\_\_and} and
\texttt{bit\_\_\_xor}.
%
These operators work on the term-level Booleans introduced by Victor and are
distinct from the \spark{} \fdl{} bit operators.
%----------------------------------------------------------------------------
\subsection{Type refinement}
%----------------------------------------------------------------------------
\begin{description}
\optionb{refine-types}
Master control option for type refinement.
\optionb{refine-bit-eq-equiv}
Add in definitions for bit-valued non-trivial equivalence relations. Needed
when the \texttt{-bit-type-with-ite} option has not previously been selected.
\optionb{refine-int-subrange-type}
\optionb{refine-bit-type-as-int-subtype}
\optionb{refine-bit-type-as-int-quotient}
\optionb{refine-array-types-with-quotient}
\optionb{refine-array-types-with-weak-extension-constraint}
Constrain values of element and extended indices using a possibly non-trivial
equivalence relation on the element type. Default is to use equality to
constrain these values. Only applies if option
\texttt{-refine-array-types-with-quotient} is not selected.
\optionb{refine-uninterpreted-types}
Refine every uninterpreted type to be a predicate subtype of a new
uninterpreted type. Use this to ensure that a model exists in which every
uninterpreted type can be interpreted by some infinite set.
\optionb{no-subtyping-axioms}
Suppress generation of axioms for sub-typing properties of functions and
constants.
\optionb{no-functionality-axioms}
Suppress generation of axioms for functionality properties of functions and
relations.
\optionb{strong-subtyping-axioms}
Use subtyping axioms without constraints on values of arguments.
\optionb{trace-refine-types-quant-relativisation}
Report in log file whenever a quantifier is relativised.
\optionb{trace-refine-types-eq-refinement}
Report in log file whenever an equality relation is refined to a non-trivial
equivalence relation.
\optionb{trace-refine-types-bit-eq-refinement}
Report in log file whenever a term-level equality relation is refined to a
non-trivial term-level equivalence relation.
\end{description}
%----------------------------------------------------------------------------
\subsection{Late array and record abstraction}
%----------------------------------------------------------------------------
\label{sec:late-arr-rec-abs}
\begin{description}
\optionb{abstract-arrays-records-late}
Enable abstraction at this point in translation.
% Eliminate redundant operators %------------------------------ \optionb{elim-array-constructors} Eliminate all occurrences of array constructors \optionb{elim-record-constructors} Eliminate all occurrences of record constructors \optionb{abstract-record-updates} Introduce axiomatic characterisations for record update operators in terms of record constructors and record field selectors. % Add axioms defining types axiomatically %----------------------------------------- \optionb{add-array-select-update-axioms} Assumes that array constructors have first been eliminated. \optionb{add-array-extensionality-axioms} \optionb{add-record-select-constructor-axioms} Assumes that record update operators have first been eliminated. \optionb{add-record-constructor-extensionality-axioms} Add extensionality axioms involving record constructors and field select operators. \optionb{add-record-select-update-axioms} Assumes that record constructors have first been eliminated. \optionb{add-record-eq-elements-extensionality-axioms} Add extensionality axioms stating that records are equal just when all fields are equal. \optionb{use-array-eq-aliases} Introduce aliases for equalities at array types in order to help with matching extensionality axioms. \optionb{use-record-eq-aliases} Introduce aliases for equalities at record types in order to help with matching extensionality axioms. % Abstract operators and types %------------------------------ \optionb{abstract-array-select-updates} Change primitive array element select and update operators into uninterpreted functions. \optionb{abstract-array-types} Replace array types with uninterpreted types. \optionb{abstract-record-selects-constructors} Change primitive record field selectors and constructors into uninterpreted functions. \optionb{abstract-record-selects-updates} Change primitive record field selectors and field update operators into uninterpreted functions. \optionb{abstract-record-types} Replace record types with uninterpreted types. \end{description} %---------------------------------------------------------------------------- \subsection{Bit abstraction} %---------------------------------------------------------------------------- \begin{description} \optionb{abstract-bit-ops} Replace primitive bit-type operators with uninterpreted functions and add characterising axioms \optionb{abstract-bit-valued-eqs} Replace primitive bit-valued equality operators with uninterpreted functions and add characterising axioms \optionb{abstract-bit-valued-int-le} Replace primitive bit-valued integer inequality operators with uninterpreted functions and add characterising axioms \optionb{elim-bit-type-and-consts} Replace primitive bit type with either integer type or $\subrange{0}{1}$ subrange type, depending on whether type has been refined earlier or not. Replace primitive bit-type constants for true and false with $0$ and $1$. %\optionb{abstract-bit-type-and-consts} % Not implemented \end{description} %---------------------------------------------------------------------------- \subsection{Arithmetic simplification} %---------------------------------------------------------------------------- \begin{description} \optionb{elim-consts} % Eliminate integer constants. Rewrite all formulae using hypotheses of form $x = k$ or $x = -k$ where $x$ is an \fdl{} constant or variable, and $k$ is a natural number literal. This eliminates the apparent syntactic non-linearity of some hypotheses and conclusions. 
It is particularly useful for Yices which rejects formulae that appear non-linear. \optionb{ground-eval-exp} Evaluate occurrences of exponent function with natural number arguments. \optionb{ground-eval} Evaluate ground integer arithmetic expressions involving $+$, $-$ (unary and binary), $\times$, integer division, integer modulus, and the exponent function. \optionb{expand-exp-const} Expand natural-number powers of integer and real expressions into products, with special-case treatment for exponents 0 and 1. \optionb{arith-eval} Apply the rewrite rules \begin{eqnarray*} k \times (k' \times e) & = & kk' \times e \\ (k \times e) \times k' & = & kk' \times e \\ e \times k & = & k \times e \\ (k \times e) \times (k' \times e') & = & kk' \times (e \times e') \\ e \times (k \times e') & = & k \times (e \times e') \\ (k \times e) \times e' & = & k \times (e \times e') \\ (k \times e) \div k' & = & (k \div k') \times e \quad\mbox{if $k'$ divides $k$}. \end{eqnarray*} The main aim of these rules is to eliminate instances of the $\div$ operator. \optionb{sym-consts} Replace each distinct natural number literal greater than threshold \textit{t} with a new constant and assert axioms concerning how these new constants are ordered: if the new constants in increasing order are $c_1 \ldots c_n$, the axioms are $t < c_1, c_1 < c_2, \ldots, c_{n-1} < c_n$. This option is used to try to reduce the frequency of machine arithmetic overflow with Simplify. Other users of Simplify try thresholds of 100,000, though we've observed overflows with thresholds as low as 1000. \optionv{sym-prefix}{prefix} Set prefix for new symbolic number constants. Default prefix is \texttt{k\_\_\_}. \end{description} %---------------------------------------------------------------------------- \subsection{Arithmetic abstraction} %---------------------------------------------------------------------------- The different interfaces and provers vary in the classes of arithmetic operations they can handle. These options allow one to abstract to uninterpreted functions, possibly adding some characterising axioms, when operations cannot be handled. \begin{description} %- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \optionb{abstract-nonlin-times} %- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Abstract each integer and real multiplication unless at least one of the arguments is a fixed integer or real constant. An \emph{integer constant} is a natural number $n$ or the expression $-n$. A \emph{real constant} is built from an integer constant using the \texttt{to-real} coercion, unary minus on the reals, and, optionally real division. Real division is allowed just when the option \texttt{-abstract-real-div} is not chosen. The \yices{} API usually rejects individual hypothesis or conclusion formulas if they have non-linear multiplications. However, it does accept non-linear multiplications in quantified formulas, and will use linear instantiations of these formulas. % Unfortunately, it currently aborts on finding a non-linear instantiation rather than simply rejecting the instantiation. The \smtlib{} sub-logics \textsc{auflia} and \textsc{auflira} both require all multiplications to be linear. %- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \optionb{abstract-exp} %- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Replace occurrences of integer and real exponent operators by new uninterpreted functions. 
Currently no defining axioms are supplied, though it would be easy to do so. This abstraction only happens after possibly evaluating ground and constant exponent instances. Only the \cvcthree{}-via-API prover alternative can handle these operators directly. %- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \optionb{abstract-divmod} %- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Replace occurrences of integer division and modulus operators by new uninterpreted functions. % There are two alternative rule sets provided % that can be loaded using the \texttt{-rules} option. % \begin{itemize} % \item \texttt{divmod.rul}: Exactly characterises \texttt{div} and gives % bounds on \texttt{mod} values. % \item \texttt{divmod-full.rul} % Exactly characterises both \texttt{div} and \texttt{mod}. % These rules are probably more than is necessary in most cases. % % % They have not been tried recently. % \end{itemize} %- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \optionb{abstract-real-div} %- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Abstract occurrences of real division to a new uninterpreted function. No characterising axioms are currently provided. \yices-API, \cvcthree-API and \zthree-\smtlib{} all allow input with the real division operator, though it is not known what kinds of occurrences are accepted in each case. The official \smtlib{} logics involving reals do not allow real division. The assumption is that pre-processing has eliminated all occurrences of real division. Victor doesn't yet carry out such pre-processing. %- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \optionb{abstract-reals} %- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Abstract occurrences of real arithmetic operations ($+$, unary $-$, binary $-$, $*$, $/$), integer to real coercions, and real inequalities to new uninterpreted functions. Currently this is needed by the \smtlib{} and Simplify translations. The \smtlib{} driver does not attempt to make use of the limited support in some of the \smtlib{} sub-logics for reals. This option is not necessary when \cvcthree{} and \yices{} are invoked via their APIs, as both APIs support real arithmetic. \end{description} %---------------------------------------------------------------------------- \subsection{Final translation steps} %---------------------------------------------------------------------------- \begin{description} \optionb{elim-type-aliases} Normalise all occurrences of type identifiers in type, constant, function and relation declarations and in all formulas. % Normalisation eliminates all occurrences of type ids $T$ that have a definition $T \doteq T'$ where $T'$ is either a primitive atomic type (Boolean, integer, integer subrange, real or bit type) or is itself a type id. This is needed for the \smtlib{} and Simplify translations. \optionb{switch-types-to-int} Replace all occurrences of type identifiers in constant, function and relation declarations and in all formulas with the integer type. % Checks that every defined type is either an alias for another defined type or an alias for the integer type. % This translation step assumes that a countably infinite model exists for every uninterpreted type. This option is is needed for the Simplify translation. \optionb{lift-quants} Apply the rewrite rule \[ P \Implies \forall x:T.\ Q \quad\Iff\quad \forall x:T.\ P \Implies Q \] ($x$ not free in $P$) to all formulae. 
The quantifier instantiation heuristics in both \zthree{} and Simplify work better when universal quantifiers in hypotheses are all outermost. \optionb{strip-quantifier-patterns} Some of the universally-quantified axioms introduced by translation have trigger patterns giving hints on how instantiations can be guessed. % This option strips out these patterns. \end{description} %============================================================================ \section{CSV utilities} %============================================================================ These utilities are very useful for analysing and comparing results of Victor runs. %---------------------------------------------------------------------------- \subsection{Filter CSV records} %---------------------------------------------------------------------------- Usage: \begin{quote} \texttt{csvfilt} [\texttt{-v}] \textit{n str} [\textit{file}] \end{quote} Filter \csv{} records, returning on standard output just those with \textit{str} a substring of field \textit{n} (1-based). % If \texttt{-v} provided, then return those records without \textit{str} a substring of field \textit{n}. Records are drawn from file \textit{file} if it is supplied. If not, they are taken from standard input. %---------------------------------------------------------------------------- \subsection{Merge two CSV files} %---------------------------------------------------------------------------- Usage: \begin{quote} \texttt{csvmerge} \textit{file1 m1 \ldots{} mj file2 n1 \ldots{} nk} \end{quote} The files \textit{file1} and \textit{file2} must have the same number of records. This command merges corresponding records from the two files and outputs them on standard output. The merged records are composed from fields \textit{m1 \ldots{} mj} in the records in \textit{file1} and fields \textit{n1 \ldots{} nk} in the records in \textit{file2}. If \textit{j} = 0, all fields of \textit{file1} records are used. If \textit{k} = 0, all fields of \textit{file2} records are used. %---------------------------------------------------------------------------- \subsection{Project out fields of CSV records} %---------------------------------------------------------------------------- Usage: \begin{quote} \texttt{csvproj} \textit{n1 \ldots{} nk} [\textit{file}] \end{quote} Build new records from fields \textit{n1 \ldots{} nk} of the input records and output to standard output. Input records are drawn from file \textit{file} if it is supplied. If not, they are taken from standard input. %---------------------------------------------------------------------------- %\subsection{Intersecting CSV files} %---------------------------------------------------------------------------- Usage: \begin{quote} \texttt{csvisect} \textit{file1} \textit{file2} \end{quote} Print on standard output those records that occur in both \textit{file1} and \textit{file2}. Comparison of records currently just uses string equality, so it is sensitive to whitespace between record fields. 
%============================================================================ %\section{Known limitations} %============================================================================ % \begin{enumerate} % \item % \end{enumerate} %============================================================================ %\section{Known problems} %============================================================================ % \begin{enumerate} % \item % \end{enumerate} %============================================================================ \section{Future developments} %============================================================================ Anticipated by end of 2010 are \begin{itemize} \item Support for V2 of the \smtlib{} standard. One major benefit of this will be better support for VCs involving mixed real and integer arithmetic. Currently Victor does translate real arithmetic to V1.2 of the \smtlib{} format. However, V1.2 does not support well goals in which integers and reals are mixed. (For example, it does not define a function injecting the integers into the reals.) \item Support for outputing VCs for proof using the Isabelle/HOL theorem prover. The current release includes some preliminary code for this. Improved code has been developed and is waiting to be merged in. \item A fully-working Windows port. The current release has some code for this, but it is not yet fully tested. \end{itemize} % Items in the pipeline include % \begin{enumerate} % \cbstart % \item Improving support in \smtlib{} translation for reals. % \item API interface for \zthree. % \cbend % \item Output of VCs in format for reading into interactive % theorem provers such as PVS and HOL Light. % \item Exploiting bit-vector handling capabilities of \smt{} solvers. % \end{enumerate} \end{document} %%% Local Variables: %%% mode: latex %%% TeX-master: t %%% End: �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������spark-2012.0.deb/victor/vct/CHANGES.txt�������������������������������������������������������������0000644�0001750�0001750�00000002576�11753202341�016362� 0����������������������������������������������������������������������������������������������������ustar �eugen���������������������������eugen������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������Victor Changes ============== Changes made by Paul Jackson unless otherwise stated. Release 0.9.1, 15th December 2010 --------------------------------- * Corrects vc/README.txt. (Wrong file included in 0.9.0 release) * Allows task_type to be valid identifier in .vcg files [Altran Praxis] Release 0.9.0, 5th November 2010 -------------------------------- * Incorporates several minor changes to make compatible with Altran Praxis's SPARK toolkit tool-flow. [Altran Praxis, AdaCore, Paul Jackson] * Fixes several bugs Release 0.8.0, 26th April 2010 ------------------------------ * Adds interface for Isabelle/HOL. See src/isab-driver.cc. Release 0.7.2, 27th January 2010 -------------------------------- * Fixes minor bug in file context.cc. This was causing Victor to crash on Examiner VCs. 
* Rearranges slightly organisation of examples, so unit listing input files are now grouped with example VCs rather than included in run directory. Release 0.7.1, 7th January 2010 ------------------------------- * Fixes SMT-LIB file-level interface to accept output from Alt-Ergo. Release 0.7, 23rd December 2009 ------------------------------- * Major improvements to translation for SMT-LIB and Simplify interfaces. Release 0.6, 11th June 2009 --------------------------- * Adds support for SMT-LIB Release 0.5, 12th January 2009 ------------------------------ * First release ����������������������������������������������������������������������������������������������������������������������������������spark-2012.0.deb/lib/�������������������������������������������������������������������������������0000755�0001750�0001750�00000000000�11753202331�013202� 5����������������������������������������������������������������������������������������������������ustar �eugen���������������������������eugen������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������spark-2012.0.deb/lib/spark/�������������������������������������������������������������������������0000755�0001750�0001750�00000000000�11753202341�014323� 5����������������������������������������������������������������������������������������������������ustar �eugen���������������������������eugen������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������spark-2012.0.deb/lib/spark/spark_io.adb�������������������������������������������������������������0000644�0001750�0001750�00000055154�11753202341�016614� 0����������������������������������������������������������������������������������������������������ustar �eugen���������������������������eugen������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Unchecked_Deallocation; package body SPARK_IO is --# hide SPARK_IO; pragma Warnings ("Y"); -- Turn off warnings for Ada 2005 features -- File Management Standard_Input_Constant : constant File_Type := File_Type'(null, Stdin); Standard_Output_Constant : constant File_Type := File_Type'(null, Stdout); function Standard_Input return File_Type is begin return Standard_Input_Constant; end Standard_Input; function Standard_Output return File_Type is begin return Standard_Output_Constant; end Standard_Output; procedure Dispose is new Unchecked_Deallocation (Text_IO.File_Type, File_PTR); procedure Create (File : out File_Type; Name_Of_File : in String; Form_Of_File : in String; Status : out File_Status) is begin Create_Flex (File => File, Name_Length => Name_Of_File'Length, Name_Of_File => Name_Of_File, Form_Of_File => Form_Of_File, Status => Status); end Create; procedure Create_Flex (File : out File_Type; Name_Length : in Natural; Name_Of_File : in String; Form_Of_File : in String; Status : out File_Status) is begin File.File := new Text_IO.File_Type; Text_IO.Create (File.File.all, Text_IO.Out_File, Name_Of_File (Name_Of_File'First .. Name_Length), Form_Of_File); Status := Ok; exception when Text_IO.Status_Error => Status := Status_Error; Dispose (File.File); when Text_IO.Name_Error => Status := Name_Error; Dispose (File.File); when Text_IO.Use_Error => Status := Use_Error; Dispose (File.File); when Text_IO.Device_Error => Status := Device_Error; Dispose (File.File); when Standard.Storage_Error => Status := Storage_Error; Dispose (File.File); when Standard.Program_Error => Status := Program_Error; Dispose (File.File); end Create_Flex; procedure Open (File : out File_Type; Mode_Of_File : in File_Mode; Name_Of_File : in String; Form_Of_File : in String; Status : out File_Status) is begin Open_Flex (File => File, Mode_Of_File => Mode_Of_File, Name_Length => Name_Of_File'Length, Name_Of_File => Name_Of_File, Form_Of_File => Form_Of_File, Status => Status); end Open; procedure Open_Flex (File : out File_Type; Mode_Of_File : in File_Mode; Name_Length : in Natural; Name_Of_File : in String; Form_Of_File : in String; Status : out File_Status) is F_Mode : Text_IO.File_Mode; begin File.File := new Text_IO.File_Type; case Mode_Of_File is when In_File => F_Mode := Text_IO.In_File; when Out_File => F_Mode := Text_IO.Out_File; when Append_File => F_Mode := Text_IO.Append_File; end case; Text_IO.Open (File.File.all, F_Mode, Name_Of_File (Name_Of_File'First .. 
Name_Length), Form_Of_File); Status := Ok; exception when Text_IO.Status_Error => Status := Status_Error; Dispose (File.File); when Text_IO.Name_Error => Status := Name_Error; Dispose (File.File); when Text_IO.Use_Error => Status := Use_Error; Dispose (File.File); when Text_IO.Device_Error => Status := Device_Error; Dispose (File.File); when Standard.Storage_Error => Status := Storage_Error; Dispose (File.File); when Standard.Program_Error => Status := Program_Error; Dispose (File.File); end Open_Flex; procedure Close (File : in out File_Type; Status : out File_Status) is begin if File.File = null then Status := Status_Error; else Text_IO.Close (File.File.all); Dispose (File.File); Status := Ok; end if; exception when Text_IO.Status_Error => Status := Status_Error; Dispose (File.File); when Text_IO.Device_Error => Status := Device_Error; Dispose (File.File); when Constraint_Error => Status := Use_Error; Dispose (File.File); when Standard.Storage_Error => Status := Storage_Error; Dispose (File.File); when Standard.Program_Error => Status := Program_Error; Dispose (File.File); end Close; procedure Delete (File : in out File_Type; Status : out File_Status) is begin if File.File = null then Status := Status_Error; else Text_IO.Delete (File.File.all); Dispose (File.File); Status := Ok; end if; exception when Text_IO.Status_Error => Status := Status_Error; Dispose (File.File); when Text_IO.Use_Error => Status := Use_Error; Dispose (File.File); when Text_IO.Device_Error => Status := Device_Error; Dispose (File.File); when Constraint_Error => Status := Use_Error; Dispose (File.File); when Standard.Storage_Error => Status := Storage_Error; Dispose (File.File); when Standard.Program_Error => Status := Program_Error; Dispose (File.File); end Delete; procedure Reset (File : in out File_Type; Mode_Of_File : in File_Mode; Status : out File_Status) is F_Mode : Text_IO.File_Mode; begin if File.File = null then Status := Status_Error; else case Mode_Of_File is when In_File => F_Mode := Text_IO.In_File; when Out_File => F_Mode := Text_IO.Out_File; when Append_File => F_Mode := Text_IO.Append_File; end case; Text_IO.Reset (File.File.all, F_Mode); Status := Ok; end if; exception when Text_IO.Status_Error => Status := Status_Error; Dispose (File.File); when Text_IO.Use_Error => Status := Use_Error; Dispose (File.File); when Text_IO.Device_Error => Status := Device_Error; Dispose (File.File); when Standard.Storage_Error => Status := Storage_Error; Dispose (File.File); when Standard.Program_Error => Status := Program_Error; Dispose (File.File); end Reset; function Valid_File (File : File_Type) return Boolean is Valid : Boolean; begin case File.IO_Sort is when Stdin => Valid := True; when Stdout => Valid := True; when NamedFile => Valid := (File.File /= null); end case; return Valid; end Valid_File; function File_Ref (File : File_Type) return Text_IO.File_Type; function File_Ref (File : File_Type) return Text_IO.File_Type is begin case File.IO_Sort is when Stdin => return Text_IO.Standard_Input; when Stdout => return Text_IO.Standard_Output; when NamedFile => return File.File.all; end case; end File_Ref; function Is_Open (File : File_Type) return Boolean is begin return Valid_File (File) and then Text_IO.Is_Open (File_Ref (File)); end Is_Open; function Mode (File : File_Type) return File_Mode is F_Mode : File_Mode; begin if Is_Open (File) and then Text_IO.Is_Open (File_Ref (File)) then case Text_IO.Mode (File_Ref (File)) is when Text_IO.In_File => F_Mode := In_File; when Text_IO.Out_File => F_Mode := Out_File; 
when Text_IO.Append_File => F_Mode := Append_File; end case; else F_Mode := In_File; end if; return F_Mode; end Mode; function Is_In (File : File_Type) return Boolean; function Is_In (File : File_Type) return Boolean is begin return Is_Open (File) and then Mode (File) = In_File; end Is_In; function Is_Out (File : File_Type) return Boolean; function Is_Out (File : File_Type) return Boolean is begin return Is_Open (File) and then (Mode (File) = Out_File or Mode (File) = Append_File); end Is_Out; procedure Name (File : in File_Type; Name_Of_File : out String; Stop : out Natural) is begin if Is_Open (File) then declare FN : constant String := Text_IO.Name (File_Ref (File)); begin if Name_Of_File'Length >= FN'Length then Name_Of_File (FN'Range) := FN; Stop := FN'Length; else Name_Of_File := FN (Name_Of_File'Range); Stop := Name_Of_File'Length; end if; end; else Stop := Name_Of_File'First - 1; end if; exception when others => Stop := Name_Of_File'First - 1; end Name; procedure Form (File : in File_Type; Form_Of_File : out String; Stop : out Natural) is begin if Is_Open (File) then declare FM : constant String := Text_IO.Form (File_Ref (File)); begin if Form_Of_File'Length >= FM'Length then Form_Of_File (FM'Range) := FM; Stop := FM'Length; else Form_Of_File := FM (Form_Of_File'Range); Stop := Form_Of_File'Length; end if; end; else Stop := Form_Of_File'First - 1; end if; exception when others => Stop := Form_Of_File'First - 1; end Form; -- Line and file terminator control function P_To_PC (P : Positive) return Text_IO.Positive_Count; function P_To_PC (P : Positive) return Text_IO.Positive_Count is begin return Text_IO.Positive_Count (P); end P_To_PC; function PC_To_P (PC : Text_IO.Positive_Count) return Positive; function PC_To_P (PC : Text_IO.Positive_Count) return Positive is begin return Positive (PC); end PC_To_P; procedure New_Line (File : in File_Type; Spacing : in Positive) is Gap : Text_IO.Positive_Count; begin if Is_Out (File) then Gap := P_To_PC (Spacing); Text_IO.New_Line (File_Ref (File), Gap); end if; exception when others => null; end New_Line; procedure Skip_Line (File : in File_Type; Spacing : in Positive) is Gap : Text_IO.Positive_Count; begin if Is_In (File) then Gap := P_To_PC (Spacing); Text_IO.Skip_Line (File_Ref (File), Gap); end if; exception when others => null; end Skip_Line; procedure New_Page (File : in File_Type) is begin if Is_Out (File) then Text_IO.New_Page (File_Ref (File)); end if; exception when others => null; end New_Page; function End_Of_Line (File : File_Type) return Boolean is EOLN : Boolean; begin if Is_In (File) then EOLN := Text_IO.End_Of_Line (File_Ref (File)); else EOLN := False; end if; return EOLN; end End_Of_Line; function End_Of_File (File : File_Type) return Boolean is EOF : Boolean; begin if Is_In (File) then EOF := Text_IO.End_Of_File (File_Ref (File)); else EOF := True; end if; return EOF; end End_Of_File; procedure Set_Col (File : in File_Type; Posn : in Positive); procedure Set_Col (File : in File_Type; Posn : in Positive) is Col : Text_IO.Positive_Count; begin if Is_Open (File) then Col := P_To_PC (Posn); Text_IO.Set_Col (File_Ref (File), Col); end if; exception when others => null; end Set_Col; procedure Set_In_File_Col (File : in File_Type; Posn : in Positive) is begin if Is_In (File) then Set_Col (File, Posn); end if; end Set_In_File_Col; procedure Set_Out_File_Col (File : in File_Type; Posn : in Positive) is begin if Is_Out (File) then Set_Col (File, Posn); end if; end Set_Out_File_Col; function Col (File : File_Type) return Positive; 
function Col (File : File_Type) return Positive is Posn : Positive; Col : Text_IO.Positive_Count; begin if Is_Open (File) then Col := Text_IO.Col (File_Ref (File)); Posn := PC_To_P (Col); else Posn := 1; end if; return Posn; exception when Text_IO.Status_Error => return 1; when Text_IO.Layout_Error => return PC_To_P (Text_IO.Count'Last); when Text_IO.Device_Error => return 1; when Standard.Storage_Error => return 1; when Standard.Program_Error => return 1; end Col; function In_File_Col (File : File_Type) return Positive is begin if Is_In (File) then return Col (File); else return 1; end if; end In_File_Col; function Out_File_Col (File : File_Type) return Positive is begin if Is_Out (File) then return Col (File); else return 1; end if; end Out_File_Col; function Line (File : File_Type) return Positive; function Line (File : File_Type) return Positive is Posn : Positive; Line : Text_IO.Positive_Count; begin if Is_Open (File) then Line := Text_IO.Line (File_Ref (File)); Posn := PC_To_P (Line); else Posn := 1; end if; return Posn; exception when Text_IO.Status_Error => return 1; when Text_IO.Layout_Error => return PC_To_P (Text_IO.Count'Last); when Text_IO.Device_Error => return 1; when Standard.Storage_Error => return 1; when Standard.Program_Error => return 1; end Line; function In_File_Line (File : File_Type) return Positive is begin if Is_In (File) then return Line (File); else return 1; end if; end In_File_Line; function Out_File_Line (File : File_Type) return Positive is begin if Is_Out (File) then return Line (File); else return 1; end if; end Out_File_Line; -- Character IO procedure Get_Char (File : in File_Type; Item : out Character) is begin if Is_In (File) then Text_IO.Get (File_Ref (File), Item); else Item := Character'First; end if; exception when others => null; end Get_Char; procedure Put_Char (File : in File_Type; Item : in Character) is begin if Is_Out (File) then Text_IO.Put (File_Ref (File), Item); end if; exception when others => null; end Put_Char; procedure Get_Char_Immediate (File : in File_Type; Item : out Character; Status : out File_Status) is begin if Is_In (File) then Text_IO.Get_Immediate (File_Ref (File), Item); Status := Ok; else Item := Character'First; Status := Mode_Error; end if; exception when others => Item := Character'First; Status := End_Error; end Get_Char_Immediate; -- String IO procedure Get_String (File : in File_Type; Item : out String; Stop : out Natural) is LSTP : Natural; begin if Is_In (File) then LSTP := Item'First - 1; loop exit when End_Of_File (File); LSTP := LSTP + 1; Get_Char (File, Item (LSTP)); exit when LSTP = Item'Last; end loop; Stop := LSTP; else Stop := Item'First - 1; end if; end Get_String; -- CFR 718 The behaviour of Put_String is now as follows: -- If Stop is 0 then all characters in Item are output. -- If Stop <= Item'Last then output Item(Item'First .. Stop). -- If Stop > Item'Last then output all characters in Item, then pad with -- spaces to width specified by Stop. procedure Put_String (File : in File_Type; Item : in String; Stop : in Natural) is Pad : Natural; begin if Is_Out (File) then if Stop = 0 then Text_IO.Put (File_Ref (File), Item); elsif Stop <= Item'Last then Text_IO.Put (File_Ref (File), Item (Item'First .. 
Stop)); else Pad := Stop - Item'Last; Text_IO.Put (File_Ref (File), Item); while Pad > 0 loop Text_IO.Put (File_Ref (File), ' '); Pad := Pad - 1; end loop; end if; end if; exception when others => null; end Put_String; procedure Get_Line (File : in File_Type; Item : out String; Stop : out Natural) is begin if Is_In (File) then Text_IO.Get_Line (File_Ref (File), Item, Stop); else Stop := Item'First - 1; end if; exception when others => Stop := Item'First - 1; end Get_Line; procedure Put_Line (File : in File_Type; Item : in String; Stop : in Natural) is ES : Positive; begin if Stop = 0 then ES := Item'Last; else ES := Stop; end if; if Is_Out (File) then Text_IO.Put_Line (File_Ref (File), Item (Item'First .. ES)); end if; exception when others => null; end Put_Line; -- Integer IO package Integer_IO is new Text_IO.Integer_IO (Integer); procedure Get_Integer (File : in File_Type; Item : out Integer; Width : in Natural; Read : out Boolean) is begin if Is_In (File) then Integer_IO.Get (File_Ref (File), Item, Width); Read := True; else Read := False; end if; exception when others => Read := False; end Get_Integer; procedure Put_Integer (File : in File_Type; Item : in Integer; Width : in Natural; Base : in Number_Base) is begin if Is_Out (File) then Integer_IO.Put (File_Ref (File), Item, Width, Base); end if; exception when others => null; end Put_Integer; procedure Get_Int_From_String (Source : in String; Item : out Integer; Start_Pos : in Positive; Stop : out Natural) is begin Integer_IO.Get (Source (Start_Pos .. Source'Last), Item, Stop); exception when others => Stop := Start_Pos - 1; end Get_Int_From_String; procedure Put_Int_To_String (Dest : in out String; Item : in Integer; Start_Pos : in Positive; Base : in Number_Base) is begin Integer_IO.Put (Dest (Start_Pos .. Dest'Last), Item, Base); exception when others => null; end Put_Int_To_String; -- Float IO package Real_IO is new Text_IO.Float_IO (Float); procedure Get_Float (File : in File_Type; Item : out Float; Width : in Natural; Read : out Boolean) is begin if Is_In (File) then Real_IO.Get (File_Ref (File), Item, Width); Read := True; else Read := False; end if; exception when others => Read := False; end Get_Float; procedure Put_Float (File : in File_Type; Item : in Float; Fore : in Natural; Aft : in Natural; Exp : in Natural) is begin if Is_Out (File) then Real_IO.Put (File_Ref (File), Item, Fore, Aft, Exp); end if; exception when others => null; end Put_Float; procedure Get_Float_From_String (Source : in String; Item : out Float; Start_Pos : in Positive; Stop : out Natural) is begin Real_IO.Get (Source (Start_Pos .. Source'Last), Item, Stop); exception when others => Stop := Start_Pos - 1; end Get_Float_From_String; procedure Put_Float_To_String (Dest : in out String; Item : in Float; Start_Pos : in Positive; Aft : in Natural; Exp : in Natural) is begin Real_IO.Put (Dest (Start_Pos .. 
Dest'Last), Item, Aft, Exp); exception when others => null; end Put_Float_To_String; end SPARK_IO; ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������spark-2012.0.deb/lib/spark/spark_io_05.ads����������������������������������������������������������0000644�0001750�0001750�00000041165�11753202341�017136� 0����������������������������������������������������������������������������������������������������ustar �eugen���������������������������eugen������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ------------------------------------------------------------------------------- -- -- -- SPARK_IO -- -- -- -- Description -- -- This is a thick binding to package Ada.Text_IO -- -- -- -- Language -- -- Specification : SPARK -- -- Private Part : Ada 2005 -- -- Body : Ada 2005 -- -- -- -- Runtime Requirements and Dependencies -- -- Full Ada Runtime -- -- -- -- Verification -- -- N/A -- -- -- -- Exceptions -- -- None -- -- -- ------------------------------------------------------------------------------- package SPARK_IO_05 --# own State : State_Type; --# Inputs : Inputs_Type; --# Outputs : Outputs_Type; --# initializes State, --# Inputs, --# Outputs; is --# type State_Type is abstract; --# type Inputs_Type is abstract; --# type Outputs_Type is abstract; type File_Type is private; type File_Mode is (In_File, Out_File, Append_File); type File_Status is (Ok, Status_Error, Mode_Error, Name_Error, Use_Error, Device_Error, End_Error, Data_Error, Layout_Error, Storage_Error, Program_Error); subtype Number_Base is Integer range 2 .. 
16; function Standard_Input return File_Type; --# global in Inputs; function Standard_Output return File_Type; --# global in Outputs; Null_File : constant File_Type; --------------------- -- File Management -- --------------------- procedure Create (File : out File_Type; Name_Of_File : in String; Form_Of_File : in String; Status : out File_Status); --# global in out State; --# derives File, --# State, --# Status from Form_Of_File, --# Name_Of_File, --# State; --# declare delay; procedure Create_Flex (File : out File_Type; Name_Length : in Natural; Name_Of_File : in String; Form_Of_File : in String; Status : out File_Status); --# global in out State; --# derives File, --# State, --# Status from Form_Of_File, --# Name_Length, --# Name_Of_File, --# State; --# declare delay; procedure Open (File : out File_Type; Mode_Of_File : in File_Mode; Name_Of_File : in String; Form_Of_File : in String; Status : out File_Status); --# global in out State; --# derives File, --# State, --# Status from Form_Of_File, --# Mode_Of_File, --# Name_Of_File, --# State; --# declare delay; procedure Open_Flex (File : out File_Type; Mode_Of_File : in File_Mode; Name_Length : in Natural; Name_Of_File : in String; Form_Of_File : in String; Status : out File_Status); --# global in out State; --# derives File, --# State, --# Status from Form_Of_File, --# Mode_Of_File, --# Name_Length, --# Name_Of_File, --# State; --# declare delay; procedure Close (File : in out File_Type; Status : out File_Status); --# global in out State; --# derives State, --# Status from File, --# State & --# File from ; --# declare delay; procedure Delete (File : in out File_Type; Status : out File_Status); --# global in out State; --# derives State, --# Status from File, --# State & --# File from ; --# declare delay; procedure Reset (File : in out File_Type; Mode_Of_File : in File_Mode; Status : out File_Status); --# derives File, --# Status from File, --# Mode_Of_File; --# declare delay; function Valid_File (File : File_Type) return Boolean; -- This is a potentially blocking function. -- DO NOT CALL THIS FUNCTION FROM A PROTECTED OPERATION. function Mode (File : File_Type) return File_Mode; -- This is a potentially blocking function. -- DO NOT CALL THIS FUNCTION FROM A PROTECTED OPERATION. procedure Name (File : in File_Type; Name_Of_File : out String; Stop : out Natural); --# derives Name_Of_File, --# Stop from File; --# declare delay; procedure Form (File : in File_Type; Form_Of_File : out String; Stop : out Natural); --# derives Form_Of_File, --# Stop from File; --# declare delay; function Is_Open (File : File_Type) return Boolean; --# global State; -- This is a potentially blocking function. -- DO NOT CALL THIS FUNCTION FROM A PROTECTED OPERATION. 
----------------------------------------------- -- Control of default input and output files -- ----------------------------------------------- -- -- Not supported in SPARK_IO -- -------------------------------------------- -- Specification of line and page lengths -- -------------------------------------------- -- -- Not supported in SPARK_IO -- ----------------------------------- -- Column, Line and Page Control -- ----------------------------------- procedure New_Line (File : in File_Type; Spacing : in Positive); --# global in out Outputs; --# derives Outputs from *, --# File, --# Spacing; --# declare delay; procedure Skip_Line (File : in File_Type; Spacing : in Positive); --# global in out Inputs; --# derives Inputs from *, --# File, --# Spacing; --# declare delay; procedure New_Page (File : in File_Type); --# global in out Outputs; --# derives Outputs from *, --# File; --# declare delay; function End_Of_Line (File : File_Type) return Boolean; --# global Inputs; -- This is a potentially blocking function. -- DO NOT CALL THIS FUNCTION FROM A PROTECTED OPERATION. function End_Of_File (File : File_Type) return Boolean; --# global Inputs; -- This is a potentially blocking function. -- DO NOT CALL THIS FUNCTION FROM A PROTECTED OPERATION. procedure Set_In_File_Col (File : in File_Type; Posn : in Positive); --# global in out Inputs; --# derives Inputs from *, --# File, --# Posn; --# declare delay; --# pre Mode (File) = In_File; procedure Set_Out_File_Col (File : in File_Type; Posn : in Positive); --# global in out Outputs; --# derives Outputs from *, --# File, --# Posn; --# declare delay; --# pre Mode( File ) = Out_File or --# Mode (File) = Append_File; function In_File_Col (File : File_Type) return Positive; --# global Inputs; --# pre Mode (File) = In_File; -- This is a potentially blocking function. -- DO NOT CALL THIS FUNCTION FROM A PROTECTED OPERATION. function Out_File_Col (File : File_Type) return Positive; --# global Outputs; --# pre Mode (File) = Out_File or --# Mode (File) = Append_File; -- This is a potentially blocking function. -- DO NOT CALL THIS FUNCTION FROM A PROTECTED OPERATION. function In_File_Line (File : File_Type) return Positive; --# global Inputs; --# pre Mode (File) = In_File; -- This is a potentially blocking function. -- DO NOT CALL THIS FUNCTION FROM A PROTECTED OPERATION. function Out_File_Line (File : File_Type) return Positive; --# global Outputs; --# pre Mode (File) = Out_File or --# Mode (File) = Append_File; -- This is a potentially blocking function. -- DO NOT CALL THIS FUNCTION FROM A PROTECTED OPERATION. ---------------------------- -- Character Input-Output -- ---------------------------- procedure Get_Char (File : in File_Type; Item : out Character); --# global in out Inputs; --# derives Inputs, --# Item from File, --# Inputs; --# declare delay; procedure Put_Char (File : in File_Type; Item : in Character); --# global in out Outputs; --# derives Outputs from *, --# File, --# Item; --# declare delay; procedure Get_Char_Immediate (File : in File_Type; Item : out Character; Status : out File_Status); --# global in out Inputs; --# derives Inputs, --# Item, --# Status from File, --# Inputs; --# declare delay; -- NOTE. Only the variant of Get_Immediate that waits for a character to -- become available is supported. -- On return Status is one of Ok, Mode_Error or End_Error. 
See ALRM A.10.7 -- Item is Character'First if Status /= Ok ------------------------- -- String Input-Output -- ------------------------- procedure Get_String (File : in File_Type; Item : out String; Stop : out Natural); --# global in out Inputs; --# derives Inputs, --# Item, --# Stop from File, --# Inputs; --# declare delay; procedure Put_String (File : in File_Type; Item : in String; Stop : in Natural); --# global in out Outputs; --# derives Outputs from *, --# File, --# Item, --# Stop; --# declare delay; procedure Get_Line (File : in File_Type; Item : out String; Stop : out Natural); --# global in out Inputs; --# derives Inputs, --# Item, --# Stop from File, --# Inputs; --# declare delay; procedure Put_Line (File : in File_Type; Item : in String; Stop : in Natural); --# global in out Outputs; --# derives Outputs from *, --# File, --# Item, --# Stop; --# declare delay; -------------------------- -- Integer Input-Output -- -------------------------- -- SPARK_IO only supports input-output of -- the built-in integer type Integer procedure Get_Integer (File : in File_Type; Item : out Integer; Width : in Natural; Read : out Boolean); --# global in out Inputs; --# derives Inputs, --# Item, --# Read from File, --# Inputs, --# Width; --# declare delay; procedure Put_Integer (File : in File_Type; Item : in Integer; Width : in Natural; Base : in Number_Base); --# global in out Outputs; --# derives Outputs from *, --# Base, --# File, --# Item, --# Width; --# declare delay; procedure Get_Int_From_String (Source : in String; Item : out Integer; Start_Pos : in Positive; Stop : out Natural); --# derives Item, --# Stop from Source, --# Start_Pos; --# declare delay; procedure Put_Int_To_String (Dest : in out String; Item : in Integer; Start_Pos : in Positive; Base : in Number_Base); --# derives Dest from *, --# Base, --# Item, --# Start_Pos; --# declare delay; ------------------------ -- Float Input-Output -- ------------------------ -- SPARK_IO only supports input-output of -- the built-in real type Float procedure Get_Float (File : in File_Type; Item : out Float; Width : in Natural; Read : out Boolean); --# global in out Inputs; --# derives Inputs, --# Item, --# Read from File, --# Inputs, --# Width; --# declare delay; procedure Put_Float (File : in File_Type; Item : in Float; Fore : in Natural; Aft : in Natural; Exp : in Natural); --# global in out Outputs; --# derives Outputs from *, --# Aft, --# Exp, --# File, --# Fore, --# Item; --# declare delay; procedure Get_Float_From_String (Source : in String; Item : out Float; Start_Pos : in Positive; Stop : out Natural); --# derives Item, --# Stop from Source, --# Start_Pos; --# declare delay; procedure Put_Float_To_String (Dest : in out String; Item : in Float; Start_Pos : in Positive; Aft : in Natural; Exp : in Natural); --# derives Dest from *, --# Aft, --# Exp, --# Item, --# Start_Pos; --# declare delay; private --# hide SPARK_IO_05; type File_Descriptor; type File_Type is access all File_Descriptor; Null_File : constant File_Type := null; end SPARK_IO_05; 
spark-2012.0.deb/lib/spark/errors.htm
<html> <head> <title>SPARK Errors

    *** Syntax Error : ";" expected.

    If this is reported at the end of the input file, it may well be caused by the misspelling of an identifier in a hide directive. The parser then skips all the following text looking for the misspelled identifier, but finds the end of the file first, at which point it reports a syntax error.

    *** Syntax Error : No APRAGMA can be start with reserved word "IS".

    This can occur when a stub for an embedded subprogram is wrongly terminated by a semicolon.

    *** Syntax Error : No complete PROCEDURE_SPECIFICATION can be followed by ANNOTATION_START here.

    This can occur when the reserved word body has been omitted from the declaration of a package body. This error will occur at the annotation placed between the specification and the reserved word is of the first subprogram.

    *** Syntax Error : No complete PROCEDURE_SPECIFICATION can be followed by reserved word "IS" here.

    This can occur when the reserved word body has been omitted from the declaration of a package body. This error will occur at the reserved word is which introduces the body of the first subprogram.
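
    Both of the preceding two messages commonly arise from the same slip. The fragment below (package, procedure and variable names invented for illustration) is a sketch, not an extract from a real program; it shows where each message is reported:

        package Stacks is                  -- "body" omitted: intended to be  package body Stacks is
           procedure Push (X : in Integer)
           --# global in out State;        -- the first message is reported at this annotation
           is                              -- the second is reported here, at "is", if no annotation is present
           begin
              State := State + X;          -- (State would be an own variable of Stacks)
           end Push;
        end Stacks;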

    *** Syntax Error : reserved word "INHERIT" expected.

    This occurs where the annotation on a subprogram body is placed after the reserved word is instead of before it.
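
    A minimal sketch of the correct placement (procedure name and annotation invented): the annotation belongs between the subprogram specification and the reserved word is.

        procedure Increment (X : in out Integer)
        --# derives X from X;      -- placed before "is": accepted
        is
        begin
           X := X + 1;
        end Increment;

    Writing the derives annotation after is instead triggers the message above.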

    *** Syntax Error : No complete SIMPLE_EXPRESSION can be followed by ")" here.

    This can occur in an aggregate expression when a mixture of named and positional association is used.

    *** Syntax Error : No complete SIMPLE_EXPRESSION can be followed by "," here.

    This can occur in an aggregate expression when a mixture of named and positional association is used.
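
    Both of the two preceding messages typically point at an aggregate that mixes the two forms of association. A small sketch (type and object names invented):

        subtype Index_T is Integer range 1 .. 3;
        type Vector is array (Index_T) of Integer;

        V : constant Vector := Vector'(1 => 0, 0, 0);            -- rejected: mixes named and positional
        W : constant Vector := Vector'(0, 0, 0);                  -- positional associations only
        X : constant Vector := Vector'(1 => 0, 2 => 0, 3 => 0);   -- named associations only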

    *** Semantic Error : 1 : The identifier YYY.XXX is either undeclared or not visible at this point.

    If the identifier is declared in a separate (or parent) package, the package must be included in an inherit clause and the identifier prefixed with the package name. Ensure that there are no errors in the declaration of the identifier.
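
    As an illustrative sketch (package and constant names invented), an identifier declared in another package needs both an inherit clause and a package-name prefix before it can be referenced:

        with Limits;
        --# inherit Limits;
        package Config is
           Max_Users : constant Integer := Limits.Max;   -- without the inherit clause or the
        end Config;                                      -- Limits prefix, this error is reported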

    *** Semantic Error : 2 : XXX does not denote a formal parameter for YYY.

    *** Semantic Error : 3 : Incorrect number of actual parameters for call of subprogram XXX.

    *** Semantic Error : 4 : More than one parameter association is given for formal parameter XXX.

    *** Semantic Error : 5 : Illegal use of identifier XXX.

    Usually associated with the use of an identifier other than a package name as a prefix in a selected component.

    *** Semantic Error : 6 : Identifier XXX is not the name of a variable.

    *** Semantic Error : 7 : Identifier XXX is not the name of a procedure.

    *** Semantic Error : 8 : There is no field named XXX in this entity.

    Issued when the selector in a selected component of a record references a non-existent field.

    *** Semantic Error : 9 : Selected components are not allowed for XXX.

    Occurs if the prefix to a selected component representing a procedure in a procedure call statement or a type mark is not a package. Also occurs if a selector is applied in an expression to an object which is not a record variable.

    *** Semantic Error : 10 : Illegal redeclaration of identifier XXX.

    *** Semantic Error : 11 : There is no package declaration for XXX.

    Issued if a package body is encountered for which there is no package specification.

    *** Semantic Error : 12 : Own variable XXX can only be completed by a variable declaration, not a constant.

    If the object in question is really a constant, then remove it from the enclosing package's own variable annotation.
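
    A minimal sketch of the situation (names invented): the own variable announced in the specification is completed by a constant in the body:

        package Counter
        --# own Count;
        is
        end Counter;

        package body Counter is
           Count : constant Integer := 0;   -- rejected: a constant cannot complete own variable Count
           -- Count : Integer := 0;         -- a variable declaration is what is required
        end Counter;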

    *** Semantic Error : 13 : A body for subprogram XXX has already been declared.

    *** Semantic Error : 14 : Illegal parent unit name.

    Issued if the name in a "separate" clause of a subunit does not correctly identify a compilation unit. Common causes of this error are a syntax error in the parent unit or omitting the parent unit specification and/or parent unit body entries from the index file.

    *** Semantic Error : 15 : The stub for XXX is either undeclared or cannot be located.

    Common causes of this error are an error in the declaration of the stub or the omission of the parent unit body from the index file.

    *** Semantic Error : 16 : A body for package XXX has already been declared.

    *** Semantic Error : 17 : A body stub for package XXX has already been declared.

    *** Semantic Error : 18 : Identifier XXX is not the name of a package.

    *** Semantic Error : 19 : Identifier XXX is not the name of a procedure.

    *** Semantic Error : 20 : Illegal operator symbol.

    Issued if a renaming declaration contains a non-existent operator.

    *** Semantic Error : 21 : This entity is not an array.

    Issued if an attempt is made to index into a name which does not represent an array.

    *** Semantic Error : 22 : The type in this declaration is not consistent with the previous declaration of XXX.

    Occurs when the type given in the Ada declaration of an own variable differs from that "announced" in the package's own variable clause.
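
    For instance (names invented), where an own variable is announced with a type mark, the later Ada declaration must use that same type:

        package Sensor
        --# own State : Status_T;
        is
           type Status_T is (Off, On);
        end Sensor;

        package body Sensor is
           State : Integer;      -- rejected: differs from the announced type Status_T
           -- State : Status_T;  -- consistent with the own variable clause
        end Sensor;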

    *** Semantic Error : 23 : No parameter association is given for formal parameter XXX.

    *** Semantic Error : 24 : The identifier XXX (exported by called subprogram) is not visible at this point.

    When a procedure is called, any global variables exported by that procedure must be visible at the point of call. This error message indicates that the global variable concerned is not visible. It may be that it needs to be added to the global annotation of the procedure containing the call (or of some further enclosing subprogram), or it may be that an inherit clause is missing from the package containing the call.

    *** Semantic Error : 25 : The identifier XXX (imported by called subprogram) is not visible at this point.

    When a procedure is called, any global variables imported by that procedure must be visible at the point of call. This error message indicates that the global variable concerned is not visible. It may be that it needs to be added to the global annotation of the subprogram containing the call (or of some further enclosing subprogram), or it may be that an inherit clause is missing from the package containing the call.
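
    Both of the two preceding messages arise at the point of call. A sketch (package, procedure and variable names invented), assuming the called procedure Log.Append imports and exports the own variable Log.State:

        procedure Tick
        --# global in out Log.State;   -- the called procedure's global must also be announced
        is                             -- here, and package Log must be inherited, or these
        begin                          -- errors are reported at the call
           Log.Append;
        end Tick;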

    *** Semantic Error : 26 : The deferred constant XXX does not have an associated full definition.

    Issued at the end of a package specification if no full declaration has been supplied for a deferred constant declared in the package specification.

    *** Semantic Error : 27 : The private type XXX does not have an associated full definition.

    Issued at the end of a package specification if no full declaration has been supplied for a private type declared in the package specification.

    *** Semantic Error : 28 : The own variable XXX does not have a definition.

    Issued at the end of a package body if an own variable announced in the package specification has neither been given an Ada declaration nor refined.

    *** Semantic Error : 29 : The subprogram XXX, declared in the package specification, does not have an associated body.

    *** Semantic Error : 30 : Attribute XXX is not yet implemented in the Examiner.

    The attribute is identified in Annex K of the SPARK 95 report as a valid SPARK 95 attribute but the Examiner does not currently support it. It is possible to work round the omission by putting the use of the attribute inside a suitable function which is hidden from the Examiner.

    *** Semantic Error : 31 : The prefix of this attribute is not an object or type.

    *** Semantic Error : 32 : Illegal type conversion.

    Likely causes are type conversions involving record types or non-convertible arrays.

    *** Semantic Error : 33 : Illegal aggregate.

    Issued if the prefix of an aggregate is not a composite type.

    *** Semantic Error : 34 : Illegal procedure call.

    Issued if a call is made to a user-defined subprogram in a package initialization part.

    *** Semantic Error : 35 : Binary operator is not declared for types XXX and YYY.

    Indicates use of an undeclared binary operator; this message means that the types on each side of the operator cannot appear together with the operator used, e.g. attempting to add an integer to an enumeration literal.

    *** Semantic Error : 36 : Expression is not static.

    *** Semantic Error : 37 : Expression is not constant.

    *** Semantic Error : 38 : Expression is not of the expected type.

    *** Semantic Error : 39 : Illegal use of unconstrained type.

    An unconstrained array type or variable of such a type is illegally used. Use of unconstrained arrays in SPARK is limited to passing them as parameters, indexing into them and taking attributes of them. This message also arises if a string literal is used as an actual parameter where the formal parameter is a string subtype. In this case, the error can be removed by qualifying the string literal with the subtype name.
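
    A sketch of the string-literal case mentioned above (subtype and subprogram names invented); the calls would appear inside some enclosing subprogram:

        subtype Name_T is String (1 .. 4);
        procedure Log_Name (N : in Name_T);

        Log_Name ("ABCD");            -- rejected: the literal is treated as unconstrained String
        Log_Name (Name_T'("ABCD"));   -- accepted: the literal is qualified with the subtype name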

    *** Semantic Error : 40 : Numeric or Time_Span type required.

    This operator is only defined for numeric types and, if the Ravenscar Profile is selected, for type Ada.Real_Time.Time_Span.

    *** Semantic Error : 41 : Array type required.

    Issued if a subtype declaration taking the form of a constrained subtype of an unconstrained array type is encountered but with a type mark which does not represent an array.

    *** Semantic Error : 42 : Incompatible types.

    Issued when a name represents an object which is not of the required type.

    *** Semantic Error : 43 : Range is not constant.

    *** Semantic Error : 44 : Scalar type required.

    The bounds of an explicit range must be scalar types.

    *** Semantic Error : 45 : Range is not static.

    *** Semantic Error : 46 : Discrete type required.

    *** Semantic Error : 47 : The definition of this type contains errors which may make this array definition invalid.

    Issued if an array type definition is encountered where one or more of the index types used in the definition contained errors in their original declarations. For example, SPARK requires array index bounds to be constant (known at compile time), so an attempt to use an illegal subtype with variable bounds as an array index will generate this message.
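
    For example (names invented), an index subtype whose bound is not constant makes the array definition invalid:

        N : Integer := 10;                               -- a variable, not a constant
        subtype Index_T is Integer range 1 .. N;         -- itself illegal: upper bound is not constant
        type Table_T is array (Index_T) of Integer;      -- this error is then reported here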

    *** Semantic Error : 49 : Attribute XXX takes only one argument.

    Only SPARK 95 attributes 'Min and 'Max require two arguments.

    *** Semantic Error : 50 : Initializing expression must be constant.

    To assign a non-constant expression to a variable, an assignment statement in the body of the program unit (following the 'begin') must be used.
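
    For example (names invented):

        procedure Copy (X : in Integer; Y : out Integer)
        --# derives Y from X;
        is
           Temp : Integer := X;    -- rejected: the initializing expression is not constant
        begin                      -- declare Temp without an initial value and write
           Y := Temp;              -- Temp := X; as the first statement after "begin" instead
        end Copy;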

    *** Semantic Error : 51 : Arrays may not be ordered.

    Issued if an ordering operator such as "<" is encountered between objects of an array type other than string or a constrained subtype of string.

    *** Semantic Error : 52 : Only Scalar, String and Time types may be ordered.

    Ordering operators are only defined for scalar types and type String and, if the Ravenscar Profile is selected, for types Time and Time_Span in package Ada.Real_Time.
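
    Both restrictions can be seen in a sketch such as the following (type and function names invented):

        subtype Index_T is Integer range 1 .. 3;
        type Vec_T is array (Index_T) of Integer;

        function Before (A : Vec_T; B : Vec_T) return Boolean
        is
        begin
           return A < B;   -- rejected: arrays other than subtypes of String may not be ordered
        end Before;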

    *** Semantic Error : 53 : Illegal others clause.

    In SPARK, record aggregates may not contain an others clause.
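
    A small sketch (record and object names invented):

        type Pair is record
           A : Integer;
           B : Integer;
        end record;

        P : constant Pair := Pair'(others => 0);      -- rejected: others clause in a record aggregate
        Q : constant Pair := Pair'(A => 0, B => 0);   -- accepted: every field named explicitly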

    *** Semantic Error : 54 : Illegal attribute: XXX.

    Issued when an attribute not supported by SPARK is used.

    *** Semantic Error : 55 : Attribute XXX takes no argument.

    *** Semantic Error : 56 : Argument expected.

    *** Semantic Error : 57 : Fixed type definition must have associated range constraint.

    *** Semantic Error : 58 : XXX expected, to repeat initial identifier.

    Occurs at the end of a package, subprogram, protected type, task type or loop if the terminal identifier does not match the name or label originally given.

    *** Semantic Error : 59 : Composite subtype definition may not have associated range constraint.

    A subtype of the form applicable to a subrange of a scalar type has been encountered but the type provided is not a scalar type.

    *** Semantic Error : 60 : Illegal choice in record aggregate.

    In SPARK, record aggregates may not contain multiple choices; each field must be assigned a value individually.
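
    For a record with integer fields A and B (names invented, as in the earlier sketch):

        R : constant Pair := Pair'(A | B => 0);       -- rejected: multiple choices in one association
        S : constant Pair := Pair'(A => 0, B => 0);   -- accepted: each field given its own value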

    *** Semantic Error : 61 : Illegal occurrence of body stub - a body stub may only occur in a compilation unit.

    *** Semantic Error : 62 : A body for the embedded package XXX is required.

    Issued if an embedded package declares subprograms or own variables and no body is provided.

    *** Semantic Error : 63 : XXX is not a type mark.

    *** Semantic Error : 64 : Parameters of function subprograms must be of mode in.

    *** Semantic Error : 65 : Formal parameters of renamed operators may not be renamed.

    The names of the parameters used in renaming declarations may not be altered from Left, Right for binary operators and Right for unary operators. These are the names given for the parameters in the ARM and the SPARK Definition requires that parameter names are not changed.

    *** Semantic Error : 66 : Unexpected package initialization - no own variables of package XXX require initialization.

    Either the package does not have an initializes annotation or all the own variables requiring initialization were given values at the point of declaration.

    *** Semantic Error : 67 : Illegal machine code insertion. Machine code functions are not permitted in SPARK 83.

    This is an Ada 83 rule. Machine code can only be used in procedures.

    *** Semantic Error : 68 : Illegal operator renaming - operators are defined on types not subtypes.

    Issued if an attempt is made to rename an operator using a subtype of the type for which it was originally implicitly declared.

    *** Semantic Error : 69 : pragma XXX has two parameters.

    *** Semantic Error : 70 : pragma Import expected.

    *** Semantic Error : 70 : pragma Interface expected.

    *** Semantic Error : 71 : This expression does not represent the expected subprogram or variable name XXX.

    Issued if the name supplied in a pragma interface, import or attach_handler does not match the name of the associated subprogram or variable.

    *** Semantic Error : 72 : Unexpected pragma Import.

    Pragma import may only occur in a body stub, or immediately after a subprogram declaration in the visible part of a package, or immediately after a variable declaration.

    *** Semantic Error : 72 : Unexpected pragma Interface.

    Pragma interface may only occur in a body stub or immediately after a subprogram declaration in the visible part of a package.

    *** Semantic Error : 73 : XXX has already been declared or refined.

    Issued if an Ada declaration is given for an own variable which has been refined, or in a refinement clause if an own variable is refined more than once.

    *** Semantic Error : 74 : XXX does not occur in the package own variable list.

    A subject of a refinement definition of a package must be an own variable of that package.

    *** Semantic Error : 75 : Illegal use of inherited package.

    Issued if an attempt is made to refine an own variable onto an own variable of a non-embedded package.

    *** Semantic Error : 76 : Identifier XXX is already declared and cannot be the name of an embedded package.

    Issued when a refinement clause in a package body attempts to name an embedded package own variable as a refinement constituent and the name given for the embedded package is already in use.

    *** Semantic Error : 77 : Variable XXX should occur in this own variable clause.

    Occurs in the own variable clause of a package embedded in another package if an own variable which is a refinement constituent of an own variable of the enclosing package is omitted.

    *** Semantic Error : 78 : Initialization of own variable XXX is ineffective.

    Issued if an own variable occurs in the initialization clause of an embedded package and the own variable concerned is a refinement constituent of another own variable which is not listed in the initialization specification of its package.

    *** Semantic Error : 79 : Variable XXX should occur in this initialization specification.

    Occurs in the initialization clause of a package embedded in another package if an own variable which is a refinement constituent of an initialized own variable of the enclosing package is omitted.

    *** Semantic Error : 80 : Unexpected own variable clause - no variable in this clause is a refinement constituent.

    *** Semantic Error : 81 : Own variable clause expected - own variables of this package occur as refinement constituents.

    *** Semantic Error : 82 : Unexpected initialization specification - no own variables of this package require initialization.

    An own variable initialization clause and that of its refinement constituents must be consistent.

    *** Semantic Error : 83 : Initialization specification expected - own variables of this package require initialization.

    Issued if an own variable does not occur in the initialization clause of an embedded package and the own variable concerned is a refinement constituent of another own variable which is listed in the initialization clause of its package.

    *** Semantic Error : 84 : The refinement constituent XXX does not have a declaration.

    Issued at the end of a package if a refinement constituent of a refined own variable has not been given an Ada declaration or further refined.

    *** Semantic Error : 85 : XXX is not a constituent of any abstract own variable appearing in the earlier global definition for this subprogram.

    A variable XXX which has occurred in a refined global annotation is neither a variable that occurred in the earlier global definition nor a refinement constituent of any such variable.

    *** Semantic Error : 86 : At least one constituent of XXX was expected in this refined global definition.

    If the global annotation of a procedure specification contains an own variable and that own variable is later refined then at least one refinement constituent of the own variable shall appear in the second global annotation supplied for the procedure body.

    *** Semantic Error : 87 : Refined global definition expected for subprogram XXX.

    A global definition containing abstract own variables was given in the definition for subprogram XXX, in a package specification. A refined global definition is required in the package body.
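
    The following sketch (all names invented) shows the refinement pattern that messages 73 to 87 relate to: the abstract own variable State is refined onto constituents A and B in the body, and the body of Reset carries refined global and derives annotations:

        package Store
        --# own State;
        --# initializes State;
        is
           procedure Reset;
           --# global  out State;
           --# derives State from ;
        end Store;

        package body Store
        --# own State is A, B;           -- refinement clause
        is
           A : Integer := 0;
           B : Integer := 0;

           procedure Reset
           --# global  out A, B;         -- refined global definition
           --# derives A, B from ;       -- refined dependency relation
           is
           begin
              A := 0;
              B := 0;
           end Reset;
        end Store;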

    *** Semantic Error : 88 : Variable XXX is not a refinement constituent.

    *** Semantic Error : 89 : XXX is not a private type declared in this package.

    *** Semantic Error : 90 : This operator may not be applied to ranges.

    *** Semantic Error : 91 : Ranges may not be assigned.

    *** Semantic Error : 92 : Named association may not be used here.

    *** Semantic Error : 93 : Number of index expressions differs from number of dimensions of array XXX.

    *** Semantic Error : 94 : Condition is not boolean.

    Issued anywhere a boolean expression is required (e.g. in if, exit and while statements) and the expression provided is not of type boolean.

    *** Semantic Error : 95 : Type mark expected.

    *** Semantic Error : 96 : Attribute XXX is not valid with this prefix.

    *** Semantic Error : 97 : Attribute BASE may only appear as a prefix.

    'BASE may only be used as a prefix to another attribute.

    *** Semantic Error : 98 : This expression is not a range.

    *** Semantic Error : 99 : Unconstrained array expected.

    Occurs if a subtype is declared of an array which is already constrained.

    *** Semantic Error : 100 : Floating point type mark expected.

    *** Semantic Error : 101 : Fixed point type mark expected.

    *** Semantic Error : 102 : This is not the name of a field of record XXX.

    *** Semantic Error : 103 : A value has already been supplied for field XXX.

    *** Semantic Error : 104 : No value has been supplied for field XXX.

    *** Semantic Error : 105 : More values have been supplied than number of fields in record XXX.

    *** Semantic Error : 106 : Range is not of the expected type.

    *** Semantic Error : 107 : Expression is not of the expected type. Actual type is XXX. Expected type is YYY.

    *** Semantic Error : 108 : Expression is not of the expected type. Expected any Integer type.

    *** Semantic Error : 109 : Expression is not of the expected type. Expected any Real type.

    *** Semantic Error : 110 : Use type clauses following an embedded package are not currently supported by the Examiner.

    *** Semantic Error : 111 : Package renaming is not currently supported by the Examiner.

    *** Semantic Error : 112 : A use type clause may not appear here. They are only permitted as part of a context clause or directly following an embedded package specification.

    *** Semantic Error : 113 : Private subprogram declarations are not permitted in SPARK 83.

    Private subprograms would not be callable in SPARK 83 and are therefore not permitted; they may be declared and called in SPARK 95.

    *** Semantic Error : 114 : Subtype mark or Range may not be used in an expression in this context.

    A subtype mark or an explicit Range attribute may not be used in a context where a simple expression is expected.

    *** Semantic Error : 115 : In a package body, an own variable annotation must include one or more refinement constituents.

    Annotation should be of the form 'own S is A, B, C;'.

    *** Semantic Error : 116 : View conversion to own type is not permitted in target of an assignment.

    *** Semantic Error : 117 : Aggregate must be qualified with subtype mark.

    Aggregates are qualified expressions so they must be prefixed with a subtype mark. An exception is made in the case of aggregate assignments to unconstrained arrays as the rules of Ada do not permit unconstrained array aggregates to be qualified.
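
    For illustration (names invented), a qualified aggregate for a constrained array type, together with the unqualified form that is rejected:

        package Vectors
        is
           subtype Index is Integer range 1 .. 3;
           type Triple is array (Index) of Integer;

           Zero : constant Triple := Triple'(others => 0);   -- accepted: qualified
           -- Bad : constant Triple := (others => 0);         -- rejected with this error
        end Vectors;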

    *** Semantic Error : 118 : Aggregate assignment to unconstrained multi-dimensional array not permitted.

    Unqualified aggregates may only be used in assignments to one-dimensional unconstrained arrays. SPARK does not permit aggregate assignment to multi-dimensional unconstrained arrays.

    *** Semantic Error : 119 : Unary operator is not declared for type XXX.

    Indicates use of an undeclared unary operator; this message means that the type on the right hand side of the operator cannot appear with the operator used. e.g. attempting to negate an enumeration literal.

    *** Semantic Error : 120 : Pragma import not allowed here because variable XXX is already initialized. See ALRM B.1(24).

    *** Semantic Error : 121 : 'Flow_Message' or 'Warning_Message' expected.

    The identifier indicating what kind of message to justify must be either 'Flow_Message' or 'Warning_Message' or some unique abbreviation of them such as 'Fl' or even 'F'. Case is ignored.

    *** Semantic Error : 122 : Error or warning number expected.

    This item should be an integer literal representing the error or warning message that is being marked as expected.

    *** Semantic Error : 123 : This warning number may not appear in an accept annotation.

    It does not make sense to allow certain warnings to be justified with the accept annotation. In particular, attempting to justify warnings raised by the justification system itself could lead to some special kind of recursive hell that we would not wish to enter.

    *** Semantic Error : 124 : Incorrect number of names in accept annotation: should be 0.

    This class of error does not reference any variables, and therefore requires no names.

    *** Semantic Error : 125 : Incorrect number of names in accept annotation: should be 1.

    This class of error references one variable, and therefore requires one name.

    *** Semantic Error : 126 : Incorrect number of names in accept annotation: should be 2.

    This class of error references two variables, and therefore requires two names. Two names are needed to justify expected information flow messages such as "X is not derived from Y". Note that for messages of this kind the accept annotation should list the names in the order "export, import".

    *** Semantic Error : 127 : Incorrect number of names in accept annotation: should be 0 or 1.

    This class of error references either zero or one variable, and therefore requires either zero or one name. An ineffective assignment error requires the name of the variable being assigned to. An ineffective statement error has no name associated with it.

    *** Semantic Error : 128 : Incorrect number of names in accept annotation: should be 1 or 2.

    This class of error references either one or two variables, and therefore requires either one or two names. One name is required when the export is a function return value.
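
    As a sketch only (the message number, variable name and justification text are purely illustrative), an accept annotation naming the single variable concerned might look like this, placed immediately before the statement whose flow message is being justified:

        --# accept Flow_Message, 10, X, "the assignment to X is intentionally ineffective";
        X := X;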

    *** Semantic Error : 129 : Assignment to view conversion is not currently implemented.

    *** Semantic Error : 130 : A type from the current package should not appear in a use type clause.

    *** Semantic Error : 131 : The package name XXX should appear in a with clause preceding the use type clause.

    *** Semantic Error : 132 : The unit name or the name of an enclosing package of the unit should not appear in its with clause.

    A package should not 'with' itself and a subunit should not 'with' the package (or main program) which declares its stub.

    *** Semantic Error : 133 : Name in with clause is locally redeclared.

    *** Semantic Error : 134 : A package name should not appear in its own inherit clause.

    *** Semantic Error : 135 : The package XXX is undeclared or not visible, or there is a circularity in the list of inherited packages.

    Possible causes of this error are an error in the inherited package specification, omitting an entry for the package specification from the index file, or circular inheritance.

    *** Semantic Error : 136 : The own variable XXX is not declared in the own variable clause of the corresponding package declaration.

    A refinement clause of a package body defines the constituent parts of own variables given in the own variable clause of the corresponding package declaration.

    *** Semantic Error : 137 : The child package XXX is either undeclared or not visible at this point.

    Possible causes of this error are an error in the child package specification, omitting the child from the parent's component list in the index file, or omitting the child specification entry from the index file.

    *** Semantic Error : 138 : Child package own variable XXX does not appear in the own variable clause of the child package.

    A constituent of a refinement clause which is defined in a child package must be an own variable of the child package.

    *** Semantic Error : 139 : The variable XXX is not declared in the own variable clause of this package.

    A package can only initialize variables declared in its own variable clause.

    *** Semantic Error : 140 : The predecessor package XXX is either undeclared or not visible at this point.

    The parent of a child package must be a library package and must be declared prior to a child package. If using an index file the parent must have an entry in the index file and the child package must be listed as a component of the parent package.

    *** Semantic Error : 141 : The private type XXX is either undeclared or not visible at this point.

    *** Semantic Error : 142 : The subprogram prefix XXX is either undeclared or not visible at this point.

    The prefix should appear in the inherit clause of the current package.

    *** Semantic Error : 143 : The subprogram YYY.XXX is either undeclared or not visible at this point.

    *** Semantic Error : 144 : The dotted name YYY.XXX is either undeclared or not visible at this point.

    The name must denote an entire variable or an own variable of a package. If the variable or own variable is declared in a separate (or parent) package, the package must be included in an inherit clause and the identifier prefixed with the package name.
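
    A sketch of the pattern described above, assuming a separate package Devices with an own variable Reading: the inheriting package names Devices in its inherit clause and then refers to the own variable with its package prefix:

        --# inherit Devices;
        package Controller
        --# own State;
        is
           procedure Update;
           --# global  in  Devices.Reading;
           --#         out State;
           --# derives State from Devices.Reading;
        end Controller;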

    *** Semantic Error : 145 : The identifier YYY.XXX is either undeclared or not visible at this point.

    The identifier should be a typemark. If the typemark is declared in a separate (or parent) package, the package must be included in an inherit clause and the identifier prefixed with the package name. Ensure that there are no errors in the declaration of the typemark.

    *** Semantic Error : 148 : The abstract proof type XXX may not be used to define an own variable in another package.

    Own variables may be "type announced" as being of an abstract proof type only where that type is declared later in the same package. Thus --# own State : T; is legal if --# type T is abstract; appears later in the package; however, --# own State : P.T; is illegal if T is an abstract proof type declared in remote package P.

    *** Semantic Error : 149 : More than one own variable has been announced as being of type XXX which may not therefore be declared as an abstract proof type.

    Occurs when an own variable clause announces more than one own variable as being of a type XXX and XXX is later declared as being of an abstract proof type. Each abstract own variable must be of a unique type.

    *** Semantic Error : 150 : Entire variable expected. The names of constants never appear in mandatory annotations.

    Issued when the name of a constant is found in a mandatory annotation such as a global or derives annotation. Constants should not appear in such annotations.

    *** Semantic Error : 151 : The variable XXX does not occur either in the package own variable list or as a refinement constituent.

    A variable declared in a package must have been previously announced as either an own variable or as a concrete refinement constituent of an own variable.

    *** Semantic Error : 152 : The number of formal parameters is not consistent with the previous declaration of XXX.

    *** Semantic Error : 153 : The declaration of formal parameter XXX is not consistent with the subprogram's previous declaration.

    Issued if the name, type or parameter mode of a parameter is different in the subprogram body declaration from that declared originally.

    *** Semantic Error : 154 : The subprogram or task body XXX does not have an annotation.

    A subprogram or task body must have a global annotation if it references global variables; a procedure or task body must have a dependency relation to perform information flow analysis.

    *** Semantic Error : 155 : Unexpected annotation - all annotations required for procedure or task body XXX have already occurred.

    Do not repeat global or derives annotations in the body (or body stub) of a subprogram, entry or task except for state (own variable) refinement.

    *** Semantic Error : 156 : Entire variable expected.

    Issued when an identifier which SPARK requires to be an entire variable represents something other than this. Most commonly this message occurs when a component of a structured variable appears in a core annotation.

    *** Semantic Error : 157 : The name XXX already appears in the global variable list.

    *** Semantic Error : 158 : XXX is a formal parameter of this subprogram.

    Issued in a global annotation if it names a formal parameter of the subprogram.

    *** Semantic Error : 159 : The name XXX has already appeared as an exported variable.

    *** Semantic Error : 160 : The name XXX already appears in the list of imported variables.

    *** Semantic Error : 161 : Exportation of XXX is incompatible with its parameter mode.

    Issued if a parameter appears as an export to a procedure when it is of parameter mode in.

    *** Semantic Error : 162 : Importation of XXX is incompatible with its parameter mode.

    Issued if a parameter appears as an import to a procedure when it is of parameter mode out.

    *** Semantic Error : 163 : Subprogram XXX cannot be called from here.

    SPARK contains rules to prevent construction of programs containing recursive subprogram calls; this error message occurs if a procedure or function is called before its body has been declared. Re-ordering of subprogram bodies in the package concerned will be required.

    *** Semantic Error : 165 : This parameter is overlapped by another one, which is exported.

    Violation of the anti-aliasing rule.

    *** Semantic Error : 166 : This parameter is overlapped by an exported global variable.

    Violation of the anti-aliasing rule.
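
    A sketch of the rule being enforced, using an invented Swap procedure: passing the same variable as both actual parameters (or passing a variable that is also an exported global of the procedure) creates aliasing and is rejected:

        package Exchange
        is
           procedure Swap (X, Y : in out Integer);
           --# derives X from Y &
           --#         Y from X;
        end Exchange;

        -- A call such as  Exchange.Swap (A, A);  is rejected because the actual
        -- parameter A overlaps another parameter that is exported by Swap.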

    *** Semantic Error : 167 : Imported variable XXX is not named in the initialization specification of its package.

    Issued when an own variable which is imported into the main program procedure (or a task when the Ravenscar profile is enabled) has not been declared as being initialized by its package. At the main program level the only imports that are permitted are initialized own variables of inherited packages. There are two possible cases to consider: (1) the main program should be importing the variable in which case it should be annotated in its package with --# initializes (and, of course, actually initialized in some way) or be an external variable or protected variable which is implicitly initialized; or (2) the own variable concerned is not initialized at elaboration, should not therefore be considered an import to the main program and should be removed from the main program's import list.

    *** Semantic Error : 168 : XXX is a loop parameter, whose updating is not allowed.

    *** Semantic Error : 169 : Global variables of function subprograms must be of mode in.

    It is an important property of SPARK that functions cannot have side-effects; therefore only the reading of global variables is permitted. It is usually convenient to omit modes from function global annotations, but use of mode 'in' is permitted.

    *** Semantic Error : 170 : XXX is a formal parameter of mode in, whose updating is not allowed.

    *** Semantic Error : 171 : XXX is a formal parameter of mode out, whose value cannot be read.

    *** Semantic Error : 172 : The actual parameter associated with an exported formal parameter must be an entire variable.

    Issued if an actual parameter which is an array element is associated with an exported formal parameter in a procedure call. Exported parameters must be either entire variables or record fields.

    *** Semantic Error : 173 : This exported parameter is named in the global definition of the procedure.

    Violation of the anti-aliasing rule.

    *** Semantic Error : 174 : XXX is not an own variable.

    Occurs in initialization specifications if something other than a variable is listed as being initialized.

    A justification of an error requires the actual variables named in the error message to be referenced. The keyword "all" can only be used with language profiles for auto-code generators such as SCADE KCG. Such profiles are only available with the SPARK Pro Toolset.

    *** Semantic Error : 176 : XXX does not have a derives annotation so it may not be called from YYY which does have a derives annotation.

    When analysing with flow=auto, a procedure or entry without a derives annotation may not be called by a procedure, task or entry with a derives annotation. This is because the body of the caller must be checked against its derives annotation. In order to calculate the correct dependency relation for the body of the caller there must be derives annotations present on all called procedures or entries.

    *** Semantic Error : 180 : Entire composite constant expected.

    Issued when an identifier which SPARK requires to be an entire composite constant represents something other than this.

    *** Semantic Error : 181 : Invalid policy for constant proof rule generation.

    *** Semantic Error : 182 : Rule Policy for YYY.XXX already declared in current scope.

    Issued when a rule policy has already been declared for this constant within this declarative region. This rule policy will be ineffective.

    *** Semantic Error : 190 : The name XXX already appears in the inherit clause.

    *** Semantic Error : 191 : The name XXX already appears in the with clause.

    *** Semantic Error : 200 : The parameter XXX is neither imported nor exported.

    Each formal parameter of a subprogram shall be imported or exported or both.

    *** Semantic Error : 201 : The global variable XXX is neither imported nor exported.

    Every variable in a global definition must also appear in the associated derives annotation where it will be either imported or exported or both.

    *** Semantic Error : 250 : The 'Size value for type XXX has already been set.

    *** Semantic Error : 251 : The attribute value for XXX'Size must be of an integer type.

    *** Semantic Error : 252 : The attribute value for XXX'Size must be a static simple expression.

    The value of 'Size must be static and must be of an integer type.

    *** Semantic Error : 253 : The attribute value for XXX'Size must not be negative.

    The value of 'Size must be a positive integer or zero.

    *** Semantic Error : 254 : The Size attribute can only be specified for a first subtype.

    Setting 'Size for a user-defined non-first subtype is not permitted. See Ada95 LRM 13.3(48).

    *** Semantic Error : 255 : The Address attribute can only be specified for a variable, a constant, or a program unit.

    Ada95 LRM Annex N.31 defines a program unit to be either a package, a task unit, a protected unit, a protected entry, a generic unit, or an explicitly declared subprogram other than an enumeration literal.

    *** Semantic Error : 273 : Own variable XXX may not be refined because it was declared with a type mark which has not subsequently been declared as an abstract proof type.

    Where a type mark is included in an own variable declaration it indicates that the own variable will either be of a concrete type of that name (which may be either already declared or be declared later in the package) or of an abstract proof type declared in the package specification. In the former case the refinement is illegal because own variables of concrete Ada types may not be refined. In the latter case it is legal; however, no suitable proof type declaration has been found in this case.

    *** Semantic Error : 300 : Renaming declarations are not allowed here.

    A renaming declaration must be the first declarative item of a package body or main program or it must be placed immediately after the declaration of an embedded package.

    *** Semantic Error : 301 : Renaming or use type declarations here can only rename subprograms in package XXX.

    A renaming declaration may be placed immediately after the declaration of an embedded package; in this case it may only rename subprograms declared in that package.

    *** Semantic Error : 302 : The subprogram specification in this renaming declaration is not consistent with the declaration of subprogram XXX.

    Issued in a subprogram renaming declaration if it contains parameter names, numbers or types which differ from those originally declared.

    *** Semantic Error : 303 : An operator can only be renamed by the same operator.

    Issued if a renaming declaration has a different operator on each side of the reserved word RENAMES.

    *** Semantic Error : 304 : A renaming declaration for operator XXX is not allowed.

    *** Semantic Error : 305 : The specification in this renaming declaration is not consistent with the implicit declaration of operator XXX.

    Issued in an operator renaming declaration if it contains types which differ from those applicable to the operator being renamed.

    *** Semantic Error : 306 : Operator XXX is already visible.

    Occurs in an operator renaming declaration if an attempt is made to rename an operator which is already visible. (The message will also appear as a secondary consequence of trying to rename an operator between undeclared types.)

    *** Semantic Error : 307 : The implicit declaration of this operator does not occur in package XXX.

    *** Semantic Error : 308 : Type is limited.

    Issued if an attempt is made to assign a variable of a type which is limited or which contains a limited type.

    *** Semantic Error : 309 : Operator not visible for these types.

    This message means that the operator exists between the types on each side of it but that it is not visible. The most likely cause is that the types concerned are defined in another package and that renaming is required to make the operator visible.

    *** Semantic Error : 310 : The % operator may only appear in an assert or check statement in a for loop.

    The % operator is used to indicate the value of a variable on entry to a for loop. This is because the variable may be used in the exit expression of the loop and may also be modified in the body of the loop. Since the semantics of Ada require the exit expression to be fixed after evaluation we require a way of reasoning about the original value of a variable prior to any alteration in the loop body. No other situation requires this value so % may not be used anywhere else.
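
    A sketch (procedure name and invariant invented, and the invariant may need strengthening for proof): X appears in the loop's range and is also modified inside the loop, so the assertion refers to its value on entry as X%:

        procedure Drain (X : in out Natural)
        --# derives X from X;
        is
        begin
           for I in Natural range 1 .. X loop
              --# assert X = X% - (I - 1);
              X := X - 1;
           end loop;
        end Drain;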

    *** Semantic Error : 311 : Announced own variable types may not be implemented as unconstrained arrays.

    Where an own variable is announced as being of some type, SPARK requires that type to be declared; the declaration cannot be in the form of an unconstrained array because SPARK prohibits unconstrained variables.

    *** Semantic Error : 312 : A subprogram can only be renamed to the same name with the package prefix removed.

    *** Semantic Error : 313 : Only one main program is permitted.

    *** Semantic Error : 314 : Own variable XXX has been refined and may not appear here.

    Issued if an attempt is made to use, in a second annotation, an own variable which has been refined. Second annotations should use the appropriate refinement constituents of the own variable.

    *** Semantic Error : 315 : Unsupported proof context.

    Certain proof contexts have been included in the syntax of SPARK but are not yet supported; this error message results if one is found.

    *** Semantic Error : 316 : Selected components are not allowed for XXX since type YYY is private here.

    If a type is private, then record field selectors may not be used. In pre- and post-conditions, a proof function can be declared to yield the required attribute of a private type.

    *** Semantic Error : 317 : Tilde, in a function return annotation, may only be applied to an external variable of mode IN.

    The tilde decoration indicates the initial value of a variable or parameter which is both imported and exported. A function may not have an explicit side effect on a program variable and so cannot be regarded as exporting such a variable. For modelling purposes a read of an external (stream) variable is regarded as having a side effect (outside the SPARK boundary). Since it may be necessary to refer to the initial value of the external variable, before this implicit side effect occurs, the use of tilde is allowed only for external variables of mode IN which are globally referenced by the function.

    *** Semantic Error : 318 : Tilde or Percent may only be applied to variables.

    The tilde decoration indicates the initial value of a variable or parameter which is both imported and exported. Percent indicates the value of a variable on entry to a for loop; this message occurs if either operator is applied to any other object.

    *** Semantic Error : 319 : Tilde may only be applied to a variable which is both imported and exported.

    The tilde decoration indicates the initial value of a variable or parameter which is both imported and exported; this message occurs if the variable concerned is either exported only or imported only in which case no distinction between its initial and final value is required.
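
    A sketch of the usual use of tilde (names invented, body omitted): X is both imported and exported, so its initial value may be written X~ in the post-condition:

        package Counters
        is
           subtype Counter is Integer range 0 .. 100;

           procedure Increment (X : in out Counter);
           --# derives X from X;
           --# pre  X < Counter'Last;
           --# post X = X~ + 1;
        end Counters;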

    *** Semantic Error : 320 : Tilde or Percent may only be applied to an entire variable.

    Tilde (and %) may not be applied to an element of an array or field of a record. e.g. to indicate the initial value of the Ith element of array V use V~(I) not V(I)~.

    *** Semantic Error : 321 : Tilde may not appear in pre-conditions.

    Since it does not make sense to refer to anything other than the initial value of a variable in a pre-condition there is no need to use tilde to distinguish initial from final values.

    *** Semantic Error : 322 : Only imports may be referenced in pre-conditions or return expressions.

    Pre-conditions are concerned with the initial values of information carried into a subprogram. Since only imports can do this only imports can appear in pre-condition expressions.

    *** Semantic Error : 323 : Updates may only be applied to records or arrays.

    The extended SPARK update syntax is only used to express changes to components of a structured variable.

    *** Semantic Error : 324 : Only one field name may appear here.

    When using the extended SPARK update syntax for a record, you cannot update more than one element in each clause of the update. For example, you cannot write [x, y => z]; you must instead write [x => z; y => z].
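
    A sketch of the update syntax on a record (names invented, body omitted); each updated field gets its own clause:

        package Pairs
        is
           type Pair is record
              A : Integer;
              B : Integer;
           end record;

           procedure Clear_A (P : in out Pair);
           --# derives P from P;
           --# post P = P~[A => 0];
           -- To update both fields write P~[A => 0; B => 0];
           -- the grouped form P~[A, B => 0] is rejected.
        end Pairs;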

    *** Semantic Error : 325 : Type XXX has not been declared.

    Occurs if a type is "announced" as part of an own variable clause and the end of the package is reached without an Ada declaration for a type of this name being found.

    *** Semantic Error : 326 : Predicate is not boolean.

    Occurs wherever a proof context is found not to be a boolean expression.

    *** Semantic Error : 327 : XXX is a global variable which may not be updated in a function subprogram.

    *** Semantic Error : 328 : The identifier XXX (exported by called subprogram) may not be updated in a function subprogram.

    Occurs if a function calls a procedure which exports a global variable; this would create an illegal side-effect of the function.

    *** Semantic Error : 329 : Illegal function call.

    Issued if a call is made to a user-defined subprogram in a package initialization part.

    *** Semantic Error : 330 : Illegal use of an own variable not of this package.

    Issued if an attempt is made, in a package initialization part, to update an own variable of a non-enclosing package.

    *** Semantic Error : 331 : Private types may not be unconstrained arrays.

    *** Semantic Error : 332 : This private type was not declared as limited.

    Issued where the type contains a component which is a limited private type, but where the declaration of this type in the visible part of the package does not specify that the type is limited.

    *** Semantic Error : 333 : Initialization of XXX is not announced in the initialization clause of this package.

    Issued when an own variable is initialized either by assignment or by having a pragma Import attached to it when initialization of the variable is not announced in its package's own variable initialization specification.

    *** Semantic Error : 334 : Identifier XXX is not the name of a function.

    *** Semantic Error : 335 : This annotation should be placed with the declaration of function XXX.

    Issued if a function is declared in a package specification without an annotation but one is then supplied on the function body.

    *** Semantic Error : 336 : Unexpected annotation - all annotations required for function XXX have already occurred.

    *** Semantic Error : 337 : Package XXX may not be used as a prefix here.

    Selected component notation may not be used in places where an item is directly visible.

    *** Semantic Error : 338 : Scalar parameter XXX is of mode in out and must appear as an import.

    Parameters passed as mode in out must be listed as imports in the subprogram's dependency relation if they are of scalar types. The rule also applies to a parameter of a private type if its full declaration is scalar.

    *** Semantic Error : 339 : Subprogram XXX was not declared in package YYY.

    *** Semantic Error : 340 : Only operators may be renamed in package specifications.

    User-declared subprograms may not be renamed in package specifications although the implicitly declared function subprograms associated with operators may be.

    *** Semantic Error : 341 : A range may not appear here.

    Issued if a range is found where a single value is expected, for example, if an array slice is constructed.

    *** Semantic Error : 342 : This proof annotation should be placed with the declaration of subprogram XXX.

    Like global and derives annotations, proof annotations should be placed on the first appearance of a subprogram. There may also be a requirement for a second proof annotation on a subprogram body where it references an abstract own variable.

    *** Semantic Error : 343 : Unexpected proof annotation - all annotations required for subprogram XXX have already occurred.

    Issued if a second proof annotation for a subprogram is found but the subprogram does not reference any abstract own variables. A second annotation is only required where it is necessary to express both an abstract (external) and a refined (internal) view of an operation.

    *** Semantic Error : 399 : Range error in annotation expression.

    Issued if a proof annotation contains an expression that would cause a constraint error if it were in an executable Ada statement. For example: "--# post X = T'Succ(T'Last);" VCs generated from such malformed predicates would always be unprovable.

    *** Semantic Error : 400 : Expression contains division by zero.

    Issued when a static expression, evaluated using perfect arithmetic, is found to contain a division by zero.

    *** Semantic Error : 401 : Illegal numeric literal.

    Issued when a numeric literal is illegal because it contains, for example, digits not compatible with its number base.

    *** Semantic Error : 402 : Constraint_Error will be raised here.

    Issued whenever a static expression would cause a constraint error. e.g. assigning a value to a constant outside the constant's type range. In SPARK a static expression may not yield a value which violates a range constraint.

    *** Semantic Error : 403 : Argument value is inconsistent with the number of dimensions of array type XXX.

    Issued when an array attribute containing an argument is found and the value of the argument is inconsistent with the number of dimensions of the array type to which it is being applied.

    *** Semantic Error : 407 : This choice overlaps a previous one.

    Choices in case statements and array aggregates may not overlap.

    *** Semantic Error : 408 : Case statement is incomplete.

    A case statement must either explicitly supply choices to cover the whole range of the (sub)type of the controlling expression, or it must supply an others choice.

    *** Semantic Error : 409 : Empty range specified.

    In SPARK, no static range is permitted to be null.

    *** Semantic Error : 410 : Choice out of range.

    The choices in case statements and array aggregates must be within the constraints of the appropriate (sub)type.

    *** Semantic Error : 411 : Others clause required.

    Issued where an others clause is required to satisfy the Ada language rules.

    *** Semantic Error : 412 : Explicit boolean range not permitted.

    *** Semantic Error : 413 : Invalid range constraint.

    Issued where a range constraint is outside the range of the (sub)type to which the constraint applies.

    *** Semantic Error : 414 : Array aggregate is incomplete.

    An array aggregate must either explicitly supply values for all array elements or provide an others clause.

    *** Semantic Error : 415 : Too many entries in array aggregate.

    Issued where an array aggregate using positional association contains more entries than required by the array index type.

    *** Semantic Error : 416 : Type may not have an empty range.

    *** Semantic Error : 417 : String subtypes must have a lower index bound of 1.

    *** Semantic Error : 418 : Index upper and/or lower bounds do not match those expected.

    Issued where assignment, association or type conversion is attempted between two different constrained subtypes of the same unconstrained array type, and where the index bounds do not match.

    *** Semantic Error : 419 : YYY.XXX has been renamed locally, so the prefix YYY must not be used.

    When an entity is renamed, the fully qualified name is no longer visible, and so must not be used.

    *** Semantic Error : 420 : Array index(es) not convertible.

    Issued when an attempt is made to convert between two arrays whose indexes are neither of the same type nor numeric.

    *** Semantic Error : 421 : Array components are not of the expected type.

    Issued when a type conversion attempts to convert between two array types whose components are of different types.

    *** Semantic Error : 422 : Array component constraints do not match those expected.

    Issued when a type conversion attempts to convert between two array types whose components are of the same type but do not have constraints which can be statically determined to be identical.

    *** Semantic Error : 423 : Array has different number of dimensions from that expected.

    Issued when attempting to convert between two array types which have different numbers of dimensions.

    *** Semantic Error : 424 : Attributes are not permitted in a String concatenation expression.

    Character attributes such as 'Val, 'Pos, 'Succ and 'Pred are not permitted below a concatenation operator in a String expression.

    *** Semantic Error : 425 : String literals may not be converted.

    Issued if the argument of a type conversion is a string literal. A common cause is an attempt to type qualify a string and accidentally omitting the tick character.

    *** Semantic Error : 500 : Mode expected.

    Issued when performing data flow analysis only where a subprogram has no dependency clause and its global variables have not been given modes in the global annotation.

    *** Semantic Error : 501 : Dependency relation expected.

    A dependency relation is required for each procedure if information flow analysis is to be performed.
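
    A sketch of a global annotation with modes, as required when only data flow analysis is performed and no dependency relation is given (the own variables named are invented and assumed to belong to inherited packages Source and Sink):

        procedure Transfer;
        --# global in  Source.Reading;
        --#        out Sink.Setting;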

    *** Semantic Error : 502 : Exportation of XXX is incompatible with its global mode.

    Issued when a procedure has both a global annotation with modes and a dependency relation, and a global of mode in is listed as an export in the dependency relation.

    *** Semantic Error : 503 : Importation of XXX is incompatible with its global mode.

    Issued when a procedure has both a global annotation with modes and a dependency relation, and a global of mode out is listed as an import in the dependency relation.

    *** Semantic Error : 504 : Parameter XXX is of mode in out and must appear as an import.

    *** Semantic Error : 505 : Global variable XXX is of mode in out and must appear as an import.

    Issued where a procedure has both a global annotation with modes and a dependency relation, and a global variable of mode in out is not listed as an import in the dependency relation.

    *** Semantic Error : 506 : Parameter XXX is of mode in out and must appear as an export.

    *** Semantic Error : 507 : Global variable XXX is of mode in out and must appear as an export.

    Issued where a procedure has both a global annotation with modes and a dependency relation, and a global variable of mode in out is not listed as an export in the dependency relation.

    *** Semantic Error : 508 : This global variable is a parameter of mode in and can only have the global mode in.

    *** Semantic Error : 509 : Unexpected refined dependency relation.

    When using refinement in automatic flow analysis mode, if there is a dependency relation on the subprogram specification then there must also be one on the body. Similarly, if there is no dependency relation on the specification then the body is not permitted to have one.

    *** Semantic Error : 550 : use type clauses may only be used in SPARK95: clause ignored.

    *** Semantic Error : 551 : All operators for type XXX are already visible.

    *** Semantic Error : 552 : The type XXX already appears in the use type clause.

    *** Semantic Error : 554 : XXX is a limited private type for which no operators can be made visible.

    *** Semantic Error : 555 : XXX is not mentioned in an earlier with clause of this compilation unit.

    *** Semantic Error : 600 : pragma Import has a minimum of 2 and a maximum of 4 parameters.

    *** Semantic Error : 601 : Convention, Entity, External_Name or Link_Name expected.

    *** Semantic Error : 602 : An association for XXX has already been given.

    *** Semantic Error : 603 : No association for XXX was given.

    *** Semantic Error : 604 : This package may not have a body - consider use of pragma Elaborate_Body.

    In Ada 95, a package body is illegal unless it is required for the purpose of providing a subprogram body, or unless this pragma is used. This error is issued where a package body is found for a package whose specification does not require a body.

    *** Semantic Error : 605 : pragma Elaborate_Body has one parameter.

    *** Semantic Error : 606 : This expression does not represent the expected package name XXX.

    Issued when the parameter to a pragma Elaborate_Body is invalid.

    *** Semantic Error : 607 : This package requires a body and must therefore include either pragma Elaborate_Body or a subprogram declaration.

    Issued where a package specification contains no subprogram declarations, but whose own variables (as specified in the package annotation) are not all declared (and initialized where appropriate) in the package specification. This is because such a package is not allowed a body in Ada 95 unless either the pragma is given or a subprogram declared.
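
    A sketch of the situation these two messages describe (names invented): the specification declares no subprograms, the own variable is declared and initialized in the body, and the body is made legal by pragma Elaborate_Body:

        package Log
        --# own State;
        --# initializes State;
        is
           pragma Elaborate_Body;
        end Log;

        package body Log
        is
           State : Integer := 0;
        end Log;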

    *** Semantic Error : 608 : Reduced accuracy subtypes of real numbers are considered obsolescent and are not supported by SPARK.

    *** Semantic Error : 609 : This entity cannot be assigned to.

    *** Semantic Error : 610 : Child packages may not be used in SPARK83.

    *** Semantic Error : 611 : Illegal use of deferred constant prior to its full declaration.

    *** Semantic Error : 613 : Illegal name for body stub.

    Issued if a dotted name appears in a body stub as in "package body P.Q is separate". No legal stub could ever have such a name.

    *** Semantic Error : 614 : Child packages may be declared only at library level.

    Issued if an attempt is made to declare a child package which is embedded in a package or subprogram.

    *** Semantic Error : 615 : Name does not match name of package.

    Issued if the closing identifier of a package has a different number of identifiers from the name originally given for the package. For example "package P.Q is ... end P.Q.R;".

    *** Semantic Error : 616 : The private package XXX is not visible at this point.

    Issued if an attempt is made to with or inherit a private package from the visible part of a public package.

    *** Semantic Error : 617 : Public sibling XXX is not visible at this point.

    Arises from attempting to inherit a public sibling child package from a private child package.

    *** Semantic Error : 618 : The owner of the current package does not inherit the package XXX.

    A private descendant (although it may be a public package) can only inherit a remote package if its parent also inherits it; this is analogous to the behaviour of embedded packages, which may only inherit a remote package if their enclosing package also does so.

    *** Semantic Error : 619 : The package XXX is not owned by the current package.

    This message indicates an attempt to claim that own variables of a package other than a private child package of the current package are refinement constituents of an abstract own variable of the current package.

    *** Semantic Error : 620 : Own variables here must be refinement constituents in package owner XXX.

    Own variables of private child packages must appear as refinement constituents of the package which owns the child. If the Examiner has seen the owner package body before processing the child and has not found the required refinement constituent then this message results on processing the child.

    *** Semantic Error : 621 : Own variable XXX expected as a refinement constituent in this package.

    Own variables of private child packages must appear as refinement constituents of the package which owns the child. If the Examiner has seen a child package which declares an own variable before examining its owner’s body then this message is issued if the owner lacks the required refinement constituent declaration.

    *** Semantic Error : 622 : Own variable XXX did not occur in an initialization specification.

    Issued if an own variable appears in an initialization clause and is also a refinement constituent of an own variable which is not marked as initialized.

    *** Semantic Error : 623 : Own variable XXX occurred in an initialization specification.

    Issued if an own variable does not appear in an initialization clause and is also a refinement constituent of an own variable that is marked as initialized.

    *** Semantic Error : 624 : All operators from ancestor package XXX are already visible.

    A package must appear in a with clause before types declared in it can be specified in a use type clause.

    *** Semantic Error : 626 : Global/derives/declare on generic subprogram instantiation is not allowed.

    *** Semantic Error : 628 : Formal parameter of the instantiation of subprogram XXX is not allowed.

    *** Semantic Error : 629 : The generic subprogram XXX has no generic formal parameters.

    *** Semantic Error : 630 : XXX is not the name of generic subprogram.

    Only generic subprograms can be instantiated.

    *** Semantic Error : 631 : Generic function found where a generic procedure was expected.

    The subprogram kind of the generic and its instantiation must match.

    *** Semantic Error : 632 : Generic procedure found where a generic function was expected.

    The subprogram kind of the generic and its instantiation must match.

    *** Semantic Error : 635 : Incorrect number of generic actual parameters for instantiation of generic unit XXX.

    The number of generic formal and actual parameters must match exactly.

    *** Semantic Error : 636 : Type XXX is not compatible with generic formal parameter YYY.

    See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type which is of a compatible class. Note that SPARK does not have default values for such associations.

    *** Semantic Error : 637 : User-defined generic units are not permitted in SPARK 83.

    There are weaknesses in the generic type model of Ada 83 that prevent the implementation of a safe subset of generics in SPARK 83. These deficiencies are overcome in Ada 95. SPARK 83 users may employ the predefined unit Unchecked_Conversion only.

    *** Semantic Error : 638 : Unexpected global annotation. A generic subprogram may not reference or update global variables.

    A standalone generic subprogram may not have a global annotation. Note that a subprogram in a generic package may have a global annotation as long as it only refers to own variables that are local to the package.

    *** Semantic Error : 639 : A generic formal object may only have default mode or mode in.

    SPARK restricts formal objects to being constants in order to avoid concealed information flows.

    *** Semantic Error : 640 : A generic formal object may only be instantiated with a constant expression.

    SPARK restricts formal objects to being constants in order to avoid concealed information flows.

    *** Semantic Error : 641 : There is no generic subprogram declaration named XXX so a generic body of that name cannot be declared here.

    A generic body must be preceded by a generic declaration of the same name.

    *** Semantic Error : 645 : Actual array element XXX is not compatible with the element type YYY of the generic formal parameter.

    See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type which is of a compatible class. Note that SPARK does not have default values for such associations.

    *** Semantic Error : 646 : Actual array index XXX is not compatible with the index type YYY of the generic formal parameter.

    See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type which is of a compatible class. Note that SPARK does not have default values for such associations.

    *** Semantic Error : 647 : Actual array XXX has more dimensions than formal array YYY.

    See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type which is of a compatible class. Note that SPARK does not have default values for such associations.

    *** Semantic Error : 648 : Actual array XXX has fewer dimensions than formal array YYY.

    See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type which is of a compatible class. Note that SPARK does not have default values for such associations.

    *** Semantic Error : 649 : Actual array XXX is constrained but the associated formal YYY is unconstrained.

    See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type which is of a compatible class. Note that SPARK does not have default values for such associations.

    *** Semantic Error : 650 : Actual array XXX is unconstrained but the associated formal YYY is constrained.

    See ALRM 12.5. Each generic formal type parameter must be supplied with an actual type which is of a compatible class. Note that SPARK does not have default values for such associations.

    *** Semantic Error : 651 : Variables of generic types may not be initialized at declaration.

    In non-generic code we statically know the value being assigned to the variable and can check that it is in range. In the case of a generic we cannot do this because we do not know the bounds of the variable's type. The variable may, however, be assigned to in the sequence of statements in the generic body because generation of run-time checks will provide suitable protection from out-of-range values.

    *** Semantic Error : 652 : Subtypes of generic types are not permitted.

    In non-generic code we statically know the values being used as the range bounds for a subtype and can check that they are in range. In the case of a generic we cannot do this because we do not know the bounds of the variable's type.

    *** Semantic Error : 653 : Constants of generic types are not permitted.

    In non-generic code we statically know the value being assigned to the constant and can check that it is in range. In the case of a generic we cannot do this because we do not know the bounds of the constant's type. A variable, assigned to in the sequence of statements in the generic body, may be a suitable substitute for such a constant.

    *** Semantic Error : 654 : XXX is a generic subprogram which must be instantiated before it can be called.

    Generic units provide a template for creating callable units and are not directly callable.

    *** Semantic Error : 655 : Invalid prefix, XXX is a generic package.

    Components of generic packages cannot be accessed directly. First instantiate the package and then access components of the instantiation.

    *** Semantic Error : 656 : The only currently supported attribute in this context is 'Always_Valid.

    *** Semantic Error : 657 : A 'Always_Valid assertion requires a variable here.

    The 'Always_Valid assertion can only be applied to variables or to components of record variables.

    *** Semantic Error : 658 : The object in this assertion must be scalar or a non-tagged aggregation of scalar components.

    The 'Always_Valid assertion can only be applied to objects which are: (1) of a scalar type, (2) a one dimensional array of scalar components, (3) an entire record variable of a non-tagged type with all components that are either scalar or an array of scalar components, (4) an array variable whose components are records satisfying (3). Additionally a field of a record satisfying these constraints may be marked individually as always valid.

    *** Semantic Error : 659 : A 'Always_Valid assertion must be in the same declarative region as contains the declaration of the variable to which it refers.

    *** Semantic Error : 660 : A 'Always_Valid assertion must not be applied to an object already marked as always valid.

    *** Semantic Error : 662 : Only Mode in own variables and constituents can be marked using 'Always_Valid.

    The 'Always_Valid assertion can only be applied to variables which are own variables with the mode in, or to subcomponents of records which are mode in own variables.
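
    A sketch of an 'Always_Valid assertion on a mode 'in' own variable (names invented; the address clause or import pragma that would normally connect Reading to the external environment is omitted):

        package Port
        --# own in Reading;
        is
           type Raw is range 0 .. 65535;
           Reading : Raw;
           --# assert Reading'Always_Valid;
        end Port;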

    *** Semantic Error : 700 : Mode 'in out' may not be applied to own variables or their refinement constituents.

    Own variables may be given a mode to indicate that they are system level inputs or outputs (i.e. they obtain values from or pass values to the external environment). Since effective SPARK design strictly separates inputs from outputs the mode 'in out' is not permitted.
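
    A sketch of moded own variables acting as system-level input and output (all names invented); note that mode 'in out' is not available here:

        package Panel
        --# own in  Switches;
        --#     out Display;
        is
           procedure Refresh;
           --# global  in  Switches;
           --#         out Display;
           --# derives Display from Switches;
        end Panel;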

    *** Semantic Error : 701 : The mode of this refinement constituent is not consistent with its subject: XXX.

    If an abstract own variable is given a mode then its refinement constituents must all be of the same mode.

    *** Semantic Error : 702 : Own variable XXX must be given the mode 'in' to match its earlier announcement.

    Issued if an own variable of an embedded package is not given the same mode as the earlier refinement constituent that announced it would exist.

    *** Semantic Error : 703 : Own variable XXX must be given the mode 'out' to match its earlier announcement.

    Issued if an own variable of an embedded package is not given the same mode as the earlier refinement constituent that announced it would exist.

    *** Semantic Error : 704 : Own variable XXX may not have a mode because one was not present in its earlier announcement.

    Issued if an own variable of an embedded package is given a mode when the earlier refinement constituent that announced it would exist did not have one.

    *** Semantic Error : 705 : Refinement constituent XXX must be given the mode 'in' to match the child package own variable with which it is being associated.

    If a refinement constituent is an own variable of a private package then the constituent must have the same mode as the own variable to which it refers.

    *** Semantic Error : 706 : Refinement constituent XXX must be given the mode 'out' to match the child package own variable with which it is being associated.

    If a refinement constituent is an own variable of a private package then the constituent must have the same mode as the own variable to which it refers.

    *** Semantic Error : 707 : Refinement constituent XXX may not have a mode because one was not present on the child package own variable with which it is being associated.

    If a refinement constituent is an own variable of a private package then the constituent can only be given a mode if the own variable to which it refers has one.

    *** Semantic Error : 708 : Own variable XXX has a mode and may not appear in an initializes clause.

    Mode own variables (stream variables) are implicitly initialized by the environment to which they are connected and may not appear in initializes clauses since this would require their explicit initialization.

    *** Semantic Error : 709 : Own variable or constituent XXX has mode 'out' and may not be referenced by a function.

    Functions are permitted to reference own variables that are either unmoded or of mode 'in'. Since mode 'out' own variables represent outputs to the environment, reading them in a function does not make sense and is not allowed.

    *** Semantic Error : 710 : The own variable or constituent XXX is of mode 'in' and can only have global mode 'in'.

    Global modes, if given, must be consistent with the modes of own variables that appear in the global list.

    *** Semantic Error : 711 : The own variable or constituent XXX is of mode 'out' and can only have global mode 'out'.

    Global modes, if given, must be consistent with the modes of own variables that appear in the global list.

    *** Semantic Error : 712 : The own variable or constituent XXX is of either mode 'in' or mode 'out' and may not have global mode 'in out'.

    Global modes, if given, must be consistent with the modes of own variables that appear in the global list.

    *** Semantic Error : 713 : The own variable or constituent XXX is of mode 'in' and may not appear in a dependency clause as an export.

    Own variables with mode 'in' denote system-level inputs; their exportation is not allowed.

    *** Semantic Error : 714 : The own variable or constituent XXX is of mode 'out' and may not appear in a dependency clause as an import.

    Own variables with mode 'out' denote system-level outputs; their importation is not allowed.

    *** Semantic Error : 715 : Function XXX references external (stream) variables and may only appear directly in an assignment or return statement.

    To avoid ordering effects, functions which globally access own variables which have modes (indicating that they are connected to the external environment) may only appear directly in assignment or return statements. They may not appear as actual parameters or in any other form of expression.

    *** Semantic Error : 716 : External (stream) variable XXX may only appear directly in an assignment or return statement; or as an actual parameter to an unchecked conversion.

    To avoid ordering effects, own variables which have modes (indicating that they are connected to the external environment) may only appear directly in assignment or return statements. They may not appear as actual parameters (other than to instantiations of Unchecked_Conversion) or in any other form of expression.
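
    For illustration (Sensor is assumed to be a mode 'in' external own variable, P a procedure, and X and Y suitable variables), typical legal and illegal uses are:

        X := Sensor;        -- legal: the stream appears directly in an assignment
        return Sensor;      -- legal: the stream appears directly in a return statement
        Y := Sensor + 1;    -- illegal: the stream appears inside a larger expression
        P (Sensor);         -- illegal: the stream is used as an actual parameter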

    *** Semantic Error : 717 : External (stream) variable XXX is of mode 'in' and may not be assigned to.

    Own variables with mode 'in' represent inputs to the system from the external environment. As such, assigning to them does not make sense and is not permitted.

    *** Semantic Error : 718 : External (stream) variable XXX is of mode 'out' and may not be referenced.

    Own variables with mode 'out' represent outputs to the external environment from the system. As such, referencing them does not make sense and is not permitted.

    *** Semantic Error : 719 : External (stream) variables may not be referenced or updated during package elaboration.

    Own variables with modes represent inputs and outputs between the external environment and the system. Referencing or updating them during package elaboration would introduce ordering effects and is not permitted.

    *** Semantic Error : 720 : Variable XXX is an external (stream) variable and may not be initialized at declaration.

    Own variables with modes represent inputs and outputs between the external environment and the system. Initializing such a variable at its declaration is a form of elaboration-time access, which would introduce ordering effects and is not permitted.

    *** Semantic Error : 721 : This refined function global annotation may not reference XXX because it is an external (stream) variable whose abstract subject YYY does not have a mode.

    Functions may be used to reference external (stream) variables and the Examiner generates the appropriate information flow to show that the value returned by the function is 'volatile'. If the abstract view of the same function shows it referencing an own variable which is not an external stream then the volatility of the function is concealed. The error can be removed either by making the abstract own variable a mode 'in' stream or by using a procedure instead of a function to read the refined stream variable.

    *** Semantic Error : 722 : The mode on abstract global variable YYY must be made 'in out' to make it consistent with the referencing of mode 'in' external (stream) constituent XXX in the refined global annotation.

    Where a procedure references an external (stream) variable of mode 'in' the Examiner constructs appropriate information flow to show that the input stream is 'volatile'. If the abstract view shows that the procedure obtains its result by simply reading an own variable which is not an external stream then the volatility is concealed. The error can be removed either by making the global mode of XXX 'in out' or making XXX an external (stream) variable of mode 'in'.

    *** Semantic Error : 723 : Variable XXX must appear in this refined global annotation.

    Issued when a global variable which is present in the first (abstract) global annotation is omitted from the second (refined) one.

    *** Semantic Error : 724 : Exit label must match the label of the most closely enclosing loop statement.

    If an exit statement names a loop label, then the most closely enclosing loop statement must have a matching label.

    *** Semantic Error : 725 : Protected function or variable XXX may only appear directly in an assignment or return statement.

    To avoid ordering effects, protected functions may only appear directly in assignment or return statements. They may not appear as actual parameters or in any other form of expression. Ordering effects occur because the global state referenced by the protected function may be updated by another process during expression evaluation.

    *** Semantic Error : 730 : A loop with no iteration scheme or exit statements may only appear as the last statement in the outermost scope of the main subprogram (or a task body when using the Ravenscar profile).

    If a loop has neither an iteration scheme nor any exit statements then it will run forever, and any statements following it will be unreachable. SPARK allows only one such loop, which must be the last statement of the main program (or of a task body when the Ravenscar profile is selected).

    *** Semantic Error : 750 : The identifier YYY.XXX is either undeclared or not visible at this point. An array type may not be used as its own index type.

    The type mark used for the index of an array type declaration must not be the same as the name of the array type being declared.

    *** Semantic Error : 751 : The identifier YYY.XXX is either undeclared or not visible at this point. A record type may not include fields of its own type.

    The type mark given for a field in a record type declaration must not be the same as the name of the record type being declared.

    *** Semantic Error : 752 : The identifier YYY.XXX is either undeclared or not visible at this point. This identifier must appear in a preceding legal global annotation or formal parameter list.

    For an identifier to appear legally as an import in a derives annotation, it must be a formal parameter or must appear legally in a preceding global annotation and must be of mode 'in' or mode 'in out'.

    *** Semantic Error : 753 : The identifier YYY.XXX is either undeclared or not visible at this point. This identifier must appear in a preceding legal global annotation or formal parameter list.

    For an identifier to appear legally as an export in a derives annotation, it must be a formal parameter or must appear legally in a preceding global annotation and must be of mode 'out' or mode 'in out'.
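
    As a sketch (State is assumed to be an own variable of the enclosing package, of type Integer), an identifier may appear in a derives annotation only if it is a formal parameter or appears in the preceding global annotation:

        procedure Update (X : in Integer)
        --# global  in out State;
        --# derives State from State, X;
        is
        begin
           State := State + X;
        end Update;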

    *** Semantic Error : 754 : The identifier YYY.XXX is either undeclared or not visible at this point. This package must be both inherited and withed to be visible here.

    For a package name to be visible in Ada context, it must appear in both the inherit clause and the with clause of the enclosing package.

    *** Semantic Error : 755 : The identifier YYY.XXX is either undeclared or not visible at this point. A parent of a child package must be inherited to be visible here.

    A parent of a child package must be inherited (but not withed) to be visible in that child.

    *** Semantic Error : 756 : The identifier YYY.XXX is either undeclared or not visible at this point. The grandparent of a child package should not appear in this prefix.

    A grandparent of a child package should not be included in prefixes referencing a declaration of the child package.

    *** Semantic Error : 757 : The identifier XXX is either undeclared or not visible at this point. A record field name cannot be the same as its indicated type.

    *** Semantic Error : 770 : If Any_Priority is defined, Priority and Interrupt_Priority must also be defined.

    If the type Any_Priority is defined in package System, then the subtypes Priority and Interrupt_Priority must also be defined; if support for tasking is not required, then the definition of Any_Priority may be removed.

    *** Semantic Error : 771 : The parent type of this subtype must be Any_Priority.

    Ada 95 requires that both Priority and Interrupt_Priority be immediate subtypes of Any_Priority.

    *** Semantic Error : 772 : The range of Priority must contain at least 30 values; LRM D.1(26).

    Ada 95 requires that the range of the subtype Priority include at least 30 values; this requirement is stated in the Ada 95 Language Reference Manual at D.1(26).
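
    For illustration, a target configuration file fragment of the following form (the ranges shown are invented, not prescribed) satisfies this check and the related checks reported by errors 773 to 775:

        package System is
           subtype Any_Priority       is Integer      range  0 .. 63;
           subtype Priority           is Any_Priority range  0 .. 30;
           subtype Interrupt_Priority is Any_Priority range 31 .. 63;
        end System;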

    *** Semantic Error : 773 : Priority'First must equal Any_Priority'First; LRM D.1(10).

    Ada 95 requires that task priority types meet the following criteria, the second of which is relevant to this error:

  • subtype Any_Priority is Integer range implementation-defined;
  • subtype Priority is Any_Priority range Any_Priority'First .. implementation-defined;
  • subtype Interrupt_Priority is Any_Priority range Priority'Last+1 .. Any_Priority'Last.

    *** Semantic Error : 774 : Interrupt_Priority'First must equal Priority'Last + 1; LRM D.1(10).

    Ada 95 requires that task priority types meet the following criteria, the third of which is relevant to this error:

  • subtype Any_Priority is Integer range implementation-defined;
  • subtype Priority is Any_Priority range Any_Priority'First .. implementation-defined;
  • subtype Interrupt_Priority is Any_Priority range Priority'Last+1 .. Any_Priority'Last.

    *** Semantic Error : 775 : Interrupt_Priority'Last must equal Any_Priority'Last; LRM D.1(10).

    Ada 95 requires that task priority types meet the following criteria, the third of which is relevant to this error:

  • subtype Any_Priority is Integer range implementation-defined;
  • subtype Priority is Any_Priority range Any_Priority'First .. implementation-defined;
  • subtype Interrupt_Priority is Any_Priority range Priority'Last+1 .. Any_Priority'Last.

    *** Semantic Error : 776 : In SPARK95 mode, only packages Standard, System, Ada.Real_Time and Ada.Interrupts may be specified in the config file.

    In SPARK95 mode, the packages that may be specified in the target configuration file are: Standard, System, Ada.Real_Time and Ada.Interrupts. The latter two are ignored unless the Ravenscar profile is selected.

    *** Semantic Error : 777 : In package System, Priority must be an immediate subtype of Integer.

    Ada 95, and hence SPARK95, defines Priority as being an immediate subtype of Integer.

    *** Semantic Error : 778 : This identifier is not valid at this point in the target configuration file.

    The specified identifier cannot be used here; it is most probably either not valid in the target configuration file at all, or might be valid in a different package, but not here.

    *** Semantic Error : 779 : Definition of this package in the target configuration file is not allowed in SPARK83 mode.

    In SPARK83 mode, only package Standard may be specified in the target configuration file.

    *** Semantic Error : 780 : Type XXX must be private.

    This type may only be declared as private in the target configuration file.

    *** Semantic Error : 781 : The lower bound of a signed integer type declaration must be greater than or equal to System.Min_Int.

    This error can only be generated in SPARK95 mode when the configuration file specifies a value for System.Min_Int.

    *** Semantic Error : 782 : The upper bound of a signed integer type declaration must be less than or equal to System.Max_Int.

    This error can only be generated in SPARK95 mode when the configuration file specifies a value for System.Max_Int.

    *** Semantic Error : 783 : Modulus must be less than or equal to System.Max_Binary_Modulus.

    This error can only be generated in SPARK95 mode when the configuration file specifies a value for System.Max_Binary_Modulus.

    *** Semantic Error : 784 : System.Max_Binary_Modulus must be a positive power of 2.

    *** Semantic Error : 785 : The number of digits specified exceeds the value defined for System.Max_Digits.

    The maximum decimal precision for a floating point type, where a range specification has not been included, is defined by System.Max_Digits.

    *** Semantic Error : 786 : The number of digits specified exceeds the value defined for System.Max_Base_Digits.

    The maximum decimal precision for a floating point type, where a range specification has been included, is defined by System.Max_Base_Digits.

    *** Semantic Error : 787 : Digits value must be positive.

    *** Semantic Error : 788 : Delta value must be positive.

    *** Semantic Error : 789 : The only currently supported type attribute in this context is 'Base.

    *** Semantic Error : 790 : A base type assertion requires a type here.

    *** Semantic Error : 791 : The base type in this assertion must be a predefined type.

    Predefined types are those defined either by the language, or in package Standard, using the configuration file mechanism.

    *** Semantic Error : 792 : The types in this assertion must both be either floating point or signed integer.

    *** Semantic Error : 793 : This base type must have a defined range in the configuration file.

    If a predefined type is to be used in a base type assertion or in a derived type declaration, then it must appear in the configuration file and have a well-defined range.

    *** Semantic Error : 794 : Range of subtype exceeds range of base type.

    *** Semantic Error : 795 : A base type assertion must be in the same declarative region as that of the full type definition.

    *** Semantic Error : 796 : This type already has a base type: either it already has a base type assertion, or is explicitly derived, or is a predefined type.

    A base type assertion can only be given exactly once. Explicitly derived scalar types and predefined types never need a base type assertion.

    *** Semantic Error : 797 : The base type in a floating point base type assertion must have a defined accuracy.

    *** Semantic Error : 798 : The accuracy of the base type in a base type assertion must be at least that of the type which is the subject of the assertion.

    *** Semantic Error : 799 : Only a simple type can be the subject of a base type assertion.

    *** Semantic Error : 800 : Modulus must be a positive power of 2.

    In SPARK, modular types must have a modulus which is a positive power of 2.
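
    For example (type names invented):

        type Byte  is mod 2**8;    -- legal: the modulus 256 is a power of 2
        type Digit is mod 10;      -- illegal in SPARK: 10 is not a power of 2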

    *** Semantic Error : 801 : Modular types may only be used in SPARK95.

    Ada83 (and hence SPARK83) does not include modular types.

    *** Semantic Error : 803 : Unary arithmetic operators are not permitted for modular types.

    Unary arithmetic operators are of little value. The "abs" and "+" operators have no effect for modular types, and so are not required. The unary minus operator is a source of potential confusion, and so is not permitted in SPARK.

    *** Semantic Error : 804 : Universal expression may not be implicitly converted to a modular type here. Left hand operand requires qualification to type XXX.

    A universal expression cannot be used as the left hand operand of a binary operator if the right hand operand is of a modular type. Qualification of the left hand expression is required in this case.
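
    As a sketch (assuming the declarations type Word is mod 2**16; and W : Word;):

        W := 16#00FF# and W;           -- rejected: universal left-hand operand requires qualification
        W := Word'(16#00FF#) and W;    -- legal: the operand is qualified to type Word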

    *** Semantic Error : 805 : Universal expression may not be implicitly converted to a modular type here. Right hand operand requires qualification to type XXX.

    A universal expression cannot be used as the right hand operand of a binary operator if the left hand operand is of a modular type. Qualification of the right hand expression is required in this case.

    *** Semantic Error : 806 : Universal expression may not be implicitly converted to a modular type here. Right hand operand requires qualification.

    A universal expression cannot be used as the operand of a unary "not" operator if no type can be determined from the context of the expression. Qualification of the operand is required in this case.

    *** Semantic Error : 814 : Default_Bit_Order must be of type Bit_Order.

    The only possible type for the constant System.Default_Bit_Order is System.Bit_Order when it appears in the configuration file.

    *** Semantic Error : 815 : The only allowed values of Default_Bit_Order are Low_Order_First and High_Order_First.

    System.Bit_Order is implicitly declared in package System when a configuration file is given. This is an enumeration type with only two literals, Low_Order_First and High_Order_First.

    *** Semantic Error : 820 : Abstract types are not currently permitted in SPARK.

    Only non-abstract tagged types are currently supported. It is hoped to lift this restriction in a future Examiner release.

    *** Semantic Error : 821 : This type declaration must be a tagged record because its private type is tagged.

    If a type is declared as "tagged private" then its full declaration must be a tagged record.

    *** Semantic Error : 822 : XXX is not a tagged type; only tagged types may be extended.

    In SPARK, "new" can only be used to declare a type extension; other derived types are not permitted.

    *** Semantic Error : 823 : This type may not be extended in the same package in which it is declared.

    SPARK only permits types from another library package to be extended. This rule prevents overloading of inherited operations.
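
    As a sketch (the package and type names are invented), a root tagged type declared in one library package may be extended in another:

        package Shapes is
           type Object is tagged record
              X, Y : Integer;
           end record;
        end Shapes;

        with Shapes;
        --# inherit Shapes;
        package Circles is
           type Circle is new Shapes.Object with record
              Radius : Integer;
           end record;
        end Circles;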

    *** Semantic Error : 824 : This package already extends a type from package XXX. Only one type extension per package is permitted.

    SPARK only permits one type extension per package. This rule prevents overloading of inherited operations.

    *** Semantic Error : 825 : Type XXX expected in order to complete earlier private extension.

    Since SPARK only permits one type extension per package it follows that the declaration "new XXX with private" in a package visible part must be paired with "new XXX with record..." in its private part. The ancestor type XXX must be the same in both declarations.

    *** Semantic Error : 826 : Type extension is not permitted in SPARK 83.

    Type extension is an Ada 95 feature not included in Ada 83 or SPARK 83.

    *** Semantic Error : 827 : The actual parameter associated with a tagged formal parameter in an inherited operation must be an object not an expression.

    There are several reasons for this SPARK rule. Firstly, Ada requires tagged parameters to be passed by reference, so an object must exist at least implicitly. Secondly, in order to perform flow analysis of inherited subprogram calls, the Examiner needs to identify what subset of the information available at the point of call is passed to and from the called subprogram. Since information can only flow through objects, it follows that the actual parameter must be an object.

    *** Semantic Error : 828 : Tagged types and tagged type extensions may only be declared in library-level package specifications.

    This SPARK rule facilitates the main uses of tagged types while greatly simplifying visibility rules.

    *** Semantic Error : 829 : Illegal re-declaration: this subprogram shares the same name as the inheritable root operation XXX but does not override it.

    To avoid overloading, SPARK prohibits more than one potentially visible subprogram having the same name.

    *** Semantic Error : 830 : A private type may not be implemented as a tagged type or an extension of a tagged type.

    This rule means that a private type can only be implemented as a tagged type if the private type itself is tagged.

    *** Semantic Error : 831 : Extended tagged types may only be converted in the direction of their root type.

    This is an Ada rule: type conversions simply omit unused fields of the extended type. It follows that conversions must be in the direction of the root type.

    *** Semantic Error : 832 : Only tagged objects, not expressions, may be converted.

    For flow analysis purposes the Examiner needs to know what subset of the information in the unconverted view is available in the converted view. Since information can only flow through objects it follows that only objects can be converted.

    *** Semantic Error : 833 : Invalid record aggregate: type XXX has a private ancestor.

    If an extended type has a private ancestor then an extension aggregate must be used rather than a normal aggregate.

    *** Semantic Error : 834 : Null records are only permitted if they are tagged.

    An empty record can have no use in a SPARK program other than as a root type from which other types can be derived and extended. For this reason, null records are only allowed if they are tagged.

    *** Semantic Error : 835 : XXX is not an extended tagged record type.

    An extension aggregate is only appropriate if the record type it is defining is an extended record. A normal aggregate should be used for other record (and array) types.

    *** Semantic Error : 836 : This expression does not represent a valid ancestor type of the aggregate XXX.

    The expression before the reserved word "with" must be of an ancestor type of the overall aggregate type. In SPARK, the ancestor expression may not be a subtype mark.
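
    Reusing the invented Shapes and Circles declarations sketched earlier, the ancestor part of an extension aggregate is an expression (here a qualified aggregate), not a subtype mark:

        C : Circles.Circle :=
              Circles.Circle'(Shapes.Object'(X => 0, Y => 0) with Radius => 5);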

    *** Semantic Error : 837 : Invalid record aggregate: there is a private ancestor between the type of this expression and the type of the aggregate XXX.

    The ancestor type can be a tagged type with a private extension; however, there must be no private extensions between the ancestor type and the type of the aggregate.

    *** Semantic Error : 838 : Incomplete aggregate: null record cannot be used here because fields in XXX require values.

    The aggregate form "with null record" can only be used if the type of the aggregate is a null record extension of the ancestor type. If any fields are added between the ancestor type and the aggregate type then values need to be supplied for them so "null record" is inappropriate.

    *** Semantic Error : 839 : This package already contains a root tagged type or tagged type extension. Only one such declaration per package is permitted.

    SPARK permits one root tagged type or one tagged type extension per package, but not both. This rule prevents the declaration of illegal operations with more than one controlling parameter.

    *** Semantic Error : 840 : A tagged or extended type may not appear here. SPARK does not permit the declaration of primitive functions with controlling results.

    A primitive function controlled by its return result would be almost unusable in SPARK because a data flow error would occur wherever it was used.

    *** Semantic Error : 841 : The return type in the declaration of this function contained an error. It is not possible to check the validity of this return type.

    Issued when there is an error in the return type on a function's initial declaration. In this situation we cannot be sure what return type is expected in the function's body. It would be misleading to simply report a type mismatch since the types might match perfectly and both be wrong. Instead, the Examiner reports the above error and refuses to analyse the function body until its specification is corrected.

    *** Semantic Error : 842 : Pragma Atomic_Components is not permitted in SPARK when the Ravenscar profile is selected.

    *** Semantic Error : 843 : Pragma Volatile_Components is not permitted in SPARK when the Ravenscar profile is selected.

    *** Semantic Error : 844 : Missing or contradictory overriding_indicator for operation XXX. This operation successfully overrides its parent operation.

    In SPARK2005, an operation which successfully overrides a parent operation must be specified as Overriding.

    *** Semantic Error : 845 : Subprogram XXX does not successfully override a parent operation.

    In SPARK2005, an overriding operation must successfully override an operation inherited from the parent.

    *** Semantic Error : 850 : This construct may only be used when the Ravenscar profile is selected.

    Support for concurrent features of the SPARK language, including protected objects, tasking, etc. are only supported when the Ravenscar profile is selected.

    *** Semantic Error : 851 : The parameter to pragma Atomic must be a simple_name.

    The parameter to pragma Atomic must be a simple_name and may not be passed using a named association.

    *** Semantic Error : 852 : pragma Atomic may only appear in the same immediate scope as the type to which it applies.

    This is an Ada rule (pragma Atomic takes a local name; see LRM 13.1(1)). Note that this precludes the use of pragma Atomic on a predefined type.

    *** Semantic Error : 853 : pragma Atomic may only apply to a scalar base type, or to a non-tagged record type with exactly 1 field that is a predefined scalar type.

    pragma Atomic may only be applied to base types that are scalar (i.e. enumeration types, integer types, real types or modular types), or to a non-tagged record type with a single field which is of a predefined scalar type such as Integer, Character or Boolean. As an additional special case, a record type with a single field of type System.Address is also allowed.
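
    For example (type name and range invented):

        type Status_Register is range 0 .. 255;
        pragma Atomic (Status_Register);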

    *** Semantic Error : 854 : pragma Atomic takes exactly one parameter.

    *** Semantic Error : 855 : The type of own variable XXX is not consistent with its modifier.

    An own variable with the task modifier must be of a task type, and a task own variable must have the task modifier. An own variable with the protected modifier must be of a protected type, a suspension object type, or a type to which pragma Atomic applies; conversely, a protected or suspension object own variable must have the protected modifier.

    *** Semantic Error : 858 : A variable that appears in a protects property list may not appear in a refinement clause.

    A variable in a protects list is effectively protected and hence cannot be refined.

    *** Semantic Error : 859 : A protected own variable may not appear in a refinement clause.

    Protected state cannot be refined or be used as refinement constituents.

    *** Semantic Error : 860 : Own variable XXX appears in a protects list and hence must appear in the initializes clause.

    Protected state (including all refinement constituents) must be initialized.

    *** Semantic Error : 861 : Both abstract own variable XXX and refinement constituent YYY must have an Integrity property.

    If an abstract own variable has an Integrity property, then so must all its refinement constituents, and vice-versa.

    *** Semantic Error : 862 : Both abstract own variable XXX and refinement constituent YYY must have the same Integrity value.

    If both an abstract own variable and a refinement constituent have Integrity properties specified, then the value of the Integrity must be the same.

    *** Semantic Error : 863 : Own variable XXX is protected and may not appear in an initializes clause.

    Protected own variables must always be initialized, and should not appear in initializes annotations.

    *** Semantic Error : 864 : Unexpected initialization specification - all own variables of this package are either implicitly initialized, or do not require initialization.

    An own variable initialization clause and that of its refinement constituents must be consistent.

    *** Semantic Error : 865 : Field XXX is part of the ancestor part of this aggregate and does not require a value here.

    An extension aggregate must supply values for all fields that are part of the overall aggregate type but not those which are part of the ancestor part.

    *** Semantic Error : 866 : The expression in a delay_until statement must be of type Ada.Real_Time.Time.

    When the Ravenscar Profile is selected, the delay until statement may be used. The argument of this statement must be of type Ada.Real_Time.Time.
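
    A typical cyclic pattern, assuming Next_Release : Ada.Real_Time.Time and Period : Ada.Real_Time.Time_Span (names invented), is:

        Next_Release := Next_Release + Period;
        delay until Next_Release;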

    *** Semantic Error : 867 : Subprogram XXX contains a delay statement but does not have a delay property.

    Any subprogram that may call delay until must have a delay property in a declare annotation. Your subprogram is directly or indirectly making a call to delay until.

    *** Semantic Error : 868 : Protected object XXX may only be declared immediately within a library package.

    This error message is issued if a type mark representing a protected type appears anywhere other than in a library level variable declaration or library-level own variable type announcement.

    *** Semantic Error : 869 : Protected type XXX already contains an Entry declaration; only one Entry is permitted.

    The Ravenscar profile prohibits a protected type from declaring more than one entry.

    *** Semantic Error : 870 : Protected type XXX does not have any operations, at least one operation must be declared.

    A protected type which provides no operations can never be used so SPARK requires the declaration of at least one.

    *** Semantic Error : 871 : A type can only be explicitly derived from a predefined Integer or Floating Point type or from a tagged record type.

    *** Semantic Error : 872 : Variable XXX is not protected; only protected items may be globally accessed by protected operations.

    In order to avoid the possibility of shared data corruption, SPARK prohibits protected operations from accessing unprotected data items.

    *** Semantic Error : 873 : This subprogram requires a global annotation which references the protected type name XXX.

    In order to statically detect certain bounded errors defined by the Ravenscar profile, SPARK requires every visible operation of a protected type to globally reference the abstract state of the type.

    *** Semantic Error : 874 : Protected state XXX must be initialized at declaration.

    Because there is no guarantee that a concurrent thread that initializes a protected object will be executed before one that reads it, the only way we can be sure that a protected object is properly initialized is to do so at the point of declaration. You have either declared some protected state and not included an initialization or you have tried to initialize some protected state in package body elaboration.

    *** Semantic Error : 875 : Protected type expected; access discriminants may only refer to protected types in SPARK.

    Access discriminants have been allowed in SPARK solely to allow devices made up of co-operating Ravenscar-compliant units to be constructed. For this reason only protected types may appear in access discriminants.

    *** Semantic Error : 876 : This protected type or task declaration must include either a pragma Priority or pragma Interrupt_Priority.

    To allow the static detection of certain bounded errors defined by the Ravenscar profile, SPARK requires an explicitly-set priority for each protected type, task type or object of those types. System.Default_Priority may be used explicitly, provided package System has been defined in the configuration file.
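
    A sketch of a conforming protected type follows; the names, the priority value and the annotations are illustrative only (the global and derives annotations reference the protected type name, as required by error 873):

        protected type Counter is
           pragma Priority (10);
           procedure Increment;
           --# global  in out Counter;
           --# derives Counter from Counter;
        private
           Count : Integer := 0;   -- protected state initialized at declaration
        end Counter;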

    *** Semantic Error : 877 : Priority values require an argument which is an expression of type integer.

    *** Semantic Error : 878 : This protected type declaration contains a pragma Attach_Handler and must therefore also include a pragma Interrupt_Priority.

    To allow the static detection of certain bounded errors defined by the Ravenscar profile, SPARK requires an explicitly-set priority for each protected type or object. System.Default_Priority may be used explicitly, provided package System has been defined in the configuration file.

    *** Semantic Error : 879 : Unexpected pragma XXX: this pragma may not appear here.

    pragma Interrupt_Priority must be the first item in a protected type declaration or task type declaration; pragma Priority must be the first item in a protected type declaration, task type declaration or the main program.

    *** Semantic Error : 880 : Pragma Priority or Interrupt_Priority expected here.

    Issued when a pragma other than Priority or Interrupt_Priority appears as the first item in a protected type or task type declaration.

    *** Semantic Error : 881 : The priority of XXX must be in the range YYY.

    See LRM D.1(17).

    *** Semantic Error : 882 : Integrity property requires an argument which is an expression of type Natural.

    *** Semantic Error : 883 : Pragma Interrupt_Handler may not be used; SPARK does not support the dynamic attachment of interrupt handlers [LRM C3.1(9)].

    Interrupt_Handler is of no use unless dynamic attachment of interrupt handlers is to be used.

    *** Semantic Error : 884 : Pragma Attach_Handler is only permitted immediately after the corresponding protected procedure declaration in a protected type declaration.

    Pragma Attach_Handler may only be used within a protected type declaration. Furthermore, it must immediately follow a protected procedure declaration with the same name as the first argument to the pragma.
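
    A sketch of the expected placement follows; the type name is invented, the interrupt name in Ada.Interrupts.Names is implementation-defined (and therefore hypothetical here), and the SPARK annotations are omitted for brevity:

        protected type Button_Handler is
           pragma Interrupt_Priority (System.Interrupt_Priority'Last);
           procedure Handle;
           pragma Attach_Handler (Handle, Ada.Interrupts.Names.Button_IRQ);
        private
           Pressed : Boolean := False;
        end Button_Handler;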

    *** Semantic Error : 885 : Pragma Attach_Handler may only be applied to a procedure with no parameters.

    See LRM C.3.1(5).

    *** Semantic Error : 887 : A discriminant may only appear alone, not in an expression.

    Issued when a task or protected type priority is set using an expression involving a discriminant. The use of such an expression greatly complicates the static evaluation of the priority of task or protected subtypes thus preventing the static elimination of certain Ravenscar bounded errors.

    *** Semantic Error : 888 : Unexpected Delay, XXX already has a Delay property.

    A procedure may only have a maximum of one delay annotation.

    *** Semantic Error : 889 : The own variable XXX must have the suspendable property.

    The type used to declare this object must be a protected type with an entry, or a suspension object type.

    *** Semantic Error : 890 : The name XXX already appears in the suspends list.

    Items may not appear more than once in a suspends list.

    *** Semantic Error : 891 : Task type or protected type required.

    Issued in a subtype declaration where the constraint is a discriminant constraint. Only task and protected types may take a discriminant constraint as part of a subtype declaration.

    *** Semantic Error : 892 : Array type, task type or protected type required.

    Issued in a subtype declaration where the constraint is either a discriminant constraint or an index constraint (these two forms cannot always be distinguished syntactically). Only task and protected types may take a discriminant constraint, and only array types may take an index constraint, as part of a subtype declaration.

    *** Semantic Error : 893 : Number of discriminant constraints differs from number of known discriminants of type XXX.

    Issued in a subtype declaration if too many or too few discriminant constraints are supplied.
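
    As a sketch (names invented; the SPARK annotations normally required on a task type are omitted for brevity), a subtype supplies one value for each known discriminant:

        task type Worker (Pri : System.Priority) is
           pragma Priority (Pri);
        end Worker;

        subtype Fast_Worker is Worker (Pri => 20);   -- discriminant constraint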

    *** Semantic Error : 894 : Only variables of a protected type may be aliased.

    SPARK supports the keyword aliased in variable declarations only so that protected and task types can support access discriminants. Since it has no other purpose it may not be used except in a protected object declaration.

    *** Semantic Error : 895 : Attribute Access may only be applied to variables which are declared as aliased, variable XXX is not aliased.

    This is a slightly annoying Ada issue. Marking a variable as aliased prevents it from being placed in a register, which would make pointing at it hazardous; however, SPARK only permits 'Access on protected types, which are limited and therefore always passed by reference and hence immune from register optimization. Requiring aliased on protected objects that will appear in discriminant constraints is therefore unwanted syntactic sugar.

    *** Semantic Error : 896 : The task type XXX does not have an associated body.

    Issued at the end of a package body if that body contains neither a body nor a body stub for a task type declared in its specification.

    *** Semantic Error : 897 : The protected type XXX does not have an associated body.

    Issued at the end of a package body if that body contains neither a body nor a body stub for a protected type declared in its specification.

    *** Semantic Error : 898 : XXX is not a protected or task type which requires a body.

    Issued if a body or body stub for a task or protected type is encountered and there is no matching specification.

    *** Semantic Error : 899 : A body for type XXX has already been declared.

    Issued if a body or body stub for a task or protected type is encountered and an earlier body has already been encountered.

    *** Semantic Error : 901 : Suspension object XXX may only be declared immediately within a library package specification or body.

    Suspension objects must be declared at library level. They cannot be used in protected type state or as local variables in subprograms.

    *** Semantic Error : 902 : Recursive use of type mark XXX in known discriminant.

    *** Semantic Error : 903 : Protected or suspension object types cannot be used to declare constants.

    Protected and suspension objects are used to ensure integrity of shared objects. If it is necessary to share constant data then these constructs should not be used.

    *** Semantic Error : 904 : Protected or suspension objects cannot be used as subprogram parameters.

    SPARK does not currently support this feature.

    *** Semantic Error : 905 : Protected or suspension objects cannot be returned from functions.

    SPARK does not currently support this feature.

    *** Semantic Error : 906 : Protected or suspension objects cannot be used in composite types.

    Protected and suspension objects cannot be used in record or array structures.

    *** Semantic Error : 907 : Delay until must be called from a task or unprotected procedure body.

    You are calling delay until from an invalid construct. Any construct that calls delay until must have a delay property in its declare annotation, and must be either a task body or an unprotected procedure body.

    *** Semantic Error : 908 : Blocking properties are not allowed in protected scope.

    Procedures in protected scope must not block and therefore blocking properties are prohibited.

    *** Semantic Error : 909 : Object XXX cannot suspend.

    You are either applying the suspendable property to an own variable that cannot suspend, you have declared a variable (whose own variable has the suspendable property) which cannot suspend, or you have used an item in a suspends list that does not have the suspendable property. An object can only suspend if it is a suspension object or a protected type with an entry.

    *** Semantic Error : 910 : Name XXX must appear in the suspends list property for the enclosing unit.

    Protected entry calls and calls to Ada.Synchronous_Task_Control.Suspend_Until_True may block the currently executing task. SPARK requires you announce this fact by placing the actual callee name in the suspends list for the enclosing unit.

    *** Semantic Error : 911 : The argument in pragma Priority for the main program must be an integer literal or a local constant of static integer value.

    If the main program priority is not an integer literal then you should declare a constant that has the required value in the declarative part of the main program prior to the position of the pragma.

    *** Semantic Error : 912 : This call contains a delay property that is not propagated to the enclosing unit.

    The call being made has a declare annotation that contains a delay property. SPARK requires that this property is propagated up the call chain and hence must appear in a declare annotation for the enclosing unit.

    *** Semantic Error : 913 : This call has a name in its suspends list which is not propagated to the enclosing unit.

    The call being made has a declare annotation that contains a suspends list. SPARK requires that the entire list is propagated up the call chain and hence must appear in a declare annotation for the enclosing unit.

    *** Semantic Error : 914 : The name XXX specified in the suspends list can never be called.

    You have specified the name of a protected or suspension object in the suspends list that can never be called by this procedure or task.

    *** Semantic Error : 915 : Procedure XXX has a delay property but cannot delay.

    You have specified a delay property for this procedure but delay until can never be called from it.

    *** Semantic Error : 916 : Protected object XXX has a circular dependency in subprogram YYY.

    The type of the protected object mentions the protected object name in the derives list for the given subprogram.

    *** Semantic Error : 917 : Procedure XXX cannot be called from a protected action.

    The procedure being called may block and hence cannot be called from a protected action.

    *** Semantic Error : 918 : The delay property is not allowed for XXX.

    The delay property may only be applied to a procedure.

    *** Semantic Error : 919 : The priority property is not allowed for XXX.

    The priority property can only be applied to protected own variables which are type announced. If the type has been declared it must be a protected type.

    *** Semantic Error : 920 : The suspends property is not allowed for XXX.

    The suspends property may only be applied to task type specifications and procedures.

    *** Semantic Error : 921 : The identifier XXX is not recognised as a component of a property list.

    The property list can only specify the reserved word delay, suspends or priority.

    *** Semantic Error : 922 : The own variable XXX must have the priority property.

    In order to perform the ceiling priority checks the priority property must be given to all own variables of protected type.

    *** Semantic Error : 923 : The procedure XXX cannot be called from a function as it has a blocking side effect.

    Blocking is seen as a side effect and hence procedures that potentially block cannot be called from functions.

    *** Semantic Error : 924 : The suspendable property is not allowed for XXX.

    Objects that suspend must be declared as own protected variables.

    *** Semantic Error : 925 : The own variable or task XXX must have a type announcement.

    Own variables of protected type and own tasks must have a type announcement.

    *** Semantic Error : 926 : Illegal declaration of task XXX. Task objects must be declared at library level.

    Task objects must be declared in library level package specifications or bodies.

    *** Semantic Error : 927 : The own task annotation for this task is missing the name XXX in its suspends list.

    The task type declaration has the name XXX in its suspends list, and this name must also appear in the suspends list of the own task annotation.

    *** Semantic Error : 928 : Private elements are not allowed for protected type XXX.

    Protected type XXX has been used to declare a protected, moded own variable. Protected, moded own variables are refined onto a set of virtual elements with the same mode. As such private elements are not allowed.

    *** Semantic Error : 929 : Unexpected declare annotation. Procedure XXX should have the declare annotation on the specification.

    Declare annotations cannot appear on the procedure body if it appears on the procedure specification.

    *** Semantic Error : 930 : Task XXX does not appear in the own task annotation for this package.

    A task has been declared that is not specified as an own task of the package.

    *** Semantic Error : 931 : Task XXX does not have a definition.

    A task name appears in the own task annotation for this package but is never declared.

    *** Semantic Error : 932 : The priority for protected object XXX does not match that given in the own variable declaration.

    The priority given in the priority property must match that given in the protected type.

    *** Semantic Error : 933 : A pragma Priority is required for the main program when Ravenscar Profile is enabled.

    When SPARK profile Ravenscar is selected, all tasks, protected objects and the main program must explicitly be assigned a priority.

    *** Semantic Error : 934 : Priority ceiling check failure: the priority of YYY is less than that of XXX.

    The active priority of a task is the higher of its base priority and the ceiling priorities of all protected objects that it is executing. The active priority at the point of a call to a protected operation must not exceed the ceiling priority of the callee.

    *** Semantic Error : 935 : The own variable XXX must have the interrupt property.

    An own variable has been declared using a protected type with a pragma attach handler. Such objects are used in interrupt processing and must have the interrupt property specified in their own variable declaration.

    *** Semantic Error : 936 : The interrupt property is not allowed for XXX.

    The interrupt property can only be applied to protected own variables that are type announced. If the type is declared then it must be a protected type that contains an attach handler.

    *** Semantic Error : 937 : The protects property is not allowed for XXX.

    The protects property can only be applied to protected own variables that are type announced. If the type is declared then it must be a protected type.

    *** Semantic Error : 938 : The unprotected variable XXX is shared by YYY and ZZZ.

    XXX is an unprotected variable that appears in the global list of the threads YYY and ZZZ. Unprotected variables cannot be shared between threads in SPARK. A thread is one of: the main program, a task, an interrupt handler.

    *** Semantic Error : 939 : The suspendable item XXX is referenced by YYY and ZZZ.

    XXX is an own variable with the suspends property that appears in the suspends list of the threads YYY and ZZZ. SPARK prohibits this to prevent more than one thread being suspended on the same item at any one time. A thread is one of: the main program, a task, an interrupt handler.

    *** Semantic Error : 940 : XXX is a protected own variable. Protected variables may not be used in proof contexts.

    The use of protected variables in pre and postconditions or other proof annotations is not (currently) supported. Protected variables are volatile because they can be changed at any time by another program thread and this may invalidate some common proof techniques. The prohibition of protected variables does not prevent proof of absence of run-time errors nor proof of protected operation bodies. See the manual "SPARK Proof Manual" for more details.

    *** Semantic Error : 941 : The type of own variable XXX must be local to this package.

    The type used to announce an own variable with a protects property must be declared in the same package.

    *** Semantic Error : 942 : Only one instance of the type XXX is allowed.

    Type XXX has a protects property. This means there can be only one object in the package that has this type or any subtype of this type.

    *** Semantic Error : 943 : The name XXX cannot appear in a protects list.

    All items in a protects list must be unprotected own variables owned by this package.

    *** Semantic Error : 944 : The name XXX is already protected by YYY.

    The name XXX appears in more than one protects list. The first time it appeared was for own variable YYY. XXX should appear in at most one protects list.

    *** Semantic Error : 945 : The property XXX must be given a static expression for its value.

    This property can only accept a static expression.

    *** Semantic Error : 946 : The own variable XXX must only ever be accessed from operations in protected type YYY.

    The own variable XXX is protected by the protected type YYY and hence must never be accessed from anywhere else.

    *** Semantic Error : 947 : The own variable XXX appears in a protects list for type YYY but is not used in the body.

    The protected type YYY claims to protect XXX via a protects property. However, the variable XXX is not used by any operation in YYY.

    *** Semantic Error : 948 : The type of own variable or task XXX must be a base type.

    Own tasks and protected own variables of a protected type must be announced using the base type. The subsequent variable declaration may be a subtype of the base type.

    *** Semantic Error : 949 : Unexpected partition annotation: a global annotation may only appear here when the Ravenscar profile is selected.

    When the sequential SPARK profile is selected, the global and derives annotation on the main program describes the entire program's behaviour; no additional partition annotation is required or permitted. Note that an annotation must appear here if the Ravenscar profile is selected.

    *** Semantic Error : 950 : Partition annotation expected: a global and, optionally, a derives annotation must appear after 'main_program' when the Ravenscar profile is selected.

    When the Ravenscar profile is selected the global and derives annotation on the main program describes the behaviour of the environment task only, not the entire program. An additional annotation, called the partition annotation, is required to describe the entire program's behaviour; this annotation follows immediately after 'main_program;'.

    *** Semantic Error : 951 : Inherited package XXX contains tasks and/or interrupt handlers and must therefore appear in the preceding WITH clause.

    In order to ensure that a Ravenscar program is complete, SPARK requires that all 'active' packages inherited by the environment task also appear in a corresponding with clause. This check ensures that any program entities described in the partition annotation are also linked into the program itself.

    *** Semantic Error : 952 : Subprogram XXX is an interrupt handler and cannot be called.

    Interrupt handler operations cannot be called.

    *** Semantic Error : 953 : Interrupt property error for own variable YYY. XXX is not an interrupt handler in type ZZZ.

    The handler names in an interrupt property must match one in the protected type of the own variable.

    *** Semantic Error : 954 : Interrupt property error for own variable XXX. Interrupt stream name YYY is illegal.

    The stream name must be unprefixed and not already in use within the scope of the package.

    *** Semantic Error : 955 : XXX can only appear in the partition wide flow annotation.

    Interrupt stream variables are used only to enhance the partition wide flow annotation and must not be used elsewhere.

    *** Semantic Error : 956 : XXX already appears as an interrupt handler in the interrupt mappings.

    An interrupt handler can be mapped onto exactly one interrupt stream variable. An interrupt stream variable may be mapped onto many interrupt handlers.

    *** Semantic Error : 957 : Consecutive updates of protected variable XXX are disallowed when they do not depend directly on its preceding value.

    A protected variable cannot be updated more than once within a subprogram or task without direct reference to its preceding value. Each update of a protected variable may have a wider effect than just the change of value of the protected variable: the overall change is considered to be the accumulation of all updates and reads of the protected variable, and to preserve this information flow successive updates must depend directly on the preceding value of the variable.

    *** Semantic Error : 958 : A task may not import the unprotected state XXX.

    A task may not import unprotected state unless it is of mode 'in'. This is because, under the concurrent elaboration policy, the task cannot rely on the state being initialized before it is run.

    *** Semantic Error : 959 : Unprotected state XXX is exported by a task and hence must not appear in an initializes clause.

    Own variable XXX is being accessed by a task. The order in which the task is run and the own variable initialized is non-deterministic under a concurrent elaboration policy. In this case SPARK forces the task to perform the initialization and as such the own variable must not appear in an initializes clause.

    *** Semantic Error : 960 : The function Ada.Real_Time.Clock can only be used directly (1) in an assignment or return statement or (2) to initialize a library-level constant.

  • To avoid ordering effects, functions which globally access own variables which have modes (indicating that they are connected to the external environment) may only appear directly in assignment or return statements. They may not appear as actual parameters or in any other form of expression.
  • SPARK relaxes the rule prohibiting the use of function calls in elaboration code in the case of the function Ada.Real_Time.Clock; however, the function may only be used to initialize a constant directly, as sketched below.
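
    For illustration (Start, T and Period are invented names; T is of type Ada.Real_Time.Time and Period of type Ada.Real_Time.Time_Span):

        Start : constant Ada.Real_Time.Time := Ada.Real_Time.Clock;
        --  legal: Clock directly initializes a library-level constant

        T := Ada.Real_Time.Clock;             -- legal: Clock appears directly in an assignment
        T := Ada.Real_Time.Clock + Period;    -- illegal: Clock appears inside a larger expression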

    *** Semantic Error : 961 : This property value is of an incorrect format.

    Please check the user manual for valid property value formats.

    *** Semantic Error : 962 : Error(s) detected by VC Generator. See the .vcg file for more information.

    This message is echoed to the screen if an unrecoverable error occurs which makes the generation of VCs for the current subprogram impossible. Another message more precisely identifying the problem will be placed in the .vcg file.

    *** Semantic Error : 986 : A protected function may not call a locally-declared protected procedure.

    See LRM 9.5.1 (2). A protected function has read access to the protected elements of the type whereas the called procedure has read-write access. There is no way in which an Ada compiler can determine whether the procedure will illegally update the protected state or not so the call is prohibited by the rules of Ada. (Of course, in SPARK, we know there is no function side effect but the rules of Ada must prevail nonetheless).

    *** Semantic Error : 987 : Task types and protected types may only be declared in package specifications.

    The Examiner performs certain important checks at the whole program level, such as detection of illegal sharing of unprotected state and partition-level information flow analysis. These checks require visibility of task types and protected types (especially those containing interrupt handlers), so SPARK requires these types to be declared in package specifications. Subtypes and objects of task types, protected types and their subtypes may be declared in package bodies.

    *** Semantic Error : 988 : Illegal re-use of identifier XXX; this identifier is used in a directly visible protected type.

    SPARK does not allow the re-use of operation names which are already in use in a directly visible protected type. The restriction is necessary to avoid overload resolution issues in the protected body. For example, type PT in package P declares operation K. Package P also declares an operation K. From inside the body of PT, a call to K could refer to either of the two Ks since both are directly visible.

    *** Semantic Error : 989 : The last statement of a task body must be a plain loop with no exits.

    To prevent any possibility of a task terminating (which can lead to a bounded error), SPARK requires each task to end with a non-terminating loop. The environment task (or "main program") does not need to end in a plain loop provided the program closure includes at least one other task. If there are no other tasks, then the environment task must be made non-terminating with a plain loop.
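
    A sketch of a conforming task body (the subprogram names are invented) is:

        task body Controller is
        begin
           Initialise;          -- start-up code
           loop                 -- plain loop: no iteration scheme and no exit statements
              Do_One_Cycle;     -- cyclic activity; the task never terminates
           end loop;
        end Controller;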

    *** Semantic Error : 990 : Unexpected annotation, a task body may have only global and derives annotations.

    Issued if a pre, post or declare annotation is attached to a task body.

    *** Semantic Error : 991 : Unexpected task body, XXX is not the name of a task declared in this package specification.

    Issued if task body is encountered for which there is no preceding declaration.

    *** Semantic Error : 992 : A body for task type XXX has already been declared.

    Issued if a duplicate body or body stub is encountered for a task.

    *** Semantic Error : 993 : There is no protected type declaration for XXX.

    Issued if a body is found for a protected type for which there is no preceding declaration.

    *** Semantic Error : 994 : Invalid guard, XXX is not a Boolean protected element of this protected type.

    The SPARK Ravenscar rules require a simple Boolean guard which must be one of the protected elements of the type declaring the entry.

    *** Semantic Error : 995 : Unexpected entry body, XXX is not the name of an entry declared in this protected type.

    Local entries are not permitted, so a protected body can declare at most one entry body, and it must have been declared in the protected type specification.

    *** Semantic Error : 996 : The protected operation XXX, declared in this type, does not have an associated body.

    Each exported protected operation must have a matching implementation in the associated protected body.

    *** Semantic Error : 997 : A body for protected type XXX has already been declared.

    Each protected type declaration must have exactly one matching protected body or body stub.

    *** Semantic Error : 998 : There is no protected type declaration for XXX.

    Issued if a protected body or body stub is found and there is no matching declaration for it.

    *** Semantic Error : 999 : This feature of Generics is not yet implemented.

    Generics are currently limited to instantiation of Unchecked_Conversion.

    Warning : No semantic checks carried out, text may not be legal SPARK.

    Issued when the Examiner is used solely to check the syntax of a SPARK text: this does not check the semantics of a program (e.g. the correctness of the annotations) and therefore does not guarantee that a program is legal SPARK.

    Note: Information flow analysis not carried out.

    This is issued as a reminder that information flow analysis has not been carried out in this run of the Examiner: information flow errors may be present undetected in the text analysed.

    Note: Flow analysis mode is automatic.

    This is issued as a reminder that the Examiner will perform information flow analysis if it encounters full derives annotations and will perform data flow analysis if only moded global annotations are present. Information flow errors may be present undetected in the text analysed.

    Note: Ada 83 language rules selected.

    Issued when the Examiner is used in SPARK 83 mode.

    --- Warning : 1 : The identifier XXX is either undeclared or not visible at this point.

    This warning will appear against an identifier in a with clause if it is not also present in an inherit clause. Such an identifier cannot be used in any non-hidden part of a SPARK program. The use of with without inherit is permitted to allow reference in hidden parts of the text to imported packages which are not legal SPARK. For example, the body of SPARK_IO is hidden and implements the exported operations of the package by use of package TEXT_IO. For this reason TEXT_IO must appear in the with clause of SPARK_IO. (warning control file keyword: with_clauses).

    --- Warning : 2 : Representation clause - ignored by the Examiner.

    The significance of representation clauses cannot be assessed by the Examiner because it depends on the specific memory architecture of the target system. Like pragmas, representation clauses can change the meaning of a SPARK program and the warning highlights the need to ensure their correctness by other means. (warning control file keyword: representation_clauses).

    --- Warning : 3 : Pragma - ignored by the Examiner.

    All pragmas encountered by the Examiner generate this warning. While many pragmas (e.g. pragma page) are harmless others can change a program's meaning, for example by causing two variables to share a single memory location. (warning control file keyword: pragma pragma_identifier or pragma all).

    --- Warning : 4 : declare annotation - ignored by the Examiner.

    The declare annotation is ignored by the Examiner if the profile is not Ravenscar. (warning control file keyword: declare_annotations).

    --- Warning : 5 : XXX contains interrupt handlers; it is important that an interrupt identifier is not used by more than one handler.

    Interrupt identifiers are implementation defined and the Examiner cannot check that values are used only once. Duplication can occur by declaring more than one object of a single (sub)type where that type defines handlers. It may also occur if interrupt identifiers are set via discriminants and two or more actual discriminants generate the same value. (warning control file keyword: interrupt_handlers).

    --- Warning : 6 : Machine code insertion. Code insertions are ignored by the Examiner.

    Machine code is inherently implementation dependent and cannot be analysed by the Examiner. Users are responsible for ensuring that the behaviour of the inserted machine code matches the annotation of the subprogram containing it.

    --- Warning : 7 : This identifier is an Ada2005 reserved word.

    Such identifiers will be rejected by an Ada2005 compiler and by the SPARK Examiner for SPARK2005. It is recommended to rename such identifiers for future upward compatibility. (warning control file keyword: ada2005_reserved_words).

    --- Warning : 11 : Unnecessary others clause - case statement is already complete.

    The others clause is non-executable because all case choices have already been covered explicitly. If the range of the case choice is altered later then the others clause may be executed with unexpected results. It is better to omit the others clause in which case any extension of the case range will result in a compilation error.
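
    An illustrative fragment (the type Colour, the variable C and the procedures called are invented for the example):

      type Colour is (Red, Amber, Green);

      case C is
         when Red    => Stop;
         when Amber  => Prepare;
         when Green  => Go;
         when others => null;    -- warning 11: every choice is already covered
      end case;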

    --- Warning : 12 : Function XXX is an instantiation of Unchecked_Conversion.

    See ALRM 13.9. The use of Unchecked_Conversion can result in implementation-defined values being returned. The function should be used with great care. The principal use of Unchecked_Conversion in SPARK programs is for the reading of external ports prior to performing a validity check; here the suppression of constraint checking prior to validation is useful. The Examiner does not assume that the value returned by an unchecked conversion is valid and so unprovable run-time check VCs will result if a suitable validity check is not carried out before the value is used. (warning control file keyword: unchecked_conversion).

    --- Warning : 13 : Function XXX is an instantiation of Unchecked_Conversion returning a type for which run-time checks are not generated. Users must take steps to ensure the validity of the returned value.

    See ALRM 13.9. The use of Unchecked_Conversion can result in invalid values being returned. The function should be used with great care, especially where, as in this case, the type returned generates neither Ada run-time checks nor SPARK run-time verification conditions. For such types, this warning is the ONLY reminder the Examiner generates that the generated value may have an invalid representation. For this reason the warning is NOT suppressed by the warning control file keyword unchecked_conversion. The principal use of Unchecked_Conversion in SPARK programs is for the reading of external ports prior to performing a validity check; here the suppression of constraint checking prior to validation is useful.

    --- Warning : 120 : Unexpected unmatched 'end accept' annotation ignored.

    This end accept annotation does not match any preceding start accept in this unit.

    --- Warning : 121 : No warning message matches this accept annotation.

    The accept annotation is used to indicate that a particular flow error or semantic warning message is expected and can be justified. This warning indicates that the expected message did not actually occur. Note that when matching any information flow error messages containing two variable names, the export should be placed first and the import second (the order in the error message may differ from this depending on the style of information flow error reporting selected). For example: --# accept Flow, 601, X, Y, "..."; justifies the message: "X may be derived from the imported value(s) of Y" or the alternative form: "Y may be used in the derivation of X".

    --- Warning : 122 : Maximum number of error or warning justifications reached, subsequent accept annotations will be ignored.

    The number of justifications per source file is limited. If you reach this limit it is worth careful consideration of why the code generates so many warnings.

    --- Warning : 169 : Direct update of own variable XXX, which is an own variable of a non-enclosing package.

    With the publication of Edition 3.1 of the SPARK Definition the previous restriction prohibiting the direct updating of own variables of non-enclosing packages was removed; however, the preferred use of packages as abstract state machines is compromised by such action which is therefore discouraged. (warning control file keyword: direct_updates).

    --- Warning : 200 : This static expression cannot be evaluated by the Examiner.

    Issued if a static expression exceeds the internal limits of the Examiner because its value is, for example, too large to be evaluated using infinite precision arithmetic. No value will be recorded for the expression and this may limit the Examiner's ability to detect certain sorts of errors such as numeric constraints. (warning control file keyword: static_expressions).

    --- Warning : 201 : This expression cannot be evaluated statically because its value may be implementation-defined.

    Raised, for example, when evaluating 'Size of a type that does not have an explicit Size representation clause. Attributes of implementation-defined types, such as Integer'Last, may also be unknown to the Examiner if they are not specified in the configuration file. (warning control file keyword: static_expressions).

    --- Warning : 202 : An arithmetic overflow has occurred. Constraint checks have not been performed.

    Raised when comparing two real numbers specified to a degree of precision that the Examiner cannot handle. Consider reducing the precision of these numbers.

    --- Warning : 300 : VCs cannot be built for multi-dimensional array aggregates.

    Issued when an aggregate of a multi-dimensional array is found. Suppresses generation of VCs for that subprogram. Can be worked round by using arrays of arrays.

    --- Warning : 301 : Called subprogram exports abstract types for which RTCs are not possible.

    --- Warning : 302 : This expression may be re-ordered by a compiler. Add parentheses to remove ambiguity.

    Issued when a potentially re-orderable expression is encountered. For example x := a + b + c; Whether intermediate sub-expression values overflow may depend on the order of evaluation which is compiler-dependent. Therefore, code generating this warning should be parenthesized to remove the ambiguity. e.g. x := (a + b) + c;.

    --- Warning : 303 : Overlapping choices may not be detected.

    Issued where choices in an array aggregate or case statement are outside the range which can be detected because of limits on the size of a table internal to the Examiner.

    --- Warning : 304 : Case statement may be incomplete.

    Issued when the Examiner cannot determine the completeness of a case statement because the bounds of the type of the controlling expression exceed the size of the internal table used to perform the checks.

    --- Warning : 305 : Value too big for internal representation.

    Issued when the Examiner cannot determine the completeness of an array aggregate or case statement because the number used in a choice exceeds the size allowed in the internal table used to perform the checks.

    --- Warning : 306 : Aggregate may be incomplete.

    Issued when the Examiner cannot determine the completeness of an array aggregate because its bounds exceed the size of the internal table used to perform the checks.

    --- Warning : 307 : Completeness checking incomplete: index type(s) undefined or not discrete.

    Issued where the array index (sub)type is inappropriate: this is probably because there is an error in its definition, which will have been indicated by a previous error message.

    --- Warning : 308 : Use of equality operator with floating point type.

    The use of this operator is discouraged in SPARK because of the difficulty in determining exactly what it means to say that two instances of a floating point number are equal.

    --- Warning : 309 : Type conversion to own type, consider using type qualification instead.

    Issued where a type conversion is either converting from a (sub)type to the same (sub)type or is converting between two subtypes of the same type. In the former case the type conversion may be safely removed because no constraint check is required; in the latter case the type conversion may be safely replaced by a type qualification which preserves the constraint check. (warning control file keyword: type_conversions).
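
    A sketch of the second case, assuming an integer subtype Small and invented procedure and parameter names:

      subtype Small is Integer range 0 .. 9;

      procedure Capture (Y : in Integer; X : out Small)
      --# derives X from Y;
      is
      begin
         X := Small (Y);     -- warning 309: conversion between subtypes of the same type
         -- preferred form, which keeps the constraint check:
         -- X := Small'(Y);
      end Capture;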

    --- Warning : 310 : Use of obsolescent Ada 83 language feature.

    Issued when a language feature defined by Ada 95 to be obsolescent is used. Use of such features is not recommended because compiler support for them cannot be guaranteed. (warning control file keyword: obsolescent_features).

    --- Warning : 311 : Priority pragma for XXX is unavailable and has not been considered in the ceiling priority check.

    --- Warning : 312 : Replacement rules cannot be built for multi-dimensional array constant XXX.

    Issued when a VC or PF references a multi-dimensional array constant. Can be worked round by using arrays of arrays.

    --- Warning : 313 : The constant XXX has semantic errors in its initializing expression or has a hidden completion which prevent generation of a replacement rule.

    Issued when replacement rules are requested for a composite constant which had semantic errors in its initializing expression, or is a deferred constant whose completion is hidden from the Examiner. Semantic errors must be eliminated before replacement rules can be generated.

    --- Warning : 314 : The constant XXX has semantic errors in its type which prevent generation of rules.

    Issued when an attempt is made to generate type deduction rules for a constant which has semantic errors in its type. These semantic errors must be eliminated before type deduction rules can be generated.

    --- Warning : 315 : The procedure XXX does not have a derives annotation. The analysis of this call assumes that each of its exports is derived from all of its imports.

    Issued in flow=auto mode when a function calls a procedure that does not have a derives annotation. In most cases this assumption will not affect the validity of the analysis, but if the called procedure derives null from an import this can have an impact. Note that functions are considered to have implicit derives annotations so this warning is not issued for calls to functions.

    --- Warning : 320 : The proof function XXX has a non-boolean return and a return annotation. Please make sure that the return is always in-type.

    Any proof function with a non-Boolean return can introduce unsoundness if the result could overflow. For example, a return of (x + 1) is not in-type if x can take the value of Integer'Last. (warning control file keyword: proof_function_non_boolean).

    --- Warning : 321 : The proof function XXX has an implicit return annotation. Please be careful not to introduce unsoundness.

    Any proof function with an implicit return can easily introduce unsoundness as they do not have a body which we can check to expose any contradictions. For example: return B => False. (warning control file keyword: proof_function_implicit).

    --- Warning : 322 : The return refinement for proof function XXX is assumed to hold as it is axiomatic and thus cannot be checked.

    (warning control file keyword: proof_function_refinement).

    --- Warning : 323 : The precondition refinement for proof function XXX is assumed to hold as it is axiomatic and thus cannot be checked.

    (warning control file keyword: proof_function_refinement).

    --- Warning : 350 : Unexpected pragma Import. Variable XXX is not identified as an external (stream) variable.

    The presence of a pragma Import makes it possible that the variable is connected to some external device. The behaviour of such variables is best captured by making them moded own variables (or "stream" variables). If variables connected to the external environment are treated as if they are normal program variables then misleading analysis results are inevitable. The use of pragma Import on local variables of subprograms is particularly deprecated. The warning may safely be disregarded if the variable is not associated with memory-mapped input/output or if the variable concerned is an own variable and the operations on it are suitably annotated to indicate volatile, stream-like behaviour. Where pragma Import is used, it is essential that the variable is properly initialized at the point from which it is imported. (warning control file keyword:imported_objects).

    --- Warning : 351 : Unexpected address clause. XXX is a constant.

    Great care is needed when attaching an address clause to a constant. The use of such a clause is safe if, and only if, the address supplied provides a valid value for the constant which does not vary during the execution life of the program, for example, mapping the constant to PROM data. If the address clause causes the constant to have a value which may alter, or worse, change dynamically under the influence of some device external to the program, then misleading or incorrect analysis is certain to result. If the intention is to create an input port of some kind, then a constant should not be used. Instead a moded own variable (or "stream" variables) should be used. (warning control file keyword: address_clauses).

    --- Warning : 360 : This pragma must have zero or one arguments.

    --- Warning : 361 : This pragma must have exactly one argument.

    --- Warning : 362 : This pragma must have exactly two arguments.

    --- Warning : 363 : This pragma must have at least one argument.

    --- Warning : 364 : This pragma must have between two and four arguments.

    --- Warning : 365 : This pragma must have exactly zero arguments.

    --- Warning : 366 : This pragma must have one or two arguments.

    --- Warning : 380 : Casing inconsistent with declaration. Expected casing is XXX.

    The Examiner checks the case used for an identifier against the declaration of that identifier and warns if they do not match. (warning control file keyword: style_check_casing).

    --- Warning : 389 : Generation of VCs for consistency of generic and instantiated subprogram constraints is not yet supported. It will be supported in a future release of the Examiner.

    --- Warning : 390 : This generic subprogram has semantic errors in its declaration which prevent instantiations of it.

    Issued to inform the user that a generic subprogram instantiation cannot be completed because of earlier errors in the generic declaration.

    --- Warning : 391 : If the identifier XXX represents a package which contains a task or an interrupt handler then the partition-level analysis performed by the Examiner will be incomplete. Such packages must be inherited as well as withed.

    --- Warning : 392 : External variable XXX may have an invalid representation and its assignment may cause a run-time exception which is outside the scope of the absence of RTE proof.

    Where values are read from external variables (i.e. variables connected to the external environment) there is no guarantee that the bit pattern read will be a valid representation for the type of the external variable. Unexpected behaviour may result if invalid values are used in expressions. If the code is compiled with Ada run-time checks enabled the assignment of an invalid value may (but need not) raise a run-time exception dependent on the compiler. A compiler may provide facilities to apply extended checking which may also raise a run-time exception if an invalid value is used. The SPARK Toolset does not check the validity of the external variable and therefore any possible exception arising from its assignment is outside the scope of proof of absence of RTE. To ensure that a run-time exception cannot occur make the type of the external variable such that any possible bit pattern that may be read from the external source is a valid value. If the desired type is such a type then the always_valid assertion may be applied to the external variable; otherwise use explicit tests to ensure it has a valid value for the desired type before converting to an object of the desired type. In SPARK 95 the 'Valid attribute (see ALRM 13.9.2) may be used to determine the validity of a value if it can be guaranteed that the assignment of an invalid value read from an external variable will not raise a run time exception, either by compiling the code with checks off or by ensuring the compiler does not apply constraint checks when assigning same subtype objects. Note that when the Examiner is used to generate run-time checks, it will not be possible to discharge those involving external variables unless one of the above steps is taken. More information on interfacing can be found in the INFORMED manual and the SPARK Proof Manual. (warning control file keyword: external_assignment).

    --- Warning : 393 : External variable XXX may have an invalid representation and is of a type for which run-time checks are not generated but its assignment may cause a run-time exception. Users must take steps to ensure the validity of the assigned or returned value.

    Where values are read from external variables (i.e. variables connected to the external environment) there is no guarantee that the bit pattern read will be a valid representation for the type of the external variable. Unexpected behaviour may result if invalid values are used in expressions. If the code is compiled with Ada run-time checks enabled the assignment of an invalid value may (but need not) raise a run-time exception dependent on the compiler. A compiler may provide facilities to apply extended checking which may also raise a run-time exception if an invalid value is used. The SPARK Toolset does not check the validity of the external variable and therefore any possible exception arising from its assignment is outside the scope of proof of absence of RTE. Where, as in this case, the type is one for which Ada run-time checks need not be generated and SPARK run-time verification conditions are not generated, extra care is required. For such types, this warning is the ONLY reminder the Examiner generates that the external value may have an invalid representation. For this reason the warning is NOT suppressed by the warning control file keyword external_assignment. To ensure that a run-time exception cannot occur make the type of the external variable such that any possible bit pattern that may be read from the external source is a valid value. Explicit tests of the value may then be used to determine the value of an object of the desired type. In SPARK 95 the 'Valid attribute (see ALRM 13.9.2) may be used to determine the validity of a value if it can be guaranteed that the assignment of an invalid value read from an external variable will not raise a run time exception, either by compiling the code with checks off or by ensuring the compiler does not apply constraint checks when assigning same subtype objects. Boolean external variables require special care since the Examiner does not generate run-time checks for Boolean variables; use of 'Valid is essential when reading Boolean external variables. More information on interfacing can be found in the INFORMED manual and the SPARK Proof Manual.

    --- Warning : 394 : Variables of type XXX cannot be initialized using the facilities of this package.

    A variable of a private type can only be used (without generating a data flow error) if there is some way of giving it an initial value. For a limited private type only a procedure that has an export of that type and no imports of that type is suitable. For a private type either a procedure, function or (deferred) constant is required. The required facility may be placed in, or already available in, a public child package. (warning control file keyword: private_types).

    --- Warning : 395 : Variable XXX is an external (stream) variable but does not have an address clause or a pragma import.

    When own variables are given modes they are considered to be inputs from or outputs to the external environment. The Examiner regards them as being volatile (i.e. their values can change in ways not visible from an inspection of the source code). If a variable is declared in that way but it is actually an ordinary variable which is NOT connected to the environment then misleading analysis is inevitable. The Examiner expects to find an address clause or pragma import for variables of this kind to indicate that they are indeed memory-mapped input/output ports. This warning is issued if an address clause or pragma import is not found.

    --- Warning : 396 : Unexpected address clause. Variable XXX is not identified as an external (stream) variable.

    The presence of an address clause makes it possible that the variable is connected to some external device. The behaviour of such variables is best captured by making them moded own variables (or "stream" variables). If variables connected to the external environment are treated as if they are normal program variables then misleading analysis results are inevitable. The use of address clauses on local variables of subprograms is particularly deprecated. The warning may safely be disregarded if the variable is not associated with memory-mapped input/output or if the variable concerned is an own variable and the operations on it are suitably annotated to indicate volatile, stream-like behaviour. (warning control file keyword: address_clauses).

    --- Warning : 397 : Variables of type XXX can never be initialized before use.

    A variable of a private type can only be used (without generating a data flow error) if there is some way of giving it an initial value. For a limited private type only a procedure that has an export of that type and no imports of that type is suitable. For a private type either a procedure, function or (deferred) constant is required.

    --- Warning : 398 : The own variable XXX can never be initialized before use.

    The own variable can only be used (without generating a data flow error) if there is some way of giving it an initial value. If it is initialized during package elaboration (or implicitly by the environment because it represents an input port) it should be placed in an "initializes" annotation. Otherwise there needs to be some way of assigning an initial value during program execution. Either the own variable needs to be declared in the visible part of the package so that a direct assignment can be made to it or, more usually, the package must declare at least one procedure for which the own variable is an export but not an import. Note that if the own variable is an abstract own variable with some constituents initialized during elaboration and some during program execution then it will never be possible correctly to initialize it; such abstract own variables must be divided into separate initialized and uninitialized components.
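
    A sketch of one way to make initialization possible (the package, own variable and procedure names are invented): the own variable is not listed in an initializes annotation, so the package offers a procedure for which the own variable is an export but not an import:

      package Store
      --# own State;
      is
         procedure Reset;
         --# global out State;
         --# derives State from ;   -- State is an export only, so Reset can give
                                    -- it its first value during program execution
      end Store;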

    --- Warning : 399 : The called subprogram has semantic errors in its interface (parameters and/or annotations) which prevent flow analysis of this call.

    Issued to inform the user that flow analysis has been suppressed because of the error in the called subprogram's interface.

    --- Warning : 9 : The body of XXX has a hidden exception handler - analysis and verification of contracts for this handler have not been performed.

    Issued when a --# hide XXX annotation is used to hide a user-defined exception handler. (warning control file keyword: handler_parts).

    --- Warning : 10 : XXX is hidden - hidden text is ignored by the Examiner.

    Issued when a --# hide XXX annotation is used. (warning control file keyword: hidden_parts).

    --- Warning : 400 : Variable XXX is declared but not used.

    Issued when a variable declared in a subprogram is neither referenced, nor updated. (warning control file keyword: unused_variables).

    --- Warning : 402 : Default assertion planted to cut loop.

    In order to prove properties of code containing loops, the loop must be "cut" with a suitable assertion statement. When generating run-time checks, the Examiner inserts a simple assertion to cut any loops which do not have one supplied by the user. The assertion is placed at the point where this warning appears in the listing file. The default assertion asserts that the subprogram's precondition (if any) is satisfied, that all imports to it are in their subtypes and that any for loop counter is in its subtype. In many cases this provides sufficient information to complete a proof of absence of run-time errors. If more information is required, then the user can supply an assertion and the Examiner will append the above information to it. (warning control file keyword: default_loop_assertions).
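
    An illustrative user-supplied cut (the names and the asserted predicate are purely illustrative, assuming here that the elements of A are non-negative; the Examiner appends the default information described above to whatever is asserted):

      Total := 0;
      for I in Index loop
         --# assert Total >= 0;       -- user-supplied loop-cutting assertion
         Total := Total + A (I);
      end loop;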

    --- Warning : 403 : XXX is declared as a variable but used as a constant.

    XXX is a variable which was initialized at declaration but whose value is only ever read not updated; it could therefore have been declared as a constant. (warning control file keyword: constant_variables).

    --- Warning : 404 : Subprogram imports variables of abstract types for which run-time checks cannot be generated.

    --- Warning : 405 : VCs for statements including real numbers are approximate.

    The Examiner generates VCs associated with real numbers using perfect arithmetic rather than the machine approximations used on the target platform. It is possible that rounding errors might cause a Constraint_Error even if these run-time check proofs are completed satisfactorily. (warning control file keyword: real_rtcs).

    --- Warning : 406 : VC Generator unable to create output files. Permission is required to create directories and files in the output directory.

    This message is echoed to the screen if the Examiner is unable to create output files for the VCs being generated (for instance, if the user does not have write permission for the output directory).

    --- Warning : 407 : This package requires a body. Care should be taken to provide one because an Ada compiler will not detect its omission.

    Issued where SPARK own variable and initialization annotations make it clear that a package requires a body but where no Ada requirement for a body exists.

    --- Warning : 408 : VCs could not be generated for this subprogram owing to semantic errors in its specification or body. Unprovable (False) VC generated.

    Semantic errors prevent VC Generation, so a single False VC is produced. This will be detected and reported by POGS.

    --- Warning : 409 : VCs could not be generated for this subprogram due to its size and/or complexity exceeding the capacity of the VC Generator. Unprovable (False) VC generated.

    A subprogram which has excessive complexity of data structure or number of paths may cause the VC Generator to exceed its capacity. A single False VC is generated in this case to make sure this error is detected in subsequent proof and analysis with POGS.

    --- Warning : 410 : Task or interrupt handler XXX is either unavailable (hidden) or has semantic errors in its specification which prevent partition-wide flow analysis being carried out.

    Partition-wide flow analysis is performed by checking all packages withed by the main program for tasks and interrupt handlers and constructing an overall flow relation that captures their cumulative effect. It is for this reason that SPARK requires task and protected types to be declared in package specifications. If a task, or a protected type containing an interrupt handler, is hidden from the Examiner (in a hidden package private part) or contains errors in its specification, the partition-wide flow analysis cannot be constructed correctly and is therefore suppressed. Correct the specification of the affected tasks and (temporarily if desired) make them visible to the Examiner.

    --- Warning : 411 : Task type XXX is unavailable and has not been considered in the shared variable check.

    The Examiner checks that there is no potential sharing of unprotected data between tasks. If a task type is hidden from the Examiner in a hidden package private part, then it is not possible to check whether that task may share unprotected data.

    --- Warning : 412 : Task type XXX is unavailable and has not been considered in the max-one-in-a-queue check.

    The Examiner checks that no more than one task can suspend on a single object. If a task is hidden from the Examiner in a hidden package private part, then it is not possible to check whether that task may suspend on the same object as another task.

    --- Warning : 413 : Task or main program XXX has errors in its annotations. The shared variable and max-one-in-a-queue checks may be incomplete.

    The Examiner checks that no more than one task can suspend on a single object and that there is no potential sharing of unprotected data between tasks. These checks depend on the accuracy of the annotations on the task types withed by the main program. If these annotations contain errors, then any reported violations of the shared variable and max-one-in-a-queue checks will be correct; however, the check may be incomplete. The errors in the task annotations should be corrected.

    --- Warning : 414 : Long output file name has been truncated.

    Raised if an output file name is longer than the limit imposed by the operating system and has been truncated. Section 4.7 of the Examiner User Manual describes how the output file names are constructed. If this message is seen there is a possibility that the output from two or more subprograms will be written to the same file name, if they have a sufficiently large number of characters in common.

    --- Warning : 415 : The analysis of generic packages is not yet supported. It will be supported in a future release of the Examiner.

    --- Warning : 420 : Instance of SEPR 2124 found. An extra VC will be generated here and must be discharged to ensure absence of run-time errors. Please seek advice for assistance with this issue.

    In release 7.5 of the Examiner, a flaw in the VC generation was fixed such that subcomponents of records and elements of arrays when used as "out" or "in out" parameters will now generate an additional VC to verify absence of run-time errors. This warning flags an instance of this occurrence. Please read the release note and/or seek advice for assistance with this issue.

    --- Warning : 425 : The -vcg switch should be used with the selected language profile.

    A code generator language profile such as KCG is in use and so conditional flow errors may be present in the subprogram. Therefore the -vcg switch must be used to generate VCs and the VCs related to definedness discharged using the proof tools.

    --- Warning : 426 : The with_clause contains a reference to a public child of the package. The Examiner will not detect mutual recursion between subprograms of the two packages.

    A code generator language profile such as KCG allows a package body to with its own public child which is not normally permitted in SPARK. The removal of this restriction means that the Examiner will not detect mutual recursion between subprograms declared in the visible parts of the package and its child. The code generator is expected to guarantee the absence of recursion.

    --- Warning : 430 : SLI generation abandoned owing to syntax or semantic errors or multiple units in a single source file.

    --- Warning : 431 : Preconditions on the main program are assumed to be true and not checked by the VC generation system.

    --- Warning : 444 : Assumptions cannot be checked and must be justified with an accept annotation.

    --- Warning : 495 : The VC file XXX has a pathname longer than 255 characters which can produce unexpected problems on Windows with respect to the SPARK tools (undischarged VCs) and other tools.

    There is little that can be done to work around this as this is a fundamental limitation of Windows. You could try one of the following: Perform analysis higher up in the directory tree (i.e. in C:\a instead of C:\project_name\spark\analysis). You could try remapping a directory to a new drive to do the same (google for subst). You could try renaming or restructuring your program to flatten the structure a bit. And finally you can perform analysis on a UNIX system such as Mac OSX or GNU/Linux as they do not suffer from this problem.

    ??? Flow Error : 501 : Expression contains reference(s) to variable XXX, which may have an undefined value.

    The expression may be that in an assignment or return statement, an actual parameter, or a condition occurring in an if or case statement, an iteration scheme or exit statement. The Examiner has identified at least one syntactic path to this point where the variable has NOT been given a value. Conditional data flow errors are extremely serious and must be carefully investigated. NOTE: the presence of random and possibly invalid values introduced by data flow errors invalidates proof of exception freedom for the subprogram body which contains them. All reports of data flow errors must be eliminated or shown to be associated with semantically infeasible paths before attempting exception freedom proofs. See the manual "SPARK Proof Manual " for full details.

    ??? Flow Error : 504 : Statement contains reference(s) to variable XXX, which may have an undefined value.

    The statement here is a procedure call, and the variable XXX may appear in an actual parameter, whose value is imported when the procedure is executed. If the variable XXX does not occur in the actual parameter list, it is an imported global variable of the procedure (named in its global definition). The Examiner has identified at least one syntactic path to this point where the variable has NOT been given a value. Conditional data flow errors are extremely serious and must be carefully investigated. NOTE: the presence of random and possibly invalid values introduced by data flow errors invalidates proof of exception freedom for the subprogram body which contains them. All reports of data flow errors must be eliminated or shown to be associated with semantically infeasible paths before attempting exception freedom proofs. See the manual "SPARK Proof Manual " for full details.

    ??? Flow Error : 601 : YYY may be derived from the imported value(s) of XXX.

    Here the item on the left of "may be derived from ..." is an exported variable and the item(s) on the right are imports of a procedure subprogram. The message reports a possible dependency, found in the code, which does not appear in the specified dependency relation (derives annotation). The discrepancy could be caused by an error in the subprogram code which implements an unintended dependency. It could also be an error in the subprogram derives annotation which omits a necessary and intended dependency. Finally, the Examiner may be reporting a false coupling between two items resulting from a non-executable code path or the sharing of disjoint parts of structured or abstract data (e.g. one variable writing to one element of an array and another variable reading back a different element). Unexpected dependencies should be investigated carefully and only accepted without modification of either code or annotation if it is certain they are of the "false coupling" kind.
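
    A sketch of code that would provoke this message (the procedure and parameter names are invented):

      procedure Split (A, B : in Integer; P, Q : out Integer)
      --# derives P from A &
      --#         Q from B;
      is
      begin
         P := A + B;    -- reported: "P may be derived from the imported value(s) of B"
         Q := B;
      end Split;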

    ??? Flow Error : 601 : The imported value of XXX may be used in the derivation of YYY.

    Here the first item is an import and the second is an export of a procedure subprogram. The message reports a possible dependency, found in the code, which does not appear in the specified dependency relation. This version of the message has been retained for backward compatibility.

    ??? Flow Error : 602 : The undefined initial value of XXX may be used in the derivation of YYY.

    Here XXX is a non-imported variable, and YYY is an export, of a procedure subprogram.

    ??? Flow Error : 605 : Information flow from XXX to YYY violates information flow policy.

    This message indicates a violation of security or safety policy, such as information flow from a Secret input to an Unclassified output.

    ??? Flow Error : 606 : The imported value of XXX may be used in the derivation of YYY. Furthermore, this information flow violates information flow policy.

    Here XXX is an import and YYY is an export of a procedure subprogram. The message reports a possible dependency, found in the code, which does not appear in the specified dependency relation. If this dependency did appear in the dependency relation, then it would also constitute an integrity violation.

    !!! Flow Error : 20 : Expression contains reference(s) to variable XXX which has an undefined value.

    The expression may be that in an assignment or return statement, an actual parameter, or a condition occurring in an if or case statement, an iteration scheme or exit statement. NOTE: the presence of random and possibly invalid values introduced by data flow errors invalidates proof of exception freedom for the subprogram body which contains them. All unconditional data flow errors must be eliminated before attempting exception freedom proofs. See the manual "SPARK Proof Manual" for full details.

    !!! Flow Error : 23 : Statement contains reference(s) to variable XXX which has an undefined value.

    The statement here is a procedure call or an assignment to an array element, and the variable XXX may appear in an actual parameter, whose value is imported when the procedure is executed. If the variable XXX does not occur in the actual parameter list, it is an imported global variable of the procedure (named in its global definition). NOTE: the presence of random and possibly invalid values introduced by data flow errors invalidates proof of exception freedom for the subprogram body which contains them. All unconditional data flow errors must be eliminated before attempting exception freedom proofs. See the manual "SPARK Proof Manual" for full details.

    !!! Flow Error : 22 : Value of expression is invariant.

    The expression is either a case expression or a condition (Boolean-valued expression) associated with an if-statement, not contained in a loop statement. The message indicates that the expression takes the same value whenever it is evaluated, in all program executions. Note that if the expression depends on values obtained by a call to another subprogram then a possible source for its invariance might be an incorrect annotation on the called subprogram.

    !!! Flow Error : 30 : The variable XXX is imported but neither referenced nor exported.

    !!! Flow Error : 31 : The variable XXX is exported but not (internally) defined.

    !!! Flow Error : 32 : The variable XXX is neither imported nor defined.

    !!! Flow Error : 33 : The variable XXX is neither referenced nor exported.

    !!! Flow Error : 34 : The imported, non-exported variable XXX may be redefined.

    The updating of imported-only variables is forbidden under all circumstances.

    !!! Flow Error : 35 : Importation of the initial value of variable XXX is ineffective.

    The meaning of this message is explained in Section 4.2 of Appendix A.

    !!! Flow Error : 36 : The referencing of variable XXX by a task or interrupt handler has been omitted from the partition annotation.

    This message is only issued when processing the partition annotation. The partition annotation must describe all the actions of the tasks and interrupt handlers making up the program. Therefore, if a variable is imported somewhere in the program by a task or interrupt handler, then it must also be an import at the partition level. As well as the omission of explicit imports, this message is also generated if the implicit imports of tasks and interrupt handlers are omitted. For tasks this means any variable the task suspends on and for interrupt handlers it means the name of the protected object containing the handler or, if given, the name of the interrupt stream associated with the handler.

    !!! Flow Error : 37 : The updating of variable XXX by a task or interrupt handler has been omitted from the partition annotation.

    This message is only issued when processing the partition annotation. The partition annotation must describe all the actions of the tasks and interrupt handlers making up the program. Therefore, if a variable is exported somewhere in the program by a task or interrupt handler, then it must also be an export at the partition level.

    !!! Flow Error : 38 : The protected element XXX must be initialized at its point of declaration.

    To avoid potential race conditions during program startup, all elements of a protected type must be initialized with a constant value at the point of declaration.
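
    A sketch of the required form (the names and the priority value are illustrative; SPARK annotations are omitted):

      protected type Latch is
         pragma Priority (10);
         procedure Engage;
      private
         Engaged : Boolean := False;   -- constant initial value given at declaration
      end Latch;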

    !!! Flow Error : 50 : YYY is not derived from the imported value(s) of XXX.

    The item before "is not derived ..." is an export or function return value and the item(s) after are imports of the subprogram. The message indicates that a dependency, stated in the dependency relation (derives annotation) or implied by the function signature is not present in the code. The absence of a stated dependency is always an error in either code or annotation.

    !!! Flow Error : 50 : The imported value of XXX is not used in the derivation of YYY.

    The variable XXX, which appears in the dependency relation of a procedure subprogram, as an import from which the export YYY is derived, is not used in the code for that purpose. YYY may be a function return value. This version of the message has been retained for backward compatibility.

    !!! Flow Error : 53 : The package initialization of XXX is ineffective.

    Here XXX is an own variable of a package, initialized in the package initialization. The message states that XXX is updated elsewhere, before being read.

    !!! Flow Error : 54 : The initialization at declaration of XXX is ineffective.

    Issued if the value assigned to a variable at declaration cannot affect the final value of any exported variable of the subprogram in which it occurs because, for example, it is overwritten before it is used.

    !!! Flow Error : 57 : Information flow from XXX to YYY violates the selected information flow policy.

    Issued if safety or security policy checking is enabled and the specified dependency relation contains a relationship in which the flow of information from state or input to state or output violates the selected policy.

    !!! Flow Error : 1 : The previously stated updating of XXX has been omitted.

    XXX occurred as an export in the earlier dependency relation but neither XXX nor any refinement constituent of it occurs in the refined dependency relation.

    !!! Flow Error : 2 : The updating of XXX has not been previously stated.

    A refinement constituent of XXX occurs as an export in the refined dependency relation but XXX does not occur as an export in the earlier dependency relation.

    !!! Flow Error : 3 : The previously stated dependency of the exported value of XXX on the imported value of YYY has been omitted.

    The dependency of the exported value of XXX on the imported value of YYY occurs in the earlier dependency relation but in the refined dependency relation, no constituents of XXX depend on any constituents of YYY.

    !!! Flow Error : 4 : The dependency of the exported value of XXX on the imported value of YYY has not been previously stated.

    A refined dependency relation states a dependency of XXX or a constituent of XXX on YYY or a constituent of YYY, but in the earlier relation, no dependency of XXX on YYY is stated.

    !!! Flow Error : 5 : The (possibly implicit) dependency of the exported value of XXX on its imported value has not been previously stated.

    Either a dependency of a constituent of XXX on at least one constituent of XXX occurs in the refined dependency relation, or not all the constituents of XXX occur as exports in the refined dependency relation. However, the dependency of XXX on itself does not occur in the earlier dependency relation.

    !!! Flow Error : 40 : Exit condition is stable, of index 0.

    !!! Flow Error : 40 : Exit condition is stable, of index 1.

    !!! Flow Error : 40 : Exit condition is stable, of index greater than 1.

    In these cases the (loop) exit condition occurs in an iteration scheme, an exit statement, or an if-statement whose (unique) sequence of statements ends with an unconditional exit statement - see the SPARK Definition. The concept of loop stability is explained in Section 4.4 of Appendix A. A loop exit condition which is stable of index 0 takes the same value at every iteration around the loop, and with a stability index of 1, it always takes the same value after the first iteration. Stability with indices greater than 0 does not necessarily indicate a program error, but the conditions for loop termination require careful consideration.

    !!! Flow Error : 41 : Expression is stable, of index 0.

    !!! Flow Error : 41 : Expression is stable, of index 1.

    !!! Flow Error : 41 : Expression is stable, of index greater than 1.

    The expression, occurring within a loop, is either a case expression or a condition (Boolean-valued expression) associated with an if-statement, whose value determines the path taken through the body of the loop, but does not (directly) cause loop termination. Information flow analysis shows that the expression does not vary as the loop is executed, so the same branch of the case or if statement will be taken on every loop iteration. An Index of 0 means that the expression is immediately stable, 1 means it becomes stable after the first pass through the loop and so on. The stability index is given with reference to the loop most closely-containing the expression. Stable conditionals are not necessarily an error but do require careful evaluation; they can often be removed by lifting them outside the loop.

    !!! Flow Error : 10 : Ineffective statement.

    Execution of this statement cannot affect the final value of any exported variable of the subprogram in which it occurs. The cause may be a data-flow anomaly (i.e. the statement could be an assignment to a variable which is always updated again before it is read). However, statements may be ineffective for other reasons - see Section 4.1 of Appendix A.

    !!! Flow Error : 10 : Assignment to XXX is ineffective.

    This message always relates to a procedure call or an assignment to a record. The variable XXX may be an actual parameter corresponding to a formal one that is exported; otherwise XXX is an exported global variable of the procedure. The message indicates that the updating of XXX, as a result of the procedure call, has no effect on any final values of exported variables of the calling subprogram. Where the ineffective assignment is expected (e.g. calling a supplied procedure that returns more parameters than are needed for the immediate purpose), it can be a useful convention to choose a distinctive name, such as "Unused" for the actual parameter concerned. The message "Assignment to Unused is ineffective" is then self-documenting.
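
    For example, a call of the kind described above might look like this (the procedure Get_Status and its parameters are invented):

      Get_Status (Code   => Current_Code,
                  Detail => Unused);   -- reported as "Assignment to Unused is ineffective"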

    *** Illegal Structure : 1 : An exit statement may not occur here.

    Exit statements must be of the form "exit when c;" where the closest enclosing statement is a loop or "if c then S; exit;" where the if statement has no else part and its closest enclosing statement is a loop. See the SPARK Definition for details.
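
    Sketches of the two permitted forms (the names Done, Item, Read_Next and Finalise are illustrative):

      loop
         Read_Next (Item, Done);
         exit when Done;          -- permitted: "exit when c;" directly inside a loop
      end loop;

      loop
         Read_Next (Item, Done);
         if Done then
            Finalise;
            exit;                 -- permitted: if with no else part, exit is last
         end if;
      end loop;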

    *** Illegal Structure : 2 : A return statement may not occur here.

    A return statement may only occur as the last statement of a function.

    *** Illegal Structure : 3 : The last statement of this function is not a return statement.

    SPARK requires that the last statement of a function be a return statement.

    *** Illegal Structure : 4 : Return statements may not occur in procedure subprograms.

    --- note : 1 : This dependency relation was not used for this analysis and has not been checked for accuracy.

    Issued when information flow analysis is not performed and when modes were specified in the global annotation. It is a reminder that the dependencies specified in this annotation (including whether each variable is an import or an export) have not been checked against the code, and may therefore be incorrect. (warning control file keyword: notes).

    --- note : 2 : This dependency relation has been used only to identify imports and exports, dependencies have been ignored.

    Issued as a reminder when information flow analysis is not performed in SPARK 83. The dependencies specified in this annotation have not been checked against the code, and may therefore be incorrect. (warning control file keyword: notes).

    --- note : 3 : The deferred constant Null_Address has been implicitly defined here.

    Issued as a reminder that the declaration of the type Address within the target configuration file implicitly defines a deferred constant of type Null_Address. (warning control file keyword: notes).

    --- note : 4 : The constant Default_Priority, of type Priority, has been implicitly defined here.

    Issued as a reminder that the declaration of the subtype Priority within the target configuration file implicitly defines a constant Default_Priority, of type Priority, with the value (Priority'First + Priority'Last) / 2. (warning control file keyword: notes).

    !!! Program has a cyclic path without an assertion.

    SPARK generates VCs for paths between cutpoints in the code; these must be chosen by the developer in such a way that every loop traverses at least one cutpoint. If the SPARK Examiner detects a loop which is not broken by a cutpoint, it cannot generate verification conditions for the subprogram in which the loop is located, and instead, issues this warning. This can only be corrected by formulating a suitable loop-invariant assertion for the loop and including it as an assertion in the SPARK text at the appropriate point.

    !!! Unexpected node kind in main tree.

    This message indicates corruption of the syntax tree being processed by the VC Generator. It should not be seen in normal operation.

    spark-2012.0.deb/lib/spark/confgen.adb0000644000175000017500000001503011753202341016411 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= -- This utility is intended to generate a target configuration file for use -- with the SPARK Examiner. -- *** IMPORTANT WARNING *** -- If you are using a cross-compiler, please note that this program -- must be compiled for and run on the target, rather than the host, -- in order to generate valid output. -- If you are using an embedded target which does not support Text_IO, -- but does have another mechanism for string output, change the -- following 'with' clause and update the 'renames' clause in the -- definition of package Output_Method below. with Text_IO; with System; procedure Confgen is package Output_Method renames Text_IO; use Output_Method; type T is range System.Min_Int .. System.Max_Int; type Tmod is mod System.Max_Binary_Modulus; type Tfloat is digits System.Max_Base_Digits range -1.0 .. 1.0; begin ------------------------- -- Output header block -- ------------------------- Put_Line ("-- Auto-generated SPARK target configuration file"); Put_Line ("-- Target claims to be '" & System.Name'Image (System.System_Name) & "'"); New_Line; ---------------------- -- Package Standard -- ---------------------- Put_Line ("package Standard is"); -- type Integer and Float are mandatory for SPARK95 Put_Line (" type Integer is range " & Integer'Image (Integer'First) & " .. " & Integer'Image (Integer'Last) & ";"); Put_Line (" type Float is digits " & Integer'Image (Float'Digits) & " range " & Float'Image (Float'First) & " .. " & Float'Image (Float'Last) & ";"); -- Uncomment any of the following that are supported by your -- compiler and target -- Optional signed integer types -- Put_Line (" type Short_Short_Integer is range " & -- Short_Short_Integer'Image (Short_Short_Integer'First) & -- " .. " & -- Short_Short_Integer'Image (Short_Short_Integer'Last) & ";"); -- Put_Line (" type Short_Integer is range " & -- Short_Integer'Image (Short_Integer'First) & " .. " & -- Short_Integer'Image (Short_Integer'Last) & ";"); -- Put_Line (" type Long_Integer is range " & -- Long_Integer'Image (Long_Integer'First) & " .. " & -- Long_Integer'Image (Long_Integer'Last) & ";"); -- Put_Line (" type Long_Long_Integer is range " & -- Long_Long_Integer'Image (Long_Long_Integer'First) & " .. " & -- Long_Long_Integer'Image (Long_Long_Integer'Last) & ";"); -- Optional floating point types -- Put_Line (" type Short_Short_Float is digits " & -- Integer'Image (Short_Short_Float'Digits) & " range " & -- Short_Short_Float'Image (Short_Short_Float'First) & " .. 
" & -- Short_Short_Float'Image (Short_Short_Float'Last) & ";"); -- Put_Line (" type Short_Float is digits " & -- Integer'Image (Short_Float'Digits) & " range " & -- Short_Float'Image (Short_Float'First) & " .. " & -- Short_Float'Image (Short_Float'Last) & ";"); -- Put_Line (" type Long_Float is digits " & -- Integer'Image (Long_Float'Digits) & " range " & -- Long_Float'Image (Long_Float'First) & " .. " & -- Long_Float'Image (Long_Float'Last) & ";"); -- Put_Line (" type Long_Long_Float is digits " & -- Integer'Image (Long_Long_Float'Digits) & " range " & -- Long_Long_Float'Image (Long_Long_Float'First) & " .. " & -- Long_Long_Float'Image (Long_Long_Float'Last) & ";"); Put_Line ("end Standard;"); New_Line; -------------------- -- Package System -- -------------------- Put_Line ("package System is"); -- The definition of type System.Address is optional; if it is specified, -- it must be private. Put_Line (" type Address is private;"); Put_Line (" Min_Int : constant := " & T'Image (System.Min_Int) & ";"); Put_Line (" Max_Int : constant := " & T'Image (System.Max_Int) & ";"); Put_Line (" Max_Binary_Modulus : constant := " & Tmod'Image (Tmod'Last) & " + 1;"); Put_Line (" Max_Digits : constant := " & T'Image (System.Max_Digits) & ";"); Put_Line (" Max_Base_Digits : constant := " & T'Image (System.Max_Base_Digits) & ";"); Put_Line (" Max_Mantissa : constant := " & T'Image (System.Max_Mantissa) & ";"); Put_Line (" Storage_Unit : constant := " & T'Image (System.Storage_Unit) & ";"); Put_Line (" Word_Size : constant := " & T'Image (System.Word_Size) & ";"); Put_Line (" Fine_Delta : constant := " & Tfloat'Image (System.Fine_Delta) & ";"); Put_Line (" subtype Any_Priority is Integer range " & Integer'Image (System.Any_Priority'First) & " .. " & Integer'Image (System.Any_Priority'Last) & ";"); Put_Line (" subtype Priority is Any_Priority range " & Integer'Image (System.Priority'First) & " .. " & Integer'Image (System.Priority'Last) & ";"); Put_Line (" subtype Interrupt_Priority is Any_Priority range " & Integer'Image (System.Interrupt_Priority'First) & " .. " & Integer'Image (System.Interrupt_Priority'Last) & ";"); Put (" Default_Bit_Order : constant Bit_Order := "); case System.Default_Bit_Order is when System.Low_Order_First => Put ("Low"); when System.High_Order_First => Put ("High"); end case; Put_Line ("_Order_First;"); Put_Line ("end System;"); end Confgen; spark-2012.0.deb/lib/spark/current/0000755000175000017500000000000011753202341016005 5ustar eugeneugenspark-2012.0.deb/lib/spark/current/sparklib.idx0000644000175000017500000000015711526254022020326 0ustar eugeneugenAda auxindex is in ada.idx Interfaces auxindex is in interfaces.idx SPARK auxindex is in spark.idx spark-2012.0.deb/lib/spark/current/spark-ada.ads0000644000175000017500000000531711753202341020347 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. 
You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ------------------------------------------------------------------------------- -- -- -- SPARK.Ada -- -- -- -- Description -- -- This is a binding to package Ada -- -- -- -- Language -- -- Specification : SPARK -- -- Private Part : N/A -- -- Body : N/A -- -- -- -- Runtime Requirements and Dependencies -- -- No Ada Runtime -- -- -- -- Verification -- -- N/A -- -- -- -- Exceptions -- -- None -- -- -- ------------------------------------------------------------------------------- package SPARK.Ada is end SPARK.Ada; spark-2012.0.deb/lib/spark/current/spark.idx0000644000175000017500000000236711712513676017656 0ustar eugeneugenSPARK specification is in spark.ads SPARK.Ada specification is in spark-ada.ads SPARK.Ada.Command_Line specification is in spark-ada-command_line.ads SPARK.Ada.Command_Line.Unbounded_String specification is in spark-ada-command_line-unbounded_string.ads SPARK.Ada.Containers specification is in spark-ada-containers.ads SPARK.Ada.Strings specification is in spark-ada-strings.ads SPARK.Ada.Strings.Unbounded specification is in spark-ada-strings-unbounded.ads SPARK.Ada.Strings.Maps specification is in spark-ada-strings-maps.ads SPARK.Ada.Text_IO specification is in spark-ada-text_io.ads SPARK.Ada.Text_IO.Unbounded_String specification is in spark-ada-text_io-unbounded_string.ads SPARK.Crypto specification is in spark-crypto.ads SPARK.Crypto.Debug specification is in spark-crypto-debug.ads SPARK.Crypto.Hash specification is in spark-crypto-hash.ads SPARK.Crypto.Hash.Skein specification is in spark-crypto-hash-skein.ads SPARK.Unsigned specification is in spark-unsigned.shs spark-2012.0.deb/lib/spark/current/spark-crypto-hash-skein.adb0000644000175000017500000011424511753202341023152 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ------------------------------------------------------------------------------- -- -- -- SPARK.Crypto.Hash.Skein -- -- -- -- Implementation Notes -- -- -- -- Originally based on the C reference implementation supplied by the Skein -- -- design team. Performance is very close or better than that of the C code -- -- at all optimization levels used. 
See www.skein-hash.info -- ------------------------------------------------------------------------------- with Ada.Unchecked_Conversion; package body SPARK.Crypto.Hash.Skein is -- Identifiers can't start with a digit in SPARK, so... -- type Bit_Size is (S256, S512, S1024); -- Number of rounds for the different block sizes Skein_512_Rounds_Total : constant := 72; -- Skein_512 round rotation constants subtype Rotation_Count is Natural range 2 .. 60; -- These constants are the values from the revised -- version 1.2 Skein Specification, -- -- The values from the earlier version 1.1 of the spec -- follow each declaration as a comment. R_512_0_0 : constant Rotation_Count := 46; -- 38; R_512_0_1 : constant Rotation_Count := 36; -- 30; R_512_0_2 : constant Rotation_Count := 19; -- 50; R_512_0_3 : constant Rotation_Count := 37; -- 53; R_512_1_0 : constant Rotation_Count := 33; -- 48; R_512_1_1 : constant Rotation_Count := 27; -- 20; R_512_1_2 : constant Rotation_Count := 14; -- 43; R_512_1_3 : constant Rotation_Count := 42; -- 31; R_512_2_0 : constant Rotation_Count := 17; -- 34; R_512_2_1 : constant Rotation_Count := 49; -- 14; R_512_2_2 : constant Rotation_Count := 36; -- 15; R_512_2_3 : constant Rotation_Count := 39; -- 27; R_512_3_0 : constant Rotation_Count := 44; -- 26; R_512_3_1 : constant Rotation_Count := 9; -- 12; R_512_3_2 : constant Rotation_Count := 54; -- 58; R_512_3_3 : constant Rotation_Count := 56; -- 7; R_512_4_0 : constant Rotation_Count := 39; -- 33; R_512_4_1 : constant Rotation_Count := 30; -- 49; R_512_4_2 : constant Rotation_Count := 34; -- 8; R_512_4_3 : constant Rotation_Count := 24; -- 42; R_512_5_0 : constant Rotation_Count := 13; -- 39; R_512_5_1 : constant Rotation_Count := 50; -- 27; R_512_5_2 : constant Rotation_Count := 10; -- 41; R_512_5_3 : constant Rotation_Count := 17; -- 14; R_512_6_0 : constant Rotation_Count := 25; -- 29; R_512_6_1 : constant Rotation_Count := 29; -- 26; R_512_6_2 : constant Rotation_Count := 39; -- 11; R_512_6_3 : constant Rotation_Count := 43; -- 9; R_512_7_0 : constant Rotation_Count := 8; -- 33; R_512_7_1 : constant Rotation_Count := 35; -- 51; R_512_7_2 : constant Rotation_Count := 56; -- 39; R_512_7_3 : constant Rotation_Count := 22; -- 35; Skein_Version : constant := 1; Skein_ID_String_LE : constant Unsigned.U32 := 16#33414853#; -- "SHA3" (little endian) Skein_Schema_Ver : constant Unsigned.U64 := (Unsigned.U64 (Skein_Version) * 2**32) + Unsigned.U64 (Skein_ID_String_LE); -- Revised Key Schedule Parity constant "C240" from version 1.3 -- of the Skein specification. Skein_KS_Parity : constant Unsigned.U64 := 16#1BD11BDA_A9FC1A22#; Skein_Cfg_Tree_Info_Sequential : constant := 0; Skein_Cfg_Str_Len : constant := 4*8; procedure Put_64_LSB_First (Dst : in out Crypto.Byte_Seq; Dst_Offset : in Natural; Src : in Crypto.U64_Seq; Byte_Count : in Natural) --# derives Dst from Dst, Dst_Offset, Src, Byte_Count; --# pre Dst'First = 0 and --# Src'First = 0 and --# Dst'Last >= Dst_Offset + (Byte_Count - 1) and --# Byte_Count <= (Src'Last + 1) * 8; is begin pragma Assert (Dst'First = 0, "Put_64_LSB_First - Dst'First is not zero"); pragma Assert (Src'First = 0, "Put_64_LSB_First - Src'First is not zero"); pragma Assert (Dst'Last >= Dst_Offset + (Byte_Count - 1), "Put_64_LSB_First - Not enough room in Dst"); pragma Assert ((Src'Last + 1) * 8 >= Byte_Count, "Put_64_LSB_First - Not enough bytes in Src for Byte_Count"); if Byte_Count >= 1 then for N in Natural range 0 .. 
(Byte_Count - 1) loop --# assert Byte_Count >= 1 and --# N >= 0 and --# N <= Byte_Count - 1 and --# N < Byte_Count and --# Byte_Count <= (Src'Last + 1) * 8 and --# N < (Src'Last + 1) * 8 and --# N <= (Src'Last * 8) + 7; --# check N / 8 >= 0 and --# N / 8 <= Src'Last; Dst (Dst_Offset + N) := Unsigned.Byte (Unsigned.Shift_Right_64 (Src (N / 8), 8 * (N mod 8)) and 16#FF#); end loop; end if; end Put_64_LSB_First; pragma Inline (Put_64_LSB_First); -- This version is fully portable (big-endian or little-endian), but slow procedure Get_64_LSB_First (Dst : out Crypto.U64_Seq; Src : in Crypto.Byte_Seq; Src_Offset : in Natural) --# derives Dst from Src, Src_Offset; --# pre Src'First = 0 and --# Dst'First = 0 and --# Src_Offset <= Src'Last and --# Src_Offset + (Dst'Last * 8) + 7 >= Src'First and --# Src_Offset + (Dst'Last * 8) + 7 <= Src'Last and --# Src_Offset + 7 <= Src'Last and --# Src_Offset + Dst'Last * 8 <= Natural'Last; --# post for all I in Natural range Dst'First .. Dst'Last => (Dst (I) in Unsigned.U64); is Dst_Index : Crypto.Word_Count_T; Src_Index : Natural; begin pragma Assert (Src'First = 0, "Get_64_LSB_First - Src'First is not zero"); pragma Assert (Dst'First = 0, "Get_64_LSB_First - Dst'First is not zero"); pragma Assert (Src_Offset <= Src'Last, "Get_64_LSB_First - Src_Offset is larger than Src'Last"); pragma Assert (Src_Offset + (Dst'Last * 8) + 7 <= Src'Last, "Get_64_LSB_First - Not enough bytes in Src for given Offset and Word_Count"); Dst_Index := 0; Src_Index := Src_Offset; loop --# check Src_Index in Src'Range and --# Src_Index + 1 in Src'Range and --# Src_Index + 2 in Src'Range and --# Src_Index + 3 in Src'Range and --# Src_Index + 4 in Src'Range and --# Src_Index + 5 in Src'Range and --# Src_Index + 6 in Src'Range and --# Src_Index + 7 in Src'Range; --# accept F, 23, Dst, "OK"; Dst (Dst_Index) := Unsigned.U64 (Src (Src_Index)) + Unsigned.Shift_Left_64 (Unsigned.U64 (Src (Src_Index + 1)), 8) + Unsigned.Shift_Left_64 (Unsigned.U64 (Src (Src_Index + 2)), 16) + Unsigned.Shift_Left_64 (Unsigned.U64 (Src (Src_Index + 3)), 24) + Unsigned.Shift_Left_64 (Unsigned.U64 (Src (Src_Index + 4)), 32) + Unsigned.Shift_Left_64 (Unsigned.U64 (Src (Src_Index + 5)), 40) + Unsigned.Shift_Left_64 (Unsigned.U64 (Src (Src_Index + 6)), 48) + Unsigned.Shift_Left_64 (Unsigned.U64 (Src (Src_Index + 7)), 56); --# end accept; --# assert (for all I in Natural range Dst'First .. 
Dst_Index => (Dst (I) in Unsigned.U64)) and --# Dst_Index in Dst'Range and --# Dst'Last <= Crypto.Word_Count_T'Last and --# Src_Index = Src_Offset + (Dst_Index * 8) and --# Src_Index >= Src_Offset and --# Src_Index <= Src_Offset + (Dst'Last * 8) and --# (Dst_Index /= Dst'Last -> (Dst_Index + 1 <= Natural'Last)) and --# (Dst_Index /= Dst'Last -> (Src_Index + 8 <= Natural'Last)); exit when Dst_Index = Dst'Last; --# check Dst_Index + 1 <= Natural'Last; Dst_Index := Dst_Index + 1; --# check Src_Index + 8 <= Natural'Last; Src_Index := Src_Index + 8; end loop; --# accept F, 602, Dst, Dst, "OK"; end Get_64_LSB_First; pragma Inline (Get_64_LSB_First); procedure Skein_Start_New_Type (Field_Type : in Unsigned.U6; First_Block : in Boolean; Final_Block : in Boolean; Ctx : in out Context_Header) --# derives Ctx from *, Field_Type, First_Block, Final_Block; --# post Ctx = Ctx~[Tweak_Words => Tweak_Value'(Byte_Count_LSB => 0, --# Byte_Count_MSB => 0, --# Reserved => 0, --# Tree_Level => 0, --# Bit_Pad => False, --# Field_Type => Field_Type, --# First_Block => First_Block, --# Final_Block => Final_Block); --# Byte_Count => 0] and --# Ctx.Hash_Bit_Len = Ctx~.Hash_Bit_Len and --# Ctx.Byte_Count = 0; is begin Ctx.Tweak_Words := Tweak_Value'(Byte_Count_LSB => 0, Byte_Count_MSB => 0, Reserved => 0, Tree_Level => 0, Bit_Pad => False, Field_Type => Field_Type, First_Block => First_Block, Final_Block => Final_Block); Ctx.Byte_Count := 0; end Skein_Start_New_Type; procedure Skein_512_Process_Block (Ctx : in out Skein_512_Context; Block : in Crypto.Byte_Seq; Starting_Offset : in Natural; Block_Count : in Positive_Block_512_Count_T; Byte_Count_Add : in Natural) --# derives Ctx from Ctx, Block, Starting_Offset, Block_Count, Byte_Count_Add; --# pre Ctx.H.Hash_Bit_Len in Initialized_Hash_Bit_Length and --# Ctx.H.Byte_Count in Skein_512_Block_Bytes_Count and --# Block'First = 0 and --# Starting_Offset + ((Block_Count - 1) * Skein_512_Block_Bytes_C) + 63 <= Block'Last and --# Starting_Offset + 63 <= Block'Last and --# Block'Last <= Natural'Last and --# Starting_Offset <= Natural'Last - 63; --# post Ctx.H.Hash_Bit_Len in Initialized_Hash_Bit_Length and --# Ctx.H.Hash_Bit_Len = Ctx~.H.Hash_Bit_Len and --# Ctx.H.Byte_Count in Skein_512_Block_Bytes_Count and --# Ctx.H.Byte_Count = Ctx~.H.Byte_Count; is WCNT : constant := Skein_512_State_Words_C; TS : Crypto.U64_Seq_3; -- Key schedule: tweak KS : Crypto.U64_Seq_9; -- Key schedule: chaining vars X : Crypto.U64_Seq_8; -- Local copy of vars W : Crypto.U64_Seq_8; -- Local copy of input block J : Positive_Block_512_Count_T; -- loop counter Src_Offset : Natural; procedure Inject_Key (R : in Unsigned.U64) --# global in KS; --# in TS; --# in out X; --# derives X from X, R, KS, TS; is subtype Injection_Range is Natural range 0 .. (WCNT - 1); KS_Modulus : constant Unsigned.U64 := WCNT + 1; begin for I in Injection_Range loop X (I) := X (I) + KS (Natural ((R + Unsigned.U64 (I)) mod KS_Modulus)); end loop; X (WCNT - 3) := X (WCNT - 3) + TS (Natural (R mod 3)); X (WCNT - 2) := X (WCNT - 2) + TS (Natural ((R + 1) mod 3)); X (WCNT - 1) := X (WCNT - 1) + R; -- Avoid slide attacks end Inject_Key; pragma Inline (Inject_Key); procedure Round_1 --# global in out X; --# derives X from X; is begin X (0) := X (0) + X (1); X (1) := Unsigned.Rotate_Left_64 (X (1), R_512_0_0); X (1) := X (1) xor X (0); -- Extra cuts here to avoid VCG complexity explosion. 
--# assert True; X (2) := X (2) + X (3); X (3) := Unsigned.Rotate_Left_64 (X (3), R_512_0_1); X (3) := X (3) xor X (2); --# assert True; X (4) := X (4) + X (5); X (5) := Unsigned.Rotate_Left_64 (X (5), R_512_0_2); X (5) := X (5) xor X (4); --# assert True; X (6) := X (6) + X (7); X (7) := Unsigned.Rotate_Left_64 (X (7), R_512_0_3); X (7) := X (7) xor X (6); end Round_1; pragma Inline (Round_1); procedure Round_2 --# global in out X; --# derives X from X; is begin X (2) := X (2) + X (1); X (1) := Unsigned.Rotate_Left_64 (X (1), R_512_1_0); X (1) := X (1) xor X (2); -- Extra cuts here to avoid VCG complexity explosion. --# assert True; X (4) := X (4) + X (7); X (7) := Unsigned.Rotate_Left_64 (X (7), R_512_1_1); X (7) := X (7) xor X (4); --# assert True; X (6) := X (6) + X (5); X (5) := Unsigned.Rotate_Left_64 (X (5), R_512_1_2); X (5) := X (5) xor X (6); --# assert True; X (0) := X (0) + X (3); X (3) := Unsigned.Rotate_Left_64 (X (3), R_512_1_3); X (3) := X (3) xor X (0); end Round_2; pragma Inline (Round_2); procedure Round_3 --# global in out X; --# derives X from X; is begin X (4) := X (4) + X (1); X (1) := Unsigned.Rotate_Left_64 (X (1), R_512_2_0); X (1) := X (1) xor X (4); -- Extra cuts here to avoid VCG complexity explosion. --# assert True; X (6) := X (6) + X (3); X (3) := Unsigned.Rotate_Left_64 (X (3), R_512_2_1); X (3) := X (3) xor X (6); --# assert True; X (0) := X (0) + X (5); X (5) := Unsigned.Rotate_Left_64 (X (5), R_512_2_2); X (5) := X (5) xor X (0); --# assert True; X (2) := X (2) + X (7); X (7) := Unsigned.Rotate_Left_64 (X (7), R_512_2_3); X (7) := X (7) xor X (2); end Round_3; pragma Inline (Round_3); procedure Round_4 --# global in out X; --# derives X from X; is begin X (6) := X (6) + X (1); X (1) := Unsigned.Rotate_Left_64 (X (1), R_512_3_0); X (1) := X (1) xor X (6); -- Extra cuts here to avoid VCG complexity explosion. --# assert True; X (0) := X (0) + X (7); X (7) := Unsigned.Rotate_Left_64 (X (7), R_512_3_1); X (7) := X (7) xor X (0); --# assert True; X (2) := X (2) + X (5); X (5) := Unsigned.Rotate_Left_64 (X (5), R_512_3_2); X (5) := X (5) xor X (2); --# assert True; X (4) := X (4) + X (3); X (3) := Unsigned.Rotate_Left_64 (X (3), R_512_3_3); X (3) := X (3) xor X (4); end Round_4; pragma Inline (Round_4); procedure Round_5 --# global in out X; --# derives X from X; is begin X (0) := X (0) + X (1); X (1) := Unsigned.Rotate_Left_64 (X (1), R_512_4_0); X (1) := X (1) xor X (0); -- Extra cuts here to avoid VCG complexity explosion. --# assert True; X (2) := X (2) + X (3); X (3) := Unsigned.Rotate_Left_64 (X (3), R_512_4_1); X (3) := X (3) xor X (2); --# assert True; X (4) := X (4) + X (5); X (5) := Unsigned.Rotate_Left_64 (X (5), R_512_4_2); X (5) := X (5) xor X (4); --# assert True; X (6) := X (6) + X (7); X (7) := Unsigned.Rotate_Left_64 (X (7), R_512_4_3); X (7) := X (7) xor X (6); end Round_5; pragma Inline (Round_5); procedure Round_6 --# global in out X; --# derives X from X; is begin X (2) := X (2) + X (1); X (1) := Unsigned.Rotate_Left_64 (X (1), R_512_5_0); X (1) := X (1) xor X (2); -- Extra cuts here to avoid VCG complexity explosion. 
--# assert True; X (4) := X (4) + X (7); X (7) := Unsigned.Rotate_Left_64 (X (7), R_512_5_1); X (7) := X (7) xor X (4); --# assert True; X (6) := X (6) + X (5); X (5) := Unsigned.Rotate_Left_64 (X (5), R_512_5_2); X (5) := X (5) xor X (6); --# assert True; X (0) := X (0) + X (3); X (3) := Unsigned.Rotate_Left_64 (X (3), R_512_5_3); X (3) := X (3) xor X (0); end Round_6; pragma Inline (Round_6); procedure Round_7 --# global in out X; --# derives X from X; is begin X (4) := X (4) + X (1); X (1) := Unsigned.Rotate_Left_64 (X (1), R_512_6_0); X (1) := X (1) xor X (4); -- Extra cuts here to avoid VCG complexity explosion. --# assert True; X (6) := X (6) + X (3); X (3) := Unsigned.Rotate_Left_64 (X (3), R_512_6_1); X (3) := X (3) xor X (6); --# assert True; X (0) := X (0) + X (5); X (5) := Unsigned.Rotate_Left_64 (X (5), R_512_6_2); X (5) := X (5) xor X (0); --# assert True; X (2) := X (2) + X (7); X (7) := Unsigned.Rotate_Left_64 (X (7), R_512_6_3); X (7) := X (7) xor X (2); end Round_7; pragma Inline (Round_7); procedure Round_8 --# global in out X; --# derives X from X; is begin X (6) := X (6) + X (1); X (1) := Unsigned.Rotate_Left_64 (X (1), R_512_7_0); X (1) := X (1) xor X (6); -- Extra cuts here to avoid VCG complexity explosion. --# assert True; X (0) := X (0) + X (7); X (7) := Unsigned.Rotate_Left_64 (X (7), R_512_7_1); X (7) := X (7) xor X (0); --# assert True; X (2) := X (2) + X (5); X (5) := Unsigned.Rotate_Left_64 (X (5), R_512_7_2); X (5) := X (5) xor X (2); --# assert True; X (4) := X (4) + X (3); X (3) := Unsigned.Rotate_Left_64 (X (3), R_512_7_3); X (3) := X (3) xor X (4); end Round_8; pragma Inline (Round_8); procedure Initialize_Key_Schedule --# global in Ctx; --# out KS; --# derives KS from Ctx; is begin -- For speed, we avoid a complete aggregate assignment to KS here. -- This generates a false-alarm from the flow-analyser, but this is -- OK, since type-safety is later re-established by the proof system. --# accept F, 23, KS, "Initialization here is total"; KS (WCNT) := Skein_KS_Parity; for I in Crypto.I8 loop KS (I) := Ctx.X (I); KS (WCNT) := KS (WCNT) xor Ctx.X (I); -- Compute overall parity --# assert (for all J in Crypto.I8 range Crypto.I8'First ..
I => (KS (J) in Unsigned.U64)) and --# KS (WCNT) in Unsigned.U64; end loop; --# accept F, 602, KS, KS, "Initialization here is total"; end Initialize_Key_Schedule; pragma Inline (Initialize_Key_Schedule); procedure Initialize_TS --# global in Ctx; --# out TS; --# derives TS from Ctx; is W0 : Unsigned.U64; W1 : Unsigned.U64; function Tweak_To_Words --# return R => (for all I in Modifier_Words_Index => (R (I) in Unsigned.U64)); is new Ada.Unchecked_Conversion (Tweak_Value, Modifier_Words); begin --# accept W, 13, Tweak_To_Words, "Unchecked_Conversion here OK"; W0 := Tweak_To_Words (Ctx.H.Tweak_Words)(0); W1 := Tweak_To_Words (Ctx.H.Tweak_Words)(1); --# end accept; TS := Crypto.U64_Seq_3'(0 => W0, 1 => W1, 2 => W0 xor W1); end Initialize_TS; pragma Inline (Initialize_TS); procedure Do_First_Key_Injection --# global in W; --# in KS; --# in TS; --# out X; --# derives X from W, KS, TS; is begin X := Crypto.U64_Seq_8'(0 => W (0) + KS (0), 1 => W (1) + KS (1), 2 => W (2) + KS (2), 3 => W (3) + KS (3), 4 => W (4) + KS (4), 5 => W (5) + KS (5), 6 => W (6) + KS (6), 7 => W (7) + KS (7)); X (WCNT - 3) := X (WCNT - 3) + TS (0); X (WCNT - 2) := X (WCNT - 2) + TS (1); end Do_First_Key_Injection; pragma Inline (Do_First_Key_Injection); procedure Threefish_Block --# global in KS; --# in TS; --# in out X; --# derives X from X, KS, TS; is begin for R in Unsigned.U64 range 1 .. (Skein_512_Rounds_Total / 8) loop Round_1; Round_2; Round_3; Round_4; Inject_Key (R * 2 - 1); Round_5; Round_6; Round_7; Round_8; Inject_Key (R * 2); end loop; end Threefish_Block; pragma Inline (Threefish_Block); procedure Update_Context --# global in out Ctx; --# in W; --# in X; --# derives Ctx from Ctx, W, X; --# post Ctx.H.Hash_Bit_Len = Ctx~.H.Hash_Bit_Len and --# Ctx.H.Byte_Count = Ctx~.H.Byte_Count; is begin Ctx.X := Skein_512_State_Words'(0 => X (0) xor W (0), 1 => X (1) xor W (1), 2 => X (2) xor W (2), 3 => X (3) xor W (3), 4 => X (4) xor W (4), 5 => X (5) xor W (5), 6 => X (6) xor W (6), 7 => X (7) xor W (7)); end Update_Context; pragma Inline (Update_Context); begin Src_Offset := Starting_Offset; J := 1; loop --# assert Ctx.H.Hash_Bit_Len = Ctx~.H.Hash_Bit_Len and --# Ctx.H.Byte_Count = Ctx~.H.Byte_Count and --# J >= 1 and --# J <= Block_Count and --# Src_Offset = Starting_Offset + (J - 1) * Skein_512_Block_Bytes_C and --# Src_Offset + 63 <= Block'Last and --# Src_Offset + W'Last * 8 <= Natural'Last and --# Starting_Offset + ((Block_Count - 1) * Skein_512_Block_Bytes_C) + 63 <= Block'Last and --# Block'Last <= Natural'Last and --# ((J < Block_Count) -> (Src_Offset + Skein_512_Block_Bytes_C <= Natural'Last)); -- This implementation only supports 2**31 input bytes, -- so no carry over to Byte_Count_MSB here. 
Ctx.H.Tweak_Words.Byte_Count_LSB := Ctx.H.Tweak_Words.Byte_Count_LSB + Unsigned.U64 (Byte_Count_Add); Initialize_Key_Schedule; Initialize_TS; Get_64_LSB_First (Dst => W, Src => Block, Src_Offset => Src_Offset); --# check for all I in Crypto.I8 => (W (I) in Unsigned.U64); -- Do the first full key injection Do_First_Key_Injection; Threefish_Block; -- Do the final "feedforward" xor, update context chaining vars Update_Context; Ctx.H.Tweak_Words.First_Block := False; exit when J >= Block_Count; J := J + 1; Src_Offset := Src_Offset + Skein_512_Block_Bytes_C; end loop; end Skein_512_Process_Block; procedure Skein_512_Init (Ctx : out Skein_512_Context; HashBitLen : in Initialized_Hash_Bit_Length) --# post Ctx.H.Hash_Bit_Len in Initialized_Hash_Bit_Length and --# Ctx.H.Hash_Bit_Len = HashBitLen and --# Ctx.H.Byte_Count = 0 and --# Ctx.H.Byte_Count in Skein_512_Block_Bytes_Count; is Cfg : Skein_512_State_Words; function Skein_512_State_Words_To_Bytes is new Ada.Unchecked_Conversion (Skein_512_State_Words, Skein_512_State_Bytes); begin -- Build/Process config block for hashing Ctx := Null_Skein_512_Context; Ctx.H.Hash_Bit_Len := HashBitLen; -- output has byte count Skein_Start_New_Type (Skein_Block_Type_Cfg, True, True, Ctx.H); --# check Ctx.H.Hash_Bit_Len = HashBitLen; -- Set the schema version, hash result length, and tree info. -- All other words are 0 Cfg := Skein_512_State_Words'(0 => Unsigned.To_LittleEndian (Skein_Schema_Ver), 1 => Unsigned.To_LittleEndian (Unsigned.U64 (HashBitLen)), 2 => Unsigned.To_LittleEndian (Skein_Cfg_Tree_Info_Sequential), others => 0); -- Compute the initial chaining values from config block -- First, zero the chaining bytes Ctx.X := Skein_512_State_Words'(others => 0); --# check Ctx.H.Hash_Bit_Len = HashBitLen; --# accept W, 13, Skein_512_State_Words_To_Bytes, "Unchecked Conversion OK"; Skein_512_Process_Block (Ctx => Ctx, Block => Skein_512_State_Words_To_Bytes (Cfg), Starting_Offset => 0, Block_Count => 1, Byte_Count_Add => Skein_Cfg_Str_Len); --# end accept; --# check Ctx.H.Hash_Bit_Len = HashBitLen; -- Set up to process the data message portion of the hash (default) Skein_Start_New_Type (Skein_Block_Type_Msg, True, False, Ctx.H); end Skein_512_Init; procedure Skein_512_Update (Ctx : in out Skein_512_Context; Msg : in Crypto.Byte_Seq) --# pre Ctx.H.Hash_Bit_Len in Initialized_Hash_Bit_Length and --# Ctx.H.Byte_Count in Skein_512_Block_Bytes_Count and --# Msg'First = 0 and --# Msg'Last < Natural'Last and --# Msg'Last + Skein_512_Block_Bytes_C + 1 <= Natural'Last; --# post Ctx.H.Hash_Bit_Len in Initialized_Hash_Bit_Length and --# Ctx.H.Hash_Bit_Len = Ctx~.H.Hash_Bit_Len and --# Ctx.H.Byte_Count in Skein_512_Block_Bytes_Count; is Msg_Byte_Count : Natural; N : Skein_512_Block_Bytes_Index; Block_Count : Positive_Block_512_Count_T; Current_Msg_Offset : Natural; Bytes_Hashed : Natural; Tmp_B : Skein_512_Block_Bytes; procedure Copy_Msg_To_B (Msg_Offset : in Natural; Num_Bytes : in Natural) --# global in out Ctx; --# in Msg; --# derives Ctx from Ctx, Msg, Msg_Offset, Num_Bytes; --# pre Ctx.H.Hash_Bit_Len > 0 and --# Msg'First = 0 and --# Msg_Offset in Msg'Range and --# (Msg_Offset + (Num_Bytes - 1)) <= Msg'Last and --# (Ctx.H.Byte_Count + (Num_Bytes - 1)) <= Ctx.B'Last; --# post Ctx.H.Hash_Bit_Len > 0 and --# Ctx.H.Hash_Bit_Len = Ctx~.H.Hash_Bit_Len and --# Ctx.H.Byte_Count = Ctx~.H.Byte_Count + Num_Bytes and --# Ctx.H.Byte_Count in Skein_512_Block_Bytes_Count; is Src : Natural; Dst : Skein_512_Block_Bytes_Index; Final_Dst : Skein_512_Block_Bytes_Index; Final_Src :
Natural; begin if Num_Bytes > 0 then Src := Msg_Offset; Dst := Ctx.H.Byte_Count; Final_Dst := Dst + (Num_Bytes - 1); Final_Src := Src + (Num_Bytes - 1); loop Ctx.B (Dst) := Msg (Src); --# assert Ctx.H.Hash_Bit_Len > 0 and --# Ctx.H.Hash_Bit_Len = Ctx~.H.Hash_Bit_Len and --# Ctx.H.Byte_Count = Ctx~.H.Byte_Count and --# Ctx.H.Byte_Count in Skein_512_Block_Bytes_Count and --# (Ctx.H.Byte_Count + Num_Bytes) - 1 <= Ctx.B'Last and --# Final_Src <= Msg'Last; exit when Dst >= Final_Dst or Src >= Final_Src; Dst := Dst + 1; Src := Src + 1; end loop; Ctx.H.Byte_Count := Ctx.H.Byte_Count + Num_Bytes; end if; end Copy_Msg_To_B; begin Msg_Byte_Count := Msg'Last + 1; Current_Msg_Offset := 0; if (Msg_Byte_Count + Ctx.H.Byte_Count > Skein_512_Block_Bytes_C) then if Ctx.H.Byte_Count > 0 then N := Skein_512_Block_Bytes_C - Ctx.H.Byte_Count; -- number of bytes free in Ctx.B --# check N < Msg_Byte_Count; --# check N <= Msg'Last + 1; Copy_Msg_To_B (Current_Msg_Offset, N); Msg_Byte_Count := Msg_Byte_Count - N; Current_Msg_Offset := Current_Msg_Offset + N; --# check Ctx.H.Byte_Count = Skein_512_Block_Bytes_C; Tmp_B := Ctx.B; Skein_512_Process_Block (Ctx => Ctx, Block => Tmp_B, Starting_Offset => 0, Block_Count => 1, Byte_Count_Add => Skein_512_Block_Bytes_C); Ctx.H.Byte_Count := 0; end if; -- Now process any remaining full blocks, directly from input message data if Msg_Byte_Count > Skein_512_Block_Bytes_C then Block_Count := (Msg_Byte_Count - 1) / Skein_512_Block_Bytes_C; -- Number of full blocks to process Skein_512_Process_Block (Ctx => Ctx, Block => Msg, Starting_Offset => Current_Msg_Offset, Block_Count => Block_Count, Byte_Count_Add => Skein_512_Block_Bytes_C); Bytes_Hashed := Block_Count * Skein_512_Block_Bytes_C; --# check Bytes_Hashed < Msg_Byte_Count; Msg_Byte_Count := Msg_Byte_Count - Bytes_Hashed; Current_Msg_Offset := Current_Msg_Offset + Bytes_Hashed; end if; end if; -- Finally, there might be fewer than Skein_512_Block_Bytes_C bytes left -- over that are not yet hashed. Copy these to Ctx.B for processing -- in any subsequent call to _Update or _Final. Copy_Msg_To_B (Current_Msg_Offset, Msg_Byte_Count); end Skein_512_Update; procedure Skein_512_Final (Ctx : in Skein_512_Context; Result : out Crypto.Byte_Seq) --# pre Ctx.H.Hash_Bit_Len in Initialized_Hash_Bit_Length and --# Ctx.H.Byte_Count in Skein_512_Block_Bytes_Count and --# Result'First = 0 and --# (Ctx.H.Hash_Bit_Len + 7) / 8 <= Result'Last + 1; is subtype Output_Byte_Count_T is Natural range 1 .. (Hash_Bit_Length'Last + 7) / 8; subtype Output_Block_Count_T is Natural range 0 .. (Output_Byte_Count_T'Last + 63) / Skein_512_Block_Bytes_C; subtype Positive_Output_Block_Count_T is Output_Block_Count_T range 1 .. Output_Block_Count_T'Last; Local_Ctx : Skein_512_Context; N : Natural; Blocks_Done : Output_Block_Count_T; Blocks_Required : Positive_Output_Block_Count_T; Byte_Count : Output_Byte_Count_T; X : Skein_512_State_Words; Tmp_B : Skein_512_Block_Bytes; Tmp_Byte_Count_Add : Natural; procedure Zero_Pad_B --# global in out Local_Ctx; --# derives Local_Ctx from Local_Ctx; --# pre Local_Ctx.H.Byte_Count < Skein_512_Block_Bytes_C and --# Local_Ctx.H.Hash_Bit_Len > 0; --# post Local_Ctx.H.Hash_Bit_Len = Local_Ctx~.H.Hash_Bit_Len and --# Local_Ctx.H.Hash_Bit_Len > 0 and --# Local_Ctx.H.Byte_Count < Skein_512_Block_Bytes_C and --# Local_Ctx.H.Byte_Count = Local_Ctx~.H.Byte_Count; is begin for I in Skein_512_Block_Bytes_Index range Local_Ctx.H.Byte_Count .. 
Skein_512_Block_Bytes_Index'Last loop --# assert Local_Ctx.H.Hash_Bit_Len = Local_Ctx~.H.Hash_Bit_Len and --# Local_Ctx.H.Hash_Bit_Len > 0 and --# Local_Ctx.H.Byte_Count < Skein_512_Block_Bytes_C and --# Local_Ctx.H.Byte_Count = Local_Ctx~.H.Byte_Count; Local_Ctx.B (I) := 0; end loop; end Zero_Pad_B; pragma Inline (Zero_Pad_B); procedure Set_B_Counter (Counter : in Unsigned.U64) --# global in out Local_Ctx; --# derives Local_Ctx from Local_Ctx, Counter; --# pre Local_Ctx.H.Hash_Bit_Len > 0; --# post Local_Ctx.H.Hash_Bit_Len > 0; is begin Local_Ctx.B (0) := Unsigned.Byte (Counter and 16#FF#); Local_Ctx.B (1) := Unsigned.Byte (Unsigned.Shift_Right_64 (Counter, 8) and 16#FF#); Local_Ctx.B (2) := Unsigned.Byte (Unsigned.Shift_Right_64 (Counter, 16) and 16#FF#); Local_Ctx.B (3) := Unsigned.Byte (Unsigned.Shift_Right_64 (Counter, 24) and 16#FF#); Local_Ctx.B (4) := Unsigned.Byte (Unsigned.Shift_Right_64 (Counter, 32) and 16#FF#); Local_Ctx.B (5) := Unsigned.Byte (Unsigned.Shift_Right_64 (Counter, 40) and 16#FF#); Local_Ctx.B (6) := Unsigned.Byte (Unsigned.Shift_Right_64 (Counter, 48) and 16#FF#); Local_Ctx.B (7) := Unsigned.Byte (Unsigned.Shift_Right_64 (Counter, 56) and 16#FF#); end Set_B_Counter; pragma Inline (Set_B_Counter); begin Local_Ctx := Ctx; --# check Local_Ctx.H.Hash_Bit_Len > 0; Result := (others => 0); Local_Ctx.H.Tweak_Words.Final_Block := True; -- Tag as the final block if (Local_Ctx.H.Byte_Count < Skein_512_Block_Bytes_C) then Zero_Pad_B; end if; Tmp_B := Local_Ctx.B; Tmp_Byte_Count_Add := Local_Ctx.H.Byte_Count; Skein_512_Process_Block (Ctx => Local_Ctx, Block => Tmp_B, Starting_Offset => 0, Block_Count => 1, Byte_Count_Add => Tmp_Byte_Count_Add); -- Now output the result Byte_Count := (Local_Ctx.H.Hash_Bit_Len + 7) / 8; -- Total number of output bytes --# check Byte_Count <= Result'Last + 1; -- Run Threefish in "counter mode" to generate more output Local_Ctx.B := Skein_512_Block_Bytes'(others => 0); -- Zero out Local_Ctx.B, so it can hold the counter X := Local_Ctx.X; -- Keep a local copy of counter mode "key" Blocks_Required := (Byte_Count + 63) / 64; Blocks_Done := 0; loop --# assert Local_Ctx.H.Hash_Bit_Len > 0 and --# Byte_Count <= Result'Last + 1 and --# Blocks_Done * Skein_512_Block_Bytes_C < Byte_Count and --# Blocks_Done * Skein_512_Block_Bytes_C < Result'Last + 1 and --# Blocks_Done < Blocks_Required and --# Blocks_Required = (Byte_Count + 63) / 64; Set_B_Counter (Unsigned.U64 (Blocks_Done)); Skein_Start_New_Type (Field_Type => Skein_Block_Type_Out, First_Block => True, Final_Block => True, Ctx => Local_Ctx.H); -- Run "Counter Mode" Tmp_B := Local_Ctx.B; Skein_512_Process_Block (Ctx => Local_Ctx, Block => Tmp_B, Starting_Offset => 0, Block_Count => 1, Byte_Count_Add => 8); N := Byte_Count - (Blocks_Done * Skein_512_Block_Bytes_C); -- number of output bytes left to go if (N >= Skein_512_Block_Bytes_C) then N := Skein_512_Block_Bytes_C; end if; -- Push the output Local_Ctx.X into output buffer Hash Put_64_LSB_First (Dst => Result, Dst_Offset => Blocks_Done * Skein_512_Block_Bytes_C, Src => Local_Ctx.X, Byte_Count => N); Local_Ctx.X := X; -- restore the counter mode key for next time Blocks_Done := Blocks_Done + 1; exit when Blocks_Done >= Blocks_Required; end loop; end Skein_512_Final; function Skein_512_Hash (Data : in Crypto.Byte_Seq) return Skein_512_State_Bytes is Ctx : Skein_512_Context; Result : Skein_512_State_Bytes; begin Skein_512_Init (Ctx => Ctx, HashBitLen => 512); Skein_512_Update (Ctx => Ctx, Msg => Data); Skein_512_Final (Ctx => Ctx, Result => Result); 
return Result; end Skein_512_Hash; end SPARK.Crypto.Hash.Skein; spark-2012.0.deb/lib/spark/current/spark-ada-command_line.ads0000644000175000017500000000627011753202341022771 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ------------------------------------------------------------------------------- -- -- -- SPARK.Ada.Command_Line -- -- -- -- Description -- -- This is a binding to package Ada.Command_Line -- -- -- -- Language -- -- Specification : SPARK -- -- Private Part : SPARK -- -- Body : Ada -- -- -- -- Runtime Requirements and Dependencies -- -- Full Ada Runtime -- -- -- -- Verification -- -- N/A -- -- -- -- Exceptions -- -- None -- ------------------------------------------------------------------------------- package SPARK.Ada.Command_Line --# own State; --# initializes State; is -- function Argument_Count return Natural; function Argument_Count return Natural; --# global State; type Exit_Status is new Integer; Success : constant Exit_Status; Failure : constant Exit_Status; -- procedure Set_Exit_Status (Code : Exit_Status); procedure Set_Exit_Status (Code : in Exit_Status); --# global in out State; --# derives State from *, --# Code; private Success : constant Exit_Status := 0; Failure : constant Exit_Status := 1; end SPARK.Ada.Command_Line; spark-2012.0.deb/lib/spark/current/spark-ada-command_line-unbounded_string.adb0000644000175000017500000000316611753202341026320 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Ada.Command_Line; package body SPARK.Ada.Command_Line.Unbounded_String is --# hide SPARK.Ada.Command_Line.Unbounded_String; function Argument (Number : Positive) return Strings.Unbounded.Unbounded_String is begin return SPARK.Ada.Strings.Unbounded.String_To_Unbounded_String (Source => Standard.Ada.Command_Line.Argument (Number => Number)); end Argument; function Command_Name return Strings.Unbounded.Unbounded_String is begin return SPARK.Ada.Strings.Unbounded.String_To_Unbounded_String (Source => Standard.Ada.Command_Line.Command_Name); end Command_Name; end SPARK.Ada.Command_Line.Unbounded_String; spark-2012.0.deb/lib/spark/current/spark.ads0000644000175000017500000000530711753202341017623 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ------------------------------------------------------------------------------- -- -- -- SPARK -- -- -- -- Description -- -- This is the top level package of the SPARK library. -- -- -- -- Language -- -- Specification : SPARK -- -- Private Part : N/A -- -- Body : N/A -- -- -- -- Runtime Requirements and Dependencies -- -- No Ada Runtime -- -- -- -- Verification -- -- N/A -- -- -- -- Exceptions -- -- None -- -- -- ------------------------------------------------------------------------------- package SPARK is end SPARK; spark-2012.0.deb/lib/spark/current/spark-ada-strings-maps-not_spark.ads0000644000175000017500000000613511753202341024771 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= ------------------------------------------------------------------------------- -- -- -- SPARK.Ada.Strings.Maps.Not_SPARK -- -- -- -- Description -- -- This package provides the features of Ada.Strings.Maps which are not -- -- compatible with SPARK. Please refer to the Ada LRM description of -- -- Ada.Strings.Maps for usage information. -- -- -- ------------------------------------------------------------------------------- with Ada.Strings.Maps; package SPARK.Ada.Strings.Maps.Not_SPARK is type Character_Ranges is new Standard.Ada.Strings.Maps.Character_Ranges; function To_Set (Ranges : Character_Ranges) return Character_Set; function To_Ranges (Set : Character_Set) return Character_Ranges; function "<=" (Left : Character_Set; Right : Character_Set) return Boolean renames Is_Subset; subtype Character_Sequence is String; -- Alternative representation for a set of character values function To_Sequence (Set : Character_Set) return Character_Sequence; function To_Domain (Map : Character_Mapping) return Character_Sequence; function To_Range (Map : Character_Mapping) return Character_Sequence; type Character_Mapping_Function is new Standard.Ada.Strings.Maps.Character_Mapping_Function; ------------------------------------------------ -- Conversion functions from SPARK.Ada to Ada -- ------------------------------------------------ function To_Character_Set (From : Character_Set) return Standard.Ada.Strings.Maps.Character_Set; function To_Character_Range (From : Character_Range) return Standard.Ada.Strings.Maps.Character_Range; function To_Character_Mapping (From : Character_Mapping) return Standard.Ada.Strings.Maps.Character_Mapping; end SPARK.Ada.Strings.Maps.Not_SPARK; spark-2012.0.deb/lib/spark/current/spark-unsigned.shs0000644000175000017500000002235711753202341021467 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ------------------------------------------------------------------------------- -- -- -- SPARK.Unsigned -- -- -- -- Description -- -- This file is a "Shadow" specification for package SPARK.Unsigned. -- -- It must be kept in sync with the corresponding specification in -- -- spark-unsigned.ads -- -- -- -- This package presents a SPARK-compatible binding to the various Shift -- -- and Rotate functions that are supplied in Ada's package Interfaces. -- -- -- -- This package provides a de-overloading of these functions names to be -- -- compatible with SPARK. 
-- -- -- -- This package declares the functions using the same naming scheme as -- -- package Interfaces but with the suffix "_32", "_64" and so on -- -- appended to disambiguate each name. -- -- -- -- For efficiency, we also supply an Ada (not SPARK) version of this -- -- specification (in "spark-unsigned.ads") that must be submitted to your -- -- Ada compiler. This declares the new functions as renamings of the -- -- original versions, which are normally Intrinsic in most implementations -- -- of Ada. -- -- -- -- Language -- -- Specification : SPARK -- -- Private Part : N/A -- -- Body : Ada -- -- -- -- Runtime Requirements and Dependencies -- -- No Ada Runtime, assuming Shift_ and Rotate_ functions are -- -- Intrinsic on the target platform. -- -- -- -- Verification -- -- N/A -- -- -- -- Exceptions -- -- None -- -- -- ------------------------------------------------------------------------------- with Interfaces; --# inherit Interfaces; package SPARK.Unsigned is -------------------------------------------------------------------- -- Shorthand names for common unsigned types -------------------------------------------------------------------- type U6 is mod 2**6; type U7 is mod 2**7; subtype Byte is Interfaces.Unsigned_8; subtype U16 is Interfaces.Unsigned_16; subtype U32 is Interfaces.Unsigned_32; subtype U64 is Interfaces.Unsigned_64; -------------------------------------------------------------------- -- Shift and Rotate functions -- -- These functions supply a non-overloaded, and therefore -- SPARK-compatible, declaration of the standard Shift and -- Rotate functions for the standard modular types. -- -- For the Examiner, these are declared as just plain function -- declarations with no body. -- -- For a compiler, a distinct version of this package specification -- supplies these declarations as renamings of the (overloaded) -- functions in package Interfaces, thus yielded the efficiency -- of the Intrinsic functions. -- -- The de-overloading scheme used is to replace function -- XXX for type Unsigned_N with a function called -- XXX_N which renames the original entity. -------------------------------------------------------------------- -- Interfaces uses "Natural" for the Amount parameter of each -- function below, but we choose to introduce a named subtype here -- to ease RTC proof of calling units in SPARK. subtype Shift_Count is Natural range 0 .. 
64; -- Rotate towards MSB function Rotate_Left_8 (Value : Interfaces.Unsigned_8; Amount : Shift_Count) return Interfaces.Unsigned_8; -- Rotate towards LSB function Rotate_Right_8 (Value : Interfaces.Unsigned_8; Amount : Shift_Count) return Interfaces.Unsigned_8; -- Shift towards MSB function Shift_Left_8 (Value : Interfaces.Unsigned_8; Amount : Shift_Count) return Interfaces.Unsigned_8; -- Shift towards LSB function Shift_Right_8 (Value : Interfaces.Unsigned_8; Amount : Shift_Count) return Interfaces.Unsigned_8; -- Arithmetic Shift towards LSB function Shift_Right_Arithmetic_8 (Value : Interfaces.Unsigned_8; Amount : Shift_Count) return Interfaces.Unsigned_8; -- Rotate towards MSB function Rotate_Left_16 (Value : Interfaces.Unsigned_16; Amount : Shift_Count) return Interfaces.Unsigned_16; -- Rotate towards LSB function Rotate_Right_16 (Value : Interfaces.Unsigned_16; Amount : Shift_Count) return Interfaces.Unsigned_16; -- Shift towards MSB function Shift_Left_16 (Value : Interfaces.Unsigned_16; Amount : Shift_Count) return Interfaces.Unsigned_16; -- Shift towards LSB function Shift_Right_16 (Value : Interfaces.Unsigned_16; Amount : Shift_Count) return Interfaces.Unsigned_16; -- Arithmetic Shift towards LSB function Shift_Right_Arithmetic_16 (Value : Interfaces.Unsigned_16; Amount : Shift_Count) return Interfaces.Unsigned_16; -- Rotate towards MSB function Rotate_Left_32 (Value : Interfaces.Unsigned_32; Amount : Shift_Count) return Interfaces.Unsigned_32; -- Rotate towards LSB function Rotate_Right_32 (Value : Interfaces.Unsigned_32; Amount : Shift_Count) return Interfaces.Unsigned_32; -- Shift towards MSB function Shift_Left_32 (Value : Interfaces.Unsigned_32; Amount : Shift_Count) return Interfaces.Unsigned_32; -- Shift towards LSB function Shift_Right_32 (Value : Interfaces.Unsigned_32; Amount : Shift_Count) return Interfaces.Unsigned_32; -- Arithmetic Shift towards LSB function Shift_Right_Arithmetic_32 (Value : Interfaces.Unsigned_32; Amount : Shift_Count) return Interfaces.Unsigned_32; -- Rotate towards MSB function Rotate_Left_64 (Value : Interfaces.Unsigned_64; Amount : Shift_Count) return Interfaces.Unsigned_64; -- Rotate towards LSB function Rotate_Right_64 (Value : Interfaces.Unsigned_64; Amount : Shift_Count) return Interfaces.Unsigned_64; -- Shift towards MSB function Shift_Left_64 (Value : Interfaces.Unsigned_64; Amount : Shift_Count) return Interfaces.Unsigned_64; -- Shift towards LSB function Shift_Right_64 (Value : Interfaces.Unsigned_64; Amount : Shift_Count) return Interfaces.Unsigned_64; -- Arithmetic Shift towards LSB function Shift_Right_Arithmetic_64 (Value : Interfaces.Unsigned_64; Amount : Shift_Count) return Interfaces.Unsigned_64; -------------------------------------------------------------------- -- Endian-ness conversion functions -------------------------------------------------------------------- -- Returns W in Little-Endian format. -- -- On a machine which is naturally Little-Endian, this function is a no-op. -- On a big-endian machine, the 8 bytes of W are reversed. 
function To_LittleEndian (W : in Interfaces.Unsigned_64) return Interfaces.Unsigned_64; pragma Inline (To_LittleEndian); end SPARK.Unsigned; spark-2012.0.deb/lib/spark/current/interfaces.shs0000644000175000017500000000760211753202341020654 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ------------------------------------------------------------------------------- -- -- -- Interfaces -- -- -- -- Description -- -- This is a SHADOW specification of the predefined package Interfaces. -- -- It presents a subset of the facilities of the standard Ada package, -- -- but is SPARK compatible. -- -- -- -- Language -- -- Specification : SPARK -- -- Private Part : N/A -- -- Body : N/A -- -- -- -- Runtime Requirements and Dependencies -- -- No Ada Runtime -- -- -- -- Verification -- -- N/A -- -- -- -- Exceptions -- -- None -- -- -- ------------------------------------------------------------------------------- package Interfaces is type Integer_8 is range -2 ** 7 .. 2 ** 7 - 1; for Integer_8'Size use 8; type Integer_16 is range -2 ** 15 .. 2 ** 15 - 1; for Integer_16'Size use 16; type Integer_32 is range -2 ** 31 .. 2 ** 31 - 1; for Integer_32'Size use 32; type Integer_64 is range -2 ** 63 .. 2 ** 63 - 1; for Integer_64'Size use 64; type Unsigned_8 is mod 2**8; for Unsigned_8'Size use 8; type Unsigned_16 is mod 2**16; for Unsigned_16'Size use 16; type Unsigned_32 is mod 2**32; for Unsigned_32'Size use 32; type Unsigned_64 is mod 2**64; for Unsigned_64'Size use 64; ------------------------------------------------------------------------- -- For SPARK, the various Shift_ and Rotate_ functions are declared in -- a distinct package SPARK.Unsigned. These offer a SPARK-friendly -- non-overloaded view of these functions, but having the efficiency -- of the Intrinsic operations supplied by most Ada compilers. 
------------------------------------------------------------------------- end Interfaces; spark-2012.0.deb/lib/spark/current/all.wrn0000644000175000017500000000022311712513676017315 0ustar eugeneugen-- Warning control file for the SPARK library hidden_parts with_clauses representation_clauses pragma assert pragma inline default_loop_assertions spark-2012.0.deb/lib/spark/current/ada-characters-handling.shs0000644000175000017500000001215711753202341023156 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ------------------------------------------------------------------------------- -- -- -- Ada.Characters.Handling -- -- -- -- Description -- -- -- -- This package is a SPARK-compatible shadow specification of the -- -- standard Ada package Ada.Characters.Handling. -- -- -- -- Language -- -- Specification : SPARK -- -- Private Part : N/A -- -- Body : N/A -- -- -- -- Runtime Requirements and Dependencies -- -- Whatever is required by Ada.Characters.Handling -- -- -- -- Verification -- -- N/A -- -- -- -- Exceptions -- -- None -- -- -- ------------------------------------------------------------------------------- package Ada.Characters.Handling is ---------------------------------------- -- Character Classification Functions -- ---------------------------------------- function Is_Control (Item : Character) return Boolean; function Is_Graphic (Item : Character) return Boolean; function Is_Letter (Item : Character) return Boolean; function Is_Lower (Item : Character) return Boolean; function Is_Upper (Item : Character) return Boolean; function Is_Basic (Item : Character) return Boolean; function Is_Digit (Item : Character) return Boolean; function Is_Decimal_Digit (Item : Character) return Boolean; function Is_Hexadecimal_Digit (Item : Character) return Boolean; function Is_Alphanumeric (Item : Character) return Boolean; function Is_Special (Item : Character) return Boolean; --------------------------------------------------- -- Conversion Functions for Character and String -- --------------------------------------------------- function To_Lower (Item : Character) return Character; function To_Upper (Item : Character) return Character; function To_Basic (Item : Character) return Character; -- The following functions cannot be supported in SPARK, -- since they return unconstrained String.
-- function To_Lower (Item : String) return String; -- function To_Upper (Item : String) return String; -- function To_Basic (Item : String) return String; ---------------------------------------------------------------------- -- Classifications of and Conversions Between Character and ISO 646 -- ---------------------------------------------------------------------- subtype ISO_646 is Character range Character'Val (0) .. Character'Val (127); function Is_ISO_646 (Item : Character) return Boolean; function To_ISO_646 (Item : Character; Substitute : ISO_646) return ISO_646; -- The following functions cannot be supported in SPARK, -- since they return unconstrained String or are overloaded. -- function Is_ISO_646 (Item : String) return Boolean; -- -- function To_ISO_646 -- (Item : String; -- Substitute : ISO_646 := ' ') return String; end Ada.Characters.Handling; spark-2012.0.deb/lib/spark/current/gnat.cfg0000644000175000017500000000033211712513676017430 0ustar eugeneugenpackage Standard is type Integer is range -2**31 .. 2**31 - 1; end Standard; package System is -- for IA32/x86 - little-endian machine Default_Bit_Order : constant Bit_Order := Low_Order_First; end System; spark-2012.0.deb/lib/spark/current/spark.sw0000644000175000017500000000007511712513676017515 0ustar eugeneugen-w=all -conf=gnat -index=sparklib -listing=ls_ -plain -vcg spark-2012.0.deb/lib/spark/current/spark-ada-strings-unbounded-not_spark.adb0000644000175000017500000001050311753202341025765 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Ada.Strings.Maps; with SPARK.Ada.Strings.Not_SPARK; package body SPARK.Ada.Strings.Unbounded.Not_SPARK is --# hide SPARK.Ada.Strings.Unbounded.Not_SPARK; pragma Warnings ("Y"); -- Turn off warnings for Ada 2005 features procedure Free (X : in out String_Access) is begin Standard.Ada.Strings.Unbounded.Free (X => Standard.Ada.Strings.Unbounded.String_Access (X)); end Free; function To_String (Source : Unbounded_String) return String is begin return Standard.Ada.Strings.Unbounded.To_String (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source)); end To_String; function Slice (Source : Unbounded_String; Low : Positive; High : Natural) return String is begin return Standard.Ada.Strings.Unbounded.Slice (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Low => Low, High => High); end Slice; function Index (Source : Unbounded_String; Pattern : String; Going : Direction := Direction_Forward; Mapping : Maps.Not_SPARK.Character_Mapping_Function) return Natural is begin return Standard.Ada.Strings.Unbounded.Index (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Pattern => Pattern, Going => Strings.Not_SPARK.To_Direction (From => Going), Mapping => Standard.Ada.Strings.Maps.Character_Mapping_Function (Mapping)); end Index; -- Index is only defined in Ada 2005 function Index (Source : Unbounded_String; Pattern : String; From : Positive; Going : Direction := Direction_Forward; Mapping : Maps.Not_SPARK.Character_Mapping_Function) return Natural is begin return Standard.Ada.Strings.Unbounded.Index (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Pattern => Pattern, From => From, Going => Strings.Not_SPARK.To_Direction (From => Going), Mapping => Standard.Ada.Strings.Maps.Character_Mapping_Function (Mapping)); end Index; function Count (Source : Unbounded_String; Pattern : String; Mapping : Maps.Not_SPARK.Character_Mapping_Function) return Natural is begin return Standard.Ada.Strings.Unbounded.Count (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Pattern => Pattern, Mapping => Standard.Ada.Strings.Maps.Character_Mapping_Function (Mapping)); end Count; function Translate (Source : Unbounded_String; Mapping : Maps.Not_SPARK.Character_Mapping_Function) return Unbounded_String is begin return Unbounded_String (Standard.Ada.Strings.Unbounded.Translate (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Mapping => Standard.Ada.Strings.Maps.Character_Mapping_Function (Mapping))); end Translate; procedure Translate (Source : in out Unbounded_String; Mapping : in Maps.Not_SPARK.Character_Mapping_Function) is begin Standard.Ada.Strings.Unbounded.Translate (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Mapping => Standard.Ada.Strings.Maps.Character_Mapping_Function (Mapping)); end Translate; end SPARK.Ada.Strings.Unbounded.Not_SPARK; spark-2012.0.deb/lib/spark/current/spark-ada-strings-not_spark.adb0000644000175000017500000000601111753202341024003 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= package body SPARK.Ada.Strings.Not_SPARK is --# hide SPARK.Ada.Strings.Not_SPARK; ------------------------------------------------ -- Conversion functions from SPARK.Ada to Ada -- ------------------------------------------------ function To_Alignment (From : Alignment) return Standard.Ada.Strings.Alignment is begin case From is when Alignment_Left => return Standard.Ada.Strings.Left; when Alignment_Right => return Standard.Ada.Strings.Right; when Alignment_Center => return Standard.Ada.Strings.Center; end case; end To_Alignment; function To_Truncation (From : Truncation) return Standard.Ada.Strings.Truncation is begin case From is when Truncation_Left => return Standard.Ada.Strings.Left; when Truncation_Right => return Standard.Ada.Strings.Right; when Truncation_Error => return Standard.Ada.Strings.Error; end case; end To_Truncation; function To_Membership (From : Membership) return Standard.Ada.Strings.Membership is begin case From is when Membership_Inside => return Standard.Ada.Strings.Inside; when Membership_Outside => return Standard.Ada.Strings.Outside; end case; end To_Membership; function To_Direction (From : Direction) return Standard.Ada.Strings.Direction is begin case From is when Direction_Forward => return Standard.Ada.Strings.Forward; when Direction_Backward => return Standard.Ada.Strings.Backward; end case; end To_Direction; function To_Trim_End (From : Trim_End) return Standard.Ada.Strings.Trim_End is begin case From is when Trim_End_Left => return Standard.Ada.Strings.Left; when Trim_End_Right => return Standard.Ada.Strings.Right; when Trim_End_Both => return Standard.Ada.Strings.Both; end case; end To_Trim_End; end SPARK.Ada.Strings.Not_SPARK; spark-2012.0.deb/lib/spark/current/spark-crypto-hash.ads0000644000175000017500000000541411753202341022061 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= ------------------------------------------------------------------------------- -- -- -- SPARK.Crypto.Hash -- -- -- -- Description -- -- -- -- Root package for all cryptographic hash functions -- -- -- -- Language -- -- Specification : SPARK -- -- Private Part : N/A -- -- Body : N/A -- -- -- -- Runtime Requirements and Dependencies -- -- None -- -- -- -- Verification -- -- N/A -- -- -- -- Exceptions -- -- None -- ------------------------------------------------------------------------------- --# inherit SPARK, --# SPARK.Crypto; package SPARK.Crypto.Hash is end SPARK.Crypto.Hash; spark-2012.0.deb/lib/spark/current/spark-ada-command_line.adb0000644000175000017500000000263011753202341022744 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Command_Line; package body SPARK.Ada.Command_Line is --# hide SPARK.Ada.Command_Line; function Argument_Count return Natural is begin return Standard.Ada.Command_Line.Argument_Count; end Argument_Count; procedure Set_Exit_Status (Code : in Exit_Status) is begin Standard.Ada.Command_Line.Set_Exit_Status (Code => Standard.Ada.Command_Line.Exit_Status (Code)); end Set_Exit_Status; end SPARK.Ada.Command_Line; spark-2012.0.deb/lib/spark/current/spark-ada-text_io.adb0000644000175000017500000011565711753202341022010 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Ada.Exceptions; with SPARK.Ada.Text_IO.Not_SPARK; package body SPARK.Ada.Text_IO is --# hide SPARK.Ada.Text_IO; pragma Warnings ("Y"); -- Turn off warnings for Ada 2005 features function Get_Exception_T (The_Exception_Name : String) return Exception_T is begin if The_Exception_Name = "ADA.IO_EXCEPTIONS.STATUS_ERROR" then return Status_Error; elsif The_Exception_Name = "ADA.IO_EXCEPTIONS.MODE_ERROR" then return Mode_Error; elsif The_Exception_Name = "ADA.IO_EXCEPTIONS.NAME_ERROR" then return Name_Error; elsif The_Exception_Name = "ADA.IO_EXCEPTIONS.USE_ERROR" then return Use_Error; elsif The_Exception_Name = "ADA.IO_EXCEPTIONS.DEVICE_ERROR" then return Device_Error; elsif The_Exception_Name = "ADA.IO_EXCEPTIONS.END_ERROR" then return End_Error; elsif The_Exception_Name = "ADA.IO_EXCEPTIONS.DATA_ERROR" then return Data_Error; elsif The_Exception_Name = "ADA.IO_EXCEPTIONS.LAYOUT_ERROR" then return Layout_Error; else return No_Exception; end if; end Get_Exception_T; procedure Raise_Exception (The_Exception : in Exception_T) is begin case The_Exception is when No_Exception => null; when Status_Error => raise Standard.Ada.Text_IO.Status_Error; when Mode_Error => raise Standard.Ada.Text_IO.Mode_Error; when Name_Error => raise Standard.Ada.Text_IO.Name_Error; when Use_Error => raise Standard.Ada.Text_IO.Use_Error; when Device_Error => raise Standard.Ada.Text_IO.Device_Error; when End_Error => raise Standard.Ada.Text_IO.End_Error; when Data_Error => raise Standard.Ada.Text_IO.Data_Error; when Layout_Error => raise Standard.Ada.Text_IO.Layout_Error; end case; end Raise_Exception; --------------------- -- File Management -- --------------------- procedure Create (File : out File_Type; Mode : in File_Mode; Name : in String; Form : in String) is begin Standard.Ada.Text_IO.Create (File => File.The_File_Type, Mode => SPARK.Ada.Text_IO.Not_SPARK.To_File_Mode (From => Mode), Name => Name, Form => Form); File.The_Exception := No_Exception; exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end Create; procedure Open (File : out File_Type; Mode : in File_Mode; Name : in String; Form : in String) is begin Standard.Ada.Text_IO.Open (File => File.The_File_Type, Mode => SPARK.Ada.Text_IO.Not_SPARK.To_File_Mode (From => Mode), Name => Name, Form => Form); File.The_Exception := No_Exception; exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end Open; procedure Close (File : in out File_Type) is begin Standard.Ada.Text_IO.Close (File => File.The_File_Type); File.The_Exception := No_Exception; exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end Close; procedure Delete (File : in out File_Type) is begin Standard.Ada.Text_IO.Delete (File => File.The_File_Type); File.The_Exception := No_Exception; exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end Delete; procedure Reset_Mode (File : in out File_Type; Mode : in 
File_Mode) is begin Standard.Ada.Text_IO.Reset (File => File.The_File_Type, Mode => SPARK.Ada.Text_IO.Not_SPARK.To_File_Mode (From => Mode)); File.The_Exception := No_Exception; exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end Reset_Mode; procedure Reset (File : in out File_Type) is begin Standard.Ada.Text_IO.Reset (File => File.The_File_Type); File.The_Exception := No_Exception; exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end Reset; function Mode (File : File_Type) return File_Mode is function To_File_Mode (From : Standard.Ada.Text_IO.File_Mode) return File_Mode; function To_File_Mode (From : Standard.Ada.Text_IO.File_Mode) return File_Mode is begin case From is when Standard.Ada.Text_IO.In_File => return In_File; when Standard.Ada.Text_IO.Out_File => return Out_File; when Standard.Ada.Text_IO.Append_File => return Append_File; end case; end To_File_Mode; begin return To_File_Mode (From => Standard.Ada.Text_IO.Mode (File => File.The_File_Type)); end Mode; function Is_Open (File : File_Type) return Boolean is begin return Standard.Ada.Text_IO.Is_Open (File => File.The_File_Type); end Is_Open; -------------------- -- Buffer control -- -------------------- procedure Flush_File (File : in out File_Type) is begin Standard.Ada.Text_IO.Flush (File => File.The_File_Type); File.The_Exception := No_Exception; exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end Flush_File; procedure Flush_Output is begin Standard.Ada.Text_IO.Flush (File => Standard.Ada.Text_IO.Standard_Output); The_Standard_Output_Exception := No_Exception; exception when Event : others => The_Standard_Output_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Output_Exception = No_Exception then raise; end if; end Flush_Output; procedure Flush_Error is begin Standard.Ada.Text_IO.Flush (File => Standard.Ada.Text_IO.Standard_Error); The_Standard_Error_Exception := No_Exception; exception when Event : others => The_Standard_Error_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Error_Exception = No_Exception then raise; end if; end Flush_Error; -------------------------------------------- -- Specification of line and page lengths -- -------------------------------------------- procedure Set_Line_Length_File (File : in out File_Type; To : in Count) is begin Standard.Ada.Text_IO.Set_Line_Length (File => File.The_File_Type, To => Standard.Ada.Text_IO.Count (To)); File.The_Exception := No_Exception; exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end Set_Line_Length_File; procedure Set_Line_Length_Output (To : in Count) is begin Standard.Ada.Text_IO.Set_Line_Length (File => Standard.Ada.Text_IO.Standard_Output, To => Standard.Ada.Text_IO.Count (To)); The_Standard_Output_Exception := No_Exception; exception when Event : others => The_Standard_Output_Exception := 
Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Output_Exception = No_Exception then raise; end if; end Set_Line_Length_Output; procedure Set_Line_Length_Error (To : in Count) is begin Standard.Ada.Text_IO.Set_Line_Length (File => Standard.Ada.Text_IO.Standard_Error, To => Standard.Ada.Text_IO.Count (To)); The_Standard_Error_Exception := No_Exception; exception when Event : others => The_Standard_Error_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Error_Exception = No_Exception then raise; end if; end Set_Line_Length_Error; procedure Set_Page_Length_File (File : in out File_Type; To : in Count) is begin Standard.Ada.Text_IO.Set_Page_Length (File => File.The_File_Type, To => Standard.Ada.Text_IO.Count (To)); File.The_Exception := No_Exception; exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end Set_Page_Length_File; procedure Set_Page_Length_Output (To : in Count) is begin Standard.Ada.Text_IO.Set_Page_Length (File => Standard.Ada.Text_IO.Standard_Output, To => Standard.Ada.Text_IO.Count (To)); The_Standard_Output_Exception := No_Exception; exception when Event : others => The_Standard_Output_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Output_Exception = No_Exception then raise; end if; end Set_Page_Length_Output; procedure Set_Page_Length_Error (To : in Count) is begin Standard.Ada.Text_IO.Set_Page_Length (File => Standard.Ada.Text_IO.Standard_Error, To => Standard.Ada.Text_IO.Count (To)); The_Standard_Error_Exception := No_Exception; exception when Event : others => The_Standard_Error_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Error_Exception = No_Exception then raise; end if; end Set_Page_Length_Error; function Line_Length_File (File : File_Type) return Count is begin return Count (Standard.Ada.Text_IO.Line_Length (File => File.The_File_Type)); end Line_Length_File; function Line_Length_Output return Count is begin return Count (Standard.Ada.Text_IO.Line_Length (File => Standard.Ada.Text_IO.Standard_Output)); end Line_Length_Output; function Line_Length_Error return Count is begin return Count (Standard.Ada.Text_IO.Line_Length (File => Standard.Ada.Text_IO.Standard_Error)); end Line_Length_Error; function Page_Length_File (File : File_Type) return Count is begin return Count (Standard.Ada.Text_IO.Page_Length (File => File.The_File_Type)); end Page_Length_File; function Page_Length_Output return Count is begin return Count (Standard.Ada.Text_IO.Page_Length (File => Standard.Ada.Text_IO.Standard_Output)); end Page_Length_Output; function Page_Length_Error return Count is begin return Count (Standard.Ada.Text_IO.Page_Length (File => Standard.Ada.Text_IO.Standard_Error)); end Page_Length_Error; ------------------------------------ -- Column, Line, and Page Control -- ------------------------------------ procedure New_Line_File (File : in out File_Type; Spacing : in Positive_Count) is begin Standard.Ada.Text_IO.New_Line (File => File.The_File_Type, Spacing => Standard.Ada.Text_IO.Positive_Count (Spacing)); File.The_Exception := No_Exception; exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => 
Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end New_Line_File; procedure New_Line_Output (Spacing : in Positive_Count) is begin Standard.Ada.Text_IO.New_Line (File => Standard.Ada.Text_IO.Standard_Output, Spacing => Standard.Ada.Text_IO.Positive_Count (Spacing)); The_Standard_Output_Exception := No_Exception; exception when Event : others => The_Standard_Output_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Output_Exception = No_Exception then raise; end if; end New_Line_Output; procedure New_Line_Error (Spacing : in Positive_Count) is begin Standard.Ada.Text_IO.New_Line (File => Standard.Ada.Text_IO.Standard_Error, Spacing => Standard.Ada.Text_IO.Positive_Count (Spacing)); The_Standard_Error_Exception := No_Exception; exception when Event : others => The_Standard_Error_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Error_Exception = No_Exception then raise; end if; end New_Line_Error; procedure Skip_Line_File (File : in out File_Type; Spacing : in Positive_Count) is begin Standard.Ada.Text_IO.Skip_Line (File => File.The_File_Type, Spacing => Standard.Ada.Text_IO.Positive_Count (Spacing)); File.The_Exception := No_Exception; exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end Skip_Line_File; procedure Skip_Line_Input (Spacing : in Positive_Count) is begin Standard.Ada.Text_IO.Skip_Line (File => Standard.Ada.Text_IO.Standard_Input, Spacing => Standard.Ada.Text_IO.Positive_Count (Spacing)); The_Standard_Input_Exception := No_Exception; exception when Event : others => The_Standard_Input_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Input_Exception = No_Exception then raise; end if; end Skip_Line_Input; function End_Of_Line_File (File : File_Type) return Boolean is begin return Standard.Ada.Text_IO.End_Of_Line (File => File.The_File_Type); end End_Of_Line_File; function End_Of_Line_Input return Boolean is begin return Standard.Ada.Text_IO.End_Of_Line (File => Standard.Ada.Text_IO.Standard_Input); end End_Of_Line_Input; procedure New_Page_File (File : in out File_Type) is begin Standard.Ada.Text_IO.New_Page (File => File.The_File_Type); File.The_Exception := No_Exception; exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end New_Page_File; procedure New_Page_Output is begin Standard.Ada.Text_IO.New_Page (File => Standard.Ada.Text_IO.Standard_Output); The_Standard_Output_Exception := No_Exception; exception when Event : others => The_Standard_Output_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Output_Exception = No_Exception then raise; end if; end New_Page_Output; procedure New_Page_Error is begin Standard.Ada.Text_IO.New_Page (File => Standard.Ada.Text_IO.Standard_Error); The_Standard_Error_Exception := No_Exception; exception when Event : others => The_Standard_Error_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Error_Exception = No_Exception then 
raise; end if; end New_Page_Error; procedure Skip_Page_File (File : in out File_Type) is begin Standard.Ada.Text_IO.Skip_Page (File => File.The_File_Type); File.The_Exception := No_Exception; exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end Skip_Page_File; procedure Skip_Page_Input is begin Standard.Ada.Text_IO.Skip_Page (File => Standard.Ada.Text_IO.Standard_Input); The_Standard_Input_Exception := No_Exception; exception when Event : others => The_Standard_Input_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Input_Exception = No_Exception then raise; end if; end Skip_Page_Input; function End_Of_Page_File (File : File_Type) return Boolean is begin return Standard.Ada.Text_IO.End_Of_Page (File => File.The_File_Type); end End_Of_Page_File; function End_Of_Page_Input return Boolean is begin return Standard.Ada.Text_IO.End_Of_Page (File => Standard.Ada.Text_IO.Standard_Input); end End_Of_Page_Input; function End_Of_File_File (File : File_Type) return Boolean is begin return Standard.Ada.Text_IO.End_Of_File (File => File.The_File_Type); end End_Of_File_File; function End_Of_File_Input return Boolean is begin return Standard.Ada.Text_IO.End_Of_File (File => Standard.Ada.Text_IO.Standard_Input); end End_Of_File_Input; procedure Set_Col_File (File : in out File_Type; To : in Positive_Count) is begin Standard.Ada.Text_IO.Set_Col (File => File.The_File_Type, To => Standard.Ada.Text_IO.Positive_Count (To)); File.The_Exception := No_Exception; exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end Set_Col_File; procedure Set_Col_Input (To : in Positive_Count) is begin Standard.Ada.Text_IO.Set_Col (File => Standard.Ada.Text_IO.Standard_Input, To => Standard.Ada.Text_IO.Positive_Count (To)); The_Standard_Input_Exception := No_Exception; exception when Event : others => The_Standard_Input_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Input_Exception = No_Exception then raise; end if; end Set_Col_Input; procedure Set_Col_Output (To : in Positive_Count) is begin Standard.Ada.Text_IO.Set_Col (File => Standard.Ada.Text_IO.Standard_Output, To => Standard.Ada.Text_IO.Positive_Count (To)); The_Standard_Output_Exception := No_Exception; exception when Event : others => The_Standard_Output_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Output_Exception = No_Exception then raise; end if; end Set_Col_Output; procedure Set_Col_Error (To : in Positive_Count) is begin Standard.Ada.Text_IO.Set_Col (File => Standard.Ada.Text_IO.Standard_Error, To => Standard.Ada.Text_IO.Positive_Count (To)); The_Standard_Error_Exception := No_Exception; exception when Event : others => The_Standard_Error_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Error_Exception = No_Exception then raise; end if; end Set_Col_Error; procedure Set_Line_File (File : in out File_Type; To : in Positive_Count) is begin Standard.Ada.Text_IO.Set_Line (File => File.The_File_Type, To => Standard.Ada.Text_IO.Positive_Count (To)); File.The_Exception := 
No_Exception; exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end Set_Line_File; procedure Set_Line_Input (To : in Positive_Count) is begin Standard.Ada.Text_IO.Set_Line (File => Standard.Ada.Text_IO.Standard_Input, To => Standard.Ada.Text_IO.Positive_Count (To)); The_Standard_Input_Exception := No_Exception; exception when Event : others => The_Standard_Input_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Input_Exception = No_Exception then raise; end if; end Set_Line_Input; procedure Set_Line_Output (To : in Positive_Count) is begin Standard.Ada.Text_IO.Set_Line (File => Standard.Ada.Text_IO.Standard_Output, To => Standard.Ada.Text_IO.Positive_Count (To)); The_Standard_Output_Exception := No_Exception; exception when Event : others => The_Standard_Output_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Output_Exception = No_Exception then raise; end if; end Set_Line_Output; procedure Set_Line_Error (To : in Positive_Count) is begin Standard.Ada.Text_IO.Set_Line (File => Standard.Ada.Text_IO.Standard_Error, To => Standard.Ada.Text_IO.Positive_Count (To)); The_Standard_Error_Exception := No_Exception; exception when Event : others => The_Standard_Error_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Error_Exception = No_Exception then raise; end if; end Set_Line_Error; function Col_File (File : File_Type) return Positive_Count is begin return Positive_Count (Standard.Ada.Text_IO.Col (File => File.The_File_Type)); end Col_File; function Col_Input return Positive_Count is begin return Positive_Count (Standard.Ada.Text_IO.Col (File => Standard.Ada.Text_IO.Standard_Input)); end Col_Input; function Col_Output return Positive_Count is begin return Positive_Count (Standard.Ada.Text_IO.Col (File => Standard.Ada.Text_IO.Standard_Output)); end Col_Output; function Col_Error return Positive_Count is begin return Positive_Count (Standard.Ada.Text_IO.Col (File => Standard.Ada.Text_IO.Standard_Error)); end Col_Error; function Line_File (File : File_Type) return Positive_Count is begin return Positive_Count (Standard.Ada.Text_IO.Line (File => File.The_File_Type)); end Line_File; function Line_Input return Positive_Count is begin return Positive_Count (Standard.Ada.Text_IO.Line (File => Standard.Ada.Text_IO.Standard_Input)); end Line_Input; function Line_Output return Positive_Count is begin return Positive_Count (Standard.Ada.Text_IO.Line (File => Standard.Ada.Text_IO.Standard_Output)); end Line_Output; function Line_Error return Positive_Count is begin return Positive_Count (Standard.Ada.Text_IO.Line (File => Standard.Ada.Text_IO.Standard_Error)); end Line_Error; function Page_File (File : File_Type) return Positive_Count is begin return Positive_Count (Standard.Ada.Text_IO.Page (File => File.The_File_Type)); end Page_File; function Page_Input return Positive_Count is begin return Positive_Count (Standard.Ada.Text_IO.Page (File => Standard.Ada.Text_IO.Standard_Input)); end Page_Input; function Page_Output return Positive_Count is begin return Positive_Count (Standard.Ada.Text_IO.Page (File => Standard.Ada.Text_IO.Standard_Output)); end Page_Output; function Page_Error return Positive_Count is begin return Positive_Count 
(Standard.Ada.Text_IO.Page (File => Standard.Ada.Text_IO.Standard_Error)); end Page_Error; ---------------------------- -- Character Input-Output -- ---------------------------- procedure Get_Character_File (File : in out File_Type; Item : out Character) is begin Standard.Ada.Text_IO.Get (File => File.The_File_Type, Item => Item); File.The_Exception := No_Exception; exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end Get_Character_File; procedure Get_Character_Input (Item : out Character) is begin Standard.Ada.Text_IO.Get (File => Standard.Ada.Text_IO.Standard_Input, Item => Item); The_Standard_Input_Exception := No_Exception; exception when Event : others => The_Standard_Input_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Input_Exception = No_Exception then raise; end if; end Get_Character_Input; procedure Put_Character_File (File : in out File_Type; Item : in Character) is begin Standard.Ada.Text_IO.Put (File => File.The_File_Type, Item => Item); File.The_Exception := No_Exception; exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end Put_Character_File; procedure Put_Character_Output (Item : in Character) is begin Standard.Ada.Text_IO.Put (File => Standard.Ada.Text_IO.Standard_Output, Item => Item); The_Standard_Output_Exception := No_Exception; exception when Event : others => The_Standard_Output_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Output_Exception = No_Exception then raise; end if; end Put_Character_Output; procedure Put_Character_Error (Item : in Character) is begin Standard.Ada.Text_IO.Put (File => Standard.Ada.Text_IO.Standard_Error, Item => Item); The_Standard_Error_Exception := No_Exception; exception when Event : others => The_Standard_Error_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Error_Exception = No_Exception then raise; end if; end Put_Character_Error; procedure Look_Ahead_File (File : in File_Type; Item : out Character; End_Of_Line : out Boolean) is begin Standard.Ada.Text_IO.Look_Ahead (File => File.The_File_Type, Item => Item, End_Of_Line => End_Of_Line); end Look_Ahead_File; procedure Look_Ahead_Input (Item : out Character; End_Of_Line : out Boolean) is begin Standard.Ada.Text_IO.Look_Ahead (File => Standard.Ada.Text_IO.Standard_Input, Item => Item, End_Of_Line => End_Of_Line); end Look_Ahead_Input; procedure Get_Immediate_File (File : in out File_Type; Item : out Character) is begin Standard.Ada.Text_IO.Get_Immediate (File => File.The_File_Type, Item => Item); exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end Get_Immediate_File; procedure Get_Immediate_Input (Item : out Character) is begin Standard.Ada.Text_IO.Get_Immediate (File => Standard.Ada.Text_IO.Standard_Input, Item => Item); exception when Event : others => The_Standard_Input_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Input_Exception 
= No_Exception then raise; end if; end Get_Immediate_Input; procedure Get_Immediate_Available_File (File : in out File_Type; Item : out Character; Available : out Boolean) is begin Standard.Ada.Text_IO.Get_Immediate (File => File.The_File_Type, Item => Item, Available => Available); exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end Get_Immediate_Available_File; procedure Get_Immediate_Available_Input (Item : out Character; Available : out Boolean) is begin Standard.Ada.Text_IO.Get_Immediate (File => Standard.Ada.Text_IO.Standard_Input, Item => Item, Available => Available); exception when Event : others => The_Standard_Input_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Input_Exception = No_Exception then raise; end if; end Get_Immediate_Available_Input; ------------------------- -- String Input-Output -- ------------------------- procedure Get_File (File : in out File_Type; Item : out String) is begin Standard.Ada.Text_IO.Get (File => File.The_File_Type, Item => Item); File.The_Exception := No_Exception; exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end Get_File; procedure Get_Input (Item : out String) is begin Standard.Ada.Text_IO.Get (File => Standard.Ada.Text_IO.Standard_Input, Item => Item); The_Standard_Input_Exception := No_Exception; exception when Event : others => The_Standard_Input_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Input_Exception = No_Exception then raise; end if; end Get_Input; procedure Put_File (File : in out File_Type; Item : in String) is begin Standard.Ada.Text_IO.Put (File => File.The_File_Type, Item => Item); File.The_Exception := No_Exception; exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end Put_File; procedure Put_Output (Item : in String) is begin Standard.Ada.Text_IO.Put (File => Standard.Ada.Text_IO.Standard_Output, Item => Item); The_Standard_Output_Exception := No_Exception; exception when Event : others => The_Standard_Output_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Output_Exception = No_Exception then raise; end if; end Put_Output; procedure Put_Error (Item : in String) is begin Standard.Ada.Text_IO.Put (File => Standard.Ada.Text_IO.Standard_Error, Item => Item); The_Standard_Error_Exception := No_Exception; exception when Event : others => The_Standard_Error_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Error_Exception = No_Exception then raise; end if; end Put_Error; procedure Procedure_Get_Line_File (File : in out File_Type; Item : out String; Arg_Last : out Natural) is begin Standard.Ada.Text_IO.Get_Line (File => File.The_File_Type, Item => Item, Last => Arg_Last); File.The_Exception := No_Exception; exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = 
No_Exception then raise; end if; end Procedure_Get_Line_File; procedure Procedure_Get_Line_Input (Item : out String; Arg_Last : out Natural) is begin Standard.Ada.Text_IO.Get_Line (File => Standard.Ada.Text_IO.Standard_Input, Item => Item, Last => Arg_Last); The_Standard_Input_Exception := No_Exception; exception when Event : others => The_Standard_Input_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Input_Exception = No_Exception then raise; end if; end Procedure_Get_Line_Input; procedure Put_Line_File (File : in out File_Type; Item : in String) is begin Standard.Ada.Text_IO.Put_Line (File => File.The_File_Type, Item => Item); File.The_Exception := No_Exception; exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end Put_Line_File; procedure Put_Line_Output (Item : in String) is begin Standard.Ada.Text_IO.Put_Line (File => Standard.Ada.Text_IO.Standard_Output, Item => Item); The_Standard_Output_Exception := No_Exception; exception when Event : others => The_Standard_Output_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Output_Exception = No_Exception then raise; end if; end Put_Line_Output; procedure Put_Line_Error (Item : in String) is begin Standard.Ada.Text_IO.Put_Line (File => Standard.Ada.Text_IO.Standard_Error, Item => Item); The_Standard_Error_Exception := No_Exception; exception when Event : others => The_Standard_Error_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Error_Exception = No_Exception then raise; end if; end Put_Line_Error; function Get_Last_Exception_File (File : File_Type) return Exception_T is begin return File.The_Exception; end Get_Last_Exception_File; function Get_Last_Exception_Input return Exception_T is begin return The_Standard_Input_Exception; end Get_Last_Exception_Input; function Get_Last_Exception_Output return Exception_T is begin return The_Standard_Output_Exception; end Get_Last_Exception_Output; function Get_Last_Exception_Error return Exception_T is begin return The_Standard_Error_Exception; end Get_Last_Exception_Error; procedure Raise_Last_Exception_File (File : in File_Type) is begin Raise_Exception (The_Exception => File.The_Exception); end Raise_Last_Exception_File; procedure Raise_Last_Exception_Input is begin Raise_Exception (The_Exception => The_Standard_Input_Exception); end Raise_Last_Exception_Input; procedure Raise_Last_Exception_Output is begin Raise_Exception (The_Exception => The_Standard_Output_Exception); end Raise_Last_Exception_Output; procedure Raise_Last_Exception_Error is begin Raise_Exception (The_Exception => The_Standard_Error_Exception); end Raise_Last_Exception_Error; begin The_Standard_Input_Exception := No_Exception; The_Standard_Output_Exception := No_Exception; The_Standard_Error_Exception := No_Exception; end SPARK.Ada.Text_IO; spark-2012.0.deb/lib/spark/current/spark_/0000755000175000017500000000000011753202331017263 5ustar eugeneugenspark-2012.0.deb/lib/spark/current/spark_/crypto_/0000755000175000017500000000000011753202331020742 5ustar eugeneugenspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/0000755000175000017500000000000011753202331022024 5ustar 
eugeneugenspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/0000755000175000017500000000000011753202331023135 5ustar eugeneugenspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/get_64_lsb_first.vcg0000644000175000017500000046336411712513676027031 0ustar eugeneugen ******************************************************* Semantic Analysis of SPARK Text Examiner Pro Edition ******************************************************* procedure Skein.Get_64_LSB_First For path(s) from start to run-time check associated with statement of line 148: procedure_get_64_lsb_first_1. H1: src__index__subtype__1__first = 0 . H2: dst__index__subtype__1__first = 0 . H3: src_offset <= src__index__subtype__1__last . H4: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H5: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H6: src_offset + 7 <= src__index__subtype__1__last . H7: src_offset + dst__index__subtype__1__last * 8 <= natural__last . H8: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H9: src_offset >= natural__first . H10: src_offset <= natural__last . -> C1: 0 >= spark__crypto__word_count_t__first . C2: 0 <= spark__crypto__word_count_t__last . For path(s) from start to run-time check associated with statement of line 149: procedure_get_64_lsb_first_2. H1: src__index__subtype__1__first = 0 . H2: dst__index__subtype__1__first = 0 . H3: src_offset <= src__index__subtype__1__last . H4: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H5: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H6: src_offset + 7 <= src__index__subtype__1__last . H7: src_offset + dst__index__subtype__1__last * 8 <= natural__last . H8: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H9: src_offset >= natural__first . H10: src_offset <= natural__last . H11: 0 >= spark__crypto__word_count_t__first . H12: 0 <= spark__crypto__word_count_t__last . -> C1: src_offset >= natural__first . C2: src_offset <= natural__last . For path(s) from start to check associated with statement of line 151: procedure_get_64_lsb_first_3. H1: src__index__subtype__1__first = 0 . H2: dst__index__subtype__1__first = 0 . H3: src_offset <= src__index__subtype__1__last . H4: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H5: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H6: src_offset + 7 <= src__index__subtype__1__last . H7: src_offset + dst__index__subtype__1__last * 8 <= natural__last . H8: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H9: src_offset >= natural__first . H10: src_offset <= natural__last . H11: 0 >= spark__crypto__word_count_t__first . H12: 0 <= spark__crypto__word_count_t__last . H13: src_offset >= natural__first . H14: src_offset <= natural__last . -> C1: src_offset >= src__index__subtype__1__first . C2: src_offset <= src__index__subtype__1__last . 
C3: src_offset + 1 >= src__index__subtype__1__first . C4: src_offset + 1 <= src__index__subtype__1__last . C5: src_offset + 2 >= src__index__subtype__1__first . C6: src_offset + 2 <= src__index__subtype__1__last . C7: src_offset + 3 >= src__index__subtype__1__first . C8: src_offset + 3 <= src__index__subtype__1__last . C9: src_offset + 4 >= src__index__subtype__1__first . C10: src_offset + 4 <= src__index__subtype__1__last . C11: src_offset + 5 >= src__index__subtype__1__first . C12: src_offset + 5 <= src__index__subtype__1__last . C13: src_offset + 6 >= src__index__subtype__1__first . C14: src_offset + 6 <= src__index__subtype__1__last . C15: src_offset + 7 >= src__index__subtype__1__first . C16: src_offset + 7 <= src__index__subtype__1__last . For path(s) from assertion of line 172 to check associated with statement of line 151: procedure_get_64_lsb_first_4. H1: for_all(i_: integer, ((i_ >= dst__index__subtype__1__first) and ( i_ <= dst_index)) -> ((element(dst, [i_]) >= spark__unsigned__u64__first) and (element(dst, [ i_]) <= spark__unsigned__u64__last))) . H2: dst_index >= dst__index__subtype__1__first . H3: dst_index <= dst__index__subtype__1__last . H4: dst__index__subtype__1__last <= spark__crypto__word_count_t__last . H5: src_index = src_offset + dst_index * 8 . H6: src_index >= src_offset . H7: src_index <= src_offset + dst__index__subtype__1__last * 8 . H8: (dst_index <> dst__index__subtype__1__last) -> ( dst_index + 1 <= natural__last) . H9: (dst_index <> dst__index__subtype__1__last) -> ( src_index + 8 <= natural__last) . H10: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H11: src_offset >= natural__first . H12: src_offset <= natural__last . H13: src__index__subtype__1__first = 0 . H14: dst__index__subtype__1__first = 0 . H15: src_offset <= src__index__subtype__1__last . H16: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H17: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H18: src_offset + 7 <= src__index__subtype__1__last . H19: src_offset + dst__index__subtype__1__last * 8 <= natural__last . H20: not (dst_index = dst__index__subtype__1__last) . H21: dst_index + 1 <= natural__last . H22: dst_index + 1 >= spark__crypto__word_count_t__first . H23: dst_index + 1 <= spark__crypto__word_count_t__last . H24: src_index + 8 <= natural__last . H25: src_index + 8 >= natural__first . H26: src_index + 8 <= natural__last . -> C1: src_index + 8 >= src__index__subtype__1__first . C2: src_index + 8 <= src__index__subtype__1__last . C3: src_index + 8 + 1 >= src__index__subtype__1__first . C4: src_index + 8 + 1 <= src__index__subtype__1__last . C5: src_index + 8 + 2 >= src__index__subtype__1__first . C6: src_index + 8 + 2 <= src__index__subtype__1__last . C7: src_index + 8 + 3 >= src__index__subtype__1__first . C8: src_index + 8 + 3 <= src__index__subtype__1__last . C9: src_index + 8 + 4 >= src__index__subtype__1__first . C10: src_index + 8 + 4 <= src__index__subtype__1__last . C11: src_index + 8 + 5 >= src__index__subtype__1__first . C12: src_index + 8 + 5 <= src__index__subtype__1__last . C13: src_index + 8 + 6 >= src__index__subtype__1__first . C14: src_index + 8 + 6 <= src__index__subtype__1__last . C15: src_index + 8 + 7 >= src__index__subtype__1__first . C16: src_index + 8 + 7 <= src__index__subtype__1__last . 
For path(s) from start to run-time check associated with statement of line 161: procedure_get_64_lsb_first_5. H1: src__index__subtype__1__first = 0 . H2: dst__index__subtype__1__first = 0 . H3: src_offset <= src__index__subtype__1__last . H4: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H5: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H6: src_offset + 7 <= src__index__subtype__1__last . H7: src_offset + dst__index__subtype__1__last * 8 <= natural__last . H8: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H9: src_offset >= natural__first . H10: src_offset <= natural__last . H11: 0 >= spark__crypto__word_count_t__first . H12: 0 <= spark__crypto__word_count_t__last . H13: src_offset >= natural__first . H14: src_offset <= natural__last . H15: src_offset >= src__index__subtype__1__first . H16: src_offset <= src__index__subtype__1__last . H17: src_offset + 1 >= src__index__subtype__1__first . H18: src_offset + 1 <= src__index__subtype__1__last . H19: src_offset + 2 >= src__index__subtype__1__first . H20: src_offset + 2 <= src__index__subtype__1__last . H21: src_offset + 3 >= src__index__subtype__1__first . H22: src_offset + 3 <= src__index__subtype__1__last . H23: src_offset + 4 >= src__index__subtype__1__first . H24: src_offset + 4 <= src__index__subtype__1__last . H25: src_offset + 5 >= src__index__subtype__1__first . H26: src_offset + 5 <= src__index__subtype__1__last . H27: src_offset + 6 >= src__index__subtype__1__first . H28: src_offset + 6 <= src__index__subtype__1__last . H29: src_offset + 7 >= src__index__subtype__1__first . H30: src_offset + 7 <= src__index__subtype__1__last . -> C1: 8 >= spark__unsigned__shift_count__first . C2: 8 <= spark__unsigned__shift_count__last . C3: element(src, [src_offset + 1]) >= interfaces__unsigned_64__first . C4: element(src, [src_offset + 1]) <= interfaces__unsigned_64__last . C5: element(src, [src_offset + 1]) >= spark__unsigned__u64__first . C6: element(src, [src_offset + 1]) <= spark__unsigned__u64__last . C7: src_offset + 1 >= src__index__subtype__1__first . C8: src_offset + 1 <= src__index__subtype__1__last . C9: src_offset + 1 >= integer__base__first . C10: src_offset + 1 <= integer__base__last . C11: element(src, [src_offset]) >= spark__unsigned__u64__first . C12: element(src, [src_offset]) <= spark__unsigned__u64__last . C13: src_offset >= src__index__subtype__1__first . C14: src_offset <= src__index__subtype__1__last . C15: 0 >= dst__index__subtype__1__first . C16: 0 <= dst__index__subtype__1__last . For path(s) from assertion of line 172 to run-time check associated with statement of line 161: procedure_get_64_lsb_first_6. H1: for_all(i_: integer, ((i_ >= dst__index__subtype__1__first) and ( i_ <= dst_index)) -> ((element(dst, [i_]) >= spark__unsigned__u64__first) and (element(dst, [ i_]) <= spark__unsigned__u64__last))) . H2: dst_index >= dst__index__subtype__1__first . H3: dst_index <= dst__index__subtype__1__last . H4: dst__index__subtype__1__last <= spark__crypto__word_count_t__last . H5: src_index = src_offset + dst_index * 8 . H6: src_index >= src_offset . H7: src_index <= src_offset + dst__index__subtype__1__last * 8 . H8: (dst_index <> dst__index__subtype__1__last) -> ( dst_index + 1 <= natural__last) . 
H9: (dst_index <> dst__index__subtype__1__last) -> ( src_index + 8 <= natural__last) . H10: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H11: src_offset >= natural__first . H12: src_offset <= natural__last . H13: src__index__subtype__1__first = 0 . H14: dst__index__subtype__1__first = 0 . H15: src_offset <= src__index__subtype__1__last . H16: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H17: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H18: src_offset + 7 <= src__index__subtype__1__last . H19: src_offset + dst__index__subtype__1__last * 8 <= natural__last . H20: not (dst_index = dst__index__subtype__1__last) . H21: dst_index + 1 <= natural__last . H22: dst_index + 1 >= spark__crypto__word_count_t__first . H23: dst_index + 1 <= spark__crypto__word_count_t__last . H24: src_index + 8 <= natural__last . H25: src_index + 8 >= natural__first . H26: src_index + 8 <= natural__last . H27: src_index + 8 >= src__index__subtype__1__first . H28: src_index + 8 <= src__index__subtype__1__last . H29: src_index + 8 + 1 >= src__index__subtype__1__first . H30: src_index + 8 + 1 <= src__index__subtype__1__last . H31: src_index + 8 + 2 >= src__index__subtype__1__first . H32: src_index + 8 + 2 <= src__index__subtype__1__last . H33: src_index + 8 + 3 >= src__index__subtype__1__first . H34: src_index + 8 + 3 <= src__index__subtype__1__last . H35: src_index + 8 + 4 >= src__index__subtype__1__first . H36: src_index + 8 + 4 <= src__index__subtype__1__last . H37: src_index + 8 + 5 >= src__index__subtype__1__first . H38: src_index + 8 + 5 <= src__index__subtype__1__last . H39: src_index + 8 + 6 >= src__index__subtype__1__first . H40: src_index + 8 + 6 <= src__index__subtype__1__last . H41: src_index + 8 + 7 >= src__index__subtype__1__first . H42: src_index + 8 + 7 <= src__index__subtype__1__last . -> C1: 8 >= spark__unsigned__shift_count__first . C2: 8 <= spark__unsigned__shift_count__last . C3: element(src, [src_index + 8 + 1]) >= interfaces__unsigned_64__first . C4: element(src, [src_index + 8 + 1]) <= interfaces__unsigned_64__last . C5: element(src, [src_index + 8 + 1]) >= spark__unsigned__u64__first . C6: element(src, [src_index + 8 + 1]) <= spark__unsigned__u64__last . C7: src_index + 8 + 1 >= src__index__subtype__1__first . C8: src_index + 8 + 1 <= src__index__subtype__1__last . C9: src_index + 8 + 1 >= integer__base__first . C10: src_index + 8 + 1 <= integer__base__last . C11: element(src, [src_index + 8]) >= spark__unsigned__u64__first . C12: element(src, [src_index + 8]) <= spark__unsigned__u64__last . C13: src_index + 8 >= src__index__subtype__1__first . C14: src_index + 8 <= src__index__subtype__1__last . C15: dst_index + 1 >= dst__index__subtype__1__first . C16: dst_index + 1 <= dst__index__subtype__1__last . For path(s) from start to run-time check associated with statement of line 161: procedure_get_64_lsb_first_7. H1: src__index__subtype__1__first = 0 . H2: dst__index__subtype__1__first = 0 . H3: src_offset <= src__index__subtype__1__last . H4: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H5: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H6: src_offset + 7 <= src__index__subtype__1__last . H7: src_offset + dst__index__subtype__1__last * 8 <= natural__last . 
H8: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H9: src_offset >= natural__first . H10: src_offset <= natural__last . H11: 0 >= spark__crypto__word_count_t__first . H12: 0 <= spark__crypto__word_count_t__last . H13: src_offset >= natural__first . H14: src_offset <= natural__last . H15: src_offset >= src__index__subtype__1__first . H16: src_offset <= src__index__subtype__1__last . H17: src_offset + 1 >= src__index__subtype__1__first . H18: src_offset + 1 <= src__index__subtype__1__last . H19: src_offset + 2 >= src__index__subtype__1__first . H20: src_offset + 2 <= src__index__subtype__1__last . H21: src_offset + 3 >= src__index__subtype__1__first . H22: src_offset + 3 <= src__index__subtype__1__last . H23: src_offset + 4 >= src__index__subtype__1__first . H24: src_offset + 4 <= src__index__subtype__1__last . H25: src_offset + 5 >= src__index__subtype__1__first . H26: src_offset + 5 <= src__index__subtype__1__last . H27: src_offset + 6 >= src__index__subtype__1__first . H28: src_offset + 6 <= src__index__subtype__1__last . H29: src_offset + 7 >= src__index__subtype__1__first . H30: src_offset + 7 <= src__index__subtype__1__last . H31: 8 >= spark__unsigned__shift_count__first . H32: 8 <= spark__unsigned__shift_count__last . H33: element(src, [src_offset + 1]) >= interfaces__unsigned_64__first . H34: element(src, [src_offset + 1]) <= interfaces__unsigned_64__last . H35: element(src, [src_offset + 1]) >= spark__unsigned__u64__first . H36: element(src, [src_offset + 1]) <= spark__unsigned__u64__last . H37: src_offset + 1 >= src__index__subtype__1__first . H38: src_offset + 1 <= src__index__subtype__1__last . H39: src_offset + 1 >= integer__base__first . H40: src_offset + 1 <= integer__base__last . H41: element(src, [src_offset]) >= spark__unsigned__u64__first . H42: element(src, [src_offset]) <= spark__unsigned__u64__last . H43: src_offset >= src__index__subtype__1__first . H44: src_offset <= src__index__subtype__1__last . H45: 0 >= dst__index__subtype__1__first . H46: 0 <= dst__index__subtype__1__last . H47: spark__unsigned__shift_left_64(element(src, [ src_offset + 1]), 8) >= interfaces__unsigned_64__first . H48: spark__unsigned__shift_left_64(element(src, [ src_offset + 1]), 8) <= interfaces__unsigned_64__last . -> C1: 16 >= spark__unsigned__shift_count__first . C2: 16 <= spark__unsigned__shift_count__last . C3: element(src, [src_offset + 2]) >= interfaces__unsigned_64__first . C4: element(src, [src_offset + 2]) <= interfaces__unsigned_64__last . C5: element(src, [src_offset + 2]) >= spark__unsigned__u64__first . C6: element(src, [src_offset + 2]) <= spark__unsigned__u64__last . C7: src_offset + 2 >= src__index__subtype__1__first . C8: src_offset + 2 <= src__index__subtype__1__last . C9: src_offset + 2 >= integer__base__first . C10: src_offset + 2 <= integer__base__last . For path(s) from assertion of line 172 to run-time check associated with statement of line 161: procedure_get_64_lsb_first_8. H1: for_all(i_: integer, ((i_ >= dst__index__subtype__1__first) and ( i_ <= dst_index)) -> ((element(dst, [i_]) >= spark__unsigned__u64__first) and (element(dst, [ i_]) <= spark__unsigned__u64__last))) . H2: dst_index >= dst__index__subtype__1__first . H3: dst_index <= dst__index__subtype__1__last . H4: dst__index__subtype__1__last <= spark__crypto__word_count_t__last . 
H5: src_index = src_offset + dst_index * 8 . H6: src_index >= src_offset . H7: src_index <= src_offset + dst__index__subtype__1__last * 8 . H8: (dst_index <> dst__index__subtype__1__last) -> ( dst_index + 1 <= natural__last) . H9: (dst_index <> dst__index__subtype__1__last) -> ( src_index + 8 <= natural__last) . H10: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H11: src_offset >= natural__first . H12: src_offset <= natural__last . H13: src__index__subtype__1__first = 0 . H14: dst__index__subtype__1__first = 0 . H15: src_offset <= src__index__subtype__1__last . H16: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H17: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H18: src_offset + 7 <= src__index__subtype__1__last . H19: src_offset + dst__index__subtype__1__last * 8 <= natural__last . H20: not (dst_index = dst__index__subtype__1__last) . H21: dst_index + 1 <= natural__last . H22: dst_index + 1 >= spark__crypto__word_count_t__first . H23: dst_index + 1 <= spark__crypto__word_count_t__last . H24: src_index + 8 <= natural__last . H25: src_index + 8 >= natural__first . H26: src_index + 8 <= natural__last . H27: src_index + 8 >= src__index__subtype__1__first . H28: src_index + 8 <= src__index__subtype__1__last . H29: src_index + 8 + 1 >= src__index__subtype__1__first . H30: src_index + 8 + 1 <= src__index__subtype__1__last . H31: src_index + 8 + 2 >= src__index__subtype__1__first . H32: src_index + 8 + 2 <= src__index__subtype__1__last . H33: src_index + 8 + 3 >= src__index__subtype__1__first . H34: src_index + 8 + 3 <= src__index__subtype__1__last . H35: src_index + 8 + 4 >= src__index__subtype__1__first . H36: src_index + 8 + 4 <= src__index__subtype__1__last . H37: src_index + 8 + 5 >= src__index__subtype__1__first . H38: src_index + 8 + 5 <= src__index__subtype__1__last . H39: src_index + 8 + 6 >= src__index__subtype__1__first . H40: src_index + 8 + 6 <= src__index__subtype__1__last . H41: src_index + 8 + 7 >= src__index__subtype__1__first . H42: src_index + 8 + 7 <= src__index__subtype__1__last . H43: 8 >= spark__unsigned__shift_count__first . H44: 8 <= spark__unsigned__shift_count__last . H45: element(src, [src_index + 8 + 1]) >= interfaces__unsigned_64__first . H46: element(src, [src_index + 8 + 1]) <= interfaces__unsigned_64__last . H47: element(src, [src_index + 8 + 1]) >= spark__unsigned__u64__first . H48: element(src, [src_index + 8 + 1]) <= spark__unsigned__u64__last . H49: src_index + 8 + 1 >= src__index__subtype__1__first . H50: src_index + 8 + 1 <= src__index__subtype__1__last . H51: src_index + 8 + 1 >= integer__base__first . H52: src_index + 8 + 1 <= integer__base__last . H53: element(src, [src_index + 8]) >= spark__unsigned__u64__first . H54: element(src, [src_index + 8]) <= spark__unsigned__u64__last . H55: src_index + 8 >= src__index__subtype__1__first . H56: src_index + 8 <= src__index__subtype__1__last . H57: dst_index + 1 >= dst__index__subtype__1__first . H58: dst_index + 1 <= dst__index__subtype__1__last . H59: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 1]), 8) >= interfaces__unsigned_64__first . H60: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 1]), 8) <= interfaces__unsigned_64__last . -> C1: 16 >= spark__unsigned__shift_count__first . 
C2: 16 <= spark__unsigned__shift_count__last . C3: element(src, [src_index + 8 + 2]) >= interfaces__unsigned_64__first . C4: element(src, [src_index + 8 + 2]) <= interfaces__unsigned_64__last . C5: element(src, [src_index + 8 + 2]) >= spark__unsigned__u64__first . C6: element(src, [src_index + 8 + 2]) <= spark__unsigned__u64__last . C7: src_index + 8 + 2 >= src__index__subtype__1__first . C8: src_index + 8 + 2 <= src__index__subtype__1__last . C9: src_index + 8 + 2 >= integer__base__first . C10: src_index + 8 + 2 <= integer__base__last . For path(s) from start to run-time check associated with statement of line 161: procedure_get_64_lsb_first_9. H1: src__index__subtype__1__first = 0 . H2: dst__index__subtype__1__first = 0 . H3: src_offset <= src__index__subtype__1__last . H4: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H5: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H6: src_offset + 7 <= src__index__subtype__1__last . H7: src_offset + dst__index__subtype__1__last * 8 <= natural__last . H8: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H9: src_offset >= natural__first . H10: src_offset <= natural__last . H11: 0 >= spark__crypto__word_count_t__first . H12: 0 <= spark__crypto__word_count_t__last . H13: src_offset >= natural__first . H14: src_offset <= natural__last . H15: src_offset >= src__index__subtype__1__first . H16: src_offset <= src__index__subtype__1__last . H17: src_offset + 1 >= src__index__subtype__1__first . H18: src_offset + 1 <= src__index__subtype__1__last . H19: src_offset + 2 >= src__index__subtype__1__first . H20: src_offset + 2 <= src__index__subtype__1__last . H21: src_offset + 3 >= src__index__subtype__1__first . H22: src_offset + 3 <= src__index__subtype__1__last . H23: src_offset + 4 >= src__index__subtype__1__first . H24: src_offset + 4 <= src__index__subtype__1__last . H25: src_offset + 5 >= src__index__subtype__1__first . H26: src_offset + 5 <= src__index__subtype__1__last . H27: src_offset + 6 >= src__index__subtype__1__first . H28: src_offset + 6 <= src__index__subtype__1__last . H29: src_offset + 7 >= src__index__subtype__1__first . H30: src_offset + 7 <= src__index__subtype__1__last . H31: 8 >= spark__unsigned__shift_count__first . H32: 8 <= spark__unsigned__shift_count__last . H33: element(src, [src_offset + 1]) >= interfaces__unsigned_64__first . H34: element(src, [src_offset + 1]) <= interfaces__unsigned_64__last . H35: element(src, [src_offset + 1]) >= spark__unsigned__u64__first . H36: element(src, [src_offset + 1]) <= spark__unsigned__u64__last . H37: src_offset + 1 >= src__index__subtype__1__first . H38: src_offset + 1 <= src__index__subtype__1__last . H39: src_offset + 1 >= integer__base__first . H40: src_offset + 1 <= integer__base__last . H41: element(src, [src_offset]) >= spark__unsigned__u64__first . H42: element(src, [src_offset]) <= spark__unsigned__u64__last . H43: src_offset >= src__index__subtype__1__first . H44: src_offset <= src__index__subtype__1__last . H45: 0 >= dst__index__subtype__1__first . H46: 0 <= dst__index__subtype__1__last . H47: spark__unsigned__shift_left_64(element(src, [ src_offset + 1]), 8) >= interfaces__unsigned_64__first . H48: spark__unsigned__shift_left_64(element(src, [ src_offset + 1]), 8) <= interfaces__unsigned_64__last . 
H49: 16 >= spark__unsigned__shift_count__first . H50: 16 <= spark__unsigned__shift_count__last . H51: element(src, [src_offset + 2]) >= interfaces__unsigned_64__first . H52: element(src, [src_offset + 2]) <= interfaces__unsigned_64__last . H53: element(src, [src_offset + 2]) >= spark__unsigned__u64__first . H54: element(src, [src_offset + 2]) <= spark__unsigned__u64__last . H55: src_offset + 2 >= src__index__subtype__1__first . H56: src_offset + 2 <= src__index__subtype__1__last . H57: src_offset + 2 >= integer__base__first . H58: src_offset + 2 <= integer__base__last . H59: spark__unsigned__shift_left_64(element(src, [ src_offset + 2]), 16) >= interfaces__unsigned_64__first . H60: spark__unsigned__shift_left_64(element(src, [ src_offset + 2]), 16) <= interfaces__unsigned_64__last . -> C1: 24 >= spark__unsigned__shift_count__first . C2: 24 <= spark__unsigned__shift_count__last . C3: element(src, [src_offset + 3]) >= interfaces__unsigned_64__first . C4: element(src, [src_offset + 3]) <= interfaces__unsigned_64__last . C5: element(src, [src_offset + 3]) >= spark__unsigned__u64__first . C6: element(src, [src_offset + 3]) <= spark__unsigned__u64__last . C7: src_offset + 3 >= src__index__subtype__1__first . C8: src_offset + 3 <= src__index__subtype__1__last . C9: src_offset + 3 >= integer__base__first . C10: src_offset + 3 <= integer__base__last . For path(s) from assertion of line 172 to run-time check associated with statement of line 161: procedure_get_64_lsb_first_10. H1: for_all(i_: integer, ((i_ >= dst__index__subtype__1__first) and ( i_ <= dst_index)) -> ((element(dst, [i_]) >= spark__unsigned__u64__first) and (element(dst, [ i_]) <= spark__unsigned__u64__last))) . H2: dst_index >= dst__index__subtype__1__first . H3: dst_index <= dst__index__subtype__1__last . H4: dst__index__subtype__1__last <= spark__crypto__word_count_t__last . H5: src_index = src_offset + dst_index * 8 . H6: src_index >= src_offset . H7: src_index <= src_offset + dst__index__subtype__1__last * 8 . H8: (dst_index <> dst__index__subtype__1__last) -> ( dst_index + 1 <= natural__last) . H9: (dst_index <> dst__index__subtype__1__last) -> ( src_index + 8 <= natural__last) . H10: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H11: src_offset >= natural__first . H12: src_offset <= natural__last . H13: src__index__subtype__1__first = 0 . H14: dst__index__subtype__1__first = 0 . H15: src_offset <= src__index__subtype__1__last . H16: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H17: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H18: src_offset + 7 <= src__index__subtype__1__last . H19: src_offset + dst__index__subtype__1__last * 8 <= natural__last . H20: not (dst_index = dst__index__subtype__1__last) . H21: dst_index + 1 <= natural__last . H22: dst_index + 1 >= spark__crypto__word_count_t__first . H23: dst_index + 1 <= spark__crypto__word_count_t__last . H24: src_index + 8 <= natural__last . H25: src_index + 8 >= natural__first . H26: src_index + 8 <= natural__last . H27: src_index + 8 >= src__index__subtype__1__first . H28: src_index + 8 <= src__index__subtype__1__last . H29: src_index + 8 + 1 >= src__index__subtype__1__first . H30: src_index + 8 + 1 <= src__index__subtype__1__last . H31: src_index + 8 + 2 >= src__index__subtype__1__first . 
H32: src_index + 8 + 2 <= src__index__subtype__1__last . H33: src_index + 8 + 3 >= src__index__subtype__1__first . H34: src_index + 8 + 3 <= src__index__subtype__1__last . H35: src_index + 8 + 4 >= src__index__subtype__1__first . H36: src_index + 8 + 4 <= src__index__subtype__1__last . H37: src_index + 8 + 5 >= src__index__subtype__1__first . H38: src_index + 8 + 5 <= src__index__subtype__1__last . H39: src_index + 8 + 6 >= src__index__subtype__1__first . H40: src_index + 8 + 6 <= src__index__subtype__1__last . H41: src_index + 8 + 7 >= src__index__subtype__1__first . H42: src_index + 8 + 7 <= src__index__subtype__1__last . H43: 8 >= spark__unsigned__shift_count__first . H44: 8 <= spark__unsigned__shift_count__last . H45: element(src, [src_index + 8 + 1]) >= interfaces__unsigned_64__first . H46: element(src, [src_index + 8 + 1]) <= interfaces__unsigned_64__last . H47: element(src, [src_index + 8 + 1]) >= spark__unsigned__u64__first . H48: element(src, [src_index + 8 + 1]) <= spark__unsigned__u64__last . H49: src_index + 8 + 1 >= src__index__subtype__1__first . H50: src_index + 8 + 1 <= src__index__subtype__1__last . H51: src_index + 8 + 1 >= integer__base__first . H52: src_index + 8 + 1 <= integer__base__last . H53: element(src, [src_index + 8]) >= spark__unsigned__u64__first . H54: element(src, [src_index + 8]) <= spark__unsigned__u64__last . H55: src_index + 8 >= src__index__subtype__1__first . H56: src_index + 8 <= src__index__subtype__1__last . H57: dst_index + 1 >= dst__index__subtype__1__first . H58: dst_index + 1 <= dst__index__subtype__1__last . H59: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 1]), 8) >= interfaces__unsigned_64__first . H60: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 1]), 8) <= interfaces__unsigned_64__last . H61: 16 >= spark__unsigned__shift_count__first . H62: 16 <= spark__unsigned__shift_count__last . H63: element(src, [src_index + 8 + 2]) >= interfaces__unsigned_64__first . H64: element(src, [src_index + 8 + 2]) <= interfaces__unsigned_64__last . H65: element(src, [src_index + 8 + 2]) >= spark__unsigned__u64__first . H66: element(src, [src_index + 8 + 2]) <= spark__unsigned__u64__last . H67: src_index + 8 + 2 >= src__index__subtype__1__first . H68: src_index + 8 + 2 <= src__index__subtype__1__last . H69: src_index + 8 + 2 >= integer__base__first . H70: src_index + 8 + 2 <= integer__base__last . H71: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 2]), 16) >= interfaces__unsigned_64__first . H72: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 2]), 16) <= interfaces__unsigned_64__last . -> C1: 24 >= spark__unsigned__shift_count__first . C2: 24 <= spark__unsigned__shift_count__last . C3: element(src, [src_index + 8 + 3]) >= interfaces__unsigned_64__first . C4: element(src, [src_index + 8 + 3]) <= interfaces__unsigned_64__last . C5: element(src, [src_index + 8 + 3]) >= spark__unsigned__u64__first . C6: element(src, [src_index + 8 + 3]) <= spark__unsigned__u64__last . C7: src_index + 8 + 3 >= src__index__subtype__1__first . C8: src_index + 8 + 3 <= src__index__subtype__1__last . C9: src_index + 8 + 3 >= integer__base__first . C10: src_index + 8 + 3 <= integer__base__last . For path(s) from start to run-time check associated with statement of line 161: procedure_get_64_lsb_first_11. H1: src__index__subtype__1__first = 0 . H2: dst__index__subtype__1__first = 0 . H3: src_offset <= src__index__subtype__1__last . H4: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . 
H5: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H6: src_offset + 7 <= src__index__subtype__1__last . H7: src_offset + dst__index__subtype__1__last * 8 <= natural__last . H8: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H9: src_offset >= natural__first . H10: src_offset <= natural__last . H11: 0 >= spark__crypto__word_count_t__first . H12: 0 <= spark__crypto__word_count_t__last . H13: src_offset >= natural__first . H14: src_offset <= natural__last . H15: src_offset >= src__index__subtype__1__first . H16: src_offset <= src__index__subtype__1__last . H17: src_offset + 1 >= src__index__subtype__1__first . H18: src_offset + 1 <= src__index__subtype__1__last . H19: src_offset + 2 >= src__index__subtype__1__first . H20: src_offset + 2 <= src__index__subtype__1__last . H21: src_offset + 3 >= src__index__subtype__1__first . H22: src_offset + 3 <= src__index__subtype__1__last . H23: src_offset + 4 >= src__index__subtype__1__first . H24: src_offset + 4 <= src__index__subtype__1__last . H25: src_offset + 5 >= src__index__subtype__1__first . H26: src_offset + 5 <= src__index__subtype__1__last . H27: src_offset + 6 >= src__index__subtype__1__first . H28: src_offset + 6 <= src__index__subtype__1__last . H29: src_offset + 7 >= src__index__subtype__1__first . H30: src_offset + 7 <= src__index__subtype__1__last . H31: 8 >= spark__unsigned__shift_count__first . H32: 8 <= spark__unsigned__shift_count__last . H33: element(src, [src_offset + 1]) >= interfaces__unsigned_64__first . H34: element(src, [src_offset + 1]) <= interfaces__unsigned_64__last . H35: element(src, [src_offset + 1]) >= spark__unsigned__u64__first . H36: element(src, [src_offset + 1]) <= spark__unsigned__u64__last . H37: src_offset + 1 >= src__index__subtype__1__first . H38: src_offset + 1 <= src__index__subtype__1__last . H39: src_offset + 1 >= integer__base__first . H40: src_offset + 1 <= integer__base__last . H41: element(src, [src_offset]) >= spark__unsigned__u64__first . H42: element(src, [src_offset]) <= spark__unsigned__u64__last . H43: src_offset >= src__index__subtype__1__first . H44: src_offset <= src__index__subtype__1__last . H45: 0 >= dst__index__subtype__1__first . H46: 0 <= dst__index__subtype__1__last . H47: spark__unsigned__shift_left_64(element(src, [ src_offset + 1]), 8) >= interfaces__unsigned_64__first . H48: spark__unsigned__shift_left_64(element(src, [ src_offset + 1]), 8) <= interfaces__unsigned_64__last . H49: 16 >= spark__unsigned__shift_count__first . H50: 16 <= spark__unsigned__shift_count__last . H51: element(src, [src_offset + 2]) >= interfaces__unsigned_64__first . H52: element(src, [src_offset + 2]) <= interfaces__unsigned_64__last . H53: element(src, [src_offset + 2]) >= spark__unsigned__u64__first . H54: element(src, [src_offset + 2]) <= spark__unsigned__u64__last . H55: src_offset + 2 >= src__index__subtype__1__first . H56: src_offset + 2 <= src__index__subtype__1__last . H57: src_offset + 2 >= integer__base__first . H58: src_offset + 2 <= integer__base__last . H59: spark__unsigned__shift_left_64(element(src, [ src_offset + 2]), 16) >= interfaces__unsigned_64__first . H60: spark__unsigned__shift_left_64(element(src, [ src_offset + 2]), 16) <= interfaces__unsigned_64__last . H61: 24 >= spark__unsigned__shift_count__first . H62: 24 <= spark__unsigned__shift_count__last . 
H63: element(src, [src_offset + 3]) >= interfaces__unsigned_64__first . H64: element(src, [src_offset + 3]) <= interfaces__unsigned_64__last . H65: element(src, [src_offset + 3]) >= spark__unsigned__u64__first . H66: element(src, [src_offset + 3]) <= spark__unsigned__u64__last . H67: src_offset + 3 >= src__index__subtype__1__first . H68: src_offset + 3 <= src__index__subtype__1__last . H69: src_offset + 3 >= integer__base__first . H70: src_offset + 3 <= integer__base__last . H71: spark__unsigned__shift_left_64(element(src, [ src_offset + 3]), 24) >= interfaces__unsigned_64__first . H72: spark__unsigned__shift_left_64(element(src, [ src_offset + 3]), 24) <= interfaces__unsigned_64__last . -> C1: 32 >= spark__unsigned__shift_count__first . C2: 32 <= spark__unsigned__shift_count__last . C3: element(src, [src_offset + 4]) >= interfaces__unsigned_64__first . C4: element(src, [src_offset + 4]) <= interfaces__unsigned_64__last . C5: element(src, [src_offset + 4]) >= spark__unsigned__u64__first . C6: element(src, [src_offset + 4]) <= spark__unsigned__u64__last . C7: src_offset + 4 >= src__index__subtype__1__first . C8: src_offset + 4 <= src__index__subtype__1__last . C9: src_offset + 4 >= integer__base__first . C10: src_offset + 4 <= integer__base__last . For path(s) from assertion of line 172 to run-time check associated with statement of line 161: procedure_get_64_lsb_first_12. H1: for_all(i_: integer, ((i_ >= dst__index__subtype__1__first) and ( i_ <= dst_index)) -> ((element(dst, [i_]) >= spark__unsigned__u64__first) and (element(dst, [ i_]) <= spark__unsigned__u64__last))) . H2: dst_index >= dst__index__subtype__1__first . H3: dst_index <= dst__index__subtype__1__last . H4: dst__index__subtype__1__last <= spark__crypto__word_count_t__last . H5: src_index = src_offset + dst_index * 8 . H6: src_index >= src_offset . H7: src_index <= src_offset + dst__index__subtype__1__last * 8 . H8: (dst_index <> dst__index__subtype__1__last) -> ( dst_index + 1 <= natural__last) . H9: (dst_index <> dst__index__subtype__1__last) -> ( src_index + 8 <= natural__last) . H10: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H11: src_offset >= natural__first . H12: src_offset <= natural__last . H13: src__index__subtype__1__first = 0 . H14: dst__index__subtype__1__first = 0 . H15: src_offset <= src__index__subtype__1__last . H16: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H17: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H18: src_offset + 7 <= src__index__subtype__1__last . H19: src_offset + dst__index__subtype__1__last * 8 <= natural__last . H20: not (dst_index = dst__index__subtype__1__last) . H21: dst_index + 1 <= natural__last . H22: dst_index + 1 >= spark__crypto__word_count_t__first . H23: dst_index + 1 <= spark__crypto__word_count_t__last . H24: src_index + 8 <= natural__last . H25: src_index + 8 >= natural__first . H26: src_index + 8 <= natural__last . H27: src_index + 8 >= src__index__subtype__1__first . H28: src_index + 8 <= src__index__subtype__1__last . H29: src_index + 8 + 1 >= src__index__subtype__1__first . H30: src_index + 8 + 1 <= src__index__subtype__1__last . H31: src_index + 8 + 2 >= src__index__subtype__1__first . H32: src_index + 8 + 2 <= src__index__subtype__1__last . H33: src_index + 8 + 3 >= src__index__subtype__1__first . 
H34: src_index + 8 + 3 <= src__index__subtype__1__last . H35: src_index + 8 + 4 >= src__index__subtype__1__first . H36: src_index + 8 + 4 <= src__index__subtype__1__last . H37: src_index + 8 + 5 >= src__index__subtype__1__first . H38: src_index + 8 + 5 <= src__index__subtype__1__last . H39: src_index + 8 + 6 >= src__index__subtype__1__first . H40: src_index + 8 + 6 <= src__index__subtype__1__last . H41: src_index + 8 + 7 >= src__index__subtype__1__first . H42: src_index + 8 + 7 <= src__index__subtype__1__last . H43: 8 >= spark__unsigned__shift_count__first . H44: 8 <= spark__unsigned__shift_count__last . H45: element(src, [src_index + 8 + 1]) >= interfaces__unsigned_64__first . H46: element(src, [src_index + 8 + 1]) <= interfaces__unsigned_64__last . H47: element(src, [src_index + 8 + 1]) >= spark__unsigned__u64__first . H48: element(src, [src_index + 8 + 1]) <= spark__unsigned__u64__last . H49: src_index + 8 + 1 >= src__index__subtype__1__first . H50: src_index + 8 + 1 <= src__index__subtype__1__last . H51: src_index + 8 + 1 >= integer__base__first . H52: src_index + 8 + 1 <= integer__base__last . H53: element(src, [src_index + 8]) >= spark__unsigned__u64__first . H54: element(src, [src_index + 8]) <= spark__unsigned__u64__last . H55: src_index + 8 >= src__index__subtype__1__first . H56: src_index + 8 <= src__index__subtype__1__last . H57: dst_index + 1 >= dst__index__subtype__1__first . H58: dst_index + 1 <= dst__index__subtype__1__last . H59: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 1]), 8) >= interfaces__unsigned_64__first . H60: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 1]), 8) <= interfaces__unsigned_64__last . H61: 16 >= spark__unsigned__shift_count__first . H62: 16 <= spark__unsigned__shift_count__last . H63: element(src, [src_index + 8 + 2]) >= interfaces__unsigned_64__first . H64: element(src, [src_index + 8 + 2]) <= interfaces__unsigned_64__last . H65: element(src, [src_index + 8 + 2]) >= spark__unsigned__u64__first . H66: element(src, [src_index + 8 + 2]) <= spark__unsigned__u64__last . H67: src_index + 8 + 2 >= src__index__subtype__1__first . H68: src_index + 8 + 2 <= src__index__subtype__1__last . H69: src_index + 8 + 2 >= integer__base__first . H70: src_index + 8 + 2 <= integer__base__last . H71: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 2]), 16) >= interfaces__unsigned_64__first . H72: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 2]), 16) <= interfaces__unsigned_64__last . H73: 24 >= spark__unsigned__shift_count__first . H74: 24 <= spark__unsigned__shift_count__last . H75: element(src, [src_index + 8 + 3]) >= interfaces__unsigned_64__first . H76: element(src, [src_index + 8 + 3]) <= interfaces__unsigned_64__last . H77: element(src, [src_index + 8 + 3]) >= spark__unsigned__u64__first . H78: element(src, [src_index + 8 + 3]) <= spark__unsigned__u64__last . H79: src_index + 8 + 3 >= src__index__subtype__1__first . H80: src_index + 8 + 3 <= src__index__subtype__1__last . H81: src_index + 8 + 3 >= integer__base__first . H82: src_index + 8 + 3 <= integer__base__last . H83: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 3]), 24) >= interfaces__unsigned_64__first . H84: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 3]), 24) <= interfaces__unsigned_64__last . -> C1: 32 >= spark__unsigned__shift_count__first . C2: 32 <= spark__unsigned__shift_count__last . C3: element(src, [src_index + 8 + 4]) >= interfaces__unsigned_64__first . 
C4: element(src, [src_index + 8 + 4]) <= interfaces__unsigned_64__last . C5: element(src, [src_index + 8 + 4]) >= spark__unsigned__u64__first . C6: element(src, [src_index + 8 + 4]) <= spark__unsigned__u64__last . C7: src_index + 8 + 4 >= src__index__subtype__1__first . C8: src_index + 8 + 4 <= src__index__subtype__1__last . C9: src_index + 8 + 4 >= integer__base__first . C10: src_index + 8 + 4 <= integer__base__last . For path(s) from start to run-time check associated with statement of line 161: procedure_get_64_lsb_first_13. H1: src__index__subtype__1__first = 0 . H2: dst__index__subtype__1__first = 0 . H3: src_offset <= src__index__subtype__1__last . H4: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H5: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H6: src_offset + 7 <= src__index__subtype__1__last . H7: src_offset + dst__index__subtype__1__last * 8 <= natural__last . H8: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H9: src_offset >= natural__first . H10: src_offset <= natural__last . H11: 0 >= spark__crypto__word_count_t__first . H12: 0 <= spark__crypto__word_count_t__last . H13: src_offset >= natural__first . H14: src_offset <= natural__last . H15: src_offset >= src__index__subtype__1__first . H16: src_offset <= src__index__subtype__1__last . H17: src_offset + 1 >= src__index__subtype__1__first . H18: src_offset + 1 <= src__index__subtype__1__last . H19: src_offset + 2 >= src__index__subtype__1__first . H20: src_offset + 2 <= src__index__subtype__1__last . H21: src_offset + 3 >= src__index__subtype__1__first . H22: src_offset + 3 <= src__index__subtype__1__last . H23: src_offset + 4 >= src__index__subtype__1__first . H24: src_offset + 4 <= src__index__subtype__1__last . H25: src_offset + 5 >= src__index__subtype__1__first . H26: src_offset + 5 <= src__index__subtype__1__last . H27: src_offset + 6 >= src__index__subtype__1__first . H28: src_offset + 6 <= src__index__subtype__1__last . H29: src_offset + 7 >= src__index__subtype__1__first . H30: src_offset + 7 <= src__index__subtype__1__last . H31: 8 >= spark__unsigned__shift_count__first . H32: 8 <= spark__unsigned__shift_count__last . H33: element(src, [src_offset + 1]) >= interfaces__unsigned_64__first . H34: element(src, [src_offset + 1]) <= interfaces__unsigned_64__last . H35: element(src, [src_offset + 1]) >= spark__unsigned__u64__first . H36: element(src, [src_offset + 1]) <= spark__unsigned__u64__last . H37: src_offset + 1 >= src__index__subtype__1__first . H38: src_offset + 1 <= src__index__subtype__1__last . H39: src_offset + 1 >= integer__base__first . H40: src_offset + 1 <= integer__base__last . H41: element(src, [src_offset]) >= spark__unsigned__u64__first . H42: element(src, [src_offset]) <= spark__unsigned__u64__last . H43: src_offset >= src__index__subtype__1__first . H44: src_offset <= src__index__subtype__1__last . H45: 0 >= dst__index__subtype__1__first . H46: 0 <= dst__index__subtype__1__last . H47: spark__unsigned__shift_left_64(element(src, [ src_offset + 1]), 8) >= interfaces__unsigned_64__first . H48: spark__unsigned__shift_left_64(element(src, [ src_offset + 1]), 8) <= interfaces__unsigned_64__last . H49: 16 >= spark__unsigned__shift_count__first . H50: 16 <= spark__unsigned__shift_count__last . 
H51: element(src, [src_offset + 2]) >= interfaces__unsigned_64__first . H52: element(src, [src_offset + 2]) <= interfaces__unsigned_64__last . H53: element(src, [src_offset + 2]) >= spark__unsigned__u64__first . H54: element(src, [src_offset + 2]) <= spark__unsigned__u64__last . H55: src_offset + 2 >= src__index__subtype__1__first . H56: src_offset + 2 <= src__index__subtype__1__last . H57: src_offset + 2 >= integer__base__first . H58: src_offset + 2 <= integer__base__last . H59: spark__unsigned__shift_left_64(element(src, [ src_offset + 2]), 16) >= interfaces__unsigned_64__first . H60: spark__unsigned__shift_left_64(element(src, [ src_offset + 2]), 16) <= interfaces__unsigned_64__last . H61: 24 >= spark__unsigned__shift_count__first . H62: 24 <= spark__unsigned__shift_count__last . H63: element(src, [src_offset + 3]) >= interfaces__unsigned_64__first . H64: element(src, [src_offset + 3]) <= interfaces__unsigned_64__last . H65: element(src, [src_offset + 3]) >= spark__unsigned__u64__first . H66: element(src, [src_offset + 3]) <= spark__unsigned__u64__last . H67: src_offset + 3 >= src__index__subtype__1__first . H68: src_offset + 3 <= src__index__subtype__1__last . H69: src_offset + 3 >= integer__base__first . H70: src_offset + 3 <= integer__base__last . H71: spark__unsigned__shift_left_64(element(src, [ src_offset + 3]), 24) >= interfaces__unsigned_64__first . H72: spark__unsigned__shift_left_64(element(src, [ src_offset + 3]), 24) <= interfaces__unsigned_64__last . H73: 32 >= spark__unsigned__shift_count__first . H74: 32 <= spark__unsigned__shift_count__last . H75: element(src, [src_offset + 4]) >= interfaces__unsigned_64__first . H76: element(src, [src_offset + 4]) <= interfaces__unsigned_64__last . H77: element(src, [src_offset + 4]) >= spark__unsigned__u64__first . H78: element(src, [src_offset + 4]) <= spark__unsigned__u64__last . H79: src_offset + 4 >= src__index__subtype__1__first . H80: src_offset + 4 <= src__index__subtype__1__last . H81: src_offset + 4 >= integer__base__first . H82: src_offset + 4 <= integer__base__last . H83: spark__unsigned__shift_left_64(element(src, [ src_offset + 4]), 32) >= interfaces__unsigned_64__first . H84: spark__unsigned__shift_left_64(element(src, [ src_offset + 4]), 32) <= interfaces__unsigned_64__last . -> C1: 40 >= spark__unsigned__shift_count__first . C2: 40 <= spark__unsigned__shift_count__last . C3: element(src, [src_offset + 5]) >= interfaces__unsigned_64__first . C4: element(src, [src_offset + 5]) <= interfaces__unsigned_64__last . C5: element(src, [src_offset + 5]) >= spark__unsigned__u64__first . C6: element(src, [src_offset + 5]) <= spark__unsigned__u64__last . C7: src_offset + 5 >= src__index__subtype__1__first . C8: src_offset + 5 <= src__index__subtype__1__last . C9: src_offset + 5 >= integer__base__first . C10: src_offset + 5 <= integer__base__last . For path(s) from assertion of line 172 to run-time check associated with statement of line 161: procedure_get_64_lsb_first_14. H1: for_all(i_: integer, ((i_ >= dst__index__subtype__1__first) and ( i_ <= dst_index)) -> ((element(dst, [i_]) >= spark__unsigned__u64__first) and (element(dst, [ i_]) <= spark__unsigned__u64__last))) . H2: dst_index >= dst__index__subtype__1__first . H3: dst_index <= dst__index__subtype__1__last . H4: dst__index__subtype__1__last <= spark__crypto__word_count_t__last . H5: src_index = src_offset + dst_index * 8 . H6: src_index >= src_offset . H7: src_index <= src_offset + dst__index__subtype__1__last * 8 . 
H8: (dst_index <> dst__index__subtype__1__last) -> ( dst_index + 1 <= natural__last) . H9: (dst_index <> dst__index__subtype__1__last) -> ( src_index + 8 <= natural__last) . H10: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H11: src_offset >= natural__first . H12: src_offset <= natural__last . H13: src__index__subtype__1__first = 0 . H14: dst__index__subtype__1__first = 0 . H15: src_offset <= src__index__subtype__1__last . H16: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H17: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H18: src_offset + 7 <= src__index__subtype__1__last . H19: src_offset + dst__index__subtype__1__last * 8 <= natural__last . H20: not (dst_index = dst__index__subtype__1__last) . H21: dst_index + 1 <= natural__last . H22: dst_index + 1 >= spark__crypto__word_count_t__first . H23: dst_index + 1 <= spark__crypto__word_count_t__last . H24: src_index + 8 <= natural__last . H25: src_index + 8 >= natural__first . H26: src_index + 8 <= natural__last . H27: src_index + 8 >= src__index__subtype__1__first . H28: src_index + 8 <= src__index__subtype__1__last . H29: src_index + 8 + 1 >= src__index__subtype__1__first . H30: src_index + 8 + 1 <= src__index__subtype__1__last . H31: src_index + 8 + 2 >= src__index__subtype__1__first . H32: src_index + 8 + 2 <= src__index__subtype__1__last . H33: src_index + 8 + 3 >= src__index__subtype__1__first . H34: src_index + 8 + 3 <= src__index__subtype__1__last . H35: src_index + 8 + 4 >= src__index__subtype__1__first . H36: src_index + 8 + 4 <= src__index__subtype__1__last . H37: src_index + 8 + 5 >= src__index__subtype__1__first . H38: src_index + 8 + 5 <= src__index__subtype__1__last . H39: src_index + 8 + 6 >= src__index__subtype__1__first . H40: src_index + 8 + 6 <= src__index__subtype__1__last . H41: src_index + 8 + 7 >= src__index__subtype__1__first . H42: src_index + 8 + 7 <= src__index__subtype__1__last . H43: 8 >= spark__unsigned__shift_count__first . H44: 8 <= spark__unsigned__shift_count__last . H45: element(src, [src_index + 8 + 1]) >= interfaces__unsigned_64__first . H46: element(src, [src_index + 8 + 1]) <= interfaces__unsigned_64__last . H47: element(src, [src_index + 8 + 1]) >= spark__unsigned__u64__first . H48: element(src, [src_index + 8 + 1]) <= spark__unsigned__u64__last . H49: src_index + 8 + 1 >= src__index__subtype__1__first . H50: src_index + 8 + 1 <= src__index__subtype__1__last . H51: src_index + 8 + 1 >= integer__base__first . H52: src_index + 8 + 1 <= integer__base__last . H53: element(src, [src_index + 8]) >= spark__unsigned__u64__first . H54: element(src, [src_index + 8]) <= spark__unsigned__u64__last . H55: src_index + 8 >= src__index__subtype__1__first . H56: src_index + 8 <= src__index__subtype__1__last . H57: dst_index + 1 >= dst__index__subtype__1__first . H58: dst_index + 1 <= dst__index__subtype__1__last . H59: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 1]), 8) >= interfaces__unsigned_64__first . H60: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 1]), 8) <= interfaces__unsigned_64__last . H61: 16 >= spark__unsigned__shift_count__first . H62: 16 <= spark__unsigned__shift_count__last . H63: element(src, [src_index + 8 + 2]) >= interfaces__unsigned_64__first . 
H64: element(src, [src_index + 8 + 2]) <= interfaces__unsigned_64__last . H65: element(src, [src_index + 8 + 2]) >= spark__unsigned__u64__first . H66: element(src, [src_index + 8 + 2]) <= spark__unsigned__u64__last . H67: src_index + 8 + 2 >= src__index__subtype__1__first . H68: src_index + 8 + 2 <= src__index__subtype__1__last . H69: src_index + 8 + 2 >= integer__base__first . H70: src_index + 8 + 2 <= integer__base__last . H71: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 2]), 16) >= interfaces__unsigned_64__first . H72: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 2]), 16) <= interfaces__unsigned_64__last . H73: 24 >= spark__unsigned__shift_count__first . H74: 24 <= spark__unsigned__shift_count__last . H75: element(src, [src_index + 8 + 3]) >= interfaces__unsigned_64__first . H76: element(src, [src_index + 8 + 3]) <= interfaces__unsigned_64__last . H77: element(src, [src_index + 8 + 3]) >= spark__unsigned__u64__first . H78: element(src, [src_index + 8 + 3]) <= spark__unsigned__u64__last . H79: src_index + 8 + 3 >= src__index__subtype__1__first . H80: src_index + 8 + 3 <= src__index__subtype__1__last . H81: src_index + 8 + 3 >= integer__base__first . H82: src_index + 8 + 3 <= integer__base__last . H83: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 3]), 24) >= interfaces__unsigned_64__first . H84: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 3]), 24) <= interfaces__unsigned_64__last . H85: 32 >= spark__unsigned__shift_count__first . H86: 32 <= spark__unsigned__shift_count__last . H87: element(src, [src_index + 8 + 4]) >= interfaces__unsigned_64__first . H88: element(src, [src_index + 8 + 4]) <= interfaces__unsigned_64__last . H89: element(src, [src_index + 8 + 4]) >= spark__unsigned__u64__first . H90: element(src, [src_index + 8 + 4]) <= spark__unsigned__u64__last . H91: src_index + 8 + 4 >= src__index__subtype__1__first . H92: src_index + 8 + 4 <= src__index__subtype__1__last . H93: src_index + 8 + 4 >= integer__base__first . H94: src_index + 8 + 4 <= integer__base__last . H95: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 4]), 32) >= interfaces__unsigned_64__first . H96: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 4]), 32) <= interfaces__unsigned_64__last . -> C1: 40 >= spark__unsigned__shift_count__first . C2: 40 <= spark__unsigned__shift_count__last . C3: element(src, [src_index + 8 + 5]) >= interfaces__unsigned_64__first . C4: element(src, [src_index + 8 + 5]) <= interfaces__unsigned_64__last . C5: element(src, [src_index + 8 + 5]) >= spark__unsigned__u64__first . C6: element(src, [src_index + 8 + 5]) <= spark__unsigned__u64__last . C7: src_index + 8 + 5 >= src__index__subtype__1__first . C8: src_index + 8 + 5 <= src__index__subtype__1__last . C9: src_index + 8 + 5 >= integer__base__first . C10: src_index + 8 + 5 <= integer__base__last . For path(s) from start to run-time check associated with statement of line 161: procedure_get_64_lsb_first_15. H1: src__index__subtype__1__first = 0 . H2: dst__index__subtype__1__first = 0 . H3: src_offset <= src__index__subtype__1__last . H4: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H5: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H6: src_offset + 7 <= src__index__subtype__1__last . H7: src_offset + dst__index__subtype__1__last * 8 <= natural__last . 
H8: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H9: src_offset >= natural__first . H10: src_offset <= natural__last . H11: 0 >= spark__crypto__word_count_t__first . H12: 0 <= spark__crypto__word_count_t__last . H13: src_offset >= natural__first . H14: src_offset <= natural__last . H15: src_offset >= src__index__subtype__1__first . H16: src_offset <= src__index__subtype__1__last . H17: src_offset + 1 >= src__index__subtype__1__first . H18: src_offset + 1 <= src__index__subtype__1__last . H19: src_offset + 2 >= src__index__subtype__1__first . H20: src_offset + 2 <= src__index__subtype__1__last . H21: src_offset + 3 >= src__index__subtype__1__first . H22: src_offset + 3 <= src__index__subtype__1__last . H23: src_offset + 4 >= src__index__subtype__1__first . H24: src_offset + 4 <= src__index__subtype__1__last . H25: src_offset + 5 >= src__index__subtype__1__first . H26: src_offset + 5 <= src__index__subtype__1__last . H27: src_offset + 6 >= src__index__subtype__1__first . H28: src_offset + 6 <= src__index__subtype__1__last . H29: src_offset + 7 >= src__index__subtype__1__first . H30: src_offset + 7 <= src__index__subtype__1__last . H31: 8 >= spark__unsigned__shift_count__first . H32: 8 <= spark__unsigned__shift_count__last . H33: element(src, [src_offset + 1]) >= interfaces__unsigned_64__first . H34: element(src, [src_offset + 1]) <= interfaces__unsigned_64__last . H35: element(src, [src_offset + 1]) >= spark__unsigned__u64__first . H36: element(src, [src_offset + 1]) <= spark__unsigned__u64__last . H37: src_offset + 1 >= src__index__subtype__1__first . H38: src_offset + 1 <= src__index__subtype__1__last . H39: src_offset + 1 >= integer__base__first . H40: src_offset + 1 <= integer__base__last . H41: element(src, [src_offset]) >= spark__unsigned__u64__first . H42: element(src, [src_offset]) <= spark__unsigned__u64__last . H43: src_offset >= src__index__subtype__1__first . H44: src_offset <= src__index__subtype__1__last . H45: 0 >= dst__index__subtype__1__first . H46: 0 <= dst__index__subtype__1__last . H47: spark__unsigned__shift_left_64(element(src, [ src_offset + 1]), 8) >= interfaces__unsigned_64__first . H48: spark__unsigned__shift_left_64(element(src, [ src_offset + 1]), 8) <= interfaces__unsigned_64__last . H49: 16 >= spark__unsigned__shift_count__first . H50: 16 <= spark__unsigned__shift_count__last . H51: element(src, [src_offset + 2]) >= interfaces__unsigned_64__first . H52: element(src, [src_offset + 2]) <= interfaces__unsigned_64__last . H53: element(src, [src_offset + 2]) >= spark__unsigned__u64__first . H54: element(src, [src_offset + 2]) <= spark__unsigned__u64__last . H55: src_offset + 2 >= src__index__subtype__1__first . H56: src_offset + 2 <= src__index__subtype__1__last . H57: src_offset + 2 >= integer__base__first . H58: src_offset + 2 <= integer__base__last . H59: spark__unsigned__shift_left_64(element(src, [ src_offset + 2]), 16) >= interfaces__unsigned_64__first . H60: spark__unsigned__shift_left_64(element(src, [ src_offset + 2]), 16) <= interfaces__unsigned_64__last . H61: 24 >= spark__unsigned__shift_count__first . H62: 24 <= spark__unsigned__shift_count__last . H63: element(src, [src_offset + 3]) >= interfaces__unsigned_64__first . H64: element(src, [src_offset + 3]) <= interfaces__unsigned_64__last . H65: element(src, [src_offset + 3]) >= spark__unsigned__u64__first . 
H66: element(src, [src_offset + 3]) <= spark__unsigned__u64__last . H67: src_offset + 3 >= src__index__subtype__1__first . H68: src_offset + 3 <= src__index__subtype__1__last . H69: src_offset + 3 >= integer__base__first . H70: src_offset + 3 <= integer__base__last . H71: spark__unsigned__shift_left_64(element(src, [ src_offset + 3]), 24) >= interfaces__unsigned_64__first . H72: spark__unsigned__shift_left_64(element(src, [ src_offset + 3]), 24) <= interfaces__unsigned_64__last . H73: 32 >= spark__unsigned__shift_count__first . H74: 32 <= spark__unsigned__shift_count__last . H75: element(src, [src_offset + 4]) >= interfaces__unsigned_64__first . H76: element(src, [src_offset + 4]) <= interfaces__unsigned_64__last . H77: element(src, [src_offset + 4]) >= spark__unsigned__u64__first . H78: element(src, [src_offset + 4]) <= spark__unsigned__u64__last . H79: src_offset + 4 >= src__index__subtype__1__first . H80: src_offset + 4 <= src__index__subtype__1__last . H81: src_offset + 4 >= integer__base__first . H82: src_offset + 4 <= integer__base__last . H83: spark__unsigned__shift_left_64(element(src, [ src_offset + 4]), 32) >= interfaces__unsigned_64__first . H84: spark__unsigned__shift_left_64(element(src, [ src_offset + 4]), 32) <= interfaces__unsigned_64__last . H85: 40 >= spark__unsigned__shift_count__first . H86: 40 <= spark__unsigned__shift_count__last . H87: element(src, [src_offset + 5]) >= interfaces__unsigned_64__first . H88: element(src, [src_offset + 5]) <= interfaces__unsigned_64__last . H89: element(src, [src_offset + 5]) >= spark__unsigned__u64__first . H90: element(src, [src_offset + 5]) <= spark__unsigned__u64__last . H91: src_offset + 5 >= src__index__subtype__1__first . H92: src_offset + 5 <= src__index__subtype__1__last . H93: src_offset + 5 >= integer__base__first . H94: src_offset + 5 <= integer__base__last . H95: spark__unsigned__shift_left_64(element(src, [ src_offset + 5]), 40) >= interfaces__unsigned_64__first . H96: spark__unsigned__shift_left_64(element(src, [ src_offset + 5]), 40) <= interfaces__unsigned_64__last . -> C1: 48 >= spark__unsigned__shift_count__first . C2: 48 <= spark__unsigned__shift_count__last . C3: element(src, [src_offset + 6]) >= interfaces__unsigned_64__first . C4: element(src, [src_offset + 6]) <= interfaces__unsigned_64__last . C5: element(src, [src_offset + 6]) >= spark__unsigned__u64__first . C6: element(src, [src_offset + 6]) <= spark__unsigned__u64__last . C7: src_offset + 6 >= src__index__subtype__1__first . C8: src_offset + 6 <= src__index__subtype__1__last . C9: src_offset + 6 >= integer__base__first . C10: src_offset + 6 <= integer__base__last . For path(s) from assertion of line 172 to run-time check associated with statement of line 161: procedure_get_64_lsb_first_16. H1: for_all(i_: integer, ((i_ >= dst__index__subtype__1__first) and ( i_ <= dst_index)) -> ((element(dst, [i_]) >= spark__unsigned__u64__first) and (element(dst, [ i_]) <= spark__unsigned__u64__last))) . H2: dst_index >= dst__index__subtype__1__first . H3: dst_index <= dst__index__subtype__1__last . H4: dst__index__subtype__1__last <= spark__crypto__word_count_t__last . H5: src_index = src_offset + dst_index * 8 . H6: src_index >= src_offset . H7: src_index <= src_offset + dst__index__subtype__1__last * 8 . H8: (dst_index <> dst__index__subtype__1__last) -> ( dst_index + 1 <= natural__last) . H9: (dst_index <> dst__index__subtype__1__last) -> ( src_index + 8 <= natural__last) . 
H10: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H11: src_offset >= natural__first . H12: src_offset <= natural__last . H13: src__index__subtype__1__first = 0 . H14: dst__index__subtype__1__first = 0 . H15: src_offset <= src__index__subtype__1__last . H16: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H17: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H18: src_offset + 7 <= src__index__subtype__1__last . H19: src_offset + dst__index__subtype__1__last * 8 <= natural__last . H20: not (dst_index = dst__index__subtype__1__last) . H21: dst_index + 1 <= natural__last . H22: dst_index + 1 >= spark__crypto__word_count_t__first . H23: dst_index + 1 <= spark__crypto__word_count_t__last . H24: src_index + 8 <= natural__last . H25: src_index + 8 >= natural__first . H26: src_index + 8 <= natural__last . H27: src_index + 8 >= src__index__subtype__1__first . H28: src_index + 8 <= src__index__subtype__1__last . H29: src_index + 8 + 1 >= src__index__subtype__1__first . H30: src_index + 8 + 1 <= src__index__subtype__1__last . H31: src_index + 8 + 2 >= src__index__subtype__1__first . H32: src_index + 8 + 2 <= src__index__subtype__1__last . H33: src_index + 8 + 3 >= src__index__subtype__1__first . H34: src_index + 8 + 3 <= src__index__subtype__1__last . H35: src_index + 8 + 4 >= src__index__subtype__1__first . H36: src_index + 8 + 4 <= src__index__subtype__1__last . H37: src_index + 8 + 5 >= src__index__subtype__1__first . H38: src_index + 8 + 5 <= src__index__subtype__1__last . H39: src_index + 8 + 6 >= src__index__subtype__1__first . H40: src_index + 8 + 6 <= src__index__subtype__1__last . H41: src_index + 8 + 7 >= src__index__subtype__1__first . H42: src_index + 8 + 7 <= src__index__subtype__1__last . H43: 8 >= spark__unsigned__shift_count__first . H44: 8 <= spark__unsigned__shift_count__last . H45: element(src, [src_index + 8 + 1]) >= interfaces__unsigned_64__first . H46: element(src, [src_index + 8 + 1]) <= interfaces__unsigned_64__last . H47: element(src, [src_index + 8 + 1]) >= spark__unsigned__u64__first . H48: element(src, [src_index + 8 + 1]) <= spark__unsigned__u64__last . H49: src_index + 8 + 1 >= src__index__subtype__1__first . H50: src_index + 8 + 1 <= src__index__subtype__1__last . H51: src_index + 8 + 1 >= integer__base__first . H52: src_index + 8 + 1 <= integer__base__last . H53: element(src, [src_index + 8]) >= spark__unsigned__u64__first . H54: element(src, [src_index + 8]) <= spark__unsigned__u64__last . H55: src_index + 8 >= src__index__subtype__1__first . H56: src_index + 8 <= src__index__subtype__1__last . H57: dst_index + 1 >= dst__index__subtype__1__first . H58: dst_index + 1 <= dst__index__subtype__1__last . H59: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 1]), 8) >= interfaces__unsigned_64__first . H60: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 1]), 8) <= interfaces__unsigned_64__last . H61: 16 >= spark__unsigned__shift_count__first . H62: 16 <= spark__unsigned__shift_count__last . H63: element(src, [src_index + 8 + 2]) >= interfaces__unsigned_64__first . H64: element(src, [src_index + 8 + 2]) <= interfaces__unsigned_64__last . H65: element(src, [src_index + 8 + 2]) >= spark__unsigned__u64__first . H66: element(src, [src_index + 8 + 2]) <= spark__unsigned__u64__last . 
H67: src_index + 8 + 2 >= src__index__subtype__1__first . H68: src_index + 8 + 2 <= src__index__subtype__1__last . H69: src_index + 8 + 2 >= integer__base__first . H70: src_index + 8 + 2 <= integer__base__last . H71: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 2]), 16) >= interfaces__unsigned_64__first . H72: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 2]), 16) <= interfaces__unsigned_64__last . H73: 24 >= spark__unsigned__shift_count__first . H74: 24 <= spark__unsigned__shift_count__last . H75: element(src, [src_index + 8 + 3]) >= interfaces__unsigned_64__first . H76: element(src, [src_index + 8 + 3]) <= interfaces__unsigned_64__last . H77: element(src, [src_index + 8 + 3]) >= spark__unsigned__u64__first . H78: element(src, [src_index + 8 + 3]) <= spark__unsigned__u64__last . H79: src_index + 8 + 3 >= src__index__subtype__1__first . H80: src_index + 8 + 3 <= src__index__subtype__1__last . H81: src_index + 8 + 3 >= integer__base__first . H82: src_index + 8 + 3 <= integer__base__last . H83: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 3]), 24) >= interfaces__unsigned_64__first . H84: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 3]), 24) <= interfaces__unsigned_64__last . H85: 32 >= spark__unsigned__shift_count__first . H86: 32 <= spark__unsigned__shift_count__last . H87: element(src, [src_index + 8 + 4]) >= interfaces__unsigned_64__first . H88: element(src, [src_index + 8 + 4]) <= interfaces__unsigned_64__last . H89: element(src, [src_index + 8 + 4]) >= spark__unsigned__u64__first . H90: element(src, [src_index + 8 + 4]) <= spark__unsigned__u64__last . H91: src_index + 8 + 4 >= src__index__subtype__1__first . H92: src_index + 8 + 4 <= src__index__subtype__1__last . H93: src_index + 8 + 4 >= integer__base__first . H94: src_index + 8 + 4 <= integer__base__last . H95: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 4]), 32) >= interfaces__unsigned_64__first . H96: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 4]), 32) <= interfaces__unsigned_64__last . H97: 40 >= spark__unsigned__shift_count__first . H98: 40 <= spark__unsigned__shift_count__last . H99: element(src, [src_index + 8 + 5]) >= interfaces__unsigned_64__first . H100: element(src, [src_index + 8 + 5]) <= interfaces__unsigned_64__last . H101: element(src, [src_index + 8 + 5]) >= spark__unsigned__u64__first . H102: element(src, [src_index + 8 + 5]) <= spark__unsigned__u64__last . H103: src_index + 8 + 5 >= src__index__subtype__1__first . H104: src_index + 8 + 5 <= src__index__subtype__1__last . H105: src_index + 8 + 5 >= integer__base__first . H106: src_index + 8 + 5 <= integer__base__last . H107: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 5]), 40) >= interfaces__unsigned_64__first . H108: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 5]), 40) <= interfaces__unsigned_64__last . -> C1: 48 >= spark__unsigned__shift_count__first . C2: 48 <= spark__unsigned__shift_count__last . C3: element(src, [src_index + 8 + 6]) >= interfaces__unsigned_64__first . C4: element(src, [src_index + 8 + 6]) <= interfaces__unsigned_64__last . C5: element(src, [src_index + 8 + 6]) >= spark__unsigned__u64__first . C6: element(src, [src_index + 8 + 6]) <= spark__unsigned__u64__last . C7: src_index + 8 + 6 >= src__index__subtype__1__first . C8: src_index + 8 + 6 <= src__index__subtype__1__last . C9: src_index + 8 + 6 >= integer__base__first . C10: src_index + 8 + 6 <= integer__base__last . 
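The run-time check VCs above (shift counts 8 through 56, plus the successive index checks on src_offset .. src_offset + 7 and src_index + 8 .. src_index + 8 + 7) all stem from one statement, reported as source line 161, that packs eight consecutive bytes of src into a single 64-bit word, least significant byte first. The standalone Ada sketch below shows the shape of such a statement only; the type names, array bounds and initial values are assumptions made for illustration and are not the shipped Get_64_LSB_First source.

   with Interfaces; use Interfaces;

   procedure Get_64_LSB_First_Sketch is
      --  Hypothetical declarations for this sketch; the real code uses the
      --  SPARK crypto library's own byte/word array types.
      type Byte_Seq is array (Natural range <>) of Unsigned_8;
      type Word_Seq is array (Natural range <>) of Unsigned_64;

      Src        : constant Byte_Seq (0 .. 15) := (others => 16#AB#);
      Dst        : Word_Seq (0 .. 1);
      Src_Offset : constant Natural := 0;
   begin
      --  For each destination word, combine eight source bytes, least
      --  significant byte first, using left shifts of 8, 16, 24, 32, 40,
      --  48 and 56 bits, the same shift counts checked by the VCs above.
      for Dst_Index in Dst'Range loop
         declare
            Src_Index : constant Natural := Src_Offset + Dst_Index * 8;
         begin
            Dst (Dst_Index) :=
              Unsigned_64 (Src (Src_Index))
              or Shift_Left (Unsigned_64 (Src (Src_Index + 1)), 8)
              or Shift_Left (Unsigned_64 (Src (Src_Index + 2)), 16)
              or Shift_Left (Unsigned_64 (Src (Src_Index + 3)), 24)
              or Shift_Left (Unsigned_64 (Src (Src_Index + 4)), 32)
              or Shift_Left (Unsigned_64 (Src (Src_Index + 5)), 40)
              or Shift_Left (Unsigned_64 (Src (Src_Index + 6)), 48)
              or Shift_Left (Unsigned_64 (Src (Src_Index + 7)), 56);
         end;
      end loop;
   end Get_64_LSB_First_Sketch;

Each index expression in the assignment gives rise to the paired lower/upper bound conclusions seen in the VCs, and each shift count gives rise to the pair of spark__unsigned__shift_count range conclusions.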
For path(s) from start to run-time check associated with statement of line 161: procedure_get_64_lsb_first_17. H1: src__index__subtype__1__first = 0 . H2: dst__index__subtype__1__first = 0 . H3: src_offset <= src__index__subtype__1__last . H4: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H5: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H6: src_offset + 7 <= src__index__subtype__1__last . H7: src_offset + dst__index__subtype__1__last * 8 <= natural__last . H8: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H9: src_offset >= natural__first . H10: src_offset <= natural__last . H11: 0 >= spark__crypto__word_count_t__first . H12: 0 <= spark__crypto__word_count_t__last . H13: src_offset >= natural__first . H14: src_offset <= natural__last . H15: src_offset >= src__index__subtype__1__first . H16: src_offset <= src__index__subtype__1__last . H17: src_offset + 1 >= src__index__subtype__1__first . H18: src_offset + 1 <= src__index__subtype__1__last . H19: src_offset + 2 >= src__index__subtype__1__first . H20: src_offset + 2 <= src__index__subtype__1__last . H21: src_offset + 3 >= src__index__subtype__1__first . H22: src_offset + 3 <= src__index__subtype__1__last . H23: src_offset + 4 >= src__index__subtype__1__first . H24: src_offset + 4 <= src__index__subtype__1__last . H25: src_offset + 5 >= src__index__subtype__1__first . H26: src_offset + 5 <= src__index__subtype__1__last . H27: src_offset + 6 >= src__index__subtype__1__first . H28: src_offset + 6 <= src__index__subtype__1__last . H29: src_offset + 7 >= src__index__subtype__1__first . H30: src_offset + 7 <= src__index__subtype__1__last . H31: 8 >= spark__unsigned__shift_count__first . H32: 8 <= spark__unsigned__shift_count__last . H33: element(src, [src_offset + 1]) >= interfaces__unsigned_64__first . H34: element(src, [src_offset + 1]) <= interfaces__unsigned_64__last . H35: element(src, [src_offset + 1]) >= spark__unsigned__u64__first . H36: element(src, [src_offset + 1]) <= spark__unsigned__u64__last . H37: src_offset + 1 >= src__index__subtype__1__first . H38: src_offset + 1 <= src__index__subtype__1__last . H39: src_offset + 1 >= integer__base__first . H40: src_offset + 1 <= integer__base__last . H41: element(src, [src_offset]) >= spark__unsigned__u64__first . H42: element(src, [src_offset]) <= spark__unsigned__u64__last . H43: src_offset >= src__index__subtype__1__first . H44: src_offset <= src__index__subtype__1__last . H45: 0 >= dst__index__subtype__1__first . H46: 0 <= dst__index__subtype__1__last . H47: spark__unsigned__shift_left_64(element(src, [ src_offset + 1]), 8) >= interfaces__unsigned_64__first . H48: spark__unsigned__shift_left_64(element(src, [ src_offset + 1]), 8) <= interfaces__unsigned_64__last . H49: 16 >= spark__unsigned__shift_count__first . H50: 16 <= spark__unsigned__shift_count__last . H51: element(src, [src_offset + 2]) >= interfaces__unsigned_64__first . H52: element(src, [src_offset + 2]) <= interfaces__unsigned_64__last . H53: element(src, [src_offset + 2]) >= spark__unsigned__u64__first . H54: element(src, [src_offset + 2]) <= spark__unsigned__u64__last . H55: src_offset + 2 >= src__index__subtype__1__first . H56: src_offset + 2 <= src__index__subtype__1__last . H57: src_offset + 2 >= integer__base__first . 
H58: src_offset + 2 <= integer__base__last . H59: spark__unsigned__shift_left_64(element(src, [ src_offset + 2]), 16) >= interfaces__unsigned_64__first . H60: spark__unsigned__shift_left_64(element(src, [ src_offset + 2]), 16) <= interfaces__unsigned_64__last . H61: 24 >= spark__unsigned__shift_count__first . H62: 24 <= spark__unsigned__shift_count__last . H63: element(src, [src_offset + 3]) >= interfaces__unsigned_64__first . H64: element(src, [src_offset + 3]) <= interfaces__unsigned_64__last . H65: element(src, [src_offset + 3]) >= spark__unsigned__u64__first . H66: element(src, [src_offset + 3]) <= spark__unsigned__u64__last . H67: src_offset + 3 >= src__index__subtype__1__first . H68: src_offset + 3 <= src__index__subtype__1__last . H69: src_offset + 3 >= integer__base__first . H70: src_offset + 3 <= integer__base__last . H71: spark__unsigned__shift_left_64(element(src, [ src_offset + 3]), 24) >= interfaces__unsigned_64__first . H72: spark__unsigned__shift_left_64(element(src, [ src_offset + 3]), 24) <= interfaces__unsigned_64__last . H73: 32 >= spark__unsigned__shift_count__first . H74: 32 <= spark__unsigned__shift_count__last . H75: element(src, [src_offset + 4]) >= interfaces__unsigned_64__first . H76: element(src, [src_offset + 4]) <= interfaces__unsigned_64__last . H77: element(src, [src_offset + 4]) >= spark__unsigned__u64__first . H78: element(src, [src_offset + 4]) <= spark__unsigned__u64__last . H79: src_offset + 4 >= src__index__subtype__1__first . H80: src_offset + 4 <= src__index__subtype__1__last . H81: src_offset + 4 >= integer__base__first . H82: src_offset + 4 <= integer__base__last . H83: spark__unsigned__shift_left_64(element(src, [ src_offset + 4]), 32) >= interfaces__unsigned_64__first . H84: spark__unsigned__shift_left_64(element(src, [ src_offset + 4]), 32) <= interfaces__unsigned_64__last . H85: 40 >= spark__unsigned__shift_count__first . H86: 40 <= spark__unsigned__shift_count__last . H87: element(src, [src_offset + 5]) >= interfaces__unsigned_64__first . H88: element(src, [src_offset + 5]) <= interfaces__unsigned_64__last . H89: element(src, [src_offset + 5]) >= spark__unsigned__u64__first . H90: element(src, [src_offset + 5]) <= spark__unsigned__u64__last . H91: src_offset + 5 >= src__index__subtype__1__first . H92: src_offset + 5 <= src__index__subtype__1__last . H93: src_offset + 5 >= integer__base__first . H94: src_offset + 5 <= integer__base__last . H95: spark__unsigned__shift_left_64(element(src, [ src_offset + 5]), 40) >= interfaces__unsigned_64__first . H96: spark__unsigned__shift_left_64(element(src, [ src_offset + 5]), 40) <= interfaces__unsigned_64__last . H97: 48 >= spark__unsigned__shift_count__first . H98: 48 <= spark__unsigned__shift_count__last . H99: element(src, [src_offset + 6]) >= interfaces__unsigned_64__first . H100: element(src, [src_offset + 6]) <= interfaces__unsigned_64__last . H101: element(src, [src_offset + 6]) >= spark__unsigned__u64__first . H102: element(src, [src_offset + 6]) <= spark__unsigned__u64__last . H103: src_offset + 6 >= src__index__subtype__1__first . H104: src_offset + 6 <= src__index__subtype__1__last . H105: src_offset + 6 >= integer__base__first . H106: src_offset + 6 <= integer__base__last . H107: spark__unsigned__shift_left_64(element(src, [ src_offset + 6]), 48) >= interfaces__unsigned_64__first . H108: spark__unsigned__shift_left_64(element(src, [ src_offset + 6]), 48) <= interfaces__unsigned_64__last . -> C1: 56 >= spark__unsigned__shift_count__first . C2: 56 <= spark__unsigned__shift_count__last . 
C3: element(src, [src_offset + 7]) >= interfaces__unsigned_64__first . C4: element(src, [src_offset + 7]) <= interfaces__unsigned_64__last . C5: element(src, [src_offset + 7]) >= spark__unsigned__u64__first . C6: element(src, [src_offset + 7]) <= spark__unsigned__u64__last . C7: src_offset + 7 >= src__index__subtype__1__first . C8: src_offset + 7 <= src__index__subtype__1__last . C9: src_offset + 7 >= integer__base__first . C10: src_offset + 7 <= integer__base__last . For path(s) from assertion of line 172 to run-time check associated with statement of line 161: procedure_get_64_lsb_first_18. H1: for_all(i_: integer, ((i_ >= dst__index__subtype__1__first) and ( i_ <= dst_index)) -> ((element(dst, [i_]) >= spark__unsigned__u64__first) and (element(dst, [ i_]) <= spark__unsigned__u64__last))) . H2: dst_index >= dst__index__subtype__1__first . H3: dst_index <= dst__index__subtype__1__last . H4: dst__index__subtype__1__last <= spark__crypto__word_count_t__last . H5: src_index = src_offset + dst_index * 8 . H6: src_index >= src_offset . H7: src_index <= src_offset + dst__index__subtype__1__last * 8 . H8: (dst_index <> dst__index__subtype__1__last) -> ( dst_index + 1 <= natural__last) . H9: (dst_index <> dst__index__subtype__1__last) -> ( src_index + 8 <= natural__last) . H10: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H11: src_offset >= natural__first . H12: src_offset <= natural__last . H13: src__index__subtype__1__first = 0 . H14: dst__index__subtype__1__first = 0 . H15: src_offset <= src__index__subtype__1__last . H16: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H17: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H18: src_offset + 7 <= src__index__subtype__1__last . H19: src_offset + dst__index__subtype__1__last * 8 <= natural__last . H20: not (dst_index = dst__index__subtype__1__last) . H21: dst_index + 1 <= natural__last . H22: dst_index + 1 >= spark__crypto__word_count_t__first . H23: dst_index + 1 <= spark__crypto__word_count_t__last . H24: src_index + 8 <= natural__last . H25: src_index + 8 >= natural__first . H26: src_index + 8 <= natural__last . H27: src_index + 8 >= src__index__subtype__1__first . H28: src_index + 8 <= src__index__subtype__1__last . H29: src_index + 8 + 1 >= src__index__subtype__1__first . H30: src_index + 8 + 1 <= src__index__subtype__1__last . H31: src_index + 8 + 2 >= src__index__subtype__1__first . H32: src_index + 8 + 2 <= src__index__subtype__1__last . H33: src_index + 8 + 3 >= src__index__subtype__1__first . H34: src_index + 8 + 3 <= src__index__subtype__1__last . H35: src_index + 8 + 4 >= src__index__subtype__1__first . H36: src_index + 8 + 4 <= src__index__subtype__1__last . H37: src_index + 8 + 5 >= src__index__subtype__1__first . H38: src_index + 8 + 5 <= src__index__subtype__1__last . H39: src_index + 8 + 6 >= src__index__subtype__1__first . H40: src_index + 8 + 6 <= src__index__subtype__1__last . H41: src_index + 8 + 7 >= src__index__subtype__1__first . H42: src_index + 8 + 7 <= src__index__subtype__1__last . H43: 8 >= spark__unsigned__shift_count__first . H44: 8 <= spark__unsigned__shift_count__last . H45: element(src, [src_index + 8 + 1]) >= interfaces__unsigned_64__first . H46: element(src, [src_index + 8 + 1]) <= interfaces__unsigned_64__last . 
H47: element(src, [src_index + 8 + 1]) >= spark__unsigned__u64__first . H48: element(src, [src_index + 8 + 1]) <= spark__unsigned__u64__last . H49: src_index + 8 + 1 >= src__index__subtype__1__first . H50: src_index + 8 + 1 <= src__index__subtype__1__last . H51: src_index + 8 + 1 >= integer__base__first . H52: src_index + 8 + 1 <= integer__base__last . H53: element(src, [src_index + 8]) >= spark__unsigned__u64__first . H54: element(src, [src_index + 8]) <= spark__unsigned__u64__last . H55: src_index + 8 >= src__index__subtype__1__first . H56: src_index + 8 <= src__index__subtype__1__last . H57: dst_index + 1 >= dst__index__subtype__1__first . H58: dst_index + 1 <= dst__index__subtype__1__last . H59: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 1]), 8) >= interfaces__unsigned_64__first . H60: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 1]), 8) <= interfaces__unsigned_64__last . H61: 16 >= spark__unsigned__shift_count__first . H62: 16 <= spark__unsigned__shift_count__last . H63: element(src, [src_index + 8 + 2]) >= interfaces__unsigned_64__first . H64: element(src, [src_index + 8 + 2]) <= interfaces__unsigned_64__last . H65: element(src, [src_index + 8 + 2]) >= spark__unsigned__u64__first . H66: element(src, [src_index + 8 + 2]) <= spark__unsigned__u64__last . H67: src_index + 8 + 2 >= src__index__subtype__1__first . H68: src_index + 8 + 2 <= src__index__subtype__1__last . H69: src_index + 8 + 2 >= integer__base__first . H70: src_index + 8 + 2 <= integer__base__last . H71: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 2]), 16) >= interfaces__unsigned_64__first . H72: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 2]), 16) <= interfaces__unsigned_64__last . H73: 24 >= spark__unsigned__shift_count__first . H74: 24 <= spark__unsigned__shift_count__last . H75: element(src, [src_index + 8 + 3]) >= interfaces__unsigned_64__first . H76: element(src, [src_index + 8 + 3]) <= interfaces__unsigned_64__last . H77: element(src, [src_index + 8 + 3]) >= spark__unsigned__u64__first . H78: element(src, [src_index + 8 + 3]) <= spark__unsigned__u64__last . H79: src_index + 8 + 3 >= src__index__subtype__1__first . H80: src_index + 8 + 3 <= src__index__subtype__1__last . H81: src_index + 8 + 3 >= integer__base__first . H82: src_index + 8 + 3 <= integer__base__last . H83: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 3]), 24) >= interfaces__unsigned_64__first . H84: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 3]), 24) <= interfaces__unsigned_64__last . H85: 32 >= spark__unsigned__shift_count__first . H86: 32 <= spark__unsigned__shift_count__last . H87: element(src, [src_index + 8 + 4]) >= interfaces__unsigned_64__first . H88: element(src, [src_index + 8 + 4]) <= interfaces__unsigned_64__last . H89: element(src, [src_index + 8 + 4]) >= spark__unsigned__u64__first . H90: element(src, [src_index + 8 + 4]) <= spark__unsigned__u64__last . H91: src_index + 8 + 4 >= src__index__subtype__1__first . H92: src_index + 8 + 4 <= src__index__subtype__1__last . H93: src_index + 8 + 4 >= integer__base__first . H94: src_index + 8 + 4 <= integer__base__last . H95: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 4]), 32) >= interfaces__unsigned_64__first . H96: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 4]), 32) <= interfaces__unsigned_64__last . H97: 40 >= spark__unsigned__shift_count__first . H98: 40 <= spark__unsigned__shift_count__last . 
H99: element(src, [src_index + 8 + 5]) >= interfaces__unsigned_64__first . H100: element(src, [src_index + 8 + 5]) <= interfaces__unsigned_64__last . H101: element(src, [src_index + 8 + 5]) >= spark__unsigned__u64__first . H102: element(src, [src_index + 8 + 5]) <= spark__unsigned__u64__last . H103: src_index + 8 + 5 >= src__index__subtype__1__first . H104: src_index + 8 + 5 <= src__index__subtype__1__last . H105: src_index + 8 + 5 >= integer__base__first . H106: src_index + 8 + 5 <= integer__base__last . H107: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 5]), 40) >= interfaces__unsigned_64__first . H108: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 5]), 40) <= interfaces__unsigned_64__last . H109: 48 >= spark__unsigned__shift_count__first . H110: 48 <= spark__unsigned__shift_count__last . H111: element(src, [src_index + 8 + 6]) >= interfaces__unsigned_64__first . H112: element(src, [src_index + 8 + 6]) <= interfaces__unsigned_64__last . H113: element(src, [src_index + 8 + 6]) >= spark__unsigned__u64__first . H114: element(src, [src_index + 8 + 6]) <= spark__unsigned__u64__last . H115: src_index + 8 + 6 >= src__index__subtype__1__first . H116: src_index + 8 + 6 <= src__index__subtype__1__last . H117: src_index + 8 + 6 >= integer__base__first . H118: src_index + 8 + 6 <= integer__base__last . H119: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 6]), 48) >= interfaces__unsigned_64__first . H120: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 6]), 48) <= interfaces__unsigned_64__last . -> C1: 56 >= spark__unsigned__shift_count__first . C2: 56 <= spark__unsigned__shift_count__last . C3: element(src, [src_index + 8 + 7]) >= interfaces__unsigned_64__first . C4: element(src, [src_index + 8 + 7]) <= interfaces__unsigned_64__last . C5: element(src, [src_index + 8 + 7]) >= spark__unsigned__u64__first . C6: element(src, [src_index + 8 + 7]) <= spark__unsigned__u64__last . C7: src_index + 8 + 7 >= src__index__subtype__1__first . C8: src_index + 8 + 7 <= src__index__subtype__1__last . C9: src_index + 8 + 7 >= integer__base__first . C10: src_index + 8 + 7 <= integer__base__last . For path(s) from start to run-time check associated with statement of line 161: procedure_get_64_lsb_first_19. H1: src__index__subtype__1__first = 0 . H2: dst__index__subtype__1__first = 0 . H3: src_offset <= src__index__subtype__1__last . H4: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H5: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H6: src_offset + 7 <= src__index__subtype__1__last . H7: src_offset + dst__index__subtype__1__last * 8 <= natural__last . H8: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H9: src_offset >= natural__first . H10: src_offset <= natural__last . H11: 0 >= spark__crypto__word_count_t__first . H12: 0 <= spark__crypto__word_count_t__last . H13: src_offset >= natural__first . H14: src_offset <= natural__last . H15: src_offset >= src__index__subtype__1__first . H16: src_offset <= src__index__subtype__1__last . H17: src_offset + 1 >= src__index__subtype__1__first . H18: src_offset + 1 <= src__index__subtype__1__last . H19: src_offset + 2 >= src__index__subtype__1__first . H20: src_offset + 2 <= src__index__subtype__1__last . H21: src_offset + 3 >= src__index__subtype__1__first . 
H22: src_offset + 3 <= src__index__subtype__1__last . H23: src_offset + 4 >= src__index__subtype__1__first . H24: src_offset + 4 <= src__index__subtype__1__last . H25: src_offset + 5 >= src__index__subtype__1__first . H26: src_offset + 5 <= src__index__subtype__1__last . H27: src_offset + 6 >= src__index__subtype__1__first . H28: src_offset + 6 <= src__index__subtype__1__last . H29: src_offset + 7 >= src__index__subtype__1__first . H30: src_offset + 7 <= src__index__subtype__1__last . H31: 8 >= spark__unsigned__shift_count__first . H32: 8 <= spark__unsigned__shift_count__last . H33: element(src, [src_offset + 1]) >= interfaces__unsigned_64__first . H34: element(src, [src_offset + 1]) <= interfaces__unsigned_64__last . H35: element(src, [src_offset + 1]) >= spark__unsigned__u64__first . H36: element(src, [src_offset + 1]) <= spark__unsigned__u64__last . H37: src_offset + 1 >= src__index__subtype__1__first . H38: src_offset + 1 <= src__index__subtype__1__last . H39: src_offset + 1 >= integer__base__first . H40: src_offset + 1 <= integer__base__last . H41: element(src, [src_offset]) >= spark__unsigned__u64__first . H42: element(src, [src_offset]) <= spark__unsigned__u64__last . H43: src_offset >= src__index__subtype__1__first . H44: src_offset <= src__index__subtype__1__last . H45: 0 >= dst__index__subtype__1__first . H46: 0 <= dst__index__subtype__1__last . H47: spark__unsigned__shift_left_64(element(src, [ src_offset + 1]), 8) >= interfaces__unsigned_64__first . H48: spark__unsigned__shift_left_64(element(src, [ src_offset + 1]), 8) <= interfaces__unsigned_64__last . H49: 16 >= spark__unsigned__shift_count__first . H50: 16 <= spark__unsigned__shift_count__last . H51: element(src, [src_offset + 2]) >= interfaces__unsigned_64__first . H52: element(src, [src_offset + 2]) <= interfaces__unsigned_64__last . H53: element(src, [src_offset + 2]) >= spark__unsigned__u64__first . H54: element(src, [src_offset + 2]) <= spark__unsigned__u64__last . H55: src_offset + 2 >= src__index__subtype__1__first . H56: src_offset + 2 <= src__index__subtype__1__last . H57: src_offset + 2 >= integer__base__first . H58: src_offset + 2 <= integer__base__last . H59: spark__unsigned__shift_left_64(element(src, [ src_offset + 2]), 16) >= interfaces__unsigned_64__first . H60: spark__unsigned__shift_left_64(element(src, [ src_offset + 2]), 16) <= interfaces__unsigned_64__last . H61: 24 >= spark__unsigned__shift_count__first . H62: 24 <= spark__unsigned__shift_count__last . H63: element(src, [src_offset + 3]) >= interfaces__unsigned_64__first . H64: element(src, [src_offset + 3]) <= interfaces__unsigned_64__last . H65: element(src, [src_offset + 3]) >= spark__unsigned__u64__first . H66: element(src, [src_offset + 3]) <= spark__unsigned__u64__last . H67: src_offset + 3 >= src__index__subtype__1__first . H68: src_offset + 3 <= src__index__subtype__1__last . H69: src_offset + 3 >= integer__base__first . H70: src_offset + 3 <= integer__base__last . H71: spark__unsigned__shift_left_64(element(src, [ src_offset + 3]), 24) >= interfaces__unsigned_64__first . H72: spark__unsigned__shift_left_64(element(src, [ src_offset + 3]), 24) <= interfaces__unsigned_64__last . H73: 32 >= spark__unsigned__shift_count__first . H74: 32 <= spark__unsigned__shift_count__last . H75: element(src, [src_offset + 4]) >= interfaces__unsigned_64__first . H76: element(src, [src_offset + 4]) <= interfaces__unsigned_64__last . H77: element(src, [src_offset + 4]) >= spark__unsigned__u64__first . 
H78: element(src, [src_offset + 4]) <= spark__unsigned__u64__last . H79: src_offset + 4 >= src__index__subtype__1__first . H80: src_offset + 4 <= src__index__subtype__1__last . H81: src_offset + 4 >= integer__base__first . H82: src_offset + 4 <= integer__base__last . H83: spark__unsigned__shift_left_64(element(src, [ src_offset + 4]), 32) >= interfaces__unsigned_64__first . H84: spark__unsigned__shift_left_64(element(src, [ src_offset + 4]), 32) <= interfaces__unsigned_64__last . H85: 40 >= spark__unsigned__shift_count__first . H86: 40 <= spark__unsigned__shift_count__last . H87: element(src, [src_offset + 5]) >= interfaces__unsigned_64__first . H88: element(src, [src_offset + 5]) <= interfaces__unsigned_64__last . H89: element(src, [src_offset + 5]) >= spark__unsigned__u64__first . H90: element(src, [src_offset + 5]) <= spark__unsigned__u64__last . H91: src_offset + 5 >= src__index__subtype__1__first . H92: src_offset + 5 <= src__index__subtype__1__last . H93: src_offset + 5 >= integer__base__first . H94: src_offset + 5 <= integer__base__last . H95: spark__unsigned__shift_left_64(element(src, [ src_offset + 5]), 40) >= interfaces__unsigned_64__first . H96: spark__unsigned__shift_left_64(element(src, [ src_offset + 5]), 40) <= interfaces__unsigned_64__last . H97: 48 >= spark__unsigned__shift_count__first . H98: 48 <= spark__unsigned__shift_count__last . H99: element(src, [src_offset + 6]) >= interfaces__unsigned_64__first . H100: element(src, [src_offset + 6]) <= interfaces__unsigned_64__last . H101: element(src, [src_offset + 6]) >= spark__unsigned__u64__first . H102: element(src, [src_offset + 6]) <= spark__unsigned__u64__last . H103: src_offset + 6 >= src__index__subtype__1__first . H104: src_offset + 6 <= src__index__subtype__1__last . H105: src_offset + 6 >= integer__base__first . H106: src_offset + 6 <= integer__base__last . H107: spark__unsigned__shift_left_64(element(src, [ src_offset + 6]), 48) >= interfaces__unsigned_64__first . H108: spark__unsigned__shift_left_64(element(src, [ src_offset + 6]), 48) <= interfaces__unsigned_64__last . H109: 56 >= spark__unsigned__shift_count__first . H110: 56 <= spark__unsigned__shift_count__last . H111: element(src, [src_offset + 7]) >= interfaces__unsigned_64__first . H112: element(src, [src_offset + 7]) <= interfaces__unsigned_64__last . H113: element(src, [src_offset + 7]) >= spark__unsigned__u64__first . H114: element(src, [src_offset + 7]) <= spark__unsigned__u64__last . H115: src_offset + 7 >= src__index__subtype__1__first . H116: src_offset + 7 <= src__index__subtype__1__last . H117: src_offset + 7 >= integer__base__first . H118: src_offset + 7 <= integer__base__last . H119: spark__unsigned__shift_left_64(element(src, [ src_offset + 7]), 56) >= interfaces__unsigned_64__first . H120: spark__unsigned__shift_left_64(element(src, [ src_offset + 7]), 56) <= interfaces__unsigned_64__last . 
-> C1: (((((((element(src, [src_offset]) + spark__unsigned__shift_left_64(element(src, [ src_offset + 1]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 2]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 3]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 4]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 5]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 6]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 7]), 56)) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (((((((element(src, [src_offset]) + spark__unsigned__shift_left_64(element(src, [ src_offset + 1]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 2]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 3]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 4]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 5]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 6]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 7]), 56)) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . For path(s) from assertion of line 172 to run-time check associated with statement of line 161: procedure_get_64_lsb_first_20. H1: for_all(i_: integer, ((i_ >= dst__index__subtype__1__first) and ( i_ <= dst_index)) -> ((element(dst, [i_]) >= spark__unsigned__u64__first) and (element(dst, [ i_]) <= spark__unsigned__u64__last))) . H2: dst_index >= dst__index__subtype__1__first . H3: dst_index <= dst__index__subtype__1__last . H4: dst__index__subtype__1__last <= spark__crypto__word_count_t__last . H5: src_index = src_offset + dst_index * 8 . H6: src_index >= src_offset . H7: src_index <= src_offset + dst__index__subtype__1__last * 8 . H8: (dst_index <> dst__index__subtype__1__last) -> ( dst_index + 1 <= natural__last) . H9: (dst_index <> dst__index__subtype__1__last) -> ( src_index + 8 <= natural__last) . H10: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H11: src_offset >= natural__first . H12: src_offset <= natural__last . H13: src__index__subtype__1__first = 0 . H14: dst__index__subtype__1__first = 0 . H15: src_offset <= src__index__subtype__1__last . H16: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H17: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H18: src_offset + 7 <= src__index__subtype__1__last . H19: src_offset + dst__index__subtype__1__last * 8 <= natural__last . H20: not (dst_index = dst__index__subtype__1__last) . H21: dst_index + 1 <= natural__last . H22: dst_index + 1 >= spark__crypto__word_count_t__first . H23: dst_index + 1 <= spark__crypto__word_count_t__last . H24: src_index + 8 <= natural__last . H25: src_index + 8 >= natural__first . H26: src_index + 8 <= natural__last . 
H27: src_index + 8 >= src__index__subtype__1__first . H28: src_index + 8 <= src__index__subtype__1__last . H29: src_index + 8 + 1 >= src__index__subtype__1__first . H30: src_index + 8 + 1 <= src__index__subtype__1__last . H31: src_index + 8 + 2 >= src__index__subtype__1__first . H32: src_index + 8 + 2 <= src__index__subtype__1__last . H33: src_index + 8 + 3 >= src__index__subtype__1__first . H34: src_index + 8 + 3 <= src__index__subtype__1__last . H35: src_index + 8 + 4 >= src__index__subtype__1__first . H36: src_index + 8 + 4 <= src__index__subtype__1__last . H37: src_index + 8 + 5 >= src__index__subtype__1__first . H38: src_index + 8 + 5 <= src__index__subtype__1__last . H39: src_index + 8 + 6 >= src__index__subtype__1__first . H40: src_index + 8 + 6 <= src__index__subtype__1__last . H41: src_index + 8 + 7 >= src__index__subtype__1__first . H42: src_index + 8 + 7 <= src__index__subtype__1__last . H43: 8 >= spark__unsigned__shift_count__first . H44: 8 <= spark__unsigned__shift_count__last . H45: element(src, [src_index + 8 + 1]) >= interfaces__unsigned_64__first . H46: element(src, [src_index + 8 + 1]) <= interfaces__unsigned_64__last . H47: element(src, [src_index + 8 + 1]) >= spark__unsigned__u64__first . H48: element(src, [src_index + 8 + 1]) <= spark__unsigned__u64__last . H49: src_index + 8 + 1 >= src__index__subtype__1__first . H50: src_index + 8 + 1 <= src__index__subtype__1__last . H51: src_index + 8 + 1 >= integer__base__first . H52: src_index + 8 + 1 <= integer__base__last . H53: element(src, [src_index + 8]) >= spark__unsigned__u64__first . H54: element(src, [src_index + 8]) <= spark__unsigned__u64__last . H55: src_index + 8 >= src__index__subtype__1__first . H56: src_index + 8 <= src__index__subtype__1__last . H57: dst_index + 1 >= dst__index__subtype__1__first . H58: dst_index + 1 <= dst__index__subtype__1__last . H59: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 1]), 8) >= interfaces__unsigned_64__first . H60: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 1]), 8) <= interfaces__unsigned_64__last . H61: 16 >= spark__unsigned__shift_count__first . H62: 16 <= spark__unsigned__shift_count__last . H63: element(src, [src_index + 8 + 2]) >= interfaces__unsigned_64__first . H64: element(src, [src_index + 8 + 2]) <= interfaces__unsigned_64__last . H65: element(src, [src_index + 8 + 2]) >= spark__unsigned__u64__first . H66: element(src, [src_index + 8 + 2]) <= spark__unsigned__u64__last . H67: src_index + 8 + 2 >= src__index__subtype__1__first . H68: src_index + 8 + 2 <= src__index__subtype__1__last . H69: src_index + 8 + 2 >= integer__base__first . H70: src_index + 8 + 2 <= integer__base__last . H71: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 2]), 16) >= interfaces__unsigned_64__first . H72: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 2]), 16) <= interfaces__unsigned_64__last . H73: 24 >= spark__unsigned__shift_count__first . H74: 24 <= spark__unsigned__shift_count__last . H75: element(src, [src_index + 8 + 3]) >= interfaces__unsigned_64__first . H76: element(src, [src_index + 8 + 3]) <= interfaces__unsigned_64__last . H77: element(src, [src_index + 8 + 3]) >= spark__unsigned__u64__first . H78: element(src, [src_index + 8 + 3]) <= spark__unsigned__u64__last . H79: src_index + 8 + 3 >= src__index__subtype__1__first . H80: src_index + 8 + 3 <= src__index__subtype__1__last . H81: src_index + 8 + 3 >= integer__base__first . H82: src_index + 8 + 3 <= integer__base__last . 
H83: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 3]), 24) >= interfaces__unsigned_64__first . H84: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 3]), 24) <= interfaces__unsigned_64__last . H85: 32 >= spark__unsigned__shift_count__first . H86: 32 <= spark__unsigned__shift_count__last . H87: element(src, [src_index + 8 + 4]) >= interfaces__unsigned_64__first . H88: element(src, [src_index + 8 + 4]) <= interfaces__unsigned_64__last . H89: element(src, [src_index + 8 + 4]) >= spark__unsigned__u64__first . H90: element(src, [src_index + 8 + 4]) <= spark__unsigned__u64__last . H91: src_index + 8 + 4 >= src__index__subtype__1__first . H92: src_index + 8 + 4 <= src__index__subtype__1__last . H93: src_index + 8 + 4 >= integer__base__first . H94: src_index + 8 + 4 <= integer__base__last . H95: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 4]), 32) >= interfaces__unsigned_64__first . H96: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 4]), 32) <= interfaces__unsigned_64__last . H97: 40 >= spark__unsigned__shift_count__first . H98: 40 <= spark__unsigned__shift_count__last . H99: element(src, [src_index + 8 + 5]) >= interfaces__unsigned_64__first . H100: element(src, [src_index + 8 + 5]) <= interfaces__unsigned_64__last . H101: element(src, [src_index + 8 + 5]) >= spark__unsigned__u64__first . H102: element(src, [src_index + 8 + 5]) <= spark__unsigned__u64__last . H103: src_index + 8 + 5 >= src__index__subtype__1__first . H104: src_index + 8 + 5 <= src__index__subtype__1__last . H105: src_index + 8 + 5 >= integer__base__first . H106: src_index + 8 + 5 <= integer__base__last . H107: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 5]), 40) >= interfaces__unsigned_64__first . H108: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 5]), 40) <= interfaces__unsigned_64__last . H109: 48 >= spark__unsigned__shift_count__first . H110: 48 <= spark__unsigned__shift_count__last . H111: element(src, [src_index + 8 + 6]) >= interfaces__unsigned_64__first . H112: element(src, [src_index + 8 + 6]) <= interfaces__unsigned_64__last . H113: element(src, [src_index + 8 + 6]) >= spark__unsigned__u64__first . H114: element(src, [src_index + 8 + 6]) <= spark__unsigned__u64__last . H115: src_index + 8 + 6 >= src__index__subtype__1__first . H116: src_index + 8 + 6 <= src__index__subtype__1__last . H117: src_index + 8 + 6 >= integer__base__first . H118: src_index + 8 + 6 <= integer__base__last . H119: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 6]), 48) >= interfaces__unsigned_64__first . H120: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 6]), 48) <= interfaces__unsigned_64__last . H121: 56 >= spark__unsigned__shift_count__first . H122: 56 <= spark__unsigned__shift_count__last . H123: element(src, [src_index + 8 + 7]) >= interfaces__unsigned_64__first . H124: element(src, [src_index + 8 + 7]) <= interfaces__unsigned_64__last . H125: element(src, [src_index + 8 + 7]) >= spark__unsigned__u64__first . H126: element(src, [src_index + 8 + 7]) <= spark__unsigned__u64__last . H127: src_index + 8 + 7 >= src__index__subtype__1__first . H128: src_index + 8 + 7 <= src__index__subtype__1__last . H129: src_index + 8 + 7 >= integer__base__first . H130: src_index + 8 + 7 <= integer__base__last . H131: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 7]), 56) >= interfaces__unsigned_64__first . H132: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 7]), 56) <= interfaces__unsigned_64__last . 
-> C1: (((((((element(src, [src_index + 8]) + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 1]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 2]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 3]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 4]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 5]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 6]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 7]), 56)) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (((((((element(src, [src_index + 8]) + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 1]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 2]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 3]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 4]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 5]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 6]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 7]), 56)) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . For path(s) from start to assertion of line 172: procedure_get_64_lsb_first_21. H1: src__index__subtype__1__first = 0 . H2: dst__index__subtype__1__first = 0 . H3: src_offset <= src__index__subtype__1__last . H4: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H5: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H6: src_offset + 7 <= src__index__subtype__1__last . H7: src_offset + dst__index__subtype__1__last * 8 <= natural__last . H8: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H9: src_offset >= natural__first . H10: src_offset <= natural__last . H11: 0 >= spark__crypto__word_count_t__first . H12: 0 <= spark__crypto__word_count_t__last . H13: src_offset >= natural__first . H14: src_offset <= natural__last . H15: src_offset >= src__index__subtype__1__first . H16: src_offset <= src__index__subtype__1__last . H17: src_offset + 1 >= src__index__subtype__1__first . H18: src_offset + 1 <= src__index__subtype__1__last . H19: src_offset + 2 >= src__index__subtype__1__first . H20: src_offset + 2 <= src__index__subtype__1__last . H21: src_offset + 3 >= src__index__subtype__1__first . H22: src_offset + 3 <= src__index__subtype__1__last . H23: src_offset + 4 >= src__index__subtype__1__first . H24: src_offset + 4 <= src__index__subtype__1__last . H25: src_offset + 5 >= src__index__subtype__1__first . H26: src_offset + 5 <= src__index__subtype__1__last . H27: src_offset + 6 >= src__index__subtype__1__first . H28: src_offset + 6 <= src__index__subtype__1__last . H29: src_offset + 7 >= src__index__subtype__1__first . H30: src_offset + 7 <= src__index__subtype__1__last . 
H31: 8 >= spark__unsigned__shift_count__first . H32: 8 <= spark__unsigned__shift_count__last . H33: element(src, [src_offset + 1]) >= interfaces__unsigned_64__first . H34: element(src, [src_offset + 1]) <= interfaces__unsigned_64__last . H35: element(src, [src_offset + 1]) >= spark__unsigned__u64__first . H36: element(src, [src_offset + 1]) <= spark__unsigned__u64__last . H37: src_offset + 1 >= src__index__subtype__1__first . H38: src_offset + 1 <= src__index__subtype__1__last . H39: src_offset + 1 >= integer__base__first . H40: src_offset + 1 <= integer__base__last . H41: element(src, [src_offset]) >= spark__unsigned__u64__first . H42: element(src, [src_offset]) <= spark__unsigned__u64__last . H43: src_offset >= src__index__subtype__1__first . H44: src_offset <= src__index__subtype__1__last . H45: 0 >= dst__index__subtype__1__first . H46: 0 <= dst__index__subtype__1__last . H47: spark__unsigned__shift_left_64(element(src, [ src_offset + 1]), 8) >= interfaces__unsigned_64__first . H48: spark__unsigned__shift_left_64(element(src, [ src_offset + 1]), 8) <= interfaces__unsigned_64__last . H49: 16 >= spark__unsigned__shift_count__first . H50: 16 <= spark__unsigned__shift_count__last . H51: element(src, [src_offset + 2]) >= interfaces__unsigned_64__first . H52: element(src, [src_offset + 2]) <= interfaces__unsigned_64__last . H53: element(src, [src_offset + 2]) >= spark__unsigned__u64__first . H54: element(src, [src_offset + 2]) <= spark__unsigned__u64__last . H55: src_offset + 2 >= src__index__subtype__1__first . H56: src_offset + 2 <= src__index__subtype__1__last . H57: src_offset + 2 >= integer__base__first . H58: src_offset + 2 <= integer__base__last . H59: spark__unsigned__shift_left_64(element(src, [ src_offset + 2]), 16) >= interfaces__unsigned_64__first . H60: spark__unsigned__shift_left_64(element(src, [ src_offset + 2]), 16) <= interfaces__unsigned_64__last . H61: 24 >= spark__unsigned__shift_count__first . H62: 24 <= spark__unsigned__shift_count__last . H63: element(src, [src_offset + 3]) >= interfaces__unsigned_64__first . H64: element(src, [src_offset + 3]) <= interfaces__unsigned_64__last . H65: element(src, [src_offset + 3]) >= spark__unsigned__u64__first . H66: element(src, [src_offset + 3]) <= spark__unsigned__u64__last . H67: src_offset + 3 >= src__index__subtype__1__first . H68: src_offset + 3 <= src__index__subtype__1__last . H69: src_offset + 3 >= integer__base__first . H70: src_offset + 3 <= integer__base__last . H71: spark__unsigned__shift_left_64(element(src, [ src_offset + 3]), 24) >= interfaces__unsigned_64__first . H72: spark__unsigned__shift_left_64(element(src, [ src_offset + 3]), 24) <= interfaces__unsigned_64__last . H73: 32 >= spark__unsigned__shift_count__first . H74: 32 <= spark__unsigned__shift_count__last . H75: element(src, [src_offset + 4]) >= interfaces__unsigned_64__first . H76: element(src, [src_offset + 4]) <= interfaces__unsigned_64__last . H77: element(src, [src_offset + 4]) >= spark__unsigned__u64__first . H78: element(src, [src_offset + 4]) <= spark__unsigned__u64__last . H79: src_offset + 4 >= src__index__subtype__1__first . H80: src_offset + 4 <= src__index__subtype__1__last . H81: src_offset + 4 >= integer__base__first . H82: src_offset + 4 <= integer__base__last . H83: spark__unsigned__shift_left_64(element(src, [ src_offset + 4]), 32) >= interfaces__unsigned_64__first . H84: spark__unsigned__shift_left_64(element(src, [ src_offset + 4]), 32) <= interfaces__unsigned_64__last . H85: 40 >= spark__unsigned__shift_count__first . 
H86: 40 <= spark__unsigned__shift_count__last . H87: element(src, [src_offset + 5]) >= interfaces__unsigned_64__first . H88: element(src, [src_offset + 5]) <= interfaces__unsigned_64__last . H89: element(src, [src_offset + 5]) >= spark__unsigned__u64__first . H90: element(src, [src_offset + 5]) <= spark__unsigned__u64__last . H91: src_offset + 5 >= src__index__subtype__1__first . H92: src_offset + 5 <= src__index__subtype__1__last . H93: src_offset + 5 >= integer__base__first . H94: src_offset + 5 <= integer__base__last . H95: spark__unsigned__shift_left_64(element(src, [ src_offset + 5]), 40) >= interfaces__unsigned_64__first . H96: spark__unsigned__shift_left_64(element(src, [ src_offset + 5]), 40) <= interfaces__unsigned_64__last . H97: 48 >= spark__unsigned__shift_count__first . H98: 48 <= spark__unsigned__shift_count__last . H99: element(src, [src_offset + 6]) >= interfaces__unsigned_64__first . H100: element(src, [src_offset + 6]) <= interfaces__unsigned_64__last . H101: element(src, [src_offset + 6]) >= spark__unsigned__u64__first . H102: element(src, [src_offset + 6]) <= spark__unsigned__u64__last . H103: src_offset + 6 >= src__index__subtype__1__first . H104: src_offset + 6 <= src__index__subtype__1__last . H105: src_offset + 6 >= integer__base__first . H106: src_offset + 6 <= integer__base__last . H107: spark__unsigned__shift_left_64(element(src, [ src_offset + 6]), 48) >= interfaces__unsigned_64__first . H108: spark__unsigned__shift_left_64(element(src, [ src_offset + 6]), 48) <= interfaces__unsigned_64__last . H109: 56 >= spark__unsigned__shift_count__first . H110: 56 <= spark__unsigned__shift_count__last . H111: element(src, [src_offset + 7]) >= interfaces__unsigned_64__first . H112: element(src, [src_offset + 7]) <= interfaces__unsigned_64__last . H113: element(src, [src_offset + 7]) >= spark__unsigned__u64__first . H114: element(src, [src_offset + 7]) <= spark__unsigned__u64__last . H115: src_offset + 7 >= src__index__subtype__1__first . H116: src_offset + 7 <= src__index__subtype__1__last . H117: src_offset + 7 >= integer__base__first . H118: src_offset + 7 <= integer__base__last . H119: spark__unsigned__shift_left_64(element(src, [ src_offset + 7]), 56) >= interfaces__unsigned_64__first . H120: spark__unsigned__shift_left_64(element(src, [ src_offset + 7]), 56) <= interfaces__unsigned_64__last . H121: (((((((element(src, [src_offset]) + spark__unsigned__shift_left_64(element(src, [ src_offset + 1]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 2]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 3]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 4]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 5]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 6]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 7]), 56)) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . 
H122: (((((((element(src, [src_offset]) + spark__unsigned__shift_left_64(element(src, [ src_offset + 1]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 2]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 3]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 4]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 5]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 6]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 7]), 56)) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . -> C1: for_all(i_: integer, ((i_ >= dst__index__subtype__1__first) and ( i_ <= 0)) -> ((element(update(dst, [0], (((((((element( src, [src_offset]) + spark__unsigned__shift_left_64(element(src, [ src_offset + 1]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 2]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 3]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 4]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 5]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 6]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 7]), 56)) mod interfaces__unsigned_64__modulus), [ i_]) >= spark__unsigned__u64__first) and (element(update( dst, [0], (((((((element(src, [src_offset]) + spark__unsigned__shift_left_64(element(src, [ src_offset + 1]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 2]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 3]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 4]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 5]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 6]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_offset + 7]), 56)) mod interfaces__unsigned_64__modulus), [ i_]) <= spark__unsigned__u64__last))) . C2: 0 >= dst__index__subtype__1__first . C3: 0 <= dst__index__subtype__1__last . C4: dst__index__subtype__1__last <= spark__crypto__word_count_t__last . C5: src_offset = src_offset + 0 * 8 . C6: src_offset >= src_offset . C7: src_offset <= src_offset + dst__index__subtype__1__last * 8 . C8: (0 <> dst__index__subtype__1__last) -> (0 + 1 <= natural__last) . C9: (0 <> dst__index__subtype__1__last) -> (src_offset + 8 <= natural__last) . C10: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . C11: src_offset >= natural__first . C12: src_offset <= natural__last . C13: src__index__subtype__1__first = 0 . C14: dst__index__subtype__1__first = 0 . C15: src_offset <= src__index__subtype__1__last . 
C16: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . C17: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . C18: src_offset + 7 <= src__index__subtype__1__last . C19: src_offset + dst__index__subtype__1__last * 8 <= natural__last . For path(s) from assertion of line 172 to assertion of line 172: procedure_get_64_lsb_first_22. H1: for_all(i_: integer, ((i_ >= dst__index__subtype__1__first) and ( i_ <= dst_index)) -> ((element(dst, [i_]) >= spark__unsigned__u64__first) and (element(dst, [ i_]) <= spark__unsigned__u64__last))) . H2: dst_index >= dst__index__subtype__1__first . H3: dst_index <= dst__index__subtype__1__last . H4: dst__index__subtype__1__last <= spark__crypto__word_count_t__last . H5: src_index = src_offset + dst_index * 8 . H6: src_index >= src_offset . H7: src_index <= src_offset + dst__index__subtype__1__last * 8 . H8: (dst_index <> dst__index__subtype__1__last) -> ( dst_index + 1 <= natural__last) . H9: (dst_index <> dst__index__subtype__1__last) -> ( src_index + 8 <= natural__last) . H10: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H11: src_offset >= natural__first . H12: src_offset <= natural__last . H13: src__index__subtype__1__first = 0 . H14: dst__index__subtype__1__first = 0 . H15: src_offset <= src__index__subtype__1__last . H16: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H17: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H18: src_offset + 7 <= src__index__subtype__1__last . H19: src_offset + dst__index__subtype__1__last * 8 <= natural__last . H20: not (dst_index = dst__index__subtype__1__last) . H21: dst_index + 1 <= natural__last . H22: dst_index + 1 >= spark__crypto__word_count_t__first . H23: dst_index + 1 <= spark__crypto__word_count_t__last . H24: src_index + 8 <= natural__last . H25: src_index + 8 >= natural__first . H26: src_index + 8 <= natural__last . H27: src_index + 8 >= src__index__subtype__1__first . H28: src_index + 8 <= src__index__subtype__1__last . H29: src_index + 8 + 1 >= src__index__subtype__1__first . H30: src_index + 8 + 1 <= src__index__subtype__1__last . H31: src_index + 8 + 2 >= src__index__subtype__1__first . H32: src_index + 8 + 2 <= src__index__subtype__1__last . H33: src_index + 8 + 3 >= src__index__subtype__1__first . H34: src_index + 8 + 3 <= src__index__subtype__1__last . H35: src_index + 8 + 4 >= src__index__subtype__1__first . H36: src_index + 8 + 4 <= src__index__subtype__1__last . H37: src_index + 8 + 5 >= src__index__subtype__1__first . H38: src_index + 8 + 5 <= src__index__subtype__1__last . H39: src_index + 8 + 6 >= src__index__subtype__1__first . H40: src_index + 8 + 6 <= src__index__subtype__1__last . H41: src_index + 8 + 7 >= src__index__subtype__1__first . H42: src_index + 8 + 7 <= src__index__subtype__1__last . H43: 8 >= spark__unsigned__shift_count__first . H44: 8 <= spark__unsigned__shift_count__last . H45: element(src, [src_index + 8 + 1]) >= interfaces__unsigned_64__first . H46: element(src, [src_index + 8 + 1]) <= interfaces__unsigned_64__last . H47: element(src, [src_index + 8 + 1]) >= spark__unsigned__u64__first . H48: element(src, [src_index + 8 + 1]) <= spark__unsigned__u64__last . H49: src_index + 8 + 1 >= src__index__subtype__1__first . 
H50: src_index + 8 + 1 <= src__index__subtype__1__last . H51: src_index + 8 + 1 >= integer__base__first . H52: src_index + 8 + 1 <= integer__base__last . H53: element(src, [src_index + 8]) >= spark__unsigned__u64__first . H54: element(src, [src_index + 8]) <= spark__unsigned__u64__last . H55: src_index + 8 >= src__index__subtype__1__first . H56: src_index + 8 <= src__index__subtype__1__last . H57: dst_index + 1 >= dst__index__subtype__1__first . H58: dst_index + 1 <= dst__index__subtype__1__last . H59: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 1]), 8) >= interfaces__unsigned_64__first . H60: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 1]), 8) <= interfaces__unsigned_64__last . H61: 16 >= spark__unsigned__shift_count__first . H62: 16 <= spark__unsigned__shift_count__last . H63: element(src, [src_index + 8 + 2]) >= interfaces__unsigned_64__first . H64: element(src, [src_index + 8 + 2]) <= interfaces__unsigned_64__last . H65: element(src, [src_index + 8 + 2]) >= spark__unsigned__u64__first . H66: element(src, [src_index + 8 + 2]) <= spark__unsigned__u64__last . H67: src_index + 8 + 2 >= src__index__subtype__1__first . H68: src_index + 8 + 2 <= src__index__subtype__1__last . H69: src_index + 8 + 2 >= integer__base__first . H70: src_index + 8 + 2 <= integer__base__last . H71: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 2]), 16) >= interfaces__unsigned_64__first . H72: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 2]), 16) <= interfaces__unsigned_64__last . H73: 24 >= spark__unsigned__shift_count__first . H74: 24 <= spark__unsigned__shift_count__last . H75: element(src, [src_index + 8 + 3]) >= interfaces__unsigned_64__first . H76: element(src, [src_index + 8 + 3]) <= interfaces__unsigned_64__last . H77: element(src, [src_index + 8 + 3]) >= spark__unsigned__u64__first . H78: element(src, [src_index + 8 + 3]) <= spark__unsigned__u64__last . H79: src_index + 8 + 3 >= src__index__subtype__1__first . H80: src_index + 8 + 3 <= src__index__subtype__1__last . H81: src_index + 8 + 3 >= integer__base__first . H82: src_index + 8 + 3 <= integer__base__last . H83: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 3]), 24) >= interfaces__unsigned_64__first . H84: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 3]), 24) <= interfaces__unsigned_64__last . H85: 32 >= spark__unsigned__shift_count__first . H86: 32 <= spark__unsigned__shift_count__last . H87: element(src, [src_index + 8 + 4]) >= interfaces__unsigned_64__first . H88: element(src, [src_index + 8 + 4]) <= interfaces__unsigned_64__last . H89: element(src, [src_index + 8 + 4]) >= spark__unsigned__u64__first . H90: element(src, [src_index + 8 + 4]) <= spark__unsigned__u64__last . H91: src_index + 8 + 4 >= src__index__subtype__1__first . H92: src_index + 8 + 4 <= src__index__subtype__1__last . H93: src_index + 8 + 4 >= integer__base__first . H94: src_index + 8 + 4 <= integer__base__last . H95: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 4]), 32) >= interfaces__unsigned_64__first . H96: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 4]), 32) <= interfaces__unsigned_64__last . H97: 40 >= spark__unsigned__shift_count__first . H98: 40 <= spark__unsigned__shift_count__last . H99: element(src, [src_index + 8 + 5]) >= interfaces__unsigned_64__first . H100: element(src, [src_index + 8 + 5]) <= interfaces__unsigned_64__last . H101: element(src, [src_index + 8 + 5]) >= spark__unsigned__u64__first . 
H102: element(src, [src_index + 8 + 5]) <= spark__unsigned__u64__last . H103: src_index + 8 + 5 >= src__index__subtype__1__first . H104: src_index + 8 + 5 <= src__index__subtype__1__last . H105: src_index + 8 + 5 >= integer__base__first . H106: src_index + 8 + 5 <= integer__base__last . H107: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 5]), 40) >= interfaces__unsigned_64__first . H108: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 5]), 40) <= interfaces__unsigned_64__last . H109: 48 >= spark__unsigned__shift_count__first . H110: 48 <= spark__unsigned__shift_count__last . H111: element(src, [src_index + 8 + 6]) >= interfaces__unsigned_64__first . H112: element(src, [src_index + 8 + 6]) <= interfaces__unsigned_64__last . H113: element(src, [src_index + 8 + 6]) >= spark__unsigned__u64__first . H114: element(src, [src_index + 8 + 6]) <= spark__unsigned__u64__last . H115: src_index + 8 + 6 >= src__index__subtype__1__first . H116: src_index + 8 + 6 <= src__index__subtype__1__last . H117: src_index + 8 + 6 >= integer__base__first . H118: src_index + 8 + 6 <= integer__base__last . H119: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 6]), 48) >= interfaces__unsigned_64__first . H120: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 6]), 48) <= interfaces__unsigned_64__last . H121: 56 >= spark__unsigned__shift_count__first . H122: 56 <= spark__unsigned__shift_count__last . H123: element(src, [src_index + 8 + 7]) >= interfaces__unsigned_64__first . H124: element(src, [src_index + 8 + 7]) <= interfaces__unsigned_64__last . H125: element(src, [src_index + 8 + 7]) >= spark__unsigned__u64__first . H126: element(src, [src_index + 8 + 7]) <= spark__unsigned__u64__last . H127: src_index + 8 + 7 >= src__index__subtype__1__first . H128: src_index + 8 + 7 <= src__index__subtype__1__last . H129: src_index + 8 + 7 >= integer__base__first . H130: src_index + 8 + 7 <= integer__base__last . H131: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 7]), 56) >= interfaces__unsigned_64__first . H132: spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 7]), 56) <= interfaces__unsigned_64__last . H133: (((((((element(src, [src_index + 8]) + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 1]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 2]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 3]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 4]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 5]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 6]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 7]), 56)) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . 
H134: (((((((element(src, [src_index + 8]) + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 1]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 2]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 3]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 4]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 5]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 6]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 7]), 56)) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . -> C1: for_all(i_: integer, ((i_ >= dst__index__subtype__1__first) and ( i_ <= dst_index + 1)) -> ((element(update(dst, [ dst_index + 1], (((((((element(src, [src_index + 8]) + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 1]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 2]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 3]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 4]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 5]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 6]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 7]), 56)) mod interfaces__unsigned_64__modulus), [i_]) >= spark__unsigned__u64__first) and (element(update( dst, [dst_index + 1], (((((((element(src, [ src_index + 8]) + spark__unsigned__shift_left_64(element( src, [src_index + 8 + 1]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 2]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 3]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 4]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 5]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 6]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [ src_index + 8 + 7]), 56)) mod interfaces__unsigned_64__modulus), [i_]) <= spark__unsigned__u64__last))) . C2: dst_index + 1 >= dst__index__subtype__1__first . C3: dst_index + 1 <= dst__index__subtype__1__last . C4: dst__index__subtype__1__last <= spark__crypto__word_count_t__last . C5: src_index + 8 = src_offset + (dst_index + 1) * 8 . C6: src_index + 8 >= src_offset . C7: src_index + 8 <= src_offset + dst__index__subtype__1__last * 8 . C8: (dst_index + 1 <> dst__index__subtype__1__last) -> ( dst_index + 1 + 1 <= natural__last) . C9: (dst_index + 1 <> dst__index__subtype__1__last) -> ( src_index + 8 + 8 <= natural__last) . C10: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . C11: src_offset >= natural__first . 
C12: src_offset <= natural__last . C13: src__index__subtype__1__first = 0 . C14: dst__index__subtype__1__first = 0 . C15: src_offset <= src__index__subtype__1__last . C16: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . C17: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . C18: src_offset + 7 <= src__index__subtype__1__last . C19: src_offset + dst__index__subtype__1__last * 8 <= natural__last . For path(s) from assertion of line 172 to check associated with statement of line 183: procedure_get_64_lsb_first_23. H1: for_all(i_: integer, ((i_ >= dst__index__subtype__1__first) and ( i_ <= dst_index)) -> ((element(dst, [i_]) >= spark__unsigned__u64__first) and (element(dst, [ i_]) <= spark__unsigned__u64__last))) . H2: dst_index >= dst__index__subtype__1__first . H3: dst_index <= dst__index__subtype__1__last . H4: dst__index__subtype__1__last <= spark__crypto__word_count_t__last . H5: src_index = src_offset + dst_index * 8 . H6: src_index >= src_offset . H7: src_index <= src_offset + dst__index__subtype__1__last * 8 . H8: (dst_index <> dst__index__subtype__1__last) -> ( dst_index + 1 <= natural__last) . H9: (dst_index <> dst__index__subtype__1__last) -> ( src_index + 8 <= natural__last) . H10: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H11: src_offset >= natural__first . H12: src_offset <= natural__last . H13: src__index__subtype__1__first = 0 . H14: dst__index__subtype__1__first = 0 . H15: src_offset <= src__index__subtype__1__last . H16: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H17: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H18: src_offset + 7 <= src__index__subtype__1__last . H19: src_offset + dst__index__subtype__1__last * 8 <= natural__last . H20: not (dst_index = dst__index__subtype__1__last) . -> C1: dst_index + 1 <= natural__last . For path(s) from assertion of line 172 to run-time check associated with statement of line 184: procedure_get_64_lsb_first_24. H1: for_all(i_: integer, ((i_ >= dst__index__subtype__1__first) and ( i_ <= dst_index)) -> ((element(dst, [i_]) >= spark__unsigned__u64__first) and (element(dst, [ i_]) <= spark__unsigned__u64__last))) . H2: dst_index >= dst__index__subtype__1__first . H3: dst_index <= dst__index__subtype__1__last . H4: dst__index__subtype__1__last <= spark__crypto__word_count_t__last . H5: src_index = src_offset + dst_index * 8 . H6: src_index >= src_offset . H7: src_index <= src_offset + dst__index__subtype__1__last * 8 . H8: (dst_index <> dst__index__subtype__1__last) -> ( dst_index + 1 <= natural__last) . H9: (dst_index <> dst__index__subtype__1__last) -> ( src_index + 8 <= natural__last) . H10: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H11: src_offset >= natural__first . H12: src_offset <= natural__last . H13: src__index__subtype__1__first = 0 . H14: dst__index__subtype__1__first = 0 . H15: src_offset <= src__index__subtype__1__last . H16: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H17: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . 
H18: src_offset + 7 <= src__index__subtype__1__last . H19: src_offset + dst__index__subtype__1__last * 8 <= natural__last . H20: not (dst_index = dst__index__subtype__1__last) . H21: dst_index + 1 <= natural__last . -> C1: dst_index + 1 >= spark__crypto__word_count_t__first . C2: dst_index + 1 <= spark__crypto__word_count_t__last . For path(s) from assertion of line 172 to check associated with statement of line 186: procedure_get_64_lsb_first_25. H1: for_all(i_: integer, ((i_ >= dst__index__subtype__1__first) and ( i_ <= dst_index)) -> ((element(dst, [i_]) >= spark__unsigned__u64__first) and (element(dst, [ i_]) <= spark__unsigned__u64__last))) . H2: dst_index >= dst__index__subtype__1__first . H3: dst_index <= dst__index__subtype__1__last . H4: dst__index__subtype__1__last <= spark__crypto__word_count_t__last . H5: src_index = src_offset + dst_index * 8 . H6: src_index >= src_offset . H7: src_index <= src_offset + dst__index__subtype__1__last * 8 . H8: (dst_index <> dst__index__subtype__1__last) -> ( dst_index + 1 <= natural__last) . H9: (dst_index <> dst__index__subtype__1__last) -> ( src_index + 8 <= natural__last) . H10: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H11: src_offset >= natural__first . H12: src_offset <= natural__last . H13: src__index__subtype__1__first = 0 . H14: dst__index__subtype__1__first = 0 . H15: src_offset <= src__index__subtype__1__last . H16: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H17: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H18: src_offset + 7 <= src__index__subtype__1__last . H19: src_offset + dst__index__subtype__1__last * 8 <= natural__last . H20: not (dst_index = dst__index__subtype__1__last) . H21: dst_index + 1 <= natural__last . H22: dst_index + 1 >= spark__crypto__word_count_t__first . H23: dst_index + 1 <= spark__crypto__word_count_t__last . -> C1: src_index + 8 <= natural__last . For path(s) from assertion of line 172 to run-time check associated with statement of line 187: procedure_get_64_lsb_first_26. H1: for_all(i_: integer, ((i_ >= dst__index__subtype__1__first) and ( i_ <= dst_index)) -> ((element(dst, [i_]) >= spark__unsigned__u64__first) and (element(dst, [ i_]) <= spark__unsigned__u64__last))) . H2: dst_index >= dst__index__subtype__1__first . H3: dst_index <= dst__index__subtype__1__last . H4: dst__index__subtype__1__last <= spark__crypto__word_count_t__last . H5: src_index = src_offset + dst_index * 8 . H6: src_index >= src_offset . H7: src_index <= src_offset + dst__index__subtype__1__last * 8 . H8: (dst_index <> dst__index__subtype__1__last) -> ( dst_index + 1 <= natural__last) . H9: (dst_index <> dst__index__subtype__1__last) -> ( src_index + 8 <= natural__last) . H10: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H11: src_offset >= natural__first . H12: src_offset <= natural__last . H13: src__index__subtype__1__first = 0 . H14: dst__index__subtype__1__first = 0 . H15: src_offset <= src__index__subtype__1__last . H16: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . 
H17: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H18: src_offset + 7 <= src__index__subtype__1__last . H19: src_offset + dst__index__subtype__1__last * 8 <= natural__last . H20: not (dst_index = dst__index__subtype__1__last) . H21: dst_index + 1 <= natural__last . H22: dst_index + 1 >= spark__crypto__word_count_t__first . H23: dst_index + 1 <= spark__crypto__word_count_t__last . H24: src_index + 8 <= natural__last . -> C1: src_index + 8 >= natural__first . C2: src_index + 8 <= natural__last . For path(s) from assertion of line 172 to finish: procedure_get_64_lsb_first_27. H1: for_all(i_: integer, ((i_ >= dst__index__subtype__1__first) and ( i_ <= dst_index)) -> ((element(dst, [i_]) >= spark__unsigned__u64__first) and (element(dst, [ i_]) <= spark__unsigned__u64__last))) . H2: dst_index >= dst__index__subtype__1__first . H3: dst_index <= dst__index__subtype__1__last . H4: dst__index__subtype__1__last <= spark__crypto__word_count_t__last . H5: src_index = src_offset + dst_index * 8 . H6: src_index >= src_offset . H7: src_index <= src_offset + dst__index__subtype__1__last * 8 . H8: (dst_index <> dst__index__subtype__1__last) -> ( dst_index + 1 <= natural__last) . H9: (dst_index <> dst__index__subtype__1__last) -> ( src_index + 8 <= natural__last) . H10: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__byte__first) and (element( src, [i___1]) <= spark__unsigned__byte__last))) . H11: src_offset >= natural__first . H12: src_offset <= natural__last . H13: src__index__subtype__1__first = 0 . H14: dst__index__subtype__1__first = 0 . H15: src_offset <= src__index__subtype__1__last . H16: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H17: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H18: src_offset + 7 <= src__index__subtype__1__last . H19: src_offset + dst__index__subtype__1__last * 8 <= natural__last . H20: dst_index = dst__index__subtype__1__last . -> C1: for_all(i_: integer, ((i_ >= dst__index__subtype__1__first) and ( i_ <= dst__index__subtype__1__last)) -> ((element( dst, [i_]) >= spark__unsigned__u64__first) and (element( dst, [i_]) <= spark__unsigned__u64__last))) . 
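The verification conditions above (procedure_get_64_lsb_first_23 through _27, together with the hypothesis H134 and conclusion C1 shown earlier) all come from the word-assembly loop of Skein.Get_64_LSB_First: each 64-bit destination word is built from eight source bytes, least significant byte first, with the destination index stepped by 1 and the source index by 8 per iteration under the loop assertion at line 172. As a reading aid, here is a minimal Ada sketch of a loop with that shape, reconstructed only from the hypotheses and conclusions shown here; the procedure name, type declarations and example data are illustrative assumptions, not the library's actual Skein source.

--  Hypothetical sketch: little-endian assembly of 64-bit words from bytes,
--  mirroring the expression and index stepping visible in the VCs above.
with Interfaces; use Interfaces;

procedure Get_64_LSB_First_Sketch is

   type Byte_Array is array (Natural range <>) of Unsigned_8;
   type Word_Array is array (Natural range <>) of Unsigned_64;

   --  Illustrative data only: 16 source bytes feeding 2 destination words.
   Src : constant Byte_Array (0 .. 15) :=
     (16#01#, 16#02#, 16#03#, 16#04#, 16#05#, 16#06#, 16#07#, 16#08#,
      16#11#, 16#12#, 16#13#, 16#14#, 16#15#, 16#16#, 16#17#, 16#18#);
   Dst : Word_Array (0 .. 1);

   Src_Offset : constant Natural := 0;
   Dst_Index  : Natural := Dst'First;
   Src_Index  : Natural := Src_Offset;

begin
   loop
      --  One 64-bit word from 8 bytes, least significant byte first.
      --  Unsigned_64 is modular, so each "+" wraps modulo 2**64; this is
      --  what the "mod interfaces__unsigned_64__modulus" terms in H134 and
      --  C1 model, and why the assembled value stays within
      --  spark__unsigned__u64__first .. spark__unsigned__u64__last.
      Dst (Dst_Index) :=
        Unsigned_64 (Src (Src_Index))
        + Shift_Left (Unsigned_64 (Src (Src_Index + 1)), 8)
        + Shift_Left (Unsigned_64 (Src (Src_Index + 2)), 16)
        + Shift_Left (Unsigned_64 (Src (Src_Index + 3)), 24)
        + Shift_Left (Unsigned_64 (Src (Src_Index + 4)), 32)
        + Shift_Left (Unsigned_64 (Src (Src_Index + 5)), 40)
        + Shift_Left (Unsigned_64 (Src (Src_Index + 6)), 48)
        + Shift_Left (Unsigned_64 (Src (Src_Index + 7)), 56);

      --  The loop assertion at line 172 corresponds to: every Dst element
      --  written so far is in U64 range, and Src_Index = Src_Offset +
      --  Dst_Index * 8 (hypotheses H1 and H5 of the VCs above).

      exit when Dst_Index = Dst'Last;   --  "to finish" path (_27)
      Dst_Index := Dst_Index + 1;       --  checks of lines 183/184 (_23, _24)
      Src_Index := Src_Index + 8;       --  checks of lines 186/187 (_25, _26)
   end loop;
end Get_64_LSB_First_Sketch;

In this sketch the exit test precedes the index increments, matching the guarded hypotheses H8/H9 and H20: the indices are only advanced when Dst_Index has not yet reached Dst'Last, which is what discharges the Natural and word-count range conclusions of VCs _23 to _26.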
spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final.vlg0000644000175000017500000000326611712765060026537 0ustar eugeneugen Non-option args: skein_512_final Option args: -fuse-concls \ -decls=[SUPRESSED IN PLAIN MODE] \ -unique-working-files \ -rules=[SUPRESSED IN PLAIN MODE] \ -rules=[SUPRESSED IN PLAIN MODE] \ -elim-enums \ -ground-eval-exp \ -abstract-exp \ -abstract-divmod \ -utick \ -gtick \ -longtick \ -echo-final-stats \ -csv-reports-include-unit-kind \ -level=warning \ -bit-type \ -bit-type-bool-eq-to-iff \ -refine-types \ -refine-int-subrange-type \ -abstract-arrays-records-late \ -elim-array-constructors \ -add-array-select-box-update-axioms \ -abstract-array-box-updates \ -add-array-select-update-axioms \ -abstract-array-select-updates \ -abstract-array-types \ -abstract-record-types \ -abstract-bit-ops \ -abstract-bit-valued-eqs \ -abstract-bit-valued-int-le \ -elim-bit-type-and-consts \ -abstract-reals \ -lift-quants \ -strip-quantifier-patterns \ -elim-type-aliases \ -interface-mode=smtlib \ -refine-bit-type-as-int-subtype \ -refine-bit-eq-equiv \ -elim-record-constructors \ -add-record-select-update-axioms \ -abstract-record-selects-updates \ -logic=AUFNIRA \ -report=skein_512_final \ -prover-command=[SUPRESSED IN PLAIN MODE] \ -siv \ -plain \ -read-unit-rlu-files \ -read-directory-rlu-files \ -delete-rules-with-undeclared-ids \ Total ERROR messages: 0 Total WARNING messages: 0 Summary Stats: true: 2 (100.0%) unproven: 0 ( 0.0%) error: 0 ( 0.0%) total: 2 spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/get_64_lsb_first.siv0000644000175000017500000004710411712513676027041 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Get_64_LSB_First For path(s) from start to run-time check associated with statement of line 148: procedure_get_64_lsb_first_1. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 149: procedure_get_64_lsb_first_2. *** true . /* all conclusions proved */ For path(s) from start to check associated with statement of line 151: procedure_get_64_lsb_first_3. *** true . /* all conclusions proved */ For path(s) from assertion of line 172 to check associated with statement of line 151: procedure_get_64_lsb_first_4. H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> 0 <= element(dst, [i_]) and element(dst, [i_]) <= 18446744073709551615) . H2: dst_index >= dst__index__subtype__1__first . H3: dst__index__subtype__1__last <= 268435455 . H4: src_offset + dst_index * 8 >= src_offset . H5: dst_index <= 2147483646 . H6: src_offset + dst_index * 8 <= 2147483639 . H7: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1]) and element(src, [i___1]) <= 255) . H8: src_offset >= 0 . H9: src__index__subtype__1__first = 0 . H10: dst__index__subtype__1__first = 0 . H11: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H12: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H13: src_offset + 7 <= src__index__subtype__1__last . H14: src_offset + dst__index__subtype__1__last * 8 <= 2147483647 . H15: dst_index <= 268435454 . H16: src_offset + dst_index * 8 <= 2147483639 . 
H17: dst_index < dst__index__subtype__1__last . H18: integer__size >= 0 . H19: natural__size >= 0 . H20: spark__unsigned__byte__size >= 0 . H21: spark__unsigned__u64__size >= 0 . H22: spark__unsigned__shift_count__size >= 0 . H23: spark__crypto__word_count_t__size >= 0 . H24: dst__index__subtype__1__first <= dst__index__subtype__1__last . H25: src__index__subtype__1__first <= src__index__subtype__1__last . H26: src__index__subtype__1__first >= 0 . H27: src__index__subtype__1__last >= 0 . H28: src__index__subtype__1__last <= 2147483647 . H29: src__index__subtype__1__first <= 2147483647 . H30: dst__index__subtype__1__first >= 0 . H31: dst__index__subtype__1__last >= 0 . H32: dst__index__subtype__1__first <= 268435455 . -> C1: src_offset + dst_index * 8 + 8 <= src__index__subtype__1__last . C2: src_offset + dst_index * 8 + 9 <= src__index__subtype__1__last . C3: src_offset + dst_index * 8 + 10 <= src__index__subtype__1__last . C4: src_offset + dst_index * 8 + 11 <= src__index__subtype__1__last . C5: src_offset + dst_index * 8 + 12 <= src__index__subtype__1__last . C6: src_offset + dst_index * 8 + 13 <= src__index__subtype__1__last . C7: src_offset + dst_index * 8 + 14 <= src__index__subtype__1__last . C8: src_offset + dst_index * 8 + 15 <= src__index__subtype__1__last . For path(s) from start to run-time check associated with statement of line 161: procedure_get_64_lsb_first_5. *** true . /* all conclusions proved */ For path(s) from assertion of line 172 to run-time check associated with statement of line 161: procedure_get_64_lsb_first_6. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 161: procedure_get_64_lsb_first_7. *** true . /* all conclusions proved */ For path(s) from assertion of line 172 to run-time check associated with statement of line 161: procedure_get_64_lsb_first_8. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 161: procedure_get_64_lsb_first_9. *** true . /* all conclusions proved */ For path(s) from assertion of line 172 to run-time check associated with statement of line 161: procedure_get_64_lsb_first_10. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 161: procedure_get_64_lsb_first_11. *** true . /* all conclusions proved */ For path(s) from assertion of line 172 to run-time check associated with statement of line 161: procedure_get_64_lsb_first_12. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 161: procedure_get_64_lsb_first_13. *** true . /* all conclusions proved */ For path(s) from assertion of line 172 to run-time check associated with statement of line 161: procedure_get_64_lsb_first_14. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 161: procedure_get_64_lsb_first_15. *** true . /* all conclusions proved */ For path(s) from assertion of line 172 to run-time check associated with statement of line 161: procedure_get_64_lsb_first_16. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 161: procedure_get_64_lsb_first_17. *** true . /* all conclusions proved */ For path(s) from assertion of line 172 to run-time check associated with statement of line 161: procedure_get_64_lsb_first_18. *** true . 
/* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 161: procedure_get_64_lsb_first_19. *** true . /* all conclusions proved */ For path(s) from assertion of line 172 to run-time check associated with statement of line 161: procedure_get_64_lsb_first_20. *** true . /* all conclusions proved */ For path(s) from start to assertion of line 172: procedure_get_64_lsb_first_21. H1: src__index__subtype__1__first = 0 . H2: dst__index__subtype__1__first = 0 . H3: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H4: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H5: src_offset + 7 <= src__index__subtype__1__last . H6: src_offset + dst__index__subtype__1__last * 8 <= 2147483647 . H7: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1]) and element(src, [i___1]) <= 255) . H8: src_offset >= 0 . H9: src_offset >= src__index__subtype__1__first . H10: element(src, [src_offset + 1]) >= 0 . H11: element(src, [src_offset + 1]) <= 18446744073709551615 . H12: element(src, [src_offset]) >= 0 . H13: element(src, [src_offset]) <= 18446744073709551615 . H14: 0 <= dst__index__subtype__1__last . H15: spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8) >= 0 . H16: spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8) <= 18446744073709551615 . H17: element(src, [src_offset + 2]) >= 0 . H18: element(src, [src_offset + 2]) <= 18446744073709551615 . H19: spark__unsigned__shift_left_64(element(src, [src_offset + 2]), 16) >= 0 . H20: spark__unsigned__shift_left_64(element(src, [src_offset + 2]), 16) <= 18446744073709551615 . H21: element(src, [src_offset + 3]) >= 0 . H22: element(src, [src_offset + 3]) <= 18446744073709551615 . H23: spark__unsigned__shift_left_64(element(src, [src_offset + 3]), 24) >= 0 . H24: spark__unsigned__shift_left_64(element(src, [src_offset + 3]), 24) <= 18446744073709551615 . H25: element(src, [src_offset + 4]) >= 0 . H26: element(src, [src_offset + 4]) <= 18446744073709551615 . H27: spark__unsigned__shift_left_64(element(src, [src_offset + 4]), 32) >= 0 . H28: spark__unsigned__shift_left_64(element(src, [src_offset + 4]), 32) <= 18446744073709551615 . H29: element(src, [src_offset + 5]) >= 0 . H30: element(src, [src_offset + 5]) <= 18446744073709551615 . H31: spark__unsigned__shift_left_64(element(src, [src_offset + 5]), 40) >= 0 . H32: spark__unsigned__shift_left_64(element(src, [src_offset + 5]), 40) <= 18446744073709551615 . H33: element(src, [src_offset + 6]) >= 0 . H34: element(src, [src_offset + 6]) <= 18446744073709551615 . H35: spark__unsigned__shift_left_64(element(src, [src_offset + 6]), 48) >= 0 . H36: spark__unsigned__shift_left_64(element(src, [src_offset + 6]), 48) <= 18446744073709551615 . H37: element(src, [src_offset + 7]) >= 0 . H38: element(src, [src_offset + 7]) <= 18446744073709551615 . H39: src_offset <= 2147483640 . H40: spark__unsigned__shift_left_64(element(src, [src_offset + 7]), 56) >= 0 . H41: spark__unsigned__shift_left_64(element(src, [src_offset + 7]), 56) <= 18446744073709551615 . 
H42: (((((((element(src, [src_offset]) + spark__unsigned__shift_left_64( element(src, [src_offset + 1]), 8)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 2]), 16)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 3]), 24)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 4]), 32)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 5]), 40)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 6]), 48)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 7]), 56)) mod 18446744073709551616 >= 0 . H43: (((((((element(src, [src_offset]) + spark__unsigned__shift_left_64( element(src, [src_offset + 1]), 8)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 2]), 16)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 3]), 24)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 4]), 32)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 5]), 40)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 6]), 48)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 7]), 56)) mod 18446744073709551616 <= 18446744073709551615 . H44: integer__size >= 0 . H45: natural__size >= 0 . H46: spark__unsigned__byte__size >= 0 . H47: spark__unsigned__u64__size >= 0 . H48: spark__unsigned__shift_count__size >= 0 . H49: spark__crypto__word_count_t__size >= 0 . H50: dst__index__subtype__1__first <= dst__index__subtype__1__last . H51: src__index__subtype__1__first <= src__index__subtype__1__last . H52: src__index__subtype__1__first >= 0 . H53: src__index__subtype__1__last >= 0 . H54: src__index__subtype__1__last <= 2147483647 . H55: src__index__subtype__1__first <= 2147483647 . H56: dst__index__subtype__1__first >= 0 . H57: dst__index__subtype__1__last >= 0 . H58: dst__index__subtype__1__last <= 268435455 . H59: dst__index__subtype__1__first <= 268435455 . -> C1: 0 <> dst__index__subtype__1__last -> src_offset <= 2147483639 . For path(s) from assertion of line 172 to assertion of line 172: procedure_get_64_lsb_first_22. H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> 0 <= element(dst, [i_]) and element(dst, [i_]) <= 18446744073709551615) . H2: dst_index >= dst__index__subtype__1__first . H3: dst__index__subtype__1__last <= 268435455 . H4: src_offset + dst_index * 8 >= src_offset . H5: src_offset + dst_index * 8 <= src_offset + dst__index__subtype__1__last * 8 . H6: dst_index <= 2147483646 . H7: src_offset + dst_index * 8 <= 2147483639 . H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1]) and element(src, [i___1]) <= 255) . H9: src_offset >= 0 . H10: src_offset <= 2147483647 . H11: src__index__subtype__1__first = 0 . H12: dst__index__subtype__1__first = 0 . H13: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first . H14: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last . H15: src_offset + 7 <= src__index__subtype__1__last . H16: src_offset + dst__index__subtype__1__last * 8 <= 2147483647 . H17: dst_index <= 268435454 . H18: src_offset + dst_index * 8 >= - 8 . 
H19: src_offset + dst_index * 8 + 8 >= src__index__subtype__1__first . H20: src_offset + dst_index * 8 + 15 <= src__index__subtype__1__last . H21: element(src, [src_offset + dst_index * 8 + 9]) >= 0 . H22: element(src, [src_offset + dst_index * 8 + 9]) <= 18446744073709551615 . H23: element(src, [src_offset + dst_index * 8 + 8]) >= 0 . H24: element(src, [src_offset + dst_index * 8 + 8]) <= 18446744073709551615 . H25: dst_index + 1 <= dst__index__subtype__1__last . H26: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 9]), 8) >= 0 . H27: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 9]), 8) <= 18446744073709551615 . H28: element(src, [src_offset + dst_index * 8 + 10]) >= 0 . H29: element(src, [src_offset + dst_index * 8 + 10]) <= 18446744073709551615 . H30: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 10]), 16) >= 0 . H31: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 10]), 16) <= 18446744073709551615 . H32: element(src, [src_offset + dst_index * 8 + 11]) >= 0 . H33: element(src, [src_offset + dst_index * 8 + 11]) <= 18446744073709551615 . H34: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 11]), 24) >= 0 . H35: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 11]), 24) <= 18446744073709551615 . H36: element(src, [src_offset + dst_index * 8 + 12]) >= 0 . H37: element(src, [src_offset + dst_index * 8 + 12]) <= 18446744073709551615 . H38: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 12]), 32) >= 0 . H39: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 12]), 32) <= 18446744073709551615 . H40: element(src, [src_offset + dst_index * 8 + 13]) >= 0 . H41: element(src, [src_offset + dst_index * 8 + 13]) <= 18446744073709551615 . H42: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 13]), 40) >= 0 . H43: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 13]), 40) <= 18446744073709551615 . H44: element(src, [src_offset + dst_index * 8 + 14]) >= 0 . H45: element(src, [src_offset + dst_index * 8 + 14]) <= 18446744073709551615 . H46: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 14]), 48) >= 0 . H47: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 14]), 48) <= 18446744073709551615 . H48: element(src, [src_offset + dst_index * 8 + 15]) >= 0 . H49: element(src, [src_offset + dst_index * 8 + 15]) <= 18446744073709551615 . H50: src_offset + dst_index * 8 <= 2147483632 . H51: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 15]), 56) >= 0 . H52: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 15]), 56) <= 18446744073709551615 . 
H53: (((((((element(src, [src_offset + dst_index * 8 + 8]) + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 9]), 8)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 10]), 16)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 11]), 24)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 12]), 32)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 13]), 40)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 14]), 48)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 15]), 56)) mod 18446744073709551616 >= 0 . H54: (((((((element(src, [src_offset + dst_index * 8 + 8]) + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 9]), 8)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 10]), 16)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 11]), 24)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 12]), 32)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 13]), 40)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 14]), 48)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 15]), 56)) mod 18446744073709551616 <= 18446744073709551615 . H55: integer__size >= 0 . H56: natural__size >= 0 . H57: spark__unsigned__byte__size >= 0 . H58: spark__unsigned__u64__size >= 0 . H59: spark__unsigned__shift_count__size >= 0 . H60: spark__crypto__word_count_t__size >= 0 . H61: dst__index__subtype__1__first <= dst__index__subtype__1__last . H62: src__index__subtype__1__first <= src__index__subtype__1__last . H63: src__index__subtype__1__first >= 0 . H64: src__index__subtype__1__last >= 0 . H65: src__index__subtype__1__last <= 2147483647 . H66: src__index__subtype__1__first <= 2147483647 . H67: dst__index__subtype__1__first >= 0 . H68: dst__index__subtype__1__last >= 0 . H69: dst__index__subtype__1__first <= 268435455 . -> C1: 8 * dst__index__subtype__1__last - 8 * dst_index > 7 . C2: dst_index + 1 <> dst__index__subtype__1__last -> src_offset + dst_index * 8 <= 2147483631 . For path(s) from assertion of line 172 to check associated with statement of line 183: procedure_get_64_lsb_first_23. *** true . /* all conclusions proved */ For path(s) from assertion of line 172 to run-time check associated with statement of line 184: procedure_get_64_lsb_first_24. *** true . /* all conclusions proved */ For path(s) from assertion of line 172 to check associated with statement of line 186: procedure_get_64_lsb_first_25. *** true . /* all conclusions proved */ For path(s) from assertion of line 172 to run-time check associated with statement of line 187: procedure_get_64_lsb_first_26. *** true . /* all conclusions proved */ For path(s) from assertion of line 172 to finish: procedure_get_64_lsb_first_27. *** true . 
/* all conclusions proved */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final.slg0000644000175000017500000316326511712513676026551 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Final RRS The following user defined rule files have been read: &&& skein.rlu SEM No semantic checks are performed on the rules. @@@@@@@@@@ VC: procedure_skein_512_final_1. @@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule skein_512_fi_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New H9: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_fi_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H10: fld_byte_count(fld_h(ctx)) <= 2147483647 -S- Applied substitution rule skein_512_fi_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_fi_rules(43). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule skein_512_fi_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_fi_rules(49). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule skein_512_fi_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_fi_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_fi_rules(61). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule skein_512_fi_rules(66). 
This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_fi_rules(67). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule skein_512_fi_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_fi_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(83). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H11: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_fi_rules(84). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H12: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 -S- Applied substitution rule skein_512_fi_rules(89). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(94). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_fi_rules(99). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 -S- Applied substitution rule skein_512_fi_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(104). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. 
New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) *** Proved C1: fld_hash_bit_len(fld_h(ctx)) > 0 using hypothesis H1. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_2. @@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule skein_512_fi_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New C1: true -S- Applied substitution rule skein_512_fi_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New C2: true *** Proved C1: true *** Proved C2: true *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_3. @@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H29 on reading formula in, to give: %%% H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H31 on reading formula in, to give: %%% H31: fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H32 on reading formula in, to give: %%% H32: fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H33 on reading formula in, to give: %%% H33: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first %%% Simplified H34 on reading formula in, to give: %%% H34: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last %%% Simplified H37 on reading formula in, to give: %%% H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first %%% Simplified H38 on reading formula in, 
to give: %%% H38: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last %%% Simplified H40 on reading formula in, to give: %%% H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first %%% Simplified H41 on reading formula in, to give: %%% H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last %%% Simplified H42 on reading formula in, to give: %%% H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first %%% Simplified H43 on reading formula in, to give: %%% H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last %%% Simplified H44 on reading formula in, to give: %%% H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first %%% Simplified H45 on reading formula in, to give: %%% H45: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last %%% Simplified H46 on reading formula in, to give: %%% H46: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first %%% Simplified H47 on reading formula in, to give: %%% H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last %%% Simplified H48 on reading formula in, to give: %%% H48: fld_byte_count(fld_h(ctx)) < skein_512_block_bytes_c %%% Simplified C1 on reading formula in, to give: %%% C1: fld_byte_count(fld_h(ctx)) < skein_512_block_bytes_c %%% Simplified C2 on reading formula in, to give: %%% C2: fld_hash_bit_len(fld_h(ctx)) > 0 *** Proved C1: fld_byte_count(fld_h(ctx)) < skein_512_block_bytes_c using hypothesis H48. *** Proved C2: fld_hash_bit_len(fld_h(ctx)) > 0 using hypothesis H26. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_4. @@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H29 on reading formula in, to give: %%% H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H31 on reading formula in, to give: %%% H31: fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H32 on reading formula in, to give: %%% H32: fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H33 on reading formula in, to give: %%% H33: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first %%% Simplified H34 on reading formula in, to give: %%% H34: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last %%% Simplified H37 on reading formula in, to give: %%% H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first %%% Simplified H38 on reading formula in, to 
give: %%% H38: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last %%% Simplified H40 on reading formula in, to give: %%% H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first %%% Simplified H41 on reading formula in, to give: %%% H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last %%% Simplified H42 on reading formula in, to give: %%% H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first %%% Simplified H43 on reading formula in, to give: %%% H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last %%% Simplified H44 on reading formula in, to give: %%% H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first %%% Simplified H45 on reading formula in, to give: %%% H45: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last %%% Simplified H46 on reading formula in, to give: %%% H46: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first %%% Simplified H47 on reading formula in, to give: %%% H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last %%% Simplified H48 on reading formula in, to give: %%% H48: fld_byte_count(fld_h(ctx)) < skein_512_block_bytes_c --- Hypothesis H49 has been replaced by "true". (It is already present, as H48). %%% Simplified H50 on reading formula in, to give: %%% H50: fld_hash_bit_len(fld_h(ctx)) > 0 %%% Simplified H51 on reading formula in, to give: %%% H51: fld_hash_bit_len(fld_h(local_ctx__1)) = fld_hash_bit_len(fld_h( ctx)) %%% Simplified H54 on reading formula in, to give: %%% H54: fld_byte_count(fld_h(local_ctx__1)) = fld_byte_count(fld_h(ctx)) %%% Simplified H55 on reading formula in, to give: %%% H55: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__1), [i___2]) and element(fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H56 on reading formula in, to give: %%% H56: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__1), [i___1]) and element(fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H74 has been replaced by "true". (It is already present, as H55). --- Hypothesis H75 has been replaced by "true". (It is already present, as H56). --- Hypothesis H76 has been replaced by "true". (It is already present, as H57). --- Hypothesis H77 has been replaced by "true". (It is already present, as H58). --- Hypothesis H78 has been replaced by "true". (It is already present, as H59). --- Hypothesis H79 has been replaced by "true". (It is already present, as H60). --- Hypothesis H82 has been replaced by "true". (It is already present, as H63). --- Hypothesis H83 has been replaced by "true". (It is already present, as H64). --- Hypothesis H85 has been replaced by "true". (It is already present, as H66). --- Hypothesis H86 has been replaced by "true". (It is already present, as H67). --- Hypothesis H87 has been replaced by "true". (It is already present, as H68). --- Hypothesis H88 has been replaced by "true". (It is already present, as H69). --- Hypothesis H89 has been replaced by "true". (It is already present, as H70). --- Hypothesis H90 has been replaced by "true". (It is already present, as H71). --- Hypothesis H91 has been replaced by "true". 
(It is already present, as H72). --- Hypothesis H92 has been replaced by "true". (It is already present, as H73). --- Hypothesis H93 has been replaced by "true". (It is already present, as H55). --- Hypothesis H94 has been replaced by "true". (It is already present, as H56). --- Hypothesis H95 has been replaced by "true". (It is already present, as H57). --- Hypothesis H96 has been replaced by "true". (It is already present, as H58). --- Hypothesis H97 has been replaced by "true". (It is already present, as H59). --- Hypothesis H98 has been replaced by "true". (It is already present, as H60). --- Hypothesis H101 has been replaced by "true". (It is already present, as H63). --- Hypothesis H102 has been replaced by "true". (It is already present, as H64). --- Hypothesis H104 has been replaced by "true". (It is already present, as H66). --- Hypothesis H105 has been replaced by "true". (It is already present, as H67). --- Hypothesis H106 has been replaced by "true". (It is already present, as H68). --- Hypothesis H107 has been replaced by "true". (It is already present, as H69). --- Hypothesis H108 has been replaced by "true". (It is already present, as H70). --- Hypothesis H109 has been replaced by "true". (It is already present, as H71). --- Hypothesis H110 has been replaced by "true". (It is already present, as H72). --- Hypothesis H111 has been replaced by "true". (It is already present, as H73). *** Proved C1: fld_byte_count(fld_h(local_ctx__1)) >= natural__first using hypothesis H57. *** Proved C2: fld_byte_count(fld_h(local_ctx__1)) <= natural__last using hypothesis H58. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_5. @@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H29 on reading formula in, to give: %%% H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H31 on reading formula in, to give: %%% H31: fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H32 on reading formula in, to give: %%% H32: fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H33 on reading formula in, to give: %%% H33: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first %%% Simplified H34 on reading formula in, to give: %%% H34: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last %%% Simplified H37 on reading formula in, to give: %%% H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first %%% Simplified H38 on reading formula in, to give: %%% H38: 
fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last %%% Simplified H40 on reading formula in, to give: %%% H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first %%% Simplified H41 on reading formula in, to give: %%% H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last %%% Simplified H42 on reading formula in, to give: %%% H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first %%% Simplified H43 on reading formula in, to give: %%% H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last %%% Simplified H44 on reading formula in, to give: %%% H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first %%% Simplified H45 on reading formula in, to give: %%% H45: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last %%% Simplified H46 on reading formula in, to give: %%% H46: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first %%% Simplified H47 on reading formula in, to give: %%% H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last %%% Simplified H48 on reading formula in, to give: %%% H48: not fld_byte_count(fld_h(ctx)) < skein_512_block_bytes_c --- Hypothesis H49 has been replaced by "true". (It is already present, as H29). --- Hypothesis H50 has been replaced by "true". (It is already present, as H30). --- Hypothesis H51 has been replaced by "true". (It is already present, as H31). --- Hypothesis H52 has been replaced by "true". (It is already present, as H32). --- Hypothesis H53 has been replaced by "true". (It is already present, as H33). --- Hypothesis H54 has been replaced by "true". (It is already present, as H34). --- Hypothesis H57 has been replaced by "true". (It is already present, as H37). --- Hypothesis H58 has been replaced by "true". (It is already present, as H38). --- Hypothesis H60 has been replaced by "true". (It is already present, as H40). --- Hypothesis H61 has been replaced by "true". (It is already present, as H41). --- Hypothesis H62 has been replaced by "true". (It is already present, as H42). --- Hypothesis H63 has been replaced by "true". (It is already present, as H43). --- Hypothesis H64 has been replaced by "true". (It is already present, as H44). --- Hypothesis H65 has been replaced by "true". (It is already present, as H45). --- Hypothesis H66 has been replaced by "true". (It is already present, as H46). --- Hypothesis H67 has been replaced by "true". (It is already present, as H47). --- Hypothesis H68 has been replaced by "true". (It is already present, as H29). --- Hypothesis H69 has been replaced by "true". (It is already present, as H30). --- Hypothesis H70 has been replaced by "true". (It is already present, as H31). --- Hypothesis H71 has been replaced by "true". (It is already present, as H32). --- Hypothesis H72 has been replaced by "true". (It is already present, as H33). --- Hypothesis H73 has been replaced by "true". (It is already present, as H34). --- Hypothesis H76 has been replaced by "true". (It is already present, as H37). --- Hypothesis H77 has been replaced by "true". (It is already present, as H38). --- Hypothesis H79 has been replaced by "true". (It is already present, as H40). --- Hypothesis H80 has been replaced by "true". (It is already present, as H41). --- Hypothesis H81 has been replaced by "true". (It is already present, as H42). --- Hypothesis H82 has been replaced by "true". (It is already present, as H43). 
--- Hypothesis H83 has been replaced by "true". (It is already present, as H44). --- Hypothesis H84 has been replaced by "true". (It is already present, as H45). --- Hypothesis H85 has been replaced by "true". (It is already present, as H46). --- Hypothesis H86 has been replaced by "true". (It is already present, as H47). %%% Simplified C1 on reading formula in, to give: %%% C1: fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified C2 on reading formula in, to give: %%% C2: fld_byte_count(fld_h(ctx)) <= natural__last *** Proved C1: fld_byte_count(fld_h(ctx)) >= natural__first using hypothesis H9. *** Proved C2: fld_byte_count(fld_h(ctx)) <= natural__last using hypothesis H10. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_6. @@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H29 on reading formula in, to give: %%% H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H31 on reading formula in, to give: %%% H31: fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H32 on reading formula in, to give: %%% H32: fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H33 on reading formula in, to give: %%% H33: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first %%% Simplified H34 on reading formula in, to give: %%% H34: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last %%% Simplified H37 on reading formula in, to give: %%% H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first %%% Simplified H38 on reading formula in, to give: %%% H38: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last %%% Simplified H40 on reading formula in, to give: %%% H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first %%% Simplified H41 on reading formula in, to give: %%% H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last %%% Simplified H42 on reading formula in, to give: %%% H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first %%% Simplified H43 on reading formula in, to give: %%% H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last %%% Simplified H44 on reading formula in, to give: %%% H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first %%% Simplified H45 on reading formula in, to give: %%% H45: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last %%% Simplified H46 on reading formula in, to give: %%% H46: 
fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first %%% Simplified H47 on reading formula in, to give: %%% H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last %%% Simplified H48 on reading formula in, to give: %%% H48: fld_byte_count(fld_h(ctx)) < skein_512_block_bytes_c --- Hypothesis H49 has been replaced by "true". (It is already present, as H48). %%% Simplified H50 on reading formula in, to give: %%% H50: fld_hash_bit_len(fld_h(ctx)) > 0 %%% Simplified H51 on reading formula in, to give: %%% H51: fld_hash_bit_len(fld_h(local_ctx__1)) = fld_hash_bit_len(fld_h( ctx)) %%% Simplified H54 on reading formula in, to give: %%% H54: fld_byte_count(fld_h(local_ctx__1)) = fld_byte_count(fld_h(ctx)) %%% Simplified H55 on reading formula in, to give: %%% H55: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__1), [i___2]) and element(fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H56 on reading formula in, to give: %%% H56: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__1), [i___1]) and element(fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H74 has been replaced by "true". (It is already present, as H55). --- Hypothesis H75 has been replaced by "true". (It is already present, as H56). --- Hypothesis H76 has been replaced by "true". (It is already present, as H57). --- Hypothesis H77 has been replaced by "true". (It is already present, as H58). --- Hypothesis H78 has been replaced by "true". (It is already present, as H59). --- Hypothesis H79 has been replaced by "true". (It is already present, as H60). --- Hypothesis H82 has been replaced by "true". (It is already present, as H63). --- Hypothesis H83 has been replaced by "true". (It is already present, as H64). --- Hypothesis H85 has been replaced by "true". (It is already present, as H66). --- Hypothesis H86 has been replaced by "true". (It is already present, as H67). --- Hypothesis H87 has been replaced by "true". (It is already present, as H68). --- Hypothesis H88 has been replaced by "true". (It is already present, as H69). --- Hypothesis H89 has been replaced by "true". (It is already present, as H70). --- Hypothesis H90 has been replaced by "true". (It is already present, as H71). --- Hypothesis H91 has been replaced by "true". (It is already present, as H72). --- Hypothesis H92 has been replaced by "true". (It is already present, as H73). --- Hypothesis H93 has been replaced by "true". (It is already present, as H55). --- Hypothesis H94 has been replaced by "true". (It is already present, as H56). --- Hypothesis H95 has been replaced by "true". (It is already present, as H57). --- Hypothesis H96 has been replaced by "true". (It is already present, as H58). --- Hypothesis H97 has been replaced by "true". (It is already present, as H59). --- Hypothesis H98 has been replaced by "true". (It is already present, as H60). --- Hypothesis H101 has been replaced by "true". (It is already present, as H63). --- Hypothesis H102 has been replaced by "true". (It is already present, as H64). --- Hypothesis H104 has been replaced by "true". (It is already present, as H66). --- Hypothesis H105 has been replaced by "true". (It is already present, as H67). 
--- Hypothesis H106 has been replaced by "true". (It is already present, as H68). --- Hypothesis H107 has been replaced by "true". (It is already present, as H69). --- Hypothesis H108 has been replaced by "true". (It is already present, as H70). --- Hypothesis H109 has been replaced by "true". (It is already present, as H71). --- Hypothesis H110 has been replaced by "true". (It is already present, as H72). --- Hypothesis H111 has been replaced by "true". (It is already present, as H73). --- Hypothesis H112 has been replaced by "true". (It is already present, as H57). --- Hypothesis H113 has been replaced by "true". (It is already present, as H58). --- Hypothesis H114 has been replaced by "true". (It is already present, as H55). --- Hypothesis H115 has been replaced by "true". (It is already present, as H56). --- Hypothesis H116 has been replaced by "true". (It is already present, as H57). --- Hypothesis H117 has been replaced by "true". (It is already present, as H58). --- Hypothesis H118 has been replaced by "true". (It is already present, as H59). --- Hypothesis H119 has been replaced by "true". (It is already present, as H60). --- Hypothesis H122 has been replaced by "true". (It is already present, as H63). --- Hypothesis H123 has been replaced by "true". (It is already present, as H64). --- Hypothesis H125 has been replaced by "true". (It is already present, as H66). --- Hypothesis H126 has been replaced by "true". (It is already present, as H67). --- Hypothesis H127 has been replaced by "true". (It is already present, as H68). --- Hypothesis H128 has been replaced by "true". (It is already present, as H69). --- Hypothesis H129 has been replaced by "true". (It is already present, as H70). --- Hypothesis H130 has been replaced by "true". (It is already present, as H71). --- Hypothesis H131 has been replaced by "true". (It is already present, as H72). --- Hypothesis H132 has been replaced by "true". (It is already present, as H73). %%% Simplified H133 on reading formula in, to give: %%% H133: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__1), [i___1]) and element(fld_b(local_ctx__1), [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H134 has been replaced by "true". (It is already present, as H57). --- Hypothesis H135 has been replaced by "true". (It is already present, as H58). %%% Simplified C12 on reading formula in, to give: %%% C12: 63 <= skein_512_block_bytes_index__last %%% Simplified C13 on reading formula in, to give: %%% C13: 63 <= skein_512_block_bytes_index__last %%% Simplified C15 on reading formula in, to give: %%% C15: 63 <= natural__last *** Proved C1: fld_byte_count(fld_h(local_ctx__1)) >= natural__first using hypothesis H57. *** Proved C2: fld_byte_count(fld_h(local_ctx__1)) <= natural__last using hypothesis H58. *** Proved C7: fld_hash_bit_len(fld_h(local_ctx__1)) >= initialized_hash_bit_length__first using hypotheses H1 & H51. *** Proved C8: fld_hash_bit_len(fld_h(local_ctx__1)) <= initialized_hash_bit_length__last using hypotheses H2 & H51. *** Proved C9: fld_byte_count(fld_h(local_ctx__1)) >= skein_512_block_bytes_count__first using hypotheses H3 & H54. *** Proved C10: fld_byte_count(fld_h(local_ctx__1)) <= skein_512_block_bytes_count__last using hypotheses H4 & H54. -S- Applied substitution rule skein_512_fi_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. 
New C3: true -S- Applied substitution rule skein_512_fi_rules(109). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New C4: true -S- Applied substitution rule skein_512_fi_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New H9: fld_byte_count(fld_h(ctx)) >= 0 New H31: fld_byte_count(fld_h(ctx)) >= 0 New H57: fld_byte_count(fld_h(local_ctx__1)) >= 0 New C5: true -S- Applied substitution rule skein_512_fi_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H10: fld_byte_count(fld_h(ctx)) <= 2147483647 New H32: fld_byte_count(fld_h(ctx)) <= 2147483647 New H58: fld_byte_count(fld_h(local_ctx__1)) <= 2147483647 New C6: true New C14: skein_512_block_bytes_index__last <= 2147483647 New C15: true -S- Applied substitution rule skein_512_fi_rules(104). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H55: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(local_ctx__1), [i___2]) and element(fld_b(local_ctx__1), [i___2] ) <= spark__unsigned__byte__last) New H133: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= 63 -> spark__unsigned__byte__first <= element( fld_b(local_ctx__1), [i___1]) and element(fld_b(local_ctx__1), [i___1] ) <= spark__unsigned__byte__last) New C12: true New C13: true New C14: true *** Proved C3: true *** Proved C4: true *** Proved C5: true *** Proved C6: true *** Proved C15: true *** Proved C12: true *** Proved C13: true *** Proved C14: true -S- Applied substitution rule skein_512_fi_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H48: fld_byte_count(fld_h(ctx)) < 64 New H53: fld_byte_count(fld_h(local_ctx__1)) < 64 -S- Applied substitution rule skein_512_fi_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H63: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= 0 -S- Applied substitution rule skein_512_fi_rules(43). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H38: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H64: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= 63 -S- Applied substitution rule skein_512_fi_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H66: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= 0 -S- Applied substitution rule skein_512_fi_rules(49). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. 
New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H67: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= 127 -S- Applied substitution rule skein_512_fi_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: true New H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H55: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__1), [i___2]) and element(fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last) New H133: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(local_ctx__1), [i___1]) and element(fld_b(local_ctx__1), [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_fi_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H28: true New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H55: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__1), [i___2]) and element(fld_b(local_ctx__1), [i___2]) <= 255) New H133: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(local_ctx__1), [i___1]) and element(fld_b(local_ctx__1), [i___1]) <= 255) -S- Applied substitution rule skein_512_fi_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H68: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= 0 -S- Applied substitution rule skein_512_fi_rules(61). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H69: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= 65535 -S- Applied substitution rule skein_512_fi_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H70: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= 0 -S- Applied substitution rule skein_512_fi_rules(67). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H45: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H71: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= 4294967295 -S- Applied substitution rule skein_512_fi_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H46: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H56: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__1), [i___1]) and element(fld_x(local_ctx__1), [i___1] ) <= spark__unsigned__u64__last) New H72: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= 0 -S- Applied substitution rule skein_512_fi_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H73: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= 18446744073709551615 New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H56: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__1), [i___1]) and element(fld_x(local_ctx__1), [i___1] ) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(83). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H11: fld_hash_bit_len(fld_h(ctx)) >= 0 New H33: fld_hash_bit_len(fld_h(ctx)) >= 0 New H59: fld_hash_bit_len(fld_h(local_ctx__1)) >= 0 -S- Applied substitution rule skein_512_fi_rules(84). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H12: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H34: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H60: fld_hash_bit_len(fld_h(local_ctx__1)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 -S- Applied substitution rule skein_512_fi_rules(89). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. 
New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H56: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__1) , [i___1]) and element(fld_x(local_ctx__1), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(94). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H56: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__1), [i___1]) and element(fld_x(local_ctx__1) , [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_fi_rules(99). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 -S- Applied substitution rule skein_512_fi_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H55: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__1), [i___2]) and element(fld_b(local_ctx__1) , [i___2]) <= 255) New H133: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(local_ctx__1), [i___1]) and element(fld_b(local_ctx__1) , [i___1]) <= 255) New C11: true *** Proved C11: true *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_7. 
@@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H29 on reading formula in, to give: %%% H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H31 on reading formula in, to give: %%% H31: fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H32 on reading formula in, to give: %%% H32: fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H33 on reading formula in, to give: %%% H33: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first %%% Simplified H34 on reading formula in, to give: %%% H34: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last %%% Simplified H37 on reading formula in, to give: %%% H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first %%% Simplified H38 on reading formula in, to give: %%% H38: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last %%% Simplified H40 on reading formula in, to give: %%% H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first %%% Simplified H41 on reading formula in, to give: %%% H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last %%% Simplified H42 on reading formula in, to give: %%% H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first %%% Simplified H43 on reading formula in, to give: %%% H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last %%% Simplified H44 on reading formula in, to give: %%% H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first %%% Simplified H45 on reading formula in, to give: %%% H45: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last %%% Simplified H46 on reading formula in, to give: %%% H46: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first %%% Simplified H47 on reading formula in, to give: %%% H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last %%% Simplified H48 on reading formula in, to give: %%% H48: not fld_byte_count(fld_h(ctx)) < skein_512_block_bytes_c --- Hypothesis H49 has been replaced by "true". (It is already present, as H29). --- Hypothesis H50 has been replaced by "true". (It is already present, as H30). --- Hypothesis H51 has been replaced by "true". (It is already present, as H31). --- Hypothesis H52 has been replaced by "true". (It is already present, as H32). --- Hypothesis H53 has been replaced by "true". (It is already present, as H33). 
--- Hypothesis H54 has been replaced by "true". (It is already present, as H34). --- Hypothesis H57 has been replaced by "true". (It is already present, as H37). --- Hypothesis H58 has been replaced by "true". (It is already present, as H38). --- Hypothesis H60 has been replaced by "true". (It is already present, as H40). --- Hypothesis H61 has been replaced by "true". (It is already present, as H41). --- Hypothesis H62 has been replaced by "true". (It is already present, as H42). --- Hypothesis H63 has been replaced by "true". (It is already present, as H43). --- Hypothesis H64 has been replaced by "true". (It is already present, as H44). --- Hypothesis H65 has been replaced by "true". (It is already present, as H45). --- Hypothesis H66 has been replaced by "true". (It is already present, as H46). --- Hypothesis H67 has been replaced by "true". (It is already present, as H47). --- Hypothesis H68 has been replaced by "true". (It is already present, as H29). --- Hypothesis H69 has been replaced by "true". (It is already present, as H30). --- Hypothesis H70 has been replaced by "true". (It is already present, as H31). --- Hypothesis H71 has been replaced by "true". (It is already present, as H32). --- Hypothesis H72 has been replaced by "true". (It is already present, as H33). --- Hypothesis H73 has been replaced by "true". (It is already present, as H34). --- Hypothesis H76 has been replaced by "true". (It is already present, as H37). --- Hypothesis H77 has been replaced by "true". (It is already present, as H38). --- Hypothesis H79 has been replaced by "true". (It is already present, as H40). --- Hypothesis H80 has been replaced by "true". (It is already present, as H41). --- Hypothesis H81 has been replaced by "true". (It is already present, as H42). --- Hypothesis H82 has been replaced by "true". (It is already present, as H43). --- Hypothesis H83 has been replaced by "true". (It is already present, as H44). --- Hypothesis H84 has been replaced by "true". (It is already present, as H45). --- Hypothesis H85 has been replaced by "true". (It is already present, as H46). --- Hypothesis H86 has been replaced by "true". (It is already present, as H47). --- Hypothesis H87 has been replaced by "true". (It is already present, as H31). --- Hypothesis H88 has been replaced by "true". (It is already present, as H32). --- Hypothesis H89 has been replaced by "true". (It is already present, as H29). --- Hypothesis H90 has been replaced by "true". (It is already present, as H30). --- Hypothesis H91 has been replaced by "true". (It is already present, as H31). --- Hypothesis H92 has been replaced by "true". (It is already present, as H32). --- Hypothesis H93 has been replaced by "true". (It is already present, as H33). --- Hypothesis H94 has been replaced by "true". (It is already present, as H34). --- Hypothesis H97 has been replaced by "true". (It is already present, as H37). --- Hypothesis H98 has been replaced by "true". (It is already present, as H38). --- Hypothesis H100 has been replaced by "true". (It is already present, as H40). --- Hypothesis H101 has been replaced by "true". (It is already present, as H41). --- Hypothesis H102 has been replaced by "true". (It is already present, as H42). --- Hypothesis H103 has been replaced by "true". (It is already present, as H43). --- Hypothesis H104 has been replaced by "true". (It is already present, as H44). --- Hypothesis H105 has been replaced by "true". (It is already present, as H45). --- Hypothesis H106 has been replaced by "true". 
(It is already present, as H46). --- Hypothesis H107 has been replaced by "true". (It is already present, as H47). %%% Simplified H108 on reading formula in, to give: %%% H108: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H109 has been replaced by "true". (It is already present, as H31). --- Hypothesis H110 has been replaced by "true". (It is already present, as H32). %%% Simplified C1 on reading formula in, to give: %%% C1: fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified C2 on reading formula in, to give: %%% C2: fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified C7 on reading formula in, to give: %%% C7: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first %%% Simplified C8 on reading formula in, to give: %%% C8: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last %%% Simplified C9 on reading formula in, to give: %%% C9: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first %%% Simplified C10 on reading formula in, to give: %%% C10: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last %%% Simplified C12 on reading formula in, to give: %%% C12: 63 <= skein_512_block_bytes_index__last %%% Simplified C13 on reading formula in, to give: %%% C13: 63 <= skein_512_block_bytes_index__last %%% Simplified C15 on reading formula in, to give: %%% C15: 63 <= natural__last *** Proved C1: fld_byte_count(fld_h(ctx)) >= natural__first using hypothesis H9. *** Proved C2: fld_byte_count(fld_h(ctx)) <= natural__last using hypothesis H10. *** Proved C7: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first using hypothesis H1. *** Proved C8: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last using hypothesis H2. *** Proved C9: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first using hypothesis H3. *** Proved C10: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last using hypothesis H4. -S- Applied substitution rule skein_512_fi_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New C3: true -S- Applied substitution rule skein_512_fi_rules(109). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New C4: true -S- Applied substitution rule skein_512_fi_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New H9: fld_byte_count(fld_h(ctx)) >= 0 New H31: fld_byte_count(fld_h(ctx)) >= 0 New C5: true -S- Applied substitution rule skein_512_fi_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H10: fld_byte_count(fld_h(ctx)) <= 2147483647 New H32: fld_byte_count(fld_h(ctx)) <= 2147483647 New C6: true New C14: skein_512_block_bytes_index__last <= 2147483647 New C15: true -S- Applied substitution rule skein_512_fi_rules(104). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. 
New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H108: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= 63 -> spark__unsigned__byte__first <= element( fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= spark__unsigned__byte__last) New C12: true New C13: true New C14: true *** Proved C3: true *** Proved C4: true *** Proved C5: true *** Proved C6: true *** Proved C15: true *** Proved C12: true *** Proved C13: true *** Proved C14: true >>> Restructured hypothesis H48 into: >>> H48: skein_512_block_bytes_c <= fld_byte_count(fld_h(ctx)) -S- Applied substitution rule skein_512_fi_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H48: 64 <= fld_byte_count(fld_h(ctx)) -S- Applied substitution rule skein_512_fi_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_fi_rules(43). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H38: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule skein_512_fi_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_fi_rules(49). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule skein_512_fi_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: true New H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H108: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_fi_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. 
New H28: true New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H108: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= 255) -S- Applied substitution rule skein_512_fi_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_fi_rules(61). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule skein_512_fi_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_fi_rules(67). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H45: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule skein_512_fi_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H46: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_fi_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(83). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H11: fld_hash_bit_len(fld_h(ctx)) >= 0 New H33: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_fi_rules(84). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. 
New H12: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H34: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 -S- Applied substitution rule skein_512_fi_rules(89). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(94). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_fi_rules(99). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 -S- Applied substitution rule skein_512_fi_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H108: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= 255) New C11: true %%% Hypotheses H48 & H4 together imply that 64 = fld_byte_count(fld_h(ctx)). H48 & H4 have therefore been deleted and a new H111 added to this effect. *** Proved C11: true *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_8. 
@@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H29 on reading formula in, to give: %%% H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H31 on reading formula in, to give: %%% H31: fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H32 on reading formula in, to give: %%% H32: fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H33 on reading formula in, to give: %%% H33: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first %%% Simplified H34 on reading formula in, to give: %%% H34: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last %%% Simplified H37 on reading formula in, to give: %%% H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first %%% Simplified H38 on reading formula in, to give: %%% H38: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last %%% Simplified H40 on reading formula in, to give: %%% H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first %%% Simplified H41 on reading formula in, to give: %%% H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last %%% Simplified H42 on reading formula in, to give: %%% H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first %%% Simplified H43 on reading formula in, to give: %%% H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last %%% Simplified H44 on reading formula in, to give: %%% H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first %%% Simplified H45 on reading formula in, to give: %%% H45: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last %%% Simplified H46 on reading formula in, to give: %%% H46: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first %%% Simplified H47 on reading formula in, to give: %%% H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last %%% Simplified H48 on reading formula in, to give: %%% H48: fld_byte_count(fld_h(ctx)) < skein_512_block_bytes_c --- Hypothesis H49 has been replaced by "true". (It is already present, as H48). 
%%% Simplified H50 on reading formula in, to give: %%% H50: fld_hash_bit_len(fld_h(ctx)) > 0 %%% Simplified H51 on reading formula in, to give: %%% H51: fld_hash_bit_len(fld_h(local_ctx__1)) = fld_hash_bit_len(fld_h( ctx)) %%% Simplified H54 on reading formula in, to give: %%% H54: fld_byte_count(fld_h(local_ctx__1)) = fld_byte_count(fld_h(ctx)) %%% Simplified H55 on reading formula in, to give: %%% H55: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__1), [i___2]) and element(fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H56 on reading formula in, to give: %%% H56: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__1), [i___1]) and element(fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H74 has been replaced by "true". (It is already present, as H55). --- Hypothesis H75 has been replaced by "true". (It is already present, as H56). --- Hypothesis H76 has been replaced by "true". (It is already present, as H57). --- Hypothesis H77 has been replaced by "true". (It is already present, as H58). --- Hypothesis H78 has been replaced by "true". (It is already present, as H59). --- Hypothesis H79 has been replaced by "true". (It is already present, as H60). --- Hypothesis H82 has been replaced by "true". (It is already present, as H63). --- Hypothesis H83 has been replaced by "true". (It is already present, as H64). --- Hypothesis H85 has been replaced by "true". (It is already present, as H66). --- Hypothesis H86 has been replaced by "true". (It is already present, as H67). --- Hypothesis H87 has been replaced by "true". (It is already present, as H68). --- Hypothesis H88 has been replaced by "true". (It is already present, as H69). --- Hypothesis H89 has been replaced by "true". (It is already present, as H70). --- Hypothesis H90 has been replaced by "true". (It is already present, as H71). --- Hypothesis H91 has been replaced by "true". (It is already present, as H72). --- Hypothesis H92 has been replaced by "true". (It is already present, as H73). --- Hypothesis H93 has been replaced by "true". (It is already present, as H55). --- Hypothesis H94 has been replaced by "true". (It is already present, as H56). --- Hypothesis H95 has been replaced by "true". (It is already present, as H57). --- Hypothesis H96 has been replaced by "true". (It is already present, as H58). --- Hypothesis H97 has been replaced by "true". (It is already present, as H59). --- Hypothesis H98 has been replaced by "true". (It is already present, as H60). --- Hypothesis H101 has been replaced by "true". (It is already present, as H63). --- Hypothesis H102 has been replaced by "true". (It is already present, as H64). --- Hypothesis H104 has been replaced by "true". (It is already present, as H66). --- Hypothesis H105 has been replaced by "true". (It is already present, as H67). --- Hypothesis H106 has been replaced by "true". (It is already present, as H68). --- Hypothesis H107 has been replaced by "true". (It is already present, as H69). --- Hypothesis H108 has been replaced by "true". (It is already present, as H70). --- Hypothesis H109 has been replaced by "true". (It is already present, as H71). --- Hypothesis H110 has been replaced by "true". (It is already present, as H72). 
--- Hypothesis H111 has been replaced by "true". (It is already present, as H73). --- Hypothesis H112 has been replaced by "true". (It is already present, as H57). --- Hypothesis H113 has been replaced by "true". (It is already present, as H58). --- Hypothesis H114 has been replaced by "true". (It is already present, as H55). --- Hypothesis H115 has been replaced by "true". (It is already present, as H56). --- Hypothesis H116 has been replaced by "true". (It is already present, as H57). --- Hypothesis H117 has been replaced by "true". (It is already present, as H58). --- Hypothesis H118 has been replaced by "true". (It is already present, as H59). --- Hypothesis H119 has been replaced by "true". (It is already present, as H60). --- Hypothesis H122 has been replaced by "true". (It is already present, as H63). --- Hypothesis H123 has been replaced by "true". (It is already present, as H64). --- Hypothesis H125 has been replaced by "true". (It is already present, as H66). --- Hypothesis H126 has been replaced by "true". (It is already present, as H67). --- Hypothesis H127 has been replaced by "true". (It is already present, as H68). --- Hypothesis H128 has been replaced by "true". (It is already present, as H69). --- Hypothesis H129 has been replaced by "true". (It is already present, as H70). --- Hypothesis H130 has been replaced by "true". (It is already present, as H71). --- Hypothesis H131 has been replaced by "true". (It is already present, as H72). --- Hypothesis H132 has been replaced by "true". (It is already present, as H73). %%% Simplified H133 on reading formula in, to give: %%% H133: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__1), [i___1]) and element(fld_b(local_ctx__1), [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H134 has been replaced by "true". (It is already present, as H57). --- Hypothesis H135 has been replaced by "true". (It is already present, as H58). --- Hypothesis H136 has been replaced by "true". (It is already present, as H57). --- Hypothesis H137 has been replaced by "true". (It is already present, as H58). %%% Simplified H147 on reading formula in, to give: %%% H147: 63 <= skein_512_block_bytes_index__last %%% Simplified H148 on reading formula in, to give: %%% H148: 63 <= skein_512_block_bytes_index__last %%% Simplified H150 on reading formula in, to give: %%% H150: 63 <= natural__last %%% Simplified H157 on reading formula in, to give: %%% H157: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H158 on reading formula in, to give: %%% H158: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H176 has been replaced by "true". (It is already present, as H157). --- Hypothesis H177 has been replaced by "true". (It is already present, as H158). --- Hypothesis H178 has been replaced by "true". (It is already present, as H159). --- Hypothesis H179 has been replaced by "true". (It is already present, as H160). --- Hypothesis H180 has been replaced by "true". 
(It is already present, as H161). --- Hypothesis H181 has been replaced by "true". (It is already present, as H162). --- Hypothesis H184 has been replaced by "true". (It is already present, as H165). --- Hypothesis H185 has been replaced by "true". (It is already present, as H166). --- Hypothesis H187 has been replaced by "true". (It is already present, as H168). --- Hypothesis H188 has been replaced by "true". (It is already present, as H169). --- Hypothesis H189 has been replaced by "true". (It is already present, as H170). --- Hypothesis H190 has been replaced by "true". (It is already present, as H171). --- Hypothesis H191 has been replaced by "true". (It is already present, as H172). --- Hypothesis H192 has been replaced by "true". (It is already present, as H173). --- Hypothesis H193 has been replaced by "true". (It is already present, as H174). --- Hypothesis H194 has been replaced by "true". (It is already present, as H175). %%% Simplified C3 on reading formula in, to give: %%% C3: true *** Proved C3: true -S- Applied substitution rule skein_512_fi_rules(125). This was achieved by replacing all occurrences of output_byte_count_t__first by: 1. New C1: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= 1 -S- Applied substitution rule skein_512_fi_rules(126). This was achieved by replacing all occurrences of output_byte_count_t__last by: 268435455. New C2: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= 268435455 -S- Applied substitution rule skein_512_fi_rules(6). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New C4: fld_hash_bit_len(fld_h(local_ctx__2)) >= - 2147483655 -S- Applied substitution rule skein_512_fi_rules(7). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New C5: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 *** Proved C4: fld_hash_bit_len(fld_h(local_ctx__2)) >= - 2147483655 using hypotheses H52 & H153. -S- Applied substitution rule skein_512_fi_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H48: fld_byte_count(fld_h(ctx)) < 64 New H53: fld_byte_count(fld_h(local_ctx__1)) < 64 -S- Applied substitution rule skein_512_fi_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New H9: fld_byte_count(fld_h(ctx)) >= 0 New H31: fld_byte_count(fld_h(ctx)) >= 0 New H57: fld_byte_count(fld_h(local_ctx__1)) >= 0 New H140: true New H159: fld_byte_count(fld_h(local_ctx__2)) >= 0 -S- Applied substitution rule skein_512_fi_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H10: fld_byte_count(fld_h(ctx)) <= 2147483647 New H32: fld_byte_count(fld_h(ctx)) <= 2147483647 New H58: fld_byte_count(fld_h(local_ctx__1)) <= 2147483647 New H141: true New H149: skein_512_block_bytes_index__last <= 2147483647 New H150: true New H160: fld_byte_count(fld_h(local_ctx__2)) <= 2147483647 -S- Applied substitution rule skein_512_fi_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H63: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= 0 New H165: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(43). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. 
New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H38: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H64: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= 63 New H166: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= 63 -S- Applied substitution rule skein_512_fi_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H66: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= 0 New H168: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(49). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H67: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= 127 New H169: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= 127 -S- Applied substitution rule skein_512_fi_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: true New H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H55: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__1), [i___2]) and element(fld_b(local_ctx__1), [i___2] ) <= spark__unsigned__byte__last) New H133: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__1), [i___1]) and element(fld_b(local_ctx__1), [i___1] ) <= spark__unsigned__byte__last) New H157: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2] ) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_fi_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. 
New H28: true New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H55: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__1), [i___2]) and element(fld_b(local_ctx__1), [i___2] ) <= 255) New H133: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__1), [i___1]) and element(fld_b(local_ctx__1), [i___1] ) <= 255) New H157: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2] ) <= 255) -S- Applied substitution rule skein_512_fi_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H68: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= 0 New H170: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(61). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H69: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= 65535 New H171: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= 65535 -S- Applied substitution rule skein_512_fi_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H70: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= 0 New H172: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(67). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H45: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H71: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= 4294967295 New H173: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= 4294967295 -S- Applied substitution rule skein_512_fi_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H46: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H56: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__1), [i___1]) and element(fld_x(local_ctx__1), [i___1] ) <= spark__unsigned__u64__last) New H72: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= 0 New H158: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1] ) <= spark__unsigned__u64__last) New H174: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H73: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= 18446744073709551615 New H175: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= 18446744073709551615 New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H56: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__1), [i___1]) and element(fld_x(local_ctx__1), [i___1] ) <= 18446744073709551615) New H158: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1] ) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(83). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H11: fld_hash_bit_len(fld_h(ctx)) >= 0 New H33: fld_hash_bit_len(fld_h(ctx)) >= 0 New H59: fld_hash_bit_len(fld_h(local_ctx__1)) >= 0 New H161: fld_hash_bit_len(fld_h(local_ctx__2)) >= 0 -S- Applied substitution rule skein_512_fi_rules(84). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H12: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H34: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H60: fld_hash_bit_len(fld_h(local_ctx__1)) <= 2147483640 New H162: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. 
New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 New H142: fld_hash_bit_len(fld_h(local_ctx__1)) >= 1 New H151: fld_hash_bit_len(fld_h(local_ctx__2)) >= 1 -S- Applied substitution rule skein_512_fi_rules(89). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H143: fld_hash_bit_len(fld_h(local_ctx__1)) <= 2147483640 New H152: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H56: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__1) , [i___1]) and element(fld_x(local_ctx__1), [i___1]) <= 18446744073709551615) New H158: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__2) , [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(94). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H56: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__1), [i___1]) and element(fld_x(local_ctx__1) , [i___1]) <= 18446744073709551615) New H158: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2) , [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 New H144: fld_byte_count(fld_h(local_ctx__1)) >= 0 New H154: fld_byte_count(fld_h(local_ctx__2)) >= 0 -S- Applied substitution rule skein_512_fi_rules(99). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 New H145: fld_byte_count(fld_h(local_ctx__1)) <= 64 New H155: fld_byte_count(fld_h(local_ctx__2)) <= 64 -S- Applied substitution rule skein_512_fi_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. 
New H146: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H55: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__1) , [i___2]) and element(fld_b(local_ctx__1), [i___2]) <= 255) New H133: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__1) , [i___1]) and element(fld_b(local_ctx__1), [i___1]) <= 255) New H157: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__2) , [i___2]) and element(fld_b(local_ctx__2), [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(104). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H147: true New H148: true New H149: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H55: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__1), [i___2]) and element(fld_b(local_ctx__1) , [i___2]) <= 255) New H133: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(local_ctx__1), [i___1]) and element(fld_b(local_ctx__1) , [i___1]) <= 255) New H157: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2) , [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H138: true -S- Applied substitution rule skein_512_fi_rules(109). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H139: true *** Proved C5: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 using hypothesis H162. --- Eliminated hypothesis H13 (true-hypothesis). --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H17 (true-hypothesis). --- Eliminated hypothesis H35 (true-hypothesis). --- Eliminated hypothesis H36 (true-hypothesis). --- Eliminated hypothesis H39 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H65 (true-hypothesis). --- Eliminated hypothesis H74 (true-hypothesis). --- Eliminated hypothesis H75 (true-hypothesis). --- Eliminated hypothesis H76 (true-hypothesis). --- Eliminated hypothesis H77 (true-hypothesis). --- Eliminated hypothesis H78 (true-hypothesis). --- Eliminated hypothesis H79 (true-hypothesis). --- Eliminated hypothesis H80 (true-hypothesis). --- Eliminated hypothesis H81 (true-hypothesis). --- Eliminated hypothesis H82 (true-hypothesis). --- Eliminated hypothesis H83 (true-hypothesis). --- Eliminated hypothesis H84 (true-hypothesis). --- Eliminated hypothesis H85 (true-hypothesis). --- Eliminated hypothesis H86 (true-hypothesis). --- Eliminated hypothesis H87 (true-hypothesis). --- Eliminated hypothesis H88 (true-hypothesis). 
--- Eliminated hypothesis H89 (true-hypothesis). --- Eliminated hypothesis H90 (true-hypothesis). --- Eliminated hypothesis H91 (true-hypothesis). --- Eliminated hypothesis H92 (true-hypothesis). --- Eliminated hypothesis H93 (true-hypothesis). --- Eliminated hypothesis H94 (true-hypothesis). --- Eliminated hypothesis H95 (true-hypothesis). --- Eliminated hypothesis H96 (true-hypothesis). --- Eliminated hypothesis H97 (true-hypothesis). --- Eliminated hypothesis H98 (true-hypothesis). --- Eliminated hypothesis H99 (true-hypothesis). --- Eliminated hypothesis H100 (true-hypothesis). --- Eliminated hypothesis H101 (true-hypothesis). --- Eliminated hypothesis H102 (true-hypothesis). --- Eliminated hypothesis H103 (true-hypothesis). --- Eliminated hypothesis H104 (true-hypothesis). --- Eliminated hypothesis H105 (true-hypothesis). --- Eliminated hypothesis H106 (true-hypothesis). --- Eliminated hypothesis H107 (true-hypothesis). --- Eliminated hypothesis H108 (true-hypothesis). --- Eliminated hypothesis H109 (true-hypothesis). --- Eliminated hypothesis H110 (true-hypothesis). --- Eliminated hypothesis H111 (true-hypothesis). --- Eliminated hypothesis H112 (true-hypothesis). --- Eliminated hypothesis H113 (true-hypothesis). --- Eliminated hypothesis H114 (true-hypothesis). --- Eliminated hypothesis H115 (true-hypothesis). --- Eliminated hypothesis H116 (true-hypothesis). --- Eliminated hypothesis H117 (true-hypothesis). --- Eliminated hypothesis H118 (true-hypothesis). --- Eliminated hypothesis H119 (true-hypothesis). --- Eliminated hypothesis H120 (true-hypothesis). --- Eliminated hypothesis H121 (true-hypothesis). --- Eliminated hypothesis H122 (true-hypothesis). --- Eliminated hypothesis H123 (true-hypothesis). --- Eliminated hypothesis H124 (true-hypothesis). --- Eliminated hypothesis H125 (true-hypothesis). --- Eliminated hypothesis H126 (true-hypothesis). --- Eliminated hypothesis H127 (true-hypothesis). --- Eliminated hypothesis H128 (true-hypothesis). --- Eliminated hypothesis H129 (true-hypothesis). --- Eliminated hypothesis H130 (true-hypothesis). --- Eliminated hypothesis H131 (true-hypothesis). --- Eliminated hypothesis H132 (true-hypothesis). --- Eliminated hypothesis H134 (true-hypothesis). --- Eliminated hypothesis H135 (true-hypothesis). --- Eliminated hypothesis H136 (true-hypothesis). --- Eliminated hypothesis H137 (true-hypothesis). --- Eliminated hypothesis H163 (true-hypothesis). --- Eliminated hypothesis H164 (true-hypothesis). --- Eliminated hypothesis H167 (true-hypothesis). --- Eliminated hypothesis H176 (true-hypothesis). --- Eliminated hypothesis H177 (true-hypothesis). --- Eliminated hypothesis H178 (true-hypothesis). --- Eliminated hypothesis H179 (true-hypothesis). --- Eliminated hypothesis H180 (true-hypothesis). --- Eliminated hypothesis H181 (true-hypothesis). --- Eliminated hypothesis H182 (true-hypothesis). --- Eliminated hypothesis H183 (true-hypothesis). --- Eliminated hypothesis H184 (true-hypothesis). --- Eliminated hypothesis H185 (true-hypothesis). --- Eliminated hypothesis H186 (true-hypothesis). --- Eliminated hypothesis H187 (true-hypothesis). --- Eliminated hypothesis H188 (true-hypothesis). --- Eliminated hypothesis H189 (true-hypothesis). --- Eliminated hypothesis H190 (true-hypothesis). --- Eliminated hypothesis H191 (true-hypothesis). --- Eliminated hypothesis H192 (true-hypothesis). --- Eliminated hypothesis H193 (true-hypothesis). --- Eliminated hypothesis H194 (true-hypothesis). --- Eliminated hypothesis H140 (true-hypothesis). 
--- Eliminated hypothesis H141 (true-hypothesis). --- Eliminated hypothesis H150 (true-hypothesis). --- Eliminated hypothesis H27 (true-hypothesis). --- Eliminated hypothesis H28 (true-hypothesis). --- Eliminated hypothesis H146 (true-hypothesis). --- Eliminated hypothesis H147 (true-hypothesis). --- Eliminated hypothesis H148 (true-hypothesis). --- Eliminated hypothesis H149 (true-hypothesis). --- Eliminated hypothesis H138 (true-hypothesis). --- Eliminated hypothesis H139 (true-hypothesis). --- Eliminated hypothesis H50 (duplicate of H26). --- Eliminated hypothesis H142 (duplicate of H52). --- Eliminated hypothesis H31 (duplicate of H9). --- Eliminated hypothesis H144 (duplicate of H57). --- Eliminated hypothesis H32 (duplicate of H10). --- Eliminated hypothesis H37 (duplicate of H15). --- Eliminated hypothesis H38 (duplicate of H16). --- Eliminated hypothesis H40 (duplicate of H18). --- Eliminated hypothesis H41 (duplicate of H19). --- Eliminated hypothesis H42 (duplicate of H20). --- Eliminated hypothesis H43 (duplicate of H21). --- Eliminated hypothesis H44 (duplicate of H22). --- Eliminated hypothesis H45 (duplicate of H23). --- Eliminated hypothesis H46 (duplicate of H24). --- Eliminated hypothesis H47 (duplicate of H25). --- Eliminated hypothesis H33 (duplicate of H11). --- Eliminated hypothesis H34 (duplicate of H12). --- Eliminated hypothesis H143 (duplicate of H60). --- Eliminated hypothesis H12 (duplicate of H2). --- Eliminated hypothesis H162 (duplicate of H152). --- Eliminated hypothesis H30 (duplicate of H8). --- Eliminated hypothesis H9 (duplicate of H3). --- Eliminated hypothesis H159 (duplicate of H154). --- Eliminated hypothesis H29 (duplicate of H7). --- Eliminated hypothesis H26 (duplicate of H1). --- Eliminated hypothesis H4 (redundant, given H48). --- Eliminated hypothesis H10 (redundant, given H48). --- Eliminated hypothesis H11 (redundant, given H1). --- Eliminated hypothesis H58 (redundant, given H53). --- Eliminated hypothesis H59 (redundant, given H52). --- Eliminated hypothesis H145 (redundant, given H53). --- Eliminated hypothesis H160 (redundant, given H155). --- Eliminated hypothesis H161 (redundant, given H151). -S- Substituted hypothesis H51. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( local_ctx__1)) by: fld_hash_bit_len(fld_h(ctx)). -S- Substituted hypothesis H54. This was achieved by replacing all occurrences of fld_byte_count(fld_h( local_ctx__1)) by: fld_byte_count(fld_h(ctx)). -S- Substituted hypothesis H153. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( local_ctx__2)) by: fld_hash_bit_len(fld_h(ctx)). -S- Substituted hypothesis H156. This was achieved by replacing all occurrences of fld_byte_count(fld_h( local_ctx__2)) by: fld_byte_count(fld_h(ctx)). *** Proved C1: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 >= 1 using hypothesis H1. *** Proved C2: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= 268435455 using hypothesis H12. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_9. 
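The pair of conclusions discharged at the end of the preceding VC, C1 and C2, reduce to interval arithmetic on the hash length: H1 gives fld_hash_bit_len(fld_h(ctx)) >= 1, so the numerator of (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 is at least 8 and the quotient at least 1; H12 gives fld_hash_bit_len(fld_h(ctx)) <= 2147483640, so the numerator is at most 2147483647 and the quotient at most 268435455, which is exactly the value substituted for output_byte_count_t__last. A minimal sketch of that step in Ada, using illustrative names that are not taken from the Skein sources and assuming a 32-bit Integer, is:

   procedure Check_Output_Byte_Count (Hash_Bit_Len : in Integer) is
      Bytes : Integer;
   begin
      --  H1 and H12 as recorded above.
      pragma Assert (Hash_Bit_Len >= 1 and Hash_Bit_Len <= 2147483640);
      Bytes := (Hash_Bit_Len + 7) / 8;
      --  Hash_Bit_Len >= 1          =>  Hash_Bit_Len + 7 >= 8          =>  Bytes >= 1          (C1)
      --  Hash_Bit_Len <= 2147483640 =>  Hash_Bit_Len + 7 <= 2147483647 =>  Bytes <= 268435455  (C2)
      pragma Assert (Bytes >= 1 and Bytes <= 268435455);
   end Check_Output_Byte_Count;

The same two conclusions reappear, proved from the same hypotheses, in procedure_skein_512_final_9 below.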
@@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H29 on reading formula in, to give: %%% H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H31 on reading formula in, to give: %%% H31: fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H32 on reading formula in, to give: %%% H32: fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H33 on reading formula in, to give: %%% H33: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first %%% Simplified H34 on reading formula in, to give: %%% H34: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last %%% Simplified H37 on reading formula in, to give: %%% H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first %%% Simplified H38 on reading formula in, to give: %%% H38: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last %%% Simplified H40 on reading formula in, to give: %%% H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first %%% Simplified H41 on reading formula in, to give: %%% H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last %%% Simplified H42 on reading formula in, to give: %%% H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first %%% Simplified H43 on reading formula in, to give: %%% H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last %%% Simplified H44 on reading formula in, to give: %%% H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first %%% Simplified H45 on reading formula in, to give: %%% H45: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last %%% Simplified H46 on reading formula in, to give: %%% H46: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first %%% Simplified H47 on reading formula in, to give: %%% H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last %%% Simplified H48 on reading formula in, to give: %%% H48: not fld_byte_count(fld_h(ctx)) < skein_512_block_bytes_c --- Hypothesis H49 has been replaced by "true". (It is already present, as H29). --- Hypothesis H50 has been replaced by "true". (It is already present, as H30). --- Hypothesis H51 has been replaced by "true". (It is already present, as H31). --- Hypothesis H52 has been replaced by "true". (It is already present, as H32). --- Hypothesis H53 has been replaced by "true". (It is already present, as H33). 
--- Hypothesis H54 has been replaced by "true". (It is already present, as H34). --- Hypothesis H57 has been replaced by "true". (It is already present, as H37). --- Hypothesis H58 has been replaced by "true". (It is already present, as H38). --- Hypothesis H60 has been replaced by "true". (It is already present, as H40). --- Hypothesis H61 has been replaced by "true". (It is already present, as H41). --- Hypothesis H62 has been replaced by "true". (It is already present, as H42). --- Hypothesis H63 has been replaced by "true". (It is already present, as H43). --- Hypothesis H64 has been replaced by "true". (It is already present, as H44). --- Hypothesis H65 has been replaced by "true". (It is already present, as H45). --- Hypothesis H66 has been replaced by "true". (It is already present, as H46). --- Hypothesis H67 has been replaced by "true". (It is already present, as H47). --- Hypothesis H68 has been replaced by "true". (It is already present, as H29). --- Hypothesis H69 has been replaced by "true". (It is already present, as H30). --- Hypothesis H70 has been replaced by "true". (It is already present, as H31). --- Hypothesis H71 has been replaced by "true". (It is already present, as H32). --- Hypothesis H72 has been replaced by "true". (It is already present, as H33). --- Hypothesis H73 has been replaced by "true". (It is already present, as H34). --- Hypothesis H76 has been replaced by "true". (It is already present, as H37). --- Hypothesis H77 has been replaced by "true". (It is already present, as H38). --- Hypothesis H79 has been replaced by "true". (It is already present, as H40). --- Hypothesis H80 has been replaced by "true". (It is already present, as H41). --- Hypothesis H81 has been replaced by "true". (It is already present, as H42). --- Hypothesis H82 has been replaced by "true". (It is already present, as H43). --- Hypothesis H83 has been replaced by "true". (It is already present, as H44). --- Hypothesis H84 has been replaced by "true". (It is already present, as H45). --- Hypothesis H85 has been replaced by "true". (It is already present, as H46). --- Hypothesis H86 has been replaced by "true". (It is already present, as H47). --- Hypothesis H87 has been replaced by "true". (It is already present, as H31). --- Hypothesis H88 has been replaced by "true". (It is already present, as H32). --- Hypothesis H89 has been replaced by "true". (It is already present, as H29). --- Hypothesis H90 has been replaced by "true". (It is already present, as H30). --- Hypothesis H91 has been replaced by "true". (It is already present, as H31). --- Hypothesis H92 has been replaced by "true". (It is already present, as H32). --- Hypothesis H93 has been replaced by "true". (It is already present, as H33). --- Hypothesis H94 has been replaced by "true". (It is already present, as H34). --- Hypothesis H97 has been replaced by "true". (It is already present, as H37). --- Hypothesis H98 has been replaced by "true". (It is already present, as H38). --- Hypothesis H100 has been replaced by "true". (It is already present, as H40). --- Hypothesis H101 has been replaced by "true". (It is already present, as H41). --- Hypothesis H102 has been replaced by "true". (It is already present, as H42). --- Hypothesis H103 has been replaced by "true". (It is already present, as H43). --- Hypothesis H104 has been replaced by "true". (It is already present, as H44). --- Hypothesis H105 has been replaced by "true". (It is already present, as H45). --- Hypothesis H106 has been replaced by "true". 
(It is already present, as H46). --- Hypothesis H107 has been replaced by "true". (It is already present, as H47). %%% Simplified H108 on reading formula in, to give: %%% H108: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H109 has been replaced by "true". (It is already present, as H31). --- Hypothesis H110 has been replaced by "true". (It is already present, as H32). --- Hypothesis H111 has been replaced by "true". (It is already present, as H31). --- Hypothesis H112 has been replaced by "true". (It is already present, as H32). %%% Simplified H117 on reading formula in, to give: %%% H117: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first %%% Simplified H118 on reading formula in, to give: %%% H118: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last %%% Simplified H119 on reading formula in, to give: %%% H119: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first %%% Simplified H120 on reading formula in, to give: %%% H120: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last %%% Simplified H122 on reading formula in, to give: %%% H122: 63 <= skein_512_block_bytes_index__last %%% Simplified H123 on reading formula in, to give: %%% H123: 63 <= skein_512_block_bytes_index__last %%% Simplified H125 on reading formula in, to give: %%% H125: 63 <= natural__last %%% Simplified H128 on reading formula in, to give: %%% H128: fld_hash_bit_len(fld_h(local_ctx__2)) = fld_hash_bit_len(fld_h( ctx)) %%% Simplified H131 on reading formula in, to give: %%% H131: fld_byte_count(fld_h(local_ctx__2)) = fld_byte_count(fld_h(ctx) ) %%% Simplified H132 on reading formula in, to give: %%% H132: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H133 on reading formula in, to give: %%% H133: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H151 has been replaced by "true". (It is already present, as H132). --- Hypothesis H152 has been replaced by "true". (It is already present, as H133). --- Hypothesis H153 has been replaced by "true". (It is already present, as H134). --- Hypothesis H154 has been replaced by "true". (It is already present, as H135). --- Hypothesis H155 has been replaced by "true". (It is already present, as H136). --- Hypothesis H156 has been replaced by "true". (It is already present, as H137). --- Hypothesis H159 has been replaced by "true". (It is already present, as H140). --- Hypothesis H160 has been replaced by "true". (It is already present, as H141). --- Hypothesis H162 has been replaced by "true". (It is already present, as H143). --- Hypothesis H163 has been replaced by "true". (It is already present, as H144). --- Hypothesis H164 has been replaced by "true". (It is already present, as H145). --- Hypothesis H165 has been replaced by "true". (It is already present, as H146). --- Hypothesis H166 has been replaced by "true". (It is already present, as H147). 
--- Hypothesis H167 has been replaced by "true". (It is already present, as H148). --- Hypothesis H168 has been replaced by "true". (It is already present, as H149). --- Hypothesis H169 has been replaced by "true". (It is already present, as H150). %%% Simplified C3 on reading formula in, to give: %%% C3: true *** Proved C3: true -S- Applied substitution rule skein_512_fi_rules(125). This was achieved by replacing all occurrences of output_byte_count_t__first by: 1. New C1: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= 1 -S- Applied substitution rule skein_512_fi_rules(126). This was achieved by replacing all occurrences of output_byte_count_t__last by: 268435455. New C2: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= 268435455 -S- Applied substitution rule skein_512_fi_rules(6). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New C4: fld_hash_bit_len(fld_h(local_ctx__2)) >= - 2147483655 -S- Applied substitution rule skein_512_fi_rules(7). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New C5: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 *** Proved C4: fld_hash_bit_len(fld_h(local_ctx__2)) >= - 2147483655 using hypotheses H26 & H128. >>> Restructured hypothesis H48 into: >>> H48: skein_512_block_bytes_c <= fld_byte_count(fld_h(ctx)) -S- Applied substitution rule skein_512_fi_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H48: 64 <= fld_byte_count(fld_h(ctx)) -S- Applied substitution rule skein_512_fi_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New H9: fld_byte_count(fld_h(ctx)) >= 0 New H31: fld_byte_count(fld_h(ctx)) >= 0 New H115: true New H134: fld_byte_count(fld_h(local_ctx__2)) >= 0 -S- Applied substitution rule skein_512_fi_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H10: fld_byte_count(fld_h(ctx)) <= 2147483647 New H32: fld_byte_count(fld_h(ctx)) <= 2147483647 New H116: true New H124: skein_512_block_bytes_index__last <= 2147483647 New H125: true New H135: fld_byte_count(fld_h(local_ctx__2)) <= 2147483647 -S- Applied substitution rule skein_512_fi_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H140: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(43). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H38: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H141: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= 63 -S- Applied substitution rule skein_512_fi_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H143: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(49). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H144: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= 127 -S- Applied substitution rule skein_512_fi_rules(54). 
This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: true New H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H108: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= spark__unsigned__byte__last) New H132: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2] ) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_fi_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H28: true New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H108: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= 255) New H132: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2] ) <= 255) -S- Applied substitution rule skein_512_fi_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H145: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(61). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H146: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= 65535 -S- Applied substitution rule skein_512_fi_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H147: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(67). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H45: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H148: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= 4294967295 -S- Applied substitution rule skein_512_fi_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H46: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H133: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1] ) <= spark__unsigned__u64__last) New H149: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H150: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= 18446744073709551615 New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H133: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1] ) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(83). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H11: fld_hash_bit_len(fld_h(ctx)) >= 0 New H33: fld_hash_bit_len(fld_h(ctx)) >= 0 New H136: fld_hash_bit_len(fld_h(local_ctx__2)) >= 0 -S- Applied substitution rule skein_512_fi_rules(84). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H12: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H34: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H137: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 New H117: fld_hash_bit_len(fld_h(ctx)) >= 1 New H126: fld_hash_bit_len(fld_h(local_ctx__2)) >= 1 -S- Applied substitution rule skein_512_fi_rules(89). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H118: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H127: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. 
New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H133: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__2) , [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(94). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H133: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2) , [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 New H119: fld_byte_count(fld_h(ctx)) >= 0 New H129: fld_byte_count(fld_h(local_ctx__2)) >= 0 -S- Applied substitution rule skein_512_fi_rules(99). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 New H120: fld_byte_count(fld_h(ctx)) <= 64 New H130: fld_byte_count(fld_h(local_ctx__2)) <= 64 -S- Applied substitution rule skein_512_fi_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H121: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H108: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= 255) New H132: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__2) , [i___2]) and element(fld_b(local_ctx__2), [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(104). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H122: true New H123: true New H124: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H108: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= 255) New H132: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2) , [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. 
New H113: true -S- Applied substitution rule skein_512_fi_rules(109). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H114: true %%% Hypotheses H48 & H4 together imply that 64 = fld_byte_count(fld_h(ctx)). H48 & H4 have therefore been deleted and a new H170 added to this effect. *** Proved C5: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 using hypothesis H137. --- Eliminated hypothesis H13 (true-hypothesis). --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H17 (true-hypothesis). --- Eliminated hypothesis H35 (true-hypothesis). --- Eliminated hypothesis H36 (true-hypothesis). --- Eliminated hypothesis H39 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H50 (true-hypothesis). --- Eliminated hypothesis H51 (true-hypothesis). --- Eliminated hypothesis H52 (true-hypothesis). --- Eliminated hypothesis H53 (true-hypothesis). --- Eliminated hypothesis H54 (true-hypothesis). --- Eliminated hypothesis H55 (true-hypothesis). --- Eliminated hypothesis H56 (true-hypothesis). --- Eliminated hypothesis H57 (true-hypothesis). --- Eliminated hypothesis H58 (true-hypothesis). --- Eliminated hypothesis H59 (true-hypothesis). --- Eliminated hypothesis H60 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H63 (true-hypothesis). --- Eliminated hypothesis H64 (true-hypothesis). --- Eliminated hypothesis H65 (true-hypothesis). --- Eliminated hypothesis H66 (true-hypothesis). --- Eliminated hypothesis H67 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H69 (true-hypothesis). --- Eliminated hypothesis H70 (true-hypothesis). --- Eliminated hypothesis H71 (true-hypothesis). --- Eliminated hypothesis H72 (true-hypothesis). --- Eliminated hypothesis H73 (true-hypothesis). --- Eliminated hypothesis H74 (true-hypothesis). --- Eliminated hypothesis H75 (true-hypothesis). --- Eliminated hypothesis H76 (true-hypothesis). --- Eliminated hypothesis H77 (true-hypothesis). --- Eliminated hypothesis H78 (true-hypothesis). --- Eliminated hypothesis H79 (true-hypothesis). --- Eliminated hypothesis H80 (true-hypothesis). --- Eliminated hypothesis H81 (true-hypothesis). --- Eliminated hypothesis H82 (true-hypothesis). --- Eliminated hypothesis H83 (true-hypothesis). --- Eliminated hypothesis H84 (true-hypothesis). --- Eliminated hypothesis H85 (true-hypothesis). --- Eliminated hypothesis H86 (true-hypothesis). --- Eliminated hypothesis H87 (true-hypothesis). --- Eliminated hypothesis H88 (true-hypothesis). --- Eliminated hypothesis H89 (true-hypothesis). --- Eliminated hypothesis H90 (true-hypothesis). --- Eliminated hypothesis H91 (true-hypothesis). --- Eliminated hypothesis H92 (true-hypothesis). --- Eliminated hypothesis H93 (true-hypothesis). --- Eliminated hypothesis H94 (true-hypothesis). --- Eliminated hypothesis H95 (true-hypothesis). --- Eliminated hypothesis H96 (true-hypothesis). --- Eliminated hypothesis H97 (true-hypothesis). --- Eliminated hypothesis H98 (true-hypothesis). --- Eliminated hypothesis H99 (true-hypothesis). --- Eliminated hypothesis H100 (true-hypothesis). --- Eliminated hypothesis H101 (true-hypothesis). --- Eliminated hypothesis H102 (true-hypothesis). --- Eliminated hypothesis H103 (true-hypothesis). --- Eliminated hypothesis H104 (true-hypothesis). --- Eliminated hypothesis H105 (true-hypothesis). 
--- Eliminated hypothesis H106 (true-hypothesis). --- Eliminated hypothesis H107 (true-hypothesis). --- Eliminated hypothesis H109 (true-hypothesis). --- Eliminated hypothesis H110 (true-hypothesis). --- Eliminated hypothesis H111 (true-hypothesis). --- Eliminated hypothesis H112 (true-hypothesis). --- Eliminated hypothesis H138 (true-hypothesis). --- Eliminated hypothesis H139 (true-hypothesis). --- Eliminated hypothesis H142 (true-hypothesis). --- Eliminated hypothesis H151 (true-hypothesis). --- Eliminated hypothesis H152 (true-hypothesis). --- Eliminated hypothesis H153 (true-hypothesis). --- Eliminated hypothesis H154 (true-hypothesis). --- Eliminated hypothesis H155 (true-hypothesis). --- Eliminated hypothesis H156 (true-hypothesis). --- Eliminated hypothesis H157 (true-hypothesis). --- Eliminated hypothesis H158 (true-hypothesis). --- Eliminated hypothesis H159 (true-hypothesis). --- Eliminated hypothesis H160 (true-hypothesis). --- Eliminated hypothesis H161 (true-hypothesis). --- Eliminated hypothesis H162 (true-hypothesis). --- Eliminated hypothesis H163 (true-hypothesis). --- Eliminated hypothesis H164 (true-hypothesis). --- Eliminated hypothesis H165 (true-hypothesis). --- Eliminated hypothesis H166 (true-hypothesis). --- Eliminated hypothesis H167 (true-hypothesis). --- Eliminated hypothesis H168 (true-hypothesis). --- Eliminated hypothesis H169 (true-hypothesis). --- Eliminated hypothesis H115 (true-hypothesis). --- Eliminated hypothesis H116 (true-hypothesis). --- Eliminated hypothesis H125 (true-hypothesis). --- Eliminated hypothesis H27 (true-hypothesis). --- Eliminated hypothesis H28 (true-hypothesis). --- Eliminated hypothesis H121 (true-hypothesis). --- Eliminated hypothesis H122 (true-hypothesis). --- Eliminated hypothesis H123 (true-hypothesis). --- Eliminated hypothesis H124 (true-hypothesis). --- Eliminated hypothesis H113 (true-hypothesis). --- Eliminated hypothesis H114 (true-hypothesis). --- Eliminated hypothesis H117 (duplicate of H26). --- Eliminated hypothesis H31 (duplicate of H9). --- Eliminated hypothesis H119 (duplicate of H9). --- Eliminated hypothesis H32 (duplicate of H10). --- Eliminated hypothesis H37 (duplicate of H15). --- Eliminated hypothesis H38 (duplicate of H16). --- Eliminated hypothesis H40 (duplicate of H18). --- Eliminated hypothesis H41 (duplicate of H19). --- Eliminated hypothesis H42 (duplicate of H20). --- Eliminated hypothesis H43 (duplicate of H21). --- Eliminated hypothesis H44 (duplicate of H22). --- Eliminated hypothesis H45 (duplicate of H23). --- Eliminated hypothesis H46 (duplicate of H24). --- Eliminated hypothesis H47 (duplicate of H25). --- Eliminated hypothesis H33 (duplicate of H11). --- Eliminated hypothesis H34 (duplicate of H12). --- Eliminated hypothesis H118 (duplicate of H12). --- Eliminated hypothesis H12 (duplicate of H2). --- Eliminated hypothesis H137 (duplicate of H127). --- Eliminated hypothesis H30 (duplicate of H8). --- Eliminated hypothesis H9 (duplicate of H3). --- Eliminated hypothesis H134 (duplicate of H129). --- Eliminated hypothesis H29 (duplicate of H7). --- Eliminated hypothesis H26 (duplicate of H1). --- Eliminated hypothesis H3 (redundant, given H170). --- Eliminated hypothesis H10 (redundant, given H120). --- Eliminated hypothesis H11 (redundant, given H1). --- Eliminated hypothesis H120 (redundant, given H170). --- Eliminated hypothesis H135 (redundant, given H130). --- Eliminated hypothesis H136 (redundant, given H126). -S- Substituted hypothesis H128. 
This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( local_ctx__2)) by: fld_hash_bit_len(fld_h(ctx)). -S- Substituted hypothesis H131. This was achieved by replacing all occurrences of fld_byte_count(fld_h( local_ctx__2)) by: fld_byte_count(fld_h(ctx)). *** Proved C1: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 >= 1 using hypothesis H1. *** Proved C2: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= 268435455 using hypothesis H12. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_10. @@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H29 on reading formula in, to give: %%% H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H31 on reading formula in, to give: %%% H31: fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H32 on reading formula in, to give: %%% H32: fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H33 on reading formula in, to give: %%% H33: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first %%% Simplified H34 on reading formula in, to give: %%% H34: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last %%% Simplified H37 on reading formula in, to give: %%% H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first %%% Simplified H38 on reading formula in, to give: %%% H38: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last %%% Simplified H40 on reading formula in, to give: %%% H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first %%% Simplified H41 on reading formula in, to give: %%% H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last %%% Simplified H42 on reading formula in, to give: %%% H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first %%% Simplified H43 on reading formula in, to give: %%% H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last %%% Simplified H44 on reading formula in, to give: %%% H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first %%% Simplified H45 on reading formula in, to give: %%% H45: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last %%% Simplified H46 on reading formula in, to give: %%% H46: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first %%% Simplified H47 on reading formula in, to give: %%% H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last %%% Simplified H48 on reading 
formula in, to give: %%% H48: fld_byte_count(fld_h(ctx)) < skein_512_block_bytes_c --- Hypothesis H49 has been replaced by "true". (It is already present, as H48). %%% Simplified H50 on reading formula in, to give: %%% H50: fld_hash_bit_len(fld_h(ctx)) > 0 %%% Simplified H51 on reading formula in, to give: %%% H51: fld_hash_bit_len(fld_h(local_ctx__1)) = fld_hash_bit_len(fld_h( ctx)) %%% Simplified H54 on reading formula in, to give: %%% H54: fld_byte_count(fld_h(local_ctx__1)) = fld_byte_count(fld_h(ctx)) %%% Simplified H55 on reading formula in, to give: %%% H55: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__1), [i___2]) and element(fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H56 on reading formula in, to give: %%% H56: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__1), [i___1]) and element(fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H74 has been replaced by "true". (It is already present, as H55). --- Hypothesis H75 has been replaced by "true". (It is already present, as H56). --- Hypothesis H76 has been replaced by "true". (It is already present, as H57). --- Hypothesis H77 has been replaced by "true". (It is already present, as H58). --- Hypothesis H78 has been replaced by "true". (It is already present, as H59). --- Hypothesis H79 has been replaced by "true". (It is already present, as H60). --- Hypothesis H82 has been replaced by "true". (It is already present, as H63). --- Hypothesis H83 has been replaced by "true". (It is already present, as H64). --- Hypothesis H85 has been replaced by "true". (It is already present, as H66). --- Hypothesis H86 has been replaced by "true". (It is already present, as H67). --- Hypothesis H87 has been replaced by "true". (It is already present, as H68). --- Hypothesis H88 has been replaced by "true". (It is already present, as H69). --- Hypothesis H89 has been replaced by "true". (It is already present, as H70). --- Hypothesis H90 has been replaced by "true". (It is already present, as H71). --- Hypothesis H91 has been replaced by "true". (It is already present, as H72). --- Hypothesis H92 has been replaced by "true". (It is already present, as H73). --- Hypothesis H93 has been replaced by "true". (It is already present, as H55). --- Hypothesis H94 has been replaced by "true". (It is already present, as H56). --- Hypothesis H95 has been replaced by "true". (It is already present, as H57). --- Hypothesis H96 has been replaced by "true". (It is already present, as H58). --- Hypothesis H97 has been replaced by "true". (It is already present, as H59). --- Hypothesis H98 has been replaced by "true". (It is already present, as H60). --- Hypothesis H101 has been replaced by "true". (It is already present, as H63). --- Hypothesis H102 has been replaced by "true". (It is already present, as H64). --- Hypothesis H104 has been replaced by "true". (It is already present, as H66). --- Hypothesis H105 has been replaced by "true". (It is already present, as H67). --- Hypothesis H106 has been replaced by "true". (It is already present, as H68). --- Hypothesis H107 has been replaced by "true". (It is already present, as H69). --- Hypothesis H108 has been replaced by "true". (It is already present, as H70). 
--- Hypothesis H109 has been replaced by "true". (It is already present, as H71). --- Hypothesis H110 has been replaced by "true". (It is already present, as H72). --- Hypothesis H111 has been replaced by "true". (It is already present, as H73). --- Hypothesis H112 has been replaced by "true". (It is already present, as H57). --- Hypothesis H113 has been replaced by "true". (It is already present, as H58). --- Hypothesis H114 has been replaced by "true". (It is already present, as H55). --- Hypothesis H115 has been replaced by "true". (It is already present, as H56). --- Hypothesis H116 has been replaced by "true". (It is already present, as H57). --- Hypothesis H117 has been replaced by "true". (It is already present, as H58). --- Hypothesis H118 has been replaced by "true". (It is already present, as H59). --- Hypothesis H119 has been replaced by "true". (It is already present, as H60). --- Hypothesis H122 has been replaced by "true". (It is already present, as H63). --- Hypothesis H123 has been replaced by "true". (It is already present, as H64). --- Hypothesis H125 has been replaced by "true". (It is already present, as H66). --- Hypothesis H126 has been replaced by "true". (It is already present, as H67). --- Hypothesis H127 has been replaced by "true". (It is already present, as H68). --- Hypothesis H128 has been replaced by "true". (It is already present, as H69). --- Hypothesis H129 has been replaced by "true". (It is already present, as H70). --- Hypothesis H130 has been replaced by "true". (It is already present, as H71). --- Hypothesis H131 has been replaced by "true". (It is already present, as H72). --- Hypothesis H132 has been replaced by "true". (It is already present, as H73). %%% Simplified H133 on reading formula in, to give: %%% H133: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__1), [i___1]) and element(fld_b(local_ctx__1), [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H134 has been replaced by "true". (It is already present, as H57). --- Hypothesis H135 has been replaced by "true". (It is already present, as H58). --- Hypothesis H136 has been replaced by "true". (It is already present, as H57). --- Hypothesis H137 has been replaced by "true". (It is already present, as H58). %%% Simplified H147 on reading formula in, to give: %%% H147: 63 <= skein_512_block_bytes_index__last %%% Simplified H148 on reading formula in, to give: %%% H148: 63 <= skein_512_block_bytes_index__last %%% Simplified H150 on reading formula in, to give: %%% H150: 63 <= natural__last %%% Simplified H157 on reading formula in, to give: %%% H157: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H158 on reading formula in, to give: %%% H158: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H176 has been replaced by "true". (It is already present, as H157). --- Hypothesis H177 has been replaced by "true". (It is already present, as H158). --- Hypothesis H178 has been replaced by "true". 
(It is already present, as H159). --- Hypothesis H179 has been replaced by "true". (It is already present, as H160). --- Hypothesis H180 has been replaced by "true". (It is already present, as H161). --- Hypothesis H181 has been replaced by "true". (It is already present, as H162). --- Hypothesis H184 has been replaced by "true". (It is already present, as H165). --- Hypothesis H185 has been replaced by "true". (It is already present, as H166). --- Hypothesis H187 has been replaced by "true". (It is already present, as H168). --- Hypothesis H188 has been replaced by "true". (It is already present, as H169). --- Hypothesis H189 has been replaced by "true". (It is already present, as H170). --- Hypothesis H190 has been replaced by "true". (It is already present, as H171). --- Hypothesis H191 has been replaced by "true". (It is already present, as H172). --- Hypothesis H192 has been replaced by "true". (It is already present, as H173). --- Hypothesis H193 has been replaced by "true". (It is already present, as H174). --- Hypothesis H194 has been replaced by "true". (It is already present, as H175). %%% Simplified H197 on reading formula in, to give: %%% H197: true -S- Applied substitution rule skein_512_fi_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H48: fld_byte_count(fld_h(ctx)) < 64 New H53: fld_byte_count(fld_h(local_ctx__1)) < 64 -S- Applied substitution rule skein_512_fi_rules(6). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H198: fld_hash_bit_len(fld_h(local_ctx__2)) >= - 2147483655 -S- Applied substitution rule skein_512_fi_rules(7). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H199: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New H9: fld_byte_count(fld_h(ctx)) >= 0 New H31: fld_byte_count(fld_h(ctx)) >= 0 New H57: fld_byte_count(fld_h(local_ctx__1)) >= 0 New H140: true New H159: fld_byte_count(fld_h(local_ctx__2)) >= 0 -S- Applied substitution rule skein_512_fi_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H10: fld_byte_count(fld_h(ctx)) <= 2147483647 New H32: fld_byte_count(fld_h(ctx)) <= 2147483647 New H58: fld_byte_count(fld_h(local_ctx__1)) <= 2147483647 New H141: true New H149: skein_512_block_bytes_index__last <= 2147483647 New H150: true New H160: fld_byte_count(fld_h(local_ctx__2)) <= 2147483647 -S- Applied substitution rule skein_512_fi_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H63: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= 0 New H165: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(43). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H38: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H64: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= 63 New H166: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= 63 -S- Applied substitution rule skein_512_fi_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. 
New H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H66: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= 0 New H168: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(49). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H67: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= 127 New H169: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= 127 -S- Applied substitution rule skein_512_fi_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: true New H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H55: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__1), [i___2]) and element(fld_b(local_ctx__1), [i___2] ) <= spark__unsigned__byte__last) New H133: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__1), [i___1]) and element(fld_b(local_ctx__1), [i___1] ) <= spark__unsigned__byte__last) New H157: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2] ) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_fi_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H28: true New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H55: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__1), [i___2]) and element(fld_b(local_ctx__1), [i___2] ) <= 255) New H133: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__1), [i___1]) and element(fld_b(local_ctx__1), [i___1] ) <= 255) New H157: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2] ) <= 255) -S- Applied substitution rule skein_512_fi_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. 
New H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H68: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= 0 New H170: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(61). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H69: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= 65535 New H171: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= 65535 -S- Applied substitution rule skein_512_fi_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H70: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= 0 New H172: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(67). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H45: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H71: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= 4294967295 New H173: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= 4294967295 -S- Applied substitution rule skein_512_fi_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H46: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H56: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__1), [i___1]) and element(fld_x(local_ctx__1), [i___1] ) <= spark__unsigned__u64__last) New H72: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= 0 New H158: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1] ) <= spark__unsigned__u64__last) New H174: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H73: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= 18446744073709551615 New H175: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= 18446744073709551615 New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H56: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__1), [i___1]) and element(fld_x(local_ctx__1), [i___1] ) <= 18446744073709551615) New H158: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1] ) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(83). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H11: fld_hash_bit_len(fld_h(ctx)) >= 0 New H33: fld_hash_bit_len(fld_h(ctx)) >= 0 New H59: fld_hash_bit_len(fld_h(local_ctx__1)) >= 0 New H161: fld_hash_bit_len(fld_h(local_ctx__2)) >= 0 -S- Applied substitution rule skein_512_fi_rules(84). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H12: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H34: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H60: fld_hash_bit_len(fld_h(local_ctx__1)) <= 2147483640 New H162: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 New H142: fld_hash_bit_len(fld_h(local_ctx__1)) >= 1 New H151: fld_hash_bit_len(fld_h(local_ctx__2)) >= 1 -S- Applied substitution rule skein_512_fi_rules(89). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H143: fld_hash_bit_len(fld_h(local_ctx__1)) <= 2147483640 New H152: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. 
New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H56: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__1) , [i___1]) and element(fld_x(local_ctx__1), [i___1]) <= 18446744073709551615) New H158: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__2) , [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(94). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H56: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__1), [i___1]) and element(fld_x(local_ctx__1) , [i___1]) <= 18446744073709551615) New H158: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2) , [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 New H144: fld_byte_count(fld_h(local_ctx__1)) >= 0 New H154: fld_byte_count(fld_h(local_ctx__2)) >= 0 -S- Applied substitution rule skein_512_fi_rules(99). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 New H145: fld_byte_count(fld_h(local_ctx__1)) <= 64 New H155: fld_byte_count(fld_h(local_ctx__2)) <= 64 -S- Applied substitution rule skein_512_fi_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H146: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H55: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__1) , [i___2]) and element(fld_b(local_ctx__1), [i___2]) <= 255) New H133: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__1) , [i___1]) and element(fld_b(local_ctx__1), [i___1]) <= 255) New H157: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__2) , [i___2]) and element(fld_b(local_ctx__2), [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(104). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. 
New H147: true New H148: true New H149: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H55: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__1), [i___2]) and element(fld_b(local_ctx__1) , [i___2]) <= 255) New H133: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(local_ctx__1), [i___1]) and element(fld_b(local_ctx__1) , [i___1]) <= 255) New H157: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2) , [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H138: true -S- Applied substitution rule skein_512_fi_rules(109). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H139: true -S- Applied substitution rule skein_512_fi_rules(125). This was achieved by replacing all occurrences of output_byte_count_t__first by: 1. New H195: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= 1 -S- Applied substitution rule skein_512_fi_rules(126). This was achieved by replacing all occurrences of output_byte_count_t__last by: 268435455. New H196: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= 268435455 --- Eliminated hypothesis H13 (true-hypothesis). --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H17 (true-hypothesis). --- Eliminated hypothesis H35 (true-hypothesis). --- Eliminated hypothesis H36 (true-hypothesis). --- Eliminated hypothesis H39 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H65 (true-hypothesis). --- Eliminated hypothesis H74 (true-hypothesis). --- Eliminated hypothesis H75 (true-hypothesis). --- Eliminated hypothesis H76 (true-hypothesis). --- Eliminated hypothesis H77 (true-hypothesis). --- Eliminated hypothesis H78 (true-hypothesis). --- Eliminated hypothesis H79 (true-hypothesis). --- Eliminated hypothesis H80 (true-hypothesis). --- Eliminated hypothesis H81 (true-hypothesis). --- Eliminated hypothesis H82 (true-hypothesis). --- Eliminated hypothesis H83 (true-hypothesis). --- Eliminated hypothesis H84 (true-hypothesis). --- Eliminated hypothesis H85 (true-hypothesis). --- Eliminated hypothesis H86 (true-hypothesis). --- Eliminated hypothesis H87 (true-hypothesis). --- Eliminated hypothesis H88 (true-hypothesis). --- Eliminated hypothesis H89 (true-hypothesis). --- Eliminated hypothesis H90 (true-hypothesis). --- Eliminated hypothesis H91 (true-hypothesis). --- Eliminated hypothesis H92 (true-hypothesis). --- Eliminated hypothesis H93 (true-hypothesis). --- Eliminated hypothesis H94 (true-hypothesis). --- Eliminated hypothesis H95 (true-hypothesis). --- Eliminated hypothesis H96 (true-hypothesis). --- Eliminated hypothesis H97 (true-hypothesis). --- Eliminated hypothesis H98 (true-hypothesis). --- Eliminated hypothesis H99 (true-hypothesis). --- Eliminated hypothesis H100 (true-hypothesis). --- Eliminated hypothesis H101 (true-hypothesis). --- Eliminated hypothesis H102 (true-hypothesis). --- Eliminated hypothesis H103 (true-hypothesis). 
--- Eliminated hypothesis H104 (true-hypothesis). --- Eliminated hypothesis H105 (true-hypothesis). --- Eliminated hypothesis H106 (true-hypothesis). --- Eliminated hypothesis H107 (true-hypothesis). --- Eliminated hypothesis H108 (true-hypothesis). --- Eliminated hypothesis H109 (true-hypothesis). --- Eliminated hypothesis H110 (true-hypothesis). --- Eliminated hypothesis H111 (true-hypothesis). --- Eliminated hypothesis H112 (true-hypothesis). --- Eliminated hypothesis H113 (true-hypothesis). --- Eliminated hypothesis H114 (true-hypothesis). --- Eliminated hypothesis H115 (true-hypothesis). --- Eliminated hypothesis H116 (true-hypothesis). --- Eliminated hypothesis H117 (true-hypothesis). --- Eliminated hypothesis H118 (true-hypothesis). --- Eliminated hypothesis H119 (true-hypothesis). --- Eliminated hypothesis H120 (true-hypothesis). --- Eliminated hypothesis H121 (true-hypothesis). --- Eliminated hypothesis H122 (true-hypothesis). --- Eliminated hypothesis H123 (true-hypothesis). --- Eliminated hypothesis H124 (true-hypothesis). --- Eliminated hypothesis H125 (true-hypothesis). --- Eliminated hypothesis H126 (true-hypothesis). --- Eliminated hypothesis H127 (true-hypothesis). --- Eliminated hypothesis H128 (true-hypothesis). --- Eliminated hypothesis H129 (true-hypothesis). --- Eliminated hypothesis H130 (true-hypothesis). --- Eliminated hypothesis H131 (true-hypothesis). --- Eliminated hypothesis H132 (true-hypothesis). --- Eliminated hypothesis H134 (true-hypothesis). --- Eliminated hypothesis H135 (true-hypothesis). --- Eliminated hypothesis H136 (true-hypothesis). --- Eliminated hypothesis H137 (true-hypothesis). --- Eliminated hypothesis H163 (true-hypothesis). --- Eliminated hypothesis H164 (true-hypothesis). --- Eliminated hypothesis H167 (true-hypothesis). --- Eliminated hypothesis H176 (true-hypothesis). --- Eliminated hypothesis H177 (true-hypothesis). --- Eliminated hypothesis H178 (true-hypothesis). --- Eliminated hypothesis H179 (true-hypothesis). --- Eliminated hypothesis H180 (true-hypothesis). --- Eliminated hypothesis H181 (true-hypothesis). --- Eliminated hypothesis H182 (true-hypothesis). --- Eliminated hypothesis H183 (true-hypothesis). --- Eliminated hypothesis H184 (true-hypothesis). --- Eliminated hypothesis H185 (true-hypothesis). --- Eliminated hypothesis H186 (true-hypothesis). --- Eliminated hypothesis H187 (true-hypothesis). --- Eliminated hypothesis H188 (true-hypothesis). --- Eliminated hypothesis H189 (true-hypothesis). --- Eliminated hypothesis H190 (true-hypothesis). --- Eliminated hypothesis H191 (true-hypothesis). --- Eliminated hypothesis H192 (true-hypothesis). --- Eliminated hypothesis H193 (true-hypothesis). --- Eliminated hypothesis H194 (true-hypothesis). --- Eliminated hypothesis H197 (true-hypothesis). --- Eliminated hypothesis H140 (true-hypothesis). --- Eliminated hypothesis H141 (true-hypothesis). --- Eliminated hypothesis H150 (true-hypothesis). --- Eliminated hypothesis H27 (true-hypothesis). --- Eliminated hypothesis H28 (true-hypothesis). --- Eliminated hypothesis H146 (true-hypothesis). --- Eliminated hypothesis H147 (true-hypothesis). --- Eliminated hypothesis H148 (true-hypothesis). --- Eliminated hypothesis H149 (true-hypothesis). --- Eliminated hypothesis H138 (true-hypothesis). --- Eliminated hypothesis H139 (true-hypothesis). --- Eliminated hypothesis H50 (duplicate of H26). --- Eliminated hypothesis H142 (duplicate of H52). --- Eliminated hypothesis H31 (duplicate of H9). --- Eliminated hypothesis H144 (duplicate of H57). 
--- Eliminated hypothesis H32 (duplicate of H10). --- Eliminated hypothesis H37 (duplicate of H15). --- Eliminated hypothesis H38 (duplicate of H16). --- Eliminated hypothesis H40 (duplicate of H18). --- Eliminated hypothesis H41 (duplicate of H19). --- Eliminated hypothesis H42 (duplicate of H20). --- Eliminated hypothesis H43 (duplicate of H21). --- Eliminated hypothesis H44 (duplicate of H22). --- Eliminated hypothesis H45 (duplicate of H23). --- Eliminated hypothesis H46 (duplicate of H24). --- Eliminated hypothesis H47 (duplicate of H25). --- Eliminated hypothesis H33 (duplicate of H11). --- Eliminated hypothesis H34 (duplicate of H12). --- Eliminated hypothesis H143 (duplicate of H60). --- Eliminated hypothesis H199 (duplicate of H162). --- Eliminated hypothesis H12 (duplicate of H2). --- Eliminated hypothesis H162 (duplicate of H152). --- Eliminated hypothesis H30 (duplicate of H8). --- Eliminated hypothesis H9 (duplicate of H3). --- Eliminated hypothesis H159 (duplicate of H154). --- Eliminated hypothesis H29 (duplicate of H7). --- Eliminated hypothesis H26 (duplicate of H1). --- Eliminated hypothesis H4 (redundant, given H48). --- Eliminated hypothesis H10 (redundant, given H48). --- Eliminated hypothesis H11 (redundant, given H1). --- Eliminated hypothesis H58 (redundant, given H53). --- Eliminated hypothesis H59 (redundant, given H52). --- Eliminated hypothesis H145 (redundant, given H53). --- Eliminated hypothesis H160 (redundant, given H155). --- Eliminated hypothesis H161 (redundant, given H151). --- Eliminated hypothesis H198 (redundant, given H151). -S- Substituted hypothesis H51. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( local_ctx__1)) by: fld_hash_bit_len(fld_h(ctx)). -S- Substituted hypothesis H54. This was achieved by replacing all occurrences of fld_byte_count(fld_h( local_ctx__1)) by: fld_byte_count(fld_h(ctx)). -S- Substituted hypothesis H153. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( local_ctx__2)) by: fld_hash_bit_len(fld_h(ctx)). -S- Substituted hypothesis H156. This was achieved by replacing all occurrences of fld_byte_count(fld_h( local_ctx__2)) by: fld_byte_count(fld_h(ctx)). *** Proved C1: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 using hypothesis H6. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_11. 
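   [Note on the conclusion proved just above.] C1, (fld_hash_bit_len(fld_h(ctx)) + 7) div 8
   <= result__index__subtype__1__last + 1, is a ceiling division by 8 of the hash length in
   bits, and the literal bounds introduced by substitution rules 88/89 and 125/126 show why
   the related range hypotheses hold: with fld_hash_bit_len in 1 .. 2147483640
   (initialized_hash_bit_length), the sum hash_bit_len + 7 is at most 2147483647, so it stays
   within the 32-bit base integer range of rule 7, and the quotient lies in 1 .. 268435455
   (output_byte_count_t). The following is a minimal, hypothetical Ada sketch of the two
   extreme cases; it is not part of the Skein sources or of the Simplifier, and the procedure
   name and the use of the predefined Integer type are assumptions made only for illustration.

   with Ada.Text_IO; use Ada.Text_IO;

   procedure Check_Output_Byte_Count is
      --  Literals taken from the substitution rules recorded above (assumed 32-bit Integer).
      Min_Bits : constant Integer := 1;              --  initialized_hash_bit_length__first
      Max_Bits : constant Integer := 2_147_483_640;  --  hash_bit_length__last
   begin
      --  (1 + 7) / 8 = 1, the lower bound of output_byte_count_t.
      Put_Line (Integer'Image ((Min_Bits + 7) / 8));
      --  (2_147_483_640 + 7) / 8 = 268_435_455, the upper bound of output_byte_count_t;
      --  2_147_483_647 still fits in Integer, so the "+ 7" cannot overflow.
      Put_Line (Integer'Image ((Max_Bits + 7) / 8));
   end Check_Output_Byte_Count;

   (For the positive operands involved here, FDL's "div" and Ada's "/" give the same result,
   so the sketch matches the proved formula.)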
@@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H29 on reading formula in, to give: %%% H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H31 on reading formula in, to give: %%% H31: fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H32 on reading formula in, to give: %%% H32: fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H33 on reading formula in, to give: %%% H33: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first %%% Simplified H34 on reading formula in, to give: %%% H34: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last %%% Simplified H37 on reading formula in, to give: %%% H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first %%% Simplified H38 on reading formula in, to give: %%% H38: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last %%% Simplified H40 on reading formula in, to give: %%% H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first %%% Simplified H41 on reading formula in, to give: %%% H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last %%% Simplified H42 on reading formula in, to give: %%% H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first %%% Simplified H43 on reading formula in, to give: %%% H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last %%% Simplified H44 on reading formula in, to give: %%% H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first %%% Simplified H45 on reading formula in, to give: %%% H45: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last %%% Simplified H46 on reading formula in, to give: %%% H46: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first %%% Simplified H47 on reading formula in, to give: %%% H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last %%% Simplified H48 on reading formula in, to give: %%% H48: not fld_byte_count(fld_h(ctx)) < skein_512_block_bytes_c --- Hypothesis H49 has been replaced by "true". (It is already present, as H29). --- Hypothesis H50 has been replaced by "true". (It is already present, as H30). --- Hypothesis H51 has been replaced by "true". (It is already present, as H31). --- Hypothesis H52 has been replaced by "true". (It is already present, as H32). --- Hypothesis H53 has been replaced by "true". (It is already present, as H33). 
--- Hypothesis H54 has been replaced by "true". (It is already present, as H34). --- Hypothesis H57 has been replaced by "true". (It is already present, as H37). --- Hypothesis H58 has been replaced by "true". (It is already present, as H38). --- Hypothesis H60 has been replaced by "true". (It is already present, as H40). --- Hypothesis H61 has been replaced by "true". (It is already present, as H41). --- Hypothesis H62 has been replaced by "true". (It is already present, as H42). --- Hypothesis H63 has been replaced by "true". (It is already present, as H43). --- Hypothesis H64 has been replaced by "true". (It is already present, as H44). --- Hypothesis H65 has been replaced by "true". (It is already present, as H45). --- Hypothesis H66 has been replaced by "true". (It is already present, as H46). --- Hypothesis H67 has been replaced by "true". (It is already present, as H47). --- Hypothesis H68 has been replaced by "true". (It is already present, as H29). --- Hypothesis H69 has been replaced by "true". (It is already present, as H30). --- Hypothesis H70 has been replaced by "true". (It is already present, as H31). --- Hypothesis H71 has been replaced by "true". (It is already present, as H32). --- Hypothesis H72 has been replaced by "true". (It is already present, as H33). --- Hypothesis H73 has been replaced by "true". (It is already present, as H34). --- Hypothesis H76 has been replaced by "true". (It is already present, as H37). --- Hypothesis H77 has been replaced by "true". (It is already present, as H38). --- Hypothesis H79 has been replaced by "true". (It is already present, as H40). --- Hypothesis H80 has been replaced by "true". (It is already present, as H41). --- Hypothesis H81 has been replaced by "true". (It is already present, as H42). --- Hypothesis H82 has been replaced by "true". (It is already present, as H43). --- Hypothesis H83 has been replaced by "true". (It is already present, as H44). --- Hypothesis H84 has been replaced by "true". (It is already present, as H45). --- Hypothesis H85 has been replaced by "true". (It is already present, as H46). --- Hypothesis H86 has been replaced by "true". (It is already present, as H47). --- Hypothesis H87 has been replaced by "true". (It is already present, as H31). --- Hypothesis H88 has been replaced by "true". (It is already present, as H32). --- Hypothesis H89 has been replaced by "true". (It is already present, as H29). --- Hypothesis H90 has been replaced by "true". (It is already present, as H30). --- Hypothesis H91 has been replaced by "true". (It is already present, as H31). --- Hypothesis H92 has been replaced by "true". (It is already present, as H32). --- Hypothesis H93 has been replaced by "true". (It is already present, as H33). --- Hypothesis H94 has been replaced by "true". (It is already present, as H34). --- Hypothesis H97 has been replaced by "true". (It is already present, as H37). --- Hypothesis H98 has been replaced by "true". (It is already present, as H38). --- Hypothesis H100 has been replaced by "true". (It is already present, as H40). --- Hypothesis H101 has been replaced by "true". (It is already present, as H41). --- Hypothesis H102 has been replaced by "true". (It is already present, as H42). --- Hypothesis H103 has been replaced by "true". (It is already present, as H43). --- Hypothesis H104 has been replaced by "true". (It is already present, as H44). --- Hypothesis H105 has been replaced by "true". (It is already present, as H45). --- Hypothesis H106 has been replaced by "true". 
(It is already present, as H46). --- Hypothesis H107 has been replaced by "true". (It is already present, as H47). %%% Simplified H108 on reading formula in, to give: %%% H108: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H109 has been replaced by "true". (It is already present, as H31). --- Hypothesis H110 has been replaced by "true". (It is already present, as H32). --- Hypothesis H111 has been replaced by "true". (It is already present, as H31). --- Hypothesis H112 has been replaced by "true". (It is already present, as H32). %%% Simplified H117 on reading formula in, to give: %%% H117: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first %%% Simplified H118 on reading formula in, to give: %%% H118: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last %%% Simplified H119 on reading formula in, to give: %%% H119: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first %%% Simplified H120 on reading formula in, to give: %%% H120: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last %%% Simplified H122 on reading formula in, to give: %%% H122: 63 <= skein_512_block_bytes_index__last %%% Simplified H123 on reading formula in, to give: %%% H123: 63 <= skein_512_block_bytes_index__last %%% Simplified H125 on reading formula in, to give: %%% H125: 63 <= natural__last %%% Simplified H128 on reading formula in, to give: %%% H128: fld_hash_bit_len(fld_h(local_ctx__2)) = fld_hash_bit_len(fld_h( ctx)) %%% Simplified H131 on reading formula in, to give: %%% H131: fld_byte_count(fld_h(local_ctx__2)) = fld_byte_count(fld_h(ctx) ) %%% Simplified H132 on reading formula in, to give: %%% H132: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H133 on reading formula in, to give: %%% H133: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H151 has been replaced by "true". (It is already present, as H132). --- Hypothesis H152 has been replaced by "true". (It is already present, as H133). --- Hypothesis H153 has been replaced by "true". (It is already present, as H134). --- Hypothesis H154 has been replaced by "true". (It is already present, as H135). --- Hypothesis H155 has been replaced by "true". (It is already present, as H136). --- Hypothesis H156 has been replaced by "true". (It is already present, as H137). --- Hypothesis H159 has been replaced by "true". (It is already present, as H140). --- Hypothesis H160 has been replaced by "true". (It is already present, as H141). --- Hypothesis H162 has been replaced by "true". (It is already present, as H143). --- Hypothesis H163 has been replaced by "true". (It is already present, as H144). --- Hypothesis H164 has been replaced by "true". (It is already present, as H145). --- Hypothesis H165 has been replaced by "true". (It is already present, as H146). --- Hypothesis H166 has been replaced by "true". (It is already present, as H147). 
--- Hypothesis H167 has been replaced by "true". (It is already present, as H148). --- Hypothesis H168 has been replaced by "true". (It is already present, as H149). --- Hypothesis H169 has been replaced by "true". (It is already present, as H150). %%% Simplified H172 on reading formula in, to give: %%% H172: true >>> Restructured hypothesis H48 into: >>> H48: skein_512_block_bytes_c <= fld_byte_count(fld_h(ctx)) -S- Applied substitution rule skein_512_fi_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H48: 64 <= fld_byte_count(fld_h(ctx)) -S- Applied substitution rule skein_512_fi_rules(6). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H173: fld_hash_bit_len(fld_h(local_ctx__2)) >= - 2147483655 -S- Applied substitution rule skein_512_fi_rules(7). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H174: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New H9: fld_byte_count(fld_h(ctx)) >= 0 New H31: fld_byte_count(fld_h(ctx)) >= 0 New H115: true New H134: fld_byte_count(fld_h(local_ctx__2)) >= 0 -S- Applied substitution rule skein_512_fi_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H10: fld_byte_count(fld_h(ctx)) <= 2147483647 New H32: fld_byte_count(fld_h(ctx)) <= 2147483647 New H116: true New H124: skein_512_block_bytes_index__last <= 2147483647 New H125: true New H135: fld_byte_count(fld_h(local_ctx__2)) <= 2147483647 -S- Applied substitution rule skein_512_fi_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H140: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(43). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H38: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H141: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= 63 -S- Applied substitution rule skein_512_fi_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H143: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(49). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H144: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= 127 -S- Applied substitution rule skein_512_fi_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. 
New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: true New H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H108: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= spark__unsigned__byte__last) New H132: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2] ) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_fi_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H28: true New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H108: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= 255) New H132: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2] ) <= 255) -S- Applied substitution rule skein_512_fi_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H145: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(61). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H146: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= 65535 -S- Applied substitution rule skein_512_fi_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H147: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(67). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H45: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H148: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= 4294967295 -S- Applied substitution rule skein_512_fi_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H46: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H133: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1] ) <= spark__unsigned__u64__last) New H149: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H150: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= 18446744073709551615 New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H133: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1] ) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(83). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H11: fld_hash_bit_len(fld_h(ctx)) >= 0 New H33: fld_hash_bit_len(fld_h(ctx)) >= 0 New H136: fld_hash_bit_len(fld_h(local_ctx__2)) >= 0 -S- Applied substitution rule skein_512_fi_rules(84). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H12: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H34: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H137: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 New H117: fld_hash_bit_len(fld_h(ctx)) >= 1 New H126: fld_hash_bit_len(fld_h(local_ctx__2)) >= 1 -S- Applied substitution rule skein_512_fi_rules(89). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H118: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H127: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. 
New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H133: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__2) , [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(94). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H133: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2) , [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 New H119: fld_byte_count(fld_h(ctx)) >= 0 New H129: fld_byte_count(fld_h(local_ctx__2)) >= 0 -S- Applied substitution rule skein_512_fi_rules(99). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 New H120: fld_byte_count(fld_h(ctx)) <= 64 New H130: fld_byte_count(fld_h(local_ctx__2)) <= 64 -S- Applied substitution rule skein_512_fi_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H121: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H108: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= 255) New H132: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__2) , [i___2]) and element(fld_b(local_ctx__2), [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(104). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H122: true New H123: true New H124: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H108: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= 255) New H132: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2) , [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. 
New H113: true -S- Applied substitution rule skein_512_fi_rules(109). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H114: true -S- Applied substitution rule skein_512_fi_rules(125). This was achieved by replacing all occurrences of output_byte_count_t__first by: 1. New H170: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= 1 -S- Applied substitution rule skein_512_fi_rules(126). This was achieved by replacing all occurrences of output_byte_count_t__last by: 268435455. New H171: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= 268435455 %%% Hypotheses H48 & H4 together imply that 64 = fld_byte_count(fld_h(ctx)). H48 & H4 have therefore been deleted and a new H175 added to this effect. --- Eliminated hypothesis H13 (true-hypothesis). --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H17 (true-hypothesis). --- Eliminated hypothesis H35 (true-hypothesis). --- Eliminated hypothesis H36 (true-hypothesis). --- Eliminated hypothesis H39 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H50 (true-hypothesis). --- Eliminated hypothesis H51 (true-hypothesis). --- Eliminated hypothesis H52 (true-hypothesis). --- Eliminated hypothesis H53 (true-hypothesis). --- Eliminated hypothesis H54 (true-hypothesis). --- Eliminated hypothesis H55 (true-hypothesis). --- Eliminated hypothesis H56 (true-hypothesis). --- Eliminated hypothesis H57 (true-hypothesis). --- Eliminated hypothesis H58 (true-hypothesis). --- Eliminated hypothesis H59 (true-hypothesis). --- Eliminated hypothesis H60 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H63 (true-hypothesis). --- Eliminated hypothesis H64 (true-hypothesis). --- Eliminated hypothesis H65 (true-hypothesis). --- Eliminated hypothesis H66 (true-hypothesis). --- Eliminated hypothesis H67 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H69 (true-hypothesis). --- Eliminated hypothesis H70 (true-hypothesis). --- Eliminated hypothesis H71 (true-hypothesis). --- Eliminated hypothesis H72 (true-hypothesis). --- Eliminated hypothesis H73 (true-hypothesis). --- Eliminated hypothesis H74 (true-hypothesis). --- Eliminated hypothesis H75 (true-hypothesis). --- Eliminated hypothesis H76 (true-hypothesis). --- Eliminated hypothesis H77 (true-hypothesis). --- Eliminated hypothesis H78 (true-hypothesis). --- Eliminated hypothesis H79 (true-hypothesis). --- Eliminated hypothesis H80 (true-hypothesis). --- Eliminated hypothesis H81 (true-hypothesis). --- Eliminated hypothesis H82 (true-hypothesis). --- Eliminated hypothesis H83 (true-hypothesis). --- Eliminated hypothesis H84 (true-hypothesis). --- Eliminated hypothesis H85 (true-hypothesis). --- Eliminated hypothesis H86 (true-hypothesis). --- Eliminated hypothesis H87 (true-hypothesis). --- Eliminated hypothesis H88 (true-hypothesis). --- Eliminated hypothesis H89 (true-hypothesis). --- Eliminated hypothesis H90 (true-hypothesis). --- Eliminated hypothesis H91 (true-hypothesis). --- Eliminated hypothesis H92 (true-hypothesis). --- Eliminated hypothesis H93 (true-hypothesis). --- Eliminated hypothesis H94 (true-hypothesis). --- Eliminated hypothesis H95 (true-hypothesis). --- Eliminated hypothesis H96 (true-hypothesis). --- Eliminated hypothesis H97 (true-hypothesis). --- Eliminated hypothesis H98 (true-hypothesis). --- Eliminated hypothesis H99 (true-hypothesis). 
--- Eliminated hypothesis H100 (true-hypothesis). --- Eliminated hypothesis H101 (true-hypothesis). --- Eliminated hypothesis H102 (true-hypothesis). --- Eliminated hypothesis H103 (true-hypothesis). --- Eliminated hypothesis H104 (true-hypothesis). --- Eliminated hypothesis H105 (true-hypothesis). --- Eliminated hypothesis H106 (true-hypothesis). --- Eliminated hypothesis H107 (true-hypothesis). --- Eliminated hypothesis H109 (true-hypothesis). --- Eliminated hypothesis H110 (true-hypothesis). --- Eliminated hypothesis H111 (true-hypothesis). --- Eliminated hypothesis H112 (true-hypothesis). --- Eliminated hypothesis H138 (true-hypothesis). --- Eliminated hypothesis H139 (true-hypothesis). --- Eliminated hypothesis H142 (true-hypothesis). --- Eliminated hypothesis H151 (true-hypothesis). --- Eliminated hypothesis H152 (true-hypothesis). --- Eliminated hypothesis H153 (true-hypothesis). --- Eliminated hypothesis H154 (true-hypothesis). --- Eliminated hypothesis H155 (true-hypothesis). --- Eliminated hypothesis H156 (true-hypothesis). --- Eliminated hypothesis H157 (true-hypothesis). --- Eliminated hypothesis H158 (true-hypothesis). --- Eliminated hypothesis H159 (true-hypothesis). --- Eliminated hypothesis H160 (true-hypothesis). --- Eliminated hypothesis H161 (true-hypothesis). --- Eliminated hypothesis H162 (true-hypothesis). --- Eliminated hypothesis H163 (true-hypothesis). --- Eliminated hypothesis H164 (true-hypothesis). --- Eliminated hypothesis H165 (true-hypothesis). --- Eliminated hypothesis H166 (true-hypothesis). --- Eliminated hypothesis H167 (true-hypothesis). --- Eliminated hypothesis H168 (true-hypothesis). --- Eliminated hypothesis H169 (true-hypothesis). --- Eliminated hypothesis H172 (true-hypothesis). --- Eliminated hypothesis H115 (true-hypothesis). --- Eliminated hypothesis H116 (true-hypothesis). --- Eliminated hypothesis H125 (true-hypothesis). --- Eliminated hypothesis H27 (true-hypothesis). --- Eliminated hypothesis H28 (true-hypothesis). --- Eliminated hypothesis H121 (true-hypothesis). --- Eliminated hypothesis H122 (true-hypothesis). --- Eliminated hypothesis H123 (true-hypothesis). --- Eliminated hypothesis H124 (true-hypothesis). --- Eliminated hypothesis H113 (true-hypothesis). --- Eliminated hypothesis H114 (true-hypothesis). --- Eliminated hypothesis H117 (duplicate of H26). --- Eliminated hypothesis H31 (duplicate of H9). --- Eliminated hypothesis H119 (duplicate of H9). --- Eliminated hypothesis H32 (duplicate of H10). --- Eliminated hypothesis H37 (duplicate of H15). --- Eliminated hypothesis H38 (duplicate of H16). --- Eliminated hypothesis H40 (duplicate of H18). --- Eliminated hypothesis H41 (duplicate of H19). --- Eliminated hypothesis H42 (duplicate of H20). --- Eliminated hypothesis H43 (duplicate of H21). --- Eliminated hypothesis H44 (duplicate of H22). --- Eliminated hypothesis H45 (duplicate of H23). --- Eliminated hypothesis H46 (duplicate of H24). --- Eliminated hypothesis H47 (duplicate of H25). --- Eliminated hypothesis H33 (duplicate of H11). --- Eliminated hypothesis H34 (duplicate of H12). --- Eliminated hypothesis H118 (duplicate of H12). --- Eliminated hypothesis H174 (duplicate of H137). --- Eliminated hypothesis H12 (duplicate of H2). --- Eliminated hypothesis H137 (duplicate of H127). --- Eliminated hypothesis H30 (duplicate of H8). --- Eliminated hypothesis H9 (duplicate of H3). --- Eliminated hypothesis H134 (duplicate of H129). --- Eliminated hypothesis H29 (duplicate of H7). --- Eliminated hypothesis H26 (duplicate of H1). 
--- Eliminated hypothesis H3 (redundant, given H175). --- Eliminated hypothesis H10 (redundant, given H120). --- Eliminated hypothesis H11 (redundant, given H1). --- Eliminated hypothesis H120 (redundant, given H175). --- Eliminated hypothesis H135 (redundant, given H130). --- Eliminated hypothesis H136 (redundant, given H126). --- Eliminated hypothesis H173 (redundant, given H126). -S- Substituted hypothesis H128. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( local_ctx__2)) by: fld_hash_bit_len(fld_h(ctx)). -S- Substituted hypothesis H131. This was achieved by replacing all occurrences of fld_byte_count(fld_h( local_ctx__2)) by: fld_byte_count(fld_h(ctx)). *** Proved C1: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 using hypothesis H6. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_12. @@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H29 on reading formula in, to give: %%% H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H31 on reading formula in, to give: %%% H31: fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H32 on reading formula in, to give: %%% H32: fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H33 on reading formula in, to give: %%% H33: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first %%% Simplified H34 on reading formula in, to give: %%% H34: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last %%% Simplified H37 on reading formula in, to give: %%% H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first %%% Simplified H38 on reading formula in, to give: %%% H38: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last %%% Simplified H40 on reading formula in, to give: %%% H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first %%% Simplified H41 on reading formula in, to give: %%% H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last %%% Simplified H42 on reading formula in, to give: %%% H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first %%% Simplified H43 on reading formula in, to give: %%% H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last %%% Simplified H44 on reading formula in, to give: %%% H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first %%% Simplified H45 on reading formula in, to give: %%% H45: 
fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last %%% Simplified H46 on reading formula in, to give: %%% H46: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first %%% Simplified H47 on reading formula in, to give: %%% H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last %%% Simplified H48 on reading formula in, to give: %%% H48: fld_byte_count(fld_h(ctx)) < skein_512_block_bytes_c --- Hypothesis H49 has been replaced by "true". (It is already present, as H48). %%% Simplified H50 on reading formula in, to give: %%% H50: fld_hash_bit_len(fld_h(ctx)) > 0 %%% Simplified H51 on reading formula in, to give: %%% H51: fld_hash_bit_len(fld_h(local_ctx__1)) = fld_hash_bit_len(fld_h( ctx)) %%% Simplified H54 on reading formula in, to give: %%% H54: fld_byte_count(fld_h(local_ctx__1)) = fld_byte_count(fld_h(ctx)) %%% Simplified H55 on reading formula in, to give: %%% H55: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__1), [i___2]) and element(fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H56 on reading formula in, to give: %%% H56: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__1), [i___1]) and element(fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H74 has been replaced by "true". (It is already present, as H55). --- Hypothesis H75 has been replaced by "true". (It is already present, as H56). --- Hypothesis H76 has been replaced by "true". (It is already present, as H57). --- Hypothesis H77 has been replaced by "true". (It is already present, as H58). --- Hypothesis H78 has been replaced by "true". (It is already present, as H59). --- Hypothesis H79 has been replaced by "true". (It is already present, as H60). --- Hypothesis H82 has been replaced by "true". (It is already present, as H63). --- Hypothesis H83 has been replaced by "true". (It is already present, as H64). --- Hypothesis H85 has been replaced by "true". (It is already present, as H66). --- Hypothesis H86 has been replaced by "true". (It is already present, as H67). --- Hypothesis H87 has been replaced by "true". (It is already present, as H68). --- Hypothesis H88 has been replaced by "true". (It is already present, as H69). --- Hypothesis H89 has been replaced by "true". (It is already present, as H70). --- Hypothesis H90 has been replaced by "true". (It is already present, as H71). --- Hypothesis H91 has been replaced by "true". (It is already present, as H72). --- Hypothesis H92 has been replaced by "true". (It is already present, as H73). --- Hypothesis H93 has been replaced by "true". (It is already present, as H55). --- Hypothesis H94 has been replaced by "true". (It is already present, as H56). --- Hypothesis H95 has been replaced by "true". (It is already present, as H57). --- Hypothesis H96 has been replaced by "true". (It is already present, as H58). --- Hypothesis H97 has been replaced by "true". (It is already present, as H59). --- Hypothesis H98 has been replaced by "true". (It is already present, as H60). --- Hypothesis H101 has been replaced by "true". (It is already present, as H63). --- Hypothesis H102 has been replaced by "true". (It is already present, as H64). --- Hypothesis H104 has been replaced by "true". 
(It is already present, as H66). --- Hypothesis H105 has been replaced by "true". (It is already present, as H67). --- Hypothesis H106 has been replaced by "true". (It is already present, as H68). --- Hypothesis H107 has been replaced by "true". (It is already present, as H69). --- Hypothesis H108 has been replaced by "true". (It is already present, as H70). --- Hypothesis H109 has been replaced by "true". (It is already present, as H71). --- Hypothesis H110 has been replaced by "true". (It is already present, as H72). --- Hypothesis H111 has been replaced by "true". (It is already present, as H73). --- Hypothesis H112 has been replaced by "true". (It is already present, as H57). --- Hypothesis H113 has been replaced by "true". (It is already present, as H58). --- Hypothesis H114 has been replaced by "true". (It is already present, as H55). --- Hypothesis H115 has been replaced by "true". (It is already present, as H56). --- Hypothesis H116 has been replaced by "true". (It is already present, as H57). --- Hypothesis H117 has been replaced by "true". (It is already present, as H58). --- Hypothesis H118 has been replaced by "true". (It is already present, as H59). --- Hypothesis H119 has been replaced by "true". (It is already present, as H60). --- Hypothesis H122 has been replaced by "true". (It is already present, as H63). --- Hypothesis H123 has been replaced by "true". (It is already present, as H64). --- Hypothesis H125 has been replaced by "true". (It is already present, as H66). --- Hypothesis H126 has been replaced by "true". (It is already present, as H67). --- Hypothesis H127 has been replaced by "true". (It is already present, as H68). --- Hypothesis H128 has been replaced by "true". (It is already present, as H69). --- Hypothesis H129 has been replaced by "true". (It is already present, as H70). --- Hypothesis H130 has been replaced by "true". (It is already present, as H71). --- Hypothesis H131 has been replaced by "true". (It is already present, as H72). --- Hypothesis H132 has been replaced by "true". (It is already present, as H73). %%% Simplified H133 on reading formula in, to give: %%% H133: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__1), [i___1]) and element(fld_b(local_ctx__1), [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H134 has been replaced by "true". (It is already present, as H57). --- Hypothesis H135 has been replaced by "true". (It is already present, as H58). --- Hypothesis H136 has been replaced by "true". (It is already present, as H57). --- Hypothesis H137 has been replaced by "true". (It is already present, as H58). 
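The long run of hypotheses replaced by "true" above is the Simplifier's duplicate-hypothesis reduction: a later hypothesis that is syntactically identical to an earlier one (for example H74, already present as H55) is rewritten to "true" so that a later pass can discard it as a true-hypothesis. A minimal sketch of the underlying equivalence, with Gamma, H and C standing for the remaining hypotheses, the repeated hypothesis and the conclusion (these symbols are illustrative and do not appear in the log itself):

    \[
      (\Gamma \land H \land H) \rightarrow C
      \;\Longleftrightarrow\;
      (\Gamma \land H \land \mathrm{true}) \rightarrow C
      \;\Longleftrightarrow\;
      (\Gamma \land H) \rightarrow C
    \]

Since \(H \land H \equiv H\) and \(H \land \mathrm{true} \equiv H\), the verification condition is neither weakened nor strengthened; only the first occurrence of each fact needs to be carried into the substitution and elimination phases recorded below.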
%%% Simplified H147 on reading formula in, to give: %%% H147: 63 <= skein_512_block_bytes_index__last %%% Simplified H148 on reading formula in, to give: %%% H148: 63 <= skein_512_block_bytes_index__last %%% Simplified H150 on reading formula in, to give: %%% H150: 63 <= natural__last %%% Simplified H157 on reading formula in, to give: %%% H157: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H158 on reading formula in, to give: %%% H158: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H176 has been replaced by "true". (It is already present, as H157). --- Hypothesis H177 has been replaced by "true". (It is already present, as H158). --- Hypothesis H178 has been replaced by "true". (It is already present, as H159). --- Hypothesis H179 has been replaced by "true". (It is already present, as H160). --- Hypothesis H180 has been replaced by "true". (It is already present, as H161). --- Hypothesis H181 has been replaced by "true". (It is already present, as H162). --- Hypothesis H184 has been replaced by "true". (It is already present, as H165). --- Hypothesis H185 has been replaced by "true". (It is already present, as H166). --- Hypothesis H187 has been replaced by "true". (It is already present, as H168). --- Hypothesis H188 has been replaced by "true". (It is already present, as H169). --- Hypothesis H189 has been replaced by "true". (It is already present, as H170). --- Hypothesis H190 has been replaced by "true". (It is already present, as H171). --- Hypothesis H191 has been replaced by "true". (It is already present, as H172). --- Hypothesis H192 has been replaced by "true". (It is already present, as H173). --- Hypothesis H193 has been replaced by "true". (It is already present, as H174). --- Hypothesis H194 has been replaced by "true". (It is already present, as H175). %%% Simplified H197 on reading formula in, to give: %%% H197: true *** Proved C1: 0 >= spark__unsigned__byte__first using hypothesis H27. *** Proved C2: 0 <= spark__unsigned__byte__last using hypothesis H28. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_13. 
@@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H29 on reading formula in, to give: %%% H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H31 on reading formula in, to give: %%% H31: fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H32 on reading formula in, to give: %%% H32: fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H33 on reading formula in, to give: %%% H33: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first %%% Simplified H34 on reading formula in, to give: %%% H34: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last %%% Simplified H37 on reading formula in, to give: %%% H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first %%% Simplified H38 on reading formula in, to give: %%% H38: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last %%% Simplified H40 on reading formula in, to give: %%% H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first %%% Simplified H41 on reading formula in, to give: %%% H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last %%% Simplified H42 on reading formula in, to give: %%% H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first %%% Simplified H43 on reading formula in, to give: %%% H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last %%% Simplified H44 on reading formula in, to give: %%% H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first %%% Simplified H45 on reading formula in, to give: %%% H45: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last %%% Simplified H46 on reading formula in, to give: %%% H46: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first %%% Simplified H47 on reading formula in, to give: %%% H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last %%% Simplified H48 on reading formula in, to give: %%% H48: not fld_byte_count(fld_h(ctx)) < skein_512_block_bytes_c --- Hypothesis H49 has been replaced by "true". (It is already present, as H29). --- Hypothesis H50 has been replaced by "true". (It is already present, as H30). --- Hypothesis H51 has been replaced by "true". (It is already present, as H31). --- Hypothesis H52 has been replaced by "true". (It is already present, as H32). --- Hypothesis H53 has been replaced by "true". (It is already present, as H33). 
--- Hypothesis H54 has been replaced by "true". (It is already present, as H34). --- Hypothesis H57 has been replaced by "true". (It is already present, as H37). --- Hypothesis H58 has been replaced by "true". (It is already present, as H38). --- Hypothesis H60 has been replaced by "true". (It is already present, as H40). --- Hypothesis H61 has been replaced by "true". (It is already present, as H41). --- Hypothesis H62 has been replaced by "true". (It is already present, as H42). --- Hypothesis H63 has been replaced by "true". (It is already present, as H43). --- Hypothesis H64 has been replaced by "true". (It is already present, as H44). --- Hypothesis H65 has been replaced by "true". (It is already present, as H45). --- Hypothesis H66 has been replaced by "true". (It is already present, as H46). --- Hypothesis H67 has been replaced by "true". (It is already present, as H47). --- Hypothesis H68 has been replaced by "true". (It is already present, as H29). --- Hypothesis H69 has been replaced by "true". (It is already present, as H30). --- Hypothesis H70 has been replaced by "true". (It is already present, as H31). --- Hypothesis H71 has been replaced by "true". (It is already present, as H32). --- Hypothesis H72 has been replaced by "true". (It is already present, as H33). --- Hypothesis H73 has been replaced by "true". (It is already present, as H34). --- Hypothesis H76 has been replaced by "true". (It is already present, as H37). --- Hypothesis H77 has been replaced by "true". (It is already present, as H38). --- Hypothesis H79 has been replaced by "true". (It is already present, as H40). --- Hypothesis H80 has been replaced by "true". (It is already present, as H41). --- Hypothesis H81 has been replaced by "true". (It is already present, as H42). --- Hypothesis H82 has been replaced by "true". (It is already present, as H43). --- Hypothesis H83 has been replaced by "true". (It is already present, as H44). --- Hypothesis H84 has been replaced by "true". (It is already present, as H45). --- Hypothesis H85 has been replaced by "true". (It is already present, as H46). --- Hypothesis H86 has been replaced by "true". (It is already present, as H47). --- Hypothesis H87 has been replaced by "true". (It is already present, as H31). --- Hypothesis H88 has been replaced by "true". (It is already present, as H32). --- Hypothesis H89 has been replaced by "true". (It is already present, as H29). --- Hypothesis H90 has been replaced by "true". (It is already present, as H30). --- Hypothesis H91 has been replaced by "true". (It is already present, as H31). --- Hypothesis H92 has been replaced by "true". (It is already present, as H32). --- Hypothesis H93 has been replaced by "true". (It is already present, as H33). --- Hypothesis H94 has been replaced by "true". (It is already present, as H34). --- Hypothesis H97 has been replaced by "true". (It is already present, as H37). --- Hypothesis H98 has been replaced by "true". (It is already present, as H38). --- Hypothesis H100 has been replaced by "true". (It is already present, as H40). --- Hypothesis H101 has been replaced by "true". (It is already present, as H41). --- Hypothesis H102 has been replaced by "true". (It is already present, as H42). --- Hypothesis H103 has been replaced by "true". (It is already present, as H43). --- Hypothesis H104 has been replaced by "true". (It is already present, as H44). --- Hypothesis H105 has been replaced by "true". (It is already present, as H45). --- Hypothesis H106 has been replaced by "true". 
(It is already present, as H46). --- Hypothesis H107 has been replaced by "true". (It is already present, as H47). %%% Simplified H108 on reading formula in, to give: %%% H108: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H109 has been replaced by "true". (It is already present, as H31). --- Hypothesis H110 has been replaced by "true". (It is already present, as H32). --- Hypothesis H111 has been replaced by "true". (It is already present, as H31). --- Hypothesis H112 has been replaced by "true". (It is already present, as H32). %%% Simplified H117 on reading formula in, to give: %%% H117: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first %%% Simplified H118 on reading formula in, to give: %%% H118: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last %%% Simplified H119 on reading formula in, to give: %%% H119: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first %%% Simplified H120 on reading formula in, to give: %%% H120: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last %%% Simplified H122 on reading formula in, to give: %%% H122: 63 <= skein_512_block_bytes_index__last %%% Simplified H123 on reading formula in, to give: %%% H123: 63 <= skein_512_block_bytes_index__last %%% Simplified H125 on reading formula in, to give: %%% H125: 63 <= natural__last %%% Simplified H128 on reading formula in, to give: %%% H128: fld_hash_bit_len(fld_h(local_ctx__2)) = fld_hash_bit_len(fld_h( ctx)) %%% Simplified H131 on reading formula in, to give: %%% H131: fld_byte_count(fld_h(local_ctx__2)) = fld_byte_count(fld_h(ctx) ) %%% Simplified H132 on reading formula in, to give: %%% H132: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H133 on reading formula in, to give: %%% H133: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H151 has been replaced by "true". (It is already present, as H132). --- Hypothesis H152 has been replaced by "true". (It is already present, as H133). --- Hypothesis H153 has been replaced by "true". (It is already present, as H134). --- Hypothesis H154 has been replaced by "true". (It is already present, as H135). --- Hypothesis H155 has been replaced by "true". (It is already present, as H136). --- Hypothesis H156 has been replaced by "true". (It is already present, as H137). --- Hypothesis H159 has been replaced by "true". (It is already present, as H140). --- Hypothesis H160 has been replaced by "true". (It is already present, as H141). --- Hypothesis H162 has been replaced by "true". (It is already present, as H143). --- Hypothesis H163 has been replaced by "true". (It is already present, as H144). --- Hypothesis H164 has been replaced by "true". (It is already present, as H145). --- Hypothesis H165 has been replaced by "true". (It is already present, as H146). --- Hypothesis H166 has been replaced by "true". (It is already present, as H147). 
--- Hypothesis H167 has been replaced by "true". (It is already present, as H148). --- Hypothesis H168 has been replaced by "true". (It is already present, as H149). --- Hypothesis H169 has been replaced by "true". (It is already present, as H150). %%% Simplified H172 on reading formula in, to give: %%% H172: true *** Proved C1: 0 >= spark__unsigned__byte__first using hypothesis H27. *** Proved C2: 0 <= spark__unsigned__byte__last using hypothesis H28. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_14. @@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H29 on reading formula in, to give: %%% H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H31 on reading formula in, to give: %%% H31: fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H32 on reading formula in, to give: %%% H32: fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H33 on reading formula in, to give: %%% H33: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first %%% Simplified H34 on reading formula in, to give: %%% H34: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last %%% Simplified H37 on reading formula in, to give: %%% H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first %%% Simplified H38 on reading formula in, to give: %%% H38: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last %%% Simplified H40 on reading formula in, to give: %%% H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first %%% Simplified H41 on reading formula in, to give: %%% H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last %%% Simplified H42 on reading formula in, to give: %%% H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first %%% Simplified H43 on reading formula in, to give: %%% H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last %%% Simplified H44 on reading formula in, to give: %%% H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first %%% Simplified H45 on reading formula in, to give: %%% H45: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last %%% Simplified H46 on reading formula in, to give: %%% H46: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first %%% Simplified H47 on reading formula in, to give: %%% H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last %%% Simplified H48 on 
reading formula in, to give: %%% H48: fld_byte_count(fld_h(ctx)) < skein_512_block_bytes_c --- Hypothesis H49 has been replaced by "true". (It is already present, as H48). %%% Simplified H50 on reading formula in, to give: %%% H50: fld_hash_bit_len(fld_h(ctx)) > 0 %%% Simplified H51 on reading formula in, to give: %%% H51: fld_hash_bit_len(fld_h(local_ctx__1)) = fld_hash_bit_len(fld_h( ctx)) %%% Simplified H54 on reading formula in, to give: %%% H54: fld_byte_count(fld_h(local_ctx__1)) = fld_byte_count(fld_h(ctx)) %%% Simplified H55 on reading formula in, to give: %%% H55: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__1), [i___2]) and element(fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H56 on reading formula in, to give: %%% H56: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__1), [i___1]) and element(fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H74 has been replaced by "true". (It is already present, as H55). --- Hypothesis H75 has been replaced by "true". (It is already present, as H56). --- Hypothesis H76 has been replaced by "true". (It is already present, as H57). --- Hypothesis H77 has been replaced by "true". (It is already present, as H58). --- Hypothesis H78 has been replaced by "true". (It is already present, as H59). --- Hypothesis H79 has been replaced by "true". (It is already present, as H60). --- Hypothesis H82 has been replaced by "true". (It is already present, as H63). --- Hypothesis H83 has been replaced by "true". (It is already present, as H64). --- Hypothesis H85 has been replaced by "true". (It is already present, as H66). --- Hypothesis H86 has been replaced by "true". (It is already present, as H67). --- Hypothesis H87 has been replaced by "true". (It is already present, as H68). --- Hypothesis H88 has been replaced by "true". (It is already present, as H69). --- Hypothesis H89 has been replaced by "true". (It is already present, as H70). --- Hypothesis H90 has been replaced by "true". (It is already present, as H71). --- Hypothesis H91 has been replaced by "true". (It is already present, as H72). --- Hypothesis H92 has been replaced by "true". (It is already present, as H73). --- Hypothesis H93 has been replaced by "true". (It is already present, as H55). --- Hypothesis H94 has been replaced by "true". (It is already present, as H56). --- Hypothesis H95 has been replaced by "true". (It is already present, as H57). --- Hypothesis H96 has been replaced by "true". (It is already present, as H58). --- Hypothesis H97 has been replaced by "true". (It is already present, as H59). --- Hypothesis H98 has been replaced by "true". (It is already present, as H60). --- Hypothesis H101 has been replaced by "true". (It is already present, as H63). --- Hypothesis H102 has been replaced by "true". (It is already present, as H64). --- Hypothesis H104 has been replaced by "true". (It is already present, as H66). --- Hypothesis H105 has been replaced by "true". (It is already present, as H67). --- Hypothesis H106 has been replaced by "true". (It is already present, as H68). --- Hypothesis H107 has been replaced by "true". (It is already present, as H69). --- Hypothesis H108 has been replaced by "true". (It is already present, as H70). 
--- Hypothesis H109 has been replaced by "true". (It is already present, as H71). --- Hypothesis H110 has been replaced by "true". (It is already present, as H72). --- Hypothesis H111 has been replaced by "true". (It is already present, as H73). --- Hypothesis H112 has been replaced by "true". (It is already present, as H57). --- Hypothesis H113 has been replaced by "true". (It is already present, as H58). --- Hypothesis H114 has been replaced by "true". (It is already present, as H55). --- Hypothesis H115 has been replaced by "true". (It is already present, as H56). --- Hypothesis H116 has been replaced by "true". (It is already present, as H57). --- Hypothesis H117 has been replaced by "true". (It is already present, as H58). --- Hypothesis H118 has been replaced by "true". (It is already present, as H59). --- Hypothesis H119 has been replaced by "true". (It is already present, as H60). --- Hypothesis H122 has been replaced by "true". (It is already present, as H63). --- Hypothesis H123 has been replaced by "true". (It is already present, as H64). --- Hypothesis H125 has been replaced by "true". (It is already present, as H66). --- Hypothesis H126 has been replaced by "true". (It is already present, as H67). --- Hypothesis H127 has been replaced by "true". (It is already present, as H68). --- Hypothesis H128 has been replaced by "true". (It is already present, as H69). --- Hypothesis H129 has been replaced by "true". (It is already present, as H70). --- Hypothesis H130 has been replaced by "true". (It is already present, as H71). --- Hypothesis H131 has been replaced by "true". (It is already present, as H72). --- Hypothesis H132 has been replaced by "true". (It is already present, as H73). %%% Simplified H133 on reading formula in, to give: %%% H133: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__1), [i___1]) and element(fld_b(local_ctx__1), [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H134 has been replaced by "true". (It is already present, as H57). --- Hypothesis H135 has been replaced by "true". (It is already present, as H58). --- Hypothesis H136 has been replaced by "true". (It is already present, as H57). --- Hypothesis H137 has been replaced by "true". (It is already present, as H58). %%% Simplified H147 on reading formula in, to give: %%% H147: 63 <= skein_512_block_bytes_index__last %%% Simplified H148 on reading formula in, to give: %%% H148: 63 <= skein_512_block_bytes_index__last %%% Simplified H150 on reading formula in, to give: %%% H150: 63 <= natural__last %%% Simplified H157 on reading formula in, to give: %%% H157: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H158 on reading formula in, to give: %%% H158: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H176 has been replaced by "true". (It is already present, as H157). --- Hypothesis H177 has been replaced by "true". (It is already present, as H158). --- Hypothesis H178 has been replaced by "true". 
(It is already present, as H159). --- Hypothesis H179 has been replaced by "true". (It is already present, as H160). --- Hypothesis H180 has been replaced by "true". (It is already present, as H161). --- Hypothesis H181 has been replaced by "true". (It is already present, as H162). --- Hypothesis H184 has been replaced by "true". (It is already present, as H165). --- Hypothesis H185 has been replaced by "true". (It is already present, as H166). --- Hypothesis H187 has been replaced by "true". (It is already present, as H168). --- Hypothesis H188 has been replaced by "true". (It is already present, as H169). --- Hypothesis H189 has been replaced by "true". (It is already present, as H170). --- Hypothesis H190 has been replaced by "true". (It is already present, as H171). --- Hypothesis H191 has been replaced by "true". (It is already present, as H172). --- Hypothesis H192 has been replaced by "true". (It is already present, as H173). --- Hypothesis H193 has been replaced by "true". (It is already present, as H174). --- Hypothesis H194 has been replaced by "true". (It is already present, as H175). %%% Simplified H197 on reading formula in, to give: %%% H197: true --- Hypothesis H201 has been replaced by "true". (It is already present, as H27). --- Hypothesis H202 has been replaced by "true". (It is already present, as H28). %%% Simplified H203 on reading formula in, to give: %%% H203: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= 0 and 0 <= spark__unsigned__byte__last) %%% Simplified H204 on reading formula in, to give: %%% H204: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H205 on reading formula in, to give: %%% H205: fld_byte_count(fld_h(local_ctx__2)) >= natural__first %%% Simplified H206 on reading formula in, to give: %%% H206: fld_byte_count(fld_h(local_ctx__2)) <= natural__last %%% Simplified H207 on reading formula in, to give: %%% H207: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first %%% Simplified H208 on reading formula in, to give: %%% H208: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last %%% Simplified H211 on reading formula in, to give: %%% H211: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first %%% Simplified H212 on reading formula in, to give: %%% H212: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last %%% Simplified H214 on reading formula in, to give: %%% H214: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first %%% Simplified H215 on reading formula in, to give: %%% H215: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last %%% Simplified H216 on reading formula in, to give: %%% H216: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first %%% Simplified H217 on reading formula in, to give: %%% H217: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last %%% Simplified H218 on reading formula in, to give: %%% H218: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first %%% Simplified H219 on reading formula in, to give: %%% H219: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= 
spark__unsigned__u32__last %%% Simplified H220 on reading formula in, to give: %%% H220: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first %%% Simplified H221 on reading formula in, to give: %%% H221: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last --- Hypothesis H222 has been replaced by "true". (It is already present, as H195). --- Hypothesis H223 has been replaced by "true". (It is already present, as H196). %%% Simplified C3 on reading formula in, to give: %%% C3: true *** Proved C3: true -S- Applied substitution rule skein_512_fi_rules(135). This was achieved by replacing all occurrences of positive_output_block_count_t__first by: 1. New C1: ((fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 >= 1 -S- Applied substitution rule skein_512_fi_rules(136). This was achieved by replacing all occurrences of positive_output_block_count_t__last by: 4194304. New C2: ((fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 <= 4194304 -S- Applied substitution rule skein_512_fi_rules(6). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H198: fld_hash_bit_len(fld_h(local_ctx__2)) >= - 2147483655 New C4: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= - 2147483711 -S- Applied substitution rule skein_512_fi_rules(7). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H199: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 New C5: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= 2147483584 -S- Applied substitution rule skein_512_fi_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H48: fld_byte_count(fld_h(ctx)) < 64 New H53: fld_byte_count(fld_h(local_ctx__1)) < 64 -S- Applied substitution rule skein_512_fi_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New H9: fld_byte_count(fld_h(ctx)) >= 0 New H31: fld_byte_count(fld_h(ctx)) >= 0 New H57: fld_byte_count(fld_h(local_ctx__1)) >= 0 New H140: true New H159: fld_byte_count(fld_h(local_ctx__2)) >= 0 New H205: fld_byte_count(fld_h(local_ctx__2)) >= 0 -S- Applied substitution rule skein_512_fi_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H10: fld_byte_count(fld_h(ctx)) <= 2147483647 New H32: fld_byte_count(fld_h(ctx)) <= 2147483647 New H58: fld_byte_count(fld_h(local_ctx__1)) <= 2147483647 New H141: true New H149: skein_512_block_bytes_index__last <= 2147483647 New H150: true New H160: fld_byte_count(fld_h(local_ctx__2)) <= 2147483647 New H206: fld_byte_count(fld_h(local_ctx__2)) <= 2147483647 -S- Applied substitution rule skein_512_fi_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H63: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= 0 New H165: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= 0 New H211: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(43). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. 
New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H38: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H64: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= 63 New H166: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= 63 New H212: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= 63 -S- Applied substitution rule skein_512_fi_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H66: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= 0 New H168: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= 0 New H214: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(49). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H67: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= 127 New H169: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= 127 New H215: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= 127 -S- Applied substitution rule skein_512_fi_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: true New H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H55: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__1), [i___2]) and element(fld_b(local_ctx__1), [i___2] ) <= spark__unsigned__byte__last) New H133: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__1), [i___1]) and element(fld_b(local_ctx__1), [i___1] ) <= spark__unsigned__byte__last) New H157: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2] ) <= spark__unsigned__byte__last) New H203: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_fi_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. 
New H28: true New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H55: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__1), [i___2]) and element(fld_b(local_ctx__1), [i___2] ) <= 255) New H133: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__1), [i___1]) and element(fld_b(local_ctx__1), [i___1] ) <= 255) New H157: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2] ) <= 255) New H203: true -S- Applied substitution rule skein_512_fi_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H68: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= 0 New H170: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= 0 New H216: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(61). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H69: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= 65535 New H171: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= 65535 New H217: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= 65535 -S- Applied substitution rule skein_512_fi_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H70: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= 0 New H172: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= 0 New H218: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(67). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H45: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H71: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= 4294967295 New H173: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= 4294967295 New H219: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= 4294967295 -S- Applied substitution rule skein_512_fi_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H46: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H56: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__1), [i___1]) and element(fld_x(local_ctx__1), [i___1] ) <= spark__unsigned__u64__last) New H72: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= 0 New H158: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1] ) <= spark__unsigned__u64__last) New H174: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= 0 New H204: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1] ) <= spark__unsigned__u64__last) New H220: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H73: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= 18446744073709551615 New H175: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= 18446744073709551615 New H221: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= 18446744073709551615 New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H56: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__1), [i___1]) and element(fld_x(local_ctx__1), [i___1] ) <= 18446744073709551615) New H158: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1] ) <= 18446744073709551615) New H204: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1] ) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(83). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. 
New H11: fld_hash_bit_len(fld_h(ctx)) >= 0 New H33: fld_hash_bit_len(fld_h(ctx)) >= 0 New H59: fld_hash_bit_len(fld_h(local_ctx__1)) >= 0 New H161: fld_hash_bit_len(fld_h(local_ctx__2)) >= 0 New H207: fld_hash_bit_len(fld_h(local_ctx__2)) >= 0 -S- Applied substitution rule skein_512_fi_rules(84). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H12: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H34: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H60: fld_hash_bit_len(fld_h(local_ctx__1)) <= 2147483640 New H162: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 New H208: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 New H142: fld_hash_bit_len(fld_h(local_ctx__1)) >= 1 New H151: fld_hash_bit_len(fld_h(local_ctx__2)) >= 1 -S- Applied substitution rule skein_512_fi_rules(89). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H143: fld_hash_bit_len(fld_h(local_ctx__1)) <= 2147483640 New H152: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H56: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__1) , [i___1]) and element(fld_x(local_ctx__1), [i___1]) <= 18446744073709551615) New H158: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__2) , [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= 18446744073709551615) New H204: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__2) , [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(94). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H56: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__1), [i___1]) and element(fld_x(local_ctx__1) , [i___1]) <= 18446744073709551615) New H158: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2) , [i___1]) <= 18446744073709551615) New H204: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2) , [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(98). 
This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 New H144: fld_byte_count(fld_h(local_ctx__1)) >= 0 New H154: fld_byte_count(fld_h(local_ctx__2)) >= 0 -S- Applied substitution rule skein_512_fi_rules(99). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 New H145: fld_byte_count(fld_h(local_ctx__1)) <= 64 New H155: fld_byte_count(fld_h(local_ctx__2)) <= 64 -S- Applied substitution rule skein_512_fi_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H146: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H55: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__1) , [i___2]) and element(fld_b(local_ctx__1), [i___2]) <= 255) New H133: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__1) , [i___1]) and element(fld_b(local_ctx__1), [i___1]) <= 255) New H157: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__2) , [i___2]) and element(fld_b(local_ctx__2), [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(104). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H147: true New H148: true New H149: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H55: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__1), [i___2]) and element(fld_b(local_ctx__1) , [i___2]) <= 255) New H133: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(local_ctx__1), [i___1]) and element(fld_b(local_ctx__1) , [i___1]) <= 255) New H157: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2) , [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H138: true -S- Applied substitution rule skein_512_fi_rules(109). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H139: true -S- Applied substitution rule skein_512_fi_rules(125). This was achieved by replacing all occurrences of output_byte_count_t__first by: 1. New H195: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= 1 -S- Applied substitution rule skein_512_fi_rules(126). This was achieved by replacing all occurrences of output_byte_count_t__last by: 268435455. New H196: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= 268435455 *** Proved C4: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= - 2147483711 using hypothesis H195. *** Proved C5: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= 2147483584 using hypothesis H196. --- Eliminated hypothesis H13 (true-hypothesis). 
--- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H17 (true-hypothesis). --- Eliminated hypothesis H35 (true-hypothesis). --- Eliminated hypothesis H36 (true-hypothesis). --- Eliminated hypothesis H39 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H65 (true-hypothesis). --- Eliminated hypothesis H74 (true-hypothesis). --- Eliminated hypothesis H75 (true-hypothesis). --- Eliminated hypothesis H76 (true-hypothesis). --- Eliminated hypothesis H77 (true-hypothesis). --- Eliminated hypothesis H78 (true-hypothesis). --- Eliminated hypothesis H79 (true-hypothesis). --- Eliminated hypothesis H80 (true-hypothesis). --- Eliminated hypothesis H81 (true-hypothesis). --- Eliminated hypothesis H82 (true-hypothesis). --- Eliminated hypothesis H83 (true-hypothesis). --- Eliminated hypothesis H84 (true-hypothesis). --- Eliminated hypothesis H85 (true-hypothesis). --- Eliminated hypothesis H86 (true-hypothesis). --- Eliminated hypothesis H87 (true-hypothesis). --- Eliminated hypothesis H88 (true-hypothesis). --- Eliminated hypothesis H89 (true-hypothesis). --- Eliminated hypothesis H90 (true-hypothesis). --- Eliminated hypothesis H91 (true-hypothesis). --- Eliminated hypothesis H92 (true-hypothesis). --- Eliminated hypothesis H93 (true-hypothesis). --- Eliminated hypothesis H94 (true-hypothesis). --- Eliminated hypothesis H95 (true-hypothesis). --- Eliminated hypothesis H96 (true-hypothesis). --- Eliminated hypothesis H97 (true-hypothesis). --- Eliminated hypothesis H98 (true-hypothesis). --- Eliminated hypothesis H99 (true-hypothesis). --- Eliminated hypothesis H100 (true-hypothesis). --- Eliminated hypothesis H101 (true-hypothesis). --- Eliminated hypothesis H102 (true-hypothesis). --- Eliminated hypothesis H103 (true-hypothesis). --- Eliminated hypothesis H104 (true-hypothesis). --- Eliminated hypothesis H105 (true-hypothesis). --- Eliminated hypothesis H106 (true-hypothesis). --- Eliminated hypothesis H107 (true-hypothesis). --- Eliminated hypothesis H108 (true-hypothesis). --- Eliminated hypothesis H109 (true-hypothesis). --- Eliminated hypothesis H110 (true-hypothesis). --- Eliminated hypothesis H111 (true-hypothesis). --- Eliminated hypothesis H112 (true-hypothesis). --- Eliminated hypothesis H113 (true-hypothesis). --- Eliminated hypothesis H114 (true-hypothesis). --- Eliminated hypothesis H115 (true-hypothesis). --- Eliminated hypothesis H116 (true-hypothesis). --- Eliminated hypothesis H117 (true-hypothesis). --- Eliminated hypothesis H118 (true-hypothesis). --- Eliminated hypothesis H119 (true-hypothesis). --- Eliminated hypothesis H120 (true-hypothesis). --- Eliminated hypothesis H121 (true-hypothesis). --- Eliminated hypothesis H122 (true-hypothesis). --- Eliminated hypothesis H123 (true-hypothesis). --- Eliminated hypothesis H124 (true-hypothesis). --- Eliminated hypothesis H125 (true-hypothesis). --- Eliminated hypothesis H126 (true-hypothesis). --- Eliminated hypothesis H127 (true-hypothesis). --- Eliminated hypothesis H128 (true-hypothesis). --- Eliminated hypothesis H129 (true-hypothesis). --- Eliminated hypothesis H130 (true-hypothesis). --- Eliminated hypothesis H131 (true-hypothesis). --- Eliminated hypothesis H132 (true-hypothesis). --- Eliminated hypothesis H134 (true-hypothesis). --- Eliminated hypothesis H135 (true-hypothesis). --- Eliminated hypothesis H136 (true-hypothesis). 
--- Eliminated hypothesis H137 (true-hypothesis). --- Eliminated hypothesis H163 (true-hypothesis). --- Eliminated hypothesis H164 (true-hypothesis). --- Eliminated hypothesis H167 (true-hypothesis). --- Eliminated hypothesis H176 (true-hypothesis). --- Eliminated hypothesis H177 (true-hypothesis). --- Eliminated hypothesis H178 (true-hypothesis). --- Eliminated hypothesis H179 (true-hypothesis). --- Eliminated hypothesis H180 (true-hypothesis). --- Eliminated hypothesis H181 (true-hypothesis). --- Eliminated hypothesis H182 (true-hypothesis). --- Eliminated hypothesis H183 (true-hypothesis). --- Eliminated hypothesis H184 (true-hypothesis). --- Eliminated hypothesis H185 (true-hypothesis). --- Eliminated hypothesis H186 (true-hypothesis). --- Eliminated hypothesis H187 (true-hypothesis). --- Eliminated hypothesis H188 (true-hypothesis). --- Eliminated hypothesis H189 (true-hypothesis). --- Eliminated hypothesis H190 (true-hypothesis). --- Eliminated hypothesis H191 (true-hypothesis). --- Eliminated hypothesis H192 (true-hypothesis). --- Eliminated hypothesis H193 (true-hypothesis). --- Eliminated hypothesis H194 (true-hypothesis). --- Eliminated hypothesis H197 (true-hypothesis). --- Eliminated hypothesis H201 (true-hypothesis). --- Eliminated hypothesis H202 (true-hypothesis). --- Eliminated hypothesis H209 (true-hypothesis). --- Eliminated hypothesis H210 (true-hypothesis). --- Eliminated hypothesis H213 (true-hypothesis). --- Eliminated hypothesis H222 (true-hypothesis). --- Eliminated hypothesis H223 (true-hypothesis). --- Eliminated hypothesis H140 (true-hypothesis). --- Eliminated hypothesis H141 (true-hypothesis). --- Eliminated hypothesis H150 (true-hypothesis). --- Eliminated hypothesis H27 (true-hypothesis). --- Eliminated hypothesis H28 (true-hypothesis). --- Eliminated hypothesis H203 (true-hypothesis). --- Eliminated hypothesis H146 (true-hypothesis). --- Eliminated hypothesis H147 (true-hypothesis). --- Eliminated hypothesis H148 (true-hypothesis). --- Eliminated hypothesis H149 (true-hypothesis). --- Eliminated hypothesis H138 (true-hypothesis). --- Eliminated hypothesis H139 (true-hypothesis). --- Eliminated hypothesis H50 (duplicate of H26). --- Eliminated hypothesis H142 (duplicate of H52). --- Eliminated hypothesis H208 (duplicate of H199). --- Eliminated hypothesis H31 (duplicate of H9). --- Eliminated hypothesis H144 (duplicate of H57). --- Eliminated hypothesis H205 (duplicate of H159). --- Eliminated hypothesis H32 (duplicate of H10). --- Eliminated hypothesis H206 (duplicate of H160). --- Eliminated hypothesis H37 (duplicate of H15). --- Eliminated hypothesis H211 (duplicate of H165). --- Eliminated hypothesis H38 (duplicate of H16). --- Eliminated hypothesis H212 (duplicate of H166). --- Eliminated hypothesis H40 (duplicate of H18). --- Eliminated hypothesis H214 (duplicate of H168). --- Eliminated hypothesis H41 (duplicate of H19). --- Eliminated hypothesis H215 (duplicate of H169). --- Eliminated hypothesis H42 (duplicate of H20). --- Eliminated hypothesis H216 (duplicate of H170). --- Eliminated hypothesis H43 (duplicate of H21). --- Eliminated hypothesis H217 (duplicate of H171). --- Eliminated hypothesis H44 (duplicate of H22). --- Eliminated hypothesis H218 (duplicate of H172). --- Eliminated hypothesis H45 (duplicate of H23). --- Eliminated hypothesis H219 (duplicate of H173). --- Eliminated hypothesis H46 (duplicate of H24). --- Eliminated hypothesis H220 (duplicate of H174). --- Eliminated hypothesis H47 (duplicate of H25). 
--- Eliminated hypothesis H221 (duplicate of H175). --- Eliminated hypothesis H33 (duplicate of H11). --- Eliminated hypothesis H207 (duplicate of H161). --- Eliminated hypothesis H34 (duplicate of H12). --- Eliminated hypothesis H143 (duplicate of H60). --- Eliminated hypothesis H199 (duplicate of H162). --- Eliminated hypothesis H12 (duplicate of H2). --- Eliminated hypothesis H162 (duplicate of H152). --- Eliminated hypothesis H30 (duplicate of H8). --- Eliminated hypothesis H204 (duplicate of H158). --- Eliminated hypothesis H9 (duplicate of H3). --- Eliminated hypothesis H159 (duplicate of H154). --- Eliminated hypothesis H29 (duplicate of H7). --- Eliminated hypothesis H26 (duplicate of H1). --- Eliminated hypothesis H4 (redundant, given H48). --- Eliminated hypothesis H10 (redundant, given H48). --- Eliminated hypothesis H11 (redundant, given H1). --- Eliminated hypothesis H58 (redundant, given H53). --- Eliminated hypothesis H59 (redundant, given H52). --- Eliminated hypothesis H145 (redundant, given H53). --- Eliminated hypothesis H160 (redundant, given H155). --- Eliminated hypothesis H161 (redundant, given H151). --- Eliminated hypothesis H198 (redundant, given H151). -S- Substituted hypothesis H51. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( local_ctx__1)) by: fld_hash_bit_len(fld_h(ctx)). -S- Substituted hypothesis H54. This was achieved by replacing all occurrences of fld_byte_count(fld_h( local_ctx__1)) by: fld_byte_count(fld_h(ctx)). -S- Substituted hypothesis H153. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( local_ctx__2)) by: fld_hash_bit_len(fld_h(ctx)). -S- Substituted hypothesis H156. This was achieved by replacing all occurrences of fld_byte_count(fld_h( local_ctx__2)) by: fld_byte_count(fld_h(ctx)). *** Proved C1: ((fld_hash_bit_len(fld_h(ctx)) + 7) div 8 + 63) div 64 >= 1 using hypothesis H195. *** Proved C2: ((fld_hash_bit_len(fld_h(ctx)) + 7) div 8 + 63) div 64 <= 4194304 using hypothesis H196. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_15. 
@@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H29 on reading formula in, to give: %%% H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H31 on reading formula in, to give: %%% H31: fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H32 on reading formula in, to give: %%% H32: fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H33 on reading formula in, to give: %%% H33: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first %%% Simplified H34 on reading formula in, to give: %%% H34: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last %%% Simplified H37 on reading formula in, to give: %%% H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first %%% Simplified H38 on reading formula in, to give: %%% H38: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last %%% Simplified H40 on reading formula in, to give: %%% H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first %%% Simplified H41 on reading formula in, to give: %%% H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last %%% Simplified H42 on reading formula in, to give: %%% H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first %%% Simplified H43 on reading formula in, to give: %%% H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last %%% Simplified H44 on reading formula in, to give: %%% H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first %%% Simplified H45 on reading formula in, to give: %%% H45: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last %%% Simplified H46 on reading formula in, to give: %%% H46: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first %%% Simplified H47 on reading formula in, to give: %%% H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last %%% Simplified H48 on reading formula in, to give: %%% H48: not fld_byte_count(fld_h(ctx)) < skein_512_block_bytes_c --- Hypothesis H49 has been replaced by "true". (It is already present, as H29). --- Hypothesis H50 has been replaced by "true". (It is already present, as H30). --- Hypothesis H51 has been replaced by "true". (It is already present, as H31). --- Hypothesis H52 has been replaced by "true". (It is already present, as H32). --- Hypothesis H53 has been replaced by "true". (It is already present, as H33). 
--- Hypothesis H54 has been replaced by "true". (It is already present, as H34). --- Hypothesis H57 has been replaced by "true". (It is already present, as H37). --- Hypothesis H58 has been replaced by "true". (It is already present, as H38). --- Hypothesis H60 has been replaced by "true". (It is already present, as H40). --- Hypothesis H61 has been replaced by "true". (It is already present, as H41). --- Hypothesis H62 has been replaced by "true". (It is already present, as H42). --- Hypothesis H63 has been replaced by "true". (It is already present, as H43). --- Hypothesis H64 has been replaced by "true". (It is already present, as H44). --- Hypothesis H65 has been replaced by "true". (It is already present, as H45). --- Hypothesis H66 has been replaced by "true". (It is already present, as H46). --- Hypothesis H67 has been replaced by "true". (It is already present, as H47). --- Hypothesis H68 has been replaced by "true". (It is already present, as H29). --- Hypothesis H69 has been replaced by "true". (It is already present, as H30). --- Hypothesis H70 has been replaced by "true". (It is already present, as H31). --- Hypothesis H71 has been replaced by "true". (It is already present, as H32). --- Hypothesis H72 has been replaced by "true". (It is already present, as H33). --- Hypothesis H73 has been replaced by "true". (It is already present, as H34). --- Hypothesis H76 has been replaced by "true". (It is already present, as H37). --- Hypothesis H77 has been replaced by "true". (It is already present, as H38). --- Hypothesis H79 has been replaced by "true". (It is already present, as H40). --- Hypothesis H80 has been replaced by "true". (It is already present, as H41). --- Hypothesis H81 has been replaced by "true". (It is already present, as H42). --- Hypothesis H82 has been replaced by "true". (It is already present, as H43). --- Hypothesis H83 has been replaced by "true". (It is already present, as H44). --- Hypothesis H84 has been replaced by "true". (It is already present, as H45). --- Hypothesis H85 has been replaced by "true". (It is already present, as H46). --- Hypothesis H86 has been replaced by "true". (It is already present, as H47). --- Hypothesis H87 has been replaced by "true". (It is already present, as H31). --- Hypothesis H88 has been replaced by "true". (It is already present, as H32). --- Hypothesis H89 has been replaced by "true". (It is already present, as H29). --- Hypothesis H90 has been replaced by "true". (It is already present, as H30). --- Hypothesis H91 has been replaced by "true". (It is already present, as H31). --- Hypothesis H92 has been replaced by "true". (It is already present, as H32). --- Hypothesis H93 has been replaced by "true". (It is already present, as H33). --- Hypothesis H94 has been replaced by "true". (It is already present, as H34). --- Hypothesis H97 has been replaced by "true". (It is already present, as H37). --- Hypothesis H98 has been replaced by "true". (It is already present, as H38). --- Hypothesis H100 has been replaced by "true". (It is already present, as H40). --- Hypothesis H101 has been replaced by "true". (It is already present, as H41). --- Hypothesis H102 has been replaced by "true". (It is already present, as H42). --- Hypothesis H103 has been replaced by "true". (It is already present, as H43). --- Hypothesis H104 has been replaced by "true". (It is already present, as H44). --- Hypothesis H105 has been replaced by "true". (It is already present, as H45). --- Hypothesis H106 has been replaced by "true". 
(It is already present, as H46). --- Hypothesis H107 has been replaced by "true". (It is already present, as H47). %%% Simplified H108 on reading formula in, to give: %%% H108: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H109 has been replaced by "true". (It is already present, as H31). --- Hypothesis H110 has been replaced by "true". (It is already present, as H32). --- Hypothesis H111 has been replaced by "true". (It is already present, as H31). --- Hypothesis H112 has been replaced by "true". (It is already present, as H32). %%% Simplified H117 on reading formula in, to give: %%% H117: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first %%% Simplified H118 on reading formula in, to give: %%% H118: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last %%% Simplified H119 on reading formula in, to give: %%% H119: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first %%% Simplified H120 on reading formula in, to give: %%% H120: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last %%% Simplified H122 on reading formula in, to give: %%% H122: 63 <= skein_512_block_bytes_index__last %%% Simplified H123 on reading formula in, to give: %%% H123: 63 <= skein_512_block_bytes_index__last %%% Simplified H125 on reading formula in, to give: %%% H125: 63 <= natural__last %%% Simplified H128 on reading formula in, to give: %%% H128: fld_hash_bit_len(fld_h(local_ctx__2)) = fld_hash_bit_len(fld_h( ctx)) %%% Simplified H131 on reading formula in, to give: %%% H131: fld_byte_count(fld_h(local_ctx__2)) = fld_byte_count(fld_h(ctx) ) %%% Simplified H132 on reading formula in, to give: %%% H132: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H133 on reading formula in, to give: %%% H133: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H151 has been replaced by "true". (It is already present, as H132). --- Hypothesis H152 has been replaced by "true". (It is already present, as H133). --- Hypothesis H153 has been replaced by "true". (It is already present, as H134). --- Hypothesis H154 has been replaced by "true". (It is already present, as H135). --- Hypothesis H155 has been replaced by "true". (It is already present, as H136). --- Hypothesis H156 has been replaced by "true". (It is already present, as H137). --- Hypothesis H159 has been replaced by "true". (It is already present, as H140). --- Hypothesis H160 has been replaced by "true". (It is already present, as H141). --- Hypothesis H162 has been replaced by "true". (It is already present, as H143). --- Hypothesis H163 has been replaced by "true". (It is already present, as H144). --- Hypothesis H164 has been replaced by "true". (It is already present, as H145). --- Hypothesis H165 has been replaced by "true". (It is already present, as H146). --- Hypothesis H166 has been replaced by "true". (It is already present, as H147). 
--- Hypothesis H167 has been replaced by "true". (It is already present, as H148). --- Hypothesis H168 has been replaced by "true". (It is already present, as H149). --- Hypothesis H169 has been replaced by "true". (It is already present, as H150). %%% Simplified H172 on reading formula in, to give: %%% H172: true --- Hypothesis H176 has been replaced by "true". (It is already present, as H27). --- Hypothesis H177 has been replaced by "true". (It is already present, as H28). %%% Simplified H178 on reading formula in, to give: %%% H178: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= 0 and 0 <= spark__unsigned__byte__last) %%% Simplified H179 on reading formula in, to give: %%% H179: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H180 on reading formula in, to give: %%% H180: fld_byte_count(fld_h(local_ctx__2)) >= natural__first %%% Simplified H181 on reading formula in, to give: %%% H181: fld_byte_count(fld_h(local_ctx__2)) <= natural__last %%% Simplified H182 on reading formula in, to give: %%% H182: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first %%% Simplified H183 on reading formula in, to give: %%% H183: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last %%% Simplified H186 on reading formula in, to give: %%% H186: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first %%% Simplified H187 on reading formula in, to give: %%% H187: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last %%% Simplified H189 on reading formula in, to give: %%% H189: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first %%% Simplified H190 on reading formula in, to give: %%% H190: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last %%% Simplified H191 on reading formula in, to give: %%% H191: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first %%% Simplified H192 on reading formula in, to give: %%% H192: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last %%% Simplified H193 on reading formula in, to give: %%% H193: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first %%% Simplified H194 on reading formula in, to give: %%% H194: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last %%% Simplified H195 on reading formula in, to give: %%% H195: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first %%% Simplified H196 on reading formula in, to give: %%% H196: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last --- Hypothesis H197 has been replaced by "true". (It is already present, as H170). --- Hypothesis H198 has been replaced by "true". (It is already present, as H171). %%% Simplified C3 on reading formula in, to give: %%% C3: true *** Proved C3: true -S- Applied substitution rule skein_512_fi_rules(135). This was achieved by replacing all occurrences of positive_output_block_count_t__first by: 1. New C1: ((fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 >= 1 -S- Applied substitution rule skein_512_fi_rules(136). 
This was achieved by replacing all occurrences of positive_output_block_count_t__last by: 4194304. New C2: ((fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 <= 4194304 -S- Applied substitution rule skein_512_fi_rules(6). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H173: fld_hash_bit_len(fld_h(local_ctx__2)) >= - 2147483655 New C4: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= - 2147483711 -S- Applied substitution rule skein_512_fi_rules(7). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H174: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 New C5: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= 2147483584 >>> Restructured hypothesis H48 into: >>> H48: skein_512_block_bytes_c <= fld_byte_count(fld_h(ctx)) -S- Applied substitution rule skein_512_fi_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H48: 64 <= fld_byte_count(fld_h(ctx)) -S- Applied substitution rule skein_512_fi_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New H9: fld_byte_count(fld_h(ctx)) >= 0 New H31: fld_byte_count(fld_h(ctx)) >= 0 New H115: true New H134: fld_byte_count(fld_h(local_ctx__2)) >= 0 New H180: fld_byte_count(fld_h(local_ctx__2)) >= 0 -S- Applied substitution rule skein_512_fi_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H10: fld_byte_count(fld_h(ctx)) <= 2147483647 New H32: fld_byte_count(fld_h(ctx)) <= 2147483647 New H116: true New H124: skein_512_block_bytes_index__last <= 2147483647 New H125: true New H135: fld_byte_count(fld_h(local_ctx__2)) <= 2147483647 New H181: fld_byte_count(fld_h(local_ctx__2)) <= 2147483647 -S- Applied substitution rule skein_512_fi_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H140: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= 0 New H186: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(43). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H38: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H141: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= 63 New H187: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= 63 -S- Applied substitution rule skein_512_fi_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H143: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= 0 New H189: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(49). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H144: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= 127 New H190: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= 127 -S- Applied substitution rule skein_512_fi_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. 
New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: true New H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H108: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= spark__unsigned__byte__last) New H132: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2] ) <= spark__unsigned__byte__last) New H178: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_fi_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H28: true New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H108: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= 255) New H132: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2] ) <= 255) New H178: true -S- Applied substitution rule skein_512_fi_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H145: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= 0 New H191: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(61). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H146: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= 65535 New H192: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= 65535 -S- Applied substitution rule skein_512_fi_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H147: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= 0 New H193: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(67). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. 
New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H45: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H148: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= 4294967295 New H194: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= 4294967295 -S- Applied substitution rule skein_512_fi_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H46: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H133: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1] ) <= spark__unsigned__u64__last) New H149: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= 0 New H179: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1] ) <= spark__unsigned__u64__last) New H195: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H150: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= 18446744073709551615 New H196: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= 18446744073709551615 New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H133: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1] ) <= 18446744073709551615) New H179: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1] ) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(83). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H11: fld_hash_bit_len(fld_h(ctx)) >= 0 New H33: fld_hash_bit_len(fld_h(ctx)) >= 0 New H136: fld_hash_bit_len(fld_h(local_ctx__2)) >= 0 New H182: fld_hash_bit_len(fld_h(local_ctx__2)) >= 0 -S- Applied substitution rule skein_512_fi_rules(84). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. 
New H12: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H34: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H137: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 New H183: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 New H117: fld_hash_bit_len(fld_h(ctx)) >= 1 New H126: fld_hash_bit_len(fld_h(local_ctx__2)) >= 1 -S- Applied substitution rule skein_512_fi_rules(89). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H118: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H127: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H133: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__2) , [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= 18446744073709551615) New H179: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__2) , [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(94). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H133: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2) , [i___1]) <= 18446744073709551615) New H179: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2) , [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 New H119: fld_byte_count(fld_h(ctx)) >= 0 New H129: fld_byte_count(fld_h(local_ctx__2)) >= 0 -S- Applied substitution rule skein_512_fi_rules(99). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 New H120: fld_byte_count(fld_h(ctx)) <= 64 New H130: fld_byte_count(fld_h(local_ctx__2)) <= 64 -S- Applied substitution rule skein_512_fi_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. 
New H121: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H108: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= 255) New H132: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__2) , [i___2]) and element(fld_b(local_ctx__2), [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(104). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H122: true New H123: true New H124: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H108: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= 255) New H132: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2) , [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H113: true -S- Applied substitution rule skein_512_fi_rules(109). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H114: true -S- Applied substitution rule skein_512_fi_rules(125). This was achieved by replacing all occurrences of output_byte_count_t__first by: 1. New H170: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= 1 -S- Applied substitution rule skein_512_fi_rules(126). This was achieved by replacing all occurrences of output_byte_count_t__last by: 268435455. New H171: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= 268435455 %%% Hypotheses H48 & H4 together imply that 64 = fld_byte_count(fld_h(ctx)). H48 & H4 have therefore been deleted and a new H199 added to this effect. *** Proved C4: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= - 2147483711 using hypothesis H170. *** Proved C5: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= 2147483584 using hypothesis H171. --- Eliminated hypothesis H13 (true-hypothesis). --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H17 (true-hypothesis). --- Eliminated hypothesis H35 (true-hypothesis). --- Eliminated hypothesis H36 (true-hypothesis). --- Eliminated hypothesis H39 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H50 (true-hypothesis). --- Eliminated hypothesis H51 (true-hypothesis). --- Eliminated hypothesis H52 (true-hypothesis). --- Eliminated hypothesis H53 (true-hypothesis). --- Eliminated hypothesis H54 (true-hypothesis). --- Eliminated hypothesis H55 (true-hypothesis). --- Eliminated hypothesis H56 (true-hypothesis). --- Eliminated hypothesis H57 (true-hypothesis). --- Eliminated hypothesis H58 (true-hypothesis). --- Eliminated hypothesis H59 (true-hypothesis). --- Eliminated hypothesis H60 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). 
--- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H63 (true-hypothesis). --- Eliminated hypothesis H64 (true-hypothesis). --- Eliminated hypothesis H65 (true-hypothesis). --- Eliminated hypothesis H66 (true-hypothesis). --- Eliminated hypothesis H67 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H69 (true-hypothesis). --- Eliminated hypothesis H70 (true-hypothesis). --- Eliminated hypothesis H71 (true-hypothesis). --- Eliminated hypothesis H72 (true-hypothesis). --- Eliminated hypothesis H73 (true-hypothesis). --- Eliminated hypothesis H74 (true-hypothesis). --- Eliminated hypothesis H75 (true-hypothesis). --- Eliminated hypothesis H76 (true-hypothesis). --- Eliminated hypothesis H77 (true-hypothesis). --- Eliminated hypothesis H78 (true-hypothesis). --- Eliminated hypothesis H79 (true-hypothesis). --- Eliminated hypothesis H80 (true-hypothesis). --- Eliminated hypothesis H81 (true-hypothesis). --- Eliminated hypothesis H82 (true-hypothesis). --- Eliminated hypothesis H83 (true-hypothesis). --- Eliminated hypothesis H84 (true-hypothesis). --- Eliminated hypothesis H85 (true-hypothesis). --- Eliminated hypothesis H86 (true-hypothesis). --- Eliminated hypothesis H87 (true-hypothesis). --- Eliminated hypothesis H88 (true-hypothesis). --- Eliminated hypothesis H89 (true-hypothesis). --- Eliminated hypothesis H90 (true-hypothesis). --- Eliminated hypothesis H91 (true-hypothesis). --- Eliminated hypothesis H92 (true-hypothesis). --- Eliminated hypothesis H93 (true-hypothesis). --- Eliminated hypothesis H94 (true-hypothesis). --- Eliminated hypothesis H95 (true-hypothesis). --- Eliminated hypothesis H96 (true-hypothesis). --- Eliminated hypothesis H97 (true-hypothesis). --- Eliminated hypothesis H98 (true-hypothesis). --- Eliminated hypothesis H99 (true-hypothesis). --- Eliminated hypothesis H100 (true-hypothesis). --- Eliminated hypothesis H101 (true-hypothesis). --- Eliminated hypothesis H102 (true-hypothesis). --- Eliminated hypothesis H103 (true-hypothesis). --- Eliminated hypothesis H104 (true-hypothesis). --- Eliminated hypothesis H105 (true-hypothesis). --- Eliminated hypothesis H106 (true-hypothesis). --- Eliminated hypothesis H107 (true-hypothesis). --- Eliminated hypothesis H109 (true-hypothesis). --- Eliminated hypothesis H110 (true-hypothesis). --- Eliminated hypothesis H111 (true-hypothesis). --- Eliminated hypothesis H112 (true-hypothesis). --- Eliminated hypothesis H138 (true-hypothesis). --- Eliminated hypothesis H139 (true-hypothesis). --- Eliminated hypothesis H142 (true-hypothesis). --- Eliminated hypothesis H151 (true-hypothesis). --- Eliminated hypothesis H152 (true-hypothesis). --- Eliminated hypothesis H153 (true-hypothesis). --- Eliminated hypothesis H154 (true-hypothesis). --- Eliminated hypothesis H155 (true-hypothesis). --- Eliminated hypothesis H156 (true-hypothesis). --- Eliminated hypothesis H157 (true-hypothesis). --- Eliminated hypothesis H158 (true-hypothesis). --- Eliminated hypothesis H159 (true-hypothesis). --- Eliminated hypothesis H160 (true-hypothesis). --- Eliminated hypothesis H161 (true-hypothesis). --- Eliminated hypothesis H162 (true-hypothesis). --- Eliminated hypothesis H163 (true-hypothesis). --- Eliminated hypothesis H164 (true-hypothesis). --- Eliminated hypothesis H165 (true-hypothesis). --- Eliminated hypothesis H166 (true-hypothesis). --- Eliminated hypothesis H167 (true-hypothesis). --- Eliminated hypothesis H168 (true-hypothesis). 
--- Eliminated hypothesis H169 (true-hypothesis). --- Eliminated hypothesis H172 (true-hypothesis). --- Eliminated hypothesis H176 (true-hypothesis). --- Eliminated hypothesis H177 (true-hypothesis). --- Eliminated hypothesis H184 (true-hypothesis). --- Eliminated hypothesis H185 (true-hypothesis). --- Eliminated hypothesis H188 (true-hypothesis). --- Eliminated hypothesis H197 (true-hypothesis). --- Eliminated hypothesis H198 (true-hypothesis). --- Eliminated hypothesis H115 (true-hypothesis). --- Eliminated hypothesis H116 (true-hypothesis). --- Eliminated hypothesis H125 (true-hypothesis). --- Eliminated hypothesis H27 (true-hypothesis). --- Eliminated hypothesis H28 (true-hypothesis). --- Eliminated hypothesis H178 (true-hypothesis). --- Eliminated hypothesis H121 (true-hypothesis). --- Eliminated hypothesis H122 (true-hypothesis). --- Eliminated hypothesis H123 (true-hypothesis). --- Eliminated hypothesis H124 (true-hypothesis). --- Eliminated hypothesis H113 (true-hypothesis). --- Eliminated hypothesis H114 (true-hypothesis). --- Eliminated hypothesis H117 (duplicate of H26). --- Eliminated hypothesis H183 (duplicate of H174). --- Eliminated hypothesis H31 (duplicate of H9). --- Eliminated hypothesis H119 (duplicate of H9). --- Eliminated hypothesis H180 (duplicate of H134). --- Eliminated hypothesis H32 (duplicate of H10). --- Eliminated hypothesis H181 (duplicate of H135). --- Eliminated hypothesis H37 (duplicate of H15). --- Eliminated hypothesis H186 (duplicate of H140). --- Eliminated hypothesis H38 (duplicate of H16). --- Eliminated hypothesis H187 (duplicate of H141). --- Eliminated hypothesis H40 (duplicate of H18). --- Eliminated hypothesis H189 (duplicate of H143). --- Eliminated hypothesis H41 (duplicate of H19). --- Eliminated hypothesis H190 (duplicate of H144). --- Eliminated hypothesis H42 (duplicate of H20). --- Eliminated hypothesis H191 (duplicate of H145). --- Eliminated hypothesis H43 (duplicate of H21). --- Eliminated hypothesis H192 (duplicate of H146). --- Eliminated hypothesis H44 (duplicate of H22). --- Eliminated hypothesis H193 (duplicate of H147). --- Eliminated hypothesis H45 (duplicate of H23). --- Eliminated hypothesis H194 (duplicate of H148). --- Eliminated hypothesis H46 (duplicate of H24). --- Eliminated hypothesis H195 (duplicate of H149). --- Eliminated hypothesis H47 (duplicate of H25). --- Eliminated hypothesis H196 (duplicate of H150). --- Eliminated hypothesis H33 (duplicate of H11). --- Eliminated hypothesis H182 (duplicate of H136). --- Eliminated hypothesis H34 (duplicate of H12). --- Eliminated hypothesis H118 (duplicate of H12). --- Eliminated hypothesis H174 (duplicate of H137). --- Eliminated hypothesis H12 (duplicate of H2). --- Eliminated hypothesis H137 (duplicate of H127). --- Eliminated hypothesis H30 (duplicate of H8). --- Eliminated hypothesis H179 (duplicate of H133). --- Eliminated hypothesis H9 (duplicate of H3). --- Eliminated hypothesis H134 (duplicate of H129). --- Eliminated hypothesis H29 (duplicate of H7). --- Eliminated hypothesis H26 (duplicate of H1). --- Eliminated hypothesis H3 (redundant, given H199). --- Eliminated hypothesis H10 (redundant, given H120). --- Eliminated hypothesis H11 (redundant, given H1). --- Eliminated hypothesis H120 (redundant, given H199). --- Eliminated hypothesis H135 (redundant, given H130). --- Eliminated hypothesis H136 (redundant, given H126). --- Eliminated hypothesis H173 (redundant, given H126). -S- Substituted hypothesis H128. 
This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( local_ctx__2)) by: fld_hash_bit_len(fld_h(ctx)). -S- Substituted hypothesis H131. This was achieved by replacing all occurrences of fld_byte_count(fld_h( local_ctx__2)) by: fld_byte_count(fld_h(ctx)). *** Proved C1: ((fld_hash_bit_len(fld_h(ctx)) + 7) div 8 + 63) div 64 >= 1 using hypothesis H170. *** Proved C2: ((fld_hash_bit_len(fld_h(ctx)) + 7) div 8 + 63) div 64 <= 4194304 using hypothesis H171. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_16. @@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H29 on reading formula in, to give: %%% H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H31 on reading formula in, to give: %%% H31: fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H32 on reading formula in, to give: %%% H32: fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H33 on reading formula in, to give: %%% H33: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first %%% Simplified H34 on reading formula in, to give: %%% H34: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last %%% Simplified H37 on reading formula in, to give: %%% H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first %%% Simplified H38 on reading formula in, to give: %%% H38: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last %%% Simplified H40 on reading formula in, to give: %%% H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first %%% Simplified H41 on reading formula in, to give: %%% H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last %%% Simplified H42 on reading formula in, to give: %%% H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first %%% Simplified H43 on reading formula in, to give: %%% H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last %%% Simplified H44 on reading formula in, to give: %%% H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first %%% Simplified H45 on reading formula in, to give: %%% H45: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last %%% Simplified H46 on reading formula in, to give: %%% H46: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first %%% Simplified H47 on reading formula in, to give: %%% H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last %%% 
Simplified H48 on reading formula in, to give: %%% H48: fld_byte_count(fld_h(ctx)) < skein_512_block_bytes_c --- Hypothesis H49 has been replaced by "true". (It is already present, as H48). %%% Simplified H50 on reading formula in, to give: %%% H50: fld_hash_bit_len(fld_h(ctx)) > 0 %%% Simplified H51 on reading formula in, to give: %%% H51: fld_hash_bit_len(fld_h(local_ctx__1)) = fld_hash_bit_len(fld_h( ctx)) %%% Simplified H54 on reading formula in, to give: %%% H54: fld_byte_count(fld_h(local_ctx__1)) = fld_byte_count(fld_h(ctx)) %%% Simplified H55 on reading formula in, to give: %%% H55: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__1), [i___2]) and element(fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H56 on reading formula in, to give: %%% H56: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__1), [i___1]) and element(fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H74 has been replaced by "true". (It is already present, as H55). --- Hypothesis H75 has been replaced by "true". (It is already present, as H56). --- Hypothesis H76 has been replaced by "true". (It is already present, as H57). --- Hypothesis H77 has been replaced by "true". (It is already present, as H58). --- Hypothesis H78 has been replaced by "true". (It is already present, as H59). --- Hypothesis H79 has been replaced by "true". (It is already present, as H60). --- Hypothesis H82 has been replaced by "true". (It is already present, as H63). --- Hypothesis H83 has been replaced by "true". (It is already present, as H64). --- Hypothesis H85 has been replaced by "true". (It is already present, as H66). --- Hypothesis H86 has been replaced by "true". (It is already present, as H67). --- Hypothesis H87 has been replaced by "true". (It is already present, as H68). --- Hypothesis H88 has been replaced by "true". (It is already present, as H69). --- Hypothesis H89 has been replaced by "true". (It is already present, as H70). --- Hypothesis H90 has been replaced by "true". (It is already present, as H71). --- Hypothesis H91 has been replaced by "true". (It is already present, as H72). --- Hypothesis H92 has been replaced by "true". (It is already present, as H73). --- Hypothesis H93 has been replaced by "true". (It is already present, as H55). --- Hypothesis H94 has been replaced by "true". (It is already present, as H56). --- Hypothesis H95 has been replaced by "true". (It is already present, as H57). --- Hypothesis H96 has been replaced by "true". (It is already present, as H58). --- Hypothesis H97 has been replaced by "true". (It is already present, as H59). --- Hypothesis H98 has been replaced by "true". (It is already present, as H60). --- Hypothesis H101 has been replaced by "true". (It is already present, as H63). --- Hypothesis H102 has been replaced by "true". (It is already present, as H64). --- Hypothesis H104 has been replaced by "true". (It is already present, as H66). --- Hypothesis H105 has been replaced by "true". (It is already present, as H67). --- Hypothesis H106 has been replaced by "true". (It is already present, as H68). --- Hypothesis H107 has been replaced by "true". (It is already present, as H69). --- Hypothesis H108 has been replaced by "true". (It is already present, as H70). 
--- Hypothesis H109 has been replaced by "true". (It is already present, as H71). --- Hypothesis H110 has been replaced by "true". (It is already present, as H72). --- Hypothesis H111 has been replaced by "true". (It is already present, as H73). --- Hypothesis H112 has been replaced by "true". (It is already present, as H57). --- Hypothesis H113 has been replaced by "true". (It is already present, as H58). --- Hypothesis H114 has been replaced by "true". (It is already present, as H55). --- Hypothesis H115 has been replaced by "true". (It is already present, as H56). --- Hypothesis H116 has been replaced by "true". (It is already present, as H57). --- Hypothesis H117 has been replaced by "true". (It is already present, as H58). --- Hypothesis H118 has been replaced by "true". (It is already present, as H59). --- Hypothesis H119 has been replaced by "true". (It is already present, as H60). --- Hypothesis H122 has been replaced by "true". (It is already present, as H63). --- Hypothesis H123 has been replaced by "true". (It is already present, as H64). --- Hypothesis H125 has been replaced by "true". (It is already present, as H66). --- Hypothesis H126 has been replaced by "true". (It is already present, as H67). --- Hypothesis H127 has been replaced by "true". (It is already present, as H68). --- Hypothesis H128 has been replaced by "true". (It is already present, as H69). --- Hypothesis H129 has been replaced by "true". (It is already present, as H70). --- Hypothesis H130 has been replaced by "true". (It is already present, as H71). --- Hypothesis H131 has been replaced by "true". (It is already present, as H72). --- Hypothesis H132 has been replaced by "true". (It is already present, as H73). %%% Simplified H133 on reading formula in, to give: %%% H133: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__1), [i___1]) and element(fld_b(local_ctx__1), [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H134 has been replaced by "true". (It is already present, as H57). --- Hypothesis H135 has been replaced by "true". (It is already present, as H58). --- Hypothesis H136 has been replaced by "true". (It is already present, as H57). --- Hypothesis H137 has been replaced by "true". (It is already present, as H58). %%% Simplified H147 on reading formula in, to give: %%% H147: 63 <= skein_512_block_bytes_index__last %%% Simplified H148 on reading formula in, to give: %%% H148: 63 <= skein_512_block_bytes_index__last %%% Simplified H150 on reading formula in, to give: %%% H150: 63 <= natural__last %%% Simplified H157 on reading formula in, to give: %%% H157: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H158 on reading formula in, to give: %%% H158: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H176 has been replaced by "true". (It is already present, as H157). --- Hypothesis H177 has been replaced by "true". (It is already present, as H158). --- Hypothesis H178 has been replaced by "true". 
(It is already present, as H159). --- Hypothesis H179 has been replaced by "true". (It is already present, as H160). --- Hypothesis H180 has been replaced by "true". (It is already present, as H161). --- Hypothesis H181 has been replaced by "true". (It is already present, as H162). --- Hypothesis H184 has been replaced by "true". (It is already present, as H165). --- Hypothesis H185 has been replaced by "true". (It is already present, as H166). --- Hypothesis H187 has been replaced by "true". (It is already present, as H168). --- Hypothesis H188 has been replaced by "true". (It is already present, as H169). --- Hypothesis H189 has been replaced by "true". (It is already present, as H170). --- Hypothesis H190 has been replaced by "true". (It is already present, as H171). --- Hypothesis H191 has been replaced by "true". (It is already present, as H172). --- Hypothesis H192 has been replaced by "true". (It is already present, as H173). --- Hypothesis H193 has been replaced by "true". (It is already present, as H174). --- Hypothesis H194 has been replaced by "true". (It is already present, as H175). %%% Simplified H197 on reading formula in, to give: %%% H197: true --- Hypothesis H201 has been replaced by "true". (It is already present, as H27). --- Hypothesis H202 has been replaced by "true". (It is already present, as H28). %%% Simplified H203 on reading formula in, to give: %%% H203: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= 0 and 0 <= spark__unsigned__byte__last) %%% Simplified H204 on reading formula in, to give: %%% H204: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H205 on reading formula in, to give: %%% H205: fld_byte_count(fld_h(local_ctx__2)) >= natural__first %%% Simplified H206 on reading formula in, to give: %%% H206: fld_byte_count(fld_h(local_ctx__2)) <= natural__last %%% Simplified H207 on reading formula in, to give: %%% H207: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first %%% Simplified H208 on reading formula in, to give: %%% H208: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last %%% Simplified H211 on reading formula in, to give: %%% H211: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first %%% Simplified H212 on reading formula in, to give: %%% H212: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last %%% Simplified H214 on reading formula in, to give: %%% H214: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first %%% Simplified H215 on reading formula in, to give: %%% H215: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last %%% Simplified H216 on reading formula in, to give: %%% H216: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first %%% Simplified H217 on reading formula in, to give: %%% H217: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last %%% Simplified H218 on reading formula in, to give: %%% H218: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first %%% Simplified H219 on reading formula in, to give: %%% H219: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= 
spark__unsigned__u32__last %%% Simplified H220 on reading formula in, to give: %%% H220: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first %%% Simplified H221 on reading formula in, to give: %%% H221: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last --- Hypothesis H222 has been replaced by "true". (It is already present, as H195). --- Hypothesis H223 has been replaced by "true". (It is already present, as H196). %%% Simplified H226 on reading formula in, to give: %%% H226: true -S- Applied substitution rule skein_512_fi_rules(130). This was achieved by replacing all occurrences of output_block_count_t__first by: 0. New C1: true -S- Applied substitution rule skein_512_fi_rules(131). This was achieved by replacing all occurrences of output_block_count_t__last by: 4194304. New C2: true *** Proved C1: true *** Proved C2: true *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_17. @@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H29 on reading formula in, to give: %%% H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H31 on reading formula in, to give: %%% H31: fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H32 on reading formula in, to give: %%% H32: fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H33 on reading formula in, to give: %%% H33: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first %%% Simplified H34 on reading formula in, to give: %%% H34: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last %%% Simplified H37 on reading formula in, to give: %%% H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first %%% Simplified H38 on reading formula in, to give: %%% H38: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last %%% Simplified H40 on reading formula in, to give: %%% H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first %%% Simplified H41 on reading formula in, to give: %%% H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last %%% Simplified H42 on reading formula in, to give: %%% H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first %%% Simplified H43 on reading formula in, to give: %%% H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last %%% Simplified H44 on reading formula in, to give: %%% H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 
spark__unsigned__u32__first %%% Simplified H45 on reading formula in, to give: %%% H45: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last %%% Simplified H46 on reading formula in, to give: %%% H46: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first %%% Simplified H47 on reading formula in, to give: %%% H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last %%% Simplified H48 on reading formula in, to give: %%% H48: not fld_byte_count(fld_h(ctx)) < skein_512_block_bytes_c --- Hypothesis H49 has been replaced by "true". (It is already present, as H29). --- Hypothesis H50 has been replaced by "true". (It is already present, as H30). --- Hypothesis H51 has been replaced by "true". (It is already present, as H31). --- Hypothesis H52 has been replaced by "true". (It is already present, as H32). --- Hypothesis H53 has been replaced by "true". (It is already present, as H33). --- Hypothesis H54 has been replaced by "true". (It is already present, as H34). --- Hypothesis H57 has been replaced by "true". (It is already present, as H37). --- Hypothesis H58 has been replaced by "true". (It is already present, as H38). --- Hypothesis H60 has been replaced by "true". (It is already present, as H40). --- Hypothesis H61 has been replaced by "true". (It is already present, as H41). --- Hypothesis H62 has been replaced by "true". (It is already present, as H42). --- Hypothesis H63 has been replaced by "true". (It is already present, as H43). --- Hypothesis H64 has been replaced by "true". (It is already present, as H44). --- Hypothesis H65 has been replaced by "true". (It is already present, as H45). --- Hypothesis H66 has been replaced by "true". (It is already present, as H46). --- Hypothesis H67 has been replaced by "true". (It is already present, as H47). --- Hypothesis H68 has been replaced by "true". (It is already present, as H29). --- Hypothesis H69 has been replaced by "true". (It is already present, as H30). --- Hypothesis H70 has been replaced by "true". (It is already present, as H31). --- Hypothesis H71 has been replaced by "true". (It is already present, as H32). --- Hypothesis H72 has been replaced by "true". (It is already present, as H33). --- Hypothesis H73 has been replaced by "true". (It is already present, as H34). --- Hypothesis H76 has been replaced by "true". (It is already present, as H37). --- Hypothesis H77 has been replaced by "true". (It is already present, as H38). --- Hypothesis H79 has been replaced by "true". (It is already present, as H40). --- Hypothesis H80 has been replaced by "true". (It is already present, as H41). --- Hypothesis H81 has been replaced by "true". (It is already present, as H42). --- Hypothesis H82 has been replaced by "true". (It is already present, as H43). --- Hypothesis H83 has been replaced by "true". (It is already present, as H44). --- Hypothesis H84 has been replaced by "true". (It is already present, as H45). --- Hypothesis H85 has been replaced by "true". (It is already present, as H46). --- Hypothesis H86 has been replaced by "true". (It is already present, as H47). --- Hypothesis H87 has been replaced by "true". (It is already present, as H31). --- Hypothesis H88 has been replaced by "true". (It is already present, as H32). --- Hypothesis H89 has been replaced by "true". (It is already present, as H29). --- Hypothesis H90 has been replaced by "true". (It is already present, as H30). --- Hypothesis H91 has been replaced by "true". 
(It is already present, as H31). --- Hypothesis H92 has been replaced by "true". (It is already present, as H32). --- Hypothesis H93 has been replaced by "true". (It is already present, as H33). --- Hypothesis H94 has been replaced by "true". (It is already present, as H34). --- Hypothesis H97 has been replaced by "true". (It is already present, as H37). --- Hypothesis H98 has been replaced by "true". (It is already present, as H38). --- Hypothesis H100 has been replaced by "true". (It is already present, as H40). --- Hypothesis H101 has been replaced by "true". (It is already present, as H41). --- Hypothesis H102 has been replaced by "true". (It is already present, as H42). --- Hypothesis H103 has been replaced by "true". (It is already present, as H43). --- Hypothesis H104 has been replaced by "true". (It is already present, as H44). --- Hypothesis H105 has been replaced by "true". (It is already present, as H45). --- Hypothesis H106 has been replaced by "true". (It is already present, as H46). --- Hypothesis H107 has been replaced by "true". (It is already present, as H47). %%% Simplified H108 on reading formula in, to give: %%% H108: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H109 has been replaced by "true". (It is already present, as H31). --- Hypothesis H110 has been replaced by "true". (It is already present, as H32). --- Hypothesis H111 has been replaced by "true". (It is already present, as H31). --- Hypothesis H112 has been replaced by "true". (It is already present, as H32). %%% Simplified H117 on reading formula in, to give: %%% H117: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first %%% Simplified H118 on reading formula in, to give: %%% H118: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last %%% Simplified H119 on reading formula in, to give: %%% H119: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first %%% Simplified H120 on reading formula in, to give: %%% H120: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last %%% Simplified H122 on reading formula in, to give: %%% H122: 63 <= skein_512_block_bytes_index__last %%% Simplified H123 on reading formula in, to give: %%% H123: 63 <= skein_512_block_bytes_index__last %%% Simplified H125 on reading formula in, to give: %%% H125: 63 <= natural__last %%% Simplified H128 on reading formula in, to give: %%% H128: fld_hash_bit_len(fld_h(local_ctx__2)) = fld_hash_bit_len(fld_h( ctx)) %%% Simplified H131 on reading formula in, to give: %%% H131: fld_byte_count(fld_h(local_ctx__2)) = fld_byte_count(fld_h(ctx) ) %%% Simplified H132 on reading formula in, to give: %%% H132: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H133 on reading formula in, to give: %%% H133: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H151 has been replaced by "true". (It is already present, as H132). 
--- Hypothesis H152 has been replaced by "true". (It is already present, as H133). --- Hypothesis H153 has been replaced by "true". (It is already present, as H134). --- Hypothesis H154 has been replaced by "true". (It is already present, as H135). --- Hypothesis H155 has been replaced by "true". (It is already present, as H136). --- Hypothesis H156 has been replaced by "true". (It is already present, as H137). --- Hypothesis H159 has been replaced by "true". (It is already present, as H140). --- Hypothesis H160 has been replaced by "true". (It is already present, as H141). --- Hypothesis H162 has been replaced by "true". (It is already present, as H143). --- Hypothesis H163 has been replaced by "true". (It is already present, as H144). --- Hypothesis H164 has been replaced by "true". (It is already present, as H145). --- Hypothesis H165 has been replaced by "true". (It is already present, as H146). --- Hypothesis H166 has been replaced by "true". (It is already present, as H147). --- Hypothesis H167 has been replaced by "true". (It is already present, as H148). --- Hypothesis H168 has been replaced by "true". (It is already present, as H149). --- Hypothesis H169 has been replaced by "true". (It is already present, as H150). %%% Simplified H172 on reading formula in, to give: %%% H172: true --- Hypothesis H176 has been replaced by "true". (It is already present, as H27). --- Hypothesis H177 has been replaced by "true". (It is already present, as H28). %%% Simplified H178 on reading formula in, to give: %%% H178: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= 0 and 0 <= spark__unsigned__byte__last) %%% Simplified H179 on reading formula in, to give: %%% H179: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H180 on reading formula in, to give: %%% H180: fld_byte_count(fld_h(local_ctx__2)) >= natural__first %%% Simplified H181 on reading formula in, to give: %%% H181: fld_byte_count(fld_h(local_ctx__2)) <= natural__last %%% Simplified H182 on reading formula in, to give: %%% H182: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first %%% Simplified H183 on reading formula in, to give: %%% H183: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last %%% Simplified H186 on reading formula in, to give: %%% H186: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first %%% Simplified H187 on reading formula in, to give: %%% H187: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last %%% Simplified H189 on reading formula in, to give: %%% H189: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first %%% Simplified H190 on reading formula in, to give: %%% H190: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last %%% Simplified H191 on reading formula in, to give: %%% H191: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first %%% Simplified H192 on reading formula in, to give: %%% H192: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last %%% Simplified H193 on reading formula in, to give: %%% H193: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first %%% 
Simplified H194 on reading formula in, to give: %%% H194: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last %%% Simplified H195 on reading formula in, to give: %%% H195: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first %%% Simplified H196 on reading formula in, to give: %%% H196: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last --- Hypothesis H197 has been replaced by "true". (It is already present, as H170). --- Hypothesis H198 has been replaced by "true". (It is already present, as H171). %%% Simplified H201 on reading formula in, to give: %%% H201: true -S- Applied substitution rule skein_512_fi_rules(130). This was achieved by replacing all occurrences of output_block_count_t__first by: 0. New C1: true -S- Applied substitution rule skein_512_fi_rules(131). This was achieved by replacing all occurrences of output_block_count_t__last by: 4194304. New C2: true *** Proved C1: true *** Proved C2: true *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_18. @@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H29 on reading formula in, to give: %%% H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H31 on reading formula in, to give: %%% H31: fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H32 on reading formula in, to give: %%% H32: fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H33 on reading formula in, to give: %%% H33: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first %%% Simplified H34 on reading formula in, to give: %%% H34: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last %%% Simplified H37 on reading formula in, to give: %%% H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first %%% Simplified H38 on reading formula in, to give: %%% H38: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last %%% Simplified H40 on reading formula in, to give: %%% H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first %%% Simplified H41 on reading formula in, to give: %%% H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last %%% Simplified H42 on reading formula in, to give: %%% H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first %%% Simplified H43 on reading formula in, to give: %%% H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last %%% 
Simplified H44 on reading formula in, to give: %%% H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first %%% Simplified H45 on reading formula in, to give: %%% H45: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last %%% Simplified H46 on reading formula in, to give: %%% H46: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first %%% Simplified H47 on reading formula in, to give: %%% H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last %%% Simplified H48 on reading formula in, to give: %%% H48: fld_byte_count(fld_h(ctx)) < skein_512_block_bytes_c --- Hypothesis H49 has been replaced by "true". (It is already present, as H48). %%% Simplified H50 on reading formula in, to give: %%% H50: fld_hash_bit_len(fld_h(ctx)) > 0 %%% Simplified H51 on reading formula in, to give: %%% H51: fld_hash_bit_len(fld_h(local_ctx__1)) = fld_hash_bit_len(fld_h( ctx)) %%% Simplified H54 on reading formula in, to give: %%% H54: fld_byte_count(fld_h(local_ctx__1)) = fld_byte_count(fld_h(ctx)) %%% Simplified H55 on reading formula in, to give: %%% H55: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__1), [i___2]) and element(fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H56 on reading formula in, to give: %%% H56: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__1), [i___1]) and element(fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H74 has been replaced by "true". (It is already present, as H55). --- Hypothesis H75 has been replaced by "true". (It is already present, as H56). --- Hypothesis H76 has been replaced by "true". (It is already present, as H57). --- Hypothesis H77 has been replaced by "true". (It is already present, as H58). --- Hypothesis H78 has been replaced by "true". (It is already present, as H59). --- Hypothesis H79 has been replaced by "true". (It is already present, as H60). --- Hypothesis H82 has been replaced by "true". (It is already present, as H63). --- Hypothesis H83 has been replaced by "true". (It is already present, as H64). --- Hypothesis H85 has been replaced by "true". (It is already present, as H66). --- Hypothesis H86 has been replaced by "true". (It is already present, as H67). --- Hypothesis H87 has been replaced by "true". (It is already present, as H68). --- Hypothesis H88 has been replaced by "true". (It is already present, as H69). --- Hypothesis H89 has been replaced by "true". (It is already present, as H70). --- Hypothesis H90 has been replaced by "true". (It is already present, as H71). --- Hypothesis H91 has been replaced by "true". (It is already present, as H72). --- Hypothesis H92 has been replaced by "true". (It is already present, as H73). --- Hypothesis H93 has been replaced by "true". (It is already present, as H55). --- Hypothesis H94 has been replaced by "true". (It is already present, as H56). --- Hypothesis H95 has been replaced by "true". (It is already present, as H57). --- Hypothesis H96 has been replaced by "true". (It is already present, as H58). --- Hypothesis H97 has been replaced by "true". (It is already present, as H59). --- Hypothesis H98 has been replaced by "true". (It is already present, as H60). 
--- Hypothesis H101 has been replaced by "true". (It is already present, as H63). --- Hypothesis H102 has been replaced by "true". (It is already present, as H64). --- Hypothesis H104 has been replaced by "true". (It is already present, as H66). --- Hypothesis H105 has been replaced by "true". (It is already present, as H67). --- Hypothesis H106 has been replaced by "true". (It is already present, as H68). --- Hypothesis H107 has been replaced by "true". (It is already present, as H69). --- Hypothesis H108 has been replaced by "true". (It is already present, as H70). --- Hypothesis H109 has been replaced by "true". (It is already present, as H71). --- Hypothesis H110 has been replaced by "true". (It is already present, as H72). --- Hypothesis H111 has been replaced by "true". (It is already present, as H73). --- Hypothesis H112 has been replaced by "true". (It is already present, as H57). --- Hypothesis H113 has been replaced by "true". (It is already present, as H58). --- Hypothesis H114 has been replaced by "true". (It is already present, as H55). --- Hypothesis H115 has been replaced by "true". (It is already present, as H56). --- Hypothesis H116 has been replaced by "true". (It is already present, as H57). --- Hypothesis H117 has been replaced by "true". (It is already present, as H58). --- Hypothesis H118 has been replaced by "true". (It is already present, as H59). --- Hypothesis H119 has been replaced by "true". (It is already present, as H60). --- Hypothesis H122 has been replaced by "true". (It is already present, as H63). --- Hypothesis H123 has been replaced by "true". (It is already present, as H64). --- Hypothesis H125 has been replaced by "true". (It is already present, as H66). --- Hypothesis H126 has been replaced by "true". (It is already present, as H67). --- Hypothesis H127 has been replaced by "true". (It is already present, as H68). --- Hypothesis H128 has been replaced by "true". (It is already present, as H69). --- Hypothesis H129 has been replaced by "true". (It is already present, as H70). --- Hypothesis H130 has been replaced by "true". (It is already present, as H71). --- Hypothesis H131 has been replaced by "true". (It is already present, as H72). --- Hypothesis H132 has been replaced by "true". (It is already present, as H73). %%% Simplified H133 on reading formula in, to give: %%% H133: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__1), [i___1]) and element(fld_b(local_ctx__1), [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H134 has been replaced by "true". (It is already present, as H57). --- Hypothesis H135 has been replaced by "true". (It is already present, as H58). --- Hypothesis H136 has been replaced by "true". (It is already present, as H57). --- Hypothesis H137 has been replaced by "true". (It is already present, as H58). 
%%% Simplified H147 on reading formula in, to give: %%% H147: 63 <= skein_512_block_bytes_index__last %%% Simplified H148 on reading formula in, to give: %%% H148: 63 <= skein_512_block_bytes_index__last %%% Simplified H150 on reading formula in, to give: %%% H150: 63 <= natural__last %%% Simplified H157 on reading formula in, to give: %%% H157: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H158 on reading formula in, to give: %%% H158: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H176 has been replaced by "true". (It is already present, as H157). --- Hypothesis H177 has been replaced by "true". (It is already present, as H158). --- Hypothesis H178 has been replaced by "true". (It is already present, as H159). --- Hypothesis H179 has been replaced by "true". (It is already present, as H160). --- Hypothesis H180 has been replaced by "true". (It is already present, as H161). --- Hypothesis H181 has been replaced by "true". (It is already present, as H162). --- Hypothesis H184 has been replaced by "true". (It is already present, as H165). --- Hypothesis H185 has been replaced by "true". (It is already present, as H166). --- Hypothesis H187 has been replaced by "true". (It is already present, as H168). --- Hypothesis H188 has been replaced by "true". (It is already present, as H169). --- Hypothesis H189 has been replaced by "true". (It is already present, as H170). --- Hypothesis H190 has been replaced by "true". (It is already present, as H171). --- Hypothesis H191 has been replaced by "true". (It is already present, as H172). --- Hypothesis H192 has been replaced by "true". (It is already present, as H173). --- Hypothesis H193 has been replaced by "true". (It is already present, as H174). --- Hypothesis H194 has been replaced by "true". (It is already present, as H175). %%% Simplified H197 on reading formula in, to give: %%% H197: true --- Hypothesis H201 has been replaced by "true". (It is already present, as H27). --- Hypothesis H202 has been replaced by "true". (It is already present, as H28). 
%%% Simplified H203 on reading formula in, to give: %%% H203: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= 0 and 0 <= spark__unsigned__byte__last) %%% Simplified H204 on reading formula in, to give: %%% H204: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H205 on reading formula in, to give: %%% H205: fld_byte_count(fld_h(local_ctx__2)) >= natural__first %%% Simplified H206 on reading formula in, to give: %%% H206: fld_byte_count(fld_h(local_ctx__2)) <= natural__last %%% Simplified H207 on reading formula in, to give: %%% H207: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first %%% Simplified H208 on reading formula in, to give: %%% H208: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last %%% Simplified H211 on reading formula in, to give: %%% H211: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first %%% Simplified H212 on reading formula in, to give: %%% H212: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last %%% Simplified H214 on reading formula in, to give: %%% H214: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first %%% Simplified H215 on reading formula in, to give: %%% H215: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last %%% Simplified H216 on reading formula in, to give: %%% H216: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first %%% Simplified H217 on reading formula in, to give: %%% H217: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last %%% Simplified H218 on reading formula in, to give: %%% H218: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first %%% Simplified H219 on reading formula in, to give: %%% H219: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last %%% Simplified H220 on reading formula in, to give: %%% H220: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first %%% Simplified H221 on reading formula in, to give: %%% H221: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last --- Hypothesis H222 has been replaced by "true". (It is already present, as H195). --- Hypothesis H223 has been replaced by "true". (It is already present, as H196). 
%%% Simplified H226 on reading formula in, to give: %%% H226: true %%% Simplified C1 on reading formula in, to give: %%% C1: fld_hash_bit_len(fld_h(local_ctx__2)) > 0 %%% Simplified C3 on reading formula in, to give: %%% C3: 0 < (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 %%% Simplified C4 on reading formula in, to give: %%% C4: result__index__subtype__1__last > - 1 %%% Simplified C6 on reading formula in, to give: %%% C6: true %%% Simplified C7 on reading formula in, to give: %%% C7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified C8 on reading formula in, to give: %%% C8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) *** Proved C1: fld_hash_bit_len(fld_h(local_ctx__2)) > 0 using hypotheses H52 & H153. *** Proved C2: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= result__index__subtype__1__last + 1 using hypothesis H200. *** Proved C6: true *** Proved C7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) using hypothesis H7. *** Proved C8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) using hypothesis H8. *** Proved C9: fld_byte_count(fld_h(ctx)) >= natural__first using hypothesis H9. *** Proved C10: fld_byte_count(fld_h(ctx)) <= natural__last using hypothesis H10. *** Proved C11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first using hypothesis H11. *** Proved C12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last using hypothesis H12. *** Proved C13: true *** Proved C14: true *** Proved C15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first using hypothesis H15. *** Proved C16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last using hypothesis H16. *** Proved C17: true *** Proved C18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first using hypothesis H18. *** Proved C19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last using hypothesis H19. *** Proved C20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first using hypothesis H20. *** Proved C21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last using hypothesis H21. *** Proved C22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first using hypothesis H22. *** Proved C23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last using hypothesis H23. *** Proved C24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first using hypothesis H24. *** Proved C25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last using hypothesis H25. *** Proved C26: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first using hypothesis H1. 
*** Proved C27: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last using hypothesis H2. *** Proved C28: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first using hypothesis H3. *** Proved C29: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last using hypothesis H4. *** Proved C30: result__index__subtype__1__first = 0 using hypothesis H5. *** Proved C31: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 using hypothesis H6. -S- Applied substitution rule skein_512_fi_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H48: fld_byte_count(fld_h(ctx)) < 64 New H53: fld_byte_count(fld_h(local_ctx__1)) < 64 -S- Applied substitution rule skein_512_fi_rules(6). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H198: fld_hash_bit_len(fld_h(local_ctx__2)) >= - 2147483655 New H227: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= - 2147483711 -S- Applied substitution rule skein_512_fi_rules(7). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H199: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 New H228: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= 2147483584 -S- Applied substitution rule skein_512_fi_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New H9: fld_byte_count(fld_h(ctx)) >= 0 New H31: fld_byte_count(fld_h(ctx)) >= 0 New H57: fld_byte_count(fld_h(local_ctx__1)) >= 0 New H140: true New H159: fld_byte_count(fld_h(local_ctx__2)) >= 0 New H205: fld_byte_count(fld_h(local_ctx__2)) >= 0 -S- Applied substitution rule skein_512_fi_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H10: fld_byte_count(fld_h(ctx)) <= 2147483647 New H32: fld_byte_count(fld_h(ctx)) <= 2147483647 New H58: fld_byte_count(fld_h(local_ctx__1)) <= 2147483647 New H141: true New H149: skein_512_block_bytes_index__last <= 2147483647 New H150: true New H160: fld_byte_count(fld_h(local_ctx__2)) <= 2147483647 New H206: fld_byte_count(fld_h(local_ctx__2)) <= 2147483647 -S- Applied substitution rule skein_512_fi_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H63: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= 0 New H165: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= 0 New H211: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(43). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H38: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H64: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= 63 New H166: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= 63 New H212: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= 63 -S- Applied substitution rule skein_512_fi_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. 
New H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H66: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= 0 New H168: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= 0 New H214: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(49). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H67: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= 127 New H169: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= 127 New H215: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= 127 -S- Applied substitution rule skein_512_fi_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: true New H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H55: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__1), [i___2]) and element(fld_b(local_ctx__1), [i___2] ) <= spark__unsigned__byte__last) New H133: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__1), [i___1]) and element(fld_b(local_ctx__1), [i___1] ) <= spark__unsigned__byte__last) New H157: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2] ) <= spark__unsigned__byte__last) New H203: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_fi_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. 
New H28: true New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H55: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__1), [i___2]) and element(fld_b(local_ctx__1), [i___2] ) <= 255) New H133: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__1), [i___1]) and element(fld_b(local_ctx__1), [i___1] ) <= 255) New H157: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2] ) <= 255) New H203: true -S- Applied substitution rule skein_512_fi_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H68: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= 0 New H170: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= 0 New H216: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(61). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H69: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= 65535 New H171: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= 65535 New H217: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= 65535 -S- Applied substitution rule skein_512_fi_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H70: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= 0 New H172: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= 0 New H218: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(67). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H45: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H71: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= 4294967295 New H173: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= 4294967295 New H219: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= 4294967295 -S- Applied substitution rule skein_512_fi_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H46: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H56: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__1), [i___1]) and element(fld_x(local_ctx__1), [i___1] ) <= spark__unsigned__u64__last) New H72: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= 0 New H158: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1] ) <= spark__unsigned__u64__last) New H174: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= 0 New H204: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1] ) <= spark__unsigned__u64__last) New H220: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H73: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= 18446744073709551615 New H175: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= 18446744073709551615 New H221: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= 18446744073709551615 New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H56: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__1), [i___1]) and element(fld_x(local_ctx__1), [i___1] ) <= 18446744073709551615) New H158: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1] ) <= 18446744073709551615) New H204: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1] ) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(83). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. 
New H11: fld_hash_bit_len(fld_h(ctx)) >= 0 New H33: fld_hash_bit_len(fld_h(ctx)) >= 0 New H59: fld_hash_bit_len(fld_h(local_ctx__1)) >= 0 New H161: fld_hash_bit_len(fld_h(local_ctx__2)) >= 0 New H207: fld_hash_bit_len(fld_h(local_ctx__2)) >= 0 -S- Applied substitution rule skein_512_fi_rules(84). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H12: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H34: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H60: fld_hash_bit_len(fld_h(local_ctx__1)) <= 2147483640 New H162: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 New H208: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 New H142: fld_hash_bit_len(fld_h(local_ctx__1)) >= 1 New H151: fld_hash_bit_len(fld_h(local_ctx__2)) >= 1 -S- Applied substitution rule skein_512_fi_rules(89). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H143: fld_hash_bit_len(fld_h(local_ctx__1)) <= 2147483640 New H152: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H56: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__1) , [i___1]) and element(fld_x(local_ctx__1), [i___1]) <= 18446744073709551615) New H158: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__2) , [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= 18446744073709551615) New H204: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__2) , [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(94). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H56: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__1), [i___1]) and element(fld_x(local_ctx__1) , [i___1]) <= 18446744073709551615) New H158: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2) , [i___1]) <= 18446744073709551615) New H204: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2) , [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(98). 
This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 New H144: fld_byte_count(fld_h(local_ctx__1)) >= 0 New H154: fld_byte_count(fld_h(local_ctx__2)) >= 0 -S- Applied substitution rule skein_512_fi_rules(99). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 New H145: fld_byte_count(fld_h(local_ctx__1)) <= 64 New H155: fld_byte_count(fld_h(local_ctx__2)) <= 64 -S- Applied substitution rule skein_512_fi_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H146: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H55: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__1) , [i___2]) and element(fld_b(local_ctx__1), [i___2]) <= 255) New H133: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__1) , [i___1]) and element(fld_b(local_ctx__1), [i___1]) <= 255) New H157: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__2) , [i___2]) and element(fld_b(local_ctx__2), [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(104). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H147: true New H148: true New H149: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H55: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__1), [i___2]) and element(fld_b(local_ctx__1) , [i___2]) <= 255) New H133: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(local_ctx__1), [i___1]) and element(fld_b(local_ctx__1) , [i___1]) <= 255) New H157: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2) , [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H138: true -S- Applied substitution rule skein_512_fi_rules(109). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H139: true -S- Applied substitution rule skein_512_fi_rules(125). This was achieved by replacing all occurrences of output_byte_count_t__first by: 1. New H195: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= 1 -S- Applied substitution rule skein_512_fi_rules(126). This was achieved by replacing all occurrences of output_byte_count_t__last by: 268435455. New H196: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= 268435455 -S- Applied substitution rule skein_512_fi_rules(130). This was achieved by replacing all occurrences of output_block_count_t__first by: 0. New H229: true -S- Applied substitution rule skein_512_fi_rules(131). 
This was achieved by replacing all occurrences of output_block_count_t__last by: 4194304. New H230: true -S- Applied substitution rule skein_512_fi_rules(135). This was achieved by replacing all occurrences of positive_output_block_count_t__first by: 1. New H224: ((fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 >= 1 -S- Applied substitution rule skein_512_fi_rules(136). This was achieved by replacing all occurrences of positive_output_block_count_t__last by: 4194304. New H225: ((fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 <= 4194304 *** Proved C3: 0 < (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 using hypothesis H195. *** Proved C5: 0 < ((fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 using hypothesis H224. --- Eliminated hypothesis H13 (true-hypothesis). --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H17 (true-hypothesis). --- Eliminated hypothesis H35 (true-hypothesis). --- Eliminated hypothesis H36 (true-hypothesis). --- Eliminated hypothesis H39 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H65 (true-hypothesis). --- Eliminated hypothesis H74 (true-hypothesis). --- Eliminated hypothesis H75 (true-hypothesis). --- Eliminated hypothesis H76 (true-hypothesis). --- Eliminated hypothesis H77 (true-hypothesis). --- Eliminated hypothesis H78 (true-hypothesis). --- Eliminated hypothesis H79 (true-hypothesis). --- Eliminated hypothesis H80 (true-hypothesis). --- Eliminated hypothesis H81 (true-hypothesis). --- Eliminated hypothesis H82 (true-hypothesis). --- Eliminated hypothesis H83 (true-hypothesis). --- Eliminated hypothesis H84 (true-hypothesis). --- Eliminated hypothesis H85 (true-hypothesis). --- Eliminated hypothesis H86 (true-hypothesis). --- Eliminated hypothesis H87 (true-hypothesis). --- Eliminated hypothesis H88 (true-hypothesis). --- Eliminated hypothesis H89 (true-hypothesis). --- Eliminated hypothesis H90 (true-hypothesis). --- Eliminated hypothesis H91 (true-hypothesis). --- Eliminated hypothesis H92 (true-hypothesis). --- Eliminated hypothesis H93 (true-hypothesis). --- Eliminated hypothesis H94 (true-hypothesis). --- Eliminated hypothesis H95 (true-hypothesis). --- Eliminated hypothesis H96 (true-hypothesis). --- Eliminated hypothesis H97 (true-hypothesis). --- Eliminated hypothesis H98 (true-hypothesis). --- Eliminated hypothesis H99 (true-hypothesis). --- Eliminated hypothesis H100 (true-hypothesis). --- Eliminated hypothesis H101 (true-hypothesis). --- Eliminated hypothesis H102 (true-hypothesis). --- Eliminated hypothesis H103 (true-hypothesis). --- Eliminated hypothesis H104 (true-hypothesis). --- Eliminated hypothesis H105 (true-hypothesis). --- Eliminated hypothesis H106 (true-hypothesis). --- Eliminated hypothesis H107 (true-hypothesis). --- Eliminated hypothesis H108 (true-hypothesis). --- Eliminated hypothesis H109 (true-hypothesis). --- Eliminated hypothesis H110 (true-hypothesis). --- Eliminated hypothesis H111 (true-hypothesis). --- Eliminated hypothesis H112 (true-hypothesis). --- Eliminated hypothesis H113 (true-hypothesis). --- Eliminated hypothesis H114 (true-hypothesis). --- Eliminated hypothesis H115 (true-hypothesis). --- Eliminated hypothesis H116 (true-hypothesis). --- Eliminated hypothesis H117 (true-hypothesis). --- Eliminated hypothesis H118 (true-hypothesis). --- Eliminated hypothesis H119 (true-hypothesis). 
--- Eliminated hypothesis H120 (true-hypothesis). --- Eliminated hypothesis H121 (true-hypothesis). --- Eliminated hypothesis H122 (true-hypothesis). --- Eliminated hypothesis H123 (true-hypothesis). --- Eliminated hypothesis H124 (true-hypothesis). --- Eliminated hypothesis H125 (true-hypothesis). --- Eliminated hypothesis H126 (true-hypothesis). --- Eliminated hypothesis H127 (true-hypothesis). --- Eliminated hypothesis H128 (true-hypothesis). --- Eliminated hypothesis H129 (true-hypothesis). --- Eliminated hypothesis H130 (true-hypothesis). --- Eliminated hypothesis H131 (true-hypothesis). --- Eliminated hypothesis H132 (true-hypothesis). --- Eliminated hypothesis H134 (true-hypothesis). --- Eliminated hypothesis H135 (true-hypothesis). --- Eliminated hypothesis H136 (true-hypothesis). --- Eliminated hypothesis H137 (true-hypothesis). --- Eliminated hypothesis H163 (true-hypothesis). --- Eliminated hypothesis H164 (true-hypothesis). --- Eliminated hypothesis H167 (true-hypothesis). --- Eliminated hypothesis H176 (true-hypothesis). --- Eliminated hypothesis H177 (true-hypothesis). --- Eliminated hypothesis H178 (true-hypothesis). --- Eliminated hypothesis H179 (true-hypothesis). --- Eliminated hypothesis H180 (true-hypothesis). --- Eliminated hypothesis H181 (true-hypothesis). --- Eliminated hypothesis H182 (true-hypothesis). --- Eliminated hypothesis H183 (true-hypothesis). --- Eliminated hypothesis H184 (true-hypothesis). --- Eliminated hypothesis H185 (true-hypothesis). --- Eliminated hypothesis H186 (true-hypothesis). --- Eliminated hypothesis H187 (true-hypothesis). --- Eliminated hypothesis H188 (true-hypothesis). --- Eliminated hypothesis H189 (true-hypothesis). --- Eliminated hypothesis H190 (true-hypothesis). --- Eliminated hypothesis H191 (true-hypothesis). --- Eliminated hypothesis H192 (true-hypothesis). --- Eliminated hypothesis H193 (true-hypothesis). --- Eliminated hypothesis H194 (true-hypothesis). --- Eliminated hypothesis H197 (true-hypothesis). --- Eliminated hypothesis H201 (true-hypothesis). --- Eliminated hypothesis H202 (true-hypothesis). --- Eliminated hypothesis H209 (true-hypothesis). --- Eliminated hypothesis H210 (true-hypothesis). --- Eliminated hypothesis H213 (true-hypothesis). --- Eliminated hypothesis H222 (true-hypothesis). --- Eliminated hypothesis H223 (true-hypothesis). --- Eliminated hypothesis H226 (true-hypothesis). --- Eliminated hypothesis H140 (true-hypothesis). --- Eliminated hypothesis H141 (true-hypothesis). --- Eliminated hypothesis H150 (true-hypothesis). --- Eliminated hypothesis H27 (true-hypothesis). --- Eliminated hypothesis H28 (true-hypothesis). --- Eliminated hypothesis H203 (true-hypothesis). --- Eliminated hypothesis H146 (true-hypothesis). --- Eliminated hypothesis H147 (true-hypothesis). --- Eliminated hypothesis H148 (true-hypothesis). --- Eliminated hypothesis H149 (true-hypothesis). --- Eliminated hypothesis H138 (true-hypothesis). --- Eliminated hypothesis H139 (true-hypothesis). --- Eliminated hypothesis H229 (true-hypothesis). --- Eliminated hypothesis H230 (true-hypothesis). --- Eliminated hypothesis H50 (duplicate of H26). --- Eliminated hypothesis H142 (duplicate of H52). --- Eliminated hypothesis H208 (duplicate of H199). --- Eliminated hypothesis H31 (duplicate of H9). --- Eliminated hypothesis H144 (duplicate of H57). --- Eliminated hypothesis H205 (duplicate of H159). --- Eliminated hypothesis H32 (duplicate of H10). --- Eliminated hypothesis H206 (duplicate of H160). --- Eliminated hypothesis H37 (duplicate of H15). 
--- Eliminated hypothesis H211 (duplicate of H165). --- Eliminated hypothesis H38 (duplicate of H16). --- Eliminated hypothesis H212 (duplicate of H166). --- Eliminated hypothesis H40 (duplicate of H18). --- Eliminated hypothesis H214 (duplicate of H168). --- Eliminated hypothesis H41 (duplicate of H19). --- Eliminated hypothesis H215 (duplicate of H169). --- Eliminated hypothesis H42 (duplicate of H20). --- Eliminated hypothesis H216 (duplicate of H170). --- Eliminated hypothesis H43 (duplicate of H21). --- Eliminated hypothesis H217 (duplicate of H171). --- Eliminated hypothesis H44 (duplicate of H22). --- Eliminated hypothesis H218 (duplicate of H172). --- Eliminated hypothesis H45 (duplicate of H23). --- Eliminated hypothesis H219 (duplicate of H173). --- Eliminated hypothesis H46 (duplicate of H24). --- Eliminated hypothesis H220 (duplicate of H174). --- Eliminated hypothesis H47 (duplicate of H25). --- Eliminated hypothesis H221 (duplicate of H175). --- Eliminated hypothesis H33 (duplicate of H11). --- Eliminated hypothesis H207 (duplicate of H161). --- Eliminated hypothesis H34 (duplicate of H12). --- Eliminated hypothesis H143 (duplicate of H60). --- Eliminated hypothesis H199 (duplicate of H162). --- Eliminated hypothesis H12 (duplicate of H2). --- Eliminated hypothesis H162 (duplicate of H152). --- Eliminated hypothesis H30 (duplicate of H8). --- Eliminated hypothesis H204 (duplicate of H158). --- Eliminated hypothesis H9 (duplicate of H3). --- Eliminated hypothesis H159 (duplicate of H154). --- Eliminated hypothesis H29 (duplicate of H7). --- Eliminated hypothesis H26 (duplicate of H1). --- Eliminated hypothesis H4 (redundant, given H48). --- Eliminated hypothesis H10 (redundant, given H48). --- Eliminated hypothesis H11 (redundant, given H1). --- Eliminated hypothesis H58 (redundant, given H53). --- Eliminated hypothesis H59 (redundant, given H52). --- Eliminated hypothesis H145 (redundant, given H53). --- Eliminated hypothesis H160 (redundant, given H155). --- Eliminated hypothesis H161 (redundant, given H151). --- Eliminated hypothesis H198 (redundant, given H151). --- Eliminated hypothesis H227 (redundant, given H195). --- Eliminated hypothesis H228 (redundant, given H196). -S- Substituted hypothesis H51. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( local_ctx__1)) by: fld_hash_bit_len(fld_h(ctx)). -S- Substituted hypothesis H54. This was achieved by replacing all occurrences of fld_byte_count(fld_h( local_ctx__1)) by: fld_byte_count(fld_h(ctx)). -S- Substituted hypothesis H153. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( local_ctx__2)) by: fld_hash_bit_len(fld_h(ctx)). -S- Substituted hypothesis H156. This was achieved by replacing all occurrences of fld_byte_count(fld_h( local_ctx__2)) by: fld_byte_count(fld_h(ctx)). 
+++ New H231: integer__size >= 0 +++ New H232: natural__size >= 0 +++ New H233: spark__unsigned__u6__size >= 0 +++ New H234: spark__unsigned__u7__size >= 0 +++ New H235: spark__unsigned__byte__size >= 0 +++ New H236: spark__unsigned__u16__size >= 0 +++ New H237: spark__unsigned__u32__size >= 0 +++ New H238: spark__unsigned__u64__size >= 0 +++ New H239: spark__crypto__word_count_t__size >= 0 +++ New H240: hash_bit_length__size >= 0 +++ New H241: initialized_hash_bit_length__size >= 0 +++ New H242: skein_512_state_words_index__size >= 0 +++ New H243: skein_512_block_bytes_count__size >= 0 +++ New H244: skein_512_block_bytes_index__size >= 0 +++ New H245: positive_block_512_count_t__size >= 0 +++ New H246: skein_512_context__size >= 0 +++ New H247: result__index__subtype__1__first <= result__index__subtype__1__last +++ New H248: context_header__size >= 0 +++ New H249: output_byte_count_t__size >= 0 +++ New H250: output_block_count_t__size >= 0 +++ New H251: positive_output_block_count_t__size >= 0 +++ New H252: result__index__subtype__1__first >= 0 +++ New H253: result__index__subtype__1__last >= 0 +++ New H254: result__index__subtype__1__last <= 2147483647 +++ New H255: result__index__subtype__1__first <= 2147483647 *** Proved C4: result__index__subtype__1__last > - 1 using hypothesis H253. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_19. @@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H29 on reading formula in, to give: %%% H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H31 on reading formula in, to give: %%% H31: fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H32 on reading formula in, to give: %%% H32: fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H33 on reading formula in, to give: %%% H33: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first %%% Simplified H34 on reading formula in, to give: %%% H34: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last %%% Simplified H37 on reading formula in, to give: %%% H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first %%% Simplified H38 on reading formula in, to give: %%% H38: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last %%% Simplified H40 on reading formula in, to give: %%% H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first %%% Simplified H41 on reading formula in, to give: %%% H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 
spark__unsigned__u7__last %%% Simplified H42 on reading formula in, to give: %%% H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first %%% Simplified H43 on reading formula in, to give: %%% H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last %%% Simplified H44 on reading formula in, to give: %%% H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first %%% Simplified H45 on reading formula in, to give: %%% H45: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last %%% Simplified H46 on reading formula in, to give: %%% H46: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first %%% Simplified H47 on reading formula in, to give: %%% H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last %%% Simplified H48 on reading formula in, to give: %%% H48: not fld_byte_count(fld_h(ctx)) < skein_512_block_bytes_c --- Hypothesis H49 has been replaced by "true". (It is already present, as H29). --- Hypothesis H50 has been replaced by "true". (It is already present, as H30). --- Hypothesis H51 has been replaced by "true". (It is already present, as H31). --- Hypothesis H52 has been replaced by "true". (It is already present, as H32). --- Hypothesis H53 has been replaced by "true". (It is already present, as H33). --- Hypothesis H54 has been replaced by "true". (It is already present, as H34). --- Hypothesis H57 has been replaced by "true". (It is already present, as H37). --- Hypothesis H58 has been replaced by "true". (It is already present, as H38). --- Hypothesis H60 has been replaced by "true". (It is already present, as H40). --- Hypothesis H61 has been replaced by "true". (It is already present, as H41). --- Hypothesis H62 has been replaced by "true". (It is already present, as H42). --- Hypothesis H63 has been replaced by "true". (It is already present, as H43). --- Hypothesis H64 has been replaced by "true". (It is already present, as H44). --- Hypothesis H65 has been replaced by "true". (It is already present, as H45). --- Hypothesis H66 has been replaced by "true". (It is already present, as H46). --- Hypothesis H67 has been replaced by "true". (It is already present, as H47). --- Hypothesis H68 has been replaced by "true". (It is already present, as H29). --- Hypothesis H69 has been replaced by "true". (It is already present, as H30). --- Hypothesis H70 has been replaced by "true". (It is already present, as H31). --- Hypothesis H71 has been replaced by "true". (It is already present, as H32). --- Hypothesis H72 has been replaced by "true". (It is already present, as H33). --- Hypothesis H73 has been replaced by "true". (It is already present, as H34). --- Hypothesis H76 has been replaced by "true". (It is already present, as H37). --- Hypothesis H77 has been replaced by "true". (It is already present, as H38). --- Hypothesis H79 has been replaced by "true". (It is already present, as H40). --- Hypothesis H80 has been replaced by "true". (It is already present, as H41). --- Hypothesis H81 has been replaced by "true". (It is already present, as H42). --- Hypothesis H82 has been replaced by "true". (It is already present, as H43). --- Hypothesis H83 has been replaced by "true". (It is already present, as H44). --- Hypothesis H84 has been replaced by "true". (It is already present, as H45). --- Hypothesis H85 has been replaced by "true". (It is already present, as H46). --- Hypothesis H86 has been replaced by "true". (It is already present, as H47). 
--- Hypothesis H87 has been replaced by "true". (It is already present, as H31). --- Hypothesis H88 has been replaced by "true". (It is already present, as H32). --- Hypothesis H89 has been replaced by "true". (It is already present, as H29). --- Hypothesis H90 has been replaced by "true". (It is already present, as H30). --- Hypothesis H91 has been replaced by "true". (It is already present, as H31). --- Hypothesis H92 has been replaced by "true". (It is already present, as H32). --- Hypothesis H93 has been replaced by "true". (It is already present, as H33). --- Hypothesis H94 has been replaced by "true". (It is already present, as H34). --- Hypothesis H97 has been replaced by "true". (It is already present, as H37). --- Hypothesis H98 has been replaced by "true". (It is already present, as H38). --- Hypothesis H100 has been replaced by "true". (It is already present, as H40). --- Hypothesis H101 has been replaced by "true". (It is already present, as H41). --- Hypothesis H102 has been replaced by "true". (It is already present, as H42). --- Hypothesis H103 has been replaced by "true". (It is already present, as H43). --- Hypothesis H104 has been replaced by "true". (It is already present, as H44). --- Hypothesis H105 has been replaced by "true". (It is already present, as H45). --- Hypothesis H106 has been replaced by "true". (It is already present, as H46). --- Hypothesis H107 has been replaced by "true". (It is already present, as H47). %%% Simplified H108 on reading formula in, to give: %%% H108: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H109 has been replaced by "true". (It is already present, as H31). --- Hypothesis H110 has been replaced by "true". (It is already present, as H32). --- Hypothesis H111 has been replaced by "true". (It is already present, as H31). --- Hypothesis H112 has been replaced by "true". (It is already present, as H32). 
%%% Simplified H117 on reading formula in, to give: %%% H117: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first %%% Simplified H118 on reading formula in, to give: %%% H118: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last %%% Simplified H119 on reading formula in, to give: %%% H119: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first %%% Simplified H120 on reading formula in, to give: %%% H120: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last %%% Simplified H122 on reading formula in, to give: %%% H122: 63 <= skein_512_block_bytes_index__last %%% Simplified H123 on reading formula in, to give: %%% H123: 63 <= skein_512_block_bytes_index__last %%% Simplified H125 on reading formula in, to give: %%% H125: 63 <= natural__last %%% Simplified H128 on reading formula in, to give: %%% H128: fld_hash_bit_len(fld_h(local_ctx__2)) = fld_hash_bit_len(fld_h( ctx)) %%% Simplified H131 on reading formula in, to give: %%% H131: fld_byte_count(fld_h(local_ctx__2)) = fld_byte_count(fld_h(ctx) ) %%% Simplified H132 on reading formula in, to give: %%% H132: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H133 on reading formula in, to give: %%% H133: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H151 has been replaced by "true". (It is already present, as H132). --- Hypothesis H152 has been replaced by "true". (It is already present, as H133). --- Hypothesis H153 has been replaced by "true". (It is already present, as H134). --- Hypothesis H154 has been replaced by "true". (It is already present, as H135). --- Hypothesis H155 has been replaced by "true". (It is already present, as H136). --- Hypothesis H156 has been replaced by "true". (It is already present, as H137). --- Hypothesis H159 has been replaced by "true". (It is already present, as H140). --- Hypothesis H160 has been replaced by "true". (It is already present, as H141). --- Hypothesis H162 has been replaced by "true". (It is already present, as H143). --- Hypothesis H163 has been replaced by "true". (It is already present, as H144). --- Hypothesis H164 has been replaced by "true". (It is already present, as H145). --- Hypothesis H165 has been replaced by "true". (It is already present, as H146). --- Hypothesis H166 has been replaced by "true". (It is already present, as H147). --- Hypothesis H167 has been replaced by "true". (It is already present, as H148). --- Hypothesis H168 has been replaced by "true". (It is already present, as H149). --- Hypothesis H169 has been replaced by "true". (It is already present, as H150). %%% Simplified H172 on reading formula in, to give: %%% H172: true --- Hypothesis H176 has been replaced by "true". (It is already present, as H27). --- Hypothesis H177 has been replaced by "true". (It is already present, as H28). 
%%% Simplified H178 on reading formula in, to give: %%% H178: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= 0 and 0 <= spark__unsigned__byte__last) %%% Simplified H179 on reading formula in, to give: %%% H179: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H180 on reading formula in, to give: %%% H180: fld_byte_count(fld_h(local_ctx__2)) >= natural__first %%% Simplified H181 on reading formula in, to give: %%% H181: fld_byte_count(fld_h(local_ctx__2)) <= natural__last %%% Simplified H182 on reading formula in, to give: %%% H182: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first %%% Simplified H183 on reading formula in, to give: %%% H183: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last %%% Simplified H186 on reading formula in, to give: %%% H186: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first %%% Simplified H187 on reading formula in, to give: %%% H187: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last %%% Simplified H189 on reading formula in, to give: %%% H189: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first %%% Simplified H190 on reading formula in, to give: %%% H190: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last %%% Simplified H191 on reading formula in, to give: %%% H191: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first %%% Simplified H192 on reading formula in, to give: %%% H192: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last %%% Simplified H193 on reading formula in, to give: %%% H193: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first %%% Simplified H194 on reading formula in, to give: %%% H194: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last %%% Simplified H195 on reading formula in, to give: %%% H195: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first %%% Simplified H196 on reading formula in, to give: %%% H196: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last --- Hypothesis H197 has been replaced by "true". (It is already present, as H170). --- Hypothesis H198 has been replaced by "true". (It is already present, as H171). 
%%% Simplified H201 on reading formula in, to give: %%% H201: true %%% Simplified C1 on reading formula in, to give: %%% C1: fld_hash_bit_len(fld_h(local_ctx__2)) > 0 %%% Simplified C3 on reading formula in, to give: %%% C3: 0 < (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 %%% Simplified C4 on reading formula in, to give: %%% C4: result__index__subtype__1__last > - 1 %%% Simplified C6 on reading formula in, to give: %%% C6: true %%% Simplified C7 on reading formula in, to give: %%% C7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified C8 on reading formula in, to give: %%% C8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) *** Proved C1: fld_hash_bit_len(fld_h(local_ctx__2)) > 0 using hypotheses H26 & H128. *** Proved C2: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= result__index__subtype__1__last + 1 using hypothesis H175. *** Proved C6: true *** Proved C7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) using hypothesis H7. *** Proved C8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) using hypothesis H8. *** Proved C9: fld_byte_count(fld_h(ctx)) >= natural__first using hypothesis H9. *** Proved C10: fld_byte_count(fld_h(ctx)) <= natural__last using hypothesis H10. *** Proved C11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first using hypothesis H11. *** Proved C12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last using hypothesis H12. *** Proved C13: true *** Proved C14: true *** Proved C15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first using hypothesis H15. *** Proved C16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last using hypothesis H16. *** Proved C17: true *** Proved C18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first using hypothesis H18. *** Proved C19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last using hypothesis H19. *** Proved C20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first using hypothesis H20. *** Proved C21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last using hypothesis H21. *** Proved C22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first using hypothesis H22. *** Proved C23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last using hypothesis H23. *** Proved C24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first using hypothesis H24. *** Proved C25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last using hypothesis H25. *** Proved C26: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first using hypothesis H1. 
*** Proved C27: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last using hypothesis H2. *** Proved C28: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first using hypothesis H3. *** Proved C29: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last using hypothesis H4. *** Proved C30: result__index__subtype__1__first = 0 using hypothesis H5. *** Proved C31: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 using hypothesis H6. >>> Restructured hypothesis H48 into: >>> H48: skein_512_block_bytes_c <= fld_byte_count(fld_h(ctx)) -S- Applied substitution rule skein_512_fi_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H48: 64 <= fld_byte_count(fld_h(ctx)) -S- Applied substitution rule skein_512_fi_rules(6). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H173: fld_hash_bit_len(fld_h(local_ctx__2)) >= - 2147483655 New H202: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= - 2147483711 -S- Applied substitution rule skein_512_fi_rules(7). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H174: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 New H203: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= 2147483584 -S- Applied substitution rule skein_512_fi_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New H9: fld_byte_count(fld_h(ctx)) >= 0 New H31: fld_byte_count(fld_h(ctx)) >= 0 New H115: true New H134: fld_byte_count(fld_h(local_ctx__2)) >= 0 New H180: fld_byte_count(fld_h(local_ctx__2)) >= 0 -S- Applied substitution rule skein_512_fi_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H10: fld_byte_count(fld_h(ctx)) <= 2147483647 New H32: fld_byte_count(fld_h(ctx)) <= 2147483647 New H116: true New H124: skein_512_block_bytes_index__last <= 2147483647 New H125: true New H135: fld_byte_count(fld_h(local_ctx__2)) <= 2147483647 New H181: fld_byte_count(fld_h(local_ctx__2)) <= 2147483647 -S- Applied substitution rule skein_512_fi_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H37: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H140: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= 0 New H186: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(43). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H38: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H141: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= 63 New H187: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= 63 -S- Applied substitution rule skein_512_fi_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H40: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H143: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= 0 New H189: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(49). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. 
New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H41: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H144: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= 127 New H190: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= 127 -S- Applied substitution rule skein_512_fi_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: true New H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H108: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= spark__unsigned__byte__last) New H132: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2] ) <= spark__unsigned__byte__last) New H178: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_fi_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H28: true New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H108: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= 255) New H132: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2), [i___2] ) <= 255) New H178: true -S- Applied substitution rule skein_512_fi_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H42: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H145: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= 0 New H191: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(61). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H43: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H146: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= 65535 New H192: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= 65535 -S- Applied substitution rule skein_512_fi_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. 
New H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H44: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H147: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= 0 New H193: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(67). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H45: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H148: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= 4294967295 New H194: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= 4294967295 -S- Applied substitution rule skein_512_fi_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H46: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H133: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1] ) <= spark__unsigned__u64__last) New H149: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= 0 New H179: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1] ) <= spark__unsigned__u64__last) New H195: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= 0 -S- Applied substitution rule skein_512_fi_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H47: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H150: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= 18446744073709551615 New H196: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= 18446744073709551615 New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H133: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1] ) <= 18446744073709551615) New H179: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2), [i___1] ) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(83). 
This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H11: fld_hash_bit_len(fld_h(ctx)) >= 0 New H33: fld_hash_bit_len(fld_h(ctx)) >= 0 New H136: fld_hash_bit_len(fld_h(local_ctx__2)) >= 0 New H182: fld_hash_bit_len(fld_h(local_ctx__2)) >= 0 -S- Applied substitution rule skein_512_fi_rules(84). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H12: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H34: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H137: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 New H183: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 New H117: fld_hash_bit_len(fld_h(ctx)) >= 1 New H126: fld_hash_bit_len(fld_h(local_ctx__2)) >= 1 -S- Applied substitution rule skein_512_fi_rules(89). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H118: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H127: fld_hash_bit_len(fld_h(local_ctx__2)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H133: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__2) , [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= 18446744073709551615) New H179: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__2) , [i___1]) and element(fld_x(local_ctx__2), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(94). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H133: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2) , [i___1]) <= 18446744073709551615) New H179: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__2), [i___1]) and element(fld_x(local_ctx__2) , [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 New H119: fld_byte_count(fld_h(ctx)) >= 0 New H129: fld_byte_count(fld_h(local_ctx__2)) >= 0 -S- Applied substitution rule skein_512_fi_rules(99). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. 
New H4: fld_byte_count(fld_h(ctx)) <= 64 New H120: fld_byte_count(fld_h(ctx)) <= 64 New H130: fld_byte_count(fld_h(local_ctx__2)) <= 64 -S- Applied substitution rule skein_512_fi_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H121: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H108: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= 255) New H132: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__2) , [i___2]) and element(fld_b(local_ctx__2), [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(104). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H122: true New H123: true New H124: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H108: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(ctx), [i___1]) and element(fld_b(ctx), [i___1]) <= 255) New H132: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__2), [i___2]) and element(fld_b(local_ctx__2) , [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H113: true -S- Applied substitution rule skein_512_fi_rules(109). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H114: true -S- Applied substitution rule skein_512_fi_rules(125). This was achieved by replacing all occurrences of output_byte_count_t__first by: 1. New H170: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= 1 -S- Applied substitution rule skein_512_fi_rules(126). This was achieved by replacing all occurrences of output_byte_count_t__last by: 268435455. New H171: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= 268435455 -S- Applied substitution rule skein_512_fi_rules(130). This was achieved by replacing all occurrences of output_block_count_t__first by: 0. New H204: true -S- Applied substitution rule skein_512_fi_rules(131). This was achieved by replacing all occurrences of output_block_count_t__last by: 4194304. New H205: true -S- Applied substitution rule skein_512_fi_rules(135). This was achieved by replacing all occurrences of positive_output_block_count_t__first by: 1. New H199: ((fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 >= 1 -S- Applied substitution rule skein_512_fi_rules(136). This was achieved by replacing all occurrences of positive_output_block_count_t__last by: 4194304. New H200: ((fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 <= 4194304 %%% Hypotheses H48 & H4 together imply that 64 = fld_byte_count(fld_h(ctx)). H48 & H4 have therefore been deleted and a new H206 added to this effect. *** Proved C3: 0 < (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 using hypothesis H170. 
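Two of the steps recorded just above can be read as simple rewrites: substitution rule skein_512_fi_rules(1) replaces the named constant skein_512_block_bytes_c by its literal value 64, and the pair 64 <= fld_byte_count(fld_h(ctx)) (H48) with fld_byte_count(fld_h(ctx)) <= 64 (H4) is then collapsed into the single equality recorded as the new H206. A minimal Python sketch of those two rewrites, assuming a string representation of formulae; the one-entry rule table below merely stands in for the rule file and is not the Simplifier's data structure.

RULES = {"skein_512_block_bytes_c": "64"}       # stands in for skein_512_fi_rules(1)

def apply_rules(formula):
    """Replace named constants by their literal values."""
    for name, literal in RULES.items():
        formula = formula.replace(name, literal)
    return formula

def collapse_bounds(term, lower, upper):
    """lower <= term and term <= upper with lower == upper imply term = lower."""
    return f"{term} = {lower}" if lower == upper else None

h48 = apply_rules("skein_512_block_bytes_c <= fld_byte_count(fld_h(ctx))")
h4  = "fld_byte_count(fld_h(ctx)) <= 64"
print(h48)                                                    # 64 <= fld_byte_count(fld_h(ctx))
print(collapse_bounds("fld_byte_count(fld_h(ctx))", 64, 64))  # the new H206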
*** Proved C5: 0 < ((fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 using hypothesis H199. --- Eliminated hypothesis H13 (true-hypothesis). --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H17 (true-hypothesis). --- Eliminated hypothesis H35 (true-hypothesis). --- Eliminated hypothesis H36 (true-hypothesis). --- Eliminated hypothesis H39 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H50 (true-hypothesis). --- Eliminated hypothesis H51 (true-hypothesis). --- Eliminated hypothesis H52 (true-hypothesis). --- Eliminated hypothesis H53 (true-hypothesis). --- Eliminated hypothesis H54 (true-hypothesis). --- Eliminated hypothesis H55 (true-hypothesis). --- Eliminated hypothesis H56 (true-hypothesis). --- Eliminated hypothesis H57 (true-hypothesis). --- Eliminated hypothesis H58 (true-hypothesis). --- Eliminated hypothesis H59 (true-hypothesis). --- Eliminated hypothesis H60 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H63 (true-hypothesis). --- Eliminated hypothesis H64 (true-hypothesis). --- Eliminated hypothesis H65 (true-hypothesis). --- Eliminated hypothesis H66 (true-hypothesis). --- Eliminated hypothesis H67 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H69 (true-hypothesis). --- Eliminated hypothesis H70 (true-hypothesis). --- Eliminated hypothesis H71 (true-hypothesis). --- Eliminated hypothesis H72 (true-hypothesis). --- Eliminated hypothesis H73 (true-hypothesis). --- Eliminated hypothesis H74 (true-hypothesis). --- Eliminated hypothesis H75 (true-hypothesis). --- Eliminated hypothesis H76 (true-hypothesis). --- Eliminated hypothesis H77 (true-hypothesis). --- Eliminated hypothesis H78 (true-hypothesis). --- Eliminated hypothesis H79 (true-hypothesis). --- Eliminated hypothesis H80 (true-hypothesis). --- Eliminated hypothesis H81 (true-hypothesis). --- Eliminated hypothesis H82 (true-hypothesis). --- Eliminated hypothesis H83 (true-hypothesis). --- Eliminated hypothesis H84 (true-hypothesis). --- Eliminated hypothesis H85 (true-hypothesis). --- Eliminated hypothesis H86 (true-hypothesis). --- Eliminated hypothesis H87 (true-hypothesis). --- Eliminated hypothesis H88 (true-hypothesis). --- Eliminated hypothesis H89 (true-hypothesis). --- Eliminated hypothesis H90 (true-hypothesis). --- Eliminated hypothesis H91 (true-hypothesis). --- Eliminated hypothesis H92 (true-hypothesis). --- Eliminated hypothesis H93 (true-hypothesis). --- Eliminated hypothesis H94 (true-hypothesis). --- Eliminated hypothesis H95 (true-hypothesis). --- Eliminated hypothesis H96 (true-hypothesis). --- Eliminated hypothesis H97 (true-hypothesis). --- Eliminated hypothesis H98 (true-hypothesis). --- Eliminated hypothesis H99 (true-hypothesis). --- Eliminated hypothesis H100 (true-hypothesis). --- Eliminated hypothesis H101 (true-hypothesis). --- Eliminated hypothesis H102 (true-hypothesis). --- Eliminated hypothesis H103 (true-hypothesis). --- Eliminated hypothesis H104 (true-hypothesis). --- Eliminated hypothesis H105 (true-hypothesis). --- Eliminated hypothesis H106 (true-hypothesis). --- Eliminated hypothesis H107 (true-hypothesis). --- Eliminated hypothesis H109 (true-hypothesis). --- Eliminated hypothesis H110 (true-hypothesis). --- Eliminated hypothesis H111 (true-hypothesis). --- Eliminated hypothesis H112 (true-hypothesis). --- Eliminated hypothesis H138 (true-hypothesis). 
--- Eliminated hypothesis H139 (true-hypothesis). --- Eliminated hypothesis H142 (true-hypothesis). --- Eliminated hypothesis H151 (true-hypothesis). --- Eliminated hypothesis H152 (true-hypothesis). --- Eliminated hypothesis H153 (true-hypothesis). --- Eliminated hypothesis H154 (true-hypothesis). --- Eliminated hypothesis H155 (true-hypothesis). --- Eliminated hypothesis H156 (true-hypothesis). --- Eliminated hypothesis H157 (true-hypothesis). --- Eliminated hypothesis H158 (true-hypothesis). --- Eliminated hypothesis H159 (true-hypothesis). --- Eliminated hypothesis H160 (true-hypothesis). --- Eliminated hypothesis H161 (true-hypothesis). --- Eliminated hypothesis H162 (true-hypothesis). --- Eliminated hypothesis H163 (true-hypothesis). --- Eliminated hypothesis H164 (true-hypothesis). --- Eliminated hypothesis H165 (true-hypothesis). --- Eliminated hypothesis H166 (true-hypothesis). --- Eliminated hypothesis H167 (true-hypothesis). --- Eliminated hypothesis H168 (true-hypothesis). --- Eliminated hypothesis H169 (true-hypothesis). --- Eliminated hypothesis H172 (true-hypothesis). --- Eliminated hypothesis H176 (true-hypothesis). --- Eliminated hypothesis H177 (true-hypothesis). --- Eliminated hypothesis H184 (true-hypothesis). --- Eliminated hypothesis H185 (true-hypothesis). --- Eliminated hypothesis H188 (true-hypothesis). --- Eliminated hypothesis H197 (true-hypothesis). --- Eliminated hypothesis H198 (true-hypothesis). --- Eliminated hypothesis H201 (true-hypothesis). --- Eliminated hypothesis H115 (true-hypothesis). --- Eliminated hypothesis H116 (true-hypothesis). --- Eliminated hypothesis H125 (true-hypothesis). --- Eliminated hypothesis H27 (true-hypothesis). --- Eliminated hypothesis H28 (true-hypothesis). --- Eliminated hypothesis H178 (true-hypothesis). --- Eliminated hypothesis H121 (true-hypothesis). --- Eliminated hypothesis H122 (true-hypothesis). --- Eliminated hypothesis H123 (true-hypothesis). --- Eliminated hypothesis H124 (true-hypothesis). --- Eliminated hypothesis H113 (true-hypothesis). --- Eliminated hypothesis H114 (true-hypothesis). --- Eliminated hypothesis H204 (true-hypothesis). --- Eliminated hypothesis H205 (true-hypothesis). --- Eliminated hypothesis H117 (duplicate of H26). --- Eliminated hypothesis H183 (duplicate of H174). --- Eliminated hypothesis H31 (duplicate of H9). --- Eliminated hypothesis H119 (duplicate of H9). --- Eliminated hypothesis H180 (duplicate of H134). --- Eliminated hypothesis H32 (duplicate of H10). --- Eliminated hypothesis H181 (duplicate of H135). --- Eliminated hypothesis H37 (duplicate of H15). --- Eliminated hypothesis H186 (duplicate of H140). --- Eliminated hypothesis H38 (duplicate of H16). --- Eliminated hypothesis H187 (duplicate of H141). --- Eliminated hypothesis H40 (duplicate of H18). --- Eliminated hypothesis H189 (duplicate of H143). --- Eliminated hypothesis H41 (duplicate of H19). --- Eliminated hypothesis H190 (duplicate of H144). --- Eliminated hypothesis H42 (duplicate of H20). --- Eliminated hypothesis H191 (duplicate of H145). --- Eliminated hypothesis H43 (duplicate of H21). --- Eliminated hypothesis H192 (duplicate of H146). --- Eliminated hypothesis H44 (duplicate of H22). --- Eliminated hypothesis H193 (duplicate of H147). --- Eliminated hypothesis H45 (duplicate of H23). --- Eliminated hypothesis H194 (duplicate of H148). --- Eliminated hypothesis H46 (duplicate of H24). --- Eliminated hypothesis H195 (duplicate of H149). --- Eliminated hypothesis H47 (duplicate of H25). 
--- Eliminated hypothesis H196 (duplicate of H150). --- Eliminated hypothesis H33 (duplicate of H11). --- Eliminated hypothesis H182 (duplicate of H136). --- Eliminated hypothesis H34 (duplicate of H12). --- Eliminated hypothesis H118 (duplicate of H12). --- Eliminated hypothesis H174 (duplicate of H137). --- Eliminated hypothesis H12 (duplicate of H2). --- Eliminated hypothesis H137 (duplicate of H127). --- Eliminated hypothesis H30 (duplicate of H8). --- Eliminated hypothesis H179 (duplicate of H133). --- Eliminated hypothesis H9 (duplicate of H3). --- Eliminated hypothesis H134 (duplicate of H129). --- Eliminated hypothesis H29 (duplicate of H7). --- Eliminated hypothesis H26 (duplicate of H1). --- Eliminated hypothesis H3 (redundant, given H206). --- Eliminated hypothesis H10 (redundant, given H120). --- Eliminated hypothesis H11 (redundant, given H1). --- Eliminated hypothesis H120 (redundant, given H206). --- Eliminated hypothesis H135 (redundant, given H130). --- Eliminated hypothesis H136 (redundant, given H126). --- Eliminated hypothesis H173 (redundant, given H126). --- Eliminated hypothesis H202 (redundant, given H170). --- Eliminated hypothesis H203 (redundant, given H171). -S- Substituted hypothesis H128. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( local_ctx__2)) by: fld_hash_bit_len(fld_h(ctx)). -S- Substituted hypothesis H131. This was achieved by replacing all occurrences of fld_byte_count(fld_h( local_ctx__2)) by: fld_byte_count(fld_h(ctx)). +++ New H207: integer__size >= 0 +++ New H208: natural__size >= 0 +++ New H209: spark__unsigned__u6__size >= 0 +++ New H210: spark__unsigned__u7__size >= 0 +++ New H211: spark__unsigned__byte__size >= 0 +++ New H212: spark__unsigned__u16__size >= 0 +++ New H213: spark__unsigned__u32__size >= 0 +++ New H214: spark__unsigned__u64__size >= 0 +++ New H215: spark__crypto__word_count_t__size >= 0 +++ New H216: hash_bit_length__size >= 0 +++ New H217: initialized_hash_bit_length__size >= 0 +++ New H218: skein_512_state_words_index__size >= 0 +++ New H219: skein_512_block_bytes_count__size >= 0 +++ New H220: skein_512_block_bytes_index__size >= 0 +++ New H221: positive_block_512_count_t__size >= 0 +++ New H222: skein_512_context__size >= 0 +++ New H223: result__index__subtype__1__first <= result__index__subtype__1__last +++ New H224: context_header__size >= 0 +++ New H225: output_byte_count_t__size >= 0 +++ New H226: output_block_count_t__size >= 0 +++ New H227: positive_output_block_count_t__size >= 0 +++ New H228: result__index__subtype__1__first >= 0 +++ New H229: result__index__subtype__1__last >= 0 +++ New H230: result__index__subtype__1__last <= 2147483647 +++ New H231: result__index__subtype__1__first <= 2147483647 *** Proved C4: result__index__subtype__1__last > - 1 using hypothesis H229. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_20. @@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H36 has been replaced by "true". 
(It is already present, as H1) . --- Hypothesis H37 has been replaced by "true". (It is already present, as H34). --- Hypothesis H38 has been replaced by "true". (It is already present, as H35). %%% Simplified H40 on reading formula in, to give: %%% H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H41 on reading formula in, to give: %%% H41: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H59 has been replaced by "true". (It is already present, as H40). --- Hypothesis H60 has been replaced by "true". (It is already present, as H41). --- Hypothesis H61 has been replaced by "true". (It is already present, as H42). --- Hypothesis H62 has been replaced by "true". (It is already present, as H43). --- Hypothesis H63 has been replaced by "true". (It is already present, as H44). --- Hypothesis H64 has been replaced by "true". (It is already present, as H45). --- Hypothesis H67 has been replaced by "true". (It is already present, as H48). --- Hypothesis H68 has been replaced by "true". (It is already present, as H49). --- Hypothesis H70 has been replaced by "true". (It is already present, as H51). --- Hypothesis H71 has been replaced by "true". (It is already present, as H52). --- Hypothesis H72 has been replaced by "true". (It is already present, as H53). --- Hypothesis H73 has been replaced by "true". (It is already present, as H54). --- Hypothesis H74 has been replaced by "true". (It is already present, as H55). --- Hypothesis H75 has been replaced by "true". (It is already present, as H56). --- Hypothesis H76 has been replaced by "true". (It is already present, as H57). --- Hypothesis H77 has been replaced by "true". (It is already present, as H58). %%% Simplified H80 on reading formula in, to give: %%% H80: fld_h(local_ctx__4) = upf_tweak_words(upf_byte_count(fld_h( local_ctx__3), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_out, first_block := true, final_block := true)) %%% Simplified H103 on reading formula in, to give: %%% H103: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H104 on reading formula in, to give: %%% H104: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H105 has been replaced by "true". (It is already present, as H83). --- Hypothesis H106 has been replaced by "true". (It is already present, as H84). --- Hypothesis H107 has been replaced by "true". (It is already present, as H85). --- Hypothesis H108 has been replaced by "true". (It is already present, as H86). --- Hypothesis H111 has been replaced by "true". (It is already present, as H89). 
--- Hypothesis H112 has been replaced by "true". (It is already present, as H90). --- Hypothesis H114 has been replaced by "true". (It is already present, as H92). --- Hypothesis H115 has been replaced by "true". (It is already present, as H93). --- Hypothesis H116 has been replaced by "true". (It is already present, as H94). --- Hypothesis H117 has been replaced by "true". (It is already present, as H95). --- Hypothesis H118 has been replaced by "true". (It is already present, as H96). --- Hypothesis H119 has been replaced by "true". (It is already present, as H97). --- Hypothesis H120 has been replaced by "true". (It is already present, as H98). --- Hypothesis H121 has been replaced by "true". (It is already present, as H99). --- Hypothesis H122 has been replaced by "true". (It is already present, as H103). --- Hypothesis H123 has been replaced by "true". (It is already present, as H104). --- Hypothesis H124 has been replaced by "true". (It is already present, as H83). --- Hypothesis H125 has been replaced by "true". (It is already present, as H84). --- Hypothesis H126 has been replaced by "true". (It is already present, as H85). --- Hypothesis H127 has been replaced by "true". (It is already present, as H86). --- Hypothesis H130 has been replaced by "true". (It is already present, as H89). --- Hypothesis H131 has been replaced by "true". (It is already present, as H90). --- Hypothesis H133 has been replaced by "true". (It is already present, as H92). --- Hypothesis H134 has been replaced by "true". (It is already present, as H93). --- Hypothesis H135 has been replaced by "true". (It is already present, as H94). --- Hypothesis H136 has been replaced by "true". (It is already present, as H95). --- Hypothesis H137 has been replaced by "true". (It is already present, as H96). --- Hypothesis H138 has been replaced by "true". (It is already present, as H97). --- Hypothesis H139 has been replaced by "true". (It is already present, as H98). --- Hypothesis H140 has been replaced by "true". (It is already present, as H99). %%% Simplified H141 on reading formula in, to give: %%% H141: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4), [i___1]) <= spark__unsigned__byte__last) %%% Simplified H153 on reading formula in, to give: %%% H153: 63 <= skein_512_block_bytes_index__last %%% Simplified H154 on reading formula in, to give: %%% H154: 63 <= skein_512_block_bytes_index__last %%% Simplified H156 on reading formula in, to give: %%% H156: 63 <= natural__last %%% Simplified H163 on reading formula in, to give: %%% H163: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H164 on reading formula in, to give: %%% H164: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H182 has been replaced by "true". (It is already present, as H32). --- Hypothesis H183 has been replaced by "true". (It is already present, as H33). --- Hypothesis H190 has been replaced by "true". 
(It is already present, as H186). --- Hypothesis H191 has been replaced by "true". (It is already present, as H187). %%% Simplified H195 on reading formula in, to give: %%% H195: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> spark__unsigned__byte__first <= element(result, [i___1]) and element( result, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H196 has been replaced by "true". (It is already present, as H32). --- Hypothesis H197 has been replaced by "true". (It is already present, as H33). --- Hypothesis H198 has been replaced by "true". (It is already present, as H163). --- Hypothesis H199 has been replaced by "true". (It is already present, as H164). --- Hypothesis H200 has been replaced by "true". (It is already present, as H165). --- Hypothesis H201 has been replaced by "true". (It is already present, as H166). --- Hypothesis H202 has been replaced by "true". (It is already present, as H167). --- Hypothesis H203 has been replaced by "true". (It is already present, as H168). --- Hypothesis H206 has been replaced by "true". (It is already present, as H171). --- Hypothesis H207 has been replaced by "true". (It is already present, as H172). --- Hypothesis H209 has been replaced by "true". (It is already present, as H174). --- Hypothesis H210 has been replaced by "true". (It is already present, as H175). --- Hypothesis H211 has been replaced by "true". (It is already present, as H176). --- Hypothesis H212 has been replaced by "true". (It is already present, as H177). --- Hypothesis H213 has been replaced by "true". (It is already present, as H178). --- Hypothesis H214 has been replaced by "true". (It is already present, as H179). --- Hypothesis H215 has been replaced by "true". (It is already present, as H180). --- Hypothesis H216 has been replaced by "true". (It is already present, as H181). --- Hypothesis H217 has been replaced by "true". (It is already present, as H193). --- Hypothesis H218 has been replaced by "true". (It is already present, as H194). --- Hypothesis H219 has been replaced by "true". (It is already present, as H193). --- Hypothesis H220 has been replaced by "true". (It is already present, as H194). --- Hypothesis H223 has been replaced by "true". (It is already present, as H30). --- Hypothesis H227 has been replaced by "true". (It is already present, as H188). --- Hypothesis H228 has been replaced by "true". (It is already present, as H189). %%% Simplified H229 on reading formula in, to give: %%% H229: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> spark__unsigned__byte__first <= element(result__6, [i___1]) and element(result__6, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H230 on reading formula in, to give: %%% H230: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [ i___1]) <= spark__unsigned__u64__last) --- Hypothesis H231 has been replaced by "true". (It is already present, as H32). --- Hypothesis H232 has been replaced by "true". (It is already present, as H33). --- Hypothesis H235 has been replaced by "true". (It is already present, as H233). --- Hypothesis H236 has been replaced by "true". (It is already present, as H234). 
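The long runs of 'has been replaced by "true"' and 'Eliminated hypothesis ... (duplicate of ...)' messages in the log above are the Simplifier removing repeated facts: a hypothesis whose formula already occurs earlier is first replaced by true, and true-hypotheses are subsequently dropped. The sketch below illustrates that bookkeeping in Python under the simplifying assumption that formulae compare equal as whitespace-normalised strings; the hypothesis names and formulae in the example are illustrative, not the ones from this VC.

def deduplicate(hyps):
    """Drop true-hypotheses and hypotheses whose formula already occurred."""
    seen = set()
    kept = {}
    for h_id, text in hyps.items():
        norm = " ".join(text.split())           # normalise whitespace only
        if norm == "true" or norm in seen:      # true-hypothesis, or duplicate
            continue
        seen.add(norm)
        kept[h_id] = text
    return kept

example = {
    "Ha": "fld_byte_count(fld_h(ctx)) >= 0",
    "Hb": "fld_byte_count(fld_h(ctx))  >= 0",   # duplicate of Ha
    "Hc": "true",                               # already discharged
}
print(deduplicate(example))                     # only Ha survives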
%%% Simplified C1 on reading formula in, to give: %%% C1: fld_hash_bit_len(fld_h(local_ctx__5)) > 0 %%% Simplified C7 on reading formula in, to give: %%% C7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified C8 on reading formula in, to give: %%% C8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) *** Proved C1: fld_hash_bit_len(fld_h(local_ctx__5)) > 0 using hypotheses H39, H81 & H159. *** Proved C2: byte_count <= result__index__subtype__1__last + 1 using hypothesis H2. *** Proved C5: blocks_done + 1 < blocks_required using hypothesis H239. *** Proved C6: blocks_required = (byte_count + 63) div 64 using hypothesis H6. *** Proved C7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) using hypothesis H7. *** Proved C8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) using hypothesis H8. *** Proved C9: fld_byte_count(fld_h(ctx)) >= natural__first using hypothesis H9. *** Proved C10: fld_byte_count(fld_h(ctx)) <= natural__last using hypothesis H10. *** Proved C11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first using hypothesis H11. *** Proved C12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last using hypothesis H12. *** Proved C13: true *** Proved C14: true *** Proved C15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first using hypothesis H15. *** Proved C16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last using hypothesis H16. *** Proved C17: true *** Proved C18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first using hypothesis H18. *** Proved C19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last using hypothesis H19. *** Proved C20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first using hypothesis H20. *** Proved C21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last using hypothesis H21. *** Proved C22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first using hypothesis H22. *** Proved C23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last using hypothesis H23. *** Proved C24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first using hypothesis H24. *** Proved C25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last using hypothesis H25. *** Proved C26: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first using hypothesis H26. *** Proved C27: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last using hypothesis H27. *** Proved C28: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first using hypothesis H28. *** Proved C29: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last using hypothesis H29. 
*** Proved C30: result__index__subtype__1__first = 0 using hypothesis H30. *** Proved C31: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 using hypothesis H31. >>> Restructured hypothesis H239 into: >>> H239: blocks_done + 1 < blocks_required -S- Applied substitution rule skein_512_fi_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H3: blocks_done * 64 < byte_count New H4: blocks_done * 64 < result__index__subtype__1__last + 1 New H186: byte_count - blocks_done * 64 >= natural__first New H187: byte_count - blocks_done * 64 <= natural__last New H188: blocks_done * 64 >= integer__base__first New H189: blocks_done * 64 <= integer__base__last New H192: byte_count - blocks_done * 64 >= 64 New H193: 64 >= natural__first New H194: 64 <= natural__last New H221: blocks_done * 64 >= natural__first New H222: blocks_done * 64 <= natural__last New H225: result__index__subtype__1__last >= blocks_done * 64 + 63 New H226: 64 <= (skein_512_state_words_index__last + 1) * 8 New C3: (blocks_done + 1) * 64 < byte_count New C4: (blocks_done + 1) * 64 < result__index__subtype__1__last + 1 -S- Applied substitution rule skein_512_fi_rules(2). This was achieved by replacing all occurrences of skein_block_type_out by: 63. New H78: 63 >= spark__unsigned__u6__first New H79: 63 <= spark__unsigned__u6__last New H80: fld_h(local_ctx__4) = upf_tweak_words(upf_byte_count(fld_h( local_ctx__3), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := 63, first_block := true, final_block := true)) -S- Applied substitution rule skein_512_fi_rules(6). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H188: blocks_done * 64 >= - 2147483648 -S- Applied substitution rule skein_512_fi_rules(7). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H189: blocks_done * 64 <= 2147483647 -S- Applied substitution rule skein_512_fi_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New H9: fld_byte_count(fld_h(ctx)) >= 0 New H42: fld_byte_count(fld_h(local_ctx__3)) >= 0 New H83: fld_byte_count(fld_h(local_ctx__4)) >= 0 New H142: true New H146: true New H165: fld_byte_count(fld_h(local_ctx__5)) >= 0 New H186: byte_count - blocks_done * 64 >= 0 New H193: true New H221: blocks_done * 64 >= 0 -S- Applied substitution rule skein_512_fi_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H10: fld_byte_count(fld_h(ctx)) <= 2147483647 New H43: fld_byte_count(fld_h(local_ctx__3)) <= 2147483647 New H84: fld_byte_count(fld_h(local_ctx__4)) <= 2147483647 New H143: true New H147: true New H155: skein_512_block_bytes_index__last <= 2147483647 New H156: true New H166: fld_byte_count(fld_h(local_ctx__5)) <= 2147483647 New H187: byte_count - blocks_done * 64 <= 2147483647 New H194: true New H222: blocks_done * 64 <= 2147483647 -S- Applied substitution rule skein_512_fi_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H48: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H89: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H171: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) >= 0 New H78: true -S- Applied substitution rule skein_512_fi_rules(43). 
This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H49: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= 63 New H90: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= 63 New H172: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) <= 63 New H79: true -S- Applied substitution rule skein_512_fi_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H51: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H92: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H174: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) >= 0 -S- Applied substitution rule skein_512_fi_rules(49). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H52: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= 127 New H93: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= 127 New H175: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) <= 127 -S- Applied substitution rule skein_512_fi_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2] ) <= spark__unsigned__byte__last) New H103: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4), [i___2] ) <= spark__unsigned__byte__last) New H141: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4), [i___1] ) <= spark__unsigned__byte__last) New H163: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5), [i___2] ) <= spark__unsigned__byte__last) New H195: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> 0 <= element( result, [i___1]) and element(result, [i___1]) <= spark__unsigned__byte__last) New H229: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> 0 <= element( result__6, [i___1]) and element(result__6, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_fi_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. 
New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2] ) <= 255) New H103: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4), [i___2] ) <= 255) New H141: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4), [i___1] ) <= 255) New H163: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5), [i___2] ) <= 255) New H195: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> 0 <= element( result, [i___1]) and element(result, [i___1]) <= 255) New H229: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> 0 <= element( result__6, [i___1]) and element(result__6, [i___1]) <= 255) -S- Applied substitution rule skein_512_fi_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H53: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H94: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H176: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) >= 0 -S- Applied substitution rule skein_512_fi_rules(61). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H54: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= 65535 New H95: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= 65535 New H177: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) <= 65535 -S- Applied substitution rule skein_512_fi_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H55: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H96: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H178: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) >= 0 -S- Applied substitution rule skein_512_fi_rules(67). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H56: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= 4294967295 New H97: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= 4294967295 New H179: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) <= 4294967295 -S- Applied substitution rule skein_512_fi_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H34: blocks_done >= 0 New H41: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1] ) <= spark__unsigned__u64__last) New H57: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H98: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H104: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4), [i___1] ) <= spark__unsigned__u64__last) New H164: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5), [i___1] ) <= spark__unsigned__u64__last) New H180: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) >= 0 New H230: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule skein_512_fi_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H35: blocks_done <= 18446744073709551615 New H58: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= 18446744073709551615 New H99: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= 18446744073709551615 New H181: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) <= 18446744073709551615 New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H41: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1] ) <= 18446744073709551615) New H104: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4), [i___1] ) <= 18446744073709551615) New H164: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5), [i___1] ) <= 18446744073709551615) New H230: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(83). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. 
New H11: fld_hash_bit_len(fld_h(ctx)) >= 0 New H44: fld_hash_bit_len(fld_h(local_ctx__3)) >= 0 New H85: fld_hash_bit_len(fld_h(local_ctx__4)) >= 0 New H167: fld_hash_bit_len(fld_h(local_ctx__5)) >= 0 -S- Applied substitution rule skein_512_fi_rules(84). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H12: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H45: fld_hash_bit_len(fld_h(local_ctx__3)) <= 2147483640 New H86: fld_hash_bit_len(fld_h(local_ctx__4)) <= 2147483640 New H168: fld_hash_bit_len(fld_h(local_ctx__5)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H26: fld_hash_bit_len(fld_h(ctx)) >= 1 New H148: fld_hash_bit_len(fld_h(local_ctx__4)) >= 1 New H157: fld_hash_bit_len(fld_h(local_ctx__5)) >= 1 -S- Applied substitution rule skein_512_fi_rules(89). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H27: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H149: fld_hash_bit_len(fld_h(local_ctx__4)) <= 2147483640 New H158: fld_hash_bit_len(fld_h(local_ctx__5)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H224: true New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H41: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__3) , [i___1]) and element(fld_x(local_ctx__3), [i___1]) <= 18446744073709551615) New H104: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__4) , [i___1]) and element(fld_x(local_ctx__4), [i___1]) <= 18446744073709551615) New H164: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__5) , [i___1]) and element(fld_x(local_ctx__5), [i___1]) <= 18446744073709551615) New H230: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(94). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H226: true New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H41: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3) , [i___1]) <= 18446744073709551615) New H104: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4) , [i___1]) <= 18446744073709551615) New H164: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5) , [i___1]) <= 18446744073709551615) New H230: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. 
New H28: fld_byte_count(fld_h(ctx)) >= 0 New H150: fld_byte_count(fld_h(local_ctx__4)) >= 0 New H160: fld_byte_count(fld_h(local_ctx__5)) >= 0 -S- Applied substitution rule skein_512_fi_rules(99). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H29: fld_byte_count(fld_h(ctx)) <= 64 New H151: fld_byte_count(fld_h(local_ctx__4)) <= 64 New H161: fld_byte_count(fld_h(local_ctx__5)) <= 64 -S- Applied substitution rule skein_512_fi_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H152: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H40: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__3) , [i___2]) and element(fld_b(local_ctx__3), [i___2]) <= 255) New H103: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__4) , [i___2]) and element(fld_b(local_ctx__4), [i___2]) <= 255) New H141: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__4) , [i___1]) and element(fld_b(local_ctx__4), [i___1]) <= 255) New H163: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__5) , [i___2]) and element(fld_b(local_ctx__5), [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(104). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H153: true New H154: true New H155: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H40: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3) , [i___2]) <= 255) New H103: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4) , [i___2]) <= 255) New H141: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4) , [i___1]) <= 255) New H163: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5) , [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H144: true -S- Applied substitution rule skein_512_fi_rules(109). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H145: true -S- Applied substitution rule skein_512_fi_rules(125). This was achieved by replacing all occurrences of output_byte_count_t__first by: 1. New H184: byte_count >= 1 -S- Applied substitution rule skein_512_fi_rules(126). This was achieved by replacing all occurrences of output_byte_count_t__last by: 268435455. New H185: byte_count <= 268435455 -S- Applied substitution rule skein_512_fi_rules(130). This was achieved by replacing all occurrences of output_block_count_t__first by: 0. New H32: blocks_done >= 0 New H233: blocks_done >= - 1 -S- Applied substitution rule skein_512_fi_rules(131). This was achieved by replacing all occurrences of output_block_count_t__last by: 4194304. 
New H33: blocks_done <= 4194304 New H234: blocks_done <= 4194303 -S- Applied substitution rule skein_512_fi_rules(135). This was achieved by replacing all occurrences of positive_output_block_count_t__first by: 1. New H237: blocks_required >= 1 -S- Applied substitution rule skein_512_fi_rules(136). This was achieved by replacing all occurrences of positive_output_block_count_t__last by: 4194304. New H238: blocks_required <= 4194304 --- Eliminated hypothesis H13 (true-hypothesis). --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H17 (true-hypothesis). --- Eliminated hypothesis H36 (true-hypothesis). --- Eliminated hypothesis H37 (true-hypothesis). --- Eliminated hypothesis H38 (true-hypothesis). --- Eliminated hypothesis H46 (true-hypothesis). --- Eliminated hypothesis H47 (true-hypothesis). --- Eliminated hypothesis H50 (true-hypothesis). --- Eliminated hypothesis H59 (true-hypothesis). --- Eliminated hypothesis H60 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H63 (true-hypothesis). --- Eliminated hypothesis H64 (true-hypothesis). --- Eliminated hypothesis H65 (true-hypothesis). --- Eliminated hypothesis H66 (true-hypothesis). --- Eliminated hypothesis H67 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H69 (true-hypothesis). --- Eliminated hypothesis H70 (true-hypothesis). --- Eliminated hypothesis H71 (true-hypothesis). --- Eliminated hypothesis H72 (true-hypothesis). --- Eliminated hypothesis H73 (true-hypothesis). --- Eliminated hypothesis H74 (true-hypothesis). --- Eliminated hypothesis H75 (true-hypothesis). --- Eliminated hypothesis H76 (true-hypothesis). --- Eliminated hypothesis H77 (true-hypothesis). --- Eliminated hypothesis H87 (true-hypothesis). --- Eliminated hypothesis H88 (true-hypothesis). --- Eliminated hypothesis H91 (true-hypothesis). --- Eliminated hypothesis H105 (true-hypothesis). --- Eliminated hypothesis H106 (true-hypothesis). --- Eliminated hypothesis H107 (true-hypothesis). --- Eliminated hypothesis H108 (true-hypothesis). --- Eliminated hypothesis H109 (true-hypothesis). --- Eliminated hypothesis H110 (true-hypothesis). --- Eliminated hypothesis H111 (true-hypothesis). --- Eliminated hypothesis H112 (true-hypothesis). --- Eliminated hypothesis H113 (true-hypothesis). --- Eliminated hypothesis H114 (true-hypothesis). --- Eliminated hypothesis H115 (true-hypothesis). --- Eliminated hypothesis H116 (true-hypothesis). --- Eliminated hypothesis H117 (true-hypothesis). --- Eliminated hypothesis H118 (true-hypothesis). --- Eliminated hypothesis H119 (true-hypothesis). --- Eliminated hypothesis H120 (true-hypothesis). --- Eliminated hypothesis H121 (true-hypothesis). --- Eliminated hypothesis H122 (true-hypothesis). --- Eliminated hypothesis H123 (true-hypothesis). --- Eliminated hypothesis H124 (true-hypothesis). --- Eliminated hypothesis H125 (true-hypothesis). --- Eliminated hypothesis H126 (true-hypothesis). --- Eliminated hypothesis H127 (true-hypothesis). --- Eliminated hypothesis H128 (true-hypothesis). --- Eliminated hypothesis H129 (true-hypothesis). --- Eliminated hypothesis H130 (true-hypothesis). --- Eliminated hypothesis H131 (true-hypothesis). --- Eliminated hypothesis H132 (true-hypothesis). --- Eliminated hypothesis H133 (true-hypothesis). --- Eliminated hypothesis H134 (true-hypothesis). --- Eliminated hypothesis H135 (true-hypothesis). --- Eliminated hypothesis H136 (true-hypothesis). 
--- Eliminated hypothesis H137 (true-hypothesis). --- Eliminated hypothesis H138 (true-hypothesis). --- Eliminated hypothesis H139 (true-hypothesis). --- Eliminated hypothesis H140 (true-hypothesis). --- Eliminated hypothesis H169 (true-hypothesis). --- Eliminated hypothesis H170 (true-hypothesis). --- Eliminated hypothesis H173 (true-hypothesis). --- Eliminated hypothesis H182 (true-hypothesis). --- Eliminated hypothesis H183 (true-hypothesis). --- Eliminated hypothesis H190 (true-hypothesis). --- Eliminated hypothesis H191 (true-hypothesis). --- Eliminated hypothesis H196 (true-hypothesis). --- Eliminated hypothesis H197 (true-hypothesis). --- Eliminated hypothesis H198 (true-hypothesis). --- Eliminated hypothesis H199 (true-hypothesis). --- Eliminated hypothesis H200 (true-hypothesis). --- Eliminated hypothesis H201 (true-hypothesis). --- Eliminated hypothesis H202 (true-hypothesis). --- Eliminated hypothesis H203 (true-hypothesis). --- Eliminated hypothesis H204 (true-hypothesis). --- Eliminated hypothesis H205 (true-hypothesis). --- Eliminated hypothesis H206 (true-hypothesis). --- Eliminated hypothesis H207 (true-hypothesis). --- Eliminated hypothesis H208 (true-hypothesis). --- Eliminated hypothesis H209 (true-hypothesis). --- Eliminated hypothesis H210 (true-hypothesis). --- Eliminated hypothesis H211 (true-hypothesis). --- Eliminated hypothesis H212 (true-hypothesis). --- Eliminated hypothesis H213 (true-hypothesis). --- Eliminated hypothesis H214 (true-hypothesis). --- Eliminated hypothesis H215 (true-hypothesis). --- Eliminated hypothesis H216 (true-hypothesis). --- Eliminated hypothesis H217 (true-hypothesis). --- Eliminated hypothesis H218 (true-hypothesis). --- Eliminated hypothesis H219 (true-hypothesis). --- Eliminated hypothesis H220 (true-hypothesis). --- Eliminated hypothesis H223 (true-hypothesis). --- Eliminated hypothesis H227 (true-hypothesis). --- Eliminated hypothesis H228 (true-hypothesis). --- Eliminated hypothesis H231 (true-hypothesis). --- Eliminated hypothesis H232 (true-hypothesis). --- Eliminated hypothesis H235 (true-hypothesis). --- Eliminated hypothesis H236 (true-hypothesis). --- Eliminated hypothesis H142 (true-hypothesis). --- Eliminated hypothesis H146 (true-hypothesis). --- Eliminated hypothesis H193 (true-hypothesis). --- Eliminated hypothesis H143 (true-hypothesis). --- Eliminated hypothesis H147 (true-hypothesis). --- Eliminated hypothesis H156 (true-hypothesis). --- Eliminated hypothesis H194 (true-hypothesis). --- Eliminated hypothesis H78 (true-hypothesis). --- Eliminated hypothesis H79 (true-hypothesis). --- Eliminated hypothesis H224 (true-hypothesis). --- Eliminated hypothesis H226 (true-hypothesis). --- Eliminated hypothesis H152 (true-hypothesis). --- Eliminated hypothesis H153 (true-hypothesis). --- Eliminated hypothesis H154 (true-hypothesis). --- Eliminated hypothesis H155 (true-hypothesis). --- Eliminated hypothesis H144 (true-hypothesis). --- Eliminated hypothesis H145 (true-hypothesis). --- Eliminated hypothesis H222 (duplicate of H189). --- Eliminated hypothesis H28 (duplicate of H9). --- Eliminated hypothesis H150 (duplicate of H83). --- Eliminated hypothesis H27 (duplicate of H12). --- Eliminated hypothesis H149 (duplicate of H86). --- Eliminated hypothesis H168 (duplicate of H158). --- Eliminated hypothesis H165 (duplicate of H160). --- Eliminated hypothesis H34 (duplicate of H32). --- Eliminated hypothesis H3 (redundant, given H192). --- Eliminated hypothesis H4 (redundant, given H225). 
--- Eliminated hypothesis H5 (redundant, given H239). --- Eliminated hypothesis H10 (redundant, given H29). --- Eliminated hypothesis H11 (redundant, given H26). --- Eliminated hypothesis H32 (redundant, given H221). --- Eliminated hypothesis H33 (redundant, given H234). --- Eliminated hypothesis H35 (redundant, given H189). --- Eliminated hypothesis H44 (redundant, given H39). --- Eliminated hypothesis H83 (redundant, given H82). --- Eliminated hypothesis H84 (redundant, given H82). --- Eliminated hypothesis H85 (redundant, given H148). --- Eliminated hypothesis H151 (redundant, given H82). --- Eliminated hypothesis H166 (redundant, given H161). --- Eliminated hypothesis H167 (redundant, given H157). --- Eliminated hypothesis H186 (redundant, given H192). --- Eliminated hypothesis H188 (redundant, given H221). --- Eliminated hypothesis H189 (redundant, given H234). --- Eliminated hypothesis H233 (redundant, given H221). -S- Eliminated hypothesis H6. This was achieved by replacing all occurrences of blocks_required by: (byte_count + 63) div 64. New H239: blocks_done + 1 < (byte_count + 63) div 64 New H237: (byte_count + 63) div 64 >= 1 New H238: (byte_count + 63) div 64 <= 4194304 -S- Substituted hypothesis H81. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( local_ctx__4)) by: fld_hash_bit_len(fld_h(local_ctx__3)). -S- Substituted hypothesis H100. This was achieved by replacing all occurrences of fld_x(local_ctx__4) by: fld_x(local_ctx__3). -S- Substituted hypothesis H101. This was achieved by replacing all occurrences of fld_b(local_ctx__4) by: fld_b(local_ctx__3). -S- Substituted hypothesis H162. This was achieved by replacing all occurrences of fld_byte_count(fld_h( local_ctx__5)) by: fld_byte_count(fld_h(local_ctx__4)). -S- Substituted hypothesis H159. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( local_ctx__5)) by: fld_hash_bit_len(fld_h(local_ctx__3)). +++ New H240: integer__size >= 0 +++ New H241: natural__size >= 0 +++ New H242: spark__unsigned__u6__size >= 0 +++ New H243: spark__unsigned__u7__size >= 0 +++ New H244: spark__unsigned__byte__size >= 0 +++ New H245: spark__unsigned__u16__size >= 0 +++ New H246: spark__unsigned__u32__size >= 0 +++ New H247: spark__unsigned__u64__size >= 0 +++ New H248: spark__crypto__word_count_t__size >= 0 +++ New H249: hash_bit_length__size >= 0 +++ New H250: initialized_hash_bit_length__size >= 0 +++ New H251: skein_512_state_words_index__size >= 0 +++ New H252: skein_512_block_bytes_count__size >= 0 +++ New H253: skein_512_block_bytes_index__size >= 0 +++ New H254: positive_block_512_count_t__size >= 0 +++ New H255: skein_512_context__size >= 0 +++ New H256: result__index__subtype__1__first <= result__index__subtype__1__last +++ New H257: context_header__size >= 0 +++ New H258: output_byte_count_t__size >= 0 +++ New H259: output_block_count_t__size >= 0 +++ New H260: positive_output_block_count_t__size >= 0 +++ New H261: result__index__subtype__1__first >= 0 +++ New H262: result__index__subtype__1__last >= 0 +++ New H263: result__index__subtype__1__last <= 2147483647 +++ New H264: result__index__subtype__1__first <= 2147483647 @@@@@@@@@@ VC: procedure_skein_512_final_21. 
@@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H36 has been replaced by "true". (It is already present, as H1) . --- Hypothesis H37 has been replaced by "true". (It is already present, as H34). --- Hypothesis H38 has been replaced by "true". (It is already present, as H35). %%% Simplified H40 on reading formula in, to give: %%% H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H41 on reading formula in, to give: %%% H41: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H59 has been replaced by "true". (It is already present, as H40). --- Hypothesis H60 has been replaced by "true". (It is already present, as H41). --- Hypothesis H61 has been replaced by "true". (It is already present, as H42). --- Hypothesis H62 has been replaced by "true". (It is already present, as H43). --- Hypothesis H63 has been replaced by "true". (It is already present, as H44). --- Hypothesis H64 has been replaced by "true". (It is already present, as H45). --- Hypothesis H67 has been replaced by "true". (It is already present, as H48). --- Hypothesis H68 has been replaced by "true". (It is already present, as H49). --- Hypothesis H70 has been replaced by "true". (It is already present, as H51). --- Hypothesis H71 has been replaced by "true". (It is already present, as H52). --- Hypothesis H72 has been replaced by "true". (It is already present, as H53). --- Hypothesis H73 has been replaced by "true". (It is already present, as H54). --- Hypothesis H74 has been replaced by "true". (It is already present, as H55). --- Hypothesis H75 has been replaced by "true". (It is already present, as H56). --- Hypothesis H76 has been replaced by "true". (It is already present, as H57). --- Hypothesis H77 has been replaced by "true". (It is already present, as H58). 
%%% Simplified H80 on reading formula in, to give: %%% H80: fld_h(local_ctx__4) = upf_tweak_words(upf_byte_count(fld_h( local_ctx__3), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_out, first_block := true, final_block := true)) %%% Simplified H103 on reading formula in, to give: %%% H103: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H104 on reading formula in, to give: %%% H104: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H105 has been replaced by "true". (It is already present, as H83). --- Hypothesis H106 has been replaced by "true". (It is already present, as H84). --- Hypothesis H107 has been replaced by "true". (It is already present, as H85). --- Hypothesis H108 has been replaced by "true". (It is already present, as H86). --- Hypothesis H111 has been replaced by "true". (It is already present, as H89). --- Hypothesis H112 has been replaced by "true". (It is already present, as H90). --- Hypothesis H114 has been replaced by "true". (It is already present, as H92). --- Hypothesis H115 has been replaced by "true". (It is already present, as H93). --- Hypothesis H116 has been replaced by "true". (It is already present, as H94). --- Hypothesis H117 has been replaced by "true". (It is already present, as H95). --- Hypothesis H118 has been replaced by "true". (It is already present, as H96). --- Hypothesis H119 has been replaced by "true". (It is already present, as H97). --- Hypothesis H120 has been replaced by "true". (It is already present, as H98). --- Hypothesis H121 has been replaced by "true". (It is already present, as H99). --- Hypothesis H122 has been replaced by "true". (It is already present, as H103). --- Hypothesis H123 has been replaced by "true". (It is already present, as H104). --- Hypothesis H124 has been replaced by "true". (It is already present, as H83). --- Hypothesis H125 has been replaced by "true". (It is already present, as H84). --- Hypothesis H126 has been replaced by "true". (It is already present, as H85). --- Hypothesis H127 has been replaced by "true". (It is already present, as H86). --- Hypothesis H130 has been replaced by "true". (It is already present, as H89). --- Hypothesis H131 has been replaced by "true". (It is already present, as H90). --- Hypothesis H133 has been replaced by "true". (It is already present, as H92). --- Hypothesis H134 has been replaced by "true". (It is already present, as H93). --- Hypothesis H135 has been replaced by "true". (It is already present, as H94). --- Hypothesis H136 has been replaced by "true". (It is already present, as H95). --- Hypothesis H137 has been replaced by "true". (It is already present, as H96). --- Hypothesis H138 has been replaced by "true". (It is already present, as H97). --- Hypothesis H139 has been replaced by "true". (It is already present, as H98). --- Hypothesis H140 has been replaced by "true". (It is already present, as H99). 
%%% Simplified H141 on reading formula in, to give: %%% H141: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4), [i___1]) <= spark__unsigned__byte__last) %%% Simplified H153 on reading formula in, to give: %%% H153: 63 <= skein_512_block_bytes_index__last %%% Simplified H154 on reading formula in, to give: %%% H154: 63 <= skein_512_block_bytes_index__last %%% Simplified H156 on reading formula in, to give: %%% H156: 63 <= natural__last %%% Simplified H163 on reading formula in, to give: %%% H163: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H164 on reading formula in, to give: %%% H164: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H182 has been replaced by "true". (It is already present, as H32). --- Hypothesis H183 has been replaced by "true". (It is already present, as H33). --- Hypothesis H190 has been replaced by "true". (It is already present, as H186). --- Hypothesis H191 has been replaced by "true". (It is already present, as H187). %%% Simplified H193 on reading formula in, to give: %%% H193: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> spark__unsigned__byte__first <= element(result, [i___1]) and element( result, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H194 has been replaced by "true". (It is already present, as H32). --- Hypothesis H195 has been replaced by "true". (It is already present, as H33). --- Hypothesis H196 has been replaced by "true". (It is already present, as H163). --- Hypothesis H197 has been replaced by "true". (It is already present, as H164). --- Hypothesis H198 has been replaced by "true". (It is already present, as H165). --- Hypothesis H199 has been replaced by "true". (It is already present, as H166). --- Hypothesis H200 has been replaced by "true". (It is already present, as H167). --- Hypothesis H201 has been replaced by "true". (It is already present, as H168). --- Hypothesis H204 has been replaced by "true". (It is already present, as H171). --- Hypothesis H205 has been replaced by "true". (It is already present, as H172). --- Hypothesis H207 has been replaced by "true". (It is already present, as H174). --- Hypothesis H208 has been replaced by "true". (It is already present, as H175). --- Hypothesis H209 has been replaced by "true". (It is already present, as H176). --- Hypothesis H210 has been replaced by "true". (It is already present, as H177). --- Hypothesis H211 has been replaced by "true". (It is already present, as H178). --- Hypothesis H212 has been replaced by "true". (It is already present, as H179). --- Hypothesis H213 has been replaced by "true". (It is already present, as H180). --- Hypothesis H214 has been replaced by "true". (It is already present, as H181). --- Hypothesis H215 has been replaced by "true". (It is already present, as H186). --- Hypothesis H216 has been replaced by "true". (It is already present, as H187). 
--- Hypothesis H217 has been replaced by "true". (It is already present, as H186). --- Hypothesis H218 has been replaced by "true". (It is already present, as H187). --- Hypothesis H221 has been replaced by "true". (It is already present, as H30). --- Hypothesis H225 has been replaced by "true". (It is already present, as H188). --- Hypothesis H226 has been replaced by "true". (It is already present, as H189). %%% Simplified H227 on reading formula in, to give: %%% H227: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> spark__unsigned__byte__first <= element(result__6, [i___1]) and element(result__6, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H228 on reading formula in, to give: %%% H228: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [ i___1]) <= spark__unsigned__u64__last) --- Hypothesis H229 has been replaced by "true". (It is already present, as H32). --- Hypothesis H230 has been replaced by "true". (It is already present, as H33). --- Hypothesis H233 has been replaced by "true". (It is already present, as H231). --- Hypothesis H234 has been replaced by "true". (It is already present, as H232). %%% Simplified C1 on reading formula in, to give: %%% C1: fld_hash_bit_len(fld_h(local_ctx__5)) > 0 %%% Simplified C7 on reading formula in, to give: %%% C7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified C8 on reading formula in, to give: %%% C8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) *** Proved C1: fld_hash_bit_len(fld_h(local_ctx__5)) > 0 using hypotheses H39, H81 & H159. *** Proved C2: byte_count <= result__index__subtype__1__last + 1 using hypothesis H2. *** Proved C5: blocks_done + 1 < blocks_required using hypothesis H237. *** Proved C6: blocks_required = (byte_count + 63) div 64 using hypothesis H6. *** Proved C7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) using hypothesis H7. *** Proved C8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) using hypothesis H8. *** Proved C9: fld_byte_count(fld_h(ctx)) >= natural__first using hypothesis H9. *** Proved C10: fld_byte_count(fld_h(ctx)) <= natural__last using hypothesis H10. *** Proved C11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first using hypothesis H11. *** Proved C12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last using hypothesis H12. *** Proved C13: true *** Proved C14: true *** Proved C15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first using hypothesis H15. *** Proved C16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last using hypothesis H16. 
*** Proved C17: true *** Proved C18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first using hypothesis H18. *** Proved C19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last using hypothesis H19. *** Proved C20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first using hypothesis H20. *** Proved C21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last using hypothesis H21. *** Proved C22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first using hypothesis H22. *** Proved C23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last using hypothesis H23. *** Proved C24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first using hypothesis H24. *** Proved C25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last using hypothesis H25. *** Proved C26: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first using hypothesis H26. *** Proved C27: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last using hypothesis H27. *** Proved C28: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first using hypothesis H28. *** Proved C29: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last using hypothesis H29. *** Proved C30: result__index__subtype__1__first = 0 using hypothesis H30. *** Proved C31: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 using hypothesis H31. >>> Restructured hypothesis H192 into: >>> H192: byte_count - blocks_done * skein_512_block_bytes_c < skein_512_block_bytes_c >>> Restructured hypothesis H237 into: >>> H237: blocks_done + 1 < blocks_required -S- Applied substitution rule skein_512_fi_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H3: blocks_done * 64 < byte_count New H4: blocks_done * 64 < result__index__subtype__1__last + 1 New H186: byte_count - blocks_done * 64 >= natural__first New H187: byte_count - blocks_done * 64 <= natural__last New H188: blocks_done * 64 >= integer__base__first New H189: blocks_done * 64 <= integer__base__last New H192: byte_count - blocks_done * 64 < 64 New H219: blocks_done * 64 >= natural__first New H220: blocks_done * 64 <= natural__last New H223: result__index__subtype__1__last >= blocks_done * 64 + ( byte_count - blocks_done * 64 - 1) New H224: byte_count - blocks_done * 64 <= ( skein_512_state_words_index__last + 1) * 8 New C3: (blocks_done + 1) * 64 < byte_count New C4: (blocks_done + 1) * 64 < result__index__subtype__1__last + 1 -S- Applied substitution rule skein_512_fi_rules(2). This was achieved by replacing all occurrences of skein_block_type_out by: 63. New H78: 63 >= spark__unsigned__u6__first New H79: 63 <= spark__unsigned__u6__last New H80: fld_h(local_ctx__4) = upf_tweak_words(upf_byte_count(fld_h( local_ctx__3), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := 63, first_block := true, final_block := true)) -S- Applied substitution rule skein_512_fi_rules(6). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H188: blocks_done * 64 >= - 2147483648 -S- Applied substitution rule skein_512_fi_rules(7). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H189: blocks_done * 64 <= 2147483647 -S- Applied substitution rule skein_512_fi_rules(9). 
This was achieved by replacing all occurrences of natural__first by: 0. New H9: fld_byte_count(fld_h(ctx)) >= 0 New H42: fld_byte_count(fld_h(local_ctx__3)) >= 0 New H83: fld_byte_count(fld_h(local_ctx__4)) >= 0 New H142: true New H146: true New H165: fld_byte_count(fld_h(local_ctx__5)) >= 0 New H186: byte_count - blocks_done * 64 >= 0 New H219: blocks_done * 64 >= 0 -S- Applied substitution rule skein_512_fi_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H10: fld_byte_count(fld_h(ctx)) <= 2147483647 New H43: fld_byte_count(fld_h(local_ctx__3)) <= 2147483647 New H84: fld_byte_count(fld_h(local_ctx__4)) <= 2147483647 New H143: true New H147: true New H155: skein_512_block_bytes_index__last <= 2147483647 New H156: true New H166: fld_byte_count(fld_h(local_ctx__5)) <= 2147483647 New H187: byte_count - blocks_done * 64 <= 2147483647 New H220: blocks_done * 64 <= 2147483647 -S- Applied substitution rule skein_512_fi_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H48: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H89: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H171: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) >= 0 New H78: true -S- Applied substitution rule skein_512_fi_rules(43). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H49: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= 63 New H90: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= 63 New H172: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) <= 63 New H79: true -S- Applied substitution rule skein_512_fi_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H51: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H92: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H174: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) >= 0 -S- Applied substitution rule skein_512_fi_rules(49). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H52: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= 127 New H93: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= 127 New H175: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) <= 127 -S- Applied substitution rule skein_512_fi_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. 
New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2] ) <= spark__unsigned__byte__last) New H103: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4), [i___2] ) <= spark__unsigned__byte__last) New H141: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4), [i___1] ) <= spark__unsigned__byte__last) New H163: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5), [i___2] ) <= spark__unsigned__byte__last) New H193: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> 0 <= element( result, [i___1]) and element(result, [i___1]) <= spark__unsigned__byte__last) New H227: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> 0 <= element( result__6, [i___1]) and element(result__6, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_fi_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2] ) <= 255) New H103: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4), [i___2] ) <= 255) New H141: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4), [i___1] ) <= 255) New H163: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5), [i___2] ) <= 255) New H193: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> 0 <= element( result, [i___1]) and element(result, [i___1]) <= 255) New H227: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> 0 <= element( result__6, [i___1]) and element(result__6, [i___1]) <= 255) -S- Applied substitution rule skein_512_fi_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. 
New H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H53: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H94: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H176: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) >= 0 -S- Applied substitution rule skein_512_fi_rules(61). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H54: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= 65535 New H95: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= 65535 New H177: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) <= 65535 -S- Applied substitution rule skein_512_fi_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H55: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H96: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H178: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) >= 0 -S- Applied substitution rule skein_512_fi_rules(67). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H56: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= 4294967295 New H97: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= 4294967295 New H179: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) <= 4294967295 -S- Applied substitution rule skein_512_fi_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H34: blocks_done >= 0 New H41: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1] ) <= spark__unsigned__u64__last) New H57: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H98: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H104: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4), [i___1] ) <= spark__unsigned__u64__last) New H164: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5), [i___1] ) <= spark__unsigned__u64__last) New H180: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) >= 0 New H228: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule skein_512_fi_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H35: blocks_done <= 18446744073709551615 New H58: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= 18446744073709551615 New H99: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= 18446744073709551615 New H181: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) <= 18446744073709551615 New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H41: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1] ) <= 18446744073709551615) New H104: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4), [i___1] ) <= 18446744073709551615) New H164: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5), [i___1] ) <= 18446744073709551615) New H228: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(83). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H11: fld_hash_bit_len(fld_h(ctx)) >= 0 New H44: fld_hash_bit_len(fld_h(local_ctx__3)) >= 0 New H85: fld_hash_bit_len(fld_h(local_ctx__4)) >= 0 New H167: fld_hash_bit_len(fld_h(local_ctx__5)) >= 0 -S- Applied substitution rule skein_512_fi_rules(84). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H12: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H45: fld_hash_bit_len(fld_h(local_ctx__3)) <= 2147483640 New H86: fld_hash_bit_len(fld_h(local_ctx__4)) <= 2147483640 New H168: fld_hash_bit_len(fld_h(local_ctx__5)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H26: fld_hash_bit_len(fld_h(ctx)) >= 1 New H148: fld_hash_bit_len(fld_h(local_ctx__4)) >= 1 New H157: fld_hash_bit_len(fld_h(local_ctx__5)) >= 1 -S- Applied substitution rule skein_512_fi_rules(89). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H27: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H149: fld_hash_bit_len(fld_h(local_ctx__4)) <= 2147483640 New H158: fld_hash_bit_len(fld_h(local_ctx__5)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. 
New H222: true New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H41: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__3) , [i___1]) and element(fld_x(local_ctx__3), [i___1]) <= 18446744073709551615) New H104: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__4) , [i___1]) and element(fld_x(local_ctx__4), [i___1]) <= 18446744073709551615) New H164: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__5) , [i___1]) and element(fld_x(local_ctx__5), [i___1]) <= 18446744073709551615) New H228: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(94). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H224: byte_count - blocks_done * 64 <= 64 New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H41: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3) , [i___1]) <= 18446744073709551615) New H104: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4) , [i___1]) <= 18446744073709551615) New H164: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5) , [i___1]) <= 18446744073709551615) New H228: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H28: fld_byte_count(fld_h(ctx)) >= 0 New H150: fld_byte_count(fld_h(local_ctx__4)) >= 0 New H160: fld_byte_count(fld_h(local_ctx__5)) >= 0 -S- Applied substitution rule skein_512_fi_rules(99). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H29: fld_byte_count(fld_h(ctx)) <= 64 New H151: fld_byte_count(fld_h(local_ctx__4)) <= 64 New H161: fld_byte_count(fld_h(local_ctx__5)) <= 64 -S- Applied substitution rule skein_512_fi_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. 
New H152: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H40: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__3) , [i___2]) and element(fld_b(local_ctx__3), [i___2]) <= 255) New H103: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__4) , [i___2]) and element(fld_b(local_ctx__4), [i___2]) <= 255) New H141: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__4) , [i___1]) and element(fld_b(local_ctx__4), [i___1]) <= 255) New H163: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__5) , [i___2]) and element(fld_b(local_ctx__5), [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(104). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H153: true New H154: true New H155: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H40: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3) , [i___2]) <= 255) New H103: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4) , [i___2]) <= 255) New H141: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4) , [i___1]) <= 255) New H163: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5) , [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H144: true -S- Applied substitution rule skein_512_fi_rules(109). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H145: true -S- Applied substitution rule skein_512_fi_rules(125). This was achieved by replacing all occurrences of output_byte_count_t__first by: 1. New H184: byte_count >= 1 -S- Applied substitution rule skein_512_fi_rules(126). This was achieved by replacing all occurrences of output_byte_count_t__last by: 268435455. New H185: byte_count <= 268435455 -S- Applied substitution rule skein_512_fi_rules(130). This was achieved by replacing all occurrences of output_block_count_t__first by: 0. New H32: blocks_done >= 0 New H231: blocks_done >= - 1 -S- Applied substitution rule skein_512_fi_rules(131). This was achieved by replacing all occurrences of output_block_count_t__last by: 4194304. New H33: blocks_done <= 4194304 New H232: blocks_done <= 4194303 -S- Applied substitution rule skein_512_fi_rules(135). This was achieved by replacing all occurrences of positive_output_block_count_t__first by: 1. New H235: blocks_required >= 1 -S- Applied substitution rule skein_512_fi_rules(136). This was achieved by replacing all occurrences of positive_output_block_count_t__last by: 4194304. New H236: blocks_required <= 4194304 --- Eliminated hypothesis H13 (true-hypothesis). --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H17 (true-hypothesis). --- Eliminated hypothesis H36 (true-hypothesis). 
--- Eliminated hypothesis H37 (true-hypothesis). --- Eliminated hypothesis H38 (true-hypothesis). --- Eliminated hypothesis H46 (true-hypothesis). --- Eliminated hypothesis H47 (true-hypothesis). --- Eliminated hypothesis H50 (true-hypothesis). --- Eliminated hypothesis H59 (true-hypothesis). --- Eliminated hypothesis H60 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H63 (true-hypothesis). --- Eliminated hypothesis H64 (true-hypothesis). --- Eliminated hypothesis H65 (true-hypothesis). --- Eliminated hypothesis H66 (true-hypothesis). --- Eliminated hypothesis H67 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H69 (true-hypothesis). --- Eliminated hypothesis H70 (true-hypothesis). --- Eliminated hypothesis H71 (true-hypothesis). --- Eliminated hypothesis H72 (true-hypothesis). --- Eliminated hypothesis H73 (true-hypothesis). --- Eliminated hypothesis H74 (true-hypothesis). --- Eliminated hypothesis H75 (true-hypothesis). --- Eliminated hypothesis H76 (true-hypothesis). --- Eliminated hypothesis H77 (true-hypothesis). --- Eliminated hypothesis H87 (true-hypothesis). --- Eliminated hypothesis H88 (true-hypothesis). --- Eliminated hypothesis H91 (true-hypothesis). --- Eliminated hypothesis H105 (true-hypothesis). --- Eliminated hypothesis H106 (true-hypothesis). --- Eliminated hypothesis H107 (true-hypothesis). --- Eliminated hypothesis H108 (true-hypothesis). --- Eliminated hypothesis H109 (true-hypothesis). --- Eliminated hypothesis H110 (true-hypothesis). --- Eliminated hypothesis H111 (true-hypothesis). --- Eliminated hypothesis H112 (true-hypothesis). --- Eliminated hypothesis H113 (true-hypothesis). --- Eliminated hypothesis H114 (true-hypothesis). --- Eliminated hypothesis H115 (true-hypothesis). --- Eliminated hypothesis H116 (true-hypothesis). --- Eliminated hypothesis H117 (true-hypothesis). --- Eliminated hypothesis H118 (true-hypothesis). --- Eliminated hypothesis H119 (true-hypothesis). --- Eliminated hypothesis H120 (true-hypothesis). --- Eliminated hypothesis H121 (true-hypothesis). --- Eliminated hypothesis H122 (true-hypothesis). --- Eliminated hypothesis H123 (true-hypothesis). --- Eliminated hypothesis H124 (true-hypothesis). --- Eliminated hypothesis H125 (true-hypothesis). --- Eliminated hypothesis H126 (true-hypothesis). --- Eliminated hypothesis H127 (true-hypothesis). --- Eliminated hypothesis H128 (true-hypothesis). --- Eliminated hypothesis H129 (true-hypothesis). --- Eliminated hypothesis H130 (true-hypothesis). --- Eliminated hypothesis H131 (true-hypothesis). --- Eliminated hypothesis H132 (true-hypothesis). --- Eliminated hypothesis H133 (true-hypothesis). --- Eliminated hypothesis H134 (true-hypothesis). --- Eliminated hypothesis H135 (true-hypothesis). --- Eliminated hypothesis H136 (true-hypothesis). --- Eliminated hypothesis H137 (true-hypothesis). --- Eliminated hypothesis H138 (true-hypothesis). --- Eliminated hypothesis H139 (true-hypothesis). --- Eliminated hypothesis H140 (true-hypothesis). --- Eliminated hypothesis H169 (true-hypothesis). --- Eliminated hypothesis H170 (true-hypothesis). --- Eliminated hypothesis H173 (true-hypothesis). --- Eliminated hypothesis H182 (true-hypothesis). --- Eliminated hypothesis H183 (true-hypothesis). --- Eliminated hypothesis H190 (true-hypothesis). --- Eliminated hypothesis H191 (true-hypothesis). --- Eliminated hypothesis H194 (true-hypothesis). 
--- Eliminated hypothesis H195 (true-hypothesis). --- Eliminated hypothesis H196 (true-hypothesis). --- Eliminated hypothesis H197 (true-hypothesis). --- Eliminated hypothesis H198 (true-hypothesis). --- Eliminated hypothesis H199 (true-hypothesis). --- Eliminated hypothesis H200 (true-hypothesis). --- Eliminated hypothesis H201 (true-hypothesis). --- Eliminated hypothesis H202 (true-hypothesis). --- Eliminated hypothesis H203 (true-hypothesis). --- Eliminated hypothesis H204 (true-hypothesis). --- Eliminated hypothesis H205 (true-hypothesis). --- Eliminated hypothesis H206 (true-hypothesis). --- Eliminated hypothesis H207 (true-hypothesis). --- Eliminated hypothesis H208 (true-hypothesis). --- Eliminated hypothesis H209 (true-hypothesis). --- Eliminated hypothesis H210 (true-hypothesis). --- Eliminated hypothesis H211 (true-hypothesis). --- Eliminated hypothesis H212 (true-hypothesis). --- Eliminated hypothesis H213 (true-hypothesis). --- Eliminated hypothesis H214 (true-hypothesis). --- Eliminated hypothesis H215 (true-hypothesis). --- Eliminated hypothesis H216 (true-hypothesis). --- Eliminated hypothesis H217 (true-hypothesis). --- Eliminated hypothesis H218 (true-hypothesis). --- Eliminated hypothesis H221 (true-hypothesis). --- Eliminated hypothesis H225 (true-hypothesis). --- Eliminated hypothesis H226 (true-hypothesis). --- Eliminated hypothesis H229 (true-hypothesis). --- Eliminated hypothesis H230 (true-hypothesis). --- Eliminated hypothesis H233 (true-hypothesis). --- Eliminated hypothesis H234 (true-hypothesis). --- Eliminated hypothesis H142 (true-hypothesis). --- Eliminated hypothesis H146 (true-hypothesis). --- Eliminated hypothesis H143 (true-hypothesis). --- Eliminated hypothesis H147 (true-hypothesis). --- Eliminated hypothesis H156 (true-hypothesis). --- Eliminated hypothesis H78 (true-hypothesis). --- Eliminated hypothesis H79 (true-hypothesis). --- Eliminated hypothesis H222 (true-hypothesis). --- Eliminated hypothesis H152 (true-hypothesis). --- Eliminated hypothesis H153 (true-hypothesis). --- Eliminated hypothesis H154 (true-hypothesis). --- Eliminated hypothesis H155 (true-hypothesis). --- Eliminated hypothesis H144 (true-hypothesis). --- Eliminated hypothesis H145 (true-hypothesis). --- Eliminated hypothesis H220 (duplicate of H189). --- Eliminated hypothesis H28 (duplicate of H9). --- Eliminated hypothesis H150 (duplicate of H83). --- Eliminated hypothesis H27 (duplicate of H12). --- Eliminated hypothesis H149 (duplicate of H86). --- Eliminated hypothesis H168 (duplicate of H158). --- Eliminated hypothesis H165 (duplicate of H160). --- Eliminated hypothesis H34 (duplicate of H32). --- Eliminated hypothesis H223 (duplicate of H2). --- Eliminated hypothesis H5 (redundant, given H237). --- Eliminated hypothesis H10 (redundant, given H29). --- Eliminated hypothesis H11 (redundant, given H26). --- Eliminated hypothesis H32 (redundant, given H219). --- Eliminated hypothesis H33 (redundant, given H232). --- Eliminated hypothesis H35 (redundant, given H189). --- Eliminated hypothesis H44 (redundant, given H39). --- Eliminated hypothesis H83 (redundant, given H82). --- Eliminated hypothesis H84 (redundant, given H82). --- Eliminated hypothesis H85 (redundant, given H148). --- Eliminated hypothesis H151 (redundant, given H82). --- Eliminated hypothesis H166 (redundant, given H161). --- Eliminated hypothesis H167 (redundant, given H157). --- Eliminated hypothesis H186 (redundant, given H3). --- Eliminated hypothesis H187 (redundant, given H192). 
--- Eliminated hypothesis H188 (redundant, given H219). --- Eliminated hypothesis H189 (redundant, given H232). --- Eliminated hypothesis H224 (redundant, given H192). --- Eliminated hypothesis H231 (redundant, given H219). --- Eliminated hypothesis H232 (redundant, given H3 & H185). -S- Eliminated hypothesis H6. This was achieved by replacing all occurrences of blocks_required by: (byte_count + 63) div 64. New H237: blocks_done + 1 < (byte_count + 63) div 64 New H235: (byte_count + 63) div 64 >= 1 New H236: (byte_count + 63) div 64 <= 4194304 -S- Substituted hypothesis H81. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( local_ctx__4)) by: fld_hash_bit_len(fld_h(local_ctx__3)). -S- Substituted hypothesis H100. This was achieved by replacing all occurrences of fld_x(local_ctx__4) by: fld_x(local_ctx__3). -S- Substituted hypothesis H101. This was achieved by replacing all occurrences of fld_b(local_ctx__4) by: fld_b(local_ctx__3). -S- Substituted hypothesis H162. This was achieved by replacing all occurrences of fld_byte_count(fld_h( local_ctx__5)) by: fld_byte_count(fld_h(local_ctx__4)). -S- Substituted hypothesis H159. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( local_ctx__5)) by: fld_hash_bit_len(fld_h(local_ctx__3)). +++ New H238: integer__size >= 0 +++ New H239: natural__size >= 0 +++ New H240: spark__unsigned__u6__size >= 0 +++ New H241: spark__unsigned__u7__size >= 0 +++ New H242: spark__unsigned__byte__size >= 0 +++ New H243: spark__unsigned__u16__size >= 0 +++ New H244: spark__unsigned__u32__size >= 0 +++ New H245: spark__unsigned__u64__size >= 0 +++ New H246: spark__crypto__word_count_t__size >= 0 +++ New H247: hash_bit_length__size >= 0 +++ New H248: initialized_hash_bit_length__size >= 0 +++ New H249: skein_512_state_words_index__size >= 0 +++ New H250: skein_512_block_bytes_count__size >= 0 +++ New H251: skein_512_block_bytes_index__size >= 0 +++ New H252: positive_block_512_count_t__size >= 0 +++ New H253: skein_512_context__size >= 0 +++ New H254: result__index__subtype__1__first <= result__index__subtype__1__last +++ New H255: context_header__size >= 0 +++ New H256: output_byte_count_t__size >= 0 +++ New H257: output_block_count_t__size >= 0 +++ New H258: positive_output_block_count_t__size >= 0 +++ New H259: result__index__subtype__1__first >= 0 +++ New H260: result__index__subtype__1__last >= 0 +++ New H261: result__index__subtype__1__last <= 2147483647 +++ New H262: result__index__subtype__1__first <= 2147483647 @@@@@@@@@@ VC: procedure_skein_512_final_22. @@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) *** Proved C3: fld_hash_bit_len(fld_h(local_ctx)) > 0 using hypothesis H1. -S- Applied substitution rule skein_512_fi_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New C1: blocks_done >= 0 New C4: blocks_done >= 0 -S- Applied substitution rule skein_512_fi_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New C2: blocks_done <= 18446744073709551615 New C5: blocks_done <= 18446744073709551615 -S- Applied substitution rule skein_512_fi_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H3: blocks_done * 64 < byte_count New H4: blocks_done * 64 < result__index__subtype__1__last + 1 -S- Applied substitution rule skein_512_fi_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New H9: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_fi_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H10: fld_byte_count(fld_h(ctx)) <= 2147483647 -S- Applied substitution rule skein_512_fi_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_fi_rules(43). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule skein_512_fi_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_fi_rules(49). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule skein_512_fi_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_fi_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_fi_rules(61). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule skein_512_fi_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. 
New H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_fi_rules(67). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule skein_512_fi_rules(83). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H11: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_fi_rules(84). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H12: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H26: fld_hash_bit_len(fld_h(ctx)) >= 1 -S- Applied substitution rule skein_512_fi_rules(89). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H27: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(94). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H28: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_fi_rules(99). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H29: fld_byte_count(fld_h(ctx)) <= 64 -S- Applied substitution rule skein_512_fi_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(104). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(130). This was achieved by replacing all occurrences of output_block_count_t__first by: 0. New H32: blocks_done >= 0 -S- Applied substitution rule skein_512_fi_rules(131). This was achieved by replacing all occurrences of output_block_count_t__last by: 4194304. New H33: blocks_done <= 4194304 *** Proved C1: blocks_done >= 0 using hypothesis H32. *** Proved C4: blocks_done >= 0 using hypothesis H32. *** Proved C2: blocks_done <= 18446744073709551615 using hypothesis H33. *** Proved C5: blocks_done <= 18446744073709551615 using hypothesis H33. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_23. 
@@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H36 has been replaced by "true". (It is already present, as H1) . --- Hypothesis H37 has been replaced by "true". (It is already present, as H34). --- Hypothesis H38 has been replaced by "true". (It is already present, as H35). %%% Simplified H40 on reading formula in, to give: %%% H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H41 on reading formula in, to give: %%% H41: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H59 has been replaced by "true". (It is already present, as H40). --- Hypothesis H60 has been replaced by "true". (It is already present, as H41). --- Hypothesis H61 has been replaced by "true". (It is already present, as H42). --- Hypothesis H62 has been replaced by "true". (It is already present, as H43). --- Hypothesis H63 has been replaced by "true". (It is already present, as H44). --- Hypothesis H64 has been replaced by "true". (It is already present, as H45). --- Hypothesis H67 has been replaced by "true". (It is already present, as H48). --- Hypothesis H68 has been replaced by "true". (It is already present, as H49). --- Hypothesis H70 has been replaced by "true". (It is already present, as H51). --- Hypothesis H71 has been replaced by "true". (It is already present, as H52). --- Hypothesis H72 has been replaced by "true". (It is already present, as H53). --- Hypothesis H73 has been replaced by "true". (It is already present, as H54). --- Hypothesis H74 has been replaced by "true". (It is already present, as H55). --- Hypothesis H75 has been replaced by "true". (It is already present, as H56). --- Hypothesis H76 has been replaced by "true". (It is already present, as H57). --- Hypothesis H77 has been replaced by "true". (It is already present, as H58). -S- Applied substitution rule skein_512_fi_rules(2). This was achieved by replacing all occurrences of skein_block_type_out by: 63. New C1: 63 >= spark__unsigned__u6__first New C2: 63 <= spark__unsigned__u6__last -S- Applied substitution rule skein_512_fi_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H48: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New C1: true -S- Applied substitution rule skein_512_fi_rules(43). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. 
New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H49: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= 63 New C2: true *** Proved C1: true *** Proved C2: true *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_24. @@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H36 has been replaced by "true". (It is already present, as H1) . --- Hypothesis H37 has been replaced by "true". (It is already present, as H34). --- Hypothesis H38 has been replaced by "true". (It is already present, as H35). %%% Simplified H40 on reading formula in, to give: %%% H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H41 on reading formula in, to give: %%% H41: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H59 has been replaced by "true". (It is already present, as H40). --- Hypothesis H60 has been replaced by "true". (It is already present, as H41). --- Hypothesis H61 has been replaced by "true". (It is already present, as H42). --- Hypothesis H62 has been replaced by "true". (It is already present, as H43). --- Hypothesis H63 has been replaced by "true". (It is already present, as H44). --- Hypothesis H64 has been replaced by "true". (It is already present, as H45). --- Hypothesis H67 has been replaced by "true". (It is already present, as H48). --- Hypothesis H68 has been replaced by "true". (It is already present, as H49). --- Hypothesis H70 has been replaced by "true". (It is already present, as H51). --- Hypothesis H71 has been replaced by "true". (It is already present, as H52). --- Hypothesis H72 has been replaced by "true". (It is already present, as H53). --- Hypothesis H73 has been replaced by "true". (It is already present, as H54). --- Hypothesis H74 has been replaced by "true". (It is already present, as H55). --- Hypothesis H75 has been replaced by "true". (It is already present, as H56). --- Hypothesis H76 has been replaced by "true". (It is already present, as H57). --- Hypothesis H77 has been replaced by "true". (It is already present, as H58). 
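The long runs of "-S- Applied substitution rule skein_512_fi_rules(...)" steps in this log do nothing more than replace the FDL names of type bounds and named constants (spark__unsigned__u64__last, skein_512_block_bytes_index__last, output_byte_count_t__last, skein_512_block_bytes_c, skein_block_type_out, and so on) by their literal values, after which hypotheses that have become "true", duplicated or redundant are discarded. The package below is a minimal sketch reconstructed purely from those literals: the names mirror the FDL symbols, the modular view of the unsigned types is an assumption, and the shipped Skein sources may declare these types and constants differently.

   --  Sketch only: bounds read off the substitution rules in this log.
   package Skein_Bounds_Sketch is
      type U6   is mod 2 ** 6;    --  0 .. 63
      type U7   is mod 2 ** 7;    --  0 .. 127
      type Byte is mod 2 ** 8;    --  0 .. 255
      type U16  is mod 2 ** 16;   --  0 .. 65535
      type U32  is mod 2 ** 32;   --  0 .. 4294967295
      type U64  is mod 2 ** 64;   --  0 .. 18446744073709551615

      Skein_512_Block_Bytes_C : constant := 64;   --  rule (1)
      Skein_Block_Type_Out    : constant := 63;   --  rule (2)

      subtype Hash_Bit_Length is Natural range 0 .. 2_147_483_640;
      --  2_147_483_640 = 8 * (2**28 - 1), the largest output byte count in bits
      subtype Initialized_Hash_Bit_Length is
        Hash_Bit_Length range 1 .. Hash_Bit_Length'Last;

      subtype Skein_512_State_Words_Index is Natural range 0 .. 7;
      subtype Skein_512_Block_Bytes_Count is
        Natural range 0 .. Skein_512_Block_Bytes_C;
      subtype Skein_512_Block_Bytes_Index is
        Natural range 0 .. Skein_512_Block_Bytes_C - 1;

      subtype Positive_Block_512_Count_T is Positive range 1 .. 2 ** 25 - 1;
      subtype Output_Byte_Count_T        is Positive range 1 .. 2 ** 28 - 1;
      subtype Output_Block_Count_T       is Natural  range 0 .. 2 ** 22;
      subtype Positive_Output_Block_Count_T is
        Output_Block_Count_T range 1 .. Output_Block_Count_T'Last;
   end Skein_Bounds_Sketch;

With the bounds laid out this way, the arithmetic in the later steps of the log can be checked by hand.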
%%% Simplified H80 on reading formula in, to give: %%% H80: fld_h(local_ctx__4) = upf_tweak_words(upf_byte_count(fld_h( local_ctx__3), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_out, first_block := true, final_block := true)) %%% Simplified H103 on reading formula in, to give: %%% H103: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H104 on reading formula in, to give: %%% H104: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H105 has been replaced by "true". (It is already present, as H83). --- Hypothesis H106 has been replaced by "true". (It is already present, as H84). --- Hypothesis H107 has been replaced by "true". (It is already present, as H85). --- Hypothesis H108 has been replaced by "true". (It is already present, as H86). --- Hypothesis H111 has been replaced by "true". (It is already present, as H89). --- Hypothesis H112 has been replaced by "true". (It is already present, as H90). --- Hypothesis H114 has been replaced by "true". (It is already present, as H92). --- Hypothesis H115 has been replaced by "true". (It is already present, as H93). --- Hypothesis H116 has been replaced by "true". (It is already present, as H94). --- Hypothesis H117 has been replaced by "true". (It is already present, as H95). --- Hypothesis H118 has been replaced by "true". (It is already present, as H96). --- Hypothesis H119 has been replaced by "true". (It is already present, as H97). --- Hypothesis H120 has been replaced by "true". (It is already present, as H98). --- Hypothesis H121 has been replaced by "true". (It is already present, as H99). --- Hypothesis H122 has been replaced by "true". (It is already present, as H103). --- Hypothesis H123 has been replaced by "true". (It is already present, as H104). --- Hypothesis H124 has been replaced by "true". (It is already present, as H83). --- Hypothesis H125 has been replaced by "true". (It is already present, as H84). --- Hypothesis H126 has been replaced by "true". (It is already present, as H85). --- Hypothesis H127 has been replaced by "true". (It is already present, as H86). --- Hypothesis H130 has been replaced by "true". (It is already present, as H89). --- Hypothesis H131 has been replaced by "true". (It is already present, as H90). --- Hypothesis H133 has been replaced by "true". (It is already present, as H92). --- Hypothesis H134 has been replaced by "true". (It is already present, as H93). --- Hypothesis H135 has been replaced by "true". (It is already present, as H94). --- Hypothesis H136 has been replaced by "true". (It is already present, as H95). --- Hypothesis H137 has been replaced by "true". (It is already present, as H96). --- Hypothesis H138 has been replaced by "true". (It is already present, as H97). --- Hypothesis H139 has been replaced by "true". (It is already present, as H98). --- Hypothesis H140 has been replaced by "true". (It is already present, as H99). 
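Hypothesis H80 above is the FDL image of the update that starts the output transform: the buffered byte count of the context header is reset to zero and the tweak words are replaced wholesale by an aggregate whose Field_Type is Skein_Block_Type_Out (63) and whose First_Block and Final_Block flags are both set. The following is a minimal Ada sketch of that update, reusing the hypothetical Skein_Bounds_Sketch package above and taking the component names and types straight from the mk__tweak_value term and the range hypotheses H89 .. H99; it is not the shipped skein.adb.

   with Skein_Bounds_Sketch; use Skein_Bounds_Sketch;

   procedure Tweak_For_Output_Sketch is

      --  Hypothetical record; components as named in mk__tweak_value above.
      type Tweak_Value is record
         Byte_Count_LSB : U64;
         Byte_Count_MSB : U32;
         Reserved       : U16;
         Tree_Level     : U7;
         Bit_Pad        : Boolean;
         Field_Type     : U6;
         First_Block    : Boolean;
         Final_Block    : Boolean;
      end record;

      Byte_Count  : Skein_512_Block_Bytes_Count := Skein_512_Block_Bytes_C;
      Tweak_Words : Tweak_Value;

   begin
      --  upf_byte_count (fld_h (local_ctx__3), 0) ...
      Byte_Count := 0;

      --  ... then upf_tweak_words (..., mk__tweak_value (...))
      Tweak_Words :=
        (Byte_Count_LSB => 0,
         Byte_Count_MSB => 0,
         Reserved       => 0,
         Tree_Level     => 0,
         Bit_Pad        => False,
         Field_Type     => Skein_Block_Type_Out,
         First_Block    => True,
         Final_Block    => True);
   end Tweak_For_Output_Sketch;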
%%% Simplified H141 on reading formula in, to give: %%% H141: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4), [i___1]) <= spark__unsigned__byte__last) %%% Simplified C12 on reading formula in, to give: %%% C12: 63 <= skein_512_block_bytes_index__last %%% Simplified C13 on reading formula in, to give: %%% C13: 63 <= skein_512_block_bytes_index__last %%% Simplified C15 on reading formula in, to give: %%% C15: 63 <= natural__last *** Proved C1: 8 >= natural__first using hypotheses H82 & H83. *** Proved C5: 0 >= natural__first using hypotheses H82 & H83. *** Proved C6: 0 <= natural__last using hypotheses H82 & H84. -S- Applied substitution rule skein_512_fi_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H10: fld_byte_count(fld_h(ctx)) <= 2147483647 New H43: fld_byte_count(fld_h(local_ctx__3)) <= 2147483647 New H84: fld_byte_count(fld_h(local_ctx__4)) <= 2147483647 New C2: true New C14: skein_512_block_bytes_index__last <= 2147483647 New C15: true -S- Applied substitution rule skein_512_fi_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New C3: true -S- Applied substitution rule skein_512_fi_rules(109). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New C4: true -S- Applied substitution rule skein_512_fi_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H26: fld_hash_bit_len(fld_h(ctx)) >= 1 New C7: fld_hash_bit_len(fld_h(local_ctx__4)) >= 1 -S- Applied substitution rule skein_512_fi_rules(89). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H27: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New C8: fld_hash_bit_len(fld_h(local_ctx__4)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H28: fld_byte_count(fld_h(ctx)) >= 0 New C9: fld_byte_count(fld_h(local_ctx__4)) >= 0 -S- Applied substitution rule skein_512_fi_rules(99). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H29: fld_byte_count(fld_h(ctx)) <= 64 New C10: fld_byte_count(fld_h(local_ctx__4)) <= 64 -S- Applied substitution rule skein_512_fi_rules(104). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. 
New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2] ) <= spark__unsigned__byte__last) New H103: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4), [i___2] ) <= spark__unsigned__byte__last) New H141: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= 63 -> spark__unsigned__byte__first <= element( fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4), [i___1] ) <= spark__unsigned__byte__last) New C12: true New C13: true New C14: true *** Proved C2: true *** Proved C15: true *** Proved C3: true *** Proved C4: true *** Proved C7: fld_hash_bit_len(fld_h(local_ctx__4)) >= 1 using hypotheses H39 & H81. *** Proved C9: fld_byte_count(fld_h(local_ctx__4)) >= 0 using hypothesis H82. *** Proved C10: fld_byte_count(fld_h(local_ctx__4)) <= 64 using hypothesis H82. *** Proved C12: true *** Proved C13: true *** Proved C14: true -S- Applied substitution rule skein_512_fi_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H3: blocks_done * 64 < byte_count New H4: blocks_done * 64 < result__index__subtype__1__last + 1 -S- Applied substitution rule skein_512_fi_rules(2). This was achieved by replacing all occurrences of skein_block_type_out by: 63. New H78: 63 >= spark__unsigned__u6__first New H79: 63 <= spark__unsigned__u6__last New H80: fld_h(local_ctx__4) = upf_tweak_words(upf_byte_count(fld_h( local_ctx__3), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := 63, first_block := true, final_block := true)) -S- Applied substitution rule skein_512_fi_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New H9: fld_byte_count(fld_h(ctx)) >= 0 New H42: fld_byte_count(fld_h(local_ctx__3)) >= 0 New H83: fld_byte_count(fld_h(local_ctx__4)) >= 0 -S- Applied substitution rule skein_512_fi_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H48: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H89: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H78: true -S- Applied substitution rule skein_512_fi_rules(43). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H49: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= 63 New H90: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= 63 New H79: true -S- Applied substitution rule skein_512_fi_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H51: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H92: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= 0 -S- Applied substitution rule skein_512_fi_rules(49). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. 
New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H52: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= 127 New H93: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= 127 -S- Applied substitution rule skein_512_fi_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last) New H103: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last) New H141: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4), [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_fi_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2]) <= 255) New H103: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4), [i___2]) <= 255) New H141: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4), [i___1]) <= 255) -S- Applied substitution rule skein_512_fi_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H53: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H94: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= 0 -S- Applied substitution rule skein_512_fi_rules(61). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H54: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= 65535 New H95: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= 65535 -S- Applied substitution rule skein_512_fi_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H55: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H96: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= 0 -S- Applied substitution rule skein_512_fi_rules(67). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H56: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= 4294967295 New H97: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= 4294967295 -S- Applied substitution rule skein_512_fi_rules(72). 
This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H34: blocks_done >= 0 New H41: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1] ) <= spark__unsigned__u64__last) New H57: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H98: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H104: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4), [i___1] ) <= spark__unsigned__u64__last) -S- Applied substitution rule skein_512_fi_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H35: blocks_done <= 18446744073709551615 New H58: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= 18446744073709551615 New H99: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= 18446744073709551615 New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H41: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1] ) <= 18446744073709551615) New H104: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4), [i___1] ) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(83). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H11: fld_hash_bit_len(fld_h(ctx)) >= 0 New H44: fld_hash_bit_len(fld_h(local_ctx__3)) >= 0 New H85: fld_hash_bit_len(fld_h(local_ctx__4)) >= 0 -S- Applied substitution rule skein_512_fi_rules(84). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H12: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H45: fld_hash_bit_len(fld_h(local_ctx__3)) <= 2147483640 New H86: fld_hash_bit_len(fld_h(local_ctx__4)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. 
New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H41: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__3) , [i___1]) and element(fld_x(local_ctx__3), [i___1]) <= 18446744073709551615) New H104: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__4) , [i___1]) and element(fld_x(local_ctx__4), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(94). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H41: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3) , [i___1]) <= 18446744073709551615) New H104: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4) , [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H40: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3) , [i___2]) <= 255) New H103: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4) , [i___2]) <= 255) New H141: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4) , [i___1]) <= 255) New C11: true -S- Applied substitution rule skein_512_fi_rules(130). This was achieved by replacing all occurrences of output_block_count_t__first by: 0. New H32: blocks_done >= 0 -S- Applied substitution rule skein_512_fi_rules(131). This was achieved by replacing all occurrences of output_block_count_t__last by: 4194304. New H33: blocks_done <= 4194304 *** Proved C8: fld_hash_bit_len(fld_h(local_ctx__4)) <= 2147483640 using hypothesis H86. *** Proved C11: true *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_25. @@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H36 has been replaced by "true". (It is already present, as H1) . --- Hypothesis H37 has been replaced by "true". (It is already present, as H34). --- Hypothesis H38 has been replaced by "true". (It is already present, as H35). 
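Earlier in this log, in the "-S- Eliminated hypothesis H6" step just before VC procedure_skein_512_final_22, blocks_required was rewritten as (byte_count + 63) div 64, the usual integer idiom for the ceiling of byte_count / 64. A minimal sketch of such a function over the hypothetical bounds package above, not the shipped code:

   with Skein_Bounds_Sketch; use Skein_Bounds_Sketch;

   --  Smallest number of 64-byte output blocks covering Byte_Count bytes.
   function Blocks_Required_Sketch
     (Byte_Count : Output_Byte_Count_T) return Positive_Output_Block_Count_T
   is
   begin
      return (Byte_Count + 63) / 64;
   end Blocks_Required_Sketch;

Spot checks agree with the rewritten hypotheses: byte_count = 1 and byte_count = 64 both give 1 block, 65 gives 2, and the maximum 268435455 gives exactly 4194304, which is why the rewritten H235 and H236 bound the quotient by 1 and 4194304, while the rewritten H237 keeps blocks_done + 1 strictly below it on this path.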
%%% Simplified H40 on reading formula in, to give: %%% H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H41 on reading formula in, to give: %%% H41: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H59 has been replaced by "true". (It is already present, as H40). --- Hypothesis H60 has been replaced by "true". (It is already present, as H41). --- Hypothesis H61 has been replaced by "true". (It is already present, as H42). --- Hypothesis H62 has been replaced by "true". (It is already present, as H43). --- Hypothesis H63 has been replaced by "true". (It is already present, as H44). --- Hypothesis H64 has been replaced by "true". (It is already present, as H45). --- Hypothesis H67 has been replaced by "true". (It is already present, as H48). --- Hypothesis H68 has been replaced by "true". (It is already present, as H49). --- Hypothesis H70 has been replaced by "true". (It is already present, as H51). --- Hypothesis H71 has been replaced by "true". (It is already present, as H52). --- Hypothesis H72 has been replaced by "true". (It is already present, as H53). --- Hypothesis H73 has been replaced by "true". (It is already present, as H54). --- Hypothesis H74 has been replaced by "true". (It is already present, as H55). --- Hypothesis H75 has been replaced by "true". (It is already present, as H56). --- Hypothesis H76 has been replaced by "true". (It is already present, as H57). --- Hypothesis H77 has been replaced by "true". (It is already present, as H58). %%% Simplified H80 on reading formula in, to give: %%% H80: fld_h(local_ctx__4) = upf_tweak_words(upf_byte_count(fld_h( local_ctx__3), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_out, first_block := true, final_block := true)) %%% Simplified H103 on reading formula in, to give: %%% H103: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H104 on reading formula in, to give: %%% H104: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H105 has been replaced by "true". (It is already present, as H83). --- Hypothesis H106 has been replaced by "true". (It is already present, as H84). --- Hypothesis H107 has been replaced by "true". (It is already present, as H85). --- Hypothesis H108 has been replaced by "true". (It is already present, as H86). --- Hypothesis H111 has been replaced by "true". (It is already present, as H89). --- Hypothesis H112 has been replaced by "true". (It is already present, as H90). --- Hypothesis H114 has been replaced by "true". (It is already present, as H92). --- Hypothesis H115 has been replaced by "true". 
(It is already present, as H93). --- Hypothesis H116 has been replaced by "true". (It is already present, as H94). --- Hypothesis H117 has been replaced by "true". (It is already present, as H95). --- Hypothesis H118 has been replaced by "true". (It is already present, as H96). --- Hypothesis H119 has been replaced by "true". (It is already present, as H97). --- Hypothesis H120 has been replaced by "true". (It is already present, as H98). --- Hypothesis H121 has been replaced by "true". (It is already present, as H99). --- Hypothesis H122 has been replaced by "true". (It is already present, as H103). --- Hypothesis H123 has been replaced by "true". (It is already present, as H104). --- Hypothesis H124 has been replaced by "true". (It is already present, as H83). --- Hypothesis H125 has been replaced by "true". (It is already present, as H84). --- Hypothesis H126 has been replaced by "true". (It is already present, as H85). --- Hypothesis H127 has been replaced by "true". (It is already present, as H86). --- Hypothesis H130 has been replaced by "true". (It is already present, as H89). --- Hypothesis H131 has been replaced by "true". (It is already present, as H90). --- Hypothesis H133 has been replaced by "true". (It is already present, as H92). --- Hypothesis H134 has been replaced by "true". (It is already present, as H93). --- Hypothesis H135 has been replaced by "true". (It is already present, as H94). --- Hypothesis H136 has been replaced by "true". (It is already present, as H95). --- Hypothesis H137 has been replaced by "true". (It is already present, as H96). --- Hypothesis H138 has been replaced by "true". (It is already present, as H97). --- Hypothesis H139 has been replaced by "true". (It is already present, as H98). --- Hypothesis H140 has been replaced by "true". (It is already present, as H99). %%% Simplified H141 on reading formula in, to give: %%% H141: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4), [i___1]) <= spark__unsigned__byte__last) %%% Simplified H153 on reading formula in, to give: %%% H153: 63 <= skein_512_block_bytes_index__last %%% Simplified H154 on reading formula in, to give: %%% H154: 63 <= skein_512_block_bytes_index__last %%% Simplified H156 on reading formula in, to give: %%% H156: 63 <= natural__last %%% Simplified H163 on reading formula in, to give: %%% H163: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H164 on reading formula in, to give: %%% H164: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H182 has been replaced by "true". (It is already present, as H32). --- Hypothesis H183 has been replaced by "true". (It is already present, as H33). -S- Applied substitution rule skein_512_fi_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. 
New H9: fld_byte_count(fld_h(ctx)) >= 0 New H42: fld_byte_count(fld_h(local_ctx__3)) >= 0 New H83: fld_byte_count(fld_h(local_ctx__4)) >= 0 New H142: true New H146: true New H165: fld_byte_count(fld_h(local_ctx__5)) >= 0 New C1: byte_count - blocks_done * skein_512_block_bytes_c >= 0 -S- Applied substitution rule skein_512_fi_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H10: fld_byte_count(fld_h(ctx)) <= 2147483647 New H43: fld_byte_count(fld_h(local_ctx__3)) <= 2147483647 New H84: fld_byte_count(fld_h(local_ctx__4)) <= 2147483647 New H143: true New H147: true New H155: skein_512_block_bytes_index__last <= 2147483647 New H156: true New H166: fld_byte_count(fld_h(local_ctx__5)) <= 2147483647 New C2: byte_count - blocks_done * skein_512_block_bytes_c <= 2147483647 -S- Applied substitution rule skein_512_fi_rules(6). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New C3: blocks_done * skein_512_block_bytes_c >= - 2147483648 -S- Applied substitution rule skein_512_fi_rules(7). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New C4: blocks_done * skein_512_block_bytes_c <= 2147483647 *** Proved C1: byte_count - blocks_done * skein_512_block_bytes_c >= 0 using hypothesis H3. -S- Applied substitution rule skein_512_fi_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H3: blocks_done * 64 < byte_count New H4: blocks_done * 64 < result__index__subtype__1__last + 1 New C2: byte_count - blocks_done * 64 <= 2147483647 New C3: blocks_done * 64 >= - 2147483648 New C4: blocks_done * 64 <= 2147483647 -S- Applied substitution rule skein_512_fi_rules(2). This was achieved by replacing all occurrences of skein_block_type_out by: 63. New H78: 63 >= spark__unsigned__u6__first New H79: 63 <= spark__unsigned__u6__last New H80: fld_h(local_ctx__4) = upf_tweak_words(upf_byte_count(fld_h( local_ctx__3), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := 63, first_block := true, final_block := true)) -S- Applied substitution rule skein_512_fi_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H48: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H89: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H171: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) >= 0 New H78: true -S- Applied substitution rule skein_512_fi_rules(43). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H49: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= 63 New H90: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= 63 New H172: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) <= 63 New H79: true -S- Applied substitution rule skein_512_fi_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H51: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H92: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H174: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) >= 0 -S- Applied substitution rule skein_512_fi_rules(49). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. 
New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H52: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= 127 New H93: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= 127 New H175: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) <= 127 -S- Applied substitution rule skein_512_fi_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2] ) <= spark__unsigned__byte__last) New H103: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4), [i___2] ) <= spark__unsigned__byte__last) New H141: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4), [i___1] ) <= spark__unsigned__byte__last) New H163: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5), [i___2] ) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_fi_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2] ) <= 255) New H103: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4), [i___2] ) <= 255) New H141: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4), [i___1] ) <= 255) New H163: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5), [i___2] ) <= 255) -S- Applied substitution rule skein_512_fi_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H53: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H94: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H176: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) >= 0 -S- Applied substitution rule skein_512_fi_rules(61). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. 
New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H54: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= 65535 New H95: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= 65535 New H177: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) <= 65535 -S- Applied substitution rule skein_512_fi_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H55: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H96: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H178: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) >= 0 -S- Applied substitution rule skein_512_fi_rules(67). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H56: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= 4294967295 New H97: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= 4294967295 New H179: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) <= 4294967295 -S- Applied substitution rule skein_512_fi_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H34: blocks_done >= 0 New H41: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1] ) <= spark__unsigned__u64__last) New H57: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H98: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H104: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4), [i___1] ) <= spark__unsigned__u64__last) New H164: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5), [i___1] ) <= spark__unsigned__u64__last) New H180: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) >= 0 -S- Applied substitution rule skein_512_fi_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H35: blocks_done <= 18446744073709551615 New H58: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= 18446744073709551615 New H99: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= 18446744073709551615 New H181: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) <= 18446744073709551615 New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H41: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1] ) <= 18446744073709551615) New H104: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4), [i___1] ) <= 18446744073709551615) New H164: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5), [i___1] ) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(83). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H11: fld_hash_bit_len(fld_h(ctx)) >= 0 New H44: fld_hash_bit_len(fld_h(local_ctx__3)) >= 0 New H85: fld_hash_bit_len(fld_h(local_ctx__4)) >= 0 New H167: fld_hash_bit_len(fld_h(local_ctx__5)) >= 0 -S- Applied substitution rule skein_512_fi_rules(84). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H12: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H45: fld_hash_bit_len(fld_h(local_ctx__3)) <= 2147483640 New H86: fld_hash_bit_len(fld_h(local_ctx__4)) <= 2147483640 New H168: fld_hash_bit_len(fld_h(local_ctx__5)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H26: fld_hash_bit_len(fld_h(ctx)) >= 1 New H148: fld_hash_bit_len(fld_h(local_ctx__4)) >= 1 New H157: fld_hash_bit_len(fld_h(local_ctx__5)) >= 1 -S- Applied substitution rule skein_512_fi_rules(89). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H27: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H149: fld_hash_bit_len(fld_h(local_ctx__4)) <= 2147483640 New H158: fld_hash_bit_len(fld_h(local_ctx__5)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. 
New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H41: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__3) , [i___1]) and element(fld_x(local_ctx__3), [i___1]) <= 18446744073709551615) New H104: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__4) , [i___1]) and element(fld_x(local_ctx__4), [i___1]) <= 18446744073709551615) New H164: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__5) , [i___1]) and element(fld_x(local_ctx__5), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(94). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H41: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3) , [i___1]) <= 18446744073709551615) New H104: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4) , [i___1]) <= 18446744073709551615) New H164: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5) , [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H28: fld_byte_count(fld_h(ctx)) >= 0 New H150: fld_byte_count(fld_h(local_ctx__4)) >= 0 New H160: fld_byte_count(fld_h(local_ctx__5)) >= 0 -S- Applied substitution rule skein_512_fi_rules(99). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H29: fld_byte_count(fld_h(ctx)) <= 64 New H151: fld_byte_count(fld_h(local_ctx__4)) <= 64 New H161: fld_byte_count(fld_h(local_ctx__5)) <= 64 -S- Applied substitution rule skein_512_fi_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H152: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H40: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__3) , [i___2]) and element(fld_b(local_ctx__3), [i___2]) <= 255) New H103: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__4) , [i___2]) and element(fld_b(local_ctx__4), [i___2]) <= 255) New H141: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__4) , [i___1]) and element(fld_b(local_ctx__4), [i___1]) <= 255) New H163: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__5) , [i___2]) and element(fld_b(local_ctx__5), [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(104). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. 
New H153: true New H154: true New H155: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H40: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3) , [i___2]) <= 255) New H103: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4) , [i___2]) <= 255) New H141: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4) , [i___1]) <= 255) New H163: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5) , [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H144: true -S- Applied substitution rule skein_512_fi_rules(109). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H145: true -S- Applied substitution rule skein_512_fi_rules(125). This was achieved by replacing all occurrences of output_byte_count_t__first by: 1. New H184: byte_count >= 1 -S- Applied substitution rule skein_512_fi_rules(126). This was achieved by replacing all occurrences of output_byte_count_t__last by: 268435455. New H185: byte_count <= 268435455 -S- Applied substitution rule skein_512_fi_rules(130). This was achieved by replacing all occurrences of output_block_count_t__first by: 0. New H32: blocks_done >= 0 -S- Applied substitution rule skein_512_fi_rules(131). This was achieved by replacing all occurrences of output_block_count_t__last by: 4194304. New H33: blocks_done <= 4194304 *** Proved C3: blocks_done * 64 >= - 2147483648 using hypothesis H34. *** Proved C4: blocks_done * 64 <= 2147483647 using hypothesis H33. --- Eliminated hypothesis H13 (true-hypothesis). --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H17 (true-hypothesis). --- Eliminated hypothesis H36 (true-hypothesis). --- Eliminated hypothesis H37 (true-hypothesis). --- Eliminated hypothesis H38 (true-hypothesis). --- Eliminated hypothesis H46 (true-hypothesis). --- Eliminated hypothesis H47 (true-hypothesis). --- Eliminated hypothesis H50 (true-hypothesis). --- Eliminated hypothesis H59 (true-hypothesis). --- Eliminated hypothesis H60 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H63 (true-hypothesis). --- Eliminated hypothesis H64 (true-hypothesis). --- Eliminated hypothesis H65 (true-hypothesis). --- Eliminated hypothesis H66 (true-hypothesis). --- Eliminated hypothesis H67 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H69 (true-hypothesis). --- Eliminated hypothesis H70 (true-hypothesis). --- Eliminated hypothesis H71 (true-hypothesis). --- Eliminated hypothesis H72 (true-hypothesis). --- Eliminated hypothesis H73 (true-hypothesis). --- Eliminated hypothesis H74 (true-hypothesis). --- Eliminated hypothesis H75 (true-hypothesis). --- Eliminated hypothesis H76 (true-hypothesis). --- Eliminated hypothesis H77 (true-hypothesis). --- Eliminated hypothesis H87 (true-hypothesis). --- Eliminated hypothesis H88 (true-hypothesis). --- Eliminated hypothesis H91 (true-hypothesis). --- Eliminated hypothesis H105 (true-hypothesis). 
--- Eliminated hypothesis H106 (true-hypothesis). --- Eliminated hypothesis H107 (true-hypothesis). --- Eliminated hypothesis H108 (true-hypothesis). --- Eliminated hypothesis H109 (true-hypothesis). --- Eliminated hypothesis H110 (true-hypothesis). --- Eliminated hypothesis H111 (true-hypothesis). --- Eliminated hypothesis H112 (true-hypothesis). --- Eliminated hypothesis H113 (true-hypothesis). --- Eliminated hypothesis H114 (true-hypothesis). --- Eliminated hypothesis H115 (true-hypothesis). --- Eliminated hypothesis H116 (true-hypothesis). --- Eliminated hypothesis H117 (true-hypothesis). --- Eliminated hypothesis H118 (true-hypothesis). --- Eliminated hypothesis H119 (true-hypothesis). --- Eliminated hypothesis H120 (true-hypothesis). --- Eliminated hypothesis H121 (true-hypothesis). --- Eliminated hypothesis H122 (true-hypothesis). --- Eliminated hypothesis H123 (true-hypothesis). --- Eliminated hypothesis H124 (true-hypothesis). --- Eliminated hypothesis H125 (true-hypothesis). --- Eliminated hypothesis H126 (true-hypothesis). --- Eliminated hypothesis H127 (true-hypothesis). --- Eliminated hypothesis H128 (true-hypothesis). --- Eliminated hypothesis H129 (true-hypothesis). --- Eliminated hypothesis H130 (true-hypothesis). --- Eliminated hypothesis H131 (true-hypothesis). --- Eliminated hypothesis H132 (true-hypothesis). --- Eliminated hypothesis H133 (true-hypothesis). --- Eliminated hypothesis H134 (true-hypothesis). --- Eliminated hypothesis H135 (true-hypothesis). --- Eliminated hypothesis H136 (true-hypothesis). --- Eliminated hypothesis H137 (true-hypothesis). --- Eliminated hypothesis H138 (true-hypothesis). --- Eliminated hypothesis H139 (true-hypothesis). --- Eliminated hypothesis H140 (true-hypothesis). --- Eliminated hypothesis H142 (true-hypothesis). --- Eliminated hypothesis H143 (true-hypothesis). --- Eliminated hypothesis H146 (true-hypothesis). --- Eliminated hypothesis H147 (true-hypothesis). --- Eliminated hypothesis H156 (true-hypothesis). --- Eliminated hypothesis H169 (true-hypothesis). --- Eliminated hypothesis H170 (true-hypothesis). --- Eliminated hypothesis H173 (true-hypothesis). --- Eliminated hypothesis H182 (true-hypothesis). --- Eliminated hypothesis H183 (true-hypothesis). --- Eliminated hypothesis H78 (true-hypothesis). --- Eliminated hypothesis H79 (true-hypothesis). --- Eliminated hypothesis H152 (true-hypothesis). --- Eliminated hypothesis H153 (true-hypothesis). --- Eliminated hypothesis H154 (true-hypothesis). --- Eliminated hypothesis H155 (true-hypothesis). --- Eliminated hypothesis H144 (true-hypothesis). --- Eliminated hypothesis H145 (true-hypothesis). --- Eliminated hypothesis H28 (duplicate of H9). --- Eliminated hypothesis H150 (duplicate of H83). --- Eliminated hypothesis H27 (duplicate of H12). --- Eliminated hypothesis H149 (duplicate of H86). --- Eliminated hypothesis H168 (duplicate of H158). --- Eliminated hypothesis H165 (duplicate of H160). --- Eliminated hypothesis H34 (duplicate of H32). --- Eliminated hypothesis H10 (redundant, given H29). --- Eliminated hypothesis H11 (redundant, given H26). --- Eliminated hypothesis H33 (redundant, given H3 & H185). --- Eliminated hypothesis H35 (redundant, given H3 & H185). --- Eliminated hypothesis H44 (redundant, given H39). --- Eliminated hypothesis H83 (redundant, given H82). --- Eliminated hypothesis H84 (redundant, given H82). --- Eliminated hypothesis H85 (redundant, given H148). --- Eliminated hypothesis H151 (redundant, given H82). 
--- Eliminated hypothesis H166 (redundant, given H161). --- Eliminated hypothesis H167 (redundant, given H157). -S- Eliminated hypothesis H6. This was achieved by replacing all occurrences of blocks_required by: (byte_count + 63) div 64. New H5: blocks_done < (byte_count + 63) div 64 -S- Substituted hypothesis H81. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( local_ctx__4)) by: fld_hash_bit_len(fld_h(local_ctx__3)). -S- Substituted hypothesis H100. This was achieved by replacing all occurrences of fld_x(local_ctx__4) by: fld_x(local_ctx__3). -S- Substituted hypothesis H101. This was achieved by replacing all occurrences of fld_b(local_ctx__4) by: fld_b(local_ctx__3). -S- Substituted hypothesis H162. This was achieved by replacing all occurrences of fld_byte_count(fld_h( local_ctx__5)) by: fld_byte_count(fld_h(local_ctx__4)). -S- Substituted hypothesis H159. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( local_ctx__5)) by: fld_hash_bit_len(fld_h(local_ctx__3)). *** Proved C2: byte_count - blocks_done * 64 <= 2147483647 using hypotheses H34 & H185. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_26. @@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H36 has been replaced by "true". (It is already present, as H1) . --- Hypothesis H37 has been replaced by "true". (It is already present, as H34). --- Hypothesis H38 has been replaced by "true". (It is already present, as H35). %%% Simplified H40 on reading formula in, to give: %%% H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H41 on reading formula in, to give: %%% H41: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H59 has been replaced by "true". (It is already present, as H40). --- Hypothesis H60 has been replaced by "true". (It is already present, as H41). --- Hypothesis H61 has been replaced by "true". (It is already present, as H42). --- Hypothesis H62 has been replaced by "true". (It is already present, as H43). --- Hypothesis H63 has been replaced by "true". (It is already present, as H44). --- Hypothesis H64 has been replaced by "true". (It is already present, as H45). --- Hypothesis H67 has been replaced by "true". (It is already present, as H48). --- Hypothesis H68 has been replaced by "true". (It is already present, as H49). --- Hypothesis H70 has been replaced by "true". (It is already present, as H51). --- Hypothesis H71 has been replaced by "true". (It is already present, as H52). --- Hypothesis H72 has been replaced by "true". 
(It is already present, as H53). --- Hypothesis H73 has been replaced by "true". (It is already present, as H54). --- Hypothesis H74 has been replaced by "true". (It is already present, as H55). --- Hypothesis H75 has been replaced by "true". (It is already present, as H56). --- Hypothesis H76 has been replaced by "true". (It is already present, as H57). --- Hypothesis H77 has been replaced by "true". (It is already present, as H58). %%% Simplified H80 on reading formula in, to give: %%% H80: fld_h(local_ctx__4) = upf_tweak_words(upf_byte_count(fld_h( local_ctx__3), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_out, first_block := true, final_block := true)) %%% Simplified H103 on reading formula in, to give: %%% H103: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H104 on reading formula in, to give: %%% H104: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H105 has been replaced by "true". (It is already present, as H83). --- Hypothesis H106 has been replaced by "true". (It is already present, as H84). --- Hypothesis H107 has been replaced by "true". (It is already present, as H85). --- Hypothesis H108 has been replaced by "true". (It is already present, as H86). --- Hypothesis H111 has been replaced by "true". (It is already present, as H89). --- Hypothesis H112 has been replaced by "true". (It is already present, as H90). --- Hypothesis H114 has been replaced by "true". (It is already present, as H92). --- Hypothesis H115 has been replaced by "true". (It is already present, as H93). --- Hypothesis H116 has been replaced by "true". (It is already present, as H94). --- Hypothesis H117 has been replaced by "true". (It is already present, as H95). --- Hypothesis H118 has been replaced by "true". (It is already present, as H96). --- Hypothesis H119 has been replaced by "true". (It is already present, as H97). --- Hypothesis H120 has been replaced by "true". (It is already present, as H98). --- Hypothesis H121 has been replaced by "true". (It is already present, as H99). --- Hypothesis H122 has been replaced by "true". (It is already present, as H103). --- Hypothesis H123 has been replaced by "true". (It is already present, as H104). --- Hypothesis H124 has been replaced by "true". (It is already present, as H83). --- Hypothesis H125 has been replaced by "true". (It is already present, as H84). --- Hypothesis H126 has been replaced by "true". (It is already present, as H85). --- Hypothesis H127 has been replaced by "true". (It is already present, as H86). --- Hypothesis H130 has been replaced by "true". (It is already present, as H89). --- Hypothesis H131 has been replaced by "true". (It is already present, as H90). --- Hypothesis H133 has been replaced by "true". (It is already present, as H92). --- Hypothesis H134 has been replaced by "true". (It is already present, as H93). --- Hypothesis H135 has been replaced by "true". (It is already present, as H94). --- Hypothesis H136 has been replaced by "true". (It is already present, as H95). 
--- Hypothesis H137 has been replaced by "true". (It is already present, as H96). --- Hypothesis H138 has been replaced by "true". (It is already present, as H97). --- Hypothesis H139 has been replaced by "true". (It is already present, as H98). --- Hypothesis H140 has been replaced by "true". (It is already present, as H99). %%% Simplified H141 on reading formula in, to give: %%% H141: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4), [i___1]) <= spark__unsigned__byte__last) %%% Simplified H153 on reading formula in, to give: %%% H153: 63 <= skein_512_block_bytes_index__last %%% Simplified H154 on reading formula in, to give: %%% H154: 63 <= skein_512_block_bytes_index__last %%% Simplified H156 on reading formula in, to give: %%% H156: 63 <= natural__last %%% Simplified H163 on reading formula in, to give: %%% H163: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H164 on reading formula in, to give: %%% H164: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H182 has been replaced by "true". (It is already present, as H32). --- Hypothesis H183 has been replaced by "true". (It is already present, as H33). --- Hypothesis H190 has been replaced by "true". (It is already present, as H186). --- Hypothesis H191 has been replaced by "true". (It is already present, as H187). *** Proved C2: skein_512_block_bytes_c <= natural__last using hypotheses H187 & H192. -S- Applied substitution rule skein_512_fi_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H3: blocks_done * 64 < byte_count New H4: blocks_done * 64 < result__index__subtype__1__last + 1 New H186: byte_count - blocks_done * 64 >= natural__first New H187: byte_count - blocks_done * 64 <= natural__last New H188: blocks_done * 64 >= integer__base__first New H189: blocks_done * 64 <= integer__base__last New H192: byte_count - blocks_done * 64 >= 64 New C1: 64 >= natural__first -S- Applied substitution rule skein_512_fi_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New H9: fld_byte_count(fld_h(ctx)) >= 0 New H42: fld_byte_count(fld_h(local_ctx__3)) >= 0 New H83: fld_byte_count(fld_h(local_ctx__4)) >= 0 New H142: true New H146: true New H165: fld_byte_count(fld_h(local_ctx__5)) >= 0 New H186: byte_count - blocks_done * 64 >= 0 New C1: true *** Proved C1: true *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_27. 
@@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H36 has been replaced by "true". (It is already present, as H1) . --- Hypothesis H37 has been replaced by "true". (It is already present, as H34). --- Hypothesis H38 has been replaced by "true". (It is already present, as H35). %%% Simplified H40 on reading formula in, to give: %%% H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H41 on reading formula in, to give: %%% H41: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H59 has been replaced by "true". (It is already present, as H40). --- Hypothesis H60 has been replaced by "true". (It is already present, as H41). --- Hypothesis H61 has been replaced by "true". (It is already present, as H42). --- Hypothesis H62 has been replaced by "true". (It is already present, as H43). --- Hypothesis H63 has been replaced by "true". (It is already present, as H44). --- Hypothesis H64 has been replaced by "true". (It is already present, as H45). --- Hypothesis H67 has been replaced by "true". (It is already present, as H48). --- Hypothesis H68 has been replaced by "true". (It is already present, as H49). --- Hypothesis H70 has been replaced by "true". (It is already present, as H51). --- Hypothesis H71 has been replaced by "true". (It is already present, as H52). --- Hypothesis H72 has been replaced by "true". (It is already present, as H53). --- Hypothesis H73 has been replaced by "true". (It is already present, as H54). --- Hypothesis H74 has been replaced by "true". (It is already present, as H55). --- Hypothesis H75 has been replaced by "true". (It is already present, as H56). --- Hypothesis H76 has been replaced by "true". (It is already present, as H57). --- Hypothesis H77 has been replaced by "true". (It is already present, as H58). 
%%% Simplified H80 on reading formula in, to give: %%% H80: fld_h(local_ctx__4) = upf_tweak_words(upf_byte_count(fld_h( local_ctx__3), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_out, first_block := true, final_block := true)) %%% Simplified H103 on reading formula in, to give: %%% H103: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H104 on reading formula in, to give: %%% H104: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H105 has been replaced by "true". (It is already present, as H83). --- Hypothesis H106 has been replaced by "true". (It is already present, as H84). --- Hypothesis H107 has been replaced by "true". (It is already present, as H85). --- Hypothesis H108 has been replaced by "true". (It is already present, as H86). --- Hypothesis H111 has been replaced by "true". (It is already present, as H89). --- Hypothesis H112 has been replaced by "true". (It is already present, as H90). --- Hypothesis H114 has been replaced by "true". (It is already present, as H92). --- Hypothesis H115 has been replaced by "true". (It is already present, as H93). --- Hypothesis H116 has been replaced by "true". (It is already present, as H94). --- Hypothesis H117 has been replaced by "true". (It is already present, as H95). --- Hypothesis H118 has been replaced by "true". (It is already present, as H96). --- Hypothesis H119 has been replaced by "true". (It is already present, as H97). --- Hypothesis H120 has been replaced by "true". (It is already present, as H98). --- Hypothesis H121 has been replaced by "true". (It is already present, as H99). --- Hypothesis H122 has been replaced by "true". (It is already present, as H103). --- Hypothesis H123 has been replaced by "true". (It is already present, as H104). --- Hypothesis H124 has been replaced by "true". (It is already present, as H83). --- Hypothesis H125 has been replaced by "true". (It is already present, as H84). --- Hypothesis H126 has been replaced by "true". (It is already present, as H85). --- Hypothesis H127 has been replaced by "true". (It is already present, as H86). --- Hypothesis H130 has been replaced by "true". (It is already present, as H89). --- Hypothesis H131 has been replaced by "true". (It is already present, as H90). --- Hypothesis H133 has been replaced by "true". (It is already present, as H92). --- Hypothesis H134 has been replaced by "true". (It is already present, as H93). --- Hypothesis H135 has been replaced by "true". (It is already present, as H94). --- Hypothesis H136 has been replaced by "true". (It is already present, as H95). --- Hypothesis H137 has been replaced by "true". (It is already present, as H96). --- Hypothesis H138 has been replaced by "true". (It is already present, as H97). --- Hypothesis H139 has been replaced by "true". (It is already present, as H98). --- Hypothesis H140 has been replaced by "true". (It is already present, as H99). 
%%% Simplified H141 on reading formula in, to give: %%% H141: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4), [i___1]) <= spark__unsigned__byte__last) %%% Simplified H153 on reading formula in, to give: %%% H153: 63 <= skein_512_block_bytes_index__last %%% Simplified H154 on reading formula in, to give: %%% H154: 63 <= skein_512_block_bytes_index__last %%% Simplified H156 on reading formula in, to give: %%% H156: 63 <= natural__last %%% Simplified H163 on reading formula in, to give: %%% H163: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H164 on reading formula in, to give: %%% H164: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H182 has been replaced by "true". (It is already present, as H32). --- Hypothesis H183 has been replaced by "true". (It is already present, as H33). --- Hypothesis H190 has been replaced by "true". (It is already present, as H186). --- Hypothesis H191 has been replaced by "true". (It is already present, as H187). %%% Simplified H195 on reading formula in, to give: %%% H195: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> spark__unsigned__byte__first <= element(result, [i___1]) and element( result, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H196 has been replaced by "true". (It is already present, as H32). --- Hypothesis H197 has been replaced by "true". (It is already present, as H33). --- Hypothesis H198 has been replaced by "true". (It is already present, as H163). --- Hypothesis H199 has been replaced by "true". (It is already present, as H164). --- Hypothesis H200 has been replaced by "true". (It is already present, as H165). --- Hypothesis H201 has been replaced by "true". (It is already present, as H166). --- Hypothesis H202 has been replaced by "true". (It is already present, as H167). --- Hypothesis H203 has been replaced by "true". (It is already present, as H168). --- Hypothesis H206 has been replaced by "true". (It is already present, as H171). --- Hypothesis H207 has been replaced by "true". (It is already present, as H172). --- Hypothesis H209 has been replaced by "true". (It is already present, as H174). --- Hypothesis H210 has been replaced by "true". (It is already present, as H175). --- Hypothesis H211 has been replaced by "true". (It is already present, as H176). --- Hypothesis H212 has been replaced by "true". (It is already present, as H177). --- Hypothesis H213 has been replaced by "true". (It is already present, as H178). --- Hypothesis H214 has been replaced by "true". (It is already present, as H179). --- Hypothesis H215 has been replaced by "true". (It is already present, as H180). --- Hypothesis H216 has been replaced by "true". (It is already present, as H181). --- Hypothesis H217 has been replaced by "true". (It is already present, as H193). --- Hypothesis H218 has been replaced by "true". (It is already present, as H194). 
*** Proved C1: skein_512_block_bytes_c >= natural__first using hypothesis H193. *** Proved C2: skein_512_block_bytes_c <= natural__last using hypothesis H194. *** Proved C5: result__index__subtype__1__first = 0 using hypothesis H30. *** Proved C9: blocks_done * skein_512_block_bytes_c >= integer__base__first using hypothesis H188. *** Proved C10: blocks_done * skein_512_block_bytes_c <= integer__base__last using hypothesis H189. -S- Applied substitution rule skein_512_fi_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New H9: fld_byte_count(fld_h(ctx)) >= 0 New H42: fld_byte_count(fld_h(local_ctx__3)) >= 0 New H83: fld_byte_count(fld_h(local_ctx__4)) >= 0 New H142: true New H146: true New H165: fld_byte_count(fld_h(local_ctx__5)) >= 0 New H186: byte_count - blocks_done * skein_512_block_bytes_c >= 0 New H193: skein_512_block_bytes_c >= 0 New C3: blocks_done * skein_512_block_bytes_c >= 0 -S- Applied substitution rule skein_512_fi_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H10: fld_byte_count(fld_h(ctx)) <= 2147483647 New H43: fld_byte_count(fld_h(local_ctx__3)) <= 2147483647 New H84: fld_byte_count(fld_h(local_ctx__4)) <= 2147483647 New H143: true New H147: true New H155: skein_512_block_bytes_index__last <= 2147483647 New H156: true New H166: fld_byte_count(fld_h(local_ctx__5)) <= 2147483647 New H187: byte_count - blocks_done * skein_512_block_bytes_c <= 2147483647 New H194: skein_512_block_bytes_c <= 2147483647 New C4: blocks_done * skein_512_block_bytes_c <= 2147483647 -S- Applied substitution rule skein_512_fi_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H3: blocks_done * 64 < byte_count New H4: blocks_done * 64 < result__index__subtype__1__last + 1 New H188: blocks_done * 64 >= integer__base__first New H189: blocks_done * 64 <= integer__base__last New H192: byte_count - blocks_done * 64 >= 64 New H186: byte_count - blocks_done * 64 >= 0 New H193: true New H187: byte_count - blocks_done * 64 <= 2147483647 New H194: true New C7: result__index__subtype__1__last >= blocks_done * 64 + 63 New C8: 64 <= (skein_512_state_words_index__last + 1) * 8 New C3: blocks_done * 64 >= 0 New C4: blocks_done * 64 <= 2147483647 -S- Applied substitution rule skein_512_fi_rules(2). This was achieved by replacing all occurrences of skein_block_type_out by: 63. New H78: 63 >= spark__unsigned__u6__first New H79: 63 <= spark__unsigned__u6__last New H80: fld_h(local_ctx__4) = upf_tweak_words(upf_byte_count(fld_h( local_ctx__3), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := 63, first_block := true, final_block := true)) -S- Applied substitution rule skein_512_fi_rules(6). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H188: blocks_done * 64 >= - 2147483648 -S- Applied substitution rule skein_512_fi_rules(7). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H189: blocks_done * 64 <= 2147483647 -S- Applied substitution rule skein_512_fi_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. 
New H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H48: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H89: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H171: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) >= 0 New H78: true -S- Applied substitution rule skein_512_fi_rules(43). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H49: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= 63 New H90: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= 63 New H172: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) <= 63 New H79: true -S- Applied substitution rule skein_512_fi_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H51: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H92: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H174: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) >= 0 -S- Applied substitution rule skein_512_fi_rules(49). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H52: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= 127 New H93: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= 127 New H175: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) <= 127 -S- Applied substitution rule skein_512_fi_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2] ) <= spark__unsigned__byte__last) New H103: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4), [i___2] ) <= spark__unsigned__byte__last) New H141: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4), [i___1] ) <= spark__unsigned__byte__last) New H163: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5), [i___2] ) <= spark__unsigned__byte__last) New H195: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> 0 <= element( result, [i___1]) and element(result, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_fi_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. 
New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2] ) <= 255) New H103: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4), [i___2] ) <= 255) New H141: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4), [i___1] ) <= 255) New H163: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5), [i___2] ) <= 255) New H195: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> 0 <= element( result, [i___1]) and element(result, [i___1]) <= 255) -S- Applied substitution rule skein_512_fi_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H53: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H94: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H176: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) >= 0 -S- Applied substitution rule skein_512_fi_rules(61). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H54: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= 65535 New H95: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= 65535 New H177: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) <= 65535 -S- Applied substitution rule skein_512_fi_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H55: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H96: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H178: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) >= 0 -S- Applied substitution rule skein_512_fi_rules(67). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H56: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= 4294967295 New H97: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= 4294967295 New H179: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) <= 4294967295 -S- Applied substitution rule skein_512_fi_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H34: blocks_done >= 0 New H41: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1] ) <= spark__unsigned__u64__last) New H57: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H98: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H104: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4), [i___1] ) <= spark__unsigned__u64__last) New H164: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5), [i___1] ) <= spark__unsigned__u64__last) New H180: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) >= 0 -S- Applied substitution rule skein_512_fi_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H35: blocks_done <= 18446744073709551615 New H58: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= 18446744073709551615 New H99: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= 18446744073709551615 New H181: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) <= 18446744073709551615 New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H41: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1] ) <= 18446744073709551615) New H104: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4), [i___1] ) <= 18446744073709551615) New H164: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5), [i___1] ) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(83). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H11: fld_hash_bit_len(fld_h(ctx)) >= 0 New H44: fld_hash_bit_len(fld_h(local_ctx__3)) >= 0 New H85: fld_hash_bit_len(fld_h(local_ctx__4)) >= 0 New H167: fld_hash_bit_len(fld_h(local_ctx__5)) >= 0 -S- Applied substitution rule skein_512_fi_rules(84). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H12: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H45: fld_hash_bit_len(fld_h(local_ctx__3)) <= 2147483640 New H86: fld_hash_bit_len(fld_h(local_ctx__4)) <= 2147483640 New H168: fld_hash_bit_len(fld_h(local_ctx__5)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(88). 
This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H26: fld_hash_bit_len(fld_h(ctx)) >= 1 New H148: fld_hash_bit_len(fld_h(local_ctx__4)) >= 1 New H157: fld_hash_bit_len(fld_h(local_ctx__5)) >= 1 -S- Applied substitution rule skein_512_fi_rules(89). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H27: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H149: fld_hash_bit_len(fld_h(local_ctx__4)) <= 2147483640 New H158: fld_hash_bit_len(fld_h(local_ctx__5)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H41: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__3) , [i___1]) and element(fld_x(local_ctx__3), [i___1]) <= 18446744073709551615) New H104: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__4) , [i___1]) and element(fld_x(local_ctx__4), [i___1]) <= 18446744073709551615) New H164: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__5) , [i___1]) and element(fld_x(local_ctx__5), [i___1]) <= 18446744073709551615) New C6: true -S- Applied substitution rule skein_512_fi_rules(94). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H41: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3) , [i___1]) <= 18446744073709551615) New H104: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4) , [i___1]) <= 18446744073709551615) New H164: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5) , [i___1]) <= 18446744073709551615) New C8: true -S- Applied substitution rule skein_512_fi_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H28: fld_byte_count(fld_h(ctx)) >= 0 New H150: fld_byte_count(fld_h(local_ctx__4)) >= 0 New H160: fld_byte_count(fld_h(local_ctx__5)) >= 0 -S- Applied substitution rule skein_512_fi_rules(99). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H29: fld_byte_count(fld_h(ctx)) <= 64 New H151: fld_byte_count(fld_h(local_ctx__4)) <= 64 New H161: fld_byte_count(fld_h(local_ctx__5)) <= 64 -S- Applied substitution rule skein_512_fi_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. 
New H152: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H40: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__3) , [i___2]) and element(fld_b(local_ctx__3), [i___2]) <= 255) New H103: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__4) , [i___2]) and element(fld_b(local_ctx__4), [i___2]) <= 255) New H141: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__4) , [i___1]) and element(fld_b(local_ctx__4), [i___1]) <= 255) New H163: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__5) , [i___2]) and element(fld_b(local_ctx__5), [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(104). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H153: true New H154: true New H155: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H40: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3) , [i___2]) <= 255) New H103: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4) , [i___2]) <= 255) New H141: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4) , [i___1]) <= 255) New H163: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5) , [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H144: true -S- Applied substitution rule skein_512_fi_rules(109). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H145: true -S- Applied substitution rule skein_512_fi_rules(125). This was achieved by replacing all occurrences of output_byte_count_t__first by: 1. New H184: byte_count >= 1 -S- Applied substitution rule skein_512_fi_rules(126). This was achieved by replacing all occurrences of output_byte_count_t__last by: 268435455. New H185: byte_count <= 268435455 -S- Applied substitution rule skein_512_fi_rules(130). This was achieved by replacing all occurrences of output_block_count_t__first by: 0. New H32: blocks_done >= 0 -S- Applied substitution rule skein_512_fi_rules(131). This was achieved by replacing all occurrences of output_block_count_t__last by: 4194304. New H33: blocks_done <= 4194304 *** Proved C3: blocks_done * 64 >= 0 using hypothesis H34. *** Proved C4: blocks_done * 64 <= 2147483647 using hypothesis H189. *** Proved C6: true *** Proved C8: true --- Eliminated hypothesis H13 (true-hypothesis). --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H17 (true-hypothesis). --- Eliminated hypothesis H36 (true-hypothesis). --- Eliminated hypothesis H37 (true-hypothesis). --- Eliminated hypothesis H38 (true-hypothesis). --- Eliminated hypothesis H46 (true-hypothesis). --- Eliminated hypothesis H47 (true-hypothesis). --- Eliminated hypothesis H50 (true-hypothesis). 
--- Eliminated hypothesis H59 (true-hypothesis). --- Eliminated hypothesis H60 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H63 (true-hypothesis). --- Eliminated hypothesis H64 (true-hypothesis). --- Eliminated hypothesis H65 (true-hypothesis). --- Eliminated hypothesis H66 (true-hypothesis). --- Eliminated hypothesis H67 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H69 (true-hypothesis). --- Eliminated hypothesis H70 (true-hypothesis). --- Eliminated hypothesis H71 (true-hypothesis). --- Eliminated hypothesis H72 (true-hypothesis). --- Eliminated hypothesis H73 (true-hypothesis). --- Eliminated hypothesis H74 (true-hypothesis). --- Eliminated hypothesis H75 (true-hypothesis). --- Eliminated hypothesis H76 (true-hypothesis). --- Eliminated hypothesis H77 (true-hypothesis). --- Eliminated hypothesis H87 (true-hypothesis). --- Eliminated hypothesis H88 (true-hypothesis). --- Eliminated hypothesis H91 (true-hypothesis). --- Eliminated hypothesis H105 (true-hypothesis). --- Eliminated hypothesis H106 (true-hypothesis). --- Eliminated hypothesis H107 (true-hypothesis). --- Eliminated hypothesis H108 (true-hypothesis). --- Eliminated hypothesis H109 (true-hypothesis). --- Eliminated hypothesis H110 (true-hypothesis). --- Eliminated hypothesis H111 (true-hypothesis). --- Eliminated hypothesis H112 (true-hypothesis). --- Eliminated hypothesis H113 (true-hypothesis). --- Eliminated hypothesis H114 (true-hypothesis). --- Eliminated hypothesis H115 (true-hypothesis). --- Eliminated hypothesis H116 (true-hypothesis). --- Eliminated hypothesis H117 (true-hypothesis). --- Eliminated hypothesis H118 (true-hypothesis). --- Eliminated hypothesis H119 (true-hypothesis). --- Eliminated hypothesis H120 (true-hypothesis). --- Eliminated hypothesis H121 (true-hypothesis). --- Eliminated hypothesis H122 (true-hypothesis). --- Eliminated hypothesis H123 (true-hypothesis). --- Eliminated hypothesis H124 (true-hypothesis). --- Eliminated hypothesis H125 (true-hypothesis). --- Eliminated hypothesis H126 (true-hypothesis). --- Eliminated hypothesis H127 (true-hypothesis). --- Eliminated hypothesis H128 (true-hypothesis). --- Eliminated hypothesis H129 (true-hypothesis). --- Eliminated hypothesis H130 (true-hypothesis). --- Eliminated hypothesis H131 (true-hypothesis). --- Eliminated hypothesis H132 (true-hypothesis). --- Eliminated hypothesis H133 (true-hypothesis). --- Eliminated hypothesis H134 (true-hypothesis). --- Eliminated hypothesis H135 (true-hypothesis). --- Eliminated hypothesis H136 (true-hypothesis). --- Eliminated hypothesis H137 (true-hypothesis). --- Eliminated hypothesis H138 (true-hypothesis). --- Eliminated hypothesis H139 (true-hypothesis). --- Eliminated hypothesis H140 (true-hypothesis). --- Eliminated hypothesis H142 (true-hypothesis). --- Eliminated hypothesis H143 (true-hypothesis). --- Eliminated hypothesis H146 (true-hypothesis). --- Eliminated hypothesis H147 (true-hypothesis). --- Eliminated hypothesis H156 (true-hypothesis). --- Eliminated hypothesis H169 (true-hypothesis). --- Eliminated hypothesis H170 (true-hypothesis). --- Eliminated hypothesis H173 (true-hypothesis). --- Eliminated hypothesis H182 (true-hypothesis). --- Eliminated hypothesis H183 (true-hypothesis). --- Eliminated hypothesis H190 (true-hypothesis). --- Eliminated hypothesis H191 (true-hypothesis). --- Eliminated hypothesis H193 (true-hypothesis). 
--- Eliminated hypothesis H194 (true-hypothesis). --- Eliminated hypothesis H196 (true-hypothesis). --- Eliminated hypothesis H197 (true-hypothesis). --- Eliminated hypothesis H198 (true-hypothesis). --- Eliminated hypothesis H199 (true-hypothesis). --- Eliminated hypothesis H200 (true-hypothesis). --- Eliminated hypothesis H201 (true-hypothesis). --- Eliminated hypothesis H202 (true-hypothesis). --- Eliminated hypothesis H203 (true-hypothesis). --- Eliminated hypothesis H204 (true-hypothesis). --- Eliminated hypothesis H205 (true-hypothesis). --- Eliminated hypothesis H206 (true-hypothesis). --- Eliminated hypothesis H207 (true-hypothesis). --- Eliminated hypothesis H208 (true-hypothesis). --- Eliminated hypothesis H209 (true-hypothesis). --- Eliminated hypothesis H210 (true-hypothesis). --- Eliminated hypothesis H211 (true-hypothesis). --- Eliminated hypothesis H212 (true-hypothesis). --- Eliminated hypothesis H213 (true-hypothesis). --- Eliminated hypothesis H214 (true-hypothesis). --- Eliminated hypothesis H215 (true-hypothesis). --- Eliminated hypothesis H216 (true-hypothesis). --- Eliminated hypothesis H217 (true-hypothesis). --- Eliminated hypothesis H218 (true-hypothesis). --- Eliminated hypothesis H78 (true-hypothesis). --- Eliminated hypothesis H79 (true-hypothesis). --- Eliminated hypothesis H152 (true-hypothesis). --- Eliminated hypothesis H153 (true-hypothesis). --- Eliminated hypothesis H154 (true-hypothesis). --- Eliminated hypothesis H155 (true-hypothesis). --- Eliminated hypothesis H144 (true-hypothesis). --- Eliminated hypothesis H145 (true-hypothesis). --- Eliminated hypothesis H28 (duplicate of H9). --- Eliminated hypothesis H150 (duplicate of H83). --- Eliminated hypothesis H27 (duplicate of H12). --- Eliminated hypothesis H149 (duplicate of H86). --- Eliminated hypothesis H168 (duplicate of H158). --- Eliminated hypothesis H165 (duplicate of H160). --- Eliminated hypothesis H34 (duplicate of H32). --- Eliminated hypothesis H3 (redundant, given H192). --- Eliminated hypothesis H4 (redundant, given H2 & H192). --- Eliminated hypothesis H10 (redundant, given H29). --- Eliminated hypothesis H11 (redundant, given H26). --- Eliminated hypothesis H35 (redundant, given H33). --- Eliminated hypothesis H44 (redundant, given H39). --- Eliminated hypothesis H83 (redundant, given H82). --- Eliminated hypothesis H84 (redundant, given H82). --- Eliminated hypothesis H85 (redundant, given H148). --- Eliminated hypothesis H151 (redundant, given H82). --- Eliminated hypothesis H166 (redundant, given H161). --- Eliminated hypothesis H167 (redundant, given H157). --- Eliminated hypothesis H184 (redundant, given H32 & H192). --- Eliminated hypothesis H186 (redundant, given H192). --- Eliminated hypothesis H188 (redundant, given H32). --- Eliminated hypothesis H189 (redundant, given H33). -S- Eliminated hypothesis H6. This was achieved by replacing all occurrences of blocks_required by: (byte_count + 63) div 64. New H5: blocks_done < (byte_count + 63) div 64 -S- Substituted hypothesis H81. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( local_ctx__4)) by: fld_hash_bit_len(fld_h(local_ctx__3)). -S- Substituted hypothesis H100. This was achieved by replacing all occurrences of fld_x(local_ctx__4) by: fld_x(local_ctx__3). -S- Substituted hypothesis H101. This was achieved by replacing all occurrences of fld_b(local_ctx__4) by: fld_b(local_ctx__3). -S- Substituted hypothesis H162. 
This was achieved by replacing all occurrences of fld_byte_count(fld_h( local_ctx__5)) by: fld_byte_count(fld_h(local_ctx__4)). -S- Substituted hypothesis H159. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( local_ctx__5)) by: fld_hash_bit_len(fld_h(local_ctx__3)). *** Proved C7: result__index__subtype__1__last >= blocks_done * 64 + 63 via its standard form, which is: Std.Fm C7: - (64 * blocks_done) + result__index__subtype__1__last > 62 using hypotheses H2 & H192. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_28. @@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H36 has been replaced by "true". (It is already present, as H1) . --- Hypothesis H37 has been replaced by "true". (It is already present, as H34). --- Hypothesis H38 has been replaced by "true". (It is already present, as H35). %%% Simplified H40 on reading formula in, to give: %%% H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H41 on reading formula in, to give: %%% H41: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H59 has been replaced by "true". (It is already present, as H40). --- Hypothesis H60 has been replaced by "true". (It is already present, as H41). --- Hypothesis H61 has been replaced by "true". (It is already present, as H42). --- Hypothesis H62 has been replaced by "true". (It is already present, as H43). --- Hypothesis H63 has been replaced by "true". (It is already present, as H44). --- Hypothesis H64 has been replaced by "true". (It is already present, as H45). --- Hypothesis H67 has been replaced by "true". (It is already present, as H48). --- Hypothesis H68 has been replaced by "true". (It is already present, as H49). --- Hypothesis H70 has been replaced by "true". (It is already present, as H51). --- Hypothesis H71 has been replaced by "true". (It is already present, as H52). --- Hypothesis H72 has been replaced by "true". (It is already present, as H53). --- Hypothesis H73 has been replaced by "true". (It is already present, as H54). --- Hypothesis H74 has been replaced by "true". (It is already present, as H55). --- Hypothesis H75 has been replaced by "true". (It is already present, as H56). --- Hypothesis H76 has been replaced by "true". (It is already present, as H57). --- Hypothesis H77 has been replaced by "true". (It is already present, as H58). 
%%% Simplified H80 on reading formula in, to give: %%% H80: fld_h(local_ctx__4) = upf_tweak_words(upf_byte_count(fld_h( local_ctx__3), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_out, first_block := true, final_block := true)) %%% Simplified H103 on reading formula in, to give: %%% H103: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H104 on reading formula in, to give: %%% H104: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H105 has been replaced by "true". (It is already present, as H83). --- Hypothesis H106 has been replaced by "true". (It is already present, as H84). --- Hypothesis H107 has been replaced by "true". (It is already present, as H85). --- Hypothesis H108 has been replaced by "true". (It is already present, as H86). --- Hypothesis H111 has been replaced by "true". (It is already present, as H89). --- Hypothesis H112 has been replaced by "true". (It is already present, as H90). --- Hypothesis H114 has been replaced by "true". (It is already present, as H92). --- Hypothesis H115 has been replaced by "true". (It is already present, as H93). --- Hypothesis H116 has been replaced by "true". (It is already present, as H94). --- Hypothesis H117 has been replaced by "true". (It is already present, as H95). --- Hypothesis H118 has been replaced by "true". (It is already present, as H96). --- Hypothesis H119 has been replaced by "true". (It is already present, as H97). --- Hypothesis H120 has been replaced by "true". (It is already present, as H98). --- Hypothesis H121 has been replaced by "true". (It is already present, as H99). --- Hypothesis H122 has been replaced by "true". (It is already present, as H103). --- Hypothesis H123 has been replaced by "true". (It is already present, as H104). --- Hypothesis H124 has been replaced by "true". (It is already present, as H83). --- Hypothesis H125 has been replaced by "true". (It is already present, as H84). --- Hypothesis H126 has been replaced by "true". (It is already present, as H85). --- Hypothesis H127 has been replaced by "true". (It is already present, as H86). --- Hypothesis H130 has been replaced by "true". (It is already present, as H89). --- Hypothesis H131 has been replaced by "true". (It is already present, as H90). --- Hypothesis H133 has been replaced by "true". (It is already present, as H92). --- Hypothesis H134 has been replaced by "true". (It is already present, as H93). --- Hypothesis H135 has been replaced by "true". (It is already present, as H94). --- Hypothesis H136 has been replaced by "true". (It is already present, as H95). --- Hypothesis H137 has been replaced by "true". (It is already present, as H96). --- Hypothesis H138 has been replaced by "true". (It is already present, as H97). --- Hypothesis H139 has been replaced by "true". (It is already present, as H98). --- Hypothesis H140 has been replaced by "true". (It is already present, as H99). 
%%% Simplified H141 on reading formula in, to give: %%% H141: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4), [i___1]) <= spark__unsigned__byte__last) %%% Simplified H153 on reading formula in, to give: %%% H153: 63 <= skein_512_block_bytes_index__last %%% Simplified H154 on reading formula in, to give: %%% H154: 63 <= skein_512_block_bytes_index__last %%% Simplified H156 on reading formula in, to give: %%% H156: 63 <= natural__last %%% Simplified H163 on reading formula in, to give: %%% H163: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H164 on reading formula in, to give: %%% H164: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H182 has been replaced by "true". (It is already present, as H32). --- Hypothesis H183 has been replaced by "true". (It is already present, as H33). --- Hypothesis H190 has been replaced by "true". (It is already present, as H186). --- Hypothesis H191 has been replaced by "true". (It is already present, as H187). %%% Simplified H193 on reading formula in, to give: %%% H193: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> spark__unsigned__byte__first <= element(result, [i___1]) and element( result, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H194 has been replaced by "true". (It is already present, as H32). --- Hypothesis H195 has been replaced by "true". (It is already present, as H33). --- Hypothesis H196 has been replaced by "true". (It is already present, as H163). --- Hypothesis H197 has been replaced by "true". (It is already present, as H164). --- Hypothesis H198 has been replaced by "true". (It is already present, as H165). --- Hypothesis H199 has been replaced by "true". (It is already present, as H166). --- Hypothesis H200 has been replaced by "true". (It is already present, as H167). --- Hypothesis H201 has been replaced by "true". (It is already present, as H168). --- Hypothesis H204 has been replaced by "true". (It is already present, as H171). --- Hypothesis H205 has been replaced by "true". (It is already present, as H172). --- Hypothesis H207 has been replaced by "true". (It is already present, as H174). --- Hypothesis H208 has been replaced by "true". (It is already present, as H175). --- Hypothesis H209 has been replaced by "true". (It is already present, as H176). --- Hypothesis H210 has been replaced by "true". (It is already present, as H177). --- Hypothesis H211 has been replaced by "true". (It is already present, as H178). --- Hypothesis H212 has been replaced by "true". (It is already present, as H179). --- Hypothesis H213 has been replaced by "true". (It is already present, as H180). --- Hypothesis H214 has been replaced by "true". (It is already present, as H181). --- Hypothesis H215 has been replaced by "true". (It is already present, as H186). --- Hypothesis H216 has been replaced by "true". (It is already present, as H187). 
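(The following restatement of the integer arithmetic behind these conclusions is an editorial sketch, not part of the Simplifier's output; every symbol in it is taken from the hypotheses and conclusions recorded in this log, and the identities are standard integer facts rather than additional proof steps.)

The substitution of (byte_count + 63) div 64 for blocks_required, seen earlier in this file, is the usual ceiling division for byte_count >= 1:

    \left\lfloor \frac{\mathit{byte\_count} + 63}{64} \right\rfloor \;=\; \left\lceil \frac{\mathit{byte\_count}}{64} \right\rceil ,

so the resulting H5, blocks_done < (byte_count + 63) div 64, records that at least one output block remains to be produced. The range conclusions below follow from the same integer bounds: H34 gives blocks_done >= 0, hence

    \mathit{blocks\_done} \cdot 64 \;\ge\; 0 \qquad (\text{C3}),

and the restructured guard H192, byte_count - blocks_done * 64 < 64, gives, over the integers,

    \mathit{byte\_count} - \mathit{blocks\_done} \cdot 64 \;\le\; 63 \;\le\; 64 \qquad (\text{C8}).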
*** Proved C1: byte_count - blocks_done * skein_512_block_bytes_c >= natural__first using hypothesis H186. *** Proved C2: byte_count - blocks_done * skein_512_block_bytes_c <= natural__last using hypothesis H187. *** Proved C5: result__index__subtype__1__first = 0 using hypothesis H30. *** Proved C9: blocks_done * skein_512_block_bytes_c >= integer__base__first using hypothesis H188. *** Proved C10: blocks_done * skein_512_block_bytes_c <= integer__base__last using hypothesis H189. -S- Applied substitution rule skein_512_fi_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New H9: fld_byte_count(fld_h(ctx)) >= 0 New H42: fld_byte_count(fld_h(local_ctx__3)) >= 0 New H83: fld_byte_count(fld_h(local_ctx__4)) >= 0 New H142: true New H146: true New H165: fld_byte_count(fld_h(local_ctx__5)) >= 0 New H186: byte_count - blocks_done * skein_512_block_bytes_c >= 0 New C3: blocks_done * skein_512_block_bytes_c >= 0 -S- Applied substitution rule skein_512_fi_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H10: fld_byte_count(fld_h(ctx)) <= 2147483647 New H43: fld_byte_count(fld_h(local_ctx__3)) <= 2147483647 New H84: fld_byte_count(fld_h(local_ctx__4)) <= 2147483647 New H143: true New H147: true New H155: skein_512_block_bytes_index__last <= 2147483647 New H156: true New H166: fld_byte_count(fld_h(local_ctx__5)) <= 2147483647 New H187: byte_count - blocks_done * skein_512_block_bytes_c <= 2147483647 New C4: blocks_done * skein_512_block_bytes_c <= 2147483647 >>> Restructured hypothesis H192 into: >>> H192: byte_count - blocks_done * skein_512_block_bytes_c < skein_512_block_bytes_c -S- Applied substitution rule skein_512_fi_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H3: blocks_done * 64 < byte_count New H4: blocks_done * 64 < result__index__subtype__1__last + 1 New H186: byte_count - blocks_done * 64 >= 0 New H187: byte_count - blocks_done * 64 <= 2147483647 New H188: blocks_done * 64 >= integer__base__first New H189: blocks_done * 64 <= integer__base__last New H192: byte_count - blocks_done * 64 < 64 New C7: result__index__subtype__1__last >= blocks_done * 64 + (byte_count - blocks_done * 64 - 1) New C8: byte_count - blocks_done * 64 <= ( skein_512_state_words_index__last + 1) * 8 New C3: blocks_done * 64 >= 0 New C4: blocks_done * 64 <= 2147483647 -S- Applied substitution rule skein_512_fi_rules(2). This was achieved by replacing all occurrences of skein_block_type_out by: 63. New H78: 63 >= spark__unsigned__u6__first New H79: 63 <= spark__unsigned__u6__last New H80: fld_h(local_ctx__4) = upf_tweak_words(upf_byte_count(fld_h( local_ctx__3), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := 63, first_block := true, final_block := true)) -S- Applied substitution rule skein_512_fi_rules(6). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H188: blocks_done * 64 >= - 2147483648 -S- Applied substitution rule skein_512_fi_rules(7). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H189: blocks_done * 64 <= 2147483647 -S- Applied substitution rule skein_512_fi_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. 
New H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H48: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H89: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H171: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) >= 0 New H78: true -S- Applied substitution rule skein_512_fi_rules(43). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H49: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= 63 New H90: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= 63 New H172: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) <= 63 New H79: true -S- Applied substitution rule skein_512_fi_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H51: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H92: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H174: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) >= 0 -S- Applied substitution rule skein_512_fi_rules(49). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H52: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= 127 New H93: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= 127 New H175: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) <= 127 -S- Applied substitution rule skein_512_fi_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2] ) <= spark__unsigned__byte__last) New H103: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4), [i___2] ) <= spark__unsigned__byte__last) New H141: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4), [i___1] ) <= spark__unsigned__byte__last) New H163: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5), [i___2] ) <= spark__unsigned__byte__last) New H193: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> 0 <= element( result, [i___1]) and element(result, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_fi_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. 
New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2] ) <= 255) New H103: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4), [i___2] ) <= 255) New H141: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4), [i___1] ) <= 255) New H163: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5), [i___2] ) <= 255) New H193: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> 0 <= element( result, [i___1]) and element(result, [i___1]) <= 255) -S- Applied substitution rule skein_512_fi_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H53: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H94: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H176: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) >= 0 -S- Applied substitution rule skein_512_fi_rules(61). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H54: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= 65535 New H95: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= 65535 New H177: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) <= 65535 -S- Applied substitution rule skein_512_fi_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H55: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H96: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H178: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) >= 0 -S- Applied substitution rule skein_512_fi_rules(67). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H56: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= 4294967295 New H97: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= 4294967295 New H179: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) <= 4294967295 -S- Applied substitution rule skein_512_fi_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H34: blocks_done >= 0 New H41: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1] ) <= spark__unsigned__u64__last) New H57: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H98: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H104: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4), [i___1] ) <= spark__unsigned__u64__last) New H164: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5), [i___1] ) <= spark__unsigned__u64__last) New H180: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) >= 0 -S- Applied substitution rule skein_512_fi_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H35: blocks_done <= 18446744073709551615 New H58: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= 18446744073709551615 New H99: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= 18446744073709551615 New H181: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) <= 18446744073709551615 New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H41: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1] ) <= 18446744073709551615) New H104: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4), [i___1] ) <= 18446744073709551615) New H164: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5), [i___1] ) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(83). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H11: fld_hash_bit_len(fld_h(ctx)) >= 0 New H44: fld_hash_bit_len(fld_h(local_ctx__3)) >= 0 New H85: fld_hash_bit_len(fld_h(local_ctx__4)) >= 0 New H167: fld_hash_bit_len(fld_h(local_ctx__5)) >= 0 -S- Applied substitution rule skein_512_fi_rules(84). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H12: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H45: fld_hash_bit_len(fld_h(local_ctx__3)) <= 2147483640 New H86: fld_hash_bit_len(fld_h(local_ctx__4)) <= 2147483640 New H168: fld_hash_bit_len(fld_h(local_ctx__5)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(88). 
This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H26: fld_hash_bit_len(fld_h(ctx)) >= 1 New H148: fld_hash_bit_len(fld_h(local_ctx__4)) >= 1 New H157: fld_hash_bit_len(fld_h(local_ctx__5)) >= 1 -S- Applied substitution rule skein_512_fi_rules(89). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H27: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H149: fld_hash_bit_len(fld_h(local_ctx__4)) <= 2147483640 New H158: fld_hash_bit_len(fld_h(local_ctx__5)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H41: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__3) , [i___1]) and element(fld_x(local_ctx__3), [i___1]) <= 18446744073709551615) New H104: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__4) , [i___1]) and element(fld_x(local_ctx__4), [i___1]) <= 18446744073709551615) New H164: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__5) , [i___1]) and element(fld_x(local_ctx__5), [i___1]) <= 18446744073709551615) New C6: true -S- Applied substitution rule skein_512_fi_rules(94). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H41: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3) , [i___1]) <= 18446744073709551615) New H104: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4) , [i___1]) <= 18446744073709551615) New H164: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5) , [i___1]) <= 18446744073709551615) New C8: byte_count - blocks_done * 64 <= 64 -S- Applied substitution rule skein_512_fi_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H28: fld_byte_count(fld_h(ctx)) >= 0 New H150: fld_byte_count(fld_h(local_ctx__4)) >= 0 New H160: fld_byte_count(fld_h(local_ctx__5)) >= 0 -S- Applied substitution rule skein_512_fi_rules(99). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H29: fld_byte_count(fld_h(ctx)) <= 64 New H151: fld_byte_count(fld_h(local_ctx__4)) <= 64 New H161: fld_byte_count(fld_h(local_ctx__5)) <= 64 -S- Applied substitution rule skein_512_fi_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. 
New H152: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H40: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__3) , [i___2]) and element(fld_b(local_ctx__3), [i___2]) <= 255) New H103: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__4) , [i___2]) and element(fld_b(local_ctx__4), [i___2]) <= 255) New H141: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__4) , [i___1]) and element(fld_b(local_ctx__4), [i___1]) <= 255) New H163: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__5) , [i___2]) and element(fld_b(local_ctx__5), [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(104). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H153: true New H154: true New H155: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H40: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3) , [i___2]) <= 255) New H103: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4) , [i___2]) <= 255) New H141: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4) , [i___1]) <= 255) New H163: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5) , [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H144: true -S- Applied substitution rule skein_512_fi_rules(109). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H145: true -S- Applied substitution rule skein_512_fi_rules(125). This was achieved by replacing all occurrences of output_byte_count_t__first by: 1. New H184: byte_count >= 1 -S- Applied substitution rule skein_512_fi_rules(126). This was achieved by replacing all occurrences of output_byte_count_t__last by: 268435455. New H185: byte_count <= 268435455 -S- Applied substitution rule skein_512_fi_rules(130). This was achieved by replacing all occurrences of output_block_count_t__first by: 0. New H32: blocks_done >= 0 -S- Applied substitution rule skein_512_fi_rules(131). This was achieved by replacing all occurrences of output_block_count_t__last by: 4194304. New H33: blocks_done <= 4194304 *** Proved C7: result__index__subtype__1__last >= blocks_done * 64 + ( byte_count - blocks_done * 64 - 1) via its standard form, which is: Std.Fm C7: - byte_count + result__index__subtype__1__last > - 2 using hypothesis H2. *** Proved C3: blocks_done * 64 >= 0 using hypothesis H34. *** Proved C4: blocks_done * 64 <= 2147483647 using hypothesis H189. *** Proved C6: true *** Proved C8: byte_count - blocks_done * 64 <= 64 using hypothesis H192. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_29. 
@@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H36 has been replaced by "true". (It is already present, as H1) . --- Hypothesis H37 has been replaced by "true". (It is already present, as H34). --- Hypothesis H38 has been replaced by "true". (It is already present, as H35). %%% Simplified H40 on reading formula in, to give: %%% H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H41 on reading formula in, to give: %%% H41: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H59 has been replaced by "true". (It is already present, as H40). --- Hypothesis H60 has been replaced by "true". (It is already present, as H41). --- Hypothesis H61 has been replaced by "true". (It is already present, as H42). --- Hypothesis H62 has been replaced by "true". (It is already present, as H43). --- Hypothesis H63 has been replaced by "true". (It is already present, as H44). --- Hypothesis H64 has been replaced by "true". (It is already present, as H45). --- Hypothesis H67 has been replaced by "true". (It is already present, as H48). --- Hypothesis H68 has been replaced by "true". (It is already present, as H49). --- Hypothesis H70 has been replaced by "true". (It is already present, as H51). --- Hypothesis H71 has been replaced by "true". (It is already present, as H52). --- Hypothesis H72 has been replaced by "true". (It is already present, as H53). --- Hypothesis H73 has been replaced by "true". (It is already present, as H54). --- Hypothesis H74 has been replaced by "true". (It is already present, as H55). --- Hypothesis H75 has been replaced by "true". (It is already present, as H56). --- Hypothesis H76 has been replaced by "true". (It is already present, as H57). --- Hypothesis H77 has been replaced by "true". (It is already present, as H58). 
%%% Simplified H80 on reading formula in, to give: %%% H80: fld_h(local_ctx__4) = upf_tweak_words(upf_byte_count(fld_h( local_ctx__3), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_out, first_block := true, final_block := true)) %%% Simplified H103 on reading formula in, to give: %%% H103: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H104 on reading formula in, to give: %%% H104: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H105 has been replaced by "true". (It is already present, as H83). --- Hypothesis H106 has been replaced by "true". (It is already present, as H84). --- Hypothesis H107 has been replaced by "true". (It is already present, as H85). --- Hypothesis H108 has been replaced by "true". (It is already present, as H86). --- Hypothesis H111 has been replaced by "true". (It is already present, as H89). --- Hypothesis H112 has been replaced by "true". (It is already present, as H90). --- Hypothesis H114 has been replaced by "true". (It is already present, as H92). --- Hypothesis H115 has been replaced by "true". (It is already present, as H93). --- Hypothesis H116 has been replaced by "true". (It is already present, as H94). --- Hypothesis H117 has been replaced by "true". (It is already present, as H95). --- Hypothesis H118 has been replaced by "true". (It is already present, as H96). --- Hypothesis H119 has been replaced by "true". (It is already present, as H97). --- Hypothesis H120 has been replaced by "true". (It is already present, as H98). --- Hypothesis H121 has been replaced by "true". (It is already present, as H99). --- Hypothesis H122 has been replaced by "true". (It is already present, as H103). --- Hypothesis H123 has been replaced by "true". (It is already present, as H104). --- Hypothesis H124 has been replaced by "true". (It is already present, as H83). --- Hypothesis H125 has been replaced by "true". (It is already present, as H84). --- Hypothesis H126 has been replaced by "true". (It is already present, as H85). --- Hypothesis H127 has been replaced by "true". (It is already present, as H86). --- Hypothesis H130 has been replaced by "true". (It is already present, as H89). --- Hypothesis H131 has been replaced by "true". (It is already present, as H90). --- Hypothesis H133 has been replaced by "true". (It is already present, as H92). --- Hypothesis H134 has been replaced by "true". (It is already present, as H93). --- Hypothesis H135 has been replaced by "true". (It is already present, as H94). --- Hypothesis H136 has been replaced by "true". (It is already present, as H95). --- Hypothesis H137 has been replaced by "true". (It is already present, as H96). --- Hypothesis H138 has been replaced by "true". (It is already present, as H97). --- Hypothesis H139 has been replaced by "true". (It is already present, as H98). --- Hypothesis H140 has been replaced by "true". (It is already present, as H99). 
%%% Simplified H141 on reading formula in, to give: %%% H141: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4), [i___1]) <= spark__unsigned__byte__last) %%% Simplified H153 on reading formula in, to give: %%% H153: 63 <= skein_512_block_bytes_index__last %%% Simplified H154 on reading formula in, to give: %%% H154: 63 <= skein_512_block_bytes_index__last %%% Simplified H156 on reading formula in, to give: %%% H156: 63 <= natural__last %%% Simplified H163 on reading formula in, to give: %%% H163: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H164 on reading formula in, to give: %%% H164: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H182 has been replaced by "true". (It is already present, as H32). --- Hypothesis H183 has been replaced by "true". (It is already present, as H33). --- Hypothesis H190 has been replaced by "true". (It is already present, as H186). --- Hypothesis H191 has been replaced by "true". (It is already present, as H187). %%% Simplified H195 on reading formula in, to give: %%% H195: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> spark__unsigned__byte__first <= element(result, [i___1]) and element( result, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H196 has been replaced by "true". (It is already present, as H32). --- Hypothesis H197 has been replaced by "true". (It is already present, as H33). --- Hypothesis H198 has been replaced by "true". (It is already present, as H163). --- Hypothesis H199 has been replaced by "true". (It is already present, as H164). --- Hypothesis H200 has been replaced by "true". (It is already present, as H165). --- Hypothesis H201 has been replaced by "true". (It is already present, as H166). --- Hypothesis H202 has been replaced by "true". (It is already present, as H167). --- Hypothesis H203 has been replaced by "true". (It is already present, as H168). --- Hypothesis H206 has been replaced by "true". (It is already present, as H171). --- Hypothesis H207 has been replaced by "true". (It is already present, as H172). --- Hypothesis H209 has been replaced by "true". (It is already present, as H174). --- Hypothesis H210 has been replaced by "true". (It is already present, as H175). --- Hypothesis H211 has been replaced by "true". (It is already present, as H176). --- Hypothesis H212 has been replaced by "true". (It is already present, as H177). --- Hypothesis H213 has been replaced by "true". (It is already present, as H178). --- Hypothesis H214 has been replaced by "true". (It is already present, as H179). --- Hypothesis H215 has been replaced by "true". (It is already present, as H180). --- Hypothesis H216 has been replaced by "true". (It is already present, as H181). --- Hypothesis H217 has been replaced by "true". (It is already present, as H193). --- Hypothesis H218 has been replaced by "true". (It is already present, as H194). 
--- Hypothesis H219 has been replaced by "true". (It is already present, as H193). --- Hypothesis H220 has been replaced by "true". (It is already present, as H194). --- Hypothesis H223 has been replaced by "true". (It is already present, as H30). --- Hypothesis H227 has been replaced by "true". (It is already present, as H188). --- Hypothesis H228 has been replaced by "true". (It is already present, as H189). %%% Simplified H229 on reading formula in, to give: %%% H229: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> spark__unsigned__byte__first <= element(result__6, [i___1]) and element(result__6, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H230 on reading formula in, to give: %%% H230: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [ i___1]) <= spark__unsigned__u64__last) --- Hypothesis H231 has been replaced by "true". (It is already present, as H32). --- Hypothesis H232 has been replaced by "true". (It is already present, as H33). *** Proved C1: blocks_done + 1 >= output_block_count_t__first using hypothesis H32. -S- Applied substitution rule skein_512_fi_rules(131). This was achieved by replacing all occurrences of output_block_count_t__last by: 4194304. New H33: blocks_done <= 4194304 New C2: blocks_done <= 4194303 -S- Applied substitution rule skein_512_fi_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H3: blocks_done * 64 < byte_count New H4: blocks_done * 64 < result__index__subtype__1__last + 1 New H186: byte_count - blocks_done * 64 >= natural__first New H187: byte_count - blocks_done * 64 <= natural__last New H188: blocks_done * 64 >= integer__base__first New H189: blocks_done * 64 <= integer__base__last New H192: byte_count - blocks_done * 64 >= 64 New H193: 64 >= natural__first New H194: 64 <= natural__last New H221: blocks_done * 64 >= natural__first New H222: blocks_done * 64 <= natural__last New H225: result__index__subtype__1__last >= blocks_done * 64 + 63 New H226: 64 <= (skein_512_state_words_index__last + 1) * 8 -S- Applied substitution rule skein_512_fi_rules(2). This was achieved by replacing all occurrences of skein_block_type_out by: 63. New H78: 63 >= spark__unsigned__u6__first New H79: 63 <= spark__unsigned__u6__last New H80: fld_h(local_ctx__4) = upf_tweak_words(upf_byte_count(fld_h( local_ctx__3), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := 63, first_block := true, final_block := true)) -S- Applied substitution rule skein_512_fi_rules(6). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H188: blocks_done * 64 >= - 2147483648 -S- Applied substitution rule skein_512_fi_rules(7). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H189: blocks_done * 64 <= 2147483647 -S- Applied substitution rule skein_512_fi_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. 
New H9: fld_byte_count(fld_h(ctx)) >= 0 New H42: fld_byte_count(fld_h(local_ctx__3)) >= 0 New H83: fld_byte_count(fld_h(local_ctx__4)) >= 0 New H142: true New H146: true New H165: fld_byte_count(fld_h(local_ctx__5)) >= 0 New H186: byte_count - blocks_done * 64 >= 0 New H193: true New H221: blocks_done * 64 >= 0 -S- Applied substitution rule skein_512_fi_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H10: fld_byte_count(fld_h(ctx)) <= 2147483647 New H43: fld_byte_count(fld_h(local_ctx__3)) <= 2147483647 New H84: fld_byte_count(fld_h(local_ctx__4)) <= 2147483647 New H143: true New H147: true New H155: skein_512_block_bytes_index__last <= 2147483647 New H156: true New H166: fld_byte_count(fld_h(local_ctx__5)) <= 2147483647 New H187: byte_count - blocks_done * 64 <= 2147483647 New H194: true New H222: blocks_done * 64 <= 2147483647 -S- Applied substitution rule skein_512_fi_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H48: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H89: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H171: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) >= 0 New H78: true -S- Applied substitution rule skein_512_fi_rules(43). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H49: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= 63 New H90: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= 63 New H172: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) <= 63 New H79: true -S- Applied substitution rule skein_512_fi_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H51: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H92: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H174: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) >= 0 -S- Applied substitution rule skein_512_fi_rules(49). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H52: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= 127 New H93: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= 127 New H175: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) <= 127 -S- Applied substitution rule skein_512_fi_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. 
New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2] ) <= spark__unsigned__byte__last) New H103: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4), [i___2] ) <= spark__unsigned__byte__last) New H141: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4), [i___1] ) <= spark__unsigned__byte__last) New H163: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5), [i___2] ) <= spark__unsigned__byte__last) New H195: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> 0 <= element( result, [i___1]) and element(result, [i___1]) <= spark__unsigned__byte__last) New H229: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> 0 <= element( result__6, [i___1]) and element(result__6, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_fi_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2] ) <= 255) New H103: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4), [i___2] ) <= 255) New H141: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4), [i___1] ) <= 255) New H163: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5), [i___2] ) <= 255) New H195: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> 0 <= element( result, [i___1]) and element(result, [i___1]) <= 255) New H229: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> 0 <= element( result__6, [i___1]) and element(result__6, [i___1]) <= 255) -S- Applied substitution rule skein_512_fi_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. 
New H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H53: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H94: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H176: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) >= 0 -S- Applied substitution rule skein_512_fi_rules(61). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H54: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= 65535 New H95: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= 65535 New H177: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) <= 65535 -S- Applied substitution rule skein_512_fi_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H55: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H96: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H178: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) >= 0 -S- Applied substitution rule skein_512_fi_rules(67). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H56: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= 4294967295 New H97: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= 4294967295 New H179: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) <= 4294967295 -S- Applied substitution rule skein_512_fi_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H34: blocks_done >= 0 New H41: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1] ) <= spark__unsigned__u64__last) New H57: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H98: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H104: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4), [i___1] ) <= spark__unsigned__u64__last) New H164: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5), [i___1] ) <= spark__unsigned__u64__last) New H180: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) >= 0 New H230: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule skein_512_fi_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H35: blocks_done <= 18446744073709551615 New H58: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= 18446744073709551615 New H99: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= 18446744073709551615 New H181: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) <= 18446744073709551615 New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H41: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1] ) <= 18446744073709551615) New H104: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4), [i___1] ) <= 18446744073709551615) New H164: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5), [i___1] ) <= 18446744073709551615) New H230: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(83). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H11: fld_hash_bit_len(fld_h(ctx)) >= 0 New H44: fld_hash_bit_len(fld_h(local_ctx__3)) >= 0 New H85: fld_hash_bit_len(fld_h(local_ctx__4)) >= 0 New H167: fld_hash_bit_len(fld_h(local_ctx__5)) >= 0 -S- Applied substitution rule skein_512_fi_rules(84). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H12: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H45: fld_hash_bit_len(fld_h(local_ctx__3)) <= 2147483640 New H86: fld_hash_bit_len(fld_h(local_ctx__4)) <= 2147483640 New H168: fld_hash_bit_len(fld_h(local_ctx__5)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H26: fld_hash_bit_len(fld_h(ctx)) >= 1 New H148: fld_hash_bit_len(fld_h(local_ctx__4)) >= 1 New H157: fld_hash_bit_len(fld_h(local_ctx__5)) >= 1 -S- Applied substitution rule skein_512_fi_rules(89). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H27: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H149: fld_hash_bit_len(fld_h(local_ctx__4)) <= 2147483640 New H158: fld_hash_bit_len(fld_h(local_ctx__5)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. 
New H224: true New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H41: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__3) , [i___1]) and element(fld_x(local_ctx__3), [i___1]) <= 18446744073709551615) New H104: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__4) , [i___1]) and element(fld_x(local_ctx__4), [i___1]) <= 18446744073709551615) New H164: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__5) , [i___1]) and element(fld_x(local_ctx__5), [i___1]) <= 18446744073709551615) New H230: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(94). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H226: true New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H41: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3) , [i___1]) <= 18446744073709551615) New H104: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4) , [i___1]) <= 18446744073709551615) New H164: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5) , [i___1]) <= 18446744073709551615) New H230: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H28: fld_byte_count(fld_h(ctx)) >= 0 New H150: fld_byte_count(fld_h(local_ctx__4)) >= 0 New H160: fld_byte_count(fld_h(local_ctx__5)) >= 0 -S- Applied substitution rule skein_512_fi_rules(99). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H29: fld_byte_count(fld_h(ctx)) <= 64 New H151: fld_byte_count(fld_h(local_ctx__4)) <= 64 New H161: fld_byte_count(fld_h(local_ctx__5)) <= 64 -S- Applied substitution rule skein_512_fi_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. 
New H152: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H40: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__3) , [i___2]) and element(fld_b(local_ctx__3), [i___2]) <= 255) New H103: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__4) , [i___2]) and element(fld_b(local_ctx__4), [i___2]) <= 255) New H141: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__4) , [i___1]) and element(fld_b(local_ctx__4), [i___1]) <= 255) New H163: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__5) , [i___2]) and element(fld_b(local_ctx__5), [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(104). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H153: true New H154: true New H155: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H40: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3) , [i___2]) <= 255) New H103: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4) , [i___2]) <= 255) New H141: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4) , [i___1]) <= 255) New H163: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5) , [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H144: true -S- Applied substitution rule skein_512_fi_rules(109). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H145: true -S- Applied substitution rule skein_512_fi_rules(125). This was achieved by replacing all occurrences of output_byte_count_t__first by: 1. New H184: byte_count >= 1 -S- Applied substitution rule skein_512_fi_rules(126). This was achieved by replacing all occurrences of output_byte_count_t__last by: 268435455. New H185: byte_count <= 268435455 -S- Applied substitution rule skein_512_fi_rules(130). This was achieved by replacing all occurrences of output_block_count_t__first by: 0. New H32: blocks_done >= 0 --- Eliminated hypothesis H13 (true-hypothesis). --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H17 (true-hypothesis). --- Eliminated hypothesis H36 (true-hypothesis). --- Eliminated hypothesis H37 (true-hypothesis). --- Eliminated hypothesis H38 (true-hypothesis). --- Eliminated hypothesis H46 (true-hypothesis). --- Eliminated hypothesis H47 (true-hypothesis). --- Eliminated hypothesis H50 (true-hypothesis). --- Eliminated hypothesis H59 (true-hypothesis). --- Eliminated hypothesis H60 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H63 (true-hypothesis). --- Eliminated hypothesis H64 (true-hypothesis). --- Eliminated hypothesis H65 (true-hypothesis). 
--- Eliminated hypothesis H66 (true-hypothesis). --- Eliminated hypothesis H67 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H69 (true-hypothesis). --- Eliminated hypothesis H70 (true-hypothesis). --- Eliminated hypothesis H71 (true-hypothesis). --- Eliminated hypothesis H72 (true-hypothesis). --- Eliminated hypothesis H73 (true-hypothesis). --- Eliminated hypothesis H74 (true-hypothesis). --- Eliminated hypothesis H75 (true-hypothesis). --- Eliminated hypothesis H76 (true-hypothesis). --- Eliminated hypothesis H77 (true-hypothesis). --- Eliminated hypothesis H87 (true-hypothesis). --- Eliminated hypothesis H88 (true-hypothesis). --- Eliminated hypothesis H91 (true-hypothesis). --- Eliminated hypothesis H105 (true-hypothesis). --- Eliminated hypothesis H106 (true-hypothesis). --- Eliminated hypothesis H107 (true-hypothesis). --- Eliminated hypothesis H108 (true-hypothesis). --- Eliminated hypothesis H109 (true-hypothesis). --- Eliminated hypothesis H110 (true-hypothesis). --- Eliminated hypothesis H111 (true-hypothesis). --- Eliminated hypothesis H112 (true-hypothesis). --- Eliminated hypothesis H113 (true-hypothesis). --- Eliminated hypothesis H114 (true-hypothesis). --- Eliminated hypothesis H115 (true-hypothesis). --- Eliminated hypothesis H116 (true-hypothesis). --- Eliminated hypothesis H117 (true-hypothesis). --- Eliminated hypothesis H118 (true-hypothesis). --- Eliminated hypothesis H119 (true-hypothesis). --- Eliminated hypothesis H120 (true-hypothesis). --- Eliminated hypothesis H121 (true-hypothesis). --- Eliminated hypothesis H122 (true-hypothesis). --- Eliminated hypothesis H123 (true-hypothesis). --- Eliminated hypothesis H124 (true-hypothesis). --- Eliminated hypothesis H125 (true-hypothesis). --- Eliminated hypothesis H126 (true-hypothesis). --- Eliminated hypothesis H127 (true-hypothesis). --- Eliminated hypothesis H128 (true-hypothesis). --- Eliminated hypothesis H129 (true-hypothesis). --- Eliminated hypothesis H130 (true-hypothesis). --- Eliminated hypothesis H131 (true-hypothesis). --- Eliminated hypothesis H132 (true-hypothesis). --- Eliminated hypothesis H133 (true-hypothesis). --- Eliminated hypothesis H134 (true-hypothesis). --- Eliminated hypothesis H135 (true-hypothesis). --- Eliminated hypothesis H136 (true-hypothesis). --- Eliminated hypothesis H137 (true-hypothesis). --- Eliminated hypothesis H138 (true-hypothesis). --- Eliminated hypothesis H139 (true-hypothesis). --- Eliminated hypothesis H140 (true-hypothesis). --- Eliminated hypothesis H169 (true-hypothesis). --- Eliminated hypothesis H170 (true-hypothesis). --- Eliminated hypothesis H173 (true-hypothesis). --- Eliminated hypothesis H182 (true-hypothesis). --- Eliminated hypothesis H183 (true-hypothesis). --- Eliminated hypothesis H190 (true-hypothesis). --- Eliminated hypothesis H191 (true-hypothesis). --- Eliminated hypothesis H196 (true-hypothesis). --- Eliminated hypothesis H197 (true-hypothesis). --- Eliminated hypothesis H198 (true-hypothesis). --- Eliminated hypothesis H199 (true-hypothesis). --- Eliminated hypothesis H200 (true-hypothesis). --- Eliminated hypothesis H201 (true-hypothesis). --- Eliminated hypothesis H202 (true-hypothesis). --- Eliminated hypothesis H203 (true-hypothesis). --- Eliminated hypothesis H204 (true-hypothesis). --- Eliminated hypothesis H205 (true-hypothesis). --- Eliminated hypothesis H206 (true-hypothesis). --- Eliminated hypothesis H207 (true-hypothesis). --- Eliminated hypothesis H208 (true-hypothesis). 
--- Eliminated hypothesis H209 (true-hypothesis). --- Eliminated hypothesis H210 (true-hypothesis). --- Eliminated hypothesis H211 (true-hypothesis). --- Eliminated hypothesis H212 (true-hypothesis). --- Eliminated hypothesis H213 (true-hypothesis). --- Eliminated hypothesis H214 (true-hypothesis). --- Eliminated hypothesis H215 (true-hypothesis). --- Eliminated hypothesis H216 (true-hypothesis). --- Eliminated hypothesis H217 (true-hypothesis). --- Eliminated hypothesis H218 (true-hypothesis). --- Eliminated hypothesis H219 (true-hypothesis). --- Eliminated hypothesis H220 (true-hypothesis). --- Eliminated hypothesis H223 (true-hypothesis). --- Eliminated hypothesis H227 (true-hypothesis). --- Eliminated hypothesis H228 (true-hypothesis). --- Eliminated hypothesis H231 (true-hypothesis). --- Eliminated hypothesis H232 (true-hypothesis). --- Eliminated hypothesis H142 (true-hypothesis). --- Eliminated hypothesis H146 (true-hypothesis). --- Eliminated hypothesis H193 (true-hypothesis). --- Eliminated hypothesis H143 (true-hypothesis). --- Eliminated hypothesis H147 (true-hypothesis). --- Eliminated hypothesis H156 (true-hypothesis). --- Eliminated hypothesis H194 (true-hypothesis). --- Eliminated hypothesis H78 (true-hypothesis). --- Eliminated hypothesis H79 (true-hypothesis). --- Eliminated hypothesis H224 (true-hypothesis). --- Eliminated hypothesis H226 (true-hypothesis). --- Eliminated hypothesis H152 (true-hypothesis). --- Eliminated hypothesis H153 (true-hypothesis). --- Eliminated hypothesis H154 (true-hypothesis). --- Eliminated hypothesis H155 (true-hypothesis). --- Eliminated hypothesis H144 (true-hypothesis). --- Eliminated hypothesis H145 (true-hypothesis). --- Eliminated hypothesis H222 (duplicate of H189). --- Eliminated hypothesis H28 (duplicate of H9). --- Eliminated hypothesis H150 (duplicate of H83). --- Eliminated hypothesis H27 (duplicate of H12). --- Eliminated hypothesis H149 (duplicate of H86). --- Eliminated hypothesis H168 (duplicate of H158). --- Eliminated hypothesis H165 (duplicate of H160). --- Eliminated hypothesis H34 (duplicate of H32). --- Eliminated hypothesis H3 (redundant, given H192). --- Eliminated hypothesis H4 (redundant, given H225). --- Eliminated hypothesis H10 (redundant, given H29). --- Eliminated hypothesis H11 (redundant, given H26). --- Eliminated hypothesis H32 (redundant, given H221). --- Eliminated hypothesis H35 (redundant, given H33). --- Eliminated hypothesis H44 (redundant, given H39). --- Eliminated hypothesis H83 (redundant, given H82). --- Eliminated hypothesis H84 (redundant, given H82). --- Eliminated hypothesis H85 (redundant, given H148). --- Eliminated hypothesis H151 (redundant, given H82). --- Eliminated hypothesis H166 (redundant, given H161). --- Eliminated hypothesis H167 (redundant, given H157). --- Eliminated hypothesis H186 (redundant, given H192). --- Eliminated hypothesis H188 (redundant, given H221). --- Eliminated hypothesis H189 (redundant, given H33). -S- Eliminated hypothesis H6. This was achieved by replacing all occurrences of blocks_required by: (byte_count + 63) div 64. New H5: blocks_done < (byte_count + 63) div 64 -S- Substituted hypothesis H81. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( local_ctx__4)) by: fld_hash_bit_len(fld_h(local_ctx__3)). -S- Substituted hypothesis H100. This was achieved by replacing all occurrences of fld_x(local_ctx__4) by: fld_x(local_ctx__3). -S- Substituted hypothesis H101. 
This was achieved by replacing all occurrences of fld_b(local_ctx__4) by: fld_b(local_ctx__3). -S- Substituted hypothesis H162. This was achieved by replacing all occurrences of fld_byte_count(fld_h( local_ctx__5)) by: fld_byte_count(fld_h(local_ctx__4)). -S- Substituted hypothesis H159. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( local_ctx__5)) by: fld_hash_bit_len(fld_h(local_ctx__3)). *** Proved C2: blocks_done <= 4194303 using hypotheses H3 & H185. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_30. @@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H36 has been replaced by "true". (It is already present, as H1) . --- Hypothesis H37 has been replaced by "true". (It is already present, as H34). --- Hypothesis H38 has been replaced by "true". (It is already present, as H35). %%% Simplified H40 on reading formula in, to give: %%% H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H41 on reading formula in, to give: %%% H41: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H59 has been replaced by "true". (It is already present, as H40). --- Hypothesis H60 has been replaced by "true". (It is already present, as H41). --- Hypothesis H61 has been replaced by "true". (It is already present, as H42). --- Hypothesis H62 has been replaced by "true". (It is already present, as H43). --- Hypothesis H63 has been replaced by "true". (It is already present, as H44). --- Hypothesis H64 has been replaced by "true". (It is already present, as H45). --- Hypothesis H67 has been replaced by "true". (It is already present, as H48). --- Hypothesis H68 has been replaced by "true". (It is already present, as H49). --- Hypothesis H70 has been replaced by "true". (It is already present, as H51). --- Hypothesis H71 has been replaced by "true". (It is already present, as H52). --- Hypothesis H72 has been replaced by "true". (It is already present, as H53). --- Hypothesis H73 has been replaced by "true". (It is already present, as H54). --- Hypothesis H74 has been replaced by "true". (It is already present, as H55). --- Hypothesis H75 has been replaced by "true". (It is already present, as H56). --- Hypothesis H76 has been replaced by "true". (It is already present, as H57). --- Hypothesis H77 has been replaced by "true". (It is already present, as H58). 
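The long runs of "Applied substitution rule skein_512_fi_rules(...)" above, and repeated below for procedure_skein_512_final_30, do nothing more than replace the symbolic 'First/'Last bounds of the SPARK and Skein scalar types by their literal values. The Ada sketch below is a hypothetical reconstruction of those ranges, inferred solely from the bounds quoted in this log; the package, type and subtype names are illustrative and are not copied from the actual SPARK_Unsigned or Skein sources.

   package Skein_VC_Bounds_Sketch is
      --  Hypothetical declarations: every bound below is taken from a
      --  skein_512_fi_rules substitution recorded in this log.
      type U6   is range 0 .. 63;      --  rules 42/43
      type U7   is range 0 .. 127;     --  rules 48/49
      type Byte is range 0 .. 255;     --  rules 54/55
      type U16  is range 0 .. 65_535;  --  rules 60/61
      type U32  is mod 2 ** 32;        --  'Last = 4_294_967_295, rules 66/67
      type U64  is mod 2 ** 64;        --  'Last = 18_446_744_073_709_551_615, rules 72/73

      subtype Hash_Bit_Length             is Integer range 0 .. 2_147_483_640;  --  rules 83/84
      subtype Initialized_Hash_Bit_Length is Integer range 1 .. 2_147_483_640;  --  rules 88/89
      subtype State_Words_Index           is Integer range 0 .. 7;              --  rules 93/94
      subtype Block_Bytes_Count           is Integer range 0 .. 64;             --  rules 98/99
      subtype Block_Bytes_Index           is Integer range 0 .. 63;             --  rules 103/104
      subtype Positive_Block_512_Count    is Integer range 1 .. 33_554_431;     --  rules 108/109
      subtype Output_Byte_Count           is Integer range 1 .. 268_435_455;    --  rules 125/126
      subtype Output_Block_Count          is Integer range 0 .. 4_194_304;      --  rules 130/131

      Skein_512_Block_Bytes_C : constant := 64;  --  rule 1
      Skein_Block_Type_Out    : constant := 63;  --  rule 2
   end Skein_VC_Bounds_Sketch;

Read this way, hypotheses such as H7 and H8 are simply the "every component in range" invariants of the fld_b and fld_x array fields of the context, with the type bounds made literal.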
%%% Simplified H80 on reading formula in, to give: %%% H80: fld_h(local_ctx__4) = upf_tweak_words(upf_byte_count(fld_h( local_ctx__3), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_out, first_block := true, final_block := true)) %%% Simplified H103 on reading formula in, to give: %%% H103: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H104 on reading formula in, to give: %%% H104: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H105 has been replaced by "true". (It is already present, as H83). --- Hypothesis H106 has been replaced by "true". (It is already present, as H84). --- Hypothesis H107 has been replaced by "true". (It is already present, as H85). --- Hypothesis H108 has been replaced by "true". (It is already present, as H86). --- Hypothesis H111 has been replaced by "true". (It is already present, as H89). --- Hypothesis H112 has been replaced by "true". (It is already present, as H90). --- Hypothesis H114 has been replaced by "true". (It is already present, as H92). --- Hypothesis H115 has been replaced by "true". (It is already present, as H93). --- Hypothesis H116 has been replaced by "true". (It is already present, as H94). --- Hypothesis H117 has been replaced by "true". (It is already present, as H95). --- Hypothesis H118 has been replaced by "true". (It is already present, as H96). --- Hypothesis H119 has been replaced by "true". (It is already present, as H97). --- Hypothesis H120 has been replaced by "true". (It is already present, as H98). --- Hypothesis H121 has been replaced by "true". (It is already present, as H99). --- Hypothesis H122 has been replaced by "true". (It is already present, as H103). --- Hypothesis H123 has been replaced by "true". (It is already present, as H104). --- Hypothesis H124 has been replaced by "true". (It is already present, as H83). --- Hypothesis H125 has been replaced by "true". (It is already present, as H84). --- Hypothesis H126 has been replaced by "true". (It is already present, as H85). --- Hypothesis H127 has been replaced by "true". (It is already present, as H86). --- Hypothesis H130 has been replaced by "true". (It is already present, as H89). --- Hypothesis H131 has been replaced by "true". (It is already present, as H90). --- Hypothesis H133 has been replaced by "true". (It is already present, as H92). --- Hypothesis H134 has been replaced by "true". (It is already present, as H93). --- Hypothesis H135 has been replaced by "true". (It is already present, as H94). --- Hypothesis H136 has been replaced by "true". (It is already present, as H95). --- Hypothesis H137 has been replaced by "true". (It is already present, as H96). --- Hypothesis H138 has been replaced by "true". (It is already present, as H97). --- Hypothesis H139 has been replaced by "true". (It is already present, as H98). --- Hypothesis H140 has been replaced by "true". (It is already present, as H99). 
%%% Simplified H141 on reading formula in, to give: %%% H141: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4), [i___1]) <= spark__unsigned__byte__last) %%% Simplified H153 on reading formula in, to give: %%% H153: 63 <= skein_512_block_bytes_index__last %%% Simplified H154 on reading formula in, to give: %%% H154: 63 <= skein_512_block_bytes_index__last %%% Simplified H156 on reading formula in, to give: %%% H156: 63 <= natural__last %%% Simplified H163 on reading formula in, to give: %%% H163: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H164 on reading formula in, to give: %%% H164: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H182 has been replaced by "true". (It is already present, as H32). --- Hypothesis H183 has been replaced by "true". (It is already present, as H33). --- Hypothesis H190 has been replaced by "true". (It is already present, as H186). --- Hypothesis H191 has been replaced by "true". (It is already present, as H187). %%% Simplified H193 on reading formula in, to give: %%% H193: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> spark__unsigned__byte__first <= element(result, [i___1]) and element( result, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H194 has been replaced by "true". (It is already present, as H32). --- Hypothesis H195 has been replaced by "true". (It is already present, as H33). --- Hypothesis H196 has been replaced by "true". (It is already present, as H163). --- Hypothesis H197 has been replaced by "true". (It is already present, as H164). --- Hypothesis H198 has been replaced by "true". (It is already present, as H165). --- Hypothesis H199 has been replaced by "true". (It is already present, as H166). --- Hypothesis H200 has been replaced by "true". (It is already present, as H167). --- Hypothesis H201 has been replaced by "true". (It is already present, as H168). --- Hypothesis H204 has been replaced by "true". (It is already present, as H171). --- Hypothesis H205 has been replaced by "true". (It is already present, as H172). --- Hypothesis H207 has been replaced by "true". (It is already present, as H174). --- Hypothesis H208 has been replaced by "true". (It is already present, as H175). --- Hypothesis H209 has been replaced by "true". (It is already present, as H176). --- Hypothesis H210 has been replaced by "true". (It is already present, as H177). --- Hypothesis H211 has been replaced by "true". (It is already present, as H178). --- Hypothesis H212 has been replaced by "true". (It is already present, as H179). --- Hypothesis H213 has been replaced by "true". (It is already present, as H180). --- Hypothesis H214 has been replaced by "true". (It is already present, as H181). --- Hypothesis H215 has been replaced by "true". (It is already present, as H186). --- Hypothesis H216 has been replaced by "true". (It is already present, as H187). 
--- Hypothesis H217 has been replaced by "true". (It is already present, as H186). --- Hypothesis H218 has been replaced by "true". (It is already present, as H187). --- Hypothesis H221 has been replaced by "true". (It is already present, as H30). --- Hypothesis H225 has been replaced by "true". (It is already present, as H188). --- Hypothesis H226 has been replaced by "true". (It is already present, as H189). %%% Simplified H227 on reading formula in, to give: %%% H227: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> spark__unsigned__byte__first <= element(result__6, [i___1]) and element(result__6, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H228 on reading formula in, to give: %%% H228: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [ i___1]) <= spark__unsigned__u64__last) --- Hypothesis H229 has been replaced by "true". (It is already present, as H32). --- Hypothesis H230 has been replaced by "true". (It is already present, as H33). *** Proved C1: blocks_done + 1 >= output_block_count_t__first using hypothesis H32. -S- Applied substitution rule skein_512_fi_rules(131). This was achieved by replacing all occurrences of output_block_count_t__last by: 4194304. New H33: blocks_done <= 4194304 New C2: blocks_done <= 4194303 >>> Restructured hypothesis H192 into: >>> H192: byte_count - blocks_done * skein_512_block_bytes_c < skein_512_block_bytes_c -S- Applied substitution rule skein_512_fi_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H3: blocks_done * 64 < byte_count New H4: blocks_done * 64 < result__index__subtype__1__last + 1 New H186: byte_count - blocks_done * 64 >= natural__first New H187: byte_count - blocks_done * 64 <= natural__last New H188: blocks_done * 64 >= integer__base__first New H189: blocks_done * 64 <= integer__base__last New H192: byte_count - blocks_done * 64 < 64 New H219: blocks_done * 64 >= natural__first New H220: blocks_done * 64 <= natural__last New H223: result__index__subtype__1__last >= blocks_done * 64 + ( byte_count - blocks_done * 64 - 1) New H224: byte_count - blocks_done * 64 <= ( skein_512_state_words_index__last + 1) * 8 -S- Applied substitution rule skein_512_fi_rules(2). This was achieved by replacing all occurrences of skein_block_type_out by: 63. New H78: 63 >= spark__unsigned__u6__first New H79: 63 <= spark__unsigned__u6__last New H80: fld_h(local_ctx__4) = upf_tweak_words(upf_byte_count(fld_h( local_ctx__3), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := 63, first_block := true, final_block := true)) -S- Applied substitution rule skein_512_fi_rules(6). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H188: blocks_done * 64 >= - 2147483648 -S- Applied substitution rule skein_512_fi_rules(7). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H189: blocks_done * 64 <= 2147483647 -S- Applied substitution rule skein_512_fi_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. 
New H9: fld_byte_count(fld_h(ctx)) >= 0 New H42: fld_byte_count(fld_h(local_ctx__3)) >= 0 New H83: fld_byte_count(fld_h(local_ctx__4)) >= 0 New H142: true New H146: true New H165: fld_byte_count(fld_h(local_ctx__5)) >= 0 New H186: byte_count - blocks_done * 64 >= 0 New H219: blocks_done * 64 >= 0 -S- Applied substitution rule skein_512_fi_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H10: fld_byte_count(fld_h(ctx)) <= 2147483647 New H43: fld_byte_count(fld_h(local_ctx__3)) <= 2147483647 New H84: fld_byte_count(fld_h(local_ctx__4)) <= 2147483647 New H143: true New H147: true New H155: skein_512_block_bytes_index__last <= 2147483647 New H156: true New H166: fld_byte_count(fld_h(local_ctx__5)) <= 2147483647 New H187: byte_count - blocks_done * 64 <= 2147483647 New H220: blocks_done * 64 <= 2147483647 -S- Applied substitution rule skein_512_fi_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H48: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H89: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H171: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) >= 0 New H78: true -S- Applied substitution rule skein_512_fi_rules(43). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H49: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= 63 New H90: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= 63 New H172: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) <= 63 New H79: true -S- Applied substitution rule skein_512_fi_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H51: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H92: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H174: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) >= 0 -S- Applied substitution rule skein_512_fi_rules(49). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H52: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= 127 New H93: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= 127 New H175: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) <= 127 -S- Applied substitution rule skein_512_fi_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. 
New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2] ) <= spark__unsigned__byte__last) New H103: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4), [i___2] ) <= spark__unsigned__byte__last) New H141: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4), [i___1] ) <= spark__unsigned__byte__last) New H163: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5), [i___2] ) <= spark__unsigned__byte__last) New H193: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> 0 <= element( result, [i___1]) and element(result, [i___1]) <= spark__unsigned__byte__last) New H227: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> 0 <= element( result__6, [i___1]) and element(result__6, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_fi_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H40: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2] ) <= 255) New H103: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4), [i___2] ) <= 255) New H141: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4), [i___1] ) <= 255) New H163: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5), [i___2] ) <= 255) New H193: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> 0 <= element( result, [i___1]) and element(result, [i___1]) <= 255) New H227: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> 0 <= element( result__6, [i___1]) and element(result__6, [i___1]) <= 255) -S- Applied substitution rule skein_512_fi_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. 
New H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H53: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H94: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H176: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) >= 0 -S- Applied substitution rule skein_512_fi_rules(61). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H54: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= 65535 New H95: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= 65535 New H177: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) <= 65535 -S- Applied substitution rule skein_512_fi_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H55: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H96: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H178: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) >= 0 -S- Applied substitution rule skein_512_fi_rules(67). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H56: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= 4294967295 New H97: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= 4294967295 New H179: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) <= 4294967295 -S- Applied substitution rule skein_512_fi_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H34: blocks_done >= 0 New H41: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1] ) <= spark__unsigned__u64__last) New H57: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= 0 New H98: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= 0 New H104: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4), [i___1] ) <= spark__unsigned__u64__last) New H164: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5), [i___1] ) <= spark__unsigned__u64__last) New H180: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) >= 0 New H228: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule skein_512_fi_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H35: blocks_done <= 18446744073709551615 New H58: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= 18446744073709551615 New H99: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= 18446744073709551615 New H181: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) <= 18446744073709551615 New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H41: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1] ) <= 18446744073709551615) New H104: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4), [i___1] ) <= 18446744073709551615) New H164: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5), [i___1] ) <= 18446744073709551615) New H228: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(83). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H11: fld_hash_bit_len(fld_h(ctx)) >= 0 New H44: fld_hash_bit_len(fld_h(local_ctx__3)) >= 0 New H85: fld_hash_bit_len(fld_h(local_ctx__4)) >= 0 New H167: fld_hash_bit_len(fld_h(local_ctx__5)) >= 0 -S- Applied substitution rule skein_512_fi_rules(84). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H12: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H45: fld_hash_bit_len(fld_h(local_ctx__3)) <= 2147483640 New H86: fld_hash_bit_len(fld_h(local_ctx__4)) <= 2147483640 New H168: fld_hash_bit_len(fld_h(local_ctx__5)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H26: fld_hash_bit_len(fld_h(ctx)) >= 1 New H148: fld_hash_bit_len(fld_h(local_ctx__4)) >= 1 New H157: fld_hash_bit_len(fld_h(local_ctx__5)) >= 1 -S- Applied substitution rule skein_512_fi_rules(89). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H27: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H149: fld_hash_bit_len(fld_h(local_ctx__4)) <= 2147483640 New H158: fld_hash_bit_len(fld_h(local_ctx__5)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. 
New H222: true New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H41: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__3) , [i___1]) and element(fld_x(local_ctx__3), [i___1]) <= 18446744073709551615) New H104: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__4) , [i___1]) and element(fld_x(local_ctx__4), [i___1]) <= 18446744073709551615) New H164: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx__5) , [i___1]) and element(fld_x(local_ctx__5), [i___1]) <= 18446744073709551615) New H228: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(94). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H224: byte_count - blocks_done * 64 <= 64 New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H41: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__3), [i___1]) and element(fld_x(local_ctx__3) , [i___1]) <= 18446744073709551615) New H104: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__4), [i___1]) and element(fld_x(local_ctx__4) , [i___1]) <= 18446744073709551615) New H164: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx__5), [i___1]) and element(fld_x(local_ctx__5) , [i___1]) <= 18446744073709551615) New H228: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H28: fld_byte_count(fld_h(ctx)) >= 0 New H150: fld_byte_count(fld_h(local_ctx__4)) >= 0 New H160: fld_byte_count(fld_h(local_ctx__5)) >= 0 -S- Applied substitution rule skein_512_fi_rules(99). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H29: fld_byte_count(fld_h(ctx)) <= 64 New H151: fld_byte_count(fld_h(local_ctx__4)) <= 64 New H161: fld_byte_count(fld_h(local_ctx__5)) <= 64 -S- Applied substitution rule skein_512_fi_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. 
New H152: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H40: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__3) , [i___2]) and element(fld_b(local_ctx__3), [i___2]) <= 255) New H103: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__4) , [i___2]) and element(fld_b(local_ctx__4), [i___2]) <= 255) New H141: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__4) , [i___1]) and element(fld_b(local_ctx__4), [i___1]) <= 255) New H163: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx__5) , [i___2]) and element(fld_b(local_ctx__5), [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(104). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H153: true New H154: true New H155: true New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H40: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3) , [i___2]) <= 255) New H103: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__4), [i___2]) and element(fld_b(local_ctx__4) , [i___2]) <= 255) New H141: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(local_ctx__4), [i___1]) and element(fld_b(local_ctx__4) , [i___1]) <= 255) New H163: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5) , [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H144: true -S- Applied substitution rule skein_512_fi_rules(109). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H145: true -S- Applied substitution rule skein_512_fi_rules(125). This was achieved by replacing all occurrences of output_byte_count_t__first by: 1. New H184: byte_count >= 1 -S- Applied substitution rule skein_512_fi_rules(126). This was achieved by replacing all occurrences of output_byte_count_t__last by: 268435455. New H185: byte_count <= 268435455 -S- Applied substitution rule skein_512_fi_rules(130). This was achieved by replacing all occurrences of output_block_count_t__first by: 0. New H32: blocks_done >= 0 --- Eliminated hypothesis H13 (true-hypothesis). --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H17 (true-hypothesis). --- Eliminated hypothesis H36 (true-hypothesis). --- Eliminated hypothesis H37 (true-hypothesis). --- Eliminated hypothesis H38 (true-hypothesis). --- Eliminated hypothesis H46 (true-hypothesis). --- Eliminated hypothesis H47 (true-hypothesis). --- Eliminated hypothesis H50 (true-hypothesis). --- Eliminated hypothesis H59 (true-hypothesis). --- Eliminated hypothesis H60 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H63 (true-hypothesis). --- Eliminated hypothesis H64 (true-hypothesis). --- Eliminated hypothesis H65 (true-hypothesis). 
--- Eliminated hypothesis H66 (true-hypothesis). --- Eliminated hypothesis H67 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H69 (true-hypothesis). --- Eliminated hypothesis H70 (true-hypothesis). --- Eliminated hypothesis H71 (true-hypothesis). --- Eliminated hypothesis H72 (true-hypothesis). --- Eliminated hypothesis H73 (true-hypothesis). --- Eliminated hypothesis H74 (true-hypothesis). --- Eliminated hypothesis H75 (true-hypothesis). --- Eliminated hypothesis H76 (true-hypothesis). --- Eliminated hypothesis H77 (true-hypothesis). --- Eliminated hypothesis H87 (true-hypothesis). --- Eliminated hypothesis H88 (true-hypothesis). --- Eliminated hypothesis H91 (true-hypothesis). --- Eliminated hypothesis H105 (true-hypothesis). --- Eliminated hypothesis H106 (true-hypothesis). --- Eliminated hypothesis H107 (true-hypothesis). --- Eliminated hypothesis H108 (true-hypothesis). --- Eliminated hypothesis H109 (true-hypothesis). --- Eliminated hypothesis H110 (true-hypothesis). --- Eliminated hypothesis H111 (true-hypothesis). --- Eliminated hypothesis H112 (true-hypothesis). --- Eliminated hypothesis H113 (true-hypothesis). --- Eliminated hypothesis H114 (true-hypothesis). --- Eliminated hypothesis H115 (true-hypothesis). --- Eliminated hypothesis H116 (true-hypothesis). --- Eliminated hypothesis H117 (true-hypothesis). --- Eliminated hypothesis H118 (true-hypothesis). --- Eliminated hypothesis H119 (true-hypothesis). --- Eliminated hypothesis H120 (true-hypothesis). --- Eliminated hypothesis H121 (true-hypothesis). --- Eliminated hypothesis H122 (true-hypothesis). --- Eliminated hypothesis H123 (true-hypothesis). --- Eliminated hypothesis H124 (true-hypothesis). --- Eliminated hypothesis H125 (true-hypothesis). --- Eliminated hypothesis H126 (true-hypothesis). --- Eliminated hypothesis H127 (true-hypothesis). --- Eliminated hypothesis H128 (true-hypothesis). --- Eliminated hypothesis H129 (true-hypothesis). --- Eliminated hypothesis H130 (true-hypothesis). --- Eliminated hypothesis H131 (true-hypothesis). --- Eliminated hypothesis H132 (true-hypothesis). --- Eliminated hypothesis H133 (true-hypothesis). --- Eliminated hypothesis H134 (true-hypothesis). --- Eliminated hypothesis H135 (true-hypothesis). --- Eliminated hypothesis H136 (true-hypothesis). --- Eliminated hypothesis H137 (true-hypothesis). --- Eliminated hypothesis H138 (true-hypothesis). --- Eliminated hypothesis H139 (true-hypothesis). --- Eliminated hypothesis H140 (true-hypothesis). --- Eliminated hypothesis H169 (true-hypothesis). --- Eliminated hypothesis H170 (true-hypothesis). --- Eliminated hypothesis H173 (true-hypothesis). --- Eliminated hypothesis H182 (true-hypothesis). --- Eliminated hypothesis H183 (true-hypothesis). --- Eliminated hypothesis H190 (true-hypothesis). --- Eliminated hypothesis H191 (true-hypothesis). --- Eliminated hypothesis H194 (true-hypothesis). --- Eliminated hypothesis H195 (true-hypothesis). --- Eliminated hypothesis H196 (true-hypothesis). --- Eliminated hypothesis H197 (true-hypothesis). --- Eliminated hypothesis H198 (true-hypothesis). --- Eliminated hypothesis H199 (true-hypothesis). --- Eliminated hypothesis H200 (true-hypothesis). --- Eliminated hypothesis H201 (true-hypothesis). --- Eliminated hypothesis H202 (true-hypothesis). --- Eliminated hypothesis H203 (true-hypothesis). --- Eliminated hypothesis H204 (true-hypothesis). --- Eliminated hypothesis H205 (true-hypothesis). --- Eliminated hypothesis H206 (true-hypothesis). 
--- Eliminated hypothesis H207 (true-hypothesis). --- Eliminated hypothesis H208 (true-hypothesis). --- Eliminated hypothesis H209 (true-hypothesis). --- Eliminated hypothesis H210 (true-hypothesis). --- Eliminated hypothesis H211 (true-hypothesis). --- Eliminated hypothesis H212 (true-hypothesis). --- Eliminated hypothesis H213 (true-hypothesis). --- Eliminated hypothesis H214 (true-hypothesis). --- Eliminated hypothesis H215 (true-hypothesis). --- Eliminated hypothesis H216 (true-hypothesis). --- Eliminated hypothesis H217 (true-hypothesis). --- Eliminated hypothesis H218 (true-hypothesis). --- Eliminated hypothesis H221 (true-hypothesis). --- Eliminated hypothesis H225 (true-hypothesis). --- Eliminated hypothesis H226 (true-hypothesis). --- Eliminated hypothesis H229 (true-hypothesis). --- Eliminated hypothesis H230 (true-hypothesis). --- Eliminated hypothesis H142 (true-hypothesis). --- Eliminated hypothesis H146 (true-hypothesis). --- Eliminated hypothesis H143 (true-hypothesis). --- Eliminated hypothesis H147 (true-hypothesis). --- Eliminated hypothesis H156 (true-hypothesis). --- Eliminated hypothesis H78 (true-hypothesis). --- Eliminated hypothesis H79 (true-hypothesis). --- Eliminated hypothesis H222 (true-hypothesis). --- Eliminated hypothesis H152 (true-hypothesis). --- Eliminated hypothesis H153 (true-hypothesis). --- Eliminated hypothesis H154 (true-hypothesis). --- Eliminated hypothesis H155 (true-hypothesis). --- Eliminated hypothesis H144 (true-hypothesis). --- Eliminated hypothesis H145 (true-hypothesis). --- Eliminated hypothesis H220 (duplicate of H189). --- Eliminated hypothesis H28 (duplicate of H9). --- Eliminated hypothesis H150 (duplicate of H83). --- Eliminated hypothesis H27 (duplicate of H12). --- Eliminated hypothesis H149 (duplicate of H86). --- Eliminated hypothesis H168 (duplicate of H158). --- Eliminated hypothesis H165 (duplicate of H160). --- Eliminated hypothesis H34 (duplicate of H32). --- Eliminated hypothesis H223 (duplicate of H2). --- Eliminated hypothesis H10 (redundant, given H29). --- Eliminated hypothesis H11 (redundant, given H26). --- Eliminated hypothesis H32 (redundant, given H219). --- Eliminated hypothesis H33 (redundant, given H3 & H185). --- Eliminated hypothesis H35 (redundant, given H189). --- Eliminated hypothesis H44 (redundant, given H39). --- Eliminated hypothesis H83 (redundant, given H82). --- Eliminated hypothesis H84 (redundant, given H82). --- Eliminated hypothesis H85 (redundant, given H148). --- Eliminated hypothesis H151 (redundant, given H82). --- Eliminated hypothesis H166 (redundant, given H161). --- Eliminated hypothesis H167 (redundant, given H157). --- Eliminated hypothesis H186 (redundant, given H3). --- Eliminated hypothesis H187 (redundant, given H192). --- Eliminated hypothesis H188 (redundant, given H219). --- Eliminated hypothesis H189 (redundant, given H3 & H185). --- Eliminated hypothesis H224 (redundant, given H192). -S- Eliminated hypothesis H6. This was achieved by replacing all occurrences of blocks_required by: (byte_count + 63) div 64. New H5: blocks_done < (byte_count + 63) div 64 -S- Substituted hypothesis H81. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( local_ctx__4)) by: fld_hash_bit_len(fld_h(local_ctx__3)). -S- Substituted hypothesis H100. This was achieved by replacing all occurrences of fld_x(local_ctx__4) by: fld_x(local_ctx__3). -S- Substituted hypothesis H101. This was achieved by replacing all occurrences of fld_b(local_ctx__4) by: fld_b(local_ctx__3). 
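By this point in procedure_skein_512_final_30 the two hypotheses that drive conclusion C2 are already in literal form: H3, blocks_done * 64 < byte_count (after skein_512_block_bytes_c was rewritten to 64), and H185, byte_count <= 268435455 (the output_byte_count_t upper bound). The "Proved C2: blocks_done <= 4194303 using hypotheses H3 & H185" step recorded just below is plain integer arithmetic; as a worked check (not part of the Simplifier output):

   \[
     64 \cdot \mathit{blocks\_done} \;<\; \mathit{byte\_count} \;\le\; 268435455
       \;<\; 268435456 \;=\; 64 \cdot 4194304
     \;\Longrightarrow\; \mathit{blocks\_done} < 4194304
     \;\Longrightarrow\; \mathit{blocks\_done} \le 4194303.
   \]

The same two facts are also why H33 (blocks_done <= 4194304) and H189 (blocks_done * 64 <= 2147483647) could be discarded above as redundant given H3 & H185.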
-S- Substituted hypothesis H162. This was achieved by replacing all occurrences of fld_byte_count(fld_h( local_ctx__5)) by: fld_byte_count(fld_h(local_ctx__4)). -S- Substituted hypothesis H159. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( local_ctx__5)) by: fld_hash_bit_len(fld_h(local_ctx__3)). *** Proved C2: blocks_done <= 4194303 using hypotheses H3 & H185. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_31. @@@@@@@@@@ *** Proved C1: true *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_32. @@@@@@@@@@ *** Proved C1: true *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_final_33. @@@@@@@@@@ %%% Simplified H7 on reading formula in, to give: %%% H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) *** Proved C5: result__index__subtype__1__first = 0 using hypothesis H5. -S- Applied substitution rule skein_512_fi_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: hash_bit_len_of(ctx) >= 1 New C1: fld_hash_bit_len(fld_h(ctx)) >= 1 -S- Applied substitution rule skein_512_fi_rules(89). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: hash_bit_len_of(ctx) <= 2147483640 New C2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: byte_count_of(ctx) >= 0 New C3: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_fi_rules(99). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: byte_count_of(ctx) <= 64 New C4: fld_byte_count(fld_h(ctx)) <= 64 -S- Applied substitution rule skein_512_fi_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New H9: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_fi_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H10: fld_byte_count(fld_h(ctx)) <= 2147483647 -S- Applied substitution rule skein_512_fi_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_fi_rules(43). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule skein_512_fi_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_fi_rules(49). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule skein_512_fi_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. 
New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_fi_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H7: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_fi_rules(61). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule skein_512_fi_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_fi_rules(67). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule skein_512_fi_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_fi_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H8: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(83). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H11: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_fi_rules(84). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H12: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_fi_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(94). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_fi_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. 
New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule skein_512_fi_rules(104). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H7: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) *** Proved C2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 using hypothesis H12. *** Proved C3: fld_byte_count(fld_h(ctx)) >= 0 using hypothesis H9. --- Eliminated hypothesis H13 (true-hypothesis). --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H17 (true-hypothesis). +++ New H26: integer__size >= 0 +++ New H27: natural__size >= 0 +++ New H28: spark__unsigned__u6__size >= 0 +++ New H29: spark__unsigned__u7__size >= 0 +++ New H30: spark__unsigned__byte__size >= 0 +++ New H31: spark__unsigned__u16__size >= 0 +++ New H32: spark__unsigned__u32__size >= 0 +++ New H33: spark__unsigned__u64__size >= 0 +++ New H34: spark__crypto__word_count_t__size >= 0 +++ New H35: hash_bit_length__size >= 0 +++ New H36: initialized_hash_bit_length__size >= 0 +++ New H37: skein_512_state_words_index__size >= 0 +++ New H38: skein_512_block_bytes_count__size >= 0 +++ New H39: skein_512_block_bytes_index__size >= 0 +++ New H40: positive_block_512_count_t__size >= 0 +++ New H41: skein_512_context__size >= 0 +++ New H42: result__index__subtype__1__first <= result__index__subtype__1__last +++ New H43: context_header__size >= 0 +++ New H44: output_byte_count_t__size >= 0 +++ New H45: output_block_count_t__size >= 0 +++ New H46: positive_output_block_count_t__size >= 0 +++ New H47: result__index__subtype__1__first >= 0 +++ New H48: result__index__subtype__1__last >= 0 +++ New H49: result__index__subtype__1__last <= 2147483647 +++ New H50: result__index__subtype__1__first <= 2147483647 *** Proved C6: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 This was achieved by applying the rewrite rule skein_rules(2) [from rulefile skein.rlu] to rewrite this conclusion to: C6: (hash_bit_len_of(ctx) + 7) div 8 <= result__index__subtype__1__last + 1 This rule could be applied because its side-conditions hold, as follows: <<< From H6, proved: (hash_bit_len_of(ctx) + 7) div 8 <= result__index__subtype__1__last + 1 <<< Immediate condition checktype(ctx, skein_512_context) evaluated successfully *** Proved C1: fld_hash_bit_len(fld_h(ctx)) >= 1 This was achieved by applying the rewrite rule skein_rules(2) [from rulefile skein.rlu] to rewrite this conclusion to: C1: hash_bit_len_of(ctx) >= 1 This rule could be applied because its side-conditions hold, as follows: <<< From H1, proved: hash_bit_len_of(ctx) >= 1 <<< Immediate condition checktype(ctx, skein_512_context) evaluated successfully *** Proved C4: fld_byte_count(fld_h(ctx)) <= 64 This was achieved by applying the rewrite rule skein_rules(1) [from rulefile skein.rlu] to rewrite this conclusion to: C4: byte_count_of(ctx) <= 64 This rule could be applied because its side-conditions hold, as follows: <<< From H4, proved: byte_count_of(ctx) <= 64 <<< Immediate condition checktype(ctx, skein_512_context) evaluated successfully *** PROVED VC. VCN 33: Summary of user rule application. FIL skein.rlu RUL skein_rules(2) CON 6, 1 RUL skein_rules(1) CON 4 @@@@@@@@@@ VC: procedure_skein_512_final_34. @@@@@@@@@@ *** Proved C1: true *** PROVED VC. OVR Overall summary of VCs using user rules. 
FIL skein.rlu RUL skein_rules(2) VCS 33 RUL skein_rules(1) VCS 33 spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/0000755000175000017500000000000011753202331027545 5ustar eugeneugenspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_4.log0000644000175000017500000000202611712513676031636 0ustar eugeneugenSPARK Simplifier Pro Edition Reading round_4.fdl (for inherited FDL type declarations) Processing round_4.vcg ... Simplified VC: 1 - All conclusions proved Simplified VC: 2 - All conclusions proved Simplified VC: 3 - All conclusions proved Simplified VC: 4 - All conclusions proved Simplified VC: 5 - All conclusions proved Simplified VC: 6 - All conclusions proved Simplified VC: 7 - All conclusions proved Simplified VC: 8 - All conclusions proved Simplified VC: 9 - All conclusions proved Simplified VC: 10 - All conclusions proved Simplified VC: 11 - All conclusions proved Simplified VC: 12 - All conclusions proved Simplified VC: 13 - All conclusions proved Simplified VC: 14 - All conclusions proved Simplified VC: 15 - All conclusions proved Simplified VC: 16 - All conclusions proved Simplified VC: 17 - All conclusions proved Simplified VC: 18 - All conclusions proved Simplified VC: 19 - All conclusions proved Simplified VC: 20 - All conclusions proved Automatic simplification completed. Simplified output sent to round_4.siv. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_6.rls0000644000175000017500000000622211712513676031661 0ustar eugeneugen /*********************************************************/ /*Proof Rule Declarations*/ /*Examiner Pro Edition*/ /*********************************************************/ /*procedure Skein.Skein_512_Process_Block.Round_6*/ rule_family round_6_rules: X requires [X:any] & X <= Y requires [X:ire, Y:ire] & X >= Y requires [X:ire, Y:ire]. round_6_rules(1): r_512_5_0 may_be_replaced_by 13. round_6_rules(2): r_512_5_1 may_be_replaced_by 50. round_6_rules(3): r_512_5_2 may_be_replaced_by 10. round_6_rules(4): r_512_5_3 may_be_replaced_by 17. round_6_rules(5): integer__size >= 0 may_be_deduced. round_6_rules(6): integer__first may_be_replaced_by -2147483648. round_6_rules(7): integer__last may_be_replaced_by 2147483647. round_6_rules(8): integer__base__first may_be_replaced_by -2147483648. round_6_rules(9): integer__base__last may_be_replaced_by 2147483647. round_6_rules(10): interfaces__unsigned_64__size >= 0 may_be_deduced. round_6_rules(11): interfaces__unsigned_64__size may_be_replaced_by 64. round_6_rules(12): interfaces__unsigned_64__first may_be_replaced_by 0. round_6_rules(13): interfaces__unsigned_64__last may_be_replaced_by 18446744073709551615. round_6_rules(14): interfaces__unsigned_64__base__first may_be_replaced_by 0. round_6_rules(15): interfaces__unsigned_64__base__last may_be_replaced_by 18446744073709551615. round_6_rules(16): interfaces__unsigned_64__modulus may_be_replaced_by 18446744073709551616. round_6_rules(17): spark__unsigned__u64__size >= 0 may_be_deduced. round_6_rules(18): spark__unsigned__u64__first may_be_replaced_by 0. round_6_rules(19): spark__unsigned__u64__last may_be_replaced_by 18446744073709551615. round_6_rules(20): spark__unsigned__u64__base__first may_be_replaced_by 0. round_6_rules(21): spark__unsigned__u64__base__last may_be_replaced_by 18446744073709551615. round_6_rules(22): spark__unsigned__u64__modulus may_be_replaced_by 18446744073709551616. 
round_6_rules(23): spark__unsigned__shift_count__size >= 0 may_be_deduced. round_6_rules(24): spark__unsigned__shift_count__first may_be_replaced_by 0. round_6_rules(25): spark__unsigned__shift_count__last may_be_replaced_by 64. round_6_rules(26): spark__unsigned__shift_count__base__first may_be_replaced_by -2147483648. round_6_rules(27): spark__unsigned__shift_count__base__last may_be_replaced_by 2147483647. round_6_rules(28): spark__crypto__i8__size >= 0 may_be_deduced. round_6_rules(29): spark__crypto__i8__first may_be_replaced_by 0. round_6_rules(30): spark__crypto__i8__last may_be_replaced_by 7. round_6_rules(31): spark__crypto__i8__base__first may_be_replaced_by -2147483648. round_6_rules(32): spark__crypto__i8__base__last may_be_replaced_by 2147483647. round_6_rules(33): spark__crypto__word_count_t__size >= 0 may_be_deduced. round_6_rules(34): spark__crypto__word_count_t__first may_be_replaced_by 0. round_6_rules(35): spark__crypto__word_count_t__last may_be_replaced_by 268435455. round_6_rules(36): spark__crypto__word_count_t__base__first may_be_replaced_by -2147483648. round_6_rules(37): spark__crypto__word_count_t__base__last may_be_replaced_by 2147483647. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_6.vlg0000644000175000017500000000324511712765060031647 0ustar eugeneugen Non-option args: round_6 Option args: -fuse-concls \ -decls=[SUPRESSED IN PLAIN MODE] \ -unique-working-files \ -rules=[SUPRESSED IN PLAIN MODE] \ -rules=[SUPRESSED IN PLAIN MODE] \ -elim-enums \ -ground-eval-exp \ -abstract-exp \ -abstract-divmod \ -utick \ -gtick \ -longtick \ -echo-final-stats \ -csv-reports-include-unit-kind \ -level=warning \ -bit-type \ -bit-type-bool-eq-to-iff \ -refine-types \ -refine-int-subrange-type \ -abstract-arrays-records-late \ -elim-array-constructors \ -add-array-select-box-update-axioms \ -abstract-array-box-updates \ -add-array-select-update-axioms \ -abstract-array-select-updates \ -abstract-array-types \ -abstract-record-types \ -abstract-bit-ops \ -abstract-bit-valued-eqs \ -abstract-bit-valued-int-le \ -elim-bit-type-and-consts \ -abstract-reals \ -lift-quants \ -strip-quantifier-patterns \ -elim-type-aliases \ -interface-mode=smtlib \ -refine-bit-type-as-int-subtype \ -refine-bit-eq-equiv \ -elim-record-constructors \ -add-record-select-update-axioms \ -abstract-record-selects-updates \ -logic=AUFNIRA \ -report=round_6 \ -prover-command=[SUPRESSED IN PLAIN MODE] \ -siv \ -plain \ -read-unit-rlu-files \ -read-directory-rlu-files \ -delete-rules-with-undeclared-ids \ Total ERROR messages: 0 Total WARNING messages: 0 Summary Stats: true: 0 (-nan%) unproven: 0 (-nan%) error: 0 (-nan%) total: 0 spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_5.slg0000644000175000017500000025145211712513676031654 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Round_5 @@@@@@@@@@ VC: procedure_round_5_1. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_5_rules(18). 
This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_5_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_5_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_5_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_5_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [0]) + element(x, [1])) mod 18446744073709551616 >= 0 New C2: (element(x, [0]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [0]) + element(x, [1])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [0]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_5_2. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [1]) <= interfaces__unsigned_64__last *** Proved C5: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 1 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 1 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_5_rules(1). This was achieved by replacing all occurrences of r_512_4_0 by: 39. New C1: 39 >= spark__unsigned__shift_count__first New C2: 39 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_5_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_5_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. 
New C2: true -S- Applied substitution rule round_5_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [1]) >= 0 -S- Applied substitution rule round_5_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [1]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_5_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [0]) + element(x, [1])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [0]) + element(x, [1])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_5_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [0]) + element(x, [1])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_5_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [0]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_5_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_5_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [1]) >= 0 using hypothesis H2. *** Proved C4: element(x, [1]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_5_3. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [1]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . 
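VCs procedure_round_5_1 to procedure_round_5_5 range-check a single MIX step on the word pair x(0), x(1): an addition reduced mod interfaces__unsigned_64__modulus, a left rotation by r_512_4_0 = 39, and a bitwise xor of the two results. A minimal sketch of that step using the standard Interfaces package; the subprogram and parameter names are illustrative, not those of Skein.Skein_512_Process_Block:

with Interfaces; use Interfaces;

procedure Mix_Step_Sketch (X0, X1 : in out Unsigned_64) is
   --  One MIX application as range-checked by the VCs above and below.
   R : constant Natural := 39;   --  r_512_4_0 per round_5_rules(1)
begin
   X0 := X0 + X1;                     --  (x(0) + x(1)) mod 2**64: modular addition wraps
   X1 := Rotate_Left (X1, R) xor X0;  --  rotate_left_64(x(1), 39) xor the new x(0)
end Mix_Step_Sketch;

All three operations stay within 0 .. 2**64 - 1 by construction, which is what the simplifier records: a mod with positive modulus is non-negative and below the modulus (C1 and C2 of VC 1), rotation permutes the 64 bits of an in-range operand (H19 and H20 of VC 3), and the xor of two in-range words sets no bit above bit 63 (C1 and C2 of VC 4, discharged from H3, H4, H21 and H22).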
%%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0) <= spark__unsigned__u64__last -S- Applied substitution rule round_5_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0) >= 0 -S- Applied substitution rule round_5_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0) <= 18446744073709551615 -S- Applied substitution rule round_5_rules(1). This was achieved by replacing all occurrences of r_512_4_0 by: 39. New H11: 39 >= spark__unsigned__shift_count__first New H12: 39 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [1]), 39) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [1]), 39) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [1]), 39) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [1]), 39) <= 18446744073709551615 -S- Applied substitution rule round_5_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [1]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [1]), 39) >= 0 -S- Applied substitution rule round_5_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [1]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [1]), 39) <= 18446744073709551615 -S- Applied substitution rule round_5_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [0]) + element(x, [1])) mod 18446744073709551616 >= 0 New H4: (element(x, [0]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_5_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_5_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_5_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. 
New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_5_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [1]), 39) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [1]), 39) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_5_4. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [1]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0), (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0), (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 0 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 0 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 1 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 1 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_5_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0), (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_5_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0), (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0), (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. *** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0), (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_5_5. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [1]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . 
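The quantified hypothesis H2 that opens each of these VCs bounds the array x componentwise: the index i___1 ranges over spark__crypto__i8 (0 .. 7, per round_5_rules(29) and (30)) and each element lies in spark__unsigned__u64 (0 .. 18446744073709551615, per rules (18) and (19)). In Ada terms the object being reasoned about has roughly the following shape; this is a sketch reconstructed from those bounds, and the real declarations (presumably in the SPARK.Crypto and SPARK.Unsigned packages, going by the spark__crypto__ and spark__unsigned__ prefixes) may differ in detail:

package Crypto_Types_Sketch is
   --  Index and component types inferred from the FDL bounds quoted
   --  in the rule applications above.
   type I8  is range 0 .. 7;
   type U64 is mod 2 ** 64;

   --  The array x quantified over in H2: element(x, [i]) in the FDL
   --  corresponds to X (I) on such an array.
   type Word_Array is array (I8) of U64;
end Crypto_Types_Sketch;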
%%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0) <= spark__unsigned__u64__last %%% Simplified H23 on reading formula in, to give: %%% H23: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0), (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified H24 on reading formula in, to give: %%% H24: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0), (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last --- Hypothesis H25 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H26 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H27 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H28 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H29 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . %%% Simplified C2 on reading formula in, to give: %%% C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0), (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [0], (element(x, [0]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0), (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) *** Proved C1: true *** Proved C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0), (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [0], (element(x, [0]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_4_0), (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) using hypotheses H2, H3, H4, H5, H6, H7, H8, H23 & H24. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_5_6. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_5_rules(18). 
This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_5_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_5_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_5_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_5_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [2]) + element(x, [3])) mod 18446744073709551616 >= 0 New C2: (element(x, [2]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [2]) + element(x, [3])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [2]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_5_7. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [3]) <= interfaces__unsigned_64__last *** Proved C5: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 3 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 3 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_5_rules(2). This was achieved by replacing all occurrences of r_512_4_1 by: 30. New C1: 30 >= spark__unsigned__shift_count__first New C2: 30 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_5_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_5_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. 
New C2: true -S- Applied substitution rule round_5_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [3]) >= 0 -S- Applied substitution rule round_5_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [3]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_5_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [2]) + element(x, [3])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [2]) + element(x, [3])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_5_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [2]) + element(x, [3])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_5_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [2]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_5_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_5_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [3]) >= 0 using hypothesis H2. *** Proved C4: element(x, [3]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_5_8. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [3]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . 
%%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1) <= spark__unsigned__u64__last -S- Applied substitution rule round_5_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1) >= 0 -S- Applied substitution rule round_5_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1) <= 18446744073709551615 -S- Applied substitution rule round_5_rules(2). This was achieved by replacing all occurrences of r_512_4_1 by: 30. New H11: 30 >= spark__unsigned__shift_count__first New H12: 30 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [3]), 30) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [3]), 30) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [3]), 30) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [3]), 30) <= 18446744073709551615 -S- Applied substitution rule round_5_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [3]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [3]), 30) >= 0 -S- Applied substitution rule round_5_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [3]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [3]), 30) <= 18446744073709551615 -S- Applied substitution rule round_5_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [2]) + element(x, [3])) mod 18446744073709551616 >= 0 New H4: (element(x, [2]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_5_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_5_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_5_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. 
New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_5_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [3]), 30) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [3]), 30) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_5_9. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [3]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1), (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1), (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 2 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 2 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 3 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 3 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_5_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1), (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_5_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1), (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1), (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. *** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1), (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_5_10. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [3]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . 
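The VCs for the remaining word pairs follow the same scheme: (x(2), x(3)) with r_512_4_1 = 30 in VCs 6 to 10, and (x(4), x(5)) with r_512_4_2 = 34 from VC 11 onwards. Collecting the pairs and rotation amounts visible in this file, the round body being verified has roughly the shape sketched below; this is illustrative only, not the Round_5 implementation, and rotation constants beyond round_5_rules(1) .. (3) are not quoted in this excerpt and are not filled in here:

with Interfaces; use Interfaces;

procedure Round_5_Shape_Sketch is
   --  Word state and MIX helper mirroring the structure the VCs check;
   --  the names and the all-zero initial value are placeholders.
   type Word_Index is range 0 .. 7;
   type Word_Array is array (Word_Index) of Unsigned_64;

   X : Word_Array := (others => 0);

   procedure Mix (A, B : in out Unsigned_64; R : Natural) is
   begin
      A := A + B;                      --  wraps modulo 2**64
      B := Rotate_Left (B, R) xor A;
   end Mix;
begin
   Mix (X (0), X (1), 39);   --  r_512_4_0
   Mix (X (2), X (3), 30);   --  r_512_4_1
   Mix (X (4), X (5), 34);   --  r_512_4_2
end Round_5_Shape_Sketch;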
%%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1) <= spark__unsigned__u64__last %%% Simplified H23 on reading formula in, to give: %%% H23: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1), (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified H24 on reading formula in, to give: %%% H24: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1), (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last --- Hypothesis H25 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H26 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H27 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H28 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H29 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . %%% Simplified C2 on reading formula in, to give: %%% C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], bit__xor( spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1), (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [2], (element(x, [2]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], bit__xor( spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1), (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) *** Proved C1: true *** Proved C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], bit__xor( spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1), (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [2], (element(x, [2]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], bit__xor( spark__unsigned__rotate_left_64(element(x, [3]), r_512_4_1), (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) using hypotheses H2, H3, H4, H5, H6, H7, H8, H23 & H24. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_5_11. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_5_rules(18). 
This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_5_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_5_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_5_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_5_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [4]) + element(x, [5])) mod 18446744073709551616 >= 0 New C2: (element(x, [4]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [4]) + element(x, [5])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [4]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_5_12. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [5]) <= interfaces__unsigned_64__last *** Proved C5: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 5 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 5 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_5_rules(3). This was achieved by replacing all occurrences of r_512_4_2 by: 34. New C1: 34 >= spark__unsigned__shift_count__first New C2: 34 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_5_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_5_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. 
New C2: true -S- Applied substitution rule round_5_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [5]) >= 0 -S- Applied substitution rule round_5_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [5]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_5_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [4]) + element(x, [5])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [4]) + element(x, [5])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_5_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [4]) + element(x, [5])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_5_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [4]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_5_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_5_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [5]) >= 0 using hypothesis H2. *** Proved C4: element(x, [5]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_5_13. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [5]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . 
%%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2) <= spark__unsigned__u64__last -S- Applied substitution rule round_5_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2) >= 0 -S- Applied substitution rule round_5_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2) <= 18446744073709551615 -S- Applied substitution rule round_5_rules(3). This was achieved by replacing all occurrences of r_512_4_2 by: 34. New H11: 34 >= spark__unsigned__shift_count__first New H12: 34 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [5]), 34) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [5]), 34) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [5]), 34) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [5]), 34) <= 18446744073709551615 -S- Applied substitution rule round_5_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [5]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [5]), 34) >= 0 -S- Applied substitution rule round_5_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [5]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [5]), 34) <= 18446744073709551615 -S- Applied substitution rule round_5_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [4]) + element(x, [5])) mod 18446744073709551616 >= 0 New H4: (element(x, [4]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_5_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_5_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_5_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. 
New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_5_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [5]), 34) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [5]), 34) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_5_14. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [5]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2), (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2), (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 4 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 4 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 5 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 5 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_5_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2), (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_5_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2), (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2), (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. *** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2), (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_5_15. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [5]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . 
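The VCs procedure_round_5_11 to procedure_round_5_14 above are the run-time checks for one MIX step: a wrapping 64-bit addition of words 4 and 5, a left rotation of word 5 by the constant R_512_4_2 = 34, and an xor of the two results. The following is only a minimal sketch of the kind of Ada statements that give rise to such checks; apart from the word indices and the rotation distance, which are taken from the log, every name and declaration here is an assumed stand-in rather than the package's actual source.

   --  Illustrative sketch only: assumed stand-ins for the SPARK.Unsigned
   --  and SPARK.Crypto declarations named in the VC identifiers.
   with Interfaces; use Interfaces;

   procedure Mix_Sketch is
      subtype Word_Index is Natural range 0 .. 7;        --  spark__crypto__i8
      type Word_Block is array (Word_Index) of Unsigned_64;
      X : Word_Block := (others => 0);
   begin
      --  Unsigned_64 is modular, so "+" wraps mod 2**64; this matches the
      --  "(... + ...) mod interfaces__unsigned_64__modulus" terms in the VCs.
      X (4) := X (4) + X (5);
      --  Constant-distance rotation (34 = R_512_4_2 in the log) followed by
      --  xor; these produce the shift-count, rotate and bit__xor range checks.
      X (5) := Rotate_Left (X (5), 34) xor X (4);
   end Mix_Sketch;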
%%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2) <= spark__unsigned__u64__last %%% Simplified H23 on reading formula in, to give: %%% H23: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2), (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified H24 on reading formula in, to give: %%% H24: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2), (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last --- Hypothesis H25 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H26 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H27 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H28 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H29 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . %%% Simplified C2 on reading formula in, to give: %%% C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2), (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [4], (element(x, [4]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2), (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) *** Proved C1: true *** Proved C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2), (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [4], (element(x, [4]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_4_2), (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) using hypotheses H2, H3, H4, H5, H6, H7, H8, H23 & H24. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_5_16. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_5_rules(18). 
This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_5_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_5_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_5_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_5_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [6]) + element(x, [7])) mod 18446744073709551616 >= 0 New C2: (element(x, [6]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [6]) + element(x, [7])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [6]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_5_17. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [7]) <= interfaces__unsigned_64__last *** Proved C5: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 7 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 7 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_5_rules(4). This was achieved by replacing all occurrences of r_512_4_3 by: 24. New C1: 24 >= spark__unsigned__shift_count__first New C2: 24 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_5_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_5_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. 
New C2: true -S- Applied substitution rule round_5_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [7]) >= 0 -S- Applied substitution rule round_5_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [7]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_5_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [6]) + element(x, [7])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [6]) + element(x, [7])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_5_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [6]) + element(x, [7])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_5_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [6]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_5_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_5_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [7]) >= 0 using hypothesis H2. *** Proved C4: element(x, [7]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_5_18. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [7]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . 
%%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [7]), r_512_4_3) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [7]), r_512_4_3) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [7]), r_512_4_3) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [7]), r_512_4_3) <= spark__unsigned__u64__last -S- Applied substitution rule round_5_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [7]), r_512_4_3) >= 0 -S- Applied substitution rule round_5_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [7]), r_512_4_3) <= 18446744073709551615 -S- Applied substitution rule round_5_rules(4). This was achieved by replacing all occurrences of r_512_4_3 by: 24. New H11: 24 >= spark__unsigned__shift_count__first New H12: 24 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [7]), 24) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [7]), 24) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [7]), 24) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [7]), 24) <= 18446744073709551615 -S- Applied substitution rule round_5_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [7]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [7]), 24) >= 0 -S- Applied substitution rule round_5_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [7]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [7]), 24) <= 18446744073709551615 -S- Applied substitution rule round_5_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [6]) + element(x, [7])) mod 18446744073709551616 >= 0 New H4: (element(x, [6]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_5_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_5_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_5_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. 
New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_5_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [7]), 24) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [7]), 24) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_5_19. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [7]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [7]), r_512_4_3) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [7]), r_512_4_3) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [7]), r_512_4_3) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [7]), r_512_4_3) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_4_3), (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_4_3), (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 6 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 6 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 7 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 7 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_5_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [7]), r_512_4_3) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_4_3), (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_5_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [7]), r_512_4_3) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_4_3), (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_4_3), (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. *** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_4_3), (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_5_20. @@@@@@@@@@ *** Proved C1: true *** PROVED VC. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_8.siv0000644000175000017500000000710411712513676031664 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Round_8 For path(s) from start to run-time check associated with statement of line 486: procedure_round_8_1. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 487: procedure_round_8_2. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 487: procedure_round_8_3. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 488: procedure_round_8_4. *** true . /* all conclusions proved */ For path(s) from start to assertion of line 491: procedure_round_8_5. *** true . /* all conclusions proved */ For path(s) from assertion of line 491 to run-time check associated with statement of line 493: procedure_round_8_6. *** true . /* all conclusions proved */ For path(s) from assertion of line 491 to run-time check associated with statement of line 494: procedure_round_8_7. *** true . /* all conclusions proved */ For path(s) from assertion of line 491 to run-time check associated with statement of line 494: procedure_round_8_8. *** true . /* all conclusions proved */ For path(s) from assertion of line 491 to run-time check associated with statement of line 495: procedure_round_8_9. *** true . 
/* all conclusions proved */ For path(s) from assertion of line 491 to assertion of line 497: procedure_round_8_10. *** true . /* all conclusions proved */ For path(s) from assertion of line 497 to run-time check associated with statement of line 499: procedure_round_8_11. *** true . /* all conclusions proved */ For path(s) from assertion of line 497 to run-time check associated with statement of line 500: procedure_round_8_12. *** true . /* all conclusions proved */ For path(s) from assertion of line 497 to run-time check associated with statement of line 500: procedure_round_8_13. *** true . /* all conclusions proved */ For path(s) from assertion of line 497 to run-time check associated with statement of line 501: procedure_round_8_14. *** true . /* all conclusions proved */ For path(s) from assertion of line 497 to assertion of line 503: procedure_round_8_15. *** true . /* all conclusions proved */ For path(s) from assertion of line 503 to run-time check associated with statement of line 505: procedure_round_8_16. *** true . /* all conclusions proved */ For path(s) from assertion of line 503 to run-time check associated with statement of line 506: procedure_round_8_17. *** true . /* all conclusions proved */ For path(s) from assertion of line 503 to run-time check associated with statement of line 506: procedure_round_8_18. *** true . /* all conclusions proved */ For path(s) from assertion of line 503 to run-time check associated with statement of line 507: procedure_round_8_19. *** true . /* all conclusions proved */ For path(s) from assertion of line 503 to finish: procedure_round_8_20. *** true . /* all conclusions proved */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_3.slg0000644000175000017500000025145211712513676031652 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Round_3 @@@@@@@@@@ VC: procedure_round_3_1. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_3_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_3_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_3_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. 
New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_3_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_3_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [4]) + element(x, [1])) mod 18446744073709551616 >= 0 New C2: (element(x, [4]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [4]) + element(x, [1])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [4]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_3_2. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [1]) <= interfaces__unsigned_64__last *** Proved C5: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 1 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 1 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_3_rules(1). This was achieved by replacing all occurrences of r_512_2_0 by: 17. New C1: 17 >= spark__unsigned__shift_count__first New C2: 17 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_3_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_3_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule round_3_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [1]) >= 0 -S- Applied substitution rule round_3_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [1]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_3_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [4]) + element(x, [1])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [4]) + element(x, [1])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_3_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
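Once the substitution rules have replaced the symbolic bounds (spark__unsigned__u64__first/__last, spark__crypto__i8__first/__last and interfaces__unsigned_64__modulus) by their literal values, conclusions of the C1/C2 form seen in procedure_round_5_11, procedure_round_5_16 and procedure_round_3_1 reduce to one arithmetic fact about mod with a positive modulus, restated here as a worked note for non-negative a and b:

   \[ 0 \;\le\; (a + b) \bmod 2^{64} \;\le\; 2^{64} - 1 \;=\; 18446744073709551615 \]

which is why the Simplifier discharges both range-check conclusions directly, with no appeal to the hypotheses.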
New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [4]) + element(x, [1])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_3_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [4]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_3_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_3_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [1]) >= 0 using hypothesis H2. *** Proved C4: element(x, [1]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_3_3. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [1]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0) <= spark__unsigned__u64__last -S- Applied substitution rule round_3_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0) >= 0 -S- Applied substitution rule round_3_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0) <= 18446744073709551615 -S- Applied substitution rule round_3_rules(1). This was achieved by replacing all occurrences of r_512_2_0 by: 17. New H11: 17 >= spark__unsigned__shift_count__first New H12: 17 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [1]), 17) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [1]), 17) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [1]), 17) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [1]), 17) <= 18446744073709551615 -S- Applied substitution rule round_3_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [1]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [1]), 17) >= 0 -S- Applied substitution rule round_3_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [1]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [1]), 17) <= 18446744073709551615 -S- Applied substitution rule round_3_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [4]) + element(x, [1])) mod 18446744073709551616 >= 0 New H4: (element(x, [4]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_3_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_3_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_3_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_3_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [1]), 17) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [1]), 17) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_3_4. 
@@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [1]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0), (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0), (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 4 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 4 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 1 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 1 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_3_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0), (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_3_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0), (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0), (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. *** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0), (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_3_5. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [1]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0) <= spark__unsigned__u64__last %%% Simplified H23 on reading formula in, to give: %%% H23: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0), (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified H24 on reading formula in, to give: %%% H24: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0), (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last --- Hypothesis H25 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H26 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H27 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H28 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H29 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H30 has been replaced by "true". 
(It is already present, as H6) . %%% Simplified C2 on reading formula in, to give: %%% C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0), (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [4], (element(x, [4]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0), (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) *** Proved C1: true *** Proved C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0), (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [4], (element(x, [4]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_2_0), (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) using hypotheses H2, H3, H4, H5, H6, H7, H8, H23 & H24. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_3_6. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_3_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_3_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_3_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_3_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. 
New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_3_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [6]) + element(x, [3])) mod 18446744073709551616 >= 0 New C2: (element(x, [6]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [6]) + element(x, [3])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [6]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_3_7. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [3]) <= interfaces__unsigned_64__last *** Proved C5: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 3 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 3 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_3_rules(2). This was achieved by replacing all occurrences of r_512_2_1 by: 49. New C1: 49 >= spark__unsigned__shift_count__first New C2: 49 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_3_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_3_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule round_3_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [3]) >= 0 -S- Applied substitution rule round_3_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [3]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_3_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [6]) + element(x, [3])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [6]) + element(x, [3])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_3_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [6]) + element(x, [3])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_3_rules(19). 
This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [6]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_3_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_3_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [3]) >= 0 using hypothesis H2. *** Proved C4: element(x, [3]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_3_8. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [3]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1) <= spark__unsigned__u64__last -S- Applied substitution rule round_3_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1) >= 0 -S- Applied substitution rule round_3_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1) <= 18446744073709551615 -S- Applied substitution rule round_3_rules(2). This was achieved by replacing all occurrences of r_512_2_1 by: 49. New H11: 49 >= spark__unsigned__shift_count__first New H12: 49 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [3]), 49) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [3]), 49) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [3]), 49) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [3]), 49) <= 18446744073709551615 -S- Applied substitution rule round_3_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [3]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [3]), 49) >= 0 -S- Applied substitution rule round_3_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [3]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [3]), 49) <= 18446744073709551615 -S- Applied substitution rule round_3_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [6]) + element(x, [3])) mod 18446744073709551616 >= 0 New H4: (element(x, [6]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_3_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_3_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_3_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_3_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [3]), 49) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [3]), 49) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_3_9. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . 
%%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [3]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1), (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1), (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 6 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 6 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 3 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 3 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_3_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1), (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_3_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1), (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1), (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. 
*** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1), (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_3_10. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [3]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1) <= spark__unsigned__u64__last %%% Simplified H23 on reading formula in, to give: %%% H23: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1), (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified H24 on reading formula in, to give: %%% H24: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1), (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last --- Hypothesis H25 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H26 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H27 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H28 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H29 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . 
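The group of VCs from procedure_round_3_6 up to procedure_round_3_10 (whose conclusion is simplified just below) range-checks one add/rotate/xor MIX step on words 6 and 3 of the state, with rotation amount 49 as given by substitution rule round_3_rules(2). As a reading aid, a minimal, self-contained Ada sketch of code with that shape follows; the unit name, the Word_Index and State declarations and the initial value of X are illustrative assumptions, not the declarations of the package being analysed.

   with Interfaces; use Interfaces;

   procedure Mix_Step_Sketch is
      --  Assumed, illustrative declarations: an eight-word state of 64-bit
      --  modular values, matching the index range 0 .. 7 and the value range
      --  0 .. 18446744073709551615 recorded in hypothesis H2 above.
      type Word_Index is range 0 .. 7;
      type State is array (Word_Index) of Unsigned_64;

      X : State := (others => 0);
   begin
      --  Modular addition: Unsigned_64 wraps modulo 2**64, which is the
      --  "(element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus"
      --  term whose bounds VC procedure_round_3_6 establishes.
      X (6) := X (6) + X (3);

      --  Rotate-left by 49, then xor with the freshly updated word; VCs
      --  procedure_round_3_7 to procedure_round_3_9 range-check the shift
      --  amount, the rotated value and the xor result.
      X (3) := Rotate_Left (X (3), 49) xor X (6);
   end Mix_Step_Sketch;

Because Unsigned_64 is a modular type, both results necessarily lie in 0 .. 2**64 - 1, which is exactly what the C1/C2 pairs of these VCs assert.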
%%% Simplified C2 on reading formula in, to give: %%% C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], bit__xor( spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1), (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [6], (element(x, [6]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], bit__xor( spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1), (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) *** Proved C1: true *** Proved C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], bit__xor( spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1), (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [6], (element(x, [6]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], bit__xor( spark__unsigned__rotate_left_64(element(x, [3]), r_512_2_1), (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) using hypotheses H2, H3, H4, H5, H6, H7, H8, H23 & H24. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_3_11. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_3_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_3_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_3_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_3_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_3_rules(16). 
This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [0]) + element(x, [5])) mod 18446744073709551616 >= 0 New C2: (element(x, [0]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [0]) + element(x, [5])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [0]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_3_12. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [5]) <= interfaces__unsigned_64__last *** Proved C5: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 5 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 5 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_3_rules(3). This was achieved by replacing all occurrences of r_512_2_2 by: 36. New C1: 36 >= spark__unsigned__shift_count__first New C2: 36 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_3_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_3_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule round_3_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [5]) >= 0 -S- Applied substitution rule round_3_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [5]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_3_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [0]) + element(x, [5])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [0]) + element(x, [5])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_3_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [0]) + element(x, [5])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_3_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [0]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_3_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_3_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [5]) >= 0 using hypothesis H2. *** Proved C4: element(x, [5]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_3_13. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [5]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2) <= spark__unsigned__u64__last -S- Applied substitution rule round_3_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2) >= 0 -S- Applied substitution rule round_3_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2) <= 18446744073709551615 -S- Applied substitution rule round_3_rules(3). This was achieved by replacing all occurrences of r_512_2_2 by: 36. New H11: 36 >= spark__unsigned__shift_count__first New H12: 36 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [5]), 36) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [5]), 36) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [5]), 36) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [5]), 36) <= 18446744073709551615 -S- Applied substitution rule round_3_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [5]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [5]), 36) >= 0 -S- Applied substitution rule round_3_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [5]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [5]), 36) <= 18446744073709551615 -S- Applied substitution rule round_3_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [0]) + element(x, [5])) mod 18446744073709551616 >= 0 New H4: (element(x, [0]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_3_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_3_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_3_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_3_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [5]), 36) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [5]), 36) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_3_14. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . 
%%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [5]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2), (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2), (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 0 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 0 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 5 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 5 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_3_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2), (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_3_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2), (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2), (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. 
*** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2), (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_3_15. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [5]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2) <= spark__unsigned__u64__last %%% Simplified H23 on reading formula in, to give: %%% H23: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2), (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified H24 on reading formula in, to give: %%% H24: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2), (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last --- Hypothesis H25 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H26 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H27 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H28 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H29 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . 
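The conclusion C2 simplified just below is the invariant-preservation obligation for this MIX step: it quantifies over the state after the two updates, at index 0 (the modular sum) and index 5 (the rotate/xor result). Schematically, for each i in the index range,

   element(update(update(x, [0], A), [5], B), [i]) = B               if i = 5
                                                   = A               if i = 0
                                                   = element(x, [i]) otherwise

where A is the modular sum bounded by H3 and H4, B is the xor term bounded by H23 and H24, and the unchanged elements are bounded by the quantified hypothesis H2. That is why the Simplifier reports the proof as using H2, H3, H4, H23 and H24, together with the index-range hypotheses H5 to H8.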
%%% Simplified C2 on reading formula in, to give: %%% C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2), (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [0], (element(x, [0]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2), (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) *** Proved C1: true *** Proved C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2), (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [0], (element(x, [0]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_2_2), (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) using hypotheses H2, H3, H4, H5, H6, H7, H8, H23 & H24. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_3_16. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_3_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_3_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_3_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_3_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_3_rules(16). 
This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [2]) + element(x, [7])) mod 18446744073709551616 >= 0 New C2: (element(x, [2]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [2]) + element(x, [7])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [2]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_3_17. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [7]) <= interfaces__unsigned_64__last *** Proved C5: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 7 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 7 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_3_rules(4). This was achieved by replacing all occurrences of r_512_2_3 by: 39. New C1: 39 >= spark__unsigned__shift_count__first New C2: 39 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_3_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_3_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule round_3_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [7]) >= 0 -S- Applied substitution rule round_3_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [7]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_3_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [2]) + element(x, [7])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [2]) + element(x, [7])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_3_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [2]) + element(x, [7])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_3_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [2]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_3_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_3_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [7]) >= 0 using hypothesis H2. *** Proved C4: element(x, [7]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_3_18. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [7]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [7]), r_512_2_3) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [7]), r_512_2_3) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [7]), r_512_2_3) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [7]), r_512_2_3) <= spark__unsigned__u64__last -S- Applied substitution rule round_3_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [7]), r_512_2_3) >= 0 -S- Applied substitution rule round_3_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [7]), r_512_2_3) <= 18446744073709551615 -S- Applied substitution rule round_3_rules(4). This was achieved by replacing all occurrences of r_512_2_3 by: 39. New H11: 39 >= spark__unsigned__shift_count__first New H12: 39 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [7]), 39) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [7]), 39) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [7]), 39) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [7]), 39) <= 18446744073709551615 -S- Applied substitution rule round_3_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [7]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [7]), 39) >= 0 -S- Applied substitution rule round_3_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [7]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [7]), 39) <= 18446744073709551615 -S- Applied substitution rule round_3_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [2]) + element(x, [7])) mod 18446744073709551616 >= 0 New H4: (element(x, [2]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_3_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_3_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_3_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_3_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [7]), 39) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [7]), 39) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_3_19. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . 
%%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [7]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [7]), r_512_2_3) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [7]), r_512_2_3) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [7]), r_512_2_3) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [7]), r_512_2_3) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_2_3), (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_2_3), (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 2 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 2 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 7 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 7 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_3_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [7]), r_512_2_3) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_2_3), (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_3_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [7]), r_512_2_3) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_2_3), (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_2_3), (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. 
*** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_2_3), (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_3_20. @@@@@@@@@@ *** Proved C1: true *** PROVED VC. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_3.vsm0000644000175000017500000000005311712765060031653 0ustar eugeneugenround_3,0,0,0,0,0,0,0,-nan,-nan,-nan,-nan, spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_1.vlg0000644000175000017500000000324511712765060031642 0ustar eugeneugen Non-option args: round_1 Option args: -fuse-concls \ -decls=[SUPRESSED IN PLAIN MODE] \ -unique-working-files \ -rules=[SUPRESSED IN PLAIN MODE] \ -rules=[SUPRESSED IN PLAIN MODE] \ -elim-enums \ -ground-eval-exp \ -abstract-exp \ -abstract-divmod \ -utick \ -gtick \ -longtick \ -echo-final-stats \ -csv-reports-include-unit-kind \ -level=warning \ -bit-type \ -bit-type-bool-eq-to-iff \ -refine-types \ -refine-int-subrange-type \ -abstract-arrays-records-late \ -elim-array-constructors \ -add-array-select-box-update-axioms \ -abstract-array-box-updates \ -add-array-select-update-axioms \ -abstract-array-select-updates \ -abstract-array-types \ -abstract-record-types \ -abstract-bit-ops \ -abstract-bit-valued-eqs \ -abstract-bit-valued-int-le \ -elim-bit-type-and-consts \ -abstract-reals \ -lift-quants \ -strip-quantifier-patterns \ -elim-type-aliases \ -interface-mode=smtlib \ -refine-bit-type-as-int-subtype \ -refine-bit-eq-equiv \ -elim-record-constructors \ -add-record-select-update-axioms \ -abstract-record-selects-updates \ -logic=AUFNIRA \ -report=round_1 \ -prover-command=[SUPRESSED IN PLAIN MODE] \ -siv \ -plain \ -read-unit-rlu-files \ -read-directory-rlu-files \ -delete-rules-with-undeclared-ids \ Total ERROR messages: 0 Total WARNING messages: 0 Summary Stats: true: 0 (-nan%) unproven: 0 (-nan%) error: 0 (-nan%) total: 0 spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_2.slg0000644000175000017500000025145211712513676031651 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Round_2 @@@@@@@@@@ VC: procedure_round_2_1. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_2_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_2_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_2_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_2_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_2_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [2]) + element(x, [1])) mod 18446744073709551616 >= 0 New C2: (element(x, [2]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [2]) + element(x, [1])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [2]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_2_2. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [1]) <= interfaces__unsigned_64__last *** Proved C5: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 1 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 1 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_2_rules(1). This was achieved by replacing all occurrences of r_512_1_0 by: 33. New C1: 33 >= spark__unsigned__shift_count__first New C2: 33 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_2_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_2_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule round_2_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [1]) >= 0 -S- Applied substitution rule round_2_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [1]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_2_rules(16). 
This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [2]) + element(x, [1])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [2]) + element(x, [1])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_2_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [2]) + element(x, [1])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_2_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [2]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_2_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_2_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [1]) >= 0 using hypothesis H2. *** Proved C4: element(x, [1]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_2_3. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [1]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0) <= spark__unsigned__u64__last -S- Applied substitution rule round_2_rules(18). 
This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0) >= 0 -S- Applied substitution rule round_2_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0) <= 18446744073709551615 -S- Applied substitution rule round_2_rules(1). This was achieved by replacing all occurrences of r_512_1_0 by: 33. New H11: 33 >= spark__unsigned__shift_count__first New H12: 33 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [1]), 33) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [1]), 33) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [1]), 33) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [1]), 33) <= 18446744073709551615 -S- Applied substitution rule round_2_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [1]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [1]), 33) >= 0 -S- Applied substitution rule round_2_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [1]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [1]), 33) <= 18446744073709551615 -S- Applied substitution rule round_2_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [2]) + element(x, [1])) mod 18446744073709551616 >= 0 New H4: (element(x, [2]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_2_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_2_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_2_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_2_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [1]), 33) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [1]), 33) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_2_4. 
@@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [1]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0), (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0), (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 2 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 2 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 1 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 1 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_2_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0), (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_2_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0), (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0), (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. *** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0), (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_2_5. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [1]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0) <= spark__unsigned__u64__last %%% Simplified H23 on reading formula in, to give: %%% H23: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0), (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified H24 on reading formula in, to give: %%% H24: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0), (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last --- Hypothesis H25 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H26 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H27 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H28 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H29 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H30 has been replaced by "true". 
(It is already present, as H6) . %%% Simplified C2 on reading formula in, to give: %%% C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0), (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [2], (element(x, [2]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0), (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) *** Proved C1: true *** Proved C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0), (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [2], (element(x, [2]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_1_0), (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) using hypotheses H2, H3, H4, H5, H6, H7, H8, H23 & H24. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_2_6. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_2_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_2_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_2_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_2_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. 
New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_2_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [4]) + element(x, [7])) mod 18446744073709551616 >= 0 New C2: (element(x, [4]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [4]) + element(x, [7])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [4]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_2_7. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [7]) <= interfaces__unsigned_64__last *** Proved C5: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 7 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 7 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_2_rules(2). This was achieved by replacing all occurrences of r_512_1_1 by: 27. New C1: 27 >= spark__unsigned__shift_count__first New C2: 27 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_2_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_2_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule round_2_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [7]) >= 0 -S- Applied substitution rule round_2_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [7]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_2_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [4]) + element(x, [7])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [4]) + element(x, [7])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_2_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [4]) + element(x, [7])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_2_rules(19). 
This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [4]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_2_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_2_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [7]) >= 0 using hypothesis H2. *** Proved C4: element(x, [7]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_2_8. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [7]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1) <= spark__unsigned__u64__last -S- Applied substitution rule round_2_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1) >= 0 -S- Applied substitution rule round_2_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1) <= 18446744073709551615 -S- Applied substitution rule round_2_rules(2). This was achieved by replacing all occurrences of r_512_1_1 by: 27. New H11: 27 >= spark__unsigned__shift_count__first New H12: 27 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [7]), 27) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [7]), 27) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [7]), 27) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [7]), 27) <= 18446744073709551615 -S- Applied substitution rule round_2_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [7]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [7]), 27) >= 0 -S- Applied substitution rule round_2_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [7]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [7]), 27) <= 18446744073709551615 -S- Applied substitution rule round_2_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [4]) + element(x, [7])) mod 18446744073709551616 >= 0 New H4: (element(x, [4]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_2_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_2_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_2_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_2_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [7]), 27) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [7]), 27) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_2_9. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . 
%%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [7]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1), (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1), (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 4 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 4 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 7 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 7 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_2_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1), (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_2_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1), (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1), (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. 
*** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1), (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_2_10. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [7]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1) <= spark__unsigned__u64__last %%% Simplified H23 on reading formula in, to give: %%% H23: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1), (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified H24 on reading formula in, to give: %%% H24: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1), (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last --- Hypothesis H25 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H26 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H27 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H28 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H29 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . 
%%% Simplified C2 on reading formula in, to give: %%% C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], bit__xor( spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1), (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [4], (element(x, [4]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], bit__xor( spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1), (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) *** Proved C1: true *** Proved C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], bit__xor( spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1), (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [4], (element(x, [4]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], bit__xor( spark__unsigned__rotate_left_64(element(x, [7]), r_512_1_1), (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) using hypotheses H2, H3, H4, H5, H6, H7, H8, H23 & H24. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_2_11. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_2_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_2_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_2_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_2_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_2_rules(16). 
This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [6]) + element(x, [5])) mod 18446744073709551616 >= 0 New C2: (element(x, [6]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [6]) + element(x, [5])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [6]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_2_12. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [5]) <= interfaces__unsigned_64__last *** Proved C5: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 5 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 5 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_2_rules(3). This was achieved by replacing all occurrences of r_512_1_2 by: 14. New C1: 14 >= spark__unsigned__shift_count__first New C2: 14 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_2_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_2_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule round_2_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [5]) >= 0 -S- Applied substitution rule round_2_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [5]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_2_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [6]) + element(x, [5])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [6]) + element(x, [5])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_2_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [6]) + element(x, [5])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_2_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [6]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_2_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_2_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [5]) >= 0 using hypothesis H2. *** Proved C4: element(x, [5]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_2_13. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [5]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2) <= spark__unsigned__u64__last -S- Applied substitution rule round_2_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2) >= 0 -S- Applied substitution rule round_2_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2) <= 18446744073709551615 -S- Applied substitution rule round_2_rules(3). This was achieved by replacing all occurrences of r_512_1_2 by: 14. New H11: 14 >= spark__unsigned__shift_count__first New H12: 14 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [5]), 14) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [5]), 14) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [5]), 14) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [5]), 14) <= 18446744073709551615 -S- Applied substitution rule round_2_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [5]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [5]), 14) >= 0 -S- Applied substitution rule round_2_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [5]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [5]), 14) <= 18446744073709551615 -S- Applied substitution rule round_2_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [6]) + element(x, [5])) mod 18446744073709551616 >= 0 New H4: (element(x, [6]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_2_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_2_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_2_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_2_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [5]), 14) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [5]), 14) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_2_14. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . 
%%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [5]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2), (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2), (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 6 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 6 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 5 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 5 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_2_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2), (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_2_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2), (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2), (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. 
*** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2), (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_2_15. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [5]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2) <= spark__unsigned__u64__last %%% Simplified H23 on reading formula in, to give: %%% H23: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2), (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified H24 on reading formula in, to give: %%% H24: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2), (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last --- Hypothesis H25 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H26 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H27 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H28 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H29 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . 
%%% Simplified C2 on reading formula in, to give: %%% C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2), (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [6], (element(x, [6]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2), (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) *** Proved C1: true *** Proved C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2), (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [6], (element(x, [6]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_1_2), (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) using hypotheses H2, H3, H4, H5, H6, H7, H8, H23 & H24. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_2_16. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_2_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_2_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_2_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_2_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_2_rules(16). 
This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [0]) + element(x, [3])) mod 18446744073709551616 >= 0 New C2: (element(x, [0]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [0]) + element(x, [3])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [0]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_2_17. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [3]) <= interfaces__unsigned_64__last *** Proved C5: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 3 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 3 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_2_rules(4). This was achieved by replacing all occurrences of r_512_1_3 by: 42. New C1: 42 >= spark__unsigned__shift_count__first New C2: 42 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_2_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_2_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule round_2_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [3]) >= 0 -S- Applied substitution rule round_2_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [3]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_2_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [0]) + element(x, [3])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [0]) + element(x, [3])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_2_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [0]) + element(x, [3])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_2_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
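(Aside: the range conclusions of procedure_round_2_16 earlier in this log follow from arithmetic alone. Once the -S- substitutions have rewritten interfaces__unsigned_64__modulus to 18446744073709551616 (that is, 2**64) and spark__unsigned__u64__first/__last to 0 and 18446744073709551615 (2**64 - 1), the conclusions have the shape 0 <= (a + b) mod 2**64 <= 2**64 - 1, which holds for any non-negative a and b by the definition of mod with a positive modulus; this is why the Simplifier discharges them without citing any hypotheses.)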
New H4: (element(x, [0]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_2_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_2_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [3]) >= 0 using hypothesis H2. *** Proved C4: element(x, [3]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_2_18. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [3]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [3]), r_512_1_3) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [3]), r_512_1_3) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [3]), r_512_1_3) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [3]), r_512_1_3) <= spark__unsigned__u64__last -S- Applied substitution rule round_2_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [3]), r_512_1_3) >= 0 -S- Applied substitution rule round_2_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [3]), r_512_1_3) <= 18446744073709551615 -S- Applied substitution rule round_2_rules(4). This was achieved by replacing all occurrences of r_512_1_3 by: 42. New H11: 42 >= spark__unsigned__shift_count__first New H12: 42 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [3]), 42) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [3]), 42) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [3]), 42) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [3]), 42) <= 18446744073709551615 -S- Applied substitution rule round_2_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [3]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [3]), 42) >= 0 -S- Applied substitution rule round_2_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [3]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [3]), 42) <= 18446744073709551615 -S- Applied substitution rule round_2_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [0]) + element(x, [3])) mod 18446744073709551616 >= 0 New H4: (element(x, [0]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_2_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_2_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_2_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_2_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [3]), 42) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [3]), 42) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_2_19. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . 
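(Aside: VCs procedure_round_2_16 through procedure_round_2_19 all arise from a single add/rotate/xor step on the word pair x(0)/x(3), with the rotation amount r_512_1_3 that rule round_2_rules(4) rewrites to 42. The fragment below is only an illustrative sketch of that shape, not the shipped Skein source: the unit name Mix_0_3_Sketch, the types Word_Index and U64_Seq and the all-zero initial value are invented for the example, while the indices, the rotation amount and the operations are taken from the VC text.)

with Interfaces; use Interfaces;

procedure Mix_0_3_Sketch is
   type Word_Index is range 0 .. 7;              --  matches spark__crypto__i8 (0 .. 7)
   type U64_Seq is array (Word_Index) of Unsigned_64;
   X : U64_Seq := (others => 0);                 --  placeholder contents
begin
   X (0) := X (0) + X (3);                       --  wraps mod 2**64 (VC 16)
   X (3) := Rotate_Left (X (3), 42) xor X (0);   --  rotate of the old X (3) (VCs 17/18), xor with the new X (0) (VC 19)
end Mix_0_3_Sketch;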
%%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [3]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [3]), r_512_1_3) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [3]), r_512_1_3) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [3]), r_512_1_3) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [3]), r_512_1_3) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_1_3), (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_1_3), (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 0 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 0 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 3 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 3 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_2_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [3]), r_512_1_3) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_1_3), (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_2_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [3]), r_512_1_3) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_1_3), (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_1_3), (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. 
*** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_1_3), (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_2_20. @@@@@@@@@@ *** Proved C1: true *** PROVED VC. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_2.fdl0000644000175000017500000000577111712513676031632 0ustar eugeneugen {*******************************************************} {FDL Declarations} {Examiner Pro Edition} {*******************************************************} {procedure Skein.Skein_512_Process_Block.Round_2} title procedure round_2; function round__(real) : integer; type interfaces__unsigned_64 = integer; type spark__unsigned__u64 = integer; type spark__unsigned__shift_count = integer; type spark__crypto__i8 = integer; type spark__crypto__word_count_t = integer; type spark__crypto__u64_seq = array [ spark__crypto__word_count_t] of spark__unsigned__u64; const r_512_1_0 : integer = pending; const r_512_1_1 : integer = pending; const r_512_1_2 : integer = pending; const r_512_1_3 : integer = pending; const spark__crypto__word_count_t__base__first : integer = pending; const spark__crypto__word_count_t__base__last : integer = pending; const spark__crypto__i8__base__first : integer = pending; const spark__crypto__i8__base__last : integer = pending; const spark__unsigned__shift_count__base__first : integer = pending; const spark__unsigned__shift_count__base__last : integer = pending; const spark__unsigned__u64__base__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__base__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__last : interfaces__unsigned_64 = pending; const integer__base__first : integer = pending; const integer__base__last : integer = pending; const spark__crypto__word_count_t__first : integer = pending; const spark__crypto__word_count_t__last : integer = pending; const spark__crypto__word_count_t__size : integer = pending; const spark__crypto__i8__first : integer = pending; const spark__crypto__i8__last : integer = pending; const spark__crypto__i8__size : integer = pending; const spark__unsigned__shift_count__first : integer = pending; const spark__unsigned__shift_count__last : integer = pending; const spark__unsigned__shift_count__size : integer = pending; const spark__unsigned__u64__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__last : interfaces__unsigned_64 = pending; const spark__unsigned__u64__modulus : integer = pending; const spark__unsigned__u64__size : integer = pending; const interfaces__unsigned_64__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__modulus : integer = pending; const interfaces__unsigned_64__size : integer = pending; const integer__first : integer = pending; const integer__last : integer = pending; const integer__size : integer = pending; var x : spark__crypto__u64_seq; function spark__unsigned__rotate_left_64( interfaces__unsigned_64, integer) : interfaces__unsigned_64; end; ././@LongLink0000000000000000000000000000015200000000000011563 Lustar 
rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/threefish_block.vlgspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/threefish_bloc0000644000175000017500000000326511712765060032466 0ustar eugeneugen Non-option args: threefish_block Option args: -fuse-concls \ -decls=[SUPRESSED IN PLAIN MODE] \ -unique-working-files \ -rules=[SUPRESSED IN PLAIN MODE] \ -rules=[SUPRESSED IN PLAIN MODE] \ -elim-enums \ -ground-eval-exp \ -abstract-exp \ -abstract-divmod \ -utick \ -gtick \ -longtick \ -echo-final-stats \ -csv-reports-include-unit-kind \ -level=warning \ -bit-type \ -bit-type-bool-eq-to-iff \ -refine-types \ -refine-int-subrange-type \ -abstract-arrays-records-late \ -elim-array-constructors \ -add-array-select-box-update-axioms \ -abstract-array-box-updates \ -add-array-select-update-axioms \ -abstract-array-select-updates \ -abstract-array-types \ -abstract-record-types \ -abstract-bit-ops \ -abstract-bit-valued-eqs \ -abstract-bit-valued-int-le \ -elim-bit-type-and-consts \ -abstract-reals \ -lift-quants \ -strip-quantifier-patterns \ -elim-type-aliases \ -interface-mode=smtlib \ -refine-bit-type-as-int-subtype \ -refine-bit-eq-equiv \ -elim-record-constructors \ -add-record-select-update-axioms \ -abstract-record-selects-updates \ -logic=AUFNIRA \ -report=threefish_block \ -prover-command=[SUPRESSED IN PLAIN MODE] \ -siv \ -plain \ -read-unit-rlu-files \ -read-directory-rlu-files \ -delete-rules-with-undeclared-ids \ Total ERROR messages: 0 Total WARNING messages: 0 Summary Stats: true: 0 (-nan%) unproven: 0 (-nan%) error: 0 (-nan%) total: 0 ././@LongLink0000000000000000000000000000016100000000000011563 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/do_first_key_injection.vctspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/do_first_key_i0000644000175000017500000000000011712513676032463 0ustar eugeneugen././@LongLink0000000000000000000000000000015000000000000011561 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_ts.vcgspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_ts.0000644000175000017500000002450511712513676032437 0ustar eugeneugen ******************************************************* Semantic Analysis of SPARK Text Examiner Pro Edition ******************************************************* procedure Skein.Skein_512_Process_Block.Initialize_TS For path(s) from start to run-time check associated with statement of line 547: procedure_initialize_ts_1. H1: true . H2: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H3: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H4: fld_byte_count(fld_h(ctx)) >= natural__first . H5: fld_byte_count(fld_h(ctx)) <= natural__last . H6: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H7: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H8: true . H9: true . H10: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . 
H11: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H12: true . H13: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H14: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H15: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H16: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H17: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H18: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H19: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H20: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H21: for_all(i_: integer, ((i_ >= modifier_words_index__first) and ( i_ <= modifier_words_index__last)) -> ((element( tweak_to_words(fld_tweak_words(fld_h(ctx))), [i_]) >= spark__unsigned__u64__first) and (element( tweak_to_words(fld_tweak_words(fld_h(ctx))), [i_]) <= spark__unsigned__u64__last))) . -> C1: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [0]) >= spark__unsigned__u64__first . C2: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [0]) <= spark__unsigned__u64__last . C3: 0 >= modifier_words_index__first . C4: 0 <= modifier_words_index__last . For path(s) from start to run-time check associated with statement of line 548: procedure_initialize_ts_2. H1: true . H2: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H3: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H4: fld_byte_count(fld_h(ctx)) >= natural__first . H5: fld_byte_count(fld_h(ctx)) <= natural__last . H6: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H7: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H8: true . H9: true . H10: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H11: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H12: true . H13: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H14: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H15: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H16: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H17: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H18: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H19: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H20: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H21: for_all(i_: integer, ((i_ >= modifier_words_index__first) and ( i_ <= modifier_words_index__last)) -> ((element( tweak_to_words(fld_tweak_words(fld_h(ctx))), [i_]) >= spark__unsigned__u64__first) and (element( tweak_to_words(fld_tweak_words(fld_h(ctx))), [i_]) <= spark__unsigned__u64__last))) . H22: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [0]) >= spark__unsigned__u64__first . H23: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [0]) <= spark__unsigned__u64__last . 
H24: 0 >= modifier_words_index__first . H25: 0 <= modifier_words_index__last . H26: for_all(i_: integer, ((i_ >= modifier_words_index__first) and ( i_ <= modifier_words_index__last)) -> ((element( tweak_to_words(fld_tweak_words(fld_h(ctx))), [i_]) >= spark__unsigned__u64__first) and (element( tweak_to_words(fld_tweak_words(fld_h(ctx))), [i_]) <= spark__unsigned__u64__last))) . -> C1: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [1]) >= spark__unsigned__u64__first . C2: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [1]) <= spark__unsigned__u64__last . C3: 1 >= modifier_words_index__first . C4: 1 <= modifier_words_index__last . For path(s) from start to run-time check associated with statement of line 550: procedure_initialize_ts_3. H1: true . H2: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H3: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H4: fld_byte_count(fld_h(ctx)) >= natural__first . H5: fld_byte_count(fld_h(ctx)) <= natural__last . H6: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H7: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H8: true . H9: true . H10: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H11: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H12: true . H13: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H14: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H15: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H16: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H17: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H18: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H19: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H20: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H21: for_all(i_: integer, ((i_ >= modifier_words_index__first) and ( i_ <= modifier_words_index__last)) -> ((element( tweak_to_words(fld_tweak_words(fld_h(ctx))), [i_]) >= spark__unsigned__u64__first) and (element( tweak_to_words(fld_tweak_words(fld_h(ctx))), [i_]) <= spark__unsigned__u64__last))) . H22: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [0]) >= spark__unsigned__u64__first . H23: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [0]) <= spark__unsigned__u64__last . H24: 0 >= modifier_words_index__first . H25: 0 <= modifier_words_index__last . H26: for_all(i_: integer, ((i_ >= modifier_words_index__first) and ( i_ <= modifier_words_index__last)) -> ((element( tweak_to_words(fld_tweak_words(fld_h(ctx))), [i_]) >= spark__unsigned__u64__first) and (element( tweak_to_words(fld_tweak_words(fld_h(ctx))), [i_]) <= spark__unsigned__u64__last))) . H27: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [1]) >= spark__unsigned__u64__first . H28: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [1]) <= spark__unsigned__u64__last . H29: 1 >= modifier_words_index__first . H30: 1 <= modifier_words_index__last . 
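(Aside: the conclusions that follow for this path, the run-time check at line 550, bound bit__xor of tweak words 0 and 1 of the current tweak value. In the standard Skein construction that xor forms a third tweak-schedule word; the sketch below is illustrative only -- the name TS, its bounds and the assignment target are not taken from this source, only the xor of words 0 and 1 is.)

with Interfaces; use Interfaces;

procedure Tweak_Schedule_Sketch is
   TS : array (0 .. 2) of Unsigned_64 := (others => 0);  --  illustrative name and shape
begin
   --  Only the xor of tweak words 0 and 1 appears in the VC above; storing it
   --  as a third schedule word is the usual Skein construction, assumed here.
   TS (2) := TS (0) xor TS (1);
end Tweak_Schedule_Sketch;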
-> C1: bit__xor(element(tweak_to_words(fld_tweak_words(fld_h( ctx))), [0]), element(tweak_to_words(fld_tweak_words(fld_h( ctx))), [1])) >= spark__unsigned__u64__first . C2: bit__xor(element(tweak_to_words(fld_tweak_words(fld_h( ctx))), [0]), element(tweak_to_words(fld_tweak_words(fld_h( ctx))), [1])) <= spark__unsigned__u64__last . C3: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [1]) >= spark__unsigned__u64__first . C4: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [1]) <= spark__unsigned__u64__last . C5: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [0]) >= spark__unsigned__u64__first . C6: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [0]) <= spark__unsigned__u64__last . For path(s) from start to finish: procedure_initialize_ts_4. *** true . /* trivially true VC removed by Examiner */ ././@LongLink0000000000000000000000000000016200000000000011564 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_key_schedule.logspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_key0000644000175000017500000000111211712513676032510 0ustar eugeneugenSPARK Simplifier Pro Edition Reading initialize_key_schedule.fdl (for inherited FDL type declarations) Processing initialize_key_schedule.vcg ... Simplified VC: 1 - All conclusions proved Simplified VC: 2 - All conclusions proved Simplified VC: 3 - All conclusions proved Simplified VC: 4 - All conclusions proved Simplified VC: 5 - All conclusions proved Simplified VC: 6 - All conclusions proved Simplified VC: 7 - 1 conclusion remains unproven Simplified VC: 8 - All conclusions proved Automatic simplification completed. Simplified output sent to initialize_key_schedule.siv. ././@LongLink0000000000000000000000000000015100000000000011562 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/update_context.fdlspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/update_context0000644000175000017500000002041611712513676032535 0ustar eugeneugen {*******************************************************} {FDL Declarations} {Examiner Pro Edition} {*******************************************************} {procedure Skein.Skein_512_Process_Block.Update_Context} title procedure update_context; function round__(real) : integer; type natural = integer; type interfaces__unsigned_8 = integer; type interfaces__unsigned_16 = integer; type interfaces__unsigned_32 = integer; type interfaces__unsigned_64 = integer; type spark__unsigned__u6 = integer; type spark__unsigned__u7 = integer; type spark__unsigned__byte = integer; type spark__unsigned__u16 = integer; type spark__unsigned__u32 = integer; type spark__unsigned__u64 = integer; type spark__crypto__i8 = integer; type spark__crypto__word_count_t = integer; type hash_bit_length = integer; type skein_512_state_words_index = integer; type skein_512_block_bytes_index = integer; type spark__crypto__byte_seq = array [natural] of spark__unsigned__byte; type spark__crypto__u64_seq = array [ spark__crypto__word_count_t] of spark__unsigned__u64; type tweak_value = record byte_count_lsb : spark__unsigned__u64; byte_count_msb : spark__unsigned__u32; reserved : spark__unsigned__u16; tree_level : spark__unsigned__u7; bit_pad : boolean; field_type : spark__unsigned__u6; first_block : boolean; final_block : boolean end; type context_header = record tweak_words : tweak_value; hash_bit_len : hash_bit_length; byte_count : natural end; 
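{ Reader's note: the two record types above are combined in skein_512_context, declared next; its field h holds the context_header (tweak_words, hash_bit_len, byte_count), x the state words as a spark__crypto__u64_seq, and b the block bytes as a spark__crypto__byte_seq. }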
type skein_512_context = record h : context_header; x : spark__crypto__u64_seq; b : spark__crypto__byte_seq end; const skein_512_block_bytes_index__base__first : integer = pending; const skein_512_block_bytes_index__base__last : integer = pending; const skein_512_state_words_index__base__first : integer = pending; const skein_512_state_words_index__base__last : integer = pending; const hash_bit_length__base__first : integer = pending; const hash_bit_length__base__last : integer = pending; const spark__crypto__word_count_t__base__first : integer = pending; const spark__crypto__word_count_t__base__last : integer = pending; const spark__crypto__i8__base__first : integer = pending; const spark__crypto__i8__base__last : integer = pending; const spark__unsigned__u64__base__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__base__last : interfaces__unsigned_64 = pending; const spark__unsigned__u32__base__first : interfaces__unsigned_32 = pending; const spark__unsigned__u32__base__last : interfaces__unsigned_32 = pending; const spark__unsigned__u16__base__first : interfaces__unsigned_16 = pending; const spark__unsigned__u16__base__last : interfaces__unsigned_16 = pending; const spark__unsigned__byte__base__first : interfaces__unsigned_8 = pending; const spark__unsigned__byte__base__last : interfaces__unsigned_8 = pending; const spark__unsigned__u7__base__first : spark__unsigned__u7 = pending; const spark__unsigned__u7__base__last : spark__unsigned__u7 = pending; const spark__unsigned__u6__base__first : spark__unsigned__u6 = pending; const spark__unsigned__u6__base__last : spark__unsigned__u6 = pending; const interfaces__unsigned_64__base__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_32__base__first : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__base__last : interfaces__unsigned_32 = pending; const interfaces__unsigned_16__base__first : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__base__last : interfaces__unsigned_16 = pending; const interfaces__unsigned_8__base__first : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__base__last : interfaces__unsigned_8 = pending; const natural__base__first : integer = pending; const natural__base__last : integer = pending; const integer__base__first : integer = pending; const integer__base__last : integer = pending; const context_header__size : integer = pending; const tweak_value__size : integer = pending; const skein_512_context__size : integer = pending; const skein_512_block_bytes_index__first : integer = pending; const skein_512_block_bytes_index__last : integer = pending; const skein_512_block_bytes_index__size : integer = pending; const skein_512_state_words_index__first : integer = pending; const skein_512_state_words_index__last : integer = pending; const skein_512_state_words_index__size : integer = pending; const hash_bit_length__first : integer = pending; const hash_bit_length__last : integer = pending; const hash_bit_length__size : integer = pending; const spark__crypto__word_count_t__first : integer = pending; const spark__crypto__word_count_t__last : integer = pending; const spark__crypto__word_count_t__size : integer = pending; const spark__crypto__i8__first : integer = pending; const spark__crypto__i8__last : integer = pending; const spark__crypto__i8__size : integer = pending; const spark__unsigned__u64__first : interfaces__unsigned_64 = pending; const 
spark__unsigned__u64__last : interfaces__unsigned_64 = pending; const spark__unsigned__u64__modulus : integer = pending; const spark__unsigned__u64__size : integer = pending; const spark__unsigned__u32__first : interfaces__unsigned_32 = pending; const spark__unsigned__u32__last : interfaces__unsigned_32 = pending; const spark__unsigned__u32__modulus : integer = pending; const spark__unsigned__u32__size : integer = pending; const spark__unsigned__u16__first : interfaces__unsigned_16 = pending; const spark__unsigned__u16__last : interfaces__unsigned_16 = pending; const spark__unsigned__u16__modulus : integer = pending; const spark__unsigned__u16__size : integer = pending; const spark__unsigned__byte__first : interfaces__unsigned_8 = pending; const spark__unsigned__byte__last : interfaces__unsigned_8 = pending; const spark__unsigned__byte__modulus : integer = pending; const spark__unsigned__byte__size : integer = pending; const spark__unsigned__u7__first : spark__unsigned__u7 = pending; const spark__unsigned__u7__last : spark__unsigned__u7 = pending; const spark__unsigned__u7__modulus : integer = pending; const spark__unsigned__u7__size : integer = pending; const spark__unsigned__u6__first : spark__unsigned__u6 = pending; const spark__unsigned__u6__last : spark__unsigned__u6 = pending; const spark__unsigned__u6__modulus : integer = pending; const spark__unsigned__u6__size : integer = pending; const interfaces__unsigned_64__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__modulus : integer = pending; const interfaces__unsigned_64__size : integer = pending; const interfaces__unsigned_32__first : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__last : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__modulus : integer = pending; const interfaces__unsigned_32__size : integer = pending; const interfaces__unsigned_16__first : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__last : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__modulus : integer = pending; const interfaces__unsigned_16__size : integer = pending; const interfaces__unsigned_8__first : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__last : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__modulus : integer = pending; const interfaces__unsigned_8__size : integer = pending; const natural__first : integer = pending; const natural__last : integer = pending; const natural__size : integer = pending; const integer__first : integer = pending; const integer__last : integer = pending; const integer__size : integer = pending; var ctx : skein_512_context; var x : spark__crypto__u64_seq; var w : spark__crypto__u64_seq; end; spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_7.slg0000644000175000017500000025145211712513676031656 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Round_7 @@@@@@@@@@ VC: procedure_round_7_1. 
@@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_7_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_7_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_7_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_7_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_7_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [4]) + element(x, [1])) mod 18446744073709551616 >= 0 New C2: (element(x, [4]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [4]) + element(x, [1])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [4]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_7_2. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [1]) <= interfaces__unsigned_64__last *** Proved C5: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 1 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 1 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_7_rules(1). This was achieved by replacing all occurrences of r_512_6_0 by: 25. 
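(Aside: in procedure_round_7_2, once rules (24) and (25) below rewrite the shift-count bounds to 0 and 64, C1 and C2 reduce to 25 >= 0 and 25 <= 64 and hence to true, so the rotation amount r_512_6_0 = 25 lies within spark__unsigned__shift_count (0 .. 64); the remaining conclusions re-check the index 1 against spark__crypto__i8 and element(x, [1]) against the interfaces__unsigned_64 range.)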
New C1: 25 >= spark__unsigned__shift_count__first New C2: 25 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_7_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_7_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule round_7_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [1]) >= 0 -S- Applied substitution rule round_7_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [1]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_7_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [4]) + element(x, [1])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [4]) + element(x, [1])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_7_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [4]) + element(x, [1])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_7_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [4]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_7_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_7_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [1]) >= 0 using hypothesis H2. *** Proved C4: element(x, [1]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_7_3. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [1]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . 
--- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0) <= spark__unsigned__u64__last -S- Applied substitution rule round_7_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0) >= 0 -S- Applied substitution rule round_7_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0) <= 18446744073709551615 -S- Applied substitution rule round_7_rules(1). This was achieved by replacing all occurrences of r_512_6_0 by: 25. New H11: 25 >= spark__unsigned__shift_count__first New H12: 25 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [1]), 25) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [1]), 25) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [1]), 25) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [1]), 25) <= 18446744073709551615 -S- Applied substitution rule round_7_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [1]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [1]), 25) >= 0 -S- Applied substitution rule round_7_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [1]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [1]), 25) <= 18446744073709551615 -S- Applied substitution rule round_7_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [4]) + element(x, [1])) mod 18446744073709551616 >= 0 New H4: (element(x, [4]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_7_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_7_rules(25). 
This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_7_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_7_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [1]), 25) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [1]), 25) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_7_4. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [1]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0), (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0), (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 4 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 4 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 1 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 1 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_7_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0), (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_7_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0), (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0), (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. *** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0), (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_7_5. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [1]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . 
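(Aside: the VC being simplified here, procedure_round_7_5, goes on in its conclusion C2 to re-establish that every element of update(update(x, [4], ...), [1], ...) still lies in the u64 range after the add/rotate/xor step on the pair x(4)/x(1), whose rotation amount r_512_6_0 was rewritten to 25 by round_7_rules(1) earlier in this log. A minimal sketch of that shape follows; the unit name Mix_4_1_Sketch, the types and the all-zero initial value are invented for the example.)

with Interfaces; use Interfaces;

procedure Mix_4_1_Sketch is
   type Word_Index is range 0 .. 7;
   type U64_Seq is array (Word_Index) of Unsigned_64;
   X : U64_Seq := (others => 0);                 --  placeholder contents
begin
   X (4) := X (4) + X (1);                       --  wraps mod 2**64
   X (1) := Rotate_Left (X (1), 25) xor X (4);   --  rotate of the old X (1), xor with the new X (4)
   --  Trivially true for a modular type, but it is the shape of the for_all
   --  conclusion that procedure_round_7_5 states over the updated array.
   pragma Assert (for all I in Word_Index => X (I) <= Unsigned_64'Last);
end Mix_4_1_Sketch;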
%%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0) <= spark__unsigned__u64__last %%% Simplified H23 on reading formula in, to give: %%% H23: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0), (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified H24 on reading formula in, to give: %%% H24: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0), (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last --- Hypothesis H25 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H26 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H27 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H28 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H29 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . %%% Simplified C2 on reading formula in, to give: %%% C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0), (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [4], (element(x, [4]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0), (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) *** Proved C1: true *** Proved C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0), (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [4], (element(x, [4]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_6_0), (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) using hypotheses H2, H3, H4, H5, H6, H7, H8, H23 & H24. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_7_6. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_7_rules(18). 
This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_7_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_7_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_7_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_7_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [6]) + element(x, [3])) mod 18446744073709551616 >= 0 New C2: (element(x, [6]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [6]) + element(x, [3])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [6]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_7_7. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [3]) <= interfaces__unsigned_64__last *** Proved C5: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 3 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 3 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_7_rules(2). This was achieved by replacing all occurrences of r_512_6_1 by: 29. New C1: 29 >= spark__unsigned__shift_count__first New C2: 29 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_7_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_7_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. 
New C2: true -S- Applied substitution rule round_7_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [3]) >= 0 -S- Applied substitution rule round_7_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [3]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_7_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [6]) + element(x, [3])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [6]) + element(x, [3])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_7_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [6]) + element(x, [3])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_7_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [6]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_7_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_7_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [3]) >= 0 using hypothesis H2. *** Proved C4: element(x, [3]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_7_8. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [3]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . 
%%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1) <= spark__unsigned__u64__last -S- Applied substitution rule round_7_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1) >= 0 -S- Applied substitution rule round_7_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1) <= 18446744073709551615 -S- Applied substitution rule round_7_rules(2). This was achieved by replacing all occurrences of r_512_6_1 by: 29. New H11: 29 >= spark__unsigned__shift_count__first New H12: 29 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [3]), 29) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [3]), 29) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [3]), 29) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [3]), 29) <= 18446744073709551615 -S- Applied substitution rule round_7_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [3]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [3]), 29) >= 0 -S- Applied substitution rule round_7_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [3]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [3]), 29) <= 18446744073709551615 -S- Applied substitution rule round_7_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [6]) + element(x, [3])) mod 18446744073709551616 >= 0 New H4: (element(x, [6]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_7_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_7_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_7_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. 
New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_7_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [3]), 29) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [3]), 29) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_7_9. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [3]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1), (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1), (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 6 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 6 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 3 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 3 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_7_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1), (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_7_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1), (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1), (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. *** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1), (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_7_10. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [3]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . 
%%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1) <= spark__unsigned__u64__last %%% Simplified H23 on reading formula in, to give: %%% H23: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1), (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified H24 on reading formula in, to give: %%% H24: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1), (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last --- Hypothesis H25 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H26 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H27 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H28 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H29 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . %%% Simplified C2 on reading formula in, to give: %%% C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], bit__xor( spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1), (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [6], (element(x, [6]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], bit__xor( spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1), (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) *** Proved C1: true *** Proved C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], bit__xor( spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1), (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [6], (element(x, [6]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], bit__xor( spark__unsigned__rotate_left_64(element(x, [3]), r_512_6_1), (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) using hypotheses H2, H3, H4, H5, H6, H7, H8, H23 & H24. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_7_11. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_7_rules(18). 
This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_7_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_7_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_7_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_7_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [0]) + element(x, [5])) mod 18446744073709551616 >= 0 New C2: (element(x, [0]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [0]) + element(x, [5])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [0]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_7_12. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [5]) <= interfaces__unsigned_64__last *** Proved C5: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 5 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 5 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_7_rules(3). This was achieved by replacing all occurrences of r_512_6_2 by: 39. New C1: 39 >= spark__unsigned__shift_count__first New C2: 39 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_7_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_7_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. 
New C2: true -S- Applied substitution rule round_7_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [5]) >= 0 -S- Applied substitution rule round_7_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [5]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_7_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [0]) + element(x, [5])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [0]) + element(x, [5])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_7_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [0]) + element(x, [5])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_7_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [0]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_7_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_7_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [5]) >= 0 using hypothesis H2. *** Proved C4: element(x, [5]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_7_13. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [5]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . 
%%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2) <= spark__unsigned__u64__last -S- Applied substitution rule round_7_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2) >= 0 -S- Applied substitution rule round_7_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2) <= 18446744073709551615 -S- Applied substitution rule round_7_rules(3). This was achieved by replacing all occurrences of r_512_6_2 by: 39. New H11: 39 >= spark__unsigned__shift_count__first New H12: 39 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [5]), 39) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [5]), 39) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [5]), 39) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [5]), 39) <= 18446744073709551615 -S- Applied substitution rule round_7_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [5]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [5]), 39) >= 0 -S- Applied substitution rule round_7_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [5]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [5]), 39) <= 18446744073709551615 -S- Applied substitution rule round_7_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [0]) + element(x, [5])) mod 18446744073709551616 >= 0 New H4: (element(x, [0]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_7_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_7_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_7_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. 
New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_7_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [5]), 39) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [5]), 39) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_7_14. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [5]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2), (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2), (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 0 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 0 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 5 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 5 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_7_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2), (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_7_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2), (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2), (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. *** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2), (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_7_15. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [5]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . 
%%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2) <= spark__unsigned__u64__last %%% Simplified H23 on reading formula in, to give: %%% H23: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2), (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified H24 on reading formula in, to give: %%% H24: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2), (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last --- Hypothesis H25 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H26 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H27 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H28 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H29 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . %%% Simplified C2 on reading formula in, to give: %%% C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2), (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [0], (element(x, [0]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2), (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) *** Proved C1: true *** Proved C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2), (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [0], (element(x, [0]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_6_2), (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) using hypotheses H2, H3, H4, H5, H6, H7, H8, H23 & H24. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_7_16. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_7_rules(18). 
This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_7_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_7_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_7_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_7_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [2]) + element(x, [7])) mod 18446744073709551616 >= 0 New C2: (element(x, [2]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [2]) + element(x, [7])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [2]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_7_17. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [7]) <= interfaces__unsigned_64__last *** Proved C5: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 7 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 7 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_7_rules(4). This was achieved by replacing all occurrences of r_512_6_3 by: 43. New C1: 43 >= spark__unsigned__shift_count__first New C2: 43 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_7_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_7_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. 
New C2: true -S- Applied substitution rule round_7_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [7]) >= 0 -S- Applied substitution rule round_7_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [7]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_7_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [2]) + element(x, [7])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [2]) + element(x, [7])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_7_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [2]) + element(x, [7])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_7_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [2]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_7_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_7_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [7]) >= 0 using hypothesis H2. *** Proved C4: element(x, [7]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_7_18. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [7]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . 
%%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [7]), r_512_6_3) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [7]), r_512_6_3) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [7]), r_512_6_3) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [7]), r_512_6_3) <= spark__unsigned__u64__last -S- Applied substitution rule round_7_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [7]), r_512_6_3) >= 0 -S- Applied substitution rule round_7_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [7]), r_512_6_3) <= 18446744073709551615 -S- Applied substitution rule round_7_rules(4). This was achieved by replacing all occurrences of r_512_6_3 by: 43. New H11: 43 >= spark__unsigned__shift_count__first New H12: 43 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [7]), 43) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [7]), 43) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [7]), 43) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [7]), 43) <= 18446744073709551615 -S- Applied substitution rule round_7_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [7]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [7]), 43) >= 0 -S- Applied substitution rule round_7_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [7]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [7]), 43) <= 18446744073709551615 -S- Applied substitution rule round_7_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [2]) + element(x, [7])) mod 18446744073709551616 >= 0 New H4: (element(x, [2]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_7_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_7_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_7_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. 
New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_7_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [7]), 43) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [7]), 43) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_7_19. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [7]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [7]), r_512_6_3) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [7]), r_512_6_3) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [7]), r_512_6_3) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [7]), r_512_6_3) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_6_3), (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_6_3), (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 2 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 2 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 7 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 7 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_7_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [7]), r_512_6_3) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_6_3), (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_7_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [7]), r_512_6_3) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_6_3), (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_6_3), (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. *** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_6_3), (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_7_20. @@@@@@@@@@ *** Proved C1: true *** PROVED VC. ././@LongLink0000000000000000000000000000015200000000000011563 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/threefish_block.vcgspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/threefish_bloc0000644000175000017500000004453311712513676032475 0ustar eugeneugen ******************************************************* Semantic Analysis of SPARK Text Examiner Pro Edition ******************************************************* procedure Skein.Skein_512_Process_Block.Threefish_Block For path(s) from start to run-time check associated with statement of line 584: procedure_threefish_block_1. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i9__first) and ( i___1 <= spark__crypto__i9__last)) -> ((element( ks, [i___1]) >= spark__unsigned__u64__first) and (element( ks, [i___1]) <= spark__unsigned__u64__last))) . H3: for_all(i___1: integer, ((i___1 >= spark__crypto__i3__first) and ( i___1 <= spark__crypto__i3__last)) -> ((element( ts, [i___1]) >= spark__unsigned__u64__first) and (element( ts, [i___1]) <= spark__unsigned__u64__last))) . H4: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus >= interfaces__unsigned_64__first . C2: skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus <= interfaces__unsigned_64__last . C3: 8 <> 0 . C4: 1 >= interfaces__unsigned_64__first . C5: 1 <= interfaces__unsigned_64__last . For path(s) from start to run-time check associated with statement of line 584: procedure_threefish_block_2. H1: true . 
H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i9__first) and ( i___1 <= spark__crypto__i9__last)) -> ((element( ks, [i___1]) >= spark__unsigned__u64__first) and (element( ks, [i___1]) <= spark__unsigned__u64__last))) . H3: for_all(i___1: integer, ((i___1 >= spark__crypto__i3__first) and ( i___1 <= spark__crypto__i3__last)) -> ((element( ts, [i___1]) >= spark__unsigned__u64__first) and (element( ts, [i___1]) <= spark__unsigned__u64__last))) . H4: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H5: skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus >= interfaces__unsigned_64__first . H6: skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus <= interfaces__unsigned_64__last . H7: 8 <> 0 . H8: 1 >= interfaces__unsigned_64__first . H9: 1 <= interfaces__unsigned_64__last . -> C1: (1 <= skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus) -> (( skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first) and ( skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last)) . C2: (1 <= skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus) -> ((1 >= spark__unsigned__u64__first) and (1 <= spark__unsigned__u64__last)) . For path(s) from start to default assertion of line 584: procedure_threefish_block_3. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i9__first) and ( i___1 <= spark__crypto__i9__last)) -> ((element( ks, [i___1]) >= spark__unsigned__u64__first) and (element( ks, [i___1]) <= spark__unsigned__u64__last))) . H3: for_all(i___1: integer, ((i___1 >= spark__crypto__i3__first) and ( i___1 <= spark__crypto__i3__last)) -> ((element( ts, [i___1]) >= spark__unsigned__u64__first) and (element( ts, [i___1]) <= spark__unsigned__u64__last))) . H4: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H5: skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus >= interfaces__unsigned_64__first . H6: skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus <= interfaces__unsigned_64__last . H7: 8 <> 0 . H8: 1 >= interfaces__unsigned_64__first . H9: 1 <= interfaces__unsigned_64__last . H10: (1 <= skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus) -> (( skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first) and ( skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last)) . H11: (1 <= skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus) -> ((1 >= spark__unsigned__u64__first) and (1 <= spark__unsigned__u64__last)) . H12: 1 <= skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus . -> C1: for_all(i___1: integer, ((i___1 >= spark__crypto__i9__first) and ( i___1 <= spark__crypto__i9__last)) -> ((element( ks, [i___1]) >= spark__unsigned__u64__first) and (element( ks, [i___1]) <= spark__unsigned__u64__last))) . C2: for_all(i___1: integer, ((i___1 >= spark__crypto__i3__first) and ( i___1 <= spark__crypto__i3__last)) -> ((element( ts, [i___1]) >= spark__unsigned__u64__first) and (element( ts, [i___1]) <= spark__unsigned__u64__last))) . 
C3: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . C4: 1 >= spark__unsigned__u64__first . C5: 1 <= spark__unsigned__u64__last . C6: 1 >= 1 . C7: 1 <= skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus . For path(s) from default assertion of line 584 to default assertion of line 584: procedure_threefish_block_4. H1: for_all(i___1: integer, ((i___1 >= spark__crypto__i9__first) and ( i___1 <= spark__crypto__i9__last)) -> ((element( ks, [i___1]) >= spark__unsigned__u64__first) and (element( ks, [i___1]) <= spark__unsigned__u64__last))) . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i3__first) and ( i___1 <= spark__crypto__i3__last)) -> ((element( ts, [i___1]) >= spark__unsigned__u64__first) and (element( ts, [i___1]) <= spark__unsigned__u64__last))) . H3: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H4: loop__1__r >= spark__unsigned__u64__first . H5: loop__1__r <= spark__unsigned__u64__last . H6: loop__1__r >= 1 . H7: loop__1__r <= skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus . H8: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__1, [ i___1]) >= spark__unsigned__u64__first) and (element(x__1, [ i___1]) <= spark__unsigned__u64__last))) . H9: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__2, [ i___1]) >= spark__unsigned__u64__first) and (element(x__2, [ i___1]) <= spark__unsigned__u64__last))) . H10: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__3, [ i___1]) >= spark__unsigned__u64__first) and (element(x__3, [ i___1]) <= spark__unsigned__u64__last))) . H11: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__4, [ i___1]) >= spark__unsigned__u64__first) and (element(x__4, [ i___1]) <= spark__unsigned__u64__last))) . H12: (loop__1__r * 2 mod interfaces__unsigned_64__modulus - 1) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H13: (loop__1__r * 2 mod interfaces__unsigned_64__modulus - 1) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H14: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__5, [ i___1]) >= spark__unsigned__u64__first) and (element(x__5, [ i___1]) <= spark__unsigned__u64__last))) . H15: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__6, [ i___1]) >= spark__unsigned__u64__first) and (element(x__6, [ i___1]) <= spark__unsigned__u64__last))) . H16: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__7, [ i___1]) >= spark__unsigned__u64__first) and (element(x__7, [ i___1]) <= spark__unsigned__u64__last))) . H17: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__8, [ i___1]) >= spark__unsigned__u64__first) and (element(x__8, [ i___1]) <= spark__unsigned__u64__last))) . 
H18: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__9, [ i___1]) >= spark__unsigned__u64__first) and (element(x__9, [ i___1]) <= spark__unsigned__u64__last))) . H19: loop__1__r * 2 mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H20: loop__1__r * 2 mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H21: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__10, [ i___1]) >= spark__unsigned__u64__first) and (element(x__10, [ i___1]) <= spark__unsigned__u64__last))) . H22: not (loop__1__r = skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus) . -> C1: for_all(i___1: integer, ((i___1 >= spark__crypto__i9__first) and ( i___1 <= spark__crypto__i9__last)) -> ((element( ks, [i___1]) >= spark__unsigned__u64__first) and (element( ks, [i___1]) <= spark__unsigned__u64__last))) . C2: for_all(i___1: integer, ((i___1 >= spark__crypto__i3__first) and ( i___1 <= spark__crypto__i3__last)) -> ((element( ts, [i___1]) >= spark__unsigned__u64__first) and (element( ts, [i___1]) <= spark__unsigned__u64__last))) . C3: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__10, [ i___1]) >= spark__unsigned__u64__first) and (element(x__10, [ i___1]) <= spark__unsigned__u64__last))) . C4: loop__1__r + 1 >= spark__unsigned__u64__first . C5: loop__1__r + 1 <= spark__unsigned__u64__last . C6: loop__1__r + 1 >= 1 . C7: loop__1__r + 1 <= skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus . For path(s) from default assertion of line 584 to run-time check associated with statement of line 589: procedure_threefish_block_5. H1: for_all(i___1: integer, ((i___1 >= spark__crypto__i9__first) and ( i___1 <= spark__crypto__i9__last)) -> ((element( ks, [i___1]) >= spark__unsigned__u64__first) and (element( ks, [i___1]) <= spark__unsigned__u64__last))) . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i3__first) and ( i___1 <= spark__crypto__i3__last)) -> ((element( ts, [i___1]) >= spark__unsigned__u64__first) and (element( ts, [i___1]) <= spark__unsigned__u64__last))) . H3: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H4: loop__1__r >= spark__unsigned__u64__first . H5: loop__1__r <= spark__unsigned__u64__last . H6: loop__1__r >= 1 . H7: loop__1__r <= skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus . H8: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__1, [ i___1]) >= spark__unsigned__u64__first) and (element(x__1, [ i___1]) <= spark__unsigned__u64__last))) . H9: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__2, [ i___1]) >= spark__unsigned__u64__first) and (element(x__2, [ i___1]) <= spark__unsigned__u64__last))) . H10: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__3, [ i___1]) >= spark__unsigned__u64__first) and (element(x__3, [ i___1]) <= spark__unsigned__u64__last))) . 
H11: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__4, [ i___1]) >= spark__unsigned__u64__first) and (element(x__4, [ i___1]) <= spark__unsigned__u64__last))) . -> C1: (loop__1__r * 2 mod interfaces__unsigned_64__modulus - 1) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (loop__1__r * 2 mod interfaces__unsigned_64__modulus - 1) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . For path(s) from default assertion of line 584 to run-time check associated with statement of line 594: procedure_threefish_block_6. H1: for_all(i___1: integer, ((i___1 >= spark__crypto__i9__first) and ( i___1 <= spark__crypto__i9__last)) -> ((element( ks, [i___1]) >= spark__unsigned__u64__first) and (element( ks, [i___1]) <= spark__unsigned__u64__last))) . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i3__first) and ( i___1 <= spark__crypto__i3__last)) -> ((element( ts, [i___1]) >= spark__unsigned__u64__first) and (element( ts, [i___1]) <= spark__unsigned__u64__last))) . H3: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H4: loop__1__r >= spark__unsigned__u64__first . H5: loop__1__r <= spark__unsigned__u64__last . H6: loop__1__r >= 1 . H7: loop__1__r <= skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus . H8: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__1, [ i___1]) >= spark__unsigned__u64__first) and (element(x__1, [ i___1]) <= spark__unsigned__u64__last))) . H9: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__2, [ i___1]) >= spark__unsigned__u64__first) and (element(x__2, [ i___1]) <= spark__unsigned__u64__last))) . H10: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__3, [ i___1]) >= spark__unsigned__u64__first) and (element(x__3, [ i___1]) <= spark__unsigned__u64__last))) . H11: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__4, [ i___1]) >= spark__unsigned__u64__first) and (element(x__4, [ i___1]) <= spark__unsigned__u64__last))) . H12: (loop__1__r * 2 mod interfaces__unsigned_64__modulus - 1) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H13: (loop__1__r * 2 mod interfaces__unsigned_64__modulus - 1) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H14: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__5, [ i___1]) >= spark__unsigned__u64__first) and (element(x__5, [ i___1]) <= spark__unsigned__u64__last))) . H15: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__6, [ i___1]) >= spark__unsigned__u64__first) and (element(x__6, [ i___1]) <= spark__unsigned__u64__last))) . H16: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__7, [ i___1]) >= spark__unsigned__u64__first) and (element(x__7, [ i___1]) <= spark__unsigned__u64__last))) . 
H17: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__8, [ i___1]) >= spark__unsigned__u64__first) and (element(x__8, [ i___1]) <= spark__unsigned__u64__last))) . H18: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__9, [ i___1]) >= spark__unsigned__u64__first) and (element(x__9, [ i___1]) <= spark__unsigned__u64__last))) . -> C1: loop__1__r * 2 mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: loop__1__r * 2 mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . For path(s) from start to finish: procedure_threefish_block_7. *** true . /* trivially true VC removed by Examiner */ For path(s) from default assertion of line 584 to finish: procedure_threefish_block_8. *** true . /* trivially true VC removed by Examiner */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_6.vcg0000644000175000017500000015342411712513676031647 0ustar eugeneugen ******************************************************* Semantic Analysis of SPARK Text Examiner Pro Edition ******************************************************* procedure Skein.Skein_512_Process_Block.Round_6 For path(s) from start to run-time check associated with statement of line 426: procedure_round_6_1. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 1 >= spark__crypto__i8__first . C4: 1 <= spark__crypto__i8__last . C5: 2 >= spark__crypto__i8__first . C6: 2 <= spark__crypto__i8__last . C7: 2 >= spark__crypto__i8__first . C8: 2 <= spark__crypto__i8__last . For path(s) from start to run-time check associated with statement of line 427: procedure_round_6_2. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . -> C1: r_512_5_0 >= spark__unsigned__shift_count__first . C2: r_512_5_0 <= spark__unsigned__shift_count__last . C3: element(update(x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . C4: element(update(x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . C5: 1 >= spark__crypto__i8__first . C6: 1 <= spark__crypto__i8__last . C7: 1 >= spark__crypto__i8__first . C8: 1 <= spark__crypto__i8__last . For path(s) from start to run-time check associated with statement of line 427: procedure_round_6_3. H1: true . 
H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . H11: r_512_5_0 >= spark__unsigned__shift_count__first . H12: r_512_5_0 <= spark__unsigned__shift_count__last . H13: element(update(x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . H14: element(update(x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . H15: 1 >= spark__crypto__i8__first . H16: 1 <= spark__crypto__i8__last . H17: 1 >= spark__crypto__i8__first . H18: 1 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_5_0) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_5_0) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_5_0) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_5_0) <= spark__unsigned__u64__last . For path(s) from start to run-time check associated with statement of line 428: procedure_round_6_4. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . H11: r_512_5_0 >= spark__unsigned__shift_count__first . H12: r_512_5_0 <= spark__unsigned__shift_count__last . H13: element(update(x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . H14: element(update(x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . H15: 1 >= spark__crypto__i8__first . H16: 1 <= spark__crypto__i8__last . H17: 1 >= spark__crypto__i8__first . H18: 1 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_5_0) >= interfaces__unsigned_64__first . 
H20: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_5_0) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_5_0) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_5_0) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_5_0)), [1]), element(update(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_5_0)), [2])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_5_0)), [1]), element(update(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_5_0)), [2])) <= spark__unsigned__u64__last . C3: 2 >= spark__crypto__i8__first . C4: 2 <= spark__crypto__i8__last . C5: 1 >= spark__crypto__i8__first . C6: 1 <= spark__crypto__i8__last . C7: 1 >= spark__crypto__i8__first . C8: 1 <= spark__crypto__i8__last . For path(s) from start to assertion of line 431: procedure_round_6_5. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . H11: r_512_5_0 >= spark__unsigned__shift_count__first . H12: r_512_5_0 <= spark__unsigned__shift_count__last . H13: element(update(x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . H14: element(update(x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . H15: 1 >= spark__crypto__i8__first . H16: 1 <= spark__crypto__i8__last . H17: 1 >= spark__crypto__i8__first . H18: 1 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_5_0) >= interfaces__unsigned_64__first . 
H20: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_5_0) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_5_0) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_5_0) <= spark__unsigned__u64__last . H23: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_5_0)), [1]), element(update(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_5_0)), [2])) >= spark__unsigned__u64__first . H24: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_5_0)), [1]), element(update(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_5_0)), [2])) <= spark__unsigned__u64__last . H25: 2 >= spark__crypto__i8__first . H26: 2 <= spark__crypto__i8__last . H27: 1 >= spark__crypto__i8__first . H28: 1 <= spark__crypto__i8__last . H29: 1 >= spark__crypto__i8__first . H30: 1 <= spark__crypto__i8__last . -> C1: true . 
C2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(update(update(update( x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_5_0)), [1], bit__xor(element(update(update( x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_5_0)), [1]), element(update(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_5_0)), [2]))), [i___1]) >= spark__unsigned__u64__first) and (element(update(update(update( x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_5_0)), [1], bit__xor(element(update(update( x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_5_0)), [1]), element(update(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_5_0)), [2]))), [i___1]) <= spark__unsigned__u64__last))) . For path(s) from assertion of line 431 to run-time check associated with statement of line 433: procedure_round_6_6. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 7 >= spark__crypto__i8__first . C4: 7 <= spark__crypto__i8__last . C5: 4 >= spark__crypto__i8__first . C6: 4 <= spark__crypto__i8__last . C7: 4 >= spark__crypto__i8__first . C8: 4 <= spark__crypto__i8__last . For path(s) from assertion of line 431 to run-time check associated with statement of line 434: procedure_round_6_7. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . -> C1: r_512_5_1 >= spark__unsigned__shift_count__first . C2: r_512_5_1 <= spark__unsigned__shift_count__last . 
C3: element(update(x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . C4: element(update(x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . C5: 7 >= spark__crypto__i8__first . C6: 7 <= spark__crypto__i8__last . C7: 7 >= spark__crypto__i8__first . C8: 7 <= spark__crypto__i8__last . For path(s) from assertion of line 431 to run-time check associated with statement of line 434: procedure_round_6_8. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . H11: r_512_5_1 >= spark__unsigned__shift_count__first . H12: r_512_5_1 <= spark__unsigned__shift_count__last . H13: element(update(x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . H14: element(update(x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . H15: 7 >= spark__crypto__i8__first . H16: 7 <= spark__crypto__i8__last . H17: 7 >= spark__crypto__i8__first . H18: 7 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_5_1) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_5_1) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_5_1) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_5_1) <= spark__unsigned__u64__last . For path(s) from assertion of line 431 to run-time check associated with statement of line 435: procedure_round_6_9. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . H11: r_512_5_1 >= spark__unsigned__shift_count__first . H12: r_512_5_1 <= spark__unsigned__shift_count__last . H13: element(update(x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . H15: 7 >= spark__crypto__i8__first . H16: 7 <= spark__crypto__i8__last . H17: 7 >= spark__crypto__i8__first . H18: 7 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_5_1) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_5_1) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_5_1) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_5_1) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_5_1)), [7]), element(update(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_5_1)), [4])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_5_1)), [7]), element(update(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_5_1)), [4])) <= spark__unsigned__u64__last . C3: 4 >= spark__crypto__i8__first . C4: 4 <= spark__crypto__i8__last . C5: 7 >= spark__crypto__i8__first . C6: 7 <= spark__crypto__i8__last . C7: 7 >= spark__crypto__i8__first . C8: 7 <= spark__crypto__i8__last . For path(s) from assertion of line 431 to assertion of line 437: procedure_round_6_10. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . H11: r_512_5_1 >= spark__unsigned__shift_count__first . H12: r_512_5_1 <= spark__unsigned__shift_count__last . H13: element(update(x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . H15: 7 >= spark__crypto__i8__first . H16: 7 <= spark__crypto__i8__last . H17: 7 >= spark__crypto__i8__first . H18: 7 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_5_1) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_5_1) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_5_1) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_5_1) <= spark__unsigned__u64__last . H23: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_5_1)), [7]), element(update(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_5_1)), [4])) >= spark__unsigned__u64__first . H24: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_5_1)), [7]), element(update(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_5_1)), [4])) <= spark__unsigned__u64__last . H25: 4 >= spark__crypto__i8__first . H26: 4 <= spark__crypto__i8__last . H27: 7 >= spark__crypto__i8__first . H28: 7 <= spark__crypto__i8__last . H29: 7 >= spark__crypto__i8__first . H30: 7 <= spark__crypto__i8__last . -> C1: true . 
C2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(update(update(update( x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_5_1)), [7], bit__xor(element(update(update( x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_5_1)), [7]), element(update(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_5_1)), [4]))), [i___1]) >= spark__unsigned__u64__first) and (element(update(update(update( x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_5_1)), [7], bit__xor(element(update(update( x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_5_1)), [7]), element(update(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_5_1)), [4]))), [i___1]) <= spark__unsigned__u64__last))) . For path(s) from assertion of line 437 to run-time check associated with statement of line 439: procedure_round_6_11. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 5 >= spark__crypto__i8__first . C4: 5 <= spark__crypto__i8__last . C5: 6 >= spark__crypto__i8__first . C6: 6 <= spark__crypto__i8__last . C7: 6 >= spark__crypto__i8__first . C8: 6 <= spark__crypto__i8__last . For path(s) from assertion of line 437 to run-time check associated with statement of line 440: procedure_round_6_12. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . -> C1: r_512_5_2 >= spark__unsigned__shift_count__first . C2: r_512_5_2 <= spark__unsigned__shift_count__last . 
C3: element(update(x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . C4: element(update(x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . C5: 5 >= spark__crypto__i8__first . C6: 5 <= spark__crypto__i8__last . C7: 5 >= spark__crypto__i8__first . C8: 5 <= spark__crypto__i8__last . For path(s) from assertion of line 437 to run-time check associated with statement of line 440: procedure_round_6_13. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . H11: r_512_5_2 >= spark__unsigned__shift_count__first . H12: r_512_5_2 <= spark__unsigned__shift_count__last . H13: element(update(x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . H14: element(update(x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . H15: 5 >= spark__crypto__i8__first . H16: 5 <= spark__crypto__i8__last . H17: 5 >= spark__crypto__i8__first . H18: 5 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_5_2) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_5_2) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_5_2) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_5_2) <= spark__unsigned__u64__last . For path(s) from assertion of line 437 to run-time check associated with statement of line 441: procedure_round_6_14. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . H11: r_512_5_2 >= spark__unsigned__shift_count__first . H12: r_512_5_2 <= spark__unsigned__shift_count__last . H13: element(update(x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . H15: 5 >= spark__crypto__i8__first . H16: 5 <= spark__crypto__i8__last . H17: 5 >= spark__crypto__i8__first . H18: 5 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_5_2) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_5_2) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_5_2) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_5_2) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_5_2)), [5]), element(update(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_5_2)), [6])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_5_2)), [5]), element(update(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_5_2)), [6])) <= spark__unsigned__u64__last . C3: 6 >= spark__crypto__i8__first . C4: 6 <= spark__crypto__i8__last . C5: 5 >= spark__crypto__i8__first . C6: 5 <= spark__crypto__i8__last . C7: 5 >= spark__crypto__i8__first . C8: 5 <= spark__crypto__i8__last . For path(s) from assertion of line 437 to assertion of line 443: procedure_round_6_15. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . H11: r_512_5_2 >= spark__unsigned__shift_count__first . H12: r_512_5_2 <= spark__unsigned__shift_count__last . H13: element(update(x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . H15: 5 >= spark__crypto__i8__first . H16: 5 <= spark__crypto__i8__last . H17: 5 >= spark__crypto__i8__first . H18: 5 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_5_2) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_5_2) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_5_2) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_5_2) <= spark__unsigned__u64__last . H23: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_5_2)), [5]), element(update(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_5_2)), [6])) >= spark__unsigned__u64__first . H24: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_5_2)), [5]), element(update(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_5_2)), [6])) <= spark__unsigned__u64__last . H25: 6 >= spark__crypto__i8__first . H26: 6 <= spark__crypto__i8__last . H27: 5 >= spark__crypto__i8__first . H28: 5 <= spark__crypto__i8__last . H29: 5 >= spark__crypto__i8__first . H30: 5 <= spark__crypto__i8__last . -> C1: true . 
C2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(update(update(update( x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_5_2)), [5], bit__xor(element(update(update( x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_5_2)), [5]), element(update(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_5_2)), [6]))), [i___1]) >= spark__unsigned__u64__first) and (element(update(update(update( x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_5_2)), [5], bit__xor(element(update(update( x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_5_2)), [5]), element(update(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_5_2)), [6]))), [i___1]) <= spark__unsigned__u64__last))) . For path(s) from assertion of line 443 to run-time check associated with statement of line 445: procedure_round_6_16. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 3 >= spark__crypto__i8__first . C4: 3 <= spark__crypto__i8__last . C5: 0 >= spark__crypto__i8__first . C6: 0 <= spark__crypto__i8__last . C7: 0 >= spark__crypto__i8__first . C8: 0 <= spark__crypto__i8__last . For path(s) from assertion of line 443 to run-time check associated with statement of line 446: procedure_round_6_17. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . -> C1: r_512_5_3 >= spark__unsigned__shift_count__first . C2: r_512_5_3 <= spark__unsigned__shift_count__last . 
C3: element(update(x, [0], (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . C4: element(update(x, [0], (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . C5: 3 >= spark__crypto__i8__first . C6: 3 <= spark__crypto__i8__last . C7: 3 >= spark__crypto__i8__first . C8: 3 <= spark__crypto__i8__last . For path(s) from assertion of line 443 to run-time check associated with statement of line 446: procedure_round_6_18. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . H11: r_512_5_3 >= spark__unsigned__shift_count__first . H12: r_512_5_3 <= spark__unsigned__shift_count__last . H13: element(update(x, [0], (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . H14: element(update(x, [0], (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . H15: 3 >= spark__crypto__i8__first . H16: 3 <= spark__crypto__i8__last . H17: 3 >= spark__crypto__i8__first . H18: 3 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_5_3) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_5_3) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_5_3) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_5_3) <= spark__unsigned__u64__last . For path(s) from assertion of line 443 to run-time check associated with statement of line 447: procedure_round_6_19. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . H11: r_512_5_3 >= spark__unsigned__shift_count__first . H12: r_512_5_3 <= spark__unsigned__shift_count__last . H13: element(update(x, [0], (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [0], (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . H15: 3 >= spark__crypto__i8__first . H16: 3 <= spark__crypto__i8__last . H17: 3 >= spark__crypto__i8__first . H18: 3 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_5_3) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_5_3) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_5_3) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_5_3) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_5_3)), [3]), element(update(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_5_3)), [0])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_5_3)), [3]), element(update(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_5_3)), [0])) <= spark__unsigned__u64__last . C3: 0 >= spark__crypto__i8__first . C4: 0 <= spark__crypto__i8__last . C5: 3 >= spark__crypto__i8__first . C6: 3 <= spark__crypto__i8__last . C7: 3 >= spark__crypto__i8__first . C8: 3 <= spark__crypto__i8__last . For path(s) from assertion of line 443 to finish: procedure_round_6_20. *** true . /* trivially true VC removed by Examiner */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_4.siv0000644000175000017500000000710411712513676031660 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Round_4 For path(s) from start to run-time check associated with statement of line 366: procedure_round_4_1. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 367: procedure_round_4_2. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 367: procedure_round_4_3. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 368: procedure_round_4_4. *** true . 
/* all conclusions proved */ For path(s) from start to assertion of line 371: procedure_round_4_5. *** true . /* all conclusions proved */ For path(s) from assertion of line 371 to run-time check associated with statement of line 373: procedure_round_4_6. *** true . /* all conclusions proved */ For path(s) from assertion of line 371 to run-time check associated with statement of line 374: procedure_round_4_7. *** true . /* all conclusions proved */ For path(s) from assertion of line 371 to run-time check associated with statement of line 374: procedure_round_4_8. *** true . /* all conclusions proved */ For path(s) from assertion of line 371 to run-time check associated with statement of line 375: procedure_round_4_9. *** true . /* all conclusions proved */ For path(s) from assertion of line 371 to assertion of line 377: procedure_round_4_10. *** true . /* all conclusions proved */ For path(s) from assertion of line 377 to run-time check associated with statement of line 379: procedure_round_4_11. *** true . /* all conclusions proved */ For path(s) from assertion of line 377 to run-time check associated with statement of line 380: procedure_round_4_12. *** true . /* all conclusions proved */ For path(s) from assertion of line 377 to run-time check associated with statement of line 380: procedure_round_4_13. *** true . /* all conclusions proved */ For path(s) from assertion of line 377 to run-time check associated with statement of line 381: procedure_round_4_14. *** true . /* all conclusions proved */ For path(s) from assertion of line 377 to assertion of line 383: procedure_round_4_15. *** true . /* all conclusions proved */ For path(s) from assertion of line 383 to run-time check associated with statement of line 385: procedure_round_4_16. *** true . /* all conclusions proved */ For path(s) from assertion of line 383 to run-time check associated with statement of line 386: procedure_round_4_17. *** true . /* all conclusions proved */ For path(s) from assertion of line 383 to run-time check associated with statement of line 386: procedure_round_4_18. *** true . /* all conclusions proved */ For path(s) from assertion of line 383 to run-time check associated with statement of line 387: procedure_round_4_19. *** true . /* all conclusions proved */ For path(s) from assertion of line 383 to finish: procedure_round_4_20. *** true . /* all conclusions proved */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_2.rls0000644000175000017500000000622211712513676031655 0ustar eugeneugen /*********************************************************/ /*Proof Rule Declarations*/ /*Examiner Pro Edition*/ /*********************************************************/ /*procedure Skein.Skein_512_Process_Block.Round_2*/ rule_family round_2_rules: X requires [X:any] & X <= Y requires [X:ire, Y:ire] & X >= Y requires [X:ire, Y:ire]. round_2_rules(1): r_512_1_0 may_be_replaced_by 33. round_2_rules(2): r_512_1_1 may_be_replaced_by 27. round_2_rules(3): r_512_1_2 may_be_replaced_by 14. round_2_rules(4): r_512_1_3 may_be_replaced_by 42. round_2_rules(5): integer__size >= 0 may_be_deduced. round_2_rules(6): integer__first may_be_replaced_by -2147483648. round_2_rules(7): integer__last may_be_replaced_by 2147483647. round_2_rules(8): integer__base__first may_be_replaced_by -2147483648. round_2_rules(9): integer__base__last may_be_replaced_by 2147483647. round_2_rules(10): interfaces__unsigned_64__size >= 0 may_be_deduced. 
round_2_rules(11): interfaces__unsigned_64__size may_be_replaced_by 64. round_2_rules(12): interfaces__unsigned_64__first may_be_replaced_by 0. round_2_rules(13): interfaces__unsigned_64__last may_be_replaced_by 18446744073709551615. round_2_rules(14): interfaces__unsigned_64__base__first may_be_replaced_by 0. round_2_rules(15): interfaces__unsigned_64__base__last may_be_replaced_by 18446744073709551615. round_2_rules(16): interfaces__unsigned_64__modulus may_be_replaced_by 18446744073709551616. round_2_rules(17): spark__unsigned__u64__size >= 0 may_be_deduced. round_2_rules(18): spark__unsigned__u64__first may_be_replaced_by 0. round_2_rules(19): spark__unsigned__u64__last may_be_replaced_by 18446744073709551615. round_2_rules(20): spark__unsigned__u64__base__first may_be_replaced_by 0. round_2_rules(21): spark__unsigned__u64__base__last may_be_replaced_by 18446744073709551615. round_2_rules(22): spark__unsigned__u64__modulus may_be_replaced_by 18446744073709551616. round_2_rules(23): spark__unsigned__shift_count__size >= 0 may_be_deduced. round_2_rules(24): spark__unsigned__shift_count__first may_be_replaced_by 0. round_2_rules(25): spark__unsigned__shift_count__last may_be_replaced_by 64. round_2_rules(26): spark__unsigned__shift_count__base__first may_be_replaced_by -2147483648. round_2_rules(27): spark__unsigned__shift_count__base__last may_be_replaced_by 2147483647. round_2_rules(28): spark__crypto__i8__size >= 0 may_be_deduced. round_2_rules(29): spark__crypto__i8__first may_be_replaced_by 0. round_2_rules(30): spark__crypto__i8__last may_be_replaced_by 7. round_2_rules(31): spark__crypto__i8__base__first may_be_replaced_by -2147483648. round_2_rules(32): spark__crypto__i8__base__last may_be_replaced_by 2147483647. round_2_rules(33): spark__crypto__word_count_t__size >= 0 may_be_deduced. round_2_rules(34): spark__crypto__word_count_t__first may_be_replaced_by 0. round_2_rules(35): spark__crypto__word_count_t__last may_be_replaced_by 268435455. round_2_rules(36): spark__crypto__word_count_t__base__first may_be_replaced_by -2147483648. round_2_rules(37): spark__crypto__word_count_t__base__last may_be_replaced_by 2147483647. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_5.vsm0000644000175000017500000000005311712765060031655 0ustar eugeneugenround_5,0,0,0,0,0,0,0,-nan,-nan,-nan,-nan, spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_8.log0000644000175000017500000000202611712513676031642 0ustar eugeneugenSPARK Simplifier Pro Edition Reading round_8.fdl (for inherited FDL type declarations) Processing round_8.vcg ... Simplified VC: 1 - All conclusions proved Simplified VC: 2 - All conclusions proved Simplified VC: 3 - All conclusions proved Simplified VC: 4 - All conclusions proved Simplified VC: 5 - All conclusions proved Simplified VC: 6 - All conclusions proved Simplified VC: 7 - All conclusions proved Simplified VC: 8 - All conclusions proved Simplified VC: 9 - All conclusions proved Simplified VC: 10 - All conclusions proved Simplified VC: 11 - All conclusions proved Simplified VC: 12 - All conclusions proved Simplified VC: 13 - All conclusions proved Simplified VC: 14 - All conclusions proved Simplified VC: 15 - All conclusions proved Simplified VC: 16 - All conclusions proved Simplified VC: 17 - All conclusions proved Simplified VC: 18 - All conclusions proved Simplified VC: 19 - All conclusions proved Simplified VC: 20 - All conclusions proved Automatic simplification completed. 
Simplified output sent to round_8.siv. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_2.vlg0000644000175000017500000000324511712765060031643 0ustar eugeneugen Non-option args: round_2 Option args: -fuse-concls \ -decls=[SUPRESSED IN PLAIN MODE] \ -unique-working-files \ -rules=[SUPRESSED IN PLAIN MODE] \ -rules=[SUPRESSED IN PLAIN MODE] \ -elim-enums \ -ground-eval-exp \ -abstract-exp \ -abstract-divmod \ -utick \ -gtick \ -longtick \ -echo-final-stats \ -csv-reports-include-unit-kind \ -level=warning \ -bit-type \ -bit-type-bool-eq-to-iff \ -refine-types \ -refine-int-subrange-type \ -abstract-arrays-records-late \ -elim-array-constructors \ -add-array-select-box-update-axioms \ -abstract-array-box-updates \ -add-array-select-update-axioms \ -abstract-array-select-updates \ -abstract-array-types \ -abstract-record-types \ -abstract-bit-ops \ -abstract-bit-valued-eqs \ -abstract-bit-valued-int-le \ -elim-bit-type-and-consts \ -abstract-reals \ -lift-quants \ -strip-quantifier-patterns \ -elim-type-aliases \ -interface-mode=smtlib \ -refine-bit-type-as-int-subtype \ -refine-bit-eq-equiv \ -elim-record-constructors \ -add-record-select-update-axioms \ -abstract-record-selects-updates \ -logic=AUFNIRA \ -report=round_2 \ -prover-command=[SUPRESSED IN PLAIN MODE] \ -siv \ -plain \ -read-unit-rlu-files \ -read-directory-rlu-files \ -delete-rules-with-undeclared-ids \ Total ERROR messages: 0 Total WARNING messages: 0 Summary Stats: true: 0 (-nan%) unproven: 0 (-nan%) error: 0 (-nan%) total: 0 spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_3.siv0000644000175000017500000000710411712513676031657 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Round_3 For path(s) from start to run-time check associated with statement of line 336: procedure_round_3_1. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 337: procedure_round_3_2. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 337: procedure_round_3_3. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 338: procedure_round_3_4. *** true . /* all conclusions proved */ For path(s) from start to assertion of line 341: procedure_round_3_5. *** true . /* all conclusions proved */ For path(s) from assertion of line 341 to run-time check associated with statement of line 343: procedure_round_3_6. *** true . /* all conclusions proved */ For path(s) from assertion of line 341 to run-time check associated with statement of line 344: procedure_round_3_7. *** true . /* all conclusions proved */ For path(s) from assertion of line 341 to run-time check associated with statement of line 344: procedure_round_3_8. *** true . /* all conclusions proved */ For path(s) from assertion of line 341 to run-time check associated with statement of line 345: procedure_round_3_9. *** true . /* all conclusions proved */ For path(s) from assertion of line 341 to assertion of line 347: procedure_round_3_10. *** true . /* all conclusions proved */ For path(s) from assertion of line 347 to run-time check associated with statement of line 349: procedure_round_3_11. 
*** true . /* all conclusions proved */ For path(s) from assertion of line 347 to run-time check associated with statement of line 350: procedure_round_3_12. *** true . /* all conclusions proved */ For path(s) from assertion of line 347 to run-time check associated with statement of line 350: procedure_round_3_13. *** true . /* all conclusions proved */ For path(s) from assertion of line 347 to run-time check associated with statement of line 351: procedure_round_3_14. *** true . /* all conclusions proved */ For path(s) from assertion of line 347 to assertion of line 353: procedure_round_3_15. *** true . /* all conclusions proved */ For path(s) from assertion of line 353 to run-time check associated with statement of line 355: procedure_round_3_16. *** true . /* all conclusions proved */ For path(s) from assertion of line 353 to run-time check associated with statement of line 356: procedure_round_3_17. *** true . /* all conclusions proved */ For path(s) from assertion of line 353 to run-time check associated with statement of line 356: procedure_round_3_18. *** true . /* all conclusions proved */ For path(s) from assertion of line 353 to run-time check associated with statement of line 357: procedure_round_3_19. *** true . /* all conclusions proved */ For path(s) from assertion of line 353 to finish: procedure_round_3_20. *** true . /* all conclusions proved */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_7.vlg0000644000175000017500000000324511712765060031650 0ustar eugeneugen Non-option args: round_7 Option args: -fuse-concls \ -decls=[SUPRESSED IN PLAIN MODE] \ -unique-working-files \ -rules=[SUPRESSED IN PLAIN MODE] \ -rules=[SUPRESSED IN PLAIN MODE] \ -elim-enums \ -ground-eval-exp \ -abstract-exp \ -abstract-divmod \ -utick \ -gtick \ -longtick \ -echo-final-stats \ -csv-reports-include-unit-kind \ -level=warning \ -bit-type \ -bit-type-bool-eq-to-iff \ -refine-types \ -refine-int-subrange-type \ -abstract-arrays-records-late \ -elim-array-constructors \ -add-array-select-box-update-axioms \ -abstract-array-box-updates \ -add-array-select-update-axioms \ -abstract-array-select-updates \ -abstract-array-types \ -abstract-record-types \ -abstract-bit-ops \ -abstract-bit-valued-eqs \ -abstract-bit-valued-int-le \ -elim-bit-type-and-consts \ -abstract-reals \ -lift-quants \ -strip-quantifier-patterns \ -elim-type-aliases \ -interface-mode=smtlib \ -refine-bit-type-as-int-subtype \ -refine-bit-eq-equiv \ -elim-record-constructors \ -add-record-select-update-axioms \ -abstract-record-selects-updates \ -logic=AUFNIRA \ -report=round_7 \ -prover-command=[SUPRESSED IN PLAIN MODE] \ -siv \ -plain \ -read-unit-rlu-files \ -read-directory-rlu-files \ -delete-rules-with-undeclared-ids \ Total ERROR messages: 0 Total WARNING messages: 0 Summary Stats: true: 0 (-nan%) unproven: 0 (-nan%) error: 0 (-nan%) total: 0 spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_8.vcg0000644000175000017500000015342411712513676031651 0ustar eugeneugen ******************************************************* Semantic Analysis of SPARK Text Examiner Pro Edition ******************************************************* procedure Skein.Skein_512_Process_Block.Round_8 For path(s) from start to run-time check associated with statement of line 486: procedure_round_8_1. H1: true . 
H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 1 >= spark__crypto__i8__first . C4: 1 <= spark__crypto__i8__last . C5: 6 >= spark__crypto__i8__first . C6: 6 <= spark__crypto__i8__last . C7: 6 >= spark__crypto__i8__first . C8: 6 <= spark__crypto__i8__last . For path(s) from start to run-time check associated with statement of line 487: procedure_round_8_2. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . -> C1: r_512_7_0 >= spark__unsigned__shift_count__first . C2: r_512_7_0 <= spark__unsigned__shift_count__last . C3: element(update(x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . C4: element(update(x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . C5: 1 >= spark__crypto__i8__first . C6: 1 <= spark__crypto__i8__last . C7: 1 >= spark__crypto__i8__first . C8: 1 <= spark__crypto__i8__last . For path(s) from start to run-time check associated with statement of line 487: procedure_round_8_3. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . H11: r_512_7_0 >= spark__unsigned__shift_count__first . H12: r_512_7_0 <= spark__unsigned__shift_count__last . H13: element(update(x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . H14: element(update(x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . H15: 1 >= spark__crypto__i8__first . H16: 1 <= spark__crypto__i8__last . H17: 1 >= spark__crypto__i8__first . H18: 1 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_7_0) >= interfaces__unsigned_64__first . 
H20: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_7_0) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_7_0) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_7_0) <= spark__unsigned__u64__last . For path(s) from start to run-time check associated with statement of line 488: procedure_round_8_4. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . H11: r_512_7_0 >= spark__unsigned__shift_count__first . H12: r_512_7_0 <= spark__unsigned__shift_count__last . H13: element(update(x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . H14: element(update(x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . H15: 1 >= spark__crypto__i8__first . H16: 1 <= spark__crypto__i8__last . H17: 1 >= spark__crypto__i8__first . H18: 1 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_7_0) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_7_0) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_7_0) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_7_0) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_7_0)), [1]), element(update(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_7_0)), [6])) >= spark__unsigned__u64__first . 
C2: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_7_0)), [1]), element(update(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_7_0)), [6])) <= spark__unsigned__u64__last . C3: 6 >= spark__crypto__i8__first . C4: 6 <= spark__crypto__i8__last . C5: 1 >= spark__crypto__i8__first . C6: 1 <= spark__crypto__i8__last . C7: 1 >= spark__crypto__i8__first . C8: 1 <= spark__crypto__i8__last . For path(s) from start to assertion of line 491: procedure_round_8_5. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . H11: r_512_7_0 >= spark__unsigned__shift_count__first . H12: r_512_7_0 <= spark__unsigned__shift_count__last . H13: element(update(x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . H14: element(update(x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . H15: 1 >= spark__crypto__i8__first . H16: 1 <= spark__crypto__i8__last . H17: 1 >= spark__crypto__i8__first . H18: 1 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_7_0) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_7_0) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_7_0) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_7_0) <= spark__unsigned__u64__last . H23: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_7_0)), [1]), element(update(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_7_0)), [6])) >= spark__unsigned__u64__first . 
H24: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_7_0)), [1]), element(update(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_7_0)), [6])) <= spark__unsigned__u64__last . H25: 6 >= spark__crypto__i8__first . H26: 6 <= spark__crypto__i8__last . H27: 1 >= spark__crypto__i8__first . H28: 1 <= spark__crypto__i8__last . H29: 1 >= spark__crypto__i8__first . H30: 1 <= spark__crypto__i8__last . -> C1: true . C2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(update(update(update( x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_7_0)), [1], bit__xor(element(update(update( x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_7_0)), [1]), element(update(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_7_0)), [6]))), [i___1]) >= spark__unsigned__u64__first) and (element(update(update(update( x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_7_0)), [1], bit__xor(element(update(update( x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_7_0)), [1]), element(update(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_7_0)), [6]))), [i___1]) <= spark__unsigned__u64__last))) . For path(s) from assertion of line 491 to run-time check associated with statement of line 493: procedure_round_8_6. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 7 >= spark__crypto__i8__first . C4: 7 <= spark__crypto__i8__last . C5: 0 >= spark__crypto__i8__first . C6: 0 <= spark__crypto__i8__last . C7: 0 >= spark__crypto__i8__first . C8: 0 <= spark__crypto__i8__last . For path(s) from assertion of line 491 to run-time check associated with statement of line 494: procedure_round_8_7. 
H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . -> C1: r_512_7_1 >= spark__unsigned__shift_count__first . C2: r_512_7_1 <= spark__unsigned__shift_count__last . C3: element(update(x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . C4: element(update(x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . C5: 7 >= spark__crypto__i8__first . C6: 7 <= spark__crypto__i8__last . C7: 7 >= spark__crypto__i8__first . C8: 7 <= spark__crypto__i8__last . For path(s) from assertion of line 491 to run-time check associated with statement of line 494: procedure_round_8_8. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . H11: r_512_7_1 >= spark__unsigned__shift_count__first . H12: r_512_7_1 <= spark__unsigned__shift_count__last . H13: element(update(x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . H14: element(update(x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . H15: 7 >= spark__crypto__i8__first . H16: 7 <= spark__crypto__i8__last . H17: 7 >= spark__crypto__i8__first . H18: 7 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_7_1) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_7_1) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_7_1) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_7_1) <= spark__unsigned__u64__last . For path(s) from assertion of line 491 to run-time check associated with statement of line 495: procedure_round_8_9. H1: true . 
H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . H11: r_512_7_1 >= spark__unsigned__shift_count__first . H12: r_512_7_1 <= spark__unsigned__shift_count__last . H13: element(update(x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . H14: element(update(x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . H15: 7 >= spark__crypto__i8__first . H16: 7 <= spark__crypto__i8__last . H17: 7 >= spark__crypto__i8__first . H18: 7 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_7_1) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_7_1) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_7_1) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_7_1) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_7_1)), [7]), element(update(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_7_1)), [0])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_7_1)), [7]), element(update(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_7_1)), [0])) <= spark__unsigned__u64__last . C3: 0 >= spark__crypto__i8__first . C4: 0 <= spark__crypto__i8__last . C5: 7 >= spark__crypto__i8__first . C6: 7 <= spark__crypto__i8__last . C7: 7 >= spark__crypto__i8__first . C8: 7 <= spark__crypto__i8__last . For path(s) from assertion of line 491 to assertion of line 497: procedure_round_8_10. H1: true . 
H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . H11: r_512_7_1 >= spark__unsigned__shift_count__first . H12: r_512_7_1 <= spark__unsigned__shift_count__last . H13: element(update(x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . H14: element(update(x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . H15: 7 >= spark__crypto__i8__first . H16: 7 <= spark__crypto__i8__last . H17: 7 >= spark__crypto__i8__first . H18: 7 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_7_1) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_7_1) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_7_1) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_7_1) <= spark__unsigned__u64__last . H23: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_7_1)), [7]), element(update(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_7_1)), [0])) >= spark__unsigned__u64__first . H24: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_7_1)), [7]), element(update(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_7_1)), [0])) <= spark__unsigned__u64__last . H25: 0 >= spark__crypto__i8__first . H26: 0 <= spark__crypto__i8__last . H27: 7 >= spark__crypto__i8__first . H28: 7 <= spark__crypto__i8__last . H29: 7 >= spark__crypto__i8__first . H30: 7 <= spark__crypto__i8__last . -> C1: true . 
C2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(update(update(update( x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_7_1)), [7], bit__xor(element(update(update( x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_7_1)), [7]), element(update(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_7_1)), [0]))), [i___1]) >= spark__unsigned__u64__first) and (element(update(update(update( x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_7_1)), [7], bit__xor(element(update(update( x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_7_1)), [7]), element(update(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_7_1)), [0]))), [i___1]) <= spark__unsigned__u64__last))) . For path(s) from assertion of line 497 to run-time check associated with statement of line 499: procedure_round_8_11. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 5 >= spark__crypto__i8__first . C4: 5 <= spark__crypto__i8__last . C5: 2 >= spark__crypto__i8__first . C6: 2 <= spark__crypto__i8__last . C7: 2 >= spark__crypto__i8__first . C8: 2 <= spark__crypto__i8__last . For path(s) from assertion of line 497 to run-time check associated with statement of line 500: procedure_round_8_12. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . -> C1: r_512_7_2 >= spark__unsigned__shift_count__first . C2: r_512_7_2 <= spark__unsigned__shift_count__last . 
C3: element(update(x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . C4: element(update(x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . C5: 5 >= spark__crypto__i8__first . C6: 5 <= spark__crypto__i8__last . C7: 5 >= spark__crypto__i8__first . C8: 5 <= spark__crypto__i8__last . For path(s) from assertion of line 497 to run-time check associated with statement of line 500: procedure_round_8_13. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . H11: r_512_7_2 >= spark__unsigned__shift_count__first . H12: r_512_7_2 <= spark__unsigned__shift_count__last . H13: element(update(x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . H14: element(update(x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . H15: 5 >= spark__crypto__i8__first . H16: 5 <= spark__crypto__i8__last . H17: 5 >= spark__crypto__i8__first . H18: 5 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_7_2) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_7_2) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_7_2) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_7_2) <= spark__unsigned__u64__last . For path(s) from assertion of line 497 to run-time check associated with statement of line 501: procedure_round_8_14. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . H11: r_512_7_2 >= spark__unsigned__shift_count__first . H12: r_512_7_2 <= spark__unsigned__shift_count__last . H13: element(update(x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . H15: 5 >= spark__crypto__i8__first . H16: 5 <= spark__crypto__i8__last . H17: 5 >= spark__crypto__i8__first . H18: 5 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_7_2) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_7_2) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_7_2) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_7_2) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_7_2)), [5]), element(update(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_7_2)), [2])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_7_2)), [5]), element(update(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_7_2)), [2])) <= spark__unsigned__u64__last . C3: 2 >= spark__crypto__i8__first . C4: 2 <= spark__crypto__i8__last . C5: 5 >= spark__crypto__i8__first . C6: 5 <= spark__crypto__i8__last . C7: 5 >= spark__crypto__i8__first . C8: 5 <= spark__crypto__i8__last . For path(s) from assertion of line 497 to assertion of line 503: procedure_round_8_15. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . H11: r_512_7_2 >= spark__unsigned__shift_count__first . H12: r_512_7_2 <= spark__unsigned__shift_count__last . H13: element(update(x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . H15: 5 >= spark__crypto__i8__first . H16: 5 <= spark__crypto__i8__last . H17: 5 >= spark__crypto__i8__first . H18: 5 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_7_2) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_7_2) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_7_2) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_7_2) <= spark__unsigned__u64__last . H23: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_7_2)), [5]), element(update(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_7_2)), [2])) >= spark__unsigned__u64__first . H24: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_7_2)), [5]), element(update(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_7_2)), [2])) <= spark__unsigned__u64__last . H25: 2 >= spark__crypto__i8__first . H26: 2 <= spark__crypto__i8__last . H27: 5 >= spark__crypto__i8__first . H28: 5 <= spark__crypto__i8__last . H29: 5 >= spark__crypto__i8__first . H30: 5 <= spark__crypto__i8__last . -> C1: true . 
C2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(update(update(update( x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_7_2)), [5], bit__xor(element(update(update( x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_7_2)), [5]), element(update(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_7_2)), [2]))), [i___1]) >= spark__unsigned__u64__first) and (element(update(update(update( x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_7_2)), [5], bit__xor(element(update(update( x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_7_2)), [5]), element(update(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_7_2)), [2]))), [i___1]) <= spark__unsigned__u64__last))) . For path(s) from assertion of line 503 to run-time check associated with statement of line 505: procedure_round_8_16. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 3 >= spark__crypto__i8__first . C4: 3 <= spark__crypto__i8__last . C5: 4 >= spark__crypto__i8__first . C6: 4 <= spark__crypto__i8__last . C7: 4 >= spark__crypto__i8__first . C8: 4 <= spark__crypto__i8__last . For path(s) from assertion of line 503 to run-time check associated with statement of line 506: procedure_round_8_17. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . -> C1: r_512_7_3 >= spark__unsigned__shift_count__first . C2: r_512_7_3 <= spark__unsigned__shift_count__last . 
C3: element(update(x, [4], (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . C4: element(update(x, [4], (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . C5: 3 >= spark__crypto__i8__first . C6: 3 <= spark__crypto__i8__last . C7: 3 >= spark__crypto__i8__first . C8: 3 <= spark__crypto__i8__last . For path(s) from assertion of line 503 to run-time check associated with statement of line 506: procedure_round_8_18. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . H11: r_512_7_3 >= spark__unsigned__shift_count__first . H12: r_512_7_3 <= spark__unsigned__shift_count__last . H13: element(update(x, [4], (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . H14: element(update(x, [4], (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . H15: 3 >= spark__crypto__i8__first . H16: 3 <= spark__crypto__i8__last . H17: 3 >= spark__crypto__i8__first . H18: 3 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_7_3) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_7_3) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_7_3) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_7_3) <= spark__unsigned__u64__last . For path(s) from assertion of line 503 to run-time check associated with statement of line 507: procedure_round_8_19. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . H11: r_512_7_3 >= spark__unsigned__shift_count__first . H12: r_512_7_3 <= spark__unsigned__shift_count__last . H13: element(update(x, [4], (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [4], (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . H15: 3 >= spark__crypto__i8__first . H16: 3 <= spark__crypto__i8__last . H17: 3 >= spark__crypto__i8__first . H18: 3 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_7_3) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_7_3) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_7_3) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_7_3) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_7_3)), [3]), element(update(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_7_3)), [4])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_7_3)), [3]), element(update(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_7_3)), [4])) <= spark__unsigned__u64__last . C3: 4 >= spark__crypto__i8__first . C4: 4 <= spark__crypto__i8__last . C5: 3 >= spark__crypto__i8__first . C6: 3 <= spark__crypto__i8__last . C7: 3 >= spark__crypto__i8__first . C8: 3 <= spark__crypto__i8__last . For path(s) from assertion of line 503 to finish: procedure_round_8_20. *** true . /* trivially true VC removed by Examiner */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_7.log0000644000175000017500000000202611712513676031641 0ustar eugeneugenSPARK Simplifier Pro Edition Reading round_7.fdl (for inherited FDL type declarations) Processing round_7.vcg ... 
Simplified VC: 1 - All conclusions proved Simplified VC: 2 - All conclusions proved Simplified VC: 3 - All conclusions proved Simplified VC: 4 - All conclusions proved Simplified VC: 5 - All conclusions proved Simplified VC: 6 - All conclusions proved Simplified VC: 7 - All conclusions proved Simplified VC: 8 - All conclusions proved Simplified VC: 9 - All conclusions proved Simplified VC: 10 - All conclusions proved Simplified VC: 11 - All conclusions proved Simplified VC: 12 - All conclusions proved Simplified VC: 13 - All conclusions proved Simplified VC: 14 - All conclusions proved Simplified VC: 15 - All conclusions proved Simplified VC: 16 - All conclusions proved Simplified VC: 17 - All conclusions proved Simplified VC: 18 - All conclusions proved Simplified VC: 19 - All conclusions proved Simplified VC: 20 - All conclusions proved Automatic simplification completed. Simplified output sent to round_7.siv. ././@LongLink0000000000000000000000000000015200000000000011563 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/threefish_block.rlsspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/threefish_bloc0000644000175000017500000000677411712513676032502 0ustar eugeneugen /*********************************************************/ /*Proof Rule Declarations*/ /*Examiner Pro Edition*/ /*********************************************************/ /*procedure Skein.Skein_512_Process_Block.Threefish_Block*/ rule_family threefish_bl_rules: X requires [X:any] & X <= Y requires [X:ire, Y:ire] & X >= Y requires [X:ire, Y:ire]. threefish_bl_rules(1): skein_512_rounds_total may_be_replaced_by 72. threefish_bl_rules(2): integer__size >= 0 may_be_deduced. threefish_bl_rules(3): integer__first may_be_replaced_by -2147483648. threefish_bl_rules(4): integer__last may_be_replaced_by 2147483647. threefish_bl_rules(5): integer__base__first may_be_replaced_by -2147483648. threefish_bl_rules(6): integer__base__last may_be_replaced_by 2147483647. threefish_bl_rules(7): interfaces__unsigned_64__size >= 0 may_be_deduced. threefish_bl_rules(8): interfaces__unsigned_64__size may_be_replaced_by 64. threefish_bl_rules(9): interfaces__unsigned_64__first may_be_replaced_by 0. threefish_bl_rules(10): interfaces__unsigned_64__last may_be_replaced_by 18446744073709551615. threefish_bl_rules(11): interfaces__unsigned_64__base__first may_be_replaced_by 0. threefish_bl_rules(12): interfaces__unsigned_64__base__last may_be_replaced_by 18446744073709551615. threefish_bl_rules(13): interfaces__unsigned_64__modulus may_be_replaced_by 18446744073709551616. threefish_bl_rules(14): spark__unsigned__u64__size >= 0 may_be_deduced. threefish_bl_rules(15): spark__unsigned__u64__first may_be_replaced_by 0. threefish_bl_rules(16): spark__unsigned__u64__last may_be_replaced_by 18446744073709551615. threefish_bl_rules(17): spark__unsigned__u64__base__first may_be_replaced_by 0. threefish_bl_rules(18): spark__unsigned__u64__base__last may_be_replaced_by 18446744073709551615. threefish_bl_rules(19): spark__unsigned__u64__modulus may_be_replaced_by 18446744073709551616. threefish_bl_rules(20): spark__crypto__i3__size >= 0 may_be_deduced. threefish_bl_rules(21): spark__crypto__i3__first may_be_replaced_by 0. threefish_bl_rules(22): spark__crypto__i3__last may_be_replaced_by 2. threefish_bl_rules(23): spark__crypto__i3__base__first may_be_replaced_by -2147483648. threefish_bl_rules(24): spark__crypto__i3__base__last may_be_replaced_by 2147483647. 
threefish_bl_rules(25): spark__crypto__i8__size >= 0 may_be_deduced. threefish_bl_rules(26): spark__crypto__i8__first may_be_replaced_by 0. threefish_bl_rules(27): spark__crypto__i8__last may_be_replaced_by 7. threefish_bl_rules(28): spark__crypto__i8__base__first may_be_replaced_by -2147483648. threefish_bl_rules(29): spark__crypto__i8__base__last may_be_replaced_by 2147483647. threefish_bl_rules(30): spark__crypto__i9__size >= 0 may_be_deduced. threefish_bl_rules(31): spark__crypto__i9__first may_be_replaced_by 0. threefish_bl_rules(32): spark__crypto__i9__last may_be_replaced_by 8. threefish_bl_rules(33): spark__crypto__i9__base__first may_be_replaced_by -2147483648. threefish_bl_rules(34): spark__crypto__i9__base__last may_be_replaced_by 2147483647. threefish_bl_rules(35): spark__crypto__word_count_t__size >= 0 may_be_deduced. threefish_bl_rules(36): spark__crypto__word_count_t__first may_be_replaced_by 0. threefish_bl_rules(37): spark__crypto__word_count_t__last may_be_replaced_by 268435455. threefish_bl_rules(38): spark__crypto__word_count_t__base__first may_be_replaced_by -2147483648. threefish_bl_rules(39): spark__crypto__word_count_t__base__last may_be_replaced_by 2147483647. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_1.rls0000644000175000017500000000622211712513676031654 0ustar eugeneugen /*********************************************************/ /*Proof Rule Declarations*/ /*Examiner Pro Edition*/ /*********************************************************/ /*procedure Skein.Skein_512_Process_Block.Round_1*/ rule_family round_1_rules: X requires [X:any] & X <= Y requires [X:ire, Y:ire] & X >= Y requires [X:ire, Y:ire]. round_1_rules(1): r_512_0_0 may_be_replaced_by 46. round_1_rules(2): r_512_0_1 may_be_replaced_by 36. round_1_rules(3): r_512_0_2 may_be_replaced_by 19. round_1_rules(4): r_512_0_3 may_be_replaced_by 37. round_1_rules(5): integer__size >= 0 may_be_deduced. round_1_rules(6): integer__first may_be_replaced_by -2147483648. round_1_rules(7): integer__last may_be_replaced_by 2147483647. round_1_rules(8): integer__base__first may_be_replaced_by -2147483648. round_1_rules(9): integer__base__last may_be_replaced_by 2147483647. round_1_rules(10): interfaces__unsigned_64__size >= 0 may_be_deduced. round_1_rules(11): interfaces__unsigned_64__size may_be_replaced_by 64. round_1_rules(12): interfaces__unsigned_64__first may_be_replaced_by 0. round_1_rules(13): interfaces__unsigned_64__last may_be_replaced_by 18446744073709551615. round_1_rules(14): interfaces__unsigned_64__base__first may_be_replaced_by 0. round_1_rules(15): interfaces__unsigned_64__base__last may_be_replaced_by 18446744073709551615. round_1_rules(16): interfaces__unsigned_64__modulus may_be_replaced_by 18446744073709551616. round_1_rules(17): spark__unsigned__u64__size >= 0 may_be_deduced. round_1_rules(18): spark__unsigned__u64__first may_be_replaced_by 0. round_1_rules(19): spark__unsigned__u64__last may_be_replaced_by 18446744073709551615. round_1_rules(20): spark__unsigned__u64__base__first may_be_replaced_by 0. round_1_rules(21): spark__unsigned__u64__base__last may_be_replaced_by 18446744073709551615. round_1_rules(22): spark__unsigned__u64__modulus may_be_replaced_by 18446744073709551616. round_1_rules(23): spark__unsigned__shift_count__size >= 0 may_be_deduced. round_1_rules(24): spark__unsigned__shift_count__first may_be_replaced_by 0. round_1_rules(25): spark__unsigned__shift_count__last may_be_replaced_by 64. 
round_1_rules(26): spark__unsigned__shift_count__base__first may_be_replaced_by -2147483648. round_1_rules(27): spark__unsigned__shift_count__base__last may_be_replaced_by 2147483647. round_1_rules(28): spark__crypto__i8__size >= 0 may_be_deduced. round_1_rules(29): spark__crypto__i8__first may_be_replaced_by 0. round_1_rules(30): spark__crypto__i8__last may_be_replaced_by 7. round_1_rules(31): spark__crypto__i8__base__first may_be_replaced_by -2147483648. round_1_rules(32): spark__crypto__i8__base__last may_be_replaced_by 2147483647. round_1_rules(33): spark__crypto__word_count_t__size >= 0 may_be_deduced. round_1_rules(34): spark__crypto__word_count_t__first may_be_replaced_by 0. round_1_rules(35): spark__crypto__word_count_t__last may_be_replaced_by 268435455. round_1_rules(36): spark__crypto__word_count_t__base__first may_be_replaced_by -2147483648. round_1_rules(37): spark__crypto__word_count_t__base__last may_be_replaced_by 2147483647. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_2.vct0000644000175000017500000000000011712513676031635 0ustar eugeneugen././@LongLink0000000000000000000000000000015200000000000011563 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/threefish_block.logspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/threefish_bloc0000644000175000017500000000105311712513676032463 0ustar eugeneugenSPARK Simplifier Pro Edition Reading threefish_block.fdl (for inherited FDL type declarations) Processing threefish_block.vcg ... Simplified VC: 1 - All conclusions proved Simplified VC: 2 - All conclusions proved Simplified VC: 3 - All conclusions proved Simplified VC: 4 - All conclusions proved Simplified VC: 5 - All conclusions proved Simplified VC: 6 - All conclusions proved Simplified VC: 7 - All conclusions proved Simplified VC: 8 - All conclusions proved Automatic simplification completed. Simplified output sent to threefish_block.siv. ././@LongLink0000000000000000000000000000016200000000000011564 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_key_schedule.slgspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_key0000644000175000017500000022735511712513676032533 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Initialize_Key_Schedule @@@@@@@@@@ VC: procedure_initialize_key_schedule_1. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule initialize_k_rules(1). This was achieved by replacing all occurrences of skein_ks_parity by: 2004413935125273122. 
New C1: 2004413935125273122 >= spark__unsigned__u64__first New C2: 2004413935125273122 <= spark__unsigned__u64__last -S- Applied substitution rule initialize_k_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H3: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H19: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New C1: true -S- Applied substitution rule initialize_k_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H20: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H3: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New C2: true -S- Applied substitution rule initialize_k_rules(2). This was achieved by replacing all occurrences of wcnt by: 8. New C3: 8 >= spark__crypto__i9__first New C4: 8 <= spark__crypto__i9__last -S- Applied substitution rule initialize_k_rules(83). This was achieved by replacing all occurrences of spark__crypto__i9__first by: 0. New C3: true -S- Applied substitution rule initialize_k_rules(84). This was achieved by replacing all occurrences of spark__crypto__i9__last by: 8. New C4: true *** Proved C1: true *** Proved C2: true *** Proved C3: true *** Proved C4: true *** PROVED VC. @@@@@@@@@@ VC: procedure_initialize_key_schedule_2. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule initialize_k_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H3: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H19: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H21: skein_ks_parity >= 0 New C1: element(fld_x(ctx), [spark__crypto__i8__first]) >= 0 -S- Applied substitution rule initialize_k_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H20: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H22: skein_ks_parity <= 18446744073709551615 New H3: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New C2: element(fld_x(ctx), [spark__crypto__i8__first]) <= 18446744073709551615 -S- Applied substitution rule initialize_k_rules(78). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. 
New C3: 0 >= skein_512_state_words_index__first New C4: 0 <= skein_512_state_words_index__last New C5: 0 >= spark__crypto__i9__first New C6: 0 <= spark__crypto__i9__last New C1: element(fld_x(ctx), [0]) >= 0 New C2: element(fld_x(ctx), [0]) <= 18446744073709551615 -S- Applied substitution rule initialize_k_rules(98). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New C3: true -S- Applied substitution rule initialize_k_rules(99). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New C4: true -S- Applied substitution rule initialize_k_rules(83). This was achieved by replacing all occurrences of spark__crypto__i9__first by: 0. New H23: wcnt >= 0 New C5: true -S- Applied substitution rule initialize_k_rules(84). This was achieved by replacing all occurrences of spark__crypto__i9__last by: 8. New H24: wcnt <= 8 New C6: true *** Proved C1: element(fld_x(ctx), [0]) >= 0 using hypothesis H3. *** Proved C2: element(fld_x(ctx), [0]) <= 18446744073709551615 using hypothesis H3. *** Proved C3: true *** Proved C4: true *** Proved C5: true *** Proved C6: true *** PROVED VC. @@@@@@@@@@ VC: procedure_initialize_key_schedule_3. @@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: for_all(j_ : integer, spark__crypto__i8__first <= j_ and j_ <= loop__1__i -> spark__unsigned__u64__first <= element(ks, [j_]) and element(ks, [j_]) <= spark__unsigned__u64__last) %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H25 has been replaced by "true". (It is already present, as H23). --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). -S- Applied substitution rule initialize_k_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H1: for_all(j_ : integer, spark__crypto__i8__first <= j_ and j_ <= loop__1__i -> 0 <= element(ks, [j_]) and element(ks, [j_]) <= spark__unsigned__u64__last) New H2: element(ks, [wcnt]) >= 0 New H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H21: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New C1: element(fld_x(ctx), [loop__1__i + 1]) >= 0 -S- Applied substitution rule initialize_k_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H3: element(ks, [wcnt]) <= 18446744073709551615 New H22: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H1: for_all(j_ : integer, spark__crypto__i8__first <= j_ and j_ <= loop__1__i -> 0 <= element(ks, [j_]) and element(ks, [j_]) <= 18446744073709551615) New H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New C2: element(fld_x(ctx), [loop__1__i + 1]) <= 18446744073709551615 -S- Applied substitution rule initialize_k_rules(98). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New C3: loop__1__i >= - 1 -S- Applied substitution rule initialize_k_rules(99). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New C4: loop__1__i <= 6 -S- Applied substitution rule initialize_k_rules(83). This was achieved by replacing all occurrences of spark__crypto__i9__first by: 0. New C5: loop__1__i >= - 1 -S- Applied substitution rule initialize_k_rules(84). This was achieved by replacing all occurrences of spark__crypto__i9__last by: 8. New C6: loop__1__i <= 7 >>> Restructured hypothesis H27 into: >>> H27: loop__1__i <> spark__crypto__i8__last -S- Applied substitution rule initialize_k_rules(2). This was achieved by replacing all occurrences of wcnt by: 8. New H2: element(ks, [8]) >= 0 New H3: element(ks, [8]) <= 18446744073709551615 -S- Applied substitution rule initialize_k_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New H6: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule initialize_k_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: fld_byte_count(fld_h(ctx)) <= 2147483647 -S- Applied substitution rule initialize_k_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H12: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule initialize_k_rules(43). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H13: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule initialize_k_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H15: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule initialize_k_rules(49). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H16: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule initialize_k_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule initialize_k_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. 
New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule initialize_k_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H17: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule initialize_k_rules(61). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H18: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule initialize_k_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H19: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule initialize_k_rules(67). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H20: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule initialize_k_rules(78). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H1: for_all(j_ : integer, 0 <= j_ and j_ <= loop__1__i -> 0 <= element(ks, [j_]) and element(ks, [j_]) <= 18446744073709551615) New H23: loop__1__i >= 0 -S- Applied substitution rule initialize_k_rules(79). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H24: loop__1__i <= 7 New H27: loop__1__i <> 7 -S- Applied substitution rule initialize_k_rules(93). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H8: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule initialize_k_rules(94). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H9: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule initialize_k_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H4: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule initialize_k_rules(104). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H4: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) %%% Hypotheses H24 & H27 together imply that loop__1__i < 7. H24 & H27 have therefore been deleted and a new H28 added to this effect. *** Proved C1: element(fld_x(ctx), [loop__1__i + 1]) >= 0 using hypotheses H5, H23 & H28. *** Proved C2: element(fld_x(ctx), [loop__1__i + 1]) <= 18446744073709551615 using hypotheses H5, H23 & H28. *** Proved C3: loop__1__i >= - 1 using hypothesis H23. *** Proved C4: loop__1__i <= 6 using hypothesis H28. *** Proved C5: loop__1__i >= - 1 using hypothesis H23. *** Proved C6: loop__1__i <= 7 using hypothesis H28. *** PROVED VC. @@@@@@@@@@ VC: procedure_initialize_key_schedule_4. 
@@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) *** Proved C3: spark__crypto__i8__first >= skein_512_state_words_index__first using hypothesis H27. *** Proved C4: spark__crypto__i8__first <= skein_512_state_words_index__last using hypothesis H28. *** Proved C5: wcnt >= spark__crypto__i9__first using hypothesis H23. *** Proved C6: wcnt <= spark__crypto__i9__last using hypothesis H24. *** Proved C7: wcnt >= spark__crypto__i9__first using hypothesis H23. *** Proved C8: wcnt <= spark__crypto__i9__last using hypothesis H24. -S- Applied substitution rule initialize_k_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H3: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H19: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H21: skein_ks_parity >= 0 New H25: element(fld_x(ctx), [spark__crypto__i8__first]) >= 0 New C1: bit__xor(element(update(update(ks, [wcnt], skein_ks_parity), [ spark__crypto__i8__first], element(fld_x(ctx), [ spark__crypto__i8__first])), [wcnt]), element(fld_x(ctx), [ spark__crypto__i8__first])) >= 0 -S- Applied substitution rule initialize_k_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H20: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H22: skein_ks_parity <= 18446744073709551615 New H26: element(fld_x(ctx), [spark__crypto__i8__first]) <= 18446744073709551615 New H3: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New C2: bit__xor(element(update(update(ks, [wcnt], skein_ks_parity), [ spark__crypto__i8__first], element(fld_x(ctx), [ spark__crypto__i8__first])), [wcnt]), element(fld_x(ctx), [ spark__crypto__i8__first])) <= 18446744073709551615 -S- Applied substitution rule initialize_k_rules(1). This was achieved by replacing all occurrences of skein_ks_parity by: 2004413935125273122. New H21: true New H22: true New C1: bit__xor(element(update(update(ks, [wcnt], 2004413935125273122), [ spark__crypto__i8__first], element(fld_x(ctx), [ spark__crypto__i8__first])), [wcnt]), element(fld_x(ctx), [ spark__crypto__i8__first])) >= 0 New C2: bit__xor(element(update(update(ks, [wcnt], 2004413935125273122), [ spark__crypto__i8__first], element(fld_x(ctx), [ spark__crypto__i8__first])), [wcnt]), element(fld_x(ctx), [ spark__crypto__i8__first])) <= 18446744073709551615 -S- Applied substitution rule initialize_k_rules(2). This was achieved by replacing all occurrences of wcnt by: 8. 
New H23: 8 >= spark__crypto__i9__first New H24: 8 <= spark__crypto__i9__last New C1: bit__xor(element(update(update(ks, [8], 2004413935125273122), [ spark__crypto__i8__first], element(fld_x(ctx), [ spark__crypto__i8__first])), [8]), element(fld_x(ctx), [ spark__crypto__i8__first])) >= 0 New C2: bit__xor(element(update(update(ks, [8], 2004413935125273122), [ spark__crypto__i8__first], element(fld_x(ctx), [ spark__crypto__i8__first])), [8]), element(fld_x(ctx), [ spark__crypto__i8__first])) <= 18446744073709551615 -S- Applied substitution rule initialize_k_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New H4: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule initialize_k_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H5: fld_byte_count(fld_h(ctx)) <= 2147483647 -S- Applied substitution rule initialize_k_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H10: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule initialize_k_rules(43). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H11: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule initialize_k_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H13: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule initialize_k_rules(49). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H14: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule initialize_k_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H2: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule initialize_k_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H2: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule initialize_k_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H15: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule initialize_k_rules(61). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H16: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule initialize_k_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H17: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule initialize_k_rules(67). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H18: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule initialize_k_rules(78). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. 
New H25: element(fld_x(ctx), [0]) >= 0 New H26: element(fld_x(ctx), [0]) <= 18446744073709551615 New H27: 0 >= skein_512_state_words_index__first New H28: 0 <= skein_512_state_words_index__last New H29: 0 >= spark__crypto__i9__first New H30: 0 <= spark__crypto__i9__last New C1: bit__xor(2004413935125273122, element(fld_x(ctx), [0])) >= 0 New C2: bit__xor(2004413935125273122, element(fld_x(ctx), [0])) <= 18446744073709551615 -S- Applied substitution rule initialize_k_rules(83). This was achieved by replacing all occurrences of spark__crypto__i9__first by: 0. New H23: true New H29: true -S- Applied substitution rule initialize_k_rules(84). This was achieved by replacing all occurrences of spark__crypto__i9__last by: 8. New H24: true New H30: true -S- Applied substitution rule initialize_k_rules(93). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H6: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule initialize_k_rules(94). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H7: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule initialize_k_rules(98). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H27: true -S- Applied substitution rule initialize_k_rules(99). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H28: true New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule initialize_k_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H2: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule initialize_k_rules(104). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H2: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) *** Proved C1: bit__xor(2004413935125273122, element(fld_x(ctx), [0])) >= 0 using hypothesis H25. *** Proved C2: bit__xor(2004413935125273122, element(fld_x(ctx), [0])) <= 18446744073709551615 using hypotheses H25 & H26. *** PROVED VC. @@@@@@@@@@ VC: procedure_initialize_key_schedule_5. 
@@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: for_all(j_ : integer, spark__crypto__i8__first <= j_ and j_ <= loop__1__i -> spark__unsigned__u64__first <= element(ks, [j_]) and element(ks, [j_]) <= spark__unsigned__u64__last) %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H25 has been replaced by "true". (It is already present, as H23). --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). *** Proved C3: loop__1__i + 1 >= skein_512_state_words_index__first using hypothesis H30. *** Proved C4: loop__1__i + 1 <= skein_512_state_words_index__last using hypothesis H31. -S- Applied substitution rule initialize_k_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H1: for_all(j_ : integer, spark__crypto__i8__first <= j_ and j_ <= loop__1__i -> 0 <= element(ks, [j_]) and element(ks, [j_]) <= spark__unsigned__u64__last) New H2: element(ks, [wcnt]) >= 0 New H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H21: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H28: element(fld_x(ctx), [loop__1__i + 1]) >= 0 New C1: bit__xor(element(update(ks, [loop__1__i + 1], element(fld_x(ctx), [loop__1__i + 1])), [wcnt]), element(fld_x(ctx), [loop__1__i + 1])) >= 0 -S- Applied substitution rule initialize_k_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H3: element(ks, [wcnt]) <= 18446744073709551615 New H22: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H29: element(fld_x(ctx), [loop__1__i + 1]) <= 18446744073709551615 New H1: for_all(j_ : integer, spark__crypto__i8__first <= j_ and j_ <= loop__1__i -> 0 <= element(ks, [j_]) and element(ks, [j_]) <= 18446744073709551615) New H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New C2: bit__xor(element(update(ks, [loop__1__i + 1], element(fld_x(ctx), [loop__1__i + 1])), [wcnt]), element(fld_x(ctx), [loop__1__i + 1])) <= 18446744073709551615 -S- Applied substitution rule initialize_k_rules(2). This was achieved by replacing all occurrences of wcnt by: 8. 
New H2: element(ks, [8]) >= 0 New H3: element(ks, [8]) <= 18446744073709551615 New C5: 8 >= spark__crypto__i9__first New C6: 8 <= spark__crypto__i9__last New C7: 8 >= spark__crypto__i9__first New C8: 8 <= spark__crypto__i9__last New C1: bit__xor(element(update(ks, [loop__1__i + 1], element(fld_x(ctx), [loop__1__i + 1])), [8]), element(fld_x(ctx), [loop__1__i + 1])) >= 0 New C2: bit__xor(element(update(ks, [loop__1__i + 1], element(fld_x(ctx), [loop__1__i + 1])), [8]), element(fld_x(ctx), [loop__1__i + 1])) <= 18446744073709551615 -S- Applied substitution rule initialize_k_rules(83). This was achieved by replacing all occurrences of spark__crypto__i9__first by: 0. New H32: loop__1__i >= - 1 New C5: true New C7: true -S- Applied substitution rule initialize_k_rules(84). This was achieved by replacing all occurrences of spark__crypto__i9__last by: 8. New H33: loop__1__i <= 7 New C6: true New C8: true *** Proved C5: true *** Proved C7: true *** Proved C6: true *** Proved C8: true >>> Restructured hypothesis H27 into: >>> H27: loop__1__i <> spark__crypto__i8__last -S- Applied substitution rule initialize_k_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New H6: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule initialize_k_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: fld_byte_count(fld_h(ctx)) <= 2147483647 -S- Applied substitution rule initialize_k_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H12: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule initialize_k_rules(43). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H13: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule initialize_k_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H15: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule initialize_k_rules(49). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H16: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule initialize_k_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule initialize_k_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule initialize_k_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H17: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule initialize_k_rules(61). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H18: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule initialize_k_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. 
New H19: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule initialize_k_rules(67). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H20: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule initialize_k_rules(78). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H1: for_all(j_ : integer, 0 <= j_ and j_ <= loop__1__i -> 0 <= element(ks, [j_]) and element(ks, [j_]) <= 18446744073709551615) New H23: loop__1__i >= 0 -S- Applied substitution rule initialize_k_rules(79). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H24: loop__1__i <= 7 New H27: loop__1__i <> 7 -S- Applied substitution rule initialize_k_rules(93). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H8: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule initialize_k_rules(94). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H9: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule initialize_k_rules(98). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H30: loop__1__i >= - 1 -S- Applied substitution rule initialize_k_rules(99). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H31: loop__1__i <= 6 New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule initialize_k_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H4: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule initialize_k_rules(104). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H4: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) %%% Hypotheses H33 & H27 together imply that loop__1__i < 7. H33 & H27 have therefore been deleted and a new H34 added to this effect. %%% Simplified C1 further, to give: %%% C1: bit__xor(element(ks, [8]), element(fld_x(ctx), [loop__1__i + 1])) >= 0 %%% Simplified C2 further, to give: %%% C2: bit__xor(element(ks, [8]), element(fld_x(ctx), [loop__1__i + 1])) <= 18446744073709551615 *** Proved C1: bit__xor(element(ks, [8]), element(fld_x(ctx), [loop__1__i + 1] )) >= 0 using hypotheses H2 & H28. *** Proved C2: bit__xor(element(ks, [8]), element(fld_x(ctx), [loop__1__i + 1] )) <= 18446744073709551615 using hypotheses H2, H3, H28 & H29. *** PROVED VC. @@@@@@@@@@ VC: procedure_initialize_key_schedule_6. 
@@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H33 has been replaced by "true". (It is already present, as H27). --- Hypothesis H34 has been replaced by "true". (It is already present, as H28). --- Hypothesis H35 has been replaced by "true". (It is already present, as H23). --- Hypothesis H36 has been replaced by "true". (It is already present, as H24). --- Hypothesis H37 has been replaced by "true". (It is already present, as H23). --- Hypothesis H38 has been replaced by "true". (It is already present, as H24). %%% Simplified C1 on reading formula in, to give: %%% C1: for_all(j_ : integer, spark__crypto__i8__first <= j_ and j_ <= spark__crypto__i8__first -> spark__unsigned__u64__first <= element( update(update(update(ks, [wcnt], skein_ks_parity), [ spark__crypto__i8__first], element(fld_x(ctx), [ spark__crypto__i8__first])), [wcnt], bit__xor(element(update(update( ks, [wcnt], skein_ks_parity), [spark__crypto__i8__first], element( fld_x(ctx), [spark__crypto__i8__first])), [wcnt]), element(fld_x(ctx) , [spark__crypto__i8__first]))), [j_]) and element(update(update( update(ks, [wcnt], skein_ks_parity), [spark__crypto__i8__first], element(fld_x(ctx), [spark__crypto__i8__first])), [wcnt], bit__xor( element(update(update(ks, [wcnt], skein_ks_parity), [ spark__crypto__i8__first], element(fld_x(ctx), [ spark__crypto__i8__first])), [wcnt]), element(fld_x(ctx), [ spark__crypto__i8__first]))), [j_]) <= spark__unsigned__u64__last) %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(element(update(update(ks, [wcnt], skein_ks_parity), [ spark__crypto__i8__first], element(fld_x(ctx), [ spark__crypto__i8__first])), [wcnt]), element(fld_x(ctx), [ spark__crypto__i8__first])) >= spark__unsigned__u64__first %%% Simplified C3 on reading formula in, to give: %%% C3: bit__xor(element(update(update(ks, [wcnt], skein_ks_parity), [ spark__crypto__i8__first], element(fld_x(ctx), [ spark__crypto__i8__first])), [wcnt]), element(fld_x(ctx), [ spark__crypto__i8__first])) <= spark__unsigned__u64__last %%% Simplified C4 on reading formula in, to give: %%% C4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified C5 on reading formula in, to give: %%% C5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified C23 on reading formula in, to give: %%% C23: true %%% Simplified C25 on reading formula in, to give: %%% C25: true *** Proved C2: bit__xor(element(update(update(ks, [wcnt], skein_ks_parity), [ spark__crypto__i8__first], element(fld_x(ctx), [ spark__crypto__i8__first])), [wcnt]), element(fld_x(ctx), [ spark__crypto__i8__first])) 
>= spark__unsigned__u64__first using hypothesis H31. *** Proved C3: bit__xor(element(update(update(ks, [wcnt], skein_ks_parity), [ spark__crypto__i8__first], element(fld_x(ctx), [ spark__crypto__i8__first])), [wcnt]), element(fld_x(ctx), [ spark__crypto__i8__first])) <= spark__unsigned__u64__last using hypothesis H32. *** Proved C4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) using hypothesis H2. *** Proved C5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) using hypothesis H3. *** Proved C6: fld_byte_count(fld_h(ctx)) >= natural__first using hypothesis H4. *** Proved C7: fld_byte_count(fld_h(ctx)) <= natural__last using hypothesis H5. *** Proved C8: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first using hypothesis H6. *** Proved C9: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last using hypothesis H7. *** Proved C10: true *** Proved C11: true *** Proved C12: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first using hypothesis H10. *** Proved C13: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last using hypothesis H11. *** Proved C14: true *** Proved C15: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first using hypothesis H13. *** Proved C16: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last using hypothesis H14. *** Proved C17: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first using hypothesis H15. *** Proved C18: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last using hypothesis H16. *** Proved C19: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first using hypothesis H17. *** Proved C20: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last using hypothesis H18. *** Proved C21: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first using hypothesis H19. *** Proved C22: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last using hypothesis H20. *** Proved C23: true *** Proved C25: true -S- Applied substitution rule initialize_k_rules(78). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. 
New H25: element(fld_x(ctx), [0]) >= spark__unsigned__u64__first New H26: element(fld_x(ctx), [0]) <= spark__unsigned__u64__last New H27: 0 >= skein_512_state_words_index__first New H28: 0 <= skein_512_state_words_index__last New H29: 0 >= spark__crypto__i9__first New H30: 0 <= spark__crypto__i9__last New H31: bit__xor(element(update(update(ks, [wcnt], skein_ks_parity), [0] , element(fld_x(ctx), [0])), [wcnt]), element(fld_x(ctx), [0])) >= spark__unsigned__u64__first New H32: bit__xor(element(update(update(ks, [wcnt], skein_ks_parity), [0] , element(fld_x(ctx), [0])), [wcnt]), element(fld_x(ctx), [0])) <= spark__unsigned__u64__last New C1: for_all(j_ : integer, 0 <= j_ and j_ <= 0 -> spark__unsigned__u64__first <= element(update(update(update(ks, [wcnt] , skein_ks_parity), [0], element(fld_x(ctx), [0])), [wcnt], bit__xor( element(update(update(ks, [wcnt], skein_ks_parity), [0], element( fld_x(ctx), [0])), [wcnt]), element(fld_x(ctx), [0]))), [j_]) and element(update(update(update(ks, [wcnt], skein_ks_parity), [0], element(fld_x(ctx), [0])), [wcnt], bit__xor(element(update(update(ks, [wcnt], skein_ks_parity), [0], element(fld_x(ctx), [0])), [wcnt]), element(fld_x(ctx), [0]))), [j_]) <= spark__unsigned__u64__last) New C24: 0 <= spark__crypto__i8__last New C26: 0 <= spark__crypto__i8__last -S- Applied substitution rule initialize_k_rules(79). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New C24: true New C26: true *** Proved C24: true *** Proved C26: true -S- Applied substitution rule initialize_k_rules(1). This was achieved by replacing all occurrences of skein_ks_parity by: 2004413935125273122. New H21: 2004413935125273122 >= spark__unsigned__u64__first New H22: 2004413935125273122 <= spark__unsigned__u64__last New H31: bit__xor(element(update(update(ks, [wcnt], 2004413935125273122), [0], element(fld_x(ctx), [0])), [wcnt]), element(fld_x(ctx), [0])) >= spark__unsigned__u64__first New H32: bit__xor(element(update(update(ks, [wcnt], 2004413935125273122), [0], element(fld_x(ctx), [0])), [wcnt]), element(fld_x(ctx), [0])) <= spark__unsigned__u64__last New C1: for_all(j_ : integer, 0 <= j_ and j_ <= 0 -> spark__unsigned__u64__first <= element(update(update(update(ks, [wcnt] , 2004413935125273122), [0], element(fld_x(ctx), [0])), [wcnt], bit__xor(element(update(update(ks, [wcnt], 2004413935125273122), [0], element(fld_x(ctx), [0])), [wcnt]), element(fld_x(ctx), [0]))), [j_]) and element(update(update(update(ks, [wcnt], 2004413935125273122), [0] , element(fld_x(ctx), [0])), [wcnt], bit__xor(element(update(update( ks, [wcnt], 2004413935125273122), [0], element(fld_x(ctx), [0])), [ wcnt]), element(fld_x(ctx), [0]))), [j_]) <= spark__unsigned__u64__last) -S- Applied substitution rule initialize_k_rules(2). This was achieved by replacing all occurrences of wcnt by: 8. 
New H23: 8 >= spark__crypto__i9__first New H24: 8 <= spark__crypto__i9__last New H31: bit__xor(2004413935125273122, element(fld_x(ctx), [0])) >= spark__unsigned__u64__first New H32: bit__xor(2004413935125273122, element(fld_x(ctx), [0])) <= spark__unsigned__u64__last New C1: for_all(j_ : integer, 0 <= j_ and j_ <= 0 -> spark__unsigned__u64__first <= element(update(update(update(ks, [8], 2004413935125273122), [0], element(fld_x(ctx), [0])), [8], bit__xor( 2004413935125273122, element(fld_x(ctx), [0]))), [j_]) and element( update(update(update(ks, [8], 2004413935125273122), [0], element( fld_x(ctx), [0])), [8], bit__xor(2004413935125273122, element(fld_x( ctx), [0]))), [j_]) <= spark__unsigned__u64__last) -S- Applied substitution rule initialize_k_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New H4: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule initialize_k_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H5: fld_byte_count(fld_h(ctx)) <= 2147483647 -S- Applied substitution rule initialize_k_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H10: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule initialize_k_rules(43). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H11: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule initialize_k_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H13: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule initialize_k_rules(49). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H14: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule initialize_k_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H2: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule initialize_k_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H2: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule initialize_k_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H15: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule initialize_k_rules(61). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H16: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule initialize_k_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H17: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule initialize_k_rules(67). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H18: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule initialize_k_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H3: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H19: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H25: element(fld_x(ctx), [0]) >= 0 New H21: true New H31: bit__xor(2004413935125273122, element(fld_x(ctx), [0])) >= 0 New C1: for_all(j_ : integer, 0 <= j_ and j_ <= 0 -> 0 <= element(update( update(update(ks, [8], 2004413935125273122), [0], element(fld_x(ctx), [0])), [8], bit__xor(2004413935125273122, element(fld_x(ctx), [0]))), [j_]) and element(update(update(update(ks, [8], 2004413935125273122), [0], element(fld_x(ctx), [0])), [8], bit__xor(2004413935125273122, element(fld_x(ctx), [0]))), [j_]) <= spark__unsigned__u64__last) -S- Applied substitution rule initialize_k_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H20: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H26: element(fld_x(ctx), [0]) <= 18446744073709551615 New H22: true New H32: bit__xor(2004413935125273122, element(fld_x(ctx), [0])) <= 18446744073709551615 New H3: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New C1: for_all(j_ : integer, 0 <= j_ and j_ <= 0 -> 0 <= element(update( update(update(ks, [8], 2004413935125273122), [0], element(fld_x(ctx), [0])), [8], bit__xor(2004413935125273122, element(fld_x(ctx), [0]))), [j_]) and element(update(update(update(ks, [8], 2004413935125273122), [0], element(fld_x(ctx), [0])), [8], bit__xor(2004413935125273122, element(fld_x(ctx), [0]))), [j_]) <= 18446744073709551615) -S- Applied substitution rule initialize_k_rules(83). This was achieved by replacing all occurrences of spark__crypto__i9__first by: 0. New H29: true New H23: true -S- Applied substitution rule initialize_k_rules(84). This was achieved by replacing all occurrences of spark__crypto__i9__last by: 8. New H30: true New H24: true -S- Applied substitution rule initialize_k_rules(93). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H6: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule initialize_k_rules(94). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H7: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule initialize_k_rules(98). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H27: true New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule initialize_k_rules(99). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H28: true New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule initialize_k_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. 
New H2: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule initialize_k_rules(104). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H2: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) *** Proved C1: for_all(j_ : integer, 0 <= j_ and j_ <= 0 -> 0 <= element( update(update(update(ks, [8], 2004413935125273122), [0], element( fld_x(ctx), [0])), [8], bit__xor(2004413935125273122, element(fld_x( ctx), [0]))), [j_]) and element(update(update(update(ks, [8], 2004413935125273122), [0], element(fld_x(ctx), [0])), [8], bit__xor( 2004413935125273122, element(fld_x(ctx), [0]))), [j_]) <= 18446744073709551615) using hypotheses H25 & H26. *** PROVED VC. @@@@@@@@@@ VC: procedure_initialize_key_schedule_7. @@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: for_all(j_ : integer, spark__crypto__i8__first <= j_ and j_ <= loop__1__i -> spark__unsigned__u64__first <= element(ks, [j_]) and element(ks, [j_]) <= spark__unsigned__u64__last) %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H25 has been replaced by "true". (It is already present, as H23). --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). --- Hypothesis H36 has been replaced by "true". (It is already present, as H30). --- Hypothesis H37 has been replaced by "true". (It is already present, as H31). --- Hypothesis H40 has been replaced by "true". (It is already present, as H38). --- Hypothesis H41 has been replaced by "true". (It is already present, as H39). 
%%% Simplified C1 on reading formula in, to give: %%% C1: for_all(j_ : integer, spark__crypto__i8__first <= j_ and j_ <= loop__1__i + 1 -> spark__unsigned__u64__first <= element(update( update(ks, [loop__1__i + 1], element(fld_x(ctx), [loop__1__i + 1])), [ wcnt], bit__xor(element(update(ks, [loop__1__i + 1], element(fld_x( ctx), [loop__1__i + 1])), [wcnt]), element(fld_x(ctx), [loop__1__i + 1]))), [j_]) and element(update(update(ks, [loop__1__i + 1], element( fld_x(ctx), [loop__1__i + 1])), [wcnt], bit__xor(element(update(ks, [ loop__1__i + 1], element(fld_x(ctx), [loop__1__i + 1])), [wcnt]), element(fld_x(ctx), [loop__1__i + 1]))), [j_]) <= spark__unsigned__u64__last) %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(element(update(ks, [loop__1__i + 1], element(fld_x(ctx) , [loop__1__i + 1])), [wcnt]), element(fld_x(ctx), [loop__1__i + 1])) >= spark__unsigned__u64__first %%% Simplified C3 on reading formula in, to give: %%% C3: bit__xor(element(update(ks, [loop__1__i + 1], element(fld_x(ctx) , [loop__1__i + 1])), [wcnt]), element(fld_x(ctx), [loop__1__i + 1])) <= spark__unsigned__u64__last %%% Simplified C4 on reading formula in, to give: %%% C4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified C5 on reading formula in, to give: %%% C5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) *** Proved C2: bit__xor(element(update(ks, [loop__1__i + 1], element(fld_x( ctx), [loop__1__i + 1])), [wcnt]), element(fld_x(ctx), [loop__1__i + 1])) >= spark__unsigned__u64__first using hypothesis H34. *** Proved C3: bit__xor(element(update(ks, [loop__1__i + 1], element(fld_x( ctx), [loop__1__i + 1])), [wcnt]), element(fld_x(ctx), [loop__1__i + 1])) <= spark__unsigned__u64__last using hypothesis H35. *** Proved C4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) using hypothesis H4. *** Proved C5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) using hypothesis H5. *** Proved C6: fld_byte_count(fld_h(ctx)) >= natural__first using hypothesis H6. *** Proved C7: fld_byte_count(fld_h(ctx)) <= natural__last using hypothesis H7. *** Proved C8: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first using hypothesis H8. *** Proved C9: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last using hypothesis H9. *** Proved C10: true *** Proved C11: true *** Proved C12: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first using hypothesis H12. *** Proved C13: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last using hypothesis H13. *** Proved C14: true *** Proved C15: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first using hypothesis H15. *** Proved C16: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last using hypothesis H16. 
*** Proved C17: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first using hypothesis H17. *** Proved C18: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last using hypothesis H18. *** Proved C19: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first using hypothesis H19. *** Proved C20: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last using hypothesis H20. *** Proved C21: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first using hypothesis H21. *** Proved C22: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last using hypothesis H22. *** Proved C23: loop__1__i + 1 >= spark__crypto__i8__first using hypothesis H23. *** Proved C25: loop__1__i + 1 >= spark__crypto__i8__first using hypothesis H23. -S- Applied substitution rule initialize_k_rules(79). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H24: loop__1__i <= 7 New H27: not loop__1__i = 7 New C24: loop__1__i <= 6 New C26: loop__1__i <= 6 *** Proved C24: loop__1__i <= 6 using hypotheses H24 & H27. *** Proved C26: loop__1__i <= 6 using hypotheses H24 & H27. >>> Restructured hypothesis H27 into: >>> H27: loop__1__i <> 7 -S- Applied substitution rule initialize_k_rules(2). This was achieved by replacing all occurrences of wcnt by: 8. New H2: element(ks, [8]) >= spark__unsigned__u64__first New H3: element(ks, [8]) <= spark__unsigned__u64__last New H34: bit__xor(element(ks, [8]), element(fld_x(ctx), [loop__1__i + 1])) >= spark__unsigned__u64__first New H35: bit__xor(element(ks, [8]), element(fld_x(ctx), [loop__1__i + 1])) <= spark__unsigned__u64__last New H38: 8 >= spark__crypto__i9__first New H39: 8 <= spark__crypto__i9__last New C1: for_all(j_ : integer, spark__crypto__i8__first <= j_ and j_ <= loop__1__i + 1 -> spark__unsigned__u64__first <= element(update( update(ks, [loop__1__i + 1], element(fld_x(ctx), [loop__1__i + 1])), [ 8], bit__xor(element(ks, [8]), element(fld_x(ctx), [loop__1__i + 1]))) , [j_]) and element(update(update(ks, [loop__1__i + 1], element(fld_x( ctx), [loop__1__i + 1])), [8], bit__xor(element(ks, [8]), element( fld_x(ctx), [loop__1__i + 1]))), [j_]) <= spark__unsigned__u64__last) -S- Applied substitution rule initialize_k_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New H6: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule initialize_k_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: fld_byte_count(fld_h(ctx)) <= 2147483647 -S- Applied substitution rule initialize_k_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H12: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule initialize_k_rules(43). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H13: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule initialize_k_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H15: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule initialize_k_rules(49). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H16: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule initialize_k_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. 
New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule initialize_k_rules(55). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule initialize_k_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H17: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule initialize_k_rules(61). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H18: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule initialize_k_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H19: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule initialize_k_rules(67). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H20: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule initialize_k_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H1: for_all(j_ : integer, spark__crypto__i8__first <= j_ and j_ <= loop__1__i -> 0 <= element(ks, [j_]) and element(ks, [j_]) <= spark__unsigned__u64__last) New H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H21: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H28: element(fld_x(ctx), [loop__1__i + 1]) >= 0 New H2: element(ks, [8]) >= 0 New H34: bit__xor(element(ks, [8]), element(fld_x(ctx), [loop__1__i + 1])) >= 0 New C1: for_all(j_ : integer, spark__crypto__i8__first <= j_ and j_ <= loop__1__i + 1 -> 0 <= element(update(update(ks, [loop__1__i + 1], element(fld_x(ctx), [loop__1__i + 1])), [8], bit__xor(element(ks, [8]) , element(fld_x(ctx), [loop__1__i + 1]))), [j_]) and element(update( update(ks, [loop__1__i + 1], element(fld_x(ctx), [loop__1__i + 1])), [ 8], bit__xor(element(ks, [8]), element(fld_x(ctx), [loop__1__i + 1]))) , [j_]) <= spark__unsigned__u64__last) -S- Applied substitution rule initialize_k_rules(73). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H22: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H29: element(fld_x(ctx), [loop__1__i + 1]) <= 18446744073709551615 New H3: element(ks, [8]) <= 18446744073709551615 New H35: bit__xor(element(ks, [8]), element(fld_x(ctx), [loop__1__i + 1])) <= 18446744073709551615 New H1: for_all(j_ : integer, spark__crypto__i8__first <= j_ and j_ <= loop__1__i -> 0 <= element(ks, [j_]) and element(ks, [j_]) <= 18446744073709551615) New H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New C1: for_all(j_ : integer, spark__crypto__i8__first <= j_ and j_ <= loop__1__i + 1 -> 0 <= element(update(update(ks, [loop__1__i + 1], element(fld_x(ctx), [loop__1__i + 1])), [8], bit__xor(element(ks, [8]) , element(fld_x(ctx), [loop__1__i + 1]))), [j_]) and element(update( update(ks, [loop__1__i + 1], element(fld_x(ctx), [loop__1__i + 1])), [ 8], bit__xor(element(ks, [8]), element(fld_x(ctx), [loop__1__i + 1]))) , [j_]) <= 18446744073709551615) -S- Applied substitution rule initialize_k_rules(78). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H23: loop__1__i >= 0 New H1: for_all(j_ : integer, 0 <= j_ and j_ <= loop__1__i -> 0 <= element(ks, [j_]) and element(ks, [j_]) <= 18446744073709551615) New C1: for_all(j_ : integer, 0 <= j_ and j_ <= loop__1__i + 1 -> 0 <= element(update(update(ks, [loop__1__i + 1], element(fld_x(ctx), [ loop__1__i + 1])), [8], bit__xor(element(ks, [8]), element(fld_x(ctx) , [loop__1__i + 1]))), [j_]) and element(update(update(ks, [ loop__1__i + 1], element(fld_x(ctx), [loop__1__i + 1])), [8], bit__xor(element(ks, [8]), element(fld_x(ctx), [loop__1__i + 1]))), [ j_]) <= 18446744073709551615) -S- Applied substitution rule initialize_k_rules(83). This was achieved by replacing all occurrences of spark__crypto__i9__first by: 0. New H32: loop__1__i >= - 1 New H38: true -S- Applied substitution rule initialize_k_rules(84). This was achieved by replacing all occurrences of spark__crypto__i9__last by: 8. New H33: loop__1__i <= 7 New H39: true -S- Applied substitution rule initialize_k_rules(93). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H8: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule initialize_k_rules(94). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H9: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule initialize_k_rules(98). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H30: loop__1__i >= - 1 New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule initialize_k_rules(99). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H31: loop__1__i <= 6 New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule initialize_k_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. 
New H4: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule initialize_k_rules(104). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H4: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) %%% Hypotheses H24 & H27 together imply that loop__1__i < 7. H24 & H27 have therefore been deleted and a new H42 added to this effect. --- Eliminated hypothesis H10 (true-hypothesis). --- Eliminated hypothesis H11 (true-hypothesis). --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H25 (true-hypothesis). --- Eliminated hypothesis H26 (true-hypothesis). --- Eliminated hypothesis H36 (true-hypothesis). --- Eliminated hypothesis H37 (true-hypothesis). --- Eliminated hypothesis H40 (true-hypothesis). --- Eliminated hypothesis H41 (true-hypothesis). --- Eliminated hypothesis H38 (true-hypothesis). --- Eliminated hypothesis H39 (true-hypothesis). --- Eliminated hypothesis H32 (duplicate of H30). --- Eliminated hypothesis H42 (duplicate of H31). --- Eliminated hypothesis H30 (redundant, given H23). --- Eliminated hypothesis H33 (redundant, given H31). +++ New H43: integer__size >= 0 +++ New H44: natural__size >= 0 +++ New H45: spark__unsigned__u6__size >= 0 +++ New H46: spark__unsigned__u7__size >= 0 +++ New H47: spark__unsigned__byte__size >= 0 +++ New H48: spark__unsigned__u16__size >= 0 +++ New H49: spark__unsigned__u32__size >= 0 +++ New H50: spark__unsigned__u64__size >= 0 +++ New H51: spark__crypto__i8__size >= 0 +++ New H52: spark__crypto__i9__size >= 0 +++ New H53: spark__crypto__word_count_t__size >= 0 +++ New H54: hash_bit_length__size >= 0 +++ New H55: skein_512_state_words_index__size >= 0 +++ New H56: skein_512_block_bytes_index__size >= 0 +++ New H57: skein_512_context__size >= 0 +++ New H58: context_header__size >= 0 @@@@@@@@@@ VC: procedure_initialize_key_schedule_8. @@@@@@@@@@ *** Proved C1: true *** PROVED VC. ././@LongLink0000000000000000000000000000015100000000000011562 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/update_context.rlsspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/update_context0000644000175000017500000002257511712513676032545 0ustar eugeneugen /*********************************************************/ /*Proof Rule Declarations*/ /*Examiner Pro Edition*/ /*********************************************************/ /*procedure Skein.Skein_512_Process_Block.Update_Context*/ rule_family update_conte_rules: X requires [X:any] & X <= Y requires [X:ire, Y:ire] & X >= Y requires [X:ire, Y:ire]. update_conte_rules(1): integer__size >= 0 may_be_deduced. update_conte_rules(2): integer__first may_be_replaced_by -2147483648. update_conte_rules(3): integer__last may_be_replaced_by 2147483647. update_conte_rules(4): integer__base__first may_be_replaced_by -2147483648. update_conte_rules(5): integer__base__last may_be_replaced_by 2147483647. update_conte_rules(6): natural__size >= 0 may_be_deduced. update_conte_rules(7): natural__first may_be_replaced_by 0. update_conte_rules(8): natural__last may_be_replaced_by 2147483647. update_conte_rules(9): natural__base__first may_be_replaced_by -2147483648. update_conte_rules(10): natural__base__last may_be_replaced_by 2147483647. 
update_conte_rules(11): interfaces__unsigned_8__size >= 0 may_be_deduced. update_conte_rules(12): interfaces__unsigned_8__size may_be_replaced_by 8. update_conte_rules(13): interfaces__unsigned_8__first may_be_replaced_by 0. update_conte_rules(14): interfaces__unsigned_8__last may_be_replaced_by 255. update_conte_rules(15): interfaces__unsigned_8__base__first may_be_replaced_by 0. update_conte_rules(16): interfaces__unsigned_8__base__last may_be_replaced_by 255. update_conte_rules(17): interfaces__unsigned_8__modulus may_be_replaced_by 256. update_conte_rules(18): interfaces__unsigned_16__size >= 0 may_be_deduced. update_conte_rules(19): interfaces__unsigned_16__size may_be_replaced_by 16. update_conte_rules(20): interfaces__unsigned_16__first may_be_replaced_by 0. update_conte_rules(21): interfaces__unsigned_16__last may_be_replaced_by 65535. update_conte_rules(22): interfaces__unsigned_16__base__first may_be_replaced_by 0. update_conte_rules(23): interfaces__unsigned_16__base__last may_be_replaced_by 65535. update_conte_rules(24): interfaces__unsigned_16__modulus may_be_replaced_by 65536. update_conte_rules(25): interfaces__unsigned_32__size >= 0 may_be_deduced. update_conte_rules(26): interfaces__unsigned_32__size may_be_replaced_by 32. update_conte_rules(27): interfaces__unsigned_32__first may_be_replaced_by 0. update_conte_rules(28): interfaces__unsigned_32__last may_be_replaced_by 4294967295. update_conte_rules(29): interfaces__unsigned_32__base__first may_be_replaced_by 0. update_conte_rules(30): interfaces__unsigned_32__base__last may_be_replaced_by 4294967295. update_conte_rules(31): interfaces__unsigned_32__modulus may_be_replaced_by 4294967296. update_conte_rules(32): interfaces__unsigned_64__size >= 0 may_be_deduced. update_conte_rules(33): interfaces__unsigned_64__size may_be_replaced_by 64. update_conte_rules(34): interfaces__unsigned_64__first may_be_replaced_by 0. update_conte_rules(35): interfaces__unsigned_64__last may_be_replaced_by 18446744073709551615. update_conte_rules(36): interfaces__unsigned_64__base__first may_be_replaced_by 0. update_conte_rules(37): interfaces__unsigned_64__base__last may_be_replaced_by 18446744073709551615. update_conte_rules(38): interfaces__unsigned_64__modulus may_be_replaced_by 18446744073709551616. update_conte_rules(39): spark__unsigned__u6__size >= 0 may_be_deduced. update_conte_rules(40): spark__unsigned__u6__first may_be_replaced_by 0. update_conte_rules(41): spark__unsigned__u6__last may_be_replaced_by 63. update_conte_rules(42): spark__unsigned__u6__base__first may_be_replaced_by 0. update_conte_rules(43): spark__unsigned__u6__base__last may_be_replaced_by 63. update_conte_rules(44): spark__unsigned__u6__modulus may_be_replaced_by 64. update_conte_rules(45): spark__unsigned__u7__size >= 0 may_be_deduced. update_conte_rules(46): spark__unsigned__u7__first may_be_replaced_by 0. update_conte_rules(47): spark__unsigned__u7__last may_be_replaced_by 127. update_conte_rules(48): spark__unsigned__u7__base__first may_be_replaced_by 0. update_conte_rules(49): spark__unsigned__u7__base__last may_be_replaced_by 127. update_conte_rules(50): spark__unsigned__u7__modulus may_be_replaced_by 128. update_conte_rules(51): spark__unsigned__byte__size >= 0 may_be_deduced. update_conte_rules(52): spark__unsigned__byte__first may_be_replaced_by 0. update_conte_rules(53): spark__unsigned__byte__last may_be_replaced_by 255. update_conte_rules(54): spark__unsigned__byte__base__first may_be_replaced_by 0. 
update_conte_rules(55): spark__unsigned__byte__base__last may_be_replaced_by 255. update_conte_rules(56): spark__unsigned__byte__modulus may_be_replaced_by 256. update_conte_rules(57): spark__unsigned__u16__size >= 0 may_be_deduced. update_conte_rules(58): spark__unsigned__u16__first may_be_replaced_by 0. update_conte_rules(59): spark__unsigned__u16__last may_be_replaced_by 65535. update_conte_rules(60): spark__unsigned__u16__base__first may_be_replaced_by 0. update_conte_rules(61): spark__unsigned__u16__base__last may_be_replaced_by 65535. update_conte_rules(62): spark__unsigned__u16__modulus may_be_replaced_by 65536. update_conte_rules(63): spark__unsigned__u32__size >= 0 may_be_deduced. update_conte_rules(64): spark__unsigned__u32__first may_be_replaced_by 0. update_conte_rules(65): spark__unsigned__u32__last may_be_replaced_by 4294967295. update_conte_rules(66): spark__unsigned__u32__base__first may_be_replaced_by 0. update_conte_rules(67): spark__unsigned__u32__base__last may_be_replaced_by 4294967295. update_conte_rules(68): spark__unsigned__u32__modulus may_be_replaced_by 4294967296. update_conte_rules(69): spark__unsigned__u64__size >= 0 may_be_deduced. update_conte_rules(70): spark__unsigned__u64__first may_be_replaced_by 0. update_conte_rules(71): spark__unsigned__u64__last may_be_replaced_by 18446744073709551615. update_conte_rules(72): spark__unsigned__u64__base__first may_be_replaced_by 0. update_conte_rules(73): spark__unsigned__u64__base__last may_be_replaced_by 18446744073709551615. update_conte_rules(74): spark__unsigned__u64__modulus may_be_replaced_by 18446744073709551616. update_conte_rules(75): spark__crypto__i8__size >= 0 may_be_deduced. update_conte_rules(76): spark__crypto__i8__first may_be_replaced_by 0. update_conte_rules(77): spark__crypto__i8__last may_be_replaced_by 7. update_conte_rules(78): spark__crypto__i8__base__first may_be_replaced_by -2147483648. update_conte_rules(79): spark__crypto__i8__base__last may_be_replaced_by 2147483647. update_conte_rules(80): spark__crypto__word_count_t__size >= 0 may_be_deduced. update_conte_rules(81): spark__crypto__word_count_t__first may_be_replaced_by 0. update_conte_rules(82): spark__crypto__word_count_t__last may_be_replaced_by 268435455. update_conte_rules(83): spark__crypto__word_count_t__base__first may_be_replaced_by -2147483648. update_conte_rules(84): spark__crypto__word_count_t__base__last may_be_replaced_by 2147483647. update_conte_rules(85): hash_bit_length__size >= 0 may_be_deduced. update_conte_rules(86): hash_bit_length__first may_be_replaced_by 0. update_conte_rules(87): hash_bit_length__last may_be_replaced_by 2147483640. update_conte_rules(88): hash_bit_length__base__first may_be_replaced_by -2147483648. update_conte_rules(89): hash_bit_length__base__last may_be_replaced_by 2147483647. update_conte_rules(90): skein_512_state_words_index__size >= 0 may_be_deduced. update_conte_rules(91): skein_512_state_words_index__first may_be_replaced_by 0. update_conte_rules(92): skein_512_state_words_index__last may_be_replaced_by 7. update_conte_rules(93): skein_512_state_words_index__base__first may_be_replaced_by -2147483648. update_conte_rules(94): skein_512_state_words_index__base__last may_be_replaced_by 2147483647. update_conte_rules(95): skein_512_block_bytes_index__size >= 0 may_be_deduced. update_conte_rules(96): skein_512_block_bytes_index__first may_be_replaced_by 0. update_conte_rules(97): skein_512_block_bytes_index__last may_be_replaced_by 63. 
update_conte_rules(98): skein_512_block_bytes_index__base__first may_be_replaced_by -2147483648. update_conte_rules(99): skein_512_block_bytes_index__base__last may_be_replaced_by 2147483647. update_conte_rules(100): skein_512_context__size >= 0 may_be_deduced. update_conte_rules(101): A = B may_be_deduced_from [goal(checktype(A,skein_512_context)), goal(checktype(B,skein_512_context)), fld_h(A) = fld_h(B), fld_x(A) = fld_x(B), fld_b(A) = fld_b(B)]. update_conte_rules(102): tweak_value__size >= 0 may_be_deduced. update_conte_rules(103): tweak_value__size may_be_replaced_by 128. update_conte_rules(104): A = B may_be_deduced_from [goal(checktype(A,tweak_value)), goal(checktype(B,tweak_value)), fld_byte_count_lsb(A) = fld_byte_count_lsb(B), fld_byte_count_msb(A) = fld_byte_count_msb(B), fld_reserved(A) = fld_reserved(B), fld_tree_level(A) = fld_tree_level(B), fld_bit_pad(A) = fld_bit_pad(B), fld_field_type(A) = fld_field_type(B), fld_first_block(A) = fld_first_block(B), fld_final_block(A) = fld_final_block(B)]. update_conte_rules(105): context_header__size >= 0 may_be_deduced. update_conte_rules(106): A = B may_be_deduced_from [goal(checktype(A,context_header)), goal(checktype(B,context_header)), fld_tweak_words(A) = fld_tweak_words(B), fld_hash_bit_len(A) = fld_hash_bit_len(B), fld_byte_count(A) = fld_byte_count(B)]. ././@LongLink0000000000000000000000000000015100000000000011562 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/update_context.sivspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/update_context0000644000175000017500000000116511712513676032535 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Update_Context For path(s) from start to run-time check associated with statement of line 608: procedure_update_context_1. *** true . /* all conclusions proved */ For path(s) from start to finish: procedure_update_context_2. *** true . /* all conclusions proved */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_4.slg0000644000175000017500000025143411712513676031653 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Round_4 @@@@@@@@@@ VC: procedure_round_4_1. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_4_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_4_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_4_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_4_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_4_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [6]) + element(x, [1])) mod 18446744073709551616 >= 0 New C2: (element(x, [6]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [6]) + element(x, [1])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [6]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_4_2. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [1]) <= interfaces__unsigned_64__last *** Proved C5: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 1 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 1 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_4_rules(1). This was achieved by replacing all occurrences of r_512_3_0 by: 44. New C1: 44 >= spark__unsigned__shift_count__first New C2: 44 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_4_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_4_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule round_4_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [1]) >= 0 -S- Applied substitution rule round_4_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [1]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_4_rules(16). 
This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [6]) + element(x, [1])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [6]) + element(x, [1])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_4_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [6]) + element(x, [1])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_4_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [6]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_4_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_4_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [1]) >= 0 using hypothesis H2. *** Proved C4: element(x, [1]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_4_3. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [1]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0) <= spark__unsigned__u64__last -S- Applied substitution rule round_4_rules(18). 
This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0) >= 0 -S- Applied substitution rule round_4_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0) <= 18446744073709551615 -S- Applied substitution rule round_4_rules(1). This was achieved by replacing all occurrences of r_512_3_0 by: 44. New H11: 44 >= spark__unsigned__shift_count__first New H12: 44 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [1]), 44) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [1]), 44) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [1]), 44) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [1]), 44) <= 18446744073709551615 -S- Applied substitution rule round_4_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [1]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [1]), 44) >= 0 -S- Applied substitution rule round_4_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [1]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [1]), 44) <= 18446744073709551615 -S- Applied substitution rule round_4_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [6]) + element(x, [1])) mod 18446744073709551616 >= 0 New H4: (element(x, [6]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_4_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_4_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_4_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_4_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [1]), 44) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [1]), 44) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_4_4. 
@@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [1]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0), (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0), (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 6 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 6 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 1 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 1 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_4_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0), (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_4_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0), (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0), (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. *** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0), (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_4_5. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [1]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0) <= spark__unsigned__u64__last %%% Simplified H23 on reading formula in, to give: %%% H23: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0), (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified H24 on reading formula in, to give: %%% H24: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0), (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last --- Hypothesis H25 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H26 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H27 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H28 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H29 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H30 has been replaced by "true". 
(It is already present, as H6) . %%% Simplified C2 on reading formula in, to give: %%% C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0), (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [6], (element(x, [6]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0), (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) *** Proved C1: true *** Proved C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0), (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [6], (element(x, [6]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_3_0), (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) using hypotheses H2, H3, H4, H5, H6, H7, H8, H23 & H24. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_4_6. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_4_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_4_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_4_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_4_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. 
New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_4_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [0]) + element(x, [7])) mod 18446744073709551616 >= 0 New C2: (element(x, [0]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [0]) + element(x, [7])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [0]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_4_7. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [7]) <= interfaces__unsigned_64__last *** Proved C5: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 7 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 7 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_4_rules(2). This was achieved by replacing all occurrences of r_512_3_1 by: 9. New C1: 9 >= spark__unsigned__shift_count__first New C2: 9 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_4_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_4_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule round_4_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [7]) >= 0 -S- Applied substitution rule round_4_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [7]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_4_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [0]) + element(x, [7])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [0]) + element(x, [7])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_4_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [0]) + element(x, [7])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_4_rules(19). 
This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [0]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_4_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_4_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [7]) >= 0 using hypothesis H2. *** Proved C4: element(x, [7]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_4_8. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [7]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1) <= spark__unsigned__u64__last -S- Applied substitution rule round_4_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1) >= 0 -S- Applied substitution rule round_4_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1) <= 18446744073709551615 -S- Applied substitution rule round_4_rules(2). This was achieved by replacing all occurrences of r_512_3_1 by: 9. New H11: 9 >= spark__unsigned__shift_count__first New H12: 9 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [7]), 9) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [7]), 9) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [7]), 9) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [7]), 9) <= 18446744073709551615 -S- Applied substitution rule round_4_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [7]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [7]), 9) >= 0 -S- Applied substitution rule round_4_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [7]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [7]), 9) <= 18446744073709551615 -S- Applied substitution rule round_4_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [0]) + element(x, [7])) mod 18446744073709551616 >= 0 New H4: (element(x, [0]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_4_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_4_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_4_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_4_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [7]), 9) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [7]), 9) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_4_9. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . 
%%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [7]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1), (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1), (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 0 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 0 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 7 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 7 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_4_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1), (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_4_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1), (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1), (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. 
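[Aside on the recurring range argument in these VCs: once the substitution rules have replaced the named bounds by their literal values, every remaining conclusion is a closure property of 64-bit words. Informally, writing rotl_64 and xor as assumed abstractions of spark__unsigned__rotate_left_64 and bit__xor, the facts being relied on are:

   \[
   0 \le a, b \le 2^{64}-1 \;\Longrightarrow\;
   \begin{aligned}
   &0 \le (a + b) \bmod 2^{64} \le 2^{64}-1,\\
   &0 \le \mathrm{rotl}_{64}(a, r) \le 2^{64}-1,\\
   &0 \le a \oplus b \le 2^{64}-1.
   \end{aligned}
   \]

The first bound needs only that the modulus 2^{64} = 18446744073709551616 is positive; the other two hold because rotation and xor map 64-bit words to 64-bit words, which is what hypotheses such as H19-H24 record before they are cited by the "Proved" steps.]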
*** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1), (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_4_10. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [7]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1) <= spark__unsigned__u64__last %%% Simplified H23 on reading formula in, to give: %%% H23: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1), (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified H24 on reading formula in, to give: %%% H24: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1), (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last --- Hypothesis H25 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H26 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H27 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H28 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H29 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . 
%%% Simplified C2 on reading formula in, to give: %%% C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], bit__xor( spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1), (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [0], (element(x, [0]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], bit__xor( spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1), (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) *** Proved C1: true *** Proved C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], bit__xor( spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1), (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [0], (element(x, [0]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], bit__xor( spark__unsigned__rotate_left_64(element(x, [7]), r_512_3_1), (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) using hypotheses H2, H3, H4, H5, H6, H7, H8, H23 & H24. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_4_11. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_4_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_4_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_4_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_4_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_4_rules(16). 
This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [2]) + element(x, [5])) mod 18446744073709551616 >= 0 New C2: (element(x, [2]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [2]) + element(x, [5])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [2]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_4_12. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [5]) <= interfaces__unsigned_64__last *** Proved C5: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 5 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 5 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_4_rules(3). This was achieved by replacing all occurrences of r_512_3_2 by: 54. New C1: 54 >= spark__unsigned__shift_count__first New C2: 54 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_4_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_4_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule round_4_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [5]) >= 0 -S- Applied substitution rule round_4_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [5]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_4_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [2]) + element(x, [5])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [2]) + element(x, [5])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_4_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [2]) + element(x, [5])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_4_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [2]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_4_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_4_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [5]) >= 0 using hypothesis H2. *** Proved C4: element(x, [5]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_4_13. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [5]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2) <= spark__unsigned__u64__last -S- Applied substitution rule round_4_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2) >= 0 -S- Applied substitution rule round_4_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2) <= 18446744073709551615 -S- Applied substitution rule round_4_rules(3). This was achieved by replacing all occurrences of r_512_3_2 by: 54. New H11: 54 >= spark__unsigned__shift_count__first New H12: 54 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [5]), 54) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [5]), 54) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [5]), 54) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [5]), 54) <= 18446744073709551615 -S- Applied substitution rule round_4_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [5]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [5]), 54) >= 0 -S- Applied substitution rule round_4_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [5]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [5]), 54) <= 18446744073709551615 -S- Applied substitution rule round_4_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [2]) + element(x, [5])) mod 18446744073709551616 >= 0 New H4: (element(x, [2]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_4_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_4_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_4_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_4_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [5]), 54) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [5]), 54) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_4_14. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . 
%%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [5]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2), (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2), (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 2 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 2 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 5 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 5 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_4_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2), (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_4_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2), (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2), (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. 
*** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2), (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_4_15. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [5]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2) <= spark__unsigned__u64__last %%% Simplified H23 on reading formula in, to give: %%% H23: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2), (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified H24 on reading formula in, to give: %%% H24: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2), (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last --- Hypothesis H25 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H26 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H27 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H28 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H29 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . 
%%% Simplified C2 on reading formula in, to give: %%% C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2), (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [2], (element(x, [2]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2), (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) *** Proved C1: true *** Proved C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2), (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [2], (element(x, [2]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_3_2), (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) using hypotheses H2, H3, H4, H5, H6, H7, H8, H23 & H24. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_4_16. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_4_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_4_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_4_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_4_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_4_rules(16). 
This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [4]) + element(x, [3])) mod 18446744073709551616 >= 0 New C2: (element(x, [4]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [4]) + element(x, [3])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [4]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_4_17. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [3]) <= interfaces__unsigned_64__last *** Proved C5: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 3 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 3 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_4_rules(4). This was achieved by replacing all occurrences of r_512_3_3 by: 56. New C1: 56 >= spark__unsigned__shift_count__first New C2: 56 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_4_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_4_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule round_4_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [3]) >= 0 -S- Applied substitution rule round_4_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [3]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_4_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [4]) + element(x, [3])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [4]) + element(x, [3])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_4_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [4]) + element(x, [3])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_4_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [4]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_4_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_4_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [3]) >= 0 using hypothesis H2. *** Proved C4: element(x, [3]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_4_18. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [3]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [3]), r_512_3_3) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [3]), r_512_3_3) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [3]), r_512_3_3) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [3]), r_512_3_3) <= spark__unsigned__u64__last -S- Applied substitution rule round_4_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [3]), r_512_3_3) >= 0 -S- Applied substitution rule round_4_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [3]), r_512_3_3) <= 18446744073709551615 -S- Applied substitution rule round_4_rules(4). This was achieved by replacing all occurrences of r_512_3_3 by: 56. New H11: 56 >= spark__unsigned__shift_count__first New H12: 56 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [3]), 56) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [3]), 56) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [3]), 56) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [3]), 56) <= 18446744073709551615 -S- Applied substitution rule round_4_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [3]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [3]), 56) >= 0 -S- Applied substitution rule round_4_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [3]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [3]), 56) <= 18446744073709551615 -S- Applied substitution rule round_4_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [4]) + element(x, [3])) mod 18446744073709551616 >= 0 New H4: (element(x, [4]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_4_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_4_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_4_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_4_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [3]), 56) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [3]), 56) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_4_19. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . 
%%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [3]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [3]), r_512_3_3) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [3]), r_512_3_3) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [3]), r_512_3_3) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [3]), r_512_3_3) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_3_3), (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_3_3), (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 4 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 4 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 3 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 3 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_4_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [3]), r_512_3_3) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_3_3), (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_4_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [3]), r_512_3_3) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_3_3), (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_3_3), (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. 
*** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_3_3), (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_4_20. @@@@@@@@@@ *** Proved C1: true *** PROVED VC. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_7.vsm0000644000175000017500000000005311712765060031657 0ustar eugeneugenround_7,0,0,0,0,0,0,0,-nan,-nan,-nan,-nan, ././@LongLink0000000000000000000000000000015000000000000011561 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_ts.vctspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_ts.0000644000175000017500000000000011712513676032417 0ustar eugeneugen././@LongLink0000000000000000000000000000016100000000000011563 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/do_first_key_injection.vsmspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/do_first_key_i0000644000175000017500000000007211712765060032470 0ustar eugeneugendo_first_key_injection,0,0,0,0,0,0,0,-nan,-nan,-nan,-nan, spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_6.slg0000644000175000017500000025145211712513676031655 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Round_6 @@@@@@@@@@ VC: procedure_round_6_1. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_6_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_6_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_6_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_6_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. 
New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_6_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [2]) + element(x, [1])) mod 18446744073709551616 >= 0 New C2: (element(x, [2]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [2]) + element(x, [1])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [2]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_6_2. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [1]) <= interfaces__unsigned_64__last *** Proved C5: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 1 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 1 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_6_rules(1). This was achieved by replacing all occurrences of r_512_5_0 by: 13. New C1: 13 >= spark__unsigned__shift_count__first New C2: 13 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_6_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_6_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule round_6_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [1]) >= 0 -S- Applied substitution rule round_6_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [1]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_6_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [2]) + element(x, [1])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [2]) + element(x, [1])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_6_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [2]) + element(x, [1])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_6_rules(19). 
This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [2]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_6_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_6_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [1]) >= 0 using hypothesis H2. *** Proved C4: element(x, [1]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_6_3. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [1]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0) <= spark__unsigned__u64__last -S- Applied substitution rule round_6_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0) >= 0 -S- Applied substitution rule round_6_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0) <= 18446744073709551615 -S- Applied substitution rule round_6_rules(1). This was achieved by replacing all occurrences of r_512_5_0 by: 13. New H11: 13 >= spark__unsigned__shift_count__first New H12: 13 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [1]), 13) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [1]), 13) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [1]), 13) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [1]), 13) <= 18446744073709551615 -S- Applied substitution rule round_6_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [1]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [1]), 13) >= 0 -S- Applied substitution rule round_6_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [1]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [1]), 13) <= 18446744073709551615 -S- Applied substitution rule round_6_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [2]) + element(x, [1])) mod 18446744073709551616 >= 0 New H4: (element(x, [2]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_6_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_6_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_6_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_6_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [1]), 13) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [1]), 13) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_6_4. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . 
%%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [1]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0), (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0), (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 2 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 2 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 1 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 1 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_6_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0), (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_6_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0), (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0), (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. 
*** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0), (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_6_5. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [1]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0) <= spark__unsigned__u64__last %%% Simplified H23 on reading formula in, to give: %%% H23: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0), (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified H24 on reading formula in, to give: %%% H24: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0), (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last --- Hypothesis H25 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H26 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H27 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H28 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H29 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . 
%%% Simplified C2 on reading formula in, to give: %%% C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0), (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [2], (element(x, [2]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0), (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) *** Proved C1: true *** Proved C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0), (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [2], (element(x, [2]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_5_0), (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) using hypotheses H2, H3, H4, H5, H6, H7, H8, H23 & H24. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_6_6. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_6_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_6_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_6_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_6_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_6_rules(16). 
This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [4]) + element(x, [7])) mod 18446744073709551616 >= 0 New C2: (element(x, [4]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [4]) + element(x, [7])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [4]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_6_7. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [7]) <= interfaces__unsigned_64__last *** Proved C5: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 7 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 7 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_6_rules(2). This was achieved by replacing all occurrences of r_512_5_1 by: 50. New C1: 50 >= spark__unsigned__shift_count__first New C2: 50 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_6_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_6_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule round_6_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [7]) >= 0 -S- Applied substitution rule round_6_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [7]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_6_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [4]) + element(x, [7])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [4]) + element(x, [7])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_6_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [4]) + element(x, [7])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_6_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [4]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_6_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_6_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [7]) >= 0 using hypothesis H2. *** Proved C4: element(x, [7]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_6_8. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [7]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1) <= spark__unsigned__u64__last -S- Applied substitution rule round_6_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1) >= 0 -S- Applied substitution rule round_6_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1) <= 18446744073709551615 -S- Applied substitution rule round_6_rules(2). This was achieved by replacing all occurrences of r_512_5_1 by: 50. New H11: 50 >= spark__unsigned__shift_count__first New H12: 50 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [7]), 50) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [7]), 50) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [7]), 50) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [7]), 50) <= 18446744073709551615 -S- Applied substitution rule round_6_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [7]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [7]), 50) >= 0 -S- Applied substitution rule round_6_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [7]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [7]), 50) <= 18446744073709551615 -S- Applied substitution rule round_6_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [4]) + element(x, [7])) mod 18446744073709551616 >= 0 New H4: (element(x, [4]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_6_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_6_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_6_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_6_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [7]), 50) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [7]), 50) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_6_9. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . 
%%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [7]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1), (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1), (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 4 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 4 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 7 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 7 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_6_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1), (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_6_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1), (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1), (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. 
*** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1), (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_6_10. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [7]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1) <= spark__unsigned__u64__last %%% Simplified H23 on reading formula in, to give: %%% H23: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1), (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified H24 on reading formula in, to give: %%% H24: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1), (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last --- Hypothesis H25 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H26 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H27 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H28 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H29 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . 
%%% Simplified C2 on reading formula in, to give: %%% C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], bit__xor( spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1), (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [4], (element(x, [4]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], bit__xor( spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1), (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) *** Proved C1: true *** Proved C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], bit__xor( spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1), (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [4], (element(x, [4]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], bit__xor( spark__unsigned__rotate_left_64(element(x, [7]), r_512_5_1), (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) using hypotheses H2, H3, H4, H5, H6, H7, H8, H23 & H24. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_6_11. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_6_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_6_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_6_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_6_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_6_rules(16). 
This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [6]) + element(x, [5])) mod 18446744073709551616 >= 0 New C2: (element(x, [6]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [6]) + element(x, [5])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [6]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_6_12. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [5]) <= interfaces__unsigned_64__last *** Proved C5: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 5 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 5 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_6_rules(3). This was achieved by replacing all occurrences of r_512_5_2 by: 10. New C1: 10 >= spark__unsigned__shift_count__first New C2: 10 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_6_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_6_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule round_6_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [5]) >= 0 -S- Applied substitution rule round_6_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [5]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_6_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [6]) + element(x, [5])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [6]) + element(x, [5])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_6_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [6]) + element(x, [5])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_6_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [6]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_6_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_6_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [5]) >= 0 using hypothesis H2. *** Proved C4: element(x, [5]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_6_13. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [5]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2) <= spark__unsigned__u64__last -S- Applied substitution rule round_6_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2) >= 0 -S- Applied substitution rule round_6_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2) <= 18446744073709551615 -S- Applied substitution rule round_6_rules(3). This was achieved by replacing all occurrences of r_512_5_2 by: 10. New H11: 10 >= spark__unsigned__shift_count__first New H12: 10 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [5]), 10) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [5]), 10) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [5]), 10) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [5]), 10) <= 18446744073709551615 -S- Applied substitution rule round_6_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [5]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [5]), 10) >= 0 -S- Applied substitution rule round_6_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [5]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [5]), 10) <= 18446744073709551615 -S- Applied substitution rule round_6_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [6]) + element(x, [5])) mod 18446744073709551616 >= 0 New H4: (element(x, [6]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_6_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_6_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_6_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_6_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [5]), 10) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [5]), 10) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_6_14. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . 
%%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [5]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2), (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2), (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 6 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 6 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 5 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 5 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_6_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2), (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_6_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2), (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2), (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. 
*** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2), (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_6_15. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [5]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2) <= spark__unsigned__u64__last %%% Simplified H23 on reading formula in, to give: %%% H23: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2), (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified H24 on reading formula in, to give: %%% H24: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2), (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last --- Hypothesis H25 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H26 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H27 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H28 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H29 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . 
%%% Simplified C2 on reading formula in, to give: %%% C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2), (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [6], (element(x, [6]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2), (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) *** Proved C1: true *** Proved C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2), (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [6], (element(x, [6]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_5_2), (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) using hypotheses H2, H3, H4, H5, H6, H7, H8, H23 & H24. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_6_16. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_6_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_6_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_6_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_6_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_6_rules(16). 
This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [0]) + element(x, [3])) mod 18446744073709551616 >= 0 New C2: (element(x, [0]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [0]) + element(x, [3])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [0]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_6_17. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [3]) <= interfaces__unsigned_64__last *** Proved C5: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 3 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 3 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_6_rules(4). This was achieved by replacing all occurrences of r_512_5_3 by: 17. New C1: 17 >= spark__unsigned__shift_count__first New C2: 17 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_6_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_6_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule round_6_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [3]) >= 0 -S- Applied substitution rule round_6_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [3]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_6_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [0]) + element(x, [3])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [0]) + element(x, [3])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_6_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [0]) + element(x, [3])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_6_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [0]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_6_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_6_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [3]) >= 0 using hypothesis H2. *** Proved C4: element(x, [3]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_6_18. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [3]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [3]), r_512_5_3) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [3]), r_512_5_3) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [3]), r_512_5_3) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [3]), r_512_5_3) <= spark__unsigned__u64__last -S- Applied substitution rule round_6_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [3]), r_512_5_3) >= 0 -S- Applied substitution rule round_6_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [3]), r_512_5_3) <= 18446744073709551615 -S- Applied substitution rule round_6_rules(4). This was achieved by replacing all occurrences of r_512_5_3 by: 17. New H11: 17 >= spark__unsigned__shift_count__first New H12: 17 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [3]), 17) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [3]), 17) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [3]), 17) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [3]), 17) <= 18446744073709551615 -S- Applied substitution rule round_6_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [3]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [3]), 17) >= 0 -S- Applied substitution rule round_6_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [3]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [3]), 17) <= 18446744073709551615 -S- Applied substitution rule round_6_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [0]) + element(x, [3])) mod 18446744073709551616 >= 0 New H4: (element(x, [0]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_6_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_6_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_6_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_6_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [3]), 17) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [3]), 17) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_6_19. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . 
%%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [3]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [3]), r_512_5_3) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [3]), r_512_5_3) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [3]), r_512_5_3) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [3]), r_512_5_3) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_5_3), (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_5_3), (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 0 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 0 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 3 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 3 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_6_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [3]), r_512_5_3) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_5_3), (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_6_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [3]), r_512_5_3) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_5_3), (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_5_3), (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. 
*** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_5_3), (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_6_20. @@@@@@@@@@ *** Proved C1: true *** PROVED VC. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_3.vct0000644000175000017500000000000011712513676031636 0ustar eugeneugen././@LongLink0000000000000000000000000000016200000000000011564 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_key_schedule.fdlspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_key0000644000175000017500000002126511712513676032523 0ustar eugeneugen {*******************************************************} {FDL Declarations} {Examiner Pro Edition} {*******************************************************} {procedure Skein.Skein_512_Process_Block.Initialize_Key_Schedule} title procedure initialize_key_schedule; function round__(real) : integer; type natural = integer; type interfaces__unsigned_8 = integer; type interfaces__unsigned_16 = integer; type interfaces__unsigned_32 = integer; type interfaces__unsigned_64 = integer; type spark__unsigned__u6 = integer; type spark__unsigned__u7 = integer; type spark__unsigned__byte = integer; type spark__unsigned__u16 = integer; type spark__unsigned__u32 = integer; type spark__unsigned__u64 = integer; type spark__crypto__i8 = integer; type spark__crypto__i9 = integer; type spark__crypto__word_count_t = integer; type hash_bit_length = integer; type skein_512_state_words_index = integer; type skein_512_block_bytes_index = integer; type spark__crypto__byte_seq = array [natural] of spark__unsigned__byte; type spark__crypto__u64_seq = array [ spark__crypto__word_count_t] of spark__unsigned__u64; type tweak_value = record byte_count_lsb : spark__unsigned__u64; byte_count_msb : spark__unsigned__u32; reserved : spark__unsigned__u16; tree_level : spark__unsigned__u7; bit_pad : boolean; field_type : spark__unsigned__u6; first_block : boolean; final_block : boolean end; type context_header = record tweak_words : tweak_value; hash_bit_len : hash_bit_length; byte_count : natural end; type skein_512_context = record h : context_header; x : spark__crypto__u64_seq; b : spark__crypto__byte_seq end; const skein_ks_parity : interfaces__unsigned_64 = pending; const wcnt : integer = pending; const skein_512_block_bytes_index__base__first : integer = pending; const skein_512_block_bytes_index__base__last : integer = pending; const skein_512_state_words_index__base__first : integer = pending; const skein_512_state_words_index__base__last : integer = pending; const hash_bit_length__base__first : integer = pending; const hash_bit_length__base__last : integer = pending; const spark__crypto__word_count_t__base__first : integer = pending; const spark__crypto__word_count_t__base__last : integer = pending; const spark__crypto__i9__base__first : integer = pending; const spark__crypto__i9__base__last : integer = pending; const spark__crypto__i8__base__first : integer = pending; const spark__crypto__i8__base__last : integer = pending; const spark__unsigned__u64__base__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__base__last : interfaces__unsigned_64 = pending; const spark__unsigned__u32__base__first : interfaces__unsigned_32 = pending; const spark__unsigned__u32__base__last : 
interfaces__unsigned_32 = pending; const spark__unsigned__u16__base__first : interfaces__unsigned_16 = pending; const spark__unsigned__u16__base__last : interfaces__unsigned_16 = pending; const spark__unsigned__byte__base__first : interfaces__unsigned_8 = pending; const spark__unsigned__byte__base__last : interfaces__unsigned_8 = pending; const spark__unsigned__u7__base__first : spark__unsigned__u7 = pending; const spark__unsigned__u7__base__last : spark__unsigned__u7 = pending; const spark__unsigned__u6__base__first : spark__unsigned__u6 = pending; const spark__unsigned__u6__base__last : spark__unsigned__u6 = pending; const interfaces__unsigned_64__base__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_32__base__first : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__base__last : interfaces__unsigned_32 = pending; const interfaces__unsigned_16__base__first : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__base__last : interfaces__unsigned_16 = pending; const interfaces__unsigned_8__base__first : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__base__last : interfaces__unsigned_8 = pending; const natural__base__first : integer = pending; const natural__base__last : integer = pending; const integer__base__first : integer = pending; const integer__base__last : integer = pending; const context_header__size : integer = pending; const tweak_value__size : integer = pending; const skein_512_context__size : integer = pending; const skein_512_block_bytes_index__first : integer = pending; const skein_512_block_bytes_index__last : integer = pending; const skein_512_block_bytes_index__size : integer = pending; const skein_512_state_words_index__first : integer = pending; const skein_512_state_words_index__last : integer = pending; const skein_512_state_words_index__size : integer = pending; const hash_bit_length__first : integer = pending; const hash_bit_length__last : integer = pending; const hash_bit_length__size : integer = pending; const spark__crypto__word_count_t__first : integer = pending; const spark__crypto__word_count_t__last : integer = pending; const spark__crypto__word_count_t__size : integer = pending; const spark__crypto__i9__first : integer = pending; const spark__crypto__i9__last : integer = pending; const spark__crypto__i9__size : integer = pending; const spark__crypto__i8__first : integer = pending; const spark__crypto__i8__last : integer = pending; const spark__crypto__i8__size : integer = pending; const spark__unsigned__u64__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__last : interfaces__unsigned_64 = pending; const spark__unsigned__u64__modulus : integer = pending; const spark__unsigned__u64__size : integer = pending; const spark__unsigned__u32__first : interfaces__unsigned_32 = pending; const spark__unsigned__u32__last : interfaces__unsigned_32 = pending; const spark__unsigned__u32__modulus : integer = pending; const spark__unsigned__u32__size : integer = pending; const spark__unsigned__u16__first : interfaces__unsigned_16 = pending; const spark__unsigned__u16__last : interfaces__unsigned_16 = pending; const spark__unsigned__u16__modulus : integer = pending; const spark__unsigned__u16__size : integer = pending; const spark__unsigned__byte__first : interfaces__unsigned_8 = pending; const spark__unsigned__byte__last : interfaces__unsigned_8 = pending; const spark__unsigned__byte__modulus : integer = 
pending; const spark__unsigned__byte__size : integer = pending; const spark__unsigned__u7__first : spark__unsigned__u7 = pending; const spark__unsigned__u7__last : spark__unsigned__u7 = pending; const spark__unsigned__u7__modulus : integer = pending; const spark__unsigned__u7__size : integer = pending; const spark__unsigned__u6__first : spark__unsigned__u6 = pending; const spark__unsigned__u6__last : spark__unsigned__u6 = pending; const spark__unsigned__u6__modulus : integer = pending; const spark__unsigned__u6__size : integer = pending; const interfaces__unsigned_64__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__modulus : integer = pending; const interfaces__unsigned_64__size : integer = pending; const interfaces__unsigned_32__first : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__last : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__modulus : integer = pending; const interfaces__unsigned_32__size : integer = pending; const interfaces__unsigned_16__first : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__last : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__modulus : integer = pending; const interfaces__unsigned_16__size : integer = pending; const interfaces__unsigned_8__first : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__last : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__modulus : integer = pending; const interfaces__unsigned_8__size : integer = pending; const natural__first : integer = pending; const natural__last : integer = pending; const natural__size : integer = pending; const integer__first : integer = pending; const integer__last : integer = pending; const integer__size : integer = pending; var ctx : skein_512_context; var ks : spark__crypto__u64_seq; var loop__1__i : integer; end; spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_4.rls0000644000175000017500000000622111712513676031656 0ustar eugeneugen /*********************************************************/ /*Proof Rule Declarations*/ /*Examiner Pro Edition*/ /*********************************************************/ /*procedure Skein.Skein_512_Process_Block.Round_4*/ rule_family round_4_rules: X requires [X:any] & X <= Y requires [X:ire, Y:ire] & X >= Y requires [X:ire, Y:ire]. round_4_rules(1): r_512_3_0 may_be_replaced_by 44. round_4_rules(2): r_512_3_1 may_be_replaced_by 9. round_4_rules(3): r_512_3_2 may_be_replaced_by 54. round_4_rules(4): r_512_3_3 may_be_replaced_by 56. round_4_rules(5): integer__size >= 0 may_be_deduced. round_4_rules(6): integer__first may_be_replaced_by -2147483648. round_4_rules(7): integer__last may_be_replaced_by 2147483647. round_4_rules(8): integer__base__first may_be_replaced_by -2147483648. round_4_rules(9): integer__base__last may_be_replaced_by 2147483647. round_4_rules(10): interfaces__unsigned_64__size >= 0 may_be_deduced. round_4_rules(11): interfaces__unsigned_64__size may_be_replaced_by 64. round_4_rules(12): interfaces__unsigned_64__first may_be_replaced_by 0. round_4_rules(13): interfaces__unsigned_64__last may_be_replaced_by 18446744073709551615. round_4_rules(14): interfaces__unsigned_64__base__first may_be_replaced_by 0. round_4_rules(15): interfaces__unsigned_64__base__last may_be_replaced_by 18446744073709551615. round_4_rules(16): interfaces__unsigned_64__modulus may_be_replaced_by 18446744073709551616. 
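/* Note: the substitution rules in this family bind the symbolic constants used
   in the round_4 VCs to literal values: interfaces__unsigned_64__modulus to
   2**64 = 18446744073709551616, interfaces__unsigned_64__last to 2**64 - 1, and
   round_4_rules(1) .. round_4_rules(4) bind the Round-4 rotation distances
   r_512_3_0 .. r_512_3_3 to 44, 9, 54 and 56. The "-S- Applied substitution
   rule" lines in the .siv and .log traces in this archive are these rules being
   applied by the Simplifier. A minimal Ada sketch, an assumption for
   illustration only and not the shipped Skein source, of the kind of MIX step
   whose run-time checks the round_4 VCs later in this archive discharge:

   with Interfaces; use Interfaces;
   procedure Mix_Sketch is
      type State_Index is range 0 .. 7;                    --  spark__crypto__i8
      type State is array (State_Index) of Unsigned_64;
      R_512_3_0 : constant Natural := 44;                  --  round_4_rules(1)
      X : State := (others => 0);
   begin
      --  Wrap-around add, rotate, xor: the shape of the expressions proved
      --  in-range in round_4.vcg.
      X (6) := X (6) + X (1);                              --  addition is mod 2**64
      X (1) := Rotate_Left (X (1), R_512_3_0) xor X (6);
   end Mix_Sketch;
*/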
round_4_rules(17): spark__unsigned__u64__size >= 0 may_be_deduced. round_4_rules(18): spark__unsigned__u64__first may_be_replaced_by 0. round_4_rules(19): spark__unsigned__u64__last may_be_replaced_by 18446744073709551615. round_4_rules(20): spark__unsigned__u64__base__first may_be_replaced_by 0. round_4_rules(21): spark__unsigned__u64__base__last may_be_replaced_by 18446744073709551615. round_4_rules(22): spark__unsigned__u64__modulus may_be_replaced_by 18446744073709551616. round_4_rules(23): spark__unsigned__shift_count__size >= 0 may_be_deduced. round_4_rules(24): spark__unsigned__shift_count__first may_be_replaced_by 0. round_4_rules(25): spark__unsigned__shift_count__last may_be_replaced_by 64. round_4_rules(26): spark__unsigned__shift_count__base__first may_be_replaced_by -2147483648. round_4_rules(27): spark__unsigned__shift_count__base__last may_be_replaced_by 2147483647. round_4_rules(28): spark__crypto__i8__size >= 0 may_be_deduced. round_4_rules(29): spark__crypto__i8__first may_be_replaced_by 0. round_4_rules(30): spark__crypto__i8__last may_be_replaced_by 7. round_4_rules(31): spark__crypto__i8__base__first may_be_replaced_by -2147483648. round_4_rules(32): spark__crypto__i8__base__last may_be_replaced_by 2147483647. round_4_rules(33): spark__crypto__word_count_t__size >= 0 may_be_deduced. round_4_rules(34): spark__crypto__word_count_t__first may_be_replaced_by 0. round_4_rules(35): spark__crypto__word_count_t__last may_be_replaced_by 268435455. round_4_rules(36): spark__crypto__word_count_t__base__first may_be_replaced_by -2147483648. round_4_rules(37): spark__crypto__word_count_t__base__last may_be_replaced_by 2147483647. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_1.vsm0000644000175000017500000000005311712765060031651 0ustar eugeneugenround_1,0,0,0,0,0,0,0,-nan,-nan,-nan,-nan, ././@LongLink0000000000000000000000000000016200000000000011564 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_key_schedule.rlsspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_key0000644000175000017500000002361111712513676032520 0ustar eugeneugen /*********************************************************/ /*Proof Rule Declarations*/ /*Examiner Pro Edition*/ /*********************************************************/ /*procedure Skein.Skein_512_Process_Block.Initialize_Key_Schedule*/ rule_family initialize_k_rules: X requires [X:any] & X <= Y requires [X:ire, Y:ire] & X >= Y requires [X:ire, Y:ire]. initialize_k_rules(1): skein_ks_parity may_be_replaced_by 2004413935125273122. initialize_k_rules(2): wcnt may_be_replaced_by 8. initialize_k_rules(3): integer__size >= 0 may_be_deduced. initialize_k_rules(4): integer__first may_be_replaced_by -2147483648. initialize_k_rules(5): integer__last may_be_replaced_by 2147483647. initialize_k_rules(6): integer__base__first may_be_replaced_by -2147483648. initialize_k_rules(7): integer__base__last may_be_replaced_by 2147483647. initialize_k_rules(8): natural__size >= 0 may_be_deduced. initialize_k_rules(9): natural__first may_be_replaced_by 0. initialize_k_rules(10): natural__last may_be_replaced_by 2147483647. initialize_k_rules(11): natural__base__first may_be_replaced_by -2147483648. initialize_k_rules(12): natural__base__last may_be_replaced_by 2147483647. initialize_k_rules(13): interfaces__unsigned_8__size >= 0 may_be_deduced. initialize_k_rules(14): interfaces__unsigned_8__size may_be_replaced_by 8. 
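/* Note: initialize_k_rules(1) above replaces skein_ks_parity by
   2004413935125273122, which is 16#1BD11BDA_A9FC1A22#, the Threefish
   key-schedule parity constant (C240) from the Skein 1.3 specification, and
   initialize_k_rules(2) replaces wcnt by 8, the number of 64-bit state words
   in Skein-512. A hypothetical Ada sketch of the conventional Skein
   key-schedule set-up that these declarations suggest, an assumption for
   illustration only and not the actual body of Initialize_Key_Schedule:

   with Interfaces; use Interfaces;
   procedure Key_Schedule_Sketch is
      WCNT            : constant := 8;                     --  initialize_k_rules(2)
      Skein_KS_Parity : constant Unsigned_64 := 16#1BD11BDA_A9FC1A22#;
      type Word_Index is range 0 .. WCNT;                  --  spark__crypto__i9
      type U64_Seq is array (Word_Index) of Unsigned_64;
      X  : U64_Seq := (others => 0);   --  stand-in for Ctx.X; slot WCNT unused
      KS : U64_Seq;
   begin
      --  Copy the chaining words and accumulate their xor, seeded with the
      --  parity constant.
      KS (WCNT) := Skein_KS_Parity;
      for I in Word_Index range 0 .. WCNT - 1 loop
         KS (I)    := X (I);
         KS (WCNT) := KS (WCNT) xor X (I);
      end loop;
   end Key_Schedule_Sketch;
*/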
initialize_k_rules(15): interfaces__unsigned_8__first may_be_replaced_by 0. initialize_k_rules(16): interfaces__unsigned_8__last may_be_replaced_by 255. initialize_k_rules(17): interfaces__unsigned_8__base__first may_be_replaced_by 0. initialize_k_rules(18): interfaces__unsigned_8__base__last may_be_replaced_by 255. initialize_k_rules(19): interfaces__unsigned_8__modulus may_be_replaced_by 256. initialize_k_rules(20): interfaces__unsigned_16__size >= 0 may_be_deduced. initialize_k_rules(21): interfaces__unsigned_16__size may_be_replaced_by 16. initialize_k_rules(22): interfaces__unsigned_16__first may_be_replaced_by 0. initialize_k_rules(23): interfaces__unsigned_16__last may_be_replaced_by 65535. initialize_k_rules(24): interfaces__unsigned_16__base__first may_be_replaced_by 0. initialize_k_rules(25): interfaces__unsigned_16__base__last may_be_replaced_by 65535. initialize_k_rules(26): interfaces__unsigned_16__modulus may_be_replaced_by 65536. initialize_k_rules(27): interfaces__unsigned_32__size >= 0 may_be_deduced. initialize_k_rules(28): interfaces__unsigned_32__size may_be_replaced_by 32. initialize_k_rules(29): interfaces__unsigned_32__first may_be_replaced_by 0. initialize_k_rules(30): interfaces__unsigned_32__last may_be_replaced_by 4294967295. initialize_k_rules(31): interfaces__unsigned_32__base__first may_be_replaced_by 0. initialize_k_rules(32): interfaces__unsigned_32__base__last may_be_replaced_by 4294967295. initialize_k_rules(33): interfaces__unsigned_32__modulus may_be_replaced_by 4294967296. initialize_k_rules(34): interfaces__unsigned_64__size >= 0 may_be_deduced. initialize_k_rules(35): interfaces__unsigned_64__size may_be_replaced_by 64. initialize_k_rules(36): interfaces__unsigned_64__first may_be_replaced_by 0. initialize_k_rules(37): interfaces__unsigned_64__last may_be_replaced_by 18446744073709551615. initialize_k_rules(38): interfaces__unsigned_64__base__first may_be_replaced_by 0. initialize_k_rules(39): interfaces__unsigned_64__base__last may_be_replaced_by 18446744073709551615. initialize_k_rules(40): interfaces__unsigned_64__modulus may_be_replaced_by 18446744073709551616. initialize_k_rules(41): spark__unsigned__u6__size >= 0 may_be_deduced. initialize_k_rules(42): spark__unsigned__u6__first may_be_replaced_by 0. initialize_k_rules(43): spark__unsigned__u6__last may_be_replaced_by 63. initialize_k_rules(44): spark__unsigned__u6__base__first may_be_replaced_by 0. initialize_k_rules(45): spark__unsigned__u6__base__last may_be_replaced_by 63. initialize_k_rules(46): spark__unsigned__u6__modulus may_be_replaced_by 64. initialize_k_rules(47): spark__unsigned__u7__size >= 0 may_be_deduced. initialize_k_rules(48): spark__unsigned__u7__first may_be_replaced_by 0. initialize_k_rules(49): spark__unsigned__u7__last may_be_replaced_by 127. initialize_k_rules(50): spark__unsigned__u7__base__first may_be_replaced_by 0. initialize_k_rules(51): spark__unsigned__u7__base__last may_be_replaced_by 127. initialize_k_rules(52): spark__unsigned__u7__modulus may_be_replaced_by 128. initialize_k_rules(53): spark__unsigned__byte__size >= 0 may_be_deduced. initialize_k_rules(54): spark__unsigned__byte__first may_be_replaced_by 0. initialize_k_rules(55): spark__unsigned__byte__last may_be_replaced_by 255. initialize_k_rules(56): spark__unsigned__byte__base__first may_be_replaced_by 0. initialize_k_rules(57): spark__unsigned__byte__base__last may_be_replaced_by 255. initialize_k_rules(58): spark__unsigned__byte__modulus may_be_replaced_by 256. 
initialize_k_rules(59): spark__unsigned__u16__size >= 0 may_be_deduced. initialize_k_rules(60): spark__unsigned__u16__first may_be_replaced_by 0. initialize_k_rules(61): spark__unsigned__u16__last may_be_replaced_by 65535. initialize_k_rules(62): spark__unsigned__u16__base__first may_be_replaced_by 0. initialize_k_rules(63): spark__unsigned__u16__base__last may_be_replaced_by 65535. initialize_k_rules(64): spark__unsigned__u16__modulus may_be_replaced_by 65536. initialize_k_rules(65): spark__unsigned__u32__size >= 0 may_be_deduced. initialize_k_rules(66): spark__unsigned__u32__first may_be_replaced_by 0. initialize_k_rules(67): spark__unsigned__u32__last may_be_replaced_by 4294967295. initialize_k_rules(68): spark__unsigned__u32__base__first may_be_replaced_by 0. initialize_k_rules(69): spark__unsigned__u32__base__last may_be_replaced_by 4294967295. initialize_k_rules(70): spark__unsigned__u32__modulus may_be_replaced_by 4294967296. initialize_k_rules(71): spark__unsigned__u64__size >= 0 may_be_deduced. initialize_k_rules(72): spark__unsigned__u64__first may_be_replaced_by 0. initialize_k_rules(73): spark__unsigned__u64__last may_be_replaced_by 18446744073709551615. initialize_k_rules(74): spark__unsigned__u64__base__first may_be_replaced_by 0. initialize_k_rules(75): spark__unsigned__u64__base__last may_be_replaced_by 18446744073709551615. initialize_k_rules(76): spark__unsigned__u64__modulus may_be_replaced_by 18446744073709551616. initialize_k_rules(77): spark__crypto__i8__size >= 0 may_be_deduced. initialize_k_rules(78): spark__crypto__i8__first may_be_replaced_by 0. initialize_k_rules(79): spark__crypto__i8__last may_be_replaced_by 7. initialize_k_rules(80): spark__crypto__i8__base__first may_be_replaced_by -2147483648. initialize_k_rules(81): spark__crypto__i8__base__last may_be_replaced_by 2147483647. initialize_k_rules(82): spark__crypto__i9__size >= 0 may_be_deduced. initialize_k_rules(83): spark__crypto__i9__first may_be_replaced_by 0. initialize_k_rules(84): spark__crypto__i9__last may_be_replaced_by 8. initialize_k_rules(85): spark__crypto__i9__base__first may_be_replaced_by -2147483648. initialize_k_rules(86): spark__crypto__i9__base__last may_be_replaced_by 2147483647. initialize_k_rules(87): spark__crypto__word_count_t__size >= 0 may_be_deduced. initialize_k_rules(88): spark__crypto__word_count_t__first may_be_replaced_by 0. initialize_k_rules(89): spark__crypto__word_count_t__last may_be_replaced_by 268435455. initialize_k_rules(90): spark__crypto__word_count_t__base__first may_be_replaced_by -2147483648. initialize_k_rules(91): spark__crypto__word_count_t__base__last may_be_replaced_by 2147483647. initialize_k_rules(92): hash_bit_length__size >= 0 may_be_deduced. initialize_k_rules(93): hash_bit_length__first may_be_replaced_by 0. initialize_k_rules(94): hash_bit_length__last may_be_replaced_by 2147483640. initialize_k_rules(95): hash_bit_length__base__first may_be_replaced_by -2147483648. initialize_k_rules(96): hash_bit_length__base__last may_be_replaced_by 2147483647. initialize_k_rules(97): skein_512_state_words_index__size >= 0 may_be_deduced. initialize_k_rules(98): skein_512_state_words_index__first may_be_replaced_by 0. initialize_k_rules(99): skein_512_state_words_index__last may_be_replaced_by 7. initialize_k_rules(100): skein_512_state_words_index__base__first may_be_replaced_by -2147483648. initialize_k_rules(101): skein_512_state_words_index__base__last may_be_replaced_by 2147483647. 
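/* Note: the index and range rules in this family fix the Skein-512 geometry
   assumed throughout these VCs: spark__crypto__i8 and
   skein_512_state_words_index run over 0 .. 7 (eight 64-bit chaining words),
   skein_512_block_bytes_index runs over 0 .. 63 (a 64-byte block), and
   hash_bit_length is bounded by 2147483640, presumably the largest multiple of
   8 not exceeding Integer'Last so that the hash length is always a whole
   number of bytes. */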
initialize_k_rules(102): skein_512_block_bytes_index__size >= 0 may_be_deduced. initialize_k_rules(103): skein_512_block_bytes_index__first may_be_replaced_by 0. initialize_k_rules(104): skein_512_block_bytes_index__last may_be_replaced_by 63. initialize_k_rules(105): skein_512_block_bytes_index__base__first may_be_replaced_by -2147483648. initialize_k_rules(106): skein_512_block_bytes_index__base__last may_be_replaced_by 2147483647. initialize_k_rules(107): skein_512_context__size >= 0 may_be_deduced. initialize_k_rules(108): A = B may_be_deduced_from [goal(checktype(A,skein_512_context)), goal(checktype(B,skein_512_context)), fld_h(A) = fld_h(B), fld_x(A) = fld_x(B), fld_b(A) = fld_b(B)]. initialize_k_rules(109): tweak_value__size >= 0 may_be_deduced. initialize_k_rules(110): tweak_value__size may_be_replaced_by 128. initialize_k_rules(111): A = B may_be_deduced_from [goal(checktype(A,tweak_value)), goal(checktype(B,tweak_value)), fld_byte_count_lsb(A) = fld_byte_count_lsb(B), fld_byte_count_msb(A) = fld_byte_count_msb(B), fld_reserved(A) = fld_reserved(B), fld_tree_level(A) = fld_tree_level(B), fld_bit_pad(A) = fld_bit_pad(B), fld_field_type(A) = fld_field_type(B), fld_first_block(A) = fld_first_block(B), fld_final_block(A) = fld_final_block(B)]. initialize_k_rules(112): context_header__size >= 0 may_be_deduced. initialize_k_rules(113): A = B may_be_deduced_from [goal(checktype(A,context_header)), goal(checktype(B,context_header)), fld_tweak_words(A) = fld_tweak_words(B), fld_hash_bit_len(A) = fld_hash_bit_len(B), fld_byte_count(A) = fld_byte_count(B)]. ././@LongLink0000000000000000000000000000016100000000000011563 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/do_first_key_injection.logspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/do_first_key_i0000644000175000017500000000063011712513676032474 0ustar eugeneugenSPARK Simplifier Pro Edition Reading do_first_key_injection.fdl (for inherited FDL type declarations) Processing do_first_key_injection.vcg ... Simplified VC: 1 - All conclusions proved Simplified VC: 2 - All conclusions proved Simplified VC: 3 - All conclusions proved Simplified VC: 4 - All conclusions proved Automatic simplification completed. Simplified output sent to do_first_key_injection.siv. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_7.siv0000644000175000017500000000710411712513676031663 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Round_7 For path(s) from start to run-time check associated with statement of line 456: procedure_round_7_1. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 457: procedure_round_7_2. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 457: procedure_round_7_3. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 458: procedure_round_7_4. *** true . /* all conclusions proved */ For path(s) from start to assertion of line 461: procedure_round_7_5. *** true . 
/* all conclusions proved */ For path(s) from assertion of line 461 to run-time check associated with statement of line 463: procedure_round_7_6. *** true . /* all conclusions proved */ For path(s) from assertion of line 461 to run-time check associated with statement of line 464: procedure_round_7_7. *** true . /* all conclusions proved */ For path(s) from assertion of line 461 to run-time check associated with statement of line 464: procedure_round_7_8. *** true . /* all conclusions proved */ For path(s) from assertion of line 461 to run-time check associated with statement of line 465: procedure_round_7_9. *** true . /* all conclusions proved */ For path(s) from assertion of line 461 to assertion of line 467: procedure_round_7_10. *** true . /* all conclusions proved */ For path(s) from assertion of line 467 to run-time check associated with statement of line 469: procedure_round_7_11. *** true . /* all conclusions proved */ For path(s) from assertion of line 467 to run-time check associated with statement of line 470: procedure_round_7_12. *** true . /* all conclusions proved */ For path(s) from assertion of line 467 to run-time check associated with statement of line 470: procedure_round_7_13. *** true . /* all conclusions proved */ For path(s) from assertion of line 467 to run-time check associated with statement of line 471: procedure_round_7_14. *** true . /* all conclusions proved */ For path(s) from assertion of line 467 to assertion of line 473: procedure_round_7_15. *** true . /* all conclusions proved */ For path(s) from assertion of line 473 to run-time check associated with statement of line 475: procedure_round_7_16. *** true . /* all conclusions proved */ For path(s) from assertion of line 473 to run-time check associated with statement of line 476: procedure_round_7_17. *** true . /* all conclusions proved */ For path(s) from assertion of line 473 to run-time check associated with statement of line 476: procedure_round_7_18. *** true . /* all conclusions proved */ For path(s) from assertion of line 473 to run-time check associated with statement of line 477: procedure_round_7_19. *** true . /* all conclusions proved */ For path(s) from assertion of line 473 to finish: procedure_round_7_20. *** true . 
/* all conclusions proved */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_8.vlg0000644000175000017500000000324511712765060031651 0ustar eugeneugen Non-option args: round_8 Option args: -fuse-concls \ -decls=[SUPRESSED IN PLAIN MODE] \ -unique-working-files \ -rules=[SUPRESSED IN PLAIN MODE] \ -rules=[SUPRESSED IN PLAIN MODE] \ -elim-enums \ -ground-eval-exp \ -abstract-exp \ -abstract-divmod \ -utick \ -gtick \ -longtick \ -echo-final-stats \ -csv-reports-include-unit-kind \ -level=warning \ -bit-type \ -bit-type-bool-eq-to-iff \ -refine-types \ -refine-int-subrange-type \ -abstract-arrays-records-late \ -elim-array-constructors \ -add-array-select-box-update-axioms \ -abstract-array-box-updates \ -add-array-select-update-axioms \ -abstract-array-select-updates \ -abstract-array-types \ -abstract-record-types \ -abstract-bit-ops \ -abstract-bit-valued-eqs \ -abstract-bit-valued-int-le \ -elim-bit-type-and-consts \ -abstract-reals \ -lift-quants \ -strip-quantifier-patterns \ -elim-type-aliases \ -interface-mode=smtlib \ -refine-bit-type-as-int-subtype \ -refine-bit-eq-equiv \ -elim-record-constructors \ -add-record-select-update-axioms \ -abstract-record-selects-updates \ -logic=AUFNIRA \ -report=round_8 \ -prover-command=[SUPRESSED IN PLAIN MODE] \ -siv \ -plain \ -read-unit-rlu-files \ -read-directory-rlu-files \ -delete-rules-with-undeclared-ids \ Total ERROR messages: 0 Total WARNING messages: 0 Summary Stats: true: 0 (-nan%) unproven: 0 (-nan%) error: 0 (-nan%) total: 0 spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_4.vcg0000644000175000017500000015342411712513676031645 0ustar eugeneugen ******************************************************* Semantic Analysis of SPARK Text Examiner Pro Edition ******************************************************* procedure Skein.Skein_512_Process_Block.Round_4 For path(s) from start to run-time check associated with statement of line 366: procedure_round_4_1. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 1 >= spark__crypto__i8__first . C4: 1 <= spark__crypto__i8__last . C5: 6 >= spark__crypto__i8__first . C6: 6 <= spark__crypto__i8__last . C7: 6 >= spark__crypto__i8__first . C8: 6 <= spark__crypto__i8__last . For path(s) from start to run-time check associated with statement of line 367: procedure_round_4_2. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . -> C1: r_512_3_0 >= spark__unsigned__shift_count__first . 
C2: r_512_3_0 <= spark__unsigned__shift_count__last . C3: element(update(x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . C4: element(update(x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . C5: 1 >= spark__crypto__i8__first . C6: 1 <= spark__crypto__i8__last . C7: 1 >= spark__crypto__i8__first . C8: 1 <= spark__crypto__i8__last . For path(s) from start to run-time check associated with statement of line 367: procedure_round_4_3. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . H11: r_512_3_0 >= spark__unsigned__shift_count__first . H12: r_512_3_0 <= spark__unsigned__shift_count__last . H13: element(update(x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . H14: element(update(x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . H15: 1 >= spark__crypto__i8__first . H16: 1 <= spark__crypto__i8__last . H17: 1 >= spark__crypto__i8__first . H18: 1 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_3_0) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_3_0) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_3_0) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_3_0) <= spark__unsigned__u64__last . For path(s) from start to run-time check associated with statement of line 368: procedure_round_4_4. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . H11: r_512_3_0 >= spark__unsigned__shift_count__first . H12: r_512_3_0 <= spark__unsigned__shift_count__last . 
H13: element(update(x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . H14: element(update(x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . H15: 1 >= spark__crypto__i8__first . H16: 1 <= spark__crypto__i8__last . H17: 1 >= spark__crypto__i8__first . H18: 1 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_3_0) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_3_0) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_3_0) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_3_0) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_3_0)), [1]), element(update(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_3_0)), [6])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_3_0)), [1]), element(update(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_3_0)), [6])) <= spark__unsigned__u64__last . C3: 6 >= spark__crypto__i8__first . C4: 6 <= spark__crypto__i8__last . C5: 1 >= spark__crypto__i8__first . C6: 1 <= spark__crypto__i8__last . C7: 1 >= spark__crypto__i8__first . C8: 1 <= spark__crypto__i8__last . For path(s) from start to assertion of line 371: procedure_round_4_5. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . H11: r_512_3_0 >= spark__unsigned__shift_count__first . H12: r_512_3_0 <= spark__unsigned__shift_count__last . H13: element(update(x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . H15: 1 >= spark__crypto__i8__first . H16: 1 <= spark__crypto__i8__last . H17: 1 >= spark__crypto__i8__first . H18: 1 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_3_0) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_3_0) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_3_0) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_3_0) <= spark__unsigned__u64__last . H23: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_3_0)), [1]), element(update(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_3_0)), [6])) >= spark__unsigned__u64__first . H24: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_3_0)), [1]), element(update(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_3_0)), [6])) <= spark__unsigned__u64__last . H25: 6 >= spark__crypto__i8__first . H26: 6 <= spark__crypto__i8__last . H27: 1 >= spark__crypto__i8__first . H28: 1 <= spark__crypto__i8__last . H29: 1 >= spark__crypto__i8__first . H30: 1 <= spark__crypto__i8__last . -> C1: true . 
C2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(update(update(update( x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_3_0)), [1], bit__xor(element(update(update( x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_3_0)), [1]), element(update(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_3_0)), [6]))), [i___1]) >= spark__unsigned__u64__first) and (element(update(update(update( x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_3_0)), [1], bit__xor(element(update(update( x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_3_0)), [1]), element(update(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_3_0)), [6]))), [i___1]) <= spark__unsigned__u64__last))) . For path(s) from assertion of line 371 to run-time check associated with statement of line 373: procedure_round_4_6. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 7 >= spark__crypto__i8__first . C4: 7 <= spark__crypto__i8__last . C5: 0 >= spark__crypto__i8__first . C6: 0 <= spark__crypto__i8__last . C7: 0 >= spark__crypto__i8__first . C8: 0 <= spark__crypto__i8__last . For path(s) from assertion of line 371 to run-time check associated with statement of line 374: procedure_round_4_7. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . -> C1: r_512_3_1 >= spark__unsigned__shift_count__first . C2: r_512_3_1 <= spark__unsigned__shift_count__last . 
C3: element(update(x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . C4: element(update(x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . C5: 7 >= spark__crypto__i8__first . C6: 7 <= spark__crypto__i8__last . C7: 7 >= spark__crypto__i8__first . C8: 7 <= spark__crypto__i8__last . For path(s) from assertion of line 371 to run-time check associated with statement of line 374: procedure_round_4_8. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . H11: r_512_3_1 >= spark__unsigned__shift_count__first . H12: r_512_3_1 <= spark__unsigned__shift_count__last . H13: element(update(x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . H14: element(update(x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . H15: 7 >= spark__crypto__i8__first . H16: 7 <= spark__crypto__i8__last . H17: 7 >= spark__crypto__i8__first . H18: 7 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_3_1) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_3_1) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_3_1) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_3_1) <= spark__unsigned__u64__last . For path(s) from assertion of line 371 to run-time check associated with statement of line 375: procedure_round_4_9. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . H11: r_512_3_1 >= spark__unsigned__shift_count__first . H12: r_512_3_1 <= spark__unsigned__shift_count__last . H13: element(update(x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . H15: 7 >= spark__crypto__i8__first . H16: 7 <= spark__crypto__i8__last . H17: 7 >= spark__crypto__i8__first . H18: 7 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_3_1) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_3_1) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_3_1) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_3_1) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_3_1)), [7]), element(update(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_3_1)), [0])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_3_1)), [7]), element(update(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_3_1)), [0])) <= spark__unsigned__u64__last . C3: 0 >= spark__crypto__i8__first . C4: 0 <= spark__crypto__i8__last . C5: 7 >= spark__crypto__i8__first . C6: 7 <= spark__crypto__i8__last . C7: 7 >= spark__crypto__i8__first . C8: 7 <= spark__crypto__i8__last . For path(s) from assertion of line 371 to assertion of line 377: procedure_round_4_10. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . H11: r_512_3_1 >= spark__unsigned__shift_count__first . H12: r_512_3_1 <= spark__unsigned__shift_count__last . H13: element(update(x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . H15: 7 >= spark__crypto__i8__first . H16: 7 <= spark__crypto__i8__last . H17: 7 >= spark__crypto__i8__first . H18: 7 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_3_1) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_3_1) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_3_1) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_3_1) <= spark__unsigned__u64__last . H23: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_3_1)), [7]), element(update(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_3_1)), [0])) >= spark__unsigned__u64__first . H24: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_3_1)), [7]), element(update(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_3_1)), [0])) <= spark__unsigned__u64__last . H25: 0 >= spark__crypto__i8__first . H26: 0 <= spark__crypto__i8__last . H27: 7 >= spark__crypto__i8__first . H28: 7 <= spark__crypto__i8__last . H29: 7 >= spark__crypto__i8__first . H30: 7 <= spark__crypto__i8__last . -> C1: true . 
C2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(update(update(update( x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_3_1)), [7], bit__xor(element(update(update( x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_3_1)), [7]), element(update(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_3_1)), [0]))), [i___1]) >= spark__unsigned__u64__first) and (element(update(update(update( x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_3_1)), [7], bit__xor(element(update(update( x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_3_1)), [7]), element(update(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_3_1)), [0]))), [i___1]) <= spark__unsigned__u64__last))) . For path(s) from assertion of line 377 to run-time check associated with statement of line 379: procedure_round_4_11. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 5 >= spark__crypto__i8__first . C4: 5 <= spark__crypto__i8__last . C5: 2 >= spark__crypto__i8__first . C6: 2 <= spark__crypto__i8__last . C7: 2 >= spark__crypto__i8__first . C8: 2 <= spark__crypto__i8__last . For path(s) from assertion of line 377 to run-time check associated with statement of line 380: procedure_round_4_12. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . -> C1: r_512_3_2 >= spark__unsigned__shift_count__first . C2: r_512_3_2 <= spark__unsigned__shift_count__last . 
C3: element(update(x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . C4: element(update(x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . C5: 5 >= spark__crypto__i8__first . C6: 5 <= spark__crypto__i8__last . C7: 5 >= spark__crypto__i8__first . C8: 5 <= spark__crypto__i8__last . For path(s) from assertion of line 377 to run-time check associated with statement of line 380: procedure_round_4_13. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . H11: r_512_3_2 >= spark__unsigned__shift_count__first . H12: r_512_3_2 <= spark__unsigned__shift_count__last . H13: element(update(x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . H14: element(update(x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . H15: 5 >= spark__crypto__i8__first . H16: 5 <= spark__crypto__i8__last . H17: 5 >= spark__crypto__i8__first . H18: 5 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_3_2) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_3_2) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_3_2) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_3_2) <= spark__unsigned__u64__last . For path(s) from assertion of line 377 to run-time check associated with statement of line 381: procedure_round_4_14. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . H11: r_512_3_2 >= spark__unsigned__shift_count__first . H12: r_512_3_2 <= spark__unsigned__shift_count__last . H13: element(update(x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . H15: 5 >= spark__crypto__i8__first . H16: 5 <= spark__crypto__i8__last . H17: 5 >= spark__crypto__i8__first . H18: 5 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_3_2) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_3_2) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_3_2) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_3_2) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_3_2)), [5]), element(update(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_3_2)), [2])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_3_2)), [5]), element(update(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_3_2)), [2])) <= spark__unsigned__u64__last . C3: 2 >= spark__crypto__i8__first . C4: 2 <= spark__crypto__i8__last . C5: 5 >= spark__crypto__i8__first . C6: 5 <= spark__crypto__i8__last . C7: 5 >= spark__crypto__i8__first . C8: 5 <= spark__crypto__i8__last . For path(s) from assertion of line 377 to assertion of line 383: procedure_round_4_15. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . H11: r_512_3_2 >= spark__unsigned__shift_count__first . H12: r_512_3_2 <= spark__unsigned__shift_count__last . H13: element(update(x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . H15: 5 >= spark__crypto__i8__first . H16: 5 <= spark__crypto__i8__last . H17: 5 >= spark__crypto__i8__first . H18: 5 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_3_2) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_3_2) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_3_2) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_3_2) <= spark__unsigned__u64__last . H23: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_3_2)), [5]), element(update(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_3_2)), [2])) >= spark__unsigned__u64__first . H24: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_3_2)), [5]), element(update(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_3_2)), [2])) <= spark__unsigned__u64__last . H25: 2 >= spark__crypto__i8__first . H26: 2 <= spark__crypto__i8__last . H27: 5 >= spark__crypto__i8__first . H28: 5 <= spark__crypto__i8__last . H29: 5 >= spark__crypto__i8__first . H30: 5 <= spark__crypto__i8__last . -> C1: true . 
C2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(update(update(update( x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_3_2)), [5], bit__xor(element(update(update( x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_3_2)), [5]), element(update(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_3_2)), [2]))), [i___1]) >= spark__unsigned__u64__first) and (element(update(update(update( x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_3_2)), [5], bit__xor(element(update(update( x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_3_2)), [5]), element(update(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_3_2)), [2]))), [i___1]) <= spark__unsigned__u64__last))) . For path(s) from assertion of line 383 to run-time check associated with statement of line 385: procedure_round_4_16. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 3 >= spark__crypto__i8__first . C4: 3 <= spark__crypto__i8__last . C5: 4 >= spark__crypto__i8__first . C6: 4 <= spark__crypto__i8__last . C7: 4 >= spark__crypto__i8__first . C8: 4 <= spark__crypto__i8__last . For path(s) from assertion of line 383 to run-time check associated with statement of line 386: procedure_round_4_17. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . -> C1: r_512_3_3 >= spark__unsigned__shift_count__first . C2: r_512_3_3 <= spark__unsigned__shift_count__last . 
C3: element(update(x, [4], (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . C4: element(update(x, [4], (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . C5: 3 >= spark__crypto__i8__first . C6: 3 <= spark__crypto__i8__last . C7: 3 >= spark__crypto__i8__first . C8: 3 <= spark__crypto__i8__last . For path(s) from assertion of line 383 to run-time check associated with statement of line 386: procedure_round_4_18. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . H11: r_512_3_3 >= spark__unsigned__shift_count__first . H12: r_512_3_3 <= spark__unsigned__shift_count__last . H13: element(update(x, [4], (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . H14: element(update(x, [4], (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . H15: 3 >= spark__crypto__i8__first . H16: 3 <= spark__crypto__i8__last . H17: 3 >= spark__crypto__i8__first . H18: 3 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_3_3) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_3_3) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_3_3) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_3_3) <= spark__unsigned__u64__last . For path(s) from assertion of line 383 to run-time check associated with statement of line 387: procedure_round_4_19. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . H11: r_512_3_3 >= spark__unsigned__shift_count__first . H12: r_512_3_3 <= spark__unsigned__shift_count__last . H13: element(update(x, [4], (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [4], (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . H15: 3 >= spark__crypto__i8__first . H16: 3 <= spark__crypto__i8__last . H17: 3 >= spark__crypto__i8__first . H18: 3 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_3_3) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_3_3) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_3_3) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_3_3) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_3_3)), [3]), element(update(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_3_3)), [4])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_3_3)), [3]), element(update(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_3_3)), [4])) <= spark__unsigned__u64__last . C3: 4 >= spark__crypto__i8__first . C4: 4 <= spark__crypto__i8__last . C5: 3 >= spark__crypto__i8__first . C6: 3 <= spark__crypto__i8__last . C7: 3 >= spark__crypto__i8__first . C8: 3 <= spark__crypto__i8__last . For path(s) from assertion of line 383 to finish: procedure_round_4_20. *** true . /* trivially true VC removed by Examiner */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_6.vct0000644000175000017500000000000011712513676031641 0ustar eugeneugenspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_1.siv0000644000175000017500000000710411712513676031655 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Round_1 For path(s) from start to run-time check associated with statement of line 275: procedure_round_1_1. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 276: procedure_round_1_2. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 276: procedure_round_1_3. *** true . 
/* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 277: procedure_round_1_4. *** true . /* all conclusions proved */ For path(s) from start to assertion of line 280: procedure_round_1_5. *** true . /* all conclusions proved */ For path(s) from assertion of line 280 to run-time check associated with statement of line 282: procedure_round_1_6. *** true . /* all conclusions proved */ For path(s) from assertion of line 280 to run-time check associated with statement of line 283: procedure_round_1_7. *** true . /* all conclusions proved */ For path(s) from assertion of line 280 to run-time check associated with statement of line 283: procedure_round_1_8. *** true . /* all conclusions proved */ For path(s) from assertion of line 280 to run-time check associated with statement of line 284: procedure_round_1_9. *** true . /* all conclusions proved */ For path(s) from assertion of line 280 to assertion of line 286: procedure_round_1_10. *** true . /* all conclusions proved */ For path(s) from assertion of line 286 to run-time check associated with statement of line 288: procedure_round_1_11. *** true . /* all conclusions proved */ For path(s) from assertion of line 286 to run-time check associated with statement of line 289: procedure_round_1_12. *** true . /* all conclusions proved */ For path(s) from assertion of line 286 to run-time check associated with statement of line 289: procedure_round_1_13. *** true . /* all conclusions proved */ For path(s) from assertion of line 286 to run-time check associated with statement of line 290: procedure_round_1_14. *** true . /* all conclusions proved */ For path(s) from assertion of line 286 to assertion of line 292: procedure_round_1_15. *** true . /* all conclusions proved */ For path(s) from assertion of line 292 to run-time check associated with statement of line 294: procedure_round_1_16. *** true . /* all conclusions proved */ For path(s) from assertion of line 292 to run-time check associated with statement of line 295: procedure_round_1_17. *** true . /* all conclusions proved */ For path(s) from assertion of line 292 to run-time check associated with statement of line 295: procedure_round_1_18. *** true . /* all conclusions proved */ For path(s) from assertion of line 292 to run-time check associated with statement of line 296: procedure_round_1_19. *** true . /* all conclusions proved */ For path(s) from assertion of line 292 to finish: procedure_round_1_20. *** true . /* all conclusions proved */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/inject_key.slg0000644000175000017500000024171011712513676032421 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Inject_Key @@@@@@@@@@ VC: procedure_inject_key_1. 
@@@@@@@@@@ %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H6 on reading formula in, to give: %%% H6: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) %%% Simplified C3 on reading formula in, to give: %%% C3: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) %%% Simplified C4 on reading formula in, to give: %%% C4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) %%% Simplified C5 on reading formula in, to give: %%% C5: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) %%% Simplified C6 on reading formula in, to give: %%% C6: true %%% Simplified C8 on reading formula in, to give: %%% C8: true *** Proved C1: r >= spark__unsigned__u64__first using hypothesis H2. *** Proved C2: r <= spark__unsigned__u64__last using hypothesis H3. *** Proved C3: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) using hypothesis H4. *** Proved C4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) using hypothesis H5. *** Proved C5: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) using hypothesis H6. *** Proved C6: true *** Proved C8: true -S- Applied substitution rule inject_key_rules(47). This was achieved by replacing all occurrences of injection_range__first by: 0. New C7: 0 <= injection_range__last New C9: 0 <= injection_range__last -S- Applied substitution rule inject_key_rules(48). This was achieved by replacing all occurrences of injection_range__last by: 7. New C7: true New C9: true *** Proved C7: true *** Proved C9: true *** PROVED VC. @@@@@@@@@@ VC: procedure_inject_key_2. 
@@@@@@@@@@ %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H8 has been replaced by "true". (It is already present, as H6). --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H21 has been replaced by "true". (It is already present, as H19). --- Hypothesis H22 has been replaced by "true". (It is already present, as H20). %%% Simplified C3 on reading formula in, to give: %%% C3: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) %%% Simplified C4 on reading formula in, to give: %%% C4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) %%% Simplified C5 on reading formula in, to give: %%% C5: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element( ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [i___1]) and element(update(x, [ loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [i___1]) <= spark__unsigned__u64__last) *** Proved C1: r >= spark__unsigned__u64__first using hypothesis H1. *** Proved C2: r <= spark__unsigned__u64__last using hypothesis H2. *** Proved C3: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) using hypothesis H3. *** Proved C4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) using hypothesis H4. 
*** Proved C5: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element( ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [i___1]) and element(update(x, [ loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [i___1]) <= spark__unsigned__u64__last) using hypotheses H5, H10, H11, H19 & H20. *** Proved C6: loop__1__i + 1 >= injection_range__first using hypothesis H6. *** Proved C8: loop__1__i + 1 >= injection_range__first using hypothesis H6. -S- Applied substitution rule inject_key_rules(48). This was achieved by replacing all occurrences of injection_range__last by: 7. New H7: loop__1__i <= 7 New H23: not loop__1__i = 7 New C7: loop__1__i <= 6 New C9: loop__1__i <= 6 *** Proved C7: loop__1__i <= 6 using hypotheses H7 & H23. *** Proved C9: loop__1__i <= 6 using hypotheses H7 & H23. *** PROVED VC. @@@@@@@@@@ VC: procedure_inject_key_3. @@@@@@@@@@ %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H8 has been replaced by "true". (It is already present, as H6). --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). -S- Applied substitution rule inject_key_rules(21). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H1: r >= 0 New H3: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) New H4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) New H5: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= 0 New C8: loop__1__i >= 0 -S- Applied substitution rule inject_key_rules(22). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H2: r <= 18446744073709551615 New H3: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= 18446744073709551615) New H4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) New H5: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New C9: loop__1__i <= 18446744073709551615 -S- Applied substitution rule inject_key_rules(37). This was achieved by replacing all occurrences of spark__crypto__i9__first by: 0. New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [ i___1]) <= 18446744073709551615) New C3: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule inject_key_rules(38). This was achieved by replacing all occurrences of spark__crypto__i9__last by: 8. New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 8 -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= 18446744073709551615) New C4: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus <= 8 -S- Applied substitution rule inject_key_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New C5: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule inject_key_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New C6: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus <= 2147483647 -S- Applied substitution rule inject_key_rules(32). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C10: loop__1__i >= 0 New C12: loop__1__i >= 0 -S- Applied substitution rule inject_key_rules(33). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C11: loop__1__i <= 7 New C13: loop__1__i <= 7 -S- Applied substitution rule inject_key_rules(2). This was achieved by replacing all occurrences of ks_modulus by: 9. 
New C7: true New C1: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= 0 New C2: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New C3: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus >= 0 New C4: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus <= 8 New C5: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus >= 0 New C6: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus <= 2147483647 -S- Applied substitution rule inject_key_rules(19). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616 >= 0 New C2: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616 <= 18446744073709551615 New C3: (r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616 >= 0 New C4: (r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616 <= 8 New C5: (r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616 >= 0 New C6: (r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616 <= 2147483647 -S- Applied substitution rule inject_key_rules(27). This was achieved by replacing all occurrences of spark__crypto__i3__first by: 0. New H4: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule inject_key_rules(28). This was achieved by replacing all occurrences of spark__crypto__i3__last by: 2. New H4: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 2 -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) -S- Applied substitution rule inject_key_rules(47). This was achieved by replacing all occurrences of injection_range__first by: 0. New H6: loop__1__i >= 0 -S- Applied substitution rule inject_key_rules(48). This was achieved by replacing all occurrences of injection_range__last by: 7. New H7: loop__1__i <= 7 *** Proved C8: loop__1__i >= 0 using hypothesis H6. *** Proved C9: loop__1__i <= 18446744073709551615 using hypothesis H7. *** Proved C10: loop__1__i >= 0 using hypothesis H6. *** Proved C12: loop__1__i >= 0 using hypothesis H6. *** Proved C11: loop__1__i <= 7 using hypothesis H7. *** Proved C13: loop__1__i <= 7 using hypothesis H7. *** Proved C7: true *** Proved C1: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C3: (r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616 >= 0 *** Proved C5: (r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616 >= 0 --- Eliminated hypothesis H8 (true-hypothesis). 
--- Eliminated hypothesis H9 (true-hypothesis). *** Proved C4: (r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616 <= 8 *** Proved C6: (r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616 <= 2147483647 *** PROVED VC. @@@@@@@@@@ VC: procedure_inject_key_4. @@@@@@@@@@ %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H8 has been replaced by "true". (It is already present, as H6). --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H21 has been replaced by "true". (It is already present, as H19). --- Hypothesis H22 has been replaced by "true". (It is already present, as H20). %%% Simplified C7 on reading formula in, to give: %%% C7: true *** Proved C7: true -S- Applied substitution rule inject_key_rules(21). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H1: r >= 0 New H3: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) New H4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) New H5: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H10: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= 0 New H17: loop__1__i >= 0 New C1: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3]) + element(ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule inject_key_rules(22). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H2: r <= 18446744073709551615 New H11: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H18: loop__1__i <= 18446744073709551615 New H3: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= 18446744073709551615) New H4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) New H5: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3]) + element(ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule inject_key_rules(27). This was achieved by replacing all occurrences of spark__crypto__i3__first by: 0. New H4: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [ i___1]) <= 18446744073709551615) New C3: r mod 3 mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule inject_key_rules(28). This was achieved by replacing all occurrences of spark__crypto__i3__last by: 2. New H4: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 2 -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) New C4: r mod 3 mod interfaces__unsigned_64__modulus <= 2 -S- Applied substitution rule inject_key_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New H14: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus >= 0 New C5: r mod 3 mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule inject_key_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H15: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus <= 2147483647 New C6: r mod 3 mod interfaces__unsigned_64__modulus <= 2147483647 -S- Applied substitution rule inject_key_rules(32). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H19: loop__1__i >= 0 New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C8: wcnt >= 3 New C12: wcnt >= 3 -S- Applied substitution rule inject_key_rules(1). This was achieved by replacing all occurrences of wcnt by: 8. 
New C9: 5 <= spark__crypto__i8__last New C10: 5 >= integer__base__first New C11: 5 <= integer__base__last New C13: 5 <= spark__crypto__i8__last New C14: 5 >= integer__base__first New C15: 5 <= integer__base__last New C1: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [5]) + element(ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= 0 New C2: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [5]) + element(ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New C8: true New C12: true -S- Applied substitution rule inject_key_rules(33). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H20: loop__1__i <= 7 New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C9: true New C13: true -S- Applied substitution rule inject_key_rules(6). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New C10: true New C14: true -S- Applied substitution rule inject_key_rules(7). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New C11: true New C15: true *** Proved C8: true *** Proved C12: true *** Proved C9: true *** Proved C13: true *** Proved C10: true *** Proved C14: true *** Proved C11: true *** Proved C15: true -S- Applied substitution rule inject_key_rules(2). This was achieved by replacing all occurrences of ks_modulus by: 9. New H10: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= 0 New H11: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H12: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus >= spark__crypto__i9__first New H13: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus <= spark__crypto__i9__last New H14: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus >= 0 New H15: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus <= 2147483647 New H16: true New C1: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [5]) + element(ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= 0 New C2: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [5]) + element(ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule inject_key_rules(19). 
This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H10: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616 >= 0 New H11: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616 <= 18446744073709551615 New H12: (r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616 >= spark__crypto__i9__first New H13: (r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616 <= spark__crypto__i9__last New H14: (r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616 >= 0 New H15: (r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616 <= 2147483647 New C3: r mod 3 mod 18446744073709551616 >= 0 New C4: r mod 3 mod 18446744073709551616 <= 2 New C5: r mod 3 mod 18446744073709551616 >= 0 New C6: r mod 3 mod 18446744073709551616 <= 2147483647 New C1: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616), [5]) + element(ts, [r mod 3 mod 18446744073709551616])) mod 18446744073709551616 >= 0 New C2: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616), [5]) + element(ts, [r mod 3 mod 18446744073709551616])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule inject_key_rules(37). This was achieved by replacing all occurrences of spark__crypto__i9__first by: 0. New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [ i___1]) <= 18446744073709551615) New H12: (r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616 >= 0 -S- Applied substitution rule inject_key_rules(38). This was achieved by replacing all occurrences of spark__crypto__i9__last by: 8. New H13: (r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616 <= 8 New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 8 -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= 18446744073709551615) -S- Applied substitution rule inject_key_rules(47). This was achieved by replacing all occurrences of injection_range__first by: 0. New H6: loop__1__i >= 0 -S- Applied substitution rule inject_key_rules(48). This was achieved by replacing all occurrences of injection_range__last by: 7. New H7: loop__1__i <= 7 New H23: loop__1__i = 7 *** Proved C3: r mod 3 mod 18446744073709551616 >= 0 *** Proved C5: r mod 3 mod 18446744073709551616 >= 0 *** Proved C1: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616), [5]) + element(ts, [r mod 3 mod 18446744073709551616])) mod 18446744073709551616 >= 0 *** Proved C2: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616), [5]) + element(ts, [r mod 3 mod 18446744073709551616])) mod 18446744073709551616 <= 18446744073709551615 --- Eliminated hypothesis H8 (true-hypothesis). --- Eliminated hypothesis H9 (true-hypothesis). --- Eliminated hypothesis H21 (true-hypothesis). --- Eliminated hypothesis H22 (true-hypothesis). 
--- Eliminated hypothesis H16 (true-hypothesis). --- Eliminated hypothesis H19 (duplicate of H17). --- Eliminated hypothesis H14 (duplicate of H12). --- Eliminated hypothesis H17 (duplicate of H6). --- Eliminated hypothesis H20 (duplicate of H7). --- Eliminated hypothesis H6 (redundant, given H23). --- Eliminated hypothesis H7 (redundant, given H23). --- Eliminated hypothesis H15 (redundant, given H13). --- Eliminated hypothesis H18 (redundant, given H23). -S- Eliminated hypothesis H23. This was achieved by replacing all occurrences of loop__1__i by: 7. New H10: (element(x, [7]) + element(ks, [(r + 7) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616 >= 0 New H11: (element(x, [7]) + element(ks, [(r + 7) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616 <= 18446744073709551615 New H12: (r + 7) mod 18446744073709551616 mod 9 mod 18446744073709551616 >= 0 New H13: (r + 7) mod 18446744073709551616 mod 9 mod 18446744073709551616 <= 8 *** Proved C4: r mod 3 mod 18446744073709551616 <= 2 *** Proved C6: r mod 3 mod 18446744073709551616 <= 2147483647 *** PROVED VC. @@@@@@@@@@ VC: procedure_inject_key_5. @@@@@@@@@@ %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H8 has been replaced by "true". (It is already present, as H6). --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H21 has been replaced by "true". (It is already present, as H19). --- Hypothesis H22 has been replaced by "true". (It is already present, as H20). %%% Simplified H30 on reading formula in, to give: %%% H30: true --- Hypothesis H35 has been replaced by "true". (It is already present, as H31). --- Hypothesis H36 has been replaced by "true". (It is already present, as H32). --- Hypothesis H37 has been replaced by "true". (It is already present, as H33). --- Hypothesis H38 has been replaced by "true". (It is already present, as H34). 
%%% Simplified C1 on reading formula in, to give: %%% C1: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 2]) + element(ts, [(r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 2]) + element(ts, [(r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last %%% Simplified C7 on reading formula in, to give: %%% C7: true *** Proved C7: true -S- Applied substitution rule inject_key_rules(21). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H1: r >= 0 New H3: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) New H4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) New H5: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H10: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= 0 New H17: loop__1__i >= 0 New H24: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3]) + element(ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= 0 New C1: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 2]) + element(ts, [(r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule inject_key_rules(22). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H2: r <= 18446744073709551615 New H11: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H18: loop__1__i <= 18446744073709551615 New H25: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3]) + element(ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H3: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= 18446744073709551615) New H4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) New H5: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 2]) + element(ts, [(r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule inject_key_rules(27). This was achieved by replacing all occurrences of spark__crypto__i3__first by: 0. New H26: r mod 3 mod interfaces__unsigned_64__modulus >= 0 New H4: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [ i___1]) <= 18446744073709551615) New C3: (r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule inject_key_rules(28). This was achieved by replacing all occurrences of spark__crypto__i3__last by: 2. New H27: r mod 3 mod interfaces__unsigned_64__modulus <= 2 New H4: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 2 -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) New C4: (r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus <= 2 -S- Applied substitution rule inject_key_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New H14: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus >= 0 New H28: r mod 3 mod interfaces__unsigned_64__modulus >= 0 New C5: (r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule inject_key_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H15: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus <= 2147483647 New H29: r mod 3 mod interfaces__unsigned_64__modulus <= 2147483647 New C6: (r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus <= 2147483647 -S- Applied substitution rule inject_key_rules(32). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. 
New H19: loop__1__i >= 0 New H31: wcnt >= 3 New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C8: wcnt >= 2 New C12: wcnt >= 2 -S- Applied substitution rule inject_key_rules(1). This was achieved by replacing all occurrences of wcnt by: 8. New H32: 5 <= spark__crypto__i8__last New H33: 5 >= integer__base__first New H34: 5 <= integer__base__last New H24: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [5]) + element(ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= 0 New H25: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [5]) + element(ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H31: true New C9: 6 <= spark__crypto__i8__last New C10: 6 >= integer__base__first New C11: 6 <= integer__base__last New C13: 6 <= spark__crypto__i8__last New C14: 6 >= integer__base__first New C15: 6 <= integer__base__last New C1: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [6]) + element(ts, [(r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= 0 New C2: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [6]) + element(ts, [(r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New C8: true New C12: true -S- Applied substitution rule inject_key_rules(33). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H20: loop__1__i <= 7 New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New H32: true New C9: true New C13: true -S- Applied substitution rule inject_key_rules(6). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H33: true New C10: true New C14: true -S- Applied substitution rule inject_key_rules(7). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H34: true New C11: true New C15: true *** Proved C8: true *** Proved C12: true *** Proved C9: true *** Proved C13: true *** Proved C10: true *** Proved C14: true *** Proved C11: true *** Proved C15: true -S- Applied substitution rule inject_key_rules(2). This was achieved by replacing all occurrences of ks_modulus by: 9. 
New H10: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= 0 New H11: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H12: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus >= spark__crypto__i9__first New H13: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus <= spark__crypto__i9__last New H14: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus >= 0 New H15: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus <= 2147483647 New H16: true New H24: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [5]) + element(ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= 0 New H25: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [5]) + element(ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New C1: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [6]) + element(ts, [(r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= 0 New C2: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [6]) + element(ts, [(r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule inject_key_rules(19). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. 
New H26: r mod 3 mod 18446744073709551616 >= 0 New H27: r mod 3 mod 18446744073709551616 <= 2 New H28: r mod 3 mod 18446744073709551616 >= 0 New H29: r mod 3 mod 18446744073709551616 <= 2147483647 New H10: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616 >= 0 New H11: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616 <= 18446744073709551615 New H12: (r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616 >= spark__crypto__i9__first New H13: (r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616 <= spark__crypto__i9__last New H14: (r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616 >= 0 New H15: (r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616 <= 2147483647 New H24: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616), [5]) + element(ts, [r mod 3 mod 18446744073709551616])) mod 18446744073709551616 >= 0 New H25: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616), [5]) + element(ts, [r mod 3 mod 18446744073709551616])) mod 18446744073709551616 <= 18446744073709551615 New C3: (r + 1) mod 18446744073709551616 mod 3 mod 18446744073709551616 >= 0 New C4: (r + 1) mod 18446744073709551616 mod 3 mod 18446744073709551616 <= 2 New C5: (r + 1) mod 18446744073709551616 mod 3 mod 18446744073709551616 >= 0 New C6: (r + 1) mod 18446744073709551616 mod 3 mod 18446744073709551616 <= 2147483647 New C1: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616), [6]) + element(ts, [(r + 1) mod 18446744073709551616 mod 3 mod 18446744073709551616])) mod 18446744073709551616 >= 0 New C2: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616), [6]) + element(ts, [(r + 1) mod 18446744073709551616 mod 3 mod 18446744073709551616])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule inject_key_rules(37). This was achieved by replacing all occurrences of spark__crypto__i9__first by: 0. New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [ i___1]) <= 18446744073709551615) New H12: (r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616 >= 0 -S- Applied substitution rule inject_key_rules(38). This was achieved by replacing all occurrences of spark__crypto__i9__last by: 8. New H13: (r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616 <= 8 New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 8 -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= 18446744073709551615) -S- Applied substitution rule inject_key_rules(47). This was achieved by replacing all occurrences of injection_range__first by: 0. New H6: loop__1__i >= 0 -S- Applied substitution rule inject_key_rules(48). This was achieved by replacing all occurrences of injection_range__last by: 7. 
New H7: loop__1__i <= 7 New H23: loop__1__i = 7 *** Proved C3: (r + 1) mod 18446744073709551616 mod 3 mod 18446744073709551616 >= 0 *** Proved C5: (r + 1) mod 18446744073709551616 mod 3 mod 18446744073709551616 >= 0 *** Proved C1: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616), [6]) + element(ts, [(r + 1) mod 18446744073709551616 mod 3 mod 18446744073709551616])) mod 18446744073709551616 >= 0 *** Proved C2: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616), [6]) + element(ts, [(r + 1) mod 18446744073709551616 mod 3 mod 18446744073709551616])) mod 18446744073709551616 <= 18446744073709551615 --- Eliminated hypothesis H8 (true-hypothesis). --- Eliminated hypothesis H9 (true-hypothesis). --- Eliminated hypothesis H21 (true-hypothesis). --- Eliminated hypothesis H22 (true-hypothesis). --- Eliminated hypothesis H30 (true-hypothesis). --- Eliminated hypothesis H31 (true-hypothesis). --- Eliminated hypothesis H32 (true-hypothesis). --- Eliminated hypothesis H33 (true-hypothesis). --- Eliminated hypothesis H34 (true-hypothesis). --- Eliminated hypothesis H35 (true-hypothesis). --- Eliminated hypothesis H36 (true-hypothesis). --- Eliminated hypothesis H37 (true-hypothesis). --- Eliminated hypothesis H38 (true-hypothesis). --- Eliminated hypothesis H16 (true-hypothesis). --- Eliminated hypothesis H19 (duplicate of H17). --- Eliminated hypothesis H28 (duplicate of H26). --- Eliminated hypothesis H14 (duplicate of H12). --- Eliminated hypothesis H17 (duplicate of H6). --- Eliminated hypothesis H20 (duplicate of H7). --- Eliminated hypothesis H6 (redundant, given H23). --- Eliminated hypothesis H7 (redundant, given H23). --- Eliminated hypothesis H15 (redundant, given H13). --- Eliminated hypothesis H18 (redundant, given H23). --- Eliminated hypothesis H29 (redundant, given H27). -S- Eliminated hypothesis H23. This was achieved by replacing all occurrences of loop__1__i by: 7. New H10: (element(x, [7]) + element(ks, [(r + 7) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616 >= 0 New H11: (element(x, [7]) + element(ks, [(r + 7) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616 <= 18446744073709551615 New H24: (element(x, [5]) + element(ts, [r mod 3 mod 18446744073709551616] )) mod 18446744073709551616 >= 0 New H25: (element(x, [5]) + element(ts, [r mod 3 mod 18446744073709551616] )) mod 18446744073709551616 <= 18446744073709551615 New H12: (r + 7) mod 18446744073709551616 mod 9 mod 18446744073709551616 >= 0 New H13: (r + 7) mod 18446744073709551616 mod 9 mod 18446744073709551616 <= 8 *** Proved C4: (r + 1) mod 18446744073709551616 mod 3 mod 18446744073709551616 <= 2 *** Proved C6: (r + 1) mod 18446744073709551616 mod 3 mod 18446744073709551616 <= 2147483647 *** PROVED VC. @@@@@@@@@@ VC: procedure_inject_key_6. 
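A note on the arithmetic behind the VC just proved (procedure_inject_key_5): conclusions C3 .. C6 all rest on the same fact, namely that for any 64-bit unsigned r the value ((r + 1) mod 2**64) mod 3 lies in 0 .. 2, and the outer mod 2**64 of the VC leaves that unchanged; the value is therefore a valid spark__crypto__i3 index into ts (0 .. 2) and trivially within Natural. The minimal Ada sketch below spot-checks this fact on a few sample values, including the wrap-around case; it assumes a GNAT/Ada 2012 toolchain, and its names (Check_Mod_Range, U64, Samples) are illustrative only and do not appear in the SPARK sources.

with Ada.Text_IO;

procedure Check_Mod_Range is
   --  Illustrative only: U64 mirrors Interfaces.Unsigned_64, so "+" wraps
   --  modulo 2**64, matching interfaces__unsigned_64__modulus in the VCs.
   type U64 is mod 2**64;

   Samples : constant array (Positive range 1 .. 4) of U64 :=
     (0, 1, 2**32 + 7, U64'Last);   --  U64'Last + 1 wraps round to 0
begin
   for R of Samples loop
      declare
         --  The ts index checked in procedure_inject_key_5; the trailing
         --  "mod 2**64" of the VC is a no-op here because the result of
         --  "mod 3" is already below 3.
         Index : constant U64 := (R + 1) mod 3;
      begin
         if Index > 2 then
            Ada.Text_IO.Put_Line ("counterexample (should never happen)");
         end if;
      end;
   end loop;
   Ada.Text_IO.Put_Line ("all sampled ts indices lie in 0 .. 2");
end Check_Mod_Range;

This is the same reduction visible in the transcript once substitution rules inject_key_rules(28) (spark__crypto__i3__last = 2) and inject_key_rules(19) (interfaces__unsigned_64__modulus = 18446744073709551616) have been applied, after which the Simplifier discharges C4 and C6.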
@@@@@@@@@@ %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H8 has been replaced by "true". (It is already present, as H6). --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H21 has been replaced by "true". (It is already present, as H19). --- Hypothesis H22 has been replaced by "true". (It is already present, as H20). %%% Simplified H30 on reading formula in, to give: %%% H30: true --- Hypothesis H35 has been replaced by "true". (It is already present, as H31). --- Hypothesis H36 has been replaced by "true". (It is already present, as H32). --- Hypothesis H37 has been replaced by "true". (It is already present, as H33). --- Hypothesis H38 has been replaced by "true". (It is already present, as H34). %%% Simplified H39 on reading formula in, to give: %%% H39: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 2]) + element(ts, [(r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first %%% Simplified H40 on reading formula in, to give: %%% H40: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 2]) + element(ts, [(r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last --- Hypothesis H45 has been replaced by "true". (It is already present, as H30). --- Hypothesis H50 has been replaced by "true". (It is already present, as H46). --- Hypothesis H51 has been replaced by "true". (It is already present, as H47). --- Hypothesis H52 has been replaced by "true". (It is already present, as H48). --- Hypothesis H53 has been replaced by "true". (It is already present, as H49). 
%%% Simplified C1 on reading formula in, to give: %%% C1: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 1]) + r) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 1]) + r) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last -S- Applied substitution rule inject_key_rules(21). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H1: r >= 0 New H3: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) New H4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) New H5: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H10: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= 0 New H17: loop__1__i >= 0 New H24: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3]) + element(ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= 0 New H39: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 2]) + element(ts, [(r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= 0 New C1: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 1]) + r) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule inject_key_rules(22). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H2: r <= 18446744073709551615 New H11: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H18: loop__1__i <= 18446744073709551615 New H25: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3]) + element(ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H40: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 2]) + element(ts, [(r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H3: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= 18446744073709551615) New H4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) New H5: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 1]) + r) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule inject_key_rules(32). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H19: loop__1__i >= 0 New H31: wcnt >= 3 New H46: wcnt >= 2 New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: wcnt >= 1 New C7: wcnt >= 1 -S- Applied substitution rule inject_key_rules(1). This was achieved by replacing all occurrences of wcnt by: 8. 
New H32: 5 <= spark__crypto__i8__last New H33: 5 >= integer__base__first New H34: 5 <= integer__base__last New H47: 6 <= spark__crypto__i8__last New H48: 6 >= integer__base__first New H49: 6 <= integer__base__last New H24: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [5]) + element(ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= 0 New H39: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [6]) + element(ts, [(r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= 0 New H25: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [5]) + element(ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H40: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [6]) + element(ts, [(r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H31: true New H46: true New C4: 7 <= spark__crypto__i8__last New C5: 7 >= integer__base__first New C6: 7 <= integer__base__last New C8: 7 <= spark__crypto__i8__last New C9: 7 >= integer__base__first New C10: 7 <= integer__base__last New C1: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [7]) + r) mod interfaces__unsigned_64__modulus >= 0 New C2: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [7]) + r) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New C3: true New C7: true -S- Applied substitution rule inject_key_rules(33). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H20: loop__1__i <= 7 New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New H32: true New H47: true New C4: true New C8: true -S- Applied substitution rule inject_key_rules(6). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H33: true New H48: true New C5: true New C9: true -S- Applied substitution rule inject_key_rules(7). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H34: true New H49: true New C6: true New C10: true *** Proved C3: true *** Proved C7: true *** Proved C4: true *** Proved C8: true *** Proved C5: true *** Proved C9: true *** Proved C6: true *** Proved C10: true -S- Applied substitution rule inject_key_rules(2). 
This was achieved by replacing all occurrences of ks_modulus by: 9. New H10: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= 0 New H11: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H12: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus >= spark__crypto__i9__first New H13: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus <= spark__crypto__i9__last New H14: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus >= natural__first New H15: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus <= natural__last New H16: true New H24: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [5]) + element(ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= 0 New H25: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [5]) + element(ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H39: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [6]) + element(ts, [(r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= 0 New H40: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [6]) + element(ts, [(r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New C1: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [7]) + r) mod interfaces__unsigned_64__modulus >= 0 New C2: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [7]) + r) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule inject_key_rules(9). This was achieved by replacing all occurrences of natural__first by: 0. New H28: r mod 3 mod interfaces__unsigned_64__modulus >= 0 New H43: (r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus >= 0 New H14: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule inject_key_rules(10). This was achieved by replacing all occurrences of natural__last by: 2147483647. 
New H29: r mod 3 mod interfaces__unsigned_64__modulus <= 2147483647 New H44: (r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus <= 2147483647 New H15: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod 9 mod interfaces__unsigned_64__modulus <= 2147483647 -S- Applied substitution rule inject_key_rules(19). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H26: r mod 3 mod 18446744073709551616 >= spark__crypto__i3__first New H27: r mod 3 mod 18446744073709551616 <= spark__crypto__i3__last New H41: (r + 1) mod 18446744073709551616 mod 3 mod 18446744073709551616 >= spark__crypto__i3__first New H42: (r + 1) mod 18446744073709551616 mod 3 mod 18446744073709551616 <= spark__crypto__i3__last New H10: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616 >= 0 New H11: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616 <= 18446744073709551615 New H12: (r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616 >= spark__crypto__i9__first New H13: (r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616 <= spark__crypto__i9__last New H24: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616), [5]) + element(ts, [r mod 3 mod 18446744073709551616])) mod 18446744073709551616 >= 0 New H25: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616), [5]) + element(ts, [r mod 3 mod 18446744073709551616])) mod 18446744073709551616 <= 18446744073709551615 New H39: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616), [6]) + element(ts, [(r + 1) mod 18446744073709551616 mod 3 mod 18446744073709551616])) mod 18446744073709551616 >= 0 New H40: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616), [6]) + element(ts, [(r + 1) mod 18446744073709551616 mod 3 mod 18446744073709551616])) mod 18446744073709551616 <= 18446744073709551615 New H28: r mod 3 mod 18446744073709551616 >= 0 New H43: (r + 1) mod 18446744073709551616 mod 3 mod 18446744073709551616 >= 0 New H14: (r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616 >= 0 New H29: r mod 3 mod 18446744073709551616 <= 2147483647 New H44: (r + 1) mod 18446744073709551616 mod 3 mod 18446744073709551616 <= 2147483647 New H15: (r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616 <= 2147483647 New C1: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616), [7]) + r) mod 18446744073709551616 >= 0 New C2: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616), [7]) + r) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule inject_key_rules(27). 
This was achieved by replacing all occurrences of spark__crypto__i3__first by: 0. New H4: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [ i___1]) <= 18446744073709551615) New H26: r mod 3 mod 18446744073709551616 >= 0 New H41: (r + 1) mod 18446744073709551616 mod 3 mod 18446744073709551616 >= 0 -S- Applied substitution rule inject_key_rules(28). This was achieved by replacing all occurrences of spark__crypto__i3__last by: 2. New H27: r mod 3 mod 18446744073709551616 <= 2 New H42: (r + 1) mod 18446744073709551616 mod 3 mod 18446744073709551616 <= 2 New H4: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 2 -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) -S- Applied substitution rule inject_key_rules(37). This was achieved by replacing all occurrences of spark__crypto__i9__first by: 0. New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [ i___1]) <= 18446744073709551615) New H12: (r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616 >= 0 -S- Applied substitution rule inject_key_rules(38). This was achieved by replacing all occurrences of spark__crypto__i9__last by: 8. New H13: (r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616 <= 8 New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 8 -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= 18446744073709551615) -S- Applied substitution rule inject_key_rules(47). This was achieved by replacing all occurrences of injection_range__first by: 0. New H6: loop__1__i >= 0 -S- Applied substitution rule inject_key_rules(48). This was achieved by replacing all occurrences of injection_range__last by: 7. New H7: loop__1__i <= 7 New H23: loop__1__i = 7 *** Proved C1: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616), [7]) + r) mod 18446744073709551616 >= 0 *** Proved C2: (element(update(x, [loop__1__i], (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod 18446744073709551616 mod 9 mod 18446744073709551616])) mod 18446744073709551616), [7]) + r) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_inject_key_7. @@@@@@@@@@ *** Proved C1: true *** PROVED VC. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/inject_key.vcg0000644000175000017500000007653411712513676032425 0ustar eugeneugen ******************************************************* Semantic Analysis of SPARK Text Examiner Pro Edition ******************************************************* procedure Skein.Skein_512_Process_Block.Inject_Key For path(s) from start to default assertion of line 260: procedure_inject_key_1. H1: true . H2: r >= spark__unsigned__u64__first . H3: r <= spark__unsigned__u64__last . H4: for_all(i___1: integer, ((i___1 >= spark__crypto__i9__first) and ( i___1 <= spark__crypto__i9__last)) -> ((element( ks, [i___1]) >= spark__unsigned__u64__first) and (element( ks, [i___1]) <= spark__unsigned__u64__last))) . H5: for_all(i___1: integer, ((i___1 >= spark__crypto__i3__first) and ( i___1 <= spark__crypto__i3__last)) -> ((element( ts, [i___1]) >= spark__unsigned__u64__first) and (element( ts, [i___1]) <= spark__unsigned__u64__last))) . 
H6: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: r >= spark__unsigned__u64__first . C2: r <= spark__unsigned__u64__last . C3: for_all(i___1: integer, ((i___1 >= spark__crypto__i9__first) and ( i___1 <= spark__crypto__i9__last)) -> ((element( ks, [i___1]) >= spark__unsigned__u64__first) and (element( ks, [i___1]) <= spark__unsigned__u64__last))) . C4: for_all(i___1: integer, ((i___1 >= spark__crypto__i3__first) and ( i___1 <= spark__crypto__i3__last)) -> ((element( ts, [i___1]) >= spark__unsigned__u64__first) and (element( ts, [i___1]) <= spark__unsigned__u64__last))) . C5: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . C6: injection_range__first >= injection_range__first . C7: injection_range__first <= injection_range__last . C8: injection_range__first >= injection_range__first . C9: injection_range__first <= injection_range__last . For path(s) from default assertion of line 260 to default assertion of line 260: procedure_inject_key_2. H1: r >= spark__unsigned__u64__first . H2: r <= spark__unsigned__u64__last . H3: for_all(i___1: integer, ((i___1 >= spark__crypto__i9__first) and ( i___1 <= spark__crypto__i9__last)) -> ((element( ks, [i___1]) >= spark__unsigned__u64__first) and (element( ks, [i___1]) <= spark__unsigned__u64__last))) . H4: for_all(i___1: integer, ((i___1 >= spark__crypto__i3__first) and ( i___1 <= spark__crypto__i3__last)) -> ((element( ts, [i___1]) >= spark__unsigned__u64__first) and (element( ts, [i___1]) <= spark__unsigned__u64__last))) . H5: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H6: loop__1__i >= injection_range__first . H7: loop__1__i <= injection_range__last . H8: loop__1__i >= injection_range__first . H9: loop__1__i <= injection_range__last . H10: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H11: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H12: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus >= spark__crypto__i9__first . H13: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus <= spark__crypto__i9__last . H14: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus >= natural__first . H15: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus <= natural__last . H16: ks_modulus <> 0 . H17: loop__1__i >= spark__unsigned__u64__first . H18: loop__1__i <= spark__unsigned__u64__last . H19: loop__1__i >= spark__crypto__i8__first . H20: loop__1__i <= spark__crypto__i8__last . H21: loop__1__i >= spark__crypto__i8__first . H22: loop__1__i <= spark__crypto__i8__last . 
H23: not (loop__1__i = injection_range__last) . -> C1: r >= spark__unsigned__u64__first . C2: r <= spark__unsigned__u64__last . C3: for_all(i___1: integer, ((i___1 >= spark__crypto__i9__first) and ( i___1 <= spark__crypto__i9__last)) -> ((element( ks, [i___1]) >= spark__unsigned__u64__first) and (element( ks, [i___1]) <= spark__unsigned__u64__last))) . C4: for_all(i___1: integer, ((i___1 >= spark__crypto__i3__first) and ( i___1 <= spark__crypto__i3__last)) -> ((element( ts, [i___1]) >= spark__unsigned__u64__first) and (element( ts, [i___1]) <= spark__unsigned__u64__last))) . C5: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(update( x, [loop__1__i], (element(x, [loop__1__i]) + element( ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [i___1]) >= spark__unsigned__u64__first) and (element(update( x, [loop__1__i], (element(x, [loop__1__i]) + element( ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [i___1]) <= spark__unsigned__u64__last))) . C6: loop__1__i + 1 >= injection_range__first . C7: loop__1__i + 1 <= injection_range__last . C8: loop__1__i + 1 >= injection_range__first . C9: loop__1__i + 1 <= injection_range__last . For path(s) from default assertion of line 260 to run-time check associated with statement of line 261: procedure_inject_key_3. H1: r >= spark__unsigned__u64__first . H2: r <= spark__unsigned__u64__last . H3: for_all(i___1: integer, ((i___1 >= spark__crypto__i9__first) and ( i___1 <= spark__crypto__i9__last)) -> ((element( ks, [i___1]) >= spark__unsigned__u64__first) and (element( ks, [i___1]) <= spark__unsigned__u64__last))) . H4: for_all(i___1: integer, ((i___1 >= spark__crypto__i3__first) and ( i___1 <= spark__crypto__i3__last)) -> ((element( ts, [i___1]) >= spark__unsigned__u64__first) and (element( ts, [i___1]) <= spark__unsigned__u64__last))) . H5: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H6: loop__1__i >= injection_range__first . H7: loop__1__i <= injection_range__last . H8: loop__1__i >= injection_range__first . H9: loop__1__i <= injection_range__last . -> C1: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus >= spark__crypto__i9__first . C4: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus <= spark__crypto__i9__last . C5: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus >= natural__first . C6: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus <= natural__last . C7: ks_modulus <> 0 . C8: loop__1__i >= spark__unsigned__u64__first . 
C9: loop__1__i <= spark__unsigned__u64__last . C10: loop__1__i >= spark__crypto__i8__first . C11: loop__1__i <= spark__crypto__i8__last . C12: loop__1__i >= spark__crypto__i8__first . C13: loop__1__i <= spark__crypto__i8__last . For path(s) from default assertion of line 260 to run-time check associated with statement of line 264: procedure_inject_key_4. H1: r >= spark__unsigned__u64__first . H2: r <= spark__unsigned__u64__last . H3: for_all(i___1: integer, ((i___1 >= spark__crypto__i9__first) and ( i___1 <= spark__crypto__i9__last)) -> ((element( ks, [i___1]) >= spark__unsigned__u64__first) and (element( ks, [i___1]) <= spark__unsigned__u64__last))) . H4: for_all(i___1: integer, ((i___1 >= spark__crypto__i3__first) and ( i___1 <= spark__crypto__i3__last)) -> ((element( ts, [i___1]) >= spark__unsigned__u64__first) and (element( ts, [i___1]) <= spark__unsigned__u64__last))) . H5: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H6: loop__1__i >= injection_range__first . H7: loop__1__i <= injection_range__last . H8: loop__1__i >= injection_range__first . H9: loop__1__i <= injection_range__last . H10: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H11: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H12: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus >= spark__crypto__i9__first . H13: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus <= spark__crypto__i9__last . H14: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus >= natural__first . H15: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus <= natural__last . H16: ks_modulus <> 0 . H17: loop__1__i >= spark__unsigned__u64__first . H18: loop__1__i <= spark__unsigned__u64__last . H19: loop__1__i >= spark__crypto__i8__first . H20: loop__1__i <= spark__crypto__i8__last . H21: loop__1__i >= spark__crypto__i8__first . H22: loop__1__i <= spark__crypto__i8__last . H23: loop__1__i = injection_range__last . -> C1: (element(update(x, [loop__1__i], (element(x, [ loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3]) + element( ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(update(x, [loop__1__i], (element(x, [ loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3]) + element( ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: r mod 3 mod interfaces__unsigned_64__modulus >= spark__crypto__i3__first . C4: r mod 3 mod interfaces__unsigned_64__modulus <= spark__crypto__i3__last . 
C5: r mod 3 mod interfaces__unsigned_64__modulus >= natural__first . C6: r mod 3 mod interfaces__unsigned_64__modulus <= natural__last . C7: 3 <> 0 . C8: wcnt - 3 >= spark__crypto__i8__first . C9: wcnt - 3 <= spark__crypto__i8__last . C10: wcnt - 3 >= integer__base__first . C11: wcnt - 3 <= integer__base__last . C12: wcnt - 3 >= spark__crypto__i8__first . C13: wcnt - 3 <= spark__crypto__i8__last . C14: wcnt - 3 >= integer__base__first . C15: wcnt - 3 <= integer__base__last . For path(s) from default assertion of line 260 to run-time check associated with statement of line 265: procedure_inject_key_5. H1: r >= spark__unsigned__u64__first . H2: r <= spark__unsigned__u64__last . H3: for_all(i___1: integer, ((i___1 >= spark__crypto__i9__first) and ( i___1 <= spark__crypto__i9__last)) -> ((element( ks, [i___1]) >= spark__unsigned__u64__first) and (element( ks, [i___1]) <= spark__unsigned__u64__last))) . H4: for_all(i___1: integer, ((i___1 >= spark__crypto__i3__first) and ( i___1 <= spark__crypto__i3__last)) -> ((element( ts, [i___1]) >= spark__unsigned__u64__first) and (element( ts, [i___1]) <= spark__unsigned__u64__last))) . H5: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H6: loop__1__i >= injection_range__first . H7: loop__1__i <= injection_range__last . H8: loop__1__i >= injection_range__first . H9: loop__1__i <= injection_range__last . H10: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H11: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H12: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus >= spark__crypto__i9__first . H13: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus <= spark__crypto__i9__last . H14: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus >= natural__first . H15: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus <= natural__last . H16: ks_modulus <> 0 . H17: loop__1__i >= spark__unsigned__u64__first . H18: loop__1__i <= spark__unsigned__u64__last . H19: loop__1__i >= spark__crypto__i8__first . H20: loop__1__i <= spark__crypto__i8__last . H21: loop__1__i >= spark__crypto__i8__first . H22: loop__1__i <= spark__crypto__i8__last . H23: loop__1__i = injection_range__last . H24: (element(update(x, [loop__1__i], (element(x, [ loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3]) + element( ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . 
H25: (element(update(x, [loop__1__i], (element(x, [ loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3]) + element( ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H26: r mod 3 mod interfaces__unsigned_64__modulus >= spark__crypto__i3__first . H27: r mod 3 mod interfaces__unsigned_64__modulus <= spark__crypto__i3__last . H28: r mod 3 mod interfaces__unsigned_64__modulus >= natural__first . H29: r mod 3 mod interfaces__unsigned_64__modulus <= natural__last . H30: 3 <> 0 . H31: wcnt - 3 >= spark__crypto__i8__first . H32: wcnt - 3 <= spark__crypto__i8__last . H33: wcnt - 3 >= integer__base__first . H34: wcnt - 3 <= integer__base__last . H35: wcnt - 3 >= spark__crypto__i8__first . H36: wcnt - 3 <= spark__crypto__i8__last . H37: wcnt - 3 >= integer__base__first . H38: wcnt - 3 <= integer__base__last . -> C1: (element(update(update(x, [loop__1__i], (element(x, [ loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3], (element(update( x, [loop__1__i], (element(x, [loop__1__i]) + element( ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3]) + element( ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 2]) + element( ts, [(r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(update(update(x, [loop__1__i], (element(x, [ loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3], (element(update( x, [loop__1__i], (element(x, [loop__1__i]) + element( ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3]) + element( ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 2]) + element( ts, [(r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: (r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus >= spark__crypto__i3__first . C4: (r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus <= spark__crypto__i3__last . C5: (r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus >= natural__first . C6: (r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus <= natural__last . C7: 3 <> 0 . C8: wcnt - 2 >= spark__crypto__i8__first . C9: wcnt - 2 <= spark__crypto__i8__last . C10: wcnt - 2 >= integer__base__first . C11: wcnt - 2 <= integer__base__last . C12: wcnt - 2 >= spark__crypto__i8__first . C13: wcnt - 2 <= spark__crypto__i8__last . C14: wcnt - 2 >= integer__base__first . C15: wcnt - 2 <= integer__base__last . For path(s) from default assertion of line 260 to run-time check associated with statement of line 266: procedure_inject_key_6. H1: r >= spark__unsigned__u64__first . 
H2: r <= spark__unsigned__u64__last . H3: for_all(i___1: integer, ((i___1 >= spark__crypto__i9__first) and ( i___1 <= spark__crypto__i9__last)) -> ((element( ks, [i___1]) >= spark__unsigned__u64__first) and (element( ks, [i___1]) <= spark__unsigned__u64__last))) . H4: for_all(i___1: integer, ((i___1 >= spark__crypto__i3__first) and ( i___1 <= spark__crypto__i3__last)) -> ((element( ts, [i___1]) >= spark__unsigned__u64__first) and (element( ts, [i___1]) <= spark__unsigned__u64__last))) . H5: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H6: loop__1__i >= injection_range__first . H7: loop__1__i <= injection_range__last . H8: loop__1__i >= injection_range__first . H9: loop__1__i <= injection_range__last . H10: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H11: (element(x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H12: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus >= spark__crypto__i9__first . H13: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus <= spark__crypto__i9__last . H14: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus >= natural__first . H15: (r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus <= natural__last . H16: ks_modulus <> 0 . H17: loop__1__i >= spark__unsigned__u64__first . H18: loop__1__i <= spark__unsigned__u64__last . H19: loop__1__i >= spark__crypto__i8__first . H20: loop__1__i <= spark__crypto__i8__last . H21: loop__1__i >= spark__crypto__i8__first . H22: loop__1__i <= spark__crypto__i8__last . H23: loop__1__i = injection_range__last . H24: (element(update(x, [loop__1__i], (element(x, [ loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3]) + element( ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H25: (element(update(x, [loop__1__i], (element(x, [ loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3]) + element( ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H26: r mod 3 mod interfaces__unsigned_64__modulus >= spark__crypto__i3__first . H27: r mod 3 mod interfaces__unsigned_64__modulus <= spark__crypto__i3__last . H28: r mod 3 mod interfaces__unsigned_64__modulus >= natural__first . H29: r mod 3 mod interfaces__unsigned_64__modulus <= natural__last . H30: 3 <> 0 . H31: wcnt - 3 >= spark__crypto__i8__first . H32: wcnt - 3 <= spark__crypto__i8__last . H33: wcnt - 3 >= integer__base__first . H34: wcnt - 3 <= integer__base__last . H35: wcnt - 3 >= spark__crypto__i8__first . H36: wcnt - 3 <= spark__crypto__i8__last . 
H37: wcnt - 3 >= integer__base__first . H38: wcnt - 3 <= integer__base__last . H39: (element(update(update(x, [loop__1__i], (element(x, [ loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3], (element(update( x, [loop__1__i], (element(x, [loop__1__i]) + element( ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3]) + element( ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 2]) + element( ts, [(r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H40: (element(update(update(x, [loop__1__i], (element(x, [ loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3], (element(update( x, [loop__1__i], (element(x, [loop__1__i]) + element( ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3]) + element( ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 2]) + element( ts, [(r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H41: (r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus >= spark__crypto__i3__first . H42: (r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus <= spark__crypto__i3__last . H43: (r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus >= natural__first . H44: (r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus <= natural__last . H45: 3 <> 0 . H46: wcnt - 2 >= spark__crypto__i8__first . H47: wcnt - 2 <= spark__crypto__i8__last . H48: wcnt - 2 >= integer__base__first . H49: wcnt - 2 <= integer__base__last . H50: wcnt - 2 >= spark__crypto__i8__first . H51: wcnt - 2 <= spark__crypto__i8__last . H52: wcnt - 2 >= integer__base__first . H53: wcnt - 2 <= integer__base__last . 
-> C1: (element(update(update(update(x, [loop__1__i], (element( x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3], (element(update( x, [loop__1__i], (element(x, [loop__1__i]) + element( ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3]) + element( ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 2], (element(update(update( x, [loop__1__i], (element(x, [loop__1__i]) + element( ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3], (element(update( x, [loop__1__i], (element(x, [loop__1__i]) + element( ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3]) + element( ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 2]) + element( ts, [(r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 1]) + r) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(update(update(update(x, [loop__1__i], (element( x, [loop__1__i]) + element(ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3], (element(update( x, [loop__1__i], (element(x, [loop__1__i]) + element( ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3]) + element( ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 2], (element(update(update( x, [loop__1__i], (element(x, [loop__1__i]) + element( ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3], (element(update( x, [loop__1__i], (element(x, [loop__1__i]) + element( ks, [(r + loop__1__i) mod interfaces__unsigned_64__modulus mod ks_modulus mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 3]) + element( ts, [r mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 2]) + element( ts, [(r + 1) mod interfaces__unsigned_64__modulus mod 3 mod interfaces__unsigned_64__modulus])) mod interfaces__unsigned_64__modulus), [wcnt - 1]) + r) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: wcnt - 1 >= spark__crypto__i8__first . C4: wcnt - 1 <= spark__crypto__i8__last . C5: wcnt - 1 >= integer__base__first . C6: wcnt - 1 <= integer__base__last . C7: wcnt - 1 >= spark__crypto__i8__first . C8: wcnt - 1 <= spark__crypto__i8__last . C9: wcnt - 1 >= integer__base__first . C10: wcnt - 1 <= integer__base__last . For path(s) from default assertion of line 260 to finish: procedure_inject_key_7. *** true . 
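The inject_key conclusions above are all range checks on 64-bit modular arithmetic: each state word absorbs one key-schedule word selected by (r + i) mod ks_modulus, and the last three words (wcnt - 3, wcnt - 2, wcnt - 1) additionally absorb the two active tweak words and the injection counter r itself. A minimal stand-alone Ada sketch of that shape, with hypothetical names and sizes rather than the package's own Inject_Key body, is:

   with Interfaces;   use Interfaces;
   with Ada.Text_IO;

   procedure Inject_Key_Sketch is
      --  Hypothetical stand-ins for the SPARK.Crypto word-array types.
      type Word_Array is array (Natural range <>) of Unsigned_64;

      WCNT : constant := 8;                               --  512-bit state = 8 words
      X    : Word_Array (0 .. WCNT - 1) := (others => 0);
      KS   : Word_Array (0 .. WCNT)     := (others => 1); --  WCNT + 1 schedule words
      TS   : Word_Array (0 .. 2)        := (others => 2); --  tweak schedule
      R    : constant Unsigned_64 := 3;                   --  injection counter
   begin
      --  Each word absorbs one key-schedule word; "+" on Unsigned_64 wraps
      --  modulo 2**64, which is what the repeated
      --  "mod interfaces__unsigned_64__modulus" terms in the VCs express.
      for I in X'Range loop
         X (I) := X (I) +
           KS (Natural ((R + Unsigned_64 (I)) mod Unsigned_64 (KS'Length)));
      end loop;

      --  The last three words also absorb the tweak words and the counter,
      --  matching the wcnt - 3, wcnt - 2 and wcnt - 1 index checks above.
      X (WCNT - 3) := X (WCNT - 3) + TS (Natural (R mod 3));
      X (WCNT - 2) := X (WCNT - 2) + TS (Natural ((R + 1) mod 3));
      X (WCNT - 1) := X (WCNT - 1) + R;

      Ada.Text_IO.Put_Line (Unsigned_64'Image (X (WCNT - 1)));
   end Inject_Key_Sketch;

Every intermediate value in the sketch is an Unsigned_64, so the ">= spark__unsigned__u64__first" and "<= spark__unsigned__u64__last" pairs in the conclusions reduce to the bounds 0 and 2**64 - 1.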
/* trivially true VC removed by Examiner */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_7.vct0000644000175000017500000000000011712513676031642 0ustar eugeneugen././@LongLink0000000000000000000000000000015200000000000011563 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/threefish_block.vsmspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/threefish_bloc0000644000175000017500000000006311712765060032457 0ustar eugeneugenthreefish_block,0,0,0,0,0,0,0,-nan,-nan,-nan,-nan, spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_2.log0000644000175000017500000000202611712513676031634 0ustar eugeneugenSPARK Simplifier Pro Edition Reading round_2.fdl (for inherited FDL type declarations) Processing round_2.vcg ... Simplified VC: 1 - All conclusions proved Simplified VC: 2 - All conclusions proved Simplified VC: 3 - All conclusions proved Simplified VC: 4 - All conclusions proved Simplified VC: 5 - All conclusions proved Simplified VC: 6 - All conclusions proved Simplified VC: 7 - All conclusions proved Simplified VC: 8 - All conclusions proved Simplified VC: 9 - All conclusions proved Simplified VC: 10 - All conclusions proved Simplified VC: 11 - All conclusions proved Simplified VC: 12 - All conclusions proved Simplified VC: 13 - All conclusions proved Simplified VC: 14 - All conclusions proved Simplified VC: 15 - All conclusions proved Simplified VC: 16 - All conclusions proved Simplified VC: 17 - All conclusions proved Simplified VC: 18 - All conclusions proved Simplified VC: 19 - All conclusions proved Simplified VC: 20 - All conclusions proved Automatic simplification completed. Simplified output sent to round_2.siv. ././@LongLink0000000000000000000000000000015100000000000011562 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/update_context.slgspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/update_context0000644000175000017500000004153411712513676032541 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Update_Context @@@@@@@@@@ VC: procedure_update_context_1. 
@@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H21 on reading formula in, to give: %%% H21: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(w, [i___1]) and element(w, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H22 on reading formula in, to give: %%% H22: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule update_conte_rules(70). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H3: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H19: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H21: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(w, [i___1]) and element(w, [i___1]) <= spark__unsigned__u64__last) New H22: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: bit__xor(element(x, [7]), element(w, [7])) >= 0 New C7: bit__xor(element(x, [6]), element(w, [6])) >= 0 New C13: bit__xor(element(x, [5]), element(w, [5])) >= 0 New C19: bit__xor(element(x, [4]), element(w, [4])) >= 0 New C25: bit__xor(element(x, [3]), element(w, [3])) >= 0 New C31: bit__xor(element(x, [2]), element(w, [2])) >= 0 New C37: bit__xor(element(x, [1]), element(w, [1])) >= 0 New C43: bit__xor(element(x, [0]), element(w, [0])) >= 0 -S- Applied substitution rule update_conte_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H20: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H3: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H21: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(w, [i___1]) and element(w, [i___1]) <= 18446744073709551615) New H22: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(element(x, [7]), element(w, [7])) <= 18446744073709551615 New C8: bit__xor(element(x, [6]), element(w, [6])) <= 18446744073709551615 New C14: bit__xor(element(x, [5]), element(w, [5])) <= 18446744073709551615 New C20: bit__xor(element(x, [4]), element(w, [4])) <= 18446744073709551615 New C26: bit__xor(element(x, [3]), element(w, [3])) <= 18446744073709551615 New C32: bit__xor(element(x, [2]), element(w, [2])) <= 18446744073709551615 New C38: bit__xor(element(x, [1]), element(w, [1])) <= 18446744073709551615 New C44: bit__xor(element(x, [0]), element(w, [0])) <= 18446744073709551615 -S- Applied substitution rule update_conte_rules(76). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H21: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(w, [i___1]) and element(w, [ i___1]) <= 18446744073709551615) New H22: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C9: true New C11: true New C15: true New C17: true New C21: true New C23: true New C27: true New C29: true New C33: true New C35: true New C39: true New C41: true New C45: true New C47: true -S- Applied substitution rule update_conte_rules(77). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H21: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(w, [i___1]) and element(w, [i___1]) <= 18446744073709551615) New H22: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C10: true New C12: true New C16: true New C18: true New C22: true New C24: true New C28: true New C30: true New C34: true New C36: true New C40: true New C42: true New C46: true New C48: true *** Proved C3: true *** Proved C5: true *** Proved C9: true *** Proved C11: true *** Proved C15: true *** Proved C17: true *** Proved C21: true *** Proved C23: true *** Proved C27: true *** Proved C29: true *** Proved C33: true *** Proved C35: true *** Proved C39: true *** Proved C41: true *** Proved C45: true *** Proved C47: true *** Proved C4: true *** Proved C6: true *** Proved C10: true *** Proved C12: true *** Proved C16: true *** Proved C18: true *** Proved C22: true *** Proved C24: true *** Proved C28: true *** Proved C30: true *** Proved C34: true *** Proved C36: true *** Proved C40: true *** Proved C42: true *** Proved C46: true *** Proved C48: true -S- Applied substitution rule update_conte_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H4: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule update_conte_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. 
New H5: fld_byte_count(fld_h(ctx)) <= 2147483647 -S- Applied substitution rule update_conte_rules(40). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H10: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule update_conte_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H11: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule update_conte_rules(46). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H13: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule update_conte_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H14: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule update_conte_rules(52). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H2: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule update_conte_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H2: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule update_conte_rules(58). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H15: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule update_conte_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H16: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule update_conte_rules(64). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H17: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule update_conte_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H18: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule update_conte_rules(86). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H6: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule update_conte_rules(87). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H7: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule update_conte_rules(91). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule update_conte_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule update_conte_rules(96). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. 
New H2: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule update_conte_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H2: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) --- Eliminated hypothesis H1 (true-hypothesis). --- Eliminated hypothesis H8 (true-hypothesis). --- Eliminated hypothesis H9 (true-hypothesis). --- Eliminated hypothesis H12 (true-hypothesis). *** Proved C1: bit__xor(element(x, [7]), element(w, [7])) >= 0 using hypotheses H21 & H22. *** Proved C7: bit__xor(element(x, [6]), element(w, [6])) >= 0 using hypotheses H21 & H22. *** Proved C13: bit__xor(element(x, [5]), element(w, [5])) >= 0 using hypotheses H21 & H22. *** Proved C19: bit__xor(element(x, [4]), element(w, [4])) >= 0 using hypotheses H21 & H22. *** Proved C25: bit__xor(element(x, [3]), element(w, [3])) >= 0 using hypotheses H21 & H22. *** Proved C31: bit__xor(element(x, [2]), element(w, [2])) >= 0 using hypotheses H21 & H22. *** Proved C37: bit__xor(element(x, [1]), element(w, [1])) >= 0 using hypotheses H21 & H22. *** Proved C43: bit__xor(element(x, [0]), element(w, [0])) >= 0 using hypotheses H21 & H22. *** Proved C2: bit__xor(element(x, [7]), element(w, [7])) <= 18446744073709551615 using hypotheses H21 & H22. *** Proved C8: bit__xor(element(x, [6]), element(w, [6])) <= 18446744073709551615 using hypotheses H21 & H22. *** Proved C14: bit__xor(element(x, [5]), element(w, [5])) <= 18446744073709551615 using hypotheses H21 & H22. *** Proved C20: bit__xor(element(x, [4]), element(w, [4])) <= 18446744073709551615 using hypotheses H21 & H22. *** Proved C26: bit__xor(element(x, [3]), element(w, [3])) <= 18446744073709551615 using hypotheses H21 & H22. *** Proved C32: bit__xor(element(x, [2]), element(w, [2])) <= 18446744073709551615 using hypotheses H21 & H22. *** Proved C38: bit__xor(element(x, [1]), element(w, [1])) <= 18446744073709551615 using hypotheses H21 & H22. *** Proved C44: bit__xor(element(x, [0]), element(w, [0])) <= 18446744073709551615 using hypotheses H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_update_context_2. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H21 on reading formula in, to give: %%% H21: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(w, [i___1]) and element(w, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H22 on reading formula in, to give: %%% H22: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H27 has been replaced by "true". (It is already present, as H25). 
--- Hypothesis H28 has been replaced by "true". (It is already present, as H26). --- Hypothesis H33 has been replaced by "true". (It is already present, as H31). --- Hypothesis H34 has been replaced by "true". (It is already present, as H32). --- Hypothesis H39 has been replaced by "true". (It is already present, as H37). --- Hypothesis H40 has been replaced by "true". (It is already present, as H38). --- Hypothesis H45 has been replaced by "true". (It is already present, as H43). --- Hypothesis H46 has been replaced by "true". (It is already present, as H44). --- Hypothesis H51 has been replaced by "true". (It is already present, as H49). --- Hypothesis H52 has been replaced by "true". (It is already present, as H50). --- Hypothesis H57 has been replaced by "true". (It is already present, as H55). --- Hypothesis H58 has been replaced by "true". (It is already present, as H56). --- Hypothesis H63 has been replaced by "true". (It is already present, as H61). --- Hypothesis H64 has been replaced by "true". (It is already present, as H62). --- Hypothesis H69 has been replaced by "true". (It is already present, as H67). --- Hypothesis H70 has been replaced by "true". (It is already present, as H68). %%% Simplified C1 on reading formula in, to give: %%% C1: true %%% Simplified C2 on reading formula in, to give: %%% C2: true *** Proved C1: true *** Proved C2: true *** PROVED VC. ././@LongLink0000000000000000000000000000016200000000000011564 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_key_schedule.vlgspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_key0000644000175000017500000000330611712765060032513 0ustar eugeneugen Non-option args: initialize_key_schedule Option args: -fuse-concls \ -decls=[SUPRESSED IN PLAIN MODE] \ -unique-working-files \ -rules=[SUPRESSED IN PLAIN MODE] \ -rules=[SUPRESSED IN PLAIN MODE] \ -elim-enums \ -ground-eval-exp \ -abstract-exp \ -abstract-divmod \ -utick \ -gtick \ -longtick \ -echo-final-stats \ -csv-reports-include-unit-kind \ -level=warning \ -bit-type \ -bit-type-bool-eq-to-iff \ -refine-types \ -refine-int-subrange-type \ -abstract-arrays-records-late \ -elim-array-constructors \ -add-array-select-box-update-axioms \ -abstract-array-box-updates \ -add-array-select-update-axioms \ -abstract-array-select-updates \ -abstract-array-types \ -abstract-record-types \ -abstract-bit-ops \ -abstract-bit-valued-eqs \ -abstract-bit-valued-int-le \ -elim-bit-type-and-consts \ -abstract-reals \ -lift-quants \ -strip-quantifier-patterns \ -elim-type-aliases \ -interface-mode=smtlib \ -refine-bit-type-as-int-subtype \ -refine-bit-eq-equiv \ -elim-record-constructors \ -add-record-select-update-axioms \ -abstract-record-selects-updates \ -logic=AUFNIRA \ -report=initialize_key_schedule \ -prover-command=[SUPRESSED IN PLAIN MODE] \ -siv \ -plain \ -read-unit-rlu-files \ -read-directory-rlu-files \ -delete-rules-with-undeclared-ids \ Total ERROR messages: 0 Total WARNING messages: 0 Summary Stats: true: 1 (100.0%) unproven: 0 ( 0.0%) error: 0 ( 0.0%) total: 1 ././@LongLink0000000000000000000000000000016200000000000011564 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_key_schedule.sivspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_key0000644000175000017500000001063211712513676032517 0ustar 
eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Initialize_Key_Schedule For path(s) from start to run-time check associated with statement of line 522: procedure_initialize_key_schedule_1. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 525: procedure_initialize_key_schedule_2. *** true . /* all conclusions proved */ For path(s) from assertion of line 527 to run-time check associated with statement of line 525: procedure_initialize_key_schedule_3. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 526: procedure_initialize_key_schedule_4. *** true . /* all conclusions proved */ For path(s) from assertion of line 527 to run-time check associated with statement of line 526: procedure_initialize_key_schedule_5. *** true . /* all conclusions proved */ For path(s) from start to assertion of line 527: procedure_initialize_key_schedule_6. *** true . /* all conclusions proved */ For path(s) from assertion of line 527 to assertion of line 527: procedure_initialize_key_schedule_7. H1: for_all(j_ : integer, 0 <= j_ and j_ <= loop__1__i -> 0 <= element(ks, [ j_]) and element(ks, [j_]) <= 18446744073709551615) . H2: element(ks, [8]) >= 0 . H3: element(ks, [8]) <= 18446744073709551615 . H4: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) . H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x( ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) . H6: fld_byte_count(fld_h(ctx)) >= 0 . H7: fld_byte_count(fld_h(ctx)) <= 2147483647 . H8: fld_hash_bit_len(fld_h(ctx)) >= 0 . H9: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 . H10: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 . H11: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 . H12: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 . H13: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 . H14: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 . H15: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 . H16: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 . H17: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 . H18: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 . H19: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 . H20: loop__1__i >= 0 . H21: element(fld_x(ctx), [loop__1__i + 1]) >= 0 . H22: element(fld_x(ctx), [loop__1__i + 1]) <= 18446744073709551615 . H23: loop__1__i <= 6 . H24: bit__xor(element(ks, [8]), element(fld_x(ctx), [loop__1__i + 1])) >= 0 . H25: bit__xor(element(ks, [8]), element(fld_x(ctx), [loop__1__i + 1])) <= 18446744073709551615 . H26: integer__size >= 0 . H27: natural__size >= 0 . H28: spark__unsigned__u6__size >= 0 . H29: spark__unsigned__u7__size >= 0 . H30: spark__unsigned__byte__size >= 0 . H31: spark__unsigned__u16__size >= 0 . H32: spark__unsigned__u32__size >= 0 . H33: spark__unsigned__u64__size >= 0 . H34: spark__crypto__i8__size >= 0 . H35: spark__crypto__i9__size >= 0 . H36: spark__crypto__word_count_t__size >= 0 . H37: hash_bit_length__size >= 0 . H38: skein_512_state_words_index__size >= 0 . H39: skein_512_block_bytes_index__size >= 0 . H40: skein_512_context__size >= 0 . 
H41: context_header__size >= 0 . -> C1: for_all(j_ : integer, 0 <= j_ and j_ <= loop__1__i + 1 -> 0 <= element( update(update(ks, [loop__1__i + 1], element(fld_x(ctx), [loop__1__i + 1])), [8], bit__xor(element(ks, [8]), element(fld_x(ctx), [loop__1__i + 1]))), [j_]) and element(update(update(ks, [loop__1__i + 1], element(fld_x(ctx), [loop__1__i + 1])), [8], bit__xor(element(ks, [8]) , element(fld_x(ctx), [loop__1__i + 1]))), [j_]) <= 18446744073709551615) . For path(s) from assertion of line 527 to finish: procedure_initialize_key_schedule_8. *** true . /* all conclusions proved */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_1.vcg0000644000175000017500000015342411712513676031642 0ustar eugeneugen ******************************************************* Semantic Analysis of SPARK Text Examiner Pro Edition ******************************************************* procedure Skein.Skein_512_Process_Block.Round_1 For path(s) from start to run-time check associated with statement of line 275: procedure_round_1_1. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 1 >= spark__crypto__i8__first . C4: 1 <= spark__crypto__i8__last . C5: 0 >= spark__crypto__i8__first . C6: 0 <= spark__crypto__i8__last . C7: 0 >= spark__crypto__i8__first . C8: 0 <= spark__crypto__i8__last . For path(s) from start to run-time check associated with statement of line 276: procedure_round_1_2. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . -> C1: r_512_0_0 >= spark__unsigned__shift_count__first . C2: r_512_0_0 <= spark__unsigned__shift_count__last . C3: element(update(x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . C4: element(update(x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . C5: 1 >= spark__crypto__i8__first . C6: 1 <= spark__crypto__i8__last . C7: 1 >= spark__crypto__i8__first . C8: 1 <= spark__crypto__i8__last . For path(s) from start to run-time check associated with statement of line 276: procedure_round_1_3. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . 
H4: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . H11: r_512_0_0 >= spark__unsigned__shift_count__first . H12: r_512_0_0 <= spark__unsigned__shift_count__last . H13: element(update(x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . H14: element(update(x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . H15: 1 >= spark__crypto__i8__first . H16: 1 <= spark__crypto__i8__last . H17: 1 >= spark__crypto__i8__first . H18: 1 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_0_0) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_0_0) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_0_0) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_0_0) <= spark__unsigned__u64__last . For path(s) from start to run-time check associated with statement of line 277: procedure_round_1_4. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . H11: r_512_0_0 >= spark__unsigned__shift_count__first . H12: r_512_0_0 <= spark__unsigned__shift_count__last . H13: element(update(x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . H14: element(update(x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . H15: 1 >= spark__crypto__i8__first . H16: 1 <= spark__crypto__i8__last . H17: 1 >= spark__crypto__i8__first . H18: 1 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_0_0) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_0_0) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_0_0) >= spark__unsigned__u64__first . 
H22: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_0_0) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_0_0)), [1]), element(update(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_0_0)), [0])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_0_0)), [1]), element(update(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_0_0)), [0])) <= spark__unsigned__u64__last . C3: 0 >= spark__crypto__i8__first . C4: 0 <= spark__crypto__i8__last . C5: 1 >= spark__crypto__i8__first . C6: 1 <= spark__crypto__i8__last . C7: 1 >= spark__crypto__i8__first . C8: 1 <= spark__crypto__i8__last . For path(s) from start to assertion of line 280: procedure_round_1_5. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . H11: r_512_0_0 >= spark__unsigned__shift_count__first . H12: r_512_0_0 <= spark__unsigned__shift_count__last . H13: element(update(x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . H14: element(update(x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . H15: 1 >= spark__crypto__i8__first . H16: 1 <= spark__crypto__i8__last . H17: 1 >= spark__crypto__i8__first . H18: 1 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_0_0) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_0_0) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_0_0) >= spark__unsigned__u64__first . 
H22: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_0_0) <= spark__unsigned__u64__last . H23: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_0_0)), [1]), element(update(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_0_0)), [0])) >= spark__unsigned__u64__first . H24: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_0_0)), [1]), element(update(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_0_0)), [0])) <= spark__unsigned__u64__last . H25: 0 >= spark__crypto__i8__first . H26: 0 <= spark__crypto__i8__last . H27: 1 >= spark__crypto__i8__first . H28: 1 <= spark__crypto__i8__last . H29: 1 >= spark__crypto__i8__first . H30: 1 <= spark__crypto__i8__last . -> C1: true . C2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(update(update(update( x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_0_0)), [1], bit__xor(element(update(update( x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_0_0)), [1]), element(update(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_0_0)), [0]))), [i___1]) >= spark__unsigned__u64__first) and (element(update(update(update( x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_0_0)), [1], bit__xor(element(update(update( x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_0_0)), [1]), element(update(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_0_0)), [0]))), [i___1]) <= spark__unsigned__u64__last))) . For path(s) from assertion of line 280 to run-time check associated with statement of line 282: procedure_round_1_6. H1: true . 
H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 3 >= spark__crypto__i8__first . C4: 3 <= spark__crypto__i8__last . C5: 2 >= spark__crypto__i8__first . C6: 2 <= spark__crypto__i8__last . C7: 2 >= spark__crypto__i8__first . C8: 2 <= spark__crypto__i8__last . For path(s) from assertion of line 280 to run-time check associated with statement of line 283: procedure_round_1_7. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . -> C1: r_512_0_1 >= spark__unsigned__shift_count__first . C2: r_512_0_1 <= spark__unsigned__shift_count__last . C3: element(update(x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . C4: element(update(x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . C5: 3 >= spark__crypto__i8__first . C6: 3 <= spark__crypto__i8__last . C7: 3 >= spark__crypto__i8__first . C8: 3 <= spark__crypto__i8__last . For path(s) from assertion of line 280 to run-time check associated with statement of line 283: procedure_round_1_8. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . H11: r_512_0_1 >= spark__unsigned__shift_count__first . H12: r_512_0_1 <= spark__unsigned__shift_count__last . H13: element(update(x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . H14: element(update(x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . H15: 3 >= spark__crypto__i8__first . H16: 3 <= spark__crypto__i8__last . H17: 3 >= spark__crypto__i8__first . H18: 3 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_0_1) >= interfaces__unsigned_64__first . 
H20: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_0_1) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_0_1) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_0_1) <= spark__unsigned__u64__last . For path(s) from assertion of line 280 to run-time check associated with statement of line 284: procedure_round_1_9. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . H11: r_512_0_1 >= spark__unsigned__shift_count__first . H12: r_512_0_1 <= spark__unsigned__shift_count__last . H13: element(update(x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . H14: element(update(x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . H15: 3 >= spark__crypto__i8__first . H16: 3 <= spark__crypto__i8__last . H17: 3 >= spark__crypto__i8__first . H18: 3 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_0_1) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_0_1) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_0_1) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_0_1) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_0_1)), [3]), element(update(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_0_1)), [2])) >= spark__unsigned__u64__first . 
C2: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_0_1)), [3]), element(update(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_0_1)), [2])) <= spark__unsigned__u64__last . C3: 2 >= spark__crypto__i8__first . C4: 2 <= spark__crypto__i8__last . C5: 3 >= spark__crypto__i8__first . C6: 3 <= spark__crypto__i8__last . C7: 3 >= spark__crypto__i8__first . C8: 3 <= spark__crypto__i8__last . For path(s) from assertion of line 280 to assertion of line 286: procedure_round_1_10. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . H11: r_512_0_1 >= spark__unsigned__shift_count__first . H12: r_512_0_1 <= spark__unsigned__shift_count__last . H13: element(update(x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . H14: element(update(x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . H15: 3 >= spark__crypto__i8__first . H16: 3 <= spark__crypto__i8__last . H17: 3 >= spark__crypto__i8__first . H18: 3 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_0_1) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_0_1) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_0_1) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_0_1) <= spark__unsigned__u64__last . H23: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_0_1)), [3]), element(update(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_0_1)), [2])) >= spark__unsigned__u64__first . 
H24: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_0_1)), [3]), element(update(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_0_1)), [2])) <= spark__unsigned__u64__last . H25: 2 >= spark__crypto__i8__first . H26: 2 <= spark__crypto__i8__last . H27: 3 >= spark__crypto__i8__first . H28: 3 <= spark__crypto__i8__last . H29: 3 >= spark__crypto__i8__first . H30: 3 <= spark__crypto__i8__last . -> C1: true . C2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(update(update(update( x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_0_1)), [3], bit__xor(element(update(update( x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_0_1)), [3]), element(update(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_0_1)), [2]))), [i___1]) >= spark__unsigned__u64__first) and (element(update(update(update( x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_0_1)), [3], bit__xor(element(update(update( x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_0_1)), [3]), element(update(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_0_1)), [2]))), [i___1]) <= spark__unsigned__u64__last))) . For path(s) from assertion of line 286 to run-time check associated with statement of line 288: procedure_round_1_11. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 5 >= spark__crypto__i8__first . C4: 5 <= spark__crypto__i8__last . C5: 4 >= spark__crypto__i8__first . C6: 4 <= spark__crypto__i8__last . C7: 4 >= spark__crypto__i8__first . C8: 4 <= spark__crypto__i8__last . 
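The run-time checks above (procedure_round_1_9 through procedure_round_1_11) all come from one Threefish MIX step: the even-indexed word is incremented modulo 2**64, and the odd-indexed word is rotated left and xor-ed with the new even word, first for the pair X(2)/X(3) and then for X(4)/X(5). Below is a minimal Ada sketch of that step for X(2)/X(3) only; the unit name, the I8/U64_Seq declarations and the value 36 for the shift count R_512_0_1 are illustrative assumptions (the real constant lives in the package body, not in this VC file), while Rotate_Left is the standard operation on Interfaces.Unsigned_64.

   with Interfaces; use Interfaces;

   procedure Mix_2_3_Sketch is
      subtype U64 is Interfaces.Unsigned_64;
      type I8 is range 0 .. 7;                   --  eight-word state, indices as in the VCs
      type U64_Seq is array (I8) of U64;

      R_512_0_1 : constant Natural := 36;        --  illustrative shift count only
      X         : U64_Seq := (others => 0);
   begin
      X (2) := X (2) + X (3);                    --  "+" on Unsigned_64 wraps mod 2**64
      X (3) := Rotate_Left (X (3), R_512_0_1);   --  rotate_left_64 in the VCs
      X (3) := X (3) xor X (2);                  --  bit__xor in the VCs
   end Mix_2_3_Sketch;

These are the statements whose value and index range checks appear as the C1 .. C8 conclusions above; if spark__unsigned__u64 covers the full Unsigned_64 range (its bounds are left pending in the accompanying FDL declarations), the value checks hold trivially.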
For path(s) from assertion of line 286 to run-time check associated with statement of line 289: procedure_round_1_12. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . -> C1: r_512_0_2 >= spark__unsigned__shift_count__first . C2: r_512_0_2 <= spark__unsigned__shift_count__last . C3: element(update(x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . C4: element(update(x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . C5: 5 >= spark__crypto__i8__first . C6: 5 <= spark__crypto__i8__last . C7: 5 >= spark__crypto__i8__first . C8: 5 <= spark__crypto__i8__last . For path(s) from assertion of line 286 to run-time check associated with statement of line 289: procedure_round_1_13. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . H11: r_512_0_2 >= spark__unsigned__shift_count__first . H12: r_512_0_2 <= spark__unsigned__shift_count__last . H13: element(update(x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . H14: element(update(x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . H15: 5 >= spark__crypto__i8__first . H16: 5 <= spark__crypto__i8__last . H17: 5 >= spark__crypto__i8__first . H18: 5 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_0_2) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_0_2) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_0_2) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_0_2) <= spark__unsigned__u64__last . For path(s) from assertion of line 286 to run-time check associated with statement of line 290: procedure_round_1_14. H1: true . 
H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . H11: r_512_0_2 >= spark__unsigned__shift_count__first . H12: r_512_0_2 <= spark__unsigned__shift_count__last . H13: element(update(x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . H14: element(update(x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . H15: 5 >= spark__crypto__i8__first . H16: 5 <= spark__crypto__i8__last . H17: 5 >= spark__crypto__i8__first . H18: 5 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_0_2) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_0_2) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_0_2) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_0_2) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_0_2)), [5]), element(update(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_0_2)), [4])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_0_2)), [5]), element(update(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_0_2)), [4])) <= spark__unsigned__u64__last . C3: 4 >= spark__crypto__i8__first . C4: 4 <= spark__crypto__i8__last . C5: 5 >= spark__crypto__i8__first . C6: 5 <= spark__crypto__i8__last . C7: 5 >= spark__crypto__i8__first . C8: 5 <= spark__crypto__i8__last . For path(s) from assertion of line 286 to assertion of line 292: procedure_round_1_15. H1: true . 
H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . H11: r_512_0_2 >= spark__unsigned__shift_count__first . H12: r_512_0_2 <= spark__unsigned__shift_count__last . H13: element(update(x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . H14: element(update(x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . H15: 5 >= spark__crypto__i8__first . H16: 5 <= spark__crypto__i8__last . H17: 5 >= spark__crypto__i8__first . H18: 5 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_0_2) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_0_2) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_0_2) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_0_2) <= spark__unsigned__u64__last . H23: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_0_2)), [5]), element(update(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_0_2)), [4])) >= spark__unsigned__u64__first . H24: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_0_2)), [5]), element(update(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_0_2)), [4])) <= spark__unsigned__u64__last . H25: 4 >= spark__crypto__i8__first . H26: 4 <= spark__crypto__i8__last . H27: 5 >= spark__crypto__i8__first . H28: 5 <= spark__crypto__i8__last . H29: 5 >= spark__crypto__i8__first . H30: 5 <= spark__crypto__i8__last . -> C1: true . 
C2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(update(update(update( x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_0_2)), [5], bit__xor(element(update(update( x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_0_2)), [5]), element(update(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_0_2)), [4]))), [i___1]) >= spark__unsigned__u64__first) and (element(update(update(update( x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_0_2)), [5], bit__xor(element(update(update( x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_0_2)), [5]), element(update(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_0_2)), [4]))), [i___1]) <= spark__unsigned__u64__last))) . For path(s) from assertion of line 292 to run-time check associated with statement of line 294: procedure_round_1_16. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 7 >= spark__crypto__i8__first . C4: 7 <= spark__crypto__i8__last . C5: 6 >= spark__crypto__i8__first . C6: 6 <= spark__crypto__i8__last . C7: 6 >= spark__crypto__i8__first . C8: 6 <= spark__crypto__i8__last . For path(s) from assertion of line 292 to run-time check associated with statement of line 295: procedure_round_1_17. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . -> C1: r_512_0_3 >= spark__unsigned__shift_count__first . C2: r_512_0_3 <= spark__unsigned__shift_count__last . 
C3: element(update(x, [6], (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . C4: element(update(x, [6], (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . C5: 7 >= spark__crypto__i8__first . C6: 7 <= spark__crypto__i8__last . C7: 7 >= spark__crypto__i8__first . C8: 7 <= spark__crypto__i8__last . For path(s) from assertion of line 292 to run-time check associated with statement of line 295: procedure_round_1_18. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . H11: r_512_0_3 >= spark__unsigned__shift_count__first . H12: r_512_0_3 <= spark__unsigned__shift_count__last . H13: element(update(x, [6], (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . H14: element(update(x, [6], (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . H15: 7 >= spark__crypto__i8__first . H16: 7 <= spark__crypto__i8__last . H17: 7 >= spark__crypto__i8__first . H18: 7 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_0_3) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_0_3) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_0_3) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_0_3) <= spark__unsigned__u64__last . For path(s) from assertion of line 292 to run-time check associated with statement of line 296: procedure_round_1_19. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . H11: r_512_0_3 >= spark__unsigned__shift_count__first . H12: r_512_0_3 <= spark__unsigned__shift_count__last . H13: element(update(x, [6], (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [6], (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . H15: 7 >= spark__crypto__i8__first . H16: 7 <= spark__crypto__i8__last . H17: 7 >= spark__crypto__i8__first . H18: 7 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_0_3) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_0_3) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_0_3) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_0_3) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_0_3)), [7]), element(update(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_0_3)), [6])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_0_3)), [7]), element(update(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_0_3)), [6])) <= spark__unsigned__u64__last . C3: 6 >= spark__crypto__i8__first . C4: 6 <= spark__crypto__i8__last . C5: 7 >= spark__crypto__i8__first . C6: 7 <= spark__crypto__i8__last . C7: 7 >= spark__crypto__i8__first . C8: 7 <= spark__crypto__i8__last . For path(s) from assertion of line 292 to finish: procedure_round_1_20. *** true . 
/* trivially true VC removed by Examiner */ ././@LongLink0000000000000000000000000000015200000000000011563 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/threefish_block.fdlspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/threefish_bloc0000644000175000017500000000702211712513676032465 0ustar eugeneugen {*******************************************************} {FDL Declarations} {Examiner Pro Edition} {*******************************************************} {procedure Skein.Skein_512_Process_Block.Threefish_Block} title procedure threefish_block; function round__(real) : integer; type interfaces__unsigned_64 = integer; type spark__unsigned__u64 = integer; type spark__crypto__i3 = integer; type spark__crypto__i8 = integer; type spark__crypto__i9 = integer; type spark__crypto__word_count_t = integer; type spark__crypto__u64_seq = array [ spark__crypto__word_count_t] of spark__unsigned__u64; const skein_512_rounds_total : integer = pending; const spark__crypto__word_count_t__base__first : integer = pending; const spark__crypto__word_count_t__base__last : integer = pending; const spark__crypto__i9__base__first : integer = pending; const spark__crypto__i9__base__last : integer = pending; const spark__crypto__i8__base__first : integer = pending; const spark__crypto__i8__base__last : integer = pending; const spark__crypto__i3__base__first : integer = pending; const spark__crypto__i3__base__last : integer = pending; const spark__unsigned__u64__base__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__base__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__last : interfaces__unsigned_64 = pending; const integer__base__first : integer = pending; const integer__base__last : integer = pending; const spark__crypto__word_count_t__first : integer = pending; const spark__crypto__word_count_t__last : integer = pending; const spark__crypto__word_count_t__size : integer = pending; const spark__crypto__i9__first : integer = pending; const spark__crypto__i9__last : integer = pending; const spark__crypto__i9__size : integer = pending; const spark__crypto__i8__first : integer = pending; const spark__crypto__i8__last : integer = pending; const spark__crypto__i8__size : integer = pending; const spark__crypto__i3__first : integer = pending; const spark__crypto__i3__last : integer = pending; const spark__crypto__i3__size : integer = pending; const spark__unsigned__u64__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__last : interfaces__unsigned_64 = pending; const spark__unsigned__u64__modulus : integer = pending; const spark__unsigned__u64__size : integer = pending; const interfaces__unsigned_64__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__modulus : integer = pending; const interfaces__unsigned_64__size : integer = pending; const integer__first : integer = pending; const integer__last : integer = pending; const integer__size : integer = pending; var ts : spark__crypto__u64_seq; var ks : spark__crypto__u64_seq; var x : spark__crypto__u64_seq; var loop__1__r : interfaces__unsigned_64; var x__10 : spark__crypto__u64_seq; var x__9 : spark__crypto__u64_seq; var x__8 : spark__crypto__u64_seq; var x__7 : spark__crypto__u64_seq; var x__6 : spark__crypto__u64_seq; var x__5 : 
spark__crypto__u64_seq; var x__4 : spark__crypto__u64_seq; var x__3 : spark__crypto__u64_seq; var x__2 : spark__crypto__u64_seq; var x__1 : spark__crypto__u64_seq; end; spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_7.vcg0000644000175000017500000015342411712513676031650 0ustar eugeneugen ******************************************************* Semantic Analysis of SPARK Text Examiner Pro Edition ******************************************************* procedure Skein.Skein_512_Process_Block.Round_7 For path(s) from start to run-time check associated with statement of line 456: procedure_round_7_1. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 1 >= spark__crypto__i8__first . C4: 1 <= spark__crypto__i8__last . C5: 4 >= spark__crypto__i8__first . C6: 4 <= spark__crypto__i8__last . C7: 4 >= spark__crypto__i8__first . C8: 4 <= spark__crypto__i8__last . For path(s) from start to run-time check associated with statement of line 457: procedure_round_7_2. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . -> C1: r_512_6_0 >= spark__unsigned__shift_count__first . C2: r_512_6_0 <= spark__unsigned__shift_count__last . C3: element(update(x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . C4: element(update(x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . C5: 1 >= spark__crypto__i8__first . C6: 1 <= spark__crypto__i8__last . C7: 1 >= spark__crypto__i8__first . C8: 1 <= spark__crypto__i8__last . For path(s) from start to run-time check associated with statement of line 457: procedure_round_7_3. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . H11: r_512_6_0 >= spark__unsigned__shift_count__first . H12: r_512_6_0 <= spark__unsigned__shift_count__last . 
H13: element(update(x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . H14: element(update(x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . H15: 1 >= spark__crypto__i8__first . H16: 1 <= spark__crypto__i8__last . H17: 1 >= spark__crypto__i8__first . H18: 1 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_6_0) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_6_0) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_6_0) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_6_0) <= spark__unsigned__u64__last . For path(s) from start to run-time check associated with statement of line 458: procedure_round_7_4. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . H11: r_512_6_0 >= spark__unsigned__shift_count__first . H12: r_512_6_0 <= spark__unsigned__shift_count__last . H13: element(update(x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . H14: element(update(x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . H15: 1 >= spark__crypto__i8__first . H16: 1 <= spark__crypto__i8__last . H17: 1 >= spark__crypto__i8__first . H18: 1 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_6_0) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_6_0) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_6_0) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_6_0) <= spark__unsigned__u64__last . 
-> C1: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_6_0)), [1]), element(update(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_6_0)), [4])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_6_0)), [1]), element(update(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_6_0)), [4])) <= spark__unsigned__u64__last . C3: 4 >= spark__crypto__i8__first . C4: 4 <= spark__crypto__i8__last . C5: 1 >= spark__crypto__i8__first . C6: 1 <= spark__crypto__i8__last . C7: 1 >= spark__crypto__i8__first . C8: 1 <= spark__crypto__i8__last . For path(s) from start to assertion of line 461: procedure_round_7_5. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . H11: r_512_6_0 >= spark__unsigned__shift_count__first . H12: r_512_6_0 <= spark__unsigned__shift_count__last . H13: element(update(x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . H14: element(update(x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . H15: 1 >= spark__crypto__i8__first . H16: 1 <= spark__crypto__i8__last . H17: 1 >= spark__crypto__i8__first . H18: 1 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_6_0) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_6_0) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_6_0) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_6_0) <= spark__unsigned__u64__last . 
H23: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_6_0)), [1]), element(update(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_6_0)), [4])) >= spark__unsigned__u64__first . H24: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_6_0)), [1]), element(update(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_6_0)), [4])) <= spark__unsigned__u64__last . H25: 4 >= spark__crypto__i8__first . H26: 4 <= spark__crypto__i8__last . H27: 1 >= spark__crypto__i8__first . H28: 1 <= spark__crypto__i8__last . H29: 1 >= spark__crypto__i8__first . H30: 1 <= spark__crypto__i8__last . -> C1: true . C2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(update(update(update( x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_6_0)), [1], bit__xor(element(update(update( x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_6_0)), [1]), element(update(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_6_0)), [4]))), [i___1]) >= spark__unsigned__u64__first) and (element(update(update(update( x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_6_0)), [1], bit__xor(element(update(update( x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_6_0)), [1]), element(update(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_6_0)), [4]))), [i___1]) <= spark__unsigned__u64__last))) . For path(s) from assertion of line 461 to run-time check associated with statement of line 463: procedure_round_7_6. H1: true . 
H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 3 >= spark__crypto__i8__first . C4: 3 <= spark__crypto__i8__last . C5: 6 >= spark__crypto__i8__first . C6: 6 <= spark__crypto__i8__last . C7: 6 >= spark__crypto__i8__first . C8: 6 <= spark__crypto__i8__last . For path(s) from assertion of line 461 to run-time check associated with statement of line 464: procedure_round_7_7. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . -> C1: r_512_6_1 >= spark__unsigned__shift_count__first . C2: r_512_6_1 <= spark__unsigned__shift_count__last . C3: element(update(x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . C4: element(update(x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . C5: 3 >= spark__crypto__i8__first . C6: 3 <= spark__crypto__i8__last . C7: 3 >= spark__crypto__i8__first . C8: 3 <= spark__crypto__i8__last . For path(s) from assertion of line 461 to run-time check associated with statement of line 464: procedure_round_7_8. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . H11: r_512_6_1 >= spark__unsigned__shift_count__first . H12: r_512_6_1 <= spark__unsigned__shift_count__last . H13: element(update(x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . H14: element(update(x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . H15: 3 >= spark__crypto__i8__first . H16: 3 <= spark__crypto__i8__last . H17: 3 >= spark__crypto__i8__first . H18: 3 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_6_1) >= interfaces__unsigned_64__first . 
H20: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_6_1) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_6_1) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_6_1) <= spark__unsigned__u64__last . For path(s) from assertion of line 461 to run-time check associated with statement of line 465: procedure_round_7_9. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . H11: r_512_6_1 >= spark__unsigned__shift_count__first . H12: r_512_6_1 <= spark__unsigned__shift_count__last . H13: element(update(x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . H14: element(update(x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . H15: 3 >= spark__crypto__i8__first . H16: 3 <= spark__crypto__i8__last . H17: 3 >= spark__crypto__i8__first . H18: 3 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_6_1) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_6_1) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_6_1) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_6_1) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_6_1)), [3]), element(update(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_6_1)), [6])) >= spark__unsigned__u64__first . 
C2: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_6_1)), [3]), element(update(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_6_1)), [6])) <= spark__unsigned__u64__last . C3: 6 >= spark__crypto__i8__first . C4: 6 <= spark__crypto__i8__last . C5: 3 >= spark__crypto__i8__first . C6: 3 <= spark__crypto__i8__last . C7: 3 >= spark__crypto__i8__first . C8: 3 <= spark__crypto__i8__last . For path(s) from assertion of line 461 to assertion of line 467: procedure_round_7_10. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . H11: r_512_6_1 >= spark__unsigned__shift_count__first . H12: r_512_6_1 <= spark__unsigned__shift_count__last . H13: element(update(x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . H14: element(update(x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . H15: 3 >= spark__crypto__i8__first . H16: 3 <= spark__crypto__i8__last . H17: 3 >= spark__crypto__i8__first . H18: 3 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_6_1) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_6_1) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_6_1) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_6_1) <= spark__unsigned__u64__last . H23: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_6_1)), [3]), element(update(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_6_1)), [6])) >= spark__unsigned__u64__first . 
H24: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_6_1)), [3]), element(update(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_6_1)), [6])) <= spark__unsigned__u64__last . H25: 6 >= spark__crypto__i8__first . H26: 6 <= spark__crypto__i8__last . H27: 3 >= spark__crypto__i8__first . H28: 3 <= spark__crypto__i8__last . H29: 3 >= spark__crypto__i8__first . H30: 3 <= spark__crypto__i8__last . -> C1: true . C2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(update(update(update( x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_6_1)), [3], bit__xor(element(update(update( x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_6_1)), [3]), element(update(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_6_1)), [6]))), [i___1]) >= spark__unsigned__u64__first) and (element(update(update(update( x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_6_1)), [3], bit__xor(element(update(update( x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_6_1)), [3]), element(update(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_6_1)), [6]))), [i___1]) <= spark__unsigned__u64__last))) . For path(s) from assertion of line 467 to run-time check associated with statement of line 469: procedure_round_7_11. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 5 >= spark__crypto__i8__first . C4: 5 <= spark__crypto__i8__last . C5: 0 >= spark__crypto__i8__first . C6: 0 <= spark__crypto__i8__last . C7: 0 >= spark__crypto__i8__first . C8: 0 <= spark__crypto__i8__last . 
For path(s) from assertion of line 467 to run-time check associated with statement of line 470: procedure_round_7_12. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . -> C1: r_512_6_2 >= spark__unsigned__shift_count__first . C2: r_512_6_2 <= spark__unsigned__shift_count__last . C3: element(update(x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . C4: element(update(x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . C5: 5 >= spark__crypto__i8__first . C6: 5 <= spark__crypto__i8__last . C7: 5 >= spark__crypto__i8__first . C8: 5 <= spark__crypto__i8__last . For path(s) from assertion of line 467 to run-time check associated with statement of line 470: procedure_round_7_13. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . H11: r_512_6_2 >= spark__unsigned__shift_count__first . H12: r_512_6_2 <= spark__unsigned__shift_count__last . H13: element(update(x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . H14: element(update(x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . H15: 5 >= spark__crypto__i8__first . H16: 5 <= spark__crypto__i8__last . H17: 5 >= spark__crypto__i8__first . H18: 5 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_6_2) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_6_2) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_6_2) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_6_2) <= spark__unsigned__u64__last . For path(s) from assertion of line 467 to run-time check associated with statement of line 471: procedure_round_7_14. H1: true . 
H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . H11: r_512_6_2 >= spark__unsigned__shift_count__first . H12: r_512_6_2 <= spark__unsigned__shift_count__last . H13: element(update(x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . H14: element(update(x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . H15: 5 >= spark__crypto__i8__first . H16: 5 <= spark__crypto__i8__last . H17: 5 >= spark__crypto__i8__first . H18: 5 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_6_2) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_6_2) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_6_2) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_6_2) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_6_2)), [5]), element(update(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_6_2)), [0])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_6_2)), [5]), element(update(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_6_2)), [0])) <= spark__unsigned__u64__last . C3: 0 >= spark__crypto__i8__first . C4: 0 <= spark__crypto__i8__last . C5: 5 >= spark__crypto__i8__first . C6: 5 <= spark__crypto__i8__last . C7: 5 >= spark__crypto__i8__first . C8: 5 <= spark__crypto__i8__last . For path(s) from assertion of line 467 to assertion of line 473: procedure_round_7_15. H1: true . 
H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . H11: r_512_6_2 >= spark__unsigned__shift_count__first . H12: r_512_6_2 <= spark__unsigned__shift_count__last . H13: element(update(x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . H14: element(update(x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . H15: 5 >= spark__crypto__i8__first . H16: 5 <= spark__crypto__i8__last . H17: 5 >= spark__crypto__i8__first . H18: 5 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_6_2) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_6_2) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_6_2) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_6_2) <= spark__unsigned__u64__last . H23: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_6_2)), [5]), element(update(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_6_2)), [0])) >= spark__unsigned__u64__first . H24: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_6_2)), [5]), element(update(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_6_2)), [0])) <= spark__unsigned__u64__last . H25: 0 >= spark__crypto__i8__first . H26: 0 <= spark__crypto__i8__last . H27: 5 >= spark__crypto__i8__first . H28: 5 <= spark__crypto__i8__last . H29: 5 >= spark__crypto__i8__first . H30: 5 <= spark__crypto__i8__last . -> C1: true . 
C2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(update(update(update( x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_6_2)), [5], bit__xor(element(update(update( x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_6_2)), [5]), element(update(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_6_2)), [0]))), [i___1]) >= spark__unsigned__u64__first) and (element(update(update(update( x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_6_2)), [5], bit__xor(element(update(update( x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_6_2)), [5]), element(update(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_6_2)), [0]))), [i___1]) <= spark__unsigned__u64__last))) . For path(s) from assertion of line 473 to run-time check associated with statement of line 475: procedure_round_7_16. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 7 >= spark__crypto__i8__first . C4: 7 <= spark__crypto__i8__last . C5: 2 >= spark__crypto__i8__first . C6: 2 <= spark__crypto__i8__last . C7: 2 >= spark__crypto__i8__first . C8: 2 <= spark__crypto__i8__last . For path(s) from assertion of line 473 to run-time check associated with statement of line 476: procedure_round_7_17. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . -> C1: r_512_6_3 >= spark__unsigned__shift_count__first . C2: r_512_6_3 <= spark__unsigned__shift_count__last . 
C3: element(update(x, [2], (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . C4: element(update(x, [2], (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . C5: 7 >= spark__crypto__i8__first . C6: 7 <= spark__crypto__i8__last . C7: 7 >= spark__crypto__i8__first . C8: 7 <= spark__crypto__i8__last . For path(s) from assertion of line 473 to run-time check associated with statement of line 476: procedure_round_7_18. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . H11: r_512_6_3 >= spark__unsigned__shift_count__first . H12: r_512_6_3 <= spark__unsigned__shift_count__last . H13: element(update(x, [2], (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . H14: element(update(x, [2], (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . H15: 7 >= spark__crypto__i8__first . H16: 7 <= spark__crypto__i8__last . H17: 7 >= spark__crypto__i8__first . H18: 7 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_6_3) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_6_3) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_6_3) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_6_3) <= spark__unsigned__u64__last . For path(s) from assertion of line 473 to run-time check associated with statement of line 477: procedure_round_7_19. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . H11: r_512_6_3 >= spark__unsigned__shift_count__first . H12: r_512_6_3 <= spark__unsigned__shift_count__last . H13: element(update(x, [2], (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [2], (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . H15: 7 >= spark__crypto__i8__first . H16: 7 <= spark__crypto__i8__last . H17: 7 >= spark__crypto__i8__first . H18: 7 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_6_3) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_6_3) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_6_3) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_6_3) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_6_3)), [7]), element(update(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_6_3)), [2])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_6_3)), [7]), element(update(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_6_3)), [2])) <= spark__unsigned__u64__last . C3: 2 >= spark__crypto__i8__first . C4: 2 <= spark__crypto__i8__last . C5: 7 >= spark__crypto__i8__first . C6: 7 <= spark__crypto__i8__last . C7: 7 >= spark__crypto__i8__first . C8: 7 <= spark__crypto__i8__last . For path(s) from assertion of line 473 to finish: procedure_round_7_20. *** true . /* trivially true VC removed by Examiner */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_3.rls0000644000175000017500000000622211712513676031656 0ustar eugeneugen /*********************************************************/ /*Proof Rule Declarations*/ /*Examiner Pro Edition*/ /*********************************************************/ /*procedure Skein.Skein_512_Process_Block.Round_3*/ rule_family round_3_rules: X requires [X:any] & X <= Y requires [X:ire, Y:ire] & X >= Y requires [X:ire, Y:ire]. round_3_rules(1): r_512_2_0 may_be_replaced_by 17. round_3_rules(2): r_512_2_1 may_be_replaced_by 49. round_3_rules(3): r_512_2_2 may_be_replaced_by 36. round_3_rules(4): r_512_2_3 may_be_replaced_by 39. round_3_rules(5): integer__size >= 0 may_be_deduced. round_3_rules(6): integer__first may_be_replaced_by -2147483648. round_3_rules(7): integer__last may_be_replaced_by 2147483647. round_3_rules(8): integer__base__first may_be_replaced_by -2147483648. 
round_3_rules(9): integer__base__last may_be_replaced_by 2147483647. round_3_rules(10): interfaces__unsigned_64__size >= 0 may_be_deduced. round_3_rules(11): interfaces__unsigned_64__size may_be_replaced_by 64. round_3_rules(12): interfaces__unsigned_64__first may_be_replaced_by 0. round_3_rules(13): interfaces__unsigned_64__last may_be_replaced_by 18446744073709551615. round_3_rules(14): interfaces__unsigned_64__base__first may_be_replaced_by 0. round_3_rules(15): interfaces__unsigned_64__base__last may_be_replaced_by 18446744073709551615. round_3_rules(16): interfaces__unsigned_64__modulus may_be_replaced_by 18446744073709551616. round_3_rules(17): spark__unsigned__u64__size >= 0 may_be_deduced. round_3_rules(18): spark__unsigned__u64__first may_be_replaced_by 0. round_3_rules(19): spark__unsigned__u64__last may_be_replaced_by 18446744073709551615. round_3_rules(20): spark__unsigned__u64__base__first may_be_replaced_by 0. round_3_rules(21): spark__unsigned__u64__base__last may_be_replaced_by 18446744073709551615. round_3_rules(22): spark__unsigned__u64__modulus may_be_replaced_by 18446744073709551616. round_3_rules(23): spark__unsigned__shift_count__size >= 0 may_be_deduced. round_3_rules(24): spark__unsigned__shift_count__first may_be_replaced_by 0. round_3_rules(25): spark__unsigned__shift_count__last may_be_replaced_by 64. round_3_rules(26): spark__unsigned__shift_count__base__first may_be_replaced_by -2147483648. round_3_rules(27): spark__unsigned__shift_count__base__last may_be_replaced_by 2147483647. round_3_rules(28): spark__crypto__i8__size >= 0 may_be_deduced. round_3_rules(29): spark__crypto__i8__first may_be_replaced_by 0. round_3_rules(30): spark__crypto__i8__last may_be_replaced_by 7. round_3_rules(31): spark__crypto__i8__base__first may_be_replaced_by -2147483648. round_3_rules(32): spark__crypto__i8__base__last may_be_replaced_by 2147483647. round_3_rules(33): spark__crypto__word_count_t__size >= 0 may_be_deduced. round_3_rules(34): spark__crypto__word_count_t__first may_be_replaced_by 0. round_3_rules(35): spark__crypto__word_count_t__last may_be_replaced_by 268435455. round_3_rules(36): spark__crypto__word_count_t__base__first may_be_replaced_by -2147483648. round_3_rules(37): spark__crypto__word_count_t__base__last may_be_replaced_by 2147483647. 
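The rule family that ends here is how the Examiner hands the Simplifier the literal values behind Round_3's constants and scalar bounds: each may_be_replaced_by rule substitutes a number for a mangled Ada name, and each may_be_deduced rule records a fact that holds by construction (for example, a 'Size being non-negative). Read back into Ada terms, rules (1) to (4) and (10) to (37) correspond roughly to the declarations sketched below. This is a hedged reconstruction from the rule file only: the package name is invented, the subtype names are expanded from the mangled identifiers, and the parent types are guessed from the base-type bounds rather than copied from the package sources.

with Interfaces;
package Round_3_Constants_Sketch is
   --  Rotation constants exported by round_3_rules (1) .. (4).
   R_512_2_0 : constant := 17;
   R_512_2_1 : constant := 49;
   R_512_2_2 : constant := 36;
   R_512_2_3 : constant := 39;

   --  Scalar bounds exported by round_3_rules (10) .. (37).
   subtype U64          is Interfaces.Unsigned_64;     --  0 .. 2**64 - 1, modulus 2**64
   subtype Shift_Count  is Integer range 0 .. 64;      --  base bounds are Integer'First/'Last
   subtype I8           is Integer range 0 .. 7;       --  Skein-512 state word index
   subtype Word_Count_T is Integer range 0 .. 268_435_455;
end Round_3_Constants_Sketch;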
././@LongLink0000000000000000000000000000015000000000000011561 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_ts.vlgspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_ts.0000644000175000017500000000326111712765060032427 0ustar eugeneugen Non-option args: initialize_ts Option args: -fuse-concls \ -decls=[SUPRESSED IN PLAIN MODE] \ -unique-working-files \ -rules=[SUPRESSED IN PLAIN MODE] \ -rules=[SUPRESSED IN PLAIN MODE] \ -elim-enums \ -ground-eval-exp \ -abstract-exp \ -abstract-divmod \ -utick \ -gtick \ -longtick \ -echo-final-stats \ -csv-reports-include-unit-kind \ -level=warning \ -bit-type \ -bit-type-bool-eq-to-iff \ -refine-types \ -refine-int-subrange-type \ -abstract-arrays-records-late \ -elim-array-constructors \ -add-array-select-box-update-axioms \ -abstract-array-box-updates \ -add-array-select-update-axioms \ -abstract-array-select-updates \ -abstract-array-types \ -abstract-record-types \ -abstract-bit-ops \ -abstract-bit-valued-eqs \ -abstract-bit-valued-int-le \ -elim-bit-type-and-consts \ -abstract-reals \ -lift-quants \ -strip-quantifier-patterns \ -elim-type-aliases \ -interface-mode=smtlib \ -refine-bit-type-as-int-subtype \ -refine-bit-eq-equiv \ -elim-record-constructors \ -add-record-select-update-axioms \ -abstract-record-selects-updates \ -logic=AUFNIRA \ -report=initialize_ts \ -prover-command=[SUPRESSED IN PLAIN MODE] \ -siv \ -plain \ -read-unit-rlu-files \ -read-directory-rlu-files \ -delete-rules-with-undeclared-ids \ Total ERROR messages: 0 Total WARNING messages: 0 Summary Stats: true: 0 (-nan%) unproven: 0 (-nan%) error: 0 (-nan%) total: 0 spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_4.fdl0000644000175000017500000000577111712513676031634 0ustar eugeneugen {*******************************************************} {FDL Declarations} {Examiner Pro Edition} {*******************************************************} {procedure Skein.Skein_512_Process_Block.Round_4} title procedure round_4; function round__(real) : integer; type interfaces__unsigned_64 = integer; type spark__unsigned__u64 = integer; type spark__unsigned__shift_count = integer; type spark__crypto__i8 = integer; type spark__crypto__word_count_t = integer; type spark__crypto__u64_seq = array [ spark__crypto__word_count_t] of spark__unsigned__u64; const r_512_3_0 : integer = pending; const r_512_3_1 : integer = pending; const r_512_3_2 : integer = pending; const r_512_3_3 : integer = pending; const spark__crypto__word_count_t__base__first : integer = pending; const spark__crypto__word_count_t__base__last : integer = pending; const spark__crypto__i8__base__first : integer = pending; const spark__crypto__i8__base__last : integer = pending; const spark__unsigned__shift_count__base__first : integer = pending; const spark__unsigned__shift_count__base__last : integer = pending; const spark__unsigned__u64__base__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__base__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__last : interfaces__unsigned_64 = pending; const integer__base__first : integer = pending; const integer__base__last : integer = pending; const spark__crypto__word_count_t__first : integer = pending; const spark__crypto__word_count_t__last : integer = pending; const spark__crypto__word_count_t__size : integer = 
pending; const spark__crypto__i8__first : integer = pending; const spark__crypto__i8__last : integer = pending; const spark__crypto__i8__size : integer = pending; const spark__unsigned__shift_count__first : integer = pending; const spark__unsigned__shift_count__last : integer = pending; const spark__unsigned__shift_count__size : integer = pending; const spark__unsigned__u64__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__last : interfaces__unsigned_64 = pending; const spark__unsigned__u64__modulus : integer = pending; const spark__unsigned__u64__size : integer = pending; const interfaces__unsigned_64__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__modulus : integer = pending; const interfaces__unsigned_64__size : integer = pending; const integer__first : integer = pending; const integer__last : integer = pending; const integer__size : integer = pending; var x : spark__crypto__u64_seq; function spark__unsigned__rotate_left_64( interfaces__unsigned_64, integer) : interfaces__unsigned_64; end; ././@LongLink0000000000000000000000000000015200000000000011563 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/threefish_block.vctspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/threefish_bloc0000644000175000017500000000000011712513676032452 0ustar eugeneugenspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_1.vct0000644000175000017500000000000011712513676031634 0ustar eugeneugenspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_3.fdl0000644000175000017500000000577111712513676031633 0ustar eugeneugen {*******************************************************} {FDL Declarations} {Examiner Pro Edition} {*******************************************************} {procedure Skein.Skein_512_Process_Block.Round_3} title procedure round_3; function round__(real) : integer; type interfaces__unsigned_64 = integer; type spark__unsigned__u64 = integer; type spark__unsigned__shift_count = integer; type spark__crypto__i8 = integer; type spark__crypto__word_count_t = integer; type spark__crypto__u64_seq = array [ spark__crypto__word_count_t] of spark__unsigned__u64; const r_512_2_0 : integer = pending; const r_512_2_1 : integer = pending; const r_512_2_2 : integer = pending; const r_512_2_3 : integer = pending; const spark__crypto__word_count_t__base__first : integer = pending; const spark__crypto__word_count_t__base__last : integer = pending; const spark__crypto__i8__base__first : integer = pending; const spark__crypto__i8__base__last : integer = pending; const spark__unsigned__shift_count__base__first : integer = pending; const spark__unsigned__shift_count__base__last : integer = pending; const spark__unsigned__u64__base__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__base__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__last : interfaces__unsigned_64 = pending; const integer__base__first : integer = pending; const integer__base__last : integer = pending; const spark__crypto__word_count_t__first : integer = pending; const spark__crypto__word_count_t__last : integer = pending; const spark__crypto__word_count_t__size : integer = pending; const spark__crypto__i8__first : integer = pending; const 
spark__crypto__i8__last : integer = pending; const spark__crypto__i8__size : integer = pending; const spark__unsigned__shift_count__first : integer = pending; const spark__unsigned__shift_count__last : integer = pending; const spark__unsigned__shift_count__size : integer = pending; const spark__unsigned__u64__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__last : interfaces__unsigned_64 = pending; const spark__unsigned__u64__modulus : integer = pending; const spark__unsigned__u64__size : integer = pending; const interfaces__unsigned_64__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__modulus : integer = pending; const interfaces__unsigned_64__size : integer = pending; const integer__first : integer = pending; const integer__last : integer = pending; const integer__size : integer = pending; var x : spark__crypto__u64_seq; function spark__unsigned__rotate_left_64( interfaces__unsigned_64, integer) : interfaces__unsigned_64; end; ././@LongLink0000000000000000000000000000015000000000000011561 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_ts.rlsspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_ts.0000644000175000017500000002261311712513676032435 0ustar eugeneugen /*********************************************************/ /*Proof Rule Declarations*/ /*Examiner Pro Edition*/ /*********************************************************/ /*procedure Skein.Skein_512_Process_Block.Initialize_TS*/ rule_family initialize_t_rules: X requires [X:any] & X <= Y requires [X:ire, Y:ire] & X >= Y requires [X:ire, Y:ire]. initialize_t_rules(1): integer__size >= 0 may_be_deduced. initialize_t_rules(2): integer__first may_be_replaced_by -2147483648. initialize_t_rules(3): integer__last may_be_replaced_by 2147483647. initialize_t_rules(4): integer__base__first may_be_replaced_by -2147483648. initialize_t_rules(5): integer__base__last may_be_replaced_by 2147483647. initialize_t_rules(6): natural__size >= 0 may_be_deduced. initialize_t_rules(7): natural__first may_be_replaced_by 0. initialize_t_rules(8): natural__last may_be_replaced_by 2147483647. initialize_t_rules(9): natural__base__first may_be_replaced_by -2147483648. initialize_t_rules(10): natural__base__last may_be_replaced_by 2147483647. initialize_t_rules(11): interfaces__unsigned_8__size >= 0 may_be_deduced. initialize_t_rules(12): interfaces__unsigned_8__size may_be_replaced_by 8. initialize_t_rules(13): interfaces__unsigned_8__first may_be_replaced_by 0. initialize_t_rules(14): interfaces__unsigned_8__last may_be_replaced_by 255. initialize_t_rules(15): interfaces__unsigned_8__base__first may_be_replaced_by 0. initialize_t_rules(16): interfaces__unsigned_8__base__last may_be_replaced_by 255. initialize_t_rules(17): interfaces__unsigned_8__modulus may_be_replaced_by 256. initialize_t_rules(18): interfaces__unsigned_16__size >= 0 may_be_deduced. initialize_t_rules(19): interfaces__unsigned_16__size may_be_replaced_by 16. initialize_t_rules(20): interfaces__unsigned_16__first may_be_replaced_by 0. initialize_t_rules(21): interfaces__unsigned_16__last may_be_replaced_by 65535. initialize_t_rules(22): interfaces__unsigned_16__base__first may_be_replaced_by 0. initialize_t_rules(23): interfaces__unsigned_16__base__last may_be_replaced_by 65535. initialize_t_rules(24): interfaces__unsigned_16__modulus may_be_replaced_by 65536. 
initialize_t_rules(25): interfaces__unsigned_32__size >= 0 may_be_deduced. initialize_t_rules(26): interfaces__unsigned_32__size may_be_replaced_by 32. initialize_t_rules(27): interfaces__unsigned_32__first may_be_replaced_by 0. initialize_t_rules(28): interfaces__unsigned_32__last may_be_replaced_by 4294967295. initialize_t_rules(29): interfaces__unsigned_32__base__first may_be_replaced_by 0. initialize_t_rules(30): interfaces__unsigned_32__base__last may_be_replaced_by 4294967295. initialize_t_rules(31): interfaces__unsigned_32__modulus may_be_replaced_by 4294967296. initialize_t_rules(32): interfaces__unsigned_64__size >= 0 may_be_deduced. initialize_t_rules(33): interfaces__unsigned_64__size may_be_replaced_by 64. initialize_t_rules(34): interfaces__unsigned_64__first may_be_replaced_by 0. initialize_t_rules(35): interfaces__unsigned_64__last may_be_replaced_by 18446744073709551615. initialize_t_rules(36): interfaces__unsigned_64__base__first may_be_replaced_by 0. initialize_t_rules(37): interfaces__unsigned_64__base__last may_be_replaced_by 18446744073709551615. initialize_t_rules(38): interfaces__unsigned_64__modulus may_be_replaced_by 18446744073709551616. initialize_t_rules(39): spark__unsigned__u6__size >= 0 may_be_deduced. initialize_t_rules(40): spark__unsigned__u6__first may_be_replaced_by 0. initialize_t_rules(41): spark__unsigned__u6__last may_be_replaced_by 63. initialize_t_rules(42): spark__unsigned__u6__base__first may_be_replaced_by 0. initialize_t_rules(43): spark__unsigned__u6__base__last may_be_replaced_by 63. initialize_t_rules(44): spark__unsigned__u6__modulus may_be_replaced_by 64. initialize_t_rules(45): spark__unsigned__u7__size >= 0 may_be_deduced. initialize_t_rules(46): spark__unsigned__u7__first may_be_replaced_by 0. initialize_t_rules(47): spark__unsigned__u7__last may_be_replaced_by 127. initialize_t_rules(48): spark__unsigned__u7__base__first may_be_replaced_by 0. initialize_t_rules(49): spark__unsigned__u7__base__last may_be_replaced_by 127. initialize_t_rules(50): spark__unsigned__u7__modulus may_be_replaced_by 128. initialize_t_rules(51): spark__unsigned__byte__size >= 0 may_be_deduced. initialize_t_rules(52): spark__unsigned__byte__first may_be_replaced_by 0. initialize_t_rules(53): spark__unsigned__byte__last may_be_replaced_by 255. initialize_t_rules(54): spark__unsigned__byte__base__first may_be_replaced_by 0. initialize_t_rules(55): spark__unsigned__byte__base__last may_be_replaced_by 255. initialize_t_rules(56): spark__unsigned__byte__modulus may_be_replaced_by 256. initialize_t_rules(57): spark__unsigned__u16__size >= 0 may_be_deduced. initialize_t_rules(58): spark__unsigned__u16__first may_be_replaced_by 0. initialize_t_rules(59): spark__unsigned__u16__last may_be_replaced_by 65535. initialize_t_rules(60): spark__unsigned__u16__base__first may_be_replaced_by 0. initialize_t_rules(61): spark__unsigned__u16__base__last may_be_replaced_by 65535. initialize_t_rules(62): spark__unsigned__u16__modulus may_be_replaced_by 65536. initialize_t_rules(63): spark__unsigned__u32__size >= 0 may_be_deduced. initialize_t_rules(64): spark__unsigned__u32__first may_be_replaced_by 0. initialize_t_rules(65): spark__unsigned__u32__last may_be_replaced_by 4294967295. initialize_t_rules(66): spark__unsigned__u32__base__first may_be_replaced_by 0. initialize_t_rules(67): spark__unsigned__u32__base__last may_be_replaced_by 4294967295. initialize_t_rules(68): spark__unsigned__u32__modulus may_be_replaced_by 4294967296. 
initialize_t_rules(69): spark__unsigned__u64__size >= 0 may_be_deduced. initialize_t_rules(70): spark__unsigned__u64__first may_be_replaced_by 0. initialize_t_rules(71): spark__unsigned__u64__last may_be_replaced_by 18446744073709551615. initialize_t_rules(72): spark__unsigned__u64__base__first may_be_replaced_by 0. initialize_t_rules(73): spark__unsigned__u64__base__last may_be_replaced_by 18446744073709551615. initialize_t_rules(74): spark__unsigned__u64__modulus may_be_replaced_by 18446744073709551616. initialize_t_rules(75): spark__crypto__word_count_t__size >= 0 may_be_deduced. initialize_t_rules(76): spark__crypto__word_count_t__first may_be_replaced_by 0. initialize_t_rules(77): spark__crypto__word_count_t__last may_be_replaced_by 268435455. initialize_t_rules(78): spark__crypto__word_count_t__base__first may_be_replaced_by -2147483648. initialize_t_rules(79): spark__crypto__word_count_t__base__last may_be_replaced_by 2147483647. initialize_t_rules(80): hash_bit_length__size >= 0 may_be_deduced. initialize_t_rules(81): hash_bit_length__first may_be_replaced_by 0. initialize_t_rules(82): hash_bit_length__last may_be_replaced_by 2147483640. initialize_t_rules(83): hash_bit_length__base__first may_be_replaced_by -2147483648. initialize_t_rules(84): hash_bit_length__base__last may_be_replaced_by 2147483647. initialize_t_rules(85): skein_512_state_words_index__size >= 0 may_be_deduced. initialize_t_rules(86): skein_512_state_words_index__first may_be_replaced_by 0. initialize_t_rules(87): skein_512_state_words_index__last may_be_replaced_by 7. initialize_t_rules(88): skein_512_state_words_index__base__first may_be_replaced_by -2147483648. initialize_t_rules(89): skein_512_state_words_index__base__last may_be_replaced_by 2147483647. initialize_t_rules(90): skein_512_block_bytes_index__size >= 0 may_be_deduced. initialize_t_rules(91): skein_512_block_bytes_index__first may_be_replaced_by 0. initialize_t_rules(92): skein_512_block_bytes_index__last may_be_replaced_by 63. initialize_t_rules(93): skein_512_block_bytes_index__base__first may_be_replaced_by -2147483648. initialize_t_rules(94): skein_512_block_bytes_index__base__last may_be_replaced_by 2147483647. initialize_t_rules(95): skein_512_context__size >= 0 may_be_deduced. initialize_t_rules(96): A = B may_be_deduced_from [goal(checktype(A,skein_512_context)), goal(checktype(B,skein_512_context)), fld_h(A) = fld_h(B), fld_x(A) = fld_x(B), fld_b(A) = fld_b(B)]. initialize_t_rules(97): modifier_words_index__size >= 0 may_be_deduced. initialize_t_rules(98): modifier_words_index__first may_be_replaced_by 0. initialize_t_rules(99): modifier_words_index__last may_be_replaced_by 1. initialize_t_rules(100): modifier_words_index__base__first may_be_replaced_by -2147483648. initialize_t_rules(101): modifier_words_index__base__last may_be_replaced_by 2147483647. initialize_t_rules(102): tweak_value__size >= 0 may_be_deduced. initialize_t_rules(103): tweak_value__size may_be_replaced_by 128. initialize_t_rules(104): A = B may_be_deduced_from [goal(checktype(A,tweak_value)), goal(checktype(B,tweak_value)), fld_byte_count_lsb(A) = fld_byte_count_lsb(B), fld_byte_count_msb(A) = fld_byte_count_msb(B), fld_reserved(A) = fld_reserved(B), fld_tree_level(A) = fld_tree_level(B), fld_bit_pad(A) = fld_bit_pad(B), fld_field_type(A) = fld_field_type(B), fld_first_block(A) = fld_first_block(B), fld_final_block(A) = fld_final_block(B)]. initialize_t_rules(105): context_header__size >= 0 may_be_deduced. 
initialize_t_rules(106): A = B may_be_deduced_from [goal(checktype(A,context_header)), goal(checktype(B,context_header)), fld_tweak_words(A) = fld_tweak_words(B), fld_hash_bit_len(A) = fld_hash_bit_len(B), fld_byte_count(A) = fld_byte_count(B)]. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/inject_key.rls0000644000175000017500000001020311712513676032423 0ustar eugeneugen /*********************************************************/ /*Proof Rule Declarations*/ /*Examiner Pro Edition*/ /*********************************************************/ /*procedure Skein.Skein_512_Process_Block.Inject_Key*/ rule_family inject_key_rules: X requires [X:any] & X <= Y requires [X:ire, Y:ire] & X >= Y requires [X:ire, Y:ire]. inject_key_rules(1): wcnt may_be_replaced_by 8. inject_key_rules(2): ks_modulus may_be_replaced_by 9. inject_key_rules(3): integer__size >= 0 may_be_deduced. inject_key_rules(4): integer__first may_be_replaced_by -2147483648. inject_key_rules(5): integer__last may_be_replaced_by 2147483647. inject_key_rules(6): integer__base__first may_be_replaced_by -2147483648. inject_key_rules(7): integer__base__last may_be_replaced_by 2147483647. inject_key_rules(8): natural__size >= 0 may_be_deduced. inject_key_rules(9): natural__first may_be_replaced_by 0. inject_key_rules(10): natural__last may_be_replaced_by 2147483647. inject_key_rules(11): natural__base__first may_be_replaced_by -2147483648. inject_key_rules(12): natural__base__last may_be_replaced_by 2147483647. inject_key_rules(13): interfaces__unsigned_64__size >= 0 may_be_deduced. inject_key_rules(14): interfaces__unsigned_64__size may_be_replaced_by 64. inject_key_rules(15): interfaces__unsigned_64__first may_be_replaced_by 0. inject_key_rules(16): interfaces__unsigned_64__last may_be_replaced_by 18446744073709551615. inject_key_rules(17): interfaces__unsigned_64__base__first may_be_replaced_by 0. inject_key_rules(18): interfaces__unsigned_64__base__last may_be_replaced_by 18446744073709551615. inject_key_rules(19): interfaces__unsigned_64__modulus may_be_replaced_by 18446744073709551616. inject_key_rules(20): spark__unsigned__u64__size >= 0 may_be_deduced. inject_key_rules(21): spark__unsigned__u64__first may_be_replaced_by 0. inject_key_rules(22): spark__unsigned__u64__last may_be_replaced_by 18446744073709551615. inject_key_rules(23): spark__unsigned__u64__base__first may_be_replaced_by 0. inject_key_rules(24): spark__unsigned__u64__base__last may_be_replaced_by 18446744073709551615. inject_key_rules(25): spark__unsigned__u64__modulus may_be_replaced_by 18446744073709551616. inject_key_rules(26): spark__crypto__i3__size >= 0 may_be_deduced. inject_key_rules(27): spark__crypto__i3__first may_be_replaced_by 0. inject_key_rules(28): spark__crypto__i3__last may_be_replaced_by 2. inject_key_rules(29): spark__crypto__i3__base__first may_be_replaced_by -2147483648. inject_key_rules(30): spark__crypto__i3__base__last may_be_replaced_by 2147483647. inject_key_rules(31): spark__crypto__i8__size >= 0 may_be_deduced. inject_key_rules(32): spark__crypto__i8__first may_be_replaced_by 0. inject_key_rules(33): spark__crypto__i8__last may_be_replaced_by 7. inject_key_rules(34): spark__crypto__i8__base__first may_be_replaced_by -2147483648. inject_key_rules(35): spark__crypto__i8__base__last may_be_replaced_by 2147483647. inject_key_rules(36): spark__crypto__i9__size >= 0 may_be_deduced. inject_key_rules(37): spark__crypto__i9__first may_be_replaced_by 0. 
inject_key_rules(38): spark__crypto__i9__last may_be_replaced_by 8. inject_key_rules(39): spark__crypto__i9__base__first may_be_replaced_by -2147483648. inject_key_rules(40): spark__crypto__i9__base__last may_be_replaced_by 2147483647. inject_key_rules(41): spark__crypto__word_count_t__size >= 0 may_be_deduced. inject_key_rules(42): spark__crypto__word_count_t__first may_be_replaced_by 0. inject_key_rules(43): spark__crypto__word_count_t__last may_be_replaced_by 268435455. inject_key_rules(44): spark__crypto__word_count_t__base__first may_be_replaced_by -2147483648. inject_key_rules(45): spark__crypto__word_count_t__base__last may_be_replaced_by 2147483647. inject_key_rules(46): injection_range__size >= 0 may_be_deduced. inject_key_rules(47): injection_range__first may_be_replaced_by 0. inject_key_rules(48): injection_range__last may_be_replaced_by 7. inject_key_rules(49): injection_range__base__first may_be_replaced_by -2147483648. inject_key_rules(50): injection_range__base__last may_be_replaced_by 2147483647. ././@LongLink0000000000000000000000000000015100000000000011562 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/update_context.vlgspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/update_context0000644000175000017500000000326311712765060032532 0ustar eugeneugen Non-option args: update_context Option args: -fuse-concls \ -decls=[SUPRESSED IN PLAIN MODE] \ -unique-working-files \ -rules=[SUPRESSED IN PLAIN MODE] \ -rules=[SUPRESSED IN PLAIN MODE] \ -elim-enums \ -ground-eval-exp \ -abstract-exp \ -abstract-divmod \ -utick \ -gtick \ -longtick \ -echo-final-stats \ -csv-reports-include-unit-kind \ -level=warning \ -bit-type \ -bit-type-bool-eq-to-iff \ -refine-types \ -refine-int-subrange-type \ -abstract-arrays-records-late \ -elim-array-constructors \ -add-array-select-box-update-axioms \ -abstract-array-box-updates \ -add-array-select-update-axioms \ -abstract-array-select-updates \ -abstract-array-types \ -abstract-record-types \ -abstract-bit-ops \ -abstract-bit-valued-eqs \ -abstract-bit-valued-int-le \ -elim-bit-type-and-consts \ -abstract-reals \ -lift-quants \ -strip-quantifier-patterns \ -elim-type-aliases \ -interface-mode=smtlib \ -refine-bit-type-as-int-subtype \ -refine-bit-eq-equiv \ -elim-record-constructors \ -add-record-select-update-axioms \ -abstract-record-selects-updates \ -logic=AUFNIRA \ -report=update_context \ -prover-command=[SUPRESSED IN PLAIN MODE] \ -siv \ -plain \ -read-unit-rlu-files \ -read-directory-rlu-files \ -delete-rules-with-undeclared-ids \ Total ERROR messages: 0 Total WARNING messages: 0 Summary Stats: true: 0 (-nan%) unproven: 0 (-nan%) error: 0 (-nan%) total: 0 ././@LongLink0000000000000000000000000000016200000000000011564 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_key_schedule.vcgspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_key0000644000175000017500000007340711712513676032530 0ustar eugeneugen ******************************************************* Semantic Analysis of SPARK Text Examiner Pro Edition ******************************************************* procedure Skein.Skein_512_Process_Block.Initialize_Key_Schedule For path(s) from start to run-time check associated with statement of line 522: procedure_initialize_key_schedule_1. H1: true . 
H2: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H3: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H4: fld_byte_count(fld_h(ctx)) >= natural__first . H5: fld_byte_count(fld_h(ctx)) <= natural__last . H6: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H7: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H8: true . H9: true . H10: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H11: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H12: true . H13: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H14: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H15: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H16: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H17: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H18: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H19: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H20: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . -> C1: skein_ks_parity >= spark__unsigned__u64__first . C2: skein_ks_parity <= spark__unsigned__u64__last . C3: wcnt >= spark__crypto__i9__first . C4: wcnt <= spark__crypto__i9__last . For path(s) from start to run-time check associated with statement of line 525: procedure_initialize_key_schedule_2. H1: true . H2: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H3: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H4: fld_byte_count(fld_h(ctx)) >= natural__first . H5: fld_byte_count(fld_h(ctx)) <= natural__last . H6: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H7: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H8: true . H9: true . H10: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H11: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H12: true . H13: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H14: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H15: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H16: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H17: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H18: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H19: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H20: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . 
H21: skein_ks_parity >= spark__unsigned__u64__first . H22: skein_ks_parity <= spark__unsigned__u64__last . H23: wcnt >= spark__crypto__i9__first . H24: wcnt <= spark__crypto__i9__last . -> C1: element(fld_x(ctx), [spark__crypto__i8__first]) >= spark__unsigned__u64__first . C2: element(fld_x(ctx), [spark__crypto__i8__first]) <= spark__unsigned__u64__last . C3: spark__crypto__i8__first >= skein_512_state_words_index__first . C4: spark__crypto__i8__first <= skein_512_state_words_index__last . C5: spark__crypto__i8__first >= spark__crypto__i9__first . C6: spark__crypto__i8__first <= spark__crypto__i9__last . For path(s) from assertion of line 527 to run-time check associated with statement of line 525: procedure_initialize_key_schedule_3. H1: for_all(j_: integer, ((j_ >= spark__crypto__i8__first) and ( j_ <= loop__1__i)) -> ((element(ks, [j_]) >= spark__unsigned__u64__first) and (element(ks, [j_]) <= spark__unsigned__u64__last))) . H2: element(ks, [wcnt]) >= spark__unsigned__u64__first . H3: element(ks, [wcnt]) <= spark__unsigned__u64__last . H4: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H5: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H6: fld_byte_count(fld_h(ctx)) >= natural__first . H7: fld_byte_count(fld_h(ctx)) <= natural__last . H8: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H9: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H10: true . H11: true . H12: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H13: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H14: true . H15: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H16: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H17: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H18: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H19: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H20: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H21: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H22: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H23: loop__1__i >= spark__crypto__i8__first . H24: loop__1__i <= spark__crypto__i8__last . H25: loop__1__i >= spark__crypto__i8__first . H26: loop__1__i <= spark__crypto__i8__last . H27: not (loop__1__i = spark__crypto__i8__last) . -> C1: element(fld_x(ctx), [loop__1__i + 1]) >= spark__unsigned__u64__first . C2: element(fld_x(ctx), [loop__1__i + 1]) <= spark__unsigned__u64__last . C3: loop__1__i + 1 >= skein_512_state_words_index__first . C4: loop__1__i + 1 <= skein_512_state_words_index__last . C5: loop__1__i + 1 >= spark__crypto__i9__first . C6: loop__1__i + 1 <= spark__crypto__i9__last . For path(s) from start to run-time check associated with statement of line 526: procedure_initialize_key_schedule_4. H1: true . 
H2: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H3: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H4: fld_byte_count(fld_h(ctx)) >= natural__first . H5: fld_byte_count(fld_h(ctx)) <= natural__last . H6: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H7: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H8: true . H9: true . H10: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H11: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H12: true . H13: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H14: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H15: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H16: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H17: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H18: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H19: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H20: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H21: skein_ks_parity >= spark__unsigned__u64__first . H22: skein_ks_parity <= spark__unsigned__u64__last . H23: wcnt >= spark__crypto__i9__first . H24: wcnt <= spark__crypto__i9__last . H25: element(fld_x(ctx), [spark__crypto__i8__first]) >= spark__unsigned__u64__first . H26: element(fld_x(ctx), [spark__crypto__i8__first]) <= spark__unsigned__u64__last . H27: spark__crypto__i8__first >= skein_512_state_words_index__first . H28: spark__crypto__i8__first <= skein_512_state_words_index__last . H29: spark__crypto__i8__first >= spark__crypto__i9__first . H30: spark__crypto__i8__first <= spark__crypto__i9__last . -> C1: bit__xor(element(update(update(ks, [wcnt], skein_ks_parity), [spark__crypto__i8__first], element(fld_x( ctx), [spark__crypto__i8__first])), [wcnt]), element(fld_x( ctx), [spark__crypto__i8__first])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(ks, [wcnt], skein_ks_parity), [spark__crypto__i8__first], element(fld_x( ctx), [spark__crypto__i8__first])), [wcnt]), element(fld_x( ctx), [spark__crypto__i8__first])) <= spark__unsigned__u64__last . C3: spark__crypto__i8__first >= skein_512_state_words_index__first . C4: spark__crypto__i8__first <= skein_512_state_words_index__last . C5: wcnt >= spark__crypto__i9__first . C6: wcnt <= spark__crypto__i9__last . C7: wcnt >= spark__crypto__i9__first . C8: wcnt <= spark__crypto__i9__last . For path(s) from assertion of line 527 to run-time check associated with statement of line 526: procedure_initialize_key_schedule_5. H1: for_all(j_: integer, ((j_ >= spark__crypto__i8__first) and ( j_ <= loop__1__i)) -> ((element(ks, [j_]) >= spark__unsigned__u64__first) and (element(ks, [j_]) <= spark__unsigned__u64__last))) . H2: element(ks, [wcnt]) >= spark__unsigned__u64__first . H3: element(ks, [wcnt]) <= spark__unsigned__u64__last . 
H4: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H5: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H6: fld_byte_count(fld_h(ctx)) >= natural__first . H7: fld_byte_count(fld_h(ctx)) <= natural__last . H8: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H9: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H10: true . H11: true . H12: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H13: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H14: true . H15: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H16: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H17: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H18: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H19: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H20: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H21: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H22: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H23: loop__1__i >= spark__crypto__i8__first . H24: loop__1__i <= spark__crypto__i8__last . H25: loop__1__i >= spark__crypto__i8__first . H26: loop__1__i <= spark__crypto__i8__last . H27: not (loop__1__i = spark__crypto__i8__last) . H28: element(fld_x(ctx), [loop__1__i + 1]) >= spark__unsigned__u64__first . H29: element(fld_x(ctx), [loop__1__i + 1]) <= spark__unsigned__u64__last . H30: loop__1__i + 1 >= skein_512_state_words_index__first . H31: loop__1__i + 1 <= skein_512_state_words_index__last . H32: loop__1__i + 1 >= spark__crypto__i9__first . H33: loop__1__i + 1 <= spark__crypto__i9__last . -> C1: bit__xor(element(update(ks, [loop__1__i + 1], element(fld_x( ctx), [loop__1__i + 1])), [wcnt]), element(fld_x( ctx), [loop__1__i + 1])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(ks, [loop__1__i + 1], element(fld_x( ctx), [loop__1__i + 1])), [wcnt]), element(fld_x( ctx), [loop__1__i + 1])) <= spark__unsigned__u64__last . C3: loop__1__i + 1 >= skein_512_state_words_index__first . C4: loop__1__i + 1 <= skein_512_state_words_index__last . C5: wcnt >= spark__crypto__i9__first . C6: wcnt <= spark__crypto__i9__last . C7: wcnt >= spark__crypto__i9__first . C8: wcnt <= spark__crypto__i9__last . For path(s) from start to assertion of line 527: procedure_initialize_key_schedule_6. H1: true . H2: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H3: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H4: fld_byte_count(fld_h(ctx)) >= natural__first . H5: fld_byte_count(fld_h(ctx)) <= natural__last . 
H6: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H7: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H8: true . H9: true . H10: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H11: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H12: true . H13: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H14: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H15: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H16: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H17: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H18: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H19: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H20: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H21: skein_ks_parity >= spark__unsigned__u64__first . H22: skein_ks_parity <= spark__unsigned__u64__last . H23: wcnt >= spark__crypto__i9__first . H24: wcnt <= spark__crypto__i9__last . H25: element(fld_x(ctx), [spark__crypto__i8__first]) >= spark__unsigned__u64__first . H26: element(fld_x(ctx), [spark__crypto__i8__first]) <= spark__unsigned__u64__last . H27: spark__crypto__i8__first >= skein_512_state_words_index__first . H28: spark__crypto__i8__first <= skein_512_state_words_index__last . H29: spark__crypto__i8__first >= spark__crypto__i9__first . H30: spark__crypto__i8__first <= spark__crypto__i9__last . H31: bit__xor(element(update(update(ks, [wcnt], skein_ks_parity), [spark__crypto__i8__first], element(fld_x( ctx), [spark__crypto__i8__first])), [wcnt]), element(fld_x( ctx), [spark__crypto__i8__first])) >= spark__unsigned__u64__first . H32: bit__xor(element(update(update(ks, [wcnt], skein_ks_parity), [spark__crypto__i8__first], element(fld_x( ctx), [spark__crypto__i8__first])), [wcnt]), element(fld_x( ctx), [spark__crypto__i8__first])) <= spark__unsigned__u64__last . H33: spark__crypto__i8__first >= skein_512_state_words_index__first . H34: spark__crypto__i8__first <= skein_512_state_words_index__last . H35: wcnt >= spark__crypto__i9__first . H36: wcnt <= spark__crypto__i9__last . H37: wcnt >= spark__crypto__i9__first . H38: wcnt <= spark__crypto__i9__last . -> C1: for_all(j_: integer, ((j_ >= spark__crypto__i8__first) and ( j_ <= spark__crypto__i8__first)) -> ((element(update(update(update( ks, [wcnt], skein_ks_parity), [spark__crypto__i8__first], element( fld_x(ctx), [spark__crypto__i8__first])), [wcnt], bit__xor(element(update(update( ks, [wcnt], skein_ks_parity), [spark__crypto__i8__first], element( fld_x(ctx), [spark__crypto__i8__first])), [wcnt]), element( fld_x(ctx), [spark__crypto__i8__first]))), [j_]) >= spark__unsigned__u64__first) and (element(update(update(update( ks, [wcnt], skein_ks_parity), [spark__crypto__i8__first], element( fld_x(ctx), [spark__crypto__i8__first])), [wcnt], bit__xor(element(update(update( ks, [wcnt], skein_ks_parity), [spark__crypto__i8__first], element( fld_x(ctx), [spark__crypto__i8__first])), [wcnt]), element( fld_x(ctx), [spark__crypto__i8__first]))), [j_]) <= spark__unsigned__u64__last))) . 
C2: element(update(update(update(ks, [wcnt], skein_ks_parity), [spark__crypto__i8__first], element(fld_x( ctx), [spark__crypto__i8__first])), [wcnt], bit__xor(element(update(update( ks, [wcnt], skein_ks_parity), [spark__crypto__i8__first], element( fld_x(ctx), [spark__crypto__i8__first])), [wcnt]), element( fld_x(ctx), [spark__crypto__i8__first]))), [wcnt]) >= spark__unsigned__u64__first . C3: element(update(update(update(ks, [wcnt], skein_ks_parity), [spark__crypto__i8__first], element(fld_x( ctx), [spark__crypto__i8__first])), [wcnt], bit__xor(element(update(update( ks, [wcnt], skein_ks_parity), [spark__crypto__i8__first], element( fld_x(ctx), [spark__crypto__i8__first])), [wcnt]), element( fld_x(ctx), [spark__crypto__i8__first]))), [wcnt]) <= spark__unsigned__u64__last . C4: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . C5: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . C6: fld_byte_count(fld_h(ctx)) >= natural__first . C7: fld_byte_count(fld_h(ctx)) <= natural__last . C8: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . C9: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . C10: true . C11: true . C12: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . C13: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . C14: true . C15: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . C16: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . C17: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . C18: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . C19: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . C20: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . C21: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . C22: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . C23: spark__crypto__i8__first >= spark__crypto__i8__first . C24: spark__crypto__i8__first <= spark__crypto__i8__last . C25: spark__crypto__i8__first >= spark__crypto__i8__first . C26: spark__crypto__i8__first <= spark__crypto__i8__last . For path(s) from assertion of line 527 to assertion of line 527: procedure_initialize_key_schedule_7. H1: for_all(j_: integer, ((j_ >= spark__crypto__i8__first) and ( j_ <= loop__1__i)) -> ((element(ks, [j_]) >= spark__unsigned__u64__first) and (element(ks, [j_]) <= spark__unsigned__u64__last))) . H2: element(ks, [wcnt]) >= spark__unsigned__u64__first . H3: element(ks, [wcnt]) <= spark__unsigned__u64__last . H4: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . 
H5: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H6: fld_byte_count(fld_h(ctx)) >= natural__first . H7: fld_byte_count(fld_h(ctx)) <= natural__last . H8: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H9: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H10: true . H11: true . H12: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H13: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H14: true . H15: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H16: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H17: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H18: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H19: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H20: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H21: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H22: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H23: loop__1__i >= spark__crypto__i8__first . H24: loop__1__i <= spark__crypto__i8__last . H25: loop__1__i >= spark__crypto__i8__first . H26: loop__1__i <= spark__crypto__i8__last . H27: not (loop__1__i = spark__crypto__i8__last) . H28: element(fld_x(ctx), [loop__1__i + 1]) >= spark__unsigned__u64__first . H29: element(fld_x(ctx), [loop__1__i + 1]) <= spark__unsigned__u64__last . H30: loop__1__i + 1 >= skein_512_state_words_index__first . H31: loop__1__i + 1 <= skein_512_state_words_index__last . H32: loop__1__i + 1 >= spark__crypto__i9__first . H33: loop__1__i + 1 <= spark__crypto__i9__last . H34: bit__xor(element(update(ks, [loop__1__i + 1], element(fld_x( ctx), [loop__1__i + 1])), [wcnt]), element(fld_x( ctx), [loop__1__i + 1])) >= spark__unsigned__u64__first . H35: bit__xor(element(update(ks, [loop__1__i + 1], element(fld_x( ctx), [loop__1__i + 1])), [wcnt]), element(fld_x( ctx), [loop__1__i + 1])) <= spark__unsigned__u64__last . H36: loop__1__i + 1 >= skein_512_state_words_index__first . H37: loop__1__i + 1 <= skein_512_state_words_index__last . H38: wcnt >= spark__crypto__i9__first . H39: wcnt <= spark__crypto__i9__last . H40: wcnt >= spark__crypto__i9__first . H41: wcnt <= spark__crypto__i9__last . -> C1: for_all(j_: integer, ((j_ >= spark__crypto__i8__first) and ( j_ <= loop__1__i + 1)) -> ((element(update(update( ks, [loop__1__i + 1], element(fld_x(ctx), [ loop__1__i + 1])), [wcnt], bit__xor(element(update( ks, [loop__1__i + 1], element(fld_x(ctx), [ loop__1__i + 1])), [wcnt]), element(fld_x(ctx), [ loop__1__i + 1]))), [j_]) >= spark__unsigned__u64__first) and (element(update(update( ks, [loop__1__i + 1], element(fld_x(ctx), [ loop__1__i + 1])), [wcnt], bit__xor(element(update( ks, [loop__1__i + 1], element(fld_x(ctx), [ loop__1__i + 1])), [wcnt]), element(fld_x(ctx), [ loop__1__i + 1]))), [j_]) <= spark__unsigned__u64__last))) . C2: element(update(update(ks, [loop__1__i + 1], element(fld_x( ctx), [loop__1__i + 1])), [wcnt], bit__xor(element(update( ks, [loop__1__i + 1], element(fld_x(ctx), [ loop__1__i + 1])), [wcnt]), element(fld_x(ctx), [ loop__1__i + 1]))), [wcnt]) >= spark__unsigned__u64__first . 
C3: element(update(update(ks, [loop__1__i + 1], element(fld_x( ctx), [loop__1__i + 1])), [wcnt], bit__xor(element(update( ks, [loop__1__i + 1], element(fld_x(ctx), [ loop__1__i + 1])), [wcnt]), element(fld_x(ctx), [ loop__1__i + 1]))), [wcnt]) <= spark__unsigned__u64__last . C4: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . C5: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . C6: fld_byte_count(fld_h(ctx)) >= natural__first . C7: fld_byte_count(fld_h(ctx)) <= natural__last . C8: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . C9: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . C10: true . C11: true . C12: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . C13: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . C14: true . C15: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . C16: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . C17: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . C18: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . C19: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . C20: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . C21: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . C22: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . C23: loop__1__i + 1 >= spark__crypto__i8__first . C24: loop__1__i + 1 <= spark__crypto__i8__last . C25: loop__1__i + 1 >= spark__crypto__i8__first . C26: loop__1__i + 1 <= spark__crypto__i8__last . For path(s) from assertion of line 527 to finish: procedure_initialize_key_schedule_8. *** true . /* trivially true VC removed by Examiner */ ././@LongLink0000000000000000000000000000015200000000000011563 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/threefish_block.sivspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/threefish_bloc0000644000175000017500000000313011712513676032461 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Threefish_Block For path(s) from start to run-time check associated with statement of line 584: procedure_threefish_block_1. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 584: procedure_threefish_block_2. *** true . /* all conclusions proved */ For path(s) from start to default assertion of line 584: procedure_threefish_block_3. *** true . /* all conclusions proved */ For path(s) from default assertion of line 584 to default assertion of line 584: procedure_threefish_block_4. *** true . 
/* all conclusions proved */ For path(s) from default assertion of line 584 to run-time check associated with statement of line 589: procedure_threefish_block_5. *** true . /* all conclusions proved */ For path(s) from default assertion of line 584 to run-time check associated with statement of line 594: procedure_threefish_block_6. *** true . /* all conclusions proved */ For path(s) from start to finish: procedure_threefish_block_7. *** true . /* all conclusions proved */ For path(s) from default assertion of line 584 to finish: procedure_threefish_block_8. *** true . /* all conclusions proved */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_5.rls0000644000175000017500000000622211712513676031660 0ustar eugeneugen /*********************************************************/ /*Proof Rule Declarations*/ /*Examiner Pro Edition*/ /*********************************************************/ /*procedure Skein.Skein_512_Process_Block.Round_5*/ rule_family round_5_rules: X requires [X:any] & X <= Y requires [X:ire, Y:ire] & X >= Y requires [X:ire, Y:ire]. round_5_rules(1): r_512_4_0 may_be_replaced_by 39. round_5_rules(2): r_512_4_1 may_be_replaced_by 30. round_5_rules(3): r_512_4_2 may_be_replaced_by 34. round_5_rules(4): r_512_4_3 may_be_replaced_by 24. round_5_rules(5): integer__size >= 0 may_be_deduced. round_5_rules(6): integer__first may_be_replaced_by -2147483648. round_5_rules(7): integer__last may_be_replaced_by 2147483647. round_5_rules(8): integer__base__first may_be_replaced_by -2147483648. round_5_rules(9): integer__base__last may_be_replaced_by 2147483647. round_5_rules(10): interfaces__unsigned_64__size >= 0 may_be_deduced. round_5_rules(11): interfaces__unsigned_64__size may_be_replaced_by 64. round_5_rules(12): interfaces__unsigned_64__first may_be_replaced_by 0. round_5_rules(13): interfaces__unsigned_64__last may_be_replaced_by 18446744073709551615. round_5_rules(14): interfaces__unsigned_64__base__first may_be_replaced_by 0. round_5_rules(15): interfaces__unsigned_64__base__last may_be_replaced_by 18446744073709551615. round_5_rules(16): interfaces__unsigned_64__modulus may_be_replaced_by 18446744073709551616. round_5_rules(17): spark__unsigned__u64__size >= 0 may_be_deduced. round_5_rules(18): spark__unsigned__u64__first may_be_replaced_by 0. round_5_rules(19): spark__unsigned__u64__last may_be_replaced_by 18446744073709551615. round_5_rules(20): spark__unsigned__u64__base__first may_be_replaced_by 0. round_5_rules(21): spark__unsigned__u64__base__last may_be_replaced_by 18446744073709551615. round_5_rules(22): spark__unsigned__u64__modulus may_be_replaced_by 18446744073709551616. round_5_rules(23): spark__unsigned__shift_count__size >= 0 may_be_deduced. round_5_rules(24): spark__unsigned__shift_count__first may_be_replaced_by 0. round_5_rules(25): spark__unsigned__shift_count__last may_be_replaced_by 64. round_5_rules(26): spark__unsigned__shift_count__base__first may_be_replaced_by -2147483648. round_5_rules(27): spark__unsigned__shift_count__base__last may_be_replaced_by 2147483647. round_5_rules(28): spark__crypto__i8__size >= 0 may_be_deduced. round_5_rules(29): spark__crypto__i8__first may_be_replaced_by 0. round_5_rules(30): spark__crypto__i8__last may_be_replaced_by 7. round_5_rules(31): spark__crypto__i8__base__first may_be_replaced_by -2147483648. round_5_rules(32): spark__crypto__i8__base__last may_be_replaced_by 2147483647. round_5_rules(33): spark__crypto__word_count_t__size >= 0 may_be_deduced. 
round_5_rules(34): spark__crypto__word_count_t__first may_be_replaced_by 0. round_5_rules(35): spark__crypto__word_count_t__last may_be_replaced_by 268435455. round_5_rules(36): spark__crypto__word_count_t__base__first may_be_replaced_by -2147483648. round_5_rules(37): spark__crypto__word_count_t__base__last may_be_replaced_by 2147483647. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_5.fdl0000644000175000017500000000577111712513676031635 0ustar eugeneugen {*******************************************************} {FDL Declarations} {Examiner Pro Edition} {*******************************************************} {procedure Skein.Skein_512_Process_Block.Round_5} title procedure round_5; function round__(real) : integer; type interfaces__unsigned_64 = integer; type spark__unsigned__u64 = integer; type spark__unsigned__shift_count = integer; type spark__crypto__i8 = integer; type spark__crypto__word_count_t = integer; type spark__crypto__u64_seq = array [ spark__crypto__word_count_t] of spark__unsigned__u64; const r_512_4_0 : integer = pending; const r_512_4_1 : integer = pending; const r_512_4_2 : integer = pending; const r_512_4_3 : integer = pending; const spark__crypto__word_count_t__base__first : integer = pending; const spark__crypto__word_count_t__base__last : integer = pending; const spark__crypto__i8__base__first : integer = pending; const spark__crypto__i8__base__last : integer = pending; const spark__unsigned__shift_count__base__first : integer = pending; const spark__unsigned__shift_count__base__last : integer = pending; const spark__unsigned__u64__base__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__base__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__last : interfaces__unsigned_64 = pending; const integer__base__first : integer = pending; const integer__base__last : integer = pending; const spark__crypto__word_count_t__first : integer = pending; const spark__crypto__word_count_t__last : integer = pending; const spark__crypto__word_count_t__size : integer = pending; const spark__crypto__i8__first : integer = pending; const spark__crypto__i8__last : integer = pending; const spark__crypto__i8__size : integer = pending; const spark__unsigned__shift_count__first : integer = pending; const spark__unsigned__shift_count__last : integer = pending; const spark__unsigned__shift_count__size : integer = pending; const spark__unsigned__u64__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__last : interfaces__unsigned_64 = pending; const spark__unsigned__u64__modulus : integer = pending; const spark__unsigned__u64__size : integer = pending; const interfaces__unsigned_64__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__modulus : integer = pending; const interfaces__unsigned_64__size : integer = pending; const integer__first : integer = pending; const integer__last : integer = pending; const integer__size : integer = pending; var x : spark__crypto__u64_seq; function spark__unsigned__rotate_left_64( interfaces__unsigned_64, integer) : interfaces__unsigned_64; end; ././@LongLink0000000000000000000000000000016100000000000011563 Lustar 
rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/do_first_key_injection.sivspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/do_first_key_i0000644000175000017500000000173511712513676032503 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Do_First_Key_Injection For path(s) from start to run-time check associated with statement of line 564: procedure_do_first_key_injection_1. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 572: procedure_do_first_key_injection_2. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 573: procedure_do_first_key_injection_3. *** true . /* all conclusions proved */ For path(s) from start to finish: procedure_do_first_key_injection_4. *** true . /* all conclusions proved */ ././@LongLink0000000000000000000000000000015000000000000011561 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_ts.logspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_ts.0000644000175000017500000000057511712513676032440 0ustar eugeneugenSPARK Simplifier Pro Edition Reading initialize_ts.fdl (for inherited FDL type declarations) Processing initialize_ts.vcg ... Simplified VC: 1 - All conclusions proved Simplified VC: 2 - All conclusions proved Simplified VC: 3 - All conclusions proved Simplified VC: 4 - All conclusions proved Automatic simplification completed. Simplified output sent to initialize_ts.siv. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_5.siv0000644000175000017500000000710411712513676031661 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Round_5 For path(s) from start to run-time check associated with statement of line 396: procedure_round_5_1. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 397: procedure_round_5_2. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 397: procedure_round_5_3. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 398: procedure_round_5_4. *** true . /* all conclusions proved */ For path(s) from start to assertion of line 401: procedure_round_5_5. *** true . /* all conclusions proved */ For path(s) from assertion of line 401 to run-time check associated with statement of line 403: procedure_round_5_6. *** true . /* all conclusions proved */ For path(s) from assertion of line 401 to run-time check associated with statement of line 404: procedure_round_5_7. *** true . /* all conclusions proved */ For path(s) from assertion of line 401 to run-time check associated with statement of line 404: procedure_round_5_8. *** true . 
/* all conclusions proved */ For path(s) from assertion of line 401 to run-time check associated with statement of line 405: procedure_round_5_9. *** true . /* all conclusions proved */ For path(s) from assertion of line 401 to assertion of line 407: procedure_round_5_10. *** true . /* all conclusions proved */ For path(s) from assertion of line 407 to run-time check associated with statement of line 409: procedure_round_5_11. *** true . /* all conclusions proved */ For path(s) from assertion of line 407 to run-time check associated with statement of line 410: procedure_round_5_12. *** true . /* all conclusions proved */ For path(s) from assertion of line 407 to run-time check associated with statement of line 410: procedure_round_5_13. *** true . /* all conclusions proved */ For path(s) from assertion of line 407 to run-time check associated with statement of line 411: procedure_round_5_14. *** true . /* all conclusions proved */ For path(s) from assertion of line 407 to assertion of line 413: procedure_round_5_15. *** true . /* all conclusions proved */ For path(s) from assertion of line 413 to run-time check associated with statement of line 415: procedure_round_5_16. *** true . /* all conclusions proved */ For path(s) from assertion of line 413 to run-time check associated with statement of line 416: procedure_round_5_17. *** true . /* all conclusions proved */ For path(s) from assertion of line 413 to run-time check associated with statement of line 416: procedure_round_5_18. *** true . /* all conclusions proved */ For path(s) from assertion of line 413 to run-time check associated with statement of line 417: procedure_round_5_19. *** true . /* all conclusions proved */ For path(s) from assertion of line 413 to finish: procedure_round_5_20. *** true . /* all conclusions proved */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_2.siv0000644000175000017500000000710411712513676031656 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Round_2 For path(s) from start to run-time check associated with statement of line 305: procedure_round_2_1. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 306: procedure_round_2_2. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 306: procedure_round_2_3. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 307: procedure_round_2_4. *** true . /* all conclusions proved */ For path(s) from start to assertion of line 310: procedure_round_2_5. *** true . /* all conclusions proved */ For path(s) from assertion of line 310 to run-time check associated with statement of line 312: procedure_round_2_6. *** true . /* all conclusions proved */ For path(s) from assertion of line 310 to run-time check associated with statement of line 313: procedure_round_2_7. *** true . /* all conclusions proved */ For path(s) from assertion of line 310 to run-time check associated with statement of line 313: procedure_round_2_8. *** true . /* all conclusions proved */ For path(s) from assertion of line 310 to run-time check associated with statement of line 314: procedure_round_2_9. *** true . 
/* all conclusions proved */ For path(s) from assertion of line 310 to assertion of line 316: procedure_round_2_10. *** true . /* all conclusions proved */ For path(s) from assertion of line 316 to run-time check associated with statement of line 318: procedure_round_2_11. *** true . /* all conclusions proved */ For path(s) from assertion of line 316 to run-time check associated with statement of line 319: procedure_round_2_12. *** true . /* all conclusions proved */ For path(s) from assertion of line 316 to run-time check associated with statement of line 319: procedure_round_2_13. *** true . /* all conclusions proved */ For path(s) from assertion of line 316 to run-time check associated with statement of line 320: procedure_round_2_14. *** true . /* all conclusions proved */ For path(s) from assertion of line 316 to assertion of line 322: procedure_round_2_15. *** true . /* all conclusions proved */ For path(s) from assertion of line 322 to run-time check associated with statement of line 324: procedure_round_2_16. *** true . /* all conclusions proved */ For path(s) from assertion of line 322 to run-time check associated with statement of line 325: procedure_round_2_17. *** true . /* all conclusions proved */ For path(s) from assertion of line 322 to run-time check associated with statement of line 325: procedure_round_2_18. *** true . /* all conclusions proved */ For path(s) from assertion of line 322 to run-time check associated with statement of line 326: procedure_round_2_19. *** true . /* all conclusions proved */ For path(s) from assertion of line 322 to finish: procedure_round_2_20. *** true . /* all conclusions proved */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/inject_key.vlg0000644000175000017500000000325311712765060032416 0ustar eugeneugen Non-option args: inject_key Option args: -fuse-concls \ -decls=[SUPRESSED IN PLAIN MODE] \ -unique-working-files \ -rules=[SUPRESSED IN PLAIN MODE] \ -rules=[SUPRESSED IN PLAIN MODE] \ -elim-enums \ -ground-eval-exp \ -abstract-exp \ -abstract-divmod \ -utick \ -gtick \ -longtick \ -echo-final-stats \ -csv-reports-include-unit-kind \ -level=warning \ -bit-type \ -bit-type-bool-eq-to-iff \ -refine-types \ -refine-int-subrange-type \ -abstract-arrays-records-late \ -elim-array-constructors \ -add-array-select-box-update-axioms \ -abstract-array-box-updates \ -add-array-select-update-axioms \ -abstract-array-select-updates \ -abstract-array-types \ -abstract-record-types \ -abstract-bit-ops \ -abstract-bit-valued-eqs \ -abstract-bit-valued-int-le \ -elim-bit-type-and-consts \ -abstract-reals \ -lift-quants \ -strip-quantifier-patterns \ -elim-type-aliases \ -interface-mode=smtlib \ -refine-bit-type-as-int-subtype \ -refine-bit-eq-equiv \ -elim-record-constructors \ -add-record-select-update-axioms \ -abstract-record-selects-updates \ -logic=AUFNIRA \ -report=inject_key \ -prover-command=[SUPRESSED IN PLAIN MODE] \ -siv \ -plain \ -read-unit-rlu-files \ -read-directory-rlu-files \ -delete-rules-with-undeclared-ids \ Total ERROR messages: 0 Total WARNING messages: 0 Summary Stats: true: 0 (-nan%) unproven: 0 (-nan%) error: 0 (-nan%) total: 0 spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_5.vct0000644000175000017500000000000011712513676031640 0ustar eugeneugen././@LongLink0000000000000000000000000000016200000000000011564 Lustar 
rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_key_schedule.vctspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_key0000644000175000017500000000006111712513676032512 0ustar eugeneugen,initialize_key_schedule,procedure,,,7,,true,,,, spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_1.log0000644000175000017500000000202611712513676031633 0ustar eugeneugenSPARK Simplifier Pro Edition Reading round_1.fdl (for inherited FDL type declarations) Processing round_1.vcg ... Simplified VC: 1 - All conclusions proved Simplified VC: 2 - All conclusions proved Simplified VC: 3 - All conclusions proved Simplified VC: 4 - All conclusions proved Simplified VC: 5 - All conclusions proved Simplified VC: 6 - All conclusions proved Simplified VC: 7 - All conclusions proved Simplified VC: 8 - All conclusions proved Simplified VC: 9 - All conclusions proved Simplified VC: 10 - All conclusions proved Simplified VC: 11 - All conclusions proved Simplified VC: 12 - All conclusions proved Simplified VC: 13 - All conclusions proved Simplified VC: 14 - All conclusions proved Simplified VC: 15 - All conclusions proved Simplified VC: 16 - All conclusions proved Simplified VC: 17 - All conclusions proved Simplified VC: 18 - All conclusions proved Simplified VC: 19 - All conclusions proved Simplified VC: 20 - All conclusions proved Automatic simplification completed. Simplified output sent to round_1.siv. ././@LongLink0000000000000000000000000000015000000000000011561 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_ts.sivspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_ts.0000644000175000017500000000166011712513676032434 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Initialize_TS For path(s) from start to run-time check associated with statement of line 547: procedure_initialize_ts_1. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 548: procedure_initialize_ts_2. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 550: procedure_initialize_ts_3. *** true . /* all conclusions proved */ For path(s) from start to finish: procedure_initialize_ts_4. *** true . /* all conclusions proved */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_2.vcg0000644000175000017500000015342411712513676031643 0ustar eugeneugen ******************************************************* Semantic Analysis of SPARK Text Examiner Pro Edition ******************************************************* procedure Skein.Skein_512_Process_Block.Round_2 For path(s) from start to run-time check associated with statement of line 305: procedure_round_2_1. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . 
C2: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 1 >= spark__crypto__i8__first . C4: 1 <= spark__crypto__i8__last . C5: 2 >= spark__crypto__i8__first . C6: 2 <= spark__crypto__i8__last . C7: 2 >= spark__crypto__i8__first . C8: 2 <= spark__crypto__i8__last . For path(s) from start to run-time check associated with statement of line 306: procedure_round_2_2. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . -> C1: r_512_1_0 >= spark__unsigned__shift_count__first . C2: r_512_1_0 <= spark__unsigned__shift_count__last . C3: element(update(x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . C4: element(update(x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . C5: 1 >= spark__crypto__i8__first . C6: 1 <= spark__crypto__i8__last . C7: 1 >= spark__crypto__i8__first . C8: 1 <= spark__crypto__i8__last . For path(s) from start to run-time check associated with statement of line 306: procedure_round_2_3. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . H11: r_512_1_0 >= spark__unsigned__shift_count__first . H12: r_512_1_0 <= spark__unsigned__shift_count__last . H13: element(update(x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . H14: element(update(x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . H15: 1 >= spark__crypto__i8__first . H16: 1 <= spark__crypto__i8__last . H17: 1 >= spark__crypto__i8__first . H18: 1 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_1_0) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_1_0) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_1_0) >= spark__unsigned__u64__first . 
C2: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_1_0) <= spark__unsigned__u64__last . For path(s) from start to run-time check associated with statement of line 307: procedure_round_2_4. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . H11: r_512_1_0 >= spark__unsigned__shift_count__first . H12: r_512_1_0 <= spark__unsigned__shift_count__last . H13: element(update(x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . H14: element(update(x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . H15: 1 >= spark__crypto__i8__first . H16: 1 <= spark__crypto__i8__last . H17: 1 >= spark__crypto__i8__first . H18: 1 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_1_0) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_1_0) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_1_0) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_1_0) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_1_0)), [1]), element(update(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_1_0)), [2])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_1_0)), [1]), element(update(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_1_0)), [2])) <= spark__unsigned__u64__last . C3: 2 >= spark__crypto__i8__first . C4: 2 <= spark__crypto__i8__last . C5: 1 >= spark__crypto__i8__first . C6: 1 <= spark__crypto__i8__last . 
C7: 1 >= spark__crypto__i8__first . C8: 1 <= spark__crypto__i8__last . For path(s) from start to assertion of line 310: procedure_round_2_5. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . H11: r_512_1_0 >= spark__unsigned__shift_count__first . H12: r_512_1_0 <= spark__unsigned__shift_count__last . H13: element(update(x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . H14: element(update(x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . H15: 1 >= spark__crypto__i8__first . H16: 1 <= spark__crypto__i8__last . H17: 1 >= spark__crypto__i8__first . H18: 1 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_1_0) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_1_0) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_1_0) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_1_0) <= spark__unsigned__u64__last . H23: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_1_0)), [1]), element(update(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_1_0)), [2])) >= spark__unsigned__u64__first . H24: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_1_0)), [1]), element(update(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_1_0)), [2])) <= spark__unsigned__u64__last . H25: 2 >= spark__crypto__i8__first . H26: 2 <= spark__crypto__i8__last . H27: 1 >= spark__crypto__i8__first . H28: 1 <= spark__crypto__i8__last . H29: 1 >= spark__crypto__i8__first . H30: 1 <= spark__crypto__i8__last . -> C1: true . 
C2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(update(update(update( x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_1_0)), [1], bit__xor(element(update(update( x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_1_0)), [1]), element(update(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_1_0)), [2]))), [i___1]) >= spark__unsigned__u64__first) and (element(update(update(update( x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_1_0)), [1], bit__xor(element(update(update( x, [2], (element(x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_1_0)), [1]), element(update(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_1_0)), [2]))), [i___1]) <= spark__unsigned__u64__last))) . For path(s) from assertion of line 310 to run-time check associated with statement of line 312: procedure_round_2_6. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 7 >= spark__crypto__i8__first . C4: 7 <= spark__crypto__i8__last . C5: 4 >= spark__crypto__i8__first . C6: 4 <= spark__crypto__i8__last . C7: 4 >= spark__crypto__i8__first . C8: 4 <= spark__crypto__i8__last . For path(s) from assertion of line 310 to run-time check associated with statement of line 313: procedure_round_2_7. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . -> C1: r_512_1_1 >= spark__unsigned__shift_count__first . C2: r_512_1_1 <= spark__unsigned__shift_count__last . 
C3: element(update(x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . C4: element(update(x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . C5: 7 >= spark__crypto__i8__first . C6: 7 <= spark__crypto__i8__last . C7: 7 >= spark__crypto__i8__first . C8: 7 <= spark__crypto__i8__last . For path(s) from assertion of line 310 to run-time check associated with statement of line 313: procedure_round_2_8. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . H11: r_512_1_1 >= spark__unsigned__shift_count__first . H12: r_512_1_1 <= spark__unsigned__shift_count__last . H13: element(update(x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . H14: element(update(x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . H15: 7 >= spark__crypto__i8__first . H16: 7 <= spark__crypto__i8__last . H17: 7 >= spark__crypto__i8__first . H18: 7 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_1_1) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_1_1) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_1_1) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_1_1) <= spark__unsigned__u64__last . For path(s) from assertion of line 310 to run-time check associated with statement of line 314: procedure_round_2_9. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . H11: r_512_1_1 >= spark__unsigned__shift_count__first . H12: r_512_1_1 <= spark__unsigned__shift_count__last . H13: element(update(x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . H15: 7 >= spark__crypto__i8__first . H16: 7 <= spark__crypto__i8__last . H17: 7 >= spark__crypto__i8__first . H18: 7 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_1_1) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_1_1) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_1_1) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_1_1) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_1_1)), [7]), element(update(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_1_1)), [4])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_1_1)), [7]), element(update(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_1_1)), [4])) <= spark__unsigned__u64__last . C3: 4 >= spark__crypto__i8__first . C4: 4 <= spark__crypto__i8__last . C5: 7 >= spark__crypto__i8__first . C6: 7 <= spark__crypto__i8__last . C7: 7 >= spark__crypto__i8__first . C8: 7 <= spark__crypto__i8__last . For path(s) from assertion of line 310 to assertion of line 316: procedure_round_2_10. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . H11: r_512_1_1 >= spark__unsigned__shift_count__first . H12: r_512_1_1 <= spark__unsigned__shift_count__last . H13: element(update(x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . H15: 7 >= spark__crypto__i8__first . H16: 7 <= spark__crypto__i8__last . H17: 7 >= spark__crypto__i8__first . H18: 7 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_1_1) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_1_1) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_1_1) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_1_1) <= spark__unsigned__u64__last . H23: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_1_1)), [7]), element(update(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_1_1)), [4])) >= spark__unsigned__u64__first . H24: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_1_1)), [7]), element(update(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_1_1)), [4])) <= spark__unsigned__u64__last . H25: 4 >= spark__crypto__i8__first . H26: 4 <= spark__crypto__i8__last . H27: 7 >= spark__crypto__i8__first . H28: 7 <= spark__crypto__i8__last . H29: 7 >= spark__crypto__i8__first . H30: 7 <= spark__crypto__i8__last . -> C1: true . 
C2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(update(update(update( x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_1_1)), [7], bit__xor(element(update(update( x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_1_1)), [7]), element(update(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_1_1)), [4]))), [i___1]) >= spark__unsigned__u64__first) and (element(update(update(update( x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_1_1)), [7], bit__xor(element(update(update( x, [4], (element(x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_1_1)), [7]), element(update(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_1_1)), [4]))), [i___1]) <= spark__unsigned__u64__last))) . For path(s) from assertion of line 316 to run-time check associated with statement of line 318: procedure_round_2_11. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 5 >= spark__crypto__i8__first . C4: 5 <= spark__crypto__i8__last . C5: 6 >= spark__crypto__i8__first . C6: 6 <= spark__crypto__i8__last . C7: 6 >= spark__crypto__i8__first . C8: 6 <= spark__crypto__i8__last . For path(s) from assertion of line 316 to run-time check associated with statement of line 319: procedure_round_2_12. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . -> C1: r_512_1_2 >= spark__unsigned__shift_count__first . C2: r_512_1_2 <= spark__unsigned__shift_count__last . 
C3: element(update(x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . C4: element(update(x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . C5: 5 >= spark__crypto__i8__first . C6: 5 <= spark__crypto__i8__last . C7: 5 >= spark__crypto__i8__first . C8: 5 <= spark__crypto__i8__last . For path(s) from assertion of line 316 to run-time check associated with statement of line 319: procedure_round_2_13. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . H11: r_512_1_2 >= spark__unsigned__shift_count__first . H12: r_512_1_2 <= spark__unsigned__shift_count__last . H13: element(update(x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . H14: element(update(x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . H15: 5 >= spark__crypto__i8__first . H16: 5 <= spark__crypto__i8__last . H17: 5 >= spark__crypto__i8__first . H18: 5 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_1_2) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_1_2) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_1_2) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_1_2) <= spark__unsigned__u64__last . For path(s) from assertion of line 316 to run-time check associated with statement of line 320: procedure_round_2_14. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . H11: r_512_1_2 >= spark__unsigned__shift_count__first . H12: r_512_1_2 <= spark__unsigned__shift_count__last . H13: element(update(x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . H15: 5 >= spark__crypto__i8__first . H16: 5 <= spark__crypto__i8__last . H17: 5 >= spark__crypto__i8__first . H18: 5 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_1_2) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_1_2) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_1_2) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_1_2) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_1_2)), [5]), element(update(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_1_2)), [6])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_1_2)), [5]), element(update(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_1_2)), [6])) <= spark__unsigned__u64__last . C3: 6 >= spark__crypto__i8__first . C4: 6 <= spark__crypto__i8__last . C5: 5 >= spark__crypto__i8__first . C6: 5 <= spark__crypto__i8__last . C7: 5 >= spark__crypto__i8__first . C8: 5 <= spark__crypto__i8__last . For path(s) from assertion of line 316 to assertion of line 322: procedure_round_2_15. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . H11: r_512_1_2 >= spark__unsigned__shift_count__first . H12: r_512_1_2 <= spark__unsigned__shift_count__last . H13: element(update(x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . H15: 5 >= spark__crypto__i8__first . H16: 5 <= spark__crypto__i8__last . H17: 5 >= spark__crypto__i8__first . H18: 5 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_1_2) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_1_2) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_1_2) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_1_2) <= spark__unsigned__u64__last . H23: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_1_2)), [5]), element(update(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_1_2)), [6])) >= spark__unsigned__u64__first . H24: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_1_2)), [5]), element(update(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_1_2)), [6])) <= spark__unsigned__u64__last . H25: 6 >= spark__crypto__i8__first . H26: 6 <= spark__crypto__i8__last . H27: 5 >= spark__crypto__i8__first . H28: 5 <= spark__crypto__i8__last . H29: 5 >= spark__crypto__i8__first . H30: 5 <= spark__crypto__i8__last . -> C1: true . 
C2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(update(update(update( x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_1_2)), [5], bit__xor(element(update(update( x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_1_2)), [5]), element(update(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_1_2)), [6]))), [i___1]) >= spark__unsigned__u64__first) and (element(update(update(update( x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_1_2)), [5], bit__xor(element(update(update( x, [6], (element(x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_1_2)), [5]), element(update(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_1_2)), [6]))), [i___1]) <= spark__unsigned__u64__last))) . For path(s) from assertion of line 322 to run-time check associated with statement of line 324: procedure_round_2_16. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 3 >= spark__crypto__i8__first . C4: 3 <= spark__crypto__i8__last . C5: 0 >= spark__crypto__i8__first . C6: 0 <= spark__crypto__i8__last . C7: 0 >= spark__crypto__i8__first . C8: 0 <= spark__crypto__i8__last . For path(s) from assertion of line 322 to run-time check associated with statement of line 325: procedure_round_2_17. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . -> C1: r_512_1_3 >= spark__unsigned__shift_count__first . C2: r_512_1_3 <= spark__unsigned__shift_count__last . 
C3: element(update(x, [0], (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . C4: element(update(x, [0], (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . C5: 3 >= spark__crypto__i8__first . C6: 3 <= spark__crypto__i8__last . C7: 3 >= spark__crypto__i8__first . C8: 3 <= spark__crypto__i8__last . For path(s) from assertion of line 322 to run-time check associated with statement of line 325: procedure_round_2_18. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . H11: r_512_1_3 >= spark__unsigned__shift_count__first . H12: r_512_1_3 <= spark__unsigned__shift_count__last . H13: element(update(x, [0], (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . H14: element(update(x, [0], (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . H15: 3 >= spark__crypto__i8__first . H16: 3 <= spark__crypto__i8__last . H17: 3 >= spark__crypto__i8__first . H18: 3 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_1_3) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_1_3) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_1_3) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_1_3) <= spark__unsigned__u64__last . For path(s) from assertion of line 322 to run-time check associated with statement of line 326: procedure_round_2_19. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . H11: r_512_1_3 >= spark__unsigned__shift_count__first . H12: r_512_1_3 <= spark__unsigned__shift_count__last . H13: element(update(x, [0], (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [0], (element(x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . H15: 3 >= spark__crypto__i8__first . H16: 3 <= spark__crypto__i8__last . H17: 3 >= spark__crypto__i8__first . H18: 3 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_1_3) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_1_3) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_1_3) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_1_3) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_1_3)), [3]), element(update(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_1_3)), [0])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_1_3)), [3]), element(update(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_1_3)), [0])) <= spark__unsigned__u64__last . C3: 0 >= spark__crypto__i8__first . C4: 0 <= spark__crypto__i8__last . C5: 3 >= spark__crypto__i8__first . C6: 3 <= spark__crypto__i8__last . C7: 3 >= spark__crypto__i8__first . C8: 3 <= spark__crypto__i8__last . For path(s) from assertion of line 322 to finish: procedure_round_2_20. *** true . 
/* trivially true VC removed by Examiner */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_5.vlg0000644000175000017500000000324511712765060031646 0ustar eugeneugen Non-option args: round_5 Option args: -fuse-concls \ -decls=[SUPRESSED IN PLAIN MODE] \ -unique-working-files \ -rules=[SUPRESSED IN PLAIN MODE] \ -rules=[SUPRESSED IN PLAIN MODE] \ -elim-enums \ -ground-eval-exp \ -abstract-exp \ -abstract-divmod \ -utick \ -gtick \ -longtick \ -echo-final-stats \ -csv-reports-include-unit-kind \ -level=warning \ -bit-type \ -bit-type-bool-eq-to-iff \ -refine-types \ -refine-int-subrange-type \ -abstract-arrays-records-late \ -elim-array-constructors \ -add-array-select-box-update-axioms \ -abstract-array-box-updates \ -add-array-select-update-axioms \ -abstract-array-select-updates \ -abstract-array-types \ -abstract-record-types \ -abstract-bit-ops \ -abstract-bit-valued-eqs \ -abstract-bit-valued-int-le \ -elim-bit-type-and-consts \ -abstract-reals \ -lift-quants \ -strip-quantifier-patterns \ -elim-type-aliases \ -interface-mode=smtlib \ -refine-bit-type-as-int-subtype \ -refine-bit-eq-equiv \ -elim-record-constructors \ -add-record-select-update-axioms \ -abstract-record-selects-updates \ -logic=AUFNIRA \ -report=round_5 \ -prover-command=[SUPRESSED IN PLAIN MODE] \ -siv \ -plain \ -read-unit-rlu-files \ -read-directory-rlu-files \ -delete-rules-with-undeclared-ids \ Total ERROR messages: 0 Total WARNING messages: 0 Summary Stats: true: 0 (-nan%) unproven: 0 (-nan%) error: 0 (-nan%) total: 0 ././@LongLink0000000000000000000000000000016100000000000011563 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/do_first_key_injection.slgspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/do_first_key_i0000644000175000017500000012331011712513676032475 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Do_First_Key_Injection @@@@@@@@@@ VC: procedure_do_first_key_injection_1. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(w, [i___1]) and element(w, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule do_first_key_rules(15). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(w, [i___1]) and element(w, [i___1]) <= spark__unsigned__u64__last) New H3: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) New H4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) New C1: (element(w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus >= 0 New C7: (element(w, [6]) + element(ks, [6])) mod interfaces__unsigned_64__modulus >= 0 New C13: (element(w, [5]) + element(ks, [5])) mod interfaces__unsigned_64__modulus >= 0 New C19: (element(w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus >= 0 New C25: (element(w, [3]) + element(ks, [3])) mod interfaces__unsigned_64__modulus >= 0 New C31: (element(w, [2]) + element(ks, [2])) mod interfaces__unsigned_64__modulus >= 0 New C37: (element(w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus >= 0 New C43: (element(w, [0]) + element(ks, [0])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule do_first_key_rules(16). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(w, [i___1]) and element(w, [i___1]) <= 18446744073709551615) New H3: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= 18446744073709551615) New H4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) New C2: (element(w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New C8: (element(w, [6]) + element(ks, [6])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New C14: (element(w, [5]) + element(ks, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New C20: (element(w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New C26: (element(w, [3]) + element(ks, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New C32: (element(w, [2]) + element(ks, [2])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New C38: (element(w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New C44: (element(w, [0]) + element(ks, [0])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule do_first_key_rules(31). This was achieved by replacing all occurrences of spark__crypto__i9__first by: 0. New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [ i___1]) <= 18446744073709551615) New C3: true New C9: true New C15: true New C21: true New C27: true New C33: true New C39: true New C45: true -S- Applied substitution rule do_first_key_rules(32). This was achieved by replacing all occurrences of spark__crypto__i9__last by: 8. 
New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 8 -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= 18446744073709551615) New C4: true New C10: true New C16: true New C22: true New C28: true New C34: true New C40: true New C46: true -S- Applied substitution rule do_first_key_rules(26). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(w, [i___1]) and element(w, [ i___1]) <= 18446744073709551615) New C5: true New C11: true New C17: true New C23: true New C29: true New C35: true New C41: true New C47: true -S- Applied substitution rule do_first_key_rules(27). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(w, [i___1]) and element(w, [i___1]) <= 18446744073709551615) New C6: true New C12: true New C18: true New C24: true New C30: true New C36: true New C42: true New C48: true *** Proved C3: true *** Proved C9: true *** Proved C15: true *** Proved C21: true *** Proved C27: true *** Proved C33: true *** Proved C39: true *** Proved C45: true *** Proved C4: true *** Proved C10: true *** Proved C16: true *** Proved C22: true *** Proved C28: true *** Proved C34: true *** Proved C40: true *** Proved C46: true *** Proved C5: true *** Proved C11: true *** Proved C17: true *** Proved C23: true *** Proved C29: true *** Proved C35: true *** Proved C41: true *** Proved C47: true *** Proved C6: true *** Proved C12: true *** Proved C18: true *** Proved C24: true *** Proved C30: true *** Proved C36: true *** Proved C42: true *** Proved C48: true -S- Applied substitution rule do_first_key_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(w, [7]) + element(ks, [7])) mod 18446744073709551616 >= 0 New C7: (element(w, [6]) + element(ks, [6])) mod 18446744073709551616 >= 0 New C13: (element(w, [5]) + element(ks, [5])) mod 18446744073709551616 >= 0 New C19: (element(w, [4]) + element(ks, [4])) mod 18446744073709551616 >= 0 New C25: (element(w, [3]) + element(ks, [3])) mod 18446744073709551616 >= 0 New C31: (element(w, [2]) + element(ks, [2])) mod 18446744073709551616 >= 0 New C37: (element(w, [1]) + element(ks, [1])) mod 18446744073709551616 >= 0 New C43: (element(w, [0]) + element(ks, [0])) mod 18446744073709551616 >= 0 New C2: (element(w, [7]) + element(ks, [7])) mod 18446744073709551616 <= 18446744073709551615 New C8: (element(w, [6]) + element(ks, [6])) mod 18446744073709551616 <= 18446744073709551615 New C14: (element(w, [5]) + element(ks, [5])) mod 18446744073709551616 <= 18446744073709551615 New C20: (element(w, [4]) + element(ks, [4])) mod 18446744073709551616 <= 18446744073709551615 New C26: (element(w, [3]) + element(ks, [3])) mod 18446744073709551616 <= 18446744073709551615 New C32: (element(w, [2]) + element(ks, [2])) mod 18446744073709551616 <= 18446744073709551615 New C38: (element(w, [1]) + element(ks, [1])) mod 18446744073709551616 <= 18446744073709551615 New C44: (element(w, [0]) + element(ks, [0])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule do_first_key_rules(21). This was achieved by replacing all occurrences of spark__crypto__i3__first by: 0. 
New H4: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule do_first_key_rules(22). This was achieved by replacing all occurrences of spark__crypto__i3__last by: 2. New H4: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 2 -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) *** Proved C1: (element(w, [7]) + element(ks, [7])) mod 18446744073709551616 >= 0 *** Proved C7: (element(w, [6]) + element(ks, [6])) mod 18446744073709551616 >= 0 *** Proved C13: (element(w, [5]) + element(ks, [5])) mod 18446744073709551616 >= 0 *** Proved C19: (element(w, [4]) + element(ks, [4])) mod 18446744073709551616 >= 0 *** Proved C25: (element(w, [3]) + element(ks, [3])) mod 18446744073709551616 >= 0 *** Proved C31: (element(w, [2]) + element(ks, [2])) mod 18446744073709551616 >= 0 *** Proved C37: (element(w, [1]) + element(ks, [1])) mod 18446744073709551616 >= 0 *** Proved C43: (element(w, [0]) + element(ks, [0])) mod 18446744073709551616 >= 0 *** Proved C2: (element(w, [7]) + element(ks, [7])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C8: (element(w, [6]) + element(ks, [6])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C14: (element(w, [5]) + element(ks, [5])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C20: (element(w, [4]) + element(ks, [4])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C26: (element(w, [3]) + element(ks, [3])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C32: (element(w, [2]) + element(ks, [2])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C38: (element(w, [1]) + element(ks, [1])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C44: (element(w, [0]) + element(ks, [0])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_do_first_key_injection_2. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(w, [i___1]) and element(w, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule do_first_key_rules(15). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(w, [i___1]) and element(w, [i___1]) <= spark__unsigned__u64__last) New H3: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) New H4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) New H5: (element(w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus >= 0 New H11: (element(w, [6]) + element(ks, [6])) mod interfaces__unsigned_64__modulus >= 0 New H17: (element(w, [5]) + element(ks, [5])) mod interfaces__unsigned_64__modulus >= 0 New H23: (element(w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus >= 0 New H29: (element(w, [3]) + element(ks, [3])) mod interfaces__unsigned_64__modulus >= 0 New H35: (element(w, [2]) + element(ks, [2])) mod interfaces__unsigned_64__modulus >= 0 New H41: (element(w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus >= 0 New H47: (element(w, [0]) + element(ks, [0])) mod interfaces__unsigned_64__modulus >= 0 New C1: (element(mk__spark__crypto__u64_seq([0] := (element(w, [0]) + element(ks, [0])) mod interfaces__unsigned_64__modulus, [1] := ( element(w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus, [2] := (element(w, [2]) + element( ks, [2])) mod interfaces__unsigned_64__modulus, [3] := (element(w, [3] ) + element(ks, [3])) mod interfaces__unsigned_64__modulus, [4] := ( element(w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus, [5] := (element(w, [5]) + element( ks, [5])) mod interfaces__unsigned_64__modulus, [6] := (element(w, [6] ) + element(ks, [6])) mod interfaces__unsigned_64__modulus, [7] := ( element(w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus), [wcnt - 3]) + element(ts, [0])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule do_first_key_rules(16). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H6: (element(w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H12: (element(w, [6]) + element(ks, [6])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H18: (element(w, [5]) + element(ks, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H24: (element(w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H30: (element(w, [3]) + element(ks, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H36: (element(w, [2]) + element(ks, [2])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H42: (element(w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H48: (element(w, [0]) + element(ks, [0])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(w, [i___1]) and element(w, [i___1]) <= 18446744073709551615) New H3: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= 18446744073709551615) New H4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) New C2: (element(mk__spark__crypto__u64_seq([0] := (element(w, [0]) + element(ks, [0])) mod interfaces__unsigned_64__modulus, [1] := ( element(w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus, [2] := (element(w, [2]) + element( ks, [2])) mod interfaces__unsigned_64__modulus, [3] := (element(w, [3] ) + element(ks, [3])) mod interfaces__unsigned_64__modulus, [4] := ( element(w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus, [5] := (element(w, [5]) + element( ks, [5])) mod interfaces__unsigned_64__modulus, [6] := (element(w, [6] ) + element(ks, [6])) mod interfaces__unsigned_64__modulus, [7] := ( element(w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus), [wcnt - 3]) + element(ts, [0])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule do_first_key_rules(21). This was achieved by replacing all occurrences of spark__crypto__i3__first by: 0. New H4: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [ i___1]) <= 18446744073709551615) New C3: true -S- Applied substitution rule do_first_key_rules(22). This was achieved by replacing all occurrences of spark__crypto__i3__last by: 2. New H4: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 2 -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) New C4: true -S- Applied substitution rule do_first_key_rules(26). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H9: true New H15: true New H21: true New H27: true New H33: true New H39: true New H45: true New H51: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(w, [i___1]) and element(w, [ i___1]) <= 18446744073709551615) New C5: wcnt >= 3 New C9: wcnt >= 3 -S- Applied substitution rule do_first_key_rules(1). This was achieved by replacing all occurrences of wcnt by: 8. 
New C6: 5 <= spark__crypto__i8__last New C7: 5 >= integer__base__first New C8: 5 <= integer__base__last New C10: 5 <= spark__crypto__i8__last New C11: 5 >= integer__base__first New C12: 5 <= integer__base__last New C1: ((element(w, [5]) + element(ks, [5])) mod interfaces__unsigned_64__modulus + element(ts, [0])) mod interfaces__unsigned_64__modulus >= 0 New C2: ((element(w, [5]) + element(ks, [5])) mod interfaces__unsigned_64__modulus + element(ts, [0])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New C5: true New C9: true -S- Applied substitution rule do_first_key_rules(27). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H10: true New H16: true New H22: true New H28: true New H34: true New H40: true New H46: true New H52: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(w, [i___1]) and element(w, [i___1]) <= 18446744073709551615) New C6: true New C10: true -S- Applied substitution rule do_first_key_rules(5). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New C7: true New C11: true -S- Applied substitution rule do_first_key_rules(6). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New C8: true New C12: true *** Proved C3: true *** Proved C4: true *** Proved C5: true *** Proved C9: true *** Proved C6: true *** Proved C10: true *** Proved C7: true *** Proved C11: true *** Proved C8: true *** Proved C12: true -S- Applied substitution rule do_first_key_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H5: (element(w, [7]) + element(ks, [7])) mod 18446744073709551616 >= 0 New H6: (element(w, [7]) + element(ks, [7])) mod 18446744073709551616 <= 18446744073709551615 New H11: (element(w, [6]) + element(ks, [6])) mod 18446744073709551616 >= 0 New H12: (element(w, [6]) + element(ks, [6])) mod 18446744073709551616 <= 18446744073709551615 New H17: (element(w, [5]) + element(ks, [5])) mod 18446744073709551616 >= 0 New H18: (element(w, [5]) + element(ks, [5])) mod 18446744073709551616 <= 18446744073709551615 New H23: (element(w, [4]) + element(ks, [4])) mod 18446744073709551616 >= 0 New H24: (element(w, [4]) + element(ks, [4])) mod 18446744073709551616 <= 18446744073709551615 New H29: (element(w, [3]) + element(ks, [3])) mod 18446744073709551616 >= 0 New H30: (element(w, [3]) + element(ks, [3])) mod 18446744073709551616 <= 18446744073709551615 New H35: (element(w, [2]) + element(ks, [2])) mod 18446744073709551616 >= 0 New H36: (element(w, [2]) + element(ks, [2])) mod 18446744073709551616 <= 18446744073709551615 New H41: (element(w, [1]) + element(ks, [1])) mod 18446744073709551616 >= 0 New H42: (element(w, [1]) + element(ks, [1])) mod 18446744073709551616 <= 18446744073709551615 New H47: (element(w, [0]) + element(ks, [0])) mod 18446744073709551616 >= 0 New H48: (element(w, [0]) + element(ks, [0])) mod 18446744073709551616 <= 18446744073709551615 New C1: ((element(w, [5]) + element(ks, [5])) mod 18446744073709551616 + element(ts, [0])) mod 18446744073709551616 >= 0 New C2: ((element(w, [5]) + element(ks, [5])) mod 18446744073709551616 + element(ts, [0])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule do_first_key_rules(31). This was achieved by replacing all occurrences of spark__crypto__i9__first by: 0. 
New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [ i___1]) <= 18446744073709551615) New H7: true New H13: true New H19: true New H25: true New H31: true New H37: true New H43: true New H49: true -S- Applied substitution rule do_first_key_rules(32). This was achieved by replacing all occurrences of spark__crypto__i9__last by: 8. New H8: true New H14: true New H20: true New H26: true New H32: true New H38: true New H44: true New H50: true New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 8 -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= 18446744073709551615) *** Proved C1: ((element(w, [5]) + element(ks, [5])) mod 18446744073709551616 + element(ts, [0])) mod 18446744073709551616 >= 0 *** Proved C2: ((element(w, [5]) + element(ks, [5])) mod 18446744073709551616 + element(ts, [0])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_do_first_key_injection_3. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(w, [i___1]) and element(w, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H61 has been replaced by "true". (It is already present, as H57). --- Hypothesis H62 has been replaced by "true". (It is already present, as H58). --- Hypothesis H63 has been replaced by "true". (It is already present, as H59). --- Hypothesis H64 has been replaced by "true". (It is already present, as H60). 
%%% Simplified C1 on reading formula in, to give: %%% C1: (element(mk__spark__crypto__u64_seq([0] := (element(w, [0]) + element(ks, [0])) mod interfaces__unsigned_64__modulus, [1] := ( element(w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus, [2] := (element(w, [2]) + element( ks, [2])) mod interfaces__unsigned_64__modulus, [3] := (element(w, [3] ) + element(ks, [3])) mod interfaces__unsigned_64__modulus, [4] := ( element(w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus, [5] := (element(w, [5]) + element( ks, [5])) mod interfaces__unsigned_64__modulus, [6] := (element(w, [6] ) + element(ks, [6])) mod interfaces__unsigned_64__modulus, [7] := ( element(w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus), [wcnt - 2]) + element(ts, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: (element(mk__spark__crypto__u64_seq([0] := (element(w, [0]) + element(ks, [0])) mod interfaces__unsigned_64__modulus, [1] := ( element(w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus, [2] := (element(w, [2]) + element( ks, [2])) mod interfaces__unsigned_64__modulus, [3] := (element(w, [3] ) + element(ks, [3])) mod interfaces__unsigned_64__modulus, [4] := ( element(w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus, [5] := (element(w, [5]) + element( ks, [5])) mod interfaces__unsigned_64__modulus, [6] := (element(w, [6] ) + element(ks, [6])) mod interfaces__unsigned_64__modulus, [7] := ( element(w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus), [wcnt - 2]) + element(ts, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last *** Proved C3: 1 >= spark__crypto__i3__first using hypothesis H55. -S- Applied substitution rule do_first_key_rules(15). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(w, [i___1]) and element(w, [i___1]) <= spark__unsigned__u64__last) New H3: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) New H4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) New H5: (element(w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus >= 0 New H11: (element(w, [6]) + element(ks, [6])) mod interfaces__unsigned_64__modulus >= 0 New H17: (element(w, [5]) + element(ks, [5])) mod interfaces__unsigned_64__modulus >= 0 New H23: (element(w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus >= 0 New H29: (element(w, [3]) + element(ks, [3])) mod interfaces__unsigned_64__modulus >= 0 New H35: (element(w, [2]) + element(ks, [2])) mod interfaces__unsigned_64__modulus >= 0 New H41: (element(w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus >= 0 New H47: (element(w, [0]) + element(ks, [0])) mod interfaces__unsigned_64__modulus >= 0 New H53: (element(mk__spark__crypto__u64_seq([0] := (element(w, [0]) + element(ks, [0])) mod interfaces__unsigned_64__modulus, [1] := ( element(w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus, [2] := (element(w, [2]) + element( ks, [2])) mod interfaces__unsigned_64__modulus, [3] := (element(w, [3] ) + element(ks, [3])) mod interfaces__unsigned_64__modulus, [4] := ( element(w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus, [5] := (element(w, [5]) + element( ks, [5])) mod interfaces__unsigned_64__modulus, [6] := (element(w, [6] ) + element(ks, [6])) mod interfaces__unsigned_64__modulus, [7] := ( element(w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus), [wcnt - 3]) + element(ts, [0])) mod interfaces__unsigned_64__modulus >= 0 New C1: (element(mk__spark__crypto__u64_seq([0] := (element(w, [0]) + element(ks, [0])) mod interfaces__unsigned_64__modulus, [1] := ( element(w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus, [2] := (element(w, [2]) + element( ks, [2])) mod interfaces__unsigned_64__modulus, [3] := (element(w, [3] ) + element(ks, [3])) mod interfaces__unsigned_64__modulus, [4] := ( element(w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus, [5] := (element(w, [5]) + element( ks, [5])) mod interfaces__unsigned_64__modulus, [6] := (element(w, [6] ) + element(ks, [6])) mod interfaces__unsigned_64__modulus, [7] := ( element(w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus), [wcnt - 2]) + element(ts, [1])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule do_first_key_rules(16). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H6: (element(w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H12: (element(w, [6]) + element(ks, [6])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H18: (element(w, [5]) + element(ks, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H24: (element(w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H30: (element(w, [3]) + element(ks, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H36: (element(w, [2]) + element(ks, [2])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H42: (element(w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H48: (element(w, [0]) + element(ks, [0])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H54: (element(mk__spark__crypto__u64_seq([0] := (element(w, [0]) + element(ks, [0])) mod interfaces__unsigned_64__modulus, [1] := ( element(w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus, [2] := (element(w, [2]) + element( ks, [2])) mod interfaces__unsigned_64__modulus, [3] := (element(w, [3] ) + element(ks, [3])) mod interfaces__unsigned_64__modulus, [4] := ( element(w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus, [5] := (element(w, [5]) + element( ks, [5])) mod interfaces__unsigned_64__modulus, [6] := (element(w, [6] ) + element(ks, [6])) mod interfaces__unsigned_64__modulus, [7] := ( element(w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus), [wcnt - 3]) + element(ts, [0])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(w, [i___1]) and element(w, [i___1]) <= 18446744073709551615) New H3: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= 18446744073709551615) New H4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) New C2: (element(mk__spark__crypto__u64_seq([0] := (element(w, [0]) + element(ks, [0])) mod interfaces__unsigned_64__modulus, [1] := ( element(w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus, [2] := (element(w, [2]) + element( ks, [2])) mod interfaces__unsigned_64__modulus, [3] := (element(w, [3] ) + element(ks, [3])) mod interfaces__unsigned_64__modulus, [4] := ( element(w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus, [5] := (element(w, [5]) + element( ks, [5])) mod interfaces__unsigned_64__modulus, [6] := (element(w, [6] ) + element(ks, [6])) mod interfaces__unsigned_64__modulus, [7] := ( element(w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus), [wcnt - 2]) + element(ts, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule do_first_key_rules(22). This was achieved by replacing all occurrences of spark__crypto__i3__last by: 2. New H56: true New H4: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= 2 -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) New C4: true -S- Applied substitution rule do_first_key_rules(26). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. 
New H9: true New H15: true New H21: true New H27: true New H33: true New H39: true New H45: true New H51: true New H57: wcnt >= 3 New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(w, [i___1]) and element(w, [ i___1]) <= 18446744073709551615) New C5: wcnt >= 2 New C9: wcnt >= 2 -S- Applied substitution rule do_first_key_rules(1). This was achieved by replacing all occurrences of wcnt by: 8. New H58: 5 <= spark__crypto__i8__last New H59: 5 >= integer__base__first New H60: 5 <= integer__base__last New H53: ((element(w, [5]) + element(ks, [5])) mod interfaces__unsigned_64__modulus + element(ts, [0])) mod interfaces__unsigned_64__modulus >= 0 New H54: ((element(w, [5]) + element(ks, [5])) mod interfaces__unsigned_64__modulus + element(ts, [0])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H57: true New C6: 6 <= spark__crypto__i8__last New C7: 6 >= integer__base__first New C8: 6 <= integer__base__last New C10: 6 <= spark__crypto__i8__last New C11: 6 >= integer__base__first New C12: 6 <= integer__base__last New C1: ((element(w, [6]) + element(ks, [6])) mod interfaces__unsigned_64__modulus + element(ts, [1])) mod interfaces__unsigned_64__modulus >= 0 New C2: ((element(w, [6]) + element(ks, [6])) mod interfaces__unsigned_64__modulus + element(ts, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New C5: true New C9: true -S- Applied substitution rule do_first_key_rules(27). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H10: true New H16: true New H22: true New H28: true New H34: true New H40: true New H46: true New H52: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(w, [i___1]) and element(w, [i___1]) <= 18446744073709551615) New H58: true New C6: true New C10: true -S- Applied substitution rule do_first_key_rules(5). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H59: true New C7: true New C11: true -S- Applied substitution rule do_first_key_rules(6). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H60: true New C8: true New C12: true *** Proved C4: true *** Proved C5: true *** Proved C9: true *** Proved C6: true *** Proved C10: true *** Proved C7: true *** Proved C11: true *** Proved C8: true *** Proved C12: true -S- Applied substitution rule do_first_key_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. 
New H5: (element(w, [7]) + element(ks, [7])) mod 18446744073709551616 >= 0 New H6: (element(w, [7]) + element(ks, [7])) mod 18446744073709551616 <= 18446744073709551615 New H11: (element(w, [6]) + element(ks, [6])) mod 18446744073709551616 >= 0 New H12: (element(w, [6]) + element(ks, [6])) mod 18446744073709551616 <= 18446744073709551615 New H17: (element(w, [5]) + element(ks, [5])) mod 18446744073709551616 >= 0 New H18: (element(w, [5]) + element(ks, [5])) mod 18446744073709551616 <= 18446744073709551615 New H23: (element(w, [4]) + element(ks, [4])) mod 18446744073709551616 >= 0 New H24: (element(w, [4]) + element(ks, [4])) mod 18446744073709551616 <= 18446744073709551615 New H29: (element(w, [3]) + element(ks, [3])) mod 18446744073709551616 >= 0 New H30: (element(w, [3]) + element(ks, [3])) mod 18446744073709551616 <= 18446744073709551615 New H35: (element(w, [2]) + element(ks, [2])) mod 18446744073709551616 >= 0 New H36: (element(w, [2]) + element(ks, [2])) mod 18446744073709551616 <= 18446744073709551615 New H41: (element(w, [1]) + element(ks, [1])) mod 18446744073709551616 >= 0 New H42: (element(w, [1]) + element(ks, [1])) mod 18446744073709551616 <= 18446744073709551615 New H47: (element(w, [0]) + element(ks, [0])) mod 18446744073709551616 >= 0 New H48: (element(w, [0]) + element(ks, [0])) mod 18446744073709551616 <= 18446744073709551615 New H53: ((element(w, [5]) + element(ks, [5])) mod 18446744073709551616 + element(ts, [0])) mod 18446744073709551616 >= 0 New H54: ((element(w, [5]) + element(ks, [5])) mod 18446744073709551616 + element(ts, [0])) mod 18446744073709551616 <= 18446744073709551615 New C1: ((element(w, [6]) + element(ks, [6])) mod 18446744073709551616 + element(ts, [1])) mod 18446744073709551616 >= 0 New C2: ((element(w, [6]) + element(ks, [6])) mod 18446744073709551616 + element(ts, [1])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule do_first_key_rules(21). This was achieved by replacing all occurrences of spark__crypto__i3__first by: 0. New H4: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 2 -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) New H55: true -S- Applied substitution rule do_first_key_rules(31). This was achieved by replacing all occurrences of spark__crypto__i9__first by: 0. New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [ i___1]) <= 18446744073709551615) New H7: true New H13: true New H19: true New H25: true New H31: true New H37: true New H43: true New H49: true -S- Applied substitution rule do_first_key_rules(32). This was achieved by replacing all occurrences of spark__crypto__i9__last by: 8. New H8: true New H14: true New H20: true New H26: true New H32: true New H38: true New H44: true New H50: true New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 8 -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= 18446744073709551615) *** Proved C1: ((element(w, [6]) + element(ks, [6])) mod 18446744073709551616 + element(ts, [1])) mod 18446744073709551616 >= 0 *** Proved C2: ((element(w, [6]) + element(ks, [6])) mod 18446744073709551616 + element(ts, [1])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_do_first_key_injection_4. @@@@@@@@@@ *** Proved C1: true *** PROVED VC. 
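The verification conditions simplified above arise from a first key injection over the Skein-512 state: each word of W absorbs the corresponding word of the key schedule KS modulo 2**64, and the words at positions WCNT - 3 and WCNT - 2 additionally absorb the tweak words TS (0) and TS (1) (hypotheses H53/H54 and conclusions C1/C2 after the substitution WCNT = 8). The following self-contained Ada sketch only illustrates that arithmetic; the loop shape is an assumption, the names mirror the FDL identifiers, and none of it is the shipped SPARK source.

with Interfaces; use Interfaces;

procedure First_Key_Injection_Sketch is
   WCNT : constant := 8;                           --  do_first_key_rules(1): wcnt = 8
   type U64_Seq is array (Natural range <>) of Unsigned_64;
   W  : U64_Seq (0 .. WCNT - 1) := (others => 0);  --  spark__crypto__i8 is 0 .. 7
   KS : U64_Seq (0 .. WCNT)     := (others => 0);  --  spark__crypto__i9 is 0 .. 8
   TS : U64_Seq (0 .. 2)        := (others => 0);  --  spark__crypto__i3 is 0 .. 2
begin
   for I in W'Range loop
      W (I) := W (I) + KS (I);                     --  wraps modulo 2**64
   end loop;
   W (WCNT - 3) := W (WCNT - 3) + TS (0);          --  the term bounded in H53/H54
   W (WCNT - 2) := W (WCNT - 2) + TS (1);          --  the term bounded in C1/C2
end First_Key_Injection_Sketch;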
././@LongLink0000000000000000000000000000016200000000000011564 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_key_schedule.vsmspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_key0000644000175000017500000000007411712765060032512 0ustar eugeneugeninitialize_key_schedule,0,0,1,1,0,0,0,100.0, 0.0, 0.0, 0.0, spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_3.vcg0000644000175000017500000015342411712513676031644 0ustar eugeneugen ******************************************************* Semantic Analysis of SPARK Text Examiner Pro Edition ******************************************************* procedure Skein.Skein_512_Process_Block.Round_3 For path(s) from start to run-time check associated with statement of line 336: procedure_round_3_1. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 1 >= spark__crypto__i8__first . C4: 1 <= spark__crypto__i8__last . C5: 4 >= spark__crypto__i8__first . C6: 4 <= spark__crypto__i8__last . C7: 4 >= spark__crypto__i8__first . C8: 4 <= spark__crypto__i8__last . For path(s) from start to run-time check associated with statement of line 337: procedure_round_3_2. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . -> C1: r_512_2_0 >= spark__unsigned__shift_count__first . C2: r_512_2_0 <= spark__unsigned__shift_count__last . C3: element(update(x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . C4: element(update(x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . C5: 1 >= spark__crypto__i8__first . C6: 1 <= spark__crypto__i8__last . C7: 1 >= spark__crypto__i8__first . C8: 1 <= spark__crypto__i8__last . For path(s) from start to run-time check associated with statement of line 337: procedure_round_3_3. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . 
H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . H11: r_512_2_0 >= spark__unsigned__shift_count__first . H12: r_512_2_0 <= spark__unsigned__shift_count__last . H13: element(update(x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . H14: element(update(x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . H15: 1 >= spark__crypto__i8__first . H16: 1 <= spark__crypto__i8__last . H17: 1 >= spark__crypto__i8__first . H18: 1 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_2_0) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_2_0) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_2_0) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_2_0) <= spark__unsigned__u64__last . For path(s) from start to run-time check associated with statement of line 338: procedure_round_3_4. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . H11: r_512_2_0 >= spark__unsigned__shift_count__first . H12: r_512_2_0 <= spark__unsigned__shift_count__last . H13: element(update(x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . H14: element(update(x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . H15: 1 >= spark__crypto__i8__first . H16: 1 <= spark__crypto__i8__last . H17: 1 >= spark__crypto__i8__first . H18: 1 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_2_0) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_2_0) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_2_0) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_2_0) <= spark__unsigned__u64__last . 
-> C1: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_2_0)), [1]), element(update(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_2_0)), [4])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_2_0)), [1]), element(update(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_2_0)), [4])) <= spark__unsigned__u64__last . C3: 4 >= spark__crypto__i8__first . C4: 4 <= spark__crypto__i8__last . C5: 1 >= spark__crypto__i8__first . C6: 1 <= spark__crypto__i8__last . C7: 1 >= spark__crypto__i8__first . C8: 1 <= spark__crypto__i8__last . For path(s) from start to assertion of line 341: procedure_round_3_5. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . H11: r_512_2_0 >= spark__unsigned__shift_count__first . H12: r_512_2_0 <= spark__unsigned__shift_count__last . H13: element(update(x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . H14: element(update(x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . H15: 1 >= spark__crypto__i8__first . H16: 1 <= spark__crypto__i8__last . H17: 1 >= spark__crypto__i8__first . H18: 1 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_2_0) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_2_0) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_2_0) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_2_0) <= spark__unsigned__u64__last . 
H23: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_2_0)), [1]), element(update(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_2_0)), [4])) >= spark__unsigned__u64__first . H24: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_2_0)), [1]), element(update(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_2_0)), [4])) <= spark__unsigned__u64__last . H25: 4 >= spark__crypto__i8__first . H26: 4 <= spark__crypto__i8__last . H27: 1 >= spark__crypto__i8__first . H28: 1 <= spark__crypto__i8__last . H29: 1 >= spark__crypto__i8__first . H30: 1 <= spark__crypto__i8__last . -> C1: true . C2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(update(update(update( x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_2_0)), [1], bit__xor(element(update(update( x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_2_0)), [1]), element(update(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_2_0)), [4]))), [i___1]) >= spark__unsigned__u64__first) and (element(update(update(update( x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_2_0)), [1], bit__xor(element(update(update( x, [4], (element(x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_2_0)), [1]), element(update(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_2_0)), [4]))), [i___1]) <= spark__unsigned__u64__last))) . For path(s) from assertion of line 341 to run-time check associated with statement of line 343: procedure_round_3_6. H1: true . 
H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 3 >= spark__crypto__i8__first . C4: 3 <= spark__crypto__i8__last . C5: 6 >= spark__crypto__i8__first . C6: 6 <= spark__crypto__i8__last . C7: 6 >= spark__crypto__i8__first . C8: 6 <= spark__crypto__i8__last . For path(s) from assertion of line 341 to run-time check associated with statement of line 344: procedure_round_3_7. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . -> C1: r_512_2_1 >= spark__unsigned__shift_count__first . C2: r_512_2_1 <= spark__unsigned__shift_count__last . C3: element(update(x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . C4: element(update(x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . C5: 3 >= spark__crypto__i8__first . C6: 3 <= spark__crypto__i8__last . C7: 3 >= spark__crypto__i8__first . C8: 3 <= spark__crypto__i8__last . For path(s) from assertion of line 341 to run-time check associated with statement of line 344: procedure_round_3_8. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . H11: r_512_2_1 >= spark__unsigned__shift_count__first . H12: r_512_2_1 <= spark__unsigned__shift_count__last . H13: element(update(x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . H14: element(update(x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . H15: 3 >= spark__crypto__i8__first . H16: 3 <= spark__crypto__i8__last . H17: 3 >= spark__crypto__i8__first . H18: 3 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_2_1) >= interfaces__unsigned_64__first . 
H20: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_2_1) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_2_1) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_2_1) <= spark__unsigned__u64__last . For path(s) from assertion of line 341 to run-time check associated with statement of line 345: procedure_round_3_9. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . H11: r_512_2_1 >= spark__unsigned__shift_count__first . H12: r_512_2_1 <= spark__unsigned__shift_count__last . H13: element(update(x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . H14: element(update(x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . H15: 3 >= spark__crypto__i8__first . H16: 3 <= spark__crypto__i8__last . H17: 3 >= spark__crypto__i8__first . H18: 3 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_2_1) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_2_1) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_2_1) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_2_1) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_2_1)), [3]), element(update(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_2_1)), [6])) >= spark__unsigned__u64__first . 
C2: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_2_1)), [3]), element(update(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_2_1)), [6])) <= spark__unsigned__u64__last . C3: 6 >= spark__crypto__i8__first . C4: 6 <= spark__crypto__i8__last . C5: 3 >= spark__crypto__i8__first . C6: 3 <= spark__crypto__i8__last . C7: 3 >= spark__crypto__i8__first . C8: 3 <= spark__crypto__i8__last . For path(s) from assertion of line 341 to assertion of line 347: procedure_round_3_10. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . H11: r_512_2_1 >= spark__unsigned__shift_count__first . H12: r_512_2_1 <= spark__unsigned__shift_count__last . H13: element(update(x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . H14: element(update(x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . H15: 3 >= spark__crypto__i8__first . H16: 3 <= spark__crypto__i8__last . H17: 3 >= spark__crypto__i8__first . H18: 3 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_2_1) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_2_1) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_2_1) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_2_1) <= spark__unsigned__u64__last . H23: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_2_1)), [3]), element(update(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_2_1)), [6])) >= spark__unsigned__u64__first . 
H24: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_2_1)), [3]), element(update(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_2_1)), [6])) <= spark__unsigned__u64__last . H25: 6 >= spark__crypto__i8__first . H26: 6 <= spark__crypto__i8__last . H27: 3 >= spark__crypto__i8__first . H28: 3 <= spark__crypto__i8__last . H29: 3 >= spark__crypto__i8__first . H30: 3 <= spark__crypto__i8__last . -> C1: true . C2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(update(update(update( x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_2_1)), [3], bit__xor(element(update(update( x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_2_1)), [3]), element(update(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_2_1)), [6]))), [i___1]) >= spark__unsigned__u64__first) and (element(update(update(update( x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_2_1)), [3], bit__xor(element(update(update( x, [6], (element(x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_2_1)), [3]), element(update(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_2_1)), [6]))), [i___1]) <= spark__unsigned__u64__last))) . For path(s) from assertion of line 347 to run-time check associated with statement of line 349: procedure_round_3_11. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 5 >= spark__crypto__i8__first . C4: 5 <= spark__crypto__i8__last . C5: 0 >= spark__crypto__i8__first . C6: 0 <= spark__crypto__i8__last . C7: 0 >= spark__crypto__i8__first . C8: 0 <= spark__crypto__i8__last . 
For path(s) from assertion of line 347 to run-time check associated with statement of line 350: procedure_round_3_12. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . -> C1: r_512_2_2 >= spark__unsigned__shift_count__first . C2: r_512_2_2 <= spark__unsigned__shift_count__last . C3: element(update(x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . C4: element(update(x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . C5: 5 >= spark__crypto__i8__first . C6: 5 <= spark__crypto__i8__last . C7: 5 >= spark__crypto__i8__first . C8: 5 <= spark__crypto__i8__last . For path(s) from assertion of line 347 to run-time check associated with statement of line 350: procedure_round_3_13. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . H11: r_512_2_2 >= spark__unsigned__shift_count__first . H12: r_512_2_2 <= spark__unsigned__shift_count__last . H13: element(update(x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . H14: element(update(x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . H15: 5 >= spark__crypto__i8__first . H16: 5 <= spark__crypto__i8__last . H17: 5 >= spark__crypto__i8__first . H18: 5 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_2_2) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_2_2) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_2_2) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_2_2) <= spark__unsigned__u64__last . For path(s) from assertion of line 347 to run-time check associated with statement of line 351: procedure_round_3_14. H1: true . 
H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . H11: r_512_2_2 >= spark__unsigned__shift_count__first . H12: r_512_2_2 <= spark__unsigned__shift_count__last . H13: element(update(x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . H14: element(update(x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . H15: 5 >= spark__crypto__i8__first . H16: 5 <= spark__crypto__i8__last . H17: 5 >= spark__crypto__i8__first . H18: 5 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_2_2) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_2_2) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_2_2) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_2_2) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_2_2)), [5]), element(update(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_2_2)), [0])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_2_2)), [5]), element(update(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_2_2)), [0])) <= spark__unsigned__u64__last . C3: 0 >= spark__crypto__i8__first . C4: 0 <= spark__crypto__i8__last . C5: 5 >= spark__crypto__i8__first . C6: 5 <= spark__crypto__i8__last . C7: 5 >= spark__crypto__i8__first . C8: 5 <= spark__crypto__i8__last . For path(s) from assertion of line 347 to assertion of line 353: procedure_round_3_15. H1: true . 
H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . H11: r_512_2_2 >= spark__unsigned__shift_count__first . H12: r_512_2_2 <= spark__unsigned__shift_count__last . H13: element(update(x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . H14: element(update(x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . H15: 5 >= spark__crypto__i8__first . H16: 5 <= spark__crypto__i8__last . H17: 5 >= spark__crypto__i8__first . H18: 5 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_2_2) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_2_2) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_2_2) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_2_2) <= spark__unsigned__u64__last . H23: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_2_2)), [5]), element(update(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_2_2)), [0])) >= spark__unsigned__u64__first . H24: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_2_2)), [5]), element(update(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_2_2)), [0])) <= spark__unsigned__u64__last . H25: 0 >= spark__crypto__i8__first . H26: 0 <= spark__crypto__i8__last . H27: 5 >= spark__crypto__i8__first . H28: 5 <= spark__crypto__i8__last . H29: 5 >= spark__crypto__i8__first . H30: 5 <= spark__crypto__i8__last . -> C1: true . 
C2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(update(update(update( x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_2_2)), [5], bit__xor(element(update(update( x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_2_2)), [5]), element(update(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_2_2)), [0]))), [i___1]) >= spark__unsigned__u64__first) and (element(update(update(update( x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_2_2)), [5], bit__xor(element(update(update( x, [0], (element(x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_2_2)), [5]), element(update(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_2_2)), [0]))), [i___1]) <= spark__unsigned__u64__last))) . For path(s) from assertion of line 353 to run-time check associated with statement of line 355: procedure_round_3_16. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 7 >= spark__crypto__i8__first . C4: 7 <= spark__crypto__i8__last . C5: 2 >= spark__crypto__i8__first . C6: 2 <= spark__crypto__i8__last . C7: 2 >= spark__crypto__i8__first . C8: 2 <= spark__crypto__i8__last . For path(s) from assertion of line 353 to run-time check associated with statement of line 356: procedure_round_3_17. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . -> C1: r_512_2_3 >= spark__unsigned__shift_count__first . C2: r_512_2_3 <= spark__unsigned__shift_count__last . 
C3: element(update(x, [2], (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . C4: element(update(x, [2], (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . C5: 7 >= spark__crypto__i8__first . C6: 7 <= spark__crypto__i8__last . C7: 7 >= spark__crypto__i8__first . C8: 7 <= spark__crypto__i8__last . For path(s) from assertion of line 353 to run-time check associated with statement of line 356: procedure_round_3_18. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . H11: r_512_2_3 >= spark__unsigned__shift_count__first . H12: r_512_2_3 <= spark__unsigned__shift_count__last . H13: element(update(x, [2], (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . H14: element(update(x, [2], (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . H15: 7 >= spark__crypto__i8__first . H16: 7 <= spark__crypto__i8__last . H17: 7 >= spark__crypto__i8__first . H18: 7 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_2_3) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_2_3) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_2_3) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_2_3) <= spark__unsigned__u64__last . For path(s) from assertion of line 353 to run-time check associated with statement of line 357: procedure_round_3_19. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . H11: r_512_2_3 >= spark__unsigned__shift_count__first . H12: r_512_2_3 <= spark__unsigned__shift_count__last . H13: element(update(x, [2], (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [2], (element(x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . H15: 7 >= spark__crypto__i8__first . H16: 7 <= spark__crypto__i8__last . H17: 7 >= spark__crypto__i8__first . H18: 7 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_2_3) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_2_3) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_2_3) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_2_3) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_2_3)), [7]), element(update(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_2_3)), [2])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_2_3)), [7]), element(update(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_2_3)), [2])) <= spark__unsigned__u64__last . C3: 2 >= spark__crypto__i8__first . C4: 2 <= spark__crypto__i8__last . C5: 7 >= spark__crypto__i8__first . C6: 7 <= spark__crypto__i8__last . C7: 7 >= spark__crypto__i8__first . C8: 7 <= spark__crypto__i8__last . For path(s) from assertion of line 353 to finish: procedure_round_3_20. *** true . 
/* trivially true VC removed by Examiner */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_6.fdl0000644000175000017500000000577111712513676031636 0ustar eugeneugen {*******************************************************} {FDL Declarations} {Examiner Pro Edition} {*******************************************************} {procedure Skein.Skein_512_Process_Block.Round_6} title procedure round_6; function round__(real) : integer; type interfaces__unsigned_64 = integer; type spark__unsigned__u64 = integer; type spark__unsigned__shift_count = integer; type spark__crypto__i8 = integer; type spark__crypto__word_count_t = integer; type spark__crypto__u64_seq = array [ spark__crypto__word_count_t] of spark__unsigned__u64; const r_512_5_0 : integer = pending; const r_512_5_1 : integer = pending; const r_512_5_2 : integer = pending; const r_512_5_3 : integer = pending; const spark__crypto__word_count_t__base__first : integer = pending; const spark__crypto__word_count_t__base__last : integer = pending; const spark__crypto__i8__base__first : integer = pending; const spark__crypto__i8__base__last : integer = pending; const spark__unsigned__shift_count__base__first : integer = pending; const spark__unsigned__shift_count__base__last : integer = pending; const spark__unsigned__u64__base__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__base__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__last : interfaces__unsigned_64 = pending; const integer__base__first : integer = pending; const integer__base__last : integer = pending; const spark__crypto__word_count_t__first : integer = pending; const spark__crypto__word_count_t__last : integer = pending; const spark__crypto__word_count_t__size : integer = pending; const spark__crypto__i8__first : integer = pending; const spark__crypto__i8__last : integer = pending; const spark__crypto__i8__size : integer = pending; const spark__unsigned__shift_count__first : integer = pending; const spark__unsigned__shift_count__last : integer = pending; const spark__unsigned__shift_count__size : integer = pending; const spark__unsigned__u64__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__last : interfaces__unsigned_64 = pending; const spark__unsigned__u64__modulus : integer = pending; const spark__unsigned__u64__size : integer = pending; const interfaces__unsigned_64__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__modulus : integer = pending; const interfaces__unsigned_64__size : integer = pending; const integer__first : integer = pending; const integer__last : integer = pending; const integer__size : integer = pending; var x : spark__crypto__u64_seq; function spark__unsigned__rotate_left_64( interfaces__unsigned_64, integer) : interfaces__unsigned_64; end; ././@LongLink0000000000000000000000000000015100000000000011562 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/update_context.logspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/update_context0000644000175000017500000000045411712513676032535 0ustar eugeneugenSPARK Simplifier Pro Edition Reading update_context.fdl (for inherited FDL type declarations) Processing update_context.vcg ... 
Simplified VC: 1 - All conclusions proved Simplified VC: 2 - All conclusions proved Automatic simplification completed. Simplified output sent to update_context.siv. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_8.fdl0000644000175000017500000000577111712513676031640 0ustar eugeneugen {*******************************************************} {FDL Declarations} {Examiner Pro Edition} {*******************************************************} {procedure Skein.Skein_512_Process_Block.Round_8} title procedure round_8; function round__(real) : integer; type interfaces__unsigned_64 = integer; type spark__unsigned__u64 = integer; type spark__unsigned__shift_count = integer; type spark__crypto__i8 = integer; type spark__crypto__word_count_t = integer; type spark__crypto__u64_seq = array [ spark__crypto__word_count_t] of spark__unsigned__u64; const r_512_7_0 : integer = pending; const r_512_7_1 : integer = pending; const r_512_7_2 : integer = pending; const r_512_7_3 : integer = pending; const spark__crypto__word_count_t__base__first : integer = pending; const spark__crypto__word_count_t__base__last : integer = pending; const spark__crypto__i8__base__first : integer = pending; const spark__crypto__i8__base__last : integer = pending; const spark__unsigned__shift_count__base__first : integer = pending; const spark__unsigned__shift_count__base__last : integer = pending; const spark__unsigned__u64__base__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__base__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__last : interfaces__unsigned_64 = pending; const integer__base__first : integer = pending; const integer__base__last : integer = pending; const spark__crypto__word_count_t__first : integer = pending; const spark__crypto__word_count_t__last : integer = pending; const spark__crypto__word_count_t__size : integer = pending; const spark__crypto__i8__first : integer = pending; const spark__crypto__i8__last : integer = pending; const spark__crypto__i8__size : integer = pending; const spark__unsigned__shift_count__first : integer = pending; const spark__unsigned__shift_count__last : integer = pending; const spark__unsigned__shift_count__size : integer = pending; const spark__unsigned__u64__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__last : interfaces__unsigned_64 = pending; const spark__unsigned__u64__modulus : integer = pending; const spark__unsigned__u64__size : integer = pending; const interfaces__unsigned_64__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__modulus : integer = pending; const interfaces__unsigned_64__size : integer = pending; const integer__first : integer = pending; const integer__last : integer = pending; const integer__size : integer = pending; var x : spark__crypto__u64_seq; function spark__unsigned__rotate_left_64( interfaces__unsigned_64, integer) : interfaces__unsigned_64; end; ././@LongLink0000000000000000000000000000015000000000000011561 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_ts.vsmspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_ts.0000644000175000017500000000006111712765060032422 0ustar eugeneugeninitialize_ts,0,0,0,0,0,0,0,-nan,-nan,-nan,-nan, 
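Both round_6.fdl and round_8.fdl above model the rotation as an uninterpreted FDL function, spark__unsigned__rotate_left_64(interfaces__unsigned_64, integer) : interfaces__unsigned_64. On the Ada side this presumably corresponds to a thin wrapper around the Rotate_Left operation that package Interfaces provides for Unsigned_64; the sketch below is an assumption about such a wrapper, not the actual SPARK.Unsigned body, and the unit name is hypothetical.

with Interfaces; use Interfaces;

--  Hypothetical stand-in for the function modelled as
--  spark__unsigned__rotate_left_64 in the FDL declarations above.
function Rotate_Left_64_Sketch
  (Value : Unsigned_64;
   Count : Natural) return Unsigned_64
is
begin
   return Rotate_Left (Value, Count);  --  64-bit left rotation from package Interfaces
end Rotate_Left_64_Sketch;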
spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_8.vct0000644000175000017500000000000011712513676031643 0ustar eugeneugenspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/inject_key.fdl0000644000175000017500000000741111712513676032377 0ustar eugeneugen {*******************************************************} {FDL Declarations} {Examiner Pro Edition} {*******************************************************} {procedure Skein.Skein_512_Process_Block.Inject_Key} title procedure inject_key; function round__(real) : integer; type natural = integer; type interfaces__unsigned_64 = integer; type spark__unsigned__u64 = integer; type spark__crypto__i3 = integer; type spark__crypto__i8 = integer; type spark__crypto__i9 = integer; type spark__crypto__word_count_t = integer; type injection_range = integer; type spark__crypto__u64_seq = array [ spark__crypto__word_count_t] of spark__unsigned__u64; const wcnt : integer = pending; const ks_modulus : interfaces__unsigned_64 = pending; const injection_range__base__first : integer = pending; const injection_range__base__last : integer = pending; const spark__crypto__word_count_t__base__first : integer = pending; const spark__crypto__word_count_t__base__last : integer = pending; const spark__crypto__i9__base__first : integer = pending; const spark__crypto__i9__base__last : integer = pending; const spark__crypto__i8__base__first : integer = pending; const spark__crypto__i8__base__last : integer = pending; const spark__crypto__i3__base__first : integer = pending; const spark__crypto__i3__base__last : integer = pending; const spark__unsigned__u64__base__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__base__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__last : interfaces__unsigned_64 = pending; const natural__base__first : integer = pending; const natural__base__last : integer = pending; const integer__base__first : integer = pending; const integer__base__last : integer = pending; const injection_range__first : integer = pending; const injection_range__last : integer = pending; const injection_range__size : integer = pending; const spark__crypto__word_count_t__first : integer = pending; const spark__crypto__word_count_t__last : integer = pending; const spark__crypto__word_count_t__size : integer = pending; const spark__crypto__i9__first : integer = pending; const spark__crypto__i9__last : integer = pending; const spark__crypto__i9__size : integer = pending; const spark__crypto__i8__first : integer = pending; const spark__crypto__i8__last : integer = pending; const spark__crypto__i8__size : integer = pending; const spark__crypto__i3__first : integer = pending; const spark__crypto__i3__last : integer = pending; const spark__crypto__i3__size : integer = pending; const spark__unsigned__u64__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__last : interfaces__unsigned_64 = pending; const spark__unsigned__u64__modulus : integer = pending; const spark__unsigned__u64__size : integer = pending; const interfaces__unsigned_64__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__modulus : integer = pending; const interfaces__unsigned_64__size : integer = pending; const natural__first : integer = pending; const natural__last : integer = pending; const 
natural__size : integer = pending; const integer__first : integer = pending; const integer__last : integer = pending; const integer__size : integer = pending; var ts : spark__crypto__u64_seq; var ks : spark__crypto__u64_seq; var x : spark__crypto__u64_seq; var r : interfaces__unsigned_64; var loop__1__i : integer; end; ././@LongLink0000000000000000000000000000015000000000000011561 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_ts.slgspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_ts.0000644000175000017500000003172711712513676032443 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Initialize_TS @@@@@@@@@@ VC: procedure_initialize_ts_1. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H21 on reading formula in, to give: %%% H21: for_all(i_ : integer, modifier_words_index__first <= i_ and i_ <= modifier_words_index__last -> spark__unsigned__u64__first <= element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [i_]) and element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [i_]) <= spark__unsigned__u64__last) -S- Applied substitution rule initialize_t_rules(70). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H3: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H19: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H21: for_all(i_ : integer, modifier_words_index__first <= i_ and i_ <= modifier_words_index__last -> 0 <= element(tweak_to_words( fld_tweak_words(fld_h(ctx))), [i_]) and element(tweak_to_words( fld_tweak_words(fld_h(ctx))), [i_]) <= spark__unsigned__u64__last) New C1: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [0]) >= 0 -S- Applied substitution rule initialize_t_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H20: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H3: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H21: for_all(i_ : integer, modifier_words_index__first <= i_ and i_ <= modifier_words_index__last -> 0 <= element(tweak_to_words( fld_tweak_words(fld_h(ctx))), [i_]) and element(tweak_to_words( fld_tweak_words(fld_h(ctx))), [i_]) <= 18446744073709551615) New C2: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [0]) <= 18446744073709551615 -S- Applied substitution rule initialize_t_rules(98). This was achieved by replacing all occurrences of modifier_words_index__first by: 0. New H21: for_all(i_ : integer, 0 <= i_ and i_ <= modifier_words_index__last -> 0 <= element(tweak_to_words( fld_tweak_words(fld_h(ctx))), [i_]) and element(tweak_to_words( fld_tweak_words(fld_h(ctx))), [i_]) <= 18446744073709551615) New C3: true -S- Applied substitution rule initialize_t_rules(99). This was achieved by replacing all occurrences of modifier_words_index__last by: 1. New H21: for_all(i_ : integer, 0 <= i_ and i_ <= 1 -> 0 <= element( tweak_to_words(fld_tweak_words(fld_h(ctx))), [i_]) and element( tweak_to_words(fld_tweak_words(fld_h(ctx))), [i_]) <= 18446744073709551615) New C4: true *** Proved C1: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [0]) >= 0 using hypothesis H21. *** Proved C2: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [0]) <= 18446744073709551615 using hypothesis H21. *** Proved C3: true *** Proved C4: true *** PROVED VC. @@@@@@@@@@ VC: procedure_initialize_ts_2. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H21 on reading formula in, to give: %%% H21: for_all(i_ : integer, modifier_words_index__first <= i_ and i_ <= modifier_words_index__last -> spark__unsigned__u64__first <= element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [i_]) and element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [i_]) <= spark__unsigned__u64__last) --- Hypothesis H26 has been replaced by "true". (It is already present, as H21). *** Proved C3: 1 >= modifier_words_index__first using hypothesis H24. -S- Applied substitution rule initialize_t_rules(70). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H3: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H19: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H21: for_all(i_ : integer, modifier_words_index__first <= i_ and i_ <= modifier_words_index__last -> 0 <= element(tweak_to_words( fld_tweak_words(fld_h(ctx))), [i_]) and element(tweak_to_words( fld_tweak_words(fld_h(ctx))), [i_]) <= spark__unsigned__u64__last) New H22: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [0]) >= 0 New C1: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [1]) >= 0 -S- Applied substitution rule initialize_t_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H20: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H23: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [0]) <= 18446744073709551615 New H3: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H21: for_all(i_ : integer, modifier_words_index__first <= i_ and i_ <= modifier_words_index__last -> 0 <= element(tweak_to_words( fld_tweak_words(fld_h(ctx))), [i_]) and element(tweak_to_words( fld_tweak_words(fld_h(ctx))), [i_]) <= 18446744073709551615) New C2: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [1]) <= 18446744073709551615 -S- Applied substitution rule initialize_t_rules(99). This was achieved by replacing all occurrences of modifier_words_index__last by: 1. New H25: true New H21: for_all(i_ : integer, modifier_words_index__first <= i_ and i_ <= 1 -> 0 <= element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [i_] ) and element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [i_]) <= 18446744073709551615) New C4: true *** Proved C1: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [1]) >= 0 using hypotheses H21 & H24. *** Proved C2: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [1]) <= 18446744073709551615 using hypotheses H21 & H24. *** Proved C4: true *** PROVED VC. @@@@@@@@@@ VC: procedure_initialize_ts_3. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H21 on reading formula in, to give: %%% H21: for_all(i_ : integer, modifier_words_index__first <= i_ and i_ <= modifier_words_index__last -> spark__unsigned__u64__first <= element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [i_]) and element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [i_]) <= spark__unsigned__u64__last) --- Hypothesis H26 has been replaced by "true". (It is already present, as H21). *** Proved C3: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [1]) >= spark__unsigned__u64__first using hypothesis H27. 
*** Proved C4: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [1]) <= spark__unsigned__u64__last using hypothesis H28. *** Proved C5: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [0]) >= spark__unsigned__u64__first using hypothesis H22. *** Proved C6: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [0]) <= spark__unsigned__u64__last using hypothesis H23. -S- Applied substitution rule initialize_t_rules(70). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H3: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H19: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H21: for_all(i_ : integer, modifier_words_index__first <= i_ and i_ <= modifier_words_index__last -> 0 <= element(tweak_to_words( fld_tweak_words(fld_h(ctx))), [i_]) and element(tweak_to_words( fld_tweak_words(fld_h(ctx))), [i_]) <= spark__unsigned__u64__last) New H22: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [0]) >= 0 New H27: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [1]) >= 0 New C1: bit__xor(element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [0]) , element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [1])) >= 0 -S- Applied substitution rule initialize_t_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H20: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H23: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [0]) <= 18446744073709551615 New H28: element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [1]) <= 18446744073709551615 New H3: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H21: for_all(i_ : integer, modifier_words_index__first <= i_ and i_ <= modifier_words_index__last -> 0 <= element(tweak_to_words( fld_tweak_words(fld_h(ctx))), [i_]) and element(tweak_to_words( fld_tweak_words(fld_h(ctx))), [i_]) <= 18446744073709551615) New C2: bit__xor(element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [0]) , element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [1])) <= 18446744073709551615 *** Proved C1: bit__xor(element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [ 0]), element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [1])) >= 0 using hypotheses H22 & H27. *** Proved C2: bit__xor(element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [ 0]), element(tweak_to_words(fld_tweak_words(fld_h(ctx))), [1])) <= 18446744073709551615 using hypotheses H22, H23, H27 & H28. *** PROVED VC. @@@@@@@@@@ VC: procedure_initialize_ts_4. @@@@@@@@@@ *** Proved C1: true *** PROVED VC. 
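[Editorial note, illustration only] The four Initialize_TS VCs just discharged mirror a three-assignment body: copy two tweak words (VCs 1 and 2, the range checks on elements 0 and 1 of tweak_to_words(fld_tweak_words(fld_h(ctx)))), xor them into a third word (VC 3), and a final trivially-true check (VC 4). The standalone sketch below reproduces only that shape; the type names, placeholder values and the driver procedure are assumptions, not the packaged Skein.Skein_512_Process_Block.Initialize_TS:

   with Interfaces; use Interfaces;

   procedure Initialize_TS_Sketch is
      --  Stand-ins for the SPARK.Crypto types named in the FDL declarations
      --  (modifier_words_index is 0 .. 1; TS holds three 64-bit words).
      type Modifier_Words_Index is range 0 .. 1;
      type TS_Index             is range 0 .. 2;

      Tweak : constant array (Modifier_Words_Index) of Unsigned_64 :=
        (0 => 16#0706_0504_0302_0100#,   --  placeholder values only
         1 => 16#0F0E_0D0C_0B0A_0908#);

      TS : array (TS_Index) of Unsigned_64;
   begin
      TS (0) := Tweak (0);           --  VC 1: word 0 lies in 0 .. 2**64 - 1
      TS (1) := Tweak (1);           --  VC 2: word 1 lies in the same range
      TS (2) := TS (0) xor TS (1);   --  VC 3: xor of two in-range words stays
                                     --  in range (hypotheses H22/H23/H27/H28)
   end Initialize_TS_Sketch;

The step worth noting is VC 3: C1 and C2 are proved directly from the operand range hypotheses H22, H23, H27 and H28, so no bit-level model of xor is needed to keep the result inside 0 .. 18446744073709551615.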
././@LongLink0000000000000000000000000000015200000000000011563 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/threefish_block.slgspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/threefish_bloc0000644000175000017500000017711611712513676032501 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Threefish_Block @@@@@@@@@@ VC: procedure_threefish_block_1. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) %%% Simplified C3 on reading formula in, to give: %%% C3: true *** Proved C3: true -S- Applied substitution rule threefish_bl_rules(9). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C1: skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus >= 0 New C4: true -S- Applied substitution rule threefish_bl_rules(10). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C2: skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus <= 18446744073709551615 New C5: true *** Proved C4: true *** Proved C5: true -S- Applied substitution rule threefish_bl_rules(1). This was achieved by replacing all occurrences of skein_512_rounds_total by: 72. New C1: 9 mod interfaces__unsigned_64__modulus >= 0 New C2: 9 mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule threefish_bl_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: true New C2: true -S- Applied substitution rule threefish_bl_rules(15). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) New H3: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) New H4: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule threefish_bl_rules(16). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H2: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= 18446744073709551615) New H3: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) New H4: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule threefish_bl_rules(21). This was achieved by replacing all occurrences of spark__crypto__i3__first by: 0. New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule threefish_bl_rules(22). This was achieved by replacing all occurrences of spark__crypto__i3__last by: 2. New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 2 -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) -S- Applied substitution rule threefish_bl_rules(26). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H4: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule threefish_bl_rules(27). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H4: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule threefish_bl_rules(31). This was achieved by replacing all occurrences of spark__crypto__i9__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule threefish_bl_rules(32). This was achieved by replacing all occurrences of spark__crypto__i9__last by: 8. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 8 -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= 18446744073709551615) *** Proved C1: true *** Proved C2: true *** PROVED VC. @@@@@@@@@@ VC: procedure_threefish_block_2. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H7 on reading formula in, to give: %%% H7: true -S- Applied substitution rule threefish_bl_rules(1). This was achieved by replacing all occurrences of skein_512_rounds_total by: 72. 
New H5: 9 mod interfaces__unsigned_64__modulus >= interfaces__unsigned_64__first New H6: 9 mod interfaces__unsigned_64__modulus <= interfaces__unsigned_64__last New C1: 1 <= 9 mod interfaces__unsigned_64__modulus -> 9 mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first and 9 mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last New C2: 1 <= 9 mod interfaces__unsigned_64__modulus -> 1 >= spark__unsigned__u64__first and 1 <= spark__unsigned__u64__last -S- Applied substitution rule threefish_bl_rules(9). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H8: true New H5: 9 mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule threefish_bl_rules(10). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H9: true New H6: 9 mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule threefish_bl_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H5: true New H6: true New C1: 9 >= spark__unsigned__u64__first and 9 <= spark__unsigned__u64__last New C2: 1 >= spark__unsigned__u64__first and 1 <= spark__unsigned__u64__last -S- Applied substitution rule threefish_bl_rules(15). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) New H3: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) New H4: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: 9 <= spark__unsigned__u64__last New C2: 1 <= spark__unsigned__u64__last -S- Applied substitution rule threefish_bl_rules(16). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= 18446744073709551615) New H3: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) New H4: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C1: true New C2: true -S- Applied substitution rule threefish_bl_rules(21). This was achieved by replacing all occurrences of spark__crypto__i3__first by: 0. New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule threefish_bl_rules(22). This was achieved by replacing all occurrences of spark__crypto__i3__last by: 2. New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 2 -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) -S- Applied substitution rule threefish_bl_rules(26). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. 
New H4: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule threefish_bl_rules(27). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H4: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule threefish_bl_rules(31). This was achieved by replacing all occurrences of spark__crypto__i9__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule threefish_bl_rules(32). This was achieved by replacing all occurrences of spark__crypto__i9__last by: 8. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 8 -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= 18446744073709551615) *** Proved C1: true *** Proved C2: true *** PROVED VC. @@@@@@@@@@ VC: procedure_threefish_block_3. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H7 on reading formula in, to give: %%% H7: true %%% Simplified C1 on reading formula in, to give: %%% C1: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) %%% Simplified C2 on reading formula in, to give: %%% C2: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) %%% Simplified C3 on reading formula in, to give: %%% C3: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) %%% Simplified C6 on reading formula in, to give: %%% C6: true *** Proved C1: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) using hypothesis H2. *** Proved C2: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) using hypothesis H3. *** Proved C3: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) using hypothesis H4. 
*** Proved C6: true *** Proved C7: 1 <= skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus using hypothesis H12. -S- Applied substitution rule threefish_bl_rules(15). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) New H3: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) New H4: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H10: 1 <= skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus -> skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus >= 0 and skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last New H11: 1 <= skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus -> 1 <= spark__unsigned__u64__last New C4: true -S- Applied substitution rule threefish_bl_rules(16). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= 18446744073709551615) New H3: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) New H4: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New H10: 1 <= skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus -> skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus >= 0 and skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H11: true New C5: true *** Proved C4: true *** Proved C5: true *** PROVED VC. @@@@@@@@@@ VC: procedure_threefish_block_4. 
@@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__1, [i___1]) and element(x__1, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__2, [i___1]) and element(x__2, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H10 on reading formula in, to give: %%% H10: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__3, [i___1]) and element(x__3, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H11 on reading formula in, to give: %%% H11: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__4, [i___1]) and element(x__4, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H14 on reading formula in, to give: %%% H14: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__5, [i___1]) and element(x__5, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H15 on reading formula in, to give: %%% H15: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__6, [i___1]) and element(x__6, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H16 on reading formula in, to give: %%% H16: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__7, [i___1]) and element(x__7, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H17 on reading formula in, to give: %%% H17: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__8, [i___1]) and element(x__8, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H18 on reading formula in, to give: %%% H18: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__9, [i___1]) and element(x__9, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H21 on reading formula in, to give: %%% H21: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__10, [i___1]) and element(x__10, [i___1]) <= spark__unsigned__u64__last) %%% Simplified C1 on reading formula in, 
to give: %%% C1: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) %%% Simplified C2 on reading formula in, to give: %%% C2: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) %%% Simplified C3 on reading formula in, to give: %%% C3: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__10, [i___1]) and element(x__10, [i___1]) <= spark__unsigned__u64__last) %%% Simplified C6 on reading formula in, to give: %%% C6: loop__1__r >= 0 *** Proved C1: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) using hypothesis H1. *** Proved C2: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) using hypothesis H2. *** Proved C3: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__10, [i___1]) and element(x__10, [i___1]) <= spark__unsigned__u64__last) using hypothesis H21. *** Proved C4: loop__1__r + 1 >= spark__unsigned__u64__first using hypothesis H4. *** Proved C6: loop__1__r >= 0 using hypothesis H6. -S- Applied substitution rule threefish_bl_rules(16). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H1: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks, [i___1]) and element(ks, [i___1]) <= 18446744073709551615) New H2: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) New H3: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New H5: loop__1__r <= 18446744073709551615 New H8: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__1, [i___1]) and element(x__1, [i___1]) <= 18446744073709551615) New H9: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__2, [i___1]) and element(x__2, [i___1]) <= 18446744073709551615) New H10: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__3, [i___1]) and element(x__3, [i___1]) <= 18446744073709551615) New H11: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__4, [i___1]) and element(x__4, [i___1]) <= 18446744073709551615) New H13: (loop__1__r * 2 mod interfaces__unsigned_64__modulus - 1) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H14: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__5, [i___1]) and element(x__5, [i___1]) <= 18446744073709551615) New H15: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__6, [i___1]) and element(x__6, [i___1]) <= 18446744073709551615) New H16: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__7, [i___1]) and element(x__7, [i___1]) <= 18446744073709551615) New H17: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__8, [i___1]) and element(x__8, [i___1]) <= 18446744073709551615) New H18: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__9, [i___1]) and element(x__9, [i___1]) <= 18446744073709551615) New H20: loop__1__r * 2 mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H21: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__10, [i___1]) and element(x__10, [i___1]) <= 18446744073709551615) New C5: loop__1__r <= 18446744073709551614 >>> Restructured hypothesis H22 into: >>> H22: loop__1__r <> skein_512_rounds_total div 8 mod interfaces__unsigned_64__modulus -S- Applied substitution rule threefish_bl_rules(1). This was achieved by replacing all occurrences of skein_512_rounds_total by: 72. 
New H7: loop__1__r <= 9 mod interfaces__unsigned_64__modulus New H22: loop__1__r <> 9 mod interfaces__unsigned_64__modulus New C7: loop__1__r + 1 <= 9 mod interfaces__unsigned_64__modulus -S- Applied substitution rule threefish_bl_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H12: (loop__1__r * 2 mod 18446744073709551616 - 1) mod 18446744073709551616 >= spark__unsigned__u64__first New H13: (loop__1__r * 2 mod 18446744073709551616 - 1) mod 18446744073709551616 <= 18446744073709551615 New H19: loop__1__r * 2 mod 18446744073709551616 >= spark__unsigned__u64__first New H20: loop__1__r * 2 mod 18446744073709551616 <= 18446744073709551615 New H7: loop__1__r <= 9 New H22: loop__1__r <> 9 New C7: loop__1__r <= 8 -S- Applied substitution rule threefish_bl_rules(15). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H1: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= 18446744073709551615) New H2: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) New H3: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New H4: loop__1__r >= 0 New H8: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__1, [i___1]) and element(x__1, [i___1]) <= 18446744073709551615) New H9: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__2, [i___1]) and element(x__2, [i___1]) <= 18446744073709551615) New H10: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__3, [i___1]) and element(x__3, [i___1]) <= 18446744073709551615) New H11: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__4, [i___1]) and element(x__4, [i___1]) <= 18446744073709551615) New H14: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__5, [i___1]) and element(x__5, [i___1]) <= 18446744073709551615) New H15: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__6, [i___1]) and element(x__6, [i___1]) <= 18446744073709551615) New H16: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__7, [i___1]) and element(x__7, [i___1]) <= 18446744073709551615) New H17: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__8, [i___1]) and element(x__8, [i___1]) <= 18446744073709551615) New H18: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__9, [i___1]) and element(x__9, [i___1]) <= 18446744073709551615) New H21: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__10, [i___1]) and element(x__10, [i___1]) <= 18446744073709551615) New H12: (loop__1__r * 2 mod 18446744073709551616 - 1) mod 18446744073709551616 >= 0 New H19: loop__1__r * 2 mod 18446744073709551616 >= 0 -S- Applied substitution rule 
threefish_bl_rules(21). This was achieved by replacing all occurrences of spark__crypto__i3__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule threefish_bl_rules(22). This was achieved by replacing all occurrences of spark__crypto__i3__last by: 2. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 2 -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) -S- Applied substitution rule threefish_bl_rules(26). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__1, [i___1]) and element( x__1, [i___1]) <= 18446744073709551615) New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__2, [i___1]) and element( x__2, [i___1]) <= 18446744073709551615) New H10: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__3, [i___1]) and element( x__3, [i___1]) <= 18446744073709551615) New H11: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__4, [i___1]) and element( x__4, [i___1]) <= 18446744073709551615) New H14: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__5, [i___1]) and element( x__5, [i___1]) <= 18446744073709551615) New H15: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__6, [i___1]) and element( x__6, [i___1]) <= 18446744073709551615) New H16: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__7, [i___1]) and element( x__7, [i___1]) <= 18446744073709551615) New H17: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__8, [i___1]) and element( x__8, [i___1]) <= 18446744073709551615) New H18: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__9, [i___1]) and element( x__9, [i___1]) <= 18446744073709551615) New H21: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__10, [i___1]) and element( x__10, [i___1]) <= 18446744073709551615) -S- Applied substitution rule threefish_bl_rules(27). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. 
New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__1, [i___1]) and element(x__1, [i___1]) <= 18446744073709551615) New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__2, [i___1]) and element(x__2, [i___1]) <= 18446744073709551615) New H10: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__3, [i___1]) and element(x__3, [i___1]) <= 18446744073709551615) New H11: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__4, [i___1]) and element(x__4, [i___1]) <= 18446744073709551615) New H14: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__5, [i___1]) and element(x__5, [i___1]) <= 18446744073709551615) New H15: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__6, [i___1]) and element(x__6, [i___1]) <= 18446744073709551615) New H16: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__7, [i___1]) and element(x__7, [i___1]) <= 18446744073709551615) New H17: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__8, [i___1]) and element(x__8, [i___1]) <= 18446744073709551615) New H18: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__9, [i___1]) and element(x__9, [i___1]) <= 18446744073709551615) New H21: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__10, [i___1]) and element(x__10, [i___1]) <= 18446744073709551615) -S- Applied substitution rule threefish_bl_rules(31). This was achieved by replacing all occurrences of spark__crypto__i9__first by: 0. New H1: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule threefish_bl_rules(32). This was achieved by replacing all occurrences of spark__crypto__i9__last by: 8. New H1: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 8 -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= 18446744073709551615) %%% Hypotheses H7 & H22 together imply that loop__1__r < 9. H7 & H22 have therefore been deleted and a new H23 added to this effect. *** Proved C5: loop__1__r <= 18446744073709551614 using hypothesis H23. *** Proved C7: loop__1__r <= 8 using hypothesis H23. *** PROVED VC. @@@@@@@@@@ VC: procedure_threefish_block_5. 
@@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__1, [i___1]) and element(x__1, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__2, [i___1]) and element(x__2, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H10 on reading formula in, to give: %%% H10: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__3, [i___1]) and element(x__3, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H11 on reading formula in, to give: %%% H11: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__4, [i___1]) and element(x__4, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule threefish_bl_rules(15). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H1: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) New H2: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) New H3: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H4: loop__1__r >= 0 New H8: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__1, [i___1]) and element(x__1, [i___1]) <= spark__unsigned__u64__last) New H9: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__2, [i___1]) and element(x__2, [i___1]) <= spark__unsigned__u64__last) New H10: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__3, [i___1]) and element(x__3, [i___1]) <= spark__unsigned__u64__last) New H11: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__4, [i___1]) and element(x__4, [i___1]) <= spark__unsigned__u64__last) New C1: (loop__1__r * 2 mod interfaces__unsigned_64__modulus - 1) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule threefish_bl_rules(16). 
This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H5: loop__1__r <= 18446744073709551615 New H1: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= 18446744073709551615) New H2: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) New H3: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New H8: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__1, [i___1]) and element(x__1, [i___1]) <= 18446744073709551615) New H9: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__2, [i___1]) and element(x__2, [i___1]) <= 18446744073709551615) New H10: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__3, [i___1]) and element(x__3, [i___1]) <= 18446744073709551615) New H11: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__4, [i___1]) and element(x__4, [i___1]) <= 18446744073709551615) New C2: (loop__1__r * 2 mod interfaces__unsigned_64__modulus - 1) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule threefish_bl_rules(1). This was achieved by replacing all occurrences of skein_512_rounds_total by: 72. New H7: loop__1__r <= 9 mod interfaces__unsigned_64__modulus -S- Applied substitution rule threefish_bl_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H7: loop__1__r <= 9 New C1: (loop__1__r * 2 mod 18446744073709551616 - 1) mod 18446744073709551616 >= 0 New C2: (loop__1__r * 2 mod 18446744073709551616 - 1) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule threefish_bl_rules(21). This was achieved by replacing all occurrences of spark__crypto__i3__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule threefish_bl_rules(22). This was achieved by replacing all occurrences of spark__crypto__i3__last by: 2. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 2 -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) -S- Applied substitution rule threefish_bl_rules(26). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. 
New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__1, [i___1]) and element( x__1, [i___1]) <= 18446744073709551615) New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__2, [i___1]) and element( x__2, [i___1]) <= 18446744073709551615) New H10: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__3, [i___1]) and element( x__3, [i___1]) <= 18446744073709551615) New H11: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__4, [i___1]) and element( x__4, [i___1]) <= 18446744073709551615) -S- Applied substitution rule threefish_bl_rules(27). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__1, [i___1]) and element(x__1, [i___1]) <= 18446744073709551615) New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__2, [i___1]) and element(x__2, [i___1]) <= 18446744073709551615) New H10: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__3, [i___1]) and element(x__3, [i___1]) <= 18446744073709551615) New H11: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__4, [i___1]) and element(x__4, [i___1]) <= 18446744073709551615) -S- Applied substitution rule threefish_bl_rules(31). This was achieved by replacing all occurrences of spark__crypto__i9__first by: 0. New H1: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule threefish_bl_rules(32). This was achieved by replacing all occurrences of spark__crypto__i9__last by: 8. New H1: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 8 -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= 18446744073709551615) *** Proved C1: (loop__1__r * 2 mod 18446744073709551616 - 1) mod 18446744073709551616 >= 0 *** Proved C2: (loop__1__r * 2 mod 18446744073709551616 - 1) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_threefish_block_6. 
@@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__1, [i___1]) and element(x__1, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__2, [i___1]) and element(x__2, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H10 on reading formula in, to give: %%% H10: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__3, [i___1]) and element(x__3, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H11 on reading formula in, to give: %%% H11: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__4, [i___1]) and element(x__4, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H14 on reading formula in, to give: %%% H14: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__5, [i___1]) and element(x__5, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H15 on reading formula in, to give: %%% H15: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__6, [i___1]) and element(x__6, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H16 on reading formula in, to give: %%% H16: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__7, [i___1]) and element(x__7, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H17 on reading formula in, to give: %%% H17: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__8, [i___1]) and element(x__8, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H18 on reading formula in, to give: %%% H18: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__9, [i___1]) and element(x__9, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule threefish_bl_rules(15). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H1: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= spark__unsigned__u64__last) New H2: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= spark__unsigned__u64__last) New H3: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H4: loop__1__r >= 0 New H8: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__1, [i___1]) and element(x__1, [i___1]) <= spark__unsigned__u64__last) New H9: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__2, [i___1]) and element(x__2, [i___1]) <= spark__unsigned__u64__last) New H10: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__3, [i___1]) and element(x__3, [i___1]) <= spark__unsigned__u64__last) New H11: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__4, [i___1]) and element(x__4, [i___1]) <= spark__unsigned__u64__last) New H12: (loop__1__r * 2 mod interfaces__unsigned_64__modulus - 1) mod interfaces__unsigned_64__modulus >= 0 New H14: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__5, [i___1]) and element(x__5, [i___1]) <= spark__unsigned__u64__last) New H15: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__6, [i___1]) and element(x__6, [i___1]) <= spark__unsigned__u64__last) New H16: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__7, [i___1]) and element(x__7, [i___1]) <= spark__unsigned__u64__last) New H17: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__8, [i___1]) and element(x__8, [i___1]) <= spark__unsigned__u64__last) New H18: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__9, [i___1]) and element(x__9, [i___1]) <= spark__unsigned__u64__last) New C1: loop__1__r * 2 mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule threefish_bl_rules(16). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H5: loop__1__r <= 18446744073709551615 New H13: (loop__1__r * 2 mod interfaces__unsigned_64__modulus - 1) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H1: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= 18446744073709551615) New H2: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) New H3: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New H8: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__1, [i___1]) and element(x__1, [i___1]) <= 18446744073709551615) New H9: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__2, [i___1]) and element(x__2, [i___1]) <= 18446744073709551615) New H10: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__3, [i___1]) and element(x__3, [i___1]) <= 18446744073709551615) New H11: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__4, [i___1]) and element(x__4, [i___1]) <= 18446744073709551615) New H14: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__5, [i___1]) and element(x__5, [i___1]) <= 18446744073709551615) New H15: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__6, [i___1]) and element(x__6, [i___1]) <= 18446744073709551615) New H16: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__7, [i___1]) and element(x__7, [i___1]) <= 18446744073709551615) New H17: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__8, [i___1]) and element(x__8, [i___1]) <= 18446744073709551615) New H18: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__9, [i___1]) and element(x__9, [i___1]) <= 18446744073709551615) New C2: loop__1__r * 2 mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule threefish_bl_rules(1). This was achieved by replacing all occurrences of skein_512_rounds_total by: 72. New H7: loop__1__r <= 9 mod interfaces__unsigned_64__modulus -S- Applied substitution rule threefish_bl_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H12: (loop__1__r * 2 mod 18446744073709551616 - 1) mod 18446744073709551616 >= 0 New H13: (loop__1__r * 2 mod 18446744073709551616 - 1) mod 18446744073709551616 <= 18446744073709551615 New H7: loop__1__r <= 9 New C1: loop__1__r * 2 mod 18446744073709551616 >= 0 New C2: loop__1__r * 2 mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule threefish_bl_rules(21). This was achieved by replacing all occurrences of spark__crypto__i3__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts, [i___1]) and element(ts, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule threefish_bl_rules(22). 
This was achieved by replacing all occurrences of spark__crypto__i3__last by: 2. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 2 -> 0 <= element(ts, [i___1]) and element(ts, [i___1]) <= 18446744073709551615) -S- Applied substitution rule threefish_bl_rules(26). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__1, [i___1]) and element( x__1, [i___1]) <= 18446744073709551615) New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__2, [i___1]) and element( x__2, [i___1]) <= 18446744073709551615) New H10: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__3, [i___1]) and element( x__3, [i___1]) <= 18446744073709551615) New H11: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__4, [i___1]) and element( x__4, [i___1]) <= 18446744073709551615) New H14: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__5, [i___1]) and element( x__5, [i___1]) <= 18446744073709551615) New H15: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__6, [i___1]) and element( x__6, [i___1]) <= 18446744073709551615) New H16: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__7, [i___1]) and element( x__7, [i___1]) <= 18446744073709551615) New H17: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__8, [i___1]) and element( x__8, [i___1]) <= 18446744073709551615) New H18: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__9, [i___1]) and element( x__9, [i___1]) <= 18446744073709551615) -S- Applied substitution rule threefish_bl_rules(27). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. 
New H3: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New H8: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__1, [i___1]) and element(x__1, [i___1]) <= 18446744073709551615) New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__2, [i___1]) and element(x__2, [i___1]) <= 18446744073709551615) New H10: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__3, [i___1]) and element(x__3, [i___1]) <= 18446744073709551615) New H11: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__4, [i___1]) and element(x__4, [i___1]) <= 18446744073709551615) New H14: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__5, [i___1]) and element(x__5, [i___1]) <= 18446744073709551615) New H15: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__6, [i___1]) and element(x__6, [i___1]) <= 18446744073709551615) New H16: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__7, [i___1]) and element(x__7, [i___1]) <= 18446744073709551615) New H17: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__8, [i___1]) and element(x__8, [i___1]) <= 18446744073709551615) New H18: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__9, [i___1]) and element(x__9, [i___1]) <= 18446744073709551615) -S- Applied substitution rule threefish_bl_rules(31). This was achieved by replacing all occurrences of spark__crypto__i9__first by: 0. New H1: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks, [i___1]) and element(ks, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule threefish_bl_rules(32). This was achieved by replacing all occurrences of spark__crypto__i9__last by: 8. New H1: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 8 -> 0 <= element(ks, [i___1]) and element(ks, [i___1]) <= 18446744073709551615) *** Proved C1: loop__1__r * 2 mod 18446744073709551616 >= 0 *** Proved C2: loop__1__r * 2 mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_threefish_block_7. @@@@@@@@@@ *** Proved C1: true *** PROVED VC. @@@@@@@@@@ VC: procedure_threefish_block_8. @@@@@@@@@@ *** Proved C1: true *** PROVED VC. 
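The conclusions proved in the threefish_block VCs traced above are pure range checks: once rule threefish_bl_rules(13) rewrites interfaces__unsigned_64__modulus to 18446744073709551616, any expression of the form E mod 18446744073709551616 lies in 0 .. 18446744073709551615 by the definition of mod with a positive modulus, so C1 and C2 follow from properties of mod alone (the simplifier discharges them without citing any of the hypotheses on ks, ts or x). A minimal Ada sketch of the same observation follows; it is illustrative only, Double_Mod_Demo is not part of this package, and the bound R <= 9 simply mirrors hypothesis H7.

--  Illustrative sketch only (not part of the SPARK distribution): for any
--  64-bit modular value R, (R * 2) mod 2**64 stays in Unsigned_64'Range,
--  which is exactly what conclusions C1 and C2 above assert.
with Interfaces; use Interfaces;
procedure Double_Mod_Demo is
   R : constant Unsigned_64 := 9;      --  hypothesis H7 gives loop__1__r <= 9
   D : constant Unsigned_64 := R * 2;  --  modular type: wraps at 2**64
begin
   pragma Assert (D <= Unsigned_64'Last);  --  mirrors C2; C1 is D >= 0
end Double_Mod_Demo;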
spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_4.vsm0000644000175000017500000000005311712765060031654 0ustar eugeneugenround_4,0,0,0,0,0,0,0,-nan,-nan,-nan,-nan, spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/inject_key.vct0000644000175000017500000000000011712513676032411 0ustar eugeneugenspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_6.vsm0000644000175000017500000000005311712765060031656 0ustar eugeneugenround_6,0,0,0,0,0,0,0,-nan,-nan,-nan,-nan, spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_4.vct0000644000175000017500000000000011712513676031637 0ustar eugeneugenspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_8.rls0000644000175000017500000000622111712513676031662 0ustar eugeneugen /*********************************************************/ /*Proof Rule Declarations*/ /*Examiner Pro Edition*/ /*********************************************************/ /*procedure Skein.Skein_512_Process_Block.Round_8*/ rule_family round_8_rules: X requires [X:any] & X <= Y requires [X:ire, Y:ire] & X >= Y requires [X:ire, Y:ire]. round_8_rules(1): r_512_7_0 may_be_replaced_by 8. round_8_rules(2): r_512_7_1 may_be_replaced_by 35. round_8_rules(3): r_512_7_2 may_be_replaced_by 56. round_8_rules(4): r_512_7_3 may_be_replaced_by 22. round_8_rules(5): integer__size >= 0 may_be_deduced. round_8_rules(6): integer__first may_be_replaced_by -2147483648. round_8_rules(7): integer__last may_be_replaced_by 2147483647. round_8_rules(8): integer__base__first may_be_replaced_by -2147483648. round_8_rules(9): integer__base__last may_be_replaced_by 2147483647. round_8_rules(10): interfaces__unsigned_64__size >= 0 may_be_deduced. round_8_rules(11): interfaces__unsigned_64__size may_be_replaced_by 64. round_8_rules(12): interfaces__unsigned_64__first may_be_replaced_by 0. round_8_rules(13): interfaces__unsigned_64__last may_be_replaced_by 18446744073709551615. round_8_rules(14): interfaces__unsigned_64__base__first may_be_replaced_by 0. round_8_rules(15): interfaces__unsigned_64__base__last may_be_replaced_by 18446744073709551615. round_8_rules(16): interfaces__unsigned_64__modulus may_be_replaced_by 18446744073709551616. round_8_rules(17): spark__unsigned__u64__size >= 0 may_be_deduced. round_8_rules(18): spark__unsigned__u64__first may_be_replaced_by 0. round_8_rules(19): spark__unsigned__u64__last may_be_replaced_by 18446744073709551615. round_8_rules(20): spark__unsigned__u64__base__first may_be_replaced_by 0. round_8_rules(21): spark__unsigned__u64__base__last may_be_replaced_by 18446744073709551615. round_8_rules(22): spark__unsigned__u64__modulus may_be_replaced_by 18446744073709551616. round_8_rules(23): spark__unsigned__shift_count__size >= 0 may_be_deduced. round_8_rules(24): spark__unsigned__shift_count__first may_be_replaced_by 0. round_8_rules(25): spark__unsigned__shift_count__last may_be_replaced_by 64. round_8_rules(26): spark__unsigned__shift_count__base__first may_be_replaced_by -2147483648. round_8_rules(27): spark__unsigned__shift_count__base__last may_be_replaced_by 2147483647. round_8_rules(28): spark__crypto__i8__size >= 0 may_be_deduced. round_8_rules(29): spark__crypto__i8__first may_be_replaced_by 0. round_8_rules(30): spark__crypto__i8__last may_be_replaced_by 7. round_8_rules(31): spark__crypto__i8__base__first may_be_replaced_by -2147483648. 
round_8_rules(32): spark__crypto__i8__base__last may_be_replaced_by 2147483647. round_8_rules(33): spark__crypto__word_count_t__size >= 0 may_be_deduced. round_8_rules(34): spark__crypto__word_count_t__first may_be_replaced_by 0. round_8_rules(35): spark__crypto__word_count_t__last may_be_replaced_by 268435455. round_8_rules(36): spark__crypto__word_count_t__base__first may_be_replaced_by -2147483648. round_8_rules(37): spark__crypto__word_count_t__base__last may_be_replaced_by 2147483647. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_7.fdl0000644000175000017500000000577111712513676031637 0ustar eugeneugen {*******************************************************} {FDL Declarations} {Examiner Pro Edition} {*******************************************************} {procedure Skein.Skein_512_Process_Block.Round_7} title procedure round_7; function round__(real) : integer; type interfaces__unsigned_64 = integer; type spark__unsigned__u64 = integer; type spark__unsigned__shift_count = integer; type spark__crypto__i8 = integer; type spark__crypto__word_count_t = integer; type spark__crypto__u64_seq = array [ spark__crypto__word_count_t] of spark__unsigned__u64; const r_512_6_0 : integer = pending; const r_512_6_1 : integer = pending; const r_512_6_2 : integer = pending; const r_512_6_3 : integer = pending; const spark__crypto__word_count_t__base__first : integer = pending; const spark__crypto__word_count_t__base__last : integer = pending; const spark__crypto__i8__base__first : integer = pending; const spark__crypto__i8__base__last : integer = pending; const spark__unsigned__shift_count__base__first : integer = pending; const spark__unsigned__shift_count__base__last : integer = pending; const spark__unsigned__u64__base__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__base__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__last : interfaces__unsigned_64 = pending; const integer__base__first : integer = pending; const integer__base__last : integer = pending; const spark__crypto__word_count_t__first : integer = pending; const spark__crypto__word_count_t__last : integer = pending; const spark__crypto__word_count_t__size : integer = pending; const spark__crypto__i8__first : integer = pending; const spark__crypto__i8__last : integer = pending; const spark__crypto__i8__size : integer = pending; const spark__unsigned__shift_count__first : integer = pending; const spark__unsigned__shift_count__last : integer = pending; const spark__unsigned__shift_count__size : integer = pending; const spark__unsigned__u64__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__last : interfaces__unsigned_64 = pending; const spark__unsigned__u64__modulus : integer = pending; const spark__unsigned__u64__size : integer = pending; const interfaces__unsigned_64__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__modulus : integer = pending; const interfaces__unsigned_64__size : integer = pending; const integer__first : integer = pending; const integer__last : integer = pending; const integer__size : integer = pending; var x : spark__crypto__u64_seq; function spark__unsigned__rotate_left_64( interfaces__unsigned_64, integer) : interfaces__unsigned_64; end; 
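The FDL declarations above model the Ada environment for the proof: every type bound and modulus is introduced as a pending constant, and the companion .rls rule family supplies the concrete value (for example round_8_rules(16) above, interfaces__unsigned_64__modulus may_be_replaced_by 18446744073709551616), which the simplifier then uses in the "Applied substitution rule" steps seen in the logs. The uninterpreted function spark__unsigned__rotate_left_64 stands for a 64-bit rotate-left whose result stays within U64, which is what the rotate_left_64 range hypotheses in the round_1 log below record. A minimal Ada sketch of such an operation, using the standard Interfaces.Rotate_Left intrinsic, is given here for illustration only; the actual SPARK.Unsigned body is not reproduced in this listing.

--  Illustrative sketch only: a 64-bit rotate-left expressed with the
--  standard Interfaces.Rotate_Left intrinsic.  The result is again an
--  Unsigned_64, so it lies in 0 .. 2**64 - 1 by construction.
with Interfaces; use Interfaces;
function Rotate_Left_64_Demo
  (Value : Unsigned_64; Amount : Natural) return Unsigned_64 is
begin
   return Rotate_Left (Value, Amount);
end Rotate_Left_64_Demo;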
spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_1.slg0000644000175000017500000025145211712513676031650 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Round_1 @@@@@@@@@@ VC: procedure_round_1_1. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_1_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_1_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_1_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_1_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_1_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [0]) + element(x, [1])) mod 18446744073709551616 >= 0 New C2: (element(x, [0]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [0]) + element(x, [1])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [0]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_1_2. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . 
%%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [1]) <= interfaces__unsigned_64__last *** Proved C5: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 1 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 1 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_1_rules(1). This was achieved by replacing all occurrences of r_512_0_0 by: 46. New C1: 46 >= spark__unsigned__shift_count__first New C2: 46 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_1_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_1_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule round_1_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [1]) >= 0 -S- Applied substitution rule round_1_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [1]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_1_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [0]) + element(x, [1])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [0]) + element(x, [1])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_1_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [0]) + element(x, [1])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_1_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [0]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_1_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_1_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [1]) >= 0 using hypothesis H2. *** Proved C4: element(x, [1]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_1_3. 
@@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [1]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0) <= spark__unsigned__u64__last -S- Applied substitution rule round_1_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0) >= 0 -S- Applied substitution rule round_1_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0) <= 18446744073709551615 -S- Applied substitution rule round_1_rules(1). This was achieved by replacing all occurrences of r_512_0_0 by: 46. New H11: 46 >= spark__unsigned__shift_count__first New H12: 46 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [1]), 46) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [1]), 46) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [1]), 46) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [1]), 46) <= 18446744073709551615 -S- Applied substitution rule round_1_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [1]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [1]), 46) >= 0 -S- Applied substitution rule round_1_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. 
New H14: element(x, [1]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [1]), 46) <= 18446744073709551615 -S- Applied substitution rule round_1_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [0]) + element(x, [1])) mod 18446744073709551616 >= 0 New H4: (element(x, [0]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_1_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_1_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_1_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_1_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [1]), 46) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [1]), 46) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_1_4. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [1]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . 
%%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0), (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0), (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 0 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 0 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 1 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 1 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_1_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0), (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_1_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0), (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0), (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. *** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0), (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_1_5. 
@@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [1]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0) <= spark__unsigned__u64__last %%% Simplified H23 on reading formula in, to give: %%% H23: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0), (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified H24 on reading formula in, to give: %%% H24: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0), (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last --- Hypothesis H25 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H26 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H27 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H28 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H29 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . 
%%% Simplified C2 on reading formula in, to give: %%% C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0), (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [0], (element(x, [0]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0), (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) *** Proved C1: true *** Proved C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0), (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [0], (element(x, [0]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_0_0), (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) using hypotheses H2, H3, H4, H5, H6, H7, H8, H23 & H24. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_1_6. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_1_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_1_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_1_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_1_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_1_rules(16). 
This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [2]) + element(x, [3])) mod 18446744073709551616 >= 0 New C2: (element(x, [2]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [2]) + element(x, [3])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [2]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_1_7. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [3]) <= interfaces__unsigned_64__last *** Proved C5: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 3 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 3 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_1_rules(2). This was achieved by replacing all occurrences of r_512_0_1 by: 36. New C1: 36 >= spark__unsigned__shift_count__first New C2: 36 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_1_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_1_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule round_1_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [3]) >= 0 -S- Applied substitution rule round_1_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [3]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_1_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [2]) + element(x, [3])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [2]) + element(x, [3])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_1_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [2]) + element(x, [3])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_1_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [2]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_1_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_1_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [3]) >= 0 using hypothesis H2. *** Proved C4: element(x, [3]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_1_8. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [3]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1) <= spark__unsigned__u64__last -S- Applied substitution rule round_1_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1) >= 0 -S- Applied substitution rule round_1_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1) <= 18446744073709551615 -S- Applied substitution rule round_1_rules(2). This was achieved by replacing all occurrences of r_512_0_1 by: 36. New H11: 36 >= spark__unsigned__shift_count__first New H12: 36 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [3]), 36) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [3]), 36) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [3]), 36) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [3]), 36) <= 18446744073709551615 -S- Applied substitution rule round_1_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [3]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [3]), 36) >= 0 -S- Applied substitution rule round_1_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [3]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [3]), 36) <= 18446744073709551615 -S- Applied substitution rule round_1_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [2]) + element(x, [3])) mod 18446744073709551616 >= 0 New H4: (element(x, [2]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_1_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_1_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_1_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_1_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [3]), 36) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [3]), 36) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_1_9. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . 
%%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [3]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1), (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1), (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 2 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 2 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 3 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 3 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_1_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1), (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_1_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1), (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1), (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. 
*** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1), (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_1_10. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [3]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1) <= spark__unsigned__u64__last %%% Simplified H23 on reading formula in, to give: %%% H23: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1), (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified H24 on reading formula in, to give: %%% H24: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1), (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last --- Hypothesis H25 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H26 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H27 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H28 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H29 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . 
%%% Simplified C2 on reading formula in, to give: %%% C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], bit__xor( spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1), (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [2], (element(x, [2]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], bit__xor( spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1), (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) *** Proved C1: true *** Proved C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], bit__xor( spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1), (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [2], (element(x, [2]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], bit__xor( spark__unsigned__rotate_left_64(element(x, [3]), r_512_0_1), (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) using hypotheses H2, H3, H4, H5, H6, H7, H8, H23 & H24. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_1_11. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_1_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_1_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_1_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_1_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_1_rules(16). 
This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [4]) + element(x, [5])) mod 18446744073709551616 >= 0 New C2: (element(x, [4]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [4]) + element(x, [5])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [4]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_1_12. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [5]) <= interfaces__unsigned_64__last *** Proved C5: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 5 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 5 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_1_rules(3). This was achieved by replacing all occurrences of r_512_0_2 by: 19. New C1: 19 >= spark__unsigned__shift_count__first New C2: 19 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_1_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_1_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule round_1_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [5]) >= 0 -S- Applied substitution rule round_1_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [5]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_1_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [4]) + element(x, [5])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [4]) + element(x, [5])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_1_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [4]) + element(x, [5])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_1_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [4]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_1_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_1_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [5]) >= 0 using hypothesis H2. *** Proved C4: element(x, [5]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_1_13. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [5]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2) <= spark__unsigned__u64__last -S- Applied substitution rule round_1_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2) >= 0 -S- Applied substitution rule round_1_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2) <= 18446744073709551615 -S- Applied substitution rule round_1_rules(3). This was achieved by replacing all occurrences of r_512_0_2 by: 19. New H11: 19 >= spark__unsigned__shift_count__first New H12: 19 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [5]), 19) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [5]), 19) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [5]), 19) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [5]), 19) <= 18446744073709551615 -S- Applied substitution rule round_1_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [5]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [5]), 19) >= 0 -S- Applied substitution rule round_1_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [5]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [5]), 19) <= 18446744073709551615 -S- Applied substitution rule round_1_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [4]) + element(x, [5])) mod 18446744073709551616 >= 0 New H4: (element(x, [4]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_1_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_1_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_1_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_1_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [5]), 19) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [5]), 19) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_1_14. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . 
%%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [5]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2), (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2), (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 4 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 4 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 5 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 5 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_1_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2), (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_1_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2), (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2), (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. 
*** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2), (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_1_15. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [5]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2) <= spark__unsigned__u64__last %%% Simplified H23 on reading formula in, to give: %%% H23: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2), (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified H24 on reading formula in, to give: %%% H24: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2), (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last --- Hypothesis H25 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H26 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H27 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H28 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H29 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . 
%%% Simplified C2 on reading formula in, to give: %%% C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2), (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [4], (element(x, [4]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2), (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) *** Proved C1: true *** Proved C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2), (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [4], (element(x, [4]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_0_2), (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) using hypotheses H2, H3, H4, H5, H6, H7, H8, H23 & H24. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_1_16. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_1_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_1_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_1_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_1_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_1_rules(16). 
This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [6]) + element(x, [7])) mod 18446744073709551616 >= 0 New C2: (element(x, [6]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [6]) + element(x, [7])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [6]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_1_17. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [7]) <= interfaces__unsigned_64__last *** Proved C5: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 7 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 7 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_1_rules(4). This was achieved by replacing all occurrences of r_512_0_3 by: 37. New C1: 37 >= spark__unsigned__shift_count__first New C2: 37 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_1_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_1_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule round_1_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [7]) >= 0 -S- Applied substitution rule round_1_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [7]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_1_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [6]) + element(x, [7])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [6]) + element(x, [7])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_1_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [6]) + element(x, [7])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_1_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [6]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_1_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_1_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [7]) >= 0 using hypothesis H2. *** Proved C4: element(x, [7]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_1_18. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [7]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [7]), r_512_0_3) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [7]), r_512_0_3) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [7]), r_512_0_3) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [7]), r_512_0_3) <= spark__unsigned__u64__last -S- Applied substitution rule round_1_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [7]), r_512_0_3) >= 0 -S- Applied substitution rule round_1_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [7]), r_512_0_3) <= 18446744073709551615 -S- Applied substitution rule round_1_rules(4). This was achieved by replacing all occurrences of r_512_0_3 by: 37. New H11: 37 >= spark__unsigned__shift_count__first New H12: 37 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [7]), 37) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [7]), 37) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [7]), 37) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [7]), 37) <= 18446744073709551615 -S- Applied substitution rule round_1_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [7]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [7]), 37) >= 0 -S- Applied substitution rule round_1_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [7]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [7]), 37) <= 18446744073709551615 -S- Applied substitution rule round_1_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [6]) + element(x, [7])) mod 18446744073709551616 >= 0 New H4: (element(x, [6]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_1_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_1_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_1_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_1_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [7]), 37) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [7]), 37) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_1_19. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . 
%%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [7]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [7]), r_512_0_3) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [7]), r_512_0_3) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [7]), r_512_0_3) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [7]), r_512_0_3) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_0_3), (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_0_3), (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 6 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 6 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 7 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 7 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_1_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [7]), r_512_0_3) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_0_3), (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_1_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [7]), r_512_0_3) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_0_3), (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_0_3), (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. 
*** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_0_3), (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_1_20. @@@@@@@@@@ *** Proved C1: true *** PROVED VC. ././@LongLink0000000000000000000000000000016100000000000011563 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/do_first_key_injection.fdlspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/do_first_key_i0000644000175000017500000000615411712513676032503 0ustar eugeneugen {*******************************************************} {FDL Declarations} {Examiner Pro Edition} {*******************************************************} {procedure Skein.Skein_512_Process_Block.Do_First_Key_Injection} title procedure do_first_key_injection; function round__(real) : integer; type interfaces__unsigned_64 = integer; type spark__unsigned__u64 = integer; type spark__crypto__i3 = integer; type spark__crypto__i8 = integer; type spark__crypto__i9 = integer; type spark__crypto__word_count_t = integer; type spark__crypto__u64_seq = array [ spark__crypto__word_count_t] of spark__unsigned__u64; const wcnt : integer = pending; const spark__crypto__word_count_t__base__first : integer = pending; const spark__crypto__word_count_t__base__last : integer = pending; const spark__crypto__i9__base__first : integer = pending; const spark__crypto__i9__base__last : integer = pending; const spark__crypto__i8__base__first : integer = pending; const spark__crypto__i8__base__last : integer = pending; const spark__crypto__i3__base__first : integer = pending; const spark__crypto__i3__base__last : integer = pending; const spark__unsigned__u64__base__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__base__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__last : interfaces__unsigned_64 = pending; const integer__base__first : integer = pending; const integer__base__last : integer = pending; const spark__crypto__word_count_t__first : integer = pending; const spark__crypto__word_count_t__last : integer = pending; const spark__crypto__word_count_t__size : integer = pending; const spark__crypto__i9__first : integer = pending; const spark__crypto__i9__last : integer = pending; const spark__crypto__i9__size : integer = pending; const spark__crypto__i8__first : integer = pending; const spark__crypto__i8__last : integer = pending; const spark__crypto__i8__size : integer = pending; const spark__crypto__i3__first : integer = pending; const spark__crypto__i3__last : integer = pending; const spark__crypto__i3__size : integer = pending; const spark__unsigned__u64__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__last : interfaces__unsigned_64 = pending; const spark__unsigned__u64__modulus : integer = pending; const spark__unsigned__u64__size : integer = pending; const interfaces__unsigned_64__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__modulus : integer = pending; const interfaces__unsigned_64__size : integer = pending; const integer__first : integer = pending; const integer__last : integer = pending; const integer__size : integer = pending; var ts : spark__crypto__u64_seq; var ks : 
spark__crypto__u64_seq; var w : spark__crypto__u64_seq; end; spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_6.log0000644000175000017500000000202611712513676031640 0ustar eugeneugenSPARK Simplifier Pro Edition Reading round_6.fdl (for inherited FDL type declarations) Processing round_6.vcg ... Simplified VC: 1 - All conclusions proved Simplified VC: 2 - All conclusions proved Simplified VC: 3 - All conclusions proved Simplified VC: 4 - All conclusions proved Simplified VC: 5 - All conclusions proved Simplified VC: 6 - All conclusions proved Simplified VC: 7 - All conclusions proved Simplified VC: 8 - All conclusions proved Simplified VC: 9 - All conclusions proved Simplified VC: 10 - All conclusions proved Simplified VC: 11 - All conclusions proved Simplified VC: 12 - All conclusions proved Simplified VC: 13 - All conclusions proved Simplified VC: 14 - All conclusions proved Simplified VC: 15 - All conclusions proved Simplified VC: 16 - All conclusions proved Simplified VC: 17 - All conclusions proved Simplified VC: 18 - All conclusions proved Simplified VC: 19 - All conclusions proved Simplified VC: 20 - All conclusions proved Automatic simplification completed. Simplified output sent to round_6.siv. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_3.vlg0000644000175000017500000000324511712765060031644 0ustar eugeneugen Non-option args: round_3 Option args: -fuse-concls \ -decls=[SUPRESSED IN PLAIN MODE] \ -unique-working-files \ -rules=[SUPRESSED IN PLAIN MODE] \ -rules=[SUPRESSED IN PLAIN MODE] \ -elim-enums \ -ground-eval-exp \ -abstract-exp \ -abstract-divmod \ -utick \ -gtick \ -longtick \ -echo-final-stats \ -csv-reports-include-unit-kind \ -level=warning \ -bit-type \ -bit-type-bool-eq-to-iff \ -refine-types \ -refine-int-subrange-type \ -abstract-arrays-records-late \ -elim-array-constructors \ -add-array-select-box-update-axioms \ -abstract-array-box-updates \ -add-array-select-update-axioms \ -abstract-array-select-updates \ -abstract-array-types \ -abstract-record-types \ -abstract-bit-ops \ -abstract-bit-valued-eqs \ -abstract-bit-valued-int-le \ -elim-bit-type-and-consts \ -abstract-reals \ -lift-quants \ -strip-quantifier-patterns \ -elim-type-aliases \ -interface-mode=smtlib \ -refine-bit-type-as-int-subtype \ -refine-bit-eq-equiv \ -elim-record-constructors \ -add-record-select-update-axioms \ -abstract-record-selects-updates \ -logic=AUFNIRA \ -report=round_3 \ -prover-command=[SUPRESSED IN PLAIN MODE] \ -siv \ -plain \ -read-unit-rlu-files \ -read-directory-rlu-files \ -delete-rules-with-undeclared-ids \ Total ERROR messages: 0 Total WARNING messages: 0 Summary Stats: true: 0 (-nan%) unproven: 0 (-nan%) error: 0 (-nan%) total: 0 ././@LongLink0000000000000000000000000000016100000000000011563 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/do_first_key_injection.vcgspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/do_first_key_i0000644000175000017500000005501311712513676032501 0ustar eugeneugen ******************************************************* Semantic Analysis of SPARK Text Examiner Pro Edition ******************************************************* procedure Skein.Skein_512_Process_Block.Do_First_Key_Injection For path(s) from start to run-time check associated with statement of line 564: procedure_do_first_key_injection_1. H1: true . 
H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( w, [i___1]) >= spark__unsigned__u64__first) and (element( w, [i___1]) <= spark__unsigned__u64__last))) . H3: for_all(i___1: integer, ((i___1 >= spark__crypto__i9__first) and ( i___1 <= spark__crypto__i9__last)) -> ((element( ks, [i___1]) >= spark__unsigned__u64__first) and (element( ks, [i___1]) <= spark__unsigned__u64__last))) . H4: for_all(i___1: integer, ((i___1 >= spark__crypto__i3__first) and ( i___1 <= spark__crypto__i3__last)) -> ((element( ts, [i___1]) >= spark__unsigned__u64__first) and (element( ts, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 7 >= spark__crypto__i9__first . C4: 7 <= spark__crypto__i9__last . C5: 7 >= spark__crypto__i8__first . C6: 7 <= spark__crypto__i8__last . C7: (element(w, [6]) + element(ks, [6])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C8: (element(w, [6]) + element(ks, [6])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C9: 6 >= spark__crypto__i9__first . C10: 6 <= spark__crypto__i9__last . C11: 6 >= spark__crypto__i8__first . C12: 6 <= spark__crypto__i8__last . C13: (element(w, [5]) + element(ks, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C14: (element(w, [5]) + element(ks, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C15: 5 >= spark__crypto__i9__first . C16: 5 <= spark__crypto__i9__last . C17: 5 >= spark__crypto__i8__first . C18: 5 <= spark__crypto__i8__last . C19: (element(w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C20: (element(w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C21: 4 >= spark__crypto__i9__first . C22: 4 <= spark__crypto__i9__last . C23: 4 >= spark__crypto__i8__first . C24: 4 <= spark__crypto__i8__last . C25: (element(w, [3]) + element(ks, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C26: (element(w, [3]) + element(ks, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C27: 3 >= spark__crypto__i9__first . C28: 3 <= spark__crypto__i9__last . C29: 3 >= spark__crypto__i8__first . C30: 3 <= spark__crypto__i8__last . C31: (element(w, [2]) + element(ks, [2])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C32: (element(w, [2]) + element(ks, [2])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C33: 2 >= spark__crypto__i9__first . C34: 2 <= spark__crypto__i9__last . C35: 2 >= spark__crypto__i8__first . C36: 2 <= spark__crypto__i8__last . C37: (element(w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C38: (element(w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C39: 1 >= spark__crypto__i9__first . C40: 1 <= spark__crypto__i9__last . C41: 1 >= spark__crypto__i8__first . C42: 1 <= spark__crypto__i8__last . C43: (element(w, [0]) + element(ks, [0])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C44: (element(w, [0]) + element(ks, [0])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C45: 0 >= spark__crypto__i9__first . C46: 0 <= spark__crypto__i9__last . 
C47: 0 >= spark__crypto__i8__first . C48: 0 <= spark__crypto__i8__last . For path(s) from start to run-time check associated with statement of line 572: procedure_do_first_key_injection_2. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( w, [i___1]) >= spark__unsigned__u64__first) and (element( w, [i___1]) <= spark__unsigned__u64__last))) . H3: for_all(i___1: integer, ((i___1 >= spark__crypto__i9__first) and ( i___1 <= spark__crypto__i9__last)) -> ((element( ks, [i___1]) >= spark__unsigned__u64__first) and (element( ks, [i___1]) <= spark__unsigned__u64__last))) . H4: for_all(i___1: integer, ((i___1 >= spark__crypto__i3__first) and ( i___1 <= spark__crypto__i3__last)) -> ((element( ts, [i___1]) >= spark__unsigned__u64__first) and (element( ts, [i___1]) <= spark__unsigned__u64__last))) . H5: (element(w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H6: (element(w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H7: 7 >= spark__crypto__i9__first . H8: 7 <= spark__crypto__i9__last . H9: 7 >= spark__crypto__i8__first . H10: 7 <= spark__crypto__i8__last . H11: (element(w, [6]) + element(ks, [6])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H12: (element(w, [6]) + element(ks, [6])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H13: 6 >= spark__crypto__i9__first . H14: 6 <= spark__crypto__i9__last . H15: 6 >= spark__crypto__i8__first . H16: 6 <= spark__crypto__i8__last . H17: (element(w, [5]) + element(ks, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H18: (element(w, [5]) + element(ks, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H19: 5 >= spark__crypto__i9__first . H20: 5 <= spark__crypto__i9__last . H21: 5 >= spark__crypto__i8__first . H22: 5 <= spark__crypto__i8__last . H23: (element(w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H24: (element(w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H25: 4 >= spark__crypto__i9__first . H26: 4 <= spark__crypto__i9__last . H27: 4 >= spark__crypto__i8__first . H28: 4 <= spark__crypto__i8__last . H29: (element(w, [3]) + element(ks, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H30: (element(w, [3]) + element(ks, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H31: 3 >= spark__crypto__i9__first . H32: 3 <= spark__crypto__i9__last . H33: 3 >= spark__crypto__i8__first . H34: 3 <= spark__crypto__i8__last . H35: (element(w, [2]) + element(ks, [2])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H36: (element(w, [2]) + element(ks, [2])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H37: 2 >= spark__crypto__i9__first . H38: 2 <= spark__crypto__i9__last . H39: 2 >= spark__crypto__i8__first . H40: 2 <= spark__crypto__i8__last . H41: (element(w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H42: (element(w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H43: 1 >= spark__crypto__i9__first . H44: 1 <= spark__crypto__i9__last . H45: 1 >= spark__crypto__i8__first . H46: 1 <= spark__crypto__i8__last . 
H47: (element(w, [0]) + element(ks, [0])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H48: (element(w, [0]) + element(ks, [0])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H49: 0 >= spark__crypto__i9__first . H50: 0 <= spark__crypto__i9__last . H51: 0 >= spark__crypto__i8__first . H52: 0 <= spark__crypto__i8__last . -> C1: (element(mk__spark__crypto__u64_seq([0] := ((element( w, [0]) + element(ks, [0])) mod interfaces__unsigned_64__modulus), [1] := ((element( w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus), [2] := ((element( w, [2]) + element(ks, [2])) mod interfaces__unsigned_64__modulus), [3] := ((element( w, [3]) + element(ks, [3])) mod interfaces__unsigned_64__modulus), [4] := ((element( w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus), [5] := ((element( w, [5]) + element(ks, [5])) mod interfaces__unsigned_64__modulus), [6] := ((element( w, [6]) + element(ks, [6])) mod interfaces__unsigned_64__modulus), [7] := ((element( w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus)), [wcnt - 3]) + element( ts, [0])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(mk__spark__crypto__u64_seq([0] := ((element( w, [0]) + element(ks, [0])) mod interfaces__unsigned_64__modulus), [1] := ((element( w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus), [2] := ((element( w, [2]) + element(ks, [2])) mod interfaces__unsigned_64__modulus), [3] := ((element( w, [3]) + element(ks, [3])) mod interfaces__unsigned_64__modulus), [4] := ((element( w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus), [5] := ((element( w, [5]) + element(ks, [5])) mod interfaces__unsigned_64__modulus), [6] := ((element( w, [6]) + element(ks, [6])) mod interfaces__unsigned_64__modulus), [7] := ((element( w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus)), [wcnt - 3]) + element( ts, [0])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 0 >= spark__crypto__i3__first . C4: 0 <= spark__crypto__i3__last . C5: wcnt - 3 >= spark__crypto__i8__first . C6: wcnt - 3 <= spark__crypto__i8__last . C7: wcnt - 3 >= integer__base__first . C8: wcnt - 3 <= integer__base__last . C9: wcnt - 3 >= spark__crypto__i8__first . C10: wcnt - 3 <= spark__crypto__i8__last . C11: wcnt - 3 >= integer__base__first . C12: wcnt - 3 <= integer__base__last . For path(s) from start to run-time check associated with statement of line 573: procedure_do_first_key_injection_3. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( w, [i___1]) >= spark__unsigned__u64__first) and (element( w, [i___1]) <= spark__unsigned__u64__last))) . H3: for_all(i___1: integer, ((i___1 >= spark__crypto__i9__first) and ( i___1 <= spark__crypto__i9__last)) -> ((element( ks, [i___1]) >= spark__unsigned__u64__first) and (element( ks, [i___1]) <= spark__unsigned__u64__last))) . H4: for_all(i___1: integer, ((i___1 >= spark__crypto__i3__first) and ( i___1 <= spark__crypto__i3__last)) -> ((element( ts, [i___1]) >= spark__unsigned__u64__first) and (element( ts, [i___1]) <= spark__unsigned__u64__last))) . H5: (element(w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H6: (element(w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H7: 7 >= spark__crypto__i9__first . H8: 7 <= spark__crypto__i9__last . H9: 7 >= spark__crypto__i8__first . 
H10: 7 <= spark__crypto__i8__last . H11: (element(w, [6]) + element(ks, [6])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H12: (element(w, [6]) + element(ks, [6])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H13: 6 >= spark__crypto__i9__first . H14: 6 <= spark__crypto__i9__last . H15: 6 >= spark__crypto__i8__first . H16: 6 <= spark__crypto__i8__last . H17: (element(w, [5]) + element(ks, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H18: (element(w, [5]) + element(ks, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H19: 5 >= spark__crypto__i9__first . H20: 5 <= spark__crypto__i9__last . H21: 5 >= spark__crypto__i8__first . H22: 5 <= spark__crypto__i8__last . H23: (element(w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H24: (element(w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H25: 4 >= spark__crypto__i9__first . H26: 4 <= spark__crypto__i9__last . H27: 4 >= spark__crypto__i8__first . H28: 4 <= spark__crypto__i8__last . H29: (element(w, [3]) + element(ks, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H30: (element(w, [3]) + element(ks, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H31: 3 >= spark__crypto__i9__first . H32: 3 <= spark__crypto__i9__last . H33: 3 >= spark__crypto__i8__first . H34: 3 <= spark__crypto__i8__last . H35: (element(w, [2]) + element(ks, [2])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H36: (element(w, [2]) + element(ks, [2])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H37: 2 >= spark__crypto__i9__first . H38: 2 <= spark__crypto__i9__last . H39: 2 >= spark__crypto__i8__first . H40: 2 <= spark__crypto__i8__last . H41: (element(w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H42: (element(w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H43: 1 >= spark__crypto__i9__first . H44: 1 <= spark__crypto__i9__last . H45: 1 >= spark__crypto__i8__first . H46: 1 <= spark__crypto__i8__last . H47: (element(w, [0]) + element(ks, [0])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H48: (element(w, [0]) + element(ks, [0])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H49: 0 >= spark__crypto__i9__first . H50: 0 <= spark__crypto__i9__last . H51: 0 >= spark__crypto__i8__first . H52: 0 <= spark__crypto__i8__last . H53: (element(mk__spark__crypto__u64_seq([0] := ((element( w, [0]) + element(ks, [0])) mod interfaces__unsigned_64__modulus), [1] := ((element( w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus), [2] := ((element( w, [2]) + element(ks, [2])) mod interfaces__unsigned_64__modulus), [3] := ((element( w, [3]) + element(ks, [3])) mod interfaces__unsigned_64__modulus), [4] := ((element( w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus), [5] := ((element( w, [5]) + element(ks, [5])) mod interfaces__unsigned_64__modulus), [6] := ((element( w, [6]) + element(ks, [6])) mod interfaces__unsigned_64__modulus), [7] := ((element( w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus)), [wcnt - 3]) + element( ts, [0])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . 
H54: (element(mk__spark__crypto__u64_seq([0] := ((element( w, [0]) + element(ks, [0])) mod interfaces__unsigned_64__modulus), [1] := ((element( w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus), [2] := ((element( w, [2]) + element(ks, [2])) mod interfaces__unsigned_64__modulus), [3] := ((element( w, [3]) + element(ks, [3])) mod interfaces__unsigned_64__modulus), [4] := ((element( w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus), [5] := ((element( w, [5]) + element(ks, [5])) mod interfaces__unsigned_64__modulus), [6] := ((element( w, [6]) + element(ks, [6])) mod interfaces__unsigned_64__modulus), [7] := ((element( w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus)), [wcnt - 3]) + element( ts, [0])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H55: 0 >= spark__crypto__i3__first . H56: 0 <= spark__crypto__i3__last . H57: wcnt - 3 >= spark__crypto__i8__first . H58: wcnt - 3 <= spark__crypto__i8__last . H59: wcnt - 3 >= integer__base__first . H60: wcnt - 3 <= integer__base__last . H61: wcnt - 3 >= spark__crypto__i8__first . H62: wcnt - 3 <= spark__crypto__i8__last . H63: wcnt - 3 >= integer__base__first . H64: wcnt - 3 <= integer__base__last . -> C1: (element(update(mk__spark__crypto__u64_seq([0] := ((element( w, [0]) + element(ks, [0])) mod interfaces__unsigned_64__modulus), [1] := ((element( w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus), [2] := ((element( w, [2]) + element(ks, [2])) mod interfaces__unsigned_64__modulus), [3] := ((element( w, [3]) + element(ks, [3])) mod interfaces__unsigned_64__modulus), [4] := ((element( w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus), [5] := ((element( w, [5]) + element(ks, [5])) mod interfaces__unsigned_64__modulus), [6] := ((element( w, [6]) + element(ks, [6])) mod interfaces__unsigned_64__modulus), [7] := ((element( w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus)), [wcnt - 3], (element( mk__spark__crypto__u64_seq([0] := ((element(w, [0]) + element( ks, [0])) mod interfaces__unsigned_64__modulus), [1] := ((element( w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus), [2] := ((element( w, [2]) + element(ks, [2])) mod interfaces__unsigned_64__modulus), [3] := ((element( w, [3]) + element(ks, [3])) mod interfaces__unsigned_64__modulus), [4] := ((element( w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus), [5] := ((element( w, [5]) + element(ks, [5])) mod interfaces__unsigned_64__modulus), [6] := ((element( w, [6]) + element(ks, [6])) mod interfaces__unsigned_64__modulus), [7] := ((element( w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus)), [wcnt - 3]) + element( ts, [0])) mod interfaces__unsigned_64__modulus), [ wcnt - 2]) + element(ts, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . 
C2: (element(update(mk__spark__crypto__u64_seq([0] := ((element( w, [0]) + element(ks, [0])) mod interfaces__unsigned_64__modulus), [1] := ((element( w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus), [2] := ((element( w, [2]) + element(ks, [2])) mod interfaces__unsigned_64__modulus), [3] := ((element( w, [3]) + element(ks, [3])) mod interfaces__unsigned_64__modulus), [4] := ((element( w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus), [5] := ((element( w, [5]) + element(ks, [5])) mod interfaces__unsigned_64__modulus), [6] := ((element( w, [6]) + element(ks, [6])) mod interfaces__unsigned_64__modulus), [7] := ((element( w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus)), [wcnt - 3], (element( mk__spark__crypto__u64_seq([0] := ((element(w, [0]) + element( ks, [0])) mod interfaces__unsigned_64__modulus), [1] := ((element( w, [1]) + element(ks, [1])) mod interfaces__unsigned_64__modulus), [2] := ((element( w, [2]) + element(ks, [2])) mod interfaces__unsigned_64__modulus), [3] := ((element( w, [3]) + element(ks, [3])) mod interfaces__unsigned_64__modulus), [4] := ((element( w, [4]) + element(ks, [4])) mod interfaces__unsigned_64__modulus), [5] := ((element( w, [5]) + element(ks, [5])) mod interfaces__unsigned_64__modulus), [6] := ((element( w, [6]) + element(ks, [6])) mod interfaces__unsigned_64__modulus), [7] := ((element( w, [7]) + element(ks, [7])) mod interfaces__unsigned_64__modulus)), [wcnt - 3]) + element( ts, [0])) mod interfaces__unsigned_64__modulus), [ wcnt - 2]) + element(ts, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 1 >= spark__crypto__i3__first . C4: 1 <= spark__crypto__i3__last . C5: wcnt - 2 >= spark__crypto__i8__first . C6: wcnt - 2 <= spark__crypto__i8__last . C7: wcnt - 2 >= integer__base__first . C8: wcnt - 2 <= integer__base__last . C9: wcnt - 2 >= spark__crypto__i8__first . C10: wcnt - 2 <= spark__crypto__i8__last . C11: wcnt - 2 >= integer__base__first . C12: wcnt - 2 <= integer__base__last . For path(s) from start to finish: procedure_do_first_key_injection_4. *** true . 
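A minimal, hedged sketch of the code shape behind the do_first_key_injection obligations above (illustrative only; the identifiers and bounds below are assumptions for the sketch, not the shipped SPARKSkein source): every state word w(i) is incremented by the key-schedule word ks(i) with wrap-around modulo 2**64 (the "(... + ...) mod interfaces__unsigned_64__modulus" terms), and the tweak words ts(0) and ts(1) are then folded into positions wcnt - 3 and wcnt - 2, which is what the index conclusions on spark__crypto__i8, i9 and i3 bound.

--  Illustrative sketch only: names and bounds are assumptions, not the
--  shipped SPARKSkein source.
with Interfaces; use Interfaces;

procedure First_Key_Injection_Sketch is
   WCNT : constant := 8;                     --  Skein-512 state words
   type I8 is range 0 .. WCNT - 1;           --  mirrors spark__crypto__i8
   type I9 is range 0 .. WCNT;               --  mirrors spark__crypto__i9
   type I3 is range 0 .. 2;                  --  mirrors spark__crypto__i3

   W  : array (I8) of Unsigned_64 := (others => 0);  --  block state
   KS : array (I9) of Unsigned_64 := (others => 0);  --  key schedule
   TS : array (I3) of Unsigned_64 := (others => 0);  --  tweak schedule
begin
   --  H5 .. H52: each per-word addition wraps modulo 2**64, which is what
   --  "(element(w, [i]) + element(ks, [i])) mod interfaces__unsigned_64__modulus"
   --  expresses for i = 0 .. 7.
   for I in I8 loop
      W (I) := W (I) + KS (I9 (I));
   end loop;

   --  H53 .. H64 and C1 .. C12: the two tweak words are injected at
   --  WCNT - 3 and WCNT - 2, hence the range conclusions on "wcnt - 3"
   --  and "wcnt - 2".
   W (WCNT - 3) := W (WCNT - 3) + TS (0);
   W (WCNT - 2) := W (WCNT - 2) + TS (1);
end First_Key_Injection_Sketch;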
/* trivially true VC removed by Examiner */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_4.vlg0000644000175000017500000000324511712765060031645 0ustar eugeneugen Non-option args: round_4 Option args: -fuse-concls \ -decls=[SUPRESSED IN PLAIN MODE] \ -unique-working-files \ -rules=[SUPRESSED IN PLAIN MODE] \ -rules=[SUPRESSED IN PLAIN MODE] \ -elim-enums \ -ground-eval-exp \ -abstract-exp \ -abstract-divmod \ -utick \ -gtick \ -longtick \ -echo-final-stats \ -csv-reports-include-unit-kind \ -level=warning \ -bit-type \ -bit-type-bool-eq-to-iff \ -refine-types \ -refine-int-subrange-type \ -abstract-arrays-records-late \ -elim-array-constructors \ -add-array-select-box-update-axioms \ -abstract-array-box-updates \ -add-array-select-update-axioms \ -abstract-array-select-updates \ -abstract-array-types \ -abstract-record-types \ -abstract-bit-ops \ -abstract-bit-valued-eqs \ -abstract-bit-valued-int-le \ -elim-bit-type-and-consts \ -abstract-reals \ -lift-quants \ -strip-quantifier-patterns \ -elim-type-aliases \ -interface-mode=smtlib \ -refine-bit-type-as-int-subtype \ -refine-bit-eq-equiv \ -elim-record-constructors \ -add-record-select-update-axioms \ -abstract-record-selects-updates \ -logic=AUFNIRA \ -report=round_4 \ -prover-command=[SUPRESSED IN PLAIN MODE] \ -siv \ -plain \ -read-unit-rlu-files \ -read-directory-rlu-files \ -delete-rules-with-undeclared-ids \ Total ERROR messages: 0 Total WARNING messages: 0 Summary Stats: true: 0 (-nan%) unproven: 0 (-nan%) error: 0 (-nan%) total: 0 ././@LongLink0000000000000000000000000000015000000000000011561 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_ts.fdlspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/initialize_ts.0000644000175000017500000002043311712513676032433 0ustar eugeneugen {*******************************************************} {FDL Declarations} {Examiner Pro Edition} {*******************************************************} {procedure Skein.Skein_512_Process_Block.Initialize_TS} title procedure initialize_ts; function round__(real) : integer; type natural = integer; type interfaces__unsigned_8 = integer; type interfaces__unsigned_16 = integer; type interfaces__unsigned_32 = integer; type interfaces__unsigned_64 = integer; type spark__unsigned__u6 = integer; type spark__unsigned__u7 = integer; type spark__unsigned__byte = integer; type spark__unsigned__u16 = integer; type spark__unsigned__u32 = integer; type spark__unsigned__u64 = integer; type spark__crypto__word_count_t = integer; type hash_bit_length = integer; type skein_512_state_words_index = integer; type skein_512_block_bytes_index = integer; type modifier_words_index = integer; type spark__crypto__byte_seq = array [natural] of spark__unsigned__byte; type spark__crypto__u64_seq = array [ spark__crypto__word_count_t] of spark__unsigned__u64; type tweak_value = record byte_count_lsb : spark__unsigned__u64; byte_count_msb : spark__unsigned__u32; reserved : spark__unsigned__u16; tree_level : spark__unsigned__u7; bit_pad : boolean; field_type : spark__unsigned__u6; first_block : boolean; final_block : boolean end; type context_header = record tweak_words : tweak_value; hash_bit_len : hash_bit_length; byte_count : natural end; type skein_512_context = record h : context_header; x : spark__crypto__u64_seq; b : spark__crypto__byte_seq end; const modifier_words_index__base__first : integer = pending; const 
modifier_words_index__base__last : integer = pending; const skein_512_block_bytes_index__base__first : integer = pending; const skein_512_block_bytes_index__base__last : integer = pending; const skein_512_state_words_index__base__first : integer = pending; const skein_512_state_words_index__base__last : integer = pending; const hash_bit_length__base__first : integer = pending; const hash_bit_length__base__last : integer = pending; const spark__crypto__word_count_t__base__first : integer = pending; const spark__crypto__word_count_t__base__last : integer = pending; const spark__unsigned__u64__base__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__base__last : interfaces__unsigned_64 = pending; const spark__unsigned__u32__base__first : interfaces__unsigned_32 = pending; const spark__unsigned__u32__base__last : interfaces__unsigned_32 = pending; const spark__unsigned__u16__base__first : interfaces__unsigned_16 = pending; const spark__unsigned__u16__base__last : interfaces__unsigned_16 = pending; const spark__unsigned__byte__base__first : interfaces__unsigned_8 = pending; const spark__unsigned__byte__base__last : interfaces__unsigned_8 = pending; const spark__unsigned__u7__base__first : spark__unsigned__u7 = pending; const spark__unsigned__u7__base__last : spark__unsigned__u7 = pending; const spark__unsigned__u6__base__first : spark__unsigned__u6 = pending; const spark__unsigned__u6__base__last : spark__unsigned__u6 = pending; const interfaces__unsigned_64__base__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_32__base__first : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__base__last : interfaces__unsigned_32 = pending; const interfaces__unsigned_16__base__first : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__base__last : interfaces__unsigned_16 = pending; const interfaces__unsigned_8__base__first : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__base__last : interfaces__unsigned_8 = pending; const natural__base__first : integer = pending; const natural__base__last : integer = pending; const integer__base__first : integer = pending; const integer__base__last : integer = pending; const context_header__size : integer = pending; const tweak_value__size : integer = pending; const modifier_words_index__first : integer = pending; const modifier_words_index__last : integer = pending; const modifier_words_index__size : integer = pending; const skein_512_context__size : integer = pending; const skein_512_block_bytes_index__first : integer = pending; const skein_512_block_bytes_index__last : integer = pending; const skein_512_block_bytes_index__size : integer = pending; const skein_512_state_words_index__first : integer = pending; const skein_512_state_words_index__last : integer = pending; const skein_512_state_words_index__size : integer = pending; const hash_bit_length__first : integer = pending; const hash_bit_length__last : integer = pending; const hash_bit_length__size : integer = pending; const spark__crypto__word_count_t__first : integer = pending; const spark__crypto__word_count_t__last : integer = pending; const spark__crypto__word_count_t__size : integer = pending; const spark__unsigned__u64__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__last : interfaces__unsigned_64 = pending; const spark__unsigned__u64__modulus : integer = pending; const spark__unsigned__u64__size : integer = pending; const 
spark__unsigned__u32__first : interfaces__unsigned_32 = pending; const spark__unsigned__u32__last : interfaces__unsigned_32 = pending; const spark__unsigned__u32__modulus : integer = pending; const spark__unsigned__u32__size : integer = pending; const spark__unsigned__u16__first : interfaces__unsigned_16 = pending; const spark__unsigned__u16__last : interfaces__unsigned_16 = pending; const spark__unsigned__u16__modulus : integer = pending; const spark__unsigned__u16__size : integer = pending; const spark__unsigned__byte__first : interfaces__unsigned_8 = pending; const spark__unsigned__byte__last : interfaces__unsigned_8 = pending; const spark__unsigned__byte__modulus : integer = pending; const spark__unsigned__byte__size : integer = pending; const spark__unsigned__u7__first : spark__unsigned__u7 = pending; const spark__unsigned__u7__last : spark__unsigned__u7 = pending; const spark__unsigned__u7__modulus : integer = pending; const spark__unsigned__u7__size : integer = pending; const spark__unsigned__u6__first : spark__unsigned__u6 = pending; const spark__unsigned__u6__last : spark__unsigned__u6 = pending; const spark__unsigned__u6__modulus : integer = pending; const spark__unsigned__u6__size : integer = pending; const interfaces__unsigned_64__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__modulus : integer = pending; const interfaces__unsigned_64__size : integer = pending; const interfaces__unsigned_32__first : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__last : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__modulus : integer = pending; const interfaces__unsigned_32__size : integer = pending; const interfaces__unsigned_16__first : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__last : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__modulus : integer = pending; const interfaces__unsigned_16__size : integer = pending; const interfaces__unsigned_8__first : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__last : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__modulus : integer = pending; const interfaces__unsigned_8__size : integer = pending; const natural__first : integer = pending; const natural__last : integer = pending; const natural__size : integer = pending; const integer__first : integer = pending; const integer__last : integer = pending; const integer__size : integer = pending; var ctx : skein_512_context; function tweak_to_words(tweak_value) : spark__crypto__u64_seq; end; ././@LongLink0000000000000000000000000000015100000000000011562 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/update_context.vsmspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/update_context0000644000175000017500000000006211712765060032524 0ustar eugeneugenupdate_context,0,0,0,0,0,0,0,-nan,-nan,-nan,-nan, spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_2.vsm0000644000175000017500000000005311712765060031652 0ustar eugeneugenround_2,0,0,0,0,0,0,0,-nan,-nan,-nan,-nan, ././@LongLink0000000000000000000000000000016100000000000011563 Lustar 
rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/do_first_key_injection.vlgspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/do_first_key_i0000644000175000017500000000330311712765060032470 0ustar eugeneugen Non-option args: do_first_key_injection Option args: -fuse-concls \ -decls=[SUPRESSED IN PLAIN MODE] \ -unique-working-files \ -rules=[SUPRESSED IN PLAIN MODE] \ -rules=[SUPRESSED IN PLAIN MODE] \ -elim-enums \ -ground-eval-exp \ -abstract-exp \ -abstract-divmod \ -utick \ -gtick \ -longtick \ -echo-final-stats \ -csv-reports-include-unit-kind \ -level=warning \ -bit-type \ -bit-type-bool-eq-to-iff \ -refine-types \ -refine-int-subrange-type \ -abstract-arrays-records-late \ -elim-array-constructors \ -add-array-select-box-update-axioms \ -abstract-array-box-updates \ -add-array-select-update-axioms \ -abstract-array-select-updates \ -abstract-array-types \ -abstract-record-types \ -abstract-bit-ops \ -abstract-bit-valued-eqs \ -abstract-bit-valued-int-le \ -elim-bit-type-and-consts \ -abstract-reals \ -lift-quants \ -strip-quantifier-patterns \ -elim-type-aliases \ -interface-mode=smtlib \ -refine-bit-type-as-int-subtype \ -refine-bit-eq-equiv \ -elim-record-constructors \ -add-record-select-update-axioms \ -abstract-record-selects-updates \ -logic=AUFNIRA \ -report=do_first_key_injection \ -prover-command=[SUPRESSED IN PLAIN MODE] \ -siv \ -plain \ -read-unit-rlu-files \ -read-directory-rlu-files \ -delete-rules-with-undeclared-ids \ Total ERROR messages: 0 Total WARNING messages: 0 Summary Stats: true: 0 (-nan%) unproven: 0 (-nan%) error: 0 (-nan%) total: 0 spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_5.vcg0000644000175000017500000015342411712513676031646 0ustar eugeneugen ******************************************************* Semantic Analysis of SPARK Text Examiner Pro Edition ******************************************************* procedure Skein.Skein_512_Process_Block.Round_5 For path(s) from start to run-time check associated with statement of line 396: procedure_round_5_1. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 1 >= spark__crypto__i8__first . C4: 1 <= spark__crypto__i8__last . C5: 0 >= spark__crypto__i8__first . C6: 0 <= spark__crypto__i8__last . C7: 0 >= spark__crypto__i8__first . C8: 0 <= spark__crypto__i8__last . For path(s) from start to run-time check associated with statement of line 397: procedure_round_5_2. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . 
H10: 0 <= spark__crypto__i8__last . -> C1: r_512_4_0 >= spark__unsigned__shift_count__first . C2: r_512_4_0 <= spark__unsigned__shift_count__last . C3: element(update(x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . C4: element(update(x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . C5: 1 >= spark__crypto__i8__first . C6: 1 <= spark__crypto__i8__last . C7: 1 >= spark__crypto__i8__first . C8: 1 <= spark__crypto__i8__last . For path(s) from start to run-time check associated with statement of line 397: procedure_round_5_3. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . H11: r_512_4_0 >= spark__unsigned__shift_count__first . H12: r_512_4_0 <= spark__unsigned__shift_count__last . H13: element(update(x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . H14: element(update(x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . H15: 1 >= spark__crypto__i8__first . H16: 1 <= spark__crypto__i8__last . H17: 1 >= spark__crypto__i8__first . H18: 1 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_4_0) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_4_0) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_4_0) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_4_0) <= spark__unsigned__u64__last . For path(s) from start to run-time check associated with statement of line 398: procedure_round_5_4. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . H11: r_512_4_0 >= spark__unsigned__shift_count__first . H12: r_512_4_0 <= spark__unsigned__shift_count__last . 
H13: element(update(x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . H14: element(update(x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . H15: 1 >= spark__crypto__i8__first . H16: 1 <= spark__crypto__i8__last . H17: 1 >= spark__crypto__i8__first . H18: 1 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_4_0) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_4_0) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_4_0) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_4_0) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_4_0)), [1]), element(update(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_4_0)), [0])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_4_0)), [1]), element(update(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_4_0)), [0])) <= spark__unsigned__u64__last . C3: 0 >= spark__crypto__i8__first . C4: 0 <= spark__crypto__i8__last . C5: 1 >= spark__crypto__i8__first . C6: 1 <= spark__crypto__i8__last . C7: 1 >= spark__crypto__i8__first . C8: 1 <= spark__crypto__i8__last . For path(s) from start to assertion of line 401: procedure_round_5_5. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 1 >= spark__crypto__i8__first . H6: 1 <= spark__crypto__i8__last . H7: 0 >= spark__crypto__i8__first . H8: 0 <= spark__crypto__i8__last . H9: 0 >= spark__crypto__i8__first . H10: 0 <= spark__crypto__i8__last . H11: r_512_4_0 >= spark__unsigned__shift_count__first . H12: r_512_4_0 <= spark__unsigned__shift_count__last . H13: element(update(x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]) <= interfaces__unsigned_64__last . H15: 1 >= spark__crypto__i8__first . H16: 1 <= spark__crypto__i8__last . H17: 1 >= spark__crypto__i8__first . H18: 1 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_4_0) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_4_0) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_4_0) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_4_0) <= spark__unsigned__u64__last . H23: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_4_0)), [1]), element(update(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_4_0)), [0])) >= spark__unsigned__u64__first . H24: bit__xor(element(update(update(x, [0], (element(x, [0]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_4_0)), [1]), element(update(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_4_0)), [0])) <= spark__unsigned__u64__last . H25: 0 >= spark__crypto__i8__first . H26: 0 <= spark__crypto__i8__last . H27: 1 >= spark__crypto__i8__first . H28: 1 <= spark__crypto__i8__last . H29: 1 >= spark__crypto__i8__first . H30: 1 <= spark__crypto__i8__last . -> C1: true . 
C2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(update(update(update( x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_4_0)), [1], bit__xor(element(update(update( x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_4_0)), [1]), element(update(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_4_0)), [0]))), [i___1]) >= spark__unsigned__u64__first) and (element(update(update(update( x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_4_0)), [1], bit__xor(element(update(update( x, [0], (element(x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_4_0)), [1]), element(update(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], spark__unsigned__rotate_left_64(element(update(x, [0], (element( x, [0]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1]), r_512_4_0)), [0]))), [i___1]) <= spark__unsigned__u64__last))) . For path(s) from assertion of line 401 to run-time check associated with statement of line 403: procedure_round_5_6. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 3 >= spark__crypto__i8__first . C4: 3 <= spark__crypto__i8__last . C5: 2 >= spark__crypto__i8__first . C6: 2 <= spark__crypto__i8__last . C7: 2 >= spark__crypto__i8__first . C8: 2 <= spark__crypto__i8__last . For path(s) from assertion of line 401 to run-time check associated with statement of line 404: procedure_round_5_7. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . -> C1: r_512_4_1 >= spark__unsigned__shift_count__first . C2: r_512_4_1 <= spark__unsigned__shift_count__last . 
C3: element(update(x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . C4: element(update(x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . C5: 3 >= spark__crypto__i8__first . C6: 3 <= spark__crypto__i8__last . C7: 3 >= spark__crypto__i8__first . C8: 3 <= spark__crypto__i8__last . For path(s) from assertion of line 401 to run-time check associated with statement of line 404: procedure_round_5_8. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . H11: r_512_4_1 >= spark__unsigned__shift_count__first . H12: r_512_4_1 <= spark__unsigned__shift_count__last . H13: element(update(x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . H14: element(update(x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . H15: 3 >= spark__crypto__i8__first . H16: 3 <= spark__crypto__i8__last . H17: 3 >= spark__crypto__i8__first . H18: 3 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_4_1) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_4_1) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_4_1) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_4_1) <= spark__unsigned__u64__last . For path(s) from assertion of line 401 to run-time check associated with statement of line 405: procedure_round_5_9. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . H11: r_512_4_1 >= spark__unsigned__shift_count__first . H12: r_512_4_1 <= spark__unsigned__shift_count__last . H13: element(update(x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . H15: 3 >= spark__crypto__i8__first . H16: 3 <= spark__crypto__i8__last . H17: 3 >= spark__crypto__i8__first . H18: 3 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_4_1) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_4_1) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_4_1) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_4_1) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_4_1)), [3]), element(update(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_4_1)), [2])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_4_1)), [3]), element(update(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_4_1)), [2])) <= spark__unsigned__u64__last . C3: 2 >= spark__crypto__i8__first . C4: 2 <= spark__crypto__i8__last . C5: 3 >= spark__crypto__i8__first . C6: 3 <= spark__crypto__i8__last . C7: 3 >= spark__crypto__i8__first . C8: 3 <= spark__crypto__i8__last . For path(s) from assertion of line 401 to assertion of line 407: procedure_round_5_10. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 3 >= spark__crypto__i8__first . H6: 3 <= spark__crypto__i8__last . H7: 2 >= spark__crypto__i8__first . H8: 2 <= spark__crypto__i8__last . H9: 2 >= spark__crypto__i8__first . H10: 2 <= spark__crypto__i8__last . H11: r_512_4_1 >= spark__unsigned__shift_count__first . H12: r_512_4_1 <= spark__unsigned__shift_count__last . H13: element(update(x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]) <= interfaces__unsigned_64__last . H15: 3 >= spark__crypto__i8__first . H16: 3 <= spark__crypto__i8__last . H17: 3 >= spark__crypto__i8__first . H18: 3 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_4_1) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_4_1) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_4_1) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_4_1) <= spark__unsigned__u64__last . H23: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_4_1)), [3]), element(update(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_4_1)), [2])) >= spark__unsigned__u64__first . H24: bit__xor(element(update(update(x, [2], (element(x, [2]) + element( x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_4_1)), [3]), element(update(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_4_1)), [2])) <= spark__unsigned__u64__last . H25: 2 >= spark__crypto__i8__first . H26: 2 <= spark__crypto__i8__last . H27: 3 >= spark__crypto__i8__first . H28: 3 <= spark__crypto__i8__last . H29: 3 >= spark__crypto__i8__first . H30: 3 <= spark__crypto__i8__last . -> C1: true . 
C2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(update(update(update( x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_4_1)), [3], bit__xor(element(update(update( x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_4_1)), [3]), element(update(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_4_1)), [2]))), [i___1]) >= spark__unsigned__u64__first) and (element(update(update(update( x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_4_1)), [3], bit__xor(element(update(update( x, [2], (element(x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_4_1)), [3]), element(update(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3], spark__unsigned__rotate_left_64(element(update(x, [2], (element( x, [2]) + element(x, [3])) mod interfaces__unsigned_64__modulus), [3]), r_512_4_1)), [2]))), [i___1]) <= spark__unsigned__u64__last))) . For path(s) from assertion of line 407 to run-time check associated with statement of line 409: procedure_round_5_11. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 5 >= spark__crypto__i8__first . C4: 5 <= spark__crypto__i8__last . C5: 4 >= spark__crypto__i8__first . C6: 4 <= spark__crypto__i8__last . C7: 4 >= spark__crypto__i8__first . C8: 4 <= spark__crypto__i8__last . For path(s) from assertion of line 407 to run-time check associated with statement of line 410: procedure_round_5_12. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . -> C1: r_512_4_2 >= spark__unsigned__shift_count__first . C2: r_512_4_2 <= spark__unsigned__shift_count__last . 
C3: element(update(x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . C4: element(update(x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . C5: 5 >= spark__crypto__i8__first . C6: 5 <= spark__crypto__i8__last . C7: 5 >= spark__crypto__i8__first . C8: 5 <= spark__crypto__i8__last . For path(s) from assertion of line 407 to run-time check associated with statement of line 410: procedure_round_5_13. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . H11: r_512_4_2 >= spark__unsigned__shift_count__first . H12: r_512_4_2 <= spark__unsigned__shift_count__last . H13: element(update(x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . H14: element(update(x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . H15: 5 >= spark__crypto__i8__first . H16: 5 <= spark__crypto__i8__last . H17: 5 >= spark__crypto__i8__first . H18: 5 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_4_2) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_4_2) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_4_2) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_4_2) <= spark__unsigned__u64__last . For path(s) from assertion of line 407 to run-time check associated with statement of line 411: procedure_round_5_14. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . H11: r_512_4_2 >= spark__unsigned__shift_count__first . H12: r_512_4_2 <= spark__unsigned__shift_count__last . H13: element(update(x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . H15: 5 >= spark__crypto__i8__first . H16: 5 <= spark__crypto__i8__last . H17: 5 >= spark__crypto__i8__first . H18: 5 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_4_2) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_4_2) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_4_2) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_4_2) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_4_2)), [5]), element(update(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_4_2)), [4])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_4_2)), [5]), element(update(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_4_2)), [4])) <= spark__unsigned__u64__last . C3: 4 >= spark__crypto__i8__first . C4: 4 <= spark__crypto__i8__last . C5: 5 >= spark__crypto__i8__first . C6: 5 <= spark__crypto__i8__last . C7: 5 >= spark__crypto__i8__first . C8: 5 <= spark__crypto__i8__last . For path(s) from assertion of line 407 to assertion of line 413: procedure_round_5_15. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 5 >= spark__crypto__i8__first . H6: 5 <= spark__crypto__i8__last . H7: 4 >= spark__crypto__i8__first . H8: 4 <= spark__crypto__i8__last . H9: 4 >= spark__crypto__i8__first . H10: 4 <= spark__crypto__i8__last . H11: r_512_4_2 >= spark__unsigned__shift_count__first . H12: r_512_4_2 <= spark__unsigned__shift_count__last . H13: element(update(x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]) <= interfaces__unsigned_64__last . H15: 5 >= spark__crypto__i8__first . H16: 5 <= spark__crypto__i8__last . H17: 5 >= spark__crypto__i8__first . H18: 5 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_4_2) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_4_2) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_4_2) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_4_2) <= spark__unsigned__u64__last . H23: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_4_2)), [5]), element(update(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_4_2)), [4])) >= spark__unsigned__u64__first . H24: bit__xor(element(update(update(x, [4], (element(x, [4]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_4_2)), [5]), element(update(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_4_2)), [4])) <= spark__unsigned__u64__last . H25: 4 >= spark__crypto__i8__first . H26: 4 <= spark__crypto__i8__last . H27: 5 >= spark__crypto__i8__first . H28: 5 <= spark__crypto__i8__last . H29: 5 >= spark__crypto__i8__first . H30: 5 <= spark__crypto__i8__last . -> C1: true . 
C2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(update(update(update( x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_4_2)), [5], bit__xor(element(update(update( x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_4_2)), [5]), element(update(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_4_2)), [4]))), [i___1]) >= spark__unsigned__u64__first) and (element(update(update(update( x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_4_2)), [5], bit__xor(element(update(update( x, [4], (element(x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_4_2)), [5]), element(update(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], spark__unsigned__rotate_left_64(element(update(x, [4], (element( x, [4]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5]), r_512_4_2)), [4]))), [i___1]) <= spark__unsigned__u64__last))) . For path(s) from assertion of line 413 to run-time check associated with statement of line 415: procedure_round_5_16. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: 7 >= spark__crypto__i8__first . C4: 7 <= spark__crypto__i8__last . C5: 6 >= spark__crypto__i8__first . C6: 6 <= spark__crypto__i8__last . C7: 6 >= spark__crypto__i8__first . C8: 6 <= spark__crypto__i8__last . For path(s) from assertion of line 413 to run-time check associated with statement of line 416: procedure_round_5_17. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . -> C1: r_512_4_3 >= spark__unsigned__shift_count__first . C2: r_512_4_3 <= spark__unsigned__shift_count__last . 
C3: element(update(x, [6], (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . C4: element(update(x, [6], (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . C5: 7 >= spark__crypto__i8__first . C6: 7 <= spark__crypto__i8__last . C7: 7 >= spark__crypto__i8__first . C8: 7 <= spark__crypto__i8__last . For path(s) from assertion of line 413 to run-time check associated with statement of line 416: procedure_round_5_18. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . H11: r_512_4_3 >= spark__unsigned__shift_count__first . H12: r_512_4_3 <= spark__unsigned__shift_count__last . H13: element(update(x, [6], (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . H14: element(update(x, [6], (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . H15: 7 >= spark__crypto__i8__first . H16: 7 <= spark__crypto__i8__last . H17: 7 >= spark__crypto__i8__first . H18: 7 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_4_3) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_4_3) <= interfaces__unsigned_64__last . -> C1: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_4_3) >= spark__unsigned__u64__first . C2: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_4_3) <= spark__unsigned__u64__last . For path(s) from assertion of line 413 to run-time check associated with statement of line 417: procedure_round_5_19. H1: true . H2: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H3: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H4: (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H5: 7 >= spark__crypto__i8__first . H6: 7 <= spark__crypto__i8__last . H7: 6 >= spark__crypto__i8__first . H8: 6 <= spark__crypto__i8__last . H9: 6 >= spark__crypto__i8__first . H10: 6 <= spark__crypto__i8__last . H11: r_512_4_3 >= spark__unsigned__shift_count__first . H12: r_512_4_3 <= spark__unsigned__shift_count__last . H13: element(update(x, [6], (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) >= interfaces__unsigned_64__first . 
H14: element(update(x, [6], (element(x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]) <= interfaces__unsigned_64__last . H15: 7 >= spark__crypto__i8__first . H16: 7 <= spark__crypto__i8__last . H17: 7 >= spark__crypto__i8__first . H18: 7 <= spark__crypto__i8__last . H19: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_4_3) >= interfaces__unsigned_64__first . H20: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_4_3) <= interfaces__unsigned_64__last . H21: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_4_3) >= spark__unsigned__u64__first . H22: spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_4_3) <= spark__unsigned__u64__last . -> C1: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_4_3)), [7]), element(update(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_4_3)), [6])) >= spark__unsigned__u64__first . C2: bit__xor(element(update(update(x, [6], (element(x, [6]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_4_3)), [7]), element(update(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], spark__unsigned__rotate_left_64(element(update(x, [6], (element( x, [6]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7]), r_512_4_3)), [6])) <= spark__unsigned__u64__last . C3: 6 >= spark__crypto__i8__first . C4: 6 <= spark__crypto__i8__last . C5: 7 >= spark__crypto__i8__first . C6: 7 <= spark__crypto__i8__last . C7: 7 >= spark__crypto__i8__first . C8: 7 <= spark__crypto__i8__last . For path(s) from assertion of line 413 to finish: procedure_round_5_20. *** true . /* trivially true VC removed by Examiner */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_8.vsm0000644000175000017500000000005311712765060031660 0ustar eugeneugenround_8,0,0,0,0,0,0,0,-nan,-nan,-nan,-nan, spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_6.siv0000644000175000017500000000710411712513676031662 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Round_6 For path(s) from start to run-time check associated with statement of line 426: procedure_round_6_1. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 427: procedure_round_6_2. *** true . 
/* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 427: procedure_round_6_3. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 428: procedure_round_6_4. *** true . /* all conclusions proved */ For path(s) from start to assertion of line 431: procedure_round_6_5. *** true . /* all conclusions proved */ For path(s) from assertion of line 431 to run-time check associated with statement of line 433: procedure_round_6_6. *** true . /* all conclusions proved */ For path(s) from assertion of line 431 to run-time check associated with statement of line 434: procedure_round_6_7. *** true . /* all conclusions proved */ For path(s) from assertion of line 431 to run-time check associated with statement of line 434: procedure_round_6_8. *** true . /* all conclusions proved */ For path(s) from assertion of line 431 to run-time check associated with statement of line 435: procedure_round_6_9. *** true . /* all conclusions proved */ For path(s) from assertion of line 431 to assertion of line 437: procedure_round_6_10. *** true . /* all conclusions proved */ For path(s) from assertion of line 437 to run-time check associated with statement of line 439: procedure_round_6_11. *** true . /* all conclusions proved */ For path(s) from assertion of line 437 to run-time check associated with statement of line 440: procedure_round_6_12. *** true . /* all conclusions proved */ For path(s) from assertion of line 437 to run-time check associated with statement of line 440: procedure_round_6_13. *** true . /* all conclusions proved */ For path(s) from assertion of line 437 to run-time check associated with statement of line 441: procedure_round_6_14. *** true . /* all conclusions proved */ For path(s) from assertion of line 437 to assertion of line 443: procedure_round_6_15. *** true . /* all conclusions proved */ For path(s) from assertion of line 443 to run-time check associated with statement of line 445: procedure_round_6_16. *** true . /* all conclusions proved */ For path(s) from assertion of line 443 to run-time check associated with statement of line 446: procedure_round_6_17. *** true . /* all conclusions proved */ For path(s) from assertion of line 443 to run-time check associated with statement of line 446: procedure_round_6_18. *** true . /* all conclusions proved */ For path(s) from assertion of line 443 to run-time check associated with statement of line 447: procedure_round_6_19. *** true . /* all conclusions proved */ For path(s) from assertion of line 443 to finish: procedure_round_6_20. *** true . 
/* all conclusions proved */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_1.fdl0000644000175000017500000000577111712513676031631 0ustar eugeneugen {*******************************************************} {FDL Declarations} {Examiner Pro Edition} {*******************************************************} {procedure Skein.Skein_512_Process_Block.Round_1} title procedure round_1; function round__(real) : integer; type interfaces__unsigned_64 = integer; type spark__unsigned__u64 = integer; type spark__unsigned__shift_count = integer; type spark__crypto__i8 = integer; type spark__crypto__word_count_t = integer; type spark__crypto__u64_seq = array [ spark__crypto__word_count_t] of spark__unsigned__u64; const r_512_0_0 : integer = pending; const r_512_0_1 : integer = pending; const r_512_0_2 : integer = pending; const r_512_0_3 : integer = pending; const spark__crypto__word_count_t__base__first : integer = pending; const spark__crypto__word_count_t__base__last : integer = pending; const spark__crypto__i8__base__first : integer = pending; const spark__crypto__i8__base__last : integer = pending; const spark__unsigned__shift_count__base__first : integer = pending; const spark__unsigned__shift_count__base__last : integer = pending; const spark__unsigned__u64__base__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__base__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__last : interfaces__unsigned_64 = pending; const integer__base__first : integer = pending; const integer__base__last : integer = pending; const spark__crypto__word_count_t__first : integer = pending; const spark__crypto__word_count_t__last : integer = pending; const spark__crypto__word_count_t__size : integer = pending; const spark__crypto__i8__first : integer = pending; const spark__crypto__i8__last : integer = pending; const spark__crypto__i8__size : integer = pending; const spark__unsigned__shift_count__first : integer = pending; const spark__unsigned__shift_count__last : integer = pending; const spark__unsigned__shift_count__size : integer = pending; const spark__unsigned__u64__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__last : interfaces__unsigned_64 = pending; const spark__unsigned__u64__modulus : integer = pending; const spark__unsigned__u64__size : integer = pending; const interfaces__unsigned_64__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__modulus : integer = pending; const interfaces__unsigned_64__size : integer = pending; const integer__first : integer = pending; const integer__last : integer = pending; const integer__size : integer = pending; var x : spark__crypto__u64_seq; function spark__unsigned__rotate_left_64( interfaces__unsigned_64, integer) : interfaces__unsigned_64; end; ././@LongLink0000000000000000000000000000015100000000000011562 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/update_context.vctspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/update_context0000644000175000017500000000000011712513676032520 0ustar eugeneugenspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_3.log0000644000175000017500000000202611712513676031635 0ustar eugeneugenSPARK Simplifier Pro Edition Reading 
round_3.fdl (for inherited FDL type declarations) Processing round_3.vcg ... Simplified VC: 1 - All conclusions proved Simplified VC: 2 - All conclusions proved Simplified VC: 3 - All conclusions proved Simplified VC: 4 - All conclusions proved Simplified VC: 5 - All conclusions proved Simplified VC: 6 - All conclusions proved Simplified VC: 7 - All conclusions proved Simplified VC: 8 - All conclusions proved Simplified VC: 9 - All conclusions proved Simplified VC: 10 - All conclusions proved Simplified VC: 11 - All conclusions proved Simplified VC: 12 - All conclusions proved Simplified VC: 13 - All conclusions proved Simplified VC: 14 - All conclusions proved Simplified VC: 15 - All conclusions proved Simplified VC: 16 - All conclusions proved Simplified VC: 17 - All conclusions proved Simplified VC: 18 - All conclusions proved Simplified VC: 19 - All conclusions proved Simplified VC: 20 - All conclusions proved Automatic simplification completed. Simplified output sent to round_3.siv. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_7.rls0000644000175000017500000000622211712513676031662 0ustar eugeneugen /*********************************************************/ /*Proof Rule Declarations*/ /*Examiner Pro Edition*/ /*********************************************************/ /*procedure Skein.Skein_512_Process_Block.Round_7*/ rule_family round_7_rules: X requires [X:any] & X <= Y requires [X:ire, Y:ire] & X >= Y requires [X:ire, Y:ire]. round_7_rules(1): r_512_6_0 may_be_replaced_by 25. round_7_rules(2): r_512_6_1 may_be_replaced_by 29. round_7_rules(3): r_512_6_2 may_be_replaced_by 39. round_7_rules(4): r_512_6_3 may_be_replaced_by 43. round_7_rules(5): integer__size >= 0 may_be_deduced. round_7_rules(6): integer__first may_be_replaced_by -2147483648. round_7_rules(7): integer__last may_be_replaced_by 2147483647. round_7_rules(8): integer__base__first may_be_replaced_by -2147483648. round_7_rules(9): integer__base__last may_be_replaced_by 2147483647. round_7_rules(10): interfaces__unsigned_64__size >= 0 may_be_deduced. round_7_rules(11): interfaces__unsigned_64__size may_be_replaced_by 64. round_7_rules(12): interfaces__unsigned_64__first may_be_replaced_by 0. round_7_rules(13): interfaces__unsigned_64__last may_be_replaced_by 18446744073709551615. round_7_rules(14): interfaces__unsigned_64__base__first may_be_replaced_by 0. round_7_rules(15): interfaces__unsigned_64__base__last may_be_replaced_by 18446744073709551615. round_7_rules(16): interfaces__unsigned_64__modulus may_be_replaced_by 18446744073709551616. round_7_rules(17): spark__unsigned__u64__size >= 0 may_be_deduced. round_7_rules(18): spark__unsigned__u64__first may_be_replaced_by 0. round_7_rules(19): spark__unsigned__u64__last may_be_replaced_by 18446744073709551615. round_7_rules(20): spark__unsigned__u64__base__first may_be_replaced_by 0. round_7_rules(21): spark__unsigned__u64__base__last may_be_replaced_by 18446744073709551615. round_7_rules(22): spark__unsigned__u64__modulus may_be_replaced_by 18446744073709551616. round_7_rules(23): spark__unsigned__shift_count__size >= 0 may_be_deduced. round_7_rules(24): spark__unsigned__shift_count__first may_be_replaced_by 0. round_7_rules(25): spark__unsigned__shift_count__last may_be_replaced_by 64. round_7_rules(26): spark__unsigned__shift_count__base__first may_be_replaced_by -2147483648. round_7_rules(27): spark__unsigned__shift_count__base__last may_be_replaced_by 2147483647. 
round_7_rules(28): spark__crypto__i8__size >= 0 may_be_deduced. round_7_rules(29): spark__crypto__i8__first may_be_replaced_by 0. round_7_rules(30): spark__crypto__i8__last may_be_replaced_by 7. round_7_rules(31): spark__crypto__i8__base__first may_be_replaced_by -2147483648. round_7_rules(32): spark__crypto__i8__base__last may_be_replaced_by 2147483647. round_7_rules(33): spark__crypto__word_count_t__size >= 0 may_be_deduced. round_7_rules(34): spark__crypto__word_count_t__first may_be_replaced_by 0. round_7_rules(35): spark__crypto__word_count_t__last may_be_replaced_by 268435455. round_7_rules(36): spark__crypto__word_count_t__base__first may_be_replaced_by -2147483648. round_7_rules(37): spark__crypto__word_count_t__base__last may_be_replaced_by 2147483647. ././@LongLink0000000000000000000000000000016100000000000011563 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/do_first_key_injection.rlsspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/do_first_key_i0000644000175000017500000000675511712513676032512 0ustar eugeneugen /*********************************************************/ /*Proof Rule Declarations*/ /*Examiner Pro Edition*/ /*********************************************************/ /*procedure Skein.Skein_512_Process_Block.Do_First_Key_Injection*/ rule_family do_first_key_rules: X requires [X:any] & X <= Y requires [X:ire, Y:ire] & X >= Y requires [X:ire, Y:ire]. do_first_key_rules(1): wcnt may_be_replaced_by 8. do_first_key_rules(2): integer__size >= 0 may_be_deduced. do_first_key_rules(3): integer__first may_be_replaced_by -2147483648. do_first_key_rules(4): integer__last may_be_replaced_by 2147483647. do_first_key_rules(5): integer__base__first may_be_replaced_by -2147483648. do_first_key_rules(6): integer__base__last may_be_replaced_by 2147483647. do_first_key_rules(7): interfaces__unsigned_64__size >= 0 may_be_deduced. do_first_key_rules(8): interfaces__unsigned_64__size may_be_replaced_by 64. do_first_key_rules(9): interfaces__unsigned_64__first may_be_replaced_by 0. do_first_key_rules(10): interfaces__unsigned_64__last may_be_replaced_by 18446744073709551615. do_first_key_rules(11): interfaces__unsigned_64__base__first may_be_replaced_by 0. do_first_key_rules(12): interfaces__unsigned_64__base__last may_be_replaced_by 18446744073709551615. do_first_key_rules(13): interfaces__unsigned_64__modulus may_be_replaced_by 18446744073709551616. do_first_key_rules(14): spark__unsigned__u64__size >= 0 may_be_deduced. do_first_key_rules(15): spark__unsigned__u64__first may_be_replaced_by 0. do_first_key_rules(16): spark__unsigned__u64__last may_be_replaced_by 18446744073709551615. do_first_key_rules(17): spark__unsigned__u64__base__first may_be_replaced_by 0. do_first_key_rules(18): spark__unsigned__u64__base__last may_be_replaced_by 18446744073709551615. do_first_key_rules(19): spark__unsigned__u64__modulus may_be_replaced_by 18446744073709551616. do_first_key_rules(20): spark__crypto__i3__size >= 0 may_be_deduced. do_first_key_rules(21): spark__crypto__i3__first may_be_replaced_by 0. do_first_key_rules(22): spark__crypto__i3__last may_be_replaced_by 2. do_first_key_rules(23): spark__crypto__i3__base__first may_be_replaced_by -2147483648. do_first_key_rules(24): spark__crypto__i3__base__last may_be_replaced_by 2147483647. do_first_key_rules(25): spark__crypto__i8__size >= 0 may_be_deduced. do_first_key_rules(26): spark__crypto__i8__first may_be_replaced_by 0. 
do_first_key_rules(27): spark__crypto__i8__last may_be_replaced_by 7. do_first_key_rules(28): spark__crypto__i8__base__first may_be_replaced_by -2147483648. do_first_key_rules(29): spark__crypto__i8__base__last may_be_replaced_by 2147483647. do_first_key_rules(30): spark__crypto__i9__size >= 0 may_be_deduced. do_first_key_rules(31): spark__crypto__i9__first may_be_replaced_by 0. do_first_key_rules(32): spark__crypto__i9__last may_be_replaced_by 8. do_first_key_rules(33): spark__crypto__i9__base__first may_be_replaced_by -2147483648. do_first_key_rules(34): spark__crypto__i9__base__last may_be_replaced_by 2147483647. do_first_key_rules(35): spark__crypto__word_count_t__size >= 0 may_be_deduced. do_first_key_rules(36): spark__crypto__word_count_t__first may_be_replaced_by 0. do_first_key_rules(37): spark__crypto__word_count_t__last may_be_replaced_by 268435455. do_first_key_rules(38): spark__crypto__word_count_t__base__first may_be_replaced_by -2147483648. do_first_key_rules(39): spark__crypto__word_count_t__base__last may_be_replaced_by 2147483647. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/inject_key.vsm0000644000175000017500000000005611712765060032431 0ustar eugeneugeninject_key,0,0,0,0,0,0,0,-nan,-nan,-nan,-nan, ././@LongLink0000000000000000000000000000015100000000000011562 Lustar rootrootspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/update_context.vcgspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/update_context0000644000175000017500000002743211712513676032542 0ustar eugeneugen ******************************************************* Semantic Analysis of SPARK Text Examiner Pro Edition ******************************************************* procedure Skein.Skein_512_Process_Block.Update_Context For path(s) from start to run-time check associated with statement of line 608: procedure_update_context_1. H1: true . H2: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H3: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H4: fld_byte_count(fld_h(ctx)) >= natural__first . H5: fld_byte_count(fld_h(ctx)) <= natural__last . H6: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H7: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H8: true . H9: true . H10: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H11: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H12: true . H13: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H14: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H15: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H16: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H17: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H18: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H19: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . 
H20: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H21: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( w, [i___1]) >= spark__unsigned__u64__first) and (element( w, [i___1]) <= spark__unsigned__u64__last))) . H22: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . -> C1: bit__xor(element(x, [7]), element(w, [7])) >= spark__unsigned__u64__first . C2: bit__xor(element(x, [7]), element(w, [7])) <= spark__unsigned__u64__last . C3: 7 >= spark__crypto__i8__first . C4: 7 <= spark__crypto__i8__last . C5: 7 >= spark__crypto__i8__first . C6: 7 <= spark__crypto__i8__last . C7: bit__xor(element(x, [6]), element(w, [6])) >= spark__unsigned__u64__first . C8: bit__xor(element(x, [6]), element(w, [6])) <= spark__unsigned__u64__last . C9: 6 >= spark__crypto__i8__first . C10: 6 <= spark__crypto__i8__last . C11: 6 >= spark__crypto__i8__first . C12: 6 <= spark__crypto__i8__last . C13: bit__xor(element(x, [5]), element(w, [5])) >= spark__unsigned__u64__first . C14: bit__xor(element(x, [5]), element(w, [5])) <= spark__unsigned__u64__last . C15: 5 >= spark__crypto__i8__first . C16: 5 <= spark__crypto__i8__last . C17: 5 >= spark__crypto__i8__first . C18: 5 <= spark__crypto__i8__last . C19: bit__xor(element(x, [4]), element(w, [4])) >= spark__unsigned__u64__first . C20: bit__xor(element(x, [4]), element(w, [4])) <= spark__unsigned__u64__last . C21: 4 >= spark__crypto__i8__first . C22: 4 <= spark__crypto__i8__last . C23: 4 >= spark__crypto__i8__first . C24: 4 <= spark__crypto__i8__last . C25: bit__xor(element(x, [3]), element(w, [3])) >= spark__unsigned__u64__first . C26: bit__xor(element(x, [3]), element(w, [3])) <= spark__unsigned__u64__last . C27: 3 >= spark__crypto__i8__first . C28: 3 <= spark__crypto__i8__last . C29: 3 >= spark__crypto__i8__first . C30: 3 <= spark__crypto__i8__last . C31: bit__xor(element(x, [2]), element(w, [2])) >= spark__unsigned__u64__first . C32: bit__xor(element(x, [2]), element(w, [2])) <= spark__unsigned__u64__last . C33: 2 >= spark__crypto__i8__first . C34: 2 <= spark__crypto__i8__last . C35: 2 >= spark__crypto__i8__first . C36: 2 <= spark__crypto__i8__last . C37: bit__xor(element(x, [1]), element(w, [1])) >= spark__unsigned__u64__first . C38: bit__xor(element(x, [1]), element(w, [1])) <= spark__unsigned__u64__last . C39: 1 >= spark__crypto__i8__first . C40: 1 <= spark__crypto__i8__last . C41: 1 >= spark__crypto__i8__first . C42: 1 <= spark__crypto__i8__last . C43: bit__xor(element(x, [0]), element(w, [0])) >= spark__unsigned__u64__first . C44: bit__xor(element(x, [0]), element(w, [0])) <= spark__unsigned__u64__last . C45: 0 >= spark__crypto__i8__first . C46: 0 <= spark__crypto__i8__last . C47: 0 >= spark__crypto__i8__first . C48: 0 <= spark__crypto__i8__last . For path(s) from start to finish: procedure_update_context_2. H1: true . H2: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . 
H3: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H4: fld_byte_count(fld_h(ctx)) >= natural__first . H5: fld_byte_count(fld_h(ctx)) <= natural__last . H6: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H7: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H8: true . H9: true . H10: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H11: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H12: true . H13: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H14: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H15: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H16: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H17: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H18: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H19: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H20: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H21: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( w, [i___1]) >= spark__unsigned__u64__first) and (element( w, [i___1]) <= spark__unsigned__u64__last))) . H22: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H23: bit__xor(element(x, [7]), element(w, [7])) >= spark__unsigned__u64__first . H24: bit__xor(element(x, [7]), element(w, [7])) <= spark__unsigned__u64__last . H25: 7 >= spark__crypto__i8__first . H26: 7 <= spark__crypto__i8__last . H27: 7 >= spark__crypto__i8__first . H28: 7 <= spark__crypto__i8__last . H29: bit__xor(element(x, [6]), element(w, [6])) >= spark__unsigned__u64__first . H30: bit__xor(element(x, [6]), element(w, [6])) <= spark__unsigned__u64__last . H31: 6 >= spark__crypto__i8__first . H32: 6 <= spark__crypto__i8__last . H33: 6 >= spark__crypto__i8__first . H34: 6 <= spark__crypto__i8__last . H35: bit__xor(element(x, [5]), element(w, [5])) >= spark__unsigned__u64__first . H36: bit__xor(element(x, [5]), element(w, [5])) <= spark__unsigned__u64__last . H37: 5 >= spark__crypto__i8__first . H38: 5 <= spark__crypto__i8__last . H39: 5 >= spark__crypto__i8__first . H40: 5 <= spark__crypto__i8__last . H41: bit__xor(element(x, [4]), element(w, [4])) >= spark__unsigned__u64__first . H42: bit__xor(element(x, [4]), element(w, [4])) <= spark__unsigned__u64__last . H43: 4 >= spark__crypto__i8__first . H44: 4 <= spark__crypto__i8__last . H45: 4 >= spark__crypto__i8__first . H46: 4 <= spark__crypto__i8__last . H47: bit__xor(element(x, [3]), element(w, [3])) >= spark__unsigned__u64__first . H48: bit__xor(element(x, [3]), element(w, [3])) <= spark__unsigned__u64__last . H49: 3 >= spark__crypto__i8__first . H50: 3 <= spark__crypto__i8__last . H51: 3 >= spark__crypto__i8__first . H52: 3 <= spark__crypto__i8__last . H53: bit__xor(element(x, [2]), element(w, [2])) >= spark__unsigned__u64__first . H54: bit__xor(element(x, [2]), element(w, [2])) <= spark__unsigned__u64__last . H55: 2 >= spark__crypto__i8__first . 
H56: 2 <= spark__crypto__i8__last . H57: 2 >= spark__crypto__i8__first . H58: 2 <= spark__crypto__i8__last . H59: bit__xor(element(x, [1]), element(w, [1])) >= spark__unsigned__u64__first . H60: bit__xor(element(x, [1]), element(w, [1])) <= spark__unsigned__u64__last . H61: 1 >= spark__crypto__i8__first . H62: 1 <= spark__crypto__i8__last . H63: 1 >= spark__crypto__i8__first . H64: 1 <= spark__crypto__i8__last . H65: bit__xor(element(x, [0]), element(w, [0])) >= spark__unsigned__u64__first . H66: bit__xor(element(x, [0]), element(w, [0])) <= spark__unsigned__u64__last . H67: 0 >= spark__crypto__i8__first . H68: 0 <= spark__crypto__i8__last . H69: 0 >= spark__crypto__i8__first . H70: 0 <= spark__crypto__i8__last . -> C1: fld_hash_bit_len(fld_h(upf_x(ctx, mk__spark__crypto__u64_seq([0] := bit__xor(element( x, [0]), element(w, [0])), [1] := bit__xor(element( x, [1]), element(w, [1])), [2] := bit__xor(element( x, [2]), element(w, [2])), [3] := bit__xor(element( x, [3]), element(w, [3])), [4] := bit__xor(element( x, [4]), element(w, [4])), [5] := bit__xor(element( x, [5]), element(w, [5])), [6] := bit__xor(element( x, [6]), element(w, [6])), [7] := bit__xor(element( x, [7]), element(w, [7])))))) = fld_hash_bit_len(fld_h( ctx)) . C2: fld_byte_count(fld_h(upf_x(ctx, mk__spark__crypto__u64_seq([0] := bit__xor(element( x, [0]), element(w, [0])), [1] := bit__xor(element( x, [1]), element(w, [1])), [2] := bit__xor(element( x, [2]), element(w, [2])), [3] := bit__xor(element( x, [3]), element(w, [3])), [4] := bit__xor(element( x, [4]), element(w, [4])), [5] := bit__xor(element( x, [5]), element(w, [5])), [6] := bit__xor(element( x, [6]), element(w, [6])), [7] := bit__xor(element( x, [7]), element(w, [7])))))) = fld_byte_count(fld_h( ctx)) . spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_5.log0000644000175000017500000000202611712513676031637 0ustar eugeneugenSPARK Simplifier Pro Edition Reading round_5.fdl (for inherited FDL type declarations) Processing round_5.vcg ... Simplified VC: 1 - All conclusions proved Simplified VC: 2 - All conclusions proved Simplified VC: 3 - All conclusions proved Simplified VC: 4 - All conclusions proved Simplified VC: 5 - All conclusions proved Simplified VC: 6 - All conclusions proved Simplified VC: 7 - All conclusions proved Simplified VC: 8 - All conclusions proved Simplified VC: 9 - All conclusions proved Simplified VC: 10 - All conclusions proved Simplified VC: 11 - All conclusions proved Simplified VC: 12 - All conclusions proved Simplified VC: 13 - All conclusions proved Simplified VC: 14 - All conclusions proved Simplified VC: 15 - All conclusions proved Simplified VC: 16 - All conclusions proved Simplified VC: 17 - All conclusions proved Simplified VC: 18 - All conclusions proved Simplified VC: 19 - All conclusions proved Simplified VC: 20 - All conclusions proved Automatic simplification completed. Simplified output sent to round_5.siv. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/inject_key.log0000644000175000017500000000076211712513676032415 0ustar eugeneugenSPARK Simplifier Pro Edition Reading inject_key.fdl (for inherited FDL type declarations) Processing inject_key.vcg ... 
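Illustrative sketch, not part of the archived sources: the Update_Context obligations above reduce to one pair of range conclusions per index 0 .. 7, each of the form bit__xor(element(x, [i]), element(w, [i])) bounded by spark__unsigned__u64__first/last, plus the matching spark__crypto__i8 index checks. A minimal Ada rendering of that element-wise feed-forward, using hypothetical names (U64_Seq, Xor_Feed_Forward) rather than the package's own declarations, would look like:

   with Interfaces; use Interfaces;

   procedure Xor_Feed_Forward is
      type U64_Seq is array (Natural range 0 .. 7) of Unsigned_64;
      X, W : U64_Seq := (others => 0);
   begin
      --  One conclusion pair per index in procedure_update_context_1;
      --  "xor" on a 64-bit modular type cannot leave 0 .. 2**64 - 1, so each
      --  pair follows from the hypotheses on X and W alone, leaving only the
      --  literal index checks 0 .. 7 against spark__crypto__i8.
      for I in X'Range loop
         X (I) := X (I) xor W (I);
      end loop;
   end Xor_Feed_Forward;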
Simplified VC: 1 - All conclusions proved Simplified VC: 2 - All conclusions proved Simplified VC: 3 - All conclusions proved Simplified VC: 4 - All conclusions proved Simplified VC: 5 - All conclusions proved Simplified VC: 6 - All conclusions proved Simplified VC: 7 - All conclusions proved Automatic simplification completed. Simplified output sent to inject_key.siv. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/round_8.slg0000644000175000017500000025143411712513676031657 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Round_8 @@@@@@@@@@ VC: procedure_round_8_1. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_8_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_8_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_8_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_8_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_8_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [6]) + element(x, [1])) mod 18446744073709551616 >= 0 New C2: (element(x, [6]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [6]) + element(x, [1])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [6]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_8_2. 
@@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [1]) <= interfaces__unsigned_64__last *** Proved C5: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 1 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 1 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_8_rules(1). This was achieved by replacing all occurrences of r_512_7_0 by: 8. New C1: 8 >= spark__unsigned__shift_count__first New C2: 8 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_8_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_8_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule round_8_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [1]) >= 0 -S- Applied substitution rule round_8_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [1]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_8_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [6]) + element(x, [1])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [6]) + element(x, [1])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_8_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [6]) + element(x, [1])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_8_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [6]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_8_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_8_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. 
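Worked check, not part of the archived log: every VC in this trace closes the same way. Once round_8_rules(16) rewrites interfaces__unsigned_64__modulus to 2**64 = 18446744073709551616 and rules (18)/(19) rewrite the spark__unsigned__u64 bounds to 0 and 18446744073709551615, a conclusion pair such as

   (element(x, [6]) + element(x, [1])) mod 18446744073709551616 >= 0
   (element(x, [6]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615

is immediate, because for non-negative operands and a modulus m > 0 the value of v mod m lies in 0 .. m - 1. The remaining index conclusions become literal comparisons against 0 and 7 after the spark__crypto__i8 substitutions and are discharged as "true".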
New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [1]) >= 0 using hypothesis H2. *** Proved C4: element(x, [1]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_8_3. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [1]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0) <= spark__unsigned__u64__last -S- Applied substitution rule round_8_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0) >= 0 -S- Applied substitution rule round_8_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0) <= 18446744073709551615 -S- Applied substitution rule round_8_rules(1). This was achieved by replacing all occurrences of r_512_7_0 by: 8. New H11: 8 >= spark__unsigned__shift_count__first New H12: 8 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [1]), 8) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [1]), 8) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [1]), 8) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [1]), 8) <= 18446744073709551615 -S- Applied substitution rule round_8_rules(12). 
This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [1]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [1]), 8) >= 0 -S- Applied substitution rule round_8_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [1]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [1]), 8) <= 18446744073709551615 -S- Applied substitution rule round_8_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [6]) + element(x, [1])) mod 18446744073709551616 >= 0 New H4: (element(x, [6]) + element(x, [1])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_8_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_8_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_8_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_8_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [1]), 8) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [1]), 8) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_8_4. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [1]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . 
%%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0), (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0), (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 6 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 6 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 1 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 1 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 1 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_8_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0), (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_8_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0), (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0), (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. *** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0), (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_8_5. 
@@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [1]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [1]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0) <= spark__unsigned__u64__last %%% Simplified H23 on reading formula in, to give: %%% H23: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0), (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified H24 on reading formula in, to give: %%% H24: bit__xor(spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0), (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last --- Hypothesis H25 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H26 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H27 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H28 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H29 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . 
%%% Simplified C2 on reading formula in, to give: %%% C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0), (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [6], (element(x, [6]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0), (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) *** Proved C1: true *** Proved C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [6], (element(x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0), (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [6], (element(x, [6]) + element( x, [1])) mod interfaces__unsigned_64__modulus), [1], bit__xor( spark__unsigned__rotate_left_64(element(x, [1]), r_512_7_0), (element( x, [6]) + element(x, [1])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) using hypotheses H2, H3, H4, H5, H6, H7, H8, H23 & H24. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_8_6. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_8_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_8_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_8_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_8_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_8_rules(16). 
This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [0]) + element(x, [7])) mod 18446744073709551616 >= 0 New C2: (element(x, [0]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [0]) + element(x, [7])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [0]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_8_7. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [7]) <= interfaces__unsigned_64__last *** Proved C5: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 7 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 7 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_8_rules(2). This was achieved by replacing all occurrences of r_512_7_1 by: 35. New C1: 35 >= spark__unsigned__shift_count__first New C2: 35 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_8_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_8_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule round_8_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [7]) >= 0 -S- Applied substitution rule round_8_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [7]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_8_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [0]) + element(x, [7])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [0]) + element(x, [7])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_8_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [0]) + element(x, [7])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_8_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [0]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_8_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_8_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [7]) >= 0 using hypothesis H2. *** Proved C4: element(x, [7]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_8_8. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [7]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1) <= spark__unsigned__u64__last -S- Applied substitution rule round_8_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1) >= 0 -S- Applied substitution rule round_8_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1) <= 18446744073709551615 -S- Applied substitution rule round_8_rules(2). This was achieved by replacing all occurrences of r_512_7_1 by: 35. New H11: 35 >= spark__unsigned__shift_count__first New H12: 35 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [7]), 35) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [7]), 35) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [7]), 35) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [7]), 35) <= 18446744073709551615 -S- Applied substitution rule round_8_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [7]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [7]), 35) >= 0 -S- Applied substitution rule round_8_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [7]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [7]), 35) <= 18446744073709551615 -S- Applied substitution rule round_8_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [0]) + element(x, [7])) mod 18446744073709551616 >= 0 New H4: (element(x, [0]) + element(x, [7])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_8_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_8_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_8_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_8_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [7]), 35) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [7]), 35) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_8_9. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . 
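For orientation: the VCs in this group range-check one MIX step of the Skein-512 round, which adds two 64-bit state words modulo 2**64, rotates the second word left by a round constant (here r_512_7_1 = 35), and XORs it with the new sum. The following is a minimal, illustrative SPARK-style sketch of that step only; the subprogram and parameter names are assumptions for the example and are not the shipped package source.

   with Interfaces; use Interfaces;

   --  Illustrative sketch only: one MIX step over a pair of state words.
   --  Unsigned_64 is a modular type, so "+" is already addition mod 2**64
   --  and the result can never leave 0 .. 2**64 - 1.
   procedure Mix_Pair (A, B : in out Unsigned_64; R : in Natural) is
   begin
      A := A + B;                      --  corresponds to (x(0) + x(7)) mod 2**64
      B := Rotate_Left (B, R) xor A;   --  rotate the old B, then XOR with the new A
   end Mix_Pair;

The VCs above and below simply establish that each intermediate value of this shape stays within spark__unsigned__u64, i.e. 0 .. 18446744073709551615.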
%%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [7]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1), (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1), (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 0 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 0 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 7 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 7 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 7 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_8_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1), (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_8_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1), (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1), (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. 
*** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1), (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_8_10. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [7]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [7]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1) <= spark__unsigned__u64__last %%% Simplified H23 on reading formula in, to give: %%% H23: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1), (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified H24 on reading formula in, to give: %%% H24: bit__xor(spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1), (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last --- Hypothesis H25 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H26 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H27 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H28 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H29 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . 
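Informally, the quantified conclusion C2 of procedure_round_8_10 (shown next) follows by a case split on the index i___1: for i___1 outside {0, 7} the doubly-updated array returns the original element, whose bounds come from H2; for i___1 = 0 it returns (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus, whose bounds are H3 and H4; and for i___1 = 7 it returns the bit__xor term, whose bounds are H23 and H24. Together with the index-range facts H5 .. H8, that is exactly the hypothesis set the simplifier cites when discharging C2.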
%%% Simplified C2 on reading formula in, to give: %%% C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], bit__xor( spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1), (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [0], (element(x, [0]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], bit__xor( spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1), (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) *** Proved C1: true *** Proved C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [0], (element(x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus), [7], bit__xor( spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1), (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [0], (element(x, [0]) + element( x, [7])) mod interfaces__unsigned_64__modulus), [7], bit__xor( spark__unsigned__rotate_left_64(element(x, [7]), r_512_7_1), (element( x, [0]) + element(x, [7])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) using hypotheses H2, H3, H4, H5, H6, H7, H8, H23 & H24. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_8_11. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_8_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_8_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_8_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_8_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_8_rules(16). 
This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [2]) + element(x, [5])) mod 18446744073709551616 >= 0 New C2: (element(x, [2]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [2]) + element(x, [5])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [2]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_8_12. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [5]) <= interfaces__unsigned_64__last *** Proved C5: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 5 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 5 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_8_rules(3). This was achieved by replacing all occurrences of r_512_7_2 by: 56. New C1: 56 >= spark__unsigned__shift_count__first New C2: 56 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_8_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_8_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule round_8_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [5]) >= 0 -S- Applied substitution rule round_8_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [5]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_8_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [2]) + element(x, [5])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [2]) + element(x, [5])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_8_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [2]) + element(x, [5])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_8_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [2]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_8_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_8_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [5]) >= 0 using hypothesis H2. *** Proved C4: element(x, [5]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_8_13. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [5]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2) <= spark__unsigned__u64__last -S- Applied substitution rule round_8_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2) >= 0 -S- Applied substitution rule round_8_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2) <= 18446744073709551615 -S- Applied substitution rule round_8_rules(3). This was achieved by replacing all occurrences of r_512_7_2 by: 56. New H11: 56 >= spark__unsigned__shift_count__first New H12: 56 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [5]), 56) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [5]), 56) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [5]), 56) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [5]), 56) <= 18446744073709551615 -S- Applied substitution rule round_8_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [5]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [5]), 56) >= 0 -S- Applied substitution rule round_8_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [5]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [5]), 56) <= 18446744073709551615 -S- Applied substitution rule round_8_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [2]) + element(x, [5])) mod 18446744073709551616 >= 0 New H4: (element(x, [2]) + element(x, [5])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_8_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_8_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_8_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_8_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [5]), 56) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [5]), 56) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_8_14. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . 
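Several VCs in this group (procedure_round_8_6, _11 and _16, for example) reduce, once rule round_8_rules(16) substitutes the concrete modulus, to bounds on an expression of the form (element(x, [i]) + element(x, [j])) mod 18446744073709551616. These close without appealing to any hypotheses because, for any integers a and b and a positive modulus m,

   0 <= (a + b) mod m <= m - 1,   and here m - 1 = 2**64 - 1 = 18446744073709551615,

which is precisely the pair of conclusions C1 and C2 in those VCs.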
%%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [5]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2), (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2), (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 2 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 2 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 5 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 5 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 5 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_8_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2), (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_8_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2), (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2), (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. 
*** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2), (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_8_15. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [5]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [5]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2) <= spark__unsigned__u64__last %%% Simplified H23 on reading formula in, to give: %%% H23: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2), (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified H24 on reading formula in, to give: %%% H24: bit__xor(spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2), (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last --- Hypothesis H25 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H26 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H27 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H28 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H29 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . 
%%% Simplified C2 on reading formula in, to give: %%% C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2), (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [2], (element(x, [2]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2), (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) *** Proved C1: true *** Proved C2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(update(update(x, [2], (element(x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2), (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) and element(update(update(x, [2], (element(x, [2]) + element( x, [5])) mod interfaces__unsigned_64__modulus), [5], bit__xor( spark__unsigned__rotate_left_64(element(x, [5]), r_512_7_2), (element( x, [2]) + element(x, [5])) mod interfaces__unsigned_64__modulus)), [ i___1]) <= spark__unsigned__u64__last) using hypotheses H2, H3, H4, H5, H6, H7, H8, H23 & H24. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_8_16. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule round_8_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New C1: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule round_8_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule round_8_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New C3: true New C5: true New C7: true -S- Applied substitution rule round_8_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C4: true New C6: true New C8: true *** Proved C3: true *** Proved C5: true *** Proved C7: true *** Proved C4: true *** Proved C6: true *** Proved C8: true -S- Applied substitution rule round_8_rules(16). 
This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (element(x, [4]) + element(x, [3])) mod 18446744073709551616 >= 0 New C2: (element(x, [4]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 *** Proved C1: (element(x, [4]) + element(x, [3])) mod 18446744073709551616 >= 0 *** Proved C2: (element(x, [4]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_round_8_17. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified C3 on reading formula in, to give: %%% C3: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(x, [3]) <= interfaces__unsigned_64__last *** Proved C5: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 3 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 3 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_8_rules(4). This was achieved by replacing all occurrences of r_512_7_3 by: 22. New C1: 22 >= spark__unsigned__shift_count__first New C2: 22 <= spark__unsigned__shift_count__last -S- Applied substitution rule round_8_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule round_8_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule round_8_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(x, [3]) >= 0 -S- Applied substitution rule round_8_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(x, [3]) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule round_8_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [4]) + element(x, [3])) mod 18446744073709551616 >= spark__unsigned__u64__first New H4: (element(x, [4]) + element(x, [3])) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule round_8_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [4]) + element(x, [3])) mod 18446744073709551616 >= 0 -S- Applied substitution rule round_8_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [4]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) -S- Applied substitution rule round_8_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H5: true New H7: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) -S- Applied substitution rule round_8_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C3: element(x, [3]) >= 0 using hypothesis H2. *** Proved C4: element(x, [3]) <= 18446744073709551615 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_8_18. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . %%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [3]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [3]), r_512_7_3) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [3]), r_512_7_3) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: spark__unsigned__rotate_left_64(element(x, [3]), r_512_7_3) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: spark__unsigned__rotate_left_64(element(x, [3]), r_512_7_3) <= spark__unsigned__u64__last -S- Applied substitution rule round_8_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= 0 New C1: spark__unsigned__rotate_left_64(element(x, [3]), r_512_7_3) >= 0 -S- Applied substitution rule round_8_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H4: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: spark__unsigned__rotate_left_64(element(x, [3]), r_512_7_3) <= 18446744073709551615 -S- Applied substitution rule round_8_rules(4). This was achieved by replacing all occurrences of r_512_7_3 by: 22. New H11: 22 >= spark__unsigned__shift_count__first New H12: 22 <= spark__unsigned__shift_count__last New H19: spark__unsigned__rotate_left_64(element(x, [3]), 22) >= interfaces__unsigned_64__first New H20: spark__unsigned__rotate_left_64(element(x, [3]), 22) <= interfaces__unsigned_64__last New C1: spark__unsigned__rotate_left_64(element(x, [3]), 22) >= 0 New C2: spark__unsigned__rotate_left_64(element(x, [3]), 22) <= 18446744073709551615 -S- Applied substitution rule round_8_rules(12). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H13: element(x, [3]) >= 0 New H19: spark__unsigned__rotate_left_64(element(x, [3]), 22) >= 0 -S- Applied substitution rule round_8_rules(13). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H14: element(x, [3]) <= 18446744073709551615 New H20: spark__unsigned__rotate_left_64(element(x, [3]), 22) <= 18446744073709551615 -S- Applied substitution rule round_8_rules(16). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H3: (element(x, [4]) + element(x, [3])) mod 18446744073709551616 >= 0 New H4: (element(x, [4]) + element(x, [3])) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule round_8_rules(24). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H11: true -S- Applied substitution rule round_8_rules(25). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H12: true -S- Applied substitution rule round_8_rules(29). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [ i___1]) <= 18446744073709551615) New H5: true New H7: true -S- Applied substitution rule round_8_rules(30). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H6: true New H8: true New H2: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) *** Proved C1: spark__unsigned__rotate_left_64(element(x, [3]), 22) >= 0 using hypothesis H19. *** Proved C2: spark__unsigned__rotate_left_64(element(x, [3]), 22) <= 18446744073709551615 using hypothesis H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_8_19. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H9 has been replaced by "true". (It is already present, as H7). --- Hypothesis H10 has been replaced by "true". (It is already present, as H8) . 
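The shift-count checks in procedure_round_8_7, _12 and _17 only have to show that the round constants r_512_7_1, r_512_7_2 and r_512_7_3 (35, 56 and 22 in this file) lie in spark__unsigned__shift_count, i.e. 0 .. 64 once rules 24 and 25 are applied; the range of each rotation result is then carried as hypotheses H19 and H20 into the following VCs (_8, _13, _18) to prove their C1 and C2. A small, self-contained Ada usage sketch follows; the procedure name and the literal value are invented for illustration.

   with Interfaces; use Interfaces;

   procedure Rotate_Demo is
      --  Round constants seen in this file; each lies in 0 .. 64.
      R_1 : constant Natural := 35;
      R_2 : constant Natural := 56;
      R_3 : constant Natural := 22;
      W   : Unsigned_64 := 16#0123_4567_89AB_CDEF#;
   begin
      --  In Ada, Rotate_Left on Unsigned_64 always yields a value in
      --  0 .. 2**64 - 1; the corresponding fact for the proof function
      --  spark__unsigned__rotate_left_64 is supplied as hypotheses
      --  H19/H20 and reused to prove C1/C2 in the VCs above.
      W := Rotate_Left (W, R_1);
      W := Rotate_Left (W, R_2);
      W := Rotate_Left (W, R_3);
   end Rotate_Demo;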
%%% Simplified H13 on reading formula in, to give: %%% H13: element(x, [3]) >= interfaces__unsigned_64__first %%% Simplified H14 on reading formula in, to give: %%% H14: element(x, [3]) <= interfaces__unsigned_64__last --- Hypothesis H15 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H16 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H18 has been replaced by "true". (It is already present, as H6) . %%% Simplified H19 on reading formula in, to give: %%% H19: spark__unsigned__rotate_left_64(element(x, [3]), r_512_7_3) >= interfaces__unsigned_64__first %%% Simplified H20 on reading formula in, to give: %%% H20: spark__unsigned__rotate_left_64(element(x, [3]), r_512_7_3) <= interfaces__unsigned_64__last %%% Simplified H21 on reading formula in, to give: %%% H21: spark__unsigned__rotate_left_64(element(x, [3]), r_512_7_3) >= spark__unsigned__u64__first %%% Simplified H22 on reading formula in, to give: %%% H22: spark__unsigned__rotate_left_64(element(x, [3]), r_512_7_3) <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_7_3), (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_7_3), (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= spark__unsigned__u64__last *** Proved C3: 4 >= spark__crypto__i8__first using hypothesis H7. *** Proved C4: 4 <= spark__crypto__i8__last using hypothesis H8. *** Proved C5: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C6: 3 <= spark__crypto__i8__last using hypothesis H6. *** Proved C7: 3 >= spark__crypto__i8__first using hypothesis H5. *** Proved C8: 3 <= spark__crypto__i8__last using hypothesis H6. -S- Applied substitution rule round_8_rules(18). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= spark__unsigned__u64__last) New H3: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus >= 0 New H21: spark__unsigned__rotate_left_64(element(x, [3]), r_512_7_3) >= 0 New C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_7_3), (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= 0 -S- Applied substitution rule round_8_rules(19). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H4: (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New H22: spark__unsigned__rotate_left_64(element(x, [3]), r_512_7_3) <= 18446744073709551615 New H2: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x, [i___1]) and element(x, [i___1]) <= 18446744073709551615) New C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_7_3), (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 *** Proved C1: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_7_3), (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus) >= 0 using hypotheses H3 & H21. 
*** Proved C2: bit__xor(spark__unsigned__rotate_left_64(element(x, [3]), r_512_7_3), (element(x, [4]) + element(x, [3])) mod interfaces__unsigned_64__modulus) <= 18446744073709551615 using hypotheses H3, H4, H21 & H22. *** PROVED VC. @@@@@@@@@@ VC: procedure_round_8_20. @@@@@@@@@@ *** Proved C1: true *** PROVED VC. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block/inject_key.siv0000644000175000017500000000300311712513676032424 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block.Inject_Key For path(s) from start to default assertion of line 260: procedure_inject_key_1. *** true . /* all conclusions proved */ For path(s) from default assertion of line 260 to default assertion of line 260: procedure_inject_key_2. *** true . /* all conclusions proved */ For path(s) from default assertion of line 260 to run-time check associated with statement of line 261: procedure_inject_key_3. *** true . /* all conclusions proved */ For path(s) from default assertion of line 260 to run-time check associated with statement of line 264: procedure_inject_key_4. *** true . /* all conclusions proved */ For path(s) from default assertion of line 260 to run-time check associated with statement of line 265: procedure_inject_key_5. *** true . /* all conclusions proved */ For path(s) from default assertion of line 260 to run-time check associated with statement of line 266: procedure_inject_key_6. *** true . /* all conclusions proved */ For path(s) from default assertion of line 260 to finish: procedure_inject_key_7. *** true . /* all conclusions proved */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_start_new_type.vcg0000644000175000017500000001575211712513676030124 0ustar eugeneugen ******************************************************* Semantic Analysis of SPARK Text Examiner Pro Edition ******************************************************* procedure Skein.Skein_Start_New_Type For path(s) from start to run-time check associated with statement of line 211: procedure_skein_start_new_type_1. H1: true . H2: field_type >= spark__unsigned__u6__first . H3: field_type <= spark__unsigned__u6__last . H4: true . H5: true . H6: fld_byte_count(ctx) >= natural__first . H7: fld_byte_count(ctx) <= natural__last . H8: fld_hash_bit_len(ctx) >= hash_bit_length__first . H9: fld_hash_bit_len(ctx) <= hash_bit_length__last . H10: true . H11: true . H12: fld_field_type(fld_tweak_words(ctx)) >= spark__unsigned__u6__first . H13: fld_field_type(fld_tweak_words(ctx)) <= spark__unsigned__u6__last . H14: true . H15: fld_tree_level(fld_tweak_words(ctx)) >= spark__unsigned__u7__first . H16: fld_tree_level(fld_tweak_words(ctx)) <= spark__unsigned__u7__last . H17: fld_reserved(fld_tweak_words(ctx)) >= spark__unsigned__u16__first . H18: fld_reserved(fld_tweak_words(ctx)) <= spark__unsigned__u16__last . H19: fld_byte_count_msb(fld_tweak_words(ctx)) >= spark__unsigned__u32__first . H20: fld_byte_count_msb(fld_tweak_words(ctx)) <= spark__unsigned__u32__last . H21: fld_byte_count_lsb(fld_tweak_words(ctx)) >= spark__unsigned__u64__first . H22: fld_byte_count_lsb(fld_tweak_words(ctx)) <= spark__unsigned__u64__last . -> C1: field_type >= spark__unsigned__u6__first . C2: field_type <= spark__unsigned__u6__last . C3: 0 >= spark__unsigned__u7__first . 
C4: 0 <= spark__unsigned__u7__last . C5: 0 >= spark__unsigned__u16__first . C6: 0 <= spark__unsigned__u16__last . C7: 0 >= spark__unsigned__u32__first . C8: 0 <= spark__unsigned__u32__last . C9: 0 >= spark__unsigned__u64__first . C10: 0 <= spark__unsigned__u64__last . For path(s) from start to run-time check associated with statement of line 219: procedure_skein_start_new_type_2. H1: true . H2: field_type >= spark__unsigned__u6__first . H3: field_type <= spark__unsigned__u6__last . H4: true . H5: true . H6: fld_byte_count(ctx) >= natural__first . H7: fld_byte_count(ctx) <= natural__last . H8: fld_hash_bit_len(ctx) >= hash_bit_length__first . H9: fld_hash_bit_len(ctx) <= hash_bit_length__last . H10: true . H11: true . H12: fld_field_type(fld_tweak_words(ctx)) >= spark__unsigned__u6__first . H13: fld_field_type(fld_tweak_words(ctx)) <= spark__unsigned__u6__last . H14: true . H15: fld_tree_level(fld_tweak_words(ctx)) >= spark__unsigned__u7__first . H16: fld_tree_level(fld_tweak_words(ctx)) <= spark__unsigned__u7__last . H17: fld_reserved(fld_tweak_words(ctx)) >= spark__unsigned__u16__first . H18: fld_reserved(fld_tweak_words(ctx)) <= spark__unsigned__u16__last . H19: fld_byte_count_msb(fld_tweak_words(ctx)) >= spark__unsigned__u32__first . H20: fld_byte_count_msb(fld_tweak_words(ctx)) <= spark__unsigned__u32__last . H21: fld_byte_count_lsb(fld_tweak_words(ctx)) >= spark__unsigned__u64__first . H22: fld_byte_count_lsb(fld_tweak_words(ctx)) <= spark__unsigned__u64__last . H23: field_type >= spark__unsigned__u6__first . H24: field_type <= spark__unsigned__u6__last . H25: 0 >= spark__unsigned__u7__first . H26: 0 <= spark__unsigned__u7__last . H27: 0 >= spark__unsigned__u16__first . H28: 0 <= spark__unsigned__u16__last . H29: 0 >= spark__unsigned__u32__first . H30: 0 <= spark__unsigned__u32__last . H31: 0 >= spark__unsigned__u64__first . H32: 0 <= spark__unsigned__u64__last . -> C1: 0 >= natural__first . C2: 0 <= natural__last . For path(s) from start to finish: procedure_skein_start_new_type_3. H1: true . H2: field_type >= spark__unsigned__u6__first . H3: field_type <= spark__unsigned__u6__last . H4: true . H5: true . H6: fld_byte_count(ctx) >= natural__first . H7: fld_byte_count(ctx) <= natural__last . H8: fld_hash_bit_len(ctx) >= hash_bit_length__first . H9: fld_hash_bit_len(ctx) <= hash_bit_length__last . H10: true . H11: true . H12: fld_field_type(fld_tweak_words(ctx)) >= spark__unsigned__u6__first . H13: fld_field_type(fld_tweak_words(ctx)) <= spark__unsigned__u6__last . H14: true . H15: fld_tree_level(fld_tweak_words(ctx)) >= spark__unsigned__u7__first . H16: fld_tree_level(fld_tweak_words(ctx)) <= spark__unsigned__u7__last . H17: fld_reserved(fld_tweak_words(ctx)) >= spark__unsigned__u16__first . H18: fld_reserved(fld_tweak_words(ctx)) <= spark__unsigned__u16__last . H19: fld_byte_count_msb(fld_tweak_words(ctx)) >= spark__unsigned__u32__first . H20: fld_byte_count_msb(fld_tweak_words(ctx)) <= spark__unsigned__u32__last . H21: fld_byte_count_lsb(fld_tweak_words(ctx)) >= spark__unsigned__u64__first . H22: fld_byte_count_lsb(fld_tweak_words(ctx)) <= spark__unsigned__u64__last . H23: field_type >= spark__unsigned__u6__first . H24: field_type <= spark__unsigned__u6__last . H25: 0 >= spark__unsigned__u7__first . H26: 0 <= spark__unsigned__u7__last . H27: 0 >= spark__unsigned__u16__first . H28: 0 <= spark__unsigned__u16__last . H29: 0 >= spark__unsigned__u32__first . H30: 0 <= spark__unsigned__u32__last . H31: 0 >= spark__unsigned__u64__first . H32: 0 <= spark__unsigned__u64__last . 
H33: 0 >= natural__first . H34: 0 <= natural__last . -> C1: upf_byte_count(upf_tweak_words(ctx, mk__tweak_value( byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := field_type, first_block := first_block, final_block := final_block)), 0) = upf_byte_count(upf_tweak_words(ctx, mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := field_type, first_block := first_block, final_block := final_block)), 0) . C2: fld_hash_bit_len(upf_byte_count(upf_tweak_words(ctx, mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := field_type, first_block := first_block, final_block := final_block)), 0)) = fld_hash_bit_len(ctx) . C3: fld_byte_count(upf_byte_count(upf_tweak_words(ctx, mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := field_type, first_block := first_block, final_block := final_block)), 0)) = 0 . spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final.fdl0000644000175000017500000002545711712513676026526 0ustar eugeneugen {*******************************************************} {FDL Declarations} {Examiner Pro Edition} {*******************************************************} {procedure Skein.Skein_512_Final} title procedure skein_512_final; function round__(real) : integer; type natural = integer; type interfaces__unsigned_8 = integer; type interfaces__unsigned_16 = integer; type interfaces__unsigned_32 = integer; type interfaces__unsigned_64 = integer; type spark__unsigned__u6 = integer; type spark__unsigned__u7 = integer; type spark__unsigned__byte = integer; type spark__unsigned__u16 = integer; type spark__unsigned__u32 = integer; type spark__unsigned__u64 = integer; type spark__crypto__word_count_t = integer; type hash_bit_length = integer; type initialized_hash_bit_length = integer; type skein_512_state_words_index = integer; type skein_512_block_bytes_count = integer; type skein_512_block_bytes_index = integer; type positive_block_512_count_t = integer; type output_byte_count_t = integer; type output_block_count_t = integer; type positive_output_block_count_t = integer; type spark__crypto__byte_seq = array [natural] of spark__unsigned__byte; type spark__crypto__u64_seq = array [ spark__crypto__word_count_t] of spark__unsigned__u64; type tweak_value = record byte_count_lsb : spark__unsigned__u64; byte_count_msb : spark__unsigned__u32; reserved : spark__unsigned__u16; tree_level : spark__unsigned__u7; bit_pad : boolean; field_type : spark__unsigned__u6; first_block : boolean; final_block : boolean end; type context_header = record tweak_words : tweak_value; hash_bit_len : hash_bit_length; byte_count : natural end; type skein_512_context = record h : context_header; x : spark__crypto__u64_seq; b : spark__crypto__byte_seq end; const skein_512_block_bytes_c : integer = pending; const skein_block_type_out : spark__unsigned__u6 = pending; const positive_output_block_count_t__base__first : integer = pending; const positive_output_block_count_t__base__last : integer = pending; const output_block_count_t__base__first : integer = pending; const output_block_count_t__base__last : integer = pending; const output_byte_count_t__base__first : integer = pending; const output_byte_count_t__base__last : integer = pending; const positive_block_512_count_t__base__first : integer = pending; const 
positive_block_512_count_t__base__last : integer = pending; const skein_512_block_bytes_index__base__first : integer = pending; const skein_512_block_bytes_index__base__last : integer = pending; const skein_512_block_bytes_count__base__first : integer = pending; const skein_512_block_bytes_count__base__last : integer = pending; const skein_512_state_words_index__base__first : integer = pending; const skein_512_state_words_index__base__last : integer = pending; const initialized_hash_bit_length__base__first : integer = pending; const initialized_hash_bit_length__base__last : integer = pending; const hash_bit_length__base__first : integer = pending; const hash_bit_length__base__last : integer = pending; const spark__crypto__word_count_t__base__first : integer = pending; const spark__crypto__word_count_t__base__last : integer = pending; const spark__unsigned__u64__base__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__base__last : interfaces__unsigned_64 = pending; const spark__unsigned__u32__base__first : interfaces__unsigned_32 = pending; const spark__unsigned__u32__base__last : interfaces__unsigned_32 = pending; const spark__unsigned__u16__base__first : interfaces__unsigned_16 = pending; const spark__unsigned__u16__base__last : interfaces__unsigned_16 = pending; const spark__unsigned__byte__base__first : interfaces__unsigned_8 = pending; const spark__unsigned__byte__base__last : interfaces__unsigned_8 = pending; const spark__unsigned__u7__base__first : spark__unsigned__u7 = pending; const spark__unsigned__u7__base__last : spark__unsigned__u7 = pending; const spark__unsigned__u6__base__first : spark__unsigned__u6 = pending; const spark__unsigned__u6__base__last : spark__unsigned__u6 = pending; const interfaces__unsigned_64__base__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_32__base__first : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__base__last : interfaces__unsigned_32 = pending; const interfaces__unsigned_16__base__first : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__base__last : interfaces__unsigned_16 = pending; const interfaces__unsigned_8__base__first : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__base__last : interfaces__unsigned_8 = pending; const natural__base__first : integer = pending; const natural__base__last : integer = pending; const integer__base__first : integer = pending; const integer__base__last : integer = pending; const positive_output_block_count_t__first : integer = pending; const positive_output_block_count_t__last : integer = pending; const positive_output_block_count_t__size : integer = pending; const output_block_count_t__first : integer = pending; const output_block_count_t__last : integer = pending; const output_block_count_t__size : integer = pending; const output_byte_count_t__first : integer = pending; const output_byte_count_t__last : integer = pending; const output_byte_count_t__size : integer = pending; const context_header__size : integer = pending; const tweak_value__size : integer = pending; const result__index__subtype__1__first : integer = pending; const result__index__subtype__1__last : integer = pending; const skein_512_context__size : integer = pending; const positive_block_512_count_t__first : integer = pending; const positive_block_512_count_t__last : integer = pending; const positive_block_512_count_t__size : integer = pending; const 
skein_512_block_bytes_index__first : integer = pending; const skein_512_block_bytes_index__last : integer = pending; const skein_512_block_bytes_index__size : integer = pending; const skein_512_block_bytes_count__first : integer = pending; const skein_512_block_bytes_count__last : integer = pending; const skein_512_block_bytes_count__size : integer = pending; const skein_512_state_words_index__first : integer = pending; const skein_512_state_words_index__last : integer = pending; const skein_512_state_words_index__size : integer = pending; const initialized_hash_bit_length__first : integer = pending; const initialized_hash_bit_length__last : integer = pending; const initialized_hash_bit_length__size : integer = pending; const hash_bit_length__first : integer = pending; const hash_bit_length__last : integer = pending; const hash_bit_length__size : integer = pending; const spark__crypto__word_count_t__first : integer = pending; const spark__crypto__word_count_t__last : integer = pending; const spark__crypto__word_count_t__size : integer = pending; const spark__unsigned__u64__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__last : interfaces__unsigned_64 = pending; const spark__unsigned__u64__modulus : integer = pending; const spark__unsigned__u64__size : integer = pending; const spark__unsigned__u32__first : interfaces__unsigned_32 = pending; const spark__unsigned__u32__last : interfaces__unsigned_32 = pending; const spark__unsigned__u32__modulus : integer = pending; const spark__unsigned__u32__size : integer = pending; const spark__unsigned__u16__first : interfaces__unsigned_16 = pending; const spark__unsigned__u16__last : interfaces__unsigned_16 = pending; const spark__unsigned__u16__modulus : integer = pending; const spark__unsigned__u16__size : integer = pending; const spark__unsigned__byte__first : interfaces__unsigned_8 = pending; const spark__unsigned__byte__last : interfaces__unsigned_8 = pending; const spark__unsigned__byte__modulus : integer = pending; const spark__unsigned__byte__size : integer = pending; const spark__unsigned__u7__first : spark__unsigned__u7 = pending; const spark__unsigned__u7__last : spark__unsigned__u7 = pending; const spark__unsigned__u7__modulus : integer = pending; const spark__unsigned__u7__size : integer = pending; const spark__unsigned__u6__first : spark__unsigned__u6 = pending; const spark__unsigned__u6__last : spark__unsigned__u6 = pending; const spark__unsigned__u6__modulus : integer = pending; const spark__unsigned__u6__size : integer = pending; const interfaces__unsigned_64__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__modulus : integer = pending; const interfaces__unsigned_64__size : integer = pending; const interfaces__unsigned_32__first : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__last : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__modulus : integer = pending; const interfaces__unsigned_32__size : integer = pending; const interfaces__unsigned_16__first : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__last : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__modulus : integer = pending; const interfaces__unsigned_16__size : integer = pending; const interfaces__unsigned_8__first : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__last : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__modulus : integer = pending; const 
interfaces__unsigned_8__size : integer = pending; const natural__first : integer = pending; const natural__last : integer = pending; const natural__size : integer = pending; const integer__first : integer = pending; const integer__last : integer = pending; const integer__size : integer = pending; var ctx : skein_512_context; var result : spark__crypto__byte_seq; var local_ctx : skein_512_context; var blocks_done : integer; var blocks_required : integer; var byte_count : integer; var x : spark__crypto__u64_seq; function hash_bit_len_of(skein_512_context) : integer; function byte_count_of(skein_512_context) : integer; var result__6 : spark__crypto__byte_seq; var local_ctx__5 : skein_512_context; var local_ctx__4 : skein_512_context; var local_ctx__3 : skein_512_context; var local_ctx__2 : skein_512_context; var local_ctx__1 : skein_512_context; end; spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block.log0000644000175000017500000000132711712513676030267 0ustar eugeneugenSPARK Simplifier Pro Edition Reading skein_512_process_block.fdl (for inherited FDL type declarations) Reading skein.rlu (for user-defined proof rules) Processing skein_512_process_block.vcg ... Simplified VC: 1 - All conclusions proved Simplified VC: 2 - All conclusions proved Simplified VC: 3 - 1 conclusion remains unproven Simplified VC: 4 - 3 conclusions remain unproven Simplified VC: 5 - All conclusions proved Simplified VC: 6 - All conclusions proved Simplified VC: 7 - All conclusions proved Simplified VC: 8 - All conclusions proved Simplified VC: 9 - All conclusions proved Simplified VC: 10 - All conclusions proved Automatic simplification completed. Simplified output sent to skein_512_process_block.siv. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_init.vlg0000644000175000017500000000326311712765060026406 0ustar eugeneugen Non-option args: skein_512_init Option args: -fuse-concls \ -decls=[SUPRESSED IN PLAIN MODE] \ -unique-working-files \ -rules=[SUPRESSED IN PLAIN MODE] \ -rules=[SUPRESSED IN PLAIN MODE] \ -elim-enums \ -ground-eval-exp \ -abstract-exp \ -abstract-divmod \ -utick \ -gtick \ -longtick \ -echo-final-stats \ -csv-reports-include-unit-kind \ -level=warning \ -bit-type \ -bit-type-bool-eq-to-iff \ -refine-types \ -refine-int-subrange-type \ -abstract-arrays-records-late \ -elim-array-constructors \ -add-array-select-box-update-axioms \ -abstract-array-box-updates \ -add-array-select-update-axioms \ -abstract-array-select-updates \ -abstract-array-types \ -abstract-record-types \ -abstract-bit-ops \ -abstract-bit-valued-eqs \ -abstract-bit-valued-int-le \ -elim-bit-type-and-consts \ -abstract-reals \ -lift-quants \ -strip-quantifier-patterns \ -elim-type-aliases \ -interface-mode=smtlib \ -refine-bit-type-as-int-subtype \ -refine-bit-eq-equiv \ -elim-record-constructors \ -add-record-select-update-axioms \ -abstract-record-selects-updates \ -logic=AUFNIRA \ -report=skein_512_init \ -prover-command=[SUPRESSED IN PLAIN MODE] \ -siv \ -plain \ -read-unit-rlu-files \ -read-directory-rlu-files \ -delete-rules-with-undeclared-ids \ Total ERROR messages: 0 Total WARNING messages: 0 Summary Stats: true: 0 (-nan%) unproven: 0 (-nan%) error: 0 (-nan%) total: 0 spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_update.vsm0000644000175000017500000000006511712765060026737 0ustar eugeneugenskein_512_update,0,0,3,3,0,0,0,100.0, 0.0, 0.0, 0.0, 
spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_hash.vcg0000644000175000017500000004071711712513676026366 0ustar eugeneugen ******************************************************* Semantic Analysis of SPARK Text Examiner Pro Edition ******************************************************* function Skein.Skein_512_Hash For path(s) from start to run-time check associated with statement of line 979: function_skein_512_hash_1. H1: data__index__subtype__1__first = 0 . H2: data__index__subtype__1__last + skein_512_block_bytes_c < natural__last . H3: for_all(i___1: integer, ((i___1 >= data__index__subtype__1__first) and (i___1 <= data__index__subtype__1__last)) -> ((element( data, [i___1]) >= spark__unsigned__byte__first) and (element( data, [i___1]) <= spark__unsigned__byte__last))) . -> C1: 512 >= initialized_hash_bit_length__first . C2: 512 <= initialized_hash_bit_length__last . For path(s) from start to precondition check associated with statement of line 982: function_skein_512_hash_2. H1: data__index__subtype__1__first = 0 . H2: data__index__subtype__1__last + skein_512_block_bytes_c < natural__last . H3: for_all(i___1: integer, ((i___1 >= data__index__subtype__1__first) and (i___1 <= data__index__subtype__1__last)) -> ((element( data, [i___1]) >= spark__unsigned__byte__first) and (element( data, [i___1]) <= spark__unsigned__byte__last))) . H4: 512 >= initialized_hash_bit_length__first . H5: 512 <= initialized_hash_bit_length__last . H6: fld_hash_bit_len(fld_h(ctx__1)) >= initialized_hash_bit_length__first . H7: fld_hash_bit_len(fld_h(ctx__1)) <= initialized_hash_bit_length__last . H8: fld_hash_bit_len(fld_h(ctx__1)) = 512 . H9: fld_byte_count(fld_h(ctx__1)) = 0 . H10: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H11: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H12: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H13: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H14: fld_byte_count(fld_h(ctx__1)) >= natural__first . H15: fld_byte_count(fld_h(ctx__1)) <= natural__last . H16: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H17: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H18: true . H19: true . H20: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H21: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H22: true . H23: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H24: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H25: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H26: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H28: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H30: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . 
H31: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H32: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H33: fld_byte_count(fld_h(ctx__1)) >= natural__first . H34: fld_byte_count(fld_h(ctx__1)) <= natural__last . H35: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H36: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H37: true . H38: true . H39: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H40: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H41: true . H42: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H43: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H44: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H45: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H46: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H47: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H48: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H49: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . -> C1: fld_hash_bit_len(fld_h(ctx__1)) >= initialized_hash_bit_length__first . C2: fld_hash_bit_len(fld_h(ctx__1)) <= initialized_hash_bit_length__last . C3: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . C4: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . C5: data__index__subtype__1__first = 0 . C6: data__index__subtype__1__last < natural__last . C7: data__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . For path(s) from start to precondition check associated with statement of line 985: function_skein_512_hash_3. H1: data__index__subtype__1__first = 0 . H2: data__index__subtype__1__last + skein_512_block_bytes_c < natural__last . H3: for_all(i___1: integer, ((i___1 >= data__index__subtype__1__first) and (i___1 <= data__index__subtype__1__last)) -> ((element( data, [i___1]) >= spark__unsigned__byte__first) and (element( data, [i___1]) <= spark__unsigned__byte__last))) . H4: 512 >= initialized_hash_bit_length__first . H5: 512 <= initialized_hash_bit_length__last . H6: fld_hash_bit_len(fld_h(ctx__1)) >= initialized_hash_bit_length__first . H7: fld_hash_bit_len(fld_h(ctx__1)) <= initialized_hash_bit_length__last . H8: fld_hash_bit_len(fld_h(ctx__1)) = 512 . H9: fld_byte_count(fld_h(ctx__1)) = 0 . H10: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H11: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H12: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last))) . 
H13: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H14: fld_byte_count(fld_h(ctx__1)) >= natural__first . H15: fld_byte_count(fld_h(ctx__1)) <= natural__last . H16: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H17: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H18: true . H19: true . H20: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H21: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H22: true . H23: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H24: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H25: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H26: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H28: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H30: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H31: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H32: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H33: fld_byte_count(fld_h(ctx__1)) >= natural__first . H34: fld_byte_count(fld_h(ctx__1)) <= natural__last . H35: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H36: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H37: true . H38: true . H39: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H40: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H41: true . H42: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H43: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H44: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H45: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H46: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H47: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H48: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H49: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H50: fld_hash_bit_len(fld_h(ctx__1)) >= initialized_hash_bit_length__first . H51: fld_hash_bit_len(fld_h(ctx__1)) <= initialized_hash_bit_length__last . H52: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H53: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H54: data__index__subtype__1__first = 0 . H55: data__index__subtype__1__last < natural__last . H56: data__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . 
H57: fld_hash_bit_len(fld_h(ctx__2)) >= initialized_hash_bit_length__first . H58: fld_hash_bit_len(fld_h(ctx__2)) <= initialized_hash_bit_length__last . H59: fld_hash_bit_len(fld_h(ctx__2)) = fld_hash_bit_len(fld_h(ctx__1)) . H60: fld_byte_count(fld_h(ctx__2)) >= skein_512_block_bytes_count__first . H61: fld_byte_count(fld_h(ctx__2)) <= skein_512_block_bytes_count__last . H62: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H63: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H64: fld_byte_count(fld_h(ctx__2)) >= natural__first . H65: fld_byte_count(fld_h(ctx__2)) <= natural__last . H66: fld_hash_bit_len(fld_h(ctx__2)) >= hash_bit_length__first . H67: fld_hash_bit_len(fld_h(ctx__2)) <= hash_bit_length__last . H68: true . H69: true . H70: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u6__first . H71: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u6__last . H72: true . H73: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u7__first . H74: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u7__last . H75: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u16__first . H76: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u16__last . H77: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u32__first . H78: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u32__last . H79: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u64__first . H80: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u64__last . H81: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H82: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H83: fld_byte_count(fld_h(ctx__2)) >= natural__first . H84: fld_byte_count(fld_h(ctx__2)) <= natural__last . H85: fld_hash_bit_len(fld_h(ctx__2)) >= hash_bit_length__first . H86: fld_hash_bit_len(fld_h(ctx__2)) <= hash_bit_length__last . H87: true . H88: true . H89: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u6__first . H90: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u6__last . H91: true . H92: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u7__first . H93: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u7__last . H94: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u16__first . H95: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u16__last . H96: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u32__first . H97: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u32__last . 
H98: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u64__first . H99: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u64__last . -> C1: fld_hash_bit_len(fld_h(ctx__2)) >= initialized_hash_bit_length__first . C2: fld_hash_bit_len(fld_h(ctx__2)) <= initialized_hash_bit_length__last . C3: fld_byte_count(fld_h(ctx__2)) >= skein_512_block_bytes_count__first . C4: fld_byte_count(fld_h(ctx__2)) <= skein_512_block_bytes_count__last . C5: skein_512_state_bytes_index__first = 0 . C6: (fld_hash_bit_len(fld_h(ctx__2)) + 7) div 8 <= skein_512_state_bytes_index__last + 1 . For path(s) from start to finish: function_skein_512_hash_4. *** true . /* trivially true VC removed by Examiner */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_hash.vlg0000644000175000017500000000326311712765060026366 0ustar eugeneugen Non-option args: skein_512_hash Option args: -fuse-concls \ -decls=[SUPRESSED IN PLAIN MODE] \ -unique-working-files \ -rules=[SUPRESSED IN PLAIN MODE] \ -rules=[SUPRESSED IN PLAIN MODE] \ -elim-enums \ -ground-eval-exp \ -abstract-exp \ -abstract-divmod \ -utick \ -gtick \ -longtick \ -echo-final-stats \ -csv-reports-include-unit-kind \ -level=warning \ -bit-type \ -bit-type-bool-eq-to-iff \ -refine-types \ -refine-int-subrange-type \ -abstract-arrays-records-late \ -elim-array-constructors \ -add-array-select-box-update-axioms \ -abstract-array-box-updates \ -add-array-select-update-axioms \ -abstract-array-select-updates \ -abstract-array-types \ -abstract-record-types \ -abstract-bit-ops \ -abstract-bit-valued-eqs \ -abstract-bit-valued-int-le \ -elim-bit-type-and-consts \ -abstract-reals \ -lift-quants \ -strip-quantifier-patterns \ -elim-type-aliases \ -interface-mode=smtlib \ -refine-bit-type-as-int-subtype \ -refine-bit-eq-equiv \ -elim-record-constructors \ -add-record-select-update-axioms \ -abstract-record-selects-updates \ -logic=AUFNIRA \ -report=skein_512_hash \ -prover-command=[SUPRESSED IN PLAIN MODE] \ -siv \ -plain \ -read-unit-rlu-files \ -read-directory-rlu-files \ -delete-rules-with-undeclared-ids \ Total ERROR messages: 0 Total WARNING messages: 0 Summary Stats: true: 0 (-nan%) unproven: 0 (-nan%) error: 0 (-nan%) total: 0 spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final.rls0000644000175000017500000003021711712513676026547 0ustar eugeneugen /*********************************************************/ /*Proof Rule Declarations*/ /*Examiner Pro Edition*/ /*********************************************************/ /*procedure Skein.Skein_512_Final*/ rule_family skein_512_fi_rules: X requires [X:any] & X <= Y requires [X:ire, Y:ire] & X >= Y requires [X:ire, Y:ire]. skein_512_fi_rules(1): skein_512_block_bytes_c may_be_replaced_by 64. skein_512_fi_rules(2): skein_block_type_out may_be_replaced_by 63. skein_512_fi_rules(3): integer__size >= 0 may_be_deduced. skein_512_fi_rules(4): integer__first may_be_replaced_by -2147483648. skein_512_fi_rules(5): integer__last may_be_replaced_by 2147483647. skein_512_fi_rules(6): integer__base__first may_be_replaced_by -2147483648. skein_512_fi_rules(7): integer__base__last may_be_replaced_by 2147483647. skein_512_fi_rules(8): natural__size >= 0 may_be_deduced. skein_512_fi_rules(9): natural__first may_be_replaced_by 0. skein_512_fi_rules(10): natural__last may_be_replaced_by 2147483647. skein_512_fi_rules(11): natural__base__first may_be_replaced_by -2147483648. skein_512_fi_rules(12): natural__base__last may_be_replaced_by 2147483647. 
skein_512_fi_rules(13): interfaces__unsigned_8__size >= 0 may_be_deduced. skein_512_fi_rules(14): interfaces__unsigned_8__size may_be_replaced_by 8. skein_512_fi_rules(15): interfaces__unsigned_8__first may_be_replaced_by 0. skein_512_fi_rules(16): interfaces__unsigned_8__last may_be_replaced_by 255. skein_512_fi_rules(17): interfaces__unsigned_8__base__first may_be_replaced_by 0. skein_512_fi_rules(18): interfaces__unsigned_8__base__last may_be_replaced_by 255. skein_512_fi_rules(19): interfaces__unsigned_8__modulus may_be_replaced_by 256. skein_512_fi_rules(20): interfaces__unsigned_16__size >= 0 may_be_deduced. skein_512_fi_rules(21): interfaces__unsigned_16__size may_be_replaced_by 16. skein_512_fi_rules(22): interfaces__unsigned_16__first may_be_replaced_by 0. skein_512_fi_rules(23): interfaces__unsigned_16__last may_be_replaced_by 65535. skein_512_fi_rules(24): interfaces__unsigned_16__base__first may_be_replaced_by 0. skein_512_fi_rules(25): interfaces__unsigned_16__base__last may_be_replaced_by 65535. skein_512_fi_rules(26): interfaces__unsigned_16__modulus may_be_replaced_by 65536. skein_512_fi_rules(27): interfaces__unsigned_32__size >= 0 may_be_deduced. skein_512_fi_rules(28): interfaces__unsigned_32__size may_be_replaced_by 32. skein_512_fi_rules(29): interfaces__unsigned_32__first may_be_replaced_by 0. skein_512_fi_rules(30): interfaces__unsigned_32__last may_be_replaced_by 4294967295. skein_512_fi_rules(31): interfaces__unsigned_32__base__first may_be_replaced_by 0. skein_512_fi_rules(32): interfaces__unsigned_32__base__last may_be_replaced_by 4294967295. skein_512_fi_rules(33): interfaces__unsigned_32__modulus may_be_replaced_by 4294967296. skein_512_fi_rules(34): interfaces__unsigned_64__size >= 0 may_be_deduced. skein_512_fi_rules(35): interfaces__unsigned_64__size may_be_replaced_by 64. skein_512_fi_rules(36): interfaces__unsigned_64__first may_be_replaced_by 0. skein_512_fi_rules(37): interfaces__unsigned_64__last may_be_replaced_by 18446744073709551615. skein_512_fi_rules(38): interfaces__unsigned_64__base__first may_be_replaced_by 0. skein_512_fi_rules(39): interfaces__unsigned_64__base__last may_be_replaced_by 18446744073709551615. skein_512_fi_rules(40): interfaces__unsigned_64__modulus may_be_replaced_by 18446744073709551616. skein_512_fi_rules(41): spark__unsigned__u6__size >= 0 may_be_deduced. skein_512_fi_rules(42): spark__unsigned__u6__first may_be_replaced_by 0. skein_512_fi_rules(43): spark__unsigned__u6__last may_be_replaced_by 63. skein_512_fi_rules(44): spark__unsigned__u6__base__first may_be_replaced_by 0. skein_512_fi_rules(45): spark__unsigned__u6__base__last may_be_replaced_by 63. skein_512_fi_rules(46): spark__unsigned__u6__modulus may_be_replaced_by 64. skein_512_fi_rules(47): spark__unsigned__u7__size >= 0 may_be_deduced. skein_512_fi_rules(48): spark__unsigned__u7__first may_be_replaced_by 0. skein_512_fi_rules(49): spark__unsigned__u7__last may_be_replaced_by 127. skein_512_fi_rules(50): spark__unsigned__u7__base__first may_be_replaced_by 0. skein_512_fi_rules(51): spark__unsigned__u7__base__last may_be_replaced_by 127. skein_512_fi_rules(52): spark__unsigned__u7__modulus may_be_replaced_by 128. skein_512_fi_rules(53): spark__unsigned__byte__size >= 0 may_be_deduced. skein_512_fi_rules(54): spark__unsigned__byte__first may_be_replaced_by 0. skein_512_fi_rules(55): spark__unsigned__byte__last may_be_replaced_by 255. skein_512_fi_rules(56): spark__unsigned__byte__base__first may_be_replaced_by 0. 
skein_512_fi_rules(57): spark__unsigned__byte__base__last may_be_replaced_by 255. skein_512_fi_rules(58): spark__unsigned__byte__modulus may_be_replaced_by 256. skein_512_fi_rules(59): spark__unsigned__u16__size >= 0 may_be_deduced. skein_512_fi_rules(60): spark__unsigned__u16__first may_be_replaced_by 0. skein_512_fi_rules(61): spark__unsigned__u16__last may_be_replaced_by 65535. skein_512_fi_rules(62): spark__unsigned__u16__base__first may_be_replaced_by 0. skein_512_fi_rules(63): spark__unsigned__u16__base__last may_be_replaced_by 65535. skein_512_fi_rules(64): spark__unsigned__u16__modulus may_be_replaced_by 65536. skein_512_fi_rules(65): spark__unsigned__u32__size >= 0 may_be_deduced. skein_512_fi_rules(66): spark__unsigned__u32__first may_be_replaced_by 0. skein_512_fi_rules(67): spark__unsigned__u32__last may_be_replaced_by 4294967295. skein_512_fi_rules(68): spark__unsigned__u32__base__first may_be_replaced_by 0. skein_512_fi_rules(69): spark__unsigned__u32__base__last may_be_replaced_by 4294967295. skein_512_fi_rules(70): spark__unsigned__u32__modulus may_be_replaced_by 4294967296. skein_512_fi_rules(71): spark__unsigned__u64__size >= 0 may_be_deduced. skein_512_fi_rules(72): spark__unsigned__u64__first may_be_replaced_by 0. skein_512_fi_rules(73): spark__unsigned__u64__last may_be_replaced_by 18446744073709551615. skein_512_fi_rules(74): spark__unsigned__u64__base__first may_be_replaced_by 0. skein_512_fi_rules(75): spark__unsigned__u64__base__last may_be_replaced_by 18446744073709551615. skein_512_fi_rules(76): spark__unsigned__u64__modulus may_be_replaced_by 18446744073709551616. skein_512_fi_rules(77): spark__crypto__word_count_t__size >= 0 may_be_deduced. skein_512_fi_rules(78): spark__crypto__word_count_t__first may_be_replaced_by 0. skein_512_fi_rules(79): spark__crypto__word_count_t__last may_be_replaced_by 268435455. skein_512_fi_rules(80): spark__crypto__word_count_t__base__first may_be_replaced_by -2147483648. skein_512_fi_rules(81): spark__crypto__word_count_t__base__last may_be_replaced_by 2147483647. skein_512_fi_rules(82): hash_bit_length__size >= 0 may_be_deduced. skein_512_fi_rules(83): hash_bit_length__first may_be_replaced_by 0. skein_512_fi_rules(84): hash_bit_length__last may_be_replaced_by 2147483640. skein_512_fi_rules(85): hash_bit_length__base__first may_be_replaced_by -2147483648. skein_512_fi_rules(86): hash_bit_length__base__last may_be_replaced_by 2147483647. skein_512_fi_rules(87): initialized_hash_bit_length__size >= 0 may_be_deduced. skein_512_fi_rules(88): initialized_hash_bit_length__first may_be_replaced_by 1. skein_512_fi_rules(89): initialized_hash_bit_length__last may_be_replaced_by 2147483640. skein_512_fi_rules(90): initialized_hash_bit_length__base__first may_be_replaced_by -2147483648. skein_512_fi_rules(91): initialized_hash_bit_length__base__last may_be_replaced_by 2147483647. skein_512_fi_rules(92): skein_512_state_words_index__size >= 0 may_be_deduced. skein_512_fi_rules(93): skein_512_state_words_index__first may_be_replaced_by 0. skein_512_fi_rules(94): skein_512_state_words_index__last may_be_replaced_by 7. skein_512_fi_rules(95): skein_512_state_words_index__base__first may_be_replaced_by -2147483648. skein_512_fi_rules(96): skein_512_state_words_index__base__last may_be_replaced_by 2147483647. skein_512_fi_rules(97): skein_512_block_bytes_count__size >= 0 may_be_deduced. skein_512_fi_rules(98): skein_512_block_bytes_count__first may_be_replaced_by 0. skein_512_fi_rules(99): skein_512_block_bytes_count__last may_be_replaced_by 64. 
skein_512_fi_rules(100): skein_512_block_bytes_count__base__first may_be_replaced_by -2147483648. skein_512_fi_rules(101): skein_512_block_bytes_count__base__last may_be_replaced_by 2147483647. skein_512_fi_rules(102): skein_512_block_bytes_index__size >= 0 may_be_deduced. skein_512_fi_rules(103): skein_512_block_bytes_index__first may_be_replaced_by 0. skein_512_fi_rules(104): skein_512_block_bytes_index__last may_be_replaced_by 63. skein_512_fi_rules(105): skein_512_block_bytes_index__base__first may_be_replaced_by -2147483648. skein_512_fi_rules(106): skein_512_block_bytes_index__base__last may_be_replaced_by 2147483647. skein_512_fi_rules(107): positive_block_512_count_t__size >= 0 may_be_deduced. skein_512_fi_rules(108): positive_block_512_count_t__first may_be_replaced_by 1. skein_512_fi_rules(109): positive_block_512_count_t__last may_be_replaced_by 33554431. skein_512_fi_rules(110): positive_block_512_count_t__base__first may_be_replaced_by -2147483648. skein_512_fi_rules(111): positive_block_512_count_t__base__last may_be_replaced_by 2147483647. skein_512_fi_rules(112): skein_512_context__size >= 0 may_be_deduced. skein_512_fi_rules(113): A = B may_be_deduced_from [goal(checktype(A,skein_512_context)), goal(checktype(B,skein_512_context)), fld_h(A) = fld_h(B), fld_x(A) = fld_x(B), fld_b(A) = fld_b(B)]. skein_512_fi_rules(114): result__index__subtype__1__first >= natural__first may_be_deduced. skein_512_fi_rules(115): result__index__subtype__1__last <= natural__last may_be_deduced. skein_512_fi_rules(116): result__index__subtype__1__first <= result__index__subtype__1__last may_be_deduced. skein_512_fi_rules(117): result__index__subtype__1__last >= natural__first may_be_deduced. skein_512_fi_rules(118): result__index__subtype__1__first <= natural__last may_be_deduced. skein_512_fi_rules(119): tweak_value__size >= 0 may_be_deduced. skein_512_fi_rules(120): tweak_value__size may_be_replaced_by 128. skein_512_fi_rules(121): A = B may_be_deduced_from [goal(checktype(A,tweak_value)), goal(checktype(B,tweak_value)), fld_byte_count_lsb(A) = fld_byte_count_lsb(B), fld_byte_count_msb(A) = fld_byte_count_msb(B), fld_reserved(A) = fld_reserved(B), fld_tree_level(A) = fld_tree_level(B), fld_bit_pad(A) = fld_bit_pad(B), fld_field_type(A) = fld_field_type(B), fld_first_block(A) = fld_first_block(B), fld_final_block(A) = fld_final_block(B)]. skein_512_fi_rules(122): context_header__size >= 0 may_be_deduced. skein_512_fi_rules(123): A = B may_be_deduced_from [goal(checktype(A,context_header)), goal(checktype(B,context_header)), fld_tweak_words(A) = fld_tweak_words(B), fld_hash_bit_len(A) = fld_hash_bit_len(B), fld_byte_count(A) = fld_byte_count(B)]. skein_512_fi_rules(124): output_byte_count_t__size >= 0 may_be_deduced. skein_512_fi_rules(125): output_byte_count_t__first may_be_replaced_by 1. skein_512_fi_rules(126): output_byte_count_t__last may_be_replaced_by 268435455. skein_512_fi_rules(127): output_byte_count_t__base__first may_be_replaced_by -2147483648. skein_512_fi_rules(128): output_byte_count_t__base__last may_be_replaced_by 2147483647. skein_512_fi_rules(129): output_block_count_t__size >= 0 may_be_deduced. skein_512_fi_rules(130): output_block_count_t__first may_be_replaced_by 0. skein_512_fi_rules(131): output_block_count_t__last may_be_replaced_by 4194304. skein_512_fi_rules(132): output_block_count_t__base__first may_be_replaced_by -2147483648. skein_512_fi_rules(133): output_block_count_t__base__last may_be_replaced_by 2147483647. 
skein_512_fi_rules(134): positive_output_block_count_t__size >= 0 may_be_deduced. skein_512_fi_rules(135): positive_output_block_count_t__first may_be_replaced_by 1. skein_512_fi_rules(136): positive_output_block_count_t__last may_be_replaced_by 4194304. skein_512_fi_rules(137): positive_output_block_count_t__base__first may_be_replaced_by -2147483648. skein_512_fi_rules(138): positive_output_block_count_t__base__last may_be_replaced_by 2147483647. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/get_64_lsb_first.slg0000644000175000017500000127443011712513676027032 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Get_64_LSB_First RRS The following user defined rule files have been read: &&& skein.rlu SEM No semantic checks are performed on the rules. @@@@@@@@@@ VC: procedure_get_64_lsb_first_1. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule get_64_lsb_f_rules(43). This was achieved by replacing all occurrences of spark__crypto__word_count_t__first by: 0. New C1: true -S- Applied substitution rule get_64_lsb_f_rules(44). This was achieved by replacing all occurrences of spark__crypto__word_count_t__last by: 268435455. New C2: true *** Proved C1: true *** Proved C2: true *** PROVED VC. @@@@@@@@@@ VC: procedure_get_64_lsb_first_2. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) *** Proved C1: src_offset >= natural__first using hypothesis H9. *** Proved C2: src_offset <= natural__last using hypothesis H10. *** PROVED VC. @@@@@@@@@@ VC: procedure_get_64_lsb_first_3. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H13 has been replaced by "true". (It is already present, as H9) . --- Hypothesis H14 has been replaced by "true". (It is already present, as H10). *** Proved C2: src_offset <= src__index__subtype__1__last using hypothesis H3. *** Proved C4: src_offset + 1 <= src__index__subtype__1__last using hypothesis H6. *** Proved C6: src_offset + 2 <= src__index__subtype__1__last using hypothesis H6. *** Proved C8: src_offset + 3 <= src__index__subtype__1__last using hypothesis H6. *** Proved C10: src_offset + 4 <= src__index__subtype__1__last using hypothesis H6. *** Proved C12: src_offset + 5 <= src__index__subtype__1__last using hypothesis H6. *** Proved C14: src_offset + 6 <= src__index__subtype__1__last using hypothesis H6. *** Proved C16: src_offset + 7 <= src__index__subtype__1__last using hypothesis H6. -S- Applied substitution rule get_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. 
New H9: src_offset >= 0 -S- Applied substitution rule get_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: src_offset + dst__index__subtype__1__last * 8 <= 2147483647 New H10: src_offset <= 2147483647 -S- Applied substitution rule get_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule get_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 255) -S- Applied substitution rule get_64_lsb_f_rules(43). This was achieved by replacing all occurrences of spark__crypto__word_count_t__first by: 0. New H11: true -S- Applied substitution rule get_64_lsb_f_rules(44). This was achieved by replacing all occurrences of spark__crypto__word_count_t__last by: 268435455. New H12: true *** Proved C1: src_offset >= src__index__subtype__1__first using hypotheses H1 & H9. *** Proved C3: src_offset + 1 >= src__index__subtype__1__first using hypotheses H1 & H9. *** Proved C5: src_offset + 2 >= src__index__subtype__1__first using hypotheses H1 & H9. *** Proved C7: src_offset + 3 >= src__index__subtype__1__first using hypotheses H1 & H9. *** Proved C9: src_offset + 4 >= src__index__subtype__1__first using hypotheses H1 & H9. *** Proved C11: src_offset + 5 >= src__index__subtype__1__first using hypotheses H1 & H9. *** Proved C13: src_offset + 6 >= src__index__subtype__1__first using hypotheses H1 & H9. *** Proved C15: src_offset + 7 >= src__index__subtype__1__first using hypotheses H1 & H9. *** PROVED VC. @@@@@@@@@@ VC: procedure_get_64_lsb_first_4. @@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> spark__unsigned__u64__first <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) %%% Simplified H10 on reading formula in, to give: %%% H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). 
%%% Simplified C3 on reading formula in, to give: %%% C3: src_index + 9 >= src__index__subtype__1__first %%% Simplified C4 on reading formula in, to give: %%% C4: src_index + 9 <= src__index__subtype__1__last %%% Simplified C5 on reading formula in, to give: %%% C5: src_index + 10 >= src__index__subtype__1__first %%% Simplified C6 on reading formula in, to give: %%% C6: src_index + 10 <= src__index__subtype__1__last %%% Simplified C7 on reading formula in, to give: %%% C7: src_index + 11 >= src__index__subtype__1__first %%% Simplified C8 on reading formula in, to give: %%% C8: src_index + 11 <= src__index__subtype__1__last %%% Simplified C9 on reading formula in, to give: %%% C9: src_index + 12 >= src__index__subtype__1__first %%% Simplified C10 on reading formula in, to give: %%% C10: src_index + 12 <= src__index__subtype__1__last %%% Simplified C11 on reading formula in, to give: %%% C11: src_index + 13 >= src__index__subtype__1__first %%% Simplified C12 on reading formula in, to give: %%% C12: src_index + 13 <= src__index__subtype__1__last %%% Simplified C13 on reading formula in, to give: %%% C13: src_index + 14 >= src__index__subtype__1__first %%% Simplified C14 on reading formula in, to give: %%% C14: src_index + 14 <= src__index__subtype__1__last %%% Simplified C15 on reading formula in, to give: %%% C15: src_index + 15 >= src__index__subtype__1__first %%% Simplified C16 on reading formula in, to give: %%% C16: src_index + 15 <= src__index__subtype__1__last >>> Restructured hypothesis H20 into: >>> H20: dst_index <> dst__index__subtype__1__last --- Attempted addition of new hypothesis: dst_index + 1 <= natural__last eliminated: this already exists (as H21). +++ Using "A->B, A |- B" on hypotheses H8 & H20 yields a new hypothesis: +++ H21: dst_index + 1 <= natural__last --- Attempted addition of new hypothesis: src_index + 8 <= natural__last eliminated: this already exists (as H24). +++ Using "A->B, A |- B" on hypotheses H9 & H20 yields a new hypothesis: +++ H24: src_index + 8 <= natural__last -S- Applied substitution rule get_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H11: src_offset >= 0 New H25: src_index >= - 8 -S- Applied substitution rule get_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H8: dst_index <> dst__index__subtype__1__last -> dst_index <= 2147483646 New H9: dst_index <> dst__index__subtype__1__last -> src_index <= 2147483639 New H12: src_offset <= 2147483647 New H19: src_offset + dst__index__subtype__1__last * 8 <= 2147483647 New H21: dst_index <= 2147483646 New H24: src_index <= 2147483639 -S- Applied substitution rule get_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule get_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 255) -S- Applied substitution rule get_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> 0 <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) -S- Applied substitution rule get_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> 0 <= element(dst, [i_]) and element(dst, [i_]) <= 18446744073709551615) -S- Applied substitution rule get_64_lsb_f_rules(43). This was achieved by replacing all occurrences of spark__crypto__word_count_t__first by: 0. New H22: dst_index >= - 1 -S- Applied substitution rule get_64_lsb_f_rules(44). This was achieved by replacing all occurrences of spark__crypto__word_count_t__last by: 268435455. New H4: dst__index__subtype__1__last <= 268435455 New H23: dst_index <= 268435454 %%% Hypotheses H3 & H20 together imply that dst_index < dst__index__subtype__1__last. H3 & H20 have therefore been deleted and a new H27 added to this effect. *** Proved C1: src_index + 8 >= src__index__subtype__1__first using hypotheses H6, H11 & H13. *** Proved C3: src_index + 9 >= src__index__subtype__1__first using hypotheses H6, H11 & H13. *** Proved C5: src_index + 10 >= src__index__subtype__1__first using hypotheses H6, H11 & H13. *** Proved C7: src_index + 11 >= src__index__subtype__1__first using hypotheses H6, H11 & H13. *** Proved C9: src_index + 12 >= src__index__subtype__1__first using hypotheses H6, H11 & H13. *** Proved C11: src_index + 13 >= src__index__subtype__1__first using hypotheses H6, H11 & H13. *** Proved C13: src_index + 14 >= src__index__subtype__1__first using hypotheses H6, H11 & H13. *** Proved C15: src_index + 15 >= src__index__subtype__1__first using hypotheses H6, H11 & H13. --- Eliminated hypothesis H26 (true-hypothesis). >>> Using "A->B, A |- B" on H8, given H27, we simplify the former to: >>> H8: dst_index <= 2147483646 >>> Using "A->B, A |- B" on H9, given H27, we simplify the former to: >>> H9: src_index <= 2147483639 --- Eliminated hypothesis H7 (redundant, given H5 & H27). --- Eliminated hypothesis H12 (redundant, given H6 & H24). --- Eliminated hypothesis H15 (redundant, given H18). --- Eliminated hypothesis H21 (redundant, given H23). --- Eliminated hypothesis H22 (redundant, given H2 & H14). --- Eliminated hypothesis H25 (redundant, given H6 & H11). -S- Eliminated hypothesis H5. This was achieved by replacing all occurrences of src_index by: src_offset + dst_index * 8. 
New H6: src_offset + dst_index * 8 >= src_offset New H24: src_offset + dst_index * 8 <= 2147483639 New H9: src_offset + dst_index * 8 <= 2147483639 New C2: src_offset + dst_index * 8 + 8 <= src__index__subtype__1__last New C4: src_offset + dst_index * 8 + 9 <= src__index__subtype__1__last New C6: src_offset + dst_index * 8 + 10 <= src__index__subtype__1__last New C8: src_offset + dst_index * 8 + 11 <= src__index__subtype__1__last New C10: src_offset + dst_index * 8 + 12 <= src__index__subtype__1__last New C12: src_offset + dst_index * 8 + 13 <= src__index__subtype__1__last New C14: src_offset + dst_index * 8 + 14 <= src__index__subtype__1__last New C16: src_offset + dst_index * 8 + 15 <= src__index__subtype__1__last +++ New H28: integer__size >= 0 +++ New H29: natural__size >= 0 +++ New H30: spark__unsigned__byte__size >= 0 +++ New H31: spark__unsigned__u64__size >= 0 +++ New H32: spark__unsigned__shift_count__size >= 0 +++ New H33: spark__crypto__word_count_t__size >= 0 +++ New H34: dst__index__subtype__1__first <= dst__index__subtype__1__last +++ New H35: src__index__subtype__1__first <= src__index__subtype__1__last +++ New H36: src__index__subtype__1__first >= 0 +++ New H37: src__index__subtype__1__last >= 0 +++ New H38: src__index__subtype__1__last <= 2147483647 +++ New H39: src__index__subtype__1__first <= 2147483647 +++ New H40: dst__index__subtype__1__first >= 0 +++ New H41: dst__index__subtype__1__last >= 0 --- Attempted addition of new hypothesis: dst__index__subtype__1__last <= 268435455 eliminated: this already exists (as H4). +++ New H4: dst__index__subtype__1__last <= 268435455 +++ New H42: dst__index__subtype__1__first <= 268435455 @@@@@@@@@@ VC: procedure_get_64_lsb_first_5. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H13 has been replaced by "true". (It is already present, as H9) . --- Hypothesis H14 has been replaced by "true". (It is already present, as H10). --- Hypothesis H16 has been replaced by "true". (It is already present, as H3) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . *** Proved C7: src_offset + 1 >= src__index__subtype__1__first using hypothesis H17. *** Proved C8: src_offset + 1 <= src__index__subtype__1__last using hypothesis H18. *** Proved C13: src_offset >= src__index__subtype__1__first using hypothesis H15. *** Proved C14: src_offset <= src__index__subtype__1__last using hypothesis H3. *** Proved C15: 0 >= dst__index__subtype__1__first using hypothesis H2. -S- Applied substitution rule get_64_lsb_f_rules(38). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule get_64_lsb_f_rules(39). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule get_64_lsb_f_rules(20). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(src, [src_offset + 1]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(21). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(src, [src_offset + 1]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(32). 
This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New C5: element(src, [src_offset + 1]) >= 0 New C11: element(src, [src_offset]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New C6: element(src, [src_offset + 1]) <= 18446744073709551615 New C12: element(src, [src_offset]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(4). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New C9: src_offset >= - 2147483649 -S- Applied substitution rule get_64_lsb_f_rules(5). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New C10: src_offset <= 2147483646 *** Proved C1: true *** Proved C2: true *** Proved C9: src_offset >= - 2147483649 using hypotheses H1 & H15. -S- Applied substitution rule get_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H9: src_offset >= 0 -S- Applied substitution rule get_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: src_offset + dst__index__subtype__1__last * 8 <= 2147483647 New H10: src_offset <= 2147483647 -S- Applied substitution rule get_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule get_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 255) -S- Applied substitution rule get_64_lsb_f_rules(43). This was achieved by replacing all occurrences of spark__crypto__word_count_t__first by: 0. New H11: true -S- Applied substitution rule get_64_lsb_f_rules(44). This was achieved by replacing all occurrences of spark__crypto__word_count_t__last by: 268435455. New H12: true *** Proved C3: element(src, [src_offset + 1]) >= 0 using hypotheses H8, H17 & H18. *** Proved C4: element(src, [src_offset + 1]) <= 18446744073709551615 using hypotheses H8, H17 & H18. *** Proved C5: element(src, [src_offset + 1]) >= 0 using hypotheses H8, H17 & H18. *** Proved C11: element(src, [src_offset]) >= 0 using hypotheses H3, H8 & H15. *** Proved C6: element(src, [src_offset + 1]) <= 18446744073709551615 using hypotheses H8, H17 & H18. *** Proved C12: element(src, [src_offset]) <= 18446744073709551615 using hypotheses H3, H8 & H15. --- Eliminated hypothesis H13 (true-hypothesis). --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H16 (true-hypothesis). --- Eliminated hypothesis H30 (true-hypothesis). --- Eliminated hypothesis H11 (true-hypothesis). --- Eliminated hypothesis H12 (true-hypothesis). --- Eliminated hypothesis H3 (redundant, given H6). --- Eliminated hypothesis H17 (redundant, given H15). --- Eliminated hypothesis H18 (redundant, given H6). --- Eliminated hypothesis H19 (redundant, given H15). --- Eliminated hypothesis H20 (redundant, given H6). --- Eliminated hypothesis H21 (redundant, given H15). --- Eliminated hypothesis H22 (redundant, given H6). --- Eliminated hypothesis H23 (redundant, given H15). 
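The conclusions in VC 5 and its successors are the run-time checks for the individual byte reads element(src, [src_offset + k]) and for the spark__unsigned__shift_left_64 applications with shift counts 8, 16, 24, ...: each read needs an index check against src__index__subtype__1, each converted byte a range check against Unsigned_64 / spark__unsigned__u64, and each shift count a check against the 0 .. 64 shift-count subtype. A minimal, compilable sketch of the kind of loop body that generates such checks follows; the type names, the literal bounds and the use of "+" to combine the shifted bytes are hypothetical stand-ins for the SPARK crypto declarations named in the log, not the actual package source.

   with Interfaces; use Interfaces;

   procedure Get_64_LSB_First_Sketch is
      --  Hypothetical stand-ins for the types named in the log
      --  (spark__unsigned__byte, spark__unsigned__u64 and the Src/Dst
      --  index subtypes); the real declarations live elsewhere.
      type Byte is range 0 .. 255;
      type Byte_Seq is array (Natural range <>) of Byte;
      type U64_Seq is array (Natural range <>) of Unsigned_64;

      Src        : constant Byte_Seq (0 .. 63) := (others => 16#AB#);
      Dst        : U64_Seq (0 .. 7);
      Src_Offset : constant Natural := 0;
   begin
      for Dst_Index in Dst'Range loop
         declare
            Src_Index : constant Natural := Src_Offset + Dst_Index * 8;
         begin
            --  Each Src (Src_Index + K) read yields an index check against
            --  Src'First .. Src'Last (the C7 .. C10-style conclusions), and
            --  each conversion/shift yields range checks against Unsigned_64
            --  and the 0 .. 64 shift-count subtype (the C1 .. C6-style ones).
            Dst (Dst_Index) :=
              Unsigned_64 (Src (Src_Index)) +
              Shift_Left (Unsigned_64 (Src (Src_Index + 1)), 8) +
              Shift_Left (Unsigned_64 (Src (Src_Index + 2)), 16) +
              Shift_Left (Unsigned_64 (Src (Src_Index + 3)), 24) +
              Shift_Left (Unsigned_64 (Src (Src_Index + 4)), 32) +
              Shift_Left (Unsigned_64 (Src (Src_Index + 5)), 40) +
              Shift_Left (Unsigned_64 (Src (Src_Index + 6)), 48) +
              Shift_Left (Unsigned_64 (Src (Src_Index + 7)), 56);
         end;
      end loop;
   end Get_64_LSB_First_Sketch;

In the log the same reads appear both at offsets 0 .. 4 from src_offset (VCs 5, 7, 9, 11, presumably the entry pass) and at offsets 8 .. 11 from src_index = src_offset + dst_index * 8 (VCs 6, 8, 10, presumably the general loop step).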
--- Eliminated hypothesis H24 (redundant, given H6). --- Eliminated hypothesis H25 (redundant, given H15). --- Eliminated hypothesis H26 (redundant, given H6). --- Eliminated hypothesis H27 (redundant, given H15). --- Eliminated hypothesis H28 (redundant, given H6). --- Eliminated hypothesis H29 (redundant, given H15). *** Proved C16: 0 <= dst__index__subtype__1__last *** Proved C10: src_offset <= 2147483646 using hypothesis H6. *** PROVED VC. @@@@@@@@@@ VC: procedure_get_64_lsb_first_6. @@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> spark__unsigned__u64__first <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) %%% Simplified H10 on reading formula in, to give: %%% H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). %%% Simplified H29 on reading formula in, to give: %%% H29: src_index + 9 >= src__index__subtype__1__first %%% Simplified H30 on reading formula in, to give: %%% H30: src_index + 9 <= src__index__subtype__1__last %%% Simplified H31 on reading formula in, to give: %%% H31: src_index + 10 >= src__index__subtype__1__first %%% Simplified H32 on reading formula in, to give: %%% H32: src_index + 10 <= src__index__subtype__1__last %%% Simplified H33 on reading formula in, to give: %%% H33: src_index + 11 >= src__index__subtype__1__first %%% Simplified H34 on reading formula in, to give: %%% H34: src_index + 11 <= src__index__subtype__1__last %%% Simplified H35 on reading formula in, to give: %%% H35: src_index + 12 >= src__index__subtype__1__first %%% Simplified H36 on reading formula in, to give: %%% H36: src_index + 12 <= src__index__subtype__1__last %%% Simplified H37 on reading formula in, to give: %%% H37: src_index + 13 >= src__index__subtype__1__first %%% Simplified H38 on reading formula in, to give: %%% H38: src_index + 13 <= src__index__subtype__1__last %%% Simplified H39 on reading formula in, to give: %%% H39: src_index + 14 >= src__index__subtype__1__first %%% Simplified H40 on reading formula in, to give: %%% H40: src_index + 14 <= src__index__subtype__1__last %%% Simplified H41 on reading formula in, to give: %%% H41: src_index + 15 >= src__index__subtype__1__first %%% Simplified H42 on reading formula in, to give: %%% H42: src_index + 15 <= src__index__subtype__1__last %%% Simplified C3 on reading formula in, to give: %%% C3: element(src, [src_index + 9]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(src, [src_index + 9]) <= interfaces__unsigned_64__last %%% Simplified C5 on reading formula in, to give: %%% C5: element(src, [src_index + 9]) >= spark__unsigned__u64__first %%% Simplified C6 on reading formula in, to give: %%% C6: element(src, [src_index + 9]) <= spark__unsigned__u64__last %%% Simplified C7 on reading formula in, to give: %%% C7: src_index + 9 >= src__index__subtype__1__first %%% Simplified C8 on reading formula in, to give: %%% C8: src_index + 9 <= src__index__subtype__1__last %%% Simplified C9 on reading formula in, to give: %%% C9: src_index + 9 >= integer__base__first %%% Simplified C10 on reading formula in, to give: %%% C10: src_index + 9 <= integer__base__last *** Proved C7: src_index + 9 >= 
src__index__subtype__1__first using hypothesis H29. *** Proved C8: src_index + 9 <= src__index__subtype__1__last using hypothesis H30. *** Proved C13: src_index + 8 >= src__index__subtype__1__first using hypothesis H27. *** Proved C14: src_index + 8 <= src__index__subtype__1__last using hypothesis H28. *** Proved C15: dst_index + 1 >= dst__index__subtype__1__first using hypothesis H2. -S- Applied substitution rule get_64_lsb_f_rules(38). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule get_64_lsb_f_rules(39). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule get_64_lsb_f_rules(20). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: element(src, [src_index + 9]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(21). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: element(src, [src_index + 9]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> 0 <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) New C5: element(src, [src_index + 9]) >= 0 New C11: element(src, [src_index + 8]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> 0 <= element(dst, [i_]) and element(dst, [i_]) <= 18446744073709551615) New C6: element(src, [src_index + 9]) <= 18446744073709551615 New C12: element(src, [src_index + 8]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(4). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New C9: src_index >= - 2147483657 -S- Applied substitution rule get_64_lsb_f_rules(5). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New C10: src_index <= 2147483638 *** Proved C1: true *** Proved C2: true >>> Restructured hypothesis H20 into: >>> H20: dst_index <> dst__index__subtype__1__last --- Attempted addition of new hypothesis: dst_index + 1 <= natural__last eliminated: this already exists (as H21). +++ Using "A->B, A |- B" on hypotheses H8 & H20 yields a new hypothesis: +++ H21: dst_index + 1 <= natural__last --- Attempted addition of new hypothesis: src_index + 8 <= natural__last eliminated: this already exists (as H24). +++ Using "A->B, A |- B" on hypotheses H9 & H20 yields a new hypothesis: +++ H24: src_index + 8 <= natural__last -S- Applied substitution rule get_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H11: src_offset >= 0 New H25: src_index >= - 8 -S- Applied substitution rule get_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. 
New H8: dst_index <> dst__index__subtype__1__last -> dst_index <= 2147483646 New H9: dst_index <> dst__index__subtype__1__last -> src_index <= 2147483639 New H12: src_offset <= 2147483647 New H19: src_offset + dst__index__subtype__1__last * 8 <= 2147483647 New H21: dst_index <= 2147483646 New H24: src_index <= 2147483639 -S- Applied substitution rule get_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule get_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 255) -S- Applied substitution rule get_64_lsb_f_rules(43). This was achieved by replacing all occurrences of spark__crypto__word_count_t__first by: 0. New H22: dst_index >= - 1 -S- Applied substitution rule get_64_lsb_f_rules(44). This was achieved by replacing all occurrences of spark__crypto__word_count_t__last by: 268435455. New H4: dst__index__subtype__1__last <= 268435455 New H23: dst_index <= 268435454 %%% Hypotheses H3 & H20 together imply that dst_index < dst__index__subtype__1__last. H3 & H20 have therefore been deleted and a new H43 added to this effect. *** Proved C16: dst_index + 1 <= dst__index__subtype__1__last via its standard form, which is: Std.Fm C16: dst__index__subtype__1__last - dst_index > 0 using hypothesis H43. *** Proved C3: element(src, [src_index + 9]) >= 0 using hypotheses H10, H29 & H30. *** Proved C4: element(src, [src_index + 9]) <= 18446744073709551615 using hypotheses H10, H29 & H30. *** Proved C5: element(src, [src_index + 9]) >= 0 using hypotheses H10, H29 & H30. *** Proved C11: element(src, [src_index + 8]) >= 0 using hypotheses H10, H27 & H28. *** Proved C6: element(src, [src_index + 9]) <= 18446744073709551615 using hypotheses H10, H29 & H30. *** Proved C12: element(src, [src_index + 8]) <= 18446744073709551615 using hypotheses H10, H27 & H28. *** Proved C9: src_index >= - 2147483657 using hypothesis H25. --- Eliminated hypothesis H26 (true-hypothesis). >>> Using "A->B, A |- B" on H8, given H43, we simplify the former to: >>> H8: dst_index <= 2147483646 >>> Using "A->B, A |- B" on H9, given H43, we simplify the former to: >>> H9: src_index <= 2147483639 --- Eliminated hypothesis H7 (redundant, given H5 & H43). --- Eliminated hypothesis H15 (redundant, given H18). --- Eliminated hypothesis H21 (redundant, given H23). --- Eliminated hypothesis H22 (redundant, given H2 & H14). --- Eliminated hypothesis H28 (redundant, given H30). --- Eliminated hypothesis H29 (redundant, given H27). --- Eliminated hypothesis H30 (redundant, given H32). --- Eliminated hypothesis H31 (redundant, given H27). --- Eliminated hypothesis H32 (redundant, given H34). --- Eliminated hypothesis H33 (redundant, given H27). --- Eliminated hypothesis H34 (redundant, given H36). --- Eliminated hypothesis H35 (redundant, given H27). --- Eliminated hypothesis H36 (redundant, given H38). --- Eliminated hypothesis H37 (redundant, given H27). --- Eliminated hypothesis H38 (redundant, given H40). --- Eliminated hypothesis H39 (redundant, given H27). --- Eliminated hypothesis H40 (redundant, given H42). 
--- Eliminated hypothesis H41 (redundant, given H27). -S- Eliminated hypothesis H5. This was achieved by replacing all occurrences of src_index by: src_offset + dst_index * 8. New H6: src_offset + dst_index * 8 >= src_offset New H27: src_offset + dst_index * 8 + 8 >= src__index__subtype__1__first New H42: src_offset + dst_index * 8 + 15 <= src__index__subtype__1__last New H25: src_offset + dst_index * 8 >= - 8 New H24: src_offset + dst_index * 8 <= 2147483639 New H9: src_offset + dst_index * 8 <= 2147483639 New C10: src_offset + dst_index * 8 <= 2147483638 +++ New H44: integer__size >= 0 +++ New H45: natural__size >= 0 +++ New H46: spark__unsigned__byte__size >= 0 +++ New H47: spark__unsigned__u64__size >= 0 +++ New H48: spark__unsigned__shift_count__size >= 0 +++ New H49: spark__crypto__word_count_t__size >= 0 +++ New H50: dst__index__subtype__1__first <= dst__index__subtype__1__last +++ New H51: src__index__subtype__1__first <= src__index__subtype__1__last +++ New H52: src__index__subtype__1__first >= 0 +++ New H53: src__index__subtype__1__last >= 0 +++ New H54: src__index__subtype__1__last <= 2147483647 +++ New H55: src__index__subtype__1__first <= 2147483647 +++ New H56: dst__index__subtype__1__first >= 0 +++ New H57: dst__index__subtype__1__last >= 0 --- Attempted addition of new hypothesis: dst__index__subtype__1__last <= 268435455 eliminated: this already exists (as H4). +++ New H4: dst__index__subtype__1__last <= 268435455 +++ New H58: dst__index__subtype__1__first <= 268435455 *** Proved C10: src_offset + dst_index * 8 <= 2147483638 using hypotheses H42 & H54. *** PROVED VC. @@@@@@@@@@ VC: procedure_get_64_lsb_first_7. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H13 has been replaced by "true". (It is already present, as H9) . --- Hypothesis H14 has been replaced by "true". (It is already present, as H10). --- Hypothesis H16 has been replaced by "true". (It is already present, as H3) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H37 has been replaced by "true". (It is already present, as H17). --- Hypothesis H38 has been replaced by "true". (It is already present, as H18). --- Hypothesis H43 has been replaced by "true". (It is already present, as H15). --- Hypothesis H44 has been replaced by "true". (It is already present, as H3) . *** Proved C1: 16 >= spark__unsigned__shift_count__first using hypothesis H31. *** Proved C7: src_offset + 2 >= src__index__subtype__1__first using hypothesis H19. *** Proved C8: src_offset + 2 <= src__index__subtype__1__last using hypothesis H20. *** Proved C9: src_offset + 2 >= integer__base__first using hypothesis H39. -S- Applied substitution rule get_64_lsb_f_rules(39). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H32: true New C2: true -S- Applied substitution rule get_64_lsb_f_rules(20). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H33: element(src, [src_offset + 1]) >= 0 New H47: spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8) >= 0 New C3: element(src, [src_offset + 2]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(21). 
This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H34: element(src, [src_offset + 1]) <= 18446744073709551615 New H48: spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8) <= 18446744073709551615 New C4: element(src, [src_offset + 2]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H35: element(src, [src_offset + 1]) >= 0 New H41: element(src, [src_offset]) >= 0 New C5: element(src, [src_offset + 2]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H36: element(src, [src_offset + 1]) <= 18446744073709551615 New H42: element(src, [src_offset]) <= 18446744073709551615 New C6: element(src, [src_offset + 2]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(5). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H40: src_offset <= 2147483646 New C10: src_offset <= 2147483645 *** Proved C2: true -S- Applied substitution rule get_64_lsb_f_rules(4). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H39: src_offset >= - 2147483649 -S- Applied substitution rule get_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H9: src_offset >= 0 -S- Applied substitution rule get_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: src_offset + dst__index__subtype__1__last * 8 <= 2147483647 New H10: src_offset <= 2147483647 -S- Applied substitution rule get_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule get_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 255) -S- Applied substitution rule get_64_lsb_f_rules(38). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H31: true -S- Applied substitution rule get_64_lsb_f_rules(43). This was achieved by replacing all occurrences of spark__crypto__word_count_t__first by: 0. New H11: true -S- Applied substitution rule get_64_lsb_f_rules(44). This was achieved by replacing all occurrences of spark__crypto__word_count_t__last by: 268435455. New H12: true *** Proved C3: element(src, [src_offset + 2]) >= 0 using hypotheses H8, H19 & H20. *** Proved C4: element(src, [src_offset + 2]) <= 18446744073709551615 using hypotheses H8, H19 & H20. *** Proved C5: element(src, [src_offset + 2]) >= 0 using hypotheses H8, H19 & H20. *** Proved C6: element(src, [src_offset + 2]) <= 18446744073709551615 using hypotheses H8, H19 & H20. --- Eliminated hypothesis H13 (true-hypothesis). --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H16 (true-hypothesis). --- Eliminated hypothesis H30 (true-hypothesis). --- Eliminated hypothesis H32 (true-hypothesis). --- Eliminated hypothesis H37 (true-hypothesis). 
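The -S- steps here and throughout replace the symbolic type bounds by the literal values recorded in the get_64_lsb_f_rules rules, after which the goals are plain linear arithmetic over integers. The literals that recur in these VCs are the usual power-of-two bounds (the corresponding __first values are 0, except integer__base__first = -2^{31}):

\begin{align*}
  255                  &= 2^{8}-1   && \text{(spark\_\_unsigned\_\_byte\_\_last)}\\
  18446744073709551615 &= 2^{64}-1  && \text{(spark\_\_unsigned\_\_u64\_\_last, interfaces\_\_unsigned\_64\_\_last)}\\
  2147483647           &= 2^{31}-1  && \text{(natural\_\_last, integer\_\_base\_\_last)}\\
  268435455            &= 2^{28}-1  && \text{(spark\_\_crypto\_\_word\_count\_t\_\_last)}\\
  64                   &            && \text{(spark\_\_unsigned\_\_shift\_count\_\_last)}
\end{align*}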
--- Eliminated hypothesis H38 (true-hypothesis). --- Eliminated hypothesis H43 (true-hypothesis). --- Eliminated hypothesis H44 (true-hypothesis). --- Eliminated hypothesis H31 (true-hypothesis). --- Eliminated hypothesis H11 (true-hypothesis). --- Eliminated hypothesis H12 (true-hypothesis). --- Eliminated hypothesis H35 (duplicate of H33). --- Eliminated hypothesis H36 (duplicate of H34). --- Eliminated hypothesis H3 (redundant, given H6). --- Eliminated hypothesis H10 (redundant, given H40). --- Eliminated hypothesis H17 (redundant, given H15). --- Eliminated hypothesis H18 (redundant, given H6). --- Eliminated hypothesis H19 (redundant, given H15). --- Eliminated hypothesis H20 (redundant, given H6). --- Eliminated hypothesis H21 (redundant, given H15). --- Eliminated hypothesis H22 (redundant, given H6). --- Eliminated hypothesis H23 (redundant, given H15). --- Eliminated hypothesis H24 (redundant, given H6). --- Eliminated hypothesis H25 (redundant, given H15). --- Eliminated hypothesis H26 (redundant, given H6). --- Eliminated hypothesis H27 (redundant, given H15). --- Eliminated hypothesis H28 (redundant, given H6). --- Eliminated hypothesis H29 (redundant, given H15). --- Eliminated hypothesis H39 (redundant, given H9). --- Eliminated hypothesis H45 (redundant, given H2). +++ New H49: integer__size >= 0 +++ New H50: natural__size >= 0 +++ New H51: spark__unsigned__byte__size >= 0 +++ New H52: spark__unsigned__u64__size >= 0 +++ New H53: spark__unsigned__shift_count__size >= 0 +++ New H54: spark__crypto__word_count_t__size >= 0 +++ New H55: dst__index__subtype__1__first <= dst__index__subtype__1__last +++ New H56: src__index__subtype__1__first <= src__index__subtype__1__last +++ New H57: src__index__subtype__1__first >= 0 +++ New H58: src__index__subtype__1__last >= 0 +++ New H59: src__index__subtype__1__last <= 2147483647 +++ New H60: src__index__subtype__1__first <= 2147483647 +++ New H61: dst__index__subtype__1__first >= 0 +++ New H62: dst__index__subtype__1__last >= 0 +++ New H63: dst__index__subtype__1__last <= 268435455 +++ New H64: dst__index__subtype__1__first <= 268435455 *** Proved C10: src_offset <= 2147483645 using hypotheses H6 & H59. *** PROVED VC. @@@@@@@@@@ VC: procedure_get_64_lsb_first_8. @@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> spark__unsigned__u64__first <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) %%% Simplified H10 on reading formula in, to give: %%% H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). 
%%% Simplified H29 on reading formula in, to give: %%% H29: src_index + 9 >= src__index__subtype__1__first %%% Simplified H30 on reading formula in, to give: %%% H30: src_index + 9 <= src__index__subtype__1__last %%% Simplified H31 on reading formula in, to give: %%% H31: src_index + 10 >= src__index__subtype__1__first %%% Simplified H32 on reading formula in, to give: %%% H32: src_index + 10 <= src__index__subtype__1__last %%% Simplified H33 on reading formula in, to give: %%% H33: src_index + 11 >= src__index__subtype__1__first %%% Simplified H34 on reading formula in, to give: %%% H34: src_index + 11 <= src__index__subtype__1__last %%% Simplified H35 on reading formula in, to give: %%% H35: src_index + 12 >= src__index__subtype__1__first %%% Simplified H36 on reading formula in, to give: %%% H36: src_index + 12 <= src__index__subtype__1__last %%% Simplified H37 on reading formula in, to give: %%% H37: src_index + 13 >= src__index__subtype__1__first %%% Simplified H38 on reading formula in, to give: %%% H38: src_index + 13 <= src__index__subtype__1__last %%% Simplified H39 on reading formula in, to give: %%% H39: src_index + 14 >= src__index__subtype__1__first %%% Simplified H40 on reading formula in, to give: %%% H40: src_index + 14 <= src__index__subtype__1__last %%% Simplified H41 on reading formula in, to give: %%% H41: src_index + 15 >= src__index__subtype__1__first %%% Simplified H42 on reading formula in, to give: %%% H42: src_index + 15 <= src__index__subtype__1__last %%% Simplified H45 on reading formula in, to give: %%% H45: element(src, [src_index + 9]) >= interfaces__unsigned_64__first %%% Simplified H46 on reading formula in, to give: %%% H46: element(src, [src_index + 9]) <= interfaces__unsigned_64__last %%% Simplified H47 on reading formula in, to give: %%% H47: element(src, [src_index + 9]) >= spark__unsigned__u64__first %%% Simplified H48 on reading formula in, to give: %%% H48: element(src, [src_index + 9]) <= spark__unsigned__u64__last --- Hypothesis H49 has been replaced by "true". (It is already present, as H29). --- Hypothesis H50 has been replaced by "true". (It is already present, as H30). %%% Simplified H51 on reading formula in, to give: %%% H51: src_index + 9 >= integer__base__first %%% Simplified H52 on reading formula in, to give: %%% H52: src_index + 9 <= integer__base__last --- Hypothesis H55 has been replaced by "true". (It is already present, as H27). --- Hypothesis H56 has been replaced by "true". (It is already present, as H28). 
%%% Simplified H59 on reading formula in, to give: %%% H59: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) >= interfaces__unsigned_64__first %%% Simplified H60 on reading formula in, to give: %%% H60: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) <= interfaces__unsigned_64__last %%% Simplified C3 on reading formula in, to give: %%% C3: element(src, [src_index + 10]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(src, [src_index + 10]) <= interfaces__unsigned_64__last %%% Simplified C5 on reading formula in, to give: %%% C5: element(src, [src_index + 10]) >= spark__unsigned__u64__first %%% Simplified C6 on reading formula in, to give: %%% C6: element(src, [src_index + 10]) <= spark__unsigned__u64__last %%% Simplified C7 on reading formula in, to give: %%% C7: src_index + 10 >= src__index__subtype__1__first %%% Simplified C8 on reading formula in, to give: %%% C8: src_index + 10 <= src__index__subtype__1__last %%% Simplified C9 on reading formula in, to give: %%% C9: src_index + 10 >= integer__base__first %%% Simplified C10 on reading formula in, to give: %%% C10: src_index + 10 <= integer__base__last *** Proved C1: 16 >= spark__unsigned__shift_count__first using hypothesis H43. *** Proved C7: src_index + 10 >= src__index__subtype__1__first using hypothesis H31. *** Proved C8: src_index + 10 <= src__index__subtype__1__last using hypothesis H32. *** Proved C9: src_index + 10 >= integer__base__first using hypothesis H51. -S- Applied substitution rule get_64_lsb_f_rules(39). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H44: true New C2: true -S- Applied substitution rule get_64_lsb_f_rules(20). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H45: element(src, [src_index + 9]) >= 0 New H59: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) >= 0 New C3: element(src, [src_index + 10]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(21). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H46: element(src, [src_index + 9]) <= 18446744073709551615 New H60: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) <= 18446744073709551615 New C4: element(src, [src_index + 10]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> 0 <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) New H47: element(src, [src_index + 9]) >= 0 New H53: element(src, [src_index + 8]) >= 0 New C5: element(src, [src_index + 10]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H48: element(src, [src_index + 9]) <= 18446744073709551615 New H54: element(src, [src_index + 8]) <= 18446744073709551615 New H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> 0 <= element(dst, [i_]) and element(dst, [i_]) <= 18446744073709551615) New C6: element(src, [src_index + 10]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(5). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. 
New H52: src_index <= 2147483638 New C10: src_index <= 2147483637 *** Proved C2: true >>> Restructured hypothesis H20 into: >>> H20: dst_index <> dst__index__subtype__1__last --- Attempted addition of new hypothesis: dst_index + 1 <= natural__last eliminated: this already exists (as H21). +++ Using "A->B, A |- B" on hypotheses H8 & H20 yields a new hypothesis: +++ H21: dst_index + 1 <= natural__last --- Attempted addition of new hypothesis: src_index + 8 <= natural__last eliminated: this already exists (as H24). +++ Using "A->B, A |- B" on hypotheses H9 & H20 yields a new hypothesis: +++ H24: src_index + 8 <= natural__last -S- Applied substitution rule get_64_lsb_f_rules(4). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H51: src_index >= - 2147483657 -S- Applied substitution rule get_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H11: src_offset >= 0 New H25: src_index >= - 8 -S- Applied substitution rule get_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H8: dst_index <> dst__index__subtype__1__last -> dst_index <= 2147483646 New H9: dst_index <> dst__index__subtype__1__last -> src_index <= 2147483639 New H12: src_offset <= 2147483647 New H19: src_offset + dst__index__subtype__1__last * 8 <= 2147483647 New H21: dst_index <= 2147483646 New H24: src_index <= 2147483639 -S- Applied substitution rule get_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule get_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 255) -S- Applied substitution rule get_64_lsb_f_rules(38). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H43: true -S- Applied substitution rule get_64_lsb_f_rules(43). This was achieved by replacing all occurrences of spark__crypto__word_count_t__first by: 0. New H22: dst_index >= - 1 -S- Applied substitution rule get_64_lsb_f_rules(44). This was achieved by replacing all occurrences of spark__crypto__word_count_t__last by: 268435455. New H4: dst__index__subtype__1__last <= 268435455 New H23: dst_index <= 268435454 %%% Hypotheses H3 & H20 together imply that dst_index < dst__index__subtype__1__last. H3 & H20 have therefore been deleted and a new H61 added to this effect. *** Proved C3: element(src, [src_index + 10]) >= 0 using hypotheses H10, H31 & H32. *** Proved C4: element(src, [src_index + 10]) <= 18446744073709551615 using hypotheses H10, H31 & H32. *** Proved C5: element(src, [src_index + 10]) >= 0 using hypotheses H10, H31 & H32. *** Proved C6: element(src, [src_index + 10]) <= 18446744073709551615 using hypotheses H10, H31 & H32. --- Eliminated hypothesis H26 (true-hypothesis). --- Eliminated hypothesis H44 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H50 (true-hypothesis). --- Eliminated hypothesis H55 (true-hypothesis). --- Eliminated hypothesis H56 (true-hypothesis). --- Eliminated hypothesis H43 (true-hypothesis). 
--- Eliminated hypothesis H47 (duplicate of H45). --- Eliminated hypothesis H48 (duplicate of H46). --- Eliminated hypothesis H61 (duplicate of H58). >>> Using "A->B, A |- B" on H8, given H61, we simplify the former to: >>> H8: dst_index <= 2147483646 >>> Using "A->B, A |- B" on H9, given H61, we simplify the former to: >>> H9: src_index <= 2147483639 --- Eliminated hypothesis H15 (redundant, given H18). --- Eliminated hypothesis H21 (redundant, given H23). --- Eliminated hypothesis H22 (redundant, given H2 & H14). --- Eliminated hypothesis H24 (redundant, given H52). --- Eliminated hypothesis H28 (redundant, given H30). --- Eliminated hypothesis H29 (redundant, given H27). --- Eliminated hypothesis H30 (redundant, given H32). --- Eliminated hypothesis H31 (redundant, given H27). --- Eliminated hypothesis H32 (redundant, given H34). --- Eliminated hypothesis H33 (redundant, given H27). --- Eliminated hypothesis H34 (redundant, given H36). --- Eliminated hypothesis H35 (redundant, given H27). --- Eliminated hypothesis H36 (redundant, given H38). --- Eliminated hypothesis H37 (redundant, given H27). --- Eliminated hypothesis H38 (redundant, given H40). --- Eliminated hypothesis H39 (redundant, given H27). --- Eliminated hypothesis H40 (redundant, given H42). --- Eliminated hypothesis H41 (redundant, given H27). --- Eliminated hypothesis H51 (redundant, given H25). --- Eliminated hypothesis H57 (redundant, given H2). -S- Eliminated hypothesis H5. This was achieved by replacing all occurrences of src_index by: src_offset + dst_index * 8. New H6: src_offset + dst_index * 8 >= src_offset New H7: src_offset + dst_index * 8 <= src_offset + dst__index__subtype__1__last * 8 New H27: src_offset + dst_index * 8 + 8 >= src__index__subtype__1__first New H42: src_offset + dst_index * 8 + 15 <= src__index__subtype__1__last New H45: element(src, [src_offset + dst_index * 8 + 9]) >= 0 New H46: element(src, [src_offset + dst_index * 8 + 9]) <= 18446744073709551615 New H52: src_offset + dst_index * 8 <= 2147483638 New H53: element(src, [src_offset + dst_index * 8 + 8]) >= 0 New H54: element(src, [src_offset + dst_index * 8 + 8]) <= 18446744073709551615 New H59: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 9]), 8) >= 0 New H60: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 9]), 8) <= 18446744073709551615 New H25: src_offset + dst_index * 8 >= - 8 New H9: src_offset + dst_index * 8 <= 2147483639 New C10: src_offset + dst_index * 8 <= 2147483637 +++ New H62: integer__size >= 0 +++ New H63: natural__size >= 0 +++ New H64: spark__unsigned__byte__size >= 0 +++ New H65: spark__unsigned__u64__size >= 0 +++ New H66: spark__unsigned__shift_count__size >= 0 +++ New H67: spark__crypto__word_count_t__size >= 0 +++ New H68: dst__index__subtype__1__first <= dst__index__subtype__1__last +++ New H69: src__index__subtype__1__first <= src__index__subtype__1__last +++ New H70: src__index__subtype__1__first >= 0 +++ New H71: src__index__subtype__1__last >= 0 +++ New H72: src__index__subtype__1__last <= 2147483647 +++ New H73: src__index__subtype__1__first <= 2147483647 +++ New H74: dst__index__subtype__1__first >= 0 +++ New H75: dst__index__subtype__1__last >= 0 --- Attempted addition of new hypothesis: dst__index__subtype__1__last <= 268435455 eliminated: this already exists (as H4). 
+++ New H4: dst__index__subtype__1__last <= 268435455 +++ New H76: dst__index__subtype__1__first <= 268435455 *** Proved C10: src_offset + dst_index * 8 <= 2147483637 using hypotheses H42 & H72. *** PROVED VC. @@@@@@@@@@ VC: procedure_get_64_lsb_first_9. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H13 has been replaced by "true". (It is already present, as H9) . --- Hypothesis H14 has been replaced by "true". (It is already present, as H10). --- Hypothesis H16 has been replaced by "true". (It is already present, as H3) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H37 has been replaced by "true". (It is already present, as H17). --- Hypothesis H38 has been replaced by "true". (It is already present, as H18). --- Hypothesis H43 has been replaced by "true". (It is already present, as H15). --- Hypothesis H44 has been replaced by "true". (It is already present, as H3) . --- Hypothesis H55 has been replaced by "true". (It is already present, as H19). --- Hypothesis H56 has been replaced by "true". (It is already present, as H20). *** Proved C1: 24 >= spark__unsigned__shift_count__first using hypothesis H31. *** Proved C7: src_offset + 3 >= src__index__subtype__1__first using hypothesis H21. *** Proved C8: src_offset + 3 <= src__index__subtype__1__last using hypothesis H22. *** Proved C9: src_offset + 3 >= integer__base__first using hypothesis H39. -S- Applied substitution rule get_64_lsb_f_rules(39). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H32: true New H50: true New C2: true -S- Applied substitution rule get_64_lsb_f_rules(20). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H33: element(src, [src_offset + 1]) >= 0 New H47: spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8) >= 0 New H51: element(src, [src_offset + 2]) >= 0 New H59: spark__unsigned__shift_left_64(element(src, [src_offset + 2]), 16) >= 0 New C3: element(src, [src_offset + 3]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(21). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H34: element(src, [src_offset + 1]) <= 18446744073709551615 New H48: spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8) <= 18446744073709551615 New H52: element(src, [src_offset + 2]) <= 18446744073709551615 New H60: spark__unsigned__shift_left_64(element(src, [src_offset + 2]), 16) <= 18446744073709551615 New C4: element(src, [src_offset + 3]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H35: element(src, [src_offset + 1]) >= 0 New H41: element(src, [src_offset]) >= 0 New H53: element(src, [src_offset + 2]) >= 0 New C5: element(src, [src_offset + 3]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H36: element(src, [src_offset + 1]) <= 18446744073709551615 New H42: element(src, [src_offset]) <= 18446744073709551615 New H54: element(src, [src_offset + 2]) <= 18446744073709551615 New C6: element(src, [src_offset + 3]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(5). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H40: src_offset <= 2147483646 New H58: src_offset <= 2147483645 New C10: src_offset <= 2147483644 *** Proved C2: true -S- Applied substitution rule get_64_lsb_f_rules(4). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H39: src_offset >= - 2147483649 New H57: src_offset >= - 2147483650 -S- Applied substitution rule get_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H9: src_offset >= 0 -S- Applied substitution rule get_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: src_offset + dst__index__subtype__1__last * 8 <= 2147483647 New H10: src_offset <= 2147483647 -S- Applied substitution rule get_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule get_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 255) -S- Applied substitution rule get_64_lsb_f_rules(38). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H31: true New H49: true -S- Applied substitution rule get_64_lsb_f_rules(43). This was achieved by replacing all occurrences of spark__crypto__word_count_t__first by: 0. New H11: true -S- Applied substitution rule get_64_lsb_f_rules(44). This was achieved by replacing all occurrences of spark__crypto__word_count_t__last by: 268435455. New H12: true *** Proved C3: element(src, [src_offset + 3]) >= 0 using hypotheses H8, H21 & H22. *** Proved C4: element(src, [src_offset + 3]) <= 18446744073709551615 using hypotheses H8, H21 & H22. *** Proved C5: element(src, [src_offset + 3]) >= 0 using hypotheses H8, H21 & H22. *** Proved C6: element(src, [src_offset + 3]) <= 18446744073709551615 using hypotheses H8, H21 & H22. --- Eliminated hypothesis H13 (true-hypothesis). --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H16 (true-hypothesis). --- Eliminated hypothesis H30 (true-hypothesis). --- Eliminated hypothesis H32 (true-hypothesis). --- Eliminated hypothesis H37 (true-hypothesis). --- Eliminated hypothesis H38 (true-hypothesis). --- Eliminated hypothesis H43 (true-hypothesis). --- Eliminated hypothesis H44 (true-hypothesis). --- Eliminated hypothesis H50 (true-hypothesis). --- Eliminated hypothesis H55 (true-hypothesis). --- Eliminated hypothesis H56 (true-hypothesis). --- Eliminated hypothesis H31 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H11 (true-hypothesis). --- Eliminated hypothesis H12 (true-hypothesis). --- Eliminated hypothesis H35 (duplicate of H33). --- Eliminated hypothesis H36 (duplicate of H34). 
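The element(...) range conclusions in this and the neighbouring VCs are all discharged the same way: the quantified hypothesis over src is instantiated at the index whose membership in the index subtype has just been established (here H21 and H22 for src_offset + 3), and the resulting byte bound is weakened to the 64-bit bound. Schematically, with F and L_s for src__index__subtype__1__first and __last, s for the base index and k for the byte offset:

\[
  \Bigl(\forall i.\; F \le i \le L_s \Rightarrow 0 \le \mathit{src}(i) \le 255\Bigr)
  \;\wedge\; F \le s + k \le L_s
  \;\Longrightarrow\; 0 \le \mathit{src}(s + k) \le 255 \le 2^{64}-1 .
\]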
--- Eliminated hypothesis H53 (duplicate of H51). --- Eliminated hypothesis H54 (duplicate of H52). --- Eliminated hypothesis H3 (redundant, given H6). --- Eliminated hypothesis H10 (redundant, given H40). --- Eliminated hypothesis H17 (redundant, given H15). --- Eliminated hypothesis H18 (redundant, given H6). --- Eliminated hypothesis H19 (redundant, given H15). --- Eliminated hypothesis H20 (redundant, given H6). --- Eliminated hypothesis H21 (redundant, given H15). --- Eliminated hypothesis H22 (redundant, given H6). --- Eliminated hypothesis H23 (redundant, given H15). --- Eliminated hypothesis H24 (redundant, given H6). --- Eliminated hypothesis H25 (redundant, given H15). --- Eliminated hypothesis H26 (redundant, given H6). --- Eliminated hypothesis H27 (redundant, given H15). --- Eliminated hypothesis H28 (redundant, given H6). --- Eliminated hypothesis H29 (redundant, given H15). --- Eliminated hypothesis H39 (redundant, given H9). --- Eliminated hypothesis H40 (redundant, given H58). --- Eliminated hypothesis H45 (redundant, given H2). --- Eliminated hypothesis H57 (redundant, given H9). +++ New H61: integer__size >= 0 +++ New H62: natural__size >= 0 +++ New H63: spark__unsigned__byte__size >= 0 +++ New H64: spark__unsigned__u64__size >= 0 +++ New H65: spark__unsigned__shift_count__size >= 0 +++ New H66: spark__crypto__word_count_t__size >= 0 +++ New H67: dst__index__subtype__1__first <= dst__index__subtype__1__last +++ New H68: src__index__subtype__1__first <= src__index__subtype__1__last +++ New H69: src__index__subtype__1__first >= 0 +++ New H70: src__index__subtype__1__last >= 0 +++ New H71: src__index__subtype__1__last <= 2147483647 +++ New H72: src__index__subtype__1__first <= 2147483647 +++ New H73: dst__index__subtype__1__first >= 0 +++ New H74: dst__index__subtype__1__last >= 0 +++ New H75: dst__index__subtype__1__last <= 268435455 +++ New H76: dst__index__subtype__1__first <= 268435455 *** Proved C10: src_offset <= 2147483644 using hypotheses H6 & H71. *** PROVED VC. @@@@@@@@@@ VC: procedure_get_64_lsb_first_10. @@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> spark__unsigned__u64__first <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) %%% Simplified H10 on reading formula in, to give: %%% H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). 
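The runs of "(redundant, given Hnn)" eliminations in the VCs above are the Simplifier discarding index bounds that are strictly weaker than ones it keeps: a lower bound on src_offset subsumes the same lower bound on src_offset + 1, src_offset + 2, and so on, and an upper bound on the largest offset subsumes those on the smaller ones. For the lower bounds, with s for src_offset and F for src__index__subtype__1__first:

\[
  s \ge F \;\Longrightarrow\; s + 1 \ge F \;\Longrightarrow\; \cdots \;\Longrightarrow\; s + 7 \ge F ,
\]

which is why H17, H19, H21, H23, H25, H27 and H29 all disappear in favour of H15 in VC 9.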
%%% Simplified H29 on reading formula in, to give: %%% H29: src_index + 9 >= src__index__subtype__1__first %%% Simplified H30 on reading formula in, to give: %%% H30: src_index + 9 <= src__index__subtype__1__last %%% Simplified H31 on reading formula in, to give: %%% H31: src_index + 10 >= src__index__subtype__1__first %%% Simplified H32 on reading formula in, to give: %%% H32: src_index + 10 <= src__index__subtype__1__last %%% Simplified H33 on reading formula in, to give: %%% H33: src_index + 11 >= src__index__subtype__1__first %%% Simplified H34 on reading formula in, to give: %%% H34: src_index + 11 <= src__index__subtype__1__last %%% Simplified H35 on reading formula in, to give: %%% H35: src_index + 12 >= src__index__subtype__1__first %%% Simplified H36 on reading formula in, to give: %%% H36: src_index + 12 <= src__index__subtype__1__last %%% Simplified H37 on reading formula in, to give: %%% H37: src_index + 13 >= src__index__subtype__1__first %%% Simplified H38 on reading formula in, to give: %%% H38: src_index + 13 <= src__index__subtype__1__last %%% Simplified H39 on reading formula in, to give: %%% H39: src_index + 14 >= src__index__subtype__1__first %%% Simplified H40 on reading formula in, to give: %%% H40: src_index + 14 <= src__index__subtype__1__last %%% Simplified H41 on reading formula in, to give: %%% H41: src_index + 15 >= src__index__subtype__1__first %%% Simplified H42 on reading formula in, to give: %%% H42: src_index + 15 <= src__index__subtype__1__last %%% Simplified H45 on reading formula in, to give: %%% H45: element(src, [src_index + 9]) >= interfaces__unsigned_64__first %%% Simplified H46 on reading formula in, to give: %%% H46: element(src, [src_index + 9]) <= interfaces__unsigned_64__last %%% Simplified H47 on reading formula in, to give: %%% H47: element(src, [src_index + 9]) >= spark__unsigned__u64__first %%% Simplified H48 on reading formula in, to give: %%% H48: element(src, [src_index + 9]) <= spark__unsigned__u64__last --- Hypothesis H49 has been replaced by "true". (It is already present, as H29). --- Hypothesis H50 has been replaced by "true". (It is already present, as H30). %%% Simplified H51 on reading formula in, to give: %%% H51: src_index + 9 >= integer__base__first %%% Simplified H52 on reading formula in, to give: %%% H52: src_index + 9 <= integer__base__last --- Hypothesis H55 has been replaced by "true". (It is already present, as H27). --- Hypothesis H56 has been replaced by "true". (It is already present, as H28). %%% Simplified H59 on reading formula in, to give: %%% H59: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) >= interfaces__unsigned_64__first %%% Simplified H60 on reading formula in, to give: %%% H60: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) <= interfaces__unsigned_64__last %%% Simplified H63 on reading formula in, to give: %%% H63: element(src, [src_index + 10]) >= interfaces__unsigned_64__first %%% Simplified H64 on reading formula in, to give: %%% H64: element(src, [src_index + 10]) <= interfaces__unsigned_64__last %%% Simplified H65 on reading formula in, to give: %%% H65: element(src, [src_index + 10]) >= spark__unsigned__u64__first %%% Simplified H66 on reading formula in, to give: %%% H66: element(src, [src_index + 10]) <= spark__unsigned__u64__last --- Hypothesis H67 has been replaced by "true". (It is already present, as H31). --- Hypothesis H68 has been replaced by "true". (It is already present, as H32). 
%%% Simplified H69 on reading formula in, to give: %%% H69: src_index + 10 >= integer__base__first %%% Simplified H70 on reading formula in, to give: %%% H70: src_index + 10 <= integer__base__last %%% Simplified H71 on reading formula in, to give: %%% H71: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) >= interfaces__unsigned_64__first %%% Simplified H72 on reading formula in, to give: %%% H72: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) <= interfaces__unsigned_64__last %%% Simplified C3 on reading formula in, to give: %%% C3: element(src, [src_index + 11]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(src, [src_index + 11]) <= interfaces__unsigned_64__last %%% Simplified C5 on reading formula in, to give: %%% C5: element(src, [src_index + 11]) >= spark__unsigned__u64__first %%% Simplified C6 on reading formula in, to give: %%% C6: element(src, [src_index + 11]) <= spark__unsigned__u64__last %%% Simplified C7 on reading formula in, to give: %%% C7: src_index + 11 >= src__index__subtype__1__first %%% Simplified C8 on reading formula in, to give: %%% C8: src_index + 11 <= src__index__subtype__1__last %%% Simplified C9 on reading formula in, to give: %%% C9: src_index + 11 >= integer__base__first %%% Simplified C10 on reading formula in, to give: %%% C10: src_index + 11 <= integer__base__last *** Proved C1: 24 >= spark__unsigned__shift_count__first using hypothesis H43. *** Proved C7: src_index + 11 >= src__index__subtype__1__first using hypothesis H33. *** Proved C8: src_index + 11 <= src__index__subtype__1__last using hypothesis H34. *** Proved C9: src_index + 11 >= integer__base__first using hypothesis H51. -S- Applied substitution rule get_64_lsb_f_rules(39). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H44: true New H62: true New C2: true -S- Applied substitution rule get_64_lsb_f_rules(20). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H45: element(src, [src_index + 9]) >= 0 New H59: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) >= 0 New H63: element(src, [src_index + 10]) >= 0 New H71: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) >= 0 New C3: element(src, [src_index + 11]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(21). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H46: element(src, [src_index + 9]) <= 18446744073709551615 New H60: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) <= 18446744073709551615 New H64: element(src, [src_index + 10]) <= 18446744073709551615 New H72: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) <= 18446744073709551615 New C4: element(src, [src_index + 11]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> 0 <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) New H47: element(src, [src_index + 9]) >= 0 New H53: element(src, [src_index + 8]) >= 0 New H65: element(src, [src_index + 10]) >= 0 New C5: element(src, [src_index + 11]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H48: element(src, [src_index + 9]) <= 18446744073709551615 New H54: element(src, [src_index + 8]) <= 18446744073709551615 New H66: element(src, [src_index + 10]) <= 18446744073709551615 New H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> 0 <= element(dst, [i_]) and element(dst, [i_]) <= 18446744073709551615) New C6: element(src, [src_index + 11]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(5). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H52: src_index <= 2147483638 New H70: src_index <= 2147483637 New C10: src_index <= 2147483636 *** Proved C2: true >>> Restructured hypothesis H20 into: >>> H20: dst_index <> dst__index__subtype__1__last --- Attempted addition of new hypothesis: dst_index + 1 <= natural__last eliminated: this already exists (as H21). +++ Using "A->B, A |- B" on hypotheses H8 & H20 yields a new hypothesis: +++ H21: dst_index + 1 <= natural__last --- Attempted addition of new hypothesis: src_index + 8 <= natural__last eliminated: this already exists (as H24). +++ Using "A->B, A |- B" on hypotheses H9 & H20 yields a new hypothesis: +++ H24: src_index + 8 <= natural__last -S- Applied substitution rule get_64_lsb_f_rules(4). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H51: src_index >= - 2147483657 New H69: src_index >= - 2147483658 -S- Applied substitution rule get_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H11: src_offset >= 0 New H25: src_index >= - 8 -S- Applied substitution rule get_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H8: dst_index <> dst__index__subtype__1__last -> dst_index <= 2147483646 New H9: dst_index <> dst__index__subtype__1__last -> src_index <= 2147483639 New H12: src_offset <= 2147483647 New H19: src_offset + dst__index__subtype__1__last * 8 <= 2147483647 New H21: dst_index <= 2147483646 New H24: src_index <= 2147483639 -S- Applied substitution rule get_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule get_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 255) -S- Applied substitution rule get_64_lsb_f_rules(38). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H43: true New H61: true -S- Applied substitution rule get_64_lsb_f_rules(43). This was achieved by replacing all occurrences of spark__crypto__word_count_t__first by: 0. New H22: dst_index >= - 1 -S- Applied substitution rule get_64_lsb_f_rules(44). This was achieved by replacing all occurrences of spark__crypto__word_count_t__last by: 268435455. New H4: dst__index__subtype__1__last <= 268435455 New H23: dst_index <= 268435454 %%% Hypotheses H3 & H20 together imply that dst_index < dst__index__subtype__1__last. H3 & H20 have therefore been deleted and a new H73 added to this effect. *** Proved C3: element(src, [src_index + 11]) >= 0 using hypotheses H10, H33 & H34. 
*** Proved C4: element(src, [src_index + 11]) <= 18446744073709551615 using hypotheses H10, H33 & H34. *** Proved C5: element(src, [src_index + 11]) >= 0 using hypotheses H10, H33 & H34. *** Proved C6: element(src, [src_index + 11]) <= 18446744073709551615 using hypotheses H10, H33 & H34. --- Eliminated hypothesis H26 (true-hypothesis). --- Eliminated hypothesis H44 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H50 (true-hypothesis). --- Eliminated hypothesis H55 (true-hypothesis). --- Eliminated hypothesis H56 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H67 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H43 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). --- Eliminated hypothesis H47 (duplicate of H45). --- Eliminated hypothesis H48 (duplicate of H46). --- Eliminated hypothesis H65 (duplicate of H63). --- Eliminated hypothesis H66 (duplicate of H64). --- Eliminated hypothesis H73 (duplicate of H58). >>> Using "A->B, A |- B" on H8, given H73, we simplify the former to: >>> H8: dst_index <= 2147483646 >>> Using "A->B, A |- B" on H9, given H73, we simplify the former to: >>> H9: src_index <= 2147483639 --- Eliminated hypothesis H15 (redundant, given H18). --- Eliminated hypothesis H21 (redundant, given H23). --- Eliminated hypothesis H22 (redundant, given H2 & H14). --- Eliminated hypothesis H24 (redundant, given H52). --- Eliminated hypothesis H28 (redundant, given H30). --- Eliminated hypothesis H29 (redundant, given H27). --- Eliminated hypothesis H30 (redundant, given H32). --- Eliminated hypothesis H31 (redundant, given H27). --- Eliminated hypothesis H32 (redundant, given H34). --- Eliminated hypothesis H33 (redundant, given H27). --- Eliminated hypothesis H34 (redundant, given H36). --- Eliminated hypothesis H35 (redundant, given H27). --- Eliminated hypothesis H36 (redundant, given H38). --- Eliminated hypothesis H37 (redundant, given H27). --- Eliminated hypothesis H38 (redundant, given H40). --- Eliminated hypothesis H39 (redundant, given H27). --- Eliminated hypothesis H40 (redundant, given H42). --- Eliminated hypothesis H41 (redundant, given H27). --- Eliminated hypothesis H51 (redundant, given H25). --- Eliminated hypothesis H52 (redundant, given H70). --- Eliminated hypothesis H57 (redundant, given H2). --- Eliminated hypothesis H69 (redundant, given H25). -S- Eliminated hypothesis H5. This was achieved by replacing all occurrences of src_index by: src_offset + dst_index * 8. 
New H6: src_offset + dst_index * 8 >= src_offset New H7: src_offset + dst_index * 8 <= src_offset + dst__index__subtype__1__last * 8 New H27: src_offset + dst_index * 8 + 8 >= src__index__subtype__1__first New H42: src_offset + dst_index * 8 + 15 <= src__index__subtype__1__last New H45: element(src, [src_offset + dst_index * 8 + 9]) >= 0 New H46: element(src, [src_offset + dst_index * 8 + 9]) <= 18446744073709551615 New H53: element(src, [src_offset + dst_index * 8 + 8]) >= 0 New H54: element(src, [src_offset + dst_index * 8 + 8]) <= 18446744073709551615 New H59: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 9]), 8) >= 0 New H60: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 9]), 8) <= 18446744073709551615 New H63: element(src, [src_offset + dst_index * 8 + 10]) >= 0 New H64: element(src, [src_offset + dst_index * 8 + 10]) <= 18446744073709551615 New H70: src_offset + dst_index * 8 <= 2147483637 New H71: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 10]), 16) >= 0 New H72: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 10]), 16) <= 18446744073709551615 New H25: src_offset + dst_index * 8 >= - 8 New H9: src_offset + dst_index * 8 <= 2147483639 New C10: src_offset + dst_index * 8 <= 2147483636 +++ New H74: integer__size >= 0 +++ New H75: natural__size >= 0 +++ New H76: spark__unsigned__byte__size >= 0 +++ New H77: spark__unsigned__u64__size >= 0 +++ New H78: spark__unsigned__shift_count__size >= 0 +++ New H79: spark__crypto__word_count_t__size >= 0 +++ New H80: dst__index__subtype__1__first <= dst__index__subtype__1__last +++ New H81: src__index__subtype__1__first <= src__index__subtype__1__last +++ New H82: src__index__subtype__1__first >= 0 +++ New H83: src__index__subtype__1__last >= 0 +++ New H84: src__index__subtype__1__last <= 2147483647 +++ New H85: src__index__subtype__1__first <= 2147483647 +++ New H86: dst__index__subtype__1__first >= 0 +++ New H87: dst__index__subtype__1__last >= 0 --- Attempted addition of new hypothesis: dst__index__subtype__1__last <= 268435455 eliminated: this already exists (as H4). +++ New H4: dst__index__subtype__1__last <= 268435455 +++ New H88: dst__index__subtype__1__first <= 268435455 *** Proved C10: src_offset + dst_index * 8 <= 2147483636 using hypotheses H42 & H84. *** PROVED VC. @@@@@@@@@@ VC: procedure_get_64_lsb_first_11. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H13 has been replaced by "true". (It is already present, as H9) . --- Hypothesis H14 has been replaced by "true". (It is already present, as H10). --- Hypothesis H16 has been replaced by "true". (It is already present, as H3) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H37 has been replaced by "true". (It is already present, as H17). --- Hypothesis H38 has been replaced by "true". (It is already present, as H18). --- Hypothesis H43 has been replaced by "true". (It is already present, as H15). --- Hypothesis H44 has been replaced by "true". (It is already present, as H3) . --- Hypothesis H55 has been replaced by "true". (It is already present, as H19). --- Hypothesis H56 has been replaced by "true". (It is already present, as H20). 
--- Hypothesis H67 has been replaced by "true". (It is already present, as H21). --- Hypothesis H68 has been replaced by "true". (It is already present, as H22). *** Proved C1: 32 >= spark__unsigned__shift_count__first using hypothesis H31. *** Proved C7: src_offset + 4 >= src__index__subtype__1__first using hypothesis H23. *** Proved C8: src_offset + 4 <= src__index__subtype__1__last using hypothesis H24. *** Proved C9: src_offset + 4 >= integer__base__first using hypothesis H39. -S- Applied substitution rule get_64_lsb_f_rules(39). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H32: true New H50: true New H62: true New C2: true -S- Applied substitution rule get_64_lsb_f_rules(20). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H33: element(src, [src_offset + 1]) >= 0 New H47: spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8) >= 0 New H51: element(src, [src_offset + 2]) >= 0 New H59: spark__unsigned__shift_left_64(element(src, [src_offset + 2]), 16) >= 0 New H63: element(src, [src_offset + 3]) >= 0 New H71: spark__unsigned__shift_left_64(element(src, [src_offset + 3]), 24) >= 0 New C3: element(src, [src_offset + 4]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(21). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H34: element(src, [src_offset + 1]) <= 18446744073709551615 New H48: spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8) <= 18446744073709551615 New H52: element(src, [src_offset + 2]) <= 18446744073709551615 New H60: spark__unsigned__shift_left_64(element(src, [src_offset + 2]), 16) <= 18446744073709551615 New H64: element(src, [src_offset + 3]) <= 18446744073709551615 New H72: spark__unsigned__shift_left_64(element(src, [src_offset + 3]), 24) <= 18446744073709551615 New C4: element(src, [src_offset + 4]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H35: element(src, [src_offset + 1]) >= 0 New H41: element(src, [src_offset]) >= 0 New H53: element(src, [src_offset + 2]) >= 0 New H65: element(src, [src_offset + 3]) >= 0 New C5: element(src, [src_offset + 4]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H36: element(src, [src_offset + 1]) <= 18446744073709551615 New H42: element(src, [src_offset]) <= 18446744073709551615 New H54: element(src, [src_offset + 2]) <= 18446744073709551615 New H66: element(src, [src_offset + 3]) <= 18446744073709551615 New C6: element(src, [src_offset + 4]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(5). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H40: src_offset <= 2147483646 New H58: src_offset <= 2147483645 New H70: src_offset <= 2147483644 New C10: src_offset <= 2147483643 *** Proved C2: true -S- Applied substitution rule get_64_lsb_f_rules(4). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H39: src_offset >= - 2147483649 New H57: src_offset >= - 2147483650 New H69: src_offset >= - 2147483651 -S- Applied substitution rule get_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H9: src_offset >= 0 -S- Applied substitution rule get_64_lsb_f_rules(8). 
This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: src_offset + dst__index__subtype__1__last * 8 <= 2147483647 New H10: src_offset <= 2147483647 -S- Applied substitution rule get_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule get_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 255) -S- Applied substitution rule get_64_lsb_f_rules(38). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H31: true New H49: true New H61: true -S- Applied substitution rule get_64_lsb_f_rules(43). This was achieved by replacing all occurrences of spark__crypto__word_count_t__first by: 0. New H11: true -S- Applied substitution rule get_64_lsb_f_rules(44). This was achieved by replacing all occurrences of spark__crypto__word_count_t__last by: 268435455. New H12: true *** Proved C3: element(src, [src_offset + 4]) >= 0 using hypotheses H8, H23 & H24. *** Proved C4: element(src, [src_offset + 4]) <= 18446744073709551615 using hypotheses H8, H23 & H24. *** Proved C5: element(src, [src_offset + 4]) >= 0 using hypotheses H8, H23 & H24. *** Proved C6: element(src, [src_offset + 4]) <= 18446744073709551615 using hypotheses H8, H23 & H24. --- Eliminated hypothesis H13 (true-hypothesis). --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H16 (true-hypothesis). --- Eliminated hypothesis H30 (true-hypothesis). --- Eliminated hypothesis H32 (true-hypothesis). --- Eliminated hypothesis H37 (true-hypothesis). --- Eliminated hypothesis H38 (true-hypothesis). --- Eliminated hypothesis H43 (true-hypothesis). --- Eliminated hypothesis H44 (true-hypothesis). --- Eliminated hypothesis H50 (true-hypothesis). --- Eliminated hypothesis H55 (true-hypothesis). --- Eliminated hypothesis H56 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H67 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H31 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). --- Eliminated hypothesis H11 (true-hypothesis). --- Eliminated hypothesis H12 (true-hypothesis). --- Eliminated hypothesis H35 (duplicate of H33). --- Eliminated hypothesis H36 (duplicate of H34). --- Eliminated hypothesis H53 (duplicate of H51). --- Eliminated hypothesis H54 (duplicate of H52). --- Eliminated hypothesis H65 (duplicate of H63). --- Eliminated hypothesis H66 (duplicate of H64). --- Eliminated hypothesis H3 (redundant, given H6). --- Eliminated hypothesis H10 (redundant, given H40). --- Eliminated hypothesis H17 (redundant, given H15). --- Eliminated hypothesis H18 (redundant, given H6). --- Eliminated hypothesis H19 (redundant, given H15). --- Eliminated hypothesis H20 (redundant, given H6). --- Eliminated hypothesis H21 (redundant, given H15). --- Eliminated hypothesis H22 (redundant, given H6). --- Eliminated hypothesis H23 (redundant, given H15). --- Eliminated hypothesis H24 (redundant, given H6). 
--- Eliminated hypothesis H25 (redundant, given H15). --- Eliminated hypothesis H26 (redundant, given H6). --- Eliminated hypothesis H27 (redundant, given H15). --- Eliminated hypothesis H28 (redundant, given H6). --- Eliminated hypothesis H29 (redundant, given H15). --- Eliminated hypothesis H39 (redundant, given H9). --- Eliminated hypothesis H40 (redundant, given H58). --- Eliminated hypothesis H45 (redundant, given H2). --- Eliminated hypothesis H57 (redundant, given H9). --- Eliminated hypothesis H58 (redundant, given H70). --- Eliminated hypothesis H69 (redundant, given H9). +++ New H73: integer__size >= 0 +++ New H74: natural__size >= 0 +++ New H75: spark__unsigned__byte__size >= 0 +++ New H76: spark__unsigned__u64__size >= 0 +++ New H77: spark__unsigned__shift_count__size >= 0 +++ New H78: spark__crypto__word_count_t__size >= 0 +++ New H79: dst__index__subtype__1__first <= dst__index__subtype__1__last +++ New H80: src__index__subtype__1__first <= src__index__subtype__1__last +++ New H81: src__index__subtype__1__first >= 0 +++ New H82: src__index__subtype__1__last >= 0 +++ New H83: src__index__subtype__1__last <= 2147483647 +++ New H84: src__index__subtype__1__first <= 2147483647 +++ New H85: dst__index__subtype__1__first >= 0 +++ New H86: dst__index__subtype__1__last >= 0 +++ New H87: dst__index__subtype__1__last <= 268435455 +++ New H88: dst__index__subtype__1__first <= 268435455 *** Proved C10: src_offset <= 2147483643 using hypotheses H6 & H83. *** PROVED VC. @@@@@@@@@@ VC: procedure_get_64_lsb_first_12. @@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> spark__unsigned__u64__first <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) %%% Simplified H10 on reading formula in, to give: %%% H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). 
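
All of the VCs in this trace arise from the same pattern of code: eight consecutive bytes of src, starting at src_offset + dst_index * 8, are combined least-significant-byte first into one 64-bit word of dst, and each byte read, shift count and index computation contributes one group of conclusions C1 .. C10. The following is a purely hypothetical Ada sketch of that shape, using invented names and Interfaces types rather than the spark__unsigned and spark__crypto types named in the substitution rules; it is offered only to make the hypotheses and conclusions easier to read, not as the package's actual implementation.

   with Interfaces; use Interfaces;

   procedure Get_64_LSB_First_Sketch is
      --  Illustrative stand-ins for the byte and 64-bit word types whose
      --  bounds (255 and 18446744073709551615) appear in the rules above.
      type Byte_Seq is array (Natural range <>) of Unsigned_8;
      type Word_Seq is array (Natural range <>) of Unsigned_64;

      Src        : constant Byte_Seq (0 .. 15) := (others => 16#AB#);
      Dst        : Word_Seq (0 .. 1);
      Src_Offset : constant Natural := 0;
   begin
      for Dst_Index in Dst'Range loop
         declare
            Src_Index : constant Natural := Src_Offset + Dst_Index * 8;
         begin
            --  Each byte read Src (Src_Index + K) and each shift count 8 * K
            --  gives rise to one group of checks of the C1 .. C10 form.
            Dst (Dst_Index) :=
                 Unsigned_64 (Src (Src_Index))
              or Shift_Left (Unsigned_64 (Src (Src_Index + 1)), 8)
              or Shift_Left (Unsigned_64 (Src (Src_Index + 2)), 16)
              or Shift_Left (Unsigned_64 (Src (Src_Index + 3)), 24)
              or Shift_Left (Unsigned_64 (Src (Src_Index + 4)), 32)
              or Shift_Left (Unsigned_64 (Src (Src_Index + 5)), 40)
              or Shift_Left (Unsigned_64 (Src (Src_Index + 6)), 48)
              or Shift_Left (Unsigned_64 (Src (Src_Index + 7)), 56);
         end;
      end loop;
   end Get_64_LSB_First_Sketch;

Whether the real code combines the bytes with "or", "+" or "xor" is not visible in this log; only the shift counts and index offsets are.
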
%%% Simplified H29 on reading formula in, to give: %%% H29: src_index + 9 >= src__index__subtype__1__first %%% Simplified H30 on reading formula in, to give: %%% H30: src_index + 9 <= src__index__subtype__1__last %%% Simplified H31 on reading formula in, to give: %%% H31: src_index + 10 >= src__index__subtype__1__first %%% Simplified H32 on reading formula in, to give: %%% H32: src_index + 10 <= src__index__subtype__1__last %%% Simplified H33 on reading formula in, to give: %%% H33: src_index + 11 >= src__index__subtype__1__first %%% Simplified H34 on reading formula in, to give: %%% H34: src_index + 11 <= src__index__subtype__1__last %%% Simplified H35 on reading formula in, to give: %%% H35: src_index + 12 >= src__index__subtype__1__first %%% Simplified H36 on reading formula in, to give: %%% H36: src_index + 12 <= src__index__subtype__1__last %%% Simplified H37 on reading formula in, to give: %%% H37: src_index + 13 >= src__index__subtype__1__first %%% Simplified H38 on reading formula in, to give: %%% H38: src_index + 13 <= src__index__subtype__1__last %%% Simplified H39 on reading formula in, to give: %%% H39: src_index + 14 >= src__index__subtype__1__first %%% Simplified H40 on reading formula in, to give: %%% H40: src_index + 14 <= src__index__subtype__1__last %%% Simplified H41 on reading formula in, to give: %%% H41: src_index + 15 >= src__index__subtype__1__first %%% Simplified H42 on reading formula in, to give: %%% H42: src_index + 15 <= src__index__subtype__1__last %%% Simplified H45 on reading formula in, to give: %%% H45: element(src, [src_index + 9]) >= interfaces__unsigned_64__first %%% Simplified H46 on reading formula in, to give: %%% H46: element(src, [src_index + 9]) <= interfaces__unsigned_64__last %%% Simplified H47 on reading formula in, to give: %%% H47: element(src, [src_index + 9]) >= spark__unsigned__u64__first %%% Simplified H48 on reading formula in, to give: %%% H48: element(src, [src_index + 9]) <= spark__unsigned__u64__last --- Hypothesis H49 has been replaced by "true". (It is already present, as H29). --- Hypothesis H50 has been replaced by "true". (It is already present, as H30). %%% Simplified H51 on reading formula in, to give: %%% H51: src_index + 9 >= integer__base__first %%% Simplified H52 on reading formula in, to give: %%% H52: src_index + 9 <= integer__base__last --- Hypothesis H55 has been replaced by "true". (It is already present, as H27). --- Hypothesis H56 has been replaced by "true". (It is already present, as H28). %%% Simplified H59 on reading formula in, to give: %%% H59: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) >= interfaces__unsigned_64__first %%% Simplified H60 on reading formula in, to give: %%% H60: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) <= interfaces__unsigned_64__last %%% Simplified H63 on reading formula in, to give: %%% H63: element(src, [src_index + 10]) >= interfaces__unsigned_64__first %%% Simplified H64 on reading formula in, to give: %%% H64: element(src, [src_index + 10]) <= interfaces__unsigned_64__last %%% Simplified H65 on reading formula in, to give: %%% H65: element(src, [src_index + 10]) >= spark__unsigned__u64__first %%% Simplified H66 on reading formula in, to give: %%% H66: element(src, [src_index + 10]) <= spark__unsigned__u64__last --- Hypothesis H67 has been replaced by "true". (It is already present, as H31). --- Hypothesis H68 has been replaced by "true". (It is already present, as H32). 
%%% Simplified H69 on reading formula in, to give: %%% H69: src_index + 10 >= integer__base__first %%% Simplified H70 on reading formula in, to give: %%% H70: src_index + 10 <= integer__base__last %%% Simplified H71 on reading formula in, to give: %%% H71: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) >= interfaces__unsigned_64__first %%% Simplified H72 on reading formula in, to give: %%% H72: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) <= interfaces__unsigned_64__last %%% Simplified H75 on reading formula in, to give: %%% H75: element(src, [src_index + 11]) >= interfaces__unsigned_64__first %%% Simplified H76 on reading formula in, to give: %%% H76: element(src, [src_index + 11]) <= interfaces__unsigned_64__last %%% Simplified H77 on reading formula in, to give: %%% H77: element(src, [src_index + 11]) >= spark__unsigned__u64__first %%% Simplified H78 on reading formula in, to give: %%% H78: element(src, [src_index + 11]) <= spark__unsigned__u64__last --- Hypothesis H79 has been replaced by "true". (It is already present, as H33). --- Hypothesis H80 has been replaced by "true". (It is already present, as H34). %%% Simplified H81 on reading formula in, to give: %%% H81: src_index + 11 >= integer__base__first %%% Simplified H82 on reading formula in, to give: %%% H82: src_index + 11 <= integer__base__last %%% Simplified H83 on reading formula in, to give: %%% H83: spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24) >= interfaces__unsigned_64__first %%% Simplified H84 on reading formula in, to give: %%% H84: spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24) <= interfaces__unsigned_64__last %%% Simplified C3 on reading formula in, to give: %%% C3: element(src, [src_index + 12]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(src, [src_index + 12]) <= interfaces__unsigned_64__last %%% Simplified C5 on reading formula in, to give: %%% C5: element(src, [src_index + 12]) >= spark__unsigned__u64__first %%% Simplified C6 on reading formula in, to give: %%% C6: element(src, [src_index + 12]) <= spark__unsigned__u64__last %%% Simplified C7 on reading formula in, to give: %%% C7: src_index + 12 >= src__index__subtype__1__first %%% Simplified C8 on reading formula in, to give: %%% C8: src_index + 12 <= src__index__subtype__1__last %%% Simplified C9 on reading formula in, to give: %%% C9: src_index + 12 >= integer__base__first %%% Simplified C10 on reading formula in, to give: %%% C10: src_index + 12 <= integer__base__last *** Proved C1: 32 >= spark__unsigned__shift_count__first using hypothesis H43. *** Proved C7: src_index + 12 >= src__index__subtype__1__first using hypothesis H35. *** Proved C8: src_index + 12 <= src__index__subtype__1__last using hypothesis H36. *** Proved C9: src_index + 12 >= integer__base__first using hypothesis H51. -S- Applied substitution rule get_64_lsb_f_rules(39). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H44: true New H62: true New H74: true New C2: true -S- Applied substitution rule get_64_lsb_f_rules(20). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. 
New H45: element(src, [src_index + 9]) >= 0 New H59: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) >= 0 New H63: element(src, [src_index + 10]) >= 0 New H71: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) >= 0 New H75: element(src, [src_index + 11]) >= 0 New H83: spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24) >= 0 New C3: element(src, [src_index + 12]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(21). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H46: element(src, [src_index + 9]) <= 18446744073709551615 New H60: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) <= 18446744073709551615 New H64: element(src, [src_index + 10]) <= 18446744073709551615 New H72: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) <= 18446744073709551615 New H76: element(src, [src_index + 11]) <= 18446744073709551615 New H84: spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24) <= 18446744073709551615 New C4: element(src, [src_index + 12]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> 0 <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) New H47: element(src, [src_index + 9]) >= 0 New H53: element(src, [src_index + 8]) >= 0 New H65: element(src, [src_index + 10]) >= 0 New H77: element(src, [src_index + 11]) >= 0 New C5: element(src, [src_index + 12]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H48: element(src, [src_index + 9]) <= 18446744073709551615 New H54: element(src, [src_index + 8]) <= 18446744073709551615 New H66: element(src, [src_index + 10]) <= 18446744073709551615 New H78: element(src, [src_index + 11]) <= 18446744073709551615 New H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> 0 <= element(dst, [i_]) and element(dst, [i_]) <= 18446744073709551615) New C6: element(src, [src_index + 12]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(5). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H52: src_index <= 2147483638 New H70: src_index <= 2147483637 New H82: src_index <= 2147483636 New C10: src_index <= 2147483635 *** Proved C2: true >>> Restructured hypothesis H20 into: >>> H20: dst_index <> dst__index__subtype__1__last --- Attempted addition of new hypothesis: dst_index + 1 <= natural__last eliminated: this already exists (as H21). +++ Using "A->B, A |- B" on hypotheses H8 & H20 yields a new hypothesis: +++ H21: dst_index + 1 <= natural__last --- Attempted addition of new hypothesis: src_index + 8 <= natural__last eliminated: this already exists (as H24). +++ Using "A->B, A |- B" on hypotheses H9 & H20 yields a new hypothesis: +++ H24: src_index + 8 <= natural__last -S- Applied substitution rule get_64_lsb_f_rules(4). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H51: src_index >= - 2147483657 New H69: src_index >= - 2147483658 New H81: src_index >= - 2147483659 -S- Applied substitution rule get_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. 
New H11: src_offset >= 0 New H25: src_index >= - 8 -S- Applied substitution rule get_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H8: dst_index <> dst__index__subtype__1__last -> dst_index <= 2147483646 New H9: dst_index <> dst__index__subtype__1__last -> src_index <= 2147483639 New H12: src_offset <= 2147483647 New H19: src_offset + dst__index__subtype__1__last * 8 <= 2147483647 New H21: dst_index <= 2147483646 New H24: src_index <= 2147483639 -S- Applied substitution rule get_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule get_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 255) -S- Applied substitution rule get_64_lsb_f_rules(38). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H43: true New H61: true New H73: true -S- Applied substitution rule get_64_lsb_f_rules(43). This was achieved by replacing all occurrences of spark__crypto__word_count_t__first by: 0. New H22: dst_index >= - 1 -S- Applied substitution rule get_64_lsb_f_rules(44). This was achieved by replacing all occurrences of spark__crypto__word_count_t__last by: 268435455. New H4: dst__index__subtype__1__last <= 268435455 New H23: dst_index <= 268435454 %%% Hypotheses H3 & H20 together imply that dst_index < dst__index__subtype__1__last. H3 & H20 have therefore been deleted and a new H85 added to this effect. *** Proved C3: element(src, [src_index + 12]) >= 0 using hypotheses H10, H35 & H36. *** Proved C4: element(src, [src_index + 12]) <= 18446744073709551615 using hypotheses H10, H35 & H36. *** Proved C5: element(src, [src_index + 12]) >= 0 using hypotheses H10, H35 & H36. *** Proved C6: element(src, [src_index + 12]) <= 18446744073709551615 using hypotheses H10, H35 & H36. --- Eliminated hypothesis H26 (true-hypothesis). --- Eliminated hypothesis H44 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H50 (true-hypothesis). --- Eliminated hypothesis H55 (true-hypothesis). --- Eliminated hypothesis H56 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H67 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H74 (true-hypothesis). --- Eliminated hypothesis H79 (true-hypothesis). --- Eliminated hypothesis H80 (true-hypothesis). --- Eliminated hypothesis H43 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). --- Eliminated hypothesis H73 (true-hypothesis). --- Eliminated hypothesis H47 (duplicate of H45). --- Eliminated hypothesis H48 (duplicate of H46). --- Eliminated hypothesis H65 (duplicate of H63). --- Eliminated hypothesis H66 (duplicate of H64). --- Eliminated hypothesis H77 (duplicate of H75). --- Eliminated hypothesis H78 (duplicate of H76). --- Eliminated hypothesis H85 (duplicate of H58). 
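
The '>>> Using "A->B, A |- B"' steps that follow are ordinary modus ponens: hypotheses H8 and H9 are conditional on the loop not yet having reached its final index, and once H20/H85 establish exactly that, the bounds they guard hold unconditionally. A minimal sketch, with invented names, of the kind of loop structure that gives rise to such conditional hypotheses:

   procedure Conditional_Hypothesis_Sketch is
      type Word_Seq is array (Natural range 0 .. 7) of Long_Long_Integer;
      Dst       : Word_Seq := (others => 0);
      Dst_Index : Natural  := Dst'First;
   begin
      loop
         Dst (Dst_Index) := 1;
         exit when Dst_Index = Dst'Last;  --  not exiting is the H20/H85 fact
         --  On this path Dst_Index /= Dst'Last, so the increment below stays
         --  within Natural; this is the content of the simplified H8/H21.
         pragma Assert (Dst_Index + 1 <= Natural'Last);
         Dst_Index := Dst_Index + 1;
      end loop;
   end Conditional_Hypothesis_Sketch;
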
>>> Using "A->B, A |- B" on H8, given H85, we simplify the former to: >>> H8: dst_index <= 2147483646 >>> Using "A->B, A |- B" on H9, given H85, we simplify the former to: >>> H9: src_index <= 2147483639 --- Eliminated hypothesis H15 (redundant, given H18). --- Eliminated hypothesis H21 (redundant, given H23). --- Eliminated hypothesis H22 (redundant, given H2 & H14). --- Eliminated hypothesis H24 (redundant, given H52). --- Eliminated hypothesis H28 (redundant, given H30). --- Eliminated hypothesis H29 (redundant, given H27). --- Eliminated hypothesis H30 (redundant, given H32). --- Eliminated hypothesis H31 (redundant, given H27). --- Eliminated hypothesis H32 (redundant, given H34). --- Eliminated hypothesis H33 (redundant, given H27). --- Eliminated hypothesis H34 (redundant, given H36). --- Eliminated hypothesis H35 (redundant, given H27). --- Eliminated hypothesis H36 (redundant, given H38). --- Eliminated hypothesis H37 (redundant, given H27). --- Eliminated hypothesis H38 (redundant, given H40). --- Eliminated hypothesis H39 (redundant, given H27). --- Eliminated hypothesis H40 (redundant, given H42). --- Eliminated hypothesis H41 (redundant, given H27). --- Eliminated hypothesis H51 (redundant, given H25). --- Eliminated hypothesis H52 (redundant, given H70). --- Eliminated hypothesis H57 (redundant, given H2). --- Eliminated hypothesis H69 (redundant, given H25). --- Eliminated hypothesis H70 (redundant, given H82). --- Eliminated hypothesis H81 (redundant, given H25). -S- Eliminated hypothesis H5. This was achieved by replacing all occurrences of src_index by: src_offset + dst_index * 8. New H6: src_offset + dst_index * 8 >= src_offset New H7: src_offset + dst_index * 8 <= src_offset + dst__index__subtype__1__last * 8 New H27: src_offset + dst_index * 8 + 8 >= src__index__subtype__1__first New H42: src_offset + dst_index * 8 + 15 <= src__index__subtype__1__last New H45: element(src, [src_offset + dst_index * 8 + 9]) >= 0 New H46: element(src, [src_offset + dst_index * 8 + 9]) <= 18446744073709551615 New H53: element(src, [src_offset + dst_index * 8 + 8]) >= 0 New H54: element(src, [src_offset + dst_index * 8 + 8]) <= 18446744073709551615 New H59: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 9]), 8) >= 0 New H60: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 9]), 8) <= 18446744073709551615 New H63: element(src, [src_offset + dst_index * 8 + 10]) >= 0 New H64: element(src, [src_offset + dst_index * 8 + 10]) <= 18446744073709551615 New H71: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 10]), 16) >= 0 New H72: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 10]), 16) <= 18446744073709551615 New H75: element(src, [src_offset + dst_index * 8 + 11]) >= 0 New H76: element(src, [src_offset + dst_index * 8 + 11]) <= 18446744073709551615 New H82: src_offset + dst_index * 8 <= 2147483636 New H83: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 11]), 24) >= 0 New H84: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 11]), 24) <= 18446744073709551615 New H25: src_offset + dst_index * 8 >= - 8 New H9: src_offset + dst_index * 8 <= 2147483639 New C10: src_offset + dst_index * 8 <= 2147483635 +++ New H86: integer__size >= 0 +++ New H87: natural__size >= 0 +++ New H88: spark__unsigned__byte__size >= 0 +++ New H89: spark__unsigned__u64__size >= 0 +++ New H90: spark__unsigned__shift_count__size >= 0 +++ New H91: 
spark__crypto__word_count_t__size >= 0 +++ New H92: dst__index__subtype__1__first <= dst__index__subtype__1__last +++ New H93: src__index__subtype__1__first <= src__index__subtype__1__last +++ New H94: src__index__subtype__1__first >= 0 +++ New H95: src__index__subtype__1__last >= 0 +++ New H96: src__index__subtype__1__last <= 2147483647 +++ New H97: src__index__subtype__1__first <= 2147483647 +++ New H98: dst__index__subtype__1__first >= 0 +++ New H99: dst__index__subtype__1__last >= 0 --- Attempted addition of new hypothesis: dst__index__subtype__1__last <= 268435455 eliminated: this already exists (as H4). +++ New H4: dst__index__subtype__1__last <= 268435455 +++ New H100: dst__index__subtype__1__first <= 268435455 *** Proved C10: src_offset + dst_index * 8 <= 2147483635 using hypotheses H42 & H96. *** PROVED VC. @@@@@@@@@@ VC: procedure_get_64_lsb_first_13. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H13 has been replaced by "true". (It is already present, as H9) . --- Hypothesis H14 has been replaced by "true". (It is already present, as H10). --- Hypothesis H16 has been replaced by "true". (It is already present, as H3) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H37 has been replaced by "true". (It is already present, as H17). --- Hypothesis H38 has been replaced by "true". (It is already present, as H18). --- Hypothesis H43 has been replaced by "true". (It is already present, as H15). --- Hypothesis H44 has been replaced by "true". (It is already present, as H3) . --- Hypothesis H55 has been replaced by "true". (It is already present, as H19). --- Hypothesis H56 has been replaced by "true". (It is already present, as H20). --- Hypothesis H67 has been replaced by "true". (It is already present, as H21). --- Hypothesis H68 has been replaced by "true". (It is already present, as H22). --- Hypothesis H79 has been replaced by "true". (It is already present, as H23). --- Hypothesis H80 has been replaced by "true". (It is already present, as H24). *** Proved C1: 40 >= spark__unsigned__shift_count__first using hypothesis H31. *** Proved C7: src_offset + 5 >= src__index__subtype__1__first using hypothesis H25. *** Proved C8: src_offset + 5 <= src__index__subtype__1__last using hypothesis H26. *** Proved C9: src_offset + 5 >= integer__base__first using hypothesis H39. -S- Applied substitution rule get_64_lsb_f_rules(39). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H32: true New H50: true New H62: true New H74: true New C2: true -S- Applied substitution rule get_64_lsb_f_rules(20). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. 
New H33: element(src, [src_offset + 1]) >= 0 New H47: spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8) >= 0 New H51: element(src, [src_offset + 2]) >= 0 New H59: spark__unsigned__shift_left_64(element(src, [src_offset + 2]), 16) >= 0 New H63: element(src, [src_offset + 3]) >= 0 New H71: spark__unsigned__shift_left_64(element(src, [src_offset + 3]), 24) >= 0 New H75: element(src, [src_offset + 4]) >= 0 New H83: spark__unsigned__shift_left_64(element(src, [src_offset + 4]), 32) >= 0 New C3: element(src, [src_offset + 5]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(21). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H34: element(src, [src_offset + 1]) <= 18446744073709551615 New H48: spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8) <= 18446744073709551615 New H52: element(src, [src_offset + 2]) <= 18446744073709551615 New H60: spark__unsigned__shift_left_64(element(src, [src_offset + 2]), 16) <= 18446744073709551615 New H64: element(src, [src_offset + 3]) <= 18446744073709551615 New H72: spark__unsigned__shift_left_64(element(src, [src_offset + 3]), 24) <= 18446744073709551615 New H76: element(src, [src_offset + 4]) <= 18446744073709551615 New H84: spark__unsigned__shift_left_64(element(src, [src_offset + 4]), 32) <= 18446744073709551615 New C4: element(src, [src_offset + 5]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H35: element(src, [src_offset + 1]) >= 0 New H41: element(src, [src_offset]) >= 0 New H53: element(src, [src_offset + 2]) >= 0 New H65: element(src, [src_offset + 3]) >= 0 New H77: element(src, [src_offset + 4]) >= 0 New C5: element(src, [src_offset + 5]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H36: element(src, [src_offset + 1]) <= 18446744073709551615 New H42: element(src, [src_offset]) <= 18446744073709551615 New H54: element(src, [src_offset + 2]) <= 18446744073709551615 New H66: element(src, [src_offset + 3]) <= 18446744073709551615 New H78: element(src, [src_offset + 4]) <= 18446744073709551615 New C6: element(src, [src_offset + 5]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(5). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H40: src_offset <= 2147483646 New H58: src_offset <= 2147483645 New H70: src_offset <= 2147483644 New H82: src_offset <= 2147483643 New C10: src_offset <= 2147483642 *** Proved C2: true -S- Applied substitution rule get_64_lsb_f_rules(4). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H39: src_offset >= - 2147483649 New H57: src_offset >= - 2147483650 New H69: src_offset >= - 2147483651 New H81: src_offset >= - 2147483652 -S- Applied substitution rule get_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H9: src_offset >= 0 -S- Applied substitution rule get_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: src_offset + dst__index__subtype__1__last * 8 <= 2147483647 New H10: src_offset <= 2147483647 -S- Applied substitution rule get_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. 
New H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule get_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 255) -S- Applied substitution rule get_64_lsb_f_rules(38). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H31: true New H49: true New H61: true New H73: true -S- Applied substitution rule get_64_lsb_f_rules(43). This was achieved by replacing all occurrences of spark__crypto__word_count_t__first by: 0. New H11: true -S- Applied substitution rule get_64_lsb_f_rules(44). This was achieved by replacing all occurrences of spark__crypto__word_count_t__last by: 268435455. New H12: true *** Proved C3: element(src, [src_offset + 5]) >= 0 using hypotheses H8, H25 & H26. *** Proved C4: element(src, [src_offset + 5]) <= 18446744073709551615 using hypotheses H8, H25 & H26. *** Proved C5: element(src, [src_offset + 5]) >= 0 using hypotheses H8, H25 & H26. *** Proved C6: element(src, [src_offset + 5]) <= 18446744073709551615 using hypotheses H8, H25 & H26. --- Eliminated hypothesis H13 (true-hypothesis). --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H16 (true-hypothesis). --- Eliminated hypothesis H30 (true-hypothesis). --- Eliminated hypothesis H32 (true-hypothesis). --- Eliminated hypothesis H37 (true-hypothesis). --- Eliminated hypothesis H38 (true-hypothesis). --- Eliminated hypothesis H43 (true-hypothesis). --- Eliminated hypothesis H44 (true-hypothesis). --- Eliminated hypothesis H50 (true-hypothesis). --- Eliminated hypothesis H55 (true-hypothesis). --- Eliminated hypothesis H56 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H67 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H74 (true-hypothesis). --- Eliminated hypothesis H79 (true-hypothesis). --- Eliminated hypothesis H80 (true-hypothesis). --- Eliminated hypothesis H31 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). --- Eliminated hypothesis H73 (true-hypothesis). --- Eliminated hypothesis H11 (true-hypothesis). --- Eliminated hypothesis H12 (true-hypothesis). --- Eliminated hypothesis H35 (duplicate of H33). --- Eliminated hypothesis H36 (duplicate of H34). --- Eliminated hypothesis H53 (duplicate of H51). --- Eliminated hypothesis H54 (duplicate of H52). --- Eliminated hypothesis H65 (duplicate of H63). --- Eliminated hypothesis H66 (duplicate of H64). --- Eliminated hypothesis H77 (duplicate of H75). --- Eliminated hypothesis H78 (duplicate of H76). --- Eliminated hypothesis H3 (redundant, given H6). --- Eliminated hypothesis H10 (redundant, given H40). --- Eliminated hypothesis H17 (redundant, given H15). --- Eliminated hypothesis H18 (redundant, given H6). --- Eliminated hypothesis H19 (redundant, given H15). --- Eliminated hypothesis H20 (redundant, given H6). --- Eliminated hypothesis H21 (redundant, given H15). --- Eliminated hypothesis H22 (redundant, given H6). --- Eliminated hypothesis H23 (redundant, given H15). --- Eliminated hypothesis H24 (redundant, given H6). 
--- Eliminated hypothesis H25 (redundant, given H15). --- Eliminated hypothesis H26 (redundant, given H6). --- Eliminated hypothesis H27 (redundant, given H15). --- Eliminated hypothesis H28 (redundant, given H6). --- Eliminated hypothesis H29 (redundant, given H15). --- Eliminated hypothesis H39 (redundant, given H9). --- Eliminated hypothesis H40 (redundant, given H58). --- Eliminated hypothesis H45 (redundant, given H2). --- Eliminated hypothesis H57 (redundant, given H9). --- Eliminated hypothesis H58 (redundant, given H70). --- Eliminated hypothesis H69 (redundant, given H9). --- Eliminated hypothesis H70 (redundant, given H82). --- Eliminated hypothesis H81 (redundant, given H9). +++ New H85: integer__size >= 0 +++ New H86: natural__size >= 0 +++ New H87: spark__unsigned__byte__size >= 0 +++ New H88: spark__unsigned__u64__size >= 0 +++ New H89: spark__unsigned__shift_count__size >= 0 +++ New H90: spark__crypto__word_count_t__size >= 0 +++ New H91: dst__index__subtype__1__first <= dst__index__subtype__1__last +++ New H92: src__index__subtype__1__first <= src__index__subtype__1__last +++ New H93: src__index__subtype__1__first >= 0 +++ New H94: src__index__subtype__1__last >= 0 +++ New H95: src__index__subtype__1__last <= 2147483647 +++ New H96: src__index__subtype__1__first <= 2147483647 +++ New H97: dst__index__subtype__1__first >= 0 +++ New H98: dst__index__subtype__1__last >= 0 +++ New H99: dst__index__subtype__1__last <= 268435455 +++ New H100: dst__index__subtype__1__first <= 268435455 *** Proved C10: src_offset <= 2147483642 using hypotheses H6 & H95. *** PROVED VC. @@@@@@@@@@ VC: procedure_get_64_lsb_first_14. @@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> spark__unsigned__u64__first <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) %%% Simplified H10 on reading formula in, to give: %%% H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). 
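
The two quantified hypotheses just normalised, H1 and H10, carry all of the range information used later in this VC: every dst element written so far is a 64-bit word, and every src element is a byte. Conclusions such as C3 .. C6 further down are discharged simply by instantiating H10 at an index already known to be in range (H37 and H38 below, for src_index + 13), since 0 .. 255 lies well inside 0 .. 18446744073709551615. A small self-contained illustration with invented names:

   with Interfaces; use Interfaces;

   procedure Forall_Instantiation_Sketch is
      type Byte_Seq is array (Natural range <>) of Unsigned_8;
      Src : constant Byte_Seq (0 .. 31) := (others => 201);
   begin
      --  H10 pattern: every in-range element of Src is a byte, 0 .. 255.
      for I in Src'Range loop
         pragma Assert (Unsigned_64 (Src (I)) <= 255);
         --  Hence the C4/C6 pattern holds a fortiori:
         pragma Assert (Unsigned_64 (Src (I)) <= 18446744073709551615);
      end loop;
   end Forall_Instantiation_Sketch;
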
%%% Simplified H29 on reading formula in, to give: %%% H29: src_index + 9 >= src__index__subtype__1__first %%% Simplified H30 on reading formula in, to give: %%% H30: src_index + 9 <= src__index__subtype__1__last %%% Simplified H31 on reading formula in, to give: %%% H31: src_index + 10 >= src__index__subtype__1__first %%% Simplified H32 on reading formula in, to give: %%% H32: src_index + 10 <= src__index__subtype__1__last %%% Simplified H33 on reading formula in, to give: %%% H33: src_index + 11 >= src__index__subtype__1__first %%% Simplified H34 on reading formula in, to give: %%% H34: src_index + 11 <= src__index__subtype__1__last %%% Simplified H35 on reading formula in, to give: %%% H35: src_index + 12 >= src__index__subtype__1__first %%% Simplified H36 on reading formula in, to give: %%% H36: src_index + 12 <= src__index__subtype__1__last %%% Simplified H37 on reading formula in, to give: %%% H37: src_index + 13 >= src__index__subtype__1__first %%% Simplified H38 on reading formula in, to give: %%% H38: src_index + 13 <= src__index__subtype__1__last %%% Simplified H39 on reading formula in, to give: %%% H39: src_index + 14 >= src__index__subtype__1__first %%% Simplified H40 on reading formula in, to give: %%% H40: src_index + 14 <= src__index__subtype__1__last %%% Simplified H41 on reading formula in, to give: %%% H41: src_index + 15 >= src__index__subtype__1__first %%% Simplified H42 on reading formula in, to give: %%% H42: src_index + 15 <= src__index__subtype__1__last %%% Simplified H45 on reading formula in, to give: %%% H45: element(src, [src_index + 9]) >= interfaces__unsigned_64__first %%% Simplified H46 on reading formula in, to give: %%% H46: element(src, [src_index + 9]) <= interfaces__unsigned_64__last %%% Simplified H47 on reading formula in, to give: %%% H47: element(src, [src_index + 9]) >= spark__unsigned__u64__first %%% Simplified H48 on reading formula in, to give: %%% H48: element(src, [src_index + 9]) <= spark__unsigned__u64__last --- Hypothesis H49 has been replaced by "true". (It is already present, as H29). --- Hypothesis H50 has been replaced by "true". (It is already present, as H30). %%% Simplified H51 on reading formula in, to give: %%% H51: src_index + 9 >= integer__base__first %%% Simplified H52 on reading formula in, to give: %%% H52: src_index + 9 <= integer__base__last --- Hypothesis H55 has been replaced by "true". (It is already present, as H27). --- Hypothesis H56 has been replaced by "true". (It is already present, as H28). %%% Simplified H59 on reading formula in, to give: %%% H59: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) >= interfaces__unsigned_64__first %%% Simplified H60 on reading formula in, to give: %%% H60: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) <= interfaces__unsigned_64__last %%% Simplified H63 on reading formula in, to give: %%% H63: element(src, [src_index + 10]) >= interfaces__unsigned_64__first %%% Simplified H64 on reading formula in, to give: %%% H64: element(src, [src_index + 10]) <= interfaces__unsigned_64__last %%% Simplified H65 on reading formula in, to give: %%% H65: element(src, [src_index + 10]) >= spark__unsigned__u64__first %%% Simplified H66 on reading formula in, to give: %%% H66: element(src, [src_index + 10]) <= spark__unsigned__u64__last --- Hypothesis H67 has been replaced by "true". (It is already present, as H31). --- Hypothesis H68 has been replaced by "true". (It is already present, as H32). 
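
The figures 268435455 and 2147483647 introduced repeatedly by the substitution rules are spark__crypto__word_count_t__last (2**28 - 1) and integer__base__last (2**31 - 1). Their relationship is presumably what keeps the index arithmetic src_offset + dst_index * 8 free of overflow: even the largest word count scaled by 8 stays below integer__base__last, and hypotheses such as H4 and H19 record these bounds directly once the numeric values are substituted in below. A quick concrete check of that arithmetic, with invented names:

   procedure Word_Count_Bound_Sketch is
      Word_Count_Last : constant Long_Long_Integer := 268435455;   --  2**28 - 1
      Integer_Last    : constant Long_Long_Integer := 2147483647;  --  2**31 - 1
   begin
      pragma Assert (Word_Count_Last * 8 = 2147483640);
      pragma Assert (Word_Count_Last * 8 <= Integer_Last);
   end Word_Count_Bound_Sketch;
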
%%% Simplified H69 on reading formula in, to give: %%% H69: src_index + 10 >= integer__base__first %%% Simplified H70 on reading formula in, to give: %%% H70: src_index + 10 <= integer__base__last %%% Simplified H71 on reading formula in, to give: %%% H71: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) >= interfaces__unsigned_64__first %%% Simplified H72 on reading formula in, to give: %%% H72: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) <= interfaces__unsigned_64__last %%% Simplified H75 on reading formula in, to give: %%% H75: element(src, [src_index + 11]) >= interfaces__unsigned_64__first %%% Simplified H76 on reading formula in, to give: %%% H76: element(src, [src_index + 11]) <= interfaces__unsigned_64__last %%% Simplified H77 on reading formula in, to give: %%% H77: element(src, [src_index + 11]) >= spark__unsigned__u64__first %%% Simplified H78 on reading formula in, to give: %%% H78: element(src, [src_index + 11]) <= spark__unsigned__u64__last --- Hypothesis H79 has been replaced by "true". (It is already present, as H33). --- Hypothesis H80 has been replaced by "true". (It is already present, as H34). %%% Simplified H81 on reading formula in, to give: %%% H81: src_index + 11 >= integer__base__first %%% Simplified H82 on reading formula in, to give: %%% H82: src_index + 11 <= integer__base__last %%% Simplified H83 on reading formula in, to give: %%% H83: spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24) >= interfaces__unsigned_64__first %%% Simplified H84 on reading formula in, to give: %%% H84: spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24) <= interfaces__unsigned_64__last %%% Simplified H87 on reading formula in, to give: %%% H87: element(src, [src_index + 12]) >= interfaces__unsigned_64__first %%% Simplified H88 on reading formula in, to give: %%% H88: element(src, [src_index + 12]) <= interfaces__unsigned_64__last %%% Simplified H89 on reading formula in, to give: %%% H89: element(src, [src_index + 12]) >= spark__unsigned__u64__first %%% Simplified H90 on reading formula in, to give: %%% H90: element(src, [src_index + 12]) <= spark__unsigned__u64__last --- Hypothesis H91 has been replaced by "true". (It is already present, as H35). --- Hypothesis H92 has been replaced by "true". (It is already present, as H36). 
%%% Simplified H93 on reading formula in, to give: %%% H93: src_index + 12 >= integer__base__first %%% Simplified H94 on reading formula in, to give: %%% H94: src_index + 12 <= integer__base__last %%% Simplified H95 on reading formula in, to give: %%% H95: spark__unsigned__shift_left_64(element(src, [src_index + 12]), 32) >= interfaces__unsigned_64__first %%% Simplified H96 on reading formula in, to give: %%% H96: spark__unsigned__shift_left_64(element(src, [src_index + 12]), 32) <= interfaces__unsigned_64__last %%% Simplified C3 on reading formula in, to give: %%% C3: element(src, [src_index + 13]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(src, [src_index + 13]) <= interfaces__unsigned_64__last %%% Simplified C5 on reading formula in, to give: %%% C5: element(src, [src_index + 13]) >= spark__unsigned__u64__first %%% Simplified C6 on reading formula in, to give: %%% C6: element(src, [src_index + 13]) <= spark__unsigned__u64__last %%% Simplified C7 on reading formula in, to give: %%% C7: src_index + 13 >= src__index__subtype__1__first %%% Simplified C8 on reading formula in, to give: %%% C8: src_index + 13 <= src__index__subtype__1__last %%% Simplified C9 on reading formula in, to give: %%% C9: src_index + 13 >= integer__base__first %%% Simplified C10 on reading formula in, to give: %%% C10: src_index + 13 <= integer__base__last *** Proved C1: 40 >= spark__unsigned__shift_count__first using hypothesis H43. *** Proved C7: src_index + 13 >= src__index__subtype__1__first using hypothesis H37. *** Proved C8: src_index + 13 <= src__index__subtype__1__last using hypothesis H38. *** Proved C9: src_index + 13 >= integer__base__first using hypothesis H51. -S- Applied substitution rule get_64_lsb_f_rules(39). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H44: true New H62: true New H74: true New H86: true New C2: true -S- Applied substitution rule get_64_lsb_f_rules(20). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H45: element(src, [src_index + 9]) >= 0 New H59: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) >= 0 New H63: element(src, [src_index + 10]) >= 0 New H71: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) >= 0 New H75: element(src, [src_index + 11]) >= 0 New H83: spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24) >= 0 New H87: element(src, [src_index + 12]) >= 0 New H95: spark__unsigned__shift_left_64(element(src, [src_index + 12]), 32) >= 0 New C3: element(src, [src_index + 13]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(21). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. 
New H46: element(src, [src_index + 9]) <= 18446744073709551615 New H60: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) <= 18446744073709551615 New H64: element(src, [src_index + 10]) <= 18446744073709551615 New H72: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) <= 18446744073709551615 New H76: element(src, [src_index + 11]) <= 18446744073709551615 New H84: spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24) <= 18446744073709551615 New H88: element(src, [src_index + 12]) <= 18446744073709551615 New H96: spark__unsigned__shift_left_64(element(src, [src_index + 12]), 32) <= 18446744073709551615 New C4: element(src, [src_index + 13]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> 0 <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) New H47: element(src, [src_index + 9]) >= 0 New H53: element(src, [src_index + 8]) >= 0 New H65: element(src, [src_index + 10]) >= 0 New H77: element(src, [src_index + 11]) >= 0 New H89: element(src, [src_index + 12]) >= 0 New C5: element(src, [src_index + 13]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H48: element(src, [src_index + 9]) <= 18446744073709551615 New H54: element(src, [src_index + 8]) <= 18446744073709551615 New H66: element(src, [src_index + 10]) <= 18446744073709551615 New H78: element(src, [src_index + 11]) <= 18446744073709551615 New H90: element(src, [src_index + 12]) <= 18446744073709551615 New H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> 0 <= element(dst, [i_]) and element(dst, [i_]) <= 18446744073709551615) New C6: element(src, [src_index + 13]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(5). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H52: src_index <= 2147483638 New H70: src_index <= 2147483637 New H82: src_index <= 2147483636 New H94: src_index <= 2147483635 New C10: src_index <= 2147483634 *** Proved C2: true >>> Restructured hypothesis H20 into: >>> H20: dst_index <> dst__index__subtype__1__last --- Attempted addition of new hypothesis: dst_index + 1 <= natural__last eliminated: this already exists (as H21). +++ Using "A->B, A |- B" on hypotheses H8 & H20 yields a new hypothesis: +++ H21: dst_index + 1 <= natural__last --- Attempted addition of new hypothesis: src_index + 8 <= natural__last eliminated: this already exists (as H24). +++ Using "A->B, A |- B" on hypotheses H9 & H20 yields a new hypothesis: +++ H24: src_index + 8 <= natural__last -S- Applied substitution rule get_64_lsb_f_rules(4). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H51: src_index >= - 2147483657 New H69: src_index >= - 2147483658 New H81: src_index >= - 2147483659 New H93: src_index >= - 2147483660 -S- Applied substitution rule get_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H11: src_offset >= 0 New H25: src_index >= - 8 -S- Applied substitution rule get_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. 
New H8: dst_index <> dst__index__subtype__1__last -> dst_index <= 2147483646 New H9: dst_index <> dst__index__subtype__1__last -> src_index <= 2147483639 New H12: src_offset <= 2147483647 New H19: src_offset + dst__index__subtype__1__last * 8 <= 2147483647 New H21: dst_index <= 2147483646 New H24: src_index <= 2147483639 -S- Applied substitution rule get_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule get_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 255) -S- Applied substitution rule get_64_lsb_f_rules(38). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H43: true New H61: true New H73: true New H85: true -S- Applied substitution rule get_64_lsb_f_rules(43). This was achieved by replacing all occurrences of spark__crypto__word_count_t__first by: 0. New H22: dst_index >= - 1 -S- Applied substitution rule get_64_lsb_f_rules(44). This was achieved by replacing all occurrences of spark__crypto__word_count_t__last by: 268435455. New H4: dst__index__subtype__1__last <= 268435455 New H23: dst_index <= 268435454 %%% Hypotheses H3 & H20 together imply that dst_index < dst__index__subtype__1__last. H3 & H20 have therefore been deleted and a new H97 added to this effect. *** Proved C3: element(src, [src_index + 13]) >= 0 using hypotheses H10, H37 & H38. *** Proved C4: element(src, [src_index + 13]) <= 18446744073709551615 using hypotheses H10, H37 & H38. *** Proved C5: element(src, [src_index + 13]) >= 0 using hypotheses H10, H37 & H38. *** Proved C6: element(src, [src_index + 13]) <= 18446744073709551615 using hypotheses H10, H37 & H38. --- Eliminated hypothesis H26 (true-hypothesis). --- Eliminated hypothesis H44 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H50 (true-hypothesis). --- Eliminated hypothesis H55 (true-hypothesis). --- Eliminated hypothesis H56 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H67 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H74 (true-hypothesis). --- Eliminated hypothesis H79 (true-hypothesis). --- Eliminated hypothesis H80 (true-hypothesis). --- Eliminated hypothesis H86 (true-hypothesis). --- Eliminated hypothesis H91 (true-hypothesis). --- Eliminated hypothesis H92 (true-hypothesis). --- Eliminated hypothesis H43 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). --- Eliminated hypothesis H73 (true-hypothesis). --- Eliminated hypothesis H85 (true-hypothesis). --- Eliminated hypothesis H47 (duplicate of H45). --- Eliminated hypothesis H48 (duplicate of H46). --- Eliminated hypothesis H65 (duplicate of H63). --- Eliminated hypothesis H66 (duplicate of H64). --- Eliminated hypothesis H77 (duplicate of H75). --- Eliminated hypothesis H78 (duplicate of H76). --- Eliminated hypothesis H89 (duplicate of H87). --- Eliminated hypothesis H90 (duplicate of H88). --- Eliminated hypothesis H97 (duplicate of H58). 
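
The last conclusion of this VC, C10, is closed just below from H42 (src_index + 15 <= src__index__subtype__1__last, later rewritten in terms of src_offset + dst_index * 8) together with H108 (that subtype's last is at most 2147483647): subtracting 15 gives a bound of 2147483632, which is stronger than the 2147483634 that C10 requires. A concrete worst-case check of the same arithmetic, with invented values:

   procedure C10_Arithmetic_Sketch is
      Subtype_Last : constant Long_Long_Integer := 2147483647;         --  H108
      Src_Index    : constant Long_Long_Integer := Subtype_Last - 15;  --  worst case under H42
   begin
      pragma Assert (Src_Index + 15 <= Subtype_Last);   --  H42 pattern
      pragma Assert (Src_Index <= 2147483632);          --  by subtraction
      pragma Assert (Src_Index <= 2147483634);          --  the bound C10 asks for
   end C10_Arithmetic_Sketch;
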
>>> Using "A->B, A |- B" on H8, given H97, we simplify the former to: >>> H8: dst_index <= 2147483646 >>> Using "A->B, A |- B" on H9, given H97, we simplify the former to: >>> H9: src_index <= 2147483639 --- Eliminated hypothesis H15 (redundant, given H18). --- Eliminated hypothesis H21 (redundant, given H23). --- Eliminated hypothesis H22 (redundant, given H2 & H14). --- Eliminated hypothesis H24 (redundant, given H52). --- Eliminated hypothesis H28 (redundant, given H30). --- Eliminated hypothesis H29 (redundant, given H27). --- Eliminated hypothesis H30 (redundant, given H32). --- Eliminated hypothesis H31 (redundant, given H27). --- Eliminated hypothesis H32 (redundant, given H34). --- Eliminated hypothesis H33 (redundant, given H27). --- Eliminated hypothesis H34 (redundant, given H36). --- Eliminated hypothesis H35 (redundant, given H27). --- Eliminated hypothesis H36 (redundant, given H38). --- Eliminated hypothesis H37 (redundant, given H27). --- Eliminated hypothesis H38 (redundant, given H40). --- Eliminated hypothesis H39 (redundant, given H27). --- Eliminated hypothesis H40 (redundant, given H42). --- Eliminated hypothesis H41 (redundant, given H27). --- Eliminated hypothesis H51 (redundant, given H25). --- Eliminated hypothesis H52 (redundant, given H70). --- Eliminated hypothesis H57 (redundant, given H2). --- Eliminated hypothesis H69 (redundant, given H25). --- Eliminated hypothesis H70 (redundant, given H82). --- Eliminated hypothesis H81 (redundant, given H25). --- Eliminated hypothesis H82 (redundant, given H94). --- Eliminated hypothesis H93 (redundant, given H25). -S- Eliminated hypothesis H5. This was achieved by replacing all occurrences of src_index by: src_offset + dst_index * 8. New H6: src_offset + dst_index * 8 >= src_offset New H7: src_offset + dst_index * 8 <= src_offset + dst__index__subtype__1__last * 8 New H27: src_offset + dst_index * 8 + 8 >= src__index__subtype__1__first New H42: src_offset + dst_index * 8 + 15 <= src__index__subtype__1__last New H45: element(src, [src_offset + dst_index * 8 + 9]) >= 0 New H46: element(src, [src_offset + dst_index * 8 + 9]) <= 18446744073709551615 New H53: element(src, [src_offset + dst_index * 8 + 8]) >= 0 New H54: element(src, [src_offset + dst_index * 8 + 8]) <= 18446744073709551615 New H59: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 9]), 8) >= 0 New H60: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 9]), 8) <= 18446744073709551615 New H63: element(src, [src_offset + dst_index * 8 + 10]) >= 0 New H64: element(src, [src_offset + dst_index * 8 + 10]) <= 18446744073709551615 New H71: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 10]), 16) >= 0 New H72: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 10]), 16) <= 18446744073709551615 New H75: element(src, [src_offset + dst_index * 8 + 11]) >= 0 New H76: element(src, [src_offset + dst_index * 8 + 11]) <= 18446744073709551615 New H83: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 11]), 24) >= 0 New H84: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 11]), 24) <= 18446744073709551615 New H87: element(src, [src_offset + dst_index * 8 + 12]) >= 0 New H88: element(src, [src_offset + dst_index * 8 + 12]) <= 18446744073709551615 New H94: src_offset + dst_index * 8 <= 2147483635 New H95: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 12]), 32) >= 0 New H96: 
spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 12]), 32) <= 18446744073709551615 New H25: src_offset + dst_index * 8 >= - 8 New H9: src_offset + dst_index * 8 <= 2147483639 New C10: src_offset + dst_index * 8 <= 2147483634 +++ New H98: integer__size >= 0 +++ New H99: natural__size >= 0 +++ New H100: spark__unsigned__byte__size >= 0 +++ New H101: spark__unsigned__u64__size >= 0 +++ New H102: spark__unsigned__shift_count__size >= 0 +++ New H103: spark__crypto__word_count_t__size >= 0 +++ New H104: dst__index__subtype__1__first <= dst__index__subtype__1__last +++ New H105: src__index__subtype__1__first <= src__index__subtype__1__last +++ New H106: src__index__subtype__1__first >= 0 +++ New H107: src__index__subtype__1__last >= 0 +++ New H108: src__index__subtype__1__last <= 2147483647 +++ New H109: src__index__subtype__1__first <= 2147483647 +++ New H110: dst__index__subtype__1__first >= 0 +++ New H111: dst__index__subtype__1__last >= 0 --- Attempted addition of new hypothesis: dst__index__subtype__1__last <= 268435455 eliminated: this already exists (as H4). +++ New H4: dst__index__subtype__1__last <= 268435455 +++ New H112: dst__index__subtype__1__first <= 268435455 *** Proved C10: src_offset + dst_index * 8 <= 2147483634 using hypotheses H42 & H108. *** PROVED VC. @@@@@@@@@@ VC: procedure_get_64_lsb_first_15. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H13 has been replaced by "true". (It is already present, as H9) . --- Hypothesis H14 has been replaced by "true". (It is already present, as H10). --- Hypothesis H16 has been replaced by "true". (It is already present, as H3) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H37 has been replaced by "true". (It is already present, as H17). --- Hypothesis H38 has been replaced by "true". (It is already present, as H18). --- Hypothesis H43 has been replaced by "true". (It is already present, as H15). --- Hypothesis H44 has been replaced by "true". (It is already present, as H3) . --- Hypothesis H55 has been replaced by "true". (It is already present, as H19). --- Hypothesis H56 has been replaced by "true". (It is already present, as H20). --- Hypothesis H67 has been replaced by "true". (It is already present, as H21). --- Hypothesis H68 has been replaced by "true". (It is already present, as H22). --- Hypothesis H79 has been replaced by "true". (It is already present, as H23). --- Hypothesis H80 has been replaced by "true". (It is already present, as H24). --- Hypothesis H91 has been replaced by "true". (It is already present, as H25). --- Hypothesis H92 has been replaced by "true". (It is already present, as H26). *** Proved C1: 48 >= spark__unsigned__shift_count__first using hypothesis H31. *** Proved C7: src_offset + 6 >= src__index__subtype__1__first using hypothesis H27. *** Proved C8: src_offset + 6 <= src__index__subtype__1__last using hypothesis H28. *** Proved C9: src_offset + 6 >= integer__base__first using hypothesis H39. -S- Applied substitution rule get_64_lsb_f_rules(39). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H32: true New H50: true New H62: true New H74: true New H86: true New C2: true -S- Applied substitution rule get_64_lsb_f_rules(20). 
This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H33: element(src, [src_offset + 1]) >= 0 New H47: spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8) >= 0 New H51: element(src, [src_offset + 2]) >= 0 New H59: spark__unsigned__shift_left_64(element(src, [src_offset + 2]), 16) >= 0 New H63: element(src, [src_offset + 3]) >= 0 New H71: spark__unsigned__shift_left_64(element(src, [src_offset + 3]), 24) >= 0 New H75: element(src, [src_offset + 4]) >= 0 New H83: spark__unsigned__shift_left_64(element(src, [src_offset + 4]), 32) >= 0 New H87: element(src, [src_offset + 5]) >= 0 New H95: spark__unsigned__shift_left_64(element(src, [src_offset + 5]), 40) >= 0 New C3: element(src, [src_offset + 6]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(21). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H34: element(src, [src_offset + 1]) <= 18446744073709551615 New H48: spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8) <= 18446744073709551615 New H52: element(src, [src_offset + 2]) <= 18446744073709551615 New H60: spark__unsigned__shift_left_64(element(src, [src_offset + 2]), 16) <= 18446744073709551615 New H64: element(src, [src_offset + 3]) <= 18446744073709551615 New H72: spark__unsigned__shift_left_64(element(src, [src_offset + 3]), 24) <= 18446744073709551615 New H76: element(src, [src_offset + 4]) <= 18446744073709551615 New H84: spark__unsigned__shift_left_64(element(src, [src_offset + 4]), 32) <= 18446744073709551615 New H88: element(src, [src_offset + 5]) <= 18446744073709551615 New H96: spark__unsigned__shift_left_64(element(src, [src_offset + 5]), 40) <= 18446744073709551615 New C4: element(src, [src_offset + 6]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H35: element(src, [src_offset + 1]) >= 0 New H41: element(src, [src_offset]) >= 0 New H53: element(src, [src_offset + 2]) >= 0 New H65: element(src, [src_offset + 3]) >= 0 New H77: element(src, [src_offset + 4]) >= 0 New H89: element(src, [src_offset + 5]) >= 0 New C5: element(src, [src_offset + 6]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H36: element(src, [src_offset + 1]) <= 18446744073709551615 New H42: element(src, [src_offset]) <= 18446744073709551615 New H54: element(src, [src_offset + 2]) <= 18446744073709551615 New H66: element(src, [src_offset + 3]) <= 18446744073709551615 New H78: element(src, [src_offset + 4]) <= 18446744073709551615 New H90: element(src, [src_offset + 5]) <= 18446744073709551615 New C6: element(src, [src_offset + 6]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(5). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H40: src_offset <= 2147483646 New H58: src_offset <= 2147483645 New H70: src_offset <= 2147483644 New H82: src_offset <= 2147483643 New H94: src_offset <= 2147483642 New C10: src_offset <= 2147483641 *** Proved C2: true -S- Applied substitution rule get_64_lsb_f_rules(4). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. 
New H39: src_offset >= - 2147483649 New H57: src_offset >= - 2147483650 New H69: src_offset >= - 2147483651 New H81: src_offset >= - 2147483652 New H93: src_offset >= - 2147483653 -S- Applied substitution rule get_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H9: src_offset >= 0 -S- Applied substitution rule get_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: src_offset + dst__index__subtype__1__last * 8 <= 2147483647 New H10: src_offset <= 2147483647 -S- Applied substitution rule get_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule get_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 255) -S- Applied substitution rule get_64_lsb_f_rules(38). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H31: true New H49: true New H61: true New H73: true New H85: true -S- Applied substitution rule get_64_lsb_f_rules(43). This was achieved by replacing all occurrences of spark__crypto__word_count_t__first by: 0. New H11: true -S- Applied substitution rule get_64_lsb_f_rules(44). This was achieved by replacing all occurrences of spark__crypto__word_count_t__last by: 268435455. New H12: true *** Proved C3: element(src, [src_offset + 6]) >= 0 using hypotheses H8, H27 & H28. *** Proved C4: element(src, [src_offset + 6]) <= 18446744073709551615 using hypotheses H8, H27 & H28. *** Proved C5: element(src, [src_offset + 6]) >= 0 using hypotheses H8, H27 & H28. *** Proved C6: element(src, [src_offset + 6]) <= 18446744073709551615 using hypotheses H8, H27 & H28. --- Eliminated hypothesis H13 (true-hypothesis). --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H16 (true-hypothesis). --- Eliminated hypothesis H30 (true-hypothesis). --- Eliminated hypothesis H32 (true-hypothesis). --- Eliminated hypothesis H37 (true-hypothesis). --- Eliminated hypothesis H38 (true-hypothesis). --- Eliminated hypothesis H43 (true-hypothesis). --- Eliminated hypothesis H44 (true-hypothesis). --- Eliminated hypothesis H50 (true-hypothesis). --- Eliminated hypothesis H55 (true-hypothesis). --- Eliminated hypothesis H56 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H67 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H74 (true-hypothesis). --- Eliminated hypothesis H79 (true-hypothesis). --- Eliminated hypothesis H80 (true-hypothesis). --- Eliminated hypothesis H86 (true-hypothesis). --- Eliminated hypothesis H91 (true-hypothesis). --- Eliminated hypothesis H92 (true-hypothesis). --- Eliminated hypothesis H31 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). --- Eliminated hypothesis H73 (true-hypothesis). --- Eliminated hypothesis H85 (true-hypothesis). --- Eliminated hypothesis H11 (true-hypothesis). --- Eliminated hypothesis H12 (true-hypothesis). 
--- Eliminated hypothesis H35 (duplicate of H33). --- Eliminated hypothesis H36 (duplicate of H34). --- Eliminated hypothesis H53 (duplicate of H51). --- Eliminated hypothesis H54 (duplicate of H52). --- Eliminated hypothesis H65 (duplicate of H63). --- Eliminated hypothesis H66 (duplicate of H64). --- Eliminated hypothesis H77 (duplicate of H75). --- Eliminated hypothesis H78 (duplicate of H76). --- Eliminated hypothesis H89 (duplicate of H87). --- Eliminated hypothesis H90 (duplicate of H88). --- Eliminated hypothesis H3 (redundant, given H6). --- Eliminated hypothesis H10 (redundant, given H40). --- Eliminated hypothesis H17 (redundant, given H15). --- Eliminated hypothesis H18 (redundant, given H6). --- Eliminated hypothesis H19 (redundant, given H15). --- Eliminated hypothesis H20 (redundant, given H6). --- Eliminated hypothesis H21 (redundant, given H15). --- Eliminated hypothesis H22 (redundant, given H6). --- Eliminated hypothesis H23 (redundant, given H15). --- Eliminated hypothesis H24 (redundant, given H6). --- Eliminated hypothesis H25 (redundant, given H15). --- Eliminated hypothesis H26 (redundant, given H6). --- Eliminated hypothesis H27 (redundant, given H15). --- Eliminated hypothesis H28 (redundant, given H6). --- Eliminated hypothesis H29 (redundant, given H15). --- Eliminated hypothesis H39 (redundant, given H9). --- Eliminated hypothesis H40 (redundant, given H58). --- Eliminated hypothesis H45 (redundant, given H2). --- Eliminated hypothesis H57 (redundant, given H9). --- Eliminated hypothesis H58 (redundant, given H70). --- Eliminated hypothesis H69 (redundant, given H9). --- Eliminated hypothesis H70 (redundant, given H82). --- Eliminated hypothesis H81 (redundant, given H9). --- Eliminated hypothesis H82 (redundant, given H94). --- Eliminated hypothesis H93 (redundant, given H9). +++ New H97: integer__size >= 0 +++ New H98: natural__size >= 0 +++ New H99: spark__unsigned__byte__size >= 0 +++ New H100: spark__unsigned__u64__size >= 0 +++ New H101: spark__unsigned__shift_count__size >= 0 +++ New H102: spark__crypto__word_count_t__size >= 0 +++ New H103: dst__index__subtype__1__first <= dst__index__subtype__1__last +++ New H104: src__index__subtype__1__first <= src__index__subtype__1__last +++ New H105: src__index__subtype__1__first >= 0 +++ New H106: src__index__subtype__1__last >= 0 +++ New H107: src__index__subtype__1__last <= 2147483647 +++ New H108: src__index__subtype__1__first <= 2147483647 +++ New H109: dst__index__subtype__1__first >= 0 +++ New H110: dst__index__subtype__1__last >= 0 +++ New H111: dst__index__subtype__1__last <= 268435455 +++ New H112: dst__index__subtype__1__first <= 268435455 *** Proved C10: src_offset <= 2147483641 using hypotheses H6 & H107. *** PROVED VC. @@@@@@@@@@ VC: procedure_get_64_lsb_first_16. @@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> spark__unsigned__u64__first <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) %%% Simplified H10 on reading formula in, to give: %%% H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). 
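The closing overflow conclusions in this family of VCs come down to interval arithmetic over the source array bound. For example, C10 of procedure_get_64_lsb_first_14 above was discharged from H42 and H108; written out (for clarity only, this is not additional reasoning beyond what the Simplifier reports):

\[
src\_offset + 8\,dst\_index + 15 \le src\_\_index\_\_subtype\_\_1\_\_last \le 2147483647
\;\Longrightarrow\;
src\_offset + 8\,dst\_index \le 2147483632 \le 2147483634 .
\]

The analogous C10 conclusions of the other VCs quoted here are discharged the same way, from the corresponding index-in-range hypothesis together with the 2147483647 upper bound on src__index__subtype__1__last.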
%%% Simplified H29 on reading formula in, to give: %%% H29: src_index + 9 >= src__index__subtype__1__first %%% Simplified H30 on reading formula in, to give: %%% H30: src_index + 9 <= src__index__subtype__1__last %%% Simplified H31 on reading formula in, to give: %%% H31: src_index + 10 >= src__index__subtype__1__first %%% Simplified H32 on reading formula in, to give: %%% H32: src_index + 10 <= src__index__subtype__1__last %%% Simplified H33 on reading formula in, to give: %%% H33: src_index + 11 >= src__index__subtype__1__first %%% Simplified H34 on reading formula in, to give: %%% H34: src_index + 11 <= src__index__subtype__1__last %%% Simplified H35 on reading formula in, to give: %%% H35: src_index + 12 >= src__index__subtype__1__first %%% Simplified H36 on reading formula in, to give: %%% H36: src_index + 12 <= src__index__subtype__1__last %%% Simplified H37 on reading formula in, to give: %%% H37: src_index + 13 >= src__index__subtype__1__first %%% Simplified H38 on reading formula in, to give: %%% H38: src_index + 13 <= src__index__subtype__1__last %%% Simplified H39 on reading formula in, to give: %%% H39: src_index + 14 >= src__index__subtype__1__first %%% Simplified H40 on reading formula in, to give: %%% H40: src_index + 14 <= src__index__subtype__1__last %%% Simplified H41 on reading formula in, to give: %%% H41: src_index + 15 >= src__index__subtype__1__first %%% Simplified H42 on reading formula in, to give: %%% H42: src_index + 15 <= src__index__subtype__1__last %%% Simplified H45 on reading formula in, to give: %%% H45: element(src, [src_index + 9]) >= interfaces__unsigned_64__first %%% Simplified H46 on reading formula in, to give: %%% H46: element(src, [src_index + 9]) <= interfaces__unsigned_64__last %%% Simplified H47 on reading formula in, to give: %%% H47: element(src, [src_index + 9]) >= spark__unsigned__u64__first %%% Simplified H48 on reading formula in, to give: %%% H48: element(src, [src_index + 9]) <= spark__unsigned__u64__last --- Hypothesis H49 has been replaced by "true". (It is already present, as H29). --- Hypothesis H50 has been replaced by "true". (It is already present, as H30). %%% Simplified H51 on reading formula in, to give: %%% H51: src_index + 9 >= integer__base__first %%% Simplified H52 on reading formula in, to give: %%% H52: src_index + 9 <= integer__base__last --- Hypothesis H55 has been replaced by "true". (It is already present, as H27). --- Hypothesis H56 has been replaced by "true". (It is already present, as H28). %%% Simplified H59 on reading formula in, to give: %%% H59: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) >= interfaces__unsigned_64__first %%% Simplified H60 on reading formula in, to give: %%% H60: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) <= interfaces__unsigned_64__last %%% Simplified H63 on reading formula in, to give: %%% H63: element(src, [src_index + 10]) >= interfaces__unsigned_64__first %%% Simplified H64 on reading formula in, to give: %%% H64: element(src, [src_index + 10]) <= interfaces__unsigned_64__last %%% Simplified H65 on reading formula in, to give: %%% H65: element(src, [src_index + 10]) >= spark__unsigned__u64__first %%% Simplified H66 on reading formula in, to give: %%% H66: element(src, [src_index + 10]) <= spark__unsigned__u64__last --- Hypothesis H67 has been replaced by "true". (It is already present, as H31). --- Hypothesis H68 has been replaced by "true". (It is already present, as H32). 
%%% Simplified H69 on reading formula in, to give: %%% H69: src_index + 10 >= integer__base__first %%% Simplified H70 on reading formula in, to give: %%% H70: src_index + 10 <= integer__base__last %%% Simplified H71 on reading formula in, to give: %%% H71: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) >= interfaces__unsigned_64__first %%% Simplified H72 on reading formula in, to give: %%% H72: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) <= interfaces__unsigned_64__last %%% Simplified H75 on reading formula in, to give: %%% H75: element(src, [src_index + 11]) >= interfaces__unsigned_64__first %%% Simplified H76 on reading formula in, to give: %%% H76: element(src, [src_index + 11]) <= interfaces__unsigned_64__last %%% Simplified H77 on reading formula in, to give: %%% H77: element(src, [src_index + 11]) >= spark__unsigned__u64__first %%% Simplified H78 on reading formula in, to give: %%% H78: element(src, [src_index + 11]) <= spark__unsigned__u64__last --- Hypothesis H79 has been replaced by "true". (It is already present, as H33). --- Hypothesis H80 has been replaced by "true". (It is already present, as H34). %%% Simplified H81 on reading formula in, to give: %%% H81: src_index + 11 >= integer__base__first %%% Simplified H82 on reading formula in, to give: %%% H82: src_index + 11 <= integer__base__last %%% Simplified H83 on reading formula in, to give: %%% H83: spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24) >= interfaces__unsigned_64__first %%% Simplified H84 on reading formula in, to give: %%% H84: spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24) <= interfaces__unsigned_64__last %%% Simplified H87 on reading formula in, to give: %%% H87: element(src, [src_index + 12]) >= interfaces__unsigned_64__first %%% Simplified H88 on reading formula in, to give: %%% H88: element(src, [src_index + 12]) <= interfaces__unsigned_64__last %%% Simplified H89 on reading formula in, to give: %%% H89: element(src, [src_index + 12]) >= spark__unsigned__u64__first %%% Simplified H90 on reading formula in, to give: %%% H90: element(src, [src_index + 12]) <= spark__unsigned__u64__last --- Hypothesis H91 has been replaced by "true". (It is already present, as H35). --- Hypothesis H92 has been replaced by "true". (It is already present, as H36). %%% Simplified H93 on reading formula in, to give: %%% H93: src_index + 12 >= integer__base__first %%% Simplified H94 on reading formula in, to give: %%% H94: src_index + 12 <= integer__base__last %%% Simplified H95 on reading formula in, to give: %%% H95: spark__unsigned__shift_left_64(element(src, [src_index + 12]), 32) >= interfaces__unsigned_64__first %%% Simplified H96 on reading formula in, to give: %%% H96: spark__unsigned__shift_left_64(element(src, [src_index + 12]), 32) <= interfaces__unsigned_64__last %%% Simplified H99 on reading formula in, to give: %%% H99: element(src, [src_index + 13]) >= interfaces__unsigned_64__first %%% Simplified H100 on reading formula in, to give: %%% H100: element(src, [src_index + 13]) <= interfaces__unsigned_64__last %%% Simplified H101 on reading formula in, to give: %%% H101: element(src, [src_index + 13]) >= spark__unsigned__u64__first %%% Simplified H102 on reading formula in, to give: %%% H102: element(src, [src_index + 13]) <= spark__unsigned__u64__last --- Hypothesis H103 has been replaced by "true". (It is already present, as H37). --- Hypothesis H104 has been replaced by "true". (It is already present, as H38). 
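The paired range hypotheses being tracked here are mutually consistent by elementary arithmetic: a byte read from src lies in 0 .. 255, which is well inside the unsigned 64-bit range, and left-shifting it by at most 56 bits (the largest shift count used in these VCs) cannot leave that range either. As a worked check (illustrative only; in the log the bounds on shift_left_64 appear as given hypotheses rather than being derived):

\[
0 \le b \le 255,\ 0 \le s \le 56
\;\Longrightarrow\;
b \cdot 2^{s} \le 255 \cdot 2^{56} = 18374686479671623680 < 18446744073709551615 = 2^{64}-1 .
\]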
%%% Simplified H105 on reading formula in, to give: %%% H105: src_index + 13 >= integer__base__first %%% Simplified H106 on reading formula in, to give: %%% H106: src_index + 13 <= integer__base__last %%% Simplified H107 on reading formula in, to give: %%% H107: spark__unsigned__shift_left_64(element(src, [src_index + 13]), 40) >= interfaces__unsigned_64__first %%% Simplified H108 on reading formula in, to give: %%% H108: spark__unsigned__shift_left_64(element(src, [src_index + 13]), 40) <= interfaces__unsigned_64__last %%% Simplified C3 on reading formula in, to give: %%% C3: element(src, [src_index + 14]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(src, [src_index + 14]) <= interfaces__unsigned_64__last %%% Simplified C5 on reading formula in, to give: %%% C5: element(src, [src_index + 14]) >= spark__unsigned__u64__first %%% Simplified C6 on reading formula in, to give: %%% C6: element(src, [src_index + 14]) <= spark__unsigned__u64__last %%% Simplified C7 on reading formula in, to give: %%% C7: src_index + 14 >= src__index__subtype__1__first %%% Simplified C8 on reading formula in, to give: %%% C8: src_index + 14 <= src__index__subtype__1__last %%% Simplified C9 on reading formula in, to give: %%% C9: src_index + 14 >= integer__base__first %%% Simplified C10 on reading formula in, to give: %%% C10: src_index + 14 <= integer__base__last *** Proved C1: 48 >= spark__unsigned__shift_count__first using hypothesis H43. *** Proved C7: src_index + 14 >= src__index__subtype__1__first using hypothesis H39. *** Proved C8: src_index + 14 <= src__index__subtype__1__last using hypothesis H40. *** Proved C9: src_index + 14 >= integer__base__first using hypothesis H51. -S- Applied substitution rule get_64_lsb_f_rules(39). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H44: true New H62: true New H74: true New H86: true New H98: true New C2: true -S- Applied substitution rule get_64_lsb_f_rules(20). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H45: element(src, [src_index + 9]) >= 0 New H59: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) >= 0 New H63: element(src, [src_index + 10]) >= 0 New H71: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) >= 0 New H75: element(src, [src_index + 11]) >= 0 New H83: spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24) >= 0 New H87: element(src, [src_index + 12]) >= 0 New H95: spark__unsigned__shift_left_64(element(src, [src_index + 12]), 32) >= 0 New H99: element(src, [src_index + 13]) >= 0 New H107: spark__unsigned__shift_left_64(element(src, [src_index + 13]), 40) >= 0 New C3: element(src, [src_index + 14]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(21). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. 
New H46: element(src, [src_index + 9]) <= 18446744073709551615 New H60: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) <= 18446744073709551615 New H64: element(src, [src_index + 10]) <= 18446744073709551615 New H72: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) <= 18446744073709551615 New H76: element(src, [src_index + 11]) <= 18446744073709551615 New H84: spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24) <= 18446744073709551615 New H88: element(src, [src_index + 12]) <= 18446744073709551615 New H96: spark__unsigned__shift_left_64(element(src, [src_index + 12]), 32) <= 18446744073709551615 New H100: element(src, [src_index + 13]) <= 18446744073709551615 New H108: spark__unsigned__shift_left_64(element(src, [src_index + 13]), 40) <= 18446744073709551615 New C4: element(src, [src_index + 14]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> 0 <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) New H47: element(src, [src_index + 9]) >= 0 New H53: element(src, [src_index + 8]) >= 0 New H65: element(src, [src_index + 10]) >= 0 New H77: element(src, [src_index + 11]) >= 0 New H89: element(src, [src_index + 12]) >= 0 New H101: element(src, [src_index + 13]) >= 0 New C5: element(src, [src_index + 14]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H48: element(src, [src_index + 9]) <= 18446744073709551615 New H54: element(src, [src_index + 8]) <= 18446744073709551615 New H66: element(src, [src_index + 10]) <= 18446744073709551615 New H78: element(src, [src_index + 11]) <= 18446744073709551615 New H90: element(src, [src_index + 12]) <= 18446744073709551615 New H102: element(src, [src_index + 13]) <= 18446744073709551615 New H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> 0 <= element(dst, [i_]) and element(dst, [i_]) <= 18446744073709551615) New C6: element(src, [src_index + 14]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(5). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H52: src_index <= 2147483638 New H70: src_index <= 2147483637 New H82: src_index <= 2147483636 New H94: src_index <= 2147483635 New H106: src_index <= 2147483634 New C10: src_index <= 2147483633 *** Proved C2: true >>> Restructured hypothesis H20 into: >>> H20: dst_index <> dst__index__subtype__1__last --- Attempted addition of new hypothesis: dst_index + 1 <= natural__last eliminated: this already exists (as H21). +++ Using "A->B, A |- B" on hypotheses H8 & H20 yields a new hypothesis: +++ H21: dst_index + 1 <= natural__last --- Attempted addition of new hypothesis: src_index + 8 <= natural__last eliminated: this already exists (as H24). +++ Using "A->B, A |- B" on hypotheses H9 & H20 yields a new hypothesis: +++ H24: src_index + 8 <= natural__last -S- Applied substitution rule get_64_lsb_f_rules(4). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H51: src_index >= - 2147483657 New H69: src_index >= - 2147483658 New H81: src_index >= - 2147483659 New H93: src_index >= - 2147483660 New H105: src_index >= - 2147483661 -S- Applied substitution rule get_64_lsb_f_rules(7). 
This was achieved by replacing all occurrences of natural__first by: 0. New H11: src_offset >= 0 New H25: src_index >= - 8 -S- Applied substitution rule get_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H8: dst_index <> dst__index__subtype__1__last -> dst_index <= 2147483646 New H9: dst_index <> dst__index__subtype__1__last -> src_index <= 2147483639 New H12: src_offset <= 2147483647 New H19: src_offset + dst__index__subtype__1__last * 8 <= 2147483647 New H21: dst_index <= 2147483646 New H24: src_index <= 2147483639 -S- Applied substitution rule get_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule get_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 255) -S- Applied substitution rule get_64_lsb_f_rules(38). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H43: true New H61: true New H73: true New H85: true New H97: true -S- Applied substitution rule get_64_lsb_f_rules(43). This was achieved by replacing all occurrences of spark__crypto__word_count_t__first by: 0. New H22: dst_index >= - 1 -S- Applied substitution rule get_64_lsb_f_rules(44). This was achieved by replacing all occurrences of spark__crypto__word_count_t__last by: 268435455. New H4: dst__index__subtype__1__last <= 268435455 New H23: dst_index <= 268435454 %%% Hypotheses H3 & H20 together imply that dst_index < dst__index__subtype__1__last. H3 & H20 have therefore been deleted and a new H109 added to this effect. *** Proved C3: element(src, [src_index + 14]) >= 0 using hypotheses H10, H39 & H40. *** Proved C4: element(src, [src_index + 14]) <= 18446744073709551615 using hypotheses H10, H39 & H40. *** Proved C5: element(src, [src_index + 14]) >= 0 using hypotheses H10, H39 & H40. *** Proved C6: element(src, [src_index + 14]) <= 18446744073709551615 using hypotheses H10, H39 & H40. --- Eliminated hypothesis H26 (true-hypothesis). --- Eliminated hypothesis H44 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H50 (true-hypothesis). --- Eliminated hypothesis H55 (true-hypothesis). --- Eliminated hypothesis H56 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H67 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H74 (true-hypothesis). --- Eliminated hypothesis H79 (true-hypothesis). --- Eliminated hypothesis H80 (true-hypothesis). --- Eliminated hypothesis H86 (true-hypothesis). --- Eliminated hypothesis H91 (true-hypothesis). --- Eliminated hypothesis H92 (true-hypothesis). --- Eliminated hypothesis H98 (true-hypothesis). --- Eliminated hypothesis H103 (true-hypothesis). --- Eliminated hypothesis H104 (true-hypothesis). --- Eliminated hypothesis H43 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). --- Eliminated hypothesis H73 (true-hypothesis). --- Eliminated hypothesis H85 (true-hypothesis). --- Eliminated hypothesis H97 (true-hypothesis). 
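The literal substituted for spark__crypto__word_count_t__last is 2**28 - 1, small enough that a word index scaled by eight bytes per word still fits in Natural. As a quick numeric check (illustrative only, not a step the Simplifier performs here):

\[
268435455 = 2^{28}-1, \qquad 268435455 \times 8 = 2147483640 \le 2147483647 = natural\_\_last .
\]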
--- Eliminated hypothesis H47 (duplicate of H45). --- Eliminated hypothesis H48 (duplicate of H46). --- Eliminated hypothesis H65 (duplicate of H63). --- Eliminated hypothesis H66 (duplicate of H64). --- Eliminated hypothesis H77 (duplicate of H75). --- Eliminated hypothesis H78 (duplicate of H76). --- Eliminated hypothesis H89 (duplicate of H87). --- Eliminated hypothesis H90 (duplicate of H88). --- Eliminated hypothesis H101 (duplicate of H99). --- Eliminated hypothesis H102 (duplicate of H100). --- Eliminated hypothesis H109 (duplicate of H58). >>> Using "A->B, A |- B" on H8, given H109, we simplify the former to: >>> H8: dst_index <= 2147483646 >>> Using "A->B, A |- B" on H9, given H109, we simplify the former to: >>> H9: src_index <= 2147483639 --- Eliminated hypothesis H15 (redundant, given H18). --- Eliminated hypothesis H21 (redundant, given H23). --- Eliminated hypothesis H22 (redundant, given H2 & H14). --- Eliminated hypothesis H24 (redundant, given H52). --- Eliminated hypothesis H28 (redundant, given H30). --- Eliminated hypothesis H29 (redundant, given H27). --- Eliminated hypothesis H30 (redundant, given H32). --- Eliminated hypothesis H31 (redundant, given H27). --- Eliminated hypothesis H32 (redundant, given H34). --- Eliminated hypothesis H33 (redundant, given H27). --- Eliminated hypothesis H34 (redundant, given H36). --- Eliminated hypothesis H35 (redundant, given H27). --- Eliminated hypothesis H36 (redundant, given H38). --- Eliminated hypothesis H37 (redundant, given H27). --- Eliminated hypothesis H38 (redundant, given H40). --- Eliminated hypothesis H39 (redundant, given H27). --- Eliminated hypothesis H40 (redundant, given H42). --- Eliminated hypothesis H41 (redundant, given H27). --- Eliminated hypothesis H51 (redundant, given H25). --- Eliminated hypothesis H52 (redundant, given H70). --- Eliminated hypothesis H57 (redundant, given H2). --- Eliminated hypothesis H69 (redundant, given H25). --- Eliminated hypothesis H70 (redundant, given H82). --- Eliminated hypothesis H81 (redundant, given H25). --- Eliminated hypothesis H82 (redundant, given H94). --- Eliminated hypothesis H93 (redundant, given H25). --- Eliminated hypothesis H94 (redundant, given H106). --- Eliminated hypothesis H105 (redundant, given H25). -S- Eliminated hypothesis H5. This was achieved by replacing all occurrences of src_index by: src_offset + dst_index * 8. 
New H6: src_offset + dst_index * 8 >= src_offset New H7: src_offset + dst_index * 8 <= src_offset + dst__index__subtype__1__last * 8 New H27: src_offset + dst_index * 8 + 8 >= src__index__subtype__1__first New H42: src_offset + dst_index * 8 + 15 <= src__index__subtype__1__last New H45: element(src, [src_offset + dst_index * 8 + 9]) >= 0 New H46: element(src, [src_offset + dst_index * 8 + 9]) <= 18446744073709551615 New H53: element(src, [src_offset + dst_index * 8 + 8]) >= 0 New H54: element(src, [src_offset + dst_index * 8 + 8]) <= 18446744073709551615 New H59: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 9]), 8) >= 0 New H60: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 9]), 8) <= 18446744073709551615 New H63: element(src, [src_offset + dst_index * 8 + 10]) >= 0 New H64: element(src, [src_offset + dst_index * 8 + 10]) <= 18446744073709551615 New H71: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 10]), 16) >= 0 New H72: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 10]), 16) <= 18446744073709551615 New H75: element(src, [src_offset + dst_index * 8 + 11]) >= 0 New H76: element(src, [src_offset + dst_index * 8 + 11]) <= 18446744073709551615 New H83: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 11]), 24) >= 0 New H84: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 11]), 24) <= 18446744073709551615 New H87: element(src, [src_offset + dst_index * 8 + 12]) >= 0 New H88: element(src, [src_offset + dst_index * 8 + 12]) <= 18446744073709551615 New H95: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 12]), 32) >= 0 New H96: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 12]), 32) <= 18446744073709551615 New H99: element(src, [src_offset + dst_index * 8 + 13]) >= 0 New H100: element(src, [src_offset + dst_index * 8 + 13]) <= 18446744073709551615 New H106: src_offset + dst_index * 8 <= 2147483634 New H107: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 13]), 40) >= 0 New H108: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 13]), 40) <= 18446744073709551615 New H25: src_offset + dst_index * 8 >= - 8 New H9: src_offset + dst_index * 8 <= 2147483639 New C10: src_offset + dst_index * 8 <= 2147483633 +++ New H110: integer__size >= 0 +++ New H111: natural__size >= 0 +++ New H112: spark__unsigned__byte__size >= 0 +++ New H113: spark__unsigned__u64__size >= 0 +++ New H114: spark__unsigned__shift_count__size >= 0 +++ New H115: spark__crypto__word_count_t__size >= 0 +++ New H116: dst__index__subtype__1__first <= dst__index__subtype__1__last +++ New H117: src__index__subtype__1__first <= src__index__subtype__1__last +++ New H118: src__index__subtype__1__first >= 0 +++ New H119: src__index__subtype__1__last >= 0 +++ New H120: src__index__subtype__1__last <= 2147483647 +++ New H121: src__index__subtype__1__first <= 2147483647 +++ New H122: dst__index__subtype__1__first >= 0 +++ New H123: dst__index__subtype__1__last >= 0 --- Attempted addition of new hypothesis: dst__index__subtype__1__last <= 268435455 eliminated: this already exists (as H4). +++ New H4: dst__index__subtype__1__last <= 268435455 +++ New H124: dst__index__subtype__1__first <= 268435455 *** Proved C10: src_offset + dst_index * 8 <= 2147483633 using hypotheses H42 & H120. *** PROVED VC. @@@@@@@@@@ VC: procedure_get_64_lsb_first_17. 
@@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H13 has been replaced by "true". (It is already present, as H9) . --- Hypothesis H14 has been replaced by "true". (It is already present, as H10). --- Hypothesis H16 has been replaced by "true". (It is already present, as H3) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H37 has been replaced by "true". (It is already present, as H17). --- Hypothesis H38 has been replaced by "true". (It is already present, as H18). --- Hypothesis H43 has been replaced by "true". (It is already present, as H15). --- Hypothesis H44 has been replaced by "true". (It is already present, as H3) . --- Hypothesis H55 has been replaced by "true". (It is already present, as H19). --- Hypothesis H56 has been replaced by "true". (It is already present, as H20). --- Hypothesis H67 has been replaced by "true". (It is already present, as H21). --- Hypothesis H68 has been replaced by "true". (It is already present, as H22). --- Hypothesis H79 has been replaced by "true". (It is already present, as H23). --- Hypothesis H80 has been replaced by "true". (It is already present, as H24). --- Hypothesis H91 has been replaced by "true". (It is already present, as H25). --- Hypothesis H92 has been replaced by "true". (It is already present, as H26). --- Hypothesis H103 has been replaced by "true". (It is already present, as H27). --- Hypothesis H104 has been replaced by "true". (It is already present, as H28). *** Proved C1: 56 >= spark__unsigned__shift_count__first using hypothesis H31. *** Proved C7: src_offset + 7 >= src__index__subtype__1__first using hypothesis H29. *** Proved C8: src_offset + 7 <= src__index__subtype__1__last using hypothesis H6. *** Proved C9: src_offset + 7 >= integer__base__first using hypothesis H39. -S- Applied substitution rule get_64_lsb_f_rules(39). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H32: true New H50: true New H62: true New H74: true New H86: true New H98: true New C2: true -S- Applied substitution rule get_64_lsb_f_rules(20). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H33: element(src, [src_offset + 1]) >= 0 New H47: spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8) >= 0 New H51: element(src, [src_offset + 2]) >= 0 New H59: spark__unsigned__shift_left_64(element(src, [src_offset + 2]), 16) >= 0 New H63: element(src, [src_offset + 3]) >= 0 New H71: spark__unsigned__shift_left_64(element(src, [src_offset + 3]), 24) >= 0 New H75: element(src, [src_offset + 4]) >= 0 New H83: spark__unsigned__shift_left_64(element(src, [src_offset + 4]), 32) >= 0 New H87: element(src, [src_offset + 5]) >= 0 New H95: spark__unsigned__shift_left_64(element(src, [src_offset + 5]), 40) >= 0 New H99: element(src, [src_offset + 6]) >= 0 New H107: spark__unsigned__shift_left_64(element(src, [src_offset + 6]), 48) >= 0 New C3: element(src, [src_offset + 7]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(21). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. 
New H34: element(src, [src_offset + 1]) <= 18446744073709551615 New H48: spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8) <= 18446744073709551615 New H52: element(src, [src_offset + 2]) <= 18446744073709551615 New H60: spark__unsigned__shift_left_64(element(src, [src_offset + 2]), 16) <= 18446744073709551615 New H64: element(src, [src_offset + 3]) <= 18446744073709551615 New H72: spark__unsigned__shift_left_64(element(src, [src_offset + 3]), 24) <= 18446744073709551615 New H76: element(src, [src_offset + 4]) <= 18446744073709551615 New H84: spark__unsigned__shift_left_64(element(src, [src_offset + 4]), 32) <= 18446744073709551615 New H88: element(src, [src_offset + 5]) <= 18446744073709551615 New H96: spark__unsigned__shift_left_64(element(src, [src_offset + 5]), 40) <= 18446744073709551615 New H100: element(src, [src_offset + 6]) <= 18446744073709551615 New H108: spark__unsigned__shift_left_64(element(src, [src_offset + 6]), 48) <= 18446744073709551615 New C4: element(src, [src_offset + 7]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H35: element(src, [src_offset + 1]) >= 0 New H41: element(src, [src_offset]) >= 0 New H53: element(src, [src_offset + 2]) >= 0 New H65: element(src, [src_offset + 3]) >= 0 New H77: element(src, [src_offset + 4]) >= 0 New H89: element(src, [src_offset + 5]) >= 0 New H101: element(src, [src_offset + 6]) >= 0 New C5: element(src, [src_offset + 7]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H36: element(src, [src_offset + 1]) <= 18446744073709551615 New H42: element(src, [src_offset]) <= 18446744073709551615 New H54: element(src, [src_offset + 2]) <= 18446744073709551615 New H66: element(src, [src_offset + 3]) <= 18446744073709551615 New H78: element(src, [src_offset + 4]) <= 18446744073709551615 New H90: element(src, [src_offset + 5]) <= 18446744073709551615 New H102: element(src, [src_offset + 6]) <= 18446744073709551615 New C6: element(src, [src_offset + 7]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(5). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H40: src_offset <= 2147483646 New H58: src_offset <= 2147483645 New H70: src_offset <= 2147483644 New H82: src_offset <= 2147483643 New H94: src_offset <= 2147483642 New H106: src_offset <= 2147483641 New C10: src_offset <= 2147483640 *** Proved C2: true -S- Applied substitution rule get_64_lsb_f_rules(4). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H39: src_offset >= - 2147483649 New H57: src_offset >= - 2147483650 New H69: src_offset >= - 2147483651 New H81: src_offset >= - 2147483652 New H93: src_offset >= - 2147483653 New H105: src_offset >= - 2147483654 -S- Applied substitution rule get_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H9: src_offset >= 0 -S- Applied substitution rule get_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: src_offset + dst__index__subtype__1__last * 8 <= 2147483647 New H10: src_offset <= 2147483647 -S- Applied substitution rule get_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. 
New H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule get_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 255) -S- Applied substitution rule get_64_lsb_f_rules(38). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H31: true New H49: true New H61: true New H73: true New H85: true New H97: true -S- Applied substitution rule get_64_lsb_f_rules(43). This was achieved by replacing all occurrences of spark__crypto__word_count_t__first by: 0. New H11: true -S- Applied substitution rule get_64_lsb_f_rules(44). This was achieved by replacing all occurrences of spark__crypto__word_count_t__last by: 268435455. New H12: true *** Proved C3: element(src, [src_offset + 7]) >= 0 using hypotheses H6, H8 & H29. *** Proved C4: element(src, [src_offset + 7]) <= 18446744073709551615 using hypotheses H6, H8 & H29. *** Proved C5: element(src, [src_offset + 7]) >= 0 using hypotheses H6, H8 & H29. *** Proved C6: element(src, [src_offset + 7]) <= 18446744073709551615 using hypotheses H6, H8 & H29. --- Eliminated hypothesis H13 (true-hypothesis). --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H16 (true-hypothesis). --- Eliminated hypothesis H30 (true-hypothesis). --- Eliminated hypothesis H32 (true-hypothesis). --- Eliminated hypothesis H37 (true-hypothesis). --- Eliminated hypothesis H38 (true-hypothesis). --- Eliminated hypothesis H43 (true-hypothesis). --- Eliminated hypothesis H44 (true-hypothesis). --- Eliminated hypothesis H50 (true-hypothesis). --- Eliminated hypothesis H55 (true-hypothesis). --- Eliminated hypothesis H56 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H67 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H74 (true-hypothesis). --- Eliminated hypothesis H79 (true-hypothesis). --- Eliminated hypothesis H80 (true-hypothesis). --- Eliminated hypothesis H86 (true-hypothesis). --- Eliminated hypothesis H91 (true-hypothesis). --- Eliminated hypothesis H92 (true-hypothesis). --- Eliminated hypothesis H98 (true-hypothesis). --- Eliminated hypothesis H103 (true-hypothesis). --- Eliminated hypothesis H104 (true-hypothesis). --- Eliminated hypothesis H31 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). --- Eliminated hypothesis H73 (true-hypothesis). --- Eliminated hypothesis H85 (true-hypothesis). --- Eliminated hypothesis H97 (true-hypothesis). --- Eliminated hypothesis H11 (true-hypothesis). --- Eliminated hypothesis H12 (true-hypothesis). --- Eliminated hypothesis H35 (duplicate of H33). --- Eliminated hypothesis H36 (duplicate of H34). --- Eliminated hypothesis H53 (duplicate of H51). --- Eliminated hypothesis H54 (duplicate of H52). --- Eliminated hypothesis H65 (duplicate of H63). --- Eliminated hypothesis H66 (duplicate of H64). --- Eliminated hypothesis H77 (duplicate of H75). --- Eliminated hypothesis H78 (duplicate of H76). --- Eliminated hypothesis H89 (duplicate of H87). --- Eliminated hypothesis H90 (duplicate of H88). 
--- Eliminated hypothesis H101 (duplicate of H99). --- Eliminated hypothesis H102 (duplicate of H100). --- Eliminated hypothesis H3 (redundant, given H6). --- Eliminated hypothesis H10 (redundant, given H40). --- Eliminated hypothesis H17 (redundant, given H15). --- Eliminated hypothesis H18 (redundant, given H6). --- Eliminated hypothesis H19 (redundant, given H15). --- Eliminated hypothesis H20 (redundant, given H6). --- Eliminated hypothesis H21 (redundant, given H15). --- Eliminated hypothesis H22 (redundant, given H6). --- Eliminated hypothesis H23 (redundant, given H15). --- Eliminated hypothesis H24 (redundant, given H6). --- Eliminated hypothesis H25 (redundant, given H15). --- Eliminated hypothesis H26 (redundant, given H6). --- Eliminated hypothesis H27 (redundant, given H15). --- Eliminated hypothesis H28 (redundant, given H6). --- Eliminated hypothesis H29 (redundant, given H15). --- Eliminated hypothesis H39 (redundant, given H9). --- Eliminated hypothesis H40 (redundant, given H58). --- Eliminated hypothesis H45 (redundant, given H2). --- Eliminated hypothesis H57 (redundant, given H9). --- Eliminated hypothesis H58 (redundant, given H70). --- Eliminated hypothesis H69 (redundant, given H9). --- Eliminated hypothesis H70 (redundant, given H82). --- Eliminated hypothesis H81 (redundant, given H9). --- Eliminated hypothesis H82 (redundant, given H94). --- Eliminated hypothesis H93 (redundant, given H9). --- Eliminated hypothesis H94 (redundant, given H106). --- Eliminated hypothesis H105 (redundant, given H9). +++ New H109: integer__size >= 0 +++ New H110: natural__size >= 0 +++ New H111: spark__unsigned__byte__size >= 0 +++ New H112: spark__unsigned__u64__size >= 0 +++ New H113: spark__unsigned__shift_count__size >= 0 +++ New H114: spark__crypto__word_count_t__size >= 0 +++ New H115: dst__index__subtype__1__first <= dst__index__subtype__1__last +++ New H116: src__index__subtype__1__first <= src__index__subtype__1__last +++ New H117: src__index__subtype__1__first >= 0 +++ New H118: src__index__subtype__1__last >= 0 +++ New H119: src__index__subtype__1__last <= 2147483647 +++ New H120: src__index__subtype__1__first <= 2147483647 +++ New H121: dst__index__subtype__1__first >= 0 +++ New H122: dst__index__subtype__1__last >= 0 +++ New H123: dst__index__subtype__1__last <= 268435455 +++ New H124: dst__index__subtype__1__first <= 268435455 *** Proved C10: src_offset <= 2147483640 using hypotheses H6 & H119. *** PROVED VC. @@@@@@@@@@ VC: procedure_get_64_lsb_first_18. @@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> spark__unsigned__u64__first <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) %%% Simplified H10 on reading formula in, to give: %%% H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). 
%%% Simplified H29 on reading formula in, to give: %%% H29: src_index + 9 >= src__index__subtype__1__first %%% Simplified H30 on reading formula in, to give: %%% H30: src_index + 9 <= src__index__subtype__1__last %%% Simplified H31 on reading formula in, to give: %%% H31: src_index + 10 >= src__index__subtype__1__first %%% Simplified H32 on reading formula in, to give: %%% H32: src_index + 10 <= src__index__subtype__1__last %%% Simplified H33 on reading formula in, to give: %%% H33: src_index + 11 >= src__index__subtype__1__first %%% Simplified H34 on reading formula in, to give: %%% H34: src_index + 11 <= src__index__subtype__1__last %%% Simplified H35 on reading formula in, to give: %%% H35: src_index + 12 >= src__index__subtype__1__first %%% Simplified H36 on reading formula in, to give: %%% H36: src_index + 12 <= src__index__subtype__1__last %%% Simplified H37 on reading formula in, to give: %%% H37: src_index + 13 >= src__index__subtype__1__first %%% Simplified H38 on reading formula in, to give: %%% H38: src_index + 13 <= src__index__subtype__1__last %%% Simplified H39 on reading formula in, to give: %%% H39: src_index + 14 >= src__index__subtype__1__first %%% Simplified H40 on reading formula in, to give: %%% H40: src_index + 14 <= src__index__subtype__1__last %%% Simplified H41 on reading formula in, to give: %%% H41: src_index + 15 >= src__index__subtype__1__first %%% Simplified H42 on reading formula in, to give: %%% H42: src_index + 15 <= src__index__subtype__1__last %%% Simplified H45 on reading formula in, to give: %%% H45: element(src, [src_index + 9]) >= interfaces__unsigned_64__first %%% Simplified H46 on reading formula in, to give: %%% H46: element(src, [src_index + 9]) <= interfaces__unsigned_64__last %%% Simplified H47 on reading formula in, to give: %%% H47: element(src, [src_index + 9]) >= spark__unsigned__u64__first %%% Simplified H48 on reading formula in, to give: %%% H48: element(src, [src_index + 9]) <= spark__unsigned__u64__last --- Hypothesis H49 has been replaced by "true". (It is already present, as H29). --- Hypothesis H50 has been replaced by "true". (It is already present, as H30). %%% Simplified H51 on reading formula in, to give: %%% H51: src_index + 9 >= integer__base__first %%% Simplified H52 on reading formula in, to give: %%% H52: src_index + 9 <= integer__base__last --- Hypothesis H55 has been replaced by "true". (It is already present, as H27). --- Hypothesis H56 has been replaced by "true". (It is already present, as H28). %%% Simplified H59 on reading formula in, to give: %%% H59: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) >= interfaces__unsigned_64__first %%% Simplified H60 on reading formula in, to give: %%% H60: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) <= interfaces__unsigned_64__last %%% Simplified H63 on reading formula in, to give: %%% H63: element(src, [src_index + 10]) >= interfaces__unsigned_64__first %%% Simplified H64 on reading formula in, to give: %%% H64: element(src, [src_index + 10]) <= interfaces__unsigned_64__last %%% Simplified H65 on reading formula in, to give: %%% H65: element(src, [src_index + 10]) >= spark__unsigned__u64__first %%% Simplified H66 on reading formula in, to give: %%% H66: element(src, [src_index + 10]) <= spark__unsigned__u64__last --- Hypothesis H67 has been replaced by "true". (It is already present, as H31). --- Hypothesis H68 has been replaced by "true". (It is already present, as H32). 
%%% Simplified H69 on reading formula in, to give: %%% H69: src_index + 10 >= integer__base__first %%% Simplified H70 on reading formula in, to give: %%% H70: src_index + 10 <= integer__base__last %%% Simplified H71 on reading formula in, to give: %%% H71: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) >= interfaces__unsigned_64__first %%% Simplified H72 on reading formula in, to give: %%% H72: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) <= interfaces__unsigned_64__last %%% Simplified H75 on reading formula in, to give: %%% H75: element(src, [src_index + 11]) >= interfaces__unsigned_64__first %%% Simplified H76 on reading formula in, to give: %%% H76: element(src, [src_index + 11]) <= interfaces__unsigned_64__last %%% Simplified H77 on reading formula in, to give: %%% H77: element(src, [src_index + 11]) >= spark__unsigned__u64__first %%% Simplified H78 on reading formula in, to give: %%% H78: element(src, [src_index + 11]) <= spark__unsigned__u64__last --- Hypothesis H79 has been replaced by "true". (It is already present, as H33). --- Hypothesis H80 has been replaced by "true". (It is already present, as H34). %%% Simplified H81 on reading formula in, to give: %%% H81: src_index + 11 >= integer__base__first %%% Simplified H82 on reading formula in, to give: %%% H82: src_index + 11 <= integer__base__last %%% Simplified H83 on reading formula in, to give: %%% H83: spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24) >= interfaces__unsigned_64__first %%% Simplified H84 on reading formula in, to give: %%% H84: spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24) <= interfaces__unsigned_64__last %%% Simplified H87 on reading formula in, to give: %%% H87: element(src, [src_index + 12]) >= interfaces__unsigned_64__first %%% Simplified H88 on reading formula in, to give: %%% H88: element(src, [src_index + 12]) <= interfaces__unsigned_64__last %%% Simplified H89 on reading formula in, to give: %%% H89: element(src, [src_index + 12]) >= spark__unsigned__u64__first %%% Simplified H90 on reading formula in, to give: %%% H90: element(src, [src_index + 12]) <= spark__unsigned__u64__last --- Hypothesis H91 has been replaced by "true". (It is already present, as H35). --- Hypothesis H92 has been replaced by "true". (It is already present, as H36). %%% Simplified H93 on reading formula in, to give: %%% H93: src_index + 12 >= integer__base__first %%% Simplified H94 on reading formula in, to give: %%% H94: src_index + 12 <= integer__base__last %%% Simplified H95 on reading formula in, to give: %%% H95: spark__unsigned__shift_left_64(element(src, [src_index + 12]), 32) >= interfaces__unsigned_64__first %%% Simplified H96 on reading formula in, to give: %%% H96: spark__unsigned__shift_left_64(element(src, [src_index + 12]), 32) <= interfaces__unsigned_64__last %%% Simplified H99 on reading formula in, to give: %%% H99: element(src, [src_index + 13]) >= interfaces__unsigned_64__first %%% Simplified H100 on reading formula in, to give: %%% H100: element(src, [src_index + 13]) <= interfaces__unsigned_64__last %%% Simplified H101 on reading formula in, to give: %%% H101: element(src, [src_index + 13]) >= spark__unsigned__u64__first %%% Simplified H102 on reading formula in, to give: %%% H102: element(src, [src_index + 13]) <= spark__unsigned__u64__last --- Hypothesis H103 has been replaced by "true". (It is already present, as H37). --- Hypothesis H104 has been replaced by "true". (It is already present, as H38). 
%%% Simplified H105 on reading formula in, to give: %%% H105: src_index + 13 >= integer__base__first %%% Simplified H106 on reading formula in, to give: %%% H106: src_index + 13 <= integer__base__last %%% Simplified H107 on reading formula in, to give: %%% H107: spark__unsigned__shift_left_64(element(src, [src_index + 13]), 40) >= interfaces__unsigned_64__first %%% Simplified H108 on reading formula in, to give: %%% H108: spark__unsigned__shift_left_64(element(src, [src_index + 13]), 40) <= interfaces__unsigned_64__last %%% Simplified H111 on reading formula in, to give: %%% H111: element(src, [src_index + 14]) >= interfaces__unsigned_64__first %%% Simplified H112 on reading formula in, to give: %%% H112: element(src, [src_index + 14]) <= interfaces__unsigned_64__last %%% Simplified H113 on reading formula in, to give: %%% H113: element(src, [src_index + 14]) >= spark__unsigned__u64__first %%% Simplified H114 on reading formula in, to give: %%% H114: element(src, [src_index + 14]) <= spark__unsigned__u64__last --- Hypothesis H115 has been replaced by "true". (It is already present, as H39). --- Hypothesis H116 has been replaced by "true". (It is already present, as H40). %%% Simplified H117 on reading formula in, to give: %%% H117: src_index + 14 >= integer__base__first %%% Simplified H118 on reading formula in, to give: %%% H118: src_index + 14 <= integer__base__last %%% Simplified H119 on reading formula in, to give: %%% H119: spark__unsigned__shift_left_64(element(src, [src_index + 14]), 48) >= interfaces__unsigned_64__first %%% Simplified H120 on reading formula in, to give: %%% H120: spark__unsigned__shift_left_64(element(src, [src_index + 14]), 48) <= interfaces__unsigned_64__last %%% Simplified C3 on reading formula in, to give: %%% C3: element(src, [src_index + 15]) >= interfaces__unsigned_64__first %%% Simplified C4 on reading formula in, to give: %%% C4: element(src, [src_index + 15]) <= interfaces__unsigned_64__last %%% Simplified C5 on reading formula in, to give: %%% C5: element(src, [src_index + 15]) >= spark__unsigned__u64__first %%% Simplified C6 on reading formula in, to give: %%% C6: element(src, [src_index + 15]) <= spark__unsigned__u64__last %%% Simplified C7 on reading formula in, to give: %%% C7: src_index + 15 >= src__index__subtype__1__first %%% Simplified C8 on reading formula in, to give: %%% C8: src_index + 15 <= src__index__subtype__1__last %%% Simplified C9 on reading formula in, to give: %%% C9: src_index + 15 >= integer__base__first %%% Simplified C10 on reading formula in, to give: %%% C10: src_index + 15 <= integer__base__last *** Proved C1: 56 >= spark__unsigned__shift_count__first using hypothesis H43. *** Proved C7: src_index + 15 >= src__index__subtype__1__first using hypothesis H41. *** Proved C8: src_index + 15 <= src__index__subtype__1__last using hypothesis H42. *** Proved C9: src_index + 15 >= integer__base__first using hypothesis H51. -S- Applied substitution rule get_64_lsb_f_rules(39). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H44: true New H62: true New H74: true New H86: true New H98: true New H110: true New C2: true -S- Applied substitution rule get_64_lsb_f_rules(20). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. 
New H45: element(src, [src_index + 9]) >= 0 New H59: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) >= 0 New H63: element(src, [src_index + 10]) >= 0 New H71: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) >= 0 New H75: element(src, [src_index + 11]) >= 0 New H83: spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24) >= 0 New H87: element(src, [src_index + 12]) >= 0 New H95: spark__unsigned__shift_left_64(element(src, [src_index + 12]), 32) >= 0 New H99: element(src, [src_index + 13]) >= 0 New H107: spark__unsigned__shift_left_64(element(src, [src_index + 13]), 40) >= 0 New H111: element(src, [src_index + 14]) >= 0 New H119: spark__unsigned__shift_left_64(element(src, [src_index + 14]), 48) >= 0 New C3: element(src, [src_index + 15]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(21). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H46: element(src, [src_index + 9]) <= 18446744073709551615 New H60: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) <= 18446744073709551615 New H64: element(src, [src_index + 10]) <= 18446744073709551615 New H72: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) <= 18446744073709551615 New H76: element(src, [src_index + 11]) <= 18446744073709551615 New H84: spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24) <= 18446744073709551615 New H88: element(src, [src_index + 12]) <= 18446744073709551615 New H96: spark__unsigned__shift_left_64(element(src, [src_index + 12]), 32) <= 18446744073709551615 New H100: element(src, [src_index + 13]) <= 18446744073709551615 New H108: spark__unsigned__shift_left_64(element(src, [src_index + 13]), 40) <= 18446744073709551615 New H112: element(src, [src_index + 14]) <= 18446744073709551615 New H120: spark__unsigned__shift_left_64(element(src, [src_index + 14]), 48) <= 18446744073709551615 New C4: element(src, [src_index + 15]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> 0 <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) New H47: element(src, [src_index + 9]) >= 0 New H53: element(src, [src_index + 8]) >= 0 New H65: element(src, [src_index + 10]) >= 0 New H77: element(src, [src_index + 11]) >= 0 New H89: element(src, [src_index + 12]) >= 0 New H101: element(src, [src_index + 13]) >= 0 New H113: element(src, [src_index + 14]) >= 0 New C5: element(src, [src_index + 15]) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H48: element(src, [src_index + 9]) <= 18446744073709551615 New H54: element(src, [src_index + 8]) <= 18446744073709551615 New H66: element(src, [src_index + 10]) <= 18446744073709551615 New H78: element(src, [src_index + 11]) <= 18446744073709551615 New H90: element(src, [src_index + 12]) <= 18446744073709551615 New H102: element(src, [src_index + 13]) <= 18446744073709551615 New H114: element(src, [src_index + 14]) <= 18446744073709551615 New H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> 0 <= element(dst, [i_]) and element(dst, [i_]) <= 18446744073709551615) New C6: element(src, [src_index + 15]) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(5). 
This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H52: src_index <= 2147483638 New H70: src_index <= 2147483637 New H82: src_index <= 2147483636 New H94: src_index <= 2147483635 New H106: src_index <= 2147483634 New H118: src_index <= 2147483633 New C10: src_index <= 2147483632 *** Proved C2: true >>> Restructured hypothesis H20 into: >>> H20: dst_index <> dst__index__subtype__1__last --- Attempted addition of new hypothesis: dst_index + 1 <= natural__last eliminated: this already exists (as H21). +++ Using "A->B, A |- B" on hypotheses H8 & H20 yields a new hypothesis: +++ H21: dst_index + 1 <= natural__last --- Attempted addition of new hypothesis: src_index + 8 <= natural__last eliminated: this already exists (as H24). +++ Using "A->B, A |- B" on hypotheses H9 & H20 yields a new hypothesis: +++ H24: src_index + 8 <= natural__last -S- Applied substitution rule get_64_lsb_f_rules(4). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H51: src_index >= - 2147483657 New H69: src_index >= - 2147483658 New H81: src_index >= - 2147483659 New H93: src_index >= - 2147483660 New H105: src_index >= - 2147483661 New H117: src_index >= - 2147483662 -S- Applied substitution rule get_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H11: src_offset >= 0 New H25: src_index >= - 8 -S- Applied substitution rule get_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H8: dst_index <> dst__index__subtype__1__last -> dst_index <= 2147483646 New H9: dst_index <> dst__index__subtype__1__last -> src_index <= 2147483639 New H12: src_offset <= 2147483647 New H19: src_offset + dst__index__subtype__1__last * 8 <= 2147483647 New H21: dst_index <= 2147483646 New H24: src_index <= 2147483639 -S- Applied substitution rule get_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule get_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 255) -S- Applied substitution rule get_64_lsb_f_rules(38). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H43: true New H61: true New H73: true New H85: true New H97: true New H109: true -S- Applied substitution rule get_64_lsb_f_rules(43). This was achieved by replacing all occurrences of spark__crypto__word_count_t__first by: 0. New H22: dst_index >= - 1 -S- Applied substitution rule get_64_lsb_f_rules(44). This was achieved by replacing all occurrences of spark__crypto__word_count_t__last by: 268435455. New H4: dst__index__subtype__1__last <= 268435455 New H23: dst_index <= 268435454 %%% Hypotheses H3 & H20 together imply that dst_index < dst__index__subtype__1__last. H3 & H20 have therefore been deleted and a new H121 added to this effect. *** Proved C3: element(src, [src_index + 15]) >= 0 using hypotheses H10, H41 & H42. *** Proved C4: element(src, [src_index + 15]) <= 18446744073709551615 using hypotheses H10, H41 & H42. 
*** Proved C5: element(src, [src_index + 15]) >= 0 using hypotheses H10, H41 & H42. *** Proved C6: element(src, [src_index + 15]) <= 18446744073709551615 using hypotheses H10, H41 & H42. --- Eliminated hypothesis H26 (true-hypothesis). --- Eliminated hypothesis H44 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H50 (true-hypothesis). --- Eliminated hypothesis H55 (true-hypothesis). --- Eliminated hypothesis H56 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H67 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H74 (true-hypothesis). --- Eliminated hypothesis H79 (true-hypothesis). --- Eliminated hypothesis H80 (true-hypothesis). --- Eliminated hypothesis H86 (true-hypothesis). --- Eliminated hypothesis H91 (true-hypothesis). --- Eliminated hypothesis H92 (true-hypothesis). --- Eliminated hypothesis H98 (true-hypothesis). --- Eliminated hypothesis H103 (true-hypothesis). --- Eliminated hypothesis H104 (true-hypothesis). --- Eliminated hypothesis H110 (true-hypothesis). --- Eliminated hypothesis H115 (true-hypothesis). --- Eliminated hypothesis H116 (true-hypothesis). --- Eliminated hypothesis H43 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). --- Eliminated hypothesis H73 (true-hypothesis). --- Eliminated hypothesis H85 (true-hypothesis). --- Eliminated hypothesis H97 (true-hypothesis). --- Eliminated hypothesis H109 (true-hypothesis). --- Eliminated hypothesis H47 (duplicate of H45). --- Eliminated hypothesis H48 (duplicate of H46). --- Eliminated hypothesis H65 (duplicate of H63). --- Eliminated hypothesis H66 (duplicate of H64). --- Eliminated hypothesis H77 (duplicate of H75). --- Eliminated hypothesis H78 (duplicate of H76). --- Eliminated hypothesis H89 (duplicate of H87). --- Eliminated hypothesis H90 (duplicate of H88). --- Eliminated hypothesis H101 (duplicate of H99). --- Eliminated hypothesis H102 (duplicate of H100). --- Eliminated hypothesis H113 (duplicate of H111). --- Eliminated hypothesis H114 (duplicate of H112). --- Eliminated hypothesis H121 (duplicate of H58). >>> Using "A->B, A |- B" on H8, given H121, we simplify the former to: >>> H8: dst_index <= 2147483646 >>> Using "A->B, A |- B" on H9, given H121, we simplify the former to: >>> H9: src_index <= 2147483639 --- Eliminated hypothesis H15 (redundant, given H18). --- Eliminated hypothesis H21 (redundant, given H23). --- Eliminated hypothesis H22 (redundant, given H2 & H14). --- Eliminated hypothesis H24 (redundant, given H52). --- Eliminated hypothesis H28 (redundant, given H30). --- Eliminated hypothesis H29 (redundant, given H27). --- Eliminated hypothesis H30 (redundant, given H32). --- Eliminated hypothesis H31 (redundant, given H27). --- Eliminated hypothesis H32 (redundant, given H34). --- Eliminated hypothesis H33 (redundant, given H27). --- Eliminated hypothesis H34 (redundant, given H36). --- Eliminated hypothesis H35 (redundant, given H27). --- Eliminated hypothesis H36 (redundant, given H38). --- Eliminated hypothesis H37 (redundant, given H27). --- Eliminated hypothesis H38 (redundant, given H40). --- Eliminated hypothesis H39 (redundant, given H27). --- Eliminated hypothesis H40 (redundant, given H42). --- Eliminated hypothesis H41 (redundant, given H27). --- Eliminated hypothesis H51 (redundant, given H25). --- Eliminated hypothesis H52 (redundant, given H70). --- Eliminated hypothesis H57 (redundant, given H2). 
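[Editorial note] The conclusions just discharged for this VC, C3 .. C6, bound element(src, [src_index + 15]); they follow from the quantified byte-range hypothesis H10 instantiated at src_index + 15, with H41 and H42 supplying the index bounds, while C7 .. C10 are the corresponding index and Integer'Base range checks. In source terms the obligation is simply a read of the byte fifteen places beyond src_index. Below is a minimal, hedged Ada sketch of that shape, not the packaged SPARK source: the names Byte_Read_Sketch, Byte_Seq and Byte_15_After, and the Ada 2012 contract style, are assumptions made purely for illustration.

with Interfaces; use Interfaces;

package Byte_Read_Sketch is

   type Byte_Seq is array (Natural range <>) of Unsigned_8;

   --  Reading Src (Src_Index + 15) obliges a prover to show that the
   --  index sum neither overflows Integer'Base (cf. C9/C10 above) nor
   --  leaves Src'Range (cf. C7/C8); the value read is a single byte,
   --  so it trivially satisfies 64-bit bounds such as C3 .. C6.
   function Byte_15_After
     (Src : Byte_Seq; Src_Index : Natural) return Unsigned_8
   is (Src (Src_Index + 15))
   with Pre => Src_Index <= Natural'Last - 15
               and then Src_Index + 15 in Src'Range;

end Byte_Read_Sketch;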
--- Eliminated hypothesis H69 (redundant, given H25). --- Eliminated hypothesis H70 (redundant, given H82). --- Eliminated hypothesis H81 (redundant, given H25). --- Eliminated hypothesis H82 (redundant, given H94). --- Eliminated hypothesis H93 (redundant, given H25). --- Eliminated hypothesis H94 (redundant, given H106). --- Eliminated hypothesis H105 (redundant, given H25). --- Eliminated hypothesis H106 (redundant, given H118). --- Eliminated hypothesis H117 (redundant, given H25). -S- Eliminated hypothesis H5. This was achieved by replacing all occurrences of src_index by: src_offset + dst_index * 8. New H6: src_offset + dst_index * 8 >= src_offset New H7: src_offset + dst_index * 8 <= src_offset + dst__index__subtype__1__last * 8 New H27: src_offset + dst_index * 8 + 8 >= src__index__subtype__1__first New H42: src_offset + dst_index * 8 + 15 <= src__index__subtype__1__last New H45: element(src, [src_offset + dst_index * 8 + 9]) >= 0 New H46: element(src, [src_offset + dst_index * 8 + 9]) <= 18446744073709551615 New H53: element(src, [src_offset + dst_index * 8 + 8]) >= 0 New H54: element(src, [src_offset + dst_index * 8 + 8]) <= 18446744073709551615 New H59: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 9]), 8) >= 0 New H60: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 9]), 8) <= 18446744073709551615 New H63: element(src, [src_offset + dst_index * 8 + 10]) >= 0 New H64: element(src, [src_offset + dst_index * 8 + 10]) <= 18446744073709551615 New H71: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 10]), 16) >= 0 New H72: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 10]), 16) <= 18446744073709551615 New H75: element(src, [src_offset + dst_index * 8 + 11]) >= 0 New H76: element(src, [src_offset + dst_index * 8 + 11]) <= 18446744073709551615 New H83: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 11]), 24) >= 0 New H84: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 11]), 24) <= 18446744073709551615 New H87: element(src, [src_offset + dst_index * 8 + 12]) >= 0 New H88: element(src, [src_offset + dst_index * 8 + 12]) <= 18446744073709551615 New H95: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 12]), 32) >= 0 New H96: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 12]), 32) <= 18446744073709551615 New H99: element(src, [src_offset + dst_index * 8 + 13]) >= 0 New H100: element(src, [src_offset + dst_index * 8 + 13]) <= 18446744073709551615 New H107: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 13]), 40) >= 0 New H108: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 13]), 40) <= 18446744073709551615 New H111: element(src, [src_offset + dst_index * 8 + 14]) >= 0 New H112: element(src, [src_offset + dst_index * 8 + 14]) <= 18446744073709551615 New H118: src_offset + dst_index * 8 <= 2147483633 New H119: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 14]), 48) >= 0 New H120: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 14]), 48) <= 18446744073709551615 New H25: src_offset + dst_index * 8 >= - 8 New H9: src_offset + dst_index * 8 <= 2147483639 New C10: src_offset + dst_index * 8 <= 2147483632 +++ New H122: integer__size >= 0 +++ New H123: natural__size >= 0 +++ New H124: spark__unsigned__byte__size >= 0 +++ New H125: spark__unsigned__u64__size >= 0 +++ New H126: 
spark__unsigned__shift_count__size >= 0 +++ New H127: spark__crypto__word_count_t__size >= 0 +++ New H128: dst__index__subtype__1__first <= dst__index__subtype__1__last +++ New H129: src__index__subtype__1__first <= src__index__subtype__1__last +++ New H130: src__index__subtype__1__first >= 0 +++ New H131: src__index__subtype__1__last >= 0 +++ New H132: src__index__subtype__1__last <= 2147483647 +++ New H133: src__index__subtype__1__first <= 2147483647 +++ New H134: dst__index__subtype__1__first >= 0 +++ New H135: dst__index__subtype__1__last >= 0 --- Attempted addition of new hypothesis: dst__index__subtype__1__last <= 268435455 eliminated: this already exists (as H4). +++ New H4: dst__index__subtype__1__last <= 268435455 +++ New H136: dst__index__subtype__1__first <= 268435455 *** Proved C10: src_offset + dst_index * 8 <= 2147483632 using hypotheses H42 & H132. *** PROVED VC. @@@@@@@@@@ VC: procedure_get_64_lsb_first_19. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H13 has been replaced by "true". (It is already present, as H9) . --- Hypothesis H14 has been replaced by "true". (It is already present, as H10). --- Hypothesis H16 has been replaced by "true". (It is already present, as H3) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H37 has been replaced by "true". (It is already present, as H17). --- Hypothesis H38 has been replaced by "true". (It is already present, as H18). --- Hypothesis H43 has been replaced by "true". (It is already present, as H15). --- Hypothesis H44 has been replaced by "true". (It is already present, as H3) . --- Hypothesis H55 has been replaced by "true". (It is already present, as H19). --- Hypothesis H56 has been replaced by "true". (It is already present, as H20). --- Hypothesis H67 has been replaced by "true". (It is already present, as H21). --- Hypothesis H68 has been replaced by "true". (It is already present, as H22). --- Hypothesis H79 has been replaced by "true". (It is already present, as H23). --- Hypothesis H80 has been replaced by "true". (It is already present, as H24). --- Hypothesis H91 has been replaced by "true". (It is already present, as H25). --- Hypothesis H92 has been replaced by "true". (It is already present, as H26). --- Hypothesis H103 has been replaced by "true". (It is already present, as H27). --- Hypothesis H104 has been replaced by "true". (It is already present, as H28). --- Hypothesis H115 has been replaced by "true". (It is already present, as H29). --- Hypothesis H116 has been replaced by "true". (It is already present, as H6). -S- Applied substitution rule get_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H35: element(src, [src_offset + 1]) >= 0 New H41: element(src, [src_offset]) >= 0 New H53: element(src, [src_offset + 2]) >= 0 New H65: element(src, [src_offset + 3]) >= 0 New H77: element(src, [src_offset + 4]) >= 0 New H89: element(src, [src_offset + 5]) >= 0 New H101: element(src, [src_offset + 6]) >= 0 New H113: element(src, [src_offset + 7]) >= 0 New C1: (((((((element(src, [src_offset]) + spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 2]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 3]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 4]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 5]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 6]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 7]), 56)) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule get_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H36: element(src, [src_offset + 1]) <= 18446744073709551615 New H42: element(src, [src_offset]) <= 18446744073709551615 New H54: element(src, [src_offset + 2]) <= 18446744073709551615 New H66: element(src, [src_offset + 3]) <= 18446744073709551615 New H78: element(src, [src_offset + 4]) <= 18446744073709551615 New H90: element(src, [src_offset + 5]) <= 18446744073709551615 New H102: element(src, [src_offset + 6]) <= 18446744073709551615 New H114: element(src, [src_offset + 7]) <= 18446744073709551615 New C2: (((((((element(src, [src_offset]) + spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 2]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 3]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 4]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 5]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 6]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 7]), 56)) mod interfaces__unsigned_64__modulus <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(4). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H39: src_offset >= - 2147483649 New H57: src_offset >= - 2147483650 New H69: src_offset >= - 2147483651 New H81: src_offset >= - 2147483652 New H93: src_offset >= - 2147483653 New H105: src_offset >= - 2147483654 New H117: src_offset >= - 2147483655 -S- Applied substitution rule get_64_lsb_f_rules(5). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H40: src_offset <= 2147483646 New H58: src_offset <= 2147483645 New H70: src_offset <= 2147483644 New H82: src_offset <= 2147483643 New H94: src_offset <= 2147483642 New H106: src_offset <= 2147483641 New H118: src_offset <= 2147483640 -S- Applied substitution rule get_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. 
New H9: src_offset >= 0 -S- Applied substitution rule get_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: src_offset + dst__index__subtype__1__last * 8 <= 2147483647 New H10: src_offset <= 2147483647 -S- Applied substitution rule get_64_lsb_f_rules(20). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H33: element(src, [src_offset + 1]) >= 0 New H47: spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8) >= 0 New H51: element(src, [src_offset + 2]) >= 0 New H59: spark__unsigned__shift_left_64(element(src, [src_offset + 2]), 16) >= 0 New H63: element(src, [src_offset + 3]) >= 0 New H71: spark__unsigned__shift_left_64(element(src, [src_offset + 3]), 24) >= 0 New H75: element(src, [src_offset + 4]) >= 0 New H83: spark__unsigned__shift_left_64(element(src, [src_offset + 4]), 32) >= 0 New H87: element(src, [src_offset + 5]) >= 0 New H95: spark__unsigned__shift_left_64(element(src, [src_offset + 5]), 40) >= 0 New H99: element(src, [src_offset + 6]) >= 0 New H107: spark__unsigned__shift_left_64(element(src, [src_offset + 6]), 48) >= 0 New H111: element(src, [src_offset + 7]) >= 0 New H119: spark__unsigned__shift_left_64(element(src, [src_offset + 7]), 56) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(21). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H34: element(src, [src_offset + 1]) <= 18446744073709551615 New H48: spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8) <= 18446744073709551615 New H52: element(src, [src_offset + 2]) <= 18446744073709551615 New H60: spark__unsigned__shift_left_64(element(src, [src_offset + 2]), 16) <= 18446744073709551615 New H64: element(src, [src_offset + 3]) <= 18446744073709551615 New H72: spark__unsigned__shift_left_64(element(src, [src_offset + 3]), 24) <= 18446744073709551615 New H76: element(src, [src_offset + 4]) <= 18446744073709551615 New H84: spark__unsigned__shift_left_64(element(src, [src_offset + 4]), 32) <= 18446744073709551615 New H88: element(src, [src_offset + 5]) <= 18446744073709551615 New H96: spark__unsigned__shift_left_64(element(src, [src_offset + 5]), 40) <= 18446744073709551615 New H100: element(src, [src_offset + 6]) <= 18446744073709551615 New H108: spark__unsigned__shift_left_64(element(src, [src_offset + 6]), 48) <= 18446744073709551615 New H112: element(src, [src_offset + 7]) <= 18446744073709551615 New H120: spark__unsigned__shift_left_64(element(src, [src_offset + 7]), 56) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(24). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. 
New C1: (((((((element(src, [src_offset]) + spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 2]), 16)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 3]), 24)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 4]), 32)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 5]), 40)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 6]), 48)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 7]), 56)) mod 18446744073709551616 >= 0 New C2: (((((((element(src, [src_offset]) + spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 2]), 16)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 3]), 24)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 4]), 32)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 5]), 40)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 6]), 48)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 7]), 56)) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule get_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 255) -S- Applied substitution rule get_64_lsb_f_rules(38). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H31: true New H49: true New H61: true New H73: true New H85: true New H97: true New H109: true -S- Applied substitution rule get_64_lsb_f_rules(39). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H32: true New H50: true New H62: true New H74: true New H86: true New H98: true New H110: true -S- Applied substitution rule get_64_lsb_f_rules(43). This was achieved by replacing all occurrences of spark__crypto__word_count_t__first by: 0. New H11: true -S- Applied substitution rule get_64_lsb_f_rules(44). This was achieved by replacing all occurrences of spark__crypto__word_count_t__last by: 268435455. 
New H12: true *** Proved C1: (((((((element(src, [src_offset]) + spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 2]), 16)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 3]), 24)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 4]), 32)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 5]), 40)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 6]), 48)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 7]), 56)) mod 18446744073709551616 >= 0 *** Proved C2: (((((((element(src, [src_offset]) + spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 2]), 16)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 3]), 24)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 4]), 32)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 5]), 40)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 6]), 48)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 7]), 56)) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_get_64_lsb_first_20. @@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> spark__unsigned__u64__first <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) %%% Simplified H10 on reading formula in, to give: %%% H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). 
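[Editorial note] Conclusions C1 and C2 of VC procedure_get_64_lsb_first_19, proved above, bound the value built from the eight bytes at src_offset .. src_offset + 7, least significant byte first, with every partial sum reduced mod 2**64. A hedged Ada sketch of that expression follows; the unit name Word_LSB_First is an assumption, and the explicit mod terms disappear because Interfaces.Unsigned_64 is a modular type whose "+" already wraps.

with Interfaces; use Interfaces;

function Word_LSB_First
  (B0, B1, B2, B3, B4, B5, B6, B7 : Unsigned_8) return Unsigned_64
is
begin
   --  Each "+" wraps modulo 2**64, mirroring the repeated
   --  "mod interfaces__unsigned_64__modulus" terms in C1 and C2, so the
   --  result necessarily lies in 0 .. 18446744073709551615.
   return Unsigned_64 (B0)
     + Shift_Left (Unsigned_64 (B1), 8)
     + Shift_Left (Unsigned_64 (B2), 16)
     + Shift_Left (Unsigned_64 (B3), 24)
     + Shift_Left (Unsigned_64 (B4), 32)
     + Shift_Left (Unsigned_64 (B5), 40)
     + Shift_Left (Unsigned_64 (B6), 48)
     + Shift_Left (Unsigned_64 (B7), 56);
end Word_LSB_First;

Since no shifted byte overlaps another, the same value could equally be formed with "or"; the additive form is kept here only because it matches the FDL conclusions verbatim.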
%%% Simplified H29 on reading formula in, to give: %%% H29: src_index + 9 >= src__index__subtype__1__first %%% Simplified H30 on reading formula in, to give: %%% H30: src_index + 9 <= src__index__subtype__1__last %%% Simplified H31 on reading formula in, to give: %%% H31: src_index + 10 >= src__index__subtype__1__first %%% Simplified H32 on reading formula in, to give: %%% H32: src_index + 10 <= src__index__subtype__1__last %%% Simplified H33 on reading formula in, to give: %%% H33: src_index + 11 >= src__index__subtype__1__first %%% Simplified H34 on reading formula in, to give: %%% H34: src_index + 11 <= src__index__subtype__1__last %%% Simplified H35 on reading formula in, to give: %%% H35: src_index + 12 >= src__index__subtype__1__first %%% Simplified H36 on reading formula in, to give: %%% H36: src_index + 12 <= src__index__subtype__1__last %%% Simplified H37 on reading formula in, to give: %%% H37: src_index + 13 >= src__index__subtype__1__first %%% Simplified H38 on reading formula in, to give: %%% H38: src_index + 13 <= src__index__subtype__1__last %%% Simplified H39 on reading formula in, to give: %%% H39: src_index + 14 >= src__index__subtype__1__first %%% Simplified H40 on reading formula in, to give: %%% H40: src_index + 14 <= src__index__subtype__1__last %%% Simplified H41 on reading formula in, to give: %%% H41: src_index + 15 >= src__index__subtype__1__first %%% Simplified H42 on reading formula in, to give: %%% H42: src_index + 15 <= src__index__subtype__1__last %%% Simplified H45 on reading formula in, to give: %%% H45: element(src, [src_index + 9]) >= interfaces__unsigned_64__first %%% Simplified H46 on reading formula in, to give: %%% H46: element(src, [src_index + 9]) <= interfaces__unsigned_64__last %%% Simplified H47 on reading formula in, to give: %%% H47: element(src, [src_index + 9]) >= spark__unsigned__u64__first %%% Simplified H48 on reading formula in, to give: %%% H48: element(src, [src_index + 9]) <= spark__unsigned__u64__last --- Hypothesis H49 has been replaced by "true". (It is already present, as H29). --- Hypothesis H50 has been replaced by "true". (It is already present, as H30). %%% Simplified H51 on reading formula in, to give: %%% H51: src_index + 9 >= integer__base__first %%% Simplified H52 on reading formula in, to give: %%% H52: src_index + 9 <= integer__base__last --- Hypothesis H55 has been replaced by "true". (It is already present, as H27). --- Hypothesis H56 has been replaced by "true". (It is already present, as H28). %%% Simplified H59 on reading formula in, to give: %%% H59: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) >= interfaces__unsigned_64__first %%% Simplified H60 on reading formula in, to give: %%% H60: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) <= interfaces__unsigned_64__last %%% Simplified H63 on reading formula in, to give: %%% H63: element(src, [src_index + 10]) >= interfaces__unsigned_64__first %%% Simplified H64 on reading formula in, to give: %%% H64: element(src, [src_index + 10]) <= interfaces__unsigned_64__last %%% Simplified H65 on reading formula in, to give: %%% H65: element(src, [src_index + 10]) >= spark__unsigned__u64__first %%% Simplified H66 on reading formula in, to give: %%% H66: element(src, [src_index + 10]) <= spark__unsigned__u64__last --- Hypothesis H67 has been replaced by "true". (It is already present, as H31). --- Hypothesis H68 has been replaced by "true". (It is already present, as H32). 
%%% Simplified H69 on reading formula in, to give: %%% H69: src_index + 10 >= integer__base__first %%% Simplified H70 on reading formula in, to give: %%% H70: src_index + 10 <= integer__base__last %%% Simplified H71 on reading formula in, to give: %%% H71: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) >= interfaces__unsigned_64__first %%% Simplified H72 on reading formula in, to give: %%% H72: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) <= interfaces__unsigned_64__last %%% Simplified H75 on reading formula in, to give: %%% H75: element(src, [src_index + 11]) >= interfaces__unsigned_64__first %%% Simplified H76 on reading formula in, to give: %%% H76: element(src, [src_index + 11]) <= interfaces__unsigned_64__last %%% Simplified H77 on reading formula in, to give: %%% H77: element(src, [src_index + 11]) >= spark__unsigned__u64__first %%% Simplified H78 on reading formula in, to give: %%% H78: element(src, [src_index + 11]) <= spark__unsigned__u64__last --- Hypothesis H79 has been replaced by "true". (It is already present, as H33). --- Hypothesis H80 has been replaced by "true". (It is already present, as H34). %%% Simplified H81 on reading formula in, to give: %%% H81: src_index + 11 >= integer__base__first %%% Simplified H82 on reading formula in, to give: %%% H82: src_index + 11 <= integer__base__last %%% Simplified H83 on reading formula in, to give: %%% H83: spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24) >= interfaces__unsigned_64__first %%% Simplified H84 on reading formula in, to give: %%% H84: spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24) <= interfaces__unsigned_64__last %%% Simplified H87 on reading formula in, to give: %%% H87: element(src, [src_index + 12]) >= interfaces__unsigned_64__first %%% Simplified H88 on reading formula in, to give: %%% H88: element(src, [src_index + 12]) <= interfaces__unsigned_64__last %%% Simplified H89 on reading formula in, to give: %%% H89: element(src, [src_index + 12]) >= spark__unsigned__u64__first %%% Simplified H90 on reading formula in, to give: %%% H90: element(src, [src_index + 12]) <= spark__unsigned__u64__last --- Hypothesis H91 has been replaced by "true". (It is already present, as H35). --- Hypothesis H92 has been replaced by "true". (It is already present, as H36). %%% Simplified H93 on reading formula in, to give: %%% H93: src_index + 12 >= integer__base__first %%% Simplified H94 on reading formula in, to give: %%% H94: src_index + 12 <= integer__base__last %%% Simplified H95 on reading formula in, to give: %%% H95: spark__unsigned__shift_left_64(element(src, [src_index + 12]), 32) >= interfaces__unsigned_64__first %%% Simplified H96 on reading formula in, to give: %%% H96: spark__unsigned__shift_left_64(element(src, [src_index + 12]), 32) <= interfaces__unsigned_64__last %%% Simplified H99 on reading formula in, to give: %%% H99: element(src, [src_index + 13]) >= interfaces__unsigned_64__first %%% Simplified H100 on reading formula in, to give: %%% H100: element(src, [src_index + 13]) <= interfaces__unsigned_64__last %%% Simplified H101 on reading formula in, to give: %%% H101: element(src, [src_index + 13]) >= spark__unsigned__u64__first %%% Simplified H102 on reading formula in, to give: %%% H102: element(src, [src_index + 13]) <= spark__unsigned__u64__last --- Hypothesis H103 has been replaced by "true". (It is already present, as H37). --- Hypothesis H104 has been replaced by "true". (It is already present, as H38). 
%%% Simplified H105 on reading formula in, to give: %%% H105: src_index + 13 >= integer__base__first %%% Simplified H106 on reading formula in, to give: %%% H106: src_index + 13 <= integer__base__last %%% Simplified H107 on reading formula in, to give: %%% H107: spark__unsigned__shift_left_64(element(src, [src_index + 13]), 40) >= interfaces__unsigned_64__first %%% Simplified H108 on reading formula in, to give: %%% H108: spark__unsigned__shift_left_64(element(src, [src_index + 13]), 40) <= interfaces__unsigned_64__last %%% Simplified H111 on reading formula in, to give: %%% H111: element(src, [src_index + 14]) >= interfaces__unsigned_64__first %%% Simplified H112 on reading formula in, to give: %%% H112: element(src, [src_index + 14]) <= interfaces__unsigned_64__last %%% Simplified H113 on reading formula in, to give: %%% H113: element(src, [src_index + 14]) >= spark__unsigned__u64__first %%% Simplified H114 on reading formula in, to give: %%% H114: element(src, [src_index + 14]) <= spark__unsigned__u64__last --- Hypothesis H115 has been replaced by "true". (It is already present, as H39). --- Hypothesis H116 has been replaced by "true". (It is already present, as H40). %%% Simplified H117 on reading formula in, to give: %%% H117: src_index + 14 >= integer__base__first %%% Simplified H118 on reading formula in, to give: %%% H118: src_index + 14 <= integer__base__last %%% Simplified H119 on reading formula in, to give: %%% H119: spark__unsigned__shift_left_64(element(src, [src_index + 14]), 48) >= interfaces__unsigned_64__first %%% Simplified H120 on reading formula in, to give: %%% H120: spark__unsigned__shift_left_64(element(src, [src_index + 14]), 48) <= interfaces__unsigned_64__last %%% Simplified H123 on reading formula in, to give: %%% H123: element(src, [src_index + 15]) >= interfaces__unsigned_64__first %%% Simplified H124 on reading formula in, to give: %%% H124: element(src, [src_index + 15]) <= interfaces__unsigned_64__last %%% Simplified H125 on reading formula in, to give: %%% H125: element(src, [src_index + 15]) >= spark__unsigned__u64__first %%% Simplified H126 on reading formula in, to give: %%% H126: element(src, [src_index + 15]) <= spark__unsigned__u64__last --- Hypothesis H127 has been replaced by "true". (It is already present, as H41). --- Hypothesis H128 has been replaced by "true". (It is already present, as H42). 
%%% Simplified H129 on reading formula in, to give: %%% H129: src_index + 15 >= integer__base__first %%% Simplified H130 on reading formula in, to give: %%% H130: src_index + 15 <= integer__base__last %%% Simplified H131 on reading formula in, to give: %%% H131: spark__unsigned__shift_left_64(element(src, [src_index + 15]), 56) >= interfaces__unsigned_64__first %%% Simplified H132 on reading formula in, to give: %%% H132: spark__unsigned__shift_left_64(element(src, [src_index + 15]), 56) <= interfaces__unsigned_64__last %%% Simplified C1 on reading formula in, to give: %%% C1: (((((((element(src, [src_index + 8]) + spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 10]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 11]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 12]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 13]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 14]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 15]), 56)) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first %%% Simplified C2 on reading formula in, to give: %%% C2: (((((((element(src, [src_index + 8]) + spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 10]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 11]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 12]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 13]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 14]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 15]), 56)) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last -S- Applied substitution rule get_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> 0 <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) New H47: element(src, [src_index + 9]) >= 0 New H53: element(src, [src_index + 8]) >= 0 New H65: element(src, [src_index + 10]) >= 0 New H77: element(src, [src_index + 11]) >= 0 New H89: element(src, [src_index + 12]) >= 0 New H101: element(src, [src_index + 13]) >= 0 New H113: element(src, [src_index + 14]) >= 0 New H125: element(src, [src_index + 15]) >= 0 New C1: (((((((element(src, [src_index + 8]) + spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 10]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 11]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 12]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 13]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 14]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 15]), 56)) mod interfaces__unsigned_64__modulus >= 0 -S- Applied substitution rule get_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H48: element(src, [src_index + 9]) <= 18446744073709551615 New H54: element(src, [src_index + 8]) <= 18446744073709551615 New H66: element(src, [src_index + 10]) <= 18446744073709551615 New H78: element(src, [src_index + 11]) <= 18446744073709551615 New H90: element(src, [src_index + 12]) <= 18446744073709551615 New H102: element(src, [src_index + 13]) <= 18446744073709551615 New H114: element(src, [src_index + 14]) <= 18446744073709551615 New H126: element(src, [src_index + 15]) <= 18446744073709551615 New H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> 0 <= element(dst, [i_]) and element(dst, [i_]) <= 18446744073709551615) New C2: (((((((element(src, [src_index + 8]) + spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 10]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 11]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 12]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 13]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 14]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 15]), 56)) mod interfaces__unsigned_64__modulus <= 18446744073709551615 >>> Restructured hypothesis H20 into: >>> H20: dst_index <> dst__index__subtype__1__last --- Attempted addition of new hypothesis: dst_index + 1 <= natural__last eliminated: this already exists (as H21). +++ Using "A->B, A |- B" on hypotheses H8 & H20 yields a new hypothesis: +++ H21: dst_index + 1 <= natural__last --- Attempted addition of new hypothesis: src_index + 8 <= natural__last eliminated: this already exists (as H24). 
+++ Using "A->B, A |- B" on hypotheses H9 & H20 yields a new hypothesis: +++ H24: src_index + 8 <= natural__last -S- Applied substitution rule get_64_lsb_f_rules(4). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H51: src_index >= - 2147483657 New H69: src_index >= - 2147483658 New H81: src_index >= - 2147483659 New H93: src_index >= - 2147483660 New H105: src_index >= - 2147483661 New H117: src_index >= - 2147483662 New H129: src_index >= - 2147483663 -S- Applied substitution rule get_64_lsb_f_rules(5). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H52: src_index <= 2147483638 New H70: src_index <= 2147483637 New H82: src_index <= 2147483636 New H94: src_index <= 2147483635 New H106: src_index <= 2147483634 New H118: src_index <= 2147483633 New H130: src_index <= 2147483632 -S- Applied substitution rule get_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H11: src_offset >= 0 New H25: src_index >= - 8 -S- Applied substitution rule get_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H8: dst_index <> dst__index__subtype__1__last -> dst_index <= 2147483646 New H9: dst_index <> dst__index__subtype__1__last -> src_index <= 2147483639 New H12: src_offset <= 2147483647 New H19: src_offset + dst__index__subtype__1__last * 8 <= 2147483647 New H21: dst_index <= 2147483646 New H24: src_index <= 2147483639 -S- Applied substitution rule get_64_lsb_f_rules(20). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H45: element(src, [src_index + 9]) >= 0 New H59: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) >= 0 New H63: element(src, [src_index + 10]) >= 0 New H71: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) >= 0 New H75: element(src, [src_index + 11]) >= 0 New H83: spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24) >= 0 New H87: element(src, [src_index + 12]) >= 0 New H95: spark__unsigned__shift_left_64(element(src, [src_index + 12]), 32) >= 0 New H99: element(src, [src_index + 13]) >= 0 New H107: spark__unsigned__shift_left_64(element(src, [src_index + 13]), 40) >= 0 New H111: element(src, [src_index + 14]) >= 0 New H119: spark__unsigned__shift_left_64(element(src, [src_index + 14]), 48) >= 0 New H123: element(src, [src_index + 15]) >= 0 New H131: spark__unsigned__shift_left_64(element(src, [src_index + 15]), 56) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(21). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. 
New H46: element(src, [src_index + 9]) <= 18446744073709551615 New H60: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) <= 18446744073709551615 New H64: element(src, [src_index + 10]) <= 18446744073709551615 New H72: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) <= 18446744073709551615 New H76: element(src, [src_index + 11]) <= 18446744073709551615 New H84: spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24) <= 18446744073709551615 New H88: element(src, [src_index + 12]) <= 18446744073709551615 New H96: spark__unsigned__shift_left_64(element(src, [src_index + 12]), 32) <= 18446744073709551615 New H100: element(src, [src_index + 13]) <= 18446744073709551615 New H108: spark__unsigned__shift_left_64(element(src, [src_index + 13]), 40) <= 18446744073709551615 New H112: element(src, [src_index + 14]) <= 18446744073709551615 New H120: spark__unsigned__shift_left_64(element(src, [src_index + 14]), 48) <= 18446744073709551615 New H124: element(src, [src_index + 15]) <= 18446744073709551615 New H132: spark__unsigned__shift_left_64(element(src, [src_index + 15]), 56) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(24). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (((((((element(src, [src_index + 8]) + spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [ src_index + 10]), 16)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_index + 12]), 32)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_index + 13]), 40)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_index + 14]), 48)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_index + 15]), 56)) mod 18446744073709551616 >= 0 New C2: (((((((element(src, [src_index + 8]) + spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [ src_index + 10]), 16)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_index + 12]), 32)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_index + 13]), 40)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_index + 14]), 48)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_index + 15]), 56)) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule get_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 255) -S- Applied substitution rule get_64_lsb_f_rules(38). 
This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H43: true New H61: true New H73: true New H85: true New H97: true New H109: true New H121: true -S- Applied substitution rule get_64_lsb_f_rules(39). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H44: true New H62: true New H74: true New H86: true New H98: true New H110: true New H122: true -S- Applied substitution rule get_64_lsb_f_rules(43). This was achieved by replacing all occurrences of spark__crypto__word_count_t__first by: 0. New H22: dst_index >= - 1 -S- Applied substitution rule get_64_lsb_f_rules(44). This was achieved by replacing all occurrences of spark__crypto__word_count_t__last by: 268435455. New H4: dst__index__subtype__1__last <= 268435455 New H23: dst_index <= 268435454 %%% Hypotheses H3 & H20 together imply that dst_index < dst__index__subtype__1__last. H3 & H20 have therefore been deleted and a new H133 added to this effect. *** Proved C1: (((((((element(src, [src_index + 8]) + spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [ src_index + 10]), 16)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_index + 12]), 32)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_index + 13]), 40)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_index + 14]), 48)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_index + 15]), 56)) mod 18446744073709551616 >= 0 *** Proved C2: (((((((element(src, [src_index + 8]) + spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [ src_index + 10]), 16)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_index + 12]), 32)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_index + 13]), 40)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_index + 14]), 48)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_index + 15]), 56)) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_get_64_lsb_first_21. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H13 has been replaced by "true". (It is already present, as H9) . --- Hypothesis H14 has been replaced by "true". (It is already present, as H10). --- Hypothesis H16 has been replaced by "true". (It is already present, as H3) . --- Hypothesis H30 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H37 has been replaced by "true". (It is already present, as H17). --- Hypothesis H38 has been replaced by "true". (It is already present, as H18). --- Hypothesis H43 has been replaced by "true". (It is already present, as H15). --- Hypothesis H44 has been replaced by "true". (It is already present, as H3) . --- Hypothesis H55 has been replaced by "true". (It is already present, as H19). 
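[Editorial note] VC procedure_get_64_lsb_first_20, closed a little above, is the loop-carried counterpart of VC 19: with src_index equal to src_offset + dst_index * 8 (as the elimination of hypothesis H5 earlier in the log shows), its C1 and C2 bound the word assembled from the next eight source bytes, src_index + 8 .. src_index + 15. The sketch below shows a loop with that indexing pattern; it is a hedged reconstruction from the hypotheses only, not the packaged Skein source, and Byte_Seq, Word_Seq and the zero-based index assumption (cf. conclusions C13/C14 of VC 21 below) are illustrative.

with Interfaces; use Interfaces;

package Get_64_Sketch is

   type Byte_Seq is array (Natural range <>) of Unsigned_8;
   type Word_Seq is array (Natural range <>) of Unsigned_64;

   --  Callers are assumed to supply zero-based arrays with
   --  Src_Offset + 8 * Dst'Length <= Src'Length, the Ada counterpart of
   --  src_offset + dst__index__subtype__1__last * 8 + 7 <=
   --  src__index__subtype__1__last in the log above.
   procedure Get_64_LSB_First
     (Dst        : out Word_Seq;
      Src        : in  Byte_Seq;
      Src_Offset : in  Natural);

end Get_64_Sketch;

package body Get_64_Sketch is

   procedure Get_64_LSB_First
     (Dst        : out Word_Seq;
      Src        : in  Byte_Seq;
      Src_Offset : in  Natural)
   is
      W : Unsigned_64;
   begin
      for Dst_Index in Dst'Range loop
         W := 0;
         --  Fold the eight bytes in from the most significant end, so the
         --  final value equals the least-significant-byte-first sum that
         --  appears in the C1 expressions of VCs 19 and 20.
         for J in reverse 0 .. 7 loop
            W := Shift_Left (W, 8)
              + Unsigned_64 (Src (Src_Offset + Dst_Index * 8 + J));
         end loop;
         Dst (Dst_Index) := W;
      end loop;
   end Get_64_LSB_First;

end Get_64_Sketch;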
--- Hypothesis H56 has been replaced by "true". (It is already present, as H20). --- Hypothesis H67 has been replaced by "true". (It is already present, as H21). --- Hypothesis H68 has been replaced by "true". (It is already present, as H22). --- Hypothesis H79 has been replaced by "true". (It is already present, as H23). --- Hypothesis H80 has been replaced by "true". (It is already present, as H24). --- Hypothesis H91 has been replaced by "true". (It is already present, as H25). --- Hypothesis H92 has been replaced by "true". (It is already present, as H26). --- Hypothesis H103 has been replaced by "true". (It is already present, as H27). --- Hypothesis H104 has been replaced by "true". (It is already present, as H28). --- Hypothesis H115 has been replaced by "true". (It is already present, as H29). --- Hypothesis H116 has been replaced by "true". (It is already present, as H6). %%% Simplified C1 on reading formula in, to give: %%% C1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= 0 -> spark__unsigned__u64__first <= element(update(dst, [0], (((((( (element(src, [src_offset]) + spark__unsigned__shift_left_64(element( src, [src_offset + 1]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64(element(src, [src_offset + 2]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 3]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 4]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 5]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 6]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 7]), 56)) mod interfaces__unsigned_64__modulus), [i_]) and element(update(dst, [0], (((((((element(src, [src_offset]) + spark__unsigned__shift_left_64( element(src, [src_offset + 1]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 2]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 3]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 4]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 5]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 6]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 7]), 56)) mod interfaces__unsigned_64__modulus), [i_]) <= spark__unsigned__u64__last) %%% Simplified C5 on reading formula in, to give: %%% C5: true %%% Simplified C6 on reading formula in, to give: %%% C6: true %%% Simplified C8 on reading formula in, to give: %%% C8: 0 <> dst__index__subtype__1__last -> 1 <= natural__last %%% Simplified C10 on reading formula in, to give: %%% C10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) *** Proved C1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= 0 -> spark__unsigned__u64__first <= element(update(dst, [0], ((( ((((element(src, [src_offset]) + spark__unsigned__shift_left_64( element(src, [src_offset + 1]), 8)) mod interfaces__unsigned_64__modulus + 
spark__unsigned__shift_left_64( element(src, [src_offset + 2]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 3]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 4]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 5]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 6]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 7]), 56)) mod interfaces__unsigned_64__modulus), [i_]) and element(update(dst, [0], (((((((element(src, [src_offset]) + spark__unsigned__shift_left_64( element(src, [src_offset + 1]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 2]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 3]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 4]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 5]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 6]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_offset + 7]), 56)) mod interfaces__unsigned_64__modulus), [i_]) <= spark__unsigned__u64__last) using hypotheses H2, H121 & H122. *** Proved C2: 0 >= dst__index__subtype__1__first using hypothesis H45. *** Proved C3: 0 <= dst__index__subtype__1__last using hypothesis H46. *** Proved C5: true *** Proved C6: true *** Proved C10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) using hypothesis H8. *** Proved C11: src_offset >= natural__first using hypothesis H9. *** Proved C12: src_offset <= natural__last using hypothesis H10. *** Proved C13: src__index__subtype__1__first = 0 using hypothesis H1. *** Proved C14: dst__index__subtype__1__first = 0 using hypothesis H2. *** Proved C15: src_offset <= src__index__subtype__1__last using hypothesis H3. *** Proved C16: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first using hypothesis H4. *** Proved C17: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last using hypothesis H5. *** Proved C18: src_offset + 7 <= src__index__subtype__1__last using hypothesis H6. *** Proved C19: src_offset + dst__index__subtype__1__last * 8 <= natural__last using hypothesis H7. -S- Applied substitution rule get_64_lsb_f_rules(44). This was achieved by replacing all occurrences of spark__crypto__word_count_t__last by: 268435455. New H12: true New C4: dst__index__subtype__1__last <= 268435455 -S- Applied substitution rule get_64_lsb_f_rules(4). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H39: src_offset >= - 2147483649 New H57: src_offset >= - 2147483650 New H69: src_offset >= - 2147483651 New H81: src_offset >= - 2147483652 New H93: src_offset >= - 2147483653 New H105: src_offset >= - 2147483654 New H117: src_offset >= - 2147483655 -S- Applied substitution rule get_64_lsb_f_rules(5). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. 
New H40: src_offset <= 2147483646 New H58: src_offset <= 2147483645 New H70: src_offset <= 2147483644 New H82: src_offset <= 2147483643 New H94: src_offset <= 2147483642 New H106: src_offset <= 2147483641 New H118: src_offset <= 2147483640 -S- Applied substitution rule get_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H9: src_offset >= 0 -S- Applied substitution rule get_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: src_offset + dst__index__subtype__1__last * 8 <= 2147483647 New H10: src_offset <= 2147483647 New C8: true New C9: 0 <> dst__index__subtype__1__last -> src_offset <= 2147483639 -S- Applied substitution rule get_64_lsb_f_rules(20). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H33: element(src, [src_offset + 1]) >= 0 New H47: spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8) >= 0 New H51: element(src, [src_offset + 2]) >= 0 New H59: spark__unsigned__shift_left_64(element(src, [src_offset + 2]), 16) >= 0 New H63: element(src, [src_offset + 3]) >= 0 New H71: spark__unsigned__shift_left_64(element(src, [src_offset + 3]), 24) >= 0 New H75: element(src, [src_offset + 4]) >= 0 New H83: spark__unsigned__shift_left_64(element(src, [src_offset + 4]), 32) >= 0 New H87: element(src, [src_offset + 5]) >= 0 New H95: spark__unsigned__shift_left_64(element(src, [src_offset + 5]), 40) >= 0 New H99: element(src, [src_offset + 6]) >= 0 New H107: spark__unsigned__shift_left_64(element(src, [src_offset + 6]), 48) >= 0 New H111: element(src, [src_offset + 7]) >= 0 New H119: spark__unsigned__shift_left_64(element(src, [src_offset + 7]), 56) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(21). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H34: element(src, [src_offset + 1]) <= 18446744073709551615 New H48: spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8) <= 18446744073709551615 New H52: element(src, [src_offset + 2]) <= 18446744073709551615 New H60: spark__unsigned__shift_left_64(element(src, [src_offset + 2]), 16) <= 18446744073709551615 New H64: element(src, [src_offset + 3]) <= 18446744073709551615 New H72: spark__unsigned__shift_left_64(element(src, [src_offset + 3]), 24) <= 18446744073709551615 New H76: element(src, [src_offset + 4]) <= 18446744073709551615 New H84: spark__unsigned__shift_left_64(element(src, [src_offset + 4]), 32) <= 18446744073709551615 New H88: element(src, [src_offset + 5]) <= 18446744073709551615 New H96: spark__unsigned__shift_left_64(element(src, [src_offset + 5]), 40) <= 18446744073709551615 New H100: element(src, [src_offset + 6]) <= 18446744073709551615 New H108: spark__unsigned__shift_left_64(element(src, [src_offset + 6]), 48) <= 18446744073709551615 New H112: element(src, [src_offset + 7]) <= 18446744073709551615 New H120: spark__unsigned__shift_left_64(element(src, [src_offset + 7]), 56) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(24). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. 
New H121: (((((((element(src, [src_offset]) + spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 2]), 16)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 3]), 24)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 4]), 32)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 5]), 40)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 6]), 48)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 7]), 56)) mod 18446744073709551616 >= spark__unsigned__u64__first New H122: (((((((element(src, [src_offset]) + spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 2]), 16)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 3]), 24)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 4]), 32)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 5]), 40)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 6]), 48)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 7]), 56)) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule get_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule get_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 255) -S- Applied substitution rule get_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H35: element(src, [src_offset + 1]) >= 0 New H41: element(src, [src_offset]) >= 0 New H53: element(src, [src_offset + 2]) >= 0 New H65: element(src, [src_offset + 3]) >= 0 New H77: element(src, [src_offset + 4]) >= 0 New H89: element(src, [src_offset + 5]) >= 0 New H101: element(src, [src_offset + 6]) >= 0 New H113: element(src, [src_offset + 7]) >= 0 New H121: (((((((element(src, [src_offset]) + spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 2]), 16)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 3]), 24)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 4]), 32)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 5]), 40)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 6]), 48)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 7]), 56)) mod 18446744073709551616 >= 0 -S- Applied substitution rule get_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H36: element(src, [src_offset + 1]) <= 18446744073709551615 New H42: element(src, [src_offset]) <= 18446744073709551615 New H54: element(src, [src_offset + 2]) <= 18446744073709551615 New H66: element(src, [src_offset + 3]) <= 18446744073709551615 New H78: element(src, [src_offset + 4]) <= 18446744073709551615 New H90: element(src, [src_offset + 5]) <= 18446744073709551615 New H102: element(src, [src_offset + 6]) <= 18446744073709551615 New H114: element(src, [src_offset + 7]) <= 18446744073709551615 New H122: (((((((element(src, [src_offset]) + spark__unsigned__shift_left_64(element(src, [src_offset + 1]), 8)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 2]), 16)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 3]), 24)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 4]), 32)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 5]), 40)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_offset + 6]), 48)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + 7]), 56)) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(38). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H31: true New H49: true New H61: true New H73: true New H85: true New H97: true New H109: true -S- Applied substitution rule get_64_lsb_f_rules(39). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H32: true New H50: true New H62: true New H74: true New H86: true New H98: true New H110: true -S- Applied substitution rule get_64_lsb_f_rules(43). This was achieved by replacing all occurrences of spark__crypto__word_count_t__first by: 0. New H11: true *** Proved C7: src_offset <= src_offset + dst__index__subtype__1__last * 8 via its standard form, which is: Std.Fm C7: 8 * dst__index__subtype__1__last > - 1 using hypothesis H46. *** Proved C8: true --- Eliminated hypothesis H12 (true-hypothesis). --- Eliminated hypothesis H13 (true-hypothesis). --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H16 (true-hypothesis). --- Eliminated hypothesis H30 (true-hypothesis). --- Eliminated hypothesis H37 (true-hypothesis). --- Eliminated hypothesis H38 (true-hypothesis). --- Eliminated hypothesis H43 (true-hypothesis). --- Eliminated hypothesis H44 (true-hypothesis). --- Eliminated hypothesis H55 (true-hypothesis). --- Eliminated hypothesis H56 (true-hypothesis). --- Eliminated hypothesis H67 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H79 (true-hypothesis). --- Eliminated hypothesis H80 (true-hypothesis). --- Eliminated hypothesis H91 (true-hypothesis). --- Eliminated hypothesis H92 (true-hypothesis). --- Eliminated hypothesis H103 (true-hypothesis). --- Eliminated hypothesis H104 (true-hypothesis). --- Eliminated hypothesis H115 (true-hypothesis). --- Eliminated hypothesis H116 (true-hypothesis). --- Eliminated hypothesis H31 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). --- Eliminated hypothesis H73 (true-hypothesis). --- Eliminated hypothesis H85 (true-hypothesis). --- Eliminated hypothesis H97 (true-hypothesis). --- Eliminated hypothesis H109 (true-hypothesis). --- Eliminated hypothesis H32 (true-hypothesis). 
--- Eliminated hypothesis H50 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H74 (true-hypothesis). --- Eliminated hypothesis H86 (true-hypothesis). --- Eliminated hypothesis H98 (true-hypothesis). --- Eliminated hypothesis H110 (true-hypothesis). --- Eliminated hypothesis H11 (true-hypothesis). --- Eliminated hypothesis H35 (duplicate of H33). --- Eliminated hypothesis H53 (duplicate of H51). --- Eliminated hypothesis H65 (duplicate of H63). --- Eliminated hypothesis H77 (duplicate of H75). --- Eliminated hypothesis H89 (duplicate of H87). --- Eliminated hypothesis H101 (duplicate of H99). --- Eliminated hypothesis H113 (duplicate of H111). --- Eliminated hypothesis H36 (duplicate of H34). --- Eliminated hypothesis H54 (duplicate of H52). --- Eliminated hypothesis H66 (duplicate of H64). --- Eliminated hypothesis H78 (duplicate of H76). --- Eliminated hypothesis H90 (duplicate of H88). --- Eliminated hypothesis H102 (duplicate of H100). --- Eliminated hypothesis H114 (duplicate of H112). --- Eliminated hypothesis H3 (redundant, given H6). --- Eliminated hypothesis H10 (redundant, given H40). --- Eliminated hypothesis H17 (redundant, given H15). --- Eliminated hypothesis H18 (redundant, given H6). --- Eliminated hypothesis H19 (redundant, given H15). --- Eliminated hypothesis H20 (redundant, given H6). --- Eliminated hypothesis H21 (redundant, given H15). --- Eliminated hypothesis H22 (redundant, given H6). --- Eliminated hypothesis H23 (redundant, given H15). --- Eliminated hypothesis H24 (redundant, given H6). --- Eliminated hypothesis H25 (redundant, given H15). --- Eliminated hypothesis H26 (redundant, given H6). --- Eliminated hypothesis H27 (redundant, given H15). --- Eliminated hypothesis H28 (redundant, given H6). --- Eliminated hypothesis H29 (redundant, given H15). --- Eliminated hypothesis H39 (redundant, given H9). --- Eliminated hypothesis H40 (redundant, given H58). --- Eliminated hypothesis H45 (redundant, given H2). --- Eliminated hypothesis H57 (redundant, given H9). --- Eliminated hypothesis H58 (redundant, given H70). --- Eliminated hypothesis H69 (redundant, given H9). --- Eliminated hypothesis H70 (redundant, given H82). --- Eliminated hypothesis H81 (redundant, given H9). --- Eliminated hypothesis H82 (redundant, given H94). --- Eliminated hypothesis H93 (redundant, given H9). --- Eliminated hypothesis H94 (redundant, given H106). --- Eliminated hypothesis H105 (redundant, given H9). --- Eliminated hypothesis H106 (redundant, given H118). --- Eliminated hypothesis H117 (redundant, given H9). *** Proved C4: dst__index__subtype__1__last <= 268435455 +++ New H123: integer__size >= 0 +++ New H124: natural__size >= 0 +++ New H125: spark__unsigned__byte__size >= 0 +++ New H126: spark__unsigned__u64__size >= 0 +++ New H127: spark__unsigned__shift_count__size >= 0 +++ New H128: spark__crypto__word_count_t__size >= 0 +++ New H129: dst__index__subtype__1__first <= dst__index__subtype__1__last +++ New H130: src__index__subtype__1__first <= src__index__subtype__1__last +++ New H131: src__index__subtype__1__first >= 0 +++ New H132: src__index__subtype__1__last >= 0 +++ New H133: src__index__subtype__1__last <= 2147483647 +++ New H134: src__index__subtype__1__first <= 2147483647 +++ New H135: dst__index__subtype__1__first >= 0 +++ New H136: dst__index__subtype__1__last >= 0 +++ New H137: dst__index__subtype__1__last <= 268435455 +++ New H138: dst__index__subtype__1__first <= 268435455 @@@@@@@@@@ VC: procedure_get_64_lsb_first_22. 
@@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> spark__unsigned__u64__first <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) %%% Simplified H10 on reading formula in, to give: %%% H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). %%% Simplified H29 on reading formula in, to give: %%% H29: src_index + 9 >= src__index__subtype__1__first %%% Simplified H30 on reading formula in, to give: %%% H30: src_index + 9 <= src__index__subtype__1__last %%% Simplified H31 on reading formula in, to give: %%% H31: src_index + 10 >= src__index__subtype__1__first %%% Simplified H32 on reading formula in, to give: %%% H32: src_index + 10 <= src__index__subtype__1__last %%% Simplified H33 on reading formula in, to give: %%% H33: src_index + 11 >= src__index__subtype__1__first %%% Simplified H34 on reading formula in, to give: %%% H34: src_index + 11 <= src__index__subtype__1__last %%% Simplified H35 on reading formula in, to give: %%% H35: src_index + 12 >= src__index__subtype__1__first %%% Simplified H36 on reading formula in, to give: %%% H36: src_index + 12 <= src__index__subtype__1__last %%% Simplified H37 on reading formula in, to give: %%% H37: src_index + 13 >= src__index__subtype__1__first %%% Simplified H38 on reading formula in, to give: %%% H38: src_index + 13 <= src__index__subtype__1__last %%% Simplified H39 on reading formula in, to give: %%% H39: src_index + 14 >= src__index__subtype__1__first %%% Simplified H40 on reading formula in, to give: %%% H40: src_index + 14 <= src__index__subtype__1__last %%% Simplified H41 on reading formula in, to give: %%% H41: src_index + 15 >= src__index__subtype__1__first %%% Simplified H42 on reading formula in, to give: %%% H42: src_index + 15 <= src__index__subtype__1__last %%% Simplified H45 on reading formula in, to give: %%% H45: element(src, [src_index + 9]) >= interfaces__unsigned_64__first %%% Simplified H46 on reading formula in, to give: %%% H46: element(src, [src_index + 9]) <= interfaces__unsigned_64__last %%% Simplified H47 on reading formula in, to give: %%% H47: element(src, [src_index + 9]) >= spark__unsigned__u64__first %%% Simplified H48 on reading formula in, to give: %%% H48: element(src, [src_index + 9]) <= spark__unsigned__u64__last --- Hypothesis H49 has been replaced by "true". (It is already present, as H29). --- Hypothesis H50 has been replaced by "true". (It is already present, as H30). %%% Simplified H51 on reading formula in, to give: %%% H51: src_index + 9 >= integer__base__first %%% Simplified H52 on reading formula in, to give: %%% H52: src_index + 9 <= integer__base__last --- Hypothesis H55 has been replaced by "true". (It is already present, as H27). --- Hypothesis H56 has been replaced by "true". (It is already present, as H28). 
%%% Simplified H59 on reading formula in, to give: %%% H59: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) >= interfaces__unsigned_64__first %%% Simplified H60 on reading formula in, to give: %%% H60: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) <= interfaces__unsigned_64__last %%% Simplified H63 on reading formula in, to give: %%% H63: element(src, [src_index + 10]) >= interfaces__unsigned_64__first %%% Simplified H64 on reading formula in, to give: %%% H64: element(src, [src_index + 10]) <= interfaces__unsigned_64__last %%% Simplified H65 on reading formula in, to give: %%% H65: element(src, [src_index + 10]) >= spark__unsigned__u64__first %%% Simplified H66 on reading formula in, to give: %%% H66: element(src, [src_index + 10]) <= spark__unsigned__u64__last --- Hypothesis H67 has been replaced by "true". (It is already present, as H31). --- Hypothesis H68 has been replaced by "true". (It is already present, as H32). %%% Simplified H69 on reading formula in, to give: %%% H69: src_index + 10 >= integer__base__first %%% Simplified H70 on reading formula in, to give: %%% H70: src_index + 10 <= integer__base__last %%% Simplified H71 on reading formula in, to give: %%% H71: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) >= interfaces__unsigned_64__first %%% Simplified H72 on reading formula in, to give: %%% H72: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) <= interfaces__unsigned_64__last %%% Simplified H75 on reading formula in, to give: %%% H75: element(src, [src_index + 11]) >= interfaces__unsigned_64__first %%% Simplified H76 on reading formula in, to give: %%% H76: element(src, [src_index + 11]) <= interfaces__unsigned_64__last %%% Simplified H77 on reading formula in, to give: %%% H77: element(src, [src_index + 11]) >= spark__unsigned__u64__first %%% Simplified H78 on reading formula in, to give: %%% H78: element(src, [src_index + 11]) <= spark__unsigned__u64__last --- Hypothesis H79 has been replaced by "true". (It is already present, as H33). --- Hypothesis H80 has been replaced by "true". (It is already present, as H34). %%% Simplified H81 on reading formula in, to give: %%% H81: src_index + 11 >= integer__base__first %%% Simplified H82 on reading formula in, to give: %%% H82: src_index + 11 <= integer__base__last %%% Simplified H83 on reading formula in, to give: %%% H83: spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24) >= interfaces__unsigned_64__first %%% Simplified H84 on reading formula in, to give: %%% H84: spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24) <= interfaces__unsigned_64__last %%% Simplified H87 on reading formula in, to give: %%% H87: element(src, [src_index + 12]) >= interfaces__unsigned_64__first %%% Simplified H88 on reading formula in, to give: %%% H88: element(src, [src_index + 12]) <= interfaces__unsigned_64__last %%% Simplified H89 on reading formula in, to give: %%% H89: element(src, [src_index + 12]) >= spark__unsigned__u64__first %%% Simplified H90 on reading formula in, to give: %%% H90: element(src, [src_index + 12]) <= spark__unsigned__u64__last --- Hypothesis H91 has been replaced by "true". (It is already present, as H35). --- Hypothesis H92 has been replaced by "true". (It is already present, as H36). 
%%% Simplified H93 on reading formula in, to give: %%% H93: src_index + 12 >= integer__base__first %%% Simplified H94 on reading formula in, to give: %%% H94: src_index + 12 <= integer__base__last %%% Simplified H95 on reading formula in, to give: %%% H95: spark__unsigned__shift_left_64(element(src, [src_index + 12]), 32) >= interfaces__unsigned_64__first %%% Simplified H96 on reading formula in, to give: %%% H96: spark__unsigned__shift_left_64(element(src, [src_index + 12]), 32) <= interfaces__unsigned_64__last %%% Simplified H99 on reading formula in, to give: %%% H99: element(src, [src_index + 13]) >= interfaces__unsigned_64__first %%% Simplified H100 on reading formula in, to give: %%% H100: element(src, [src_index + 13]) <= interfaces__unsigned_64__last %%% Simplified H101 on reading formula in, to give: %%% H101: element(src, [src_index + 13]) >= spark__unsigned__u64__first %%% Simplified H102 on reading formula in, to give: %%% H102: element(src, [src_index + 13]) <= spark__unsigned__u64__last --- Hypothesis H103 has been replaced by "true". (It is already present, as H37). --- Hypothesis H104 has been replaced by "true". (It is already present, as H38). %%% Simplified H105 on reading formula in, to give: %%% H105: src_index + 13 >= integer__base__first %%% Simplified H106 on reading formula in, to give: %%% H106: src_index + 13 <= integer__base__last %%% Simplified H107 on reading formula in, to give: %%% H107: spark__unsigned__shift_left_64(element(src, [src_index + 13]), 40) >= interfaces__unsigned_64__first %%% Simplified H108 on reading formula in, to give: %%% H108: spark__unsigned__shift_left_64(element(src, [src_index + 13]), 40) <= interfaces__unsigned_64__last %%% Simplified H111 on reading formula in, to give: %%% H111: element(src, [src_index + 14]) >= interfaces__unsigned_64__first %%% Simplified H112 on reading formula in, to give: %%% H112: element(src, [src_index + 14]) <= interfaces__unsigned_64__last %%% Simplified H113 on reading formula in, to give: %%% H113: element(src, [src_index + 14]) >= spark__unsigned__u64__first %%% Simplified H114 on reading formula in, to give: %%% H114: element(src, [src_index + 14]) <= spark__unsigned__u64__last --- Hypothesis H115 has been replaced by "true". (It is already present, as H39). --- Hypothesis H116 has been replaced by "true". (It is already present, as H40). %%% Simplified H117 on reading formula in, to give: %%% H117: src_index + 14 >= integer__base__first %%% Simplified H118 on reading formula in, to give: %%% H118: src_index + 14 <= integer__base__last %%% Simplified H119 on reading formula in, to give: %%% H119: spark__unsigned__shift_left_64(element(src, [src_index + 14]), 48) >= interfaces__unsigned_64__first %%% Simplified H120 on reading formula in, to give: %%% H120: spark__unsigned__shift_left_64(element(src, [src_index + 14]), 48) <= interfaces__unsigned_64__last %%% Simplified H123 on reading formula in, to give: %%% H123: element(src, [src_index + 15]) >= interfaces__unsigned_64__first %%% Simplified H124 on reading formula in, to give: %%% H124: element(src, [src_index + 15]) <= interfaces__unsigned_64__last %%% Simplified H125 on reading formula in, to give: %%% H125: element(src, [src_index + 15]) >= spark__unsigned__u64__first %%% Simplified H126 on reading formula in, to give: %%% H126: element(src, [src_index + 15]) <= spark__unsigned__u64__last --- Hypothesis H127 has been replaced by "true". (It is already present, as H41). --- Hypothesis H128 has been replaced by "true". 
(It is already present, as H42). %%% Simplified H129 on reading formula in, to give: %%% H129: src_index + 15 >= integer__base__first %%% Simplified H130 on reading formula in, to give: %%% H130: src_index + 15 <= integer__base__last %%% Simplified H131 on reading formula in, to give: %%% H131: spark__unsigned__shift_left_64(element(src, [src_index + 15]), 56) >= interfaces__unsigned_64__first %%% Simplified H132 on reading formula in, to give: %%% H132: spark__unsigned__shift_left_64(element(src, [src_index + 15]), 56) <= interfaces__unsigned_64__last %%% Simplified H133 on reading formula in, to give: %%% H133: (((((((element(src, [src_index + 8]) + spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 10]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 11]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 12]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 13]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 14]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 15]), 56)) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first %%% Simplified H134 on reading formula in, to give: %%% H134: (((((((element(src, [src_index + 8]) + spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 10]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 11]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 12]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 13]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 14]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 15]), 56)) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last %%% Simplified C1 on reading formula in, to give: %%% C1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index + 1 -> spark__unsigned__u64__first <= element(update( dst, [dst_index + 1], (((((((element(src, [src_index + 8]) + spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 10]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 11]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 12]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 13]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 14]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 15]), 56)) mod interfaces__unsigned_64__modulus), [i_]) and element(update(dst, [ dst_index + 1], (((((((element(src, [src_index + 8]) + spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( 
element(src, [src_index + 10]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 11]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 12]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 13]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 14]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 15]), 56)) mod interfaces__unsigned_64__modulus), [i_]) <= spark__unsigned__u64__last) %%% Simplified C8 on reading formula in, to give: %%% C8: dst_index + 1 <> dst__index__subtype__1__last -> dst_index + 2 <= natural__last %%% Simplified C9 on reading formula in, to give: %%% C9: dst_index + 1 <> dst__index__subtype__1__last -> src_index + 16 <= natural__last %%% Simplified C10 on reading formula in, to give: %%% C10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) *** Proved C1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index + 1 -> spark__unsigned__u64__first <= element(update( dst, [dst_index + 1], (((((((element(src, [src_index + 8]) + spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 10]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 11]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 12]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 13]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 14]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 15]), 56)) mod interfaces__unsigned_64__modulus), [i_]) and element(update(dst, [ dst_index + 1], (((((((element(src, [src_index + 8]) + spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 10]), 16)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 11]), 24)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 12]), 32)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 13]), 40)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 14]), 48)) mod interfaces__unsigned_64__modulus + spark__unsigned__shift_left_64( element(src, [src_index + 15]), 56)) mod interfaces__unsigned_64__modulus), [i_]) <= spark__unsigned__u64__last) using hypotheses H1, H133 & H134. *** Proved C2: dst_index + 1 >= dst__index__subtype__1__first using hypothesis H57. *** Proved C3: dst_index + 1 <= dst__index__subtype__1__last using hypothesis H58. *** Proved C4: dst__index__subtype__1__last <= spark__crypto__word_count_t__last using hypothesis H4. *** Proved C6: src_index + 8 >= src_offset using hypothesis H6. 
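[The value stored into dst in the update(...) terms above is the least-significant-byte-first assembly of eight consecutive source bytes into one 64-bit word. A minimal Ada sketch of that computation follows; the function and parameter names are illustrative, not the original source, but modular "+" on Unsigned_64 reproduces the "mod 18446744073709551616" terms seen in the formula.

with Interfaces; use Interfaces;

--  Sketch only: assembles one 64-bit word from eight bytes, least
--  significant byte first.  Unsigned_64 is a modular type, so each "+"
--  wraps modulo 2**64.
function Word_From_Bytes_LSB_First
  (B0, B1, B2, B3, B4, B5, B6, B7 : Unsigned_8) return Unsigned_64
is
begin
   return Unsigned_64 (B0)
     + Shift_Left (Unsigned_64 (B1), 8)
     + Shift_Left (Unsigned_64 (B2), 16)
     + Shift_Left (Unsigned_64 (B3), 24)
     + Shift_Left (Unsigned_64 (B4), 32)
     + Shift_Left (Unsigned_64 (B5), 40)
     + Shift_Left (Unsigned_64 (B6), 48)
     + Shift_Left (Unsigned_64 (B7), 56);
end Word_From_Bytes_LSB_First;]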
*** Proved C10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) using hypothesis H10. *** Proved C11: src_offset >= natural__first using hypothesis H11. *** Proved C12: src_offset <= natural__last using hypothesis H12. *** Proved C13: src__index__subtype__1__first = 0 using hypothesis H13. *** Proved C14: dst__index__subtype__1__first = 0 using hypothesis H14. *** Proved C15: src_offset <= src__index__subtype__1__last using hypothesis H15. *** Proved C16: src_offset + dst__index__subtype__1__last * 8 + 7 >= src__index__subtype__1__first using hypothesis H16. *** Proved C17: src_offset + dst__index__subtype__1__last * 8 + 7 <= src__index__subtype__1__last using hypothesis H17. *** Proved C18: src_offset + 7 <= src__index__subtype__1__last using hypothesis H18. *** Proved C19: src_offset + dst__index__subtype__1__last * 8 <= natural__last using hypothesis H19. >>> Restructured hypothesis H20 into: >>> H20: dst_index <> dst__index__subtype__1__last --- Attempted addition of new hypothesis: dst_index + 1 <= natural__last eliminated: this already exists (as H21). +++ Using "A->B, A |- B" on hypotheses H8 & H20 yields a new hypothesis: +++ H21: dst_index + 1 <= natural__last --- Attempted addition of new hypothesis: src_index + 8 <= natural__last eliminated: this already exists (as H24). +++ Using "A->B, A |- B" on hypotheses H9 & H20 yields a new hypothesis: +++ H24: src_index + 8 <= natural__last -S- Applied substitution rule get_64_lsb_f_rules(4). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H51: src_index >= - 2147483657 New H69: src_index >= - 2147483658 New H81: src_index >= - 2147483659 New H93: src_index >= - 2147483660 New H105: src_index >= - 2147483661 New H117: src_index >= - 2147483662 New H129: src_index >= - 2147483663 -S- Applied substitution rule get_64_lsb_f_rules(5). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H52: src_index <= 2147483638 New H70: src_index <= 2147483637 New H82: src_index <= 2147483636 New H94: src_index <= 2147483635 New H106: src_index <= 2147483634 New H118: src_index <= 2147483633 New H130: src_index <= 2147483632 -S- Applied substitution rule get_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H11: src_offset >= 0 New H25: src_index >= - 8 -S- Applied substitution rule get_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H8: dst_index <> dst__index__subtype__1__last -> dst_index <= 2147483646 New H9: dst_index <> dst__index__subtype__1__last -> src_index <= 2147483639 New H12: src_offset <= 2147483647 New H19: src_offset + dst__index__subtype__1__last * 8 <= 2147483647 New H21: dst_index <= 2147483646 New H24: src_index <= 2147483639 New C8: dst_index + 1 <> dst__index__subtype__1__last -> dst_index <= 2147483645 New C9: dst_index + 1 <> dst__index__subtype__1__last -> src_index <= 2147483631 -S- Applied substitution rule get_64_lsb_f_rules(20). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. 
New H45: element(src, [src_index + 9]) >= 0 New H59: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) >= 0 New H63: element(src, [src_index + 10]) >= 0 New H71: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) >= 0 New H75: element(src, [src_index + 11]) >= 0 New H83: spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24) >= 0 New H87: element(src, [src_index + 12]) >= 0 New H95: spark__unsigned__shift_left_64(element(src, [src_index + 12]), 32) >= 0 New H99: element(src, [src_index + 13]) >= 0 New H107: spark__unsigned__shift_left_64(element(src, [src_index + 13]), 40) >= 0 New H111: element(src, [src_index + 14]) >= 0 New H119: spark__unsigned__shift_left_64(element(src, [src_index + 14]), 48) >= 0 New H123: element(src, [src_index + 15]) >= 0 New H131: spark__unsigned__shift_left_64(element(src, [src_index + 15]), 56) >= 0 -S- Applied substitution rule get_64_lsb_f_rules(21). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H46: element(src, [src_index + 9]) <= 18446744073709551615 New H60: spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8) <= 18446744073709551615 New H64: element(src, [src_index + 10]) <= 18446744073709551615 New H72: spark__unsigned__shift_left_64(element(src, [src_index + 10]), 16) <= 18446744073709551615 New H76: element(src, [src_index + 11]) <= 18446744073709551615 New H84: spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24) <= 18446744073709551615 New H88: element(src, [src_index + 12]) <= 18446744073709551615 New H96: spark__unsigned__shift_left_64(element(src, [src_index + 12]), 32) <= 18446744073709551615 New H100: element(src, [src_index + 13]) <= 18446744073709551615 New H108: spark__unsigned__shift_left_64(element(src, [src_index + 13]), 40) <= 18446744073709551615 New H112: element(src, [src_index + 14]) <= 18446744073709551615 New H120: spark__unsigned__shift_left_64(element(src, [src_index + 14]), 48) <= 18446744073709551615 New H124: element(src, [src_index + 15]) <= 18446744073709551615 New H132: spark__unsigned__shift_left_64(element(src, [src_index + 15]), 56) <= 18446744073709551615 -S- Applied substitution rule get_64_lsb_f_rules(24). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. 
New H133: (((((((element(src, [src_index + 8]) + spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [ src_index + 10]), 16)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_index + 12]), 32)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_index + 13]), 40)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_index + 14]), 48)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_index + 15]), 56)) mod 18446744073709551616 >= spark__unsigned__u64__first New H134: (((((((element(src, [src_index + 8]) + spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [ src_index + 10]), 16)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_index + 12]), 32)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_index + 13]), 40)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_index + 14]), 48)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_index + 15]), 56)) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule get_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule get_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 255) -S- Applied substitution rule get_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> 0 <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) New H47: element(src, [src_index + 9]) >= 0 New H53: element(src, [src_index + 8]) >= 0 New H65: element(src, [src_index + 10]) >= 0 New H77: element(src, [src_index + 11]) >= 0 New H89: element(src, [src_index + 12]) >= 0 New H101: element(src, [src_index + 13]) >= 0 New H113: element(src, [src_index + 14]) >= 0 New H125: element(src, [src_index + 15]) >= 0 New H133: (((((((element(src, [src_index + 8]) + spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [ src_index + 10]), 16)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_index + 12]), 32)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_index + 13]), 40)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_index + 14]), 48)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_index + 15]), 56)) mod 18446744073709551616 >= 0 -S- Applied substitution rule get_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H48: element(src, [src_index + 9]) <= 18446744073709551615 New H54: element(src, [src_index + 8]) <= 18446744073709551615 New H66: element(src, [src_index + 10]) <= 18446744073709551615 New H78: element(src, [src_index + 11]) <= 18446744073709551615 New H90: element(src, [src_index + 12]) <= 18446744073709551615 New H102: element(src, [src_index + 13]) <= 18446744073709551615 New H114: element(src, [src_index + 14]) <= 18446744073709551615 New H126: element(src, [src_index + 15]) <= 18446744073709551615 New H134: (((((((element(src, [src_index + 8]) + spark__unsigned__shift_left_64(element(src, [src_index + 9]), 8)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [ src_index + 10]), 16)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_index + 11]), 24)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_index + 12]), 32)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_index + 13]), 40)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element( src, [src_index + 14]), 48)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_index + 15]), 56)) mod 18446744073709551616 <= 18446744073709551615 New H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> 0 <= element(dst, [i_]) and element(dst, [i_]) <= 18446744073709551615) -S- Applied substitution rule get_64_lsb_f_rules(38). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H43: true New H61: true New H73: true New H85: true New H97: true New H109: true New H121: true -S- Applied substitution rule get_64_lsb_f_rules(39). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H44: true New H62: true New H74: true New H86: true New H98: true New H110: true New H122: true -S- Applied substitution rule get_64_lsb_f_rules(43). This was achieved by replacing all occurrences of spark__crypto__word_count_t__first by: 0. New H22: dst_index >= - 1 -S- Applied substitution rule get_64_lsb_f_rules(44). 
This was achieved by replacing all occurrences of spark__crypto__word_count_t__last by: 268435455. New H4: dst__index__subtype__1__last <= 268435455 New H23: dst_index <= 268435454 %%% Hypotheses H3 & H20 together imply that dst_index < dst__index__subtype__1__last. H3 & H20 have therefore been deleted and a new H135 added to this effect. *** Proved C5: src_index + 8 = src_offset + (dst_index + 1) * 8 via its standard form, which is: Std.Fm C5: 8 * dst_index - src_index + src_offset = 0 using hypothesis H5. *** Proved C8: dst_index + 1 <> dst__index__subtype__1__last -> dst_index <= 2147483645 using hypothesis H23. --- Eliminated hypothesis H26 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H50 (true-hypothesis). --- Eliminated hypothesis H55 (true-hypothesis). --- Eliminated hypothesis H56 (true-hypothesis). --- Eliminated hypothesis H67 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H79 (true-hypothesis). --- Eliminated hypothesis H80 (true-hypothesis). --- Eliminated hypothesis H91 (true-hypothesis). --- Eliminated hypothesis H92 (true-hypothesis). --- Eliminated hypothesis H103 (true-hypothesis). --- Eliminated hypothesis H104 (true-hypothesis). --- Eliminated hypothesis H115 (true-hypothesis). --- Eliminated hypothesis H116 (true-hypothesis). --- Eliminated hypothesis H127 (true-hypothesis). --- Eliminated hypothesis H128 (true-hypothesis). --- Eliminated hypothesis H43 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). --- Eliminated hypothesis H73 (true-hypothesis). --- Eliminated hypothesis H85 (true-hypothesis). --- Eliminated hypothesis H97 (true-hypothesis). --- Eliminated hypothesis H109 (true-hypothesis). --- Eliminated hypothesis H121 (true-hypothesis). --- Eliminated hypothesis H44 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H74 (true-hypothesis). --- Eliminated hypothesis H86 (true-hypothesis). --- Eliminated hypothesis H98 (true-hypothesis). --- Eliminated hypothesis H110 (true-hypothesis). --- Eliminated hypothesis H122 (true-hypothesis). --- Eliminated hypothesis H47 (duplicate of H45). --- Eliminated hypothesis H65 (duplicate of H63). --- Eliminated hypothesis H77 (duplicate of H75). --- Eliminated hypothesis H89 (duplicate of H87). --- Eliminated hypothesis H101 (duplicate of H99). --- Eliminated hypothesis H113 (duplicate of H111). --- Eliminated hypothesis H125 (duplicate of H123). --- Eliminated hypothesis H48 (duplicate of H46). --- Eliminated hypothesis H66 (duplicate of H64). --- Eliminated hypothesis H78 (duplicate of H76). --- Eliminated hypothesis H90 (duplicate of H88). --- Eliminated hypothesis H102 (duplicate of H100). --- Eliminated hypothesis H114 (duplicate of H112). --- Eliminated hypothesis H126 (duplicate of H124). --- Eliminated hypothesis H135 (duplicate of H58). >>> Using "A->B, A |- B" on H8, given H135, we simplify the former to: >>> H8: dst_index <= 2147483646 >>> Using "A->B, A |- B" on H9, given H135, we simplify the former to: >>> H9: src_index <= 2147483639 --- Eliminated hypothesis H15 (redundant, given H18). --- Eliminated hypothesis H21 (redundant, given H23). --- Eliminated hypothesis H22 (redundant, given H2 & H14). --- Eliminated hypothesis H24 (redundant, given H52). --- Eliminated hypothesis H28 (redundant, given H30). --- Eliminated hypothesis H29 (redundant, given H27). --- Eliminated hypothesis H30 (redundant, given H32). 
--- Eliminated hypothesis H31 (redundant, given H27). --- Eliminated hypothesis H32 (redundant, given H34). --- Eliminated hypothesis H33 (redundant, given H27). --- Eliminated hypothesis H34 (redundant, given H36). --- Eliminated hypothesis H35 (redundant, given H27). --- Eliminated hypothesis H36 (redundant, given H38). --- Eliminated hypothesis H37 (redundant, given H27). --- Eliminated hypothesis H38 (redundant, given H40). --- Eliminated hypothesis H39 (redundant, given H27). --- Eliminated hypothesis H40 (redundant, given H42). --- Eliminated hypothesis H41 (redundant, given H27). --- Eliminated hypothesis H51 (redundant, given H25). --- Eliminated hypothesis H52 (redundant, given H70). --- Eliminated hypothesis H57 (redundant, given H2). --- Eliminated hypothesis H69 (redundant, given H25). --- Eliminated hypothesis H70 (redundant, given H82). --- Eliminated hypothesis H81 (redundant, given H25). --- Eliminated hypothesis H82 (redundant, given H94). --- Eliminated hypothesis H93 (redundant, given H25). --- Eliminated hypothesis H94 (redundant, given H106). --- Eliminated hypothesis H105 (redundant, given H25). --- Eliminated hypothesis H106 (redundant, given H118). --- Eliminated hypothesis H117 (redundant, given H25). --- Eliminated hypothesis H118 (redundant, given H130). --- Eliminated hypothesis H129 (redundant, given H25). -S- Eliminated hypothesis H5. This was achieved by replacing all occurrences of src_index by: src_offset + dst_index * 8. New H6: src_offset + dst_index * 8 >= src_offset New H7: src_offset + dst_index * 8 <= src_offset + dst__index__subtype__1__last * 8 New H27: src_offset + dst_index * 8 + 8 >= src__index__subtype__1__first New H42: src_offset + dst_index * 8 + 15 <= src__index__subtype__1__last New H130: src_offset + dst_index * 8 <= 2147483632 New H25: src_offset + dst_index * 8 >= - 8 New H45: element(src, [src_offset + dst_index * 8 + 9]) >= 0 New H59: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 9]), 8) >= 0 New H63: element(src, [src_offset + dst_index * 8 + 10]) >= 0 New H71: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 10]), 16) >= 0 New H75: element(src, [src_offset + dst_index * 8 + 11]) >= 0 New H83: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 11]), 24) >= 0 New H87: element(src, [src_offset + dst_index * 8 + 12]) >= 0 New H95: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 12]), 32) >= 0 New H99: element(src, [src_offset + dst_index * 8 + 13]) >= 0 New H107: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 13]), 40) >= 0 New H111: element(src, [src_offset + dst_index * 8 + 14]) >= 0 New H119: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 14]), 48) >= 0 New H123: element(src, [src_offset + dst_index * 8 + 15]) >= 0 New H131: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 15]), 56) >= 0 New H46: element(src, [src_offset + dst_index * 8 + 9]) <= 18446744073709551615 New H60: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 9]), 8) <= 18446744073709551615 New H64: element(src, [src_offset + dst_index * 8 + 10]) <= 18446744073709551615 New H72: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 10]), 16) <= 18446744073709551615 New H76: element(src, [src_offset + dst_index * 8 + 11]) <= 18446744073709551615 New H84: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 11]), 24) <= 
18446744073709551615 New H88: element(src, [src_offset + dst_index * 8 + 12]) <= 18446744073709551615 New H96: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 12]), 32) <= 18446744073709551615 New H100: element(src, [src_offset + dst_index * 8 + 13]) <= 18446744073709551615 New H108: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 13]), 40) <= 18446744073709551615 New H112: element(src, [src_offset + dst_index * 8 + 14]) <= 18446744073709551615 New H120: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 14]), 48) <= 18446744073709551615 New H124: element(src, [src_offset + dst_index * 8 + 15]) <= 18446744073709551615 New H132: spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 15]), 56) <= 18446744073709551615 New H53: element(src, [src_offset + dst_index * 8 + 8]) >= 0 New H133: (((((((element(src, [src_offset + dst_index * 8 + 8]) + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 9]), 8)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 10]), 16)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 11]), 24)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 12]), 32)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 13]), 40)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 14]), 48)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 15]), 56)) mod 18446744073709551616 >= 0 New H54: element(src, [src_offset + dst_index * 8 + 8]) <= 18446744073709551615 New H134: (((((((element(src, [src_offset + dst_index * 8 + 8]) + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 9]), 8)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 10]), 16)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 11]), 24)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 12]), 32)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 13]), 40)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 14]), 48)) mod 18446744073709551616 + spark__unsigned__shift_left_64(element(src, [src_offset + dst_index * 8 + 15]), 56)) mod 18446744073709551616 <= 18446744073709551615 New H9: src_offset + dst_index * 8 <= 2147483639 New C7: src_offset + dst_index * 8 + 8 <= src_offset + dst__index__subtype__1__last * 8 New C9: dst_index + 1 <> dst__index__subtype__1__last -> src_offset + dst_index * 8 <= 2147483631 +++ New H136: integer__size >= 0 +++ New H137: natural__size >= 0 +++ New H138: spark__unsigned__byte__size >= 0 +++ New H139: spark__unsigned__u64__size >= 0 +++ New H140: spark__unsigned__shift_count__size >= 0 +++ New H141: spark__crypto__word_count_t__size >= 0 +++ New H142: dst__index__subtype__1__first <= dst__index__subtype__1__last +++ New H143: src__index__subtype__1__first <= src__index__subtype__1__last +++ New H144: src__index__subtype__1__first >= 0 +++ New H145: src__index__subtype__1__last >= 0 +++ New H146: src__index__subtype__1__last <= 2147483647 +++ New H147: src__index__subtype__1__first <= 2147483647 +++ New 
H148: dst__index__subtype__1__first >= 0 +++ New H149: dst__index__subtype__1__last >= 0 --- Attempted addition of new hypothesis: dst__index__subtype__1__last <= 268435455 eliminated: this already exists (as H4). +++ New H4: dst__index__subtype__1__last <= 268435455 +++ New H150: dst__index__subtype__1__first <= 268435455 @@@@@@@@@@ VC: procedure_get_64_lsb_first_23. @@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> spark__unsigned__u64__first <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) %%% Simplified H10 on reading formula in, to give: %%% H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule get_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H8: dst_index <> dst__index__subtype__1__last -> dst_index <= 2147483646 New H9: dst_index <> dst__index__subtype__1__last -> src_index <= 2147483639 New H12: src_offset <= 2147483647 New H19: src_offset + dst__index__subtype__1__last * 8 <= 2147483647 New C1: dst_index <= 2147483646 >>> Restructured hypothesis H20 into: >>> H20: dst_index <> dst__index__subtype__1__last +++ Using "A->B, A |- B" on hypotheses H8 & H20 yields a new hypothesis: +++ H21: dst_index <= 2147483646 +++ Using "A->B, A |- B" on hypotheses H9 & H20 yields a new hypothesis: +++ H22: src_index <= 2147483639 -S- Applied substitution rule get_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H11: src_offset >= 0 -S- Applied substitution rule get_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule get_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 255) -S- Applied substitution rule get_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> 0 <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) -S- Applied substitution rule get_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> 0 <= element(dst, [i_]) and element(dst, [i_]) <= 18446744073709551615) -S- Applied substitution rule get_64_lsb_f_rules(44). This was achieved by replacing all occurrences of spark__crypto__word_count_t__last by: 268435455. New H4: dst__index__subtype__1__last <= 268435455 %%% Hypotheses H3 & H20 together imply that dst_index < dst__index__subtype__1__last. H3 & H20 have therefore been deleted and a new H23 added to this effect. *** Proved C1: dst_index <= 2147483646 using hypothesis H21. *** PROVED VC. @@@@@@@@@@ VC: procedure_get_64_lsb_first_24. 
@@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> spark__unsigned__u64__first <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) %%% Simplified H10 on reading formula in, to give: %%% H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule get_64_lsb_f_rules(43). This was achieved by replacing all occurrences of spark__crypto__word_count_t__first by: 0. New C1: dst_index >= - 1 -S- Applied substitution rule get_64_lsb_f_rules(44). This was achieved by replacing all occurrences of spark__crypto__word_count_t__last by: 268435455. New H4: dst__index__subtype__1__last <= 268435455 New C2: dst_index <= 268435454 *** Proved C1: dst_index >= - 1 using hypotheses H2 & H14. *** Proved C2: dst_index <= 268435454 using hypotheses H3, H4 & H20. *** PROVED VC. @@@@@@@@@@ VC: procedure_get_64_lsb_first_25. @@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> spark__unsigned__u64__first <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) %%% Simplified H10 on reading formula in, to give: %%% H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule get_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H8: dst_index <> dst__index__subtype__1__last -> dst_index <= 2147483646 New H9: dst_index <> dst__index__subtype__1__last -> src_index <= 2147483639 New H12: src_offset <= 2147483647 New H19: src_offset + dst__index__subtype__1__last * 8 <= 2147483647 New H21: dst_index <= 2147483646 New C1: src_index <= 2147483639 >>> Restructured hypothesis H20 into: >>> H20: dst_index <> dst__index__subtype__1__last --- Attempted addition of new hypothesis: dst_index <= 2147483646 eliminated: this already exists (as H21). +++ Using "A->B, A |- B" on hypotheses H8 & H20 yields a new hypothesis: +++ H21: dst_index <= 2147483646 +++ Using "A->B, A |- B" on hypotheses H9 & H20 yields a new hypothesis: +++ H24: src_index <= 2147483639 -S- Applied substitution rule get_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H11: src_offset >= 0 -S- Applied substitution rule get_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule get_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 255) -S- Applied substitution rule get_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> 0 <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) -S- Applied substitution rule get_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> 0 <= element(dst, [i_]) and element(dst, [i_]) <= 18446744073709551615) -S- Applied substitution rule get_64_lsb_f_rules(43). This was achieved by replacing all occurrences of spark__crypto__word_count_t__first by: 0. New H22: dst_index >= - 1 -S- Applied substitution rule get_64_lsb_f_rules(44). This was achieved by replacing all occurrences of spark__crypto__word_count_t__last by: 268435455. New H4: dst__index__subtype__1__last <= 268435455 New H23: dst_index <= 268435454 %%% Hypotheses H3 & H20 together imply that dst_index < dst__index__subtype__1__last. H3 & H20 have therefore been deleted and a new H25 added to this effect. *** Proved C1: src_index <= 2147483639 using hypothesis H24. *** PROVED VC. @@@@@@@@@@ VC: procedure_get_64_lsb_first_26. @@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> spark__unsigned__u64__first <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) %%% Simplified H10 on reading formula in, to give: %%% H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) *** Proved C1: src_index + 8 >= natural__first using hypotheses H6 & H11. *** Proved C2: src_index + 8 <= natural__last using hypothesis H24. *** PROVED VC. @@@@@@@@@@ VC: procedure_get_64_lsb_first_27. @@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst_index -> spark__unsigned__u64__first <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) %%% Simplified H10 on reading formula in, to give: %%% H10: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__byte__first <= element(src, [i___1]) and element( src, [i___1]) <= spark__unsigned__byte__last) %%% Simplified C1 on reading formula in, to give: %%% C1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst__index__subtype__1__last -> spark__unsigned__u64__first <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) *** Proved C1: for_all(i_ : integer, dst__index__subtype__1__first <= i_ and i_ <= dst__index__subtype__1__last -> spark__unsigned__u64__first <= element(dst, [i_]) and element(dst, [i_]) <= spark__unsigned__u64__last) using hypotheses H1 & H20. *** PROVED VC. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/get_64_lsb_first.log0000644000175000017500000000264411712513676027021 0ustar eugeneugenSPARK Simplifier Pro Edition Reading get_64_lsb_first.fdl (for inherited FDL type declarations) Reading skein.rlu (for user-defined proof rules) Processing get_64_lsb_first.vcg ... 
Simplified VC: 1 - All conclusions proved Simplified VC: 2 - All conclusions proved Simplified VC: 3 - All conclusions proved Simplified VC: 4 - 8 conclusions remain unproven Simplified VC: 5 - All conclusions proved Simplified VC: 6 - All conclusions proved Simplified VC: 7 - All conclusions proved Simplified VC: 8 - All conclusions proved Simplified VC: 9 - All conclusions proved Simplified VC: 10 - All conclusions proved Simplified VC: 11 - All conclusions proved Simplified VC: 12 - All conclusions proved Simplified VC: 13 - All conclusions proved Simplified VC: 14 - All conclusions proved Simplified VC: 15 - All conclusions proved Simplified VC: 16 - All conclusions proved Simplified VC: 17 - All conclusions proved Simplified VC: 18 - All conclusions proved Simplified VC: 19 - All conclusions proved Simplified VC: 20 - All conclusions proved Simplified VC: 21 - 1 conclusion remains unproven Simplified VC: 22 - 2 conclusions remain unproven Simplified VC: 23 - All conclusions proved Simplified VC: 24 - All conclusions proved Simplified VC: 25 - All conclusions proved Simplified VC: 26 - All conclusions proved Simplified VC: 27 - All conclusions proved Automatic simplification completed. Simplified output sent to get_64_lsb_first.siv. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/put_64_lsb_first.vlg0000644000175000017500000000326711712765060027057 0ustar eugeneugen Non-option args: put_64_lsb_first Option args: -fuse-concls \ -decls=[SUPRESSED IN PLAIN MODE] \ -unique-working-files \ -rules=[SUPRESSED IN PLAIN MODE] \ -rules=[SUPRESSED IN PLAIN MODE] \ -elim-enums \ -ground-eval-exp \ -abstract-exp \ -abstract-divmod \ -utick \ -gtick \ -longtick \ -echo-final-stats \ -csv-reports-include-unit-kind \ -level=warning \ -bit-type \ -bit-type-bool-eq-to-iff \ -refine-types \ -refine-int-subrange-type \ -abstract-arrays-records-late \ -elim-array-constructors \ -add-array-select-box-update-axioms \ -abstract-array-box-updates \ -add-array-select-update-axioms \ -abstract-array-select-updates \ -abstract-array-types \ -abstract-record-types \ -abstract-bit-ops \ -abstract-bit-valued-eqs \ -abstract-bit-valued-int-le \ -elim-bit-type-and-consts \ -abstract-reals \ -lift-quants \ -strip-quantifier-patterns \ -elim-type-aliases \ -interface-mode=smtlib \ -refine-bit-type-as-int-subtype \ -refine-bit-eq-equiv \ -elim-record-constructors \ -add-record-select-update-axioms \ -abstract-record-selects-updates \ -logic=AUFNIRA \ -report=put_64_lsb_first \ -prover-command=[SUPRESSED IN PLAIN MODE] \ -siv \ -plain \ -read-unit-rlu-files \ -read-directory-rlu-files \ -delete-rules-with-undeclared-ids \ Total ERROR messages: 0 Total WARNING messages: 0 Summary Stats: true: 0 (-nan%) unproven: 0 (-nan%) error: 0 (-nan%) total: 0 spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_hash.log0000644000175000017500000000066111712513676026362 0ustar eugeneugenSPARK Simplifier Pro Edition Reading skein_512_hash.fdl (for inherited FDL type declarations) Reading skein.rlu (for user-defined proof rules) Processing skein_512_hash.vcg ... Simplified VC: 1 - All conclusions proved Simplified VC: 2 - All conclusions proved Simplified VC: 3 - All conclusions proved Simplified VC: 4 - All conclusions proved Automatic simplification completed. Simplified output sent to skein_512_hash.siv. 
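[Editorial note, not part of the archived tool output.] The get_64_lsb_first VCs traced above all revolve around one arithmetic fact: packing eight consecutive source bytes into a 64-bit word, least significant byte first, where each partial sum is reduced "mod 18446744073709551616" and therefore stays within 0 .. 18446744073709551615 (this is what hypotheses such as H133/H134 earlier in the .slg record). The following is a minimal, hypothetical Ada sketch of that packing, written only to illustrate the arithmetic the simplifier is reasoning about; the type and subprogram names here (Byte_Seq, Word_At, LSB_First_Sketch) are assumptions for this example and are not the shipped SPARKSkein source.

with Interfaces; use Interfaces;

procedure LSB_First_Sketch is
   type Byte_Seq is array (Natural range <>) of Unsigned_8;

   --  Pack Src (Offset .. Offset + 7) into one 64-bit word, least
   --  significant byte first.  Unsigned_64 is a modular type, so every
   --  intermediate sum is implicitly reduced mod 2**64, mirroring the
   --  "... mod 18446744073709551616" terms in the simplifier output.
   function Word_At (Src : Byte_Seq; Offset : Natural) return Unsigned_64 is
      W : Unsigned_64 := 0;
   begin
      for I in reverse 0 .. 7 loop
         W := Shift_Left (W, 8) + Unsigned_64 (Src (Offset + I));
      end loop;
      return W;
   end Word_At;

   Example : constant Byte_Seq (0 .. 7) :=
     (16#EF#, 16#BE#, 16#AD#, 16#DE#, 0, 0, 0, 0);
begin
   --  Bytes 0 .. 3 are EF BE AD DE; with byte 0 as the least significant
   --  byte, the packed word is 16#DEAD_BEEF#.
   pragma Assert (Word_At (Example, 0) = 16#DEAD_BEEF#);
end LSB_First_Sketch;

Because Unsigned_64 is modular, no run-time check is needed on the additions themselves; the VCs above instead discharge the index bounds (src_offset + dst_index * 8 + k within the source array) and the range of each byte and shifted term.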
spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/put_64_lsb_first.siv0000644000175000017500000000343711712513676027073 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Put_64_LSB_First For path(s) from start to run-time check associated with statement of line 102: procedure_put_64_lsb_first_1. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 102: procedure_put_64_lsb_first_2. *** true . /* all conclusions proved */ For path(s) from start to assertion of line 103: procedure_put_64_lsb_first_3. *** true . /* all conclusions proved */ For path(s) from assertion of line 103 to assertion of line 103: procedure_put_64_lsb_first_4. *** true . /* all conclusions proved */ For path(s) from assertion of line 103 to check associated with statement of line 111: procedure_put_64_lsb_first_5. *** true . /* all conclusions proved */ For path(s) from assertion of line 103 to run-time check associated with statement of line 113: procedure_put_64_lsb_first_6. *** true . /* all conclusions proved */ For path(s) from assertion of line 103 to run-time check associated with statement of line 113: procedure_put_64_lsb_first_7. *** true . /* all conclusions proved */ For path(s) from start to finish: procedure_put_64_lsb_first_8. *** true . /* all conclusions proved */ procedure_put_64_lsb_first_9. *** true . /* all conclusions proved */ For path(s) from assertion of line 103 to finish: procedure_put_64_lsb_first_10. *** true . /* all conclusions proved */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein.rlu0000644000175000017500000000102311712513676025002 0ustar eugeneugen/* skein_rules(1) and skein_rules(2) */ /* These rules form the "retrieve" relation for the refinement of Skein_512_Context */ /* from a limited private to a concrete type */ skein_rules(1): byte_count_of(C) may_be_replaced_by fld_byte_count(fld_h(C)) if [goal(checktype(C,skein_512_context))] . skein_rules(2): hash_bit_len_of(C) may_be_replaced_by fld_hash_bit_len(fld_h(C)) if [goal(checktype(C,skein_512_context))] . spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_init.vsm0000644000175000017500000000006211712765060026415 0ustar eugeneugenskein_512_init,0,0,0,0,0,0,0,-nan,-nan,-nan,-nan, spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block.slg0000644000175000017500000040040611712513676030274 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block RRS The following user defined rule files have been read: &&& skein.rlu SEM No semantic checks are performed on the rules. @@@@@@@@@@ VC: procedure_skein_512_process_block_1. 
@@@@@@@@@@ %%% Simplified H10 on reading formula in, to give: %%% H10: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H11 on reading formula in, to give: %%% H11: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H29 on reading formula in, to give: %%% H29: for_all(i___1 : integer, block__index__subtype__1__first <= i___1 and i___1 <= block__index__subtype__1__last -> spark__unsigned__byte__first <= element(block, [i___1]) and element( block, [i___1]) <= spark__unsigned__byte__last) *** Proved C1: starting_offset >= natural__first using hypothesis H30. *** Proved C2: starting_offset <= natural__last using hypothesis H31. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_process_block_2. @@@@@@@@@@ %%% Simplified H10 on reading formula in, to give: %%% H10: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H11 on reading formula in, to give: %%% H11: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H29 on reading formula in, to give: %%% H29: for_all(i___1 : integer, block__index__subtype__1__first <= i___1 and i___1 <= block__index__subtype__1__last -> spark__unsigned__byte__first <= element(block, [i___1]) and element( block, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H36 has been replaced by "true". (It is already present, as H30). --- Hypothesis H37 has been replaced by "true". (It is already present, as H31). -S- Applied substitution rule skein_512_pr_rules(122). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H32: block_count >= 1 New C1: true -S- Applied substitution rule skein_512_pr_rules(123). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H33: block_count <= 33554431 New C2: true *** Proved C1: true *** Proved C2: true *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_process_block_3. 
@@@@@@@@@@ %%% Simplified H10 on reading formula in, to give: %%% H10: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H11 on reading formula in, to give: %%% H11: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H29 on reading formula in, to give: %%% H29: for_all(i___1 : integer, block__index__subtype__1__first <= i___1 and i___1 <= block__index__subtype__1__last -> spark__unsigned__byte__first <= element(block, [i___1]) and element( block, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H36 has been replaced by "true". (It is already present, as H30). --- Hypothesis H37 has been replaced by "true". (It is already present, as H31). %%% Simplified C1 on reading formula in, to give: %%% C1: true %%% Simplified C2 on reading formula in, to give: %%% C2: true %%% Simplified C3 on reading formula in, to give: %%% C3: true %%% Simplified C5 on reading formula in, to give: %%% C5: true %%% Simplified C11 on reading formula in, to give: %%% C11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified C12 on reading formula in, to give: %%% C12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified C30 on reading formula in, to give: %%% C30: for_all(i___1 : integer, block__index__subtype__1__first <= i___1 and i___1 <= block__index__subtype__1__last -> spark__unsigned__byte__first <= element(block, [i___1]) and element( block, [i___1]) <= spark__unsigned__byte__last) *** Proved C1: true *** Proved C2: true *** Proved C3: true *** Proved C5: true *** Proved C6: starting_offset + 63 <= block__index__subtype__1__last using hypothesis H7. *** Proved C8: starting_offset + (block_count - 1) * skein_512_block_bytes_c + 63 <= block__index__subtype__1__last using hypothesis H6. *** Proved C9: block__index__subtype__1__last <= natural__last using hypothesis H8. *** Proved C11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) using hypothesis H10. *** Proved C12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) using hypothesis H11. *** Proved C13: fld_byte_count(fld_h(ctx)) >= natural__first using hypothesis H12. *** Proved C14: fld_byte_count(fld_h(ctx)) <= natural__last using hypothesis H13. *** Proved C15: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first using hypothesis H14. *** Proved C16: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last using hypothesis H15. 
*** Proved C17: true *** Proved C18: true *** Proved C19: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first using hypothesis H18. *** Proved C20: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last using hypothesis H19. *** Proved C21: true *** Proved C22: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first using hypothesis H21. *** Proved C23: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last using hypothesis H22. *** Proved C24: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first using hypothesis H23. *** Proved C25: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last using hypothesis H24. *** Proved C26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first using hypothesis H25. *** Proved C27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last using hypothesis H26. *** Proved C28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first using hypothesis H27. *** Proved C29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last using hypothesis H28. *** Proved C30: for_all(i___1 : integer, block__index__subtype__1__first <= i___1 and i___1 <= block__index__subtype__1__last -> spark__unsigned__byte__first <= element(block, [i___1]) and element( block, [i___1]) <= spark__unsigned__byte__last) using hypothesis H29. *** Proved C31: starting_offset >= natural__first using hypothesis H30. *** Proved C32: starting_offset <= natural__last using hypothesis H31. *** Proved C33: block_count >= positive_block_512_count_t__first using hypothesis H32. *** Proved C34: block_count <= positive_block_512_count_t__last using hypothesis H33. *** Proved C35: byte_count_add >= natural__first using hypothesis H34. *** Proved C36: byte_count_add <= natural__last using hypothesis H35. *** Proved C37: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first using hypothesis H1. *** Proved C38: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last using hypothesis H2. *** Proved C39: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first using hypothesis H3. *** Proved C40: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last using hypothesis H4. *** Proved C41: block__index__subtype__1__first = 0 using hypothesis H5. *** Proved C42: starting_offset + (block_count - 1) * skein_512_block_bytes_c + 63 <= block__index__subtype__1__last using hypothesis H6. *** Proved C43: starting_offset + 63 <= block__index__subtype__1__last using hypothesis H7. *** Proved C44: block__index__subtype__1__last <= natural__last using hypothesis H8. *** Proved C45: starting_offset <= natural__last - 63 using hypothesis H9. -S- Applied substitution rule skein_512_pr_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H8: block__index__subtype__1__last <= 2147483647 New H9: starting_offset <= 2147483584 New H13: fld_byte_count(fld_h(ctx)) <= 2147483647 New H31: starting_offset <= 2147483647 New H35: byte_count_add <= 2147483647 New C7: starting_offset + spark__crypto__i8__last * 8 <= 2147483647 New C10: 1 < block_count -> starting_offset + skein_512_block_bytes_c <= 2147483647 -S- Applied substitution rule skein_512_pr_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. 
New H6: starting_offset + (block_count - 1) * 64 + 63 <= block__index__subtype__1__last New C10: 1 < block_count -> starting_offset <= 2147483583 -S- Applied substitution rule skein_512_pr_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H12: fld_byte_count(fld_h(ctx)) >= 0 New H30: starting_offset >= 0 New H34: byte_count_add >= 0 -S- Applied substitution rule skein_512_pr_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H18: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_pr_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H19: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule skein_512_pr_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H21: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_pr_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule skein_512_pr_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H10: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H29: for_all(i___1 : integer, block__index__subtype__1__first <= i___1 and i___1 <= block__index__subtype__1__last -> 0 <= element( block, [i___1]) and element(block, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_pr_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H10: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H29: for_all(i___1 : integer, block__index__subtype__1__first <= i___1 and i___1 <= block__index__subtype__1__last -> 0 <= element( block, [i___1]) and element(block, [i___1]) <= 255) -S- Applied substitution rule skein_512_pr_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H23: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_pr_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H24: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule skein_512_pr_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H25: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_pr_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule skein_512_pr_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H11: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H27: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_pr_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H11: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(83). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New C7: starting_offset <= 2147483591 -S- Applied substitution rule skein_512_pr_rules(97). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H14: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_pr_rules(98). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H15: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_pr_rules(102). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 -S- Applied substitution rule skein_512_pr_rules(103). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_pr_rules(107). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H11: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(108). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H11: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(112). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_pr_rules(113). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 -S- Applied substitution rule skein_512_pr_rules(117). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H10: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule skein_512_pr_rules(118). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H10: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule skein_512_pr_rules(122). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H32: block_count >= 1 New H38: true -S- Applied substitution rule skein_512_pr_rules(123). 
This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H33: block_count <= 33554431 New H39: true *** Proved C4: 1 <= block_count using hypothesis H32. *** Proved C7: starting_offset <= 2147483591 using hypothesis H9. --- Eliminated hypothesis H16 (true-hypothesis). --- Eliminated hypothesis H17 (true-hypothesis). --- Eliminated hypothesis H20 (true-hypothesis). --- Eliminated hypothesis H36 (true-hypothesis). --- Eliminated hypothesis H37 (true-hypothesis). --- Eliminated hypothesis H38 (true-hypothesis). --- Eliminated hypothesis H39 (true-hypothesis). --- Eliminated hypothesis H15 (duplicate of H2). --- Eliminated hypothesis H12 (duplicate of H3). --- Eliminated hypothesis H13 (redundant, given H4). --- Eliminated hypothesis H14 (redundant, given H1). --- Eliminated hypothesis H31 (redundant, given H9). +++ New H40: integer__size >= 0 +++ New H41: natural__size >= 0 +++ New H42: spark__unsigned__u6__size >= 0 +++ New H43: spark__unsigned__u7__size >= 0 +++ New H44: spark__unsigned__byte__size >= 0 +++ New H45: spark__unsigned__u16__size >= 0 +++ New H46: spark__unsigned__u32__size >= 0 +++ New H47: spark__unsigned__u64__size >= 0 +++ New H48: spark__crypto__i3__size >= 0 +++ New H49: spark__crypto__i8__size >= 0 +++ New H50: spark__crypto__i9__size >= 0 +++ New H51: spark__crypto__word_count_t__size >= 0 +++ New H52: hash_bit_length__size >= 0 +++ New H53: initialized_hash_bit_length__size >= 0 +++ New H54: skein_512_state_words_index__size >= 0 +++ New H55: skein_512_block_bytes_count__size >= 0 +++ New H56: skein_512_block_bytes_index__size >= 0 +++ New H57: positive_block_512_count_t__size >= 0 +++ New H58: skein_512_context__size >= 0 +++ New H59: context_header__size >= 0 +++ New H60: block__index__subtype__1__first <= block__index__subtype__1__last +++ New H61: block__index__subtype__1__first >= 0 +++ New H62: block__index__subtype__1__last >= 0 --- Attempted addition of new hypothesis: block__index__subtype__1__last <= 2147483647 eliminated: this already exists (as H8). +++ New H8: block__index__subtype__1__last <= 2147483647 +++ New H63: block__index__subtype__1__first <= 2147483647 @@@@@@@@@@ VC: procedure_skein_512_process_block_4. 
@@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h(ctx~)) %%% Simplified H2 on reading formula in, to give: %%% H2: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h(ctx~)) %%% Simplified H11 on reading formula in, to give: %%% H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H12 on reading formula in, to give: %%% H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, block__index__subtype__1__first <= i___1 and i___1 <= block__index__subtype__1__last -> spark__unsigned__byte__first <= element(block, [i___1]) and element( block, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H37 on reading formula in, to give: %%% H37: fld_hash_bit_len(fld_h(ctx~)) >= initialized_hash_bit_length__first %%% Simplified H38 on reading formula in, to give: %%% H38: fld_hash_bit_len(fld_h(ctx~)) <= initialized_hash_bit_length__last %%% Simplified H39 on reading formula in, to give: %%% H39: fld_byte_count(fld_h(ctx~)) >= skein_512_block_bytes_count__first %%% Simplified H40 on reading formula in, to give: %%% H40: fld_byte_count(fld_h(ctx~)) <= skein_512_block_bytes_count__last --- Hypothesis H42 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H44 has been replaced by "true". (It is already present, as H9) . %%% Simplified H50 on reading formula in, to give: %%% H50: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks__1, [i___1]) and element(ks__1, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H51 on reading formula in, to give: %%% H51: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts__2, [i___1]) and element(ts__2, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H54 has been replaced by "true". (It is already present, as H52). --- Hypothesis H55 has been replaced by "true". (It is already present, as H53). --- Hypothesis H56 has been replaced by "true". (It is already present, as H41). --- Hypothesis H62 has been replaced by "true". (It is already present, as H7) . %%% Simplified H63 on reading formula in, to give: %%% H63: for_all(i_ : integer, spark__crypto__i8__first <= i_ and i_ <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element( w__3, [i_]) and element(w__3, [i_]) <= spark__unsigned__u64__last) %%% Simplified H64 on reading formula in, to give: %%% H64: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(w__3, [i___1]) and element(w__3, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H65 has been replaced by "true". (It is already present, as H63). 
%%% Simplified H66 on reading formula in, to give: %%% H66: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__4, [i___1]) and element(x__4, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H67 on reading formula in, to give: %%% H67: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__5, [i___1]) and element(x__5, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H68 on reading formula in, to give: %%% H68: fld_hash_bit_len(fld_h(ctx__6)) = fld_hash_bit_len(fld_h(ctx)) %%% Simplified H69 on reading formula in, to give: %%% H69: fld_byte_count(fld_h(ctx__6)) = fld_byte_count(fld_h(ctx)) %%% Simplified H70 on reading formula in, to give: %%% H70: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__6), [i___2]) and element(fld_b(ctx__6), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H71 on reading formula in, to give: %%% H71: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__6), [i___1]) and element(fld_x(ctx__6), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H92 has been replaced by "true". (It is already present, as H89). --- Hypothesis H93 has been replaced by "true". (It is already present, as H90). --- Hypothesis H96 has been replaced by "true". (It is already present, as H52). --- Hypothesis H97 has been replaced by "true". (It is already present, as H53). %%% Simplified C1 on reading formula in, to give: %%% C1: fld_hash_bit_len(fld_h(ctx__6)) = fld_hash_bit_len(fld_h(ctx~)) %%% Simplified C2 on reading formula in, to give: %%% C2: fld_byte_count(fld_h(ctx__6)) = fld_byte_count(fld_h(ctx~)) %%% Simplified C3 on reading formula in, to give: %%% C3: j >= 0 %%% Simplified C5 on reading formula in, to give: %%% C5: src_offset + skein_512_block_bytes_c = starting_offset + j * skein_512_block_bytes_c %%% Simplified C11 on reading formula in, to give: %%% C11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__6), [i___2]) and element(fld_b(ctx__6), [i___2]) <= spark__unsigned__byte__last) %%% Simplified C12 on reading formula in, to give: %%% C12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__6), [i___1]) and element(fld_x(ctx__6), [i___1]) <= spark__unsigned__u64__last) %%% Simplified C13 on reading formula in, to give: %%% C13: fld_byte_count(fld_h(ctx__6)) >= natural__first %%% Simplified C14 on reading formula in, to give: %%% C14: fld_byte_count(fld_h(ctx__6)) <= natural__last %%% Simplified C15 on reading formula in, to give: %%% C15: fld_hash_bit_len(fld_h(ctx__6)) >= hash_bit_length__first %%% Simplified C16 on reading formula in, to give: %%% C16: fld_hash_bit_len(fld_h(ctx__6)) <= hash_bit_length__last %%% Simplified C19 on reading formula in, to give: %%% C19: fld_field_type(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u6__first %%% Simplified C20 on reading formula in, to give: %%% C20: fld_field_type(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u6__last %%% Simplified C22 on reading 
formula in, to give: %%% C22: fld_tree_level(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u7__first %%% Simplified C23 on reading formula in, to give: %%% C23: fld_tree_level(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u7__last %%% Simplified C24 on reading formula in, to give: %%% C24: fld_reserved(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u16__first %%% Simplified C25 on reading formula in, to give: %%% C25: fld_reserved(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u16__last %%% Simplified C26 on reading formula in, to give: %%% C26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u32__first %%% Simplified C27 on reading formula in, to give: %%% C27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u32__last %%% Simplified C28 on reading formula in, to give: %%% C28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u64__first %%% Simplified C29 on reading formula in, to give: %%% C29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u64__last %%% Simplified C30 on reading formula in, to give: %%% C30: for_all(i___1 : integer, block__index__subtype__1__first <= i___1 and i___1 <= block__index__subtype__1__last -> spark__unsigned__byte__first <= element(block, [i___1]) and element( block, [i___1]) <= spark__unsigned__byte__last) %%% Simplified C37 on reading formula in, to give: %%% C37: fld_hash_bit_len(fld_h(ctx~)) >= initialized_hash_bit_length__first %%% Simplified C38 on reading formula in, to give: %%% C38: fld_hash_bit_len(fld_h(ctx~)) <= initialized_hash_bit_length__last %%% Simplified C39 on reading formula in, to give: %%% C39: fld_byte_count(fld_h(ctx~)) >= skein_512_block_bytes_count__first %%% Simplified C40 on reading formula in, to give: %%% C40: fld_byte_count(fld_h(ctx~)) <= skein_512_block_bytes_count__last *** Proved C1: fld_hash_bit_len(fld_h(ctx__6)) = fld_hash_bit_len(fld_h(ctx~)) using hypotheses H1 & H68. *** Proved C2: fld_byte_count(fld_h(ctx__6)) = fld_byte_count(fld_h(ctx~)) using hypotheses H2 & H69. *** Proved C3: j >= 0 using hypothesis H3. *** Proved C8: starting_offset + (block_count - 1) * skein_512_block_bytes_c + 63 <= block__index__subtype__1__last using hypothesis H8. *** Proved C9: block__index__subtype__1__last <= natural__last using hypothesis H9. *** Proved C11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__6), [i___2]) and element(fld_b(ctx__6), [i___2]) <= spark__unsigned__byte__last) using hypothesis H70. *** Proved C12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__6), [i___1]) and element(fld_x(ctx__6), [i___1]) <= spark__unsigned__u64__last) using hypothesis H71. *** Proved C13: fld_byte_count(fld_h(ctx__6)) >= natural__first using hypothesis H72. *** Proved C14: fld_byte_count(fld_h(ctx__6)) <= natural__last using hypothesis H73. *** Proved C15: fld_hash_bit_len(fld_h(ctx__6)) >= hash_bit_length__first using hypothesis H74. *** Proved C16: fld_hash_bit_len(fld_h(ctx__6)) <= hash_bit_length__last using hypothesis H75. *** Proved C17: true *** Proved C18: true *** Proved C19: fld_field_type(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u6__first using hypothesis H78. 
*** Proved C20: fld_field_type(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u6__last using hypothesis H79. *** Proved C21: true *** Proved C22: fld_tree_level(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u7__first using hypothesis H81. *** Proved C23: fld_tree_level(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u7__last using hypothesis H82. *** Proved C24: fld_reserved(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u16__first using hypothesis H83. *** Proved C25: fld_reserved(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u16__last using hypothesis H84. *** Proved C26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u32__first using hypothesis H85. *** Proved C27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u32__last using hypothesis H86. *** Proved C28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u64__first using hypothesis H87. *** Proved C29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u64__last using hypothesis H88. *** Proved C30: for_all(i___1 : integer, block__index__subtype__1__first <= i___1 and i___1 <= block__index__subtype__1__last -> spark__unsigned__byte__first <= element(block, [i___1]) and element( block, [i___1]) <= spark__unsigned__byte__last) using hypothesis H30. *** Proved C31: starting_offset >= natural__first using hypothesis H31. *** Proved C32: starting_offset <= natural__last using hypothesis H32. *** Proved C33: block_count >= positive_block_512_count_t__first using hypothesis H33. *** Proved C34: block_count <= positive_block_512_count_t__last using hypothesis H34. *** Proved C35: byte_count_add >= natural__first using hypothesis H35. *** Proved C36: byte_count_add <= natural__last using hypothesis H36. *** Proved C37: fld_hash_bit_len(fld_h(ctx~)) >= initialized_hash_bit_length__first using hypothesis H37. *** Proved C38: fld_hash_bit_len(fld_h(ctx~)) <= initialized_hash_bit_length__last using hypothesis H38. *** Proved C39: fld_byte_count(fld_h(ctx~)) >= skein_512_block_bytes_count__first using hypothesis H39. *** Proved C40: fld_byte_count(fld_h(ctx~)) <= skein_512_block_bytes_count__last using hypothesis H40. *** Proved C41: block__index__subtype__1__first = 0 using hypothesis H41. *** Proved C42: starting_offset + (block_count - 1) * skein_512_block_bytes_c + 63 <= block__index__subtype__1__last using hypothesis H8. *** Proved C43: starting_offset + 63 <= block__index__subtype__1__last using hypothesis H43. *** Proved C44: block__index__subtype__1__last <= natural__last using hypothesis H9. *** Proved C45: starting_offset <= natural__last - 63 using hypothesis H45. -S- Applied substitution rule skein_512_pr_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. 
New H7: src_offset + spark__crypto__i8__last * 8 <= 2147483647 New H9: block__index__subtype__1__last <= 2147483647 New H10: j < block_count -> src_offset + skein_512_block_bytes_c <= 2147483647 New H14: fld_byte_count(fld_h(ctx)) <= 2147483647 New H32: starting_offset <= 2147483647 New H36: byte_count_add <= 2147483647 New H45: starting_offset <= 2147483584 New H53: src_offset <= 2147483647 New H73: fld_byte_count(fld_h(ctx__6)) <= 2147483647 New H99: src_offset + skein_512_block_bytes_c <= 2147483647 New C7: src_offset + skein_512_block_bytes_c + spark__crypto__i8__last * 8 <= 2147483647 New C10: j + 1 < block_count -> src_offset + skein_512_block_bytes_c + skein_512_block_bytes_c <= 2147483647 >>> Restructured hypothesis H91 into: >>> H91: j < block_count --- Attempted addition of new hypothesis: src_offset + skein_512_block_bytes_c <= 2147483647 eliminated: this already exists (as H99). +++ Using "A->B, A |- B" on hypotheses H10 & H91 yields a new hypothesis: +++ H99: src_offset + skein_512_block_bytes_c <= 2147483647 -S- Applied substitution rule skein_512_pr_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H5: src_offset = starting_offset + (j - 1) * 64 New H8: starting_offset + (block_count - 1) * 64 + 63 <= block__index__subtype__1__last New H10: j < block_count -> src_offset <= 2147483583 New H98: src_offset + 64 >= natural__first New H99: src_offset <= 2147483583 New C5: src_offset + 64 = starting_offset + j * 64 New C6: src_offset + 127 <= block__index__subtype__1__last New C7: src_offset + 64 + spark__crypto__i8__last * 8 <= 2147483647 New C10: j + 1 < block_count -> src_offset <= 2147483519 -S- Applied substitution rule skein_512_pr_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H13: fld_byte_count(fld_h(ctx)) >= 0 New H31: starting_offset >= 0 New H35: byte_count_add >= 0 New H52: src_offset >= 0 New H72: fld_byte_count(fld_h(ctx__6)) >= 0 New H98: src_offset >= - 64 -S- Applied substitution rule skein_512_pr_rules(39). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H46: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod 18446744073709551616 >= spark__unsigned__u64__first New H47: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule skein_512_pr_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H19: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H78: fld_field_type(fld_tweak_words(fld_h(ctx__6))) >= 0 -S- Applied substitution rule skein_512_pr_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H20: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H79: fld_field_type(fld_tweak_words(fld_h(ctx__6))) <= 63 -S- Applied substitution rule skein_512_pr_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H81: fld_tree_level(fld_tweak_words(fld_h(ctx__6))) >= 0 -S- Applied substitution rule skein_512_pr_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H82: fld_tree_level(fld_tweak_words(fld_h(ctx__6))) <= 127 -S- Applied substitution rule skein_512_pr_rules(53). 
This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H30: for_all(i___1 : integer, block__index__subtype__1__first <= i___1 and i___1 <= block__index__subtype__1__last -> 0 <= element( block, [i___1]) and element(block, [i___1]) <= spark__unsigned__byte__last) New H70: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__6), [i___2]) and element(fld_b(ctx__6), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_pr_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H30: for_all(i___1 : integer, block__index__subtype__1__first <= i___1 and i___1 <= block__index__subtype__1__last -> 0 <= element( block, [i___1]) and element(block, [i___1]) <= 255) New H70: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__6), [i___2]) and element(fld_b(ctx__6), [i___2]) <= 255) -S- Applied substitution rule skein_512_pr_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H24: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H83: fld_reserved(fld_tweak_words(fld_h(ctx__6))) >= 0 -S- Applied substitution rule skein_512_pr_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H25: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H84: fld_reserved(fld_tweak_words(fld_h(ctx__6))) <= 65535 -S- Applied substitution rule skein_512_pr_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H85: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__6))) >= 0 -S- Applied substitution rule skein_512_pr_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H86: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__6))) <= 4294967295 -S- Applied substitution rule skein_512_pr_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H48: byte_count_add >= 0 New H50: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks__1, [i___1]) and element(ks__1, [i___1]) <= spark__unsigned__u64__last) New H51: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts__2, [i___1]) and element(ts__2, [i___1]) <= spark__unsigned__u64__last) New H63: for_all(i_ : integer, spark__crypto__i8__first <= i_ and i_ <= spark__crypto__i8__last -> 0 <= element(w__3, [i_]) and element(w__3, [i_]) <= spark__unsigned__u64__last) New H64: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(w__3, [i___1]) and element(w__3, [i___1]) <= spark__unsigned__u64__last) New H66: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__4, [i___1]) and element(x__4, [i___1]) <= spark__unsigned__u64__last) New H67: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__5, [i___1]) and element(x__5, [i___1]) <= spark__unsigned__u64__last) New H71: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__6), [i___1]) and element(fld_x(ctx__6), [i___1]) <= spark__unsigned__u64__last) New H87: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__6))) >= 0 New H46: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod 18446744073709551616 >= 0 -S- Applied substitution rule skein_512_pr_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H49: byte_count_add <= 18446744073709551615 New H88: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__6))) <= 18446744073709551615 New H47: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod 18446744073709551616 <= 18446744073709551615 New H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H50: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks__1, [i___1]) and element(ks__1, [i___1]) <= 18446744073709551615) New H51: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts__2, [i___1]) and element(ts__2, [i___1]) <= 18446744073709551615) New H63: for_all(i_ : integer, spark__crypto__i8__first <= i_ and i_ <= spark__crypto__i8__last -> 0 <= element(w__3, [i_]) and element(w__3, [i_]) <= 18446744073709551615) New H64: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(w__3, [i___1]) and element(w__3, [i___1]) <= 18446744073709551615) New H66: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__4, [i___1]) and element(x__4, [i___1]) <= 18446744073709551615) New H67: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__5, [i___1]) and element(x__5, [i___1]) <= 18446744073709551615) New H71: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__6), [i___1]) and element(fld_x(ctx__6), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(77). This was achieved by replacing all occurrences of spark__crypto__i3__first by: 0. New H51: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts__2, [i___1]) and element( ts__2, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(78). This was achieved by replacing all occurrences of spark__crypto__i3__last by: 2. New H51: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 2 -> 0 <= element(ts__2, [i___1]) and element(ts__2, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(82). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H57: true New H63: for_all(i_ : integer, 0 <= i_ and i_ <= spark__crypto__i8__last -> 0 <= element(w__3, [i_]) and element(w__3, [i_]) <= 18446744073709551615) New H64: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(w__3, [i___1]) and element( w__3, [i___1]) <= 18446744073709551615) New H66: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__4, [i___1]) and element( x__4, [i___1]) <= 18446744073709551615) New H67: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__5, [i___1]) and element( x__5, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(83). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. 
New H7: src_offset <= 2147483591 New H59: src_offset + 56 + 7 >= block__index__subtype__1__first New H60: src_offset + 56 + 7 <= block__index__subtype__1__last New H63: for_all(i_ : integer, 0 <= i_ and i_ <= 7 -> 0 <= element(w__3, [ i_]) and element(w__3, [i_]) <= 18446744073709551615) New H64: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(w__3, [i___1]) and element(w__3, [i___1]) <= 18446744073709551615) New H66: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__4, [i___1]) and element(x__4, [i___1]) <= 18446744073709551615) New H67: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__5, [i___1]) and element(x__5, [i___1]) <= 18446744073709551615) New C7: src_offset + 64 <= 2147483591 -S- Applied substitution rule skein_512_pr_rules(87). This was achieved by replacing all occurrences of spark__crypto__i9__first by: 0. New H50: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks__1, [i___1]) and element( ks__1, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(88). This was achieved by replacing all occurrences of spark__crypto__i9__last by: 8. New H50: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 8 -> 0 <= element(ks__1, [i___1]) and element(ks__1, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(97). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H15: fld_hash_bit_len(fld_h(ctx)) >= 0 New H74: fld_hash_bit_len(fld_h(ctx__6)) >= 0 -S- Applied substitution rule skein_512_pr_rules(98). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H16: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H75: fld_hash_bit_len(fld_h(ctx__6)) <= 2147483640 -S- Applied substitution rule skein_512_pr_rules(102). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H37: fld_hash_bit_len(fld_h(ctx~)) >= 1 -S- Applied substitution rule skein_512_pr_rules(103). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H38: fld_hash_bit_len(fld_h(ctx~)) <= 2147483640 -S- Applied substitution rule skein_512_pr_rules(107). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H12: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H71: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__6), [ i___1]) and element(fld_x(ctx__6), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(108). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H12: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H71: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__6), [i___1]) and element(fld_x(ctx__6), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(112). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H39: fld_byte_count(fld_h(ctx~)) >= 0 -S- Applied substitution rule skein_512_pr_rules(113). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. 
New H40: fld_byte_count(fld_h(ctx~)) <= 64 -S- Applied substitution rule skein_512_pr_rules(117). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H11: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H70: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx__6), [ i___2]) and element(fld_b(ctx__6), [i___2]) <= 255) -S- Applied substitution rule skein_512_pr_rules(118). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H11: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H70: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__6), [i___2]) and element(fld_b(ctx__6), [i___2]) <= 255) -S- Applied substitution rule skein_512_pr_rules(122). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H33: block_count >= 1 New H89: j >= 1 New H94: j >= 0 -S- Applied substitution rule skein_512_pr_rules(123). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H34: block_count <= 33554431 New H90: j <= 33554431 New H95: j <= 33554430 %%% Simplified H59 further, to give: %%% H59: src_offset + 63 >= block__index__subtype__1__first %%% Simplified H60 further, to give: %%% H60: src_offset + 63 <= block__index__subtype__1__last %%% Simplified C7 further, to give: %%% C7: src_offset <= 2147483527 *** Proved C4: j + 1 <= block_count via its standard form, which is: Std.Fm C4: block_count - j > 0 using hypothesis H91. *** Proved C5: src_offset + 64 = starting_offset + j * 64 via its standard form, which is: Std.Fm C5: - (64 * j) + src_offset - starting_offset = - 64 using hypothesis H5. --- Eliminated hypothesis H17 (true-hypothesis). --- Eliminated hypothesis H18 (true-hypothesis). --- Eliminated hypothesis H21 (true-hypothesis). --- Eliminated hypothesis H42 (true-hypothesis). --- Eliminated hypothesis H44 (true-hypothesis). --- Eliminated hypothesis H54 (true-hypothesis). --- Eliminated hypothesis H55 (true-hypothesis). --- Eliminated hypothesis H56 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H65 (true-hypothesis). --- Eliminated hypothesis H76 (true-hypothesis). --- Eliminated hypothesis H77 (true-hypothesis). --- Eliminated hypothesis H80 (true-hypothesis). --- Eliminated hypothesis H92 (true-hypothesis). --- Eliminated hypothesis H93 (true-hypothesis). --- Eliminated hypothesis H96 (true-hypothesis). --- Eliminated hypothesis H97 (true-hypothesis). --- Eliminated hypothesis H57 (true-hypothesis). --- Eliminated hypothesis H89 (duplicate of H3). --- Eliminated hypothesis H60 (duplicate of H6). --- Eliminated hypothesis H48 (duplicate of H35). >>> Using "A->B, A |- B" on H10, given H91, we simplify the former to: >>> H10: src_offset <= 2147483583 --- Eliminated hypothesis H4 (redundant, given H91). --- Eliminated hypothesis H7 (redundant, given H99). --- Eliminated hypothesis H14 (redundant, given H2 & H40). --- Eliminated hypothesis H15 (redundant, given H1 & H37). --- Eliminated hypothesis H32 (redundant, given H45). --- Eliminated hypothesis H33 (redundant, given H3 & H91). --- Eliminated hypothesis H49 (redundant, given H36). --- Eliminated hypothesis H53 (redundant, given H99). 
--- Eliminated hypothesis H58 (redundant, given H6). --- Eliminated hypothesis H61 (redundant, given H6). --- Eliminated hypothesis H90 (redundant, given H95). --- Eliminated hypothesis H94 (redundant, given H3). --- Eliminated hypothesis H98 (redundant, given H52). -S- Eliminated hypothesis H5. This was achieved by replacing all occurrences of src_offset by: starting_offset + (j - 1) * 64. New H6: starting_offset + (j - 1) * 64 + 63 <= block__index__subtype__1__last New H99: starting_offset + (j - 1) * 64 <= 2147483583 New H52: starting_offset + (j - 1) * 64 >= 0 New H59: starting_offset + (j - 1) * 64 + 63 >= block__index__subtype__1__first New H10: starting_offset + (j - 1) * 64 <= 2147483583 New C6: starting_offset + (j - 1) * 64 + 127 <= block__index__subtype__1__last New C10: j + 1 < block_count -> starting_offset + (j - 1) * 64 <= 2147483519 New C7: starting_offset + (j - 1) * 64 <= 2147483527 -S- Substituted hypothesis H68. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( ctx__6)) by: fld_hash_bit_len(fld_h(ctx)). -S- Substituted hypothesis H69. This was achieved by replacing all occurrences of fld_byte_count(fld_h( ctx__6)) by: fld_byte_count(fld_h(ctx)). +++ New H100: integer__size >= 0 +++ New H101: natural__size >= 0 +++ New H102: spark__unsigned__u6__size >= 0 +++ New H103: spark__unsigned__u7__size >= 0 +++ New H104: spark__unsigned__byte__size >= 0 +++ New H105: spark__unsigned__u16__size >= 0 +++ New H106: spark__unsigned__u32__size >= 0 +++ New H107: spark__unsigned__u64__size >= 0 +++ New H108: spark__crypto__i3__size >= 0 +++ New H109: spark__crypto__i8__size >= 0 +++ New H110: spark__crypto__i9__size >= 0 +++ New H111: spark__crypto__word_count_t__size >= 0 +++ New H112: hash_bit_length__size >= 0 +++ New H113: initialized_hash_bit_length__size >= 0 +++ New H114: skein_512_state_words_index__size >= 0 +++ New H115: skein_512_block_bytes_count__size >= 0 +++ New H116: skein_512_block_bytes_index__size >= 0 +++ New H117: positive_block_512_count_t__size >= 0 +++ New H118: skein_512_context__size >= 0 +++ New H119: context_header__size >= 0 +++ New H120: block__index__subtype__1__first <= block__index__subtype__1__last +++ New H121: block__index__subtype__1__first >= 0 +++ New H122: block__index__subtype__1__last >= 0 --- Attempted addition of new hypothesis: block__index__subtype__1__last <= 2147483647 eliminated: this already exists (as H9). +++ New H9: block__index__subtype__1__last <= 2147483647 +++ New H123: block__index__subtype__1__first <= 2147483647 @@@@@@@@@@ VC: procedure_skein_512_process_block_5. 
@@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h(ctx~)) %%% Simplified H2 on reading formula in, to give: %%% H2: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h(ctx~)) %%% Simplified H11 on reading formula in, to give: %%% H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H12 on reading formula in, to give: %%% H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, block__index__subtype__1__first <= i___1 and i___1 <= block__index__subtype__1__last -> spark__unsigned__byte__first <= element(block, [i___1]) and element( block, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H37 on reading formula in, to give: %%% H37: fld_hash_bit_len(fld_h(ctx~)) >= initialized_hash_bit_length__first %%% Simplified H38 on reading formula in, to give: %%% H38: fld_hash_bit_len(fld_h(ctx~)) <= initialized_hash_bit_length__last %%% Simplified H39 on reading formula in, to give: %%% H39: fld_byte_count(fld_h(ctx~)) >= skein_512_block_bytes_count__first %%% Simplified H40 on reading formula in, to give: %%% H40: fld_byte_count(fld_h(ctx~)) <= skein_512_block_bytes_count__last --- Hypothesis H42 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H44 has been replaced by "true". (It is already present, as H9) . -S- Applied substitution rule skein_512_pr_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New C1: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod interfaces__unsigned_64__modulus >= 0 New C3: byte_count_add >= 0 -S- Applied substitution rule skein_512_pr_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New C2: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod interfaces__unsigned_64__modulus <= 18446744073709551615 New C4: byte_count_add <= 18446744073709551615 -S- Applied substitution rule skein_512_pr_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H5: src_offset = starting_offset + (j - 1) * 64 New H8: starting_offset + (block_count - 1) * 64 + 63 <= block__index__subtype__1__last New H10: j < block_count -> src_offset + 64 <= natural__last -S- Applied substitution rule skein_512_pr_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. 
New H13: fld_byte_count(fld_h(ctx)) >= 0 New H31: starting_offset >= 0 New H35: byte_count_add >= 0 -S- Applied substitution rule skein_512_pr_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: src_offset + spark__crypto__i8__last * 8 <= 2147483647 New H9: block__index__subtype__1__last <= 2147483647 New H14: fld_byte_count(fld_h(ctx)) <= 2147483647 New H32: starting_offset <= 2147483647 New H36: byte_count_add <= 2147483647 New H45: starting_offset <= 2147483584 New H10: j < block_count -> src_offset <= 2147483583 -S- Applied substitution rule skein_512_pr_rules(39). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New C1: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod 18446744073709551616 >= 0 New C2: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod 18446744073709551616 <= 18446744073709551615 -S- Applied substitution rule skein_512_pr_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H19: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_pr_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H20: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule skein_512_pr_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_pr_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule skein_512_pr_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H30: for_all(i___1 : integer, block__index__subtype__1__first <= i___1 and i___1 <= block__index__subtype__1__last -> 0 <= element( block, [i___1]) and element(block, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_pr_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H30: for_all(i___1 : integer, block__index__subtype__1__first <= i___1 and i___1 <= block__index__subtype__1__last -> 0 <= element( block, [i___1]) and element(block, [i___1]) <= 255) -S- Applied substitution rule skein_512_pr_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H24: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_pr_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H25: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule skein_512_pr_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_pr_rules(66). 
This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule skein_512_pr_rules(83). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H7: src_offset <= 2147483591 -S- Applied substitution rule skein_512_pr_rules(97). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H15: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_pr_rules(98). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H16: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_pr_rules(102). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H37: fld_hash_bit_len(fld_h(ctx~)) >= 1 -S- Applied substitution rule skein_512_pr_rules(103). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H38: fld_hash_bit_len(fld_h(ctx~)) <= 2147483640 -S- Applied substitution rule skein_512_pr_rules(107). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H12: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(108). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H12: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(112). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H39: fld_byte_count(fld_h(ctx~)) >= 0 -S- Applied substitution rule skein_512_pr_rules(113). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H40: fld_byte_count(fld_h(ctx~)) <= 64 -S- Applied substitution rule skein_512_pr_rules(117). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H11: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule skein_512_pr_rules(118). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H11: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule skein_512_pr_rules(122). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H33: block_count >= 1 -S- Applied substitution rule skein_512_pr_rules(123). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H34: block_count <= 33554431 *** Proved C3: byte_count_add >= 0 using hypothesis H35. *** Proved C4: byte_count_add <= 18446744073709551615 using hypothesis H36. *** Proved C1: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod 18446744073709551616 >= 0 *** Proved C2: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod 18446744073709551616 <= 18446744073709551615 *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_process_block_6. 
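Worked restatement of the conclusions just discharged for VC procedure_skein_512_process_block_5 (a sketch keyed to the hypothesis names in the log above; it is not Simplifier output, and it assumes the non-negative-remainder reading of mod used here). Writing m = 18446744073709551616 = 2^64 for interfaces__unsigned_64__modulus, the step amounts to:

\[
a \ge 0,\; b \ge 0,\; m > 0 \;\Longrightarrow\; 0 \le (a + b) \bmod m \le m - 1 = 18446744073709551615,
\]

with a = fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) (H28) and b = byte_count_add (H35, H36), which is exactly C1 and C2; C3 and C4 are the bounds on b itself.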
@@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h(ctx~)) %%% Simplified H2 on reading formula in, to give: %%% H2: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h(ctx~)) %%% Simplified H11 on reading formula in, to give: %%% H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H12 on reading formula in, to give: %%% H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, block__index__subtype__1__first <= i___1 and i___1 <= block__index__subtype__1__last -> spark__unsigned__byte__first <= element(block, [i___1]) and element( block, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H37 on reading formula in, to give: %%% H37: fld_hash_bit_len(fld_h(ctx~)) >= initialized_hash_bit_length__first %%% Simplified H38 on reading formula in, to give: %%% H38: fld_hash_bit_len(fld_h(ctx~)) <= initialized_hash_bit_length__last %%% Simplified H39 on reading formula in, to give: %%% H39: fld_byte_count(fld_h(ctx~)) >= skein_512_block_bytes_count__first %%% Simplified H40 on reading formula in, to give: %%% H40: fld_byte_count(fld_h(ctx~)) <= skein_512_block_bytes_count__last --- Hypothesis H42 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H44 has been replaced by "true". (It is already present, as H9) . %%% Simplified H50 on reading formula in, to give: %%% H50: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks__1, [i___1]) and element(ks__1, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H51 on reading formula in, to give: %%% H51: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts__2, [i___1]) and element(ts__2, [i___1]) <= spark__unsigned__u64__last) *** Proved C1: src_offset >= natural__first using hypothesis H52. *** Proved C2: src_offset <= natural__last using hypothesis H53. *** Proved C3: block__index__subtype__1__first = 0 using hypothesis H41. *** Proved C5: src_offset <= block__index__subtype__1__last using hypothesis H6. *** Proved C8: src_offset + 7 <= block__index__subtype__1__last using hypothesis H6. *** Proved C9: src_offset + spark__crypto__i8__last * 8 <= natural__last using hypothesis H7. -S- Applied substitution rule skein_512_pr_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H5: src_offset = starting_offset + (j - 1) * 64 New H8: starting_offset + (block_count - 1) * 64 + 63 <= block__index__subtype__1__last New H10: j < block_count -> src_offset + 64 <= natural__last -S- Applied substitution rule skein_512_pr_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H13: fld_byte_count(fld_h(ctx)) >= 0 New H31: starting_offset >= 0 New H35: byte_count_add >= 0 New H52: src_offset >= 0 -S- Applied substitution rule skein_512_pr_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. 
New H7: src_offset + spark__crypto__i8__last * 8 <= 2147483647 New H9: block__index__subtype__1__last <= 2147483647 New H14: fld_byte_count(fld_h(ctx)) <= 2147483647 New H32: starting_offset <= 2147483647 New H36: byte_count_add <= 2147483647 New H45: starting_offset <= 2147483584 New H53: src_offset <= 2147483647 New H10: j < block_count -> src_offset <= 2147483583 -S- Applied substitution rule skein_512_pr_rules(39). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. New H46: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod 18446744073709551616 >= spark__unsigned__u64__first New H47: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule skein_512_pr_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H19: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_pr_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H20: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule skein_512_pr_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_pr_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule skein_512_pr_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H30: for_all(i___1 : integer, block__index__subtype__1__first <= i___1 and i___1 <= block__index__subtype__1__last -> 0 <= element( block, [i___1]) and element(block, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_pr_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H30: for_all(i___1 : integer, block__index__subtype__1__first <= i___1 and i___1 <= block__index__subtype__1__last -> 0 <= element( block, [i___1]) and element(block, [i___1]) <= 255) -S- Applied substitution rule skein_512_pr_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H24: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_pr_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H25: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule skein_512_pr_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_pr_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. 
New H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule skein_512_pr_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H48: byte_count_add >= 0 New H50: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks__1, [i___1]) and element(ks__1, [i___1]) <= spark__unsigned__u64__last) New H51: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts__2, [i___1]) and element(ts__2, [i___1]) <= spark__unsigned__u64__last) New H46: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod 18446744073709551616 >= 0 -S- Applied substitution rule skein_512_pr_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H49: byte_count_add <= 18446744073709551615 New H47: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod 18446744073709551616 <= 18446744073709551615 New H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H50: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks__1, [i___1]) and element(ks__1, [i___1]) <= 18446744073709551615) New H51: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts__2, [i___1]) and element(ts__2, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(77). This was achieved by replacing all occurrences of spark__crypto__i3__first by: 0. New H51: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts__2, [i___1]) and element( ts__2, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(78). This was achieved by replacing all occurrences of spark__crypto__i3__last by: 2. New H51: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 2 -> 0 <= element(ts__2, [i___1]) and element(ts__2, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(82). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New C4: true -S- Applied substitution rule skein_512_pr_rules(83). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. New H7: src_offset <= 2147483591 New C6: src_offset + 56 + 7 >= block__index__subtype__1__first New C7: src_offset + 56 + 7 <= block__index__subtype__1__last -S- Applied substitution rule skein_512_pr_rules(87). This was achieved by replacing all occurrences of spark__crypto__i9__first by: 0. New H50: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks__1, [i___1]) and element( ks__1, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(88). This was achieved by replacing all occurrences of spark__crypto__i9__last by: 8. 
New H50: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 8 -> 0 <= element(ks__1, [i___1]) and element(ks__1, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(97). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H15: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_pr_rules(98). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H16: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_pr_rules(102). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H37: fld_hash_bit_len(fld_h(ctx~)) >= 1 -S- Applied substitution rule skein_512_pr_rules(103). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H38: fld_hash_bit_len(fld_h(ctx~)) <= 2147483640 -S- Applied substitution rule skein_512_pr_rules(107). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H12: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(108). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H12: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(112). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H39: fld_byte_count(fld_h(ctx~)) >= 0 -S- Applied substitution rule skein_512_pr_rules(113). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H40: fld_byte_count(fld_h(ctx~)) <= 64 -S- Applied substitution rule skein_512_pr_rules(117). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H11: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule skein_512_pr_rules(118). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H11: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule skein_512_pr_rules(122). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H33: block_count >= 1 -S- Applied substitution rule skein_512_pr_rules(123). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H34: block_count <= 33554431 %%% Simplified C6 further, to give: %%% C6: src_offset + 63 >= block__index__subtype__1__first %%% Simplified C7 further, to give: %%% C7: src_offset + 63 <= block__index__subtype__1__last *** Proved C4: true *** Proved C6: src_offset + 63 >= block__index__subtype__1__first using hypotheses H41 & H52. *** Proved C7: src_offset + 63 <= block__index__subtype__1__last using hypothesis H6. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_process_block_7. 
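Worked restatement of the index-bound conclusions just discharged for VC procedure_skein_512_process_block_6 (a sketch in terms of this log's hypothesis names, not Simplifier output). Both C6 and C7 reduce to bounds on src_offset + 63, since 56 + 7 = 63; abbreviating block__index__subtype__1__first as first and block__index__subtype__1__last as last:

\[
\texttt{src\_offset} \ge 0 \ \text{(H52)} \;\wedge\; \texttt{first} = 0 \ \text{(H41)} \;\Longrightarrow\; \texttt{src\_offset} + 63 \ge \texttt{first} \quad \text{(C6)},
\]
\[
\texttt{src\_offset} + 63 \le \texttt{last} \ \text{(H6)} \;\Longrightarrow\; \text{C7}.
\]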
@@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h(ctx~)) %%% Simplified H2 on reading formula in, to give: %%% H2: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h(ctx~)) %%% Simplified H11 on reading formula in, to give: %%% H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H12 on reading formula in, to give: %%% H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, block__index__subtype__1__first <= i___1 and i___1 <= block__index__subtype__1__last -> spark__unsigned__byte__first <= element(block, [i___1]) and element( block, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H37 on reading formula in, to give: %%% H37: fld_hash_bit_len(fld_h(ctx~)) >= initialized_hash_bit_length__first %%% Simplified H38 on reading formula in, to give: %%% H38: fld_hash_bit_len(fld_h(ctx~)) <= initialized_hash_bit_length__last %%% Simplified H39 on reading formula in, to give: %%% H39: fld_byte_count(fld_h(ctx~)) >= skein_512_block_bytes_count__first %%% Simplified H40 on reading formula in, to give: %%% H40: fld_byte_count(fld_h(ctx~)) <= skein_512_block_bytes_count__last --- Hypothesis H42 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H44 has been replaced by "true". (It is already present, as H9) . %%% Simplified H50 on reading formula in, to give: %%% H50: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks__1, [i___1]) and element(ks__1, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H51 on reading formula in, to give: %%% H51: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts__2, [i___1]) and element(ts__2, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H54 has been replaced by "true". (It is already present, as H52). --- Hypothesis H55 has been replaced by "true". (It is already present, as H53). --- Hypothesis H56 has been replaced by "true". (It is already present, as H41). --- Hypothesis H62 has been replaced by "true". (It is already present, as H7) . 
%%% Simplified H63 on reading formula in, to give: %%% H63: for_all(i_ : integer, spark__crypto__i8__first <= i_ and i_ <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element( w__3, [i_]) and element(w__3, [i_]) <= spark__unsigned__u64__last) %%% Simplified H64 on reading formula in, to give: %%% H64: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(w__3, [i___1]) and element(w__3, [i___1]) <= spark__unsigned__u64__last) %%% Simplified C1 on reading formula in, to give: %%% C1: for_all(i_ : integer, spark__crypto__i8__first <= i_ and i_ <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element( w__3, [i_]) and element(w__3, [i_]) <= spark__unsigned__u64__last) *** Proved C1: for_all(i_ : integer, spark__crypto__i8__first <= i_ and i_ <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element( w__3, [i_]) and element(w__3, [i_]) <= spark__unsigned__u64__last) using hypothesis H63. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_process_block_8. @@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h(ctx~)) %%% Simplified H2 on reading formula in, to give: %%% H2: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h(ctx~)) %%% Simplified H11 on reading formula in, to give: %%% H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H12 on reading formula in, to give: %%% H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, block__index__subtype__1__first <= i___1 and i___1 <= block__index__subtype__1__last -> spark__unsigned__byte__first <= element(block, [i___1]) and element( block, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H37 on reading formula in, to give: %%% H37: fld_hash_bit_len(fld_h(ctx~)) >= initialized_hash_bit_length__first %%% Simplified H38 on reading formula in, to give: %%% H38: fld_hash_bit_len(fld_h(ctx~)) <= initialized_hash_bit_length__last %%% Simplified H39 on reading formula in, to give: %%% H39: fld_byte_count(fld_h(ctx~)) >= skein_512_block_bytes_count__first %%% Simplified H40 on reading formula in, to give: %%% H40: fld_byte_count(fld_h(ctx~)) <= skein_512_block_bytes_count__last --- Hypothesis H42 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H44 has been replaced by "true". (It is already present, as H9) . %%% Simplified H50 on reading formula in, to give: %%% H50: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks__1, [i___1]) and element(ks__1, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H51 on reading formula in, to give: %%% H51: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts__2, [i___1]) and element(ts__2, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H54 has been replaced by "true". (It is already present, as H52). 
--- Hypothesis H55 has been replaced by "true". (It is already present, as H53). --- Hypothesis H56 has been replaced by "true". (It is already present, as H41). --- Hypothesis H62 has been replaced by "true". (It is already present, as H7) . %%% Simplified H63 on reading formula in, to give: %%% H63: for_all(i_ : integer, spark__crypto__i8__first <= i_ and i_ <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element( w__3, [i_]) and element(w__3, [i_]) <= spark__unsigned__u64__last) %%% Simplified H64 on reading formula in, to give: %%% H64: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(w__3, [i___1]) and element(w__3, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H65 has been replaced by "true". (It is already present, as H63). %%% Simplified H66 on reading formula in, to give: %%% H66: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__4, [i___1]) and element(x__4, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H67 on reading formula in, to give: %%% H67: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__5, [i___1]) and element(x__5, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H68 on reading formula in, to give: %%% H68: fld_hash_bit_len(fld_h(ctx__6)) = fld_hash_bit_len(fld_h(ctx)) %%% Simplified H69 on reading formula in, to give: %%% H69: fld_byte_count(fld_h(ctx__6)) = fld_byte_count(fld_h(ctx)) %%% Simplified H70 on reading formula in, to give: %%% H70: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__6), [i___2]) and element(fld_b(ctx__6), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H71 on reading formula in, to give: %%% H71: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__6), [i___1]) and element(fld_x(ctx__6), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H92 has been replaced by "true". (It is already present, as H89). --- Hypothesis H93 has been replaced by "true". (It is already present, as H90). *** Proved C1: j + 1 >= positive_block_512_count_t__first using hypothesis H89. -S- Applied substitution rule skein_512_pr_rules(123). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H34: block_count <= 33554431 New H90: j <= 33554431 New C2: j <= 33554430 *** Proved C2: j <= 33554430 using hypotheses H34 & H91. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_process_block_9. 
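Worked restatement of the loop-counter conclusions just discharged for VC procedure_skein_512_process_block_8 (a sketch using this log's hypothesis names, not Simplifier output). The counter type positive_block_512_count_t ranges over 1 .. 33554431 = 2^25 - 1, and H91 is the loop-continuation condition (restructured by the Simplifier as j < block_count, as shown under VC 9 below), so:

\[
j \ge 1 \ \text{(H89)} \;\Longrightarrow\; j + 1 \ge 1 \quad \text{(C1)},
\]
\[
j < \texttt{block\_count} \ \text{(H91)} \;\wedge\; \texttt{block\_count} \le 33554431 \ \text{(H34)} \;\Longrightarrow\; j \le 33554430 \quad \text{(C2)},
\]

i.e. the incremented counter j + 1 remains within positive_block_512_count_t.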
@@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h(ctx~)) %%% Simplified H2 on reading formula in, to give: %%% H2: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h(ctx~)) %%% Simplified H11 on reading formula in, to give: %%% H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H12 on reading formula in, to give: %%% H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, block__index__subtype__1__first <= i___1 and i___1 <= block__index__subtype__1__last -> spark__unsigned__byte__first <= element(block, [i___1]) and element( block, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H37 on reading formula in, to give: %%% H37: fld_hash_bit_len(fld_h(ctx~)) >= initialized_hash_bit_length__first %%% Simplified H38 on reading formula in, to give: %%% H38: fld_hash_bit_len(fld_h(ctx~)) <= initialized_hash_bit_length__last %%% Simplified H39 on reading formula in, to give: %%% H39: fld_byte_count(fld_h(ctx~)) >= skein_512_block_bytes_count__first %%% Simplified H40 on reading formula in, to give: %%% H40: fld_byte_count(fld_h(ctx~)) <= skein_512_block_bytes_count__last --- Hypothesis H42 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H44 has been replaced by "true". (It is already present, as H9) . %%% Simplified H50 on reading formula in, to give: %%% H50: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks__1, [i___1]) and element(ks__1, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H51 on reading formula in, to give: %%% H51: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts__2, [i___1]) and element(ts__2, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H54 has been replaced by "true". (It is already present, as H52). --- Hypothesis H55 has been replaced by "true". (It is already present, as H53). --- Hypothesis H56 has been replaced by "true". (It is already present, as H41). --- Hypothesis H62 has been replaced by "true". (It is already present, as H7) . %%% Simplified H63 on reading formula in, to give: %%% H63: for_all(i_ : integer, spark__crypto__i8__first <= i_ and i_ <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element( w__3, [i_]) and element(w__3, [i_]) <= spark__unsigned__u64__last) %%% Simplified H64 on reading formula in, to give: %%% H64: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(w__3, [i___1]) and element(w__3, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H65 has been replaced by "true". (It is already present, as H63). 
%%% Simplified H66 on reading formula in, to give: %%% H66: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__4, [i___1]) and element(x__4, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H67 on reading formula in, to give: %%% H67: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__5, [i___1]) and element(x__5, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H68 on reading formula in, to give: %%% H68: fld_hash_bit_len(fld_h(ctx__6)) = fld_hash_bit_len(fld_h(ctx)) %%% Simplified H69 on reading formula in, to give: %%% H69: fld_byte_count(fld_h(ctx__6)) = fld_byte_count(fld_h(ctx)) %%% Simplified H70 on reading formula in, to give: %%% H70: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__6), [i___2]) and element(fld_b(ctx__6), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H71 on reading formula in, to give: %%% H71: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__6), [i___1]) and element(fld_x(ctx__6), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H92 has been replaced by "true". (It is already present, as H89). --- Hypothesis H93 has been replaced by "true". (It is already present, as H90). --- Hypothesis H96 has been replaced by "true". (It is already present, as H52). --- Hypothesis H97 has been replaced by "true". (It is already present, as H53). -S- Applied substitution rule skein_512_pr_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H13: fld_byte_count(fld_h(ctx)) >= 0 New H31: starting_offset >= 0 New H35: byte_count_add >= 0 New H52: src_offset >= 0 New H72: fld_byte_count(fld_h(ctx__6)) >= 0 New C1: src_offset + skein_512_block_bytes_c >= 0 -S- Applied substitution rule skein_512_pr_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: src_offset + spark__crypto__i8__last * 8 <= 2147483647 New H9: block__index__subtype__1__last <= 2147483647 New H10: j < block_count -> src_offset + skein_512_block_bytes_c <= 2147483647 New H14: fld_byte_count(fld_h(ctx)) <= 2147483647 New H32: starting_offset <= 2147483647 New H36: byte_count_add <= 2147483647 New H45: starting_offset <= 2147483584 New H53: src_offset <= 2147483647 New H73: fld_byte_count(fld_h(ctx__6)) <= 2147483647 New C2: src_offset + skein_512_block_bytes_c <= 2147483647 >>> Restructured hypothesis H91 into: >>> H91: j < block_count +++ Using "A->B, A |- B" on hypotheses H10 & H91 yields a new hypothesis: +++ H98: src_offset + skein_512_block_bytes_c <= 2147483647 -S- Applied substitution rule skein_512_pr_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H5: src_offset = starting_offset + (j - 1) * 64 New H8: starting_offset + (block_count - 1) * 64 + 63 <= block__index__subtype__1__last New H10: j < block_count -> src_offset <= 2147483583 New H98: src_offset <= 2147483583 New C1: src_offset >= - 64 New C2: src_offset <= 2147483583 -S- Applied substitution rule skein_512_pr_rules(39). This was achieved by replacing all occurrences of interfaces__unsigned_64__modulus by: 18446744073709551616. 
New H46: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod 18446744073709551616 >= spark__unsigned__u64__first New H47: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod 18446744073709551616 <= spark__unsigned__u64__last -S- Applied substitution rule skein_512_pr_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H19: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H78: fld_field_type(fld_tweak_words(fld_h(ctx__6))) >= 0 -S- Applied substitution rule skein_512_pr_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H20: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H79: fld_field_type(fld_tweak_words(fld_h(ctx__6))) <= 63 -S- Applied substitution rule skein_512_pr_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H81: fld_tree_level(fld_tweak_words(fld_h(ctx__6))) >= 0 -S- Applied substitution rule skein_512_pr_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H82: fld_tree_level(fld_tweak_words(fld_h(ctx__6))) <= 127 -S- Applied substitution rule skein_512_pr_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H30: for_all(i___1 : integer, block__index__subtype__1__first <= i___1 and i___1 <= block__index__subtype__1__last -> 0 <= element( block, [i___1]) and element(block, [i___1]) <= spark__unsigned__byte__last) New H70: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__6), [i___2]) and element(fld_b(ctx__6), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_pr_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H30: for_all(i___1 : integer, block__index__subtype__1__first <= i___1 and i___1 <= block__index__subtype__1__last -> 0 <= element( block, [i___1]) and element(block, [i___1]) <= 255) New H70: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__6), [i___2]) and element(fld_b(ctx__6), [i___2]) <= 255) -S- Applied substitution rule skein_512_pr_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H24: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H83: fld_reserved(fld_tweak_words(fld_h(ctx__6))) >= 0 -S- Applied substitution rule skein_512_pr_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H25: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H84: fld_reserved(fld_tweak_words(fld_h(ctx__6))) <= 65535 -S- Applied substitution rule skein_512_pr_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. 
New H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H85: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__6))) >= 0 -S- Applied substitution rule skein_512_pr_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H86: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__6))) <= 4294967295 -S- Applied substitution rule skein_512_pr_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H48: byte_count_add >= 0 New H50: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks__1, [i___1]) and element(ks__1, [i___1]) <= spark__unsigned__u64__last) New H51: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts__2, [i___1]) and element(ts__2, [i___1]) <= spark__unsigned__u64__last) New H63: for_all(i_ : integer, spark__crypto__i8__first <= i_ and i_ <= spark__crypto__i8__last -> 0 <= element(w__3, [i_]) and element(w__3, [i_]) <= spark__unsigned__u64__last) New H64: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(w__3, [i___1]) and element(w__3, [i___1]) <= spark__unsigned__u64__last) New H66: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__4, [i___1]) and element(x__4, [i___1]) <= spark__unsigned__u64__last) New H67: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__5, [i___1]) and element(x__5, [i___1]) <= spark__unsigned__u64__last) New H71: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__6), [i___1]) and element(fld_x(ctx__6), [i___1]) <= spark__unsigned__u64__last) New H87: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__6))) >= 0 New H46: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod 18446744073709551616 >= 0 -S- Applied substitution rule skein_512_pr_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H49: byte_count_add <= 18446744073709551615 New H88: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__6))) <= 18446744073709551615 New H47: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod 18446744073709551616 <= 18446744073709551615 New H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H50: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks__1, [i___1]) and element(ks__1, [i___1]) <= 18446744073709551615) New H51: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts__2, [i___1]) and element(ts__2, [i___1]) <= 18446744073709551615) New H63: for_all(i_ : integer, spark__crypto__i8__first <= i_ and i_ <= spark__crypto__i8__last -> 0 <= element(w__3, [i_]) and element(w__3, [i_]) <= 18446744073709551615) New H64: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(w__3, [i___1]) and element(w__3, [i___1]) <= 18446744073709551615) New H66: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__4, [i___1]) and element(x__4, [i___1]) <= 18446744073709551615) New H67: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__5, [i___1]) and element(x__5, [i___1]) <= 18446744073709551615) New H71: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__6), [i___1]) and element(fld_x(ctx__6), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(77). This was achieved by replacing all occurrences of spark__crypto__i3__first by: 0. New H51: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i3__last -> 0 <= element(ts__2, [i___1]) and element( ts__2, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(78). This was achieved by replacing all occurrences of spark__crypto__i3__last by: 2. New H51: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 2 -> 0 <= element(ts__2, [i___1]) and element(ts__2, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(82). This was achieved by replacing all occurrences of spark__crypto__i8__first by: 0. New H57: true New H63: for_all(i_ : integer, 0 <= i_ and i_ <= spark__crypto__i8__last -> 0 <= element(w__3, [i_]) and element(w__3, [i_]) <= 18446744073709551615) New H64: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(w__3, [i___1]) and element( w__3, [i___1]) <= 18446744073709551615) New H66: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__4, [i___1]) and element( x__4, [i___1]) <= 18446744073709551615) New H67: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i8__last -> 0 <= element(x__5, [i___1]) and element( x__5, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(83). This was achieved by replacing all occurrences of spark__crypto__i8__last by: 7. 
New H7: src_offset <= 2147483591 New H59: src_offset + 56 + 7 >= block__index__subtype__1__first New H60: src_offset + 56 + 7 <= block__index__subtype__1__last New H63: for_all(i_ : integer, 0 <= i_ and i_ <= 7 -> 0 <= element(w__3, [ i_]) and element(w__3, [i_]) <= 18446744073709551615) New H64: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(w__3, [i___1]) and element(w__3, [i___1]) <= 18446744073709551615) New H66: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__4, [i___1]) and element(x__4, [i___1]) <= 18446744073709551615) New H67: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__5, [i___1]) and element(x__5, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(87). This was achieved by replacing all occurrences of spark__crypto__i9__first by: 0. New H50: for_all(i___1 : integer, 0 <= i___1 and i___1 <= spark__crypto__i9__last -> 0 <= element(ks__1, [i___1]) and element( ks__1, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(88). This was achieved by replacing all occurrences of spark__crypto__i9__last by: 8. New H50: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 8 -> 0 <= element(ks__1, [i___1]) and element(ks__1, [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(97). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H15: fld_hash_bit_len(fld_h(ctx)) >= 0 New H74: fld_hash_bit_len(fld_h(ctx__6)) >= 0 -S- Applied substitution rule skein_512_pr_rules(98). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H16: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H75: fld_hash_bit_len(fld_h(ctx__6)) <= 2147483640 -S- Applied substitution rule skein_512_pr_rules(102). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H37: fld_hash_bit_len(fld_h(ctx~)) >= 1 -S- Applied substitution rule skein_512_pr_rules(103). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H38: fld_hash_bit_len(fld_h(ctx~)) <= 2147483640 -S- Applied substitution rule skein_512_pr_rules(107). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H12: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H71: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__6), [ i___1]) and element(fld_x(ctx__6), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(108). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H12: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H71: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__6), [i___1]) and element(fld_x(ctx__6), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_pr_rules(112). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H39: fld_byte_count(fld_h(ctx~)) >= 0 -S- Applied substitution rule skein_512_pr_rules(113). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. 
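Editorial sketch: hypotheses H59/H60 bound the highest byte index touched when one 64-byte block is loaded as eight 64-bit words: word 7 ends at src_offset + 56 + 7, i.e. src_offset + 63 after the simplification that follows. A sketch of that indexing, assuming least-significant-byte-first assembly and illustrative names (Block_Read_Demo, Byte_Seq, Word_Seq); the package's actual block-loading code may be organized differently.

with Interfaces; use Interfaces;

procedure Block_Read_Demo is
   type Byte_Seq is array (Natural range <>) of Unsigned_8;
   type Word_Seq is array (Natural range 0 .. 7) of Unsigned_64;

   Block      : constant Byte_Seq (0 .. 127) := (others => 16#AB#);
   Src_Offset : constant Natural := 64;      --  start of the second block
   W          : Word_Seq := (others => 0);
begin
   for I in Word_Seq'Range loop
      for J in 0 .. 7 loop
         --  Word I is assembled from bytes Src_Offset + 8*I .. Src_Offset
         --  + 8*I + 7, least significant byte first; the largest index
         --  ever used is Src_Offset + 8*7 + 7 = Src_Offset + 63, matching
         --  H59/H60 above once "+ 56 + 7" is folded to "+ 63".
         W (I) := W (I) or
           Shift_Left (Unsigned_64 (Block (Src_Offset + 8 * I + J)), 8 * J);
      end loop;
   end loop;
   pragma Assert (W (0) = 16#ABABABABABABABAB#);
end Block_Read_Demo;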
New H40: fld_byte_count(fld_h(ctx~)) <= 64 -S- Applied substitution rule skein_512_pr_rules(117). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H11: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H70: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx__6), [ i___2]) and element(fld_b(ctx__6), [i___2]) <= 255) -S- Applied substitution rule skein_512_pr_rules(118). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H11: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H70: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__6), [i___2]) and element(fld_b(ctx__6), [i___2]) <= 255) -S- Applied substitution rule skein_512_pr_rules(122). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H33: block_count >= 1 New H89: j >= 1 New H94: j >= 0 -S- Applied substitution rule skein_512_pr_rules(123). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H34: block_count <= 33554431 New H90: j <= 33554431 New H95: j <= 33554430 %%% Simplified H59 further, to give: %%% H59: src_offset + 63 >= block__index__subtype__1__first %%% Simplified H60 further, to give: %%% H60: src_offset + 63 <= block__index__subtype__1__last *** Proved C1: src_offset >= - 64 using hypothesis H52. *** Proved C2: src_offset <= 2147483583 using hypothesis H98. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_process_block_10. @@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h(ctx~)) %%% Simplified H2 on reading formula in, to give: %%% H2: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h(ctx~)) %%% Simplified H11 on reading formula in, to give: %%% H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H12 on reading formula in, to give: %%% H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, block__index__subtype__1__first <= i___1 and i___1 <= block__index__subtype__1__last -> spark__unsigned__byte__first <= element(block, [i___1]) and element( block, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H37 on reading formula in, to give: %%% H37: fld_hash_bit_len(fld_h(ctx~)) >= initialized_hash_bit_length__first %%% Simplified H38 on reading formula in, to give: %%% H38: fld_hash_bit_len(fld_h(ctx~)) <= initialized_hash_bit_length__last %%% Simplified H39 on reading formula in, to give: %%% H39: fld_byte_count(fld_h(ctx~)) >= skein_512_block_bytes_count__first %%% Simplified H40 on reading formula in, to give: %%% H40: fld_byte_count(fld_h(ctx~)) <= skein_512_block_bytes_count__last --- Hypothesis H42 has been replaced by "true". (It is already present, as H8) . 
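Editorial sketch: the conclusion proved just above, src_offset <= 2147483583, and the block-count bound 33554431 substituted earlier rest on plain arithmetic over the constants in these VCs (which assume integer__last = 2147483647): 2147483583 = 2147483647 - 64, and (33554431 - 1) * 64 + 63 = 2147483583. A small static check of those identities; the procedure name and constant names are illustrative only.

procedure Offset_Arithmetic_Demo is
   --  Constants as they appear in these VCs (32-bit Integer model).
   VC_Integer_Last : constant := 2_147_483_647;
   Max_Block_Count : constant := 33_554_431;     --  positive_block_512_count_t__last
   Max_Src_Offset  : constant := 2_147_483_583;  --  VC_Integer_Last - 64
begin
   --  The last byte index of a maximal run of 64-byte blocks starting at
   --  offset 0 is (Max_Block_Count - 1) * 64 + 63, which equals the
   --  proved offset bound:
   pragma Assert ((Max_Block_Count - 1) * 64 + 63 = Max_Src_Offset);
   --  And an offset at that bound still leaves one full 64-byte step
   --  before reaching the VC's integer__last:
   pragma Assert (Max_Src_Offset + 64 = VC_Integer_Last);
   null;
end Offset_Arithmetic_Demo;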
--- Hypothesis H44 has been replaced by "true". (It is already present, as H9) . %%% Simplified H50 on reading formula in, to give: %%% H50: for_all(i___1 : integer, spark__crypto__i9__first <= i___1 and i___1 <= spark__crypto__i9__last -> spark__unsigned__u64__first <= element(ks__1, [i___1]) and element(ks__1, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H51 on reading formula in, to give: %%% H51: for_all(i___1 : integer, spark__crypto__i3__first <= i___1 and i___1 <= spark__crypto__i3__last -> spark__unsigned__u64__first <= element(ts__2, [i___1]) and element(ts__2, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H54 has been replaced by "true". (It is already present, as H52). --- Hypothesis H55 has been replaced by "true". (It is already present, as H53). --- Hypothesis H56 has been replaced by "true". (It is already present, as H41). --- Hypothesis H62 has been replaced by "true". (It is already present, as H7) . %%% Simplified H63 on reading formula in, to give: %%% H63: for_all(i_ : integer, spark__crypto__i8__first <= i_ and i_ <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element( w__3, [i_]) and element(w__3, [i_]) <= spark__unsigned__u64__last) %%% Simplified H64 on reading formula in, to give: %%% H64: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(w__3, [i___1]) and element(w__3, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H65 has been replaced by "true". (It is already present, as H63). %%% Simplified H66 on reading formula in, to give: %%% H66: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__4, [i___1]) and element(x__4, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H67 on reading formula in, to give: %%% H67: for_all(i___1 : integer, spark__crypto__i8__first <= i___1 and i___1 <= spark__crypto__i8__last -> spark__unsigned__u64__first <= element(x__5, [i___1]) and element(x__5, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H68 on reading formula in, to give: %%% H68: fld_hash_bit_len(fld_h(ctx__6)) = fld_hash_bit_len(fld_h(ctx)) %%% Simplified H69 on reading formula in, to give: %%% H69: fld_byte_count(fld_h(ctx__6)) = fld_byte_count(fld_h(ctx)) %%% Simplified H70 on reading formula in, to give: %%% H70: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__6), [i___2]) and element(fld_b(ctx__6), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H71 on reading formula in, to give: %%% H71: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__6), [i___1]) and element(fld_x(ctx__6), [i___1]) <= spark__unsigned__u64__last) %%% Simplified C1 on reading formula in, to give: %%% C1: fld_hash_bit_len(fld_h(ctx__6)) >= initialized_hash_bit_length__first %%% Simplified C2 on reading formula in, to give: %%% C2: fld_hash_bit_len(fld_h(ctx__6)) <= initialized_hash_bit_length__last %%% Simplified C3 on reading formula in, to give: %%% C3: fld_hash_bit_len(fld_h(ctx__6)) = fld_hash_bit_len(fld_h(ctx~)) %%% Simplified C4 on reading formula in, to give: %%% C4: fld_byte_count(fld_h(ctx__6)) >= skein_512_block_bytes_count__first %%% Simplified C5 on reading formula in, to give: %%% C5: fld_byte_count(fld_h(ctx__6)) 
<= skein_512_block_bytes_count__last %%% Simplified C6 on reading formula in, to give: %%% C6: fld_byte_count(fld_h(ctx__6)) = fld_byte_count(fld_h(ctx~)) *** Proved C1: fld_hash_bit_len(fld_h(ctx__6)) >= initialized_hash_bit_length__first using hypotheses H1, H37 & H68. *** Proved C2: fld_hash_bit_len(fld_h(ctx__6)) <= initialized_hash_bit_length__last using hypotheses H1, H38 & H68. *** Proved C3: fld_hash_bit_len(fld_h(ctx__6)) = fld_hash_bit_len(fld_h(ctx~)) using hypotheses H1 & H68. *** Proved C4: fld_byte_count(fld_h(ctx__6)) >= skein_512_block_bytes_count__first using hypotheses H2, H39 & H69. *** Proved C5: fld_byte_count(fld_h(ctx__6)) <= skein_512_block_bytes_count__last using hypotheses H2, H40 & H69. *** Proved C6: fld_byte_count(fld_h(ctx__6)) = fld_byte_count(fld_h(ctx~)) using hypotheses H2 & H69. *** PROVED VC. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block.siv0000644000175000017500000002541411712513676030312 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Process_Block For path(s) from start to run-time check associated with statement of line 620: procedure_skein_512_process_block_1. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 621: procedure_skein_512_process_block_2. *** true . /* all conclusions proved */ For path(s) from start to assertion of line 624: procedure_skein_512_process_block_3. H1: fld_hash_bit_len(fld_h(ctx)) >= 1 . H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 . H3: fld_byte_count(fld_h(ctx)) >= 0 . H4: fld_byte_count(fld_h(ctx)) <= 64 . H5: block__index__subtype__1__first = 0 . H6: starting_offset + (block_count - 1) * 64 + 63 <= block__index__subtype__1__last . H7: starting_offset + 63 <= block__index__subtype__1__last . H8: block__index__subtype__1__last <= 2147483647 . H9: starting_offset <= 2147483584 . H10: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) . H11: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x( ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) . H12: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 . H13: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 . H14: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 . H15: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 . H16: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 . H17: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 . H18: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 . H19: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 . H20: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 . H21: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 . H22: for_all(i___1 : integer, block__index__subtype__1__first <= i___1 and i___1 <= block__index__subtype__1__last -> 0 <= element(block, [i___1] ) and element(block, [i___1]) <= 255) . H23: starting_offset >= 0 . H24: block_count >= 1 . H25: block_count <= 33554431 . H26: byte_count_add >= 0 . H27: byte_count_add <= 2147483647 . H28: integer__size >= 0 . H29: natural__size >= 0 . H30: spark__unsigned__u6__size >= 0 . H31: spark__unsigned__u7__size >= 0 . H32: spark__unsigned__byte__size >= 0 . H33: spark__unsigned__u16__size >= 0 . 
H34: spark__unsigned__u32__size >= 0 . H35: spark__unsigned__u64__size >= 0 . H36: spark__crypto__i3__size >= 0 . H37: spark__crypto__i8__size >= 0 . H38: spark__crypto__i9__size >= 0 . H39: spark__crypto__word_count_t__size >= 0 . H40: hash_bit_length__size >= 0 . H41: initialized_hash_bit_length__size >= 0 . H42: skein_512_state_words_index__size >= 0 . H43: skein_512_block_bytes_count__size >= 0 . H44: skein_512_block_bytes_index__size >= 0 . H45: positive_block_512_count_t__size >= 0 . H46: skein_512_context__size >= 0 . H47: context_header__size >= 0 . H48: block__index__subtype__1__first <= block__index__subtype__1__last . H49: block__index__subtype__1__first >= 0 . H50: block__index__subtype__1__last >= 0 . H51: block__index__subtype__1__first <= 2147483647 . -> C1: 1 < block_count -> starting_offset <= 2147483583 . For path(s) from assertion of line 624 to assertion of line 624: procedure_skein_512_process_block_4. H1: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h(ctx~)) . H2: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h(ctx~)) . H3: j >= 1 . H4: starting_offset + (j - 1) * 64 + 63 <= block__index__subtype__1__last . H5: starting_offset + (block_count - 1) * 64 + 63 <= block__index__subtype__1__last . H6: block__index__subtype__1__last <= 2147483647 . H7: starting_offset + (j - 1) * 64 <= 2147483583 . H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) . H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x( ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) . H10: fld_byte_count(fld_h(ctx)) >= 0 . H11: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 . H12: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 . H13: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 . H14: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 . H15: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 . H16: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 . H17: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 . H18: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 . H19: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 . H20: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 . H21: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 . H22: for_all(i___1 : integer, block__index__subtype__1__first <= i___1 and i___1 <= block__index__subtype__1__last -> 0 <= element(block, [i___1] ) and element(block, [i___1]) <= 255) . H23: starting_offset >= 0 . H24: block_count <= 33554431 . H25: byte_count_add >= 0 . H26: byte_count_add <= 2147483647 . H27: fld_hash_bit_len(fld_h(ctx~)) >= 1 . H28: fld_hash_bit_len(fld_h(ctx~)) <= 2147483640 . H29: fld_byte_count(fld_h(ctx~)) >= 0 . H30: fld_byte_count(fld_h(ctx~)) <= 64 . H31: block__index__subtype__1__first = 0 . H32: starting_offset + 63 <= block__index__subtype__1__last . H33: starting_offset <= 2147483584 . H34: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod 18446744073709551616 >= 0 . H35: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod 18446744073709551616 <= 18446744073709551615 . H36: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 8 -> 0 <= element( ks__1, [i___1]) and element(ks__1, [i___1]) <= 18446744073709551615) . H37: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 2 -> 0 <= element( ts__2, [i___1]) and element(ts__2, [i___1]) <= 18446744073709551615) . H38: starting_offset + (j - 1) * 64 >= 0 . 
H39: starting_offset + (j - 1) * 64 + 63 >= block__index__subtype__1__first . H40: for_all(i_ : integer, 0 <= i_ and i_ <= 7 -> 0 <= element(w__3, [i_]) and element(w__3, [i_]) <= 18446744073709551615) . H41: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(w__3, [i___1]) and element(w__3, [i___1]) <= 18446744073709551615) . H42: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__4, [i___1]) and element(x__4, [i___1]) <= 18446744073709551615) . H43: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x__5, [i___1]) and element(x__5, [i___1]) <= 18446744073709551615) . H44: fld_hash_bit_len(fld_h(ctx__6)) = fld_hash_bit_len(fld_h(ctx)) . H45: fld_byte_count(fld_h(ctx__6)) = fld_byte_count(fld_h(ctx)) . H46: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element( fld_b(ctx__6), [i___2]) and element(fld_b(ctx__6), [i___2]) <= 255) . H47: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x( ctx__6), [i___1]) and element(fld_x(ctx__6), [i___1]) <= 18446744073709551615) . H48: fld_byte_count(fld_h(ctx)) >= 0 . H49: fld_byte_count(fld_h(ctx)) <= 2147483647 . H50: fld_hash_bit_len(fld_h(ctx)) >= 0 . H51: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 . H52: fld_field_type(fld_tweak_words(fld_h(ctx__6))) >= 0 . H53: fld_field_type(fld_tweak_words(fld_h(ctx__6))) <= 63 . H54: fld_tree_level(fld_tweak_words(fld_h(ctx__6))) >= 0 . H55: fld_tree_level(fld_tweak_words(fld_h(ctx__6))) <= 127 . H56: fld_reserved(fld_tweak_words(fld_h(ctx__6))) >= 0 . H57: fld_reserved(fld_tweak_words(fld_h(ctx__6))) <= 65535 . H58: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__6))) >= 0 . H59: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__6))) <= 4294967295 . H60: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__6))) >= 0 . H61: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__6))) <= 18446744073709551615 . H62: j < block_count . H63: j <= 33554430 . H64: starting_offset + (j - 1) * 64 <= 2147483583 . H65: integer__size >= 0 . H66: natural__size >= 0 . H67: spark__unsigned__u6__size >= 0 . H68: spark__unsigned__u7__size >= 0 . H69: spark__unsigned__byte__size >= 0 . H70: spark__unsigned__u16__size >= 0 . H71: spark__unsigned__u32__size >= 0 . H72: spark__unsigned__u64__size >= 0 . H73: spark__crypto__i3__size >= 0 . H74: spark__crypto__i8__size >= 0 . H75: spark__crypto__i9__size >= 0 . H76: spark__crypto__word_count_t__size >= 0 . H77: hash_bit_length__size >= 0 . H78: initialized_hash_bit_length__size >= 0 . H79: skein_512_state_words_index__size >= 0 . H80: skein_512_block_bytes_count__size >= 0 . H81: skein_512_block_bytes_index__size >= 0 . H82: positive_block_512_count_t__size >= 0 . H83: skein_512_context__size >= 0 . H84: context_header__size >= 0 . H85: block__index__subtype__1__first <= block__index__subtype__1__last . H86: block__index__subtype__1__first >= 0 . H87: block__index__subtype__1__last >= 0 . H88: block__index__subtype__1__first <= 2147483647 . -> C1: starting_offset + (j - 1) * 64 + 127 <= block__index__subtype__1__last . C2: starting_offset + (j - 1) * 64 <= 2147483527 . C3: j + 1 < block_count -> starting_offset + (j - 1) * 64 <= 2147483519 . For path(s) from assertion of line 624 to run-time check associated with statement of line 637: procedure_skein_512_process_block_5. *** true . /* all conclusions proved */ For path(s) from assertion of line 624 to precondition check associated with statement of line 643: procedure_skein_512_process_block_6. *** true . 
/* all conclusions proved */ For path(s) from assertion of line 624 to check associated with statement of line 647: procedure_skein_512_process_block_7. *** true . /* all conclusions proved */ For path(s) from assertion of line 624 to run-time check associated with statement of line 661: procedure_skein_512_process_block_8. *** true . /* all conclusions proved */ For path(s) from assertion of line 624 to run-time check associated with statement of line 662: procedure_skein_512_process_block_9. *** true . /* all conclusions proved */ For path(s) from assertion of line 624 to finish: procedure_skein_512_process_block_10. *** true . /* all conclusions proved */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/put_64_lsb_first.vcg0000644000175000017500000004652211712513676027053 0ustar eugeneugen ******************************************************* Semantic Analysis of SPARK Text Examiner Pro Edition ******************************************************* procedure Skein.Put_64_LSB_First For path(s) from start to run-time check associated with statement of line 102: procedure_put_64_lsb_first_1. H1: dst__index__subtype__1__first = 0 . H2: src__index__subtype__1__first = 0 . H3: dst__index__subtype__1__last >= dst_offset + ( byte_count - 1) . H4: byte_count <= (src__index__subtype__1__last + 1) * 8 . H5: for_all(i___1: integer, ((i___1 >= dst__index__subtype__1__first) and (i___1 <= dst__index__subtype__1__last)) -> ((element(dst, [ i___1]) >= spark__unsigned__byte__first) and (element( dst, [i___1]) <= spark__unsigned__byte__last))) . H6: dst_offset >= natural__first . H7: dst_offset <= natural__last . H8: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__u64__first) and (element( src, [i___1]) <= spark__unsigned__u64__last))) . H9: byte_count >= natural__first . H10: byte_count <= natural__last . H11: byte_count >= 1 . -> C1: byte_count - 1 >= integer__first . C2: byte_count - 1 <= integer__last . C3: byte_count - 1 >= integer__base__first . C4: byte_count - 1 <= integer__base__last . C5: 0 >= integer__first . C6: 0 <= integer__last . For path(s) from start to run-time check associated with statement of line 102: procedure_put_64_lsb_first_2. H1: dst__index__subtype__1__first = 0 . H2: src__index__subtype__1__first = 0 . H3: dst__index__subtype__1__last >= dst_offset + ( byte_count - 1) . H4: byte_count <= (src__index__subtype__1__last + 1) * 8 . H5: for_all(i___1: integer, ((i___1 >= dst__index__subtype__1__first) and (i___1 <= dst__index__subtype__1__last)) -> ((element(dst, [ i___1]) >= spark__unsigned__byte__first) and (element( dst, [i___1]) <= spark__unsigned__byte__last))) . H6: dst_offset >= natural__first . H7: dst_offset <= natural__last . H8: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__u64__first) and (element( src, [i___1]) <= spark__unsigned__u64__last))) . H9: byte_count >= natural__first . H10: byte_count <= natural__last . H11: byte_count >= 1 . H12: byte_count - 1 >= integer__first . H13: byte_count - 1 <= integer__last . H14: byte_count - 1 >= integer__base__first . H15: byte_count - 1 <= integer__base__last . H16: 0 >= integer__first . H17: 0 <= integer__last . -> C1: (0 <= byte_count - 1) -> ((byte_count - 1 >= natural__first) and ( byte_count - 1 <= natural__last)) . 
C2: (0 <= byte_count - 1) -> ((0 >= natural__first) and (0 <= natural__last)) . For path(s) from start to assertion of line 103: procedure_put_64_lsb_first_3. H1: dst__index__subtype__1__first = 0 . H2: src__index__subtype__1__first = 0 . H3: dst__index__subtype__1__last >= dst_offset + ( byte_count - 1) . H4: byte_count <= (src__index__subtype__1__last + 1) * 8 . H5: for_all(i___1: integer, ((i___1 >= dst__index__subtype__1__first) and (i___1 <= dst__index__subtype__1__last)) -> ((element(dst, [ i___1]) >= spark__unsigned__byte__first) and (element( dst, [i___1]) <= spark__unsigned__byte__last))) . H6: dst_offset >= natural__first . H7: dst_offset <= natural__last . H8: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__u64__first) and (element( src, [i___1]) <= spark__unsigned__u64__last))) . H9: byte_count >= natural__first . H10: byte_count <= natural__last . H11: byte_count >= 1 . H12: byte_count - 1 >= integer__first . H13: byte_count - 1 <= integer__last . H14: byte_count - 1 >= integer__base__first . H15: byte_count - 1 <= integer__base__last . H16: 0 >= integer__first . H17: 0 <= integer__last . H18: (0 <= byte_count - 1) -> ((byte_count - 1 >= natural__first) and ( byte_count - 1 <= natural__last)) . H19: (0 <= byte_count - 1) -> ((0 >= natural__first) and (0 <= natural__last)) . H20: 0 <= byte_count - 1 . -> C1: byte_count >= 1 . C2: 0 >= 0 . C3: 0 <= byte_count - 1 . C4: 0 < byte_count . C5: byte_count <= (src__index__subtype__1__last + 1) * 8 . C6: 0 < (src__index__subtype__1__last + 1) * 8 . C7: 0 <= src__index__subtype__1__last * 8 + 7 . C8: for_all(i___1: integer, ((i___1 >= dst__index__subtype__1__first) and (i___1 <= dst__index__subtype__1__last)) -> ((element(dst, [ i___1]) >= spark__unsigned__byte__first) and (element( dst, [i___1]) <= spark__unsigned__byte__last))) . C9: dst_offset >= natural__first . C10: dst_offset <= natural__last . C11: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__u64__first) and (element( src, [i___1]) <= spark__unsigned__u64__last))) . C12: byte_count >= natural__first . C13: byte_count <= natural__last . C14: dst__index__subtype__1__first = 0 . C15: src__index__subtype__1__first = 0 . C16: dst__index__subtype__1__last >= dst_offset + ( byte_count - 1) . C17: byte_count <= (src__index__subtype__1__last + 1) * 8 . C18: 0 >= natural__first . C19: 0 <= natural__last . C20: 0 >= 0 . C21: 0 <= byte_count - 1 . For path(s) from assertion of line 103 to assertion of line 103: procedure_put_64_lsb_first_4. H1: byte_count >= 1 . H2: loop__1__n >= 0 . H3: loop__1__n <= byte_count - 1 . H4: loop__1__n < byte_count . H5: byte_count <= (src__index__subtype__1__last + 1) * 8 . H6: loop__1__n < (src__index__subtype__1__last + 1) * 8 . H7: loop__1__n <= src__index__subtype__1__last * 8 + 7 . H8: for_all(i___1: integer, ((i___1 >= dst__index__subtype__1__first) and (i___1 <= dst__index__subtype__1__last)) -> ((element(dst, [ i___1]) >= spark__unsigned__byte__first) and (element( dst, [i___1]) <= spark__unsigned__byte__last))) . H9: dst_offset >= natural__first . H10: dst_offset <= natural__last . H11: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__u64__first) and (element( src, [i___1]) <= spark__unsigned__u64__last))) . 
H12: byte_count >= natural__first . H13: byte_count <= natural__last . H14: dst__index__subtype__1__first = 0 . H15: src__index__subtype__1__first = 0 . H16: dst__index__subtype__1__last >= dst_offset + ( byte_count - 1) . H17: byte_count <= (src__index__subtype__1__last + 1) * 8 . H18: loop__1__n >= natural__first . H19: loop__1__n <= natural__last . H20: loop__1__n >= 0 . H21: loop__1__n <= byte_count - 1 . H22: loop__1__n div 8 >= 0 . H23: loop__1__n div 8 <= src__index__subtype__1__last . H24: 8 * (loop__1__n mod 8) >= spark__unsigned__shift_count__first . H25: 8 * (loop__1__n mod 8) <= spark__unsigned__shift_count__last . H26: 8 * (loop__1__n mod 8) >= integer__base__first . H27: 8 * (loop__1__n mod 8) <= integer__base__last . H28: loop__1__n mod 8 >= integer__base__first . H29: loop__1__n mod 8 <= integer__base__last . H30: 8 <> 0 . H31: element(src, [loop__1__n div 8]) >= interfaces__unsigned_64__first . H32: element(src, [loop__1__n div 8]) <= interfaces__unsigned_64__last . H33: loop__1__n div 8 >= src__index__subtype__1__first . H34: loop__1__n div 8 <= src__index__subtype__1__last . H35: loop__1__n div 8 >= integer__base__first . H36: loop__1__n div 8 <= integer__base__last . H37: 8 <> 0 . H38: dst_offset + loop__1__n >= dst__index__subtype__1__first . H39: dst_offset + loop__1__n <= dst__index__subtype__1__last . H40: dst_offset + loop__1__n >= integer__base__first . H41: dst_offset + loop__1__n <= integer__base__last . H42: spark__unsigned__shift_right_64(element(src, [ loop__1__n div 8]), 8 * (loop__1__n mod 8)) >= interfaces__unsigned_64__first . H43: spark__unsigned__shift_right_64(element(src, [ loop__1__n div 8]), 8 * (loop__1__n mod 8)) <= interfaces__unsigned_64__last . H44: bit__and(spark__unsigned__shift_right_64(element(src, [ loop__1__n div 8]), 8 * (loop__1__n mod 8)), 255) >= spark__unsigned__byte__first . H45: bit__and(spark__unsigned__shift_right_64(element(src, [ loop__1__n div 8]), 8 * (loop__1__n mod 8)), 255) <= spark__unsigned__byte__last . H46: bit__and(spark__unsigned__shift_right_64(element(src, [ loop__1__n div 8]), 8 * (loop__1__n mod 8)), 255) >= spark__unsigned__byte__first . H47: bit__and(spark__unsigned__shift_right_64(element(src, [ loop__1__n div 8]), 8 * (loop__1__n mod 8)), 255) <= spark__unsigned__byte__last . H48: not (loop__1__n = byte_count - 1) . -> C1: byte_count >= 1 . C2: loop__1__n + 1 >= 0 . C3: loop__1__n + 1 <= byte_count - 1 . C4: loop__1__n + 1 < byte_count . C5: byte_count <= (src__index__subtype__1__last + 1) * 8 . C6: loop__1__n + 1 < (src__index__subtype__1__last + 1) * 8 . C7: loop__1__n + 1 <= src__index__subtype__1__last * 8 + 7 . C8: for_all(i___1: integer, ((i___1 >= dst__index__subtype__1__first) and (i___1 <= dst__index__subtype__1__last)) -> ((element(update( dst, [dst_offset + loop__1__n], bit__and( spark__unsigned__shift_right_64(element(src, [ loop__1__n div 8]), 8 * (loop__1__n mod 8)), 255)), [ i___1]) >= spark__unsigned__byte__first) and (element(update( dst, [dst_offset + loop__1__n], bit__and( spark__unsigned__shift_right_64(element(src, [ loop__1__n div 8]), 8 * (loop__1__n mod 8)), 255)), [ i___1]) <= spark__unsigned__byte__last))) . C9: dst_offset >= natural__first . C10: dst_offset <= natural__last . C11: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__u64__first) and (element( src, [i___1]) <= spark__unsigned__u64__last))) . C12: byte_count >= natural__first . C13: byte_count <= natural__last . 
C14: dst__index__subtype__1__first = 0 . C15: src__index__subtype__1__first = 0 . C16: dst__index__subtype__1__last >= dst_offset + ( byte_count - 1) . C17: byte_count <= (src__index__subtype__1__last + 1) * 8 . C18: loop__1__n + 1 >= natural__first . C19: loop__1__n + 1 <= natural__last . C20: loop__1__n + 1 >= 0 . C21: loop__1__n + 1 <= byte_count - 1 . For path(s) from assertion of line 103 to check associated with statement of line 111: procedure_put_64_lsb_first_5. H1: byte_count >= 1 . H2: loop__1__n >= 0 . H3: loop__1__n <= byte_count - 1 . H4: loop__1__n < byte_count . H5: byte_count <= (src__index__subtype__1__last + 1) * 8 . H6: loop__1__n < (src__index__subtype__1__last + 1) * 8 . H7: loop__1__n <= src__index__subtype__1__last * 8 + 7 . H8: for_all(i___1: integer, ((i___1 >= dst__index__subtype__1__first) and (i___1 <= dst__index__subtype__1__last)) -> ((element(dst, [ i___1]) >= spark__unsigned__byte__first) and (element( dst, [i___1]) <= spark__unsigned__byte__last))) . H9: dst_offset >= natural__first . H10: dst_offset <= natural__last . H11: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__u64__first) and (element( src, [i___1]) <= spark__unsigned__u64__last))) . H12: byte_count >= natural__first . H13: byte_count <= natural__last . H14: dst__index__subtype__1__first = 0 . H15: src__index__subtype__1__first = 0 . H16: dst__index__subtype__1__last >= dst_offset + ( byte_count - 1) . H17: byte_count <= (src__index__subtype__1__last + 1) * 8 . H18: loop__1__n >= natural__first . H19: loop__1__n <= natural__last . H20: loop__1__n >= 0 . H21: loop__1__n <= byte_count - 1 . -> C1: loop__1__n div 8 >= 0 . C2: loop__1__n div 8 <= src__index__subtype__1__last . For path(s) from assertion of line 103 to run-time check associated with statement of line 113: procedure_put_64_lsb_first_6. H1: byte_count >= 1 . H2: loop__1__n >= 0 . H3: loop__1__n <= byte_count - 1 . H4: loop__1__n < byte_count . H5: byte_count <= (src__index__subtype__1__last + 1) * 8 . H6: loop__1__n < (src__index__subtype__1__last + 1) * 8 . H7: loop__1__n <= src__index__subtype__1__last * 8 + 7 . H8: for_all(i___1: integer, ((i___1 >= dst__index__subtype__1__first) and (i___1 <= dst__index__subtype__1__last)) -> ((element(dst, [ i___1]) >= spark__unsigned__byte__first) and (element( dst, [i___1]) <= spark__unsigned__byte__last))) . H9: dst_offset >= natural__first . H10: dst_offset <= natural__last . H11: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__u64__first) and (element( src, [i___1]) <= spark__unsigned__u64__last))) . H12: byte_count >= natural__first . H13: byte_count <= natural__last . H14: dst__index__subtype__1__first = 0 . H15: src__index__subtype__1__first = 0 . H16: dst__index__subtype__1__last >= dst_offset + ( byte_count - 1) . H17: byte_count <= (src__index__subtype__1__last + 1) * 8 . H18: loop__1__n >= natural__first . H19: loop__1__n <= natural__last . H20: loop__1__n >= 0 . H21: loop__1__n <= byte_count - 1 . H22: loop__1__n div 8 >= 0 . H23: loop__1__n div 8 <= src__index__subtype__1__last . -> C1: 8 * (loop__1__n mod 8) >= spark__unsigned__shift_count__first . C2: 8 * (loop__1__n mod 8) <= spark__unsigned__shift_count__last . C3: 8 * (loop__1__n mod 8) >= integer__base__first . C4: 8 * (loop__1__n mod 8) <= integer__base__last . C5: loop__1__n mod 8 >= integer__base__first . 
C6: loop__1__n mod 8 <= integer__base__last . C7: 8 <> 0 . C8: element(src, [loop__1__n div 8]) >= interfaces__unsigned_64__first . C9: element(src, [loop__1__n div 8]) <= interfaces__unsigned_64__last . C10: loop__1__n div 8 >= src__index__subtype__1__first . C11: loop__1__n div 8 <= src__index__subtype__1__last . C12: loop__1__n div 8 >= integer__base__first . C13: loop__1__n div 8 <= integer__base__last . C14: 8 <> 0 . C15: dst_offset + loop__1__n >= dst__index__subtype__1__first . C16: dst_offset + loop__1__n <= dst__index__subtype__1__last . C17: dst_offset + loop__1__n >= integer__base__first . C18: dst_offset + loop__1__n <= integer__base__last . For path(s) from assertion of line 103 to run-time check associated with statement of line 113: procedure_put_64_lsb_first_7. H1: byte_count >= 1 . H2: loop__1__n >= 0 . H3: loop__1__n <= byte_count - 1 . H4: loop__1__n < byte_count . H5: byte_count <= (src__index__subtype__1__last + 1) * 8 . H6: loop__1__n < (src__index__subtype__1__last + 1) * 8 . H7: loop__1__n <= src__index__subtype__1__last * 8 + 7 . H8: for_all(i___1: integer, ((i___1 >= dst__index__subtype__1__first) and (i___1 <= dst__index__subtype__1__last)) -> ((element(dst, [ i___1]) >= spark__unsigned__byte__first) and (element( dst, [i___1]) <= spark__unsigned__byte__last))) . H9: dst_offset >= natural__first . H10: dst_offset <= natural__last . H11: for_all(i___1: integer, ((i___1 >= src__index__subtype__1__first) and (i___1 <= src__index__subtype__1__last)) -> ((element(src, [ i___1]) >= spark__unsigned__u64__first) and (element( src, [i___1]) <= spark__unsigned__u64__last))) . H12: byte_count >= natural__first . H13: byte_count <= natural__last . H14: dst__index__subtype__1__first = 0 . H15: src__index__subtype__1__first = 0 . H16: dst__index__subtype__1__last >= dst_offset + ( byte_count - 1) . H17: byte_count <= (src__index__subtype__1__last + 1) * 8 . H18: loop__1__n >= natural__first . H19: loop__1__n <= natural__last . H20: loop__1__n >= 0 . H21: loop__1__n <= byte_count - 1 . H22: loop__1__n div 8 >= 0 . H23: loop__1__n div 8 <= src__index__subtype__1__last . H24: 8 * (loop__1__n mod 8) >= spark__unsigned__shift_count__first . H25: 8 * (loop__1__n mod 8) <= spark__unsigned__shift_count__last . H26: 8 * (loop__1__n mod 8) >= integer__base__first . H27: 8 * (loop__1__n mod 8) <= integer__base__last . H28: loop__1__n mod 8 >= integer__base__first . H29: loop__1__n mod 8 <= integer__base__last . H30: 8 <> 0 . H31: element(src, [loop__1__n div 8]) >= interfaces__unsigned_64__first . H32: element(src, [loop__1__n div 8]) <= interfaces__unsigned_64__last . H33: loop__1__n div 8 >= src__index__subtype__1__first . H34: loop__1__n div 8 <= src__index__subtype__1__last . H35: loop__1__n div 8 >= integer__base__first . H36: loop__1__n div 8 <= integer__base__last . H37: 8 <> 0 . H38: dst_offset + loop__1__n >= dst__index__subtype__1__first . H39: dst_offset + loop__1__n <= dst__index__subtype__1__last . H40: dst_offset + loop__1__n >= integer__base__first . H41: dst_offset + loop__1__n <= integer__base__last . H42: spark__unsigned__shift_right_64(element(src, [ loop__1__n div 8]), 8 * (loop__1__n mod 8)) >= interfaces__unsigned_64__first . H43: spark__unsigned__shift_right_64(element(src, [ loop__1__n div 8]), 8 * (loop__1__n mod 8)) <= interfaces__unsigned_64__last . -> C1: bit__and(spark__unsigned__shift_right_64(element(src, [ loop__1__n div 8]), 8 * (loop__1__n mod 8)), 255) >= spark__unsigned__byte__first . 
C2: bit__and(spark__unsigned__shift_right_64(element(src, [ loop__1__n div 8]), 8 * (loop__1__n mod 8)), 255) <= spark__unsigned__byte__last . C3: bit__and(spark__unsigned__shift_right_64(element(src, [ loop__1__n div 8]), 8 * (loop__1__n mod 8)), 255) >= spark__unsigned__byte__first . C4: bit__and(spark__unsigned__shift_right_64(element(src, [ loop__1__n div 8]), 8 * (loop__1__n mod 8)), 255) <= spark__unsigned__byte__last . For path(s) from start to finish: procedure_put_64_lsb_first_8. *** true . /* trivially true VC removed by Examiner */ procedure_put_64_lsb_first_9. *** true . /* trivially true VC removed by Examiner */ For path(s) from assertion of line 103 to finish: procedure_put_64_lsb_first_10. *** true . /* trivially true VC removed by Examiner */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_hash.siv0000644000175000017500000000166611712513676026410 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition function Skein.Skein_512_Hash For path(s) from start to run-time check associated with statement of line 979: function_skein_512_hash_1. *** true . /* all conclusions proved */ For path(s) from start to precondition check associated with statement of line 982: function_skein_512_hash_2. *** true . /* all conclusions proved */ For path(s) from start to precondition check associated with statement of line 985: function_skein_512_hash_3. *** true . /* all conclusions proved */ For path(s) from start to finish: function_skein_512_hash_4. *** true . /* all conclusions proved */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block.vct0000644000175000017500000000014211712513676030274 0ustar eugeneugen,skein_512_process_block,procedure,,,3,,true,,,, ,skein_512_process_block,procedure,,,4,,true,,,, spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final.vcg0000644000175000017500000222675211712513676026543 0ustar eugeneugen ******************************************************* Semantic Analysis of SPARK Text Examiner Pro Edition ******************************************************* procedure Skein.Skein_512_Final For path(s) from start to check associated with statement of line 900: procedure_skein_512_final_1. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: result__index__subtype__1__first = 0 . H6: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . 
H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . -> C1: fld_hash_bit_len(fld_h(ctx)) > 0 . For path(s) from start to run-time check associated with statement of line 901: procedure_skein_512_final_2. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: result__index__subtype__1__first = 0 . H6: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) > 0 . -> C1: 0 >= spark__unsigned__byte__first . C2: 0 <= spark__unsigned__byte__last . For path(s) from start to precondition check associated with statement of line 905: procedure_skein_512_final_3. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . 
H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: result__index__subtype__1__first = 0 . H6: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) > 0 . H27: 0 >= spark__unsigned__byte__first . H28: 0 <= spark__unsigned__byte__last . H29: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H30: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H31: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H32: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H33: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H34: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H35: true . H36: true . 
H37: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H38: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H39: true . H40: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H41: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H42: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H43: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H44: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H45: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H46: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H47: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H48: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) < skein_512_block_bytes_c . -> C1: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) < skein_512_block_bytes_c . C2: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) > 0 . For path(s) from start to run-time check associated with statement of line 909: procedure_skein_512_final_4. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: result__index__subtype__1__first = 0 . H6: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . 
H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) > 0 . H27: 0 >= spark__unsigned__byte__first . H28: 0 <= spark__unsigned__byte__last . H29: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H30: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H31: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H32: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H33: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H34: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H35: true . H36: true . H37: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H38: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H39: true . H40: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H41: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H42: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H43: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . 
H44: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H45: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H46: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H47: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H48: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) < skein_512_block_bytes_c . H49: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) < skein_512_block_bytes_c . H50: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) > 0 . H51: fld_hash_bit_len(fld_h(local_ctx__1)) = fld_hash_bit_len(fld_h( upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H52: fld_hash_bit_len(fld_h(local_ctx__1)) > 0 . H53: fld_byte_count(fld_h(local_ctx__1)) < skein_512_block_bytes_c . H54: fld_byte_count(fld_h(local_ctx__1)) = fld_byte_count(fld_h(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H55: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H56: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H57: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H58: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H59: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H60: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H61: true . H62: true . H63: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H64: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H65: true . H66: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H67: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H68: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H69: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H70: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H71: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H72: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H73: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . 
H74: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H75: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H76: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H77: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H78: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H79: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H80: true . H81: true . H82: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H83: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H84: true . H85: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H86: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H87: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H88: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H89: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H90: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H91: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H92: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H93: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H94: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H95: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H96: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H97: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H98: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H99: true . H100: true . H101: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H102: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H103: true . H104: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H105: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H106: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H107: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H108: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H109: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H110: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . 
H111: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . -> C1: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . C2: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . procedure_skein_512_final_5. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: result__index__subtype__1__first = 0 . H6: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) > 0 . H27: 0 >= spark__unsigned__byte__first . H28: 0 <= spark__unsigned__byte__last . H29: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H30: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H31: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . 
H32: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H33: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H34: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H35: true . H36: true . H37: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H38: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H39: true . H40: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H41: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H42: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H43: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H44: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H45: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H46: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H47: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H48: not (fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) < skein_512_block_bytes_c) . H49: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H50: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H51: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H52: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . 
H53: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H54: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H55: true . H56: true . H57: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H58: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H59: true . H60: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H61: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H62: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H63: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H64: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H65: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H66: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H67: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H68: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H69: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H70: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H71: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H72: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H73: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H74: true . H75: true . 
H76: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H77: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H78: true . H79: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H80: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H81: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H82: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H83: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H84: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H85: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H86: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . -> C1: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . C2: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . For path(s) from start to precondition check associated with statement of line 911: procedure_skein_512_final_6. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: result__index__subtype__1__first = 0 . H6: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . 
H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) > 0 . H27: 0 >= spark__unsigned__byte__first . H28: 0 <= spark__unsigned__byte__last . H29: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H30: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H31: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H32: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H33: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H34: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H35: true . H36: true . H37: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H38: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H39: true . H40: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H41: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H42: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H43: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H44: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . 
H45: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H46: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H47: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H48: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) < skein_512_block_bytes_c . H49: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) < skein_512_block_bytes_c . H50: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) > 0 . H51: fld_hash_bit_len(fld_h(local_ctx__1)) = fld_hash_bit_len(fld_h( upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H52: fld_hash_bit_len(fld_h(local_ctx__1)) > 0 . H53: fld_byte_count(fld_h(local_ctx__1)) < skein_512_block_bytes_c . H54: fld_byte_count(fld_h(local_ctx__1)) = fld_byte_count(fld_h(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H55: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H56: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H57: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H58: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H59: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H60: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H61: true . H62: true . H63: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H64: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H65: true . H66: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H67: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H68: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H69: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H70: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H71: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H72: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H73: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H74: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . 
H75: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H76: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H77: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H78: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H79: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H80: true . H81: true . H82: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H83: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H84: true . H85: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H86: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H87: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H88: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H89: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H90: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H91: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H92: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H93: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H94: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H95: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H96: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H97: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H98: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H99: true . H100: true . H101: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H102: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H103: true . H104: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H105: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H106: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H107: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H108: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H109: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H110: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H111: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H112: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H113: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . 
H114: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H115: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H116: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H117: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H118: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H119: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H120: true . H121: true . H122: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H123: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H124: true . H125: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H126: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H127: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H128: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H129: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H130: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H131: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H132: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H133: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___1]) <= spark__unsigned__byte__last))) . H134: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H135: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . -> C1: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . C2: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . C3: 1 >= positive_block_512_count_t__first . C4: 1 <= positive_block_512_count_t__last . C5: 0 >= natural__first . C6: 0 <= natural__last . C7: fld_hash_bit_len(fld_h(local_ctx__1)) >= initialized_hash_bit_length__first . C8: fld_hash_bit_len(fld_h(local_ctx__1)) <= initialized_hash_bit_length__last . C9: fld_byte_count(fld_h(local_ctx__1)) >= skein_512_block_bytes_count__first . C10: fld_byte_count(fld_h(local_ctx__1)) <= skein_512_block_bytes_count__last . C11: skein_512_block_bytes_index__first = 0 . C12: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . C13: 0 + 63 <= skein_512_block_bytes_index__last . C14: skein_512_block_bytes_index__last <= natural__last . C15: 0 <= natural__last - 63 . procedure_skein_512_final_7. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: result__index__subtype__1__first = 0 . H6: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . 
H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) > 0 . H27: 0 >= spark__unsigned__byte__first . H28: 0 <= spark__unsigned__byte__last . H29: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H30: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H31: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H32: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H33: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H34: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H35: true . H36: true . H37: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . 
H38: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H39: true . H40: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H41: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H42: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H43: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H44: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H45: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H46: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H47: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H48: not (fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) < skein_512_block_bytes_c) . H49: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H50: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H51: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H52: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H53: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H54: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H55: true . H56: true . H57: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H58: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . 
H59: true . H60: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H61: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H62: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H63: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H64: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H65: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H66: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H67: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H68: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H69: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H70: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H71: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H72: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H73: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H74: true . H75: true . H76: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H77: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H78: true . H79: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . 
H80: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H81: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H82: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H83: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H84: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H85: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H86: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H87: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H88: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H89: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H90: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H91: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H92: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H93: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H94: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H95: true . H96: true . H97: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H98: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H99: true . H100: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . 
H101: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H102: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H103: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H104: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H105: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H106: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H107: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H108: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__byte__last))) . H109: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H110: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . -> C1: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . C2: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . C3: 1 >= positive_block_512_count_t__first . C4: 1 <= positive_block_512_count_t__last . C5: 0 >= natural__first . C6: 0 <= natural__last . C7: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= initialized_hash_bit_length__first . C8: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= initialized_hash_bit_length__last . C9: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= skein_512_block_bytes_count__first . C10: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= skein_512_block_bytes_count__last . C11: skein_512_block_bytes_index__first = 0 . C12: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . C13: 0 + 63 <= skein_512_block_bytes_index__last . C14: skein_512_block_bytes_index__last <= natural__last . C15: 0 <= natural__last - 63 . For path(s) from start to run-time check associated with statement of line 918: procedure_skein_512_final_8. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . 
H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: result__index__subtype__1__first = 0 . H6: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) > 0 . H27: 0 >= spark__unsigned__byte__first . H28: 0 <= spark__unsigned__byte__last . H29: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H30: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H31: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H32: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H33: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H34: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H35: true . H36: true . 
H37: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H38: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H39: true . H40: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H41: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H42: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H43: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H44: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H45: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H46: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H47: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H48: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) < skein_512_block_bytes_c . H49: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) < skein_512_block_bytes_c . H50: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) > 0 . H51: fld_hash_bit_len(fld_h(local_ctx__1)) = fld_hash_bit_len(fld_h( upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H52: fld_hash_bit_len(fld_h(local_ctx__1)) > 0 . H53: fld_byte_count(fld_h(local_ctx__1)) < skein_512_block_bytes_c . H54: fld_byte_count(fld_h(local_ctx__1)) = fld_byte_count(fld_h(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H55: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H56: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H57: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H58: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H59: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H60: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H61: true . H62: true . H63: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . 
H64: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H65: true . H66: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H67: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H68: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H69: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H70: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H71: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H72: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H73: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H74: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H75: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H76: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H77: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H78: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H79: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H80: true . H81: true . H82: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H83: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H84: true . H85: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H86: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H87: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H88: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H89: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H90: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H91: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H92: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H93: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H94: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H95: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H96: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H97: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H98: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H99: true . H100: true . H101: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . 
H102: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H103: true . H104: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H105: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H106: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H107: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H108: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H109: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H110: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H111: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H112: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H113: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H114: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H115: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H116: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H117: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H118: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H119: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H120: true . H121: true . H122: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H123: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H124: true . H125: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H126: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H127: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H128: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H129: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H130: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H131: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H132: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H133: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___1]) <= spark__unsigned__byte__last))) . H134: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H135: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H136: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H137: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H138: 1 >= positive_block_512_count_t__first . H139: 1 <= positive_block_512_count_t__last . H140: 0 >= natural__first . H141: 0 <= natural__last . H142: fld_hash_bit_len(fld_h(local_ctx__1)) >= initialized_hash_bit_length__first . 
H143: fld_hash_bit_len(fld_h(local_ctx__1)) <= initialized_hash_bit_length__last . H144: fld_byte_count(fld_h(local_ctx__1)) >= skein_512_block_bytes_count__first . H145: fld_byte_count(fld_h(local_ctx__1)) <= skein_512_block_bytes_count__last . H146: skein_512_block_bytes_index__first = 0 . H147: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H148: 0 + 63 <= skein_512_block_bytes_index__last . H149: skein_512_block_bytes_index__last <= natural__last . H150: 0 <= natural__last - 63 . H151: fld_hash_bit_len(fld_h(local_ctx__2)) >= initialized_hash_bit_length__first . H152: fld_hash_bit_len(fld_h(local_ctx__2)) <= initialized_hash_bit_length__last . H153: fld_hash_bit_len(fld_h(local_ctx__2)) = fld_hash_bit_len(fld_h( local_ctx__1)) . H154: fld_byte_count(fld_h(local_ctx__2)) >= skein_512_block_bytes_count__first . H155: fld_byte_count(fld_h(local_ctx__2)) <= skein_512_block_bytes_count__last . H156: fld_byte_count(fld_h(local_ctx__2)) = fld_byte_count(fld_h( local_ctx__1)) . H157: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H158: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H159: fld_byte_count(fld_h(local_ctx__2)) >= natural__first . H160: fld_byte_count(fld_h(local_ctx__2)) <= natural__last . H161: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first . H162: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last . H163: true . H164: true . H165: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first . H166: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last . H167: true . H168: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first . H169: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last . H170: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first . H171: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last . H172: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first . H173: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last . H174: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first . H175: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last . H176: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H177: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H178: fld_byte_count(fld_h(local_ctx__2)) >= natural__first . H179: fld_byte_count(fld_h(local_ctx__2)) <= natural__last . 
H180: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first . H181: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last . H182: true . H183: true . H184: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first . H185: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last . H186: true . H187: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first . H188: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last . H189: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first . H190: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last . H191: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first . H192: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last . H193: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first . H194: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last . -> C1: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= output_byte_count_t__first . C2: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= output_byte_count_t__last . C3: 8 <> 0 . C4: fld_hash_bit_len(fld_h(local_ctx__2)) + 7 >= integer__base__first . C5: fld_hash_bit_len(fld_h(local_ctx__2)) + 7 <= integer__base__last . procedure_skein_512_final_9. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: result__index__subtype__1__first = 0 . H6: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) > 0 . 
H27: 0 >= spark__unsigned__byte__first . H28: 0 <= spark__unsigned__byte__last . H29: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H30: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H31: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H32: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H33: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H34: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H35: true . H36: true . H37: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H38: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H39: true . H40: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H41: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H42: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H43: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H44: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H45: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H46: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H47: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H48: not (fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) < skein_512_block_bytes_c) . 
H49: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H50: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H51: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H52: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H53: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H54: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H55: true . H56: true . H57: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H58: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H59: true . H60: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H61: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H62: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H63: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H64: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H65: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H66: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H67: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . 
H68: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H69: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H70: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H71: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H72: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H73: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H74: true . H75: true . H76: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H77: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H78: true . H79: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H80: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H81: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H82: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H83: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H84: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H85: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H86: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H87: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H88: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . 
H89: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H90: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H91: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H92: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H93: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H94: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H95: true . H96: true . H97: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H98: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H99: true . H100: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H101: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H102: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H103: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H104: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H105: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H106: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H107: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . 
H108: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__byte__last))) . H109: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H110: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H111: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H112: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H113: 1 >= positive_block_512_count_t__first . H114: 1 <= positive_block_512_count_t__last . H115: 0 >= natural__first . H116: 0 <= natural__last . H117: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= initialized_hash_bit_length__first . H118: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= initialized_hash_bit_length__last . H119: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= skein_512_block_bytes_count__first . H120: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= skein_512_block_bytes_count__last . H121: skein_512_block_bytes_index__first = 0 . H122: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H123: 0 + 63 <= skein_512_block_bytes_index__last . H124: skein_512_block_bytes_index__last <= natural__last . H125: 0 <= natural__last - 63 . H126: fld_hash_bit_len(fld_h(local_ctx__2)) >= initialized_hash_bit_length__first . H127: fld_hash_bit_len(fld_h(local_ctx__2)) <= initialized_hash_bit_length__last . H128: fld_hash_bit_len(fld_h(local_ctx__2)) = fld_hash_bit_len(fld_h( upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H129: fld_byte_count(fld_h(local_ctx__2)) >= skein_512_block_bytes_count__first . H130: fld_byte_count(fld_h(local_ctx__2)) <= skein_512_block_bytes_count__last . H131: fld_byte_count(fld_h(local_ctx__2)) = fld_byte_count(fld_h(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H132: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H133: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H134: fld_byte_count(fld_h(local_ctx__2)) >= natural__first . H135: fld_byte_count(fld_h(local_ctx__2)) <= natural__last . H136: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first . 
H137: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last . H138: true . H139: true . H140: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first . H141: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last . H142: true . H143: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first . H144: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last . H145: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first . H146: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last . H147: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first . H148: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last . H149: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first . H150: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last . H151: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H152: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H153: fld_byte_count(fld_h(local_ctx__2)) >= natural__first . H154: fld_byte_count(fld_h(local_ctx__2)) <= natural__last . H155: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first . H156: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last . H157: true . H158: true . H159: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first . H160: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last . H161: true . H162: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first . H163: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last . H164: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first . H165: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last . H166: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first . H167: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last . H168: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first . H169: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last . -> C1: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= output_byte_count_t__first . C2: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= output_byte_count_t__last . C3: 8 <> 0 . C4: fld_hash_bit_len(fld_h(local_ctx__2)) + 7 >= integer__base__first . C5: fld_hash_bit_len(fld_h(local_ctx__2)) + 7 <= integer__base__last . For path(s) from start to check associated with statement of line 920: procedure_skein_512_final_10. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . 
H5: result__index__subtype__1__first = 0 . H6: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) > 0 . H27: 0 >= spark__unsigned__byte__first . H28: 0 <= spark__unsigned__byte__last . H29: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H30: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H31: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H32: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H33: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H34: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H35: true . H36: true . H37: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . 
H38: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H39: true . H40: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H41: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H42: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H43: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H44: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H45: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H46: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H47: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H48: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) < skein_512_block_bytes_c . H49: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) < skein_512_block_bytes_c . H50: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) > 0 . H51: fld_hash_bit_len(fld_h(local_ctx__1)) = fld_hash_bit_len(fld_h( upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H52: fld_hash_bit_len(fld_h(local_ctx__1)) > 0 . H53: fld_byte_count(fld_h(local_ctx__1)) < skein_512_block_bytes_c . H54: fld_byte_count(fld_h(local_ctx__1)) = fld_byte_count(fld_h(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H55: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H56: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H57: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H58: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H59: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H60: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H61: true . H62: true . H63: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H64: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H65: true . 
H66: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H67: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H68: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H69: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H70: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H71: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H72: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H73: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H74: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H75: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H76: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H77: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H78: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H79: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H80: true . H81: true . H82: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H83: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H84: true . H85: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H86: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H87: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H88: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H89: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H90: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H91: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H92: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H93: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H94: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H95: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H96: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H97: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H98: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H99: true . H100: true . H101: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H102: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H103: true . 
H104: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H105: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H106: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H107: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H108: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H109: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H110: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H111: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H112: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H113: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H114: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H115: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H116: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H117: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H118: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H119: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H120: true . H121: true . H122: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H123: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H124: true . H125: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H126: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H127: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H128: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H129: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H130: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H131: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H132: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H133: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___1]) <= spark__unsigned__byte__last))) . H134: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H135: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H136: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H137: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H138: 1 >= positive_block_512_count_t__first . H139: 1 <= positive_block_512_count_t__last . H140: 0 >= natural__first . H141: 0 <= natural__last . H142: fld_hash_bit_len(fld_h(local_ctx__1)) >= initialized_hash_bit_length__first . H143: fld_hash_bit_len(fld_h(local_ctx__1)) <= initialized_hash_bit_length__last . 
H144: fld_byte_count(fld_h(local_ctx__1)) >= skein_512_block_bytes_count__first . H145: fld_byte_count(fld_h(local_ctx__1)) <= skein_512_block_bytes_count__last . H146: skein_512_block_bytes_index__first = 0 . H147: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H148: 0 + 63 <= skein_512_block_bytes_index__last . H149: skein_512_block_bytes_index__last <= natural__last . H150: 0 <= natural__last - 63 . H151: fld_hash_bit_len(fld_h(local_ctx__2)) >= initialized_hash_bit_length__first . H152: fld_hash_bit_len(fld_h(local_ctx__2)) <= initialized_hash_bit_length__last . H153: fld_hash_bit_len(fld_h(local_ctx__2)) = fld_hash_bit_len(fld_h( local_ctx__1)) . H154: fld_byte_count(fld_h(local_ctx__2)) >= skein_512_block_bytes_count__first . H155: fld_byte_count(fld_h(local_ctx__2)) <= skein_512_block_bytes_count__last . H156: fld_byte_count(fld_h(local_ctx__2)) = fld_byte_count(fld_h( local_ctx__1)) . H157: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H158: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H159: fld_byte_count(fld_h(local_ctx__2)) >= natural__first . H160: fld_byte_count(fld_h(local_ctx__2)) <= natural__last . H161: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first . H162: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last . H163: true . H164: true . H165: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first . H166: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last . H167: true . H168: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first . H169: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last . H170: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first . H171: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last . H172: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first . H173: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last . H174: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first . H175: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last . H176: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H177: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H178: fld_byte_count(fld_h(local_ctx__2)) >= natural__first . H179: fld_byte_count(fld_h(local_ctx__2)) <= natural__last . H180: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first . 
H181: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last . H182: true . H183: true . H184: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first . H185: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last . H186: true . H187: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first . H188: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last . H189: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first . H190: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last . H191: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first . H192: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last . H193: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first . H194: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last . H195: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= output_byte_count_t__first . H196: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= output_byte_count_t__last . H197: 8 <> 0 . H198: fld_hash_bit_len(fld_h(local_ctx__2)) + 7 >= integer__base__first . H199: fld_hash_bit_len(fld_h(local_ctx__2)) + 7 <= integer__base__last . -> C1: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= result__index__subtype__1__last + 1 . procedure_skein_512_final_11. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: result__index__subtype__1__first = 0 . H6: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . 
H26: fld_hash_bit_len(fld_h(ctx)) > 0 . H27: 0 >= spark__unsigned__byte__first . H28: 0 <= spark__unsigned__byte__last . H29: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H30: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H31: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H32: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H33: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H34: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H35: true . H36: true . H37: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H38: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H39: true . H40: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H41: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H42: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H43: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H44: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H45: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H46: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H47: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H48: not (fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) < skein_512_block_bytes_c) . 
H49: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H50: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H51: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H52: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H53: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H54: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H55: true . H56: true . H57: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H58: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H59: true . H60: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H61: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H62: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H63: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H64: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H65: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H66: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H67: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . 
H68: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H69: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H70: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H71: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H72: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H73: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H74: true . H75: true . H76: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H77: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H78: true . H79: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H80: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H81: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H82: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H83: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H84: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H85: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H86: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H87: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H88: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . 
H89: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H90: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H91: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H92: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H93: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H94: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H95: true . H96: true . H97: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H98: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H99: true . H100: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H101: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H102: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H103: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H104: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H105: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H106: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H107: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . 
H108: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__byte__last))) . H109: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H110: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H111: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H112: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H113: 1 >= positive_block_512_count_t__first . H114: 1 <= positive_block_512_count_t__last . H115: 0 >= natural__first . H116: 0 <= natural__last . H117: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= initialized_hash_bit_length__first . H118: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= initialized_hash_bit_length__last . H119: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= skein_512_block_bytes_count__first . H120: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= skein_512_block_bytes_count__last . H121: skein_512_block_bytes_index__first = 0 . H122: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H123: 0 + 63 <= skein_512_block_bytes_index__last . H124: skein_512_block_bytes_index__last <= natural__last . H125: 0 <= natural__last - 63 . H126: fld_hash_bit_len(fld_h(local_ctx__2)) >= initialized_hash_bit_length__first . H127: fld_hash_bit_len(fld_h(local_ctx__2)) <= initialized_hash_bit_length__last . H128: fld_hash_bit_len(fld_h(local_ctx__2)) = fld_hash_bit_len(fld_h( upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H129: fld_byte_count(fld_h(local_ctx__2)) >= skein_512_block_bytes_count__first . H130: fld_byte_count(fld_h(local_ctx__2)) <= skein_512_block_bytes_count__last . H131: fld_byte_count(fld_h(local_ctx__2)) = fld_byte_count(fld_h(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H132: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H133: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H134: fld_byte_count(fld_h(local_ctx__2)) >= natural__first . H135: fld_byte_count(fld_h(local_ctx__2)) <= natural__last . H136: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first . 
H137: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last . H138: true . H139: true . H140: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first . H141: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last . H142: true . H143: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first . H144: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last . H145: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first . H146: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last . H147: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first . H148: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last . H149: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first . H150: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last . H151: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H152: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H153: fld_byte_count(fld_h(local_ctx__2)) >= natural__first . H154: fld_byte_count(fld_h(local_ctx__2)) <= natural__last . H155: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first . H156: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last . H157: true . H158: true . H159: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first . H160: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last . H161: true . H162: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first . H163: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last . H164: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first . H165: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last . H166: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first . H167: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last . H168: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first . H169: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last . H170: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= output_byte_count_t__first . H171: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= output_byte_count_t__last . H172: 8 <> 0 . H173: fld_hash_bit_len(fld_h(local_ctx__2)) + 7 >= integer__base__first . H174: fld_hash_bit_len(fld_h(local_ctx__2)) + 7 <= integer__base__last . -> C1: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= result__index__subtype__1__last + 1 . For path(s) from start to run-time check associated with statement of line 923: procedure_skein_512_final_12. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . 
H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: result__index__subtype__1__first = 0 . H6: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) > 0 . H27: 0 >= spark__unsigned__byte__first . H28: 0 <= spark__unsigned__byte__last . H29: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H30: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H31: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H32: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H33: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H34: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H35: true . H36: true . 
H37: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H38: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H39: true . H40: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H41: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H42: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H43: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H44: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H45: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H46: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H47: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H48: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) < skein_512_block_bytes_c . H49: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) < skein_512_block_bytes_c . H50: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) > 0 . H51: fld_hash_bit_len(fld_h(local_ctx__1)) = fld_hash_bit_len(fld_h( upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H52: fld_hash_bit_len(fld_h(local_ctx__1)) > 0 . H53: fld_byte_count(fld_h(local_ctx__1)) < skein_512_block_bytes_c . H54: fld_byte_count(fld_h(local_ctx__1)) = fld_byte_count(fld_h(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H55: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H56: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H57: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H58: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H59: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H60: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H61: true . H62: true . H63: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . 
H64: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H65: true . H66: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H67: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H68: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H69: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H70: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H71: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H72: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H73: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H74: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H75: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H76: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H77: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H78: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H79: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H80: true . H81: true . H82: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H83: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H84: true . H85: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H86: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H87: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H88: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H89: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H90: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H91: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H92: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H93: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H94: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H95: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H96: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H97: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H98: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H99: true . H100: true . H101: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . 
H102: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H103: true . H104: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H105: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H106: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H107: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H108: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H109: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H110: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H111: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H112: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H113: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H114: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H115: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H116: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H117: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H118: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H119: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H120: true . H121: true . H122: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H123: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H124: true . H125: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H126: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H127: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H128: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H129: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H130: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H131: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H132: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H133: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___1]) <= spark__unsigned__byte__last))) . H134: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H135: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H136: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H137: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H138: 1 >= positive_block_512_count_t__first . H139: 1 <= positive_block_512_count_t__last . H140: 0 >= natural__first . H141: 0 <= natural__last . H142: fld_hash_bit_len(fld_h(local_ctx__1)) >= initialized_hash_bit_length__first . 
H143: fld_hash_bit_len(fld_h(local_ctx__1)) <= initialized_hash_bit_length__last . H144: fld_byte_count(fld_h(local_ctx__1)) >= skein_512_block_bytes_count__first . H145: fld_byte_count(fld_h(local_ctx__1)) <= skein_512_block_bytes_count__last . H146: skein_512_block_bytes_index__first = 0 . H147: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H148: 0 + 63 <= skein_512_block_bytes_index__last . H149: skein_512_block_bytes_index__last <= natural__last . H150: 0 <= natural__last - 63 . H151: fld_hash_bit_len(fld_h(local_ctx__2)) >= initialized_hash_bit_length__first . H152: fld_hash_bit_len(fld_h(local_ctx__2)) <= initialized_hash_bit_length__last . H153: fld_hash_bit_len(fld_h(local_ctx__2)) = fld_hash_bit_len(fld_h( local_ctx__1)) . H154: fld_byte_count(fld_h(local_ctx__2)) >= skein_512_block_bytes_count__first . H155: fld_byte_count(fld_h(local_ctx__2)) <= skein_512_block_bytes_count__last . H156: fld_byte_count(fld_h(local_ctx__2)) = fld_byte_count(fld_h( local_ctx__1)) . H157: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H158: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H159: fld_byte_count(fld_h(local_ctx__2)) >= natural__first . H160: fld_byte_count(fld_h(local_ctx__2)) <= natural__last . H161: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first . H162: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last . H163: true . H164: true . H165: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first . H166: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last . H167: true . H168: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first . H169: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last . H170: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first . H171: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last . H172: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first . H173: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last . H174: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first . H175: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last . H176: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H177: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H178: fld_byte_count(fld_h(local_ctx__2)) >= natural__first . H179: fld_byte_count(fld_h(local_ctx__2)) <= natural__last . 
H180: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first . H181: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last . H182: true . H183: true . H184: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first . H185: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last . H186: true . H187: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first . H188: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last . H189: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first . H190: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last . H191: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first . H192: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last . H193: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first . H194: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last . H195: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= output_byte_count_t__first . H196: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= output_byte_count_t__last . H197: 8 <> 0 . H198: fld_hash_bit_len(fld_h(local_ctx__2)) + 7 >= integer__base__first . H199: fld_hash_bit_len(fld_h(local_ctx__2)) + 7 <= integer__base__last . H200: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= result__index__subtype__1__last + 1 . -> C1: 0 >= spark__unsigned__byte__first . C2: 0 <= spark__unsigned__byte__last . procedure_skein_512_final_13. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: result__index__subtype__1__first = 0 . H6: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . 
H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) > 0 . H27: 0 >= spark__unsigned__byte__first . H28: 0 <= spark__unsigned__byte__last . H29: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H30: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H31: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H32: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H33: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H34: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H35: true . H36: true . H37: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H38: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H39: true . H40: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H41: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H42: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H43: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H44: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H45: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H46: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H47: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . 
H48: not (fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) < skein_512_block_bytes_c) . H49: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H50: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H51: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H52: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H53: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H54: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H55: true . H56: true . H57: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H58: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H59: true . H60: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H61: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H62: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H63: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H64: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H65: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H66: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H67: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . 
H68: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H69: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H70: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H71: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H72: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H73: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H74: true . H75: true . H76: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H77: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H78: true . H79: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H80: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H81: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H82: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H83: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H84: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H85: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H86: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H87: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H88: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . 
H89: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H90: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H91: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H92: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H93: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H94: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H95: true . H96: true . H97: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H98: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H99: true . H100: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H101: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H102: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H103: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H104: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H105: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H106: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H107: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . 
H108: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__byte__last))) . H109: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H110: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H111: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H112: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H113: 1 >= positive_block_512_count_t__first . H114: 1 <= positive_block_512_count_t__last . H115: 0 >= natural__first . H116: 0 <= natural__last . H117: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= initialized_hash_bit_length__first . H118: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= initialized_hash_bit_length__last . H119: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= skein_512_block_bytes_count__first . H120: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= skein_512_block_bytes_count__last . H121: skein_512_block_bytes_index__first = 0 . H122: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H123: 0 + 63 <= skein_512_block_bytes_index__last . H124: skein_512_block_bytes_index__last <= natural__last . H125: 0 <= natural__last - 63 . H126: fld_hash_bit_len(fld_h(local_ctx__2)) >= initialized_hash_bit_length__first . H127: fld_hash_bit_len(fld_h(local_ctx__2)) <= initialized_hash_bit_length__last . H128: fld_hash_bit_len(fld_h(local_ctx__2)) = fld_hash_bit_len(fld_h( upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H129: fld_byte_count(fld_h(local_ctx__2)) >= skein_512_block_bytes_count__first . H130: fld_byte_count(fld_h(local_ctx__2)) <= skein_512_block_bytes_count__last . H131: fld_byte_count(fld_h(local_ctx__2)) = fld_byte_count(fld_h(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H132: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H133: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H134: fld_byte_count(fld_h(local_ctx__2)) >= natural__first . H135: fld_byte_count(fld_h(local_ctx__2)) <= natural__last . H136: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first . 
H137: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last . H138: true . H139: true . H140: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first . H141: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last . H142: true . H143: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first . H144: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last . H145: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first . H146: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last . H147: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first . H148: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last . H149: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first . H150: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last . H151: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H152: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H153: fld_byte_count(fld_h(local_ctx__2)) >= natural__first . H154: fld_byte_count(fld_h(local_ctx__2)) <= natural__last . H155: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first . H156: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last . H157: true . H158: true . H159: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first . H160: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last . H161: true . H162: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first . H163: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last . H164: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first . H165: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last . H166: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first . H167: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last . H168: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first . H169: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last . H170: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= output_byte_count_t__first . H171: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= output_byte_count_t__last . H172: 8 <> 0 . H173: fld_hash_bit_len(fld_h(local_ctx__2)) + 7 >= integer__base__first . H174: fld_hash_bit_len(fld_h(local_ctx__2)) + 7 <= integer__base__last . H175: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= result__index__subtype__1__last + 1 . -> C1: 0 >= spark__unsigned__byte__first . C2: 0 <= spark__unsigned__byte__last . For path(s) from start to run-time check associated with statement of line 926: procedure_skein_512_final_14. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . 
H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: result__index__subtype__1__first = 0 . H6: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) > 0 . H27: 0 >= spark__unsigned__byte__first . H28: 0 <= spark__unsigned__byte__last . H29: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H30: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H31: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H32: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H33: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H34: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H35: true . 
H36: true . H37: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H38: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H39: true . H40: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H41: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H42: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H43: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H44: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H45: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H46: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H47: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H48: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) < skein_512_block_bytes_c . H49: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) < skein_512_block_bytes_c . H50: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) > 0 . H51: fld_hash_bit_len(fld_h(local_ctx__1)) = fld_hash_bit_len(fld_h( upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H52: fld_hash_bit_len(fld_h(local_ctx__1)) > 0 . H53: fld_byte_count(fld_h(local_ctx__1)) < skein_512_block_bytes_c . H54: fld_byte_count(fld_h(local_ctx__1)) = fld_byte_count(fld_h(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H55: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H56: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H57: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H58: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H59: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H60: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H61: true . H62: true . 
H63: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H64: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H65: true . H66: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H67: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H68: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H69: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H70: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H71: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H72: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H73: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H74: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H75: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H76: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H77: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H78: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H79: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H80: true . H81: true . H82: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H83: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H84: true . H85: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H86: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H87: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H88: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H89: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H90: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H91: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H92: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H93: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H94: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H95: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H96: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H97: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H98: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H99: true . H100: true . 
H101: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H102: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H103: true . H104: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H105: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H106: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H107: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H108: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H109: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H110: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H111: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H112: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H113: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H114: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H115: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H116: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H117: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H118: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H119: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H120: true . H121: true . H122: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H123: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H124: true . H125: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H126: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H127: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H128: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H129: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H130: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H131: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H132: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H133: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___1]) <= spark__unsigned__byte__last))) . H134: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H135: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H136: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H137: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H138: 1 >= positive_block_512_count_t__first . H139: 1 <= positive_block_512_count_t__last . H140: 0 >= natural__first . H141: 0 <= natural__last . 
H142: fld_hash_bit_len(fld_h(local_ctx__1)) >= initialized_hash_bit_length__first . H143: fld_hash_bit_len(fld_h(local_ctx__1)) <= initialized_hash_bit_length__last . H144: fld_byte_count(fld_h(local_ctx__1)) >= skein_512_block_bytes_count__first . H145: fld_byte_count(fld_h(local_ctx__1)) <= skein_512_block_bytes_count__last . H146: skein_512_block_bytes_index__first = 0 . H147: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H148: 0 + 63 <= skein_512_block_bytes_index__last . H149: skein_512_block_bytes_index__last <= natural__last . H150: 0 <= natural__last - 63 . H151: fld_hash_bit_len(fld_h(local_ctx__2)) >= initialized_hash_bit_length__first . H152: fld_hash_bit_len(fld_h(local_ctx__2)) <= initialized_hash_bit_length__last . H153: fld_hash_bit_len(fld_h(local_ctx__2)) = fld_hash_bit_len(fld_h( local_ctx__1)) . H154: fld_byte_count(fld_h(local_ctx__2)) >= skein_512_block_bytes_count__first . H155: fld_byte_count(fld_h(local_ctx__2)) <= skein_512_block_bytes_count__last . H156: fld_byte_count(fld_h(local_ctx__2)) = fld_byte_count(fld_h( local_ctx__1)) . H157: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H158: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H159: fld_byte_count(fld_h(local_ctx__2)) >= natural__first . H160: fld_byte_count(fld_h(local_ctx__2)) <= natural__last . H161: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first . H162: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last . H163: true . H164: true . H165: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first . H166: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last . H167: true . H168: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first . H169: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last . H170: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first . H171: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last . H172: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first . H173: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last . H174: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first . H175: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last . H176: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H177: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H178: fld_byte_count(fld_h(local_ctx__2)) >= natural__first . 
H179: fld_byte_count(fld_h(local_ctx__2)) <= natural__last . H180: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first . H181: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last . H182: true . H183: true . H184: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first . H185: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last . H186: true . H187: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first . H188: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last . H189: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first . H190: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last . H191: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first . H192: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last . H193: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first . H194: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last . H195: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= output_byte_count_t__first . H196: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= output_byte_count_t__last . H197: 8 <> 0 . H198: fld_hash_bit_len(fld_h(local_ctx__2)) + 7 >= integer__base__first . H199: fld_hash_bit_len(fld_h(local_ctx__2)) + 7 <= integer__base__last . H200: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= result__index__subtype__1__last + 1 . H201: 0 >= spark__unsigned__byte__first . H202: 0 <= spark__unsigned__byte__last . H203: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_b( local_ctx__2, mk__spark__crypto__byte_seq(0))), [ i___2]) >= spark__unsigned__byte__first) and (element(fld_b( upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))), [ i___2]) <= spark__unsigned__byte__last))) . H204: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_b( local_ctx__2, mk__spark__crypto__byte_seq(0))), [ i___1]) >= spark__unsigned__u64__first) and (element(fld_x( upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))), [ i___1]) <= spark__unsigned__u64__last))) . H205: fld_byte_count(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0)))) >= natural__first . H206: fld_byte_count(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0)))) <= natural__last . H207: fld_hash_bit_len(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0)))) >= hash_bit_length__first . H208: fld_hash_bit_len(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0)))) <= hash_bit_length__last . H209: true . H210: true . H211: fld_field_type(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u6__first . H212: fld_field_type(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u6__last . H213: true . H214: fld_tree_level(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u7__first . H215: fld_tree_level(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u7__last . H216: fld_reserved(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u16__first . 
H217: fld_reserved(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u16__last . H218: fld_byte_count_msb(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u32__first . H219: fld_byte_count_msb(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u32__last . H220: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u64__first . H221: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u64__last . H222: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= output_byte_count_t__first . H223: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= output_byte_count_t__last . -> C1: ((fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 >= positive_output_block_count_t__first . C2: ((fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 <= positive_output_block_count_t__last . C3: 64 <> 0 . C4: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63 >= integer__base__first . C5: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63 <= integer__base__last . procedure_skein_512_final_15. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: result__index__subtype__1__first = 0 . H6: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) > 0 . H27: 0 >= spark__unsigned__byte__first . H28: 0 <= spark__unsigned__byte__last . 
H29: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H30: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H31: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H32: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H33: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H34: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H35: true . H36: true . H37: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H38: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H39: true . H40: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H41: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H42: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H43: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H44: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H45: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H46: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H47: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H48: not (fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) < skein_512_block_bytes_c) . 
H49: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H50: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H51: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H52: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H53: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H54: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H55: true . H56: true . H57: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H58: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H59: true . H60: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H61: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H62: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H63: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H64: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H65: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H66: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H67: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . 
H68: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H69: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H70: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H71: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H72: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H73: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H74: true . H75: true . H76: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H77: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H78: true . H79: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H80: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H81: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H82: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H83: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H84: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H85: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H86: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H87: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H88: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . 
H89: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H90: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H91: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H92: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H93: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H94: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H95: true . H96: true . H97: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H98: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H99: true . H100: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H101: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H102: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H103: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H104: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H105: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H106: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H107: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . 
H108: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__byte__last))) . H109: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H110: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H111: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H112: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H113: 1 >= positive_block_512_count_t__first . H114: 1 <= positive_block_512_count_t__last . H115: 0 >= natural__first . H116: 0 <= natural__last . H117: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= initialized_hash_bit_length__first . H118: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= initialized_hash_bit_length__last . H119: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= skein_512_block_bytes_count__first . H120: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= skein_512_block_bytes_count__last . H121: skein_512_block_bytes_index__first = 0 . H122: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H123: 0 + 63 <= skein_512_block_bytes_index__last . H124: skein_512_block_bytes_index__last <= natural__last . H125: 0 <= natural__last - 63 . H126: fld_hash_bit_len(fld_h(local_ctx__2)) >= initialized_hash_bit_length__first . H127: fld_hash_bit_len(fld_h(local_ctx__2)) <= initialized_hash_bit_length__last . H128: fld_hash_bit_len(fld_h(local_ctx__2)) = fld_hash_bit_len(fld_h( upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H129: fld_byte_count(fld_h(local_ctx__2)) >= skein_512_block_bytes_count__first . H130: fld_byte_count(fld_h(local_ctx__2)) <= skein_512_block_bytes_count__last . H131: fld_byte_count(fld_h(local_ctx__2)) = fld_byte_count(fld_h(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H132: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H133: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H134: fld_byte_count(fld_h(local_ctx__2)) >= natural__first . H135: fld_byte_count(fld_h(local_ctx__2)) <= natural__last . H136: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first . 
H137: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last . H138: true . H139: true . H140: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first . H141: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last . H142: true . H143: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first . H144: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last . H145: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first . H146: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last . H147: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first . H148: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last . H149: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first . H150: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last . H151: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H152: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H153: fld_byte_count(fld_h(local_ctx__2)) >= natural__first . H154: fld_byte_count(fld_h(local_ctx__2)) <= natural__last . H155: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first . H156: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last . H157: true . H158: true . H159: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first . H160: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last . H161: true . H162: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first . H163: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last . H164: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first . H165: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last . H166: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first . H167: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last . H168: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first . H169: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last . H170: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= output_byte_count_t__first . H171: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= output_byte_count_t__last . H172: 8 <> 0 . H173: fld_hash_bit_len(fld_h(local_ctx__2)) + 7 >= integer__base__first . H174: fld_hash_bit_len(fld_h(local_ctx__2)) + 7 <= integer__base__last . H175: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= result__index__subtype__1__last + 1 . H176: 0 >= spark__unsigned__byte__first . H177: 0 <= spark__unsigned__byte__last . 
H178: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_b( local_ctx__2, mk__spark__crypto__byte_seq(0))), [ i___2]) >= spark__unsigned__byte__first) and (element(fld_b( upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))), [ i___2]) <= spark__unsigned__byte__last))) . H179: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_b( local_ctx__2, mk__spark__crypto__byte_seq(0))), [ i___1]) >= spark__unsigned__u64__first) and (element(fld_x( upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))), [ i___1]) <= spark__unsigned__u64__last))) . H180: fld_byte_count(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0)))) >= natural__first . H181: fld_byte_count(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0)))) <= natural__last . H182: fld_hash_bit_len(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0)))) >= hash_bit_length__first . H183: fld_hash_bit_len(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0)))) <= hash_bit_length__last . H184: true . H185: true . H186: fld_field_type(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u6__first . H187: fld_field_type(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u6__last . H188: true . H189: fld_tree_level(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u7__first . H190: fld_tree_level(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u7__last . H191: fld_reserved(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u16__first . H192: fld_reserved(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u16__last . H193: fld_byte_count_msb(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u32__first . H194: fld_byte_count_msb(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u32__last . H195: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u64__first . H196: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u64__last . H197: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= output_byte_count_t__first . H198: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= output_byte_count_t__last . -> C1: ((fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 >= positive_output_block_count_t__first . C2: ((fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 <= positive_output_block_count_t__last . C3: 64 <> 0 . C4: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63 >= integer__base__first . C5: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63 <= integer__base__last . For path(s) from start to run-time check associated with statement of line 927: procedure_skein_512_final_16. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: result__index__subtype__1__first = 0 . 
H6: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) > 0 . H27: 0 >= spark__unsigned__byte__first . H28: 0 <= spark__unsigned__byte__last . H29: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H30: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H31: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H32: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H33: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H34: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H35: true . H36: true . H37: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . 
H38: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H39: true . H40: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H41: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H42: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H43: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H44: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H45: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H46: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H47: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H48: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) < skein_512_block_bytes_c . H49: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) < skein_512_block_bytes_c . H50: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) > 0 . H51: fld_hash_bit_len(fld_h(local_ctx__1)) = fld_hash_bit_len(fld_h( upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H52: fld_hash_bit_len(fld_h(local_ctx__1)) > 0 . H53: fld_byte_count(fld_h(local_ctx__1)) < skein_512_block_bytes_c . H54: fld_byte_count(fld_h(local_ctx__1)) = fld_byte_count(fld_h(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H55: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H56: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H57: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H58: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H59: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H60: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H61: true . H62: true . H63: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H64: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H65: true . 
H66: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H67: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H68: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H69: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H70: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H71: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H72: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H73: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H74: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H75: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H76: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H77: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H78: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H79: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H80: true . H81: true . H82: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H83: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H84: true . H85: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H86: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H87: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H88: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H89: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H90: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H91: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H92: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H93: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H94: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H95: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H96: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H97: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H98: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H99: true . H100: true . H101: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H102: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H103: true . 
H104: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H105: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H106: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H107: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H108: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H109: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H110: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H111: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H112: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H113: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H114: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H115: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H116: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H117: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H118: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H119: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H120: true . H121: true . H122: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H123: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H124: true . H125: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H126: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H127: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H128: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H129: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H130: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H131: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H132: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H133: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___1]) <= spark__unsigned__byte__last))) . H134: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H135: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H136: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H137: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H138: 1 >= positive_block_512_count_t__first . H139: 1 <= positive_block_512_count_t__last . H140: 0 >= natural__first . H141: 0 <= natural__last . H142: fld_hash_bit_len(fld_h(local_ctx__1)) >= initialized_hash_bit_length__first . H143: fld_hash_bit_len(fld_h(local_ctx__1)) <= initialized_hash_bit_length__last . 
H144: fld_byte_count(fld_h(local_ctx__1)) >= skein_512_block_bytes_count__first . H145: fld_byte_count(fld_h(local_ctx__1)) <= skein_512_block_bytes_count__last . H146: skein_512_block_bytes_index__first = 0 . H147: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H148: 0 + 63 <= skein_512_block_bytes_index__last . H149: skein_512_block_bytes_index__last <= natural__last . H150: 0 <= natural__last - 63 . H151: fld_hash_bit_len(fld_h(local_ctx__2)) >= initialized_hash_bit_length__first . H152: fld_hash_bit_len(fld_h(local_ctx__2)) <= initialized_hash_bit_length__last . H153: fld_hash_bit_len(fld_h(local_ctx__2)) = fld_hash_bit_len(fld_h( local_ctx__1)) . H154: fld_byte_count(fld_h(local_ctx__2)) >= skein_512_block_bytes_count__first . H155: fld_byte_count(fld_h(local_ctx__2)) <= skein_512_block_bytes_count__last . H156: fld_byte_count(fld_h(local_ctx__2)) = fld_byte_count(fld_h( local_ctx__1)) . H157: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H158: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H159: fld_byte_count(fld_h(local_ctx__2)) >= natural__first . H160: fld_byte_count(fld_h(local_ctx__2)) <= natural__last . H161: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first . H162: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last . H163: true . H164: true . H165: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first . H166: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last . H167: true . H168: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first . H169: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last . H170: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first . H171: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last . H172: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first . H173: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last . H174: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first . H175: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last . H176: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H177: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H178: fld_byte_count(fld_h(local_ctx__2)) >= natural__first . H179: fld_byte_count(fld_h(local_ctx__2)) <= natural__last . H180: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first . 
H181: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last . H182: true . H183: true . H184: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first . H185: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last . H186: true . H187: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first . H188: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last . H189: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first . H190: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last . H191: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first . H192: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last . H193: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first . H194: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last . H195: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= output_byte_count_t__first . H196: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= output_byte_count_t__last . H197: 8 <> 0 . H198: fld_hash_bit_len(fld_h(local_ctx__2)) + 7 >= integer__base__first . H199: fld_hash_bit_len(fld_h(local_ctx__2)) + 7 <= integer__base__last . H200: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= result__index__subtype__1__last + 1 . H201: 0 >= spark__unsigned__byte__first . H202: 0 <= spark__unsigned__byte__last . H203: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_b( local_ctx__2, mk__spark__crypto__byte_seq(0))), [ i___2]) >= spark__unsigned__byte__first) and (element(fld_b( upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))), [ i___2]) <= spark__unsigned__byte__last))) . H204: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_b( local_ctx__2, mk__spark__crypto__byte_seq(0))), [ i___1]) >= spark__unsigned__u64__first) and (element(fld_x( upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))), [ i___1]) <= spark__unsigned__u64__last))) . H205: fld_byte_count(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0)))) >= natural__first . H206: fld_byte_count(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0)))) <= natural__last . H207: fld_hash_bit_len(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0)))) >= hash_bit_length__first . H208: fld_hash_bit_len(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0)))) <= hash_bit_length__last . H209: true . H210: true . H211: fld_field_type(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u6__first . H212: fld_field_type(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u6__last . H213: true . H214: fld_tree_level(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u7__first . H215: fld_tree_level(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u7__last . H216: fld_reserved(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u16__first . H217: fld_reserved(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u16__last . 
H218: fld_byte_count_msb(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u32__first . H219: fld_byte_count_msb(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u32__last . H220: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u64__first . H221: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u64__last . H222: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= output_byte_count_t__first . H223: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= output_byte_count_t__last . H224: ((fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 >= positive_output_block_count_t__first . H225: ((fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 <= positive_output_block_count_t__last . H226: 64 <> 0 . H227: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63 >= integer__base__first . H228: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63 <= integer__base__last . -> C1: 0 >= output_block_count_t__first . C2: 0 <= output_block_count_t__last . procedure_skein_512_final_17. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: result__index__subtype__1__first = 0 . H6: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) > 0 . H27: 0 >= spark__unsigned__byte__first . H28: 0 <= spark__unsigned__byte__last . 
H29: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H30: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H31: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H32: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H33: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H34: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H35: true . H36: true . H37: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H38: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H39: true . H40: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H41: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H42: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H43: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H44: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H45: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H46: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H47: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H48: not (fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) < skein_512_block_bytes_c) . 
H49: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H50: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H51: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H52: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H53: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H54: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H55: true . H56: true . H57: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H58: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H59: true . H60: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H61: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H62: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H63: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H64: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H65: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H66: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H67: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . 
H68: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H69: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H70: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H71: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H72: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H73: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H74: true . H75: true . H76: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H77: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H78: true . H79: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H80: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H81: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H82: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H83: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H84: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H85: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H86: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H87: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H88: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . 
H89: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H90: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H91: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H92: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H93: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H94: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H95: true . H96: true . H97: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H98: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H99: true . H100: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H101: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H102: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H103: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H104: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H105: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H106: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H107: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . 
H108: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__byte__last))) . H109: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H110: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H111: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H112: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H113: 1 >= positive_block_512_count_t__first . H114: 1 <= positive_block_512_count_t__last . H115: 0 >= natural__first . H116: 0 <= natural__last . H117: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= initialized_hash_bit_length__first . H118: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= initialized_hash_bit_length__last . H119: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= skein_512_block_bytes_count__first . H120: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= skein_512_block_bytes_count__last . H121: skein_512_block_bytes_index__first = 0 . H122: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H123: 0 + 63 <= skein_512_block_bytes_index__last . H124: skein_512_block_bytes_index__last <= natural__last . H125: 0 <= natural__last - 63 . H126: fld_hash_bit_len(fld_h(local_ctx__2)) >= initialized_hash_bit_length__first . H127: fld_hash_bit_len(fld_h(local_ctx__2)) <= initialized_hash_bit_length__last . H128: fld_hash_bit_len(fld_h(local_ctx__2)) = fld_hash_bit_len(fld_h( upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H129: fld_byte_count(fld_h(local_ctx__2)) >= skein_512_block_bytes_count__first . H130: fld_byte_count(fld_h(local_ctx__2)) <= skein_512_block_bytes_count__last . H131: fld_byte_count(fld_h(local_ctx__2)) = fld_byte_count(fld_h(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H132: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H133: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H134: fld_byte_count(fld_h(local_ctx__2)) >= natural__first . H135: fld_byte_count(fld_h(local_ctx__2)) <= natural__last . H136: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first . 
H137: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last . H138: true . H139: true . H140: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first . H141: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last . H142: true . H143: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first . H144: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last . H145: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first . H146: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last . H147: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first . H148: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last . H149: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first . H150: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last . H151: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H152: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H153: fld_byte_count(fld_h(local_ctx__2)) >= natural__first . H154: fld_byte_count(fld_h(local_ctx__2)) <= natural__last . H155: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first . H156: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last . H157: true . H158: true . H159: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first . H160: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last . H161: true . H162: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first . H163: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last . H164: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first . H165: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last . H166: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first . H167: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last . H168: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first . H169: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last . H170: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= output_byte_count_t__first . H171: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= output_byte_count_t__last . H172: 8 <> 0 . H173: fld_hash_bit_len(fld_h(local_ctx__2)) + 7 >= integer__base__first . H174: fld_hash_bit_len(fld_h(local_ctx__2)) + 7 <= integer__base__last . H175: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= result__index__subtype__1__last + 1 . H176: 0 >= spark__unsigned__byte__first . H177: 0 <= spark__unsigned__byte__last . 
H178: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_b( local_ctx__2, mk__spark__crypto__byte_seq(0))), [ i___2]) >= spark__unsigned__byte__first) and (element(fld_b( upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))), [ i___2]) <= spark__unsigned__byte__last))) . H179: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_b( local_ctx__2, mk__spark__crypto__byte_seq(0))), [ i___1]) >= spark__unsigned__u64__first) and (element(fld_x( upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))), [ i___1]) <= spark__unsigned__u64__last))) . H180: fld_byte_count(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0)))) >= natural__first . H181: fld_byte_count(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0)))) <= natural__last . H182: fld_hash_bit_len(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0)))) >= hash_bit_length__first . H183: fld_hash_bit_len(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0)))) <= hash_bit_length__last . H184: true . H185: true . H186: fld_field_type(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u6__first . H187: fld_field_type(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u6__last . H188: true . H189: fld_tree_level(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u7__first . H190: fld_tree_level(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u7__last . H191: fld_reserved(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u16__first . H192: fld_reserved(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u16__last . H193: fld_byte_count_msb(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u32__first . H194: fld_byte_count_msb(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u32__last . H195: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u64__first . H196: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u64__last . H197: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= output_byte_count_t__first . H198: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= output_byte_count_t__last . H199: ((fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 >= positive_output_block_count_t__first . H200: ((fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 <= positive_output_block_count_t__last . H201: 64 <> 0 . H202: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63 >= integer__base__first . H203: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63 <= integer__base__last . -> C1: 0 >= output_block_count_t__first . C2: 0 <= output_block_count_t__last . For path(s) from start to assertion of line 930: procedure_skein_512_final_18. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . 
H5: result__index__subtype__1__first = 0 . H6: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) > 0 . H27: 0 >= spark__unsigned__byte__first . H28: 0 <= spark__unsigned__byte__last . H29: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H30: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H31: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H32: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H33: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H34: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H35: true . H36: true . H37: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . 
H38: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H39: true . H40: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H41: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H42: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H43: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H44: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H45: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H46: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H47: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H48: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) < skein_512_block_bytes_c . H49: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) < skein_512_block_bytes_c . H50: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) > 0 . H51: fld_hash_bit_len(fld_h(local_ctx__1)) = fld_hash_bit_len(fld_h( upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H52: fld_hash_bit_len(fld_h(local_ctx__1)) > 0 . H53: fld_byte_count(fld_h(local_ctx__1)) < skein_512_block_bytes_c . H54: fld_byte_count(fld_h(local_ctx__1)) = fld_byte_count(fld_h(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H55: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H56: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H57: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H58: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H59: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H60: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H61: true . H62: true . H63: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H64: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H65: true . 
H66: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H67: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H68: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H69: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H70: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H71: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H72: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H73: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H74: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H75: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H76: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H77: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H78: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H79: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H80: true . H81: true . H82: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H83: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H84: true . H85: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H86: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H87: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H88: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H89: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H90: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H91: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H92: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H93: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H94: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H95: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H96: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H97: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H98: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H99: true . H100: true . H101: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H102: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H103: true . 
H104: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H105: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H106: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H107: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H108: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H109: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H110: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H111: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H112: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H113: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H114: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H115: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H116: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H117: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H118: fld_hash_bit_len(fld_h(local_ctx__1)) >= hash_bit_length__first . H119: fld_hash_bit_len(fld_h(local_ctx__1)) <= hash_bit_length__last . H120: true . H121: true . H122: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u6__first . H123: fld_field_type(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u6__last . H124: true . H125: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u7__first . H126: fld_tree_level(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u7__last . H127: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u16__first . H128: fld_reserved(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u16__last . H129: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u32__first . H130: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u32__last . H131: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) >= spark__unsigned__u64__first . H132: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__1))) <= spark__unsigned__u64__last . H133: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__1), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__1), [i___1]) <= spark__unsigned__byte__last))) . H134: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H135: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H136: fld_byte_count(fld_h(local_ctx__1)) >= natural__first . H137: fld_byte_count(fld_h(local_ctx__1)) <= natural__last . H138: 1 >= positive_block_512_count_t__first . H139: 1 <= positive_block_512_count_t__last . H140: 0 >= natural__first . H141: 0 <= natural__last . H142: fld_hash_bit_len(fld_h(local_ctx__1)) >= initialized_hash_bit_length__first . H143: fld_hash_bit_len(fld_h(local_ctx__1)) <= initialized_hash_bit_length__last . 
H144: fld_byte_count(fld_h(local_ctx__1)) >= skein_512_block_bytes_count__first . H145: fld_byte_count(fld_h(local_ctx__1)) <= skein_512_block_bytes_count__last . H146: skein_512_block_bytes_index__first = 0 . H147: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H148: 0 + 63 <= skein_512_block_bytes_index__last . H149: skein_512_block_bytes_index__last <= natural__last . H150: 0 <= natural__last - 63 . H151: fld_hash_bit_len(fld_h(local_ctx__2)) >= initialized_hash_bit_length__first . H152: fld_hash_bit_len(fld_h(local_ctx__2)) <= initialized_hash_bit_length__last . H153: fld_hash_bit_len(fld_h(local_ctx__2)) = fld_hash_bit_len(fld_h( local_ctx__1)) . H154: fld_byte_count(fld_h(local_ctx__2)) >= skein_512_block_bytes_count__first . H155: fld_byte_count(fld_h(local_ctx__2)) <= skein_512_block_bytes_count__last . H156: fld_byte_count(fld_h(local_ctx__2)) = fld_byte_count(fld_h( local_ctx__1)) . H157: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H158: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H159: fld_byte_count(fld_h(local_ctx__2)) >= natural__first . H160: fld_byte_count(fld_h(local_ctx__2)) <= natural__last . H161: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first . H162: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last . H163: true . H164: true . H165: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first . H166: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last . H167: true . H168: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first . H169: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last . H170: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first . H171: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last . H172: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first . H173: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last . H174: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first . H175: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last . H176: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H177: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H178: fld_byte_count(fld_h(local_ctx__2)) >= natural__first . H179: fld_byte_count(fld_h(local_ctx__2)) <= natural__last . H180: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first . 
H181: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last . H182: true . H183: true . H184: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first . H185: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last . H186: true . H187: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first . H188: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last . H189: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first . H190: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last . H191: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first . H192: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last . H193: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first . H194: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last . H195: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= output_byte_count_t__first . H196: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= output_byte_count_t__last . H197: 8 <> 0 . H198: fld_hash_bit_len(fld_h(local_ctx__2)) + 7 >= integer__base__first . H199: fld_hash_bit_len(fld_h(local_ctx__2)) + 7 <= integer__base__last . H200: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= result__index__subtype__1__last + 1 . H201: 0 >= spark__unsigned__byte__first . H202: 0 <= spark__unsigned__byte__last . H203: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_b( local_ctx__2, mk__spark__crypto__byte_seq(0))), [ i___2]) >= spark__unsigned__byte__first) and (element(fld_b( upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))), [ i___2]) <= spark__unsigned__byte__last))) . H204: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_b( local_ctx__2, mk__spark__crypto__byte_seq(0))), [ i___1]) >= spark__unsigned__u64__first) and (element(fld_x( upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))), [ i___1]) <= spark__unsigned__u64__last))) . H205: fld_byte_count(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0)))) >= natural__first . H206: fld_byte_count(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0)))) <= natural__last . H207: fld_hash_bit_len(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0)))) >= hash_bit_length__first . H208: fld_hash_bit_len(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0)))) <= hash_bit_length__last . H209: true . H210: true . H211: fld_field_type(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u6__first . H212: fld_field_type(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u6__last . H213: true . H214: fld_tree_level(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u7__first . H215: fld_tree_level(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u7__last . H216: fld_reserved(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u16__first . H217: fld_reserved(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u16__last . 
H218: fld_byte_count_msb(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u32__first . H219: fld_byte_count_msb(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u32__last . H220: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u64__first . H221: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u64__last . H222: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= output_byte_count_t__first . H223: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= output_byte_count_t__last . H224: ((fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 >= positive_output_block_count_t__first . H225: ((fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 <= positive_output_block_count_t__last . H226: 64 <> 0 . H227: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63 >= integer__base__first . H228: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63 <= integer__base__last . H229: 0 >= output_block_count_t__first . H230: 0 <= output_block_count_t__last . -> C1: fld_hash_bit_len(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0)))) > 0 . C2: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= result__index__subtype__1__last + 1 . C3: 0 * skein_512_block_bytes_c < (fld_hash_bit_len(fld_h( local_ctx__2)) + 7) div 8 . C4: 0 * skein_512_block_bytes_c < result__index__subtype__1__last + 1 . C5: 0 < ((fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 . C6: ((fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 = (( fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 . C7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . C8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . C9: fld_byte_count(fld_h(ctx)) >= natural__first . C10: fld_byte_count(fld_h(ctx)) <= natural__last . C11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . C12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . C13: true . C14: true . C15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . C16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . C17: true . C18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . C19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . C20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . C21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . C22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . C23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . C24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . C25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . C26: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . C27: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . 
C28: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . C29: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . C30: result__index__subtype__1__first = 0 . C31: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . procedure_skein_512_final_19. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: result__index__subtype__1__first = 0 . H6: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) > 0 . H27: 0 >= spark__unsigned__byte__first . H28: 0 <= spark__unsigned__byte__last . H29: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H30: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H31: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . 
H32: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H33: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H34: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H35: true . H36: true . H37: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H38: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H39: true . H40: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H41: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H42: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H43: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H44: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H45: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H46: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H47: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H48: not (fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) < skein_512_block_bytes_c) . H49: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H50: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H51: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H52: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . 
H53: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H54: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H55: true . H56: true . H57: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H58: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H59: true . H60: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H61: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H62: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H63: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H64: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H65: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H66: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H67: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H68: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H69: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H70: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H71: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H72: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H73: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H74: true . H75: true . 
H76: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H77: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H78: true . H79: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H80: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H81: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H82: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H83: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H84: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H85: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H86: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H87: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H88: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H89: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___2]) <= spark__unsigned__byte__last))) . H90: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__u64__last))) . H91: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H92: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H93: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= hash_bit_length__first . H94: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= hash_bit_length__last . H95: true . H96: true . 
H97: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u6__first . H98: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u6__last . H99: true . H100: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u7__first . H101: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u7__last . H102: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) >= spark__unsigned__u16__first . H103: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words( fld_h(ctx), upf_final_block(fld_tweak_words(fld_h( ctx)), true)))))) <= spark__unsigned__u16__last . H104: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u32__first . H105: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u32__last . H106: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) >= spark__unsigned__u64__first . H107: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block(fld_tweak_words( fld_h(ctx)), true)))))) <= spark__unsigned__u64__last . H108: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) >= spark__unsigned__byte__first) and (element(fld_b(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true)))), [i___1]) <= spark__unsigned__byte__last))) . H109: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H110: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H111: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= natural__first . H112: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= natural__last . H113: 1 >= positive_block_512_count_t__first . H114: 1 <= positive_block_512_count_t__last . H115: 0 >= natural__first . H116: 0 <= natural__last . H117: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= initialized_hash_bit_length__first . H118: fld_hash_bit_len(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= initialized_hash_bit_length__last . H119: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) >= skein_512_block_bytes_count__first . H120: fld_byte_count(fld_h(upf_h(ctx, upf_tweak_words(fld_h( ctx), upf_final_block(fld_tweak_words(fld_h(ctx)), true))))) <= skein_512_block_bytes_count__last . 
H121: skein_512_block_bytes_index__first = 0 . H122: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H123: 0 + 63 <= skein_512_block_bytes_index__last . H124: skein_512_block_bytes_index__last <= natural__last . H125: 0 <= natural__last - 63 . H126: fld_hash_bit_len(fld_h(local_ctx__2)) >= initialized_hash_bit_length__first . H127: fld_hash_bit_len(fld_h(local_ctx__2)) <= initialized_hash_bit_length__last . H128: fld_hash_bit_len(fld_h(local_ctx__2)) = fld_hash_bit_len(fld_h( upf_h(ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H129: fld_byte_count(fld_h(local_ctx__2)) >= skein_512_block_bytes_count__first . H130: fld_byte_count(fld_h(local_ctx__2)) <= skein_512_block_bytes_count__last . H131: fld_byte_count(fld_h(local_ctx__2)) = fld_byte_count(fld_h(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_final_block( fld_tweak_words(fld_h(ctx)), true))))) . H132: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H133: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H134: fld_byte_count(fld_h(local_ctx__2)) >= natural__first . H135: fld_byte_count(fld_h(local_ctx__2)) <= natural__last . H136: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first . H137: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last . H138: true . H139: true . H140: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first . H141: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last . H142: true . H143: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first . H144: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last . H145: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first . H146: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last . H147: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first . H148: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last . H149: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first . H150: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last . H151: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H152: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H153: fld_byte_count(fld_h(local_ctx__2)) >= natural__first . H154: fld_byte_count(fld_h(local_ctx__2)) <= natural__last . H155: fld_hash_bit_len(fld_h(local_ctx__2)) >= hash_bit_length__first . 
H156: fld_hash_bit_len(fld_h(local_ctx__2)) <= hash_bit_length__last . H157: true . H158: true . H159: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u6__first . H160: fld_field_type(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u6__last . H161: true . H162: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u7__first . H163: fld_tree_level(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u7__last . H164: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u16__first . H165: fld_reserved(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u16__last . H166: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u32__first . H167: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u32__last . H168: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) >= spark__unsigned__u64__first . H169: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__2))) <= spark__unsigned__u64__last . H170: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= output_byte_count_t__first . H171: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= output_byte_count_t__last . H172: 8 <> 0 . H173: fld_hash_bit_len(fld_h(local_ctx__2)) + 7 >= integer__base__first . H174: fld_hash_bit_len(fld_h(local_ctx__2)) + 7 <= integer__base__last . H175: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= result__index__subtype__1__last + 1 . H176: 0 >= spark__unsigned__byte__first . H177: 0 <= spark__unsigned__byte__last . H178: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_b( local_ctx__2, mk__spark__crypto__byte_seq(0))), [ i___2]) >= spark__unsigned__byte__first) and (element(fld_b( upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))), [ i___2]) <= spark__unsigned__byte__last))) . H179: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_b( local_ctx__2, mk__spark__crypto__byte_seq(0))), [ i___1]) >= spark__unsigned__u64__first) and (element(fld_x( upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))), [ i___1]) <= spark__unsigned__u64__last))) . H180: fld_byte_count(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0)))) >= natural__first . H181: fld_byte_count(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0)))) <= natural__last . H182: fld_hash_bit_len(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0)))) >= hash_bit_length__first . H183: fld_hash_bit_len(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0)))) <= hash_bit_length__last . H184: true . H185: true . H186: fld_field_type(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u6__first . H187: fld_field_type(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u6__last . H188: true . H189: fld_tree_level(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u7__first . H190: fld_tree_level(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u7__last . H191: fld_reserved(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u16__first . H192: fld_reserved(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u16__last . 
H193: fld_byte_count_msb(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u32__first . H194: fld_byte_count_msb(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u32__last . H195: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) >= spark__unsigned__u64__first . H196: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0))))) <= spark__unsigned__u64__last . H197: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 >= output_byte_count_t__first . H198: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= output_byte_count_t__last . H199: ((fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 >= positive_output_block_count_t__first . H200: ((fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 <= positive_output_block_count_t__last . H201: 64 <> 0 . H202: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63 >= integer__base__first . H203: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63 <= integer__base__last . H204: 0 >= output_block_count_t__first . H205: 0 <= output_block_count_t__last . -> C1: fld_hash_bit_len(fld_h(upf_b(local_ctx__2, mk__spark__crypto__byte_seq(0)))) > 0 . C2: (fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 <= result__index__subtype__1__last + 1 . C3: 0 * skein_512_block_bytes_c < (fld_hash_bit_len(fld_h( local_ctx__2)) + 7) div 8 . C4: 0 * skein_512_block_bytes_c < result__index__subtype__1__last + 1 . C5: 0 < ((fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 . C6: ((fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 = (( fld_hash_bit_len(fld_h(local_ctx__2)) + 7) div 8 + 63) div 64 . C7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . C8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . C9: fld_byte_count(fld_h(ctx)) >= natural__first . C10: fld_byte_count(fld_h(ctx)) <= natural__last . C11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . C12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . C13: true . C14: true . C15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . C16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . C17: true . C18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . C19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . C20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . C21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . C22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . C23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . C24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . C25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . C26: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . C27: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . 
C28: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . C29: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . C30: result__index__subtype__1__first = 0 . C31: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . For path(s) from assertion of line 930 to assertion of line 930: procedure_skein_512_final_20. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: byte_count <= result__index__subtype__1__last + 1 . H3: blocks_done * skein_512_block_bytes_c < byte_count . H4: blocks_done * skein_512_block_bytes_c < result__index__subtype__1__last + 1 . H5: blocks_done < blocks_required . H6: blocks_required = (byte_count + 63) div 64 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H27: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H28: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H29: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H30: result__index__subtype__1__first = 0 . H31: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H32: blocks_done >= output_block_count_t__first . H33: blocks_done <= output_block_count_t__last . H34: blocks_done >= spark__unsigned__u64__first . H35: blocks_done <= spark__unsigned__u64__last . H36: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H37: blocks_done >= spark__unsigned__u64__first . H38: blocks_done <= spark__unsigned__u64__last . H39: fld_hash_bit_len(fld_h(local_ctx__3)) > 0 . H40: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last))) . 
H41: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H42: fld_byte_count(fld_h(local_ctx__3)) >= natural__first . H43: fld_byte_count(fld_h(local_ctx__3)) <= natural__last . H44: fld_hash_bit_len(fld_h(local_ctx__3)) >= hash_bit_length__first . H45: fld_hash_bit_len(fld_h(local_ctx__3)) <= hash_bit_length__last . H46: true . H47: true . H48: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u6__first . H49: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u6__last . H50: true . H51: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u7__first . H52: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u7__last . H53: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u16__first . H54: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u16__last . H55: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u32__first . H56: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u32__last . H57: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u64__first . H58: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u64__last . H59: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H60: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H61: fld_byte_count(fld_h(local_ctx__3)) >= natural__first . H62: fld_byte_count(fld_h(local_ctx__3)) <= natural__last . H63: fld_hash_bit_len(fld_h(local_ctx__3)) >= hash_bit_length__first . H64: fld_hash_bit_len(fld_h(local_ctx__3)) <= hash_bit_length__last . H65: true . H66: true . H67: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u6__first . H68: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u6__last . H69: true . H70: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u7__first . H71: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u7__last . H72: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u16__first . H73: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u16__last . H74: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u32__first . H75: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u32__last . H76: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u64__first . H77: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u64__last . H78: skein_block_type_out >= spark__unsigned__u6__first . H79: skein_block_type_out <= spark__unsigned__u6__last . 
H80: fld_h(local_ctx__4) = upf_byte_count(upf_tweak_words(fld_h( local_ctx__3), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_out, first_block := true, final_block := true)), 0) . H81: fld_hash_bit_len(fld_h(local_ctx__4)) = fld_hash_bit_len(fld_h( local_ctx__3)) . H82: fld_byte_count(fld_h(local_ctx__4)) = 0 . H83: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H84: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . H85: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . H86: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H87: true . H88: true . H89: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . H90: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H91: true . H92: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H93: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H94: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . H95: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H96: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H97: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . H98: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . H99: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . H100: fld_x(local_ctx__4) = fld_x(local_ctx__3) . H101: fld_b(local_ctx__4) = fld_b(local_ctx__3) . H102: local_ctx__4 = upf_h(local_ctx__3, fld_h(local_ctx__4)) . H103: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last))) . H104: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__4), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last))) . H105: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H106: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . H107: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . H108: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H109: true . H110: true . H111: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . H112: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H113: true . H114: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H115: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H116: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . H117: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H118: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H119: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . H120: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . 
H121: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . H122: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last))) . H123: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__4), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last))) . H124: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H125: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . H126: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . H127: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H128: true . H129: true . H130: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . H131: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H132: true . H133: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H134: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H135: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . H136: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H137: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H138: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . H139: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . H140: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . H141: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___1]) <= spark__unsigned__byte__last))) . H142: 8 >= natural__first . H143: 8 <= natural__last . H144: 1 >= positive_block_512_count_t__first . H145: 1 <= positive_block_512_count_t__last . H146: 0 >= natural__first . H147: 0 <= natural__last . H148: fld_hash_bit_len(fld_h(local_ctx__4)) >= initialized_hash_bit_length__first . H149: fld_hash_bit_len(fld_h(local_ctx__4)) <= initialized_hash_bit_length__last . H150: fld_byte_count(fld_h(local_ctx__4)) >= skein_512_block_bytes_count__first . H151: fld_byte_count(fld_h(local_ctx__4)) <= skein_512_block_bytes_count__last . H152: skein_512_block_bytes_index__first = 0 . H153: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H154: 0 + 63 <= skein_512_block_bytes_index__last . H155: skein_512_block_bytes_index__last <= natural__last . H156: 0 <= natural__last - 63 . H157: fld_hash_bit_len(fld_h(local_ctx__5)) >= initialized_hash_bit_length__first . H158: fld_hash_bit_len(fld_h(local_ctx__5)) <= initialized_hash_bit_length__last . H159: fld_hash_bit_len(fld_h(local_ctx__5)) = fld_hash_bit_len(fld_h( local_ctx__4)) . H160: fld_byte_count(fld_h(local_ctx__5)) >= skein_512_block_bytes_count__first . H161: fld_byte_count(fld_h(local_ctx__5)) <= skein_512_block_bytes_count__last . H162: fld_byte_count(fld_h(local_ctx__5)) = fld_byte_count(fld_h( local_ctx__4)) . 
H163: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__5), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__5), [i___2]) <= spark__unsigned__byte__last))) . H164: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__5), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__5), [i___1]) <= spark__unsigned__u64__last))) . H165: fld_byte_count(fld_h(local_ctx__5)) >= natural__first . H166: fld_byte_count(fld_h(local_ctx__5)) <= natural__last . H167: fld_hash_bit_len(fld_h(local_ctx__5)) >= hash_bit_length__first . H168: fld_hash_bit_len(fld_h(local_ctx__5)) <= hash_bit_length__last . H169: true . H170: true . H171: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u6__first . H172: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u6__last . H173: true . H174: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u7__first . H175: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u7__last . H176: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u16__first . H177: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u16__last . H178: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u32__first . H179: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u32__last . H180: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u64__first . H181: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u64__last . H182: blocks_done >= output_block_count_t__first . H183: blocks_done <= output_block_count_t__last . H184: byte_count >= output_byte_count_t__first . H185: byte_count <= output_byte_count_t__last . H186: byte_count - blocks_done * skein_512_block_bytes_c >= natural__first . H187: byte_count - blocks_done * skein_512_block_bytes_c <= natural__last . H188: blocks_done * skein_512_block_bytes_c >= integer__base__first . H189: blocks_done * skein_512_block_bytes_c <= integer__base__last . H190: byte_count - blocks_done * skein_512_block_bytes_c >= natural__first . H191: byte_count - blocks_done * skein_512_block_bytes_c <= natural__last . H192: byte_count - blocks_done * skein_512_block_bytes_c >= skein_512_block_bytes_c . H193: skein_512_block_bytes_c >= natural__first . H194: skein_512_block_bytes_c <= natural__last . H195: for_all(i___1: integer, ((i___1 >= result__index__subtype__1__first) and (i___1 <= result__index__subtype__1__last)) -> ((element( result, [i___1]) >= spark__unsigned__byte__first) and (element( result, [i___1]) <= spark__unsigned__byte__last))) . H196: blocks_done >= output_block_count_t__first . H197: blocks_done <= output_block_count_t__last . H198: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__5), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__5), [i___2]) <= spark__unsigned__byte__last))) . H199: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__5), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__5), [i___1]) <= spark__unsigned__u64__last))) . 
H200: fld_byte_count(fld_h(local_ctx__5)) >= natural__first . H201: fld_byte_count(fld_h(local_ctx__5)) <= natural__last . H202: fld_hash_bit_len(fld_h(local_ctx__5)) >= hash_bit_length__first . H203: fld_hash_bit_len(fld_h(local_ctx__5)) <= hash_bit_length__last . H204: true . H205: true . H206: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u6__first . H207: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u6__last . H208: true . H209: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u7__first . H210: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u7__last . H211: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u16__first . H212: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u16__last . H213: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u32__first . H214: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u32__last . H215: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u64__first . H216: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u64__last . H217: skein_512_block_bytes_c >= natural__first . H218: skein_512_block_bytes_c <= natural__last . H219: skein_512_block_bytes_c >= natural__first . H220: skein_512_block_bytes_c <= natural__last . H221: blocks_done * skein_512_block_bytes_c >= natural__first . H222: blocks_done * skein_512_block_bytes_c <= natural__last . H223: result__index__subtype__1__first = 0 . H224: skein_512_state_words_index__first = 0 . H225: result__index__subtype__1__last >= blocks_done * skein_512_block_bytes_c + ( skein_512_block_bytes_c - 1) . H226: skein_512_block_bytes_c <= ( skein_512_state_words_index__last + 1) * 8 . H227: blocks_done * skein_512_block_bytes_c >= integer__base__first . H228: blocks_done * skein_512_block_bytes_c <= integer__base__last . H229: for_all(i___1: integer, ((i___1 >= result__index__subtype__1__first) and (i___1 <= result__index__subtype__1__last)) -> ((element(result__6, [ i___1]) >= spark__unsigned__byte__first) and (element( result__6, [i___1]) <= spark__unsigned__byte__last))) . H230: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H231: blocks_done >= output_block_count_t__first . H232: blocks_done <= output_block_count_t__last . H233: blocks_done + 1 >= output_block_count_t__first . H234: blocks_done + 1 <= output_block_count_t__last . H235: blocks_done + 1 >= output_block_count_t__first . H236: blocks_done + 1 <= output_block_count_t__last . H237: blocks_required >= positive_output_block_count_t__first . H238: blocks_required <= positive_output_block_count_t__last . H239: not (blocks_done + 1 >= blocks_required) . -> C1: fld_hash_bit_len(fld_h(upf_x(local_ctx__5, x))) > 0 . C2: byte_count <= result__index__subtype__1__last + 1 . C3: (blocks_done + 1) * skein_512_block_bytes_c < byte_count . C4: (blocks_done + 1) * skein_512_block_bytes_c < result__index__subtype__1__last + 1 . C5: blocks_done + 1 < blocks_required . C6: blocks_required = (byte_count + 63) div 64 . 
C7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . C8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . C9: fld_byte_count(fld_h(ctx)) >= natural__first . C10: fld_byte_count(fld_h(ctx)) <= natural__last . C11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . C12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . C13: true . C14: true . C15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . C16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . C17: true . C18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . C19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . C20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . C21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . C22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . C23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . C24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . C25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . C26: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . C27: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . C28: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . C29: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . C30: result__index__subtype__1__first = 0 . C31: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . procedure_skein_512_final_21. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: byte_count <= result__index__subtype__1__last + 1 . H3: blocks_done * skein_512_block_bytes_c < byte_count . H4: blocks_done * skein_512_block_bytes_c < result__index__subtype__1__last + 1 . H5: blocks_done < blocks_required . H6: blocks_required = (byte_count + 63) div 64 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . 
H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H27: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H28: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H29: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H30: result__index__subtype__1__first = 0 . H31: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H32: blocks_done >= output_block_count_t__first . H33: blocks_done <= output_block_count_t__last . H34: blocks_done >= spark__unsigned__u64__first . H35: blocks_done <= spark__unsigned__u64__last . H36: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H37: blocks_done >= spark__unsigned__u64__first . H38: blocks_done <= spark__unsigned__u64__last . H39: fld_hash_bit_len(fld_h(local_ctx__3)) > 0 . H40: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H41: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H42: fld_byte_count(fld_h(local_ctx__3)) >= natural__first . H43: fld_byte_count(fld_h(local_ctx__3)) <= natural__last . H44: fld_hash_bit_len(fld_h(local_ctx__3)) >= hash_bit_length__first . H45: fld_hash_bit_len(fld_h(local_ctx__3)) <= hash_bit_length__last . H46: true . H47: true . H48: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u6__first . H49: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u6__last . H50: true . H51: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u7__first . H52: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u7__last . H53: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u16__first . H54: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u16__last . H55: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u32__first . H56: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u32__last . H57: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u64__first . H58: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u64__last . H59: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last))) . 
H60: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H61: fld_byte_count(fld_h(local_ctx__3)) >= natural__first . H62: fld_byte_count(fld_h(local_ctx__3)) <= natural__last . H63: fld_hash_bit_len(fld_h(local_ctx__3)) >= hash_bit_length__first . H64: fld_hash_bit_len(fld_h(local_ctx__3)) <= hash_bit_length__last . H65: true . H66: true . H67: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u6__first . H68: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u6__last . H69: true . H70: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u7__first . H71: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u7__last . H72: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u16__first . H73: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u16__last . H74: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u32__first . H75: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u32__last . H76: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u64__first . H77: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u64__last . H78: skein_block_type_out >= spark__unsigned__u6__first . H79: skein_block_type_out <= spark__unsigned__u6__last . H80: fld_h(local_ctx__4) = upf_byte_count(upf_tweak_words(fld_h( local_ctx__3), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_out, first_block := true, final_block := true)), 0) . H81: fld_hash_bit_len(fld_h(local_ctx__4)) = fld_hash_bit_len(fld_h( local_ctx__3)) . H82: fld_byte_count(fld_h(local_ctx__4)) = 0 . H83: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H84: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . H85: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . H86: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H87: true . H88: true . H89: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . H90: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H91: true . H92: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H93: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H94: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . H95: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H96: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H97: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . H98: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . H99: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . H100: fld_x(local_ctx__4) = fld_x(local_ctx__3) . H101: fld_b(local_ctx__4) = fld_b(local_ctx__3) . H102: local_ctx__4 = upf_h(local_ctx__3, fld_h(local_ctx__4)) . 
H103: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last))) . H104: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__4), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last))) . H105: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H106: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . H107: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . H108: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H109: true . H110: true . H111: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . H112: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H113: true . H114: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H115: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H116: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . H117: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H118: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H119: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . H120: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . H121: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . H122: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last))) . H123: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__4), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last))) . H124: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H125: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . H126: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . H127: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H128: true . H129: true . H130: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . H131: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H132: true . H133: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H134: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H135: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . H136: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H137: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H138: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . H139: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . 
H140: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . H141: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___1]) <= spark__unsigned__byte__last))) . H142: 8 >= natural__first . H143: 8 <= natural__last . H144: 1 >= positive_block_512_count_t__first . H145: 1 <= positive_block_512_count_t__last . H146: 0 >= natural__first . H147: 0 <= natural__last . H148: fld_hash_bit_len(fld_h(local_ctx__4)) >= initialized_hash_bit_length__first . H149: fld_hash_bit_len(fld_h(local_ctx__4)) <= initialized_hash_bit_length__last . H150: fld_byte_count(fld_h(local_ctx__4)) >= skein_512_block_bytes_count__first . H151: fld_byte_count(fld_h(local_ctx__4)) <= skein_512_block_bytes_count__last . H152: skein_512_block_bytes_index__first = 0 . H153: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H154: 0 + 63 <= skein_512_block_bytes_index__last . H155: skein_512_block_bytes_index__last <= natural__last . H156: 0 <= natural__last - 63 . H157: fld_hash_bit_len(fld_h(local_ctx__5)) >= initialized_hash_bit_length__first . H158: fld_hash_bit_len(fld_h(local_ctx__5)) <= initialized_hash_bit_length__last . H159: fld_hash_bit_len(fld_h(local_ctx__5)) = fld_hash_bit_len(fld_h( local_ctx__4)) . H160: fld_byte_count(fld_h(local_ctx__5)) >= skein_512_block_bytes_count__first . H161: fld_byte_count(fld_h(local_ctx__5)) <= skein_512_block_bytes_count__last . H162: fld_byte_count(fld_h(local_ctx__5)) = fld_byte_count(fld_h( local_ctx__4)) . H163: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__5), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__5), [i___2]) <= spark__unsigned__byte__last))) . H164: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__5), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__5), [i___1]) <= spark__unsigned__u64__last))) . H165: fld_byte_count(fld_h(local_ctx__5)) >= natural__first . H166: fld_byte_count(fld_h(local_ctx__5)) <= natural__last . H167: fld_hash_bit_len(fld_h(local_ctx__5)) >= hash_bit_length__first . H168: fld_hash_bit_len(fld_h(local_ctx__5)) <= hash_bit_length__last . H169: true . H170: true . H171: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u6__first . H172: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u6__last . H173: true . H174: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u7__first . H175: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u7__last . H176: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u16__first . H177: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u16__last . H178: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u32__first . H179: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u32__last . H180: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u64__first . H181: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u64__last . H182: blocks_done >= output_block_count_t__first . 
H183: blocks_done <= output_block_count_t__last . H184: byte_count >= output_byte_count_t__first . H185: byte_count <= output_byte_count_t__last . H186: byte_count - blocks_done * skein_512_block_bytes_c >= natural__first . H187: byte_count - blocks_done * skein_512_block_bytes_c <= natural__last . H188: blocks_done * skein_512_block_bytes_c >= integer__base__first . H189: blocks_done * skein_512_block_bytes_c <= integer__base__last . H190: byte_count - blocks_done * skein_512_block_bytes_c >= natural__first . H191: byte_count - blocks_done * skein_512_block_bytes_c <= natural__last . H192: not (byte_count - blocks_done * skein_512_block_bytes_c >= skein_512_block_bytes_c) . H193: for_all(i___1: integer, ((i___1 >= result__index__subtype__1__first) and (i___1 <= result__index__subtype__1__last)) -> ((element( result, [i___1]) >= spark__unsigned__byte__first) and (element( result, [i___1]) <= spark__unsigned__byte__last))) . H194: blocks_done >= output_block_count_t__first . H195: blocks_done <= output_block_count_t__last . H196: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__5), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__5), [i___2]) <= spark__unsigned__byte__last))) . H197: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__5), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__5), [i___1]) <= spark__unsigned__u64__last))) . H198: fld_byte_count(fld_h(local_ctx__5)) >= natural__first . H199: fld_byte_count(fld_h(local_ctx__5)) <= natural__last . H200: fld_hash_bit_len(fld_h(local_ctx__5)) >= hash_bit_length__first . H201: fld_hash_bit_len(fld_h(local_ctx__5)) <= hash_bit_length__last . H202: true . H203: true . H204: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u6__first . H205: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u6__last . H206: true . H207: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u7__first . H208: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u7__last . H209: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u16__first . H210: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u16__last . H211: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u32__first . H212: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u32__last . H213: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u64__first . H214: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u64__last . H215: byte_count - blocks_done * skein_512_block_bytes_c >= natural__first . H216: byte_count - blocks_done * skein_512_block_bytes_c <= natural__last . H217: byte_count - blocks_done * skein_512_block_bytes_c >= natural__first . H218: byte_count - blocks_done * skein_512_block_bytes_c <= natural__last . H219: blocks_done * skein_512_block_bytes_c >= natural__first . H220: blocks_done * skein_512_block_bytes_c <= natural__last . H221: result__index__subtype__1__first = 0 . H222: skein_512_state_words_index__first = 0 . H223: result__index__subtype__1__last >= blocks_done * skein_512_block_bytes_c + (byte_count - blocks_done * skein_512_block_bytes_c - 1) . 
H224: byte_count - blocks_done * skein_512_block_bytes_c <= ( skein_512_state_words_index__last + 1) * 8 . H225: blocks_done * skein_512_block_bytes_c >= integer__base__first . H226: blocks_done * skein_512_block_bytes_c <= integer__base__last . H227: for_all(i___1: integer, ((i___1 >= result__index__subtype__1__first) and (i___1 <= result__index__subtype__1__last)) -> ((element(result__6, [ i___1]) >= spark__unsigned__byte__first) and (element( result__6, [i___1]) <= spark__unsigned__byte__last))) . H228: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H229: blocks_done >= output_block_count_t__first . H230: blocks_done <= output_block_count_t__last . H231: blocks_done + 1 >= output_block_count_t__first . H232: blocks_done + 1 <= output_block_count_t__last . H233: blocks_done + 1 >= output_block_count_t__first . H234: blocks_done + 1 <= output_block_count_t__last . H235: blocks_required >= positive_output_block_count_t__first . H236: blocks_required <= positive_output_block_count_t__last . H237: not (blocks_done + 1 >= blocks_required) . -> C1: fld_hash_bit_len(fld_h(upf_x(local_ctx__5, x))) > 0 . C2: byte_count <= result__index__subtype__1__last + 1 . C3: (blocks_done + 1) * skein_512_block_bytes_c < byte_count . C4: (blocks_done + 1) * skein_512_block_bytes_c < result__index__subtype__1__last + 1 . C5: blocks_done + 1 < blocks_required . C6: blocks_required = (byte_count + 63) div 64 . C7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . C8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . C9: fld_byte_count(fld_h(ctx)) >= natural__first . C10: fld_byte_count(fld_h(ctx)) <= natural__last . C11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . C12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . C13: true . C14: true . C15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . C16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . C17: true . C18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . C19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . C20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . C21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . C22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . C23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . C24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . C25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . C26: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . C27: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . C28: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . C29: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . C30: result__index__subtype__1__first = 0 . 
C31: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . For path(s) from assertion of line 930 to precondition check associated with statement of line 937: procedure_skein_512_final_22. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: byte_count <= result__index__subtype__1__last + 1 . H3: blocks_done * skein_512_block_bytes_c < byte_count . H4: blocks_done * skein_512_block_bytes_c < result__index__subtype__1__last + 1 . H5: blocks_done < blocks_required . H6: blocks_required = (byte_count + 63) div 64 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H27: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H28: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H29: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H30: result__index__subtype__1__first = 0 . H31: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H32: blocks_done >= output_block_count_t__first . H33: blocks_done <= output_block_count_t__last . -> C1: blocks_done >= spark__unsigned__u64__first . C2: blocks_done <= spark__unsigned__u64__last . C3: fld_hash_bit_len(fld_h(local_ctx)) > 0 . C4: blocks_done >= spark__unsigned__u64__first . C5: blocks_done <= spark__unsigned__u64__last . For path(s) from assertion of line 930 to run-time check associated with statement of line 939: procedure_skein_512_final_23. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: byte_count <= result__index__subtype__1__last + 1 . H3: blocks_done * skein_512_block_bytes_c < byte_count . H4: blocks_done * skein_512_block_bytes_c < result__index__subtype__1__last + 1 . H5: blocks_done < blocks_required . H6: blocks_required = (byte_count + 63) div 64 . 
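Conclusions C3 and C4 of the path ending in C31 above reduce to a fact about the ceiling division in blocks_required = (byte_count + 63) div 64, combined with H237, not (blocks_done + 1 >= blocks_required). A minimal sketch of the arithmetic, assuming skein_512_block_bytes_c = 64 (Skein-512's block size in bytes, matching the literal divisor 64) and div denoting integer floor division:

   blocks_required = (byte_count + 63) div 64
     =>  blocks_required * 64 <= byte_count + 63 .                 [definition of div]
   not (blocks_done + 1 >= blocks_required)
     =>  blocks_done + 1 <= blocks_required - 1
     =>  (blocks_done + 1) * 64 <= (blocks_required - 1) * 64
                                 <= byte_count + 63 - 64
                                 <  byte_count .                   [C3]

C4, (blocks_done + 1) * skein_512_block_bytes_c < result__index__subtype__1__last + 1, then follows by chaining this bound with the hypothesis byte_count <= result__index__subtype__1__last + 1, and C5 is the direct restatement of H237.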
H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H27: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H28: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H29: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H30: result__index__subtype__1__first = 0 . H31: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H32: blocks_done >= output_block_count_t__first . H33: blocks_done <= output_block_count_t__last . H34: blocks_done >= spark__unsigned__u64__first . H35: blocks_done <= spark__unsigned__u64__last . H36: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H37: blocks_done >= spark__unsigned__u64__first . H38: blocks_done <= spark__unsigned__u64__last . H39: fld_hash_bit_len(fld_h(local_ctx__3)) > 0 . H40: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H41: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H42: fld_byte_count(fld_h(local_ctx__3)) >= natural__first . H43: fld_byte_count(fld_h(local_ctx__3)) <= natural__last . H44: fld_hash_bit_len(fld_h(local_ctx__3)) >= hash_bit_length__first . H45: fld_hash_bit_len(fld_h(local_ctx__3)) <= hash_bit_length__last . H46: true . H47: true . H48: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u6__first . H49: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u6__last . H50: true . H51: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u7__first . 
H52: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u7__last . H53: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u16__first . H54: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u16__last . H55: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u32__first . H56: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u32__last . H57: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u64__first . H58: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u64__last . H59: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H60: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H61: fld_byte_count(fld_h(local_ctx__3)) >= natural__first . H62: fld_byte_count(fld_h(local_ctx__3)) <= natural__last . H63: fld_hash_bit_len(fld_h(local_ctx__3)) >= hash_bit_length__first . H64: fld_hash_bit_len(fld_h(local_ctx__3)) <= hash_bit_length__last . H65: true . H66: true . H67: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u6__first . H68: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u6__last . H69: true . H70: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u7__first . H71: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u7__last . H72: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u16__first . H73: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u16__last . H74: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u32__first . H75: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u32__last . H76: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u64__first . H77: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u64__last . -> C1: skein_block_type_out >= spark__unsigned__u6__first . C2: skein_block_type_out <= spark__unsigned__u6__last . For path(s) from assertion of line 930 to precondition check associated with statement of line 947: procedure_skein_512_final_24. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: byte_count <= result__index__subtype__1__last + 1 . H3: blocks_done * skein_512_block_bytes_c < byte_count . H4: blocks_done * skein_512_block_bytes_c < result__index__subtype__1__last + 1 . H5: blocks_done < blocks_required . H6: blocks_required = (byte_count + 63) div 64 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . 
H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H27: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H28: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H29: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H30: result__index__subtype__1__first = 0 . H31: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H32: blocks_done >= output_block_count_t__first . H33: blocks_done <= output_block_count_t__last . H34: blocks_done >= spark__unsigned__u64__first . H35: blocks_done <= spark__unsigned__u64__last . H36: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H37: blocks_done >= spark__unsigned__u64__first . H38: blocks_done <= spark__unsigned__u64__last . H39: fld_hash_bit_len(fld_h(local_ctx__3)) > 0 . H40: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H41: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H42: fld_byte_count(fld_h(local_ctx__3)) >= natural__first . H43: fld_byte_count(fld_h(local_ctx__3)) <= natural__last . H44: fld_hash_bit_len(fld_h(local_ctx__3)) >= hash_bit_length__first . H45: fld_hash_bit_len(fld_h(local_ctx__3)) <= hash_bit_length__last . H46: true . H47: true . H48: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u6__first . H49: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u6__last . H50: true . H51: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u7__first . H52: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u7__last . H53: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u16__first . H54: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u16__last . H55: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u32__first . H56: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u32__last . 
H57: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u64__first . H58: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u64__last . H59: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H60: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H61: fld_byte_count(fld_h(local_ctx__3)) >= natural__first . H62: fld_byte_count(fld_h(local_ctx__3)) <= natural__last . H63: fld_hash_bit_len(fld_h(local_ctx__3)) >= hash_bit_length__first . H64: fld_hash_bit_len(fld_h(local_ctx__3)) <= hash_bit_length__last . H65: true . H66: true . H67: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u6__first . H68: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u6__last . H69: true . H70: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u7__first . H71: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u7__last . H72: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u16__first . H73: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u16__last . H74: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u32__first . H75: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u32__last . H76: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u64__first . H77: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u64__last . H78: skein_block_type_out >= spark__unsigned__u6__first . H79: skein_block_type_out <= spark__unsigned__u6__last . H80: fld_h(local_ctx__4) = upf_byte_count(upf_tweak_words(fld_h( local_ctx__3), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_out, first_block := true, final_block := true)), 0) . H81: fld_hash_bit_len(fld_h(local_ctx__4)) = fld_hash_bit_len(fld_h( local_ctx__3)) . H82: fld_byte_count(fld_h(local_ctx__4)) = 0 . H83: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H84: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . H85: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . H86: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H87: true . H88: true . H89: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . H90: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H91: true . H92: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H93: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H94: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . H95: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H96: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H97: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . 
H98: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . H99: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . H100: fld_x(local_ctx__4) = fld_x(local_ctx__3) . H101: fld_b(local_ctx__4) = fld_b(local_ctx__3) . H102: local_ctx__4 = upf_h(local_ctx__3, fld_h(local_ctx__4)) . H103: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last))) . H104: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__4), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last))) . H105: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H106: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . H107: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . H108: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H109: true . H110: true . H111: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . H112: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H113: true . H114: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H115: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H116: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . H117: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H118: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H119: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . H120: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . H121: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . H122: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last))) . H123: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__4), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last))) . H124: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H125: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . H126: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . H127: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H128: true . H129: true . H130: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . H131: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H132: true . H133: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H134: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H135: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . 
H136: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H137: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H138: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . H139: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . H140: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . H141: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___1]) <= spark__unsigned__byte__last))) . -> C1: 8 >= natural__first . C2: 8 <= natural__last . C3: 1 >= positive_block_512_count_t__first . C4: 1 <= positive_block_512_count_t__last . C5: 0 >= natural__first . C6: 0 <= natural__last . C7: fld_hash_bit_len(fld_h(local_ctx__4)) >= initialized_hash_bit_length__first . C8: fld_hash_bit_len(fld_h(local_ctx__4)) <= initialized_hash_bit_length__last . C9: fld_byte_count(fld_h(local_ctx__4)) >= skein_512_block_bytes_count__first . C10: fld_byte_count(fld_h(local_ctx__4)) <= skein_512_block_bytes_count__last . C11: skein_512_block_bytes_index__first = 0 . C12: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . C13: 0 + 63 <= skein_512_block_bytes_index__last . C14: skein_512_block_bytes_index__last <= natural__last . C15: 0 <= natural__last - 63 . For path(s) from assertion of line 930 to run-time check associated with statement of line 953: procedure_skein_512_final_25. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: byte_count <= result__index__subtype__1__last + 1 . H3: blocks_done * skein_512_block_bytes_c < byte_count . H4: blocks_done * skein_512_block_bytes_c < result__index__subtype__1__last + 1 . H5: blocks_done < blocks_required . H6: blocks_required = (byte_count + 63) div 64 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . 
H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H27: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H28: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H29: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H30: result__index__subtype__1__first = 0 . H31: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H32: blocks_done >= output_block_count_t__first . H33: blocks_done <= output_block_count_t__last . H34: blocks_done >= spark__unsigned__u64__first . H35: blocks_done <= spark__unsigned__u64__last . H36: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H37: blocks_done >= spark__unsigned__u64__first . H38: blocks_done <= spark__unsigned__u64__last . H39: fld_hash_bit_len(fld_h(local_ctx__3)) > 0 . H40: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H41: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H42: fld_byte_count(fld_h(local_ctx__3)) >= natural__first . H43: fld_byte_count(fld_h(local_ctx__3)) <= natural__last . H44: fld_hash_bit_len(fld_h(local_ctx__3)) >= hash_bit_length__first . H45: fld_hash_bit_len(fld_h(local_ctx__3)) <= hash_bit_length__last . H46: true . H47: true . H48: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u6__first . H49: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u6__last . H50: true . H51: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u7__first . H52: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u7__last . H53: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u16__first . H54: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u16__last . H55: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u32__first . H56: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u32__last . H57: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u64__first . H58: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u64__last . H59: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H60: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H61: fld_byte_count(fld_h(local_ctx__3)) >= natural__first . H62: fld_byte_count(fld_h(local_ctx__3)) <= natural__last . H63: fld_hash_bit_len(fld_h(local_ctx__3)) >= hash_bit_length__first . H64: fld_hash_bit_len(fld_h(local_ctx__3)) <= hash_bit_length__last . H65: true . H66: true . 
H67: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u6__first . H68: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u6__last . H69: true . H70: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u7__first . H71: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u7__last . H72: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u16__first . H73: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u16__last . H74: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u32__first . H75: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u32__last . H76: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u64__first . H77: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u64__last . H78: skein_block_type_out >= spark__unsigned__u6__first . H79: skein_block_type_out <= spark__unsigned__u6__last . H80: fld_h(local_ctx__4) = upf_byte_count(upf_tweak_words(fld_h( local_ctx__3), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_out, first_block := true, final_block := true)), 0) . H81: fld_hash_bit_len(fld_h(local_ctx__4)) = fld_hash_bit_len(fld_h( local_ctx__3)) . H82: fld_byte_count(fld_h(local_ctx__4)) = 0 . H83: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H84: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . H85: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . H86: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H87: true . H88: true . H89: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . H90: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H91: true . H92: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H93: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H94: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . H95: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H96: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H97: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . H98: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . H99: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . H100: fld_x(local_ctx__4) = fld_x(local_ctx__3) . H101: fld_b(local_ctx__4) = fld_b(local_ctx__3) . H102: local_ctx__4 = upf_h(local_ctx__3, fld_h(local_ctx__4)) . H103: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last))) . H104: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__4), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last))) . H105: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H106: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . 
H107: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . H108: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H109: true . H110: true . H111: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . H112: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H113: true . H114: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H115: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H116: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . H117: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H118: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H119: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . H120: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . H121: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . H122: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last))) . H123: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__4), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last))) . H124: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H125: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . H126: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . H127: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H128: true . H129: true . H130: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . H131: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H132: true . H133: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H134: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H135: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . H136: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H137: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H138: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . H139: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . H140: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . H141: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___1]) <= spark__unsigned__byte__last))) . H142: 8 >= natural__first . H143: 8 <= natural__last . H144: 1 >= positive_block_512_count_t__first . H145: 1 <= positive_block_512_count_t__last . H146: 0 >= natural__first . H147: 0 <= natural__last . H148: fld_hash_bit_len(fld_h(local_ctx__4)) >= initialized_hash_bit_length__first . H149: fld_hash_bit_len(fld_h(local_ctx__4)) <= initialized_hash_bit_length__last . 
H150: fld_byte_count(fld_h(local_ctx__4)) >= skein_512_block_bytes_count__first . H151: fld_byte_count(fld_h(local_ctx__4)) <= skein_512_block_bytes_count__last . H152: skein_512_block_bytes_index__first = 0 . H153: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H154: 0 + 63 <= skein_512_block_bytes_index__last . H155: skein_512_block_bytes_index__last <= natural__last . H156: 0 <= natural__last - 63 . H157: fld_hash_bit_len(fld_h(local_ctx__5)) >= initialized_hash_bit_length__first . H158: fld_hash_bit_len(fld_h(local_ctx__5)) <= initialized_hash_bit_length__last . H159: fld_hash_bit_len(fld_h(local_ctx__5)) = fld_hash_bit_len(fld_h( local_ctx__4)) . H160: fld_byte_count(fld_h(local_ctx__5)) >= skein_512_block_bytes_count__first . H161: fld_byte_count(fld_h(local_ctx__5)) <= skein_512_block_bytes_count__last . H162: fld_byte_count(fld_h(local_ctx__5)) = fld_byte_count(fld_h( local_ctx__4)) . H163: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__5), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__5), [i___2]) <= spark__unsigned__byte__last))) . H164: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__5), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__5), [i___1]) <= spark__unsigned__u64__last))) . H165: fld_byte_count(fld_h(local_ctx__5)) >= natural__first . H166: fld_byte_count(fld_h(local_ctx__5)) <= natural__last . H167: fld_hash_bit_len(fld_h(local_ctx__5)) >= hash_bit_length__first . H168: fld_hash_bit_len(fld_h(local_ctx__5)) <= hash_bit_length__last . H169: true . H170: true . H171: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u6__first . H172: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u6__last . H173: true . H174: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u7__first . H175: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u7__last . H176: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u16__first . H177: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u16__last . H178: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u32__first . H179: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u32__last . H180: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u64__first . H181: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u64__last . H182: blocks_done >= output_block_count_t__first . H183: blocks_done <= output_block_count_t__last . H184: byte_count >= output_byte_count_t__first . H185: byte_count <= output_byte_count_t__last . -> C1: byte_count - blocks_done * skein_512_block_bytes_c >= natural__first . C2: byte_count - blocks_done * skein_512_block_bytes_c <= natural__last . C3: blocks_done * skein_512_block_bytes_c >= integer__base__first . C4: blocks_done * skein_512_block_bytes_c <= integer__base__last . For path(s) from assertion of line 930 to run-time check associated with statement of line 955: procedure_skein_512_final_26. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: byte_count <= result__index__subtype__1__last + 1 . H3: blocks_done * skein_512_block_bytes_c < byte_count . 
H4: blocks_done * skein_512_block_bytes_c < result__index__subtype__1__last + 1 . H5: blocks_done < blocks_required . H6: blocks_required = (byte_count + 63) div 64 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H27: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H28: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H29: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H30: result__index__subtype__1__first = 0 . H31: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H32: blocks_done >= output_block_count_t__first . H33: blocks_done <= output_block_count_t__last . H34: blocks_done >= spark__unsigned__u64__first . H35: blocks_done <= spark__unsigned__u64__last . H36: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H37: blocks_done >= spark__unsigned__u64__first . H38: blocks_done <= spark__unsigned__u64__last . H39: fld_hash_bit_len(fld_h(local_ctx__3)) > 0 . H40: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H41: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H42: fld_byte_count(fld_h(local_ctx__3)) >= natural__first . H43: fld_byte_count(fld_h(local_ctx__3)) <= natural__last . H44: fld_hash_bit_len(fld_h(local_ctx__3)) >= hash_bit_length__first . H45: fld_hash_bit_len(fld_h(local_ctx__3)) <= hash_bit_length__last . H46: true . H47: true . H48: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u6__first . 
H49: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u6__last . H50: true . H51: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u7__first . H52: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u7__last . H53: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u16__first . H54: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u16__last . H55: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u32__first . H56: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u32__last . H57: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u64__first . H58: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u64__last . H59: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H60: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H61: fld_byte_count(fld_h(local_ctx__3)) >= natural__first . H62: fld_byte_count(fld_h(local_ctx__3)) <= natural__last . H63: fld_hash_bit_len(fld_h(local_ctx__3)) >= hash_bit_length__first . H64: fld_hash_bit_len(fld_h(local_ctx__3)) <= hash_bit_length__last . H65: true . H66: true . H67: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u6__first . H68: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u6__last . H69: true . H70: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u7__first . H71: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u7__last . H72: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u16__first . H73: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u16__last . H74: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u32__first . H75: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u32__last . H76: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u64__first . H77: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u64__last . H78: skein_block_type_out >= spark__unsigned__u6__first . H79: skein_block_type_out <= spark__unsigned__u6__last . H80: fld_h(local_ctx__4) = upf_byte_count(upf_tweak_words(fld_h( local_ctx__3), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_out, first_block := true, final_block := true)), 0) . H81: fld_hash_bit_len(fld_h(local_ctx__4)) = fld_hash_bit_len(fld_h( local_ctx__3)) . H82: fld_byte_count(fld_h(local_ctx__4)) = 0 . H83: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H84: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . H85: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . H86: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H87: true . H88: true . H89: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . 
H90: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H91: true . H92: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H93: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H94: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . H95: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H96: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H97: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . H98: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . H99: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . H100: fld_x(local_ctx__4) = fld_x(local_ctx__3) . H101: fld_b(local_ctx__4) = fld_b(local_ctx__3) . H102: local_ctx__4 = upf_h(local_ctx__3, fld_h(local_ctx__4)) . H103: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last))) . H104: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__4), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last))) . H105: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H106: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . H107: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . H108: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H109: true . H110: true . H111: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . H112: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H113: true . H114: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H115: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H116: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . H117: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H118: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H119: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . H120: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . H121: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . H122: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last))) . H123: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__4), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last))) . H124: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H125: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . H126: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . 
H127: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H128: true . H129: true . H130: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . H131: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H132: true . H133: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H134: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H135: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . H136: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H137: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H138: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . H139: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . H140: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . H141: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___1]) <= spark__unsigned__byte__last))) . H142: 8 >= natural__first . H143: 8 <= natural__last . H144: 1 >= positive_block_512_count_t__first . H145: 1 <= positive_block_512_count_t__last . H146: 0 >= natural__first . H147: 0 <= natural__last . H148: fld_hash_bit_len(fld_h(local_ctx__4)) >= initialized_hash_bit_length__first . H149: fld_hash_bit_len(fld_h(local_ctx__4)) <= initialized_hash_bit_length__last . H150: fld_byte_count(fld_h(local_ctx__4)) >= skein_512_block_bytes_count__first . H151: fld_byte_count(fld_h(local_ctx__4)) <= skein_512_block_bytes_count__last . H152: skein_512_block_bytes_index__first = 0 . H153: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H154: 0 + 63 <= skein_512_block_bytes_index__last . H155: skein_512_block_bytes_index__last <= natural__last . H156: 0 <= natural__last - 63 . H157: fld_hash_bit_len(fld_h(local_ctx__5)) >= initialized_hash_bit_length__first . H158: fld_hash_bit_len(fld_h(local_ctx__5)) <= initialized_hash_bit_length__last . H159: fld_hash_bit_len(fld_h(local_ctx__5)) = fld_hash_bit_len(fld_h( local_ctx__4)) . H160: fld_byte_count(fld_h(local_ctx__5)) >= skein_512_block_bytes_count__first . H161: fld_byte_count(fld_h(local_ctx__5)) <= skein_512_block_bytes_count__last . H162: fld_byte_count(fld_h(local_ctx__5)) = fld_byte_count(fld_h( local_ctx__4)) . H163: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__5), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__5), [i___2]) <= spark__unsigned__byte__last))) . H164: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__5), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__5), [i___1]) <= spark__unsigned__u64__last))) . H165: fld_byte_count(fld_h(local_ctx__5)) >= natural__first . H166: fld_byte_count(fld_h(local_ctx__5)) <= natural__last . H167: fld_hash_bit_len(fld_h(local_ctx__5)) >= hash_bit_length__first . H168: fld_hash_bit_len(fld_h(local_ctx__5)) <= hash_bit_length__last . H169: true . H170: true . 
H171: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u6__first . H172: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u6__last . H173: true . H174: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u7__first . H175: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u7__last . H176: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u16__first . H177: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u16__last . H178: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u32__first . H179: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u32__last . H180: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u64__first . H181: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u64__last . H182: blocks_done >= output_block_count_t__first . H183: blocks_done <= output_block_count_t__last . H184: byte_count >= output_byte_count_t__first . H185: byte_count <= output_byte_count_t__last . H186: byte_count - blocks_done * skein_512_block_bytes_c >= natural__first . H187: byte_count - blocks_done * skein_512_block_bytes_c <= natural__last . H188: blocks_done * skein_512_block_bytes_c >= integer__base__first . H189: blocks_done * skein_512_block_bytes_c <= integer__base__last . H190: byte_count - blocks_done * skein_512_block_bytes_c >= natural__first . H191: byte_count - blocks_done * skein_512_block_bytes_c <= natural__last . H192: byte_count - blocks_done * skein_512_block_bytes_c >= skein_512_block_bytes_c . -> C1: skein_512_block_bytes_c >= natural__first . C2: skein_512_block_bytes_c <= natural__last . For path(s) from assertion of line 930 to precondition check associated with statement of line 959: procedure_skein_512_final_27. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: byte_count <= result__index__subtype__1__last + 1 . H3: blocks_done * skein_512_block_bytes_c < byte_count . H4: blocks_done * skein_512_block_bytes_c < result__index__subtype__1__last + 1 . H5: blocks_done < blocks_required . H6: blocks_required = (byte_count + 63) div 64 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . 
H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H27: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H28: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H29: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H30: result__index__subtype__1__first = 0 . H31: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H32: blocks_done >= output_block_count_t__first . H33: blocks_done <= output_block_count_t__last . H34: blocks_done >= spark__unsigned__u64__first . H35: blocks_done <= spark__unsigned__u64__last . H36: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H37: blocks_done >= spark__unsigned__u64__first . H38: blocks_done <= spark__unsigned__u64__last . H39: fld_hash_bit_len(fld_h(local_ctx__3)) > 0 . H40: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H41: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H42: fld_byte_count(fld_h(local_ctx__3)) >= natural__first . H43: fld_byte_count(fld_h(local_ctx__3)) <= natural__last . H44: fld_hash_bit_len(fld_h(local_ctx__3)) >= hash_bit_length__first . H45: fld_hash_bit_len(fld_h(local_ctx__3)) <= hash_bit_length__last . H46: true . H47: true . H48: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u6__first . H49: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u6__last . H50: true . H51: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u7__first . H52: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u7__last . H53: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u16__first . H54: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u16__last . H55: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u32__first . H56: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u32__last . H57: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u64__first . H58: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u64__last . H59: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H60: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H61: fld_byte_count(fld_h(local_ctx__3)) >= natural__first . 
H62: fld_byte_count(fld_h(local_ctx__3)) <= natural__last . H63: fld_hash_bit_len(fld_h(local_ctx__3)) >= hash_bit_length__first . H64: fld_hash_bit_len(fld_h(local_ctx__3)) <= hash_bit_length__last . H65: true . H66: true . H67: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u6__first . H68: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u6__last . H69: true . H70: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u7__first . H71: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u7__last . H72: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u16__first . H73: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u16__last . H74: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u32__first . H75: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u32__last . H76: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u64__first . H77: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u64__last . H78: skein_block_type_out >= spark__unsigned__u6__first . H79: skein_block_type_out <= spark__unsigned__u6__last . H80: fld_h(local_ctx__4) = upf_byte_count(upf_tweak_words(fld_h( local_ctx__3), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_out, first_block := true, final_block := true)), 0) . H81: fld_hash_bit_len(fld_h(local_ctx__4)) = fld_hash_bit_len(fld_h( local_ctx__3)) . H82: fld_byte_count(fld_h(local_ctx__4)) = 0 . H83: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H84: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . H85: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . H86: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H87: true . H88: true . H89: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . H90: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H91: true . H92: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H93: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H94: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . H95: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H96: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H97: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . H98: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . H99: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . H100: fld_x(local_ctx__4) = fld_x(local_ctx__3) . H101: fld_b(local_ctx__4) = fld_b(local_ctx__3) . H102: local_ctx__4 = upf_h(local_ctx__3, fld_h(local_ctx__4)) . H103: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last))) . 
H104: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__4), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last))) . H105: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H106: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . H107: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . H108: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H109: true . H110: true . H111: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . H112: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H113: true . H114: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H115: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H116: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . H117: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H118: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H119: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . H120: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . H121: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . H122: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last))) . H123: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__4), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last))) . H124: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H125: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . H126: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . H127: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H128: true . H129: true . H130: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . H131: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H132: true . H133: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H134: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H135: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . H136: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H137: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H138: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . H139: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . H140: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . 
H141: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___1]) <= spark__unsigned__byte__last))) . H142: 8 >= natural__first . H143: 8 <= natural__last . H144: 1 >= positive_block_512_count_t__first . H145: 1 <= positive_block_512_count_t__last . H146: 0 >= natural__first . H147: 0 <= natural__last . H148: fld_hash_bit_len(fld_h(local_ctx__4)) >= initialized_hash_bit_length__first . H149: fld_hash_bit_len(fld_h(local_ctx__4)) <= initialized_hash_bit_length__last . H150: fld_byte_count(fld_h(local_ctx__4)) >= skein_512_block_bytes_count__first . H151: fld_byte_count(fld_h(local_ctx__4)) <= skein_512_block_bytes_count__last . H152: skein_512_block_bytes_index__first = 0 . H153: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H154: 0 + 63 <= skein_512_block_bytes_index__last . H155: skein_512_block_bytes_index__last <= natural__last . H156: 0 <= natural__last - 63 . H157: fld_hash_bit_len(fld_h(local_ctx__5)) >= initialized_hash_bit_length__first . H158: fld_hash_bit_len(fld_h(local_ctx__5)) <= initialized_hash_bit_length__last . H159: fld_hash_bit_len(fld_h(local_ctx__5)) = fld_hash_bit_len(fld_h( local_ctx__4)) . H160: fld_byte_count(fld_h(local_ctx__5)) >= skein_512_block_bytes_count__first . H161: fld_byte_count(fld_h(local_ctx__5)) <= skein_512_block_bytes_count__last . H162: fld_byte_count(fld_h(local_ctx__5)) = fld_byte_count(fld_h( local_ctx__4)) . H163: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__5), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__5), [i___2]) <= spark__unsigned__byte__last))) . H164: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__5), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__5), [i___1]) <= spark__unsigned__u64__last))) . H165: fld_byte_count(fld_h(local_ctx__5)) >= natural__first . H166: fld_byte_count(fld_h(local_ctx__5)) <= natural__last . H167: fld_hash_bit_len(fld_h(local_ctx__5)) >= hash_bit_length__first . H168: fld_hash_bit_len(fld_h(local_ctx__5)) <= hash_bit_length__last . H169: true . H170: true . H171: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u6__first . H172: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u6__last . H173: true . H174: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u7__first . H175: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u7__last . H176: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u16__first . H177: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u16__last . H178: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u32__first . H179: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u32__last . H180: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u64__first . H181: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u64__last . H182: blocks_done >= output_block_count_t__first . H183: blocks_done <= output_block_count_t__last . H184: byte_count >= output_byte_count_t__first . 
H185: byte_count <= output_byte_count_t__last . H186: byte_count - blocks_done * skein_512_block_bytes_c >= natural__first . H187: byte_count - blocks_done * skein_512_block_bytes_c <= natural__last . H188: blocks_done * skein_512_block_bytes_c >= integer__base__first . H189: blocks_done * skein_512_block_bytes_c <= integer__base__last . H190: byte_count - blocks_done * skein_512_block_bytes_c >= natural__first . H191: byte_count - blocks_done * skein_512_block_bytes_c <= natural__last . H192: byte_count - blocks_done * skein_512_block_bytes_c >= skein_512_block_bytes_c . H193: skein_512_block_bytes_c >= natural__first . H194: skein_512_block_bytes_c <= natural__last . H195: for_all(i___1: integer, ((i___1 >= result__index__subtype__1__first) and (i___1 <= result__index__subtype__1__last)) -> ((element( result, [i___1]) >= spark__unsigned__byte__first) and (element( result, [i___1]) <= spark__unsigned__byte__last))) . H196: blocks_done >= output_block_count_t__first . H197: blocks_done <= output_block_count_t__last . H198: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__5), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__5), [i___2]) <= spark__unsigned__byte__last))) . H199: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__5), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__5), [i___1]) <= spark__unsigned__u64__last))) . H200: fld_byte_count(fld_h(local_ctx__5)) >= natural__first . H201: fld_byte_count(fld_h(local_ctx__5)) <= natural__last . H202: fld_hash_bit_len(fld_h(local_ctx__5)) >= hash_bit_length__first . H203: fld_hash_bit_len(fld_h(local_ctx__5)) <= hash_bit_length__last . H204: true . H205: true . H206: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u6__first . H207: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u6__last . H208: true . H209: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u7__first . H210: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u7__last . H211: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u16__first . H212: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u16__last . H213: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u32__first . H214: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u32__last . H215: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u64__first . H216: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u64__last . H217: skein_512_block_bytes_c >= natural__first . H218: skein_512_block_bytes_c <= natural__last . -> C1: skein_512_block_bytes_c >= natural__first . C2: skein_512_block_bytes_c <= natural__last . C3: blocks_done * skein_512_block_bytes_c >= natural__first . C4: blocks_done * skein_512_block_bytes_c <= natural__last . C5: result__index__subtype__1__first = 0 . C6: skein_512_state_words_index__first = 0 . C7: result__index__subtype__1__last >= blocks_done * skein_512_block_bytes_c + ( skein_512_block_bytes_c - 1) . C8: skein_512_block_bytes_c <= ( skein_512_state_words_index__last + 1) * 8 . C9: blocks_done * skein_512_block_bytes_c >= integer__base__first . 
C10: blocks_done * skein_512_block_bytes_c <= integer__base__last . procedure_skein_512_final_28. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: byte_count <= result__index__subtype__1__last + 1 . H3: blocks_done * skein_512_block_bytes_c < byte_count . H4: blocks_done * skein_512_block_bytes_c < result__index__subtype__1__last + 1 . H5: blocks_done < blocks_required . H6: blocks_required = (byte_count + 63) div 64 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H27: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H28: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H29: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H30: result__index__subtype__1__first = 0 . H31: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H32: blocks_done >= output_block_count_t__first . H33: blocks_done <= output_block_count_t__last . H34: blocks_done >= spark__unsigned__u64__first . H35: blocks_done <= spark__unsigned__u64__last . H36: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H37: blocks_done >= spark__unsigned__u64__first . H38: blocks_done <= spark__unsigned__u64__last . H39: fld_hash_bit_len(fld_h(local_ctx__3)) > 0 . H40: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H41: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H42: fld_byte_count(fld_h(local_ctx__3)) >= natural__first . H43: fld_byte_count(fld_h(local_ctx__3)) <= natural__last . 
H44: fld_hash_bit_len(fld_h(local_ctx__3)) >= hash_bit_length__first . H45: fld_hash_bit_len(fld_h(local_ctx__3)) <= hash_bit_length__last . H46: true . H47: true . H48: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u6__first . H49: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u6__last . H50: true . H51: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u7__first . H52: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u7__last . H53: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u16__first . H54: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u16__last . H55: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u32__first . H56: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u32__last . H57: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u64__first . H58: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u64__last . H59: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H60: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H61: fld_byte_count(fld_h(local_ctx__3)) >= natural__first . H62: fld_byte_count(fld_h(local_ctx__3)) <= natural__last . H63: fld_hash_bit_len(fld_h(local_ctx__3)) >= hash_bit_length__first . H64: fld_hash_bit_len(fld_h(local_ctx__3)) <= hash_bit_length__last . H65: true . H66: true . H67: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u6__first . H68: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u6__last . H69: true . H70: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u7__first . H71: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u7__last . H72: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u16__first . H73: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u16__last . H74: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u32__first . H75: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u32__last . H76: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u64__first . H77: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u64__last . H78: skein_block_type_out >= spark__unsigned__u6__first . H79: skein_block_type_out <= spark__unsigned__u6__last . H80: fld_h(local_ctx__4) = upf_byte_count(upf_tweak_words(fld_h( local_ctx__3), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_out, first_block := true, final_block := true)), 0) . H81: fld_hash_bit_len(fld_h(local_ctx__4)) = fld_hash_bit_len(fld_h( local_ctx__3)) . H82: fld_byte_count(fld_h(local_ctx__4)) = 0 . H83: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H84: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . 
H85: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . H86: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H87: true . H88: true . H89: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . H90: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H91: true . H92: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H93: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H94: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . H95: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H96: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H97: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . H98: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . H99: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . H100: fld_x(local_ctx__4) = fld_x(local_ctx__3) . H101: fld_b(local_ctx__4) = fld_b(local_ctx__3) . H102: local_ctx__4 = upf_h(local_ctx__3, fld_h(local_ctx__4)) . H103: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last))) . H104: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__4), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last))) . H105: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H106: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . H107: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . H108: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H109: true . H110: true . H111: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . H112: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H113: true . H114: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H115: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H116: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . H117: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H118: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H119: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . H120: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . H121: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . H122: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last))) . 
H123: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__4), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last))) . H124: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H125: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . H126: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . H127: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H128: true . H129: true . H130: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . H131: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H132: true . H133: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H134: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H135: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . H136: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H137: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H138: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . H139: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . H140: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . H141: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___1]) <= spark__unsigned__byte__last))) . H142: 8 >= natural__first . H143: 8 <= natural__last . H144: 1 >= positive_block_512_count_t__first . H145: 1 <= positive_block_512_count_t__last . H146: 0 >= natural__first . H147: 0 <= natural__last . H148: fld_hash_bit_len(fld_h(local_ctx__4)) >= initialized_hash_bit_length__first . H149: fld_hash_bit_len(fld_h(local_ctx__4)) <= initialized_hash_bit_length__last . H150: fld_byte_count(fld_h(local_ctx__4)) >= skein_512_block_bytes_count__first . H151: fld_byte_count(fld_h(local_ctx__4)) <= skein_512_block_bytes_count__last . H152: skein_512_block_bytes_index__first = 0 . H153: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H154: 0 + 63 <= skein_512_block_bytes_index__last . H155: skein_512_block_bytes_index__last <= natural__last . H156: 0 <= natural__last - 63 . H157: fld_hash_bit_len(fld_h(local_ctx__5)) >= initialized_hash_bit_length__first . H158: fld_hash_bit_len(fld_h(local_ctx__5)) <= initialized_hash_bit_length__last . H159: fld_hash_bit_len(fld_h(local_ctx__5)) = fld_hash_bit_len(fld_h( local_ctx__4)) . H160: fld_byte_count(fld_h(local_ctx__5)) >= skein_512_block_bytes_count__first . H161: fld_byte_count(fld_h(local_ctx__5)) <= skein_512_block_bytes_count__last . H162: fld_byte_count(fld_h(local_ctx__5)) = fld_byte_count(fld_h( local_ctx__4)) . H163: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__5), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__5), [i___2]) <= spark__unsigned__byte__last))) . 
H164: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__5), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__5), [i___1]) <= spark__unsigned__u64__last))) . H165: fld_byte_count(fld_h(local_ctx__5)) >= natural__first . H166: fld_byte_count(fld_h(local_ctx__5)) <= natural__last . H167: fld_hash_bit_len(fld_h(local_ctx__5)) >= hash_bit_length__first . H168: fld_hash_bit_len(fld_h(local_ctx__5)) <= hash_bit_length__last . H169: true . H170: true . H171: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u6__first . H172: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u6__last . H173: true . H174: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u7__first . H175: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u7__last . H176: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u16__first . H177: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u16__last . H178: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u32__first . H179: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u32__last . H180: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u64__first . H181: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u64__last . H182: blocks_done >= output_block_count_t__first . H183: blocks_done <= output_block_count_t__last . H184: byte_count >= output_byte_count_t__first . H185: byte_count <= output_byte_count_t__last . H186: byte_count - blocks_done * skein_512_block_bytes_c >= natural__first . H187: byte_count - blocks_done * skein_512_block_bytes_c <= natural__last . H188: blocks_done * skein_512_block_bytes_c >= integer__base__first . H189: blocks_done * skein_512_block_bytes_c <= integer__base__last . H190: byte_count - blocks_done * skein_512_block_bytes_c >= natural__first . H191: byte_count - blocks_done * skein_512_block_bytes_c <= natural__last . H192: not (byte_count - blocks_done * skein_512_block_bytes_c >= skein_512_block_bytes_c) . H193: for_all(i___1: integer, ((i___1 >= result__index__subtype__1__first) and (i___1 <= result__index__subtype__1__last)) -> ((element( result, [i___1]) >= spark__unsigned__byte__first) and (element( result, [i___1]) <= spark__unsigned__byte__last))) . H194: blocks_done >= output_block_count_t__first . H195: blocks_done <= output_block_count_t__last . H196: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__5), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__5), [i___2]) <= spark__unsigned__byte__last))) . H197: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__5), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__5), [i___1]) <= spark__unsigned__u64__last))) . H198: fld_byte_count(fld_h(local_ctx__5)) >= natural__first . H199: fld_byte_count(fld_h(local_ctx__5)) <= natural__last . H200: fld_hash_bit_len(fld_h(local_ctx__5)) >= hash_bit_length__first . H201: fld_hash_bit_len(fld_h(local_ctx__5)) <= hash_bit_length__last . H202: true . H203: true . H204: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u6__first . 
H205: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u6__last . H206: true . H207: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u7__first . H208: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u7__last . H209: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u16__first . H210: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u16__last . H211: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u32__first . H212: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u32__last . H213: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u64__first . H214: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u64__last . H215: byte_count - blocks_done * skein_512_block_bytes_c >= natural__first . H216: byte_count - blocks_done * skein_512_block_bytes_c <= natural__last . -> C1: byte_count - blocks_done * skein_512_block_bytes_c >= natural__first . C2: byte_count - blocks_done * skein_512_block_bytes_c <= natural__last . C3: blocks_done * skein_512_block_bytes_c >= natural__first . C4: blocks_done * skein_512_block_bytes_c <= natural__last . C5: result__index__subtype__1__first = 0 . C6: skein_512_state_words_index__first = 0 . C7: result__index__subtype__1__last >= blocks_done * skein_512_block_bytes_c + (byte_count - blocks_done * skein_512_block_bytes_c - 1) . C8: byte_count - blocks_done * skein_512_block_bytes_c <= ( skein_512_state_words_index__last + 1) * 8 . C9: blocks_done * skein_512_block_bytes_c >= integer__base__first . C10: blocks_done * skein_512_block_bytes_c <= integer__base__last . For path(s) from assertion of line 930 to run-time check associated with statement of line 966: procedure_skein_512_final_29. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: byte_count <= result__index__subtype__1__last + 1 . H3: blocks_done * skein_512_block_bytes_c < byte_count . H4: blocks_done * skein_512_block_bytes_c < result__index__subtype__1__last + 1 . H5: blocks_done < blocks_required . H6: blocks_required = (byte_count + 63) div 64 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . 
H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H27: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H28: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H29: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H30: result__index__subtype__1__first = 0 . H31: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H32: blocks_done >= output_block_count_t__first . H33: blocks_done <= output_block_count_t__last . H34: blocks_done >= spark__unsigned__u64__first . H35: blocks_done <= spark__unsigned__u64__last . H36: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H37: blocks_done >= spark__unsigned__u64__first . H38: blocks_done <= spark__unsigned__u64__last . H39: fld_hash_bit_len(fld_h(local_ctx__3)) > 0 . H40: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H41: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H42: fld_byte_count(fld_h(local_ctx__3)) >= natural__first . H43: fld_byte_count(fld_h(local_ctx__3)) <= natural__last . H44: fld_hash_bit_len(fld_h(local_ctx__3)) >= hash_bit_length__first . H45: fld_hash_bit_len(fld_h(local_ctx__3)) <= hash_bit_length__last . H46: true . H47: true . H48: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u6__first . H49: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u6__last . H50: true . H51: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u7__first . H52: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u7__last . H53: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u16__first . H54: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u16__last . H55: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u32__first . H56: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u32__last . H57: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u64__first . H58: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u64__last . H59: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H60: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H61: fld_byte_count(fld_h(local_ctx__3)) >= natural__first . H62: fld_byte_count(fld_h(local_ctx__3)) <= natural__last . 
H63: fld_hash_bit_len(fld_h(local_ctx__3)) >= hash_bit_length__first . H64: fld_hash_bit_len(fld_h(local_ctx__3)) <= hash_bit_length__last . H65: true . H66: true . H67: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u6__first . H68: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u6__last . H69: true . H70: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u7__first . H71: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u7__last . H72: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u16__first . H73: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u16__last . H74: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u32__first . H75: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u32__last . H76: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u64__first . H77: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u64__last . H78: skein_block_type_out >= spark__unsigned__u6__first . H79: skein_block_type_out <= spark__unsigned__u6__last . H80: fld_h(local_ctx__4) = upf_byte_count(upf_tweak_words(fld_h( local_ctx__3), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_out, first_block := true, final_block := true)), 0) . H81: fld_hash_bit_len(fld_h(local_ctx__4)) = fld_hash_bit_len(fld_h( local_ctx__3)) . H82: fld_byte_count(fld_h(local_ctx__4)) = 0 . H83: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H84: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . H85: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . H86: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H87: true . H88: true . H89: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . H90: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H91: true . H92: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H93: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H94: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . H95: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H96: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H97: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . H98: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . H99: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . H100: fld_x(local_ctx__4) = fld_x(local_ctx__3) . H101: fld_b(local_ctx__4) = fld_b(local_ctx__3) . H102: local_ctx__4 = upf_h(local_ctx__3, fld_h(local_ctx__4)) . H103: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last))) . H104: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__4), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last))) . 
H105: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H106: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . H107: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . H108: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H109: true . H110: true . H111: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . H112: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H113: true . H114: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H115: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H116: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . H117: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H118: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H119: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . H120: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . H121: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . H122: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last))) . H123: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__4), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last))) . H124: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H125: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . H126: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . H127: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H128: true . H129: true . H130: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . H131: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H132: true . H133: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H134: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H135: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . H136: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H137: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H138: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . H139: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . H140: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . H141: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___1]) <= spark__unsigned__byte__last))) . H142: 8 >= natural__first . H143: 8 <= natural__last . H144: 1 >= positive_block_512_count_t__first . H145: 1 <= positive_block_512_count_t__last . H146: 0 >= natural__first . H147: 0 <= natural__last . 
H148: fld_hash_bit_len(fld_h(local_ctx__4)) >= initialized_hash_bit_length__first . H149: fld_hash_bit_len(fld_h(local_ctx__4)) <= initialized_hash_bit_length__last . H150: fld_byte_count(fld_h(local_ctx__4)) >= skein_512_block_bytes_count__first . H151: fld_byte_count(fld_h(local_ctx__4)) <= skein_512_block_bytes_count__last . H152: skein_512_block_bytes_index__first = 0 . H153: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H154: 0 + 63 <= skein_512_block_bytes_index__last . H155: skein_512_block_bytes_index__last <= natural__last . H156: 0 <= natural__last - 63 . H157: fld_hash_bit_len(fld_h(local_ctx__5)) >= initialized_hash_bit_length__first . H158: fld_hash_bit_len(fld_h(local_ctx__5)) <= initialized_hash_bit_length__last . H159: fld_hash_bit_len(fld_h(local_ctx__5)) = fld_hash_bit_len(fld_h( local_ctx__4)) . H160: fld_byte_count(fld_h(local_ctx__5)) >= skein_512_block_bytes_count__first . H161: fld_byte_count(fld_h(local_ctx__5)) <= skein_512_block_bytes_count__last . H162: fld_byte_count(fld_h(local_ctx__5)) = fld_byte_count(fld_h( local_ctx__4)) . H163: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__5), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__5), [i___2]) <= spark__unsigned__byte__last))) . H164: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__5), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__5), [i___1]) <= spark__unsigned__u64__last))) . H165: fld_byte_count(fld_h(local_ctx__5)) >= natural__first . H166: fld_byte_count(fld_h(local_ctx__5)) <= natural__last . H167: fld_hash_bit_len(fld_h(local_ctx__5)) >= hash_bit_length__first . H168: fld_hash_bit_len(fld_h(local_ctx__5)) <= hash_bit_length__last . H169: true . H170: true . H171: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u6__first . H172: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u6__last . H173: true . H174: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u7__first . H175: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u7__last . H176: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u16__first . H177: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u16__last . H178: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u32__first . H179: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u32__last . H180: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u64__first . H181: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u64__last . H182: blocks_done >= output_block_count_t__first . H183: blocks_done <= output_block_count_t__last . H184: byte_count >= output_byte_count_t__first . H185: byte_count <= output_byte_count_t__last . H186: byte_count - blocks_done * skein_512_block_bytes_c >= natural__first . H187: byte_count - blocks_done * skein_512_block_bytes_c <= natural__last . H188: blocks_done * skein_512_block_bytes_c >= integer__base__first . H189: blocks_done * skein_512_block_bytes_c <= integer__base__last . H190: byte_count - blocks_done * skein_512_block_bytes_c >= natural__first . H191: byte_count - blocks_done * skein_512_block_bytes_c <= natural__last . 
H192: byte_count - blocks_done * skein_512_block_bytes_c >= skein_512_block_bytes_c . H193: skein_512_block_bytes_c >= natural__first . H194: skein_512_block_bytes_c <= natural__last . H195: for_all(i___1: integer, ((i___1 >= result__index__subtype__1__first) and (i___1 <= result__index__subtype__1__last)) -> ((element( result, [i___1]) >= spark__unsigned__byte__first) and (element( result, [i___1]) <= spark__unsigned__byte__last))) . H196: blocks_done >= output_block_count_t__first . H197: blocks_done <= output_block_count_t__last . H198: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__5), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__5), [i___2]) <= spark__unsigned__byte__last))) . H199: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__5), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__5), [i___1]) <= spark__unsigned__u64__last))) . H200: fld_byte_count(fld_h(local_ctx__5)) >= natural__first . H201: fld_byte_count(fld_h(local_ctx__5)) <= natural__last . H202: fld_hash_bit_len(fld_h(local_ctx__5)) >= hash_bit_length__first . H203: fld_hash_bit_len(fld_h(local_ctx__5)) <= hash_bit_length__last . H204: true . H205: true . H206: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u6__first . H207: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u6__last . H208: true . H209: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u7__first . H210: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u7__last . H211: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u16__first . H212: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u16__last . H213: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u32__first . H214: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u32__last . H215: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u64__first . H216: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u64__last . H217: skein_512_block_bytes_c >= natural__first . H218: skein_512_block_bytes_c <= natural__last . H219: skein_512_block_bytes_c >= natural__first . H220: skein_512_block_bytes_c <= natural__last . H221: blocks_done * skein_512_block_bytes_c >= natural__first . H222: blocks_done * skein_512_block_bytes_c <= natural__last . H223: result__index__subtype__1__first = 0 . H224: skein_512_state_words_index__first = 0 . H225: result__index__subtype__1__last >= blocks_done * skein_512_block_bytes_c + ( skein_512_block_bytes_c - 1) . H226: skein_512_block_bytes_c <= ( skein_512_state_words_index__last + 1) * 8 . H227: blocks_done * skein_512_block_bytes_c >= integer__base__first . H228: blocks_done * skein_512_block_bytes_c <= integer__base__last . H229: for_all(i___1: integer, ((i___1 >= result__index__subtype__1__first) and (i___1 <= result__index__subtype__1__last)) -> ((element(result__6, [ i___1]) >= spark__unsigned__byte__first) and (element( result__6, [i___1]) <= spark__unsigned__byte__last))) . 
H230: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H231: blocks_done >= output_block_count_t__first . H232: blocks_done <= output_block_count_t__last . -> C1: blocks_done + 1 >= output_block_count_t__first . C2: blocks_done + 1 <= output_block_count_t__last . procedure_skein_512_final_30. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: byte_count <= result__index__subtype__1__last + 1 . H3: blocks_done * skein_512_block_bytes_c < byte_count . H4: blocks_done * skein_512_block_bytes_c < result__index__subtype__1__last + 1 . H5: blocks_done < blocks_required . H6: blocks_required = (byte_count + 63) div 64 . H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H26: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H27: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H28: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H29: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H30: result__index__subtype__1__first = 0 . H31: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H32: blocks_done >= output_block_count_t__first . H33: blocks_done <= output_block_count_t__last . H34: blocks_done >= spark__unsigned__u64__first . H35: blocks_done <= spark__unsigned__u64__last . H36: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H37: blocks_done >= spark__unsigned__u64__first . H38: blocks_done <= spark__unsigned__u64__last . H39: fld_hash_bit_len(fld_h(local_ctx__3)) > 0 . H40: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last))) . 
H41: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H42: fld_byte_count(fld_h(local_ctx__3)) >= natural__first . H43: fld_byte_count(fld_h(local_ctx__3)) <= natural__last . H44: fld_hash_bit_len(fld_h(local_ctx__3)) >= hash_bit_length__first . H45: fld_hash_bit_len(fld_h(local_ctx__3)) <= hash_bit_length__last . H46: true . H47: true . H48: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u6__first . H49: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u6__last . H50: true . H51: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u7__first . H52: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u7__last . H53: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u16__first . H54: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u16__last . H55: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u32__first . H56: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u32__last . H57: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u64__first . H58: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u64__last . H59: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H60: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H61: fld_byte_count(fld_h(local_ctx__3)) >= natural__first . H62: fld_byte_count(fld_h(local_ctx__3)) <= natural__last . H63: fld_hash_bit_len(fld_h(local_ctx__3)) >= hash_bit_length__first . H64: fld_hash_bit_len(fld_h(local_ctx__3)) <= hash_bit_length__last . H65: true . H66: true . H67: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u6__first . H68: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u6__last . H69: true . H70: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u7__first . H71: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u7__last . H72: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u16__first . H73: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u16__last . H74: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u32__first . H75: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u32__last . H76: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= spark__unsigned__u64__first . H77: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= spark__unsigned__u64__last . H78: skein_block_type_out >= spark__unsigned__u6__first . H79: skein_block_type_out <= spark__unsigned__u6__last . 
H80: fld_h(local_ctx__4) = upf_byte_count(upf_tweak_words(fld_h( local_ctx__3), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_out, first_block := true, final_block := true)), 0) . H81: fld_hash_bit_len(fld_h(local_ctx__4)) = fld_hash_bit_len(fld_h( local_ctx__3)) . H82: fld_byte_count(fld_h(local_ctx__4)) = 0 . H83: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H84: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . H85: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . H86: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H87: true . H88: true . H89: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . H90: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H91: true . H92: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H93: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H94: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . H95: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H96: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H97: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . H98: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . H99: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . H100: fld_x(local_ctx__4) = fld_x(local_ctx__3) . H101: fld_b(local_ctx__4) = fld_b(local_ctx__3) . H102: local_ctx__4 = upf_h(local_ctx__3, fld_h(local_ctx__4)) . H103: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last))) . H104: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__4), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last))) . H105: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H106: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . H107: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . H108: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H109: true . H110: true . H111: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . H112: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H113: true . H114: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H115: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H116: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . H117: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H118: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H119: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . H120: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . 
H121: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . H122: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___2]) <= spark__unsigned__byte__last))) . H123: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__4), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__4), [i___1]) <= spark__unsigned__u64__last))) . H124: fld_byte_count(fld_h(local_ctx__4)) >= natural__first . H125: fld_byte_count(fld_h(local_ctx__4)) <= natural__last . H126: fld_hash_bit_len(fld_h(local_ctx__4)) >= hash_bit_length__first . H127: fld_hash_bit_len(fld_h(local_ctx__4)) <= hash_bit_length__last . H128: true . H129: true . H130: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u6__first . H131: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u6__last . H132: true . H133: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u7__first . H134: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u7__last . H135: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u16__first . H136: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u16__last . H137: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u32__first . H138: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u32__last . H139: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= spark__unsigned__u64__first . H140: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= spark__unsigned__u64__last . H141: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__4), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__4), [i___1]) <= spark__unsigned__byte__last))) . H142: 8 >= natural__first . H143: 8 <= natural__last . H144: 1 >= positive_block_512_count_t__first . H145: 1 <= positive_block_512_count_t__last . H146: 0 >= natural__first . H147: 0 <= natural__last . H148: fld_hash_bit_len(fld_h(local_ctx__4)) >= initialized_hash_bit_length__first . H149: fld_hash_bit_len(fld_h(local_ctx__4)) <= initialized_hash_bit_length__last . H150: fld_byte_count(fld_h(local_ctx__4)) >= skein_512_block_bytes_count__first . H151: fld_byte_count(fld_h(local_ctx__4)) <= skein_512_block_bytes_count__last . H152: skein_512_block_bytes_index__first = 0 . H153: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H154: 0 + 63 <= skein_512_block_bytes_index__last . H155: skein_512_block_bytes_index__last <= natural__last . H156: 0 <= natural__last - 63 . H157: fld_hash_bit_len(fld_h(local_ctx__5)) >= initialized_hash_bit_length__first . H158: fld_hash_bit_len(fld_h(local_ctx__5)) <= initialized_hash_bit_length__last . H159: fld_hash_bit_len(fld_h(local_ctx__5)) = fld_hash_bit_len(fld_h( local_ctx__4)) . H160: fld_byte_count(fld_h(local_ctx__5)) >= skein_512_block_bytes_count__first . H161: fld_byte_count(fld_h(local_ctx__5)) <= skein_512_block_bytes_count__last . H162: fld_byte_count(fld_h(local_ctx__5)) = fld_byte_count(fld_h( local_ctx__4)) . 
H163: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__5), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__5), [i___2]) <= spark__unsigned__byte__last))) . H164: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__5), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__5), [i___1]) <= spark__unsigned__u64__last))) . H165: fld_byte_count(fld_h(local_ctx__5)) >= natural__first . H166: fld_byte_count(fld_h(local_ctx__5)) <= natural__last . H167: fld_hash_bit_len(fld_h(local_ctx__5)) >= hash_bit_length__first . H168: fld_hash_bit_len(fld_h(local_ctx__5)) <= hash_bit_length__last . H169: true . H170: true . H171: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u6__first . H172: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u6__last . H173: true . H174: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u7__first . H175: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u7__last . H176: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u16__first . H177: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u16__last . H178: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u32__first . H179: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u32__last . H180: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u64__first . H181: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u64__last . H182: blocks_done >= output_block_count_t__first . H183: blocks_done <= output_block_count_t__last . H184: byte_count >= output_byte_count_t__first . H185: byte_count <= output_byte_count_t__last . H186: byte_count - blocks_done * skein_512_block_bytes_c >= natural__first . H187: byte_count - blocks_done * skein_512_block_bytes_c <= natural__last . H188: blocks_done * skein_512_block_bytes_c >= integer__base__first . H189: blocks_done * skein_512_block_bytes_c <= integer__base__last . H190: byte_count - blocks_done * skein_512_block_bytes_c >= natural__first . H191: byte_count - blocks_done * skein_512_block_bytes_c <= natural__last . H192: not (byte_count - blocks_done * skein_512_block_bytes_c >= skein_512_block_bytes_c) . H193: for_all(i___1: integer, ((i___1 >= result__index__subtype__1__first) and (i___1 <= result__index__subtype__1__last)) -> ((element( result, [i___1]) >= spark__unsigned__byte__first) and (element( result, [i___1]) <= spark__unsigned__byte__last))) . H194: blocks_done >= output_block_count_t__first . H195: blocks_done <= output_block_count_t__last . H196: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx__5), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx__5), [i___2]) <= spark__unsigned__byte__last))) . H197: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx__5), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx__5), [i___1]) <= spark__unsigned__u64__last))) . H198: fld_byte_count(fld_h(local_ctx__5)) >= natural__first . 
H199: fld_byte_count(fld_h(local_ctx__5)) <= natural__last . H200: fld_hash_bit_len(fld_h(local_ctx__5)) >= hash_bit_length__first . H201: fld_hash_bit_len(fld_h(local_ctx__5)) <= hash_bit_length__last . H202: true . H203: true . H204: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u6__first . H205: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u6__last . H206: true . H207: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u7__first . H208: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u7__last . H209: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u16__first . H210: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u16__last . H211: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u32__first . H212: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u32__last . H213: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) >= spark__unsigned__u64__first . H214: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) <= spark__unsigned__u64__last . H215: byte_count - blocks_done * skein_512_block_bytes_c >= natural__first . H216: byte_count - blocks_done * skein_512_block_bytes_c <= natural__last . H217: byte_count - blocks_done * skein_512_block_bytes_c >= natural__first . H218: byte_count - blocks_done * skein_512_block_bytes_c <= natural__last . H219: blocks_done * skein_512_block_bytes_c >= natural__first . H220: blocks_done * skein_512_block_bytes_c <= natural__last . H221: result__index__subtype__1__first = 0 . H222: skein_512_state_words_index__first = 0 . H223: result__index__subtype__1__last >= blocks_done * skein_512_block_bytes_c + (byte_count - blocks_done * skein_512_block_bytes_c - 1) . H224: byte_count - blocks_done * skein_512_block_bytes_c <= ( skein_512_state_words_index__last + 1) * 8 . H225: blocks_done * skein_512_block_bytes_c >= integer__base__first . H226: blocks_done * skein_512_block_bytes_c <= integer__base__last . H227: for_all(i___1: integer, ((i___1 >= result__index__subtype__1__first) and (i___1 <= result__index__subtype__1__last)) -> ((element(result__6, [ i___1]) >= spark__unsigned__byte__first) and (element( result__6, [i___1]) <= spark__unsigned__byte__last))) . H228: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element( x, [i___1]) >= spark__unsigned__u64__first) and (element( x, [i___1]) <= spark__unsigned__u64__last))) . H229: blocks_done >= output_block_count_t__first . H230: blocks_done <= output_block_count_t__last . -> C1: blocks_done + 1 >= output_block_count_t__first . C2: blocks_done + 1 <= output_block_count_t__last . For path(s) from assertion of line 930 to finish: procedure_skein_512_final_31. *** true . /* trivially true VC removed by Examiner */ procedure_skein_512_final_32. *** true . /* trivially true VC removed by Examiner */ For checks of refinement integrity: procedure_skein_512_final_33. H1: hash_bit_len_of(ctx) >= initialized_hash_bit_length__first . H2: hash_bit_len_of(ctx) <= initialized_hash_bit_length__last . H3: byte_count_of(ctx) >= skein_512_block_bytes_count__first . H4: byte_count_of(ctx) <= skein_512_block_bytes_count__last . H5: result__index__subtype__1__first = 0 . H6: (hash_bit_len_of(ctx) + 7) div 8 <= result__index__subtype__1__last + 1 . 
H7: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H8: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H9: fld_byte_count(fld_h(ctx)) >= natural__first . H10: fld_byte_count(fld_h(ctx)) <= natural__last . H11: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H12: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H13: true . H14: true . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H17: true . H18: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H20: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H22: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H24: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . -> C1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . C2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . C3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . C4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . C5: result__index__subtype__1__first = 0 . C6: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . procedure_skein_512_final_34. *** true . 
/* trivially true VC removed by Examiner */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_update.fdl0000644000175000017500000002267011712513676026711 0ustar eugeneugen {*******************************************************} {FDL Declarations} {Examiner Pro Edition} {*******************************************************} {procedure Skein.Skein_512_Update} title procedure skein_512_update; function round__(real) : integer; type natural = integer; type interfaces__unsigned_8 = integer; type interfaces__unsigned_16 = integer; type interfaces__unsigned_32 = integer; type interfaces__unsigned_64 = integer; type spark__unsigned__u6 = integer; type spark__unsigned__u7 = integer; type spark__unsigned__byte = integer; type spark__unsigned__u16 = integer; type spark__unsigned__u32 = integer; type spark__unsigned__u64 = integer; type spark__crypto__word_count_t = integer; type hash_bit_length = integer; type initialized_hash_bit_length = integer; type skein_512_state_words_index = integer; type skein_512_block_bytes_count = integer; type skein_512_block_bytes_index = integer; type positive_block_512_count_t = integer; type spark__crypto__byte_seq = array [natural] of spark__unsigned__byte; type spark__crypto__u64_seq = array [ spark__crypto__word_count_t] of spark__unsigned__u64; type tweak_value = record byte_count_lsb : spark__unsigned__u64; byte_count_msb : spark__unsigned__u32; reserved : spark__unsigned__u16; tree_level : spark__unsigned__u7; bit_pad : boolean; field_type : spark__unsigned__u6; first_block : boolean; final_block : boolean end; type context_header = record tweak_words : tweak_value; hash_bit_len : hash_bit_length; byte_count : natural end; type skein_512_context = record h : context_header; x : spark__crypto__u64_seq; b : spark__crypto__byte_seq end; const skein_512_block_bytes_c : integer = pending; const positive_block_512_count_t__base__first : integer = pending; const positive_block_512_count_t__base__last : integer = pending; const skein_512_block_bytes_index__base__first : integer = pending; const skein_512_block_bytes_index__base__last : integer = pending; const skein_512_block_bytes_count__base__first : integer = pending; const skein_512_block_bytes_count__base__last : integer = pending; const skein_512_state_words_index__base__first : integer = pending; const skein_512_state_words_index__base__last : integer = pending; const initialized_hash_bit_length__base__first : integer = pending; const initialized_hash_bit_length__base__last : integer = pending; const hash_bit_length__base__first : integer = pending; const hash_bit_length__base__last : integer = pending; const spark__crypto__word_count_t__base__first : integer = pending; const spark__crypto__word_count_t__base__last : integer = pending; const spark__unsigned__u64__base__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__base__last : interfaces__unsigned_64 = pending; const spark__unsigned__u32__base__first : interfaces__unsigned_32 = pending; const spark__unsigned__u32__base__last : interfaces__unsigned_32 = pending; const spark__unsigned__u16__base__first : interfaces__unsigned_16 = pending; const spark__unsigned__u16__base__last : interfaces__unsigned_16 = pending; const spark__unsigned__byte__base__first : interfaces__unsigned_8 = pending; const spark__unsigned__byte__base__last : interfaces__unsigned_8 = pending; const spark__unsigned__u7__base__first : spark__unsigned__u7 = pending; const spark__unsigned__u7__base__last : spark__unsigned__u7 = pending; const 
spark__unsigned__u6__base__first : spark__unsigned__u6 = pending; const spark__unsigned__u6__base__last : spark__unsigned__u6 = pending; const interfaces__unsigned_64__base__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_32__base__first : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__base__last : interfaces__unsigned_32 = pending; const interfaces__unsigned_16__base__first : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__base__last : interfaces__unsigned_16 = pending; const interfaces__unsigned_8__base__first : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__base__last : interfaces__unsigned_8 = pending; const natural__base__first : integer = pending; const natural__base__last : integer = pending; const integer__base__first : integer = pending; const integer__base__last : integer = pending; const context_header__size : integer = pending; const tweak_value__size : integer = pending; const msg__index__subtype__1__first : integer = pending; const msg__index__subtype__1__last : integer = pending; const skein_512_context__size : integer = pending; const positive_block_512_count_t__first : integer = pending; const positive_block_512_count_t__last : integer = pending; const positive_block_512_count_t__size : integer = pending; const skein_512_block_bytes_index__first : integer = pending; const skein_512_block_bytes_index__last : integer = pending; const skein_512_block_bytes_index__size : integer = pending; const skein_512_block_bytes_count__first : integer = pending; const skein_512_block_bytes_count__last : integer = pending; const skein_512_block_bytes_count__size : integer = pending; const skein_512_state_words_index__first : integer = pending; const skein_512_state_words_index__last : integer = pending; const skein_512_state_words_index__size : integer = pending; const initialized_hash_bit_length__first : integer = pending; const initialized_hash_bit_length__last : integer = pending; const initialized_hash_bit_length__size : integer = pending; const hash_bit_length__first : integer = pending; const hash_bit_length__last : integer = pending; const hash_bit_length__size : integer = pending; const spark__crypto__word_count_t__first : integer = pending; const spark__crypto__word_count_t__last : integer = pending; const spark__crypto__word_count_t__size : integer = pending; const spark__unsigned__u64__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__last : interfaces__unsigned_64 = pending; const spark__unsigned__u64__modulus : integer = pending; const spark__unsigned__u64__size : integer = pending; const spark__unsigned__u32__first : interfaces__unsigned_32 = pending; const spark__unsigned__u32__last : interfaces__unsigned_32 = pending; const spark__unsigned__u32__modulus : integer = pending; const spark__unsigned__u32__size : integer = pending; const spark__unsigned__u16__first : interfaces__unsigned_16 = pending; const spark__unsigned__u16__last : interfaces__unsigned_16 = pending; const spark__unsigned__u16__modulus : integer = pending; const spark__unsigned__u16__size : integer = pending; const spark__unsigned__byte__first : interfaces__unsigned_8 = pending; const spark__unsigned__byte__last : interfaces__unsigned_8 = pending; const spark__unsigned__byte__modulus : integer = pending; const spark__unsigned__byte__size : integer = pending; const spark__unsigned__u7__first : spark__unsigned__u7 = pending; const 
spark__unsigned__u7__last : spark__unsigned__u7 = pending; const spark__unsigned__u7__modulus : integer = pending; const spark__unsigned__u7__size : integer = pending; const spark__unsigned__u6__first : spark__unsigned__u6 = pending; const spark__unsigned__u6__last : spark__unsigned__u6 = pending; const spark__unsigned__u6__modulus : integer = pending; const spark__unsigned__u6__size : integer = pending; const interfaces__unsigned_64__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__modulus : integer = pending; const interfaces__unsigned_64__size : integer = pending; const interfaces__unsigned_32__first : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__last : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__modulus : integer = pending; const interfaces__unsigned_32__size : integer = pending; const interfaces__unsigned_16__first : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__last : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__modulus : integer = pending; const interfaces__unsigned_16__size : integer = pending; const interfaces__unsigned_8__first : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__last : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__modulus : integer = pending; const interfaces__unsigned_8__size : integer = pending; const natural__first : integer = pending; const natural__last : integer = pending; const natural__size : integer = pending; const integer__first : integer = pending; const integer__last : integer = pending; const integer__size : integer = pending; var ctx : skein_512_context; var msg : spark__crypto__byte_seq; function hash_bit_len_of(skein_512_context) : integer; function byte_count_of(skein_512_context) : integer; var ctx__4 : skein_512_context; var ctx__3 : skein_512_context; var ctx__2 : skein_512_context; var ctx__1 : skein_512_context; end; spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_init.siv0000644000175000017500000000500411712513676026416 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Init For path(s) from start to run-time check associated with statement of line 682: procedure_skein_512_init_1. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 684: procedure_skein_512_init_2. *** true . /* all conclusions proved */ For path(s) from start to check associated with statement of line 686: procedure_skein_512_init_3. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 690: procedure_skein_512_init_4. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 690: procedure_skein_512_init_5. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 690: procedure_skein_512_init_6. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 690: procedure_skein_512_init_7. *** true . 
/* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 698: procedure_skein_512_init_8. *** true . /* all conclusions proved */ For path(s) from start to check associated with statement of line 701: procedure_skein_512_init_9. *** true . /* all conclusions proved */ For path(s) from start to precondition check associated with statement of line 704: procedure_skein_512_init_10. *** true . /* all conclusions proved */ For path(s) from start to check associated with statement of line 711: procedure_skein_512_init_11. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 714: procedure_skein_512_init_12. *** true . /* all conclusions proved */ For path(s) from start to finish: procedure_skein_512_init_13. *** true . /* all conclusions proved */ For checks of refinement integrity: procedure_skein_512_init_14. *** true . /* all conclusions proved */ procedure_skein_512_init_15. *** true . /* proved using user-defined proof rules. */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block.fdl0000644000175000017500000002520411712513676030253 0ustar eugeneugen {*******************************************************} {FDL Declarations} {Examiner Pro Edition} {*******************************************************} {procedure Skein.Skein_512_Process_Block} title procedure skein_512_process_block; function round__(real) : integer; type natural = integer; type interfaces__unsigned_8 = integer; type interfaces__unsigned_16 = integer; type interfaces__unsigned_32 = integer; type interfaces__unsigned_64 = integer; type spark__unsigned__u6 = integer; type spark__unsigned__u7 = integer; type spark__unsigned__byte = integer; type spark__unsigned__u16 = integer; type spark__unsigned__u32 = integer; type spark__unsigned__u64 = integer; type spark__crypto__i3 = integer; type spark__crypto__i8 = integer; type spark__crypto__i9 = integer; type spark__crypto__word_count_t = integer; type hash_bit_length = integer; type initialized_hash_bit_length = integer; type skein_512_state_words_index = integer; type skein_512_block_bytes_count = integer; type skein_512_block_bytes_index = integer; type positive_block_512_count_t = integer; type spark__crypto__byte_seq = array [natural] of spark__unsigned__byte; type spark__crypto__u64_seq = array [ spark__crypto__word_count_t] of spark__unsigned__u64; type tweak_value = record byte_count_lsb : spark__unsigned__u64; byte_count_msb : spark__unsigned__u32; reserved : spark__unsigned__u16; tree_level : spark__unsigned__u7; bit_pad : boolean; field_type : spark__unsigned__u6; first_block : boolean; final_block : boolean end; type context_header = record tweak_words : tweak_value; hash_bit_len : hash_bit_length; byte_count : natural end; type skein_512_context = record h : context_header; x : spark__crypto__u64_seq; b : spark__crypto__byte_seq end; const skein_512_block_bytes_c : integer = pending; const positive_block_512_count_t__base__first : integer = pending; const positive_block_512_count_t__base__last : integer = pending; const skein_512_block_bytes_index__base__first : integer = pending; const skein_512_block_bytes_index__base__last : integer = pending; const skein_512_block_bytes_count__base__first : integer = pending; const skein_512_block_bytes_count__base__last : integer = pending; const skein_512_state_words_index__base__first : integer = pending; const skein_512_state_words_index__base__last : integer = pending; const 
initialized_hash_bit_length__base__first : integer = pending; const initialized_hash_bit_length__base__last : integer = pending; const hash_bit_length__base__first : integer = pending; const hash_bit_length__base__last : integer = pending; const spark__crypto__word_count_t__base__first : integer = pending; const spark__crypto__word_count_t__base__last : integer = pending; const spark__crypto__i9__base__first : integer = pending; const spark__crypto__i9__base__last : integer = pending; const spark__crypto__i8__base__first : integer = pending; const spark__crypto__i8__base__last : integer = pending; const spark__crypto__i3__base__first : integer = pending; const spark__crypto__i3__base__last : integer = pending; const spark__unsigned__u64__base__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__base__last : interfaces__unsigned_64 = pending; const spark__unsigned__u32__base__first : interfaces__unsigned_32 = pending; const spark__unsigned__u32__base__last : interfaces__unsigned_32 = pending; const spark__unsigned__u16__base__first : interfaces__unsigned_16 = pending; const spark__unsigned__u16__base__last : interfaces__unsigned_16 = pending; const spark__unsigned__byte__base__first : interfaces__unsigned_8 = pending; const spark__unsigned__byte__base__last : interfaces__unsigned_8 = pending; const spark__unsigned__u7__base__first : spark__unsigned__u7 = pending; const spark__unsigned__u7__base__last : spark__unsigned__u7 = pending; const spark__unsigned__u6__base__first : spark__unsigned__u6 = pending; const spark__unsigned__u6__base__last : spark__unsigned__u6 = pending; const interfaces__unsigned_64__base__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_32__base__first : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__base__last : interfaces__unsigned_32 = pending; const interfaces__unsigned_16__base__first : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__base__last : interfaces__unsigned_16 = pending; const interfaces__unsigned_8__base__first : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__base__last : interfaces__unsigned_8 = pending; const natural__base__first : integer = pending; const natural__base__last : integer = pending; const integer__base__first : integer = pending; const integer__base__last : integer = pending; const block__index__subtype__1__first : integer = pending; const block__index__subtype__1__last : integer = pending; const context_header__size : integer = pending; const tweak_value__size : integer = pending; const skein_512_context__size : integer = pending; const positive_block_512_count_t__first : integer = pending; const positive_block_512_count_t__last : integer = pending; const positive_block_512_count_t__size : integer = pending; const skein_512_block_bytes_index__first : integer = pending; const skein_512_block_bytes_index__last : integer = pending; const skein_512_block_bytes_index__size : integer = pending; const skein_512_block_bytes_count__first : integer = pending; const skein_512_block_bytes_count__last : integer = pending; const skein_512_block_bytes_count__size : integer = pending; const skein_512_state_words_index__first : integer = pending; const skein_512_state_words_index__last : integer = pending; const skein_512_state_words_index__size : integer = pending; const initialized_hash_bit_length__first : integer = pending; const initialized_hash_bit_length__last : integer = 
pending; const initialized_hash_bit_length__size : integer = pending; const hash_bit_length__first : integer = pending; const hash_bit_length__last : integer = pending; const hash_bit_length__size : integer = pending; const spark__crypto__word_count_t__first : integer = pending; const spark__crypto__word_count_t__last : integer = pending; const spark__crypto__word_count_t__size : integer = pending; const spark__crypto__i9__first : integer = pending; const spark__crypto__i9__last : integer = pending; const spark__crypto__i9__size : integer = pending; const spark__crypto__i8__first : integer = pending; const spark__crypto__i8__last : integer = pending; const spark__crypto__i8__size : integer = pending; const spark__crypto__i3__first : integer = pending; const spark__crypto__i3__last : integer = pending; const spark__crypto__i3__size : integer = pending; const spark__unsigned__u64__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__last : interfaces__unsigned_64 = pending; const spark__unsigned__u64__modulus : integer = pending; const spark__unsigned__u64__size : integer = pending; const spark__unsigned__u32__first : interfaces__unsigned_32 = pending; const spark__unsigned__u32__last : interfaces__unsigned_32 = pending; const spark__unsigned__u32__modulus : integer = pending; const spark__unsigned__u32__size : integer = pending; const spark__unsigned__u16__first : interfaces__unsigned_16 = pending; const spark__unsigned__u16__last : interfaces__unsigned_16 = pending; const spark__unsigned__u16__modulus : integer = pending; const spark__unsigned__u16__size : integer = pending; const spark__unsigned__byte__first : interfaces__unsigned_8 = pending; const spark__unsigned__byte__last : interfaces__unsigned_8 = pending; const spark__unsigned__byte__modulus : integer = pending; const spark__unsigned__byte__size : integer = pending; const spark__unsigned__u7__first : spark__unsigned__u7 = pending; const spark__unsigned__u7__last : spark__unsigned__u7 = pending; const spark__unsigned__u7__modulus : integer = pending; const spark__unsigned__u7__size : integer = pending; const spark__unsigned__u6__first : spark__unsigned__u6 = pending; const spark__unsigned__u6__last : spark__unsigned__u6 = pending; const spark__unsigned__u6__modulus : integer = pending; const spark__unsigned__u6__size : integer = pending; const interfaces__unsigned_64__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__modulus : integer = pending; const interfaces__unsigned_64__size : integer = pending; const interfaces__unsigned_32__first : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__last : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__modulus : integer = pending; const interfaces__unsigned_32__size : integer = pending; const interfaces__unsigned_16__first : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__last : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__modulus : integer = pending; const interfaces__unsigned_16__size : integer = pending; const interfaces__unsigned_8__first : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__last : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__modulus : integer = pending; const interfaces__unsigned_8__size : integer = pending; const natural__first : integer = pending; const natural__last : integer = pending; const natural__size : integer = pending; const integer__first : 
integer = pending; const integer__last : integer = pending; const integer__size : integer = pending; var ctx : skein_512_context; var block : spark__crypto__byte_seq; var starting_offset : integer; var block_count : integer; var byte_count_add : integer; var ts : spark__crypto__u64_seq; var ks : spark__crypto__u64_seq; var x : spark__crypto__u64_seq; var w : spark__crypto__u64_seq; var j : integer; var src_offset : integer; var ctx__6 : skein_512_context; var x__5 : spark__crypto__u64_seq; var x__4 : spark__crypto__u64_seq; var w__3 : spark__crypto__u64_seq; var ts__2 : spark__crypto__u64_seq; var ks__1 : spark__crypto__u64_seq; end; spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_update.vcg0000644000175000017500000134377311712513676026736 0ustar eugeneugen ******************************************************* Semantic Analysis of SPARK Text Examiner Pro Edition ******************************************************* procedure Skein.Skein_512_Update For path(s) from start to run-time check associated with statement of line 785: procedure_skein_512_update_1. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . -> C1: msg__index__subtype__1__last + 1 >= natural__first . C2: msg__index__subtype__1__last + 1 <= natural__last . 
For path(s) from start to run-time check associated with statement of line 786: procedure_skein_512_update_2. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . -> C1: 0 >= natural__first . C2: 0 <= natural__last . For path(s) from start to run-time check associated with statement of line 788: procedure_skein_512_update_3. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . 
H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . -> C1: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . C2: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . For path(s) from start to run-time check associated with statement of line 791: procedure_skein_512_update_4. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . 
H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: fld_byte_count(fld_h(ctx)) > 0 . -> C1: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . C2: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . For path(s) from start to check associated with statement of line 793: procedure_skein_512_update_5. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . 
H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: fld_byte_count(fld_h(ctx)) > 0 . H38: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H39: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . -> C1: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 . For path(s) from start to check associated with statement of line 795: procedure_skein_512_update_6. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . 
H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: fld_byte_count(fld_h(ctx)) > 0 . H38: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H39: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H40: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 . -> C1: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 . For path(s) from start to precondition check associated with statement of line 796: procedure_skein_512_update_7. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . 
H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: fld_byte_count(fld_h(ctx)) > 0 . H38: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H39: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H40: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 . H41: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 . H42: 0 >= natural__first . H43: 0 <= natural__last . H44: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H45: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . -> C1: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first . C2: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last . C3: 0 >= natural__first . C4: 0 <= natural__last . C5: fld_hash_bit_len(fld_h(ctx)) > 0 . C6: msg__index__subtype__1__first = 0 . C7: 0 >= msg__index__subtype__1__first . C8: 0 <= msg__index__subtype__1__last . C9: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= msg__index__subtype__1__last . C10: fld_byte_count(fld_h(ctx)) + ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= skein_512_block_bytes_index__last . For path(s) from start to run-time check associated with statement of line 797: procedure_skein_512_update_8. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . 
H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: fld_byte_count(fld_h(ctx)) > 0 . H38: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H39: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H40: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 . H41: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 . H42: 0 >= natural__first . H43: 0 <= natural__last . H44: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H45: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H46: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first . H47: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last . H48: 0 >= natural__first . H49: 0 <= natural__last . H50: fld_hash_bit_len(fld_h(ctx)) > 0 . H51: msg__index__subtype__1__first = 0 . H52: 0 >= msg__index__subtype__1__first . H53: 0 <= msg__index__subtype__1__last . H54: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= msg__index__subtype__1__last . H55: fld_byte_count(fld_h(ctx)) + ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= skein_512_block_bytes_index__last . H56: fld_hash_bit_len(fld_h(ctx__1)) > 0 . H57: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h( ctx)) . H58: fld_byte_count(fld_h(ctx__1)) = fld_byte_count(fld_h( ctx)) + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) . H59: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H60: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . 
H61: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H62: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H63: fld_byte_count(fld_h(ctx__1)) >= natural__first . H64: fld_byte_count(fld_h(ctx__1)) <= natural__last . H65: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H66: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H67: true . H68: true . H69: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H70: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H71: true . H72: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H73: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H74: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H75: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H76: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H77: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H78: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H79: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H80: msg__index__subtype__1__last + 1 >= natural__first . H81: msg__index__subtype__1__last + 1 <= natural__last . H82: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H83: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . -> C1: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . C2: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . For path(s) from start to run-time check associated with statement of line 798: procedure_skein_512_update_9. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . 
H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: fld_byte_count(fld_h(ctx)) > 0 . H38: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H39: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H40: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 . H41: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 . H42: 0 >= natural__first . H43: 0 <= natural__last . H44: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H45: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H46: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first . H47: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last . H48: 0 >= natural__first . H49: 0 <= natural__last . H50: fld_hash_bit_len(fld_h(ctx)) > 0 . H51: msg__index__subtype__1__first = 0 . H52: 0 >= msg__index__subtype__1__first . H53: 0 <= msg__index__subtype__1__last . H54: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= msg__index__subtype__1__last . H55: fld_byte_count(fld_h(ctx)) + ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= skein_512_block_bytes_index__last . H56: fld_hash_bit_len(fld_h(ctx__1)) > 0 . H57: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h( ctx)) . H58: fld_byte_count(fld_h(ctx__1)) = fld_byte_count(fld_h( ctx)) + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) . H59: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H60: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . 
H61: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H62: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H63: fld_byte_count(fld_h(ctx__1)) >= natural__first . H64: fld_byte_count(fld_h(ctx__1)) <= natural__last . H65: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H66: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H67: true . H68: true . H69: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H70: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H71: true . H72: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H73: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H74: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H75: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H76: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H77: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H78: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H79: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H80: msg__index__subtype__1__last + 1 >= natural__first . H81: msg__index__subtype__1__last + 1 <= natural__last . H82: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H83: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H84: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H85: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H86: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H87: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H88: 0 >= natural__first . H89: 0 <= natural__last . -> C1: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . C2: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . For path(s) from start to check associated with statement of line 800: procedure_skein_512_update_10. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . 
H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: fld_byte_count(fld_h(ctx)) > 0 . H38: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H39: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H40: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 . H41: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 . H42: 0 >= natural__first . H43: 0 <= natural__last . H44: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H45: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H46: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first . H47: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last . H48: 0 >= natural__first . H49: 0 <= natural__last . H50: fld_hash_bit_len(fld_h(ctx)) > 0 . H51: msg__index__subtype__1__first = 0 . H52: 0 >= msg__index__subtype__1__first . H53: 0 <= msg__index__subtype__1__last . H54: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= msg__index__subtype__1__last . H55: fld_byte_count(fld_h(ctx)) + ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= skein_512_block_bytes_index__last . H56: fld_hash_bit_len(fld_h(ctx__1)) > 0 . H57: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h( ctx)) . 
H58: fld_byte_count(fld_h(ctx__1)) = fld_byte_count(fld_h( ctx)) + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) . H59: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H60: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H61: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H62: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H63: fld_byte_count(fld_h(ctx__1)) >= natural__first . H64: fld_byte_count(fld_h(ctx__1)) <= natural__last . H65: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H66: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H67: true . H68: true . H69: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H70: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H71: true . H72: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H73: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H74: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H75: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H76: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H77: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H78: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H79: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H80: msg__index__subtype__1__last + 1 >= natural__first . H81: msg__index__subtype__1__last + 1 <= natural__last . H82: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H83: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H84: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H85: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H86: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H87: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H88: 0 >= natural__first . H89: 0 <= natural__last . H90: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H91: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . -> C1: fld_byte_count(fld_h(ctx__1)) = skein_512_block_bytes_c . For path(s) from start to precondition check associated with statement of line 803: procedure_skein_512_update_11. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . 
H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: fld_byte_count(fld_h(ctx)) > 0 . H38: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H39: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H40: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 . H41: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 . H42: 0 >= natural__first . H43: 0 <= natural__last . H44: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H45: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H46: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first . H47: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last . H48: 0 >= natural__first . H49: 0 <= natural__last . H50: fld_hash_bit_len(fld_h(ctx)) > 0 . H51: msg__index__subtype__1__first = 0 . H52: 0 >= msg__index__subtype__1__first . H53: 0 <= msg__index__subtype__1__last . 
H54: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= msg__index__subtype__1__last . H55: fld_byte_count(fld_h(ctx)) + ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= skein_512_block_bytes_index__last . H56: fld_hash_bit_len(fld_h(ctx__1)) > 0 . H57: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h( ctx)) . H58: fld_byte_count(fld_h(ctx__1)) = fld_byte_count(fld_h( ctx)) + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) . H59: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H60: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H61: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H62: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H63: fld_byte_count(fld_h(ctx__1)) >= natural__first . H64: fld_byte_count(fld_h(ctx__1)) <= natural__last . H65: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H66: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H67: true . H68: true . H69: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H70: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H71: true . H72: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H73: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H74: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H75: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H76: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H77: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H78: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H79: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H80: msg__index__subtype__1__last + 1 >= natural__first . H81: msg__index__subtype__1__last + 1 <= natural__last . H82: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H83: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H84: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H85: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H86: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H87: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H88: 0 >= natural__first . H89: 0 <= natural__last . H90: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H91: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H92: fld_byte_count(fld_h(ctx__1)) = skein_512_block_bytes_c . 
H93: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last))) . -> C1: skein_512_block_bytes_c >= natural__first . C2: skein_512_block_bytes_c <= natural__last . C3: 1 >= positive_block_512_count_t__first . C4: 1 <= positive_block_512_count_t__last . C5: 0 >= natural__first . C6: 0 <= natural__last . C7: fld_hash_bit_len(fld_h(ctx__1)) >= initialized_hash_bit_length__first . C8: fld_hash_bit_len(fld_h(ctx__1)) <= initialized_hash_bit_length__last . C9: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . C10: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . C11: skein_512_block_bytes_index__first = 0 . C12: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . C13: 0 + 63 <= skein_512_block_bytes_index__last . C14: skein_512_block_bytes_index__last <= natural__last . C15: 0 <= natural__last - 63 . For path(s) from start to run-time check associated with statement of line 808: procedure_skein_512_update_12. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . 
H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: fld_byte_count(fld_h(ctx)) > 0 . H38: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H39: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H40: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 . H41: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 . H42: 0 >= natural__first . H43: 0 <= natural__last . H44: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H45: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H46: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first . H47: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last . H48: 0 >= natural__first . H49: 0 <= natural__last . H50: fld_hash_bit_len(fld_h(ctx)) > 0 . H51: msg__index__subtype__1__first = 0 . H52: 0 >= msg__index__subtype__1__first . H53: 0 <= msg__index__subtype__1__last . H54: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= msg__index__subtype__1__last . H55: fld_byte_count(fld_h(ctx)) + ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= skein_512_block_bytes_index__last . H56: fld_hash_bit_len(fld_h(ctx__1)) > 0 . H57: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h( ctx)) . H58: fld_byte_count(fld_h(ctx__1)) = fld_byte_count(fld_h( ctx)) + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) . H59: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H60: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H61: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H62: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H63: fld_byte_count(fld_h(ctx__1)) >= natural__first . H64: fld_byte_count(fld_h(ctx__1)) <= natural__last . H65: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H66: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H67: true . H68: true . H69: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H70: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H71: true . H72: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H73: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H74: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H75: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . 
H76: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H77: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H78: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H79: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H80: msg__index__subtype__1__last + 1 >= natural__first . H81: msg__index__subtype__1__last + 1 <= natural__last . H82: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H83: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H84: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H85: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H86: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H87: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H88: 0 >= natural__first . H89: 0 <= natural__last . H90: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H91: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H92: fld_byte_count(fld_h(ctx__1)) = skein_512_block_bytes_c . H93: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last))) . H94: skein_512_block_bytes_c >= natural__first . H95: skein_512_block_bytes_c <= natural__last . H96: 1 >= positive_block_512_count_t__first . H97: 1 <= positive_block_512_count_t__last . H98: 0 >= natural__first . H99: 0 <= natural__last . H100: fld_hash_bit_len(fld_h(ctx__1)) >= initialized_hash_bit_length__first . H101: fld_hash_bit_len(fld_h(ctx__1)) <= initialized_hash_bit_length__last . H102: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H103: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H104: skein_512_block_bytes_index__first = 0 . H105: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H106: 0 + 63 <= skein_512_block_bytes_index__last . H107: skein_512_block_bytes_index__last <= natural__last . H108: 0 <= natural__last - 63 . H109: fld_hash_bit_len(fld_h(ctx__2)) >= initialized_hash_bit_length__first . H110: fld_hash_bit_len(fld_h(ctx__2)) <= initialized_hash_bit_length__last . H111: fld_hash_bit_len(fld_h(ctx__2)) = fld_hash_bit_len(fld_h(ctx__1)) . H112: fld_byte_count(fld_h(ctx__2)) >= skein_512_block_bytes_count__first . H113: fld_byte_count(fld_h(ctx__2)) <= skein_512_block_bytes_count__last . H114: fld_byte_count(fld_h(ctx__2)) = fld_byte_count(fld_h(ctx__1)) . H115: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H116: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H117: fld_byte_count(fld_h(ctx__2)) >= natural__first . 
H118: fld_byte_count(fld_h(ctx__2)) <= natural__last . H119: fld_hash_bit_len(fld_h(ctx__2)) >= hash_bit_length__first . H120: fld_hash_bit_len(fld_h(ctx__2)) <= hash_bit_length__last . H121: true . H122: true . H123: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u6__first . H124: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u6__last . H125: true . H126: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u7__first . H127: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u7__last . H128: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u16__first . H129: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u16__last . H130: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u32__first . H131: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u32__last . H132: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u64__first . H133: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u64__last . -> C1: 0 >= natural__first . C2: 0 <= natural__last . For path(s) from start to run-time check associated with statement of line 814: procedure_skein_512_update_13. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . 
H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: fld_byte_count(fld_h(ctx)) > 0 . H38: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H39: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H40: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 . H41: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 . H42: 0 >= natural__first . H43: 0 <= natural__last . H44: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H45: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H46: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first . H47: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last . H48: 0 >= natural__first . H49: 0 <= natural__last . H50: fld_hash_bit_len(fld_h(ctx)) > 0 . H51: msg__index__subtype__1__first = 0 . H52: 0 >= msg__index__subtype__1__first . H53: 0 <= msg__index__subtype__1__last . H54: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= msg__index__subtype__1__last . H55: fld_byte_count(fld_h(ctx)) + ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= skein_512_block_bytes_index__last . H56: fld_hash_bit_len(fld_h(ctx__1)) > 0 . H57: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h( ctx)) . H58: fld_byte_count(fld_h(ctx__1)) = fld_byte_count(fld_h( ctx)) + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) . H59: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H60: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H61: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H62: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H63: fld_byte_count(fld_h(ctx__1)) >= natural__first . H64: fld_byte_count(fld_h(ctx__1)) <= natural__last . H65: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H66: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H67: true . H68: true . H69: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H70: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H71: true . H72: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . 
H73: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H74: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H75: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H76: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H77: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H78: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H79: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H80: msg__index__subtype__1__last + 1 >= natural__first . H81: msg__index__subtype__1__last + 1 <= natural__last . H82: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H83: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H84: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H85: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H86: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H87: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H88: 0 >= natural__first . H89: 0 <= natural__last . H90: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H91: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H92: fld_byte_count(fld_h(ctx__1)) = skein_512_block_bytes_c . H93: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last))) . H94: skein_512_block_bytes_c >= natural__first . H95: skein_512_block_bytes_c <= natural__last . H96: 1 >= positive_block_512_count_t__first . H97: 1 <= positive_block_512_count_t__last . H98: 0 >= natural__first . H99: 0 <= natural__last . H100: fld_hash_bit_len(fld_h(ctx__1)) >= initialized_hash_bit_length__first . H101: fld_hash_bit_len(fld_h(ctx__1)) <= initialized_hash_bit_length__last . H102: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H103: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H104: skein_512_block_bytes_index__first = 0 . H105: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H106: 0 + 63 <= skein_512_block_bytes_index__last . H107: skein_512_block_bytes_index__last <= natural__last . H108: 0 <= natural__last - 63 . H109: fld_hash_bit_len(fld_h(ctx__2)) >= initialized_hash_bit_length__first . H110: fld_hash_bit_len(fld_h(ctx__2)) <= initialized_hash_bit_length__last . H111: fld_hash_bit_len(fld_h(ctx__2)) = fld_hash_bit_len(fld_h(ctx__1)) . H112: fld_byte_count(fld_h(ctx__2)) >= skein_512_block_bytes_count__first . H113: fld_byte_count(fld_h(ctx__2)) <= skein_512_block_bytes_count__last . H114: fld_byte_count(fld_h(ctx__2)) = fld_byte_count(fld_h(ctx__1)) . H115: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last))) . 
H116: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H117: fld_byte_count(fld_h(ctx__2)) >= natural__first . H118: fld_byte_count(fld_h(ctx__2)) <= natural__last . H119: fld_hash_bit_len(fld_h(ctx__2)) >= hash_bit_length__first . H120: fld_hash_bit_len(fld_h(ctx__2)) <= hash_bit_length__last . H121: true . H122: true . H123: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u6__first . H124: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u6__last . H125: true . H126: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u7__first . H127: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u7__last . H128: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u16__first . H129: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u16__last . H130: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u32__first . H131: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u32__last . H132: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u64__first . H133: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u64__last . H134: 0 >= natural__first . H135: 0 <= natural__last . H136: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H137: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H138: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) > skein_512_block_bytes_c . H139: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H140: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . -> C1: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . C2: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . C3: skein_512_block_bytes_c <> 0 . C4: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1 >= integer__base__first . C5: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1 <= integer__base__last . procedure_skein_512_update_14. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . 
H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: not (fld_byte_count(fld_h(ctx)) > 0) . H38: msg__index__subtype__1__last + 1 >= natural__first . H39: msg__index__subtype__1__last + 1 <= natural__last . H40: msg__index__subtype__1__last + 1 > skein_512_block_bytes_c . H41: msg__index__subtype__1__last + 1 >= natural__first . H42: msg__index__subtype__1__last + 1 <= natural__last . -> C1: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . C2: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . C3: skein_512_block_bytes_c <> 0 . C4: msg__index__subtype__1__last + 1 - 1 >= integer__base__first . C5: msg__index__subtype__1__last + 1 - 1 <= integer__base__last . For path(s) from start to precondition check associated with statement of line 816: procedure_skein_512_update_15. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . 
H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: fld_byte_count(fld_h(ctx)) > 0 . H38: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H39: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H40: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 . H41: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 . H42: 0 >= natural__first . H43: 0 <= natural__last . H44: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H45: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H46: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first . H47: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last . H48: 0 >= natural__first . H49: 0 <= natural__last . H50: fld_hash_bit_len(fld_h(ctx)) > 0 . H51: msg__index__subtype__1__first = 0 . H52: 0 >= msg__index__subtype__1__first . H53: 0 <= msg__index__subtype__1__last . 
H54: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= msg__index__subtype__1__last . H55: fld_byte_count(fld_h(ctx)) + ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= skein_512_block_bytes_index__last . H56: fld_hash_bit_len(fld_h(ctx__1)) > 0 . H57: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h( ctx)) . H58: fld_byte_count(fld_h(ctx__1)) = fld_byte_count(fld_h( ctx)) + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) . H59: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H60: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H61: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H62: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H63: fld_byte_count(fld_h(ctx__1)) >= natural__first . H64: fld_byte_count(fld_h(ctx__1)) <= natural__last . H65: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H66: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H67: true . H68: true . H69: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H70: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H71: true . H72: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H73: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H74: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H75: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H76: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H77: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H78: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H79: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H80: msg__index__subtype__1__last + 1 >= natural__first . H81: msg__index__subtype__1__last + 1 <= natural__last . H82: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H83: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H84: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H85: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H86: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H87: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H88: 0 >= natural__first . H89: 0 <= natural__last . H90: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H91: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H92: fld_byte_count(fld_h(ctx__1)) = skein_512_block_bytes_c . 
H93: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last))) . H94: skein_512_block_bytes_c >= natural__first . H95: skein_512_block_bytes_c <= natural__last . H96: 1 >= positive_block_512_count_t__first . H97: 1 <= positive_block_512_count_t__last . H98: 0 >= natural__first . H99: 0 <= natural__last . H100: fld_hash_bit_len(fld_h(ctx__1)) >= initialized_hash_bit_length__first . H101: fld_hash_bit_len(fld_h(ctx__1)) <= initialized_hash_bit_length__last . H102: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H103: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H104: skein_512_block_bytes_index__first = 0 . H105: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H106: 0 + 63 <= skein_512_block_bytes_index__last . H107: skein_512_block_bytes_index__last <= natural__last . H108: 0 <= natural__last - 63 . H109: fld_hash_bit_len(fld_h(ctx__2)) >= initialized_hash_bit_length__first . H110: fld_hash_bit_len(fld_h(ctx__2)) <= initialized_hash_bit_length__last . H111: fld_hash_bit_len(fld_h(ctx__2)) = fld_hash_bit_len(fld_h(ctx__1)) . H112: fld_byte_count(fld_h(ctx__2)) >= skein_512_block_bytes_count__first . H113: fld_byte_count(fld_h(ctx__2)) <= skein_512_block_bytes_count__last . H114: fld_byte_count(fld_h(ctx__2)) = fld_byte_count(fld_h(ctx__1)) . H115: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H116: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H117: fld_byte_count(fld_h(ctx__2)) >= natural__first . H118: fld_byte_count(fld_h(ctx__2)) <= natural__last . H119: fld_hash_bit_len(fld_h(ctx__2)) >= hash_bit_length__first . H120: fld_hash_bit_len(fld_h(ctx__2)) <= hash_bit_length__last . H121: true . H122: true . H123: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u6__first . H124: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u6__last . H125: true . H126: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u7__first . H127: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u7__last . H128: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u16__first . H129: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u16__last . H130: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u32__first . H131: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u32__last . H132: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u64__first . H133: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u64__last . H134: 0 >= natural__first . H135: 0 <= natural__last . H136: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H137: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . 
H138: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) > skein_512_block_bytes_c . H139: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H140: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H141: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H142: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H143: skein_512_block_bytes_c <> 0 . H144: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1 >= integer__base__first . H145: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1 <= integer__base__last . H146: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H147: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H148: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H149: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . -> C1: skein_512_block_bytes_c >= natural__first . C2: skein_512_block_bytes_c <= natural__last . C3: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . C4: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . C5: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . C6: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . C7: fld_hash_bit_len(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) >= initialized_hash_bit_length__first . C8: fld_hash_bit_len(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) <= initialized_hash_bit_length__last . C9: fld_byte_count(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) >= skein_512_block_bytes_count__first . C10: fld_byte_count(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) <= skein_512_block_bytes_count__last . C11: msg__index__subtype__1__first = 0 . C12: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + ((msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last . C13: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + 63 <= msg__index__subtype__1__last . C14: msg__index__subtype__1__last <= natural__last . C15: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last - 63 . procedure_skein_512_update_16. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . 
H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: not (fld_byte_count(fld_h(ctx)) > 0) . H38: msg__index__subtype__1__last + 1 >= natural__first . H39: msg__index__subtype__1__last + 1 <= natural__last . H40: msg__index__subtype__1__last + 1 > skein_512_block_bytes_c . H41: msg__index__subtype__1__last + 1 >= natural__first . H42: msg__index__subtype__1__last + 1 <= natural__last . H43: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H44: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H45: skein_512_block_bytes_c <> 0 . H46: msg__index__subtype__1__last + 1 - 1 >= integer__base__first . H47: msg__index__subtype__1__last + 1 - 1 <= integer__base__last . H48: 0 >= natural__first . H49: 0 <= natural__last . H50: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H51: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . 
-> C1: skein_512_block_bytes_c >= natural__first . C2: skein_512_block_bytes_c <= natural__last . C3: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . C4: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . C5: 0 >= natural__first . C6: 0 <= natural__last . C7: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . C8: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . C9: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . C10: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . C11: msg__index__subtype__1__first = 0 . C12: 0 + ((msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last . C13: 0 + 63 <= msg__index__subtype__1__last . C14: msg__index__subtype__1__last <= natural__last . C15: 0 <= natural__last - 63 . For path(s) from start to run-time check associated with statement of line 822: procedure_skein_512_update_17. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . 
H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: fld_byte_count(fld_h(ctx)) > 0 . H38: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H39: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H40: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 . H41: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 . H42: 0 >= natural__first . H43: 0 <= natural__last . H44: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H45: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H46: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first . H47: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last . H48: 0 >= natural__first . H49: 0 <= natural__last . H50: fld_hash_bit_len(fld_h(ctx)) > 0 . H51: msg__index__subtype__1__first = 0 . H52: 0 >= msg__index__subtype__1__first . H53: 0 <= msg__index__subtype__1__last . H54: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= msg__index__subtype__1__last . H55: fld_byte_count(fld_h(ctx)) + ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= skein_512_block_bytes_index__last . H56: fld_hash_bit_len(fld_h(ctx__1)) > 0 . H57: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h( ctx)) . H58: fld_byte_count(fld_h(ctx__1)) = fld_byte_count(fld_h( ctx)) + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) . H59: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H60: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H61: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H62: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H63: fld_byte_count(fld_h(ctx__1)) >= natural__first . H64: fld_byte_count(fld_h(ctx__1)) <= natural__last . H65: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H66: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H67: true . H68: true . H69: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H70: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H71: true . H72: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H73: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H74: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H75: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H76: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . 
H77: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H78: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H79: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H80: msg__index__subtype__1__last + 1 >= natural__first . H81: msg__index__subtype__1__last + 1 <= natural__last . H82: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H83: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H84: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H85: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H86: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H87: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H88: 0 >= natural__first . H89: 0 <= natural__last . H90: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H91: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H92: fld_byte_count(fld_h(ctx__1)) = skein_512_block_bytes_c . H93: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last))) . H94: skein_512_block_bytes_c >= natural__first . H95: skein_512_block_bytes_c <= natural__last . H96: 1 >= positive_block_512_count_t__first . H97: 1 <= positive_block_512_count_t__last . H98: 0 >= natural__first . H99: 0 <= natural__last . H100: fld_hash_bit_len(fld_h(ctx__1)) >= initialized_hash_bit_length__first . H101: fld_hash_bit_len(fld_h(ctx__1)) <= initialized_hash_bit_length__last . H102: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H103: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H104: skein_512_block_bytes_index__first = 0 . H105: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H106: 0 + 63 <= skein_512_block_bytes_index__last . H107: skein_512_block_bytes_index__last <= natural__last . H108: 0 <= natural__last - 63 . H109: fld_hash_bit_len(fld_h(ctx__2)) >= initialized_hash_bit_length__first . H110: fld_hash_bit_len(fld_h(ctx__2)) <= initialized_hash_bit_length__last . H111: fld_hash_bit_len(fld_h(ctx__2)) = fld_hash_bit_len(fld_h(ctx__1)) . H112: fld_byte_count(fld_h(ctx__2)) >= skein_512_block_bytes_count__first . H113: fld_byte_count(fld_h(ctx__2)) <= skein_512_block_bytes_count__last . H114: fld_byte_count(fld_h(ctx__2)) = fld_byte_count(fld_h(ctx__1)) . H115: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H116: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H117: fld_byte_count(fld_h(ctx__2)) >= natural__first . H118: fld_byte_count(fld_h(ctx__2)) <= natural__last . H119: fld_hash_bit_len(fld_h(ctx__2)) >= hash_bit_length__first . 
H120: fld_hash_bit_len(fld_h(ctx__2)) <= hash_bit_length__last . H121: true . H122: true . H123: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u6__first . H124: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u6__last . H125: true . H126: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u7__first . H127: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u7__last . H128: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u16__first . H129: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u16__last . H130: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u32__first . H131: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u32__last . H132: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u64__first . H133: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u64__last . H134: 0 >= natural__first . H135: 0 <= natural__last . H136: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H137: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H138: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) > skein_512_block_bytes_c . H139: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H140: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H141: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H142: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H143: skein_512_block_bytes_c <> 0 . H144: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1 >= integer__base__first . H145: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1 <= integer__base__last . H146: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H147: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H148: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H149: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H150: skein_512_block_bytes_c >= natural__first . H151: skein_512_block_bytes_c <= natural__last . H152: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H153: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H154: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H155: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H156: fld_hash_bit_len(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) >= initialized_hash_bit_length__first . 
H157: fld_hash_bit_len(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) <= initialized_hash_bit_length__last . H158: fld_byte_count(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) >= skein_512_block_bytes_count__first . H159: fld_byte_count(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) <= skein_512_block_bytes_count__last . H160: msg__index__subtype__1__first = 0 . H161: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + ((msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last . H162: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + 63 <= msg__index__subtype__1__last . H163: msg__index__subtype__1__last <= natural__last . H164: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last - 63 . H165: fld_hash_bit_len(fld_h(ctx__3)) >= initialized_hash_bit_length__first . H166: fld_hash_bit_len(fld_h(ctx__3)) <= initialized_hash_bit_length__last . H167: fld_hash_bit_len(fld_h(ctx__3)) = fld_hash_bit_len(fld_h(upf_h( ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) . H168: fld_byte_count(fld_h(ctx__3)) >= skein_512_block_bytes_count__first . H169: fld_byte_count(fld_h(ctx__3)) <= skein_512_block_bytes_count__last . H170: fld_byte_count(fld_h(ctx__3)) = fld_byte_count(fld_h(upf_h( ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) . H171: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H172: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H173: fld_byte_count(fld_h(ctx__3)) >= natural__first . H174: fld_byte_count(fld_h(ctx__3)) <= natural__last . H175: fld_hash_bit_len(fld_h(ctx__3)) >= hash_bit_length__first . H176: fld_hash_bit_len(fld_h(ctx__3)) <= hash_bit_length__last . H177: true . H178: true . H179: fld_field_type(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u6__first . H180: fld_field_type(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u6__last . H181: true . H182: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u7__first . H183: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u7__last . H184: fld_reserved(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u16__first . H185: fld_reserved(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u16__last . H186: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u32__first . H187: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u32__last . H188: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u64__first . H189: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u64__last . H190: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H191: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . 
-> C1: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . C2: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . procedure_skein_512_update_18. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: not (fld_byte_count(fld_h(ctx)) > 0) . H38: msg__index__subtype__1__last + 1 >= natural__first . H39: msg__index__subtype__1__last + 1 <= natural__last . H40: msg__index__subtype__1__last + 1 > skein_512_block_bytes_c . H41: msg__index__subtype__1__last + 1 >= natural__first . 
H42: msg__index__subtype__1__last + 1 <= natural__last . H43: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H44: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H45: skein_512_block_bytes_c <> 0 . H46: msg__index__subtype__1__last + 1 - 1 >= integer__base__first . H47: msg__index__subtype__1__last + 1 - 1 <= integer__base__last . H48: 0 >= natural__first . H49: 0 <= natural__last . H50: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H51: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H52: skein_512_block_bytes_c >= natural__first . H53: skein_512_block_bytes_c <= natural__last . H54: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H55: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H56: 0 >= natural__first . H57: 0 <= natural__last . H58: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H59: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H60: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H61: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H62: msg__index__subtype__1__first = 0 . H63: 0 + ((msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last . H64: 0 + 63 <= msg__index__subtype__1__last . H65: msg__index__subtype__1__last <= natural__last . H66: 0 <= natural__last - 63 . H67: fld_hash_bit_len(fld_h(ctx__3)) >= initialized_hash_bit_length__first . H68: fld_hash_bit_len(fld_h(ctx__3)) <= initialized_hash_bit_length__last . H69: fld_hash_bit_len(fld_h(ctx__3)) = fld_hash_bit_len(fld_h( ctx)) . H70: fld_byte_count(fld_h(ctx__3)) >= skein_512_block_bytes_count__first . H71: fld_byte_count(fld_h(ctx__3)) <= skein_512_block_bytes_count__last . H72: fld_byte_count(fld_h(ctx__3)) = fld_byte_count(fld_h( ctx)) . H73: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H74: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H75: fld_byte_count(fld_h(ctx__3)) >= natural__first . H76: fld_byte_count(fld_h(ctx__3)) <= natural__last . H77: fld_hash_bit_len(fld_h(ctx__3)) >= hash_bit_length__first . H78: fld_hash_bit_len(fld_h(ctx__3)) <= hash_bit_length__last . H79: true . H80: true . H81: fld_field_type(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u6__first . H82: fld_field_type(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u6__last . H83: true . H84: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u7__first . H85: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u7__last . H86: fld_reserved(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u16__first . H87: fld_reserved(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u16__last . H88: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u32__first . 
H89: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u32__last . H90: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u64__first . H91: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u64__last . H92: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H93: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . -> C1: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . C2: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . For path(s) from start to check associated with statement of line 824: procedure_skein_512_update_19. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . 
H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: fld_byte_count(fld_h(ctx)) > 0 . H38: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H39: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H40: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 . H41: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 . H42: 0 >= natural__first . H43: 0 <= natural__last . H44: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H45: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H46: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first . H47: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last . H48: 0 >= natural__first . H49: 0 <= natural__last . H50: fld_hash_bit_len(fld_h(ctx)) > 0 . H51: msg__index__subtype__1__first = 0 . H52: 0 >= msg__index__subtype__1__first . H53: 0 <= msg__index__subtype__1__last . H54: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= msg__index__subtype__1__last . H55: fld_byte_count(fld_h(ctx)) + ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= skein_512_block_bytes_index__last . H56: fld_hash_bit_len(fld_h(ctx__1)) > 0 . H57: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h( ctx)) . H58: fld_byte_count(fld_h(ctx__1)) = fld_byte_count(fld_h( ctx)) + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) . H59: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H60: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H61: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H62: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H63: fld_byte_count(fld_h(ctx__1)) >= natural__first . H64: fld_byte_count(fld_h(ctx__1)) <= natural__last . H65: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H66: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H67: true . H68: true . H69: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H70: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H71: true . H72: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H73: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H74: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H75: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H76: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H77: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H78: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H79: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . 
H80: msg__index__subtype__1__last + 1 >= natural__first . H81: msg__index__subtype__1__last + 1 <= natural__last . H82: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H83: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H84: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H85: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H86: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H87: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H88: 0 >= natural__first . H89: 0 <= natural__last . H90: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H91: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H92: fld_byte_count(fld_h(ctx__1)) = skein_512_block_bytes_c . H93: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last))) . H94: skein_512_block_bytes_c >= natural__first . H95: skein_512_block_bytes_c <= natural__last . H96: 1 >= positive_block_512_count_t__first . H97: 1 <= positive_block_512_count_t__last . H98: 0 >= natural__first . H99: 0 <= natural__last . H100: fld_hash_bit_len(fld_h(ctx__1)) >= initialized_hash_bit_length__first . H101: fld_hash_bit_len(fld_h(ctx__1)) <= initialized_hash_bit_length__last . H102: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H103: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H104: skein_512_block_bytes_index__first = 0 . H105: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H106: 0 + 63 <= skein_512_block_bytes_index__last . H107: skein_512_block_bytes_index__last <= natural__last . H108: 0 <= natural__last - 63 . H109: fld_hash_bit_len(fld_h(ctx__2)) >= initialized_hash_bit_length__first . H110: fld_hash_bit_len(fld_h(ctx__2)) <= initialized_hash_bit_length__last . H111: fld_hash_bit_len(fld_h(ctx__2)) = fld_hash_bit_len(fld_h(ctx__1)) . H112: fld_byte_count(fld_h(ctx__2)) >= skein_512_block_bytes_count__first . H113: fld_byte_count(fld_h(ctx__2)) <= skein_512_block_bytes_count__last . H114: fld_byte_count(fld_h(ctx__2)) = fld_byte_count(fld_h(ctx__1)) . H115: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H116: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H117: fld_byte_count(fld_h(ctx__2)) >= natural__first . H118: fld_byte_count(fld_h(ctx__2)) <= natural__last . H119: fld_hash_bit_len(fld_h(ctx__2)) >= hash_bit_length__first . H120: fld_hash_bit_len(fld_h(ctx__2)) <= hash_bit_length__last . H121: true . H122: true . H123: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u6__first . H124: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u6__last . 
H125: true . H126: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u7__first . H127: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u7__last . H128: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u16__first . H129: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u16__last . H130: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u32__first . H131: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u32__last . H132: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u64__first . H133: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u64__last . H134: 0 >= natural__first . H135: 0 <= natural__last . H136: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H137: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H138: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) > skein_512_block_bytes_c . H139: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H140: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H141: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H142: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H143: skein_512_block_bytes_c <> 0 . H144: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1 >= integer__base__first . H145: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1 <= integer__base__last . H146: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H147: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H148: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H149: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H150: skein_512_block_bytes_c >= natural__first . H151: skein_512_block_bytes_c <= natural__last . H152: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H153: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H154: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H155: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H156: fld_hash_bit_len(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) >= initialized_hash_bit_length__first . H157: fld_hash_bit_len(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) <= initialized_hash_bit_length__last . H158: fld_byte_count(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) >= skein_512_block_bytes_count__first . 
H159: fld_byte_count(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) <= skein_512_block_bytes_count__last . H160: msg__index__subtype__1__first = 0 . H161: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + ((msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last . H162: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + 63 <= msg__index__subtype__1__last . H163: msg__index__subtype__1__last <= natural__last . H164: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last - 63 . H165: fld_hash_bit_len(fld_h(ctx__3)) >= initialized_hash_bit_length__first . H166: fld_hash_bit_len(fld_h(ctx__3)) <= initialized_hash_bit_length__last . H167: fld_hash_bit_len(fld_h(ctx__3)) = fld_hash_bit_len(fld_h(upf_h( ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) . H168: fld_byte_count(fld_h(ctx__3)) >= skein_512_block_bytes_count__first . H169: fld_byte_count(fld_h(ctx__3)) <= skein_512_block_bytes_count__last . H170: fld_byte_count(fld_h(ctx__3)) = fld_byte_count(fld_h(upf_h( ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) . H171: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H172: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H173: fld_byte_count(fld_h(ctx__3)) >= natural__first . H174: fld_byte_count(fld_h(ctx__3)) <= natural__last . H175: fld_hash_bit_len(fld_h(ctx__3)) >= hash_bit_length__first . H176: fld_hash_bit_len(fld_h(ctx__3)) <= hash_bit_length__last . H177: true . H178: true . H179: fld_field_type(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u6__first . H180: fld_field_type(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u6__last . H181: true . H182: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u7__first . H183: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u7__last . H184: fld_reserved(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u16__first . H185: fld_reserved(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u16__last . H186: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u32__first . H187: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u32__last . H188: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u64__first . H189: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u64__last . H190: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H191: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H192: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . 
H193: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . -> C1: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c < msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) . procedure_skein_512_update_20. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: not (fld_byte_count(fld_h(ctx)) > 0) . H38: msg__index__subtype__1__last + 1 >= natural__first . H39: msg__index__subtype__1__last + 1 <= natural__last . 
H40: msg__index__subtype__1__last + 1 > skein_512_block_bytes_c . H41: msg__index__subtype__1__last + 1 >= natural__first . H42: msg__index__subtype__1__last + 1 <= natural__last . H43: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H44: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H45: skein_512_block_bytes_c <> 0 . H46: msg__index__subtype__1__last + 1 - 1 >= integer__base__first . H47: msg__index__subtype__1__last + 1 - 1 <= integer__base__last . H48: 0 >= natural__first . H49: 0 <= natural__last . H50: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H51: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H52: skein_512_block_bytes_c >= natural__first . H53: skein_512_block_bytes_c <= natural__last . H54: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H55: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H56: 0 >= natural__first . H57: 0 <= natural__last . H58: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H59: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H60: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H61: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H62: msg__index__subtype__1__first = 0 . H63: 0 + ((msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last . H64: 0 + 63 <= msg__index__subtype__1__last . H65: msg__index__subtype__1__last <= natural__last . H66: 0 <= natural__last - 63 . H67: fld_hash_bit_len(fld_h(ctx__3)) >= initialized_hash_bit_length__first . H68: fld_hash_bit_len(fld_h(ctx__3)) <= initialized_hash_bit_length__last . H69: fld_hash_bit_len(fld_h(ctx__3)) = fld_hash_bit_len(fld_h( ctx)) . H70: fld_byte_count(fld_h(ctx__3)) >= skein_512_block_bytes_count__first . H71: fld_byte_count(fld_h(ctx__3)) <= skein_512_block_bytes_count__last . H72: fld_byte_count(fld_h(ctx__3)) = fld_byte_count(fld_h( ctx)) . H73: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H74: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H75: fld_byte_count(fld_h(ctx__3)) >= natural__first . H76: fld_byte_count(fld_h(ctx__3)) <= natural__last . H77: fld_hash_bit_len(fld_h(ctx__3)) >= hash_bit_length__first . H78: fld_hash_bit_len(fld_h(ctx__3)) <= hash_bit_length__last . H79: true . H80: true . H81: fld_field_type(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u6__first . H82: fld_field_type(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u6__last . H83: true . H84: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u7__first . H85: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u7__last . H86: fld_reserved(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u16__first . 
H87: fld_reserved(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u16__last . H88: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u32__first . H89: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u32__last . H90: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u64__first . H91: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u64__last . H92: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H93: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H94: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H95: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . -> C1: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c < msg__index__subtype__1__last + 1 . For path(s) from start to run-time check associated with statement of line 825: procedure_skein_512_update_21. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . 
H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: fld_byte_count(fld_h(ctx)) > 0 . H38: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H39: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H40: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 . H41: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 . H42: 0 >= natural__first . H43: 0 <= natural__last . H44: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H45: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H46: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first . H47: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last . H48: 0 >= natural__first . H49: 0 <= natural__last . H50: fld_hash_bit_len(fld_h(ctx)) > 0 . H51: msg__index__subtype__1__first = 0 . H52: 0 >= msg__index__subtype__1__first . H53: 0 <= msg__index__subtype__1__last . H54: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= msg__index__subtype__1__last . H55: fld_byte_count(fld_h(ctx)) + ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= skein_512_block_bytes_index__last . H56: fld_hash_bit_len(fld_h(ctx__1)) > 0 . H57: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h( ctx)) . H58: fld_byte_count(fld_h(ctx__1)) = fld_byte_count(fld_h( ctx)) + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) . H59: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H60: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H61: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H62: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H63: fld_byte_count(fld_h(ctx__1)) >= natural__first . H64: fld_byte_count(fld_h(ctx__1)) <= natural__last . H65: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H66: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H67: true . H68: true . H69: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H70: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H71: true . H72: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H73: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H74: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H75: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . 
H76: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H77: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H78: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H79: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H80: msg__index__subtype__1__last + 1 >= natural__first . H81: msg__index__subtype__1__last + 1 <= natural__last . H82: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H83: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H84: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H85: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H86: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H87: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H88: 0 >= natural__first . H89: 0 <= natural__last . H90: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H91: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H92: fld_byte_count(fld_h(ctx__1)) = skein_512_block_bytes_c . H93: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last))) . H94: skein_512_block_bytes_c >= natural__first . H95: skein_512_block_bytes_c <= natural__last . H96: 1 >= positive_block_512_count_t__first . H97: 1 <= positive_block_512_count_t__last . H98: 0 >= natural__first . H99: 0 <= natural__last . H100: fld_hash_bit_len(fld_h(ctx__1)) >= initialized_hash_bit_length__first . H101: fld_hash_bit_len(fld_h(ctx__1)) <= initialized_hash_bit_length__last . H102: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H103: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H104: skein_512_block_bytes_index__first = 0 . H105: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H106: 0 + 63 <= skein_512_block_bytes_index__last . H107: skein_512_block_bytes_index__last <= natural__last . H108: 0 <= natural__last - 63 . H109: fld_hash_bit_len(fld_h(ctx__2)) >= initialized_hash_bit_length__first . H110: fld_hash_bit_len(fld_h(ctx__2)) <= initialized_hash_bit_length__last . H111: fld_hash_bit_len(fld_h(ctx__2)) = fld_hash_bit_len(fld_h(ctx__1)) . H112: fld_byte_count(fld_h(ctx__2)) >= skein_512_block_bytes_count__first . H113: fld_byte_count(fld_h(ctx__2)) <= skein_512_block_bytes_count__last . H114: fld_byte_count(fld_h(ctx__2)) = fld_byte_count(fld_h(ctx__1)) . H115: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H116: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H117: fld_byte_count(fld_h(ctx__2)) >= natural__first . 
H118: fld_byte_count(fld_h(ctx__2)) <= natural__last . H119: fld_hash_bit_len(fld_h(ctx__2)) >= hash_bit_length__first . H120: fld_hash_bit_len(fld_h(ctx__2)) <= hash_bit_length__last . H121: true . H122: true . H123: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u6__first . H124: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u6__last . H125: true . H126: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u7__first . H127: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u7__last . H128: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u16__first . H129: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u16__last . H130: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u32__first . H131: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u32__last . H132: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u64__first . H133: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u64__last . H134: 0 >= natural__first . H135: 0 <= natural__last . H136: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H137: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H138: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) > skein_512_block_bytes_c . H139: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H140: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H141: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H142: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H143: skein_512_block_bytes_c <> 0 . H144: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1 >= integer__base__first . H145: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1 <= integer__base__last . H146: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H147: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H148: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H149: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H150: skein_512_block_bytes_c >= natural__first . H151: skein_512_block_bytes_c <= natural__last . H152: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H153: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H154: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H155: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . 
H156: fld_hash_bit_len(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) >= initialized_hash_bit_length__first . H157: fld_hash_bit_len(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) <= initialized_hash_bit_length__last . H158: fld_byte_count(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) >= skein_512_block_bytes_count__first . H159: fld_byte_count(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) <= skein_512_block_bytes_count__last . H160: msg__index__subtype__1__first = 0 . H161: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + ((msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last . H162: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + 63 <= msg__index__subtype__1__last . H163: msg__index__subtype__1__last <= natural__last . H164: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last - 63 . H165: fld_hash_bit_len(fld_h(ctx__3)) >= initialized_hash_bit_length__first . H166: fld_hash_bit_len(fld_h(ctx__3)) <= initialized_hash_bit_length__last . H167: fld_hash_bit_len(fld_h(ctx__3)) = fld_hash_bit_len(fld_h(upf_h( ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) . H168: fld_byte_count(fld_h(ctx__3)) >= skein_512_block_bytes_count__first . H169: fld_byte_count(fld_h(ctx__3)) <= skein_512_block_bytes_count__last . H170: fld_byte_count(fld_h(ctx__3)) = fld_byte_count(fld_h(upf_h( ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) . H171: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H172: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H173: fld_byte_count(fld_h(ctx__3)) >= natural__first . H174: fld_byte_count(fld_h(ctx__3)) <= natural__last . H175: fld_hash_bit_len(fld_h(ctx__3)) >= hash_bit_length__first . H176: fld_hash_bit_len(fld_h(ctx__3)) <= hash_bit_length__last . H177: true . H178: true . H179: fld_field_type(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u6__first . H180: fld_field_type(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u6__last . H181: true . H182: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u7__first . H183: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u7__last . H184: fld_reserved(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u16__first . H185: fld_reserved(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u16__last . H186: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u32__first . H187: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u32__last . H188: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u64__first . H189: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u64__last . H190: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . 
H191: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H192: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H193: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H194: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c < msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) . H195: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H196: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H197: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H198: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . -> C1: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . C2: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . procedure_skein_512_update_22. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . 
H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: not (fld_byte_count(fld_h(ctx)) > 0) . H38: msg__index__subtype__1__last + 1 >= natural__first . H39: msg__index__subtype__1__last + 1 <= natural__last . H40: msg__index__subtype__1__last + 1 > skein_512_block_bytes_c . H41: msg__index__subtype__1__last + 1 >= natural__first . H42: msg__index__subtype__1__last + 1 <= natural__last . H43: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H44: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H45: skein_512_block_bytes_c <> 0 . H46: msg__index__subtype__1__last + 1 - 1 >= integer__base__first . H47: msg__index__subtype__1__last + 1 - 1 <= integer__base__last . H48: 0 >= natural__first . H49: 0 <= natural__last . H50: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H51: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H52: skein_512_block_bytes_c >= natural__first . H53: skein_512_block_bytes_c <= natural__last . H54: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H55: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H56: 0 >= natural__first . H57: 0 <= natural__last . H58: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H59: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H60: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H61: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H62: msg__index__subtype__1__first = 0 . H63: 0 + ((msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last . H64: 0 + 63 <= msg__index__subtype__1__last . H65: msg__index__subtype__1__last <= natural__last . H66: 0 <= natural__last - 63 . H67: fld_hash_bit_len(fld_h(ctx__3)) >= initialized_hash_bit_length__first . H68: fld_hash_bit_len(fld_h(ctx__3)) <= initialized_hash_bit_length__last . H69: fld_hash_bit_len(fld_h(ctx__3)) = fld_hash_bit_len(fld_h( ctx)) . 
H70: fld_byte_count(fld_h(ctx__3)) >= skein_512_block_bytes_count__first . H71: fld_byte_count(fld_h(ctx__3)) <= skein_512_block_bytes_count__last . H72: fld_byte_count(fld_h(ctx__3)) = fld_byte_count(fld_h( ctx)) . H73: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H74: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H75: fld_byte_count(fld_h(ctx__3)) >= natural__first . H76: fld_byte_count(fld_h(ctx__3)) <= natural__last . H77: fld_hash_bit_len(fld_h(ctx__3)) >= hash_bit_length__first . H78: fld_hash_bit_len(fld_h(ctx__3)) <= hash_bit_length__last . H79: true . H80: true . H81: fld_field_type(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u6__first . H82: fld_field_type(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u6__last . H83: true . H84: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u7__first . H85: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u7__last . H86: fld_reserved(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u16__first . H87: fld_reserved(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u16__last . H88: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u32__first . H89: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u32__last . H90: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u64__first . H91: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u64__last . H92: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H93: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H94: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H95: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H96: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c < msg__index__subtype__1__last + 1 . H97: msg__index__subtype__1__last + 1 >= natural__first . H98: msg__index__subtype__1__last + 1 <= natural__last . H99: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H100: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . -> C1: msg__index__subtype__1__last + 1 - ( msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . C2: msg__index__subtype__1__last + 1 - ( msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . For path(s) from start to run-time check associated with statement of line 827: procedure_skein_512_update_23. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . 
H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: fld_byte_count(fld_h(ctx)) > 0 . H38: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H39: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H40: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 . H41: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 . H42: 0 >= natural__first . H43: 0 <= natural__last . H44: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H45: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H46: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first . H47: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last . H48: 0 >= natural__first . H49: 0 <= natural__last . H50: fld_hash_bit_len(fld_h(ctx)) > 0 . H51: msg__index__subtype__1__first = 0 . 
H52: 0 >= msg__index__subtype__1__first . H53: 0 <= msg__index__subtype__1__last . H54: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= msg__index__subtype__1__last . H55: fld_byte_count(fld_h(ctx)) + ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= skein_512_block_bytes_index__last . H56: fld_hash_bit_len(fld_h(ctx__1)) > 0 . H57: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h( ctx)) . H58: fld_byte_count(fld_h(ctx__1)) = fld_byte_count(fld_h( ctx)) + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) . H59: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H60: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H61: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H62: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H63: fld_byte_count(fld_h(ctx__1)) >= natural__first . H64: fld_byte_count(fld_h(ctx__1)) <= natural__last . H65: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H66: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H67: true . H68: true . H69: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H70: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H71: true . H72: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H73: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H74: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H75: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H76: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H77: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H78: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H79: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H80: msg__index__subtype__1__last + 1 >= natural__first . H81: msg__index__subtype__1__last + 1 <= natural__last . H82: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H83: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H84: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H85: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H86: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H87: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H88: 0 >= natural__first . H89: 0 <= natural__last . H90: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H91: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H92: fld_byte_count(fld_h(ctx__1)) = skein_512_block_bytes_c . 
H93: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last))) . H94: skein_512_block_bytes_c >= natural__first . H95: skein_512_block_bytes_c <= natural__last . H96: 1 >= positive_block_512_count_t__first . H97: 1 <= positive_block_512_count_t__last . H98: 0 >= natural__first . H99: 0 <= natural__last . H100: fld_hash_bit_len(fld_h(ctx__1)) >= initialized_hash_bit_length__first . H101: fld_hash_bit_len(fld_h(ctx__1)) <= initialized_hash_bit_length__last . H102: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H103: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H104: skein_512_block_bytes_index__first = 0 . H105: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H106: 0 + 63 <= skein_512_block_bytes_index__last . H107: skein_512_block_bytes_index__last <= natural__last . H108: 0 <= natural__last - 63 . H109: fld_hash_bit_len(fld_h(ctx__2)) >= initialized_hash_bit_length__first . H110: fld_hash_bit_len(fld_h(ctx__2)) <= initialized_hash_bit_length__last . H111: fld_hash_bit_len(fld_h(ctx__2)) = fld_hash_bit_len(fld_h(ctx__1)) . H112: fld_byte_count(fld_h(ctx__2)) >= skein_512_block_bytes_count__first . H113: fld_byte_count(fld_h(ctx__2)) <= skein_512_block_bytes_count__last . H114: fld_byte_count(fld_h(ctx__2)) = fld_byte_count(fld_h(ctx__1)) . H115: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H116: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H117: fld_byte_count(fld_h(ctx__2)) >= natural__first . H118: fld_byte_count(fld_h(ctx__2)) <= natural__last . H119: fld_hash_bit_len(fld_h(ctx__2)) >= hash_bit_length__first . H120: fld_hash_bit_len(fld_h(ctx__2)) <= hash_bit_length__last . H121: true . H122: true . H123: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u6__first . H124: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u6__last . H125: true . H126: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u7__first . H127: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u7__last . H128: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u16__first . H129: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u16__last . H130: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u32__first . H131: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u32__last . H132: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u64__first . H133: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u64__last . H134: 0 >= natural__first . H135: 0 <= natural__last . H136: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H137: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . 
H138: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) > skein_512_block_bytes_c . H139: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H140: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H141: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H142: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H143: skein_512_block_bytes_c <> 0 . H144: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1 >= integer__base__first . H145: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1 <= integer__base__last . H146: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H147: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H148: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H149: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H150: skein_512_block_bytes_c >= natural__first . H151: skein_512_block_bytes_c <= natural__last . H152: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H153: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H154: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H155: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H156: fld_hash_bit_len(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) >= initialized_hash_bit_length__first . H157: fld_hash_bit_len(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) <= initialized_hash_bit_length__last . H158: fld_byte_count(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) >= skein_512_block_bytes_count__first . H159: fld_byte_count(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) <= skein_512_block_bytes_count__last . H160: msg__index__subtype__1__first = 0 . H161: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + ((msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last . H162: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + 63 <= msg__index__subtype__1__last . H163: msg__index__subtype__1__last <= natural__last . H164: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last - 63 . H165: fld_hash_bit_len(fld_h(ctx__3)) >= initialized_hash_bit_length__first . H166: fld_hash_bit_len(fld_h(ctx__3)) <= initialized_hash_bit_length__last . H167: fld_hash_bit_len(fld_h(ctx__3)) = fld_hash_bit_len(fld_h(upf_h( ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) . H168: fld_byte_count(fld_h(ctx__3)) >= skein_512_block_bytes_count__first . 
H169: fld_byte_count(fld_h(ctx__3)) <= skein_512_block_bytes_count__last . H170: fld_byte_count(fld_h(ctx__3)) = fld_byte_count(fld_h(upf_h( ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) . H171: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H172: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H173: fld_byte_count(fld_h(ctx__3)) >= natural__first . H174: fld_byte_count(fld_h(ctx__3)) <= natural__last . H175: fld_hash_bit_len(fld_h(ctx__3)) >= hash_bit_length__first . H176: fld_hash_bit_len(fld_h(ctx__3)) <= hash_bit_length__last . H177: true . H178: true . H179: fld_field_type(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u6__first . H180: fld_field_type(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u6__last . H181: true . H182: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u7__first . H183: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u7__last . H184: fld_reserved(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u16__first . H185: fld_reserved(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u16__last . H186: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u32__first . H187: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u32__last . H188: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u64__first . H189: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u64__last . H190: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H191: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H192: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H193: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H194: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c < msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) . H195: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H196: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H197: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H198: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . 
H199: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H200: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H201: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H202: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H203: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H204: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . -> C1: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . C2: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . procedure_skein_512_update_24. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . 
H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: not (fld_byte_count(fld_h(ctx)) > 0) . H38: msg__index__subtype__1__last + 1 >= natural__first . H39: msg__index__subtype__1__last + 1 <= natural__last . H40: msg__index__subtype__1__last + 1 > skein_512_block_bytes_c . H41: msg__index__subtype__1__last + 1 >= natural__first . H42: msg__index__subtype__1__last + 1 <= natural__last . H43: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H44: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H45: skein_512_block_bytes_c <> 0 . H46: msg__index__subtype__1__last + 1 - 1 >= integer__base__first . H47: msg__index__subtype__1__last + 1 - 1 <= integer__base__last . H48: 0 >= natural__first . H49: 0 <= natural__last . H50: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H51: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H52: skein_512_block_bytes_c >= natural__first . H53: skein_512_block_bytes_c <= natural__last . H54: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H55: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H56: 0 >= natural__first . H57: 0 <= natural__last . H58: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H59: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H60: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H61: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H62: msg__index__subtype__1__first = 0 . H63: 0 + ((msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last . H64: 0 + 63 <= msg__index__subtype__1__last . H65: msg__index__subtype__1__last <= natural__last . H66: 0 <= natural__last - 63 . H67: fld_hash_bit_len(fld_h(ctx__3)) >= initialized_hash_bit_length__first . H68: fld_hash_bit_len(fld_h(ctx__3)) <= initialized_hash_bit_length__last . H69: fld_hash_bit_len(fld_h(ctx__3)) = fld_hash_bit_len(fld_h( ctx)) . H70: fld_byte_count(fld_h(ctx__3)) >= skein_512_block_bytes_count__first . H71: fld_byte_count(fld_h(ctx__3)) <= skein_512_block_bytes_count__last . H72: fld_byte_count(fld_h(ctx__3)) = fld_byte_count(fld_h( ctx)) . 
H73: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H74: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H75: fld_byte_count(fld_h(ctx__3)) >= natural__first . H76: fld_byte_count(fld_h(ctx__3)) <= natural__last . H77: fld_hash_bit_len(fld_h(ctx__3)) >= hash_bit_length__first . H78: fld_hash_bit_len(fld_h(ctx__3)) <= hash_bit_length__last . H79: true . H80: true . H81: fld_field_type(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u6__first . H82: fld_field_type(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u6__last . H83: true . H84: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u7__first . H85: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u7__last . H86: fld_reserved(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u16__first . H87: fld_reserved(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u16__last . H88: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u32__first . H89: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u32__last . H90: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u64__first . H91: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u64__last . H92: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H93: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H94: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H95: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H96: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c < msg__index__subtype__1__last + 1 . H97: msg__index__subtype__1__last + 1 >= natural__first . H98: msg__index__subtype__1__last + 1 <= natural__last . H99: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H100: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H101: msg__index__subtype__1__last + 1 - ( msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H102: msg__index__subtype__1__last + 1 - ( msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H103: 0 >= natural__first . H104: 0 <= natural__last . H105: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H106: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . -> C1: 0 + (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . C2: 0 + (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . 
For path(s) from start to precondition check associated with statement of line 835: procedure_skein_512_update_25. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: fld_byte_count(fld_h(ctx)) > 0 . H38: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H39: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H40: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 . H41: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 . H42: 0 >= natural__first . H43: 0 <= natural__last . 
H44: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H45: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H46: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first . H47: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last . H48: 0 >= natural__first . H49: 0 <= natural__last . H50: fld_hash_bit_len(fld_h(ctx)) > 0 . H51: msg__index__subtype__1__first = 0 . H52: 0 >= msg__index__subtype__1__first . H53: 0 <= msg__index__subtype__1__last . H54: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= msg__index__subtype__1__last . H55: fld_byte_count(fld_h(ctx)) + ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= skein_512_block_bytes_index__last . H56: fld_hash_bit_len(fld_h(ctx__1)) > 0 . H57: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h( ctx)) . H58: fld_byte_count(fld_h(ctx__1)) = fld_byte_count(fld_h( ctx)) + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) . H59: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H60: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H61: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H62: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H63: fld_byte_count(fld_h(ctx__1)) >= natural__first . H64: fld_byte_count(fld_h(ctx__1)) <= natural__last . H65: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H66: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H67: true . H68: true . H69: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H70: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H71: true . H72: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H73: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H74: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H75: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H76: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H77: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H78: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H79: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H80: msg__index__subtype__1__last + 1 >= natural__first . H81: msg__index__subtype__1__last + 1 <= natural__last . H82: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H83: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H84: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H85: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H86: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . 
H87: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H88: 0 >= natural__first . H89: 0 <= natural__last . H90: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H91: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H92: fld_byte_count(fld_h(ctx__1)) = skein_512_block_bytes_c . H93: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last))) . H94: skein_512_block_bytes_c >= natural__first . H95: skein_512_block_bytes_c <= natural__last . H96: 1 >= positive_block_512_count_t__first . H97: 1 <= positive_block_512_count_t__last . H98: 0 >= natural__first . H99: 0 <= natural__last . H100: fld_hash_bit_len(fld_h(ctx__1)) >= initialized_hash_bit_length__first . H101: fld_hash_bit_len(fld_h(ctx__1)) <= initialized_hash_bit_length__last . H102: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H103: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H104: skein_512_block_bytes_index__first = 0 . H105: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H106: 0 + 63 <= skein_512_block_bytes_index__last . H107: skein_512_block_bytes_index__last <= natural__last . H108: 0 <= natural__last - 63 . H109: fld_hash_bit_len(fld_h(ctx__2)) >= initialized_hash_bit_length__first . H110: fld_hash_bit_len(fld_h(ctx__2)) <= initialized_hash_bit_length__last . H111: fld_hash_bit_len(fld_h(ctx__2)) = fld_hash_bit_len(fld_h(ctx__1)) . H112: fld_byte_count(fld_h(ctx__2)) >= skein_512_block_bytes_count__first . H113: fld_byte_count(fld_h(ctx__2)) <= skein_512_block_bytes_count__last . H114: fld_byte_count(fld_h(ctx__2)) = fld_byte_count(fld_h(ctx__1)) . H115: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H116: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H117: fld_byte_count(fld_h(ctx__2)) >= natural__first . H118: fld_byte_count(fld_h(ctx__2)) <= natural__last . H119: fld_hash_bit_len(fld_h(ctx__2)) >= hash_bit_length__first . H120: fld_hash_bit_len(fld_h(ctx__2)) <= hash_bit_length__last . H121: true . H122: true . H123: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u6__first . H124: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u6__last . H125: true . H126: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u7__first . H127: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u7__last . H128: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u16__first . H129: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u16__last . H130: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u32__first . H131: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u32__last . H132: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u64__first . 
H133: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u64__last . H134: 0 >= natural__first . H135: 0 <= natural__last . H136: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H137: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H138: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) > skein_512_block_bytes_c . H139: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H140: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H141: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H142: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H143: skein_512_block_bytes_c <> 0 . H144: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1 >= integer__base__first . H145: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1 <= integer__base__last . H146: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H147: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H148: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H149: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H150: skein_512_block_bytes_c >= natural__first . H151: skein_512_block_bytes_c <= natural__last . H152: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H153: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H154: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H155: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H156: fld_hash_bit_len(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) >= initialized_hash_bit_length__first . H157: fld_hash_bit_len(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) <= initialized_hash_bit_length__last . H158: fld_byte_count(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) >= skein_512_block_bytes_count__first . H159: fld_byte_count(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) <= skein_512_block_bytes_count__last . H160: msg__index__subtype__1__first = 0 . H161: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + ((msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last . H162: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + 63 <= msg__index__subtype__1__last . H163: msg__index__subtype__1__last <= natural__last . H164: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last - 63 . 
H165: fld_hash_bit_len(fld_h(ctx__3)) >= initialized_hash_bit_length__first . H166: fld_hash_bit_len(fld_h(ctx__3)) <= initialized_hash_bit_length__last . H167: fld_hash_bit_len(fld_h(ctx__3)) = fld_hash_bit_len(fld_h(upf_h( ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) . H168: fld_byte_count(fld_h(ctx__3)) >= skein_512_block_bytes_count__first . H169: fld_byte_count(fld_h(ctx__3)) <= skein_512_block_bytes_count__last . H170: fld_byte_count(fld_h(ctx__3)) = fld_byte_count(fld_h(upf_h( ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) . H171: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H172: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H173: fld_byte_count(fld_h(ctx__3)) >= natural__first . H174: fld_byte_count(fld_h(ctx__3)) <= natural__last . H175: fld_hash_bit_len(fld_h(ctx__3)) >= hash_bit_length__first . H176: fld_hash_bit_len(fld_h(ctx__3)) <= hash_bit_length__last . H177: true . H178: true . H179: fld_field_type(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u6__first . H180: fld_field_type(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u6__last . H181: true . H182: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u7__first . H183: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u7__last . H184: fld_reserved(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u16__first . H185: fld_reserved(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u16__last . H186: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u32__first . H187: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u32__last . H188: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u64__first . H189: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u64__last . H190: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H191: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H192: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H193: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H194: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c < msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) . H195: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H196: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . 
H197: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H198: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H199: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H200: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H201: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H202: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H203: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H204: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H205: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H206: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H207: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H208: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H209: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H210: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . -> C1: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . C2: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . 
C3: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . C4: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . C5: fld_hash_bit_len(fld_h(ctx__3)) > 0 . C6: msg__index__subtype__1__first = 0 . C7: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= msg__index__subtype__1__first . C8: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= msg__index__subtype__1__last . C9: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c + ( msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c - 1) <= msg__index__subtype__1__last . C10: fld_byte_count(fld_h(ctx__3)) + ( msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c - 1) <= skein_512_block_bytes_index__last . procedure_skein_512_update_26. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . 
H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: not (fld_byte_count(fld_h(ctx)) > 0) . H38: msg__index__subtype__1__last + 1 >= natural__first . H39: msg__index__subtype__1__last + 1 <= natural__last . H40: msg__index__subtype__1__last + 1 > skein_512_block_bytes_c . H41: msg__index__subtype__1__last + 1 >= natural__first . H42: msg__index__subtype__1__last + 1 <= natural__last . H43: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H44: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H45: skein_512_block_bytes_c <> 0 . H46: msg__index__subtype__1__last + 1 - 1 >= integer__base__first . H47: msg__index__subtype__1__last + 1 - 1 <= integer__base__last . H48: 0 >= natural__first . H49: 0 <= natural__last . H50: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H51: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H52: skein_512_block_bytes_c >= natural__first . H53: skein_512_block_bytes_c <= natural__last . H54: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H55: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H56: 0 >= natural__first . H57: 0 <= natural__last . H58: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H59: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H60: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H61: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H62: msg__index__subtype__1__first = 0 . H63: 0 + ((msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last . H64: 0 + 63 <= msg__index__subtype__1__last . H65: msg__index__subtype__1__last <= natural__last . H66: 0 <= natural__last - 63 . H67: fld_hash_bit_len(fld_h(ctx__3)) >= initialized_hash_bit_length__first . H68: fld_hash_bit_len(fld_h(ctx__3)) <= initialized_hash_bit_length__last . H69: fld_hash_bit_len(fld_h(ctx__3)) = fld_hash_bit_len(fld_h( ctx)) . 
H70: fld_byte_count(fld_h(ctx__3)) >= skein_512_block_bytes_count__first . H71: fld_byte_count(fld_h(ctx__3)) <= skein_512_block_bytes_count__last . H72: fld_byte_count(fld_h(ctx__3)) = fld_byte_count(fld_h( ctx)) . H73: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H74: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H75: fld_byte_count(fld_h(ctx__3)) >= natural__first . H76: fld_byte_count(fld_h(ctx__3)) <= natural__last . H77: fld_hash_bit_len(fld_h(ctx__3)) >= hash_bit_length__first . H78: fld_hash_bit_len(fld_h(ctx__3)) <= hash_bit_length__last . H79: true . H80: true . H81: fld_field_type(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u6__first . H82: fld_field_type(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u6__last . H83: true . H84: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u7__first . H85: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u7__last . H86: fld_reserved(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u16__first . H87: fld_reserved(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u16__last . H88: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u32__first . H89: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u32__last . H90: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u64__first . H91: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u64__last . H92: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H93: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H94: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H95: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H96: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c < msg__index__subtype__1__last + 1 . H97: msg__index__subtype__1__last + 1 >= natural__first . H98: msg__index__subtype__1__last + 1 <= natural__last . H99: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H100: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H101: msg__index__subtype__1__last + 1 - ( msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H102: msg__index__subtype__1__last + 1 - ( msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H103: 0 >= natural__first . H104: 0 <= natural__last . H105: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H106: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H107: 0 + (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . 
H108: 0 + (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H109: 0 + (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H110: 0 + (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H111: msg__index__subtype__1__last + 1 - ( msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H112: msg__index__subtype__1__last + 1 - ( msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . -> C1: msg__index__subtype__1__last + 1 - ( msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . C2: msg__index__subtype__1__last + 1 - ( msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . C3: 0 + (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . C4: 0 + (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . C5: fld_hash_bit_len(fld_h(ctx__3)) > 0 . C6: msg__index__subtype__1__first = 0 . C7: 0 + (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= msg__index__subtype__1__first . C8: 0 + (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= msg__index__subtype__1__last . C9: 0 + (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c + ( msg__index__subtype__1__last + 1 - ( msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c - 1) <= msg__index__subtype__1__last . C10: fld_byte_count(fld_h(ctx__3)) + ( msg__index__subtype__1__last + 1 - ( msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c - 1) <= skein_512_block_bytes_index__last . procedure_skein_512_update_27. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . 
H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: fld_byte_count(fld_h(ctx)) > 0 . H38: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H39: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H40: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 . H41: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 . H42: 0 >= natural__first . H43: 0 <= natural__last . H44: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H45: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H46: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first . H47: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last . H48: 0 >= natural__first . H49: 0 <= natural__last . H50: fld_hash_bit_len(fld_h(ctx)) > 0 . H51: msg__index__subtype__1__first = 0 . H52: 0 >= msg__index__subtype__1__first . H53: 0 <= msg__index__subtype__1__last . H54: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= msg__index__subtype__1__last . H55: fld_byte_count(fld_h(ctx)) + ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= skein_512_block_bytes_index__last . H56: fld_hash_bit_len(fld_h(ctx__1)) > 0 . H57: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h( ctx)) . H58: fld_byte_count(fld_h(ctx__1)) = fld_byte_count(fld_h( ctx)) + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) . H59: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H60: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H61: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last))) . 
H62: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H63: fld_byte_count(fld_h(ctx__1)) >= natural__first . H64: fld_byte_count(fld_h(ctx__1)) <= natural__last . H65: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H66: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H67: true . H68: true . H69: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H70: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H71: true . H72: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H73: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H74: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H75: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H76: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H77: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H78: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H79: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H80: msg__index__subtype__1__last + 1 >= natural__first . H81: msg__index__subtype__1__last + 1 <= natural__last . H82: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H83: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H84: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H85: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H86: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H87: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H88: 0 >= natural__first . H89: 0 <= natural__last . H90: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H91: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H92: fld_byte_count(fld_h(ctx__1)) = skein_512_block_bytes_c . H93: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last))) . H94: skein_512_block_bytes_c >= natural__first . H95: skein_512_block_bytes_c <= natural__last . H96: 1 >= positive_block_512_count_t__first . H97: 1 <= positive_block_512_count_t__last . H98: 0 >= natural__first . H99: 0 <= natural__last . H100: fld_hash_bit_len(fld_h(ctx__1)) >= initialized_hash_bit_length__first . H101: fld_hash_bit_len(fld_h(ctx__1)) <= initialized_hash_bit_length__last . H102: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H103: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H104: skein_512_block_bytes_index__first = 0 . H105: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H106: 0 + 63 <= skein_512_block_bytes_index__last . H107: skein_512_block_bytes_index__last <= natural__last . H108: 0 <= natural__last - 63 . 
H109: fld_hash_bit_len(fld_h(ctx__2)) >= initialized_hash_bit_length__first . H110: fld_hash_bit_len(fld_h(ctx__2)) <= initialized_hash_bit_length__last . H111: fld_hash_bit_len(fld_h(ctx__2)) = fld_hash_bit_len(fld_h(ctx__1)) . H112: fld_byte_count(fld_h(ctx__2)) >= skein_512_block_bytes_count__first . H113: fld_byte_count(fld_h(ctx__2)) <= skein_512_block_bytes_count__last . H114: fld_byte_count(fld_h(ctx__2)) = fld_byte_count(fld_h(ctx__1)) . H115: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H116: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H117: fld_byte_count(fld_h(ctx__2)) >= natural__first . H118: fld_byte_count(fld_h(ctx__2)) <= natural__last . H119: fld_hash_bit_len(fld_h(ctx__2)) >= hash_bit_length__first . H120: fld_hash_bit_len(fld_h(ctx__2)) <= hash_bit_length__last . H121: true . H122: true . H123: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u6__first . H124: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u6__last . H125: true . H126: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u7__first . H127: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u7__last . H128: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u16__first . H129: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u16__last . H130: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u32__first . H131: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u32__last . H132: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u64__first . H133: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u64__last . H134: 0 >= natural__first . H135: 0 <= natural__last . H136: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H137: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H138: not (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) > skein_512_block_bytes_c) . H139: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H140: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H141: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H142: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . -> C1: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . C2: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . C3: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . C4: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . C5: fld_hash_bit_len(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) > 0 . C6: msg__index__subtype__1__first = 0 . 
C7: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= msg__index__subtype__1__first . C8: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= msg__index__subtype__1__last . C9: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) <= msg__index__subtype__1__last . C10: fld_byte_count(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) + (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) <= skein_512_block_bytes_index__last . procedure_skein_512_update_28. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . 
H37: not (fld_byte_count(fld_h(ctx)) > 0) . H38: msg__index__subtype__1__last + 1 >= natural__first . H39: msg__index__subtype__1__last + 1 <= natural__last . H40: not (msg__index__subtype__1__last + 1 > skein_512_block_bytes_c) . H41: 0 >= natural__first . H42: 0 <= natural__last . H43: msg__index__subtype__1__last + 1 >= natural__first . H44: msg__index__subtype__1__last + 1 <= natural__last . -> C1: msg__index__subtype__1__last + 1 >= natural__first . C2: msg__index__subtype__1__last + 1 <= natural__last . C3: 0 >= natural__first . C4: 0 <= natural__last . C5: fld_hash_bit_len(fld_h(ctx)) > 0 . C6: msg__index__subtype__1__first = 0 . C7: 0 >= msg__index__subtype__1__first . C8: 0 <= msg__index__subtype__1__last . C9: 0 + (msg__index__subtype__1__last + 1 - 1) <= msg__index__subtype__1__last . C10: fld_byte_count(fld_h(ctx)) + (msg__index__subtype__1__last + 1 - 1) <= skein_512_block_bytes_index__last . procedure_skein_512_update_29. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . 
H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: not (msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c) . H37: 0 >= natural__first . H38: 0 <= natural__last . H39: msg__index__subtype__1__last + 1 >= natural__first . H40: msg__index__subtype__1__last + 1 <= natural__last . -> C1: msg__index__subtype__1__last + 1 >= natural__first . C2: msg__index__subtype__1__last + 1 <= natural__last . C3: 0 >= natural__first . C4: 0 <= natural__last . C5: fld_hash_bit_len(fld_h(ctx)) > 0 . C6: msg__index__subtype__1__first = 0 . C7: 0 >= msg__index__subtype__1__first . C8: 0 <= msg__index__subtype__1__last . C9: 0 + (msg__index__subtype__1__last + 1 - 1) <= msg__index__subtype__1__last . C10: fld_byte_count(fld_h(ctx)) + (msg__index__subtype__1__last + 1 - 1) <= skein_512_block_bytes_index__last . For path(s) from start to finish: procedure_skein_512_update_30. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . 
H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: fld_byte_count(fld_h(ctx)) > 0 . H38: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H39: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H40: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 . H41: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 . H42: 0 >= natural__first . H43: 0 <= natural__last . H44: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H45: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H46: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first . H47: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last . H48: 0 >= natural__first . H49: 0 <= natural__last . H50: fld_hash_bit_len(fld_h(ctx)) > 0 . H51: msg__index__subtype__1__first = 0 . H52: 0 >= msg__index__subtype__1__first . H53: 0 <= msg__index__subtype__1__last . H54: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= msg__index__subtype__1__last . H55: fld_byte_count(fld_h(ctx)) + ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= skein_512_block_bytes_index__last . H56: fld_hash_bit_len(fld_h(ctx__1)) > 0 . H57: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h( ctx)) . H58: fld_byte_count(fld_h(ctx__1)) = fld_byte_count(fld_h( ctx)) + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) . H59: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H60: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H61: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H62: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H63: fld_byte_count(fld_h(ctx__1)) >= natural__first . H64: fld_byte_count(fld_h(ctx__1)) <= natural__last . H65: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H66: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H67: true . H68: true . H69: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H70: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H71: true . H72: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H73: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H74: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H75: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H76: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . 
H77: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H78: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H79: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H80: msg__index__subtype__1__last + 1 >= natural__first . H81: msg__index__subtype__1__last + 1 <= natural__last . H82: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H83: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H84: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H85: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H86: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H87: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H88: 0 >= natural__first . H89: 0 <= natural__last . H90: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H91: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H92: fld_byte_count(fld_h(ctx__1)) = skein_512_block_bytes_c . H93: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last))) . H94: skein_512_block_bytes_c >= natural__first . H95: skein_512_block_bytes_c <= natural__last . H96: 1 >= positive_block_512_count_t__first . H97: 1 <= positive_block_512_count_t__last . H98: 0 >= natural__first . H99: 0 <= natural__last . H100: fld_hash_bit_len(fld_h(ctx__1)) >= initialized_hash_bit_length__first . H101: fld_hash_bit_len(fld_h(ctx__1)) <= initialized_hash_bit_length__last . H102: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H103: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H104: skein_512_block_bytes_index__first = 0 . H105: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H106: 0 + 63 <= skein_512_block_bytes_index__last . H107: skein_512_block_bytes_index__last <= natural__last . H108: 0 <= natural__last - 63 . H109: fld_hash_bit_len(fld_h(ctx__2)) >= initialized_hash_bit_length__first . H110: fld_hash_bit_len(fld_h(ctx__2)) <= initialized_hash_bit_length__last . H111: fld_hash_bit_len(fld_h(ctx__2)) = fld_hash_bit_len(fld_h(ctx__1)) . H112: fld_byte_count(fld_h(ctx__2)) >= skein_512_block_bytes_count__first . H113: fld_byte_count(fld_h(ctx__2)) <= skein_512_block_bytes_count__last . H114: fld_byte_count(fld_h(ctx__2)) = fld_byte_count(fld_h(ctx__1)) . H115: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H116: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H117: fld_byte_count(fld_h(ctx__2)) >= natural__first . H118: fld_byte_count(fld_h(ctx__2)) <= natural__last . H119: fld_hash_bit_len(fld_h(ctx__2)) >= hash_bit_length__first . 
H120: fld_hash_bit_len(fld_h(ctx__2)) <= hash_bit_length__last . H121: true . H122: true . H123: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u6__first . H124: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u6__last . H125: true . H126: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u7__first . H127: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u7__last . H128: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u16__first . H129: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u16__last . H130: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u32__first . H131: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u32__last . H132: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u64__first . H133: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u64__last . H134: 0 >= natural__first . H135: 0 <= natural__last . H136: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H137: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H138: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) > skein_512_block_bytes_c . H139: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H140: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H141: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H142: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H143: skein_512_block_bytes_c <> 0 . H144: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1 >= integer__base__first . H145: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1 <= integer__base__last . H146: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H147: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H148: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H149: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H150: skein_512_block_bytes_c >= natural__first . H151: skein_512_block_bytes_c <= natural__last . H152: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H153: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H154: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H155: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H156: fld_hash_bit_len(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) >= initialized_hash_bit_length__first . 
H157: fld_hash_bit_len(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) <= initialized_hash_bit_length__last . H158: fld_byte_count(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) >= skein_512_block_bytes_count__first . H159: fld_byte_count(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) <= skein_512_block_bytes_count__last . H160: msg__index__subtype__1__first = 0 . H161: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + ((msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last . H162: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + 63 <= msg__index__subtype__1__last . H163: msg__index__subtype__1__last <= natural__last . H164: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last - 63 . H165: fld_hash_bit_len(fld_h(ctx__3)) >= initialized_hash_bit_length__first . H166: fld_hash_bit_len(fld_h(ctx__3)) <= initialized_hash_bit_length__last . H167: fld_hash_bit_len(fld_h(ctx__3)) = fld_hash_bit_len(fld_h(upf_h( ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) . H168: fld_byte_count(fld_h(ctx__3)) >= skein_512_block_bytes_count__first . H169: fld_byte_count(fld_h(ctx__3)) <= skein_512_block_bytes_count__last . H170: fld_byte_count(fld_h(ctx__3)) = fld_byte_count(fld_h(upf_h( ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) . H171: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H172: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H173: fld_byte_count(fld_h(ctx__3)) >= natural__first . H174: fld_byte_count(fld_h(ctx__3)) <= natural__last . H175: fld_hash_bit_len(fld_h(ctx__3)) >= hash_bit_length__first . H176: fld_hash_bit_len(fld_h(ctx__3)) <= hash_bit_length__last . H177: true . H178: true . H179: fld_field_type(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u6__first . H180: fld_field_type(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u6__last . H181: true . H182: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u7__first . H183: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u7__last . H184: fld_reserved(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u16__first . H185: fld_reserved(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u16__last . H186: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u32__first . H187: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u32__last . H188: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u64__first . H189: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u64__last . H190: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H191: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . 
H192: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H193: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H194: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c < msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) . H195: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H196: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H197: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H198: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H199: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H200: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H201: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H202: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H203: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H204: (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H205: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H206: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H207: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H208: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H209: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . 
H210: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H211: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H212: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H213: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H214: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H215: fld_hash_bit_len(fld_h(ctx__3)) > 0 . H216: msg__index__subtype__1__first = 0 . H217: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= msg__index__subtype__1__first . H218: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= msg__index__subtype__1__last . H219: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c + ( msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c - 1) <= msg__index__subtype__1__last . H220: fld_byte_count(fld_h(ctx__3)) + ( msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c - 1) <= skein_512_block_bytes_index__last . H221: fld_hash_bit_len(fld_h(ctx__4)) > 0 . H222: fld_hash_bit_len(fld_h(ctx__4)) = fld_hash_bit_len(fld_h(ctx__3)) . H223: fld_byte_count(fld_h(ctx__4)) = fld_byte_count(fld_h(ctx__3)) + ( msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c) . H224: fld_byte_count(fld_h(ctx__4)) >= skein_512_block_bytes_count__first . H225: fld_byte_count(fld_h(ctx__4)) <= skein_512_block_bytes_count__last . 
H226: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__4), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__4), [i___2]) <= spark__unsigned__byte__last))) . H227: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__4), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__4), [i___1]) <= spark__unsigned__u64__last))) . H228: fld_byte_count(fld_h(ctx__4)) >= natural__first . H229: fld_byte_count(fld_h(ctx__4)) <= natural__last . H230: fld_hash_bit_len(fld_h(ctx__4)) >= hash_bit_length__first . H231: fld_hash_bit_len(fld_h(ctx__4)) <= hash_bit_length__last . H232: true . H233: true . H234: fld_field_type(fld_tweak_words(fld_h(ctx__4))) >= spark__unsigned__u6__first . H235: fld_field_type(fld_tweak_words(fld_h(ctx__4))) <= spark__unsigned__u6__last . H236: true . H237: fld_tree_level(fld_tweak_words(fld_h(ctx__4))) >= spark__unsigned__u7__first . H238: fld_tree_level(fld_tweak_words(fld_h(ctx__4))) <= spark__unsigned__u7__last . H239: fld_reserved(fld_tweak_words(fld_h(ctx__4))) >= spark__unsigned__u16__first . H240: fld_reserved(fld_tweak_words(fld_h(ctx__4))) <= spark__unsigned__u16__last . H241: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__4))) >= spark__unsigned__u32__first . H242: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__4))) <= spark__unsigned__u32__last . H243: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__4))) >= spark__unsigned__u64__first . H244: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__4))) <= spark__unsigned__u64__last . -> C1: fld_hash_bit_len(fld_h(ctx__4)) >= initialized_hash_bit_length__first . C2: fld_hash_bit_len(fld_h(ctx__4)) <= initialized_hash_bit_length__last . C3: fld_hash_bit_len(fld_h(ctx__4)) = fld_hash_bit_len(fld_h( ctx)) . C4: fld_byte_count(fld_h(ctx__4)) >= skein_512_block_bytes_count__first . C5: fld_byte_count(fld_h(ctx__4)) <= skein_512_block_bytes_count__last . procedure_skein_512_update_31. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . 
H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: not (fld_byte_count(fld_h(ctx)) > 0) . H38: msg__index__subtype__1__last + 1 >= natural__first . H39: msg__index__subtype__1__last + 1 <= natural__last . H40: msg__index__subtype__1__last + 1 > skein_512_block_bytes_c . H41: msg__index__subtype__1__last + 1 >= natural__first . H42: msg__index__subtype__1__last + 1 <= natural__last . H43: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H44: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H45: skein_512_block_bytes_c <> 0 . H46: msg__index__subtype__1__last + 1 - 1 >= integer__base__first . H47: msg__index__subtype__1__last + 1 - 1 <= integer__base__last . H48: 0 >= natural__first . H49: 0 <= natural__last . H50: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H51: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H52: skein_512_block_bytes_c >= natural__first . H53: skein_512_block_bytes_c <= natural__last . H54: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H55: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H56: 0 >= natural__first . H57: 0 <= natural__last . H58: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H59: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H60: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H61: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H62: msg__index__subtype__1__first = 0 . H63: 0 + ((msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last . H64: 0 + 63 <= msg__index__subtype__1__last . H65: msg__index__subtype__1__last <= natural__last . H66: 0 <= natural__last - 63 . H67: fld_hash_bit_len(fld_h(ctx__3)) >= initialized_hash_bit_length__first . 
H68: fld_hash_bit_len(fld_h(ctx__3)) <= initialized_hash_bit_length__last . H69: fld_hash_bit_len(fld_h(ctx__3)) = fld_hash_bit_len(fld_h( ctx)) . H70: fld_byte_count(fld_h(ctx__3)) >= skein_512_block_bytes_count__first . H71: fld_byte_count(fld_h(ctx__3)) <= skein_512_block_bytes_count__last . H72: fld_byte_count(fld_h(ctx__3)) = fld_byte_count(fld_h( ctx)) . H73: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__3), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last))) . H74: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__3), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last))) . H75: fld_byte_count(fld_h(ctx__3)) >= natural__first . H76: fld_byte_count(fld_h(ctx__3)) <= natural__last . H77: fld_hash_bit_len(fld_h(ctx__3)) >= hash_bit_length__first . H78: fld_hash_bit_len(fld_h(ctx__3)) <= hash_bit_length__last . H79: true . H80: true . H81: fld_field_type(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u6__first . H82: fld_field_type(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u6__last . H83: true . H84: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u7__first . H85: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u7__last . H86: fld_reserved(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u16__first . H87: fld_reserved(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u16__last . H88: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u32__first . H89: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u32__last . H90: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u64__first . H91: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u64__last . H92: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first . H93: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last . H94: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H95: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H96: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c < msg__index__subtype__1__last + 1 . H97: msg__index__subtype__1__last + 1 >= natural__first . H98: msg__index__subtype__1__last + 1 <= natural__last . H99: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H100: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H101: msg__index__subtype__1__last + 1 - ( msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H102: msg__index__subtype__1__last + 1 - ( msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H103: 0 >= natural__first . H104: 0 <= natural__last . H105: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . 
H106: (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H107: 0 + (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H108: 0 + (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H109: 0 + (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H110: 0 + (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H111: msg__index__subtype__1__last + 1 - ( msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H112: msg__index__subtype__1__last + 1 - ( msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H113: msg__index__subtype__1__last + 1 - ( msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H114: msg__index__subtype__1__last + 1 - ( msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H115: 0 + (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first . H116: 0 + (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last . H117: fld_hash_bit_len(fld_h(ctx__3)) > 0 . H118: msg__index__subtype__1__first = 0 . H119: 0 + (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= msg__index__subtype__1__first . H120: 0 + (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= msg__index__subtype__1__last . H121: 0 + (msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c + ( msg__index__subtype__1__last + 1 - ( msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c - 1) <= msg__index__subtype__1__last . H122: fld_byte_count(fld_h(ctx__3)) + ( msg__index__subtype__1__last + 1 - ( msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c - 1) <= skein_512_block_bytes_index__last . H123: fld_hash_bit_len(fld_h(ctx__4)) > 0 . H124: fld_hash_bit_len(fld_h(ctx__4)) = fld_hash_bit_len(fld_h(ctx__3)) . H125: fld_byte_count(fld_h(ctx__4)) = fld_byte_count(fld_h(ctx__3)) + ( msg__index__subtype__1__last + 1 - ( msg__index__subtype__1__last + 1 - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c) . H126: fld_byte_count(fld_h(ctx__4)) >= skein_512_block_bytes_count__first . H127: fld_byte_count(fld_h(ctx__4)) <= skein_512_block_bytes_count__last . H128: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__4), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__4), [i___2]) <= spark__unsigned__byte__last))) . H129: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__4), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__4), [i___1]) <= spark__unsigned__u64__last))) . H130: fld_byte_count(fld_h(ctx__4)) >= natural__first . H131: fld_byte_count(fld_h(ctx__4)) <= natural__last . H132: fld_hash_bit_len(fld_h(ctx__4)) >= hash_bit_length__first . 
H133: fld_hash_bit_len(fld_h(ctx__4)) <= hash_bit_length__last . H134: true . H135: true . H136: fld_field_type(fld_tweak_words(fld_h(ctx__4))) >= spark__unsigned__u6__first . H137: fld_field_type(fld_tweak_words(fld_h(ctx__4))) <= spark__unsigned__u6__last . H138: true . H139: fld_tree_level(fld_tweak_words(fld_h(ctx__4))) >= spark__unsigned__u7__first . H140: fld_tree_level(fld_tweak_words(fld_h(ctx__4))) <= spark__unsigned__u7__last . H141: fld_reserved(fld_tweak_words(fld_h(ctx__4))) >= spark__unsigned__u16__first . H142: fld_reserved(fld_tweak_words(fld_h(ctx__4))) <= spark__unsigned__u16__last . H143: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__4))) >= spark__unsigned__u32__first . H144: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__4))) <= spark__unsigned__u32__last . H145: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__4))) >= spark__unsigned__u64__first . H146: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__4))) <= spark__unsigned__u64__last . -> C1: fld_hash_bit_len(fld_h(ctx__4)) >= initialized_hash_bit_length__first . C2: fld_hash_bit_len(fld_h(ctx__4)) <= initialized_hash_bit_length__last . C3: fld_hash_bit_len(fld_h(ctx__4)) = fld_hash_bit_len(fld_h( ctx)) . C4: fld_byte_count(fld_h(ctx__4)) >= skein_512_block_bytes_count__first . C5: fld_byte_count(fld_h(ctx__4)) <= skein_512_block_bytes_count__last . procedure_skein_512_update_32. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . 
H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: fld_byte_count(fld_h(ctx)) > 0 . H38: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H39: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H40: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 . H41: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 . H42: 0 >= natural__first . H43: 0 <= natural__last . H44: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H45: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H46: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first . H47: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last . H48: 0 >= natural__first . H49: 0 <= natural__last . H50: fld_hash_bit_len(fld_h(ctx)) > 0 . H51: msg__index__subtype__1__first = 0 . H52: 0 >= msg__index__subtype__1__first . H53: 0 <= msg__index__subtype__1__last . H54: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= msg__index__subtype__1__last . H55: fld_byte_count(fld_h(ctx)) + ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)) - 1) <= skein_512_block_bytes_index__last . H56: fld_hash_bit_len(fld_h(ctx__1)) > 0 . H57: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h( ctx)) . H58: fld_byte_count(fld_h(ctx__1)) = fld_byte_count(fld_h( ctx)) + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) . H59: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H60: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H61: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last))) . H62: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__1), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last))) . H63: fld_byte_count(fld_h(ctx__1)) >= natural__first . H64: fld_byte_count(fld_h(ctx__1)) <= natural__last . H65: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H66: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H67: true . H68: true . H69: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H70: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H71: true . H72: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . 
H73: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H74: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H75: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H76: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H77: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H78: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H79: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H80: msg__index__subtype__1__last + 1 >= natural__first . H81: msg__index__subtype__1__last + 1 <= natural__last . H82: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H83: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H84: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H85: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H86: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H87: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H88: 0 >= natural__first . H89: 0 <= natural__last . H90: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H91: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H92: fld_byte_count(fld_h(ctx__1)) = skein_512_block_bytes_c . H93: for_all(i___1: integer, ((i___1 >= skein_512_block_bytes_index__first) and (i___1 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__1), [i___1]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last))) . H94: skein_512_block_bytes_c >= natural__first . H95: skein_512_block_bytes_c <= natural__last . H96: 1 >= positive_block_512_count_t__first . H97: 1 <= positive_block_512_count_t__last . H98: 0 >= natural__first . H99: 0 <= natural__last . H100: fld_hash_bit_len(fld_h(ctx__1)) >= initialized_hash_bit_length__first . H101: fld_hash_bit_len(fld_h(ctx__1)) <= initialized_hash_bit_length__last . H102: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first . H103: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last . H104: skein_512_block_bytes_index__first = 0 . H105: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_block_bytes_index__last . H106: 0 + 63 <= skein_512_block_bytes_index__last . H107: skein_512_block_bytes_index__last <= natural__last . H108: 0 <= natural__last - 63 . H109: fld_hash_bit_len(fld_h(ctx__2)) >= initialized_hash_bit_length__first . H110: fld_hash_bit_len(fld_h(ctx__2)) <= initialized_hash_bit_length__last . H111: fld_hash_bit_len(fld_h(ctx__2)) = fld_hash_bit_len(fld_h(ctx__1)) . H112: fld_byte_count(fld_h(ctx__2)) >= skein_512_block_bytes_count__first . H113: fld_byte_count(fld_h(ctx__2)) <= skein_512_block_bytes_count__last . H114: fld_byte_count(fld_h(ctx__2)) = fld_byte_count(fld_h(ctx__1)) . H115: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last))) . 
H116: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H117: fld_byte_count(fld_h(ctx__2)) >= natural__first . H118: fld_byte_count(fld_h(ctx__2)) <= natural__last . H119: fld_hash_bit_len(fld_h(ctx__2)) >= hash_bit_length__first . H120: fld_hash_bit_len(fld_h(ctx__2)) <= hash_bit_length__last . H121: true . H122: true . H123: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u6__first . H124: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u6__last . H125: true . H126: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u7__first . H127: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u7__last . H128: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u16__first . H129: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u16__last . H130: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u32__first . H131: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u32__last . H132: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u64__first . H133: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u64__last . H134: 0 >= natural__first . H135: 0 <= natural__last . H136: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H137: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H138: not (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) > skein_512_block_bytes_c) . H139: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H140: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H141: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H142: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H143: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H144: msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H145: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= natural__first . H146: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= natural__last . H147: fld_hash_bit_len(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) > 0 . H148: msg__index__subtype__1__first = 0 . H149: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) >= msg__index__subtype__1__first . H150: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) <= msg__index__subtype__1__last . H151: 0 + (skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) + (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) <= msg__index__subtype__1__last . H152: fld_byte_count(fld_h(upf_h(ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) + (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx))) - 1) <= skein_512_block_bytes_index__last . H153: fld_hash_bit_len(fld_h(ctx__4)) > 0 . H154: fld_hash_bit_len(fld_h(ctx__4)) = fld_hash_bit_len(fld_h(upf_h( ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) . 
H155: fld_byte_count(fld_h(ctx__4)) = fld_byte_count(fld_h(upf_h( ctx__2, upf_byte_count(fld_h(ctx__2), 0)))) + ( msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h( ctx)))) . H156: fld_byte_count(fld_h(ctx__4)) >= skein_512_block_bytes_count__first . H157: fld_byte_count(fld_h(ctx__4)) <= skein_512_block_bytes_count__last . H158: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__4), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__4), [i___2]) <= spark__unsigned__byte__last))) . H159: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__4), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__4), [i___1]) <= spark__unsigned__u64__last))) . H160: fld_byte_count(fld_h(ctx__4)) >= natural__first . H161: fld_byte_count(fld_h(ctx__4)) <= natural__last . H162: fld_hash_bit_len(fld_h(ctx__4)) >= hash_bit_length__first . H163: fld_hash_bit_len(fld_h(ctx__4)) <= hash_bit_length__last . H164: true . H165: true . H166: fld_field_type(fld_tweak_words(fld_h(ctx__4))) >= spark__unsigned__u6__first . H167: fld_field_type(fld_tweak_words(fld_h(ctx__4))) <= spark__unsigned__u6__last . H168: true . H169: fld_tree_level(fld_tweak_words(fld_h(ctx__4))) >= spark__unsigned__u7__first . H170: fld_tree_level(fld_tweak_words(fld_h(ctx__4))) <= spark__unsigned__u7__last . H171: fld_reserved(fld_tweak_words(fld_h(ctx__4))) >= spark__unsigned__u16__first . H172: fld_reserved(fld_tweak_words(fld_h(ctx__4))) <= spark__unsigned__u16__last . H173: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__4))) >= spark__unsigned__u32__first . H174: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__4))) <= spark__unsigned__u32__last . H175: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__4))) >= spark__unsigned__u64__first . H176: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__4))) <= spark__unsigned__u64__last . -> C1: fld_hash_bit_len(fld_h(ctx__4)) >= initialized_hash_bit_length__first . C2: fld_hash_bit_len(fld_h(ctx__4)) <= initialized_hash_bit_length__last . C3: fld_hash_bit_len(fld_h(ctx__4)) = fld_hash_bit_len(fld_h( ctx)) . C4: fld_byte_count(fld_h(ctx__4)) >= skein_512_block_bytes_count__first . C5: fld_byte_count(fld_h(ctx__4)) <= skein_512_block_bytes_count__last . procedure_skein_512_update_33. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . 
H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c . H37: not (fld_byte_count(fld_h(ctx)) > 0) . H38: msg__index__subtype__1__last + 1 >= natural__first . H39: msg__index__subtype__1__last + 1 <= natural__last . H40: not (msg__index__subtype__1__last + 1 > skein_512_block_bytes_c) . H41: 0 >= natural__first . H42: 0 <= natural__last . H43: msg__index__subtype__1__last + 1 >= natural__first . H44: msg__index__subtype__1__last + 1 <= natural__last . H45: msg__index__subtype__1__last + 1 >= natural__first . H46: msg__index__subtype__1__last + 1 <= natural__last . H47: 0 >= natural__first . H48: 0 <= natural__last . H49: fld_hash_bit_len(fld_h(ctx)) > 0 . H50: msg__index__subtype__1__first = 0 . H51: 0 >= msg__index__subtype__1__first . H52: 0 <= msg__index__subtype__1__last . H53: 0 + (msg__index__subtype__1__last + 1 - 1) <= msg__index__subtype__1__last . H54: fld_byte_count(fld_h(ctx)) + (msg__index__subtype__1__last + 1 - 1) <= skein_512_block_bytes_index__last . H55: fld_hash_bit_len(fld_h(ctx__4)) > 0 . H56: fld_hash_bit_len(fld_h(ctx__4)) = fld_hash_bit_len(fld_h( ctx)) . H57: fld_byte_count(fld_h(ctx__4)) = fld_byte_count(fld_h( ctx)) + (msg__index__subtype__1__last + 1) . H58: fld_byte_count(fld_h(ctx__4)) >= skein_512_block_bytes_count__first . H59: fld_byte_count(fld_h(ctx__4)) <= skein_512_block_bytes_count__last . H60: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__4), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__4), [i___2]) <= spark__unsigned__byte__last))) . 
H61: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__4), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__4), [i___1]) <= spark__unsigned__u64__last))) . H62: fld_byte_count(fld_h(ctx__4)) >= natural__first . H63: fld_byte_count(fld_h(ctx__4)) <= natural__last . H64: fld_hash_bit_len(fld_h(ctx__4)) >= hash_bit_length__first . H65: fld_hash_bit_len(fld_h(ctx__4)) <= hash_bit_length__last . H66: true . H67: true . H68: fld_field_type(fld_tweak_words(fld_h(ctx__4))) >= spark__unsigned__u6__first . H69: fld_field_type(fld_tweak_words(fld_h(ctx__4))) <= spark__unsigned__u6__last . H70: true . H71: fld_tree_level(fld_tweak_words(fld_h(ctx__4))) >= spark__unsigned__u7__first . H72: fld_tree_level(fld_tweak_words(fld_h(ctx__4))) <= spark__unsigned__u7__last . H73: fld_reserved(fld_tweak_words(fld_h(ctx__4))) >= spark__unsigned__u16__first . H74: fld_reserved(fld_tweak_words(fld_h(ctx__4))) <= spark__unsigned__u16__last . H75: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__4))) >= spark__unsigned__u32__first . H76: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__4))) <= spark__unsigned__u32__last . H77: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__4))) >= spark__unsigned__u64__first . H78: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__4))) <= spark__unsigned__u64__last . -> C1: fld_hash_bit_len(fld_h(ctx__4)) >= initialized_hash_bit_length__first . C2: fld_hash_bit_len(fld_h(ctx__4)) <= initialized_hash_bit_length__last . C3: fld_hash_bit_len(fld_h(ctx__4)) = fld_hash_bit_len(fld_h( ctx)) . C4: fld_byte_count(fld_h(ctx__4)) >= skein_512_block_bytes_count__first . C5: fld_byte_count(fld_h(ctx__4)) <= skein_512_block_bytes_count__last . procedure_skein_512_update_34. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . 
H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H28: msg__index__subtype__1__last + 1 >= natural__first . H29: msg__index__subtype__1__last + 1 <= natural__last . H30: 0 >= natural__first . H31: 0 <= natural__last . H32: msg__index__subtype__1__last + 1 >= natural__first . H33: msg__index__subtype__1__last + 1 <= natural__last . H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) >= integer__base__first . H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) <= integer__base__last . H36: not (msg__index__subtype__1__last + 1 + fld_byte_count(fld_h( ctx)) > skein_512_block_bytes_c) . H37: 0 >= natural__first . H38: 0 <= natural__last . H39: msg__index__subtype__1__last + 1 >= natural__first . H40: msg__index__subtype__1__last + 1 <= natural__last . H41: msg__index__subtype__1__last + 1 >= natural__first . H42: msg__index__subtype__1__last + 1 <= natural__last . H43: 0 >= natural__first . H44: 0 <= natural__last . H45: fld_hash_bit_len(fld_h(ctx)) > 0 . H46: msg__index__subtype__1__first = 0 . H47: 0 >= msg__index__subtype__1__first . H48: 0 <= msg__index__subtype__1__last . H49: 0 + (msg__index__subtype__1__last + 1 - 1) <= msg__index__subtype__1__last . H50: fld_byte_count(fld_h(ctx)) + (msg__index__subtype__1__last + 1 - 1) <= skein_512_block_bytes_index__last . H51: fld_hash_bit_len(fld_h(ctx__4)) > 0 . H52: fld_hash_bit_len(fld_h(ctx__4)) = fld_hash_bit_len(fld_h( ctx)) . H53: fld_byte_count(fld_h(ctx__4)) = fld_byte_count(fld_h( ctx)) + (msg__index__subtype__1__last + 1) . H54: fld_byte_count(fld_h(ctx__4)) >= skein_512_block_bytes_count__first . H55: fld_byte_count(fld_h(ctx__4)) <= skein_512_block_bytes_count__last . H56: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__4), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__4), [i___2]) <= spark__unsigned__byte__last))) . H57: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__4), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__4), [i___1]) <= spark__unsigned__u64__last))) . H58: fld_byte_count(fld_h(ctx__4)) >= natural__first . H59: fld_byte_count(fld_h(ctx__4)) <= natural__last . H60: fld_hash_bit_len(fld_h(ctx__4)) >= hash_bit_length__first . H61: fld_hash_bit_len(fld_h(ctx__4)) <= hash_bit_length__last . H62: true . H63: true . H64: fld_field_type(fld_tweak_words(fld_h(ctx__4))) >= spark__unsigned__u6__first . H65: fld_field_type(fld_tweak_words(fld_h(ctx__4))) <= spark__unsigned__u6__last . H66: true . H67: fld_tree_level(fld_tweak_words(fld_h(ctx__4))) >= spark__unsigned__u7__first . H68: fld_tree_level(fld_tweak_words(fld_h(ctx__4))) <= spark__unsigned__u7__last . H69: fld_reserved(fld_tweak_words(fld_h(ctx__4))) >= spark__unsigned__u16__first . H70: fld_reserved(fld_tweak_words(fld_h(ctx__4))) <= spark__unsigned__u16__last . 
H71: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__4))) >= spark__unsigned__u32__first . H72: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__4))) <= spark__unsigned__u32__last . H73: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__4))) >= spark__unsigned__u64__first . H74: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__4))) <= spark__unsigned__u64__last . -> C1: fld_hash_bit_len(fld_h(ctx__4)) >= initialized_hash_bit_length__first . C2: fld_hash_bit_len(fld_h(ctx__4)) <= initialized_hash_bit_length__last . C3: fld_hash_bit_len(fld_h(ctx__4)) = fld_hash_bit_len(fld_h( ctx)) . C4: fld_byte_count(fld_h(ctx__4)) >= skein_512_block_bytes_count__first . C5: fld_byte_count(fld_h(ctx__4)) <= skein_512_block_bytes_count__last . For checks of refinement integrity: procedure_skein_512_update_35. H1: hash_bit_len_of(ctx) >= initialized_hash_bit_length__first . H2: hash_bit_len_of(ctx) <= initialized_hash_bit_length__last . H3: byte_count_of(ctx) >= skein_512_block_bytes_count__first . H4: byte_count_of(ctx) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c < natural__last . H8: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H9: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H10: fld_byte_count(fld_h(ctx)) >= natural__first . H11: fld_byte_count(fld_h(ctx)) <= natural__last . H12: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H13: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H14: true . H15: true . H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H18: true . H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H27: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . -> C1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . C2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . C3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . C4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . C5: msg__index__subtype__1__first = 0 . C6: msg__index__subtype__1__last < natural__last . C7: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . 
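A worked sketch of the arithmetic behind the refinement-integrity check above, assuming the usual integer reading of the FDL relations (writing m for msg__index__subtype__1__last, B for skein_512_block_bytes_c and N for natural__last):

   H7:  m + B < N
   C7:  m + B + 1 <= N     -- over the integers, x < y holds exactly when x + 1 <= y

so C7 is H7 restated in non-strict form, C5 and C6 repeat H5 and H6 verbatim, and C1 .. C4 restate the bounds of H1 .. H4 with the abstract views hash_bit_len_of(ctx) and byte_count_of(ctx) replaced by the concrete fields fld_hash_bit_len(fld_h(ctx)) and fld_byte_count(fld_h(ctx)).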
procedure_skein_512_update_36. H1: hash_bit_len_of(ctx~) >= initialized_hash_bit_length__first . H2: hash_bit_len_of(ctx~) <= initialized_hash_bit_length__last . H3: byte_count_of(ctx~) >= skein_512_block_bytes_count__first . H4: byte_count_of(ctx~) <= skein_512_block_bytes_count__last . H5: msg__index__subtype__1__first = 0 . H6: msg__index__subtype__1__last < natural__last . H7: msg__index__subtype__1__last + skein_512_block_bytes_c < natural__last . H8: fld_hash_bit_len(fld_h(ctx~)) >= initialized_hash_bit_length__first . H9: fld_hash_bit_len(fld_h(ctx~)) <= initialized_hash_bit_length__last . H10: fld_byte_count(fld_h(ctx~)) >= skein_512_block_bytes_count__first . H11: fld_byte_count(fld_h(ctx~)) <= skein_512_block_bytes_count__last . H12: msg__index__subtype__1__first = 0 . H13: msg__index__subtype__1__last < natural__last . H14: msg__index__subtype__1__last + skein_512_block_bytes_c + 1 <= natural__last . H15: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx~), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx~), [i___2]) <= spark__unsigned__byte__last))) . H16: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx~), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx~), [i___1]) <= spark__unsigned__u64__last))) . H17: fld_byte_count(fld_h(ctx~)) >= natural__first . H18: fld_byte_count(fld_h(ctx~)) <= natural__last . H19: fld_hash_bit_len(fld_h(ctx~)) >= hash_bit_length__first . H20: fld_hash_bit_len(fld_h(ctx~)) <= hash_bit_length__last . H21: true . H22: true . H23: fld_field_type(fld_tweak_words(fld_h(ctx~))) >= spark__unsigned__u6__first . H24: fld_field_type(fld_tweak_words(fld_h(ctx~))) <= spark__unsigned__u6__last . H25: true . H26: fld_tree_level(fld_tweak_words(fld_h(ctx~))) >= spark__unsigned__u7__first . H27: fld_tree_level(fld_tweak_words(fld_h(ctx~))) <= spark__unsigned__u7__last . H28: fld_reserved(fld_tweak_words(fld_h(ctx~))) >= spark__unsigned__u16__first . H29: fld_reserved(fld_tweak_words(fld_h(ctx~))) <= spark__unsigned__u16__last . H30: fld_byte_count_msb(fld_tweak_words(fld_h(ctx~))) >= spark__unsigned__u32__first . H31: fld_byte_count_msb(fld_tweak_words(fld_h(ctx~))) <= spark__unsigned__u32__last . H32: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx~))) >= spark__unsigned__u64__first . H33: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx~))) <= spark__unsigned__u64__last . H34: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H35: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H36: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H37: fld_byte_count(fld_h(ctx)) >= natural__first . H38: fld_byte_count(fld_h(ctx)) <= natural__last . H39: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H40: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . 
H41: true . H42: true . H43: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H44: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H45: true . H46: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H47: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H48: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H49: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H50: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H51: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H52: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H53: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H54: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H55: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H56: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h( ctx~)) . H57: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H58: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . -> C1: hash_bit_len_of(ctx) >= initialized_hash_bit_length__first . C2: hash_bit_len_of(ctx) <= initialized_hash_bit_length__last . C3: hash_bit_len_of(ctx) = hash_bit_len_of(ctx~) . C4: byte_count_of(ctx) >= skein_512_block_bytes_count__first . C5: byte_count_of(ctx) <= skein_512_block_bytes_count__last . spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block.vlg0000644000175000017500000000330611712765060030271 0ustar eugeneugen Non-option args: skein_512_process_block Option args: -fuse-concls \ -decls=[SUPRESSED IN PLAIN MODE] \ -unique-working-files \ -rules=[SUPRESSED IN PLAIN MODE] \ -rules=[SUPRESSED IN PLAIN MODE] \ -elim-enums \ -ground-eval-exp \ -abstract-exp \ -abstract-divmod \ -utick \ -gtick \ -longtick \ -echo-final-stats \ -csv-reports-include-unit-kind \ -level=warning \ -bit-type \ -bit-type-bool-eq-to-iff \ -refine-types \ -refine-int-subrange-type \ -abstract-arrays-records-late \ -elim-array-constructors \ -add-array-select-box-update-axioms \ -abstract-array-box-updates \ -add-array-select-update-axioms \ -abstract-array-select-updates \ -abstract-array-types \ -abstract-record-types \ -abstract-bit-ops \ -abstract-bit-valued-eqs \ -abstract-bit-valued-int-le \ -elim-bit-type-and-consts \ -abstract-reals \ -lift-quants \ -strip-quantifier-patterns \ -elim-type-aliases \ -interface-mode=smtlib \ -refine-bit-type-as-int-subtype \ -refine-bit-eq-equiv \ -elim-record-constructors \ -add-record-select-update-axioms \ -abstract-record-selects-updates \ -logic=AUFNIRA \ -report=skein_512_process_block \ -prover-command=[SUPRESSED IN PLAIN MODE] \ -siv \ -plain \ -read-unit-rlu-files \ -read-directory-rlu-files \ -delete-rules-with-undeclared-ids \ Total ERROR messages: 0 Total WARNING messages: 0 Summary Stats: true: 2 (100.0%) unproven: 0 ( 0.0%) error: 0 ( 0.0%) total: 2 spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/put_64_lsb_first.vsm0000644000175000017500000000006411712765060027064 0ustar eugeneugenput_64_lsb_first,0,0,0,0,0,0,0,-nan,-nan,-nan,-nan, spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_hash.slg0000644000175000017500000010150311712513676026363 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition 
***************************************************************************** SPARK Simplifier Pro Edition function Skein.Skein_512_Hash RRS The following user defined rule files have been read: &&& skein.rlu SEM No semantic checks are performed on the rules. @@@@@@@@@@ VC: function_skein_512_hash_1. @@@@@@@@@@ %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___1 : integer, data__index__subtype__1__first <= i___1 and i___1 <= data__index__subtype__1__last -> spark__unsigned__byte__first <= element(data, [i___1]) and element( data, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_ha_rules(87). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New C1: true -S- Applied substitution rule skein_512_ha_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New C2: true *** Proved C1: true *** Proved C2: true *** PROVED VC. @@@@@@@@@@ VC: function_skein_512_hash_2. @@@@@@@@@@ %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___1 : integer, data__index__subtype__1__first <= i___1 and i___1 <= data__index__subtype__1__last -> spark__unsigned__byte__first <= element(data, [i___1]) and element( data, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H12 on reading formula in, to give: %%% H12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H13 on reading formula in, to give: %%% H13: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H31 has been replaced by "true". (It is already present, as H12). --- Hypothesis H32 has been replaced by "true". (It is already present, as H13). --- Hypothesis H33 has been replaced by "true". (It is already present, as H14). --- Hypothesis H34 has been replaced by "true". (It is already present, as H15). --- Hypothesis H35 has been replaced by "true". (It is already present, as H16). --- Hypothesis H36 has been replaced by "true". (It is already present, as H17). --- Hypothesis H39 has been replaced by "true". (It is already present, as H20). --- Hypothesis H40 has been replaced by "true". (It is already present, as H21). --- Hypothesis H42 has been replaced by "true". (It is already present, as H23). --- Hypothesis H43 has been replaced by "true". (It is already present, as H24). --- Hypothesis H44 has been replaced by "true". (It is already present, as H25). --- Hypothesis H45 has been replaced by "true". (It is already present, as H26). --- Hypothesis H46 has been replaced by "true". (It is already present, as H27). --- Hypothesis H47 has been replaced by "true". (It is already present, as H28). --- Hypothesis H48 has been replaced by "true". (It is already present, as H29). --- Hypothesis H49 has been replaced by "true". (It is already present, as H30). *** Proved C1: fld_hash_bit_len(fld_h(ctx__1)) >= initialized_hash_bit_length__first using hypothesis H6. *** Proved C2: fld_hash_bit_len(fld_h(ctx__1)) <= initialized_hash_bit_length__last using hypothesis H7. *** Proved C3: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first using hypothesis H10. 
*** Proved C4: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last using hypothesis H11. *** Proved C5: data__index__subtype__1__first = 0 using hypothesis H1. -S- Applied substitution rule skein_512_ha_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H2: data__index__subtype__1__last + skein_512_block_bytes_c < 2147483647 New H15: fld_byte_count(fld_h(ctx__1)) <= 2147483647 New C6: data__index__subtype__1__last < 2147483647 New C7: data__index__subtype__1__last + skein_512_block_bytes_c <= 2147483646 *** Proved C7: data__index__subtype__1__last + skein_512_block_bytes_c <= 2147483646 using hypothesis H2. -S- Applied substitution rule skein_512_ha_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H2: data__index__subtype__1__last < 2147483583 -S- Applied substitution rule skein_512_ha_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H14: fld_byte_count(fld_h(ctx__1)) >= 0 -S- Applied substitution rule skein_512_ha_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H20: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= 0 -S- Applied substitution rule skein_512_ha_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H21: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= 63 -S- Applied substitution rule skein_512_ha_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H23: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= 0 -S- Applied substitution rule skein_512_ha_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H24: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= 127 -S- Applied substitution rule skein_512_ha_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H3: for_all(i___1 : integer, data__index__subtype__1__first <= i___1 and i___1 <= data__index__subtype__1__last -> 0 <= element(data, [ i___1]) and element(data, [i___1]) <= spark__unsigned__byte__last) New H12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_ha_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H3: for_all(i___1 : integer, data__index__subtype__1__first <= i___1 and i___1 <= data__index__subtype__1__last -> 0 <= element(data, [ i___1]) and element(data, [i___1]) <= 255) New H12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) -S- Applied substitution rule skein_512_ha_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H25: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= 0 -S- Applied substitution rule skein_512_ha_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H26: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= 65535 -S- Applied substitution rule skein_512_ha_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. 
New H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= 0 -S- Applied substitution rule skein_512_ha_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H28: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= 4294967295 -S- Applied substitution rule skein_512_ha_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H13: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) New H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= 0 -S- Applied substitution rule skein_512_ha_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H30: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= 18446744073709551615 New H13: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_ha_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H16: fld_hash_bit_len(fld_h(ctx__1)) >= 0 -S- Applied substitution rule skein_512_ha_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H17: fld_hash_bit_len(fld_h(ctx__1)) <= 2147483640 -S- Applied substitution rule skein_512_ha_rules(87). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H4: true New H6: fld_hash_bit_len(fld_h(ctx__1)) >= 1 -S- Applied substitution rule skein_512_ha_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H5: true New H7: fld_hash_bit_len(fld_h(ctx__1)) <= 2147483640 -S- Applied substitution rule skein_512_ha_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H13: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__1), [ i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_ha_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H13: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_ha_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H10: fld_byte_count(fld_h(ctx__1)) >= 0 -S- Applied substitution rule skein_512_ha_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H11: fld_byte_count(fld_h(ctx__1)) <= 64 -S- Applied substitution rule skein_512_ha_rules(102). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H12: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx__1), [ i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) -S- Applied substitution rule skein_512_ha_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. 
New H12: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) *** Proved C6: data__index__subtype__1__last < 2147483647 using hypothesis H2. *** PROVED VC. @@@@@@@@@@ VC: function_skein_512_hash_3. @@@@@@@@@@ %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___1 : integer, data__index__subtype__1__first <= i___1 and i___1 <= data__index__subtype__1__last -> spark__unsigned__byte__first <= element(data, [i___1]) and element( data, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H12 on reading formula in, to give: %%% H12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H13 on reading formula in, to give: %%% H13: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H31 has been replaced by "true". (It is already present, as H12). --- Hypothesis H32 has been replaced by "true". (It is already present, as H13). --- Hypothesis H33 has been replaced by "true". (It is already present, as H14). --- Hypothesis H34 has been replaced by "true". (It is already present, as H15). --- Hypothesis H35 has been replaced by "true". (It is already present, as H16). --- Hypothesis H36 has been replaced by "true". (It is already present, as H17). --- Hypothesis H39 has been replaced by "true". (It is already present, as H20). --- Hypothesis H40 has been replaced by "true". (It is already present, as H21). --- Hypothesis H42 has been replaced by "true". (It is already present, as H23). --- Hypothesis H43 has been replaced by "true". (It is already present, as H24). --- Hypothesis H44 has been replaced by "true". (It is already present, as H25). --- Hypothesis H45 has been replaced by "true". (It is already present, as H26). --- Hypothesis H46 has been replaced by "true". (It is already present, as H27). --- Hypothesis H47 has been replaced by "true". (It is already present, as H28). --- Hypothesis H48 has been replaced by "true". (It is already present, as H29). --- Hypothesis H49 has been replaced by "true". (It is already present, as H30). --- Hypothesis H50 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H51 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H52 has been replaced by "true". (It is already present, as H10). --- Hypothesis H53 has been replaced by "true". (It is already present, as H11). --- Hypothesis H54 has been replaced by "true". (It is already present, as H1) . 
%%% Simplified H62 on reading formula in, to give: %%% H62: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H63 on reading formula in, to give: %%% H63: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H81 has been replaced by "true". (It is already present, as H62). --- Hypothesis H82 has been replaced by "true". (It is already present, as H63). --- Hypothesis H83 has been replaced by "true". (It is already present, as H64). --- Hypothesis H84 has been replaced by "true". (It is already present, as H65). --- Hypothesis H85 has been replaced by "true". (It is already present, as H66). --- Hypothesis H86 has been replaced by "true". (It is already present, as H67). --- Hypothesis H89 has been replaced by "true". (It is already present, as H70). --- Hypothesis H90 has been replaced by "true". (It is already present, as H71). --- Hypothesis H92 has been replaced by "true". (It is already present, as H73). --- Hypothesis H93 has been replaced by "true". (It is already present, as H74). --- Hypothesis H94 has been replaced by "true". (It is already present, as H75). --- Hypothesis H95 has been replaced by "true". (It is already present, as H76). --- Hypothesis H96 has been replaced by "true". (It is already present, as H77). --- Hypothesis H97 has been replaced by "true". (It is already present, as H78). --- Hypothesis H98 has been replaced by "true". (It is already present, as H79). --- Hypothesis H99 has been replaced by "true". (It is already present, as H80). *** Proved C1: fld_hash_bit_len(fld_h(ctx__2)) >= initialized_hash_bit_length__first using hypothesis H57. *** Proved C2: fld_hash_bit_len(fld_h(ctx__2)) <= initialized_hash_bit_length__last using hypothesis H58. *** Proved C3: fld_byte_count(fld_h(ctx__2)) >= skein_512_block_bytes_count__first using hypothesis H60. *** Proved C4: fld_byte_count(fld_h(ctx__2)) <= skein_512_block_bytes_count__last using hypothesis H61. -S- Applied substitution rule skein_512_ha_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H2: data__index__subtype__1__last + 64 < natural__last New H56: data__index__subtype__1__last + 65 <= natural__last -S- Applied substitution rule skein_512_ha_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H14: fld_byte_count(fld_h(ctx__1)) >= 0 New H64: fld_byte_count(fld_h(ctx__2)) >= 0 -S- Applied substitution rule skein_512_ha_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H15: fld_byte_count(fld_h(ctx__1)) <= 2147483647 New H55: data__index__subtype__1__last < 2147483647 New H65: fld_byte_count(fld_h(ctx__2)) <= 2147483647 New H2: data__index__subtype__1__last < 2147483583 New H56: data__index__subtype__1__last <= 2147483582 -S- Applied substitution rule skein_512_ha_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H20: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= 0 New H70: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= 0 -S- Applied substitution rule skein_512_ha_rules(42). 
This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H21: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= 63 New H71: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= 63 -S- Applied substitution rule skein_512_ha_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H23: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= 0 New H73: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= 0 -S- Applied substitution rule skein_512_ha_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H24: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= 127 New H74: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= 127 -S- Applied substitution rule skein_512_ha_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H3: for_all(i___1 : integer, data__index__subtype__1__first <= i___1 and i___1 <= data__index__subtype__1__last -> 0 <= element(data, [ i___1]) and element(data, [i___1]) <= spark__unsigned__byte__last) New H12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) New H62: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_ha_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H3: for_all(i___1 : integer, data__index__subtype__1__first <= i___1 and i___1 <= data__index__subtype__1__last -> 0 <= element(data, [ i___1]) and element(data, [i___1]) <= 255) New H12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) New H62: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= 255) -S- Applied substitution rule skein_512_ha_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H25: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= 0 New H75: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= 0 -S- Applied substitution rule skein_512_ha_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H26: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= 65535 New H76: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= 65535 -S- Applied substitution rule skein_512_ha_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= 0 New H77: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= 0 -S- Applied substitution rule skein_512_ha_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H28: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= 4294967295 New H78: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= 4294967295 -S- Applied substitution rule skein_512_ha_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H13: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) New H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= 0 New H63: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last) New H79: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= 0 -S- Applied substitution rule skein_512_ha_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H30: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= 18446744073709551615 New H80: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= 18446744073709551615 New H13: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) New H63: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_ha_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H16: fld_hash_bit_len(fld_h(ctx__1)) >= 0 New H66: fld_hash_bit_len(fld_h(ctx__2)) >= 0 -S- Applied substitution rule skein_512_ha_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H17: fld_hash_bit_len(fld_h(ctx__1)) <= 2147483640 New H67: fld_hash_bit_len(fld_h(ctx__2)) <= 2147483640 -S- Applied substitution rule skein_512_ha_rules(87). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H4: true New H6: fld_hash_bit_len(fld_h(ctx__1)) >= 1 New H57: fld_hash_bit_len(fld_h(ctx__2)) >= 1 -S- Applied substitution rule skein_512_ha_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H5: true New H7: fld_hash_bit_len(fld_h(ctx__1)) <= 2147483640 New H58: fld_hash_bit_len(fld_h(ctx__2)) <= 2147483640 -S- Applied substitution rule skein_512_ha_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H13: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__1), [ i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) New H63: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__2), [ i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_ha_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H13: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) New H63: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_ha_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. 
New H10: fld_byte_count(fld_h(ctx__1)) >= 0 New H60: fld_byte_count(fld_h(ctx__2)) >= 0 -S- Applied substitution rule skein_512_ha_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H11: fld_byte_count(fld_h(ctx__1)) <= 64 New H61: fld_byte_count(fld_h(ctx__2)) <= 64 -S- Applied substitution rule skein_512_ha_rules(102). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H12: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx__1), [ i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) New H62: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx__2), [ i___2]) and element(fld_b(ctx__2), [i___2]) <= 255) -S- Applied substitution rule skein_512_ha_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H12: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) New H62: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= 255) -S- Applied substitution rule skein_512_ha_rules(107). This was achieved by replacing all occurrences of skein_512_state_bytes_index__first by: 0. New C5: true -S- Applied substitution rule skein_512_ha_rules(108). This was achieved by replacing all occurrences of skein_512_state_bytes_index__last by: 63. New C6: (fld_hash_bit_len(fld_h(ctx__2)) + 7) div 8 <= 64 *** Proved C5: true --- Eliminated hypothesis H18 (true-hypothesis). --- Eliminated hypothesis H19 (true-hypothesis). --- Eliminated hypothesis H22 (true-hypothesis). --- Eliminated hypothesis H31 (true-hypothesis). --- Eliminated hypothesis H32 (true-hypothesis). --- Eliminated hypothesis H33 (true-hypothesis). --- Eliminated hypothesis H34 (true-hypothesis). --- Eliminated hypothesis H35 (true-hypothesis). --- Eliminated hypothesis H36 (true-hypothesis). --- Eliminated hypothesis H37 (true-hypothesis). --- Eliminated hypothesis H38 (true-hypothesis). --- Eliminated hypothesis H39 (true-hypothesis). --- Eliminated hypothesis H40 (true-hypothesis). --- Eliminated hypothesis H41 (true-hypothesis). --- Eliminated hypothesis H42 (true-hypothesis). --- Eliminated hypothesis H43 (true-hypothesis). --- Eliminated hypothesis H44 (true-hypothesis). --- Eliminated hypothesis H45 (true-hypothesis). --- Eliminated hypothesis H46 (true-hypothesis). --- Eliminated hypothesis H47 (true-hypothesis). --- Eliminated hypothesis H48 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H50 (true-hypothesis). --- Eliminated hypothesis H51 (true-hypothesis). --- Eliminated hypothesis H52 (true-hypothesis). --- Eliminated hypothesis H53 (true-hypothesis). --- Eliminated hypothesis H54 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H69 (true-hypothesis). --- Eliminated hypothesis H72 (true-hypothesis). --- Eliminated hypothesis H81 (true-hypothesis). --- Eliminated hypothesis H82 (true-hypothesis). --- Eliminated hypothesis H83 (true-hypothesis). --- Eliminated hypothesis H84 (true-hypothesis). --- Eliminated hypothesis H85 (true-hypothesis). --- Eliminated hypothesis H86 (true-hypothesis). --- Eliminated hypothesis H87 (true-hypothesis). --- Eliminated hypothesis H88 (true-hypothesis). 
--- Eliminated hypothesis H89 (true-hypothesis). --- Eliminated hypothesis H90 (true-hypothesis). --- Eliminated hypothesis H91 (true-hypothesis). --- Eliminated hypothesis H92 (true-hypothesis). --- Eliminated hypothesis H93 (true-hypothesis). --- Eliminated hypothesis H94 (true-hypothesis). --- Eliminated hypothesis H95 (true-hypothesis). --- Eliminated hypothesis H96 (true-hypothesis). --- Eliminated hypothesis H97 (true-hypothesis). --- Eliminated hypothesis H98 (true-hypothesis). --- Eliminated hypothesis H99 (true-hypothesis). --- Eliminated hypothesis H4 (true-hypothesis). --- Eliminated hypothesis H5 (true-hypothesis). --- Eliminated hypothesis H17 (duplicate of H7). --- Eliminated hypothesis H67 (duplicate of H58). --- Eliminated hypothesis H14 (duplicate of H10). --- Eliminated hypothesis H64 (duplicate of H60). --- Eliminated hypothesis H56 (duplicate of H2). --- Eliminated hypothesis H6 (redundant, given H8). --- Eliminated hypothesis H7 (redundant, given H8). --- Eliminated hypothesis H10 (redundant, given H9). --- Eliminated hypothesis H11 (redundant, given H9). --- Eliminated hypothesis H15 (redundant, given H9). --- Eliminated hypothesis H16 (redundant, given H8). --- Eliminated hypothesis H55 (redundant, given H2). --- Eliminated hypothesis H57 (redundant, given H8 & H59). --- Eliminated hypothesis H58 (redundant, given H8 & H59). --- Eliminated hypothesis H65 (redundant, given H61). --- Eliminated hypothesis H66 (redundant, given H8 & H59). -S- Substituted hypothesis H59. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( ctx__2)) by: fld_hash_bit_len(fld_h(ctx__1)). *** Proved C6: (fld_hash_bit_len(fld_h(ctx__1)) + 7) div 8 <= 64 using hypothesis H8. *** PROVED VC. @@@@@@@@@@ VC: function_skein_512_hash_4. @@@@@@@@@@ *** Proved C1: true *** PROVED VC. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_update/0000755000175000017500000000000011753202331026177 5ustar eugeneugenspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_update/copy_msg_to_b.siv0000644000175000017500000001257411712513676031572 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Update.Copy_Msg_To_B For path(s) from start to run-time check associated with statement of line 758: procedure_copy_msg_to_b_1. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 760: procedure_copy_msg_to_b_2. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 762: procedure_copy_msg_to_b_3. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 763: procedure_copy_msg_to_b_4. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 766: procedure_copy_msg_to_b_5. *** true . /* all conclusions proved */ For path(s) from assertion of line 768 to run-time check associated with statement of line 766: procedure_copy_msg_to_b_6. H1: fld_hash_bit_len(fld_h(ctx)) > 0 . H2: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h(ctx~)) . H3: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h(ctx~)) . H4: fld_byte_count(fld_h(ctx)) >= 0 . H5: fld_byte_count(fld_h(ctx)) <= 64 . 
H6: fld_byte_count(fld_h(ctx)) + num_bytes <= 64 . H7: final_src <= msg__index__subtype__1__last . H8: msg_offset >= 0 . H9: msg_offset <= 2147483647 . H10: num_bytes >= 0 . H11: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) . H12: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x( ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) . H13: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 . H14: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 . H15: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 . H16: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 . H17: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 . H18: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 . H19: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 . H20: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 . H21: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 . H22: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 . H23: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 . H24: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1]) and element(msg, [i___1]) <= 255) . H25: fld_hash_bit_len(fld_h(ctx~)) > 0 . H26: msg__index__subtype__1__first = 0 . H27: msg_offset >= msg__index__subtype__1__first . H28: msg_offset <= msg__index__subtype__1__last . H29: msg_offset + (num_bytes - 1) <= msg__index__subtype__1__last . H30: fld_byte_count(fld_h(ctx~)) + (num_bytes - 1) <= 63 . H31: src >= 0 . H32: dst >= 0 . H33: final_dst >= 0 . H34: final_dst <= 63 . H35: final_src <= 2147483647 . H36: dst <= 62 . H37: src <= 2147483646 . H38: dst < final_dst . H39: src < final_src . H40: integer__size >= 0 . H41: natural__size >= 0 . H42: spark__unsigned__u6__size >= 0 . H43: spark__unsigned__u7__size >= 0 . H44: spark__unsigned__byte__size >= 0 . H45: spark__unsigned__u16__size >= 0 . H46: spark__unsigned__u32__size >= 0 . H47: spark__unsigned__u64__size >= 0 . H48: spark__crypto__word_count_t__size >= 0 . H49: hash_bit_length__size >= 0 . H50: skein_512_state_words_index__size >= 0 . H51: skein_512_block_bytes_count__size >= 0 . H52: skein_512_block_bytes_index__size >= 0 . H53: skein_512_context__size >= 0 . H54: msg__index__subtype__1__first <= msg__index__subtype__1__last . H55: context_header__size >= 0 . H56: msg__index__subtype__1__first >= 0 . H57: msg__index__subtype__1__last >= 0 . H58: msg__index__subtype__1__last <= 2147483647 . H59: msg__index__subtype__1__first <= 2147483647 . -> C1: element(msg, [src + 1]) >= 0 . C2: element(msg, [src + 1]) <= 255 . For path(s) from start to assertion of line 768: procedure_copy_msg_to_b_7. *** true . /* all conclusions proved */ For path(s) from assertion of line 768 to assertion of line 768: procedure_copy_msg_to_b_8. *** true . /* all conclusions proved */ For path(s) from assertion of line 768 to run-time check associated with statement of line 776: procedure_copy_msg_to_b_9. *** true . /* all conclusions proved */ For path(s) from assertion of line 768 to run-time check associated with statement of line 777: procedure_copy_msg_to_b_10. *** true . /* all conclusions proved */ For path(s) from assertion of line 768 to run-time check associated with statement of line 780: procedure_copy_msg_to_b_11. *** true . /* all conclusions proved */ For path(s) from start to finish: procedure_copy_msg_to_b_12. *** true . 
/* all conclusions proved */ For path(s) from assertion of line 768 to finish: procedure_copy_msg_to_b_13. *** true . /* all conclusions proved */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_update/copy_msg_to_b.vcg0000644000175000017500000017022411712513676031545 0ustar eugeneugen ******************************************************* Semantic Analysis of SPARK Text Examiner Pro Edition ******************************************************* procedure Skein.Skein_512_Update.Copy_Msg_To_B For path(s) from start to run-time check associated with statement of line 758: procedure_copy_msg_to_b_1. H1: fld_hash_bit_len(fld_h(ctx)) > 0 . H2: msg__index__subtype__1__first = 0 . H3: msg_offset >= msg__index__subtype__1__first . H4: msg_offset <= msg__index__subtype__1__last . H5: msg_offset + (num_bytes - 1) <= msg__index__subtype__1__last . H6: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) <= skein_512_block_bytes_index__last . H7: msg_offset >= natural__first . H8: msg_offset <= natural__last . H9: num_bytes >= natural__first . H10: num_bytes <= natural__last . H11: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H12: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H13: fld_byte_count(fld_h(ctx)) >= natural__first . H14: fld_byte_count(fld_h(ctx)) <= natural__last . H15: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H16: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H17: true . H18: true . H19: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H20: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H21: true . H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H24: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H25: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H30: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H31: num_bytes > 0 . -> C1: msg_offset >= natural__first . C2: msg_offset <= natural__last . For path(s) from start to run-time check associated with statement of line 760: procedure_copy_msg_to_b_2. H1: fld_hash_bit_len(fld_h(ctx)) > 0 . H2: msg__index__subtype__1__first = 0 . H3: msg_offset >= msg__index__subtype__1__first . H4: msg_offset <= msg__index__subtype__1__last . H5: msg_offset + (num_bytes - 1) <= msg__index__subtype__1__last . H6: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) <= skein_512_block_bytes_index__last . H7: msg_offset >= natural__first . 
H8: msg_offset <= natural__last . H9: num_bytes >= natural__first . H10: num_bytes <= natural__last . H11: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H12: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H13: fld_byte_count(fld_h(ctx)) >= natural__first . H14: fld_byte_count(fld_h(ctx)) <= natural__last . H15: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H16: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H17: true . H18: true . H19: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H20: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H21: true . H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H24: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H25: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H30: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H31: num_bytes > 0 . H32: msg_offset >= natural__first . H33: msg_offset <= natural__last . -> C1: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . C2: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . For path(s) from start to run-time check associated with statement of line 762: procedure_copy_msg_to_b_3. H1: fld_hash_bit_len(fld_h(ctx)) > 0 . H2: msg__index__subtype__1__first = 0 . H3: msg_offset >= msg__index__subtype__1__first . H4: msg_offset <= msg__index__subtype__1__last . H5: msg_offset + (num_bytes - 1) <= msg__index__subtype__1__last . H6: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) <= skein_512_block_bytes_index__last . H7: msg_offset >= natural__first . H8: msg_offset <= natural__last . H9: num_bytes >= natural__first . H10: num_bytes <= natural__last . H11: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H12: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H13: fld_byte_count(fld_h(ctx)) >= natural__first . H14: fld_byte_count(fld_h(ctx)) <= natural__last . H15: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H16: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H17: true . H18: true . 
H19: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H20: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H21: true . H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H24: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H25: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H30: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H31: num_bytes > 0 . H32: msg_offset >= natural__first . H33: msg_offset <= natural__last . H34: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H35: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H36: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H37: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . -> C1: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) >= skein_512_block_bytes_index__first . C2: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) <= skein_512_block_bytes_index__last . C3: num_bytes - 1 >= integer__base__first . C4: num_bytes - 1 <= integer__base__last . For path(s) from start to run-time check associated with statement of line 763: procedure_copy_msg_to_b_4. H1: fld_hash_bit_len(fld_h(ctx)) > 0 . H2: msg__index__subtype__1__first = 0 . H3: msg_offset >= msg__index__subtype__1__first . H4: msg_offset <= msg__index__subtype__1__last . H5: msg_offset + (num_bytes - 1) <= msg__index__subtype__1__last . H6: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) <= skein_512_block_bytes_index__last . H7: msg_offset >= natural__first . H8: msg_offset <= natural__last . H9: num_bytes >= natural__first . H10: num_bytes <= natural__last . H11: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H12: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H13: fld_byte_count(fld_h(ctx)) >= natural__first . H14: fld_byte_count(fld_h(ctx)) <= natural__last . H15: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H16: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H17: true . H18: true . H19: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H20: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H21: true . H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H24: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . 
H25: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H30: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H31: num_bytes > 0 . H32: msg_offset >= natural__first . H33: msg_offset <= natural__last . H34: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H35: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H36: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H37: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H38: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) >= skein_512_block_bytes_index__first . H39: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) <= skein_512_block_bytes_index__last . H40: num_bytes - 1 >= integer__base__first . H41: num_bytes - 1 <= integer__base__last . H42: msg_offset >= natural__first . H43: msg_offset <= natural__last . -> C1: msg_offset + (num_bytes - 1) >= natural__first . C2: msg_offset + (num_bytes - 1) <= natural__last . C3: num_bytes - 1 >= integer__base__first . C4: num_bytes - 1 <= integer__base__last . For path(s) from start to run-time check associated with statement of line 766: procedure_copy_msg_to_b_5. H1: fld_hash_bit_len(fld_h(ctx)) > 0 . H2: msg__index__subtype__1__first = 0 . H3: msg_offset >= msg__index__subtype__1__first . H4: msg_offset <= msg__index__subtype__1__last . H5: msg_offset + (num_bytes - 1) <= msg__index__subtype__1__last . H6: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) <= skein_512_block_bytes_index__last . H7: msg_offset >= natural__first . H8: msg_offset <= natural__last . H9: num_bytes >= natural__first . H10: num_bytes <= natural__last . H11: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H12: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H13: fld_byte_count(fld_h(ctx)) >= natural__first . H14: fld_byte_count(fld_h(ctx)) <= natural__last . H15: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H16: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H17: true . H18: true . H19: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H20: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H21: true . H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H24: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H25: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . 
H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H30: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H31: num_bytes > 0 . H32: msg_offset >= natural__first . H33: msg_offset <= natural__last . H34: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H35: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H36: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H37: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H38: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) >= skein_512_block_bytes_index__first . H39: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) <= skein_512_block_bytes_index__last . H40: num_bytes - 1 >= integer__base__first . H41: num_bytes - 1 <= integer__base__last . H42: msg_offset >= natural__first . H43: msg_offset <= natural__last . H44: msg_offset + (num_bytes - 1) >= natural__first . H45: msg_offset + (num_bytes - 1) <= natural__last . H46: num_bytes - 1 >= integer__base__first . H47: num_bytes - 1 <= integer__base__last . H48: msg_offset >= natural__first . H49: msg_offset <= natural__last . -> C1: element(msg, [msg_offset]) >= spark__unsigned__byte__first . C2: element(msg, [msg_offset]) <= spark__unsigned__byte__last . C3: msg_offset >= msg__index__subtype__1__first . C4: msg_offset <= msg__index__subtype__1__last . C5: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . C6: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . For path(s) from assertion of line 768 to run-time check associated with statement of line 766: procedure_copy_msg_to_b_6. H1: fld_hash_bit_len(fld_h(ctx)) > 0 . H2: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h( ctx~)) . H3: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h( ctx~)) . H4: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H5: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H6: fld_byte_count(fld_h(ctx)) + num_bytes - 1 <= skein_512_block_bytes_index__last . H7: final_src <= msg__index__subtype__1__last . H8: msg_offset >= natural__first . H9: msg_offset <= natural__last . H10: num_bytes >= natural__first . H11: num_bytes <= natural__last . H12: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H13: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H14: fld_byte_count(fld_h(ctx)) >= natural__first . H15: fld_byte_count(fld_h(ctx)) <= natural__last . H16: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H17: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H18: true . H19: true . H20: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H21: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H22: true . 
H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H24: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H25: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H26: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H28: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H30: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H31: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H32: fld_hash_bit_len(fld_h(ctx~)) > 0 . H33: msg__index__subtype__1__first = 0 . H34: msg_offset >= msg__index__subtype__1__first . H35: msg_offset <= msg__index__subtype__1__last . H36: msg_offset + (num_bytes - 1) <= msg__index__subtype__1__last . H37: fld_byte_count(fld_h(ctx~)) + (num_bytes - 1) <= skein_512_block_bytes_index__last . H38: src >= natural__first . H39: src <= natural__last . H40: dst >= skein_512_block_bytes_index__first . H41: dst <= skein_512_block_bytes_index__last . H42: final_dst >= skein_512_block_bytes_index__first . H43: final_dst <= skein_512_block_bytes_index__last . H44: final_src >= natural__first . H45: final_src <= natural__last . H46: not ((dst >= final_dst) or (src >= final_src)) . H47: dst >= skein_512_block_bytes_index__first . H48: dst <= skein_512_block_bytes_index__last . H49: dst + 1 >= skein_512_block_bytes_index__first . H50: dst + 1 <= skein_512_block_bytes_index__last . H51: src >= natural__first . H52: src <= natural__last . H53: src + 1 >= natural__first . H54: src + 1 <= natural__last . H55: src + 1 >= natural__first . H56: src + 1 <= natural__last . -> C1: element(msg, [src + 1]) >= spark__unsigned__byte__first . C2: element(msg, [src + 1]) <= spark__unsigned__byte__last . C3: src + 1 >= msg__index__subtype__1__first . C4: src + 1 <= msg__index__subtype__1__last . C5: dst + 1 >= skein_512_block_bytes_index__first . C6: dst + 1 <= skein_512_block_bytes_index__last . For path(s) from start to assertion of line 768: procedure_copy_msg_to_b_7. H1: fld_hash_bit_len(fld_h(ctx)) > 0 . H2: msg__index__subtype__1__first = 0 . H3: msg_offset >= msg__index__subtype__1__first . H4: msg_offset <= msg__index__subtype__1__last . H5: msg_offset + (num_bytes - 1) <= msg__index__subtype__1__last . H6: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) <= skein_512_block_bytes_index__last . H7: msg_offset >= natural__first . H8: msg_offset <= natural__last . H9: num_bytes >= natural__first . H10: num_bytes <= natural__last . H11: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H12: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H13: fld_byte_count(fld_h(ctx)) >= natural__first . H14: fld_byte_count(fld_h(ctx)) <= natural__last . 
H15: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H16: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H17: true . H18: true . H19: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H20: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H21: true . H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H24: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H25: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H30: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H31: num_bytes > 0 . H32: msg_offset >= natural__first . H33: msg_offset <= natural__last . H34: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H35: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H36: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H37: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . H38: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) >= skein_512_block_bytes_index__first . H39: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) <= skein_512_block_bytes_index__last . H40: num_bytes - 1 >= integer__base__first . H41: num_bytes - 1 <= integer__base__last . H42: msg_offset >= natural__first . H43: msg_offset <= natural__last . H44: msg_offset + (num_bytes - 1) >= natural__first . H45: msg_offset + (num_bytes - 1) <= natural__last . H46: num_bytes - 1 >= integer__base__first . H47: num_bytes - 1 <= integer__base__last . H48: msg_offset >= natural__first . H49: msg_offset <= natural__last . H50: element(msg, [msg_offset]) >= spark__unsigned__byte__first . H51: element(msg, [msg_offset]) <= spark__unsigned__byte__last . H52: msg_offset >= msg__index__subtype__1__first . H53: msg_offset <= msg__index__subtype__1__last . H54: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first . H55: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last . -> C1: fld_hash_bit_len(fld_h(upf_b(ctx, update(fld_b(ctx), [ fld_byte_count(fld_h(ctx))], element(msg, [ msg_offset]))))) > 0 . C2: fld_hash_bit_len(fld_h(upf_b(ctx, update(fld_b(ctx), [ fld_byte_count(fld_h(ctx))], element(msg, [ msg_offset]))))) = fld_hash_bit_len(fld_h(ctx)) . C3: fld_byte_count(fld_h(upf_b(ctx, update(fld_b(ctx), [ fld_byte_count(fld_h(ctx))], element(msg, [ msg_offset]))))) = fld_byte_count(fld_h(ctx)) . C4: fld_byte_count(fld_h(upf_b(ctx, update(fld_b(ctx), [ fld_byte_count(fld_h(ctx))], element(msg, [ msg_offset]))))) >= skein_512_block_bytes_count__first . C5: fld_byte_count(fld_h(upf_b(ctx, update(fld_b(ctx), [ fld_byte_count(fld_h(ctx))], element(msg, [ msg_offset]))))) <= skein_512_block_bytes_count__last . C6: fld_byte_count(fld_h(upf_b(ctx, update(fld_b(ctx), [ fld_byte_count(fld_h(ctx))], element(msg, [ msg_offset]))))) + num_bytes - 1 <= skein_512_block_bytes_index__last . 
C7: msg_offset + (num_bytes - 1) <= msg__index__subtype__1__last . C8: msg_offset >= natural__first . C9: msg_offset <= natural__last . C10: num_bytes >= natural__first . C11: num_bytes <= natural__last . C12: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_b( ctx, update(fld_b(ctx), [fld_byte_count(fld_h( ctx))], element(msg, [msg_offset])))), [i___2]) >= spark__unsigned__byte__first) and (element(fld_b(upf_b( ctx, update(fld_b(ctx), [fld_byte_count(fld_h( ctx))], element(msg, [msg_offset])))), [i___2]) <= spark__unsigned__byte__last))) . C13: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_b( ctx, update(fld_b(ctx), [fld_byte_count(fld_h( ctx))], element(msg, [msg_offset])))), [i___1]) >= spark__unsigned__u64__first) and (element(fld_x(upf_b( ctx, update(fld_b(ctx), [fld_byte_count(fld_h( ctx))], element(msg, [msg_offset])))), [i___1]) <= spark__unsigned__u64__last))) . C14: fld_byte_count(fld_h(upf_b(ctx, update(fld_b(ctx), [ fld_byte_count(fld_h(ctx))], element(msg, [ msg_offset]))))) >= natural__first . C15: fld_byte_count(fld_h(upf_b(ctx, update(fld_b(ctx), [ fld_byte_count(fld_h(ctx))], element(msg, [ msg_offset]))))) <= natural__last . C16: fld_hash_bit_len(fld_h(upf_b(ctx, update(fld_b(ctx), [ fld_byte_count(fld_h(ctx))], element(msg, [ msg_offset]))))) >= hash_bit_length__first . C17: fld_hash_bit_len(fld_h(upf_b(ctx, update(fld_b(ctx), [ fld_byte_count(fld_h(ctx))], element(msg, [ msg_offset]))))) <= hash_bit_length__last . C18: true . C19: true . C20: fld_field_type(fld_tweak_words(fld_h(upf_b(ctx, update(fld_b( ctx), [fld_byte_count(fld_h(ctx))], element(msg, [ msg_offset])))))) >= spark__unsigned__u6__first . C21: fld_field_type(fld_tweak_words(fld_h(upf_b(ctx, update(fld_b( ctx), [fld_byte_count(fld_h(ctx))], element(msg, [ msg_offset])))))) <= spark__unsigned__u6__last . C22: true . C23: fld_tree_level(fld_tweak_words(fld_h(upf_b(ctx, update(fld_b( ctx), [fld_byte_count(fld_h(ctx))], element(msg, [ msg_offset])))))) >= spark__unsigned__u7__first . C24: fld_tree_level(fld_tweak_words(fld_h(upf_b(ctx, update(fld_b( ctx), [fld_byte_count(fld_h(ctx))], element(msg, [ msg_offset])))))) <= spark__unsigned__u7__last . C25: fld_reserved(fld_tweak_words(fld_h(upf_b(ctx, update(fld_b( ctx), [fld_byte_count(fld_h(ctx))], element(msg, [ msg_offset])))))) >= spark__unsigned__u16__first . C26: fld_reserved(fld_tweak_words(fld_h(upf_b(ctx, update(fld_b( ctx), [fld_byte_count(fld_h(ctx))], element(msg, [ msg_offset])))))) <= spark__unsigned__u16__last . C27: fld_byte_count_msb(fld_tweak_words(fld_h(upf_b(ctx, update(fld_b( ctx), [fld_byte_count(fld_h(ctx))], element(msg, [ msg_offset])))))) >= spark__unsigned__u32__first . C28: fld_byte_count_msb(fld_tweak_words(fld_h(upf_b(ctx, update(fld_b( ctx), [fld_byte_count(fld_h(ctx))], element(msg, [ msg_offset])))))) <= spark__unsigned__u32__last . C29: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_b(ctx, update(fld_b( ctx), [fld_byte_count(fld_h(ctx))], element(msg, [ msg_offset])))))) >= spark__unsigned__u64__first . C30: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_b(ctx, update(fld_b( ctx), [fld_byte_count(fld_h(ctx))], element(msg, [ msg_offset])))))) <= spark__unsigned__u64__last . 
C31: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . C32: fld_hash_bit_len(fld_h(ctx)) > 0 . C33: msg__index__subtype__1__first = 0 . C34: msg_offset >= msg__index__subtype__1__first . C35: msg_offset <= msg__index__subtype__1__last . C36: msg_offset + (num_bytes - 1) <= msg__index__subtype__1__last . C37: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) <= skein_512_block_bytes_index__last . For path(s) from assertion of line 768 to assertion of line 768: procedure_copy_msg_to_b_8. H1: fld_hash_bit_len(fld_h(ctx)) > 0 . H2: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h( ctx~)) . H3: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h( ctx~)) . H4: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H5: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H6: fld_byte_count(fld_h(ctx)) + num_bytes - 1 <= skein_512_block_bytes_index__last . H7: final_src <= msg__index__subtype__1__last . H8: msg_offset >= natural__first . H9: msg_offset <= natural__last . H10: num_bytes >= natural__first . H11: num_bytes <= natural__last . H12: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H13: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H14: fld_byte_count(fld_h(ctx)) >= natural__first . H15: fld_byte_count(fld_h(ctx)) <= natural__last . H16: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H17: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H18: true . H19: true . H20: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H21: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H22: true . H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H24: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H25: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H26: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H28: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H30: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H31: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H32: fld_hash_bit_len(fld_h(ctx~)) > 0 . H33: msg__index__subtype__1__first = 0 . H34: msg_offset >= msg__index__subtype__1__first . H35: msg_offset <= msg__index__subtype__1__last . H36: msg_offset + (num_bytes - 1) <= msg__index__subtype__1__last . H37: fld_byte_count(fld_h(ctx~)) + (num_bytes - 1) <= skein_512_block_bytes_index__last . H38: src >= natural__first . H39: src <= natural__last . 
H40: dst >= skein_512_block_bytes_index__first . H41: dst <= skein_512_block_bytes_index__last . H42: final_dst >= skein_512_block_bytes_index__first . H43: final_dst <= skein_512_block_bytes_index__last . H44: final_src >= natural__first . H45: final_src <= natural__last . H46: not ((dst >= final_dst) or (src >= final_src)) . H47: dst >= skein_512_block_bytes_index__first . H48: dst <= skein_512_block_bytes_index__last . H49: dst + 1 >= skein_512_block_bytes_index__first . H50: dst + 1 <= skein_512_block_bytes_index__last . H51: src >= natural__first . H52: src <= natural__last . H53: src + 1 >= natural__first . H54: src + 1 <= natural__last . H55: src + 1 >= natural__first . H56: src + 1 <= natural__last . H57: element(msg, [src + 1]) >= spark__unsigned__byte__first . H58: element(msg, [src + 1]) <= spark__unsigned__byte__last . H59: src + 1 >= msg__index__subtype__1__first . H60: src + 1 <= msg__index__subtype__1__last . H61: dst + 1 >= skein_512_block_bytes_index__first . H62: dst + 1 <= skein_512_block_bytes_index__last . -> C1: fld_hash_bit_len(fld_h(upf_b(ctx, update(fld_b(ctx), [ dst + 1], element(msg, [src + 1]))))) > 0 . C2: fld_hash_bit_len(fld_h(upf_b(ctx, update(fld_b(ctx), [ dst + 1], element(msg, [src + 1]))))) = fld_hash_bit_len( fld_h(ctx~)) . C3: fld_byte_count(fld_h(upf_b(ctx, update(fld_b(ctx), [ dst + 1], element(msg, [src + 1]))))) = fld_byte_count(fld_h( ctx~)) . C4: fld_byte_count(fld_h(upf_b(ctx, update(fld_b(ctx), [ dst + 1], element(msg, [src + 1]))))) >= skein_512_block_bytes_count__first . C5: fld_byte_count(fld_h(upf_b(ctx, update(fld_b(ctx), [ dst + 1], element(msg, [src + 1]))))) <= skein_512_block_bytes_count__last . C6: fld_byte_count(fld_h(upf_b(ctx, update(fld_b(ctx), [ dst + 1], element(msg, [src + 1]))))) + num_bytes - 1 <= skein_512_block_bytes_index__last . C7: final_src <= msg__index__subtype__1__last . C8: msg_offset >= natural__first . C9: msg_offset <= natural__last . C10: num_bytes >= natural__first . C11: num_bytes <= natural__last . C12: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_b( ctx, update(fld_b(ctx), [dst + 1], element(msg, [ src + 1])))), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(upf_b(ctx, update(fld_b(ctx), [dst + 1], element( msg, [src + 1])))), [i___2]) <= spark__unsigned__byte__last))) . C13: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_b( ctx, update(fld_b(ctx), [dst + 1], element(msg, [ src + 1])))), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(upf_b(ctx, update(fld_b(ctx), [dst + 1], element( msg, [src + 1])))), [i___1]) <= spark__unsigned__u64__last))) . C14: fld_byte_count(fld_h(upf_b(ctx, update(fld_b(ctx), [ dst + 1], element(msg, [src + 1]))))) >= natural__first . C15: fld_byte_count(fld_h(upf_b(ctx, update(fld_b(ctx), [ dst + 1], element(msg, [src + 1]))))) <= natural__last . C16: fld_hash_bit_len(fld_h(upf_b(ctx, update(fld_b(ctx), [ dst + 1], element(msg, [src + 1]))))) >= hash_bit_length__first . C17: fld_hash_bit_len(fld_h(upf_b(ctx, update(fld_b(ctx), [ dst + 1], element(msg, [src + 1]))))) <= hash_bit_length__last . C18: true . C19: true . C20: fld_field_type(fld_tweak_words(fld_h(upf_b(ctx, update(fld_b( ctx), [dst + 1], element(msg, [src + 1])))))) >= spark__unsigned__u6__first . 
C21: fld_field_type(fld_tweak_words(fld_h(upf_b(ctx, update(fld_b( ctx), [dst + 1], element(msg, [src + 1])))))) <= spark__unsigned__u6__last . C22: true . C23: fld_tree_level(fld_tweak_words(fld_h(upf_b(ctx, update(fld_b( ctx), [dst + 1], element(msg, [src + 1])))))) >= spark__unsigned__u7__first . C24: fld_tree_level(fld_tweak_words(fld_h(upf_b(ctx, update(fld_b( ctx), [dst + 1], element(msg, [src + 1])))))) <= spark__unsigned__u7__last . C25: fld_reserved(fld_tweak_words(fld_h(upf_b(ctx, update(fld_b( ctx), [dst + 1], element(msg, [src + 1])))))) >= spark__unsigned__u16__first . C26: fld_reserved(fld_tweak_words(fld_h(upf_b(ctx, update(fld_b( ctx), [dst + 1], element(msg, [src + 1])))))) <= spark__unsigned__u16__last . C27: fld_byte_count_msb(fld_tweak_words(fld_h(upf_b(ctx, update(fld_b( ctx), [dst + 1], element(msg, [src + 1])))))) >= spark__unsigned__u32__first . C28: fld_byte_count_msb(fld_tweak_words(fld_h(upf_b(ctx, update(fld_b( ctx), [dst + 1], element(msg, [src + 1])))))) <= spark__unsigned__u32__last . C29: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_b(ctx, update(fld_b( ctx), [dst + 1], element(msg, [src + 1])))))) >= spark__unsigned__u64__first . C30: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_b(ctx, update(fld_b( ctx), [dst + 1], element(msg, [src + 1])))))) <= spark__unsigned__u64__last . C31: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . C32: fld_hash_bit_len(fld_h(ctx~)) > 0 . C33: msg__index__subtype__1__first = 0 . C34: msg_offset >= msg__index__subtype__1__first . C35: msg_offset <= msg__index__subtype__1__last . C36: msg_offset + (num_bytes - 1) <= msg__index__subtype__1__last . C37: fld_byte_count(fld_h(ctx~)) + (num_bytes - 1) <= skein_512_block_bytes_index__last . For path(s) from assertion of line 768 to run-time check associated with statement of line 776: procedure_copy_msg_to_b_9. H1: fld_hash_bit_len(fld_h(ctx)) > 0 . H2: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h( ctx~)) . H3: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h( ctx~)) . H4: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H5: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H6: fld_byte_count(fld_h(ctx)) + num_bytes - 1 <= skein_512_block_bytes_index__last . H7: final_src <= msg__index__subtype__1__last . H8: msg_offset >= natural__first . H9: msg_offset <= natural__last . H10: num_bytes >= natural__first . H11: num_bytes <= natural__last . H12: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H13: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H14: fld_byte_count(fld_h(ctx)) >= natural__first . H15: fld_byte_count(fld_h(ctx)) <= natural__last . H16: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H17: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H18: true . H19: true . H20: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . 
H21: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H22: true . H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H24: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H25: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H26: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H28: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H30: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H31: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H32: fld_hash_bit_len(fld_h(ctx~)) > 0 . H33: msg__index__subtype__1__first = 0 . H34: msg_offset >= msg__index__subtype__1__first . H35: msg_offset <= msg__index__subtype__1__last . H36: msg_offset + (num_bytes - 1) <= msg__index__subtype__1__last . H37: fld_byte_count(fld_h(ctx~)) + (num_bytes - 1) <= skein_512_block_bytes_index__last . H38: src >= natural__first . H39: src <= natural__last . H40: dst >= skein_512_block_bytes_index__first . H41: dst <= skein_512_block_bytes_index__last . H42: final_dst >= skein_512_block_bytes_index__first . H43: final_dst <= skein_512_block_bytes_index__last . H44: final_src >= natural__first . H45: final_src <= natural__last . H46: not ((dst >= final_dst) or (src >= final_src)) . H47: dst >= skein_512_block_bytes_index__first . H48: dst <= skein_512_block_bytes_index__last . -> C1: dst + 1 >= skein_512_block_bytes_index__first . C2: dst + 1 <= skein_512_block_bytes_index__last . For path(s) from assertion of line 768 to run-time check associated with statement of line 777: procedure_copy_msg_to_b_10. H1: fld_hash_bit_len(fld_h(ctx)) > 0 . H2: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h( ctx~)) . H3: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h( ctx~)) . H4: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H5: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H6: fld_byte_count(fld_h(ctx)) + num_bytes - 1 <= skein_512_block_bytes_index__last . H7: final_src <= msg__index__subtype__1__last . H8: msg_offset >= natural__first . H9: msg_offset <= natural__last . H10: num_bytes >= natural__first . H11: num_bytes <= natural__last . H12: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H13: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H14: fld_byte_count(fld_h(ctx)) >= natural__first . H15: fld_byte_count(fld_h(ctx)) <= natural__last . H16: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H17: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H18: true . H19: true . H20: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . 
H21: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H22: true . H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H24: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H25: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H26: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H28: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H30: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H31: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H32: fld_hash_bit_len(fld_h(ctx~)) > 0 . H33: msg__index__subtype__1__first = 0 . H34: msg_offset >= msg__index__subtype__1__first . H35: msg_offset <= msg__index__subtype__1__last . H36: msg_offset + (num_bytes - 1) <= msg__index__subtype__1__last . H37: fld_byte_count(fld_h(ctx~)) + (num_bytes - 1) <= skein_512_block_bytes_index__last . H38: src >= natural__first . H39: src <= natural__last . H40: dst >= skein_512_block_bytes_index__first . H41: dst <= skein_512_block_bytes_index__last . H42: final_dst >= skein_512_block_bytes_index__first . H43: final_dst <= skein_512_block_bytes_index__last . H44: final_src >= natural__first . H45: final_src <= natural__last . H46: not ((dst >= final_dst) or (src >= final_src)) . H47: dst >= skein_512_block_bytes_index__first . H48: dst <= skein_512_block_bytes_index__last . H49: dst + 1 >= skein_512_block_bytes_index__first . H50: dst + 1 <= skein_512_block_bytes_index__last . H51: src >= natural__first . H52: src <= natural__last . -> C1: src + 1 >= natural__first . C2: src + 1 <= natural__last . For path(s) from assertion of line 768 to run-time check associated with statement of line 780: procedure_copy_msg_to_b_11. H1: fld_hash_bit_len(fld_h(ctx)) > 0 . H2: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h( ctx~)) . H3: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h( ctx~)) . H4: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H5: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H6: fld_byte_count(fld_h(ctx)) + num_bytes - 1 <= skein_512_block_bytes_index__last . H7: final_src <= msg__index__subtype__1__last . H8: msg_offset >= natural__first . H9: msg_offset <= natural__last . H10: num_bytes >= natural__first . H11: num_bytes <= natural__last . H12: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H13: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H14: fld_byte_count(fld_h(ctx)) >= natural__first . H15: fld_byte_count(fld_h(ctx)) <= natural__last . H16: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H17: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H18: true . H19: true . 
H20: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H21: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H22: true . H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H24: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H25: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H26: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H28: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H30: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H31: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H32: fld_hash_bit_len(fld_h(ctx~)) > 0 . H33: msg__index__subtype__1__first = 0 . H34: msg_offset >= msg__index__subtype__1__first . H35: msg_offset <= msg__index__subtype__1__last . H36: msg_offset + (num_bytes - 1) <= msg__index__subtype__1__last . H37: fld_byte_count(fld_h(ctx~)) + (num_bytes - 1) <= skein_512_block_bytes_index__last . H38: src >= natural__first . H39: src <= natural__last . H40: dst >= skein_512_block_bytes_index__first . H41: dst <= skein_512_block_bytes_index__last . H42: final_dst >= skein_512_block_bytes_index__first . H43: final_dst <= skein_512_block_bytes_index__last . H44: final_src >= natural__first . H45: final_src <= natural__last . H46: (dst >= final_dst) or (src >= final_src) . -> C1: fld_byte_count(fld_h(ctx)) + num_bytes >= natural__first . C2: fld_byte_count(fld_h(ctx)) + num_bytes <= natural__last . For path(s) from start to finish: procedure_copy_msg_to_b_12. H1: fld_hash_bit_len(fld_h(ctx)) > 0 . H2: msg__index__subtype__1__first = 0 . H3: msg_offset >= msg__index__subtype__1__first . H4: msg_offset <= msg__index__subtype__1__last . H5: msg_offset + (num_bytes - 1) <= msg__index__subtype__1__last . H6: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) <= skein_512_block_bytes_index__last . H7: msg_offset >= natural__first . H8: msg_offset <= natural__last . H9: num_bytes >= natural__first . H10: num_bytes <= natural__last . H11: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H12: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H13: fld_byte_count(fld_h(ctx)) >= natural__first . H14: fld_byte_count(fld_h(ctx)) <= natural__last . H15: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H16: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H17: true . H18: true . H19: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H20: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H21: true . H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . 
H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H24: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H25: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H30: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H31: not (num_bytes > 0) . -> C1: fld_hash_bit_len(fld_h(ctx)) > 0 . C2: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h( ctx)) . C3: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h( ctx)) + num_bytes . C4: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . C5: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . For path(s) from assertion of line 768 to finish: procedure_copy_msg_to_b_13. H1: fld_hash_bit_len(fld_h(ctx)) > 0 . H2: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h( ctx~)) . H3: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h( ctx~)) . H4: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H5: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H6: fld_byte_count(fld_h(ctx)) + num_bytes - 1 <= skein_512_block_bytes_index__last . H7: final_src <= msg__index__subtype__1__last . H8: msg_offset >= natural__first . H9: msg_offset <= natural__last . H10: num_bytes >= natural__first . H11: num_bytes <= natural__last . H12: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H13: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H14: fld_byte_count(fld_h(ctx)) >= natural__first . H15: fld_byte_count(fld_h(ctx)) <= natural__last . H16: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H17: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H18: true . H19: true . H20: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H21: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H22: true . H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H24: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H25: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H26: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H28: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H30: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . 
H31: for_all(i___1: integer, ((i___1 >= msg__index__subtype__1__first) and (i___1 <= msg__index__subtype__1__last)) -> ((element(msg, [ i___1]) >= spark__unsigned__byte__first) and (element( msg, [i___1]) <= spark__unsigned__byte__last))) . H32: fld_hash_bit_len(fld_h(ctx~)) > 0 . H33: msg__index__subtype__1__first = 0 . H34: msg_offset >= msg__index__subtype__1__first . H35: msg_offset <= msg__index__subtype__1__last . H36: msg_offset + (num_bytes - 1) <= msg__index__subtype__1__last . H37: fld_byte_count(fld_h(ctx~)) + (num_bytes - 1) <= skein_512_block_bytes_index__last . H38: src >= natural__first . H39: src <= natural__last . H40: dst >= skein_512_block_bytes_index__first . H41: dst <= skein_512_block_bytes_index__last . H42: final_dst >= skein_512_block_bytes_index__first . H43: final_dst <= skein_512_block_bytes_index__last . H44: final_src >= natural__first . H45: final_src <= natural__last . H46: (dst >= final_dst) or (src >= final_src) . H47: fld_byte_count(fld_h(ctx)) + num_bytes >= natural__first . H48: fld_byte_count(fld_h(ctx)) + num_bytes <= natural__last . -> C1: fld_hash_bit_len(fld_h(upf_h(ctx, upf_byte_count(fld_h( ctx), fld_byte_count(fld_h(ctx)) + num_bytes)))) > 0 . C2: fld_hash_bit_len(fld_h(upf_h(ctx, upf_byte_count(fld_h( ctx), fld_byte_count(fld_h(ctx)) + num_bytes)))) = fld_hash_bit_len(fld_h(ctx~)) . C3: fld_byte_count(fld_h(upf_h(ctx, upf_byte_count(fld_h( ctx), fld_byte_count(fld_h(ctx)) + num_bytes)))) = fld_byte_count(fld_h(ctx~)) + num_bytes . C4: fld_byte_count(fld_h(upf_h(ctx, upf_byte_count(fld_h( ctx), fld_byte_count(fld_h(ctx)) + num_bytes)))) >= skein_512_block_bytes_count__first . C5: fld_byte_count(fld_h(upf_h(ctx, upf_byte_count(fld_h( ctx), fld_byte_count(fld_h(ctx)) + num_bytes)))) <= skein_512_block_bytes_count__last . 
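The verification conditions above all arise from a single byte-copy loop in Skein.Skein_512_Update.Copy_Msg_To_B: the run-time checks of lines 758-766 cover the initial index computations and the element copy, the assertion of line 768 is the loop invariant, lines 776-777 are the index increments, and line 780 is the final update of Ctx.H.Byte_Count. A minimal sketch of that loop shape is shown below; it is reconstructed from these proof paths and the FDL declarations, not taken from the shipped Ada body, and the invariant text itself is elided.

   --  Illustrative sketch only: inferred from copy_msg_to_b.vcg above, not
   --  the actual source of Skein.Skein_512_Update.Copy_Msg_To_B.  Types such
   --  as Skein_512_Context and Skein_512_Block_Bytes_Index follow the FDL
   --  declarations for this unit.
   procedure Copy_Msg_To_B (Ctx        : in out Skein_512_Context;
                            Msg        : in     Spark.Crypto.Byte_Seq;
                            Msg_Offset : in     Natural;
                            Num_Bytes  : in     Natural)
   is
      Src       : Natural;
      Dst       : Skein_512_Block_Bytes_Index;
      Final_Src : Natural;
      Final_Dst : Skein_512_Block_Bytes_Index;
   begin
      if Num_Bytes > 0 then
         Src       := Msg_Offset;                       --  check of line 758
         Dst       := Ctx.H.Byte_Count;                 --  check of line 760
         Final_Dst := Dst + (Num_Bytes - 1);            --  check of line 762
         Final_Src := Src + (Num_Bytes - 1);            --  check of line 763
         loop
            Ctx.B (Dst) := Msg (Src);                   --  check of line 766
            --  loop invariant ("assertion of line 768" in the VCs) elided
            exit when Dst >= Final_Dst or Src >= Final_Src;
            Dst := Dst + 1;                             --  check of line 776
            Src := Src + 1;                             --  check of line 777
         end loop;
         Ctx.H.Byte_Count := Ctx.H.Byte_Count + Num_Bytes;  --  check of line 780
      end if;
   end Copy_Msg_To_B;

Each run-time-check VC above corresponds to one of the commented statements. As the simplified file copy_msg_to_b.siv earlier shows, the Simplifier discharges every path except procedure_copy_msg_to_b_6, whose remaining conclusions are the range bounds on element(msg, [src + 1]).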
spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_update/copy_msg_to_b.vsm0000644000175000017500000000006211712765060031557 0ustar eugeneugencopy_msg_to_b,0,0,1,1,0,0,0,100.0, 0.0, 0.0, 0.0, spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_update/copy_msg_to_b.fdl0000644000175000017500000002106411712513676031530 0ustar eugeneugen {*******************************************************} {FDL Declarations} {Examiner Pro Edition} {*******************************************************} {procedure Skein.Skein_512_Update.Copy_Msg_To_B} title procedure copy_msg_to_b; function round__(real) : integer; type natural = integer; type interfaces__unsigned_8 = integer; type interfaces__unsigned_16 = integer; type interfaces__unsigned_32 = integer; type interfaces__unsigned_64 = integer; type spark__unsigned__u6 = integer; type spark__unsigned__u7 = integer; type spark__unsigned__byte = integer; type spark__unsigned__u16 = integer; type spark__unsigned__u32 = integer; type spark__unsigned__u64 = integer; type spark__crypto__word_count_t = integer; type hash_bit_length = integer; type skein_512_state_words_index = integer; type skein_512_block_bytes_count = integer; type skein_512_block_bytes_index = integer; type spark__crypto__byte_seq = array [natural] of spark__unsigned__byte; type spark__crypto__u64_seq = array [ spark__crypto__word_count_t] of spark__unsigned__u64; type tweak_value = record byte_count_lsb : spark__unsigned__u64; byte_count_msb : spark__unsigned__u32; reserved : spark__unsigned__u16; tree_level : spark__unsigned__u7; bit_pad : boolean; field_type : spark__unsigned__u6; first_block : boolean; final_block : boolean end; type context_header = record tweak_words : tweak_value; hash_bit_len : hash_bit_length; byte_count : natural end; type skein_512_context = record h : context_header; x : spark__crypto__u64_seq; b : spark__crypto__byte_seq end; const skein_512_block_bytes_index__base__first : integer = pending; const skein_512_block_bytes_index__base__last : integer = pending; const skein_512_block_bytes_count__base__first : integer = pending; const skein_512_block_bytes_count__base__last : integer = pending; const skein_512_state_words_index__base__first : integer = pending; const skein_512_state_words_index__base__last : integer = pending; const hash_bit_length__base__first : integer = pending; const hash_bit_length__base__last : integer = pending; const spark__crypto__word_count_t__base__first : integer = pending; const spark__crypto__word_count_t__base__last : integer = pending; const spark__unsigned__u64__base__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__base__last : interfaces__unsigned_64 = pending; const spark__unsigned__u32__base__first : interfaces__unsigned_32 = pending; const spark__unsigned__u32__base__last : interfaces__unsigned_32 = pending; const spark__unsigned__u16__base__first : interfaces__unsigned_16 = pending; const spark__unsigned__u16__base__last : interfaces__unsigned_16 = pending; const spark__unsigned__byte__base__first : interfaces__unsigned_8 = pending; const spark__unsigned__byte__base__last : interfaces__unsigned_8 = pending; const spark__unsigned__u7__base__first : spark__unsigned__u7 = pending; const spark__unsigned__u7__base__last : spark__unsigned__u7 = pending; const spark__unsigned__u6__base__first : spark__unsigned__u6 = pending; const spark__unsigned__u6__base__last : spark__unsigned__u6 = pending; const interfaces__unsigned_64__base__first : interfaces__unsigned_64 = 
pending; const interfaces__unsigned_64__base__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_32__base__first : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__base__last : interfaces__unsigned_32 = pending; const interfaces__unsigned_16__base__first : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__base__last : interfaces__unsigned_16 = pending; const interfaces__unsigned_8__base__first : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__base__last : interfaces__unsigned_8 = pending; const natural__base__first : integer = pending; const natural__base__last : integer = pending; const integer__base__first : integer = pending; const integer__base__last : integer = pending; const context_header__size : integer = pending; const tweak_value__size : integer = pending; const msg__index__subtype__1__first : integer = pending; const msg__index__subtype__1__last : integer = pending; const skein_512_context__size : integer = pending; const skein_512_block_bytes_index__first : integer = pending; const skein_512_block_bytes_index__last : integer = pending; const skein_512_block_bytes_index__size : integer = pending; const skein_512_block_bytes_count__first : integer = pending; const skein_512_block_bytes_count__last : integer = pending; const skein_512_block_bytes_count__size : integer = pending; const skein_512_state_words_index__first : integer = pending; const skein_512_state_words_index__last : integer = pending; const skein_512_state_words_index__size : integer = pending; const hash_bit_length__first : integer = pending; const hash_bit_length__last : integer = pending; const hash_bit_length__size : integer = pending; const spark__crypto__word_count_t__first : integer = pending; const spark__crypto__word_count_t__last : integer = pending; const spark__crypto__word_count_t__size : integer = pending; const spark__unsigned__u64__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__last : interfaces__unsigned_64 = pending; const spark__unsigned__u64__modulus : integer = pending; const spark__unsigned__u64__size : integer = pending; const spark__unsigned__u32__first : interfaces__unsigned_32 = pending; const spark__unsigned__u32__last : interfaces__unsigned_32 = pending; const spark__unsigned__u32__modulus : integer = pending; const spark__unsigned__u32__size : integer = pending; const spark__unsigned__u16__first : interfaces__unsigned_16 = pending; const spark__unsigned__u16__last : interfaces__unsigned_16 = pending; const spark__unsigned__u16__modulus : integer = pending; const spark__unsigned__u16__size : integer = pending; const spark__unsigned__byte__first : interfaces__unsigned_8 = pending; const spark__unsigned__byte__last : interfaces__unsigned_8 = pending; const spark__unsigned__byte__modulus : integer = pending; const spark__unsigned__byte__size : integer = pending; const spark__unsigned__u7__first : spark__unsigned__u7 = pending; const spark__unsigned__u7__last : spark__unsigned__u7 = pending; const spark__unsigned__u7__modulus : integer = pending; const spark__unsigned__u7__size : integer = pending; const spark__unsigned__u6__first : spark__unsigned__u6 = pending; const spark__unsigned__u6__last : spark__unsigned__u6 = pending; const spark__unsigned__u6__modulus : integer = pending; const spark__unsigned__u6__size : integer = pending; const interfaces__unsigned_64__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__last : interfaces__unsigned_64 = pending; const 
interfaces__unsigned_64__modulus : integer = pending; const interfaces__unsigned_64__size : integer = pending; const interfaces__unsigned_32__first : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__last : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__modulus : integer = pending; const interfaces__unsigned_32__size : integer = pending; const interfaces__unsigned_16__first : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__last : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__modulus : integer = pending; const interfaces__unsigned_16__size : integer = pending; const interfaces__unsigned_8__first : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__last : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__modulus : integer = pending; const interfaces__unsigned_8__size : integer = pending; const natural__first : integer = pending; const natural__last : integer = pending; const natural__size : integer = pending; const integer__first : integer = pending; const integer__last : integer = pending; const integer__size : integer = pending; var ctx : skein_512_context; var msg : spark__crypto__byte_seq; var msg_offset : integer; var num_bytes : integer; var src : integer; var dst : integer; var final_dst : integer; var final_src : integer; end; spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_update/copy_msg_to_b.rls0000644000175000017500000002357111712513676031570 0ustar eugeneugen /*********************************************************/ /*Proof Rule Declarations*/ /*Examiner Pro Edition*/ /*********************************************************/ /*procedure Skein.Skein_512_Update.Copy_Msg_To_B*/ rule_family copy_msg_to__rules: X requires [X:any] & X <= Y requires [X:ire, Y:ire] & X >= Y requires [X:ire, Y:ire]. copy_msg_to__rules(1): integer__size >= 0 may_be_deduced. copy_msg_to__rules(2): integer__first may_be_replaced_by -2147483648. copy_msg_to__rules(3): integer__last may_be_replaced_by 2147483647. copy_msg_to__rules(4): integer__base__first may_be_replaced_by -2147483648. copy_msg_to__rules(5): integer__base__last may_be_replaced_by 2147483647. copy_msg_to__rules(6): natural__size >= 0 may_be_deduced. copy_msg_to__rules(7): natural__first may_be_replaced_by 0. copy_msg_to__rules(8): natural__last may_be_replaced_by 2147483647. copy_msg_to__rules(9): natural__base__first may_be_replaced_by -2147483648. copy_msg_to__rules(10): natural__base__last may_be_replaced_by 2147483647. copy_msg_to__rules(11): interfaces__unsigned_8__size >= 0 may_be_deduced. copy_msg_to__rules(12): interfaces__unsigned_8__size may_be_replaced_by 8. copy_msg_to__rules(13): interfaces__unsigned_8__first may_be_replaced_by 0. copy_msg_to__rules(14): interfaces__unsigned_8__last may_be_replaced_by 255. copy_msg_to__rules(15): interfaces__unsigned_8__base__first may_be_replaced_by 0. copy_msg_to__rules(16): interfaces__unsigned_8__base__last may_be_replaced_by 255. copy_msg_to__rules(17): interfaces__unsigned_8__modulus may_be_replaced_by 256. copy_msg_to__rules(18): interfaces__unsigned_16__size >= 0 may_be_deduced. copy_msg_to__rules(19): interfaces__unsigned_16__size may_be_replaced_by 16. copy_msg_to__rules(20): interfaces__unsigned_16__first may_be_replaced_by 0. copy_msg_to__rules(21): interfaces__unsigned_16__last may_be_replaced_by 65535. copy_msg_to__rules(22): interfaces__unsigned_16__base__first may_be_replaced_by 0. 
copy_msg_to__rules(23): interfaces__unsigned_16__base__last may_be_replaced_by 65535. copy_msg_to__rules(24): interfaces__unsigned_16__modulus may_be_replaced_by 65536. copy_msg_to__rules(25): interfaces__unsigned_32__size >= 0 may_be_deduced. copy_msg_to__rules(26): interfaces__unsigned_32__size may_be_replaced_by 32. copy_msg_to__rules(27): interfaces__unsigned_32__first may_be_replaced_by 0. copy_msg_to__rules(28): interfaces__unsigned_32__last may_be_replaced_by 4294967295. copy_msg_to__rules(29): interfaces__unsigned_32__base__first may_be_replaced_by 0. copy_msg_to__rules(30): interfaces__unsigned_32__base__last may_be_replaced_by 4294967295. copy_msg_to__rules(31): interfaces__unsigned_32__modulus may_be_replaced_by 4294967296. copy_msg_to__rules(32): interfaces__unsigned_64__size >= 0 may_be_deduced. copy_msg_to__rules(33): interfaces__unsigned_64__size may_be_replaced_by 64. copy_msg_to__rules(34): interfaces__unsigned_64__first may_be_replaced_by 0. copy_msg_to__rules(35): interfaces__unsigned_64__last may_be_replaced_by 18446744073709551615. copy_msg_to__rules(36): interfaces__unsigned_64__base__first may_be_replaced_by 0. copy_msg_to__rules(37): interfaces__unsigned_64__base__last may_be_replaced_by 18446744073709551615. copy_msg_to__rules(38): interfaces__unsigned_64__modulus may_be_replaced_by 18446744073709551616. copy_msg_to__rules(39): spark__unsigned__u6__size >= 0 may_be_deduced. copy_msg_to__rules(40): spark__unsigned__u6__first may_be_replaced_by 0. copy_msg_to__rules(41): spark__unsigned__u6__last may_be_replaced_by 63. copy_msg_to__rules(42): spark__unsigned__u6__base__first may_be_replaced_by 0. copy_msg_to__rules(43): spark__unsigned__u6__base__last may_be_replaced_by 63. copy_msg_to__rules(44): spark__unsigned__u6__modulus may_be_replaced_by 64. copy_msg_to__rules(45): spark__unsigned__u7__size >= 0 may_be_deduced. copy_msg_to__rules(46): spark__unsigned__u7__first may_be_replaced_by 0. copy_msg_to__rules(47): spark__unsigned__u7__last may_be_replaced_by 127. copy_msg_to__rules(48): spark__unsigned__u7__base__first may_be_replaced_by 0. copy_msg_to__rules(49): spark__unsigned__u7__base__last may_be_replaced_by 127. copy_msg_to__rules(50): spark__unsigned__u7__modulus may_be_replaced_by 128. copy_msg_to__rules(51): spark__unsigned__byte__size >= 0 may_be_deduced. copy_msg_to__rules(52): spark__unsigned__byte__first may_be_replaced_by 0. copy_msg_to__rules(53): spark__unsigned__byte__last may_be_replaced_by 255. copy_msg_to__rules(54): spark__unsigned__byte__base__first may_be_replaced_by 0. copy_msg_to__rules(55): spark__unsigned__byte__base__last may_be_replaced_by 255. copy_msg_to__rules(56): spark__unsigned__byte__modulus may_be_replaced_by 256. copy_msg_to__rules(57): spark__unsigned__u16__size >= 0 may_be_deduced. copy_msg_to__rules(58): spark__unsigned__u16__first may_be_replaced_by 0. copy_msg_to__rules(59): spark__unsigned__u16__last may_be_replaced_by 65535. copy_msg_to__rules(60): spark__unsigned__u16__base__first may_be_replaced_by 0. copy_msg_to__rules(61): spark__unsigned__u16__base__last may_be_replaced_by 65535. copy_msg_to__rules(62): spark__unsigned__u16__modulus may_be_replaced_by 65536. copy_msg_to__rules(63): spark__unsigned__u32__size >= 0 may_be_deduced. copy_msg_to__rules(64): spark__unsigned__u32__first may_be_replaced_by 0. copy_msg_to__rules(65): spark__unsigned__u32__last may_be_replaced_by 4294967295. copy_msg_to__rules(66): spark__unsigned__u32__base__first may_be_replaced_by 0. 
copy_msg_to__rules(67): spark__unsigned__u32__base__last may_be_replaced_by 4294967295. copy_msg_to__rules(68): spark__unsigned__u32__modulus may_be_replaced_by 4294967296. copy_msg_to__rules(69): spark__unsigned__u64__size >= 0 may_be_deduced. copy_msg_to__rules(70): spark__unsigned__u64__first may_be_replaced_by 0. copy_msg_to__rules(71): spark__unsigned__u64__last may_be_replaced_by 18446744073709551615. copy_msg_to__rules(72): spark__unsigned__u64__base__first may_be_replaced_by 0. copy_msg_to__rules(73): spark__unsigned__u64__base__last may_be_replaced_by 18446744073709551615. copy_msg_to__rules(74): spark__unsigned__u64__modulus may_be_replaced_by 18446744073709551616. copy_msg_to__rules(75): spark__crypto__word_count_t__size >= 0 may_be_deduced. copy_msg_to__rules(76): spark__crypto__word_count_t__first may_be_replaced_by 0. copy_msg_to__rules(77): spark__crypto__word_count_t__last may_be_replaced_by 268435455. copy_msg_to__rules(78): spark__crypto__word_count_t__base__first may_be_replaced_by -2147483648. copy_msg_to__rules(79): spark__crypto__word_count_t__base__last may_be_replaced_by 2147483647. copy_msg_to__rules(80): hash_bit_length__size >= 0 may_be_deduced. copy_msg_to__rules(81): hash_bit_length__first may_be_replaced_by 0. copy_msg_to__rules(82): hash_bit_length__last may_be_replaced_by 2147483640. copy_msg_to__rules(83): hash_bit_length__base__first may_be_replaced_by -2147483648. copy_msg_to__rules(84): hash_bit_length__base__last may_be_replaced_by 2147483647. copy_msg_to__rules(85): skein_512_state_words_index__size >= 0 may_be_deduced. copy_msg_to__rules(86): skein_512_state_words_index__first may_be_replaced_by 0. copy_msg_to__rules(87): skein_512_state_words_index__last may_be_replaced_by 7. copy_msg_to__rules(88): skein_512_state_words_index__base__first may_be_replaced_by -2147483648. copy_msg_to__rules(89): skein_512_state_words_index__base__last may_be_replaced_by 2147483647. copy_msg_to__rules(90): skein_512_block_bytes_count__size >= 0 may_be_deduced. copy_msg_to__rules(91): skein_512_block_bytes_count__first may_be_replaced_by 0. copy_msg_to__rules(92): skein_512_block_bytes_count__last may_be_replaced_by 64. copy_msg_to__rules(93): skein_512_block_bytes_count__base__first may_be_replaced_by -2147483648. copy_msg_to__rules(94): skein_512_block_bytes_count__base__last may_be_replaced_by 2147483647. copy_msg_to__rules(95): skein_512_block_bytes_index__size >= 0 may_be_deduced. copy_msg_to__rules(96): skein_512_block_bytes_index__first may_be_replaced_by 0. copy_msg_to__rules(97): skein_512_block_bytes_index__last may_be_replaced_by 63. copy_msg_to__rules(98): skein_512_block_bytes_index__base__first may_be_replaced_by -2147483648. copy_msg_to__rules(99): skein_512_block_bytes_index__base__last may_be_replaced_by 2147483647. copy_msg_to__rules(100): skein_512_context__size >= 0 may_be_deduced. copy_msg_to__rules(101): A = B may_be_deduced_from [goal(checktype(A,skein_512_context)), goal(checktype(B,skein_512_context)), fld_h(A) = fld_h(B), fld_x(A) = fld_x(B), fld_b(A) = fld_b(B)]. copy_msg_to__rules(102): msg__index__subtype__1__first >= natural__first may_be_deduced. copy_msg_to__rules(103): msg__index__subtype__1__last <= natural__last may_be_deduced. copy_msg_to__rules(104): msg__index__subtype__1__first <= msg__index__subtype__1__last may_be_deduced. copy_msg_to__rules(105): msg__index__subtype__1__last >= natural__first may_be_deduced. copy_msg_to__rules(106): msg__index__subtype__1__first <= natural__last may_be_deduced. 
copy_msg_to__rules(107): tweak_value__size >= 0 may_be_deduced. copy_msg_to__rules(108): tweak_value__size may_be_replaced_by 128. copy_msg_to__rules(109): A = B may_be_deduced_from [goal(checktype(A,tweak_value)), goal(checktype(B,tweak_value)), fld_byte_count_lsb(A) = fld_byte_count_lsb(B), fld_byte_count_msb(A) = fld_byte_count_msb(B), fld_reserved(A) = fld_reserved(B), fld_tree_level(A) = fld_tree_level(B), fld_bit_pad(A) = fld_bit_pad(B), fld_field_type(A) = fld_field_type(B), fld_first_block(A) = fld_first_block(B), fld_final_block(A) = fld_final_block(B)]. copy_msg_to__rules(110): context_header__size >= 0 may_be_deduced. copy_msg_to__rules(111): A = B may_be_deduced_from [goal(checktype(A,context_header)), goal(checktype(B,context_header)), fld_tweak_words(A) = fld_tweak_words(B), fld_hash_bit_len(A) = fld_hash_bit_len(B), fld_byte_count(A) = fld_byte_count(B)]. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_update/copy_msg_to_b.slg0000644000175000017500000035762011712513676031562 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Update.Copy_Msg_To_B @@@@@@@@@@ VC: procedure_copy_msg_to_b_1. @@@@@@@@@@ %%% Simplified H11 on reading formula in, to give: %%% H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H12 on reading formula in, to give: %%% H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) *** Proved C1: msg_offset >= natural__first using hypothesis H7. *** Proved C2: msg_offset <= natural__last using hypothesis H8. *** PROVED VC. @@@@@@@@@@ VC: procedure_copy_msg_to_b_2. @@@@@@@@@@ %%% Simplified H11 on reading formula in, to give: %%% H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H12 on reading formula in, to give: %%% H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H33 has been replaced by "true". (It is already present, as H8) . 
-S- Applied substitution rule copy_msg_to__rules(96). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H11: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New C1: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule copy_msg_to__rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H6: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) <= 63 New H11: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New C2: fld_byte_count(fld_h(ctx)) <= 63 -S- Applied substitution rule copy_msg_to__rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H7: msg_offset >= 0 New H9: num_bytes >= 0 New H13: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule copy_msg_to__rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H8: msg_offset <= 2147483647 New H10: num_bytes <= 2147483647 New H14: fld_byte_count(fld_h(ctx)) <= 2147483647 -S- Applied substitution rule copy_msg_to__rules(40). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H19: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H20: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule copy_msg_to__rules(46). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule copy_msg_to__rules(52). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H11: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H30: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule copy_msg_to__rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H11: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H30: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) -S- Applied substitution rule copy_msg_to__rules(58). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H24: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H25: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule copy_msg_to__rules(64). 
This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule copy_msg_to__rules(70). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(81). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H15: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule copy_msg_to__rules(82). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H16: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule copy_msg_to__rules(86). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H12: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(87). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H12: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) *** Proved C1: fld_byte_count(fld_h(ctx)) >= 0 using hypothesis H13. --- Eliminated hypothesis H17 (true-hypothesis). --- Eliminated hypothesis H18 (true-hypothesis). --- Eliminated hypothesis H21 (true-hypothesis). --- Eliminated hypothesis H32 (true-hypothesis). --- Eliminated hypothesis H33 (true-hypothesis). --- Eliminated hypothesis H9 (redundant, given H31). --- Eliminated hypothesis H10 (redundant, given H6 & H13). --- Eliminated hypothesis H14 (redundant, given H6 & H31). --- Eliminated hypothesis H15 (redundant, given H1). *** Proved C2: fld_byte_count(fld_h(ctx)) <= 63 via its standard form, which is: Std.Fm C2: - fld_byte_count(fld_h(ctx)) > - 64 using hypotheses H6 & H31. *** PROVED VC. @@@@@@@@@@ VC: procedure_copy_msg_to_b_3. 
@@@@@@@@@@ %%% Simplified H11 on reading formula in, to give: %%% H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H12 on reading formula in, to give: %%% H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H33 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H36 has been replaced by "true". (It is already present, as H34). --- Hypothesis H37 has been replaced by "true". (It is already present, as H35). *** Proved C2: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) <= skein_512_block_bytes_index__last using hypothesis H6. -S- Applied substitution rule copy_msg_to__rules(96). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H11: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H34: fld_byte_count(fld_h(ctx)) >= 0 New C1: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) >= 0 -S- Applied substitution rule copy_msg_to__rules(4). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New C3: num_bytes >= - 2147483647 -S- Applied substitution rule copy_msg_to__rules(5). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New C4: num_bytes <= 2147483648 *** Proved C1: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) >= 0 using hypotheses H31 & H34. *** Proved C3: num_bytes >= - 2147483647 using hypothesis H31. -S- Applied substitution rule copy_msg_to__rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H7: msg_offset >= 0 New H9: num_bytes >= 0 New H13: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule copy_msg_to__rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H8: msg_offset <= 2147483647 New H10: num_bytes <= 2147483647 New H14: fld_byte_count(fld_h(ctx)) <= 2147483647 -S- Applied substitution rule copy_msg_to__rules(40). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H19: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H20: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule copy_msg_to__rules(46). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. 
New H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule copy_msg_to__rules(52). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H11: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H30: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule copy_msg_to__rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H11: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H30: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) -S- Applied substitution rule copy_msg_to__rules(58). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H24: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H25: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule copy_msg_to__rules(64). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule copy_msg_to__rules(70). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(81). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H15: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule copy_msg_to__rules(82). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H16: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule copy_msg_to__rules(86). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H12: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(87). 
This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H12: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H6: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) <= 63 New H35: fld_byte_count(fld_h(ctx)) <= 63 New H11: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) *** Proved C4: num_bytes <= 2147483648 using hypothesis H10. *** PROVED VC. @@@@@@@@@@ VC: procedure_copy_msg_to_b_4. @@@@@@@@@@ %%% Simplified H11 on reading formula in, to give: %%% H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H12 on reading formula in, to give: %%% H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H33 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H36 has been replaced by "true". (It is already present, as H34). --- Hypothesis H37 has been replaced by "true". (It is already present, as H35). --- Hypothesis H39 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H42 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H43 has been replaced by "true". (It is already present, as H8) . *** Proved C3: num_bytes - 1 >= integer__base__first using hypothesis H40. *** Proved C4: num_bytes - 1 <= integer__base__last using hypothesis H41. -S- Applied substitution rule copy_msg_to__rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H7: msg_offset >= 0 New H9: num_bytes >= 0 New H13: fld_byte_count(fld_h(ctx)) >= 0 New C1: msg_offset + (num_bytes - 1) >= 0 -S- Applied substitution rule copy_msg_to__rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H8: msg_offset <= 2147483647 New H10: num_bytes <= 2147483647 New H14: fld_byte_count(fld_h(ctx)) <= 2147483647 New C2: msg_offset + (num_bytes - 1) <= 2147483647 *** Proved C1: msg_offset + (num_bytes - 1) >= 0 using hypotheses H7 & H31. -S- Applied substitution rule copy_msg_to__rules(4). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H40: num_bytes >= - 2147483647 -S- Applied substitution rule copy_msg_to__rules(5). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H41: num_bytes <= 2147483648 -S- Applied substitution rule copy_msg_to__rules(40). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. 
New H19: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H20: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule copy_msg_to__rules(46). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule copy_msg_to__rules(52). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H30: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule copy_msg_to__rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H30: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) -S- Applied substitution rule copy_msg_to__rules(58). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H24: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H25: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule copy_msg_to__rules(64). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule copy_msg_to__rules(70). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(81). 
This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H15: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule copy_msg_to__rules(82). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H16: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule copy_msg_to__rules(86). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H12: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(87). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H12: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(96). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H34: fld_byte_count(fld_h(ctx)) >= 0 New H38: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) >= 0 New H11: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule copy_msg_to__rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H6: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) <= 63 New H35: fld_byte_count(fld_h(ctx)) <= 63 New H11: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) --- Eliminated hypothesis H17 (true-hypothesis). --- Eliminated hypothesis H18 (true-hypothesis). --- Eliminated hypothesis H21 (true-hypothesis). --- Eliminated hypothesis H32 (true-hypothesis). --- Eliminated hypothesis H33 (true-hypothesis). --- Eliminated hypothesis H36 (true-hypothesis). --- Eliminated hypothesis H37 (true-hypothesis). --- Eliminated hypothesis H39 (true-hypothesis). --- Eliminated hypothesis H42 (true-hypothesis). --- Eliminated hypothesis H43 (true-hypothesis). --- Eliminated hypothesis H34 (duplicate of H13). --- Eliminated hypothesis H9 (redundant, given H31). --- Eliminated hypothesis H10 (redundant, given H6 & H13). --- Eliminated hypothesis H14 (redundant, given H35). --- Eliminated hypothesis H15 (redundant, given H1). --- Eliminated hypothesis H40 (redundant, given H31). --- Eliminated hypothesis H41 (redundant, given H6 & H13). *** Proved C2: msg_offset + (num_bytes - 1) <= 2147483647 using hypothesis H5. *** PROVED VC. @@@@@@@@@@ VC: procedure_copy_msg_to_b_5. 
@@@@@@@@@@ %%% Simplified H11 on reading formula in, to give: %%% H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H12 on reading formula in, to give: %%% H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H33 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H36 has been replaced by "true". (It is already present, as H34). --- Hypothesis H37 has been replaced by "true". (It is already present, as H35). --- Hypothesis H39 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H42 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H43 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H46 has been replaced by "true". (It is already present, as H40). --- Hypothesis H47 has been replaced by "true". (It is already present, as H41). --- Hypothesis H48 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H49 has been replaced by "true". (It is already present, as H8) . *** Proved C1: element(msg, [msg_offset]) >= spark__unsigned__byte__first using hypotheses H3, H4 & H30. *** Proved C2: element(msg, [msg_offset]) <= spark__unsigned__byte__last using hypotheses H3, H4 & H30. *** Proved C3: msg_offset >= msg__index__subtype__1__first using hypothesis H3. *** Proved C4: msg_offset <= msg__index__subtype__1__last using hypothesis H4. *** Proved C5: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first using hypothesis H34. *** Proved C6: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last using hypothesis H35. *** PROVED VC. @@@@@@@@@@ VC: procedure_copy_msg_to_b_6. 
@@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h(ctx~)) %%% Simplified H3 on reading formula in, to give: %%% H3: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h(ctx~)) %%% Simplified H12 on reading formula in, to give: %%% H12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H13 on reading formula in, to give: %%% H13: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H31 on reading formula in, to give: %%% H31: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H32 on reading formula in, to give: %%% H32: fld_hash_bit_len(fld_h(ctx~)) > 0 %%% Simplified H37 on reading formula in, to give: %%% H37: fld_byte_count(fld_h(ctx~)) + (num_bytes - 1) <= skein_512_block_bytes_index__last --- Hypothesis H47 has been replaced by "true". (It is already present, as H40). --- Hypothesis H48 has been replaced by "true". (It is already present, as H41). --- Hypothesis H51 has been replaced by "true". (It is already present, as H38). --- Hypothesis H52 has been replaced by "true". (It is already present, as H39). --- Hypothesis H55 has been replaced by "true". (It is already present, as H53). --- Hypothesis H56 has been replaced by "true". (It is already present, as H54). *** Proved C5: dst + 1 >= skein_512_block_bytes_index__first using hypothesis H49. *** Proved C6: dst + 1 <= skein_512_block_bytes_index__last using hypothesis H50. -S- Applied substitution rule copy_msg_to__rules(52). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H31: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) New C1: element(msg, [src + 1]) >= 0 -S- Applied substitution rule copy_msg_to__rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H31: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) New C2: element(msg, [src + 1]) <= 255 >>> Restructured hypothesis H46 into: >>> H46: dst < final_dst and src < final_src >>> Hypothesis H46 has now been split into two, giving: >>> H57: dst < final_dst >>> H58: src < final_src -S- Applied substitution rule copy_msg_to__rules(7). This was achieved by replacing all occurrences of natural__first by: 0. 
New H8: msg_offset >= 0 New H10: num_bytes >= 0 New H14: fld_byte_count(fld_h(ctx)) >= 0 New H38: src >= 0 New H44: final_src >= 0 New H53: src >= - 1 -S- Applied substitution rule copy_msg_to__rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H9: msg_offset <= 2147483647 New H11: num_bytes <= 2147483647 New H15: fld_byte_count(fld_h(ctx)) <= 2147483647 New H39: src <= 2147483647 New H45: final_src <= 2147483647 New H54: src <= 2147483646 -S- Applied substitution rule copy_msg_to__rules(40). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H20: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H21: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule copy_msg_to__rules(46). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H24: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule copy_msg_to__rules(58). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H25: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H26: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule copy_msg_to__rules(64). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H28: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule copy_msg_to__rules(70). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H13: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H30: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H13: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(81). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H16: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule copy_msg_to__rules(82). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H17: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule copy_msg_to__rules(86). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. 
New H13: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(87). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H13: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(91). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H4: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule copy_msg_to__rules(92). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H5: fld_byte_count(fld_h(ctx)) <= 64 -S- Applied substitution rule copy_msg_to__rules(96). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H12: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H40: dst >= 0 New H42: final_dst >= 0 New H49: dst >= - 1 -S- Applied substitution rule copy_msg_to__rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H6: fld_byte_count(fld_h(ctx)) + num_bytes <= 64 New H37: fld_byte_count(fld_h(ctx~)) + (num_bytes - 1) <= 63 New H41: dst <= 63 New H43: final_dst <= 63 New H50: dst <= 62 New H12: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) *** Proved C3: src + 1 >= msg__index__subtype__1__first using hypotheses H33 & H38. *** Proved C4: src + 1 <= msg__index__subtype__1__last via its standard form, which is: Std.Fm C4: msg__index__subtype__1__last - src > 0 using hypotheses H7 & H58. --- Eliminated hypothesis H18 (true-hypothesis). --- Eliminated hypothesis H19 (true-hypothesis). --- Eliminated hypothesis H22 (true-hypothesis). --- Eliminated hypothesis H47 (true-hypothesis). --- Eliminated hypothesis H48 (true-hypothesis). --- Eliminated hypothesis H51 (true-hypothesis). --- Eliminated hypothesis H52 (true-hypothesis). --- Eliminated hypothesis H55 (true-hypothesis). --- Eliminated hypothesis H56 (true-hypothesis). --- Eliminated hypothesis H14 (duplicate of H4). --- Eliminated hypothesis H11 (redundant, given H4 & H6). --- Eliminated hypothesis H15 (redundant, given H5). --- Eliminated hypothesis H16 (redundant, given H1). --- Eliminated hypothesis H39 (redundant, given H54). --- Eliminated hypothesis H41 (redundant, given H50). --- Eliminated hypothesis H44 (redundant, given H38 & H58). --- Eliminated hypothesis H49 (redundant, given H40). --- Eliminated hypothesis H53 (redundant, given H38). 
+++ New H59: integer__size >= 0 +++ New H60: natural__size >= 0 +++ New H61: spark__unsigned__u6__size >= 0 +++ New H62: spark__unsigned__u7__size >= 0 +++ New H63: spark__unsigned__byte__size >= 0 +++ New H64: spark__unsigned__u16__size >= 0 +++ New H65: spark__unsigned__u32__size >= 0 +++ New H66: spark__unsigned__u64__size >= 0 +++ New H67: spark__crypto__word_count_t__size >= 0 +++ New H68: hash_bit_length__size >= 0 +++ New H69: skein_512_state_words_index__size >= 0 +++ New H70: skein_512_block_bytes_count__size >= 0 +++ New H71: skein_512_block_bytes_index__size >= 0 +++ New H72: skein_512_context__size >= 0 +++ New H73: msg__index__subtype__1__first <= msg__index__subtype__1__last +++ New H74: context_header__size >= 0 +++ New H75: msg__index__subtype__1__first >= 0 +++ New H76: msg__index__subtype__1__last >= 0 +++ New H77: msg__index__subtype__1__last <= 2147483647 +++ New H78: msg__index__subtype__1__first <= 2147483647 @@@@@@@@@@ VC: procedure_copy_msg_to_b_7. @@@@@@@@@@ %%% Simplified H11 on reading formula in, to give: %%% H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H12 on reading formula in, to give: %%% H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H33 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H36 has been replaced by "true". (It is already present, as H34). --- Hypothesis H37 has been replaced by "true". (It is already present, as H35). --- Hypothesis H39 has been replaced by "true". (It is already present, as H6) . --- Hypothesis H42 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H43 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H46 has been replaced by "true". (It is already present, as H40). --- Hypothesis H47 has been replaced by "true". (It is already present, as H41). --- Hypothesis H48 has been replaced by "true". (It is already present, as H7) . --- Hypothesis H49 has been replaced by "true". (It is already present, as H8) . --- Hypothesis H52 has been replaced by "true". (It is already present, as H3) . --- Hypothesis H53 has been replaced by "true". (It is already present, as H4) . --- Hypothesis H54 has been replaced by "true". (It is already present, as H34). --- Hypothesis H55 has been replaced by "true". (It is already present, as H35). 
%%% Simplified C1 on reading formula in, to give: %%% C1: fld_hash_bit_len(fld_h(ctx)) > 0 %%% Simplified C2 on reading formula in, to give: %%% C2: true %%% Simplified C3 on reading formula in, to give: %%% C3: true %%% Simplified C4 on reading formula in, to give: %%% C4: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first %%% Simplified C5 on reading formula in, to give: %%% C5: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last %%% Simplified C6 on reading formula in, to give: %%% C6: fld_byte_count(fld_h(ctx)) + num_bytes - 1 <= skein_512_block_bytes_index__last %%% Simplified C12 on reading formula in, to give: %%% C12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(update(fld_b(ctx), [ fld_byte_count(fld_h(ctx))], element(msg, [msg_offset])), [i___2]) and element(update(fld_b(ctx), [fld_byte_count(fld_h(ctx))], element( msg, [msg_offset])), [i___2]) <= spark__unsigned__byte__last) %%% Simplified C13 on reading formula in, to give: %%% C13: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified C14 on reading formula in, to give: %%% C14: fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified C15 on reading formula in, to give: %%% C15: fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified C16 on reading formula in, to give: %%% C16: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first %%% Simplified C17 on reading formula in, to give: %%% C17: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last %%% Simplified C20 on reading formula in, to give: %%% C20: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first %%% Simplified C21 on reading formula in, to give: %%% C21: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last %%% Simplified C23 on reading formula in, to give: %%% C23: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first %%% Simplified C24 on reading formula in, to give: %%% C24: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last %%% Simplified C25 on reading formula in, to give: %%% C25: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first %%% Simplified C26 on reading formula in, to give: %%% C26: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last %%% Simplified C27 on reading formula in, to give: %%% C27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first %%% Simplified C28 on reading formula in, to give: %%% C28: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last %%% Simplified C29 on reading formula in, to give: %%% C29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first %%% Simplified C30 on reading formula in, to give: %%% C30: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last %%% Simplified C31 on reading formula in, to give: %%% C31: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) *** Proved C1: fld_hash_bit_len(fld_h(ctx)) > 0 using hypothesis H1. 
*** Proved C2: true *** Proved C3: true *** Proved C7: msg_offset + (num_bytes - 1) <= msg__index__subtype__1__last using hypothesis H5. *** Proved C8: msg_offset >= natural__first using hypothesis H7. *** Proved C9: msg_offset <= natural__last using hypothesis H8. *** Proved C10: num_bytes >= natural__first using hypothesis H9. *** Proved C11: num_bytes <= natural__last using hypothesis H10. *** Proved C12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(update(fld_b(ctx), [ fld_byte_count(fld_h(ctx))], element(msg, [msg_offset])), [i___2]) and element(update(fld_b(ctx), [fld_byte_count(fld_h(ctx))], element( msg, [msg_offset])), [i___2]) <= spark__unsigned__byte__last) using hypotheses H11, H34, H35, H50 & H51. *** Proved C13: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) using hypothesis H12. *** Proved C14: fld_byte_count(fld_h(ctx)) >= natural__first using hypothesis H13. *** Proved C15: fld_byte_count(fld_h(ctx)) <= natural__last using hypothesis H14. *** Proved C16: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first using hypothesis H15. *** Proved C17: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last using hypothesis H16. *** Proved C18: true *** Proved C19: true *** Proved C20: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first using hypothesis H19. *** Proved C21: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last using hypothesis H20. *** Proved C22: true *** Proved C23: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first using hypothesis H22. *** Proved C24: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last using hypothesis H23. *** Proved C25: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first using hypothesis H24. *** Proved C26: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last using hypothesis H25. *** Proved C27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first using hypothesis H26. *** Proved C28: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last using hypothesis H27. *** Proved C29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first using hypothesis H28. *** Proved C30: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last using hypothesis H29. *** Proved C31: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) using hypothesis H30. *** Proved C32: fld_hash_bit_len(fld_h(ctx)) > 0 using hypothesis H1. *** Proved C33: msg__index__subtype__1__first = 0 using hypothesis H2. *** Proved C34: msg_offset >= msg__index__subtype__1__first using hypothesis H3. *** Proved C35: msg_offset <= msg__index__subtype__1__last using hypothesis H4. *** Proved C36: msg_offset + (num_bytes - 1) <= msg__index__subtype__1__last using hypothesis H5. *** Proved C37: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) <= skein_512_block_bytes_index__last using hypothesis H6. -S- Applied substitution rule copy_msg_to__rules(91). 
This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New C4: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule copy_msg_to__rules(92). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New C5: fld_byte_count(fld_h(ctx)) <= 64 -S- Applied substitution rule copy_msg_to__rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H6: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) <= 63 New H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H35: fld_byte_count(fld_h(ctx)) <= 63 New C6: fld_byte_count(fld_h(ctx)) + num_bytes <= 64 *** Proved C5: fld_byte_count(fld_h(ctx)) <= 64 using hypothesis H35. -S- Applied substitution rule copy_msg_to__rules(4). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H40: num_bytes >= - 2147483647 -S- Applied substitution rule copy_msg_to__rules(5). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H41: num_bytes <= 2147483648 -S- Applied substitution rule copy_msg_to__rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H7: msg_offset >= 0 New H9: num_bytes >= 0 New H13: fld_byte_count(fld_h(ctx)) >= 0 New H44: msg_offset + (num_bytes - 1) >= 0 -S- Applied substitution rule copy_msg_to__rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H8: msg_offset <= 2147483647 New H10: num_bytes <= 2147483647 New H14: fld_byte_count(fld_h(ctx)) <= 2147483647 New H45: msg_offset + (num_bytes - 1) <= 2147483647 -S- Applied substitution rule copy_msg_to__rules(40). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H19: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H20: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule copy_msg_to__rules(46). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule copy_msg_to__rules(52). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H30: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) New H50: element(msg, [msg_offset]) >= 0 -S- Applied substitution rule copy_msg_to__rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. 
New H51: element(msg, [msg_offset]) <= 255 New H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H30: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) -S- Applied substitution rule copy_msg_to__rules(58). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H24: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H25: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule copy_msg_to__rules(64). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule copy_msg_to__rules(70). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(81). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H15: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule copy_msg_to__rules(82). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H16: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule copy_msg_to__rules(86). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H12: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(87). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H12: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(96). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. 
New H34: fld_byte_count(fld_h(ctx)) >= 0 New H38: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) >= 0 New H11: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) *** Proved C4: fld_byte_count(fld_h(ctx)) >= 0 using hypothesis H13. *** Proved C6: fld_byte_count(fld_h(ctx)) + num_bytes <= 64 via its standard form, which is: Std.Fm C6: - num_bytes - fld_byte_count(fld_h(ctx)) > - 65 using hypothesis H6. *** PROVED VC. @@@@@@@@@@ VC: procedure_copy_msg_to_b_8. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h(ctx~)) %%% Simplified H3 on reading formula in, to give: %%% H3: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h(ctx~)) %%% Simplified H12 on reading formula in, to give: %%% H12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H13 on reading formula in, to give: %%% H13: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H31 on reading formula in, to give: %%% H31: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H32 on reading formula in, to give: %%% H32: fld_hash_bit_len(fld_h(ctx~)) > 0 %%% Simplified H37 on reading formula in, to give: %%% H37: fld_byte_count(fld_h(ctx~)) + (num_bytes - 1) <= skein_512_block_bytes_index__last --- Hypothesis H47 has been replaced by "true". (It is already present, as H40). --- Hypothesis H48 has been replaced by "true". (It is already present, as H41). --- Hypothesis H51 has been replaced by "true". (It is already present, as H38). --- Hypothesis H52 has been replaced by "true". (It is already present, as H39). --- Hypothesis H55 has been replaced by "true". (It is already present, as H53). --- Hypothesis H56 has been replaced by "true". (It is already present, as H54). --- Hypothesis H61 has been replaced by "true". (It is already present, as H49). --- Hypothesis H62 has been replaced by "true". (It is already present, as H50). 
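The only non-trivial arithmetic left in VC procedure_copy_msg_to_b_7 is the C6 step just above: after substitution, H6 reads fld_byte_count(fld_h(ctx)) + (num_bytes - 1) <= 63, which is the same inequality as C6, fld_byte_count(fld_h(ctx)) + num_bytes <= 64, and the Simplifier recognises this via the "standard form" - num_bytes - fld_byte_count(fld_h(ctx)) > - 65. The small Ada check below replays that inference exhaustively over small ranges, with local stand-ins for the FDL terms; it is an illustration only, not code from the archive.

--  Sketch: replaying the H6 => C6 step of procedure_copy_msg_to_b_7.
procedure Check_C6_Sketch is
   subtype Count is Integer range 0 .. 64;
begin
   for Byte_Count in Count loop        --  stand-in for fld_byte_count(fld_h(ctx))
      for Num_Bytes in Count loop      --  stand-in for num_bytes
         if Byte_Count + (Num_Bytes - 1) <= 63 then      --  H6 after substitution
            --  C6 and its standard form must both follow from H6.
            if not (Byte_Count + Num_Bytes <= 64                   --  C6
                    and then -Num_Bytes - Byte_Count > -65)        --  Std.Fm C6
            then
               raise Program_Error;
            end if;
         end if;
      end loop;
   end loop;
end Check_C6_Sketch;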
%%% Simplified C1 on reading formula in, to give: %%% C1: fld_hash_bit_len(fld_h(ctx)) > 0 %%% Simplified C2 on reading formula in, to give: %%% C2: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h(ctx~)) %%% Simplified C3 on reading formula in, to give: %%% C3: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h(ctx~)) %%% Simplified C4 on reading formula in, to give: %%% C4: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first %%% Simplified C5 on reading formula in, to give: %%% C5: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last %%% Simplified C6 on reading formula in, to give: %%% C6: fld_byte_count(fld_h(ctx)) + num_bytes - 1 <= skein_512_block_bytes_index__last %%% Simplified C12 on reading formula in, to give: %%% C12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(update(fld_b(ctx), [dst + 1], element(msg, [src + 1])), [i___2]) and element(update(fld_b(ctx), [ dst + 1], element(msg, [src + 1])), [i___2]) <= spark__unsigned__byte__last) %%% Simplified C13 on reading formula in, to give: %%% C13: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified C14 on reading formula in, to give: %%% C14: fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified C15 on reading formula in, to give: %%% C15: fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified C16 on reading formula in, to give: %%% C16: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first %%% Simplified C17 on reading formula in, to give: %%% C17: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last %%% Simplified C20 on reading formula in, to give: %%% C20: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first %%% Simplified C21 on reading formula in, to give: %%% C21: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last %%% Simplified C23 on reading formula in, to give: %%% C23: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first %%% Simplified C24 on reading formula in, to give: %%% C24: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last %%% Simplified C25 on reading formula in, to give: %%% C25: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first %%% Simplified C26 on reading formula in, to give: %%% C26: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last %%% Simplified C27 on reading formula in, to give: %%% C27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first %%% Simplified C28 on reading formula in, to give: %%% C28: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last %%% Simplified C29 on reading formula in, to give: %%% C29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first %%% Simplified C30 on reading formula in, to give: %%% C30: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last %%% Simplified C31 on reading formula in, to give: %%% C31: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) %%% Simplified C32 on reading formula in, to give: %%% C32: 
fld_hash_bit_len(fld_h(ctx~)) > 0 %%% Simplified C37 on reading formula in, to give: %%% C37: fld_byte_count(fld_h(ctx~)) + (num_bytes - 1) <= skein_512_block_bytes_index__last *** Proved C1: fld_hash_bit_len(fld_h(ctx)) > 0 using hypothesis H1. *** Proved C2: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h(ctx~)) using hypothesis H2. *** Proved C3: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h(ctx~)) using hypothesis H3. *** Proved C4: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first using hypothesis H4. *** Proved C5: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last using hypothesis H5. *** Proved C6: fld_byte_count(fld_h(ctx)) + num_bytes - 1 <= skein_512_block_bytes_index__last using hypothesis H6. *** Proved C7: final_src <= msg__index__subtype__1__last using hypothesis H7. *** Proved C8: msg_offset >= natural__first using hypothesis H8. *** Proved C9: msg_offset <= natural__last using hypothesis H9. *** Proved C10: num_bytes >= natural__first using hypothesis H10. *** Proved C11: num_bytes <= natural__last using hypothesis H11. *** Proved C12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(update(fld_b(ctx), [dst + 1], element(msg, [src + 1])), [i___2]) and element(update(fld_b(ctx), [ dst + 1], element(msg, [src + 1])), [i___2]) <= spark__unsigned__byte__last) using hypotheses H12, H49, H50, H57 & H58. *** Proved C13: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) using hypothesis H13. *** Proved C14: fld_byte_count(fld_h(ctx)) >= natural__first using hypothesis H14. *** Proved C15: fld_byte_count(fld_h(ctx)) <= natural__last using hypothesis H15. *** Proved C16: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first using hypothesis H16. *** Proved C17: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last using hypothesis H17. *** Proved C18: true *** Proved C19: true *** Proved C20: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first using hypothesis H20. *** Proved C21: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last using hypothesis H21. *** Proved C22: true *** Proved C23: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first using hypothesis H23. *** Proved C24: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last using hypothesis H24. *** Proved C25: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first using hypothesis H25. *** Proved C26: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last using hypothesis H26. *** Proved C27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first using hypothesis H27. *** Proved C28: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last using hypothesis H28. *** Proved C29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first using hypothesis H29. *** Proved C30: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last using hypothesis H30. 
*** Proved C31: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) using hypothesis H31. *** Proved C32: fld_hash_bit_len(fld_h(ctx~)) > 0 using hypothesis H32. *** Proved C33: msg__index__subtype__1__first = 0 using hypothesis H33. *** Proved C34: msg_offset >= msg__index__subtype__1__first using hypothesis H34. *** Proved C35: msg_offset <= msg__index__subtype__1__last using hypothesis H35. *** Proved C36: msg_offset + (num_bytes - 1) <= msg__index__subtype__1__last using hypothesis H36. *** Proved C37: fld_byte_count(fld_h(ctx~)) + (num_bytes - 1) <= skein_512_block_bytes_index__last using hypothesis H37. *** PROVED VC. @@@@@@@@@@ VC: procedure_copy_msg_to_b_9. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h(ctx~)) %%% Simplified H3 on reading formula in, to give: %%% H3: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h(ctx~)) %%% Simplified H12 on reading formula in, to give: %%% H12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H13 on reading formula in, to give: %%% H13: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H31 on reading formula in, to give: %%% H31: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H32 on reading formula in, to give: %%% H32: fld_hash_bit_len(fld_h(ctx~)) > 0 %%% Simplified H37 on reading formula in, to give: %%% H37: fld_byte_count(fld_h(ctx~)) + (num_bytes - 1) <= skein_512_block_bytes_index__last --- Hypothesis H47 has been replaced by "true". (It is already present, as H40). --- Hypothesis H48 has been replaced by "true". (It is already present, as H41). *** Proved C1: dst + 1 >= skein_512_block_bytes_index__first using hypothesis H40. -S- Applied substitution rule copy_msg_to__rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H6: fld_byte_count(fld_h(ctx)) + num_bytes <= 64 New H12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H37: fld_byte_count(fld_h(ctx~)) + (num_bytes - 1) <= 63 New H41: dst <= 63 New H43: final_dst <= 63 New C2: dst <= 62 >>> Restructured hypothesis H46 into: >>> H46: dst < final_dst and src < final_src >>> Hypothesis H46 has now been split into two, giving: >>> H49: dst < final_dst >>> H50: src < final_src -S- Applied substitution rule copy_msg_to__rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H8: msg_offset >= 0 New H10: num_bytes >= 0 New H14: fld_byte_count(fld_h(ctx)) >= 0 New H38: src >= 0 New H44: final_src >= 0 -S- Applied substitution rule copy_msg_to__rules(8). 
This was achieved by replacing all occurrences of natural__last by: 2147483647. New H9: msg_offset <= 2147483647 New H11: num_bytes <= 2147483647 New H15: fld_byte_count(fld_h(ctx)) <= 2147483647 New H39: src <= 2147483647 New H45: final_src <= 2147483647 -S- Applied substitution rule copy_msg_to__rules(40). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H20: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H21: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule copy_msg_to__rules(46). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H24: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule copy_msg_to__rules(52). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H31: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule copy_msg_to__rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H31: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) -S- Applied substitution rule copy_msg_to__rules(58). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H25: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H26: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule copy_msg_to__rules(64). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H28: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule copy_msg_to__rules(70). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H13: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H30: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H13: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(81). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H16: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule copy_msg_to__rules(82). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H17: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule copy_msg_to__rules(86). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H13: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(87). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H13: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(91). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H4: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule copy_msg_to__rules(92). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H5: fld_byte_count(fld_h(ctx)) <= 64 -S- Applied substitution rule copy_msg_to__rules(96). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H40: dst >= 0 New H42: final_dst >= 0 New H12: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) *** Proved C2: dst <= 62 using hypotheses H43 & H49. *** PROVED VC. @@@@@@@@@@ VC: procedure_copy_msg_to_b_10. @@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h(ctx~)) %%% Simplified H3 on reading formula in, to give: %%% H3: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h(ctx~)) %%% Simplified H12 on reading formula in, to give: %%% H12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H13 on reading formula in, to give: %%% H13: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H31 on reading formula in, to give: %%% H31: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H32 on reading formula in, to give: %%% H32: fld_hash_bit_len(fld_h(ctx~)) > 0 %%% Simplified H37 on reading formula in, to give: %%% H37: fld_byte_count(fld_h(ctx~)) + (num_bytes - 1) <= skein_512_block_bytes_index__last --- Hypothesis H47 has been replaced by "true". 
(It is already present, as H40). --- Hypothesis H48 has been replaced by "true". (It is already present, as H41). --- Hypothesis H51 has been replaced by "true". (It is already present, as H38). --- Hypothesis H52 has been replaced by "true". (It is already present, as H39). *** Proved C1: src + 1 >= natural__first using hypothesis H38. -S- Applied substitution rule copy_msg_to__rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H9: msg_offset <= 2147483647 New H11: num_bytes <= 2147483647 New H15: fld_byte_count(fld_h(ctx)) <= 2147483647 New H39: src <= 2147483647 New H45: final_src <= 2147483647 New C2: src <= 2147483646 >>> Restructured hypothesis H46 into: >>> H46: dst < final_dst and src < final_src >>> Hypothesis H46 has now been split into two, giving: >>> H53: dst < final_dst >>> H54: src < final_src -S- Applied substitution rule copy_msg_to__rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H8: msg_offset >= 0 New H10: num_bytes >= 0 New H14: fld_byte_count(fld_h(ctx)) >= 0 New H38: src >= 0 New H44: final_src >= 0 -S- Applied substitution rule copy_msg_to__rules(40). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H20: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H21: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule copy_msg_to__rules(46). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H24: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule copy_msg_to__rules(52). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H31: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule copy_msg_to__rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H31: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) -S- Applied substitution rule copy_msg_to__rules(58). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H25: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H26: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule copy_msg_to__rules(64). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. 
New H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H28: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule copy_msg_to__rules(70). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H13: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H30: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H13: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(81). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H16: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule copy_msg_to__rules(82). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H17: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule copy_msg_to__rules(86). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H13: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(87). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H13: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(91). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H4: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule copy_msg_to__rules(92). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H5: fld_byte_count(fld_h(ctx)) <= 64 -S- Applied substitution rule copy_msg_to__rules(96). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H40: dst >= 0 New H42: final_dst >= 0 New H49: dst >= - 1 New H12: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule copy_msg_to__rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H6: fld_byte_count(fld_h(ctx)) + num_bytes <= 64 New H37: fld_byte_count(fld_h(ctx~)) + (num_bytes - 1) <= 63 New H41: dst <= 63 New H43: final_dst <= 63 New H50: dst <= 62 New H12: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) *** Proved C2: src <= 2147483646 using hypotheses H45 & H54. *** PROVED VC. @@@@@@@@@@ VC: procedure_copy_msg_to_b_11. 
@@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h(ctx~)) %%% Simplified H3 on reading formula in, to give: %%% H3: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h(ctx~)) %%% Simplified H12 on reading formula in, to give: %%% H12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H13 on reading formula in, to give: %%% H13: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H31 on reading formula in, to give: %%% H31: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H32 on reading formula in, to give: %%% H32: fld_hash_bit_len(fld_h(ctx~)) > 0 %%% Simplified H37 on reading formula in, to give: %%% H37: fld_byte_count(fld_h(ctx~)) + (num_bytes - 1) <= skein_512_block_bytes_index__last -S- Applied substitution rule copy_msg_to__rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H8: msg_offset >= 0 New H10: num_bytes >= 0 New H14: fld_byte_count(fld_h(ctx)) >= 0 New H38: src >= 0 New H44: final_src >= 0 New C1: fld_byte_count(fld_h(ctx)) + num_bytes >= 0 -S- Applied substitution rule copy_msg_to__rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H9: msg_offset <= 2147483647 New H11: num_bytes <= 2147483647 New H15: fld_byte_count(fld_h(ctx)) <= 2147483647 New H39: src <= 2147483647 New H45: final_src <= 2147483647 New C2: fld_byte_count(fld_h(ctx)) + num_bytes <= 2147483647 *** Proved C1: fld_byte_count(fld_h(ctx)) + num_bytes >= 0 using hypotheses H10 & H14. -S- Applied substitution rule copy_msg_to__rules(40). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H20: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H21: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule copy_msg_to__rules(46). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H24: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule copy_msg_to__rules(52). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H31: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule copy_msg_to__rules(53). 
This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H31: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) -S- Applied substitution rule copy_msg_to__rules(58). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H25: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H26: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule copy_msg_to__rules(64). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H28: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule copy_msg_to__rules(70). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H13: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H30: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H13: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(81). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H16: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule copy_msg_to__rules(82). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H17: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule copy_msg_to__rules(86). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H13: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(87). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H13: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(91). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H4: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule copy_msg_to__rules(92). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. 
New H5: fld_byte_count(fld_h(ctx)) <= 64 -S- Applied substitution rule copy_msg_to__rules(96). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H40: dst >= 0 New H42: final_dst >= 0 New H12: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule copy_msg_to__rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H6: fld_byte_count(fld_h(ctx)) + num_bytes <= 64 New H37: fld_byte_count(fld_h(ctx~)) + (num_bytes - 1) <= 63 New H41: dst <= 63 New H43: final_dst <= 63 New H12: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) *** Proved C2: fld_byte_count(fld_h(ctx)) + num_bytes <= 2147483647 using hypothesis H6. *** PROVED VC. @@@@@@@@@@ VC: procedure_copy_msg_to_b_12. @@@@@@@@@@ %%% Simplified H11 on reading formula in, to give: %%% H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H12 on reading formula in, to give: %%% H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H30 on reading formula in, to give: %%% H30: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) %%% Simplified C2 on reading formula in, to give: %%% C2: true *** Proved C1: fld_hash_bit_len(fld_h(ctx)) > 0 using hypothesis H1. *** Proved C2: true -S- Applied substitution rule copy_msg_to__rules(91). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New C4: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule copy_msg_to__rules(92). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New C5: fld_byte_count(fld_h(ctx)) <= 64 >>> Restructured hypothesis H31 into: >>> H31: num_bytes <= 0 -S- Applied substitution rule copy_msg_to__rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H7: msg_offset >= 0 New H9: num_bytes >= 0 New H13: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule copy_msg_to__rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H8: msg_offset <= 2147483647 New H10: num_bytes <= 2147483647 New H14: fld_byte_count(fld_h(ctx)) <= 2147483647 -S- Applied substitution rule copy_msg_to__rules(40). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H19: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H20: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule copy_msg_to__rules(46). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. 
New H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule copy_msg_to__rules(52). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H30: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule copy_msg_to__rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H11: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H30: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) -S- Applied substitution rule copy_msg_to__rules(58). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H24: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H25: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule copy_msg_to__rules(64). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule copy_msg_to__rules(70). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H12: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(81). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H15: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule copy_msg_to__rules(82). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H16: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule copy_msg_to__rules(86). 
This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H12: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(87). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H12: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(96). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H11: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule copy_msg_to__rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H6: fld_byte_count(fld_h(ctx)) + (num_bytes - 1) <= 63 New H11: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) %%% Hypotheses H31 & H9 together imply that num_bytes = 0. H31 & H9 have therefore been deleted and a new H32 added to this effect. *** Proved C3: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h(ctx)) + num_bytes via its standard form, which is: Std.Fm C3: num_bytes = 0 using hypothesis H32. *** Proved C4: fld_byte_count(fld_h(ctx)) >= 0 using hypothesis H13. --- Eliminated hypothesis H17 (true-hypothesis). --- Eliminated hypothesis H18 (true-hypothesis). --- Eliminated hypothesis H21 (true-hypothesis). --- Eliminated hypothesis H10 (redundant, given H32). --- Eliminated hypothesis H14 (redundant, given H6 & H32). --- Eliminated hypothesis H15 (redundant, given H1). -S- Eliminated hypothesis H32. This was achieved by replacing all occurrences of num_bytes by: 0. New H5: msg_offset + - 1 <= msg__index__subtype__1__last New H6: fld_byte_count(fld_h(ctx)) <= 64 *** Proved C5: fld_byte_count(fld_h(ctx)) <= 64 using hypothesis H6. *** PROVED VC. @@@@@@@@@@ VC: procedure_copy_msg_to_b_13. 
@@@@@@@@@@ %%% Simplified H2 on reading formula in, to give: %%% H2: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h(ctx~)) %%% Simplified H3 on reading formula in, to give: %%% H3: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h(ctx~)) %%% Simplified H12 on reading formula in, to give: %%% H12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H13 on reading formula in, to give: %%% H13: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H31 on reading formula in, to give: %%% H31: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H32 on reading formula in, to give: %%% H32: fld_hash_bit_len(fld_h(ctx~)) > 0 %%% Simplified H37 on reading formula in, to give: %%% H37: fld_byte_count(fld_h(ctx~)) + (num_bytes - 1) <= skein_512_block_bytes_index__last %%% Simplified C1 on reading formula in, to give: %%% C1: fld_hash_bit_len(fld_h(ctx)) > 0 %%% Simplified C2 on reading formula in, to give: %%% C2: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h(ctx~)) %%% Simplified C3 on reading formula in, to give: %%% C3: fld_byte_count(fld_h(ctx)) + num_bytes = fld_byte_count(fld_h( ctx~)) + num_bytes %%% Simplified C4 on reading formula in, to give: %%% C4: fld_byte_count(fld_h(ctx)) + num_bytes >= skein_512_block_bytes_count__first %%% Simplified C5 on reading formula in, to give: %%% C5: fld_byte_count(fld_h(ctx)) + num_bytes <= skein_512_block_bytes_count__last *** Proved C1: fld_hash_bit_len(fld_h(ctx)) > 0 using hypothesis H1. *** Proved C2: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h(ctx~)) using hypothesis H2. -S- Applied substitution rule copy_msg_to__rules(91). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H4: fld_byte_count(fld_h(ctx)) >= 0 New C4: fld_byte_count(fld_h(ctx)) + num_bytes >= 0 -S- Applied substitution rule copy_msg_to__rules(92). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H5: fld_byte_count(fld_h(ctx)) <= 64 New C5: fld_byte_count(fld_h(ctx)) + num_bytes <= 64 -S- Applied substitution rule copy_msg_to__rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H8: msg_offset >= 0 New H10: num_bytes >= 0 New H14: fld_byte_count(fld_h(ctx)) >= 0 New H38: src >= 0 New H44: final_src >= 0 New H47: fld_byte_count(fld_h(ctx)) + num_bytes >= 0 -S- Applied substitution rule copy_msg_to__rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H9: msg_offset <= 2147483647 New H11: num_bytes <= 2147483647 New H15: fld_byte_count(fld_h(ctx)) <= 2147483647 New H39: src <= 2147483647 New H45: final_src <= 2147483647 New H48: fld_byte_count(fld_h(ctx)) + num_bytes <= 2147483647 -S- Applied substitution rule copy_msg_to__rules(40). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. 
New H20: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H21: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule copy_msg_to__rules(46). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H24: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule copy_msg_to__rules(52). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H31: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule copy_msg_to__rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H12: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H31: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) -S- Applied substitution rule copy_msg_to__rules(58). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H25: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H26: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule copy_msg_to__rules(64). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H28: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule copy_msg_to__rules(70). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H13: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule copy_msg_to__rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H30: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H13: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(81). 
This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H16: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule copy_msg_to__rules(82). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H17: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule copy_msg_to__rules(86). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H13: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(87). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H13: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule copy_msg_to__rules(96). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H40: dst >= 0 New H42: final_dst >= 0 New H12: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule copy_msg_to__rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H6: fld_byte_count(fld_h(ctx)) + num_bytes <= 64 New H37: fld_byte_count(fld_h(ctx~)) + (num_bytes - 1) <= 63 New H41: dst <= 63 New H43: final_dst <= 63 New H12: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) *** Proved C3: fld_byte_count(fld_h(ctx)) + num_bytes = fld_byte_count(fld_h( ctx~)) + num_bytes via its standard form, which is: Std.Fm C3: fld_byte_count(fld_h(ctx)) - fld_byte_count(fld_h(ctx~)) = 0 using hypothesis H3. *** Proved C4: fld_byte_count(fld_h(ctx)) + num_bytes >= 0 using hypothesis H47. *** Proved C5: fld_byte_count(fld_h(ctx)) + num_bytes <= 64 using hypothesis H6. *** PROVED VC. 
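The verification conditions discharged above come down to interval arithmetic on the context's byte counter: fld_byte_count(fld_h(ctx)) ranges over Skein_512_Block_Bytes_Count (0 .. 64), and the hypothesis fld_byte_count(fld_h(ctx)) + num_bytes <= 64 is exactly what lets the simplifier prove C4 and C5. The stand-alone Ada sketch below is illustrative only and is not part of the Skein or SPARK sources; the names Byte_Count_Bound_Demo, Block_Bytes_C, Byte_Count and Num_Bytes are invented for this example. It shows the same guard protecting the same range check at run time.

with Ada.Text_IO;

procedure Byte_Count_Bound_Demo is
   --  Mirrors skein_512_block_bytes_count: 0 .. 64 (one 64-byte Skein-512 block).
   Block_Bytes_C : constant := 64;
   subtype Block_Bytes_Count is Natural range 0 .. Block_Bytes_C;

   Byte_Count : Block_Bytes_Count := 13;   --  bytes already buffered (hypothetical value)
   Num_Bytes  : constant Natural  := 51;   --  bytes to be added (hypothetical value)
begin
   --  This guard corresponds to the hypothesis fld_byte_count + num_bytes <= 64
   --  used above to prove C5; with it, the assignment cannot fail its range check.
   if Byte_Count + Num_Bytes <= Block_Bytes_C then
      Byte_Count := Byte_Count + Num_Bytes;
   end if;
   Ada.Text_IO.Put_Line ("Byte count now" & Natural'Image (Byte_Count));
end Byte_Count_Bound_Demo;

With the values chosen here the guard holds (13 + 51 = 64), so the updated counter stays within Block_Bytes_Count; removing the guard and choosing, say, Num_Bytes = 60 would raise Constraint_Error, which is the failure the proved conclusions C4 and C5 rule out statically.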
spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_update/copy_msg_to_b.vlg0000644000175000017500000000326211712765060031547 0ustar eugeneugen Non-option args: copy_msg_to_b Option args: -fuse-concls \ -decls=[SUPRESSED IN PLAIN MODE] \ -unique-working-files \ -rules=[SUPRESSED IN PLAIN MODE] \ -rules=[SUPRESSED IN PLAIN MODE] \ -elim-enums \ -ground-eval-exp \ -abstract-exp \ -abstract-divmod \ -utick \ -gtick \ -longtick \ -echo-final-stats \ -csv-reports-include-unit-kind \ -level=warning \ -bit-type \ -bit-type-bool-eq-to-iff \ -refine-types \ -refine-int-subrange-type \ -abstract-arrays-records-late \ -elim-array-constructors \ -add-array-select-box-update-axioms \ -abstract-array-box-updates \ -add-array-select-update-axioms \ -abstract-array-select-updates \ -abstract-array-types \ -abstract-record-types \ -abstract-bit-ops \ -abstract-bit-valued-eqs \ -abstract-bit-valued-int-le \ -elim-bit-type-and-consts \ -abstract-reals \ -lift-quants \ -strip-quantifier-patterns \ -elim-type-aliases \ -interface-mode=smtlib \ -refine-bit-type-as-int-subtype \ -refine-bit-eq-equiv \ -elim-record-constructors \ -add-record-select-update-axioms \ -abstract-record-selects-updates \ -logic=AUFNIRA \ -report=copy_msg_to_b \ -prover-command=[SUPRESSED IN PLAIN MODE] \ -siv \ -plain \ -read-unit-rlu-files \ -read-directory-rlu-files \ -delete-rules-with-undeclared-ids \ Total ERROR messages: 0 Total WARNING messages: 0 Summary Stats: true: 1 (100.0%) unproven: 0 ( 0.0%) error: 0 ( 0.0%) total: 1 spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_update/copy_msg_to_b.vct0000644000175000017500000000004711712513676031555 0ustar eugeneugen,copy_msg_to_b,procedure,,,6,,true,,,, spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_update/copy_msg_to_b.log0000644000175000017500000000140211712513676031536 0ustar eugeneugenSPARK Simplifier Pro Edition Reading copy_msg_to_b.fdl (for inherited FDL type declarations) Processing copy_msg_to_b.vcg ... Simplified VC: 1 - All conclusions proved Simplified VC: 2 - All conclusions proved Simplified VC: 3 - All conclusions proved Simplified VC: 4 - All conclusions proved Simplified VC: 5 - All conclusions proved Simplified VC: 6 - 2 conclusions remain unproven Simplified VC: 7 - All conclusions proved Simplified VC: 8 - All conclusions proved Simplified VC: 9 - All conclusions proved Simplified VC: 10 - All conclusions proved Simplified VC: 11 - All conclusions proved Simplified VC: 12 - All conclusions proved Simplified VC: 13 - All conclusions proved Automatic simplification completed. Simplified output sent to copy_msg_to_b.siv. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_start_new_type.siv0000644000175000017500000000142511712513676030136 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_Start_New_Type For path(s) from start to run-time check associated with statement of line 211: procedure_skein_start_new_type_1. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 219: procedure_skein_start_new_type_2. *** true . /* all conclusions proved */ For path(s) from start to finish: procedure_skein_start_new_type_3. *** true . 
/* all conclusions proved */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final.siv0000644000175000017500000005217611712513676026560 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Final For path(s) from start to check associated with statement of line 900: procedure_skein_512_final_1. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 901: procedure_skein_512_final_2. *** true . /* all conclusions proved */ For path(s) from start to precondition check associated with statement of line 905: procedure_skein_512_final_3. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 909: procedure_skein_512_final_4. *** true . /* all conclusions proved */ procedure_skein_512_final_5. *** true . /* all conclusions proved */ For path(s) from start to precondition check associated with statement of line 911: procedure_skein_512_final_6. *** true . /* all conclusions proved */ procedure_skein_512_final_7. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 918: procedure_skein_512_final_8. *** true . /* all conclusions proved */ procedure_skein_512_final_9. *** true . /* all conclusions proved */ For path(s) from start to check associated with statement of line 920: procedure_skein_512_final_10. *** true . /* all conclusions proved */ procedure_skein_512_final_11. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 923: procedure_skein_512_final_12. *** true . /* all conclusions proved */ procedure_skein_512_final_13. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 926: procedure_skein_512_final_14. *** true . /* all conclusions proved */ procedure_skein_512_final_15. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 927: procedure_skein_512_final_16. *** true . /* all conclusions proved */ procedure_skein_512_final_17. *** true . /* all conclusions proved */ For path(s) from start to assertion of line 930: procedure_skein_512_final_18. *** true . /* all conclusions proved */ procedure_skein_512_final_19. *** true . /* all conclusions proved */ For path(s) from assertion of line 930 to assertion of line 930: procedure_skein_512_final_20. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: byte_count <= result__index__subtype__1__last + 1 . H3: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) . H4: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x( ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) . H5: fld_byte_count(fld_h(ctx)) >= 0 . H6: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 . H7: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 . H8: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 . H9: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 . H10: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 . H11: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 . H12: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 . 
H13: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 . H14: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 . H15: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 . H16: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 . H17: fld_hash_bit_len(fld_h(ctx)) >= 1 . H18: fld_byte_count(fld_h(ctx)) <= 64 . H19: result__index__subtype__1__first = 0 . H20: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H21: fld_hash_bit_len(fld_h(local_ctx__3)) > 0 . H22: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element( fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2] ) <= 255) . H23: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x( local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1]) <= 18446744073709551615) . H24: fld_byte_count(fld_h(local_ctx__3)) >= 0 . H25: fld_byte_count(fld_h(local_ctx__3)) <= 2147483647 . H26: fld_hash_bit_len(fld_h(local_ctx__3)) <= 2147483640 . H27: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= 0 . H28: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= 63 . H29: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= 0 . H30: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= 127 . H31: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= 0 . H32: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= 65535 . H33: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= 0 . H34: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= 4294967295 . H35: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= 0 . H36: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= 18446744073709551615 . H37: fld_h(local_ctx__4) = upf_tweak_words(upf_byte_count(fld_h(local_ctx__3) , 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := 63, first_block := true, final_block := true)) . H38: fld_hash_bit_len(fld_h(local_ctx__4)) = fld_hash_bit_len(fld_h( local_ctx__3)) . H39: fld_byte_count(fld_h(local_ctx__4)) = 0 . H40: fld_hash_bit_len(fld_h(local_ctx__3)) <= 2147483640 . H41: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= 0 . H42: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= 63 . H43: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= 0 . H44: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= 127 . H45: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= 0 . H46: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= 65535 . H47: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= 0 . H48: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= 4294967295 . H49: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= 0 . H50: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= 18446744073709551615 . H51: fld_x(local_ctx__4) = fld_x(local_ctx__3) . H52: fld_b(local_ctx__4) = fld_b(local_ctx__3) . H53: local_ctx__4 = upf_h(local_ctx__3, fld_h(local_ctx__4)) . H54: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element( fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2] ) <= 255) . H55: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x( local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1]) <= 18446744073709551615) . H56: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element( fld_b(local_ctx__3), [i___1]) and element(fld_b(local_ctx__3), [i___1] ) <= 255) . 
H57: fld_hash_bit_len(fld_h(local_ctx__3)) >= 1 . H58: fld_hash_bit_len(fld_h(local_ctx__3)) >= 1 . H59: fld_hash_bit_len(fld_h(local_ctx__3)) <= 2147483640 . H60: fld_hash_bit_len(fld_h(local_ctx__5)) = fld_hash_bit_len(fld_h( local_ctx__3)) . H61: fld_byte_count(fld_h(local_ctx__4)) >= 0 . H62: fld_byte_count(fld_h(local_ctx__4)) <= 64 . H63: fld_byte_count(fld_h(local_ctx__5)) = fld_byte_count(fld_h(local_ctx__4)) . H64: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element( fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5), [i___2] ) <= 255) . H65: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x( local_ctx__5), [i___1]) and element(fld_x(local_ctx__5), [i___1]) <= 18446744073709551615) . H66: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) >= 0 . H67: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) <= 63 . H68: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) >= 0 . H69: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) <= 127 . H70: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) >= 0 . H71: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) <= 65535 . H72: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) >= 0 . H73: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) <= 4294967295 . H74: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) >= 0 . H75: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) <= 18446744073709551615 . H76: byte_count >= 1 . H77: byte_count <= 268435455 . H78: byte_count - blocks_done * 64 <= 2147483647 . H79: byte_count - blocks_done * 64 >= 64 . H80: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> 0 <= element(result, [ i___1]) and element(result, [i___1]) <= 255) . H81: blocks_done * 64 >= 0 . H82: result__index__subtype__1__last >= blocks_done * 64 + 63 . H83: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> 0 <= element(result__6, [ i___1]) and element(result__6, [i___1]) <= 255) . H84: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [ i___1]) and element(x, [i___1]) <= 18446744073709551615) . H85: blocks_done <= 4194303 . H86: (byte_count + 63) div 64 >= 1 . H87: (byte_count + 63) div 64 <= 4194304 . H88: blocks_done + 1 < (byte_count + 63) div 64 . H89: integer__size >= 0 . H90: natural__size >= 0 . H91: spark__unsigned__u6__size >= 0 . H92: spark__unsigned__u7__size >= 0 . H93: spark__unsigned__byte__size >= 0 . H94: spark__unsigned__u16__size >= 0 . H95: spark__unsigned__u32__size >= 0 . H96: spark__unsigned__u64__size >= 0 . H97: spark__crypto__word_count_t__size >= 0 . H98: hash_bit_length__size >= 0 . H99: initialized_hash_bit_length__size >= 0 . H100: skein_512_state_words_index__size >= 0 . H101: skein_512_block_bytes_count__size >= 0 . H102: skein_512_block_bytes_index__size >= 0 . H103: positive_block_512_count_t__size >= 0 . H104: skein_512_context__size >= 0 . H105: result__index__subtype__1__first <= result__index__subtype__1__last . H106: context_header__size >= 0 . H107: output_byte_count_t__size >= 0 . H108: output_block_count_t__size >= 0 . H109: positive_output_block_count_t__size >= 0 . H110: result__index__subtype__1__first >= 0 . H111: result__index__subtype__1__last >= 0 . H112: result__index__subtype__1__last <= 2147483647 . H113: result__index__subtype__1__first <= 2147483647 . -> C1: (blocks_done + 1) * 64 < byte_count . 
C2: (blocks_done + 1) * 64 < result__index__subtype__1__last + 1 . procedure_skein_512_final_21. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: byte_count <= result__index__subtype__1__last + 1 . H3: blocks_done * 64 < byte_count . H4: blocks_done * 64 < result__index__subtype__1__last + 1 . H5: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) . H6: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x( ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) . H7: fld_byte_count(fld_h(ctx)) >= 0 . H8: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 . H9: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 . H10: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 . H11: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 . H12: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 . H13: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 . H14: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 . H15: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 . H16: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 . H17: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 . H18: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 . H19: fld_hash_bit_len(fld_h(ctx)) >= 1 . H20: fld_byte_count(fld_h(ctx)) <= 64 . H21: result__index__subtype__1__first = 0 . H22: (fld_hash_bit_len(fld_h(ctx)) + 7) div 8 <= result__index__subtype__1__last + 1 . H23: fld_hash_bit_len(fld_h(local_ctx__3)) > 0 . H24: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element( fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2] ) <= 255) . H25: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x( local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1]) <= 18446744073709551615) . H26: fld_byte_count(fld_h(local_ctx__3)) >= 0 . H27: fld_byte_count(fld_h(local_ctx__3)) <= 2147483647 . H28: fld_hash_bit_len(fld_h(local_ctx__3)) <= 2147483640 . H29: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) >= 0 . H30: fld_field_type(fld_tweak_words(fld_h(local_ctx__3))) <= 63 . H31: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) >= 0 . H32: fld_tree_level(fld_tweak_words(fld_h(local_ctx__3))) <= 127 . H33: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) >= 0 . H34: fld_reserved(fld_tweak_words(fld_h(local_ctx__3))) <= 65535 . H35: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) >= 0 . H36: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__3))) <= 4294967295 . H37: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) >= 0 . H38: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__3))) <= 18446744073709551615 . H39: fld_h(local_ctx__4) = upf_tweak_words(upf_byte_count(fld_h(local_ctx__3) , 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := 63, first_block := true, final_block := true)) . H40: fld_hash_bit_len(fld_h(local_ctx__4)) = fld_hash_bit_len(fld_h( local_ctx__3)) . H41: fld_byte_count(fld_h(local_ctx__4)) = 0 . H42: fld_hash_bit_len(fld_h(local_ctx__3)) <= 2147483640 . H43: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) >= 0 . H44: fld_field_type(fld_tweak_words(fld_h(local_ctx__4))) <= 63 . H45: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) >= 0 . H46: fld_tree_level(fld_tweak_words(fld_h(local_ctx__4))) <= 127 . H47: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) >= 0 . 
H48: fld_reserved(fld_tweak_words(fld_h(local_ctx__4))) <= 65535 . H49: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) >= 0 . H50: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__4))) <= 4294967295 . H51: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) >= 0 . H52: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__4))) <= 18446744073709551615 . H53: fld_x(local_ctx__4) = fld_x(local_ctx__3) . H54: fld_b(local_ctx__4) = fld_b(local_ctx__3) . H55: local_ctx__4 = upf_h(local_ctx__3, fld_h(local_ctx__4)) . H56: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element( fld_b(local_ctx__3), [i___2]) and element(fld_b(local_ctx__3), [i___2] ) <= 255) . H57: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x( local_ctx__3), [i___1]) and element(fld_x(local_ctx__3), [i___1]) <= 18446744073709551615) . H58: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element( fld_b(local_ctx__3), [i___1]) and element(fld_b(local_ctx__3), [i___1] ) <= 255) . H59: fld_hash_bit_len(fld_h(local_ctx__3)) >= 1 . H60: fld_hash_bit_len(fld_h(local_ctx__3)) >= 1 . H61: fld_hash_bit_len(fld_h(local_ctx__3)) <= 2147483640 . H62: fld_hash_bit_len(fld_h(local_ctx__5)) = fld_hash_bit_len(fld_h( local_ctx__3)) . H63: fld_byte_count(fld_h(local_ctx__4)) >= 0 . H64: fld_byte_count(fld_h(local_ctx__4)) <= 64 . H65: fld_byte_count(fld_h(local_ctx__5)) = fld_byte_count(fld_h(local_ctx__4)) . H66: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element( fld_b(local_ctx__5), [i___2]) and element(fld_b(local_ctx__5), [i___2] ) <= 255) . H67: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x( local_ctx__5), [i___1]) and element(fld_x(local_ctx__5), [i___1]) <= 18446744073709551615) . H68: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) >= 0 . H69: fld_field_type(fld_tweak_words(fld_h(local_ctx__5))) <= 63 . H70: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) >= 0 . H71: fld_tree_level(fld_tweak_words(fld_h(local_ctx__5))) <= 127 . H72: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) >= 0 . H73: fld_reserved(fld_tweak_words(fld_h(local_ctx__5))) <= 65535 . H74: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) >= 0 . H75: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx__5))) <= 4294967295 . H76: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) >= 0 . H77: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx__5))) <= 18446744073709551615 . H78: byte_count >= 1 . H79: byte_count <= 268435455 . H80: byte_count - blocks_done * 64 < 64 . H81: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> 0 <= element(result, [ i___1]) and element(result, [i___1]) <= 255) . H82: blocks_done * 64 >= 0 . H83: for_all(i___1 : integer, result__index__subtype__1__first <= i___1 and i___1 <= result__index__subtype__1__last -> 0 <= element(result__6, [ i___1]) and element(result__6, [i___1]) <= 255) . H84: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(x, [ i___1]) and element(x, [i___1]) <= 18446744073709551615) . H85: (byte_count + 63) div 64 >= 1 . H86: (byte_count + 63) div 64 <= 4194304 . H87: blocks_done + 1 < (byte_count + 63) div 64 . H88: integer__size >= 0 . H89: natural__size >= 0 . H90: spark__unsigned__u6__size >= 0 . H91: spark__unsigned__u7__size >= 0 . H92: spark__unsigned__byte__size >= 0 . H93: spark__unsigned__u16__size >= 0 . H94: spark__unsigned__u32__size >= 0 . H95: spark__unsigned__u64__size >= 0 . 
H96: spark__crypto__word_count_t__size >= 0 . H97: hash_bit_length__size >= 0 . H98: initialized_hash_bit_length__size >= 0 . H99: skein_512_state_words_index__size >= 0 . H100: skein_512_block_bytes_count__size >= 0 . H101: skein_512_block_bytes_index__size >= 0 . H102: positive_block_512_count_t__size >= 0 . H103: skein_512_context__size >= 0 . H104: result__index__subtype__1__first <= result__index__subtype__1__last . H105: context_header__size >= 0 . H106: output_byte_count_t__size >= 0 . H107: output_block_count_t__size >= 0 . H108: positive_output_block_count_t__size >= 0 . H109: result__index__subtype__1__first >= 0 . H110: result__index__subtype__1__last >= 0 . H111: result__index__subtype__1__last <= 2147483647 . H112: result__index__subtype__1__first <= 2147483647 . -> C1: (blocks_done + 1) * 64 < byte_count . C2: (blocks_done + 1) * 64 < result__index__subtype__1__last + 1 . For path(s) from assertion of line 930 to precondition check associated with statement of line 937: procedure_skein_512_final_22. *** true . /* all conclusions proved */ For path(s) from assertion of line 930 to run-time check associated with statement of line 939: procedure_skein_512_final_23. *** true . /* all conclusions proved */ For path(s) from assertion of line 930 to precondition check associated with statement of line 947: procedure_skein_512_final_24. *** true . /* all conclusions proved */ For path(s) from assertion of line 930 to run-time check associated with statement of line 953: procedure_skein_512_final_25. *** true . /* all conclusions proved */ For path(s) from assertion of line 930 to run-time check associated with statement of line 955: procedure_skein_512_final_26. *** true . /* all conclusions proved */ For path(s) from assertion of line 930 to precondition check associated with statement of line 959: procedure_skein_512_final_27. *** true . /* all conclusions proved */ procedure_skein_512_final_28. *** true . /* all conclusions proved */ For path(s) from assertion of line 930 to run-time check associated with statement of line 966: procedure_skein_512_final_29. *** true . /* all conclusions proved */ procedure_skein_512_final_30. *** true . /* all conclusions proved */ For path(s) from assertion of line 930 to finish: procedure_skein_512_final_31. *** true . /* all conclusions proved */ procedure_skein_512_final_32. *** true . /* all conclusions proved */ For checks of refinement integrity: procedure_skein_512_final_33. *** true . /* proved using user-defined proof rules. */ procedure_skein_512_final_34. *** true . /* all conclusions proved */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/put_64_lsb_first.rls0000644000175000017500000001205111712513676027062 0ustar eugeneugen /*********************************************************/ /*Proof Rule Declarations*/ /*Examiner Pro Edition*/ /*********************************************************/ /*procedure Skein.Put_64_LSB_First*/ rule_family put_64_lsb_f_rules: X requires [X:any] & X <= Y requires [X:ire, Y:ire] & X >= Y requires [X:ire, Y:ire]. put_64_lsb_f_rules(1): integer__size >= 0 may_be_deduced. put_64_lsb_f_rules(2): integer__first may_be_replaced_by -2147483648. put_64_lsb_f_rules(3): integer__last may_be_replaced_by 2147483647. put_64_lsb_f_rules(4): integer__base__first may_be_replaced_by -2147483648. put_64_lsb_f_rules(5): integer__base__last may_be_replaced_by 2147483647. put_64_lsb_f_rules(6): natural__size >= 0 may_be_deduced. put_64_lsb_f_rules(7): natural__first may_be_replaced_by 0. 
put_64_lsb_f_rules(8): natural__last may_be_replaced_by 2147483647. put_64_lsb_f_rules(9): natural__base__first may_be_replaced_by -2147483648. put_64_lsb_f_rules(10): natural__base__last may_be_replaced_by 2147483647. put_64_lsb_f_rules(11): interfaces__unsigned_8__size >= 0 may_be_deduced. put_64_lsb_f_rules(12): interfaces__unsigned_8__size may_be_replaced_by 8. put_64_lsb_f_rules(13): interfaces__unsigned_8__first may_be_replaced_by 0. put_64_lsb_f_rules(14): interfaces__unsigned_8__last may_be_replaced_by 255. put_64_lsb_f_rules(15): interfaces__unsigned_8__base__first may_be_replaced_by 0. put_64_lsb_f_rules(16): interfaces__unsigned_8__base__last may_be_replaced_by 255. put_64_lsb_f_rules(17): interfaces__unsigned_8__modulus may_be_replaced_by 256. put_64_lsb_f_rules(18): interfaces__unsigned_64__size >= 0 may_be_deduced. put_64_lsb_f_rules(19): interfaces__unsigned_64__size may_be_replaced_by 64. put_64_lsb_f_rules(20): interfaces__unsigned_64__first may_be_replaced_by 0. put_64_lsb_f_rules(21): interfaces__unsigned_64__last may_be_replaced_by 18446744073709551615. put_64_lsb_f_rules(22): interfaces__unsigned_64__base__first may_be_replaced_by 0. put_64_lsb_f_rules(23): interfaces__unsigned_64__base__last may_be_replaced_by 18446744073709551615. put_64_lsb_f_rules(24): interfaces__unsigned_64__modulus may_be_replaced_by 18446744073709551616. put_64_lsb_f_rules(25): spark__unsigned__byte__size >= 0 may_be_deduced. put_64_lsb_f_rules(26): spark__unsigned__byte__first may_be_replaced_by 0. put_64_lsb_f_rules(27): spark__unsigned__byte__last may_be_replaced_by 255. put_64_lsb_f_rules(28): spark__unsigned__byte__base__first may_be_replaced_by 0. put_64_lsb_f_rules(29): spark__unsigned__byte__base__last may_be_replaced_by 255. put_64_lsb_f_rules(30): spark__unsigned__byte__modulus may_be_replaced_by 256. put_64_lsb_f_rules(31): spark__unsigned__u64__size >= 0 may_be_deduced. put_64_lsb_f_rules(32): spark__unsigned__u64__first may_be_replaced_by 0. put_64_lsb_f_rules(33): spark__unsigned__u64__last may_be_replaced_by 18446744073709551615. put_64_lsb_f_rules(34): spark__unsigned__u64__base__first may_be_replaced_by 0. put_64_lsb_f_rules(35): spark__unsigned__u64__base__last may_be_replaced_by 18446744073709551615. put_64_lsb_f_rules(36): spark__unsigned__u64__modulus may_be_replaced_by 18446744073709551616. put_64_lsb_f_rules(37): spark__unsigned__shift_count__size >= 0 may_be_deduced. put_64_lsb_f_rules(38): spark__unsigned__shift_count__first may_be_replaced_by 0. put_64_lsb_f_rules(39): spark__unsigned__shift_count__last may_be_replaced_by 64. put_64_lsb_f_rules(40): spark__unsigned__shift_count__base__first may_be_replaced_by -2147483648. put_64_lsb_f_rules(41): spark__unsigned__shift_count__base__last may_be_replaced_by 2147483647. put_64_lsb_f_rules(42): spark__crypto__word_count_t__size >= 0 may_be_deduced. put_64_lsb_f_rules(43): spark__crypto__word_count_t__first may_be_replaced_by 0. put_64_lsb_f_rules(44): spark__crypto__word_count_t__last may_be_replaced_by 268435455. put_64_lsb_f_rules(45): spark__crypto__word_count_t__base__first may_be_replaced_by -2147483648. put_64_lsb_f_rules(46): spark__crypto__word_count_t__base__last may_be_replaced_by 2147483647. put_64_lsb_f_rules(47): dst__index__subtype__1__first >= natural__first may_be_deduced. put_64_lsb_f_rules(48): dst__index__subtype__1__last <= natural__last may_be_deduced. put_64_lsb_f_rules(49): dst__index__subtype__1__first <= dst__index__subtype__1__last may_be_deduced. 
put_64_lsb_f_rules(50): dst__index__subtype__1__last >= natural__first may_be_deduced. put_64_lsb_f_rules(51): dst__index__subtype__1__first <= natural__last may_be_deduced. put_64_lsb_f_rules(52): src__index__subtype__1__first >= spark__crypto__word_count_t__first may_be_deduced. put_64_lsb_f_rules(53): src__index__subtype__1__last <= spark__crypto__word_count_t__last may_be_deduced. put_64_lsb_f_rules(54): src__index__subtype__1__first <= src__index__subtype__1__last may_be_deduced. put_64_lsb_f_rules(55): src__index__subtype__1__last >= spark__crypto__word_count_t__first may_be_deduced. put_64_lsb_f_rules(56): src__index__subtype__1__first <= spark__crypto__word_count_t__last may_be_deduced. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_update.slg0000644000175000017500000222064711712513676026737 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Update RRS The following user defined rule files have been read: &&& skein.rlu SEM No semantic checks are performed on the rules. @@@@@@@@@@ VC: procedure_skein_512_update_1. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_up_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H10: fld_byte_count(fld_h(ctx)) >= 0 New C1: msg__index__subtype__1__last >= - 1 -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H6: msg__index__subtype__1__last < 2147483647 New H7: msg__index__subtype__1__last + skein_512_block_bytes_c <= 2147483646 New H11: fld_byte_count(fld_h(ctx)) <= 2147483647 New C2: msg__index__subtype__1__last <= 2147483646 *** Proved C2: msg__index__subtype__1__last <= 2147483646 using hypothesis H6. -S- Applied substitution rule skein_512_up_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H7: msg__index__subtype__1__last <= 2147483582 -S- Applied substitution rule skein_512_up_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule skein_512_up_rules(47). 
This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule skein_512_up_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_up_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) -S- Applied substitution rule skein_512_up_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule skein_512_up_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule skein_512_up_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H12: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_up_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. 
New H13: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(87). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 -S- Applied substitution rule skein_512_up_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_up_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 -S- Applied substitution rule skein_512_up_rules(102). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H15 (true-hypothesis). --- Eliminated hypothesis H18 (true-hypothesis). --- Eliminated hypothesis H13 (duplicate of H2). --- Eliminated hypothesis H10 (duplicate of H3). --- Eliminated hypothesis H6 (redundant, given H7). --- Eliminated hypothesis H11 (redundant, given H4). --- Eliminated hypothesis H12 (redundant, given H1). *** Proved C1: msg__index__subtype__1__last >= - 1 *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_2. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_up_rules(8). 
This was achieved by replacing all occurrences of natural__first by: 0. New H10: fld_byte_count(fld_h(ctx)) >= 0 New H28: msg__index__subtype__1__last >= - 1 New C1: true -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H6: msg__index__subtype__1__last < 2147483647 New H7: msg__index__subtype__1__last + skein_512_block_bytes_c <= 2147483646 New H11: fld_byte_count(fld_h(ctx)) <= 2147483647 New H29: msg__index__subtype__1__last <= 2147483646 New C2: true *** Proved C1: true *** Proved C2: true *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_3. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). -S- Applied substitution rule skein_512_up_rules(5). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New C1: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) >= - 2147483648 -S- Applied substitution rule skein_512_up_rules(6). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New C2: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= 2147483647 -S- Applied substitution rule skein_512_up_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H7: msg__index__subtype__1__last + 65 <= natural__last -S- Applied substitution rule skein_512_up_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H10: fld_byte_count(fld_h(ctx)) >= 0 New H28: msg__index__subtype__1__last >= - 1 New H30: true -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H6: msg__index__subtype__1__last < 2147483647 New H11: fld_byte_count(fld_h(ctx)) <= 2147483647 New H29: msg__index__subtype__1__last <= 2147483646 New H31: true New H7: msg__index__subtype__1__last <= 2147483582 -S- Applied substitution rule skein_512_up_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule skein_512_up_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. 
New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule skein_512_up_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_up_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) -S- Applied substitution rule skein_512_up_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule skein_512_up_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule skein_512_up_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H12: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_up_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H13: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(87). 
This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 -S- Applied substitution rule skein_512_up_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_up_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 -S- Applied substitution rule skein_512_up_rules(102). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) *** Proved C1: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) >= - 2147483648 using hypotheses H10 & H28. *** Proved C2: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= 2147483647 using hypotheses H4 & H7. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_4. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). -S- Applied substitution rule skein_512_up_rules(102). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. 
New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New C1: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_up_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New C2: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= 63 -S- Applied substitution rule skein_512_up_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H7: msg__index__subtype__1__last + 65 <= natural__last New H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) > 64 New C1: fld_byte_count(fld_h(ctx)) <= 64 New C2: 1 <= fld_byte_count(fld_h(ctx)) -S- Applied substitution rule skein_512_up_rules(5). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) >= - 2147483648 -S- Applied substitution rule skein_512_up_rules(6). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= 2147483647 -S- Applied substitution rule skein_512_up_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H10: fld_byte_count(fld_h(ctx)) >= 0 New H28: msg__index__subtype__1__last >= - 1 New H30: true -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H6: msg__index__subtype__1__last < 2147483647 New H11: fld_byte_count(fld_h(ctx)) <= 2147483647 New H29: msg__index__subtype__1__last <= 2147483646 New H31: true New H7: msg__index__subtype__1__last <= 2147483582 -S- Applied substitution rule skein_512_up_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule skein_512_up_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule skein_512_up_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_up_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. 
New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) -S- Applied substitution rule skein_512_up_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule skein_512_up_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule skein_512_up_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H12: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_up_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H13: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(87). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 -S- Applied substitution rule skein_512_up_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(97). 
This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_up_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 *** Proved C1: fld_byte_count(fld_h(ctx)) <= 64 using hypothesis H4. *** Proved C2: 1 <= fld_byte_count(fld_h(ctx)) using hypothesis H37. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_5. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). -S- Applied substitution rule skein_512_up_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H7: msg__index__subtype__1__last + 65 <= natural__last New H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) > 64 New H38: 64 - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first New H39: 64 - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last New C1: 64 - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 -S- Applied substitution rule skein_512_up_rules(5). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) >= - 2147483648 -S- Applied substitution rule skein_512_up_rules(6). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= 2147483647 -S- Applied substitution rule skein_512_up_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H10: fld_byte_count(fld_h(ctx)) >= 0 New H28: msg__index__subtype__1__last >= - 1 New H30: true -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H6: msg__index__subtype__1__last < 2147483647 New H11: fld_byte_count(fld_h(ctx)) <= 2147483647 New H29: msg__index__subtype__1__last <= 2147483646 New H31: true New H7: msg__index__subtype__1__last <= 2147483582 -S- Applied substitution rule skein_512_up_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule skein_512_up_rules(47). 
This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule skein_512_up_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_up_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) -S- Applied substitution rule skein_512_up_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule skein_512_up_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule skein_512_up_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H12: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_up_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. 
New H13: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(87). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 -S- Applied substitution rule skein_512_up_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_up_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 -S- Applied substitution rule skein_512_up_rules(102). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H38: fld_byte_count(fld_h(ctx)) <= 64 New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H39: 1 <= fld_byte_count(fld_h(ctx)) New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) *** Proved C1: 64 - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 via its standard form, which is: Std.Fm C1: msg__index__subtype__1__last + fld_byte_count(fld_h(ctx)) > 63 using hypothesis H36. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_6. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). 
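The standard-form step that closes procedure_skein_512_update_5 above is a plain rearrangement of hypothesis H36. Writing m for msg__index__subtype__1__last and b for fld_byte_count(fld_h(ctx)) (abbreviations used only in this note), the reasoning is:

\[
\text{H36: } m + 1 + b > 64 \;\Longleftrightarrow\; m + b > 63 \;\Longleftrightarrow\; 64 - b < m + 1 \quad (\text{C1}),
\]

which is exactly the standard form Std.Fm C1 recorded by the Simplifier.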
*** Proved C1: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 using hypothesis H40. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_7. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H42 has been replaced by "true". (It is already present, as H30). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H38). --- Hypothesis H45 has been replaced by "true". (It is already present, as H39). %%% Simplified C9 on reading formula in, to give: %%% C9: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last *** Proved C2: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last using hypotheses H29 & H41. *** Proved C3: 0 >= natural__first using hypothesis H30. *** Proved C4: 0 <= natural__last using hypothesis H31. *** Proved C6: msg__index__subtype__1__first = 0 using hypothesis H5. *** Proved C7: 0 >= msg__index__subtype__1__first using hypothesis H5. -S- Applied substitution rule skein_512_up_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H10: fld_byte_count(fld_h(ctx)) >= 0 New H28: msg__index__subtype__1__last >= - 1 New H30: true New C1: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_up_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H39: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= 63 New C10: fld_byte_count(fld_h(ctx)) + (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) - 1) <= 63 -S- Applied substitution rule skein_512_up_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. 
New H7: msg__index__subtype__1__last + 65 <= natural__last New H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) > 64 New H38: 64 - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first New H39: 1 <= fld_byte_count(fld_h(ctx)) New H40: 64 - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 New H41: 64 - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 New C9: 64 - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last New C1: fld_byte_count(fld_h(ctx)) <= 64 New C10: fld_byte_count(fld_h(ctx)) + (64 - fld_byte_count(fld_h(ctx)) - 1) <= 63 -S- Applied substitution rule skein_512_up_rules(5). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) >= - 2147483648 -S- Applied substitution rule skein_512_up_rules(6). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= 2147483647 -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H6: msg__index__subtype__1__last < 2147483647 New H11: fld_byte_count(fld_h(ctx)) <= 2147483647 New H29: msg__index__subtype__1__last <= 2147483646 New H31: true New H7: msg__index__subtype__1__last <= 2147483582 -S- Applied substitution rule skein_512_up_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule skein_512_up_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule skein_512_up_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_up_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) -S- Applied substitution rule skein_512_up_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(60). 
This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule skein_512_up_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule skein_512_up_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H12: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_up_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H13: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(87). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 -S- Applied substitution rule skein_512_up_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_up_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 -S- Applied substitution rule skein_512_up_rules(102). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. 
New H38: fld_byte_count(fld_h(ctx)) <= 64 New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) *** Proved C5: fld_hash_bit_len(fld_h(ctx)) > 0 using hypothesis H1. *** Proved C9: 64 - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last via its standard form, which is: Std.Fm C9: msg__index__subtype__1__last + fld_byte_count(fld_h(ctx)) > 62 using hypothesis H41. *** Proved C1: fld_byte_count(fld_h(ctx)) <= 64 using hypothesis H4. *** Proved C10: fld_byte_count(fld_h(ctx)) + (64 - fld_byte_count(fld_h(ctx)) - 1) <= 63 via its standard form, which is: Std.Fm C10: true --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H15 (true-hypothesis). --- Eliminated hypothesis H18 (true-hypothesis). --- Eliminated hypothesis H30 (true-hypothesis). --- Eliminated hypothesis H32 (true-hypothesis). --- Eliminated hypothesis H33 (true-hypothesis). --- Eliminated hypothesis H42 (true-hypothesis). --- Eliminated hypothesis H43 (true-hypothesis). --- Eliminated hypothesis H44 (true-hypothesis). --- Eliminated hypothesis H45 (true-hypothesis). --- Eliminated hypothesis H31 (true-hypothesis). --- Eliminated hypothesis H39 (duplicate of H37). --- Eliminated hypothesis H13 (duplicate of H2). --- Eliminated hypothesis H10 (duplicate of H3). --- Eliminated hypothesis H38 (duplicate of H4). --- Eliminated hypothesis H29 (duplicate of H6). --- Eliminated hypothesis H40 (duplicate of H36). --- Eliminated hypothesis H3 (redundant, given H37). --- Eliminated hypothesis H6 (redundant, given H7). --- Eliminated hypothesis H11 (redundant, given H4). --- Eliminated hypothesis H12 (redundant, given H1). --- Eliminated hypothesis H28 (redundant, given H4 & H36). --- Eliminated hypothesis H34 (redundant, given H36). --- Eliminated hypothesis H41 (redundant, given H36). *** Proved C8: 0 <= msg__index__subtype__1__last *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_8. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H42 has been replaced by "true". (It is already present, as H30). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H38). --- Hypothesis H45 has been replaced by "true". (It is already present, as H39). --- Hypothesis H48 has been replaced by "true". (It is already present, as H30). --- Hypothesis H49 has been replaced by "true". (It is already present, as H31). 
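The two non-trivial conclusions of procedure_skein_512_update_7 above likewise reduce to one-line integer arithmetic; with the same abbreviations m and b as in the earlier note:

\[
\begin{aligned}
\text{C10: } & b + (64 - b - 1) = 63 \le 63, \text{ which is why its standard form is simply true;}\\
\text{C9: }  & \text{H41: } 64 - b \le m + 1 \;\Longrightarrow\; m + b \ge 63 > 62 \;\Longleftrightarrow\; 64 - b - 1 \le m.
\end{aligned}
\]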
--- Hypothesis H51 has been replaced by "true". (It is already present, as H5) . %%% Simplified H54 on reading formula in, to give: %%% H54: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last %%% Simplified H61 on reading formula in, to give: %%% H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H62 on reading formula in, to give: %%% H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H80 has been replaced by "true". (It is already present, as H28). --- Hypothesis H81 has been replaced by "true". (It is already present, as H29). --- Hypothesis H82 has been replaced by "true". (It is already present, as H38). --- Hypothesis H83 has been replaced by "true". (It is already present, as H39). -S- Applied substitution rule skein_512_up_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H10: fld_byte_count(fld_h(ctx)) >= 0 New H28: msg__index__subtype__1__last >= - 1 New H30: true New H46: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= 0 New H63: fld_byte_count(fld_h(ctx__1)) >= 0 New C1: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H6: msg__index__subtype__1__last < 2147483647 New H7: msg__index__subtype__1__last + skein_512_block_bytes_c <= 2147483646 New H11: fld_byte_count(fld_h(ctx)) <= 2147483647 New H29: msg__index__subtype__1__last <= 2147483646 New H31: true New H47: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= 2147483647 New H64: fld_byte_count(fld_h(ctx__1)) <= 2147483647 New C2: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) <= 2147483647 *** Proved C1: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) >= 0 using hypothesis H41. *** Proved C2: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) <= 2147483647 using hypotheses H29 & H46. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_9. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). 
--- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H42 has been replaced by "true". (It is already present, as H30). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H38). --- Hypothesis H45 has been replaced by "true". (It is already present, as H39). --- Hypothesis H48 has been replaced by "true". (It is already present, as H30). --- Hypothesis H49 has been replaced by "true". (It is already present, as H31). --- Hypothesis H51 has been replaced by "true". (It is already present, as H5) . %%% Simplified H54 on reading formula in, to give: %%% H54: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last %%% Simplified H61 on reading formula in, to give: %%% H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H62 on reading formula in, to give: %%% H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H80 has been replaced by "true". (It is already present, as H28). --- Hypothesis H81 has been replaced by "true". (It is already present, as H29). --- Hypothesis H82 has been replaced by "true". (It is already present, as H38). --- Hypothesis H83 has been replaced by "true". (It is already present, as H39). --- Hypothesis H86 has been replaced by "true". (It is already present, as H38). --- Hypothesis H87 has been replaced by "true". (It is already present, as H39). --- Hypothesis H88 has been replaced by "true". (It is already present, as H30). --- Hypothesis H89 has been replaced by "true". (It is already present, as H31). %%% Simplified C1 on reading formula in, to give: %%% C1: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified C2 on reading formula in, to give: %%% C2: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last *** Proved C1: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first using hypothesis H46. *** Proved C2: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last using hypothesis H47. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_10. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". 
(It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H42 has been replaced by "true". (It is already present, as H30). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H38). --- Hypothesis H45 has been replaced by "true". (It is already present, as H39). --- Hypothesis H48 has been replaced by "true". (It is already present, as H30). --- Hypothesis H49 has been replaced by "true". (It is already present, as H31). --- Hypothesis H51 has been replaced by "true". (It is already present, as H5) . %%% Simplified H54 on reading formula in, to give: %%% H54: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last %%% Simplified H61 on reading formula in, to give: %%% H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H62 on reading formula in, to give: %%% H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H80 has been replaced by "true". (It is already present, as H28). --- Hypothesis H81 has been replaced by "true". (It is already present, as H29). --- Hypothesis H82 has been replaced by "true". (It is already present, as H38). --- Hypothesis H83 has been replaced by "true". (It is already present, as H39). --- Hypothesis H86 has been replaced by "true". (It is already present, as H38). --- Hypothesis H87 has been replaced by "true". (It is already present, as H39). --- Hypothesis H88 has been replaced by "true". (It is already present, as H30). --- Hypothesis H89 has been replaced by "true". (It is already present, as H31). %%% Simplified H90 on reading formula in, to give: %%% H90: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H91 on reading formula in, to give: %%% H91: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last -S- Applied substitution rule skein_512_up_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. 
New H7: msg__index__subtype__1__last + 65 <= natural__last New H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) > 64 New H38: 64 - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first New H39: 64 - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last New H40: 64 - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 New H41: 64 - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 New H46: 64 - fld_byte_count(fld_h(ctx)) >= natural__first New H47: 64 - fld_byte_count(fld_h(ctx)) <= natural__last New H54: 64 - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last New H55: fld_byte_count(fld_h(ctx)) + (64 - fld_byte_count(fld_h(ctx)) - 1) <= skein_512_block_bytes_index__last New H58: fld_byte_count(fld_h(ctx__1)) = fld_byte_count(fld_h(ctx)) + (64 - fld_byte_count(fld_h(ctx))) New H84: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) >= natural__first New H85: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) <= natural__last New H90: 64 - fld_byte_count(fld_h(ctx)) >= natural__first New H91: 64 - fld_byte_count(fld_h(ctx)) <= natural__last New C1: fld_byte_count(fld_h(ctx__1)) = 64 -S- Applied substitution rule skein_512_up_rules(5). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) >= - 2147483648 -S- Applied substitution rule skein_512_up_rules(6). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= 2147483647 -S- Applied substitution rule skein_512_up_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H10: fld_byte_count(fld_h(ctx)) >= 0 New H28: msg__index__subtype__1__last >= - 1 New H30: true New H63: fld_byte_count(fld_h(ctx__1)) >= 0 New H46: fld_byte_count(fld_h(ctx)) <= 64 New H84: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) >= 0 New H90: fld_byte_count(fld_h(ctx)) <= 64 -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H6: msg__index__subtype__1__last < 2147483647 New H11: fld_byte_count(fld_h(ctx)) <= 2147483647 New H29: msg__index__subtype__1__last <= 2147483646 New H31: true New H64: fld_byte_count(fld_h(ctx__1)) <= 2147483647 New H7: msg__index__subtype__1__last <= 2147483582 New H47: - 2147483583 <= fld_byte_count(fld_h(ctx)) New H85: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) <= 2147483647 New H91: - 2147483583 <= fld_byte_count(fld_h(ctx)) -S- Applied substitution rule skein_512_up_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H69: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= 0 -S- Applied substitution rule skein_512_up_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H70: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= 63 -S- Applied substitution rule skein_512_up_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H72: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= 0 -S- Applied substitution rule skein_512_up_rules(48). 
This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H73: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= 127 -S- Applied substitution rule skein_512_up_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) New H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_up_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) New H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H74: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= 0 -S- Applied substitution rule skein_512_up_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H75: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= 65535 -S- Applied substitution rule skein_512_up_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H76: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= 0 -S- Applied substitution rule skein_512_up_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H77: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= 4294967295 -S- Applied substitution rule skein_512_up_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) New H78: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= 0 -S- Applied substitution rule skein_512_up_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H79: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= 18446744073709551615 New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H12: fld_hash_bit_len(fld_h(ctx)) >= 0 New H65: fld_hash_bit_len(fld_h(ctx__1)) >= 0 -S- Applied substitution rule skein_512_up_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H13: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H66: fld_hash_bit_len(fld_h(ctx__1)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(87). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 -S- Applied substitution rule skein_512_up_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__1), [ i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 New H59: fld_byte_count(fld_h(ctx__1)) >= 0 -S- Applied substitution rule skein_512_up_rules(98). 
This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 New H60: fld_byte_count(fld_h(ctx__1)) <= 64 -S- Applied substitution rule skein_512_up_rules(102). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H38: fld_byte_count(fld_h(ctx)) <= 64 New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H61: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx__1), [ i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H39: 1 <= fld_byte_count(fld_h(ctx)) New H55: fld_byte_count(fld_h(ctx)) + (64 - fld_byte_count(fld_h(ctx)) - 1) <= 63 New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H61: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) *** Proved C1: fld_byte_count(fld_h(ctx__1)) = 64 using hypothesis H58. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_11. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H42 has been replaced by "true". (It is already present, as H30). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H38). --- Hypothesis H45 has been replaced by "true". (It is already present, as H39). --- Hypothesis H48 has been replaced by "true". (It is already present, as H30). --- Hypothesis H49 has been replaced by "true". (It is already present, as H31). --- Hypothesis H51 has been replaced by "true". (It is already present, as H5) . 
%%% Simplified H54 on reading formula in, to give: %%% H54: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last %%% Simplified H61 on reading formula in, to give: %%% H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H62 on reading formula in, to give: %%% H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H80 has been replaced by "true". (It is already present, as H28). --- Hypothesis H81 has been replaced by "true". (It is already present, as H29). --- Hypothesis H82 has been replaced by "true". (It is already present, as H38). --- Hypothesis H83 has been replaced by "true". (It is already present, as H39). --- Hypothesis H86 has been replaced by "true". (It is already present, as H38). --- Hypothesis H87 has been replaced by "true". (It is already present, as H39). --- Hypothesis H88 has been replaced by "true". (It is already present, as H30). --- Hypothesis H89 has been replaced by "true". (It is already present, as H31). %%% Simplified H90 on reading formula in, to give: %%% H90: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H91 on reading formula in, to give: %%% H91: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H93 on reading formula in, to give: %%% H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last) %%% Simplified C12 on reading formula in, to give: %%% C12: 63 <= skein_512_block_bytes_index__last %%% Simplified C13 on reading formula in, to give: %%% C13: 63 <= skein_512_block_bytes_index__last %%% Simplified C15 on reading formula in, to give: %%% C15: 63 <= natural__last *** Proved C1: skein_512_block_bytes_c >= natural__first using hypotheses H37 & H46. *** Proved C2: skein_512_block_bytes_c <= natural__last using hypotheses H64 & H92. *** Proved C5: 0 >= natural__first using hypothesis H30. *** Proved C6: 0 <= natural__last using hypothesis H31. *** Proved C7: fld_hash_bit_len(fld_h(ctx__1)) >= initialized_hash_bit_length__first using hypotheses H1 & H57. *** Proved C8: fld_hash_bit_len(fld_h(ctx__1)) <= initialized_hash_bit_length__last using hypotheses H2 & H57. *** Proved C9: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first using hypothesis H59. *** Proved C10: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last using hypothesis H60. -S- Applied substitution rule skein_512_up_rules(107). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New C3: true -S- Applied substitution rule skein_512_up_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New C4: true -S- Applied substitution rule skein_512_up_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. 
New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H39: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= 63 New H55: fld_byte_count(fld_h(ctx)) + (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) - 1) <= 63 New H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) New H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= 63 -> spark__unsigned__byte__first <= element( fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last) New C12: true New C13: true New C14: 63 <= natural__last -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H6: msg__index__subtype__1__last < 2147483647 New H7: msg__index__subtype__1__last + skein_512_block_bytes_c <= 2147483646 New H11: fld_byte_count(fld_h(ctx)) <= 2147483647 New H29: msg__index__subtype__1__last <= 2147483646 New H31: true New H47: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= 2147483647 New H64: fld_byte_count(fld_h(ctx__1)) <= 2147483647 New H85: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) <= 2147483647 New H91: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= 2147483647 New C15: true New C14: true *** Proved C3: true *** Proved C4: true *** Proved C12: true *** Proved C13: true *** Proved C15: true *** Proved C14: true -S- Applied substitution rule skein_512_up_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H7: msg__index__subtype__1__last <= 2147483582 New H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) > 64 New H38: 64 - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first New H39: 1 <= fld_byte_count(fld_h(ctx)) New H40: 64 - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 New H41: 64 - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 New H46: 64 - fld_byte_count(fld_h(ctx)) >= natural__first New H47: - 2147483583 <= fld_byte_count(fld_h(ctx)) New H54: 64 - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last New H55: fld_byte_count(fld_h(ctx)) + (64 - fld_byte_count(fld_h(ctx)) - 1) <= 63 New H58: fld_byte_count(fld_h(ctx__1)) = fld_byte_count(fld_h(ctx)) + (64 - fld_byte_count(fld_h(ctx))) New H84: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) >= natural__first New H85: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) <= 2147483647 New H90: 64 - fld_byte_count(fld_h(ctx)) >= natural__first New H91: - 2147483583 <= fld_byte_count(fld_h(ctx)) New H92: fld_byte_count(fld_h(ctx__1)) = 64 -S- Applied substitution rule skein_512_up_rules(5). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) >= - 2147483648 -S- Applied substitution rule skein_512_up_rules(6). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= 2147483647 -S- Applied substitution rule skein_512_up_rules(8). 
This was achieved by replacing all occurrences of natural__first by: 0. New H10: fld_byte_count(fld_h(ctx)) >= 0 New H28: msg__index__subtype__1__last >= - 1 New H30: true New H63: fld_byte_count(fld_h(ctx__1)) >= 0 New H46: fld_byte_count(fld_h(ctx)) <= 64 New H84: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) >= 0 New H90: fld_byte_count(fld_h(ctx)) <= 64 -S- Applied substitution rule skein_512_up_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H69: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= 0 -S- Applied substitution rule skein_512_up_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H70: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= 63 -S- Applied substitution rule skein_512_up_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H72: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= 0 -S- Applied substitution rule skein_512_up_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H73: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= 127 -S- Applied substitution rule skein_512_up_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) New H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) New H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_up_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) New H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) New H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= 255) -S- Applied substitution rule skein_512_up_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H74: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= 0 -S- Applied substitution rule skein_512_up_rules(60). 
This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H75: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= 65535 -S- Applied substitution rule skein_512_up_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H76: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= 0 -S- Applied substitution rule skein_512_up_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H77: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= 4294967295 -S- Applied substitution rule skein_512_up_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) New H78: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= 0 -S- Applied substitution rule skein_512_up_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H79: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= 18446744073709551615 New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H12: fld_hash_bit_len(fld_h(ctx)) >= 0 New H65: fld_hash_bit_len(fld_h(ctx__1)) >= 0 -S- Applied substitution rule skein_512_up_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H13: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H66: fld_hash_bit_len(fld_h(ctx__1)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(87). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 -S- Applied substitution rule skein_512_up_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. 
New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__1), [ i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 New H59: fld_byte_count(fld_h(ctx__1)) >= 0 -S- Applied substitution rule skein_512_up_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 New H60: fld_byte_count(fld_h(ctx__1)) <= 64 -S- Applied substitution rule skein_512_up_rules(102). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H38: fld_byte_count(fld_h(ctx)) <= 64 New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H61: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) New H93: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= 255) New C11: true *** Proved C11: true *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_12. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H42 has been replaced by "true". (It is already present, as H30). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H38). --- Hypothesis H45 has been replaced by "true". (It is already present, as H39). --- Hypothesis H48 has been replaced by "true". (It is already present, as H30). --- Hypothesis H49 has been replaced by "true". 
(It is already present, as H31). --- Hypothesis H51 has been replaced by "true". (It is already present, as H5) . %%% Simplified H54 on reading formula in, to give: %%% H54: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last %%% Simplified H61 on reading formula in, to give: %%% H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H62 on reading formula in, to give: %%% H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H80 has been replaced by "true". (It is already present, as H28). --- Hypothesis H81 has been replaced by "true". (It is already present, as H29). --- Hypothesis H82 has been replaced by "true". (It is already present, as H38). --- Hypothesis H83 has been replaced by "true". (It is already present, as H39). --- Hypothesis H86 has been replaced by "true". (It is already present, as H38). --- Hypothesis H87 has been replaced by "true". (It is already present, as H39). --- Hypothesis H88 has been replaced by "true". (It is already present, as H30). --- Hypothesis H89 has been replaced by "true". (It is already present, as H31). %%% Simplified H90 on reading formula in, to give: %%% H90: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H91 on reading formula in, to give: %%% H91: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H93 on reading formula in, to give: %%% H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H98 has been replaced by "true". (It is already present, as H30). --- Hypothesis H99 has been replaced by "true". (It is already present, as H31). --- Hypothesis H102 has been replaced by "true". (It is already present, as H59). --- Hypothesis H103 has been replaced by "true". (It is already present, as H60). %%% Simplified H105 on reading formula in, to give: %%% H105: 63 <= skein_512_block_bytes_index__last %%% Simplified H106 on reading formula in, to give: %%% H106: 63 <= skein_512_block_bytes_index__last %%% Simplified H108 on reading formula in, to give: %%% H108: 63 <= natural__last %%% Simplified H115 on reading formula in, to give: %%% H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H116 on reading formula in, to give: %%% H116: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last) *** Proved C1: 0 >= natural__first using hypothesis H30. *** Proved C2: 0 <= natural__last using hypothesis H31. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_13. 
@@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H42 has been replaced by "true". (It is already present, as H30). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H38). --- Hypothesis H45 has been replaced by "true". (It is already present, as H39). --- Hypothesis H48 has been replaced by "true". (It is already present, as H30). --- Hypothesis H49 has been replaced by "true". (It is already present, as H31). --- Hypothesis H51 has been replaced by "true". (It is already present, as H5) . %%% Simplified H54 on reading formula in, to give: %%% H54: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last %%% Simplified H61 on reading formula in, to give: %%% H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H62 on reading formula in, to give: %%% H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H80 has been replaced by "true". (It is already present, as H28). --- Hypothesis H81 has been replaced by "true". (It is already present, as H29). --- Hypothesis H82 has been replaced by "true". (It is already present, as H38). --- Hypothesis H83 has been replaced by "true". (It is already present, as H39). --- Hypothesis H86 has been replaced by "true". (It is already present, as H38). --- Hypothesis H87 has been replaced by "true". (It is already present, as H39). --- Hypothesis H88 has been replaced by "true". (It is already present, as H30). --- Hypothesis H89 has been replaced by "true". (It is already present, as H31). 
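A note on the preamble above, which recurs for each VC of Skein_512_Update in this file: hypotheses that merely duplicate an earlier one (H80 .. H89 here) are replaced by "true" and eliminated at the end of the proof, and the skein_512_up_rules substitution rules then replace the named constants by their literal values (skein_512_block_bytes_c = 64, natural__first = 0, natural__last = 2147483647, skein_512_block_bytes_count__last = 64, and so on). Once the constants are in place, the recurring range facts such as H90/H91 reduce to interval arithmetic. As a worked sketch of that step (a reading of the log, not additional Simplifier output):

\[
0 \le \mathrm{fld\_byte\_count}(\mathrm{fld\_h}(\mathit{ctx})) \le 64
\;\Longrightarrow\;
0 \le 64 - \mathrm{fld\_byte\_count}(\mathrm{fld\_h}(\mathit{ctx})) \le 64 \le 2147483647 ,
\]

i.e. skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) lies within natural__first .. natural__last, which is exactly what H90 and H91 assert after substitution.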
%%% Simplified H90 on reading formula in, to give: %%% H90: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H91 on reading formula in, to give: %%% H91: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H93 on reading formula in, to give: %%% H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H98 has been replaced by "true". (It is already present, as H30). --- Hypothesis H99 has been replaced by "true". (It is already present, as H31). --- Hypothesis H102 has been replaced by "true". (It is already present, as H59). --- Hypothesis H103 has been replaced by "true". (It is already present, as H60). %%% Simplified H105 on reading formula in, to give: %%% H105: 63 <= skein_512_block_bytes_index__last %%% Simplified H106 on reading formula in, to give: %%% H106: 63 <= skein_512_block_bytes_index__last %%% Simplified H108 on reading formula in, to give: %%% H108: 63 <= natural__last %%% Simplified H115 on reading formula in, to give: %%% H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H116 on reading formula in, to give: %%% H116: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H134 has been replaced by "true". (It is already present, as H30). --- Hypothesis H135 has been replaced by "true". (It is already present, as H31). --- Hypothesis H136 has been replaced by "true". (It is already present, as H84). --- Hypothesis H137 has been replaced by "true". (It is already present, as H85). --- Hypothesis H139 has been replaced by "true". (It is already present, as H84). --- Hypothesis H140 has been replaced by "true". (It is already present, as H85). -S- Applied substitution rule skein_512_up_rules(107). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H96: true New C1: (msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c >= 1 -S- Applied substitution rule skein_512_up_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H97: true New C2: (msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c <= 33554431 -S- Applied substitution rule skein_512_up_rules(5). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) >= - 2147483648 New C4: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) >= - 2147483647 -S- Applied substitution rule skein_512_up_rules(6). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. 
New H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= 2147483647 New C5: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) <= 2147483648 *** Proved C4: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) >= - 2147483647 using hypothesis H41. -S- Applied substitution rule skein_512_up_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H7: msg__index__subtype__1__last + 65 <= natural__last New H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) > 64 New H38: 64 - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first New H39: 64 - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last New H40: 64 - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 New H41: 64 - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 New H46: 64 - fld_byte_count(fld_h(ctx)) >= natural__first New H47: 64 - fld_byte_count(fld_h(ctx)) <= natural__last New H54: 64 - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last New H55: fld_byte_count(fld_h(ctx)) + (64 - fld_byte_count(fld_h(ctx)) - 1) <= skein_512_block_bytes_index__last New H58: fld_byte_count(fld_h(ctx__1)) = fld_byte_count(fld_h(ctx)) + (64 - fld_byte_count(fld_h(ctx))) New H84: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) >= natural__first New H85: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) <= natural__last New H90: 64 - fld_byte_count(fld_h(ctx)) >= natural__first New H91: 64 - fld_byte_count(fld_h(ctx)) <= natural__last New H92: fld_byte_count(fld_h(ctx__1)) = 64 New H94: 64 >= natural__first New H95: 64 <= natural__last New H138: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) > 64 New C3: true New C1: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 >= 1 New C2: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 <= 33554431 New C5: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx) )) <= 2147483648 -S- Applied substitution rule skein_512_up_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H10: fld_byte_count(fld_h(ctx)) >= 0 New H28: msg__index__subtype__1__last >= - 1 New H30: true New H63: fld_byte_count(fld_h(ctx__1)) >= 0 New H117: fld_byte_count(fld_h(ctx__2)) >= 0 New H46: fld_byte_count(fld_h(ctx)) <= 64 New H84: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) >= 0 New H90: fld_byte_count(fld_h(ctx)) <= 64 New H94: true -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H6: msg__index__subtype__1__last < 2147483647 New H11: fld_byte_count(fld_h(ctx)) <= 2147483647 New H29: msg__index__subtype__1__last <= 2147483646 New H31: true New H64: fld_byte_count(fld_h(ctx__1)) <= 2147483647 New H107: skein_512_block_bytes_index__last <= 2147483647 New H108: true New H118: fld_byte_count(fld_h(ctx__2)) <= 2147483647 New H7: msg__index__subtype__1__last <= 2147483582 New H47: - 2147483583 <= fld_byte_count(fld_h(ctx)) New H85: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) <= 2147483647 New H91: - 2147483583 <= fld_byte_count(fld_h(ctx)) New H95: true -S- Applied substitution rule skein_512_up_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. 
New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H69: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= 0 New H123: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= 0 -S- Applied substitution rule skein_512_up_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H70: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= 63 New H124: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= 63 -S- Applied substitution rule skein_512_up_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H72: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= 0 New H126: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= 0 -S- Applied substitution rule skein_512_up_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H73: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= 127 New H127: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= 127 -S- Applied substitution rule skein_512_up_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) New H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) New H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last) New H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_up_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. 
New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) New H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) New H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= 255) New H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H74: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= 0 New H128: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= 0 -S- Applied substitution rule skein_512_up_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H75: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= 65535 New H129: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= 65535 -S- Applied substitution rule skein_512_up_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H76: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= 0 New H130: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= 0 -S- Applied substitution rule skein_512_up_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H77: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= 4294967295 New H131: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= 4294967295 -S- Applied substitution rule skein_512_up_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) New H78: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= 0 New H116: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last) New H132: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= 0 -S- Applied substitution rule skein_512_up_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H79: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= 18446744073709551615 New H133: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= 18446744073709551615 New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) New H116: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H12: fld_hash_bit_len(fld_h(ctx)) >= 0 New H65: fld_hash_bit_len(fld_h(ctx__1)) >= 0 New H119: fld_hash_bit_len(fld_h(ctx__2)) >= 0 -S- Applied substitution rule skein_512_up_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H13: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H66: fld_hash_bit_len(fld_h(ctx__1)) <= 2147483640 New H120: fld_hash_bit_len(fld_h(ctx__2)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(87). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 New H100: fld_hash_bit_len(fld_h(ctx__1)) >= 1 New H109: fld_hash_bit_len(fld_h(ctx__2)) >= 1 -S- Applied substitution rule skein_512_up_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H101: fld_hash_bit_len(fld_h(ctx__1)) <= 2147483640 New H110: fld_hash_bit_len(fld_h(ctx__2)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__1), [ i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) New H116: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__2), [ i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) New H116: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(97). 
This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 New H59: fld_byte_count(fld_h(ctx__1)) >= 0 New H112: fld_byte_count(fld_h(ctx__2)) >= 0 -S- Applied substitution rule skein_512_up_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 New H60: fld_byte_count(fld_h(ctx__1)) <= 64 New H113: fld_byte_count(fld_h(ctx__2)) <= 64 -S- Applied substitution rule skein_512_up_rules(102). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H104: true New H38: fld_byte_count(fld_h(ctx)) <= 64 New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H61: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx__1), [ i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) New H93: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx__1), [ i___1]) and element(fld_b(ctx__1), [i___1]) <= 255) New H115: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx__2), [ i___2]) and element(fld_b(ctx__2), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H105: true New H106: true New H39: 1 <= fld_byte_count(fld_h(ctx)) New H55: fld_byte_count(fld_h(ctx)) + (64 - fld_byte_count(fld_h(ctx)) - 1) <= 63 New H107: true New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H61: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) New H93: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= 255) New H115: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= 255) *** Proved C3: true *** Proved C5: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) <= 2147483648 using hypothesis H85. --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H15 (true-hypothesis). --- Eliminated hypothesis H18 (true-hypothesis). --- Eliminated hypothesis H32 (true-hypothesis). --- Eliminated hypothesis H33 (true-hypothesis). --- Eliminated hypothesis H42 (true-hypothesis). --- Eliminated hypothesis H43 (true-hypothesis). --- Eliminated hypothesis H44 (true-hypothesis). --- Eliminated hypothesis H45 (true-hypothesis). --- Eliminated hypothesis H48 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H51 (true-hypothesis). --- Eliminated hypothesis H67 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H71 (true-hypothesis). --- Eliminated hypothesis H80 (true-hypothesis). --- Eliminated hypothesis H81 (true-hypothesis). --- Eliminated hypothesis H82 (true-hypothesis). --- Eliminated hypothesis H83 (true-hypothesis). --- Eliminated hypothesis H86 (true-hypothesis). --- Eliminated hypothesis H87 (true-hypothesis). --- Eliminated hypothesis H88 (true-hypothesis). 
--- Eliminated hypothesis H89 (true-hypothesis). --- Eliminated hypothesis H96 (true-hypothesis). --- Eliminated hypothesis H97 (true-hypothesis). --- Eliminated hypothesis H98 (true-hypothesis). --- Eliminated hypothesis H99 (true-hypothesis). --- Eliminated hypothesis H102 (true-hypothesis). --- Eliminated hypothesis H103 (true-hypothesis). --- Eliminated hypothesis H121 (true-hypothesis). --- Eliminated hypothesis H122 (true-hypothesis). --- Eliminated hypothesis H125 (true-hypothesis). --- Eliminated hypothesis H134 (true-hypothesis). --- Eliminated hypothesis H135 (true-hypothesis). --- Eliminated hypothesis H136 (true-hypothesis). --- Eliminated hypothesis H137 (true-hypothesis). --- Eliminated hypothesis H139 (true-hypothesis). --- Eliminated hypothesis H140 (true-hypothesis). --- Eliminated hypothesis H30 (true-hypothesis). --- Eliminated hypothesis H94 (true-hypothesis). --- Eliminated hypothesis H31 (true-hypothesis). --- Eliminated hypothesis H108 (true-hypothesis). --- Eliminated hypothesis H95 (true-hypothesis). --- Eliminated hypothesis H104 (true-hypothesis). --- Eliminated hypothesis H105 (true-hypothesis). --- Eliminated hypothesis H106 (true-hypothesis). --- Eliminated hypothesis H107 (true-hypothesis). --- Eliminated hypothesis H55 (true-hypothesis). --- Eliminated hypothesis H39 (duplicate of H37). --- Eliminated hypothesis H100 (duplicate of H56). --- Eliminated hypothesis H90 (duplicate of H46). --- Eliminated hypothesis H91 (duplicate of H47). --- Eliminated hypothesis H101 (duplicate of H66). --- Eliminated hypothesis H13 (duplicate of H2). --- Eliminated hypothesis H120 (duplicate of H110). --- Eliminated hypothesis H10 (duplicate of H3). --- Eliminated hypothesis H63 (duplicate of H59). --- Eliminated hypothesis H117 (duplicate of H112). --- Eliminated hypothesis H46 (duplicate of H4). --- Eliminated hypothesis H38 (duplicate of H4). --- Eliminated hypothesis H50 (duplicate of H1). --- Eliminated hypothesis H29 (duplicate of H6). --- Eliminated hypothesis H40 (duplicate of H36). --- Eliminated hypothesis H54 (duplicate of H41). --- Eliminated hypothesis H84 (duplicate of H41). --- Eliminated hypothesis H92 (duplicate of H58). --- Eliminated hypothesis H3 (redundant, given H37). --- Eliminated hypothesis H6 (redundant, given H7). --- Eliminated hypothesis H11 (redundant, given H4). --- Eliminated hypothesis H12 (redundant, given H1). --- Eliminated hypothesis H28 (redundant, given H53). --- Eliminated hypothesis H34 (redundant, given H36). --- Eliminated hypothesis H36 (redundant, given H138). --- Eliminated hypothesis H41 (redundant, given H138). --- Eliminated hypothesis H47 (redundant, given H37). --- Eliminated hypothesis H52 (redundant, given H5). --- Eliminated hypothesis H53 (redundant, given H4 & H138). --- Eliminated hypothesis H59 (redundant, given H58). --- Eliminated hypothesis H60 (redundant, given H58). --- Eliminated hypothesis H64 (redundant, given H58). --- Eliminated hypothesis H65 (redundant, given H56). --- Eliminated hypothesis H85 (redundant, given H35). --- Eliminated hypothesis H118 (redundant, given H113). --- Eliminated hypothesis H119 (redundant, given H109). -S- Substituted hypothesis H57. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( ctx__1)) by: fld_hash_bit_len(fld_h(ctx)). -S- Substituted hypothesis H114. This was achieved by replacing all occurrences of fld_byte_count(fld_h( ctx__2)) by: fld_byte_count(fld_h(ctx__1)). -S- Substituted hypothesis H111. 
This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( ctx__2)) by: fld_hash_bit_len(fld_h(ctx)). *** Proved C1: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 >= 1 using hypothesis H138. *** Proved C2: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 <= 33554431 using hypothesis H85. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_14. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H38 has been replaced by "true". (It is already present, as H28). --- Hypothesis H39 has been replaced by "true". (It is already present, as H29). --- Hypothesis H41 has been replaced by "true". (It is already present, as H28). --- Hypothesis H42 has been replaced by "true". (It is already present, as H29). %%% Simplified C1 on reading formula in, to give: %%% C1: msg__index__subtype__1__last div skein_512_block_bytes_c >= positive_block_512_count_t__first %%% Simplified C2 on reading formula in, to give: %%% C2: msg__index__subtype__1__last div skein_512_block_bytes_c <= positive_block_512_count_t__last %%% Simplified C4 on reading formula in, to give: %%% C4: msg__index__subtype__1__last >= integer__base__first %%% Simplified C5 on reading formula in, to give: %%% C5: msg__index__subtype__1__last <= integer__base__last -S- Applied substitution rule skein_512_up_rules(107). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New C1: msg__index__subtype__1__last div skein_512_block_bytes_c >= 1 -S- Applied substitution rule skein_512_up_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New C2: msg__index__subtype__1__last div skein_512_block_bytes_c <= 33554431 -S- Applied substitution rule skein_512_up_rules(5). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) >= - 2147483648 New C4: msg__index__subtype__1__last >= - 2147483648 -S- Applied substitution rule skein_512_up_rules(6). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= 2147483647 New C5: msg__index__subtype__1__last <= 2147483647 >>> Restructured hypothesis H37 into: >>> H37: fld_byte_count(fld_h(ctx)) <= 0 -S- Applied substitution rule skein_512_up_rules(1). 
This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H7: msg__index__subtype__1__last + 65 <= natural__last New H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) > 64 New H40: msg__index__subtype__1__last > 63 New C3: true New C1: msg__index__subtype__1__last div 64 >= 1 New C2: msg__index__subtype__1__last div 64 <= 33554431 -S- Applied substitution rule skein_512_up_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H10: fld_byte_count(fld_h(ctx)) >= 0 New H28: msg__index__subtype__1__last >= - 1 New H30: true -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H6: msg__index__subtype__1__last < 2147483647 New H11: fld_byte_count(fld_h(ctx)) <= 2147483647 New H29: msg__index__subtype__1__last <= 2147483646 New H31: true New H7: msg__index__subtype__1__last <= 2147483582 -S- Applied substitution rule skein_512_up_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule skein_512_up_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule skein_512_up_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_up_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) -S- Applied substitution rule skein_512_up_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule skein_512_up_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(66). 
This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule skein_512_up_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H12: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_up_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H13: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(87). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 -S- Applied substitution rule skein_512_up_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_up_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 -S- Applied substitution rule skein_512_up_rules(102). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) %%% Hypotheses H37 & H10 together imply that fld_byte_count(fld_h(ctx)) = 0. 
H37 & H10 have therefore been deleted and a new H43 added to this effect. *** Proved C4: msg__index__subtype__1__last >= - 2147483648 using hypothesis H40. *** Proved C5: msg__index__subtype__1__last <= 2147483647 using hypothesis H6. *** Proved C3: true --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H15 (true-hypothesis). --- Eliminated hypothesis H18 (true-hypothesis). --- Eliminated hypothesis H32 (true-hypothesis). --- Eliminated hypothesis H33 (true-hypothesis). --- Eliminated hypothesis H38 (true-hypothesis). --- Eliminated hypothesis H39 (true-hypothesis). --- Eliminated hypothesis H41 (true-hypothesis). --- Eliminated hypothesis H42 (true-hypothesis). --- Eliminated hypothesis H30 (true-hypothesis). --- Eliminated hypothesis H31 (true-hypothesis). --- Eliminated hypothesis H13 (duplicate of H2). --- Eliminated hypothesis H29 (duplicate of H6). --- Eliminated hypothesis H3 (redundant, given H43). --- Eliminated hypothesis H4 (redundant, given H43). --- Eliminated hypothesis H6 (redundant, given H7). --- Eliminated hypothesis H11 (redundant, given H43). --- Eliminated hypothesis H12 (redundant, given H1). --- Eliminated hypothesis H28 (redundant, given H40). --- Eliminated hypothesis H34 (redundant, given H36). *** Proved C1: msg__index__subtype__1__last div 64 >= 1 using hypothesis H40. *** Proved C2: msg__index__subtype__1__last div 64 <= 33554431 using hypothesis H7. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_15. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H42 has been replaced by "true". (It is already present, as H30). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H38). --- Hypothesis H45 has been replaced by "true". (It is already present, as H39). --- Hypothesis H48 has been replaced by "true". (It is already present, as H30). --- Hypothesis H49 has been replaced by "true". (It is already present, as H31). --- Hypothesis H51 has been replaced by "true". (It is already present, as H5) . 
%%% Simplified H54 on reading formula in, to give: %%% H54: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last %%% Simplified H61 on reading formula in, to give: %%% H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H62 on reading formula in, to give: %%% H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H80 has been replaced by "true". (It is already present, as H28). --- Hypothesis H81 has been replaced by "true". (It is already present, as H29). --- Hypothesis H82 has been replaced by "true". (It is already present, as H38). --- Hypothesis H83 has been replaced by "true". (It is already present, as H39). --- Hypothesis H86 has been replaced by "true". (It is already present, as H38). --- Hypothesis H87 has been replaced by "true". (It is already present, as H39). --- Hypothesis H88 has been replaced by "true". (It is already present, as H30). --- Hypothesis H89 has been replaced by "true". (It is already present, as H31). %%% Simplified H90 on reading formula in, to give: %%% H90: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H91 on reading formula in, to give: %%% H91: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H93 on reading formula in, to give: %%% H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H98 has been replaced by "true". (It is already present, as H30). --- Hypothesis H99 has been replaced by "true". (It is already present, as H31). --- Hypothesis H102 has been replaced by "true". (It is already present, as H59). --- Hypothesis H103 has been replaced by "true". (It is already present, as H60). %%% Simplified H105 on reading formula in, to give: %%% H105: 63 <= skein_512_block_bytes_index__last %%% Simplified H106 on reading formula in, to give: %%% H106: 63 <= skein_512_block_bytes_index__last %%% Simplified H108 on reading formula in, to give: %%% H108: 63 <= natural__last %%% Simplified H115 on reading formula in, to give: %%% H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H116 on reading formula in, to give: %%% H116: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H134 has been replaced by "true". (It is already present, as H30). --- Hypothesis H135 has been replaced by "true". (It is already present, as H31). --- Hypothesis H136 has been replaced by "true". (It is already present, as H84). --- Hypothesis H137 has been replaced by "true". (It is already present, as H85). 
--- Hypothesis H139 has been replaced by "true". (It is already present, as H84). --- Hypothesis H140 has been replaced by "true". (It is already present, as H85). --- Hypothesis H146 has been replaced by "true". (It is already present, as H90). --- Hypothesis H147 has been replaced by "true". (It is already present, as H91). --- Hypothesis H148 has been replaced by "true". (It is already present, as H141). --- Hypothesis H149 has been replaced by "true". (It is already present, as H142). %%% Simplified C5 on reading formula in, to give: %%% C5: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified C6 on reading formula in, to give: %%% C6: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified C7 on reading formula in, to give: %%% C7: fld_hash_bit_len(fld_h(ctx__2)) >= initialized_hash_bit_length__first %%% Simplified C8 on reading formula in, to give: %%% C8: fld_hash_bit_len(fld_h(ctx__2)) <= initialized_hash_bit_length__last %%% Simplified C9 on reading formula in, to give: %%% C9: 0 >= skein_512_block_bytes_count__first %%% Simplified C10 on reading formula in, to give: %%% C10: 0 <= skein_512_block_bytes_count__last %%% Simplified C12 on reading formula in, to give: %%% C12: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + (( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last %%% Simplified C13 on reading formula in, to give: %%% C13: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + 63 <= msg__index__subtype__1__last %%% Simplified C15 on reading formula in, to give: %%% C15: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last - 63 *** Proved C1: skein_512_block_bytes_c >= natural__first using hypothesis H94. *** Proved C2: skein_512_block_bytes_c <= natural__last using hypothesis H95. *** Proved C3: (msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c >= positive_block_512_count_t__first using hypothesis H141. *** Proved C4: (msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c <= positive_block_512_count_t__last using hypothesis H142. *** Proved C5: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first using hypothesis H46. *** Proved C6: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last using hypothesis H47. *** Proved C7: fld_hash_bit_len(fld_h(ctx__2)) >= initialized_hash_bit_length__first using hypothesis H109. *** Proved C8: fld_hash_bit_len(fld_h(ctx__2)) <= initialized_hash_bit_length__last using hypothesis H110. *** Proved C10: 0 <= skein_512_block_bytes_count__last using hypotheses H4 & H37. *** Proved C11: msg__index__subtype__1__first = 0 using hypothesis H5. *** Proved C14: msg__index__subtype__1__last <= natural__last using hypothesis H6. -S- Applied substitution rule skein_512_up_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 New H59: fld_byte_count(fld_h(ctx__1)) >= 0 New H112: fld_byte_count(fld_h(ctx__2)) >= 0 New C9: true *** Proved C9: true -S- Applied substitution rule skein_512_up_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. 
New H7: msg__index__subtype__1__last + 65 <= natural__last New H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) > 64 New H38: 64 - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first New H39: 64 - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last New H40: 64 - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 New H41: 64 - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 New H46: 64 - fld_byte_count(fld_h(ctx)) >= natural__first New H47: 64 - fld_byte_count(fld_h(ctx)) <= natural__last New H54: 64 - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last New H55: fld_byte_count(fld_h(ctx)) + (64 - fld_byte_count(fld_h(ctx)) - 1) <= skein_512_block_bytes_index__last New H58: fld_byte_count(fld_h(ctx__1)) = fld_byte_count(fld_h(ctx)) + (64 - fld_byte_count(fld_h(ctx))) New H84: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) >= natural__first New H85: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) <= natural__last New H90: 64 - fld_byte_count(fld_h(ctx)) >= natural__first New H91: 64 - fld_byte_count(fld_h(ctx)) <= natural__last New H92: fld_byte_count(fld_h(ctx__1)) = 64 New H94: 64 >= natural__first New H95: 64 <= natural__last New H138: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) > 64 New H141: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 >= positive_block_512_count_t__first New H142: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 <= positive_block_512_count_t__last New H143: true New H144: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1 >= integer__base__first New H145: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1 <= integer__base__last New C12: 64 - fld_byte_count(fld_h(ctx)) + ((msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) div 64 - 1) * 64 + 63 <= msg__index__subtype__1__last New C13: 64 - fld_byte_count(fld_h(ctx)) + 63 <= msg__index__subtype__1__last New C15: 64 - fld_byte_count(fld_h(ctx)) <= natural__last - 63 -S- Applied substitution rule skein_512_up_rules(5). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) >= - 2147483648 New H144: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) >= - 2147483647 -S- Applied substitution rule skein_512_up_rules(6). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= 2147483647 New H145: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) <= 2147483648 -S- Applied substitution rule skein_512_up_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H10: fld_byte_count(fld_h(ctx)) >= 0 New H28: msg__index__subtype__1__last >= - 1 New H30: true New H63: fld_byte_count(fld_h(ctx__1)) >= 0 New H117: fld_byte_count(fld_h(ctx__2)) >= 0 New H46: fld_byte_count(fld_h(ctx)) <= 64 New H84: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) >= 0 New H90: fld_byte_count(fld_h(ctx)) <= 64 New H94: true -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. 
New H6: msg__index__subtype__1__last < 2147483647 New H11: fld_byte_count(fld_h(ctx)) <= 2147483647 New H29: msg__index__subtype__1__last <= 2147483646 New H31: true New H64: fld_byte_count(fld_h(ctx__1)) <= 2147483647 New H107: skein_512_block_bytes_index__last <= 2147483647 New H108: true New H118: fld_byte_count(fld_h(ctx__2)) <= 2147483647 New H7: msg__index__subtype__1__last <= 2147483582 New H47: - 2147483583 <= fld_byte_count(fld_h(ctx)) New H85: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) <= 2147483647 New H91: - 2147483583 <= fld_byte_count(fld_h(ctx)) New H95: true New C15: - 2147483520 <= fld_byte_count(fld_h(ctx)) -S- Applied substitution rule skein_512_up_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H69: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= 0 New H123: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= 0 -S- Applied substitution rule skein_512_up_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H70: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= 63 New H124: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= 63 -S- Applied substitution rule skein_512_up_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H72: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= 0 New H126: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= 0 -S- Applied substitution rule skein_512_up_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H73: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= 127 New H127: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= 127 -S- Applied substitution rule skein_512_up_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) New H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) New H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last) New H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_up_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. 
New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) New H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) New H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= 255) New H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H74: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= 0 New H128: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= 0 -S- Applied substitution rule skein_512_up_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H75: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= 65535 New H129: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= 65535 -S- Applied substitution rule skein_512_up_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H76: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= 0 New H130: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= 0 -S- Applied substitution rule skein_512_up_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H77: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= 4294967295 New H131: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= 4294967295 -S- Applied substitution rule skein_512_up_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) New H78: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= 0 New H116: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last) New H132: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= 0 -S- Applied substitution rule skein_512_up_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H79: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= 18446744073709551615 New H133: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= 18446744073709551615 New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) New H116: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H12: fld_hash_bit_len(fld_h(ctx)) >= 0 New H65: fld_hash_bit_len(fld_h(ctx__1)) >= 0 New H119: fld_hash_bit_len(fld_h(ctx__2)) >= 0 -S- Applied substitution rule skein_512_up_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H13: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H66: fld_hash_bit_len(fld_h(ctx__1)) <= 2147483640 New H120: fld_hash_bit_len(fld_h(ctx__2)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(87). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 New H100: fld_hash_bit_len(fld_h(ctx__1)) >= 1 New H109: fld_hash_bit_len(fld_h(ctx__2)) >= 1 -S- Applied substitution rule skein_512_up_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H101: fld_hash_bit_len(fld_h(ctx__1)) <= 2147483640 New H110: fld_hash_bit_len(fld_h(ctx__2)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__1), [ i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) New H116: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__2), [ i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) New H116: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(98). 
This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 New H60: fld_byte_count(fld_h(ctx__1)) <= 64 New H113: fld_byte_count(fld_h(ctx__2)) <= 64 -S- Applied substitution rule skein_512_up_rules(102). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H104: true New H38: fld_byte_count(fld_h(ctx)) <= 64 New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H61: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx__1), [ i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) New H93: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx__1), [ i___1]) and element(fld_b(ctx__1), [i___1]) <= 255) New H115: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx__2), [ i___2]) and element(fld_b(ctx__2), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H105: true New H106: true New H39: 1 <= fld_byte_count(fld_h(ctx)) New H55: fld_byte_count(fld_h(ctx)) + (64 - fld_byte_count(fld_h(ctx)) - 1) <= 63 New H107: true New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H61: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) New H93: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= 255) New H115: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(107). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H96: true New H141: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 >= 1 -S- Applied substitution rule skein_512_up_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H97: true New H142: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 <= 33554431 *** Proved C15: - 2147483520 <= fld_byte_count(fld_h(ctx)) using hypothesis H37. --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H15 (true-hypothesis). --- Eliminated hypothesis H18 (true-hypothesis). --- Eliminated hypothesis H32 (true-hypothesis). --- Eliminated hypothesis H33 (true-hypothesis). --- Eliminated hypothesis H42 (true-hypothesis). --- Eliminated hypothesis H43 (true-hypothesis). --- Eliminated hypothesis H44 (true-hypothesis). --- Eliminated hypothesis H45 (true-hypothesis). --- Eliminated hypothesis H48 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H51 (true-hypothesis). --- Eliminated hypothesis H67 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H71 (true-hypothesis). --- Eliminated hypothesis H80 (true-hypothesis). --- Eliminated hypothesis H81 (true-hypothesis). --- Eliminated hypothesis H82 (true-hypothesis). 
--- Eliminated hypothesis H83 (true-hypothesis). --- Eliminated hypothesis H86 (true-hypothesis). --- Eliminated hypothesis H87 (true-hypothesis). --- Eliminated hypothesis H88 (true-hypothesis). --- Eliminated hypothesis H89 (true-hypothesis). --- Eliminated hypothesis H98 (true-hypothesis). --- Eliminated hypothesis H99 (true-hypothesis). --- Eliminated hypothesis H102 (true-hypothesis). --- Eliminated hypothesis H103 (true-hypothesis). --- Eliminated hypothesis H121 (true-hypothesis). --- Eliminated hypothesis H122 (true-hypothesis). --- Eliminated hypothesis H125 (true-hypothesis). --- Eliminated hypothesis H134 (true-hypothesis). --- Eliminated hypothesis H135 (true-hypothesis). --- Eliminated hypothesis H136 (true-hypothesis). --- Eliminated hypothesis H137 (true-hypothesis). --- Eliminated hypothesis H139 (true-hypothesis). --- Eliminated hypothesis H140 (true-hypothesis). --- Eliminated hypothesis H146 (true-hypothesis). --- Eliminated hypothesis H147 (true-hypothesis). --- Eliminated hypothesis H148 (true-hypothesis). --- Eliminated hypothesis H149 (true-hypothesis). --- Eliminated hypothesis H143 (true-hypothesis). --- Eliminated hypothesis H30 (true-hypothesis). --- Eliminated hypothesis H94 (true-hypothesis). --- Eliminated hypothesis H31 (true-hypothesis). --- Eliminated hypothesis H108 (true-hypothesis). --- Eliminated hypothesis H95 (true-hypothesis). --- Eliminated hypothesis H104 (true-hypothesis). --- Eliminated hypothesis H105 (true-hypothesis). --- Eliminated hypothesis H106 (true-hypothesis). --- Eliminated hypothesis H107 (true-hypothesis). --- Eliminated hypothesis H96 (true-hypothesis). --- Eliminated hypothesis H97 (true-hypothesis). --- Eliminated hypothesis H55 (true-hypothesis). --- Eliminated hypothesis H10 (duplicate of H3). --- Eliminated hypothesis H39 (duplicate of H37). --- Eliminated hypothesis H100 (duplicate of H56). --- Eliminated hypothesis H63 (duplicate of H59). --- Eliminated hypothesis H117 (duplicate of H112). --- Eliminated hypothesis H90 (duplicate of H46). --- Eliminated hypothesis H91 (duplicate of H47). --- Eliminated hypothesis H101 (duplicate of H66). --- Eliminated hypothesis H13 (duplicate of H2). --- Eliminated hypothesis H120 (duplicate of H110). --- Eliminated hypothesis H46 (duplicate of H4). --- Eliminated hypothesis H38 (duplicate of H4). --- Eliminated hypothesis H50 (duplicate of H1). --- Eliminated hypothesis H29 (duplicate of H6). --- Eliminated hypothesis H40 (duplicate of H36). --- Eliminated hypothesis H54 (duplicate of H41). --- Eliminated hypothesis H84 (duplicate of H41). --- Eliminated hypothesis H92 (duplicate of H58). --- Eliminated hypothesis H3 (redundant, given H37). --- Eliminated hypothesis H6 (redundant, given H7). --- Eliminated hypothesis H11 (redundant, given H4). --- Eliminated hypothesis H12 (redundant, given H1). --- Eliminated hypothesis H28 (redundant, given H53). --- Eliminated hypothesis H34 (redundant, given H36). --- Eliminated hypothesis H36 (redundant, given H138). --- Eliminated hypothesis H41 (redundant, given H138). --- Eliminated hypothesis H47 (redundant, given H37). --- Eliminated hypothesis H52 (redundant, given H5). --- Eliminated hypothesis H53 (redundant, given H4 & H138). --- Eliminated hypothesis H59 (redundant, given H58). --- Eliminated hypothesis H60 (redundant, given H58). --- Eliminated hypothesis H64 (redundant, given H58). --- Eliminated hypothesis H65 (redundant, given H56). --- Eliminated hypothesis H85 (redundant, given H35). 
--- Eliminated hypothesis H118 (redundant, given H113). --- Eliminated hypothesis H119 (redundant, given H109). --- Eliminated hypothesis H144 (redundant, given H138). --- Eliminated hypothesis H145 (redundant, given H35). -S- Substituted hypothesis H57. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( ctx__1)) by: fld_hash_bit_len(fld_h(ctx)). -S- Substituted hypothesis H114. This was achieved by replacing all occurrences of fld_byte_count(fld_h( ctx__2)) by: fld_byte_count(fld_h(ctx__1)). -S- Substituted hypothesis H111. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( ctx__2)) by: fld_hash_bit_len(fld_h(ctx)). +++ New H150: integer__size >= 0 +++ New H151: natural__size >= 0 +++ New H152: spark__unsigned__u6__size >= 0 +++ New H153: spark__unsigned__u7__size >= 0 +++ New H154: spark__unsigned__byte__size >= 0 +++ New H155: spark__unsigned__u16__size >= 0 +++ New H156: spark__unsigned__u32__size >= 0 +++ New H157: spark__unsigned__u64__size >= 0 +++ New H158: spark__crypto__word_count_t__size >= 0 +++ New H159: hash_bit_length__size >= 0 +++ New H160: initialized_hash_bit_length__size >= 0 +++ New H161: skein_512_state_words_index__size >= 0 +++ New H162: skein_512_block_bytes_count__size >= 0 +++ New H163: skein_512_block_bytes_index__size >= 0 +++ New H164: positive_block_512_count_t__size >= 0 +++ New H165: skein_512_context__size >= 0 +++ New H166: msg__index__subtype__1__first <= msg__index__subtype__1__last +++ New H167: context_header__size >= 0 +++ New H168: msg__index__subtype__1__first >= 0 +++ New H169: msg__index__subtype__1__last >= 0 +++ New H170: msg__index__subtype__1__last <= 2147483647 +++ New H171: msg__index__subtype__1__first <= 2147483647 @@@@@@@@@@ VC: procedure_skein_512_update_16. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H38 has been replaced by "true". (It is already present, as H28). --- Hypothesis H39 has been replaced by "true". (It is already present, as H29). --- Hypothesis H41 has been replaced by "true". (It is already present, as H28). --- Hypothesis H42 has been replaced by "true". (It is already present, as H29). 
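Aside: throughout these VCs the skein_512_up_rules substitutions replace named bounds by literal values (rules (8)/(9) presumably correspond to Standard.Natural, 0 .. 2147483647, and rules (5)/(6) to Integer'Base on a 32-bit target), and the for_all hypotheses such as H8, H9 and H27 simply state that every array element lies in its component range. The Ada sketch below is a hypothetical reconstruction of those bounds for orientation only; the actual declarations live in the Skein and SPARK unsigned-types sources elsewhere in this package and may differ in detail.

--  Hypothetical reconstruction (illustration only) of the bounds folded in
--  by the skein_512_up_rules substitutions; not the package's actual source.
package Skein_Bounds_Sketch is

   Skein_512_Block_Bytes_C : constant := 64;             --  rule (1)

   type U6   is range 0 .. 63;                           --  rules (41)/(42)
   type U7   is range 0 .. 127;                          --  rules (47)/(48)
   type Byte is range 0 .. 255;                          --  rules (53)/(54)
   type U16  is range 0 .. 65_535;                       --  rules (59)/(60)
   type U32  is range 0 .. 4_294_967_295;                --  rules (65)/(66)
   type U64  is mod 2 ** 64;                             --  rules (71)/(72): 0 .. 18446744073709551615

   type Hash_Bit_Length is range 0 .. 2_147_483_640;     --  rules (82)/(83)
   subtype Initialized_Hash_Bit_Length is                --  rules (87)/(88)
     Hash_Bit_Length range 1 .. Hash_Bit_Length'Last;

   type Skein_512_State_Words_Index is range 0 .. 7;     --  rules (92)/(93)
   type Skein_512_Block_Bytes_Count is range 0 .. 64;    --  rules (97)/(98)
   type Skein_512_Block_Bytes_Index is range 0 .. 63;    --  rules (102)/(103)
   type Positive_Block_512_Count_T  is range 1 .. 33_554_431;  --  rules (107)/(108)

   --  The for_all hypotheses (H8, H9, H27, ...) state that every element of
   --  arrays shaped like these lies in its component range.
   type Skein_512_State_Words is array (Skein_512_State_Words_Index) of U64;
   type Skein_512_Block_Bytes is array (Skein_512_Block_Bytes_Index) of Byte;

end Skein_Bounds_Sketch;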
%%% Simplified H43 on reading formula in, to give: %%% H43: msg__index__subtype__1__last div skein_512_block_bytes_c >= positive_block_512_count_t__first %%% Simplified H44 on reading formula in, to give: %%% H44: msg__index__subtype__1__last div skein_512_block_bytes_c <= positive_block_512_count_t__last %%% Simplified H46 on reading formula in, to give: %%% H46: msg__index__subtype__1__last >= integer__base__first %%% Simplified H47 on reading formula in, to give: %%% H47: msg__index__subtype__1__last <= integer__base__last --- Hypothesis H48 has been replaced by "true". (It is already present, as H30). --- Hypothesis H49 has been replaced by "true". (It is already present, as H31). --- Hypothesis H50 has been replaced by "true". (It is already present, as H43). --- Hypothesis H51 has been replaced by "true". (It is already present, as H44). %%% Simplified C3 on reading formula in, to give: %%% C3: msg__index__subtype__1__last div skein_512_block_bytes_c >= positive_block_512_count_t__first %%% Simplified C4 on reading formula in, to give: %%% C4: msg__index__subtype__1__last div skein_512_block_bytes_c <= positive_block_512_count_t__last %%% Simplified C12 on reading formula in, to give: %%% C12: (msg__index__subtype__1__last div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last %%% Simplified C13 on reading formula in, to give: %%% C13: 63 <= msg__index__subtype__1__last %%% Simplified C15 on reading formula in, to give: %%% C15: 63 <= natural__last *** Proved C2: skein_512_block_bytes_c <= natural__last using hypotheses H29, H36 & H37. *** Proved C3: msg__index__subtype__1__last div skein_512_block_bytes_c >= positive_block_512_count_t__first using hypothesis H43. *** Proved C4: msg__index__subtype__1__last div skein_512_block_bytes_c <= positive_block_512_count_t__last using hypothesis H44. *** Proved C5: 0 >= natural__first using hypothesis H30. *** Proved C6: 0 <= natural__last using hypothesis H31. *** Proved C7: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first using hypothesis H1. *** Proved C8: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last using hypothesis H2. *** Proved C9: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first using hypothesis H3. *** Proved C10: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last using hypothesis H4. *** Proved C11: msg__index__subtype__1__first = 0 using hypothesis H5. *** Proved C14: msg__index__subtype__1__last <= natural__last using hypothesis H6. -S- Applied substitution rule skein_512_up_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H7: msg__index__subtype__1__last + 65 <= natural__last New H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) > 64 New H40: msg__index__subtype__1__last > 63 New H43: msg__index__subtype__1__last div 64 >= positive_block_512_count_t__first New H44: msg__index__subtype__1__last div 64 <= positive_block_512_count_t__last New H45: true New C1: 64 >= natural__first New C12: (msg__index__subtype__1__last div 64 - 1) * 64 + 63 <= msg__index__subtype__1__last -S- Applied substitution rule skein_512_up_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H10: fld_byte_count(fld_h(ctx)) >= 0 New H28: msg__index__subtype__1__last >= - 1 New H30: true New C1: true -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. 
New H6: msg__index__subtype__1__last < 2147483647 New H11: fld_byte_count(fld_h(ctx)) <= 2147483647 New H29: msg__index__subtype__1__last <= 2147483646 New H31: true New H7: msg__index__subtype__1__last <= 2147483582 New C15: true *** Proved C13: 63 <= msg__index__subtype__1__last using hypothesis H40. *** Proved C1: true *** Proved C15: true >>> Restructured hypothesis H37 into: >>> H37: fld_byte_count(fld_h(ctx)) <= 0 -S- Applied substitution rule skein_512_up_rules(5). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) >= - 2147483648 New H46: msg__index__subtype__1__last >= - 2147483648 -S- Applied substitution rule skein_512_up_rules(6). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= 2147483647 New H47: msg__index__subtype__1__last <= 2147483647 -S- Applied substitution rule skein_512_up_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule skein_512_up_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule skein_512_up_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_up_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) -S- Applied substitution rule skein_512_up_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule skein_512_up_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(66). 
This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule skein_512_up_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H12: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_up_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H13: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(87). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 -S- Applied substitution rule skein_512_up_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_up_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 -S- Applied substitution rule skein_512_up_rules(102). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(107). 
This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H43: msg__index__subtype__1__last div 64 >= 1 -S- Applied substitution rule skein_512_up_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H44: msg__index__subtype__1__last div 64 <= 33554431 %%% Hypotheses H37 & H10 together imply that fld_byte_count(fld_h(ctx)) = 0. H37 & H10 have therefore been deleted and a new H52 added to this effect. --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H15 (true-hypothesis). --- Eliminated hypothesis H18 (true-hypothesis). --- Eliminated hypothesis H30 (true-hypothesis). --- Eliminated hypothesis H31 (true-hypothesis). --- Eliminated hypothesis H32 (true-hypothesis). --- Eliminated hypothesis H33 (true-hypothesis). --- Eliminated hypothesis H38 (true-hypothesis). --- Eliminated hypothesis H39 (true-hypothesis). --- Eliminated hypothesis H41 (true-hypothesis). --- Eliminated hypothesis H42 (true-hypothesis). --- Eliminated hypothesis H45 (true-hypothesis). --- Eliminated hypothesis H48 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H50 (true-hypothesis). --- Eliminated hypothesis H51 (true-hypothesis). --- Eliminated hypothesis H13 (duplicate of H2). --- Eliminated hypothesis H29 (duplicate of H6). --- Eliminated hypothesis H3 (redundant, given H52). --- Eliminated hypothesis H4 (redundant, given H52). --- Eliminated hypothesis H6 (redundant, given H7). --- Eliminated hypothesis H11 (redundant, given H52). --- Eliminated hypothesis H12 (redundant, given H1). --- Eliminated hypothesis H28 (redundant, given H40). --- Eliminated hypothesis H34 (redundant, given H36). --- Eliminated hypothesis H46 (redundant, given H40). --- Eliminated hypothesis H47 (redundant, given H7). *** Proved C12: (msg__index__subtype__1__last div 64 - 1) * 64 + 63 <= msg__index__subtype__1__last via its standard form, which is: Std.Fm C12: msg__index__subtype__1__last - 64 * ( msg__index__subtype__1__last div 64) > - 2 using hypothesis H40. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_17. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H42 has been replaced by "true". (It is already present, as H30). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H38). --- Hypothesis H45 has been replaced by "true". (It is already present, as H39). 
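Aside: the "standard form" step above is just a rearrangement of C12. Over the integers, (msg__index__subtype__1__last div 64 - 1) * 64 + 63 <= msg__index__subtype__1__last is the same as msg__index__subtype__1__last - 64 * (msg__index__subtype__1__last div 64) > - 2, and that left-hand side is the remainder after division by 64, which is non-negative because H40 gives msg__index__subtype__1__last > 63; hence H40 alone closes the goal. Similarly, the restructured H37 (fld_byte_count(fld_h(ctx)) <= 0) together with H10 (>= 0) yields the equality recorded just above. The small spot-check below (illustration only, outside the toolset; build with assertions enabled, e.g. GNAT -gnata) exercises that arithmetic.

with Ada.Text_IO; use Ada.Text_IO;

procedure Std_Form_Check is
   --  Spot-check (illustration only): for Last > 63,
   --    Last - 64 * (Last / 64) = Last mod 64, which lies in 0 .. 63,
   --  so the standard form "Last - 64 * (Last div 64) > -2" holds, and with
   --  it the original C12 "(Last div 64 - 1) * 64 + 63 <= Last".
   type Int_32 is range -2 ** 31 .. 2 ** 31 - 1;
begin
   for Last in Int_32 range 64 .. 10_000 loop
      pragma Assert (Last - 64 * (Last / 64) = Last mod 64);
      pragma Assert (Last - 64 * (Last / 64) > -2);
      pragma Assert ((Last / 64 - 1) * 64 + 63 <= Last);
      pragma Assert (Last / 64 >= 1 and Last / 64 <= 33_554_431);  --  cf. H43/H44
   end loop;
   Put_Line ("C12 standard-form spot-check passed for 64 .. 10000");
end Std_Form_Check;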
--- Hypothesis H48 has been replaced by "true". (It is already present, as H30). --- Hypothesis H49 has been replaced by "true". (It is already present, as H31). --- Hypothesis H51 has been replaced by "true". (It is already present, as H5) . %%% Simplified H54 on reading formula in, to give: %%% H54: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last %%% Simplified H61 on reading formula in, to give: %%% H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H62 on reading formula in, to give: %%% H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H80 has been replaced by "true". (It is already present, as H28). --- Hypothesis H81 has been replaced by "true". (It is already present, as H29). --- Hypothesis H82 has been replaced by "true". (It is already present, as H38). --- Hypothesis H83 has been replaced by "true". (It is already present, as H39). --- Hypothesis H86 has been replaced by "true". (It is already present, as H38). --- Hypothesis H87 has been replaced by "true". (It is already present, as H39). --- Hypothesis H88 has been replaced by "true". (It is already present, as H30). --- Hypothesis H89 has been replaced by "true". (It is already present, as H31). %%% Simplified H90 on reading formula in, to give: %%% H90: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H91 on reading formula in, to give: %%% H91: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H93 on reading formula in, to give: %%% H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H98 has been replaced by "true". (It is already present, as H30). --- Hypothesis H99 has been replaced by "true". (It is already present, as H31). --- Hypothesis H102 has been replaced by "true". (It is already present, as H59). --- Hypothesis H103 has been replaced by "true". (It is already present, as H60). %%% Simplified H105 on reading formula in, to give: %%% H105: 63 <= skein_512_block_bytes_index__last %%% Simplified H106 on reading formula in, to give: %%% H106: 63 <= skein_512_block_bytes_index__last %%% Simplified H108 on reading formula in, to give: %%% H108: 63 <= natural__last %%% Simplified H115 on reading formula in, to give: %%% H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H116 on reading formula in, to give: %%% H116: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H134 has been replaced by "true". (It is already present, as H30). 
--- Hypothesis H135 has been replaced by "true". (It is already present, as H31). --- Hypothesis H136 has been replaced by "true". (It is already present, as H84). --- Hypothesis H137 has been replaced by "true". (It is already present, as H85). --- Hypothesis H139 has been replaced by "true". (It is already present, as H84). --- Hypothesis H140 has been replaced by "true". (It is already present, as H85). --- Hypothesis H146 has been replaced by "true". (It is already present, as H90). --- Hypothesis H147 has been replaced by "true". (It is already present, as H91). --- Hypothesis H148 has been replaced by "true". (It is already present, as H141). --- Hypothesis H149 has been replaced by "true". (It is already present, as H142). --- Hypothesis H150 has been replaced by "true". (It is already present, as H94). --- Hypothesis H151 has been replaced by "true". (It is already present, as H95). --- Hypothesis H152 has been replaced by "true". (It is already present, as H141). --- Hypothesis H153 has been replaced by "true". (It is already present, as H142). --- Hypothesis H154 has been replaced by "true". (It is already present, as H90). --- Hypothesis H155 has been replaced by "true". (It is already present, as H91). %%% Simplified H156 on reading formula in, to give: %%% H156: fld_hash_bit_len(fld_h(ctx__2)) >= initialized_hash_bit_length__first %%% Simplified H157 on reading formula in, to give: %%% H157: fld_hash_bit_len(fld_h(ctx__2)) <= initialized_hash_bit_length__last %%% Simplified H158 on reading formula in, to give: %%% H158: 0 >= skein_512_block_bytes_count__first %%% Simplified H159 on reading formula in, to give: %%% H159: 0 <= skein_512_block_bytes_count__last --- Hypothesis H160 has been replaced by "true". (It is already present, as H5). %%% Simplified H161 on reading formula in, to give: %%% H161: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + (( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last %%% Simplified H162 on reading formula in, to give: %%% H162: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + 63 <= msg__index__subtype__1__last %%% Simplified H164 on reading formula in, to give: %%% H164: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last - 63 %%% Simplified H167 on reading formula in, to give: %%% H167: fld_hash_bit_len(fld_h(ctx__3)) = fld_hash_bit_len(fld_h( ctx__2)) %%% Simplified H170 on reading formula in, to give: %%% H170: fld_byte_count(fld_h(ctx__3)) = 0 %%% Simplified H171 on reading formula in, to give: %%% H171: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H172 on reading formula in, to give: %%% H172: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H190 has been replaced by "true". (It is already present, as H141). --- Hypothesis H191 has been replaced by "true". (It is already present, as H142). -S- Applied substitution rule skein_512_up_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. 
New H10: fld_byte_count(fld_h(ctx)) >= 0 New H28: msg__index__subtype__1__last >= - 1 New H30: true New H46: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= 0 New H63: fld_byte_count(fld_h(ctx__1)) >= 0 New H84: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) >= 0 New H90: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= 0 New H94: skein_512_block_bytes_c >= 0 New H117: fld_byte_count(fld_h(ctx__2)) >= 0 New H173: fld_byte_count(fld_h(ctx__3)) >= 0 New C1: (msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= 0 -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H6: msg__index__subtype__1__last < 2147483647 New H7: msg__index__subtype__1__last + skein_512_block_bytes_c <= 2147483646 New H11: fld_byte_count(fld_h(ctx)) <= 2147483647 New H29: msg__index__subtype__1__last <= 2147483646 New H31: true New H47: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= 2147483647 New H64: fld_byte_count(fld_h(ctx__1)) <= 2147483647 New H85: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) <= 2147483647 New H91: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= 2147483647 New H95: skein_512_block_bytes_c <= 2147483647 New H107: skein_512_block_bytes_index__last <= 2147483647 New H108: true New H118: fld_byte_count(fld_h(ctx__2)) <= 2147483647 New H163: msg__index__subtype__1__last <= 2147483647 New H164: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= 2147483584 New H174: fld_byte_count(fld_h(ctx__3)) <= 2147483647 New C2: (msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= 2147483647 -S- Applied substitution rule skein_512_up_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. 
New H7: msg__index__subtype__1__last <= 2147483582 New H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) > 64 New H38: 64 - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first New H39: 64 - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last New H40: 64 - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 New H41: 64 - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 New H46: fld_byte_count(fld_h(ctx)) <= 64 New H47: - 2147483583 <= fld_byte_count(fld_h(ctx)) New H54: 64 - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last New H55: fld_byte_count(fld_h(ctx)) + (64 - fld_byte_count(fld_h(ctx)) - 1) <= skein_512_block_bytes_index__last New H58: fld_byte_count(fld_h(ctx__1)) = fld_byte_count(fld_h(ctx)) + (64 - fld_byte_count(fld_h(ctx))) New H84: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) >= 0 New H85: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) <= 2147483647 New H90: fld_byte_count(fld_h(ctx)) <= 64 New H91: - 2147483583 <= fld_byte_count(fld_h(ctx)) New H92: fld_byte_count(fld_h(ctx__1)) = 64 New H94: true New H95: true New H138: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) > 64 New H141: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 >= positive_block_512_count_t__first New H142: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 <= positive_block_512_count_t__last New H143: true New H144: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1 >= integer__base__first New H145: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1 <= integer__base__last New H161: 64 - fld_byte_count(fld_h(ctx)) + (( msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) div 64 - 1) * 64 + 63 <= msg__index__subtype__1__last New H162: 64 - fld_byte_count(fld_h(ctx)) + 63 <= msg__index__subtype__1__last New H164: - 2147483520 <= fld_byte_count(fld_h(ctx)) New C1: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 * 64 >= 0 New C2: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 * 64 <= 2147483647 -S- Applied substitution rule skein_512_up_rules(5). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) >= - 2147483648 New H144: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) >= - 2147483647 -S- Applied substitution rule skein_512_up_rules(6). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= 2147483647 New H145: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) <= 2147483648 -S- Applied substitution rule skein_512_up_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H69: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= 0 New H123: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= 0 New H179: fld_field_type(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. 
New H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H70: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= 63 New H124: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= 63 New H180: fld_field_type(fld_tweak_words(fld_h(ctx__3))) <= 63 -S- Applied substitution rule skein_512_up_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H72: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= 0 New H126: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= 0 New H182: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H73: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= 127 New H127: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= 127 New H183: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) <= 127 -S- Applied substitution rule skein_512_up_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) New H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) New H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last) New H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last) New H171: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_up_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. 
New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) New H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) New H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= 255) New H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= 255) New H171: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H74: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= 0 New H128: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= 0 New H184: fld_reserved(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H75: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= 65535 New H129: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= 65535 New H185: fld_reserved(fld_tweak_words(fld_h(ctx__3))) <= 65535 -S- Applied substitution rule skein_512_up_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H76: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= 0 New H130: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= 0 New H186: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H77: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= 4294967295 New H131: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= 4294967295 New H187: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) <= 4294967295 -S- Applied substitution rule skein_512_up_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) New H78: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= 0 New H116: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last) New H132: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= 0 New H172: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last) New H188: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H79: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= 18446744073709551615 New H133: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= 18446744073709551615 New H189: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) <= 18446744073709551615 New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) New H116: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) New H172: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H12: fld_hash_bit_len(fld_h(ctx)) >= 0 New H65: fld_hash_bit_len(fld_h(ctx__1)) >= 0 New H119: fld_hash_bit_len(fld_h(ctx__2)) >= 0 New H175: fld_hash_bit_len(fld_h(ctx__3)) >= 0 -S- Applied substitution rule skein_512_up_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H13: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H66: fld_hash_bit_len(fld_h(ctx__1)) <= 2147483640 New H120: fld_hash_bit_len(fld_h(ctx__2)) <= 2147483640 New H176: fld_hash_bit_len(fld_h(ctx__3)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(87). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. 
New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 New H100: fld_hash_bit_len(fld_h(ctx__1)) >= 1 New H109: fld_hash_bit_len(fld_h(ctx__2)) >= 1 New H156: fld_hash_bit_len(fld_h(ctx__2)) >= 1 New H165: fld_hash_bit_len(fld_h(ctx__3)) >= 1 -S- Applied substitution rule skein_512_up_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H101: fld_hash_bit_len(fld_h(ctx__1)) <= 2147483640 New H110: fld_hash_bit_len(fld_h(ctx__2)) <= 2147483640 New H157: fld_hash_bit_len(fld_h(ctx__2)) <= 2147483640 New H166: fld_hash_bit_len(fld_h(ctx__3)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__1), [ i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) New H116: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__2), [ i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) New H172: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__3), [ i___1]) and element(fld_x(ctx__3), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) New H116: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) New H172: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 New H59: fld_byte_count(fld_h(ctx__1)) >= 0 New H112: fld_byte_count(fld_h(ctx__2)) >= 0 New H158: true New H168: fld_byte_count(fld_h(ctx__3)) >= 0 -S- Applied substitution rule skein_512_up_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 New H60: fld_byte_count(fld_h(ctx__1)) <= 64 New H113: fld_byte_count(fld_h(ctx__2)) <= 64 New H159: true New H169: fld_byte_count(fld_h(ctx__3)) <= 64 -S- Applied substitution rule skein_512_up_rules(102). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. 
New H104: true New H38: fld_byte_count(fld_h(ctx)) <= 64 New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H61: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx__1), [ i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) New H93: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx__1), [ i___1]) and element(fld_b(ctx__1), [i___1]) <= 255) New H115: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx__2), [ i___2]) and element(fld_b(ctx__2), [i___2]) <= 255) New H171: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx__3), [ i___2]) and element(fld_b(ctx__3), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H105: true New H106: true New H107: true New H39: 1 <= fld_byte_count(fld_h(ctx)) New H55: fld_byte_count(fld_h(ctx)) + (64 - fld_byte_count(fld_h(ctx)) - 1) <= 63 New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H61: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) New H93: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= 255) New H115: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= 255) New H171: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(107). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H96: true New H141: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 >= 1 -S- Applied substitution rule skein_512_up_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H97: true New H142: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 <= 33554431 *** Proved C1: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 * 64 >= 0 using hypothesis H141. *** Proved C2: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 * 64 <= 2147483647 using hypothesis H142. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_18. 
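The range-check conclusions discharged in this part of the log all follow the same pattern: once the whole-block count has been bounded by the range of positive_block_512_count_t (1 .. 33554431), its product with the 64-byte block size must lie within natural, because 33554431 * 64 = 2147483584 <= 2147483647. The fragment below is a minimal Ada sketch of that arithmetic only; the procedure and object names are illustrative and do not come from the Skein sources or the simplifier.

   procedure Block_Count_Bounds_Sketch is
      Block_Size : constant := 64;
      type Positive_Block_512_Count_T is range 1 .. 33_554_431;
      Q     : constant Positive_Block_512_Count_T :=
        Positive_Block_512_Count_T'Last;                --  worst case, as in H142
      Bytes : constant Natural := Natural (Q) * Block_Size;
   begin
      pragma Assert (Bytes >= 0);             --  C1: count * 64 >= natural__first
      pragma Assert (Bytes <= Natural'Last);  --  C2: count * 64 <= natural__last (2147483584 <= 2147483647)
      null;
   end Block_Count_Bounds_Sketch;

The lower bound needs only the quotient's positivity; the upper bound needs the 33554431 ceiling, which is why the simplifier cites H141 for C1 and H142 for C2.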
@@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H38 has been replaced by "true". (It is already present, as H28). --- Hypothesis H39 has been replaced by "true". (It is already present, as H29). --- Hypothesis H41 has been replaced by "true". (It is already present, as H28). --- Hypothesis H42 has been replaced by "true". (It is already present, as H29). %%% Simplified H43 on reading formula in, to give: %%% H43: msg__index__subtype__1__last div skein_512_block_bytes_c >= positive_block_512_count_t__first %%% Simplified H44 on reading formula in, to give: %%% H44: msg__index__subtype__1__last div skein_512_block_bytes_c <= positive_block_512_count_t__last %%% Simplified H46 on reading formula in, to give: %%% H46: msg__index__subtype__1__last >= integer__base__first %%% Simplified H47 on reading formula in, to give: %%% H47: msg__index__subtype__1__last <= integer__base__last --- Hypothesis H48 has been replaced by "true". (It is already present, as H30). --- Hypothesis H49 has been replaced by "true". (It is already present, as H31). --- Hypothesis H50 has been replaced by "true". (It is already present, as H43). --- Hypothesis H51 has been replaced by "true". (It is already present, as H44). --- Hypothesis H54 has been replaced by "true". (It is already present, as H43). --- Hypothesis H55 has been replaced by "true". (It is already present, as H44). --- Hypothesis H56 has been replaced by "true". (It is already present, as H30). --- Hypothesis H57 has been replaced by "true". (It is already present, as H31). --- Hypothesis H58 has been replaced by "true". (It is already present, as H1) . --- Hypothesis H59 has been replaced by "true". (It is already present, as H2) . --- Hypothesis H60 has been replaced by "true". (It is already present, as H3) . --- Hypothesis H61 has been replaced by "true". (It is already present, as H4) . --- Hypothesis H62 has been replaced by "true". (It is already present, as H5) . 
%%% Simplified H63 on reading formula in, to give: %%% H63: (msg__index__subtype__1__last div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last %%% Simplified H64 on reading formula in, to give: %%% H64: 63 <= msg__index__subtype__1__last %%% Simplified H66 on reading formula in, to give: %%% H66: 63 <= natural__last %%% Simplified H73 on reading formula in, to give: %%% H73: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H74 on reading formula in, to give: %%% H74: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H92 has been replaced by "true". (It is already present, as H43). --- Hypothesis H93 has been replaced by "true". (It is already present, as H44). %%% Simplified C1 on reading formula in, to give: %%% C1: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first %%% Simplified C2 on reading formula in, to give: %%% C2: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last -S- Applied substitution rule skein_512_up_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H10: fld_byte_count(fld_h(ctx)) >= 0 New H28: msg__index__subtype__1__last >= - 1 New H30: true New H52: skein_512_block_bytes_c >= 0 New H75: fld_byte_count(fld_h(ctx__3)) >= 0 New C1: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c >= 0 -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H6: msg__index__subtype__1__last < 2147483647 New H7: msg__index__subtype__1__last + skein_512_block_bytes_c <= 2147483646 New H11: fld_byte_count(fld_h(ctx)) <= 2147483647 New H29: msg__index__subtype__1__last <= 2147483646 New H31: true New H53: skein_512_block_bytes_c <= 2147483647 New H65: msg__index__subtype__1__last <= 2147483647 New H66: true New H76: fld_byte_count(fld_h(ctx__3)) <= 2147483647 New C2: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c <= 2147483647 >>> Restructured hypothesis H37 into: >>> H37: fld_byte_count(fld_h(ctx)) <= 0 -S- Applied substitution rule skein_512_up_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H7: msg__index__subtype__1__last <= 2147483582 New H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) > 64 New H40: msg__index__subtype__1__last > 63 New H43: msg__index__subtype__1__last div 64 >= positive_block_512_count_t__first New H44: msg__index__subtype__1__last div 64 <= positive_block_512_count_t__last New H45: true New H52: true New H53: true New H63: (msg__index__subtype__1__last div 64 - 1) * 64 + 63 <= msg__index__subtype__1__last New C1: msg__index__subtype__1__last div 64 * 64 >= 0 New C2: msg__index__subtype__1__last div 64 * 64 <= 2147483647 -S- Applied substitution rule skein_512_up_rules(5). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. 
New H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) >= - 2147483648 New H46: msg__index__subtype__1__last >= - 2147483648 -S- Applied substitution rule skein_512_up_rules(6). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= 2147483647 New H47: msg__index__subtype__1__last <= 2147483647 -S- Applied substitution rule skein_512_up_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H81: fld_field_type(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H82: fld_field_type(fld_tweak_words(fld_h(ctx__3))) <= 63 -S- Applied substitution rule skein_512_up_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H84: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H85: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) <= 127 -S- Applied substitution rule skein_512_up_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) New H73: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_up_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) New H73: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H86: fld_reserved(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. 
New H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H87: fld_reserved(fld_tweak_words(fld_h(ctx__3))) <= 65535 -S- Applied substitution rule skein_512_up_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H88: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H89: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) <= 4294967295 -S- Applied substitution rule skein_512_up_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H74: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last) New H90: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H91: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) <= 18446744073709551615 New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H74: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H12: fld_hash_bit_len(fld_h(ctx)) >= 0 New H77: fld_hash_bit_len(fld_h(ctx__3)) >= 0 -S- Applied substitution rule skein_512_up_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H13: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H78: fld_hash_bit_len(fld_h(ctx__3)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(87). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 New H67: fld_hash_bit_len(fld_h(ctx__3)) >= 1 -S- Applied substitution rule skein_512_up_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H68: fld_hash_bit_len(fld_h(ctx__3)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. 
New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H74: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__3), [ i___1]) and element(fld_x(ctx__3), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H74: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 New H70: fld_byte_count(fld_h(ctx__3)) >= 0 -S- Applied substitution rule skein_512_up_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 New H71: fld_byte_count(fld_h(ctx__3)) <= 64 -S- Applied substitution rule skein_512_up_rules(102). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H73: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx__3), [ i___2]) and element(fld_b(ctx__3), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H73: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(107). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H43: msg__index__subtype__1__last div 64 >= 1 -S- Applied substitution rule skein_512_up_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H44: msg__index__subtype__1__last div 64 <= 33554431 %%% Hypotheses H37 & H10 together imply that fld_byte_count(fld_h(ctx)) = 0. H37 & H10 have therefore been deleted and a new H94 added to this effect. *** Proved C1: msg__index__subtype__1__last div 64 * 64 >= 0 using hypothesis H43. *** Proved C2: msg__index__subtype__1__last div 64 * 64 <= 2147483647 using hypothesis H44. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_19. 
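In procedure_skein_512_update_18, proved just above, the notable step is the merge of H37 and H10: with fld_byte_count(fld_h(ctx)) known to be both >= 0 and <= 0, the simplifier replaces the pair with an equality to 0, after which the block count msg__index__subtype__1__last div 64 is already bounded by H43 and H44 and the same product argument discharges C1 and C2. The sketch below illustrates that squeeze under hypothetical names (Msg_Last, Byte_Count stand in for the FDL terms); it is not code from the toolset.

   procedure Zero_Byte_Count_Sketch (Msg_Last, Byte_Count : in Natural) is
      Blocks : Natural;
   begin
      if Byte_Count <= 0 then                    --  H37, the restructured guard
         pragma Assert (Byte_Count = 0);         --  squeeze with H10: Byte_Count >= 0
         Blocks := Msg_Last / 64;
         if Blocks in 1 .. 33_554_431 then       --  H43 / H44 after substitution
            pragma Assert (Blocks * 64 >= 0);                --  C1
            pragma Assert (Blocks * 64 <= 2_147_483_647);    --  C2
            null;
         end if;
      end if;
   end Zero_Byte_Count_Sketch;
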
@@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H42 has been replaced by "true". (It is already present, as H30). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H38). --- Hypothesis H45 has been replaced by "true". (It is already present, as H39). --- Hypothesis H48 has been replaced by "true". (It is already present, as H30). --- Hypothesis H49 has been replaced by "true". (It is already present, as H31). --- Hypothesis H51 has been replaced by "true". (It is already present, as H5) . %%% Simplified H54 on reading formula in, to give: %%% H54: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last %%% Simplified H61 on reading formula in, to give: %%% H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H62 on reading formula in, to give: %%% H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H80 has been replaced by "true". (It is already present, as H28). --- Hypothesis H81 has been replaced by "true". (It is already present, as H29). --- Hypothesis H82 has been replaced by "true". (It is already present, as H38). --- Hypothesis H83 has been replaced by "true". (It is already present, as H39). --- Hypothesis H86 has been replaced by "true". (It is already present, as H38). --- Hypothesis H87 has been replaced by "true". (It is already present, as H39). --- Hypothesis H88 has been replaced by "true". (It is already present, as H30). --- Hypothesis H89 has been replaced by "true". (It is already present, as H31). 
%%% Simplified H90 on reading formula in, to give: %%% H90: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H91 on reading formula in, to give: %%% H91: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H93 on reading formula in, to give: %%% H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H98 has been replaced by "true". (It is already present, as H30). --- Hypothesis H99 has been replaced by "true". (It is already present, as H31). --- Hypothesis H102 has been replaced by "true". (It is already present, as H59). --- Hypothesis H103 has been replaced by "true". (It is already present, as H60). %%% Simplified H105 on reading formula in, to give: %%% H105: 63 <= skein_512_block_bytes_index__last %%% Simplified H106 on reading formula in, to give: %%% H106: 63 <= skein_512_block_bytes_index__last %%% Simplified H108 on reading formula in, to give: %%% H108: 63 <= natural__last %%% Simplified H115 on reading formula in, to give: %%% H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H116 on reading formula in, to give: %%% H116: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H134 has been replaced by "true". (It is already present, as H30). --- Hypothesis H135 has been replaced by "true". (It is already present, as H31). --- Hypothesis H136 has been replaced by "true". (It is already present, as H84). --- Hypothesis H137 has been replaced by "true". (It is already present, as H85). --- Hypothesis H139 has been replaced by "true". (It is already present, as H84). --- Hypothesis H140 has been replaced by "true". (It is already present, as H85). --- Hypothesis H146 has been replaced by "true". (It is already present, as H90). --- Hypothesis H147 has been replaced by "true". (It is already present, as H91). --- Hypothesis H148 has been replaced by "true". (It is already present, as H141). --- Hypothesis H149 has been replaced by "true". (It is already present, as H142). --- Hypothesis H150 has been replaced by "true". (It is already present, as H94). --- Hypothesis H151 has been replaced by "true". (It is already present, as H95). --- Hypothesis H152 has been replaced by "true". (It is already present, as H141). --- Hypothesis H153 has been replaced by "true". (It is already present, as H142). --- Hypothesis H154 has been replaced by "true". (It is already present, as H90). --- Hypothesis H155 has been replaced by "true". (It is already present, as H91). 
%%% Simplified H156 on reading formula in, to give: %%% H156: fld_hash_bit_len(fld_h(ctx__2)) >= initialized_hash_bit_length__first %%% Simplified H157 on reading formula in, to give: %%% H157: fld_hash_bit_len(fld_h(ctx__2)) <= initialized_hash_bit_length__last %%% Simplified H158 on reading formula in, to give: %%% H158: 0 >= skein_512_block_bytes_count__first %%% Simplified H159 on reading formula in, to give: %%% H159: 0 <= skein_512_block_bytes_count__last --- Hypothesis H160 has been replaced by "true". (It is already present, as H5). %%% Simplified H161 on reading formula in, to give: %%% H161: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + (( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last %%% Simplified H162 on reading formula in, to give: %%% H162: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + 63 <= msg__index__subtype__1__last %%% Simplified H164 on reading formula in, to give: %%% H164: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last - 63 %%% Simplified H167 on reading formula in, to give: %%% H167: fld_hash_bit_len(fld_h(ctx__3)) = fld_hash_bit_len(fld_h( ctx__2)) %%% Simplified H170 on reading formula in, to give: %%% H170: fld_byte_count(fld_h(ctx__3)) = 0 %%% Simplified H171 on reading formula in, to give: %%% H171: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H172 on reading formula in, to give: %%% H172: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H190 has been replaced by "true". (It is already present, as H141). --- Hypothesis H191 has been replaced by "true". (It is already present, as H142). -S- Applied substitution rule skein_512_up_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. 
New H7: msg__index__subtype__1__last + 65 <= natural__last New H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) > 64 New H38: 64 - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first New H39: 64 - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last New H40: 64 - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 New H41: 64 - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 New H46: 64 - fld_byte_count(fld_h(ctx)) >= natural__first New H47: 64 - fld_byte_count(fld_h(ctx)) <= natural__last New H54: 64 - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last New H55: fld_byte_count(fld_h(ctx)) + (64 - fld_byte_count(fld_h(ctx)) - 1) <= skein_512_block_bytes_index__last New H58: fld_byte_count(fld_h(ctx__1)) = fld_byte_count(fld_h(ctx)) + (64 - fld_byte_count(fld_h(ctx))) New H84: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) >= natural__first New H85: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) <= natural__last New H90: 64 - fld_byte_count(fld_h(ctx)) >= natural__first New H91: 64 - fld_byte_count(fld_h(ctx)) <= natural__last New H92: fld_byte_count(fld_h(ctx__1)) = 64 New H94: 64 >= natural__first New H95: 64 <= natural__last New H138: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) > 64 New H141: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 >= positive_block_512_count_t__first New H142: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 <= positive_block_512_count_t__last New H143: true New H144: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1 >= integer__base__first New H145: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1 <= integer__base__last New H161: 64 - fld_byte_count(fld_h(ctx)) + (( msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) div 64 - 1) * 64 + 63 <= msg__index__subtype__1__last New H162: 64 - fld_byte_count(fld_h(ctx)) + 63 <= msg__index__subtype__1__last New H164: 64 - fld_byte_count(fld_h(ctx)) <= natural__last - 63 New H192: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 * 64 >= natural__first New H193: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 * 64 <= natural__last New C1: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 * 64 < msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) -S- Applied substitution rule skein_512_up_rules(5). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) >= - 2147483648 New H144: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) >= - 2147483647 -S- Applied substitution rule skein_512_up_rules(6). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= 2147483647 New H145: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) <= 2147483648 -S- Applied substitution rule skein_512_up_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. 
New H10: fld_byte_count(fld_h(ctx)) >= 0 New H28: msg__index__subtype__1__last >= - 1 New H30: true New H63: fld_byte_count(fld_h(ctx__1)) >= 0 New H117: fld_byte_count(fld_h(ctx__2)) >= 0 New H173: fld_byte_count(fld_h(ctx__3)) >= 0 New H46: fld_byte_count(fld_h(ctx)) <= 64 New H84: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) >= 0 New H90: fld_byte_count(fld_h(ctx)) <= 64 New H94: true New H192: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 * 64 >= 0 -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H6: msg__index__subtype__1__last < 2147483647 New H11: fld_byte_count(fld_h(ctx)) <= 2147483647 New H29: msg__index__subtype__1__last <= 2147483646 New H31: true New H64: fld_byte_count(fld_h(ctx__1)) <= 2147483647 New H107: skein_512_block_bytes_index__last <= 2147483647 New H108: true New H118: fld_byte_count(fld_h(ctx__2)) <= 2147483647 New H163: msg__index__subtype__1__last <= 2147483647 New H174: fld_byte_count(fld_h(ctx__3)) <= 2147483647 New H7: msg__index__subtype__1__last <= 2147483582 New H47: - 2147483583 <= fld_byte_count(fld_h(ctx)) New H85: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) <= 2147483647 New H91: - 2147483583 <= fld_byte_count(fld_h(ctx)) New H95: true New H164: - 2147483520 <= fld_byte_count(fld_h(ctx)) New H193: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 * 64 <= 2147483647 -S- Applied substitution rule skein_512_up_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H69: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= 0 New H123: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= 0 New H179: fld_field_type(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H70: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= 63 New H124: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= 63 New H180: fld_field_type(fld_tweak_words(fld_h(ctx__3))) <= 63 -S- Applied substitution rule skein_512_up_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H72: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= 0 New H126: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= 0 New H182: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H73: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= 127 New H127: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= 127 New H183: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) <= 127 -S- Applied substitution rule skein_512_up_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. 
New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) New H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) New H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last) New H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last) New H171: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_up_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) New H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) New H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= 255) New H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= 255) New H171: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H74: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= 0 New H128: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= 0 New H184: fld_reserved(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. 
New H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H75: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= 65535 New H129: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= 65535 New H185: fld_reserved(fld_tweak_words(fld_h(ctx__3))) <= 65535 -S- Applied substitution rule skein_512_up_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H76: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= 0 New H130: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= 0 New H186: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H77: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= 4294967295 New H131: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= 4294967295 New H187: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) <= 4294967295 -S- Applied substitution rule skein_512_up_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) New H78: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= 0 New H116: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last) New H132: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= 0 New H172: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last) New H188: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H79: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= 18446744073709551615 New H133: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= 18446744073709551615 New H189: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) <= 18446744073709551615 New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) New H116: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) New H172: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H12: fld_hash_bit_len(fld_h(ctx)) >= 0 New H65: fld_hash_bit_len(fld_h(ctx__1)) >= 0 New H119: fld_hash_bit_len(fld_h(ctx__2)) >= 0 New H175: fld_hash_bit_len(fld_h(ctx__3)) >= 0 -S- Applied substitution rule skein_512_up_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H13: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H66: fld_hash_bit_len(fld_h(ctx__1)) <= 2147483640 New H120: fld_hash_bit_len(fld_h(ctx__2)) <= 2147483640 New H176: fld_hash_bit_len(fld_h(ctx__3)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(87). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 New H100: fld_hash_bit_len(fld_h(ctx__1)) >= 1 New H109: fld_hash_bit_len(fld_h(ctx__2)) >= 1 New H156: fld_hash_bit_len(fld_h(ctx__2)) >= 1 New H165: fld_hash_bit_len(fld_h(ctx__3)) >= 1 -S- Applied substitution rule skein_512_up_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H101: fld_hash_bit_len(fld_h(ctx__1)) <= 2147483640 New H110: fld_hash_bit_len(fld_h(ctx__2)) <= 2147483640 New H157: fld_hash_bit_len(fld_h(ctx__2)) <= 2147483640 New H166: fld_hash_bit_len(fld_h(ctx__3)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. 
New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__1), [ i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) New H116: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__2), [ i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) New H172: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__3), [ i___1]) and element(fld_x(ctx__3), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) New H116: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) New H172: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 New H59: fld_byte_count(fld_h(ctx__1)) >= 0 New H112: fld_byte_count(fld_h(ctx__2)) >= 0 New H158: true New H168: fld_byte_count(fld_h(ctx__3)) >= 0 -S- Applied substitution rule skein_512_up_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 New H60: fld_byte_count(fld_h(ctx__1)) <= 64 New H113: fld_byte_count(fld_h(ctx__2)) <= 64 New H159: true New H169: fld_byte_count(fld_h(ctx__3)) <= 64 -S- Applied substitution rule skein_512_up_rules(102). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H104: true New H38: fld_byte_count(fld_h(ctx)) <= 64 New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H61: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx__1), [ i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) New H93: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx__1), [ i___1]) and element(fld_b(ctx__1), [i___1]) <= 255) New H115: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx__2), [ i___2]) and element(fld_b(ctx__2), [i___2]) <= 255) New H171: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx__3), [ i___2]) and element(fld_b(ctx__3), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. 
New H105: true New H106: true New H39: 1 <= fld_byte_count(fld_h(ctx)) New H55: fld_byte_count(fld_h(ctx)) + (64 - fld_byte_count(fld_h(ctx)) - 1) <= 63 New H107: true New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H61: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) New H93: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= 255) New H115: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= 255) New H171: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(107). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H96: true New H141: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 >= 1 -S- Applied substitution rule skein_512_up_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H97: true New H142: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 <= 33554431 --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H15 (true-hypothesis). --- Eliminated hypothesis H18 (true-hypothesis). --- Eliminated hypothesis H32 (true-hypothesis). --- Eliminated hypothesis H33 (true-hypothesis). --- Eliminated hypothesis H42 (true-hypothesis). --- Eliminated hypothesis H43 (true-hypothesis). --- Eliminated hypothesis H44 (true-hypothesis). --- Eliminated hypothesis H45 (true-hypothesis). --- Eliminated hypothesis H48 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H51 (true-hypothesis). --- Eliminated hypothesis H67 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H71 (true-hypothesis). --- Eliminated hypothesis H80 (true-hypothesis). --- Eliminated hypothesis H81 (true-hypothesis). --- Eliminated hypothesis H82 (true-hypothesis). --- Eliminated hypothesis H83 (true-hypothesis). --- Eliminated hypothesis H86 (true-hypothesis). --- Eliminated hypothesis H87 (true-hypothesis). --- Eliminated hypothesis H88 (true-hypothesis). --- Eliminated hypothesis H89 (true-hypothesis). --- Eliminated hypothesis H98 (true-hypothesis). --- Eliminated hypothesis H99 (true-hypothesis). --- Eliminated hypothesis H102 (true-hypothesis). --- Eliminated hypothesis H103 (true-hypothesis). --- Eliminated hypothesis H121 (true-hypothesis). --- Eliminated hypothesis H122 (true-hypothesis). --- Eliminated hypothesis H125 (true-hypothesis). --- Eliminated hypothesis H134 (true-hypothesis). --- Eliminated hypothesis H135 (true-hypothesis). --- Eliminated hypothesis H136 (true-hypothesis). --- Eliminated hypothesis H137 (true-hypothesis). --- Eliminated hypothesis H139 (true-hypothesis). --- Eliminated hypothesis H140 (true-hypothesis). --- Eliminated hypothesis H146 (true-hypothesis). --- Eliminated hypothesis H147 (true-hypothesis). --- Eliminated hypothesis H148 (true-hypothesis). --- Eliminated hypothesis H149 (true-hypothesis). --- Eliminated hypothesis H150 (true-hypothesis). --- Eliminated hypothesis H151 (true-hypothesis). --- Eliminated hypothesis H152 (true-hypothesis). 
--- Eliminated hypothesis H153 (true-hypothesis). --- Eliminated hypothesis H154 (true-hypothesis). --- Eliminated hypothesis H155 (true-hypothesis). --- Eliminated hypothesis H160 (true-hypothesis). --- Eliminated hypothesis H177 (true-hypothesis). --- Eliminated hypothesis H178 (true-hypothesis). --- Eliminated hypothesis H181 (true-hypothesis). --- Eliminated hypothesis H190 (true-hypothesis). --- Eliminated hypothesis H191 (true-hypothesis). --- Eliminated hypothesis H143 (true-hypothesis). --- Eliminated hypothesis H30 (true-hypothesis). --- Eliminated hypothesis H94 (true-hypothesis). --- Eliminated hypothesis H31 (true-hypothesis). --- Eliminated hypothesis H108 (true-hypothesis). --- Eliminated hypothesis H95 (true-hypothesis). --- Eliminated hypothesis H158 (true-hypothesis). --- Eliminated hypothesis H159 (true-hypothesis). --- Eliminated hypothesis H104 (true-hypothesis). --- Eliminated hypothesis H105 (true-hypothesis). --- Eliminated hypothesis H106 (true-hypothesis). --- Eliminated hypothesis H107 (true-hypothesis). --- Eliminated hypothesis H96 (true-hypothesis). --- Eliminated hypothesis H97 (true-hypothesis). --- Eliminated hypothesis H55 (true-hypothesis). --- Eliminated hypothesis H39 (duplicate of H37). --- Eliminated hypothesis H100 (duplicate of H56). --- Eliminated hypothesis H90 (duplicate of H46). --- Eliminated hypothesis H91 (duplicate of H47). --- Eliminated hypothesis H101 (duplicate of H66). --- Eliminated hypothesis H157 (duplicate of H120). --- Eliminated hypothesis H156 (duplicate of H109). --- Eliminated hypothesis H13 (duplicate of H2). --- Eliminated hypothesis H120 (duplicate of H110). --- Eliminated hypothesis H176 (duplicate of H166). --- Eliminated hypothesis H10 (duplicate of H3). --- Eliminated hypothesis H63 (duplicate of H59). --- Eliminated hypothesis H117 (duplicate of H112). --- Eliminated hypothesis H173 (duplicate of H168). --- Eliminated hypothesis H46 (duplicate of H4). --- Eliminated hypothesis H38 (duplicate of H4). --- Eliminated hypothesis H50 (duplicate of H1). --- Eliminated hypothesis H29 (duplicate of H6). --- Eliminated hypothesis H40 (duplicate of H36). --- Eliminated hypothesis H54 (duplicate of H41). --- Eliminated hypothesis H84 (duplicate of H41). --- Eliminated hypothesis H92 (duplicate of H58). --- Eliminated hypothesis H3 (redundant, given H37). --- Eliminated hypothesis H6 (redundant, given H7). --- Eliminated hypothesis H11 (redundant, given H4). --- Eliminated hypothesis H12 (redundant, given H1). --- Eliminated hypothesis H28 (redundant, given H53). --- Eliminated hypothesis H34 (redundant, given H36). --- Eliminated hypothesis H36 (redundant, given H138). --- Eliminated hypothesis H41 (redundant, given H138). --- Eliminated hypothesis H47 (redundant, given H37). --- Eliminated hypothesis H52 (redundant, given H5). --- Eliminated hypothesis H53 (redundant, given H4 & H138). --- Eliminated hypothesis H59 (redundant, given H58). --- Eliminated hypothesis H60 (redundant, given H58). --- Eliminated hypothesis H64 (redundant, given H58). --- Eliminated hypothesis H65 (redundant, given H56). --- Eliminated hypothesis H85 (redundant, given H35). --- Eliminated hypothesis H118 (redundant, given H113). --- Eliminated hypothesis H119 (redundant, given H109). --- Eliminated hypothesis H144 (redundant, given H138). --- Eliminated hypothesis H145 (redundant, given H35). --- Eliminated hypothesis H162 (redundant, given H138). --- Eliminated hypothesis H163 (redundant, given H7). --- Eliminated hypothesis H164 (redundant, given H37). 
--- Eliminated hypothesis H168 (redundant, given H170). --- Eliminated hypothesis H169 (redundant, given H170). --- Eliminated hypothesis H174 (redundant, given H170). --- Eliminated hypothesis H175 (redundant, given H165). -S- Substituted hypothesis H57. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( ctx__1)) by: fld_hash_bit_len(fld_h(ctx)). -S- Substituted hypothesis H114. This was achieved by replacing all occurrences of fld_byte_count(fld_h( ctx__2)) by: fld_byte_count(fld_h(ctx__1)). -S- Substituted hypothesis H167. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( ctx__3)) by: fld_hash_bit_len(fld_h(ctx__2)). -S- Substituted hypothesis H111. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( ctx__2)) by: fld_hash_bit_len(fld_h(ctx)). +++ New H194: integer__size >= 0 +++ New H195: natural__size >= 0 +++ New H196: spark__unsigned__u6__size >= 0 +++ New H197: spark__unsigned__u7__size >= 0 +++ New H198: spark__unsigned__byte__size >= 0 +++ New H199: spark__unsigned__u16__size >= 0 +++ New H200: spark__unsigned__u32__size >= 0 +++ New H201: spark__unsigned__u64__size >= 0 +++ New H202: spark__crypto__word_count_t__size >= 0 +++ New H203: hash_bit_length__size >= 0 +++ New H204: initialized_hash_bit_length__size >= 0 +++ New H205: skein_512_state_words_index__size >= 0 +++ New H206: skein_512_block_bytes_count__size >= 0 +++ New H207: skein_512_block_bytes_index__size >= 0 +++ New H208: positive_block_512_count_t__size >= 0 +++ New H209: skein_512_context__size >= 0 +++ New H210: msg__index__subtype__1__first <= msg__index__subtype__1__last +++ New H211: context_header__size >= 0 +++ New H212: msg__index__subtype__1__first >= 0 +++ New H213: msg__index__subtype__1__last >= 0 --- Attempted addition of new hypothesis: msg__index__subtype__1__last <= 2147483647 eliminated: this already exists (as H163). +++ New H163: msg__index__subtype__1__last <= 2147483647 +++ New H214: msg__index__subtype__1__first <= 2147483647 @@@@@@@@@@ VC: procedure_skein_512_update_20. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H38 has been replaced by "true". (It is already present, as H28). --- Hypothesis H39 has been replaced by "true". (It is already present, as H29). --- Hypothesis H41 has been replaced by "true". (It is already present, as H28). --- Hypothesis H42 has been replaced by "true". (It is already present, as H29). 
%%% Simplified H43 on reading formula in, to give: %%% H43: msg__index__subtype__1__last div skein_512_block_bytes_c >= positive_block_512_count_t__first %%% Simplified H44 on reading formula in, to give: %%% H44: msg__index__subtype__1__last div skein_512_block_bytes_c <= positive_block_512_count_t__last %%% Simplified H46 on reading formula in, to give: %%% H46: msg__index__subtype__1__last >= integer__base__first %%% Simplified H47 on reading formula in, to give: %%% H47: msg__index__subtype__1__last <= integer__base__last --- Hypothesis H48 has been replaced by "true". (It is already present, as H30). --- Hypothesis H49 has been replaced by "true". (It is already present, as H31). --- Hypothesis H50 has been replaced by "true". (It is already present, as H43). --- Hypothesis H51 has been replaced by "true". (It is already present, as H44). --- Hypothesis H54 has been replaced by "true". (It is already present, as H43). --- Hypothesis H55 has been replaced by "true". (It is already present, as H44). --- Hypothesis H56 has been replaced by "true". (It is already present, as H30). --- Hypothesis H57 has been replaced by "true". (It is already present, as H31). --- Hypothesis H58 has been replaced by "true". (It is already present, as H1) . --- Hypothesis H59 has been replaced by "true". (It is already present, as H2) . --- Hypothesis H60 has been replaced by "true". (It is already present, as H3) . --- Hypothesis H61 has been replaced by "true". (It is already present, as H4) . --- Hypothesis H62 has been replaced by "true". (It is already present, as H5) . %%% Simplified H63 on reading formula in, to give: %%% H63: (msg__index__subtype__1__last div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last %%% Simplified H64 on reading formula in, to give: %%% H64: 63 <= msg__index__subtype__1__last %%% Simplified H66 on reading formula in, to give: %%% H66: 63 <= natural__last %%% Simplified H73 on reading formula in, to give: %%% H73: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H74 on reading formula in, to give: %%% H74: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H92 has been replaced by "true". (It is already present, as H43). --- Hypothesis H93 has been replaced by "true". (It is already present, as H44). %%% Simplified H94 on reading formula in, to give: %%% H94: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first %%% Simplified H95 on reading formula in, to give: %%% H95: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last %%% Simplified C1 on reading formula in, to give: %%% C1: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c < msg__index__subtype__1__last + 1 >>> Restructured hypothesis H37 into: >>> H37: fld_byte_count(fld_h(ctx)) <= 0 -S- Applied substitution rule skein_512_up_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. 
New H7: msg__index__subtype__1__last + 65 <= natural__last New H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) > 64 New H40: msg__index__subtype__1__last > 63 New H43: msg__index__subtype__1__last div 64 >= positive_block_512_count_t__first New H44: msg__index__subtype__1__last div 64 <= positive_block_512_count_t__last New H45: true New H52: 64 >= natural__first New H53: 64 <= natural__last New H63: (msg__index__subtype__1__last div 64 - 1) * 64 + 63 <= msg__index__subtype__1__last New H94: msg__index__subtype__1__last div 64 * 64 >= natural__first New H95: msg__index__subtype__1__last div 64 * 64 <= natural__last New C1: msg__index__subtype__1__last div 64 * 64 < msg__index__subtype__1__last + 1 -S- Applied substitution rule skein_512_up_rules(5). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) >= - 2147483648 New H46: msg__index__subtype__1__last >= - 2147483648 -S- Applied substitution rule skein_512_up_rules(6). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= 2147483647 New H47: msg__index__subtype__1__last <= 2147483647 -S- Applied substitution rule skein_512_up_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H10: fld_byte_count(fld_h(ctx)) >= 0 New H28: msg__index__subtype__1__last >= - 1 New H30: true New H75: fld_byte_count(fld_h(ctx__3)) >= 0 New H52: true New H94: msg__index__subtype__1__last div 64 * 64 >= 0 -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H6: msg__index__subtype__1__last < 2147483647 New H11: fld_byte_count(fld_h(ctx)) <= 2147483647 New H29: msg__index__subtype__1__last <= 2147483646 New H31: true New H65: msg__index__subtype__1__last <= 2147483647 New H66: true New H76: fld_byte_count(fld_h(ctx__3)) <= 2147483647 New H7: msg__index__subtype__1__last <= 2147483582 New H53: true New H95: msg__index__subtype__1__last div 64 * 64 <= 2147483647 -S- Applied substitution rule skein_512_up_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H81: fld_field_type(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H82: fld_field_type(fld_tweak_words(fld_h(ctx__3))) <= 63 -S- Applied substitution rule skein_512_up_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H84: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H85: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) <= 127 -S- Applied substitution rule skein_512_up_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. 
New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) New H73: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_up_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) New H73: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H86: fld_reserved(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H87: fld_reserved(fld_tweak_words(fld_h(ctx__3))) <= 65535 -S- Applied substitution rule skein_512_up_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H88: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H89: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) <= 4294967295 -S- Applied substitution rule skein_512_up_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H74: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last) New H90: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H91: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) <= 18446744073709551615 New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H74: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H12: fld_hash_bit_len(fld_h(ctx)) >= 0 New H77: fld_hash_bit_len(fld_h(ctx__3)) >= 0 -S- Applied substitution rule skein_512_up_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H13: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H78: fld_hash_bit_len(fld_h(ctx__3)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(87). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 New H67: fld_hash_bit_len(fld_h(ctx__3)) >= 1 -S- Applied substitution rule skein_512_up_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H68: fld_hash_bit_len(fld_h(ctx__3)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H74: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__3), [ i___1]) and element(fld_x(ctx__3), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H74: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 New H70: fld_byte_count(fld_h(ctx__3)) >= 0 -S- Applied substitution rule skein_512_up_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 New H71: fld_byte_count(fld_h(ctx__3)) <= 64 -S- Applied substitution rule skein_512_up_rules(102). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. 
New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H73: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx__3), [ i___2]) and element(fld_b(ctx__3), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H73: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(107). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H43: msg__index__subtype__1__last div 64 >= 1 -S- Applied substitution rule skein_512_up_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H44: msg__index__subtype__1__last div 64 <= 33554431 %%% Hypotheses H37 & H10 together imply that fld_byte_count(fld_h(ctx)) = 0. H37 & H10 have therefore been deleted and a new H96 added to this effect. --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H15 (true-hypothesis). --- Eliminated hypothesis H18 (true-hypothesis). --- Eliminated hypothesis H32 (true-hypothesis). --- Eliminated hypothesis H33 (true-hypothesis). --- Eliminated hypothesis H38 (true-hypothesis). --- Eliminated hypothesis H39 (true-hypothesis). --- Eliminated hypothesis H41 (true-hypothesis). --- Eliminated hypothesis H42 (true-hypothesis). --- Eliminated hypothesis H48 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H50 (true-hypothesis). --- Eliminated hypothesis H51 (true-hypothesis). --- Eliminated hypothesis H54 (true-hypothesis). --- Eliminated hypothesis H55 (true-hypothesis). --- Eliminated hypothesis H56 (true-hypothesis). --- Eliminated hypothesis H57 (true-hypothesis). --- Eliminated hypothesis H58 (true-hypothesis). --- Eliminated hypothesis H59 (true-hypothesis). --- Eliminated hypothesis H60 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H79 (true-hypothesis). --- Eliminated hypothesis H80 (true-hypothesis). --- Eliminated hypothesis H83 (true-hypothesis). --- Eliminated hypothesis H92 (true-hypothesis). --- Eliminated hypothesis H93 (true-hypothesis). --- Eliminated hypothesis H45 (true-hypothesis). --- Eliminated hypothesis H30 (true-hypothesis). --- Eliminated hypothesis H52 (true-hypothesis). --- Eliminated hypothesis H31 (true-hypothesis). --- Eliminated hypothesis H66 (true-hypothesis). --- Eliminated hypothesis H53 (true-hypothesis). --- Eliminated hypothesis H65 (duplicate of H47). --- Eliminated hypothesis H13 (duplicate of H2). --- Eliminated hypothesis H78 (duplicate of H68). --- Eliminated hypothesis H75 (duplicate of H70). --- Eliminated hypothesis H29 (duplicate of H6). --- Eliminated hypothesis H3 (redundant, given H96). --- Eliminated hypothesis H4 (redundant, given H96). --- Eliminated hypothesis H6 (redundant, given H7). --- Eliminated hypothesis H11 (redundant, given H96). --- Eliminated hypothesis H12 (redundant, given H1). --- Eliminated hypothesis H28 (redundant, given H40). 
--- Eliminated hypothesis H34 (redundant, given H36). --- Eliminated hypothesis H44 (redundant, given H95). --- Eliminated hypothesis H46 (redundant, given H40). --- Eliminated hypothesis H47 (redundant, given H7). --- Eliminated hypothesis H64 (redundant, given H40). --- Eliminated hypothesis H76 (redundant, given H71). --- Eliminated hypothesis H77 (redundant, given H67). --- Eliminated hypothesis H94 (redundant, given H43). --- Eliminated hypothesis H95 (redundant, given H7 & H63). -S- Substituted hypothesis H69. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( ctx__3)) by: fld_hash_bit_len(fld_h(ctx)). -S- Substituted hypothesis H72. This was achieved by replacing all occurrences of fld_byte_count(fld_h( ctx__3)) by: fld_byte_count(fld_h(ctx)). *** Proved C1: msg__index__subtype__1__last div 64 * 64 < msg__index__subtype__1__last + 1 via its standard form, which is: Std.Fm C1: msg__index__subtype__1__last - 64 * ( msg__index__subtype__1__last div 64) > - 1 using hypothesis H40. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_21. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H42 has been replaced by "true". (It is already present, as H30). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H38). --- Hypothesis H45 has been replaced by "true". (It is already present, as H39). --- Hypothesis H48 has been replaced by "true". (It is already present, as H30). --- Hypothesis H49 has been replaced by "true". (It is already present, as H31). --- Hypothesis H51 has been replaced by "true". (It is already present, as H5) . %%% Simplified H54 on reading formula in, to give: %%% H54: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last %%% Simplified H61 on reading formula in, to give: %%% H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H62 on reading formula in, to give: %%% H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H80 has been replaced by "true". 
(It is already present, as H28). --- Hypothesis H81 has been replaced by "true". (It is already present, as H29). --- Hypothesis H82 has been replaced by "true". (It is already present, as H38). --- Hypothesis H83 has been replaced by "true". (It is already present, as H39). --- Hypothesis H86 has been replaced by "true". (It is already present, as H38). --- Hypothesis H87 has been replaced by "true". (It is already present, as H39). --- Hypothesis H88 has been replaced by "true". (It is already present, as H30). --- Hypothesis H89 has been replaced by "true". (It is already present, as H31). %%% Simplified H90 on reading formula in, to give: %%% H90: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H91 on reading formula in, to give: %%% H91: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H93 on reading formula in, to give: %%% H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H98 has been replaced by "true". (It is already present, as H30). --- Hypothesis H99 has been replaced by "true". (It is already present, as H31). --- Hypothesis H102 has been replaced by "true". (It is already present, as H59). --- Hypothesis H103 has been replaced by "true". (It is already present, as H60). %%% Simplified H105 on reading formula in, to give: %%% H105: 63 <= skein_512_block_bytes_index__last %%% Simplified H106 on reading formula in, to give: %%% H106: 63 <= skein_512_block_bytes_index__last %%% Simplified H108 on reading formula in, to give: %%% H108: 63 <= natural__last %%% Simplified H115 on reading formula in, to give: %%% H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H116 on reading formula in, to give: %%% H116: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H134 has been replaced by "true". (It is already present, as H30). --- Hypothesis H135 has been replaced by "true". (It is already present, as H31). --- Hypothesis H136 has been replaced by "true". (It is already present, as H84). --- Hypothesis H137 has been replaced by "true". (It is already present, as H85). --- Hypothesis H139 has been replaced by "true". (It is already present, as H84). --- Hypothesis H140 has been replaced by "true". (It is already present, as H85). --- Hypothesis H146 has been replaced by "true". (It is already present, as H90). --- Hypothesis H147 has been replaced by "true". (It is already present, as H91). --- Hypothesis H148 has been replaced by "true". (It is already present, as H141). --- Hypothesis H149 has been replaced by "true". (It is already present, as H142). --- Hypothesis H150 has been replaced by "true". (It is already present, as H94). --- Hypothesis H151 has been replaced by "true". (It is already present, as H95). --- Hypothesis H152 has been replaced by "true". (It is already present, as H141). --- Hypothesis H153 has been replaced by "true". 
(It is already present, as H142). --- Hypothesis H154 has been replaced by "true". (It is already present, as H90). --- Hypothesis H155 has been replaced by "true". (It is already present, as H91). %%% Simplified H156 on reading formula in, to give: %%% H156: fld_hash_bit_len(fld_h(ctx__2)) >= initialized_hash_bit_length__first %%% Simplified H157 on reading formula in, to give: %%% H157: fld_hash_bit_len(fld_h(ctx__2)) <= initialized_hash_bit_length__last %%% Simplified H158 on reading formula in, to give: %%% H158: 0 >= skein_512_block_bytes_count__first %%% Simplified H159 on reading formula in, to give: %%% H159: 0 <= skein_512_block_bytes_count__last --- Hypothesis H160 has been replaced by "true". (It is already present, as H5). %%% Simplified H161 on reading formula in, to give: %%% H161: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + (( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last %%% Simplified H162 on reading formula in, to give: %%% H162: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + 63 <= msg__index__subtype__1__last %%% Simplified H164 on reading formula in, to give: %%% H164: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last - 63 %%% Simplified H167 on reading formula in, to give: %%% H167: fld_hash_bit_len(fld_h(ctx__3)) = fld_hash_bit_len(fld_h( ctx__2)) %%% Simplified H170 on reading formula in, to give: %%% H170: fld_byte_count(fld_h(ctx__3)) = 0 %%% Simplified H171 on reading formula in, to give: %%% H171: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H172 on reading formula in, to give: %%% H172: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H190 has been replaced by "true". (It is already present, as H141). --- Hypothesis H191 has been replaced by "true". (It is already present, as H142). --- Hypothesis H195 has been replaced by "true". (It is already present, as H84). --- Hypothesis H196 has been replaced by "true". (It is already present, as H85). --- Hypothesis H197 has been replaced by "true". (It is already present, as H192). --- Hypothesis H198 has been replaced by "true". (It is already present, as H193). -S- Applied substitution rule skein_512_up_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. 
New H10: fld_byte_count(fld_h(ctx)) >= 0 New H28: msg__index__subtype__1__last >= - 1 New H30: true New H46: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= 0 New H63: fld_byte_count(fld_h(ctx__1)) >= 0 New H84: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) >= 0 New H90: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= 0 New H94: skein_512_block_bytes_c >= 0 New H117: fld_byte_count(fld_h(ctx__2)) >= 0 New H173: fld_byte_count(fld_h(ctx__3)) >= 0 New H192: (msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= 0 New C1: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= 0 -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H6: msg__index__subtype__1__last < 2147483647 New H7: msg__index__subtype__1__last + skein_512_block_bytes_c <= 2147483646 New H11: fld_byte_count(fld_h(ctx)) <= 2147483647 New H29: msg__index__subtype__1__last <= 2147483646 New H31: true New H47: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= 2147483647 New H64: fld_byte_count(fld_h(ctx__1)) <= 2147483647 New H85: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) <= 2147483647 New H91: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= 2147483647 New H95: skein_512_block_bytes_c <= 2147483647 New H107: skein_512_block_bytes_index__last <= 2147483647 New H108: true New H118: fld_byte_count(fld_h(ctx__2)) <= 2147483647 New H163: msg__index__subtype__1__last <= 2147483647 New H164: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= 2147483584 New H174: fld_byte_count(fld_h(ctx__3)) <= 2147483647 New H193: (msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= 2147483647 New C2: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= 2147483647 *** Proved C1: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= 0 using hypothesis H194. *** Proved C2: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= 2147483647 using hypotheses H85 & H192. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_22. 
@@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H38 has been replaced by "true". (It is already present, as H28). --- Hypothesis H39 has been replaced by "true". (It is already present, as H29). --- Hypothesis H41 has been replaced by "true". (It is already present, as H28). --- Hypothesis H42 has been replaced by "true". (It is already present, as H29). %%% Simplified H43 on reading formula in, to give: %%% H43: msg__index__subtype__1__last div skein_512_block_bytes_c >= positive_block_512_count_t__first %%% Simplified H44 on reading formula in, to give: %%% H44: msg__index__subtype__1__last div skein_512_block_bytes_c <= positive_block_512_count_t__last %%% Simplified H46 on reading formula in, to give: %%% H46: msg__index__subtype__1__last >= integer__base__first %%% Simplified H47 on reading formula in, to give: %%% H47: msg__index__subtype__1__last <= integer__base__last --- Hypothesis H48 has been replaced by "true". (It is already present, as H30). --- Hypothesis H49 has been replaced by "true". (It is already present, as H31). --- Hypothesis H50 has been replaced by "true". (It is already present, as H43). --- Hypothesis H51 has been replaced by "true". (It is already present, as H44). --- Hypothesis H54 has been replaced by "true". (It is already present, as H43). --- Hypothesis H55 has been replaced by "true". (It is already present, as H44). --- Hypothesis H56 has been replaced by "true". (It is already present, as H30). --- Hypothesis H57 has been replaced by "true". (It is already present, as H31). --- Hypothesis H58 has been replaced by "true". (It is already present, as H1) . --- Hypothesis H59 has been replaced by "true". (It is already present, as H2) . --- Hypothesis H60 has been replaced by "true". (It is already present, as H3) . --- Hypothesis H61 has been replaced by "true". (It is already present, as H4) . --- Hypothesis H62 has been replaced by "true". (It is already present, as H5) . 
%%% Simplified H63 on reading formula in, to give: %%% H63: (msg__index__subtype__1__last div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last %%% Simplified H64 on reading formula in, to give: %%% H64: 63 <= msg__index__subtype__1__last %%% Simplified H66 on reading formula in, to give: %%% H66: 63 <= natural__last %%% Simplified H73 on reading formula in, to give: %%% H73: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H74 on reading formula in, to give: %%% H74: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H92 has been replaced by "true". (It is already present, as H43). --- Hypothesis H93 has been replaced by "true". (It is already present, as H44). %%% Simplified H94 on reading formula in, to give: %%% H94: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first %%% Simplified H95 on reading formula in, to give: %%% H95: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last %%% Simplified H96 on reading formula in, to give: %%% H96: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c < msg__index__subtype__1__last + 1 --- Hypothesis H97 has been replaced by "true". (It is already present, as H28). --- Hypothesis H98 has been replaced by "true". (It is already present, as H29). --- Hypothesis H99 has been replaced by "true". (It is already present, as H94). --- Hypothesis H100 has been replaced by "true". (It is already present, as H95). %%% Simplified C1 on reading formula in, to give: %%% C1: msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first %%% Simplified C2 on reading formula in, to give: %%% C2: msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last -S- Applied substitution rule skein_512_up_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H10: fld_byte_count(fld_h(ctx)) >= 0 New H28: msg__index__subtype__1__last >= - 1 New H30: true New H52: skein_512_block_bytes_c >= 0 New H75: fld_byte_count(fld_h(ctx__3)) >= 0 New H94: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c >= 0 New C1: msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c >= 0 -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. 
New H6: msg__index__subtype__1__last < 2147483647 New H7: msg__index__subtype__1__last + skein_512_block_bytes_c <= 2147483646 New H11: fld_byte_count(fld_h(ctx)) <= 2147483647 New H29: msg__index__subtype__1__last <= 2147483646 New H31: true New H53: skein_512_block_bytes_c <= 2147483647 New H65: msg__index__subtype__1__last <= 2147483647 New H66: true New H76: fld_byte_count(fld_h(ctx__3)) <= 2147483647 New H95: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c <= 2147483647 New C2: msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c <= 2147483647 *** Proved C1: msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c >= 0 using hypothesis H96. *** Proved C2: msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c <= 2147483647 using hypotheses H29 & H94. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_23. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H42 has been replaced by "true". (It is already present, as H30). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H38). --- Hypothesis H45 has been replaced by "true". (It is already present, as H39). --- Hypothesis H48 has been replaced by "true". (It is already present, as H30). --- Hypothesis H49 has been replaced by "true". (It is already present, as H31). --- Hypothesis H51 has been replaced by "true". (It is already present, as H5) . %%% Simplified H54 on reading formula in, to give: %%% H54: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last %%% Simplified H61 on reading formula in, to give: %%% H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H62 on reading formula in, to give: %%% H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H80 has been replaced by "true". (It is already present, as H28). 
--- Hypothesis H81 has been replaced by "true". (It is already present, as H29). --- Hypothesis H82 has been replaced by "true". (It is already present, as H38). --- Hypothesis H83 has been replaced by "true". (It is already present, as H39). --- Hypothesis H86 has been replaced by "true". (It is already present, as H38). --- Hypothesis H87 has been replaced by "true". (It is already present, as H39). --- Hypothesis H88 has been replaced by "true". (It is already present, as H30). --- Hypothesis H89 has been replaced by "true". (It is already present, as H31). %%% Simplified H90 on reading formula in, to give: %%% H90: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H91 on reading formula in, to give: %%% H91: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H93 on reading formula in, to give: %%% H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H98 has been replaced by "true". (It is already present, as H30). --- Hypothesis H99 has been replaced by "true". (It is already present, as H31). --- Hypothesis H102 has been replaced by "true". (It is already present, as H59). --- Hypothesis H103 has been replaced by "true". (It is already present, as H60). %%% Simplified H105 on reading formula in, to give: %%% H105: 63 <= skein_512_block_bytes_index__last %%% Simplified H106 on reading formula in, to give: %%% H106: 63 <= skein_512_block_bytes_index__last %%% Simplified H108 on reading formula in, to give: %%% H108: 63 <= natural__last %%% Simplified H115 on reading formula in, to give: %%% H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H116 on reading formula in, to give: %%% H116: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H134 has been replaced by "true". (It is already present, as H30). --- Hypothesis H135 has been replaced by "true". (It is already present, as H31). --- Hypothesis H136 has been replaced by "true". (It is already present, as H84). --- Hypothesis H137 has been replaced by "true". (It is already present, as H85). --- Hypothesis H139 has been replaced by "true". (It is already present, as H84). --- Hypothesis H140 has been replaced by "true". (It is already present, as H85). --- Hypothesis H146 has been replaced by "true". (It is already present, as H90). --- Hypothesis H147 has been replaced by "true". (It is already present, as H91). --- Hypothesis H148 has been replaced by "true". (It is already present, as H141). --- Hypothesis H149 has been replaced by "true". (It is already present, as H142). --- Hypothesis H150 has been replaced by "true". (It is already present, as H94). --- Hypothesis H151 has been replaced by "true". (It is already present, as H95). --- Hypothesis H152 has been replaced by "true". (It is already present, as H141). --- Hypothesis H153 has been replaced by "true". (It is already present, as H142). 
--- Hypothesis H154 has been replaced by "true". (It is already present, as H90). --- Hypothesis H155 has been replaced by "true". (It is already present, as H91). %%% Simplified H156 on reading formula in, to give: %%% H156: fld_hash_bit_len(fld_h(ctx__2)) >= initialized_hash_bit_length__first %%% Simplified H157 on reading formula in, to give: %%% H157: fld_hash_bit_len(fld_h(ctx__2)) <= initialized_hash_bit_length__last %%% Simplified H158 on reading formula in, to give: %%% H158: 0 >= skein_512_block_bytes_count__first %%% Simplified H159 on reading formula in, to give: %%% H159: 0 <= skein_512_block_bytes_count__last --- Hypothesis H160 has been replaced by "true". (It is already present, as H5). %%% Simplified H161 on reading formula in, to give: %%% H161: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + (( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last %%% Simplified H162 on reading formula in, to give: %%% H162: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + 63 <= msg__index__subtype__1__last %%% Simplified H164 on reading formula in, to give: %%% H164: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last - 63 %%% Simplified H167 on reading formula in, to give: %%% H167: fld_hash_bit_len(fld_h(ctx__3)) = fld_hash_bit_len(fld_h( ctx__2)) %%% Simplified H170 on reading formula in, to give: %%% H170: fld_byte_count(fld_h(ctx__3)) = 0 %%% Simplified H171 on reading formula in, to give: %%% H171: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H172 on reading formula in, to give: %%% H172: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H190 has been replaced by "true". (It is already present, as H141). --- Hypothesis H191 has been replaced by "true". (It is already present, as H142). --- Hypothesis H195 has been replaced by "true". (It is already present, as H84). --- Hypothesis H196 has been replaced by "true". (It is already present, as H85). --- Hypothesis H197 has been replaced by "true". (It is already present, as H192). --- Hypothesis H198 has been replaced by "true". (It is already present, as H193). --- Hypothesis H201 has been replaced by "true". (It is already present, as H90). --- Hypothesis H202 has been replaced by "true". (It is already present, as H91). --- Hypothesis H203 has been replaced by "true". (It is already present, as H192). --- Hypothesis H204 has been replaced by "true". (It is already present, as H193). 
%%% Simplified C1 on reading formula in, to give: %%% C1: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + ( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first %%% Simplified C2 on reading formula in, to give: %%% C2: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + ( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last -S- Applied substitution rule skein_512_up_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H10: fld_byte_count(fld_h(ctx)) >= 0 New H28: msg__index__subtype__1__last >= - 1 New H30: true New H46: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= 0 New H63: fld_byte_count(fld_h(ctx__1)) >= 0 New H84: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) >= 0 New H90: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= 0 New H94: skein_512_block_bytes_c >= 0 New H117: fld_byte_count(fld_h(ctx__2)) >= 0 New H173: fld_byte_count(fld_h(ctx__3)) >= 0 New H192: (msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= 0 New H199: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= 0 New C1: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + ( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= 0 -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. 
New H6: msg__index__subtype__1__last < 2147483647 New H7: msg__index__subtype__1__last + skein_512_block_bytes_c <= 2147483646 New H11: fld_byte_count(fld_h(ctx)) <= 2147483647 New H29: msg__index__subtype__1__last <= 2147483646 New H31: true New H47: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= 2147483647 New H64: fld_byte_count(fld_h(ctx__1)) <= 2147483647 New H85: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) <= 2147483647 New H91: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= 2147483647 New H95: skein_512_block_bytes_c <= 2147483647 New H107: skein_512_block_bytes_index__last <= 2147483647 New H108: true New H118: fld_byte_count(fld_h(ctx__2)) <= 2147483647 New H163: msg__index__subtype__1__last <= 2147483647 New H164: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= 2147483584 New H174: fld_byte_count(fld_h(ctx__3)) <= 2147483647 New H193: (msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= 2147483647 New H200: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= 2147483647 New C2: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + ( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= 2147483647 *** Proved C1: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + ( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= 0 using hypotheses H46 & H192. -S- Applied substitution rule skein_512_up_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. 
New H7: msg__index__subtype__1__last <= 2147483582 New H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) > 64 New H38: 64 - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first New H39: 64 - fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_index__last New H40: 64 - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 New H41: 64 - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 New H46: fld_byte_count(fld_h(ctx)) <= 64 New H47: - 2147483583 <= fld_byte_count(fld_h(ctx)) New H54: 64 - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last New H55: fld_byte_count(fld_h(ctx)) + (64 - fld_byte_count(fld_h(ctx)) - 1) <= skein_512_block_bytes_index__last New H58: fld_byte_count(fld_h(ctx__1)) = fld_byte_count(fld_h(ctx)) + (64 - fld_byte_count(fld_h(ctx))) New H84: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) >= 0 New H85: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) <= 2147483647 New H90: fld_byte_count(fld_h(ctx)) <= 64 New H91: - 2147483583 <= fld_byte_count(fld_h(ctx)) New H92: fld_byte_count(fld_h(ctx__1)) = 64 New H94: true New H95: true New H138: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) > 64 New H141: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 >= positive_block_512_count_t__first New H142: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 <= positive_block_512_count_t__last New H143: true New H144: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1 >= integer__base__first New H145: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1 <= integer__base__last New H161: 64 - fld_byte_count(fld_h(ctx)) + (( msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) div 64 - 1) * 64 + 63 <= msg__index__subtype__1__last New H162: 64 - fld_byte_count(fld_h(ctx)) + 63 <= msg__index__subtype__1__last New H164: - 2147483520 <= fld_byte_count(fld_h(ctx)) New H192: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 * 64 >= 0 New H193: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 * 64 <= 2147483647 New H194: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 * 64 < msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) New H199: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - (64 - fld_byte_count( fld_h(ctx))) - 1) div 64 * 64 >= 0 New H200: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - (64 - fld_byte_count( fld_h(ctx))) - 1) div 64 * 64 <= 2147483647 New C2: 64 - fld_byte_count(fld_h(ctx)) + (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) div 64 * 64 <= 2147483647 -S- Applied substitution rule skein_512_up_rules(5). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) >= - 2147483648 New H144: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) >= - 2147483647 -S- Applied substitution rule skein_512_up_rules(6). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. 
New H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= 2147483647 New H145: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) <= 2147483648 -S- Applied substitution rule skein_512_up_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H69: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= 0 New H123: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= 0 New H179: fld_field_type(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H70: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= 63 New H124: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= 63 New H180: fld_field_type(fld_tweak_words(fld_h(ctx__3))) <= 63 -S- Applied substitution rule skein_512_up_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H72: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= 0 New H126: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= 0 New H182: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H73: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= 127 New H127: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= 127 New H183: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) <= 127 -S- Applied substitution rule skein_512_up_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) New H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) New H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last) New H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last) New H171: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_up_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. 
New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) New H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) New H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= 255) New H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= 255) New H171: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H74: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= 0 New H128: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= 0 New H184: fld_reserved(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H75: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= 65535 New H129: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= 65535 New H185: fld_reserved(fld_tweak_words(fld_h(ctx__3))) <= 65535 -S- Applied substitution rule skein_512_up_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H76: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= 0 New H130: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= 0 New H186: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H77: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= 4294967295 New H131: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= 4294967295 New H187: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) <= 4294967295 -S- Applied substitution rule skein_512_up_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) New H78: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= 0 New H116: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last) New H132: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= 0 New H172: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last) New H188: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H79: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= 18446744073709551615 New H133: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= 18446744073709551615 New H189: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) <= 18446744073709551615 New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) New H116: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) New H172: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H12: fld_hash_bit_len(fld_h(ctx)) >= 0 New H65: fld_hash_bit_len(fld_h(ctx__1)) >= 0 New H119: fld_hash_bit_len(fld_h(ctx__2)) >= 0 New H175: fld_hash_bit_len(fld_h(ctx__3)) >= 0 -S- Applied substitution rule skein_512_up_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H13: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H66: fld_hash_bit_len(fld_h(ctx__1)) <= 2147483640 New H120: fld_hash_bit_len(fld_h(ctx__2)) <= 2147483640 New H176: fld_hash_bit_len(fld_h(ctx__3)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(87). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. 
New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 New H100: fld_hash_bit_len(fld_h(ctx__1)) >= 1 New H109: fld_hash_bit_len(fld_h(ctx__2)) >= 1 New H156: fld_hash_bit_len(fld_h(ctx__2)) >= 1 New H165: fld_hash_bit_len(fld_h(ctx__3)) >= 1 -S- Applied substitution rule skein_512_up_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H101: fld_hash_bit_len(fld_h(ctx__1)) <= 2147483640 New H110: fld_hash_bit_len(fld_h(ctx__2)) <= 2147483640 New H157: fld_hash_bit_len(fld_h(ctx__2)) <= 2147483640 New H166: fld_hash_bit_len(fld_h(ctx__3)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__1), [ i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) New H116: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__2), [ i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) New H172: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__3), [ i___1]) and element(fld_x(ctx__3), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) New H116: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) New H172: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 New H59: fld_byte_count(fld_h(ctx__1)) >= 0 New H112: fld_byte_count(fld_h(ctx__2)) >= 0 New H158: true New H168: fld_byte_count(fld_h(ctx__3)) >= 0 -S- Applied substitution rule skein_512_up_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 New H60: fld_byte_count(fld_h(ctx__1)) <= 64 New H113: fld_byte_count(fld_h(ctx__2)) <= 64 New H159: true New H169: fld_byte_count(fld_h(ctx__3)) <= 64 -S- Applied substitution rule skein_512_up_rules(102). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. 
New H104: true New H38: fld_byte_count(fld_h(ctx)) <= 64 New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H61: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx__1), [ i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) New H93: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx__1), [ i___1]) and element(fld_b(ctx__1), [i___1]) <= 255) New H115: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx__2), [ i___2]) and element(fld_b(ctx__2), [i___2]) <= 255) New H171: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx__3), [ i___2]) and element(fld_b(ctx__3), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H105: true New H106: true New H107: true New H39: 1 <= fld_byte_count(fld_h(ctx)) New H55: fld_byte_count(fld_h(ctx)) + (64 - fld_byte_count(fld_h(ctx)) - 1) <= 63 New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H61: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) New H93: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= 255) New H115: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= 255) New H171: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(107). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H96: true New H141: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 >= 1 -S- Applied substitution rule skein_512_up_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H97: true New H142: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 <= 33554431 --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H15 (true-hypothesis). --- Eliminated hypothesis H18 (true-hypothesis). --- Eliminated hypothesis H30 (true-hypothesis). --- Eliminated hypothesis H31 (true-hypothesis). --- Eliminated hypothesis H32 (true-hypothesis). --- Eliminated hypothesis H33 (true-hypothesis). --- Eliminated hypothesis H42 (true-hypothesis). --- Eliminated hypothesis H43 (true-hypothesis). --- Eliminated hypothesis H44 (true-hypothesis). --- Eliminated hypothesis H45 (true-hypothesis). --- Eliminated hypothesis H48 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H51 (true-hypothesis). --- Eliminated hypothesis H67 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H71 (true-hypothesis). --- Eliminated hypothesis H80 (true-hypothesis). --- Eliminated hypothesis H81 (true-hypothesis). --- Eliminated hypothesis H82 (true-hypothesis). --- Eliminated hypothesis H83 (true-hypothesis). 
--- Eliminated hypothesis H86 (true-hypothesis). --- Eliminated hypothesis H87 (true-hypothesis). --- Eliminated hypothesis H88 (true-hypothesis). --- Eliminated hypothesis H89 (true-hypothesis). --- Eliminated hypothesis H98 (true-hypothesis). --- Eliminated hypothesis H99 (true-hypothesis). --- Eliminated hypothesis H102 (true-hypothesis). --- Eliminated hypothesis H103 (true-hypothesis). --- Eliminated hypothesis H108 (true-hypothesis). --- Eliminated hypothesis H121 (true-hypothesis). --- Eliminated hypothesis H122 (true-hypothesis). --- Eliminated hypothesis H125 (true-hypothesis). --- Eliminated hypothesis H134 (true-hypothesis). --- Eliminated hypothesis H135 (true-hypothesis). --- Eliminated hypothesis H136 (true-hypothesis). --- Eliminated hypothesis H137 (true-hypothesis). --- Eliminated hypothesis H139 (true-hypothesis). --- Eliminated hypothesis H140 (true-hypothesis). --- Eliminated hypothesis H146 (true-hypothesis). --- Eliminated hypothesis H147 (true-hypothesis). --- Eliminated hypothesis H148 (true-hypothesis). --- Eliminated hypothesis H149 (true-hypothesis). --- Eliminated hypothesis H150 (true-hypothesis). --- Eliminated hypothesis H151 (true-hypothesis). --- Eliminated hypothesis H152 (true-hypothesis). --- Eliminated hypothesis H153 (true-hypothesis). --- Eliminated hypothesis H154 (true-hypothesis). --- Eliminated hypothesis H155 (true-hypothesis). --- Eliminated hypothesis H160 (true-hypothesis). --- Eliminated hypothesis H177 (true-hypothesis). --- Eliminated hypothesis H178 (true-hypothesis). --- Eliminated hypothesis H181 (true-hypothesis). --- Eliminated hypothesis H190 (true-hypothesis). --- Eliminated hypothesis H191 (true-hypothesis). --- Eliminated hypothesis H195 (true-hypothesis). --- Eliminated hypothesis H196 (true-hypothesis). --- Eliminated hypothesis H197 (true-hypothesis). --- Eliminated hypothesis H198 (true-hypothesis). --- Eliminated hypothesis H201 (true-hypothesis). --- Eliminated hypothesis H202 (true-hypothesis). --- Eliminated hypothesis H203 (true-hypothesis). --- Eliminated hypothesis H204 (true-hypothesis). --- Eliminated hypothesis H94 (true-hypothesis). --- Eliminated hypothesis H95 (true-hypothesis). --- Eliminated hypothesis H143 (true-hypothesis). --- Eliminated hypothesis H158 (true-hypothesis). --- Eliminated hypothesis H159 (true-hypothesis). --- Eliminated hypothesis H104 (true-hypothesis). --- Eliminated hypothesis H105 (true-hypothesis). --- Eliminated hypothesis H106 (true-hypothesis). --- Eliminated hypothesis H107 (true-hypothesis). --- Eliminated hypothesis H96 (true-hypothesis). --- Eliminated hypothesis H97 (true-hypothesis). --- Eliminated hypothesis H55 (true-hypothesis). --- Eliminated hypothesis H39 (duplicate of H37). --- Eliminated hypothesis H100 (duplicate of H56). --- Eliminated hypothesis H90 (duplicate of H46). --- Eliminated hypothesis H91 (duplicate of H47). --- Eliminated hypothesis H101 (duplicate of H66). --- Eliminated hypothesis H157 (duplicate of H120). --- Eliminated hypothesis H156 (duplicate of H109). --- Eliminated hypothesis H13 (duplicate of H2). --- Eliminated hypothesis H120 (duplicate of H110). --- Eliminated hypothesis H176 (duplicate of H166). --- Eliminated hypothesis H10 (duplicate of H3). --- Eliminated hypothesis H63 (duplicate of H59). --- Eliminated hypothesis H117 (duplicate of H112). --- Eliminated hypothesis H173 (duplicate of H168). --- Eliminated hypothesis H46 (duplicate of H4). --- Eliminated hypothesis H38 (duplicate of H4). --- Eliminated hypothesis H50 (duplicate of H1). 
--- Eliminated hypothesis H29 (duplicate of H6). --- Eliminated hypothesis H40 (duplicate of H36). --- Eliminated hypothesis H54 (duplicate of H41). --- Eliminated hypothesis H84 (duplicate of H41). --- Eliminated hypothesis H92 (duplicate of H58). --- Eliminated hypothesis H199 (duplicate of H161). --- Eliminated hypothesis H3 (redundant, given H37). --- Eliminated hypothesis H6 (redundant, given H7). --- Eliminated hypothesis H11 (redundant, given H4). --- Eliminated hypothesis H12 (redundant, given H1). --- Eliminated hypothesis H28 (redundant, given H53). --- Eliminated hypothesis H34 (redundant, given H36). --- Eliminated hypothesis H36 (redundant, given H138). --- Eliminated hypothesis H41 (redundant, given H138). --- Eliminated hypothesis H47 (redundant, given H37). --- Eliminated hypothesis H52 (redundant, given H5). --- Eliminated hypothesis H53 (redundant, given H4 & H138). --- Eliminated hypothesis H59 (redundant, given H58). --- Eliminated hypothesis H60 (redundant, given H58). --- Eliminated hypothesis H64 (redundant, given H58). --- Eliminated hypothesis H65 (redundant, given H56). --- Eliminated hypothesis H85 (redundant, given H35). --- Eliminated hypothesis H118 (redundant, given H113). --- Eliminated hypothesis H119 (redundant, given H109). --- Eliminated hypothesis H144 (redundant, given H138). --- Eliminated hypothesis H145 (redundant, given H35). --- Eliminated hypothesis H161 (redundant, given H194). --- Eliminated hypothesis H162 (redundant, given H138). --- Eliminated hypothesis H163 (redundant, given H7). --- Eliminated hypothesis H164 (redundant, given H37). --- Eliminated hypothesis H168 (redundant, given H170). --- Eliminated hypothesis H169 (redundant, given H170). --- Eliminated hypothesis H174 (redundant, given H170). --- Eliminated hypothesis H175 (redundant, given H165). -S- Substituted hypothesis H57. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( ctx__1)) by: fld_hash_bit_len(fld_h(ctx)). -S- Substituted hypothesis H114. This was achieved by replacing all occurrences of fld_byte_count(fld_h( ctx__2)) by: fld_byte_count(fld_h(ctx__1)). -S- Substituted hypothesis H167. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( ctx__3)) by: fld_hash_bit_len(fld_h(ctx__2)). -S- Substituted hypothesis H111. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( ctx__2)) by: fld_hash_bit_len(fld_h(ctx)). *** Proved C2: 64 - fld_byte_count(fld_h(ctx)) + ( msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) div 64 * 64 <= 2147483647 using hypotheses H7, H39 & H46. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_24. 
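[Editorial worked check -- not part of the Simplifier output.] Writing B for fld_byte_count(fld_h(ctx)) and L for msg__index__subtype__1__last (shorthand introduced here only), the conclusion C2 of the verification condition proved immediately above can be confirmed by hand. Since L + 1 - (64 - B) - 1 = L + B - 64, and H138 gives L + B - 64 > 63 >= 0, the truncating division satisfies the usual floor bound on a non-negative argument:

    \[ 64 \left\lfloor \frac{L + B - 64}{64} \right\rfloor \;\le\; L + B - 64 . \]

Adding 64 - B to both sides and then applying H7 (L <= 2147483582):

    \[ (64 - B) + 64 \left\lfloor \frac{L + B - 64}{64} \right\rfloor \;\le\; L \;\le\; 2147483582 \;<\; 2147483647 , \]

which is C2. The Simplifier itself cites H7, H39 and H46; the chain above is only one elementary route to the same bound, not a transcript of the tool's internal reasoning.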
@@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H38 has been replaced by "true". (It is already present, as H28). --- Hypothesis H39 has been replaced by "true". (It is already present, as H29). --- Hypothesis H41 has been replaced by "true". (It is already present, as H28). --- Hypothesis H42 has been replaced by "true". (It is already present, as H29). %%% Simplified H43 on reading formula in, to give: %%% H43: msg__index__subtype__1__last div skein_512_block_bytes_c >= positive_block_512_count_t__first %%% Simplified H44 on reading formula in, to give: %%% H44: msg__index__subtype__1__last div skein_512_block_bytes_c <= positive_block_512_count_t__last %%% Simplified H46 on reading formula in, to give: %%% H46: msg__index__subtype__1__last >= integer__base__first %%% Simplified H47 on reading formula in, to give: %%% H47: msg__index__subtype__1__last <= integer__base__last --- Hypothesis H48 has been replaced by "true". (It is already present, as H30). --- Hypothesis H49 has been replaced by "true". (It is already present, as H31). --- Hypothesis H50 has been replaced by "true". (It is already present, as H43). --- Hypothesis H51 has been replaced by "true". (It is already present, as H44). --- Hypothesis H54 has been replaced by "true". (It is already present, as H43). --- Hypothesis H55 has been replaced by "true". (It is already present, as H44). --- Hypothesis H56 has been replaced by "true". (It is already present, as H30). --- Hypothesis H57 has been replaced by "true". (It is already present, as H31). --- Hypothesis H58 has been replaced by "true". (It is already present, as H1) . --- Hypothesis H59 has been replaced by "true". (It is already present, as H2) . --- Hypothesis H60 has been replaced by "true". (It is already present, as H3) . --- Hypothesis H61 has been replaced by "true". (It is already present, as H4) . --- Hypothesis H62 has been replaced by "true". (It is already present, as H5) . 
%%% Simplified H63 on reading formula in, to give: %%% H63: (msg__index__subtype__1__last div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last %%% Simplified H64 on reading formula in, to give: %%% H64: 63 <= msg__index__subtype__1__last %%% Simplified H66 on reading formula in, to give: %%% H66: 63 <= natural__last %%% Simplified H73 on reading formula in, to give: %%% H73: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H74 on reading formula in, to give: %%% H74: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H92 has been replaced by "true". (It is already present, as H43). --- Hypothesis H93 has been replaced by "true". (It is already present, as H44). %%% Simplified H94 on reading formula in, to give: %%% H94: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first %%% Simplified H95 on reading formula in, to give: %%% H95: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last %%% Simplified H96 on reading formula in, to give: %%% H96: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c < msg__index__subtype__1__last + 1 --- Hypothesis H97 has been replaced by "true". (It is already present, as H28). --- Hypothesis H98 has been replaced by "true". (It is already present, as H29). --- Hypothesis H99 has been replaced by "true". (It is already present, as H94). --- Hypothesis H100 has been replaced by "true". (It is already present, as H95). %%% Simplified H101 on reading formula in, to give: %%% H101: msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first %%% Simplified H102 on reading formula in, to give: %%% H102: msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last --- Hypothesis H103 has been replaced by "true". (It is already present, as H30). --- Hypothesis H104 has been replaced by "true". (It is already present, as H31). --- Hypothesis H105 has been replaced by "true". (It is already present, as H94). --- Hypothesis H106 has been replaced by "true". (It is already present, as H95). %%% Simplified C1 on reading formula in, to give: %%% C1: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first %%% Simplified C2 on reading formula in, to give: %%% C2: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last *** Proved C1: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first using hypothesis H94. *** Proved C2: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last using hypothesis H95. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_25. 
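[Editorial worked check -- not part of the Simplifier output.] In procedure_skein_512_update_24 the term msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c recurs through H94-H96, H101-H102 and C1-C2, and C1/C2 are discharged simply because they are syntactic copies of H94/H95. Those hypotheses match the standard truncating-division inequalities, written here with c = skein_512_block_bytes_c (rewritten to 64 elsewhere by rule skein_512_up_rules(1)) and L = msg__index__subtype__1__last, assuming L >= 0 (H64 gives 63 <= L):

    \[ 0 \;\le\; c \left\lfloor \frac{L}{c} \right\rfloor \;\le\; L \;<\; c \left( \left\lfloor \frac{L}{c} \right\rfloor + 1 \right) . \]

The middle comparison is exactly H96 (the product is strictly below L + 1); the left-hand bound matches H94 once natural__first is instantiated to 0, and H95 then follows from the middle comparison together with the upper bound on L itself (H47). This restates known facts for readability and adds no proof obligation.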
@@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H42 has been replaced by "true". (It is already present, as H30). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H38). --- Hypothesis H45 has been replaced by "true". (It is already present, as H39). --- Hypothesis H48 has been replaced by "true". (It is already present, as H30). --- Hypothesis H49 has been replaced by "true". (It is already present, as H31). --- Hypothesis H51 has been replaced by "true". (It is already present, as H5) . %%% Simplified H54 on reading formula in, to give: %%% H54: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last %%% Simplified H61 on reading formula in, to give: %%% H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H62 on reading formula in, to give: %%% H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H80 has been replaced by "true". (It is already present, as H28). --- Hypothesis H81 has been replaced by "true". (It is already present, as H29). --- Hypothesis H82 has been replaced by "true". (It is already present, as H38). --- Hypothesis H83 has been replaced by "true". (It is already present, as H39). --- Hypothesis H86 has been replaced by "true". (It is already present, as H38). --- Hypothesis H87 has been replaced by "true". (It is already present, as H39). --- Hypothesis H88 has been replaced by "true". (It is already present, as H30). --- Hypothesis H89 has been replaced by "true". (It is already present, as H31). 
%%% Simplified H90 on reading formula in, to give: %%% H90: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H91 on reading formula in, to give: %%% H91: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H93 on reading formula in, to give: %%% H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H98 has been replaced by "true". (It is already present, as H30). --- Hypothesis H99 has been replaced by "true". (It is already present, as H31). --- Hypothesis H102 has been replaced by "true". (It is already present, as H59). --- Hypothesis H103 has been replaced by "true". (It is already present, as H60). %%% Simplified H105 on reading formula in, to give: %%% H105: 63 <= skein_512_block_bytes_index__last %%% Simplified H106 on reading formula in, to give: %%% H106: 63 <= skein_512_block_bytes_index__last %%% Simplified H108 on reading formula in, to give: %%% H108: 63 <= natural__last %%% Simplified H115 on reading formula in, to give: %%% H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H116 on reading formula in, to give: %%% H116: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H134 has been replaced by "true". (It is already present, as H30). --- Hypothesis H135 has been replaced by "true". (It is already present, as H31). --- Hypothesis H136 has been replaced by "true". (It is already present, as H84). --- Hypothesis H137 has been replaced by "true". (It is already present, as H85). --- Hypothesis H139 has been replaced by "true". (It is already present, as H84). --- Hypothesis H140 has been replaced by "true". (It is already present, as H85). --- Hypothesis H146 has been replaced by "true". (It is already present, as H90). --- Hypothesis H147 has been replaced by "true". (It is already present, as H91). --- Hypothesis H148 has been replaced by "true". (It is already present, as H141). --- Hypothesis H149 has been replaced by "true". (It is already present, as H142). --- Hypothesis H150 has been replaced by "true". (It is already present, as H94). --- Hypothesis H151 has been replaced by "true". (It is already present, as H95). --- Hypothesis H152 has been replaced by "true". (It is already present, as H141). --- Hypothesis H153 has been replaced by "true". (It is already present, as H142). --- Hypothesis H154 has been replaced by "true". (It is already present, as H90). --- Hypothesis H155 has been replaced by "true". (It is already present, as H91). 
%%% Simplified H156 on reading formula in, to give: %%% H156: fld_hash_bit_len(fld_h(ctx__2)) >= initialized_hash_bit_length__first %%% Simplified H157 on reading formula in, to give: %%% H157: fld_hash_bit_len(fld_h(ctx__2)) <= initialized_hash_bit_length__last %%% Simplified H158 on reading formula in, to give: %%% H158: 0 >= skein_512_block_bytes_count__first %%% Simplified H159 on reading formula in, to give: %%% H159: 0 <= skein_512_block_bytes_count__last --- Hypothesis H160 has been replaced by "true". (It is already present, as H5). %%% Simplified H161 on reading formula in, to give: %%% H161: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + (( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last %%% Simplified H162 on reading formula in, to give: %%% H162: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + 63 <= msg__index__subtype__1__last %%% Simplified H164 on reading formula in, to give: %%% H164: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last - 63 %%% Simplified H167 on reading formula in, to give: %%% H167: fld_hash_bit_len(fld_h(ctx__3)) = fld_hash_bit_len(fld_h( ctx__2)) %%% Simplified H170 on reading formula in, to give: %%% H170: fld_byte_count(fld_h(ctx__3)) = 0 %%% Simplified H171 on reading formula in, to give: %%% H171: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H172 on reading formula in, to give: %%% H172: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H190 has been replaced by "true". (It is already present, as H141). --- Hypothesis H191 has been replaced by "true". (It is already present, as H142). --- Hypothesis H195 has been replaced by "true". (It is already present, as H84). --- Hypothesis H196 has been replaced by "true". (It is already present, as H85). --- Hypothesis H197 has been replaced by "true". (It is already present, as H192). --- Hypothesis H198 has been replaced by "true". (It is already present, as H193). --- Hypothesis H201 has been replaced by "true". (It is already present, as H90). --- Hypothesis H202 has been replaced by "true". (It is already present, as H91). --- Hypothesis H203 has been replaced by "true". (It is already present, as H192). --- Hypothesis H204 has been replaced by "true". (It is already present, as H193). %%% Simplified H205 on reading formula in, to give: %%% H205: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + ( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first %%% Simplified H206 on reading formula in, to give: %%% H206: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + ( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last --- Hypothesis H207 has been replaced by "true". (It is already present, as H205). --- Hypothesis H208 has been replaced by "true". 
(It is already present, as H206). --- Hypothesis H209 has been replaced by "true". (It is already present, as H199). --- Hypothesis H210 has been replaced by "true". (It is already present, as H200). %%% Simplified C3 on reading formula in, to give: %%% C3: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + ( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first %%% Simplified C4 on reading formula in, to give: %%% C4: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + ( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last %%% Simplified C7 on reading formula in, to give: %%% C7: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + ( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= msg__index__subtype__1__first %%% Simplified C8 on reading formula in, to give: %%% C8: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + ( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= msg__index__subtype__1__last %%% Simplified C9 on reading formula in, to give: %%% C9: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + ( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c + (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - ( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c - 1) <= msg__index__subtype__1__last *** Proved C1: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first using hypothesis H199. *** Proved C2: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last using hypothesis H200. *** Proved C3: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + ( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first using hypothesis H205. *** Proved C4: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + ( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last using hypothesis H206. *** Proved C5: fld_hash_bit_len(fld_h(ctx__3)) > 0 using hypotheses H56, H111 & H167. *** Proved C6: msg__index__subtype__1__first = 0 using hypothesis H5. -S- Applied substitution rule skein_512_up_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. 
New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H39: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= 63 New H55: fld_byte_count(fld_h(ctx)) + (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) - 1) <= 63 New H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) New H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= 63 -> spark__unsigned__byte__first <= element( fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last) New H105: true New H106: true New H107: 63 <= natural__last New H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last) New H171: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last) New C10: fld_byte_count(fld_h(ctx__3)) + (msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - ( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c - 1) <= 63 -S- Applied substitution rule skein_512_up_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. 
New H7: msg__index__subtype__1__last + 65 <= natural__last New H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) > 64 New H38: 64 - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first New H39: 1 <= fld_byte_count(fld_h(ctx)) New H40: 64 - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 New H41: 64 - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 New H46: 64 - fld_byte_count(fld_h(ctx)) >= natural__first New H47: 64 - fld_byte_count(fld_h(ctx)) <= natural__last New H54: 64 - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last New H55: fld_byte_count(fld_h(ctx)) + (64 - fld_byte_count(fld_h(ctx)) - 1) <= 63 New H58: fld_byte_count(fld_h(ctx__1)) = fld_byte_count(fld_h(ctx)) + (64 - fld_byte_count(fld_h(ctx))) New H84: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) >= natural__first New H85: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) <= natural__last New H90: 64 - fld_byte_count(fld_h(ctx)) >= natural__first New H91: 64 - fld_byte_count(fld_h(ctx)) <= natural__last New H92: fld_byte_count(fld_h(ctx__1)) = 64 New H94: 64 >= natural__first New H95: 64 <= natural__last New H138: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) > 64 New H141: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 >= positive_block_512_count_t__first New H142: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 <= positive_block_512_count_t__last New H143: true New H144: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1 >= integer__base__first New H145: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1 <= integer__base__last New H161: 64 - fld_byte_count(fld_h(ctx)) + (( msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) div 64 - 1) * 64 + 63 <= msg__index__subtype__1__last New H162: 64 - fld_byte_count(fld_h(ctx)) + 63 <= msg__index__subtype__1__last New H164: 64 - fld_byte_count(fld_h(ctx)) <= natural__last - 63 New H192: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 * 64 >= natural__first New H193: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 * 64 <= natural__last New H194: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 * 64 < msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) New H199: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - (64 - fld_byte_count( fld_h(ctx))) - 1) div 64 * 64 >= natural__first New H200: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - (64 - fld_byte_count( fld_h(ctx))) - 1) div 64 * 64 <= natural__last New H205: 64 - fld_byte_count(fld_h(ctx)) + (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) div 64 * 64 >= natural__first New H206: 64 - fld_byte_count(fld_h(ctx)) + (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) div 64 * 64 <= natural__last New C7: 64 - fld_byte_count(fld_h(ctx)) + (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) div 64 * 64 >= msg__index__subtype__1__first New C8: 64 - fld_byte_count(fld_h(ctx)) + (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) div 64 * 64 <= msg__index__subtype__1__last New C9: 64 - fld_byte_count(fld_h(ctx)) + 
(msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) div 64 * 64 + ( msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx)) ) - 1) div 64 * 64 - 1) <= msg__index__subtype__1__last New C10: fld_byte_count(fld_h(ctx__3)) + (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) div 64 * 64 - 1) <= 63 -S- Applied substitution rule skein_512_up_rules(5). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) >= - 2147483648 New H144: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) >= - 2147483647 -S- Applied substitution rule skein_512_up_rules(6). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= 2147483647 New H145: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) <= 2147483648 -S- Applied substitution rule skein_512_up_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H10: fld_byte_count(fld_h(ctx)) >= 0 New H28: msg__index__subtype__1__last >= - 1 New H30: true New H63: fld_byte_count(fld_h(ctx__1)) >= 0 New H117: fld_byte_count(fld_h(ctx__2)) >= 0 New H173: fld_byte_count(fld_h(ctx__3)) >= 0 New H46: fld_byte_count(fld_h(ctx)) <= 64 New H84: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) >= 0 New H90: fld_byte_count(fld_h(ctx)) <= 64 New H94: true New H192: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 * 64 >= 0 New H199: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - (64 - fld_byte_count( fld_h(ctx))) - 1) div 64 * 64 >= 0 New H205: 64 - fld_byte_count(fld_h(ctx)) + (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) div 64 * 64 >= 0 -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H6: msg__index__subtype__1__last < 2147483647 New H11: fld_byte_count(fld_h(ctx)) <= 2147483647 New H29: msg__index__subtype__1__last <= 2147483646 New H31: true New H64: fld_byte_count(fld_h(ctx__1)) <= 2147483647 New H107: true New H108: true New H118: fld_byte_count(fld_h(ctx__2)) <= 2147483647 New H163: msg__index__subtype__1__last <= 2147483647 New H174: fld_byte_count(fld_h(ctx__3)) <= 2147483647 New H7: msg__index__subtype__1__last <= 2147483582 New H47: - 2147483583 <= fld_byte_count(fld_h(ctx)) New H85: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) <= 2147483647 New H91: - 2147483583 <= fld_byte_count(fld_h(ctx)) New H95: true New H164: - 2147483520 <= fld_byte_count(fld_h(ctx)) New H193: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 * 64 <= 2147483647 New H200: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - (msg__index__subtype__1__last + 1 - (64 - fld_byte_count( fld_h(ctx))) - 1) div 64 * 64 <= 2147483647 New H206: 64 - fld_byte_count(fld_h(ctx)) + (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) div 64 * 64 <= 2147483647 -S- Applied substitution rule skein_512_up_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. 
New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H69: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= 0 New H123: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= 0 New H179: fld_field_type(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H70: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= 63 New H124: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= 63 New H180: fld_field_type(fld_tweak_words(fld_h(ctx__3))) <= 63 -S- Applied substitution rule skein_512_up_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H72: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= 0 New H126: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= 0 New H182: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H73: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= 127 New H127: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= 127 New H183: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) <= 127 -S- Applied substitution rule skein_512_up_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) New H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) New H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last) New H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last) New H171: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_up_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. 
New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) New H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) New H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= 255) New H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= 255) New H171: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H74: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= 0 New H128: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= 0 New H184: fld_reserved(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H75: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= 65535 New H129: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= 65535 New H185: fld_reserved(fld_tweak_words(fld_h(ctx__3))) <= 65535 -S- Applied substitution rule skein_512_up_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H76: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= 0 New H130: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= 0 New H186: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H77: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= 4294967295 New H131: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= 4294967295 New H187: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) <= 4294967295 -S- Applied substitution rule skein_512_up_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) New H78: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= 0 New H116: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last) New H132: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= 0 New H172: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last) New H188: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H79: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= 18446744073709551615 New H133: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= 18446744073709551615 New H189: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) <= 18446744073709551615 New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) New H116: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) New H172: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H12: fld_hash_bit_len(fld_h(ctx)) >= 0 New H65: fld_hash_bit_len(fld_h(ctx__1)) >= 0 New H119: fld_hash_bit_len(fld_h(ctx__2)) >= 0 New H175: fld_hash_bit_len(fld_h(ctx__3)) >= 0 -S- Applied substitution rule skein_512_up_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H13: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H66: fld_hash_bit_len(fld_h(ctx__1)) <= 2147483640 New H120: fld_hash_bit_len(fld_h(ctx__2)) <= 2147483640 New H176: fld_hash_bit_len(fld_h(ctx__3)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(87). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. 
New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 New H100: fld_hash_bit_len(fld_h(ctx__1)) >= 1 New H109: fld_hash_bit_len(fld_h(ctx__2)) >= 1 New H156: fld_hash_bit_len(fld_h(ctx__2)) >= 1 New H165: fld_hash_bit_len(fld_h(ctx__3)) >= 1 -S- Applied substitution rule skein_512_up_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H101: fld_hash_bit_len(fld_h(ctx__1)) <= 2147483640 New H110: fld_hash_bit_len(fld_h(ctx__2)) <= 2147483640 New H157: fld_hash_bit_len(fld_h(ctx__2)) <= 2147483640 New H166: fld_hash_bit_len(fld_h(ctx__3)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__1), [ i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) New H116: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__2), [ i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) New H172: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__3), [ i___1]) and element(fld_x(ctx__3), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) New H116: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) New H172: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 New H59: fld_byte_count(fld_h(ctx__1)) >= 0 New H112: fld_byte_count(fld_h(ctx__2)) >= 0 New H158: true New H168: fld_byte_count(fld_h(ctx__3)) >= 0 -S- Applied substitution rule skein_512_up_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 New H60: fld_byte_count(fld_h(ctx__1)) <= 64 New H113: fld_byte_count(fld_h(ctx__2)) <= 64 New H159: true New H169: fld_byte_count(fld_h(ctx__3)) <= 64 -S- Applied substitution rule skein_512_up_rules(102). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. 
New H104: true New H38: fld_byte_count(fld_h(ctx)) <= 64 New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H61: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) New H93: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= 255) New H115: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= 255) New H171: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(107). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H96: true New H141: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 >= 1 -S- Applied substitution rule skein_512_up_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H97: true New H142: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) - 1) div 64 <= 33554431 *** Proved C7: 64 - fld_byte_count(fld_h(ctx)) + ( msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) div 64 * 64 >= msg__index__subtype__1__first using hypotheses H46, H52 & H192. *** Proved C8: 64 - fld_byte_count(fld_h(ctx)) + ( msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) div 64 * 64 <= msg__index__subtype__1__last via its standard form, which is: Std.Fm C8: msg__index__subtype__1__last + 64 * ((64 - ( msg__index__subtype__1__last + fld_byte_count(fld_h(ctx)))) div 64) + fld_byte_count(fld_h(ctx)) > 63 using hypothesis H194. --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H15 (true-hypothesis). --- Eliminated hypothesis H18 (true-hypothesis). --- Eliminated hypothesis H32 (true-hypothesis). --- Eliminated hypothesis H33 (true-hypothesis). --- Eliminated hypothesis H42 (true-hypothesis). --- Eliminated hypothesis H43 (true-hypothesis). --- Eliminated hypothesis H44 (true-hypothesis). --- Eliminated hypothesis H45 (true-hypothesis). --- Eliminated hypothesis H48 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H51 (true-hypothesis). --- Eliminated hypothesis H67 (true-hypothesis). --- Eliminated hypothesis H68 (true-hypothesis). --- Eliminated hypothesis H71 (true-hypothesis). --- Eliminated hypothesis H80 (true-hypothesis). --- Eliminated hypothesis H81 (true-hypothesis). --- Eliminated hypothesis H82 (true-hypothesis). --- Eliminated hypothesis H83 (true-hypothesis). --- Eliminated hypothesis H86 (true-hypothesis). --- Eliminated hypothesis H87 (true-hypothesis). --- Eliminated hypothesis H88 (true-hypothesis). --- Eliminated hypothesis H89 (true-hypothesis). --- Eliminated hypothesis H98 (true-hypothesis). --- Eliminated hypothesis H99 (true-hypothesis). --- Eliminated hypothesis H102 (true-hypothesis). --- Eliminated hypothesis H103 (true-hypothesis). --- Eliminated hypothesis H105 (true-hypothesis). --- Eliminated hypothesis H106 (true-hypothesis). --- Eliminated hypothesis H121 (true-hypothesis). --- Eliminated hypothesis H122 (true-hypothesis). --- Eliminated hypothesis H125 (true-hypothesis). --- Eliminated hypothesis H134 (true-hypothesis). 
--- Eliminated hypothesis H135 (true-hypothesis). --- Eliminated hypothesis H136 (true-hypothesis). --- Eliminated hypothesis H137 (true-hypothesis). --- Eliminated hypothesis H139 (true-hypothesis). --- Eliminated hypothesis H140 (true-hypothesis). --- Eliminated hypothesis H146 (true-hypothesis). --- Eliminated hypothesis H147 (true-hypothesis). --- Eliminated hypothesis H148 (true-hypothesis). --- Eliminated hypothesis H149 (true-hypothesis). --- Eliminated hypothesis H150 (true-hypothesis). --- Eliminated hypothesis H151 (true-hypothesis). --- Eliminated hypothesis H152 (true-hypothesis). --- Eliminated hypothesis H153 (true-hypothesis). --- Eliminated hypothesis H154 (true-hypothesis). --- Eliminated hypothesis H155 (true-hypothesis). --- Eliminated hypothesis H160 (true-hypothesis). --- Eliminated hypothesis H177 (true-hypothesis). --- Eliminated hypothesis H178 (true-hypothesis). --- Eliminated hypothesis H181 (true-hypothesis). --- Eliminated hypothesis H190 (true-hypothesis). --- Eliminated hypothesis H191 (true-hypothesis). --- Eliminated hypothesis H195 (true-hypothesis). --- Eliminated hypothesis H196 (true-hypothesis). --- Eliminated hypothesis H197 (true-hypothesis). --- Eliminated hypothesis H198 (true-hypothesis). --- Eliminated hypothesis H201 (true-hypothesis). --- Eliminated hypothesis H202 (true-hypothesis). --- Eliminated hypothesis H203 (true-hypothesis). --- Eliminated hypothesis H204 (true-hypothesis). --- Eliminated hypothesis H207 (true-hypothesis). --- Eliminated hypothesis H208 (true-hypothesis). --- Eliminated hypothesis H209 (true-hypothesis). --- Eliminated hypothesis H210 (true-hypothesis). --- Eliminated hypothesis H143 (true-hypothesis). --- Eliminated hypothesis H30 (true-hypothesis). --- Eliminated hypothesis H94 (true-hypothesis). --- Eliminated hypothesis H31 (true-hypothesis). --- Eliminated hypothesis H107 (true-hypothesis). --- Eliminated hypothesis H108 (true-hypothesis). --- Eliminated hypothesis H95 (true-hypothesis). --- Eliminated hypothesis H158 (true-hypothesis). --- Eliminated hypothesis H159 (true-hypothesis). --- Eliminated hypothesis H104 (true-hypothesis). --- Eliminated hypothesis H96 (true-hypothesis). --- Eliminated hypothesis H97 (true-hypothesis). --- Eliminated hypothesis H55 (true-hypothesis). --- Eliminated hypothesis H39 (duplicate of H37). --- Eliminated hypothesis H100 (duplicate of H56). --- Eliminated hypothesis H90 (duplicate of H46). --- Eliminated hypothesis H91 (duplicate of H47). --- Eliminated hypothesis H101 (duplicate of H66). --- Eliminated hypothesis H157 (duplicate of H120). --- Eliminated hypothesis H156 (duplicate of H109). --- Eliminated hypothesis H13 (duplicate of H2). --- Eliminated hypothesis H120 (duplicate of H110). --- Eliminated hypothesis H176 (duplicate of H166). --- Eliminated hypothesis H10 (duplicate of H3). --- Eliminated hypothesis H63 (duplicate of H59). --- Eliminated hypothesis H117 (duplicate of H112). --- Eliminated hypothesis H173 (duplicate of H168). --- Eliminated hypothesis H46 (duplicate of H4). --- Eliminated hypothesis H38 (duplicate of H4). --- Eliminated hypothesis H50 (duplicate of H1). --- Eliminated hypothesis H29 (duplicate of H6). --- Eliminated hypothesis H40 (duplicate of H36). --- Eliminated hypothesis H54 (duplicate of H41). --- Eliminated hypothesis H84 (duplicate of H41). --- Eliminated hypothesis H92 (duplicate of H58). --- Eliminated hypothesis H199 (duplicate of H161). --- Eliminated hypothesis H3 (redundant, given H37). 
--- Eliminated hypothesis H6 (redundant, given H7). --- Eliminated hypothesis H11 (redundant, given H4). --- Eliminated hypothesis H12 (redundant, given H1). --- Eliminated hypothesis H28 (redundant, given H53). --- Eliminated hypothesis H34 (redundant, given H36). --- Eliminated hypothesis H36 (redundant, given H138). --- Eliminated hypothesis H41 (redundant, given H138). --- Eliminated hypothesis H47 (redundant, given H37). --- Eliminated hypothesis H52 (redundant, given H5). --- Eliminated hypothesis H53 (redundant, given H4 & H138). --- Eliminated hypothesis H59 (redundant, given H58). --- Eliminated hypothesis H60 (redundant, given H58). --- Eliminated hypothesis H64 (redundant, given H58). --- Eliminated hypothesis H65 (redundant, given H56). --- Eliminated hypothesis H85 (redundant, given H35). --- Eliminated hypothesis H118 (redundant, given H113). --- Eliminated hypothesis H119 (redundant, given H109). --- Eliminated hypothesis H142 (redundant, given H4 & H206). --- Eliminated hypothesis H144 (redundant, given H138). --- Eliminated hypothesis H145 (redundant, given H35). --- Eliminated hypothesis H161 (redundant, given H194). --- Eliminated hypothesis H162 (redundant, given H138). --- Eliminated hypothesis H163 (redundant, given H7). --- Eliminated hypothesis H164 (redundant, given H37). --- Eliminated hypothesis H168 (redundant, given H170). --- Eliminated hypothesis H169 (redundant, given H170). --- Eliminated hypothesis H174 (redundant, given H170). --- Eliminated hypothesis H175 (redundant, given H165). --- Eliminated hypothesis H193 (redundant, given H4 & H206). -S- Substituted hypothesis H57. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( ctx__1)) by: fld_hash_bit_len(fld_h(ctx)). -S- Substituted hypothesis H114. This was achieved by replacing all occurrences of fld_byte_count(fld_h( ctx__2)) by: fld_byte_count(fld_h(ctx__1)). -S- Substituted hypothesis H167. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( ctx__3)) by: fld_hash_bit_len(fld_h(ctx__2)). -S- Substituted hypothesis H111. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( ctx__2)) by: fld_hash_bit_len(fld_h(ctx)). +++ New H211: integer__size >= 0 +++ New H212: natural__size >= 0 +++ New H213: spark__unsigned__u6__size >= 0 +++ New H214: spark__unsigned__u7__size >= 0 +++ New H215: spark__unsigned__byte__size >= 0 +++ New H216: spark__unsigned__u16__size >= 0 +++ New H217: spark__unsigned__u32__size >= 0 +++ New H218: spark__unsigned__u64__size >= 0 +++ New H219: spark__crypto__word_count_t__size >= 0 +++ New H220: hash_bit_length__size >= 0 +++ New H221: initialized_hash_bit_length__size >= 0 +++ New H222: skein_512_state_words_index__size >= 0 +++ New H223: skein_512_block_bytes_count__size >= 0 +++ New H224: skein_512_block_bytes_index__size >= 0 +++ New H225: positive_block_512_count_t__size >= 0 +++ New H226: skein_512_context__size >= 0 +++ New H227: msg__index__subtype__1__first <= msg__index__subtype__1__last +++ New H228: context_header__size >= 0 +++ New H229: msg__index__subtype__1__first >= 0 +++ New H230: msg__index__subtype__1__last >= 0 --- Attempted addition of new hypothesis: msg__index__subtype__1__last <= 2147483647 eliminated: this already exists (as H163). +++ New H163: msg__index__subtype__1__last <= 2147483647 +++ New H231: msg__index__subtype__1__first <= 2147483647 @@@@@@@@@@ VC: procedure_skein_512_update_26. 
@@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H38 has been replaced by "true". (It is already present, as H28). --- Hypothesis H39 has been replaced by "true". (It is already present, as H29). --- Hypothesis H41 has been replaced by "true". (It is already present, as H28). --- Hypothesis H42 has been replaced by "true". (It is already present, as H29). %%% Simplified H43 on reading formula in, to give: %%% H43: msg__index__subtype__1__last div skein_512_block_bytes_c >= positive_block_512_count_t__first %%% Simplified H44 on reading formula in, to give: %%% H44: msg__index__subtype__1__last div skein_512_block_bytes_c <= positive_block_512_count_t__last %%% Simplified H46 on reading formula in, to give: %%% H46: msg__index__subtype__1__last >= integer__base__first %%% Simplified H47 on reading formula in, to give: %%% H47: msg__index__subtype__1__last <= integer__base__last --- Hypothesis H48 has been replaced by "true". (It is already present, as H30). --- Hypothesis H49 has been replaced by "true". (It is already present, as H31). --- Hypothesis H50 has been replaced by "true". (It is already present, as H43). --- Hypothesis H51 has been replaced by "true". (It is already present, as H44). --- Hypothesis H54 has been replaced by "true". (It is already present, as H43). --- Hypothesis H55 has been replaced by "true". (It is already present, as H44). --- Hypothesis H56 has been replaced by "true". (It is already present, as H30). --- Hypothesis H57 has been replaced by "true". (It is already present, as H31). --- Hypothesis H58 has been replaced by "true". (It is already present, as H1) . --- Hypothesis H59 has been replaced by "true". (It is already present, as H2) . --- Hypothesis H60 has been replaced by "true". (It is already present, as H3) . --- Hypothesis H61 has been replaced by "true". (It is already present, as H4) . --- Hypothesis H62 has been replaced by "true". (It is already present, as H5) . 
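(Aside, not part of the log: the for_all hypotheses read in here, such as H8, H9 and H27, all follow one pattern; each says that every element of an array lies within its component type's range. Written generically, with B standing for the component bound, which is 255 for the byte arrays fld_b and msg and 2^{64}-1 for the state words fld_x:)

\[
\forall i.\; \mathit{first} \le i \le \mathit{last} \;\Rightarrow\; 0 \le \mathit{element}(a,[i]) \le B .
\]

These are the hypotheses that the subsequent substitution rules rewrite by filling in the concrete index and component bounds.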
%%% Simplified H63 on reading formula in, to give: %%% H63: (msg__index__subtype__1__last div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last %%% Simplified H64 on reading formula in, to give: %%% H64: 63 <= msg__index__subtype__1__last %%% Simplified H66 on reading formula in, to give: %%% H66: 63 <= natural__last %%% Simplified H73 on reading formula in, to give: %%% H73: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H74 on reading formula in, to give: %%% H74: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H92 has been replaced by "true". (It is already present, as H43). --- Hypothesis H93 has been replaced by "true". (It is already present, as H44). %%% Simplified H94 on reading formula in, to give: %%% H94: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first %%% Simplified H95 on reading formula in, to give: %%% H95: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last %%% Simplified H96 on reading formula in, to give: %%% H96: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c < msg__index__subtype__1__last + 1 --- Hypothesis H97 has been replaced by "true". (It is already present, as H28). --- Hypothesis H98 has been replaced by "true". (It is already present, as H29). --- Hypothesis H99 has been replaced by "true". (It is already present, as H94). --- Hypothesis H100 has been replaced by "true". (It is already present, as H95). %%% Simplified H101 on reading formula in, to give: %%% H101: msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first %%% Simplified H102 on reading formula in, to give: %%% H102: msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last --- Hypothesis H103 has been replaced by "true". (It is already present, as H30). --- Hypothesis H104 has been replaced by "true". (It is already present, as H31). --- Hypothesis H105 has been replaced by "true". (It is already present, as H94). --- Hypothesis H106 has been replaced by "true". (It is already present, as H95). %%% Simplified H107 on reading formula in, to give: %%% H107: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first %%% Simplified H108 on reading formula in, to give: %%% H108: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last --- Hypothesis H109 has been replaced by "true". (It is already present, as H107). --- Hypothesis H110 has been replaced by "true". (It is already present, as H108). --- Hypothesis H111 has been replaced by "true". (It is already present, as H101). --- Hypothesis H112 has been replaced by "true". (It is already present, as H102). 
%%% Simplified C1 on reading formula in, to give: %%% C1: msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first %%% Simplified C2 on reading formula in, to give: %%% C2: msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last %%% Simplified C3 on reading formula in, to give: %%% C3: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first %%% Simplified C4 on reading formula in, to give: %%% C4: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last %%% Simplified C7 on reading formula in, to give: %%% C7: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c >= msg__index__subtype__1__first %%% Simplified C8 on reading formula in, to give: %%% C8: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c <= msg__index__subtype__1__last %%% Simplified C9 on reading formula in, to give: %%% C9: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c + (msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c - 1) <= msg__index__subtype__1__last %%% Simplified C10 on reading formula in, to give: %%% C10: fld_byte_count(fld_h(ctx__3)) + (msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c - 1) <= skein_512_block_bytes_index__last *** Proved C1: msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first using hypothesis H101. *** Proved C2: msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last using hypothesis H102. *** Proved C3: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first using hypothesis H94. *** Proved C4: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last using hypothesis H95. *** Proved C6: msg__index__subtype__1__first = 0 using hypothesis H5. -S- Applied substitution rule skein_512_up_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H73: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last) New C10: fld_byte_count(fld_h(ctx__3)) + (msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c - 1) <= 63 >>> Restructured hypothesis H37 into: >>> H37: fld_byte_count(fld_h(ctx)) <= 0 -S- Applied substitution rule skein_512_up_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. 
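(Aside, not part of the log: once skein_512_block_bytes_c is replaced by 64, the div-based conclusions of this VC come down to ordinary Euclidean-division facts. A worked form, writing L for msg__index__subtype__1__last and assuming L >= 0 so that FDL's div coincides with floor division:)

\[
q = \lfloor L/64 \rfloor,\qquad r = L - 64q,\qquad 0 \le r \le 63 .
\]

Then 64q <= L is exactly C8; 64q + (L + 1 - 64q - 1) = L makes C9 trivially true; and r <= 63, together with fld_byte_count(fld_h(ctx)) = 0 (the combined hypothesis H113 introduced further on) and the substitution of ctx__3's byte count by ctx's, gives C10. C7 follows from q >= 1 (H43) and msg__index__subtype__1__first = 0 (H5), since 64q >= 64 > 0. This matches the standard forms the simplifier reports when it discharges these conclusions below.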
New H7: msg__index__subtype__1__last + 65 <= natural__last New H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) > 64 New H40: msg__index__subtype__1__last > 63 New H43: msg__index__subtype__1__last div 64 >= positive_block_512_count_t__first New H44: msg__index__subtype__1__last div 64 <= positive_block_512_count_t__last New H45: true New H52: 64 >= natural__first New H53: 64 <= natural__last New H63: (msg__index__subtype__1__last div 64 - 1) * 64 + 63 <= msg__index__subtype__1__last New H94: msg__index__subtype__1__last div 64 * 64 >= natural__first New H95: msg__index__subtype__1__last div 64 * 64 <= natural__last New H96: msg__index__subtype__1__last div 64 * 64 < msg__index__subtype__1__last + 1 New H101: msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div 64 * 64 >= natural__first New H102: msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div 64 * 64 <= natural__last New H107: msg__index__subtype__1__last div 64 * 64 >= natural__first New H108: msg__index__subtype__1__last div 64 * 64 <= natural__last New C7: msg__index__subtype__1__last div 64 * 64 >= msg__index__subtype__1__first New C8: msg__index__subtype__1__last div 64 * 64 <= msg__index__subtype__1__last New C9: msg__index__subtype__1__last div 64 * 64 + ( msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div 64 * 64 - 1) <= msg__index__subtype__1__last New C10: fld_byte_count(fld_h(ctx__3)) + (msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div 64 * 64 - 1) <= 63 -S- Applied substitution rule skein_512_up_rules(5). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) >= - 2147483648 New H46: msg__index__subtype__1__last >= - 2147483648 -S- Applied substitution rule skein_512_up_rules(6). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= 2147483647 New H47: msg__index__subtype__1__last <= 2147483647 -S- Applied substitution rule skein_512_up_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H10: fld_byte_count(fld_h(ctx)) >= 0 New H28: msg__index__subtype__1__last >= - 1 New H30: true New H75: fld_byte_count(fld_h(ctx__3)) >= 0 New H52: true New H94: msg__index__subtype__1__last div 64 * 64 >= 0 New H101: msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div 64 * 64 >= 0 New H107: msg__index__subtype__1__last div 64 * 64 >= 0 -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H6: msg__index__subtype__1__last < 2147483647 New H11: fld_byte_count(fld_h(ctx)) <= 2147483647 New H29: msg__index__subtype__1__last <= 2147483646 New H31: true New H65: msg__index__subtype__1__last <= 2147483647 New H66: true New H76: fld_byte_count(fld_h(ctx__3)) <= 2147483647 New H7: msg__index__subtype__1__last <= 2147483582 New H53: true New H95: msg__index__subtype__1__last div 64 * 64 <= 2147483647 New H102: msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div 64 * 64 <= 2147483647 New H108: msg__index__subtype__1__last div 64 * 64 <= 2147483647 -S- Applied substitution rule skein_512_up_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. 
New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H81: fld_field_type(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H82: fld_field_type(fld_tweak_words(fld_h(ctx__3))) <= 63 -S- Applied substitution rule skein_512_up_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H84: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H85: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) <= 127 -S- Applied substitution rule skein_512_up_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) New H73: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_up_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) New H73: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H86: fld_reserved(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H87: fld_reserved(fld_tweak_words(fld_h(ctx__3))) <= 65535 -S- Applied substitution rule skein_512_up_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H88: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H89: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) <= 4294967295 -S- Applied substitution rule skein_512_up_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H74: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last) New H90: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) >= 0 -S- Applied substitution rule skein_512_up_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H91: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) <= 18446744073709551615 New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H74: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H12: fld_hash_bit_len(fld_h(ctx)) >= 0 New H77: fld_hash_bit_len(fld_h(ctx__3)) >= 0 -S- Applied substitution rule skein_512_up_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H13: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H78: fld_hash_bit_len(fld_h(ctx__3)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(87). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 New H67: fld_hash_bit_len(fld_h(ctx__3)) >= 1 -S- Applied substitution rule skein_512_up_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H68: fld_hash_bit_len(fld_h(ctx__3)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H74: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__3), [ i___1]) and element(fld_x(ctx__3), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H74: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. 
New H3: fld_byte_count(fld_h(ctx)) >= 0 New H70: fld_byte_count(fld_h(ctx__3)) >= 0 -S- Applied substitution rule skein_512_up_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 New H71: fld_byte_count(fld_h(ctx__3)) <= 64 -S- Applied substitution rule skein_512_up_rules(102). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H73: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(107). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H43: msg__index__subtype__1__last div 64 >= 1 -S- Applied substitution rule skein_512_up_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H44: msg__index__subtype__1__last div 64 <= 33554431 %%% Hypotheses H37 & H10 together imply that fld_byte_count(fld_h(ctx)) = 0. H37 & H10 have therefore been deleted and a new H113 added to this effect. *** Proved C5: fld_hash_bit_len(fld_h(ctx__3)) > 0 using hypothesis H67. *** Proved C7: msg__index__subtype__1__last div 64 * 64 >= msg__index__subtype__1__first using hypotheses H5 & H43. *** Proved C8: msg__index__subtype__1__last div 64 * 64 <= msg__index__subtype__1__last via its standard form, which is: Std.Fm C8: msg__index__subtype__1__last - 64 * ( msg__index__subtype__1__last div 64) > - 1 using hypothesis H96. *** Proved C9: msg__index__subtype__1__last div 64 * 64 + ( msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div 64 * 64 - 1) <= msg__index__subtype__1__last via its standard form, which is: Std.Fm C9: true --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H15 (true-hypothesis). --- Eliminated hypothesis H18 (true-hypothesis). --- Eliminated hypothesis H32 (true-hypothesis). --- Eliminated hypothesis H33 (true-hypothesis). --- Eliminated hypothesis H38 (true-hypothesis). --- Eliminated hypothesis H39 (true-hypothesis). --- Eliminated hypothesis H41 (true-hypothesis). --- Eliminated hypothesis H42 (true-hypothesis). --- Eliminated hypothesis H48 (true-hypothesis). --- Eliminated hypothesis H49 (true-hypothesis). --- Eliminated hypothesis H50 (true-hypothesis). --- Eliminated hypothesis H51 (true-hypothesis). --- Eliminated hypothesis H54 (true-hypothesis). --- Eliminated hypothesis H55 (true-hypothesis). --- Eliminated hypothesis H56 (true-hypothesis). --- Eliminated hypothesis H57 (true-hypothesis). --- Eliminated hypothesis H58 (true-hypothesis). --- Eliminated hypothesis H59 (true-hypothesis). --- Eliminated hypothesis H60 (true-hypothesis). --- Eliminated hypothesis H61 (true-hypothesis). --- Eliminated hypothesis H62 (true-hypothesis). --- Eliminated hypothesis H79 (true-hypothesis). --- Eliminated hypothesis H80 (true-hypothesis). --- Eliminated hypothesis H83 (true-hypothesis). --- Eliminated hypothesis H92 (true-hypothesis). --- Eliminated hypothesis H93 (true-hypothesis). --- Eliminated hypothesis H97 (true-hypothesis). --- Eliminated hypothesis H98 (true-hypothesis). --- Eliminated hypothesis H99 (true-hypothesis). --- Eliminated hypothesis H100 (true-hypothesis). --- Eliminated hypothesis H103 (true-hypothesis). 
--- Eliminated hypothesis H104 (true-hypothesis). --- Eliminated hypothesis H105 (true-hypothesis). --- Eliminated hypothesis H106 (true-hypothesis). --- Eliminated hypothesis H109 (true-hypothesis). --- Eliminated hypothesis H110 (true-hypothesis). --- Eliminated hypothesis H111 (true-hypothesis). --- Eliminated hypothesis H112 (true-hypothesis). --- Eliminated hypothesis H45 (true-hypothesis). --- Eliminated hypothesis H30 (true-hypothesis). --- Eliminated hypothesis H52 (true-hypothesis). --- Eliminated hypothesis H31 (true-hypothesis). --- Eliminated hypothesis H66 (true-hypothesis). --- Eliminated hypothesis H53 (true-hypothesis). --- Eliminated hypothesis H65 (duplicate of H47). --- Eliminated hypothesis H107 (duplicate of H94). --- Eliminated hypothesis H108 (duplicate of H95). --- Eliminated hypothesis H13 (duplicate of H2). --- Eliminated hypothesis H78 (duplicate of H68). --- Eliminated hypothesis H75 (duplicate of H70). --- Eliminated hypothesis H29 (duplicate of H6). --- Eliminated hypothesis H101 (duplicate of H63). --- Eliminated hypothesis H3 (redundant, given H113). --- Eliminated hypothesis H4 (redundant, given H113). --- Eliminated hypothesis H6 (redundant, given H7). --- Eliminated hypothesis H11 (redundant, given H113). --- Eliminated hypothesis H12 (redundant, given H1). --- Eliminated hypothesis H28 (redundant, given H40). --- Eliminated hypothesis H34 (redundant, given H36). --- Eliminated hypothesis H44 (redundant, given H95). --- Eliminated hypothesis H46 (redundant, given H40). --- Eliminated hypothesis H47 (redundant, given H7). --- Eliminated hypothesis H63 (redundant, given H96). --- Eliminated hypothesis H64 (redundant, given H40). --- Eliminated hypothesis H76 (redundant, given H71). --- Eliminated hypothesis H77 (redundant, given H67). --- Eliminated hypothesis H94 (redundant, given H43). --- Eliminated hypothesis H95 (redundant, given H7 & H96). -S- Substituted hypothesis H69. This was achieved by replacing all occurrences of fld_hash_bit_len(fld_h( ctx__3)) by: fld_hash_bit_len(fld_h(ctx)). -S- Substituted hypothesis H72. This was achieved by replacing all occurrences of fld_byte_count(fld_h( ctx__3)) by: fld_byte_count(fld_h(ctx)). *** Proved C10: fld_byte_count(fld_h(ctx)) + (msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div 64 * 64 - 1) <= 63 via its standard form, which is: Std.Fm C10: msg__index__subtype__1__last - 64 * ( msg__index__subtype__1__last div 64) + fld_byte_count(fld_h(ctx)) <= 63 using hypotheses H40 & H113. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_27. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". 
(It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H42 has been replaced by "true". (It is already present, as H30). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H38). --- Hypothesis H45 has been replaced by "true". (It is already present, as H39). --- Hypothesis H48 has been replaced by "true". (It is already present, as H30). --- Hypothesis H49 has been replaced by "true". (It is already present, as H31). --- Hypothesis H51 has been replaced by "true". (It is already present, as H5) . %%% Simplified H54 on reading formula in, to give: %%% H54: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last %%% Simplified H61 on reading formula in, to give: %%% H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H62 on reading formula in, to give: %%% H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H80 has been replaced by "true". (It is already present, as H28). --- Hypothesis H81 has been replaced by "true". (It is already present, as H29). --- Hypothesis H82 has been replaced by "true". (It is already present, as H38). --- Hypothesis H83 has been replaced by "true". (It is already present, as H39). --- Hypothesis H86 has been replaced by "true". (It is already present, as H38). --- Hypothesis H87 has been replaced by "true". (It is already present, as H39). --- Hypothesis H88 has been replaced by "true". (It is already present, as H30). --- Hypothesis H89 has been replaced by "true". (It is already present, as H31). %%% Simplified H90 on reading formula in, to give: %%% H90: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H91 on reading formula in, to give: %%% H91: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H93 on reading formula in, to give: %%% H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H98 has been replaced by "true". (It is already present, as H30). --- Hypothesis H99 has been replaced by "true". (It is already present, as H31). --- Hypothesis H102 has been replaced by "true". (It is already present, as H59). --- Hypothesis H103 has been replaced by "true". (It is already present, as H60). 
%%% Simplified H105 on reading formula in, to give: %%% H105: 63 <= skein_512_block_bytes_index__last %%% Simplified H106 on reading formula in, to give: %%% H106: 63 <= skein_512_block_bytes_index__last %%% Simplified H108 on reading formula in, to give: %%% H108: 63 <= natural__last %%% Simplified H115 on reading formula in, to give: %%% H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H116 on reading formula in, to give: %%% H116: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H134 has been replaced by "true". (It is already present, as H30). --- Hypothesis H135 has been replaced by "true". (It is already present, as H31). --- Hypothesis H136 has been replaced by "true". (It is already present, as H84). --- Hypothesis H137 has been replaced by "true". (It is already present, as H85). --- Hypothesis H139 has been replaced by "true". (It is already present, as H90). --- Hypothesis H140 has been replaced by "true". (It is already present, as H91). --- Hypothesis H141 has been replaced by "true". (It is already present, as H84). --- Hypothesis H142 has been replaced by "true". (It is already present, as H85). %%% Simplified C3 on reading formula in, to give: %%% C3: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified C4 on reading formula in, to give: %%% C4: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified C5 on reading formula in, to give: %%% C5: fld_hash_bit_len(fld_h(ctx__2)) > 0 %%% Simplified C7 on reading formula in, to give: %%% C7: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= msg__index__subtype__1__first %%% Simplified C8 on reading formula in, to give: %%% C8: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last %%% Simplified C9 on reading formula in, to give: %%% C9: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + ( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) <= msg__index__subtype__1__last %%% Simplified C10 on reading formula in, to give: %%% C10: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1 <= skein_512_block_bytes_index__last *** Proved C1: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) >= natural__first using hypothesis H84. *** Proved C2: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) <= natural__last using hypothesis H85. *** Proved C3: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first using hypothesis H46. *** Proved C4: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last using hypothesis H47. *** Proved C5: fld_hash_bit_len(fld_h(ctx__2)) > 0 using hypotheses H56 & H111. *** Proved C6: msg__index__subtype__1__first = 0 using hypothesis H5. *** Proved C7: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= msg__index__subtype__1__first using hypotheses H38, H52 & H104. -S- Applied substitution rule skein_512_up_rules(103). 
This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H39: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= 63 New H55: fld_byte_count(fld_h(ctx)) + (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) - 1) <= 63 New H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) New H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= 63 -> spark__unsigned__byte__first <= element( fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last) New H105: true New H106: true New H107: 63 <= natural__last New H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last) New C10: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) <= 64 >>> Restructured hypothesis H138 into: >>> H138: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) <= skein_512_block_bytes_c -S- Applied substitution rule skein_512_up_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H7: msg__index__subtype__1__last + 65 <= natural__last New H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) > 64 New H38: 64 - fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_index__first New H39: 1 <= fld_byte_count(fld_h(ctx)) New H40: 64 - fld_byte_count(fld_h(ctx)) < msg__index__subtype__1__last + 1 New H41: 64 - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last + 1 New H46: 64 - fld_byte_count(fld_h(ctx)) >= natural__first New H47: 64 - fld_byte_count(fld_h(ctx)) <= natural__last New H54: 64 - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last New H55: fld_byte_count(fld_h(ctx)) + (64 - fld_byte_count(fld_h(ctx)) - 1) <= 63 New H58: fld_byte_count(fld_h(ctx__1)) = fld_byte_count(fld_h(ctx)) + (64 - fld_byte_count(fld_h(ctx))) New H84: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) >= natural__first New H85: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) <= natural__last New H90: 64 - fld_byte_count(fld_h(ctx)) >= natural__first New H91: 64 - fld_byte_count(fld_h(ctx)) <= natural__last New H92: fld_byte_count(fld_h(ctx__1)) = 64 New H94: 64 >= natural__first New H95: 64 <= natural__last New H138: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) <= 64 New C8: 64 - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last New C9: 64 - fld_byte_count(fld_h(ctx)) + (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) <= msg__index__subtype__1__last New C10: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) <= 64 -S- Applied substitution rule skein_512_up_rules(5). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) >= - 2147483648 -S- Applied substitution rule skein_512_up_rules(6). 
This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= 2147483647 -S- Applied substitution rule skein_512_up_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H10: fld_byte_count(fld_h(ctx)) >= 0 New H28: msg__index__subtype__1__last >= - 1 New H30: true New H63: fld_byte_count(fld_h(ctx__1)) >= 0 New H117: fld_byte_count(fld_h(ctx__2)) >= 0 New H46: fld_byte_count(fld_h(ctx)) <= 64 New H84: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) >= 0 New H90: fld_byte_count(fld_h(ctx)) <= 64 New H94: true -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H6: msg__index__subtype__1__last < 2147483647 New H11: fld_byte_count(fld_h(ctx)) <= 2147483647 New H29: msg__index__subtype__1__last <= 2147483646 New H31: true New H64: fld_byte_count(fld_h(ctx__1)) <= 2147483647 New H107: true New H108: true New H118: fld_byte_count(fld_h(ctx__2)) <= 2147483647 New H7: msg__index__subtype__1__last <= 2147483582 New H47: - 2147483583 <= fld_byte_count(fld_h(ctx)) New H85: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) <= 2147483647 New H91: - 2147483583 <= fld_byte_count(fld_h(ctx)) New H95: true -S- Applied substitution rule skein_512_up_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 New H69: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= 0 New H123: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= 0 -S- Applied substitution rule skein_512_up_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 New H70: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= 63 New H124: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= 63 -S- Applied substitution rule skein_512_up_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 New H72: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= 0 New H126: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= 0 -S- Applied substitution rule skein_512_up_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 New H73: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= 127 New H127: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= 127 -S- Applied substitution rule skein_512_up_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. 
New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) New H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) New H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last) New H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_up_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) New H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) New H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= 255) New H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 New H74: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= 0 New H128: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= 0 -S- Applied substitution rule skein_512_up_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 New H75: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= 65535 New H129: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= 65535 -S- Applied substitution rule skein_512_up_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 New H76: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= 0 New H130: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= 0 -S- Applied substitution rule skein_512_up_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 New H77: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= 4294967295 New H131: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= 4294967295 -S- Applied substitution rule skein_512_up_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 New H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) New H78: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= 0 New H116: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last) New H132: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= 0 -S- Applied substitution rule skein_512_up_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H79: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= 18446744073709551615 New H133: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= 18446744073709551615 New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) New H116: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H12: fld_hash_bit_len(fld_h(ctx)) >= 0 New H65: fld_hash_bit_len(fld_h(ctx__1)) >= 0 New H119: fld_hash_bit_len(fld_h(ctx__2)) >= 0 -S- Applied substitution rule skein_512_up_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H13: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H66: fld_hash_bit_len(fld_h(ctx__1)) <= 2147483640 New H120: fld_hash_bit_len(fld_h(ctx__2)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(87). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 New H100: fld_hash_bit_len(fld_h(ctx__1)) >= 1 New H109: fld_hash_bit_len(fld_h(ctx__2)) >= 1 -S- Applied substitution rule skein_512_up_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New H101: fld_hash_bit_len(fld_h(ctx__1)) <= 2147483640 New H110: fld_hash_bit_len(fld_h(ctx__2)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. 
New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__1), [ i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) New H116: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx__2), [ i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) New H62: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) New H116: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 New H59: fld_byte_count(fld_h(ctx__1)) >= 0 New H112: fld_byte_count(fld_h(ctx__2)) >= 0 -S- Applied substitution rule skein_512_up_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 New H60: fld_byte_count(fld_h(ctx__1)) <= 64 New H113: fld_byte_count(fld_h(ctx__2)) <= 64 -S- Applied substitution rule skein_512_up_rules(102). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H104: true New H38: fld_byte_count(fld_h(ctx)) <= 64 New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H61: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) New H93: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element(fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= 255) New H115: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(107). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New H96: true -S- Applied substitution rule skein_512_up_rules(108). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New H97: true *** Proved C8: 64 - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last via its standard form, which is: Std.Fm C8: msg__index__subtype__1__last + fld_byte_count(fld_h(ctx)) > 63 using hypothesis H36. *** Proved C9: 64 - fld_byte_count(fld_h(ctx)) + ( msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) <= msg__index__subtype__1__last via its standard form, which is: Std.Fm C9: true *** Proved C10: msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h( ctx))) <= 64 using hypothesis H138. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_28. 
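Note (annotation, not part of the Simplifier output): the -S- substitution steps in the trace above do no more than instantiate named bounds with the literals recorded in the skein_512_up_rules rule set: spark__unsigned__byte__last becomes 255, spark__unsigned__u16__last 65535, spark__unsigned__u32__last 4294967295 and spark__unsigned__u64__last 18446744073709551615 (the 8-, 16-, 32- and 64-bit unsigned maxima), while skein_512_block_bytes_c becomes 64, the block byte index range 0 .. 63 and the state-word index range 0 .. 7. A minimal Python sanity check of those literals, offered purely as an illustration and not as part of the toolset:

    # Hypothetical cross-check of the literal bounds used by the substitution rules.
    assert 2**8  - 1 == 255                   # spark__unsigned__byte__last
    assert 2**16 - 1 == 65535                 # spark__unsigned__u16__last
    assert 2**32 - 1 == 4294967295            # spark__unsigned__u32__last
    assert 2**64 - 1 == 18446744073709551615  # spark__unsigned__u64__last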
@@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H38 has been replaced by "true". (It is already present, as H28). --- Hypothesis H39 has been replaced by "true". (It is already present, as H29). --- Hypothesis H41 has been replaced by "true". (It is already present, as H30). --- Hypothesis H42 has been replaced by "true". (It is already present, as H31). --- Hypothesis H43 has been replaced by "true". (It is already present, as H28). --- Hypothesis H44 has been replaced by "true". (It is already present, as H29). %%% Simplified C9 on reading formula in, to give: %%% C9: true %%% Simplified C10 on reading formula in, to give: %%% C10: fld_byte_count(fld_h(ctx)) + msg__index__subtype__1__last <= skein_512_block_bytes_index__last *** Proved C1: msg__index__subtype__1__last + 1 >= natural__first using hypothesis H28. *** Proved C2: msg__index__subtype__1__last + 1 <= natural__last using hypothesis H29. *** Proved C3: 0 >= natural__first using hypothesis H30. *** Proved C4: 0 <= natural__last using hypothesis H31. *** Proved C6: msg__index__subtype__1__first = 0 using hypothesis H5. *** Proved C7: 0 >= msg__index__subtype__1__first using hypothesis H5. *** Proved C9: true -S- Applied substitution rule skein_512_up_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New C10: fld_byte_count(fld_h(ctx)) + msg__index__subtype__1__last <= 63 >>> Restructured hypothesis H37 into: >>> H37: fld_byte_count(fld_h(ctx)) <= 0 >>> Restructured hypothesis H40 into: >>> H40: msg__index__subtype__1__last + 1 <= skein_512_block_bytes_c -S- Applied substitution rule skein_512_up_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H7: msg__index__subtype__1__last + 65 <= natural__last New H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) > 64 New H40: msg__index__subtype__1__last <= 63 -S- Applied substitution rule skein_512_up_rules(5). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) >= - 2147483648 -S- Applied substitution rule skein_512_up_rules(6). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. 
New H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= 2147483647 -S- Applied substitution rule skein_512_up_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H10: fld_byte_count(fld_h(ctx)) >= 0 New H28: msg__index__subtype__1__last >= - 1 New H30: true -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H6: msg__index__subtype__1__last < 2147483647 New H11: fld_byte_count(fld_h(ctx)) <= 2147483647 New H29: msg__index__subtype__1__last <= 2147483646 New H31: true New H7: msg__index__subtype__1__last <= 2147483582 -S- Applied substitution rule skein_512_up_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule skein_512_up_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule skein_512_up_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_up_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) -S- Applied substitution rule skein_512_up_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule skein_512_up_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule skein_512_up_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H12: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_up_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H13: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(87). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 -S- Applied substitution rule skein_512_up_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_up_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 -S- Applied substitution rule skein_512_up_rules(102). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) %%% Hypotheses H37 & H10 together imply that fld_byte_count(fld_h(ctx)) = 0. H37 & H10 have therefore been deleted and a new H45 added to this effect. *** Proved C5: fld_hash_bit_len(fld_h(ctx)) > 0 using hypothesis H1. *** Proved C10: fld_byte_count(fld_h(ctx)) + msg__index__subtype__1__last <= 63 using hypotheses H40 & H45. ### Established a contradiction [P-and-not-P] among the following hypotheses: H36, H40 & H45. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_29. 
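Note (annotation, not part of the Simplifier output): the [P-and-not-P] contradiction reported for procedure_skein_512_update_28 above is elementary arithmetic over the simplified hypotheses:

    H45: fld_byte_count(fld_h(ctx)) = 0
    H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) > 64,
         which with H45 gives msg__index__subtype__1__last > 63
    H40: msg__index__subtype__1__last <= 63

The last two lines cannot both hold, so the hypothesis set is inconsistent and the VC holds vacuously; C10 (fld_byte_count(fld_h(ctx)) + msg__index__subtype__1__last <= 63) had in any case already been discharged from H40 and H45.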
@@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H37 has been replaced by "true". (It is already present, as H30). --- Hypothesis H38 has been replaced by "true". (It is already present, as H31). --- Hypothesis H39 has been replaced by "true". (It is already present, as H28). --- Hypothesis H40 has been replaced by "true". (It is already present, as H29). %%% Simplified C9 on reading formula in, to give: %%% C9: true %%% Simplified C10 on reading formula in, to give: %%% C10: fld_byte_count(fld_h(ctx)) + msg__index__subtype__1__last <= skein_512_block_bytes_index__last *** Proved C1: msg__index__subtype__1__last + 1 >= natural__first using hypothesis H28. *** Proved C2: msg__index__subtype__1__last + 1 <= natural__last using hypothesis H29. *** Proved C3: 0 >= natural__first using hypothesis H30. *** Proved C4: 0 <= natural__last using hypothesis H31. *** Proved C6: msg__index__subtype__1__first = 0 using hypothesis H5. *** Proved C7: 0 >= msg__index__subtype__1__first using hypothesis H5. *** Proved C9: true -S- Applied substitution rule skein_512_up_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New C10: fld_byte_count(fld_h(ctx)) + msg__index__subtype__1__last <= 63 >>> Restructured hypothesis H36 into: >>> H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_c -S- Applied substitution rule skein_512_up_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H7: msg__index__subtype__1__last + 65 <= natural__last New H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= 64 -S- Applied substitution rule skein_512_up_rules(5). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H34: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) >= - 2147483648 -S- Applied substitution rule skein_512_up_rules(6). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H35: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= 2147483647 -S- Applied substitution rule skein_512_up_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. 
New H10: fld_byte_count(fld_h(ctx)) >= 0 New H28: msg__index__subtype__1__last >= - 1 New H30: true -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H6: msg__index__subtype__1__last < 2147483647 New H11: fld_byte_count(fld_h(ctx)) <= 2147483647 New H29: msg__index__subtype__1__last <= 2147483646 New H31: true New H7: msg__index__subtype__1__last <= 2147483582 -S- Applied substitution rule skein_512_up_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule skein_512_up_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule skein_512_up_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_up_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) -S- Applied substitution rule skein_512_up_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule skein_512_up_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule skein_512_up_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H12: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_up_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H13: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(87). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: fld_hash_bit_len(fld_h(ctx)) >= 1 -S- Applied substitution rule skein_512_up_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_up_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: fld_byte_count(fld_h(ctx)) <= 64 -S- Applied substitution rule skein_512_up_rules(102). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) *** Proved C5: fld_hash_bit_len(fld_h(ctx)) > 0 using hypothesis H1. *** Proved C10: fld_byte_count(fld_h(ctx)) + msg__index__subtype__1__last <= 63 via its standard form, which is: Std.Fm C10: - msg__index__subtype__1__last - fld_byte_count(fld_h(ctx)) > - 64 using hypothesis H36. --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H15 (true-hypothesis). --- Eliminated hypothesis H18 (true-hypothesis). --- Eliminated hypothesis H32 (true-hypothesis). --- Eliminated hypothesis H33 (true-hypothesis). --- Eliminated hypothesis H37 (true-hypothesis). --- Eliminated hypothesis H38 (true-hypothesis). --- Eliminated hypothesis H39 (true-hypothesis). --- Eliminated hypothesis H40 (true-hypothesis). 
--- Eliminated hypothesis H30 (true-hypothesis). --- Eliminated hypothesis H31 (true-hypothesis). --- Eliminated hypothesis H13 (duplicate of H2). --- Eliminated hypothesis H10 (duplicate of H3). --- Eliminated hypothesis H29 (duplicate of H6). --- Eliminated hypothesis H6 (redundant, given H7). --- Eliminated hypothesis H7 (redundant, given H3 & H36). --- Eliminated hypothesis H11 (redundant, given H4). --- Eliminated hypothesis H12 (redundant, given H1). --- Eliminated hypothesis H35 (redundant, given H36). *** Proved C8: 0 <= msg__index__subtype__1__last *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_30. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H42 has been replaced by "true". (It is already present, as H30). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H38). --- Hypothesis H45 has been replaced by "true". (It is already present, as H39). --- Hypothesis H48 has been replaced by "true". (It is already present, as H30). --- Hypothesis H49 has been replaced by "true". (It is already present, as H31). --- Hypothesis H51 has been replaced by "true". (It is already present, as H5) . %%% Simplified H54 on reading formula in, to give: %%% H54: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last %%% Simplified H61 on reading formula in, to give: %%% H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H62 on reading formula in, to give: %%% H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H80 has been replaced by "true". (It is already present, as H28). --- Hypothesis H81 has been replaced by "true". (It is already present, as H29). --- Hypothesis H82 has been replaced by "true". (It is already present, as H38). --- Hypothesis H83 has been replaced by "true". (It is already present, as H39). --- Hypothesis H86 has been replaced by "true". (It is already present, as H38). --- Hypothesis H87 has been replaced by "true". (It is already present, as H39). --- Hypothesis H88 has been replaced by "true". 
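Note (annotation, not part of the Simplifier output): for procedure_skein_512_update_29 above, C10 is just a rearrangement of H36: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= 64 is equivalent over the integers to fld_byte_count(fld_h(ctx)) + msg__index__subtype__1__last <= 63, which is what the standard form - msg__index__subtype__1__last - fld_byte_count(fld_h(ctx)) > - 64 states after negation. The redundancy eliminations are equally routine; for example H6 (msg__index__subtype__1__last < 2147483647) is dropped because it already follows from H7 (msg__index__subtype__1__last <= 2147483582).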
(It is already present, as H30). --- Hypothesis H89 has been replaced by "true". (It is already present, as H31). %%% Simplified H90 on reading formula in, to give: %%% H90: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H91 on reading formula in, to give: %%% H91: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H93 on reading formula in, to give: %%% H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H98 has been replaced by "true". (It is already present, as H30). --- Hypothesis H99 has been replaced by "true". (It is already present, as H31). --- Hypothesis H102 has been replaced by "true". (It is already present, as H59). --- Hypothesis H103 has been replaced by "true". (It is already present, as H60). %%% Simplified H105 on reading formula in, to give: %%% H105: 63 <= skein_512_block_bytes_index__last %%% Simplified H106 on reading formula in, to give: %%% H106: 63 <= skein_512_block_bytes_index__last %%% Simplified H108 on reading formula in, to give: %%% H108: 63 <= natural__last %%% Simplified H115 on reading formula in, to give: %%% H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H116 on reading formula in, to give: %%% H116: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H134 has been replaced by "true". (It is already present, as H30). --- Hypothesis H135 has been replaced by "true". (It is already present, as H31). --- Hypothesis H136 has been replaced by "true". (It is already present, as H84). --- Hypothesis H137 has been replaced by "true". (It is already present, as H85). --- Hypothesis H139 has been replaced by "true". (It is already present, as H84). --- Hypothesis H140 has been replaced by "true". (It is already present, as H85). --- Hypothesis H146 has been replaced by "true". (It is already present, as H90). --- Hypothesis H147 has been replaced by "true". (It is already present, as H91). --- Hypothesis H148 has been replaced by "true". (It is already present, as H141). --- Hypothesis H149 has been replaced by "true". (It is already present, as H142). --- Hypothesis H150 has been replaced by "true". (It is already present, as H94). --- Hypothesis H151 has been replaced by "true". (It is already present, as H95). --- Hypothesis H152 has been replaced by "true". (It is already present, as H141). --- Hypothesis H153 has been replaced by "true". (It is already present, as H142). --- Hypothesis H154 has been replaced by "true". (It is already present, as H90). --- Hypothesis H155 has been replaced by "true". (It is already present, as H91). 
%%% Simplified H156 on reading formula in, to give: %%% H156: fld_hash_bit_len(fld_h(ctx__2)) >= initialized_hash_bit_length__first %%% Simplified H157 on reading formula in, to give: %%% H157: fld_hash_bit_len(fld_h(ctx__2)) <= initialized_hash_bit_length__last %%% Simplified H158 on reading formula in, to give: %%% H158: 0 >= skein_512_block_bytes_count__first %%% Simplified H159 on reading formula in, to give: %%% H159: 0 <= skein_512_block_bytes_count__last --- Hypothesis H160 has been replaced by "true". (It is already present, as H5). %%% Simplified H161 on reading formula in, to give: %%% H161: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + (( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last %%% Simplified H162 on reading formula in, to give: %%% H162: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + 63 <= msg__index__subtype__1__last %%% Simplified H164 on reading formula in, to give: %%% H164: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last - 63 %%% Simplified H167 on reading formula in, to give: %%% H167: fld_hash_bit_len(fld_h(ctx__3)) = fld_hash_bit_len(fld_h( ctx__2)) %%% Simplified H170 on reading formula in, to give: %%% H170: fld_byte_count(fld_h(ctx__3)) = 0 %%% Simplified H171 on reading formula in, to give: %%% H171: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H172 on reading formula in, to give: %%% H172: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H190 has been replaced by "true". (It is already present, as H141). --- Hypothesis H191 has been replaced by "true". (It is already present, as H142). --- Hypothesis H195 has been replaced by "true". (It is already present, as H84). --- Hypothesis H196 has been replaced by "true". (It is already present, as H85). --- Hypothesis H197 has been replaced by "true". (It is already present, as H192). --- Hypothesis H198 has been replaced by "true". (It is already present, as H193). --- Hypothesis H201 has been replaced by "true". (It is already present, as H90). --- Hypothesis H202 has been replaced by "true". (It is already present, as H91). --- Hypothesis H203 has been replaced by "true". (It is already present, as H192). --- Hypothesis H204 has been replaced by "true". (It is already present, as H193). %%% Simplified H205 on reading formula in, to give: %%% H205: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + ( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first %%% Simplified H206 on reading formula in, to give: %%% H206: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + ( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last --- Hypothesis H207 has been replaced by "true". (It is already present, as H205). --- Hypothesis H208 has been replaced by "true". 
(It is already present, as H206). --- Hypothesis H209 has been replaced by "true". (It is already present, as H199). --- Hypothesis H210 has been replaced by "true". (It is already present, as H200). --- Hypothesis H211 has been replaced by "true". (It is already present, as H199). --- Hypothesis H212 has been replaced by "true". (It is already present, as H200). --- Hypothesis H213 has been replaced by "true". (It is already present, as H205). --- Hypothesis H214 has been replaced by "true". (It is already present, as H206). --- Hypothesis H216 has been replaced by "true". (It is already present, as H5). %%% Simplified H217 on reading formula in, to give: %%% H217: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + ( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c >= msg__index__subtype__1__first %%% Simplified H218 on reading formula in, to give: %%% H218: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + ( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c <= msg__index__subtype__1__last %%% Simplified H219 on reading formula in, to give: %%% H219: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + ( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c + (msg__index__subtype__1__last + 1 - ( skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - ( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) div skein_512_block_bytes_c * skein_512_block_bytes_c - 1) <= msg__index__subtype__1__last %%% Simplified H226 on reading formula in, to give: %%% H226: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__4), [i___2]) and element(fld_b(ctx__4), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H227 on reading formula in, to give: %%% H227: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__4), [i___1]) and element(fld_x(ctx__4), [i___1]) <= spark__unsigned__u64__last) *** Proved C1: fld_hash_bit_len(fld_h(ctx__4)) >= initialized_hash_bit_length__first using hypotheses H165 & H222. *** Proved C2: fld_hash_bit_len(fld_h(ctx__4)) <= initialized_hash_bit_length__last using hypotheses H166 & H222. *** Proved C3: fld_hash_bit_len(fld_h(ctx__4)) = fld_hash_bit_len(fld_h(ctx)) using hypotheses H57, H111, H167 & H222. *** Proved C4: fld_byte_count(fld_h(ctx__4)) >= skein_512_block_bytes_count__first using hypothesis H224. *** Proved C5: fld_byte_count(fld_h(ctx__4)) <= skein_512_block_bytes_count__last using hypothesis H225. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_31. 
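Note (annotation, not part of the Simplifier output): the conclusions of procedure_skein_512_update_30 above are the usual state-preservation obligations: C1 and C2 bound fld_hash_bit_len(fld_h(ctx__4)), C3 equates it with fld_hash_bit_len(fld_h(ctx)), and C4 and C5 bound fld_byte_count(fld_h(ctx__4)). They are discharged by chaining equalities and range bounds through the intermediate states ctx__1 .. ctx__4; H167 (fld_hash_bit_len(fld_h(ctx__3)) = fld_hash_bit_len(fld_h(ctx__2))) is one such link visible above, and the other hypotheses cited (H57, H111, H165, H166, H222, H224 and H225, not reproduced in full in this trace) presumably supply the remaining links and bounds.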
@@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H38 has been replaced by "true". (It is already present, as H28). --- Hypothesis H39 has been replaced by "true". (It is already present, as H29). --- Hypothesis H41 has been replaced by "true". (It is already present, as H28). --- Hypothesis H42 has been replaced by "true". (It is already present, as H29). %%% Simplified H43 on reading formula in, to give: %%% H43: msg__index__subtype__1__last div skein_512_block_bytes_c >= positive_block_512_count_t__first %%% Simplified H44 on reading formula in, to give: %%% H44: msg__index__subtype__1__last div skein_512_block_bytes_c <= positive_block_512_count_t__last %%% Simplified H46 on reading formula in, to give: %%% H46: msg__index__subtype__1__last >= integer__base__first %%% Simplified H47 on reading formula in, to give: %%% H47: msg__index__subtype__1__last <= integer__base__last --- Hypothesis H48 has been replaced by "true". (It is already present, as H30). --- Hypothesis H49 has been replaced by "true". (It is already present, as H31). --- Hypothesis H50 has been replaced by "true". (It is already present, as H43). --- Hypothesis H51 has been replaced by "true". (It is already present, as H44). --- Hypothesis H54 has been replaced by "true". (It is already present, as H43). --- Hypothesis H55 has been replaced by "true". (It is already present, as H44). --- Hypothesis H56 has been replaced by "true". (It is already present, as H30). --- Hypothesis H57 has been replaced by "true". (It is already present, as H31). --- Hypothesis H58 has been replaced by "true". (It is already present, as H1) . --- Hypothesis H59 has been replaced by "true". (It is already present, as H2) . --- Hypothesis H60 has been replaced by "true". (It is already present, as H3) . --- Hypothesis H61 has been replaced by "true". (It is already present, as H4) . --- Hypothesis H62 has been replaced by "true". (It is already present, as H5) . 
%%% Simplified H63 on reading formula in, to give: %%% H63: (msg__index__subtype__1__last div skein_512_block_bytes_c - 1) * skein_512_block_bytes_c + 63 <= msg__index__subtype__1__last %%% Simplified H64 on reading formula in, to give: %%% H64: 63 <= msg__index__subtype__1__last %%% Simplified H66 on reading formula in, to give: %%% H66: 63 <= natural__last %%% Simplified H73 on reading formula in, to give: %%% H73: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H74 on reading formula in, to give: %%% H74: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H92 has been replaced by "true". (It is already present, as H43). --- Hypothesis H93 has been replaced by "true". (It is already present, as H44). %%% Simplified H94 on reading formula in, to give: %%% H94: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first %%% Simplified H95 on reading formula in, to give: %%% H95: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last %%% Simplified H96 on reading formula in, to give: %%% H96: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c < msg__index__subtype__1__last + 1 --- Hypothesis H97 has been replaced by "true". (It is already present, as H28). --- Hypothesis H98 has been replaced by "true". (It is already present, as H29). --- Hypothesis H99 has been replaced by "true". (It is already present, as H94). --- Hypothesis H100 has been replaced by "true". (It is already present, as H95). %%% Simplified H101 on reading formula in, to give: %%% H101: msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first %%% Simplified H102 on reading formula in, to give: %%% H102: msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last --- Hypothesis H103 has been replaced by "true". (It is already present, as H30). --- Hypothesis H104 has been replaced by "true". (It is already present, as H31). --- Hypothesis H105 has been replaced by "true". (It is already present, as H94). --- Hypothesis H106 has been replaced by "true". (It is already present, as H95). %%% Simplified H107 on reading formula in, to give: %%% H107: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c >= natural__first %%% Simplified H108 on reading formula in, to give: %%% H108: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c <= natural__last --- Hypothesis H109 has been replaced by "true". (It is already present, as H107). --- Hypothesis H110 has been replaced by "true". (It is already present, as H108). --- Hypothesis H111 has been replaced by "true". (It is already present, as H101). --- Hypothesis H112 has been replaced by "true". (It is already present, as H102). --- Hypothesis H113 has been replaced by "true". (It is already present, as H101). --- Hypothesis H114 has been replaced by "true". (It is already present, as H102). 
--- Hypothesis H115 has been replaced by "true". (It is already present, as H107). --- Hypothesis H116 has been replaced by "true". (It is already present, as H108). --- Hypothesis H118 has been replaced by "true". (It is already present, as H5). %%% Simplified H119 on reading formula in, to give: %%% H119: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c >= msg__index__subtype__1__first %%% Simplified H120 on reading formula in, to give: %%% H120: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c <= msg__index__subtype__1__last %%% Simplified H121 on reading formula in, to give: %%% H121: msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c + (msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c - 1) <= msg__index__subtype__1__last %%% Simplified H122 on reading formula in, to give: %%% H122: fld_byte_count(fld_h(ctx__3)) + (msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c - 1) <= skein_512_block_bytes_index__last %%% Simplified H125 on reading formula in, to give: %%% H125: fld_byte_count(fld_h(ctx__4)) = fld_byte_count(fld_h(ctx__3)) + (msg__index__subtype__1__last + 1 - msg__index__subtype__1__last div skein_512_block_bytes_c * skein_512_block_bytes_c) %%% Simplified H128 on reading formula in, to give: %%% H128: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__4), [i___2]) and element(fld_b(ctx__4), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H129 on reading formula in, to give: %%% H129: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__4), [i___1]) and element(fld_x(ctx__4), [i___1]) <= spark__unsigned__u64__last) *** Proved C1: fld_hash_bit_len(fld_h(ctx__4)) >= initialized_hash_bit_length__first using hypotheses H67 & H124. *** Proved C2: fld_hash_bit_len(fld_h(ctx__4)) <= initialized_hash_bit_length__last using hypotheses H68 & H124. *** Proved C3: fld_hash_bit_len(fld_h(ctx__4)) = fld_hash_bit_len(fld_h(ctx)) using hypotheses H69 & H124. *** Proved C4: fld_byte_count(fld_h(ctx__4)) >= skein_512_block_bytes_count__first using hypothesis H126. *** Proved C5: fld_byte_count(fld_h(ctx__4)) <= skein_512_block_bytes_count__last using hypothesis H127. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_32. 
@@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H42 has been replaced by "true". (It is already present, as H30). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H38). --- Hypothesis H45 has been replaced by "true". (It is already present, as H39). --- Hypothesis H48 has been replaced by "true". (It is already present, as H30). --- Hypothesis H49 has been replaced by "true". (It is already present, as H31). --- Hypothesis H51 has been replaced by "true". (It is already present, as H5) . %%% Simplified H54 on reading formula in, to give: %%% H54: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) - 1 <= msg__index__subtype__1__last %%% Simplified H61 on reading formula in, to give: %%% H61: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H62 on reading formula in, to give: %%% H62: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H80 has been replaced by "true". (It is already present, as H28). --- Hypothesis H81 has been replaced by "true". (It is already present, as H29). --- Hypothesis H82 has been replaced by "true". (It is already present, as H38). --- Hypothesis H83 has been replaced by "true". (It is already present, as H39). --- Hypothesis H86 has been replaced by "true". (It is already present, as H38). --- Hypothesis H87 has been replaced by "true". (It is already present, as H39). --- Hypothesis H88 has been replaced by "true". (It is already present, as H30). --- Hypothesis H89 has been replaced by "true". (It is already present, as H31). 
%%% Simplified H90 on reading formula in, to give: %%% H90: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= natural__first %%% Simplified H91 on reading formula in, to give: %%% H91: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= natural__last %%% Simplified H93 on reading formula in, to give: %%% H93: for_all(i___1 : integer, skein_512_block_bytes_index__first <= i___1 and i___1 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H98 has been replaced by "true". (It is already present, as H30). --- Hypothesis H99 has been replaced by "true". (It is already present, as H31). --- Hypothesis H102 has been replaced by "true". (It is already present, as H59). --- Hypothesis H103 has been replaced by "true". (It is already present, as H60). %%% Simplified H105 on reading formula in, to give: %%% H105: 63 <= skein_512_block_bytes_index__last %%% Simplified H106 on reading formula in, to give: %%% H106: 63 <= skein_512_block_bytes_index__last %%% Simplified H108 on reading formula in, to give: %%% H108: 63 <= natural__last %%% Simplified H115 on reading formula in, to give: %%% H115: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H116 on reading formula in, to give: %%% H116: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H134 has been replaced by "true". (It is already present, as H30). --- Hypothesis H135 has been replaced by "true". (It is already present, as H31). --- Hypothesis H136 has been replaced by "true". (It is already present, as H84). --- Hypothesis H137 has been replaced by "true". (It is already present, as H85). --- Hypothesis H139 has been replaced by "true". (It is already present, as H90). --- Hypothesis H140 has been replaced by "true". (It is already present, as H91). --- Hypothesis H141 has been replaced by "true". (It is already present, as H84). --- Hypothesis H142 has been replaced by "true". (It is already present, as H85). --- Hypothesis H143 has been replaced by "true". (It is already present, as H84). --- Hypothesis H144 has been replaced by "true". (It is already present, as H85). --- Hypothesis H145 has been replaced by "true". (It is already present, as H90). --- Hypothesis H146 has been replaced by "true". (It is already present, as H91). %%% Simplified H147 on reading formula in, to give: %%% H147: fld_hash_bit_len(fld_h(ctx__2)) > 0 --- Hypothesis H148 has been replaced by "true". (It is already present, as H5). 
%%% Simplified H149 on reading formula in, to give: %%% H149: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) >= msg__index__subtype__1__first %%% Simplified H150 on reading formula in, to give: %%% H150: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) <= msg__index__subtype__1__last %%% Simplified H151 on reading formula in, to give: %%% H151: skein_512_block_bytes_c - fld_byte_count(fld_h(ctx)) + ( msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1) <= msg__index__subtype__1__last %%% Simplified H152 on reading formula in, to give: %%% H152: msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) - 1 <= skein_512_block_bytes_index__last %%% Simplified H154 on reading formula in, to give: %%% H154: fld_hash_bit_len(fld_h(ctx__4)) = fld_hash_bit_len(fld_h( ctx__2)) %%% Simplified H155 on reading formula in, to give: %%% H155: fld_byte_count(fld_h(ctx__4)) = msg__index__subtype__1__last + 1 - (skein_512_block_bytes_c - fld_byte_count(fld_h(ctx))) %%% Simplified H158 on reading formula in, to give: %%% H158: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__4), [i___2]) and element(fld_b(ctx__4), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H159 on reading formula in, to give: %%% H159: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__4), [i___1]) and element(fld_x(ctx__4), [i___1]) <= spark__unsigned__u64__last) *** Proved C1: fld_hash_bit_len(fld_h(ctx__4)) >= initialized_hash_bit_length__first using hypotheses H109 & H154. *** Proved C2: fld_hash_bit_len(fld_h(ctx__4)) <= initialized_hash_bit_length__last using hypotheses H110 & H154. *** Proved C3: fld_hash_bit_len(fld_h(ctx__4)) = fld_hash_bit_len(fld_h(ctx)) using hypotheses H57, H111 & H154. *** Proved C4: fld_byte_count(fld_h(ctx__4)) >= skein_512_block_bytes_count__first using hypothesis H156. *** Proved C5: fld_byte_count(fld_h(ctx__4)) <= skein_512_block_bytes_count__last using hypothesis H157. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_33. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H38 has been replaced by "true". (It is already present, as H28). --- Hypothesis H39 has been replaced by "true". (It is already present, as H29). --- Hypothesis H41 has been replaced by "true". 
(It is already present, as H30). --- Hypothesis H42 has been replaced by "true". (It is already present, as H31). --- Hypothesis H43 has been replaced by "true". (It is already present, as H28). --- Hypothesis H44 has been replaced by "true". (It is already present, as H29). --- Hypothesis H45 has been replaced by "true". (It is already present, as H28). --- Hypothesis H46 has been replaced by "true". (It is already present, as H29). --- Hypothesis H47 has been replaced by "true". (It is already present, as H30). --- Hypothesis H48 has been replaced by "true". (It is already present, as H31). --- Hypothesis H50 has been replaced by "true". (It is already present, as H5) . %%% Simplified H53 on reading formula in, to give: %%% H53: true %%% Simplified H54 on reading formula in, to give: %%% H54: fld_byte_count(fld_h(ctx)) + msg__index__subtype__1__last <= skein_512_block_bytes_index__last %%% Simplified H60 on reading formula in, to give: %%% H60: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__4), [i___2]) and element(fld_b(ctx__4), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H61 on reading formula in, to give: %%% H61: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__4), [i___1]) and element(fld_x(ctx__4), [i___1]) <= spark__unsigned__u64__last) *** Proved C1: fld_hash_bit_len(fld_h(ctx__4)) >= initialized_hash_bit_length__first using hypotheses H1 & H56. *** Proved C2: fld_hash_bit_len(fld_h(ctx__4)) <= initialized_hash_bit_length__last using hypotheses H2 & H56. *** Proved C3: fld_hash_bit_len(fld_h(ctx__4)) = fld_hash_bit_len(fld_h(ctx)) using hypothesis H56. *** Proved C4: fld_byte_count(fld_h(ctx__4)) >= skein_512_block_bytes_count__first using hypothesis H58. *** Proved C5: fld_byte_count(fld_h(ctx__4)) <= skein_512_block_bytes_count__last using hypothesis H59. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_34. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) --- Hypothesis H32 has been replaced by "true". (It is already present, as H28). --- Hypothesis H33 has been replaced by "true". (It is already present, as H29). --- Hypothesis H37 has been replaced by "true". (It is already present, as H30). --- Hypothesis H38 has been replaced by "true". (It is already present, as H31). --- Hypothesis H39 has been replaced by "true". (It is already present, as H28). --- Hypothesis H40 has been replaced by "true". (It is already present, as H29). --- Hypothesis H41 has been replaced by "true". 
(It is already present, as H28). --- Hypothesis H42 has been replaced by "true". (It is already present, as H29). --- Hypothesis H43 has been replaced by "true". (It is already present, as H30). --- Hypothesis H44 has been replaced by "true". (It is already present, as H31). --- Hypothesis H46 has been replaced by "true". (It is already present, as H5) . %%% Simplified H49 on reading formula in, to give: %%% H49: true %%% Simplified H50 on reading formula in, to give: %%% H50: fld_byte_count(fld_h(ctx)) + msg__index__subtype__1__last <= skein_512_block_bytes_index__last %%% Simplified H56 on reading formula in, to give: %%% H56: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__4), [i___2]) and element(fld_b(ctx__4), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H57 on reading formula in, to give: %%% H57: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__4), [i___1]) and element(fld_x(ctx__4), [i___1]) <= spark__unsigned__u64__last) *** Proved C1: fld_hash_bit_len(fld_h(ctx__4)) >= initialized_hash_bit_length__first using hypotheses H1 & H52. *** Proved C2: fld_hash_bit_len(fld_h(ctx__4)) <= initialized_hash_bit_length__last using hypotheses H2 & H52. *** Proved C3: fld_hash_bit_len(fld_h(ctx__4)) = fld_hash_bit_len(fld_h(ctx)) using hypothesis H52. *** Proved C4: fld_byte_count(fld_h(ctx__4)) >= skein_512_block_bytes_count__first using hypothesis H54. *** Proved C5: fld_byte_count(fld_h(ctx__4)) <= skein_512_block_bytes_count__last using hypothesis H55. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_update_35. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H9 on reading formula in, to give: %%% H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H27 on reading formula in, to give: %%% H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) *** Proved C5: msg__index__subtype__1__first = 0 using hypothesis H5. *** Proved C6: msg__index__subtype__1__last < natural__last using hypothesis H6. -S- Applied substitution rule skein_512_up_rules(87). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: hash_bit_len_of(ctx) >= 1 New C1: fld_hash_bit_len(fld_h(ctx)) >= 1 -S- Applied substitution rule skein_512_up_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: hash_bit_len_of(ctx) <= 2147483640 New C2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. 
New H3: byte_count_of(ctx) >= 0 New C3: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_up_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: byte_count_of(ctx) <= 64 New C4: fld_byte_count(fld_h(ctx)) <= 64 -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H6: msg__index__subtype__1__last < 2147483647 New H7: msg__index__subtype__1__last + skein_512_block_bytes_c < 2147483647 New H11: fld_byte_count(fld_h(ctx)) <= 2147483647 New C7: msg__index__subtype__1__last + skein_512_block_bytes_c <= 2147483646 *** Proved C7: msg__index__subtype__1__last + skein_512_block_bytes_c <= 2147483646 using hypothesis H7. -S- Applied substitution rule skein_512_up_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H7: msg__index__subtype__1__last < 2147483583 -S- Applied substitution rule skein_512_up_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H10: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_up_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H16: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H17: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule skein_512_up_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H19: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H20: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule skein_512_up_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_up_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) New H27: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) -S- Applied substitution rule skein_512_up_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H21: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H22: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule skein_512_up_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. 
New H23: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule skein_512_up_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H25: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H9: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H12: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_up_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H13: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H9: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(102). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H8: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) *** Proved C2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 using hypothesis H13. *** Proved C3: fld_byte_count(fld_h(ctx)) >= 0 using hypothesis H10. --- Eliminated hypothesis H14 (true-hypothesis). --- Eliminated hypothesis H15 (true-hypothesis). --- Eliminated hypothesis H18 (true-hypothesis). --- Eliminated hypothesis H6 (redundant, given H7). 
+++ New H28: integer__size >= 0 +++ New H29: natural__size >= 0 +++ New H30: spark__unsigned__u6__size >= 0 +++ New H31: spark__unsigned__u7__size >= 0 +++ New H32: spark__unsigned__byte__size >= 0 +++ New H33: spark__unsigned__u16__size >= 0 +++ New H34: spark__unsigned__u32__size >= 0 +++ New H35: spark__unsigned__u64__size >= 0 +++ New H36: spark__crypto__word_count_t__size >= 0 +++ New H37: hash_bit_length__size >= 0 +++ New H38: initialized_hash_bit_length__size >= 0 +++ New H39: skein_512_state_words_index__size >= 0 +++ New H40: skein_512_block_bytes_count__size >= 0 +++ New H41: skein_512_block_bytes_index__size >= 0 +++ New H42: positive_block_512_count_t__size >= 0 +++ New H43: skein_512_context__size >= 0 +++ New H44: msg__index__subtype__1__first <= msg__index__subtype__1__last +++ New H45: context_header__size >= 0 +++ New H46: msg__index__subtype__1__first >= 0 +++ New H47: msg__index__subtype__1__last >= 0 +++ New H48: msg__index__subtype__1__last <= 2147483647 +++ New H49: msg__index__subtype__1__first <= 2147483647 *** Proved C1: fld_hash_bit_len(fld_h(ctx)) >= 1 This was achieved by applying the rewrite rule skein_rules(2) [from rulefile skein.rlu] to rewrite this conclusion to: C1: hash_bit_len_of(ctx) >= 1 This rule could be applied because its side-conditions hold, as follows: <<< From H1, proved: hash_bit_len_of(ctx) >= 1 <<< Immediate condition checktype(ctx, skein_512_context) evaluated successfully *** Proved C4: fld_byte_count(fld_h(ctx)) <= 64 This was achieved by applying the rewrite rule skein_rules(1) [from rulefile skein.rlu] to rewrite this conclusion to: C4: byte_count_of(ctx) <= 64 This rule could be applied because its side-conditions hold, as follows: <<< From H4, proved: byte_count_of(ctx) <= 64 <<< Immediate condition checktype(ctx, skein_512_context) evaluated successfully *** PROVED VC. VCN 35: Summary of user rule application. FIL skein.rlu RUL skein_rules(2) CON 1 RUL skein_rules(1) CON 4 @@@@@@@@@@ VC: procedure_skein_512_update_36. @@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: hash_bit_len_of(ctx~) >= initialized_hash_bit_length__first %%% Simplified H2 on reading formula in, to give: %%% H2: hash_bit_len_of(ctx~) <= initialized_hash_bit_length__last %%% Simplified H3 on reading formula in, to give: %%% H3: byte_count_of(ctx~) >= skein_512_block_bytes_count__first %%% Simplified H4 on reading formula in, to give: %%% H4: byte_count_of(ctx~) <= skein_512_block_bytes_count__last %%% Simplified H8 on reading formula in, to give: %%% H8: fld_hash_bit_len(fld_h(ctx~)) >= initialized_hash_bit_length__first %%% Simplified H9 on reading formula in, to give: %%% H9: fld_hash_bit_len(fld_h(ctx~)) <= initialized_hash_bit_length__last %%% Simplified H10 on reading formula in, to give: %%% H10: fld_byte_count(fld_h(ctx~)) >= skein_512_block_bytes_count__first %%% Simplified H11 on reading formula in, to give: %%% H11: fld_byte_count(fld_h(ctx~)) <= skein_512_block_bytes_count__last --- Hypothesis H12 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H13 has been replaced by "true". (It is already present, as H6) . 
%%% Simplified H15 on reading formula in, to give: %%% H15: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx~), [i___2]) and element(fld_b(ctx~), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H16 on reading formula in, to give: %%% H16: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx~), [i___1]) and element(fld_x(ctx~), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H17 on reading formula in, to give: %%% H17: fld_byte_count(fld_h(ctx~)) >= natural__first %%% Simplified H18 on reading formula in, to give: %%% H18: fld_byte_count(fld_h(ctx~)) <= natural__last %%% Simplified H19 on reading formula in, to give: %%% H19: fld_hash_bit_len(fld_h(ctx~)) >= hash_bit_length__first %%% Simplified H20 on reading formula in, to give: %%% H20: fld_hash_bit_len(fld_h(ctx~)) <= hash_bit_length__last %%% Simplified H23 on reading formula in, to give: %%% H23: fld_field_type(fld_tweak_words(fld_h(ctx~))) >= spark__unsigned__u6__first %%% Simplified H24 on reading formula in, to give: %%% H24: fld_field_type(fld_tweak_words(fld_h(ctx~))) <= spark__unsigned__u6__last %%% Simplified H26 on reading formula in, to give: %%% H26: fld_tree_level(fld_tweak_words(fld_h(ctx~))) >= spark__unsigned__u7__first %%% Simplified H27 on reading formula in, to give: %%% H27: fld_tree_level(fld_tweak_words(fld_h(ctx~))) <= spark__unsigned__u7__last %%% Simplified H28 on reading formula in, to give: %%% H28: fld_reserved(fld_tweak_words(fld_h(ctx~))) >= spark__unsigned__u16__first %%% Simplified H29 on reading formula in, to give: %%% H29: fld_reserved(fld_tweak_words(fld_h(ctx~))) <= spark__unsigned__u16__last %%% Simplified H30 on reading formula in, to give: %%% H30: fld_byte_count_msb(fld_tweak_words(fld_h(ctx~))) >= spark__unsigned__u32__first %%% Simplified H31 on reading formula in, to give: %%% H31: fld_byte_count_msb(fld_tweak_words(fld_h(ctx~))) <= spark__unsigned__u32__last %%% Simplified H32 on reading formula in, to give: %%% H32: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx~))) >= spark__unsigned__u64__first %%% Simplified H33 on reading formula in, to give: %%% H33: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx~))) <= spark__unsigned__u64__last %%% Simplified H34 on reading formula in, to give: %%% H34: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> spark__unsigned__byte__first <= element(msg, [i___1]) and element( msg, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H35 on reading formula in, to give: %%% H35: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H36 on reading formula in, to give: %%% H36: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H56 on reading formula in, to give: %%% H56: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h(ctx~)) %%% Simplified C3 on reading formula in, to give: %%% C3: hash_bit_len_of(ctx) = hash_bit_len_of(ctx~) -S- Applied 
substitution rule skein_512_up_rules(87). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H1: hash_bit_len_of(ctx~) >= 1 New H8: fld_hash_bit_len(fld_h(ctx~)) >= 1 New H54: fld_hash_bit_len(fld_h(ctx)) >= 1 New C1: hash_bit_len_of(ctx) >= 1 -S- Applied substitution rule skein_512_up_rules(88). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H2: hash_bit_len_of(ctx~) <= 2147483640 New H9: fld_hash_bit_len(fld_h(ctx~)) <= 2147483640 New H55: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New C2: hash_bit_len_of(ctx) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H3: byte_count_of(ctx~) >= 0 New H10: fld_byte_count(fld_h(ctx~)) >= 0 New H57: fld_byte_count(fld_h(ctx)) >= 0 New C4: byte_count_of(ctx) >= 0 -S- Applied substitution rule skein_512_up_rules(98). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H4: byte_count_of(ctx~) <= 64 New H11: fld_byte_count(fld_h(ctx~)) <= 64 New H58: fld_byte_count(fld_h(ctx)) <= 64 New C5: byte_count_of(ctx) <= 64 -S- Applied substitution rule skein_512_up_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H7: msg__index__subtype__1__last + 64 < natural__last New H14: msg__index__subtype__1__last + 65 <= natural__last -S- Applied substitution rule skein_512_up_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H17: fld_byte_count(fld_h(ctx~)) >= 0 New H37: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_up_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H6: msg__index__subtype__1__last < 2147483647 New H18: fld_byte_count(fld_h(ctx~)) <= 2147483647 New H38: fld_byte_count(fld_h(ctx)) <= 2147483647 New H7: msg__index__subtype__1__last < 2147483583 New H14: msg__index__subtype__1__last <= 2147483582 -S- Applied substitution rule skein_512_up_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H23: fld_field_type(fld_tweak_words(fld_h(ctx~))) >= 0 New H43: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H24: fld_field_type(fld_tweak_words(fld_h(ctx~))) <= 63 New H44: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule skein_512_up_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H26: fld_tree_level(fld_tweak_words(fld_h(ctx~))) >= 0 New H46: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H27: fld_tree_level(fld_tweak_words(fld_h(ctx~))) <= 127 New H47: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule skein_512_up_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. 
New H15: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx~), [i___2]) and element(fld_b(ctx~), [i___2]) <= spark__unsigned__byte__last) New H34: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= spark__unsigned__byte__last) New H35: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_up_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H15: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx~), [i___2]) and element(fld_b(ctx~), [i___2]) <= 255) New H34: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1] ) and element(msg, [i___1]) <= 255) New H35: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H28: fld_reserved(fld_tweak_words(fld_h(ctx~))) >= 0 New H48: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H29: fld_reserved(fld_tweak_words(fld_h(ctx~))) <= 65535 New H49: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule skein_512_up_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H30: fld_byte_count_msb(fld_tweak_words(fld_h(ctx~))) >= 0 New H50: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H31: fld_byte_count_msb(fld_tweak_words(fld_h(ctx~))) <= 4294967295 New H51: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule skein_512_up_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H16: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx~), [i___1]) and element(fld_x(ctx~), [i___1]) <= spark__unsigned__u64__last) New H32: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx~))) >= 0 New H36: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H52: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_up_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H33: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx~))) <= 18446744073709551615 New H53: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H16: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx~), [i___1]) and element(fld_x(ctx~), [i___1]) <= 18446744073709551615) New H36: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H19: fld_hash_bit_len(fld_h(ctx~)) >= 0 New H39: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_up_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H20: fld_hash_bit_len(fld_h(ctx~)) <= 2147483640 New H40: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_up_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H16: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx~), [i___1] ) and element(fld_x(ctx~), [i___1]) <= 18446744073709551615) New H36: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(93). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H16: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx~), [i___1]) and element(fld_x(ctx~), [i___1]) <= 18446744073709551615) New H36: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_up_rules(102). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H15: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx~), [i___2] ) and element(fld_b(ctx~), [i___2]) <= 255) New H35: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule skein_512_up_rules(103). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H15: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx~), [i___2]) and element(fld_b(ctx~), [i___2]) <= 255) New H35: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) --- Eliminated hypothesis H12 (true-hypothesis). --- Eliminated hypothesis H13 (true-hypothesis). --- Eliminated hypothesis H21 (true-hypothesis). --- Eliminated hypothesis H22 (true-hypothesis). --- Eliminated hypothesis H25 (true-hypothesis). --- Eliminated hypothesis H41 (true-hypothesis). --- Eliminated hypothesis H42 (true-hypothesis). --- Eliminated hypothesis H45 (true-hypothesis). --- Eliminated hypothesis H20 (duplicate of H9). --- Eliminated hypothesis H17 (duplicate of H10). --- Eliminated hypothesis H57 (duplicate of H37). 
--- Eliminated hypothesis H55 (duplicate of H40). --- Eliminated hypothesis H14 (duplicate of H7). --- Eliminated hypothesis H6 (redundant, given H7). --- Eliminated hypothesis H18 (redundant, given H11). --- Eliminated hypothesis H19 (redundant, given H8). --- Eliminated hypothesis H38 (redundant, given H58). --- Eliminated hypothesis H39 (redundant, given H54). +++ New H59: integer__size >= 0 +++ New H60: natural__size >= 0 +++ New H61: spark__unsigned__u6__size >= 0 +++ New H62: spark__unsigned__u7__size >= 0 +++ New H63: spark__unsigned__byte__size >= 0 +++ New H64: spark__unsigned__u16__size >= 0 +++ New H65: spark__unsigned__u32__size >= 0 +++ New H66: spark__unsigned__u64__size >= 0 +++ New H67: spark__crypto__word_count_t__size >= 0 +++ New H68: hash_bit_length__size >= 0 +++ New H69: initialized_hash_bit_length__size >= 0 +++ New H70: skein_512_state_words_index__size >= 0 +++ New H71: skein_512_block_bytes_count__size >= 0 +++ New H72: skein_512_block_bytes_index__size >= 0 +++ New H73: positive_block_512_count_t__size >= 0 +++ New H74: skein_512_context__size >= 0 +++ New H75: msg__index__subtype__1__first <= msg__index__subtype__1__last +++ New H76: context_header__size >= 0 +++ New H77: msg__index__subtype__1__first >= 0 +++ New H78: msg__index__subtype__1__last >= 0 +++ New H79: msg__index__subtype__1__last <= 2147483647 +++ New H80: msg__index__subtype__1__first <= 2147483647 *** Proved C1: hash_bit_len_of(ctx) >= 1 This was achieved by applying the rewrite rule skein_rules(2) [from rulefile skein.rlu] to rewrite this conclusion to: C1: fld_hash_bit_len(fld_h(ctx)) >= 1 This rule could be applied because its side-conditions hold, as follows: <<< From H54, proved: fld_hash_bit_len(fld_h(ctx)) >= 1 <<< Immediate condition checktype(ctx, skein_512_context) evaluated successfully *** Proved C2: hash_bit_len_of(ctx) <= 2147483640 This was achieved by applying the rewrite rule skein_rules(2) [from rulefile skein.rlu] to rewrite this conclusion to: C2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 This rule could be applied because its side-conditions hold, as follows: <<< From H40, proved: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 <<< Immediate condition checktype(ctx, skein_512_context) evaluated successfully *** Proved C4: byte_count_of(ctx) >= 0 This was achieved by applying the rewrite rule skein_rules(1) [from rulefile skein.rlu] to rewrite this conclusion to: C4: fld_byte_count(fld_h(ctx)) >= 0 This rule could be applied because its side-conditions hold, as follows: <<< From H37, proved: fld_byte_count(fld_h(ctx)) >= 0 <<< Immediate condition checktype(ctx, skein_512_context) evaluated successfully *** Proved C5: byte_count_of(ctx) <= 64 This was achieved by applying the rewrite rule skein_rules(1) [from rulefile skein.rlu] to rewrite this conclusion to: C5: fld_byte_count(fld_h(ctx)) <= 64 This rule could be applied because its side-conditions hold, as follows: <<< From H58, proved: fld_byte_count(fld_h(ctx)) <= 64 <<< Immediate condition checktype(ctx, skein_512_context) evaluated successfully +++ New H81: fld_hash_bit_len(fld_h(ctx)) = hash_bit_len_of(ctx~) This was achieved by applying the rewrite rule skein_rules(2) [from rulefile skein.rlu] to rewrite existing hypothesis H56 to give the above formula. 
This rule could be applied because its side-condition holds, as follows: <<< Immediate condition checktype(ctx~, skein_512_context) evaluated successfully *** Proved C3: hash_bit_len_of(ctx) = hash_bit_len_of(ctx~) This was achieved by applying the rewrite rule skein_rules(2) [from rulefile skein.rlu] to rewrite this conclusion to: C3: fld_hash_bit_len(fld_h(ctx)) = hash_bit_len_of(ctx~) This rule could be applied because its side-conditions hold, as follows: <<< From H81, proved: fld_hash_bit_len(fld_h(ctx)) = hash_bit_len_of(ctx~) <<< Immediate condition checktype(ctx, skein_512_context) evaluated successfully *** PROVED VC. VCN 36: Summary of user rule application. FIL skein.rlu RUL skein_rules(2) CON 1, 2, 3 HYP 81 RUL skein_rules(1) CON 4, 5 OVR Overall summary of VCs using user rules. FIL skein.rlu RUL skein_rules(2) VCS 35, 36 RUL skein_rules(1) VCS 35, 36 spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final.vsm0000644000175000017500000000006411712765060026545 0ustar eugeneugenskein_512_final,0,0,2,2,0,0,0,100.0, 0.0, 0.0, 0.0, spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_update.rls0000644000175000017500000002545711712513676026752 0ustar eugeneugen /*********************************************************/ /*Proof Rule Declarations*/ /*Examiner Pro Edition*/ /*********************************************************/ /*procedure Skein.Skein_512_Update*/ rule_family skein_512_up_rules: X requires [X:any] & X <= Y requires [X:ire, Y:ire] & X >= Y requires [X:ire, Y:ire]. skein_512_up_rules(1): skein_512_block_bytes_c may_be_replaced_by 64. skein_512_up_rules(2): integer__size >= 0 may_be_deduced. skein_512_up_rules(3): integer__first may_be_replaced_by -2147483648. skein_512_up_rules(4): integer__last may_be_replaced_by 2147483647. skein_512_up_rules(5): integer__base__first may_be_replaced_by -2147483648. skein_512_up_rules(6): integer__base__last may_be_replaced_by 2147483647. skein_512_up_rules(7): natural__size >= 0 may_be_deduced. skein_512_up_rules(8): natural__first may_be_replaced_by 0. skein_512_up_rules(9): natural__last may_be_replaced_by 2147483647. skein_512_up_rules(10): natural__base__first may_be_replaced_by -2147483648. skein_512_up_rules(11): natural__base__last may_be_replaced_by 2147483647. skein_512_up_rules(12): interfaces__unsigned_8__size >= 0 may_be_deduced. skein_512_up_rules(13): interfaces__unsigned_8__size may_be_replaced_by 8. skein_512_up_rules(14): interfaces__unsigned_8__first may_be_replaced_by 0. skein_512_up_rules(15): interfaces__unsigned_8__last may_be_replaced_by 255. skein_512_up_rules(16): interfaces__unsigned_8__base__first may_be_replaced_by 0. skein_512_up_rules(17): interfaces__unsigned_8__base__last may_be_replaced_by 255. skein_512_up_rules(18): interfaces__unsigned_8__modulus may_be_replaced_by 256. skein_512_up_rules(19): interfaces__unsigned_16__size >= 0 may_be_deduced. skein_512_up_rules(20): interfaces__unsigned_16__size may_be_replaced_by 16. skein_512_up_rules(21): interfaces__unsigned_16__first may_be_replaced_by 0. skein_512_up_rules(22): interfaces__unsigned_16__last may_be_replaced_by 65535. skein_512_up_rules(23): interfaces__unsigned_16__base__first may_be_replaced_by 0. skein_512_up_rules(24): interfaces__unsigned_16__base__last may_be_replaced_by 65535. skein_512_up_rules(25): interfaces__unsigned_16__modulus may_be_replaced_by 65536. skein_512_up_rules(26): interfaces__unsigned_32__size >= 0 may_be_deduced. 
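/* The simplifier log above records, in its OVR summary, that two user rules
   from skein.rlu -- skein_rules(1) and skein_rules(2) -- were applied in VCs
   35 and 36.  Their exact text lives in skein.rlu elsewhere in this package;
   as reported by the log, they relate the proof functions byte_count_of and
   hash_bit_len_of to the corresponding fields of a skein_512_context value,
   guarded by a checktype side-condition.  A minimal sketch of that shape,
   assuming the usual conditional-replacement rule form (the spelling below is
   a reconstruction; only the function and type names are taken from the log):

   skein_rules(1): fld_byte_count(fld_h(S)) may_be_replaced_by byte_count_of(S)
                   if [checktype(S, skein_512_context)].
   skein_rules(2): fld_hash_bit_len(fld_h(S)) may_be_replaced_by
                   hash_bit_len_of(S) if [checktype(S, skein_512_context)].
*/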
skein_512_up_rules(27): interfaces__unsigned_32__size may_be_replaced_by 32. skein_512_up_rules(28): interfaces__unsigned_32__first may_be_replaced_by 0. skein_512_up_rules(29): interfaces__unsigned_32__last may_be_replaced_by 4294967295. skein_512_up_rules(30): interfaces__unsigned_32__base__first may_be_replaced_by 0. skein_512_up_rules(31): interfaces__unsigned_32__base__last may_be_replaced_by 4294967295. skein_512_up_rules(32): interfaces__unsigned_32__modulus may_be_replaced_by 4294967296. skein_512_up_rules(33): interfaces__unsigned_64__size >= 0 may_be_deduced. skein_512_up_rules(34): interfaces__unsigned_64__size may_be_replaced_by 64. skein_512_up_rules(35): interfaces__unsigned_64__first may_be_replaced_by 0. skein_512_up_rules(36): interfaces__unsigned_64__last may_be_replaced_by 18446744073709551615. skein_512_up_rules(37): interfaces__unsigned_64__base__first may_be_replaced_by 0. skein_512_up_rules(38): interfaces__unsigned_64__base__last may_be_replaced_by 18446744073709551615. skein_512_up_rules(39): interfaces__unsigned_64__modulus may_be_replaced_by 18446744073709551616. skein_512_up_rules(40): spark__unsigned__u6__size >= 0 may_be_deduced. skein_512_up_rules(41): spark__unsigned__u6__first may_be_replaced_by 0. skein_512_up_rules(42): spark__unsigned__u6__last may_be_replaced_by 63. skein_512_up_rules(43): spark__unsigned__u6__base__first may_be_replaced_by 0. skein_512_up_rules(44): spark__unsigned__u6__base__last may_be_replaced_by 63. skein_512_up_rules(45): spark__unsigned__u6__modulus may_be_replaced_by 64. skein_512_up_rules(46): spark__unsigned__u7__size >= 0 may_be_deduced. skein_512_up_rules(47): spark__unsigned__u7__first may_be_replaced_by 0. skein_512_up_rules(48): spark__unsigned__u7__last may_be_replaced_by 127. skein_512_up_rules(49): spark__unsigned__u7__base__first may_be_replaced_by 0. skein_512_up_rules(50): spark__unsigned__u7__base__last may_be_replaced_by 127. skein_512_up_rules(51): spark__unsigned__u7__modulus may_be_replaced_by 128. skein_512_up_rules(52): spark__unsigned__byte__size >= 0 may_be_deduced. skein_512_up_rules(53): spark__unsigned__byte__first may_be_replaced_by 0. skein_512_up_rules(54): spark__unsigned__byte__last may_be_replaced_by 255. skein_512_up_rules(55): spark__unsigned__byte__base__first may_be_replaced_by 0. skein_512_up_rules(56): spark__unsigned__byte__base__last may_be_replaced_by 255. skein_512_up_rules(57): spark__unsigned__byte__modulus may_be_replaced_by 256. skein_512_up_rules(58): spark__unsigned__u16__size >= 0 may_be_deduced. skein_512_up_rules(59): spark__unsigned__u16__first may_be_replaced_by 0. skein_512_up_rules(60): spark__unsigned__u16__last may_be_replaced_by 65535. skein_512_up_rules(61): spark__unsigned__u16__base__first may_be_replaced_by 0. skein_512_up_rules(62): spark__unsigned__u16__base__last may_be_replaced_by 65535. skein_512_up_rules(63): spark__unsigned__u16__modulus may_be_replaced_by 65536. skein_512_up_rules(64): spark__unsigned__u32__size >= 0 may_be_deduced. skein_512_up_rules(65): spark__unsigned__u32__first may_be_replaced_by 0. skein_512_up_rules(66): spark__unsigned__u32__last may_be_replaced_by 4294967295. skein_512_up_rules(67): spark__unsigned__u32__base__first may_be_replaced_by 0. skein_512_up_rules(68): spark__unsigned__u32__base__last may_be_replaced_by 4294967295. skein_512_up_rules(69): spark__unsigned__u32__modulus may_be_replaced_by 4294967296. skein_512_up_rules(70): spark__unsigned__u64__size >= 0 may_be_deduced. 
skein_512_up_rules(71): spark__unsigned__u64__first may_be_replaced_by 0. skein_512_up_rules(72): spark__unsigned__u64__last may_be_replaced_by 18446744073709551615. skein_512_up_rules(73): spark__unsigned__u64__base__first may_be_replaced_by 0. skein_512_up_rules(74): spark__unsigned__u64__base__last may_be_replaced_by 18446744073709551615. skein_512_up_rules(75): spark__unsigned__u64__modulus may_be_replaced_by 18446744073709551616. skein_512_up_rules(76): spark__crypto__word_count_t__size >= 0 may_be_deduced. skein_512_up_rules(77): spark__crypto__word_count_t__first may_be_replaced_by 0. skein_512_up_rules(78): spark__crypto__word_count_t__last may_be_replaced_by 268435455. skein_512_up_rules(79): spark__crypto__word_count_t__base__first may_be_replaced_by -2147483648. skein_512_up_rules(80): spark__crypto__word_count_t__base__last may_be_replaced_by 2147483647. skein_512_up_rules(81): hash_bit_length__size >= 0 may_be_deduced. skein_512_up_rules(82): hash_bit_length__first may_be_replaced_by 0. skein_512_up_rules(83): hash_bit_length__last may_be_replaced_by 2147483640. skein_512_up_rules(84): hash_bit_length__base__first may_be_replaced_by -2147483648. skein_512_up_rules(85): hash_bit_length__base__last may_be_replaced_by 2147483647. skein_512_up_rules(86): initialized_hash_bit_length__size >= 0 may_be_deduced. skein_512_up_rules(87): initialized_hash_bit_length__first may_be_replaced_by 1. skein_512_up_rules(88): initialized_hash_bit_length__last may_be_replaced_by 2147483640. skein_512_up_rules(89): initialized_hash_bit_length__base__first may_be_replaced_by -2147483648. skein_512_up_rules(90): initialized_hash_bit_length__base__last may_be_replaced_by 2147483647. skein_512_up_rules(91): skein_512_state_words_index__size >= 0 may_be_deduced. skein_512_up_rules(92): skein_512_state_words_index__first may_be_replaced_by 0. skein_512_up_rules(93): skein_512_state_words_index__last may_be_replaced_by 7. skein_512_up_rules(94): skein_512_state_words_index__base__first may_be_replaced_by -2147483648. skein_512_up_rules(95): skein_512_state_words_index__base__last may_be_replaced_by 2147483647. skein_512_up_rules(96): skein_512_block_bytes_count__size >= 0 may_be_deduced. skein_512_up_rules(97): skein_512_block_bytes_count__first may_be_replaced_by 0. skein_512_up_rules(98): skein_512_block_bytes_count__last may_be_replaced_by 64. skein_512_up_rules(99): skein_512_block_bytes_count__base__first may_be_replaced_by -2147483648. skein_512_up_rules(100): skein_512_block_bytes_count__base__last may_be_replaced_by 2147483647. skein_512_up_rules(101): skein_512_block_bytes_index__size >= 0 may_be_deduced. skein_512_up_rules(102): skein_512_block_bytes_index__first may_be_replaced_by 0. skein_512_up_rules(103): skein_512_block_bytes_index__last may_be_replaced_by 63. skein_512_up_rules(104): skein_512_block_bytes_index__base__first may_be_replaced_by -2147483648. skein_512_up_rules(105): skein_512_block_bytes_index__base__last may_be_replaced_by 2147483647. skein_512_up_rules(106): positive_block_512_count_t__size >= 0 may_be_deduced. skein_512_up_rules(107): positive_block_512_count_t__first may_be_replaced_by 1. skein_512_up_rules(108): positive_block_512_count_t__last may_be_replaced_by 33554431. skein_512_up_rules(109): positive_block_512_count_t__base__first may_be_replaced_by -2147483648. skein_512_up_rules(110): positive_block_512_count_t__base__last may_be_replaced_by 2147483647. skein_512_up_rules(111): skein_512_context__size >= 0 may_be_deduced. 
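/* The replacement rules above record the bounds of the Ada scalar types used
   by procedure Skein.Skein_512_Update.  A minimal Ada sketch, assuming these
   type names (only the numeric bounds are taken from the rules themselves):

      Skein_512_Block_Bytes_C : constant := 64;
      subtype Hash_Bit_Length is Natural range 0 .. 2_147_483_640;
      subtype Initialized_Hash_Bit_Length is
        Hash_Bit_Length range 1 .. Hash_Bit_Length'Last;
      subtype Skein_512_State_Words_Index is Natural range 0 .. 7;
      subtype Skein_512_Block_Bytes_Count is
        Natural range 0 .. Skein_512_Block_Bytes_C;
      subtype Skein_512_Block_Bytes_Index is
        Natural range 0 .. Skein_512_Block_Bytes_C - 1;
      subtype Positive_Block_512_Count_T is Positive range 1 .. 33_554_431;
*/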
skein_512_up_rules(112): A = B may_be_deduced_from [goal(checktype(A,skein_512_context)), goal(checktype(B,skein_512_context)), fld_h(A) = fld_h(B), fld_x(A) = fld_x(B), fld_b(A) = fld_b(B)]. skein_512_up_rules(113): msg__index__subtype__1__first >= natural__first may_be_deduced. skein_512_up_rules(114): msg__index__subtype__1__last <= natural__last may_be_deduced. skein_512_up_rules(115): msg__index__subtype__1__first <= msg__index__subtype__1__last may_be_deduced. skein_512_up_rules(116): msg__index__subtype__1__last >= natural__first may_be_deduced. skein_512_up_rules(117): msg__index__subtype__1__first <= natural__last may_be_deduced. skein_512_up_rules(118): tweak_value__size >= 0 may_be_deduced. skein_512_up_rules(119): tweak_value__size may_be_replaced_by 128. skein_512_up_rules(120): A = B may_be_deduced_from [goal(checktype(A,tweak_value)), goal(checktype(B,tweak_value)), fld_byte_count_lsb(A) = fld_byte_count_lsb(B), fld_byte_count_msb(A) = fld_byte_count_msb(B), fld_reserved(A) = fld_reserved(B), fld_tree_level(A) = fld_tree_level(B), fld_bit_pad(A) = fld_bit_pad(B), fld_field_type(A) = fld_field_type(B), fld_first_block(A) = fld_first_block(B), fld_final_block(A) = fld_final_block(B)]. skein_512_up_rules(121): context_header__size >= 0 may_be_deduced. skein_512_up_rules(122): A = B may_be_deduced_from [goal(checktype(A,context_header)), goal(checktype(B,context_header)), fld_tweak_words(A) = fld_tweak_words(B), fld_hash_bit_len(A) = fld_hash_bit_len(B), fld_byte_count(A) = fld_byte_count(B)]. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_update.siv0000644000175000017500000006214311712513676026744 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Update For path(s) from start to run-time check associated with statement of line 785: procedure_skein_512_update_1. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 786: procedure_skein_512_update_2. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 788: procedure_skein_512_update_3. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 791: procedure_skein_512_update_4. *** true . /* all conclusions proved */ For path(s) from start to check associated with statement of line 793: procedure_skein_512_update_5. *** true . /* all conclusions proved */ For path(s) from start to check associated with statement of line 795: procedure_skein_512_update_6. *** true . /* all conclusions proved */ For path(s) from start to precondition check associated with statement of line 796: procedure_skein_512_update_7. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 797: procedure_skein_512_update_8. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 798: procedure_skein_512_update_9. *** true . /* all conclusions proved */ For path(s) from start to check associated with statement of line 800: procedure_skein_512_update_10. *** true . /* all conclusions proved */ For path(s) from start to precondition check associated with statement of line 803: procedure_skein_512_update_11. *** true . 
/* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 808: procedure_skein_512_update_12. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 814: procedure_skein_512_update_13. *** true . /* all conclusions proved */ procedure_skein_512_update_14. *** true . /* all conclusions proved */ For path(s) from start to precondition check associated with statement of line 816: procedure_skein_512_update_15. H1: fld_hash_bit_len(fld_h(ctx)) >= 1 . H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 . H3: fld_byte_count(fld_h(ctx)) <= 64 . H4: msg__index__subtype__1__first = 0 . H5: msg__index__subtype__1__last <= 2147483582 . H6: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) . H7: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x( ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) . H8: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 . H9: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 . H10: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 . H11: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 . H12: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 . H13: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 . H14: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 . H15: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 . H16: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 . H17: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 . H18: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1]) and element(msg, [i___1]) <= 255) . H19: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= 2147483647 . H20: fld_byte_count(fld_h(ctx)) > 0 . H21: fld_hash_bit_len(fld_h(ctx)) > 0 . H22: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h(ctx)) . H23: fld_byte_count(fld_h(ctx__1)) = 64 . H24: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element( fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) . H25: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x( ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) . H26: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 . H27: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= 0 . H28: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= 63 . H29: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= 0 . H30: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= 127 . H31: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= 0 . H32: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= 65535 . H33: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= 0 . H34: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= 4294967295 . H35: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= 0 . H36: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= 18446744073709551615 . H37: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element( fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= 255) . H38: fld_hash_bit_len(fld_h(ctx)) >= 1 . H39: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 . H40: fld_hash_bit_len(fld_h(ctx__2)) = fld_hash_bit_len(fld_h(ctx)) . H41: fld_byte_count(fld_h(ctx__1)) >= 0 . H42: fld_byte_count(fld_h(ctx__1)) <= 64 . H43: fld_byte_count(fld_h(ctx__2)) = fld_byte_count(fld_h(ctx__1)) . 
H44: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element( fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= 255) . H45: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x( ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) . H46: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= 0 . H47: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= 63 . H48: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= 0 . H49: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= 127 . H50: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= 0 . H51: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= 65535 . H52: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= 0 . H53: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= 4294967295 . H54: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= 0 . H55: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= 18446744073709551615 . H56: msg__index__subtype__1__last + fld_byte_count(fld_h(ctx)) > 127 . H57: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) div 64 >= 1 . H58: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) div 64 <= 33554431 . H59: integer__size >= 0 . H60: natural__size >= 0 . H61: spark__unsigned__u6__size >= 0 . H62: spark__unsigned__u7__size >= 0 . H63: spark__unsigned__byte__size >= 0 . H64: spark__unsigned__u16__size >= 0 . H65: spark__unsigned__u32__size >= 0 . H66: spark__unsigned__u64__size >= 0 . H67: spark__crypto__word_count_t__size >= 0 . H68: hash_bit_length__size >= 0 . H69: initialized_hash_bit_length__size >= 0 . H70: skein_512_state_words_index__size >= 0 . H71: skein_512_block_bytes_count__size >= 0 . H72: skein_512_block_bytes_index__size >= 0 . H73: positive_block_512_count_t__size >= 0 . H74: skein_512_context__size >= 0 . H75: msg__index__subtype__1__first <= msg__index__subtype__1__last . H76: context_header__size >= 0 . H77: msg__index__subtype__1__first >= 0 . H78: msg__index__subtype__1__last >= 0 . H79: msg__index__subtype__1__last <= 2147483647 . H80: msg__index__subtype__1__first <= 2147483647 . -> C1: msg__index__subtype__1__last + 64 * ((64 - (msg__index__subtype__1__last + fld_byte_count(fld_h(ctx)))) div 64) + fld_byte_count(fld_h(ctx)) > 62 . C2: 64 - fld_byte_count(fld_h(ctx)) + 63 <= msg__index__subtype__1__last . procedure_skein_512_update_16. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 822: procedure_skein_512_update_17. *** true . /* all conclusions proved */ procedure_skein_512_update_18. *** true . /* all conclusions proved */ For path(s) from start to check associated with statement of line 824: procedure_skein_512_update_19. H1: fld_hash_bit_len(fld_h(ctx)) >= 1 . H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 . H3: fld_byte_count(fld_h(ctx)) <= 64 . H4: msg__index__subtype__1__first = 0 . H5: msg__index__subtype__1__last <= 2147483582 . H6: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) . H7: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x( ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) . H8: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 . H9: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 . H10: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 . H11: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 . H12: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 . 
H13: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 . H14: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 . H15: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 . H16: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 . H17: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 . H18: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1]) and element(msg, [i___1]) <= 255) . H19: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= 2147483647 . H20: fld_byte_count(fld_h(ctx)) > 0 . H21: fld_hash_bit_len(fld_h(ctx)) > 0 . H22: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h(ctx)) . H23: fld_byte_count(fld_h(ctx__1)) = 64 . H24: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element( fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) . H25: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x( ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) . H26: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 . H27: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= 0 . H28: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= 63 . H29: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= 0 . H30: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= 127 . H31: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= 0 . H32: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= 65535 . H33: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= 0 . H34: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= 4294967295 . H35: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= 0 . H36: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= 18446744073709551615 . H37: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element( fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= 255) . H38: fld_hash_bit_len(fld_h(ctx)) >= 1 . H39: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 . H40: fld_hash_bit_len(fld_h(ctx__2)) = fld_hash_bit_len(fld_h(ctx)) . H41: fld_byte_count(fld_h(ctx__1)) >= 0 . H42: fld_byte_count(fld_h(ctx__1)) <= 64 . H43: fld_byte_count(fld_h(ctx__2)) = fld_byte_count(fld_h(ctx__1)) . H44: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element( fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= 255) . H45: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x( ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) . H46: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= 0 . H47: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= 63 . H48: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= 0 . H49: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= 127 . H50: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= 0 . H51: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= 65535 . H52: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= 0 . H53: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= 4294967295 . H54: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= 0 . H55: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= 18446744073709551615 . H56: msg__index__subtype__1__last + fld_byte_count(fld_h(ctx)) > 127 . H57: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) div 64 >= 1 . H58: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) div 64 <= 33554431 . 
H59: msg__index__subtype__1__last + 64 * ((64 - (msg__index__subtype__1__last + fld_byte_count(fld_h(ctx)))) div 64) + fld_byte_count(fld_h(ctx)) > 62 . H60: fld_hash_bit_len(fld_h(ctx)) >= 1 . H61: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 . H62: fld_hash_bit_len(fld_h(ctx__3)) = fld_hash_bit_len(fld_h(ctx__2)) . H63: fld_byte_count(fld_h(ctx__3)) = 0 . H64: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element( fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= 255) . H65: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x( ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= 18446744073709551615) . H66: fld_field_type(fld_tweak_words(fld_h(ctx__3))) >= 0 . H67: fld_field_type(fld_tweak_words(fld_h(ctx__3))) <= 63 . H68: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) >= 0 . H69: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) <= 127 . H70: fld_reserved(fld_tweak_words(fld_h(ctx__3))) >= 0 . H71: fld_reserved(fld_tweak_words(fld_h(ctx__3))) <= 65535 . H72: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) >= 0 . H73: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) <= 4294967295 . H74: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) >= 0 . H75: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) <= 18446744073709551615 . H76: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) div 64 * 64 >= 0 . H77: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) div 64 * 64 <= 2147483647 . H78: integer__size >= 0 . H79: natural__size >= 0 . H80: spark__unsigned__u6__size >= 0 . H81: spark__unsigned__u7__size >= 0 . H82: spark__unsigned__byte__size >= 0 . H83: spark__unsigned__u16__size >= 0 . H84: spark__unsigned__u32__size >= 0 . H85: spark__unsigned__u64__size >= 0 . H86: spark__crypto__word_count_t__size >= 0 . H87: hash_bit_length__size >= 0 . H88: initialized_hash_bit_length__size >= 0 . H89: skein_512_state_words_index__size >= 0 . H90: skein_512_block_bytes_count__size >= 0 . H91: skein_512_block_bytes_index__size >= 0 . H92: positive_block_512_count_t__size >= 0 . H93: skein_512_context__size >= 0 . H94: msg__index__subtype__1__first <= msg__index__subtype__1__last . H95: context_header__size >= 0 . H96: msg__index__subtype__1__first >= 0 . H97: msg__index__subtype__1__last >= 0 . H98: msg__index__subtype__1__first <= 2147483647 . -> C1: msg__index__subtype__1__last + 64 * ((64 - (msg__index__subtype__1__last + fld_byte_count(fld_h(ctx)))) div 64) + fld_byte_count(fld_h(ctx)) > 63 . procedure_skein_512_update_20. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 825: procedure_skein_512_update_21. *** true . /* all conclusions proved */ procedure_skein_512_update_22. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 827: procedure_skein_512_update_23. *** true . /* all conclusions proved */ procedure_skein_512_update_24. *** true . /* all conclusions proved */ For path(s) from start to precondition check associated with statement of line 835: procedure_skein_512_update_25. H1: fld_hash_bit_len(fld_h(ctx)) >= 1 . H2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 . H3: fld_byte_count(fld_h(ctx)) <= 64 . H4: msg__index__subtype__1__first = 0 . H5: msg__index__subtype__1__last <= 2147483582 . H6: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) . 
H7: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x( ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) . H8: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 . H9: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 . H10: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 . H11: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 . H12: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 . H13: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 . H14: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 . H15: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 . H16: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 . H17: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 . H18: for_all(i___1 : integer, msg__index__subtype__1__first <= i___1 and i___1 <= msg__index__subtype__1__last -> 0 <= element(msg, [i___1]) and element(msg, [i___1]) <= 255) . H19: msg__index__subtype__1__last + 1 + fld_byte_count(fld_h(ctx)) <= 2147483647 . H20: fld_byte_count(fld_h(ctx)) > 0 . H21: fld_hash_bit_len(fld_h(ctx)) > 0 . H22: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h(ctx)) . H23: fld_byte_count(fld_h(ctx__1)) = 64 . H24: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element( fld_b(ctx__1), [i___2]) and element(fld_b(ctx__1), [i___2]) <= 255) . H25: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x( ctx__1), [i___1]) and element(fld_x(ctx__1), [i___1]) <= 18446744073709551615) . H26: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 . H27: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= 0 . H28: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= 63 . H29: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= 0 . H30: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= 127 . H31: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= 0 . H32: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= 65535 . H33: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= 0 . H34: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= 4294967295 . H35: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= 0 . H36: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= 18446744073709551615 . H37: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 63 -> 0 <= element( fld_b(ctx__1), [i___1]) and element(fld_b(ctx__1), [i___1]) <= 255) . H38: fld_hash_bit_len(fld_h(ctx)) >= 1 . H39: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 . H40: fld_hash_bit_len(fld_h(ctx__2)) = fld_hash_bit_len(fld_h(ctx)) . H41: fld_byte_count(fld_h(ctx__1)) >= 0 . H42: fld_byte_count(fld_h(ctx__1)) <= 64 . H43: fld_byte_count(fld_h(ctx__2)) = fld_byte_count(fld_h(ctx__1)) . H44: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element( fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= 255) . H45: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x( ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= 18446744073709551615) . H46: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= 0 . H47: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= 63 . H48: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= 0 . H49: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= 127 . H50: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= 0 . H51: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= 65535 . H52: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= 0 . H53: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= 4294967295 . H54: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= 0 . 
H55: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= 18446744073709551615 . H56: msg__index__subtype__1__last + fld_byte_count(fld_h(ctx)) > 127 . H57: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) div 64 >= 1 . H58: fld_hash_bit_len(fld_h(ctx)) >= 1 . H59: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 . H60: fld_hash_bit_len(fld_h(ctx__3)) = fld_hash_bit_len(fld_h(ctx__2)) . H61: fld_byte_count(fld_h(ctx__3)) = 0 . H62: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element( fld_b(ctx__3), [i___2]) and element(fld_b(ctx__3), [i___2]) <= 255) . H63: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x( ctx__3), [i___1]) and element(fld_x(ctx__3), [i___1]) <= 18446744073709551615) . H64: fld_field_type(fld_tweak_words(fld_h(ctx__3))) >= 0 . H65: fld_field_type(fld_tweak_words(fld_h(ctx__3))) <= 63 . H66: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) >= 0 . H67: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) <= 127 . H68: fld_reserved(fld_tweak_words(fld_h(ctx__3))) >= 0 . H69: fld_reserved(fld_tweak_words(fld_h(ctx__3))) <= 65535 . H70: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) >= 0 . H71: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) <= 4294967295 . H72: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) >= 0 . H73: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) <= 18446744073709551615 . H74: (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - 1) div 64 * 64 >= 0 . H75: msg__index__subtype__1__last + 64 * ((64 - (msg__index__subtype__1__last + fld_byte_count(fld_h(ctx)))) div 64) + fld_byte_count(fld_h(ctx)) > 63 . H76: - 63 + (msg__index__subtype__1__last + 64 * ((64 - ( msg__index__subtype__1__last + fld_byte_count(fld_h(ctx)))) div 64) + fld_byte_count(fld_h(ctx))) <= 2147483647 . H77: 64 - fld_byte_count(fld_h(ctx)) + (msg__index__subtype__1__last + 1 - ( 64 - fld_byte_count(fld_h(ctx))) - 1) div 64 * 64 >= 0 . H78: 64 * ((64 - (msg__index__subtype__1__last + fld_byte_count(fld_h(ctx)))) div 64) + fld_byte_count(fld_h(ctx)) > - 2147483584 . H79: integer__size >= 0 . H80: natural__size >= 0 . H81: spark__unsigned__u6__size >= 0 . H82: spark__unsigned__u7__size >= 0 . H83: spark__unsigned__byte__size >= 0 . H84: spark__unsigned__u16__size >= 0 . H85: spark__unsigned__u32__size >= 0 . H86: spark__unsigned__u64__size >= 0 . H87: spark__crypto__word_count_t__size >= 0 . H88: hash_bit_length__size >= 0 . H89: initialized_hash_bit_length__size >= 0 . H90: skein_512_state_words_index__size >= 0 . H91: skein_512_block_bytes_count__size >= 0 . H92: skein_512_block_bytes_index__size >= 0 . H93: positive_block_512_count_t__size >= 0 . H94: skein_512_context__size >= 0 . H95: msg__index__subtype__1__first <= msg__index__subtype__1__last . H96: context_header__size >= 0 . H97: msg__index__subtype__1__first >= 0 . H98: msg__index__subtype__1__last >= 0 . H99: msg__index__subtype__1__first <= 2147483647 . -> C1: 64 - fld_byte_count(fld_h(ctx)) + (msg__index__subtype__1__last + 1 - ( 64 - fld_byte_count(fld_h(ctx))) - 1) div 64 * 64 + ( msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx))) - (msg__index__subtype__1__last + 1 - (64 - fld_byte_count(fld_h(ctx)) ) - 1) div 64 * 64 - 1) <= msg__index__subtype__1__last . C2: - 64 + (msg__index__subtype__1__last + 64 * ((64 - ( msg__index__subtype__1__last + fld_byte_count(fld_h(ctx)))) div 64) + fld_byte_count(fld_h(ctx)) + fld_byte_count(fld_h(ctx__3))) <= 63 . procedure_skein_512_update_26. *** true . 
/* all conclusions proved */ procedure_skein_512_update_27. *** true . /* all conclusions proved */ procedure_skein_512_update_28. *** true . /* contradiction within hypotheses. */ procedure_skein_512_update_29. *** true . /* all conclusions proved */ For path(s) from start to finish: procedure_skein_512_update_30. *** true . /* all conclusions proved */ procedure_skein_512_update_31. *** true . /* all conclusions proved */ procedure_skein_512_update_32. *** true . /* all conclusions proved */ procedure_skein_512_update_33. *** true . /* all conclusions proved */ procedure_skein_512_update_34. *** true . /* all conclusions proved */ For checks of refinement integrity: procedure_skein_512_update_35. *** true . /* proved using user-defined proof rules. */ procedure_skein_512_update_36. *** true . /* proved using user-defined proof rules. */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_start_new_type.vct0000644000175000017500000000000011712513676030115 0ustar eugeneugenspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final.log0000644000175000017500000000330711712513676026530 0ustar eugeneugenSPARK Simplifier Pro Edition Reading skein_512_final.fdl (for inherited FDL type declarations) Reading skein.rlu (for user-defined proof rules) Processing skein_512_final.vcg ... Simplified VC: 1 - All conclusions proved Simplified VC: 2 - All conclusions proved Simplified VC: 3 - All conclusions proved Simplified VC: 4 - All conclusions proved Simplified VC: 5 - All conclusions proved Simplified VC: 6 - All conclusions proved Simplified VC: 7 - All conclusions proved Simplified VC: 8 - All conclusions proved Simplified VC: 9 - All conclusions proved Simplified VC: 10 - All conclusions proved Simplified VC: 11 - All conclusions proved Simplified VC: 12 - All conclusions proved Simplified VC: 13 - All conclusions proved Simplified VC: 14 - All conclusions proved Simplified VC: 15 - All conclusions proved Simplified VC: 16 - All conclusions proved Simplified VC: 17 - All conclusions proved Simplified VC: 18 - All conclusions proved Simplified VC: 19 - All conclusions proved Simplified VC: 20 - 2 conclusions remain unproven Simplified VC: 21 - 2 conclusions remain unproven Simplified VC: 22 - All conclusions proved Simplified VC: 23 - All conclusions proved Simplified VC: 24 - All conclusions proved Simplified VC: 25 - All conclusions proved Simplified VC: 26 - All conclusions proved Simplified VC: 27 - All conclusions proved Simplified VC: 28 - All conclusions proved Simplified VC: 29 - All conclusions proved Simplified VC: 30 - All conclusions proved Simplified VC: 31 - All conclusions proved Simplified VC: 32 - All conclusions proved Simplified VC: 33 - All conclusions proved Simplified VC: 34 - All conclusions proved Automatic simplification completed. Simplified output sent to skein_512_final.siv. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block.rls0000644000175000017500000002773711712513676030323 0ustar eugeneugen /*********************************************************/ /*Proof Rule Declarations*/ /*Examiner Pro Edition*/ /*********************************************************/ /*procedure Skein.Skein_512_Process_Block*/ rule_family skein_512_pr_rules: X requires [X:any] & X <= Y requires [X:ire, Y:ire] & X >= Y requires [X:ire, Y:ire]. skein_512_pr_rules(1): skein_512_block_bytes_c may_be_replaced_by 64. skein_512_pr_rules(2): integer__size >= 0 may_be_deduced. 
skein_512_pr_rules(3): integer__first may_be_replaced_by -2147483648. skein_512_pr_rules(4): integer__last may_be_replaced_by 2147483647. skein_512_pr_rules(5): integer__base__first may_be_replaced_by -2147483648. skein_512_pr_rules(6): integer__base__last may_be_replaced_by 2147483647. skein_512_pr_rules(7): natural__size >= 0 may_be_deduced. skein_512_pr_rules(8): natural__first may_be_replaced_by 0. skein_512_pr_rules(9): natural__last may_be_replaced_by 2147483647. skein_512_pr_rules(10): natural__base__first may_be_replaced_by -2147483648. skein_512_pr_rules(11): natural__base__last may_be_replaced_by 2147483647. skein_512_pr_rules(12): interfaces__unsigned_8__size >= 0 may_be_deduced. skein_512_pr_rules(13): interfaces__unsigned_8__size may_be_replaced_by 8. skein_512_pr_rules(14): interfaces__unsigned_8__first may_be_replaced_by 0. skein_512_pr_rules(15): interfaces__unsigned_8__last may_be_replaced_by 255. skein_512_pr_rules(16): interfaces__unsigned_8__base__first may_be_replaced_by 0. skein_512_pr_rules(17): interfaces__unsigned_8__base__last may_be_replaced_by 255. skein_512_pr_rules(18): interfaces__unsigned_8__modulus may_be_replaced_by 256. skein_512_pr_rules(19): interfaces__unsigned_16__size >= 0 may_be_deduced. skein_512_pr_rules(20): interfaces__unsigned_16__size may_be_replaced_by 16. skein_512_pr_rules(21): interfaces__unsigned_16__first may_be_replaced_by 0. skein_512_pr_rules(22): interfaces__unsigned_16__last may_be_replaced_by 65535. skein_512_pr_rules(23): interfaces__unsigned_16__base__first may_be_replaced_by 0. skein_512_pr_rules(24): interfaces__unsigned_16__base__last may_be_replaced_by 65535. skein_512_pr_rules(25): interfaces__unsigned_16__modulus may_be_replaced_by 65536. skein_512_pr_rules(26): interfaces__unsigned_32__size >= 0 may_be_deduced. skein_512_pr_rules(27): interfaces__unsigned_32__size may_be_replaced_by 32. skein_512_pr_rules(28): interfaces__unsigned_32__first may_be_replaced_by 0. skein_512_pr_rules(29): interfaces__unsigned_32__last may_be_replaced_by 4294967295. skein_512_pr_rules(30): interfaces__unsigned_32__base__first may_be_replaced_by 0. skein_512_pr_rules(31): interfaces__unsigned_32__base__last may_be_replaced_by 4294967295. skein_512_pr_rules(32): interfaces__unsigned_32__modulus may_be_replaced_by 4294967296. skein_512_pr_rules(33): interfaces__unsigned_64__size >= 0 may_be_deduced. skein_512_pr_rules(34): interfaces__unsigned_64__size may_be_replaced_by 64. skein_512_pr_rules(35): interfaces__unsigned_64__first may_be_replaced_by 0. skein_512_pr_rules(36): interfaces__unsigned_64__last may_be_replaced_by 18446744073709551615. skein_512_pr_rules(37): interfaces__unsigned_64__base__first may_be_replaced_by 0. skein_512_pr_rules(38): interfaces__unsigned_64__base__last may_be_replaced_by 18446744073709551615. skein_512_pr_rules(39): interfaces__unsigned_64__modulus may_be_replaced_by 18446744073709551616. skein_512_pr_rules(40): spark__unsigned__u6__size >= 0 may_be_deduced. skein_512_pr_rules(41): spark__unsigned__u6__first may_be_replaced_by 0. skein_512_pr_rules(42): spark__unsigned__u6__last may_be_replaced_by 63. skein_512_pr_rules(43): spark__unsigned__u6__base__first may_be_replaced_by 0. skein_512_pr_rules(44): spark__unsigned__u6__base__last may_be_replaced_by 63. skein_512_pr_rules(45): spark__unsigned__u6__modulus may_be_replaced_by 64. skein_512_pr_rules(46): spark__unsigned__u7__size >= 0 may_be_deduced. skein_512_pr_rules(47): spark__unsigned__u7__first may_be_replaced_by 0. 
skein_512_pr_rules(48): spark__unsigned__u7__last may_be_replaced_by 127. skein_512_pr_rules(49): spark__unsigned__u7__base__first may_be_replaced_by 0. skein_512_pr_rules(50): spark__unsigned__u7__base__last may_be_replaced_by 127. skein_512_pr_rules(51): spark__unsigned__u7__modulus may_be_replaced_by 128. skein_512_pr_rules(52): spark__unsigned__byte__size >= 0 may_be_deduced. skein_512_pr_rules(53): spark__unsigned__byte__first may_be_replaced_by 0. skein_512_pr_rules(54): spark__unsigned__byte__last may_be_replaced_by 255. skein_512_pr_rules(55): spark__unsigned__byte__base__first may_be_replaced_by 0. skein_512_pr_rules(56): spark__unsigned__byte__base__last may_be_replaced_by 255. skein_512_pr_rules(57): spark__unsigned__byte__modulus may_be_replaced_by 256. skein_512_pr_rules(58): spark__unsigned__u16__size >= 0 may_be_deduced. skein_512_pr_rules(59): spark__unsigned__u16__first may_be_replaced_by 0. skein_512_pr_rules(60): spark__unsigned__u16__last may_be_replaced_by 65535. skein_512_pr_rules(61): spark__unsigned__u16__base__first may_be_replaced_by 0. skein_512_pr_rules(62): spark__unsigned__u16__base__last may_be_replaced_by 65535. skein_512_pr_rules(63): spark__unsigned__u16__modulus may_be_replaced_by 65536. skein_512_pr_rules(64): spark__unsigned__u32__size >= 0 may_be_deduced. skein_512_pr_rules(65): spark__unsigned__u32__first may_be_replaced_by 0. skein_512_pr_rules(66): spark__unsigned__u32__last may_be_replaced_by 4294967295. skein_512_pr_rules(67): spark__unsigned__u32__base__first may_be_replaced_by 0. skein_512_pr_rules(68): spark__unsigned__u32__base__last may_be_replaced_by 4294967295. skein_512_pr_rules(69): spark__unsigned__u32__modulus may_be_replaced_by 4294967296. skein_512_pr_rules(70): spark__unsigned__u64__size >= 0 may_be_deduced. skein_512_pr_rules(71): spark__unsigned__u64__first may_be_replaced_by 0. skein_512_pr_rules(72): spark__unsigned__u64__last may_be_replaced_by 18446744073709551615. skein_512_pr_rules(73): spark__unsigned__u64__base__first may_be_replaced_by 0. skein_512_pr_rules(74): spark__unsigned__u64__base__last may_be_replaced_by 18446744073709551615. skein_512_pr_rules(75): spark__unsigned__u64__modulus may_be_replaced_by 18446744073709551616. skein_512_pr_rules(76): spark__crypto__i3__size >= 0 may_be_deduced. skein_512_pr_rules(77): spark__crypto__i3__first may_be_replaced_by 0. skein_512_pr_rules(78): spark__crypto__i3__last may_be_replaced_by 2. skein_512_pr_rules(79): spark__crypto__i3__base__first may_be_replaced_by -2147483648. skein_512_pr_rules(80): spark__crypto__i3__base__last may_be_replaced_by 2147483647. skein_512_pr_rules(81): spark__crypto__i8__size >= 0 may_be_deduced. skein_512_pr_rules(82): spark__crypto__i8__first may_be_replaced_by 0. skein_512_pr_rules(83): spark__crypto__i8__last may_be_replaced_by 7. skein_512_pr_rules(84): spark__crypto__i8__base__first may_be_replaced_by -2147483648. skein_512_pr_rules(85): spark__crypto__i8__base__last may_be_replaced_by 2147483647. skein_512_pr_rules(86): spark__crypto__i9__size >= 0 may_be_deduced. skein_512_pr_rules(87): spark__crypto__i9__first may_be_replaced_by 0. skein_512_pr_rules(88): spark__crypto__i9__last may_be_replaced_by 8. skein_512_pr_rules(89): spark__crypto__i9__base__first may_be_replaced_by -2147483648. skein_512_pr_rules(90): spark__crypto__i9__base__last may_be_replaced_by 2147483647. skein_512_pr_rules(91): spark__crypto__word_count_t__size >= 0 may_be_deduced. skein_512_pr_rules(92): spark__crypto__word_count_t__first may_be_replaced_by 0. 
skein_512_pr_rules(93): spark__crypto__word_count_t__last may_be_replaced_by 268435455. skein_512_pr_rules(94): spark__crypto__word_count_t__base__first may_be_replaced_by -2147483648. skein_512_pr_rules(95): spark__crypto__word_count_t__base__last may_be_replaced_by 2147483647. skein_512_pr_rules(96): hash_bit_length__size >= 0 may_be_deduced. skein_512_pr_rules(97): hash_bit_length__first may_be_replaced_by 0. skein_512_pr_rules(98): hash_bit_length__last may_be_replaced_by 2147483640. skein_512_pr_rules(99): hash_bit_length__base__first may_be_replaced_by -2147483648. skein_512_pr_rules(100): hash_bit_length__base__last may_be_replaced_by 2147483647. skein_512_pr_rules(101): initialized_hash_bit_length__size >= 0 may_be_deduced. skein_512_pr_rules(102): initialized_hash_bit_length__first may_be_replaced_by 1. skein_512_pr_rules(103): initialized_hash_bit_length__last may_be_replaced_by 2147483640. skein_512_pr_rules(104): initialized_hash_bit_length__base__first may_be_replaced_by -2147483648. skein_512_pr_rules(105): initialized_hash_bit_length__base__last may_be_replaced_by 2147483647. skein_512_pr_rules(106): skein_512_state_words_index__size >= 0 may_be_deduced. skein_512_pr_rules(107): skein_512_state_words_index__first may_be_replaced_by 0. skein_512_pr_rules(108): skein_512_state_words_index__last may_be_replaced_by 7. skein_512_pr_rules(109): skein_512_state_words_index__base__first may_be_replaced_by -2147483648. skein_512_pr_rules(110): skein_512_state_words_index__base__last may_be_replaced_by 2147483647. skein_512_pr_rules(111): skein_512_block_bytes_count__size >= 0 may_be_deduced. skein_512_pr_rules(112): skein_512_block_bytes_count__first may_be_replaced_by 0. skein_512_pr_rules(113): skein_512_block_bytes_count__last may_be_replaced_by 64. skein_512_pr_rules(114): skein_512_block_bytes_count__base__first may_be_replaced_by -2147483648. skein_512_pr_rules(115): skein_512_block_bytes_count__base__last may_be_replaced_by 2147483647. skein_512_pr_rules(116): skein_512_block_bytes_index__size >= 0 may_be_deduced. skein_512_pr_rules(117): skein_512_block_bytes_index__first may_be_replaced_by 0. skein_512_pr_rules(118): skein_512_block_bytes_index__last may_be_replaced_by 63. skein_512_pr_rules(119): skein_512_block_bytes_index__base__first may_be_replaced_by -2147483648. skein_512_pr_rules(120): skein_512_block_bytes_index__base__last may_be_replaced_by 2147483647. skein_512_pr_rules(121): positive_block_512_count_t__size >= 0 may_be_deduced. skein_512_pr_rules(122): positive_block_512_count_t__first may_be_replaced_by 1. skein_512_pr_rules(123): positive_block_512_count_t__last may_be_replaced_by 33554431. skein_512_pr_rules(124): positive_block_512_count_t__base__first may_be_replaced_by -2147483648. skein_512_pr_rules(125): positive_block_512_count_t__base__last may_be_replaced_by 2147483647. skein_512_pr_rules(126): skein_512_context__size >= 0 may_be_deduced. skein_512_pr_rules(127): A = B may_be_deduced_from [goal(checktype(A,skein_512_context)), goal(checktype(B,skein_512_context)), fld_h(A) = fld_h(B), fld_x(A) = fld_x(B), fld_b(A) = fld_b(B)]. skein_512_pr_rules(128): tweak_value__size >= 0 may_be_deduced. skein_512_pr_rules(129): tweak_value__size may_be_replaced_by 128. 
skein_512_pr_rules(130): A = B may_be_deduced_from [goal(checktype(A,tweak_value)), goal(checktype(B,tweak_value)), fld_byte_count_lsb(A) = fld_byte_count_lsb(B), fld_byte_count_msb(A) = fld_byte_count_msb(B), fld_reserved(A) = fld_reserved(B), fld_tree_level(A) = fld_tree_level(B), fld_bit_pad(A) = fld_bit_pad(B), fld_field_type(A) = fld_field_type(B), fld_first_block(A) = fld_first_block(B), fld_final_block(A) = fld_final_block(B)]. skein_512_pr_rules(131): context_header__size >= 0 may_be_deduced. skein_512_pr_rules(132): A = B may_be_deduced_from [goal(checktype(A,context_header)), goal(checktype(B,context_header)), fld_tweak_words(A) = fld_tweak_words(B), fld_hash_bit_len(A) = fld_hash_bit_len(B), fld_byte_count(A) = fld_byte_count(B)]. skein_512_pr_rules(133): block__index__subtype__1__first >= natural__first may_be_deduced. skein_512_pr_rules(134): block__index__subtype__1__last <= natural__last may_be_deduced. skein_512_pr_rules(135): block__index__subtype__1__first <= block__index__subtype__1__last may_be_deduced. skein_512_pr_rules(136): block__index__subtype__1__last >= natural__first may_be_deduced. skein_512_pr_rules(137): block__index__subtype__1__first <= natural__last may_be_deduced. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/put_64_lsb_first.fdl0000644000175000017500000001023611712513676027032 0ustar eugeneugen {*******************************************************} {FDL Declarations} {Examiner Pro Edition} {*******************************************************} {procedure Skein.Put_64_LSB_First} title procedure put_64_lsb_first; function round__(real) : integer; type natural = integer; type interfaces__unsigned_8 = integer; type interfaces__unsigned_64 = integer; type spark__unsigned__byte = integer; type spark__unsigned__u64 = integer; type spark__unsigned__shift_count = integer; type spark__crypto__word_count_t = integer; type spark__crypto__byte_seq = array [natural] of spark__unsigned__byte; type spark__crypto__u64_seq = array [ spark__crypto__word_count_t] of spark__unsigned__u64; const spark__crypto__word_count_t__base__first : integer = pending; const spark__crypto__word_count_t__base__last : integer = pending; const spark__unsigned__shift_count__base__first : integer = pending; const spark__unsigned__shift_count__base__last : integer = pending; const spark__unsigned__u64__base__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__base__last : interfaces__unsigned_64 = pending; const spark__unsigned__byte__base__first : interfaces__unsigned_8 = pending; const spark__unsigned__byte__base__last : interfaces__unsigned_8 = pending; const interfaces__unsigned_64__base__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_8__base__first : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__base__last : interfaces__unsigned_8 = pending; const natural__base__first : integer = pending; const natural__base__last : integer = pending; const integer__base__first : integer = pending; const integer__base__last : integer = pending; const src__index__subtype__1__first : integer = pending; const src__index__subtype__1__last : integer = pending; const dst__index__subtype__1__first : integer = pending; const dst__index__subtype__1__last : integer = pending; const spark__crypto__word_count_t__first : integer = pending; const spark__crypto__word_count_t__last : integer = pending; const spark__crypto__word_count_t__size : integer = pending; 
const spark__unsigned__shift_count__first : integer = pending; const spark__unsigned__shift_count__last : integer = pending; const spark__unsigned__shift_count__size : integer = pending; const spark__unsigned__u64__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__last : interfaces__unsigned_64 = pending; const spark__unsigned__u64__modulus : integer = pending; const spark__unsigned__u64__size : integer = pending; const spark__unsigned__byte__first : interfaces__unsigned_8 = pending; const spark__unsigned__byte__last : interfaces__unsigned_8 = pending; const spark__unsigned__byte__modulus : integer = pending; const spark__unsigned__byte__size : integer = pending; const interfaces__unsigned_64__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__modulus : integer = pending; const interfaces__unsigned_64__size : integer = pending; const interfaces__unsigned_8__first : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__last : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__modulus : integer = pending; const interfaces__unsigned_8__size : integer = pending; const natural__first : integer = pending; const natural__last : integer = pending; const natural__size : integer = pending; const integer__first : integer = pending; const integer__last : integer = pending; const integer__size : integer = pending; var dst : spark__crypto__byte_seq; var dst_offset : integer; var src : spark__crypto__u64_seq; var byte_count : integer; var loop__1__n : integer; function spark__unsigned__shift_right_64( interfaces__unsigned_64, integer) : interfaces__unsigned_64; end; spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/get_64_lsb_first.rls0000644000175000017500000001205111712513676027031 0ustar eugeneugen /*********************************************************/ /*Proof Rule Declarations*/ /*Examiner Pro Edition*/ /*********************************************************/ /*procedure Skein.Get_64_LSB_First*/ rule_family get_64_lsb_f_rules: X requires [X:any] & X <= Y requires [X:ire, Y:ire] & X >= Y requires [X:ire, Y:ire]. get_64_lsb_f_rules(1): integer__size >= 0 may_be_deduced. get_64_lsb_f_rules(2): integer__first may_be_replaced_by -2147483648. get_64_lsb_f_rules(3): integer__last may_be_replaced_by 2147483647. get_64_lsb_f_rules(4): integer__base__first may_be_replaced_by -2147483648. get_64_lsb_f_rules(5): integer__base__last may_be_replaced_by 2147483647. get_64_lsb_f_rules(6): natural__size >= 0 may_be_deduced. get_64_lsb_f_rules(7): natural__first may_be_replaced_by 0. get_64_lsb_f_rules(8): natural__last may_be_replaced_by 2147483647. get_64_lsb_f_rules(9): natural__base__first may_be_replaced_by -2147483648. get_64_lsb_f_rules(10): natural__base__last may_be_replaced_by 2147483647. get_64_lsb_f_rules(11): interfaces__unsigned_8__size >= 0 may_be_deduced. get_64_lsb_f_rules(12): interfaces__unsigned_8__size may_be_replaced_by 8. get_64_lsb_f_rules(13): interfaces__unsigned_8__first may_be_replaced_by 0. get_64_lsb_f_rules(14): interfaces__unsigned_8__last may_be_replaced_by 255. get_64_lsb_f_rules(15): interfaces__unsigned_8__base__first may_be_replaced_by 0. get_64_lsb_f_rules(16): interfaces__unsigned_8__base__last may_be_replaced_by 255. get_64_lsb_f_rules(17): interfaces__unsigned_8__modulus may_be_replaced_by 256. get_64_lsb_f_rules(18): interfaces__unsigned_64__size >= 0 may_be_deduced. 
get_64_lsb_f_rules(19): interfaces__unsigned_64__size may_be_replaced_by 64. get_64_lsb_f_rules(20): interfaces__unsigned_64__first may_be_replaced_by 0. get_64_lsb_f_rules(21): interfaces__unsigned_64__last may_be_replaced_by 18446744073709551615. get_64_lsb_f_rules(22): interfaces__unsigned_64__base__first may_be_replaced_by 0. get_64_lsb_f_rules(23): interfaces__unsigned_64__base__last may_be_replaced_by 18446744073709551615. get_64_lsb_f_rules(24): interfaces__unsigned_64__modulus may_be_replaced_by 18446744073709551616. get_64_lsb_f_rules(25): spark__unsigned__byte__size >= 0 may_be_deduced. get_64_lsb_f_rules(26): spark__unsigned__byte__first may_be_replaced_by 0. get_64_lsb_f_rules(27): spark__unsigned__byte__last may_be_replaced_by 255. get_64_lsb_f_rules(28): spark__unsigned__byte__base__first may_be_replaced_by 0. get_64_lsb_f_rules(29): spark__unsigned__byte__base__last may_be_replaced_by 255. get_64_lsb_f_rules(30): spark__unsigned__byte__modulus may_be_replaced_by 256. get_64_lsb_f_rules(31): spark__unsigned__u64__size >= 0 may_be_deduced. get_64_lsb_f_rules(32): spark__unsigned__u64__first may_be_replaced_by 0. get_64_lsb_f_rules(33): spark__unsigned__u64__last may_be_replaced_by 18446744073709551615. get_64_lsb_f_rules(34): spark__unsigned__u64__base__first may_be_replaced_by 0. get_64_lsb_f_rules(35): spark__unsigned__u64__base__last may_be_replaced_by 18446744073709551615. get_64_lsb_f_rules(36): spark__unsigned__u64__modulus may_be_replaced_by 18446744073709551616. get_64_lsb_f_rules(37): spark__unsigned__shift_count__size >= 0 may_be_deduced. get_64_lsb_f_rules(38): spark__unsigned__shift_count__first may_be_replaced_by 0. get_64_lsb_f_rules(39): spark__unsigned__shift_count__last may_be_replaced_by 64. get_64_lsb_f_rules(40): spark__unsigned__shift_count__base__first may_be_replaced_by -2147483648. get_64_lsb_f_rules(41): spark__unsigned__shift_count__base__last may_be_replaced_by 2147483647. get_64_lsb_f_rules(42): spark__crypto__word_count_t__size >= 0 may_be_deduced. get_64_lsb_f_rules(43): spark__crypto__word_count_t__first may_be_replaced_by 0. get_64_lsb_f_rules(44): spark__crypto__word_count_t__last may_be_replaced_by 268435455. get_64_lsb_f_rules(45): spark__crypto__word_count_t__base__first may_be_replaced_by -2147483648. get_64_lsb_f_rules(46): spark__crypto__word_count_t__base__last may_be_replaced_by 2147483647. get_64_lsb_f_rules(47): dst__index__subtype__1__first >= spark__crypto__word_count_t__first may_be_deduced. get_64_lsb_f_rules(48): dst__index__subtype__1__last <= spark__crypto__word_count_t__last may_be_deduced. get_64_lsb_f_rules(49): dst__index__subtype__1__first <= dst__index__subtype__1__last may_be_deduced. get_64_lsb_f_rules(50): dst__index__subtype__1__last >= spark__crypto__word_count_t__first may_be_deduced. get_64_lsb_f_rules(51): dst__index__subtype__1__first <= spark__crypto__word_count_t__last may_be_deduced. get_64_lsb_f_rules(52): src__index__subtype__1__first >= natural__first may_be_deduced. get_64_lsb_f_rules(53): src__index__subtype__1__last <= natural__last may_be_deduced. get_64_lsb_f_rules(54): src__index__subtype__1__first <= src__index__subtype__1__last may_be_deduced. get_64_lsb_f_rules(55): src__index__subtype__1__last >= natural__first may_be_deduced. get_64_lsb_f_rules(56): src__index__subtype__1__first <= natural__last may_be_deduced. 
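The FDL declarations for Skein.Put_64_LSB_First and the proof rules for Skein.Get_64_LSB_First above belong to the byte/word (de)serialization helpers whose run-time checks are discharged in the surrounding .vcg/.siv files. The following is a minimal illustrative sketch, not the shipped SPARKSkein body: the parameter names (Dst, Dst_Offset, Src, Byte_Count) and the shift operation mirror the FDL identifiers (dst, dst_offset, src, byte_count, spark__unsigned__shift_right_64), while the package name, array types and loop shape are assumptions made only for this example.

--  Illustrative sketch (assumed, not taken from the SPARKSkein sources) of
--  an LSB-first packing loop of the kind described by put_64_lsb_first.fdl.
with Interfaces; use Interfaces;

package LSB_First_Sketch is

   subtype Byte is Unsigned_8;
   type Byte_Seq is array (Natural range <>) of Byte;
   type U64_Seq  is array (Natural range <>) of Unsigned_64;

   --  Copy Byte_Count bytes of Src into Dst, least significant byte of
   --  each 64-bit word first, starting at index Dst_Offset.
   procedure Put_64_LSB_First
     (Dst        : in out Byte_Seq;
      Dst_Offset : in     Natural;
      Src        : in     U64_Seq;
      Byte_Count : in     Natural);

end LSB_First_Sketch;

package body LSB_First_Sketch is

   procedure Put_64_LSB_First
     (Dst        : in out Byte_Seq;
      Dst_Offset : in     Natural;
      Src        : in     U64_Seq;
      Byte_Count : in     Natural)
   is
   begin
      --  Byte N of the output is bits 8*(N mod 8) .. 8*(N mod 8)+7 of
      --  word N/8 of the input.
      for N in 0 .. Byte_Count - 1 loop
         Dst (Dst_Offset + N) :=
           Byte (Shift_Right (Src (N / 8), 8 * (N mod 8)) and 16#FF#);
      end loop;
   end Put_64_LSB_First;

end LSB_First_Sketch;

In a loop of this shape, the verification conditions reduce to showing that Dst_Offset + N, N / 8 and the shift amount 8 * (N mod 8) stay within the index and shift-count subtypes whose bounds the .rls rules above substitute (for example, spark__unsigned__shift_count__last may_be_replaced_by 64).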
spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_init.log0000644000175000017500000000160511712513676026401 0ustar eugeneugenSPARK Simplifier Pro Edition Reading skein_512_init.fdl (for inherited FDL type declarations) Reading skein.rlu (for user-defined proof rules) Processing skein_512_init.vcg ... Simplified VC: 1 - All conclusions proved Simplified VC: 2 - All conclusions proved Simplified VC: 3 - All conclusions proved Simplified VC: 4 - All conclusions proved Simplified VC: 5 - All conclusions proved Simplified VC: 6 - All conclusions proved Simplified VC: 7 - All conclusions proved Simplified VC: 8 - All conclusions proved Simplified VC: 9 - All conclusions proved Simplified VC: 10 - All conclusions proved Simplified VC: 11 - All conclusions proved Simplified VC: 12 - All conclusions proved Simplified VC: 13 - All conclusions proved Simplified VC: 14 - All conclusions proved Simplified VC: 15 - All conclusions proved Automatic simplification completed. Simplified output sent to skein_512_init.siv. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_init.vcg0000644000175000017500000020264411712513676026405 0ustar eugeneugen ******************************************************* Semantic Analysis of SPARK Text Examiner Pro Edition ******************************************************* procedure Skein.Skein_512_Init For path(s) from start to run-time check associated with statement of line 682: procedure_skein_512_init_1. H1: true . H2: hashbitlen >= initialized_hash_bit_length__first . H3: hashbitlen <= initialized_hash_bit_length__last . -> C1: hashbitlen >= hash_bit_length__first . C2: hashbitlen <= hash_bit_length__last . For path(s) from start to run-time check associated with statement of line 684: procedure_skein_512_init_2. H1: true . H2: hashbitlen >= initialized_hash_bit_length__first . H3: hashbitlen <= initialized_hash_bit_length__last . H4: hashbitlen >= hash_bit_length__first . H5: hashbitlen <= hash_bit_length__last . -> C1: skein_block_type_cfg >= spark__unsigned__u6__first . C2: skein_block_type_cfg <= spark__unsigned__u6__last . For path(s) from start to check associated with statement of line 686: procedure_skein_512_init_3. H1: true . H2: hashbitlen >= initialized_hash_bit_length__first . H3: hashbitlen <= initialized_hash_bit_length__last . H4: hashbitlen >= hash_bit_length__first . H5: hashbitlen <= hash_bit_length__last . H6: skein_block_type_cfg >= spark__unsigned__u6__first . H7: skein_block_type_cfg <= spark__unsigned__u6__last . H8: fld_h(ctx__1) = upf_byte_count(upf_tweak_words(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h( null_skein_512_context), hashbitlen))), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_cfg, first_block := true, final_block := true)), 0) . H9: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h( null_skein_512_context), hashbitlen)))) . H10: fld_byte_count(fld_h(ctx__1)) = 0 . H11: fld_byte_count(fld_h(ctx__1)) >= natural__first . H12: fld_byte_count(fld_h(ctx__1)) <= natural__last . H13: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H14: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H15: true . H16: true . H17: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H18: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H19: true . 
H20: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H21: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H22: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H23: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H25: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H27: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H28: fld_x(ctx__1) = fld_x(upf_h(null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context), hashbitlen))) . H29: fld_b(ctx__1) = fld_b(upf_h(null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context), hashbitlen))) . H30: ctx__1 = upf_h(upf_h(null_skein_512_context, upf_hash_bit_len( fld_h(null_skein_512_context), hashbitlen)), fld_h(ctx__1)) . -> C1: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen . For path(s) from start to run-time check associated with statement of line 690: procedure_skein_512_init_4. H1: true . H2: hashbitlen >= initialized_hash_bit_length__first . H3: hashbitlen <= initialized_hash_bit_length__last . H4: hashbitlen >= hash_bit_length__first . H5: hashbitlen <= hash_bit_length__last . H6: skein_block_type_cfg >= spark__unsigned__u6__first . H7: skein_block_type_cfg <= spark__unsigned__u6__last . H8: fld_h(ctx__1) = upf_byte_count(upf_tweak_words(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h( null_skein_512_context), hashbitlen))), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_cfg, first_block := true, final_block := true)), 0) . H9: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h( null_skein_512_context), hashbitlen)))) . H10: fld_byte_count(fld_h(ctx__1)) = 0 . H11: fld_byte_count(fld_h(ctx__1)) >= natural__first . H12: fld_byte_count(fld_h(ctx__1)) <= natural__last . H13: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H14: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H15: true . H16: true . H17: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H18: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H19: true . H20: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H21: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H22: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H23: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H25: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H27: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H28: fld_x(ctx__1) = fld_x(upf_h(null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context), hashbitlen))) . H29: fld_b(ctx__1) = fld_b(upf_h(null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context), hashbitlen))) . 
H30: ctx__1 = upf_h(upf_h(null_skein_512_context, upf_hash_bit_len( fld_h(null_skein_512_context), hashbitlen)), fld_h(ctx__1)) . H31: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen . -> C1: skein_schema_ver >= interfaces__unsigned_64__first . C2: skein_schema_ver <= interfaces__unsigned_64__last . For path(s) from start to run-time check associated with statement of line 690: procedure_skein_512_init_5. H1: true . H2: hashbitlen >= initialized_hash_bit_length__first . H3: hashbitlen <= initialized_hash_bit_length__last . H4: hashbitlen >= hash_bit_length__first . H5: hashbitlen <= hash_bit_length__last . H6: skein_block_type_cfg >= spark__unsigned__u6__first . H7: skein_block_type_cfg <= spark__unsigned__u6__last . H8: fld_h(ctx__1) = upf_byte_count(upf_tweak_words(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h( null_skein_512_context), hashbitlen))), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_cfg, first_block := true, final_block := true)), 0) . H9: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h( null_skein_512_context), hashbitlen)))) . H10: fld_byte_count(fld_h(ctx__1)) = 0 . H11: fld_byte_count(fld_h(ctx__1)) >= natural__first . H12: fld_byte_count(fld_h(ctx__1)) <= natural__last . H13: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H14: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H15: true . H16: true . H17: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H18: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H19: true . H20: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H21: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H22: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H23: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H25: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H27: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H28: fld_x(ctx__1) = fld_x(upf_h(null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context), hashbitlen))) . H29: fld_b(ctx__1) = fld_b(upf_h(null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context), hashbitlen))) . H30: ctx__1 = upf_h(upf_h(null_skein_512_context, upf_hash_bit_len( fld_h(null_skein_512_context), hashbitlen)), fld_h(ctx__1)) . H31: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen . H32: skein_schema_ver >= interfaces__unsigned_64__first . H33: skein_schema_ver <= interfaces__unsigned_64__last . H34: spark__unsigned__to_littleendian(skein_schema_ver) >= interfaces__unsigned_64__first . H35: spark__unsigned__to_littleendian(skein_schema_ver) <= interfaces__unsigned_64__last . -> C1: hashbitlen >= interfaces__unsigned_64__first . C2: hashbitlen <= interfaces__unsigned_64__last . C3: hashbitlen >= spark__unsigned__u64__first . C4: hashbitlen <= spark__unsigned__u64__last . C5: spark__unsigned__to_littleendian(skein_schema_ver) >= spark__unsigned__u64__first . C6: spark__unsigned__to_littleendian(skein_schema_ver) <= spark__unsigned__u64__last . 
For path(s) from start to run-time check associated with statement of line 690: procedure_skein_512_init_6. H1: true . H2: hashbitlen >= initialized_hash_bit_length__first . H3: hashbitlen <= initialized_hash_bit_length__last . H4: hashbitlen >= hash_bit_length__first . H5: hashbitlen <= hash_bit_length__last . H6: skein_block_type_cfg >= spark__unsigned__u6__first . H7: skein_block_type_cfg <= spark__unsigned__u6__last . H8: fld_h(ctx__1) = upf_byte_count(upf_tweak_words(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h( null_skein_512_context), hashbitlen))), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_cfg, first_block := true, final_block := true)), 0) . H9: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h( null_skein_512_context), hashbitlen)))) . H10: fld_byte_count(fld_h(ctx__1)) = 0 . H11: fld_byte_count(fld_h(ctx__1)) >= natural__first . H12: fld_byte_count(fld_h(ctx__1)) <= natural__last . H13: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H14: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H15: true . H16: true . H17: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H18: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H19: true . H20: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H21: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H22: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H23: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H25: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H27: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H28: fld_x(ctx__1) = fld_x(upf_h(null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context), hashbitlen))) . H29: fld_b(ctx__1) = fld_b(upf_h(null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context), hashbitlen))) . H30: ctx__1 = upf_h(upf_h(null_skein_512_context, upf_hash_bit_len( fld_h(null_skein_512_context), hashbitlen)), fld_h(ctx__1)) . H31: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen . H32: skein_schema_ver >= interfaces__unsigned_64__first . H33: skein_schema_ver <= interfaces__unsigned_64__last . H34: spark__unsigned__to_littleendian(skein_schema_ver) >= interfaces__unsigned_64__first . H35: spark__unsigned__to_littleendian(skein_schema_ver) <= interfaces__unsigned_64__last . H36: hashbitlen >= interfaces__unsigned_64__first . H37: hashbitlen <= interfaces__unsigned_64__last . H38: hashbitlen >= spark__unsigned__u64__first . H39: hashbitlen <= spark__unsigned__u64__last . H40: spark__unsigned__to_littleendian(skein_schema_ver) >= spark__unsigned__u64__first . H41: spark__unsigned__to_littleendian(skein_schema_ver) <= spark__unsigned__u64__last . H42: spark__unsigned__to_littleendian(hashbitlen) >= interfaces__unsigned_64__first . H43: spark__unsigned__to_littleendian(hashbitlen) <= interfaces__unsigned_64__last . -> C1: skein_cfg_tree_info_sequential >= interfaces__unsigned_64__first . C2: skein_cfg_tree_info_sequential <= interfaces__unsigned_64__last . 
C3: spark__unsigned__to_littleendian(hashbitlen) >= spark__unsigned__u64__first . C4: spark__unsigned__to_littleendian(hashbitlen) <= spark__unsigned__u64__last . For path(s) from start to run-time check associated with statement of line 690: procedure_skein_512_init_7. H1: true . H2: hashbitlen >= initialized_hash_bit_length__first . H3: hashbitlen <= initialized_hash_bit_length__last . H4: hashbitlen >= hash_bit_length__first . H5: hashbitlen <= hash_bit_length__last . H6: skein_block_type_cfg >= spark__unsigned__u6__first . H7: skein_block_type_cfg <= spark__unsigned__u6__last . H8: fld_h(ctx__1) = upf_byte_count(upf_tweak_words(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h( null_skein_512_context), hashbitlen))), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_cfg, first_block := true, final_block := true)), 0) . H9: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h( null_skein_512_context), hashbitlen)))) . H10: fld_byte_count(fld_h(ctx__1)) = 0 . H11: fld_byte_count(fld_h(ctx__1)) >= natural__first . H12: fld_byte_count(fld_h(ctx__1)) <= natural__last . H13: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H14: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H15: true . H16: true . H17: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H18: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H19: true . H20: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H21: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H22: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H23: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H25: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H27: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H28: fld_x(ctx__1) = fld_x(upf_h(null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context), hashbitlen))) . H29: fld_b(ctx__1) = fld_b(upf_h(null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context), hashbitlen))) . H30: ctx__1 = upf_h(upf_h(null_skein_512_context, upf_hash_bit_len( fld_h(null_skein_512_context), hashbitlen)), fld_h(ctx__1)) . H31: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen . H32: skein_schema_ver >= interfaces__unsigned_64__first . H33: skein_schema_ver <= interfaces__unsigned_64__last . H34: spark__unsigned__to_littleendian(skein_schema_ver) >= interfaces__unsigned_64__first . H35: spark__unsigned__to_littleendian(skein_schema_ver) <= interfaces__unsigned_64__last . H36: hashbitlen >= interfaces__unsigned_64__first . H37: hashbitlen <= interfaces__unsigned_64__last . H38: hashbitlen >= spark__unsigned__u64__first . H39: hashbitlen <= spark__unsigned__u64__last . H40: spark__unsigned__to_littleendian(skein_schema_ver) >= spark__unsigned__u64__first . H41: spark__unsigned__to_littleendian(skein_schema_ver) <= spark__unsigned__u64__last . H42: spark__unsigned__to_littleendian(hashbitlen) >= interfaces__unsigned_64__first . H43: spark__unsigned__to_littleendian(hashbitlen) <= interfaces__unsigned_64__last . 
H44: skein_cfg_tree_info_sequential >= interfaces__unsigned_64__first . H45: skein_cfg_tree_info_sequential <= interfaces__unsigned_64__last . H46: spark__unsigned__to_littleendian(hashbitlen) >= spark__unsigned__u64__first . H47: spark__unsigned__to_littleendian(hashbitlen) <= spark__unsigned__u64__last . H48: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) >= interfaces__unsigned_64__first . H49: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) <= interfaces__unsigned_64__last . -> C1: 0 >= spark__unsigned__u64__first . C2: 0 <= spark__unsigned__u64__last . C3: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) >= spark__unsigned__u64__first . C4: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) <= spark__unsigned__u64__last . For path(s) from start to run-time check associated with statement of line 698: procedure_skein_512_init_8. H1: true . H2: hashbitlen >= initialized_hash_bit_length__first . H3: hashbitlen <= initialized_hash_bit_length__last . H4: hashbitlen >= hash_bit_length__first . H5: hashbitlen <= hash_bit_length__last . H6: skein_block_type_cfg >= spark__unsigned__u6__first . H7: skein_block_type_cfg <= spark__unsigned__u6__last . H8: fld_h(ctx__1) = upf_byte_count(upf_tweak_words(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h( null_skein_512_context), hashbitlen))), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_cfg, first_block := true, final_block := true)), 0) . H9: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h( null_skein_512_context), hashbitlen)))) . H10: fld_byte_count(fld_h(ctx__1)) = 0 . H11: fld_byte_count(fld_h(ctx__1)) >= natural__first . H12: fld_byte_count(fld_h(ctx__1)) <= natural__last . H13: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H14: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H15: true . H16: true . H17: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H18: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H19: true . H20: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H21: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H22: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H23: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H25: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H27: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H28: fld_x(ctx__1) = fld_x(upf_h(null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context), hashbitlen))) . H29: fld_b(ctx__1) = fld_b(upf_h(null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context), hashbitlen))) . H30: ctx__1 = upf_h(upf_h(null_skein_512_context, upf_hash_bit_len( fld_h(null_skein_512_context), hashbitlen)), fld_h(ctx__1)) . H31: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen . H32: skein_schema_ver >= interfaces__unsigned_64__first . H33: skein_schema_ver <= interfaces__unsigned_64__last . H34: spark__unsigned__to_littleendian(skein_schema_ver) >= interfaces__unsigned_64__first . 
H35: spark__unsigned__to_littleendian(skein_schema_ver) <= interfaces__unsigned_64__last . H36: hashbitlen >= interfaces__unsigned_64__first . H37: hashbitlen <= interfaces__unsigned_64__last . H38: hashbitlen >= spark__unsigned__u64__first . H39: hashbitlen <= spark__unsigned__u64__last . H40: spark__unsigned__to_littleendian(skein_schema_ver) >= spark__unsigned__u64__first . H41: spark__unsigned__to_littleendian(skein_schema_ver) <= spark__unsigned__u64__last . H42: spark__unsigned__to_littleendian(hashbitlen) >= interfaces__unsigned_64__first . H43: spark__unsigned__to_littleendian(hashbitlen) <= interfaces__unsigned_64__last . H44: skein_cfg_tree_info_sequential >= interfaces__unsigned_64__first . H45: skein_cfg_tree_info_sequential <= interfaces__unsigned_64__last . H46: spark__unsigned__to_littleendian(hashbitlen) >= spark__unsigned__u64__first . H47: spark__unsigned__to_littleendian(hashbitlen) <= spark__unsigned__u64__last . H48: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) >= interfaces__unsigned_64__first . H49: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) <= interfaces__unsigned_64__last . H50: 0 >= spark__unsigned__u64__first . H51: 0 <= spark__unsigned__u64__last . H52: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) >= spark__unsigned__u64__first . H53: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) <= spark__unsigned__u64__last . -> C1: 0 >= spark__unsigned__u64__first . C2: 0 <= spark__unsigned__u64__last . For path(s) from start to check associated with statement of line 701: procedure_skein_512_init_9. H1: true . H2: hashbitlen >= initialized_hash_bit_length__first . H3: hashbitlen <= initialized_hash_bit_length__last . H4: hashbitlen >= hash_bit_length__first . H5: hashbitlen <= hash_bit_length__last . H6: skein_block_type_cfg >= spark__unsigned__u6__first . H7: skein_block_type_cfg <= spark__unsigned__u6__last . H8: fld_h(ctx__1) = upf_byte_count(upf_tweak_words(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h( null_skein_512_context), hashbitlen))), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_cfg, first_block := true, final_block := true)), 0) . H9: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h( null_skein_512_context), hashbitlen)))) . H10: fld_byte_count(fld_h(ctx__1)) = 0 . H11: fld_byte_count(fld_h(ctx__1)) >= natural__first . H12: fld_byte_count(fld_h(ctx__1)) <= natural__last . H13: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H14: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H15: true . H16: true . H17: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H18: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H19: true . H20: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H21: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H22: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H23: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H25: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . 
H27: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H28: fld_x(ctx__1) = fld_x(upf_h(null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context), hashbitlen))) . H29: fld_b(ctx__1) = fld_b(upf_h(null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context), hashbitlen))) . H30: ctx__1 = upf_h(upf_h(null_skein_512_context, upf_hash_bit_len( fld_h(null_skein_512_context), hashbitlen)), fld_h(ctx__1)) . H31: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen . H32: skein_schema_ver >= interfaces__unsigned_64__first . H33: skein_schema_ver <= interfaces__unsigned_64__last . H34: spark__unsigned__to_littleendian(skein_schema_ver) >= interfaces__unsigned_64__first . H35: spark__unsigned__to_littleendian(skein_schema_ver) <= interfaces__unsigned_64__last . H36: hashbitlen >= interfaces__unsigned_64__first . H37: hashbitlen <= interfaces__unsigned_64__last . H38: hashbitlen >= spark__unsigned__u64__first . H39: hashbitlen <= spark__unsigned__u64__last . H40: spark__unsigned__to_littleendian(skein_schema_ver) >= spark__unsigned__u64__first . H41: spark__unsigned__to_littleendian(skein_schema_ver) <= spark__unsigned__u64__last . H42: spark__unsigned__to_littleendian(hashbitlen) >= interfaces__unsigned_64__first . H43: spark__unsigned__to_littleendian(hashbitlen) <= interfaces__unsigned_64__last . H44: skein_cfg_tree_info_sequential >= interfaces__unsigned_64__first . H45: skein_cfg_tree_info_sequential <= interfaces__unsigned_64__last . H46: spark__unsigned__to_littleendian(hashbitlen) >= spark__unsigned__u64__first . H47: spark__unsigned__to_littleendian(hashbitlen) <= spark__unsigned__u64__last . H48: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) >= interfaces__unsigned_64__first . H49: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) <= interfaces__unsigned_64__last . H50: 0 >= spark__unsigned__u64__first . H51: 0 <= spark__unsigned__u64__last . H52: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) >= spark__unsigned__u64__first . H53: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) <= spark__unsigned__u64__last . H54: 0 >= spark__unsigned__u64__first . H55: 0 <= spark__unsigned__u64__last . -> C1: fld_hash_bit_len(fld_h(upf_x(ctx__1, mk__spark__crypto__u64_seq(0)))) = hashbitlen . For path(s) from start to precondition check associated with statement of line 704: procedure_skein_512_init_10. H1: true . H2: hashbitlen >= initialized_hash_bit_length__first . H3: hashbitlen <= initialized_hash_bit_length__last . H4: hashbitlen >= hash_bit_length__first . H5: hashbitlen <= hash_bit_length__last . H6: skein_block_type_cfg >= spark__unsigned__u6__first . H7: skein_block_type_cfg <= spark__unsigned__u6__last . H8: fld_h(ctx__1) = upf_byte_count(upf_tweak_words(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h( null_skein_512_context), hashbitlen))), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_cfg, first_block := true, final_block := true)), 0) . H9: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h( null_skein_512_context), hashbitlen)))) . H10: fld_byte_count(fld_h(ctx__1)) = 0 . H11: fld_byte_count(fld_h(ctx__1)) >= natural__first . H12: fld_byte_count(fld_h(ctx__1)) <= natural__last . H13: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H14: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . 
H15: true . H16: true . H17: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H18: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H19: true . H20: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H21: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H22: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H23: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H25: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H27: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H28: fld_x(ctx__1) = fld_x(upf_h(null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context), hashbitlen))) . H29: fld_b(ctx__1) = fld_b(upf_h(null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context), hashbitlen))) . H30: ctx__1 = upf_h(upf_h(null_skein_512_context, upf_hash_bit_len( fld_h(null_skein_512_context), hashbitlen)), fld_h(ctx__1)) . H31: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen . H32: skein_schema_ver >= interfaces__unsigned_64__first . H33: skein_schema_ver <= interfaces__unsigned_64__last . H34: spark__unsigned__to_littleendian(skein_schema_ver) >= interfaces__unsigned_64__first . H35: spark__unsigned__to_littleendian(skein_schema_ver) <= interfaces__unsigned_64__last . H36: hashbitlen >= interfaces__unsigned_64__first . H37: hashbitlen <= interfaces__unsigned_64__last . H38: hashbitlen >= spark__unsigned__u64__first . H39: hashbitlen <= spark__unsigned__u64__last . H40: spark__unsigned__to_littleendian(skein_schema_ver) >= spark__unsigned__u64__first . H41: spark__unsigned__to_littleendian(skein_schema_ver) <= spark__unsigned__u64__last . H42: spark__unsigned__to_littleendian(hashbitlen) >= interfaces__unsigned_64__first . H43: spark__unsigned__to_littleendian(hashbitlen) <= interfaces__unsigned_64__last . H44: skein_cfg_tree_info_sequential >= interfaces__unsigned_64__first . H45: skein_cfg_tree_info_sequential <= interfaces__unsigned_64__last . H46: spark__unsigned__to_littleendian(hashbitlen) >= spark__unsigned__u64__first . H47: spark__unsigned__to_littleendian(hashbitlen) <= spark__unsigned__u64__last . H48: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) >= interfaces__unsigned_64__first . H49: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) <= interfaces__unsigned_64__last . H50: 0 >= spark__unsigned__u64__first . H51: 0 <= spark__unsigned__u64__last . H52: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) >= spark__unsigned__u64__first . H53: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) <= spark__unsigned__u64__last . H54: 0 >= spark__unsigned__u64__first . H55: 0 <= spark__unsigned__u64__last . H56: fld_hash_bit_len(fld_h(upf_x(ctx__1, mk__spark__crypto__u64_seq(0)))) = hashbitlen . -> C1: skein_cfg_str_len >= natural__first . C2: skein_cfg_str_len <= natural__last . C3: 1 >= positive_block_512_count_t__first . C4: 1 <= positive_block_512_count_t__last . C5: 0 >= natural__first . C6: 0 <= natural__last . C7: fld_hash_bit_len(fld_h(upf_x(ctx__1, mk__spark__crypto__u64_seq(0)))) >= initialized_hash_bit_length__first . 
C8: fld_hash_bit_len(fld_h(upf_x(ctx__1, mk__spark__crypto__u64_seq(0)))) <= initialized_hash_bit_length__last . C9: fld_byte_count(fld_h(upf_x(ctx__1, mk__spark__crypto__u64_seq(0)))) >= skein_512_block_bytes_count__first . C10: fld_byte_count(fld_h(upf_x(ctx__1, mk__spark__crypto__u64_seq(0)))) <= skein_512_block_bytes_count__last . C11: skein_512_state_bytes_index__first = 0 . C12: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_state_bytes_index__last . C13: 0 + 63 <= skein_512_state_bytes_index__last . C14: skein_512_state_bytes_index__last <= natural__last . C15: 0 <= natural__last - 63 . For path(s) from start to check associated with statement of line 711: procedure_skein_512_init_11. H1: true . H2: hashbitlen >= initialized_hash_bit_length__first . H3: hashbitlen <= initialized_hash_bit_length__last . H4: hashbitlen >= hash_bit_length__first . H5: hashbitlen <= hash_bit_length__last . H6: skein_block_type_cfg >= spark__unsigned__u6__first . H7: skein_block_type_cfg <= spark__unsigned__u6__last . H8: fld_h(ctx__1) = upf_byte_count(upf_tweak_words(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h( null_skein_512_context), hashbitlen))), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_cfg, first_block := true, final_block := true)), 0) . H9: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h( null_skein_512_context), hashbitlen)))) . H10: fld_byte_count(fld_h(ctx__1)) = 0 . H11: fld_byte_count(fld_h(ctx__1)) >= natural__first . H12: fld_byte_count(fld_h(ctx__1)) <= natural__last . H13: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H14: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H15: true . H16: true . H17: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H18: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H19: true . H20: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H21: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H22: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H23: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H25: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H27: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H28: fld_x(ctx__1) = fld_x(upf_h(null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context), hashbitlen))) . H29: fld_b(ctx__1) = fld_b(upf_h(null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context), hashbitlen))) . H30: ctx__1 = upf_h(upf_h(null_skein_512_context, upf_hash_bit_len( fld_h(null_skein_512_context), hashbitlen)), fld_h(ctx__1)) . H31: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen . H32: skein_schema_ver >= interfaces__unsigned_64__first . H33: skein_schema_ver <= interfaces__unsigned_64__last . H34: spark__unsigned__to_littleendian(skein_schema_ver) >= interfaces__unsigned_64__first . H35: spark__unsigned__to_littleendian(skein_schema_ver) <= interfaces__unsigned_64__last . H36: hashbitlen >= interfaces__unsigned_64__first . H37: hashbitlen <= interfaces__unsigned_64__last . 
H38: hashbitlen >= spark__unsigned__u64__first . H39: hashbitlen <= spark__unsigned__u64__last . H40: spark__unsigned__to_littleendian(skein_schema_ver) >= spark__unsigned__u64__first . H41: spark__unsigned__to_littleendian(skein_schema_ver) <= spark__unsigned__u64__last . H42: spark__unsigned__to_littleendian(hashbitlen) >= interfaces__unsigned_64__first . H43: spark__unsigned__to_littleendian(hashbitlen) <= interfaces__unsigned_64__last . H44: skein_cfg_tree_info_sequential >= interfaces__unsigned_64__first . H45: skein_cfg_tree_info_sequential <= interfaces__unsigned_64__last . H46: spark__unsigned__to_littleendian(hashbitlen) >= spark__unsigned__u64__first . H47: spark__unsigned__to_littleendian(hashbitlen) <= spark__unsigned__u64__last . H48: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) >= interfaces__unsigned_64__first . H49: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) <= interfaces__unsigned_64__last . H50: 0 >= spark__unsigned__u64__first . H51: 0 <= spark__unsigned__u64__last . H52: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) >= spark__unsigned__u64__first . H53: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) <= spark__unsigned__u64__last . H54: 0 >= spark__unsigned__u64__first . H55: 0 <= spark__unsigned__u64__last . H56: fld_hash_bit_len(fld_h(upf_x(ctx__1, mk__spark__crypto__u64_seq(0)))) = hashbitlen . H57: skein_cfg_str_len >= natural__first . H58: skein_cfg_str_len <= natural__last . H59: 1 >= positive_block_512_count_t__first . H60: 1 <= positive_block_512_count_t__last . H61: 0 >= natural__first . H62: 0 <= natural__last . H63: fld_hash_bit_len(fld_h(upf_x(ctx__1, mk__spark__crypto__u64_seq(0)))) >= initialized_hash_bit_length__first . H64: fld_hash_bit_len(fld_h(upf_x(ctx__1, mk__spark__crypto__u64_seq(0)))) <= initialized_hash_bit_length__last . H65: fld_byte_count(fld_h(upf_x(ctx__1, mk__spark__crypto__u64_seq(0)))) >= skein_512_block_bytes_count__first . H66: fld_byte_count(fld_h(upf_x(ctx__1, mk__spark__crypto__u64_seq(0)))) <= skein_512_block_bytes_count__last . H67: skein_512_state_bytes_index__first = 0 . H68: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_state_bytes_index__last . H69: 0 + 63 <= skein_512_state_bytes_index__last . H70: skein_512_state_bytes_index__last <= natural__last . H71: 0 <= natural__last - 63 . H72: fld_hash_bit_len(fld_h(ctx__2)) >= initialized_hash_bit_length__first . H73: fld_hash_bit_len(fld_h(ctx__2)) <= initialized_hash_bit_length__last . H74: fld_hash_bit_len(fld_h(ctx__2)) = fld_hash_bit_len(fld_h(upf_x( ctx__1, mk__spark__crypto__u64_seq(0)))) . H75: fld_byte_count(fld_h(ctx__2)) >= skein_512_block_bytes_count__first . H76: fld_byte_count(fld_h(ctx__2)) <= skein_512_block_bytes_count__last . H77: fld_byte_count(fld_h(ctx__2)) = fld_byte_count(fld_h(upf_x( ctx__1, mk__spark__crypto__u64_seq(0)))) . H78: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H79: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H80: fld_byte_count(fld_h(ctx__2)) >= natural__first . H81: fld_byte_count(fld_h(ctx__2)) <= natural__last . 
H82: fld_hash_bit_len(fld_h(ctx__2)) >= hash_bit_length__first . H83: fld_hash_bit_len(fld_h(ctx__2)) <= hash_bit_length__last . H84: true . H85: true . H86: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u6__first . H87: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u6__last . H88: true . H89: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u7__first . H90: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u7__last . H91: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u16__first . H92: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u16__last . H93: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u32__first . H94: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u32__last . H95: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u64__first . H96: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u64__last . -> C1: fld_hash_bit_len(fld_h(ctx__2)) = hashbitlen . For path(s) from start to run-time check associated with statement of line 714: procedure_skein_512_init_12. H1: true . H2: hashbitlen >= initialized_hash_bit_length__first . H3: hashbitlen <= initialized_hash_bit_length__last . H4: hashbitlen >= hash_bit_length__first . H5: hashbitlen <= hash_bit_length__last . H6: skein_block_type_cfg >= spark__unsigned__u6__first . H7: skein_block_type_cfg <= spark__unsigned__u6__last . H8: fld_h(ctx__1) = upf_byte_count(upf_tweak_words(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h( null_skein_512_context), hashbitlen))), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_cfg, first_block := true, final_block := true)), 0) . H9: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h( null_skein_512_context), hashbitlen)))) . H10: fld_byte_count(fld_h(ctx__1)) = 0 . H11: fld_byte_count(fld_h(ctx__1)) >= natural__first . H12: fld_byte_count(fld_h(ctx__1)) <= natural__last . H13: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H14: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H15: true . H16: true . H17: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H18: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H19: true . H20: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H21: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H22: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H23: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H25: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H27: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H28: fld_x(ctx__1) = fld_x(upf_h(null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context), hashbitlen))) . H29: fld_b(ctx__1) = fld_b(upf_h(null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context), hashbitlen))) . H30: ctx__1 = upf_h(upf_h(null_skein_512_context, upf_hash_bit_len( fld_h(null_skein_512_context), hashbitlen)), fld_h(ctx__1)) . 
H31: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen . H32: skein_schema_ver >= interfaces__unsigned_64__first . H33: skein_schema_ver <= interfaces__unsigned_64__last . H34: spark__unsigned__to_littleendian(skein_schema_ver) >= interfaces__unsigned_64__first . H35: spark__unsigned__to_littleendian(skein_schema_ver) <= interfaces__unsigned_64__last . H36: hashbitlen >= interfaces__unsigned_64__first . H37: hashbitlen <= interfaces__unsigned_64__last . H38: hashbitlen >= spark__unsigned__u64__first . H39: hashbitlen <= spark__unsigned__u64__last . H40: spark__unsigned__to_littleendian(skein_schema_ver) >= spark__unsigned__u64__first . H41: spark__unsigned__to_littleendian(skein_schema_ver) <= spark__unsigned__u64__last . H42: spark__unsigned__to_littleendian(hashbitlen) >= interfaces__unsigned_64__first . H43: spark__unsigned__to_littleendian(hashbitlen) <= interfaces__unsigned_64__last . H44: skein_cfg_tree_info_sequential >= interfaces__unsigned_64__first . H45: skein_cfg_tree_info_sequential <= interfaces__unsigned_64__last . H46: spark__unsigned__to_littleendian(hashbitlen) >= spark__unsigned__u64__first . H47: spark__unsigned__to_littleendian(hashbitlen) <= spark__unsigned__u64__last . H48: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) >= interfaces__unsigned_64__first . H49: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) <= interfaces__unsigned_64__last . H50: 0 >= spark__unsigned__u64__first . H51: 0 <= spark__unsigned__u64__last . H52: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) >= spark__unsigned__u64__first . H53: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) <= spark__unsigned__u64__last . H54: 0 >= spark__unsigned__u64__first . H55: 0 <= spark__unsigned__u64__last . H56: fld_hash_bit_len(fld_h(upf_x(ctx__1, mk__spark__crypto__u64_seq(0)))) = hashbitlen . H57: skein_cfg_str_len >= natural__first . H58: skein_cfg_str_len <= natural__last . H59: 1 >= positive_block_512_count_t__first . H60: 1 <= positive_block_512_count_t__last . H61: 0 >= natural__first . H62: 0 <= natural__last . H63: fld_hash_bit_len(fld_h(upf_x(ctx__1, mk__spark__crypto__u64_seq(0)))) >= initialized_hash_bit_length__first . H64: fld_hash_bit_len(fld_h(upf_x(ctx__1, mk__spark__crypto__u64_seq(0)))) <= initialized_hash_bit_length__last . H65: fld_byte_count(fld_h(upf_x(ctx__1, mk__spark__crypto__u64_seq(0)))) >= skein_512_block_bytes_count__first . H66: fld_byte_count(fld_h(upf_x(ctx__1, mk__spark__crypto__u64_seq(0)))) <= skein_512_block_bytes_count__last . H67: skein_512_state_bytes_index__first = 0 . H68: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_state_bytes_index__last . H69: 0 + 63 <= skein_512_state_bytes_index__last . H70: skein_512_state_bytes_index__last <= natural__last . H71: 0 <= natural__last - 63 . H72: fld_hash_bit_len(fld_h(ctx__2)) >= initialized_hash_bit_length__first . H73: fld_hash_bit_len(fld_h(ctx__2)) <= initialized_hash_bit_length__last . H74: fld_hash_bit_len(fld_h(ctx__2)) = fld_hash_bit_len(fld_h(upf_x( ctx__1, mk__spark__crypto__u64_seq(0)))) . H75: fld_byte_count(fld_h(ctx__2)) >= skein_512_block_bytes_count__first . H76: fld_byte_count(fld_h(ctx__2)) <= skein_512_block_bytes_count__last . H77: fld_byte_count(fld_h(ctx__2)) = fld_byte_count(fld_h(upf_x( ctx__1, mk__spark__crypto__u64_seq(0)))) . 
H78: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H79: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H80: fld_byte_count(fld_h(ctx__2)) >= natural__first . H81: fld_byte_count(fld_h(ctx__2)) <= natural__last . H82: fld_hash_bit_len(fld_h(ctx__2)) >= hash_bit_length__first . H83: fld_hash_bit_len(fld_h(ctx__2)) <= hash_bit_length__last . H84: true . H85: true . H86: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u6__first . H87: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u6__last . H88: true . H89: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u7__first . H90: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u7__last . H91: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u16__first . H92: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u16__last . H93: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u32__first . H94: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u32__last . H95: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u64__first . H96: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u64__last . H97: fld_hash_bit_len(fld_h(ctx__2)) = hashbitlen . -> C1: skein_block_type_msg >= spark__unsigned__u6__first . C2: skein_block_type_msg <= spark__unsigned__u6__last . For path(s) from start to finish: procedure_skein_512_init_13. H1: true . H2: hashbitlen >= initialized_hash_bit_length__first . H3: hashbitlen <= initialized_hash_bit_length__last . H4: hashbitlen >= hash_bit_length__first . H5: hashbitlen <= hash_bit_length__last . H6: skein_block_type_cfg >= spark__unsigned__u6__first . H7: skein_block_type_cfg <= spark__unsigned__u6__last . H8: fld_h(ctx__1) = upf_byte_count(upf_tweak_words(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h( null_skein_512_context), hashbitlen))), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_cfg, first_block := true, final_block := true)), 0) . H9: fld_hash_bit_len(fld_h(ctx__1)) = fld_hash_bit_len(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h( null_skein_512_context), hashbitlen)))) . H10: fld_byte_count(fld_h(ctx__1)) = 0 . H11: fld_byte_count(fld_h(ctx__1)) >= natural__first . H12: fld_byte_count(fld_h(ctx__1)) <= natural__last . H13: fld_hash_bit_len(fld_h(ctx__1)) >= hash_bit_length__first . H14: fld_hash_bit_len(fld_h(ctx__1)) <= hash_bit_length__last . H15: true . H16: true . H17: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u6__first . H18: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u6__last . H19: true . H20: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u7__first . H21: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u7__last . H22: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u16__first . H23: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u16__last . 
H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u32__first . H25: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u32__last . H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= spark__unsigned__u64__first . H27: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= spark__unsigned__u64__last . H28: fld_x(ctx__1) = fld_x(upf_h(null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context), hashbitlen))) . H29: fld_b(ctx__1) = fld_b(upf_h(null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context), hashbitlen))) . H30: ctx__1 = upf_h(upf_h(null_skein_512_context, upf_hash_bit_len( fld_h(null_skein_512_context), hashbitlen)), fld_h(ctx__1)) . H31: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen . H32: skein_schema_ver >= interfaces__unsigned_64__first . H33: skein_schema_ver <= interfaces__unsigned_64__last . H34: spark__unsigned__to_littleendian(skein_schema_ver) >= interfaces__unsigned_64__first . H35: spark__unsigned__to_littleendian(skein_schema_ver) <= interfaces__unsigned_64__last . H36: hashbitlen >= interfaces__unsigned_64__first . H37: hashbitlen <= interfaces__unsigned_64__last . H38: hashbitlen >= spark__unsigned__u64__first . H39: hashbitlen <= spark__unsigned__u64__last . H40: spark__unsigned__to_littleendian(skein_schema_ver) >= spark__unsigned__u64__first . H41: spark__unsigned__to_littleendian(skein_schema_ver) <= spark__unsigned__u64__last . H42: spark__unsigned__to_littleendian(hashbitlen) >= interfaces__unsigned_64__first . H43: spark__unsigned__to_littleendian(hashbitlen) <= interfaces__unsigned_64__last . H44: skein_cfg_tree_info_sequential >= interfaces__unsigned_64__first . H45: skein_cfg_tree_info_sequential <= interfaces__unsigned_64__last . H46: spark__unsigned__to_littleendian(hashbitlen) >= spark__unsigned__u64__first . H47: spark__unsigned__to_littleendian(hashbitlen) <= spark__unsigned__u64__last . H48: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) >= interfaces__unsigned_64__first . H49: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) <= interfaces__unsigned_64__last . H50: 0 >= spark__unsigned__u64__first . H51: 0 <= spark__unsigned__u64__last . H52: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) >= spark__unsigned__u64__first . H53: spark__unsigned__to_littleendian( skein_cfg_tree_info_sequential) <= spark__unsigned__u64__last . H54: 0 >= spark__unsigned__u64__first . H55: 0 <= spark__unsigned__u64__last . H56: fld_hash_bit_len(fld_h(upf_x(ctx__1, mk__spark__crypto__u64_seq(0)))) = hashbitlen . H57: skein_cfg_str_len >= natural__first . H58: skein_cfg_str_len <= natural__last . H59: 1 >= positive_block_512_count_t__first . H60: 1 <= positive_block_512_count_t__last . H61: 0 >= natural__first . H62: 0 <= natural__last . H63: fld_hash_bit_len(fld_h(upf_x(ctx__1, mk__spark__crypto__u64_seq(0)))) >= initialized_hash_bit_length__first . H64: fld_hash_bit_len(fld_h(upf_x(ctx__1, mk__spark__crypto__u64_seq(0)))) <= initialized_hash_bit_length__last . H65: fld_byte_count(fld_h(upf_x(ctx__1, mk__spark__crypto__u64_seq(0)))) >= skein_512_block_bytes_count__first . H66: fld_byte_count(fld_h(upf_x(ctx__1, mk__spark__crypto__u64_seq(0)))) <= skein_512_block_bytes_count__last . H67: skein_512_state_bytes_index__first = 0 . H68: 0 + (1 - 1) * skein_512_block_bytes_c + 63 <= skein_512_state_bytes_index__last . H69: 0 + 63 <= skein_512_state_bytes_index__last . H70: skein_512_state_bytes_index__last <= natural__last . 
H71: 0 <= natural__last - 63 . H72: fld_hash_bit_len(fld_h(ctx__2)) >= initialized_hash_bit_length__first . H73: fld_hash_bit_len(fld_h(ctx__2)) <= initialized_hash_bit_length__last . H74: fld_hash_bit_len(fld_h(ctx__2)) = fld_hash_bit_len(fld_h(upf_x( ctx__1, mk__spark__crypto__u64_seq(0)))) . H75: fld_byte_count(fld_h(ctx__2)) >= skein_512_block_bytes_count__first . H76: fld_byte_count(fld_h(ctx__2)) <= skein_512_block_bytes_count__last . H77: fld_byte_count(fld_h(ctx__2)) = fld_byte_count(fld_h(upf_x( ctx__1, mk__spark__crypto__u64_seq(0)))) . H78: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__2), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last))) . H79: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__2), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last))) . H80: fld_byte_count(fld_h(ctx__2)) >= natural__first . H81: fld_byte_count(fld_h(ctx__2)) <= natural__last . H82: fld_hash_bit_len(fld_h(ctx__2)) >= hash_bit_length__first . H83: fld_hash_bit_len(fld_h(ctx__2)) <= hash_bit_length__last . H84: true . H85: true . H86: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u6__first . H87: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u6__last . H88: true . H89: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u7__first . H90: fld_tree_level(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u7__last . H91: fld_reserved(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u16__first . H92: fld_reserved(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u16__last . H93: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u32__first . H94: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u32__last . H95: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) >= spark__unsigned__u64__first . H96: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__2))) <= spark__unsigned__u64__last . H97: fld_hash_bit_len(fld_h(ctx__2)) = hashbitlen . H98: skein_block_type_msg >= spark__unsigned__u6__first . H99: skein_block_type_msg <= spark__unsigned__u6__last . H100: fld_h(ctx__3) = upf_byte_count(upf_tweak_words(fld_h(ctx__2), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_msg, first_block := true, final_block := false)), 0) . H101: fld_hash_bit_len(fld_h(ctx__3)) = fld_hash_bit_len(fld_h(ctx__2)) . H102: fld_byte_count(fld_h(ctx__3)) = 0 . H103: fld_byte_count(fld_h(ctx__3)) >= natural__first . H104: fld_byte_count(fld_h(ctx__3)) <= natural__last . H105: fld_hash_bit_len(fld_h(ctx__3)) >= hash_bit_length__first . H106: fld_hash_bit_len(fld_h(ctx__3)) <= hash_bit_length__last . H107: true . H108: true . H109: fld_field_type(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u6__first . H110: fld_field_type(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u6__last . H111: true . H112: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u7__first . H113: fld_tree_level(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u7__last . H114: fld_reserved(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u16__first . H115: fld_reserved(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u16__last . 
H116: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u32__first . H117: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u32__last . H118: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) >= spark__unsigned__u64__first . H119: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__3))) <= spark__unsigned__u64__last . H120: fld_x(ctx__3) = fld_x(ctx__2) . H121: fld_b(ctx__3) = fld_b(ctx__2) . H122: ctx__3 = upf_h(ctx__2, fld_h(ctx__3)) . -> C1: fld_hash_bit_len(fld_h(ctx__3)) >= initialized_hash_bit_length__first . C2: fld_hash_bit_len(fld_h(ctx__3)) <= initialized_hash_bit_length__last . C3: fld_hash_bit_len(fld_h(ctx__3)) = hashbitlen . C4: fld_byte_count(fld_h(ctx__3)) = 0 . C5: fld_byte_count(fld_h(ctx__3)) >= skein_512_block_bytes_count__first . C6: fld_byte_count(fld_h(ctx__3)) <= skein_512_block_bytes_count__last . For checks of refinement integrity: procedure_skein_512_init_14. *** true . /* trivially true VC removed by Examiner */ procedure_skein_512_init_15. H1: true . H2: true . H3: hashbitlen >= initialized_hash_bit_length__first . H4: hashbitlen <= initialized_hash_bit_length__last . H5: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H6: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H7: fld_byte_count(fld_h(ctx)) >= natural__first . H8: fld_byte_count(fld_h(ctx)) <= natural__last . H9: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H10: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H11: true . H12: true . H13: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H14: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H15: true . H16: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H17: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H18: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H19: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H20: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H21: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H22: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H23: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H24: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H25: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H26: fld_hash_bit_len(fld_h(ctx)) = hashbitlen . H27: fld_byte_count(fld_h(ctx)) = 0 . H28: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H29: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . -> C1: hash_bit_len_of(ctx) >= initialized_hash_bit_length__first . C2: hash_bit_len_of(ctx) <= initialized_hash_bit_length__last . C3: hash_bit_len_of(ctx) = hashbitlen . C4: byte_count_of(ctx) = 0 . C5: byte_count_of(ctx) >= skein_512_block_bytes_count__first . C6: byte_count_of(ctx) <= skein_512_block_bytes_count__last . 
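The refinement-integrity VC procedure_skein_512_init_15 above relates the concrete context fields (fld_hash_bit_len, fld_byte_count) to the proof functions hash_bit_len_of and byte_count_of used in the exported postcondition of Skein_512_Init. A minimal SPARK 2005 sketch of the kind of specification those conclusions discharge is given below; the parameter names, the derives clause and the exact annotation wording are reconstructed from the VC identifiers only and may differ from the shipped skein.ads.

   procedure Skein_512_Init
     (Ctx        :    out Skein_512_Context;
      HashBitLen : in     Initialized_Hash_Bit_Length);
   --# derives Ctx from HashBitLen;                             -- assumed dependency
   --# post Hash_Bit_Len_Of (Ctx) in Initialized_Hash_Bit_Length and
   --#      Hash_Bit_Len_Of (Ctx) = HashBitLen and
   --#      Byte_Count_Of (Ctx) in Skein_512_Block_Bytes_Count and
   --#      Byte_Count_Of (Ctx) = 0;
   -- The six conclusions C1 .. C6 of procedure_skein_512_init_15 correspond
   -- one-for-one to the conjuncts of a postcondition of this shape.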
spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_start_new_type.fdl0000644000175000017500000001306711712513676030107 0ustar eugeneugen {*******************************************************} {FDL Declarations} {Examiner Pro Edition} {*******************************************************} {procedure Skein.Skein_Start_New_Type} title procedure skein_start_new_type; function round__(real) : integer; type natural = integer; type interfaces__unsigned_16 = integer; type interfaces__unsigned_32 = integer; type interfaces__unsigned_64 = integer; type spark__unsigned__u6 = integer; type spark__unsigned__u7 = integer; type spark__unsigned__u16 = integer; type spark__unsigned__u32 = integer; type spark__unsigned__u64 = integer; type hash_bit_length = integer; type tweak_value = record byte_count_lsb : spark__unsigned__u64; byte_count_msb : spark__unsigned__u32; reserved : spark__unsigned__u16; tree_level : spark__unsigned__u7; bit_pad : boolean; field_type : spark__unsigned__u6; first_block : boolean; final_block : boolean end; type context_header = record tweak_words : tweak_value; hash_bit_len : hash_bit_length; byte_count : natural end; const hash_bit_length__base__first : integer = pending; const hash_bit_length__base__last : integer = pending; const spark__unsigned__u64__base__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__base__last : interfaces__unsigned_64 = pending; const spark__unsigned__u32__base__first : interfaces__unsigned_32 = pending; const spark__unsigned__u32__base__last : interfaces__unsigned_32 = pending; const spark__unsigned__u16__base__first : interfaces__unsigned_16 = pending; const spark__unsigned__u16__base__last : interfaces__unsigned_16 = pending; const spark__unsigned__u7__base__first : spark__unsigned__u7 = pending; const spark__unsigned__u7__base__last : spark__unsigned__u7 = pending; const spark__unsigned__u6__base__first : spark__unsigned__u6 = pending; const spark__unsigned__u6__base__last : spark__unsigned__u6 = pending; const interfaces__unsigned_64__base__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_32__base__first : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__base__last : interfaces__unsigned_32 = pending; const interfaces__unsigned_16__base__first : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__base__last : interfaces__unsigned_16 = pending; const natural__base__first : integer = pending; const natural__base__last : integer = pending; const integer__base__first : integer = pending; const integer__base__last : integer = pending; const context_header__size : integer = pending; const tweak_value__size : integer = pending; const hash_bit_length__first : integer = pending; const hash_bit_length__last : integer = pending; const hash_bit_length__size : integer = pending; const spark__unsigned__u64__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__last : interfaces__unsigned_64 = pending; const spark__unsigned__u64__modulus : integer = pending; const spark__unsigned__u64__size : integer = pending; const spark__unsigned__u32__first : interfaces__unsigned_32 = pending; const spark__unsigned__u32__last : interfaces__unsigned_32 = pending; const spark__unsigned__u32__modulus : integer = pending; const spark__unsigned__u32__size : integer = pending; const spark__unsigned__u16__first : interfaces__unsigned_16 = pending; const spark__unsigned__u16__last : 
interfaces__unsigned_16 = pending; const spark__unsigned__u16__modulus : integer = pending; const spark__unsigned__u16__size : integer = pending; const spark__unsigned__u7__first : spark__unsigned__u7 = pending; const spark__unsigned__u7__last : spark__unsigned__u7 = pending; const spark__unsigned__u7__modulus : integer = pending; const spark__unsigned__u7__size : integer = pending; const spark__unsigned__u6__first : spark__unsigned__u6 = pending; const spark__unsigned__u6__last : spark__unsigned__u6 = pending; const spark__unsigned__u6__modulus : integer = pending; const spark__unsigned__u6__size : integer = pending; const interfaces__unsigned_64__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__modulus : integer = pending; const interfaces__unsigned_64__size : integer = pending; const interfaces__unsigned_32__first : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__last : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__modulus : integer = pending; const interfaces__unsigned_32__size : integer = pending; const interfaces__unsigned_16__first : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__last : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__modulus : integer = pending; const interfaces__unsigned_16__size : integer = pending; const natural__first : integer = pending; const natural__last : integer = pending; const natural__size : integer = pending; const integer__first : integer = pending; const integer__last : integer = pending; const integer__size : integer = pending; var field_type : spark__unsigned__u6; var first_block : boolean; var final_block : boolean; var ctx : context_header; end; spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_start_new_type.rls0000644000175000017500000001470011712513676030135 0ustar eugeneugen /*********************************************************/ /*Proof Rule Declarations*/ /*Examiner Pro Edition*/ /*********************************************************/ /*procedure Skein.Skein_Start_New_Type*/ rule_family skein_start__rules: X requires [X:any] & X <= Y requires [X:ire, Y:ire] & X >= Y requires [X:ire, Y:ire]. skein_start__rules(1): integer__size >= 0 may_be_deduced. skein_start__rules(2): integer__first may_be_replaced_by -2147483648. skein_start__rules(3): integer__last may_be_replaced_by 2147483647. skein_start__rules(4): integer__base__first may_be_replaced_by -2147483648. skein_start__rules(5): integer__base__last may_be_replaced_by 2147483647. skein_start__rules(6): natural__size >= 0 may_be_deduced. skein_start__rules(7): natural__first may_be_replaced_by 0. skein_start__rules(8): natural__last may_be_replaced_by 2147483647. skein_start__rules(9): natural__base__first may_be_replaced_by -2147483648. skein_start__rules(10): natural__base__last may_be_replaced_by 2147483647. skein_start__rules(11): interfaces__unsigned_16__size >= 0 may_be_deduced. skein_start__rules(12): interfaces__unsigned_16__size may_be_replaced_by 16. skein_start__rules(13): interfaces__unsigned_16__first may_be_replaced_by 0. skein_start__rules(14): interfaces__unsigned_16__last may_be_replaced_by 65535. skein_start__rules(15): interfaces__unsigned_16__base__first may_be_replaced_by 0. skein_start__rules(16): interfaces__unsigned_16__base__last may_be_replaced_by 65535. skein_start__rules(17): interfaces__unsigned_16__modulus may_be_replaced_by 65536. 
skein_start__rules(18): interfaces__unsigned_32__size >= 0 may_be_deduced. skein_start__rules(19): interfaces__unsigned_32__size may_be_replaced_by 32. skein_start__rules(20): interfaces__unsigned_32__first may_be_replaced_by 0. skein_start__rules(21): interfaces__unsigned_32__last may_be_replaced_by 4294967295. skein_start__rules(22): interfaces__unsigned_32__base__first may_be_replaced_by 0. skein_start__rules(23): interfaces__unsigned_32__base__last may_be_replaced_by 4294967295. skein_start__rules(24): interfaces__unsigned_32__modulus may_be_replaced_by 4294967296. skein_start__rules(25): interfaces__unsigned_64__size >= 0 may_be_deduced. skein_start__rules(26): interfaces__unsigned_64__size may_be_replaced_by 64. skein_start__rules(27): interfaces__unsigned_64__first may_be_replaced_by 0. skein_start__rules(28): interfaces__unsigned_64__last may_be_replaced_by 18446744073709551615. skein_start__rules(29): interfaces__unsigned_64__base__first may_be_replaced_by 0. skein_start__rules(30): interfaces__unsigned_64__base__last may_be_replaced_by 18446744073709551615. skein_start__rules(31): interfaces__unsigned_64__modulus may_be_replaced_by 18446744073709551616. skein_start__rules(32): spark__unsigned__u6__size >= 0 may_be_deduced. skein_start__rules(33): spark__unsigned__u6__first may_be_replaced_by 0. skein_start__rules(34): spark__unsigned__u6__last may_be_replaced_by 63. skein_start__rules(35): spark__unsigned__u6__base__first may_be_replaced_by 0. skein_start__rules(36): spark__unsigned__u6__base__last may_be_replaced_by 63. skein_start__rules(37): spark__unsigned__u6__modulus may_be_replaced_by 64. skein_start__rules(38): spark__unsigned__u7__size >= 0 may_be_deduced. skein_start__rules(39): spark__unsigned__u7__first may_be_replaced_by 0. skein_start__rules(40): spark__unsigned__u7__last may_be_replaced_by 127. skein_start__rules(41): spark__unsigned__u7__base__first may_be_replaced_by 0. skein_start__rules(42): spark__unsigned__u7__base__last may_be_replaced_by 127. skein_start__rules(43): spark__unsigned__u7__modulus may_be_replaced_by 128. skein_start__rules(44): spark__unsigned__u16__size >= 0 may_be_deduced. skein_start__rules(45): spark__unsigned__u16__first may_be_replaced_by 0. skein_start__rules(46): spark__unsigned__u16__last may_be_replaced_by 65535. skein_start__rules(47): spark__unsigned__u16__base__first may_be_replaced_by 0. skein_start__rules(48): spark__unsigned__u16__base__last may_be_replaced_by 65535. skein_start__rules(49): spark__unsigned__u16__modulus may_be_replaced_by 65536. skein_start__rules(50): spark__unsigned__u32__size >= 0 may_be_deduced. skein_start__rules(51): spark__unsigned__u32__first may_be_replaced_by 0. skein_start__rules(52): spark__unsigned__u32__last may_be_replaced_by 4294967295. skein_start__rules(53): spark__unsigned__u32__base__first may_be_replaced_by 0. skein_start__rules(54): spark__unsigned__u32__base__last may_be_replaced_by 4294967295. skein_start__rules(55): spark__unsigned__u32__modulus may_be_replaced_by 4294967296. skein_start__rules(56): spark__unsigned__u64__size >= 0 may_be_deduced. skein_start__rules(57): spark__unsigned__u64__first may_be_replaced_by 0. skein_start__rules(58): spark__unsigned__u64__last may_be_replaced_by 18446744073709551615. skein_start__rules(59): spark__unsigned__u64__base__first may_be_replaced_by 0. skein_start__rules(60): spark__unsigned__u64__base__last may_be_replaced_by 18446744073709551615. skein_start__rules(61): spark__unsigned__u64__modulus may_be_replaced_by 18446744073709551616. 
skein_start__rules(62): hash_bit_length__size >= 0 may_be_deduced. skein_start__rules(63): hash_bit_length__first may_be_replaced_by 0. skein_start__rules(64): hash_bit_length__last may_be_replaced_by 2147483640. skein_start__rules(65): hash_bit_length__base__first may_be_replaced_by -2147483648. skein_start__rules(66): hash_bit_length__base__last may_be_replaced_by 2147483647. skein_start__rules(67): tweak_value__size >= 0 may_be_deduced. skein_start__rules(68): tweak_value__size may_be_replaced_by 128. skein_start__rules(69): A = B may_be_deduced_from [goal(checktype(A,tweak_value)), goal(checktype(B,tweak_value)), fld_byte_count_lsb(A) = fld_byte_count_lsb(B), fld_byte_count_msb(A) = fld_byte_count_msb(B), fld_reserved(A) = fld_reserved(B), fld_tree_level(A) = fld_tree_level(B), fld_bit_pad(A) = fld_bit_pad(B), fld_field_type(A) = fld_field_type(B), fld_first_block(A) = fld_first_block(B), fld_final_block(A) = fld_final_block(B)]. skein_start__rules(70): context_header__size >= 0 may_be_deduced. skein_start__rules(71): A = B may_be_deduced_from [goal(checktype(A,context_header)), goal(checktype(B,context_header)), fld_tweak_words(A) = fld_tweak_words(B), fld_hash_bit_len(A) = fld_hash_bit_len(B), fld_byte_count(A) = fld_byte_count(B)]. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_update.log0000644000175000017500000000347211712513676026724 0ustar eugeneugenSPARK Simplifier Pro Edition Reading skein_512_update.fdl (for inherited FDL type declarations) Reading skein.rlu (for user-defined proof rules) Processing skein_512_update.vcg ... Simplified VC: 1 - All conclusions proved Simplified VC: 2 - All conclusions proved Simplified VC: 3 - All conclusions proved Simplified VC: 4 - All conclusions proved Simplified VC: 5 - All conclusions proved Simplified VC: 6 - All conclusions proved Simplified VC: 7 - All conclusions proved Simplified VC: 8 - All conclusions proved Simplified VC: 9 - All conclusions proved Simplified VC: 10 - All conclusions proved Simplified VC: 11 - All conclusions proved Simplified VC: 12 - All conclusions proved Simplified VC: 13 - All conclusions proved Simplified VC: 14 - All conclusions proved Simplified VC: 15 - 2 conclusions remain unproven Simplified VC: 16 - All conclusions proved Simplified VC: 17 - All conclusions proved Simplified VC: 18 - All conclusions proved Simplified VC: 19 - 1 conclusion remains unproven Simplified VC: 20 - All conclusions proved Simplified VC: 21 - All conclusions proved Simplified VC: 22 - All conclusions proved Simplified VC: 23 - All conclusions proved Simplified VC: 24 - All conclusions proved Simplified VC: 25 - 2 conclusions remain unproven Simplified VC: 26 - All conclusions proved Simplified VC: 27 - All conclusions proved Simplified VC: 28 - Proved by contradiction within hypotheses Simplified VC: 29 - All conclusions proved Simplified VC: 30 - All conclusions proved Simplified VC: 31 - All conclusions proved Simplified VC: 32 - All conclusions proved Simplified VC: 33 - All conclusions proved Simplified VC: 34 - All conclusions proved Simplified VC: 35 - All conclusions proved Simplified VC: 36 - All conclusions proved Automatic simplification completed. Simplified output sent to skein_512_update.siv. 
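The FDL declarations in skein_start_new_type.fdl and the ranges fixed by the skein_start__rules above follow the Examiner's package__entity naming convention (for example, spark__unsigned__u6 stands for SPARK.Unsigned.U6). They correspond to record declarations in the Skein package of roughly the following shape; this is a sketch reconstructed from the FDL and the proof rules only, so subtype names, field order and any representation clauses in the shipped source may differ.

   type Tweak_Value is record
      Byte_Count_LSB : SPARK.Unsigned.U64;   -- 0 .. 2**64 - 1, rules (57)/(58)
      Byte_Count_MSB : SPARK.Unsigned.U32;   -- 0 .. 2**32 - 1, rules (51)/(52)
      Reserved       : SPARK.Unsigned.U16;   -- 0 .. 65535,     rules (45)/(46)
      Tree_Level     : SPARK.Unsigned.U7;    -- 0 .. 127,       rules (39)/(40)
      Bit_Pad        : Boolean;
      Field_Type     : SPARK.Unsigned.U6;    -- 0 .. 63,        rules (33)/(34)
      First_Block    : Boolean;
      Final_Block    : Boolean;
   end record;  -- tweak_value__size is 128 bits per rule (68)

   type Context_Header is record
      Tweak_Words  : Tweak_Value;
      Hash_Bit_Len : Hash_Bit_Length;        -- 0 .. 2147483640, rules (63)/(64)
      Byte_Count   : Natural;
   end record;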
spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/put_64_lsb_first.slg0000644000175000017500000012754711712513676027070 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Put_64_LSB_First RRS The following user defined rule files have been read: &&& skein.rlu SEM No semantic checks are performed on the rules. @@@@@@@@@@ VC: procedure_put_64_lsb_first_1. @@@@@@@@@@ %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, dst__index__subtype__1__first <= i___1 and i___1 <= dst__index__subtype__1__last -> spark__unsigned__byte__first <= element(dst, [i___1]) and element( dst, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__u64__first <= element(src, [i___1]) and element(src, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule put_64_lsb_f_rules(2). This was achieved by replacing all occurrences of integer__first by: - 2147483648. New C1: byte_count >= - 2147483647 New C5: true -S- Applied substitution rule put_64_lsb_f_rules(3). This was achieved by replacing all occurrences of integer__last by: 2147483647. New C2: byte_count <= 2147483648 New C6: true -S- Applied substitution rule put_64_lsb_f_rules(4). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New C3: byte_count >= - 2147483647 -S- Applied substitution rule put_64_lsb_f_rules(5). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New C4: byte_count <= 2147483648 *** Proved C1: byte_count >= - 2147483647 using hypothesis H11. *** Proved C5: true *** Proved C6: true *** Proved C3: byte_count >= - 2147483647 using hypothesis H11. -S- Applied substitution rule put_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H6: dst_offset >= 0 New H9: byte_count >= 0 -S- Applied substitution rule put_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: dst_offset <= 2147483647 New H10: byte_count <= 2147483647 -S- Applied substitution rule put_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H5: for_all(i___1 : integer, dst__index__subtype__1__first <= i___1 and i___1 <= dst__index__subtype__1__last -> 0 <= element(dst, [i___1] ) and element(dst, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule put_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H5: for_all(i___1 : integer, dst__index__subtype__1__first <= i___1 and i___1 <= dst__index__subtype__1__last -> 0 <= element(dst, [i___1] ) and element(dst, [i___1]) <= 255) -S- Applied substitution rule put_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule put_64_lsb_f_rules(33). 
This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 18446744073709551615) *** Proved C2: byte_count <= 2147483648 using hypothesis H10. *** Proved C4: byte_count <= 2147483648 using hypothesis H10. *** PROVED VC. @@@@@@@@@@ VC: procedure_put_64_lsb_first_2. @@@@@@@@@@ %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, dst__index__subtype__1__first <= i___1 and i___1 <= dst__index__subtype__1__last -> spark__unsigned__byte__first <= element(dst, [i___1]) and element( dst, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__u64__first <= element(src, [i___1]) and element(src, [i___1]) <= spark__unsigned__u64__last) %%% Simplified C1 on reading formula in, to give: %%% C1: 1 <= byte_count -> byte_count - 1 >= natural__first and byte_count - 1 <= natural__last %%% Simplified C2 on reading formula in, to give: %%% C2: 1 <= byte_count -> 0 >= natural__first and 0 <= natural__last -S- Applied substitution rule put_64_lsb_f_rules(2). This was achieved by replacing all occurrences of integer__first by: - 2147483648. New H12: byte_count >= - 2147483647 New H16: true -S- Applied substitution rule put_64_lsb_f_rules(3). This was achieved by replacing all occurrences of integer__last by: 2147483647. New H13: byte_count <= 2147483648 New H17: true -S- Applied substitution rule put_64_lsb_f_rules(4). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H14: byte_count >= - 2147483647 -S- Applied substitution rule put_64_lsb_f_rules(5). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H15: byte_count <= 2147483648 -S- Applied substitution rule put_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H6: dst_offset >= 0 New H9: byte_count >= 0 New C1: 1 <= byte_count -> byte_count >= 1 and byte_count - 1 <= natural__last New C2: 1 <= byte_count -> 0 <= natural__last -S- Applied substitution rule put_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: dst_offset <= 2147483647 New H10: byte_count <= 2147483647 New C1: 1 <= byte_count -> byte_count >= 1 and byte_count <= 2147483648 New C2: true -S- Applied substitution rule put_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H5: for_all(i___1 : integer, dst__index__subtype__1__first <= i___1 and i___1 <= dst__index__subtype__1__last -> 0 <= element(dst, [i___1] ) and element(dst, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule put_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H5: for_all(i___1 : integer, dst__index__subtype__1__first <= i___1 and i___1 <= dst__index__subtype__1__last -> 0 <= element(dst, [i___1] ) and element(dst, [i___1]) <= 255) -S- Applied substitution rule put_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule put_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 18446744073709551615) *** Proved C1: 1 <= byte_count -> byte_count >= 1 and byte_count <= 2147483648 using hypotheses H11 & H13. *** Proved C2: true *** PROVED VC. @@@@@@@@@@ VC: procedure_put_64_lsb_first_3. @@@@@@@@@@ %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, dst__index__subtype__1__first <= i___1 and i___1 <= dst__index__subtype__1__last -> spark__unsigned__byte__first <= element(dst, [i___1]) and element( dst, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__u64__first <= element(src, [i___1]) and element(src, [i___1]) <= spark__unsigned__u64__last) %%% Simplified H18 on reading formula in, to give: %%% H18: 1 <= byte_count -> byte_count - 1 >= natural__first and byte_count - 1 <= natural__last %%% Simplified H19 on reading formula in, to give: %%% H19: 1 <= byte_count -> 0 >= natural__first and 0 <= natural__last %%% Simplified H20 on reading formula in, to give: %%% H20: 1 <= byte_count %%% Simplified C2 on reading formula in, to give: %%% C2: true %%% Simplified C3 on reading formula in, to give: %%% C3: 1 <= byte_count %%% Simplified C7 on reading formula in, to give: %%% C7: - 7 <= src__index__subtype__1__last * 8 %%% Simplified C8 on reading formula in, to give: %%% C8: for_all(i___1 : integer, dst__index__subtype__1__first <= i___1 and i___1 <= dst__index__subtype__1__last -> spark__unsigned__byte__first <= element(dst, [i___1]) and element( dst, [i___1]) <= spark__unsigned__byte__last) %%% Simplified C11 on reading formula in, to give: %%% C11: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__u64__first <= element(src, [i___1]) and element(src, [i___1]) <= spark__unsigned__u64__last) %%% Simplified C20 on reading formula in, to give: %%% C20: true %%% Simplified C21 on reading formula in, to give: %%% C21: 1 <= byte_count *** Proved C1: byte_count >= 1 using hypothesis H11. *** Proved C2: true *** Proved C3: 1 <= byte_count using hypothesis H20. *** Proved C4: 0 < byte_count using hypothesis H20. *** Proved C5: byte_count <= (src__index__subtype__1__last + 1) * 8 using hypothesis H4. *** Proved C6: 0 < (src__index__subtype__1__last + 1) * 8 using hypotheses H4 & H20. *** Proved C8: for_all(i___1 : integer, dst__index__subtype__1__first <= i___1 and i___1 <= dst__index__subtype__1__last -> spark__unsigned__byte__first <= element(dst, [i___1]) and element( dst, [i___1]) <= spark__unsigned__byte__last) using hypothesis H5. *** Proved C9: dst_offset >= natural__first using hypothesis H6. *** Proved C10: dst_offset <= natural__last using hypothesis H7. 
*** Proved C11: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__u64__first <= element(src, [i___1]) and element(src, [i___1]) <= spark__unsigned__u64__last) using hypothesis H8. *** Proved C12: byte_count >= natural__first using hypothesis H9. *** Proved C13: byte_count <= natural__last using hypothesis H10. *** Proved C14: dst__index__subtype__1__first = 0 using hypothesis H1. *** Proved C15: src__index__subtype__1__first = 0 using hypothesis H2. *** Proved C16: dst__index__subtype__1__last >= dst_offset + (byte_count - 1) using hypothesis H3. *** Proved C17: byte_count <= (src__index__subtype__1__last + 1) * 8 using hypothesis H4. *** Proved C19: 0 <= natural__last using hypotheses H10 & H20. *** Proved C20: true *** Proved C21: 1 <= byte_count using hypothesis H20. -S- Applied substitution rule put_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H6: dst_offset >= 0 New H9: byte_count >= 0 New H18: 1 <= byte_count -> byte_count >= 1 and byte_count - 1 <= natural__last New H19: 1 <= byte_count -> 0 <= natural__last New C18: true *** Proved C18: true +++ Using "A->B, A |- B" on hypotheses H18 & H20 yields a new hypothesis: +++ H21: byte_count >= 1 and byte_count - 1 <= natural__last +++ Using "A->B, A |- B" on hypotheses H19 & H20 yields a new hypothesis: +++ H22: 0 <= natural__last -S- Applied substitution rule put_64_lsb_f_rules(2). This was achieved by replacing all occurrences of integer__first by: - 2147483648. New H12: byte_count >= - 2147483647 New H16: true -S- Applied substitution rule put_64_lsb_f_rules(3). This was achieved by replacing all occurrences of integer__last by: 2147483647. New H13: byte_count <= 2147483648 New H17: true -S- Applied substitution rule put_64_lsb_f_rules(4). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H14: byte_count >= - 2147483647 -S- Applied substitution rule put_64_lsb_f_rules(5). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H15: byte_count <= 2147483648 -S- Applied substitution rule put_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: dst_offset <= 2147483647 New H10: byte_count <= 2147483647 New H18: 1 <= byte_count -> byte_count >= 1 and byte_count <= 2147483648 New H19: true New H21: byte_count >= 1 and byte_count <= 2147483648 New H22: true -S- Applied substitution rule put_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H5: for_all(i___1 : integer, dst__index__subtype__1__first <= i___1 and i___1 <= dst__index__subtype__1__last -> 0 <= element(dst, [i___1] ) and element(dst, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule put_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H5: for_all(i___1 : integer, dst__index__subtype__1__first <= i___1 and i___1 <= dst__index__subtype__1__last -> 0 <= element(dst, [i___1] ) and element(dst, [i___1]) <= 255) -S- Applied substitution rule put_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule put_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H8: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 18446744073709551615) --- Eliminated hypothesis H16 (true-hypothesis). --- Eliminated hypothesis H17 (true-hypothesis). --- Eliminated hypothesis H19 (true-hypothesis). --- Eliminated hypothesis H22 (true-hypothesis). --- Eliminated hypothesis H14 (duplicate of H12). --- Eliminated hypothesis H15 (duplicate of H13). --- Eliminated hypothesis H20 (duplicate of H11). >>> Using "A->B, A |- B" on H18, given H20, we simplify the former to: >>> H18: byte_count >= 1 and byte_count <= 2147483648 --- Eliminated hypothesis H9 (redundant, given H11). --- Eliminated hypothesis H12 (redundant, given H11). --- Eliminated hypothesis H13 (redundant, given H10). *** Proved C7: - 7 <= src__index__subtype__1__last * 8 via its standard form, which is: Std.Fm C7: 8 * src__index__subtype__1__last > - 8 using hypotheses H4 & H11. *** PROVED VC. @@@@@@@@@@ VC: procedure_put_64_lsb_first_4. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, dst__index__subtype__1__first <= i___1 and i___1 <= dst__index__subtype__1__last -> spark__unsigned__byte__first <= element(dst, [i___1]) and element( dst, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H11 on reading formula in, to give: %%% H11: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__u64__first <= element(src, [i___1]) and element(src, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H20 has been replaced by "true". (It is already present, as H2) . --- Hypothesis H21 has been replaced by "true". (It is already present, as H3) . %%% Simplified H30 on reading formula in, to give: %%% H30: true --- Hypothesis H34 has been replaced by "true". (It is already present, as H23). --- Hypothesis H37 has been replaced by "true". (It is already present, as H30). --- Hypothesis H46 has been replaced by "true". (It is already present, as H44). --- Hypothesis H47 has been replaced by "true". (It is already present, as H45). 
%%% Simplified C2 on reading formula in, to give: %%% C2: loop__1__n >= - 1 %%% Simplified C8 on reading formula in, to give: %%% C8: for_all(i___1 : integer, dst__index__subtype__1__first <= i___1 and i___1 <= dst__index__subtype__1__last -> spark__unsigned__byte__first <= element(update(dst, [dst_offset + loop__1__n], bit__and(spark__unsigned__shift_right_64(element(src, [ loop__1__n div 8]), 8 * (loop__1__n mod 8)), 255)), [i___1]) and element(update(dst, [dst_offset + loop__1__n], bit__and( spark__unsigned__shift_right_64(element(src, [loop__1__n div 8]), 8 * (loop__1__n mod 8)), 255)), [i___1]) <= spark__unsigned__byte__last) %%% Simplified C11 on reading formula in, to give: %%% C11: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__u64__first <= element(src, [i___1]) and element(src, [i___1]) <= spark__unsigned__u64__last) %%% Simplified C20 on reading formula in, to give: %%% C20: loop__1__n >= - 1 *** Proved C1: byte_count >= 1 using hypothesis H1. *** Proved C2: loop__1__n >= - 1 using hypothesis H2. *** Proved C4: loop__1__n + 1 < byte_count using hypotheses H3 & H48. *** Proved C5: byte_count <= (src__index__subtype__1__last + 1) * 8 using hypothesis H5. *** Proved C8: for_all(i___1 : integer, dst__index__subtype__1__first <= i___1 and i___1 <= dst__index__subtype__1__last -> spark__unsigned__byte__first <= element(update(dst, [dst_offset + loop__1__n], bit__and(spark__unsigned__shift_right_64(element(src, [ loop__1__n div 8]), 8 * (loop__1__n mod 8)), 255)), [i___1]) and element(update(dst, [dst_offset + loop__1__n], bit__and( spark__unsigned__shift_right_64(element(src, [loop__1__n div 8]), 8 * (loop__1__n mod 8)), 255)), [i___1]) <= spark__unsigned__byte__last) using hypotheses H8, H38, H39, H44 & H45. *** Proved C9: dst_offset >= natural__first using hypothesis H9. *** Proved C10: dst_offset <= natural__last using hypothesis H10. *** Proved C11: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__u64__first <= element(src, [i___1]) and element(src, [i___1]) <= spark__unsigned__u64__last) using hypothesis H11. *** Proved C12: byte_count >= natural__first using hypothesis H12. *** Proved C13: byte_count <= natural__last using hypothesis H13. *** Proved C14: dst__index__subtype__1__first = 0 using hypothesis H14. *** Proved C15: src__index__subtype__1__first = 0 using hypothesis H15. *** Proved C16: dst__index__subtype__1__last >= dst_offset + (byte_count - 1) using hypothesis H16. *** Proved C17: byte_count <= (src__index__subtype__1__last + 1) * 8 using hypothesis H5. *** Proved C18: loop__1__n + 1 >= natural__first using hypothesis H18. *** Proved C20: loop__1__n >= - 1 using hypothesis H2. -S- Applied substitution rule put_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H10: dst_offset <= 2147483647 New H13: byte_count <= 2147483647 New H19: loop__1__n <= 2147483647 New C19: loop__1__n <= 2147483646 *** Proved C19: loop__1__n <= 2147483646 using hypotheses H4 & H13. >>> Restructured hypothesis H48 into: >>> H48: loop__1__n <> byte_count - 1 -S- Applied substitution rule put_64_lsb_f_rules(4). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. 
New H26: 8 * (loop__1__n mod 8) >= - 2147483648 New H28: loop__1__n mod 8 >= - 2147483648 New H35: loop__1__n div 8 >= - 2147483648 New H40: dst_offset + loop__1__n >= - 2147483648 -S- Applied substitution rule put_64_lsb_f_rules(5). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H27: 8 * (loop__1__n mod 8) <= 2147483647 New H29: loop__1__n mod 8 <= 2147483647 New H36: loop__1__n div 8 <= 2147483647 New H41: dst_offset + loop__1__n <= 2147483647 -S- Applied substitution rule put_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H9: dst_offset >= 0 New H12: byte_count >= 0 New H18: loop__1__n >= 0 -S- Applied substitution rule put_64_lsb_f_rules(20). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H31: element(src, [loop__1__n div 8]) >= 0 New H42: spark__unsigned__shift_right_64(element(src, [loop__1__n div 8]) , 8 * (loop__1__n mod 8)) >= 0 -S- Applied substitution rule put_64_lsb_f_rules(21). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H32: element(src, [loop__1__n div 8]) <= 18446744073709551615 New H43: spark__unsigned__shift_right_64(element(src, [loop__1__n div 8]) , 8 * (loop__1__n mod 8)) <= 18446744073709551615 -S- Applied substitution rule put_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___1 : integer, dst__index__subtype__1__first <= i___1 and i___1 <= dst__index__subtype__1__last -> 0 <= element(dst, [i___1] ) and element(dst, [i___1]) <= spark__unsigned__byte__last) New H44: bit__and(spark__unsigned__shift_right_64(element(src, [ loop__1__n div 8]), 8 * (loop__1__n mod 8)), 255) >= 0 -S- Applied substitution rule put_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H45: bit__and(spark__unsigned__shift_right_64(element(src, [ loop__1__n div 8]), 8 * (loop__1__n mod 8)), 255) <= 255 New H8: for_all(i___1 : integer, dst__index__subtype__1__first <= i___1 and i___1 <= dst__index__subtype__1__last -> 0 <= element(dst, [i___1] ) and element(dst, [i___1]) <= 255) -S- Applied substitution rule put_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H11: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule put_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H11: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 18446744073709551615) -S- Applied substitution rule put_64_lsb_f_rules(38). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H24: 8 * (loop__1__n mod 8) >= 0 -S- Applied substitution rule put_64_lsb_f_rules(39). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H25: 8 * (loop__1__n mod 8) <= 64 %%% Hypotheses H3 & H48 together imply that loop__1__n < byte_count - 1. H3 & H48 have therefore been deleted and a new H49 added to this effect. 
%%% Simplified H44 further, to give: %%% H44: spark__unsigned__shift_right_64(element(src, [loop__1__n div 8]) , 8 * (loop__1__n mod 8)) mod 256 >= 0 %%% Simplified H45 further, to give: %%% H45: spark__unsigned__shift_right_64(element(src, [loop__1__n div 8]) , 8 * (loop__1__n mod 8)) mod 256 <= 255 *** Proved C3: loop__1__n + 1 <= byte_count - 1 via its standard form, which is: Std.Fm C3: byte_count - loop__1__n > 1 using hypothesis H49. *** Proved C21: loop__1__n + 1 <= byte_count - 1 via its standard form, which is: Std.Fm C21: byte_count - loop__1__n > 1 using hypothesis H49. --- Eliminated hypothesis H17 (true-hypothesis). --- Eliminated hypothesis H20 (true-hypothesis). --- Eliminated hypothesis H21 (true-hypothesis). --- Eliminated hypothesis H30 (true-hypothesis). --- Eliminated hypothesis H34 (true-hypothesis). --- Eliminated hypothesis H37 (true-hypothesis). --- Eliminated hypothesis H46 (true-hypothesis). --- Eliminated hypothesis H47 (true-hypothesis). --- Eliminated hypothesis H18 (duplicate of H2). --- Eliminated hypothesis H7 (duplicate of H6). --- Eliminated hypothesis H1 (redundant, given H2 & H49). --- Eliminated hypothesis H4 (redundant, given H49). --- Eliminated hypothesis H6 (redundant, given H5 & H49). --- Eliminated hypothesis H12 (redundant, given H2 & H49). --- Eliminated hypothesis H19 (redundant, given H13 & H49). --- Eliminated hypothesis H26 (redundant, given H24). --- Eliminated hypothesis H27 (redundant, given H25). --- Eliminated hypothesis H28 (redundant, given H24). --- Eliminated hypothesis H29 (redundant, given H25). --- Eliminated hypothesis H35 (redundant, given H22). --- Eliminated hypothesis H39 (redundant, given H16 & H49). --- Eliminated hypothesis H40 (redundant, given H14 & H38). --- Eliminated conclusion C7, which is a duplicate of C6. *** Proved C6: loop__1__n + 1 < (src__index__subtype__1__last + 1) * 8 via its standard form, which is: Std.Fm C6: - loop__1__n + 8 * src__index__subtype__1__last > - 7 using hypotheses H5 & H49. *** PROVED VC. @@@@@@@@@@ VC: procedure_put_64_lsb_first_5. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, dst__index__subtype__1__first <= i___1 and i___1 <= dst__index__subtype__1__last -> spark__unsigned__byte__first <= element(dst, [i___1]) and element( dst, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H11 on reading formula in, to give: %%% H11: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__u64__first <= element(src, [i___1]) and element(src, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H20 has been replaced by "true". (It is already present, as H2) . --- Hypothesis H21 has been replaced by "true". (It is already present, as H3) . -S- Applied substitution rule put_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H9: dst_offset >= 0 New H12: byte_count >= 0 New H18: loop__1__n >= 0 -S- Applied substitution rule put_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H10: dst_offset <= 2147483647 New H13: byte_count <= 2147483647 New H19: loop__1__n <= 2147483647 -S- Applied substitution rule put_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. 
New H8: for_all(i___1 : integer, dst__index__subtype__1__first <= i___1 and i___1 <= dst__index__subtype__1__last -> 0 <= element(dst, [i___1] ) and element(dst, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule put_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___1 : integer, dst__index__subtype__1__first <= i___1 and i___1 <= dst__index__subtype__1__last -> 0 <= element(dst, [i___1] ) and element(dst, [i___1]) <= 255) -S- Applied substitution rule put_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H11: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule put_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H11: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 18446744073709551615) --- Eliminated hypothesis H17 (true-hypothesis). --- Eliminated hypothesis H20 (true-hypothesis). --- Eliminated hypothesis H21 (true-hypothesis). --- Eliminated hypothesis H18 (duplicate of H2). --- Eliminated hypothesis H4 (duplicate of H3). --- Eliminated hypothesis H7 (duplicate of H6). --- Eliminated hypothesis H12 (redundant, given H1). --- Eliminated hypothesis H19 (redundant, given H3 & H13). *** Proved C1: loop__1__n div 8 >= 0 using hypotheses H2 & H14. *** Proved C2: loop__1__n div 8 <= src__index__subtype__1__last using hypothesis H7. *** PROVED VC. @@@@@@@@@@ VC: procedure_put_64_lsb_first_6. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, dst__index__subtype__1__first <= i___1 and i___1 <= dst__index__subtype__1__last -> spark__unsigned__byte__first <= element(dst, [i___1]) and element( dst, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H11 on reading formula in, to give: %%% H11: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__u64__first <= element(src, [i___1]) and element(src, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H20 has been replaced by "true". (It is already present, as H2) . --- Hypothesis H21 has been replaced by "true". (It is already present, as H3) . %%% Simplified C7 on reading formula in, to give: %%% C7: true %%% Simplified C14 on reading formula in, to give: %%% C14: true *** Proved C7: true *** Proved C10: loop__1__n div 8 >= src__index__subtype__1__first using hypotheses H15 & H22. *** Proved C11: loop__1__n div 8 <= src__index__subtype__1__last using hypothesis H23. *** Proved C14: true *** Proved C16: dst_offset + loop__1__n <= dst__index__subtype__1__last using hypotheses H3 & H16. -S- Applied substitution rule put_64_lsb_f_rules(38). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: 8 * (loop__1__n mod 8) >= 0 -S- Applied substitution rule put_64_lsb_f_rules(39). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: 8 * (loop__1__n mod 8) <= 64 -S- Applied substitution rule put_64_lsb_f_rules(4). 
This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New C3: 8 * (loop__1__n mod 8) >= - 2147483648 New C5: loop__1__n mod 8 >= - 2147483648 New C12: loop__1__n div 8 >= - 2147483648 New C17: dst_offset + loop__1__n >= - 2147483648 -S- Applied substitution rule put_64_lsb_f_rules(5). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New C4: 8 * (loop__1__n mod 8) <= 2147483647 New C6: loop__1__n mod 8 <= 2147483647 New C13: loop__1__n div 8 <= 2147483647 New C18: dst_offset + loop__1__n <= 2147483647 -S- Applied substitution rule put_64_lsb_f_rules(20). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C8: element(src, [loop__1__n div 8]) >= 0 -S- Applied substitution rule put_64_lsb_f_rules(21). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C9: element(src, [loop__1__n div 8]) <= 18446744073709551615 *** Proved C5: loop__1__n mod 8 >= - 2147483648 *** Proved C12: loop__1__n div 8 >= - 2147483648 using hypothesis H22. *** Proved C6: loop__1__n mod 8 <= 2147483647 -S- Applied substitution rule put_64_lsb_f_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H9: dst_offset >= 0 New H12: byte_count >= 0 New H18: loop__1__n >= 0 -S- Applied substitution rule put_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H10: dst_offset <= 2147483647 New H13: byte_count <= 2147483647 New H19: loop__1__n <= 2147483647 -S- Applied substitution rule put_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___1 : integer, dst__index__subtype__1__first <= i___1 and i___1 <= dst__index__subtype__1__last -> 0 <= element(dst, [i___1] ) and element(dst, [i___1]) <= spark__unsigned__byte__last) -S- Applied substitution rule put_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___1 : integer, dst__index__subtype__1__first <= i___1 and i___1 <= dst__index__subtype__1__last -> 0 <= element(dst, [i___1] ) and element(dst, [i___1]) <= 255) -S- Applied substitution rule put_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H11: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule put_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H11: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 18446744073709551615) *** Proved C15: dst_offset + loop__1__n >= dst__index__subtype__1__first using hypotheses H2, H9 & H14. *** Proved C17: dst_offset + loop__1__n >= - 2147483648 using hypotheses H2 & H9. *** Proved C8: element(src, [loop__1__n div 8]) >= 0 using hypotheses H11, H15, H22 & H23. *** Proved C9: element(src, [loop__1__n div 8]) <= 18446744073709551615 using hypotheses H11, H15, H22 & H23. --- Eliminated hypothesis H17 (true-hypothesis). --- Eliminated hypothesis H20 (true-hypothesis). --- Eliminated hypothesis H21 (true-hypothesis). --- Eliminated hypothesis H18 (duplicate of H2). 
--- Eliminated hypothesis H4 (duplicate of H3). --- Eliminated hypothesis H7 (duplicate of H6). --- Eliminated hypothesis H12 (redundant, given H1). --- Eliminated hypothesis H19 (redundant, given H3 & H13). *** Proved C1: 8 * (loop__1__n mod 8) >= 0 *** Proved C2: 8 * (loop__1__n mod 8) <= 64 *** Proved C3: 8 * (loop__1__n mod 8) >= - 2147483648 *** Proved C4: 8 * (loop__1__n mod 8) <= 2147483647 *** Proved C13: loop__1__n div 8 <= 2147483647 using hypotheses H1, H3, H9 & H16. *** Proved C18: dst_offset + loop__1__n <= 2147483647 using hypotheses H3 & H16. *** PROVED VC. @@@@@@@@@@ VC: procedure_put_64_lsb_first_7. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: for_all(i___1 : integer, dst__index__subtype__1__first <= i___1 and i___1 <= dst__index__subtype__1__last -> spark__unsigned__byte__first <= element(dst, [i___1]) and element( dst, [i___1]) <= spark__unsigned__byte__last) %%% Simplified H11 on reading formula in, to give: %%% H11: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> spark__unsigned__u64__first <= element(src, [i___1]) and element(src, [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H17 has been replaced by "true". (It is already present, as H5) . --- Hypothesis H20 has been replaced by "true". (It is already present, as H2) . --- Hypothesis H21 has been replaced by "true". (It is already present, as H3) . %%% Simplified H30 on reading formula in, to give: %%% H30: true --- Hypothesis H34 has been replaced by "true". (It is already present, as H23). --- Hypothesis H37 has been replaced by "true". (It is already present, as H30). -S- Applied substitution rule put_64_lsb_f_rules(26). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H8: for_all(i___1 : integer, dst__index__subtype__1__first <= i___1 and i___1 <= dst__index__subtype__1__last -> 0 <= element(dst, [i___1] ) and element(dst, [i___1]) <= spark__unsigned__byte__last) New C1: bit__and(spark__unsigned__shift_right_64(element(src, [loop__1__n div 8]), 8 * (loop__1__n mod 8)), 255) >= 0 New C3: bit__and(spark__unsigned__shift_right_64(element(src, [loop__1__n div 8]), 8 * (loop__1__n mod 8)), 255) >= 0 -S- Applied substitution rule put_64_lsb_f_rules(27). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H8: for_all(i___1 : integer, dst__index__subtype__1__first <= i___1 and i___1 <= dst__index__subtype__1__last -> 0 <= element(dst, [i___1] ) and element(dst, [i___1]) <= 255) New C2: bit__and(spark__unsigned__shift_right_64(element(src, [loop__1__n div 8]), 8 * (loop__1__n mod 8)), 255) <= 255 New C4: bit__and(spark__unsigned__shift_right_64(element(src, [loop__1__n div 8]), 8 * (loop__1__n mod 8)), 255) <= 255 -S- Applied substitution rule put_64_lsb_f_rules(4). This was achieved by replacing all occurrences of integer__base__first by: - 2147483648. New H26: 8 * (loop__1__n mod 8) >= - 2147483648 New H28: loop__1__n mod 8 >= - 2147483648 New H35: loop__1__n div 8 >= - 2147483648 New H40: dst_offset + loop__1__n >= - 2147483648 -S- Applied substitution rule put_64_lsb_f_rules(5). This was achieved by replacing all occurrences of integer__base__last by: 2147483647. New H27: 8 * (loop__1__n mod 8) <= 2147483647 New H29: loop__1__n mod 8 <= 2147483647 New H36: loop__1__n div 8 <= 2147483647 New H41: dst_offset + loop__1__n <= 2147483647 -S- Applied substitution rule put_64_lsb_f_rules(7). 
This was achieved by replacing all occurrences of natural__first by: 0. New H9: dst_offset >= 0 New H12: byte_count >= 0 New H18: loop__1__n >= 0 -S- Applied substitution rule put_64_lsb_f_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H10: dst_offset <= 2147483647 New H13: byte_count <= 2147483647 New H19: loop__1__n <= 2147483647 -S- Applied substitution rule put_64_lsb_f_rules(20). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H31: element(src, [loop__1__n div 8]) >= 0 New H42: spark__unsigned__shift_right_64(element(src, [loop__1__n div 8]) , 8 * (loop__1__n mod 8)) >= 0 -S- Applied substitution rule put_64_lsb_f_rules(21). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H32: element(src, [loop__1__n div 8]) <= 18446744073709551615 New H43: spark__unsigned__shift_right_64(element(src, [loop__1__n div 8]) , 8 * (loop__1__n mod 8)) <= 18446744073709551615 -S- Applied substitution rule put_64_lsb_f_rules(32). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H11: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule put_64_lsb_f_rules(33). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H11: for_all(i___1 : integer, src__index__subtype__1__first <= i___1 and i___1 <= src__index__subtype__1__last -> 0 <= element(src, [i___1] ) and element(src, [i___1]) <= 18446744073709551615) -S- Applied substitution rule put_64_lsb_f_rules(38). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H24: 8 * (loop__1__n mod 8) >= 0 -S- Applied substitution rule put_64_lsb_f_rules(39). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H25: 8 * (loop__1__n mod 8) <= 64 *** Proved C1: bit__and(spark__unsigned__shift_right_64(element(src, [ loop__1__n div 8]), 8 * (loop__1__n mod 8)), 255) >= 0 *** Proved C3: bit__and(spark__unsigned__shift_right_64(element(src, [ loop__1__n div 8]), 8 * (loop__1__n mod 8)), 255) >= 0 *** Proved C2: bit__and(spark__unsigned__shift_right_64(element(src, [ loop__1__n div 8]), 8 * (loop__1__n mod 8)), 255) <= 255 *** Proved C4: bit__and(spark__unsigned__shift_right_64(element(src, [ loop__1__n div 8]), 8 * (loop__1__n mod 8)), 255) <= 255 *** PROVED VC. @@@@@@@@@@ VC: procedure_put_64_lsb_first_8. @@@@@@@@@@ *** Proved C1: true *** PROVED VC. @@@@@@@@@@ VC: procedure_put_64_lsb_first_9. @@@@@@@@@@ *** Proved C1: true *** PROVED VC. @@@@@@@@@@ VC: procedure_put_64_lsb_first_10. @@@@@@@@@@ *** Proved C1: true *** PROVED VC. 
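[Editorial note] All ten VCs above are discharged automatically, consistent with the summary in put_64_lsb_first.log later in this archive. For readers tracing the update/element/shift terms of VCs 4-7 back to source, the following is a minimal, hypothetical SPARK sketch of the loop those VCs describe. It is reconstructed purely from the mangled identifiers in the log (spark__crypto__byte_seq, spark__crypto__u64_seq, spark__unsigned__shift_right_64, bit__and ... 255); the package, type and parameter names are assumptions, and this is not the shipped body of Skein.Put_64_LSB_First.

   --  Hedged sketch only: names are inferred from the FDL identifiers
   --  above and may not match the real SPARK declarations.
   procedure Put_64_LSB_First
     (Dst        : in out SPARK.Crypto.Byte_Seq;
      Dst_Offset : in     Natural;
      Src        : in     SPARK.Crypto.U64_Seq;
      Byte_Count : in     Natural)
   is
   begin
      if Byte_Count >= 1 then
         for N in Natural range 0 .. Byte_Count - 1 loop
            --  Take word N / 8 of Src, shift its (N mod 8)'th byte down,
            --  and mask to 8 bits: this is exactly the
            --  update(dst, [dst_offset + n], bit__and(shift_right_64(...), 255))
            --  term simplified in VCs 4 and 7 above.
            Dst (Dst_Offset + N) :=
              SPARK.Unsigned.Byte
                (SPARK.Unsigned.Shift_Right_64
                   (Src (N / 8), 8 * (N mod 8)) and 255);
         end loop;
      end if;
   end Put_64_LSB_First;

Read this way, the index checks proved in VCs 5 and 6 (loop__1__n div 8 <= src__index__subtype__1__last and dst_offset + loop__1__n <= dst__index__subtype__1__last) correspond to the two array references in the assignment, given the caller obligations byte_count <= (src__index__subtype__1__last + 1) * 8 and dst__index__subtype__1__last >= dst_offset + (byte_count - 1) carried as hypotheses throughout.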
spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_hash.vct0000644000175000017500000000000011712513676026360 0ustar eugeneugenspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_hash.fdl0000644000175000017500000002241011712513676026342 0ustar eugeneugen {*******************************************************} {FDL Declarations} {Examiner Pro Edition} {*******************************************************} {function Skein.Skein_512_Hash} title function skein_512_hash; function round__(real) : integer; type natural = integer; type interfaces__unsigned_8 = integer; type interfaces__unsigned_16 = integer; type interfaces__unsigned_32 = integer; type interfaces__unsigned_64 = integer; type spark__unsigned__u6 = integer; type spark__unsigned__u7 = integer; type spark__unsigned__byte = integer; type spark__unsigned__u16 = integer; type spark__unsigned__u32 = integer; type spark__unsigned__u64 = integer; type spark__crypto__word_count_t = integer; type hash_bit_length = integer; type initialized_hash_bit_length = integer; type skein_512_state_words_index = integer; type skein_512_block_bytes_count = integer; type skein_512_block_bytes_index = integer; type skein_512_state_bytes_index = integer; type spark__crypto__byte_seq = array [natural] of spark__unsigned__byte; type spark__crypto__u64_seq = array [ spark__crypto__word_count_t] of spark__unsigned__u64; type tweak_value = record byte_count_lsb : spark__unsigned__u64; byte_count_msb : spark__unsigned__u32; reserved : spark__unsigned__u16; tree_level : spark__unsigned__u7; bit_pad : boolean; field_type : spark__unsigned__u6; first_block : boolean; final_block : boolean end; type context_header = record tweak_words : tweak_value; hash_bit_len : hash_bit_length; byte_count : natural end; type skein_512_context = record h : context_header; x : spark__crypto__u64_seq; b : spark__crypto__byte_seq end; const skein_512_block_bytes_c : integer = pending; const skein_512_state_bytes_index__base__first : integer = pending; const skein_512_state_bytes_index__base__last : integer = pending; const skein_512_block_bytes_index__base__first : integer = pending; const skein_512_block_bytes_index__base__last : integer = pending; const skein_512_block_bytes_count__base__first : integer = pending; const skein_512_block_bytes_count__base__last : integer = pending; const skein_512_state_words_index__base__first : integer = pending; const skein_512_state_words_index__base__last : integer = pending; const initialized_hash_bit_length__base__first : integer = pending; const initialized_hash_bit_length__base__last : integer = pending; const hash_bit_length__base__first : integer = pending; const hash_bit_length__base__last : integer = pending; const spark__crypto__word_count_t__base__first : integer = pending; const spark__crypto__word_count_t__base__last : integer = pending; const spark__unsigned__u64__base__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__base__last : interfaces__unsigned_64 = pending; const spark__unsigned__u32__base__first : interfaces__unsigned_32 = pending; const spark__unsigned__u32__base__last : interfaces__unsigned_32 = pending; const spark__unsigned__u16__base__first : interfaces__unsigned_16 = pending; const spark__unsigned__u16__base__last : interfaces__unsigned_16 = pending; const spark__unsigned__byte__base__first : interfaces__unsigned_8 = pending; const spark__unsigned__byte__base__last : interfaces__unsigned_8 = pending; const spark__unsigned__u7__base__first : 
spark__unsigned__u7 = pending; const spark__unsigned__u7__base__last : spark__unsigned__u7 = pending; const spark__unsigned__u6__base__first : spark__unsigned__u6 = pending; const spark__unsigned__u6__base__last : spark__unsigned__u6 = pending; const interfaces__unsigned_64__base__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_32__base__first : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__base__last : interfaces__unsigned_32 = pending; const interfaces__unsigned_16__base__first : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__base__last : interfaces__unsigned_16 = pending; const interfaces__unsigned_8__base__first : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__base__last : interfaces__unsigned_8 = pending; const natural__base__first : integer = pending; const natural__base__last : integer = pending; const integer__base__first : integer = pending; const integer__base__last : integer = pending; const context_header__size : integer = pending; const tweak_value__size : integer = pending; const data__index__subtype__1__first : integer = pending; const data__index__subtype__1__last : integer = pending; const skein_512_context__size : integer = pending; const skein_512_state_bytes_index__first : integer = pending; const skein_512_state_bytes_index__last : integer = pending; const skein_512_state_bytes_index__size : integer = pending; const skein_512_block_bytes_index__first : integer = pending; const skein_512_block_bytes_index__last : integer = pending; const skein_512_block_bytes_index__size : integer = pending; const skein_512_block_bytes_count__first : integer = pending; const skein_512_block_bytes_count__last : integer = pending; const skein_512_block_bytes_count__size : integer = pending; const skein_512_state_words_index__first : integer = pending; const skein_512_state_words_index__last : integer = pending; const skein_512_state_words_index__size : integer = pending; const initialized_hash_bit_length__first : integer = pending; const initialized_hash_bit_length__last : integer = pending; const initialized_hash_bit_length__size : integer = pending; const hash_bit_length__first : integer = pending; const hash_bit_length__last : integer = pending; const hash_bit_length__size : integer = pending; const spark__crypto__word_count_t__first : integer = pending; const spark__crypto__word_count_t__last : integer = pending; const spark__crypto__word_count_t__size : integer = pending; const spark__unsigned__u64__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__last : interfaces__unsigned_64 = pending; const spark__unsigned__u64__modulus : integer = pending; const spark__unsigned__u64__size : integer = pending; const spark__unsigned__u32__first : interfaces__unsigned_32 = pending; const spark__unsigned__u32__last : interfaces__unsigned_32 = pending; const spark__unsigned__u32__modulus : integer = pending; const spark__unsigned__u32__size : integer = pending; const spark__unsigned__u16__first : interfaces__unsigned_16 = pending; const spark__unsigned__u16__last : interfaces__unsigned_16 = pending; const spark__unsigned__u16__modulus : integer = pending; const spark__unsigned__u16__size : integer = pending; const spark__unsigned__byte__first : interfaces__unsigned_8 = pending; const spark__unsigned__byte__last : interfaces__unsigned_8 = pending; const spark__unsigned__byte__modulus : integer = pending; const 
spark__unsigned__byte__size : integer = pending; const spark__unsigned__u7__first : spark__unsigned__u7 = pending; const spark__unsigned__u7__last : spark__unsigned__u7 = pending; const spark__unsigned__u7__modulus : integer = pending; const spark__unsigned__u7__size : integer = pending; const spark__unsigned__u6__first : spark__unsigned__u6 = pending; const spark__unsigned__u6__last : spark__unsigned__u6 = pending; const spark__unsigned__u6__modulus : integer = pending; const spark__unsigned__u6__size : integer = pending; const interfaces__unsigned_64__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__modulus : integer = pending; const interfaces__unsigned_64__size : integer = pending; const interfaces__unsigned_32__first : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__last : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__modulus : integer = pending; const interfaces__unsigned_32__size : integer = pending; const interfaces__unsigned_16__first : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__last : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__modulus : integer = pending; const interfaces__unsigned_16__size : integer = pending; const interfaces__unsigned_8__first : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__last : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__modulus : integer = pending; const interfaces__unsigned_8__size : integer = pending; const natural__first : integer = pending; const natural__last : integer = pending; const natural__size : integer = pending; const integer__first : integer = pending; const integer__last : integer = pending; const integer__size : integer = pending; var data : spark__crypto__byte_seq; var ctx : skein_512_context; var ctx__2 : skein_512_context; var ctx__1 : skein_512_context; end; spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/get_64_lsb_first.vsm0000644000175000017500000000006511712765060027034 0ustar eugeneugenget_64_lsb_first,0,0,3,3,0,0,0,100.0, 0.0, 0.0, 0.0, spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/put_64_lsb_first.log0000644000175000017500000000126411712513676027047 0ustar eugeneugenSPARK Simplifier Pro Edition Reading put_64_lsb_first.fdl (for inherited FDL type declarations) Reading skein.rlu (for user-defined proof rules) Processing put_64_lsb_first.vcg ... Simplified VC: 1 - All conclusions proved Simplified VC: 2 - All conclusions proved Simplified VC: 3 - All conclusions proved Simplified VC: 4 - All conclusions proved Simplified VC: 5 - All conclusions proved Simplified VC: 6 - All conclusions proved Simplified VC: 7 - All conclusions proved Simplified VC: 8 - All conclusions proved Simplified VC: 9 - All conclusions proved Simplified VC: 10 - All conclusions proved Automatic simplification completed. Simplified output sent to put_64_lsb_first.siv. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_init.slg0000644000175000017500000017543011712513676026415 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Init RRS The following user defined rule files have been read: &&& skein.rlu SEM No semantic checks are performed on the rules. 
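[Editorial note] Before the VC-by-VC detail for Skein.Skein_512_Init, note how to read the "-S- Applied substitution rule ..." lines that follow: each one replaces a named bound constant (declared as "pending" in the corresponding .fdl file, as in skein_512_hash.fdl above) by the literal recorded in the rule file skein_512_in_rules. Those literals come from the Ada subtype bounds, so, as a hedged illustration only, subtype declarations of roughly the following shape would yield the bounds substituted in VC 1 below (rules 105/106/110/111); the actual declarations in the SPARK sources may well be written differently.

   --  Hypothetical shape of the subtypes behind
   --  hash_bit_length__first/last and
   --  initialized_hash_bit_length__first/last.
   subtype Hash_Bit_Length is Natural range 0 .. 2147483640;
   subtype Initialized_Hash_Bit_Length is
     Hash_Bit_Length range 1 .. Hash_Bit_Length'Last;

With these bounds, VC 1 is immediate: the hypothesis hashbitlen in 1 .. 2147483640 subsumes the conclusion hashbitlen in 0 .. 2147483640, which is exactly how the Simplifier discharges C1 and C2 from H2 and H3 below.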
@@@@@@@@@@ VC: procedure_skein_512_init_1. @@@@@@@@@@ -S- Applied substitution rule skein_512_in_rules(105). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New C1: hashbitlen >= 0 -S- Applied substitution rule skein_512_in_rules(106). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New C2: hashbitlen <= 2147483640 -S- Applied substitution rule skein_512_in_rules(110). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H2: hashbitlen >= 1 -S- Applied substitution rule skein_512_in_rules(111). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H3: hashbitlen <= 2147483640 *** Proved C1: hashbitlen >= 0 using hypothesis H2. *** Proved C2: hashbitlen <= 2147483640 using hypothesis H3. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_init_2. @@@@@@@@@@ -S- Applied substitution rule skein_512_in_rules(2). This was achieved by replacing all occurrences of skein_block_type_cfg by: 4. New C1: 4 >= spark__unsigned__u6__first New C2: 4 <= spark__unsigned__u6__last -S- Applied substitution rule skein_512_in_rules(64). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New C1: true -S- Applied substitution rule skein_512_in_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New C2: true *** Proved C1: true *** Proved C2: true *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_init_3. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: fld_h(ctx__1) = upf_tweak_words(upf_byte_count(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context) , hashbitlen))), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_cfg, first_block := true, final_block := true)) %%% Simplified H9 on reading formula in, to give: %%% H9: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen %%% Simplified H28 on reading formula in, to give: %%% H28: fld_x(ctx__1) = fld_x(null_skein_512_context) %%% Simplified H29 on reading formula in, to give: %%% H29: fld_b(ctx__1) = fld_b(null_skein_512_context) %%% Simplified H30 on reading formula in, to give: %%% H30: ctx__1 = upf_h(null_skein_512_context, fld_h(ctx__1)) *** Proved C1: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen using hypothesis H9. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_init_4. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: fld_h(ctx__1) = upf_tweak_words(upf_byte_count(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context) , hashbitlen))), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_cfg, first_block := true, final_block := true)) %%% Simplified H9 on reading formula in, to give: %%% H9: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen %%% Simplified H28 on reading formula in, to give: %%% H28: fld_x(ctx__1) = fld_x(null_skein_512_context) %%% Simplified H29 on reading formula in, to give: %%% H29: fld_b(ctx__1) = fld_b(null_skein_512_context) %%% Simplified H30 on reading formula in, to give: %%% H30: ctx__1 = upf_h(null_skein_512_context, fld_h(ctx__1)) -S- Applied substitution rule skein_512_in_rules(22). This was achieved by replacing all occurrences of skein_schema_ver by: 5154883667. 
New C1: 5154883667 >= interfaces__unsigned_64__first New C2: 5154883667 <= interfaces__unsigned_64__last -S- Applied substitution rule skein_512_in_rules(58). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C1: true -S- Applied substitution rule skein_512_in_rules(59). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C2: true *** Proved C1: true *** Proved C2: true *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_init_5. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: fld_h(ctx__1) = upf_tweak_words(upf_byte_count(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context) , hashbitlen))), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_cfg, first_block := true, final_block := true)) %%% Simplified H9 on reading formula in, to give: %%% H9: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen %%% Simplified H28 on reading formula in, to give: %%% H28: fld_x(ctx__1) = fld_x(null_skein_512_context) %%% Simplified H29 on reading formula in, to give: %%% H29: fld_b(ctx__1) = fld_b(null_skein_512_context) %%% Simplified H30 on reading formula in, to give: %%% H30: ctx__1 = upf_h(null_skein_512_context, fld_h(ctx__1)) -S- Applied substitution rule skein_512_in_rules(58). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H32: skein_schema_ver >= 0 New H34: spark__unsigned__to_littleendian(skein_schema_ver) >= 0 New C1: hashbitlen >= 0 -S- Applied substitution rule skein_512_in_rules(59). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H33: skein_schema_ver <= 18446744073709551615 New H35: spark__unsigned__to_littleendian(skein_schema_ver) <= 18446744073709551615 New C2: hashbitlen <= 18446744073709551615 -S- Applied substitution rule skein_512_in_rules(94). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= 0 New C3: hashbitlen >= 0 New C5: spark__unsigned__to_littleendian(skein_schema_ver) >= 0 -S- Applied substitution rule skein_512_in_rules(95). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H27: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= 18446744073709551615 New C4: hashbitlen <= 18446744073709551615 New C6: spark__unsigned__to_littleendian(skein_schema_ver) <= 18446744073709551615 *** Proved C5: spark__unsigned__to_littleendian(skein_schema_ver) >= 0 using hypothesis H34. *** Proved C6: spark__unsigned__to_littleendian(skein_schema_ver) <= 18446744073709551615 using hypothesis H35. -S- Applied substitution rule skein_512_in_rules(2). This was achieved by replacing all occurrences of skein_block_type_cfg by: 4. New H6: 4 >= spark__unsigned__u6__first New H7: 4 <= spark__unsigned__u6__last New H8: fld_h(ctx__1) = upf_tweak_words(upf_byte_count(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context) , hashbitlen))), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := 4, first_block := true, final_block := true)) -S- Applied substitution rule skein_512_in_rules(22). This was achieved by replacing all occurrences of skein_schema_ver by: 5154883667. 
New H32: true New H33: true New H34: spark__unsigned__to_littleendian(5154883667) >= 0 New H35: spark__unsigned__to_littleendian(5154883667) <= 18446744073709551615 -S- Applied substitution rule skein_512_in_rules(31). This was achieved by replacing all occurrences of natural__first by: 0. New H11: fld_byte_count(fld_h(ctx__1)) >= 0 -S- Applied substitution rule skein_512_in_rules(32). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H12: fld_byte_count(fld_h(ctx__1)) <= 2147483647 -S- Applied substitution rule skein_512_in_rules(64). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H17: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= 0 New H6: true -S- Applied substitution rule skein_512_in_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H18: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= 63 New H7: true -S- Applied substitution rule skein_512_in_rules(70). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H20: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= 0 -S- Applied substitution rule skein_512_in_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H21: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= 127 -S- Applied substitution rule skein_512_in_rules(82). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H22: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= 0 -S- Applied substitution rule skein_512_in_rules(83). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H23: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= 65535 -S- Applied substitution rule skein_512_in_rules(88). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= 0 -S- Applied substitution rule skein_512_in_rules(89). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H25: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= 4294967295 -S- Applied substitution rule skein_512_in_rules(105). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H4: hashbitlen >= 0 New H13: fld_hash_bit_len(fld_h(ctx__1)) >= 0 -S- Applied substitution rule skein_512_in_rules(106). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H5: hashbitlen <= 2147483640 New H14: fld_hash_bit_len(fld_h(ctx__1)) <= 2147483640 -S- Applied substitution rule skein_512_in_rules(110). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H2: hashbitlen >= 1 -S- Applied substitution rule skein_512_in_rules(111). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H3: hashbitlen <= 2147483640 *** Proved C1: hashbitlen >= 0 using hypothesis H4. *** Proved C2: hashbitlen <= 18446744073709551615 using hypothesis H5. *** Proved C3: hashbitlen >= 0 using hypothesis H4. *** Proved C4: hashbitlen <= 18446744073709551615 using hypothesis H5. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_init_6. 
@@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: fld_h(ctx__1) = upf_tweak_words(upf_byte_count(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context) , hashbitlen))), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_cfg, first_block := true, final_block := true)) %%% Simplified H9 on reading formula in, to give: %%% H9: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen %%% Simplified H28 on reading formula in, to give: %%% H28: fld_x(ctx__1) = fld_x(null_skein_512_context) %%% Simplified H29 on reading formula in, to give: %%% H29: fld_b(ctx__1) = fld_b(null_skein_512_context) %%% Simplified H30 on reading formula in, to give: %%% H30: ctx__1 = upf_h(null_skein_512_context, fld_h(ctx__1)) -S- Applied substitution rule skein_512_in_rules(23). This was achieved by replacing all occurrences of skein_cfg_tree_info_sequential by: 0. New C1: 0 >= interfaces__unsigned_64__first New C2: 0 <= interfaces__unsigned_64__last -S- Applied substitution rule skein_512_in_rules(58). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H32: skein_schema_ver >= 0 New H34: spark__unsigned__to_littleendian(skein_schema_ver) >= 0 New H36: hashbitlen >= 0 New H42: spark__unsigned__to_littleendian(hashbitlen) >= 0 New C1: true -S- Applied substitution rule skein_512_in_rules(59). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H33: skein_schema_ver <= 18446744073709551615 New H35: spark__unsigned__to_littleendian(skein_schema_ver) <= 18446744073709551615 New H37: hashbitlen <= 18446744073709551615 New H43: spark__unsigned__to_littleendian(hashbitlen) <= 18446744073709551615 New C2: true -S- Applied substitution rule skein_512_in_rules(94). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= 0 New H38: hashbitlen >= 0 New H40: spark__unsigned__to_littleendian(skein_schema_ver) >= 0 New C3: spark__unsigned__to_littleendian(hashbitlen) >= 0 -S- Applied substitution rule skein_512_in_rules(95). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H27: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= 18446744073709551615 New H39: hashbitlen <= 18446744073709551615 New H41: spark__unsigned__to_littleendian(skein_schema_ver) <= 18446744073709551615 New C4: spark__unsigned__to_littleendian(hashbitlen) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true *** Proved C3: spark__unsigned__to_littleendian(hashbitlen) >= 0 using hypothesis H42. *** Proved C4: spark__unsigned__to_littleendian(hashbitlen) <= 18446744073709551615 using hypothesis H43. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_init_7. 
@@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: fld_h(ctx__1) = upf_tweak_words(upf_byte_count(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context) , hashbitlen))), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_cfg, first_block := true, final_block := true)) %%% Simplified H9 on reading formula in, to give: %%% H9: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen %%% Simplified H28 on reading formula in, to give: %%% H28: fld_x(ctx__1) = fld_x(null_skein_512_context) %%% Simplified H29 on reading formula in, to give: %%% H29: fld_b(ctx__1) = fld_b(null_skein_512_context) %%% Simplified H30 on reading formula in, to give: %%% H30: ctx__1 = upf_h(null_skein_512_context, fld_h(ctx__1)) -S- Applied substitution rule skein_512_in_rules(94). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= 0 New H38: hashbitlen >= 0 New H40: spark__unsigned__to_littleendian(skein_schema_ver) >= 0 New H46: spark__unsigned__to_littleendian(hashbitlen) >= 0 New C1: true New C3: spark__unsigned__to_littleendian(skein_cfg_tree_info_sequential) >= 0 -S- Applied substitution rule skein_512_in_rules(95). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H27: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= 18446744073709551615 New H39: hashbitlen <= 18446744073709551615 New H41: spark__unsigned__to_littleendian(skein_schema_ver) <= 18446744073709551615 New H47: spark__unsigned__to_littleendian(hashbitlen) <= 18446744073709551615 New C2: true New C4: spark__unsigned__to_littleendian(skein_cfg_tree_info_sequential) <= 18446744073709551615 *** Proved C1: true *** Proved C2: true -S- Applied substitution rule skein_512_in_rules(2). This was achieved by replacing all occurrences of skein_block_type_cfg by: 4. New H6: 4 >= spark__unsigned__u6__first New H7: 4 <= spark__unsigned__u6__last New H8: fld_h(ctx__1) = upf_tweak_words(upf_byte_count(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context) , hashbitlen))), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := 4, first_block := true, final_block := true)) -S- Applied substitution rule skein_512_in_rules(22). This was achieved by replacing all occurrences of skein_schema_ver by: 5154883667. New H32: 5154883667 >= interfaces__unsigned_64__first New H33: 5154883667 <= interfaces__unsigned_64__last New H34: spark__unsigned__to_littleendian(5154883667) >= interfaces__unsigned_64__first New H35: spark__unsigned__to_littleendian(5154883667) <= interfaces__unsigned_64__last New H40: spark__unsigned__to_littleendian(5154883667) >= 0 New H41: spark__unsigned__to_littleendian(5154883667) <= 18446744073709551615 -S- Applied substitution rule skein_512_in_rules(23). This was achieved by replacing all occurrences of skein_cfg_tree_info_sequential by: 0. New H44: 0 >= interfaces__unsigned_64__first New H45: 0 <= interfaces__unsigned_64__last New H48: spark__unsigned__to_littleendian(0) >= interfaces__unsigned_64__first New H49: spark__unsigned__to_littleendian(0) <= interfaces__unsigned_64__last New C3: spark__unsigned__to_littleendian(0) >= 0 New C4: spark__unsigned__to_littleendian(0) <= 18446744073709551615 -S- Applied substitution rule skein_512_in_rules(31). 
This was achieved by replacing all occurrences of natural__first by: 0. New H11: fld_byte_count(fld_h(ctx__1)) >= 0 -S- Applied substitution rule skein_512_in_rules(32). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H12: fld_byte_count(fld_h(ctx__1)) <= 2147483647 -S- Applied substitution rule skein_512_in_rules(58). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H36: hashbitlen >= 0 New H42: spark__unsigned__to_littleendian(hashbitlen) >= 0 New H32: true New H34: spark__unsigned__to_littleendian(5154883667) >= 0 New H44: true New H48: spark__unsigned__to_littleendian(0) >= 0 -S- Applied substitution rule skein_512_in_rules(59). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H37: hashbitlen <= 18446744073709551615 New H43: spark__unsigned__to_littleendian(hashbitlen) <= 18446744073709551615 New H33: true New H35: spark__unsigned__to_littleendian(5154883667) <= 18446744073709551615 New H45: true New H49: spark__unsigned__to_littleendian(0) <= 18446744073709551615 -S- Applied substitution rule skein_512_in_rules(64). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H17: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= 0 New H6: true -S- Applied substitution rule skein_512_in_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H18: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= 63 New H7: true -S- Applied substitution rule skein_512_in_rules(70). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H20: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= 0 -S- Applied substitution rule skein_512_in_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H21: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= 127 -S- Applied substitution rule skein_512_in_rules(82). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H22: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= 0 -S- Applied substitution rule skein_512_in_rules(83). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H23: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= 65535 -S- Applied substitution rule skein_512_in_rules(88). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= 0 -S- Applied substitution rule skein_512_in_rules(89). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H25: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= 4294967295 -S- Applied substitution rule skein_512_in_rules(105). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H4: hashbitlen >= 0 New H13: fld_hash_bit_len(fld_h(ctx__1)) >= 0 -S- Applied substitution rule skein_512_in_rules(106). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H5: hashbitlen <= 2147483640 New H14: fld_hash_bit_len(fld_h(ctx__1)) <= 2147483640 -S- Applied substitution rule skein_512_in_rules(110). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H2: hashbitlen >= 1 -S- Applied substitution rule skein_512_in_rules(111). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. 
New H3: hashbitlen <= 2147483640 *** Proved C3: spark__unsigned__to_littleendian(0) >= 0 using hypothesis H48. *** Proved C4: spark__unsigned__to_littleendian(0) <= 18446744073709551615 using hypothesis H49. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_init_8. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: fld_h(ctx__1) = upf_tweak_words(upf_byte_count(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context) , hashbitlen))), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_cfg, first_block := true, final_block := true)) %%% Simplified H9 on reading formula in, to give: %%% H9: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen %%% Simplified H28 on reading formula in, to give: %%% H28: fld_x(ctx__1) = fld_x(null_skein_512_context) %%% Simplified H29 on reading formula in, to give: %%% H29: fld_b(ctx__1) = fld_b(null_skein_512_context) %%% Simplified H30 on reading formula in, to give: %%% H30: ctx__1 = upf_h(null_skein_512_context, fld_h(ctx__1)) *** Proved C1: 0 >= spark__unsigned__u64__first using hypothesis H50. *** Proved C2: 0 <= spark__unsigned__u64__last using hypothesis H51. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_init_9. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: fld_h(ctx__1) = upf_tweak_words(upf_byte_count(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context) , hashbitlen))), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_cfg, first_block := true, final_block := true)) %%% Simplified H9 on reading formula in, to give: %%% H9: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen %%% Simplified H28 on reading formula in, to give: %%% H28: fld_x(ctx__1) = fld_x(null_skein_512_context) %%% Simplified H29 on reading formula in, to give: %%% H29: fld_b(ctx__1) = fld_b(null_skein_512_context) %%% Simplified H30 on reading formula in, to give: %%% H30: ctx__1 = upf_h(null_skein_512_context, fld_h(ctx__1)) --- Hypothesis H54 has been replaced by "true". (It is already present, as H50). --- Hypothesis H55 has been replaced by "true". (It is already present, as H51). %%% Simplified C1 on reading formula in, to give: %%% C1: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen *** Proved C1: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen using hypothesis H9. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_init_10. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: fld_h(ctx__1) = upf_tweak_words(upf_byte_count(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context) , hashbitlen))), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_cfg, first_block := true, final_block := true)) %%% Simplified H9 on reading formula in, to give: %%% H9: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen %%% Simplified H28 on reading formula in, to give: %%% H28: fld_x(ctx__1) = fld_x(null_skein_512_context) %%% Simplified H29 on reading formula in, to give: %%% H29: fld_b(ctx__1) = fld_b(null_skein_512_context) %%% Simplified H30 on reading formula in, to give: %%% H30: ctx__1 = upf_h(null_skein_512_context, fld_h(ctx__1)) --- Hypothesis H54 has been replaced by "true". (It is already present, as H50). --- Hypothesis H55 has been replaced by "true". (It is already present, as H51). 
%%% Simplified H56 on reading formula in, to give: %%% H56: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen %%% Simplified C7 on reading formula in, to give: %%% C7: fld_hash_bit_len(fld_h(ctx__1)) >= initialized_hash_bit_length__first %%% Simplified C8 on reading formula in, to give: %%% C8: fld_hash_bit_len(fld_h(ctx__1)) <= initialized_hash_bit_length__last %%% Simplified C9 on reading formula in, to give: %%% C9: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first %%% Simplified C10 on reading formula in, to give: %%% C10: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last %%% Simplified C12 on reading formula in, to give: %%% C12: 63 <= skein_512_state_bytes_index__last %%% Simplified C13 on reading formula in, to give: %%% C13: 63 <= skein_512_state_bytes_index__last %%% Simplified C15 on reading formula in, to give: %%% C15: 63 <= natural__last *** Proved C5: 0 >= natural__first using hypotheses H10 & H11. *** Proved C6: 0 <= natural__last using hypotheses H10 & H12. *** Proved C7: fld_hash_bit_len(fld_h(ctx__1)) >= initialized_hash_bit_length__first using hypotheses H2 & H9. *** Proved C8: fld_hash_bit_len(fld_h(ctx__1)) <= initialized_hash_bit_length__last using hypotheses H3 & H9. -S- Applied substitution rule skein_512_in_rules(24). This was achieved by replacing all occurrences of skein_cfg_str_len by: 32. New C1: 32 >= natural__first New C2: 32 <= natural__last -S- Applied substitution rule skein_512_in_rules(31). This was achieved by replacing all occurrences of natural__first by: 0. New H11: fld_byte_count(fld_h(ctx__1)) >= 0 New C1: true -S- Applied substitution rule skein_512_in_rules(32). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H12: fld_byte_count(fld_h(ctx__1)) <= 2147483647 New C14: skein_512_state_bytes_index__last <= 2147483647 New C15: true New C2: true -S- Applied substitution rule skein_512_in_rules(135). This was achieved by replacing all occurrences of positive_block_512_count_t__first by: 1. New C3: true -S- Applied substitution rule skein_512_in_rules(136). This was achieved by replacing all occurrences of positive_block_512_count_t__last by: 33554431. New C4: true -S- Applied substitution rule skein_512_in_rules(120). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New C9: fld_byte_count(fld_h(ctx__1)) >= 0 -S- Applied substitution rule skein_512_in_rules(121). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New C10: fld_byte_count(fld_h(ctx__1)) <= 64 -S- Applied substitution rule skein_512_in_rules(131). This was achieved by replacing all occurrences of skein_512_state_bytes_index__last by: 63. New C12: true New C13: true New C14: true *** Proved C1: true *** Proved C15: true *** Proved C2: true *** Proved C3: true *** Proved C4: true *** Proved C9: fld_byte_count(fld_h(ctx__1)) >= 0 using hypothesis H11. *** Proved C10: fld_byte_count(fld_h(ctx__1)) <= 64 using hypothesis H10. *** Proved C12: true *** Proved C13: true *** Proved C14: true -S- Applied substitution rule skein_512_in_rules(2). This was achieved by replacing all occurrences of skein_block_type_cfg by: 4. 
New H6: 4 >= spark__unsigned__u6__first New H7: 4 <= spark__unsigned__u6__last New H8: fld_h(ctx__1) = upf_tweak_words(upf_byte_count(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context) , hashbitlen))), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := 4, first_block := true, final_block := true)) -S- Applied substitution rule skein_512_in_rules(22). This was achieved by replacing all occurrences of skein_schema_ver by: 5154883667. New H32: 5154883667 >= interfaces__unsigned_64__first New H33: 5154883667 <= interfaces__unsigned_64__last New H34: spark__unsigned__to_littleendian(5154883667) >= interfaces__unsigned_64__first New H35: spark__unsigned__to_littleendian(5154883667) <= interfaces__unsigned_64__last New H40: spark__unsigned__to_littleendian(5154883667) >= spark__unsigned__u64__first New H41: spark__unsigned__to_littleendian(5154883667) <= spark__unsigned__u64__last -S- Applied substitution rule skein_512_in_rules(23). This was achieved by replacing all occurrences of skein_cfg_tree_info_sequential by: 0. New H44: 0 >= interfaces__unsigned_64__first New H45: 0 <= interfaces__unsigned_64__last New H48: spark__unsigned__to_littleendian(0) >= interfaces__unsigned_64__first New H49: spark__unsigned__to_littleendian(0) <= interfaces__unsigned_64__last New H52: spark__unsigned__to_littleendian(0) >= spark__unsigned__u64__first New H53: spark__unsigned__to_littleendian(0) <= spark__unsigned__u64__last -S- Applied substitution rule skein_512_in_rules(58). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H36: hashbitlen >= 0 New H42: spark__unsigned__to_littleendian(hashbitlen) >= 0 New H32: true New H34: spark__unsigned__to_littleendian(5154883667) >= 0 New H44: true New H48: spark__unsigned__to_littleendian(0) >= 0 -S- Applied substitution rule skein_512_in_rules(59). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H37: hashbitlen <= 18446744073709551615 New H43: spark__unsigned__to_littleendian(hashbitlen) <= 18446744073709551615 New H33: true New H35: spark__unsigned__to_littleendian(5154883667) <= 18446744073709551615 New H45: true New H49: spark__unsigned__to_littleendian(0) <= 18446744073709551615 -S- Applied substitution rule skein_512_in_rules(64). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H17: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= 0 New H6: true -S- Applied substitution rule skein_512_in_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H18: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= 63 New H7: true -S- Applied substitution rule skein_512_in_rules(70). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H20: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) >= 0 -S- Applied substitution rule skein_512_in_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H21: fld_tree_level(fld_tweak_words(fld_h(ctx__1))) <= 127 -S- Applied substitution rule skein_512_in_rules(82). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H22: fld_reserved(fld_tweak_words(fld_h(ctx__1))) >= 0 -S- Applied substitution rule skein_512_in_rules(83). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. 
New H23: fld_reserved(fld_tweak_words(fld_h(ctx__1))) <= 65535 -S- Applied substitution rule skein_512_in_rules(88). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H24: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) >= 0 -S- Applied substitution rule skein_512_in_rules(89). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H25: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__1))) <= 4294967295 -S- Applied substitution rule skein_512_in_rules(94). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H26: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) >= 0 New H38: hashbitlen >= 0 New H46: spark__unsigned__to_littleendian(hashbitlen) >= 0 New H50: true New H40: spark__unsigned__to_littleendian(5154883667) >= 0 New H52: spark__unsigned__to_littleendian(0) >= 0 -S- Applied substitution rule skein_512_in_rules(95). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H27: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__1))) <= 18446744073709551615 New H39: hashbitlen <= 18446744073709551615 New H47: spark__unsigned__to_littleendian(hashbitlen) <= 18446744073709551615 New H51: true New H41: spark__unsigned__to_littleendian(5154883667) <= 18446744073709551615 New H53: spark__unsigned__to_littleendian(0) <= 18446744073709551615 -S- Applied substitution rule skein_512_in_rules(105). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H4: hashbitlen >= 0 New H13: fld_hash_bit_len(fld_h(ctx__1)) >= 0 -S- Applied substitution rule skein_512_in_rules(106). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H5: hashbitlen <= 2147483640 New H14: fld_hash_bit_len(fld_h(ctx__1)) <= 2147483640 -S- Applied substitution rule skein_512_in_rules(110). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H2: hashbitlen >= 1 -S- Applied substitution rule skein_512_in_rules(111). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H3: hashbitlen <= 2147483640 -S- Applied substitution rule skein_512_in_rules(130). This was achieved by replacing all occurrences of skein_512_state_bytes_index__first by: 0. New C11: true *** Proved C11: true *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_init_11. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: fld_h(ctx__1) = upf_tweak_words(upf_byte_count(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context) , hashbitlen))), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_cfg, first_block := true, final_block := true)) %%% Simplified H9 on reading formula in, to give: %%% H9: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen %%% Simplified H28 on reading formula in, to give: %%% H28: fld_x(ctx__1) = fld_x(null_skein_512_context) %%% Simplified H29 on reading formula in, to give: %%% H29: fld_b(ctx__1) = fld_b(null_skein_512_context) %%% Simplified H30 on reading formula in, to give: %%% H30: ctx__1 = upf_h(null_skein_512_context, fld_h(ctx__1)) --- Hypothesis H54 has been replaced by "true". (It is already present, as H50). --- Hypothesis H55 has been replaced by "true". (It is already present, as H51). 
%%% Simplified H56 on reading formula in, to give: %%% H56: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen %%% Simplified H63 on reading formula in, to give: %%% H63: fld_hash_bit_len(fld_h(ctx__1)) >= initialized_hash_bit_length__first %%% Simplified H64 on reading formula in, to give: %%% H64: fld_hash_bit_len(fld_h(ctx__1)) <= initialized_hash_bit_length__last %%% Simplified H65 on reading formula in, to give: %%% H65: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first %%% Simplified H66 on reading formula in, to give: %%% H66: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last %%% Simplified H68 on reading formula in, to give: %%% H68: 63 <= skein_512_state_bytes_index__last %%% Simplified H69 on reading formula in, to give: %%% H69: 63 <= skein_512_state_bytes_index__last %%% Simplified H71 on reading formula in, to give: %%% H71: 63 <= natural__last %%% Simplified H74 on reading formula in, to give: %%% H74: fld_hash_bit_len(fld_h(ctx__2)) = fld_hash_bit_len(fld_h(ctx__1) ) %%% Simplified H77 on reading formula in, to give: %%% H77: fld_byte_count(fld_h(ctx__2)) = fld_byte_count(fld_h(ctx__1)) %%% Simplified H78 on reading formula in, to give: %%% H78: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H79 on reading formula in, to give: %%% H79: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last) *** Proved C1: fld_hash_bit_len(fld_h(ctx__2)) = hashbitlen using hypotheses H9 & H74. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_init_12. @@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: fld_h(ctx__1) = upf_tweak_words(upf_byte_count(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context) , hashbitlen))), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_cfg, first_block := true, final_block := true)) %%% Simplified H9 on reading formula in, to give: %%% H9: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen %%% Simplified H28 on reading formula in, to give: %%% H28: fld_x(ctx__1) = fld_x(null_skein_512_context) %%% Simplified H29 on reading formula in, to give: %%% H29: fld_b(ctx__1) = fld_b(null_skein_512_context) %%% Simplified H30 on reading formula in, to give: %%% H30: ctx__1 = upf_h(null_skein_512_context, fld_h(ctx__1)) --- Hypothesis H54 has been replaced by "true". (It is already present, as H50). --- Hypothesis H55 has been replaced by "true". (It is already present, as H51). 
%%% Simplified H56 on reading formula in, to give: %%% H56: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen %%% Simplified H63 on reading formula in, to give: %%% H63: fld_hash_bit_len(fld_h(ctx__1)) >= initialized_hash_bit_length__first %%% Simplified H64 on reading formula in, to give: %%% H64: fld_hash_bit_len(fld_h(ctx__1)) <= initialized_hash_bit_length__last %%% Simplified H65 on reading formula in, to give: %%% H65: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first %%% Simplified H66 on reading formula in, to give: %%% H66: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last %%% Simplified H68 on reading formula in, to give: %%% H68: 63 <= skein_512_state_bytes_index__last %%% Simplified H69 on reading formula in, to give: %%% H69: 63 <= skein_512_state_bytes_index__last %%% Simplified H71 on reading formula in, to give: %%% H71: 63 <= natural__last %%% Simplified H74 on reading formula in, to give: %%% H74: fld_hash_bit_len(fld_h(ctx__2)) = fld_hash_bit_len(fld_h(ctx__1) ) %%% Simplified H77 on reading formula in, to give: %%% H77: fld_byte_count(fld_h(ctx__2)) = fld_byte_count(fld_h(ctx__1)) %%% Simplified H78 on reading formula in, to give: %%% H78: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H79 on reading formula in, to give: %%% H79: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule skein_512_in_rules(3). This was achieved by replacing all occurrences of skein_block_type_msg by: 48. New C1: 48 >= spark__unsigned__u6__first New C2: 48 <= spark__unsigned__u6__last -S- Applied substitution rule skein_512_in_rules(64). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H6: skein_block_type_cfg >= 0 New H17: fld_field_type(fld_tweak_words(fld_h(ctx__1))) >= 0 New H86: fld_field_type(fld_tweak_words(fld_h(ctx__2))) >= 0 New C1: true -S- Applied substitution rule skein_512_in_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H7: skein_block_type_cfg <= 63 New H18: fld_field_type(fld_tweak_words(fld_h(ctx__1))) <= 63 New H87: fld_field_type(fld_tweak_words(fld_h(ctx__2))) <= 63 New C2: true *** Proved C1: true *** Proved C2: true *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_init_13. 
@@@@@@@@@@ %%% Simplified H8 on reading formula in, to give: %%% H8: fld_h(ctx__1) = upf_tweak_words(upf_byte_count(fld_h(upf_h( null_skein_512_context, upf_hash_bit_len(fld_h(null_skein_512_context) , hashbitlen))), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_cfg, first_block := true, final_block := true)) %%% Simplified H9 on reading formula in, to give: %%% H9: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen %%% Simplified H28 on reading formula in, to give: %%% H28: fld_x(ctx__1) = fld_x(null_skein_512_context) %%% Simplified H29 on reading formula in, to give: %%% H29: fld_b(ctx__1) = fld_b(null_skein_512_context) %%% Simplified H30 on reading formula in, to give: %%% H30: ctx__1 = upf_h(null_skein_512_context, fld_h(ctx__1)) --- Hypothesis H54 has been replaced by "true". (It is already present, as H50). --- Hypothesis H55 has been replaced by "true". (It is already present, as H51). %%% Simplified H56 on reading formula in, to give: %%% H56: fld_hash_bit_len(fld_h(ctx__1)) = hashbitlen %%% Simplified H63 on reading formula in, to give: %%% H63: fld_hash_bit_len(fld_h(ctx__1)) >= initialized_hash_bit_length__first %%% Simplified H64 on reading formula in, to give: %%% H64: fld_hash_bit_len(fld_h(ctx__1)) <= initialized_hash_bit_length__last %%% Simplified H65 on reading formula in, to give: %%% H65: fld_byte_count(fld_h(ctx__1)) >= skein_512_block_bytes_count__first %%% Simplified H66 on reading formula in, to give: %%% H66: fld_byte_count(fld_h(ctx__1)) <= skein_512_block_bytes_count__last %%% Simplified H68 on reading formula in, to give: %%% H68: 63 <= skein_512_state_bytes_index__last %%% Simplified H69 on reading formula in, to give: %%% H69: 63 <= skein_512_state_bytes_index__last %%% Simplified H71 on reading formula in, to give: %%% H71: 63 <= natural__last %%% Simplified H74 on reading formula in, to give: %%% H74: fld_hash_bit_len(fld_h(ctx__2)) = fld_hash_bit_len(fld_h(ctx__1) ) %%% Simplified H77 on reading formula in, to give: %%% H77: fld_byte_count(fld_h(ctx__2)) = fld_byte_count(fld_h(ctx__1)) %%% Simplified H78 on reading formula in, to give: %%% H78: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx__2), [i___2]) and element(fld_b(ctx__2), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H79 on reading formula in, to give: %%% H79: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx__2), [i___1]) and element(fld_x(ctx__2), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H100 on reading formula in, to give: %%% H100: fld_h(ctx__3) = upf_tweak_words(upf_byte_count(fld_h(ctx__2), 0), mk__tweak_value(byte_count_lsb := 0, byte_count_msb := 0, reserved := 0, tree_level := 0, bit_pad := false, field_type := skein_block_type_msg, first_block := true, final_block := false)) *** Proved C1: fld_hash_bit_len(fld_h(ctx__3)) >= initialized_hash_bit_length__first using hypotheses H72 & H101. *** Proved C2: fld_hash_bit_len(fld_h(ctx__3)) <= initialized_hash_bit_length__last using hypotheses H73 & H101. *** Proved C3: fld_hash_bit_len(fld_h(ctx__3)) = hashbitlen using hypotheses H97 & H101. *** Proved C4: fld_byte_count(fld_h(ctx__3)) = 0 using hypothesis H102. 
*** Proved C5: fld_byte_count(fld_h(ctx__3)) >= skein_512_block_bytes_count__first using hypotheses H10, H65 & H102. *** Proved C6: fld_byte_count(fld_h(ctx__3)) <= skein_512_block_bytes_count__last using hypotheses H10, H66 & H102. *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_init_14. @@@@@@@@@@ *** Proved C1: true *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_512_init_15. @@@@@@@@@@ %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H6 on reading formula in, to give: %%% H6: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule skein_512_in_rules(110). This was achieved by replacing all occurrences of initialized_hash_bit_length__first by: 1. New H3: hashbitlen >= 1 New H24: fld_hash_bit_len(fld_h(ctx)) >= 1 New C1: hash_bit_len_of(ctx) >= 1 -S- Applied substitution rule skein_512_in_rules(111). This was achieved by replacing all occurrences of initialized_hash_bit_length__last by: 2147483640. New H4: hashbitlen <= 2147483640 New H25: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New C2: hash_bit_len_of(ctx) <= 2147483640 -S- Applied substitution rule skein_512_in_rules(120). This was achieved by replacing all occurrences of skein_512_block_bytes_count__first by: 0. New H28: fld_byte_count(fld_h(ctx)) >= 0 New C5: byte_count_of(ctx) >= 0 -S- Applied substitution rule skein_512_in_rules(121). This was achieved by replacing all occurrences of skein_512_block_bytes_count__last by: 64. New H29: fld_byte_count(fld_h(ctx)) <= 64 New C6: byte_count_of(ctx) <= 64 -S- Applied substitution rule skein_512_in_rules(31). This was achieved by replacing all occurrences of natural__first by: 0. New H7: fld_byte_count(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_in_rules(32). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H8: fld_byte_count(fld_h(ctx)) <= 2147483647 -S- Applied substitution rule skein_512_in_rules(64). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H13: fld_field_type(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_in_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H14: fld_field_type(fld_tweak_words(fld_h(ctx))) <= 63 -S- Applied substitution rule skein_512_in_rules(70). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H16: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_in_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H17: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= 127 -S- Applied substitution rule skein_512_in_rules(76). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H5: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule skein_512_in_rules(77). 
This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H5: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule skein_512_in_rules(82). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H18: fld_reserved(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_in_rules(83). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H19: fld_reserved(fld_tweak_words(fld_h(ctx))) <= 65535 -S- Applied substitution rule skein_512_in_rules(88). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H20: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_in_rules(89). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H21: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= 4294967295 -S- Applied substitution rule skein_512_in_rules(94). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H6: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= spark__unsigned__u64__last) New H22: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= 0 -S- Applied substitution rule skein_512_in_rules(95). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H23: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= 18446744073709551615 New H6: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_in_rules(105). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H9: fld_hash_bit_len(fld_h(ctx)) >= 0 -S- Applied substitution rule skein_512_in_rules(106). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H10: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 -S- Applied substitution rule skein_512_in_rules(115). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H6: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_in_rules(116). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H6: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(ctx), [i___1]) and element(fld_x(ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule skein_512_in_rules(125). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H5: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) -S- Applied substitution rule skein_512_in_rules(126). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. 
New H5: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(ctx), [i___2]) and element(fld_b(ctx), [i___2]) <= 255) --- Eliminated hypothesis H1 (true-hypothesis). --- Eliminated hypothesis H2 (true-hypothesis). --- Eliminated hypothesis H11 (true-hypothesis). --- Eliminated hypothesis H12 (true-hypothesis). --- Eliminated hypothesis H15 (true-hypothesis). --- Eliminated hypothesis H28 (duplicate of H7). --- Eliminated hypothesis H25 (duplicate of H10). --- Eliminated hypothesis H7 (redundant, given H27). --- Eliminated hypothesis H8 (redundant, given H27). --- Eliminated hypothesis H9 (redundant, given H24). --- Eliminated hypothesis H29 (redundant, given H27). -S- Eliminated hypothesis H26. This was achieved by replacing all occurrences of hashbitlen by: fld_hash_bit_len(fld_h(ctx)). New H3: fld_hash_bit_len(fld_h(ctx)) >= 1 New H4: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 New C3: hash_bit_len_of(ctx) = fld_hash_bit_len(fld_h(ctx)) +++ New H30: integer__size >= 0 +++ New H31: natural__size >= 0 +++ New H32: spark__unsigned__u6__size >= 0 +++ New H33: spark__unsigned__u7__size >= 0 +++ New H34: spark__unsigned__byte__size >= 0 +++ New H35: spark__unsigned__u16__size >= 0 +++ New H36: spark__unsigned__u32__size >= 0 +++ New H37: spark__unsigned__u64__size >= 0 +++ New H38: spark__crypto__word_count_t__size >= 0 +++ New H39: hash_bit_length__size >= 0 +++ New H40: initialized_hash_bit_length__size >= 0 +++ New H41: skein_512_state_words_index__size >= 0 +++ New H42: skein_512_block_bytes_count__size >= 0 +++ New H43: skein_512_block_bytes_index__size >= 0 +++ New H44: skein_512_state_bytes_index__size >= 0 +++ New H45: positive_block_512_count_t__size >= 0 +++ New H46: skein_512_context__size >= 0 +++ New H47: context_header__size >= 0 +++ New H48: 0 <= fld_byte_count(fld_h(null_skein_512_context)) +++ New H49: fld_byte_count(fld_h(null_skein_512_context)) <= 2147483647 +++ New H50: 0 <= fld_field_type(fld_tweak_words(fld_h( null_skein_512_context))) +++ New H51: fld_field_type(fld_tweak_words(fld_h(null_skein_512_context) )) <= 63 +++ New H52: 0 <= fld_tree_level(fld_tweak_words(fld_h( null_skein_512_context))) +++ New H53: fld_tree_level(fld_tweak_words(fld_h(null_skein_512_context) )) <= 127 +++ New H54: 0 <= fld_reserved(fld_tweak_words(fld_h( null_skein_512_context))) +++ New H55: fld_reserved(fld_tweak_words(fld_h(null_skein_512_context))) <= 65535 +++ New H56: 0 <= fld_byte_count_msb(fld_tweak_words(fld_h( null_skein_512_context))) +++ New H57: fld_byte_count_msb(fld_tweak_words(fld_h( null_skein_512_context))) <= 4294967295 +++ New H58: 0 <= fld_byte_count_lsb(fld_tweak_words(fld_h( null_skein_512_context))) +++ New H59: fld_byte_count_lsb(fld_tweak_words(fld_h( null_skein_512_context))) <= 18446744073709551615 +++ New H60: 0 <= fld_hash_bit_len(fld_h(null_skein_512_context)) +++ New H61: fld_hash_bit_len(fld_h(null_skein_512_context)) <= 2147483640 *** Proved C4: byte_count_of(ctx) = 0 This was achieved by applying the rewrite rule skein_rules(1) [from rulefile skein.rlu] to rewrite this conclusion to: C4: fld_byte_count(fld_h(ctx)) = 0 This rule could be applied because its side-conditions hold, as follows: <<< From H27, proved: fld_byte_count(fld_h(ctx)) = 0 <<< Immediate condition checktype(ctx, skein_512_context) evaluated successfully *** Proved C1: hash_bit_len_of(ctx) >= 1 This was achieved by applying the rewrite rule skein_rules(2) [from rulefile skein.rlu] to rewrite this conclusion to: C1: fld_hash_bit_len(fld_h(ctx)) >= 1 This rule could be 
applied because its side-conditions hold, as follows: <<< From H3, proved: fld_hash_bit_len(fld_h(ctx)) >= 1 <<< Immediate condition checktype(ctx, skein_512_context) evaluated successfully *** Proved C2: hash_bit_len_of(ctx) <= 2147483640 This was achieved by applying the rewrite rule skein_rules(2) [from rulefile skein.rlu] to rewrite this conclusion to: C2: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 This rule could be applied because its side-conditions hold, as follows: <<< From H4, proved: fld_hash_bit_len(fld_h(ctx)) <= 2147483640 <<< Immediate condition checktype(ctx, skein_512_context) evaluated successfully *** Proved C5: byte_count_of(ctx) >= 0 This was achieved by applying the rewrite rule skein_rules(1) [from rulefile skein.rlu] to rewrite this conclusion to: C5: fld_byte_count(fld_h(ctx)) >= 0 This rule could be applied because its side-conditions hold, as follows: <<< From H7, proved: fld_byte_count(fld_h(ctx)) >= 0 <<< Immediate condition checktype(ctx, skein_512_context) evaluated successfully *** Proved C6: byte_count_of(ctx) <= 64 This was achieved by applying the rewrite rule skein_rules(1) [from rulefile skein.rlu] to rewrite this conclusion to: C6: fld_byte_count(fld_h(ctx)) <= 64 This rule could be applied because its side-conditions hold, as follows: <<< From H29, proved: fld_byte_count(fld_h(ctx)) <= 64 <<< Immediate condition checktype(ctx, skein_512_context) evaluated successfully *** Proved C3: hash_bit_len_of(ctx) = fld_hash_bit_len(fld_h(ctx)) This was achieved by applying the rewrite rule skein_rules(2) [from rulefile skein.rlu] to rewrite this conclusion to: C3: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h(ctx)) This rule could be applied because its side-conditions hold, as follows: <<< By simple reasoning, proved: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h(ctx)) <<< Immediate condition checktype(ctx, skein_512_context) evaluated successfully *** PROVED VC. VCN 15: Summary of user rule application. FIL skein.rlu RUL skein_rules(1) CON 4, 5, 6 RUL skein_rules(2) CON 1, 2, 3 OVR Overall summary of VCs using user rules. 
FIL skein.rlu RUL skein_rules(1) VCS 15 RUL skein_rules(2) VCS 15 spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_start_new_type.vsm0000644000175000017500000000007011712765060030131 0ustar eugeneugenskein_start_new_type,0,0,0,0,0,0,0,-nan,-nan,-nan,-nan, spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/get_64_lsb_first.vlg0000644000175000017500000000327011712765060027020 0ustar eugeneugen Non-option args: get_64_lsb_first Option args: -fuse-concls \ -decls=[SUPRESSED IN PLAIN MODE] \ -unique-working-files \ -rules=[SUPRESSED IN PLAIN MODE] \ -rules=[SUPRESSED IN PLAIN MODE] \ -elim-enums \ -ground-eval-exp \ -abstract-exp \ -abstract-divmod \ -utick \ -gtick \ -longtick \ -echo-final-stats \ -csv-reports-include-unit-kind \ -level=warning \ -bit-type \ -bit-type-bool-eq-to-iff \ -refine-types \ -refine-int-subrange-type \ -abstract-arrays-records-late \ -elim-array-constructors \ -add-array-select-box-update-axioms \ -abstract-array-box-updates \ -add-array-select-update-axioms \ -abstract-array-select-updates \ -abstract-array-types \ -abstract-record-types \ -abstract-bit-ops \ -abstract-bit-valued-eqs \ -abstract-bit-valued-int-le \ -elim-bit-type-and-consts \ -abstract-reals \ -lift-quants \ -strip-quantifier-patterns \ -elim-type-aliases \ -interface-mode=smtlib \ -refine-bit-type-as-int-subtype \ -refine-bit-eq-equiv \ -elim-record-constructors \ -add-record-select-update-axioms \ -abstract-record-selects-updates \ -logic=AUFNIRA \ -report=get_64_lsb_first \ -prover-command=[SUPRESSED IN PLAIN MODE] \ -siv \ -plain \ -read-unit-rlu-files \ -read-directory-rlu-files \ -delete-rules-with-undeclared-ids \ Total ERROR messages: 0 Total WARNING messages: 0 Summary Stats: true: 3 (100.0%) unproven: 0 ( 0.0%) error: 0 ( 0.0%) total: 3 spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block.vcg0000644000175000017500000022721511712513676030273 0ustar eugeneugen ******************************************************* Semantic Analysis of SPARK Text Examiner Pro Edition ******************************************************* procedure Skein.Skein_512_Process_Block For path(s) from start to run-time check associated with statement of line 620: procedure_skein_512_process_block_1. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: block__index__subtype__1__first = 0 . H6: starting_offset + (block_count - 1) * skein_512_block_bytes_c + 63 <= block__index__subtype__1__last . H7: starting_offset + 63 <= block__index__subtype__1__last . H8: block__index__subtype__1__last <= natural__last . H9: starting_offset <= natural__last - 63 . H10: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H11: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H12: fld_byte_count(fld_h(ctx)) >= natural__first . H13: fld_byte_count(fld_h(ctx)) <= natural__last . 
H14: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H15: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H16: true . H17: true . H18: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H19: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H20: true . H21: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H23: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H24: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H25: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H27: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H29: for_all(i___1: integer, ((i___1 >= block__index__subtype__1__first) and (i___1 <= block__index__subtype__1__last)) -> ((element( block, [i___1]) >= spark__unsigned__byte__first) and (element( block, [i___1]) <= spark__unsigned__byte__last))) . H30: starting_offset >= natural__first . H31: starting_offset <= natural__last . H32: block_count >= positive_block_512_count_t__first . H33: block_count <= positive_block_512_count_t__last . H34: byte_count_add >= natural__first . H35: byte_count_add <= natural__last . -> C1: starting_offset >= natural__first . C2: starting_offset <= natural__last . For path(s) from start to run-time check associated with statement of line 621: procedure_skein_512_process_block_2. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: block__index__subtype__1__first = 0 . H6: starting_offset + (block_count - 1) * skein_512_block_bytes_c + 63 <= block__index__subtype__1__last . H7: starting_offset + 63 <= block__index__subtype__1__last . H8: block__index__subtype__1__last <= natural__last . H9: starting_offset <= natural__last - 63 . H10: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H11: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H12: fld_byte_count(fld_h(ctx)) >= natural__first . H13: fld_byte_count(fld_h(ctx)) <= natural__last . H14: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H15: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H16: true . H17: true . H18: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H19: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H20: true . H21: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H23: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . 
H24: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H25: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H27: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H29: for_all(i___1: integer, ((i___1 >= block__index__subtype__1__first) and (i___1 <= block__index__subtype__1__last)) -> ((element( block, [i___1]) >= spark__unsigned__byte__first) and (element( block, [i___1]) <= spark__unsigned__byte__last))) . H30: starting_offset >= natural__first . H31: starting_offset <= natural__last . H32: block_count >= positive_block_512_count_t__first . H33: block_count <= positive_block_512_count_t__last . H34: byte_count_add >= natural__first . H35: byte_count_add <= natural__last . H36: starting_offset >= natural__first . H37: starting_offset <= natural__last . -> C1: 1 >= positive_block_512_count_t__first . C2: 1 <= positive_block_512_count_t__last . For path(s) from start to assertion of line 624: procedure_skein_512_process_block_3. H1: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . H2: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . H3: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . H4: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . H5: block__index__subtype__1__first = 0 . H6: starting_offset + (block_count - 1) * skein_512_block_bytes_c + 63 <= block__index__subtype__1__last . H7: starting_offset + 63 <= block__index__subtype__1__last . H8: block__index__subtype__1__last <= natural__last . H9: starting_offset <= natural__last - 63 . H10: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H11: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H12: fld_byte_count(fld_h(ctx)) >= natural__first . H13: fld_byte_count(fld_h(ctx)) <= natural__last . H14: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H15: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H16: true . H17: true . H18: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H19: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H20: true . H21: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H23: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H24: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H25: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H27: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . 
H29: for_all(i___1: integer, ((i___1 >= block__index__subtype__1__first) and (i___1 <= block__index__subtype__1__last)) -> ((element( block, [i___1]) >= spark__unsigned__byte__first) and (element( block, [i___1]) <= spark__unsigned__byte__last))) . H30: starting_offset >= natural__first . H31: starting_offset <= natural__last . H32: block_count >= positive_block_512_count_t__first . H33: block_count <= positive_block_512_count_t__last . H34: byte_count_add >= natural__first . H35: byte_count_add <= natural__last . H36: starting_offset >= natural__first . H37: starting_offset <= natural__last . H38: 1 >= positive_block_512_count_t__first . H39: 1 <= positive_block_512_count_t__last . -> C1: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h( ctx)) . C2: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h( ctx)) . C3: 1 >= 1 . C4: 1 <= block_count . C5: starting_offset = starting_offset + (1 - 1) * skein_512_block_bytes_c . C6: starting_offset + 63 <= block__index__subtype__1__last . C7: starting_offset + spark__crypto__i8__last * 8 <= natural__last . C8: starting_offset + (block_count - 1) * skein_512_block_bytes_c + 63 <= block__index__subtype__1__last . C9: block__index__subtype__1__last <= natural__last . C10: (1 < block_count) -> (starting_offset + skein_512_block_bytes_c <= natural__last) . C11: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . C12: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . C13: fld_byte_count(fld_h(ctx)) >= natural__first . C14: fld_byte_count(fld_h(ctx)) <= natural__last . C15: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . C16: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . C17: true . C18: true . C19: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . C20: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . C21: true . C22: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . C23: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . C24: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . C25: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . C26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . C27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . C28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . C29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . C30: for_all(i___1: integer, ((i___1 >= block__index__subtype__1__first) and (i___1 <= block__index__subtype__1__last)) -> ((element( block, [i___1]) >= spark__unsigned__byte__first) and (element( block, [i___1]) <= spark__unsigned__byte__last))) . C31: starting_offset >= natural__first . C32: starting_offset <= natural__last . C33: block_count >= positive_block_512_count_t__first . C34: block_count <= positive_block_512_count_t__last . C35: byte_count_add >= natural__first . C36: byte_count_add <= natural__last . C37: fld_hash_bit_len(fld_h(ctx)) >= initialized_hash_bit_length__first . 
C38: fld_hash_bit_len(fld_h(ctx)) <= initialized_hash_bit_length__last . C39: fld_byte_count(fld_h(ctx)) >= skein_512_block_bytes_count__first . C40: fld_byte_count(fld_h(ctx)) <= skein_512_block_bytes_count__last . C41: block__index__subtype__1__first = 0 . C42: starting_offset + (block_count - 1) * skein_512_block_bytes_c + 63 <= block__index__subtype__1__last . C43: starting_offset + 63 <= block__index__subtype__1__last . C44: block__index__subtype__1__last <= natural__last . C45: starting_offset <= natural__last - 63 . For path(s) from assertion of line 624 to assertion of line 624: procedure_skein_512_process_block_4. H1: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h( ctx~)) . H2: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h( ctx~)) . H3: j >= 1 . H4: j <= block_count . H5: src_offset = starting_offset + (j - 1) * skein_512_block_bytes_c . H6: src_offset + 63 <= block__index__subtype__1__last . H7: src_offset + spark__crypto__i8__last * 8 <= natural__last . H8: starting_offset + (block_count - 1) * skein_512_block_bytes_c + 63 <= block__index__subtype__1__last . H9: block__index__subtype__1__last <= natural__last . H10: (j < block_count) -> (src_offset + skein_512_block_bytes_c <= natural__last) . H11: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H12: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H13: fld_byte_count(fld_h(ctx)) >= natural__first . H14: fld_byte_count(fld_h(ctx)) <= natural__last . H15: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H16: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H17: true . H18: true . H19: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H20: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H21: true . H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H24: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H25: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H30: for_all(i___1: integer, ((i___1 >= block__index__subtype__1__first) and (i___1 <= block__index__subtype__1__last)) -> ((element( block, [i___1]) >= spark__unsigned__byte__first) and (element( block, [i___1]) <= spark__unsigned__byte__last))) . H31: starting_offset >= natural__first . H32: starting_offset <= natural__last . H33: block_count >= positive_block_512_count_t__first . H34: block_count <= positive_block_512_count_t__last . H35: byte_count_add >= natural__first . H36: byte_count_add <= natural__last . H37: fld_hash_bit_len(fld_h(ctx~)) >= initialized_hash_bit_length__first . H38: fld_hash_bit_len(fld_h(ctx~)) <= initialized_hash_bit_length__last . 
H39: fld_byte_count(fld_h(ctx~)) >= skein_512_block_bytes_count__first . H40: fld_byte_count(fld_h(ctx~)) <= skein_512_block_bytes_count__last . H41: block__index__subtype__1__first = 0 . H42: starting_offset + (block_count - 1) * skein_512_block_bytes_c + 63 <= block__index__subtype__1__last . H43: starting_offset + 63 <= block__index__subtype__1__last . H44: block__index__subtype__1__last <= natural__last . H45: starting_offset <= natural__last - 63 . H46: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H47: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H48: byte_count_add >= spark__unsigned__u64__first . H49: byte_count_add <= spark__unsigned__u64__last . H50: for_all(i___1: integer, ((i___1 >= spark__crypto__i9__first) and ( i___1 <= spark__crypto__i9__last)) -> ((element(ks__1, [ i___1]) >= spark__unsigned__u64__first) and (element(ks__1, [ i___1]) <= spark__unsigned__u64__last))) . H51: for_all(i___1: integer, ((i___1 >= spark__crypto__i3__first) and ( i___1 <= spark__crypto__i3__last)) -> ((element(ts__2, [ i___1]) >= spark__unsigned__u64__first) and (element(ts__2, [ i___1]) <= spark__unsigned__u64__last))) . H52: src_offset >= natural__first . H53: src_offset <= natural__last . H54: src_offset >= natural__first . H55: src_offset <= natural__last . H56: block__index__subtype__1__first = 0 . H57: spark__crypto__i8__first = 0 . H58: src_offset <= block__index__subtype__1__last . H59: src_offset + spark__crypto__i8__last * 8 + 7 >= block__index__subtype__1__first . H60: src_offset + spark__crypto__i8__last * 8 + 7 <= block__index__subtype__1__last . H61: src_offset + 7 <= block__index__subtype__1__last . H62: src_offset + spark__crypto__i8__last * 8 <= natural__last . H63: for_all(i_: integer, ((i_ >= spark__crypto__i8__first) and ( i_ <= spark__crypto__i8__last)) -> ((element(w__3, [ i_]) >= spark__unsigned__u64__first) and (element(w__3, [ i_]) <= spark__unsigned__u64__last))) . H64: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(w__3, [ i___1]) >= spark__unsigned__u64__first) and (element(w__3, [ i___1]) <= spark__unsigned__u64__last))) . H65: for_all(i_: integer, ((i_ >= spark__crypto__i8__first) and ( i_ <= spark__crypto__i8__last)) -> ((element(w__3, [ i_]) >= spark__unsigned__u64__first) and (element(w__3, [ i_]) <= spark__unsigned__u64__last))) . H66: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__4, [ i___1]) >= spark__unsigned__u64__first) and (element(x__4, [ i___1]) <= spark__unsigned__u64__last))) . H67: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__5, [ i___1]) >= spark__unsigned__u64__first) and (element(x__5, [ i___1]) <= spark__unsigned__u64__last))) . H68: fld_hash_bit_len(fld_h(ctx__6)) = fld_hash_bit_len(fld_h(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_byte_count_lsb( fld_tweak_words(fld_h(ctx)), (fld_byte_count_lsb( fld_tweak_words(fld_h(ctx))) + byte_count_add) mod interfaces__unsigned_64__modulus))))) . H69: fld_byte_count(fld_h(ctx__6)) = fld_byte_count(fld_h(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_byte_count_lsb( fld_tweak_words(fld_h(ctx)), (fld_byte_count_lsb( fld_tweak_words(fld_h(ctx))) + byte_count_add) mod interfaces__unsigned_64__modulus))))) . 
H70: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__6), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__6), [i___2]) <= spark__unsigned__byte__last))) . H71: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__6), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__6), [i___1]) <= spark__unsigned__u64__last))) . H72: fld_byte_count(fld_h(ctx__6)) >= natural__first . H73: fld_byte_count(fld_h(ctx__6)) <= natural__last . H74: fld_hash_bit_len(fld_h(ctx__6)) >= hash_bit_length__first . H75: fld_hash_bit_len(fld_h(ctx__6)) <= hash_bit_length__last . H76: true . H77: true . H78: fld_field_type(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u6__first . H79: fld_field_type(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u6__last . H80: true . H81: fld_tree_level(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u7__first . H82: fld_tree_level(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u7__last . H83: fld_reserved(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u16__first . H84: fld_reserved(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u16__last . H85: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u32__first . H86: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u32__last . H87: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u64__first . H88: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u64__last . H89: j >= positive_block_512_count_t__first . H90: j <= positive_block_512_count_t__last . H91: not (j >= block_count) . H92: j >= positive_block_512_count_t__first . H93: j <= positive_block_512_count_t__last . H94: j + 1 >= positive_block_512_count_t__first . H95: j + 1 <= positive_block_512_count_t__last . H96: src_offset >= natural__first . H97: src_offset <= natural__last . H98: src_offset + skein_512_block_bytes_c >= natural__first . H99: src_offset + skein_512_block_bytes_c <= natural__last . -> C1: fld_hash_bit_len(fld_h(upf_h(ctx__6, upf_tweak_words(fld_h( ctx__6), upf_first_block(fld_tweak_words(fld_h(ctx__6)), false))))) = fld_hash_bit_len(fld_h(ctx~)) . C2: fld_byte_count(fld_h(upf_h(ctx__6, upf_tweak_words(fld_h(ctx__6), upf_first_block(fld_tweak_words(fld_h(ctx__6)), false))))) = fld_byte_count(fld_h(ctx~)) . C3: j + 1 >= 1 . C4: j + 1 <= block_count . C5: src_offset + skein_512_block_bytes_c = starting_offset + (j + 1 - 1) * skein_512_block_bytes_c . C6: src_offset + skein_512_block_bytes_c + 63 <= block__index__subtype__1__last . C7: src_offset + skein_512_block_bytes_c + spark__crypto__i8__last * 8 <= natural__last . C8: starting_offset + (block_count - 1) * skein_512_block_bytes_c + 63 <= block__index__subtype__1__last . C9: block__index__subtype__1__last <= natural__last . C10: (j + 1 < block_count) -> (src_offset + skein_512_block_bytes_c + skein_512_block_bytes_c <= natural__last) . 
C11: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_h( ctx__6, upf_tweak_words(fld_h(ctx__6), upf_first_block( fld_tweak_words(fld_h(ctx__6)), false)))), [ i___2]) >= spark__unsigned__byte__first) and (element(fld_b( upf_h(ctx__6, upf_tweak_words(fld_h(ctx__6), upf_first_block( fld_tweak_words(fld_h(ctx__6)), false)))), [ i___2]) <= spark__unsigned__byte__last))) . C12: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_h( ctx__6, upf_tweak_words(fld_h(ctx__6), upf_first_block( fld_tweak_words(fld_h(ctx__6)), false)))), [ i___1]) >= spark__unsigned__u64__first) and (element(fld_x( upf_h(ctx__6, upf_tweak_words(fld_h(ctx__6), upf_first_block( fld_tweak_words(fld_h(ctx__6)), false)))), [ i___1]) <= spark__unsigned__u64__last))) . C13: fld_byte_count(fld_h(upf_h(ctx__6, upf_tweak_words(fld_h(ctx__6), upf_first_block(fld_tweak_words(fld_h(ctx__6)), false))))) >= natural__first . C14: fld_byte_count(fld_h(upf_h(ctx__6, upf_tweak_words(fld_h(ctx__6), upf_first_block(fld_tweak_words(fld_h(ctx__6)), false))))) <= natural__last . C15: fld_hash_bit_len(fld_h(upf_h(ctx__6, upf_tweak_words(fld_h( ctx__6), upf_first_block(fld_tweak_words(fld_h(ctx__6)), false))))) >= hash_bit_length__first . C16: fld_hash_bit_len(fld_h(upf_h(ctx__6, upf_tweak_words(fld_h( ctx__6), upf_first_block(fld_tweak_words(fld_h(ctx__6)), false))))) <= hash_bit_length__last . C17: true . C18: true . C19: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx__6, upf_tweak_words(fld_h(ctx__6), upf_first_block( fld_tweak_words(fld_h(ctx__6)), false)))))) >= spark__unsigned__u6__first . C20: fld_field_type(fld_tweak_words(fld_h(upf_h(ctx__6, upf_tweak_words(fld_h(ctx__6), upf_first_block( fld_tweak_words(fld_h(ctx__6)), false)))))) <= spark__unsigned__u6__last . C21: true . C22: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx__6, upf_tweak_words(fld_h(ctx__6), upf_first_block( fld_tweak_words(fld_h(ctx__6)), false)))))) >= spark__unsigned__u7__first . C23: fld_tree_level(fld_tweak_words(fld_h(upf_h(ctx__6, upf_tweak_words(fld_h(ctx__6), upf_first_block( fld_tweak_words(fld_h(ctx__6)), false)))))) <= spark__unsigned__u7__last . C24: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx__6, upf_tweak_words( fld_h(ctx__6), upf_first_block(fld_tweak_words(fld_h(ctx__6)), false)))))) >= spark__unsigned__u16__first . C25: fld_reserved(fld_tweak_words(fld_h(upf_h(ctx__6, upf_tweak_words( fld_h(ctx__6), upf_first_block(fld_tweak_words(fld_h(ctx__6)), false)))))) <= spark__unsigned__u16__last . C26: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx__6, upf_tweak_words(fld_h(ctx__6), upf_first_block( fld_tweak_words(fld_h(ctx__6)), false)))))) >= spark__unsigned__u32__first . C27: fld_byte_count_msb(fld_tweak_words(fld_h(upf_h(ctx__6, upf_tweak_words(fld_h(ctx__6), upf_first_block( fld_tweak_words(fld_h(ctx__6)), false)))))) <= spark__unsigned__u32__last . C28: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx__6, upf_tweak_words(fld_h(ctx__6), upf_first_block( fld_tweak_words(fld_h(ctx__6)), false)))))) >= spark__unsigned__u64__first . C29: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_h(ctx__6, upf_tweak_words(fld_h(ctx__6), upf_first_block( fld_tweak_words(fld_h(ctx__6)), false)))))) <= spark__unsigned__u64__last . 
C30: for_all(i___1: integer, ((i___1 >= block__index__subtype__1__first) and (i___1 <= block__index__subtype__1__last)) -> ((element( block, [i___1]) >= spark__unsigned__byte__first) and (element( block, [i___1]) <= spark__unsigned__byte__last))) . C31: starting_offset >= natural__first . C32: starting_offset <= natural__last . C33: block_count >= positive_block_512_count_t__first . C34: block_count <= positive_block_512_count_t__last . C35: byte_count_add >= natural__first . C36: byte_count_add <= natural__last . C37: fld_hash_bit_len(fld_h(ctx~)) >= initialized_hash_bit_length__first . C38: fld_hash_bit_len(fld_h(ctx~)) <= initialized_hash_bit_length__last . C39: fld_byte_count(fld_h(ctx~)) >= skein_512_block_bytes_count__first . C40: fld_byte_count(fld_h(ctx~)) <= skein_512_block_bytes_count__last . C41: block__index__subtype__1__first = 0 . C42: starting_offset + (block_count - 1) * skein_512_block_bytes_c + 63 <= block__index__subtype__1__last . C43: starting_offset + 63 <= block__index__subtype__1__last . C44: block__index__subtype__1__last <= natural__last . C45: starting_offset <= natural__last - 63 . For path(s) from assertion of line 624 to run-time check associated with statement of line 637: procedure_skein_512_process_block_5. H1: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h( ctx~)) . H2: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h( ctx~)) . H3: j >= 1 . H4: j <= block_count . H5: src_offset = starting_offset + (j - 1) * skein_512_block_bytes_c . H6: src_offset + 63 <= block__index__subtype__1__last . H7: src_offset + spark__crypto__i8__last * 8 <= natural__last . H8: starting_offset + (block_count - 1) * skein_512_block_bytes_c + 63 <= block__index__subtype__1__last . H9: block__index__subtype__1__last <= natural__last . H10: (j < block_count) -> (src_offset + skein_512_block_bytes_c <= natural__last) . H11: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H12: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H13: fld_byte_count(fld_h(ctx)) >= natural__first . H14: fld_byte_count(fld_h(ctx)) <= natural__last . H15: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H16: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H17: true . H18: true . H19: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H20: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H21: true . H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H24: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H25: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . 
H30: for_all(i___1: integer, ((i___1 >= block__index__subtype__1__first) and (i___1 <= block__index__subtype__1__last)) -> ((element( block, [i___1]) >= spark__unsigned__byte__first) and (element( block, [i___1]) <= spark__unsigned__byte__last))) . H31: starting_offset >= natural__first . H32: starting_offset <= natural__last . H33: block_count >= positive_block_512_count_t__first . H34: block_count <= positive_block_512_count_t__last . H35: byte_count_add >= natural__first . H36: byte_count_add <= natural__last . H37: fld_hash_bit_len(fld_h(ctx~)) >= initialized_hash_bit_length__first . H38: fld_hash_bit_len(fld_h(ctx~)) <= initialized_hash_bit_length__last . H39: fld_byte_count(fld_h(ctx~)) >= skein_512_block_bytes_count__first . H40: fld_byte_count(fld_h(ctx~)) <= skein_512_block_bytes_count__last . H41: block__index__subtype__1__first = 0 . H42: starting_offset + (block_count - 1) * skein_512_block_bytes_c + 63 <= block__index__subtype__1__last . H43: starting_offset + 63 <= block__index__subtype__1__last . H44: block__index__subtype__1__last <= natural__last . H45: starting_offset <= natural__last - 63 . -> C1: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . C2: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . C3: byte_count_add >= spark__unsigned__u64__first . C4: byte_count_add <= spark__unsigned__u64__last . For path(s) from assertion of line 624 to precondition check associated with statement of line 643: procedure_skein_512_process_block_6. H1: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h( ctx~)) . H2: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h( ctx~)) . H3: j >= 1 . H4: j <= block_count . H5: src_offset = starting_offset + (j - 1) * skein_512_block_bytes_c . H6: src_offset + 63 <= block__index__subtype__1__last . H7: src_offset + spark__crypto__i8__last * 8 <= natural__last . H8: starting_offset + (block_count - 1) * skein_512_block_bytes_c + 63 <= block__index__subtype__1__last . H9: block__index__subtype__1__last <= natural__last . H10: (j < block_count) -> (src_offset + skein_512_block_bytes_c <= natural__last) . H11: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H12: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H13: fld_byte_count(fld_h(ctx)) >= natural__first . H14: fld_byte_count(fld_h(ctx)) <= natural__last . H15: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H16: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H17: true . H18: true . H19: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H20: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H21: true . H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H24: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H25: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . 
H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H30: for_all(i___1: integer, ((i___1 >= block__index__subtype__1__first) and (i___1 <= block__index__subtype__1__last)) -> ((element( block, [i___1]) >= spark__unsigned__byte__first) and (element( block, [i___1]) <= spark__unsigned__byte__last))) . H31: starting_offset >= natural__first . H32: starting_offset <= natural__last . H33: block_count >= positive_block_512_count_t__first . H34: block_count <= positive_block_512_count_t__last . H35: byte_count_add >= natural__first . H36: byte_count_add <= natural__last . H37: fld_hash_bit_len(fld_h(ctx~)) >= initialized_hash_bit_length__first . H38: fld_hash_bit_len(fld_h(ctx~)) <= initialized_hash_bit_length__last . H39: fld_byte_count(fld_h(ctx~)) >= skein_512_block_bytes_count__first . H40: fld_byte_count(fld_h(ctx~)) <= skein_512_block_bytes_count__last . H41: block__index__subtype__1__first = 0 . H42: starting_offset + (block_count - 1) * skein_512_block_bytes_c + 63 <= block__index__subtype__1__last . H43: starting_offset + 63 <= block__index__subtype__1__last . H44: block__index__subtype__1__last <= natural__last . H45: starting_offset <= natural__last - 63 . H46: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H47: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H48: byte_count_add >= spark__unsigned__u64__first . H49: byte_count_add <= spark__unsigned__u64__last . H50: for_all(i___1: integer, ((i___1 >= spark__crypto__i9__first) and ( i___1 <= spark__crypto__i9__last)) -> ((element(ks__1, [ i___1]) >= spark__unsigned__u64__first) and (element(ks__1, [ i___1]) <= spark__unsigned__u64__last))) . H51: for_all(i___1: integer, ((i___1 >= spark__crypto__i3__first) and ( i___1 <= spark__crypto__i3__last)) -> ((element(ts__2, [ i___1]) >= spark__unsigned__u64__first) and (element(ts__2, [ i___1]) <= spark__unsigned__u64__last))) . H52: src_offset >= natural__first . H53: src_offset <= natural__last . -> C1: src_offset >= natural__first . C2: src_offset <= natural__last . C3: block__index__subtype__1__first = 0 . C4: spark__crypto__i8__first = 0 . C5: src_offset <= block__index__subtype__1__last . C6: src_offset + spark__crypto__i8__last * 8 + 7 >= block__index__subtype__1__first . C7: src_offset + spark__crypto__i8__last * 8 + 7 <= block__index__subtype__1__last . C8: src_offset + 7 <= block__index__subtype__1__last . C9: src_offset + spark__crypto__i8__last * 8 <= natural__last . For path(s) from assertion of line 624 to check associated with statement of line 647: procedure_skein_512_process_block_7. H1: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h( ctx~)) . H2: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h( ctx~)) . H3: j >= 1 . H4: j <= block_count . H5: src_offset = starting_offset + (j - 1) * skein_512_block_bytes_c . H6: src_offset + 63 <= block__index__subtype__1__last . H7: src_offset + spark__crypto__i8__last * 8 <= natural__last . H8: starting_offset + (block_count - 1) * skein_512_block_bytes_c + 63 <= block__index__subtype__1__last . H9: block__index__subtype__1__last <= natural__last . 
H10: (j < block_count) -> (src_offset + skein_512_block_bytes_c <= natural__last) . H11: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H12: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H13: fld_byte_count(fld_h(ctx)) >= natural__first . H14: fld_byte_count(fld_h(ctx)) <= natural__last . H15: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H16: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H17: true . H18: true . H19: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H20: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H21: true . H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H24: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H25: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H30: for_all(i___1: integer, ((i___1 >= block__index__subtype__1__first) and (i___1 <= block__index__subtype__1__last)) -> ((element( block, [i___1]) >= spark__unsigned__byte__first) and (element( block, [i___1]) <= spark__unsigned__byte__last))) . H31: starting_offset >= natural__first . H32: starting_offset <= natural__last . H33: block_count >= positive_block_512_count_t__first . H34: block_count <= positive_block_512_count_t__last . H35: byte_count_add >= natural__first . H36: byte_count_add <= natural__last . H37: fld_hash_bit_len(fld_h(ctx~)) >= initialized_hash_bit_length__first . H38: fld_hash_bit_len(fld_h(ctx~)) <= initialized_hash_bit_length__last . H39: fld_byte_count(fld_h(ctx~)) >= skein_512_block_bytes_count__first . H40: fld_byte_count(fld_h(ctx~)) <= skein_512_block_bytes_count__last . H41: block__index__subtype__1__first = 0 . H42: starting_offset + (block_count - 1) * skein_512_block_bytes_c + 63 <= block__index__subtype__1__last . H43: starting_offset + 63 <= block__index__subtype__1__last . H44: block__index__subtype__1__last <= natural__last . H45: starting_offset <= natural__last - 63 . H46: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H47: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H48: byte_count_add >= spark__unsigned__u64__first . H49: byte_count_add <= spark__unsigned__u64__last . H50: for_all(i___1: integer, ((i___1 >= spark__crypto__i9__first) and ( i___1 <= spark__crypto__i9__last)) -> ((element(ks__1, [ i___1]) >= spark__unsigned__u64__first) and (element(ks__1, [ i___1]) <= spark__unsigned__u64__last))) . 
H51: for_all(i___1: integer, ((i___1 >= spark__crypto__i3__first) and ( i___1 <= spark__crypto__i3__last)) -> ((element(ts__2, [ i___1]) >= spark__unsigned__u64__first) and (element(ts__2, [ i___1]) <= spark__unsigned__u64__last))) . H52: src_offset >= natural__first . H53: src_offset <= natural__last . H54: src_offset >= natural__first . H55: src_offset <= natural__last . H56: block__index__subtype__1__first = 0 . H57: spark__crypto__i8__first = 0 . H58: src_offset <= block__index__subtype__1__last . H59: src_offset + spark__crypto__i8__last * 8 + 7 >= block__index__subtype__1__first . H60: src_offset + spark__crypto__i8__last * 8 + 7 <= block__index__subtype__1__last . H61: src_offset + 7 <= block__index__subtype__1__last . H62: src_offset + spark__crypto__i8__last * 8 <= natural__last . H63: for_all(i_: integer, ((i_ >= spark__crypto__i8__first) and ( i_ <= spark__crypto__i8__last)) -> ((element(w__3, [ i_]) >= spark__unsigned__u64__first) and (element(w__3, [ i_]) <= spark__unsigned__u64__last))) . H64: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(w__3, [ i___1]) >= spark__unsigned__u64__first) and (element(w__3, [ i___1]) <= spark__unsigned__u64__last))) . -> C1: for_all(i_: integer, ((i_ >= spark__crypto__i8__first) and ( i_ <= spark__crypto__i8__last)) -> ((element(w__3, [ i_]) >= spark__unsigned__u64__first) and (element(w__3, [ i_]) <= spark__unsigned__u64__last))) . For path(s) from assertion of line 624 to run-time check associated with statement of line 661: procedure_skein_512_process_block_8. H1: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h( ctx~)) . H2: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h( ctx~)) . H3: j >= 1 . H4: j <= block_count . H5: src_offset = starting_offset + (j - 1) * skein_512_block_bytes_c . H6: src_offset + 63 <= block__index__subtype__1__last . H7: src_offset + spark__crypto__i8__last * 8 <= natural__last . H8: starting_offset + (block_count - 1) * skein_512_block_bytes_c + 63 <= block__index__subtype__1__last . H9: block__index__subtype__1__last <= natural__last . H10: (j < block_count) -> (src_offset + skein_512_block_bytes_c <= natural__last) . H11: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H12: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H13: fld_byte_count(fld_h(ctx)) >= natural__first . H14: fld_byte_count(fld_h(ctx)) <= natural__last . H15: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H16: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H17: true . H18: true . H19: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H20: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H21: true . H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H24: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H25: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . 
H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H30: for_all(i___1: integer, ((i___1 >= block__index__subtype__1__first) and (i___1 <= block__index__subtype__1__last)) -> ((element( block, [i___1]) >= spark__unsigned__byte__first) and (element( block, [i___1]) <= spark__unsigned__byte__last))) . H31: starting_offset >= natural__first . H32: starting_offset <= natural__last . H33: block_count >= positive_block_512_count_t__first . H34: block_count <= positive_block_512_count_t__last . H35: byte_count_add >= natural__first . H36: byte_count_add <= natural__last . H37: fld_hash_bit_len(fld_h(ctx~)) >= initialized_hash_bit_length__first . H38: fld_hash_bit_len(fld_h(ctx~)) <= initialized_hash_bit_length__last . H39: fld_byte_count(fld_h(ctx~)) >= skein_512_block_bytes_count__first . H40: fld_byte_count(fld_h(ctx~)) <= skein_512_block_bytes_count__last . H41: block__index__subtype__1__first = 0 . H42: starting_offset + (block_count - 1) * skein_512_block_bytes_c + 63 <= block__index__subtype__1__last . H43: starting_offset + 63 <= block__index__subtype__1__last . H44: block__index__subtype__1__last <= natural__last . H45: starting_offset <= natural__last - 63 . H46: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H47: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H48: byte_count_add >= spark__unsigned__u64__first . H49: byte_count_add <= spark__unsigned__u64__last . H50: for_all(i___1: integer, ((i___1 >= spark__crypto__i9__first) and ( i___1 <= spark__crypto__i9__last)) -> ((element(ks__1, [ i___1]) >= spark__unsigned__u64__first) and (element(ks__1, [ i___1]) <= spark__unsigned__u64__last))) . H51: for_all(i___1: integer, ((i___1 >= spark__crypto__i3__first) and ( i___1 <= spark__crypto__i3__last)) -> ((element(ts__2, [ i___1]) >= spark__unsigned__u64__first) and (element(ts__2, [ i___1]) <= spark__unsigned__u64__last))) . H52: src_offset >= natural__first . H53: src_offset <= natural__last . H54: src_offset >= natural__first . H55: src_offset <= natural__last . H56: block__index__subtype__1__first = 0 . H57: spark__crypto__i8__first = 0 . H58: src_offset <= block__index__subtype__1__last . H59: src_offset + spark__crypto__i8__last * 8 + 7 >= block__index__subtype__1__first . H60: src_offset + spark__crypto__i8__last * 8 + 7 <= block__index__subtype__1__last . H61: src_offset + 7 <= block__index__subtype__1__last . H62: src_offset + spark__crypto__i8__last * 8 <= natural__last . H63: for_all(i_: integer, ((i_ >= spark__crypto__i8__first) and ( i_ <= spark__crypto__i8__last)) -> ((element(w__3, [ i_]) >= spark__unsigned__u64__first) and (element(w__3, [ i_]) <= spark__unsigned__u64__last))) . H64: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(w__3, [ i___1]) >= spark__unsigned__u64__first) and (element(w__3, [ i___1]) <= spark__unsigned__u64__last))) . 
H65: for_all(i_: integer, ((i_ >= spark__crypto__i8__first) and ( i_ <= spark__crypto__i8__last)) -> ((element(w__3, [ i_]) >= spark__unsigned__u64__first) and (element(w__3, [ i_]) <= spark__unsigned__u64__last))) . H66: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__4, [ i___1]) >= spark__unsigned__u64__first) and (element(x__4, [ i___1]) <= spark__unsigned__u64__last))) . H67: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__5, [ i___1]) >= spark__unsigned__u64__first) and (element(x__5, [ i___1]) <= spark__unsigned__u64__last))) . H68: fld_hash_bit_len(fld_h(ctx__6)) = fld_hash_bit_len(fld_h(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_byte_count_lsb( fld_tweak_words(fld_h(ctx)), (fld_byte_count_lsb( fld_tweak_words(fld_h(ctx))) + byte_count_add) mod interfaces__unsigned_64__modulus))))) . H69: fld_byte_count(fld_h(ctx__6)) = fld_byte_count(fld_h(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_byte_count_lsb( fld_tweak_words(fld_h(ctx)), (fld_byte_count_lsb( fld_tweak_words(fld_h(ctx))) + byte_count_add) mod interfaces__unsigned_64__modulus))))) . H70: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__6), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__6), [i___2]) <= spark__unsigned__byte__last))) . H71: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__6), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__6), [i___1]) <= spark__unsigned__u64__last))) . H72: fld_byte_count(fld_h(ctx__6)) >= natural__first . H73: fld_byte_count(fld_h(ctx__6)) <= natural__last . H74: fld_hash_bit_len(fld_h(ctx__6)) >= hash_bit_length__first . H75: fld_hash_bit_len(fld_h(ctx__6)) <= hash_bit_length__last . H76: true . H77: true . H78: fld_field_type(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u6__first . H79: fld_field_type(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u6__last . H80: true . H81: fld_tree_level(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u7__first . H82: fld_tree_level(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u7__last . H83: fld_reserved(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u16__first . H84: fld_reserved(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u16__last . H85: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u32__first . H86: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u32__last . H87: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u64__first . H88: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u64__last . H89: j >= positive_block_512_count_t__first . H90: j <= positive_block_512_count_t__last . H91: not (j >= block_count) . H92: j >= positive_block_512_count_t__first . H93: j <= positive_block_512_count_t__last . -> C1: j + 1 >= positive_block_512_count_t__first . C2: j + 1 <= positive_block_512_count_t__last . For path(s) from assertion of line 624 to run-time check associated with statement of line 662: procedure_skein_512_process_block_9. H1: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h( ctx~)) . H2: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h( ctx~)) . H3: j >= 1 . H4: j <= block_count . H5: src_offset = starting_offset + (j - 1) * skein_512_block_bytes_c . 
H6: src_offset + 63 <= block__index__subtype__1__last . H7: src_offset + spark__crypto__i8__last * 8 <= natural__last . H8: starting_offset + (block_count - 1) * skein_512_block_bytes_c + 63 <= block__index__subtype__1__last . H9: block__index__subtype__1__last <= natural__last . H10: (j < block_count) -> (src_offset + skein_512_block_bytes_c <= natural__last) . H11: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H12: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H13: fld_byte_count(fld_h(ctx)) >= natural__first . H14: fld_byte_count(fld_h(ctx)) <= natural__last . H15: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H16: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H17: true . H18: true . H19: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H20: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H21: true . H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H24: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H25: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H30: for_all(i___1: integer, ((i___1 >= block__index__subtype__1__first) and (i___1 <= block__index__subtype__1__last)) -> ((element( block, [i___1]) >= spark__unsigned__byte__first) and (element( block, [i___1]) <= spark__unsigned__byte__last))) . H31: starting_offset >= natural__first . H32: starting_offset <= natural__last . H33: block_count >= positive_block_512_count_t__first . H34: block_count <= positive_block_512_count_t__last . H35: byte_count_add >= natural__first . H36: byte_count_add <= natural__last . H37: fld_hash_bit_len(fld_h(ctx~)) >= initialized_hash_bit_length__first . H38: fld_hash_bit_len(fld_h(ctx~)) <= initialized_hash_bit_length__last . H39: fld_byte_count(fld_h(ctx~)) >= skein_512_block_bytes_count__first . H40: fld_byte_count(fld_h(ctx~)) <= skein_512_block_bytes_count__last . H41: block__index__subtype__1__first = 0 . H42: starting_offset + (block_count - 1) * skein_512_block_bytes_c + 63 <= block__index__subtype__1__last . H43: starting_offset + 63 <= block__index__subtype__1__last . H44: block__index__subtype__1__last <= natural__last . H45: starting_offset <= natural__last - 63 . H46: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H47: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H48: byte_count_add >= spark__unsigned__u64__first . H49: byte_count_add <= spark__unsigned__u64__last . 
H50: for_all(i___1: integer, ((i___1 >= spark__crypto__i9__first) and ( i___1 <= spark__crypto__i9__last)) -> ((element(ks__1, [ i___1]) >= spark__unsigned__u64__first) and (element(ks__1, [ i___1]) <= spark__unsigned__u64__last))) . H51: for_all(i___1: integer, ((i___1 >= spark__crypto__i3__first) and ( i___1 <= spark__crypto__i3__last)) -> ((element(ts__2, [ i___1]) >= spark__unsigned__u64__first) and (element(ts__2, [ i___1]) <= spark__unsigned__u64__last))) . H52: src_offset >= natural__first . H53: src_offset <= natural__last . H54: src_offset >= natural__first . H55: src_offset <= natural__last . H56: block__index__subtype__1__first = 0 . H57: spark__crypto__i8__first = 0 . H58: src_offset <= block__index__subtype__1__last . H59: src_offset + spark__crypto__i8__last * 8 + 7 >= block__index__subtype__1__first . H60: src_offset + spark__crypto__i8__last * 8 + 7 <= block__index__subtype__1__last . H61: src_offset + 7 <= block__index__subtype__1__last . H62: src_offset + spark__crypto__i8__last * 8 <= natural__last . H63: for_all(i_: integer, ((i_ >= spark__crypto__i8__first) and ( i_ <= spark__crypto__i8__last)) -> ((element(w__3, [ i_]) >= spark__unsigned__u64__first) and (element(w__3, [ i_]) <= spark__unsigned__u64__last))) . H64: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(w__3, [ i___1]) >= spark__unsigned__u64__first) and (element(w__3, [ i___1]) <= spark__unsigned__u64__last))) . H65: for_all(i_: integer, ((i_ >= spark__crypto__i8__first) and ( i_ <= spark__crypto__i8__last)) -> ((element(w__3, [ i_]) >= spark__unsigned__u64__first) and (element(w__3, [ i_]) <= spark__unsigned__u64__last))) . H66: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__4, [ i___1]) >= spark__unsigned__u64__first) and (element(x__4, [ i___1]) <= spark__unsigned__u64__last))) . H67: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__5, [ i___1]) >= spark__unsigned__u64__first) and (element(x__5, [ i___1]) <= spark__unsigned__u64__last))) . H68: fld_hash_bit_len(fld_h(ctx__6)) = fld_hash_bit_len(fld_h(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_byte_count_lsb( fld_tweak_words(fld_h(ctx)), (fld_byte_count_lsb( fld_tweak_words(fld_h(ctx))) + byte_count_add) mod interfaces__unsigned_64__modulus))))) . H69: fld_byte_count(fld_h(ctx__6)) = fld_byte_count(fld_h(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_byte_count_lsb( fld_tweak_words(fld_h(ctx)), (fld_byte_count_lsb( fld_tweak_words(fld_h(ctx))) + byte_count_add) mod interfaces__unsigned_64__modulus))))) . H70: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__6), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__6), [i___2]) <= spark__unsigned__byte__last))) . H71: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__6), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__6), [i___1]) <= spark__unsigned__u64__last))) . H72: fld_byte_count(fld_h(ctx__6)) >= natural__first . H73: fld_byte_count(fld_h(ctx__6)) <= natural__last . H74: fld_hash_bit_len(fld_h(ctx__6)) >= hash_bit_length__first . H75: fld_hash_bit_len(fld_h(ctx__6)) <= hash_bit_length__last . H76: true . H77: true . 
H78: fld_field_type(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u6__first . H79: fld_field_type(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u6__last . H80: true . H81: fld_tree_level(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u7__first . H82: fld_tree_level(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u7__last . H83: fld_reserved(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u16__first . H84: fld_reserved(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u16__last . H85: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u32__first . H86: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u32__last . H87: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u64__first . H88: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u64__last . H89: j >= positive_block_512_count_t__first . H90: j <= positive_block_512_count_t__last . H91: not (j >= block_count) . H92: j >= positive_block_512_count_t__first . H93: j <= positive_block_512_count_t__last . H94: j + 1 >= positive_block_512_count_t__first . H95: j + 1 <= positive_block_512_count_t__last . H96: src_offset >= natural__first . H97: src_offset <= natural__last . -> C1: src_offset + skein_512_block_bytes_c >= natural__first . C2: src_offset + skein_512_block_bytes_c <= natural__last . For path(s) from assertion of line 624 to finish: procedure_skein_512_process_block_10. H1: fld_hash_bit_len(fld_h(ctx)) = fld_hash_bit_len(fld_h( ctx~)) . H2: fld_byte_count(fld_h(ctx)) = fld_byte_count(fld_h( ctx~)) . H3: j >= 1 . H4: j <= block_count . H5: src_offset = starting_offset + (j - 1) * skein_512_block_bytes_c . H6: src_offset + 63 <= block__index__subtype__1__last . H7: src_offset + spark__crypto__i8__last * 8 <= natural__last . H8: starting_offset + (block_count - 1) * skein_512_block_bytes_c + 63 <= block__index__subtype__1__last . H9: block__index__subtype__1__last <= natural__last . H10: (j < block_count) -> (src_offset + skein_512_block_bytes_c <= natural__last) . H11: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx), [i___2]) <= spark__unsigned__byte__last))) . H12: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx), [i___1]) <= spark__unsigned__u64__last))) . H13: fld_byte_count(fld_h(ctx)) >= natural__first . H14: fld_byte_count(fld_h(ctx)) <= natural__last . H15: fld_hash_bit_len(fld_h(ctx)) >= hash_bit_length__first . H16: fld_hash_bit_len(fld_h(ctx)) <= hash_bit_length__last . H17: true . H18: true . H19: fld_field_type(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u6__first . H20: fld_field_type(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u6__last . H21: true . H22: fld_tree_level(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u7__first . H23: fld_tree_level(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u7__last . H24: fld_reserved(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u16__first . H25: fld_reserved(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u16__last . H26: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u32__first . H27: fld_byte_count_msb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u32__last . 
H28: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) >= spark__unsigned__u64__first . H29: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) <= spark__unsigned__u64__last . H30: for_all(i___1: integer, ((i___1 >= block__index__subtype__1__first) and (i___1 <= block__index__subtype__1__last)) -> ((element( block, [i___1]) >= spark__unsigned__byte__first) and (element( block, [i___1]) <= spark__unsigned__byte__last))) . H31: starting_offset >= natural__first . H32: starting_offset <= natural__last . H33: block_count >= positive_block_512_count_t__first . H34: block_count <= positive_block_512_count_t__last . H35: byte_count_add >= natural__first . H36: byte_count_add <= natural__last . H37: fld_hash_bit_len(fld_h(ctx~)) >= initialized_hash_bit_length__first . H38: fld_hash_bit_len(fld_h(ctx~)) <= initialized_hash_bit_length__last . H39: fld_byte_count(fld_h(ctx~)) >= skein_512_block_bytes_count__first . H40: fld_byte_count(fld_h(ctx~)) <= skein_512_block_bytes_count__last . H41: block__index__subtype__1__first = 0 . H42: starting_offset + (block_count - 1) * skein_512_block_bytes_c + 63 <= block__index__subtype__1__last . H43: starting_offset + 63 <= block__index__subtype__1__last . H44: block__index__subtype__1__last <= natural__last . H45: starting_offset <= natural__last - 63 . H46: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod interfaces__unsigned_64__modulus >= spark__unsigned__u64__first . H47: (fld_byte_count_lsb(fld_tweak_words(fld_h(ctx))) + byte_count_add) mod interfaces__unsigned_64__modulus <= spark__unsigned__u64__last . H48: byte_count_add >= spark__unsigned__u64__first . H49: byte_count_add <= spark__unsigned__u64__last . H50: for_all(i___1: integer, ((i___1 >= spark__crypto__i9__first) and ( i___1 <= spark__crypto__i9__last)) -> ((element(ks__1, [ i___1]) >= spark__unsigned__u64__first) and (element(ks__1, [ i___1]) <= spark__unsigned__u64__last))) . H51: for_all(i___1: integer, ((i___1 >= spark__crypto__i3__first) and ( i___1 <= spark__crypto__i3__last)) -> ((element(ts__2, [ i___1]) >= spark__unsigned__u64__first) and (element(ts__2, [ i___1]) <= spark__unsigned__u64__last))) . H52: src_offset >= natural__first . H53: src_offset <= natural__last . H54: src_offset >= natural__first . H55: src_offset <= natural__last . H56: block__index__subtype__1__first = 0 . H57: spark__crypto__i8__first = 0 . H58: src_offset <= block__index__subtype__1__last . H59: src_offset + spark__crypto__i8__last * 8 + 7 >= block__index__subtype__1__first . H60: src_offset + spark__crypto__i8__last * 8 + 7 <= block__index__subtype__1__last . H61: src_offset + 7 <= block__index__subtype__1__last . H62: src_offset + spark__crypto__i8__last * 8 <= natural__last . H63: for_all(i_: integer, ((i_ >= spark__crypto__i8__first) and ( i_ <= spark__crypto__i8__last)) -> ((element(w__3, [ i_]) >= spark__unsigned__u64__first) and (element(w__3, [ i_]) <= spark__unsigned__u64__last))) . H64: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(w__3, [ i___1]) >= spark__unsigned__u64__first) and (element(w__3, [ i___1]) <= spark__unsigned__u64__last))) . H65: for_all(i_: integer, ((i_ >= spark__crypto__i8__first) and ( i_ <= spark__crypto__i8__last)) -> ((element(w__3, [ i_]) >= spark__unsigned__u64__first) and (element(w__3, [ i_]) <= spark__unsigned__u64__last))) . 
H66: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__4, [ i___1]) >= spark__unsigned__u64__first) and (element(x__4, [ i___1]) <= spark__unsigned__u64__last))) . H67: for_all(i___1: integer, ((i___1 >= spark__crypto__i8__first) and ( i___1 <= spark__crypto__i8__last)) -> ((element(x__5, [ i___1]) >= spark__unsigned__u64__first) and (element(x__5, [ i___1]) <= spark__unsigned__u64__last))) . H68: fld_hash_bit_len(fld_h(ctx__6)) = fld_hash_bit_len(fld_h(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_byte_count_lsb( fld_tweak_words(fld_h(ctx)), (fld_byte_count_lsb( fld_tweak_words(fld_h(ctx))) + byte_count_add) mod interfaces__unsigned_64__modulus))))) . H69: fld_byte_count(fld_h(ctx__6)) = fld_byte_count(fld_h(upf_h( ctx, upf_tweak_words(fld_h(ctx), upf_byte_count_lsb( fld_tweak_words(fld_h(ctx)), (fld_byte_count_lsb( fld_tweak_words(fld_h(ctx))) + byte_count_add) mod interfaces__unsigned_64__modulus))))) . H70: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( ctx__6), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(ctx__6), [i___2]) <= spark__unsigned__byte__last))) . H71: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( ctx__6), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(ctx__6), [i___1]) <= spark__unsigned__u64__last))) . H72: fld_byte_count(fld_h(ctx__6)) >= natural__first . H73: fld_byte_count(fld_h(ctx__6)) <= natural__last . H74: fld_hash_bit_len(fld_h(ctx__6)) >= hash_bit_length__first . H75: fld_hash_bit_len(fld_h(ctx__6)) <= hash_bit_length__last . H76: true . H77: true . H78: fld_field_type(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u6__first . H79: fld_field_type(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u6__last . H80: true . H81: fld_tree_level(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u7__first . H82: fld_tree_level(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u7__last . H83: fld_reserved(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u16__first . H84: fld_reserved(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u16__last . H85: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u32__first . H86: fld_byte_count_msb(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u32__last . H87: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__6))) >= spark__unsigned__u64__first . H88: fld_byte_count_lsb(fld_tweak_words(fld_h(ctx__6))) <= spark__unsigned__u64__last . H89: j >= positive_block_512_count_t__first . H90: j <= positive_block_512_count_t__last . H91: j >= block_count . -> C1: fld_hash_bit_len(fld_h(upf_h(ctx__6, upf_tweak_words(fld_h( ctx__6), upf_first_block(fld_tweak_words(fld_h(ctx__6)), false))))) >= initialized_hash_bit_length__first . C2: fld_hash_bit_len(fld_h(upf_h(ctx__6, upf_tweak_words(fld_h( ctx__6), upf_first_block(fld_tweak_words(fld_h(ctx__6)), false))))) <= initialized_hash_bit_length__last . C3: fld_hash_bit_len(fld_h(upf_h(ctx__6, upf_tweak_words(fld_h( ctx__6), upf_first_block(fld_tweak_words(fld_h(ctx__6)), false))))) = fld_hash_bit_len(fld_h(ctx~)) . C4: fld_byte_count(fld_h(upf_h(ctx__6, upf_tweak_words(fld_h(ctx__6), upf_first_block(fld_tweak_words(fld_h(ctx__6)), false))))) >= skein_512_block_bytes_count__first . 
C5: fld_byte_count(fld_h(upf_h(ctx__6, upf_tweak_words(fld_h(ctx__6), upf_first_block(fld_tweak_words(fld_h(ctx__6)), false))))) <= skein_512_block_bytes_count__last . C6: fld_byte_count(fld_h(upf_h(ctx__6, upf_tweak_words(fld_h(ctx__6), upf_first_block(fld_tweak_words(fld_h(ctx__6)), false))))) = fld_byte_count(fld_h(ctx~)) . spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_start_new_type.slg0000644000175000017500000001011311712513676030114 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_Start_New_Type RRS The following user defined rule files have been read: &&& skein.rlu SEM No semantic checks are performed on the rules. @@@@@@@@@@ VC: procedure_skein_start_new_type_1. @@@@@@@@@@ *** Proved C1: field_type >= spark__unsigned__u6__first using hypothesis H2. *** Proved C2: field_type <= spark__unsigned__u6__last using hypothesis H3. -S- Applied substitution rule skein_start__rules(39). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H15: fld_tree_level(fld_tweak_words(ctx)) >= 0 New C3: true -S- Applied substitution rule skein_start__rules(40). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H16: fld_tree_level(fld_tweak_words(ctx)) <= 127 New C4: true -S- Applied substitution rule skein_start__rules(45). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H17: fld_reserved(fld_tweak_words(ctx)) >= 0 New C5: true -S- Applied substitution rule skein_start__rules(46). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H18: fld_reserved(fld_tweak_words(ctx)) <= 65535 New C6: true -S- Applied substitution rule skein_start__rules(51). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H19: fld_byte_count_msb(fld_tweak_words(ctx)) >= 0 New C7: true -S- Applied substitution rule skein_start__rules(52). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H20: fld_byte_count_msb(fld_tweak_words(ctx)) <= 4294967295 New C8: true -S- Applied substitution rule skein_start__rules(57). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H21: fld_byte_count_lsb(fld_tweak_words(ctx)) >= 0 New C9: true -S- Applied substitution rule skein_start__rules(58). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H22: fld_byte_count_lsb(fld_tweak_words(ctx)) <= 18446744073709551615 New C10: true *** Proved C3: true *** Proved C4: true *** Proved C5: true *** Proved C6: true *** Proved C7: true *** Proved C8: true *** Proved C9: true *** Proved C10: true *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_start_new_type_2. @@@@@@@@@@ --- Hypothesis H23 has been replaced by "true". (It is already present, as H2) . --- Hypothesis H24 has been replaced by "true". (It is already present, as H3) . -S- Applied substitution rule skein_start__rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H6: fld_byte_count(ctx) >= 0 New C1: true -S- Applied substitution rule skein_start__rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. 
New H7: fld_byte_count(ctx) <= 2147483647 New C2: true *** Proved C1: true *** Proved C2: true *** PROVED VC. @@@@@@@@@@ VC: procedure_skein_start_new_type_3. @@@@@@@@@@ --- Hypothesis H23 has been replaced by "true". (It is already present, as H2) . --- Hypothesis H24 has been replaced by "true". (It is already present, as H3) . %%% Simplified C1 on reading formula in, to give: %%% C1: true %%% Simplified C2 on reading formula in, to give: %%% C2: true %%% Simplified C3 on reading formula in, to give: %%% C3: true *** Proved C1: true *** Proved C2: true *** Proved C3: true *** PROVED VC. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_init.rls0000644000175000017500000003273011712513676026423 0ustar eugeneugen /*********************************************************/ /*Proof Rule Declarations*/ /*Examiner Pro Edition*/ /*********************************************************/ /*procedure Skein.Skein_512_Init*/ rule_family skein_512_in_rules: X requires [X:any] & X <= Y requires [X:ire, Y:ire] & X >= Y requires [X:ire, Y:ire]. skein_512_in_rules(1): skein_512_block_bytes_c may_be_replaced_by 64. skein_512_in_rules(2): skein_block_type_cfg may_be_replaced_by 4. skein_512_in_rules(3): skein_block_type_msg may_be_replaced_by 48. skein_512_in_rules(4): spark__unsigned__u64__first <= fld_byte_count_lsb(fld_tweak_words(fld_h(null_skein_512_context))) may_be_deduced. skein_512_in_rules(5): fld_byte_count_lsb(fld_tweak_words(fld_h(null_skein_512_context))) <= spark__unsigned__u64__last may_be_deduced. skein_512_in_rules(6): spark__unsigned__u32__first <= fld_byte_count_msb(fld_tweak_words(fld_h(null_skein_512_context))) may_be_deduced. skein_512_in_rules(7): fld_byte_count_msb(fld_tweak_words(fld_h(null_skein_512_context))) <= spark__unsigned__u32__last may_be_deduced. skein_512_in_rules(8): spark__unsigned__u16__first <= fld_reserved(fld_tweak_words(fld_h(null_skein_512_context))) may_be_deduced. skein_512_in_rules(9): fld_reserved(fld_tweak_words(fld_h(null_skein_512_context))) <= spark__unsigned__u16__last may_be_deduced. skein_512_in_rules(10): spark__unsigned__u7__first <= fld_tree_level(fld_tweak_words(fld_h(null_skein_512_context))) may_be_deduced. skein_512_in_rules(11): fld_tree_level(fld_tweak_words(fld_h(null_skein_512_context))) <= spark__unsigned__u7__last may_be_deduced. skein_512_in_rules(12): spark__unsigned__u6__first <= fld_field_type(fld_tweak_words(fld_h(null_skein_512_context))) may_be_deduced. skein_512_in_rules(13): fld_field_type(fld_tweak_words(fld_h(null_skein_512_context))) <= spark__unsigned__u6__last may_be_deduced. skein_512_in_rules(14): hash_bit_length__first <= fld_hash_bit_len(fld_h(null_skein_512_context)) may_be_deduced. skein_512_in_rules(15): fld_hash_bit_len(fld_h(null_skein_512_context)) <= hash_bit_length__last may_be_deduced. skein_512_in_rules(16): natural__first <= fld_byte_count(fld_h(null_skein_512_context)) may_be_deduced. skein_512_in_rules(17): fld_byte_count(fld_h(null_skein_512_context)) <= natural__last may_be_deduced. skein_512_in_rules(18): spark__unsigned__u64__first <= element(fld_x(null_skein_512_context), [I]) may_be_deduced_from [0 <= I, I <= 7]. skein_512_in_rules(19): element(fld_x(null_skein_512_context), [I]) <= spark__unsigned__u64__last may_be_deduced_from [0 <= I, I <= 7]. skein_512_in_rules(20): spark__unsigned__byte__first <= element(fld_b(null_skein_512_context), [I]) may_be_deduced_from [0 <= I, I <= 63]. 
skein_512_in_rules(21): element(fld_b(null_skein_512_context), [I]) <= spark__unsigned__byte__last may_be_deduced_from [0 <= I, I <= 63]. skein_512_in_rules(22): skein_schema_ver may_be_replaced_by 5154883667. skein_512_in_rules(23): skein_cfg_tree_info_sequential may_be_replaced_by 0. skein_512_in_rules(24): skein_cfg_str_len may_be_replaced_by 32. skein_512_in_rules(25): integer__size >= 0 may_be_deduced. skein_512_in_rules(26): integer__first may_be_replaced_by -2147483648. skein_512_in_rules(27): integer__last may_be_replaced_by 2147483647. skein_512_in_rules(28): integer__base__first may_be_replaced_by -2147483648. skein_512_in_rules(29): integer__base__last may_be_replaced_by 2147483647. skein_512_in_rules(30): natural__size >= 0 may_be_deduced. skein_512_in_rules(31): natural__first may_be_replaced_by 0. skein_512_in_rules(32): natural__last may_be_replaced_by 2147483647. skein_512_in_rules(33): natural__base__first may_be_replaced_by -2147483648. skein_512_in_rules(34): natural__base__last may_be_replaced_by 2147483647. skein_512_in_rules(35): interfaces__unsigned_8__size >= 0 may_be_deduced. skein_512_in_rules(36): interfaces__unsigned_8__size may_be_replaced_by 8. skein_512_in_rules(37): interfaces__unsigned_8__first may_be_replaced_by 0. skein_512_in_rules(38): interfaces__unsigned_8__last may_be_replaced_by 255. skein_512_in_rules(39): interfaces__unsigned_8__base__first may_be_replaced_by 0. skein_512_in_rules(40): interfaces__unsigned_8__base__last may_be_replaced_by 255. skein_512_in_rules(41): interfaces__unsigned_8__modulus may_be_replaced_by 256. skein_512_in_rules(42): interfaces__unsigned_16__size >= 0 may_be_deduced. skein_512_in_rules(43): interfaces__unsigned_16__size may_be_replaced_by 16. skein_512_in_rules(44): interfaces__unsigned_16__first may_be_replaced_by 0. skein_512_in_rules(45): interfaces__unsigned_16__last may_be_replaced_by 65535. skein_512_in_rules(46): interfaces__unsigned_16__base__first may_be_replaced_by 0. skein_512_in_rules(47): interfaces__unsigned_16__base__last may_be_replaced_by 65535. skein_512_in_rules(48): interfaces__unsigned_16__modulus may_be_replaced_by 65536. skein_512_in_rules(49): interfaces__unsigned_32__size >= 0 may_be_deduced. skein_512_in_rules(50): interfaces__unsigned_32__size may_be_replaced_by 32. skein_512_in_rules(51): interfaces__unsigned_32__first may_be_replaced_by 0. skein_512_in_rules(52): interfaces__unsigned_32__last may_be_replaced_by 4294967295. skein_512_in_rules(53): interfaces__unsigned_32__base__first may_be_replaced_by 0. skein_512_in_rules(54): interfaces__unsigned_32__base__last may_be_replaced_by 4294967295. skein_512_in_rules(55): interfaces__unsigned_32__modulus may_be_replaced_by 4294967296. skein_512_in_rules(56): interfaces__unsigned_64__size >= 0 may_be_deduced. skein_512_in_rules(57): interfaces__unsigned_64__size may_be_replaced_by 64. skein_512_in_rules(58): interfaces__unsigned_64__first may_be_replaced_by 0. skein_512_in_rules(59): interfaces__unsigned_64__last may_be_replaced_by 18446744073709551615. skein_512_in_rules(60): interfaces__unsigned_64__base__first may_be_replaced_by 0. skein_512_in_rules(61): interfaces__unsigned_64__base__last may_be_replaced_by 18446744073709551615. skein_512_in_rules(62): interfaces__unsigned_64__modulus may_be_replaced_by 18446744073709551616. skein_512_in_rules(63): spark__unsigned__u6__size >= 0 may_be_deduced. skein_512_in_rules(64): spark__unsigned__u6__first may_be_replaced_by 0. skein_512_in_rules(65): spark__unsigned__u6__last may_be_replaced_by 63. 
skein_512_in_rules(66): spark__unsigned__u6__base__first may_be_replaced_by 0. skein_512_in_rules(67): spark__unsigned__u6__base__last may_be_replaced_by 63. skein_512_in_rules(68): spark__unsigned__u6__modulus may_be_replaced_by 64. skein_512_in_rules(69): spark__unsigned__u7__size >= 0 may_be_deduced. skein_512_in_rules(70): spark__unsigned__u7__first may_be_replaced_by 0. skein_512_in_rules(71): spark__unsigned__u7__last may_be_replaced_by 127. skein_512_in_rules(72): spark__unsigned__u7__base__first may_be_replaced_by 0. skein_512_in_rules(73): spark__unsigned__u7__base__last may_be_replaced_by 127. skein_512_in_rules(74): spark__unsigned__u7__modulus may_be_replaced_by 128. skein_512_in_rules(75): spark__unsigned__byte__size >= 0 may_be_deduced. skein_512_in_rules(76): spark__unsigned__byte__first may_be_replaced_by 0. skein_512_in_rules(77): spark__unsigned__byte__last may_be_replaced_by 255. skein_512_in_rules(78): spark__unsigned__byte__base__first may_be_replaced_by 0. skein_512_in_rules(79): spark__unsigned__byte__base__last may_be_replaced_by 255. skein_512_in_rules(80): spark__unsigned__byte__modulus may_be_replaced_by 256. skein_512_in_rules(81): spark__unsigned__u16__size >= 0 may_be_deduced. skein_512_in_rules(82): spark__unsigned__u16__first may_be_replaced_by 0. skein_512_in_rules(83): spark__unsigned__u16__last may_be_replaced_by 65535. skein_512_in_rules(84): spark__unsigned__u16__base__first may_be_replaced_by 0. skein_512_in_rules(85): spark__unsigned__u16__base__last may_be_replaced_by 65535. skein_512_in_rules(86): spark__unsigned__u16__modulus may_be_replaced_by 65536. skein_512_in_rules(87): spark__unsigned__u32__size >= 0 may_be_deduced. skein_512_in_rules(88): spark__unsigned__u32__first may_be_replaced_by 0. skein_512_in_rules(89): spark__unsigned__u32__last may_be_replaced_by 4294967295. skein_512_in_rules(90): spark__unsigned__u32__base__first may_be_replaced_by 0. skein_512_in_rules(91): spark__unsigned__u32__base__last may_be_replaced_by 4294967295. skein_512_in_rules(92): spark__unsigned__u32__modulus may_be_replaced_by 4294967296. skein_512_in_rules(93): spark__unsigned__u64__size >= 0 may_be_deduced. skein_512_in_rules(94): spark__unsigned__u64__first may_be_replaced_by 0. skein_512_in_rules(95): spark__unsigned__u64__last may_be_replaced_by 18446744073709551615. skein_512_in_rules(96): spark__unsigned__u64__base__first may_be_replaced_by 0. skein_512_in_rules(97): spark__unsigned__u64__base__last may_be_replaced_by 18446744073709551615. skein_512_in_rules(98): spark__unsigned__u64__modulus may_be_replaced_by 18446744073709551616. skein_512_in_rules(99): spark__crypto__word_count_t__size >= 0 may_be_deduced. skein_512_in_rules(100): spark__crypto__word_count_t__first may_be_replaced_by 0. skein_512_in_rules(101): spark__crypto__word_count_t__last may_be_replaced_by 268435455. skein_512_in_rules(102): spark__crypto__word_count_t__base__first may_be_replaced_by -2147483648. skein_512_in_rules(103): spark__crypto__word_count_t__base__last may_be_replaced_by 2147483647. skein_512_in_rules(104): hash_bit_length__size >= 0 may_be_deduced. skein_512_in_rules(105): hash_bit_length__first may_be_replaced_by 0. skein_512_in_rules(106): hash_bit_length__last may_be_replaced_by 2147483640. skein_512_in_rules(107): hash_bit_length__base__first may_be_replaced_by -2147483648. skein_512_in_rules(108): hash_bit_length__base__last may_be_replaced_by 2147483647. skein_512_in_rules(109): initialized_hash_bit_length__size >= 0 may_be_deduced. 
skein_512_in_rules(110): initialized_hash_bit_length__first may_be_replaced_by 1. skein_512_in_rules(111): initialized_hash_bit_length__last may_be_replaced_by 2147483640. skein_512_in_rules(112): initialized_hash_bit_length__base__first may_be_replaced_by -2147483648. skein_512_in_rules(113): initialized_hash_bit_length__base__last may_be_replaced_by 2147483647. skein_512_in_rules(114): skein_512_state_words_index__size >= 0 may_be_deduced. skein_512_in_rules(115): skein_512_state_words_index__first may_be_replaced_by 0. skein_512_in_rules(116): skein_512_state_words_index__last may_be_replaced_by 7. skein_512_in_rules(117): skein_512_state_words_index__base__first may_be_replaced_by -2147483648. skein_512_in_rules(118): skein_512_state_words_index__base__last may_be_replaced_by 2147483647. skein_512_in_rules(119): skein_512_block_bytes_count__size >= 0 may_be_deduced. skein_512_in_rules(120): skein_512_block_bytes_count__first may_be_replaced_by 0. skein_512_in_rules(121): skein_512_block_bytes_count__last may_be_replaced_by 64. skein_512_in_rules(122): skein_512_block_bytes_count__base__first may_be_replaced_by -2147483648. skein_512_in_rules(123): skein_512_block_bytes_count__base__last may_be_replaced_by 2147483647. skein_512_in_rules(124): skein_512_block_bytes_index__size >= 0 may_be_deduced. skein_512_in_rules(125): skein_512_block_bytes_index__first may_be_replaced_by 0. skein_512_in_rules(126): skein_512_block_bytes_index__last may_be_replaced_by 63. skein_512_in_rules(127): skein_512_block_bytes_index__base__first may_be_replaced_by -2147483648. skein_512_in_rules(128): skein_512_block_bytes_index__base__last may_be_replaced_by 2147483647. skein_512_in_rules(129): skein_512_state_bytes_index__size >= 0 may_be_deduced. skein_512_in_rules(130): skein_512_state_bytes_index__first may_be_replaced_by 0. skein_512_in_rules(131): skein_512_state_bytes_index__last may_be_replaced_by 63. skein_512_in_rules(132): skein_512_state_bytes_index__base__first may_be_replaced_by -2147483648. skein_512_in_rules(133): skein_512_state_bytes_index__base__last may_be_replaced_by 2147483647. skein_512_in_rules(134): positive_block_512_count_t__size >= 0 may_be_deduced. skein_512_in_rules(135): positive_block_512_count_t__first may_be_replaced_by 1. skein_512_in_rules(136): positive_block_512_count_t__last may_be_replaced_by 33554431. skein_512_in_rules(137): positive_block_512_count_t__base__first may_be_replaced_by -2147483648. skein_512_in_rules(138): positive_block_512_count_t__base__last may_be_replaced_by 2147483647. skein_512_in_rules(139): skein_512_context__size >= 0 may_be_deduced. skein_512_in_rules(140): A = B may_be_deduced_from [goal(checktype(A,skein_512_context)), goal(checktype(B,skein_512_context)), fld_h(A) = fld_h(B), fld_x(A) = fld_x(B), fld_b(A) = fld_b(B)]. skein_512_in_rules(141): tweak_value__size >= 0 may_be_deduced. skein_512_in_rules(142): tweak_value__size may_be_replaced_by 128. skein_512_in_rules(143): A = B may_be_deduced_from [goal(checktype(A,tweak_value)), goal(checktype(B,tweak_value)), fld_byte_count_lsb(A) = fld_byte_count_lsb(B), fld_byte_count_msb(A) = fld_byte_count_msb(B), fld_reserved(A) = fld_reserved(B), fld_tree_level(A) = fld_tree_level(B), fld_bit_pad(A) = fld_bit_pad(B), fld_field_type(A) = fld_field_type(B), fld_first_block(A) = fld_first_block(B), fld_final_block(A) = fld_final_block(B)]. skein_512_in_rules(144): context_header__size >= 0 may_be_deduced. 
skein_512_in_rules(145): A = B may_be_deduced_from [goal(checktype(A,context_header)), goal(checktype(B,context_header)), fld_tweak_words(A) = fld_tweak_words(B), fld_hash_bit_len(A) = fld_hash_bit_len(B), fld_byte_count(A) = fld_byte_count(B)]. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/put_64_lsb_first.vct0000644000175000017500000000000011712513676027045 0ustar eugeneugenspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final.vct0000644000175000017500000000012411712513676026535 0ustar eugeneugen,skein_512_final,procedure,,,20,,true,,,, ,skein_512_final,procedure,,,21,,true,,,, spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_update.vct0000644000175000017500000000020111712513676026722 0ustar eugeneugen,skein_512_update,procedure,,,15,,true,,,, ,skein_512_update,procedure,,,19,,true,,,, ,skein_512_update,procedure,,,25,,true,,,, spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_start_new_type.vlg0000644000175000017500000000327711712765060030130 0ustar eugeneugen Non-option args: skein_start_new_type Option args: -fuse-concls \ -decls=[SUPRESSED IN PLAIN MODE] \ -unique-working-files \ -rules=[SUPRESSED IN PLAIN MODE] \ -rules=[SUPRESSED IN PLAIN MODE] \ -elim-enums \ -ground-eval-exp \ -abstract-exp \ -abstract-divmod \ -utick \ -gtick \ -longtick \ -echo-final-stats \ -csv-reports-include-unit-kind \ -level=warning \ -bit-type \ -bit-type-bool-eq-to-iff \ -refine-types \ -refine-int-subrange-type \ -abstract-arrays-records-late \ -elim-array-constructors \ -add-array-select-box-update-axioms \ -abstract-array-box-updates \ -add-array-select-update-axioms \ -abstract-array-select-updates \ -abstract-array-types \ -abstract-record-types \ -abstract-bit-ops \ -abstract-bit-valued-eqs \ -abstract-bit-valued-int-le \ -elim-bit-type-and-consts \ -abstract-reals \ -lift-quants \ -strip-quantifier-patterns \ -elim-type-aliases \ -interface-mode=smtlib \ -refine-bit-type-as-int-subtype \ -refine-bit-eq-equiv \ -elim-record-constructors \ -add-record-select-update-axioms \ -abstract-record-selects-updates \ -logic=AUFNIRA \ -report=skein_start_new_type \ -prover-command=[SUPRESSED IN PLAIN MODE] \ -siv \ -plain \ -read-unit-rlu-files \ -read-directory-rlu-files \ -delete-rules-with-undeclared-ids \ Total ERROR messages: 0 Total WARNING messages: 0 Summary Stats: true: 0 (-nan%) unproven: 0 (-nan%) error: 0 (-nan%) total: 0 spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_hash.vsm0000644000175000017500000000006211712765060026375 0ustar eugeneugenskein_512_hash,0,0,0,0,0,0,0,-nan,-nan,-nan,-nan, spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_start_new_type.log0000644000175000017500000000063111712513676030114 0ustar eugeneugenSPARK Simplifier Pro Edition Reading skein_start_new_type.fdl (for inherited FDL type declarations) Reading skein.rlu (for user-defined proof rules) Processing skein_start_new_type.vcg ... Simplified VC: 1 - All conclusions proved Simplified VC: 2 - All conclusions proved Simplified VC: 3 - All conclusions proved Automatic simplification completed. Simplified output sent to skein_start_new_type.siv. 
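Rules (140), (143) and (145) above are record extensionality rules: they tell the simplifier that two values of skein_512_context, tweak_value or context_header are equal exactly when all of their fields are equal, since record equality is otherwise opaque to it. A hedged sketch of the Ada record declarations behind those fld_* selectors (field names follow the rules and the FDL declarations further below; the component subtype names are assumptions taken from the range rules, not copied from the shipped source):

   --  Sketch only; U6 .. U64, Hash_Bit_Length and the two array types
   --  are assumed names for the subtypes whose bounds appear above.
   type Tweak_Value is record
      Byte_Count_LSB : U64;      --  fld_byte_count_lsb
      Byte_Count_MSB : U32;      --  fld_byte_count_msb
      Reserved       : U16;      --  fld_reserved
      Tree_Level     : U7;       --  fld_tree_level
      Bit_Pad        : Boolean;  --  fld_bit_pad
      Field_Type     : U6;       --  fld_field_type
      First_Block    : Boolean;  --  fld_first_block
      Final_Block    : Boolean;  --  fld_final_block
   end record;

   type Context_Header is record
      Tweak_Words  : Tweak_Value;      --  fld_tweak_words
      Hash_Bit_Len : Hash_Bit_Length;  --  fld_hash_bit_len
      Byte_Count   : Natural;          --  fld_byte_count
   end record;

   type Skein_512_Context is record
      H : Context_Header;              --  fld_h
      X : Skein_512_State_Words;       --  fld_x, an array of U64 words
      B : Skein_512_Block_Bytes;       --  fld_b, an array of bytes
   end record;
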
spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/get_64_lsb_first.vct0000644000175000017500000000020011712513676027016 0ustar eugeneugen,get_64_lsb_first,procedure,,,4,,true,,,, ,get_64_lsb_first,procedure,,,21,,true,,,, ,get_64_lsb_first,procedure,,,22,,true,,,, spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final/0000755000175000017500000000000011753202331026006 5ustar eugeneugenspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final/set_b_counter.vct0000644000175000017500000000000011712513676031361 0ustar eugeneugenspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final/zero_pad_b.siv0000644000175000017500000000247411712513676030660 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Final.Zero_Pad_B For path(s) from start to run-time check associated with statement of line 870: procedure_zero_pad_b_1. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 870: procedure_zero_pad_b_2. *** true . /* all conclusions proved */ For path(s) from start to assertion of line 871: procedure_zero_pad_b_3. *** true . /* all conclusions proved */ For path(s) from assertion of line 871 to assertion of line 871: procedure_zero_pad_b_4. *** true . /* all conclusions proved */ For path(s) from assertion of line 871 to run-time check associated with statement of line 875: procedure_zero_pad_b_5. *** true . /* all conclusions proved */ For path(s) from start to finish: procedure_zero_pad_b_6. *** true . /* all conclusions proved */ For path(s) from assertion of line 871 to finish: procedure_zero_pad_b_7. *** true . 
/* all conclusions proved */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final/zero_pad_b.fdl0000644000175000017500000001772711712513676030633 0ustar eugeneugen {*******************************************************} {FDL Declarations} {Examiner Pro Edition} {*******************************************************} {procedure Skein.Skein_512_Final.Zero_Pad_B} title procedure zero_pad_b; function round__(real) : integer; type natural = integer; type interfaces__unsigned_8 = integer; type interfaces__unsigned_16 = integer; type interfaces__unsigned_32 = integer; type interfaces__unsigned_64 = integer; type spark__unsigned__u6 = integer; type spark__unsigned__u7 = integer; type spark__unsigned__byte = integer; type spark__unsigned__u16 = integer; type spark__unsigned__u32 = integer; type spark__unsigned__u64 = integer; type spark__crypto__word_count_t = integer; type hash_bit_length = integer; type skein_512_state_words_index = integer; type skein_512_block_bytes_index = integer; type spark__crypto__byte_seq = array [natural] of spark__unsigned__byte; type spark__crypto__u64_seq = array [ spark__crypto__word_count_t] of spark__unsigned__u64; type tweak_value = record byte_count_lsb : spark__unsigned__u64; byte_count_msb : spark__unsigned__u32; reserved : spark__unsigned__u16; tree_level : spark__unsigned__u7; bit_pad : boolean; field_type : spark__unsigned__u6; first_block : boolean; final_block : boolean end; type context_header = record tweak_words : tweak_value; hash_bit_len : hash_bit_length; byte_count : natural end; type skein_512_context = record h : context_header; x : spark__crypto__u64_seq; b : spark__crypto__byte_seq end; const skein_512_block_bytes_c : integer = pending; const skein_512_block_bytes_index__base__first : integer = pending; const skein_512_block_bytes_index__base__last : integer = pending; const skein_512_state_words_index__base__first : integer = pending; const skein_512_state_words_index__base__last : integer = pending; const hash_bit_length__base__first : integer = pending; const hash_bit_length__base__last : integer = pending; const spark__crypto__word_count_t__base__first : integer = pending; const spark__crypto__word_count_t__base__last : integer = pending; const spark__unsigned__u64__base__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__base__last : interfaces__unsigned_64 = pending; const spark__unsigned__u32__base__first : interfaces__unsigned_32 = pending; const spark__unsigned__u32__base__last : interfaces__unsigned_32 = pending; const spark__unsigned__u16__base__first : interfaces__unsigned_16 = pending; const spark__unsigned__u16__base__last : interfaces__unsigned_16 = pending; const spark__unsigned__byte__base__first : interfaces__unsigned_8 = pending; const spark__unsigned__byte__base__last : interfaces__unsigned_8 = pending; const spark__unsigned__u7__base__first : spark__unsigned__u7 = pending; const spark__unsigned__u7__base__last : spark__unsigned__u7 = pending; const spark__unsigned__u6__base__first : spark__unsigned__u6 = pending; const spark__unsigned__u6__base__last : spark__unsigned__u6 = pending; const interfaces__unsigned_64__base__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_32__base__first : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__base__last : interfaces__unsigned_32 = pending; const interfaces__unsigned_16__base__first : interfaces__unsigned_16 = pending; 
const interfaces__unsigned_16__base__last : interfaces__unsigned_16 = pending; const interfaces__unsigned_8__base__first : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__base__last : interfaces__unsigned_8 = pending; const natural__base__first : integer = pending; const natural__base__last : integer = pending; const integer__base__first : integer = pending; const integer__base__last : integer = pending; const context_header__size : integer = pending; const tweak_value__size : integer = pending; const skein_512_context__size : integer = pending; const skein_512_block_bytes_index__first : integer = pending; const skein_512_block_bytes_index__last : integer = pending; const skein_512_block_bytes_index__size : integer = pending; const skein_512_state_words_index__first : integer = pending; const skein_512_state_words_index__last : integer = pending; const skein_512_state_words_index__size : integer = pending; const hash_bit_length__first : integer = pending; const hash_bit_length__last : integer = pending; const hash_bit_length__size : integer = pending; const spark__crypto__word_count_t__first : integer = pending; const spark__crypto__word_count_t__last : integer = pending; const spark__crypto__word_count_t__size : integer = pending; const spark__unsigned__u64__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__last : interfaces__unsigned_64 = pending; const spark__unsigned__u64__modulus : integer = pending; const spark__unsigned__u64__size : integer = pending; const spark__unsigned__u32__first : interfaces__unsigned_32 = pending; const spark__unsigned__u32__last : interfaces__unsigned_32 = pending; const spark__unsigned__u32__modulus : integer = pending; const spark__unsigned__u32__size : integer = pending; const spark__unsigned__u16__first : interfaces__unsigned_16 = pending; const spark__unsigned__u16__last : interfaces__unsigned_16 = pending; const spark__unsigned__u16__modulus : integer = pending; const spark__unsigned__u16__size : integer = pending; const spark__unsigned__byte__first : interfaces__unsigned_8 = pending; const spark__unsigned__byte__last : interfaces__unsigned_8 = pending; const spark__unsigned__byte__modulus : integer = pending; const spark__unsigned__byte__size : integer = pending; const spark__unsigned__u7__first : spark__unsigned__u7 = pending; const spark__unsigned__u7__last : spark__unsigned__u7 = pending; const spark__unsigned__u7__modulus : integer = pending; const spark__unsigned__u7__size : integer = pending; const spark__unsigned__u6__first : spark__unsigned__u6 = pending; const spark__unsigned__u6__last : spark__unsigned__u6 = pending; const spark__unsigned__u6__modulus : integer = pending; const spark__unsigned__u6__size : integer = pending; const interfaces__unsigned_64__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__modulus : integer = pending; const interfaces__unsigned_64__size : integer = pending; const interfaces__unsigned_32__first : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__last : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__modulus : integer = pending; const interfaces__unsigned_32__size : integer = pending; const interfaces__unsigned_16__first : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__last : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__modulus : integer = pending; const interfaces__unsigned_16__size : integer = pending; 
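The declarations in this FDL file, and the verification conditions in zero_pad_b.vcg below, are generated from the small byte-padding helper inside Skein.Skein_512_Final: the VCs mention a loop counter loop__1__i running from Local_Ctx.H.Byte_Count up to Skein_512_Block_Bytes_Index'Last, a loop assertion at source line 871, and an update that stores 0 into Local_Ctx.B (loop__1__i). A hedged reconstruction of a SPARK procedure with that shape (names, annotations and line positions are assumptions inferred from the VCs, not the shipped source):

   procedure Zero_Pad_B
   --# global in out Local_Ctx;
   --# pre  Local_Ctx.H.Byte_Count < Skein_512_Block_Bytes_C and
   --#      Local_Ctx.H.Hash_Bit_Len > 0;
   is
   begin
      --  Zero the unused tail of the block buffer B.
      for I in Skein_512_Block_Bytes_Index range
        Local_Ctx.H.Byte_Count .. Skein_512_Block_Bytes_Index'Last
        --  loop bounds give the run-time checks of line 870
      loop
         --# assert Local_Ctx.H.Hash_Bit_Len = Local_Ctx~.H.Hash_Bit_Len and
         --#        Local_Ctx.H.Byte_Count   = Local_Ctx~.H.Byte_Count  and
         --#        I >= Local_Ctx.H.Byte_Count;
         --  the assertion of line 871
         Local_Ctx.B (I) := 0;   --  the statement of line 875
      end loop;
   end Zero_Pad_B;
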
const interfaces__unsigned_8__first : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__last : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__modulus : integer = pending; const interfaces__unsigned_8__size : integer = pending; const natural__first : integer = pending; const natural__last : integer = pending; const natural__size : integer = pending; const integer__first : integer = pending; const integer__last : integer = pending; const integer__size : integer = pending; var local_ctx : skein_512_context; var loop__1__i : integer; end; spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final/zero_pad_b.vcg0000644000175000017500000007053411712513676030640 0ustar eugeneugen ******************************************************* Semantic Analysis of SPARK Text Examiner Pro Edition ******************************************************* procedure Skein.Skein_512_Final.Zero_Pad_B For path(s) from start to run-time check associated with statement of line 870: procedure_zero_pad_b_1. H1: fld_byte_count(fld_h(local_ctx)) < skein_512_block_bytes_c . H2: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H3: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last))) . H4: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last))) . H5: fld_byte_count(fld_h(local_ctx)) >= natural__first . H6: fld_byte_count(fld_h(local_ctx)) <= natural__last . H7: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first . H8: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last . H9: true . H10: true . H11: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first . H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last . H13: true . H14: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first . H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last . H16: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first . H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last . H18: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first . H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last . H20: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first . H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last . -> C1: skein_512_block_bytes_index__last >= integer__first . C2: skein_512_block_bytes_index__last <= integer__last . C3: fld_byte_count(fld_h(local_ctx)) >= integer__first . C4: fld_byte_count(fld_h(local_ctx)) <= integer__last . For path(s) from start to run-time check associated with statement of line 870: procedure_zero_pad_b_2. H1: fld_byte_count(fld_h(local_ctx)) < skein_512_block_bytes_c . H2: fld_hash_bit_len(fld_h(local_ctx)) > 0 . 
H3: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last))) . H4: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last))) . H5: fld_byte_count(fld_h(local_ctx)) >= natural__first . H6: fld_byte_count(fld_h(local_ctx)) <= natural__last . H7: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first . H8: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last . H9: true . H10: true . H11: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first . H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last . H13: true . H14: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first . H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last . H16: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first . H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last . H18: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first . H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last . H20: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first . H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last . H22: skein_512_block_bytes_index__last >= integer__first . H23: skein_512_block_bytes_index__last <= integer__last . H24: fld_byte_count(fld_h(local_ctx)) >= integer__first . H25: fld_byte_count(fld_h(local_ctx)) <= integer__last . -> C1: (fld_byte_count(fld_h(local_ctx)) <= skein_512_block_bytes_index__last) -> (( skein_512_block_bytes_index__last >= skein_512_block_bytes_index__first) and ( skein_512_block_bytes_index__last <= skein_512_block_bytes_index__last)) . C2: (fld_byte_count(fld_h(local_ctx)) <= skein_512_block_bytes_index__last) -> ((fld_byte_count(fld_h( local_ctx)) >= skein_512_block_bytes_index__first) and ( fld_byte_count(fld_h(local_ctx)) <= skein_512_block_bytes_index__last)) . For path(s) from start to assertion of line 871: procedure_zero_pad_b_3. H1: fld_byte_count(fld_h(local_ctx)) < skein_512_block_bytes_c . H2: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H3: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last))) . H4: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last))) . H5: fld_byte_count(fld_h(local_ctx)) >= natural__first . H6: fld_byte_count(fld_h(local_ctx)) <= natural__last . H7: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first . H8: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last . H9: true . H10: true . H11: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first . 
H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last . H13: true . H14: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first . H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last . H16: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first . H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last . H18: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first . H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last . H20: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first . H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last . H22: skein_512_block_bytes_index__last >= integer__first . H23: skein_512_block_bytes_index__last <= integer__last . H24: fld_byte_count(fld_h(local_ctx)) >= integer__first . H25: fld_byte_count(fld_h(local_ctx)) <= integer__last . H26: (fld_byte_count(fld_h(local_ctx)) <= skein_512_block_bytes_index__last) -> (( skein_512_block_bytes_index__last >= skein_512_block_bytes_index__first) and ( skein_512_block_bytes_index__last <= skein_512_block_bytes_index__last)) . H27: (fld_byte_count(fld_h(local_ctx)) <= skein_512_block_bytes_index__last) -> ((fld_byte_count(fld_h( local_ctx)) >= skein_512_block_bytes_index__first) and ( fld_byte_count(fld_h(local_ctx)) <= skein_512_block_bytes_index__last)) . H28: fld_byte_count(fld_h(local_ctx)) <= skein_512_block_bytes_index__last . -> C1: fld_hash_bit_len(fld_h(local_ctx)) = fld_hash_bit_len(fld_h( local_ctx)) . C2: fld_hash_bit_len(fld_h(local_ctx)) > 0 . C3: fld_byte_count(fld_h(local_ctx)) < skein_512_block_bytes_c . C4: fld_byte_count(fld_h(local_ctx)) = fld_byte_count(fld_h( local_ctx)) . C5: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last))) . C6: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last))) . C7: fld_byte_count(fld_h(local_ctx)) >= natural__first . C8: fld_byte_count(fld_h(local_ctx)) <= natural__last . C9: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first . C10: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last . C11: true . C12: true . C13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first . C14: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last . C15: true . C16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first . C17: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last . C18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first . C19: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last . C20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first . C21: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last . C22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first . 
C23: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last . C24: fld_byte_count(fld_h(local_ctx)) < skein_512_block_bytes_c . C25: fld_hash_bit_len(fld_h(local_ctx)) > 0 . C26: fld_byte_count(fld_h(local_ctx)) >= skein_512_block_bytes_index__first . C27: fld_byte_count(fld_h(local_ctx)) <= skein_512_block_bytes_index__last . C28: fld_byte_count(fld_h(local_ctx)) >= fld_byte_count(fld_h( local_ctx)) . C29: fld_byte_count(fld_h(local_ctx)) <= skein_512_block_bytes_index__last . For path(s) from assertion of line 871 to assertion of line 871: procedure_zero_pad_b_4. H1: fld_hash_bit_len(fld_h(local_ctx)) = fld_hash_bit_len(fld_h( local_ctx~)) . H2: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H3: fld_byte_count(fld_h(local_ctx)) < skein_512_block_bytes_c . H4: fld_byte_count(fld_h(local_ctx)) = fld_byte_count(fld_h( local_ctx~)) . H5: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last))) . H6: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last))) . H7: fld_byte_count(fld_h(local_ctx)) >= natural__first . H8: fld_byte_count(fld_h(local_ctx)) <= natural__last . H9: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first . H10: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last . H11: true . H12: true . H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first . H14: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last . H15: true . H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first . H17: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last . H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first . H19: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last . H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first . H21: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last . H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first . H23: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last . H24: fld_byte_count(fld_h(local_ctx~)) < skein_512_block_bytes_c . H25: fld_hash_bit_len(fld_h(local_ctx~)) > 0 . H26: loop__1__i >= skein_512_block_bytes_index__first . H27: loop__1__i <= skein_512_block_bytes_index__last . H28: loop__1__i >= fld_byte_count(fld_h(local_ctx)) . H29: loop__1__i <= skein_512_block_bytes_index__last . H30: 0 >= spark__unsigned__byte__first . H31: 0 <= spark__unsigned__byte__last . H32: loop__1__i >= skein_512_block_bytes_index__first . H33: loop__1__i <= skein_512_block_bytes_index__last . H34: not (loop__1__i = skein_512_block_bytes_index__last) . -> C1: fld_hash_bit_len(fld_h(upf_b(local_ctx, update(fld_b( local_ctx), [loop__1__i], 0)))) = fld_hash_bit_len(fld_h( local_ctx~)) . C2: fld_hash_bit_len(fld_h(upf_b(local_ctx, update(fld_b( local_ctx), [loop__1__i], 0)))) > 0 . C3: fld_byte_count(fld_h(upf_b(local_ctx, update(fld_b( local_ctx), [loop__1__i], 0)))) < skein_512_block_bytes_c . 
C4: fld_byte_count(fld_h(upf_b(local_ctx, update(fld_b( local_ctx), [loop__1__i], 0)))) = fld_byte_count(fld_h( local_ctx~)) . C5: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b(upf_b( local_ctx, update(fld_b(local_ctx), [loop__1__i], 0))), [ i___2]) >= spark__unsigned__byte__first) and (element(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [ loop__1__i], 0))), [i___2]) <= spark__unsigned__byte__last))) . C6: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x(upf_b( local_ctx, update(fld_b(local_ctx), [loop__1__i], 0))), [ i___1]) >= spark__unsigned__u64__first) and (element(fld_x( upf_b(local_ctx, update(fld_b(local_ctx), [ loop__1__i], 0))), [i___1]) <= spark__unsigned__u64__last))) . C7: fld_byte_count(fld_h(upf_b(local_ctx, update(fld_b( local_ctx), [loop__1__i], 0)))) >= natural__first . C8: fld_byte_count(fld_h(upf_b(local_ctx, update(fld_b( local_ctx), [loop__1__i], 0)))) <= natural__last . C9: fld_hash_bit_len(fld_h(upf_b(local_ctx, update(fld_b( local_ctx), [loop__1__i], 0)))) >= hash_bit_length__first . C10: fld_hash_bit_len(fld_h(upf_b(local_ctx, update(fld_b( local_ctx), [loop__1__i], 0)))) <= hash_bit_length__last . C11: true . C12: true . C13: fld_field_type(fld_tweak_words(fld_h(upf_b(local_ctx, update( fld_b(local_ctx), [loop__1__i], 0))))) >= spark__unsigned__u6__first . C14: fld_field_type(fld_tweak_words(fld_h(upf_b(local_ctx, update( fld_b(local_ctx), [loop__1__i], 0))))) <= spark__unsigned__u6__last . C15: true . C16: fld_tree_level(fld_tweak_words(fld_h(upf_b(local_ctx, update( fld_b(local_ctx), [loop__1__i], 0))))) >= spark__unsigned__u7__first . C17: fld_tree_level(fld_tweak_words(fld_h(upf_b(local_ctx, update( fld_b(local_ctx), [loop__1__i], 0))))) <= spark__unsigned__u7__last . C18: fld_reserved(fld_tweak_words(fld_h(upf_b(local_ctx, update(fld_b( local_ctx), [loop__1__i], 0))))) >= spark__unsigned__u16__first . C19: fld_reserved(fld_tweak_words(fld_h(upf_b(local_ctx, update(fld_b( local_ctx), [loop__1__i], 0))))) <= spark__unsigned__u16__last . C20: fld_byte_count_msb(fld_tweak_words(fld_h(upf_b( local_ctx, update(fld_b(local_ctx), [loop__1__i], 0))))) >= spark__unsigned__u32__first . C21: fld_byte_count_msb(fld_tweak_words(fld_h(upf_b( local_ctx, update(fld_b(local_ctx), [loop__1__i], 0))))) <= spark__unsigned__u32__last . C22: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_b( local_ctx, update(fld_b(local_ctx), [loop__1__i], 0))))) >= spark__unsigned__u64__first . C23: fld_byte_count_lsb(fld_tweak_words(fld_h(upf_b( local_ctx, update(fld_b(local_ctx), [loop__1__i], 0))))) <= spark__unsigned__u64__last . C24: fld_byte_count(fld_h(local_ctx~)) < skein_512_block_bytes_c . C25: fld_hash_bit_len(fld_h(local_ctx~)) > 0 . C26: loop__1__i + 1 >= skein_512_block_bytes_index__first . C27: loop__1__i + 1 <= skein_512_block_bytes_index__last . C28: loop__1__i + 1 >= fld_byte_count(fld_h(upf_b( local_ctx, update(fld_b(local_ctx), [loop__1__i], 0)))) . C29: loop__1__i + 1 <= skein_512_block_bytes_index__last . For path(s) from assertion of line 871 to run-time check associated with statement of line 875: procedure_zero_pad_b_5. H1: fld_hash_bit_len(fld_h(local_ctx)) = fld_hash_bit_len(fld_h( local_ctx~)) . H2: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H3: fld_byte_count(fld_h(local_ctx)) < skein_512_block_bytes_c . H4: fld_byte_count(fld_h(local_ctx)) = fld_byte_count(fld_h( local_ctx~)) . 
H5: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last))) . H6: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last))) . H7: fld_byte_count(fld_h(local_ctx)) >= natural__first . H8: fld_byte_count(fld_h(local_ctx)) <= natural__last . H9: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first . H10: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last . H11: true . H12: true . H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first . H14: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last . H15: true . H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first . H17: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last . H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first . H19: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last . H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first . H21: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last . H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first . H23: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last . H24: fld_byte_count(fld_h(local_ctx~)) < skein_512_block_bytes_c . H25: fld_hash_bit_len(fld_h(local_ctx~)) > 0 . H26: loop__1__i >= skein_512_block_bytes_index__first . H27: loop__1__i <= skein_512_block_bytes_index__last . H28: loop__1__i >= fld_byte_count(fld_h(local_ctx)) . H29: loop__1__i <= skein_512_block_bytes_index__last . -> C1: 0 >= spark__unsigned__byte__first . C2: 0 <= spark__unsigned__byte__last . C3: loop__1__i >= skein_512_block_bytes_index__first . C4: loop__1__i <= skein_512_block_bytes_index__last . For path(s) from start to finish: procedure_zero_pad_b_6. H1: fld_byte_count(fld_h(local_ctx)) < skein_512_block_bytes_c . H2: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H3: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last))) . H4: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last))) . H5: fld_byte_count(fld_h(local_ctx)) >= natural__first . H6: fld_byte_count(fld_h(local_ctx)) <= natural__last . H7: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first . H8: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last . H9: true . H10: true . H11: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first . H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last . H13: true . H14: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first . 
H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last . H16: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first . H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last . H18: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first . H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last . H20: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first . H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last . H22: skein_512_block_bytes_index__last >= integer__first . H23: skein_512_block_bytes_index__last <= integer__last . H24: fld_byte_count(fld_h(local_ctx)) >= integer__first . H25: fld_byte_count(fld_h(local_ctx)) <= integer__last . H26: (fld_byte_count(fld_h(local_ctx)) <= skein_512_block_bytes_index__last) -> (( skein_512_block_bytes_index__last >= skein_512_block_bytes_index__first) and ( skein_512_block_bytes_index__last <= skein_512_block_bytes_index__last)) . H27: (fld_byte_count(fld_h(local_ctx)) <= skein_512_block_bytes_index__last) -> ((fld_byte_count(fld_h( local_ctx)) >= skein_512_block_bytes_index__first) and ( fld_byte_count(fld_h(local_ctx)) <= skein_512_block_bytes_index__last)) . H28: not (fld_byte_count(fld_h(local_ctx)) <= skein_512_block_bytes_index__last) . -> C1: fld_hash_bit_len(fld_h(local_ctx)) = fld_hash_bit_len(fld_h( local_ctx)) . C2: fld_hash_bit_len(fld_h(local_ctx)) > 0 . C3: fld_byte_count(fld_h(local_ctx)) < skein_512_block_bytes_c . C4: fld_byte_count(fld_h(local_ctx)) = fld_byte_count(fld_h( local_ctx)) . For path(s) from assertion of line 871 to finish: procedure_zero_pad_b_7. H1: fld_hash_bit_len(fld_h(local_ctx)) = fld_hash_bit_len(fld_h( local_ctx~)) . H2: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H3: fld_byte_count(fld_h(local_ctx)) < skein_512_block_bytes_c . H4: fld_byte_count(fld_h(local_ctx)) = fld_byte_count(fld_h( local_ctx~)) . H5: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last))) . H6: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last))) . H7: fld_byte_count(fld_h(local_ctx)) >= natural__first . H8: fld_byte_count(fld_h(local_ctx)) <= natural__last . H9: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first . H10: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last . H11: true . H12: true . H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first . H14: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last . H15: true . H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first . H17: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last . H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first . H19: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last . H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first . H21: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last . 
H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first . H23: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last . H24: fld_byte_count(fld_h(local_ctx~)) < skein_512_block_bytes_c . H25: fld_hash_bit_len(fld_h(local_ctx~)) > 0 . H26: loop__1__i >= skein_512_block_bytes_index__first . H27: loop__1__i <= skein_512_block_bytes_index__last . H28: loop__1__i >= fld_byte_count(fld_h(local_ctx)) . H29: loop__1__i <= skein_512_block_bytes_index__last . H30: 0 >= spark__unsigned__byte__first . H31: 0 <= spark__unsigned__byte__last . H32: loop__1__i >= skein_512_block_bytes_index__first . H33: loop__1__i <= skein_512_block_bytes_index__last . H34: loop__1__i = skein_512_block_bytes_index__last . -> C1: fld_hash_bit_len(fld_h(upf_b(local_ctx, update(fld_b( local_ctx), [loop__1__i], 0)))) = fld_hash_bit_len(fld_h( local_ctx~)) . C2: fld_hash_bit_len(fld_h(upf_b(local_ctx, update(fld_b( local_ctx), [loop__1__i], 0)))) > 0 . C3: fld_byte_count(fld_h(upf_b(local_ctx, update(fld_b( local_ctx), [loop__1__i], 0)))) < skein_512_block_bytes_c . C4: fld_byte_count(fld_h(upf_b(local_ctx, update(fld_b( local_ctx), [loop__1__i], 0)))) = fld_byte_count(fld_h( local_ctx~)) . spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final/set_b_counter.log0000644000175000017500000000157411712513676031367 0ustar eugeneugenSPARK Simplifier Pro Edition Reading set_b_counter.fdl (for inherited FDL type declarations) Processing set_b_counter.vcg ... Simplified VC: 1 - All conclusions proved Simplified VC: 2 - All conclusions proved Simplified VC: 3 - All conclusions proved Simplified VC: 4 - All conclusions proved Simplified VC: 5 - All conclusions proved Simplified VC: 6 - All conclusions proved Simplified VC: 7 - All conclusions proved Simplified VC: 8 - All conclusions proved Simplified VC: 9 - All conclusions proved Simplified VC: 10 - All conclusions proved Simplified VC: 11 - All conclusions proved Simplified VC: 12 - All conclusions proved Simplified VC: 13 - All conclusions proved Simplified VC: 14 - All conclusions proved Simplified VC: 15 - All conclusions proved Simplified VC: 16 - All conclusions proved Automatic simplification completed. Simplified output sent to set_b_counter.siv. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final/set_b_counter.siv0000644000175000017500000000544311712513676031406 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Final.Set_B_Counter For path(s) from start to run-time check associated with statement of line 887: procedure_set_b_counter_1. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 888: procedure_set_b_counter_2. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 888: procedure_set_b_counter_3. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 889: procedure_set_b_counter_4. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 889: procedure_set_b_counter_5. *** true . 
/* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 890: procedure_set_b_counter_6. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 890: procedure_set_b_counter_7. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 891: procedure_set_b_counter_8. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 891: procedure_set_b_counter_9. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 892: procedure_set_b_counter_10. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 892: procedure_set_b_counter_11. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 893: procedure_set_b_counter_12. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 893: procedure_set_b_counter_13. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 894: procedure_set_b_counter_14. *** true . /* all conclusions proved */ For path(s) from start to run-time check associated with statement of line 894: procedure_set_b_counter_15. *** true . /* all conclusions proved */ For path(s) from start to finish: procedure_set_b_counter_16. *** true . /* all conclusions proved */ spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final/set_b_counter.vlg0000644000175000017500000000326111712765060031365 0ustar eugeneugen Non-option args: set_b_counter Option args: -fuse-concls \ -decls=[SUPRESSED IN PLAIN MODE] \ -unique-working-files \ -rules=[SUPRESSED IN PLAIN MODE] \ -rules=[SUPRESSED IN PLAIN MODE] \ -elim-enums \ -ground-eval-exp \ -abstract-exp \ -abstract-divmod \ -utick \ -gtick \ -longtick \ -echo-final-stats \ -csv-reports-include-unit-kind \ -level=warning \ -bit-type \ -bit-type-bool-eq-to-iff \ -refine-types \ -refine-int-subrange-type \ -abstract-arrays-records-late \ -elim-array-constructors \ -add-array-select-box-update-axioms \ -abstract-array-box-updates \ -add-array-select-update-axioms \ -abstract-array-select-updates \ -abstract-array-types \ -abstract-record-types \ -abstract-bit-ops \ -abstract-bit-valued-eqs \ -abstract-bit-valued-int-le \ -elim-bit-type-and-consts \ -abstract-reals \ -lift-quants \ -strip-quantifier-patterns \ -elim-type-aliases \ -interface-mode=smtlib \ -refine-bit-type-as-int-subtype \ -refine-bit-eq-equiv \ -elim-record-constructors \ -add-record-select-update-axioms \ -abstract-record-selects-updates \ -logic=AUFNIRA \ -report=set_b_counter \ -prover-command=[SUPRESSED IN PLAIN MODE] \ -siv \ -plain \ -read-unit-rlu-files \ -read-directory-rlu-files \ -delete-rules-with-undeclared-ids \ Total ERROR messages: 0 Total WARNING messages: 0 Summary Stats: true: 0 (-nan%) unproven: 0 (-nan%) error: 0 (-nan%) total: 0 spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final/zero_pad_b.vlg0000644000175000017500000000325311712765060030637 0ustar eugeneugen Non-option args: zero_pad_b Option args: -fuse-concls \ -decls=[SUPRESSED IN PLAIN MODE] \ -unique-working-files \ -rules=[SUPRESSED IN PLAIN MODE] \ -rules=[SUPRESSED IN PLAIN MODE] \ -elim-enums \ -ground-eval-exp \ -abstract-exp \ -abstract-divmod \ -utick 
\ -gtick \ -longtick \ -echo-final-stats \ -csv-reports-include-unit-kind \ -level=warning \ -bit-type \ -bit-type-bool-eq-to-iff \ -refine-types \ -refine-int-subrange-type \ -abstract-arrays-records-late \ -elim-array-constructors \ -add-array-select-box-update-axioms \ -abstract-array-box-updates \ -add-array-select-update-axioms \ -abstract-array-select-updates \ -abstract-array-types \ -abstract-record-types \ -abstract-bit-ops \ -abstract-bit-valued-eqs \ -abstract-bit-valued-int-le \ -elim-bit-type-and-consts \ -abstract-reals \ -lift-quants \ -strip-quantifier-patterns \ -elim-type-aliases \ -interface-mode=smtlib \ -refine-bit-type-as-int-subtype \ -refine-bit-eq-equiv \ -elim-record-constructors \ -add-record-select-update-axioms \ -abstract-record-selects-updates \ -logic=AUFNIRA \ -report=zero_pad_b \ -prover-command=[SUPRESSED IN PLAIN MODE] \ -siv \ -plain \ -read-unit-rlu-files \ -read-directory-rlu-files \ -delete-rules-with-undeclared-ids \ Total ERROR messages: 0 Total WARNING messages: 0 Summary Stats: true: 0 (-nan%) unproven: 0 (-nan%) error: 0 (-nan%) total: 0 spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final/zero_pad_b.log0000644000175000017500000000076211712513676030636 0ustar eugeneugenSPARK Simplifier Pro Edition Reading zero_pad_b.fdl (for inherited FDL type declarations) Processing zero_pad_b.vcg ... Simplified VC: 1 - All conclusions proved Simplified VC: 2 - All conclusions proved Simplified VC: 3 - All conclusions proved Simplified VC: 4 - All conclusions proved Simplified VC: 5 - All conclusions proved Simplified VC: 6 - All conclusions proved Simplified VC: 7 - All conclusions proved Automatic simplification completed. Simplified output sent to zero_pad_b.siv. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final/zero_pad_b.slg0000644000175000017500000013224411712513676030643 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Final.Zero_Pad_B @@@@@@@@@@ VC: procedure_zero_pad_b_1. @@@@@@@@@@ %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule zero_pad_b_rules(93). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H3: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) New C1: 63 >= integer__first New C2: 63 <= integer__last -S- Applied substitution rule zero_pad_b_rules(3). This was achieved by replacing all occurrences of integer__first by: - 2147483648. 
New C3: fld_byte_count(fld_h(local_ctx)) >= - 2147483648 New C1: true -S- Applied substitution rule zero_pad_b_rules(4). This was achieved by replacing all occurrences of integer__last by: 2147483647. New C4: fld_byte_count(fld_h(local_ctx)) <= 2147483647 New C2: true *** Proved C1: true *** Proved C2: true -S- Applied substitution rule zero_pad_b_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H1: fld_byte_count(fld_h(local_ctx)) < 64 -S- Applied substitution rule zero_pad_b_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H5: fld_byte_count(fld_h(local_ctx)) >= 0 -S- Applied substitution rule zero_pad_b_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H6: fld_byte_count(fld_h(local_ctx)) <= 2147483647 -S- Applied substitution rule zero_pad_b_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H11: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule zero_pad_b_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= 63 -S- Applied substitution rule zero_pad_b_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H14: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule zero_pad_b_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= 127 -S- Applied substitution rule zero_pad_b_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H3: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule zero_pad_b_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H3: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= 255) -S- Applied substitution rule zero_pad_b_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H16: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule zero_pad_b_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= 65535 -S- Applied substitution rule zero_pad_b_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H18: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule zero_pad_b_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= 4294967295 -S- Applied substitution rule zero_pad_b_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H4: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) New H20: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule zero_pad_b_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= 18446744073709551615 New H4: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule zero_pad_b_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H7: fld_hash_bit_len(fld_h(local_ctx)) >= 0 -S- Applied substitution rule zero_pad_b_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H8: fld_hash_bit_len(fld_h(local_ctx)) <= 2147483640 -S- Applied substitution rule zero_pad_b_rules(87). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H4: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx), [ i___1]) and element(fld_x(local_ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule zero_pad_b_rules(88). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H4: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [ i___1]) <= 18446744073709551615) -S- Applied substitution rule zero_pad_b_rules(92). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H3: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [ i___2]) <= 255) *** Proved C3: fld_byte_count(fld_h(local_ctx)) >= - 2147483648 using hypothesis H5. *** Proved C4: fld_byte_count(fld_h(local_ctx)) <= 2147483647 using hypothesis H6. *** PROVED VC. @@@@@@@@@@ VC: procedure_zero_pad_b_2. @@@@@@@@@@ %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified C1 on reading formula in, to give: %%% C1: fld_byte_count(fld_h(local_ctx)) <= skein_512_block_bytes_index__last -> skein_512_block_bytes_index__last >= skein_512_block_bytes_index__first -S- Applied substitution rule zero_pad_b_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H1: fld_byte_count(fld_h(local_ctx)) < 64 -S- Applied substitution rule zero_pad_b_rules(3). This was achieved by replacing all occurrences of integer__first by: - 2147483648. 
New H22: skein_512_block_bytes_index__last >= - 2147483648 New H24: fld_byte_count(fld_h(local_ctx)) >= - 2147483648 -S- Applied substitution rule zero_pad_b_rules(4). This was achieved by replacing all occurrences of integer__last by: 2147483647. New H23: skein_512_block_bytes_index__last <= 2147483647 New H25: fld_byte_count(fld_h(local_ctx)) <= 2147483647 -S- Applied substitution rule zero_pad_b_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H5: fld_byte_count(fld_h(local_ctx)) >= 0 -S- Applied substitution rule zero_pad_b_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H6: fld_byte_count(fld_h(local_ctx)) <= 2147483647 -S- Applied substitution rule zero_pad_b_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H11: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule zero_pad_b_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= 63 -S- Applied substitution rule zero_pad_b_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H14: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule zero_pad_b_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= 127 -S- Applied substitution rule zero_pad_b_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H3: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule zero_pad_b_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H3: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= 255) -S- Applied substitution rule zero_pad_b_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H16: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule zero_pad_b_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= 65535 -S- Applied substitution rule zero_pad_b_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H18: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule zero_pad_b_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= 4294967295 -S- Applied substitution rule zero_pad_b_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. 
New H4: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) New H20: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule zero_pad_b_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= 18446744073709551615 New H4: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule zero_pad_b_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H7: fld_hash_bit_len(fld_h(local_ctx)) >= 0 -S- Applied substitution rule zero_pad_b_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H8: fld_hash_bit_len(fld_h(local_ctx)) <= 2147483640 -S- Applied substitution rule zero_pad_b_rules(87). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H4: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx), [ i___1]) and element(fld_x(local_ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule zero_pad_b_rules(88). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H4: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [ i___1]) <= 18446744073709551615) -S- Applied substitution rule zero_pad_b_rules(92). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H3: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx), [ i___2]) and element(fld_b(local_ctx), [i___2]) <= 255) New C1: fld_byte_count(fld_h(local_ctx)) <= skein_512_block_bytes_index__last -> skein_512_block_bytes_index__last >= 0 New C2: fld_byte_count(fld_h(local_ctx)) <= skein_512_block_bytes_index__last -> fld_byte_count(fld_h(local_ctx)) >= 0 and fld_byte_count(fld_h(local_ctx)) <= skein_512_block_bytes_index__last -S- Applied substitution rule zero_pad_b_rules(93). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H22: true New H23: true New H3: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [ i___2]) <= 255) New C1: true New C2: fld_byte_count(fld_h(local_ctx)) <= 63 -> fld_byte_count(fld_h( local_ctx)) >= 0 and fld_byte_count(fld_h(local_ctx)) <= 63 *** Proved C1: true *** Proved C2: fld_byte_count(fld_h(local_ctx)) <= 63 -> fld_byte_count(fld_h( local_ctx)) >= 0 and fld_byte_count(fld_h(local_ctx)) <= 63 using hypotheses H1 & H5. *** PROVED VC. @@@@@@@@@@ VC: procedure_zero_pad_b_3. 
@@@@@@@@@@ %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H26 on reading formula in, to give: %%% H26: fld_byte_count(fld_h(local_ctx)) <= skein_512_block_bytes_index__last -> skein_512_block_bytes_index__last >= skein_512_block_bytes_index__first %%% Simplified C1 on reading formula in, to give: %%% C1: true %%% Simplified C4 on reading formula in, to give: %%% C4: true %%% Simplified C5 on reading formula in, to give: %%% C5: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified C6 on reading formula in, to give: %%% C6: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified C28 on reading formula in, to give: %%% C28: true *** Proved C1: true *** Proved C2: fld_hash_bit_len(fld_h(local_ctx)) > 0 using hypothesis H2. *** Proved C3: fld_byte_count(fld_h(local_ctx)) < skein_512_block_bytes_c using hypothesis H1. *** Proved C4: true *** Proved C5: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) using hypothesis H3. *** Proved C6: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) using hypothesis H4. *** Proved C7: fld_byte_count(fld_h(local_ctx)) >= natural__first using hypothesis H5. *** Proved C8: fld_byte_count(fld_h(local_ctx)) <= natural__last using hypothesis H6. *** Proved C9: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first using hypothesis H7. *** Proved C10: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last using hypothesis H8. *** Proved C11: true *** Proved C12: true *** Proved C13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first using hypothesis H11. *** Proved C14: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last using hypothesis H12. *** Proved C15: true *** Proved C16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first using hypothesis H14. *** Proved C17: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last using hypothesis H15. *** Proved C18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first using hypothesis H16. 
*** Proved C19: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last using hypothesis H17. *** Proved C20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first using hypothesis H18. *** Proved C21: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last using hypothesis H19. *** Proved C22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first using hypothesis H20. *** Proved C23: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last using hypothesis H21. *** Proved C24: fld_byte_count(fld_h(local_ctx)) < skein_512_block_bytes_c using hypothesis H1. *** Proved C25: fld_hash_bit_len(fld_h(local_ctx)) > 0 using hypothesis H2. *** Proved C27: fld_byte_count(fld_h(local_ctx)) <= skein_512_block_bytes_index__last using hypothesis H28. *** Proved C28: true *** Proved C29: fld_byte_count(fld_h(local_ctx)) <= skein_512_block_bytes_index__last using hypothesis H28. -S- Applied substitution rule zero_pad_b_rules(92). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H3: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [ i___2]) <= spark__unsigned__byte__last) New H26: fld_byte_count(fld_h(local_ctx)) <= skein_512_block_bytes_index__last -> skein_512_block_bytes_index__last >= 0 New H27: fld_byte_count(fld_h(local_ctx)) <= skein_512_block_bytes_index__last -> fld_byte_count(fld_h(local_ctx)) >= 0 and fld_byte_count(fld_h(local_ctx)) <= skein_512_block_bytes_index__last New C26: fld_byte_count(fld_h(local_ctx)) >= 0 +++ Using "A->B, A |- B" on hypotheses H26 & H28 yields a new hypothesis: +++ H29: skein_512_block_bytes_index__last >= 0 +++ Using "A->B, A |- B" on hypotheses H27 & H28 yields a new hypothesis: +++ H30: fld_byte_count(fld_h(local_ctx)) >= 0 and fld_byte_count(fld_h( local_ctx)) <= skein_512_block_bytes_index__last -S- Applied substitution rule zero_pad_b_rules(1). This was achieved by replacing all occurrences of skein_512_block_bytes_c by: 64. New H1: fld_byte_count(fld_h(local_ctx)) < 64 -S- Applied substitution rule zero_pad_b_rules(3). This was achieved by replacing all occurrences of integer__first by: - 2147483648. New H22: skein_512_block_bytes_index__last >= - 2147483648 New H24: fld_byte_count(fld_h(local_ctx)) >= - 2147483648 -S- Applied substitution rule zero_pad_b_rules(4). This was achieved by replacing all occurrences of integer__last by: 2147483647. New H23: skein_512_block_bytes_index__last <= 2147483647 New H25: fld_byte_count(fld_h(local_ctx)) <= 2147483647 -S- Applied substitution rule zero_pad_b_rules(8). This was achieved by replacing all occurrences of natural__first by: 0. New H5: fld_byte_count(fld_h(local_ctx)) >= 0 -S- Applied substitution rule zero_pad_b_rules(9). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H6: fld_byte_count(fld_h(local_ctx)) <= 2147483647 -S- Applied substitution rule zero_pad_b_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H11: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule zero_pad_b_rules(42). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= 63 -S- Applied substitution rule zero_pad_b_rules(47). 
This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H14: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule zero_pad_b_rules(48). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= 127 -S- Applied substitution rule zero_pad_b_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H3: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx), [ i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) -S- Applied substitution rule zero_pad_b_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H3: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx), [ i___2]) and element(fld_b(local_ctx), [i___2]) <= 255) -S- Applied substitution rule zero_pad_b_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H16: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule zero_pad_b_rules(60). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= 65535 -S- Applied substitution rule zero_pad_b_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H18: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule zero_pad_b_rules(66). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= 4294967295 -S- Applied substitution rule zero_pad_b_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H4: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) New H20: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule zero_pad_b_rules(72). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= 18446744073709551615 New H4: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule zero_pad_b_rules(82). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H7: fld_hash_bit_len(fld_h(local_ctx)) >= 0 -S- Applied substitution rule zero_pad_b_rules(83). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H8: fld_hash_bit_len(fld_h(local_ctx)) <= 2147483640 -S- Applied substitution rule zero_pad_b_rules(87). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H4: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx), [ i___1]) and element(fld_x(local_ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule zero_pad_b_rules(88). 
This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H4: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [ i___1]) <= 18446744073709551615) -S- Applied substitution rule zero_pad_b_rules(93). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H26: true New H27: fld_byte_count(fld_h(local_ctx)) <= 63 -> fld_byte_count(fld_h( local_ctx)) >= 0 and fld_byte_count(fld_h(local_ctx)) <= 63 New H28: fld_byte_count(fld_h(local_ctx)) <= 63 New H29: true New H30: fld_byte_count(fld_h(local_ctx)) >= 0 and fld_byte_count(fld_h( local_ctx)) <= 63 New H22: true New H23: true New H3: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [ i___2]) <= 255) *** Proved C26: fld_byte_count(fld_h(local_ctx)) >= 0 using hypothesis H5. *** PROVED VC. @@@@@@@@@@ VC: procedure_zero_pad_b_4. @@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: fld_hash_bit_len(fld_h(local_ctx)) = fld_hash_bit_len(fld_h( local_ctx~)) %%% Simplified H4 on reading formula in, to give: %%% H4: fld_byte_count(fld_h(local_ctx)) = fld_byte_count(fld_h( local_ctx~)) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H6 on reading formula in, to give: %%% H6: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H24 on reading formula in, to give: %%% H24: fld_byte_count(fld_h(local_ctx~)) < skein_512_block_bytes_c %%% Simplified H25 on reading formula in, to give: %%% H25: fld_hash_bit_len(fld_h(local_ctx~)) > 0 --- Hypothesis H29 has been replaced by "true". (It is already present, as H27). --- Hypothesis H32 has been replaced by "true". (It is already present, as H26). --- Hypothesis H33 has been replaced by "true". (It is already present, as H27). 
%%% Simplified C1 on reading formula in, to give: %%% C1: fld_hash_bit_len(fld_h(local_ctx)) = fld_hash_bit_len(fld_h( local_ctx~)) %%% Simplified C2 on reading formula in, to give: %%% C2: fld_hash_bit_len(fld_h(local_ctx)) > 0 %%% Simplified C3 on reading formula in, to give: %%% C3: fld_byte_count(fld_h(local_ctx)) < skein_512_block_bytes_c %%% Simplified C4 on reading formula in, to give: %%% C4: fld_byte_count(fld_h(local_ctx)) = fld_byte_count(fld_h( local_ctx~)) %%% Simplified C5 on reading formula in, to give: %%% C5: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(update(fld_b(local_ctx), [ loop__1__i], 0), [i___2]) and element(update(fld_b(local_ctx), [ loop__1__i], 0), [i___2]) <= spark__unsigned__byte__last) %%% Simplified C6 on reading formula in, to give: %%% C6: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified C7 on reading formula in, to give: %%% C7: fld_byte_count(fld_h(local_ctx)) >= natural__first %%% Simplified C8 on reading formula in, to give: %%% C8: fld_byte_count(fld_h(local_ctx)) <= natural__last %%% Simplified C9 on reading formula in, to give: %%% C9: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first %%% Simplified C10 on reading formula in, to give: %%% C10: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last %%% Simplified C13 on reading formula in, to give: %%% C13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first %%% Simplified C14 on reading formula in, to give: %%% C14: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last %%% Simplified C16 on reading formula in, to give: %%% C16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first %%% Simplified C17 on reading formula in, to give: %%% C17: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last %%% Simplified C18 on reading formula in, to give: %%% C18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first %%% Simplified C19 on reading formula in, to give: %%% C19: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last %%% Simplified C20 on reading formula in, to give: %%% C20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first %%% Simplified C21 on reading formula in, to give: %%% C21: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last %%% Simplified C22 on reading formula in, to give: %%% C22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first %%% Simplified C23 on reading formula in, to give: %%% C23: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last %%% Simplified C24 on reading formula in, to give: %%% C24: fld_byte_count(fld_h(local_ctx~)) < skein_512_block_bytes_c %%% Simplified C25 on reading formula in, to give: %%% C25: fld_hash_bit_len(fld_h(local_ctx~)) > 0 %%% Simplified C28 on reading formula in, to give: %%% C28: loop__1__i + 1 >= fld_byte_count(fld_h(local_ctx)) *** Proved C1: fld_hash_bit_len(fld_h(local_ctx)) = fld_hash_bit_len(fld_h( local_ctx~)) using hypothesis H1. *** Proved C2: fld_hash_bit_len(fld_h(local_ctx)) > 0 using hypothesis H2. 
*** Proved C3: fld_byte_count(fld_h(local_ctx)) < skein_512_block_bytes_c using hypothesis H3. *** Proved C4: fld_byte_count(fld_h(local_ctx)) = fld_byte_count(fld_h( local_ctx~)) using hypothesis H4. *** Proved C5: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(update(fld_b(local_ctx), [ loop__1__i], 0), [i___2]) and element(update(fld_b(local_ctx), [ loop__1__i], 0), [i___2]) <= spark__unsigned__byte__last) using hypotheses H5, H26, H27, H30 & H31. *** Proved C6: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) using hypothesis H6. *** Proved C7: fld_byte_count(fld_h(local_ctx)) >= natural__first using hypothesis H7. *** Proved C8: fld_byte_count(fld_h(local_ctx)) <= natural__last using hypothesis H8. *** Proved C9: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first using hypothesis H9. *** Proved C10: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last using hypothesis H10. *** Proved C11: true *** Proved C12: true *** Proved C13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first using hypothesis H13. *** Proved C14: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last using hypothesis H14. *** Proved C15: true *** Proved C16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first using hypothesis H16. *** Proved C17: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last using hypothesis H17. *** Proved C18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first using hypothesis H18. *** Proved C19: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last using hypothesis H19. *** Proved C20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first using hypothesis H20. *** Proved C21: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last using hypothesis H21. *** Proved C22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first using hypothesis H22. *** Proved C23: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last using hypothesis H23. *** Proved C24: fld_byte_count(fld_h(local_ctx~)) < skein_512_block_bytes_c using hypothesis H24. *** Proved C25: fld_hash_bit_len(fld_h(local_ctx~)) > 0 using hypothesis H25. *** Proved C26: loop__1__i + 1 >= skein_512_block_bytes_index__first using hypothesis H26. *** Proved C28: loop__1__i + 1 >= fld_byte_count(fld_h(local_ctx)) using hypothesis H28. -S- Applied substitution rule zero_pad_b_rules(93). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H5: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) New H27: loop__1__i <= 63 New H34: not loop__1__i = 63 New C27: loop__1__i <= 62 New C29: loop__1__i <= 62 *** Proved C27: loop__1__i <= 62 using hypotheses H27 & H34. *** Proved C29: loop__1__i <= 62 using hypotheses H27 & H34. *** PROVED VC. @@@@@@@@@@ VC: procedure_zero_pad_b_5. 
@@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: fld_hash_bit_len(fld_h(local_ctx)) = fld_hash_bit_len(fld_h( local_ctx~)) %%% Simplified H4 on reading formula in, to give: %%% H4: fld_byte_count(fld_h(local_ctx)) = fld_byte_count(fld_h( local_ctx~)) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H6 on reading formula in, to give: %%% H6: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H24 on reading formula in, to give: %%% H24: fld_byte_count(fld_h(local_ctx~)) < skein_512_block_bytes_c %%% Simplified H25 on reading formula in, to give: %%% H25: fld_hash_bit_len(fld_h(local_ctx~)) > 0 --- Hypothesis H29 has been replaced by "true". (It is already present, as H27). *** Proved C3: loop__1__i >= skein_512_block_bytes_index__first using hypothesis H26. *** Proved C4: loop__1__i <= skein_512_block_bytes_index__last using hypothesis H27. -S- Applied substitution rule zero_pad_b_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H5: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) New C1: true -S- Applied substitution rule zero_pad_b_rules(54). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H5: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= 255) New C2: true *** Proved C1: true *** Proved C2: true *** PROVED VC. @@@@@@@@@@ VC: procedure_zero_pad_b_6. @@@@@@@@@@ %%% Simplified H3 on reading formula in, to give: %%% H3: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H26 on reading formula in, to give: %%% H26: fld_byte_count(fld_h(local_ctx)) <= skein_512_block_bytes_index__last -> skein_512_block_bytes_index__last >= skein_512_block_bytes_index__first %%% Simplified C1 on reading formula in, to give: %%% C1: true %%% Simplified C4 on reading formula in, to give: %%% C4: true *** Proved C1: true *** Proved C2: fld_hash_bit_len(fld_h(local_ctx)) > 0 using hypothesis H2. *** Proved C3: fld_byte_count(fld_h(local_ctx)) < skein_512_block_bytes_c using hypothesis H1. *** Proved C4: true *** PROVED VC. @@@@@@@@@@ VC: procedure_zero_pad_b_7. 
@@@@@@@@@@ %%% Simplified H1 on reading formula in, to give: %%% H1: fld_hash_bit_len(fld_h(local_ctx)) = fld_hash_bit_len(fld_h( local_ctx~)) %%% Simplified H4 on reading formula in, to give: %%% H4: fld_byte_count(fld_h(local_ctx)) = fld_byte_count(fld_h( local_ctx~)) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H6 on reading formula in, to give: %%% H6: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) %%% Simplified H24 on reading formula in, to give: %%% H24: fld_byte_count(fld_h(local_ctx~)) < skein_512_block_bytes_c %%% Simplified H25 on reading formula in, to give: %%% H25: fld_hash_bit_len(fld_h(local_ctx~)) > 0 --- Hypothesis H29 has been replaced by "true". (It is already present, as H27). --- Hypothesis H32 has been replaced by "true". (It is already present, as H26). --- Hypothesis H33 has been replaced by "true". (It is already present, as H27). %%% Simplified C1 on reading formula in, to give: %%% C1: fld_hash_bit_len(fld_h(local_ctx)) = fld_hash_bit_len(fld_h( local_ctx~)) %%% Simplified C2 on reading formula in, to give: %%% C2: fld_hash_bit_len(fld_h(local_ctx)) > 0 %%% Simplified C3 on reading formula in, to give: %%% C3: fld_byte_count(fld_h(local_ctx)) < skein_512_block_bytes_c %%% Simplified C4 on reading formula in, to give: %%% C4: fld_byte_count(fld_h(local_ctx)) = fld_byte_count(fld_h( local_ctx~)) *** Proved C1: fld_hash_bit_len(fld_h(local_ctx)) = fld_hash_bit_len(fld_h( local_ctx~)) using hypothesis H1. *** Proved C2: fld_hash_bit_len(fld_h(local_ctx)) > 0 using hypothesis H2. *** Proved C3: fld_byte_count(fld_h(local_ctx)) < skein_512_block_bytes_c using hypothesis H3. *** Proved C4: fld_byte_count(fld_h(local_ctx)) = fld_byte_count(fld_h( local_ctx~)) using hypothesis H4. *** PROVED VC. 
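For orientation, the VCs discharged above (procedure_zero_pad_b_1 .. _7) have the shape produced by a byte-zeroing loop over the tail of the 64-byte block buffer: the loop-bound range checks give VCs like _1 and _2, the loop assertion gives the establishment and preservation VCs _3 and _4, the element assignment gives the value range checks in _5, and the remaining VCs cover the exit paths. The fragment below is a minimal sketch of such a loop, not the shipped body of Skein.Skein_512_Final.Zero_Pad_B; the names Sketch, Zero_Tail, Byte, Block_Index and Block_Bytes, and the annotations, are illustrative assumptions only.

--  Hypothetical sketch: a loop of this shape yields range-check and
--  loop-assertion VCs analogous to procedure_zero_pad_b_1 .. _7 above.
package Sketch is
   Skein_512_Block_Bytes_C : constant := 64;
   subtype Block_Index is Natural range 0 .. Skein_512_Block_Bytes_C - 1;
   subtype Byte        is Natural range 0 .. 255;
   type Block_Bytes is array (Block_Index) of Byte;

   procedure Zero_Tail (B : in out Block_Bytes; Byte_Count : in Natural);
   --# derives B from B, Byte_Count;
   --# pre Byte_Count < Skein_512_Block_Bytes_C;
end Sketch;

package body Sketch is
   procedure Zero_Tail (B : in out Block_Bytes; Byte_Count : in Natural) is
   begin
      --  The loop bounds produce range-check VCs (cf. _1 and _2); each
      --  iteration must re-establish the assertion (cf. the preservation
      --  VC _4), and the assignment B (I) := 0 produces the index and
      --  value range checks (cf. _5).
      for I in Natural range Byte_Count .. Block_Index'Last loop
         B (I) := 0;
         --# assert I >= Byte_Count and
         --#        I <= Block_Index'Last and
         --#        Byte_Count < Skein_512_Block_Bytes_C;
      end loop;
   end Zero_Tail;
end Sketch;

A loop written this way leaves the Simplifier with exactly the kind of bookkeeping seen in the log above: substituting the concrete bounds (0, 63, 255, 2**64 - 1) for the __first/__last constants via the rule file and then closing each conclusion from an existing hypothesis.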
spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final/set_b_counter.fdl0000644000175000017500000002064311712513676031351 0ustar eugeneugen {*******************************************************} {FDL Declarations} {Examiner Pro Edition} {*******************************************************} {procedure Skein.Skein_512_Final.Set_B_Counter} title procedure set_b_counter; function round__(real) : integer; type natural = integer; type interfaces__unsigned_8 = integer; type interfaces__unsigned_16 = integer; type interfaces__unsigned_32 = integer; type interfaces__unsigned_64 = integer; type spark__unsigned__u6 = integer; type spark__unsigned__u7 = integer; type spark__unsigned__byte = integer; type spark__unsigned__u16 = integer; type spark__unsigned__u32 = integer; type spark__unsigned__u64 = integer; type spark__unsigned__shift_count = integer; type spark__crypto__word_count_t = integer; type hash_bit_length = integer; type skein_512_state_words_index = integer; type skein_512_block_bytes_index = integer; type spark__crypto__byte_seq = array [natural] of spark__unsigned__byte; type spark__crypto__u64_seq = array [ spark__crypto__word_count_t] of spark__unsigned__u64; type tweak_value = record byte_count_lsb : spark__unsigned__u64; byte_count_msb : spark__unsigned__u32; reserved : spark__unsigned__u16; tree_level : spark__unsigned__u7; bit_pad : boolean; field_type : spark__unsigned__u6; first_block : boolean; final_block : boolean end; type context_header = record tweak_words : tweak_value; hash_bit_len : hash_bit_length; byte_count : natural end; type skein_512_context = record h : context_header; x : spark__crypto__u64_seq; b : spark__crypto__byte_seq end; const skein_512_block_bytes_index__base__first : integer = pending; const skein_512_block_bytes_index__base__last : integer = pending; const skein_512_state_words_index__base__first : integer = pending; const skein_512_state_words_index__base__last : integer = pending; const hash_bit_length__base__first : integer = pending; const hash_bit_length__base__last : integer = pending; const spark__crypto__word_count_t__base__first : integer = pending; const spark__crypto__word_count_t__base__last : integer = pending; const spark__unsigned__shift_count__base__first : integer = pending; const spark__unsigned__shift_count__base__last : integer = pending; const spark__unsigned__u64__base__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__base__last : interfaces__unsigned_64 = pending; const spark__unsigned__u32__base__first : interfaces__unsigned_32 = pending; const spark__unsigned__u32__base__last : interfaces__unsigned_32 = pending; const spark__unsigned__u16__base__first : interfaces__unsigned_16 = pending; const spark__unsigned__u16__base__last : interfaces__unsigned_16 = pending; const spark__unsigned__byte__base__first : interfaces__unsigned_8 = pending; const spark__unsigned__byte__base__last : interfaces__unsigned_8 = pending; const spark__unsigned__u7__base__first : spark__unsigned__u7 = pending; const spark__unsigned__u7__base__last : spark__unsigned__u7 = pending; const spark__unsigned__u6__base__first : spark__unsigned__u6 = pending; const spark__unsigned__u6__base__last : spark__unsigned__u6 = pending; const interfaces__unsigned_64__base__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_32__base__first : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__base__last : 
interfaces__unsigned_32 = pending; const interfaces__unsigned_16__base__first : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__base__last : interfaces__unsigned_16 = pending; const interfaces__unsigned_8__base__first : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__base__last : interfaces__unsigned_8 = pending; const natural__base__first : integer = pending; const natural__base__last : integer = pending; const integer__base__first : integer = pending; const integer__base__last : integer = pending; const context_header__size : integer = pending; const tweak_value__size : integer = pending; const skein_512_context__size : integer = pending; const skein_512_block_bytes_index__first : integer = pending; const skein_512_block_bytes_index__last : integer = pending; const skein_512_block_bytes_index__size : integer = pending; const skein_512_state_words_index__first : integer = pending; const skein_512_state_words_index__last : integer = pending; const skein_512_state_words_index__size : integer = pending; const hash_bit_length__first : integer = pending; const hash_bit_length__last : integer = pending; const hash_bit_length__size : integer = pending; const spark__crypto__word_count_t__first : integer = pending; const spark__crypto__word_count_t__last : integer = pending; const spark__crypto__word_count_t__size : integer = pending; const spark__unsigned__shift_count__first : integer = pending; const spark__unsigned__shift_count__last : integer = pending; const spark__unsigned__shift_count__size : integer = pending; const spark__unsigned__u64__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__last : interfaces__unsigned_64 = pending; const spark__unsigned__u64__modulus : integer = pending; const spark__unsigned__u64__size : integer = pending; const spark__unsigned__u32__first : interfaces__unsigned_32 = pending; const spark__unsigned__u32__last : interfaces__unsigned_32 = pending; const spark__unsigned__u32__modulus : integer = pending; const spark__unsigned__u32__size : integer = pending; const spark__unsigned__u16__first : interfaces__unsigned_16 = pending; const spark__unsigned__u16__last : interfaces__unsigned_16 = pending; const spark__unsigned__u16__modulus : integer = pending; const spark__unsigned__u16__size : integer = pending; const spark__unsigned__byte__first : interfaces__unsigned_8 = pending; const spark__unsigned__byte__last : interfaces__unsigned_8 = pending; const spark__unsigned__byte__modulus : integer = pending; const spark__unsigned__byte__size : integer = pending; const spark__unsigned__u7__first : spark__unsigned__u7 = pending; const spark__unsigned__u7__last : spark__unsigned__u7 = pending; const spark__unsigned__u7__modulus : integer = pending; const spark__unsigned__u7__size : integer = pending; const spark__unsigned__u6__first : spark__unsigned__u6 = pending; const spark__unsigned__u6__last : spark__unsigned__u6 = pending; const spark__unsigned__u6__modulus : integer = pending; const spark__unsigned__u6__size : integer = pending; const interfaces__unsigned_64__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__modulus : integer = pending; const interfaces__unsigned_64__size : integer = pending; const interfaces__unsigned_32__first : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__last : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__modulus : integer = pending; const 
interfaces__unsigned_32__size : integer = pending; const interfaces__unsigned_16__first : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__last : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__modulus : integer = pending; const interfaces__unsigned_16__size : integer = pending; const interfaces__unsigned_8__first : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__last : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__modulus : integer = pending; const interfaces__unsigned_8__size : integer = pending; const natural__first : integer = pending; const natural__last : integer = pending; const natural__size : integer = pending; const integer__first : integer = pending; const integer__last : integer = pending; const integer__size : integer = pending; var local_ctx : skein_512_context; var counter : interfaces__unsigned_64; function spark__unsigned__shift_right_64( interfaces__unsigned_64, integer) : interfaces__unsigned_64; end; spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final/set_b_counter.vcg0000644000175000017500000035701411712513676031370 0ustar eugeneugen ******************************************************* Semantic Analysis of SPARK Text Examiner Pro Edition ******************************************************* procedure Skein.Skein_512_Final.Set_B_Counter For path(s) from start to run-time check associated with statement of line 887: procedure_set_b_counter_1. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: counter >= spark__unsigned__u64__first . H3: counter <= spark__unsigned__u64__last . H4: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last))) . H5: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last))) . H6: fld_byte_count(fld_h(local_ctx)) >= natural__first . H7: fld_byte_count(fld_h(local_ctx)) <= natural__last . H8: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first . H9: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last . H10: true . H11: true . H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first . H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last . H14: true . H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first . H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last . H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first . H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last . H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first . H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last . H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first . H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last . -> C1: bit__and(counter, 255) >= spark__unsigned__byte__first . C2: bit__and(counter, 255) <= spark__unsigned__byte__last . C3: bit__and(counter, 255) >= spark__unsigned__byte__first . 
C4: bit__and(counter, 255) <= spark__unsigned__byte__last . C5: 0 >= skein_512_block_bytes_index__first . C6: 0 <= skein_512_block_bytes_index__last . For path(s) from start to run-time check associated with statement of line 888: procedure_set_b_counter_2. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: counter >= spark__unsigned__u64__first . H3: counter <= spark__unsigned__u64__last . H4: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last))) . H5: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last))) . H6: fld_byte_count(fld_h(local_ctx)) >= natural__first . H7: fld_byte_count(fld_h(local_ctx)) <= natural__last . H8: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first . H9: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last . H10: true . H11: true . H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first . H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last . H14: true . H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first . H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last . H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first . H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last . H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first . H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last . H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first . H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last . H23: bit__and(counter, 255) >= spark__unsigned__byte__first . H24: bit__and(counter, 255) <= spark__unsigned__byte__last . H25: bit__and(counter, 255) >= spark__unsigned__byte__first . H26: bit__and(counter, 255) <= spark__unsigned__byte__last . H27: 0 >= skein_512_block_bytes_index__first . H28: 0 <= skein_512_block_bytes_index__last . -> C1: 8 >= spark__unsigned__shift_count__first . C2: 8 <= spark__unsigned__shift_count__last . C3: counter >= interfaces__unsigned_64__first . C4: counter <= interfaces__unsigned_64__last . C5: 1 >= skein_512_block_bytes_index__first . C6: 1 <= skein_512_block_bytes_index__last . For path(s) from start to run-time check associated with statement of line 888: procedure_set_b_counter_3. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: counter >= spark__unsigned__u64__first . H3: counter <= spark__unsigned__u64__last . H4: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last))) . H5: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last))) . 
H6: fld_byte_count(fld_h(local_ctx)) >= natural__first . H7: fld_byte_count(fld_h(local_ctx)) <= natural__last . H8: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first . H9: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last . H10: true . H11: true . H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first . H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last . H14: true . H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first . H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last . H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first . H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last . H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first . H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last . H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first . H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last . H23: bit__and(counter, 255) >= spark__unsigned__byte__first . H24: bit__and(counter, 255) <= spark__unsigned__byte__last . H25: bit__and(counter, 255) >= spark__unsigned__byte__first . H26: bit__and(counter, 255) <= spark__unsigned__byte__last . H27: 0 >= skein_512_block_bytes_index__first . H28: 0 <= skein_512_block_bytes_index__last . H29: 8 >= spark__unsigned__shift_count__first . H30: 8 <= spark__unsigned__shift_count__last . H31: counter >= interfaces__unsigned_64__first . H32: counter <= interfaces__unsigned_64__last . H33: 1 >= skein_512_block_bytes_index__first . H34: 1 <= skein_512_block_bytes_index__last . H35: spark__unsigned__shift_right_64(counter, 8) >= interfaces__unsigned_64__first . H36: spark__unsigned__shift_right_64(counter, 8) <= interfaces__unsigned_64__last . -> C1: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . C2: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . C3: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . C4: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . For path(s) from start to run-time check associated with statement of line 889: procedure_set_b_counter_4. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: counter >= spark__unsigned__u64__first . H3: counter <= spark__unsigned__u64__last . H4: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last))) . H5: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last))) . H6: fld_byte_count(fld_h(local_ctx)) >= natural__first . H7: fld_byte_count(fld_h(local_ctx)) <= natural__last . H8: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first . H9: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last . H10: true . H11: true . H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first . 
H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last . H14: true . H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first . H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last . H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first . H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last . H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first . H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last . H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first . H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last . H23: bit__and(counter, 255) >= spark__unsigned__byte__first . H24: bit__and(counter, 255) <= spark__unsigned__byte__last . H25: bit__and(counter, 255) >= spark__unsigned__byte__first . H26: bit__and(counter, 255) <= spark__unsigned__byte__last . H27: 0 >= skein_512_block_bytes_index__first . H28: 0 <= skein_512_block_bytes_index__last . H29: 8 >= spark__unsigned__shift_count__first . H30: 8 <= spark__unsigned__shift_count__last . H31: counter >= interfaces__unsigned_64__first . H32: counter <= interfaces__unsigned_64__last . H33: 1 >= skein_512_block_bytes_index__first . H34: 1 <= skein_512_block_bytes_index__last . H35: spark__unsigned__shift_right_64(counter, 8) >= interfaces__unsigned_64__first . H36: spark__unsigned__shift_right_64(counter, 8) <= interfaces__unsigned_64__last . H37: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . H38: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . H39: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . H40: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . -> C1: 16 >= spark__unsigned__shift_count__first . C2: 16 <= spark__unsigned__shift_count__last . C3: counter >= interfaces__unsigned_64__first . C4: counter <= interfaces__unsigned_64__last . C5: 2 >= skein_512_block_bytes_index__first . C6: 2 <= skein_512_block_bytes_index__last . For path(s) from start to run-time check associated with statement of line 889: procedure_set_b_counter_5. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: counter >= spark__unsigned__u64__first . H3: counter <= spark__unsigned__u64__last . H4: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last))) . H5: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last))) . H6: fld_byte_count(fld_h(local_ctx)) >= natural__first . H7: fld_byte_count(fld_h(local_ctx)) <= natural__last . H8: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first . H9: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last . H10: true . H11: true . H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first . H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last . H14: true . 
H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first . H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last . H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first . H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last . H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first . H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last . H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first . H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last . H23: bit__and(counter, 255) >= spark__unsigned__byte__first . H24: bit__and(counter, 255) <= spark__unsigned__byte__last . H25: bit__and(counter, 255) >= spark__unsigned__byte__first . H26: bit__and(counter, 255) <= spark__unsigned__byte__last . H27: 0 >= skein_512_block_bytes_index__first . H28: 0 <= skein_512_block_bytes_index__last . H29: 8 >= spark__unsigned__shift_count__first . H30: 8 <= spark__unsigned__shift_count__last . H31: counter >= interfaces__unsigned_64__first . H32: counter <= interfaces__unsigned_64__last . H33: 1 >= skein_512_block_bytes_index__first . H34: 1 <= skein_512_block_bytes_index__last . H35: spark__unsigned__shift_right_64(counter, 8) >= interfaces__unsigned_64__first . H36: spark__unsigned__shift_right_64(counter, 8) <= interfaces__unsigned_64__last . H37: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . H38: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . H39: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . H40: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . H41: 16 >= spark__unsigned__shift_count__first . H42: 16 <= spark__unsigned__shift_count__last . H43: counter >= interfaces__unsigned_64__first . H44: counter <= interfaces__unsigned_64__last . H45: 2 >= skein_512_block_bytes_index__first . H46: 2 <= skein_512_block_bytes_index__last . H47: spark__unsigned__shift_right_64(counter, 16) >= interfaces__unsigned_64__first . H48: spark__unsigned__shift_right_64(counter, 16) <= interfaces__unsigned_64__last . -> C1: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= spark__unsigned__byte__first . C2: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= spark__unsigned__byte__last . C3: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= spark__unsigned__byte__first . C4: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= spark__unsigned__byte__last . For path(s) from start to run-time check associated with statement of line 890: procedure_set_b_counter_6. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: counter >= spark__unsigned__u64__first . H3: counter <= spark__unsigned__u64__last . H4: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last))) . H5: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last))) . 
H6: fld_byte_count(fld_h(local_ctx)) >= natural__first . H7: fld_byte_count(fld_h(local_ctx)) <= natural__last . H8: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first . H9: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last . H10: true . H11: true . H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first . H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last . H14: true . H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first . H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last . H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first . H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last . H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first . H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last . H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first . H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last . H23: bit__and(counter, 255) >= spark__unsigned__byte__first . H24: bit__and(counter, 255) <= spark__unsigned__byte__last . H25: bit__and(counter, 255) >= spark__unsigned__byte__first . H26: bit__and(counter, 255) <= spark__unsigned__byte__last . H27: 0 >= skein_512_block_bytes_index__first . H28: 0 <= skein_512_block_bytes_index__last . H29: 8 >= spark__unsigned__shift_count__first . H30: 8 <= spark__unsigned__shift_count__last . H31: counter >= interfaces__unsigned_64__first . H32: counter <= interfaces__unsigned_64__last . H33: 1 >= skein_512_block_bytes_index__first . H34: 1 <= skein_512_block_bytes_index__last . H35: spark__unsigned__shift_right_64(counter, 8) >= interfaces__unsigned_64__first . H36: spark__unsigned__shift_right_64(counter, 8) <= interfaces__unsigned_64__last . H37: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . H38: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . H39: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . H40: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . H41: 16 >= spark__unsigned__shift_count__first . H42: 16 <= spark__unsigned__shift_count__last . H43: counter >= interfaces__unsigned_64__first . H44: counter <= interfaces__unsigned_64__last . H45: 2 >= skein_512_block_bytes_index__first . H46: 2 <= skein_512_block_bytes_index__last . H47: spark__unsigned__shift_right_64(counter, 16) >= interfaces__unsigned_64__first . H48: spark__unsigned__shift_right_64(counter, 16) <= interfaces__unsigned_64__last . H49: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= spark__unsigned__byte__first . H50: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= spark__unsigned__byte__last . H51: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= spark__unsigned__byte__first . H52: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= spark__unsigned__byte__last . -> C1: 24 >= spark__unsigned__shift_count__first . C2: 24 <= spark__unsigned__shift_count__last . C3: counter >= interfaces__unsigned_64__first . C4: counter <= interfaces__unsigned_64__last . C5: 3 >= skein_512_block_bytes_index__first . C6: 3 <= skein_512_block_bytes_index__last . 
For path(s) from start to run-time check associated with statement of line 890: procedure_set_b_counter_7. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: counter >= spark__unsigned__u64__first . H3: counter <= spark__unsigned__u64__last . H4: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last))) . H5: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last))) . H6: fld_byte_count(fld_h(local_ctx)) >= natural__first . H7: fld_byte_count(fld_h(local_ctx)) <= natural__last . H8: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first . H9: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last . H10: true . H11: true . H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first . H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last . H14: true . H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first . H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last . H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first . H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last . H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first . H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last . H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first . H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last . H23: bit__and(counter, 255) >= spark__unsigned__byte__first . H24: bit__and(counter, 255) <= spark__unsigned__byte__last . H25: bit__and(counter, 255) >= spark__unsigned__byte__first . H26: bit__and(counter, 255) <= spark__unsigned__byte__last . H27: 0 >= skein_512_block_bytes_index__first . H28: 0 <= skein_512_block_bytes_index__last . H29: 8 >= spark__unsigned__shift_count__first . H30: 8 <= spark__unsigned__shift_count__last . H31: counter >= interfaces__unsigned_64__first . H32: counter <= interfaces__unsigned_64__last . H33: 1 >= skein_512_block_bytes_index__first . H34: 1 <= skein_512_block_bytes_index__last . H35: spark__unsigned__shift_right_64(counter, 8) >= interfaces__unsigned_64__first . H36: spark__unsigned__shift_right_64(counter, 8) <= interfaces__unsigned_64__last . H37: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . H38: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . H39: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . H40: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . H41: 16 >= spark__unsigned__shift_count__first . H42: 16 <= spark__unsigned__shift_count__last . H43: counter >= interfaces__unsigned_64__first . H44: counter <= interfaces__unsigned_64__last . H45: 2 >= skein_512_block_bytes_index__first . H46: 2 <= skein_512_block_bytes_index__last . H47: spark__unsigned__shift_right_64(counter, 16) >= interfaces__unsigned_64__first . 
H48: spark__unsigned__shift_right_64(counter, 16) <= interfaces__unsigned_64__last . H49: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= spark__unsigned__byte__first . H50: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= spark__unsigned__byte__last . H51: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= spark__unsigned__byte__first . H52: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= spark__unsigned__byte__last . H53: 24 >= spark__unsigned__shift_count__first . H54: 24 <= spark__unsigned__shift_count__last . H55: counter >= interfaces__unsigned_64__first . H56: counter <= interfaces__unsigned_64__last . H57: 3 >= skein_512_block_bytes_index__first . H58: 3 <= skein_512_block_bytes_index__last . H59: spark__unsigned__shift_right_64(counter, 24) >= interfaces__unsigned_64__first . H60: spark__unsigned__shift_right_64(counter, 24) <= interfaces__unsigned_64__last . -> C1: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= spark__unsigned__byte__first . C2: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= spark__unsigned__byte__last . C3: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= spark__unsigned__byte__first . C4: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= spark__unsigned__byte__last . For path(s) from start to run-time check associated with statement of line 891: procedure_set_b_counter_8. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: counter >= spark__unsigned__u64__first . H3: counter <= spark__unsigned__u64__last . H4: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last))) . H5: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last))) . H6: fld_byte_count(fld_h(local_ctx)) >= natural__first . H7: fld_byte_count(fld_h(local_ctx)) <= natural__last . H8: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first . H9: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last . H10: true . H11: true . H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first . H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last . H14: true . H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first . H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last . H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first . H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last . H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first . H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last . H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first . H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last . H23: bit__and(counter, 255) >= spark__unsigned__byte__first . H24: bit__and(counter, 255) <= spark__unsigned__byte__last . H25: bit__and(counter, 255) >= spark__unsigned__byte__first . H26: bit__and(counter, 255) <= spark__unsigned__byte__last . 
H27: 0 >= skein_512_block_bytes_index__first . H28: 0 <= skein_512_block_bytes_index__last . H29: 8 >= spark__unsigned__shift_count__first . H30: 8 <= spark__unsigned__shift_count__last . H31: counter >= interfaces__unsigned_64__first . H32: counter <= interfaces__unsigned_64__last . H33: 1 >= skein_512_block_bytes_index__first . H34: 1 <= skein_512_block_bytes_index__last . H35: spark__unsigned__shift_right_64(counter, 8) >= interfaces__unsigned_64__first . H36: spark__unsigned__shift_right_64(counter, 8) <= interfaces__unsigned_64__last . H37: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . H38: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . H39: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . H40: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . H41: 16 >= spark__unsigned__shift_count__first . H42: 16 <= spark__unsigned__shift_count__last . H43: counter >= interfaces__unsigned_64__first . H44: counter <= interfaces__unsigned_64__last . H45: 2 >= skein_512_block_bytes_index__first . H46: 2 <= skein_512_block_bytes_index__last . H47: spark__unsigned__shift_right_64(counter, 16) >= interfaces__unsigned_64__first . H48: spark__unsigned__shift_right_64(counter, 16) <= interfaces__unsigned_64__last . H49: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= spark__unsigned__byte__first . H50: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= spark__unsigned__byte__last . H51: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= spark__unsigned__byte__first . H52: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= spark__unsigned__byte__last . H53: 24 >= spark__unsigned__shift_count__first . H54: 24 <= spark__unsigned__shift_count__last . H55: counter >= interfaces__unsigned_64__first . H56: counter <= interfaces__unsigned_64__last . H57: 3 >= skein_512_block_bytes_index__first . H58: 3 <= skein_512_block_bytes_index__last . H59: spark__unsigned__shift_right_64(counter, 24) >= interfaces__unsigned_64__first . H60: spark__unsigned__shift_right_64(counter, 24) <= interfaces__unsigned_64__last . H61: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= spark__unsigned__byte__first . H62: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= spark__unsigned__byte__last . H63: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= spark__unsigned__byte__first . H64: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= spark__unsigned__byte__last . -> C1: 32 >= spark__unsigned__shift_count__first . C2: 32 <= spark__unsigned__shift_count__last . C3: counter >= interfaces__unsigned_64__first . C4: counter <= interfaces__unsigned_64__last . C5: 4 >= skein_512_block_bytes_index__first . C6: 4 <= skein_512_block_bytes_index__last . For path(s) from start to run-time check associated with statement of line 891: procedure_set_b_counter_9. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: counter >= spark__unsigned__u64__first . H3: counter <= spark__unsigned__u64__last . H4: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last))) . 
H5: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last))) . H6: fld_byte_count(fld_h(local_ctx)) >= natural__first . H7: fld_byte_count(fld_h(local_ctx)) <= natural__last . H8: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first . H9: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last . H10: true . H11: true . H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first . H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last . H14: true . H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first . H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last . H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first . H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last . H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first . H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last . H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first . H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last . H23: bit__and(counter, 255) >= spark__unsigned__byte__first . H24: bit__and(counter, 255) <= spark__unsigned__byte__last . H25: bit__and(counter, 255) >= spark__unsigned__byte__first . H26: bit__and(counter, 255) <= spark__unsigned__byte__last . H27: 0 >= skein_512_block_bytes_index__first . H28: 0 <= skein_512_block_bytes_index__last . H29: 8 >= spark__unsigned__shift_count__first . H30: 8 <= spark__unsigned__shift_count__last . H31: counter >= interfaces__unsigned_64__first . H32: counter <= interfaces__unsigned_64__last . H33: 1 >= skein_512_block_bytes_index__first . H34: 1 <= skein_512_block_bytes_index__last . H35: spark__unsigned__shift_right_64(counter, 8) >= interfaces__unsigned_64__first . H36: spark__unsigned__shift_right_64(counter, 8) <= interfaces__unsigned_64__last . H37: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . H38: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . H39: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . H40: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . H41: 16 >= spark__unsigned__shift_count__first . H42: 16 <= spark__unsigned__shift_count__last . H43: counter >= interfaces__unsigned_64__first . H44: counter <= interfaces__unsigned_64__last . H45: 2 >= skein_512_block_bytes_index__first . H46: 2 <= skein_512_block_bytes_index__last . H47: spark__unsigned__shift_right_64(counter, 16) >= interfaces__unsigned_64__first . H48: spark__unsigned__shift_right_64(counter, 16) <= interfaces__unsigned_64__last . H49: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= spark__unsigned__byte__first . H50: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= spark__unsigned__byte__last . H51: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= spark__unsigned__byte__first . H52: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= spark__unsigned__byte__last . H53: 24 >= spark__unsigned__shift_count__first . 
H54: 24 <= spark__unsigned__shift_count__last . H55: counter >= interfaces__unsigned_64__first . H56: counter <= interfaces__unsigned_64__last . H57: 3 >= skein_512_block_bytes_index__first . H58: 3 <= skein_512_block_bytes_index__last . H59: spark__unsigned__shift_right_64(counter, 24) >= interfaces__unsigned_64__first . H60: spark__unsigned__shift_right_64(counter, 24) <= interfaces__unsigned_64__last . H61: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= spark__unsigned__byte__first . H62: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= spark__unsigned__byte__last . H63: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= spark__unsigned__byte__first . H64: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= spark__unsigned__byte__last . H65: 32 >= spark__unsigned__shift_count__first . H66: 32 <= spark__unsigned__shift_count__last . H67: counter >= interfaces__unsigned_64__first . H68: counter <= interfaces__unsigned_64__last . H69: 4 >= skein_512_block_bytes_index__first . H70: 4 <= skein_512_block_bytes_index__last . H71: spark__unsigned__shift_right_64(counter, 32) >= interfaces__unsigned_64__first . H72: spark__unsigned__shift_right_64(counter, 32) <= interfaces__unsigned_64__last . -> C1: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) >= spark__unsigned__byte__first . C2: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) <= spark__unsigned__byte__last . C3: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) >= spark__unsigned__byte__first . C4: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) <= spark__unsigned__byte__last . For path(s) from start to run-time check associated with statement of line 892: procedure_set_b_counter_10. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: counter >= spark__unsigned__u64__first . H3: counter <= spark__unsigned__u64__last . H4: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last))) . H5: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last))) . H6: fld_byte_count(fld_h(local_ctx)) >= natural__first . H7: fld_byte_count(fld_h(local_ctx)) <= natural__last . H8: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first . H9: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last . H10: true . H11: true . H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first . H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last . H14: true . H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first . H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last . H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first . H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last . H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first . H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last . H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first . 
H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last . H23: bit__and(counter, 255) >= spark__unsigned__byte__first . H24: bit__and(counter, 255) <= spark__unsigned__byte__last . H25: bit__and(counter, 255) >= spark__unsigned__byte__first . H26: bit__and(counter, 255) <= spark__unsigned__byte__last . H27: 0 >= skein_512_block_bytes_index__first . H28: 0 <= skein_512_block_bytes_index__last . H29: 8 >= spark__unsigned__shift_count__first . H30: 8 <= spark__unsigned__shift_count__last . H31: counter >= interfaces__unsigned_64__first . H32: counter <= interfaces__unsigned_64__last . H33: 1 >= skein_512_block_bytes_index__first . H34: 1 <= skein_512_block_bytes_index__last . H35: spark__unsigned__shift_right_64(counter, 8) >= interfaces__unsigned_64__first . H36: spark__unsigned__shift_right_64(counter, 8) <= interfaces__unsigned_64__last . H37: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . H38: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . H39: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . H40: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . H41: 16 >= spark__unsigned__shift_count__first . H42: 16 <= spark__unsigned__shift_count__last . H43: counter >= interfaces__unsigned_64__first . H44: counter <= interfaces__unsigned_64__last . H45: 2 >= skein_512_block_bytes_index__first . H46: 2 <= skein_512_block_bytes_index__last . H47: spark__unsigned__shift_right_64(counter, 16) >= interfaces__unsigned_64__first . H48: spark__unsigned__shift_right_64(counter, 16) <= interfaces__unsigned_64__last . H49: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= spark__unsigned__byte__first . H50: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= spark__unsigned__byte__last . H51: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= spark__unsigned__byte__first . H52: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= spark__unsigned__byte__last . H53: 24 >= spark__unsigned__shift_count__first . H54: 24 <= spark__unsigned__shift_count__last . H55: counter >= interfaces__unsigned_64__first . H56: counter <= interfaces__unsigned_64__last . H57: 3 >= skein_512_block_bytes_index__first . H58: 3 <= skein_512_block_bytes_index__last . H59: spark__unsigned__shift_right_64(counter, 24) >= interfaces__unsigned_64__first . H60: spark__unsigned__shift_right_64(counter, 24) <= interfaces__unsigned_64__last . H61: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= spark__unsigned__byte__first . H62: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= spark__unsigned__byte__last . H63: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= spark__unsigned__byte__first . H64: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= spark__unsigned__byte__last . H65: 32 >= spark__unsigned__shift_count__first . H66: 32 <= spark__unsigned__shift_count__last . H67: counter >= interfaces__unsigned_64__first . H68: counter <= interfaces__unsigned_64__last . H69: 4 >= skein_512_block_bytes_index__first . H70: 4 <= skein_512_block_bytes_index__last . H71: spark__unsigned__shift_right_64(counter, 32) >= interfaces__unsigned_64__first . H72: spark__unsigned__shift_right_64(counter, 32) <= interfaces__unsigned_64__last . H73: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) >= spark__unsigned__byte__first . 
H74: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) <= spark__unsigned__byte__last . H75: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) >= spark__unsigned__byte__first . H76: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) <= spark__unsigned__byte__last . -> C1: 40 >= spark__unsigned__shift_count__first . C2: 40 <= spark__unsigned__shift_count__last . C3: counter >= interfaces__unsigned_64__first . C4: counter <= interfaces__unsigned_64__last . C5: 5 >= skein_512_block_bytes_index__first . C6: 5 <= skein_512_block_bytes_index__last . For path(s) from start to run-time check associated with statement of line 892: procedure_set_b_counter_11. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: counter >= spark__unsigned__u64__first . H3: counter <= spark__unsigned__u64__last . H4: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last))) . H5: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last))) . H6: fld_byte_count(fld_h(local_ctx)) >= natural__first . H7: fld_byte_count(fld_h(local_ctx)) <= natural__last . H8: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first . H9: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last . H10: true . H11: true . H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first . H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last . H14: true . H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first . H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last . H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first . H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last . H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first . H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last . H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first . H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last . H23: bit__and(counter, 255) >= spark__unsigned__byte__first . H24: bit__and(counter, 255) <= spark__unsigned__byte__last . H25: bit__and(counter, 255) >= spark__unsigned__byte__first . H26: bit__and(counter, 255) <= spark__unsigned__byte__last . H27: 0 >= skein_512_block_bytes_index__first . H28: 0 <= skein_512_block_bytes_index__last . H29: 8 >= spark__unsigned__shift_count__first . H30: 8 <= spark__unsigned__shift_count__last . H31: counter >= interfaces__unsigned_64__first . H32: counter <= interfaces__unsigned_64__last . H33: 1 >= skein_512_block_bytes_index__first . H34: 1 <= skein_512_block_bytes_index__last . H35: spark__unsigned__shift_right_64(counter, 8) >= interfaces__unsigned_64__first . H36: spark__unsigned__shift_right_64(counter, 8) <= interfaces__unsigned_64__last . H37: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . H38: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . 
H39: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . H40: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . H41: 16 >= spark__unsigned__shift_count__first . H42: 16 <= spark__unsigned__shift_count__last . H43: counter >= interfaces__unsigned_64__first . H44: counter <= interfaces__unsigned_64__last . H45: 2 >= skein_512_block_bytes_index__first . H46: 2 <= skein_512_block_bytes_index__last . H47: spark__unsigned__shift_right_64(counter, 16) >= interfaces__unsigned_64__first . H48: spark__unsigned__shift_right_64(counter, 16) <= interfaces__unsigned_64__last . H49: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= spark__unsigned__byte__first . H50: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= spark__unsigned__byte__last . H51: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= spark__unsigned__byte__first . H52: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= spark__unsigned__byte__last . H53: 24 >= spark__unsigned__shift_count__first . H54: 24 <= spark__unsigned__shift_count__last . H55: counter >= interfaces__unsigned_64__first . H56: counter <= interfaces__unsigned_64__last . H57: 3 >= skein_512_block_bytes_index__first . H58: 3 <= skein_512_block_bytes_index__last . H59: spark__unsigned__shift_right_64(counter, 24) >= interfaces__unsigned_64__first . H60: spark__unsigned__shift_right_64(counter, 24) <= interfaces__unsigned_64__last . H61: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= spark__unsigned__byte__first . H62: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= spark__unsigned__byte__last . H63: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= spark__unsigned__byte__first . H64: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= spark__unsigned__byte__last . H65: 32 >= spark__unsigned__shift_count__first . H66: 32 <= spark__unsigned__shift_count__last . H67: counter >= interfaces__unsigned_64__first . H68: counter <= interfaces__unsigned_64__last . H69: 4 >= skein_512_block_bytes_index__first . H70: 4 <= skein_512_block_bytes_index__last . H71: spark__unsigned__shift_right_64(counter, 32) >= interfaces__unsigned_64__first . H72: spark__unsigned__shift_right_64(counter, 32) <= interfaces__unsigned_64__last . H73: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) >= spark__unsigned__byte__first . H74: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) <= spark__unsigned__byte__last . H75: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) >= spark__unsigned__byte__first . H76: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) <= spark__unsigned__byte__last . H77: 40 >= spark__unsigned__shift_count__first . H78: 40 <= spark__unsigned__shift_count__last . H79: counter >= interfaces__unsigned_64__first . H80: counter <= interfaces__unsigned_64__last . H81: 5 >= skein_512_block_bytes_index__first . H82: 5 <= skein_512_block_bytes_index__last . H83: spark__unsigned__shift_right_64(counter, 40) >= interfaces__unsigned_64__first . H84: spark__unsigned__shift_right_64(counter, 40) <= interfaces__unsigned_64__last . -> C1: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) >= spark__unsigned__byte__first . C2: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) <= spark__unsigned__byte__last . C3: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) >= spark__unsigned__byte__first . 
C4: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) <= spark__unsigned__byte__last . For path(s) from start to run-time check associated with statement of line 893: procedure_set_b_counter_12. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: counter >= spark__unsigned__u64__first . H3: counter <= spark__unsigned__u64__last . H4: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last))) . H5: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last))) . H6: fld_byte_count(fld_h(local_ctx)) >= natural__first . H7: fld_byte_count(fld_h(local_ctx)) <= natural__last . H8: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first . H9: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last . H10: true . H11: true . H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first . H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last . H14: true . H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first . H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last . H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first . H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last . H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first . H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last . H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first . H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last . H23: bit__and(counter, 255) >= spark__unsigned__byte__first . H24: bit__and(counter, 255) <= spark__unsigned__byte__last . H25: bit__and(counter, 255) >= spark__unsigned__byte__first . H26: bit__and(counter, 255) <= spark__unsigned__byte__last . H27: 0 >= skein_512_block_bytes_index__first . H28: 0 <= skein_512_block_bytes_index__last . H29: 8 >= spark__unsigned__shift_count__first . H30: 8 <= spark__unsigned__shift_count__last . H31: counter >= interfaces__unsigned_64__first . H32: counter <= interfaces__unsigned_64__last . H33: 1 >= skein_512_block_bytes_index__first . H34: 1 <= skein_512_block_bytes_index__last . H35: spark__unsigned__shift_right_64(counter, 8) >= interfaces__unsigned_64__first . H36: spark__unsigned__shift_right_64(counter, 8) <= interfaces__unsigned_64__last . H37: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . H38: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . H39: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . H40: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . H41: 16 >= spark__unsigned__shift_count__first . H42: 16 <= spark__unsigned__shift_count__last . H43: counter >= interfaces__unsigned_64__first . H44: counter <= interfaces__unsigned_64__last . H45: 2 >= skein_512_block_bytes_index__first . H46: 2 <= skein_512_block_bytes_index__last . 
H47: spark__unsigned__shift_right_64(counter, 16) >= interfaces__unsigned_64__first . H48: spark__unsigned__shift_right_64(counter, 16) <= interfaces__unsigned_64__last . H49: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= spark__unsigned__byte__first . H50: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= spark__unsigned__byte__last . H51: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= spark__unsigned__byte__first . H52: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= spark__unsigned__byte__last . H53: 24 >= spark__unsigned__shift_count__first . H54: 24 <= spark__unsigned__shift_count__last . H55: counter >= interfaces__unsigned_64__first . H56: counter <= interfaces__unsigned_64__last . H57: 3 >= skein_512_block_bytes_index__first . H58: 3 <= skein_512_block_bytes_index__last . H59: spark__unsigned__shift_right_64(counter, 24) >= interfaces__unsigned_64__first . H60: spark__unsigned__shift_right_64(counter, 24) <= interfaces__unsigned_64__last . H61: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= spark__unsigned__byte__first . H62: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= spark__unsigned__byte__last . H63: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= spark__unsigned__byte__first . H64: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= spark__unsigned__byte__last . H65: 32 >= spark__unsigned__shift_count__first . H66: 32 <= spark__unsigned__shift_count__last . H67: counter >= interfaces__unsigned_64__first . H68: counter <= interfaces__unsigned_64__last . H69: 4 >= skein_512_block_bytes_index__first . H70: 4 <= skein_512_block_bytes_index__last . H71: spark__unsigned__shift_right_64(counter, 32) >= interfaces__unsigned_64__first . H72: spark__unsigned__shift_right_64(counter, 32) <= interfaces__unsigned_64__last . H73: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) >= spark__unsigned__byte__first . H74: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) <= spark__unsigned__byte__last . H75: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) >= spark__unsigned__byte__first . H76: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) <= spark__unsigned__byte__last . H77: 40 >= spark__unsigned__shift_count__first . H78: 40 <= spark__unsigned__shift_count__last . H79: counter >= interfaces__unsigned_64__first . H80: counter <= interfaces__unsigned_64__last . H81: 5 >= skein_512_block_bytes_index__first . H82: 5 <= skein_512_block_bytes_index__last . H83: spark__unsigned__shift_right_64(counter, 40) >= interfaces__unsigned_64__first . H84: spark__unsigned__shift_right_64(counter, 40) <= interfaces__unsigned_64__last . H85: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) >= spark__unsigned__byte__first . H86: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) <= spark__unsigned__byte__last . H87: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) >= spark__unsigned__byte__first . H88: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) <= spark__unsigned__byte__last . -> C1: 48 >= spark__unsigned__shift_count__first . C2: 48 <= spark__unsigned__shift_count__last . C3: counter >= interfaces__unsigned_64__first . C4: counter <= interfaces__unsigned_64__last . C5: 6 >= skein_512_block_bytes_index__first . C6: 6 <= skein_512_block_bytes_index__last . For path(s) from start to run-time check associated with statement of line 893: procedure_set_b_counter_13. 
H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: counter >= spark__unsigned__u64__first . H3: counter <= spark__unsigned__u64__last . H4: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last))) . H5: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last))) . H6: fld_byte_count(fld_h(local_ctx)) >= natural__first . H7: fld_byte_count(fld_h(local_ctx)) <= natural__last . H8: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first . H9: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last . H10: true . H11: true . H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first . H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last . H14: true . H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first . H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last . H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first . H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last . H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first . H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last . H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first . H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last . H23: bit__and(counter, 255) >= spark__unsigned__byte__first . H24: bit__and(counter, 255) <= spark__unsigned__byte__last . H25: bit__and(counter, 255) >= spark__unsigned__byte__first . H26: bit__and(counter, 255) <= spark__unsigned__byte__last . H27: 0 >= skein_512_block_bytes_index__first . H28: 0 <= skein_512_block_bytes_index__last . H29: 8 >= spark__unsigned__shift_count__first . H30: 8 <= spark__unsigned__shift_count__last . H31: counter >= interfaces__unsigned_64__first . H32: counter <= interfaces__unsigned_64__last . H33: 1 >= skein_512_block_bytes_index__first . H34: 1 <= skein_512_block_bytes_index__last . H35: spark__unsigned__shift_right_64(counter, 8) >= interfaces__unsigned_64__first . H36: spark__unsigned__shift_right_64(counter, 8) <= interfaces__unsigned_64__last . H37: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . H38: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . H39: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . H40: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . H41: 16 >= spark__unsigned__shift_count__first . H42: 16 <= spark__unsigned__shift_count__last . H43: counter >= interfaces__unsigned_64__first . H44: counter <= interfaces__unsigned_64__last . H45: 2 >= skein_512_block_bytes_index__first . H46: 2 <= skein_512_block_bytes_index__last . H47: spark__unsigned__shift_right_64(counter, 16) >= interfaces__unsigned_64__first . H48: spark__unsigned__shift_right_64(counter, 16) <= interfaces__unsigned_64__last . 
H49: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= spark__unsigned__byte__first . H50: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= spark__unsigned__byte__last . H51: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= spark__unsigned__byte__first . H52: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= spark__unsigned__byte__last . H53: 24 >= spark__unsigned__shift_count__first . H54: 24 <= spark__unsigned__shift_count__last . H55: counter >= interfaces__unsigned_64__first . H56: counter <= interfaces__unsigned_64__last . H57: 3 >= skein_512_block_bytes_index__first . H58: 3 <= skein_512_block_bytes_index__last . H59: spark__unsigned__shift_right_64(counter, 24) >= interfaces__unsigned_64__first . H60: spark__unsigned__shift_right_64(counter, 24) <= interfaces__unsigned_64__last . H61: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= spark__unsigned__byte__first . H62: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= spark__unsigned__byte__last . H63: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= spark__unsigned__byte__first . H64: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= spark__unsigned__byte__last . H65: 32 >= spark__unsigned__shift_count__first . H66: 32 <= spark__unsigned__shift_count__last . H67: counter >= interfaces__unsigned_64__first . H68: counter <= interfaces__unsigned_64__last . H69: 4 >= skein_512_block_bytes_index__first . H70: 4 <= skein_512_block_bytes_index__last . H71: spark__unsigned__shift_right_64(counter, 32) >= interfaces__unsigned_64__first . H72: spark__unsigned__shift_right_64(counter, 32) <= interfaces__unsigned_64__last . H73: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) >= spark__unsigned__byte__first . H74: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) <= spark__unsigned__byte__last . H75: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) >= spark__unsigned__byte__first . H76: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) <= spark__unsigned__byte__last . H77: 40 >= spark__unsigned__shift_count__first . H78: 40 <= spark__unsigned__shift_count__last . H79: counter >= interfaces__unsigned_64__first . H80: counter <= interfaces__unsigned_64__last . H81: 5 >= skein_512_block_bytes_index__first . H82: 5 <= skein_512_block_bytes_index__last . H83: spark__unsigned__shift_right_64(counter, 40) >= interfaces__unsigned_64__first . H84: spark__unsigned__shift_right_64(counter, 40) <= interfaces__unsigned_64__last . H85: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) >= spark__unsigned__byte__first . H86: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) <= spark__unsigned__byte__last . H87: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) >= spark__unsigned__byte__first . H88: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) <= spark__unsigned__byte__last . H89: 48 >= spark__unsigned__shift_count__first . H90: 48 <= spark__unsigned__shift_count__last . H91: counter >= interfaces__unsigned_64__first . H92: counter <= interfaces__unsigned_64__last . H93: 6 >= skein_512_block_bytes_index__first . H94: 6 <= skein_512_block_bytes_index__last . H95: spark__unsigned__shift_right_64(counter, 48) >= interfaces__unsigned_64__first . H96: spark__unsigned__shift_right_64(counter, 48) <= interfaces__unsigned_64__last . -> C1: bit__and(spark__unsigned__shift_right_64(counter, 48), 255) >= spark__unsigned__byte__first . 
C2: bit__and(spark__unsigned__shift_right_64(counter, 48), 255) <= spark__unsigned__byte__last . C3: bit__and(spark__unsigned__shift_right_64(counter, 48), 255) >= spark__unsigned__byte__first . C4: bit__and(spark__unsigned__shift_right_64(counter, 48), 255) <= spark__unsigned__byte__last . For path(s) from start to run-time check associated with statement of line 894: procedure_set_b_counter_14. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: counter >= spark__unsigned__u64__first . H3: counter <= spark__unsigned__u64__last . H4: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last))) . H5: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last))) . H6: fld_byte_count(fld_h(local_ctx)) >= natural__first . H7: fld_byte_count(fld_h(local_ctx)) <= natural__last . H8: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first . H9: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last . H10: true . H11: true . H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first . H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last . H14: true . H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first . H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last . H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first . H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last . H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first . H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last . H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first . H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last . H23: bit__and(counter, 255) >= spark__unsigned__byte__first . H24: bit__and(counter, 255) <= spark__unsigned__byte__last . H25: bit__and(counter, 255) >= spark__unsigned__byte__first . H26: bit__and(counter, 255) <= spark__unsigned__byte__last . H27: 0 >= skein_512_block_bytes_index__first . H28: 0 <= skein_512_block_bytes_index__last . H29: 8 >= spark__unsigned__shift_count__first . H30: 8 <= spark__unsigned__shift_count__last . H31: counter >= interfaces__unsigned_64__first . H32: counter <= interfaces__unsigned_64__last . H33: 1 >= skein_512_block_bytes_index__first . H34: 1 <= skein_512_block_bytes_index__last . H35: spark__unsigned__shift_right_64(counter, 8) >= interfaces__unsigned_64__first . H36: spark__unsigned__shift_right_64(counter, 8) <= interfaces__unsigned_64__last . H37: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . H38: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . H39: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . H40: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . H41: 16 >= spark__unsigned__shift_count__first . H42: 16 <= spark__unsigned__shift_count__last . 
H43: counter >= interfaces__unsigned_64__first . H44: counter <= interfaces__unsigned_64__last . H45: 2 >= skein_512_block_bytes_index__first . H46: 2 <= skein_512_block_bytes_index__last . H47: spark__unsigned__shift_right_64(counter, 16) >= interfaces__unsigned_64__first . H48: spark__unsigned__shift_right_64(counter, 16) <= interfaces__unsigned_64__last . H49: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= spark__unsigned__byte__first . H50: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= spark__unsigned__byte__last . H51: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= spark__unsigned__byte__first . H52: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= spark__unsigned__byte__last . H53: 24 >= spark__unsigned__shift_count__first . H54: 24 <= spark__unsigned__shift_count__last . H55: counter >= interfaces__unsigned_64__first . H56: counter <= interfaces__unsigned_64__last . H57: 3 >= skein_512_block_bytes_index__first . H58: 3 <= skein_512_block_bytes_index__last . H59: spark__unsigned__shift_right_64(counter, 24) >= interfaces__unsigned_64__first . H60: spark__unsigned__shift_right_64(counter, 24) <= interfaces__unsigned_64__last . H61: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= spark__unsigned__byte__first . H62: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= spark__unsigned__byte__last . H63: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= spark__unsigned__byte__first . H64: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= spark__unsigned__byte__last . H65: 32 >= spark__unsigned__shift_count__first . H66: 32 <= spark__unsigned__shift_count__last . H67: counter >= interfaces__unsigned_64__first . H68: counter <= interfaces__unsigned_64__last . H69: 4 >= skein_512_block_bytes_index__first . H70: 4 <= skein_512_block_bytes_index__last . H71: spark__unsigned__shift_right_64(counter, 32) >= interfaces__unsigned_64__first . H72: spark__unsigned__shift_right_64(counter, 32) <= interfaces__unsigned_64__last . H73: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) >= spark__unsigned__byte__first . H74: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) <= spark__unsigned__byte__last . H75: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) >= spark__unsigned__byte__first . H76: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) <= spark__unsigned__byte__last . H77: 40 >= spark__unsigned__shift_count__first . H78: 40 <= spark__unsigned__shift_count__last . H79: counter >= interfaces__unsigned_64__first . H80: counter <= interfaces__unsigned_64__last . H81: 5 >= skein_512_block_bytes_index__first . H82: 5 <= skein_512_block_bytes_index__last . H83: spark__unsigned__shift_right_64(counter, 40) >= interfaces__unsigned_64__first . H84: spark__unsigned__shift_right_64(counter, 40) <= interfaces__unsigned_64__last . H85: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) >= spark__unsigned__byte__first . H86: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) <= spark__unsigned__byte__last . H87: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) >= spark__unsigned__byte__first . H88: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) <= spark__unsigned__byte__last . H89: 48 >= spark__unsigned__shift_count__first . H90: 48 <= spark__unsigned__shift_count__last . H91: counter >= interfaces__unsigned_64__first . H92: counter <= interfaces__unsigned_64__last . H93: 6 >= skein_512_block_bytes_index__first . 
H94: 6 <= skein_512_block_bytes_index__last . H95: spark__unsigned__shift_right_64(counter, 48) >= interfaces__unsigned_64__first . H96: spark__unsigned__shift_right_64(counter, 48) <= interfaces__unsigned_64__last . H97: bit__and(spark__unsigned__shift_right_64(counter, 48), 255) >= spark__unsigned__byte__first . H98: bit__and(spark__unsigned__shift_right_64(counter, 48), 255) <= spark__unsigned__byte__last . H99: bit__and(spark__unsigned__shift_right_64(counter, 48), 255) >= spark__unsigned__byte__first . H100: bit__and(spark__unsigned__shift_right_64(counter, 48), 255) <= spark__unsigned__byte__last . -> C1: 56 >= spark__unsigned__shift_count__first . C2: 56 <= spark__unsigned__shift_count__last . C3: counter >= interfaces__unsigned_64__first . C4: counter <= interfaces__unsigned_64__last . C5: 7 >= skein_512_block_bytes_index__first . C6: 7 <= skein_512_block_bytes_index__last . For path(s) from start to run-time check associated with statement of line 894: procedure_set_b_counter_15. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: counter >= spark__unsigned__u64__first . H3: counter <= spark__unsigned__u64__last . H4: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last))) . H5: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last))) . H6: fld_byte_count(fld_h(local_ctx)) >= natural__first . H7: fld_byte_count(fld_h(local_ctx)) <= natural__last . H8: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first . H9: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last . H10: true . H11: true . H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first . H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last . H14: true . H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first . H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last . H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first . H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last . H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first . H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last . H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first . H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last . H23: bit__and(counter, 255) >= spark__unsigned__byte__first . H24: bit__and(counter, 255) <= spark__unsigned__byte__last . H25: bit__and(counter, 255) >= spark__unsigned__byte__first . H26: bit__and(counter, 255) <= spark__unsigned__byte__last . H27: 0 >= skein_512_block_bytes_index__first . H28: 0 <= skein_512_block_bytes_index__last . H29: 8 >= spark__unsigned__shift_count__first . H30: 8 <= spark__unsigned__shift_count__last . H31: counter >= interfaces__unsigned_64__first . H32: counter <= interfaces__unsigned_64__last . H33: 1 >= skein_512_block_bytes_index__first . H34: 1 <= skein_512_block_bytes_index__last . 
H35: spark__unsigned__shift_right_64(counter, 8) >= interfaces__unsigned_64__first . H36: spark__unsigned__shift_right_64(counter, 8) <= interfaces__unsigned_64__last . H37: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . H38: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . H39: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . H40: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . H41: 16 >= spark__unsigned__shift_count__first . H42: 16 <= spark__unsigned__shift_count__last . H43: counter >= interfaces__unsigned_64__first . H44: counter <= interfaces__unsigned_64__last . H45: 2 >= skein_512_block_bytes_index__first . H46: 2 <= skein_512_block_bytes_index__last . H47: spark__unsigned__shift_right_64(counter, 16) >= interfaces__unsigned_64__first . H48: spark__unsigned__shift_right_64(counter, 16) <= interfaces__unsigned_64__last . H49: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= spark__unsigned__byte__first . H50: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= spark__unsigned__byte__last . H51: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= spark__unsigned__byte__first . H52: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= spark__unsigned__byte__last . H53: 24 >= spark__unsigned__shift_count__first . H54: 24 <= spark__unsigned__shift_count__last . H55: counter >= interfaces__unsigned_64__first . H56: counter <= interfaces__unsigned_64__last . H57: 3 >= skein_512_block_bytes_index__first . H58: 3 <= skein_512_block_bytes_index__last . H59: spark__unsigned__shift_right_64(counter, 24) >= interfaces__unsigned_64__first . H60: spark__unsigned__shift_right_64(counter, 24) <= interfaces__unsigned_64__last . H61: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= spark__unsigned__byte__first . H62: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= spark__unsigned__byte__last . H63: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= spark__unsigned__byte__first . H64: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= spark__unsigned__byte__last . H65: 32 >= spark__unsigned__shift_count__first . H66: 32 <= spark__unsigned__shift_count__last . H67: counter >= interfaces__unsigned_64__first . H68: counter <= interfaces__unsigned_64__last . H69: 4 >= skein_512_block_bytes_index__first . H70: 4 <= skein_512_block_bytes_index__last . H71: spark__unsigned__shift_right_64(counter, 32) >= interfaces__unsigned_64__first . H72: spark__unsigned__shift_right_64(counter, 32) <= interfaces__unsigned_64__last . H73: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) >= spark__unsigned__byte__first . H74: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) <= spark__unsigned__byte__last . H75: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) >= spark__unsigned__byte__first . H76: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) <= spark__unsigned__byte__last . H77: 40 >= spark__unsigned__shift_count__first . H78: 40 <= spark__unsigned__shift_count__last . H79: counter >= interfaces__unsigned_64__first . H80: counter <= interfaces__unsigned_64__last . H81: 5 >= skein_512_block_bytes_index__first . H82: 5 <= skein_512_block_bytes_index__last . H83: spark__unsigned__shift_right_64(counter, 40) >= interfaces__unsigned_64__first . 
H84: spark__unsigned__shift_right_64(counter, 40) <= interfaces__unsigned_64__last . H85: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) >= spark__unsigned__byte__first . H86: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) <= spark__unsigned__byte__last . H87: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) >= spark__unsigned__byte__first . H88: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) <= spark__unsigned__byte__last . H89: 48 >= spark__unsigned__shift_count__first . H90: 48 <= spark__unsigned__shift_count__last . H91: counter >= interfaces__unsigned_64__first . H92: counter <= interfaces__unsigned_64__last . H93: 6 >= skein_512_block_bytes_index__first . H94: 6 <= skein_512_block_bytes_index__last . H95: spark__unsigned__shift_right_64(counter, 48) >= interfaces__unsigned_64__first . H96: spark__unsigned__shift_right_64(counter, 48) <= interfaces__unsigned_64__last . H97: bit__and(spark__unsigned__shift_right_64(counter, 48), 255) >= spark__unsigned__byte__first . H98: bit__and(spark__unsigned__shift_right_64(counter, 48), 255) <= spark__unsigned__byte__last . H99: bit__and(spark__unsigned__shift_right_64(counter, 48), 255) >= spark__unsigned__byte__first . H100: bit__and(spark__unsigned__shift_right_64(counter, 48), 255) <= spark__unsigned__byte__last . H101: 56 >= spark__unsigned__shift_count__first . H102: 56 <= spark__unsigned__shift_count__last . H103: counter >= interfaces__unsigned_64__first . H104: counter <= interfaces__unsigned_64__last . H105: 7 >= skein_512_block_bytes_index__first . H106: 7 <= skein_512_block_bytes_index__last . H107: spark__unsigned__shift_right_64(counter, 56) >= interfaces__unsigned_64__first . H108: spark__unsigned__shift_right_64(counter, 56) <= interfaces__unsigned_64__last . -> C1: bit__and(spark__unsigned__shift_right_64(counter, 56), 255) >= spark__unsigned__byte__first . C2: bit__and(spark__unsigned__shift_right_64(counter, 56), 255) <= spark__unsigned__byte__last . C3: bit__and(spark__unsigned__shift_right_64(counter, 56), 255) >= spark__unsigned__byte__first . C4: bit__and(spark__unsigned__shift_right_64(counter, 56), 255) <= spark__unsigned__byte__last . For path(s) from start to finish: procedure_set_b_counter_16. H1: fld_hash_bit_len(fld_h(local_ctx)) > 0 . H2: counter >= spark__unsigned__u64__first . H3: counter <= spark__unsigned__u64__last . H4: for_all(i___2: integer, ((i___2 >= skein_512_block_bytes_index__first) and (i___2 <= skein_512_block_bytes_index__last)) -> ((element(fld_b( local_ctx), [i___2]) >= spark__unsigned__byte__first) and (element( fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last))) . H5: for_all(i___1: integer, ((i___1 >= skein_512_state_words_index__first) and (i___1 <= skein_512_state_words_index__last)) -> ((element(fld_x( local_ctx), [i___1]) >= spark__unsigned__u64__first) and (element( fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last))) . H6: fld_byte_count(fld_h(local_ctx)) >= natural__first . H7: fld_byte_count(fld_h(local_ctx)) <= natural__last . H8: fld_hash_bit_len(fld_h(local_ctx)) >= hash_bit_length__first . H9: fld_hash_bit_len(fld_h(local_ctx)) <= hash_bit_length__last . H10: true . H11: true . H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u6__first . H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u6__last . H14: true . H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u7__first . 
H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u7__last . H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u16__first . H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u16__last . H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u32__first . H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u32__last . H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= spark__unsigned__u64__first . H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= spark__unsigned__u64__last . H23: bit__and(counter, 255) >= spark__unsigned__byte__first . H24: bit__and(counter, 255) <= spark__unsigned__byte__last . H25: bit__and(counter, 255) >= spark__unsigned__byte__first . H26: bit__and(counter, 255) <= spark__unsigned__byte__last . H27: 0 >= skein_512_block_bytes_index__first . H28: 0 <= skein_512_block_bytes_index__last . H29: 8 >= spark__unsigned__shift_count__first . H30: 8 <= spark__unsigned__shift_count__last . H31: counter >= interfaces__unsigned_64__first . H32: counter <= interfaces__unsigned_64__last . H33: 1 >= skein_512_block_bytes_index__first . H34: 1 <= skein_512_block_bytes_index__last . H35: spark__unsigned__shift_right_64(counter, 8) >= interfaces__unsigned_64__first . H36: spark__unsigned__shift_right_64(counter, 8) <= interfaces__unsigned_64__last . H37: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . H38: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . H39: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= spark__unsigned__byte__first . H40: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= spark__unsigned__byte__last . H41: 16 >= spark__unsigned__shift_count__first . H42: 16 <= spark__unsigned__shift_count__last . H43: counter >= interfaces__unsigned_64__first . H44: counter <= interfaces__unsigned_64__last . H45: 2 >= skein_512_block_bytes_index__first . H46: 2 <= skein_512_block_bytes_index__last . H47: spark__unsigned__shift_right_64(counter, 16) >= interfaces__unsigned_64__first . H48: spark__unsigned__shift_right_64(counter, 16) <= interfaces__unsigned_64__last . H49: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= spark__unsigned__byte__first . H50: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= spark__unsigned__byte__last . H51: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= spark__unsigned__byte__first . H52: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= spark__unsigned__byte__last . H53: 24 >= spark__unsigned__shift_count__first . H54: 24 <= spark__unsigned__shift_count__last . H55: counter >= interfaces__unsigned_64__first . H56: counter <= interfaces__unsigned_64__last . H57: 3 >= skein_512_block_bytes_index__first . H58: 3 <= skein_512_block_bytes_index__last . H59: spark__unsigned__shift_right_64(counter, 24) >= interfaces__unsigned_64__first . H60: spark__unsigned__shift_right_64(counter, 24) <= interfaces__unsigned_64__last . H61: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= spark__unsigned__byte__first . H62: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= spark__unsigned__byte__last . H63: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= spark__unsigned__byte__first . H64: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= spark__unsigned__byte__last . 
H65: 32 >= spark__unsigned__shift_count__first . H66: 32 <= spark__unsigned__shift_count__last . H67: counter >= interfaces__unsigned_64__first . H68: counter <= interfaces__unsigned_64__last . H69: 4 >= skein_512_block_bytes_index__first . H70: 4 <= skein_512_block_bytes_index__last . H71: spark__unsigned__shift_right_64(counter, 32) >= interfaces__unsigned_64__first . H72: spark__unsigned__shift_right_64(counter, 32) <= interfaces__unsigned_64__last . H73: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) >= spark__unsigned__byte__first . H74: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) <= spark__unsigned__byte__last . H75: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) >= spark__unsigned__byte__first . H76: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) <= spark__unsigned__byte__last . H77: 40 >= spark__unsigned__shift_count__first . H78: 40 <= spark__unsigned__shift_count__last . H79: counter >= interfaces__unsigned_64__first . H80: counter <= interfaces__unsigned_64__last . H81: 5 >= skein_512_block_bytes_index__first . H82: 5 <= skein_512_block_bytes_index__last . H83: spark__unsigned__shift_right_64(counter, 40) >= interfaces__unsigned_64__first . H84: spark__unsigned__shift_right_64(counter, 40) <= interfaces__unsigned_64__last . H85: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) >= spark__unsigned__byte__first . H86: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) <= spark__unsigned__byte__last . H87: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) >= spark__unsigned__byte__first . H88: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) <= spark__unsigned__byte__last . H89: 48 >= spark__unsigned__shift_count__first . H90: 48 <= spark__unsigned__shift_count__last . H91: counter >= interfaces__unsigned_64__first . H92: counter <= interfaces__unsigned_64__last . H93: 6 >= skein_512_block_bytes_index__first . H94: 6 <= skein_512_block_bytes_index__last . H95: spark__unsigned__shift_right_64(counter, 48) >= interfaces__unsigned_64__first . H96: spark__unsigned__shift_right_64(counter, 48) <= interfaces__unsigned_64__last . H97: bit__and(spark__unsigned__shift_right_64(counter, 48), 255) >= spark__unsigned__byte__first . H98: bit__and(spark__unsigned__shift_right_64(counter, 48), 255) <= spark__unsigned__byte__last . H99: bit__and(spark__unsigned__shift_right_64(counter, 48), 255) >= spark__unsigned__byte__first . H100: bit__and(spark__unsigned__shift_right_64(counter, 48), 255) <= spark__unsigned__byte__last . H101: 56 >= spark__unsigned__shift_count__first . H102: 56 <= spark__unsigned__shift_count__last . H103: counter >= interfaces__unsigned_64__first . H104: counter <= interfaces__unsigned_64__last . H105: 7 >= skein_512_block_bytes_index__first . H106: 7 <= skein_512_block_bytes_index__last . H107: spark__unsigned__shift_right_64(counter, 56) >= interfaces__unsigned_64__first . H108: spark__unsigned__shift_right_64(counter, 56) <= interfaces__unsigned_64__last . H109: bit__and(spark__unsigned__shift_right_64(counter, 56), 255) >= spark__unsigned__byte__first . H110: bit__and(spark__unsigned__shift_right_64(counter, 56), 255) <= spark__unsigned__byte__last . H111: bit__and(spark__unsigned__shift_right_64(counter, 56), 255) >= spark__unsigned__byte__first . H112: bit__and(spark__unsigned__shift_right_64(counter, 56), 255) <= spark__unsigned__byte__last . 
-> C1: fld_hash_bit_len(fld_h(upf_b(upf_b(upf_b(upf_b(upf_b(upf_b(upf_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255))), update(fld_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255))), update( fld_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255)))), [3], bit__and( spark__unsigned__shift_right_64(counter, 24), 255))), update( fld_b(upf_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255))), update( fld_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255)))), [3], bit__and( spark__unsigned__shift_right_64(counter, 24), 255)))), [4], bit__and( spark__unsigned__shift_right_64(counter, 32), 255))), update( fld_b(upf_b(upf_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255))), update( fld_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, 
update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255)))), [3], bit__and( spark__unsigned__shift_right_64(counter, 24), 255))), update( fld_b(upf_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255))), update( fld_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255)))), [3], bit__and( spark__unsigned__shift_right_64(counter, 24), 255)))), [4], bit__and( spark__unsigned__shift_right_64(counter, 32), 255)))), [5], bit__and( spark__unsigned__shift_right_64(counter, 40), 255))), update( fld_b(upf_b(upf_b(upf_b(upf_b(upf_b(upf_b( local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255))), update(fld_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255))), update( fld_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255)))), [3], bit__and( spark__unsigned__shift_right_64(counter, 24), 255))), update( fld_b(upf_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255))), update( fld_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], 
bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255)))), [3], bit__and( spark__unsigned__shift_right_64(counter, 24), 255)))), [4], bit__and( spark__unsigned__shift_right_64(counter, 32), 255))), update( fld_b(upf_b(upf_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255))), update( fld_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255)))), [3], bit__and( spark__unsigned__shift_right_64(counter, 24), 255))), update( fld_b(upf_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255))), update( fld_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255)))), [3], bit__and( spark__unsigned__shift_right_64(counter, 24), 255)))), [4], bit__and( spark__unsigned__shift_right_64(counter, 32), 255)))), [5], bit__and( spark__unsigned__shift_right_64(counter, 40), 255)))), [6], bit__and( spark__unsigned__shift_right_64(counter, 48), 255))), update( fld_b(upf_b(upf_b(upf_b(upf_b(upf_b(upf_b(upf_b( local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255))), update(fld_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255)))), [1], bit__and( 
spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255))), update( fld_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255)))), [3], bit__and( spark__unsigned__shift_right_64(counter, 24), 255))), update( fld_b(upf_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255))), update( fld_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255)))), [3], bit__and( spark__unsigned__shift_right_64(counter, 24), 255)))), [4], bit__and( spark__unsigned__shift_right_64(counter, 32), 255))), update( fld_b(upf_b(upf_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255))), update( fld_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255)))), [3], bit__and( spark__unsigned__shift_right_64(counter, 24), 
255))), update( fld_b(upf_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255))), update( fld_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255)))), [3], bit__and( spark__unsigned__shift_right_64(counter, 24), 255)))), [4], bit__and( spark__unsigned__shift_right_64(counter, 32), 255)))), [5], bit__and( spark__unsigned__shift_right_64(counter, 40), 255))), update( fld_b(upf_b(upf_b(upf_b(upf_b(upf_b(upf_b( local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255))), update(fld_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255))), update( fld_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255)))), [3], bit__and( spark__unsigned__shift_right_64(counter, 24), 255))), update( fld_b(upf_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255))), update( fld_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 
255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255)))), [3], bit__and( spark__unsigned__shift_right_64(counter, 24), 255)))), [4], bit__and( spark__unsigned__shift_right_64(counter, 32), 255))), update( fld_b(upf_b(upf_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255))), update( fld_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255)))), [3], bit__and( spark__unsigned__shift_right_64(counter, 24), 255))), update( fld_b(upf_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255))), update( fld_b(upf_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255))), update( fld_b(upf_b(upf_b(local_ctx, update(fld_b( local_ctx), [0], bit__and(counter, 255))), update(fld_b( upf_b(local_ctx, update(fld_b(local_ctx), [0], bit__and( counter, 255)))), [1], bit__and( spark__unsigned__shift_right_64(counter, 8), 255)))), [2], bit__and( spark__unsigned__shift_right_64(counter, 16), 255)))), [3], bit__and( spark__unsigned__shift_right_64(counter, 24), 255)))), [4], bit__and( spark__unsigned__shift_right_64(counter, 32), 255)))), [5], bit__and( spark__unsigned__shift_right_64(counter, 40), 255)))), [6], bit__and( spark__unsigned__shift_right_64(counter, 48), 255)))), [7], bit__and( spark__unsigned__shift_right_64(counter, 56), 255))))) > 0 . 
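The verification condition above closes the VC file for procedure Skein.Skein_512_Final.Set_B_Counter. Hypotheses H23 .. H112 record that every masked byte bit__and(spark__unsigned__shift_right_64(counter, 8*K), 255) and every index 0 .. 7 lies in range, and the conclusion for path procedure_set_b_counter_16 states that after the eight successive updates of the B component (upf_b) the Hash_Bit_Len field of the context header is untouched and therefore, by H1, still positive. For orientation only, the following is a minimal hypothetical sketch of the kind of straight-line byte serialisation that produces VCs of exactly this shape; the type and object names are guessed from the mangled FDL identifiers (spark__unsigned__byte, skein_512_block_bytes_index, counter, local_ctx) and this is not the source text shipped in the package.

   with Interfaces; use Interfaces;

   procedure Set_B_Counter_Sketch (Counter : in Unsigned_64) is
      --  Hypothetical stand-ins for the types named in the VCs above.
      type Byte is mod 2**8;                         --  spark__unsigned__byte
      subtype Block_Index is Natural range 0 .. 63;  --  skein_512_block_bytes_index
      type Block_Bytes is array (Block_Index) of Byte;
      B : Block_Bytes := (others => 0);              --  stands in for fld_b(local_ctx)
   begin
      --  Store the 64-bit counter into bytes 0 .. 7, least significant
      --  byte first.  Each assignment gives rise to range checks of the
      --  form seen above: the shift count (for the shifted bytes) is in
      --  0 .. 64, the index is in 0 .. 63, and the masked value is in
      --  0 .. 255.
      B (0) := Byte (Counter and 16#FF#);
      B (1) := Byte (Shift_Right (Counter, 8)  and 16#FF#);
      B (2) := Byte (Shift_Right (Counter, 16) and 16#FF#);
      B (3) := Byte (Shift_Right (Counter, 24) and 16#FF#);
      B (4) := Byte (Shift_Right (Counter, 32) and 16#FF#);
      B (5) := Byte (Shift_Right (Counter, 40) and 16#FF#);
      B (6) := Byte (Shift_Right (Counter, 48) and 16#FF#);
      B (7) := Byte (Shift_Right (Counter, 56) and 16#FF#);
   end Set_B_Counter_Sketch;

In the Simplifier log for Set_B_Counter further below, these checks are discharged by substituting the literal type bounds from the set_b_counte_rules family and, for the unshifted byte, rewriting bit__and(counter, 255) to counter mod 256.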
spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final/zero_pad_b.rls0000644000175000017500000002155611712513676030661 0ustar eugeneugen /*********************************************************/ /*Proof Rule Declarations*/ /*Examiner Pro Edition*/ /*********************************************************/ /*procedure Skein.Skein_512_Final.Zero_Pad_B*/ rule_family zero_pad_b_rules: X requires [X:any] & X <= Y requires [X:ire, Y:ire] & X >= Y requires [X:ire, Y:ire]. zero_pad_b_rules(1): skein_512_block_bytes_c may_be_replaced_by 64. zero_pad_b_rules(2): integer__size >= 0 may_be_deduced. zero_pad_b_rules(3): integer__first may_be_replaced_by -2147483648. zero_pad_b_rules(4): integer__last may_be_replaced_by 2147483647. zero_pad_b_rules(5): integer__base__first may_be_replaced_by -2147483648. zero_pad_b_rules(6): integer__base__last may_be_replaced_by 2147483647. zero_pad_b_rules(7): natural__size >= 0 may_be_deduced. zero_pad_b_rules(8): natural__first may_be_replaced_by 0. zero_pad_b_rules(9): natural__last may_be_replaced_by 2147483647. zero_pad_b_rules(10): natural__base__first may_be_replaced_by -2147483648. zero_pad_b_rules(11): natural__base__last may_be_replaced_by 2147483647. zero_pad_b_rules(12): interfaces__unsigned_8__size >= 0 may_be_deduced. zero_pad_b_rules(13): interfaces__unsigned_8__size may_be_replaced_by 8. zero_pad_b_rules(14): interfaces__unsigned_8__first may_be_replaced_by 0. zero_pad_b_rules(15): interfaces__unsigned_8__last may_be_replaced_by 255. zero_pad_b_rules(16): interfaces__unsigned_8__base__first may_be_replaced_by 0. zero_pad_b_rules(17): interfaces__unsigned_8__base__last may_be_replaced_by 255. zero_pad_b_rules(18): interfaces__unsigned_8__modulus may_be_replaced_by 256. zero_pad_b_rules(19): interfaces__unsigned_16__size >= 0 may_be_deduced. zero_pad_b_rules(20): interfaces__unsigned_16__size may_be_replaced_by 16. zero_pad_b_rules(21): interfaces__unsigned_16__first may_be_replaced_by 0. zero_pad_b_rules(22): interfaces__unsigned_16__last may_be_replaced_by 65535. zero_pad_b_rules(23): interfaces__unsigned_16__base__first may_be_replaced_by 0. zero_pad_b_rules(24): interfaces__unsigned_16__base__last may_be_replaced_by 65535. zero_pad_b_rules(25): interfaces__unsigned_16__modulus may_be_replaced_by 65536. zero_pad_b_rules(26): interfaces__unsigned_32__size >= 0 may_be_deduced. zero_pad_b_rules(27): interfaces__unsigned_32__size may_be_replaced_by 32. zero_pad_b_rules(28): interfaces__unsigned_32__first may_be_replaced_by 0. zero_pad_b_rules(29): interfaces__unsigned_32__last may_be_replaced_by 4294967295. zero_pad_b_rules(30): interfaces__unsigned_32__base__first may_be_replaced_by 0. zero_pad_b_rules(31): interfaces__unsigned_32__base__last may_be_replaced_by 4294967295. zero_pad_b_rules(32): interfaces__unsigned_32__modulus may_be_replaced_by 4294967296. zero_pad_b_rules(33): interfaces__unsigned_64__size >= 0 may_be_deduced. zero_pad_b_rules(34): interfaces__unsigned_64__size may_be_replaced_by 64. zero_pad_b_rules(35): interfaces__unsigned_64__first may_be_replaced_by 0. zero_pad_b_rules(36): interfaces__unsigned_64__last may_be_replaced_by 18446744073709551615. zero_pad_b_rules(37): interfaces__unsigned_64__base__first may_be_replaced_by 0. zero_pad_b_rules(38): interfaces__unsigned_64__base__last may_be_replaced_by 18446744073709551615. zero_pad_b_rules(39): interfaces__unsigned_64__modulus may_be_replaced_by 18446744073709551616. zero_pad_b_rules(40): spark__unsigned__u6__size >= 0 may_be_deduced. 
zero_pad_b_rules(41): spark__unsigned__u6__first may_be_replaced_by 0. zero_pad_b_rules(42): spark__unsigned__u6__last may_be_replaced_by 63. zero_pad_b_rules(43): spark__unsigned__u6__base__first may_be_replaced_by 0. zero_pad_b_rules(44): spark__unsigned__u6__base__last may_be_replaced_by 63. zero_pad_b_rules(45): spark__unsigned__u6__modulus may_be_replaced_by 64. zero_pad_b_rules(46): spark__unsigned__u7__size >= 0 may_be_deduced. zero_pad_b_rules(47): spark__unsigned__u7__first may_be_replaced_by 0. zero_pad_b_rules(48): spark__unsigned__u7__last may_be_replaced_by 127. zero_pad_b_rules(49): spark__unsigned__u7__base__first may_be_replaced_by 0. zero_pad_b_rules(50): spark__unsigned__u7__base__last may_be_replaced_by 127. zero_pad_b_rules(51): spark__unsigned__u7__modulus may_be_replaced_by 128. zero_pad_b_rules(52): spark__unsigned__byte__size >= 0 may_be_deduced. zero_pad_b_rules(53): spark__unsigned__byte__first may_be_replaced_by 0. zero_pad_b_rules(54): spark__unsigned__byte__last may_be_replaced_by 255. zero_pad_b_rules(55): spark__unsigned__byte__base__first may_be_replaced_by 0. zero_pad_b_rules(56): spark__unsigned__byte__base__last may_be_replaced_by 255. zero_pad_b_rules(57): spark__unsigned__byte__modulus may_be_replaced_by 256. zero_pad_b_rules(58): spark__unsigned__u16__size >= 0 may_be_deduced. zero_pad_b_rules(59): spark__unsigned__u16__first may_be_replaced_by 0. zero_pad_b_rules(60): spark__unsigned__u16__last may_be_replaced_by 65535. zero_pad_b_rules(61): spark__unsigned__u16__base__first may_be_replaced_by 0. zero_pad_b_rules(62): spark__unsigned__u16__base__last may_be_replaced_by 65535. zero_pad_b_rules(63): spark__unsigned__u16__modulus may_be_replaced_by 65536. zero_pad_b_rules(64): spark__unsigned__u32__size >= 0 may_be_deduced. zero_pad_b_rules(65): spark__unsigned__u32__first may_be_replaced_by 0. zero_pad_b_rules(66): spark__unsigned__u32__last may_be_replaced_by 4294967295. zero_pad_b_rules(67): spark__unsigned__u32__base__first may_be_replaced_by 0. zero_pad_b_rules(68): spark__unsigned__u32__base__last may_be_replaced_by 4294967295. zero_pad_b_rules(69): spark__unsigned__u32__modulus may_be_replaced_by 4294967296. zero_pad_b_rules(70): spark__unsigned__u64__size >= 0 may_be_deduced. zero_pad_b_rules(71): spark__unsigned__u64__first may_be_replaced_by 0. zero_pad_b_rules(72): spark__unsigned__u64__last may_be_replaced_by 18446744073709551615. zero_pad_b_rules(73): spark__unsigned__u64__base__first may_be_replaced_by 0. zero_pad_b_rules(74): spark__unsigned__u64__base__last may_be_replaced_by 18446744073709551615. zero_pad_b_rules(75): spark__unsigned__u64__modulus may_be_replaced_by 18446744073709551616. zero_pad_b_rules(76): spark__crypto__word_count_t__size >= 0 may_be_deduced. zero_pad_b_rules(77): spark__crypto__word_count_t__first may_be_replaced_by 0. zero_pad_b_rules(78): spark__crypto__word_count_t__last may_be_replaced_by 268435455. zero_pad_b_rules(79): spark__crypto__word_count_t__base__first may_be_replaced_by -2147483648. zero_pad_b_rules(80): spark__crypto__word_count_t__base__last may_be_replaced_by 2147483647. zero_pad_b_rules(81): hash_bit_length__size >= 0 may_be_deduced. zero_pad_b_rules(82): hash_bit_length__first may_be_replaced_by 0. zero_pad_b_rules(83): hash_bit_length__last may_be_replaced_by 2147483640. zero_pad_b_rules(84): hash_bit_length__base__first may_be_replaced_by -2147483648. zero_pad_b_rules(85): hash_bit_length__base__last may_be_replaced_by 2147483647. 
zero_pad_b_rules(86): skein_512_state_words_index__size >= 0 may_be_deduced. zero_pad_b_rules(87): skein_512_state_words_index__first may_be_replaced_by 0. zero_pad_b_rules(88): skein_512_state_words_index__last may_be_replaced_by 7. zero_pad_b_rules(89): skein_512_state_words_index__base__first may_be_replaced_by -2147483648. zero_pad_b_rules(90): skein_512_state_words_index__base__last may_be_replaced_by 2147483647. zero_pad_b_rules(91): skein_512_block_bytes_index__size >= 0 may_be_deduced. zero_pad_b_rules(92): skein_512_block_bytes_index__first may_be_replaced_by 0. zero_pad_b_rules(93): skein_512_block_bytes_index__last may_be_replaced_by 63. zero_pad_b_rules(94): skein_512_block_bytes_index__base__first may_be_replaced_by -2147483648. zero_pad_b_rules(95): skein_512_block_bytes_index__base__last may_be_replaced_by 2147483647. zero_pad_b_rules(96): skein_512_context__size >= 0 may_be_deduced. zero_pad_b_rules(97): A = B may_be_deduced_from [goal(checktype(A,skein_512_context)), goal(checktype(B,skein_512_context)), fld_h(A) = fld_h(B), fld_x(A) = fld_x(B), fld_b(A) = fld_b(B)]. zero_pad_b_rules(98): tweak_value__size >= 0 may_be_deduced. zero_pad_b_rules(99): tweak_value__size may_be_replaced_by 128. zero_pad_b_rules(100): A = B may_be_deduced_from [goal(checktype(A,tweak_value)), goal(checktype(B,tweak_value)), fld_byte_count_lsb(A) = fld_byte_count_lsb(B), fld_byte_count_msb(A) = fld_byte_count_msb(B), fld_reserved(A) = fld_reserved(B), fld_tree_level(A) = fld_tree_level(B), fld_bit_pad(A) = fld_bit_pad(B), fld_field_type(A) = fld_field_type(B), fld_first_block(A) = fld_first_block(B), fld_final_block(A) = fld_final_block(B)]. zero_pad_b_rules(101): context_header__size >= 0 may_be_deduced. zero_pad_b_rules(102): A = B may_be_deduced_from [goal(checktype(A,context_header)), goal(checktype(B,context_header)), fld_tweak_words(A) = fld_tweak_words(B), fld_hash_bit_len(A) = fld_hash_bit_len(B), fld_byte_count(A) = fld_byte_count(B)]. spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final/zero_pad_b.vsm0000644000175000017500000000005611712765060030652 0ustar eugeneugenzero_pad_b,0,0,0,0,0,0,0,-nan,-nan,-nan,-nan, spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final/set_b_counter.slg0000644000175000017500000035260411712513676031376 0ustar eugeneugen***************************************************************************** Semantic Analysis of SPARK Text Examiner Pro Edition ***************************************************************************** SPARK Simplifier Pro Edition procedure Skein.Skein_512_Final.Set_B_Counter @@@@@@@@@@ VC: procedure_set_b_counter_1. @@@@@@@@@@ %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) -S- Applied substitution rule set_b_counte_rules(52). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. 
New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) New C1: bit__and(counter, 255) >= 0 New C3: bit__and(counter, 255) >= 0 -S- Applied substitution rule set_b_counte_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= 255) New C2: bit__and(counter, 255) <= 255 New C4: bit__and(counter, 255) <= 255 -S- Applied substitution rule set_b_counte_rules(96). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H4: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx), [ i___2]) and element(fld_b(local_ctx), [i___2]) <= 255) New C5: true -S- Applied substitution rule set_b_counte_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H4: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [ i___2]) <= 255) New C6: true *** Proved C5: true *** Proved C6: true -S- Applied substitution rule set_b_counte_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H6: fld_byte_count(fld_h(local_ctx)) >= 0 -S- Applied substitution rule set_b_counte_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: fld_byte_count(fld_h(local_ctx)) <= 2147483647 -S- Applied substitution rule set_b_counte_rules(40). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= 63 -S- Applied substitution rule set_b_counte_rules(46). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= 127 -S- Applied substitution rule set_b_counte_rules(58). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= 65535 -S- Applied substitution rule set_b_counte_rules(64). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= 4294967295 -S- Applied substitution rule set_b_counte_rules(70). 
This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: counter >= 0 New H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) New H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H3: counter <= 18446744073709551615 New H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= 18446744073709551615 New H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule set_b_counte_rules(86). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H8: fld_hash_bit_len(fld_h(local_ctx)) >= 0 -S- Applied substitution rule set_b_counte_rules(87). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H9: fld_hash_bit_len(fld_h(local_ctx)) <= 2147483640 -S- Applied substitution rule set_b_counte_rules(91). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx), [ i___1]) and element(fld_x(local_ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule set_b_counte_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [ i___1]) <= 18446744073709551615) %%% Simplified C1 further, to give: %%% C1: counter mod 256 >= 0 %%% Simplified C2 further, to give: %%% C2: counter mod 256 <= 255 %%% Simplified C3 further, to give: %%% C3: counter mod 256 >= 0 %%% Simplified C4 further, to give: %%% C4: counter mod 256 <= 255 *** Proved C1: counter mod 256 >= 0 *** Proved C2: counter mod 256 <= 255 *** Proved C3: counter mod 256 >= 0 *** Proved C4: counter mod 256 <= 255 *** PROVED VC. @@@@@@@@@@ VC: procedure_set_b_counter_2. @@@@@@@@@@ %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H25 has been replaced by "true". (It is already present, as H23). --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). *** Proved C5: 1 >= skein_512_block_bytes_index__first using hypothesis H27. -S- Applied substitution rule set_b_counte_rules(76). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New C1: true -S- Applied substitution rule set_b_counte_rules(77). 
This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New C2: true -S- Applied substitution rule set_b_counte_rules(34). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New C3: counter >= 0 -S- Applied substitution rule set_b_counte_rules(35). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New C4: counter <= 18446744073709551615 -S- Applied substitution rule set_b_counte_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) New H28: true New C6: true *** Proved C1: true *** Proved C2: true *** Proved C6: true -S- Applied substitution rule set_b_counte_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H6: fld_byte_count(fld_h(local_ctx)) >= 0 -S- Applied substitution rule set_b_counte_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: fld_byte_count(fld_h(local_ctx)) <= 2147483647 -S- Applied substitution rule set_b_counte_rules(40). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= 63 -S- Applied substitution rule set_b_counte_rules(46). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= 127 -S- Applied substitution rule set_b_counte_rules(52). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) New H23: bit__and(counter, 255) >= 0 -S- Applied substitution rule set_b_counte_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H24: bit__and(counter, 255) <= 255 New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= 255) -S- Applied substitution rule set_b_counte_rules(58). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= 65535 -S- Applied substitution rule set_b_counte_rules(64). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(65). 
This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= 4294967295 -S- Applied substitution rule set_b_counte_rules(70). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: counter >= 0 New H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) New H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H3: counter <= 18446744073709551615 New H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= 18446744073709551615 New H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule set_b_counte_rules(86). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H8: fld_hash_bit_len(fld_h(local_ctx)) >= 0 -S- Applied substitution rule set_b_counte_rules(87). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H9: fld_hash_bit_len(fld_h(local_ctx)) <= 2147483640 -S- Applied substitution rule set_b_counte_rules(91). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx), [ i___1]) and element(fld_x(local_ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule set_b_counte_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [ i___1]) <= 18446744073709551615) -S- Applied substitution rule set_b_counte_rules(96). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H27: true New H4: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [ i___2]) <= 255) *** Proved C3: counter >= 0 using hypothesis H2. *** Proved C4: counter <= 18446744073709551615 using hypothesis H3. *** PROVED VC. @@@@@@@@@@ VC: procedure_set_b_counter_3. @@@@@@@@@@ %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H25 has been replaced by "true". (It is already present, as H23). --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). -S- Applied substitution rule set_b_counte_rules(52). 
This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) New H23: bit__and(counter, 255) >= 0 New C1: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= 0 New C3: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= 0 -S- Applied substitution rule set_b_counte_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H24: bit__and(counter, 255) <= 255 New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= 255) New C2: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= 255 New C4: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= 255 -S- Applied substitution rule set_b_counte_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H6: fld_byte_count(fld_h(local_ctx)) >= 0 -S- Applied substitution rule set_b_counte_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: fld_byte_count(fld_h(local_ctx)) <= 2147483647 -S- Applied substitution rule set_b_counte_rules(34). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H31: counter >= 0 New H35: spark__unsigned__shift_right_64(counter, 8) >= 0 -S- Applied substitution rule set_b_counte_rules(35). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H32: counter <= 18446744073709551615 New H36: spark__unsigned__shift_right_64(counter, 8) <= 18446744073709551615 -S- Applied substitution rule set_b_counte_rules(40). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= 63 -S- Applied substitution rule set_b_counte_rules(46). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= 127 -S- Applied substitution rule set_b_counte_rules(58). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= 65535 -S- Applied substitution rule set_b_counte_rules(64). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. 
New H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= 4294967295 -S- Applied substitution rule set_b_counte_rules(70). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: counter >= 0 New H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) New H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H3: counter <= 18446744073709551615 New H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= 18446744073709551615 New H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule set_b_counte_rules(76). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H29: true -S- Applied substitution rule set_b_counte_rules(77). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H30: true -S- Applied substitution rule set_b_counte_rules(86). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H8: fld_hash_bit_len(fld_h(local_ctx)) >= 0 -S- Applied substitution rule set_b_counte_rules(87). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H9: fld_hash_bit_len(fld_h(local_ctx)) <= 2147483640 -S- Applied substitution rule set_b_counte_rules(91). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx), [ i___1]) and element(fld_x(local_ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule set_b_counte_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [ i___1]) <= 18446744073709551615) -S- Applied substitution rule set_b_counte_rules(96). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H4: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx), [ i___2]) and element(fld_b(local_ctx), [i___2]) <= 255) New H27: true New H33: true -S- Applied substitution rule set_b_counte_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H28: true New H34: true New H4: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [ i___2]) <= 255) *** Proved C1: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= 0 *** Proved C3: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= 0 *** Proved C2: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= 255 *** Proved C4: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= 255 *** PROVED VC. @@@@@@@@@@ VC: procedure_set_b_counter_4. 
@@@@@@@@@@ %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H25 has been replaced by "true". (It is already present, as H23). --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). --- Hypothesis H39 has been replaced by "true". (It is already present, as H37). --- Hypothesis H40 has been replaced by "true". (It is already present, as H38). *** Proved C1: 16 >= spark__unsigned__shift_count__first using hypothesis H29. *** Proved C3: counter >= interfaces__unsigned_64__first using hypothesis H31. *** Proved C4: counter <= interfaces__unsigned_64__last using hypothesis H32. *** Proved C5: 2 >= skein_512_block_bytes_index__first using hypothesis H27. -S- Applied substitution rule set_b_counte_rules(77). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H30: true New C2: true -S- Applied substitution rule set_b_counte_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) New H28: true New H34: true New C6: true *** Proved C2: true *** Proved C6: true *** PROVED VC. @@@@@@@@@@ VC: procedure_set_b_counter_5. @@@@@@@@@@ %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H25 has been replaced by "true". (It is already present, as H23). --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). --- Hypothesis H39 has been replaced by "true". (It is already present, as H37). --- Hypothesis H40 has been replaced by "true". (It is already present, as H38). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H32). -S- Applied substitution rule set_b_counte_rules(52). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. 
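Each "-S- Applied substitution rule set_b_counte_rules(N)" entry records the Simplifier consulting the rule file generated by the Examiner alongside these VCs; every rule applied here licenses replacing one symbolic type bound (spark__unsigned__byte__first, natural__last, skein_512_block_bytes_index__last, and so on) by its literal value (0, 2147483647, 63, ...). Once the bounds are literal, the remaining conclusions are ground inequalities. In the proof concluded just before the procedure_set_b_counter_4 header, for instance, rule set_b_counte_rules(53) rewrote the hypothesis bit__and(counter, 255) <= spark__unsigned__byte__last into bit__and(counter, 255) <= 255 and likewise rewrote conclusions C2 and C4 to bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= 255, which then follow directly: a bitwise "and" with 255, applied to a non-negative value, cannot fall outside 0 .. 255.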
New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) New H23: bit__and(counter, 255) >= 0 New H37: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= 0 New C1: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= 0 New C3: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= 0 -S- Applied substitution rule set_b_counte_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H24: bit__and(counter, 255) <= 255 New H38: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= 255 New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= 255) New C2: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= 255 New C4: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= 255 -S- Applied substitution rule set_b_counte_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H6: fld_byte_count(fld_h(local_ctx)) >= 0 -S- Applied substitution rule set_b_counte_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: fld_byte_count(fld_h(local_ctx)) <= 2147483647 -S- Applied substitution rule set_b_counte_rules(34). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H31: counter >= 0 New H35: spark__unsigned__shift_right_64(counter, 8) >= 0 New H47: spark__unsigned__shift_right_64(counter, 16) >= 0 -S- Applied substitution rule set_b_counte_rules(35). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H32: counter <= 18446744073709551615 New H36: spark__unsigned__shift_right_64(counter, 8) <= 18446744073709551615 New H48: spark__unsigned__shift_right_64(counter, 16) <= 18446744073709551615 -S- Applied substitution rule set_b_counte_rules(40). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= 63 -S- Applied substitution rule set_b_counte_rules(46). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= 127 -S- Applied substitution rule set_b_counte_rules(58). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= 65535 -S- Applied substitution rule set_b_counte_rules(64). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. 
New H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= 4294967295 -S- Applied substitution rule set_b_counte_rules(70). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: counter >= 0 New H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) New H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H3: counter <= 18446744073709551615 New H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= 18446744073709551615 New H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule set_b_counte_rules(76). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H29: true New H41: true -S- Applied substitution rule set_b_counte_rules(77). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H30: true New H42: true -S- Applied substitution rule set_b_counte_rules(86). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H8: fld_hash_bit_len(fld_h(local_ctx)) >= 0 -S- Applied substitution rule set_b_counte_rules(87). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H9: fld_hash_bit_len(fld_h(local_ctx)) <= 2147483640 -S- Applied substitution rule set_b_counte_rules(91). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx), [ i___1]) and element(fld_x(local_ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule set_b_counte_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [ i___1]) <= 18446744073709551615) -S- Applied substitution rule set_b_counte_rules(96). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H4: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx), [ i___2]) and element(fld_b(local_ctx), [i___2]) <= 255) New H27: true New H33: true New H45: true -S- Applied substitution rule set_b_counte_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. 
New H28: true New H34: true New H46: true New H4: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [ i___2]) <= 255) *** Proved C1: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= 0 *** Proved C3: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= 0 *** Proved C2: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= 255 *** Proved C4: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= 255 *** PROVED VC. @@@@@@@@@@ VC: procedure_set_b_counter_6. @@@@@@@@@@ %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H25 has been replaced by "true". (It is already present, as H23). --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). --- Hypothesis H39 has been replaced by "true". (It is already present, as H37). --- Hypothesis H40 has been replaced by "true". (It is already present, as H38). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H32). --- Hypothesis H51 has been replaced by "true". (It is already present, as H49). --- Hypothesis H52 has been replaced by "true". (It is already present, as H50). *** Proved C1: 24 >= spark__unsigned__shift_count__first using hypothesis H29. *** Proved C3: counter >= interfaces__unsigned_64__first using hypothesis H31. *** Proved C4: counter <= interfaces__unsigned_64__last using hypothesis H32. *** Proved C5: 3 >= skein_512_block_bytes_index__first using hypothesis H27. -S- Applied substitution rule set_b_counte_rules(77). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H30: true New H42: true New C2: true -S- Applied substitution rule set_b_counte_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) New H28: true New H34: true New H46: true New C6: true *** Proved C2: true *** Proved C6: true *** PROVED VC. @@@@@@@@@@ VC: procedure_set_b_counter_7. 
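These VCs come in pairs, one pair per byte store. The checks for a single assignment are split between an even-numbered VC such as procedure_set_b_counter_4 or _6 above, which establishes that the shift amount is a legal shift count (16 and 24 respectively, against the bounds 0 .. 64), that counter lies in the Unsigned_64 range, and that the target index (2 and 3 respectively) lies within 0 .. 63, and the following odd-numbered VC (_5, _7), which establishes that the masked result itself fits the byte subtype. As a concrete instance of the latter: if Counter were 16#0123_4567_89AB_CDEF#, then shifting right by 24 gives 16#0000_0001_2345_6789#, and masking with 255 leaves 16#89#, comfortably within 0 .. 255; this is exactly the shape of conclusions C1 .. C4 in the proof of procedure_set_b_counter_7 that follows.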
@@@@@@@@@@ %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H25 has been replaced by "true". (It is already present, as H23). --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). --- Hypothesis H39 has been replaced by "true". (It is already present, as H37). --- Hypothesis H40 has been replaced by "true". (It is already present, as H38). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H32). --- Hypothesis H51 has been replaced by "true". (It is already present, as H49). --- Hypothesis H52 has been replaced by "true". (It is already present, as H50). --- Hypothesis H55 has been replaced by "true". (It is already present, as H31). --- Hypothesis H56 has been replaced by "true". (It is already present, as H32). -S- Applied substitution rule set_b_counte_rules(52). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) New H23: bit__and(counter, 255) >= 0 New H37: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= 0 New H49: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= 0 New C1: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= 0 New C3: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= 0 -S- Applied substitution rule set_b_counte_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H24: bit__and(counter, 255) <= 255 New H38: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= 255 New H50: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= 255 New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= 255) New C2: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= 255 New C4: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= 255 -S- Applied substitution rule set_b_counte_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H6: fld_byte_count(fld_h(local_ctx)) >= 0 -S- Applied substitution rule set_b_counte_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: fld_byte_count(fld_h(local_ctx)) <= 2147483647 -S- Applied substitution rule set_b_counte_rules(34). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. 
New H31: counter >= 0 New H35: spark__unsigned__shift_right_64(counter, 8) >= 0 New H47: spark__unsigned__shift_right_64(counter, 16) >= 0 New H59: spark__unsigned__shift_right_64(counter, 24) >= 0 -S- Applied substitution rule set_b_counte_rules(35). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H32: counter <= 18446744073709551615 New H36: spark__unsigned__shift_right_64(counter, 8) <= 18446744073709551615 New H48: spark__unsigned__shift_right_64(counter, 16) <= 18446744073709551615 New H60: spark__unsigned__shift_right_64(counter, 24) <= 18446744073709551615 -S- Applied substitution rule set_b_counte_rules(40). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= 63 -S- Applied substitution rule set_b_counte_rules(46). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= 127 -S- Applied substitution rule set_b_counte_rules(58). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= 65535 -S- Applied substitution rule set_b_counte_rules(64). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= 4294967295 -S- Applied substitution rule set_b_counte_rules(70). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: counter >= 0 New H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) New H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H3: counter <= 18446744073709551615 New H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= 18446744073709551615 New H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule set_b_counte_rules(76). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H29: true New H41: true New H53: true -S- Applied substitution rule set_b_counte_rules(77). 
This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H30: true New H42: true New H54: true -S- Applied substitution rule set_b_counte_rules(86). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H8: fld_hash_bit_len(fld_h(local_ctx)) >= 0 -S- Applied substitution rule set_b_counte_rules(87). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H9: fld_hash_bit_len(fld_h(local_ctx)) <= 2147483640 -S- Applied substitution rule set_b_counte_rules(91). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx), [ i___1]) and element(fld_x(local_ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule set_b_counte_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [ i___1]) <= 18446744073709551615) -S- Applied substitution rule set_b_counte_rules(96). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H4: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx), [ i___2]) and element(fld_b(local_ctx), [i___2]) <= 255) New H27: true New H33: true New H45: true New H57: true -S- Applied substitution rule set_b_counte_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H28: true New H34: true New H46: true New H58: true New H4: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [ i___2]) <= 255) *** Proved C1: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= 0 *** Proved C3: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= 0 *** Proved C2: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= 255 *** Proved C4: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= 255 *** PROVED VC. @@@@@@@@@@ VC: procedure_set_b_counter_8. @@@@@@@@@@ %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H25 has been replaced by "true". (It is already present, as H23). --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). --- Hypothesis H39 has been replaced by "true". (It is already present, as H37). --- Hypothesis H40 has been replaced by "true". (It is already present, as H38). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H32). --- Hypothesis H51 has been replaced by "true". (It is already present, as H49). --- Hypothesis H52 has been replaced by "true". 
(It is already present, as H50). --- Hypothesis H55 has been replaced by "true". (It is already present, as H31). --- Hypothesis H56 has been replaced by "true". (It is already present, as H32). --- Hypothesis H63 has been replaced by "true". (It is already present, as H61). --- Hypothesis H64 has been replaced by "true". (It is already present, as H62). *** Proved C1: 32 >= spark__unsigned__shift_count__first using hypothesis H29. *** Proved C3: counter >= interfaces__unsigned_64__first using hypothesis H31. *** Proved C4: counter <= interfaces__unsigned_64__last using hypothesis H32. *** Proved C5: 4 >= skein_512_block_bytes_index__first using hypothesis H27. -S- Applied substitution rule set_b_counte_rules(77). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H30: true New H42: true New H54: true New C2: true -S- Applied substitution rule set_b_counte_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) New H28: true New H34: true New H46: true New H58: true New C6: true *** Proved C2: true *** Proved C6: true *** PROVED VC. @@@@@@@@@@ VC: procedure_set_b_counter_9. @@@@@@@@@@ %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H25 has been replaced by "true". (It is already present, as H23). --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). --- Hypothesis H39 has been replaced by "true". (It is already present, as H37). --- Hypothesis H40 has been replaced by "true". (It is already present, as H38). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H32). --- Hypothesis H51 has been replaced by "true". (It is already present, as H49). --- Hypothesis H52 has been replaced by "true". (It is already present, as H50). --- Hypothesis H55 has been replaced by "true". (It is already present, as H31). --- Hypothesis H56 has been replaced by "true". (It is already present, as H32). --- Hypothesis H63 has been replaced by "true". (It is already present, as H61). --- Hypothesis H64 has been replaced by "true". (It is already present, as H62). --- Hypothesis H67 has been replaced by "true". (It is already present, as H31). --- Hypothesis H68 has been replaced by "true". (It is already present, as H32). -S- Applied substitution rule set_b_counte_rules(52). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. 
New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) New H23: bit__and(counter, 255) >= 0 New H37: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= 0 New H49: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= 0 New H61: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= 0 New C1: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) >= 0 New C3: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) >= 0 -S- Applied substitution rule set_b_counte_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H24: bit__and(counter, 255) <= 255 New H38: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= 255 New H50: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= 255 New H62: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= 255 New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= 255) New C2: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) <= 255 New C4: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) <= 255 -S- Applied substitution rule set_b_counte_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H6: fld_byte_count(fld_h(local_ctx)) >= 0 -S- Applied substitution rule set_b_counte_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: fld_byte_count(fld_h(local_ctx)) <= 2147483647 -S- Applied substitution rule set_b_counte_rules(34). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H31: counter >= 0 New H35: spark__unsigned__shift_right_64(counter, 8) >= 0 New H47: spark__unsigned__shift_right_64(counter, 16) >= 0 New H59: spark__unsigned__shift_right_64(counter, 24) >= 0 New H71: spark__unsigned__shift_right_64(counter, 32) >= 0 -S- Applied substitution rule set_b_counte_rules(35). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H32: counter <= 18446744073709551615 New H36: spark__unsigned__shift_right_64(counter, 8) <= 18446744073709551615 New H48: spark__unsigned__shift_right_64(counter, 16) <= 18446744073709551615 New H60: spark__unsigned__shift_right_64(counter, 24) <= 18446744073709551615 New H72: spark__unsigned__shift_right_64(counter, 32) <= 18446744073709551615 -S- Applied substitution rule set_b_counte_rules(40). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= 63 -S- Applied substitution rule set_b_counte_rules(46). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= 127 -S- Applied substitution rule set_b_counte_rules(58). 
This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= 65535 -S- Applied substitution rule set_b_counte_rules(64). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= 4294967295 -S- Applied substitution rule set_b_counte_rules(70). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: counter >= 0 New H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) New H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H3: counter <= 18446744073709551615 New H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= 18446744073709551615 New H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule set_b_counte_rules(76). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H29: true New H41: true New H53: true New H65: true -S- Applied substitution rule set_b_counte_rules(77). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H30: true New H42: true New H54: true New H66: true -S- Applied substitution rule set_b_counte_rules(86). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H8: fld_hash_bit_len(fld_h(local_ctx)) >= 0 -S- Applied substitution rule set_b_counte_rules(87). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H9: fld_hash_bit_len(fld_h(local_ctx)) <= 2147483640 -S- Applied substitution rule set_b_counte_rules(91). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx), [ i___1]) and element(fld_x(local_ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule set_b_counte_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [ i___1]) <= 18446744073709551615) -S- Applied substitution rule set_b_counte_rules(96). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. 
New H4: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx), [ i___2]) and element(fld_b(local_ctx), [i___2]) <= 255) New H27: true New H33: true New H45: true New H57: true New H69: true -S- Applied substitution rule set_b_counte_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H28: true New H34: true New H46: true New H58: true New H70: true New H4: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [ i___2]) <= 255) *** Proved C1: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) >= 0 *** Proved C3: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) >= 0 *** Proved C2: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) <= 255 *** Proved C4: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) <= 255 *** PROVED VC. @@@@@@@@@@ VC: procedure_set_b_counter_10. @@@@@@@@@@ %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H25 has been replaced by "true". (It is already present, as H23). --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). --- Hypothesis H39 has been replaced by "true". (It is already present, as H37). --- Hypothesis H40 has been replaced by "true". (It is already present, as H38). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H32). --- Hypothesis H51 has been replaced by "true". (It is already present, as H49). --- Hypothesis H52 has been replaced by "true". (It is already present, as H50). --- Hypothesis H55 has been replaced by "true". (It is already present, as H31). --- Hypothesis H56 has been replaced by "true". (It is already present, as H32). --- Hypothesis H63 has been replaced by "true". (It is already present, as H61). --- Hypothesis H64 has been replaced by "true". (It is already present, as H62). --- Hypothesis H67 has been replaced by "true". (It is already present, as H31). --- Hypothesis H68 has been replaced by "true". (It is already present, as H32). --- Hypothesis H75 has been replaced by "true". (It is already present, as H73). --- Hypothesis H76 has been replaced by "true". (It is already present, as H74). *** Proved C1: 40 >= spark__unsigned__shift_count__first using hypothesis H29. *** Proved C3: counter >= interfaces__unsigned_64__first using hypothesis H31. *** Proved C4: counter <= interfaces__unsigned_64__last using hypothesis H32. *** Proved C5: 5 >= skein_512_block_bytes_index__first using hypothesis H27. -S- Applied substitution rule set_b_counte_rules(77). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H30: true New H42: true New H54: true New H66: true New C2: true -S- Applied substitution rule set_b_counte_rules(97). 
This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) New H28: true New H34: true New H46: true New H58: true New H70: true New C6: true *** Proved C2: true *** Proved C6: true *** PROVED VC. @@@@@@@@@@ VC: procedure_set_b_counter_11. @@@@@@@@@@ %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H25 has been replaced by "true". (It is already present, as H23). --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). --- Hypothesis H39 has been replaced by "true". (It is already present, as H37). --- Hypothesis H40 has been replaced by "true". (It is already present, as H38). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H32). --- Hypothesis H51 has been replaced by "true". (It is already present, as H49). --- Hypothesis H52 has been replaced by "true". (It is already present, as H50). --- Hypothesis H55 has been replaced by "true". (It is already present, as H31). --- Hypothesis H56 has been replaced by "true". (It is already present, as H32). --- Hypothesis H63 has been replaced by "true". (It is already present, as H61). --- Hypothesis H64 has been replaced by "true". (It is already present, as H62). --- Hypothesis H67 has been replaced by "true". (It is already present, as H31). --- Hypothesis H68 has been replaced by "true". (It is already present, as H32). --- Hypothesis H75 has been replaced by "true". (It is already present, as H73). --- Hypothesis H76 has been replaced by "true". (It is already present, as H74). --- Hypothesis H79 has been replaced by "true". (It is already present, as H31). --- Hypothesis H80 has been replaced by "true". (It is already present, as H32). -S- Applied substitution rule set_b_counte_rules(52). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) New H23: bit__and(counter, 255) >= 0 New H37: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= 0 New H49: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= 0 New H61: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= 0 New H73: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) >= 0 New C1: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) >= 0 New C3: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) >= 0 -S- Applied substitution rule set_b_counte_rules(53). 
This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H24: bit__and(counter, 255) <= 255 New H38: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= 255 New H50: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= 255 New H62: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= 255 New H74: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) <= 255 New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= 255) New C2: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) <= 255 New C4: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) <= 255 -S- Applied substitution rule set_b_counte_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H6: fld_byte_count(fld_h(local_ctx)) >= 0 -S- Applied substitution rule set_b_counte_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: fld_byte_count(fld_h(local_ctx)) <= 2147483647 -S- Applied substitution rule set_b_counte_rules(34). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H31: counter >= 0 New H35: spark__unsigned__shift_right_64(counter, 8) >= 0 New H47: spark__unsigned__shift_right_64(counter, 16) >= 0 New H59: spark__unsigned__shift_right_64(counter, 24) >= 0 New H71: spark__unsigned__shift_right_64(counter, 32) >= 0 New H83: spark__unsigned__shift_right_64(counter, 40) >= 0 -S- Applied substitution rule set_b_counte_rules(35). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H32: counter <= 18446744073709551615 New H36: spark__unsigned__shift_right_64(counter, 8) <= 18446744073709551615 New H48: spark__unsigned__shift_right_64(counter, 16) <= 18446744073709551615 New H60: spark__unsigned__shift_right_64(counter, 24) <= 18446744073709551615 New H72: spark__unsigned__shift_right_64(counter, 32) <= 18446744073709551615 New H84: spark__unsigned__shift_right_64(counter, 40) <= 18446744073709551615 -S- Applied substitution rule set_b_counte_rules(40). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= 63 -S- Applied substitution rule set_b_counte_rules(46). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= 127 -S- Applied substitution rule set_b_counte_rules(58). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= 65535 -S- Applied substitution rule set_b_counte_rules(64). 
This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= 4294967295 -S- Applied substitution rule set_b_counte_rules(70). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: counter >= 0 New H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) New H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H3: counter <= 18446744073709551615 New H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= 18446744073709551615 New H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule set_b_counte_rules(76). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H29: true New H41: true New H53: true New H65: true New H77: true -S- Applied substitution rule set_b_counte_rules(77). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H30: true New H42: true New H54: true New H66: true New H78: true -S- Applied substitution rule set_b_counte_rules(86). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H8: fld_hash_bit_len(fld_h(local_ctx)) >= 0 -S- Applied substitution rule set_b_counte_rules(87). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H9: fld_hash_bit_len(fld_h(local_ctx)) <= 2147483640 -S- Applied substitution rule set_b_counte_rules(91). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx), [ i___1]) and element(fld_x(local_ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule set_b_counte_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [ i___1]) <= 18446744073709551615) -S- Applied substitution rule set_b_counte_rules(96). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H4: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx), [ i___2]) and element(fld_b(local_ctx), [i___2]) <= 255) New H27: true New H33: true New H45: true New H57: true New H69: true New H81: true -S- Applied substitution rule set_b_counte_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. 
New H28: true New H34: true New H46: true New H58: true New H70: true New H82: true New H4: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [ i___2]) <= 255) *** Proved C1: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) >= 0 *** Proved C3: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) >= 0 *** Proved C2: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) <= 255 *** Proved C4: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) <= 255 *** PROVED VC. @@@@@@@@@@ VC: procedure_set_b_counter_12. @@@@@@@@@@ %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H25 has been replaced by "true". (It is already present, as H23). --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). --- Hypothesis H39 has been replaced by "true". (It is already present, as H37). --- Hypothesis H40 has been replaced by "true". (It is already present, as H38). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H32). --- Hypothesis H51 has been replaced by "true". (It is already present, as H49). --- Hypothesis H52 has been replaced by "true". (It is already present, as H50). --- Hypothesis H55 has been replaced by "true". (It is already present, as H31). --- Hypothesis H56 has been replaced by "true". (It is already present, as H32). --- Hypothesis H63 has been replaced by "true". (It is already present, as H61). --- Hypothesis H64 has been replaced by "true". (It is already present, as H62). --- Hypothesis H67 has been replaced by "true". (It is already present, as H31). --- Hypothesis H68 has been replaced by "true". (It is already present, as H32). --- Hypothesis H75 has been replaced by "true". (It is already present, as H73). --- Hypothesis H76 has been replaced by "true". (It is already present, as H74). --- Hypothesis H79 has been replaced by "true". (It is already present, as H31). --- Hypothesis H80 has been replaced by "true". (It is already present, as H32). --- Hypothesis H87 has been replaced by "true". (It is already present, as H85). --- Hypothesis H88 has been replaced by "true". (It is already present, as H86). *** Proved C1: 48 >= spark__unsigned__shift_count__first using hypothesis H29. *** Proved C3: counter >= interfaces__unsigned_64__first using hypothesis H31. *** Proved C4: counter <= interfaces__unsigned_64__last using hypothesis H32. *** Proved C5: 6 >= skein_512_block_bytes_index__first using hypothesis H27. -S- Applied substitution rule set_b_counte_rules(77). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H30: true New H42: true New H54: true New H66: true New H78: true New C2: true -S- Applied substitution rule set_b_counte_rules(97). 
This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) New H28: true New H34: true New H46: true New H58: true New H70: true New H82: true New C6: true *** Proved C2: true *** Proved C6: true *** PROVED VC. @@@@@@@@@@ VC: procedure_set_b_counter_13. @@@@@@@@@@ %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H25 has been replaced by "true". (It is already present, as H23). --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). --- Hypothesis H39 has been replaced by "true". (It is already present, as H37). --- Hypothesis H40 has been replaced by "true". (It is already present, as H38). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H32). --- Hypothesis H51 has been replaced by "true". (It is already present, as H49). --- Hypothesis H52 has been replaced by "true". (It is already present, as H50). --- Hypothesis H55 has been replaced by "true". (It is already present, as H31). --- Hypothesis H56 has been replaced by "true". (It is already present, as H32). --- Hypothesis H63 has been replaced by "true". (It is already present, as H61). --- Hypothesis H64 has been replaced by "true". (It is already present, as H62). --- Hypothesis H67 has been replaced by "true". (It is already present, as H31). --- Hypothesis H68 has been replaced by "true". (It is already present, as H32). --- Hypothesis H75 has been replaced by "true". (It is already present, as H73). --- Hypothesis H76 has been replaced by "true". (It is already present, as H74). --- Hypothesis H79 has been replaced by "true". (It is already present, as H31). --- Hypothesis H80 has been replaced by "true". (It is already present, as H32). --- Hypothesis H87 has been replaced by "true". (It is already present, as H85). --- Hypothesis H88 has been replaced by "true". (It is already present, as H86). --- Hypothesis H91 has been replaced by "true". (It is already present, as H31). --- Hypothesis H92 has been replaced by "true". (It is already present, as H32). -S- Applied substitution rule set_b_counte_rules(52). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. 
New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) New H23: bit__and(counter, 255) >= 0 New H37: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= 0 New H49: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= 0 New H61: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= 0 New H73: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) >= 0 New H85: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) >= 0 New C1: bit__and(spark__unsigned__shift_right_64(counter, 48), 255) >= 0 New C3: bit__and(spark__unsigned__shift_right_64(counter, 48), 255) >= 0 -S- Applied substitution rule set_b_counte_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H24: bit__and(counter, 255) <= 255 New H38: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= 255 New H50: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= 255 New H62: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= 255 New H74: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) <= 255 New H86: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) <= 255 New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= 255) New C2: bit__and(spark__unsigned__shift_right_64(counter, 48), 255) <= 255 New C4: bit__and(spark__unsigned__shift_right_64(counter, 48), 255) <= 255 -S- Applied substitution rule set_b_counte_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H6: fld_byte_count(fld_h(local_ctx)) >= 0 -S- Applied substitution rule set_b_counte_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: fld_byte_count(fld_h(local_ctx)) <= 2147483647 -S- Applied substitution rule set_b_counte_rules(34). This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H31: counter >= 0 New H35: spark__unsigned__shift_right_64(counter, 8) >= 0 New H47: spark__unsigned__shift_right_64(counter, 16) >= 0 New H59: spark__unsigned__shift_right_64(counter, 24) >= 0 New H71: spark__unsigned__shift_right_64(counter, 32) >= 0 New H83: spark__unsigned__shift_right_64(counter, 40) >= 0 New H95: spark__unsigned__shift_right_64(counter, 48) >= 0 -S- Applied substitution rule set_b_counte_rules(35). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H32: counter <= 18446744073709551615 New H36: spark__unsigned__shift_right_64(counter, 8) <= 18446744073709551615 New H48: spark__unsigned__shift_right_64(counter, 16) <= 18446744073709551615 New H60: spark__unsigned__shift_right_64(counter, 24) <= 18446744073709551615 New H72: spark__unsigned__shift_right_64(counter, 32) <= 18446744073709551615 New H84: spark__unsigned__shift_right_64(counter, 40) <= 18446744073709551615 New H96: spark__unsigned__shift_right_64(counter, 48) <= 18446744073709551615 -S- Applied substitution rule set_b_counte_rules(40). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(41). 
This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= 63 -S- Applied substitution rule set_b_counte_rules(46). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= 127 -S- Applied substitution rule set_b_counte_rules(58). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= 65535 -S- Applied substitution rule set_b_counte_rules(64). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= 4294967295 -S- Applied substitution rule set_b_counte_rules(70). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: counter >= 0 New H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) New H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. New H3: counter <= 18446744073709551615 New H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= 18446744073709551615 New H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule set_b_counte_rules(76). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H29: true New H41: true New H53: true New H65: true New H77: true New H89: true -S- Applied substitution rule set_b_counte_rules(77). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H30: true New H42: true New H54: true New H66: true New H78: true New H90: true -S- Applied substitution rule set_b_counte_rules(86). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H8: fld_hash_bit_len(fld_h(local_ctx)) >= 0 -S- Applied substitution rule set_b_counte_rules(87). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H9: fld_hash_bit_len(fld_h(local_ctx)) <= 2147483640 -S- Applied substitution rule set_b_counte_rules(91). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. 
New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx), [ i___1]) and element(fld_x(local_ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule set_b_counte_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [ i___1]) <= 18446744073709551615) -S- Applied substitution rule set_b_counte_rules(96). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H4: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx), [ i___2]) and element(fld_b(local_ctx), [i___2]) <= 255) New H27: true New H33: true New H45: true New H57: true New H69: true New H81: true New H93: true -S- Applied substitution rule set_b_counte_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H28: true New H34: true New H46: true New H58: true New H70: true New H82: true New H94: true New H4: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [ i___2]) <= 255) *** Proved C1: bit__and(spark__unsigned__shift_right_64(counter, 48), 255) >= 0 *** Proved C3: bit__and(spark__unsigned__shift_right_64(counter, 48), 255) >= 0 *** Proved C2: bit__and(spark__unsigned__shift_right_64(counter, 48), 255) <= 255 *** Proved C4: bit__and(spark__unsigned__shift_right_64(counter, 48), 255) <= 255 *** PROVED VC. @@@@@@@@@@ VC: procedure_set_b_counter_14. @@@@@@@@@@ %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H25 has been replaced by "true". (It is already present, as H23). --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). --- Hypothesis H39 has been replaced by "true". (It is already present, as H37). --- Hypothesis H40 has been replaced by "true". (It is already present, as H38). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H32). --- Hypothesis H51 has been replaced by "true". (It is already present, as H49). --- Hypothesis H52 has been replaced by "true". (It is already present, as H50). --- Hypothesis H55 has been replaced by "true". (It is already present, as H31). --- Hypothesis H56 has been replaced by "true". (It is already present, as H32). --- Hypothesis H63 has been replaced by "true". (It is already present, as H61). --- Hypothesis H64 has been replaced by "true". (It is already present, as H62). --- Hypothesis H67 has been replaced by "true". (It is already present, as H31). --- Hypothesis H68 has been replaced by "true". (It is already present, as H32). --- Hypothesis H75 has been replaced by "true". 
(It is already present, as H73). --- Hypothesis H76 has been replaced by "true". (It is already present, as H74). --- Hypothesis H79 has been replaced by "true". (It is already present, as H31). --- Hypothesis H80 has been replaced by "true". (It is already present, as H32). --- Hypothesis H87 has been replaced by "true". (It is already present, as H85). --- Hypothesis H88 has been replaced by "true". (It is already present, as H86). --- Hypothesis H91 has been replaced by "true". (It is already present, as H31). --- Hypothesis H92 has been replaced by "true". (It is already present, as H32). --- Hypothesis H99 has been replaced by "true". (It is already present, as H97). --- Hypothesis H100 has been replaced by "true". (It is already present, as H98). *** Proved C1: 56 >= spark__unsigned__shift_count__first using hypothesis H29. *** Proved C3: counter >= interfaces__unsigned_64__first using hypothesis H31. *** Proved C4: counter <= interfaces__unsigned_64__last using hypothesis H32. *** Proved C5: 7 >= skein_512_block_bytes_index__first using hypothesis H27. -S- Applied substitution rule set_b_counte_rules(77). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H30: true New H42: true New H54: true New H66: true New H78: true New H90: true New C2: true -S- Applied substitution rule set_b_counte_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= 63 -> spark__unsigned__byte__first <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) New H28: true New H34: true New H46: true New H58: true New H70: true New H82: true New H94: true New C6: true *** Proved C2: true *** Proved C6: true *** PROVED VC. @@@@@@@@@@ VC: procedure_set_b_counter_15. @@@@@@@@@@ %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H25 has been replaced by "true". (It is already present, as H23). --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). --- Hypothesis H39 has been replaced by "true". (It is already present, as H37). --- Hypothesis H40 has been replaced by "true". (It is already present, as H38). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H32). --- Hypothesis H51 has been replaced by "true". (It is already present, as H49). --- Hypothesis H52 has been replaced by "true". (It is already present, as H50). --- Hypothesis H55 has been replaced by "true". (It is already present, as H31). --- Hypothesis H56 has been replaced by "true". (It is already present, as H32). --- Hypothesis H63 has been replaced by "true". (It is already present, as H61). --- Hypothesis H64 has been replaced by "true". (It is already present, as H62). 
--- Hypothesis H67 has been replaced by "true". (It is already present, as H31). --- Hypothesis H68 has been replaced by "true". (It is already present, as H32). --- Hypothesis H75 has been replaced by "true". (It is already present, as H73). --- Hypothesis H76 has been replaced by "true". (It is already present, as H74). --- Hypothesis H79 has been replaced by "true". (It is already present, as H31). --- Hypothesis H80 has been replaced by "true". (It is already present, as H32). --- Hypothesis H87 has been replaced by "true". (It is already present, as H85). --- Hypothesis H88 has been replaced by "true". (It is already present, as H86). --- Hypothesis H91 has been replaced by "true". (It is already present, as H31). --- Hypothesis H92 has been replaced by "true". (It is already present, as H32). --- Hypothesis H99 has been replaced by "true". (It is already present, as H97). --- Hypothesis H100 has been replaced by "true". (It is already present, as H98). --- Hypothesis H103 has been replaced by "true". (It is already present, as H31). --- Hypothesis H104 has been replaced by "true". (It is already present, as H32). -S- Applied substitution rule set_b_counte_rules(52). This was achieved by replacing all occurrences of spark__unsigned__byte__first by: 0. New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) New H23: bit__and(counter, 255) >= 0 New H37: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) >= 0 New H49: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) >= 0 New H61: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) >= 0 New H73: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) >= 0 New H85: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) >= 0 New H97: bit__and(spark__unsigned__shift_right_64(counter, 48), 255) >= 0 New C1: bit__and(spark__unsigned__shift_right_64(counter, 56), 255) >= 0 New C3: bit__and(spark__unsigned__shift_right_64(counter, 56), 255) >= 0 -S- Applied substitution rule set_b_counte_rules(53). This was achieved by replacing all occurrences of spark__unsigned__byte__last by: 255. New H24: bit__and(counter, 255) <= 255 New H38: bit__and(spark__unsigned__shift_right_64(counter, 8), 255) <= 255 New H50: bit__and(spark__unsigned__shift_right_64(counter, 16), 255) <= 255 New H62: bit__and(spark__unsigned__shift_right_64(counter, 24), 255) <= 255 New H74: bit__and(spark__unsigned__shift_right_64(counter, 32), 255) <= 255 New H86: bit__and(spark__unsigned__shift_right_64(counter, 40), 255) <= 255 New H98: bit__and(spark__unsigned__shift_right_64(counter, 48), 255) <= 255 New H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element( fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= 255) New C2: bit__and(spark__unsigned__shift_right_64(counter, 56), 255) <= 255 New C4: bit__and(spark__unsigned__shift_right_64(counter, 56), 255) <= 255 -S- Applied substitution rule set_b_counte_rules(7). This was achieved by replacing all occurrences of natural__first by: 0. New H6: fld_byte_count(fld_h(local_ctx)) >= 0 -S- Applied substitution rule set_b_counte_rules(8). This was achieved by replacing all occurrences of natural__last by: 2147483647. New H7: fld_byte_count(fld_h(local_ctx)) <= 2147483647 -S- Applied substitution rule set_b_counte_rules(34). 
This was achieved by replacing all occurrences of interfaces__unsigned_64__first by: 0. New H31: counter >= 0 New H35: spark__unsigned__shift_right_64(counter, 8) >= 0 New H47: spark__unsigned__shift_right_64(counter, 16) >= 0 New H59: spark__unsigned__shift_right_64(counter, 24) >= 0 New H71: spark__unsigned__shift_right_64(counter, 32) >= 0 New H83: spark__unsigned__shift_right_64(counter, 40) >= 0 New H95: spark__unsigned__shift_right_64(counter, 48) >= 0 New H107: spark__unsigned__shift_right_64(counter, 56) >= 0 -S- Applied substitution rule set_b_counte_rules(35). This was achieved by replacing all occurrences of interfaces__unsigned_64__last by: 18446744073709551615. New H32: counter <= 18446744073709551615 New H36: spark__unsigned__shift_right_64(counter, 8) <= 18446744073709551615 New H48: spark__unsigned__shift_right_64(counter, 16) <= 18446744073709551615 New H60: spark__unsigned__shift_right_64(counter, 24) <= 18446744073709551615 New H72: spark__unsigned__shift_right_64(counter, 32) <= 18446744073709551615 New H84: spark__unsigned__shift_right_64(counter, 40) <= 18446744073709551615 New H96: spark__unsigned__shift_right_64(counter, 48) <= 18446744073709551615 New H108: spark__unsigned__shift_right_64(counter, 56) <= 18446744073709551615 -S- Applied substitution rule set_b_counte_rules(40). This was achieved by replacing all occurrences of spark__unsigned__u6__first by: 0. New H12: fld_field_type(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(41). This was achieved by replacing all occurrences of spark__unsigned__u6__last by: 63. New H13: fld_field_type(fld_tweak_words(fld_h(local_ctx))) <= 63 -S- Applied substitution rule set_b_counte_rules(46). This was achieved by replacing all occurrences of spark__unsigned__u7__first by: 0. New H15: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(47). This was achieved by replacing all occurrences of spark__unsigned__u7__last by: 127. New H16: fld_tree_level(fld_tweak_words(fld_h(local_ctx))) <= 127 -S- Applied substitution rule set_b_counte_rules(58). This was achieved by replacing all occurrences of spark__unsigned__u16__first by: 0. New H17: fld_reserved(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(59). This was achieved by replacing all occurrences of spark__unsigned__u16__last by: 65535. New H18: fld_reserved(fld_tweak_words(fld_h(local_ctx))) <= 65535 -S- Applied substitution rule set_b_counte_rules(64). This was achieved by replacing all occurrences of spark__unsigned__u32__first by: 0. New H19: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(65). This was achieved by replacing all occurrences of spark__unsigned__u32__last by: 4294967295. New H20: fld_byte_count_msb(fld_tweak_words(fld_h(local_ctx))) <= 4294967295 -S- Applied substitution rule set_b_counte_rules(70). This was achieved by replacing all occurrences of spark__unsigned__u64__first by: 0. New H2: counter >= 0 New H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) New H21: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) >= 0 -S- Applied substitution rule set_b_counte_rules(71). This was achieved by replacing all occurrences of spark__unsigned__u64__last by: 18446744073709551615. 
New H3: counter <= 18446744073709551615 New H22: fld_byte_count_lsb(fld_tweak_words(fld_h(local_ctx))) <= 18446744073709551615 New H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element( fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule set_b_counte_rules(76). This was achieved by replacing all occurrences of spark__unsigned__shift_count__first by: 0. New H29: true New H41: true New H53: true New H65: true New H77: true New H89: true New H101: true -S- Applied substitution rule set_b_counte_rules(77). This was achieved by replacing all occurrences of spark__unsigned__shift_count__last by: 64. New H30: true New H42: true New H54: true New H66: true New H78: true New H90: true New H102: true -S- Applied substitution rule set_b_counte_rules(86). This was achieved by replacing all occurrences of hash_bit_length__first by: 0. New H8: fld_hash_bit_len(fld_h(local_ctx)) >= 0 -S- Applied substitution rule set_b_counte_rules(87). This was achieved by replacing all occurrences of hash_bit_length__last by: 2147483640. New H9: fld_hash_bit_len(fld_h(local_ctx)) <= 2147483640 -S- Applied substitution rule set_b_counte_rules(91). This was achieved by replacing all occurrences of skein_512_state_words_index__first by: 0. New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= skein_512_state_words_index__last -> 0 <= element(fld_x(local_ctx), [ i___1]) and element(fld_x(local_ctx), [i___1]) <= 18446744073709551615) -S- Applied substitution rule set_b_counte_rules(92). This was achieved by replacing all occurrences of skein_512_state_words_index__last by: 7. New H5: for_all(i___1 : integer, 0 <= i___1 and i___1 <= 7 -> 0 <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [ i___1]) <= 18446744073709551615) -S- Applied substitution rule set_b_counte_rules(96). This was achieved by replacing all occurrences of skein_512_block_bytes_index__first by: 0. New H4: for_all(i___2 : integer, 0 <= i___2 and i___2 <= skein_512_block_bytes_index__last -> 0 <= element(fld_b(local_ctx), [ i___2]) and element(fld_b(local_ctx), [i___2]) <= 255) New H27: true New H33: true New H45: true New H57: true New H69: true New H81: true New H93: true New H105: true -S- Applied substitution rule set_b_counte_rules(97). This was achieved by replacing all occurrences of skein_512_block_bytes_index__last by: 63. New H28: true New H34: true New H46: true New H58: true New H70: true New H82: true New H94: true New H106: true New H4: for_all(i___2 : integer, 0 <= i___2 and i___2 <= 63 -> 0 <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [ i___2]) <= 255) *** Proved C1: bit__and(spark__unsigned__shift_right_64(counter, 56), 255) >= 0 *** Proved C3: bit__and(spark__unsigned__shift_right_64(counter, 56), 255) >= 0 *** Proved C2: bit__and(spark__unsigned__shift_right_64(counter, 56), 255) <= 255 *** Proved C4: bit__and(spark__unsigned__shift_right_64(counter, 56), 255) <= 255 *** PROVED VC. @@@@@@@@@@ VC: procedure_set_b_counter_16. 
@@@@@@@@@@ %%% Simplified H4 on reading formula in, to give: %%% H4: for_all(i___2 : integer, skein_512_block_bytes_index__first <= i___2 and i___2 <= skein_512_block_bytes_index__last -> spark__unsigned__byte__first <= element(fld_b(local_ctx), [i___2]) and element(fld_b(local_ctx), [i___2]) <= spark__unsigned__byte__last) %%% Simplified H5 on reading formula in, to give: %%% H5: for_all(i___1 : integer, skein_512_state_words_index__first <= i___1 and i___1 <= skein_512_state_words_index__last -> spark__unsigned__u64__first <= element(fld_x(local_ctx), [i___1]) and element(fld_x(local_ctx), [i___1]) <= spark__unsigned__u64__last) --- Hypothesis H25 has been replaced by "true". (It is already present, as H23). --- Hypothesis H26 has been replaced by "true". (It is already present, as H24). --- Hypothesis H39 has been replaced by "true". (It is already present, as H37). --- Hypothesis H40 has been replaced by "true". (It is already present, as H38). --- Hypothesis H43 has been replaced by "true". (It is already present, as H31). --- Hypothesis H44 has been replaced by "true". (It is already present, as H32). --- Hypothesis H51 has been replaced by "true". (It is already present, as H49). --- Hypothesis H52 has been replaced by "true". (It is already present, as H50). --- Hypothesis H55 has been replaced by "true". (It is already present, as H31). --- Hypothesis H56 has been replaced by "true". (It is already present, as H32). --- Hypothesis H63 has been replaced by "true". (It is already present, as H61). --- Hypothesis H64 has been replaced by "true". (It is already present, as H62). --- Hypothesis H67 has been replaced by "true". (It is already present, as H31). --- Hypothesis H68 has been replaced by "true". (It is already present, as H32). --- Hypothesis H75 has been replaced by "true". (It is already present, as H73). --- Hypothesis H76 has been replaced by "true". (It is already present, as H74). --- Hypothesis H79 has been replaced by "true". (It is already present, as H31). --- Hypothesis H80 has been replaced by "true". (It is already present, as H32). --- Hypothesis H87 has been replaced by "true". (It is already present, as H85). --- Hypothesis H88 has been replaced by "true". (It is already present, as H86). --- Hypothesis H91 has been replaced by "true". (It is already present, as H31). --- Hypothesis H92 has been replaced by "true". (It is already present, as H32). --- Hypothesis H99 has been replaced by "true". (It is already present, as H97). --- Hypothesis H100 has been replaced by "true". (It is already present, as H98). --- Hypothesis H103 has been replaced by "true". (It is already present, as H31). --- Hypothesis H104 has been replaced by "true". (It is already present, as H32). --- Hypothesis H111 has been replaced by "true". (It is already present, as H109). --- Hypothesis H112 has been replaced by "true". (It is already present, as H110). %%% Simplified C1 on reading formula in, to give: %%% C1: fld_hash_bit_len(fld_h(local_ctx)) > 0 *** Proved C1: fld_hash_bit_len(fld_h(local_ctx)) > 0 using hypothesis H1. *** PROVED VC. 
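Aside: every conclusion discharged in the set_b_counter VCs above has the shape bit__and(spark__unsigned__shift_right_64(counter, N), 255) in 0 .. 255, i.e. a shift-and-mask byte extraction from the 64-bit counter. The following is a minimal, hypothetical Ada sketch of that pattern (illustrative names only, not the package's actual body), showing why the masked value is always in range regardless of the counter's value:

with Interfaces; use Interfaces;

procedure Byte_Extract_Sketch is
   --  Mirrors bit__and (shift_right_64 (counter, 8 * Shift), 255) from the
   --  simplifier output above.  Masking with 255 bounds the result to
   --  0 .. 255 for any Counter, which is why C1 .. C4 are provable.
   Counter : constant Unsigned_64 := 16#0123_4567_89AB_CDEF#;
   B       : Unsigned_64;
begin
   for Shift in 0 .. 7 loop
      B := Shift_Right (Counter, 8 * Shift) and 255;
      pragma Assert (B <= 255);
   end loop;
end Byte_Extract_Sketch;

Under that reading, the substitution rules applied above (52/53 for the byte bounds, 96/97 for the block index bounds) merely supply the literal values 0, 255 and 0, 63 that make the masked expressions and index checks trivially true.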
spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final/set_b_counter.vsm0000644000175000017500000000006111712765060031375 0ustar eugeneugenset_b_counter,0,0,0,0,0,0,0,-nan,-nan,-nan,-nan, spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final/zero_pad_b.vct0000644000175000017500000000000011712513676030632 0ustar eugeneugenspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_final/set_b_counter.rls0000644000175000017500000002266011712513676031405 0ustar eugeneugen /*********************************************************/ /*Proof Rule Declarations*/ /*Examiner Pro Edition*/ /*********************************************************/ /*procedure Skein.Skein_512_Final.Set_B_Counter*/ rule_family set_b_counte_rules: X requires [X:any] & X <= Y requires [X:ire, Y:ire] & X >= Y requires [X:ire, Y:ire]. set_b_counte_rules(1): integer__size >= 0 may_be_deduced. set_b_counte_rules(2): integer__first may_be_replaced_by -2147483648. set_b_counte_rules(3): integer__last may_be_replaced_by 2147483647. set_b_counte_rules(4): integer__base__first may_be_replaced_by -2147483648. set_b_counte_rules(5): integer__base__last may_be_replaced_by 2147483647. set_b_counte_rules(6): natural__size >= 0 may_be_deduced. set_b_counte_rules(7): natural__first may_be_replaced_by 0. set_b_counte_rules(8): natural__last may_be_replaced_by 2147483647. set_b_counte_rules(9): natural__base__first may_be_replaced_by -2147483648. set_b_counte_rules(10): natural__base__last may_be_replaced_by 2147483647. set_b_counte_rules(11): interfaces__unsigned_8__size >= 0 may_be_deduced. set_b_counte_rules(12): interfaces__unsigned_8__size may_be_replaced_by 8. set_b_counte_rules(13): interfaces__unsigned_8__first may_be_replaced_by 0. set_b_counte_rules(14): interfaces__unsigned_8__last may_be_replaced_by 255. set_b_counte_rules(15): interfaces__unsigned_8__base__first may_be_replaced_by 0. set_b_counte_rules(16): interfaces__unsigned_8__base__last may_be_replaced_by 255. set_b_counte_rules(17): interfaces__unsigned_8__modulus may_be_replaced_by 256. set_b_counte_rules(18): interfaces__unsigned_16__size >= 0 may_be_deduced. set_b_counte_rules(19): interfaces__unsigned_16__size may_be_replaced_by 16. set_b_counte_rules(20): interfaces__unsigned_16__first may_be_replaced_by 0. set_b_counte_rules(21): interfaces__unsigned_16__last may_be_replaced_by 65535. set_b_counte_rules(22): interfaces__unsigned_16__base__first may_be_replaced_by 0. set_b_counte_rules(23): interfaces__unsigned_16__base__last may_be_replaced_by 65535. set_b_counte_rules(24): interfaces__unsigned_16__modulus may_be_replaced_by 65536. set_b_counte_rules(25): interfaces__unsigned_32__size >= 0 may_be_deduced. set_b_counte_rules(26): interfaces__unsigned_32__size may_be_replaced_by 32. set_b_counte_rules(27): interfaces__unsigned_32__first may_be_replaced_by 0. set_b_counte_rules(28): interfaces__unsigned_32__last may_be_replaced_by 4294967295. set_b_counte_rules(29): interfaces__unsigned_32__base__first may_be_replaced_by 0. set_b_counte_rules(30): interfaces__unsigned_32__base__last may_be_replaced_by 4294967295. set_b_counte_rules(31): interfaces__unsigned_32__modulus may_be_replaced_by 4294967296. set_b_counte_rules(32): interfaces__unsigned_64__size >= 0 may_be_deduced. set_b_counte_rules(33): interfaces__unsigned_64__size may_be_replaced_by 64. set_b_counte_rules(34): interfaces__unsigned_64__first may_be_replaced_by 0. 
set_b_counte_rules(35): interfaces__unsigned_64__last may_be_replaced_by 18446744073709551615. set_b_counte_rules(36): interfaces__unsigned_64__base__first may_be_replaced_by 0. set_b_counte_rules(37): interfaces__unsigned_64__base__last may_be_replaced_by 18446744073709551615. set_b_counte_rules(38): interfaces__unsigned_64__modulus may_be_replaced_by 18446744073709551616. set_b_counte_rules(39): spark__unsigned__u6__size >= 0 may_be_deduced. set_b_counte_rules(40): spark__unsigned__u6__first may_be_replaced_by 0. set_b_counte_rules(41): spark__unsigned__u6__last may_be_replaced_by 63. set_b_counte_rules(42): spark__unsigned__u6__base__first may_be_replaced_by 0. set_b_counte_rules(43): spark__unsigned__u6__base__last may_be_replaced_by 63. set_b_counte_rules(44): spark__unsigned__u6__modulus may_be_replaced_by 64. set_b_counte_rules(45): spark__unsigned__u7__size >= 0 may_be_deduced. set_b_counte_rules(46): spark__unsigned__u7__first may_be_replaced_by 0. set_b_counte_rules(47): spark__unsigned__u7__last may_be_replaced_by 127. set_b_counte_rules(48): spark__unsigned__u7__base__first may_be_replaced_by 0. set_b_counte_rules(49): spark__unsigned__u7__base__last may_be_replaced_by 127. set_b_counte_rules(50): spark__unsigned__u7__modulus may_be_replaced_by 128. set_b_counte_rules(51): spark__unsigned__byte__size >= 0 may_be_deduced. set_b_counte_rules(52): spark__unsigned__byte__first may_be_replaced_by 0. set_b_counte_rules(53): spark__unsigned__byte__last may_be_replaced_by 255. set_b_counte_rules(54): spark__unsigned__byte__base__first may_be_replaced_by 0. set_b_counte_rules(55): spark__unsigned__byte__base__last may_be_replaced_by 255. set_b_counte_rules(56): spark__unsigned__byte__modulus may_be_replaced_by 256. set_b_counte_rules(57): spark__unsigned__u16__size >= 0 may_be_deduced. set_b_counte_rules(58): spark__unsigned__u16__first may_be_replaced_by 0. set_b_counte_rules(59): spark__unsigned__u16__last may_be_replaced_by 65535. set_b_counte_rules(60): spark__unsigned__u16__base__first may_be_replaced_by 0. set_b_counte_rules(61): spark__unsigned__u16__base__last may_be_replaced_by 65535. set_b_counte_rules(62): spark__unsigned__u16__modulus may_be_replaced_by 65536. set_b_counte_rules(63): spark__unsigned__u32__size >= 0 may_be_deduced. set_b_counte_rules(64): spark__unsigned__u32__first may_be_replaced_by 0. set_b_counte_rules(65): spark__unsigned__u32__last may_be_replaced_by 4294967295. set_b_counte_rules(66): spark__unsigned__u32__base__first may_be_replaced_by 0. set_b_counte_rules(67): spark__unsigned__u32__base__last may_be_replaced_by 4294967295. set_b_counte_rules(68): spark__unsigned__u32__modulus may_be_replaced_by 4294967296. set_b_counte_rules(69): spark__unsigned__u64__size >= 0 may_be_deduced. set_b_counte_rules(70): spark__unsigned__u64__first may_be_replaced_by 0. set_b_counte_rules(71): spark__unsigned__u64__last may_be_replaced_by 18446744073709551615. set_b_counte_rules(72): spark__unsigned__u64__base__first may_be_replaced_by 0. set_b_counte_rules(73): spark__unsigned__u64__base__last may_be_replaced_by 18446744073709551615. set_b_counte_rules(74): spark__unsigned__u64__modulus may_be_replaced_by 18446744073709551616. set_b_counte_rules(75): spark__unsigned__shift_count__size >= 0 may_be_deduced. set_b_counte_rules(76): spark__unsigned__shift_count__first may_be_replaced_by 0. set_b_counte_rules(77): spark__unsigned__shift_count__last may_be_replaced_by 64. set_b_counte_rules(78): spark__unsigned__shift_count__base__first may_be_replaced_by -2147483648. 
set_b_counte_rules(79): spark__unsigned__shift_count__base__last may_be_replaced_by 2147483647. set_b_counte_rules(80): spark__crypto__word_count_t__size >= 0 may_be_deduced. set_b_counte_rules(81): spark__crypto__word_count_t__first may_be_replaced_by 0. set_b_counte_rules(82): spark__crypto__word_count_t__last may_be_replaced_by 268435455. set_b_counte_rules(83): spark__crypto__word_count_t__base__first may_be_replaced_by -2147483648. set_b_counte_rules(84): spark__crypto__word_count_t__base__last may_be_replaced_by 2147483647. set_b_counte_rules(85): hash_bit_length__size >= 0 may_be_deduced. set_b_counte_rules(86): hash_bit_length__first may_be_replaced_by 0. set_b_counte_rules(87): hash_bit_length__last may_be_replaced_by 2147483640. set_b_counte_rules(88): hash_bit_length__base__first may_be_replaced_by -2147483648. set_b_counte_rules(89): hash_bit_length__base__last may_be_replaced_by 2147483647. set_b_counte_rules(90): skein_512_state_words_index__size >= 0 may_be_deduced. set_b_counte_rules(91): skein_512_state_words_index__first may_be_replaced_by 0. set_b_counte_rules(92): skein_512_state_words_index__last may_be_replaced_by 7. set_b_counte_rules(93): skein_512_state_words_index__base__first may_be_replaced_by -2147483648. set_b_counte_rules(94): skein_512_state_words_index__base__last may_be_replaced_by 2147483647. set_b_counte_rules(95): skein_512_block_bytes_index__size >= 0 may_be_deduced. set_b_counte_rules(96): skein_512_block_bytes_index__first may_be_replaced_by 0. set_b_counte_rules(97): skein_512_block_bytes_index__last may_be_replaced_by 63. set_b_counte_rules(98): skein_512_block_bytes_index__base__first may_be_replaced_by -2147483648. set_b_counte_rules(99): skein_512_block_bytes_index__base__last may_be_replaced_by 2147483647. set_b_counte_rules(100): skein_512_context__size >= 0 may_be_deduced. set_b_counte_rules(101): A = B may_be_deduced_from [goal(checktype(A,skein_512_context)), goal(checktype(B,skein_512_context)), fld_h(A) = fld_h(B), fld_x(A) = fld_x(B), fld_b(A) = fld_b(B)]. set_b_counte_rules(102): tweak_value__size >= 0 may_be_deduced. set_b_counte_rules(103): tweak_value__size may_be_replaced_by 128. set_b_counte_rules(104): A = B may_be_deduced_from [goal(checktype(A,tweak_value)), goal(checktype(B,tweak_value)), fld_byte_count_lsb(A) = fld_byte_count_lsb(B), fld_byte_count_msb(A) = fld_byte_count_msb(B), fld_reserved(A) = fld_reserved(B), fld_tree_level(A) = fld_tree_level(B), fld_bit_pad(A) = fld_bit_pad(B), fld_field_type(A) = fld_field_type(B), fld_first_block(A) = fld_first_block(B), fld_final_block(A) = fld_final_block(B)]. set_b_counte_rules(105): context_header__size >= 0 may_be_deduced. set_b_counte_rules(106): A = B may_be_deduced_from [goal(checktype(A,context_header)), goal(checktype(B,context_header)), fld_tweak_words(A) = fld_tweak_words(B), fld_hash_bit_len(A) = fld_hash_bit_len(B), fld_byte_count(A) = fld_byte_count(B)]. 
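The may_be_replaced_by rules in this family are generated from the subtype declarations of the SPARK source. As a rough reconstruction for illustration only (the bounds are taken from the literals substituted by rules 52/53, 91/92 and 96/97; the actual Skein source may declare these differently):

--  Hypothetical reconstruction of the subtypes behind the replacement rules.
with Interfaces;

package Subtype_Sketch is
   subtype Byte is Interfaces.Unsigned_8;                     --  0 .. 255, rules 52/53
   subtype Skein_512_State_Words_Index is Natural range 0 .. 7;   --  rules 91/92
   subtype Skein_512_Block_Bytes_Index is Natural range 0 .. 63;  --  rules 96/97
end Subtype_Sketch;

With those bounds in scope, each hypothesis of the form X__first <= E and E <= X__last in the logs above reduces to a concrete interval check once the Simplifier has applied the corresponding rule.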
spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_update.vlg0000644000175000017500000000327011712765060026723 0ustar eugeneugen Non-option args: skein_512_update Option args: -fuse-concls \ -decls=[SUPRESSED IN PLAIN MODE] \ -unique-working-files \ -rules=[SUPRESSED IN PLAIN MODE] \ -rules=[SUPRESSED IN PLAIN MODE] \ -elim-enums \ -ground-eval-exp \ -abstract-exp \ -abstract-divmod \ -utick \ -gtick \ -longtick \ -echo-final-stats \ -csv-reports-include-unit-kind \ -level=warning \ -bit-type \ -bit-type-bool-eq-to-iff \ -refine-types \ -refine-int-subrange-type \ -abstract-arrays-records-late \ -elim-array-constructors \ -add-array-select-box-update-axioms \ -abstract-array-box-updates \ -add-array-select-update-axioms \ -abstract-array-select-updates \ -abstract-array-types \ -abstract-record-types \ -abstract-bit-ops \ -abstract-bit-valued-eqs \ -abstract-bit-valued-int-le \ -elim-bit-type-and-consts \ -abstract-reals \ -lift-quants \ -strip-quantifier-patterns \ -elim-type-aliases \ -interface-mode=smtlib \ -refine-bit-type-as-int-subtype \ -refine-bit-eq-equiv \ -elim-record-constructors \ -add-record-select-update-axioms \ -abstract-record-selects-updates \ -logic=AUFNIRA \ -report=skein_512_update \ -prover-command=[SUPRESSED IN PLAIN MODE] \ -siv \ -plain \ -read-unit-rlu-files \ -read-directory-rlu-files \ -delete-rules-with-undeclared-ids \ Total ERROR messages: 0 Total WARNING messages: 0 Summary Stats: true: 3 (100.0%) unproven: 0 ( 0.0%) error: 0 ( 0.0%) total: 3 spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_hash.rls0000644000175000017500000002547611712513676026414 0ustar eugeneugen /*********************************************************/ /*Proof Rule Declarations*/ /*Examiner Pro Edition*/ /*********************************************************/ /*function Skein.Skein_512_Hash*/ rule_family skein_512_ha_rules: X requires [X:any] & X <= Y requires [X:ire, Y:ire] & X >= Y requires [X:ire, Y:ire]. skein_512_ha_rules(1): skein_512_block_bytes_c may_be_replaced_by 64. skein_512_ha_rules(2): integer__size >= 0 may_be_deduced. skein_512_ha_rules(3): integer__first may_be_replaced_by -2147483648. skein_512_ha_rules(4): integer__last may_be_replaced_by 2147483647. skein_512_ha_rules(5): integer__base__first may_be_replaced_by -2147483648. skein_512_ha_rules(6): integer__base__last may_be_replaced_by 2147483647. skein_512_ha_rules(7): natural__size >= 0 may_be_deduced. skein_512_ha_rules(8): natural__first may_be_replaced_by 0. skein_512_ha_rules(9): natural__last may_be_replaced_by 2147483647. skein_512_ha_rules(10): natural__base__first may_be_replaced_by -2147483648. skein_512_ha_rules(11): natural__base__last may_be_replaced_by 2147483647. skein_512_ha_rules(12): interfaces__unsigned_8__size >= 0 may_be_deduced. skein_512_ha_rules(13): interfaces__unsigned_8__size may_be_replaced_by 8. skein_512_ha_rules(14): interfaces__unsigned_8__first may_be_replaced_by 0. skein_512_ha_rules(15): interfaces__unsigned_8__last may_be_replaced_by 255. skein_512_ha_rules(16): interfaces__unsigned_8__base__first may_be_replaced_by 0. skein_512_ha_rules(17): interfaces__unsigned_8__base__last may_be_replaced_by 255. skein_512_ha_rules(18): interfaces__unsigned_8__modulus may_be_replaced_by 256. skein_512_ha_rules(19): interfaces__unsigned_16__size >= 0 may_be_deduced. skein_512_ha_rules(20): interfaces__unsigned_16__size may_be_replaced_by 16. skein_512_ha_rules(21): interfaces__unsigned_16__first may_be_replaced_by 0. 
skein_512_ha_rules(22): interfaces__unsigned_16__last may_be_replaced_by 65535. skein_512_ha_rules(23): interfaces__unsigned_16__base__first may_be_replaced_by 0. skein_512_ha_rules(24): interfaces__unsigned_16__base__last may_be_replaced_by 65535. skein_512_ha_rules(25): interfaces__unsigned_16__modulus may_be_replaced_by 65536. skein_512_ha_rules(26): interfaces__unsigned_32__size >= 0 may_be_deduced. skein_512_ha_rules(27): interfaces__unsigned_32__size may_be_replaced_by 32. skein_512_ha_rules(28): interfaces__unsigned_32__first may_be_replaced_by 0. skein_512_ha_rules(29): interfaces__unsigned_32__last may_be_replaced_by 4294967295. skein_512_ha_rules(30): interfaces__unsigned_32__base__first may_be_replaced_by 0. skein_512_ha_rules(31): interfaces__unsigned_32__base__last may_be_replaced_by 4294967295. skein_512_ha_rules(32): interfaces__unsigned_32__modulus may_be_replaced_by 4294967296. skein_512_ha_rules(33): interfaces__unsigned_64__size >= 0 may_be_deduced. skein_512_ha_rules(34): interfaces__unsigned_64__size may_be_replaced_by 64. skein_512_ha_rules(35): interfaces__unsigned_64__first may_be_replaced_by 0. skein_512_ha_rules(36): interfaces__unsigned_64__last may_be_replaced_by 18446744073709551615. skein_512_ha_rules(37): interfaces__unsigned_64__base__first may_be_replaced_by 0. skein_512_ha_rules(38): interfaces__unsigned_64__base__last may_be_replaced_by 18446744073709551615. skein_512_ha_rules(39): interfaces__unsigned_64__modulus may_be_replaced_by 18446744073709551616. skein_512_ha_rules(40): spark__unsigned__u6__size >= 0 may_be_deduced. skein_512_ha_rules(41): spark__unsigned__u6__first may_be_replaced_by 0. skein_512_ha_rules(42): spark__unsigned__u6__last may_be_replaced_by 63. skein_512_ha_rules(43): spark__unsigned__u6__base__first may_be_replaced_by 0. skein_512_ha_rules(44): spark__unsigned__u6__base__last may_be_replaced_by 63. skein_512_ha_rules(45): spark__unsigned__u6__modulus may_be_replaced_by 64. skein_512_ha_rules(46): spark__unsigned__u7__size >= 0 may_be_deduced. skein_512_ha_rules(47): spark__unsigned__u7__first may_be_replaced_by 0. skein_512_ha_rules(48): spark__unsigned__u7__last may_be_replaced_by 127. skein_512_ha_rules(49): spark__unsigned__u7__base__first may_be_replaced_by 0. skein_512_ha_rules(50): spark__unsigned__u7__base__last may_be_replaced_by 127. skein_512_ha_rules(51): spark__unsigned__u7__modulus may_be_replaced_by 128. skein_512_ha_rules(52): spark__unsigned__byte__size >= 0 may_be_deduced. skein_512_ha_rules(53): spark__unsigned__byte__first may_be_replaced_by 0. skein_512_ha_rules(54): spark__unsigned__byte__last may_be_replaced_by 255. skein_512_ha_rules(55): spark__unsigned__byte__base__first may_be_replaced_by 0. skein_512_ha_rules(56): spark__unsigned__byte__base__last may_be_replaced_by 255. skein_512_ha_rules(57): spark__unsigned__byte__modulus may_be_replaced_by 256. skein_512_ha_rules(58): spark__unsigned__u16__size >= 0 may_be_deduced. skein_512_ha_rules(59): spark__unsigned__u16__first may_be_replaced_by 0. skein_512_ha_rules(60): spark__unsigned__u16__last may_be_replaced_by 65535. skein_512_ha_rules(61): spark__unsigned__u16__base__first may_be_replaced_by 0. skein_512_ha_rules(62): spark__unsigned__u16__base__last may_be_replaced_by 65535. skein_512_ha_rules(63): spark__unsigned__u16__modulus may_be_replaced_by 65536. skein_512_ha_rules(64): spark__unsigned__u32__size >= 0 may_be_deduced. skein_512_ha_rules(65): spark__unsigned__u32__first may_be_replaced_by 0. 
skein_512_ha_rules(66): spark__unsigned__u32__last may_be_replaced_by 4294967295. skein_512_ha_rules(67): spark__unsigned__u32__base__first may_be_replaced_by 0. skein_512_ha_rules(68): spark__unsigned__u32__base__last may_be_replaced_by 4294967295. skein_512_ha_rules(69): spark__unsigned__u32__modulus may_be_replaced_by 4294967296. skein_512_ha_rules(70): spark__unsigned__u64__size >= 0 may_be_deduced. skein_512_ha_rules(71): spark__unsigned__u64__first may_be_replaced_by 0. skein_512_ha_rules(72): spark__unsigned__u64__last may_be_replaced_by 18446744073709551615. skein_512_ha_rules(73): spark__unsigned__u64__base__first may_be_replaced_by 0. skein_512_ha_rules(74): spark__unsigned__u64__base__last may_be_replaced_by 18446744073709551615. skein_512_ha_rules(75): spark__unsigned__u64__modulus may_be_replaced_by 18446744073709551616. skein_512_ha_rules(76): spark__crypto__word_count_t__size >= 0 may_be_deduced. skein_512_ha_rules(77): spark__crypto__word_count_t__first may_be_replaced_by 0. skein_512_ha_rules(78): spark__crypto__word_count_t__last may_be_replaced_by 268435455. skein_512_ha_rules(79): spark__crypto__word_count_t__base__first may_be_replaced_by -2147483648. skein_512_ha_rules(80): spark__crypto__word_count_t__base__last may_be_replaced_by 2147483647. skein_512_ha_rules(81): hash_bit_length__size >= 0 may_be_deduced. skein_512_ha_rules(82): hash_bit_length__first may_be_replaced_by 0. skein_512_ha_rules(83): hash_bit_length__last may_be_replaced_by 2147483640. skein_512_ha_rules(84): hash_bit_length__base__first may_be_replaced_by -2147483648. skein_512_ha_rules(85): hash_bit_length__base__last may_be_replaced_by 2147483647. skein_512_ha_rules(86): initialized_hash_bit_length__size >= 0 may_be_deduced. skein_512_ha_rules(87): initialized_hash_bit_length__first may_be_replaced_by 1. skein_512_ha_rules(88): initialized_hash_bit_length__last may_be_replaced_by 2147483640. skein_512_ha_rules(89): initialized_hash_bit_length__base__first may_be_replaced_by -2147483648. skein_512_ha_rules(90): initialized_hash_bit_length__base__last may_be_replaced_by 2147483647. skein_512_ha_rules(91): skein_512_state_words_index__size >= 0 may_be_deduced. skein_512_ha_rules(92): skein_512_state_words_index__first may_be_replaced_by 0. skein_512_ha_rules(93): skein_512_state_words_index__last may_be_replaced_by 7. skein_512_ha_rules(94): skein_512_state_words_index__base__first may_be_replaced_by -2147483648. skein_512_ha_rules(95): skein_512_state_words_index__base__last may_be_replaced_by 2147483647. skein_512_ha_rules(96): skein_512_block_bytes_count__size >= 0 may_be_deduced. skein_512_ha_rules(97): skein_512_block_bytes_count__first may_be_replaced_by 0. skein_512_ha_rules(98): skein_512_block_bytes_count__last may_be_replaced_by 64. skein_512_ha_rules(99): skein_512_block_bytes_count__base__first may_be_replaced_by -2147483648. skein_512_ha_rules(100): skein_512_block_bytes_count__base__last may_be_replaced_by 2147483647. skein_512_ha_rules(101): skein_512_block_bytes_index__size >= 0 may_be_deduced. skein_512_ha_rules(102): skein_512_block_bytes_index__first may_be_replaced_by 0. skein_512_ha_rules(103): skein_512_block_bytes_index__last may_be_replaced_by 63. skein_512_ha_rules(104): skein_512_block_bytes_index__base__first may_be_replaced_by -2147483648. skein_512_ha_rules(105): skein_512_block_bytes_index__base__last may_be_replaced_by 2147483647. skein_512_ha_rules(106): skein_512_state_bytes_index__size >= 0 may_be_deduced. 
skein_512_ha_rules(107): skein_512_state_bytes_index__first may_be_replaced_by 0. skein_512_ha_rules(108): skein_512_state_bytes_index__last may_be_replaced_by 63. skein_512_ha_rules(109): skein_512_state_bytes_index__base__first may_be_replaced_by -2147483648. skein_512_ha_rules(110): skein_512_state_bytes_index__base__last may_be_replaced_by 2147483647. skein_512_ha_rules(111): skein_512_context__size >= 0 may_be_deduced. skein_512_ha_rules(112): A = B may_be_deduced_from [goal(checktype(A,skein_512_context)), goal(checktype(B,skein_512_context)), fld_h(A) = fld_h(B), fld_x(A) = fld_x(B), fld_b(A) = fld_b(B)]. skein_512_ha_rules(113): data__index__subtype__1__first >= natural__first may_be_deduced. skein_512_ha_rules(114): data__index__subtype__1__last <= natural__last may_be_deduced. skein_512_ha_rules(115): data__index__subtype__1__first <= data__index__subtype__1__last may_be_deduced. skein_512_ha_rules(116): data__index__subtype__1__last >= natural__first may_be_deduced. skein_512_ha_rules(117): data__index__subtype__1__first <= natural__last may_be_deduced. skein_512_ha_rules(118): tweak_value__size >= 0 may_be_deduced. skein_512_ha_rules(119): tweak_value__size may_be_replaced_by 128. skein_512_ha_rules(120): A = B may_be_deduced_from [goal(checktype(A,tweak_value)), goal(checktype(B,tweak_value)), fld_byte_count_lsb(A) = fld_byte_count_lsb(B), fld_byte_count_msb(A) = fld_byte_count_msb(B), fld_reserved(A) = fld_reserved(B), fld_tree_level(A) = fld_tree_level(B), fld_bit_pad(A) = fld_bit_pad(B), fld_field_type(A) = fld_field_type(B), fld_first_block(A) = fld_first_block(B), fld_final_block(A) = fld_final_block(B)]. skein_512_ha_rules(121): context_header__size >= 0 may_be_deduced. skein_512_ha_rules(122): A = B may_be_deduced_from [goal(checktype(A,context_header)), goal(checktype(B,context_header)), fld_tweak_words(A) = fld_tweak_words(B), fld_hash_bit_len(A) = fld_hash_bit_len(B), fld_byte_count(A) = fld_byte_count(B)]. 
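Rules 112, 120 and 122 encode field-wise equality for the record types involved, and the FDL file further down declares the same shape. A rough Ada sketch of the record that rule 112 describes (field names follow the fld_* selectors; the actual declaration may differ, and the tweak-word fields of rule 120 are omitted for brevity):

--  Sketch only: mirrors the skein_512_context record of the FDL below,
--  whose equality (rule 112) is deduced field by field (h, x, b).
with Interfaces;

package Context_Sketch is
   type U64_Seq  is array (Natural range <>) of Interfaces.Unsigned_64;
   type Byte_Seq is array (Natural range <>) of Interfaces.Unsigned_8;

   type Context_Header is record
      Hash_Bit_Len : Natural;
      Byte_Count   : Natural;
      --  Tweak_Words omitted; rule 120 lists its individual fields.
   end record;

   type Skein_512_Context is record
      H : Context_Header;
      X : U64_Seq (0 .. 7);    --  skein_512_state_words_index is 0 .. 7
      B : Byte_Seq (0 .. 63);  --  skein_512_block_bytes_index is 0 .. 63
   end record;
end Context_Sketch;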
spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_process_block.vsm0000644000175000017500000000007411712765060030305 0ustar eugeneugenskein_512_process_block,0,0,2,2,0,0,0,100.0, 0.0, 0.0, 0.0, spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_init.fdl0000644000175000017500000002413011712513676026363 0ustar eugeneugen {*******************************************************} {FDL Declarations} {Examiner Pro Edition} {*******************************************************} {procedure Skein.Skein_512_Init} title procedure skein_512_init; function round__(real) : integer; type natural = integer; type interfaces__unsigned_8 = integer; type interfaces__unsigned_16 = integer; type interfaces__unsigned_32 = integer; type interfaces__unsigned_64 = integer; type spark__unsigned__u6 = integer; type spark__unsigned__u7 = integer; type spark__unsigned__byte = integer; type spark__unsigned__u16 = integer; type spark__unsigned__u32 = integer; type spark__unsigned__u64 = integer; type spark__crypto__word_count_t = integer; type hash_bit_length = integer; type initialized_hash_bit_length = integer; type skein_512_state_words_index = integer; type skein_512_block_bytes_count = integer; type skein_512_block_bytes_index = integer; type skein_512_state_bytes_index = integer; type positive_block_512_count_t = integer; type spark__crypto__byte_seq = array [natural] of spark__unsigned__byte; type spark__crypto__u64_seq = array [ spark__crypto__word_count_t] of spark__unsigned__u64; type tweak_value = record byte_count_lsb : spark__unsigned__u64; byte_count_msb : spark__unsigned__u32; reserved : spark__unsigned__u16; tree_level : spark__unsigned__u7; bit_pad : boolean; field_type : spark__unsigned__u6; first_block : boolean; final_block : boolean end; type context_header = record tweak_words : tweak_value; hash_bit_len : hash_bit_length; byte_count : natural end; type skein_512_context = record h : context_header; x : spark__crypto__u64_seq; b : spark__crypto__byte_seq end; const skein_512_block_bytes_c : integer = pending; const skein_block_type_cfg : spark__unsigned__u6 = pending; const skein_block_type_msg : spark__unsigned__u6 = pending; const null_skein_512_context : skein_512_context = pending; const skein_schema_ver : interfaces__unsigned_64 = pending; const skein_cfg_tree_info_sequential : integer = pending; const skein_cfg_str_len : integer = pending; const positive_block_512_count_t__base__first : integer = pending; const positive_block_512_count_t__base__last : integer = pending; const skein_512_state_bytes_index__base__first : integer = pending; const skein_512_state_bytes_index__base__last : integer = pending; const skein_512_block_bytes_index__base__first : integer = pending; const skein_512_block_bytes_index__base__last : integer = pending; const skein_512_block_bytes_count__base__first : integer = pending; const skein_512_block_bytes_count__base__last : integer = pending; const skein_512_state_words_index__base__first : integer = pending; const skein_512_state_words_index__base__last : integer = pending; const initialized_hash_bit_length__base__first : integer = pending; const initialized_hash_bit_length__base__last : integer = pending; const hash_bit_length__base__first : integer = pending; const hash_bit_length__base__last : integer = pending; const spark__crypto__word_count_t__base__first : integer = pending; const spark__crypto__word_count_t__base__last : integer = pending; const spark__unsigned__u64__base__first : interfaces__unsigned_64 = pending; 
const spark__unsigned__u64__base__last : interfaces__unsigned_64 = pending; const spark__unsigned__u32__base__first : interfaces__unsigned_32 = pending; const spark__unsigned__u32__base__last : interfaces__unsigned_32 = pending; const spark__unsigned__u16__base__first : interfaces__unsigned_16 = pending; const spark__unsigned__u16__base__last : interfaces__unsigned_16 = pending; const spark__unsigned__byte__base__first : interfaces__unsigned_8 = pending; const spark__unsigned__byte__base__last : interfaces__unsigned_8 = pending; const spark__unsigned__u7__base__first : spark__unsigned__u7 = pending; const spark__unsigned__u7__base__last : spark__unsigned__u7 = pending; const spark__unsigned__u6__base__first : spark__unsigned__u6 = pending; const spark__unsigned__u6__base__last : spark__unsigned__u6 = pending; const interfaces__unsigned_64__base__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_32__base__first : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__base__last : interfaces__unsigned_32 = pending; const interfaces__unsigned_16__base__first : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__base__last : interfaces__unsigned_16 = pending; const interfaces__unsigned_8__base__first : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__base__last : interfaces__unsigned_8 = pending; const natural__base__first : integer = pending; const natural__base__last : integer = pending; const integer__base__first : integer = pending; const integer__base__last : integer = pending; const context_header__size : integer = pending; const tweak_value__size : integer = pending; const skein_512_context__size : integer = pending; const positive_block_512_count_t__first : integer = pending; const positive_block_512_count_t__last : integer = pending; const positive_block_512_count_t__size : integer = pending; const skein_512_state_bytes_index__first : integer = pending; const skein_512_state_bytes_index__last : integer = pending; const skein_512_state_bytes_index__size : integer = pending; const skein_512_block_bytes_index__first : integer = pending; const skein_512_block_bytes_index__last : integer = pending; const skein_512_block_bytes_index__size : integer = pending; const skein_512_block_bytes_count__first : integer = pending; const skein_512_block_bytes_count__last : integer = pending; const skein_512_block_bytes_count__size : integer = pending; const skein_512_state_words_index__first : integer = pending; const skein_512_state_words_index__last : integer = pending; const skein_512_state_words_index__size : integer = pending; const initialized_hash_bit_length__first : integer = pending; const initialized_hash_bit_length__last : integer = pending; const initialized_hash_bit_length__size : integer = pending; const hash_bit_length__first : integer = pending; const hash_bit_length__last : integer = pending; const hash_bit_length__size : integer = pending; const spark__crypto__word_count_t__first : integer = pending; const spark__crypto__word_count_t__last : integer = pending; const spark__crypto__word_count_t__size : integer = pending; const spark__unsigned__u64__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__last : interfaces__unsigned_64 = pending; const spark__unsigned__u64__modulus : integer = pending; const spark__unsigned__u64__size : integer = pending; const spark__unsigned__u32__first : interfaces__unsigned_32 = pending; const 
spark__unsigned__u32__last : interfaces__unsigned_32 = pending; const spark__unsigned__u32__modulus : integer = pending; const spark__unsigned__u32__size : integer = pending; const spark__unsigned__u16__first : interfaces__unsigned_16 = pending; const spark__unsigned__u16__last : interfaces__unsigned_16 = pending; const spark__unsigned__u16__modulus : integer = pending; const spark__unsigned__u16__size : integer = pending; const spark__unsigned__byte__first : interfaces__unsigned_8 = pending; const spark__unsigned__byte__last : interfaces__unsigned_8 = pending; const spark__unsigned__byte__modulus : integer = pending; const spark__unsigned__byte__size : integer = pending; const spark__unsigned__u7__first : spark__unsigned__u7 = pending; const spark__unsigned__u7__last : spark__unsigned__u7 = pending; const spark__unsigned__u7__modulus : integer = pending; const spark__unsigned__u7__size : integer = pending; const spark__unsigned__u6__first : spark__unsigned__u6 = pending; const spark__unsigned__u6__last : spark__unsigned__u6 = pending; const spark__unsigned__u6__modulus : integer = pending; const spark__unsigned__u6__size : integer = pending; const interfaces__unsigned_64__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__modulus : integer = pending; const interfaces__unsigned_64__size : integer = pending; const interfaces__unsigned_32__first : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__last : interfaces__unsigned_32 = pending; const interfaces__unsigned_32__modulus : integer = pending; const interfaces__unsigned_32__size : integer = pending; const interfaces__unsigned_16__first : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__last : interfaces__unsigned_16 = pending; const interfaces__unsigned_16__modulus : integer = pending; const interfaces__unsigned_16__size : integer = pending; const interfaces__unsigned_8__first : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__last : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__modulus : integer = pending; const interfaces__unsigned_8__size : integer = pending; const natural__first : integer = pending; const natural__last : integer = pending; const natural__size : integer = pending; const integer__first : integer = pending; const integer__last : integer = pending; const integer__size : integer = pending; var ctx : skein_512_context; var hashbitlen : integer; function spark__unsigned__to_littleendian( interfaces__unsigned_64) : interfaces__unsigned_64; function hash_bit_len_of(skein_512_context) : integer; function byte_count_of(skein_512_context) : integer; var ctx__3 : skein_512_context; var ctx__2 : skein_512_context; var ctx__1 : skein_512_context; end; spark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/skein_512_init.vct0000644000175000017500000000000011712513676026400 0ustar eugeneugenspark-2012.0.deb/lib/spark/current/spark_/crypto_/hash_/skein/get_64_lsb_first.fdl0000644000175000017500000001023311712513676026776 0ustar eugeneugen {*******************************************************} {FDL Declarations} {Examiner Pro Edition} {*******************************************************} {procedure Skein.Get_64_LSB_First} title procedure get_64_lsb_first; function round__(real) : integer; type natural = integer; type interfaces__unsigned_8 = integer; type interfaces__unsigned_64 = integer; type spark__unsigned__byte = integer; type spark__unsigned__u64 = integer; 
type spark__unsigned__shift_count = integer; type spark__crypto__word_count_t = integer; type spark__crypto__byte_seq = array [natural] of spark__unsigned__byte; type spark__crypto__u64_seq = array [ spark__crypto__word_count_t] of spark__unsigned__u64; const spark__crypto__word_count_t__base__first : integer = pending; const spark__crypto__word_count_t__base__last : integer = pending; const spark__unsigned__shift_count__base__first : integer = pending; const spark__unsigned__shift_count__base__last : integer = pending; const spark__unsigned__u64__base__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__base__last : interfaces__unsigned_64 = pending; const spark__unsigned__byte__base__first : interfaces__unsigned_8 = pending; const spark__unsigned__byte__base__last : interfaces__unsigned_8 = pending; const interfaces__unsigned_64__base__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__base__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_8__base__first : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__base__last : interfaces__unsigned_8 = pending; const natural__base__first : integer = pending; const natural__base__last : integer = pending; const integer__base__first : integer = pending; const integer__base__last : integer = pending; const src__index__subtype__1__first : integer = pending; const src__index__subtype__1__last : integer = pending; const dst__index__subtype__1__first : integer = pending; const dst__index__subtype__1__last : integer = pending; const spark__crypto__word_count_t__first : integer = pending; const spark__crypto__word_count_t__last : integer = pending; const spark__crypto__word_count_t__size : integer = pending; const spark__unsigned__shift_count__first : integer = pending; const spark__unsigned__shift_count__last : integer = pending; const spark__unsigned__shift_count__size : integer = pending; const spark__unsigned__u64__first : interfaces__unsigned_64 = pending; const spark__unsigned__u64__last : interfaces__unsigned_64 = pending; const spark__unsigned__u64__modulus : integer = pending; const spark__unsigned__u64__size : integer = pending; const spark__unsigned__byte__first : interfaces__unsigned_8 = pending; const spark__unsigned__byte__last : interfaces__unsigned_8 = pending; const spark__unsigned__byte__modulus : integer = pending; const spark__unsigned__byte__size : integer = pending; const interfaces__unsigned_64__first : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__last : interfaces__unsigned_64 = pending; const interfaces__unsigned_64__modulus : integer = pending; const interfaces__unsigned_64__size : integer = pending; const interfaces__unsigned_8__first : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__last : interfaces__unsigned_8 = pending; const interfaces__unsigned_8__modulus : integer = pending; const interfaces__unsigned_8__size : integer = pending; const natural__first : integer = pending; const natural__last : integer = pending; const natural__size : integer = pending; const integer__first : integer = pending; const integer__last : integer = pending; const integer__size : integer = pending; var dst : spark__crypto__u64_seq; var src : spark__crypto__byte_seq; var src_offset : integer; var dst_index : integer; var src_index : integer; function spark__unsigned__shift_left_64(interfaces__unsigned_64, integer) : interfaces__unsigned_64; end; 
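The declarations above belong to Get_64_LSB_First, which, as the name and the declared variables (dst, src, src_offset, dst_index, src_index) together with shift_left_64 suggest, assembles 64-bit words from bytes with the least significant byte first. A hedged sketch of that assembly step, using invented names and data rather than the package's actual code:

with Interfaces; use Interfaces;

procedure Pack_64_LSB_First_Sketch is
   type Byte_Seq is array (Natural range <>) of Unsigned_8;

   Src  : constant Byte_Seq (0 .. 7) := (16#EF#, 16#CD#, 16#AB#, 16#89#,
                                         16#67#, 16#45#, 16#23#, 16#01#);
   Word : Unsigned_64 := 0;
begin
   --  The lowest-indexed byte lands in bits 0 .. 7, the next in bits 8 .. 15,
   --  and so on; this mirrors the shift_left_64 calls declared in the FDL.
   for I in Src'Range loop
      Word := Word or Shift_Left (Unsigned_64 (Src (I)), 8 * I);
   end loop;
   pragma Assert (Word = 16#0123_4567_89AB_CDEF#);
end Pack_64_LSB_First_Sketch;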
spark-2012.0.deb/lib/spark/current/spark-ada-strings-not_spark.ads0000644000175000017500000000522611753202341024033 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ------------------------------------------------------------------------------- -- -- -- SPARK.Ada.Strings.Not_SPARK -- -- -- -- Description -- -- This package provides the features of Ada.Strings which are not -- -- compatible with SPARK. Please refer to the Ada LRM description of -- -- Ada.Strings for usage information. -- -- -- ------------------------------------------------------------------------------- with Ada.Strings; package SPARK.Ada.Strings.Not_SPARK is pragma Warnings ("Y"); -- Turn off warnings for Ada 2005 features Wide_Space : constant Wide_Character := ' '; -- The following declaration is for Ada 2005 (AI-285) Wide_Wide_Space : constant Wide_Wide_Character := ' '; ------------------------------------------------ -- Conversion functions from SPARK.Ada to Ada -- ------------------------------------------------ function To_Alignment (From : Alignment) return Standard.Ada.Strings.Alignment; function To_Truncation (From : Truncation) return Standard.Ada.Strings.Truncation; function To_Membership (From : Membership) return Standard.Ada.Strings.Membership; function To_Direction (From : Direction) return Standard.Ada.Strings.Direction; function To_Trim_End (From : Trim_End) return Standard.Ada.Strings.Trim_End; end SPARK.Ada.Strings.Not_SPARK; spark-2012.0.deb/lib/spark/current/interfaces.idx0000644000175000017500000000014211526254022020634 0ustar eugeneugenInterfaces specification is in interfaces.shs Interfaces.C specification is in interfaces-c.shs spark-2012.0.deb/lib/spark/current/spark-crypto-debug.ads0000644000175000017500000000617511753202341022231 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. 
If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ------------------------------------------------------------------------------- -- -- -- SPARK.Crypto.Debug -- -- -- -- Description -- -- -- -- Simple output facilities for the types declared in SPARK.Crypto -- -- -- -- Language -- -- Specification : SPARK -- -- Private Part : N/A -- -- Body : Ada -- -- -- -- Runtime Requirements and Dependencies -- -- Depends on and uses Ada.Text_IO -- -- -- -- Verification -- -- N/A -- -- -- -- Exceptions -- -- None -- ------------------------------------------------------------------------------- with SPARK; with SPARK.Crypto; --# inherit SPARK, --# SPARK.Crypto, --# SPARK.Unsigned; package SPARK.Crypto.Debug is -- Prints Msg followed by Count Bytes from S to Standard_Output procedure Show_Msg_8 (Msg : in String; S : in Crypto.Byte_Seq; Count : in Natural); --# derives null from Msg, S, Count; --# pre S'First = 0 and --# Count <= S'Length; end SPARK.Crypto.Debug; spark-2012.0.deb/lib/spark/current/spark-ada-strings-unbounded.ads0000644000175000017500000007201411753202341024015 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ------------------------------------------------------------------------------- -- -- -- SPARK.Ada.Strings.Unbounded -- -- -- -- Description -- -- This is a binding to package Ada.Strings.Unbounded -- -- -- -- Language -- -- Specification : SPARK -- -- Private Part : Ada -- -- Body : Ada 2005 -- -- -- -- Runtime Requirements and Dependencies -- -- Full Ada Runtime -- -- -- -- Verification -- -- N/A -- -- -- -- Exceptions -- -- Ada.Strings.Index_Error (Guarded) -- -- Ada.Strings.Pattern_Error (Guarded) -- -- -- ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- -- -- A few subprograms, in particular To_String, cannot currently be -- SPARK, you will find them in -- SPARK.Ada.Strings.Unbounded.Not_SPARK. 
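--
-- For example, a non-SPARK client could recover a plain String through
-- that child package; the sketch below is illustrative only and assumes
-- that Not_SPARK.To_String mirrors the profile of
-- Ada.Strings.Unbounded.To_String:
--
--    declare
--       U : Unbounded_String := String_To_Unbounded_String ("hello");
--       S : constant String  :=
--         SPARK.Ada.Strings.Unbounded.Not_SPARK.To_String (U);
--    begin
--       pragma Assert (S = "hello");
--       null;
--    end;
--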
-- ------------------------------------------------------------------------------- with Ada.Strings.Unbounded; with SPARK.Ada.Strings.Maps; --# inherit SPARK.Ada.Strings.Maps; package SPARK.Ada.Strings.Unbounded is type Unbounded_String is private; Null_Unbounded_String : constant Unbounded_String; -- function Length (Source : Unbounded_String) return Natural; function Get_Length (Source : Unbounded_String) return Natural; function Get_Null_Unbounded_String return Unbounded_String; --# return S => Get_Length (S) = 0 and S = Null_Unbounded_String; -- function Element -- (Source : Unbounded_String; -- Index : Positive) return Character; function Get_Element (Source : Unbounded_String; Index : Positive) return Character; --# pre Index <= Get_Length (Source); -------------------------------------------------------- -- Conversion, Concatenation, and Selection Functions -- -------------------------------------------------------- -- function To_Unbounded_String -- (Source : String) return Unbounded_String; function String_To_Unbounded_String (Source : String) return Unbounded_String; --# return X => Get_Length (X) = Source'Length; -- Return an unbounded string initialized with spaces. -- function To_Unbounded_String -- (Length : Natural) return Unbounded_String; function Length_To_Unbounded_String (Length : Natural) return Unbounded_String; --# return X => Get_Length (X) = Length; -- Set_Unbounded_String is only defined in Ada 2005 procedure Set_Unbounded_String (Target : out Unbounded_String; Source : in String); --# derives Target from Source; --# post Get_Length (Target) = Source'Length; -- procedure Append -- (Source : in out Unbounded_String; -- New_Item : in Unbounded_String); procedure Append_Unbounded_String (Source : in out Unbounded_String; New_Item : in Unbounded_String); --# derives Source from *, --# New_Item; --# pre Get_Length (Source) + Get_Length (New_Item) <= Natural'Last; --# post Get_Length (Source) = Get_Length (Source~) + Get_Length (New_Item); -- procedure Append -- (Source : in out Unbounded_String; -- New_Item : in String); procedure Append_String (Source : in out Unbounded_String; New_Item : in String); --# derives Source from *, --# New_Item; --# pre Get_Length (Source) + New_Item'Length <= Natural'Last; --# post Get_Length (Source) = Get_Length (Source~) + New_Item'Length; -- procedure Append -- (Source : in out Unbounded_String; -- New_Item : in Character); procedure Append_Char (Source : in out Unbounded_String; New_Item : in Character); --# derives Source from *, --# New_Item; --# pre Get_Length (Source) < Natural'Last; --# post Get_Length (Source) = Get_Length (Source~) + 1; -- function "&" -- (Left : Unbounded_String; -- Right : Unbounded_String) return Unbounded_String; function Concat_Unbounded_String_Unbounded_String (Left : Unbounded_String; Right : Unbounded_String) return Unbounded_String; --# pre Get_Length (Left) + Get_Length (Right) <= Natural'Last; --# return X => Get_Length (X) = Get_Length (Left) + Get_Length (Right); -- function "&" -- (Left : Unbounded_String; -- Right : String) return Unbounded_String; function Concat_Unbounded_String_String (Left : Unbounded_String; Right : String) return Unbounded_String; --# pre Get_Length (Left) + Right'Length <= Natural'Last; --# return X => Get_Length (X) = Get_Length (Left) + Right'Length; -- function "&" -- (Left : String; -- Right : Unbounded_String) return Unbounded_String; function Concat_String_Unbounded_String (Left : String; Right : Unbounded_String) return Unbounded_String; --# pre Left'Length + 
Get_Length (Right) <= Natural'Last; --# return X => Get_Length (X) = Left'Length + Get_Length (Right); -- function "&" -- (Left : Unbounded_String; -- Right : Character) return Unbounded_String; function Concat_Unbounded_String_Char (Left : Unbounded_String; Right : Character) return Unbounded_String; --# pre Get_Length (Left) < Natural'Last; --# return X => Get_Length (X) = Get_Length (Left) + 1; -- function "&" -- (Left : Character; -- Right : Unbounded_String) return Unbounded_String; function Concat_Char_Unbounded_String (Left : Character; Right : Unbounded_String) return Unbounded_String; --# pre Get_Length (Right) < Natural'Last; --# return X => Get_Length (X) = 1 + Get_Length (Right); procedure Replace_Element (Source : in out Unbounded_String; Index : in Positive; By : in Character); --# derives Source from *, --# By, --# Index; --# pre Index <= Get_Length (Source); --# post Get_Length (Source) = Get_Length (Source~); -- Unbounded_Slice is only defined in Ada 2005 -- function Unbounded_Slice -- (Source : Unbounded_String; -- Low : Positive; -- High : Natural) return Unbounded_String; function Function_Unbounded_Slice (Source : Unbounded_String; Low : Positive; High : Natural) return Unbounded_String; --# pre Low <= Get_Length (Source) + 1 and High <= Get_Length (Source); --# return X => ((Low > High -> Get_Length (X) = 0) and --# (Low <= High -> Get_Length (X) = (High - Low) + 1)); -- Unbounded_Slice is only defined in Ada 2005 -- procedure Unbounded_Slice -- (Source : in Unbounded_String; -- Target : out Unbounded_String; -- Low : in Positive; -- High : in Natural); procedure Procedure_Unbounded_Slice (Source : in Unbounded_String; Target : out Unbounded_String; Low : in Positive; High : in Natural); --# derives Target from High, --# Low, --# Source; --# pre Low <= Get_Length (Source) + 1 and High <= Get_Length (Source); --# post (Low > High -> Get_Length (Target) = 0) and --# (Low <= High -> Get_Length (Target) = (High - Low) + 1); -- function "=" -- (Left : Unbounded_String; -- Right : Unbounded_String) return Boolean; function Equal_Unbounded_String_Unbounded_String (Left : Unbounded_String; Right : Unbounded_String) return Boolean; --# return (Get_Length (Left) = Get_Length (Right)) and --# (for all I in Natural range 1 .. Get_Length (Left) => --# (Get_Element (Left, I) = Get_Element (Right, I))); -- function "=" -- (Left : Unbounded_String; -- Right : String) return Boolean; function Equal_Unbounded_String_String (Left : Unbounded_String; Right : String) return Boolean; --# return (Get_Length (Left) = Right'Length) and --# (for all I in Natural range 1 .. Right'Length => --# (Get_Element (Left, I) = Right (I))); -- function "=" -- (Left : String; -- Right : Unbounded_String) return Boolean; function Equal_String_Unbounded_String (Left : String; Right : Unbounded_String) return Boolean; --# return (Left'Length = Get_Length (Right)) and --# (for all I in Natural range 1 .. 
Left'Length => --# (Left (I) = Get_Element (Right, I))); -- function "<" -- (Left : Unbounded_String; -- Right : Unbounded_String) return Boolean; function Less_Unbounded_String_Unbounded_String (Left : Unbounded_String; Right : Unbounded_String) return Boolean; -- function "<" -- (Left : Unbounded_String; -- Right : String) return Boolean; function Less_Unbounded_String_String (Left : Unbounded_String; Right : String) return Boolean; -- function "<" -- (Left : String; -- Right : Unbounded_String) return Boolean; function Less_String_Unbounded_String (Left : String; Right : Unbounded_String) return Boolean; -- function "<=" -- (Left : Unbounded_String; -- Right : Unbounded_String) return Boolean; function Less_Equal_Unbounded_String_Unbounded_String (Left : Unbounded_String; Right : Unbounded_String) return Boolean; -- function "<=" -- (Left : Unbounded_String; -- Right : String) return Boolean; function Less_Equal_Unbounded_String_String (Left : Unbounded_String; Right : String) return Boolean; -- function "<=" -- (Left : String; -- Right : Unbounded_String) return Boolean; function Less_Equal_String_Unbounded_String (Left : String; Right : Unbounded_String) return Boolean; -- function ">" -- (Left : Unbounded_String; -- Right : Unbounded_String) return Boolean; function Greater_Unbounded_String_Unbounded_String (Left : Unbounded_String; Right : Unbounded_String) return Boolean; -- function ">" -- (Left : Unbounded_String; -- Right : String) return Boolean; function Greater_Unbounded_String_String (Left : Unbounded_String; Right : String) return Boolean; -- function ">" -- (Left : String; -- Right : Unbounded_String) return Boolean; function Greater_String_Unbounded_String (Left : String; Right : Unbounded_String) return Boolean; -- function ">=" -- (Left : Unbounded_String; -- Right : Unbounded_String) return Boolean; function Greater_Equal_Unbounded_String_Unbounded_String (Left : Unbounded_String; Right : Unbounded_String) return Boolean; -- function ">=" -- (Left : Unbounded_String; -- Right : String) return Boolean; function Greater_Equal_Unbounded_String_String (Left : Unbounded_String; Right : String) return Boolean; -- function ">=" -- (Left : String; -- Right : Unbounded_String) return Boolean; function Greater_Equal_String_Unbounded_String (Left : String; Right : Unbounded_String) return Boolean; ------------------------ -- Search Subprograms -- ------------------------ -- function Index -- (Source : Unbounded_String; -- Pattern : String; -- Going : Direction := Forward; -- Mapping : Maps.Character_Mapping := Maps.Identity) return Natural; function Index_Pattern (Source : Unbounded_String; Pattern : String; Going : Strings.Direction; Mapping : Maps.Character_Mapping) return Natural; --# pre Pattern'Length > 0; --# return X => (X <= (Get_Length (Source) - Pattern'Length) + 1); -- function Index -- (Source : Unbounded_String; -- Set : Maps.Character_Set; -- Test : Membership := Inside; -- Going : Direction := Forward) return Natural; function Index_Set (Source : Unbounded_String; Arg_Set : Maps.Character_Set; Test : Strings.Membership; Going : Strings.Direction) return Natural; --# return X => (X <= Get_Length (Source)); -- Index is only defined in Ada 2005 -- function Index -- (Source : Unbounded_String; -- Pattern : String; -- From : Positive; -- Going : Direction := Forward; -- Mapping : Maps.Character_Mapping := Maps.Identity) return Natural; function Index_Pattern_From (Source : Unbounded_String; Pattern : String; Arg_From : Positive; Going : Strings.Direction; Mapping : 
Maps.Character_Mapping) return Natural; --# pre Pattern'Length > 0 and --# (Going = Strings.Direction_Backward -> --# Arg_From <= Get_Length (Source)); --# return X => --# (X = 0 or --# ((Going = Strings.Direction_Forward -> --# X in Arg_From .. ((Get_Length (Source) - Pattern'Length) + 1)) --# and --# (Going = Strings.Direction_Backward -> --# X in 1 .. (Arg_From - Pattern'Length) + 1))); -- Index is only defined in Ada 2005 -- function Index -- (Source : Unbounded_String; -- Set : Maps.Character_Set; -- From : Positive; -- Test : Membership := Inside; -- Going : Direction := Forward) return Natural; function Index_Set_From (Source : Unbounded_String; Arg_Set : Maps.Character_Set; Arg_From : Positive; Test : Strings.Membership; Going : Strings.Direction) return Natural; --# pre Arg_From <= Get_Length (Source); --# return X => --# (X = 0 or --# ((Going = Strings.Direction_Forward -> --# X in Arg_From .. Get_Length (Source)) and --# (Going = Strings.Direction_Backward -> X in 1 .. Arg_From))); -- Index is only defined in Ada 2005 -- function Index_Non_Blank -- (Source : Unbounded_String; -- Going : Direction := Forward) return Natural; function Index_Non_Blank (Source : Unbounded_String; Going : Strings.Direction) return Natural; --# return X => (X <= Get_Length (Source)); -- Index is only defined in Ada 2005 -- function Index_Non_Blank -- (Source : Unbounded_String; -- From : Positive; -- Going : Direction := Forward) return Natural; function Index_Non_Blank_From (Source : Unbounded_String; Arg_From : Positive; Going : Strings.Direction) return Natural; --# pre Going = Strings.Direction_Backward -> --# Arg_From <= Get_Length (Source); --# return X => --# (X = 0 or --# ((Going = Strings.Direction_Forward -> --# X in Arg_From .. Get_Length (Source)) and --# (Going = Strings.Direction_Backward -> X in 1 .. Arg_From))); -- function Count -- (Source : Unbounded_String; -- Pattern : String; -- Mapping : Maps.Character_Mapping := Maps.Identity) return Natural; function Count_Pattern (Source : Unbounded_String; Pattern : String; Mapping : Maps.Character_Mapping) return Natural; --# pre Pattern'Length > 0; --# return X => (X <= Get_Length (Source) / Pattern'Length); -- function Count -- (Source : Unbounded_String; -- Set : Maps.Character_Set) return Natural; function Count_Set (Source : Unbounded_String; Arg_Set : Maps.Character_Set) return Natural; --# return X => (X <= Get_Length (Source)); -- procedure Find_Token -- (Source : in Unbounded_String; -- Set : in Maps.Character_Set; -- Test : in Membership; -- First : out Positive; -- Last : out Natural); procedure Find_Token (Source : in Unbounded_String; Arg_Set : in Maps.Character_Set; Test : in Strings.Membership; Arg_First : out Positive; Arg_Last : out Natural); --# derives Arg_First, --# Arg_Last from Arg_Set, --# Source, --# Test; --# post (Arg_First <= Get_Length (Source)) --# and (Arg_Last <= Get_Length (Source)) --# and ((Arg_First <= Arg_Last) or (Arg_First = 1 and Arg_Last = 0)) --# and (for all I in Natural range Arg_First .. 
Arg_Last => --# (Maps.Is_In (Get_Element (Source, I), Arg_Set))); ------------------------------------ -- String Translation Subprograms -- ------------------------------------ -- function Translate -- (Source : Unbounded_String; -- Mapping : Maps.Character_Mapping) return Unbounded_String; function Function_Translate (Source : Unbounded_String; Mapping : Maps.Character_Mapping) return Unbounded_String; --# return X => (Get_Length (X) = Get_Length (Source)); -- procedure Translate -- (Source : in out Unbounded_String; -- Mapping : in Maps.Character_Mapping); procedure Procedure_Translate (Source : in out Unbounded_String; Mapping : in Maps.Character_Mapping); --# derives Source from *, --# Mapping; --# post Get_Length (Source) = Get_Length (Source~); --------------------------------------- -- String Transformation Subprograms -- --------------------------------------- -- The two Replace_Slice implementations have very difficult -- corner cases. We present an implementation that has slightly -- stronger pre- and postconditions in order to make proof of -- absence of RTE still possible but simplify writing down the -- postconditions. -- Furthermore, note that a bug in the Ada standard library -- provided by GNAT up to 6.4.1 requires the extra precondition: -- High <= Get_Length (Source) + 1 -- We feel that this slightly stronger pre-condition does not -- invalidate sensible uses of this function. -- function Replace_Slice -- (Source : Unbounded_String; -- Low : Positive; -- High : Natural; -- By : String) return Unbounded_String; function Function_Replace_Slice (Source : Unbounded_String; Low : Positive; High : Natural; By : String) return Unbounded_String; --# pre Low <= Get_Length (Source) + 1 and High <= Get_Length (Source) + 1 --# and By'Length + Get_Length (Source) <= Natural'Last; --# return X => ((Low <= High -> --# Get_Length (X) = By'Length + Get_Length (Source) + Low - --# Natural'Min (High, Get_Length (Source)) - 1) and --# (Low > High -> --# Get_Length (X) = By'Length + Get_Length (Source))); -- procedure Replace_Slice -- (Source : in out Unbounded_String; -- Low : in Positive; -- High : in Natural; -- By : in String); procedure Procedure_Replace_Slice (Source : in out Unbounded_String; Low : in Positive; High : in Natural; By : in String); --# derives Source from *, --# By, --# High, --# Low; --# pre Low <= Get_Length (Source) + 1 and High <= Get_Length (Source) + 1 --# and By'Length + Get_Length (Source) <= Natural'Last; --# post (Low <= High -> --# Get_Length (Source) = By'Length + Get_Length (Source~) + Low - --# Natural'Min (High, Get_Length (Source~)) - 1) and --# (Low > High -> --# Get_Length (Source) = By'Length + Get_Length (Source~)); --# -- function Insert -- (Source : Unbounded_String; -- Before : Positive; -- New_Item : String) return Unbounded_String; function Function_Insert (Source : Unbounded_String; Before : Positive; New_Item : String) return Unbounded_String; --# pre Before <= Get_Length (Source) + 1 and --# Get_Length (Source) + New_Item'Length <= Natural'Last; --# return X => (Get_Length (X) = Get_Length (Source) + New_Item'Length); -- procedure Insert -- (Source : in out Unbounded_String; -- Before : in Positive; -- New_Item : in String); procedure Procedure_Insert (Source : in out Unbounded_String; Before : in Positive; New_Item : in String); --# derives Source from *, --# Before, --# New_Item; --# pre Before <= Get_Length (Source) + 1 and --# Get_Length (Source) + New_Item'Length <= Natural'Last; --# post Get_Length (Source) = Get_Length (Source~) + 
New_Item'Length; -- function Overwrite -- (Source : Unbounded_String; -- Position : Positive; -- New_Item : String) return Unbounded_String; function Function_Overwrite (Source : Unbounded_String; Position : Positive; New_Item : String) return Unbounded_String; --# pre Position <= Get_Length (Source) + 1 and --# Position + New_Item'Length - 1 <= Natural'Last; --# return X => (Get_Length (X) = Natural'Max --# (Get_Length (Source), Position + New_Item'Length - 1)); -- procedure Overwrite -- (Source : in out Unbounded_String; -- Position : in Positive; -- New_Item : in String); procedure Procedure_Overwrite (Source : in out Unbounded_String; Position : in Positive; New_Item : in String); --# derives Source from *, --# New_Item, --# Position; --# pre Position <= Get_Length (Source) + 1 and --# Position + New_Item'Length - 1 <= Natural'Last; --# post Get_Length (Source) = Natural'Max --# (Get_Length (Source~), Position + New_Item'Length - 1); -- The last part of the return annotation is not strictly -- necessary, but may make some RTE-freedom proofs easier. -- function Delete -- (Source : Unbounded_String; -- From : Positive; -- Through : Natural) return Unbounded_String; function Function_Delete (Source : Unbounded_String; Arg_From : Positive; Through : Natural) return Unbounded_String; --# pre Through <= Get_Length (Source); --# return X => --# ((Arg_From > Through -> Get_Length (X) = 0) and --# (Arg_From <= Through -> --# Get_Length (X) = Get_Length (Source) - --# (Through - Arg_From + 1)) and --# (Get_Length (X) <= Get_Length (Source))); -- The last part of the postcondition is not strictly necessary, -- but may make some RTE-freedom proofs easier. -- procedure Delete -- (Source : in out Unbounded_String; -- From : in Positive; -- Through : in Natural); procedure Procedure_Delete (Source : in out Unbounded_String; Arg_From : in Positive; Through : in Natural); --# derives Source from *, --# Arg_From, --# Through; --# pre Through <= Get_Length (Source); --# post ((Arg_From > Through -> Get_Length (Source) = 0) and --# (Arg_From <= Through -> --# Get_Length (Source) = Get_Length (Source~) - --# (Through - Arg_From + 1)) and --# (Get_Length (Source) <= Get_Length (Source~))); -- function Trim -- (Source : Unbounded_String; -- Side : Trim_End) return Unbounded_String; function Function_Trim_Side (Source : Unbounded_String; Side : Strings.Trim_End) return Unbounded_String; --# return X => (Get_Length (X) <= Get_Length (Source)); -- procedure Trim -- (Source : in out Unbounded_String; -- Side : in Trim_End); procedure Procedure_Trim_Side (Source : in out Unbounded_String; Side : in Strings.Trim_End); --# derives Source from *, --# Side; --# post Get_Length (Source) <= Get_Length (Source~); -- function Trim -- (Source : Unbounded_String; -- Left : Maps.Character_Set; -- Right : Maps.Character_Set) return Unbounded_String; function Function_Trim_Character_Set (Source : Unbounded_String; Left : Maps.Character_Set; Right : Maps.Character_Set) return Unbounded_String; --# return X => (Get_Length (X) <= Get_Length (Source)); -- procedure Trim -- (Source : in out Unbounded_String; -- Left : in Maps.Character_Set; -- Right : in Maps.Character_Set); procedure Procedure_Trim_Character_Set (Source : in out Unbounded_String; Left : in Maps.Character_Set; Right : in Maps.Character_Set); --# derives Source from *, --# Left, --# Right; --# post Get_Length (Source) <= Get_Length (Source~); -- function Head -- (Source : Unbounded_String; -- Count : Natural; -- Pad : Character := Space) return Unbounded_String; 
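-- Worked illustration (sketch only, not part of the original source,
-- assuming Function_Head has the semantics of Ada.Strings.Unbounded.Head,
-- of which this package is a binding): the result always holds exactly
-- Count characters, truncating or padding with Pad as needed, which is
-- what the return annotation Get_Length (X) = Count below records.
--
--    X := Function_Head (Source => String_To_Unbounded_String ("abcde"),
--                        Count  => 3,
--                        Pad    => '*');  --  X holds "abc",  length 3
--
--    Y := Function_Head (Source => String_To_Unbounded_String ("ab"),
--                        Count  => 4,
--                        Pad    => '*');  --  Y holds "ab**", length 4
--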
function Function_Head (Source : Unbounded_String; Count : Natural; Pad : Character) return Unbounded_String; --# return X => (Get_Length (X) = Count); -- procedure Head -- (Source : in out Unbounded_String; -- Count : in Natural; -- Pad : in Character := Space); procedure Procedure_Head (Source : in out Unbounded_String; Count : in Natural; Pad : in Character); --# derives Source from *, --# Count, --# Pad; --# post Get_Length (Source) = Count; -- function Tail -- (Source : Unbounded_String; -- Count : Natural; -- Pad : Character := Space) return Unbounded_String; function Function_Tail (Source : Unbounded_String; Count : Natural; Pad : Character) return Unbounded_String; --# return X => (Get_Length (X) = Count); -- procedure Tail -- (Source : in out Unbounded_String; -- Count : in Natural; -- Pad : in Character := Space); procedure Procedure_Tail (Source : in out Unbounded_String; Count : in Natural; Pad : in Character); --# derives Source from *, --# Count, --# Pad; --# post Get_Length (Source) = Count; -- function "*" -- (Left : Natural; -- Right : Character) return Unbounded_String; function Times_Char (Left : Natural; Right : Character) return Unbounded_String; --# return X => (Get_Length (X) = Left); -- function "*" -- (Left : Natural; -- Right : String) return Unbounded_String; function Times_String (Left : Natural; Right : String) return Unbounded_String; --# pre Left * Right'Length <= Natural'Last; --# return X => (Get_Length (X) = Left * Right'Length); -- function "*" -- (Left : Natural; -- Right : Unbounded_String) return Unbounded_String; function Times_Unbounded_String (Left : Natural; Right : Unbounded_String) return Unbounded_String; --# pre Left * Get_Length (Right) <= Natural'Last; --# return X => (Get_Length (X) = Left * Get_Length (Right)); private --# hide SPARK.Ada.Strings.Unbounded; type Unbounded_String is new Standard.Ada.Strings.Unbounded.Unbounded_String; Null_Unbounded_String : constant Unbounded_String := Unbounded_String (Standard.Ada.Strings.Unbounded.Null_Unbounded_String); end SPARK.Ada.Strings.Unbounded; spark-2012.0.deb/lib/spark/current/spark-ada-text_io-not_spark.adb0000644000175000017500000001032411753202341023767 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= pragma Ada_95; package body SPARK.Ada.Text_IO.Not_SPARK is --# hide SPARK.Ada.Text_IO.Not_SPARK; pragma Warnings ("Y"); -- Turn off warnings for Ada 2005 features function Name (File : File_Type) return String is begin return Standard.Ada.Text_IO.Name (File => File.The_File_Type); end Name; function Form (File : File_Type) return String is begin return Standard.Ada.Text_IO.Form (File => File.The_File_Type); end Form; function Standard_Input return File_Access is function Get_Standard_Input return Standard.Ada.Text_IO.File_Access renames Standard.Ada.Text_IO.Standard_Input; begin return File_Access (Get_Standard_Input); end Standard_Input; function Standard_Output return File_Access is function Get_Standard_Output return Standard.Ada.Text_IO.File_Access renames Standard.Ada.Text_IO.Standard_Output; begin return File_Access (Get_Standard_Output); end Standard_Output; function Standard_Error return File_Access is function Get_Standard_Error return Standard.Ada.Text_IO.File_Access renames Standard.Ada.Text_IO.Standard_Error; begin return File_Access (Get_Standard_Error); end Standard_Error; function Current_Input return File_Access is function Get_Current_Input return Standard.Ada.Text_IO.File_Access renames Standard.Ada.Text_IO.Current_Input; begin return File_Access (Get_Current_Input); end Current_Input; function Current_Output return File_Access is function Get_Current_Output return Standard.Ada.Text_IO.File_Access renames Standard.Ada.Text_IO.Current_Output; begin return File_Access (Get_Current_Output); end Current_Output; function Current_Error return File_Access is function Get_Current_Error return Standard.Ada.Text_IO.File_Access renames Standard.Ada.Text_IO.Current_Error; begin return File_Access (Get_Current_Error); end Current_Error; function Get_Line (File : File_Type) return String is begin return Standard.Ada.Text_IO.Get_Line (File => File.The_File_Type); end Get_Line; function Get_Line return String is begin return Standard.Ada.Text_IO.Get_Line; end Get_Line; ------------------------------------------------ -- Conversion functions from SPARK.Ada to Ada -- ------------------------------------------------ function To_File_Type (From : File_Type) return Standard.Ada.Text_IO.File_Type is begin return From.The_File_Type; end To_File_Type; function To_File_Mode (From : File_Mode) return Standard.Ada.Text_IO.File_Mode is begin case From is when In_File => return Standard.Ada.Text_IO.In_File; when Out_File => return Standard.Ada.Text_IO.Out_File; when Append_File => return Standard.Ada.Text_IO.Append_File; end case; end To_File_Mode; function To_Type_Set (From : Type_Set) return Standard.Ada.Text_IO.Type_Set is begin case From is when Lower_Case => return Standard.Ada.Text_IO.Lower_Case; when Upper_Case => return Standard.Ada.Text_IO.Upper_Case; end case; end To_Type_Set; end SPARK.Ada.Text_IO.Not_SPARK; spark-2012.0.deb/lib/spark/current/spark-ada-command_line-unbounded_string.ads0000644000175000017500000000631411753202341026337 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. 
The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ------------------------------------------------------------------------------- -- -- -- SPARK.Ada.Command_Line.Unbounded_String -- -- -- -- Description -- -- This is a binding to package Ada.Command_Line -- -- -- -- Language -- -- Specification : SPARK -- -- Private Part : None -- -- Body : Ada -- -- -- -- Runtime Requirements and Dependencies -- -- Full Ada Runtime -- -- -- -- Verification -- -- N/A -- -- -- -- Exceptions -- -- None -- ------------------------------------------------------------------------------- with SPARK.Ada.Strings.Unbounded; --# inherit SPARK.Ada.Command_Line, --# SPARK.Ada.Strings.Unbounded; package SPARK.Ada.Command_Line.Unbounded_String is -- function Argument (Number : Positive) return String; function Argument (Number : Positive) return Strings.Unbounded.Unbounded_String; --# global Command_Line.State; --# pre Number <= Command_Line.Argument_Count (Command_Line.State); -- function Command_Name return String; function Command_Name return Strings.Unbounded.Unbounded_String; --# global Command_Line.State; end SPARK.Ada.Command_Line.Unbounded_String; spark-2012.0.deb/lib/spark/current/interfaces-c.shs0000644000175000017500000000770611753202341021101 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ------------------------------------------------------------------------------- -- -- -- Interfaces.C -- -- -- -- Description -- -- This is a SHADOW specification of the predefined package Interfaces.C -- -- It presents a subset of the facilities of the standard Ada package, -- -- but is SPARK compatible. -- -- -- -- Do not attempt to compile this file - it is intended the for Examiner -- -- only. -- -- -- -- Language -- -- Specification : SPARK -- -- Private Part : N/A -- -- Body : N/A -- -- -- -- Runtime Requirements and Dependencies -- -- No Ada Runtime -- -- -- -- Assumes C's "char" and "unsigned char" are 8 bits. -- -- -- -- This is known to be OK for GNAT Pro, but should be reviewed for other -- -- implementations. 
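--
-- One way to make that review mechanical for a new compiler is a
-- compile-time check against the real (non-shadow) Interfaces.C from
-- ordinary Ada code; the sketch below is illustrative only, the package
-- name Char_Size_Check is invented, and it relies on the GNAT-specific
-- pragma Compile_Time_Error:
--
--    with Interfaces.C;
--    package Char_Size_Check is
--       pragma Compile_Time_Error
--         (Interfaces.C.unsigned_char'Size /= 8,
--          "shadow spec assumes an 8-bit C char");
--    end Char_Size_Check;
--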
-- -- -- -- Verification -- -- N/A -- -- -- -- Exceptions -- -- None -- -- -- ------------------------------------------------------------------------------- package Interfaces.C is CHAR_BIT : constant := 8; SCHAR_MIN : constant := -128; SCHAR_MAX : constant := 127; UCHAR_MAX : constant := 255; type signed_char is range SCHAR_MIN .. SCHAR_MAX; for signed_char'Size use CHAR_BIT; type unsigned_char is mod (UCHAR_MAX + 1); for unsigned_char'Size use CHAR_BIT; subtype plain_char is unsigned_char; -- Possibly add declarations of int, short, c_float here when -- derived scalar types are supported in SPARK. end Interfaces.C; spark-2012.0.deb/lib/spark/current/spark-crypto.ads0000644000175000017500000001001311753202341021127 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ------------------------------------------------------------------------------- -- -- -- SPARK.Crypto -- -- -- -- Description -- -- -- -- This package supplies some basic type declarations that are common -- -- to all packages rooted here. -- -- -- -- Language -- -- Specification : SPARK -- -- Private Part : N/A -- -- Body : N/A -- -- -- -- Runtime Requirements and Dependencies -- -- None -- -- -- -- Verification -- -- N/A -- -- -- -- Exceptions -- -- None -- ------------------------------------------------------------------------------- with SPARK; with SPARK.Unsigned; --# inherit SPARK, --# SPARK.Unsigned; package SPARK.Crypto is subtype I3 is Natural range 0 .. 2; subtype I4 is Natural range 0 .. 3; subtype I6 is Natural range 0 .. 5; subtype I7 is Natural range 0 .. 6; subtype I8 is Natural range 0 .. 7; subtype I9 is Natural range 0 .. 8; subtype I16 is Natural range 0 .. 15; subtype I64 is Natural range 0 .. 63; subtype I128 is Natural range 0 .. 127; type Byte_Seq is array (Natural range <>) of Unsigned.Byte; for Byte_Seq'Alignment use 8; subtype Byte_Seq_4 is Byte_Seq (I4); subtype Byte_Seq_8 is Byte_Seq (I8); subtype Byte_Seq_16 is Byte_Seq (I16); subtype Byte_Seq_64 is Byte_Seq (I64); subtype Byte_Seq_128 is Byte_Seq (I128); -- 2**N bytes is 2**(N-3) 64-bit words subtype Word_Count_T is Natural range 0 .. ((Natural'Last + 1) / 8 - 1); subtype Positive_Word_Count_T is Natural range 1 .. 
Word_Count_T'Last; type U64_Seq is array (Word_Count_T range <>) of Unsigned.U64; for U64_Seq'Alignment use 8; subtype U64_Seq_3 is U64_Seq (I3); subtype U64_Seq_4 is U64_Seq (I4); subtype U64_Seq_8 is U64_Seq (I8); subtype U64_Seq_9 is U64_Seq (I9); subtype U64_Seq_16 is U64_Seq (I16); end SPARK.Crypto; spark-2012.0.deb/lib/spark/current/spark-crypto-debug.adb0000644000175000017500000000613511753202341022204 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ------------------------------------------------------------------------------- -- -- -- SPARK.Crypto.Debug -- -- -- -- Implementation Notes -- -- -- -- None -- ------------------------------------------------------------------------------- with Ada.Text_IO; use Ada.Text_IO; with SPARK.Unsigned; package body SPARK.Crypto.Debug is Indent : constant String := " "; procedure Put_Byte (B : in Unsigned.Byte) is use type Unsigned.Byte; subtype Nibble is Unsigned.Byte range 0 .. 15; type Nibble_To_Char is array (Nibble) of Character; To_Char : constant Nibble_To_Char := Nibble_To_Char'('0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'); MSB : constant Nibble := B / 16; LSB : constant Nibble := B mod 16; begin Put (Standard_Output, To_Char (MSB)); Put (Standard_Output, To_Char (LSB)); end Put_Byte; procedure Show_8 (S : in Byte_Seq; Count : in Natural) is begin if Count >= 1 then for I in Natural range S'First .. 
Count - 1 loop if (I mod 16) = 0 then Put (Indent); else if (I mod 4) = 0 then Put (' '); end if; end if; Put (' '); Put_Byte (S (I)); if (I mod 16) = 15 or I = S'Last then New_Line; end if; end loop; end if; end Show_8; ---------------- -- Show_Msg_8 -- ---------------- procedure Show_Msg_8 (Msg : in String; S : in Crypto.Byte_Seq; Count : in Natural) is begin Put_Line (Msg); Show_8 (S, Count); New_Line; end Show_Msg_8; end SPARK.Crypto.Debug; spark-2012.0.deb/lib/spark/current/ada.idx0000644000175000017500000000102111526254022017233 0ustar eugeneugen-- The following packages are synthesized internally by -- the Examiner when in SPARK95 or SPARK2005 mode, -- and so do not need to appear here: -- Ada -- Ada.Characters -- Ada.Characters.Latin_1 -- Ada.Unchecked_Conversion -- -- The following packages are also synthesized internally by -- the Examiner when in RavenSPARK mode, -- and so do not need to appear here: -- Ada.Real_Time -- Ada.Synchronous_Task_Control -- Ada.Interrupts Ada.Characters.Handling specification is in ada-characters-handling.shs spark-2012.0.deb/lib/spark/current/current.sum0000644000175000017500000013363611712513676020244 0ustar eugeneugen------------------------------------------------------------------------------- Semantic Analysis Summary POGS Pro Edition ------------------------------------------------------------------------------- Summary of: Verification Condition files (.vcg) Simplified Verification Condition files (.siv) ViCToR result files (.vct) Proof Logs (.plg) Dead Path Conjecture files (.dpc) Summary Dead Path files (.sdp) "status" column keys: 1st character: '-' - No VC 'S' - No SIV 'U' - Undischarged 'E' - Proved by Examiner 'I' - Proved by Simplifier by Inference 'X' - Proved by Simplifier by Contradiction 'P' - Proved by Simplifier using User Defined Proof Rules 'V' - Proved by ViCToR 'C' - Proved by Checker 'R' - Proved by Review 'F' - VC is False 2nd character: '-' - No DPC 'S' - No SDP 'U' - Unchecked 'D' - Dead path 'L' - Live path in the directory: Summary produced: Ignore Dates option selected. 
File get_64_lsb_first.vcg procedure Skein.Get_64_LSB_First *** Warning: VC date stamps ignored *** VCs for procedure_get_64_lsb_first : ----------------------------------------------------------------------------- | # | From | To | Proved By | Dead Path | Status | |----------------------------------------------------------------------------- | 1 | start | rtc check @ 148 | Inference | No DPC | I- | | 2 | start | rtc check @ 149 | Inference | No DPC | I- | | 3 | start | check stm @ 151 | Inference | No DPC | I- | | 4 | 172 | check stm @ 151 | ViCToR | No DPC | V- | | 5 | start | rtc check @ 161 | Inference | No DPC | I- | | 6 | 172 | rtc check @ 161 | Inference | No DPC | I- | | 7 | start | rtc check @ 161 | Inference | No DPC | I- | | 8 | 172 | rtc check @ 161 | Inference | No DPC | I- | | 9 | start | rtc check @ 161 | Inference | No DPC | I- | | 10 | 172 | rtc check @ 161 | Inference | No DPC | I- | | 11 | start | rtc check @ 161 | Inference | No DPC | I- | | 12 | 172 | rtc check @ 161 | Inference | No DPC | I- | | 13 | start | rtc check @ 161 | Inference | No DPC | I- | | 14 | 172 | rtc check @ 161 | Inference | No DPC | I- | | 15 | start | rtc check @ 161 | Inference | No DPC | I- | | 16 | 172 | rtc check @ 161 | Inference | No DPC | I- | | 17 | start | rtc check @ 161 | Inference | No DPC | I- | | 18 | 172 | rtc check @ 161 | Inference | No DPC | I- | | 19 | start | rtc check @ 161 | Inference | No DPC | I- | | 20 | 172 | rtc check @ 161 | Inference | No DPC | I- | | 21 | start | assert @ 172 | ViCToR | No DPC | V- | | 22 | 172 | assert @ 172 | ViCToR | No DPC | V- | | 23 | 172 | check stm @ 183 | Inference | No DPC | I- | | 24 | 172 | rtc check @ 184 | Inference | No DPC | I- | | 25 | 172 | check stm @ 186 | Inference | No DPC | I- | | 26 | 172 | rtc check @ 187 | Inference | No DPC | I- | | 27 | 172 | assert @ finish | Inference | No DPC | I- | ----------------------------------------------------------------------------- File put_64_lsb_first.vcg procedure Skein.Put_64_LSB_First *** Warning: VC date stamps ignored *** VCs for procedure_put_64_lsb_first : ----------------------------------------------------------------------------- | # | From | To | Proved By | Dead Path | Status | |----------------------------------------------------------------------------- | 1 | start | rtc check @ 102 | Inference | No DPC | I- | | 2 | start | rtc check @ 102 | Inference | No DPC | I- | | 3 | start | assert @ 103 | Inference | No DPC | I- | | 4 | 103 | assert @ 103 | Inference | No DPC | I- | | 5 | 103 | check stm @ 111 | Inference | No DPC | I- | | 6 | 103 | rtc check @ 113 | Inference | No DPC | I- | | 7 | 103 | rtc check @ 113 | Inference | No DPC | I- | | 8 | start | assert @ finish | Examiner | No DPC | E- | | 9 | start | assert @ finish | Examiner | No DPC | E- | | 10 | 103 | assert @ finish | Examiner | No DPC | E- | ----------------------------------------------------------------------------- File skein_512_final.vcg procedure Skein.Skein_512_Final *** Warning: VC date stamps ignored *** The following user rules were used: from spark_/crypto_/hash_/skein/skein.rlu skein_rules(2) used in proving VCs: 33. skein_rules(1) used in proving VCs: 33. 
VCs for procedure_skein_512_final : ----------------------------------------------------------------------------- | # | From | To | Proved By | Dead Path | Status | |----------------------------------------------------------------------------- | 1 | start | check stm @ 900 | Inference | No DPC | I- | | 2 | start | rtc check @ 901 | Inference | No DPC | I- | | 3 | start | pre check @ 905 | Inference | No DPC | I- | | 4 | start | rtc check @ 909 | Inference | No DPC | I- | | 5 | start | rtc check @ 909 | Inference | No DPC | I- | | 6 | start | pre check @ 911 | Inference | No DPC | I- | | 7 | start | pre check @ 911 | Inference | No DPC | I- | | 8 | start | rtc check @ 918 | Inference | No DPC | I- | | 9 | start | rtc check @ 918 | Inference | No DPC | I- | | 10 | start | check stm @ 920 | Inference | No DPC | I- | | 11 | start | check stm @ 920 | Inference | No DPC | I- | | 12 | start | rtc check @ 923 | Inference | No DPC | I- | | 13 | start | rtc check @ 923 | Inference | No DPC | I- | | 14 | start | rtc check @ 926 | Inference | No DPC | I- | | 15 | start | rtc check @ 926 | Inference | No DPC | I- | | 16 | start | rtc check @ 927 | Inference | No DPC | I- | | 17 | start | rtc check @ 927 | Inference | No DPC | I- | | 18 | start | assert @ 930 | Inference | No DPC | I- | | 19 | start | assert @ 930 | Inference | No DPC | I- | | 20 | 930 | assert @ 930 | ViCToR | No DPC | V- | | 21 | 930 | assert @ 930 | ViCToR | No DPC | V- | | 22 | 930 | pre check @ 937 | Inference | No DPC | I- | | 23 | 930 | rtc check @ 939 | Inference | No DPC | I- | | 24 | 930 | pre check @ 947 | Inference | No DPC | I- | | 25 | 930 | rtc check @ 953 | Inference | No DPC | I- | | 26 | 930 | rtc check @ 955 | Inference | No DPC | I- | | 27 | 930 | pre check @ 959 | Inference | No DPC | I- | | 28 | 930 | pre check @ 959 | Inference | No DPC | I- | | 29 | 930 | rtc check @ 966 | Inference | No DPC | I- | | 30 | 930 | rtc check @ 966 | Inference | No DPC | I- | | 31 | 930 | assert @ finish | Examiner | No DPC | E- | | 32 | 930 | assert @ finish | Examiner | No DPC | E- | | 33 | | refinement | User Rules | No DPC | P- | | 34 | | refinement | Examiner | No DPC | E- | ----------------------------------------------------------------------------- File set_b_counter.vcg procedure Skein.Skein_512_Final.Set_B_Counter *** Warning: VC date stamps ignored *** VCs for procedure_set_b_counter : ----------------------------------------------------------------------------- | # | From | To | Proved By | Dead Path | Status | |----------------------------------------------------------------------------- | 1 | start | rtc check @ 887 | Inference | No DPC | I- | | 2 | start | rtc check @ 888 | Inference | No DPC | I- | | 3 | start | rtc check @ 888 | Inference | No DPC | I- | | 4 | start | rtc check @ 889 | Inference | No DPC | I- | | 5 | start | rtc check @ 889 | Inference | No DPC | I- | | 6 | start | rtc check @ 890 | Inference | No DPC | I- | | 7 | start | rtc check @ 890 | Inference | No DPC | I- | | 8 | start | rtc check @ 891 | Inference | No DPC | I- | | 9 | start | rtc check @ 891 | Inference | No DPC | I- | | 10 | start | rtc check @ 892 | Inference | No DPC | I- | | 11 | start | rtc check @ 892 | Inference | No DPC | I- | | 12 | start | rtc check @ 893 | Inference | No DPC | I- | | 13 | start | rtc check @ 893 | Inference | No DPC | I- | | 14 | start | rtc check @ 894 | Inference | No DPC | I- | | 15 | start | rtc check @ 894 | Inference | No DPC | I- | | 16 | start | assert @ finish | Inference | No DPC | I- | 
----------------------------------------------------------------------------- File zero_pad_b.vcg procedure Skein.Skein_512_Final.Zero_Pad_B *** Warning: VC date stamps ignored *** VCs for procedure_zero_pad_b : ----------------------------------------------------------------------------- | # | From | To | Proved By | Dead Path | Status | |----------------------------------------------------------------------------- | 1 | start | rtc check @ 870 | Inference | No DPC | I- | | 2 | start | rtc check @ 870 | Inference | No DPC | I- | | 3 | start | assert @ 871 | Inference | No DPC | I- | | 4 | 871 | assert @ 871 | Inference | No DPC | I- | | 5 | 871 | rtc check @ 875 | Inference | No DPC | I- | | 6 | start | assert @ finish | Inference | No DPC | I- | | 7 | 871 | assert @ finish | Inference | No DPC | I- | ----------------------------------------------------------------------------- File skein_512_hash.vcg function Skein.Skein_512_Hash *** Warning: VC date stamps ignored *** VCs for function_skein_512_hash : ----------------------------------------------------------------------------- | # | From | To | Proved By | Dead Path | Status | |----------------------------------------------------------------------------- | 1 | start | rtc check @ 979 | Inference | No DPC | I- | | 2 | start | pre check @ 982 | Inference | No DPC | I- | | 3 | start | pre check @ 985 | Inference | No DPC | I- | | 4 | start | assert @ finish | Examiner | No DPC | E- | ----------------------------------------------------------------------------- File skein_512_init.vcg procedure Skein.Skein_512_Init *** Warning: VC date stamps ignored *** The following user rules were used: from spark_/crypto_/hash_/skein/skein.rlu skein_rules(1) used in proving VCs: 15. skein_rules(2) used in proving VCs: 15. 
VCs for procedure_skein_512_init : ----------------------------------------------------------------------------- | # | From | To | Proved By | Dead Path | Status | |----------------------------------------------------------------------------- | 1 | start | rtc check @ 682 | Inference | No DPC | I- | | 2 | start | rtc check @ 684 | Inference | No DPC | I- | | 3 | start | check stm @ 686 | Inference | No DPC | I- | | 4 | start | rtc check @ 690 | Inference | No DPC | I- | | 5 | start | rtc check @ 690 | Inference | No DPC | I- | | 6 | start | rtc check @ 690 | Inference | No DPC | I- | | 7 | start | rtc check @ 690 | Inference | No DPC | I- | | 8 | start | rtc check @ 698 | Inference | No DPC | I- | | 9 | start | check stm @ 701 | Inference | No DPC | I- | | 10 | start | pre check @ 704 | Inference | No DPC | I- | | 11 | start | check stm @ 711 | Inference | No DPC | I- | | 12 | start | rtc check @ 714 | Inference | No DPC | I- | | 13 | start | assert @ finish | Inference | No DPC | I- | | 14 | | refinement | Examiner | No DPC | E- | | 15 | | refinement | User Rules | No DPC | P- | ----------------------------------------------------------------------------- File skein_512_process_block.vcg procedure Skein.Skein_512_Process_Block *** Warning: VC date stamps ignored *** VCs for procedure_skein_512_process_block : ----------------------------------------------------------------------------- | # | From | To | Proved By | Dead Path | Status | |----------------------------------------------------------------------------- | 1 | start | rtc check @ 620 | Inference | No DPC | I- | | 2 | start | rtc check @ 621 | Inference | No DPC | I- | | 3 | start | assert @ 624 | ViCToR | No DPC | V- | | 4 | 624 | assert @ 624 | ViCToR | No DPC | V- | | 5 | 624 | rtc check @ 637 | Inference | No DPC | I- | | 6 | 624 | pre check @ 643 | Inference | No DPC | I- | | 7 | 624 | check stm @ 647 | Inference | No DPC | I- | | 8 | 624 | rtc check @ 661 | Inference | No DPC | I- | | 9 | 624 | rtc check @ 662 | Inference | No DPC | I- | | 10 | 624 | assert @ finish | Inference | No DPC | I- | ----------------------------------------------------------------------------- File do_first_key_injection.vcg procedure Skein.Skein_512_Process_Block.Do_First_Key_Injection *** Warning: VC date stamps ignored *** VCs for procedure_do_first_key_injection : ----------------------------------------------------------------------------- | # | From | To | Proved By | Dead Path | Status | |----------------------------------------------------------------------------- | 1 | start | rtc check @ 564 | Inference | No DPC | I- | | 2 | start | rtc check @ 572 | Inference | No DPC | I- | | 3 | start | rtc check @ 573 | Inference | No DPC | I- | | 4 | start | assert @ finish | Examiner | No DPC | E- | ----------------------------------------------------------------------------- File initialize_key_schedule.vcg procedure Skein.Skein_512_Process_Block.Initialize_Key_Schedule *** Warning: VC date stamps ignored *** VCs for procedure_initialize_key_schedule : ----------------------------------------------------------------------------- | # | From | To | Proved By | Dead Path | Status | |----------------------------------------------------------------------------- | 1 | start | rtc check @ 522 | Inference | No DPC | I- | | 2 | start | rtc check @ 525 | Inference | No DPC | I- | | 3 | 527 | rtc check @ 525 | Inference | No DPC | I- | | 4 | start | rtc check @ 526 | Inference | No DPC | I- | | 5 | 527 | rtc check @ 526 | Inference | No DPC | I- | | 6 | start 
| assert @ 527 | Inference | No DPC | I- | | 7 | 527 | assert @ 527 | ViCToR | No DPC | V- | | 8 | 527 | assert @ finish | Examiner | No DPC | E- | ----------------------------------------------------------------------------- File initialize_ts.vcg procedure Skein.Skein_512_Process_Block.Initialize_TS *** Warning: VC date stamps ignored *** VCs for procedure_initialize_ts : ----------------------------------------------------------------------------- | # | From | To | Proved By | Dead Path | Status | |----------------------------------------------------------------------------- | 1 | start | rtc check @ 547 | Inference | No DPC | I- | | 2 | start | rtc check @ 548 | Inference | No DPC | I- | | 3 | start | rtc check @ 550 | Inference | No DPC | I- | | 4 | start | assert @ finish | Examiner | No DPC | E- | ----------------------------------------------------------------------------- File inject_key.vcg procedure Skein.Skein_512_Process_Block.Inject_Key *** Warning: VC date stamps ignored *** VCs for procedure_inject_key : ----------------------------------------------------------------------------- | # | From | To | Proved By | Dead Path | Status | |----------------------------------------------------------------------------- | 1 | start | assert @ 260 | Inference | No DPC | I- | | 2 | 260 | assert @ 260 | Inference | No DPC | I- | | 3 | 260 | rtc check @ 261 | Inference | No DPC | I- | | 4 | 260 | rtc check @ 264 | Inference | No DPC | I- | | 5 | 260 | rtc check @ 265 | Inference | No DPC | I- | | 6 | 260 | rtc check @ 266 | Inference | No DPC | I- | | 7 | 260 | assert @ finish | Examiner | No DPC | E- | ----------------------------------------------------------------------------- File round_1.vcg procedure Skein.Skein_512_Process_Block.Round_1 *** Warning: VC date stamps ignored *** VCs for procedure_round_1 : ----------------------------------------------------------------------------- | # | From | To | Proved By | Dead Path | Status | |----------------------------------------------------------------------------- | 1 | start | rtc check @ 275 | Inference | No DPC | I- | | 2 | start | rtc check @ 276 | Inference | No DPC | I- | | 3 | start | rtc check @ 276 | Inference | No DPC | I- | | 4 | start | rtc check @ 277 | Inference | No DPC | I- | | 5 | start | assert @ 280 | Inference | No DPC | I- | | 6 | 280 | rtc check @ 282 | Inference | No DPC | I- | | 7 | 280 | rtc check @ 283 | Inference | No DPC | I- | | 8 | 280 | rtc check @ 283 | Inference | No DPC | I- | | 9 | 280 | rtc check @ 284 | Inference | No DPC | I- | | 10 | 280 | assert @ 286 | Inference | No DPC | I- | | 11 | 286 | rtc check @ 288 | Inference | No DPC | I- | | 12 | 286 | rtc check @ 289 | Inference | No DPC | I- | | 13 | 286 | rtc check @ 289 | Inference | No DPC | I- | | 14 | 286 | rtc check @ 290 | Inference | No DPC | I- | | 15 | 286 | assert @ 292 | Inference | No DPC | I- | | 16 | 292 | rtc check @ 294 | Inference | No DPC | I- | | 17 | 292 | rtc check @ 295 | Inference | No DPC | I- | | 18 | 292 | rtc check @ 295 | Inference | No DPC | I- | | 19 | 292 | rtc check @ 296 | Inference | No DPC | I- | | 20 | 292 | assert @ finish | Examiner | No DPC | E- | ----------------------------------------------------------------------------- File round_2.vcg procedure Skein.Skein_512_Process_Block.Round_2 *** Warning: VC date stamps ignored *** VCs for procedure_round_2 : ----------------------------------------------------------------------------- | # | From | To | Proved By | Dead Path | Status | 
|----------------------------------------------------------------------------- | 1 | start | rtc check @ 305 | Inference | No DPC | I- | | 2 | start | rtc check @ 306 | Inference | No DPC | I- | | 3 | start | rtc check @ 306 | Inference | No DPC | I- | | 4 | start | rtc check @ 307 | Inference | No DPC | I- | | 5 | start | assert @ 310 | Inference | No DPC | I- | | 6 | 310 | rtc check @ 312 | Inference | No DPC | I- | | 7 | 310 | rtc check @ 313 | Inference | No DPC | I- | | 8 | 310 | rtc check @ 313 | Inference | No DPC | I- | | 9 | 310 | rtc check @ 314 | Inference | No DPC | I- | | 10 | 310 | assert @ 316 | Inference | No DPC | I- | | 11 | 316 | rtc check @ 318 | Inference | No DPC | I- | | 12 | 316 | rtc check @ 319 | Inference | No DPC | I- | | 13 | 316 | rtc check @ 319 | Inference | No DPC | I- | | 14 | 316 | rtc check @ 320 | Inference | No DPC | I- | | 15 | 316 | assert @ 322 | Inference | No DPC | I- | | 16 | 322 | rtc check @ 324 | Inference | No DPC | I- | | 17 | 322 | rtc check @ 325 | Inference | No DPC | I- | | 18 | 322 | rtc check @ 325 | Inference | No DPC | I- | | 19 | 322 | rtc check @ 326 | Inference | No DPC | I- | | 20 | 322 | assert @ finish | Examiner | No DPC | E- | ----------------------------------------------------------------------------- File round_3.vcg procedure Skein.Skein_512_Process_Block.Round_3 *** Warning: VC date stamps ignored *** VCs for procedure_round_3 : ----------------------------------------------------------------------------- | # | From | To | Proved By | Dead Path | Status | |----------------------------------------------------------------------------- | 1 | start | rtc check @ 336 | Inference | No DPC | I- | | 2 | start | rtc check @ 337 | Inference | No DPC | I- | | 3 | start | rtc check @ 337 | Inference | No DPC | I- | | 4 | start | rtc check @ 338 | Inference | No DPC | I- | | 5 | start | assert @ 341 | Inference | No DPC | I- | | 6 | 341 | rtc check @ 343 | Inference | No DPC | I- | | 7 | 341 | rtc check @ 344 | Inference | No DPC | I- | | 8 | 341 | rtc check @ 344 | Inference | No DPC | I- | | 9 | 341 | rtc check @ 345 | Inference | No DPC | I- | | 10 | 341 | assert @ 347 | Inference | No DPC | I- | | 11 | 347 | rtc check @ 349 | Inference | No DPC | I- | | 12 | 347 | rtc check @ 350 | Inference | No DPC | I- | | 13 | 347 | rtc check @ 350 | Inference | No DPC | I- | | 14 | 347 | rtc check @ 351 | Inference | No DPC | I- | | 15 | 347 | assert @ 353 | Inference | No DPC | I- | | 16 | 353 | rtc check @ 355 | Inference | No DPC | I- | | 17 | 353 | rtc check @ 356 | Inference | No DPC | I- | | 18 | 353 | rtc check @ 356 | Inference | No DPC | I- | | 19 | 353 | rtc check @ 357 | Inference | No DPC | I- | | 20 | 353 | assert @ finish | Examiner | No DPC | E- | ----------------------------------------------------------------------------- File round_4.vcg procedure Skein.Skein_512_Process_Block.Round_4 *** Warning: VC date stamps ignored *** VCs for procedure_round_4 : ----------------------------------------------------------------------------- | # | From | To | Proved By | Dead Path | Status | |----------------------------------------------------------------------------- | 1 | start | rtc check @ 366 | Inference | No DPC | I- | | 2 | start | rtc check @ 367 | Inference | No DPC | I- | | 3 | start | rtc check @ 367 | Inference | No DPC | I- | | 4 | start | rtc check @ 368 | Inference | No DPC | I- | | 5 | start | assert @ 371 | Inference | No DPC | I- | | 6 | 371 | rtc check @ 373 | Inference | No DPC | I- | | 7 | 371 | rtc check @ 374 | 
Inference | No DPC | I- | | 8 | 371 | rtc check @ 374 | Inference | No DPC | I- | | 9 | 371 | rtc check @ 375 | Inference | No DPC | I- | | 10 | 371 | assert @ 377 | Inference | No DPC | I- | | 11 | 377 | rtc check @ 379 | Inference | No DPC | I- | | 12 | 377 | rtc check @ 380 | Inference | No DPC | I- | | 13 | 377 | rtc check @ 380 | Inference | No DPC | I- | | 14 | 377 | rtc check @ 381 | Inference | No DPC | I- | | 15 | 377 | assert @ 383 | Inference | No DPC | I- | | 16 | 383 | rtc check @ 385 | Inference | No DPC | I- | | 17 | 383 | rtc check @ 386 | Inference | No DPC | I- | | 18 | 383 | rtc check @ 386 | Inference | No DPC | I- | | 19 | 383 | rtc check @ 387 | Inference | No DPC | I- | | 20 | 383 | assert @ finish | Examiner | No DPC | E- | ----------------------------------------------------------------------------- File round_5.vcg procedure Skein.Skein_512_Process_Block.Round_5 *** Warning: VC date stamps ignored *** VCs for procedure_round_5 : ----------------------------------------------------------------------------- | # | From | To | Proved By | Dead Path | Status | |----------------------------------------------------------------------------- | 1 | start | rtc check @ 396 | Inference | No DPC | I- | | 2 | start | rtc check @ 397 | Inference | No DPC | I- | | 3 | start | rtc check @ 397 | Inference | No DPC | I- | | 4 | start | rtc check @ 398 | Inference | No DPC | I- | | 5 | start | assert @ 401 | Inference | No DPC | I- | | 6 | 401 | rtc check @ 403 | Inference | No DPC | I- | | 7 | 401 | rtc check @ 404 | Inference | No DPC | I- | | 8 | 401 | rtc check @ 404 | Inference | No DPC | I- | | 9 | 401 | rtc check @ 405 | Inference | No DPC | I- | | 10 | 401 | assert @ 407 | Inference | No DPC | I- | | 11 | 407 | rtc check @ 409 | Inference | No DPC | I- | | 12 | 407 | rtc check @ 410 | Inference | No DPC | I- | | 13 | 407 | rtc check @ 410 | Inference | No DPC | I- | | 14 | 407 | rtc check @ 411 | Inference | No DPC | I- | | 15 | 407 | assert @ 413 | Inference | No DPC | I- | | 16 | 413 | rtc check @ 415 | Inference | No DPC | I- | | 17 | 413 | rtc check @ 416 | Inference | No DPC | I- | | 18 | 413 | rtc check @ 416 | Inference | No DPC | I- | | 19 | 413 | rtc check @ 417 | Inference | No DPC | I- | | 20 | 413 | assert @ finish | Examiner | No DPC | E- | ----------------------------------------------------------------------------- File round_6.vcg procedure Skein.Skein_512_Process_Block.Round_6 *** Warning: VC date stamps ignored *** VCs for procedure_round_6 : ----------------------------------------------------------------------------- | # | From | To | Proved By | Dead Path | Status | |----------------------------------------------------------------------------- | 1 | start | rtc check @ 426 | Inference | No DPC | I- | | 2 | start | rtc check @ 427 | Inference | No DPC | I- | | 3 | start | rtc check @ 427 | Inference | No DPC | I- | | 4 | start | rtc check @ 428 | Inference | No DPC | I- | | 5 | start | assert @ 431 | Inference | No DPC | I- | | 6 | 431 | rtc check @ 433 | Inference | No DPC | I- | | 7 | 431 | rtc check @ 434 | Inference | No DPC | I- | | 8 | 431 | rtc check @ 434 | Inference | No DPC | I- | | 9 | 431 | rtc check @ 435 | Inference | No DPC | I- | | 10 | 431 | assert @ 437 | Inference | No DPC | I- | | 11 | 437 | rtc check @ 439 | Inference | No DPC | I- | | 12 | 437 | rtc check @ 440 | Inference | No DPC | I- | | 13 | 437 | rtc check @ 440 | Inference | No DPC | I- | | 14 | 437 | rtc check @ 441 | Inference | No DPC | I- | | 15 | 437 | assert @ 443 | 
Inference | No DPC | I- | | 16 | 443 | rtc check @ 445 | Inference | No DPC | I- | | 17 | 443 | rtc check @ 446 | Inference | No DPC | I- | | 18 | 443 | rtc check @ 446 | Inference | No DPC | I- | | 19 | 443 | rtc check @ 447 | Inference | No DPC | I- | | 20 | 443 | assert @ finish | Examiner | No DPC | E- | ----------------------------------------------------------------------------- File round_7.vcg procedure Skein.Skein_512_Process_Block.Round_7 *** Warning: VC date stamps ignored *** VCs for procedure_round_7 : ----------------------------------------------------------------------------- | # | From | To | Proved By | Dead Path | Status | |----------------------------------------------------------------------------- | 1 | start | rtc check @ 456 | Inference | No DPC | I- | | 2 | start | rtc check @ 457 | Inference | No DPC | I- | | 3 | start | rtc check @ 457 | Inference | No DPC | I- | | 4 | start | rtc check @ 458 | Inference | No DPC | I- | | 5 | start | assert @ 461 | Inference | No DPC | I- | | 6 | 461 | rtc check @ 463 | Inference | No DPC | I- | | 7 | 461 | rtc check @ 464 | Inference | No DPC | I- | | 8 | 461 | rtc check @ 464 | Inference | No DPC | I- | | 9 | 461 | rtc check @ 465 | Inference | No DPC | I- | | 10 | 461 | assert @ 467 | Inference | No DPC | I- | | 11 | 467 | rtc check @ 469 | Inference | No DPC | I- | | 12 | 467 | rtc check @ 470 | Inference | No DPC | I- | | 13 | 467 | rtc check @ 470 | Inference | No DPC | I- | | 14 | 467 | rtc check @ 471 | Inference | No DPC | I- | | 15 | 467 | assert @ 473 | Inference | No DPC | I- | | 16 | 473 | rtc check @ 475 | Inference | No DPC | I- | | 17 | 473 | rtc check @ 476 | Inference | No DPC | I- | | 18 | 473 | rtc check @ 476 | Inference | No DPC | I- | | 19 | 473 | rtc check @ 477 | Inference | No DPC | I- | | 20 | 473 | assert @ finish | Examiner | No DPC | E- | ----------------------------------------------------------------------------- File round_8.vcg procedure Skein.Skein_512_Process_Block.Round_8 *** Warning: VC date stamps ignored *** VCs for procedure_round_8 : ----------------------------------------------------------------------------- | # | From | To | Proved By | Dead Path | Status | |----------------------------------------------------------------------------- | 1 | start | rtc check @ 486 | Inference | No DPC | I- | | 2 | start | rtc check @ 487 | Inference | No DPC | I- | | 3 | start | rtc check @ 487 | Inference | No DPC | I- | | 4 | start | rtc check @ 488 | Inference | No DPC | I- | | 5 | start | assert @ 491 | Inference | No DPC | I- | | 6 | 491 | rtc check @ 493 | Inference | No DPC | I- | | 7 | 491 | rtc check @ 494 | Inference | No DPC | I- | | 8 | 491 | rtc check @ 494 | Inference | No DPC | I- | | 9 | 491 | rtc check @ 495 | Inference | No DPC | I- | | 10 | 491 | assert @ 497 | Inference | No DPC | I- | | 11 | 497 | rtc check @ 499 | Inference | No DPC | I- | | 12 | 497 | rtc check @ 500 | Inference | No DPC | I- | | 13 | 497 | rtc check @ 500 | Inference | No DPC | I- | | 14 | 497 | rtc check @ 501 | Inference | No DPC | I- | | 15 | 497 | assert @ 503 | Inference | No DPC | I- | | 16 | 503 | rtc check @ 505 | Inference | No DPC | I- | | 17 | 503 | rtc check @ 506 | Inference | No DPC | I- | | 18 | 503 | rtc check @ 506 | Inference | No DPC | I- | | 19 | 503 | rtc check @ 507 | Inference | No DPC | I- | | 20 | 503 | assert @ finish | Examiner | No DPC | E- | ----------------------------------------------------------------------------- File threefish_block.vcg procedure 
Skein.Skein_512_Process_Block.Threefish_Block *** Warning: VC date stamps ignored *** VCs for procedure_threefish_block : ----------------------------------------------------------------------------- | # | From | To | Proved By | Dead Path | Status | |----------------------------------------------------------------------------- | 1 | start | rtc check @ 584 | Inference | No DPC | I- | | 2 | start | rtc check @ 584 | Inference | No DPC | I- | | 3 | start | assert @ 584 | Inference | No DPC | I- | | 4 | 584 | assert @ 584 | Inference | No DPC | I- | | 5 | 584 | rtc check @ 589 | Inference | No DPC | I- | | 6 | 584 | rtc check @ 594 | Inference | No DPC | I- | | 7 | start | assert @ finish | Examiner | No DPC | E- | | 8 | 584 | assert @ finish | Examiner | No DPC | E- | ----------------------------------------------------------------------------- File update_context.vcg procedure Skein.Skein_512_Process_Block.Update_Context *** Warning: VC date stamps ignored *** VCs for procedure_update_context : ----------------------------------------------------------------------------- | # | From | To | Proved By | Dead Path | Status | |----------------------------------------------------------------------------- | 1 | start | rtc check @ 608 | Inference | No DPC | I- | | 2 | start | assert @ finish | Inference | No DPC | I- | ----------------------------------------------------------------------------- File skein_512_update.vcg procedure Skein.Skein_512_Update *** Warning: VC date stamps ignored *** The following user rules were used: from spark_/crypto_/hash_/skein/skein.rlu skein_rules(2) used in proving VCs: 35, 36. skein_rules(1) used in proving VCs: 35, 36. VCs for procedure_skein_512_update : ----------------------------------------------------------------------------- | # | From | To | Proved By | Dead Path | Status | |----------------------------------------------------------------------------- | 1 | start | rtc check @ 785 | Inference | No DPC | I- | | 2 | start | rtc check @ 786 | Inference | No DPC | I- | | 3 | start | rtc check @ 788 | Inference | No DPC | I- | | 4 | start | rtc check @ 791 | Inference | No DPC | I- | | 5 | start | check stm @ 793 | Inference | No DPC | I- | | 6 | start | check stm @ 795 | Inference | No DPC | I- | | 7 | start | pre check @ 796 | Inference | No DPC | I- | | 8 | start | rtc check @ 797 | Inference | No DPC | I- | | 9 | start | rtc check @ 798 | Inference | No DPC | I- | | 10 | start | check stm @ 800 | Inference | No DPC | I- | | 11 | start | pre check @ 803 | Inference | No DPC | I- | | 12 | start | rtc check @ 808 | Inference | No DPC | I- | | 13 | start | rtc check @ 814 | Inference | No DPC | I- | | 14 | start | rtc check @ 814 | Inference | No DPC | I- | | 15 | start | pre check @ 816 | ViCToR | No DPC | V- | | 16 | start | pre check @ 816 | Inference | No DPC | I- | | 17 | start | rtc check @ 822 | Inference | No DPC | I- | | 18 | start | rtc check @ 822 | Inference | No DPC | I- | | 19 | start | check stm @ 824 | ViCToR | No DPC | V- | | 20 | start | check stm @ 824 | Inference | No DPC | I- | | 21 | start | rtc check @ 825 | Inference | No DPC | I- | | 22 | start | rtc check @ 825 | Inference | No DPC | I- | | 23 | start | rtc check @ 827 | Inference | No DPC | I- | | 24 | start | rtc check @ 827 | Inference | No DPC | I- | | 25 | start | pre check @ 835 | ViCToR | No DPC | V- | | 26 | start | pre check @ 835 | Inference | No DPC | I- | | 27 | start | pre check @ 835 | Inference | No DPC | I- | | 28 | start | pre check @ 835 | Contradiction | No DPC | 
X- |
| 29 | start | pre check @ 835 | Inference | No DPC | I- |
| 30 | start | assert @ finish | Inference | No DPC | I- |
| 31 | start | assert @ finish | Inference | No DPC | I- |
| 32 | start | assert @ finish | Inference | No DPC | I- |
| 33 | start | assert @ finish | Inference | No DPC | I- |
| 34 | start | assert @ finish | Inference | No DPC | I- |
| 35 | | refinement | User Rules | No DPC | P- |
| 36 | | refinement | User Rules | No DPC | P- |
-----------------------------------------------------------------------------

File copy_msg_to_b.vcg
procedure Skein.Skein_512_Update.Copy_Msg_To_B
*** Warning: VC date stamps ignored ***

VCs for procedure_copy_msg_to_b :
-----------------------------------------------------------------------------
| # | From | To | Proved By | Dead Path | Status |
|-----------------------------------------------------------------------------
| 1 | start | rtc check @ 758 | Inference | No DPC | I- |
| 2 | start | rtc check @ 760 | Inference | No DPC | I- |
| 3 | start | rtc check @ 762 | Inference | No DPC | I- |
| 4 | start | rtc check @ 763 | Inference | No DPC | I- |
| 5 | start | rtc check @ 766 | Inference | No DPC | I- |
| 6 | 768 | rtc check @ 766 | ViCToR | No DPC | V- |
| 7 | start | assert @ 768 | Inference | No DPC | I- |
| 8 | 768 | assert @ 768 | Inference | No DPC | I- |
| 9 | 768 | rtc check @ 776 | Inference | No DPC | I- |
| 10 | 768 | rtc check @ 777 | Inference | No DPC | I- |
| 11 | 768 | rtc check @ 780 | Inference | No DPC | I- |
| 12 | start | assert @ finish | Inference | No DPC | I- |
| 13 | 768 | assert @ finish | Inference | No DPC | I- |
-----------------------------------------------------------------------------

File skein_start_new_type.vcg
procedure Skein.Skein_Start_New_Type
*** Warning: VC date stamps ignored ***

VCs for procedure_skein_start_new_type :
-----------------------------------------------------------------------------
| # | From | To | Proved By | Dead Path | Status |
|-----------------------------------------------------------------------------
| 1 | start | rtc check @ 211 | Inference | No DPC | I- |
| 2 | start | rtc check @ 219 | Inference | No DPC | I- |
| 3 | start | assert @ finish | Inference | No DPC | I- |
-----------------------------------------------------------------------------

===============================================================================
Summary:

The following user-defined rule files have been used:
  spark_/crypto_/hash_/skein/skein.rlu

The following subprograms have VCs proved using a user-defined proof rule:
  1 spark_/crypto_/hash_/skein/skein_512_final.vcg
  1 spark_/crypto_/hash_/skein/skein_512_init.vcg
  2 spark_/crypto_/hash_/skein/skein_512_update.vcg

The following subprograms have VCs proved by contradiction:
  1 spark_/crypto_/hash_/skein/skein_512_update.vcg

The following subprograms have VCs proved by ViCToR:
  3 spark_/crypto_/hash_/skein/get_64_lsb_first.vcg
  2 spark_/crypto_/hash_/skein/skein_512_final.vcg
  2 spark_/crypto_/hash_/skein/skein_512_process_block.vcg
  1 spark_/crypto_/hash_/skein/skein_512_process_block/initialize_key_schedule.vcg
  3 spark_/crypto_/hash_/skein/skein_512_update.vcg
  1 spark_/crypto_/hash_/skein/skein_512_update/copy_msg_to_b.vcg

Proof strategies used by subprograms
-------------------------------------------------------------------------
Total subprograms with at least one VC proved by examiner: 17
Total subprograms with at least one VC proved by simplifier: 25
Total subprograms with at least one VC proved by contradiction: 1
Total subprograms with at least one VC proved with user proof rule: 3
Total subprograms with at least one VC proved by ViCToR: 6
Total subprograms with at least one VC proved using checker: 0
Total subprograms with at least one VC discharged by review: 0

Maximum extent of strategies used for fully proved subprograms:
-------------------------------------------------------------------------
Total subprograms with proof completed by examiner: 0
Total subprograms with proof completed by simplifier: 18
Total subprograms with proof completed with user defined rules: 1
Total subprograms with proof completed by ViCToR: 6
Total subprograms with proof completed by checker: 0
Total subprograms with VCs discharged by review: 0

Overall subprogram summary:
-------------------------------------------------------------------------
Total subprograms fully proved: 25
Total subprograms with at least one undischarged VC: 0
Total subprograms with at least one false VC: 0
-----
Total subprograms for which VCs have been generated: 25

ZombieScope Summary:
-------------------------------------------------------------------------
Total subprograms for which DPCs have been generated: 0
Total number subprograms with dead paths found: 0
Total number of dead paths found: 0

VC summary:
-------------------------------------------------------------------------
Note: (User) denotes where the Simplifier has proved VCs using one or more user-defined proof rules.

Total VCs by type:
                     -----------Proved By Or Using-----------
               Total Examnr   Simp (User) ViCToR Checkr Review  False Undisc
Assert/Post       79     20     52             7      0      0      0      0
Precondition      20      0     18             2      0      0      0      0
Check stmnt.      17      0     15             2      0      0      0      0
Runtime check    246      0    245             1      0      0      0      0
Refinem. VCs       6      2      4( 4)         0      0      0      0      0
Inherit. VCs       0      0      0             0      0      0      0      0
===============================================================================
Totals:          368     22    334( 4)        12      0      0      0      0
%Totals:                 6%    91%( 1%)        3%     0%     0%     0%     0%

!!! WARNING: Experimental feature used: Proof by ViCToR

===================== End of Semantic Analysis Summary ========================
spark-2012.0.deb/lib/spark/current/spark-ada-text_io.ads0000644000175000017500000006443711753202341022030 0ustar eugeneugen-------------------------------------------------------------------------------
-- (C) Altran Praxis Limited
-------------------------------------------------------------------------------
--
-- The SPARK toolset is free software; you can redistribute it and/or modify it
-- under terms of the GNU General Public License as published by the Free
-- Software Foundation; either version 3, or (at your option) any later
-- version. The SPARK toolset is distributed in the hope that it will be
-- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
-- Public License for more details. You should have received a copy of the GNU
-- General Public License distributed with the SPARK toolset; see file
-- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of
-- the license.
-- --============================================================================= ------------------------------------------------------------------------------- -- -- -- SPARK.Ada.Text_IO -- -- -- -- Description -- -- This is a binding to package Ada.Text_IO -- -- -- -- Language -- -- Specification : SPARK -- -- Private Part : Ada -- -- Body : Ada -- -- -- -- Runtime Requirements and Dependencies -- -- Full Ada Runtime -- -- -- -- Verification -- -- N/A -- -- -- -- Exceptions -- -- Functions and Look_Ahead_* procedures may raise exceptions at present. -- -- The intention is to add preconditions in future to guard against these. -- -- For details of the exceptions that can be raised please refer to -- -- Ada.Text_IO. -- -- -- -- Exceptions raised in procedures are caught and returned via the File -- -- parameter, which must be tested for exception status by calling the -- -- appropriate Get_Last_Exception_* function. The exception can be -- -- reraised by calling the appropriate Raise_Last_Exception_* procedure. -- -- -- -- IMPORTANT NOTE. When adding a new procedure to this package the -- -- procedure body must re-raise the exception if Get_Exception_T returns -- -- No_Exception, otherwise unexpected exceptions will be silently ignored. -- -- The existing procedure bodies provide examples of this. -- -- -- ------------------------------------------------------------------------------- with Ada.Text_IO; package SPARK.Ada.Text_IO --# own The_Standard_Input : Input_Type; --# The_Standard_Output : Output_Type; --# The_Standard_Error : Error_Type; --# initializes The_Standard_Input, --# The_Standard_Output, --# The_Standard_Error; is --# type Input_Type is abstract; --# type Output_Type is abstract; --# type Error_Type is abstract; type File_Type is limited private; type File_Mode is (In_File, Out_File, Append_File); type Count is range 0 .. Natural'Last; -- The value of Count'Last must be large enough so that the -- assumption that the Line, Column and Page counts can never -- exceed this value is a valid assumption. subtype Positive_Count is Count range 1 .. Count'Last; Unbounded : constant Count := 0; -- Line and page length subtype Field is Integer range 0 .. 255; -- Note: if for any reason, there is a need to increase this value, -- then it will be necessary to change the corresponding value in -- System.Img_Real in file s-imgrea.adb. subtype Number_Base is Integer range 2 .. 
16; type Type_Set is (Lower_Case, Upper_Case); type Exception_T is (No_Exception, Status_Error, Mode_Error, Name_Error, Use_Error, Device_Error, End_Error, Data_Error, Layout_Error); --------------------- -- File Management -- --------------------- -- procedure Create -- (File : in out File_Type; -- Mode : in File_Mode := Out_File; -- Name : in String := ""; -- Form : in String := ""); procedure Create (File : out File_Type; Mode : in File_Mode; Name : in String; Form : in String); --# derives File from Form, --# Mode, --# Name; --# declare delay; -- procedure Open -- (File : in out File_Type; -- Mode : in File_Mode; -- Name : in String; -- Form : in String := ""); procedure Open (File : out File_Type; Mode : in File_Mode; Name : in String; Form : in String); --# derives File from Form, --# Mode, --# Name; --# declare delay; procedure Close (File : in out File_Type); --# derives File from *; --# declare delay; procedure Delete (File : in out File_Type); --# derives File from *; --# declare delay; procedure Reset_Mode (File : in out File_Type; Mode : in File_Mode); --# derives File from *, --# Mode; --# declare delay; procedure Reset (File : in out File_Type); --# derives File from *; --# declare delay; function Mode (File : File_Type) return File_Mode; function Is_Open (File : File_Type) return Boolean; -------------------- -- Buffer control -- -------------------- -- Note: The parameter file is IN OUT in the RM, but this is clearly -- an oversight, and was intended to be IN, see AI95-00057. -- procedure Flush (File : in File_Type); procedure Flush_File (File : in out File_Type); --# derives File from *; --# declare delay; -- procedure Flush (File : in File_Type := Standard_Output); procedure Flush_Output; --# global in out The_Standard_Output; --# derives The_Standard_Output from *; --# declare delay; -- procedure Flush (File : in File_Type := Standard_Error); procedure Flush_Error; --# global in out The_Standard_Error; --# derives The_Standard_Error from *; --# declare delay; -------------------------------------------- -- Specification of line and page lengths -- -------------------------------------------- -- procedure Set_Line_Length (File : in File_Type; -- To : in Count); procedure Set_Line_Length_File (File : in out File_Type; To : in Count); --# derives File from *, --# To; --# declare delay; -- procedure Set_Line_Length (File : in File_Type := Standard_Output; -- To : in Count); procedure Set_Line_Length_Output (To : in Count); --# global in out The_Standard_Output; --# derives The_Standard_Output from *, --# To; --# declare delay; -- procedure Set_Line_Length (File : in File_Type := Standard_Error; -- To : in Count); procedure Set_Line_Length_Error (To : in Count); --# global in out The_Standard_Error; --# derives The_Standard_Error from *, --# To; --# declare delay; -- procedure Set_Page_Length (File : in File_Type; -- To : in Count); procedure Set_Page_Length_File (File : in out File_Type; To : in Count); --# derives File from *, --# To; --# declare delay; -- procedure Set_Page_Length (File : in File_Type := Standard_Output; -- To : in Count); procedure Set_Page_Length_Output (To : in Count); --# global in out The_Standard_Output; --# derives The_Standard_Output from *, --# To; --# declare delay; -- procedure Set_Page_Length (File : in File_Type := Standard_Error; -- To : in Count); procedure Set_Page_Length_Error (To : in Count); --# global in out The_Standard_Error; --# derives The_Standard_Error from *, --# To; --# declare delay; -- function Line_Length (File : File_Type) 
return Count; function Line_Length_File (File : File_Type) return Count; -- function Line_Length (File : File_Type := Standard_Output) return Count; function Line_Length_Output return Count; --# global in The_Standard_Output; -- function Line_Length (File : File_Type := Standard_Error) return Count; function Line_Length_Error return Count; --# global in The_Standard_Error; -- function Page_Length (File : File_Type) return Count; function Page_Length_File (File : File_Type) return Count; -- function Page_Length (File : File_Type := Standard_Output) return Count; function Page_Length_Output return Count; --# global in The_Standard_Output; -- function Page_Length (File : File_Type := Standard_Error) return Count; function Page_Length_Error return Count; --# global in The_Standard_Error; ------------------------------------ -- Column, Line, and Page Control -- ------------------------------------ -- procedure New_Line (File : in File_Type; -- Spacing : in Positive_Count := 1); procedure New_Line_File (File : in out File_Type; Spacing : in Positive_Count); --# derives File from *, --# Spacing; --# declare delay; -- procedure New_Line (File : in File_Type := Standard_Output; -- Spacing : in Positive_Count := 1); procedure New_Line_Output (Spacing : in Positive_Count); --# global in out The_Standard_Output; --# derives The_Standard_Output from *, --# Spacing; --# declare delay; -- procedure New_Line (File : in File_Type := Standard_Error; -- Spacing : in Positive_Count := 1); procedure New_Line_Error (Spacing : in Positive_Count); --# global in out The_Standard_Error; --# derives The_Standard_Error from *, --# Spacing; --# declare delay; -- procedure Skip_Line (File : in File_Type; -- Spacing : in Positive_Count := 1); procedure Skip_Line_File (File : in out File_Type; Spacing : in Positive_Count); --# derives File from *, --# Spacing; --# declare delay; -- procedure Skip_Line (File : in File_Type := Standard_Input; -- Spacing : in Positive_Count := 1); procedure Skip_Line_Input (Spacing : in Positive_Count); --# global in out The_Standard_Input; --# derives The_Standard_Input from *, --# Spacing; --# declare delay; -- function End_Of_Line -- (File : File_Type) return Boolean; function End_Of_Line_File (File : File_Type) return Boolean; -- function End_Of_Line -- (File : File_Type := Standard_Input) return Boolean; function End_Of_Line_Input return Boolean; --# global in The_Standard_Input; -- procedure New_Page (File : in File_Type); procedure New_Page_File (File : in out File_Type); --# derives File from *; --# declare delay; -- procedure New_Page (File : in File_Type := Standard_Output); procedure New_Page_Output; --# global in out The_Standard_Output; --# derives The_Standard_Output from *; --# declare delay; -- procedure New_Page (File : in File_Type := Standard_Error); procedure New_Page_Error; --# global in out The_Standard_Error; --# derives The_Standard_Error from *; --# declare delay; -- procedure Skip_Page (File : File_Type); procedure Skip_Page_File (File : in out File_Type); --# derives File from *; --# declare delay; -- procedure Skip_Page (File : File_Type := Standard_Input); procedure Skip_Page_Input; --# global in out The_Standard_Input; --# derives The_Standard_Input from *; --# declare delay; -- function End_Of_Page -- (File : File_Type) return Boolean; function End_Of_Page_File (File : File_Type) return Boolean; -- function End_Of_Page -- (File : File_Type := Standard_Input) return Boolean; function End_Of_Page_Input return Boolean; --# global in The_Standard_Input; -- 
function End_Of_File -- (File : File_Type) return Boolean; function End_Of_File_File (File : File_Type) return Boolean; -- function End_Of_File -- (File : File_Type := Standard_Input) return Boolean; function End_Of_File_Input return Boolean; --# global in The_Standard_Input; -- procedure Set_Col (File : in File_Type; -- To : in Positive_Count); procedure Set_Col_File (File : in out File_Type; To : in Positive_Count); --# derives File from *, --# To; --# declare delay; -- procedure Set_Col (File : in File_Type := Standard_Input; -- To : in Positive_Count); procedure Set_Col_Input (To : in Positive_Count); --# global in out The_Standard_Input; --# derives The_Standard_Input from *, --# To; --# declare delay; -- procedure Set_Col (File : in File_Type := Standard_Output; -- To : in Positive_Count); procedure Set_Col_Output (To : in Positive_Count); --# global in out The_Standard_Output; --# derives The_Standard_Output from *, --# To; --# declare delay; -- procedure Set_Col (File : in File_Type := Standard_Error; -- To : in Positive_Count); procedure Set_Col_Error (To : in Positive_Count); --# global in out The_Standard_Error; --# derives The_Standard_Error from *, --# To; --# declare delay; -- procedure Set_Line (File : in File_Type; -- To : in Positive_Count); procedure Set_Line_File (File : in out File_Type; To : in Positive_Count); --# derives File from *, --# To; --# declare delay; -- procedure Set_Line (File : in File_Type := Standard_Input; -- To : in Positive_Count); procedure Set_Line_Input (To : in Positive_Count); --# global in out The_Standard_Input; --# derives The_Standard_Input from *, --# To; --# declare delay; -- procedure Set_Line (File : in File_Type := Standard_Output; -- To : in Positive_Count); procedure Set_Line_Output (To : in Positive_Count); --# global in out The_Standard_Output; --# derives The_Standard_Output from *, --# To; --# declare delay; -- procedure Set_Line (File : in File_Type := Standard_Error; -- To : in Positive_Count); procedure Set_Line_Error (To : in Positive_Count); --# global in out The_Standard_Error; --# derives The_Standard_Error from *, --# To; --# declare delay; -- function Col -- (File : File_Type) return Positive_Count; function Col_File (File : File_Type) return Positive_Count; -- function Col -- (File : File_Type := Standard_Input) return Positive_Count; function Col_Input return Positive_Count; --# global in The_Standard_Input; -- function Col -- (File : File_Type := Standard_Output) return Positive_Count; function Col_Output return Positive_Count; --# global in The_Standard_Output; -- function Col -- (File : File_Type := Standard_Error) return Positive_Count; function Col_Error return Positive_Count; --# global in The_Standard_Error; -- function Line (File : File_Type) return Positive_Count; function Line_File (File : File_Type) return Positive_Count; -- function Line -- (File : File_Type := Standard_Input) return Positive_Count; function Line_Input return Positive_Count; --# global in The_Standard_Input; -- function Line -- (File : File_Type := Standard_Output) return Positive_Count; function Line_Output return Positive_Count; --# global in The_Standard_Output; -- function Line -- (File : File_Type := Standard_Error) return Positive_Count; function Line_Error return Positive_Count; --# global in The_Standard_Error; -- function Page (File : File_Type) return Positive_Count; function Page_File (File : File_Type) return Positive_Count; -- function Page -- (File : File_Type := Standard_Input) return Positive_Count; function Page_Input return 
Positive_Count; --# global in The_Standard_Input; -- function Page -- (File : File_Type := Standard_Output) return Positive_Count; function Page_Output return Positive_Count; --# global in The_Standard_Output; -- function Page -- (File : File_Type := Standard_Error) return Positive_Count; function Page_Error return Positive_Count; --# global in The_Standard_Error; ---------------------------- -- Character Input-Output -- ---------------------------- -- procedure Get (File : in File_Type; -- Item : out Character); procedure Get_Character_File (File : in out File_Type; Item : out Character); --# derives File, --# Item from File; --# declare delay; -- procedure Get (File : in File_Type := Standard_Input; -- Item : out Character); procedure Get_Character_Input (Item : out Character); --# global in out The_Standard_Input; --# derives Item, --# The_Standard_Input from The_Standard_Input; --# declare delay; -- procedure Put (File : in File_Type; -- Item : in Character); procedure Put_Character_File (File : in out File_Type; Item : in Character); --# derives File from *, --# Item; --# declare delay; -- procedure Put (File : in File_Type := Standard_Output; -- Item : in Character); procedure Put_Character_Output (Item : in Character); --# global in out The_Standard_Output; --# derives The_Standard_Output from *, --# Item; --# declare delay; -- procedure Put (File : in File_Type := Standard_Error; -- Item : in Character); procedure Put_Character_Error (Item : in Character); --# global in out The_Standard_Error; --# derives The_Standard_Error from *, --# Item; --# declare delay; -- procedure Look_Ahead -- (File : in File_Type; -- Item : out Character; -- End_Of_Line : out Boolean); procedure Look_Ahead_File (File : in File_Type; Item : out Character; End_Of_Line : out Boolean); --# derives End_Of_Line, --# Item from File; --# declare delay; -- procedure Look_Ahead -- (File : in File_Type := Standard_Input; -- Item : out Character; -- End_Of_Line : out Boolean); procedure Look_Ahead_Input (Item : out Character; End_Of_Line : out Boolean); --# global in The_Standard_Input; --# derives End_Of_Line, --# Item from The_Standard_Input; --# declare delay; -- procedure Get_Immediate -- (File : in File_Type; -- Item : out Character); procedure Get_Immediate_File (File : in out File_Type; Item : out Character); --# derives File, --# Item from File; --# declare delay; -- procedure Get_Immediate -- (File : in File_Type := Standard_Input; -- Item : out Character); procedure Get_Immediate_Input (Item : out Character); --# global in out The_Standard_Input; --# derives Item, --# The_Standard_Input from The_Standard_Input; --# declare delay; -- procedure Get_Immediate -- (File : in File_Type; -- Item : out Character; -- Available : out Boolean); procedure Get_Immediate_Available_File (File : in out File_Type; Item : out Character; Available : out Boolean); --# derives Available, --# File, --# Item from File; --# declare delay; -- procedure Get_Immediate -- (File : in File_Type := Standard_Input; -- Item : out Character; -- Available : out Boolean); procedure Get_Immediate_Available_Input (Item : out Character; Available : out Boolean); --# global in out The_Standard_Input; --# derives Available, --# Item, --# The_Standard_Input from The_Standard_Input; --# declare delay; ------------------------- -- String Input-Output -- ------------------------- -- procedure Get (File : in File_Type; -- Item : out String); procedure Get_File (File : in out File_Type; Item : out String); --# derives File, --# Item from File; --# 
declare delay; -- procedure Get (File : in File_Type := Standard_Input; -- Item : out String); procedure Get_Input (Item : out String); --# global in out The_Standard_Input; --# derives Item, --# The_Standard_Input from The_Standard_Input; --# declare delay; -- procedure Put (File : in File_Type; -- Item : in String); procedure Put_File (File : in out File_Type; Item : in String); --# derives File from *, --# Item; --# declare delay; -- procedure Put (File : in File_Type := Standard_Output; -- Item : in String); procedure Put_Output (Item : in String); --# global in out The_Standard_Output; --# derives The_Standard_Output from *, --# Item; --# declare delay; -- procedure Put (File : in File_Type := Standard_Error; -- Item : in String); procedure Put_Error (Item : in String); --# global in out The_Standard_Error; --# derives The_Standard_Error from *, --# Item; --# declare delay; -- procedure Get_Line -- (File : in File_Type; -- Item : out String; -- Last : out Natural); procedure Procedure_Get_Line_File (File : in out File_Type; Item : out String; Arg_Last : out Natural); --# derives Arg_Last, --# File, --# Item from File; --# declare delay; -- procedure Get_Line -- (File : in File_Type := Standard_Input; -- Item : out String; -- Last : out Natural); procedure Procedure_Get_Line_Input (Item : out String; Arg_Last : out Natural); --# global in out The_Standard_Input; --# derives Arg_Last, --# Item, --# The_Standard_Input from The_Standard_Input; --# declare delay; -- procedure Put_Line -- (File : in File_Type; -- Item : in String); procedure Put_Line_File (File : in out File_Type; Item : in String); --# derives File from *, --# Item; --# declare delay; -- procedure Put_Line -- (File : in File_Type := Standard_Output; -- Item : in String); procedure Put_Line_Output (Item : in String); --# global in out The_Standard_Output; --# derives The_Standard_Output from *, --# Item; --# declare delay; -- procedure Put_Line -- (File : in File_Type := Standard_Error; -- Item : in String); procedure Put_Line_Error (Item : in String); --# global in out The_Standard_Error; --# derives The_Standard_Error from *, --# Item; --# declare delay; function Get_Last_Exception_File (File : File_Type) return Exception_T; function Get_Last_Exception_Input return Exception_T; --# global The_Standard_Input; function Get_Last_Exception_Output return Exception_T; --# global The_Standard_Output; function Get_Last_Exception_Error return Exception_T; --# global The_Standard_Error; procedure Raise_Last_Exception_File (File : in File_Type); --# derives null from File; procedure Raise_Last_Exception_Input; --# global in The_Standard_Input; --# derives null from The_Standard_Input; procedure Raise_Last_Exception_Output; --# global in The_Standard_Output; --# derives null from The_Standard_Output; procedure Raise_Last_Exception_Error; --# global in The_Standard_Error; --# derives null from The_Standard_Error; private --# hide SPARK.Ada.Text_IO; type File_Type is record The_File_Type : Standard.Ada.Text_IO.File_Type; The_Exception : Exception_T; end record; The_Standard_Input_Exception : Exception_T; The_Standard_Output_Exception : Exception_T; The_Standard_Error_Exception : Exception_T; function Get_Exception_T (The_Exception_Name : String) return Exception_T; end SPARK.Ada.Text_IO; spark-2012.0.deb/lib/spark/current/spark-ada-text_io-unbounded_string.ads0000644000175000017500000002026611753202341025367 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited 
------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ------------------------------------------------------------------------------- -- -- -- SPARK.Ada.Text_IO.Unbounded_String -- -- -- -- Description -- -- This is a binding to package Ada.Text_IO -- -- -- -- Language -- -- Specification : SPARK -- -- Private Part : Ada -- -- Body : Ada -- -- -- -- Runtime Requirements and Dependencies -- -- Full Ada Runtime -- -- -- -- Verification -- -- N/A -- -- -- -- Exceptions -- -- Functions and Look_Ahead_* procedures may raise exceptions at present. -- -- The intention is to add preconditions in future to guard against these. -- -- For details of the exceptions that can be raised please refer to -- -- Ada.Text_IO. -- -- -- -- Exceptions raised in procedures are caught and returned via the File -- -- parameter, which must be tested for exception status by calling the -- -- appropriate Get_Last_Exception_* function. The exception can be -- -- reraised by calling the appropriate Raise_Last_Exception_* procedure. -- -- -- -- IMPORTANT NOTE. When adding a new procedure to this package the -- -- procedure body must re-raise the exception if Get_Exception_T returns -- -- No_Exception, otherwise unexpected exceptions will be silently ignored. -- -- The existing procedure bodies provide examples of this. 
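--
--  A minimal caller-side sketch of the exception-handling pattern described
--  above. This example is illustrative only and is not part of the original
--  binding; it assumes an object Line of type
--  SPARK.Ada.Strings.Unbounded.Unbounded_String already exists in the
--  calling context:
--
--     SPARK.Ada.Text_IO.Unbounded_String.Put_Line_Output (Item => Line);
--     -- Any exception raised inside Put_Line_Output has been caught and
--     -- recorded against standard output, so test for it explicitly:
--     if SPARK.Ada.Text_IO.Get_Last_Exception_Output /=
--        SPARK.Ada.Text_IO.No_Exception
--     then
--        -- Re-raise the stored exception in the caller's context.
--        SPARK.Ada.Text_IO.Raise_Last_Exception_Output;
--     end if;
--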
-- -- -- ------------------------------------------------------------------------------- with SPARK.Ada.Strings.Unbounded; --# inherit SPARK.Ada.Strings.Unbounded, --# SPARK.Ada.Text_IO; package SPARK.Ada.Text_IO.Unbounded_String is -- procedure Create -- (File : in out File_Type; -- Mode : in File_Mode := Out_File; -- Name : in String := ""; -- Form : in String := ""); procedure Create (File : out Text_IO.File_Type; Mode : in Text_IO.File_Mode; Name : in Strings.Unbounded.Unbounded_String; Form : in Strings.Unbounded.Unbounded_String); --# derives File from Form, --# Mode, --# Name; --# declare delay; -- procedure Open -- (File : in out File_Type; -- Mode : in File_Mode; -- Name : in String; -- Form : in String := ""); procedure Open (File : out Text_IO.File_Type; Mode : in Text_IO.File_Mode; Name : in Strings.Unbounded.Unbounded_String; Form : in Strings.Unbounded.Unbounded_String); --# derives File from Form, --# Mode, --# Name; --# declare delay; function Name (File : Text_IO.File_Type) return Strings.Unbounded.Unbounded_String; function Form (File : Text_IO.File_Type) return Strings.Unbounded.Unbounded_String; -- procedure Put (File : in File_Type; -- Item : in String); procedure Put_File (File : in out Text_IO.File_Type; Item : in Strings.Unbounded.Unbounded_String); --# derives File from *, --# Item; --# declare delay; -- procedure Put (File : in File_Type := Standard_Output; -- Item : in String); procedure Put_Output (Item : in Strings.Unbounded.Unbounded_String); --# global in out Text_IO.The_Standard_Output; --# derives Text_IO.The_Standard_Output from *, --# Item; --# declare delay; -- procedure Put (File : in File_Type := Standard_Error; -- Item : in String); procedure Put_Error (Item : in Strings.Unbounded.Unbounded_String); --# global in out Text_IO.The_Standard_Error; --# derives Text_IO.The_Standard_Error from *, --# Item; --# declare delay; -- procedure Get_Line -- (File : in File_Type; -- Item : out String; -- Last : out Natural); procedure Procedure_Get_Line_File (File : in out Text_IO.File_Type; Item : out Strings.Unbounded.Unbounded_String; Arg_Last : out Natural); --# derives Arg_Last, --# File, --# Item from File; --# declare delay; -- procedure Get_Line -- (File : in File_Type := Standard_Input; -- Item : out String; -- Last : out Natural); procedure Procedure_Get_Line_Input (Item : out Strings.Unbounded.Unbounded_String; Arg_Last : out Natural); --# global in out Text_IO.The_Standard_Input; --# derives Arg_Last, --# Item, --# Text_IO.The_Standard_Input from Text_IO.The_Standard_Input; --# declare delay; -- function Get_Line (File : File_Type) return String; function Function_Get_Line_File (File : Text_IO.File_Type) return Strings.Unbounded.Unbounded_String; -- function Get_Line (File : File_Type := Standard_Input) return String; function Function_Get_Line_Input return Strings.Unbounded.Unbounded_String; -- procedure Put_Line -- (File : in File_Type; -- Item : in String); procedure Put_Line_File (File : in out Text_IO.File_Type; Item : in Strings.Unbounded.Unbounded_String); --# derives File from *, --# Item; --# declare delay; -- procedure Put_Line -- (File : in File_Type := Standard_Output; -- Item : in String); procedure Put_Line_Output (Item : in Strings.Unbounded.Unbounded_String); --# global in out Text_IO.The_Standard_Output; --# derives Text_IO.The_Standard_Output from *, --# Item; --# declare delay; -- procedure Put_Line -- (File : in File_Type := Standard_Error; -- Item : in String); procedure Put_Line_Error (Item : in Strings.Unbounded.Unbounded_String); 
--# global in out Text_IO.The_Standard_Error; --# derives Text_IO.The_Standard_Error from *, --# Item; --# declare delay; end SPARK.Ada.Text_IO.Unbounded_String; spark-2012.0.deb/lib/spark/current/spark-unsigned.adb0000644000175000017500000000171011753202341021406 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= spark-2012.0.deb/lib/spark/current/spark-ada-strings-maps.adb0000644000175000017500000001130611753202341022746 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= package body SPARK.Ada.Strings.Maps is --# hide SPARK.Ada.Strings.Maps; function Character_Range_To_Set (Span : Character_Range) return Character_Set is begin return Character_Set (Standard.Ada.Strings.Maps.To_Set (Span => Standard.Ada.Strings.Maps.Character_Range' (Low => Span.Low, High => Span.High))); end Character_Range_To_Set; function Operator_Equal (Left, Right : Character_Set) return Boolean is begin return Standard.Ada.Strings.Maps."=" (Left => Standard.Ada.Strings.Maps.Character_Set (Left), Right => Standard.Ada.Strings.Maps.Character_Set (Right)); end Operator_Equal; function Operator_Not (Right : Character_Set) return Character_Set is begin return Character_Set (Standard.Ada.Strings.Maps."not" (Right => Standard.Ada.Strings.Maps.Character_Set (Right))); end Operator_Not; function Operator_And (Left, Right : Character_Set) return Character_Set is begin return Character_Set (Standard.Ada.Strings.Maps."and" (Left => Standard.Ada.Strings.Maps.Character_Set (Left), Right => Standard.Ada.Strings.Maps.Character_Set (Right))); end Operator_And; function Operator_Or (Left, Right : Character_Set) return Character_Set is begin return Character_Set (Standard.Ada.Strings.Maps."or" (Left => Standard.Ada.Strings.Maps.Character_Set (Left), Right => Standard.Ada.Strings.Maps.Character_Set (Right))); end Operator_Or; function Operator_Xor (Left, Right : Character_Set) return Character_Set is begin return Character_Set (Standard.Ada.Strings.Maps."xor" (Left => Standard.Ada.Strings.Maps.Character_Set (Left), Right => Standard.Ada.Strings.Maps.Character_Set (Right))); end Operator_Xor; function Operator_Minus (Left, Right : Character_Set) return Character_Set is begin return Character_Set (Standard.Ada.Strings.Maps."-" (Left => Standard.Ada.Strings.Maps.Character_Set (Left), Right => Standard.Ada.Strings.Maps.Character_Set (Right))); end Operator_Minus; function Is_In (Arg_Element : Character; Arg_Set : Character_Set) return Boolean is begin return Standard.Ada.Strings.Maps.Is_In (Element => Arg_Element, Set => Standard.Ada.Strings.Maps.Character_Set (Arg_Set)); end Is_In; function Is_Subset (Elements : Character_Set; Arg_Set : Character_Set) return Boolean is begin return Standard.Ada.Strings.Maps.Is_Subset (Elements => Standard.Ada.Strings.Maps.Character_Set (Elements), Set => Standard.Ada.Strings.Maps.Character_Set (Arg_Set)); end Is_Subset; function Character_Sequence_To_Set (Arg_Sequence : String) return Character_Set is begin return Character_Set (Standard.Ada.Strings.Maps.To_Set (Sequence => Arg_Sequence)); end Character_Sequence_To_Set; function Singleton_To_Set (Singleton : Character) return Character_Set is begin return Character_Set (Standard.Ada.Strings.Maps.To_Set (Singleton => Singleton)); end Singleton_To_Set; function Value (Map : Character_Mapping; Arg_Element : Character) return Character is begin return Standard.Ada.Strings.Maps.Value (Map => Standard.Ada.Strings.Maps.Character_Mapping (Map), Element => Arg_Element); end Value; function To_Mapping (Arg_From, To : String) return Character_Mapping is begin return Character_Mapping (Standard.Ada.Strings.Maps.To_Mapping (To => To, From => Arg_From)); end To_Mapping; end SPARK.Ada.Strings.Maps; spark-2012.0.deb/lib/spark/current/spark-ada-strings.ads0000644000175000017500000000651311753202341022035 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited 
------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ------------------------------------------------------------------------------- -- -- -- SPARK.Ada.Strings -- -- -- -- Description -- -- This is a binding to package Ada.Strings -- -- -- -- Language -- -- Specification : SPARK -- -- Private Part : N/A -- -- Body : N/A -- -- -- -- Runtime Requirements and Dependencies -- -- No Ada Runtime -- -- -- -- Verification -- -- N/A -- -- -- -- Exceptions -- -- None -- -- -- ------------------------------------------------------------------------------- package SPARK.Ada.Strings is Space : constant Character := ' '; -- type Alignment is (Left, Right, Center); type Alignment is (Alignment_Left, Alignment_Right, Alignment_Center); -- type Truncation is (Left, Right, Error); type Truncation is (Truncation_Left, Truncation_Right, Truncation_Error); -- type Membership is (Inside, Outside); type Membership is (Membership_Inside, Membership_Outside); -- type Direction is (Forward, Backward); type Direction is (Direction_Forward, Direction_Backward); -- type Trim_End is (Left, Right, Both); type Trim_End is (Trim_End_Left, Trim_End_Right, Trim_End_Both); end SPARK.Ada.Strings; spark-2012.0.deb/lib/spark/current/spark-ada-strings-maps.ads0000644000175000017500000001525311753202341022774 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= ------------------------------------------------------------------------------- -- -- -- SPARK.Ada.Strings.Maps -- -- -- -- Description -- -- This is a binding to package Ada.Strings.Maps -- -- -- -- Language -- -- Specification : SPARK -- -- Private Part : Ada -- -- Body : Ada -- -- -- -- Runtime Requirements and Dependencies -- -- No Ada Runtime -- -- -- -- Verification -- -- N/A -- -- -- -- Exceptions -- -- Ada.Strings.Translation_Error (Guarded) -- -- -- ------------------------------------------------------------------------------- ------------------------------------------------------------------------------- -- -- A few subprograms, in particular ones involving Character_Ranges -- or Character_Sequence cannot currently be SPARK; you will find -- them in SPARK.Ada.Strings.Maps.Not_SPARK. -- ------------------------------------------------------------------------------- with Ada.Strings.Maps; package SPARK.Ada.Strings.Maps is -------------------------------- -- Character Set Declarations -- -------------------------------- type Character_Set is private; Null_Set : constant Character_Set; --------------------------- -- Constructors for Sets -- --------------------------- type Character_Range is record Low : Character; High : Character; end record; -- function To_Set (Span : Character_Range) return Character_Set; function Character_Range_To_Set (Span : Character_Range) return Character_Set; ---------------------------------- -- Operations on Character Sets -- ---------------------------------- -- function "=" (Left, Right : Character_Set) return Boolean; function Operator_Equal (Left, Right : Character_Set) return Boolean; -- function "not" (Right : Character_Set) return Character_Set; function Operator_Not (Right : Character_Set) return Character_Set; -- function "and" (Left, Right : Character_Set) return Character_Set; function Operator_And (Left, Right : Character_Set) return Character_Set; -- function "or" (Left, Right : Character_Set) return Character_Set; function Operator_Or (Left, Right : Character_Set) return Character_Set; -- function "xor" (Left, Right : Character_Set) return Character_Set; function Operator_Xor (Left, Right : Character_Set) return Character_Set; -- function "-" (Left, Right : Character_Set) return Character_Set; function Operator_Minus (Left, Right : Character_Set) return Character_Set; -- function Is_In -- (Element : Character; -- Set : Character_Set) return Boolean; function Is_In (Arg_Element : Character; Arg_Set : Character_Set) return Boolean; -- function Is_Subset -- (Elements : Character_Set; -- Set : Character_Set) return Boolean; function Is_Subset (Elements : Character_Set; Arg_Set : Character_Set) return Boolean; -- function To_Set (Sequence : Character_Sequence) return Character_Set; function Character_Sequence_To_Set (Arg_Sequence : String) return Character_Set; -- function To_Set (Singleton : Character) return Character_Set; function Singleton_To_Set (Singleton : Character) return Character_Set; ------------------------------------ -- Character Mapping Declarations -- ------------------------------------ type Character_Mapping is private; -- Representation for a character to character mapping: -- function Value -- (Map : Character_Mapping; -- Element : Character) return Character; function Value (Map : Character_Mapping; Arg_Element : Character) return Character; Identity : constant Character_Mapping; ---------------------------- -- Operations on Mappings -- 
---------------------------- -- function To_Mapping -- (From, To : Character_Sequence) return Character_Mapping; function To_Mapping (Arg_From, To : String) return Character_Mapping; --# pre (Arg_From'Length = To'Length) and --# (for all I in Positive range Arg_From'Range => --# (for all J in Positive range I + 1 .. Arg_From'Last => --# (Arg_From (I) /= Arg_From (J)))); private --# hide SPARK.Ada.Strings.Maps; type Character_Set is new Standard.Ada.Strings.Maps.Character_Set; Null_Set : constant Character_Set := Character_Set (Standard.Ada.Strings.Maps.Null_Set); type Character_Mapping is new Standard.Ada.Strings.Maps.Character_Mapping; Identity : constant Character_Mapping := Character_Mapping (Standard.Ada.Strings.Maps.Identity); end SPARK.Ada.Strings.Maps; spark-2012.0.deb/lib/spark/current/spark-ada-strings-unbounded.adb0000644000175000017500000006067711753202341024010 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
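-- Illustrative sketch (not part of the library sources): because SPARK does
-- not permit overloaded operator declarations, SPARK.Ada.Strings.Maps above
-- renames "=", "or", "not" and friends as Operator_Equal, Operator_Or,
-- Operator_Not.  The function below is invented for this example only; it
-- builds two sets, combines them with Operator_Or and queries the result
-- with Is_In.

with SPARK.Ada.Strings.Maps;

function Demo_Is_Sign_Or_Digit (C : Character) return Boolean is
   Digit_Set : SPARK.Ada.Strings.Maps.Character_Set;
   Sign_Set  : SPARK.Ada.Strings.Maps.Character_Set;
begin
   Digit_Set := SPARK.Ada.Strings.Maps.Character_Range_To_Set
     (Span => SPARK.Ada.Strings.Maps.Character_Range'(Low  => '0',
                                                      High => '9'));
   Sign_Set := SPARK.Ada.Strings.Maps.Character_Sequence_To_Set
     (Arg_Sequence => "+-");
   return SPARK.Ada.Strings.Maps.Is_In
     (Arg_Element => C,
      Arg_Set     => SPARK.Ada.Strings.Maps.Operator_Or (Left  => Digit_Set,
                                                         Right => Sign_Set));
end Demo_Is_Sign_Or_Digit;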
-- --============================================================================= with SPARK.Ada.Strings.Not_SPARK; with SPARK.Ada.Strings.Maps.Not_SPARK; package body SPARK.Ada.Strings.Unbounded is --# hide SPARK.Ada.Strings.Unbounded; pragma Warnings ("Y"); -- Turn off warnings for Ada 2005 features function Get_Length (Source : Unbounded_String) return Natural is begin return Standard.Ada.Strings.Unbounded.Length (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source)); end Get_Length; function Get_Null_Unbounded_String return Unbounded_String is begin return Null_Unbounded_String; end Get_Null_Unbounded_String; function Get_Element (Source : Unbounded_String; Index : Positive) return Character is begin return Standard.Ada.Strings.Unbounded.Element (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Index => Index); end Get_Element; function String_To_Unbounded_String (Source : String) return Unbounded_String is begin return Unbounded_String (Standard.Ada.Strings.Unbounded.To_Unbounded_String (Source => Source)); end String_To_Unbounded_String; function Length_To_Unbounded_String (Length : Natural) return Unbounded_String is begin return Unbounded_String (Standard.Ada.Strings.Unbounded."*" (Left => Length, Right => ' ')); end Length_To_Unbounded_String; -- Set_Unbounded_String is only defined in Ada 2005 procedure Set_Unbounded_String (Target : out Unbounded_String; Source : in String) is begin Standard.Ada.Strings.Unbounded.Set_Unbounded_String (Target => Standard.Ada.Strings.Unbounded.Unbounded_String (Target), Source => Source); end Set_Unbounded_String; procedure Append_Unbounded_String (Source : in out Unbounded_String; New_Item : in Unbounded_String) is begin Standard.Ada.Strings.Unbounded.Append (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), New_Item => Standard.Ada.Strings.Unbounded.Unbounded_String (New_Item)); end Append_Unbounded_String; procedure Append_String (Source : in out Unbounded_String; New_Item : in String) is begin Standard.Ada.Strings.Unbounded.Append (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), New_Item => New_Item); end Append_String; procedure Append_Char (Source : in out Unbounded_String; New_Item : in Character) is begin Standard.Ada.Strings.Unbounded.Append (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), New_Item => New_Item); end Append_Char; function Concat_Unbounded_String_Unbounded_String (Left : Unbounded_String; Right : Unbounded_String) return Unbounded_String is begin return Unbounded_String (Standard.Ada.Strings.Unbounded."&" (Left => Standard.Ada.Strings.Unbounded.Unbounded_String (Left), Right => Standard.Ada.Strings.Unbounded.Unbounded_String (Right))); end Concat_Unbounded_String_Unbounded_String; function Concat_Unbounded_String_String (Left : Unbounded_String; Right : String) return Unbounded_String is begin return Unbounded_String (Standard.Ada.Strings.Unbounded."&" (Left => Standard.Ada.Strings.Unbounded.Unbounded_String (Left), Right => Right)); end Concat_Unbounded_String_String; function Concat_String_Unbounded_String (Left : String; Right : Unbounded_String) return Unbounded_String is begin return Unbounded_String (Standard.Ada.Strings.Unbounded."&" (Left => Left, Right => Standard.Ada.Strings.Unbounded.Unbounded_String (Right))); end Concat_String_Unbounded_String; function Concat_Unbounded_String_Char (Left : Unbounded_String; Right : Character) return Unbounded_String is begin return Unbounded_String (Standard.Ada.Strings.Unbounded."&" 
(Left => Standard.Ada.Strings.Unbounded.Unbounded_String (Left), Right => Right)); end Concat_Unbounded_String_Char; function Concat_Char_Unbounded_String (Left : Character; Right : Unbounded_String) return Unbounded_String is begin return Unbounded_String (Standard.Ada.Strings.Unbounded."&" (Left => Left, Right => Standard.Ada.Strings.Unbounded.Unbounded_String (Right))); end Concat_Char_Unbounded_String; procedure Replace_Element (Source : in out Unbounded_String; Index : in Positive; By : in Character) is begin Standard.Ada.Strings.Unbounded.Replace_Element (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Index => Index, By => By); end Replace_Element; -- Unbounded_Slice is only available in Ada 2005 function Function_Unbounded_Slice (Source : Unbounded_String; Low : Positive; High : Natural) return Unbounded_String is begin return Unbounded_String (Standard.Ada.Strings.Unbounded.Unbounded_Slice (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Low => Low, High => High)); end Function_Unbounded_Slice; procedure Procedure_Unbounded_Slice (Source : in Unbounded_String; Target : out Unbounded_String; Low : in Positive; High : in Natural) is begin Standard.Ada.Strings.Unbounded.Unbounded_Slice (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Target => Standard.Ada.Strings.Unbounded.Unbounded_String (Target), Low => Low, High => High); end Procedure_Unbounded_Slice; function Equal_Unbounded_String_Unbounded_String (Left : Unbounded_String; Right : Unbounded_String) return Boolean is begin return Standard.Ada.Strings.Unbounded."=" (Left => Standard.Ada.Strings.Unbounded.Unbounded_String (Left), Right => Standard.Ada.Strings.Unbounded.Unbounded_String (Right)); end Equal_Unbounded_String_Unbounded_String; function Equal_Unbounded_String_String (Left : Unbounded_String; Right : String) return Boolean is begin return Standard.Ada.Strings.Unbounded."=" (Left => Standard.Ada.Strings.Unbounded.Unbounded_String (Left), Right => Right); end Equal_Unbounded_String_String; function Equal_String_Unbounded_String (Left : String; Right : Unbounded_String) return Boolean is begin return Standard.Ada.Strings.Unbounded."=" (Left => Left, Right => Standard.Ada.Strings.Unbounded.Unbounded_String (Right)); end Equal_String_Unbounded_String; function Less_Unbounded_String_Unbounded_String (Left : Unbounded_String; Right : Unbounded_String) return Boolean is begin return Standard.Ada.Strings.Unbounded."<" (Left => Standard.Ada.Strings.Unbounded.Unbounded_String (Left), Right => Standard.Ada.Strings.Unbounded.Unbounded_String (Right)); end Less_Unbounded_String_Unbounded_String; function Less_Unbounded_String_String (Left : Unbounded_String; Right : String) return Boolean is begin return Standard.Ada.Strings.Unbounded."<" (Left => Standard.Ada.Strings.Unbounded.Unbounded_String (Left), Right => Right); end Less_Unbounded_String_String; function Less_String_Unbounded_String (Left : String; Right : Unbounded_String) return Boolean is begin return Standard.Ada.Strings.Unbounded."<" (Left => Left, Right => Standard.Ada.Strings.Unbounded.Unbounded_String (Right)); end Less_String_Unbounded_String; function Less_Equal_Unbounded_String_Unbounded_String (Left : Unbounded_String; Right : Unbounded_String) return Boolean is begin return Standard.Ada.Strings.Unbounded."<=" (Left => Standard.Ada.Strings.Unbounded.Unbounded_String (Left), Right => Standard.Ada.Strings.Unbounded.Unbounded_String (Right)); end Less_Equal_Unbounded_String_Unbounded_String; function 
Less_Equal_Unbounded_String_String (Left : Unbounded_String; Right : String) return Boolean is begin return Standard.Ada.Strings.Unbounded."<=" (Left => Standard.Ada.Strings.Unbounded.Unbounded_String (Left), Right => Right); end Less_Equal_Unbounded_String_String; function Less_Equal_String_Unbounded_String (Left : String; Right : Unbounded_String) return Boolean is begin return Standard.Ada.Strings.Unbounded."<=" (Left => Left, Right => Standard.Ada.Strings.Unbounded.Unbounded_String (Right)); end Less_Equal_String_Unbounded_String; function Greater_Unbounded_String_Unbounded_String (Left : Unbounded_String; Right : Unbounded_String) return Boolean is begin return Standard.Ada.Strings.Unbounded.">" (Left => Standard.Ada.Strings.Unbounded.Unbounded_String (Left), Right => Standard.Ada.Strings.Unbounded.Unbounded_String (Right)); end Greater_Unbounded_String_Unbounded_String; function Greater_Unbounded_String_String (Left : Unbounded_String; Right : String) return Boolean is begin return Standard.Ada.Strings.Unbounded.">" (Left => Standard.Ada.Strings.Unbounded.Unbounded_String (Left), Right => Right); end Greater_Unbounded_String_String; function Greater_String_Unbounded_String (Left : String; Right : Unbounded_String) return Boolean is begin return Standard.Ada.Strings.Unbounded.">" (Left => Left, Right => Standard.Ada.Strings.Unbounded.Unbounded_String (Right)); end Greater_String_Unbounded_String; function Greater_Equal_Unbounded_String_Unbounded_String (Left : Unbounded_String; Right : Unbounded_String) return Boolean is begin return Standard.Ada.Strings.Unbounded.">=" (Left => Standard.Ada.Strings.Unbounded.Unbounded_String (Left), Right => Standard.Ada.Strings.Unbounded.Unbounded_String (Right)); end Greater_Equal_Unbounded_String_Unbounded_String; function Greater_Equal_Unbounded_String_String (Left : Unbounded_String; Right : String) return Boolean is begin return Standard.Ada.Strings.Unbounded.">=" (Left => Standard.Ada.Strings.Unbounded.Unbounded_String (Left), Right => Right); end Greater_Equal_Unbounded_String_String; function Greater_Equal_String_Unbounded_String (Left : String; Right : Unbounded_String) return Boolean is begin return Standard.Ada.Strings.Unbounded.">=" (Left => Left, Right => Standard.Ada.Strings.Unbounded.Unbounded_String (Right)); end Greater_Equal_String_Unbounded_String; function Index_Pattern (Source : Unbounded_String; Pattern : String; Going : Strings.Direction; Mapping : Maps.Character_Mapping) return Natural is begin return Standard.Ada.Strings.Unbounded.Index (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Pattern => Pattern, Going => SPARK.Ada.Strings.Not_SPARK.To_Direction (From => Going), Mapping => SPARK.Ada.Strings.Maps.Not_SPARK.To_Character_Mapping (From => Mapping)); end Index_Pattern; function Index_Set (Source : Unbounded_String; Arg_Set : Maps.Character_Set; Test : Strings.Membership; Going : Strings.Direction) return Natural is begin return Standard.Ada.Strings.Unbounded.Index (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Set => SPARK.Ada.Strings.Maps.Not_SPARK.To_Character_Set (From => Arg_Set), Test => SPARK.Ada.Strings.Not_SPARK.To_Membership (From => Test), Going => SPARK.Ada.Strings.Not_SPARK.To_Direction (From => Going)); end Index_Set; -- Index is only defined in Ada 2005 function Index_Pattern_From (Source : Unbounded_String; Pattern : String; Arg_From : Positive; Going : Strings.Direction; Mapping : Maps.Character_Mapping) return Natural is begin return 
Standard.Ada.Strings.Unbounded.Index (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Pattern => Pattern, From => Arg_From, Going => SPARK.Ada.Strings.Not_SPARK.To_Direction (From => Going), Mapping => SPARK.Ada.Strings.Maps.Not_SPARK.To_Character_Mapping (From => Mapping)); end Index_Pattern_From; function Index_Set_From (Source : Unbounded_String; Arg_Set : Maps.Character_Set; Arg_From : Positive; Test : Strings.Membership; Going : Strings.Direction) return Natural is begin return Standard.Ada.Strings.Unbounded.Index (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Set => SPARK.Ada.Strings.Maps.Not_SPARK.To_Character_Set (From => Arg_Set), From => Arg_From, Test => SPARK.Ada.Strings.Not_SPARK.To_Membership (From => Test), Going => SPARK.Ada.Strings.Not_SPARK.To_Direction (From => Going)); end Index_Set_From; -- Index_Non_Blank is only defined in Ada 2005 function Index_Non_Blank (Source : Unbounded_String; Going : Strings.Direction) return Natural is begin return Standard.Ada.Strings.Unbounded.Index_Non_Blank (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Going => SPARK.Ada.Strings.Not_SPARK.To_Direction (From => Going)); end Index_Non_Blank; function Index_Non_Blank_From (Source : Unbounded_String; Arg_From : Positive; Going : Strings.Direction) return Natural is begin return Standard.Ada.Strings.Unbounded.Index_Non_Blank (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), From => Arg_From, Going => SPARK.Ada.Strings.Not_SPARK.To_Direction (From => Going)); end Index_Non_Blank_From; function Count_Pattern (Source : Unbounded_String; Pattern : String; Mapping : Maps.Character_Mapping) return Natural is begin return Standard.Ada.Strings.Unbounded.Count (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Pattern => Pattern, Mapping => SPARK.Ada.Strings.Maps.Not_SPARK.To_Character_Mapping (From => Mapping)); end Count_Pattern; function Count_Set (Source : Unbounded_String; Arg_Set : Maps.Character_Set) return Natural is begin return Standard.Ada.Strings.Unbounded.Count (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Set => SPARK.Ada.Strings.Maps.Not_SPARK.To_Character_Set (From => Arg_Set)); end Count_Set; procedure Find_Token (Source : in Unbounded_String; Arg_Set : in Maps.Character_Set; Test : in Strings.Membership; Arg_First : out Positive; Arg_Last : out Natural) is begin Standard.Ada.Strings.Unbounded.Find_Token (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Set => SPARK.Ada.Strings.Maps.Not_SPARK.To_Character_Set (From => Arg_Set), Test => SPARK.Ada.Strings.Not_SPARK.To_Membership (From => Test), First => Arg_First, Last => Arg_Last); end Find_Token; function Function_Translate (Source : Unbounded_String; Mapping : Maps.Character_Mapping) return Unbounded_String is begin return Unbounded_String (Standard.Ada.Strings.Unbounded.Translate (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Mapping => SPARK.Ada.Strings.Maps.Not_SPARK.To_Character_Mapping (From => Mapping))); end Function_Translate; procedure Procedure_Translate (Source : in out Unbounded_String; Mapping : in Maps.Character_Mapping) is begin Standard.Ada.Strings.Unbounded.Translate (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Mapping => SPARK.Ada.Strings.Maps.Not_SPARK.To_Character_Mapping (From => Mapping)); end Procedure_Translate; function Function_Replace_Slice (Source : Unbounded_String; Low : Positive; High : Natural; By : String) return 
Unbounded_String is begin return Unbounded_String (Standard.Ada.Strings.Unbounded.Replace_Slice (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Low => Low, High => High, By => By)); end Function_Replace_Slice; procedure Procedure_Replace_Slice (Source : in out Unbounded_String; Low : in Positive; High : in Natural; By : in String) is begin Standard.Ada.Strings.Unbounded.Replace_Slice (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Low => Low, High => High, By => By); end Procedure_Replace_Slice; function Function_Insert (Source : Unbounded_String; Before : Positive; New_Item : String) return Unbounded_String is begin return Unbounded_String (Standard.Ada.Strings.Unbounded.Insert (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Before => Before, New_Item => New_Item)); end Function_Insert; procedure Procedure_Insert (Source : in out Unbounded_String; Before : in Positive; New_Item : in String) is begin Standard.Ada.Strings.Unbounded.Insert (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Before => Before, New_Item => New_Item); end Procedure_Insert; function Function_Overwrite (Source : Unbounded_String; Position : Positive; New_Item : String) return Unbounded_String is begin return Unbounded_String (Standard.Ada.Strings.Unbounded.Overwrite (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Position => Position, New_Item => New_Item)); end Function_Overwrite; procedure Procedure_Overwrite (Source : in out Unbounded_String; Position : in Positive; New_Item : in String) is begin Standard.Ada.Strings.Unbounded.Overwrite (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Position => Position, New_Item => New_Item); end Procedure_Overwrite; function Function_Delete (Source : Unbounded_String; Arg_From : Positive; Through : Natural) return Unbounded_String is begin return Unbounded_String (Standard.Ada.Strings.Unbounded.Delete (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), From => Arg_From, Through => Through)); end Function_Delete; procedure Procedure_Delete (Source : in out Unbounded_String; Arg_From : in Positive; Through : in Natural) is begin Standard.Ada.Strings.Unbounded.Delete (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), From => Arg_From, Through => Through); end Procedure_Delete; function Function_Trim_Side (Source : Unbounded_String; Side : Trim_End) return Unbounded_String is begin return Unbounded_String (Standard.Ada.Strings.Unbounded.Trim (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Side => SPARK.Ada.Strings.Not_SPARK.To_Trim_End (From => Side))); end Function_Trim_Side; procedure Procedure_Trim_Side (Source : in out Unbounded_String; Side : in Trim_End) is begin Standard.Ada.Strings.Unbounded.Trim (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Side => SPARK.Ada.Strings.Not_SPARK.To_Trim_End (From => Side)); end Procedure_Trim_Side; function Function_Trim_Character_Set (Source : Unbounded_String; Left : Maps.Character_Set; Right : Maps.Character_Set) return Unbounded_String is begin return Unbounded_String (Standard.Ada.Strings.Unbounded.Trim (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Left => SPARK.Ada.Strings.Maps.Not_SPARK.To_Character_Set (From => Left), Right => SPARK.Ada.Strings.Maps.Not_SPARK.To_Character_Set (From => Right))); end Function_Trim_Character_Set; procedure Procedure_Trim_Character_Set (Source : in out Unbounded_String; Left : in 
Maps.Character_Set; Right : in Maps.Character_Set) is begin Standard.Ada.Strings.Unbounded.Trim (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Left => SPARK.Ada.Strings.Maps.Not_SPARK.To_Character_Set (From => Left), Right => SPARK.Ada.Strings.Maps.Not_SPARK.To_Character_Set (From => Right)); end Procedure_Trim_Character_Set; function Function_Head (Source : Unbounded_String; Count : Natural; Pad : Character) return Unbounded_String is begin return Unbounded_String (Standard.Ada.Strings.Unbounded.Head (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Count => Count, Pad => Pad)); end Function_Head; procedure Procedure_Head (Source : in out Unbounded_String; Count : in Natural; Pad : in Character) is begin Standard.Ada.Strings.Unbounded.Head (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Count => Count, Pad => Pad); end Procedure_Head; function Function_Tail (Source : Unbounded_String; Count : Natural; Pad : Character) return Unbounded_String is begin return Unbounded_String (Standard.Ada.Strings.Unbounded.Tail (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Count => Count, Pad => Pad)); end Function_Tail; procedure Procedure_Tail (Source : in out Unbounded_String; Count : in Natural; Pad : in Character) is begin Standard.Ada.Strings.Unbounded.Tail (Source => Standard.Ada.Strings.Unbounded.Unbounded_String (Source), Count => Count, Pad => Pad); end Procedure_Tail; function Times_Char (Left : Natural; Right : Character) return Unbounded_String is begin return Unbounded_String (Standard.Ada.Strings.Unbounded."*" (Left => Left, Right => Right)); end Times_Char; function Times_String (Left : Natural; Right : String) return Unbounded_String is begin return Unbounded_String (Standard.Ada.Strings.Unbounded."*" (Left => Left, Right => Right)); end Times_String; function Times_Unbounded_String (Left : Natural; Right : Unbounded_String) return Unbounded_String is begin return Unbounded_String (Standard.Ada.Strings.Unbounded."*" (Left => Left, Right => Standard.Ada.Strings.Unbounded.Unbounded_String (Right))); end Times_Unbounded_String; end SPARK.Ada.Strings.Unbounded; spark-2012.0.deb/lib/spark/current/spark-ada-strings-maps-not_spark.adb0000644000175000017500000000612411753202341024746 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
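-- Illustrative sketch (not part of the library sources): each operation in
-- the SPARK.Ada.Strings.Unbounded body above simply forwards to its
-- counterpart in Standard.Ada.Strings.Unbounded.  The procedure below is
-- invented for this example only; it shows the usual build-then-measure
-- pattern of a client.

with SPARK.Ada.Strings.Unbounded;

procedure Demo_Build_Greeting (Length : out Natural) is
   Buffer : SPARK.Ada.Strings.Unbounded.Unbounded_String;
begin
   Buffer := SPARK.Ada.Strings.Unbounded.String_To_Unbounded_String
     (Source => "Hello");
   SPARK.Ada.Strings.Unbounded.Append_Char
     (Source => Buffer, New_Item => ',');
   SPARK.Ada.Strings.Unbounded.Append_String
     (Source => Buffer, New_Item => " world");
   Length := SPARK.Ada.Strings.Unbounded.Get_Length (Source => Buffer);
   -- Length is now 12 ("Hello" & ',' & " world").
end Demo_Build_Greeting;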
-- --============================================================================= package body SPARK.Ada.Strings.Maps.Not_SPARK is --# hide SPARK.Ada.Strings.Maps.Not_SPARK; function To_Set (Ranges : Character_Ranges) return Character_Set is begin return Character_Set (Standard.Ada.Strings.Maps.To_Set (Ranges => Standard.Ada.Strings.Maps.Character_Ranges (Ranges))); end To_Set; function To_Ranges (Set : Character_Set) return Character_Ranges is begin return Character_Ranges (Standard.Ada.Strings.Maps.To_Ranges (Set => Standard.Ada.Strings.Maps.Character_Set (Set))); end To_Ranges; function To_Sequence (Set : Character_Set) return Character_Sequence is begin return Standard.Ada.Strings.Maps.To_Sequence (Set => Standard.Ada.Strings.Maps.Character_Set (Set)); end To_Sequence; function To_Domain (Map : Character_Mapping) return Character_Sequence is begin return Standard.Ada.Strings.Maps.To_Domain (Map => Standard.Ada.Strings.Maps.Character_Mapping (Map)); end To_Domain; function To_Range (Map : Character_Mapping) return Character_Sequence is begin return Standard.Ada.Strings.Maps.To_Range (Map => Standard.Ada.Strings.Maps.Character_Mapping (Map)); end To_Range; ------------------------------------------------ -- Conversion functions from SPARK.Ada to Ada -- ------------------------------------------------ function To_Character_Set (From : Character_Set) return Standard.Ada.Strings.Maps.Character_Set is begin return Standard.Ada.Strings.Maps.Character_Set (From); end To_Character_Set; function To_Character_Range (From : Character_Range) return Standard.Ada.Strings.Maps.Character_Range is begin return Standard.Ada.Strings.Maps.Character_Range'(Low => From.Low, High => From.High); end To_Character_Range; function To_Character_Mapping (From : Character_Mapping) return Standard.Ada.Strings.Maps.Character_Mapping is begin return Standard.Ada.Strings.Maps.Character_Mapping (From); end To_Character_Mapping; end SPARK.Ada.Strings.Maps.Not_SPARK; spark-2012.0.deb/lib/spark/current/spark-ada-containers.ads0000644000175000017500000000600511753202341022505 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
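-- Illustrative sketch (not part of the library sources): the Not_SPARK
-- child units above exist so that code outside the SPARK boundary can
-- recover the underlying Ada.Strings.Maps values.  The function below is
-- invented for this example only; it builds a set through the SPARK
-- binding and then queries it with the standard library.

with Ada.Strings.Maps;
with SPARK.Ada.Strings.Maps;
with SPARK.Ada.Strings.Maps.Not_SPARK;

function Demo_Is_Vowel (C : Character) return Boolean is
   Vowels : SPARK.Ada.Strings.Maps.Character_Set;
begin
   Vowels := SPARK.Ada.Strings.Maps.Character_Sequence_To_Set
     (Arg_Sequence => "aeiou");
   return Ada.Strings.Maps.Is_In
     (Element => C,
      Set     => SPARK.Ada.Strings.Maps.Not_SPARK.To_Character_Set
                   (From => Vowels));
end Demo_Is_Vowel;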
--
--=============================================================================

-------------------------------------------------------------------------------
--                                                                           --
--  SPARK.Ada.Containers                                                     --
--                                                                           --
--  Description                                                              --
--    This is a binding to package Ada.Containers                            --
--                                                                           --
--  Language                                                                 --
--    Specification : SPARK                                                  --
--    Private Part  : N/A                                                    --
--    Body          : N/A                                                    --
--                                                                           --
--  Runtime Requirements and Dependencies                                    --
--    No Ada Runtime                                                         --
--                                                                           --
--  Verification                                                             --
--    N/A                                                                    --
--                                                                           --
--  Exceptions                                                               --
--    None                                                                   --
--                                                                           --
-------------------------------------------------------------------------------

package SPARK.Ada.Containers is

   -- These declarations comply with the minimum range required by
   -- the Ada2005 LRM, and also agree with the ranges used by GNAT Pro.
   type Hash_Type is mod 2 ** 32;
   type Count_Type is range 0 .. 2 ** 31 - 1;

   -- GNAT's Capacity_Error exception is not supported in SPARK.

end SPARK.Ada.Containers;
spark-2012.0.deb/lib/spark/current/spark-crypto-hash-skein.ads0000644000175000017500000003304411753202341023170 0ustar eugeneugen
-------------------------------------------------------------------------------
-- (C) Altran Praxis Limited
-------------------------------------------------------------------------------
--
-- The SPARK toolset is free software; you can redistribute it and/or modify it
-- under terms of the GNU General Public License as published by the Free
-- Software Foundation; either version 3, or (at your option) any later
-- version. The SPARK toolset is distributed in the hope that it will be
-- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
-- Public License for more details. You should have received a copy of the GNU
-- General Public License distributed with the SPARK toolset; see file
-- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of
-- the license.
--
--=============================================================================

-------------------------------------------------------------------------------
--                                                                           --
--  SPARK.Crypto.Hash.Skein                                                  --
--                                                                           --
--  Description                                                              --
--                                                                           --
--    Skein hash function. Derived from the SPARKSkein release originally    --
--    appearing on www.skein-hash.info                                       --
--                                                                           --
--    Currently, this package only implements the main Skein hash function   --
--    with a 512-bit block size                                              --
--                                                                           --
--  Language                                                                 --
--    Specification : SPARK                                                  --
--    Private Part  : SPARK                                                  --
--    Body          : SPARK                                                  --
--                                                                           --
--  Runtime Requirements and Dependencies                                    --
--    None                                                                   --
--                                                                           --
--  Verification                                                             --
--    Full proof of type-safety with Simplifier and Victor. Clients must     --
--    respect the stated preconditions.                                      --
--                                                                           --
--  Exceptions                                                               --
--    None                                                                   --
-------------------------------------------------------------------------------

with SPARK;
with SPARK.Crypto;
with SPARK.Unsigned;
with System;

use type SPARK.Unsigned.U64;

--# inherit Ada.Unchecked_Conversion,
--#         SPARK,
--#         SPARK.Crypto,
--#         SPARK.Unsigned,
--#         SPARK.Crypto.Hash,
--#         System;

package SPARK.Crypto.Hash.Skein is

   -- We limit the length of the output hash to U64'Last - 7 to
   -- avoid overflow in the calculation of the number of bytes needed
   -- in Skein_512_Final. This is an undocumented limitation of the reference
   -- implementation in C. The value 0 is used to indicate a context
   -- that has not yet been initialized.
   subtype Hash_Bit_Length is Natural range 0 .. Natural'Last - 7;

   subtype Initialized_Hash_Bit_Length is Hash_Bit_Length range 1 ..
Hash_Bit_Length'Last; ------------------------------------------------------------------- -- Constants and types specific to Skein_512 ------------------------------------------------------------------- Skein_512_State_Words_C : constant := 8; Skein_512_State_Bytes_C : constant := 8 * Skein_512_State_Words_C; Skein_512_State_Bits_C : constant := 64 * Skein_512_State_Words_C; Skein_512_Block_Bytes_C : constant := 8 * Skein_512_State_Words_C; subtype Skein_512_State_Words_Index is Natural range 0 .. (Skein_512_State_Words_C - 1); subtype Skein_512_State_Words is Crypto.U64_Seq (Skein_512_State_Words_Index); subtype Skein_512_Block_Bytes_Count is Natural range 0 .. Skein_512_Block_Bytes_C; subtype Skein_512_Block_Bytes_Index is Natural range 0 .. (Skein_512_Block_Bytes_C - 1); subtype Skein_512_Block_Bytes is Crypto.Byte_Seq (Skein_512_Block_Bytes_Index); subtype Skein_512_State_Bytes_Index is Natural range 0 .. (Skein_512_State_Bytes_C - 1); subtype Skein_512_State_Bytes is Crypto.Byte_Seq (Skein_512_State_Bytes_Index); -- (Natural'Last + 1) bytes is (Natural'Last + 1) / 64 512-bit blocks subtype Block_512_Count_T is Natural range 0 .. ((Natural'Last + 1) / 64 - 1); subtype Positive_Block_512_Count_T is Natural range 1 .. Block_512_Count_T'Last; -- Make the context limited private to prevent assignment and comparison -- of contexts. These operations almost certainly don't make sense. type Skein_512_Context is limited private; ------------------------------------------------------------------- -- Proof functions that yield properties of a Skein_512_Context -- These form a simple refinement relation between the private -- and full views of this type. These are used below to specify -- particular pre- and post-conditions on a Context, but without -- having to make the entire type publically visible. ------------------------------------------------------------------- --# function Hash_Bit_Len_Of (Ctx : in Skein_512_Context) return Hash_Bit_Length; --# function Byte_Count_Of (Ctx : in Skein_512_Context) return Natural; ------------------------------------------------------------------- -- Skein 512 Exported Operations ------------------------------------------------------------------- procedure Skein_512_Init (Ctx : out Skein_512_Context; HashBitLen : in Initialized_Hash_Bit_Length); --# derives Ctx from HashBitLen; --# post Hash_Bit_Len_Of (Ctx) in Initialized_Hash_Bit_Length and --# Hash_Bit_Len_Of (Ctx) = HashBitLen and --# Byte_Count_Of (Ctx) = 0 and --# Byte_Count_Of (Ctx) in Skein_512_Block_Bytes_Count; procedure Skein_512_Update (Ctx : in out Skein_512_Context; Msg : in Crypto.Byte_Seq); --# derives Ctx from Ctx, Msg; --# pre Hash_Bit_Len_Of (Ctx) in Initialized_Hash_Bit_Length and --# Byte_Count_Of (Ctx) in Skein_512_Block_Bytes_Count and --# Msg'First = 0 and --# Msg'Last < Natural'Last and --# Msg'Last + Skein_512_Block_Bytes_C < Natural'Last; --# post Hash_Bit_Len_Of (Ctx) in Initialized_Hash_Bit_Length and --# Hash_Bit_Len_Of (Ctx) = Hash_Bit_Len_Of (Ctx~) and --# Byte_Count_Of (Ctx) in Skein_512_Block_Bytes_Count; procedure Skein_512_Final (Ctx : in Skein_512_Context; Result : out Crypto.Byte_Seq); --# derives Result from Ctx; --# pre Hash_Bit_Len_Of (Ctx) in Initialized_Hash_Bit_Length and --# Byte_Count_Of (Ctx) in Skein_512_Block_Bytes_Count and --# Result'First = 0 and --# (Hash_Bit_Len_Of (Ctx) + 7) / 8 <= Result'Last + 1; -- Returns a 512-bit hash of Data using 512-bit block size. 
   function Skein_512_Hash (Data : in Crypto.Byte_Seq)
     return Skein_512_State_Bytes;
   --# pre Data'First = 0 and
   --#     Data'Last + Skein_512_Block_Bytes_C < Natural'Last;

private

   Skein_Max_State_Words_C : constant := 16;

   Skein_Modifier_Words_C : constant := 2; -- number of modifier (tweak) words

   subtype Modifier_Words_Index is Natural range 0 .. (Skein_Modifier_Words_C - 1);
   subtype Modifier_Words is Crypto.U64_Seq (Modifier_Words_Index);

   -- Constant for values of Field_Type below. Could use an
   -- enumeration type here with a non-standard representation, but
   -- this can be slow.
   Skein_Block_Type_Key   : constant Unsigned.U6 := 0;  -- key, for MAC and KDF
   Skein_Block_Type_Cfg   : constant Unsigned.U6 := 4;  -- configuration block
   Skein_Block_Type_Pers  : constant Unsigned.U6 := 8;  -- personalization string
   Skein_Block_Type_PK    : constant Unsigned.U6 := 12; -- public key (for digital signature hashing)
   Skein_Block_Type_KDF   : constant Unsigned.U6 := 16; -- key identifier for KDF
   Skein_Block_Type_Nonce : constant Unsigned.U6 := 20; -- nonce for PRNG
   Skein_Block_Type_Msg   : constant Unsigned.U6 := 48; -- message processing
   Skein_Block_Type_Out   : constant Unsigned.U6 := 63; -- output stage
   Skein_Block_Type_Mask  : constant Unsigned.U6 := 63; -- bit field mask

   -- System_Default_Bit_Order (SDBO for short)
   -- Set up this constant so that
   --   0 = Little-endian
   --   1 = Big-endian
   SDBO : constant := 1 - System.Bit_Order'Pos (System.Default_Bit_Order);

   -- NOTE - in the declaration of three "one bit" fields here, it seems
   -- more natural to use Boolean than modular integer types. To meet
   -- the Skein spec, this relies on the fact that False is represented
   -- by the value 0, and True is represented by the value 1.
   --
   -- This behaviour is implied by AARM 13.4(8) and is known to be OK
   -- for all known implementations.
   type Tweak_Value is record
      Byte_Count_LSB : Unsigned.U64;
      Byte_Count_MSB : Unsigned.U32;
      Reserved       : Unsigned.U16;
      Tree_Level     : Unsigned.U7;
      Bit_Pad        : Boolean;
      Field_Type     : Unsigned.U6;
      First_Block    : Boolean;
      Final_Block    : Boolean;
   end record;

   ----------------------------------------------------------------------------
   -- See Skein Specification, Table 5.
   --
   -- On a LITTLE ENDIAN machine, we lay out this record exactly as specified
   -- in Table 5 of the specification.
   --
   -- On a BIG ENDIAN machine, we swap the bytes of T1 (the second 64-bit word)
   -- around, so that when Unchecked_Converted to Modifier_Words, the second
   -- word has its MSB where expected.
   --
   -- For example, we expect Final_Block to be the most-significant bit, so
   -- this is furthest "up" away from the base of the record on a little-endian
   -- machine, at bit position 127.
   --
   -- On a big-endian machine, we need to place Final_Block where the MSB
   -- will be _after_ conversion to words, so we place it "nearest" the base
   -- of the second word at bit position 64.
   --
   -- SDBO has value 0 (little-endian) or 1 (big-endian), so we can use
   -- it to apply the necessary adjustment to the bit positions below.
   ----------------------------------------------------------------------------
   for Tweak_Value use record
      Byte_Count_LSB at 0 range 0 .. 63;
      Byte_Count_MSB at 0 range 64 + (SDBO * 32) .. 64 + (SDBO * 32) + 31;   -- 32 bits
      Reserved       at 0 range 96 - (SDBO * 16) .. 96 - (SDBO * 16) + 15;   -- 16 bits
      Tree_Level     at 0 range 112 - (SDBO * 39) .. 112 - (SDBO * 39) + 6;  -- 7 bits
      Bit_Pad        at 0 range 119 - (SDBO * 47) .. 119 - (SDBO * 47) + 0;  -- 1 bit
      Field_Type     at 0 range 120 - (SDBO * 54) .. 120 - (SDBO * 54) + 5;  -- 6 bits
      First_Block    at 0 range 126 - (SDBO * 61) ..
126 - (SDBO * 61) + 0; -- 1 bit Final_Block at 0 range 127 - (SDBO * 63) .. 127 - (SDBO * 63) + 0; -- 1 bit end record; for Tweak_Value'Size use 128; for Tweak_Value'Alignment use 8; Null_Tweak_Value : constant Tweak_Value := Tweak_Value'(Byte_Count_LSB => 0, Byte_Count_MSB => 0, Reserved => 0, Tree_Level => 0, Bit_Pad => False, Field_Type => 0, First_Block => False, Final_Block => False); -- Context header common to all block sizes type Context_Header is record Tweak_Words : Tweak_Value; -- size of hash result, in bits. 0 = not yet initialized Hash_Bit_Len : Hash_Bit_Length; -- Current byte count in buffer - actual range depends on -- block size. -- In Skein_256, Byte_Count is range 0 .. 32; -- In Skein_512, Byte_Count is range 0 .. 64; -- In Skein_1024, Byte_Count is range 0 .. 128; -- -- These constraints are asserted as preconditions -- on the specific _Init, _Update, and _Final -- procedures above for each block size. Byte_Count : Natural; end record; Null_Context_Header : constant Context_Header := Context_Header'(Hash_Bit_Len => 0, Byte_Count => 0, Tweak_Words => Null_Tweak_Value); ------------------------------------------------------------------- -- Constants and types specific to Skein_512 ------------------------------------------------------------------- type Skein_512_Context is record -- 512-bit Skein hash context structure H : Context_Header; -- common header context variables X : Skein_512_State_Words; -- chaining variables B : Skein_512_Block_Bytes; -- partial block buffer (8-byte aligned) end record; for Skein_512_Context'Alignment use 64; Null_Skein_512_Context : constant Skein_512_Context := Skein_512_Context'(H => Null_Context_Header, X => Skein_512_State_Words'(others => 0), B => Skein_512_Block_Bytes'(others => 0)); end SPARK.Crypto.Hash.Skein; spark-2012.0.deb/lib/spark/current/spark-unsigned.ads0000644000175000017500000002206611753202341021436 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ------------------------------------------------------------------------------- -- -- -- SPARK.Unsigned -- -- -- -- Description -- -- -- -- This package is Ada, not SPARK. Do not submit it to the Examiner. -- -- -- -- It supplies renamings of the Intrinsic Shift and Rotate functions -- -- declared in Interfaces, so that these may be used from a SPARK program -- -- via the shadow specifiction of this package. 
--                                                                           --
--    The content of this specification must be kept in sync with its       --
--    shadow specification in spark-interfaces.shs                          --
--                                                                           --
--  Language                                                                 --
--    Specification : Ada                                                    --
--    Private Part  : Ada                                                    --
--    Body          : Ada                                                    --
--                                                                           --
--  Runtime Requirements and Dependencies                                    --
--    No Ada Runtime, assuming Shift_ and Rotate_ functions are              --
--    Intrinsic on the target platform.                                      --
--                                                                           --
--  Verification                                                             --
--    N/A                                                                    --
--                                                                           --
--  Exceptions                                                               --
--    None                                                                   --
--                                                                           --
-------------------------------------------------------------------------------

with Interfaces;

package SPARK.Unsigned is

   --------------------------------------------------------------------
   -- Shorthand names for common unsigned types
   --------------------------------------------------------------------

   type U6 is mod 2**6;
   type U7 is mod 2**7;

   subtype Byte is Interfaces.Unsigned_8;
   subtype U16  is Interfaces.Unsigned_16;
   subtype U32  is Interfaces.Unsigned_32;
   subtype U64  is Interfaces.Unsigned_64;

   --------------------------------------------------------------------
   -- Shift and Rotate functions
   --
   -- These functions supply a non-overloaded, and therefore
   -- SPARK-compatible, declaration of the standard Shift and
   -- Rotate functions for the standard modular types.
   --
   -- For the compiler, this package specification
   -- supplies these declarations as renamings of the (overloaded)
   -- functions in package Interfaces, thus yielding the efficiency
   -- of the Intrinsic functions.
   --
   -- The de-overloading scheme used is to replace function
   -- XXX for type Unsigned_N with a function called
   -- XXX_N which renames the original entity.
   --------------------------------------------------------------------

   -- Interfaces uses "Natural" for the Amount parameter of each
   -- function below, but we choose to introduce a named subtype here
   -- to ease RTC proof of calling units in SPARK.
   subtype Shift_Count is Natural range 0 ..
64; -- Rotate towards MSB function Rotate_Left_8 (Value : Interfaces.Unsigned_8; Amount : Shift_Count) return Interfaces.Unsigned_8 renames Interfaces.Rotate_Left; -- Rotate towards LSB function Rotate_Right_8 (Value : Interfaces.Unsigned_8; Amount : Shift_Count) return Interfaces.Unsigned_8 renames Interfaces.Rotate_Right; -- Shift towards MSB function Shift_Left_8 (Value : Interfaces.Unsigned_8; Amount : Shift_Count) return Interfaces.Unsigned_8 renames Interfaces.Shift_Left; -- Shift towards LSB function Shift_Right_8 (Value : Interfaces.Unsigned_8; Amount : Shift_Count) return Interfaces.Unsigned_8 renames Interfaces.Shift_Right; -- Arithmetic Shift towards LSB function Shift_Right_Arithmetic_8 (Value : Interfaces.Unsigned_8; Amount : Shift_Count) return Interfaces.Unsigned_8 renames Interfaces.Shift_Right_Arithmetic; -- Rotate towards MSB function Rotate_Left_16 (Value : Interfaces.Unsigned_16; Amount : Shift_Count) return Interfaces.Unsigned_16 renames Interfaces.Rotate_Left; -- Rotate towards LSB function Rotate_Right_16 (Value : Interfaces.Unsigned_16; Amount : Shift_Count) return Interfaces.Unsigned_16 renames Interfaces.Rotate_Right; -- Shift towards MSB function Shift_Left_16 (Value : Interfaces.Unsigned_16; Amount : Shift_Count) return Interfaces.Unsigned_16 renames Interfaces.Shift_Left; -- Shift towards LSB function Shift_Right_16 (Value : Interfaces.Unsigned_16; Amount : Shift_Count) return Interfaces.Unsigned_16 renames Interfaces.Shift_Right; -- Arithmetic Shift towards LSB function Shift_Right_Arithmetic_16 (Value : Interfaces.Unsigned_16; Amount : Shift_Count) return Interfaces.Unsigned_16 renames Interfaces.Shift_Right_Arithmetic; -- Rotate towards MSB function Rotate_Left_32 (Value : Interfaces.Unsigned_32; Amount : Shift_Count) return Interfaces.Unsigned_32 renames Interfaces.Rotate_Left; -- Rotate towards LSB function Rotate_Right_32 (Value : Interfaces.Unsigned_32; Amount : Shift_Count) return Interfaces.Unsigned_32 renames Interfaces.Rotate_Right; -- Shift towards MSB function Shift_Left_32 (Value : Interfaces.Unsigned_32; Amount : Shift_Count) return Interfaces.Unsigned_32 renames Interfaces.Shift_Left; -- Shift towards LSB function Shift_Right_32 (Value : Interfaces.Unsigned_32; Amount : Shift_Count) return Interfaces.Unsigned_32 renames Interfaces.Shift_Right; -- Arithmetic Shift towards LSB function Shift_Right_Arithmetic_32 (Value : Interfaces.Unsigned_32; Amount : Shift_Count) return Interfaces.Unsigned_32 renames Interfaces.Shift_Right_Arithmetic; -- Rotate towards MSB function Rotate_Left_64 (Value : Interfaces.Unsigned_64; Amount : Shift_Count) return Interfaces.Unsigned_64 renames Interfaces.Rotate_Left; -- Rotate towards LSB function Rotate_Right_64 (Value : Interfaces.Unsigned_64; Amount : Shift_Count) return Interfaces.Unsigned_64 renames Interfaces.Rotate_Right; -- Shift towards MSB function Shift_Left_64 (Value : Interfaces.Unsigned_64; Amount : Shift_Count) return Interfaces.Unsigned_64 renames Interfaces.Shift_Left; -- Shift towards LSB function Shift_Right_64 (Value : Interfaces.Unsigned_64; Amount : Shift_Count) return Interfaces.Unsigned_64 renames Interfaces.Shift_Right; -- Arithmetic Shift towards LSB function Shift_Right_Arithmetic_64 (Value : Interfaces.Unsigned_64; Amount : Shift_Count) return Interfaces.Unsigned_64 renames Interfaces.Shift_Right_Arithmetic; -------------------------------------------------------------------- -- Endian-ness conversion functions -------------------------------------------------------------------- -- Returns 
W in Little-Endian format. -- -- On a machine which is naturally Little-Endian, this function is a no-op. -- On a big-endian machine, the 8 bytes of W are reversed. function To_LittleEndian (W : in Interfaces.Unsigned_64) return Interfaces.Unsigned_64; pragma Inline (To_LittleEndian); end SPARK.Unsigned; spark-2012.0.deb/lib/spark/current/spark-ada-text_io-unbounded_string.adb0000644000175000017500000002256611753202341025353 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= with Ada.Exceptions; with SPARK.Ada.Strings.Unbounded.Not_SPARK; with SPARK.Ada.Text_IO.Not_SPARK; package body SPARK.Ada.Text_IO.Unbounded_String is --# hide SPARK.Ada.Text_IO.Unbounded_String; pragma Warnings ("Y"); -- Turn off warnings for Ada 2005 features procedure Create (File : out Text_IO.File_Type; Mode : in Text_IO.File_Mode; Name : in Strings.Unbounded.Unbounded_String; Form : in Strings.Unbounded.Unbounded_String) is begin Standard.Ada.Text_IO.Create (File => File.The_File_Type, Mode => SPARK.Ada.Text_IO.Not_SPARK.To_File_Mode (From => Mode), Name => SPARK.Ada.Strings.Unbounded.Not_SPARK.To_String (Source => Name), Form => SPARK.Ada.Strings.Unbounded.Not_SPARK.To_String (Source => Form)); File.The_Exception := No_Exception; exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end Create; procedure Open (File : out Text_IO.File_Type; Mode : in Text_IO.File_Mode; Name : in Strings.Unbounded.Unbounded_String; Form : in Strings.Unbounded.Unbounded_String) is begin Standard.Ada.Text_IO.Open (File => File.The_File_Type, Mode => SPARK.Ada.Text_IO.Not_SPARK.To_File_Mode (From => Mode), Name => SPARK.Ada.Strings.Unbounded.Not_SPARK.To_String (Source => Name), Form => SPARK.Ada.Strings.Unbounded.Not_SPARK.To_String (Source => Form)); File.The_Exception := No_Exception; exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end Open; function Name (File : Text_IO.File_Type) return Strings.Unbounded.Unbounded_String is begin return SPARK.Ada.Strings.Unbounded.String_To_Unbounded_String (Source => Standard.Ada.Text_IO.Name (File => File.The_File_Type)); end Name; function Form (File : Text_IO.File_Type) return Strings.Unbounded.Unbounded_String is begin return SPARK.Ada.Strings.Unbounded.String_To_Unbounded_String (Source => Standard.Ada.Text_IO.Form (File => File.The_File_Type)); end Form; procedure 
Put_File (File : in out Text_IO.File_Type; Item : in Strings.Unbounded.Unbounded_String) is begin Standard.Ada.Text_IO.Put (File => File.The_File_Type, Item => SPARK.Ada.Strings.Unbounded.Not_SPARK.To_String (Source => Item)); File.The_Exception := No_Exception; exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end Put_File; procedure Put_Output (Item : in Strings.Unbounded.Unbounded_String) is begin Standard.Ada.Text_IO.Put (File => Standard.Ada.Text_IO.Standard_Output, Item => SPARK.Ada.Strings.Unbounded.Not_SPARK.To_String (Source => Item)); The_Standard_Output_Exception := No_Exception; exception when Event : others => The_Standard_Output_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Output_Exception = No_Exception then raise; end if; end Put_Output; procedure Put_Error (Item : in Strings.Unbounded.Unbounded_String) is begin Standard.Ada.Text_IO.Put (File => Standard.Ada.Text_IO.Standard_Error, Item => SPARK.Ada.Strings.Unbounded.Not_SPARK.To_String (Source => Item)); The_Standard_Error_Exception := No_Exception; exception when Event : others => The_Standard_Error_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Error_Exception = No_Exception then raise; end if; end Put_Error; procedure Procedure_Get_Line_File (File : in out Text_IO.File_Type; Item : out Strings.Unbounded.Unbounded_String; Arg_Last : out Natural) is Tmp_Item : constant String := Standard.Ada.Text_IO.Get_Line (File => File.The_File_Type); begin Item := SPARK.Ada.Strings.Unbounded.String_To_Unbounded_String (Source => Tmp_Item); Arg_Last := SPARK.Ada.Strings.Unbounded.Get_Length (Source => Item); File.The_Exception := No_Exception; exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end Procedure_Get_Line_File; procedure Procedure_Get_Line_Input (Item : out Strings.Unbounded.Unbounded_String; Arg_Last : out Natural) is Tmp_Item : constant String := Standard.Ada.Text_IO.Get_Line (File => Standard.Ada.Text_IO.Standard_Input); begin Item := SPARK.Ada.Strings.Unbounded.String_To_Unbounded_String (Source => Tmp_Item); Arg_Last := SPARK.Ada.Strings.Unbounded.Get_Length (Source => Item); The_Standard_Input_Exception := No_Exception; exception when Event : others => The_Standard_Input_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Input_Exception = No_Exception then raise; end if; end Procedure_Get_Line_Input; function Function_Get_Line_File (File : Text_IO.File_Type) return Strings.Unbounded.Unbounded_String is begin return SPARK.Ada.Strings.Unbounded.String_To_Unbounded_String (Source => Standard.Ada.Text_IO.Get_Line (File => File.The_File_Type)); end Function_Get_Line_File; function Function_Get_Line_Input return Strings.Unbounded.Unbounded_String is begin return SPARK.Ada.Strings.Unbounded.String_To_Unbounded_String (Source => Standard.Ada.Text_IO.Get_Line (File => Standard.Ada.Text_IO.Standard_Input)); end Function_Get_Line_Input; procedure Put_Line_File (File : in out Text_IO.File_Type; Item : in Strings.Unbounded.Unbounded_String) is begin Standard.Ada.Text_IO.Put_Line (File => File.The_File_Type, 
Item => SPARK.Ada.Strings.Unbounded.Not_SPARK.To_String (Source => Item)); File.The_Exception := No_Exception; exception when Event : others => File.The_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if File.The_Exception = No_Exception then raise; end if; end Put_Line_File; procedure Put_Line_Output (Item : in Strings.Unbounded.Unbounded_String) is begin Standard.Ada.Text_IO.Put_Line (File => Standard.Ada.Text_IO.Standard_Output, Item => SPARK.Ada.Strings.Unbounded.Not_SPARK.To_String (Source => Item)); The_Standard_Output_Exception := No_Exception; exception when Event : others => The_Standard_Output_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Output_Exception = No_Exception then raise; end if; end Put_Line_Output; procedure Put_Line_Error (Item : in Strings.Unbounded.Unbounded_String) is begin Standard.Ada.Text_IO.Put_Line (File => Standard.Ada.Text_IO.Standard_Error, Item => SPARK.Ada.Strings.Unbounded.Not_SPARK.To_String (Source => Item)); The_Standard_Error_Exception := No_Exception; exception when Event : others => The_Standard_Error_Exception := Get_Exception_T (The_Exception_Name => Standard.Ada.Exceptions.Exception_Name (X => Event)); if The_Standard_Error_Exception = No_Exception then raise; end if; end Put_Line_Error; end SPARK.Ada.Text_IO.Unbounded_String; spark-2012.0.deb/lib/spark/current/spark.smf0000644000175000017500000000027711555502631017647 0ustar eugeneugenspark-ada-command_line.adb spark-ada-command_line-unbounded_string.adb spark-ada-strings-unbounded.adb spark-ada-strings-maps.adb spark-ada-text_io.adb spark-ada-text_io-unbounded_string.adb spark-2012.0.deb/lib/spark/current/spark-ada-text_io-not_spark.ads0000644000175000017500000000545511753202341024021 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ------------------------------------------------------------------------------- -- -- -- SPARK.Ada.Text_IO.Not_SPARK -- -- -- -- Description -- -- This package provides the features of Ada.Text_IO which are not -- -- compatible with SPARK. Please refer to the Ada LRM description of -- -- Ada.Text_IO for usage information. 
-- -- -- ------------------------------------------------------------------------------- package SPARK.Ada.Text_IO.Not_SPARK is function Name (File : File_Type) return String; function Form (File : File_Type) return String; type File_Access is new Standard.Ada.Text_IO.File_Access; function Standard_Input return File_Access; function Standard_Output return File_Access; function Standard_Error return File_Access; function Current_Input return File_Access; function Current_Output return File_Access; function Current_Error return File_Access; function Get_Line (File : File_Type) return String; pragma Ada_05 (Get_Line); function Get_Line return String; pragma Ada_05 (Get_Line); ------------------------------------------------ -- Conversion functions from SPARK.Ada to Ada -- ------------------------------------------------ function To_File_Type (From : File_Type) return Standard.Ada.Text_IO.File_Type; function To_File_Mode (From : File_Mode) return Standard.Ada.Text_IO.File_Mode; function To_Type_Set (From : Type_Set) return Standard.Ada.Text_IO.Type_Set; end SPARK.Ada.Text_IO.Not_SPARK; spark-2012.0.deb/lib/spark/current/spark-ada-strings-unbounded-not_spark.ads0000644000175000017500000000604111753202341026010 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. -- --============================================================================= ------------------------------------------------------------------------------- -- -- -- SPARK.Ada.Strings.Unbounded.Not_SPARK -- -- -- -- Description -- -- This package provides the features of Ada.Strings.Unbounded which are -- -- not compatible with SPARK. Please refer to the Ada LRM description of -- -- Ada.Strings.Unbounded for usage information. 
-- -- -- ------------------------------------------------------------------------------- with SPARK.Ada.Strings.Maps.Not_SPARK; package SPARK.Ada.Strings.Unbounded.Not_SPARK is type String_Access is new Standard.Ada.Strings.Unbounded.String_Access; procedure Free (X : in out String_Access); function To_String (Source : Unbounded_String) return String; function Slice (Source : Unbounded_String; Low : Positive; High : Natural) return String; function Index (Source : Unbounded_String; Pattern : String; Going : Direction := Direction_Forward; Mapping : Maps.Not_SPARK.Character_Mapping_Function) return Natural; -- Index is only defined in Ada 2005 function Index (Source : Unbounded_String; Pattern : String; From : Positive; Going : Direction := Direction_Forward; Mapping : Maps.Not_SPARK.Character_Mapping_Function) return Natural; function Count (Source : Unbounded_String; Pattern : String; Mapping : Maps.Not_SPARK.Character_Mapping_Function) return Natural; function Translate (Source : Unbounded_String; Mapping : Maps.Not_SPARK.Character_Mapping_Function) return Unbounded_String; procedure Translate (Source : in out Unbounded_String; Mapping : in Maps.Not_SPARK.Character_Mapping_Function); end SPARK.Ada.Strings.Unbounded.Not_SPARK; spark-2012.0.deb/lib/spark/sparkreport.xsd0000644000175000017500000001462411753202341017426 0ustar eugeneugen spark-2012.0.deb/lib/spark/spark_io_05.adb0000644000175000017500000005435611753202341017123 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= with Text_IO; with Unchecked_Deallocation; package body SPARK_IO_05 is --# hide SPARK_IO_05; -- File Management type File_Descriptor is record File_Ref : Text_IO.File_Type; end record; File_System_Standard_Input : aliased File_Descriptor := File_Descriptor'(File_Ref => Text_IO.Standard_Input); File_System_Standard_Output : aliased File_Descriptor := File_Descriptor'(File_Ref => Text_IO.Standard_Output); type File_System is record Standard_Input : File_Type; Standard_Output : File_Type; end record; File_Sys : constant File_System := File_System'(Standard_Input => File_System_Standard_Input'Access, Standard_Output => File_System_Standard_Output'Access); function Standard_Input return File_Type is begin return File_Sys.Standard_Input; end Standard_Input; function Standard_Output return File_Type is begin return File_Sys.Standard_Output; end Standard_Output; procedure Dispose is new Unchecked_Deallocation (File_Descriptor, File_Type); procedure Create (File : out File_Type; Name_Of_File : in String; Form_Of_File : in String; Status : out File_Status) is begin Create_Flex (File => File, Name_Length => Name_Of_File'Length, Name_Of_File => Name_Of_File, Form_Of_File => Form_Of_File, Status => Status); end Create; procedure Create_Flex (File : out File_Type; Name_Length : in Natural; Name_Of_File : in String; Form_Of_File : in String; Status : out File_Status) is begin File := new File_Descriptor; Text_IO.Create (File.File_Ref, Text_IO.Out_File, Name_Of_File (Name_Of_File'First .. Name_Length), Form_Of_File); Status := Ok; exception when Text_IO.Status_Error => Status := Status_Error; Dispose (File); when Text_IO.Name_Error => Status := Name_Error; Dispose (File); when Text_IO.Use_Error => Status := Use_Error; Dispose (File); when Text_IO.Device_Error => Status := Device_Error; Dispose (File); when Standard.Storage_Error => Status := Storage_Error; Dispose (File); when Standard.Program_Error => Status := Program_Error; Dispose (File); end Create_Flex; procedure Open (File : out File_Type; Mode_Of_File : in File_Mode; Name_Of_File : in String; Form_Of_File : in String; Status : out File_Status) is begin Open_Flex (File => File, Mode_Of_File => Mode_Of_File, Name_Length => Name_Of_File'Length, Name_Of_File => Name_Of_File, Form_Of_File => Form_Of_File, Status => Status); end Open; procedure Open_Flex (File : out File_Type; Mode_Of_File : in File_Mode; Name_Length : in Natural; Name_Of_File : in String; Form_Of_File : in String; Status : out File_Status) is F_Mode : Text_IO.File_Mode; begin File := new File_Descriptor; case Mode_Of_File is when In_File => F_Mode := Text_IO.In_File; when Out_File => F_Mode := Text_IO.Out_File; when Append_File => F_Mode := Text_IO.Append_File; end case; Text_IO.Open (File.File_Ref, F_Mode, Name_Of_File (Name_Of_File'First .. 
Name_Length), Form_Of_File); Status := Ok; exception when Text_IO.Status_Error => Status := Status_Error; Dispose (File); when Text_IO.Name_Error => Status := Name_Error; Dispose (File); when Text_IO.Use_Error => Status := Use_Error; Dispose (File); when Text_IO.Device_Error => Status := Device_Error; Dispose (File); when Standard.Storage_Error => Status := Storage_Error; Dispose (File); when Standard.Program_Error => Status := Program_Error; Dispose (File); end Open_Flex; procedure Close (File : in out File_Type; Status : out File_Status) is begin if File = null then Status := Status_Error; else Text_IO.Close (File.File_Ref); Dispose (File); Status := Ok; end if; exception when Text_IO.Status_Error => Status := Status_Error; Dispose (File); when Text_IO.Device_Error => Status := Device_Error; Dispose (File); when Constraint_Error => Status := Use_Error; Dispose (File); when Standard.Storage_Error => Status := Storage_Error; Dispose (File); when Standard.Program_Error => Status := Program_Error; Dispose (File); end Close; procedure Delete (File : in out File_Type; Status : out File_Status) is begin if File = null then Status := Status_Error; else Text_IO.Delete (File.File_Ref); Dispose (File); Status := Ok; end if; exception when Text_IO.Status_Error => Status := Status_Error; Dispose (File); when Text_IO.Use_Error => Status := Use_Error; Dispose (File); when Text_IO.Device_Error => Status := Device_Error; Dispose (File); when Constraint_Error => Status := Use_Error; Dispose (File); when Standard.Storage_Error => Status := Storage_Error; Dispose (File); when Standard.Program_Error => Status := Program_Error; Dispose (File); end Delete; procedure Reset (File : in out File_Type; Mode_Of_File : in File_Mode; Status : out File_Status) is F_Mode : Text_IO.File_Mode; begin if File = null then Status := Status_Error; else case Mode_Of_File is when In_File => F_Mode := Text_IO.In_File; when Out_File => F_Mode := Text_IO.Out_File; when Append_File => F_Mode := Text_IO.Append_File; end case; Text_IO.Reset (File.File_Ref, F_Mode); Status := Ok; end if; exception when Text_IO.Status_Error => Status := Status_Error; Dispose (File); when Text_IO.Use_Error => Status := Use_Error; Dispose (File); when Text_IO.Device_Error => Status := Device_Error; Dispose (File); when Standard.Storage_Error => Status := Storage_Error; Dispose (File); when Standard.Program_Error => Status := Program_Error; Dispose (File); end Reset; function Valid_File (File : File_Type) return Boolean is begin return File /= null; end Valid_File; function Is_Open (File : File_Type) return Boolean is begin return Valid_File (File) and then Text_IO.Is_Open (File.File_Ref); end Is_Open; function Mode (File : File_Type) return File_Mode is F_Mode : File_Mode; begin if Is_Open (File) and then Text_IO.Is_Open (File.File_Ref) then case Text_IO.Mode (File.File_Ref) is when Text_IO.In_File => F_Mode := In_File; when Text_IO.Out_File => F_Mode := Out_File; when Text_IO.Append_File => F_Mode := Append_File; end case; else F_Mode := In_File; end if; return F_Mode; end Mode; function Is_In (File : File_Type) return Boolean; function Is_In (File : File_Type) return Boolean is begin return Is_Open (File) and then Mode (File) = In_File; end Is_In; function Is_Out (File : File_Type) return Boolean; function Is_Out (File : File_Type) return Boolean is begin return Is_Open (File) and then (Mode (File) = Out_File or Mode (File) = Append_File); end Is_Out; procedure Name (File : in File_Type; Name_Of_File : out String; Stop : out Natural) is begin if Is_Open 
(File) then declare FN : constant String := Text_IO.Name (File.File_Ref); begin if Name_Of_File'Length >= FN'Length then Name_Of_File (FN'Range) := FN; Stop := FN'Length; else Name_Of_File := FN (Name_Of_File'Range); Stop := Name_Of_File'Length; end if; end; else Stop := Name_Of_File'First - 1; end if; exception when others => Stop := Name_Of_File'First - 1; end Name; procedure Form (File : in File_Type; Form_Of_File : out String; Stop : out Natural) is begin if Is_Open (File) then declare FM : constant String := Text_IO.Form (File.File_Ref); begin if Form_Of_File'Length >= FM'Length then Form_Of_File (FM'Range) := FM; Stop := FM'Length; else Form_Of_File := FM (Form_Of_File'Range); Stop := Form_Of_File'Length; end if; end; else Stop := Form_Of_File'First - 1; end if; exception when others => Stop := Form_Of_File'First - 1; end Form; -- Line and file terminator control function P_To_PC (P : Positive) return Text_IO.Positive_Count; function P_To_PC (P : Positive) return Text_IO.Positive_Count is begin return Text_IO.Positive_Count (P); end P_To_PC; function PC_To_P (PC : Text_IO.Positive_Count) return Positive; function PC_To_P (PC : Text_IO.Positive_Count) return Positive is begin return Positive (PC); end PC_To_P; procedure New_Line (File : in File_Type; Spacing : in Positive) is Gap : Text_IO.Positive_Count; begin if Is_Out (File) then Gap := P_To_PC (Spacing); Text_IO.New_Line (File.File_Ref, Gap); end if; exception when others => null; end New_Line; procedure Skip_Line (File : in File_Type; Spacing : in Positive) is Gap : Text_IO.Positive_Count; begin if Is_In (File) then Gap := P_To_PC (Spacing); Text_IO.Skip_Line (File.File_Ref, Gap); end if; exception when others => null; end Skip_Line; procedure New_Page (File : in File_Type) is begin if Is_Out (File) then Text_IO.New_Page (File.File_Ref); end if; exception when others => null; end New_Page; function End_Of_Line (File : File_Type) return Boolean is EOLN : Boolean; begin if Is_In (File) then EOLN := Text_IO.End_Of_Line (File.File_Ref); else EOLN := False; end if; return EOLN; end End_Of_Line; function End_Of_File (File : File_Type) return Boolean is EOF : Boolean; begin if Is_In (File) then EOF := Text_IO.End_Of_File (File.File_Ref); else EOF := True; end if; return EOF; end End_Of_File; procedure Set_Col (File : in File_Type; Posn : in Positive); procedure Set_Col (File : in File_Type; Posn : in Positive) is Col : Text_IO.Positive_Count; begin if Is_Open (File) then Col := P_To_PC (Posn); Text_IO.Set_Col (File.File_Ref, Col); end if; exception when others => null; end Set_Col; procedure Set_In_File_Col (File : in File_Type; Posn : in Positive) is begin if Is_In (File) then Set_Col (File, Posn); end if; end Set_In_File_Col; procedure Set_Out_File_Col (File : in File_Type; Posn : in Positive) is begin if Is_Out (File) then Set_Col (File, Posn); end if; end Set_Out_File_Col; function Col (File : File_Type) return Positive; function Col (File : File_Type) return Positive is Posn : Positive; Col : Text_IO.Positive_Count; begin if Is_Open (File) then Col := Text_IO.Col (File.File_Ref); Posn := PC_To_P (Col); else Posn := 1; end if; return Posn; exception when Text_IO.Status_Error => return 1; when Text_IO.Layout_Error => return PC_To_P (Text_IO.Count'Last); when Text_IO.Device_Error => return 1; when Standard.Storage_Error => return 1; when Standard.Program_Error => return 1; end Col; function In_File_Col (File : File_Type) return Positive is begin if Is_In (File) then return Col (File); else return 1; end if; end In_File_Col; function 
Out_File_Col (File : File_Type) return Positive is begin if Is_Out (File) then return Col (File); else return 1; end if; end Out_File_Col; function Line (File : File_Type) return Positive; function Line (File : File_Type) return Positive is Posn : Positive; Line : Text_IO.Positive_Count; begin if Is_Open (File) then Line := Text_IO.Line (File.File_Ref); Posn := PC_To_P (Line); else Posn := 1; end if; return Posn; exception when Text_IO.Status_Error => return 1; when Text_IO.Layout_Error => return PC_To_P (Text_IO.Count'Last); when Text_IO.Device_Error => return 1; when Standard.Storage_Error => return 1; when Standard.Program_Error => return 1; end Line; function In_File_Line (File : File_Type) return Positive is begin if Is_In (File) then return Line (File); else return 1; end if; end In_File_Line; function Out_File_Line (File : File_Type) return Positive is begin if Is_Out (File) then return Line (File); else return 1; end if; end Out_File_Line; -- Character IO procedure Get_Char (File : in File_Type; Item : out Character) is begin if Is_In (File) then Text_IO.Get (File.File_Ref, Item); else Item := Character'First; end if; exception when others => null; end Get_Char; procedure Put_Char (File : in File_Type; Item : in Character) is begin if Is_Out (File) then Text_IO.Put (File.File_Ref, Item); end if; exception when others => null; end Put_Char; procedure Get_Char_Immediate (File : in File_Type; Item : out Character; Status : out File_Status) is begin if Is_In (File) then Text_IO.Get_Immediate (File.File_Ref, Item); Status := Ok; else Item := Character'First; Status := Mode_Error; end if; exception when others => Item := Character'First; Status := End_Error; end Get_Char_Immediate; -- String IO procedure Get_String (File : in File_Type; Item : out String; Stop : out Natural) is LSTP : Natural; begin if Is_In (File) then LSTP := Item'First - 1; loop exit when End_Of_File (File); LSTP := LSTP + 1; Get_Char (File, Item (LSTP)); exit when LSTP = Item'Last; end loop; Stop := LSTP; else Stop := Item'First - 1; end if; end Get_String; -- CFR 718 The behaviour of Put_String is now as follows: -- If Stop is 0 then all characters in Item are output. -- If Stop <= Item'Last then output Item(Item'First .. Stop). -- If Stop > Item'Last then output all characters in Item, then pad with -- spaces to width specified by Stop. procedure Put_String (File : in File_Type; Item : in String; Stop : in Natural) is Pad : Natural; begin if Is_Out (File) then if Stop = 0 then Text_IO.Put (File.File_Ref, Item); elsif Stop <= Item'Last then Text_IO.Put (File.File_Ref, Item (Item'First .. Stop)); else Pad := Stop - Item'Last; Text_IO.Put (File.File_Ref, Item); while Pad > 0 loop Text_IO.Put (File.File_Ref, ' '); Pad := Pad - 1; end loop; end if; end if; exception when others => null; end Put_String; procedure Get_Line (File : in File_Type; Item : out String; Stop : out Natural) is begin if Is_In (File) then Text_IO.Get_Line (File.File_Ref, Item, Stop); else Stop := Item'First - 1; end if; exception when others => Stop := Item'First - 1; end Get_Line; procedure Put_Line (File : in File_Type; Item : in String; Stop : in Natural) is ES : Positive; begin if Stop = 0 then ES := Item'Last; else ES := Stop; end if; if Is_Out (File) then Text_IO.Put_Line (File.File_Ref, Item (Item'First .. 
ES)); end if; exception when others => null; end Put_Line; -- Integer IO package Integer_IO is new Text_IO.Integer_IO (Integer); procedure Get_Integer (File : in File_Type; Item : out Integer; Width : in Natural; Read : out Boolean) is begin if Is_In (File) then Integer_IO.Get (File.File_Ref, Item, Width); Read := True; else Read := False; end if; exception when others => Read := False; end Get_Integer; procedure Put_Integer (File : in File_Type; Item : in Integer; Width : in Natural; Base : in Number_Base) is begin if Is_Out (File) then Integer_IO.Put (File.File_Ref, Item, Width, Base); end if; exception when others => null; end Put_Integer; procedure Get_Int_From_String (Source : in String; Item : out Integer; Start_Pos : in Positive; Stop : out Natural) is begin Integer_IO.Get (Source (Start_Pos .. Source'Last), Item, Stop); exception when others => Stop := Start_Pos - 1; end Get_Int_From_String; procedure Put_Int_To_String (Dest : in out String; Item : in Integer; Start_Pos : in Positive; Base : in Number_Base) is begin Integer_IO.Put (Dest (Start_Pos .. Dest'Last), Item, Base); exception when others => null; end Put_Int_To_String; -- Float IO package Real_IO is new Text_IO.Float_IO (Float); procedure Get_Float (File : in File_Type; Item : out Float; Width : in Natural; Read : out Boolean) is begin if Is_In (File) then Real_IO.Get (File.File_Ref, Item, Width); Read := True; else Read := False; end if; exception when others => Read := False; end Get_Float; procedure Put_Float (File : in File_Type; Item : in Float; Fore : in Natural; Aft : in Natural; Exp : in Natural) is begin if Is_Out (File) then Real_IO.Put (File.File_Ref, Item, Fore, Aft, Exp); end if; exception when others => null; end Put_Float; procedure Get_Float_From_String (Source : in String; Item : out Float; Start_Pos : in Positive; Stop : out Natural) is begin Real_IO.Get (Source (Start_Pos .. Source'Last), Item, Stop); exception when others => Stop := Start_Pos - 1; end Get_Float_From_String; procedure Put_Float_To_String (Dest : in out String; Item : in Float; Start_Pos : in Positive; Aft : in Natural; Exp : in Natural) is begin Real_IO.Put (Dest (Start_Pos .. Dest'Last), Item, Aft, Exp); exception when others => null; end Put_Float_To_String; end SPARK_IO_05; spark-2012.0.deb/lib/spark/spark_io.ads0000644000175000017500000004172511753202341016634 0ustar eugeneugen------------------------------------------------------------------------------- -- (C) Altran Praxis Limited ------------------------------------------------------------------------------- -- -- The SPARK toolset is free software; you can redistribute it and/or modify it -- under terms of the GNU General Public License as published by the Free -- Software Foundation; either version 3, or (at your option) any later -- version. The SPARK toolset is distributed in the hope that it will be -- useful, but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -- Public License for more details. You should have received a copy of the GNU -- General Public License distributed with the SPARK toolset; see file -- COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of -- the license. 
-- --============================================================================= ------------------------------------------------------------------------------- -- -- -- SPARK_IO -- -- -- -- Description -- -- This is a thick binding to package Ada.Text_IO -- -- -- -- Language -- -- Specification : SPARK -- -- Private Part : Ada -- -- Body : Ada -- -- -- -- Runtime Requirements and Dependencies -- -- Full Ada Runtime -- -- -- -- Verification -- -- N/A -- -- -- -- Exceptions -- -- None -- -- -- ------------------------------------------------------------------------------- with Text_IO; package SPARK_IO --# own State : State_Type; --# Inputs : Inputs_Type; --# Outputs : Outputs_Type; --# initializes State, --# Inputs, --# Outputs; is --# type State_Type is abstract; --# type Inputs_Type is abstract; --# type Outputs_Type is abstract; type File_Type is private; type File_Mode is (In_File, Out_File, Append_File); type File_Status is (Ok, Status_Error, Mode_Error, Name_Error, Use_Error, Device_Error, End_Error, Data_Error, Layout_Error, Storage_Error, Program_Error); subtype Number_Base is Integer range 2 .. 16; function Standard_Input return File_Type; --# global in Inputs; function Standard_Output return File_Type; --# global in Outputs; Null_File : constant File_Type; --------------------- -- File Management -- --------------------- procedure Create (File : out File_Type; Name_Of_File : in String; Form_Of_File : in String; Status : out File_Status); --# global in out State; --# derives File, --# State, --# Status from Form_Of_File, --# Name_Of_File, --# State; --# declare delay; procedure Create_Flex (File : out File_Type; Name_Length : in Natural; Name_Of_File : in String; Form_Of_File : in String; Status : out File_Status); --# global in out State; --# derives File, --# State, --# Status from Form_Of_File, --# Name_Length, --# Name_Of_File, --# State; --# declare delay; procedure Open (File : out File_Type; Mode_Of_File : in File_Mode; Name_Of_File : in String; Form_Of_File : in String; Status : out File_Status); --# global in out State; --# derives File, --# State, --# Status from Form_Of_File, --# Mode_Of_File, --# Name_Of_File, --# State; --# declare delay; procedure Open_Flex (File : out File_Type; Mode_Of_File : in File_Mode; Name_Length : in Natural; Name_Of_File : in String; Form_Of_File : in String; Status : out File_Status); --# global in out State; --# derives File, --# State, --# Status from Form_Of_File, --# Mode_Of_File, --# Name_Length, --# Name_Of_File, --# State; --# declare delay; procedure Close (File : in out File_Type; Status : out File_Status); --# global in out State; --# derives State, --# Status from File, --# State & --# File from ; --# declare delay; procedure Delete (File : in out File_Type; Status : out File_Status); --# global in out State; --# derives State, --# Status from File, --# State & --# File from ; --# declare delay; procedure Reset (File : in out File_Type; Mode_Of_File : in File_Mode; Status : out File_Status); --# derives File, --# Status from File, --# Mode_Of_File; --# declare delay; function Valid_File (File : File_Type) return Boolean; -- This is a potentially blocking function. -- DO NOT CALL THIS FUNCTION FROM A PROTECTED OPERATION. function Mode (File : File_Type) return File_Mode; -- This is a potentially blocking function. -- DO NOT CALL THIS FUNCTION FROM A PROTECTED OPERATION. 
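   -- Example (editorial sketch only, not part of the original package): a
   -- typical caller opens a named file, checks the returned status, and
   -- closes the file again.  The unit name and file name below are
   -- hypothetical, and the SPARK annotations a real caller would need are
   -- omitted.
   --
   --    with SPARK_IO;
   --    procedure Read_Config
   --    is
   --       File   : SPARK_IO.File_Type;
   --       Status : SPARK_IO.File_Status;
   --    begin
   --       SPARK_IO.Open (File         => File,
   --                      Mode_Of_File => SPARK_IO.In_File,
   --                      Name_Of_File => "config.txt",
   --                      Form_Of_File => "",
   --                      Status       => Status);
   --       if Status = SPARK_IO.Ok then
   --          --  ... read from File here ...
   --          SPARK_IO.Close (File, Status);
   --       end if;
   --    end Read_Config;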
procedure Name (File : in File_Type; Name_Of_File : out String; Stop : out Natural); --# derives Name_Of_File, --# Stop from File; --# declare delay; procedure Form (File : in File_Type; Form_Of_File : out String; Stop : out Natural); --# derives Form_Of_File, --# Stop from File; --# declare delay; function Is_Open (File : File_Type) return Boolean; --# global State; -- This is a potentially blocking function. -- DO NOT CALL THIS FUNCTION FROM A PROTECTED OPERATION. ----------------------------------------------- -- Control of default input and output files -- ----------------------------------------------- -- -- Not supported in SPARK_IO -- -------------------------------------------- -- Specification of line and page lengths -- -------------------------------------------- -- -- Not supported in SPARK_IO -- ----------------------------------- -- Column, Line and Page Control -- ----------------------------------- procedure New_Line (File : in File_Type; Spacing : in Positive); --# global in out Outputs; --# derives Outputs from *, --# File, --# Spacing; --# declare delay; procedure Skip_Line (File : in File_Type; Spacing : in Positive); --# global in out Inputs; --# derives Inputs from *, --# File, --# Spacing; --# declare delay; procedure New_Page (File : in File_Type); --# global in out Outputs; --# derives Outputs from *, --# File; --# declare delay; function End_Of_Line (File : File_Type) return Boolean; --# global Inputs; -- This is a potentially blocking function. -- DO NOT CALL THIS FUNCTION FROM A PROTECTED OPERATION. function End_Of_File (File : File_Type) return Boolean; --# global Inputs; -- This is a potentially blocking function. -- DO NOT CALL THIS FUNCTION FROM A PROTECTED OPERATION. procedure Set_In_File_Col (File : in File_Type; Posn : in Positive); --# global in out Inputs; --# derives Inputs from *, --# File, --# Posn; --# declare delay; --# pre Mode (File) = In_File; procedure Set_Out_File_Col (File : in File_Type; Posn : in Positive); --# global in out Outputs; --# derives Outputs from *, --# File, --# Posn; --# declare delay; --# pre Mode( File ) = Out_File or --# Mode (File) = Append_File; function In_File_Col (File : File_Type) return Positive; --# global Inputs; --# pre Mode (File) = In_File; -- This is a potentially blocking function. -- DO NOT CALL THIS FUNCTION FROM A PROTECTED OPERATION. function Out_File_Col (File : File_Type) return Positive; --# global Outputs; --# pre Mode (File) = Out_File or --# Mode (File) = Append_File; -- This is a potentially blocking function. -- DO NOT CALL THIS FUNCTION FROM A PROTECTED OPERATION. function In_File_Line (File : File_Type) return Positive; --# global Inputs; --# pre Mode (File) = In_File; -- This is a potentially blocking function. -- DO NOT CALL THIS FUNCTION FROM A PROTECTED OPERATION. function Out_File_Line (File : File_Type) return Positive; --# global Outputs; --# pre Mode (File) = Out_File or --# Mode (File) = Append_File; -- This is a potentially blocking function. -- DO NOT CALL THIS FUNCTION FROM A PROTECTED OPERATION. 
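   -- Example (editorial sketch only, not part of the original package):
   -- given a file that is already open with mode In_File, the loop below
   -- advances to the end of the file and reports the line number reached,
   -- using only the operations declared above.  The unit name is
   -- hypothetical and the SPARK annotations a real caller would need are
   -- omitted.
   --
   --    with SPARK_IO;
   --    procedure Count_To_End (File : in     SPARK_IO.File_Type;
   --                            Last :    out Positive)
   --    is
   --    begin
   --       while not SPARK_IO.End_Of_File (File) loop
   --          SPARK_IO.Skip_Line (File, 1);
   --       end loop;
   --       Last := SPARK_IO.In_File_Line (File);
   --    end Count_To_End;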
---------------------------- -- Character Input-Output -- ---------------------------- procedure Get_Char (File : in File_Type; Item : out Character); --# global in out Inputs; --# derives Inputs, --# Item from File, --# Inputs; --# declare delay; procedure Put_Char (File : in File_Type; Item : in Character); --# global in out Outputs; --# derives Outputs from *, --# File, --# Item; --# declare delay; procedure Get_Char_Immediate (File : in File_Type; Item : out Character; Status : out File_Status); --# global in out Inputs; --# derives Inputs, --# Item, --# Status from File, --# Inputs; --# declare delay; -- NOTE. Only the variant of Get_Immediate that waits for a character to -- become available is supported. -- On return Status is one of Ok, Mode_Error or End_Error. See ALRM A.10.7 -- Item is Character'First if Status /= Ok ------------------------- -- String Input-Output -- ------------------------- procedure Get_String (File : in File_Type; Item : out String; Stop : out Natural); --# global in out Inputs; --# derives Inputs, --# Item, --# Stop from File, --# Inputs; --# declare delay; procedure Put_String (File : in File_Type; Item : in String; Stop : in Natural); --# global in out Outputs; --# derives Outputs from *, --# File, --# Item, --# Stop; --# declare delay; procedure Get_Line (File : in File_Type; Item : out String; Stop : out Natural); --# global in out Inputs; --# derives Inputs, --# Item, --# Stop from File, --# Inputs; --# declare delay; procedure Put_Line (File : in File_Type; Item : in String; Stop : in Natural); --# global in out Outputs; --# derives Outputs from *, --# File, --# Item, --# Stop; --# declare delay; -------------------------- -- Integer Input-Output -- -------------------------- -- SPARK_IO only supports input-output of -- the built-in integer type Integer procedure Get_Integer (File : in File_Type; Item : out Integer; Width : in Natural; Read : out Boolean); --# global in out Inputs; --# derives Inputs, --# Item, --# Read from File, --# Inputs, --# Width; --# declare delay; procedure Put_Integer (File : in File_Type; Item : in Integer; Width : in Natural; Base : in Number_Base); --# global in out Outputs; --# derives Outputs from *, --# Base, --# File, --# Item, --# Width; --# declare delay; procedure Get_Int_From_String (Source : in String; Item : out Integer; Start_Pos : in Positive; Stop : out Natural); --# derives Item, --# Stop from Source, --# Start_Pos; --# declare delay; procedure Put_Int_To_String (Dest : in out String; Item : in Integer; Start_Pos : in Positive; Base : in Number_Base); --# derives Dest from *, --# Base, --# Item, --# Start_Pos; --# declare delay; ------------------------ -- Float Input-Output -- ------------------------ -- SPARK_IO only supports input-output of -- the built-in real type Float procedure Get_Float (File : in File_Type; Item : out Float; Width : in Natural; Read : out Boolean); --# global in out Inputs; --# derives Inputs, --# Item, --# Read from File, --# Inputs, --# Width; --# declare delay; procedure Put_Float (File : in File_Type; Item : in Float; Fore : in Natural; Aft : in Natural; Exp : in Natural); --# global in out Outputs; --# derives Outputs from *, --# Aft, --# Exp, --# File, --# Fore, --# Item; --# declare delay; procedure Get_Float_From_String (Source : in String; Item : out Float; Start_Pos : in Positive; Stop : out Natural); --# derives Item, --# Stop from Source, --# Start_Pos; --# declare delay; procedure Put_Float_To_String (Dest : in out String; Item : in Float; Start_Pos : in Positive; Aft : 
in Natural; Exp : in Natural); --# derives Dest from *, --# Aft, --# Exp, --# Item, --# Start_Pos; --# declare delay; private --# hide SPARK_IO; type IO_TYPE is (Stdin, Stdout, NamedFile); type File_PTR is access Text_IO.File_Type; -- In addition to the fields listed here, we consider the -- FILE_PTR.all record to contain the name and mode of the -- file from the point of view of the annotations above. type File_Type is record File : File_PTR := null; IO_Sort : IO_TYPE := NamedFile; end record; Null_File : constant File_Type := File_Type'(null, NamedFile); end SPARK_IO; spark-2012.0.deb/lib/checker/0000755000175000017500000000000011753202331014606 5ustar eugeneugenspark-2012.0.deb/lib/checker/rules/0000755000175000017500000000000011753202341015741 5ustar eugeneugenspark-2012.0.deb/lib/checker/rules/ENUMERATION.RUL0000644000175000017500000001732611753202341020124 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %------------------------------------------------------------------------------- % RULE FAMILIES CONTAINED HEREIN :- % % enumeration : more properties of enumerated types (pred & succ etc.) %------------------------------------------------------------------------------- % MODEL DECLARATIONS FOR THIS FILE :- % % rule_family enumeration: % X <= Y requires [ X:e, Y:e ] & % X < Y requires [ X:e, Y:e ] & % X >= Y requires [ X:e, Y:e ] & % X > Y requires [ X:e, Y:e ] & % X <> Y requires [ X:e, Y:e ] & % succ(X) requires [ X:e ] & % pred(X) requires [ X:e ]. %------------------------------------------------------------------------------- /*** Enumerated type inequality rules ***/ /* Predecessor */ enumeration(1): X <= pred(Y) may_be_deduced_from [ X < Y ]. enumeration(2): pred(X) <= Y may_be_deduced_from [ X <= Y, goal(checktype(X, T)), goal(enumeration(T, [E|_])), X <> E ]. enumeration(3): pred(X) >= Y may_be_deduced_from [ X > Y ]. enumeration(4): X >= pred(Y) may_be_deduced_from [ X >= Y, goal(checktype(Y, T)), goal(enumeration(T, [E|_])), Y <> E ]. enumeration(5): X > Y may_be_deduced_from [ pred(X) >= Y ]. enumeration(6): X < Y may_be_deduced_from [ X <= pred(Y) ]. enumeration(7): X <= Y may_be_deduced_from [ pred(X) < Y ]. enumeration(8): X >= Y may_be_deduced_from [ X > pred(Y) ]. enumeration(9): pred(X) < Y may_be_deduced_from [ X <= Y, goal(checktype(X, T)), goal(enumeration(T, [E|_])), X <> E ]. enumeration(10): X > pred(Y) may_be_deduced_from [ X >= Y, goal(checktype(Y, T)), goal(enumeration(T, [E|_])), Y <> E ]. enumeration(11): pred(X) < X may_be_deduced_from [ goal(checktype(X, T)), goal(enumeration(T,[E|_])), X <> E ]. 
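/* Worked instance (editorial illustration only; the type "colour" with
   literals red, green and blue is hypothetical and does not appear in any
   rule file).  For a variable C of type colour, rule enumeration(1) lets the
   Checker conclude  C <= pred(blue)  from the hypothesis  C < blue, while
   rule enumeration(2) concludes  pred(C) <= green  from  C <= green together
   with  C <> red, the side conditions guaranteeing that pred(C) exists. */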
enumeration(12): X > pred(X) may_be_deduced_from [ goal(checktype(X, T)), goal(enumeration(T,[E|_])), X <> E ].

/* Successor */

enumeration(13): X <= succ(Y) may_be_deduced_from [ X <= Y, goal(checktype(Y, T)), goal(enumeration(T, L)), goal(last(L, E)), Y <> E ].
enumeration(14): succ(X) <= Y may_be_deduced_from [ X < Y ].
enumeration(15): succ(X) >= Y may_be_deduced_from [ X >= Y, goal(checktype(X, T)), goal(enumeration(T, L)), goal(last(L, E)), X <> E ].
enumeration(16): X >= succ(Y) may_be_deduced_from [ X > Y ].
enumeration(17): X < Y may_be_deduced_from [ succ(X) <= Y ].
enumeration(18): X > Y may_be_deduced_from [ X >= succ(Y) ].
enumeration(19): X >= Y may_be_deduced_from [ succ(X) > Y ].
enumeration(20): X <= Y may_be_deduced_from [ X < succ(Y) ].
enumeration(21): succ(X) > Y may_be_deduced_from [ X >= Y, goal(checktype(X, T)), goal(enumeration(T, L)), goal(last(L, E)), X <> E ].
enumeration(22): X < succ(Y) may_be_deduced_from [ X <= Y, goal(checktype(Y, T)), goal(enumeration(T, L)), goal(last(L, E)), Y <> E ].
enumeration(23): succ(X) > X may_be_deduced_from [ goal(checktype(X, T)), goal(enumeration(T, L)), goal(last(L, E)), X <> E ].
enumeration(24): X < succ(X) may_be_deduced_from [ goal(checktype(X, T)), goal(enumeration(T, L)), goal(last(L, E)), X <> E ].

/* General */

enumeration(25): X <> E may_be_deduced_from [ X > Y, goal(checktype(X, T)), goal(enumeration(T, [E|_])) ].
enumeration(26): X <> E may_be_deduced_from [ X < Y, goal(checktype(X, T)), goal(enumeration(T, L)), goal(last(L, E)) ].

/*============================================================================*
Justifications
--------------
1: If X < Y, then X <= pred(Y) must hold. (Note that because X < Y, Y cannot be equal to the first enumeration literal in its type.)
2: If X <= Y, then if pred(X) exists it must also be <= Y. (The additional immediate conditions ensure X is not the first literal, so pred(X) does indeed exist.)
3: This rule is equivalent to rule 1 (though with X and Y interchanged).
4: This rule is equivalent to rule 2 (though with X and Y interchanged).
5: Given pred(X) >= Y, X cannot be the first enumeration literal in its type and given X > pred(X) (rule 12) we see X > Y by transitivity.
6: This rule is equivalent to rule 5 (though with X and Y interchanged).
7: Given pred(X) < Y, X cannot be the first enumeration literal in its type. If pred(X) < Y it follows that X <= Y (since if X > Y were to hold, then pred(X) would have to be at least Y).
8: This rule is equivalent to rule 7 (though with X and Y interchanged).
9: If X <= Y then provided X is not equal to the first enumeration literal in its type, pred(X) exists and pred(X) < X (rule 11), so pred(X) < Y by transitivity.
10: This rule is equivalent to rule 9 (though with X and Y interchanged).
11: If X is not equal to the first enumeration literal in its type, then pred(X) exists. Whenever this is so, pred(X) precedes X in the type, so pred(X) < X holds in such cases as required.
12: This rule is equivalent to rule 11.
13: If Y is not equal to the last enumeration literal in its type, then succ(Y) exists and Y < succ(Y); so, given X <= Y, X <= succ(Y) follows as required.
14: If X < Y, then succ(X) <= Y must hold. (Note that because X < Y, X cannot be equal to the last enumeration literal in its type.)
15: This rule is equivalent to rule 13 (though with X and Y interchanged).
16: This rule is equivalent to rule 14 (though with X and Y interchanged).
17: Given succ(X) <= Y, X cannot be the last enumeration literal in its type and given X < succ(X) (rule 24) we see X < Y by transitivity.
18: This rule is equivalent to rule 17 (though with X and Y interchanged).
19: If succ(X) > Y, then X must be at least Y as required.
20: This rule is equivalent to rule 19 (though with X and Y interchanged).
21: If X is not the last enumeration literal in its type, then succ(X) > X (rule 23), so succ(X) > Y follows from X >= Y by transitivity.
22: This rule is equivalent to rule 21 (though with X and Y interchanged).
23: If X is not equal to the last enumeration literal in its type, then succ(X) exists.
Whenever this is so, succ(X) succeeds X in the type, so succ(X) > X holds in such cases as required. 24: This rule is equivalent to rule 23. 25: If X is bigger than something (Y), then X cannot be equal to the first enumeration literal in its type. 26: If X is smaller than something (Y), then X cannot be equal to the last enumeration literal in its type. *============================================================================*/ spark-2012.0.deb/lib/checker/rules/SEQ.RUL0000644000175000017500000001374311753202341016765 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %------------------------------------------------------------------------------- % RULE FAMILIES CONTAINED HEREIN :- % % seqlen : properties of the length of sequence function % append : properties of the append operator % first : properties of the sequence function first % last : properties of the sequence function last % nonfirst : properties of the sequence function nonfirst % nonlast : properties of the sequence function nonlast % seq : equality properties of sequences %------------------------------------------------------------------------------- % MODEL DECLARATIONS FOR THIS FILE :- % % rule_family seqlen: % X >= Y requires [ X:i, Y:i ] & % X > Y requires [ X:i, Y:i ] & % length(X) requires [ X:any ] & % X + Y requires [ X:i, Y:i ] & % X - Y requires [ X:i, Y:i ]. % % rule_family append: % X @ Y requires [ X:any, Y:any ]. % % rule_family first: % first(X) requires [ X:any ]. % % rule_family last: % last(X) requires [ X:any ]. % % rule_family nonfirst: % nonfirst(X) requires [ X:any ] & % X @ Y requires [ X:any, Y:any ]. % % rule_family nonlast: % nonlast(X) requires [ X:any ] & % [X|Y] requires [ X:any, Y:any ] & % X @ Y requires [ X:any, Y:any ]. % % rule_family seq: % X <-> Y requires [ X:any, Y:any ] & % X = Y requires [ X:any, Y:any ]. %------------------------------------------------------------------------------- /*** Sequence length rules ***/ seqlen(1): length(S)>=0 may_be_deduced. seqlen(2): length([H|T])>0 may_be_deduced. seqlen(3): length([H|T])>=1 may_be_deduced. seqlen(4): length([]) may_be_replaced_by 0. seqlen(5): length([H|T]) may_be_replaced_by N if [ goal(length([H|T],N)) ]. seqlen(6): length(S @ T) & length(S)+length(T) are_interchangeable. seqlen(7): length(S) & length(nonlast(S))+1 are_interchangeable if [ S<>[] ]. seqlen(8): length(S) & length(nonlast(S))+1 are_interchangeable if [ length(S)>=1 ]. seqlen(9): length(S)-1 & length(nonlast(S)) are_interchangeable if [ S<>[] ]. seqlen(10): length(S)-1 & length(nonlast(S)) are_interchangeable if [ length(S)>=1 ]. 
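/* Worked instance (editorial illustration only; the element names a, b and c
   are hypothetical).  For the sequence [a, b, c] we have
   length([a, b, c]) = 3; seqlen(6) rewrites  length([a] @ [b, c])  to
   length([a]) + length([b, c]), i.e. 1 + 2; and, since [a, b, c] <> [],
   seqlen(9) rewrites  length([a, b, c]) - 1  to  length(nonlast([a, b, c])),
   which is length([a, b]) = 2. */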
seqlen(11): length(S) & length(nonfirst(S))+1 are_interchangeable if [ S<>[] ]. seqlen(12): length(S) & length(nonfirst(S))+1 are_interchangeable if [ length(S)>=1 ]. seqlen(13): length(S)-1 & length(nonfirst(S)) are_interchangeable if [ S<>[] ]. seqlen(14): length(S)-1 & length(nonfirst(S)) are_interchangeable if [ length(S)>=1 ]. /*** Append rules ***/ append(1): ([] @ L) may_be_replaced_by L. append(2): (L @ []) may_be_replaced_by L. append(3): ([H1|T1] @ [H2|T2]) may_be_replaced_by L if [ goal(append([H1|T1],[H2|T2],L)) ]. append(4): ([first(S)] @ nonfirst(S)) may_be_replaced_by S if [ S<>[] ]. append(5): (nonlast(S) @ [last(S)]) may_be_replaced_by S if [ S<>[] ]. append(6): ((X @ Y) @ Z) & (X @ (Y @ Z)) are_interchangeable. /*** First rules ***/ first(1): first([H|T]) may_be_replaced_by H. first(2): first([H|T] @ Y) may_be_replaced_by H. first(3): first(X @ Y) & first(X) are_interchangeable if [ X<>[] ]. /*** Last rules ***/ last(1): last([H]) may_be_replaced_by H. last(2): last(X @ [H|T]) may_be_replaced_by last([H|T]). last(3): last([H|T]) & last(T) are_interchangeable if [ T<>[] ]. last(4): last(X @ Y) & last(Y) are_interchangeable if [ Y<>[] ]. /*** Nonfirst rules ***/ nonfirst(1): nonfirst([H|T]) may_be_replaced_by T. nonfirst(2): nonfirst(T) may_be_replaced_by [] if [ length(T)=1 ]. nonfirst(3): nonfirst(X @ Y) & nonfirst(X) @ Y are_interchangeable if [ X<>[] ]. nonfirst(4): nonfirst([H|T] @ Y) may_be_replaced_by T @ Y. /*** Nonlast rules ***/ nonlast(1): nonlast([H]) may_be_replaced_by []. nonlast(2): nonlast(T) may_be_replaced_by [] if [ length(T)=1 ]. nonlast(3): nonlast([H|T]) & [H|T1] are_interchangeable if [ nonlast(T)=T1 ]. nonlast(4): nonlast(X @ Y) & X @ nonlast(Y) are_interchangeable if [ Y<>[] ]. nonlast(5): nonlast(X @ [H]) may_be_replaced_by X. /*** Sequence Equality rules ***/ seq(1): X = Y <-> ((first(X)=first(Y) and nonfirst(X)=nonfirst(Y)) or (X=[] and Y=[])) may_be_deduced. seq(2): X = Y may_be_deduced_from [ first(X)=first(Y), nonfirst(X)=nonfirst(Y) ]. seq(3): X = Y <-> ((nonlast(X)=nonlast(Y) and last(X)=last(Y)) or (X=[] and Y=[])) may_be_deduced. seq(4): X = Y may_be_deduced_from [ nonlast(X)=nonlast(Y), last(X)=last(Y) ]. spark-2012.0.deb/lib/checker/rules/QUANTIF.RUL0000644000175000017500000000774011753202341017444 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. 
% % ============================================================================= %------------------------------------------------------------------------------- % RULE FAMILY CONTAINED HEREIN :- % % quant : rules for quantified expressions %------------------------------------------------------------------------------- % MODEL DECLARATION FOR THIS FILE :- % % rule_family quant: % for_all(X, Y) requires [ X:any, Y:any ] & % not X requires [ X:any ] & % for_some(X, Y) requires [ X:any, Y:any ] & % X or Y requires [ X:any, Y:any ]. %------------------------------------------------------------------------------- /*** COMMON QUANTIFICATION TECHNIQUES ***/ /*** Rewriting of quantified terms ***/ quant(1): for_all(X,Y) & (not for_some(X,not Y)) are_interchangeable. quant(2): for_all(X,not Y) & (not for_some(X,Y)) are_interchangeable. quant(3): for_some(X,Y) & (not for_all(X,not Y)) are_interchangeable. quant(4): for_some(X,not Y) & (not for_all(X,Y)) are_interchangeable. quant(5): for_all(X,for_all(Y,P)) & for_all(Y,for_all(X,P)) are_interchangeable. quant(6): for_some(X,for_some(Y,P)) & for_some(Y,for_some(X,P)) are_interchangeable. quant(7): for_some(Y,for_all(X,P)) may_be_replaced_by for_all(X,for_some(Y,P)). /*** Simple inferences ***/ quant(8): for_some(X,P) may_be_deduced_from [ for_all(X,P) ]. /*** Inferences involving 'AND' ***/ quant(9): for_all(X,A) may_be_deduced_from [ for_all(X,A and B) ]. quant(10): for_all(X,B) may_be_deduced_from [ for_all(X,A and B) ]. quant(11): for_all(X,A and B) may_be_deduced_from [ for_all(X,A), for_all(X,B) ]. quant(12): for_some(X,A) may_be_deduced_from [ for_some(X,A and B) ]. quant(13): for_some(X,B) may_be_deduced_from [ for_some(X,A and B) ]. /*** Inferences involving 'OR' ***/ quant(14): for_some(X,A or B) may_be_deduced_from [ for_some(X,A) ]. quant(15): for_some(X,A or B) may_be_deduced_from [ for_some(X,B) ]. quant(16): for_some(X,A) or for_some(X,B) may_be_deduced_from [ for_some(X,A or B) ]. quant(17): for_all(X,A or B) may_be_deduced_from [ for_all(X,A) ]. quant(18): for_all(X,A or B) may_be_deduced_from [ for_all(X,B) ]. /*** Inferences involving '->' ***/ quant(19): for_some(X,A -> B) may_be_deduced_from [ for_some(X,not A) ]. quant(20): for_some(X,A -> B) may_be_deduced_from [ for_some(X,B) ]. quant(21): for_some(X,not A) or for_some(X,B) may_be_deduced_from [ for_some(X,A -> B) ]. quant(22): for_all(X,A -> B) may_be_deduced_from [ for_all(X,not A) ]. quant(23): for_all(X,A -> B) may_be_deduced_from [ for_all(X,B) ]. /*** Inferences involving '<->' ***/ quant(24): for_all(X,A -> B) may_be_deduced_from [ for_all(X,A <-> B) ]. quant(25): for_all(X,B -> A) may_be_deduced_from [ for_all(X,A <-> B) ]. quant(26): for_all(X,A <-> B) may_be_deduced_from [ for_all(X,A -> B), for_all(X,B -> A) ]. quant(27): for_some(X,A -> B) may_be_deduced_from [ for_some(X,A <-> B) ]. quant(28): for_some(X,B -> A) may_be_deduced_from [ for_some(X,A <-> B) ]. spark-2012.0.deb/lib/checker/rules/RECORD.RUL0000644000175000017500000005770411753202341017320 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. 
The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %------------------------------------------------------------------------------- % RULE FAMILIES CONTAINED HEREIN :- % % record : record-type simplification rules % record_equality : equality of two record-type objects % mk__record : rules specific to SPARK record aggregates %------------------------------------------------------------------------------- % MODEL DECLARATIONS FOR THIS FILE :- % % rule_family record: % X requires [ X:any ]. % % rule_family record_equality: % X=Y requires [ X:any, Y:any ]. % % rule_family mk__record: % X requires [ X:any ]. %------------------------------------------------------------------------------- /*** RECORD SIMPLIFICATION rules ***/ /* record(1): fld_F(upf_F(_, VALUE)) may_be_replaced_by VALUE. */ RECORD1: FLD may_be_replaced_by VALUE if [ goal(nonvar(RECORD1)), goal(RECORD1=record(1)), goal(record_function(_, FLD, access, F, [RECORD], TYPE)), goal(record_function(_, RECORD, update, F, [_, VALUE], TYPE)) ]. /* record(2): upf_F(REC, fld_F(REC)) may_be_replaced_by REC. */ RECORD2: UPF may_be_replaced_by REC if [ goal(nonvar(RECORD2)), goal(RECORD2=record(2)), goal(record_function(_, UPF, update, F, [REC, VALUE], TYPE)), goal(record_function(_, VALUE, access, F, [REC], TYPE)) ]. /* record(3): upf_F(upf_G(R,VG),VF) may_be_replaced_by upf_G(upf_F(R,VF),VG) if [ "F" <> "G" ]. */ RECORD3: UPF_F may_be_replaced_by UPF_G if [ goal(nonvar(RECORD3)), goal(RECORD3=record(3)), goal(record_function(_, UPF_F, update, F, [REC1, VF], TYPE)), goal(record_function(_, REC1, update, G, [R, VG], TYPE)), goal(F \= G), goal(record_function(_, REC2, update, F, [R, VF], TYPE)), goal(record_function(_, UPF_G, update, G, [REC2, VG], TYPE)) ]. /* record(4): fld_F(upf_G(R, V)) may_be_replaced_by fld_F(R) if [ "F" <> "G" ]. */ RECORD4: FLD_F_UPF_G may_be_replaced_by FLD_F_R if [ goal(nonvar(RECORD4)), goal(RECORD4=record(4)), goal(record_function(_, FLD_F_UPF_G, access, F, [UPF_G], TYPE)), goal(record_function(_, UPF_G, update, G, [R, V], TYPE)), goal(F \= G), goal(record_function(_, FLD_F_R, access, F, [R], TYPE)) ]. /* record(5): upf_F(upf_F(R, _), V) may_be_replaced_by upf_F(R, V). */ RECORD5: UPF_F_UPF_F_R may_be_replaced_by UPF_F_R if [ goal(nonvar(RECORD5)), goal(RECORD5=record(5)), goal(record_function(_, UPF_F_UPF_F_R, update, F, [UPF_F, V], TYPE)), goal(record_function(_, UPF_F, update, F, [R, _], TYPE)), goal(record_function(_, UPF_F_R, update, F, [R, V], TYPE)) ]. /*** RECORD EQUALITY RULES ***/ /* record_equality(1): {for records with ONE field} R=S may_be_deduced_from [ fld_F1(R)=fld_F1(S) ]. */ RECORD_EQUALITY1: R = S may_be_deduced_from [ goal(nonvar(RECORD_EQUALITY1)), goal(RECORD_EQUALITY1=record_equality(1)), goal(checktype(R,RECORD_TYPE)), goal(type(RECORD_TYPE, record([[F1,_]]))), goal(checktype(S,RECORD_TYPE)), goal(record_function(_, R1, access, F1, [R], RECORD_TYPE)), goal(record_function(_, S1, access, F1, [S], RECORD_TYPE)), R1 = S1 ]. 
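/* Worked instance of the simplification rules record(1) to record(5) above
   (editorial illustration only; the record object P and its fields x and y
   are hypothetical).  record(1) rewrites  fld_x(upf_x(P, V))  to  V;
   record(4) rewrites  fld_x(upf_y(P, W))  to  fld_x(P), because x and y are
   distinct fields; and record(5) rewrites  upf_x(upf_x(P, V1), V2)  to
   upf_x(P, V2), since the inner update is overwritten. */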
/* record_equality(2): {for records with TWO fields} R=S may_be_deduced_from [ fld_F1(R)=fld_F1(S), fld_F2(R)=fld_F2(S) ]. */ RECORD_EQUALITY2: R = S may_be_deduced_from [ goal(nonvar(RECORD_EQUALITY2)), goal(RECORD_EQUALITY2=record_equality(2)), goal(checktype(R,RECORD_TYPE)), goal(type(RECORD_TYPE, record([[F1,_],[F2,_]]))), goal(checktype(S,RECORD_TYPE)), goal(record_function(_, R1, access, F1, [R], RECORD_TYPE)), goal(record_function(_, S1, access, F1, [S], RECORD_TYPE)), goal(record_function(_, R2, access, F2, [R], RECORD_TYPE)), goal(record_function(_, S2, access, F2, [S], RECORD_TYPE)), R1 = S1, R2 = S2 ]. /* record_equality(3): {for records with THREE fields} R=S may_be_deduced_from [ fld_F1(R)=fld_F1(S), fld_F2(R)=fld_F2(S), fld_F3(R)=fld_F3(S) ]. */ RECORD_EQUALITY3: R = S may_be_deduced_from [ goal(nonvar(RECORD_EQUALITY3)), goal(RECORD_EQUALITY3=record_equality(3)), goal(checktype(R,RECORD_TYPE)), goal(type(RECORD_TYPE, record([[F1,_],[F2,_],[F3,_]]))), goal(checktype(S,RECORD_TYPE)), goal(record_function(_, R1, access, F1, [R], RECORD_TYPE)), goal(record_function(_, S1, access, F1, [S], RECORD_TYPE)), goal(record_function(_, R2, access, F2, [R], RECORD_TYPE)), goal(record_function(_, S2, access, F2, [S], RECORD_TYPE)), goal(record_function(_, R3, access, F3, [R], RECORD_TYPE)), goal(record_function(_, S3, access, F3, [S], RECORD_TYPE)), R1 = S1, R2 = S2, R3 = S3 ]. /* record_equality(4): {for records with FOUR fields} R=S may_be_deduced_from [ fld_F1(R)=fld_F1(S), fld_F2(R)=fld_F2(S), fld_F3(R)=fld_F3(S), fld_F4(R)=fld_F4(S) ]. */ RECORD_EQUALITY4: R = S may_be_deduced_from [ goal(nonvar(RECORD_EQUALITY4)), goal(RECORD_EQUALITY4=record_equality(4)), goal(checktype(R,RECORD_TYPE)), goal(type(RECORD_TYPE, record([[F1,_],[F2,_],[F3,_],[F4,_]]))), goal(checktype(S,RECORD_TYPE)), goal(record_function(_, R1, access, F1, [R], RECORD_TYPE)), goal(record_function(_, S1, access, F1, [S], RECORD_TYPE)), goal(record_function(_, R2, access, F2, [R], RECORD_TYPE)), goal(record_function(_, S2, access, F2, [S], RECORD_TYPE)), goal(record_function(_, R3, access, F3, [R], RECORD_TYPE)), goal(record_function(_, S3, access, F3, [S], RECORD_TYPE)), goal(record_function(_, R4, access, F4, [R], RECORD_TYPE)), goal(record_function(_, S4, access, F4, [S], RECORD_TYPE)), R1 = S1, R2 = S2, R3 = S3, R4 = S4 ]. /* record_equality(5): {for records with FIVE fields} R=S may_be_deduced_from [ fld_F1(R)=fld_F1(S), fld_F2(R)=fld_F2(S), fld_F3(R)=fld_F3(S), fld_F4(R)=fld_F4(S), fld_F5(R)=fld_F5(S) ]. */ RECORD_EQUALITY5: R = S may_be_deduced_from [ goal(nonvar(RECORD_EQUALITY5)), goal(RECORD_EQUALITY5=record_equality(5)), goal(checktype(R,RECORD_TYPE)), goal(type(RECORD_TYPE, record([[F1,_],[F2,_],[F3,_],[F4,_],[F5,_]]))), goal(checktype(S,RECORD_TYPE)), goal(record_function(_, R1, access, F1, [R], RECORD_TYPE)), goal(record_function(_, S1, access, F1, [S], RECORD_TYPE)), goal(record_function(_, R2, access, F2, [R], RECORD_TYPE)), goal(record_function(_, S2, access, F2, [S], RECORD_TYPE)), goal(record_function(_, R3, access, F3, [R], RECORD_TYPE)), goal(record_function(_, S3, access, F3, [S], RECORD_TYPE)), goal(record_function(_, R4, access, F4, [R], RECORD_TYPE)), goal(record_function(_, S4, access, F4, [S], RECORD_TYPE)), goal(record_function(_, R5, access, F5, [R], RECORD_TYPE)), goal(record_function(_, S5, access, F5, [S], RECORD_TYPE)), R1 = S1, R2 = S2, R3 = S3, R4 = S4, R5 = S5 ]. 
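/* Worked instance of record_equality(2) above (editorial illustration only;
   the record type "pair" with fields x and y is hypothetical).  For P and Q
   of type pair, the rule deduces  P = Q  from the conditions
   fld_x(P) = fld_x(Q)  and  fld_y(P) = fld_y(Q); both field equalities must
   be established before the conclusion is drawn. */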
/* record_equality(6): {for records with SIX fields} R=S may_be_deduced_from [ fld_F1(R)=fld_F1(S), fld_F2(R)=fld_F2(S), fld_F3(R)=fld_F3(S), fld_F4(R)=fld_F4(S), fld_F5(R)=fld_F5(S), fld_F6(R)=fld_F6(S) ]. */ RECORD_EQUALITY6: R = S may_be_deduced_from [ goal(nonvar(RECORD_EQUALITY6)), goal(RECORD_EQUALITY6=record_equality(6)), goal(checktype(R,RECORD_TYPE)), goal(type(RECORD_TYPE, record([[F1,_],[F2,_],[F3,_],[F4,_],[F5,_],[F6,_]]))), goal(checktype(S,RECORD_TYPE)), goal(record_function(_, R1, access, F1, [R], RECORD_TYPE)), goal(record_function(_, S1, access, F1, [S], RECORD_TYPE)), goal(record_function(_, R2, access, F2, [R], RECORD_TYPE)), goal(record_function(_, S2, access, F2, [S], RECORD_TYPE)), goal(record_function(_, R3, access, F3, [R], RECORD_TYPE)), goal(record_function(_, S3, access, F3, [S], RECORD_TYPE)), goal(record_function(_, R4, access, F4, [R], RECORD_TYPE)), goal(record_function(_, S4, access, F4, [S], RECORD_TYPE)), goal(record_function(_, R5, access, F5, [R], RECORD_TYPE)), goal(record_function(_, S5, access, F5, [S], RECORD_TYPE)), goal(record_function(_, R6, access, F6, [R], RECORD_TYPE)), goal(record_function(_, S6, access, F6, [S], RECORD_TYPE)), R1 = S1, R2 = S2, R3 = S3, R4 = S4, R5 = S5, R6 = S6 ]. /* record_equality(7): {for records with SEVEN fields} R=S may_be_deduced_from [ fld_F1(R)=fld_F1(S), fld_F2(R)=fld_F2(S), fld_F3(R)=fld_F3(S), fld_F4(R)=fld_F4(S), fld_F5(R)=fld_F5(S), fld_F6(R)=fld_F6(S), fld_F7(R)=fld_F7(S) ]. */ RECORD_EQUALITY7: R = S may_be_deduced_from [ goal(nonvar(RECORD_EQUALITY7)), goal(RECORD_EQUALITY7=record_equality(7)), goal(checktype(R,RECORD_TYPE)), goal(type(RECORD_TYPE, record([[F1,_],[F2,_],[F3,_],[F4,_],[F5,_],[F6,_], [F7,_]]))), goal(checktype(S,RECORD_TYPE)), goal(record_function(_, R1, access, F1, [R], RECORD_TYPE)), goal(record_function(_, S1, access, F1, [S], RECORD_TYPE)), goal(record_function(_, R2, access, F2, [R], RECORD_TYPE)), goal(record_function(_, S2, access, F2, [S], RECORD_TYPE)), goal(record_function(_, R3, access, F3, [R], RECORD_TYPE)), goal(record_function(_, S3, access, F3, [S], RECORD_TYPE)), goal(record_function(_, R4, access, F4, [R], RECORD_TYPE)), goal(record_function(_, S4, access, F4, [S], RECORD_TYPE)), goal(record_function(_, R5, access, F5, [R], RECORD_TYPE)), goal(record_function(_, S5, access, F5, [S], RECORD_TYPE)), goal(record_function(_, R6, access, F6, [R], RECORD_TYPE)), goal(record_function(_, S6, access, F6, [S], RECORD_TYPE)), goal(record_function(_, R7, access, F7, [R], RECORD_TYPE)), goal(record_function(_, S7, access, F7, [S], RECORD_TYPE)), R1=S1, R2=S2, R3=S3, R4=S4, R5=S5, R6=S6, R7=S7 ]. /* record_equality(8): {for records with EIGHT fields} R=S may_be_deduced_from [ fld_F1(R)=fld_F1(S), fld_F2(R)=fld_F2(S), fld_F3(R)=fld_F3(S), fld_F4(R)=fld_F4(S), fld_F5(R)=fld_F5(S), fld_F6(R)=fld_F6(S), fld_F7(R)=fld_F7(S), fld_F8(R)=fld_F8(S) ]. 
*/ RECORD_EQUALITY8: R = S may_be_deduced_from [ goal(nonvar(RECORD_EQUALITY8)), goal(RECORD_EQUALITY8=record_equality(8)), goal(checktype(R,RECORD_TYPE)), goal(type(RECORD_TYPE, record([[F1,_],[F2,_],[F3,_],[F4,_],[F5,_],[F6,_], [F7,_],[F8,_]]))), goal(checktype(S,RECORD_TYPE)), goal(record_function(_, R1, access, F1, [R], RECORD_TYPE)), goal(record_function(_, S1, access, F1, [S], RECORD_TYPE)), goal(record_function(_, R2, access, F2, [R], RECORD_TYPE)), goal(record_function(_, S2, access, F2, [S], RECORD_TYPE)), goal(record_function(_, R3, access, F3, [R], RECORD_TYPE)), goal(record_function(_, S3, access, F3, [S], RECORD_TYPE)), goal(record_function(_, R4, access, F4, [R], RECORD_TYPE)), goal(record_function(_, S4, access, F4, [S], RECORD_TYPE)), goal(record_function(_, R5, access, F5, [R], RECORD_TYPE)), goal(record_function(_, S5, access, F5, [S], RECORD_TYPE)), goal(record_function(_, R6, access, F6, [R], RECORD_TYPE)), goal(record_function(_, S6, access, F6, [S], RECORD_TYPE)), goal(record_function(_, R7, access, F7, [R], RECORD_TYPE)), goal(record_function(_, S7, access, F7, [S], RECORD_TYPE)), goal(record_function(_, R8, access, F8, [R], RECORD_TYPE)), goal(record_function(_, S8, access, F8, [S], RECORD_TYPE)), R1=S1, R2=S2, R3=S3, R4=S4, R5=S5, R6=S6, R7=S7, R8=S8 ]. /* record_equality(9): {for records with NINE fields} R=S may_be_deduced_from [ fld_F1(R)=fld_F1(S), fld_F2(R)=fld_F2(S), fld_F3(R)=fld_F3(S), fld_F4(R)=fld_F4(S), fld_F5(R)=fld_F5(S), fld_F6(R)=fld_F6(S), fld_F7(R)=fld_F7(S), fld_F8(R)=fld_F8(S), fld_F9(R)=fld_F9(S) ]. */ RECORD_EQUALITY9: R = S may_be_deduced_from [ goal(nonvar(RECORD_EQUALITY9)), goal(RECORD_EQUALITY9=record_equality(9)), goal(checktype(R,RECORD_TYPE)), goal(type(RECORD_TYPE, record([[F1,_],[F2,_],[F3,_],[F4,_],[F5,_],[F6,_], [F7,_],[F8,_],[F9,_]]))), goal(checktype(S,RECORD_TYPE)), goal(record_function(_, R1, access, F1, [R], RECORD_TYPE)), goal(record_function(_, S1, access, F1, [S], RECORD_TYPE)), goal(record_function(_, R2, access, F2, [R], RECORD_TYPE)), goal(record_function(_, S2, access, F2, [S], RECORD_TYPE)), goal(record_function(_, R3, access, F3, [R], RECORD_TYPE)), goal(record_function(_, S3, access, F3, [S], RECORD_TYPE)), goal(record_function(_, R4, access, F4, [R], RECORD_TYPE)), goal(record_function(_, S4, access, F4, [S], RECORD_TYPE)), goal(record_function(_, R5, access, F5, [R], RECORD_TYPE)), goal(record_function(_, S5, access, F5, [S], RECORD_TYPE)), goal(record_function(_, R6, access, F6, [R], RECORD_TYPE)), goal(record_function(_, S6, access, F6, [S], RECORD_TYPE)), goal(record_function(_, R7, access, F7, [R], RECORD_TYPE)), goal(record_function(_, S7, access, F7, [S], RECORD_TYPE)), goal(record_function(_, R8, access, F8, [R], RECORD_TYPE)), goal(record_function(_, S8, access, F8, [S], RECORD_TYPE)), goal(record_function(_, R9, access, F9, [R], RECORD_TYPE)), goal(record_function(_, S9, access, F9, [S], RECORD_TYPE)), R1=S1, R2=S2, R3=S3, R4=S4, R5=S5, R6=S6, R7=S7, R8=S8, R9=S9 ]. /* record_equality(10): {for records with TEN fields} R=S may_be_deduced_from [ fld_F1(R)=fld_F1(S), fld_F2(R)=fld_F2(S), fld_F3(R)=fld_F3(S), fld_F4(R)=fld_F4(S), fld_F5(R)=fld_F5(S), fld_F6(R)=fld_F6(S), fld_F7(R)=fld_F7(S), fld_F8(R)=fld_F8(S), fld_F9(R)=fld_F9(S), fld_F10(R)=fld_F10(S) ]. 
*/ RECORD_EQUALITY10: R = S may_be_deduced_from [ goal(nonvar(RECORD_EQUALITY10)), goal(RECORD_EQUALITY10=record_equality(10)), goal(checktype(R,RECORD_TYPE)), goal(type(RECORD_TYPE, record([[F1,_],[F2,_],[F3,_],[F4,_],[F5,_],[F6,_], [F7,_],[F8,_],[F9,_],[F10,_]]))), goal(checktype(S,RECORD_TYPE)), goal(record_function(_, R1, access, F1, [R], RECORD_TYPE)), goal(record_function(_, S1, access, F1, [S], RECORD_TYPE)), goal(record_function(_, R2, access, F2, [R], RECORD_TYPE)), goal(record_function(_, S2, access, F2, [S], RECORD_TYPE)), goal(record_function(_, R3, access, F3, [R], RECORD_TYPE)), goal(record_function(_, S3, access, F3, [S], RECORD_TYPE)), goal(record_function(_, R4, access, F4, [R], RECORD_TYPE)), goal(record_function(_, S4, access, F4, [S], RECORD_TYPE)), goal(record_function(_, R5, access, F5, [R], RECORD_TYPE)), goal(record_function(_, S5, access, F5, [S], RECORD_TYPE)), goal(record_function(_, R6, access, F6, [R], RECORD_TYPE)), goal(record_function(_, S6, access, F6, [S], RECORD_TYPE)), goal(record_function(_, R7, access, F7, [R], RECORD_TYPE)), goal(record_function(_, S7, access, F7, [S], RECORD_TYPE)), goal(record_function(_, R8, access, F8, [R], RECORD_TYPE)), goal(record_function(_, S8, access, F8, [S], RECORD_TYPE)), goal(record_function(_, R9, access, F9, [R], RECORD_TYPE)), goal(record_function(_, S9, access, F9, [S], RECORD_TYPE)), goal(record_function(_, R10, access, F10, [R], RECORD_TYPE)), goal(record_function(_, S10, access, F10, [S], RECORD_TYPE)), R1=S1, R2=S2, R3=S3, R4=S4, R5=S5, R6=S6, R7=S7, R8=S8, R9=S9, R10=S10 ]. /* record_equality(11): {for records with ELEVEN or more fields} R=S may_be_deduced_from [ fld_F1(R)=fld_F1(S), fld_F2(R)=fld_F2(S), fld_F3(R)=fld_F3(S), fld_F4(R)=fld_F4(S), fld_F5(R)=fld_F5(S), fld_F6(R)=fld_F6(S), fld_F7(R)=fld_F7(S), fld_F8(R)=fld_F8(S), fld_F9(R)=fld_F9(S), fld_F10(R)=fld_F10(S), ... ]. */ RECORD_EQUALITY11: R = S may_be_deduced_from [ goal(nonvar(RECORD_EQUALITY11)), goal(RECORD_EQUALITY11=record_equality(11)), goal(checktype(R,RECORD_TYPE)), goal(type(RECORD_TYPE,record(FIELD_LIST))), goal(length(FIELD_LIST, NO_OF_FIELDS)), goal(NO_OF_FIELDS > 10), goal(checktype(S,RECORD_TYPE)), goal(make_record_equality_goal(FIELD_LIST,R,S,GOAL)), GOAL ]. /* mk__record(1): fld_FIELD(mk__record(LARGS...(FIELD := VALUE)...)) may_be_replaced_by VALUE if [ \+ ((FIELD := ...) is_in LARGS), \+ ((FIELD := ...) is_in RARGS) ]. N.B.: Side conditions check uniqueness of assignment to FIELD. */ MK__RECORD1: FLD_FIELD_MK__RECORD_LARGS_FIELD_VALUE_RARGS may_be_replaced_by VALUE if [ goal(nonvar(MK__RECORD1)), goal(MK__RECORD1 = mk__record(1)), goal(record_function(_, FLD_FIELD_MK__RECORD_LARGS_FIELD_VALUE_RARGS, access, FIELD, [MK__RECORD_LARGS_FIELD_VALUE_RARGS], _)), goal(nonvar(MK__RECORD_LARGS_FIELD_VALUE_RARGS)), goal(MK__RECORD_LARGS_FIELD_VALUE_RARGS =.. [mk__record|LARGS_FIELD_VALUE_RARGS]), goal(append(LARGS, [(FIELD := VALUE)|RARGS], LARGS_FIELD_VALUE_RARGS)), goal(\+ is_in((FIELD := _), LARGS)), goal(\+ is_in((FIELD := _), RARGS)) ]. /* mk__record(2): upf_FIELD(mk__record(LARGS...(FIELD := OLDVAL)...RARGS), NEWVAL) may_be_replaced_by mk__record(LARGS...(FIELD := NEWVAL)...RARGS) if [ \+ ((FIELD := ...) is_in LARGS), \+ ((FIELD := ...) is_in RARGS) ]. N.B.: Side conditions check uniqueness of assignment to FIELD. 
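As an added illustration (using a hypothetical two-field aggregate with fields x and y): fld_x(mk__record(x := 3, y := 4)) may be replaced by 3 via mk__record(1), and upf_x(mk__record(x := 3, y := 4), 7) may be replaced by mk__record(x := 7, y := 4) via mk__record(2); in both cases the side conditions hold because x is assigned exactly once in the aggregate.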
*/ MK__RECORD2: UPF_FIELD_MK__RECORD_LARGS_FIELD_OLDVAL_RARGS_NEWVAL may_be_replaced_by MK__RECORD_LARGS_FIELD_NEWVAL_RARGS if [ goal(nonvar(MK__RECORD2)), goal(MK__RECORD2 = mk__record(2)), goal(record_function(_, UPF_FIELD_MK__RECORD_LARGS_FIELD_OLDVAL_RARGS_NEWVAL, update, FIELD, [MK__RECORD_LARGS_FIELD_OLDVAL_RARGS, NEWVAL], _)), goal(nonvar(MK__RECORD_LARGS_FIELD_OLDVAL_RARGS)), goal(MK__RECORD_LARGS_FIELD_OLDVAL_RARGS =.. [mk__record|LARGS_FIELD_OLDVAL_RARGS]), goal(append(LARGS, [(FIELD := OLDVAL)|RARGS], LARGS_FIELD_OLDVAL_RARGS)), goal(\+ is_in((FIELD := _), LARGS)), goal(\+ is_in((FIELD := _), RARGS)), goal(append(LARGS, [(FIELD := NEWVAL)|RARGS], LARGS_FIELD_NEWVAL_RARGS)), goal(MK__RECORD_LARGS_FIELD_NEWVAL_RARGS =.. [mk__record|LARGS_FIELD_NEWVAL_RARGS]) ]. /* mk__record(3): fld_FIELD(mk__RECORDTYPE(LARGS...(FIELD := VALUE)...)) may_be_replaced_by VALUE if [ \+ ((FIELD := ...) is_in LARGS), \+ ((FIELD := ...) is_in RARGS) ]. N.B.: Side conditions check uniqueness of assignment to FIELD. */ MK__RECORD3: FLD_FIELD_MK__RECORD_LARGS_FIELD_VALUE_RARGS may_be_replaced_by VALUE if [ goal(nonvar(MK__RECORD3)), goal(MK__RECORD3 = mk__record(3)), goal(record_function(_, FLD_FIELD_MK__RECORD_LARGS_FIELD_VALUE_RARGS, access, FIELD, [MK__RECORD_LARGS_FIELD_VALUE_RARGS], _)), goal(nonvar(MK__RECORD_LARGS_FIELD_VALUE_RARGS)), goal(MK__RECORD_LARGS_FIELD_VALUE_RARGS =.. [MK__RECORDTYPE|LARGS_FIELD_VALUE_RARGS]), goal(mk__function_name(MK__RECORDTYPE, _, record)), goal(append(LARGS, [(FIELD := VALUE)|RARGS], LARGS_FIELD_VALUE_RARGS)), goal(\+ is_in((FIELD := _), LARGS)), goal(\+ is_in((FIELD := _), RARGS)) ]. /* mk__record(4): upf_FIELD(mk__RECORDTYPE(LARGS...(FIELD := OLDVAL)...RARGS), NEWVAL) may_be_replaced_by mk__RECORDTYPE(LARGS...(FIELD := NEWVAL)...RARGS) if [ \+ ((FIELD := ...) is_in LARGS), \+ ((FIELD := ...) is_in RARGS) ]. N.B.: Side conditions check uniqueness of assignment to FIELD. */ MK__RECORD4: UPF_FIELD_MK__RECORD_LARGS_FIELD_OLDVAL_RARGS_NEWVAL may_be_replaced_by MK__RECORD_LARGS_FIELD_NEWVAL_RARGS if [ goal(nonvar(MK__RECORD4)), goal(MK__RECORD4 = mk__record(4)), goal(record_function(_, UPF_FIELD_MK__RECORD_LARGS_FIELD_OLDVAL_RARGS_NEWVAL, update, FIELD, [MK__RECORD_LARGS_FIELD_OLDVAL_RARGS, NEWVAL], _)), goal(nonvar(MK__RECORD_LARGS_FIELD_OLDVAL_RARGS)), goal(MK__RECORD_LARGS_FIELD_OLDVAL_RARGS =.. [MK__RECORDTYPE|LARGS_FIELD_OLDVAL_RARGS]), goal(mk__function_name(MK__RECORDTYPE, _, record)), goal(append(LARGS, [(FIELD := OLDVAL)|RARGS], LARGS_FIELD_OLDVAL_RARGS)), goal(\+ is_in((FIELD := _), LARGS)), goal(\+ is_in((FIELD := _), RARGS)), goal(append(LARGS, [(FIELD := NEWVAL)|RARGS], LARGS_FIELD_NEWVAL_RARGS)), goal(MK__RECORD_LARGS_FIELD_NEWVAL_RARGS =.. [MK__RECORDTYPE|LARGS_FIELD_NEWVAL_RARGS]) ]. spark-2012.0.deb/lib/checker/rules/SETS.RUL0000644000175000017500000001233411753202341017106 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. 
You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %------------------------------------------------------------------------------- % RULE FAMILY CONTAINED HEREIN :- % % sets : properties of FDL set types %------------------------------------------------------------------------------- % MODEL DECLARATION FOR THIS FILE :- % % rule_family sets: % X in Y requires [ X:any, Y:any ] & % X not_in Y requires [ X:any, Y:any ] & % not X requires [ X:any ] & % X \/ Y requires [ X:any, Y:any ] & % X /\ Y requires [ X:any, Y:any ] & % X \ Y requires [ X:any, Y:any ] & % X or Y requires [ X:any, Y:any ] & % X subset_of Y requires [ X:any, Y:any ] & % X strict_subset_of Y requires [ X:any, Y:any ]. %------------------------------------------------------------------------------- /*** Set rules ***/ /*** Element of ***/ sets(1): X in (set A) may_be_deduced_from [ goal(is_in(X,A)) ]. sets(2): (X not_in A) & (not (X in A)) are_interchangeable. sets(3): (X in A) & (not (X not_in A)) are_interchangeable. /*** Union ***/ sets(4): X in (A \/ B) may_be_deduced_from [ X in A ]. sets(5): X in (A \/ B) may_be_deduced_from [ X in B ]. sets(6): X not_in (A \/ B) may_be_deduced_from [ X not_in A, X not_in B ]. sets(7): A \/ B may_be_replaced_by A if [ B subset_of A ]. sets(8): B \/ A may_be_replaced_by A if [ B subset_of A ]. sets(9): A \/ (set []) may_be_replaced_by A if []. sets(10): (set []) \/ A may_be_replaced_by A if []. sets(11): (set A) \/ (set B) may_be_replaced_by (set C) if [ goal(set_union(A,B,C)) ]. /*** Intersection ***/ sets(12): X in (A /\ B) may_be_deduced_from [ X in A, X in B ]. sets(13): X not_in (A /\ B) may_be_deduced_from [ X not_in A ]. sets(14): X not_in (A /\ B) may_be_deduced_from [ X not_in B ]. sets(15): A /\ B may_be_replaced_by A if [ A subset_of B ]. sets(16): B /\ A may_be_replaced_by A if [ A subset_of B ]. sets(17): (set []) /\ B may_be_replaced_by (set []) if []. sets(18): A /\ (set []) may_be_replaced_by (set []) if []. sets(19): ((set A) /\ (set B)) may_be_replaced_by (set C) if [ goal(set_intersect(A,B,C)) ]. /*** 'Lacking' ***/ sets(20): X in (A \ B) may_be_deduced_from [ X in A, X not_in B ]. sets(21): X not_in (A \ B) may_be_deduced_from [ X not_in A ]. sets(22): X not_in (A \ B) may_be_deduced_from [ X in B ]. sets(23): (A \ B) may_be_replaced_by A if [ B=(set []) ]. sets(24): (A \ B) may_be_replaced_by A if [ (A /\ B)=(set []) ]. sets(25): (A \ B) may_be_replaced_by A if [ (B /\ A)=(set []) ]. sets(26): (set A) \ (set B) may_be_replaced_by (set C) if [ goal(set_lacking(A,B,C)) ]. sets(27): X=E or X in S may_be_deduced_from [ X in (set U), goal(is_in(E,U), set_lacking(U,[E],S)) ]. /*** Empty set ***/ sets(28): X not_in (set []) may_be_deduced. /*** Subset rules ***/ sets(29): (set []) subset_of A may_be_deduced. sets(30): A subset_of (A \/ B) may_be_deduced. sets(31): A subset_of (B \/ A) may_be_deduced. sets(32): (A /\ B) subset_of A may_be_deduced. sets(33): (B /\ A) subset_of A may_be_deduced. sets(34): (A \ B) subset_of A may_be_deduced. sets(35): A subset_of B may_be_deduced_from [ A strict_subset_of B ]. sets(36): (set A) subset_of (set B) may_be_deduced_from [ goal(subset(A,B)) ]. /*** Strict subset rule ***/ sets(37): A strict_subset_of B may_be_deduced_from [ A subset_of B, A <> B ]. 
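/* Added illustration (hypothetical literal sets; assumes the Checker's set_union,
   set_intersect and set_lacking goals compute duplicate-free results): by sets(11),
   (set [1,2]) \/ (set [2,3]) may be replaced by (set [1,2,3]); by sets(19),
   (set [1,2]) /\ (set [2,3]) may be replaced by (set [2]); and by sets(26),
   (set [1,2]) \ (set [2,3]) may be replaced by (set [1]). */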
/*** Commutativity of /\, \/ ***/
sets(38): (X \/ Y) may_be_replaced_by (Y \/ X).
sets(39): (X /\ Y) may_be_replaced_by (Y /\ X).
/*** Associativity of /\, \/ ***/
sets(40): (X \/ (Y \/ Z)) & ((X \/ Y) \/ Z) are_interchangeable.
sets(41): (X /\ (Y /\ Z)) & ((X /\ Y) /\ Z) are_interchangeable.
/*** Distributivity of /\, \/ ***/
sets(42): (X /\ (Y \/ Z)) & ((X /\ Y) \/ (X /\ Z)) are_interchangeable.
sets(43): (X \/ (Y /\ Z)) & ((X \/ Y) /\ (X \/ Z)) are_interchangeable.
spark-2012.0.deb/lib/checker/rules/NUMINEQS.RUL0000644000175000017500000002137111753202341017570 0ustar eugeneugen% -----------------------------------------------------------------------------
% (C) Altran Praxis Limited
% -----------------------------------------------------------------------------
%
% The SPARK toolset is free software; you can redistribute it and/or modify it
% under terms of the GNU General Public License as published by the Free
% Software Foundation; either version 3, or (at your option) any later
% version. The SPARK toolset is distributed in the hope that it will be
% useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
% Public License for more details. You should have received a copy of the GNU
% General Public License distributed with the SPARK toolset; see file
% COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of
% the license.
%
% =============================================================================
%-------------------------------------------------------------------------------
% RULE FAMILIES CONTAINED HEREIN :-
%
% inequals : inequalities rules for integers and reals
% zero : equality/non-equality to 0 with multiplication rules
%-------------------------------------------------------------------------------
% MODEL DECLARATIONS FOR THIS FILE :-
%
% rule_family inequals:
% X < Y requires [ X:ir, Y:ir ] &
% X <= Y requires [ X:ir, Y:ir ] &
% X > Y requires [ X:ir, Y:ir ] &
% X >= Y requires [ X:ir, Y:ir ] &
% X <> Y requires [ X:ir, Y:ir ] &
% X = Y requires [ X:ir, Y:ir ].
%
% rule_family zero:
% X = Y requires [ X:ir, Y:ir ] &
% X <> Y requires [ X:ir, Y:ir ] &
% X or Y requires [ X:any, Y:any ].
%-------------------------------------------------------------------------------
/*** INEQUALS Rules ***/
/*** Addition or subtraction of a number from one side of an inequality ***/
inequals(1): I-N<J may_be_deduced_from [ I<J, N>=0 ].
inequals(2): I-N<J may_be_deduced_from [ I<=J, N>0 ].
inequals(3): I-N<=J may_be_deduced_from [ I<=J, N>=0 ].
inequals(4): J>I-N may_be_deduced_from [ I<J, N>=0 ].
inequals(5): J>I-N may_be_deduced_from [ I<=J, N>0 ].
inequals(6): J>=I-N may_be_deduced_from [ I<=J, N>=0 ].
inequals(7): I+N>J may_be_deduced_from [ I>J, N>=0 ].
inequals(8): I+N>J may_be_deduced_from [ I>=J, N>0 ].
inequals(9): I+N>=J may_be_deduced_from [ I>=J, N>=0 ].
inequals(10): N+I>J may_be_deduced_from [ I>J, N>=0 ].
inequals(11): N+I>J may_be_deduced_from [ I>=J, N>0 ].
inequals(12): N+I>=J may_be_deduced_from [ I>=J, N>=0 ].
inequals(13): J<I+N may_be_deduced_from [ I>J, N>=0 ].
inequals(14): J<I+N may_be_deduced_from [ I>=J, N>0 ].
inequals(15): J<=I+N may_be_deduced_from [ I>=J, N>=0 ].
inequals(16): J<N+I may_be_deduced_from [ I>J, N>=0 ].
inequals(17): J<N+I may_be_deduced_from [ I>=J, N>0 ].
inequals(18): J<=N+I may_be_deduced_from [ I>=J, N>=0 ].
/*** Addition and subtraction of a number on both sides of an inequality ***/
inequals(19): I+N<J+N may_be_deduced_from [ I<J ].
inequals(20): N+I<J+N may_be_deduced_from [ I<J ].
inequals(21): I+N<N+J may_be_deduced_from [ I<J ].
inequals(22): N+I<N+J may_be_deduced_from [ I<J ].
inequals(23): I+N>J+N may_be_deduced_from [ I>J ].
inequals(24): N+I>J+N may_be_deduced_from [ I>J ].
inequals(25): I+N>N+J may_be_deduced_from [ I>J ].
inequals(26): N+I>N+J may_be_deduced_from [ I>J ].
inequals(27): I+N<>J+N may_be_deduced_from [ I<>J ].
inequals(28): N+I<>J+N may_be_deduced_from [ I<>J ].
inequals(29): I+N<>N+J may_be_deduced_from [ I<>J ].
inequals(30): N+I<>N+J may_be_deduced_from [ I<>J ].
inequals(31): I-N<J-N may_be_deduced_from [ I<J ].
inequals(32): I-N>J-N may_be_deduced_from [ I>J ].
inequals(33): I-N<>J-N may_be_deduced_from [ I<>J ].
/*** Multiplication on both sides of an inequality by a number ***/
inequals(34): I*N<>J*N may_be_deduced_from [ I<>J, N<>0 ].
inequals(35): I*N>J*N may_be_deduced_from [ I>J, N>0 ].
inequals(36): I*N<J*N may_be_deduced_from [ I<J, N>0 ].
inequals(37): I*N>J*N may_be_deduced_from [ I<J, N<0 ].
inequals(38): I*N<J*N may_be_deduced_from [ I>J, N<0 ].
inequals(39): I*N>=J*N may_be_deduced_from [ I>=J, N>=0 ].
inequals(40): I*N<=J*N may_be_deduced_from [ I<=J, N>=0 ].
inequals(41): I*N>=J*N may_be_deduced_from [ I<=J, N<=0 ].
inequals(42): I*N<=J*N may_be_deduced_from [ I>=J, N<=0 ].
/*** Addition and subtraction of two inequalities ***/
inequals(43): A+C>B+D may_be_deduced_from [ A>=B, C>D ].
inequals(44): A+C>B+D may_be_deduced_from [ A>B, C>=D ].
inequals(45): A+C>B+D may_be_deduced_from [ A>=D, C>B ].
inequals(46): A+C>B+D may_be_deduced_from [ A>D, C>=B ].
inequals(47): A-D>B-C may_be_deduced_from [ A>=B, C>D ].
inequals(48): A-D>B-C may_be_deduced_from [ A>B, C>=D ].
inequals(49): A+C<B+D may_be_deduced_from [ A<=B, C<D ].
inequals(50): A+C<B+D may_be_deduced_from [ A<B, C<=D ].
inequals(51): A+C<B+D may_be_deduced_from [ A<=D, C<B ].
inequals(52): A+C<B+D may_be_deduced_from [ A<D, C<=B ].
inequals(53): A-D<B-C may_be_deduced_from [ A<=B, C<D ].
inequals(54): A-D<B-C may_be_deduced_from [ A<B, C<=D ].
inequals(55): A+C<>B+D may_be_deduced_from [ A<>B, C=D ].
inequals(56): A+C<>B+D may_be_deduced_from [ A=B, C<>D ].
inequals(57): A+C<>B+D may_be_deduced_from [ A<>D, B=C ].
inequals(58): A+C<>B+D may_be_deduced_from [ A=D, B<>C ].
inequals(59): A-D<>B-C may_be_deduced_from [ A<>B, C=D ].
inequals(60): A-D<>B-C may_be_deduced_from [ A=B, C<>D ].
/*** Addition and subtraction of a number on both sides of an inequality ***/
inequals(61): I+N=J+N may_be_deduced_from [ I=J ].
inequals(62): N+I=J+N may_be_deduced_from [ I=J ].
inequals(63): I+N=N+J may_be_deduced_from [ I=J ].
inequals(64): N+I=N+J may_be_deduced_from [ I=J ].
inequals(65): I+N<=J+N may_be_deduced_from [ I<=J ].
inequals(66): N+I<=J+N may_be_deduced_from [ I<=J ].
inequals(67): I+N<=N+J may_be_deduced_from [ I<=J ].
inequals(68): N+I<=N+J may_be_deduced_from [ I<=J ].
inequals(69): I+N>=J+N may_be_deduced_from [ I>=J ].
inequals(70): N+I>=J+N may_be_deduced_from [ I>=J ].
inequals(71): I+N>=N+J may_be_deduced_from [ I>=J ].
inequals(72): N+I>=N+J may_be_deduced_from [ I>=J ].
inequals(73): I-N=J-N may_be_deduced_from [ I=J ].
inequals(74): I-N<=J-N may_be_deduced_from [ I<=J ].
inequals(75): I-N>=J-N may_be_deduced_from [ I>=J ].
/*** Multiplication on both sides of an inequality by a number ***/
inequals(76): I*N=J*N may_be_deduced_from [ I=J ].
/*** Addition and subtraction of two inequalities ***/
inequals(77): A+C>=B+D may_be_deduced_from [ A>=B, C>=D ].
inequals(78): A+C>=B+D may_be_deduced_from [ A>=D, C>=B ].
inequals(79): A-D>=B-C may_be_deduced_from [ A>=B, C>=D ].
inequals(80): A+C<=B+D may_be_deduced_from [ A<=B, C<=D ].
inequals(81): A+C<=B+D may_be_deduced_from [ A<=D, C<=B ].
inequals(82): A-D<=B-C may_be_deduced_from [ A<=B, C<=D ].
inequals(83): A+C=B+D may_be_deduced_from [ A=B, C=D ].
inequals(84): A+C=B+D may_be_deduced_from [ A=D, B=C ].
inequals(85): A-D=B-C may_be_deduced_from [ A=B, C=D ].
/* Rule numbering gap: 86-103 are in INTINEQS; retained for compatibility */
/*** Additional rules for inequalities with multiplication ***/
inequals(104): X=Y may_be_deduced_from [ X*N=Y*N, N<>0 ].
inequals(105): X<>Y may_be_deduced_from [ X*N<>Y*N, N<>0 ].
inequals(106): X<=Y may_be_deduced_from [ X*N<=Y*N, N>0 ].
inequals(107): X<=Y may_be_deduced_from [ X*N>=Y*N, N<0 ].
inequals(108): X>=Y may_be_deduced_from [ X*N>=Y*N, N>0 ].
inequals(109): X>=Y may_be_deduced_from [ X*N<=Y*N, N<0 ].
inequals(110): X<Y may_be_deduced_from [ X*N<Y*N, N>0 ].
inequals(111): X<Y may_be_deduced_from [ X*N>Y*N, N<0 ].
inequals(112): X>Y may_be_deduced_from [ X*N>Y*N, N>0 ].
inequals(113): X>Y may_be_deduced_from [ X*N<Y*N, N<0 ].
inequals(114): X*Y<=0 may_be_deduced_from [ X>=0, Y<=0 ].
inequals(115): X*Y<=0 may_be_deduced_from [ X<=0, Y>=0 ].
inequals(116): X*Y>=0 may_be_deduced_from [ X>=0, Y>=0 ].
inequals(117): X*Y>=0 may_be_deduced_from [ X<=0, Y<=0 ].
inequals(118): X*Y<0 may_be_deduced_from [ X>0, Y<0 ].
inequals(119): X*Y<0 may_be_deduced_from [ X<0, Y>0 ].
inequals(120): X*Y>0 may_be_deduced_from [ X>0, Y>0 ].
inequals(121): X*Y>0 may_be_deduced_from [ X<0, Y<0 ].
/*** Rules for equality and non-equality to zero involving multiplication ***/
zero(1): X * Y = 0 may_be_deduced_from [ X = 0 ].
zero(2): X * Y = 0 may_be_deduced_from [ Y = 0 ].
zero(3): X=0 or Y=0 may_be_deduced_from [ X*Y=0 ].
zero(4): X * Y <> 0 may_be_deduced_from [ X<>0, Y<>0 ].
zero(5): X <> 0 may_be_deduced_from [ X * Y <> 0 ].
zero(6): Y <> 0 may_be_deduced_from [ X * Y <> 0 ].
spark-2012.0.deb/lib/checker/rules/ARRAY.RUL0000644000175000017500000001053511753202341017207 0ustar eugeneugen% -----------------------------------------------------------------------------
% (C) Altran Praxis Limited
% -----------------------------------------------------------------------------
%
% The SPARK toolset is free software; you can redistribute it and/or modify it
% under terms of the GNU General Public License as published by the Free
% Software Foundation; either version 3, or (at your option) any later
% version. The SPARK toolset is distributed in the hope that it will be
% useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
% Public License for more details. You should have received a copy of the GNU
% General Public License distributed with the SPARK toolset; see file
% COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of
% the license.
%
% =============================================================================
%-------------------------------------------------------------------------------
% RULE FAMILY CONTAINED HEREIN :-
% array : basic properties of FDL array manipulation functions
% mk__array : rules specific to SPARK array aggregates
%-------------------------------------------------------------------------------
% MODEL DECLARATION FOR THIS FILE :-
%
% rule_family array:
% element(X, Y) requires [ X:any, Y:any ] &
% update(X, Y, Z) requires [ X:any, Y:any, Z:any ].
%
% rule_family mk__array:
% element(X, Y) requires [ X:any, Y:any ].
%-------------------------------------------------------------------------------
/*** Rules specific to ARRAY MANIPULATIONS ***/
array(1): element(update(A,I,X),I) may_be_replaced_by X.
array(2): update(A,I,element(A,I)) may_be_replaced_by A.
array(3): element(update(A,J,X),K) & element(A,K) are_interchangeable if [ J<>K ].
array(4): update(update(A,I,X),J,Y) & update(update(A,J,Y),I,X) are_interchangeable if [ I<>J ].
array(5): update(update(A,I,X),I,Y) may_be_replaced_by update(A,I,Y).
/*** Rules specific to SPARK ARRAY AGGREGATES ***/
/* mk__array(1): element(mk__array(LARGS, Inds := X), [I]) may_be_replaced_by X if [ "satisfies_index_constraint(Inds, I)" ].
In the above rule, the quoted side-condition can be met as follows:
(1) if Inds is of the form "[J]", by satisfying I = J;
(2) if Inds is of the form "[J .. K]", by satisfying J <= I <= K;
(3) if Inds is of the form "Is & Js", by satisfying either Is or Js.
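For instance, under this reading element(mk__array(1 := 0, [3 .. 5] := 7), [4]) may be replaced by 7, since the index 4 satisfies the constraint [3 .. 5] by case (2); the particular index and component values here are purely illustrative.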
Note that the last case makes the definition recursive. Equally, if we can prove that I does not satisfy the index constraints represented by Inds, we can instead consider whether it satisfies any of the index constraints in LARGS, and so on recursively until we either succeed or reach a point where neither satisfaction nor non-satisfaction can be proved (because of an insufficiently powerful inference engine, for instance, or because the index I is insufficiently constrained by the hypotheses of the VC). Given the above, the following is a general-purpose rule for fetching the value of an element of an array aggregate (where this is non-trivial): */ mk__array(1): element(MK__ARRAY_PART, [I]) may_be_replaced_by VALUE if [ goal(nonvar(MK__ARRAY_PART)), goal(MK__ARRAY_PART =.. [mk__array|ARGUMENTS]), goal(find_element(MK__ARRAY_PART, [I], VALUE)) ]. /* mk__array(2): element(mk__ARRAYTYPE(LARGS, Inds := X), [I]) may_be_replaced_by X if [ "satisfies_index_constraint(Inds, I)" ]. In the above rule, the quoted side-condition can be met as follows: (1) if Inds is of the form "[J]", by satisfying I = J; (2) if Inds is of the form "[J .. K]", by satisfying J <= I <= K; (3) if Inds is of the form "Is & Js", by satisfying either Is or Js. Note that the last case makes the definition recursive. Given the above, the following is a general-purpose rule for fetching the value of an element of a typed array aggregate (where non-trivial): */ mk__array(2): element(MK__ARRAY_PART, [I]) may_be_replaced_by VALUE if [ goal(nonvar(MK__ARRAY_PART)), goal(MK__ARRAY_PART =.. [MK__ARRAYTYPE|ARGUMENTS]), goal(mk__function_name(MK__ARRAYTYPE, _, array)), goal(find_element(MK__ARRAY_PART, [I], VALUE)) ]. /* It is recommended that the user should avoid reasoning about the equality (or otherwise) of two array aggregates. */ spark-2012.0.deb/lib/checker/rules/LOGIC.RUL0000644000175000017500000001505111753202341017164 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. 
% % ============================================================================= %------------------------------------------------------------------------------- % RULE FAMILIES CONTAINED HEREIN :- % % assoc : associativity of "and", "or" & "<->" % commut : commutativity of "and", "or" & "<->" % distrib : distributivity of "and" over "or" and vice versa % equivalence : simplification of logical "<->" expressions % implies : simplification of logical "->" expressions % logical : general logical simplification rules % logical_and : simplification of logical "and" expressions % logical_not : simplification of logical "not" expressions % logical_or : simplification of logical "or" expressions %------------------------------------------------------------------------------- % MODEL DECLARATIONS FOR THIS FILE :- % % rule_family assoc: % X and Y requires [ X:any, Y:any ] & % X or Y requires [ X:any, Y:any ] & % X <-> Y requires [ X:any, Y:any ]. % % rule_family commut: % X and Y requires [ X:any, Y:any ] & % X or Y requires [ X:any, Y:any ] & % X <-> Y requires [ X:any, Y:any ]. % % rule_family distrib: % X and Y requires [ X:any, Y:any ] & % X or Y requires [ X:any, Y:any ]. % % rule_family equivalence: % X <-> Y requires [ X:any, Y:any ]. % % rule_family implies: % X -> Y requires [ X:any, Y:any ]. % % rule_family logical_and: % X and Y requires [ X:any, Y:any ]. % % rule_family logical_not: % not X requires [ X:any ]. % % rule_family logical_or: % X or Y requires [ X:any, Y:any ]. % % rule_family logical: % not X requires [ X:any ] & % X and Y requires [ X:any, Y:any ] & % X or Y requires [ X:any, Y:any ] & % X -> Y requires [ X:any, Y:any ] & % X <-> Y requires [ X:any, Y:any ]. %------------------------------------------------------------------------------- /*** SIMPLIFICATION OF LOGICAL EXPRESSIONS Rules ***/ /*** Simplification of logical "not" expressions ***/ logical_not(1): not true may_be_replaced_by false. logical_not(2): not false may_be_replaced_by true. logical_not(3): not not A may_be_replaced_by A. /*** Simplification of logical "and" expressions ***/ logical_and(1): A and true may_be_replaced_by A. logical_and(2): true and A may_be_replaced_by A. logical_and(3): A and false may_be_replaced_by false. logical_and(4): false and A may_be_replaced_by false. logical_and(5): A and A may_be_replaced_by A. logical_and(6): A and not A may_be_replaced_by false. logical_and(7): (not A) and A may_be_replaced_by false. /*** Simplification of logical "or" expressions ***/ logical_or(1): A or false may_be_replaced_by A. logical_or(2): false or A may_be_replaced_by A. logical_or(3): A or true may_be_replaced_by true. logical_or(4): true or A may_be_replaced_by true. logical_or(5): A or A may_be_replaced_by A. logical_or(6): A or not A may_be_replaced_by true. logical_or(7): (not A) or A may_be_replaced_by true. /*** Simplification of logical "->" expressions ***/ implies(1): A -> true may_be_replaced_by true. implies(2): A -> false may_be_replaced_by not A. implies(3): true -> A may_be_replaced_by A. implies(4): false -> A may_be_replaced_by true. implies(5): A -> A may_be_replaced_by true. implies(6): A -> not A may_be_replaced_by not A. implies(7): (not A) -> A may_be_replaced_by A. /*** Simplification of logical "<->" expressions ***/ equivalence(1): A <-> true may_be_replaced_by A. equivalence(2): true <-> A may_be_replaced_by A. equivalence(3): A <-> false may_be_replaced_by not A. equivalence(4): false <-> A may_be_replaced_by not A. equivalence(5): A <-> A may_be_replaced_by true. 
equivalence(6): A <-> not A may_be_replaced_by false. equivalence(7): (not A) <-> A may_be_replaced_by false. /*** ASSOCIATIVITY of "and", "or" & "<->" Rules ***/ assoc(5): A and (B and C) & (A and B) and C are_interchangeable. assoc(6): A or (B or C) & (A or B) or C are_interchangeable. assoc(7): A <-> (B <-> C) & (A <-> B) <-> C are_interchangeable. /*** COMMUTATIVITY of "and", "or" & "<->" Rules ***/ commut(3): A and B may_be_replaced_by B and A. commut(4): A or B may_be_replaced_by B or A. commut(5): A <-> B may_be_replaced_by B <-> A. /*** DISTRIBUTIVITY of "and" and "or" over each other Rules ***/ distrib(5): (A and B) or C & (A or C) and (B or C) are_interchangeable. distrib(6): (A and B) or C & (C or A) and (C or B) are_interchangeable. distrib(7): C or (A and B) & (C or A) and (C or B) are_interchangeable. distrib(8): C or (A and B) & (A or C) and (B or C) are_interchangeable. distrib(9): (A or B) and C & (A and C) or (B and C) are_interchangeable. distrib(10): (A or B) and C & (C and A) or (C and B) are_interchangeable. distrib(11): C and (A or B) & (C and A) or (C and B) are_interchangeable. distrib(12): C and (A or B) & (A and C) or (B and C) are_interchangeable. /*** Other handy logical equivalences (DeMorgan, etc.) ***/ logical(1): not (A or B) & (not A) and (not B) are_interchangeable. logical(2): not (A and B) & (not A) or (not B) are_interchangeable. logical(3): A -> B & (not A) or B are_interchangeable. logical(4): A <-> B & (A -> B) and (B -> A) are_interchangeable. logical(5): A -> (B -> C) may_be_replaced_by (A and B) -> C. spark-2012.0.deb/lib/checker/rules/SPECIAL.RUL0000644000175000017500000001627311753202341017416 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %------------------------------------------------------------------------------- % RULE FAMILIES CONTAINED HEREIN :- % % inference : simple inference rules (useful for subgoaling) % eq : equality substitution rule % equiv : equivalence substitution rule % simplify : simplification of expressions rule % logic : truth-table logic rules % standardisation : standardisation rules % unification : top-level unification rules %------------------------------------------------------------------------------- % MODEL DECLARATIONS FOR THIS FILE :- % % rule_family inference: % X requires [ X:any ] & % X or Y requires [ X:any, Y:any ] & % X and Y requires [ X:any, Y:any ] & % X -> Y requires [ X:any, Y:any ] & % X <-> Y requires [ X:any, Y:any ]. % % rule_family eq: % X requires [ X:any ]. % % rule_family equiv: % X requires [ X:any ]. % % rule_family simplify: % X requires [ X:any ]. 
% % rule_family logic: % X requires [ X:any ]. % % rule_family standardisation: % X requires [ X:any ]. % % rule_family unification: % X requires [ X:any ]. %------------------------------------------------------------------------------- /*** Standard INFERENCE Rules ***/ inference(1): X may_be_deduced_from [ X ]. inference(2): X may_be_deduced_from [ Y -> X, Y ]. inference(3): X or Y may_be_deduced_from [ X ]. inference(4): X or Y may_be_deduced_from [ Y ]. inference(5): X and Y may_be_deduced_from [ X, Y ]. inference(6): X -> Y may_be_deduced_from [ not X ]. inference(7): X -> Y may_be_deduced_from [ Y ]. inference(8): X <-> Y may_be_deduced_from [ X -> Y, Y -> X ]. /*** SUBSTITUTION rule for equality-substitution ***/ eq(1): X may_be_replaced_by Y if [ X=Y, goal(X\=Y) ]. /*** SUBSTITUTION rule for equivalence-substitution ***/ EQUIV1: X may_be_replaced_by Y if [ goal(nonvar(EQUIV1)), goal(EQUIV1=equiv(1)), X <-> Y, goal(X\=Y) ]. /*** SIMPLIFY rule for simplification of expressions ***/ SIMPLIFY1: X may_be_replaced_by Y if [ goal(nonvar(SIMPLIFY1)), goal(SIMPLIFY1=simplify(1)), goal(var(Y)), goal(simplify(X, Y)), goal(X \= Y) ]. SIMPLIFY2: X may_be_replaced_by Y if [ goal(nonvar(SIMPLIFY2)), goal(SIMPLIFY2=simplify(2)), goal(novars(Y)), goal(X \= Y), goal(simplify(X, Z)), goal(simplify(Y, Z)) ]. /*** LOGICAL DEDUCTION rule - only allowed by explicit call ***/ LOGIC1: X may_be_deduced_from [ goal(nonvar(LOGIC1)), goal(LOGIC1=logic(1)), goal(try_deduce(X)) ]. LOGIC2: X may_be_replaced_by Y if [ goal(nonvar(LOGIC2)), goal(LOGIC2=logic(2)), goal(novars(Y)), goal(checktype(X,boolean)), goal(try_deduce(X <-> Y)) ]. /*** STANDARDISATION rule - only allowed by explicit call ***/ STAND1: X may_be_replaced_by Y if [ goal(nonvar(STAND1)), goal(STAND1=standardisation(1)), goal(novars(Y)), goal(X\=Y), goal(checktype(X,T)), goal(norm_typed_expr(X,T,Z)), goal(norm_typed_expr(Y,T,Z)) ]. STAND2: X may_be_replaced_by Y if [ goal(nonvar(STAND2)), goal(STAND2=standardisation(2)), goal(var(Y)), goal(checktype(X,T)), goal(norm_typed_expr(X,T,Y)) ]. STAND3: X may_be_deduced_from [ goal(nonvar(STAND3)), goal(STAND3=standardisation(3)), goal(norm_typed_expr(X,boolean,true)) ]. /*** UNIFICATION rules - only allowed by explicit call ***/ /* This rule family allows f(x1,...,xn)=f(y1,...,yn) to be deduced from x1=y1, ..., xn=yn (for n in 1..9). unification(n) is for the case when f takes n arguments. For any n, you can survive without these rules if you wish: simply prove f(x1,...,xn)=f(y1,...,yn) by contradiction, then establish contradiction by replacing each yi by xi (or vice versa), thereby gaining the hypothesis f(x1,...,xn)<>f(x1,...,xn), which gives false. */ UNIF1: X = Y may_be_deduced_from [ goal(nonvar(UNIF1)), goal(UNIF1=unification(1)), goal(X =.. [F,X1]), goal(Y =.. [F,Y1]), X1 = Y1 ]. UNIF2: X = Y may_be_deduced_from [ goal(nonvar(UNIF2)), goal(UNIF2=unification(2)), goal(X =.. [F,X1,X2]), goal(Y =.. [F,Y1,Y2]), X1 = Y1, X2 = Y2 ]. UNIF3: X = Y may_be_deduced_from [ goal(nonvar(UNIF3)), goal(UNIF3=unification(3)), goal(X =.. [F,X1,X2,X3]), goal(Y =.. [F,Y1,Y2,Y3]), X1 = Y1, X2 = Y2, X3 = Y3 ]. UNIF4: X = Y may_be_deduced_from [ goal(nonvar(UNIF4)), goal(UNIF4=unification(4)), goal(X =.. [F,X1,X2,X3,X4]), goal(Y =.. [F,Y1,Y2,Y3,Y4]), X1 = Y1, X2 = Y2, X3 = Y3, X4 = Y4 ]. UNIF5: X = Y may_be_deduced_from [ goal(nonvar(UNIF5)), goal(UNIF5=unification(5)), goal(X =.. [F,X1,X2,X3,X4,X5]), goal(Y =.. [F,Y1,Y2,Y3,Y4,Y5]), X1 = Y1, X2 = Y2, X3 = Y3, X4 = Y4, X5 = Y5 ]. 
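/* Added worked example of the alternative noted above: to prove f(A,B) = f(C,D)
   from A = C and B = D without unification(2), assume the negation
   f(A,B) <> f(C,D), rewrite C to A and D to B by eq(1), and the hypothesis
   becomes f(A,B) <> f(A,B), which gives false. */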
UNIF6: X = Y may_be_deduced_from [ goal(nonvar(UNIF6)), goal(UNIF6=unification(6)), goal(X =.. [F,X1,X2,X3,X4,X5,X6]), goal(Y =.. [F,Y1,Y2,Y3,Y4,Y5,Y6]), X1 = Y1, X2 = Y2, X3 = Y3, X4 = Y4, X5 = Y5, X6 = Y6 ]. UNIF7: X = Y may_be_deduced_from [ goal(nonvar(UNIF7)), goal(UNIF7=unification(7)), goal(X =.. [F,X1,X2,X3,X4,X5,X6,X7]), goal(Y =.. [F,Y1,Y2,Y3,Y4,Y5,Y6,Y7]), X1=Y1, X2=Y2, X3=Y3, X4=Y4, X5=Y5, X6=Y6, X7=Y7 ]. UNIF8: X = Y may_be_deduced_from [ goal(nonvar(UNIF8)), goal(UNIF8=unification(8)), goal(X =.. [F,X1,X2,X3,X4,X5,X6,X7,X8]), goal(Y =.. [F,Y1,Y2,Y3,Y4,Y5,Y6,Y7,Y8]), X1=Y1, X2=Y2, X3=Y3, X4=Y4, X5=Y5, X6=Y6, X7=Y7, X8=Y8 ]. UNIF9: X = Y may_be_deduced_from [ goal(nonvar(UNIF9)), goal(UNIF9=unification(9)), goal(X =.. [F,X1,X2,X3,X4,X5,X6,X7,X8,X9]), goal(Y =.. [F,Y1,Y2,Y3,Y4,Y5,Y6,Y7,Y8,Y9]), X1=Y1,X2=Y2,X3=Y3,X4=Y4,X5=Y5,X6=Y6,X7=Y7,X8=Y8,X9=Y9 ]. spark-2012.0.deb/lib/checker/rules/MODULAR.RUL0000644000175000017500000001015411753202341017431 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %------------------------------------------------------------------------------- % RULE FAMILIES CONTAINED HEREIN :- % % modular : modular arithmetic %------------------------------------------------------------------------------- % MODEL DECLARATIONS FOR THIS FILE :- % % rule_family modular: % X mod Y requires [X : i, Y : i] & % X <= Y requires [X : i, Y : i] & % X <> Y requires [X : i, Y : i] & % X < Y requires [X : i, Y : i] & % X = Y requires [X : i, Y : i]. %------------------------------------------------------------------------------- /*** Definition of mod (modulo) ***/ modular(1): X mod Y = X - N * Y may_be_deduced_from [ N * Y <= X, X < (N + 1)* Y, 0 < Y ]. modular(2): X mod Y = X - N * Y may_be_deduced_from [ X <= N * Y, (N + 1) * Y < X, Y < 0 ]. /*** Bounds for 0 < Y ***/ modular(10): 0 <= (X * Y + Z) mod Y may_be_deduced_from [ 0 <= Z, Z < Y ]. modular(11): (X * Y + Z) mod Y < Y may_be_deduced_from [ 0 <= Z, Z < Y ]. /*** Bounds for Y < 0 ***/ modular(20): Y < (X * Y + Z) mod Y may_be_deduced_from [ Y < Z, Z <= 0 ]. modular(21): (X * Y + Z) mod Y <= 0 may_be_deduced_from [ Y < Z, Z <= 0 ]. /** Properties ***/ modular(30): (X * Y + Z) mod Y = Z may_be_deduced_from [ 0 <= Z, Z < Y ]. modular(31): (X * Y + Z) mod Y = Z may_be_deduced_from [ Z <= 0, Y < Z ]. modular(32): N mod N may_be_replaced_by 0 if [ N <> 0 ]. modular(33): N mod N may_be_replaced_by 0 if [ N > 0 ]. modular(34): N mod N may_be_replaced_by 0 if [ N < 0 ]. modular(40): X * Y mod Y may_be_replaced_by 0 if [ Y <> 0 ]. modular(41): X mod Y may_be_replaced_by X if [ 0 <= X, X < Y ]. 
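/* Added worked example: taking X = 17, Y = 5 and N = 3 gives 3*5 <= 17,
   17 < (3+1)*5 and 0 < 5, so modular(1) yields 17 mod 5 = 17 - 3*5 = 2;
   modular(41) then reduces 2 mod 5 to 2 directly, since 0 <= 2 and 2 < 5. */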
modular(42): X mod Y may_be_replaced_by X if [ Y < X, X <= 0 ]. modular(50): X mod Y mod Z = X mod Y may_be_deduced_from [ 0 <= B, B < Y, Y <= Z, X = A*Y + B ]. modular(51): X mod Y mod Z = X mod Y may_be_deduced_from [ Z <= Y, Y < B, B <= 0, X = A*Y + B ]. modular(52): X mod Y mod Z = X mod Z may_be_deduced_from [ X = A * Y + B, B = C * Z + D, Y = Z * E, 0 <= B, B < Y, 0 <= D, D < Z ]. modular(53): X mod Y mod Z = X mod Z may_be_deduced_from [ X = A * Y + B, B = C * Z + D, Y = Z * E, 0 <= B, B < Y, D <= 0, Z < D ]. modular(54): X mod Y mod Z = X mod Z may_be_deduced_from [ X = A * Y + B, B = C * Z + D, Y = Z * E, B <= 0, Y < B, 0 <= D, D < Z ]. modular(55): X mod Y mod Z = X mod Z may_be_deduced_from [ X = A * Y + B, B = C * Z + D, Y = Z * E, B <= 0, Y < B, D <= 0, Z < 0 ]. modular(60): X mod Y = -((-X) mod (-Y)) may_be_deduced_from [ 0 <= B, B < Y, X = A * Y + B ]. modular(61): X mod Y = -((-X) mod (-Y)) may_be_deduced_from [ Y < B, B <= 0, X = A * Y + B ]. spark-2012.0.deb/lib/checker/rules/ARITH.RUL0000644000175000017500000001416611753202341017204 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %------------------------------------------------------------------------------- % RULE FAMILIES CONTAINED HEREIN :- % % arith : simple arithmetic simplification rules % assoc : associativity of + and * % commut : commutativity of + and * % distrib : distributivity of * over + and - % minus : rules for unary and binary minus % intdiv : rules for the integer div operator %------------------------------------------------------------------------------- % MODEL DECLARATIONS FOR THIS FILE :- % % rule_family arith: % X * Y requires [ X:ir, Y:ir ] & % X + Y requires [ X:ir, Y:ir ] & % X - Y requires [ X:ir, Y:ir ] & % X div Y requires [ X:i, Y:i ] & % X / Y requires [ X:ir, Y:ir ]. % % rule_family assoc: % X + Y requires [ X:ir, Y:ir ] & % X * Y requires [ X:ir, Y:ir ]. % % rule_family commut: % X + Y requires [ X:ir, Y:ir ] & % X * Y requires [ X:ir, Y:ir ]. % % rule_family distrib: % X + Y requires [ X:ir, Y:ir ] & % X - Y requires [ X:ir, Y:ir ] & % X * Y requires [ X:ir, Y:ir ]. % % rule_family minus: % - X requires [ X:ir ] & % X + Y requires [ X:ir, Y:ir ] & % X - Y requires [ X:ir, Y:ir ] & % X * Y requires [ X:ir, Y:ir ]. % % rule_family intdiv: % X div Y requires [ X:i, Y:i ] & % X + Y requires [ X:i, Y:i ] & % - X requires [ X:i ]. %------------------------------------------------------------------------------- /*** SIMPLIFICATION OF ARITHMETIC EXPRESSIONS Rules ***/ arith(1): X*1 may_be_replaced_by X. arith(2): 1*X may_be_replaced_by X. arith(3): X+0 may_be_replaced_by X. 
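/* Added example: arith(1) and arith(3) together rewrite (X*1)+0 to X in two
   steps, and the mirrored forms 1*X and 0+X are handled by the rules that follow. */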
arith(4): 0+X may_be_replaced_by X. arith(5): X - 0 may_be_replaced_by X. arith(6): X*0 may_be_replaced_by 0. arith(7): 0*X may_be_replaced_by 0. arith(8): X div 1 may_be_replaced_by X. arith(9): (X*N) div N may_be_replaced_by X if [ N<>0 ]. arith(10): (N*X) div N may_be_replaced_by X if [ N<>0 ]. arith(11): X/1 may_be_replaced_by X. arith(12): (X/Y)*Y may_be_replaced_by X if [ Y<>0 ]. arith(13): Y*(X/Y) may_be_replaced_by X if [ Y<>0 ]. arith(14): (X*Y)/Y may_be_replaced_by X if [ Y<>0 ]. arith(15): (Y*X)/Y may_be_replaced_by X if [ Y<>0 ]. /*** ASSOCIATIVITY of + & * Rules ***/ assoc(1): (A+B)+C may_be_replaced_by A+(B+C). assoc(2): A+(B+C) may_be_replaced_by (A+B)+C. assoc(3): (A*B)*C may_be_replaced_by A*(B*C). assoc(4): A*(B*C) may_be_replaced_by (A*B)*C. /*** COMMUTATIVITY of + & * Rules ***/ commut(1): A+B may_be_replaced_by B+A. commut(2): A*B may_be_replaced_by B*A. /*** DISTRIBUTIVITY of * over + & - Rules ***/ distrib(1): A*(B+C) & A*B+A*C are_interchangeable. distrib(2): (B+C)*A & A*B+A*C are_interchangeable. distrib(3): A*(B-C) & A*B-A*C are_interchangeable. distrib(4): (B-C)*A & A*B-A*C are_interchangeable. /*** Rules for manipulation of unary and binary MINUS operators ***/ minus(1): X-X may_be_replaced_by 0. minus(2): -(0) may_be_replaced_by 0. minus(3): -(-X) may_be_replaced_by X. minus(4): -(A+B) & -A+(-B) are_interchangeable. minus(5): -(A+B) & -A-B are_interchangeable. minus(6): -A+(-B) & -A-B are_interchangeable. minus(7): A+(-B) & A-B are_interchangeable. minus(8): A+(-B) & -(B-A) are_interchangeable. minus(9): A-B & -(B-A) are_interchangeable. minus(10): -A*B & A*(-B) are_interchangeable. minus(11): -A*B & -(A*B) are_interchangeable. minus(12): -(A*B) & A*(-B) are_interchangeable. minus(13): -A*(-B) & A*B are_interchangeable. /*** Some rules for INTEGER DIVISION ***/ intdiv(1): (A+B) div C & A div C+B div C are_interchangeable if [B=K*C, A*B>=0]. intdiv(2): (A+B) div C & A div C+B div C are_interchangeable if [B=C*K, A*B>=0]. intdiv(3): (A+B) div C & A div C+B div C are_interchangeable if [A=K*C, A*B>=0]. intdiv(4): (A+B) div C & A div C+B div C are_interchangeable if [A=C*K, A*B>=0]. intdiv(5): (A+B) div C & A div C+D are_interchangeable if [goal(integer(B)), goal(integer(C)), goal(C\=0), goal(D iss B div C), goal(B iss D*C), A*B>=0]. intdiv(6): (A+B) div C & D+B div C are_interchangeable if [goal(integer(A)), goal(integer(C)), goal(C\=0), goal(D iss A div C), goal(A iss D*C), A*B>=0]. intdiv(7): -A div B & A div (-B) are_interchangeable. intdiv(8): -A div B & -(A div B) are_interchangeable. intdiv(9): A div (-B) & -(A div B) are_interchangeable. intdiv(10): -A div (-B) & A div B are_interchangeable. intdiv(11): (A*B) div B may_be_replaced_by A if [B<>0]. intdiv(12): (A*B) div (C*B) may_be_replaced_by A div C if [B<>0]. spark-2012.0.deb/lib/checker/rules/FDLFUNCS.RUL0000644000175000017500000001353311753202341017536 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %------------------------------------------------------------------------------- % RULE FAMILIES CONTAINED HEREIN :- % % abs : properties of the absolute value function abs(_) % sqr : properties of the square function sqr(_) % odd : properties of predicate odd(_) % exp : properties of the exponentiation operator ** %------------------------------------------------------------------------------- % MODEL DECLARATIONS FOR THIS FILE :- % % rule_family abs: % abs(X) requires [ X:ir ] & % X >= Y requires [ X:ir, Y:ir ] & % X > Y requires [ X:ir, Y:ir ] & % X or Y requires [ X:any, Y:any ] & % X * Y requires [ X:ir, Y:ir ]. % % rule_family sqr: % sqr(X) requires [ X:ir ] & % X >= Y requires [ X:ir, Y:ir ] & % X > Y requires [ X:ir, Y:ir ] & % X * Y requires [ X:ir, Y:ir ] & % abs(X) requires [ X:ir ]. % % rule_family odd: % odd(X) requires [ X:i ] & % not X requires [ X:any ] & % X <> Y requires [ X:i, Y:i ] & % X = Y requires [ X:i, Y:i ]. % % rule_family exp: % X ** Y requires [ X:ir, Y:i ] & % X * Y requires [ X:ir, Y:ir ] & % X >= Y requires [ X:ir, Y:ir ] & % X <= Y requires [ X:ir, Y:ir ] & % X > Y requires [ X:ir, Y:ir ] & % X < Y requires [ X:ir, Y:ir ]. %------------------------------------------------------------------------------- /*** ABS: absolute value function properties ***/ abs(1): abs(X) may_be_replaced_by X if [ X>=0 ]. abs(2): abs(X) may_be_replaced_by -X if [ X<=0 ]. abs(3): abs(X)>=0 may_be_deduced. abs(4): abs(X)>0 may_be_deduced_from [ X<>0 ]. abs(5): abs(X)=X or abs(X)=(-X) may_be_deduced. abs(6): abs(abs(X)) may_be_replaced_by abs(X). abs(7): abs(X*Y) & abs(X)*abs(Y) are_interchangeable. /*** SQR: square function properties ***/ sqr(1): sqr(X) may_be_replaced_by X*X. sqr(2): sqr(X)>=0 may_be_deduced. sqr(3): sqr(X)>0 may_be_deduced_from [ X<>0 ]. sqr(4): sqr(X*Y) & sqr(X)*sqr(Y) are_interchangeable. sqr(5): abs(sqr(X)) may_be_replaced_by sqr(X). sqr(6): sqr(abs(X)) may_be_replaced_by sqr(X). /*** ODD: odd function properties ***/ odd(1): odd(X) & (X div 2) * 2 <> X are_interchangeable. odd(2): not odd(X) & (X div 2) * 2 = X are_interchangeable. odd(3): odd(X) may_be_deduced_from [ X=2*K+1 ]. odd(4): not odd(X) may_be_deduced_from [ X=2*K ]. odd(5): odd(X) may_be_deduced_from [ odd(-X) ]. odd(6): odd(-X) may_be_deduced_from [ odd(X) ]. odd(7): not odd(X) may_be_deduced_from [ not odd(-X) ]. odd(8): not odd(-X) may_be_deduced_from [ not odd(X) ]. odd(9): odd(X+Y) may_be_deduced_from [ odd(X), not odd(Y) ]. odd(10): odd(X+Y) may_be_deduced_from [ not odd(X), odd(Y) ]. odd(11): not odd(X+Y) may_be_deduced_from [ not odd(X), not odd(Y) ]. odd(12): not odd(X+Y) may_be_deduced_from [ odd(X), odd(Y) ]. odd(13): odd(X-Y) may_be_deduced_from [ odd(X), not odd(Y) ]. odd(14): odd(X-Y) may_be_deduced_from [ not odd(X), odd(Y) ]. odd(15): not odd(X-Y) may_be_deduced_from [ not odd(X), not odd(Y) ]. odd(16): not odd(X-Y) may_be_deduced_from [ odd(X), odd(Y) ]. odd(17): odd(X*Y) may_be_deduced_from [ odd(X), odd(Y) ]. odd(18): not odd(X*Y) may_be_deduced_from [ not odd(X) ]. odd(19): not odd(X*Y) may_be_deduced_from [ not odd(Y) ]. odd(20): odd(sqr(X)) may_be_deduced_from [ odd(X) ]. odd(21): odd(X) may_be_deduced_from [ odd(sqr(X)) ]. 
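/* Added example: from X = 2*K+1, odd(3) gives odd(X); odd(17) then gives
   odd(X*X), and odd(20) gives odd(sqr(X)) directly. */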
odd(22): not odd(sqr(X)) may_be_deduced_from [ not odd(X) ]. odd(23): not odd(X) may_be_deduced_from [ not odd(sqr(X)) ]. odd(24): odd(abs(X)) may_be_deduced_from [ odd(X) ]. odd(25): odd(X) may_be_deduced_from [ odd(abs(X)) ]. odd(26): not odd(abs(X)) may_be_deduced_from [ not odd(X) ]. odd(27): not odd(X) may_be_deduced_from [ not odd(abs(X)) ]. /*** EXP: exponentiation operator properties - for non-negative exponents ***/ exp(1): X**0 may_be_replaced_by 1. exp(2): 0**Y may_be_replaced_by 0 if [ Y>=1 ]. exp(3): 1**Y may_be_replaced_by 1 if [ Y>=0 ]. exp(4): X**Y & X*(X**(Y-1)) are_interchangeable if [ Y>0 ]. exp(5): X**(Y+1) & X*(X**Y) are_interchangeable if [ Y>=0 ]. exp(6): (-X)**Y may_be_replaced_by X**Y if [ not odd(Y) ]. exp(7): (-X)**Y may_be_replaced_by -(X**Y) if [ odd(Y) ]. exp(8): X**1 may_be_replaced_by X. exp(9): X**2 may_be_replaced_by X*X. exp(10): X**3 may_be_replaced_by X*X*X. exp(11): X**4 may_be_replaced_by X*X*X*X. exp(12): X**Y>=0 may_be_deduced_from [ X>=0, Y>=0 ]. exp(13): X**Y>=0 may_be_deduced_from [ not odd(Y) ]. exp(14): X**Y<=0 may_be_deduced_from [ X<=0, odd(Y) ]. exp(15): X**Y>0 may_be_deduced_from [ X>0 ]. exp(16): X**Y>0 may_be_deduced_from [ X<0, not odd(Y) ]. exp(17): X**Y<0 may_be_deduced_from [ X<0, odd(Y) ]. spark-2012.0.deb/lib/checker/rules/ENUM.RUL0000644000175000017500000003342311753202341017076 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %------------------------------------------------------------------------------- % RULE FAMILIES CONTAINED HEREIN :- % % enum : properties of enumerated types (pred & succ etc.) % enum_cases : list of cases for enumerations of size 2..25 elements %------------------------------------------------------------------------------- % MODEL DECLARATIONS FOR THIS FILE :- % % rule_family enum: % X = Y requires [ X:e, Y:e ] & % X <= Y requires [ X:e, Y:e ] & % X < Y requires [ X:e, Y:e ] & % X >= Y requires [ X:e, Y:e ] & % X > Y requires [ X:e, Y:e ] & % X <> Y requires [ X:e, Y:e ] & % X or Y requires [ X:any, Y:any ] & % succ(X) requires [ X:e ] & % pred(X) requires [ X:e ]. % % rule_family enum_cases: % X or Y requires [ X:any, Y:any ]. %------------------------------------------------------------------------------- /*** Enumerated type rules ***/ enum(1): pred(succ(X))=X may_be_deduced_from [ goal(enumeration_list(X,L)), goal(\+ last(L,X)) ]. enum(2): X=pred(succ(X)) may_be_deduced_from [ goal(enumeration_list(X,L)), goal(\+ last(L,X)) ]. enum(3): succ(pred(X))=X may_be_deduced_from [ goal(enumeration_list(X,L)), goal(L\=[X|_]) ]. 
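/* Added illustration (hypothetical enumeration colour = (red, green, blue)):
   enum(1) allows pred(succ(green)) = green to be deduced, since green is not
   the last literal, and enum(3) allows succ(pred(green)) = green, since green
   is not the first. */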
enum(4): X=succ(pred(X)) may_be_deduced_from [ goal(enumeration_list(X,L)), goal(L\=[X|_]) ]. enum(5): X <= Y may_be_deduced_from [ goal(enumeration_list(X,L)), goal(in_order(X,Y,L)) ]. enum(6): X < Y may_be_deduced_from [ goal(enumeration_list(X,L)), goal(in_order(X,Y,L)), goal(X\=Y) ]. enum(7): Y >= X may_be_deduced_from [ goal(enumeration_list(X,L)), goal(in_order(X,Y,L)) ]. enum(8): Y > X may_be_deduced_from [ goal(enumeration_list(X,L)), goal(in_order(X,Y,L)), goal(X\=Y) ]. enum(9): X<>Y may_be_deduced_from [ goal(enumeration_list(X,L)), goal(is_in(X,L)), goal(is_in(Y,L)), goal(X\=Y) ]. enum(10): X=E may_be_deduced_from [ X<=E, goal(enumeration_list(E,[E|_])) ]. enum(11): X=E may_be_deduced_from [ X=E, goal(enumeration_list(E,L)), goal(last(L,E)) ]. enum(13): X=E may_be_deduced_from [ X>F, goal(enumeration_list(E,L)), goal(last(L,E)), goal(strict_sublist([F,E],L)) ]. enum(14): X=E1 or X>=E2 may_be_deduced_from [ X>=E1, goal(enumeration_list(E1,L)), goal(strict_sublist([E1,E2],L)) ]. enum(15): X=E or REST may_be_deduced_from [ goal(atom(E)), goal(enumeration_list(E,L)), goal(build_other_cases(X,E,L,REST)), goal(REST\=false) ]. enum(16): X=E may_be_deduced_from [ goal(atom(E)), goal(enumeration_list(E,L)), goal(build_other_cases(X,E,L,false)) ]. enum(17): succ(Y) may_be_replaced_by X if [ goal(enumeration_list(Y,L)), goal(strict_sublist([Y,X],L)) ]. enum(18): pred(X) may_be_replaced_by Y if [ goal(enumeration_list(X,L)), goal(strict_sublist([Y,X],L)) ]. enum_cases(2): X=A or X=B may_be_deduced_from [ goal(atom(X)), goal(checktype(X,TYPE)), goal(type(TYPE,enumerated)), goal(enumeration(TYPE,[A,B])) ]. enum_cases(3): X=A or X=B or X=C may_be_deduced_from [ goal(atom(X)), goal(checktype(X,TYPE)), goal(type(TYPE,enumerated)), goal(enumeration(TYPE,[A,B,C])) ]. enum_cases(4): X=A or X=B or X=C or X=D may_be_deduced_from [ goal(atom(X)), goal(checktype(X,TYPE)), goal(type(TYPE,enumerated)), goal(enumeration(TYPE,[A,B,C,D])) ]. enum_cases(5): X=A or X=B or X=C or X=D or X=E may_be_deduced_from [ goal(atom(X)), goal(checktype(X,TYPE)), goal(type(TYPE,enumerated)), goal(enumeration(TYPE,[A,B,C,D,E])) ]. enum_cases(6): X=A or X=B or X=C or X=D or X=E or X=F may_be_deduced_from [ goal(atom(X)), goal(checktype(X,TYPE)), goal(type(TYPE,enumerated)), goal(enumeration(TYPE,[A,B,C,D,E,F])) ]. enum_cases(7): X=A or X=B or X=C or X=D or X=E or X=F or X=G may_be_deduced_from [ goal(atom(X)), goal(checktype(X,TYPE)), goal(type(TYPE,enumerated)), goal(enumeration(TYPE,[A,B,C,D,E,F,G])) ]. enum_cases(8): X=A or X=B or X=C or X=D or X=E or X=F or X=G or X=H may_be_deduced_from [ goal(atom(X)), goal(checktype(X,TYPE)), goal(type(TYPE,enumerated)), goal(enumeration(TYPE,[A,B,C,D,E,F,G,H])) ]. enum_cases(9): X=A or X=B or X=C or X=D or X=E or X=F or X=G or X=H or X=I may_be_deduced_from [ goal(atom(X)), goal(checktype(X,TYPE)), goal(type(TYPE,enumerated)), goal(enumeration(TYPE,[A,B,C,D,E,F,G,H,I])) ]. enum_cases(10): X=A or X=B or X=C or X=D or X=E or X=F or X=G or X=H or X=I or X=J may_be_deduced_from [ goal(atom(X)), goal(checktype(X,TYPE)), goal(type(TYPE,enumerated)), goal(enumeration(TYPE,[A,B,C,D,E,F,G,H,I,J])) ]. enum_cases(11): X=A or X=B or X=C or X=D or X=E or X=F or X=G or X=H or X=I or X=J or X=K may_be_deduced_from [ goal(atom(X)), goal(checktype(X,TYPE)), goal(type(TYPE,enumerated)), goal(enumeration(TYPE,[A,B,C,D,E,F,G,H,I,J,K])) ]. 
enum_cases(12): X=A or X=B or X=C or X=D or X=E or X=F or X=G or X=H or X=I or X=J or X=K or X=L may_be_deduced_from [ goal(atom(X)), goal(checktype(X,TYPE)), goal(type(TYPE,enumerated)), goal(enumeration(TYPE,[A,B,C,D,E,F,G,H,I,J,K,L])) ]. enum_cases(13): X=A or X=B or X=C or X=D or X=E or X=F or X=G or X=H or X=I or X=J or X=K or X=L or X=M may_be_deduced_from [ goal(atom(X)), goal(checktype(X,TYPE)), goal(type(TYPE,enumerated)), goal(enumeration(TYPE,[A,B,C,D,E,F,G,H,I,J,K,L,M])) ]. enum_cases(14): X=A or X=B or X=C or X=D or X=E or X=F or X=G or X=H or X=I or X=J or X=K or X=L or X=M or X=N may_be_deduced_from [ goal(atom(X)), goal(checktype(X,TYPE)), goal(type(TYPE,enumerated)), goal(enumeration(TYPE,[A,B,C,D,E,F,G,H,I,J,K,L,M,N])) ]. enum_cases(15): X=A or X=B or X=C or X=D or X=E or X=F or X=G or X=H or X=I or X=J or X=K or X=L or X=M or X=N or X=O may_be_deduced_from [ goal(atom(X)), goal(checktype(X,TYPE)), goal(type(TYPE,enumerated)), goal(enumeration(TYPE,[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O])) ]. enum_cases(16): X=A or X=B or X=C or X=D or X=E or X=F or X=G or X=H or X=I or X=J or X=K or X=L or X=M or X=N or X=O or X=P may_be_deduced_from [ goal(atom(X)), goal(checktype(X,TYPE)), goal(type(TYPE,enumerated)), goal(enumeration(TYPE,[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O, P])) ]. enum_cases(17): X=A or X=B or X=C or X=D or X=E or X=F or X=G or X=H or X=I or X=J or X=K or X=L or X=M or X=N or X=O or X=P or X=Q may_be_deduced_from [ goal(atom(X)), goal(checktype(X,TYPE)), goal(type(TYPE,enumerated)), goal(enumeration(TYPE,[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O, P,Q])) ]. enum_cases(18): X=A or X=B or X=C or X=D or X=E or X=F or X=G or X=H or X=I or X=J or X=K or X=L or X=M or X=N or X=O or X=P or X=Q or X=R may_be_deduced_from [ goal(atom(X)), goal(checktype(X,TYPE)), goal(type(TYPE,enumerated)), goal(enumeration(TYPE,[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O, P,Q,R])) ]. enum_cases(19): X=A or X=B or X=C or X=D or X=E or X=F or X=G or X=H or X=I or X=J or X=K or X=L or X=M or X=N or X=O or X=P or X=Q or X=R or X=S may_be_deduced_from [ goal(atom(X)), goal(checktype(X,TYPE)), goal(type(TYPE,enumerated)), goal(enumeration(TYPE,[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O, P,Q,R,S])) ]. enum_cases(20): X=A or X=B or X=C or X=D or X=E or X=F or X=G or X=H or X=I or X=J or X=K or X=L or X=M or X=N or X=O or X=P or X=Q or X=R or X=S or X=T may_be_deduced_from [ goal(atom(X)), goal(checktype(X,TYPE)), goal(type(TYPE,enumerated)), goal(enumeration(TYPE,[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O, P,Q,R,S,T])) ]. enum_cases(21): X=A or X=B or X=C or X=D or X=E or X=F or X=G or X=H or X=I or X=J or X=K or X=L or X=M or X=N or X=O or X=P or X=Q or X=R or X=S or X=T or X=U may_be_deduced_from [ goal(atom(X)), goal(checktype(X,TYPE)), goal(type(TYPE,enumerated)), goal(enumeration(TYPE,[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O, P,Q,R,S,T,U])) ]. enum_cases(22): X=A or X=B or X=C or X=D or X=E or X=F or X=G or X=H or X=I or X=J or X=K or X=L or X=M or X=N or X=O or X=P or X=Q or X=R or X=S or X=T or X=U or X=V may_be_deduced_from [ goal(atom(X)), goal(checktype(X,TYPE)), goal(type(TYPE,enumerated)), goal(enumeration(TYPE,[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O, P,Q,R,S,T,U,V])) ]. enum_cases(23): X=A or X=B or X=C or X=D or X=E or X=F or X=G or X=H or X=I or X=J or X=K or X=L or X=M or X=N or X=O or X=P or X=Q or X=R or X=S or X=T or X=U or X=V or X=W may_be_deduced_from [ goal(atom(X)), goal(checktype(X,TYPE)), goal(type(TYPE,enumerated)), goal(enumeration(TYPE,[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O, P,Q,R,S,T,U,V,W])) ]. 
enum_cases(24): X=A or X=B or X=C or X=D or X=E or X=F or X=G or X=H or X=I or X=J or X=K or X=L or X=M or X=N or X=O or X=P or X=Q or X=R or X=S or X=T or X=U or X=V or X=W or X=Y may_be_deduced_from [ goal(atom(X)), goal(checktype(X,TYPE)), goal(type(TYPE,enumerated)), goal(enumeration(TYPE,[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O, P,Q,R,S,T,U,V,W,Y])) ]. enum_cases(25): X=A or X=B or X=C or X=D or X=E or X=F or X=G or X=H or X=I or X=J or X=K or X=L or X=M or X=N or X=O or X=P or X=Q or X=R or X=S or X=T or X=U or X=V or X=W or X=Y or X=Z may_be_deduced_from [ goal(atom(X)), goal(checktype(X,TYPE)), goal(type(TYPE,enumerated)), goal(enumeration(TYPE,[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O, P,Q,R,S,T,U,V,W,Y,Z])) ]. spark-2012.0.deb/lib/checker/rules/GENINEQS.RUL0000644000175000017500000001200611753202341017545 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %------------------------------------------------------------------------------- % RULE FAMILIES CONTAINED HEREIN :- % % transitivity : transitivity of relational operators % strengthen : inequalities strengthening rules % negation : negation of inequalities rules %------------------------------------------------------------------------------- % MODEL DECLARATIONS FOR THIS FILE :- % % rule_family transitivity: % X <= Y requires [ X:ire, Y:ire ] & % X >= Y requires [ X:ire, Y:ire ] & % X < Y requires [ X:ire, Y:ire ] & % X > Y requires [ X:ire, Y:ire ] & % X = Y requires [ X:any, Y:any ] & % X <> Y requires [ X:any, Y:any ]. % % rule_family strengthen: % X > Y requires [ X:ire, Y:ire ] & % X < Y requires [ X:ire, Y:ire ] & % X = Y requires [ X:ire, Y:ire ]. % % rule_family negation: % not X requires [ X:any ] & % X <> Y requires [ X:any, Y:any ] & % X = Y requires [ X:any, Y:any ] & % X <= Y requires [ X:ire, Y:ire ] & % X > Y requires [ X:ire, Y:ire ] & % X >= Y requires [ X:ire, Y:ire ] & % X < Y requires [ X:ire, Y:ire ]. %------------------------------------------------------------------------------- /*** TRANSITIVITY Rules ***/ transitivity(1): I<=K may_be_deduced_from [ I<=J, J<=K ]. transitivity(2): I<=K may_be_deduced_from [ I<=J, J<K ]. transitivity(3): I<=K may_be_deduced_from [ I<=J, J=K ]. transitivity(4): I<=K may_be_deduced_from [ I<J, J<=K ]. transitivity(5): I<=K may_be_deduced_from [ I<J, J<K ]. transitivity(6): I<=K may_be_deduced_from [ I<J, J=K ]. transitivity(7): I<=K may_be_deduced_from [ I=J, J<=K ]. transitivity(8): I<=K may_be_deduced_from [ I=J, J<K ]. transitivity(9): I<=K may_be_deduced_from [ I=J, J=K ]. transitivity(10): K>=I may_be_deduced_from [ I<=J, J<=K ]. transitivity(11): K>=I may_be_deduced_from [ I<=J, J<K ]. transitivity(12): K>=I may_be_deduced_from [ I<=J, J=K ]. transitivity(13): K>=I may_be_deduced_from [ I<J, J<=K ]. transitivity(14): K>=I may_be_deduced_from [ I<J, J<K ]. transitivity(15): K>=I may_be_deduced_from [ I<J, J=K ]. transitivity(16): K>=I may_be_deduced_from [ I=J, J<=K ]. transitivity(17): K>=I may_be_deduced_from [ I=J, J<K ]. transitivity(18): K>=I may_be_deduced_from [ I=J, J=K ].
transitivity(19): I<K may_be_deduced_from [ I<=J, J<K ]. transitivity(20): I<K may_be_deduced_from [ I<J, J<=K ]. transitivity(21): I<K may_be_deduced_from [ I<J, J<K ]. transitivity(22): I<K may_be_deduced_from [ I<J, J=K ]. transitivity(23): I<K may_be_deduced_from [ I=J, J<K ]. transitivity(24): K>I may_be_deduced_from [ I<=J, J<K ]. transitivity(25): K>I may_be_deduced_from [ I<J, J<=K ]. transitivity(26): K>I may_be_deduced_from [ I<J, J<K ]. transitivity(27): K>I may_be_deduced_from [ I<J, J=K ]. transitivity(28): K>I may_be_deduced_from [ I=J, J<K ]. transitivity(29): I=K may_be_deduced_from [ I=J, J=K ]. transitivity(30): I<>K may_be_deduced_from [ I<>J, J=K ]. /*** STRENGTHEN Rules ***/ strengthen(1): I>J may_be_deduced_from [ I>=J, I<>J ]. strengthen(2): J<I may_be_deduced_from [ I>=J, I<>J ]. strengthen(3): I=J may_be_deduced_from [ I>=J, I<=J ]. /*** Rules for manipulation of logical NOT operator ***/ negation(1): not(A=B) & A<>B are_interchangeable. negation(2): not(A<>B) & A=B are_interchangeable. negation(3): not(A>B) & A<=B are_interchangeable. negation(4): not(A<=B) & A>B are_interchangeable. negation(5): not(A<B) & A>=B are_interchangeable. negation(6): not(A>=B) & A<B are_interchangeable. negation(7): not(A=B) & B<>A are_interchangeable. negation(8): not(A<>B) & B=A are_interchangeable. negation(9): not(A>B) & B>=A are_interchangeable. negation(10): not(A<=B) & B<A are_interchangeable. negation(11): not(A<B) & B<=A are_interchangeable. negation(12): not(A>=B) & B>A are_interchangeable. spark-2012.0.deb/lib/checker/rules/BITWISE.RUL0000644000175000017500000001557411753202341017447 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %------------------------------------------------------------------------------- % RULE FAMILIES CONTAINED HEREIN :- % bitwise : bitwise logical operators for SPARK Modular Types %------------------------------------------------------------------------------- % MODEL DECLARATIONS FOR THIS FILE :- % % rule_family bitwise: % bit__and(I,J) requires [I : i, J : i] & % bit__or(I,J) requires [I : i, J : i] & % bit__xor(I,J) requires [I : i, J : i] & % bit__and(I,J) = K requires [I : i, J : i, K : i] & % bit__or(I,J) = K requires [I : i, J : i, K : i] & % bit__xor(I,J) = K requires [I : i, J : i, K : i] & % I < J requires [I : i, J : i] & % I <= J requires [I : i, J : i] & % I = J requires [I : i, J : i]. %------------------------------------------------------------------------------- /*** Simplification of bitwise operators ***/ bitwise(1): bit__and(X,X) may_be_replaced_by X if [ 0 <= X ]. bitwise(2): bit__or(X,X) may_be_replaced_by X if [ 0 <= X ]. bitwise(3): bit__xor(X,X) may_be_replaced_by 0 if [ 0 <= X ]. bitwise(4): bit__and(X, 2**N - 1) may_be_replaced_by X if [ 0 <= X, 0 <= N, X <= 2**N - 1 ]. /*** Properties of zero ***/ bitwise(11): bit__and(X,0) may_be_replaced_by 0 if [ 0 <= X ]. bitwise(12): bit__or(X,0) may_be_replaced_by X if [ 0 <= X ]. bitwise(13): bit__xor(X,0) may_be_replaced_by X if [ 0 <= X ]. /*** Commutativity ***/ bitwise(21): bit__and(X,Y) may_be_replaced_by bit__and(Y,X) if [ 0 <= X, 0 <= Y ]. bitwise(22): bit__or(X,Y) may_be_replaced_by bit__or(Y,X) if [ 0 <= X, 0 <= Y ]. bitwise(23): bit__xor(X,Y) may_be_replaced_by bit__xor(Y,X) if [ 0 <= X, 0 <= Y ].
/*** Associativity ***/ bitwise(31): bit__and(bit__and(X,Y), Z) may_be_replaced_by bit__and(bit__and(X,Z), Y) if [ 0 <= X, 0 <= Y ]. bitwise(32): bit__or(bit__or(X,Y), Z) may_be_replaced_by bit__or(bit__or(X,Z), Y) if [ 0 <= X, 0 <= Y ]. bitwise(33): bit__xor(bit__xor(X,Y), Z) may_be_replaced_by bit__xor(bit__xor(X,Z), Y) if [ 0 <= X, 0 <= Y ]. /*** Distributivity of bit__and and bit__or (bit__xor requires negation)***/ bitwise(41): bit__or(bit__and(X,Y), Z) & bit__and(bit__or(X,Z), bit__or(Y,Z)) are_interchangeable if [ 0 <= X, 0 <= Y, 0 <= Z ]. bitwise(42): bit__and(bit__or(X,Y), Z) & bit__or(bit__and(X,Z), bit__and(Y,Z)) are_interchangeable if [ 0 <= X, 0 <= Y, 0 <= Z ]. /*** Lower bounds ***/ bitwise(51): 0 <= bit__and(X,Y) may_be_deduced_from [ 0 <= X, 0 <= Y ]. bitwise(52): 0 <= bit__or(X,Y) may_be_deduced_from [ 0 <= X, 0 <= Y ]. bitwise(53): 0 <= bit__xor(X,Y) may_be_deduced_from [ 0 <= X, 0 <= Y ]. bitwise(54): X <= bit__or(X,Y) may_be_deduced_from [ 0 <= X, 0 <= Y ]. bitwise(55): Y <= bit__or(X,Y) may_be_deduced_from [ 0 <= X, 0 <= Y ]. bitwise(56): X - Y <= bit__xor(X, Y) may_be_deduced_from [ 0 <= X, 0 <= Y ]. bitwise(57): Y - X <= bit__xor(X, Y) may_be_deduced_from [ 0 <= X, 0 <= Y ]. /*** Upper bounds ***/ bitwise(61): bit__and(X,Y) <= X may_be_deduced_from [ 0 <= X, 0 <= Y ]. bitwise(62): bit__and(X,Y) <= Y may_be_deduced_from [ 0 <= X, 0 <= Y ]. bitwise(63): bit__or(X,Y) <= X + Y may_be_deduced_from [ 0 <= X, 0 <= Y ]. bitwise(64): bit__xor(X,Y) <= X + Y may_be_deduced_from [ 0 <= X, 0 <= Y ]. bitwise(65): bit__and(X,Y) <= Z may_be_deduced_from [ 0 <= X, 0 <= Y, X <= Z or Y <= Z ]. bitwise(66): bit__or(X,Y) <= 2**N - 1 may_be_deduced_from [ 0 <= X, 0 <= Y, 0 <= N, X <= 2**N - 1, Y <= 2**N - 1]. bitwise(67): bit__xor(X,Y) <= 2**N - 1 may_be_deduced_from [ 0 <= X, 0 <= Y, 0 <= N, X <= 2**N - 1, Y <= 2**N - 1]. /*** Values ***/ bitwise(72): bit__or(X,Y) may_be_replaced_by X + Y - bit__and(X,Y) if [ 0 <= X, 0 <= Y ]. bitwise(73): bit__xor(X,Y) may_be_replaced_by X + Y - 2 * bit__and(X,Y) if [ 0 <= X, 0 <= Y ]. /*** Comparison ***/ bitwise(81): bit__and(X,Y) <= bit__or(X,Y) may_be_deduced_from [ 0 <= X, 0 <= Y]. bitwise(82): bit__xor(X,Y) <= bit__or(X,Y) may_be_deduced_from [ 0 <= X, 0 <= Y]. /*** Combination of X with ~X ***/ bitwise(91): bit__and(X, 2**N - X - 1) = 0 may_be_deduced_from [ 0 <= X, 0 <= N, X <= 2**N - 1 ]. bitwise(92): bit__or(X, 2**N - X - 1) = 2**N - 1 may_be_deduced_from [ 0 <= X, 0 <= N, X <= 2**N - 1 ]. bitwise(93): bit__xor(X, 2**N - X - 1) = 2**N - 1 may_be_deduced_from [ 0 <= X, 0 <= N, X <= 2**N - 1 ]. /*** Maximum value argument ***/ bitwise(101): bit__and(X, 2**N - 1) <= X may_be_deduced_from [ 0 <= X, 0 <= N, X <= 2**N - 1 ]. bitwise(102): bit__or(X, 2**N - 1) = 2**N - 1 may_be_deduced_from [ 0 <= X, 0 <= N, X <= 2**N - 1 ]. bitwise(103): bit__xor(X, 2**N - 1) = 2**N - X - 1 may_be_deduced_from [ 0 <= X, 0 <= N, X <= 2**N - 1 ]. /*** Definition of bit__and ***/ bitwise(111): bit__and(X, Y) may_be_replaced_by 2 * bit__and(X div 2, Y div 2) + (X mod 2) * (Y mod 2) if [0 <= X, 0 <= Y]. spark-2012.0.deb/lib/checker/rules/checker.ini0000644000175000017500000000007211712753445020060 0ustar eugeneugenset prooflog_width to 0. set display_var_free_only to on. 
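The value rules bitwise(72-73) and the recursive definition bitwise(111) above are, between them, enough to compute all three operators for non-negative arguments. The following is a small standalone Prolog sketch of that reading of the rules; it is not part of the checker or its rules distribution, and the predicate names bit_and/3, bit_or/3 and bit_xor/3 are illustrative only.

% Sketch only: bit__and via bitwise(111), bit__or/bit__xor via bitwise(72-73).
bit_and(0, _, 0) :- !.
bit_and(_, 0, 0) :- !.
bit_and(X, Y, R) :-
    X > 0, Y > 0,
    X2 is X // 2, Y2 is Y // 2,
    bit_and(X2, Y2, R2),                 % recurse on X div 2, Y div 2
    R is 2*R2 + (X mod 2)*(Y mod 2).     % add back the product of the low bits
bit_or(X, Y, R)  :- bit_and(X, Y, A), R is X + Y - A.      % bitwise(72)
bit_xor(X, Y, R) :- bit_and(X, Y, A), R is X + Y - 2*A.    % bitwise(73)

% Example: ?- bit_and(12, 10, A), bit_or(12, 10, O), bit_xor(12, 10, X).
% gives A = 8, O = 14, X = 6, matching the usual bitwise results.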
spark-2012.0.deb/lib/checker/rules/INTINEQS.RUL0000644000175000017500000000536711753202341017572 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %------------------------------------------------------------------------------- % RULE FAMILY CONTAINED HEREIN :- % % inequals : inequalities rules applicable to integers only %------------------------------------------------------------------------------- % MODEL DECLARATION FOR THIS FILE :- % % rule_family inequals: % X >= Y requires [ X:i, Y:i ] & % X <= Y requires [ X:i, Y:i ] & % X > Y requires [ X:i, Y:i ] & % X = Y requires [ X:i, Y:i ]. %------------------------------------------------------------------------------- /*** INEQUALS Rules ***/ /*** Integer division basic properties ***/ inequals(86): N*(A div N)>=A-N+1 may_be_deduced_from [ N>0, A>=0 ]. inequals(87): (A div N)*N>=A-N+1 may_be_deduced_from [ N>0, A>=0 ]. inequals(88): N*(A div N)<=A may_be_deduced_from [ N<>0, A>=0 ]. inequals(89): (A div N)*N<=A may_be_deduced_from [ N<>0, A>=0 ]. inequals(90): A div N>=0 may_be_deduced_from [ A>=0, N>0 ]. inequals(91): A div N>0 may_be_deduced_from [ A>=N, N>0 ]. inequals(92): A div N>0 may_be_deduced_from [ A<=N, N<0 ]. inequals(93): I div N<=J div N may_be_deduced_from [ I<=J, N>0 ]. inequals(94): I div N>=J div N may_be_deduced_from [ I>=J, N>0 ]. inequals(95): I div M=J div N may_be_deduced_from [ I=J, M=N ]. inequals(96): N*(A div N)>=A+N+1 may_be_deduced_from [ N<0, A>=0 ]. inequals(97): (A div N)*N>=A+N+1 may_be_deduced_from [ N<0, A>=0 ]. inequals(98): A div B>=0 may_be_deduced_from [ A<=0, B<0 ]. inequals(99): A div B<=0 may_be_deduced_from [ A>=0, B<0 ]. inequals(100): A div B<=0 may_be_deduced_from [ A<=0, B>0 ]. /*** Other useful integers-only rules ***/ inequals(101): X>=Y+1 may_be_deduced_from [ X>Y ]. inequals(102): Y+1<=X may_be_deduced_from [ X>Y ]. inequals(103): X>=1 may_be_deduced_from [ X>0 ]. spark-2012.0.deb/lib/checker/helptext/0000755000175000017500000000000011753202331016443 5ustar eugeneugenspark-2012.0.deb/lib/checker/helptext/declare.chl0000644000175000017500000000047511343737761020557 0ustar eugeneugen[declare]. 'Command example: CHECK|: declare.'. ''. 'This command allows the user to make new declarations for use in the proof'. 'attempt. Any valid declaration which might appear in a .FDL file may be'. 'used. The declaration will be prompted for, and must be typed in FDL'. 'syntax, terminated by a ";".'. spark-2012.0.deb/lib/checker/helptext/rules.chl0000644000175000017500000000166511343737761020314 0ustar eugeneugen[rules]. 'THE BUILT-IN RULES LIBRARY'. ''. 
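The integer division rules inequals(86-92) in INTINEQS.RUL above bound N*(A div N) for a non-negative dividend. A quick standalone Prolog check of the N>0 cases is sketched below; it is not part of the toolset, check_inequals_86_90/0 is an illustrative name, and it relies on FDL's div truncating towards zero, which coincides with // here because A >= 0.

% Sketch only: exhaustively test inequals(86), (88) and (90) on a small range.
check_inequals_86_90 :-
    forall(( between(0, 60, A), between(1, 12, N) ),
           ( Q is A // N,
             N*Q >= A - N + 1,     % inequals(86)
             N*Q =< A,             % inequals(88)
             Q >= 0 )).            % inequals(90)

% ?- check_inequals_86_90.   % succeeds: no counter-example in the range tested.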
'The following rule families are built-in at present:'. ''. ' abs append arith array'. ' assoc bitwise commut distrib'. ' enum enum_cases enumeration eq'. ' equiv equivalence exp first'. ' implies inequals inference intdiv'. ' last logic logical logical_and'. ' logical_not logical_or minus mk__array'. ' mk__record modular negation nonfirst'. ' nonlast odd quant record'. ' record_equality seq seqlen sets'. ' simplify sqr standardisation strengthen'. ' transitivity unification zero'. ''. 'For more help on a particular rule family, type "help rules=RULENAME."'. spark-2012.0.deb/lib/checker/helptext/instantia.chl0000644000175000017500000000070111343737761021142 0ustar eugeneugen[instantiate]. 'Command examples: CHECK|: instantiate.'. ' CHECK|: instantiate int_Y_1.'. ' CHECK|: instantiate int_X_1 with x+1.'. ''. 'This command allows the user to instantiate a universal variable, to'. 'derive a specific instance of an unwrapped formula.'. 'These universal variables arise when universally quantified hypotheses,'. 'or existentially quantified conclusions, are unwrapped.'. spark-2012.0.deb/lib/checker/helptext/remember.chl0000644000175000017500000000051411343737761020750 0ustar eugeneugen[remember]. 'Command examples: CHECK|: remember [3,5,6].'. ' CHECK|: remember h#3 & h#5-6.'. ''. 'The "remember" command allows you to tell the checker to remember one or'. 'more forgotten hypotheses of the current VC, so that these hypotheses'. 'will once again be displayed on the screen by "list".'. spark-2012.0.deb/lib/checker/helptext/rxarray.chl0000644000175000017500000000264111343737761020645 0ustar eugeneugen[rules = array]. 'Rules: array. Location: SPADE$CHECKER:ARRAY.RUL'. ''. 'Substitution rules for the FDL array-manipulation functions'. 'element(_,_) and update(_,_,_), thus:'. ''. ' (1) element(update(A, I, X), I) --> X.'. ' (2) update(A, I, element(A, I)) --> A.'. ' (3) element(update(A, I, X), J ) <--> element(A, J) if I<>J.'. ' (4) update(update(A, I, X), J, Y) <--> update(A, J, Y), I, X)'. ' provided I<>J.'. ' (5) update(update(A, I, X), I, Y) --> update(A, I, Y).'. ''. 'The simplification rules in this family can be performed automatically'. 'by the expression simplifier.'. [rules = mk__array]. 'Rules: mk__array. Location: SPADE$CHECKER:ARRAY.RUL'. ''. 'A single rule for accessing elements from a SPARK array aggregate.'. 'We illustrate its use with the following; let ARR be the expression:'. ''. ' mk__array(17, [x+1] := 5, [1..x] := 11, [y] := 4).'. ''. 'With the above expression for ARR, we find that we can replace:'. ''. ' (1) element(ARR, [y]) by 4.'. ' (2) element(ARR, [x-1]) by 11, provided x-1<>y and 1<=x-1.'. ' (3) element(ARR, [x+1] by 5, provided x+1<>y.'. ' (4) element(ARR, [x+2] by 17, provided x+2<>y.'. ' (5) element(ARR, [0] by 17, provided y<>0 and x+1<>0.'. ''. 'If any of the side-conditions cannot be met by the Checker\'s'. 'inference engine, the rule-application may return no simpler result.'. spark-2012.0.deb/lib/checker/helptext/rxlogic.chl0000644000175000017500000000435111343737761020624 0ustar eugeneugen[rules = logical_not]. 'Rules: logical_not. Location: SPADE$CHECKER:LOGIC.RUL'. ''. 'These rules are elementary simplification rules for "not":'. ''. ' not true --> false not false --> true'. ' not not X --> X'. [rules = logical_and]. 'Rules: logical_and. Location: SPADE$CHECKER:LOGIC.RUL'. ''. 'These rules are elementary simplification rules for "and":'. ''. ' X and true --> X true and X --> X'. ' X and false --> false false and X --> false'. 
' X and not X --> false not X and X --> false'. ' X and X --> X'. [rules = logical_or]. 'Rules: logical_or. Location: SPADE$CHECKER:LOGIC.RUL'. ''. 'These rules are elementary simplification rules for "or":'. ''. ' X or false --> X false or X --> X'. ' X or true --> true true or X --> true'. ' X or not X --> true not X or X --> true'. ' X or X --> X'. [rules = implies]. 'Rules: implies. Location: SPADE$CHECKER:LOGIC.RUL'. ''. 'These rules are elementary simplification rules for "->":'. ''. ' X -> true --> true X -> false --> not X'. ' true -> X --> X false -> X --> true'. ' not X -> X --> X X -> not X --> not X'. ' X -> X --> true'. [rules = equivalence]. 'Rules: equivalence. Location: SPADE$CHECKER:LOGIC.RUL'. ''. 'These rules are elementary simplification rules for "<->":'. ''. ' X <-> true --> X true <-> X --> X'. ' X <-> false --> not X false <-> X --> not X'. ' X <-> not X --> false not X <-> X --> false'. ' X <-> X --> true'. [rules = logical]. 'Rules: logical. Location: SPADE$CHECKER:LOGIC.RUL'. ''. 'These rules provide other useful rules of classical logic, thus:'. ''. ' not (A or B) <--> (not A) and (not B)'. ' not (A and B) <--> (not A) or (not B)'. ' A -> B <--> (not A) or B'. ' A <-> B <--> (A -> B) and (B -> A)'. ' A -> (B -> C) <--> (A and B) -> C.'. spark-2012.0.deb/lib/checker/helptext/set.chl0000644000175000017500000000066511343737761017754 0ustar eugeneugen[set]. 'Command examples: CHECK|: set.'. ' CHECK|: set simplify_in_infer.'. ' CHECK|: set auto_done to on.'. ''. 'You can use the "set" command, both interactively and in a CHECKER.INI'. 'initialisation file, to set the value of various flags. See Chapter 3'. 'of the Checker documentation for a description of each flag. Use the'. '"show" command to inspect the current settings.'. spark-2012.0.deb/lib/checker/helptext/deduce.chl0000644000175000017500000000052211343737761020402 0ustar eugeneugen[deduce]. 'Command examples: CHECK|: deduce.'. ' CHECK|: deduce c#1.'. ' CHECK|: deduce x>0 or x<0 from [1,4].'. ''. 'This command invokes truth-table logical deduction. You must state the'. 'formula you wish to prove and provide a list of (zero or more)'. 'hypothesis numbers to be used.'. spark-2012.0.deb/lib/checker/helptext/execute.chl0000644000175000017500000000270211343737761020615 0ustar eugeneugen[execute]. 'Command examples: CHECK|: execute.'. ' CHECK|: execute \'replay.cmd\'.'. ''. 'This command redirects input, so that the Checker fetches commands and'. 'replies to queries from a script file instead of from the keyboard.'. ''. 'Ideally, this script file is one which was created by an earlier proof'. 'session with the Checker. A script file which is created manually is'. 'quite likely to contain slips which may lead to inappropriate actions.'. ''. 'Note that any "errors" in the script file will be faithfully followed:'. 'the script file will be read until the end of the file is reached,'. 'unless some other error forces abandonment of the script.'. ''. 'Nesting of script files is allowed: if during execution of A.CMD, for'. 'instance, a command to execute B.CMD is encountered, control will pass'. 'to B.CMD until the end of this file is reached, at which point control'. 'will revert to A.CMD. On reaching the end of a script file invoked'. 'interactively (or with the /execute=... command-line switch), control is'. 'returned to the user for further interactive proof work.'. ''. 'If you wish to replay a script from an earlier proof attempt after'. 'making changes to a program\'s assertions, please check that numbering'. 
'of hypotheses and conclusions is unaffected. If this is not the case,'. 'it may also be necessary to edit the script file first to adjust the'. 'hypothesis/conclusion numbers used in commands.'. spark-2012.0.deb/lib/checker/helptext/forwardch.chl0000644000175000017500000000137511343737761021137 0ustar eugeneugen[forwardchain]. 'Command examples: CHECK|: forwardchain.'. ' CHECK|: forwardchain h#3.'. ' CHECK|: forwardchain c#2.'. ''. 'This command attempts to use a hypothesis of the form "A -> B" (or'. '"A <-> B" or "B <-> A") by proving that A holds and inferring that B'. 'holds in consequence. If the first form of the command is used, the'. 'user is prompted to give a hypothesis or conclusion to be used. If'. 'a hypothesis is given, it must be of one of the forms mentioned above.'. 'If a conclusion C is given, forward chaining will be applied to any'. 'hypothesis which would allow C to be inferred in this way. Where'. 'there is more than one possibility, the user is prompted to decide'. 'what is required.'. spark-2012.0.deb/lib/checker/helptext/rxspecial.chl0000644000175000017500000000562411343737761021153 0ustar eugeneugen[rules = inference]. 'Rules: inference. Location: SPADE$CHECKER:SPECIAL.RUL'. ''. 'This is a family of logic rules. For example:'. ''. 'If you wish to claim that X is true, and prove it, rule'. 'inference(1) can be useful, as it says X follows from X, so you'. 'enter a new proof frame with X as your goal.'. ''. 'If you have a conclusion of the form A or B, rules'. 'inference(3-4) can be useful, to select which side of the'. 'disjunction you wish to try to prove.'. [rules = eq]. 'Rules: eq. Location: SPADE$CHECKER:SPECIAL.RUL'. ''. 'This is the equality substitution rule: it allows you to replace A by B'. 'provided A=B (and A and B are different expressions). It is probably'. 'the most commonly-used substitution rule.'. [rules = equiv]. 'Rules: equiv. Location: SPADE$CHECKER:SPECIAL.RUL'. ''. 'This is the equivalence substitution rule: you can use it to replace'. 'A by B (or vice versa) provided A <-> B. It is a special-invocation'. 'rule, i.e. you can only apply it if you specifically ask for it: it'. 'will not be applied by a wild-card rulename, for instance.'. [rules = simplify]. 'Rules: simplify. Location: SPADE$CHECKER:SPECIAL.RUL'. ''. 'Two special-invocation rules which allow you to replace an'. 'expression by its simplified form or by another expression which'. 'simplifies to the same form.'. [rules = logic]. 'Rules: logic. Location: SPADE$CHECKER:SPECIAL.RUL'. ''. 'A special-invocation family of rules which allow you to make use of'. 'the logical deduction capabilities of the "deduce" command as'. 'a (not particularly strong) inference or a (stronger) rewrite rule.'. [rules = standardisation]. 'Rules: standardisation. Location: SPADE$CHECKER:SPECIAL.RUL'. ''. 'A family of special-invocation rules which allow you to use'. 'the expression standardiser as a rewrite or inference rule.'. ''. 'The most useful means of using this rule to rewrite an'. 'expression is to give the exact new expression that you wish'. 'to replace your original expression by: you cannot use a'. 'partially-instantiated expression as your new subexpression'. 'pattern, though you can use a completely wild new'. 'subexpression pattern (i.e. X).'. [rules = unification]. 'Rules: unification. Location: SPADE$CHECKER:SPECIAL.RUL'. ''. 'A special-invocation family of rules which, for any function (or'. 'operator) F, allows you to infer F(X1,...,Xn)=F(Y1,...,Yn) from'. 
'X1=Y1, ..., Xn=Yn (for n in 1..9 at present).'. ''. 'In fact, you can get along without these rules (useful to know if F'. 'is a function with 10 or more arguments): if you want to prove'. 'F(X1,...,Xn)=F(Y1,...,Yn) and you can prove X1=Y1, ..., Xn=Yn, then'. 'just replace each Yi by Xi in F(X1,...,Xn)=F(Y1,...,Yn) to get the'. 'required result; if necessary, you can use proof by contradiction to'. 'explicitly introduce the F(Y1,...,Yn) expression first.'. spark-2012.0.deb/lib/checker/helptext/rxgenineq.chl0000644000175000017500000000222611343737761021154 0ustar eugeneugen[rules = transitivity]. 'Rules: transitivity. Location: SPADE$CHECKER:GENINEQS.RUL'. ''. 'A rule family expressing transitivity properties of the relational'. 'operators (e.g. X>=Y and Y>=Z -> X>=Z).'. ''. 'The rules are grouped as follows:'. ''. ' To infer X <= Y try transitivity(1-9).'. ' To infer X >= Y try transitivity(10-18).'. ' To infer X < Y try transitivity(19-23).'. ' To infer X > Y try transitivity(24-28).'. ' To infer X = Y try transitivity(29).'. ' To infer X <> Y try transitivity(30).'. [rules = strengthen]. 'Rules: strengthen. Location: SPADE$CHECKER:GENINEQS.RUL'. ''. 'A small family of rules which combines two inequalities to get a'. 'stronger inequality, thus:'. ''. ' (1) from X>=Y and X<>Y infer X>Y.'. ' (2) from X>=Y and X<>Y infer Y<X.'. ' (3) from X>=Y and X<=Y infer X=Y.'. [rules = negation]. 'Rules: negation. Location: SPADE$CHECKER:GENINEQS.RUL'. ''. 'A rule family which allows inequalities to be rewritten to include'. 'or exclude not, e.g. A<>B & (not A=B), etc.'. spark-2012.0.deb/lib/checker/helptext/rxarith.chl0000644000175000017500000000463011343737761020636 0ustar eugeneugen[rules = arith]. 'Rules: arith. Location: SPADE$CHECKER:ARITH.RUL'. ''. 'These rules are elementary arithmetic simplification rules, e.g.'. ''. ' X*1, 1*X --> X X+0, 0+X --> X'. ' X*0,0*X --> 0 X-0 --> X'. ' X div 1 --> X X/1 --> X'. ' (X*N) div N --> X (N*X) div N --> X'. ' (X/Y)*Y --> X (X/Y)*Y --> X'. ' (X*Y)/Y --> Y (Y*X)/Y --> Y'. [rules = assoc]. 'Rules: assoc. Location: SPADE$CHECKER:ARITH.RUL'. ' & SPADE$CHECKER:LOGIC.RUL'. ''. 'Rules for the associativity of the + and * arithmetic operators (in'. 'ARITH.RUL) and for the associativity of "and", "or" and "<->" (in'. 'LOGIC.RUL).'. ''. '(N.B. For associativity of set operators, see rules "sets"; for'. 'associativity of sequence @ (append) operator, see rules "append".)'. [rules = commut]. 'Rules: commut. Location: SPADE$CHECKER:ARITH.RUL'. ' & SPADE$CHECKER:LOGIC.RUL'. ''. 'Rules for the commutativity of the + and * arithmetic operators (in'. 'ARITH.RUL) and for the commutativity of "and", "or" and "<->" (in'. 'LOGIC.RUL).'. ''. [rules = distrib]. 'Rules: distrib. Location: SPADE$CHECKER:ARITH.RUL'. ' & SPADE$CHECKER:LOGIC.RUL'. ''. 'Rules for the distributivity of * over + and - operators (in ARITH.RUL)'. 'and for the distributivity of "and" over "or" and of "or" over "and"'. '(in LOGIC.RUL).'. ''. '(N.B. For distributivity of set operators, see rules "sets".)'. [rules = minus]. 'Rules: minus. Location: SPADE$CHECKER:ARITH.RUL'. ''. 'A family of rules for the unary and binary minus operators, thus:'. ''. ' X-X,-(0) --> 0 -(-X) --> X'. ' -(A+B) <--> (-A)+(-B) <--> (-A)-B'. ' A+(-B) <--> A-B <--> -(B-A)'. ' -A*B <--> -(A*B) <--> A*(-B)'. ' -A*(-B) <--> A*B .'. [rules = intdiv]. 'Rules: intdiv. Location: SPADE$CHECKER:ARITH.RUL'. ''. 'Rules about the integer "div" operator.'. ''. 'Rules include relationship between (A+B) div C and (A div C)+(B div C)'
'when A (or B) is a multiple of C and arithmetic negation within div'. 'expressions.'. spark-2012.0.deb/lib/checker/helptext/rxrecord.chl0000644000175000017500000000415611343737761021010 0ustar eugeneugen[rules = record]. 'Rules: record. Location: SPADE$CHECKER:RECORD.RUL'. ''. 'A special-invocation family of rules which express knowledge of the'. 'FDL record functions. (This knowledge is built into the checker\'s'. 'expression simplifier.) The rules are:'. ''. 'record(1): fld_F(upf_F(_, VALUE) may_be_replaced_by VALUE.'. 'record(2): upf_F(REC, fld_F(REC)) may_be_replaced_by REC.'. 'record(3): upf_F(upf_G(R, VG), VF) may_be_replaced_by'. ' upf_G(upf_F(R, VF), VG) if [ "F" <> "G" ].'. 'record(4): fld_F(upf_G(R, V)) may_be_replaced_by fld_F(R)'. ' if [ "F" <> "G" ].'. 'record(5): upf_F(upf_F(R, _), V) may_be_replaced_by upf_F(R, V).'. [rules = record_equality]. 'Rules: record_equality. Location: SPADE$CHECKER:RECORD.RUL'. ''. 'A special-invocation family of rules which allow one to infer that two'. 'records are equal if all of their fields are equal, i.e. we can infer'. 'that R=S provided we can prove each of fld_field1(R)=fld_field1(S), ...,'. 'fld_fieldN(R)=fld_fieldN(S) in turn.'. ''. 'Rules record_equality(1-10) are for records with 1 to 10 fields'. 'respectively; rule record_equality(11) deals with records of 11 or'. 'more fields in a more general way.'. [rules = mk__record]. 'Rules: mk__record. Location: SPADE$CHECKER:RECORD.RUL'. ''. 'mk__record(1) is a rule which allows a field in a SPARK record'. 'aggregate expression to be accessed. Thus, given'. ''. ' R = mk__record(day := 21, month := jul, year := 1830)'. ''. 'mk__record(1) may be used to replace:'. ''. ' (1) fld_day(R) by 21,'. ' (2) fld_month(R) by jul, and'. ' (3) fld_year(R) by 1830.'. ''. 'mk__record(2) allows updating of record aggregate expressions. Thus,'. 'with this rule we can replace, for instance,'. ''. ' upf_day(R, 22) by mk__record(day := 22, month := jul, year := 1830)'. ''. 'and so on.'. ''. 'Note that rule mk__record(2) can only update one field at a time; so'. ''. ' upf_month(upf_year(R, 1688), nov)'. ''. 'would require two applications of mk__record(2) to simplify it down to'. 'a single record aggregate expression (on upf_year, then upf_month).'. spark-2012.0.deb/lib/checker/helptext/quit.chl0000644000175000017500000000042411343737761020134 0ustar eugeneugen[quit]. 'Command example: CHECK|: quit.'. ' '. 'This command quits the current proof frame (if any) entered by subgoaling'. 'or by an attempted proof by cases, contradiction, implication, induction'. 'or quantifier unwrapping, and reverts to the previous proof frame.'. spark-2012.0.deb/lib/checker/helptext/case.chl0000644000175000017500000000075511343737761020074 0ustar eugeneugen[case]. 'Command example: CHECK|: case 2.'. ''. 'The "case" command tells the checker which case to consider. When the'. '"prove by cases" command is issued, the checker automatically begins on'. 'case 1; then, when the goal formula is proved using case 1, the checker'. 'automatically moves on to case 2, etc., until all cases have been proved.'. 'The user can override this sequence, moving back to an earlier case or'. 'forward to a later case, using the "case" command as above.'. spark-2012.0.deb/lib/checker/helptext/rxmodular.chl0000644000175000017500000000077611343737761021201 0ustar eugeneugen[rules = bitwise]. 'Rules: bitwise Location: SPADE$CHECKER:BITWISE.RUL'. ''. 'Rules for handling the bit__and, bit__or and bit__xor functions'. 'associated with SPARK modular types.'. 
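The record rules described above (record(1-5) and mk__record(1-2)) behave like field lookup and single-field update on a record value. The short standalone Prolog sketch below models a record as a list of Field-Value pairs to make that behaviour concrete; it is not the checker's own representation, and fld/3 and upf/4 are illustrative names only.

% Sketch only: fld/3 reads a field, upf/4 replaces one field and keeps the rest.
:- use_module(library(lists)).
fld(F, Rec, V)       :- memberchk(F-V, Rec).
upf(F, Rec0, V, Rec) :- selectchk(F-_, Rec0, Rec1), Rec = [F-V|Rec1].

% ?- upf(day, [day-21, month-jul, year-1830], 22, R), fld(day, R, D), fld(month, R, M).
% R = [day-22, month-jul, year-1830], D = 22, M = jul.
% Reading back day gives the new value (as in record(1)); month is untouched (as in record(4)).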
[rules = modular]. 'Rules: modular Location: SPADE$CHECKER:MODULAR.RUL'. ''. 'Rules for handling the SPARK mod operator. Please note that the'. 'SPARK mod operator differs from the Pascal mod operator, and that'. 'these rules are only for use with the VC output of the SPARK'. 'Examiner.'. spark-2012.0.deb/lib/checker/helptext/show.chl0000644000175000017500000000055711343737761020141 0ustar eugeneugen[show]. 'Command example: CHECK|: show.'. ''. 'The "show" command displays the setting of each of the user-configurable'. 'flags, which may be modified via the "set" command. It also displays'. 'statistics for the proof session, giving the CPU (and garbage collect)'. 'time used, the amount of memory used and the amount of memory still free'. 'at present.'. spark-2012.0.deb/lib/checker/helptext/replace.chl0000644000175000017500000000166711343737761020577 0ustar eugeneugen[replace]. 'Command examples: CHECK|: replace.'. ' CHECK|: replace h#3.'. ' CHECK|: replace c#1: X+0.'. ' CHECK|: replace h#7: element(_,_) by 0.'. ' CHECK|: replace c#4: X+Y by Y+X using commut.'. ' CHECK|: replace all: a1+0 by a1 using arith.'. ''. 'The "replace" command may be used to replace a subexpression of one of'. 'the current hypotheses or conclusions by another expression, provided'. 'that the replacement may be justified by one of the replacement rules in'. 'the rules database. The user is prompted for patterns for the appropriate'. 'expressions and may supply the name of the rule to be used.'. ''. 'If "replace all: ..." is used and a suitable substitution is found, all'. 'occurrences of the chosen (fully-instantiated) expression are replaced'. 'by the new expression, in every hypothesis and conclusion.'. spark-2012.0.deb/lib/checker/helptext/forget.chl0000644000175000017500000000121011343737761020432 0ustar eugeneugen[forget]. 'Command examples: CHECK|: forget [3,5,6].'. ' CHECK|: forget h#3 & h#5-6.'. ''. 'The "forget" command allows you to tell the checker to forget one or more'. 'hypotheses of the current VC. Thereafter, although these hypotheses still'. 'exist and can be used in constructing a proof, they are not displayed on'. 'the screen by "list". The exception to this is if the user uses'. 'the "list" command to list a single hypothesis; in this case, the'. 'hypothesis will be displayed whether or not it has been forgotten.'. ''. 'This command can be useful to cut down the amount of information on the'. 'screen.'. spark-2012.0.deb/lib/checker/helptext/rxseq.chl0000644000175000017500000000526611343737761020325 0ustar eugeneugen[rules = seqlen]. 'Rules: seqlen. Location: SPADE$CHECKER:SEQ.RUL'. ''. 'A family of rules for the length of a sequence (refer also to section'. '1.2.6 of the Checker manual).'. [rules = append]. 'Rules: append. Location: SPADE$CHECKER:SEQ.RUL'. ''. 'These rules give properties of the FDL "@" (append) operator, thus:'. ''. ' (1-2): appending the empty sequence to either side of a list leaves'. ' the list unchanged.'. ' (3): if A and B are actual lists [a1,...,an], [b1,...,bm], then'. ' A @ B is the list [a1,...,an,b1,...,bm].'. ' (4-5): a non-empty list S is equal to [first(S)] @ nonfirst(S), and'. ' to nonlast(S) @ [last(S)].'. ' (6): @ is associative, i.e. (X @ Y) @ Z = X @ (Y @ Z).'. [rules = first]. 'Rules: first. Location: SPADE$CHECKER:SEQ.RUL'. ''. 'These rules express properties of the FDL sequence function first(_):'. ''. ' (1): first([H,...]) is H.'. ' (2): first([H,...] @ L) is H.'. ' (3): first(X @ Y) is first(X) provided X <> [].'. ''. '(N.B. 
to show first([] @ X) is first(X), use append(1-2) to rewrite'. '[] @ X to X.)'. [rules = last]. 'Rules: last. Location: SPADE$CHECKER:SEQ.RUL'. ''. 'These rules express properties of the FDL sequence function last(_):'. ''. ' (1): last([H]) is H.'. ' (2): last(X @ [H,...]) is last([H,...]).'. ' (3): last([H|T]) is last(T) provided T <> [].'. ' (4): last(X @ Y) is last(Y) provided Y <> [].'. [rules = nonfirst]. 'Rules: nonfirst. Location: SPADE$CHECKER:SEQ.RUL'. ''. 'These rules express properties of the FDL sequence function nonfirst(_):'. ''. ' (1): nonfirst([H|T]) is T.'. ' (2): nonfirst(T) is [] provided length(T)=1.'. ' (3): nonfirst(X @ Y) is nonfirst(X) @ Y provided X <> [].'. ' (4): nonfirst([H|T] @ Y) is T @ Y.'. [rules = nonlast]. 'Rules: nonlast. Location: SPADE$CHECKER:SEQ.RUL'. ''. 'These rules express properties of the FDL sequence function nonlast(_):'. ''. ' (1): nonlast([H]) is [].'. ' (2): nonlast(T) is [] provided length(T)=1.'. ' (3): nonlast([H|T]) is [H|T1] provided nonlast(T)=T1.'. ' (4): nonlast(X @ Y) is X @ nonlast(Y) provided Y <> [].'. ' (5): nonlast(X @ [H]) is X.'. [rules = seq]. 'Rules: seq. Location: SPADE$CHECKER:SEQ.RUL'. ''. 'These rules express simple sequence equality properties: that two'. 'sequences are equal if and only if they are either both empty or'. 'they are both non-empty and have the same first and nonfirst (or'. 'last and nonlast) components.'. spark-2012.0.deb/lib/checker/helptext/rxfdlfunc.chl0000644000175000017500000000121111343737761021140 0ustar eugeneugen[rules = abs]. 'Rules: abs. Location: SPADE$CHECKER:FDLFUNCS.RUL'. ''. 'The "abs" family of rules provides a number of rewrite and inference'. 'rules about the FDL abs(_) function'. [rules = sqr]. 'Rules: sqr. Location: SPADE$CHECKER:FDLFUNCS.RUL'. ''. 'A rule family for the FDL sqr(X) (i.e. X*X) function.'. [rules = odd]. 'Rules: odd. Location: SPADE$CHECKER:FDLFUNCS.RUL,'. ''. 'A collection of rules for the FDL odd(_) function.'. [rules = exp]. 'Rules: exp. Location: SPADE$CHECKER:FDLFUNCS.RUL'. ''. 'Rules about the exponentiation operator (for SPARK).'. spark-2012.0.deb/lib/checker/helptext/unwrap.chl0000644000175000017500000000112311343737761020463 0ustar eugeneugen[unwrap]. 'Command examples: CHECK|: unwrap.'. ' CHECK|: unwrap h#4.'. ' CHECK|: unwrap c#1.'. ''. 'With the "unwrap" command, the user may "unwrap" the outermost quantifier'. 'of a hypothesis or conclusion of the current VC, where this is possible.'. 'Note that in certain cases -- an existentially quantified hypothesis or'. 'a universally quantified conclusion -- the quantified formula must be free'. 'of universal variables (i.e. all such variables must have been instantiated'. 'in these cases before further unwrapping is permitted).'. spark-2012.0.deb/lib/checker/helptext/prove.chl0000644000175000017500000000132411343737761020305 0ustar eugeneugen[prove]. 'Command examples: CHECK|: prove x>0 by contradiction.'. ' CHECK|: prove c#1 by implication.'. ' CHECK|: prove c#3 by induction.'. ' CHECK|: prove c#2 by cases.'. ' CHECK|: prove x+y>=0 by cases on h#4.'. ''. 'The "prove ... by ..." command allows you to employ any of the four proof'. 'strategies illustrated above on a conclusion or formula. You will enter'. 'a new proof frame in which the conclusions are what must be proved to'. 'meet the goals of the selected proof strategy. There may also be new'. 'hypotheses which hold in this proof frame, again added temporarily while'. 'you are inside this proof attempt.'. 
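The sequence help text above (families seqlen, append, first, last, nonfirst and nonlast) is summarised by the identities in append(4-5): a non-empty sequence S equals [first(S)] @ nonfirst(S) and also nonlast(S) @ [last(S)]. Below is a standalone Prolog sketch of those selectors on plain lists, purely to illustrate the identities; it is not taken from the rule files, and last_/2 is named that way only to avoid the library predicate last/2.

% Sketch only: FDL-style sequence selectors on ordinary Prolog lists.
first([H|_], H).
nonfirst([_|T], T).
last_([X], X) :- !.
last_([_|T], X) :- last_(T, X).
nonlast([_], []) :- !.
nonlast([H|T], [H|U]) :- nonlast(T, U).

% append(4-5): both decompositions rebuild the original sequence.
check_append_4_5(S) :-
    S \== [],
    first(S, F), nonfirst(S, NF), append([F], NF, S),
    last_(S, L), nonlast(S, NL), append(NL, [L], S).

% ?- check_append_4_5([a,b,c]).   % succeeds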
spark-2012.0.deb/lib/checker/helptext/infer.chl0000644000175000017500000000121411343737761020253 0ustar eugeneugen[infer]. 'Command examples: CHECK|: infer.'. ' CHECK|: infer c#3.'. ' CHECK|: infer x+y>0.'. ' CHECK|: infer c#2 using inequals.'. ' CHECK|: infer x>=y using WILD from [2,3,6].'. ''. 'This command may be used to attempt to infer a formula directly from one'. 'of the checker\'s inference rules, given the current hypotheses hold. If'. 'the formula can be inferred, it will be added to the current collection'. 'of hypotheses (if not already present), otherwise the checker will offer'. 'the user the choice of subgoaling if a suitable rulematch was found.'. spark-2012.0.deb/lib/checker/helptext/standard.chl0000644000175000017500000000206711343737761020757 0ustar eugeneugen[standardise]. 'Command examples: CHECK|: standardise.'. ' CHECK|: standardise i-1+1.'. ' CHECK|: standardise p or not p.'. ' CHECK|: standardise h#7.'. ' CHECK|: standardise c#3.'. ''. 'This command may be used to convert a current hypothesis or conclusion'. 'into standard form, or to standardise a user-supplied expression.'. 'Once calculated, the standardised expression is displayed, and the'. 'user has the choice of whether or not the new expression should be'. 'saved. If the user opts to save the standardised expression, it either'. 'overwrites the old expression (if a hypothesis or conclusion is given)'. 'or it is added as a new hypothesis, e.g. saving the fact that i-1+1 has'. 'the standard form i results in the addition of a new hypothesis i-1+1=i.'. 'If a user-supplied boolean expression was given, the result will'. 'be saved either by saving the original expression (if the standard form'. 'is "true") or by adding the new hypothesis OLD_EXPR <-> NEW_EXPR.'. spark-2012.0.deb/lib/checker/helptext/consult.chl0000644000175000017500000000152011343737761020637 0ustar eugeneugen[consult]. 'Command examples: CHECK|: consult.'. ' CHECK|: consult \'dt.rls\'.'. ''. 'This command declares a new file of user-defined proof rules to the'. 'checker. It is scanned, to check that all rules are well-formed and do'. 'not violate the restrictions placed on rules by the checker, so it may'. 'take a little while before it succeeds. Watch out for any messages, as'. 'these may indicate syntax (or other) errors in your rulefile. If more'. 'than one file of rules is to be submitted to the checker for use in the'. 'proof attempt, the order in which they are declared to the checker with'. 'this command will determine the order in which the files are searched,'. 'with earlier-declared files being searched first. The filename (for'. 'which, if omitted, the user is prompted) should be a Prolog atom.'. spark-2012.0.deb/lib/checker/helptext/help.chl0000644000175000017500000000050711343737761020104 0ustar eugeneugen['help']. 'Command examples: CHECK|: help.'. ' CHECK|: help infer.'. ''. '"help" calls up useful help-text from a file for the user about the'. 'checker and its commands. To see the general help page, which lists the'. 'commands available, type "help", otherwise use the "help command"'. 'form.'. spark-2012.0.deb/lib/checker/helptext/newvc.chl0000644000175000017500000000201111343737761020266 0ustar eugeneugen[newvc]. 'Command examples: CHECK|: newvc.'. ' CHECK|: newvc 2.'. ''. 'This command invokes the checker on a particular VC; you are asked for'. 'the number of the VC you wish to work on and this VC is then copied into'. 'the checker\'s current workspace ready for your proof-attempt. The VC'. 
'must be one of those loaded in from a .VCG file.'. ''. 'If all VCs have been proved, the command will fail. If the VC number'. 'which you have supplied is non-existent (either because it is too large'. 'or because you have already proved that particular VC), you will be'. 'told which VCs remain to be proved and prompted to try again.'. ''. 'A new feature, introduced in version 1.2, is that if you type "newvc"'. 'before you have completed your proof of the currently-selected VC, you'. 'will be prompted to confirm that you wish to abandon the current proof'. 'attempt. Type "yes" to abandon the proof and execute the "newvc"'. 'command, or "no" to abandon the "newvc" and return to the current proof.'. spark-2012.0.deb/lib/checker/helptext/rxsets.chl0000644000175000017500000000126411343737761020505 0ustar eugeneugen[rules = sets]. 'Rules: sets. Location: SPADE$CHECKER:SETS.RUL'. ''. 'A family of rules for the FDL set operators. The main groupings are:'. ''. ' (1-3): simple X in A & X not_in A rules.'. ' (4-11): properties of set union (\\/).'. ' (12-19): properties of set intersection (/\\).'. ' (20-27): properties of set "lacking" (\\).'. ' (28): X not_in (set []): nothing is in the empty set.'. ' (29-36): properties of the subset_of relation.'. ' (37): definition of strict_subset_of relation.'. ' (38-39): commutativity of \\/ and /\\.'. ' (40-41): associativity of \\/ and /\\.'. ' (42-43): distributivity of /\\ over \\/ and \\/ over /\\.'. spark-2012.0.deb/lib/checker/helptext/rxenum.chl0000644000175000017500000000166411343737761020477 0ustar eugeneugen[rules = enum]. 'Rules: enum. Location: SPADE$CHECKER:ENUM.RUL'. ''. 'These rules allow the manipulation of user-defined enumerated types,'. 'e.g. by allowing pred(succ(X)) and succ(pred(X)) to be replaced by X'. '(where not at the relevant extremity) and by allowing various inferences'. 'about the inherited ordering of enumerated constants.'. [rules = enum_cases]. 'Rules: enum_cases. Location: SPADE$CHECKER:ENUM.RUL'. ''. 'This is a family of rules allowing you to infer'. ''. ' X=A1 or X=A2 or ... or X=An'. ''. 'for a variable X of enumerated type T, where'. ''. ' type T = (A1, A2, ..., An)'. ''. 'for some n in 2..25 inclusive. Rule enum_cases(N) is the rule'. 'for an enumeration with N enumeration constants.'. [rules = enumeration]. 'Rules: enumeration. Location: SPADE$CHECKER:ENUM.RUL'. ''. 'These rules define further useful inequalities of enumerated types.'. spark-2012.0.deb/lib/checker/helptext/rxquantif.chl0000644000175000017500000000044411343737761021175 0ustar eugeneugen[rules = quant]. 'Rules: quant. Location: SPADE$CHECKER:QUANTIF.RUL'. ''. 'Some simple general properties of quantified formulae, which may be'. 'used to simplify certain quantified predicates before application of'. 'the built-in Checker commands unwrap and instantiate.'. spark-2012.0.deb/lib/checker/helptext/simplify.chl0000644000175000017500000000103111343737761021001 0ustar eugeneugen[simplify]. 'Command examples: CHECK|: simplify.'. ' CHECK|: simplify h#1'. ' CHECK|: simplify c#1-2 & h#1-12'. ''. 'This command tidies up the current VC, rewriting terms such as'. '(not (not X)) to X, (X and Y) to separate hypotheses/conclusions, etc.'. ''. '(Note that any argument to "simplify" is ignored at present: the'. 'restructuring carried out by simplification is applied to the whole'. 'VC. The command arguments will become supported in a future release'. 'of the Proof Checker.)'. 
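The enum_cases help text above says that, for a variable X of an enumerated type (A1, ..., An), rule enum_cases(N) lets the checker add the case split X=A1 or ... or X=An as a hypothesis. The standalone Prolog sketch below merely builds that disjunction from a list of literals, to show the shape of the formula involved; it is illustrative only and enum_cases_formula/3 is not a checker predicate.

% Sketch only: build the right-nested disjunction used by enum_cases(N).
enum_cases_formula(X, [E], X = E) :- !.
enum_cases_formula(X, [E|Es], or(X = E, Rest)) :-
    enum_cases_formula(X, Es, Rest).

% ?- enum_cases_formula(x, [red, green, blue], F).
% F = or(x=red, or(x=green, x=blue)).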
spark-2012.0.deb/lib/checker/helptext/save.chl0000644000175000017500000000051511343737761020111 0ustar eugeneugen[save_state]. 'Command example: CHECK|: save.'. ''. 'The "save" command saves the current proof session in a .CSV file.'. 'You can then resume a proof session later on from where you left off'. '(see "help resume."). The .CSV file will be in internal Prolog'. 'syntax: you should not attempt to modify this file in any way.'. spark-2012.0.deb/lib/checker/helptext/rxinequal.chl0000644000175000017500000000474111343737761021170 0ustar eugeneugen[rules = inequals]. 'Rules: inequals. Location: SPADE$CHECKER:INTINEQS.RUL'. ' & SPADE$CHECKER:NUMINEQS.RUL'. ''. 'This is a large family of inference rules for integer, numeric and'. 'general inequalities. The main groupings are:'. ''. ' (1-6): subtraction from one side of inequality (e.g. i-N<j if...)'. ' (19-22): addition of same no. both sides of < (e.g. i+N<N+j if...)'. ' (23-26): addition of same no. both sides of > (e.g. i+N>N+j if...)'. ' (27-30): addition of same no. both sides of <> (e.g. N+i<>N+j if...)'. ' (31-33): subtract same no. both sides </>/<> (e.g. i-N>j-N if...)'. ' (34-42): multiplication both sides by same no. (e.g. i*N<=j*N if...)'. ' (43-48): addition > : A+C>B+D from (A...B or D), (C...D or B)'. ' (49-54): addition < : A+C<B+D from (A...B or D), (C...D or B)'. ' (55-60): addition <> : A+C<>B+D from (A...B or D), (C...D or B)'. ' (61-64): addition of same no. both sides of = (e.g. i+N=N+j if...)'. ' (65-68): addition of same no. both sides of <= (e.g. N+i<=j+N if...)'. ' (69-72): addition of same no. both sides of >= (e.g. i+N>=j+N if...)'. ' (73-75): subtract same no. both sides =/<=/>= (e.g. i-N>j-N if...)'. ' (76): I*N=J*N may_be_deduced_from I=J. (* same no. each side =)'. ' (77-79): addition >= : A+C>=B+D from (A...B or D), (C...D or B)'. ' (80-82): addition <= : A+C>=B+D from (A...B or D), (C...D or B)'. ' (83-85): addition = : A+C>=B+D from (A...B or D), (C...D or B)'. ' (86-87): lower bounds for N*(A div N) for A>=0, N>0'. ' (88-89): upper bounds for N*(A div N) for A>=0'. ' (90-92): sign of A div N'. ' (93-95): relations between I div N and J div N'. ' (96-97): lower bounds for N*(A div N) for A>=0, N<0'. ' (98-100): sign of A div N'. ' (101-103): strong to weak inequality for integers only'. ' (104-113): Given X*N r Y*N, rules about X r Y for N>0 and N<0'. ' (114-121): Rules about the "sign" of X*Y, given signs of X and Y'. [rules = zero]. 'Rules: zero. Location: SPADE$CHECKER:NUMINEQS.RUL'. ''. 'This is a short family of rules about equality (or otherwise) to zero.'. 'The rules are:'. ''. ' (1-2): infer X*Y = 0 from X = 0 (or Y = 0).'. ' (3): infer X=0 or Y=0 from X*Y = 0.'. ' (4): infer X*Y <> 0 from X<>0 and Y<>0.'. ' (5-6): infer X <> 0 from X*Y <> 0 (or Y*X <> 0).'. spark-2012.0.deb/lib/checker/helptext/exit.chl0000644000175000017500000000026611343737761020127 0ustar eugeneugen[exit]. 'Command example: CHECK|: exit.'. ' '. 'Exit from this invocation of the checker, closing the proof log with a'. 'list of all proof rules used in this proof session.'. spark-2012.0.deb/lib/checker/helptext/traverse.chl0000644000175000017500000000266111343737761021012 0ustar eugeneugen[traverse]. 'Command examples: CHECK|: traverse.'. ' CHECK|: traverse h#8.'. ' CHECK|: traverse c#2.'. ' CHECK|: traverse X where h#5=(X or _).'. ''. 'This command may be used to "explore" the structure of an expression to'. 'whatever depth is required, and is particularly useful in gaining an'. 'understanding of the composition of large expressions.'. ''. 'One enters at the toplevel of the chosen expression and may descend via'
'dN (Down argument number N) and ascend via u (up) commands. At any'. 'level, r redisplays the principal functor (function or operator at the'. 'top of the current expression tree) and arguments, t shows typechecking'. 'information for this level, l shows the location of the current'. 'subexpression within the toplevel expression (lb - the default - is'. '"locate brief" in which other subexpressions are abbreviated to ...,'. 'while lf - "locate full" - does not abbreviate), s shows the toplevel'. 'expression in full and h displays a help summary of traverse commands.'. 'To exit from the traverse command, type x.'. ''. 'Traverse commands may be in either upper or lower case, and do not need'. 'termination by a full-stop - though any which are typed will be ignored.'. 'Multiple commands may be typed on a line, with optional white-space'. 'separators. Synonyms permitted are "-" for "u" (up), "+" for "d" (down)'. 'and "?" for "h" (help).'. spark-2012.0.deb/lib/checker/helptext/done.chl0000644000175000017500000000062611343737761020103 0ustar eugeneugen[done]. 'Command examples: CHECK|: done.'. ' CHECK|: done c#2.'. ' CHECK|: done c#2 & c#5-7.'. ''. 'The "done" command checks to see if any of the current conclusions can'. 'be inferred reasonably directly from the current hypotheses and, if so,'. 'these conclusions are taken to be true (under current conditions, i.e.'. 'taking account cases, etc.).'. spark-2012.0.deb/lib/checker/helptext/delete.chl0000644000175000017500000000101011343737761020404 0ustar eugeneugen[delete]. 'Command examples: CHECK|: delete [3,5,6].'. ' CHECK|: delete h#3 & h#5-6.'. ''. 'The "delete" command allows you to tell the checker to delete one or more'. 'hypotheses of the current VC. Thereafter, these hypotheses cannot be'. 'used in constructing a proof, unless they are undeleted with "undelete".'. ''. 'This command can be useful to cut down the amount of information on the'. 'screen and to speed up proof work by eliminating information which is no.'. 'longer needed.'. spark-2012.0.deb/lib/checker/helptext/printvc.chl0000644000175000017500000000056011343737761020640 0ustar eugeneugen[printvc]. 'Command example: CHECK|: printvc.'. ''. 'This command creates a file, VCn.LIS (where n is the current VC number)'. 'in the user\'s default directory containing a listing of the current VC'. 'with related information. This is particularly useful in proofs of'. 'large/complex VCs, and can save time and trouble issuing repeated'. '"list" commands.'. spark-2012.0.deb/lib/checker/helptext/undelete.chl0000644000175000017500000000051511343737761020760 0ustar eugeneugen[undelete]. 'Command examples: CHECK|: undelete [3,5,6].'. ' CHECK|: undelete h#3 & h#5-6.'. ''. 'The "undelete" command allows you to tell the checker to undelete one or'. 'more deleted hypotheses of the current VC, so that these hypotheses will'. 'once again be useable by the checker in proof steps'. spark-2012.0.deb/lib/checker/helptext/list.chl0000644000175000017500000000063311343737761020127 0ustar eugeneugen[list]. 'Command examples: CHECK|: list.'. ' CHECK|: list h#2.'. ' CHECK|: list h#1-4.'. ' CHECK|: list c#3.'. ' CHECK|: list c#2-5.'. ' CHECK|: list h#1-4 & c#2-5.'. ''. 'This command is used to display the current VC (or part of it) at the'. 'terminal, in hypothesis/conclusion format.'. spark-2012.0.deb/lib/checker/helptext/status.chl0000644000175000017500000000047411343737761020502 0ustar eugeneugen[status]. 'Command example: CHECK|: status.'. ''. 
'This command gives a status report on which conclusions (if any) have been'. 'proved for which particular cases, if a case-consideration is in effect,'. 'or what the goal at the preceding level is in a proof by contradiction'. 'or in a subgoaling attempt.'. spark-2012.0.deb/share/0000755000175000017500000000000011753202341013537 5ustar eugeneugenspark-2012.0.deb/share/spark/0000755000175000017500000000000011753203755014671 5ustar eugeneugenspark-2012.0.deb/checker/0000755000175000017500000000000011753203756014054 5ustar eugeneugenspark-2012.0.deb/checker/infer2.pro0000644000175000017500000005112111753202340015747 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /******** INFER: top-level checker command ********/ infer :- retract(on_filename(FNAME)), file_exists_and_is_readable(FNAME), see(FNAME), seen, see_correct_input_stream, /* CFR015 */ fail. infer :- ( command_arg(expression, G) ; prompt_user('INFER -- Type formula to infer.','INFER -- Formula? '), rread(GX), parse_expression(GX, G) ), ( G=c#N, conc(N,F) ; F=G ), !, ( command_arg(rule, exists), ! ; prompt_user('Rulename pattern? '), rread(RP), parse_rulename(RP) ), !, ( command_arg(hyplist, HYPLIST), !, temp_del_hyps(HYPLIST) ; true ), !, retractall(inf_match), ( try_infer(F,R,[],on), novars(F), /* CFR009 */ ( typechecking(on), ( checktype(F, boolean) ; write('!!! ERROR: Above formula did not type-check correctly.'), nl, !, fail ) ; true ), nl, write('*** '), print(F), write(' by '), print(R), write('.'), nl, nl, restore_temp_del_hyps, add_new_hyp(F,1), ( G=c#N, done(N) ; true ) ; nl, !, ( inf_fail(R,F), restore_temp_del_hyps ; restore_temp_del_hyps, fail ) ), !. /*** TRY_INFER(F,R,J,D) - try to infer F using rules R, leaving J unproven ***/ try_infer(FORMULA,RULENAME,REMAINING_GOALS,DISPLAY_STATUS) :- retractall(type_classification_done), retractall(type_classification(_,_)), retractall(abandon_search), /* CFR047 */ retractall(search_count(_)), /* CFR047 */ ( novars(FORMULA), ( atom(FORMULA) ; FORMULA=..[_F|ARGS], save_type_classification_list(ARGS), asserta(type_classification_done) ) ; true ), !, ( rule_match(RULENAME,FORMULA,GOALS), on_filename(FILE), ok_type_classification(FORMULA, FILE, RULENAME), fulfil(GOALS,UNSATISFIED_GOALS) ; \+ search_count(_), !, command_arg(rulename, R), nl, write('Cannot infer '), print(FORMULA), write(' using '), ( novars(R), write('rule '), print(R) ; var(R), write('rule '), print(R) ; nonvar(R), R =.. 
[Rfamily |_ ], write('rule-family '), print(Rfamily) ), write('.'), nl, !, fail ), ( abandon_search, /* CFR047 */ !, /* CFR047 */ fail /* CFR047 */ ; /* CFR047 */ true /* CFR047 */ ), /* CFR047 */ display_inf_rule(RULENAME,FORMULA,UNSATISFIED_GOALS,DISPLAY_STATUS), UNSATISFIED_GOALS=REMAINING_GOALS, ( REMAINING_GOALS=[], assertz(logfact(rulematch, ([FILE,RULENAME]: (FORMULA may_be_deduced_from GOALS)))) ; REMAINING_GOALS\=[] ). /*** DISPLAY_INF_RULE(R,F,J,D) - display the rule on the screen if J o.k. ***/ display_inf_rule(R,F,J,D) :- ( D=on, ( display_subgoals_max(N) ; N=99 ), length(J, LEN), LEN =< N ; D=off ), ( display_var_free_only(off), TESTED=no ; var_free(J), TESTED=yes ), nl, nl, print(R), write(': '), print(F), put_code(32), ( J=[], write('may be inferred directly') ; write('follows from '), write_justs(J) ), ( ( TESTED=yes ; var_free(J) ), ( inf_match ; assertz(inf_match) ), increment_search_count /* CFR047 */ ; true ), !. display_inf_rule(_R,_F,J,_D) :- var_free(J), ( inf_match ; assertz(inf_match) ), increment_search_count, /* CFR047 */ !. display_inf_rule(_,_,_,_) :- !. increment_search_count :- /* CFR047 */ retract(search_count(OLD)), /* CFR047 */ NEW is OLD + 1, /* CFR047 */ assertz(search_count(NEW)), /* CFR047 */ !, /* CFR047 */ ( /* CFR047 */ NEW = 20, /* CFR047 */ !, /* CFR047 */ ask_user_if_search_should_continue /* CFR047 */ ; /* CFR047 */ true /* CFR047 */ ), /* CFR047 */ !. /* CFR047 */ increment_search_count :- assertz(search_count(1)), !. /* CFR047 */ ask_user_if_search_should_continue :- /* CFR047 */ nl, nl, /* CFR047 */ write('%%% TWENTY MATCHES FOUND: Do you wish to continue the search?'), nl, /* CFR047 */ write(' Type Y(es) to continue search, N(o) to abandon it...'), nl, nl, /* CFR047 */ read_answer(' Continue search', Answer), /* CFR047 */ ( /* CFR047 */ Answer = yes, /* CFR047 */ retractall(search_count(_)) /* CFR047 */ ; /* CFR047 */ Answer = no, /* CFR047 */ assertz(abandon_search) /* CFR047 */ ), /* CFR047 */ !. /* CFR047 */ /*** INF_FAIL(R,F) - called if infer fails to try subgoaling if desired ***/ inf_fail(_R,_F) :- (\+ inf_match), !, fail. inf_fail(R,F) :- inf_match, !, nl, nl, read_answer('Do you wish to subgoal',Ans), !, ( Ans=yes, try_infer(F,R,J,off), novars(F), /* CFR009 */ checktype(F, boolean), /* CFR009 */ var_free(J), nl, read_answer_once('Use this rule',A), A=yes, on_filename(FILE), assertz(logfact(subgoal, ([FILE,R]: (F may_be_deduced_from J)))), restore_temp_del_hyps, start_subgoal(F,J,true,'SUBGOALING') ; Ans=no, !, fail ), !. /*** RULE_MATCH(R,G,J) - general rule, name R, to match goal G & return J ***/ rule_match(R,F,J) :- use_subst_rules_for_equality(on), nonvar(F), F=(X=Y), nonvar(X), nonvar(Y), X=..[XOP|XArgs], Y=..[YOP|YArgs], make_up(X1,XOP,XArgs), make_up(Y1,YOP,YArgs), !, X1=..[_|X1Args], Y1=..[_|Y1Args], fetch_inf_or_subst_rule_for_eq(R,X1,Y1,J1), add_conds(XArgs=X1Args,J1,JJ), add_conds(YArgs=Y1Args,JJ,J). rule_match(R,F,J) :- nonvar(F), F=..[OP|Args], make_up(F1,OP,Args), !, F1=..[_|Args1], fetch_inference_rule(R,F1,J1), add_conds(Args=Args1,J1,J). /*** MAKE_UP(F1,OP,Args) - make F1 have functor OP & most general args ***/ make_up(F1,OP,Args) :- !, generalise(Args,Blanks), !, F1=..[OP|Blanks], !. /*** GENERALISE(A,Blanks) - create list Blank of vars, same length as A ***/ generalise([],[]) :- !. generalise([_|T],[_|U]) :- !, generalise(T,U), !. /*** ADD_CONDS(XL=YL,OJ,NJ) - unify lists XL & YL, creating NJ from OJ ***/ add_conds([]=[],J,J) :- !. add_conds([C1|T1]=[C1|T2],OJ,NJ) :- !, add_conds(T1=T2,OJ,NJ), !. 
add_conds([C1|T1]=[C2|T2],OJ,[C1=C2|NJ]) :- !, add_conds(T1=T2,OJ,NJ), !. /*** FETCH_INFERENCE_RULE(NAME, FORMULA, JUSTIFICATIONS) ***/ fetch_inference_rule(R,F,J) :- use_rulefile(F,FNAME), get_term(FNAME,T), ( T=(R: (F may_be_deduced_from J)) ; T=(R: (F may_be_deduced)), J=[] ), \+ banned_rule(FNAME,R), is_chosen_rulename(R). /*** FETCH_INF_OR_SUBST_RULE_FOR_EQ(NAME, LHS, RHS, JUSTIFICATIONS) ***/ fetch_inf_or_subst_rule_for_eq(R,X1,Y1,J) :- use_rulefile(_ = _,FNAME), get_term(FNAME,T), ( T=(R: (X=Y may_be_deduced_from J1)), /* CFR006 */ add_conds([X1,Y1]=[X,Y],J1,J) /* CFR006 */ ; T=(R: (X=Y may_be_deduced)), /* CFR006 */ add_conds([X1,Y1]=[X,Y],[],J) /* CFR006 */ ; T=(R: (X1 may_be_replaced_by Y1 if J)) ; T=(R: (Y1 may_be_replaced_by X1 if J)) ; T=(R: (X1 & Y1 are_interchangeable if J)) ; T=(R: (Y1 & X1 are_interchangeable if J)) ; T=(R: (X1 may_be_replaced_by Y1)), J=[] ; T=(R: (Y1 may_be_replaced_by X1)), J=[] ; T=(R: (X1 & Y1 are_interchangeable)), J=[] ; T=(R: (Y1 & X1 are_interchangeable)), J=[] ), \+ banned_rule(FNAME,R), is_chosen_rulename(R). /*** USE_RULEFILE -- finds the rulefile FNAME appropriate for proving F ***/ /*** If FNAME cannot be found, checks to make sure that no rule-family of ***/ /*** requested name exists and prints up an error message. ***/ use_rulefile(F,FNAME) :- ( find_rulefile(F,FNAME) ; !, command_arg(rulename, R), nonvar(R), \+ ( built_in_rulefile(file==_, R) ; user_rulefile(_, R) ; special_rulefile(file==_, R) ), R =.. [Rfamily | _ ], nl, write('Could not find rule-family '), print(Rfamily), write('.'), nl, !, fail ). /*** FIND_RULEFILE(F, FNAME) -- get & save rulefile name. Searches by ***/ /*** principle functor ***/ find_rulefile(F,FNAME) :- rulefile(F,FNAME), atom(FNAME), has_matching_rulename(FNAME), file_exists_and_is_readable(FNAME), see(FNAME), seen, see_correct_input_stream, /* CFR015 */ retractall(on_filename(_)), asserta(on_filename(FNAME)). find_rulefile(_F, FNAME) :- atom(FNAME), \+ file_exists_and_is_readable(FNAME), write('Aborted: '), print(FNAME), write(' does not exist or cannot be read.'), nl, !, close_all_streams, halt. /*** HAS_MATCHING_RULENAME(FNAME) - there's a rule of chosen name in FNAME ***/ has_matching_rulename(FNAME) :- clause(command_arg(rulename, R), _), rulefile(FNAME, R), !. /*** GET_TERM(FNAME, T) -- read T from FNAME & revert to previous input ***/ get_term(FNAME,T) :- repeat, seeing(OLDFILE), see(FNAME), read_unless_abandon_search(T), /* CFR047 */ ( T\=end_of_file ; T=end_of_file, !, seen ), see(OLDFILE). read_unless_abandon_search(end_of_file) :- abandon_search, !. /* CFR047 */ read_unless_abandon_search(T) :- read_term_and_layout(T). /* CFR047 */ /***************************************************************************** INFERENCE RULE STRATEGY: F may_be_deduced_from GOALS. 1. Split GOALS into fully-instantiated-goals (i.e. primary goals) and partially-instantiated-goals (i.e. secondary goals). 2. Attempt to satisfy all primary goals. Cut & branch point. Either: a. All were satisfied. Then attempt to satisfy secondary goals. As soon as one becomes satisfied, split the remainder into primaries and secondaries and attempt to satisfy them in the same way (i.e. recursively). b. Some were not satisfied. Leave secondary goals and backtrack. Problems: direct goals. Eliminate where possible. *****************************************************************************/ fulfil([], []) :- !. 
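% A worked example of the strategy above (illustrative only, not part of the
% original sources): given GOALS = [1 + 1 = 2, X = y - 1, goal(integer(3))],
% split/4 (below) returns PRIMARIES = [1 + 1 = 2] (fully instantiated),
% SECONDARIES = [X = y - 1] (still contains a Prolog variable) and
% REST = [goal(integer(3))]; the direct goal in REST is only executed by
% do_direct_goals/2 once the goals in front of it have been dealt with.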
fulfil(GOALS, UNSATISFIED_GOALS) :- split(GOALS, PRIMARIES,SECONDARIES, REST), try_to_satisfy(PRIMARIES, UNSATISFIED_PRIMARIES), ( UNSATISFIED_PRIMARIES=[], match_up(SECONDARIES, UNSATISFIED_SECONDARIES) ; UNSATISFIED_PRIMARIES\=[], UNSATISFIED_SECONDARIES=SECONDARIES ), append(UNSATISFIED_PRIMARIES, UNSATISFIED_SECONDARIES, UNSATISFIED_FRONT), ( REST=[], UNSATISFIED_GOALS=UNSATISFIED_FRONT ; REST\=[], do_direct_goals(REST, REMAINDER), fulfil(REMAINDER, UNSATISFIED_REMAINDER), append(UNSATISFIED_FRONT, UNSATISFIED_REMAINDER, UNSATISFIED_GOALS) ). split([],[],[],[]) :- !. split([G|GOALS],[],[],[G|GOALS]) :- nonvar(G), G=goal(_), !. split([G|GOALS],[G|PRIMARIES],SECONDARIES,REMAINDER) :- novars(G), G\=goal(_), !, split(GOALS,PRIMARIES,SECONDARIES,REMAINDER), !. split([G|GOALS],PRIMARIES,[G|SECONDARIES],REMAINDER) :- split(GOALS,PRIMARIES,SECONDARIES,REMAINDER), !. do_direct_goals([G|GOALS],REMAINDER) :- nonvar(G), G=goal(DIRECT), !, call(DIRECT), do_direct_goals(GOALS,REMAINDER), !. do_direct_goals(REMAINDER,REMAINDER) :- !. try_to_satisfy([],[]) :- !. try_to_satisfy([G|GOALS],REMAINDER) :- nonvar(G), G=goal(D), !, call(D), !, try_to_satisfy(GOALS,REMAINDER), !. try_to_satisfy([G|GOALS],REMAINDER) :- infer(G), !, try_to_satisfy(GOALS,REMAINDER), !. try_to_satisfy([G|GOALS],[G|REMAINDER]) :- try_to_satisfy(GOALS,REMAINDER), !. match_up([],[]) :- !. match_up(GOALS,UNSATISFIED_GOALS) :- seek_solutions([],GOALS,UNSATISFIED), split(UNSATISFIED, PRIMARIES, SECONDARIES, REST), try_to_satisfy(PRIMARIES, REMAINDER), append(SECONDARIES, REST, UNSATISFIED_TAIL), append(REMAINDER, UNSATISFIED_TAIL, UNSATISFIED_GOALS). seek_solutions(PASSED,[G|GOALS],UNSATISFIED_GOALS) :- do_satisfy_goal(G), seek_solutions(PASSED, GOALS, UNSATISFIED_GOALS). seek_solutions(PASSED,[G|GOALS],UNSATISFIED_GOALS) :- append(PASSED,[G],NOW_PASSED), seek_solutions(NOW_PASSED,GOALS,UNSATISFIED_GOALS). seek_solutions(UNSATISFIED_GOALS,[],UNSATISFIED_GOALS) :- !. do_satisfy_goal(G) :- retractall(current_sat_goal(_)), asserta(current_sat_goal(G)), !, try_satisfy_goal(G, []). try_satisfy_goal(G, INSTANCE_LIST) :- current_sat_goal(GOAL), satisfy_goal(GOAL), \+ is_in(GOAL, INSTANCE_LIST), !, ( G=GOAL ; try_satisfy_goal(G, [GOAL|INSTANCE_LIST]) ). satisfy_goal(A=B) :- novars(A), simplify(A,X), A\=X, !, satisfy_goal(X=B). satisfy_goal(A=B) :- novars(B), simplify(B,X), B\=X, !, satisfy_goal(A=X). satisfy_goal(A<>B) :- novars(A), simplify(A,X), A\=X, !, satisfy_goal(X<>B). satisfy_goal(A<>B) :- novars(B), simplify(B,X), B\=X, !, satisfy_goal(A<>X). satisfy_goal(A>=B) :- novars(A), simplify(A,X), A\=X, !, satisfy_goal(X>=B). satisfy_goal(A>=B) :- novars(B), simplify(B,X), B\=X, !, satisfy_goal(A>=X). satisfy_goal(A<=B) :- novars(A), simplify(A,X), A\=X, !, satisfy_goal(X<=B). satisfy_goal(A<=B) :- novars(B), simplify(B,X), B\=X, !, satisfy_goal(A<=X). satisfy_goal(A>B) :- novars(A), simplify(A,X), A\=X, !, satisfy_goal(X>B). satisfy_goal(A>B) :- novars(B), simplify(B,X), B\=X, !, satisfy_goal(A>X). satisfy_goal(A<B) :- novars(A), simplify(A,X), A\=X, !, satisfy_goal(X<B). satisfy_goal(A<B) :- novars(B), simplify(B,X), B\=X, !, satisfy_goal(A<X). satisfy_goal(not G) :- ( G=(A=B), H=(A<>B) ; G=(A<>B), H=(A=B) ; G=(A>B), H=(A<=B) ; G=(A<B), H=(A>=B) ; G=(A>=B), H=(A<B) ; G=(A<=B), H=(A>B) ), satisfy_goal(H). satisfy_goal(A and B) :- novars(A), !, infer(A), satisfy_goal(B). satisfy_goal(A and B) :- novars(B), !, infer(B), satisfy_goal(A). satisfy_goal(A and B) :- satisfy_goal(A), satisfy_goal(B). satisfy_goal(A or B) :- novars(A), !, ( infer(A) ; satisfy_goal(B) ). satisfy_goal(A or B) :- novars(B), !, ( infer(B) ; satisfy_goal(A) ). satisfy_goal(A or _B) :- satisfy_goal(A). satisfy_goal(_A or B) :- satisfy_goal(B).
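% Illustrative examples only (not part of the original sources): the clauses
% above first simplify ground operands, e.g. satisfy_goal(2 + 3 > X) is
% retried as satisfy_goal(5 > X) (assuming the simplifier reduces 2 + 3 to 5),
% while a negated relation is replaced by its complement, e.g.
% satisfy_goal(not (a > b)) is retried as satisfy_goal(a <= b).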
satisfy_goal(A -> B) :- novars(A), !, ( infer(not A) ; satisfy_goal(B) ). satisfy_goal(A -> B) :- novars(B), !, ( infer(B) ; satisfy_goal(not A) ). satisfy_goal(A -> B) :- ( satisfy_goal(not A) ; satisfy_goal(B) ). satisfy_goal(A <-> B) :- satisfy_goal(A -> B), satisfy_goal(B -> A). satisfy_goal(A=B) :- ( novars(B), ( var(A), intexp(B), \+ integer(B), A iss B ; A=B ) ; novars(A), ( var(B), intexp(A), \+ integer(A), B iss A ; B=A ) ). satisfy_goal(A<>B) :- ( fact(A>B) ; fact(A<B) ). satisfy_goal(A>B) :- satisfy_goal(A>=B), novars(A<>B), infer(A<>B). satisfy_goal(A<B) :- satisfy_goal(A<=B), novars(A<>B), infer(A<>B). satisfy_goal(A>=B) :- ( fact(A>B) ; fact(B<A) ; satisfy_goal(A=B) ). satisfy_goal(A<=B) :- ( fact(A<B) ; fact(B>A) ; satisfy_goal(A=B) ). satisfy_goal(X=A+B) :- novars(X), ( novars(B), A=X-B ; novars(A), B=X-A ). satisfy_goal(X=A-B) :- novars(X), ( novars(B), A=X+B ; novars(A), B=A-X ). /*** IS_CHOSEN_RULENAME(R) - R matches a rulename specified by user ***/ is_chosen_rulename(R) :- nonvar(R), !, command_arg(rulename, R), !. is_chosen_rulename(R) :- command_arg(rulename, R). /*** OK_TYPE_CLASSIFICATION(FORMULA,FILENAME,RULENAME) ***/ ok_type_classification(F,FNAME,RULENAME) :- type_requirements(F,FNAME,RULENAME,TYPES), has_type_classification_list(TYPES), !. clear_up_could_facts :- retractall(could_infer(_)), retractall(could_not_infer(_)), !. %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/loadvc5.pro0000644000175000017500000022233511753202340016126 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /*** LOAD_VC -- load in a VC from the .VCG and .FDL files ***/ (load_vc) :- cmd_line_filename(FILENAME), ( ( done__resume, STATE = resume ; \+ done__resume, STATE = ordinary ), form_file_names(FILENAME, FL), load_vc(STATE), form_log_file_names(FL), /* CFR016 */ create_backups, write_proof_log_banner ; write('Please try again.'), nl, fail ), !. (load_vc) :- repeat, nl, write('Please type filename, without extension, in lowercase, within single'), nl, write('quotes if it is not in this directory, followed by a full-stop.'), nl, ( % Resume may be specified on the command line. done__resume, write('Resume requested. FILENAME.csv will be read.') ; \+ done__resume, write('FILENAME.vcg and FILENAME.fdl will be read.') ), nl, prompt_user('Filename? '), read_term_and_layout(FILE), ( atom(FILE), FILE = forceexit, close_all_streams, halt ; done__resume, atom(FILE), FILENAME=FILE, STATE=resume ; \+ done__resume, atom(FILE), FILENAME=FILE, STATE=ordinary ; write('Please try again.'), nl, fail ), form_file_names(FILENAME, FL), load_vc(STATE), form_log_file_names(FL), /* CFR016 */ create_backups, write_proof_log_banner, !.
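% Illustrative note (not part of the original sources): form_file_names/2
% below derives the companion file names from the name given on the command
% line or at the "Filename?" prompt.  For a hypothetical argument my_proc it
% selects my_proc.vcg, my_proc.fdl and my_proc.csv, whereas an argument ending
% in .siv (such as my_proc.siv) is itself used as the VCG file, with
% my_proc.fdl and my_proc.csv formed alongside it.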
load_vc(ordinary) :- checkfilesexist(ordinary), nl, fdlfile_name(FDLFILE), vcgfile_name(VCGFILE), /* CFR035 */ scan_file_header_to_see_if_spark(VCGFILE), /* CFR035 */ write('Reading '), print(FDLFILE), write(' (for inherited FDL type declarations)'), nl, load_decs(FDLFILE), ( echo(off) ; nl, write(' ------------------------------------------------------------'), nl, nl ), write('Loading '), print(VCGFILE), write(' (verification conditions)'), nl, load_vcs(VCGFILE), !, asserta(do_do_newvc), !. load_vc(resume) :- checkfilesexist(resume), csvfile_name(CSVFILE), nl, write('Consulting '), print(CSVFILE), write(' (checker saved proof state)'), % Supress all redefine warnings when loading the data file. set_prolog_flag(redefine_warnings, off), load_files(CSVFILE, [when(always), load_type(source), compilation_mode(assert_all)]), % Re-enable all redefine warnings. set_prolog_flag(redefine_warnings, on), !. form_file_names(FILENAME, FL) :- retractall(fdlfile_name(_)), retractall(vcgfile_name(_)), retractall(csvfile_name(_)), !, name(FILENAME,FILENAMELIST), /* CFR022 */ ( /* CFR022 */ gen_append(FL, ".siv", FILENAMELIST), /* CFR022 */ asserta(vcgfile_name(FILENAME)) /* CFR022 */ ; /* CFR022 */ FL = FILENAMELIST, /* CFR022 */ append(FL,".vcg",INLIST), /* CFR022 */ name(VCGFILE,INLIST), /* CFR022 */ asserta(vcgfile_name(VCGFILE)) /* CFR022 */ ), /* CFR022 */ !, /* CFR022 */ append(FL,".fdl",FLIST), name(FDLNAME,FLIST), asserta(fdlfile_name(FDLNAME)), !, append(FL,".csv",CLIST), name(CSVFILE,CLIST), asserta(csvfile_name(CSVFILE)), !. /* CFR016 */ /* specifies file names for .cmd (both input and output) and .plg files */ form_log_file_names(FL) :- /* CFR016 */ retractall(logfile_name(_)), retractall(command_log_filename(_)), /* CFR016 */ /* proof log */ ( /* if /proof_log is specified use that */ cmd_line_proof_log(PROOFLOG), asserta(logfile_name(PROOFLOG)) ; /* if no /proof_log specified, use filename.plg */ append(FL,".plg",PLIST), name(PROOFLOG, PLIST), asserta(logfile_name(PROOFLOG)) ), !, /* CFR016 */ /* command log */ ( /* CFR016 */ /* if /command_log is specified use that */ cmd_line_command_log(CMDLOG), /* CFR016 */ asserta(command_log_filename(CMDLOG)), /* CFR016 */ /* check that /command_log and script file have not been set to the same thing */ ( perform_script_file(CMDLOG), write('Aborted: Cannot set command_log and execute qualifiers to the same filename.'), nl, close_all_streams, halt ; true ) ; /* CFR016 */ /* if no /command_log specified, use filename.cmd */ append(FL,".cmd",CLIST), /* CFR016 */ name(CMDLOG, CLIST), /* CFR016 */ /* check that /command_log and script file have not been set to the same thing */ ( perform_script_file(CMDLOG), write('Aborted: Cannot have command_log and execute qualifiers as the same filename.'), nl, write('Consider renaming the execute log.'), nl, close_all_streams, halt ; true ), /* use filename.cmd as command log */ asserta(command_log_filename(CMDLOG)) /* CFR016 */ ), /* CFR016 */ !. /* Notify user that file does not have write permissions */ not_writeable_warning(EXISTINGFILE) :- tell(user), nl, write('!!! ERROR trying to delete '), printq(EXISTINGFILE), nl, nl, write('File is not writeable.'), close_all_streams, halt. /* Notify user that file already exists, and ask for solution */ consult_user(FILENAME, EXISTINGFILE) :- tell(user), nl, write('!!! 
ERROR trying to move '), printq(FILENAME), write(' to '), printq(EXISTINGFILE), nl, nl, write('File already exists.'), !, nl, no_echo_read_answer('Do you want to delete this file?', ANSWER), ( ANSWER = yes, ( \+ file_can_be_written(EXISTINGFILE), not_writeable_warning(EXISTINGFILE) ; delete_file(EXISTINGFILE) ) ; ANSWER = no, close_all_streams, halt ). create_backups :- overwrite_warning(WARN), /* create backup .cmd file */ command_log_filename(COMMANDLOG), ( file_exists(COMMANDLOG), name(COMMANDLOG, CTEMP), append(CTEMP, "-", BACKUPCMD), name(CMD2, BACKUPCMD), ( file_exists(CMD2), ( WARN = on, consult_user(COMMANDLOG, CMD2) ; WARN = off, ( \+ file_can_be_written(CMD2), not_writeable_warning(CMD2) ; delete_file(CMD2) ) ) ; \+ file_exists(CMD2) ), rename_file(COMMANDLOG, CMD2) ; \+ file_exists(COMMANDLOG) ), /* create backup .plg file */ logfile_name(PROOFLOG), ( file_exists(PROOFLOG), name(PROOFLOG, PTEMP), append(PTEMP, "-", BACKUPPLG), name(PLG2, BACKUPPLG), ( file_exists(PLG2), ( WARN = on, consult_user(PROOFLOG, PLG2) ; WARN = off, ( \+ file_can_be_written(PLG2), not_writeable_warning(PLG2) ; delete_file(PLG2) ) ) ; \+ file_exists(PLG2) ), rename_file(PROOFLOG, PLG2) ; \+ file_exists(PROOFLOG) ). checkfilesexist(ordinary) :- vcgfile_name(VCG), ( file_exists_and_is_readable(VCG) ; tell_off(vcg) /* CFR048 */ ), !, fdlfile_name(FDL), ( file_exists_and_is_readable(FDL) ; tell_off(fdl) /* CFR048 */ ), !. checkfilesexist(resume) :- csvfile_name(CSV), ( file_exists_and_is_readable(CSV) ; tell_off(csv) /* CFR048 */ ), !. tell_off(TYPE) :- nl, write('No .'), print(TYPE), write(' file of this name exists.'), nl, nl, write('List of .'), print(TYPE), write(' files in current region:'), nl, list_files_with_extension(TYPE), fail. /*** LOAD_DECS(FILENAME) -- load the declarations file ***/ load_decs(FILENAME) :- assertz(current_record_field_number(1)), see(FILENAME), repeat, get_fdl_declaration(DECLARATION), process_fdl_dec(DECLARATION), /* UNTIL */ final_declaration(DECLARATION), seen, !. final_declaration([start|_]). final_declaration([end|_]). /* CFR037 */ final_declaration([W|_]) :- eof_char(EOF), name(W, [EOF]). /*** IS_IN ***/ is_in(X,[X|_]) :- !. is_in(X,[_|Y]) :- is_in(X,Y), !. /*** GET_FDL_DECLARATION(TOKEN_LIST) ***/ get_fdl_declaration([WORD|REST]) :- eof_char(EOF), ( retract(previous_character(CHAR)) ; fget0(CHAR) ), !, ( CHAR=EOF, WORD=end_of_file, REST=[] ; read_word(CHAR, WORD, NEXT_CHAR), read_rest(WORD, NEXT_CHAR, REST) ), !. /*** READ_WORD(CHAR, WORD, NEXT_CHAR) ***/ read_word(CHAR, WORD, NEXT_CHAR) :- is_a_single_character(CHAR), !, name(WORD, [CHAR]), fget0(NEXT_CHAR), !. read_word(CHAR, WORD, NEXT_CHAR) :- is_a_word_split_char(CHAR), fget0(NEW_CHAR), read_word(NEW_CHAR, WORD, NEXT_CHAR), !. read_word(CHAR, WORD, NEXT_CHAR) :- make_lower_case(CHAR, CONVERTED_CHAR), fget0(NEW_CHAR), restword(NEW_CHAR, CHAR_LIST, NEXT_CHAR), truncate_list(24, [CONVERTED_CHAR|CHAR_LIST], CHOPPED_LIST), name(WORD, CHOPPED_LIST), !. /*** RESTWORD(CHAR, CHAR_LIST, NEXT_CHAR) ***/ restword(CHAR, [], CHAR) :- is_a_word_split_char(CHAR), !. restword(CHAR, [CONVERTED_CHAR|CHAR_LIST], NEXT_CHAR) :- make_lower_case(CHAR, CONVERTED_CHAR), fget0(NEW_CHAR), restword(NEW_CHAR, CHAR_LIST, NEXT_CHAR), !. /*** IS_A_SINGLE_CHARACTER(CHAR) ***/ is_a_single_character(44). /* , */ is_a_single_character(58). /* : */ is_a_single_character(59). /* ; */ is_a_single_character(40). /* ( */ is_a_single_character(41). /* ) */ is_a_single_character(46). /* . */ is_a_single_character(61). /* = */ is_a_single_character(91). 
/* [ */ is_a_single_character(93). /* ] */ is_a_single_character(EOF) :- eof_char(EOF). /* EOF */ /*** IS_A_WORD_SPLIT_CHAR(CHAR) ***/ is_a_word_split_char(9). /* TAB */ is_a_word_split_char(32). /* ' ' */ is_a_word_split_char(46). /* . */ is_a_word_split_char(44). /* , */ is_a_word_split_char(40). /* ( */ is_a_word_split_char(41). /* ) */ is_a_word_split_char(58). /* : */ is_a_word_split_char(59). /* ; */ is_a_word_split_char(61). /* = */ is_a_word_split_char(91). /* [ */ is_a_word_split_char(93). /* ] */ is_a_word_split_char(EOF) :- eof_char(EOF). /* EOF */ is_a_word_split_char(EOL) :- eol_char(EOL). /* EOL */ /*** LASTWORD(WORD) ***/ lastword(';'). lastword(start). lastword(W) :- eof_char(EOF), name(W,EOF). /*** MAKE_LOWER_CASE(CHAR, CONVERTED_CHAR) ***/ make_lower_case(CHAR, CONVERTED_CHAR) :- CHAR>64, CHAR<91, CONVERTED_CHAR is CHAR+32, !. make_lower_case(CHAR, CHAR). /*** READ_REST(WORD, NEXT_CHAR, REST) ***/ read_rest(WORD, CHAR, []) :- lastword(WORD), !, asserta(previous_character(CHAR)). read_rest(_WORD, NEXT_CHAR, [NEXT_WORD|REST_OF_STMT]) :- read_word(NEXT_CHAR, NEXT_WORD, NEW_CHAR), read_rest(NEXT_WORD, NEW_CHAR, REST_OF_STMT), !. /*** TRUNCATE_LIST(LIMIT, OLDLIST, NEWLIST) -- return NEWLIST = (OLDLIST up to LIMIT chars) ***/ truncate_list(_LIM, OLD, OLD) :- spark_enabled, !. /* CFR035 */ truncate_list(LIM, OLD, NEW) :- length(OLD, LEN), LEN=<LIM, NEW = OLD, !. truncate_list(LIM, OLD, NEW) :- length(OLD, LEN), LEN>LIM, NUM_TO_REMOVE is LEN-LIM, chop_off(NUM_TO_REMOVE, OLD, NEW), !. /*** CHOP_OFF(NUMBER, OLDLIST, NEWLIST) -- chop NUMBER elements off tail ***/ chop_off(1,OLD,NEW) :- append(NEW,[_],OLD), !. chop_off(2,OLD,NEW) :- append(NEW,[_,_],OLD), !. chop_off(3,OLD,NEW) :- append(NEW,[_,_,_],OLD), !. chop_off(4,OLD,NEW) :- append(NEW,[_,_,_,_],OLD), !. chop_off(5,OLD,NEW) :- append(NEW,[_,_,_,_,_],OLD), !. chop_off(6,OLD,NEW) :- append(NEW,[_,_,_,_,_,_],OLD), !. chop_off(7,OLD,NEW) :- append(NEW,[_,_,_,_,_,_,_],OLD), !. chop_off(8,OLD,NEW) :- append(NEW,[_,_,_,_,_,_,_,_],OLD), !. chop_off(9,OLD,NEW) :- append(NEW,[_,_,_,_,_,_,_,_,_],OLD), !. chop_off(N,_,_) :- N=<0, !, fail. chop_off(N,OLD,NEW) :- N1 is N-9, chop_off(N1,OLD,NEW1), chop_off(9,NEW1,NEW), !. /*** PROCESS_FDL_DEC(DECLARATION) ***/ process_fdl_dec([title | TITLEWORDS]) :- make_title_name(TITLEWORDS, TITLE), asserta(fdl_file_title(TITLE)), !. process_fdl_dec([proof | DECLARATION]) :- process_fdl_dec(DECLARATION), !. process_fdl_dec([var, VAR, ':', TYPE, ';']) :- find_core_type(TYPE, CORE_TYPE), maybe_add(var_const(VAR, CORE_TYPE, v)), save_used_identifier(VAR, var_const), !. process_fdl_dec([const, CONST, ':', TYPE, '=' | _VALUE]) :- find_core_type(TYPE, CORE_TYPE), maybe_add(var_const(CONST, CORE_TYPE, c)), save_used_identifier(CONST, var_const), !. process_fdl_dec([type, TYPE, '=', LOWER, '.', '.', UPPER, ';']) :- handle_negatives_etc(LOWER, LOW), /* CFR056 */ handle_negatives_etc(UPPER, UPP), /* CFR056 */ checktype(LOW, RANGE_TYPE), /* CFR056 */ checktype(UPP, RANGE_TYPE), /* CFR056 */ !, maybe_add(type_alias(TYPE, RANGE_TYPE)), save_used_identifier(TYPE, type), !. process_fdl_dec([type, TYPE, '=', '-', LOWER, '.', '.', UPPER, ';']) :- /*056*/ handle_negatives_etc(UPPER, UPP), /* CFR056 */ checktype(-LOWER, RANGE_TYPE), /* CFR056 */ checktype(UPP, RANGE_TYPE), /* CFR056 */ !, /* CFR056 */ maybe_add(type_alias(TYPE, RANGE_TYPE)), /* CFR056 */ save_used_identifier(TYPE, type), /* CFR056 */ !.
/* CFR056 */ process_fdl_dec([type, TYPE, '=', '+', LOWER, '.', '.', UPPER, ';']) :- /*056*/ handle_negatives_etc(UPPER, UPP), /* CFR056 */ checktype(LOWER, RANGE_TYPE), /* CFR056 */ checktype(UPP, RANGE_TYPE), /* CFR056 */ !, /* CFR056 */ maybe_add(type_alias(TYPE, RANGE_TYPE)), /* CFR056 */ save_used_identifier(TYPE, type), /* CFR056 */ !. /* CFR056 */ process_fdl_dec([type, TYPE, '=', LOWER, '.', '.', '-', UPPER, ';']) :- /*056*/ handle_negatives_etc(LOWER, LOW), /* CFR056 */ checktype(LOW, RANGE_TYPE), /* CFR056 */ checktype(-UPPER, RANGE_TYPE), /* CFR056 */ !, /* CFR056 */ maybe_add(type_alias(TYPE, RANGE_TYPE)), /* CFR056 */ save_used_identifier(TYPE, type), /* CFR056 */ !. /* CFR056 */ process_fdl_dec([type, TYPE, '=', LOWER, '.', '.', '+', UPPER, ';']) :- /*056*/ handle_negatives_etc(LOWER, LOW), /* CFR056 */ checktype(LOW, RANGE_TYPE), /* CFR056 */ checktype(-UPPER, RANGE_TYPE), /* CFR056 */ !, /* CFR056 */ maybe_add(type_alias(TYPE, RANGE_TYPE)), /* CFR056 */ save_used_identifier(TYPE, type), /* CFR056 */ !. /* CFR056 */ process_fdl_dec([type, TYPE, '=', '-', LOWER, '.', '.', '-', UPPER, ';']) :- /*056*/ checktype(-LOWER, RANGE_TYPE), /* CFR056 */ checktype(-UPPER, RANGE_TYPE), /* CFR056 */ !, /* CFR056 */ maybe_add(type_alias(TYPE, RANGE_TYPE)), /* CFR056 */ save_used_identifier(TYPE, type), /* CFR056 */ !. /* CFR056 */ process_fdl_dec([type, TYPE, '=', '-', LOWER, '.', '.', '+', UPPER, ';']) :- /*056*/ checktype(-LOWER, RANGE_TYPE), /* CFR056 */ checktype(UPPER, RANGE_TYPE), /* CFR056 */ !, /* CFR056 */ maybe_add(type_alias(TYPE, RANGE_TYPE)), /* CFR056 */ save_used_identifier(TYPE, type), /* CFR056 */ !. /* CFR056 */ process_fdl_dec([type, TYPE, '=', '+', LOWER, '.', '.', '-', UPPER, ';']) :- /*056*/ checktype(LOWER, RANGE_TYPE), /* CFR056 */ checktype(-UPPER, RANGE_TYPE), /* CFR056 */ !, /* CFR056 */ maybe_add(type_alias(TYPE, RANGE_TYPE)), /* CFR056 */ save_used_identifier(TYPE, type), /* CFR056 */ !. /* CFR056 */ process_fdl_dec([type, TYPE, '=', '+', LOWER, '.', '.', '+', UPPER, ';']) :- /*056*/ checktype(LOWER, RANGE_TYPE), /* CFR056 */ checktype(UPPER, RANGE_TYPE), /* CFR056 */ !, /* CFR056 */ maybe_add(type_alias(TYPE, RANGE_TYPE)), /* CFR056 */ save_used_identifier(TYPE, type), /* CFR056 */ !. /* CFR056 */ process_fdl_dec([type, TYPE, '=', pending, ';']) :- maybe_add(type(TYPE, abstract)), save_used_identifier(TYPE, type), !. process_fdl_dec([type, TYPE, '=', ALIAS, ';']) :- maybe_add(type_alias(TYPE, ALIAS)), save_used_identifier(TYPE, type), !. process_fdl_dec([type, TYPE, '=', array, '[' | REST]) :- process_array_list(REST, INDEX_TYPES, ELEM_TYPE), ( type(OTHER_TYPE, array(INDEX_TYPES, ELEM_TYPE)), TYPE \= OTHER_TYPE, maybe_add(type_alias(TYPE, OTHER_TYPE)) ; maybe_add(type(TYPE, array(INDEX_TYPES, ELEM_TYPE))) ), !, name(TYPE, TL), append("mk__", TL, MKFL), name(MK__FUNCTION, MKFL), maybe_add(mk__function_name(MK__FUNCTION, TYPE, array)), !, save_used_identifier(TYPE, type), !. process_fdl_dec([type, TYPE, '=', '(' | REST]) :- process_enumeration_list(REST, ENUMERATION), maybe_add(type(TYPE, enumerated)), save_used_identifier(TYPE, type), maybe_add(enumeration(TYPE, ENUMERATION)), save_enumeration_constants(TYPE, ENUMERATION), !. process_fdl_dec([type, TYPE, '=', record | REST]) :- process_record_fields(REST, FIELD_LIST), assertz(type(TYPE, record(FIELD_LIST))), save_used_identifier(TYPE, type), save_field_list(TYPE, FIELD_LIST), !, name(TYPE, TL), append("mk__", TL, MKFL), name(MK__FUNCTION, MKFL), maybe_add(mk__function_name(MK__FUNCTION, TYPE, record)), !. 
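% Illustrative example only (not part of the original sources): for a
% hypothetical declaration "type pair = record x : integer; y : integer end;"
% the record clause above asserts type(pair, record([[x, integer], [y, integer]]))
% and registers mk__pair as its aggregate constructor via
% mk__function_name(mk__pair, pair, record).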
process_fdl_dec([type, TYPE, '=', sequence, of, ELEMENT_TYPE, ';']) :- op(20,fy,TYPE), find_core_type(ELEMENT_TYPE, ELEM_TYPE), !, maybe_add(type(TYPE, sequence(ELEM_TYPE))), save_used_identifier(TYPE, type), !. process_fdl_dec([type, TYPE, '=', set, of, ELEMENT_TYPE, ';']) :- op(20,fy,TYPE), find_core_type(ELEMENT_TYPE, ELEM_TYPE), !, maybe_add(type(TYPE, set(ELEM_TYPE))), save_used_identifier(TYPE, type), !. process_fdl_dec([function, FUNCTION, '(' | REST]) :- process_function_list(REST, ARG_TYPES, RESULT_TYPE), maybe_add(function(FUNCTION, ARG_TYPES, RESULT_TYPE)), save_used_identifier(FUNCTION, function), save_function_template(FUNCTION, ARG_TYPES), !. process_fdl_dec([function, CONST, ':', TYPE, ';']) :- find_core_type(TYPE, CORE_TYPE), maybe_add(var_const(CONST, CORE_TYPE, c)), save_used_identifier(CONST, var_const), !. process_fdl_dec([var | REST]) :- process_var_list(REST, _TYPE), !. process_fdl_dec([start | _]) :- !. process_fdl_dec([end | _]) :- !. /* CFR046 */ process_fdl_dec([pre | _]) :- !. process_fdl_dec([post | _]) :- !. process_fdl_dec([derives | _]) :- !. /*** MAYBE_ADD(FACT) -- add fact if not already known ***/ maybe_add(X) :- call(X), !. maybe_add(X) :- assertz(X), !. handle_negatives_etc(X, NEWX) :- /* CFR056 */ atom(X), /* CFR056 */ name(X, XL), /* CFR056 */ ( /* CFR056 */ XL = [45|REST], /* "-" */ /* CFR056 */ name(ID, REST), /* CFR056 */ NEWX = (- ID) /* CFR056 */ ; /* CFR056 */ XL = [43|REST], /* "+" */ /* CFR056 */ name(NEWX, REST) /* CFR056 */ ), /* CFR056 */ !. /* CFR056 */ handle_negatives_etc(X, X) :- !. /* CFR056 */ /*** find_core_type(TYPE, CORE_TYPE) - return core alias or self in none ***/ find_core_type(TYPE, CORE_TYPE) :- type_alias(TYPE, CORE_TYPE), !. find_core_type(TYPE, TYPE) :- !. /*** save_enumeration_constants(TYPE, CONSTANTS_LIST) ***/ save_enumeration_constants(TYPE, [HEAD|TAIL]) :- maybe_add(var_const(HEAD, TYPE, c)), save_used_identifier(HEAD, var_const), !, save_enumeration_constants(TYPE, TAIL). save_enumeration_constants(_TYPE, []) :- !. /*** PROCESS_ARRAY_LIST(REST, INDEX_TYPES, ELEM_TYPE) ***/ process_array_list([INDEX, ']', of, ELEM_TYPE, ';'], [IND], EL_TYPE) :- find_core_type(INDEX, IND), find_core_type(ELEM_TYPE, EL_TYPE), !. process_array_list([INDEX, ',' | REST], [IND|OTHER_INDS], ELEM_TYPE) :- find_core_type(INDEX, IND), process_array_list(REST, OTHER_INDS, ELEM_TYPE), !. /*** PROCESS_ENUMERATION_LIST(REST, ENUMERATION) ***/ process_enumeration_list([CONST, ')', ';'], [CONST]) :- !. process_enumeration_list([CONST, ',' | REST], [CONST|OTHER_CONSTS]) :- process_enumeration_list(REST, OTHER_CONSTS), !. /*** PROCESS_RECORD_FIELDS(REST, FIELD_LIST) ***/ process_record_fields([FIELD1, ',', FIELD2 | REST], FIELD_LIST) :- /* CFR033 */ !, /* CFR033 */ rewrite_record_field_list([FIELD1, ',', FIELD2| REST], NEW_LIST), !, process_record_fields(NEW_LIST, FIELD_LIST), /* CFR033 */ !. /* CFR033 */ process_record_fields([FIELD_TAG, ':', TYPE, end, ';'], [[FIELD_TAG, CTYPE]]) :- find_core_type(TYPE, CTYPE), !. process_record_fields([FIELD_TAG, ':', TYPE, ';'], [[FIELD_TAG, CTYPE]|REST]) :- find_core_type(TYPE, CTYPE), get_fdl_declaration(NEXT_FIELD_DEC), process_record_fields(NEXT_FIELD_DEC, REST), !. process_record_fields([FIELD_TAG, ':', TYPE, ';' | MORE], /* CFR033 */ [[FIELD_TAG, CTYPE] | REST]) :- /* CFR033 */ MORE \= [], /* CFR033 */ !, /* CFR033 */ find_core_type(TYPE, CTYPE), /* CFR033 */ process_record_fields(MORE, REST), /* CFR033 */ !. 
/* CFR033 */ /* rewrite_record_field_list(TOKEN_LIST, NEW_TOKEN_LIST) */ /* CFR033 */ rewrite_record_field_list([F1,',',F2,':',T|REST], /* CFR033 */ [F1,':',T,';',F2,':',T|REST]) :- !. /* CFR033 */ rewrite_record_field_list([F1,',',F2|REST], /* CFR033 */ [F1,':',T,';',F2,':',T|TAIL]) :- /* CFR033 */ !, /* CFR033 */ rewrite_record_field_list([F2|REST],[F2,':',T|TAIL]), /* CFR033 */ !. /* CFR033 */ /*** PROCESS_FUNCTION_LIST(REST, ARG_TYPES, RESULT_TYPE) ***/ process_function_list([ARG_TYPE, ')', ':', RESULT_TYPE, ';'], [CORE_ARG_TYPE], CORE_RESULT_TYPE) :- find_core_type(ARG_TYPE, CORE_ARG_TYPE), find_core_type(RESULT_TYPE, CORE_RESULT_TYPE), !. process_function_list([ARG_TYPE, ','|REST], [C_ARG_TYPE|OTHERS], RESULT_TYPE) :- find_core_type(ARG_TYPE, C_ARG_TYPE), !, process_function_list(REST, OTHERS, RESULT_TYPE), !. /*** PROCESS_VAR_LIST(REST) ***/ process_var_list([VAR, ':', TYPE, ';'], CORE_TYPE) :- find_core_type(TYPE, CORE_TYPE), maybe_add(var_const(VAR, CORE_TYPE, v)), save_used_identifier(VAR, var_const), !. process_var_list([VAR, ',' | REST], TYPE) :- process_var_list(REST, TYPE), maybe_add(var_const(VAR, TYPE, v)), save_used_identifier(VAR, var_const), !. save_used_identifier(IDENTIFIER, record_function) :- /* CFR029 */ used_ident(IDENTIFIER, record_function), /* CFR029 */ !. /* CFR029 */ save_used_identifier(IDENTIFIER, _CLASS) :- used_ident(IDENTIFIER, _), !, write('!!! FATAL-ERROR: Identifier declared multiple times - '), print(IDENTIFIER), nl, !, maybe_halt. save_used_identifier(IDENTIFIER, _CLASS) :- built_in_ident(IDENTIFIER), !, write('!!! FATAL-ERROR: Identifier reserved or already predeclared - '), print(IDENTIFIER), nl, !, maybe_halt. save_used_identifier(IDENTIFIER, CLASS) :- assertz(used_ident(IDENTIFIER, CLASS)), !. maybe_halt :- vc(_, _), /* so we're interactive... */ !. /* just have to continue!! */ maybe_halt :- write('CANNOT CONTINUE: Proof session terminated.'), nl, !, close_all_streams, halt. built_in_ident(update). built_in_ident(element). built_in_ident(set). built_in_ident(succ). built_in_ident(pred). built_in_ident(first). built_in_ident(last). built_in_ident(nonfirst). built_in_ident(nonlast). built_in_ident(abs). built_in_ident(sqr). built_in_ident(odd). built_in_ident(div). built_in_ident(mod). built_in_ident(subset_of). built_in_ident(strict_subset_of). built_in_ident(true). built_in_ident(false). built_in_ident(integer). built_in_ident(boolean). built_in_ident(real). built_in_ident(in). built_in_ident(not_in). built_in_ident(and). built_in_ident(or). built_in_ident(not). /*** make_title_name(TITLEWORDS, TITLE) -- build VC title component ***/ make_title_name([TITLE, ';'], TITLE) :- !. make_title_name([';'], vc) :- !. make_title_name([], vc) :- !. make_title_name([WORD|REST], TITLE) :- make_title_name(REST, RESTNAME), name(WORD, WL), name(RESTNAME, RL), append(WL,[95|RL], TL), name(TITLE, TL), !. /*** FGET0(CHAR) - get & echo next non-comment character ***/ fget0(CHAR) :- eget0(CH), ( CH\=123, /* { */ CHAR=CH ; CH=123, skip_to_end_of_comment, fget0(CHAR) ), !. /*** SKIP_TO_END_OF_COMMENT - find '}' at end of comment ***/ skip_to_end_of_comment :- repeat, eget0(CHAR), /* UNTIL */ CHAR = 125, /* } */ !. increment_current_record_field_number :- retract(current_record_field_number(N)), M is N+1, asserta(current_record_field_number(M)), !. 
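% Illustrative examples only (not part of the original sources):
%   ?- rewrite_record_field_list([x, ',', y, ':', integer, end, ';'], L).
%   L = [x, ':', integer, ';', y, ':', integer, end, ';']
% i.e. a field list "x, y : integer" is expanded so that each field gets its
% own tag/type pair, and
%   ?- make_title_name([procedure, main, ';'], T).
%   T = procedure_main
% which is the title later combined with a number to name each VC.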
/*** SAVE_FIELD_LIST(TYPE, FIELDS_LIST) -- create functions and store ***/ save_field_list(TID,[[FD,FT]]) :- name(FD,L), truncate_list(20,L,L1), append("upf_",L1,UL), name(UPF,UL), assertz(function(UPF,[TID,FT],TID)), save_used_identifier(UPF, record_function), /* CFR029 */ UFUN=..[UPF,A1,A2], current_record_field_number(CRFN), add_new_record_function(UFUN,CRFN,update,FD,[A1,A2],TID), /* CFR029 */ append("fld_",L1,CL), name(FLD,CL), assertz(function(FLD,[TID],FT)), save_used_identifier(FLD, record_function), /* CFR029 */ CFUN=..[FLD,A1], add_new_record_function(CFUN,CRFN,access,FD,[A1],TID), /* CFR029 */ !. save_field_list(TID,[[FD,FT]|FL]) :- name(FD,L), truncate_list(20,L,L1), append("upf_",L1,UL), name(UPF,UL), assertz(function(UPF,[TID,FT],TID)), save_used_identifier(UPF, record_function), /* CFR029 */ UFUN=..[UPF,A1,A2], current_record_field_number(CRFN), add_new_record_function(UFUN,CRFN,update,FD,[A1,A2],TID), /* CFR029 */ append("fld_",L1,CL), name(FLD,CL), assertz(function(FLD,[TID],FT)), save_used_identifier(FLD, record_function), /* CFR029 */ CFUN=..[FLD,A1], add_new_record_function(CFUN,CRFN,access,FD,[A1],TID), /* CFR029 */ !, increment_current_record_field_number, !, save_field_list(TID,FL), !. /*** ADD_NEW_RECORD_FUNCTION(FUNCTION, START) -- save record function ***/ add_new_record_function(FUNCTION, START, MODE, FIELD, ARGS, TYPE) :- /*CFR029*/ record_function(START,_,MODE,_,_,TYPE), /* CFR029 */ !, write('Warning - record field number already used.'), !, NEWSTART is START+1, add_new_record_function(FUNCTION, NEWSTART, MODE, FIELD, ARGS, TYPE), /*29*/ !. add_new_record_function(FUNCTION, START, MODE, FIELD, ARGS, TYPE) :- /*CFR029*/ assertz(record_function(START,FUNCTION,MODE, FIELD, ARGS, TYPE)), /*CFR029*/ !. save_function_template(FUNCTION, ARG_TYPES) :- length(ARG_TYPES, LENGTH), form_function_var_list(LENGTH, VAR_LIST), FUNCTION_CALL =.. [FUNCTION|VAR_LIST], assertz(function_template(FUNCTION_CALL, VAR_LIST, FUNCTION)), !. form_function_var_list(1, [_]) :- !. form_function_var_list(2, [_,_]) :- !. form_function_var_list(3, [_,_,_]) :- !. form_function_var_list(4, [_,_,_,_]) :- !. form_function_var_list(5, [_,_,_,_,_]) :- !. form_function_var_list(6, [_,_,_,_,_,_]) :- !. form_function_var_list(7, [_,_,_,_,_,_,_]) :- !. form_function_var_list(8, [_,_,_,_,_,_,_,_]) :- !. form_function_var_list(9, [_,_,_,_,_,_,_,_,_]) :- !. form_function_var_list(10, [_,_,_,_,_,_,_,_,_,_]) :- !. form_function_var_list(N, [_,_,_,_,_|X]) :- N>10, N1 is N-5, !, form_function_var_list(N1, X), !. form_function_var_list(0, []) :- !. /* Shouldn't ever get here anyway! */ /*** LOAD_VCS(FILENAME) -- load the VCs from the modified .VCG file ***/ load_vcs(VCGFILE) :- see(VCGFILE), ( echo(off) ; nl, nl ), retractall(current_vc_no(_)), asserta(current_vc_no(0)), /* CFR034 */ skip_initial_crap, read_vcs, seen, !. /*** SCAN_FILE_HEADER_TO_SEE_IF_SPARK(FILENAME) -- look for keyword SPARK ***/ scan_file_header_to_see_if_spark(VCGFILE) :- /* CFR035 */ see(VCGFILE), /* CFR035 */ mini_skip_initial_crap, /* CFR035 */ seen, /* CFR035 */ !. /*** read_vcs -- repeatedly get the next VC ***/ read_vcs :- eof_char(EOF), repeat, increment_vc_number, /* CFR034 */ get_next_vc(C), /* until */ C==EOF, /* CFR034 */ current_vc_no(N), M is N-1, make_numbers_list(1,M,LIST), remove_true_vcs_from_numbers_list([LIST], REVISED_LIST), /* CFR004 */ assertz(vcs_to_prove(REVISED_LIST)), /* CFR004 */ !. 
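% Illustrative example only (not part of the original sources): for a
% hypothetical record type point with a field x of type integer,
% save_field_list above asserts function(fld_x, [point], integer) and
% function(upf_x, [point, integer], point), together with matching
% record_function/6 entries, so that field access and field update appear as
% ordinary FDL functions in proofs.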
/* increment_vc_number */ /* CFR034 */ increment_vc_number :- /* CFR034 */ retract(current_vc_no(N)), /* CFR034 */ M is N+1, /* CFR034 */ asserta(current_vc_no(M)), /* CFR034 */ !. /* CFR034 */ /*** get_next_vc(NEXT_CHAR) -- get & save next vc and return next character ***/ get_next_vc(NEXT_CHAR) :- eof_char(EOF), repeat, /* CFR034 */ read_and_echo_vc_line(L), /* CFR034 */ /* until */ ( is_terminator_line(L, NEXT_CHAR) ; /* CFR034 */ is_vc_line(L) ), /* CFR034 */ !, /* CFR034 */ current_vc_no(N), fdl_file_title(TITLE), makename(TITLE,N,VCNAME), !, /* CFR034 */ ( /* CFR034 */ NEXT_CHAR == EOF /* CFR034 */ ; /* CFR034 */ read_verification_condition(VCNAME) /* CFR034 */ ), /* CFR034 */ !. /* CFR034 */ /* read_and_echo_vc_line(L) */ /* CFR034 */ read_and_echo_vc_line(L) :- /* CFR034 */ eof_char(EOF), eol_char(EOL), eget0(C), /* CFR034 */ !, /* CFR034 */ ( /* CFR034 */ C = EOL, /* CFR034 */ L = [] /* CFR034 */ ; /* CFR034 */ C = EOF, /* CFR034 */ L = [EOF] /* CFR034 */ ; /* CFR034 */ L = [C|REST], /* CFR034 */ !, /* CFR034 */ read_and_echo_vc_line(REST) /* CFR034 */ ), /* CFR034 */ !. /* CFR034 */ /* read_vc_line_noecho(L) */ /* CFR035 */ read_vc_line_noecho(L) :- /* CFR035 */ eof_char(EOF), eol_char(EOL), get_code(C), /* CFR035 */ !, /* CFR035 */ ( /* CFR035 */ C = EOL, /* CFR035 */ L = [] /* CFR035 */ ; /* CFR035 */ C = EOF, /* CFR035 */ L = [EOF] /* CFR035 */ ; /* CFR035 */ L = [C|REST], /* CFR035 */ !, /* CFR035 */ read_vc_line_noecho(REST) /* CFR035 */ ), /* CFR035 */ !. /* CFR035 */ /* is_terminator_line(L, EOF) */ /* CFR034 */ is_terminator_line(L, EOF) :- /* CFR034 */ eof_char(EOF), is_in(EOF, L), /* CFR034 */ !. /* CFR034 */ /* is_vc_line(L) */ /* CFR034 */ is_vc_line(L) :- /* CFR034 */ triple_append(FRONT_PART, "_", DIGITS_AND_DOT, L), /* CFR034 */ is_digits_and_dot(DIGITS_AND_DOT), /* CFR034 */ is_ok_front_part_of_vc_line(FRONT_PART), /* CFR034 */ !. /* CFR034 */ /* is_digits_and_dot(DIGITS_AND_DOT) */ /* CFR034 */ is_digits_and_dot(DIGITS_AND_DOT) :- /* CFR034 */ gen_append(DIGITS, ".", DIGITS_AND_DOT), /* CFR034 */ are_all_digits(DIGITS). /* CFR034 */ /* are_all_digits(DIGIT_LIST) */ /* CFR034 */ are_all_digits([H|T]) :- /* CFR034 */ 48 =< H, H =< 57, !, are_all_digits(T). /* CFR034 */ are_all_digits([]) :- !. /* CFR034 */ /* is_ok_front_part_of_vc_line(LIST_OF_CHARS) */ /* CFR034 */ is_ok_front_part_of_vc_line([H|_T]) :- /* CFR034 */ ( /* CFR034 */ 48 =< H, H =< 57 /* CFR034 */ ; /* CFR034 */ 65 =< H, H =< 90 /* CFR034 */ ; /* CFR034 */ 97 =< H, H =< 122 /* CFR034 */ ; /* CFR034 */ [H] = "_" /* CFR034 */ ), /* CFR034 */ !. /* CFR034 */ is_ok_front_part_of_vc_line([]) :- !. /* CFR034 */ /* read_verification_condition(VCNAME) */ /* CFR034 */ read_verification_condition(VCNAME) :- /* CFR034 */ repeat, /* CFR034 */ eget0(CHAR), /* CFR034 */ process_rest_of_component(VCNAME, CHAR, FINISHED), /* CFR034 */ /* until */ FINISHED, /* CFR034 */ ( /* CFR034 */ is_true_vc(_, VCNAME) /* CFR034 */ ; /* CFR034 */ assertz(is_vc(VCNAME)) /* CFR034 */ ), /* CFR034 */ !. 
/* CFR034 */ process_rest_of_component(VCNAME, CHAR, FINISHED) :- eol_char(EOL), /* CFR034 */ ( ( CHAR = 32 ; CHAR = EOL ), /* CFR034 */ eget0(NEXT_CHAR), /* CFR034 */ ( /* CFR052 */ NEXT_CHAR = EOL /* CFR034,052 */ ; /* CFR052 */ CHAR = EOL, /* CFR052 */ NEXT_CHAR = 32 /* CFR052 */ ), /* CFR052 */ !, /* CFR034 */ FINISHED = true /* CFR034 */ ; /* CFR034 */ CHAR=42, /* "*" */ find_char(EOL), /* CFR034 */ find_char(EOL), /* CFR034 */ current_vc_no(N), /* CFR004,034 */ assertz(is_true_vc(N, VCNAME)), /* CFR004 */ FINISHED = true /* CFR034 */ ; CHAR=33, /* "!" */ find_char(EOL), /* CFR034 */ find_char(EOL), /* CFR034 */ write('!!! WARNING: UNPROVEABLE VC! Suggest you take corrective action.'), nl, assertz(vc(VCNAME,conc(1,false))), FINISHED = true /* CFR034 */ ; eof_char(EOF), CHAR = EOF, /* shouldn't, but just in case! */ /* CFR034 */ FINISHED = true /* CFR034 */ ; /* CFR034 */ FINISHED = fail, /* CFR034 */ ( /* CFR034 */ ( /* CFR034 */ CHAR=72, /* "H" */ /* CFR034 */ F=hyp /* CFR034 */ ; /* CFR034 */ CHAR=67, /* "C" */ /* CFR034 */ F=conc /* CFR034 */ ), /* CFR034 */ read_component_number(N), /* CFR034 */ eread(FORM), /* CFR034 */ process_formula(FORM,FORMULA), /* CFR034 */ COMPONENT=..[F,N,FORMULA], /* CFR034 */ assertz(vc(VCNAME,COMPONENT)) /* CFR034 */ ; /* CFR034 */ CHAR=32, /* " " not followed by */ /* CFR034 */ find_char(EOL) /* CFR034 */ ; /* CFR034 */ true /* CFR034 */ ) /* CFR034 */ ), !. /*** process_formula(OLD,NEW) -- filter & simplify OLD to get NEW ***/ process_formula(OLD_FORMULA, NEW_FORMULA) :- ( novars(OLD_FORMULA) ; nl, write('*** ABORTED: Prolog variables occur in formula.'), nl, close_all_streams, halt ), !, ( restructure_formula(OLD_FORMULA, INTERMEDIATE) ; nl, write('*** ABORTED: could not restructure above formula.'), nl, close_all_streams, halt ), !, ( typechecking_during_load(on), ( checktype(INTERMEDIATE, boolean) ; write('*** ABORTED: above formula did not typecheck as boolean.'), nl, !, close_all_streams, halt ) ; true ), !, ( simplify_during_load(on), ( simplify(INTERMEDIATE, NEW_FORMULA) ; nl, write('!!! WARNING: Could not simplify above formula properly.'), nl, NEW_FORMULA=INTERMEDIATE ) ; NEW_FORMULA=INTERMEDIATE ), !. /*** restructure_formula(OLD,NEW) -- no ~, set & seq prefix changes ***/ % In the simplifier, the restructured quantified expression adopts the % discovered core type. This minor simplification has not been adopted in % the checker, to reduce the risk of breaking existing proof scripts. restructure_formula(for_all(V:T, P), for_all(V:T, NewP)) :- find_core_type(T, CT), ( % Required quantified variable already exists. % The variable is reused. var_const(V, CT, _), !, restructure_formula(P, NewP) ; % Required quantified variable does not already exist. It is % introduced, to support restructuring, and removed afterwards. asserta(var_const(V, CT, temp)), ( restructure_formula(P, NewP), retract(var_const(V, CT, temp)) ; retract(var_const(V, CT, temp)), !, fail ) ), !. % In the simplifier, the restructured quantified expression adopts the % discovered core type. This minor simplification has not been adopted in % the checker, to reduce the risk of breaking existing proof scripts. restructure_formula(for_some(V:T, P), for_some(V:T, NewP)) :- find_core_type(T, CT), ( % Required quantified variable already exists. % The variable is reused. var_const(V, CT, _), !, restructure_formula(P, NewP) ; % Required quantified variable does not already exist. It is % introduced, to support restructuring, and removed afterwards. 
asserta(var_const(V, CT, temp)), ( restructure_formula(P, NewP), retract(var_const(V, CT, temp)) ; retract(var_const(V, CT, temp)), !, fail ) ), !. restructure_formula(X+Y, NEW) :- restructure_formula(Y, NEWY), !, ( checktype(NEWY, T), type(T, set(_)), restructure_set(X, NEWX), NEW = (NEWX \/ NEWY) ; restructure_nonset(X, NEWX), NEW = NEWX + NEWY ), !. restructure_formula(X*Y, NEW) :- restructure_formula(Y, NEWY), !, ( checktype(NEWY, T), type(T, set(_)), restructure_set(X, NEWX), NEW = (NEWX /\ NEWY) ; restructure_nonset(X, NEWX), NEW = NEWX * NEWY ), !. restructure_formula(X-Y, NEW) :- restructure_formula(Y, NEWY), !, ( checktype(NEWY, T), type(T, set(_)), restructure_set(X, NEWX), NEW = (NEWX \ NEWY) ; restructure_nonset(X, NEWX), NEW = NEWX - NEWY ), !. restructure_formula(X/Y, NEW) :- /*1.4*/ restructure_formula(X, NEWX), /*1.4*/ restructure_formula(Y, NEWY), /*1.4*/ !, /*1.4*/ checktype(X, TX), /*1.4*/ checktype(Y, TY), /*1.4*/ !, /*1.4*/ ( /*1.4*/ TX = integer, /*1.4*/ TY = integer, /*1.4*/ integer(NEWX), integer(NEWY), NEWY \= 0, NEW iss (NEWX div NEWY), /*1.4*/ NEWX =:= NEW * NEWY /* only if Y divides X */ /*1.4*/ ; /*1.4*/ NEW = (NEWX / NEWY) /*1.4*/ ), /*1.4*/ !. /*1.4*/ restructure_formula(X<=Y, NEW) :- restructure_formula(Y, NEWY), !, ( checktype(NEWY, T), type(T, set(_)), restructure_formula(X, NEWX), NEW = (NEWX subset_of NEWY) ; restructure_formula(X, NEWX), NEW = (NEWX <= NEWY) ), !. restructure_formula(X>=Y, NEW) :- restructure_formula(Y, NEWY), !, ( checktype(NEWY, T), type(T, set(_)), restructure_set(X, NEWX), NEW = (NEWY subset_of NEWX) ; restructure_nonset(X, NEWX), NEW = (NEWX >= NEWY) ), !. restructure_formula(X<Y, NEW) :- restructure_formula(Y, NEWY), !, ( checktype(NEWY, T), type(T, set(_)), restructure_set(X, NEWX), NEW = (NEWX strict_subset_of NEWY) ; restructure_nonset(X, NEWX), NEW = (NEWX < NEWY) ), !. restructure_formula(X>Y, NEW) :- restructure_formula(Y, NEWY), !, ( checktype(NEWY, T), type(T, set(_)), restructure_set(X, NEWX), NEW = (NEWY strict_subset_of NEWX) ; restructure_nonset(X, NEWX), NEW = (NEWX > NEWY) ), !. restructure_formula(element(A, I), element(NEWA, NEWI)) :- restructure_formula(A, NEWA), !, restructure_formula_list(I, NEWI), !. restructure_formula(update(A, I, X), update(NEWA, NEWI, NEWX)) :- restructure_formula(A, NEWA), !, restructure_formula_list(I, NEWI), !, restructure_formula(X, NEWX), !. restructure_formula(first(X), first(NEWX)) :- restructure_formula(X, NEWX), !. restructure_formula(last(X), last(NEWX)) :- restructure_formula(X, NEWX), !. restructure_formula(nonfirst(X), nonfirst(NEWX)) :- restructure_formula(X, NEWX), !. restructure_formula(nonlast(X), nonlast(NEWX)) :- restructure_formula(X, NEWX), !. restructure_formula(X @ Y, NEWX @ NEWY) :- restructure_formula(X, NEWX), restructure_formula(Y, NEWY), !. restructure_formula(succ(X), succ(NEWX)) :- restructure_formula(X, NEWX), !. restructure_formula(pred(X), pred(NEWX)) :- restructure_formula(X, NEWX), !. restructure_formula(abs(X), abs(NEWX)) :- restructure_nonset(X, NEWX), !. restructure_formula(sqr(X), sqr(NEWX)) :- restructure_nonset(X, NEWX), !. restructure_formula(odd(X), odd(NEWX)) :- restructure_nonset(X, NEWX), !. restructure_formula((X~), NV) :- twiddles_conversion(X, NV), !. restructure_formula((X~), NV) :- atom(X), name(X,XL), append(XL,"__OLD",NVL), name(NV,NVL), var_const(X,TYPE,v), assertz(var_const(NV,TYPE,c)), save_used_identifier(NV, var_const), assertz(twiddles_conversion(X, NV)), !. restructure_formula(X, X) :- atomic(X), !. restructure_formula(+X, Y) :- restructure_formula(X, Y), !. /* CFR039 */ restructure_formula(X, NEWX) :- nonvar(X), X =..
[F|ARGS], spark_enabled, ( F = mk__array, !, restructure_array_aggregate(ARGS, NEWARGS) ; F = mk__record, !, restructure_record_aggregate(ARGS, NEWARGS) ; mk__function_name(F, _, array), !, restructure_array_aggregate(ARGS, NEWARGS) ; mk__function_name(F, _, record), !, restructure_record_aggregate(ARGS, NEWARGS) ), !, NEWX =.. [F|NEWARGS]. restructure_formula(X, Y) :- function_template(X, XL, F), !, restructure_formula_list(XL, YL), function_template(Y, YL, F), !. restructure_formula(X, Y) :- record_function(K, X, _, F, XL, _), /* CFR029,053 */ !, restructure_formula_list(XL, YL), record_function(K, Y, _, F, YL, _), /* CFR029,053 */ !. restructure_formula(X, Y) :- nonvar(X), X=..[OP|XARGS], ( type(OP,set(_)), XARGS=[XL], restructure_formula_list(XL, YL), Y=(set YL) ; type(OP,sequence(_)), XARGS=[XL], restructure_formula_list(XL, Y) ; ( OP = mk__array, /* CFR034 */ spark_enabled, /* CFR034 */ !, /* CFR034 */ restructure_array_aggregate(XARGS, YARGS) /* CFR034 */ ; /* CFR034 */ OP = mk__record, /* CFR034 */ spark_enabled, /* CFR034 */ !, /* CFR034 */ restructure_record_aggregate(XARGS, YARGS) /* CFR034 */ ; /* CFR034 */ restructure_formula_list(XARGS, YARGS) /* CFR034 */ ), /* CFR034 */ Y=..[OP|YARGS] /* CFR034 */ ), !. restructure_formula_list([X], [Y]) :- restructure_formula(X, Y), !. restructure_formula_list([X|XL], [Y|YL]) :- restructure_formula(X, Y), !, restructure_formula_list(XL, YL), !. restructure_formula_list([], []) :- !. /*** restructure_set(OLD,NEW) -- no ~, set & seq prefix changes ***/ restructure_set(X+Y, NEWX \/ NEWY) :- restructure_set(X, NEWX), restructure_set(Y, NEWY), !. restructure_set(X*Y, NEWX /\ NEWY) :- restructure_set(X, NEWX), restructure_set(Y, NEWY), !. restructure_set(X-Y, NEWX \ NEWY) :- restructure_set(X, NEWX), restructure_set(Y, NEWY), !. restructure_set(X, Y) :- !, restructure_formula(X, Y), !. /*** restructure_nonset(OLD,NEW) -- no ~, set & seq prefix changes ***/ restructure_nonset(X+Y, NEWX+NEWY) :- restructure_nonset(X, NEWX), restructure_nonset(Y, NEWY), !. restructure_nonset(X*Y, NEWX*NEWY) :- restructure_nonset(X, NEWX), restructure_nonset(Y, NEWY), !. restructure_nonset(X-Y, NEWX-NEWY) :- restructure_nonset(X, NEWX), restructure_nonset(Y, NEWY), !. restructure_nonset(X, Y) :- !, restructure_formula(X, Y), !. restructure_array_aggregate([X|XL], [Y|YL]) :- /* CFR034 */ ( /* CFR034 */ X = (IND := EXPR), /* CFR034 */ !, /* CFR034 */ restructure_formula(EXPR, NEWEXPR), /* CFR034 */ !, /* CFR034 */ ( /* CFR034 */ IND = (IND1 & IND2), /* CFR034 */ restructure_indices(IND1, NEWIND1), /* CFR034 */ restructure_indices(IND2, NEWIND2), /* CFR034 */ !, /* CFR034 */ NEWIND = (NEWIND1 & NEWIND2) /* CFR034 */ ; /* CFR034 */ IND = [LO .. HI], /* CFR034 */ restructure_formula(LO, NEWLO), /* CFR034 */ restructure_formula(HI, NEWHI), /* CFR034 */ !, /* CFR034 */ NEWIND = [NEWLO .. NEWHI] /* CFR034 */ ; /* CFR034 */ IND = [I], /* CFR034 */ restructure_formula(I, NEWI), /* CFR034 */ !, /* CFR034 */ NEWIND = [NEWI] /* CFR034 */ ), /* CFR034 */ !, /* CFR034 */ Y = (NEWIND := NEWEXPR) /* CFR034 */ ; /* CFR034 */ restructure_formula(X, Y) /* CFR034 */ ), /* CFR034 */ !, /* CFR034 */ restructure_array_aggregate(XL, YL), /* CFR034 */ !. /* CFR034 */ restructure_array_aggregate([], []) :- !. /* CFR034 */ restructure_indices(X & Y, NEWX & NEWY) :- /* CFR034 */ restructure_indices(X, NEWX), /* CFR034 */ restructure_indices(Y, NEWY), /* CFR034 */ !. /* CFR034 */ restructure_indices([X .. Y], [NEWX .. 
NEWY]) :- /* CFR034 */ restructure_formula(X, NEWX), /* CFR034 */ restructure_formula(Y, NEWY), /* CFR034 */ !. /* CFR034 */ restructure_indices([I], NEWIND) :- /* CFR034 */ restructure_formula(I, NEWI), /* CFR034 */ !, /* CFR034 */ NEWIND = [NEWI], /* CFR034 */ !. /* CFR034 */ restructure_record_aggregate([F := EXP | REST], [F := NEWEXP | NEWREST]) :- !, /* CFR034 */ restructure_formula(EXP, NEWEXP), /* CFR034 */ !, /* CFR034 */ restructure_record_aggregate(REST, NEWREST), /* CFR034 */ !. /* CFR034 */ restructure_record_aggregate([], []) :- !. /* CFR034 */ /*** read_component_number(NO) -- get number of hypothesis/conclusion ***/ read_component_number(NUMBER) :- read_number_codes(CODES), name(NUMBER, CODES), !. /*** read_number_codes(CODELIST) -- read codes until hitting a colon ***/ read_number_codes(CODES) :- eget0(CHAR), ( CHAR=58, !, CODES=[], put_code(32), put_code(32) ; read_number_codes(REST), CODES=[CHAR|REST] ), !. /*** MAKENAME(TITLE,NUMBER,VCNAME) -- create a unique VC name for VC ***/ makename(TITLE,N,VCNAME) :- codelist(N,BACK), name(TITLE,FRONT), append(FRONT,[95|BACK],VCL), name(VCNAME,VCL), !. /*** CODELIST(NUMBER,LIST) -- break NUMBER into LIST of ASCII codes ***/ codelist(N,[M]) :- N>=0, N=<9, M is 48+N, !. codelist(N,C) :- N>=10, M iss N div 10, codelist(M,C1), C2 is (N mod 10)+48, append(C1,[C2],C), !. /*** make_numbers_list(L,U,LIST) - make a list of the numbers in L..U ***/ make_numbers_list(L,U,[]) :- L>U, !. make_numbers_list(L,L,[L]) :- !. make_numbers_list(L,U,[L|LIST]) :- L1 is L+1, make_numbers_list(L1,U,LIST), !. /*** SKIP_INITIAL_CRAP -- read past banner ***/ skip_initial_crap :- /* CFR034 */ read_and_echo_vc_line(_Line1), /* CFR034 */ read_and_echo_vc_line(_Line2), /* CFR034 */ read_and_echo_vc_line(_Line3), /* CFR034 */ read_and_echo_vc_line(_Line4), /* CFR034 */ read_and_echo_vc_line(_Line5), /* CFR034 */ read_and_echo_vc_line(_Line6), /* CFR034 */ read_and_echo_vc_line(_Line7), /* CFR034 */ read_and_echo_vc_line(_Line8), /* CFR034 */ read_and_echo_vc_line(_Line9), /* CFR034 */ read_and_echo_vc_line(_Line10), /* CFR034 */ read_and_echo_vc_line(_Line11), /* CFR034 */ read_and_echo_vc_line(_Line12), /* CFR034 */ read_and_echo_vc_line(_Line13), /* CFR034 */ fail. /* CFR034 */ skip_initial_crap :- !. /* CFR034 */ /*** MINI_SKIP_INITIAL_CRAP -- read top of banner to see if SPARK ***/ mini_skip_initial_crap :- /* CFR035 */ read_vc_line_noecho(_Line1), /* CFR035 */ read_vc_line_noecho(_Line2), /* CFR035 */ read_vc_line_noecho(Line3), /* CFR035 */ scan_line_three_for_SPARK_marque(Line3), /* CFR035 */ !. /* CFR035 */ /* scan_line_three_for_SPARK_marque(L) */ scan_line_three_for_SPARK_marque(L) :- ( triple_append(_, "Examiner", _, L) /* now */ ; triple_append(_, "SPARK", _, L) /* previously */ ), !, assert(spark_enabled). scan_line_three_for_SPARK_marque(_L) :- !. /*** FIND_CHAR(N) -- repeatedly fetch characters until hit next char N ***/ find_char(N) :- repeat, eget0(C), /* until */ C=N, !. % Note POGS collects both the DATE and TIME, and expects the following format. % % Once all trailing and leading space has been deleted: % % The text 'DATE' must occur in column: 1..4. % % The date format is like: 09-JAN-1980 % The time format is like: 01:59:01 % % The date must start on column: 8 and be 11 chars wide. % The time must start on column: 28 and be 8 chars wide. % % Currently, where the text 'DATE' is detected, POGS semantically compares the % items parsed against the date embedded into the SIV file. 
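% Illustrative example only (not part of the original sources): with an FDL
% title of procedure_main, a VC numbered 12 is named via
% makename(procedure_main, 12, VCNAME), giving VCNAME = procedure_main_12;
% codelist/2 supplies the character codes "12" appended after the underscore.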
/*** WRITE_PROOF_LOG_BANNER -- write banner heading to proof log ***/ write_proof_log_banner :- logfile_name(PROOFLOG), file_can_be_written(PROOFLOG), plain_output(off), !, tell(PROOFLOG), write('*****************************************************************************'), nl, write('SPADE Transcript of Interactive Proof Session'), nl, nl, current_output(Stream), display_header(Stream), nl, write('*****************************************************************************'), nl, fetch_date_and_time(DATE, TIME), nl, write('DATE : '), print(DATE), write(' TIME : '), print(TIME), nl, nl, tell(user), !. write_proof_log_banner :- logfile_name(PROOFLOG), file_can_be_written(PROOFLOG), plain_output(on), !, tell(PROOFLOG), write('*****************************************************************************'), nl, write('SPADE Transcript of Interactive Proof Session'), nl, nl, current_output(Stream), display_header(Stream), nl, write('*****************************************************************************'), nl, nl, nl, nl, tell(user), !. write_proof_log_banner :- logfile_name(PROOFLOG), \+ file_can_be_written(PROOFLOG), nl, write('Aborted: '), print(PROOFLOG), write(' cannot be written.'), nl, !, close_all_streams, halt. /*** EGET0(CHAR) -- get, and optionally echo, character from file ***/ eget0(C) :- in_declare_command, !, lget0(C). /* CFR030 */ eget0(C) :- get_code(C), echo_char(C), !. /*** EREAD(TERM) -- read, and optionally echo, term from file ***/ eread(V) :- read_term_and_layout(V), echo_term(V), !. /*** ECHO_CHAR(CHAR) -- echo character when echoing is on ***/ echo_char(EOF) :- eof_char(EOF), !. /* CFR034 */ echo_char(C) :- echo(on), put_code(C), !. echo_char(_) :- !. /*** ECHO_TERM(TERM) -- echo term when echoing is on ***/ echo_term(end_of_file) :- !. echo_term(V) :- echo(on), print(V), write('.'), nl, !. echo_term(_) :- !. /*** FORMAT_FORMULA(VC) -- save VC as hypotheses and conclusions ***/ format_formula(MS, X->Y) :- /* CFR054 */ !, layout(MS, hyp, X), !, layout(MS, conc, Y), !. format_formula(MS, Y) :- /* CFR054 */ layout(MS, conc, Y), !. /*** LAYOUT(HYP_OR_CONC, FORMULA) -- simplify, flatten and save the VC ***/ layout(MS, L, F) :- ( simplify_in_infer(on), simplify(F,FF) ; FF = F ), !, flatten(FF,F1), !, store_vc(MS, L, 1, F1). /*** FLATTEN(OLD, NEW) -- flatten out (& translate) formula ***/ flatten((A and B) and C,F) :- flatten(A and (B and C),F), !. flatten((A or B) or C,F) :- flatten(A or (B or C),F), !. flatten(A and B,A1 and B1) :- flatten(A,A1), flatten(B,B1), !. flatten(A or B,A1 or B1) :- flatten(A,A1), flatten(B,B1), !. flatten(not A,not A1) :- flatten(A,A1), !. flatten(A -> F,A1 -> F1) :- flatten(A,A1), flatten(F,F1), !. flatten(A <-> F,A1 <-> F1) :- flatten(A,A1), flatten(F,F1), !. flatten(X,X) :- atomic(X), !. flatten(X,Y) :- novars(X), X=..[OP|ArgsX], flattenlist(ArgsX,ArgsY), Y=..[OP|ArgsY], !. flattenlist([],[]) :- !. flattenlist([X|XL],[Y|YL]) :- flatten(X,Y), flattenlist(XL,YL), !. /*** STORE_VC(HYP_OR_CONC,NUM,FORMULA) -- assert components into VC ***/ store_vc(MS,L,K,F and R) :- !, add_formula(MS,L,F), !, K1 is K+1, store_vc(MS,L,K1,R), !. store_vc(MS,L,_K,F) :- !, add_formula(MS,L,F), !. /*** add_formula(HYP_OR_CONC,FORMULA) -- add formula to current VC ***/ add_formula(logmessage,hyp,X) :- add_new_hyp(X,1), !. add_formula(logmessage,conc,X) :- add_new_conc(X,1), !. add_formula(nomessage,hyp,X) :- quiet_add_hyp(X,1), !. add_formula(nomessage,conc,X) :- quiet_add_conc(X,1), !. 
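% Illustrative example only (not part of the original sources): flatten/2
% above right-associates conjunctions, e.g.
%   ?- flatten((a and b) and c, F).
%   F = a and (b and c)
% so that store_vc/4 can peel the conjuncts off one at a time and add them as
% separately numbered hypotheses or conclusions.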
/*** QUIET_ADD_HYP(H,N) - adds formula H as the first free hypothesis number after N ***/ quiet_add_hyp(true,_) :- !. quiet_add_hyp(H,1) :- hyp(_,H), !. quiet_add_hyp(H,N) :- hyp(N,_), M is N+1, quiet_add_hyp(H,M), !. quiet_add_hyp(H,N) :- assertz(hyp(N,H)), assertz(logfact(newhyp, hyp(N,H))), stand_all, !. /* CFR018 */ /*** QUIET_ADD_CONC(C,N) - adds formula C as the first free conclusion number after N ***/ quiet_add_conc(C,N) :- conc(N,_), M is N+1, quiet_add_conc(C,M), !. quiet_add_conc(C,N) :- assertz(conc(N,C)), assertz(logfact(newconc,conc(N,C))), !. /* CFR018 */ declare :- assert(in_declare_command), /* CFR030 */ prompt_user('Please type your FDL-syntax declaration, terminated by a ";"','FDL> '), get_fdl_declaration(DECLARATION), !, process_fdl_dec(DECLARATION), retractall(in_declare_command), /* CFR030 */ !. /* remove_true_vcs_from_numbers_list(OLD_LIST, NEW_LIST) */ remove_true_vcs_from_numbers_list(LIST, REVISED_LIST) :- /* CFR004 */ retract(is_true_vc(N, VCNAME)), delete_number(N, LIST, NEW_LIST), assertz(logfact(true_vc, VCNAME)), update_vcs_proved(N), nl, write('+++ True VC '), print(VCNAME), write(' eliminated automatically.'), nl, !, remove_true_vcs_from_numbers_list(NEW_LIST, REVISED_LIST), !. remove_true_vcs_from_numbers_list(LIST, LIST) :- !. %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/rulefiles.pro0000644000175000017500000007205611753202340016566 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /*** (1) Map principal functor structure to built-in rulefile name ***/ built_in_rulefile( <(_,_), file=="NUMINEQS" ). built_in_rulefile( <=(_,_), file=="NUMINEQS" ). built_in_rulefile( >(_,_), file=="NUMINEQS" ). built_in_rulefile( >=(_,_), file=="NUMINEQS" ). built_in_rulefile( =(_,_), file=="NUMINEQS" ). built_in_rulefile( <>(_,_), file=="NUMINEQS" ). built_in_rulefile( or(_,_), file=="NUMINEQS" ). built_in_rulefile( <(_,_), file=="GENINEQS" ). built_in_rulefile( <=(_,_), file=="GENINEQS" ). built_in_rulefile( >(_,_), file=="GENINEQS" ). built_in_rulefile( >=(_,_), file=="GENINEQS" ). built_in_rulefile( =(_,_), file=="GENINEQS" ). built_in_rulefile( <>(_,_), file=="GENINEQS" ). built_in_rulefile( not(_), file=="GENINEQS" ). built_in_rulefile( <=(_,_), file=="INTINEQS" ). built_in_rulefile( >(_,_), file=="INTINEQS" ). built_in_rulefile( >=(_,_), file=="INTINEQS" ). built_in_rulefile( =(_,_), file=="INTINEQS" ). built_in_rulefile( _ + _, file=="ARITH" ). built_in_rulefile( _ - _, file=="ARITH" ). built_in_rulefile( _ * _, file=="ARITH" ). built_in_rulefile( _ div _, file=="ARITH" ). 
built_in_rulefile( -(_), file=="ARITH" ). built_in_rulefile( _ / _, file=="ARITH" ). built_in_rulefile( _ = _, file=="ARITH" ) :- use_subst_rules_for_equality(on). built_in_rulefile( element(_,_), file=="ARRAY" ). built_in_rulefile( update(_,_,_), file=="ARRAY" ). built_in_rulefile( _ = _, file=="ARRAY" ) :- use_subst_rules_for_equality(on). built_in_rulefile( =(_,_), file=="ENUM" ). built_in_rulefile( <=(_,_), file=="ENUM" ). built_in_rulefile( <(_,_), file=="ENUM" ). built_in_rulefile( >=(_,_), file=="ENUM" ). built_in_rulefile( >(_,_), file=="ENUM" ). built_in_rulefile( <>(_,_), file=="ENUM" ). built_in_rulefile( succ(_), file=="ENUM" ). built_in_rulefile( pred(_), file=="ENUM" ). built_in_rulefile( or(_,_), file=="ENUM" ). built_in_rulefile( <=(_,_), file=="ENUMERATION" ). built_in_rulefile( <(_,_), file=="ENUMERATION" ). built_in_rulefile( >=(_,_), file=="ENUMERATION" ). built_in_rulefile( >(_,_), file=="ENUMERATION" ). built_in_rulefile( <>(_,_), file=="ENUMERATION" ). built_in_rulefile( succ(_), file=="ENUMERATION" ). built_in_rulefile( pred(_), file=="ENUMERATION" ). built_in_rulefile( abs(_), file=="FDLFUNCS" ). built_in_rulefile( >=(_,_), file=="FDLFUNCS" ). built_in_rulefile( >(_,_), file=="FDLFUNCS" ). built_in_rulefile( or(_,_), file=="FDLFUNCS" ). built_in_rulefile( *(_,_), file=="FDLFUNCS" ). built_in_rulefile( sqr(_), file=="FDLFUNCS" ). built_in_rulefile( odd(_), file=="FDLFUNCS" ). built_in_rulefile( not(_), file=="FDLFUNCS" ). built_in_rulefile( =(_,_), file=="FDLFUNCS" ). built_in_rulefile( <>(_,_), file=="FDLFUNCS" ). built_in_rulefile( **(_,_), file=="FDLFUNCS" ). /* CFR042 */ built_in_rulefile( <=(_,_), file=="FDLFUNCS" ). /* CFR042 */ built_in_rulefile( <(_,_), file=="FDLFUNCS" ). /* CFR042 */ built_in_rulefile( and(_,_), file=="LOGIC" ). built_in_rulefile( or(_,_), file=="LOGIC" ). built_in_rulefile( <->(_,_), file=="LOGIC" ). built_in_rulefile( not(_), file=="LOGIC" ). built_in_rulefile( ->(_,_), file=="LOGIC" ). built_in_rulefile( _ = _, file=="LOGIC" ) :- use_subst_rules_for_equality(on). built_in_rulefile( >=(_,_), file=="SEQ" ). built_in_rulefile( >(_,_), file=="SEQ" ). built_in_rulefile( _ @ _, file=="SEQ" ). built_in_rulefile( [_|_], file=="SEQ" ). built_in_rulefile( length(_), file=="SEQ" ). built_in_rulefile( first(_), file=="SEQ" ). built_in_rulefile( last(_), file=="SEQ" ). built_in_rulefile( nonfirst(_), file=="SEQ" ). built_in_rulefile( nonlast(_), file=="SEQ" ). built_in_rulefile( _ + _, file=="SEQ" ). built_in_rulefile( _ - _, file=="SEQ" ). built_in_rulefile( _ = _, file=="SEQ" ). built_in_rulefile( _ <-> _, file=="SEQ" ). built_in_rulefile( in(_,_), file=="SETS" ). built_in_rulefile( not_in(_,_), file=="SETS" ). built_in_rulefile( subset_of(_,_), file=="SETS" ). built_in_rulefile( strict_subset_of(_,_), file=="SETS" ). built_in_rulefile( _ \/ _, file=="SETS" ). built_in_rulefile( _ /\ _, file=="SETS" ). built_in_rulefile( _ \ _, file=="SETS" ). built_in_rulefile( not(_), file=="SETS" ). built_in_rulefile( or(_,_), file=="SETS" ). built_in_rulefile( _ = _, file=="SETS" ) :- use_subst_rules_for_equality(on). built_in_rulefile( for_all(_,_), file=="QUANTIF" ). built_in_rulefile( for_some(_,_), file=="QUANTIF" ). built_in_rulefile( not _, file=="QUANTIF" ). built_in_rulefile( _ or _, file=="QUANTIF" ). built_in_rulefile( _ = _, file=="QUANTIF" ) :- use_subst_rules_for_equality(on). built_in_rulefile( bit__and(_,_), file=="BITWISE"). built_in_rulefile( bit__or(_,_), file=="BITWISE"). built_in_rulefile( bit__xor(_,_), file=="BITWISE"). 
built_in_rulefile( _ < _ , file=="BITWISE"). built_in_rulefile( _ <= _ , file=="BITWISE"). built_in_rulefile( _ = _ , file=="BITWISE"). built_in_rulefile( _ mod _, file=="MODULAR"). built_in_rulefile( _ <= _, file=="MODULAR"). built_in_rulefile( _ <> _, file=="MODULAR"). built_in_rulefile( _ < _, file=="MODULAR"). built_in_rulefile( _ = _, file=="MODULAR"). /*** (2) Map built-in rulefile name to rule name ***/ built_in_rulefile( file=="NUMINEQS", inequals(_) ). built_in_rulefile( file=="NUMINEQS", zero(_) ). built_in_rulefile( file=="GENINEQS", transitivity(_) ). built_in_rulefile( file=="GENINEQS", strengthen(_) ). built_in_rulefile( file=="GENINEQS", negation(_) ). built_in_rulefile( file=="INTINEQS", inequals(_) ). built_in_rulefile( file=="ARITH", arith(_) ). built_in_rulefile( file=="ARITH", assoc(_) ). built_in_rulefile( file=="ARITH", commut(_) ). built_in_rulefile( file=="ARITH", distrib(_) ). built_in_rulefile( file=="ARITH", minus(_) ). built_in_rulefile( file=="ARITH", intdiv(_) ). built_in_rulefile( file=="ARRAY", array(_) ). built_in_rulefile( file=="ARRAY", mk__array(_) ) :- spark_enabled. /* CFR040 */ built_in_rulefile( file=="ENUM", enum(_) ). built_in_rulefile( file=="ENUM", enum_cases(_) ). built_in_rulefile( file=="ENUMERATION", enumeration(_) ). built_in_rulefile( file=="FDLFUNCS", abs(_) ). built_in_rulefile( file=="FDLFUNCS", sqr(_) ). built_in_rulefile( file=="FDLFUNCS", odd(_) ). built_in_rulefile( file=="FDLFUNCS", exp(_) ). /* CFR042 */ built_in_rulefile( file=="LOGIC", assoc(_) ). built_in_rulefile( file=="LOGIC", commut(_) ). built_in_rulefile( file=="LOGIC", distrib(_) ). built_in_rulefile( file=="LOGIC", equivalence(_) ). built_in_rulefile( file=="LOGIC", implies(_) ). built_in_rulefile( file=="LOGIC", logical_and(_) ). built_in_rulefile( file=="LOGIC", logical_not(_) ). built_in_rulefile( file=="LOGIC", logical_or(_) ). built_in_rulefile( file=="LOGIC", logical(_) ). built_in_rulefile( file=="SEQ", seqlen(_) ). built_in_rulefile( file=="SEQ", append(_) ). built_in_rulefile( file=="SEQ", first(_) ). built_in_rulefile( file=="SEQ", last(_) ). built_in_rulefile( file=="SEQ", nonfirst(_) ). built_in_rulefile( file=="SEQ", nonlast(_) ). built_in_rulefile( file=="SEQ", seq(_) ). built_in_rulefile( file=="SETS", sets(_) ). built_in_rulefile( file=="QUANTIF", quant(_) ). built_in_rulefile( file=="BITWISE", bitwise(_) ) :- spark_enabled. built_in_rulefile( file=="MODULAR", modular(_) ) :- spark_enabled. /*** SPECIAL RULES ***/ special_rulefile( _, file=="SPECIAL" ). special_rulefile( _, file=="RECORD" ). special_rulefile( file=="SPECIAL", inference(_) ). special_rulefile( file=="SPECIAL", simplify(_) ). special_rulefile( file=="SPECIAL", logic(_) ). special_rulefile( file=="SPECIAL", standardisation(_) ). special_rulefile( file=="SPECIAL", eq(_) ). special_rulefile( file=="SPECIAL", equiv(_) ). special_rulefile( file=="SPECIAL", unification(_) ). special_rulefile( file=="RECORD", record(_) ). special_rulefile( file=="RECORD", record_equality(_) ). special_rulefile( file=="RECORD", mk__record(_) ) :- spark_enabled. 
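% Illustrative sketch only, not part of the toolset: the facts above form a
% two-way table, mapping principal functor structures to built-in rule file
% names and rule file names to rule families.  example_files_for/2 below is
% a hypothetical helper showing how the first direction can be queried.
example_files_for(Term, Files) :-
   findall(F, built_in_rulefile(Term, file==F), Files).
% e.g. example_files_for(_A + _B, Fs) collects "ARITH" and "SEQ"
% (as code lists or strings, depending on the double_quotes flag in use).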
/* CFR040 */ /*** Definition of rulefile(_,_) predicate ***/ rulefile(X,Y) :- nonvar(X), X = (_ = _), spade_checker_prefix(SPADE_CHECKER), /* CFR048 */ !, ( ( var(Y) ; atom(Y) ), /* CFR048 */ built_in_rulefile(X,file==YY), /* CFR048 */ append(YY, ".RUL", YYL), /* CFR048 */ append(SPADE_CHECKER, YYL, YL), /* CFR048 */ name(Y, YL) /* CFR048 */ ; user_rulefile(X,Y) ; user_rulefile(_A,Y), \+ user_rulefile(X,Y), use_subst_rules_for_equality(on) ; ( var(Y) ; atom(Y) ), /* CFR048 */ special_rulefile(X,file==YY), /* CFR048 */ append(YY, ".RUL", YYL), /* CFR048 */ append(SPADE_CHECKER, YYL, YL), /* CFR048 */ name(Y, YL) /* CFR048 */ ). rulefile(X,Y) :- spade_checker_prefix(SPADE_CHECKER), /* CFR048 */ ( ( var(Y) ; atom(Y) ), /* CFR048 */ built_in_rulefile(X,file==YY), /* CFR048 */ append(YY, ".RUL", YYL), /* CFR048 */ append(SPADE_CHECKER, YYL, YL), /* CFR048 */ name(Y, YL) /* CFR048 */ ; /* CFR048 */ atom(X), /* CFR048 */ name(X, XL), /* CFR048 */ triple_append(SPADE_CHECKER, XX, ".RUL", XL), /* CFR048 */ built_in_rulefile(file==XX,Y) /* CFR048 */ ; user_rulefile(X,Y) ; ( var(Y) ; atom(Y) ), /* CFR048 */ special_rulefile(X,file==YY), /* CFR048 */ append(YY, ".RUL", YYL), /* CFR048 */ append(SPADE_CHECKER, YYL, YL), /* CFR048 */ name(Y, YL) /* CFR048 */ ; /* CFR048 */ atom(X), /* CFR048 */ name(X, XL), /* CFR048 */ triple_append(SPADE_CHECKER, XX, ".RUL", XL), /* CFR048 */ special_rulefile(file==XX,Y) /* CFR048 */ ). /*** TYPE CONSTRAINTS OF BUILT-IN RULE FILES ***/ built_in_classification(X+Y, "ARITH", arith(_), [X:ir, Y:ir]). built_in_classification(X-Y, "ARITH", arith(_), [X:ir, Y:ir]). built_in_classification(X*Y, "ARITH", arith(_), [X:ir, Y:ir]). built_in_classification(X/Y, "ARITH", arith(_), [X:ir, Y:ir]). built_in_classification(X div Y, "ARITH", arith(_), [X:i, Y:i]). built_in_classification(X=Y, "ARITH", arith(_), [X:ir, Y:ir]) :- use_subst_rules_for_equality(on). built_in_classification(X+Y, "ARITH", assoc(_), [X:ir, Y:ir]). built_in_classification(X*Y, "ARITH", assoc(_), [X:ir, Y:ir]). built_in_classification(X*Y, "ARITH", assoc(_), [X:ir, Y:ir]). built_in_classification(X=Y, "ARITH", assoc(_), [X:ir, Y:ir]) :- use_subst_rules_for_equality(on). built_in_classification(X+Y, "ARITH", commut(_), [X:ir, Y:ir]). built_in_classification(X*Y, "ARITH", commut(_), [X:ir, Y:ir]). built_in_classification(X=Y, "ARITH", commut(_), [X:ir, Y:ir]) :- use_subst_rules_for_equality(on). built_in_classification(X+Y, "ARITH", distrib(_), [X:ir, Y:ir]). built_in_classification(X-Y, "ARITH", distrib(_), [X:ir, Y:ir]). built_in_classification(X*Y, "ARITH", distrib(_), [X:ir, Y:ir]). built_in_classification(X=Y, "ARITH", distrib(_), [X:ir, Y:ir]) :- use_subst_rules_for_equality(on). built_in_classification(X-Y, "ARITH", minus(_), [X:ir, Y:ir]). built_in_classification(X+Y, "ARITH", minus(_), [X:ir, Y:ir]). built_in_classification(-X, "ARITH", minus(_), [X:ir]). built_in_classification(X*Y, "ARITH", minus(_), [X:ir, Y:ir]). built_in_classification(X=Y, "ARITH", minus(_), [X:ir, Y:ir]) :- use_subst_rules_for_equality(on). built_in_classification(X+Y, "ARITH", intdiv(_), [X:i, Y:i]). built_in_classification(X div Y, "ARITH", intdiv(_), [X:i, Y:i]). built_in_classification(-X, "ARITH", intdiv(_), [X:i]). built_in_classification(X=Y, "ARITH", intdiv(_), [X:i, Y:i]) :- use_subst_rules_for_equality(on). built_in_classification(_, "ARRAY", array(_), []). built_in_classification(element(_,_),"ARRAY", mk__array(_), []) :- spark_enabled. /* CFR040 */ built_in_classification(X=Y, "ENUM", enum(_), [X:e, Y:e]). 
built_in_classification(X<>Y, "ENUM", enum(_), [X:e, Y:e]).
built_in_classification(X<=Y, "ENUM", enum(_), [X:e, Y:e]).
built_in_classification(X<Y, "ENUM", enum(_), [X:e, Y:e]).
built_in_classification(X>=Y, "ENUM", enum(_), [X:e, Y:e]).
built_in_classification(X>Y, "ENUM", enum(_), [X:e, Y:e]).
built_in_classification(pred(_), "ENUM", enum(_), []).
built_in_classification(succ(_), "ENUM", enum(_), []).
built_in_classification(_X or _Y, "ENUM", enum(_), []).
built_in_classification(_X or _Y, "ENUM", enum_cases(_), []).
built_in_classification(X<>Y, "ENUMERATION", enumeration(_), [X:e, Y:e]).
built_in_classification(X<=Y, "ENUMERATION", enumeration(_), [X:e, Y:e]).
built_in_classification(X<Y, "ENUMERATION", enumeration(_), [X:e, Y:e]).
built_in_classification(X>=Y, "ENUMERATION", enumeration(_), [X:e, Y:e]).
built_in_classification(X>Y, "ENUMERATION", enumeration(_), [X:e, Y:e]).
built_in_classification(pred(_), "ENUMERATION", enumeration(_), []).
built_in_classification(succ(_), "ENUMERATION", enumeration(_), []).
built_in_classification(abs(X), "FDLFUNCS", abs(_), [X:ir]).
built_in_classification(X>=Y, "FDLFUNCS", abs(_), [X:ir,Y:ir]).
built_in_classification(X>Y, "FDLFUNCS", abs(_), [X:ir,Y:ir]).
built_in_classification(_X or _Y, "FDLFUNCS", abs(_), []).
built_in_classification(X*Y, "FDLFUNCS", abs(_), [X:ir,Y:ir]).
built_in_classification(X=Y, "FDLFUNCS", abs(_), [X:ir, Y:ir]) :- use_subst_rules_for_equality(on).
built_in_classification(sqr(X), "FDLFUNCS", sqr(_), [X:ir]).
built_in_classification(X>=Y, "FDLFUNCS", sqr(_), [X:ir,Y:ir]).
built_in_classification(X>Y, "FDLFUNCS", sqr(_), [X:ir,Y:ir]).
built_in_classification(X*Y, "FDLFUNCS", sqr(_), [X:ir,Y:ir]).
built_in_classification(abs(X), "FDLFUNCS", sqr(_), [X:ir]).
built_in_classification(X=Y, "FDLFUNCS", sqr(_), [X:ir, Y:ir]) :- use_subst_rules_for_equality(on).
built_in_classification(odd(X), "FDLFUNCS", odd(_), [X:i]).
built_in_classification(not(_X), "FDLFUNCS", odd(_), []).
built_in_classification(X=Y, "FDLFUNCS", odd(_), [X:i,Y:i]).
built_in_classification(X<>Y, "FDLFUNCS", odd(_), [X:i,Y:i]).
built_in_classification(X**Y, "FDLFUNCS", exp(_), [X:ir,Y:i]). /* CFR042 */
built_in_classification(X*Y, "FDLFUNCS", exp(_), [X:ir,Y:ir]). /* CFR042 */
built_in_classification(X>=Y, "FDLFUNCS", exp(_), [X:ir,Y:ir]). /* CFR042 */
built_in_classification(X<=Y, "FDLFUNCS", exp(_), [X:ir,Y:ir]). /* CFR042 */
built_in_classification(X>Y, "FDLFUNCS", exp(_), [X:ir,Y:ir]). /* CFR042 */
built_in_classification(X<Y, "FDLFUNCS", exp(_), [X:ir,Y:ir]). /* CFR042 */
built_in_classification(_X and _Y, "LOGIC", assoc(_), []).
built_in_classification(_X or _Y, "LOGIC", assoc(_), []).
built_in_classification(_X <-> _Y, "LOGIC", assoc(_), []).
built_in_classification(_X=_Y, "LOGIC", assoc(_), []) :- use_subst_rules_for_equality(on).
built_in_classification(_X and _Y, "LOGIC", commut(_), []).
built_in_classification(_X or _Y, "LOGIC", commut(_), []).
built_in_classification(_X <-> _Y, "LOGIC", commut(_), []).
built_in_classification(_X=_Y, "LOGIC", commut(_), []) :- use_subst_rules_for_equality(on).
built_in_classification(_X and _Y, "LOGIC", distrib(_), []).
built_in_classification(_X or _Y, "LOGIC", distrib(_), []).
built_in_classification(_X=_Y, "LOGIC", distrib(_), []) :- use_subst_rules_for_equality(on).
built_in_classification(_X <-> _Y, "LOGIC", equivalence(_), []).
built_in_classification(_X=_Y, "LOGIC", equivalence(_), []) :- use_subst_rules_for_equality(on).
built_in_classification(_X -> _Y, "LOGIC", implies(_), []).
built_in_classification(_X=_Y, "LOGIC", implies(_), []) :- use_subst_rules_for_equality(on).
built_in_classification(_X and _Y, "LOGIC", logical_and(_), []).
built_in_classification(_X=_Y, "LOGIC", logical_and(_), []) :- use_subst_rules_for_equality(on).
built_in_classification(not _X, "LOGIC", logical_not(_), []).
built_in_classification(_X=_Y, "LOGIC", logical_not(_), []) :- use_subst_rules_for_equality(on).
built_in_classification(_X or _Y, "LOGIC", logical_or(_), []).
built_in_classification(_X=_Y, "LOGIC", logical_or(_), []) :- use_subst_rules_for_equality(on).
built_in_classification(not _X, "LOGIC", logical(_), []).
built_in_classification(_X and _Y, "LOGIC", logical(_), []).
built_in_classification(_X or _Y, "LOGIC", logical(_), []).
built_in_classification(_X -> _Y, "LOGIC", logical(_), []).
built_in_classification(_X <-> _Y, "LOGIC", logical(_), []).
built_in_classification(_X=_Y, "LOGIC", logical(_), []) :- use_subst_rules_for_equality(on).
built_in_classification(X=Y, "INTINEQS", inequals(_), [X:i, Y:i]).
built_in_classification(X<=Y, "INTINEQS", inequals(_), [X:i, Y:i]).
built_in_classification(X>=Y, "INTINEQS", inequals(_), [X:i, Y:i]).
built_in_classification(X>Y, "INTINEQS", inequals(_), [X:i, Y:i]).
built_in_classification(X=Y, "NUMINEQS", inequals(_), [X:ir, Y:ir]).
built_in_classification(X<>Y, "NUMINEQS", inequals(_), [X:ir, Y:ir]).
built_in_classification(X<=Y, "NUMINEQS", inequals(_), [X:ir, Y:ir]).
built_in_classification(X>=Y, "NUMINEQS", inequals(_), [X:ir, Y:ir]).
built_in_classification(X<Y, "NUMINEQS", inequals(_), [X:ir, Y:ir]).
built_in_classification(X>Y, "NUMINEQS", inequals(_), [X:ir, Y:ir]).
built_in_classification(X=Y, "NUMINEQS", zero(_), [X:ir, Y:ir]).
built_in_classification(X<>Y, "NUMINEQS", zero(_), [X:ir, Y:ir]).
built_in_classification(_X or _Y, "NUMINEQS", zero(_), []).
built_in_classification(_X=_Y, "GENINEQS", transitivity(_), []).
built_in_classification(_X<>_Y, "GENINEQS", transitivity(_), []).
built_in_classification(_X<=_Y, "GENINEQS", transitivity(_), []).
built_in_classification(_X>=_Y, "GENINEQS", transitivity(_), []).
built_in_classification(_X<_Y, "GENINEQS", transitivity(_), []).
built_in_classification(_X>_Y, "GENINEQS", transitivity(_), []).
built_in_classification(_X=_Y, "GENINEQS", strengthen(_), []).
built_in_classification(_X<_Y, "GENINEQS", strengthen(_), []).
built_in_classification(_X>_Y, "GENINEQS", strengthen(_), []).
built_in_classification(_, "GENINEQS", negation(_), []).
built_in_classification(_, "QUANTIF", quant(_), []).
built_in_classification(X>=Y, "SEQ", seqlen(_), [X:i, Y:i]).
built_in_classification(X>Y, "SEQ", seqlen(_), [X:i, Y:i]).
built_in_classification(X+Y, "SEQ", seqlen(_), [X:i, Y:i]).
built_in_classification(X-Y, "SEQ", seqlen(_), [X:i, Y:i]).
built_in_classification(length(_), "SEQ", seqlen(_), []).
built_in_classification(X=Y, "SEQ", seqlen(_), [X:i, Y:i]) :- use_subst_rules_for_equality(on).
built_in_classification(_X @ _Y, "SEQ", append(_), []).
built_in_classification(_X=_Y, "SEQ", append(_), []) :- use_subst_rules_for_equality(on).
built_in_classification(first(_), "SEQ", first(_), []).
built_in_classification(_X=_Y, "SEQ", first(_), []) :- use_subst_rules_for_equality(on).
built_in_classification(last(_), "SEQ", last(_), []).
built_in_classification(_X=_Y, "SEQ", last(_), []) :- use_subst_rules_for_equality(on).
built_in_classification(nonfirst(_), "SEQ", nonfirst(_), []).
built_in_classification(_X @ _Y, "SEQ", nonfirst(_), []).
built_in_classification(_X=_Y, "SEQ", nonfirst(_), []) :- use_subst_rules_for_equality(on).
built_in_classification(nonlast(_), "SEQ", nonlast(_), []).
built_in_classification(_X @ _Y, "SEQ", nonlast(_), []).
built_in_classification([_|_], "SEQ", nonlast(_), []).
built_in_classification(_X=_Y, "SEQ", nonlast(_), []) :- use_subst_rules_for_equality(on).
built_in_classification(_X = _Y, "SEQ", seq(_), []).
built_in_classification(_X <-> _Y, "SEQ", seq(_), []). built_in_classification(_X in _Y, "SETS", sets(_), []). built_in_classification(_X not_in _Y, "SETS", sets(_), []). built_in_classification(not _X, "SETS", sets(_), []). built_in_classification(_X \/ _Y, "SETS", sets(_), []). built_in_classification(_X /\ _Y, "SETS", sets(_), []). built_in_classification(_X \ _Y, "SETS", sets(_), []). built_in_classification(_X subset_of _Y, "SETS", sets(_), []). built_in_classification(_X strict_subset_of _Y, "SETS", sets(_), []). built_in_classification(_X or _Y, "SETS", sets(_), []). built_in_classification(_X=_Y, "SETS", sets(_), []) :- use_subst_rules_for_equality(on). built_in_classification(_, "SPECIAL", _, []). built_in_classification(_, "RECORD", _, []). /* Rule family declarations for BITWISE.RUL */ built_in_classification(bit__and(I,J), "BITWISE", bitwise(_), [I:i, J:i]). built_in_classification(bit__or(I,J), "BITWISE", bitwise(_), [I:i, J:i]). built_in_classification(bit__xor(I,J), "BITWISE", bitwise(_), [I:i, J:i]). built_in_classification(I <= J, "BITWISE", bitwise(_), [I:i, J:i]). built_in_classification(I = J, "BITWISE", bitwise(_), [I:i, J:i]). /* Rule family declarations for MODULAR.RUL */ built_in_classification(I mod J, "MODULAR", modular(_), [I:i, J:i]). built_in_classification(I <= J, "MODULAR", modular(_), [I:i, J:i]). built_in_classification(I <> J, "MODULAR", modular(_), [I:i, J:i]). built_in_classification(I < J, "MODULAR", modular(_), [I:i, J:i]). built_in_classification(I = J, "MODULAR", modular(_), [I:i, J:i]). /*** Definition of type_requirements(_,_,_,_) predicate ***/ type_requirements(A,B,C,D) :- spade_checker_prefix(SPADE_CHECKER), /* CFR048 */ ( atom(B), /* CFR048 */ name(B, BL), /* CFR048 */ triple_append(SPADE_CHECKER, BB, ".RUL", BL), /* CFR048 */ built_in_classification(A,BB,C,D) /* CFR048 */ ; /* CFR048 */ var(B), /* CFR048 */ built_in_classification(A,BB,C,D), /* CFR048 */ append(BB, ".RUL", BBL), /* CFR048 */ append(SPADE_CHECKER, BBL, BL), /* CFR048 */ name(B, BL) /* CFR048 */ ; user_classification(A,B,C,D) ). %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/newrules.pro0000644000175000017500000002711711753202340016436 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= (consult) :- ( command_arg(filename, FILENAME), ! ; prompt_user('Filename? '), rread(FILENAME) ), inform_user, user_rules(FILENAME), !. 
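% Illustrative sketch only: the shape of user rule file that the consult
% command above and user_rules/1 below are written to accept.  The family
% name "example" and both rules are hypothetical, not shipped with the
% toolset.
%
%    rule_family example:
%         X + Y requires [X:i, Y:i] &
%         X >= Y requires [X:i, Y:i].
%
%    example(1): X + Y may_be_replaced_by Y + X.
%    example(2): X >= 0 may_be_deduced_from [ X > 0 ].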
user_rules(FNAME) :- atom(FNAME), file_exists_and_is_readable(FNAME), ( plain_output(off), get_file_attrib(FNAME, [FULL_NAME|_], _), record_consultation_of(FULL_NAME) ; plain_output(on), record_consultation_of(FNAME) ), see(FNAME), seen, see(FNAME), retractall(banned_rule(FNAME,_)), process_rulefile(FNAME), write('Rulefile '), print(FNAME), write(' processed.'), nl, seen, /* CFR015 */ see_correct_input_stream, /* CFR015,051 */ !. /* CFR051 */ user_rules(FNAME) :- atom(FNAME), \+ file_exists_and_is_readable(FNAME), write('Warning: '), print(FNAME), write(' does not exist or cannot be read.'), nl, !, fail. inform_user :- write('-------------------------------------------------------------------'), nl, write('Badly-formed rules will now be displayed on the screen as they are'), nl, write('read and possibly on subsequent consultations of this rulefile.'), nl, write('-------------------------------------------------------------------'), nl, write('Watch for other messages informing you of bad rulenames or improper'), nl, write('rule conditions; bad rules will not be used by the checker.'), nl, write('If too serious an error is found, the rulefile may be abandoned.'), nl, write('-------------------------------------------------------------------'), nl, !. process_rulefile(FNAME) :- repeat, read_term_and_layout(RULE), process_single_rule(FNAME, RULE, RULENAME, PF, OK), ( var(OK), add_rulefacts(FNAME,RULENAME,PF) ; nonvar(OK), report_bad_rule(FNAME,RULE,RULENAME,OK) ), /*until*/ (RULE=end_of_file ; bad_rulefile, scrap_rulefile(FNAME)), !, ( logfact(text, _) ; assertz(logfact(text, 'No errors trapped in reading this rulefile.')) ), !. process_single_rule(FNAME, RULE, _, _, _) :- nonvar(RULE), RULE = (A : B), nonvar(A), A = (rule_family NAME), atom(NAME), nonvar(B), !, RNAME =.. [NAME,_], save_requirements(FNAME, RNAME, B), !, fail. process_single_rule(FNAME, RULE, RULENAME, PF, OK) :- check_structure(RULE,RULENAME,BODY,CONDITIONS,OK), check_rulename(RULENAME,OK), check_conditions(CONDITIONS,OK), check_body(BODY,PF,OK), check_rule_family_declared(FNAME, RULENAME, PF, OK), !. save_requirements(FNAME, RNAME, REST & REQ) :- save_single_requirement(FNAME, RNAME, REQ), save_requirements(FNAME, RNAME, REST), !. save_requirements(FNAME, RNAME, REQ) :- save_single_requirement(FNAME, RNAME, REQ), !. save_single_requirement(FNAME, RNAME, REQ) :- nonvar(REQ), REQ = (EXPR requires CONSTRAINTS), check_constraints(CONSTRAINTS), check_all_requirements(EXPR, CONSTRAINTS), maybe_add(user_classification(EXPR, FNAME, RNAME, CONSTRAINTS)), !. check_constraints([H|T]) :- nonvar(H), H = (V:C), var(V), atom(C), ( C=i ; C=ir ; C=ire ; C=e ; C=any ), check_constraints(T), !. check_constraints([]) :- !. check_all_requirements(EXPR, CONSTRAINTS) :- \+ any_holes_left(EXPR, CONSTRAINTS), !. any_holes_left(EXPR, CONSTRAINTS) :- instantiate_all_vars(EXPR, CONSTRAINTS), novars(EXPR), !, fail. any_holes_left(_EXPR, _CONSTRAINTS) :- !. instantiate_all_vars(_EXPR, []) :- !. instantiate_all_vars(EXPR, [(H:X) | T]) :- H=X, instantiate_all_vars(EXPR, T), !. check_structure(RULE,RULENAME,BODY,CONDITIONS,_) :- nonvar(RULE), ( RULE = (RULENAME: (BODY if CONDITIONS)) ; RULE = (RULENAME: (B may_be_deduced_from CONDITIONS)), BODY = infer(B) ; RULE = (RULENAME: (B may_be_deduced)), CONDITIONS = [], BODY = infer(B) ; RULE = (RULENAME: BODY), CONDITIONS = [] ), !. check_structure(_,_,_,_,bad_structure) :- !. check_rulename(RULENAME,_) :- nonvar(RULENAME), RULENAME=..[PF,ARG], atom(PF), integer(ARG), ARG >= 0, !. 
check_rulename(_,bad_rulename) :- !. check_rulename(_,_). check_conditions(CONDITIONS,OK) :- nonvar(CONDITIONS), ( CONDITIONS=[] ; CONDITIONS=[COND|REST], good_condition(COND), check_conditions(REST,OK) ), !. check_conditions(_,bad_conditions) :- !. check_conditions(_,_). good_condition(V) :- var(V), !. good_condition(goal(G)) :- !, permitted_immediate(G), !. good_condition(_V). permitted_immediate(G) :- var(G), !, fail. permitted_immediate(','(G1,G2)) :- permitted_immediate(G1), permitted_immediate(G2), !. permitted_immediate(';'(G1,G2)) :- permitted_immediate(G1), permitted_immediate(G2), !. permitted_immediate(not G) :- permitted_immediate(G). permitted_immediate(checktype(_,_)). permitted_immediate(type(_,_)). permitted_immediate(enumeration(_,_)). permitted_immediate(enumeration_list(_,_)). permitted_immediate(last(_,_)). permitted_immediate(in_order(_,_,_)). permitted_immediate(_ = _). permitted_immediate(_ \= _). permitted_immediate(in(_,_)). permitted_immediate(subset(_,_)). permitted_immediate(subst_vbl(_,_,_,_)). permitted_immediate(strict_sublist(_,_)). permitted_immediate(append(_,_,_)). permitted_immediate(set_union(_,_,_)). permitted_immediate(set_intersect(_,_,_)). permitted_immediate(set_lacking(_,_,_)). permitted_immediate(simplify(_,_)). permitted_immediate(integer(_)). permitted_immediate(nonvar(_)). permitted_immediate(var(_)). permitted_immediate(atom(_)). permitted_immediate(atomic(_)). permitted_immediate(length(_,_)). permitted_immediate(element(_,_,_)). permitted_immediate(iss(_,_)). permitted_immediate(genvar(_,_)). % Make these predicate calls, potentially made via the goal portion of user % created external rules, visible to the spxref tool. % These predicates are implemented as part of the checker. Note that some % are also called internally by the checker. :- public checktype/2. :- public enumeration/2. :- public enumeration_list/2. :- public genvar/2. :- public in_order/3. :- public iss/2. :- public set_intersect/3. :- public set_lacking/3. :- public set_union/3. :- public simplify/2. :- public strict_sublist/2. :- public subset/2. :- public subst_vbl/4. :- public type/2. % These predicates are standard in sicstus, or provided through a sicstus % library. The spxref tool is weak in processing these, thus it is % preferable to suppress their public declarations. % :- public append/3. % :- public atom/1. % :- public atomic/1. % :- public element/3. % :- public in/2. % :- public integer/1. % :- public last/2. % :- public length/2. % :- public nonvar/1. % :- public var/1. % The built-in rules make use of further predicates, not available to user % created external rules. Here, these potential command calls are also made % visible to the spxref tool. :- public build_other_cases/4. :- public find_element/3. :- public is_in/2. :- public make_record_equality_goal/4. :- public mk__function_name/3. :- public norm_typed_expr/3. :- public novars/1. :- public record_function/6. :- public try_deduce/1. check_body(BODY,[PF|REST],_) :- nonvar(BODY), ( BODY=(F may_be_replaced_by G), REST=[] ; BODY=(F & G are_interchangeable), ( var(G), REST=[_] ; nonvar(G), G=..[GOP|GArgs], make_up(R,GOP,GArgs), REST=[R] ) ; BODY=infer(F), REST=[] ), ( var(F), PF=(_) ; nonvar(F), F=..[FOP|FArgs], make_up(PF,FOP,FArgs) ), !. check_body(_,_,bad_body) :- !. check_body(_,_,_). 
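% Illustrative sketch only, not part of the toolset: good_condition/1 above
% accepts a goal/1 side condition only when every conjunct appears in the
% permitted_immediate/1 whitelist.  Both test predicates below are
% hypothetical.
example_condition_accepted :-
   good_condition(goal((integer(X), X = 3))).
example_condition_rejected :-
   \+ good_condition(goal(assertz(bad))).   /* assertz/1 is not whitelisted */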
check_rule_family_declared(FNAME, RULENAME, PF, OK) :- var(OK), !, PF=[X|R], ( var(X), XX=(_) ; X=..[XOP|Xargs], make_up(XX,XOP,Xargs) ), !, ( user_classification(XX, FNAME, RULENAME, _) ; OK = rule_family_not_declared_for_this_usage ), !, ( R=[] ; R=[Y], ( var(Y), YY=(_) ; Y=..[YOP|Yargs], make_up(YY,YOP,Yargs) ), !, ( user_classification(YY, FNAME, RULENAME, _) ; OK = rule_family_not_declared_for_this_usage ) ), !. check_rule_family_declared(_,_,_,_). add_rulefacts(FNAME,RULENAME,[PF]) :- ( rulefile(F,FNAME), var(F) ; var(PF), retractall(user_rulefile(_,FNAME)), assertz(user_rulefile(_,FNAME)) ; rulefile(PF,FNAME) ; assertz(user_rulefile(PF,FNAME)) ), ( rulefile(FNAME,RULENAME) ; RULENAME=..[FAMILY,_MEMBER], RN=..[FAMILY,_], assertz(user_rulefile(FNAME,RN)) ), !. add_rulefacts(FNAME,RULENAME,[PF1,PF2]) :- add_rulefacts(FNAME,RULENAME,[PF1]), add_rulefacts(FNAME,RULENAME,[PF2]), !. report_bad_rule(_,end_of_file,_,_) :- !. report_bad_rule(_,RULE,RULENAME,WARNING) :- write('!!! '), print(RULE), nl, write('*** WARNING: Rule '), print(RULENAME), write(' -- '), print(WARNING), maybe_add(logfact(text, 'Error(s) were found in consulting this rulefile')), nl, fail. report_bad_rule(_,_,RULENAME,bad_rulename) :- var(RULENAME), assertz(bad_rulefile), write('*** ERROR TOO SERIOUS: Cannot use this file as it stands.'), nl, write('PROCESSING ABORTED.'), nl, nl, maybe_add(logfact(text, 'SERIOUS ERROR: Could not accept this file for use.')), !. report_bad_rule(_,_,_,bad_rulename) :- nl, !. report_bad_rule(FNAME,_,RULENAME,_) :- assertz(banned_rule(FNAME,RULENAME)), nl, !. scrap_rulefile(FNAME) :- retractall(user_rulefile(_,FNAME)), retractall(user_rulefile(FNAME,_)), retractall(user_classification(_,FNAME,_,_)), retractall(banned_rule(FNAME,_)), retractall(bad_rulefile), !. record_consultation_of(_) :- record_consults(off), !. record_consultation_of(FULL_NAME) :- assertz(logfact((consult), FULL_NAME)), !. %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/inferenc2.pro0000644000175000017500000003165411753202340016446 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /*** INFER(F) - try to show F follows directly from current hypotheses ***/ infer(A) :- var(A), !, fail. infer(A) :- ( simplify_in_infer(on), simplify(A, B), !, ( see_if_can_infer(B) ; A \= B, see_if_can_infer(A) ; standardise_in_infer(on), norm_typed_expr(A, boolean, C), see_if_can_infer(C) ) ; see_if_can_infer(A) ; standardise_in_infer(on), norm_typed_expr(A, boolean, B), A \= B, see_if_can_infer(B) ), !. see_if_can_infer(X) :- could_infer(X), !. 
see_if_can_infer(X) :- could_not_infer(X), !, fail. see_if_can_infer(X) :- do_infer(X), assertz(could_infer(X)), !. see_if_can_infer(X) :- assertz(could_not_infer(X)), !, fail. % Initialise. % The dynamic 'used/1' predicate is central to the operation of the % 'testused/1' predicate, which is exploited by some portions of % 'do_infer/1'. The 'used/1' predicate records when properties have been % explored during a portion of reasoning. It should be initialised before % each new portion of reasoning, to prevent old results undermining the % proof effort. do_infer(_) :- retractall(used(_)), fail. do_infer(A=A) :- !. do_infer(A<>A) :- !, fail. do_infer([H1|T1]=[H2|T2]) :- do_infer(H1=H2), do_infer(T1=T2), !. do_infer([H1|_]<>[H2|_]) :- do_infer(H1<>H2), !. do_infer([_|T1]<>[_|T2]) :- do_infer(T1<>T2), !. do_infer([]<>[_|_]) :- !. do_infer([_|_]<>[]) :- !. do_infer((set [])<>(set [_|_])) :- !. do_infer((set [_|_])<>(set [])) :- !. do_infer(true) :- !. do_infer(not false) :- !. do_infer(A) :- hyp(_, A), !. do_infer(_) :- % Discharge VC if auto_infer_from_false is switched on. auto_infer_from_false(on), hyp(_, false), !. do_infer(A and B) :- do_infer(A), do_infer(B), !. do_infer(A or B) :- (do_infer(A) ; do_infer(B)), !. do_infer(A -> B) :- (do_infer(B) ; do_infer(not A)), !. do_infer(A <-> B) :- do_infer(A -> B), do_infer(B -> A), !. do_infer(not not A) :- do_infer(A), !. do_infer(not A) :- neg(A,B), B\=(not A), do_infer(B), !. do_infer(A=B) :- find_mutual_types(A, B, T), try_to_infer((=), A, B, T), !. do_infer(A<>B) :- find_mutual_types(A, B, T), try_to_infer((<>), A, B, T), !. do_infer(A<=B) :- find_mutual_types(A, B, T), try_to_infer((<=), A, B, T), !. do_infer(A>=B) :- find_mutual_types(A, B, T), try_to_infer((>=), A, B, T), !. do_infer(AB) :- find_mutual_types(A, B, T), try_to_infer((>), A, B, T), !. do_infer(E in (set [X|Y])) :- ( do_infer(E=X) ; do_infer(E in (set Y)) ), !. do_infer(E in X \/ Y) :- ( do_infer(E in X) ; do_infer(E in Y) ), !. do_infer(E in X /\ Y) :- do_infer(E in X), do_infer(E in Y), !. do_infer(E in X \ Y) :- do_infer(E in X), do_infer(E not_in Y), !. do_infer(_E not_in (set [])) :- !. do_infer(E not_in (set [X|Y])) :- do_infer(E<>X), do_infer(E not_in (set Y)), !. do_infer(E not_in X \/ Y) :- do_infer(E not_in X), do_infer(E not_in Y), !. do_infer(E not_in X /\ Y) :- ( do_infer(E not_in X) ; do_infer(E not_in Y) ), !. do_infer(E not_in X \ Y) :- ( do_infer(E not_in X) ; do_infer(E in Y) ), !. do_infer(X subset_of Y) :- do_infer(X = Y), !. do_infer((set []) subset_of _X) :- !. do_infer(X \ _Y subset_of Z) :- do_infer(X subset_of Z), !. do_infer(X \ Y subset_of X \ Z) :- ( do_infer(Z subset_of Y) ; do_infer(X /\ Z subset_of X /\ Y) ), !. do_infer(X \/ Y subset_of X \/ Z) :- do_infer(Y subset_of Z), !. do_infer(Y \/ X subset_of Z \/ X) :- do_infer(Y subset_of Z), !. do_infer(Y \/ X subset_of X \/ Z) :- do_infer(Y subset_of Z), !. do_infer(X \/ Y subset_of Z \/ X) :- do_infer(Y subset_of Z), !. do_infer(X /\ Y subset_of X /\ Z) :- do_infer(Y subset_of Z), !. do_infer(Y /\ X subset_of Z /\ X) :- do_infer(Y subset_of Z), !. do_infer(Y /\ X subset_of X /\ Z) :- do_infer(Y subset_of Z), !. do_infer(X /\ Y subset_of Z /\ X) :- do_infer(Y subset_of Z), !. do_infer(X /\ Y subset_of X \/ Y) :- !. do_infer(Y /\ X subset_of X \/ Y) :- !. do_infer(X subset_of Y \/ Z) :- ( do_infer(X subset_of Y) ; do_infer(X subset_of Z) ), !. do_infer(X subset_of Y /\ Z) :- do_infer(X subset_of Y), do_infer(X subset_of Z), !. 
do_infer(X /\ Y subset_of Z) :- ( do_infer(X subset_of Z) ; do_infer(Y subset_of Z) ), !. do_infer((set X) subset_of (set Y)) :- is_subset_of(X, Y), !. do_infer((set []) strict_subset_of X) :- set_infrule(_E in X), !. do_infer(X \ Y strict_subset_of Z) :- /* CFR012 */ ( do_infer(X strict_subset_of Z) ; do_infer(X subset_of Z), set_infer(Y /\ Z <> (set [])) ), !. do_infer(X /\ Y strict_subset_of Z) :- ( do_infer(X strict_subset_of Z) ; do_infer(Y strict_subset_of Z) ), !. do_infer(X strict_subset_of Y \/ Z) :- ( do_infer(X strict_subset_of Y) ; do_infer(X strict_subset_of Z) ), !. do_infer((set X) strict_subset_of (set Y)) :- is_strict_subset_of(X, Y), !. do_infer(first([H|_T]) = X) :- do_infer(H=X), !. do_infer(first([H|_T] @ _Z) = X) :- do_infer(H=X), !. do_infer(last([H|T]) = X) :- last([H|T],L), do_infer(L=X), !. do_infer(last(_F @ [H|T]) = X) :- last([H|T],L), do_infer(L=X), !. try_to_infer(RO, A, B, TYPE) :- GOAL =.. [RO, A, B], ( type(TYPE, set(_)), !, set_infer(GOAL) ; type(TYPE, sequence(_)), !, sequence_infer(GOAL) ; type(TYPE, enumerated), enumeration(TYPE, ENUMLIST), !, enumerated_infer(GOAL, ENUMLIST) ; retractall(used(_)), !, deduce(GOAL, TYPE) ), !. /*** INTEXP(E) - succeeds if E is a nice integers-only expression ***/ intexp(A) :- var(A), !, fail. intexp(A) :- integer(A), !. intexp(-A) :- intexp(A), !. intexp(A+B) :- intexp(A), intexp(B), !. intexp(A-B) :- intexp(A), intexp(B), !. intexp(A*B) :- intexp(A), intexp(B), !. intexp(A div B) :- intexp(A), intexp(B), !. /*** intexp(A mod B) :- intexp(A), intexp(B), !. TEMPORARILY DELETED ***/ /*** SET_INFER(X R Y) - set strategies ***/ set_infer(_X /\ (set []) = (set [])) :- !. set_infer((set []) = _X /\ (set [])) :- !. set_infer((set []) /\ _X = (set [])) :- !. set_infer((set []) = (set []) /\ _X) :- !. set_infer(X /\ X = X) :- !. set_infer(X = X /\ X) :- !. set_infer(X /\ Y = Y /\ X) :- !. set_infer(X \/ (set []) = X) :- !. set_infer(X = X \/ (set [])) :- !. set_infer((set []) \/ X = X) :- !. set_infer(X = (set []) \/ X) :- !. set_infer(X \/ X = X) :- !. set_infer(X = X \/ X) :- !. set_infer(X \/ Y = Y \/ X) :- !. set_infer(X \ (set []) = X) :- !. set_infer(X = X \ (set [])) :- !. set_infer((set []) \ _X = (set [])) :- !. set_infer((set []) = (set []) \ _X) :- !. set_infer(X \ X = (set [])) :- !. set_infer(X=Y) :- ( X=Y ; set_infrule(X=Y) ; set_infrule(X subset_of Y), set_infrule(Y subset_of X) ), !. set_infer((set [_X|_Y]) <> (set [])) :- !. set_infer((set []) <> (set [_X|_Y])) :- !. set_infer(X <> (set [])) :- set_infrule(_E in X), !. set_infer((set []) <> X) :- set_infrule(_E in X), !. set_infer(X <> Y) :- set_infrule(X <> Y), !. set_infrule(X) :- fact(X). set_infrule(X=Y) :- fact(X=Z), testused(X=Z), set_infrule(Y=Z). set_infrule(X<>Y) :- fact(X=Z), testused(X=Z), set_infrule(Z<>Y). set_infrule(X<>Y) :- fact(X<>Z), set_infrule(Z=Y). set_infrule(X<>Y) :- fact(Y<>Z), set_infrule(X=Z). set_infrule(X in Y) :- ( fact(not (X not_in Y)) ; fact(X=Z), testused(X=Z), set_infrule(Z in Y) ; fact(Y=Z), testused(Y=Z), set_infrule(X in Z) ). set_infrule(X not_in Y) :- ( fact(not (X in Y)) ; fact(X=Z), testused(X=Z), set_infrule(Z not_in Y) ; fact(Y=Z), testused(Y=Z), set_infrule(X not_in Z) ). sequence_infer(X = []) :- do_infer(length(X)=0), !. sequence_infer([] = X) :- do_infer(length(X)=0), !. sequence_infer(X @ Y = []) :- sequence_infer(X=[]), sequence_infer(Y=[]), !. sequence_infer([] = X @ Y) :- sequence_infer(X=[]), sequence_infer(Y=[]), !. sequence_infer(X @ [] = X) :- !. sequence_infer([] @ X = X) :- !. 
sequence_infer(X = X @ []) :- !. sequence_infer(X = [] @ X) :- !. sequence_infer([X|XT]=[Y|YT]) :- do_infer(X=Y), sequence_infer(XT=YT), !. sequence_infer(first(X) @ nonfirst(X) = X) :- !. sequence_infer(X = first(X) @ nonfirst(X)) :- !. sequence_infer(nonlast(X) @ last(X) = X) :- !. sequence_infer(X = nonlast(X) @ last(X)) :- !. sequence_infer(nonfirst([_H|T]) = X) :- do_infer(T=X), !. sequence_infer(nonlast([H|T]) = X) :- append(N, [_L], [H|T]), do_infer(N=X), !. sequence_infer(X1 @ Y1 = X2 @ Y2) :- do_infer(X1=X2), do_infer(Y1=Y2), !. sequence_infer(X=Y) :- ( X=Y ; sequence_infrule(X=Y) ), !. sequence_infer([_|_] <> []) :- !. sequence_infer([] <> [_|_]) :- !. sequence_infer([X|_XT] <> [Y|_YT]) :- do_infer(X<>Y), !. sequence_infer([_X|XT] <> [_Y|YT]) :- sequence_infer(XT<>YT), !. sequence_infer(X @ Y <> []) :- ( sequence_infer(X <> []) ; sequence_infer(Y <> []) ), !. sequence_infer(X @ Y <> X) :- sequence_infer(Y <> []), !. sequence_infer(X @ Y <> Y) :- sequence_infer(X <> []), !. sequence_infer(X @ Y <> X @ Z) :- sequence_infer(Y <> Z), !. sequence_infer(X @ Y <> Z @ Y) :- sequence_infer(X <> Z), !. sequence_infer(X <> Y) :- sequence_infrule(X <> Y), !. sequence_infrule(X) :- fact(X). sequence_infrule(X=Y) :- fact(X=Z), testused(X=Z), sequence_infrule(Y=Z). sequence_infrule(X<>Y) :- fact(X=Z), testused(X=Z), sequence_infrule(Z<>Y). sequence_infrule(X<>Y) :- fact(X<>Z), sequence_infrule(Z=Y). sequence_infrule(X<>Y) :- fact(Y<>Z), sequence_infrule(X=Z). enumerated_infer(X, L) :- enum_infrule(X, L), !. enum_infrule(X, _) :- fact(X). enum_infrule(X=Y, L) :- ( fact(X=Z), testused(X=Z), enum_infrule(Z=Y, L) ; testused(Y<=X), enum_infrule(Y<=X, L), testused(X<=Y), enum_infrule(X<=Y, L) ). enum_infrule(X<=Y, L) :- ( enum_infrule(X=Y, L) ; enum_infrule(X=Y, L) :- enum_infrule(Y<=X, L). enum_infrule(XY, L) :- enum_infrule(YY, L) :- ( is_in(X, L), is_in(Y, L), X\=Y ; enum_infrule(XY, L) ; fact(Y=Z), testused(Y=Z), enum_infrule(X<>Z, L) ). find_mutual_types(A, B, T) :- checktype(A, T), checktype(B, T), !. %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/initvals.pro0000644000175000017500000001231611753202340016416 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /*** DISPLAY_SUBGOALS_MAX(N) - maximum number of subgoals for rule display ***/ :- dynamic(display_subgoals_max/1). display_subgoals_max(10). /*** DISPLAY_VAR_FREE_ONLY(FLAG) - on <-> only display if subgoals var-free ***/ :- dynamic(display_var_free_only/1). display_var_free_only(off). 
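% Illustrative sketch only, not part of the Checker: these single-fact flags
% are normally flipped by retracting the old value and asserting the new one
% (the "set" command elsewhere in the Checker is the supported way to do
% this).  example_set_flag/2 is a hypothetical helper.
example_set_flag(display_var_free_only, Value) :-
   ( Value = on ; Value = off ),
   !,
   retractall(display_var_free_only(_)),
   assertz(display_var_free_only(Value)).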
/*** CASE_POINTER(N) - proof-by-cases depth counter: initially zero ***/ :- dynamic(case_pointer/1). case_pointer(0). /*** ADDED 8/3/85 ***/ /*** INPUT_FROM_TERMINAL - true if standard input is sys$input ***/ :- dynamic(input_from_terminal/0). input_from_terminal. /* retracted by command in file if not */ % The 'input_from_terminal' predicate is not used in the Checker code. % However, there is a suggestion that it might be retracted in a command % file. The behaviour of the retract will be different, depending on % whether or not the predicate is added here. Thus to preserve this % potential legacy behaviour, the code is not removed. The potential % retract is considered to be a potential call, and is thus made visible to % the spxref tool. :- public input_from_terminal/0. /*** VC_STANDARDISATION(FLAG) - automatic standardisation status (on/off) ***/ :- dynamic(vc_standardisation/1). vc_standardisation(off). /*** ECHO(FLAG) - echoing of file input (on/off) ***/ :- dynamic(echo/1). echo(on). /*** AUTO_DONE(FLAG) - automatic multi-level exit of "done" (on/off) ***/ :- dynamic(auto_done/1). auto_done(on). /*** SIMPLIFY_IN_INFER(FLAG) - whether "infer" may use simplifier (on/off) ***/ :- dynamic(simplify_in_infer/1). simplify_in_infer(on). /***MODIFIED***/ /*** SIMPLIFY_DURING_LOAD(FLAG) - whether "loadvc" may simplify (on/off) ***/ :- dynamic(simplify_during_load/1). simplify_during_load(on). /***MODIFIED***/ /*** STANDARDISE_IN_INFER(FLAG) - can "infer" use standardiser? (on/off) ***/ :- dynamic(standardise_in_infer/1). standardise_in_infer(off). /*** step_number(N) - step number ***/ :- dynamic(step_number/1). step_number(0). /*** Indentation for proof log - should initially be 0 ***/ :- dynamic(indentation/1). indentation(0). /*** Proof frames indentation increment for proof log ***/ :- dynamic(indentation_increment/1). indentation_increment(2). /*** Typechecking(FLAG) -- should normally be ON ***/ :- dynamic(typechecking/1). typechecking(on). /*** Prooflog_width(WIDTH) -- maximum columns for proof log ***/ :- dynamic(prooflog_width/1). prooflog_width(0). /*** record_consults(FLAG) -- whether to record rule-file consultations in proof log ***/ :- dynamic(record_consults/1). record_consults(on). /*** inverse_video(X)/normal_video(X) -- for highlighting on screen ***/ :- dynamic(inverse_video/1). inverse_video([]). :- dynamic(normal_video/1). normal_video([]). /*** typechecking_during_load(FLAG) -- prevent typechecking if VCGenerated ***/ :- dynamic(typechecking_during_load/1). typechecking_during_load(on). /*** use_subst_rules_for_equality(FLAG) -- allow infer to use rewrite rules ***/ :- dynamic(use_subst_rules_for_equality/1). use_subst_rules_for_equality(on). /*** command_logging(FLAG) -- whether or not to log user's commands/replies ***/ :- dynamic(command_logging/1). command_logging(on). /* CFR015 */ /*** show_vc_changes(FLAG) -- whether or not to show new/changed hyps/concs ***/ :- dynamic(show_vc_changes/1). show_vc_changes(on). /*** auto_newvc(FLAG) -- whether or not to do a newvc automatically ***/ :- dynamic(auto_newvc/1). auto_newvc(on). /* CFR025 */ /* newline_after_prompts(FLAG) -- whether or not to do a after each prompt ***/ :- dynamic(newline_after_prompts/1). newline_after_prompts(off). /* CFR1334 */ /*** plain_output(FLAG) -- plain output mode on or off. Default off ***/ :- dynamic(plain_output/1). plain_output(off). /*** overwrite_warning(FLAG) -- on or off. Default off ***/ :- dynamic(overwrite_warning/1). overwrite_warning(off). 
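% Illustrative sketch only, not part of the Checker: the on/off facts above
% are read as plain guards in clause bodies.  example_banner/0 is a
% hypothetical predicate that writes a decorated heading only when
% plain_output is off, in the same style as the banner-writing code.
example_banner :-
   plain_output(off),
   !,
   write('*** Proof session ***'), nl.
example_banner.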
% Prompt user for more replaces -- Default off. :- dynamic(replace_more/1). replace_more(off). % Apply rule to discharge VCs with false hypothesis. % Default is off. :- dynamic(auto_infer_from_false/1). auto_infer_from_false(on). %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/save.pro0000644000175000017500000001703211753202340015523 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= save_state :- write('Saving'), csvfile_name(OUTFILE), file_can_be_written(OUTFILE), tell(OUTFILE), fetch_date_and_time(DATE, TIME), write('/* SAVED: '), print(DATE), write(', at '), print(TIME), write(' */'), nl, !, do_the_saving(OUTFILE). save_state :- csvfile_name(OUTFILE), \+ file_can_be_written(OUTFILE), write('Warning: '), print(OUTFILE), write(' cannot be written.'), nl, !, close_all_streams, halt. do_the_saving(OUTFILE) :- to_be_saved(PREDICATE), tell(user), write('.'), save_a_copy(OUTFILE, PREDICATE). do_the_saving(OUTFILE) :- tell(OUTFILE), told, !. save_a_copy(OUT, PRED) :- tell(OUT), call(PRED), distinguish_vars_in(PRED, 1, _), safe_write(PRED, 80, _), write('.'), nl, fail. safe_write(A1, OC, NC) :- atom(A1), double_slash(A1, A), size(A, L), ( L=OC, nl, put_code(39), print(A), put_code(39), ( L=<80, NC is 80-L ; L>80, NC=0 ) ), !. safe_write(A, OC, NC) :- integer(A), size(A, L), ( L=OC, nl, printq(A), ( L=<80, NC is 80-L ; L>80, NC=0 ) ), !. safe_write(A, OC, NC) :- var(A), !, write('_'), NC is OC+1, !. safe_write([X|Y], OC, NC) :- write('['), safe_write_list([X|Y], OC, NC), write(']'), !. safe_write('$$$'(N), OC, NC) :- write('A'), print(N), !, NCX is OC-3, ( NCX > 0, NC = NCX ; nl, NC = 79 ), !. safe_write(A, OC, NC) :- \+(atomic(A)), nonvar(A), A =.. [F|ARGS], safe_write(F, OC, NC1), write('('), safe_write_list(ARGS, NC1, NC2), ( NC2>0, write(')'), NC is NC2-1 ; NC2=<0, nl, write(')'), NC=79 ), !. safe_write_list([A], OC, NC) :- safe_write(A, OC, NC), !. safe_write_list([A|AL], OC, NC) :- safe_write(A, OC, NC1), write(', '), NC2 is NC1-2, safe_write_list(AL, NC2, NC), !. distinguish_vars_in(VAR, N, N1) :- var(VAR), !, VAR = '$$$'(N), N1 is N+1, !. distinguish_vars_in(ATOMIC, N, N) :- atomic(ATOMIC), !. distinguish_vars_in(EXPRESSION, N, M) :- EXPRESSION =.. [_OP|ARGS], !, distinguish_vars_in_list(ARGS, N, M), !. distinguish_vars_in_list([ARG], N, M) :- distinguish_vars_in(ARG, N, M), !. distinguish_vars_in_list([EXPRESSION|ARGS], N, M) :- distinguish_vars_in(EXPRESSION, N, N1), !, distinguish_vars_in_list(ARGS, N1, M), !. distinguish_vars_in_list([], N, N) :- !. to_be_saved(auto_done(_)). to_be_saved(auto_newvc(_)). 
/* CFR1334 */ to_be_saved(banned_rule(_,_)). to_be_saved(case(_,_,_)). to_be_saved(case_pointer(_)). to_be_saved(command_logging(_)). /* CFR1334 */ to_be_saved(conc(_,_)). to_be_saved(csvfile_name(_)). to_be_saved(current_root(_,_)). to_be_saved(current_vc(_,_)). to_be_saved(current_vc_no(_)). to_be_saved(deleted(_)). to_be_saved(deleted_hyp(_,_)). to_be_saved(display_subgoals_max(_)). to_be_saved(display_var_free_only(_)). to_be_saved(echo(_)). to_be_saved(enumeration(_,_)). to_be_saved(fdl_file_title(_)). to_be_saved(fdlfile_name(_)). to_be_saved(forgotten(_)). to_be_saved(function(_,_,_)). to_be_saved(function_template(_,_,_)). to_be_saved(hyp(_,_)). to_be_saved(indentation(_)). to_be_saved(indentation_increment(_)). to_be_saved(inverse_video(_)). to_be_saved(is_vc(_)). to_be_saved(logfile_name(_)). to_be_saved(mk__function_name(_,_,_)). /*1.4*/ to_be_saved(newline_after_prompts(_)). /* CFR1334 */ to_be_saved(normal_video(_)). to_be_saved(on_case(_,_,_)). to_be_saved(on_filename(_)). to_be_saved(prooflog_width(_)). to_be_saved(proved_for_case(_,_)). to_be_saved(qvar(_)). to_be_saved(record_consults(_)). /* CFR1334 */ to_be_saved(record_function(_,_,_,_,_,_)). /* CFR029 */ to_be_saved(ruleused(_)). to_be_saved(ruleused_this_session(_)). to_be_saved(saved_vc(_,_)). to_be_saved(show_vc_changes(_)). /* CFR1334 */ to_be_saved(simplify_in_infer(_)). to_be_saved(simplify_during_load(_)). to_be_saved(spark_enabled). /* CFR034 */ to_be_saved(step_number(_)). to_be_saved(subgoal_formula(_,_,_,_)). to_be_saved(type(_,_)). to_be_saved(type_alias(_,_)). to_be_saved(typechecking(_)). to_be_saved(typechecking_during_load(_)). to_be_saved(use_subst_rules_for_equality(_)). to_be_saved(used_ident(_,_)). to_be_saved(user_rulefile(_,_)). to_be_saved(user_classification(_,_,_,_)). to_be_saved(var_const(_,_,_)). to_be_saved(vc(_,_)). to_be_saved(vcgfile_name(_)). to_be_saved(vcs_to_prove(_)). to_be_saved(vc_name(_)). /* The following predicates, double_slash and double_slash_list, are required * due to Poplog's Itemiser. In short, on reading any two characters from an * input stream where the first character is "\", the Poplog itemiser * automatically interprets these two characters as a control characted. For * example, "\t" gets interpreted as character 9 (IE, tab). This means that * we never get to see a single "\" -- any input files need to say "\\". * * So, when writing the rulebase, we need to make sure that any single "\" * gets doubled-up, so that when the Checker reads in the RLB, it sees correct * paths. */ double_slash(A,A1) :- atom(A), !, name(A, ALIST), double_slash_list(ALIST, ALIST2), name(A1, ALIST2), !. double_slash_list([], []). double_slash_list([92|ALIST], [92,92|ALIST2]) :- double_slash_list(ALIST, ALIST2). double_slash_list([X|ALIST], [X|ALIST2]) :- double_slash_list(ALIST, ALIST2). %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/toplevel.pro0000644000175000017500000012137611753202340016426 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. 
The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= start :- restore_temp_del_hyps, repeat, see_correct_input_stream, /* CFR015 */ nl, write_check_prompt, /* CFR015 */ retractall(command_arg(_,_)), read_user_command(COMMAND,ARGUMENTS), check_command_arguments(COMMAND,ARGUMENTS), execute_command(COMMAND), ( /* CFR005 */ COMMAND \= exit, /* CFR005 */ COMMAND \= forceexit, write_log, /* CFR005 */ maybe_do_auto_newvc /* CFR025 */ ; /* CFR005 */ COMMAND = exit /* CFR005 */ ; /* CFR005 */ COMMAND = forceexit ), /* CFR005 */ /* UNTIL */ verified_exit_command(COMMAND), /* CFR005 */ retract(step_number(N)), /* CFR005 */ M is N+1, /* CFR005 */ asserta(step_number(M)), /* CFR005 */ write_log, /* CFR005 */ set_exit_status, close_all_streams, halt. /* Check performed in getdcldat to ensure FILE exists and is readable */ see_correct_input_stream :- /* CFR015 */ perform_script_file(FILE), /* CFR015 */ see(FILE), /* CFR015 */ !. /* CFR015 */ see_correct_input_stream :- /* CFR015 */ see(user), /* CFR015 */ !. /* CFR015 */ write_check_prompt :- /* CFR015 */ seeing(user), /* CFR015 */ prompt_user('CHECK|:'), /* CFR015 */ !. /* CFR015 */ write_check_prompt :- /* CFR015 */ write('[EXECUTE]: '), /* CFR015 */ !. /* CFR015 */ maybe_do_auto_newvc :- /* CFR025 */ retract(time_for_new_vc), /* CFR025 */ auto_newvc(on), /* CFR025 */ \+ vcs_to_prove([]), /* CFR025 */ nl, /* CFR025 */ write('AUTO-NEWVC:'), /* CFR025 */ execute_command(newvc), /* CFR025 */ write_log, /* CFR025 */ !. /* CFR025 */ check_command_arguments(COMMAND,ARGUMENTS) :- ( parse_command_arguments(COMMAND,ARGUMENTS), ! ; show_permitted_arguments(COMMAND) ), !. read_user_command(COMMAND,ARGUMENTS) :- fetch_keyword(KEYWORD,NEXTCHAR), ( match_command(KEYWORD,COMMAND), !, ( NEXTCHAR=46, ARGUMENTS=[] ; rread(ARGUMENTS) ) ; warn_of_unknown_command, !, ( NEXTCHAR=46 ; eol_char(EOL), lskip(EOL) /* CFR015 */ ), !, fail ), !. fetch_keyword(KEYWORD,NEXTCHAR) :- fetch_chars(KEYWORD,[],NEXTCHAR), !. fetch_chars(KEYWORD,SO_FAR,NEXTCHAR) :- eol_char(EOL), lget0(CHAR), /* CFR015 */ ( CHAR=46, !, KEYWORD=SO_FAR, NEXTCHAR=46, lskip(EOL) /* CFR015 */ ; ( CHAR=9 ; CHAR=EOL ; CHAR=32 ), !, ( SO_FAR=[], !, fetch_chars(KEYWORD,SO_FAR,NEXTCHAR) ; KEYWORD=SO_FAR, ( CHAR=EOL, !, NEXTCHAR=46 ; NEXTCHAR=32 ) ) ; make_lower_case(CHAR,LOWER_CASE_CHAR), name(CHAR_ATOM,[LOWER_CASE_CHAR]), append(SO_FAR,[CHAR_ATOM],FURTHER), fetch_chars(KEYWORD,FURTHER,NEXTCHAR) ), !. lskip(CH) :- /* CFR015 */ repeat, /* CFR015 */ lget0(X), /* CFR015 */ /* until */ X = CH, /* CFR015 */ !. /* CFR015 */ match_command([r,e,p|LACE],replace) :- gen_append(LACE,_,[l,a,c,e]), !. match_command([l,i|ST],list) :- gen_append(ST,_,[s,t]), !. match_command([s,t,a,n|DARDISE],standardise) :- gen_append(DARDISE,_,[d,a,r,d,i,s,e]), !. match_command([d,o|NE],done) :- gen_append(NE,_,[n,e]), !. match_command([i,n,f|ER],infer) :- gen_append(ER,_,[e,r]), !. match_command([d,e,d|UCE],deduce) :- gen_append(UCE,_,[u,c,e]), !. match_command([s,i|MPLIFY],simplify) :- gen_append(MPLIFY,_,[m,p,l,i,f,y]), !. match_command([c,a,s,e],case) :- !. 
match_command([s,t,a,t|US],status) :- gen_append(US,_,[u,s]), !. match_command([f,o,r,w|ARDCHAIN],forwardchain) :- gen_append(ARDCHAIN,_,[a,r,d,c,h,a,i,n]), !. match_command([e,x,i,t],exit) :- !. match_command([f,o,r,c,e,e,x,i,t],forceexit) :- !. match_command([h|ELP],'help') :- gen_append(ELP,_,[e,l,p]), !. match_command([f,o,r,g|ET],forget) :- gen_append(ET,_,[e,t]), !. match_command([r,e,m|EMBER],remember) :- gen_append(EMBER,_,[e,m,b,e,r]), !. match_command([d,e,l|ETE],delete) :- gen_append(ETE,_,[e,t,e]), !. match_command([u,n,d|ELETE],undelete) :- gen_append(ELETE,_,[e,l,e,t,e]), !. match_command([n,e|WVC],newvc) :- gen_append(WVC,_,[w,v,c]), !. match_command([p,r,o|VE],prove) :- gen_append(VE,_,[v,e]), !. match_command([q,u,i,t],quit) :- !. match_command([c,o|NSULT],consult) :- gen_append(NSULT,_,[n,s,u,l,t]), !. match_command([u,n,w|RAP],unwrap) :- gen_append(RAP,_,[r,a,p]), !. match_command([i,n,s|TANTIATE],instantiate) :- gen_append(TANTIATE,_,[t,a,n,t,i,a,t,e]), !. match_command([s,e|T],set) :- gen_append(T,_,[t]), !. match_command([s,h|OW],show) :- gen_append(OW,_,[o,w]), !. match_command([d,e,c|LARE],declare) :- gen_append(LARE,_,[l,a,r,e]), !. match_command([s,a|VE],save_state) :- gen_append(VE,_,[v,e]), !. match_command([p,r,i|NTVC],printvc) :- gen_append(NTVC,_,[n,t,v,c]), !. match_command([t|RAVERSE],traverse) :- gen_append(RAVERSE,_,[r,a,v,e,r,s,e]), !. match_command([e,x,e|CUTE],execute) :- /* CFR017 */ gen_append(CUTE, _, [c,u,t,e]), !. /* CFR017 */ match_command([c,a,l,l,p,r,o],callpro) :- !. /*** PARSE_COMMAND_ARGUMENTS(CMND,ARGS) -- check & save o.k. ARGS for CMND ***/ parse_command_arguments(_,X) :- var(X), nl, write('!!! VAR argument not permitted: retype command.'), nl, !, fail. parse_command_arguments(_,[]) :- !. parse_command_arguments(list,HC#N) :- ( HC==h ; HC==c ), !, integer(N), assertz(command_arg(list,HC#N)), !. parse_command_arguments(list,HHCC-U) :- nonvar(HHCC), integer(U), HHCC=HC#L, ( HC==h ; HC==c ), !, integer(L), LN, assertz(command_arg(to_do,c#N-M)), /* CFR024 */ !. parse_command_arguments(case,N) :- !, integer(N), assertz(command_arg(case_number,N)), !. parse_command_arguments(prove,EXPRN_by_STRAT on FORM) :- !, nonvar(EXPRN_by_STRAT), EXPRN_by_STRAT=(EXPRN by STRAT), strategy_keyword(STRAT,cases), parse_expression(FORM,FORMULA), parse_goal_expression(EXPRN,EXPRESSION), /* CFR008 */ assertz(command_arg(on,FORMULA)), assertz(command_arg(strategy,cases)), assertz(command_arg(expression,EXPRESSION)), !. parse_command_arguments(prove,EXPRN by STRAT) :- !, strategy_keyword(STRAT,STRATEGY), parse_goal_expression(EXPRN,EXPRESSION), /* CFR008 */ assertz(command_arg(strategy,STRATEGY)), assertz(command_arg(expression,EXPRESSION)), !. parse_command_arguments(unwrap,HC) :- check_hyp_or_conc(HC), !, assertz(command_arg(expression,HC)), !. parse_command_arguments(instantiate,QVAR with EXPRN) :- atom(QVAR), qvar(QVAR), !, parse_expression(EXPRN,EXPRESSION), assertz(command_arg(value,EXPRESSION)), assertz(command_arg(var,QVAR)), !. parse_command_arguments(instantiate,QVAR) :- atom(QVAR), qvar(QVAR), !, assertz(command_arg(var,QVAR)), !. parse_command_arguments(instantiate,EXPRN) :- \+ qvar(EXPRN), !, parse_goal_expression(EXPRN,EXPRESSION), /* CFR008 */ assertz(command_arg(value,EXPRESSION)), !. parse_command_arguments(newvc,N) :- !, integer(N), assertz(command_arg(vc_number,N)), !. parse_command_arguments(consult,FILENAME) :- !, atom(FILENAME), assertz(command_arg(filename,FILENAME)), !. 
parse_command_arguments(execute,FILENAME) :- /* CFR017 */ !, /* CFR017 */ atom(FILENAME), /* CFR017 */ assertz(command_arg(filename,FILENAME)), /* CFR017 */ !. /* CFR017 */ parse_command_arguments('help',IDENTIFIER) :- assertz(command_arg(subject,IDENTIFIER)), !. parse_command_arguments(forget,X & Y) :- nonvar(X), nonvar(Y), !, parse_command_arguments(forget,X), parse_command_arguments(forget,Y), !. parse_command_arguments(forget,H#N) :- H==h, !, integer(N), assertz(command_arg(hyplist,[N])), !. parse_command_arguments(forget,HHN-M) :- nonvar(HHN), HHN=H#N, H==h, !, integer(N), integer(M), N=2, ( STRATEGY='implication' ; STRATEGY='induction' ; STRATEGY='contradiction' ; STRATEGY='cases' ), name(STRATEGY,SL), gen_append(LIST,_,SL), !. /*** WARN_OF_UNKNOWN_COMMAND -- what the heck's this? ***/ warn_of_unknown_command :- nl, write('!!! Command not recognised. Please retype.'), nl, !. /*** VERIFIED_EXIT_COMMAND(C) -- check if it's safe to exit from Checker ***/ verified_exit_command(exit) :- /* CFR005 */ vcs_to_prove([]), /* it's safe if there are no VCs left */ !. verified_exit_command(exit) :- recent_save_command_issued, !. verified_exit_command(exit) :- write('There are still VCs to prove and you have not "save"d recently.'), nl, read_answer('Do you still wish to exit', Answer), !, Answer=yes. verified_exit_command(forceexit). /* 'help' is predefined in sicstus - need to convert to 'checkerhelp' */ convertHelp(help, checkerhelp) :- !. convertHelp(C, C) :- !. /***** EXECUTE_COMMAND(C) - execute & save proof log fact *****/ execute_command(C) :- retractall(logfact(_,_)), asserta(logfact(command, C)), tidy_up_inference_database(C), convertHelp(C, CC), call_once(CC), !, ( /* CFR005 */ trivial_command(CC) /* CFR005 */ ; /* CFR005 */ CC = exit /* CFR005 */ ; /* CFR005 */ CC = forceexit ; /* CFR005 */ retractall(recent_save_command_issued) /* CFR005 */ ), /* CFR005 */ !, /* CFR005 */ tidy_up_logfacts, !. execute_command(_C) :- write('FAIL'), nl, retractall(logfact(_,_)), !, fail. %Make command calls visible to the spxref tool. :- public replace/0. :- public list/0. :- public standardise/0. :- public done/0. :- public infer/0. :- public deduce/0. :- public simplify/0. :- public case/0. :- public status/0. :- public forwardchain/0. :- public exit/0. :- public forceexit/0. % Note that command 'help' is executed as 'checkerhelp'. :- public checkerhelp/0. :- public forget/0. :- public remember/0. :- public delete/0. :- public undelete/0. :- public newvc/0. :- public prove/0. :- public quit/0. :- public consult/0. :- public unwrap/0. :- public instantiate/0. :- public set/0. :- public show/0. :- public declare/0. :- public save_state/0. :- public printvc/0. :- public traverse/0. :- public execute/0. :- public callpro/0. call_once(C) :- call(C), !. /***** TIDY_UP_LOGFACTS: increment step no (if necessary) & collect args ***/ tidy_up_logfacts :- logfact(command, C), trivial_command(C), retractall(logfact(_,_)), !, fail. tidy_up_logfacts :- logfact(command, exit), !. tidy_up_logfacts :- logfact(command, forceexit), !. tidy_up_logfacts :- write('OK'), nl, retract(step_number(N)), M is N+1, asserta(step_number(M)), !. tidy_up_inference_database(delete) :- retractall(could_infer(_)), !. tidy_up_inference_database(undelete) :- retractall(could_not_infer(_)), !. tidy_up_inference_database(consult) :- !. tidy_up_inference_database(save_state) :- /* CFR005 */ assertz(recent_save_command_issued), /* CFR005 */ !. /* CFR005 */ tidy_up_inference_database(C) :- trivial_command(C), !. 
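/* Note on the save/exit interplay implemented above (based on the
   clauses in this file): tidy_up_inference_database(save_state) asserts
   recent_save_command_issued, and execute_command/1 retracts that fact
   after any later command that is neither trivial nor exit nor
   forceexit.  Consequently verified_exit_command(exit) lets the user
   leave without a prompt only when no VCs remain or when nothing has
   been done since the last "save"; otherwise the confirmation question
   is asked. */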
tidy_up_inference_database(done) :- !. tidy_up_inference_database(_C) :- retractall(could_not_infer(_)), !. /*** SHOW_PERMITTED_ARGUMENTS(COMMAND) -- show allowed syntax ***/ show_permitted_arguments(COMMAND) :- nl, nl, write('General syntax of command:'), nl, gfa(COMMAND,FORM), print(FORM), nl, fail. show_permitted_arguments(COMMAND) :- nl, write('Examples of valid command syntax for "'), print(COMMAND), write('" are:'), nl, nl, tab(5), print(COMMAND), write('.'), nl, spa(COMMAND,TEXT), tab(5), print(COMMAND), put_code(32), print(TEXT), write('.'), nl, fail. spa(list,'h#5'). spa(list,'h#3-6'). spa(list,'c#2'). spa(list,'c#1-5'). spa(list,'h#1-3 & h#7 & c#2-3 & h#15-17'). /* spa(simplify,X) :- spa(list,X). -- NOT YET! */ spa(deduce,'c#1'). spa(deduce,'not x=y'). spa(deduce,'n>0 or n=0 from [2,3]'). spa(deduce,'X or not X where c#1=X from []'). spa(infer,'c#3'). spa(infer,'X+1>0 where h#2=(X>=0)'). spa(infer,'c#1 using inequals'). spa(infer,'is_even(a+b) using even(1)'). spa(infer,'RHS where c#2=(LHS -> RHS) using logic from [2,3,6]'). spa(replace,'h#6'). spa(replace,'c#4'). spa(replace,'h#2: X+Y'). spa(replace,'c#3: X+0 by Y'). spa(replace,'h#14: X+(Y+Z) by B where h#1=(A=B)'). spa(replace,'c#5: A+B by B+A using commut'). spa(replace,'all: n+0 by n using arith'). spa(standardise,'h#6'). spa(standardise,'c#1'). spa(standardise,'i-1+1'). spa(standardise,'X=Y+1 -> Y=X-1 where h#2=(X=A) & h#3=(A=Y+1)'). spa(forwardchain,'h#3'). spa(forwardchain,'c#3'). spa(done,'c#1'). spa(done,'c#1-3'). spa(done,'c#2-3 & c#8-11 & c#5'). spa(case,'2'). spa(prove,'c#1 by implication'). spa(prove,'X or Y where c#1=X & c#2=Y by contradiction'). spa(prove,'c#5 by induction'). spa(prove,'c#3 by cases'). spa(prove,'p(x+y) by cases on h#7'). spa(prove,'x*x>=0 by cases on x<0 or x=0 or x>0'). spa(unwrap,'h#5'). spa(unwrap,'c#2'). spa(instantiate,'h#8'). spa(instantiate,'c#11'). spa(instantiate,'h#1 with 2*n-1'). spa(instantiate,'c#7 with int_p_2 for int_P_1'). spa(newvc,'3'). spa(consult,'\'sort.rls\''). spa(consult,'\'DISK$1:[-.RULES]INOUT.RLS\''). spa(execute,'\'prove1to5.cmd\''). spa(execute,'\'DISK$1:[-.PROOFS]REPLAY1.CMD\''). spa('help','infer'). spa('help','instantiate'). spa(forget,'h#3'). spa(forget,'h#1-3 & h#5-6 & h#10'). spa(forget,'[1,2,3,5,6,10]'). spa(remember,'h#3'). spa(remember,'h#1-3 & h#5-6 & h#10'). spa(remember,'[1,2,3,5,6,10]'). spa(delete,'h#3'). spa(delete,'h#1-3 & h#5-6 & h#10'). spa(delete,'[1,2,3,5,6,10]'). spa(undelete,'h#3'). spa(undelete,'h#1-3 & h#5-6 & h#10'). spa(undelete,'[1,2,3,5,6,10]'). spa(set,'simplify_in_infer'). spa(set,'auto_done to on'). spa(set,'display_subgoals_max to 2'). spa(traverse,'h#6'). spa(traverse,'c#2'). spa(traverse,'X where c#5=(X or _)'). spa(callpro,'statistics'). gfa(list,'li(st) [ HYP_OR_CONC_RANGE { & HYP_OR_CONC_RANGE } ].'). gfa(list,''). gfa(list,'where: HYP_OR_CONC_RANGE is HC#N-M or HC#N,'). gfa(list,' HC is "h" or "c", and N & M are integers (with N>>'), /* CFR017 */ nl, /* CFR017 */ retractall(command_logging(_)), assertz(command_logging(off)), !. 
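/* Illustrative example of how the spa/2 facts above are used: when an
   argument fails to parse, show_permitted_arguments/1 prints every
   spa(COMMAND, TEXT) fact for that command, prefixed by the command
   name.  So an ill-formed "case x." produces, among other lines,
       case.
       case 2.
   the latter taken from spa(case,'2'). */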
/* CFR017 */ %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/induction.pro0000644000175000017500000000436211753202340016563 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= induction(F,VAR,BASE) :- ( F=c#N, integer(N), conc(N,FORMULA) ; checktype(F,boolean), FORMULA=F ), subst_vbl(VAR,BASE,FORMULA,BASECASE), ( atom(VAR), make_new_var(uvar,VAR,integer,INDVAR) ; make_new_var(uvar,ind,integer,INDVAR) ), make_new_var(uvar,z,integer,QUANTVAR), !, subst_vbl(VAR,QUANTVAR,FORMULA,ASSUME), subst_vbl(VAR,INDVAR+1,FORMULA,PROVE), write('ENTERING PROOF BY INDUCTION ATTEMPT'), INDCASE=(INDVAR>=BASE -> (for_all(QUANTVAR:integer, (BASE<=QUANTVAR and QUANTVAR<=INDVAR) -> ASSUME) -> PROVE)), nl, assertz(logfact(induction, [FORMULA, VAR, BASE])), start_subgoal(FORMULA,[BASECASE,INDCASE],true,'INDUCTION'), !. induction :- prompt_user('INDUCTION -- on what? '), rread(INDVAR), prompt_user('INDUCTION -- Base case? '), rread(BASECASE), novars(INDVAR>=BASECASE), infer(INDVAR>=BASECASE), command_arg(expression, FORMULA), induction(FORMULA, INDVAR, BASECASE). %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/repall.pro0000644000175000017500000001015611753202340016044 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /*** do_replace_all(OLD, NEW) -- replace OLD by New in all hyps & concs ***/ do_replace_all(OLD, NEW) :- max_hyp_no(MAX), replace_all_hyps(OLD, NEW, 1, MAX), fail. do_replace_all(OLD, NEW) :- max_conc_no(MAX), replace_all_concs(OLD, NEW, 1, MAX), fail. do_replace_all(_, _) :- !. replace_all_hyps(OLD, NEW, MAX, MAX) :- !, replace_in_hyp(MAX, OLD, NEW), !. 
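/* Worked example for do_replace_all/2 above: the two failure-driven
   clauses walk every hypothesis number from 1 to max_hyp_no and then
   every conclusion number from 1 to max_conc_no, so a call such as
   do_replace_all(x, y+1) rewrites each occurrence of x to y+1
   throughout the current VC.  The per-formula rewriting itself is done
   by replace_in_hyp/3 and replace_in_conc/3 below. */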
replace_all_hyps(_OLD, _NEW, MIN, MAX) :- MIN > MAX, !. /* shouldn't get here... */ replace_all_hyps(OLD, NEW, MIN, MAX) :- replace_in_hyp(MIN, OLD, NEW), !, NEWMIN is MIN + 1, replace_all_hyps(OLD, NEW, NEWMIN, MAX), !. replace_in_hyp(HYP, _OLD, _NEW) :- /* CFR019 */ \+ command_arg(expression, all), /* CFR019 */ \+ hyp_to_replace(HYP), /* CFR019 */ !. /* CFR019 */ replace_in_hyp(HYP, OLD, NEW) :- ( hyp(HYP, OLDEXPR), !, handle_quantifiers_in(OLDEXPR, OLDTEMPEXPR), /* CFR027 */ subst_vbl(OLD, NEW, OLDTEMPEXPR, NEWTEMPEXPR), /* CFR027 */ unhandle_quantifiers_in(NEWTEMPEXPR, NEWEXPR), /* CFR027 */ !, ( OLDEXPR = NEWEXPR ; assertz(hyp(HYP, NEWEXPR)), assertz(logfact(newhyp, hyp(HYP, NEWEXPR))), new_hyp_message(HYP, NEWEXPR), /* CFR018 */ retract(hyp(HYP, OLDEXPR)) ) ; true /* for case when no hyp: e.g. simplify cmnd.! */ ), !. replace_all_concs(OLD, NEW, MAX, MAX) :- !, replace_in_conc(MAX, OLD, NEW), !. replace_all_concs(_OLD, _NEW, MIN, MAX) :- MIN > MAX, !. /* shouldn't get here... */ replace_all_concs(OLD, NEW, MIN, MAX) :- replace_in_conc(MIN, OLD, NEW), !, NEWMIN is MIN + 1, replace_all_concs(OLD, NEW, NEWMIN, MAX), !. replace_in_conc(CONC, _OLD, _NEW) :- /* CFR019 */ \+ command_arg(expression, all), /* CFR019 */ \+ conc_to_replace(CONC), /* CFR019 */ !. /* CFR019 */ replace_in_conc(CONC, OLD, NEW) :- conc(CONC, OLDEXPR), !, handle_quantifiers_in(OLDEXPR, OLDTEMPEXPR), /* CFR027 */ subst_vbl(OLD, NEW, OLDTEMPEXPR, NEWTEMPEXPR), /* CFR027 */ unhandle_quantifiers_in(NEWTEMPEXPR, NEWEXPR), /* CFR027 */ !, ( OLDEXPR = NEWEXPR ; assertz(conc(CONC, NEWEXPR)), assertz(logfact(newconc, conc(CONC, NEWEXPR))), new_conc_message(CONC, NEWEXPR), /* CFR018 */ retract(conc(CONC, OLDEXPR)) ), !. replace_in_conc(_CONC, _OLD, _NEW) :- !. /* when no conclusion left */ %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/subgoal.pro0000644000175000017500000001104611753202340016220 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /*** QUIT -- quit current frame (if any) ***/ quit :- clear_up_could_facts, ( case_pointer(0), !, write('NO FRAME LEFT TO QUIT') ; case_pointer(CP), CP>0, assertz(logfact(quitframe, CP)), ( subgoal_formula(CP,_,_,METHOD), !, write('QUITTING CURRENT PROOF BY '), print(METHOD), write(' ATTEMPT'), nl, /* CFR007 */ abort_subgoal ; on_case(CP,_,_), !, write('QUITTING CURRENT PROOF BY CASES ATTEMPT'), nl, /* CFR007 */ abort_case ) ), !. 
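/* Note on the proof-frame bookkeeping used by quit/0 above:
   case_pointer/1 holds the current frame depth, 0 meaning no open
   frame.  For example, with case_pointer(2) and
   subgoal_formula(2, _, _, 'INDUCTION') in the database, quit prints
   "QUITTING CURRENT PROOF BY INDUCTION ATTEMPT", logs a quitframe
   fact, and abort_subgoal/0 (below) pops the frame back to
   case_pointer(1). */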
/*** ABORT_SUBGOAL -- abort current attempt to prove by subgoaling ***/ abort_subgoal :- case_pointer(CP), subgoal_formula(CP,_,_,_), retractall(subgoal_formula(CP,_,_,_)), retractall(on_case(CP,_,_)), retractall(case(CP,_,_)), retractall(proved_for_case(CP,_)), C is CP-1, retractall(case_pointer(_)), asserta(case_pointer(C)), case_restore(CP), !. /*** START_SUBGOAL(FORMULA,CONDS,HYPS,METHOD) -- set up for subgoaling ***/ start_subgoal(FORMULA,CONDITIONS,HYPOTHESES,METHOD) :- ( /* CFR009 */ checktype(FORMULA, boolean), /* CFR009 */ checkhastypelist(CONDITIONS, boolean), /* CFR009 */ checktype(HYPOTHESES, boolean) /* CFR009 */ ; /* CFR009 */ write('!!! Error: type-check failed on subgoal proof-frame entry attempt'), nl, /* CFR009 */ !, /* CFR009 */ fail /* CFR009 */ ), /* CFR009 */ !, /* CFR009 */ clear_up_could_facts, case_pointer(CP), retractall(case_pointer(_)), C is CP+1, assertz(case_pointer(C)), retractall(proved_for_case(C,_)), retractall(case(C,_,_)), retractall(on_case(C,_,_)), assertz(logfact(method, METHOD)), assertz(logfact(enterframe, C)), case_save(C), ( conc(NUM,FORMULA) ; NUM=[] ), retractall(conc(_,_)), assertz(logfact(text, 'All current conclusions withdrawn')), assertz(subgoal_formula(C,FORMULA,NUM,METHOD)), create_formula(CONDITIONS,HYPOTHESES,CONCS -> HYPS), format_formula(logmessage, HYPS -> CONCS), /* CFR054 */ !. %Make command calls visible to the spxref tool. :- public contradict/1. :- public implication/1. :- public induction/0. :- public cases/0. /*** PROVE -- prove FORMULA by STRATEGY [ on DISJUNCTION ] ***/ prove :- clear_up_could_facts, command_arg(expression, EXPRN), command_arg(strategy, STRATEGY), ( STRATEGY='contradiction', CALL=contradict(EXPRN) ; STRATEGY='implication', CALL=implication(EXPRN) ; STRATEGY='induction', CALL=induction ; STRATEGY='cases', CALL=cases ), !, call(CALL), !. %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/typecheck5.pro0000644000175000017500000007604411753202340016641 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= checktype(E, T) :- nonvar(T), !, find_core_type(T, CT), !, ( checkhastype(E, CT) ; CT = real, checkhastype(E, integer) ), !. checktype(E, T) :- checkhastype(E, TYPE), !, ( T = TYPE ; TYPE = integer, !, T = real ; /* CFR036 */ E =.. [mk__record|_], /* CFR036 */ !, /* CFR036 */ compatible_record_type(TYPE, T), /* CFR036 */ checkhastype(E, T) /* CFR036 */ ; /* CFR036 */ E =.. [mk__array|_], /* CFR036 */ !, /* CFR036 */ compatible_array_type(TYPE, T), /* CFR036 */ checkhastype(E, T) /* CFR036 */ ). 
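/* Illustrative example for checktype/2 above (assuming find_core_type/2
   maps the built-in numeric types to themselves): integers are accepted
   wherever a real is expected, so checktype(2+3, real) succeeds; the
   I/R rules below let an integer-valued sum be typed as real, and the
   first clause would in any case fall back to checkhastype(E, integer)
   when the requested core type is real.  The mk__record / mk__array
   branches of the second clause additionally allow an aggregate to be
   given any structurally compatible record or array type, via
   compatible_record_type/2 and compatible_array_type/2 later in this
   file. */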
checktypes([E|EL], [T|TL]) :- checktype(E, T), checktypes(EL, TL). checktypes([], []). /* BOOL1 */ checkhastype(true, boolean) :- !. /* BOOL2 */ checkhastype(false, boolean) :- !. /* BOOL3 */ checkhastype(for_all(V:AT, FORMULA), boolean) :- !, atom(V), find_core_type(AT, T), ( type(T, _) ; T = integer ; T = real ; T = boolean ), !, ( var_const(V, T, v), !, checkhastype(FORMULA, boolean) ; asserta(var_const(V, T, v)), checkhastype(FORMULA, boolean), retract(var_const(V, T, v)), ! ; retract(var_const(V, T, v)), fail ), !. /* BOOL4 */ checkhastype(for_some(V:AT, FORMULA), boolean) :- !, atom(V), find_core_type(AT, T), ( type(T, _) ; T = integer ; T = real ; T = boolean ), !, ( var_const(V, T, v), !, checkhastype(FORMULA, boolean) ; asserta(var_const(V, T, v)), checkhastype(FORMULA, boolean), retract(var_const(V, T, v)), ! ; retract(var_const(V, T, v)), fail ), !. /* I/R1 */ checkhastype(X+Y, IR) :- checkhastype(X, IRX), !, checkhastype(Y, IRY), !, ( ( IRX=real ; IRY=real ), !, IR=real ; IRX=integer, IRY=integer, (IR=integer ; IR=real) ), !. /* I/R2 */ checkhastype(X-Y, IR) :- checkhastype(X, IRX), !, checkhastype(Y, IRY), !, ( ( IRX=real ; IRY=real ), !, IR=real ; IRX=integer, IRY=integer, (IR=integer ; IR=real) ), !. /* I/R3 */ checkhastype(X*Y, IR) :- checkhastype(X, IRX), !, checkhastype(Y, IRY), !, ( ( IRX=real ; IRY=real ), !, IR=real ; IRX=integer, IRY=integer, (IR=integer ; IR=real) ), !. /* I/R4 */ checkhastype(-X, IR) :- checkhastype(X, IRX), !, ( IRX=real, IR=real ; IRX=integer, (IR=integer ; IR=real) ), !. /* INT1 */ checkhastype(X div Y, integer) :- checkhastype(X, integer), !, checkhastype(Y, integer), !. /* INT2 */ checkhastype(X mod Y, integer) :- checkhastype(X, integer), !, checkhastype(Y, integer), !. /* I/R5 */ checkhastype(abs(X), IR) :- checkhastype(X, IRX), !, ( IRX=real, IR=real ; IRX=integer, (IR=integer ; IR=real) ), !. /* I/R6 */ checkhastype(sqr(X), IR) :- checkhastype(X, IRX), !, ( IRX=real, IR=real ; IRX=integer, (IR=integer ; IR=real) ), !. /* I/R7 */ checkhastype(X ** Y, IR) :- /* CFR038 */ checkhastype(Y, integer), /* CFR038 */ checkhastype(X, IR), /* CFR038 */ ( IR = integer ; IR = real ), /* CFR038 */ !. /* CFR038 */ /* I/R8 */ checkhastype(+X, IR) :- /* CFR039 */ checkhastype(X, IR), /* CFR039 */ !, /* CFR039 */ ( IR=real ; IR=integer ), /* CFR039 */ !. /* CFR039 */ /* REA1 */ checkhastype(X/Y, real) :- checkhastype(X, real), !, checkhastype(Y, real), !. /* REL1 */ checkhastype(X=Y, boolean) :- checkhastype(X, TX), ( ( TX=integer ; TX=real ), !, checkhastype(Y, TY), ( TY=integer ; TY=real ) ; checkhastype(Y, TX) ), !. /* REL2 */ checkhastype(X<>Y, boolean) :- checkhastype(X, TX), ( ( TX=integer ; TX=real ), !, checkhastype(Y, TY), ( TY=integer ; TY=real ) ; checkhastype(Y, TX) ), !. /* REL3 */ checkhastype(X>Y, boolean) :- checkhastype(X, TX), ( ( TX=integer ; TX=real ), !, checkhastype(Y, TY), ( TY=integer ; TY=real ) ; type(TX, enumerated), checkhastype(Y, TX) ), !. /* REL4 */ checkhastype(X=Y, boolean) :- checkhastype(X, TX), ( ( TX=integer ; TX=real ), !, checkhastype(Y, TY), ( TY=integer ; TY=real ) ; type(TX, enumerated), checkhastype(Y, TX) ), !. /* REL6 */ checkhastype(X<=Y, boolean) :- checkhastype(X, TX), ( ( TX=integer ; TX=real ), !, checkhastype(Y, TY), ( TY=integer ; TY=real ) ; type(TX, enumerated), checkhastype(Y, TX) ), !. 
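/* Worked example for the REL rules above: a relation type-checks as
   boolean when both operands are numeric (integer and real may be
   mixed) or both belong to the same declared enumerated type.  For
   instance, once the declaration store holds var_const(i, integer, _),
   the query checkhastype(i <= 3, boolean) succeeds via REL6: TYP1
   gives integer for i and INT3 gives integer for the literal 3. */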
/* ARR1 */ checkhastype(update(A, IL, E), T) :- checkhastype(A, T), ( /*1.4*/ type(T, array(ITL, ET)) /*1.4*/ ; /*1.4*/ find_core_type(T, TN), /*1.4*/ type(TN, array(ITL, ET)) /*1.4*/ ), /*1.4*/ checkhastypelist(IL, ITL), checkhastype(E, ET), !. /* ARR2 */ checkhastype(element(A, IL), ET) :- checkhastype(A, T), ( /*1.4*/ type(T, array(ITL, ET)) /*1.4*/ ; /*1.4*/ find_core_type(T, TN), /*1.4*/ type(TN, array(ITL, ET)) /*1.4*/ ), /*1.4*/ checkhastypelist(IL, ITL), !. /* BOOL5 */ checkhastype((not X), boolean) :- checkhastype(X, boolean), !. /* BOOL6 */ checkhastype(X and Y, boolean) :- checkhastype(X, boolean), !, checkhastype(Y, boolean), !. /* BOOL7 */ checkhastype(X or Y, boolean) :- checkhastype(X, boolean), !, checkhastype(Y, boolean), !. /* BOOL8 */ checkhastype(X -> Y, boolean) :- checkhastype(X, boolean), !, checkhastype(Y, boolean), !. /* BOOL9 */ checkhastype(X <-> Y, boolean) :- checkhastype(X, boolean), !, checkhastype(Y, boolean), !. /* INT3 */ checkhastype(A, integer) :- integer(A), !. /* TYP1 */ checkhastype(A, T) :- atomic(A), var_const(A, T, _), !. /* ODD */ checkhastype(odd(X), boolean) :- checkhastype(X, integer), !. /* ORD1 */ checkhastype(pred(X), T) :- checkhastype(X, T), !, (type(T,enumerated) ; T=integer), !. /* ORD2 */ checkhastype(succ(X), T) :- checkhastype(X, T), !, (type(T,enumerated) ; T=integer), !. /* SEQ1 */ checkhastype(length(S), integer) :- checkhastype(S, ST), !, type(ST, sequence(_)), !. /* SEQ2 */ checkhastype(first(S), ET) :- checkhastype(S, ST), type(ST, sequence(ET)). /* SEQ3 */ checkhastype(last(S), ET) :- checkhastype(S, ST), type(ST, sequence(ET)). /* SEQ4 */ checkhastype(nonfirst(S), ST) :- checkhastype(S, ST), type(ST, sequence(_)). /* SEQ5 */ checkhastype(nonlast(S), ST) :- checkhastype(S, ST), type(ST, sequence(_)). /* SEQ6 */ checkhastype(X @ Y, ST) :- checkhastype(X, ST), type(ST, sequence(_)), checkhastype(Y, ST). /* SET1 */ checkhastype(X \/ Y, ST) :- checkhastype(X, ST), type(ST, set(_)), checkhastype(Y, ST). /* SET2 */ checkhastype(X \ Y, ST) :- checkhastype(X, ST), type(ST, set(_)), checkhastype(Y, ST). /* SET3 */ checkhastype(X /\ Y, ST) :- checkhastype(X, ST), type(ST, set(_)), checkhastype(Y, ST). /* SET4 */ checkhastype(X in Y, boolean) :- checkhastype(Y, ST), type(ST, set(ET)), checkhastype(X, ET), !. /* SET5 */ checkhastype(X not_in Y, boolean) :- checkhastype(Y, ST), type(ST, set(ET)), checkhastype(X, ET), !. /* SET6 */ checkhastype(X subset_of Y, boolean) :- checkhastype(X, ST), type(ST, set(_)), checkhastype(Y, ST), !. /* SET7 */ checkhastype(X strict_subset_of Y, boolean) :- checkhastype(X, ST), type(ST, set(_)), checkhastype(Y, ST), !. /* SEQ7 */ checkhastype([], T) :- type(T, sequence(_)). /* SEQ8 */ checkhastype([E|EL], T) :- checkhastype(E, ET), type(T, sequence(ET)), checkhastype(EL, T). /* SET8 */ checkhastype(set [], T) :- type(T, set(_)). /* SET9 */ checkhastype(set [E|EL], T) :- checkhastype(E, ET), type(T, set(ET)), checkhastype(set EL, T). /* ARR3 */ checkhastype(mk__array(VALUE), T) :- /* CFR036 */ ( type(T, array([IT], VT)) /* 036,1.4*/ ; /*1.4*/ find_core_type(T, TN), /*1.4*/ type(TN, array([IT], VT)) /*1.4*/ ), /*1.4*/ VALUE \= (_ := _), /* CFR036 */ checktype(VALUE, VT). /* CFR036 */ /* ARR4 */ checkhastype(AGG, T) :- /* CFR036 */ \+ atomic(AGG), /* CFR036 */ AGG =.. 
[F|ARGS], /* CFR036 */ spark_enabled, /* CFR036 */ ( /* CFR036 */ F = mk__array, /* CFR036 */ !, /* CFR036 */ collect_indices_and_values( /* CFR036 */ ARGS, [I|INDS], [V|VALS]), /* CFR036 */ !, /* CFR036 */ checktype(I, IT), /* CFR036 */ checkrestoflist(INDS, IT), /* CFR036 */ checktype(V, VT), /* CFR036 */ checkrestoflist(VALS, VT), /* CFR036 */ type(T, array([IT], VT)) /* CFR036 */ ; /* CFR036 */ F = mk__record, /* CFR036 */ !, /* CFR036 */ collect_fields_and_values( /* CFR036 */ ARGS, FIELDS, VALUES), /* CFR036 */ type(T, record(RECFIELDS)), /* CFR036 */ permutation_of_fields( /* CFR036 */ FIELDS, RECFIELDS, TYPES), /* CFR036 */ checktypelist(VALUES, TYPES) /* CFR036 */ ). /* CFR036 */ /* ARR5 */ checkhastype(E, T) :- \+ atomic(E), E =.. [F,Arg], mk__function_name(F, TN, array), Arg \= (_ := _), /*1.4*/ !, ( /*1.4*/ type(TN, array([IT], VT)), /*1.4*/ T = TN /*1.4*/ ; /*1.4*/ find_core_type(TN, T), /*1.4*/ type(T, array([IT], VT)) /*1.4*/ ), /*1.4*/ checktype(Arg, VT). /* ARR6 */ checkhastype(AGG, T) :- \+ atomic(AGG), AGG =.. [F|ARGS], spark_enabled, ( mk__function_name(F , TN, array), !, collect_indices_and_values( ARGS, [I|INDS], [V|VALS]), !, checktype(I, IT), checkrestoflist(INDS, IT), checktype(V, VT), checkrestoflist(VALS, VT), ( /*1.4*/ type(TN, array([IT], VT)), /*1.4*/ T = TN /*1.4*/ ; /*1.4*/ find_core_type(TN, T), /*1.4*/ type(T, array([IT], VT)) /*1.4*/ ) /*1.4*/ ; mk__function_name(F, T, record), !, collect_fields_and_values( ARGS, FIELDS, VALUES), type(T, record(RECFIELDS)), permutation_of_fields( FIELDS, RECFIELDS, TYPES), checktypelist(VALUES, TYPES) ). /* FUNC */ checkhastype(A, T) :- \+ atomic(A), function_template(A, AL, F), checktypelist(AL, ATL), ( function(F, ATLX, T) ; function(F, ATLX, XT), compatible_type_lists([XT], [T]) ), compatible_type_lists(ATL, ATLX), !. /* REC1 */ checkhastype(A, T) :- /* CFR029 */ \+ atomic(A), /* CFR029 */ record_function(_N, A, access, _FIELD, [REC], _), /* CFR029,053 */ checkhastype(REC, RTYPE), /* CFR029 */ A =.. [F|_], /* CFR029 */ ( /* CFR029 */ function(F, [RTYPE], T) /* CFR029 */ ; /* CFR029 */ function(F, [RTYPE], XT), /* CFR029 */ compatible_type_lists([XT], [T]) /* CFR029 */ ), /* CFR029 */ !. /* CFR029 */ /* REC2 */ checkhastype(A, RTYPE) :- /* CFR029 */ \+ atomic(A), /* CFR029 */ record_function(_N, A, update, _FIELD, [R,V], _), /* CFR029,053 */ checkhastype(R, RTYPE), /* CFR029 */ A =.. [F|_], /* CFR029 */ function(F, [RTYPE,VTYPE], RTYPE), /* CFR029 */ checkhastype(V, VTYPE), /* CFR029 */ !. /* CFR029 */ /* UNIV */ checkhastype(A, T) :- \+ atomic(A), \+ function_template(A, _, _), \+ record_function(_, A, _, _, _, _), /* CFR029,053 */ A=..[F|ARGS], check_is_an_ok_arity_function(F, ARGS), checktypelist(ARGS, ATL), ( function(F, ATLX, T) ; function(F, ATLX, XT), compatible_type_lists([XT], [T]) ), compatible_type_lists(ATL, ATLX), !. /* BITWISE1 */ checkhastype(bit__and(X,Y), Z) :- spark_enabled, checkhastype(X, Z), checkhastype(Y, Z), !. /* BITWISE2 */ checkhastype(bit__or(X,Y), integer) :- spark_enabled, checkhastype(X, integer), checkhastype(Y, integer), !. /* BITWISE3 */ checkhastype(bit__xor(X,Y), integer) :- spark_enabled, checkhastype(X, integer), checkhastype(Y, integer), !. /* REA2 */ checkhastype(X, real) :- checkhastype(X, integer), !. /* CTL1 */ checktypelist([E], [T]) :- !, checkhastype(E, T), !. /* CTL2 */ checktypelist([E|EL], [T|TL]) :- checkhastype(E, T), checktypelist(EL, TL), !. /* CHL1 */ checkhastypelist([E], [T]) :- !, checkhastype(E, T), !. 
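/* Note on checkhastypelist/2, whose clauses surround this comment: the
   second argument may be either a list of types, checked element-wise
   (CHL1/CHL2), or a single type that every element must have
   (CHL3/CHL4 below).  The array rules ARR1/ARR2 rely on the first form
   for index lists, e.g. checkhastypelist([2,5], [integer, integer]),
   while checkhastypelist([1,2,3], integer) illustrates the second. */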
/* CHL2 */ checkhastypelist([E|EL], [T|TL]) :- checkhastype(E, T), !, checkhastypelist(EL, TL), !. /* CHL3 */ checkhastypelist([E], T) :- !, checkhastype(E, T), !. /* CHL4 */ checkhastypelist([E|EL], T) :- checkhastype(E, T), !, checkhastypelist(EL, T), !. compatible_type_lists([A], [A]) :- !. compatible_type_lists([integer], [real]) :- !. compatible_type_lists([A|AL], [A|RL]) :- compatible_type_lists(AL, RL), !. compatible_type_lists([integer|AL],[real|RL]) :- compatible_type_lists(AL,RL),!. compatible_type_lists([T1|AL],[T2|RL]) :- compatible_set_or_seq_types(T1, T2), !, compatible_type_lists(AL, RL), !. compatible_type_lists([], []) :- !. compatible_set_or_seq_types(T1, T2) :- type(T1, set(ET1)), type(T2, set(ET2)), !, compatible_type_lists([ET1],[ET2]), !. compatible_set_or_seq_types(T1, T2) :- type(T1, sequence(ET1)), type(T2, sequence(ET2)), !, compatible_type_lists([ET1],[ET2]), !. check_is_an_ok_arity_function(F, ARGS) :- function(F, ARGL, _), length(ARGL, LEN), length(ARGS, LEN), !. save_type_classification(E) :- type_classification(E, _X), !. save_type_classification(E) :- checktype(E, T), ( T = integer, !, maybe_add(type_classification(E, i)) ; T = real, !, maybe_add(type_classification(E, r)) ; type(T, enumerated), !, maybe_add(type_classification(E, e)) ; true ), !. save_type_classification(_E) :- !. save_type_classification_list([E|L]) :- save_type_classification(E), save_type_classification_list(L), !. save_type_classification_list([]) :- !. has_type_classification(E, C) :- type_classification(E, C), !. has_type_classification(E, ir) :- ( type_classification(E, i) ; type_classification(E, r) ), !. has_type_classification(E, ire) :- ( type_classification(E, i) ; type_classification(E, r) ; type_classification(E, e) ), !. has_type_classification(_E, any) :- !. has_type_classification_list([]) :- !. has_type_classification_list(X) :- ( type_classification_done, !, check_has_type_classifications_list(X) ; var_free(X), ensure_has_type_classifications_list(X) ), !. check_has_type_classifications_list([F:T | REST]) :- has_type_classification(F, T), check_has_type_classifications_list(REST), !. check_has_type_classifications_list([]) :- !. ensure_has_type_classifications_list([F:T | REST]) :- save_type_classification(F), has_type_classification(F, T), ensure_has_type_classifications_list(REST), !. ensure_has_type_classifications_list([]) :- !. compatible_record_type(T1, T2) :- /* CFR036 */ type(T1, record(F1)), /* CFR036 */ type(T2, record(F2)), /* CFR036 */ T1 \= T2, /* CFR036 */ same_record_field_names(F1, F2). /* CFR036 */ same_record_field_names(Fs, Gs) :- /* CFR036 */ collect_record_field_names(Fs, Fn), /* CFR036 */ collect_record_field_names(Gs, Gn), /* CFR036 */ sort(Fn, S), /* CFR036 */ sort(Gn, S), /* CFR036 */ !. /* CFR036 */ collect_record_field_names([[F,_]|FTs], [F|Fs]) :- /* CFR036 */ !, /* CFR036 */ collect_record_field_names(FTs, Fs). /* CFR036 */ collect_record_field_names([], []) :- !. /* CFR036 */ compatible_array_type(T1, T2) :- /* CFR036 */ type(T1, array(I1, R1)), /* CFR036 */ type(T2, array(I2, R2)), /* CFR036 */ T1 \= T2, /* CFR036 */ compatible_array_result_types(R1, R2), /* CFR036 */ compatible_array_indices(I1, I2). /* CFR036 */ compatible_array_result_types(T, T) :- !. /* CFR036 */ compatible_array_result_types(integer, real) :- !. /* CFR036 */ compatible_array_result_types(real, integer) :- !. /* CFR036 */ compatible_array_result_types(T1, T2) :- /* CFR036 */ compatible_record_type(T1, T2), /* CFR036 */ !. 
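/* Worked example for the structural compatibility checks above: two
   declared record types are compatible when their field-name sets
   coincide, regardless of declaration order.  Given (hypothetically)
       type(t1, record([[x, integer], [y, boolean]]))
       type(t2, record([[y, boolean], [x, integer]]))
   compatible_record_type(t1, t2) succeeds, because both field lists
   sort to [x, y] in same_record_field_names/2. */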
/* CFR036 */ compatible_array_indices([I|Is], [I|Js]) :- /* CFR036 */ !, /* CFR036 */ compatible_array_indices(Is, Js). /* CFR036 */ compatible_array_indices([], []) :- !. /* CFR036 */ collect_indices_and_values([A|ARGS], INDS, [V|VALS]) :- /* CFR036 */ ( /* CFR036 */ A = (LHS := V), /* CFR036 */ collect_indices(LHS, I), /* CFR036 */ collect_indices_and_values(ARGS, RESTINDS, VALS), /* CFR036 */ !, /* CFR036 */ append(I, RESTINDS, INDS) /* CFR036 */ ; /* CFR036 */ V = A, /* CFR036 */ !, /* CFR036 */ collect_indices_and_values(ARGS, INDS, VALS) /* CFR036 */ ), /* CFR036 */ !. /* CFR036 */ collect_indices_and_values([], [], []) :- !. /* CFR036 */ collect_indices(X & Y, I) :- /* CFR036 */ collect_indices(X, XL), /* CFR036 */ collect_indices(Y, YL), /* CFR036 */ !, /* CFR036 */ append(XL, YL, I), /* CFR036 */ !. /* CFR036 */ collect_indices([X .. Y], [X,Y]) :- !. /* CFR036 */ collect_indices([X], [X]) :- !. /* CFR036 */ collect_fields_and_values([(F := V)|ARGS], [F|Fs], [V|Vs]) :- /* CFR036 */ !, /* CFR036 */ collect_fields_and_values(ARGS, Fs, Vs), /* CFR036 */ !. /* CFR036 */ collect_fields_and_values([], [], []) :- !. /* CFR036 */ permutation_of_fields([F|FIELDS], FTL, [T|TYPES]) :- /* CFR036 */ gen_append(LHS, [[F,T]|RHS], FTL), /* CFR036 */ !, /* CFR036 */ append(LHS, RHS, NEWFTL), /* CFR036 */ !, /* CFR036 */ permutation_of_fields(FIELDS, NEWFTL, TYPES), /* CFR036 */ !. /* CFR036 */ permutation_of_fields([], [], []) :- !. /* CFR036 */ checkrestoflist([X|XL], T) :- /* CFR036 */ checktype(X, T), /* CFR036 */ checkrestoflist(XL, T). /* CFR036 */ checkrestoflist([], _). /* CFR036 */ %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/initialise.pro0000644000175000017500000001122411753202340016714 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= read_initialisations :- spade_checker_prefix(SPADE_CHECKER), /* CFR048 */ append(SPADE_CHECKER, "checker.ini", INILIST), /* CFR048 */ name(INIFILE, INILIST), /* CFR048 */ read_init_file(INIFILE). /* CFR048 */ read_initialisations :- read_init_file('checker.ini'). /* CFR048 */ read_initialisations. read_init_file(FILE) :- file_exists_and_is_readable(FILE), !, repeat, see(FILE), read_term_and_layout(DECLARATION), process_initialisation(DECLARATION), /* UNTIL */ DECLARATION == end_of_file, seen, !, fail. /* Don't fail as it looks for two files - one in rules dir, one in current dir */ /* It will use both if it finds both */ read_init_file(FILE) :- \+ file_exists_and_is_readable(FILE), fail. process_initialisation(DECLARATION) :- var(DECLARATION), !, write('!!! 
PROLOG VAR IN INITIALISATION FILE'), nl, !. process_initialisation(end_of_file) :- !. process_initialisation(DECLARATION) :- \+ novars(DECLARATION), !, write('!!! PROLOG VAR IN INITIALISATION FILE'), nl, !. process_initialisation(set memory_limit to VALUE) :- integer(VALUE), !, ( VALUE >= 250000, set_memory_limit(VALUE) ; write('WARNING: Memory limit value too small'), !, fail ), !. process_initialisation(set FLAG to VALUE) :- is_a_flag(FLAG, VALUETYPE), ok_value(VALUE, VALUETYPE), OLD =.. [FLAG, _], NEW =.. [FLAG, VALUE], !, retractall(OLD), asserta(NEW), !. process_initialisation(set FLAG to VALUE) :- is_a_flag(FLAG, _VALUETYPE), !, write('!!! INITIALISATION: '), print(VALUE), write(' is not of appropriate type for '), print(FLAG), nl, !. process_initialisation(set FLAG to _VALUE) :- !, write('!!! INITIALISATION: '), print(FLAG), write(' is not a user-configurable flag.'), nl, !. process_initialisation(consult FILE) :- atom(FILE), !, maybe_add(ini_file_consult(FILE)), /* CFR021 */ !. is_a_flag(display_subgoals_max, integer/0/99). is_a_flag(display_var_free_only, boolean). is_a_flag(echo, boolean). is_a_flag(auto_done, boolean). is_a_flag(simplify_in_infer, boolean). is_a_flag(simplify_during_load, boolean). is_a_flag(typechecking, boolean). is_a_flag(typechecking_during_load, boolean). is_a_flag(prooflog_width, integer/80/255). is_a_flag(record_consults, boolean). is_a_flag(inverse_video, int_list/integer). is_a_flag(normal_video, int_list/integer). is_a_flag(use_subst_rules_for_equality, boolean). is_a_flag(command_logging, boolean). /* CFR032 */ is_a_flag(show_vc_changes, boolean). /* CFR032 */ is_a_flag(auto_newvc, boolean). /* CFR032 */ is_a_flag(newline_after_prompts, boolean). /* CFR1334 */ is_a_flag(indentation_increment, integer/0/EOL) :- eol_char(EOL). is_a_flag(replace_more, boolean). is_a_flag(auto_infer_from_false, boolean). ok_value(on, boolean). ok_value(off, boolean). ok_value(0, integer/_A/_B). ok_value(NUM, integer/A/B) :- integer(NUM), NUM>=A, NUM =< B. ok_value([H|T], int_list/X) :- integer(H), 0 < H, H < 128, !, ok_value(T, int_list/X). ok_value([], int_list/_X). %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/help.pro0000644000175000017500000003457511753202340015530 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= h('The following commands are available at present in the checker:-'). h(''). h(' case forget prove standardise '). h(' consult forwardchain quit status '). /* CFR017 */ h(' declare help remember traverse '). h(' deduce infer replace undelete '). 
h(' delete instantiate save unwrap '). h(' done list set '). h(' execute newvc show '). h(' exit printvc simplify '). h(''). h('For more specific help on a particular command, type "help COMMAND."'). h('For help on the built-in rules library, type "help rules."'). checkerhelp :- command_arg(subject,X), !, checkerhelp(X), !. checkerhelp :- display_general_help, !. display_general_help :- h(L), print(L), nl, fail. display_general_help :- !. checkerhelp(X = Y) :- get_help_identifier(X, XID), get_help_identifier(Y, YID), show_help(XID = YID), !. checkerhelp(X) :- atomic(X), !, get_help_identifier(X, XID), ( show_help(XID) ; show_help(rules = XID) ), !. get_help_identifier(OLD_ATOM, NEW_ATOM) :- atomic(OLD_ATOM), name(OLD_ATOM, OLD_LIST), make_letter_list(OLD_LIST, NEW_LIST), !, find_help_atom(NEW_LIST, NEW_ATOM). make_letter_list([H|T], [HL|TL]) :- make_letter(H, HL), !, make_letter_list(T, TL), !. make_letter_list([], []) :- !. make_letter(H, L) :- 64 < H, H < 91, H1 is H+32, !, name(L, [H1]), !. make_letter(H, L) :- name(L, [H]), !. find_help_atom(NEW_LIST, NEW_ATOM) :- match_command(NEW_LIST, NEW_ATOM). find_help_atom([a,b|S], abs) :- gen_append(S, _, [s]), !. find_help_atom([a,p|PEND], append) :- gen_append(PEND, _, [p,e,n,d]), !. find_help_atom([a,r,i|TH], arith) :- gen_append(TH, _, [t,h]), !. find_help_atom([a,r,r|AY], array) :- gen_append(AY, _, [a,y]), !. find_help_atom([a,s|SOC], assoc) :- gen_append(SOC, _, [s,o,c]), !. find_help_atom([b|ITWISE], bitwise) :- gen_append(ITWISE, _, [i,t,w,i,s,e]), !. find_help_atom([c|OMMUT], commut) :- gen_append(OMMUT, _, [o,m,m,u,t]), !. find_help_atom([d|ISTRIB], distrib) :- gen_append(ISTRIB, _, [i,s,t,r,i,b]), !. find_help_atom([e,n|UM], enum) :- gen_append(UM, _, [u,m]), !. find_help_atom([e,n,u,m,'_'|CASES], enum_cases) :- gen_append(CASES, _, [c,a,s,e,s]), !. find_help_atom([e,n,u,m,e|RATION], enumeration) :- gen_append(RATION, _, [r,a,t,i,o,n]), !. find_help_atom([e,q], eq) :- !. find_help_atom([e,q,u|IV], equiv) :- gen_append(IV, _, [i,v]), !. find_help_atom([e,q,u,i,v,a|LENCE], equivalence) :- gen_append(LENCE, _, [l,e,n,c,e]), !. find_help_atom([e,x,p], exp) :- !. /* CFR042 */ find_help_atom([f|IRST], first) :- gen_append(IRST, _, [i,r,s,t]), !. find_help_atom([i,m|PLIES], implies) :- gen_append(PLIES, _, [p,l,i,e,s]), !. find_help_atom([i,n,e|QUALS], inequals) :- gen_append(QUALS, _, [q,u,a,l,s]), !. find_help_atom([i,n,f|ERENCE], inference) :- gen_append(ERENCE, _, [e,r,e,n,c,e]), !. find_help_atom([i,n,t|DIV], intdiv) :- gen_append(DIV, _, [d,i,v]), !. find_help_atom([l,a|ST], last) :- gen_append(ST, _, [s,t]), !. find_help_atom([l,o,g,i,c], logic) :- !. find_help_atom([l,o,g,i,c,a,l], logical) :- !. find_help_atom([l,o,g,i,c,a,l,'_',n|OT], logical_not) :- gen_append(OT, _, [o,t]), !. find_help_atom([l,o,g,i,c,a,l,'_',a|ND], logical_and) :- gen_append(ND, _, [n,d]), !. find_help_atom([l,o,g,i,c,a,l,'_',o|R], logical_or) :- gen_append(R, _, [r]), !. find_help_atom([m,i|NUS], minus) :- gen_append(NUS, _, [n,u,s]), !. /*CFR043*/ find_help_atom([m,k,'_','_',a|RRAY], mk__array) :- /* CFR043 */ gen_append(RRAY, _, [r,r,a,y]), !. /* CFR043 */ find_help_atom([m,k,'_','_',r|ECORD], mk__record) :- /* CFR043 */ gen_append(ECORD, _, [e,c,o,r,d]), !. /* CFR043 */ find_help_atom([m,o|DULAR], modular) :- gen_append(DULAR, _, [d,u,l,a,r]), !. find_help_atom([n,e|GATION], negation) :- gen_append(GATION, _, [g,a,t,i,o,n]), !. find_help_atom([n,o,n,f|IRST], nonfirst) :- gen_append(IRST, _, [i,r,s,t]), !. 
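/* Illustrative example for the help-topic lookup above: the user's
   argument is lower-cased letter by letter and then matched first
   against the command abbreviations (match_command/2) and then against
   the rule-library names.  So "help ineq." resolves via
   find_help_atom([i,n,e,q], inequals) and ends up displaying the
   rules = inequals section, while "help inst." resolves through
   match_command/2 to the help for the instantiate command. */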
find_help_atom([n,o,n,l|AST], nonlast) :- gen_append(AST, _, [a,s,t]), !. find_help_atom([o|DD], odd) :- gen_append(DD, _, [d,d]), !. find_help_atom([q|UANT], quant) :- gen_append(UANT, _, [u,a,n,t]), !. find_help_atom([r,e,c|ORD], record) :- gen_append(ORD, _, [o,r,d]), !. find_help_atom([r,e,c,o,r,d,'_'|EQUALITY], record_equality) :- gen_append(EQUALITY, _, [e,q,u,a,l,i,t,y]), !. find_help_atom([r,u|LES], rules) :- gen_append(LES, _, [l,e,s]), !. find_help_atom([s,e,q], seq) :- !. find_help_atom([s,e,q,l|EN], seqlen) :- gen_append(EN, _, [e,n]), !. find_help_atom([s,e,t|S], sets) :- gen_append(S, _, [s]), !. find_help_atom([s,q|R], sqr) :- gen_append(R, _, [r]), !. find_help_atom([s,t,a|NDARDISATION], standardisation) :- gen_append(NDARDISATION, _, [n,d,a,r,d,i,s,a,t,i,o,n]), !. find_help_atom([s,t,r|ENGTHEN], strengthen) :- gen_append(ENGTHEN, _, [e,n,g,t,h,e,n]), !. find_help_atom([t,r,a|NSITIVITY], transitivity) :- gen_append(NSITIVITY, _, [n,s,i,t,i,v,i,t,y]), !. find_help_atom([u,n,i|FICATION], unification) :- gen_append(FICATION, _, [f,i,c,a,t,i,o,n]), !. find_help_atom([z|ERO], zero) :- gen_append(ERO, _, [e,r,o]), !. show_help(PATTERN) :- helpfilename(PATTERN, FILELIST), /* CFR048 */ spade_chkhelp_prefix(SPADE_CHKHELP), /* CFR048 */ append(SPADE_CHKHELP, FILELIST, NAMELIST), /* CFR048 */ name(FILENAME, NAMELIST), /* CFR048 */ ( open_help_file(FILENAME), show_help_for(PATTERN), seen, see_correct_input_stream /* CFR015 */ ; write('ERROR: Cannot open or display Checker help library file -- '), print(FILENAME), nl ), !. open_help_file(FILENAME) :- file_exists_and_is_readable(FILENAME), see(FILENAME), seen, see(FILENAME), !. show_help_for(PATTERN) :- repeat, read_term_and_layout(X), /* until */ found_help_pattern(PATTERN, X), !, read_help_lines(X, Y), !, display_help_lines(Y, 20), !. found_help_pattern(PATTERN, [PATTERN]) :- !. found_help_pattern(_PATTERN, end_of_file) :- !. read_help_lines(end_of_file, []) :- !. read_help_lines(P, LIST) :- read_term_and_layout(X), ( X=[_|_], LIST = [] ; X=end_of_file, LIST = [] ; LIST=[X|REST], !, read_help_lines(P, REST) ), !. display_help_lines([], _) :- !. display_help_lines([X|Y], N) :- N>0, print(X), nl, N1 is N-1, !, display_help_lines(Y, N1), !. display_help_lines(X, 0) :- nl, nl, is_inverse_video(I), print(I), write('MORE'), is_normal_video(N), print(N), write(' (Press Return)...'), see_correct_input_stream, /* CFR015 */ eol_char(EOL), lskip(EOL), /* CFR014 */ !, display_help_lines(X, 20), !. /*** HELPFILES -- Commands ***/ helpfilename( case, "case.chl" ). /* CFR048 */ helpfilename( consult, "consult.chl" ). /* CFR048 */ helpfilename( declare, "declare.chl" ). /* CFR048 */ helpfilename( deduce, "deduce.chl" ). /* CFR048 */ helpfilename( delete, "delete.chl" ). /* CFR048 */ helpfilename( done, "done.chl" ). /* CFR048 */ helpfilename( exit, "exit.chl" ). /* CFR048 */ helpfilename( execute, "execute.chl" ). /* CFR017,048 */ helpfilename( forget, "forget.chl" ). /* CFR048 */ helpfilename( forwardchain, "forwardch.chl" ). /* CFR048 */ helpfilename( 'help', "help.chl" ). /* CFR048 */ helpfilename( infer, "infer.chl" ). /* CFR048 */ helpfilename( instantiate, "instantia.chl" ). /* CFR048 */ helpfilename( list, "list.chl" ). /* CFR048 */ helpfilename( newvc, "newvc.chl" ). /* CFR048 */ helpfilename( printvc, "printvc.chl" ). /* CFR048 */ helpfilename( prove, "prove.chl" ). /* CFR048 */ helpfilename( quit, "quit.chl" ). /* CFR048 */ helpfilename( remember, "remember.chl" ). /* CFR048 */ helpfilename( replace, "replace.chl" ). 
/* CFR048 */ helpfilename( save_state, "save.chl" ). /* CFR048 */ helpfilename( (set), "set.chl" ). /* CFR048 */ helpfilename( show, "show.chl" ). /* CFR048 */ helpfilename( simplify, "simplify.chl" ). /* CFR048 */ helpfilename( standardise, "standard.chl" ). /* CFR048 */ helpfilename( status, "status.chl" ). /* CFR048 */ helpfilename( traverse, "traverse.chl" ). /* CFR048 */ helpfilename( undelete, "undelete.chl" ). /* CFR048 */ helpfilename( unwrap, "unwrap.chl" ). /* CFR048 */ /*** HELPFILES -- built-in rule libraries ***/ helpfilename( rules, "rules.chl" ). /* CFR048 */ helpfilename( rules = arith, "rxarith.chl" ). /* CFR048 */ helpfilename( rules = assoc, "rxarith.chl" ). /* CFR048 */ helpfilename( rules = bitwise, "rxmodular.chl" ). helpfilename( rules = commut, "rxarith.chl" ). /* CFR048 */ helpfilename( rules = distrib, "rxarith.chl" ). /* CFR048 */ helpfilename( rules = minus, "rxarith.chl" ). /* CFR048 */ helpfilename( rules = intdiv, "rxarith.chl" ). /* CFR048 */ helpfilename( rules = array, "rxarray.chl" ). /* CFR048 */ helpfilename( rules = mk__array, "rxarray.chl" ). /* CFR043,048 */ helpfilename( rules = enum, "rxenum.chl" ). /* CFR048 */ helpfilename( rules = enum_cases, "rxenum.chl" ). /* CFR048 */ helpfilename( rules = enumeration, "rxenum.chl" ). /* CFR 1054 */ helpfilename( rules = abs, "rxfdlfunc.chl" ). /* CFR048 */ helpfilename( rules = sqr, "rxfdlfunc.chl" ). /* CFR048 */ helpfilename( rules = odd, "rxfdlfunc.chl" ). /* CFR048 */ helpfilename( rules = exp, "rxfdlfunc.chl" ). /* CFR042,048 */ helpfilename( rules = transitivity, "rxgenineq.chl" ). /* CFR048 */ helpfilename( rules = strengthen, "rxgenineq.chl" ). /* CFR048 */ helpfilename( rules = negation, "rxgenineq.chl" ). /* CFR048 */ helpfilename( rules = inequals, "rxinequal.chl" ). /* CFR048 */ helpfilename( rules = zero, "rxinequal.chl" ). /* CFR048 */ helpfilename( rules = logical_not, "rxlogic.chl" ). /* CFR048 */ helpfilename( rules = logical_and, "rxlogic.chl" ). /* CFR048 */ helpfilename( rules = logical_or, "rxlogic.chl" ). /* CFR048 */ helpfilename( rules = implies, "rxlogic.chl" ). /* CFR048 */ helpfilename( rules = equivalence, "rxlogic.chl" ). /* CFR048 */ helpfilename( rules = logical, "rxlogic.chl" ). /* CFR048 */ helpfilename( rules = modular, "rxmodular.chl" ). helpfilename( rules = quant, "rxquantif.chl" ). /* CFR048 */ helpfilename( rules = record, "rxrecord.chl" ). /* CFR048 */ helpfilename( rules = record_equality, "rxrecord.chl" ). /* CFR048 */ helpfilename( rules = mk__record, "rxrecord.chl" ). /* CFR043,048 */ helpfilename( rules = seqlen, "rxseq.chl" ). /* CFR048 */ helpfilename( rules = append, "rxseq.chl" ). /* CFR048 */ helpfilename( rules = first, "rxseq.chl" ). /* CFR048 */ helpfilename( rules = last, "rxseq.chl" ). /* CFR048 */ helpfilename( rules = nonfirst, "rxseq.chl" ). /* CFR048 */ helpfilename( rules = nonlast, "rxseq.chl" ). /* CFR048 */ helpfilename( rules = seq, "rxseq.chl" ). /* CFR048 */ helpfilename( rules = sets, "rxsets.chl" ). /* CFR048 */ helpfilename( rules = inference, "rxspecial.chl" ). /* CFR048 */ helpfilename( rules = eq, "rxspecial.chl" ). /* CFR048 */ helpfilename( rules = equiv, "rxspecial.chl" ). /* CFR048 */ helpfilename( rules = simplify, "rxspecial.chl" ). /* CFR048 */ helpfilename( rules = logic, "rxspecial.chl" ). /* CFR048 */ helpfilename( rules = standardisation, "rxspecial.chl" ). /* CFR048 */ helpfilename( rules = unification, "rxspecial.chl" ). 
/* CFR048 */ %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/checker.pro0000644000175000017500000001115511753202340016171 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Top level file, to build the Checker. %############################################################################### %############################################################################### % DEPENDENCIES %############################################################################### :- use_module(library(file_systems), [close_all_streams/0]). %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % LOAD ENHANCED DEFECT REPORTING %=============================================================================== :- use_module('../simplifier/defectreporting.pro'). %=============================================================================== %=============================================================================== % DYNAMIC FEATURES PRE-LOAD %=============================================================================== % Establish sicstus settings. :- set_prolog_flag(unknown, error). % Establish all dynamic predicates. :- include('dynamics.pro'). %=============================================================================== %=============================================================================== %FIXED SYSTEM CONFIGURATION %------------------------------------------------------------------------------- %This information will be the same for every execution of the tool. %=============================================================================== :-use_module('../simplifier/data__system.pro', [add_system_toolname/1]). :-add_system_toolname('Proof Checker'). 
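%===============================================================================
% Note: the ":- Goal." directives above run as soon as this file is
% compiled (for instance during the "compile('checker.pro'), buildsav."
% build step), so the defect-reporting module, the dynamic declarations
% and the registered tool name are all in place before the saved state
% is written.  Setting the "unknown" flag to error also means that a
% call to an undefined predicate raises an existence error instead of
% quietly failing, which surfaces load-order mistakes early.
%===============================================================================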
%=============================================================================== %=============================================================================== % LOAD SYSTEM %=============================================================================== :- include('loadsicstus.pro'). %=============================================================================== %=============================================================================== % runtime_entry(start). %------------------------------------------------------------------------------- % This is a special Sicstus predicate, which will be automatically invoked % when restoring a saved state. It is the top level, or main, predicate. %=============================================================================== runtime_entry(start):- startup_sequence, close_all_streams, halt(0), !. %=============================================================================== %=============================================================================== % BUILD SAV AND HALT %=============================================================================== buildsav:- zero_defects_reported, save_program('checker.sav'), close_all_streams, halt(0). buildsav:- report_total_defects, close_all_streams, halt(1). %=============================================================================== %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/Makefile0000644000175000017500000000701511753202340015503 0ustar eugeneugen# ----------------------------------------------------------------------------- # (C) Altran Praxis Limited # ----------------------------------------------------------------------------- # # The SPARK toolset is free software; you can redistribute it and/or modify it # under terms of the GNU General Public License as published by the Free # Software Foundation; either version 3, or (at your option) any later # version. The SPARK toolset is distributed in the hope that it will be # useful, but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the GNU # General Public License distributed with the SPARK toolset; see file # COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of # the license. # # ============================================================================= .NOTPARALLEL: ################################################################################ # PURPOSE #------------------------------------------------------------------------------- # Makefile for the Checker. # For Linux, Windows, Solaris or Mac OS X (aka Darwin). ################################################################################ ################################################################################ # BUILD CONFIGURATION ################################################################################ # Name of the output program. OUTPUT_NAME:=checker # Location of root. ROOT:=.. # Location of common. COMMON:=${ROOT}/common include ${COMMON}/Makefile.inc ################################################################################ # PLATFORM SPECIFIC CONFIGURATION ################################################################################ # Darwin (Mac OS X). 
ifeq (${TARGET},Darwin) # Modify PATH to pick up Apple's gcc before GNAT Pro PATH:=/usr/bin:${PATH} endif ################################################################################ # TARGETS ################################################################################ all: ${OUTPUT_NAME}${EXE_EXTN} build_manifest # Note that the build assumes PATH is set to include SICSTUS binaries and (on # Windows) MS VC Tools and LIB and INCLUDE are set to point at MS VC libraries # and includes respectively. ${OUTPUT_NAME}${EXE_EXTN}: *.pro sicstus ${SICSTUS_SWITCHES} --goal "compile('checker.pro'), buildsav." spld ${SPLD_CONF} -o $@ --static --resources=${OUTPUT_NAME}${SAV_EXTN}=/${OUTPUT_NAME}${SAV_EXTN},${SICSTUS_LIBS} # Manifest only required on Windows. ifeq ($(findstring ${TARGET},Windows),${TARGET}) build_manifest: # Copy over the correct manifest file for this windows build. cp ${OUTPUT_NAME}.windows.manifest ${OUTPUT_NAME}${EXE_EXTN}.manifest else build_manifest: endif spxref: spxref -R -i spxref.pro checker.pro -w spxref_warning.txt -x spxref_cross.txt -m spxref_ported.txt -u spxref_undefined.txt spxrefplain: spxref # Normalize anonymous prolog variables. sed -e 's/_[0-9][0-9]*/_/g' spxref_undefined.txt > spxref_undefined.txt.tmp mv spxref_undefined.txt.tmp spxref_undefined.txt # Cleaning code base #=================== clean: rm -f checker.sav checker.exp checker.ilk checker.lib checker.pdb reallyclean: clean rm -f ${OUTPUT_NAME}${EXE_EXTN} ${OUTPUT_NAME}${EXE_EXTN}.manifest rm -f spxref_warning.txt spxref_cross.txt spxref_ported.txt spxref_undefined.txt ################################################################################ # END-OF-FILE spark-2012.0.deb/checker/simp.pro0000644000175000017500000014363611753202340015547 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /*** SIMPLIFY(in X,out Y) -- simplify expression X to get Y ***/ /* Simplify expressions of the form 'not(A)' where possible */ /* NOT1 */ simplify(not(true),false) :- !. /* NOT2 */ simplify(not(false),true) :- !. /* NOT3 */ simplify(not(not(X)),A) :- simplify(X,A), !. /* NOT4 */ simplify((not X),B) :- simplify(X,A), ( A=true, B=false ; A=false, B=true ; B=(not A) ), !. /* Simplify expressions of the form 'A and B' where possible */ /* AND1 */ simplify(false and _X,false) :- !. /* AND2 */ simplify(_X and false,false) :- !. /* AND3 */ simplify(X and (not X),false) :- !. /* AND4 */ simplify((not X) and X,false) :- !. /* AND5 */ simplify(true and X,Y) :- simplify(X,Y), !. /* AND6 */ simplify(X and true,Y) :- simplify(X,Y), !. /* AND7 */ simplify(X and X,Y) :- simplify(X,Y), !. 
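/* Worked example for the NOT/AND rules above: the rewrites can be
   chained without knowing anything about the operands.  For instance
   simplify(not(false) and true, Z) goes through AND6 and then NOT2 to
   give Z = true, and simplify(p and (not p), Z) is closed off
   immediately by AND3 with Z = false, whatever p is. */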
/* AND8 */ simplify(X and Y,Z) :- simplify(X,A), ( A=true, simplify(Y,Z) ; A=false, Z=A ; simplify(Y,B), ( ( B=true, Z=A ; B=false, Z=B ) ; ( A=(not B) ; A=(not Y) ; B=(not A) ; B=(not X) ), Z=false ; A=B, Z=A ; Z=(A and B) ) ), !. /* Simplify expressions of the form 'A or B' where possible. */ /* OR1 */ simplify(true or _X,true) :- !. /* OR2 */ simplify(_X or true,true) :- !. /* OR3 */ simplify(X or (not X),true) :- !. /* OR4 */ simplify((not X) or X,true) :- !. /* OR5 */ simplify(false or X,Y) :- simplify(X,Y), !. /* OR6 */ simplify(X or false,Y) :- simplify(X,Y), !. /* OR7 */ simplify(X or X,Y) :- simplify(X,Y), !. /* OR8 */ simplify(X or Y,Z) :- simplify(X,A), ( A=true, Z=A ; A=false, simplify(Y,Z) ; simplify(Y,B), ( B=true, Z=B ; B=false, Z=A ; ( A=(not B) ; A=(not Y) ; B=(not A) ; B=(not X) ), Z=true ; A=B, Z=A ; Z=(A or B) ) ), !. /* Simplify expressions of the form 'A -> B' where possible */ /* IMP1 */ simplify(false->_X,true) :- !. /* IMP2 */ simplify(_X->true,true) :- !. /* IMP3 */ simplify(X->X,true) :- !. /* IMP4 */ simplify(true->X,Y) :- simplify(X,Y), !. /* IMP5 */ simplify(X->false,A) :- simplify((not X),A), !. /* IMP6 */ simplify(X->(not X),Y) :- simplify((not X),Y), !. /* IMP7 */ simplify((not X)->X,Y) :- simplify(X,Y), !. /* IMP8 */ simplify(X -> (Y -> Z),A) :- simplify((X and Y) -> Z,A), !. /* IMP9 */ simplify(X->Y,Z) :- simplify(X,A), ( A=true, simplify(Y,Z) ; A=false, Z=true ; simplify(Y,B), ( B=true, Z=B ; B=false, simplify((not X),Z) ; ( A=(not B) ; A=(not Y) ; B=(not A) ; B=(not X) ), Z=B ; A=B, Z=true ; Z=(A->B) ) ), !. /* Simplify expressions of the form 'A <-> B' where possible */ /* EQV1 */ simplify(X<->X,true) :- !. /* EQV2 */ simplify(X<->(not X),false) :- !. /* EQV3 */ simplify((not X)<->X,false) :- !. /* EQV4 */ simplify(X<->true,Y) :- simplify(X,Y), !. /* EQV5 */ simplify(true<->X,Y) :- simplify(X,Y), !. /* EQV6 */ simplify(X<->false,Y) :- simplify((not X),Y), !. /* EQV7 */ simplify(false<->X,Y) :- simplify((not X),Y), !. /* EQV8 */ simplify(X <-> (Y <-> Z),B) :- simplify(X <-> Y,A), simplify(A <-> Z,B), !. /* EQV9 */ simplify(X <-> Y <-> Z,B) :- simplify(Y <-> Z,A), (Y <-> Z)\=A, simplify(X <-> A,B), !. /* EQV10 */ simplify(X <-> Y <-> Z,B) :- simplify(X <-> Z,A), (X <-> Z)\=A, simplify(A <-> Y,B), !. /* EQV11 */ simplify(X<->Y,Z) :- simplify(X,A), ( A=true, simplify(Y,Z) ; A=false, simplify((not Y),Z) ; simplify(Y,B), ( B=true, Z=A ; B=false, simplify((not X),Z) ; ( A=(not B) ; A=(not Y) ; B=(not A) ; B=(not X) ), Z=false ; A=B, Z=true ; Z=(A<->B) ) ), !. /* Simplify 'for_all( )' exprs. where possible */ /* ALL3 */ simplify(for_all(V:T,X),NEW) :- find_core_type(T, CT), ( var_const(V, CT, _), STATE = dont_retract ; asserta(var_const(V, CT, temp)), STATE = retract ), simplify(X,Y), ( Y = true, NEW = true ; Y = false, NEW = false ; NEW = (for_all(V:T, Y)) ), !, ( STATE = dont_retract ; retract(var_const(V, CT, temp)) ), !. /* Simplify 'for_some( )' exprs. where possible */ /* EXI3 */ simplify(for_some(V:T,X),NEW) :- find_core_type(T, CT), ( var_const(V, CT, _), STATE = dont_retract ; asserta(var_const(V, CT, temp)), STATE = retract ), simplify(X,Y), ( Y = true, NEW = true ; Y = false, NEW = false ; NEW = (for_some(V:T, Y)) ), !, ( STATE = dont_retract ; retract(var_const(V, CT, temp)) ), !. /* Simplify set-type expressions where possible */ /* SET1 */ simplify(A \/ B, S) :- !, set_simplify(A \/ B, S), !. /* SET2 */ simplify(A \ B, S) :- !, set_simplify(A \ B, S), !. /* SET3 */ simplify(A /\ B, S) :- !, set_simplify(A /\ B, S), !. 
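/* Illustrative sketch of the implication and equivalence rules above.  These
   match on shape alone, so no declarations are needed:
       ?- simplify(p -> p, X).          gives  X = true    (IMP3)
       ?- simplify(q <-> (not q), X).   gives  X = false   (EQV2)
   Here p and q stand for arbitrary boolean terms.
*/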
/* SET4 */ simplify((set A), S) :- !, set_simplify((set A), S), !. /* SET5 */ simplify(A subset_of B, S) :- !, set_simplify(A subset_of B, S), !. /* SET6 */ simplify(A strict_subset_of B, S) :- !, set_simplify(A strict_subset_of B, S), !. /* SET7 */ simplify(A in B, S) :- !, set_simplify(A in B, S), !. /* SET8 */ simplify(A not_in B, S) :- !, set_simplify(A not_in B, S), !. /* Simplify atomic formulae where possible */ /* REL1 */ simplify(X=Y,Z) :- checktype(X,T), ( type(T, set(_)), !, set_simplify(X=Y,Z) ; type(T,enumerated), !, enumerated_simplify(X=Y,Z) ; simplify(X,A), simplify(Y,B), ( A=B, Z=true ; signed_integer(A), ( /* CFR050 */ signed_integer(B), /* CFR050 */ A\=B, /* CFR050 */ Z=false /* CFR050 */ ; /* CFR050 */ B=P+Q, /* CFR050 */ ( /* CFR050 */ signed_integer(P), /* CFR050 */ simplify(A-P,R), /* CFR050 */ Z=(Q=R) /* CFR050 */ ; /* CFR050 */ signed_integer(Q), /* CFR050 */ simplify(A-Q,R), /* CFR050 */ Z=(P=R) /* CFR050 */ ) /* CFR050 */ ; /* CFR050 */ B=P-Q, /* CFR050 */ ( /* CFR050 */ signed_integer(P), /* CFR050 */ simplify(P-A,R), /* CFR050 */ Z=(Q=R) /* CFR050 */ ; /* CFR050 */ signed_integer(Q), /* CFR050 */ simplify(A+Q,R), /* CFR050 */ Z=(P=R) /* CFR050 */ ) /* CFR050 */ ) /* CFR050 */ ; /* CFR050 */ signed_integer(B), /* CFR050 */ ( /* CFR050 */ A=P+Q, /* CFR050 */ ( /* CFR050 */ signed_integer(P), /* CFR050 */ simplify(B-P,R), /* CFR050 */ Z=(Q=R) /* CFR050 */ ; /* CFR050 */ signed_integer(Q), /* CFR050 */ simplify(B-Q,R), /* CFR050 */ Z=(P=R) /* CFR050 */ ) /* CFR050 */ ; /* CFR050 */ A=P-Q, /* CFR050 */ ( /* CFR050 */ signed_integer(P), /* CFR050 */ simplify(P-B,R), /* CFR050 */ Z=(Q=R) /* CFR050 */ ; /* CFR050 */ signed_integer(Q), /* CFR050 */ simplify(B+Q,R), /* CFR050 */ Z=(P=R) /* CFR050 */ ) /* CFR050 */ ) /* CFR050 */ ; /* CFR050 */ Z=(A=B) /* CFR050 */ ) /* CFR050 */ ), !. /* REL2 */ simplify(X>Y,Z) :- checktype(X,T), ( type(T,enumerated), !, enumerated_simplify(X>Y,Z) ; simplify(X,A), simplify(Y,B), ( signed_integer(A), ( /* CFR050 */ signed_integer(B), /* CFR050 */ ( /* CFR050 */ B less_than A, /* CFR050 */ Z=true /* CFR050 */ ; /* CFR050 */ ( /* CFR050 */ A=B /* CFR050 */ ; /* CFR050 */ A less_than B /* CFR050 */ ), /* CFR050 */ Z=false /* CFR050 */ ) /* CFR050 */ ; /* CFR050 */ B=P+Q, /* CFR050 */ ( /* CFR050 */ signed_integer(P), /* CFR050 */ simplify(A-P,R), /* CFR050 */ Z=(QR) /* CFR050 */ ; /* CFR050 */ signed_integer(Q), /* CFR050 */ simplify(A+Q,R), /* CFR050 */ Z=(PR) /* CFR050 */ ; /* CFR050 */ signed_integer(Q), /* CFR050 */ simplify(B-Q,R), /* CFR050 */ Z=(P>R) /* CFR050 */ ) /* CFR050 */ ; /* CFR050 */ A=P-Q, /* CFR050 */ ( /* CFR050 */ signed_integer(P), /* CFR050 */ simplify(P-B,R), /* CFR050 */ Z=(QR) /* CFR050 */ ) /* CFR050 */ ) /* CFR050 */ ; /* CFR050 */ Z=(A>B) /* CFR050 */ ) /* CFR050 */ ), !. 
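/* Illustrative sketch of the CFR050 constant-folding in REL1/REL2 above,
   assuming checktype/2 resolves the operands to an ordinary integer type (so
   neither the set nor the enumerated branch fires):
       ?- simplify(5 = b + 3, Z).       gives  Z = (b = 2)
   i.e. the literal 3 is folded into the other side via simplify(5-3,R) and
   the relation is rebuilt as b = R.
*/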
/* REL3 */ simplify(XR) /* CFR050 */ ; /* CFR050 */ signed_integer(Q), /* CFR050 */ simplify(A-Q,R), /* CFR050 */ Z=(P>R) /* CFR050 */ ) /* CFR050 */ ; /* CFR050 */ B=P-Q, /* CFR050 */ ( /* CFR050 */ signed_integer(P), /* CFR050 */ simplify(P-A,R), /* CFR050 */ Z=(QR) /* CFR050 */ ) /* CFR050 */ ) /* CFR050 */ ; /* CFR050 */ signed_integer(B), /* CFR050 */ ( /* CFR050 */ A=P+Q, /* CFR050 */ ( /* CFR050 */ signed_integer(P), /* CFR050 */ simplify(B-P,R), /* CFR050 */ Z=(QR) /* CFR050 */ ; /* CFR050 */ signed_integer(Q), /* CFR050 */ simplify(B+Q,R), /* CFR050 */ Z=(PY,Z) :- checktype(X,T), ( type(T, set(_)), !, set_simplify(X<>Y,Z) ; type(T,enumerated), !, enumerated_simplify(X<>Y,Z) ; simplify(X,A), simplify(Y,B), ( A=B, Z=false ; signed_integer(A), ( /* CFR050 */ signed_integer(B), /* CFR050 */ ( /* CFR050 */ A\=B, /* CFR050 */ Z=true /* CFR050 */ ; /* CFR050 */ A=B, /* CFR050 */ Z=true /* CFR050 */ ) /* CFR050 */ ; /* CFR050 */ B=P+Q, /* CFR050 */ ( /* CFR050 */ signed_integer(P), /* CFR050 */ simplify(A-P,R), /* CFR050 */ Z=(Q<>R) /* CFR050 */ ; /* CFR050 */ signed_integer(Q), /* CFR050 */ simplify(A-Q,R), /* CFR050 */ Z=(P<>R) /* CFR050 */ ) /* CFR050 */ ; /* CFR050 */ B=P-Q, /* CFR050 */ ( /* CFR050 */ signed_integer(P), /* CFR050 */ simplify(P-A,R), /* CFR050 */ Z=(Q<>R) /* CFR050 */ ; /* CFR050 */ signed_integer(Q), /* CFR050 */ simplify(A+Q,R), /* CFR050 */ Z=(P<>R) /* CFR050 */ ) /* CFR050 */ ) /* CFR050 */ ; /* CFR050 */ signed_integer(B), /* CFR050 */ ( /* CFR050 */ A=P+Q, /* CFR050 */ ( /* CFR050 */ signed_integer(P), /* CFR050 */ simplify(B-P,R), /* CFR050 */ Z=(Q<>R) /* CFR050 */ ; /* CFR050 */ signed_integer(Q), /* CFR050 */ simplify(B-Q,R), /* CFR050 */ Z=(P<>R) /* CFR050 */ ) /* CFR050 */ ; /* CFR050 */ A=P-Q, /* CFR050 */ ( /* CFR050 */ signed_integer(P), /* CFR050 */ simplify(P-B,R), /* CFR050 */ Z=(Q<>R) /* CFR050 */ ; /* CFR050 */ signed_integer(Q), /* CFR050 */ simplify(B+Q,R), /* CFR050 */ Z=(P<>R) /* CFR050 */ ) /* CFR050 */ ) /* CFR050 */ ; /* CFR050 */ Z=(A<>B) /* CFR050 */ ) /* CFR050 */ ), !. 
/* REL5 */ simplify(X<=Y,Z) :- checktype(X,T), ( type(T,enumerated), !, enumerated_simplify(X<=Y,Z) ; simplify(X,A), simplify(Y,B), ( A=B, /* CFR050 */ Z=true /* CFR050 */ ; /* CFR050 */ signed_integer(A), /* CFR050 */ ( /* CFR050 */ signed_integer(B), /* CFR050 */ ( /* CFR050 */ A less_than B, /* CFR050 */ Z=true /* CFR050 */ ; /* CFR050 */ B less_than A, /* CFR050 */ Z=false /* CFR050 */ ) /* CFR050 */ ; /* CFR050 */ B=P+Q, /* CFR050 */ ( /* CFR050 */ signed_integer(P), /* CFR050 */ simplify(A-P,R), /* CFR050 */ Z=(R<=Q) /* CFR050 */ ; /* CFR050 */ signed_integer(Q), /* CFR050 */ simplify(A-Q,R), /* CFR050 */ Z=(R<=P) /* CFR050 */ ) /* CFR050 */ ; /* CFR050 */ B=P-Q, /* CFR050 */ ( /* CFR050 */ signed_integer(P), /* CFR050 */ simplify(P-A,R), /* CFR050 */ Z=(Q<=R) /* CFR050 */ ; /* CFR050 */ signed_integer(Q), /* CFR050 */ simplify(A+Q,R), /* CFR050 */ Z=(R<=P) /* CFR050 */ ) /* CFR050 */ ) /* CFR050 */ ; /* CFR050 */ signed_integer(B), /* CFR050 */ ( /* CFR050 */ A=P+Q, /* CFR050 */ ( /* CFR050 */ signed_integer(P), /* CFR050 */ simplify(B-P,R), /* CFR050 */ Z=(Q<=R) /* CFR050 */ ; /* CFR050 */ signed_integer(Q), /* CFR050 */ simplify(B-Q,R), /* CFR050 */ Z=(P<=R) /* CFR050 */ ) /* CFR050 */ ; /* CFR050 */ A=P-Q, /* CFR050 */ ( /* CFR050 */ signed_integer(P), /* CFR050 */ simplify(P-B,R), /* CFR050 */ Z=(R<=Q) /* CFR050 */ ; /* CFR050 */ signed_integer(Q), /* CFR050 */ simplify(B+Q,R), /* CFR050 */ Z=(P<=R) /* CFR050 */ ) /* CFR050 */ ) /* CFR050 */ ; /* CFR050 */ Z=(A<=B) /* CFR050 */ ) /* CFR050 */ ), !. /* REL6 */ simplify(X>=Y,Z) :- checktype(X,T), ( type(T,enumerated), !, enumerated_simplify(X>=Y,Z) ; simplify(X,A), simplify(Y,B), ( A=B, Z=true ; signed_integer(A), ( /* CFR050 */ signed_integer(B), /* CFR050 */ ( /* CFR050 */ B less_than A, /* CFR050 */ Z=true /* CFR050 */ ; /* CFR050 */ A less_than B, /* CFR050 */ Z=false /* CFR050 */ ) /* CFR050 */ ; /* CFR050 */ B=P+Q, /* CFR050 */ ( /* CFR050 */ signed_integer(P), /* CFR050 */ simplify(A-P,R), /* CFR050 */ Z=(Q<=R) /* CFR050 */ ; /* CFR050 */ signed_integer(Q), /* CFR050 */ simplify(A-Q,R), /* CFR050 */ Z=(P<=R) /* CFR050 */ ) /* CFR050 */ ; /* CFR050 */ B=P-Q, /* CFR050 */ ( /* CFR050 */ signed_integer(P), /* CFR050 */ simplify(P-A,R), /* CFR050 */ Z=(Q>=R) /* CFR050 */ ; /* CFR050 */ signed_integer(Q), /* CFR050 */ simplify(A+Q,R), /* CFR050 */ Z=(P<=R) /* CFR050 */ ) /* CFR050 */ ) /* CFR050 */ ; /* CFR050 */ signed_integer(B), /* CFR050 */ ( /* CFR050 */ A=P+Q, /* CFR050 */ ( /* CFR050 */ signed_integer(P), /* CFR050 */ simplify(B-P,R), /* CFR050 */ Z=(Q>=R) /* CFR050 */ ; /* CFR050 */ signed_integer(Q), /* CFR050 */ simplify(B-Q,R), /* CFR050 */ Z=(P>=R) /* CFR050 */ ) /* CFR050 */ ; /* CFR050 */ A=P-Q, /* CFR050 */ ( /* CFR050 */ signed_integer(P), /* CFR050 */ simplify(P-B,R), /* CFR050 */ Z=(Q<=R) /* CFR050 */ ; /* CFR050 */ signed_integer(Q), /* CFR050 */ simplify(B+Q,R), /* CFR050 */ Z=(P>=R) /* CFR050 */ ) /* CFR050 */ ) /* CFR050 */ ; /* CFR050 */ Z=(A>=B) /* CFR050 */ ) /* CFR050 */ ), !. 
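/* Illustrative sketch of the ordering rules REL5/REL6 above, which apply the
   same constant folding to <= and >=.  Assuming checktype/2 reports an
   ordinary (non-enumerated) type for the left-hand side:
       ?- simplify(x + 1 <= 10, Z).     gives  Z = (x <= 9)
   via the signed_integer(B), A = P + Q branch, which computes
   simplify(10 - 1, R) and returns P <= R.
*/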
/* ODD */ simplify(odd(X), ODD) :- simplify(X, NEWX), ( signed_integer(NEWX), X1 iss NEWX*NEWX, ( X2 iss (X1 div 2)*2, X1 =:= X2, ODD=false ; X2 iss (X1 div 2)*2, X1 =\= X2, ODD=true ) ; NEWX=A*B, simplify(odd(A), TFA), simplify(odd(B), TFB), ( ( TFA = false ; TFB = false ), ODD = false ; TFA = true, ODD = TFB ; TFB = true, ODD = TFA ) ; ( NEWX=A+B ; NEWX=A-B ), simplify(odd(A), TFA), simplify(odd(B), TFB), ( TFA = true, ( TFB = true, ODD = false ; TFB = false, ODD = true ; ODD = (not TFB) ) ; TFA = false, ODD = TFB ; TFB = true, ODD = (not TFA) ; TFB = false, ODD = TFA ) ; NEWX = (-A), simplify(odd(A), ODD) ; ODD = odd(NEWX) ), !. /* SQR1*/ simplify(sqr(abs(X)), SQR) :- simplify(X*X, SQR), !. /* SQR2*/ simplify(sqr(X), SQR) :- simplify(X*X, SQR), !. /* ABS */ simplify(abs(X), ABS) :- simplify(X, NEWX), ( signed_integer(NEWX), ( NEWX >= 0, ABS = NEWX ; NEWX < 0, ABS is -NEWX ) ; NEWX = A*A, ABS = NEWX ; NEWX = A*B, simplify(abs(A), ABSA), simplify(abs(B), ABSB), ABS = ABSA*ABSB ; NEWX = abs(_EXPR), ABS = NEWX ; ABS = abs(NEWX) ), !. /* Simplify array, record & sequence type objects if possible */ /* ARR */ simplify(X, Y) :- array_simplify(X, Y), !. /* REC */ simplify(X, Y) :- record_simplify(X, Y), !. /* SEQ */ simplify(X, Y) :- sequence_simplify(X, Y), !. /* ENU1 */ simplify(succ(X), Y) :- enumerated_simplify(succ(X), Y), !. /* ENU2 */ simplify(pred(X), Y) :- enumerated_simplify(pred(X), Y), !. /* Final catch-all */ /* EVAL */ simplify(X,Y) :- evaluate(X,Y), !. /*** EVALUATE(in X,out Y) -- evaluate (non-boolean) X to get Y ***/ /* EVAL_BASE1 */ evaluate(X,X) :- ( signed_integer(X) ; X=true ; X=false ), !. /* EVAL_BASE2 */ evaluate(X,Y) :- integer(X), X<0, Y iss X, !. /* UMIN1 */ evaluate(-(-X),A) :- simplify(X,A), !. /* UMIN2 */ evaluate(-X,A) :- simplify(X,B), ( signed_integer(B), A iss -B ; A=(-B) ), !. /* UPLUS */ evaluate(+X, A) :- simplify(X, A), !. /* CFR039 */ /*SPECIAL*/ evaluate(X+N-N,Y) :- simplify(X,Y), !. evaluate(X-N+N,Y) :- simplify(X,Y), !. /* PLUS */ evaluate(X+Y,Z) :- simplify(X,A), simplify(Y,B), ( signed_integer(A), signed_integer(B), Z iss A+B ; A=0, Z=B ; B=0, Z=A ; Z=A+B ), !. /* MINUS */ evaluate(X-Y,Z) :- simplify(X,A), simplify(Y,B), ( signed_integer(A), signed_integer(B), Z iss A-B ; B=0, Z=A ; A=0, simplify(-Y,Z) ; Z=A-B ), !. /* MULT */ evaluate(X*Y,Z) :- simplify(X,A), simplify(Y,B), ( signed_integer(A), signed_integer(B), Z iss A*B ; ( A=0 ; B=0 ), Z=0 ; A=1, Z=B ; B=1, Z=A ; Z=A*B ), !. /* DIV */ evaluate(X div Y,Z) :- simplify(X,A), simplify(Y,B), ( signed_integer(A), signed_integer(B), B\=0, Z iss A div B ; B=1, Z=A ; Z=A div B ), !. /* / */ evaluate(X / Y,Z) :- /*1.4*/ simplify(X,A), /*1.4*/ simplify(Y,B), /*1.4*/ ( /*1.4*/ signed_integer(A), /*1.4*/ signed_integer(B), /*1.4*/ B\=0, /*1.4*/ Z iss A div B, /*1.4*/ A =:= Z * B /*1.4*/ ; /*1.4*/ B=1, /*1.4*/ Z=A /*1.4*/ ; /*1.4*/ Z=A / B /*1.4*/ ), !. /*1.4*/ /* MOD */ evaluate(X mod Y,A mod B) :- simplify(X,A), simplify(Y,B), !. /* TEMPORARILY */ /* OLD MOD STUFF: ( signed_integer(A), signed_integer(B), B\=0, Z iss A mod B ; B=1, Z=0 ; Z=(A mod B) ), !. DELETED */ /* EXP */ evaluate(X**Y,Z) :- /* CFR038 */ simplify(X,A), /* CFR038 */ simplify(Y,B), /* CFR038 */ ( /* CFR038 */ signed_integer(A), /* CFR038 */ signed_integer(B), /* CFR038 */ Z iss A**B /* CFR038 */ ; /* CFR038 */ B=0, /* CFR038 */ Z=1 /* CFR038 */ ; /* CFR038 */ B=1, /* CFR038 */ Z=A /* CFR038 */ ; /* CFR038 */ B=2, /* CFR038 */ Z=A*A /* CFR038 */ ; /* CFR038 */ Z=A**B /* CFR038 */ ), !. 
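/* Illustrative sketch of the arithmetic evaluation above.  evaluate/2 bottoms
   out in the iss/2 arithmetic of aritheval.pro (later in this archive), which
   represents negative results as -(N):
       ?- simplify(2 + 3, X).           gives  X = 5
       ?- simplify(2 - 5, X).           gives  X = -3     (the term -(3))
       ?- simplify(0*a + 1*b, X).       gives  X = b      (MULT zero/unit rules, then PLUS)
   The last query assumes a and b are plain identifiers with no record,
   sequence or rewrite facts loaded, so each simplifies to itself.
*/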
/* CFR038 */ /* EVAL_VAL */ evaluate(X,Y) :- val(X,Y), !. /* FUNC_SPLIT */ evaluate(X,Z) :- (\+ atomic(X)), X=..[H|T], eval_list(T,U), Z=..[H|U], !. /*** EVAL_LIST(in XL,out YL) -- simplify/evaluate each element of XL to get YL ***/ /* EVL1 */ eval_list([],[]) :- !. /* EVL2 */ eval_list([H1|T1],[H2|T2]) :- simplify(H1,H2), eval_list(T1,T2), !. /*** VAL(in X,out Y) -- additional simplification rules slot in here ***/ val(X,X) :- atomic(X), !. /*** SIGNED_INTEGER(I) -- is I an integer (with an optional -)? ***/ signed_integer(I) :- ( integer(I), I>=0 ; I=(-I1), integer(I1), I1>0 ). /*** X LESS_THAN Y -- compare two signed_integers ***/ -X less_than -Y :- integer(X), X>0, integer(Y), Y>0, Y less_than X. -X less_than Y :- integer(X), X>0, integer(Y), Y>=0. X less_than Y :- integer(X), X>=0, integer(Y), Y>X. %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/deduce.pro0000644000175000017500000000745611753202340016027 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /*** DEDUCE -- top-level checker command ***/ deduce :- ( command_arg(expression, EXPRN) ; prompt_user('DEDUCE -- Type formula to deduce.', 'DEDUCE -- Formula? '), rread(EXP), parse_expression(EXP, EXPRN) ), ( EXPRN=c#N, conc(N,FORMULA) ; novars(EXPRN), /* CFR009 */ checktype(EXPRN, boolean), /* CFR009 */ FORMULA=EXPRN ), ( command_arg(hyplist, HYPLIST) ; prompt_user('Hypotheses to be used? '), rread(HYPLIST), check_hyplist(HYPLIST) ), build_formula(FORMULA, HYPLIST, F1), ( try_deduce(F1), !, write('*** '), print(F1), nl, write('*** '), print(FORMULA), write(' by logical deduction'), nl, add_new_hyp(FORMULA,1) ; !, write('*** FAILED'), nl, fail ), ( EXPRN=c#N, done(N) ; true ), !. /*** BUILD_FORMULA(C,HYPS,F) - create a "HYPS -> C" formula, F ***/ build_formula(F,[],F) :- !. build_formula(F,[N],X -> F) :- !, hyp(N,X), !. build_formula(F,[N|T],(H and Y) -> F) :- build_formula(F,T,Y -> F), hyp(N,H), !. /*** TRY_DEDUCE(F) - deduce formula F by truth-table means if possible ***/ try_deduce(F) :- var_in(F,V), !, subst_vbl(V,false,F,F1), try_deduce(F1), subst_vbl(V,true,F,F2), try_deduce(F2), !. try_deduce(F) :- simplify(F,true), !. /*** VAR_IN(FORM,ATF) - find an atf ATF in formula FORM if possible ***/ var_in(not F,V) :- var_in(F,V). var_in(X and Y,V) :- (var_in(X,V) ; var_in(Y,V)). var_in(X or Y,V) :- (var_in(X,V) ; var_in(Y,V)). var_in(X -> Y,V) :- (var_in(X,V) ; var_in(Y,V)). var_in(X <-> Y,V) :- (var_in(X,V) ; var_in(Y,V)). var_in(V,V) :- logic_free(V), V\=true, V\=false. /*** LOGIC_FREE(F) - succeeds if no connectives in F, i.e. 
if F is an atf ***/ logic_free(not _) :- !, fail. logic_free(_ or _) :- !, fail. logic_free(_ and _) :- !, fail. logic_free(_ -> _) :- !, fail. logic_free(_ <-> _) :- !, fail. logic_free(_). /*** SUBST_VBL(V,X,OLD,NEW) - substitute all V in OLD by X to get NEW ***/ subst_vbl(V,X,V,X) :- !. subst_vbl(_V,_X,Y,Y) :- atomic(Y), !. subst_vbl(V,X,F,F1) :- F=..[OP|Args], subst_vbl_list(V,X,Args,Args1), F1=..[OP|Args1], !. /*** SUBST_VBL_LIST(V,X,OL,NL) - substitute all V in OL by X to get NL ***/ subst_vbl_list(V,X,[A],[A1]) :- subst_vbl(V,X,A,A1), !. subst_vbl_list(V,X,[A|Args],[A1|Args1]) :- subst_vbl(V,X,A,A1), !, subst_vbl_list(V,X,Args,Args1), !. %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/semistan.pro0000644000175000017500000000517611753202340016416 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /*** STAND_ALL - (update) standardisation of all hypotheses of the VC ***/ stand_all :- vc_standardisation(off), !. stand_all :- hyp(N,_X), integer(N), stan_hyp(N), fail. stand_all. /*** STAN_HYP(N) -- standardise, and if possible semi-standardise, hyp N ***/ stan_hyp(N) :- hyp(s(N),_), !. stan_hyp(N) :- \+ hyp(s(N),_), hyp(N,X), norm_typed_expr(X,boolean,X1), assertz(hyp(s(N),X1)), semi_stan(N,X), !. /*** SEMI_STAN(N,FORMULA) -- semi-standardise relational FORMULA, hyp N ***/ semi_stan(N,X1=X2) :- checktype(X1,T), norm_typed_expr(X1,T,Y1), norm_typed_expr(X2,T,Y2), assertz(hyp(ss(N),Y1=Y2)), !. semi_stan(N,X1<>X2) :- checktype(X1,T), norm_typed_expr(X1,T,Y1), norm_typed_expr(X2,T,Y2), assertz(hyp(ss(N),Y1<>Y2)), !. semi_stan(N,X1>X2) :- norm_typed_expr(X1,integer,Y1), norm_typed_expr(X2,integer,Y2), assertz(hyp(ss(N),Y1>Y2)), !. semi_stan(N,X1=X2) :- norm_typed_expr(X1,integer,Y1), norm_typed_expr(X2,integer,Y2), assertz(hyp(ss(N),Y1>=Y2)), !. semi_stan(N,X1<=X2) :- norm_typed_expr(X1,integer,Y1), norm_typed_expr(X2,integer,Y2), assertz(hyp(ss(N),Y1<=Y2)), !. semi_stan(N,not X) :- neg(X,Y), Y\=(not _), semi_stan(N,Y), !. 
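/* Illustrative note on stan_hyp/1 and semi_stan/2 above: they only add
   derived copies of a hypothesis, leaving the original hyp(N, _) fact in
   place.  For a hypothesis hyp(3, a + 1 > b), stan_hyp(3) would be expected
   to assert hyp(s(3), H) with H the boolean normalisation of the formula,
   and then, because the formula is relational, hyp(ss(3), L > R) with both
   sides normalised as integers.  The exact shape of H, L and R depends on
   norm_typed_expr/3, which is defined elsewhere in the Checker.
*/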
%############################################################################### %END-OF-FILE spark-2012.0.deb/checker/setflags.pro0000644000175000017500000001032711753202340016375 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /*** SET -- top-level predicate to set a flag to a particular value ***/ (set) :- ( command_arg(flag, FLAG) ; prompt_user('Flag name? '), rread(FLAG) ), is_a_flag(FLAG, VALUETYPE), !, ( command_arg(value, VALUE) ; prompt_user('New value? '), rread(VALUE) ), ok_value(VALUE, VALUETYPE), !, OLD =.. [FLAG, OLDVAL], /* CFR045 */ NEW =.. [FLAG, VALUE], !, retractall(OLD), asserta(NEW), !, ( /* CFR045 */ FLAG = command_logging, /* CFR045 */ ( /* CFR045 */ VALUE = OLDVAL /* so no change */ /* CFR045 */ ; /* CFR045 */ VALUE = off, /* CFR045 */ command_log_filename(CLG), /* CFR045 */ tell(CLG), /* CFR045 */ told /* CFR045 */ ; /* CFR045 */ VALUE = on, /* CFR045 */ command_log_filename(CLG), /* CFR045 */ telling(NOW), /* CFR045 */ ( file_can_be_written(CLG) ; \+ file_can_be_written(CLG), write('Aborted: '), print(CLG), write(' cannot be written.'), nl, !, close_all_streams, halt ), tell(CLG), /* CFR045 */ write("set command_logging to on."), /* CFR045 */ nl, /* CFR045 */ telling(NOW), /* CFR045 */ tell(NOW) /* CFR045 */ ) /* CFR045 */ ; /* CFR045 */ true /* CFR045 */ ), /* CFR045 */ !. /*** SHOW -- top-level command to display current values of all flags ***/ show :- is_a_flag(FLAG, VALUETYPE), PREDICATE =.. [FLAG, VALUE], call(PREDICATE), print(FLAG), write(' := '), write_flag_value(VALUETYPE, VALUE), nl, fail. show :- statistics, fail. show. write_flag_value(int_list/string, LIST) :- put_code(34), write_flag_string(LIST), put_code(34), !. write_flag_value(_, VALUE) :- print(VALUE), !. write_flag_string([H|T]) :- put_code(H), !, write_flag_string(T). write_flag_string([]) :- !. %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/done3.pro0000644000175000017500000001550511753202340015600 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. 
The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /******** DONE: top-level checker command ********/ done :- \+ command_arg(to_do, _), /* CFR024 */ nl, max_conc_no(MAX), done_range(1,MAX), fail. done :- command_arg(expression, CONCS), ( CONCS=c#N, done(N) ; CONCS=c#N-M, done_range(N,M) ), fail. done :- all_done, fail. done :- logfact(proved, _), !, retractall(do_not_issue_failure_message). done :- retract(do_not_issue_failure_message), !, retractall(do_not_issue_failure_message). done :- write('*** Cannot eliminate any conclusions at present'), nl, fail. done(N) :- conc(N,X), infer(X), write('*** PROVED C'), print(N), write(': '), print(X), assertz(logfact(proved, conc(N,X))), retract(conc(N,X)), case_pointer(CP), ( on_case(CP,CN,_), write(' FOR CASE '), print(CN) ; true ), !, nl, /* CFR024 */ ( /* CFR024 */ \+ conc(_, _), /* CFR024 */ auto_done(on), /* CFR024 */ all_done /* CFR024 */ ; /* CFR024 */ true /* CFR024 */ ), /* CFR024 */ !. /* CFR024 */ /*** ALL_DONE - see if all conclusions at present depth done & act if so ***/ all_done :- \+ conc(_,_), case_pointer(CP), ( CP=0, /* CFR024 */ logfact(proved, vc(_)), /* CFR024 */ ! /* CFR024 */ ; /* CFR024 */ CP=0, !, write('*** VC PROVED -- Well done!'), nl, assertz(logfact(proved, all)), current_vc(VCNAME, _), assertz(logfact(proved, vc(VCNAME))), update_vcs_to_prove, nl, /* CFR025 */ assertz(time_for_new_vc) /* CFR025 */ ; CP>0, ( on_case(CP, CASE, _), maybe_add(proved_for_case(CP, CASE)), fail ; case(CP, N, F), \+ proved_for_case(CP, N), !, maybe_add(do_not_issue_failure_message), case(N) ; subgoal_formula(CP, F, N, METHOD), write('*** PROVED '), print(F), write(' BY '), print(METHOD), nl, CP1 is CP-1, retractall(case_pointer(_)), asserta(case_pointer(CP1)), case_restore(CP), /* ??? */ assertz(logfact(exitframe, CP)), write('[Exiting depth '), /* CFR024 */ print(CP), /* CFR024 */ write(' proof frame]'), /* CFR024 */ nl, /* CFR024 */ retractall(proved_for_case(CP,_)), retractall(case(CP,_,_)), retractall(subgoal_formula(CP,_,_,_)), clear_up_could_facts, add_new_hyp(F,1), ( integer(N), !, ( auto_done(on), !, done ; done(N) ) ; maybe_add(do_not_issue_failure_message) ) ) ), !. done_range(N,N) :- done(N), !. done_range(N,M) :- N ), op(700, yfx, subset_of ), op(700, yfx, strict_subset_of ), op(700, xfx, iss), /* Signed 'is' */ op(700, yfx, less_than), /* Arithmetic */ op(700, yfx, <= ), /* Different ordering to Simplifier */ op(800, yfx, in ), op(800, yfx, not_in ), op(800, yfx, '..'), /* Used for range notation */ op(900, fy, not ), op(925, yfx, and ), op(950, yfx, or ), op(950, yfx, xor ), /* Guesstimated precedence! 
*/ /* Different ordering to Simplifier */ op(975, yfx, -> ), op(975, yfx, requires), op(985, yfx, <-> ), /* Numbering different to Simplifier from here downwards */ op(994, yfx, ':='), /* Used in array aggregates */ /* Consult and load don't appear in Simplifier */ op(995,fx,consult), /* For initialisation file */ op(995,fx,load), /* For initialisation file */ op(995, yfx, &), /* Used in array aggregates */ op(995, fx, rule_family), /* Not in Simplifier */ op(996,yfx,where), /* New command-line syntax */ /* Not in Simplifier */ op(997,yfx,by), /* New command-line syntax */ op(997,yfx,with), /* New command-line syntax */ op(997,yfx,to), /* New command-line syntax */ op(997, yfx, may_be_deduced_from), op(997, yfx, may_be_replaced_by), op(997, xf, are_interchangeable), op(997, yfx, if), op(997, xf, may_be_deduced), /* Not in Simplifier */ op(998,yfx,=>), /* Substitution */ op(998,yfx,using), /* New command-line syntax */ op(998,yfx,for), /* New command-line syntax */ op(998,yfx,on), /* New command-line syntax */ op(999, xfy, :), /* Used in quantification */ /* Not in Simplifier */ op(999,yfx,from), /* New command-line syntax */ !. :- declare_operators. %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/loadsicstus.pro0000644000175000017500000001023311753202340017116 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /* CONSULT CHECKER MODULES */ :- use_module('checker_ioutilities.pro'). :- include('sicstus.pro'). :- include('declar.pro'). :- include('portray.pro'). :- include('utilities.pro'). :- include('newvc.pro'). :- include('listthm.pro'). :- include('aritheval.pro'). :- include('replace2.pro'). :- include('repall.pro'). /* New for 1.2 */ :- include('standard.pro'). :- include('simp.pro'). :- include('infer2.pro'). :- include('inferenc2.pro'). :- include('records2.pro'). /*** MODIFICATIONS ***/ :- include('subgoal.pro'). :- include('deduce.pro'). :- include('deduction.pro'). :- include('done3.pro'). /*** MODIFICATIONS ***/ :- include('cases2.pro'). :- include('contra.pro'). :- include('fwdch2.pro'). :- include('simplify.pro'). :- include('typecheck5.pro'). :- include('traverse.pro'). /* New for 1.2 */ :- include('semistan.pro'). :- include('prooflogs.pro'). :- include('newrules.pro'). :- include('loadvc5.pro'). :- include('quantif.pro'). :- include('induction.pro'). :- include('toplevel.pro'). :- include('setflags.pro'). :- include('help.pro'). :- include('rulefiles.pro'). :- include('getdcldat.pro'). /* New for 1.2 */ :- include('initialise.pro'). :- include('save.pro'). 
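/* Note on how this include list is used: checker.pro pulls this file in via
   ":- include('loadsicstus.pro')." (see the LOAD SYSTEM section earlier in
   this archive), and the Makefile builds the executable with
       sicstus ${SICSTUS_SWITCHES} --goal "compile('checker.pro'), buildsav."
   so that buildsav/0 saves checker.sav, which spld then embeds as a resource
   in the final checker executable.
*/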
/* If the license(yes) predicate is in the database then load the * getlicence.pl file. If a call fails then it is printed at execution * time. The second clause is included to prevent this. */ :- [initvals]. /* Dynamic predicates */ /* _After_ all dynamic predicates have been introduced, we need */ /* to disable "dynamic" as an operator in case a user has an FDL */ /* entity called "dynamic". Same goes for the other predefined */ /* PROLOG operators that might look like FDL identifiers */ /* See the SICSTUS manual section "Standard Operators" */ :- op(0, fx, [mode,public,dynamic,multifile,volatile,block,meta_predicate,discontiguous,initialization]). :- op(0, fx, [spy,nospy]). /*** PREPARE CHECKER FOR SAVING !!! ***/ startup_sequence :- /* C1.41: newlines moved to after getlicence */ machine_startup, /* CFR048 */ prompt(_OLD, ' '), process_command_line_data, display_header(user_output), format(user_output, '~n~n', []), read_initialisations, (load_vc), nl, write('Welcome to the SPADE Proof Checker -- for assistance type "help"'), nl, nl, fail. %-------------------------------------------------------------------------------- startup_sequence :- load_buffered_libs, write_log, fail. startup_sequence :- do_do_newvc, see_correct_input_stream, execute_command(newvc), write_log, fail. startup_sequence :- !, start. load_buffered_libs :- retract(ini_file_consult(FILE)), write('Consulting rule file '), print(FILE), write(' ...'), nl, user_rules(FILE), fail. load_buffered_libs :- !. machine_startup :- fetch_environment_variables, % 45 is '-'. assertz(qualifier_prefix(45)), % 47 is '/'. assertz(qualifier_prefix(47)), !. %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/spxref.pro0000644000175000017500000000435311753202340016076 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % This code is not part of the core checker, it exists for analysis % purposes only. The small file loads all dynamic features of the checker % environment, enabling the use of the sicstus spxref analysis tool. 
%############################################################################### %############################################################################### % DEPENDENCIES %############################################################################### %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### %############################################################################### % PREDICATES %############################################################################### :- use_module('declar.pro'). :- declare_operators. %############################################################################### % END-OF-FILE spark-2012.0.deb/checker/records2.pro0000644000175000017500000005516311753202340016317 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= record_simplify(OLD, NEW) :- ( record_function(N,OLD,update,F,[REC,VAL],T), /* CFR029 */ ( record_function(_,VAL,access,F,[REC],T), /* CFR029 */ simplify(REC, NEW) ; order_updates(OLD, NEW) ) ; record_function(_,OLD,access,F,[RECORD],_), /* OLD is a fld_xxx(RECORD) */ !, /* so commit to this branch */ checktype(RECORD, T), /* find the record type, */ record_function(N,OLD,access,F,[RECORD],T), /* find the N for type T */ record_access(N, T, RECORD, NEW) /* and call record_access */ ), !. order_updates(OLD, NEW) :- checktype(OLD, TYPE), /* CFR029 */ make_update_list(OLD, TYPE, OLDLIST, CORE), /* CFR029 */ keysort(OLDLIST, NEWLIST), reconstruct_record(NEW, TYPE, NEWLIST, CORE), /* CFR029 */ !. make_update_list(OLD, TYPE, LIST, CORE) :- /* CFR029 */ record_function(N,OLD,update,_F,[RECORD,VALUE],TYPE), /* CFR029 */ !, make_update_list(RECORD, TYPE, SO_FAR, CORE), /* CFR029 */ !, ( is_in((N-_),SO_FAR), !, append(FRONT, [(N-_)|TAIL], SO_FAR), append(FRONT, [(N-VALUE)|TAIL], LIST) ; LIST=[(N-VALUE)|SO_FAR] ), !. make_update_list(CORE, _, [], CORE) :- !. /* CFR029 */ reconstruct_record(NEW, TYPE, [(N-V)|REST], CORE) :- /* CFR029 */ reconstruct_record(SO_FAR, TYPE, REST, CORE), /* CFR029 */ !, simplify(V, VALUE), record_function(N,NEW,update,_F,[SO_FAR,VALUE],TYPE), /* CFR029 */ !. reconstruct_record(CORE, _, [], CORE) :- !. /* CFR029 */ record_access(N, T, RECORD, VALUE) :- /* CFR029 */ record_function(M,RECORD,update,_F,[R,V],T), /* CFR029 */ !, ( M=N, !, simplify(V,VALUE) ; record_access(N, T, R, VALUE) /* CFR029 */ ), !. 
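/* Illustrative sketch of record simplification.  Assuming the declarations
   loader has asserted record_function/6 facts pairing an access function
   fld_f with an update function upf_f for each field f (the usual FDL
   encoding; the names and facts here are assumptions about the loaded VC):
       ?- record_simplify(fld_x(upf_y(upf_x(r, 5), 0)), NEW).   gives  NEW = 5
   because record_access/4 above skips the update of the unrelated field y
   and returns the value stored by upf_x.
*/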
record_access(N, T, RECORD, VALUE) :- /* CFR036 */ \+ atomic(RECORD), /* CFR036 */ RECORD =.. [mk__record|COMPONENTS], /* CFR036 */ spark_enabled, /* CFR036 */ !, /* CFR036 */ record_function(N, _, access, FIELD, _, T), /* CFR036 */ is_in(FIELD := V, COMPONENTS), /* CFR036 */ simplify(V, VALUE), /* CFR036 */ !. /* CFR036 */ record_access(N, T, RECORD, VALUE) :- \+ atomic(RECORD), RECORD =.. [F|COMPONENTS], mk__function_name(F, T, record), spark_enabled, !, record_function(N, _, access, FIELD, _, T), is_in(FIELD := V, COMPONENTS), simplify(V, VALUE), !. record_access(N, T, RECORD, VALUE) :- /* CFR029 */ ( record_simp_of_type(T, RECORD, NEWREC) /* CFR 1234 - Don't chuck away type */ ; NEWREC=RECORD ), !, record_function(N,VALUE,access,_F,[NEWREC],T), /* CFR029 */ !. /* CFR 1224 introduces record_simp_of_type to improve performance */ record_simp_of_type(T, OLD, NEW) :- ( record_function(N, OLD, update, F, [REC,VAL], T), ( record_function(_, VAL, access, F, [REC], T), simplify(REC, NEW) ; order_updates(OLD, NEW) ) ; record_function(_, OLD, access, F, [RECORD], _), !, checktype(RECORD, T2), record_function(N, OLD, access, F, [RECORD], T2), record_access(N, T2, RECORD, NEW) ), !. array_simplify(update(A,I,X), NEW) :- !, remove_update_duplicates(update(A,I,X), SO_FAR), !, do_array_simplify(SO_FAR, NEW), !. array_simplify(element(A,I), NEW) :- !, do_array_simplify(element(A,I), NEW), !. do_array_simplify(update(A, I, X), NEW) :- do_array_simplify(A, NEWA), simplify(X, NEWX), eval_list(I, NEWI), !, ( ( NEWX=element(A, J) ; A\=NEWA, NEWX=element(NEWA, J) ; X\=NEWX, ( X=element(A, J) ; A\=NEWA, X=element(NEWA, J) ) ), ( infer(NEWI=J) ; I\=NEWI, infer(I=J) ), !, NEW=NEWA ; NEWA=update(AA, J, _Y), ( infer(NEWI=J) ; I\=NEWI, infer(I=J) ), !, NEW=update(AA, NEWI, NEWX) ; NEW=update(NEWA, NEWI, NEWX) ), !. do_array_simplify(element(A, I), NEW) :- do_array_simplify(A, NEWA), eval_list(I, NEWI), find_element(NEWA, NEWI, NEW), !. do_array_simplify(X,Y) :- !, simplify(X,Y), !. find_element(update(A, I, _X), J, E) :- infer(I<>J), !, find_element(A, J, E), !. find_element(update(_A, I, X), J, X) :- infer(I=J), !. find_element(A, [J], X) :- /* CFR036 */ \+ atomic(A), /* CFR036 */ A =.. [mk__array|COMPONENTS], /* CFR036 */ spark_enabled, /* CFR036 */ reverse(COMPONENTS, REV_COMPS), /* CFR036 */ find_array_component(REV_COMPS, J, X). /* CFR036 */ find_element(A, [J], X) :- \+ atomic(A), A =.. [F|COMPONENTS], mk__function_name(F, _, array), spark_enabled, reverse(COMPONENTS, REV_COMPS), find_array_component(REV_COMPS, J, X). find_element(A, J, element(A, J)) :- !. remove_update_duplicates(update(A,I,X), update(NEW,I,X)) :- remove_updates(A,I,NEWA), !, remove_update_duplicates(NEWA, NEW), !. remove_update_duplicates(X, X) :- !. remove_updates(update(A,I,X), J, NEW) :- ( infer(I=J), !, remove_updates(A, J, NEW) ; remove_updates(A, J, NEWA), NEW=update(NEWA, I, X) ), !. remove_updates(X, _, X) :- !. find_array_component([H := V|T], J, X) :- /* CFR036 */ ( /* CFR036 */ satisfies_index_constraint(H, J), /* CFR036 */ !, /* CFR036 */ simplify(V, X) /* CFR036 */ ; /* CFR036 */ does_not_satisfy_index_constraint(H, J), /* CFR036 */ !, /* CFR036 */ find_array_component(T, J, X) /* CFR036 */ ), /* CFR036 */ !. /* CFR036 */ find_array_component([V], _J, X) :- /* CFR036 */ V \= (_ := _), /* CFR049 */ simplify(V, X), /* CFR036 */ checktype(X, _), /* CFR036 */ !. 
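/* Illustrative sketch of the array rules above, assuming infer/1 (defined
   elsewhere in the Checker) can decide the index equalities involved:
       ?- array_simplify(element(update(arr, [3], x), [3]), NEW).
              gives  NEW = x
       ?- array_simplify(update(update(arr, [1], x), [1], y), NEW).
              gives  NEW = update(arr, [1], y)
   the second via remove_update_duplicates/2, which discards the inner,
   superseded update of the same index.
*/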
/* CFR036 */ satisfies_index_constraint(I1 & I2, J) :- /* CFR036 */ ( /* CFR036 */ satisfies_index_constraint(I1, J) /* CFR036 */ ; /* CFR036 */ satisfies_index_constraint(I2, J) /* CFR036 */ ), /* CFR036 */ !. /* CFR036 */ satisfies_index_constraint([I1 .. I2], J) :- /* CFR036 */ infer(I1 <= J), /* CFR036 */ infer(J <= I2), /* CFR036 */ !. /* CFR036 */ satisfies_index_constraint([I], J) :- /* CFR036 */ infer(I=J), /* CFR036 */ !. /* CFR036 */ does_not_satisfy_index_constraint(I1 & I2, J) :- /* CFR036 */ does_not_satisfy_index_constraint(I1, J), /* CFR036 */ does_not_satisfy_index_constraint(I2, J), /* CFR036 */ !. /* CFR036 */ does_not_satisfy_index_constraint([I1 .. I2], J) :- /* CFR036 */ !, /* CFR036 */ ( /* CFR036 */ infer(J < I1) /* CFR036 */ ; /* CFR036 */ infer(J > I2) /* CFR036 */ ), /* CFR036 */ !. /* CFR036 */ does_not_satisfy_index_constraint([I], J) :- /* CFR036 */ infer(I <> J), /* CFR036 */ !. /* CFR036 */ set_simplify(A \/ B, NEW) :- set_simplify(A, NEWA), set_simplify(B, NEWB), ( NEWA=NEWB, !, NEW=NEWA ; NEWA = (set ASET), NEWB = (set BSET), append(ASET, BSET, USET), simp_set_list(USET, U), sort(U, UNION), NEW = (set UNION) ; do_infer(NEWA subset_of NEWB), !, NEW=NEWB ; do_infer(NEWB subset_of NEWA), !, NEW=NEWA ; NEW = (NEWA \/ NEWB) ), !. set_simplify(A /\ B, NEW) :- set_simplify(A, NEWA), set_simplify(B, NEWB), ( NEWA=NEWB, !, NEW=NEWA ; NEWA = (set ASET), NEWB = (set BSET), make_intersection(ASET, BSET, NEW) ; do_infer(NEWA subset_of NEWB), !, NEW=NEWA ; do_infer(NEWB subset_of NEWA), !, NEW=NEWB ; NEW = (NEWA /\ NEWB) ), !. set_simplify(A \ B, NEW) :- set_simplify(A, NEWA), set_simplify(B, NEWB), ( NEWA=NEWB, !, NEW=(set []) ; NEWB=(set []), NEW=NEWA ; NEWA = (set ASET), NEWB = (set BSET), make_difference(ASET, BSET, NEW) ; do_infer(NEWA subset_of NEWB), !, NEW=(set []) ; NEW = (NEWA \ NEWB) ), !. set_simplify(A subset_of B, NEW) :- set_simplify(A, NEWA), set_simplify(B, NEWB), ( ( NEWA=NEWB ; NEWA=(set []) ; NEWB = (B1 \/ B2), ( set_simplify(NEWA subset_of B1, true) ; set_simplify(NEWA subset_of B2, true) ) ; NEWA = (A1 \ A2), set_simplify(A1 subset_of NEWB, true) ; NEWA = (A1 /\ A2), ( set_simplify(A1 subset_of NEWB, true) ; set_simplify(A2 subset_of NEWB, true) ) ; NEWA = (set ASET), NEWB = (set BSET), is_subset_of(ASET, BSET) ), NEW=true ; NEW = (NEWA subset_of NEWB) ), !. set_simplify(A strict_subset_of B, NEW) :- set_simplify(A, NEWA), set_simplify(B, NEWB), ( NEWA=(set ASET), NEWB=(set BSET), is_strict_subset_of(ASET, BSET), NEW=true ; NEW = (NEWA strict_subset_of NEWB) ), !. set_simplify(A in B, NEW) :- simplify(A, NEWA), set_simplify(B, NEWB), ( NEWB=(set _), ( do_infer(NEWA in NEWB), NEW=true ; do_infer(NEWA not_in NEWB), NEW=false ) ; NEW = (NEWA in NEWB) ), !. set_simplify(A not_in B, NEW) :- simplify(A, NEWA), set_simplify(B, NEWB), ( NEWB=(set _), ( do_infer(NEWA not_in NEWB), NEW=true ; do_infer(NEWA in NEWB), NEW=false ) ; NEW = (NEWA not_in NEWB) ), !. set_simplify(A=B, NEW) :- set_simplify(A, NEWA), set_simplify(B, NEWB), ( NEWA=NEWB, NEW=true ; NEW=(NEWA=NEWB) ), !. set_simplify(A<>B, NEW) :- set_simplify(A=B, EQ), ( EQ=true, NEW=false ; EQ=(A1=B1), NEW=(A1<>B1) ; NEW=(not EQ) ), !. set_simplify((set L), (set M)) :- simp_set_list(L, N), sort(N, M), !. set_simplify(SOMETHING, ELSE) :- ( atom(SOMETHING), ELSE=SOMETHING ; SOMETHING =.. [F|SARGS], eval_list(SARGS, EARGS), ELSE =.. [F|EARGS] ), !. simp_set_list([H|T], LIST) :- simp_set_list(T, TAIL), simplify(H, NEWH), ( in_set_list(NEWH, TAIL), LIST=TAIL ; LIST=[NEWH|TAIL] ), !. 
simp_set_list([], []) :- !. in_set_list(E, [H|_T]) :- infer(E=H), !. in_set_list(E, [_|T]) :- in_set_list(E, T), !. not_in_set_list(_E, []) :- !. not_in_set_list(E, [H|T]) :- infer(E<>H), not_in_set_list(E, T), !. make_intersection([_H|_T], [], (set [])). make_intersection([], [_H|_T], (set [])). make_intersection(S1, S2, (set S)) :- mk_intersect(S1, S2, SET), sort(SET, S), !. mk_intersect([], _L, []) :- !. mk_intersect([H|T], L, S) :- mk_intersect(T, L, I), ( in_set_list(H, L), ( not_in_set_list(H, I), S=[H|I] ; in_set_list(H, I), S=I ; S=[H|I] ) ; not_in_set_list(H, L), S=I ), !. make_difference([], _, (set [])) :- !. make_difference(L, [], (set L)) :- !. make_difference(A, B, (set S)) :- mk_diff(A, B, SET), sort(SET, S), !. mk_diff([], _, []) :- !. mk_diff([H|T], L, S) :- mk_diff(T, L, D), ( in_set_list(H, L), S=D ; not_in_set_list(H, L), S=[H|D] ), !. is_subset_of([], _) :- !. is_subset_of([H|T], L) :- in_set_list(H, L), is_subset_of(T, L), !. is_strict_subset_of(A, B) :- is_subset_of(A, B), mk_diff(B, A, S), S=[_|_], !. sequence_simplify(L1 @ L2, LIST) :- ( /* CFR050 */ sequence_simplify(L1, LL1) /* CFR050 */ ; /* CFR050 */ simplify(L1, LL1) /* CFR050 */ ), /* CFR050 */ ( /* CFR050 */ sequence_simplify(L2, LL2) /* CFR050 */ ; /* CFR050 */ simplify(L2, LL2) /* CFR050 */ ), /* CFR050 */ !, /* CFR050 */ ( /* CFR050 */ LL1 = [], /* CFR050 */ LIST = LL2 /* CFR050 */ ; /* CFR050 */ LL2 = [], /* CFR050 */ LIST = LL1 /* CFR050 */ ; /* CFR050 */ LL1 = [_|_], /* CFR050 */ LL2 = [_|_], /* CFR050 */ append(LL1, LL2, LIST) /* CFR050 */ ; /* CFR050 */ LIST = (LL1 @ LL2) /* CFR050 */ ), /* CFR050 */ !. /* CFR050 */ sequence_simplify(first(SEQ), NEW) :- sequence_simplify(SEQ, NEWSEQ), ( NEWSEQ=[H|_], NEW=H ; NEW=first(NEWSEQ) ), !. sequence_simplify(last(SEQ), NEW) :- sequence_simplify(SEQ, NEWSEQ), ( NEWSEQ=[_|_], last(NEWSEQ, NEW) ; NEW=last(NEWSEQ) ), !. sequence_simplify(nonfirst(SEQ), NEW) :- sequence_simplify(SEQ, NEWSEQ), ( NEWSEQ=[_|T], NEW=T ; NEW=nonfirst(NEWSEQ) ), !. sequence_simplify(nonlast(SEQ), NEW) :- sequence_simplify(SEQ, NEWSEQ), ( NEWSEQ=[_|_], append(NEW, [_LAST], NEWSEQ) ; NEW=nonlast(NEWSEQ) ), !. sequence_simplify([H|T], NEW) :- eval_list([H|T], NEW), !. sequence_simplify([], []) :- !. enumerated_simplify(succ(X), NEW) :- checktype(X, T), enumeration(T, L), enumerated_simp(succ(X), L, NEW), !. enumerated_simplify(pred(X), NEW) :- checktype(X, T), enumeration(T, L), enumerated_simp(pred(X), L, NEW), !. enumerated_simplify(X=Y, NEW) :- checktype(X, T), enumeration(T, L), enumerated_simp(X, L, NEWX), enumerated_simp(Y, L, NEWY), ( NEWX=NEWY, !, NEW=true ; is_in(NEWX, L), is_in(NEWY, L), !, NEW=false ; NEW=(NEWX=NEWY) ), !. enumerated_simplify(X<>Y, NEW) :- checktype(X, T), enumeration(T, L), enumerated_simp(X, L, NEWX), enumerated_simp(Y, L, NEWY), ( NEWX=NEWY, !, NEW=false ; is_in(NEWX, L), is_in(NEWY, L), !, NEW=true ; NEW=(NEWX<>NEWY) ), !. enumerated_simplify(XY, NEW) :- enumerated_simplify(Y=Y, NEW) :- enumerated_simplify(Y<=X, NEW), !. enumerated_simplify(X, NEW) :- checktype(X, T), enumeration(T, L), enumerated_simp(X, L, NEW), !. enumerated_simp(succ(X), L, NEW) :- !, enumerated_simp(X, L, NEWX), ( strict_sublist([NEWX,NEW], L) ; NEWX=pred(NEW), L=[H|_], infer(NEW<>H) ; NEW=succ(NEWX) ), !. enumerated_simp(pred(X), L, NEW) :- !, enumerated_simp(X, L, NEWX), ( strict_sublist([NEW,NEWX], L) ; NEWX=succ(NEW), last(L, H), infer(NEW<>H) ; NEW=pred(NEWX) ), !. enumerated_simp(X, _, NEWX) :- simplify(X, NEWX), !. 
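/* Illustrative sketch of the sequence and set simplifiers above, assuming the
   elements are plain identifiers or literals and infer/1 can decide the
   equalities used for duplicate removal:
       ?- sequence_simplify(first([a, b] @ [c]), NEW).        gives  NEW = a
       ?- set_simplify((set [1, 2]) \/ (set [2, 3]), NEW).    gives  NEW = (set [1, 2, 3])
   The union is concatenated, de-duplicated by simp_set_list/2 and sorted.
*/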
%############################################################################### %END-OF-FILE spark-2012.0.deb/checker/quantif.pro0000644000175000017500000002177011753202340016240 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /*** UNWRAP(HC,N) -- unwrap quantification around HC(N) ***/ unwrap :- ( command_arg(expression,HC#N) ; prompt_user('UNWRAP -- Which quantified hypothesis/conclusion?','Type h#N or c#N as appropriate ... '), rread(F), nonvar(F), check_hyp_or_conc(F), F=HC#N ), !, clear_up_could_facts, !, retract(logfact(command, unwrap)), asserta(logfact(command, unwrap(HC#N))), !, unwrap(HC,N), !. unwrap(HORC,N) :- nonvar(HORC), nonvar(N), ( HORC=h, HC=hyp ; HORC=c, HC=conc ), F=..[HC,N,FORM], call(F), strip(HC,FORM), !. /*** STRIP(HC,F) -- strip/enter proof frame on formula F if possible ***/ strip(hyp,for_all(X:TYPE,FORMULA)) :- make_new_var(qvar,X,TYPE,QVAR), subst_vbl(X,QVAR,FORMULA,NEW_FORMULA), add_new_hyp(NEW_FORMULA,1), !. strip(conc,for_some(X:TYPE,FORMULA)) :- make_new_var(qvar,X,TYPE,QVAR), subst_vbl(X,QVAR,FORMULA,NEW_FORMULA), start_subgoal(for_some(X:TYPE,FORMULA),[NEW_FORMULA],true,'QUANTIFICATION'), !. strip(hyp,for_some(X:TYPE,FORMULA)) :- no_qvars_in(FORMULA), make_new_var(uvar,X,TYPE,UVAR), subst_vbl(X,UVAR,FORMULA,NEW_FORMULA), add_new_hyp(NEW_FORMULA,1), !. strip(conc,for_all(X:TYPE,FORMULA)) :- no_qvars_in(FORMULA), make_new_var(uvar,X,TYPE,UVAR), subst_vbl(X,UVAR,FORMULA,NEW_FORMULA), start_subgoal(for_all(X:TYPE,FORMULA),[NEW_FORMULA],true,'QUANTIFICATION'), !. /*** MAKE_NEW_VAR(C,X,T,V) -- make V of class C from X & T ****/ make_new_var(VARCLASS,VAR,TYPE,NEWVAR) :- name(VAR,VT), ( VARCLASS=uvar, VL=VT ; make_capital(VT,VL) ), !, name(TYPE,TT), ( length(TT,Len), Len=<3, TL=TT ; TT=[T1,T2,T3|_], TL=[T1,T2,T3] ), !, append(TL,[95|VL],ROOT), repeat, nextnumber(ROOT,N), codelist(N,NUMBER), append(ROOT,[95|NUMBER],NL), name(NEWVAR,NL), /*until*/ nondeclared(NEWVAR), find_core_type(TYPE, CORE_TYPE), assertz(var_const(NEWVAR, CORE_TYPE, p)), CLASSDEC=..[VARCLASS,NEWVAR], assertz(CLASSDEC), !. /*** MAKE_CAPITAL(OLD,NEW) -- convert a list of chars to capitals only ***/ make_capital([],[]) :- !. make_capital([A|AL],[B|BL]) :- ( A>=97, A=<122, !, B is A-32 ; B=A ), !, make_capital(AL,BL), !. /*** NEXTNUMBER(R,M) -- generate new number for root M ***/ nextnumber(ROOT,M) :- ( retract(current_root(ROOT,N)), M is N+1, asserta(current_root(ROOT,M)) ; asserta(current_root(ROOT,1)), M=1 ), !. /*** NONDECLARED(ATOM) -- check ATOM has not been declared already ***/ nondeclared(X) :- var_const(X, _, _), !, fail. nondeclared(_) :- !. 
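/* Illustrative note on the naming scheme of make_new_var/4 above: fresh names
   are built from the first three letters of the type and the variable name
   (capitalised for qvars), suffixed with a counter.  Unwrapping for_all in a
   hypothesis, or for_some in a conclusion, would therefore be expected to
   introduce a qvar named like int_I_1 for i : integer (int_I_2 if that name
   is already declared), while for_some in a hypothesis, or for_all in a
   conclusion, introduces a uvar named like int_i_1.  This assumes
   find_core_type/2 succeeds for the type as declared with the VC.
*/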
/*** QVARS_IN(F, Q) -- Q is the list of QVARS in F ***/ qvars_in(FORMULA, QV) :- atomic(FORMULA), ( qvar(FORMULA), !, QV=[FORMULA] ; QV=[] ), !. qvars_in(FORMULA, QV) :- FORMULA=..[_OP|ARGS], qvars_in_list(ARGS, QV), !. /*** QVARS_IN_LIST(LIST, QVLIST) -- QVLIST is the list of QVARS in LIST ***/ qvars_in_list([FORMULA],QV) :- qvars_in(FORMULA, QV), !. qvars_in_list([FORMULA|REST],QV) :- qvars_in(FORMULA, QF), qvars_in_list(REST, QR), merge_lists(QF, QR, QV), !. /*** MERGE_LISTS(L1, L2, M) -- merge L1 & L2 to get M ***/ merge_lists([], X, X) :- !. merge_lists([H|T], X, L) :- ( is_in(H,X), !, merge_lists(T, X, L) ; merge_lists(T, X, S), L=[H|S] ), !. /*** NO_QVARS_IN(FORMULA) -- guarantee formula free of qvars ***/ no_qvars_in(FORMULA) :- qvars_in(FORMULA, []), !. /*** QVARS_IN_VC(QVARS) -- list of qvars in VC ***/ qvars_in_vc(LIST) :- findall(Q, qvar(Q), LIST), LIST \== [], !. /*** INSTANTIATE -- instantiate a qvar in a formula ***/ instantiate :- clear_up_could_facts, !, ( command_arg(var, QVAR) ; qvars_in_vc(QVARS), ( QVARS = [QVAR] ; QVARS = [], !, fail ; prompt_user('Instantiate what? '), rread(QVAR), nonvar(QVAR) ) ), !, qvar(QVAR), var_const(QVAR, TYPE, p), ( command_arg(value, VALUE) ; prompt_user('With what? '), rread(VAL), parse_expression(VAL, VALUE) ), !, novars(VALUE), no_qvars_in(VALUE), checktype(VALUE,TYPE), !, put_value(QVAR,VALUE), !. /*** PUT_VALUE(QVAR,VALUE) -- replace QVAR by VALUE in all HCs ***/ put_value(QVAR,VALUE) :- ( HC = hyp ; HC = deleted_hyp ; HC = conc ), F=..[HC,N,FORMULA], call(F), subst_vbl(QVAR,VALUE,FORMULA,NEW_FORMULA), ( FORMULA \= NEW_FORMULA, assertz(inst_form(HC,N,NEW_FORMULA)) ; true ), fail. put_value(QVAR,VALUE) :- retractall(saved_vc(_, qvar(QVAR))), saved_vc(N, FACT), subst_vbl(QVAR, VALUE, FACT, NEW_FACT), FACT \= NEW_FACT, assertz(inst_saved_vc(N, FACT, NEW_FACT)), fail. put_value(_,_) :- retract(inst_saved_vc(N, FACT, NEW_FACT)), retract(saved_vc(N, FACT)), assertz(saved_vc(N, NEW_FACT)), fail. put_value(QVAR,VALUE) :- case(CP, N, FACT), subst_vbl(QVAR, VALUE, FACT, NEW_FACT), FACT \= NEW_FACT, assertz(inst_case(CP, N, FACT, NEW_FACT)), fail. put_value(_,_) :- retract(inst_case(CP, N, FACT, NEW_FACT)), retract(case(CP, N, FACT)), assertz(case(CP, N, NEW_FACT)), fail. put_value(QVAR,VALUE) :- subgoal_formula(CP, FACT, N, METHOD), subst_vbl(QVAR, VALUE, FACT, NEW_FACT), FACT \= NEW_FACT, assertz(inst_subgoal_formula(CP, FACT, N, METHOD, NEW_FACT)), fail. put_value(_,_) :- retract(inst_subgoal_formula(CP, FACT, N, METHOD, NEW_FACT)), retract(subgoal_formula(CP, FACT, N, METHOD)), assertz(subgoal_formula(CP, NEW_FACT, N, METHOD)), fail. put_value(_, _) :- ( HC = hyp, NHC = newhyp, OLD = hyp(N, _), NEW = hyp(N, FORMULA), LOGFACT = NEW, MESSAGE = new_hyp_message(N, FORMULA) /* CFR018 */ ; HC = deleted_hyp, NHC = newhyp, OLD = deleted_hyp(N, _), NEW = deleted_hyp(N, FORMULA), LOGFACT = hyp(N, FORMULA), MESSAGE = true /* CFR018 */ ; HC = conc, NHC = newconc, OLD = conc(N, _), NEW = conc(N, FORMULA), LOGFACT = NEW, MESSAGE = new_conc_message(N, FORMULA) /* CFR018 */ ), retract(inst_form(HC,N,FORMULA)), retractall(OLD), assertz(NEW), assertz(logfact(NHC, LOGFACT)), call(MESSAGE), /* CFR018 */ fail. put_value(_,_) :- retract(inst_saved_vc(N, FACT, NEW_FACT)), retract(saved_vc(N, FACT)), assertz(saved_vc(N, NEW_FACT)), fail. put_value(QVAR, _) :- retractall(qvar(QVAR)), retractall(var_const(QVAR, _, _)), !. /*** GENVAR(R,V) -- make variable V with root R ****/ genvar(R,R) :- nondeclared(R), !. 
genvar(R,V) :- name(R,ROOT), repeat, nextnumber(ROOT,N), codelist(N,NUMBER), append(ROOT,NUMBER,VAR), name(V,VAR), /*until*/ nondeclared(V), !. %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/aritheval.pro0000644000175000017500000001267611753202340016555 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /* VAR */ _X iss V :- var(V), !, fail. /* +INT */ A iss A :- integer(A), A>=0, !. /* -INT */ X iss A :- integer(A), A<0, !, A1 is -A, X=(-A1), !. /* DMIN */ X iss -(-A) :- X iss A, !. /* UMIN */ X iss -(A) :- P iss A, !, ( P=0, X=0 ; integer(P), P>0, X=(-P) ; P=(-X), X>0 ), !. /* PLUS */ X iss A+B :- P iss A, Q iss B, !, X1 is P+Q, ( X1>=0, X=X1 ; X1<0, X2 is -X1, X=(-X2) ), !. /* MINUS */ X iss A-B :- X iss A+(-B), !. /* MULT */ X iss A*B :- P iss A, Q iss B, !, X1 is P*Q, ( X1>=0, X=X1 ; X1<0, X2 is -X1, X=(-X2) ), !. /* DIV */ X iss A div B :- P iss A, Q iss B, !, Q\=0, eval_div(P, Q, X1), ( X1>=0, X=X1 ; X1<0, X2 is -X1, X=(-X2) ), !. /* MOD 1 */ X iss A mod B :- spark_enabled, /* SPARK definition */ !, P iss A, Q iss B, !, Q \= 0, eval_div(P, Q, X1), Remainder iss P - (X1 * Q), ( Remainder = 0, X = 0 ; signed_integer(P), signed_integer(Q), ( ( P >= 0, Q > 0 ; P =< 0, Q < 0 ), X = Remainder ; ( P >= 0, Q < 0 ; P =< 0, Q > 0 ), X iss Remainder + Q ) ; X = (P mod Q) ), !. /* MOD 2 */ X iss A mod B :- \+ spark_enabled, /* Non-SPARK definition */ !, P iss A, Q iss B, !, Q \= 0, eval_div(P, Q, X1), Remainder iss P - (X1 * Q), ( Remainder = 0, X = 0 ; Q = 1, X = 0 ; X = (P mod Q) ), !. /* POWER */ X iss A**B :- /* CFR038 */ _P iss A, /* CFR038 */ _Q iss B, /* CFR038 */ !, /* CFR038 */ ( /* CFR038 */ B = 0, /* CFR038 */ X = 1 /* CFR038 */ ; /* CFR038 */ integer(B), /* CFR038 */ B > 0, /* CFR038 */ B1 is B - 1, /* CFR038 */ !, /* CFR038 */ A1 iss A**B1, /* CFR038 */ !, /* CFR038 */ X iss A * A1 /* CFR038 */ ), /* CFR038 */ !. /* CFR038 */ %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/checker_ioutilities.pro0000644000175000017500000001324311753202340020614 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. 
The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= %############################################################################### % PURPOSE %------------------------------------------------------------------------------- % Provides input and output utilities that are specific to the Checker. %############################################################################### %############################################################################### % MODULE %############################################################################### :- module(simplifier_ioutilities, [display_header/1, display_help/1]). %############################################################################### % DEPENDENCIES %############################################################################### :- use_module('../simplifier/ioutilities.pro', [show_error/2, display_header_full/1, display_header_plain/1]). :- use_module('../common/versioning/version.pro', [toolset_support_line1/1, toolset_support_line2/1, toolset_support_line3/1, toolset_support_line4/1]). %############################################################################### % TYPES %############################################################################### %############################################################################### % DATA %############################################################################### %############################################################################### % PREDICATES %############################################################################### %=============================================================================== % display_header(+Stream). %------------------------------------------------------------------------------- % Display the header for the Checker. For correct processing, the % command line arguments must have already been processed. %=============================================================================== % Is not in plain mode. display_header(Stream):- user:plain_output(off), display_header_full(Stream), !. % Is in plain mode. display_header(Stream):- user:plain_output(on), display_header_plain(Stream), !. % None of the above is an error. display_header(_Stream):- show_error('Could not calculate banner. Unexpected system configuration.', []). %=============================================================================== %=============================================================================== % display_help(Stream). %------------------------------------------------------------------------------- % Display help text. %=============================================================================== display_help(Stream):- display_header(Stream), % Get common support information.
toolset_support_line1(SupportLine1_Atom), toolset_support_line2(SupportLine2_Atom), toolset_support_line3(SupportLine3_Atom), toolset_support_line4(SupportLine4_Atom), format(Stream, '~n',[]), format(Stream, 'Usage: checker [options] Target_File~n', []), format(Stream, '~n', []), format(Stream, 'Target_File - Either a .vcg or .siv file~n', []), format(Stream, '~n', []), format(Stream, 'All options may be abbreviated to the shortest unique prefix.~n', []), format(Stream, '~n', []), format(Stream, 'Standard options~n', []), format(Stream, '----------------~n', []), format(Stream, '-help - Display this help information.~n', []), format(Stream, '-version - Display version information.~n', []), format(Stream, '-plain - Adopt a plain output style (e.g. no dates~n', []), format(Stream, ' or version numbers).~n', []), format(Stream, '-overwrite_warning - Confirmation needed to overwrite command or~n', []), format(Stream, ' proof log files.~n', []), format(Stream, '-command_log=Log_File - Specify filename for the command log file.~n', []), format(Stream, '-proof_log=Plg_File - Specify filename for the proof log file.~n', []), format(Stream, '-execute=Log_File - Execute a previously generated command log file.~n', []), format(Stream, '-resume - Resume a previously saved session.~n', []), format(Stream, '~n', []), format(Stream, '~a~n', [SupportLine1_Atom]), format(Stream, '~a~n', [SupportLine2_Atom]), format(Stream, '~a~n', [SupportLine3_Atom]), format(Stream, '~a~n', [SupportLine4_Atom]), !. %=============================================================================== %############################################################################### % END-OF-FILE spark-2012.0.deb/checker/checker.windows.manifest0000755000175000017500000000056611753202340020677 0ustar eugeneugen spark-2012.0.deb/checker/traverse.pro0000644000175000017500000004256211753202340016426 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /******** TRAVERSE: top-level checker command ********/ traverse :- ( command_arg(expression, EXPRESSION) ; prompt_user('TRAVERSE -- Please specify expression to be traversed.', 'Expression? '), rread(EXPRN), parse_expression(EXPRN, EXPRESSION) ), !, traverse(EXPRESSION). /*** traverse(EXPRESSION) -- setup expression and enter tv environment! ***/ traverse(HORC#N) :- ( HORC=h, HC=hyp ; HORC=c, HC=conc ), X=..[HC,N,E], call(X), !, traverse(E), !. traverse(X) :- novars(X), !, retractall(tv_depth(_)), retractall(tv_expr(_,_)), retractall(tv_trace(_)), retractall(tv_cmd_buffer(_)), !, asserta(tv_depth(0)), asserta(tv_expr(0,X)), asserta(tv_trace([])), !, tv_environment, !. 
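/* Illustrative sketch (not part of the original source): the traverse
   environment is driven by single-letter commands read at the
   Traverse-Command>> prompt.  For an expression such as
   element(a, [i]) + 1 (a hypothetical FDL term) one might type

       d1      descend into argument 1, i.e. element(a, [i])
       t       show the type template for element/2
       u       ascend back to the + node
       x       leave the traverse environment

   Several command letters may be chained on one input line (e.g. "d1t"),
   since tv_process_line_of_chars/1 below buffers each in turn.           */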
tv_environment :- tv_process_command(redisplay), !, repeat, tv_get_command(COMMAND), tv_process_command(COMMAND), /* until */ COMMAND = exit, !. /*----------------------------------------------------------------------------*/ tv_get_command(COMMAND) :- retract(tv_cmd_buffer(COMMAND)), !. tv_get_command(_) :- prompt_user('Traverse-Command>>'), /* CFR1334 */ tv_read_line_of_chars(LINE_OF_CHARS), tv_process_line_of_chars(LINE_OF_CHARS), !, fail. tv_read_line_of_chars(LINE) :- lget0(CHAR), /* CFR015 */ !, ( tv_cmd_terminator(CHAR), LINE = [] ; tv_blank_char(CHAR), !, tv_read_line_of_chars(LINE) ; LINE=[CH|REST], tv_make_atom(CHAR, CH), !, tv_read_line_of_chars(REST) ), !. tv_cmd_terminator(13). tv_cmd_terminator(EOL) :- eol_char(EOL). tv_cmd_terminator(EOF) :- eof_char(EOF), see_correct_input_stream, !. /* CFR015 */ tv_blank_char(32). tv_blank_char(8). tv_blank_char(46). tv_make_atom(CHAR, CH) :- CHAR>64, CHAR<91, /* so in A..Z */ CHAR1 is CHAR+32, !, name(CH, [CHAR1]), !. tv_make_atom(45, u) :- !. /* "-" synonymous with "up" */ tv_make_atom(43, d) :- !. /* "+" synonymous with "down" */ tv_make_atom(63, h) :- !. /* "?" synonymous with "help" */ tv_make_atom(CHAR, CH) :- name(CH, [CHAR]), /* o.k. otherwise */ !. tv_process_line_of_chars([x|_]) :- assertz(tv_cmd_buffer(exit)), !. tv_process_line_of_chars([t|REST]) :- assertz(tv_cmd_buffer(type)), !, tv_process_line_of_chars(REST), !. tv_process_line_of_chars([l,b|REST]) :- assertz(tv_cmd_buffer(locate(brief))), !, tv_process_line_of_chars(REST), !. tv_process_line_of_chars([l,f|REST]) :- assertz(tv_cmd_buffer(locate(full))), !, tv_process_line_of_chars(REST), !. tv_process_line_of_chars([l|REST]) :- assertz(tv_cmd_buffer(locate(brief))), !, tv_process_line_of_chars(REST), !. tv_process_line_of_chars([u|REST]) :- assertz(tv_cmd_buffer(up)), !, tv_process_line_of_chars(REST), !. tv_process_line_of_chars([d|REST]) :- tv_fetch_number(REST, NUMBER, REMAINDER), !, assertz(tv_cmd_buffer(down(NUMBER))), !, tv_process_line_of_chars(REMAINDER), !. tv_process_line_of_chars([h|REST]) :- assertz(tv_cmd_buffer('help')), !, tv_process_line_of_chars(REST), !. tv_process_line_of_chars([r|REST]) :- assertz(tv_cmd_buffer(redisplay)), !, tv_process_line_of_chars(REST), !. tv_process_line_of_chars([s|REST]) :- assertz(tv_cmd_buffer(showtop)), !, tv_process_line_of_chars(REST), !. tv_process_line_of_chars([]) :- !. tv_process_line_of_chars(REST) :- assertz(tv_cmd_buffer(error(REST))), !. /*----------------------------------------------------------------------------*/ tv_process_command(redisplay) :- tv_depth(DEPTH), is_inverse_video(INV), is_normal_video(NORM), write('Depth: '), print(DEPTH), write(', Trace: '), tv_trace(TRACE), wnl(TRACE), tv_expr(DEPTH, EXPRESSION), write('*** '), wnl(EXPRESSION), write('Principal functor: '), EXPRESSION =.. [FUNCTOR|ARGUMENTS], !, print(INV), print(FUNCTOR), print(NORM), length(ARGUMENTS, LENGTH), !, tv_display_arg_info(LENGTH, ARGUMENTS), !. tv_process_command(up) :- tv_depth(OLDDEPTH), OLDDEPTH > 0, !, retractall(tv_depth(_)), retractall(tv_expr(OLDDEPTH, _)), NEWDEPTH is OLDDEPTH - 1, asserta(tv_depth(NEWDEPTH)), tv_trace(OLDTRACE), retractall(tv_trace(_)), gen_append(NEWTRACE, [_], OLDTRACE), asserta(tv_trace(NEWTRACE)), tv_expr(NEWDEPTH, NEWEXPRESSION), ( ( NEWEXPRESSION = for_all(V:T, _) ; NEWEXPRESSION = for_some(V:T, _) ), find_core_type(T, CT), retractall(var_const(V, CT, tv)) ; true ), !, tv_process_command(redisplay), !. tv_process_command(up) :- tv_cmd_buffer(_), !, wnl('CANNOT "UP" AT TOP-LEVEL. 
(Rest of command-line ignored)'), retractall(tv_cmd_buffer(_)), !. tv_process_command(up) :- wnl('CANNOT "UP" AT TOP-LEVEL.'), !. tv_process_command(down(N)) :- tv_depth(OLDDEPTH), tv_expr(OLDDEPTH, OLDEXPRESSION), \+ atomic(OLDEXPRESSION), tv_trace(OLDTRACE), !, NEWDEPTH is OLDDEPTH + 1, functor(OLDEXPRESSION, _, ARITY), ( ( integer(N), N >= 1, N =< ARITY, NN = N ; N = -1, ARITY = 1, NN = 1 ), !, arg(NN, OLDEXPRESSION, NEWEXPRESSION), retractall(tv_depth(_)), asserta(tv_depth(NEWDEPTH)), asserta(tv_expr(NEWDEPTH, NEWEXPRESSION)), retractall(tv_trace(_)), append(OLDTRACE, [NN], NEWTRACE), asserta(tv_trace(NEWTRACE)), ( ( OLDEXPRESSION = for_all(V:T, _) ; OLDEXPRESSION = for_some(V:T, _) ), find_core_type(T, CT), assertz(var_const(V, CT, tv)) ; true ), !, tv_process_command(redisplay) ; tv_cmd_buffer(_), wnl('"DOWN" NON-EXISTENT ARGUMENT. (Rest of command-line ignored)'), retractall(tv_cmd_buffer(_)) ; wnl('"DOWN" NON-EXISTENT ARGUMENT.') ), !. tv_process_command(down(_)) :- tv_cmd_buffer(_), !, wnl('CANNOT "DOWN" AT TREE-LEAF. (Rest of command-line ignored)'), retractall(tv_cmd_buffer(_)), !. tv_process_command(down(_)) :- !, wnl('CANNOT "DOWN" AT TREE-LEAF.'), !. tv_process_command(type) :- tv_depth(DEPTH), tv_expr(DEPTH, EXPRESSION), !, tv_show_type_template(EXPRESSION), !. tv_process_command(locate(B_OR_F)) :- tv_build_locate_expr(EXPRESSION, B_OR_F), !, wnl(EXPRESSION), !. tv_process_command('help') :- wnl(' TRAVERSE ENVIRONMENT COMMANDS:'), wnl(' ------------------------------'), wnl(' u (or -) UP (ascend a level in expression tree)'), wnl(' d (or +) DOWN (descend a level); must be followed by argument no.'), wnl(' (e.g. d2 = descend argument 2)'), wnl(' l LOCATION of current level w.r.t. original expression'), wnl(' (follow by "f" for FULL or ["b"] (default) BRIEF)'), wnl(' t TYPE information for current level'), wnl(' r REDISPLAY current level'), wnl(' s SHOW top-level expression (in full)'), wnl(' h (or ?) HELP -- displays this message'), wnl(' x EXIT from traverse command environment'), /* CFR031 */ nl, !. tv_process_command(showtop) :- tv_expr(0, TOP_EXPR), wnl('TOPLEVEL EXPRESSION:'), wnl(TOP_EXPR), !. tv_process_command(exit) :- retractall(var_const(_, _, tv)), !. tv_process_command(error(LIST)) :- !, write('ERROR(S) IN COMMAND SEQUENCE: '), tv_write_list(LIST), wnl('Please retype command-line.'), !. wnl(X) :- print(X), nl, !. /*----------------------------------------------------------------------------*/ tv_fetch_number(REST, NUMBER, REMAINDER) :- tv_fetch_digits(REST, DIGITS, REMAINDER), tv_form_number(DIGITS, 0, NUMBER), !. tv_fetch_digits([FIRST|REST], [FIRST|DIGITS], REMAINDER) :- integer(FIRST), 0 =< FIRST, FIRST =< 9, !, tv_fetch_digits(REST, DIGITS, REMAINDER), !. tv_fetch_digits(REMAINDER, [], REMAINDER) :- !. tv_form_number([N], S, R) :- R is 10 * S + N, !. tv_form_number([N|L], S, R) :- S1 is 10 * S + N, !, tv_form_number(L, S1, R), !. tv_form_number([], _, -1) :- !. /* error case: d~ */ tv_display_arg_info(0, _ARGUMENTS) :- wnl(' (atomic object: leaf of expression tree)'), !. tv_display_arg_info(LENGTH, ARGUMENTS) :- write(' ('), print(LENGTH), wnl(' arguments)'), !, tv_display_arguments(ARGUMENTS, 1), !. tv_display_arguments([ARG|ARGUMENTS], N) :- is_inverse_video(INV), is_normal_video(NORM), print(INV), write('ARG'), print(N), write(':'), print(NORM), write(' '), wnl(ARG), N1 is N+1, !, tv_display_arguments(ARGUMENTS, N1), !. tv_display_arguments([], _) :- !. 
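/* Worked example (not part of the original source) of the digit parsing
   above: tv_form_number/3 folds a digit list into a decimal number, so

       ?- tv_form_number([1,2,3], 0, N).
       N = 123

   and a bare "d" command (no digits) makes tv_fetch_number/3 return -1,
   which tv_process_command(down(-1)) accepts only when the current node
   has exactly one argument.                                              */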
tv_show_type_template(EXPRESSION) :- atomic(EXPRESSION), !, ( checktype(EXPRESSION, TYPE) ; is_a_valid_type(EXPRESSION), TYPE = '{type-identifier}' ; type_alias(EXPRESSION, _), TYPE = '{type-identifier}' ), !, wnl('Type information for atomic object:'), !, tv_print_type_data(EXPRESSION, TYPE), !. tv_show_type_template(update(A, I, X)) :- checktype(A, ARRAYTYPE), type(ARRAYTYPE, array(INDEXTYPES, ELEM_TYPE)), checktypes(I, INDEXTYPES), checktype(X, ELEM_TYPE), !, tv_print_function_name(update), !, tv_print_type_data(update(ARRAYTYPE,INDEXTYPES,ELEM_TYPE), ARRAYTYPE), !. tv_show_type_template(element(A, I)) :- checktype(A, ARRAYTYPE), type(ARRAYTYPE, array(INDEXTYPES, ELEM_TYPE)), checktypes(I, INDEXTYPES), !, tv_print_function_name(element), !, tv_print_type_data(element(ARRAYTYPE, INDEXTYPES), ELEM_TYPE), !. tv_show_type_template([X|Y]) :- checktype([X|Y], TYPE), type(TYPE, sequence(ELEM_TYPE)), !, tv_print_function_name('.'), !, tv_print_type_data([ELEM_TYPE|TYPE], TYPE), !. tv_show_type_template([X|Y]) :- checktypes([X|Y], TYPES), !, tv_print_function_name('.'), !, tv_print_type_data(TYPES, '{list}'), !. tv_show_type_template(set X) :- checktype(set X, TYPE), type(TYPE, set(_ELEM_TYPE)), !, tv_print_function_name('set'), !, tv_print_type_data((set '{list}'), TYPE), !. tv_show_type_template(for_all(X, Y)) :- checktype(for_all(X, Y), boolean), !, tv_print_function_name(for_all), !, tv_print_type_data(for_all('{binding}', boolean), boolean), !. tv_show_type_template(for_some(X, Y)) :- checktype(for_some(X, Y), boolean), !, tv_print_function_name(for_some), !, tv_print_type_data(for_some('{binding}', boolean), boolean), !. tv_show_type_template(X:Y) :- atom(X), ( is_a_valid_type(Y) ; type_alias(Y, _) ), !, tv_print_function_name(':'), !, tv_print_type_data('{ {identifier} : {type} }', '{binding}'), !. tv_show_type_template(EXPRESSION) :- function_template(EXPRESSION, VARS, FUNCTION_NAME), function(FUNCTION_NAME, TYPES, RESULT_TYPE), !, checktypes(VARS, TYPES), !, function_template(TYPE_MATCH, TYPES, FUNCTION_NAME), !, tv_print_function_name(FUNCTION_NAME), !, tv_print_type_data(TYPE_MATCH, RESULT_TYPE), !. tv_show_type_template(EXPRESSION) :- checktype(EXPRESSION, RESULT_TYPE), /* CFR029 */ record_function(_, EXPRESSION, _, XXX, VARS, RESULT_TYPE), /* 029 */ functor(EXPRESSION, FUNCTION_NAME, _), function(FUNCTION_NAME, TYPES, RESULT_TYPE), !, checktypes(VARS, TYPES), !, record_function(_, TYPE_MATCH, _, XXX, TYPES, RESULT_TYPE), /*029 */ !, tv_print_function_name(FUNCTION_NAME), !, tv_print_type_data(TYPE_MATCH, RESULT_TYPE), !. tv_show_type_template(EXPRESSION) :- EXPRESSION =.. [FUNCTION_NAME | VARS], checktype(EXPRESSION, RESULT_TYPE), !, checktypes(VARS, TYPES), !, TYPE_MATCH =.. [FUNCTION_NAME | TYPES], !, tv_print_function_name(FUNCTION_NAME), !, tv_print_type_data(TYPE_MATCH, RESULT_TYPE), !. tv_print_function_name(FUNCTION_NAME) :- write('Type information for function '), is_inverse_video(INV), is_normal_video(NORM), print(INV), print(FUNCTION_NAME), print(NORM), wnl(':'), !. tv_print_type_data(TYPE_MATCH, RESULT_TYPE) :- print(TYPE_MATCH), write(': '), print(RESULT_TYPE), wnl('.'), !. tv_build_locate_expr(EXPRESSION, B_OR_F) :- tv_trace(TRACE), tv_expr(0, START_EXPR), tv_build_expr(TRACE, START_EXPR, EXPRESSION, B_OR_F), !. 
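/* Illustrative example (not part of the original source): with top-level
   expression a + b * c and the cursor on b (trace [2,1]), the "l" command
   built by tv_build_locate_expr/2 would show roughly

       brief:  ... + *HERE* * ...
       full:   a + *HERE* * c

   i.e. brief mode elides sibling subtrees as '...', full mode keeps them,
   and *HERE* marks the current node, assuming the inverse/normal video
   escape sequences are configured via inverse_video/1 and normal_video/1. */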
tv_build_expr([ARG|TRACE], START_EXPR, EXPRESSION, B_OR_F) :- functor(START_EXPR, F, A), functor(EXPRESSION, F, A), !, tv_instantiate(EXPRESSION, ARG, 1, A, B_OR_F, START_EXPR), arg(ARG, START_EXPR, NEW_START), arg(ARG, EXPRESSION, NEW_EXPR), !, tv_build_expr(TRACE, NEW_START, NEW_EXPR, B_OR_F), !. tv_build_expr([], _, EXPRESSION, _B_OR_F) :- is_inverse_video(INV), is_normal_video(NORM), name(INV, IL), name(NORM, NL), append(IL, [42,72,69,82,69,42|NL], EL), /* "*HERE*" ! */ !, name(EXPRESSION, EL), !. tv_instantiate(_EXPRESSION, ARG, ARG, ARG, _B_OR_F, _) :- !. tv_instantiate(EXPRESSION, ARG, ARG, UPP, B_OR_F, START_EXPR) :- LOW is ARG + 1, !, tv_instantiate(EXPRESSION, ARG, LOW, UPP, B_OR_F, START_EXPR), !. tv_instantiate(EXPRESSION, _ARG, UPP, UPP, brief, _) :- arg(UPP, EXPRESSION, '...'), !. tv_instantiate(EXPRESSION, _ARG, UPP, UPP, full, START_EXPR) :- arg(UPP, START_EXPR, XXX), arg(UPP, EXPRESSION, XXX), !. tv_instantiate(EXPRESSION, ARG, LOW, UPP, brief, _) :- arg(LOW, EXPRESSION, '...'), LO_ is LOW + 1, !, tv_instantiate(EXPRESSION, ARG, LO_, UPP, brief, []), !. tv_instantiate(EXPRESSION, ARG, LOW, UPP, full, START_EXPR) :- arg(LOW, START_EXPR, XXX), arg(LOW, EXPRESSION, XXX), LO_ is LOW + 1, !, tv_instantiate(EXPRESSION, ARG, LO_, UPP, full, START_EXPR), !. tv_write_list([HEAD|TAIL]) :- print(HEAD), !, tv_write_list(TAIL), !. tv_write_list([]) :- nl, !. is_inverse_video(INV) :- inverse_video(L), !, name(INV, L), !. is_normal_video(NORM) :- normal_video(L), !, name(NORM, L), !. %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/cs.pro0000644000175000017500000000215511753202340015172 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= :- compile(compilesicstus). :- save_program(checker, true). :- halt. %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/utilities.pro0000644000175000017500000010123211753202340016574 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /*** Normalize_negative_literals/2 ***/ /*** This clause transforms all negative integer literals ***/ /*** in A into application of the unart - operator in B ***/ /*** This is called from read_term_and_layout/1 ***/ /* Special base case - negative literal */ normalize_negative_literals(A, B) :- integer(A), A < 0, NA is -A, B = -(NA), !. /* Base case - any other atom */ normalize_negative_literals(A, A) :- atomic(A), !. /* Base case - any variable */ normalize_negative_literals(A, A) :- var(A), !. /* General case */ normalize_negative_literals(A, B) :- A =.. [F|ARGS], normalize_arg_list(ARGS, BARGS), B =.. [F|BARGS], !. normalize_arg_list([X], [Y]) :- normalize_negative_literals(X, Y), !. normalize_arg_list([X|XL], [Y|YL]) :- normalize_negative_literals(X, Y), !, normalize_arg_list(XL, YL), !. normalize_arg_list([], []) :- !. /*** It isn't possible to backtrack over printq, so this stops the interpreter from trying to. ***/ backtrack_printq(X) :- printq(X), !. /*** Renumber all variables in X from 1 (if there are any) and printq ***/ renumber_and_print(X) :- ( novars(X), printq(X) ; \+ novars(X), mynumbervars(X, 1, _), backtrack_printq(X), fail ; true ). rread(X) :- repeat, ( seeing(F), /* CFR015 */ read_term_and_layout(XX), /* CFR015 */ ( XX==end_of_file, /* CFR015 */ ( /* CFR015 */ F = user /* CFR015 */ ; /* CFR015 */ write('<<< End of command script '), /* CFR015 */ print(F), /* CFR015 */ write(' reached >>>'), /* CFR015 */ nl, /* CFR015 */ retract(perform_script_file(F)) /* CFR015 */ ), /* CFR015 */ !, /* CFR015 */ seen, /* CFR015 */ ( /* CFR015 */ perform_script_file(S), /* CFR015 */ write('<<< Resuming command script '), /* CFR015 */ print(S), /* CFR015 */ write(' >>>'), /* CFR015 */ nl, /* CFR015 */ see(S) /* CFR015 */ ; /* CFR015 */ write('<<< Returning to interactive input >>>'), /*15 */ nl, /* CFR015 */ see(user), /* CFR015 */ prompt_user('CHECK|:') ), /* CFR015 */ /* Check to see whether command logging should be switched back on */ findall(X, perform_script_file(X), B), length(B, DEPTH), toplevel_execute(SOURCE, ORIGLOG), ( DEPTH = 0, SOURCE = user, retractall(command_logging(_)), assertz(command_logging(ORIGLOG)) ; DEPTH = 1, SOURCE = script, retractall(command_logging(_)), assertz(command_logging(ORIGLOG)) ; true ), rread(X) /* CFR015 */ ; XX == '.', seen, write('Error on input stream - returning to interactive input'), /* CFR015 */ nl, /* CFR015 */ retractall(perform_script_file(_)), /* CFR015 */ see(user), /* CFR015 */ rread(X) /* CFR015 */ ; X = XX, /* CFR015 */ ( /* CFR015 */ F = user /* CFR015 */ ; /* CFR015 */ renumber_and_print(X), /* CFR015 */ write('.') /* CFR015 */ ), /* CFR015 */ !, /* CFR015 */ ( /* CFR014 */ command_logging(on), /* CFR014 */ command_log_filename(L), /* CFR014 */ telling(T), /* CFR014 */ /* halt if file can't be written to */ ( file_can_be_written(L) ; \+ file_can_be_written(L), write('Aborted: '), print(L), write(' cannot be written.'), nl, !, close_all_streams, halt ), tell(L), /* CFR014 */ renumber_and_print(X), write('.'), /* CFR014 */ nl, /* CFR014 */ tell(T) /* CFR014 */ ; /* CFR014 */ command_logging(off) /* CFR014 */ ) /* CFR014 */ ) /* CFR014 */ ; write('READ ERROR: garbage on input stream mandates 
reset.'), nl, nl, write('Please type CONTROL-C to resume use of the checker...'), nl, retractall(perform_script_file(_)), /* CFR015 */ repeat, fail ), !. /*** ADD_NEW_HYP(H,N) - adds formula H as the first free hypothesis number after N ***/ add_new_hyp(true,_) :- !. add_new_hyp(H,1) :- hyp(_,H), !. add_new_hyp(H,N) :- hyp(N,_), M is N+1, !, add_new_hyp(H,M), !. add_new_hyp(H,N) :- assertz(hyp(N,H)), assertz(logfact(newhyp, hyp(N,H))), retractall(could_not_infer(_)), stand_all, new_hyp_message(N, H), !. /* CFR018 */ /*** ADD_NEW_CONC(C,N) - adds formula C as the first free conclusion number after N ***/ add_new_conc(C,N) :- conc(N,_), M is N+1, add_new_conc(C,M), !. add_new_conc(C,N) :- assertz(conc(N,C)), assertz(logfact(newconc,conc(N,C))), new_conc_message(N, C), !. /* CFR018 */ /*** new_hyp_message(No, Hyp) -- show it on screen ***/ /* CFR018 */ new_hyp_message(_N, _H) :- /* CFR018 */ show_vc_changes(off), !. /* CFR018 */ new_hyp_message(N, H) :- /* CFR018 */ write('*** New H'), /* CFR018 */ print(N), /* CFR018 */ write(': '), /* CFR018 */ print(H), /* CFR018 */ nl, /* CFR018 */ !. /* CFR018 */ /*** new_conc_message(No, Conc) -- show it on screen ***/ /* CFR018 */ new_conc_message(_N, _C) :- /* CFR018 */ show_vc_changes(off), !. /* CFR018 */ new_conc_message(N, C) :- /* CFR018 */ write('>>> New goal C'), /* CFR018 */ print(N), /* CFR018 */ write(': '), /* CFR018 */ print(C), /* CFR018 */ nl, /* CFR018 */ !. /* CFR018 */ /* On NT, we get months as two-character numeric strings, which need to */ /* be converted into 3-character month names as follows: */ numeric_month_to_string("01", "JAN"). numeric_month_to_string("02", "FEB"). numeric_month_to_string("03", "MAR"). numeric_month_to_string("04", "APR"). numeric_month_to_string("05", "MAY"). numeric_month_to_string("06", "JUN"). numeric_month_to_string("07", "JUL"). numeric_month_to_string("08", "AUG"). numeric_month_to_string("09", "SEP"). numeric_month_to_string("10", "OCT"). numeric_month_to_string("11", "NOV"). numeric_month_to_string("12", "DEC"). /*** NEGIN(F,NewF) - move the nots in F in as far as possible to get NewF ***/ negin((not(P)),P1) :- !, neg(P,P1). negin(for_all(X,P),for_all(X,P1)) :- !, negin(P,P1). negin(for_some(X,P),for_some(X,P1)) :- !, negin(P,P1). negin((P and Q),(P1 and Q1)) :- !, negin(P,P1), negin(Q,Q1). negin((P or Q),(P1 or Q1)) :- !, negin(P,P1), negin(Q,Q1). negin(P,P). /*** NEG(F,NF) - return NF equivalent to "not F" but with nots moved in ***/ neg((not(P)),P1) :- !, negin(P,P1). neg(for_all(X,P),for_some(X,P1)) :- !, neg(P,P1). neg(for_some(X,P),for_all(X,P1)) :- !, neg(P,P1). neg((P and Q),(P1 or Q1)) :- !, neg(P,P1), neg(Q,Q1). neg((P or Q),(P1 and Q1)) :- !, neg(P,P1), neg(Q,Q1). neg(A>B,A<=B) :- !. neg(A=B,A<>B) :- !. neg(A<>B,A=B) :- !. neg(P,(not(P))) :- !. /*** VAR_FREE(J) - check no Prolog vars (or "goals") in justifications J ***/ var_free([]) :- !. var_free([goal(_)|_]) :- !, fail. /** Prevent subgoaling on goal clauses **/ var_free([K|K1]) :- novars(K), var_free(K1), !. /*** NO_VARS(F) - check no vars in (non-list) structure F ***/ novars(K) :- atomic(K), !. novars(K) :- nonvar(K), K=..[_OP|Args], var_free(Args), !. /* READ_ANSWER(Answer) -- read in a `yes' or a `no' from the user. */ read_answer(Prompt,Answer):- repeat, print(Prompt), write(' (yes/no)? '), output_newline_if_necessary, /* CFR1334 */ flush_output, get_yes_no_answer(Answer), /* CFR002 */ /* until */ (Answer=yes ; Answer=no).
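/* Worked example (not part of the original source): negin/2 pushes
   negations inwards, delegating negated subformulas to neg/2, e.g.

       ?- negin(not (for_all(i:integer, a(i) and b(i))), F).
       F = for_some(i:integer, (not a(i)) or (not b(i)))

   assuming a/1 and b/1 are boolean-valued FDL functions; relational
   operands such as A = B are negated directly to A <> B.                 */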
/* don't log output */ no_echo_read_answer(Prompt,Answer):- command_logging(ORIGLOG), retractall(command_logging(_)), assertz(command_logging(off)), repeat, print(Prompt), write(' (yes/no)? '), output_newline_if_necessary, /* CFR1334 */ flush_output, get_yes_no_answer(Answer), /* CFR002 */ /* until */ (Answer=yes ; Answer=no), retractall(command_logging(_)), assertz(command_logging(ORIGLOG)). /* output_newline_if_necessary -- for interactive use inside editors, etc. */ output_newline_if_necessary :- /* CFR1334 */ newline_after_prompts(off), /* so no newline needed */ /* CFR1334 */ !. /* CFR1334 */ output_newline_if_necessary :- /* Otherwise... */ /* CFR1334 */ nl, /* CFR1334 */ !. /* CFR1334 */ /* GET_YES_NO_ANSWER(Answer) -- return `yes' or `no' from char input. */ get_yes_no_answer(Answer) :- /* CFR002 */ repeat, lget0(CH), /* CFR014 */ ( ( CH = 89 /* "Y" */ ; CH = 121 /* "y" */ ), Answer = yes, skip_to_terminator ; ( CH = 78 /* "N" */ ; CH = 110 /* "n" */ ), Answer = no, skip_to_terminator ; eol_char(EOL), CH = EOL, /* RET */ Answer = neither ), !. /* skip_to_terminator -- skip to space, tab, CR, or full-stop */ skip_to_terminator :- /* CFR002 */ repeat, lget0(CHAR), /* CFR014 */ /* until */ ( CHAR = 32 /* " " */ ; CHAR = 9 /* TAB */ ; eol_char(EOL), CHAR = EOL /* RET */ ), !. /* lget0(CHAR) -- get0 CHAR and log if necessary + switch to user if EOF */ lget0(CHAR) :- /* CFR014 */ seeing(F), /* CFR014 */ get_code(CH), /* CFR014 */ !, /* CFR014 */ ( /* CFR014 */ eof_char(EOF), CH = EOF, /* CFR015 */ F \= user, /* CFR015 */ nl, /* CFR015 */ write('<<< End of command script '), /* CFR015 */ print(F), /* CFR015 */ write(' reached >>>'), /* CFR015 */ nl, /* CFR015 */ seen, /* CFR015 */ retract(perform_script_file(F)), /* CFR015 */ ( /* CFR015 */ perform_script_file(S), /* CFR015 */ write('<<< Resuming command script '), /* CFR015 */ print(S), /* CFR015 */ write(' >>>'), /* CFR015 */ nl, /* CFR015 */ see(S) /* CFR015 */ ; /* CFR015 */ write('<<< Returning to interactive input >>>'), /* CFR015 */ nl, /* CFR015 */ see(user), /* CFR015 */ prompt_user('CHECK|:') ), /* CFR015 */ /* Check to see whether command logging should be switched back on */ findall(X, perform_script_file(X), B), length(B, DEPTH), toplevel_execute(SOURCE, ORIGLOG), ( DEPTH = 0, SOURCE = user, retractall(command_logging(_)), assertz(command_logging(ORIGLOG)) ; DEPTH = 1, SOURCE = script, retractall(command_logging(_)), assertz(command_logging(ORIGLOG)) ; true ), lget0(CHAR) /* CFR015 */ ; /* CFR014 */ CHAR = CH, /* CFR014 */ ( /* CFR015 */ F = user /* CFR015 */ ; /* CFR015 */ put_code(CHAR) /* CFR015 */ ), /* CFR015 */ !, /* CFR015 */ ( /* CFR014 */ command_logging(on), /* CFR014 */ command_log_filename(L), /* CFR014 */ telling(T), /* CFR014 */ /* halt if file can't be written to */ ( file_can_be_written(L) ; \+ file_can_be_written(L), write('Aborted: '), print(L), write(' cannot be written.'), nl, !, close_all_streams, halt ), tell(L), /* CFR014 */ put_code(CH), /* CFR014 */ tell(T) /* CFR014 */ ; /* CFR014 */ true /* CFR014 */ ) /* CFR014 */ ), /* CFR014 */ !. /* CFR014 */ /*** READ_ANSWER_ONCE(P,A) - prompt with P & get answer A; backtrack fails ***/ read_answer_once(P,A) :- read_answer(P,A), !. /*** WRITE_JUSTS(J) - write out the list of justifications J on the screen ***/ write_justs([]) :- !. write_justs([H|T]) :- nl, write(' '), print(H), write_justs(T), !. /*** CREATE_FORMULA(L,F,G) - make G from list L & formula F as "L -> F" ***/ create_formula([],F,F) :- !. create_formula([X],F,X -> F) :- !. 
create_formula([X|Y],F,(X and Z) -> F) :- create_formula(Y,F,Z -> F), !. /***** MESSAGE: useful for messages during non-interactive display-proof *****/ message :- nl, repeat, nl, read_term_and_layout(X), (X=stop ; print(X)), /* until */ X=stop, nl, !. % Make this potential predicate call (potentially made during a % non-interactive display-proof) visible to the spxref tool. :- public message/0. /******** TRIVIAL_COMMAND(C): command C does not affect VC ********/ trivial_command(list). trivial_command(status). trivial_command('help'). trivial_command(forget). trivial_command(remember). trivial_command(delete). trivial_command(undelete). trivial_command(consult) :- record_consults(off), !. trivial_command('set'). trivial_command(show). trivial_command(declare). trivial_command(save_state). trivial_command(traverse). trivial_command(printvc). trivial_command(execute). /* CFR017 */ trivial_command(callpro). /*** FORGET: tell checker not to show certain hypotheses ***/ forget :- ( command_arg(hyplist,_) ; prompt_user('FORGET -- which hypothesis or hypotheses?','Hypotheses? '), rread(HYPS), parse_command_arguments(forget,HYPS) ), do_forgetting, !. /*** DO_FORGETTING -- forget each hypothesis (or range) in turn ***/ do_forgetting :- command_arg(hyplist, HYPLIST), forget(HYPLIST), fail. do_forgetting :- !. /*** FORGET(HYPLIST) -- forget each hypothesis in hyplist ***/ forget([H|T]) :- forget(H), forget(T), !. forget([]). forget(N) :- integer(N), N > 0, /* CFR003 */ \+ forgotten(N), \+ deleted(N), assertz(forgotten(N)), !. forget(N) :- integer(N), N > 0, /* CFR003 */ write('H'), print(N), write(' cannot be forgotten (it is already forgotten/deleted)'), nl, !. forget(N) :- /* CFR003 */ write('Illegal hypothesis number to forget: '), /* CFR003 */ print(N), /* CFR003 */ nl, /* CFR003 */ !. /* CFR003 */ /*** DELETE: tell checker not to use certain hypotheses ***/ delete :- ( command_arg(hyplist,_) ; prompt_user('DELETE -- which hypothesis or hypotheses?','Hypotheses? '), rread(HYPS), parse_command_arguments(delete,HYPS) ), do_deleting, !. /*** DO_DELETING -- delete each hypothesis (or range) in turn ***/ do_deleting :- command_arg(hyplist, HYPLIST), delete(HYPLIST), retractall(could_infer(_)), fail. do_deleting :- !. /*** DELETE(HYPLIST) -- delete each hypothesis in hyplist ***/ delete([H|T]) :- delete(H), delete(T), !. delete([]) :- !. delete(N) :- integer(N), N > 0, /* CFR003 */ \+ deleted(N), retract(hyp(N,X)), assertz(deleted(N)), assertz(deleted_hyp(N,X)), retractall(forgotten(N)), assertz(hyp(N,true)), !. delete(N) :- integer(N), N > 0, /* CFR003 */ write('H'), print(N), write(' is already deleted'), nl, !. delete(N) :- /* CFR003 */ write('Illegal hypothesis number to delete: '), /* CFR003 */ print(N), /* CFR003 */ nl, /* CFR003 */ !. /* CFR003 */ /*** REMEMBER: tell checker to show certain hypotheses once again ***/ remember :- ( command_arg(hyplist,_) ; prompt_user('REMEMBER -- which hypothesis or hypotheses?', 'Hypotheses? '), rread(HYPS), parse_command_arguments(remember,HYPS) ), do_remembering, !. /*** DO_REMEMBERING -- remember each hypothesis (or range) in turn ***/ do_remembering :- command_arg(hyplist, HYPLIST), remember(HYPLIST), fail. do_remembering :- !. /*** REMEMBER(HYPLIST) -- remember each hypothesis in hyplist ***/ remember([H|T]) :- remember(H), remember(T), !. remember([]) :- !. remember(N) :- integer(N), N > 0, /* CFR003 */ retract(forgotten(N)), !. 
remember(N) :- /* CFR003 */ integer(N), /* CFR003 */ N > 0, /* CFR003 */ \+ forgotten(N), /* CFR003 */ write('H'), /* CFR003 */ print(N), /* CFR003 */ write(' has not been forgotten.'), /* CFR003 */ nl, /* CFR003 */ !. /* CFR003 */ remember(N) :- /* CFR003 */ write('Illegal hypothesis number to remember: '), /* CFR003 */ print(N), /* CFR003 */ nl, /* CFR003 */ !. /* CFR003 */ /*** UNDELETE: tell checker to use certain hypotheses once again ***/ undelete :- ( command_arg(hyplist,_) ; prompt_user('UNDELETE -- which hypothesis or hypotheses?','Hypotheses? '), rread(HYPS), parse_command_arguments(undelete,HYPS) ), do_undeleting, !. /*** DO_UNDELETING -- undelete each hypothesis (or range) in turn ***/ do_undeleting :- command_arg(hyplist, HYPLIST), undelete(HYPLIST), fail. do_undeleting :- !. /*** UNDELETE(HYPLIST) -- undelete each hypothesis in hyplist ***/ undelete([H|T]) :- undelete(H), undelete(T), !. undelete([]) :- !. undelete(N) :- integer(N), N > 0, /* CFR003 */ retract(deleted(N)), retract(hyp(N,true)), retract(deleted_hyp(N,X)), assertz(hyp(N,X)), !. undelete(N) :- integer(N), N > 0, /* CFR003 */ write('H'), print(N), write(' has not been deleted.'), nl, /* CFR003 */ !. undelete(N) :- /* CFR003 */ write('Illegal hypothesis number to undelete: '), /* CFR003 */ print(N), /* CFR003 */ nl, /* CFR003 */ !. /* CFR003 */ temp_del_hyps(LIST) :- repeat, /* UNTIL */ del_til_none_left_in(LIST). del_til_none_left_in(LIST) :- hyp(N,X), \+ is_in(N,LIST), !, assertz(temp_del_hyp(N,X)), retract(hyp(N,X)), !, fail. del_til_none_left_in(_). restore_temp_del_hyps :- retract(temp_del_hyp(N,X)), assertz(hyp(N,X)), fail. restore_temp_del_hyps. /*** EXIT: the way out ***/ exit :- asserta(logfact(exit, [])), !. /*** FORCEEXIT: another way out ***/ forceexit :- asserta(logfact(forceexit, [])), !. /*** enumeration_list(CONST, LIST) -- CONST occurs in enumeration LIST ***/ enumeration_list(CONST, LIST) :- atom(CONST), enumeration(_, LIST), is_in(CONST, LIST), !. /*** in_order(E1, E2, LIST) -- E1 occurs before E2 in LIST ***/ in_order(E, E, LIST) :- is_in(E, LIST). in_order(X, Y, LIST) :- append(_, [X|REST], LIST), is_in(Y, REST). /*** strict_sublist(SUB, LIST) -- SUB is a sublist of LIST ***/ strict_sublist(SUB, LIST) :- append(SUB, _, LIST). strict_sublist(SUB, [_|LIST]) :- strict_sublist(SUB, LIST). /*** build_other_cases(VAR, CONST, ENUMERATION, DISJUNCTION) ***/ build_other_cases(V, C, E, D) :- do_build_other_cases(V, C, E, F), flatten_disjunction(F, D), !. /*** do_build_other_cases(VAR, CONST, ENUMERATION, DISJUNCTION) ***/ do_build_other_cases(_X, E, [E], false) :- !. do_build_other_cases(X, _E, [F], false) :- infer(X<>F), !. do_build_other_cases(X, _E, [F], X=F) :- !. do_build_other_cases(X, E, [C|CL], DISJ) :- do_build_other_cases(X, E, CL, FORM), ( ( C=E ; infer(X<>C) ), DISJ=FORM ; ( FORM=false, DISJ=(X=C) ; DISJ=(X=C or FORM) ) ), !. /*** flatten_disjunction(OLD, NEW) -- make it ((..(D1 or D2) or ...) or Dn) ***/ flatten_disjunction(A or (B or C), F) :- flatten_disjunction((A or B) or C, F), !. flatten_disjunction(A or B, AA or BB) :- flatten_disjunction(B, BB), !, flatten_disjunction(A, AA), !. flatten_disjunction(X, X) :- !. /*** set_union(A,B,C) -- C = (A union B) ***/ set_union(A, B, C) :- append(A,B,CC), sort(CC,C), !. /*** set_intersect(A,B,C) -- C = (A intersect B) ***/ set_intersect([],_A,[]) :- !. set_intersect(_A,[],[]) :- !. set_intersect([A|AA],B,C) :- set_intersect(AA,B,CC), !, ( set_find_in(A,B), C = [A|CC] ; set_not_in(A,B), C = CC ), !. 
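/* Illustrative queries (not part of the original source): set_union/3 is
   a plain append-and-sort, so duplicates disappear, e.g.

       ?- set_union([c, a], [b, a], S).
       S = [a, b, c]

   whereas set_intersect/3 also calls infer/1 (via set_find_in/2 and
   set_not_in/2 below), so its result depends on what the inference
   engine can establish about elements that are not syntactically equal.  */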
/*** set_find_in(A,B) -- A is definitely equal to an element of B ***/ set_find_in(A,B) :- is_in(A,B), !. set_find_in(A,[B|_BB]) :- infer(A=B), !. set_find_in(A,[_B|BB]) :- !, set_find_in(A,BB), !. /*** set_not_in(A,B) -- A is definitely not equal to a member of B ***/ set_not_in(A,[B|BB]) :- !, infer(A<>B), !, set_not_in(A,BB), !. set_not_in(_A,[]) :- !. /*** subset(A,B) -- A is a subset of B ***/ subset([A|AA],B) :- set_find_in(A,B), !, subset(AA,B), !. subset([],_) :- !. /*** set_lacking(A,B,C) -- C is A \ B ***/ set_lacking([],_,[]) :- !. set_lacking(A,[],A) :- !. set_lacking(A,[B|BB],C) :- set_lacking(A,BB,CC), !, ( set_not_in(B,CC), C = CC ; set_remove_all(B,CC,C) ), !. /*** set_remove_all(A,B,C) -- C is B \ {A} ***/ set_remove_all(A,[B|BB],C) :- ( infer(A=B), !, set_remove_all(A,BB,C) ; infer(A<>B), !, set_remove_all(A,BB,CC), C = [B|CC] ), !. set_remove_all(_A,[],[]) :- !. /*** make_record_equality_goal(FIELDS, R1, R2, GOAL) -- for RECORD.RUL ***/ make_record_equality_goal([[F,_T]], R, S, LHS=RHS) :- !, record_function(_, LHS, access, F, [R], Type), /* CFR029 */ record_function(_, RHS, access, F, [S], Type), /* CFR029 */ !. make_record_equality_goal([[F,_T]|FIELDS], R, S, REST_GOAL and LHS=RHS) :- make_record_equality_goal(FIELDS, R, S, REST_GOAL), !, record_function(_, LHS, access, F, [R], Type), /* CFR029 */ record_function(_, RHS, access, F, [S], Type), /* CFR029 */ !. /*** callpro - call a predicate utility ***/ callpro :- ( command_arg(goal, GOAL) ; prompt_user('Goal? '), rread(GOAL), nonvar(GOAL) ), nl, !, ( novars(GOAL), ( call(GOAL), write('SUCCEEDED') ; write('FAILED') ) ; ( call(GOAL), write('*** '), print(GOAL) ; write('FAILED') ) ), !. prompt_user(Prompt):- write(Prompt), output_newline_if_necessary, flush_output, !. prompt_user(Prompt1, Prompt2):- write(Prompt1), nl, write(Prompt2), output_newline_if_necessary, flush_output, !. tab(0) :- !. tab(N) :- !, put_char(' '), NewN is N - 1, tab(NewN). %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/contra.pro0000644000175000017500000000360611753202340016055 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /*** CONTRADICT(FORMULA_OR_CONC_NUM) -- proof by contradiction ***/ contradict(F) :- ( F=c#N, integer(N), conc(N,FORMULA) ; checktype(F,boolean), FORMULA=F ), write('ENTERING PROOF BY CONTRADICTION ATTEMPT'), nl, start_subgoal(FORMULA,[false],(not FORMULA),'CONTRADICTION'), !. 
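/* Illustrative use (not part of the original source): if conclusion C1
   is x > 0, then

       CHECK|: contradict(c#1).

   opens a subgoaling frame in which not (x > 0) is assumed and the goal
   becomes false; discharging that frame proves C1 by contradiction.      */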
/*** IMPLICATION(FORMULA_OR_CONC_NUM) -- proof by implication ***/ implication(F) :- ( F=c#N, integer(N), conc(N,FORMULA) ; checktype(F,boolean), FORMULA=F ), FORMULA=(ASSUMPTIONS -> GOAL), create_formula(GOALS,false,GOAL -> false), write('ENTERING PROOF BY IMPLICATION ATTEMPT'), nl, start_subgoal(FORMULA,GOALS,ASSUMPTIONS,'IMPLICATION'), !. %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/cases2.pro0000644000175000017500000002175411753202340015753 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /******** CASES: top-level checker command ********/ cases :- ( command_arg(on, Formula) ; prompt_user('Type hypothesis no. or cases formula...','CASES> '), rread(F), parse_expression(F, Formula) ), !, cases(Formula), !. cases(F) :- ( F=h#N, integer(N) ; valid(F), no_qvars_in(F), free_hyp_no(N), add_new_hyp(F,N) ), !, do_cases(N), !. /*** FREE_HYP_NO(N) - returns smallest N for which no hypothesis exists ***/ free_hyp_no(N) :- find_first_free_starting_at(1, N). /*** find_first_free_starting_at(Start, Num) -- return the number of the smallest vacant hypothesis number Num counting from Start upwards. ***/ find_first_free_starting_at(S, S) :- \+ hyp(S, _), !. find_first_free_starting_at(S, N) :- NewS is S+1, !, find_first_free_starting_at(NewS, N). /*** DO_CASES(N) - use hypothesis N as case-generator & invoke case-proof ***/ do_cases(N) :- hyp(N,F), no_qvars_in(F), command_arg(expression, EXPRN), nonvar(EXPRN), ( EXPRN = c#CONCNO, conc(CONCNO, FORMULA) ; novars(EXPRN), /* CFR009 */ checktype(EXPRN, boolean), /* CFR009 */ FORMULA = EXPRN, CONCNO = [] ), case_pointer(CP), C is CP+1, retractall(case(C,_,_)), save_cases(C,F), retractall(case_pointer(_)), retractall(on_case(C,_,_)), assertz(logfact(enterframe, C)), assertz(case_pointer(C)), retractall(proved_for_case(C,_)), free_hyp_no(HN), assertz(on_case(C,1,HN)), case_save(C), retractall(subgoal_formula(C,_,_,_)), assertz(subgoal_formula(C, FORMULA, CONCNO, 'CASES')), case(1). /*** VALID(F) - checks if F covers all integers for some subexpression or if F is basically "X or not X" for some formula X ***/ valid(A or (not A)) :- !. valid((not A) or A) :- !. valid(A or B) :- norm_typed_expr((not A)<->B,boolean,true), !. valid(F) :- covers_interval(F,E,L,U), ( L=[] ; infer(E>=L) ), ( U=[] ; infer(E<=U) ), !. /*** COVERS_INTERVAL(F,E,L,U) - expression E ranges over L..U in F ***/ covers_interval(F1 and F2,E,L,U) :- find_range(F1 and F2,E,L,U). covers_interval(E=N,E,N,N). covers_interval(N=E,E,N,N). covers_interval(E<N,E,[],N-1). covers_interval(N>E,E,[],N-1). covers_interval(E<=N,E,[],N).
covers_interval(N>=E,E,[],N). covers_interval(E>N,E,N+1,[]). covers_interval(N<E,E,N+1,[]). covers_interval(E>=N,E,N,[]). covers_interval(N<=E,E,N,[]). covers_interval(A or B,E,L,U) :- covers_interval(A,E,L1,U1), covers_interval(B,E,L2,U2), combine_intervals(L1,U1,L2,U2,L,U). /*** FIND_RANGE(CONJ,E,L,U) - find E in CONJ covering L..U in CONJ ***/ find_range(F1 and F2,E,L,U) :- covers_interval(F1,E,L1,U1), covers_interval(F2,E,L2,U2), find_max(L1,L2,L), find_min(U1,U2,U), infer(L<=U). /*** FIND_MAX(N1,N2,N) - find N in {N1,N2} s.t. N>=N1 and N>=N2 ***/ find_max(N1,N2,N) :- ( intexp(N1), ( intexp(N2), ( N1>=N2, N is N1 ; N2>N1, N is N2 ) ; V is N1, ( infer(V>=N2), N=V ; infer(N2>=V), N=N2 ) ) ; intexp(N2), V is N2, ( infer(N1>=V), N=N1 ; infer(V>=N1), N=V ) ; infer(N1>=N2), N=N1 ; infer(N2>=N1), N=N2 ), !. /*** FIND_MIN(N1,N2,N) - find N in {N1,N2} s.t. N<=N1 and N<=N2 ***/ find_min(N1,N2,N) :- ( intexp(N1), ( intexp(N2), ( N1>=N2, N is N2 ; N2>N1, N is N1 ) ; V is N1, ( infer(V>=N2), N=N2 ; infer(N2>=V), N=V ) ) ; intexp(N2), V is N2, ( infer(N1>=V), N=V ; infer(V>=N1), N=N1 ) ; infer(N1>=N2), N=N2 ; infer(N2>=N1), N=N1 ), !. /*** COMBINE_INTERVALS(L1,L2,U1,U2,LRes,URes) - combine L1..L2 & L2..U2 ***/ combine_intervals(L1,U1,U1,U2,L1,U2) :- U1\=[]. combine_intervals(U1,U2,L1,U1,L1,U2) :- U1\=[]. combine_intervals(L1,L2-1,L2,U2,L1,U2). combine_intervals(L2,U2,L1,L2-1,L1,U2). combine_intervals(L1,L2,L2+1,U2,L1,U2). combine_intervals(L2+1,U2,L1,L2,L1,U2). combine_intervals(L1,U1,L2,U2,L2,U1) :- U2\=[], L1\=[], ( norm_typed_expr(U2+1>=L1,boolean,true) ; infer(U2+1>=L1) ; infer(U2>=L1-1) ; infer(L1<=U2) ). combine_intervals(L1,U1,L2,U2,L1,U2) :- U1\=[], L2\=[], ( norm_typed_expr(U1+1>=L2,boolean,true) ; infer(U1+1>=L2) ; infer(U1>=L2-1) ; infer(L2<=U1) ). combine_intervals(L1,_U1,L2,U2,L2,U2) :- L1\=[], L2\=[], ( norm_typed_expr(L2<=L1,boolean,true) ; infer(L2<=L1) ). /******** STATUS: top-level checker command ********/ status :- nl, case_pointer(CP), CP>0, write('[DEPTH: '), print(CP), write(']'), nl, subgoal_formula(CP,F,_N,METHOD), print(METHOD), write(': '), print(F), nl, list_case_status(CP), !. status :- case_pointer(0), write('[TOP-LEVEL]'), nl, !. status :- !. list_case_status(CP) :- case(CP,N,_F), ( proved_for_case(CP,N), write('*** PROVED FOR CASE '), print(N), nl ; (\+ proved_for_case(CP,N)), write(''), nl ), fail. list_case_status(_) :- !. /******** ABORT_CASE: top-level checker command ********/ abort_case :- case_pointer(CP), CP>0, on_case(CP,_,HN), retractall(on_case(CP,_,_)), retract(hyp(HN,_)), retractall(case(CP,_,_)), retractall(proved_for_case(CP,_)), retractall(case_pointer(_)), C is CP-1, NC is C-1, assertz(case_pointer(NC)), case_restore(C), !. /******** CASE: top-level checker command ********/ case :- ( command_arg(case_number, N) ; prompt_user('Which case? '), rread(N), integer(N) ), !, case(N), !. case(N) :- case_pointer(CP), case(CP,N,F), nl, write('CASE '), print(N), write(': '), print(F), nl, on_case(CP,_,HN), case_restore(CP), assertz(hyp(HN,F)), new_hyp_message(HN, F), /* CFR018 */ retractall(conc(_,_)), subgoal_formula(CP, FORMULA, _, _), assertz(logfact((case), N)), assertz(logfact(newhyp, hyp(HN, F))), clear_up_could_facts, format_formula(logmessage, true -> FORMULA), /* CFR054 */ retractall(on_case(CP,_,HN)), assertz(on_case(CP,N,HN)), !. /*** SAVE_CASES(CP,CG) - save cases in formula CG as cases at depth CP ***/ save_cases(CP,X or Y) :- save_cases(CP,X), save_cases(CP,Y), !. save_cases(CP,F) :- add_new_case(CP,1,F), !.
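/* Worked example (not part of the original source): for an integer
   expression x, the coverage check behind CASES accepts

       ?- valid((x <= 5) or (x > 5)).

   because covers_interval/4 yields the intervals [..5] and [5+1..] and
   combine_intervals/6 merges two abutting intervals (its L2+1 clause)
   into an interval with no lower and no upper bound, i.e. the whole of
   the integers, so no call to infer/1 is needed in this case.            */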
/*** ADD_NEW_CASE(CP,N,F) - add formula F as case N at depth CP ***/ add_new_case(CP,N,F) :- case(CP,N,_), M is N+1, add_new_case(CP,M,F), !. add_new_case(CP,N,F) :- assertz(case(CP,N,F)), !. /*** SAVE(LABEL) - save current VC as LABEL for future recall ***/ case_save(CP) :- retractall(saved_vc(CP,_)), case_save(CP,hyp(_,_)), case_save(CP,conc(_,_)), case_save(CP,forgotten(_)), case_save(CP,deleted(_)), case_save(CP,deleted_hyp(_,_)), case_save(CP,qvar(_)), !. case_save(CP,X) :- call(X), assertz(saved_vc(CP,X)), fail. case_save(_,_). /*** case_restore(LABEL) - recall a previously-saved VC state ***/ case_restore(CP) :- retractall(hyp(_,_)), retractall(conc(_,_)), retractall(forgotten(_)), retractall(deleted(_)), retractall(deleted_hyp(_,_)), retractall(qvar(_)), restore_vc(CP), !. restore_vc(CP) :- saved_vc(CP,Fact), assertz(Fact), fail. restore_vc(_). %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/fwdch2.pro0000644000175000017500000000614511753202340015745 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /*** FORWARDCHAIN -- top-level checker command ***/ forwardchain :- ( command_arg(expression,HC#N) ; prompt_user('FORWARD_CHAIN -- For which hypothesis/conclusion?', 'Type h#N or c#N as appropriate ... '), rread(F), F=HC#N ), !, retract(logfact(command, forwardchain)), asserta(logfact(command, forwardchain(HC#N))), forward_chain(HC,N), !. forward_chain(h,NUMBER) :- hyp(NUMBER,FORMULA), ( FORMULA=(X -> Y), forward_prove(X), NEWFACT=hyp(NUMBER,Y), NEWFORMULA = Y /* CFR026 */ ; FORMULA=(X <-> Y), ( forward_prove(X), find_if_save(NUMBER,Y,A), A=yes, NEWFACT=hyp(NUMBER,Y), NEWFORMULA = Y /* CFR026 */ ; forward_prove(Y), find_if_save(NUMBER,X,A), A=yes, NEWFACT=hyp(NUMBER,X), NEWFORMULA = X /* CFR026 */ ) ), retract(hyp(NUMBER,FORMULA)), assertz(logfact(newhyp, NEWFACT)), assertz(NEWFACT), new_hyp_message(NUMBER, NEWFORMULA), /* CFR026 */ !. forward_chain(c,NUMBER) :- conc(NUMBER,FORMULA), ( hyp(N,_ -> FORMULA) ; hyp(N,_ <-> FORMULA) ; hyp(N,FORMULA <-> _) ), forward_chain(h,N), done(NUMBER), !. /*** FORWARD_PROVE(F) - try to prove F, displaying tracking messages ***/ forward_prove(X) :- nl, write('TRYING TO PROVE: '), print(X), infer(X), nl, write('PROVED: '), print(X), nl, !. forward_prove(X) :- nl, write('FAILED TO PROVE: '), print(X), fail. /*** FIND_IF_SAVE(N,F,A) - show F follows from hypothesis N & ask if save ***/ find_if_save(N,_,_) :- nl, nl, write('From H'), print(N), put_code(58), /* ":" */ put_code(32), /* " " */ hyp(N,X), print(X), nl, fail. 
find_if_save(_,F,A) :- write('Proved: '), print(F), nl, read_answer('Keep this result',A), !. %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/sicstus.pro0000644000175000017500000002447611753202340016274 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= :- use_module(library(lists)). :- use_module(library(codesio)). :- use_module(library(terms)). /* Provides access to deprecated * * 'system' library from SICStus3 */ :- use_module(library(system3)). :- set_prolog_flag(syntax_errors, dec10). /* Memory */ /* Setting of memory limit is TBD for SICSTUS */ set_memory_limit(_M) :- !. /* Mathematics */ eval_div(X, Y, Z) :- Z is X // Y. fetch_date_and_time(DATE, TIME) :- datime(datime(Year, Month, Day, Hour, Min, Sec)), number_codes(Year, [Y1, Y2, Y3, Y4]), ( number_codes(Month, [M1, M2]) ; number_codes(Month, [M2]), M1 = 48 ), ( number_codes(Day, [D1, D2]) ; number_codes(Day, [D2]), D1 = 48 ), ( number_codes(Hour, [H1, H2]) ; number_codes(Hour, [H2]), H1 = 48 ), ( number_codes(Min, [Mi1, Mi2]) ; number_codes(Min, [Mi2]), Mi1 = 48 ), ( number_codes(Sec, [S1, S2]) ; number_codes(Sec, [S2]), S1 = 48 ), numeric_month_to_string([M1, M2], [MU1, MU2, MU3]), atom_codes(DATE, [D1,D2,45,MU1,MU2,MU3,45,Y1,Y2,Y3,Y4]), atom_codes(TIME, [H1, H2, 58, Mi1, Mi2, 58, S1, S2]), !. get_command_line(DATA) :- prolog_flag(argv, DATA), !. eof_char(-1). /* Always -1 on all SICSTUS platforms */ % All platforms have the same eol character. eol_char(10). :- set_prolog_flag(compiling, compactcode). set_exit_status :- !. /* TBD for SICSTUS */ /* In iso language mode, SICSTUS read/1 _doesn't_ consume layout by default, so we */ /* have to use the more flexible read_term/2 */ read_term_and_layout(V) :- read_term(A, [consume_layout(true)]), !, normalize_negative_literals(A, V). /*** Succeeds iff FNAME denotes a readable, regular file */ file_exists_and_is_readable(FNAME) :- file_exists(FNAME, [exists]), file_exists(FNAME, [read]). /*** Succeeds if file exists and can be written, or file does not exist */ file_can_be_written(FNAME) :- file_exists(FNAME, [exists]), file_exists(FNAME, [write]). file_can_be_written(FNAME) :- \+ file_exists(FNAME). /* get_file_attrib/3 for SICSTUS, where we want the full file-name only */ get_file_attrib(FNAME, [FULL_NAME|_], _) :- absolute_file_name(FNAME, FULL_NAME). 
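/* Illustrative result (not part of the original source): for a datime of
   5 June 2012, 09:03:07, fetch_date_and_time/2 above would give

       DATE = '05-JUN-2012'
       TIME = '09:03:07'

   single-digit fields being zero-padded and the month number mapped
   through numeric_month_to_string/2 (defined in utilities.pro).          */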
list_files_with_extension(Extension) :- name(Extension, ExtensionCharList), append(".", ExtensionCharList, DotExtensionCharList), /* Determine the current working directory*/ working_directory(Dir, Dir), /* Find list of all files */ directory_files(Dir, FileAtomList), /* Find list of files with appropriate extension. */ filesWithExtension(FileAtomList, DotExtensionCharList, FilesMatchingList), ( /* No matching files found. */ FilesMatchingList=[], write(' ') ; /* Some matching files found */ displayListOfFiles(FilesMatchingList) ), !, fail. displayListOfFiles([]). displayListOfFiles([H_FilesMatching | T_FilesMatchingList]) :- /* tab is obsolescent in SICSTUS but still works */ tab(5), write(H_FilesMatching), nl, displayListOfFiles(T_FilesMatchingList). filesWithExtension([], _, []). filesWithExtension([H_InFileAtom | T_InFileAtomList], DotExtensionCharList, [H_OutFileAtom | T_OutFileAtomList]) :- name(H_InFileAtom, H_InFileCharList), /* Check this has suffix looking for */ /* Does have suffix, record this file, minus the suffix */ append(JustNameCharList, DotExtensionCharList, H_InFileCharList), name(JustNameAtom, JustNameCharList), H_OutFileAtom=JustNameAtom, filesWithExtension(T_InFileAtomList, DotExtensionCharList, T_OutFileAtomList). filesWithExtension([_ | T_InFileAtomList], DotExtensionCharList, OutFileAtomList) :- filesWithExtension(T_InFileAtomList, DotExtensionCharList, OutFileAtomList). /************************************************************************/ /* For a run-time system build of the checker, the environment variable */ /* SPADE_CHECKER may be used to override the default. The default is */ /* $SP_APP_DIR/../lib/checker/rules/ where $SP_APP_DIR is set by SICSTUS*/ /* to be the directory in which the checker binary is located - the */ /* normal place for rules to be located in a customer installation. */ /* */ /* Help files are similarly located, but may be overridden by a user */ /* setting SPADE_CHKHELP */ /* */ /* This policy mimics the behaviour of the wrapper.apb program used */ /* with POPLOG-built checkers. */ /************************************************************************/ fetch_environment_variables :- current_prolog_flag(system_type, runtime), !, environ('SP_APP_DIR', CHECKER_ROOT), atom_codes(CHECKER_ROOT, CR), ( environ('SPADE_CHECKER', RULES_PATH), atom_codes(RULES_PATH, RULES_PATH_STR), append(RULES_PATH_STR, "/", RULES_PATH_STR2), assertz(spade_checker_prefix(RULES_PATH_STR2)) ; \+ environ('SPADE_CHECKER', _Y), append(CR, "/../lib/checker/rules/", RULES_PATH), assertz(spade_checker_prefix(RULES_PATH)) ), ( environ('SPADE_CHKHELP', HELP_PATH), atom_codes(HELP_PATH, HELP_PATH_STR), append(HELP_PATH_STR, "/", HELP_PATH_STR2), assertz(spade_chkhelp_prefix(HELP_PATH_STR2)) ; \+ environ('SPADE_CHKHELP', _Y), append(CR, "/../lib/checker/helptext/", HELP_PATH), assertz(spade_chkhelp_prefix(HELP_PATH)) ). /************************************************************************/ /* For a development build of the checker, the environment variable */ /* SPADE_CHECKER may be used to override the default. The default is */ /* $CWD/../../customer/rules/ where $CWD is the current working */ /* directory - the normal place for rules to be located relative to the */ /* checker source code. 
*/ /* */ /* Help files are similarly located in $CWD/../../customer/helptext/ */ /* by default but this may also be overridden by setting SPADE_CHKHELP */ /* */ /* This version also reports its action to the standard output for */ /* convenicence and debugging */ /************************************************************************/ fetch_environment_variables :- current_prolog_flag(system_type, development), !, environ('SP_APP_DIR', CHECKER_ROOT), working_directory(CWD, CWD), atom_codes(CWD, CR), print('!!! This is a SICSTUS Development System'), nl, print('!!! SP_APP_DIR is '), print(CHECKER_ROOT), nl, print('!!! CWD is '), print(CWD), nl, ( environ('SPADE_CHECKER', RULES_PATH), print('!!! Rules in '), print(RULES_PATH), nl, atom_codes(RULES_PATH, RULES_PATH_STR), append(RULES_PATH_STR, "/", RULES_PATH_STR2), assertz(spade_checker_prefix(RULES_PATH_STR2)) ; \+ environ('SPADE_CHECKER', _X), append(CR, "/../../customer/rules/", RULES_PATH), atom_codes(RP_STR, RULES_PATH), print('!!! Rules in '), print(RP_STR), nl, assertz(spade_checker_prefix(RULES_PATH)) ), ( environ('SPADE_CHKHELP', HELP_PATH), print('!!! Help in '), print(HELP_PATH), nl, atom_codes(HELP_PATH, HELP_PATH_STR), append(HELP_PATH_STR, "/", HELP_PATH_STR2), assertz(spade_chkhelp_prefix(HELP_PATH_STR2)) ; \+ environ('SPADE_CHKHELP', _Y), append(CR, "/../../customer/helptext/", HELP_PATH), atom_codes(HP_STR, HELP_PATH), print('!!! Help in '), print(HP_STR), nl, assertz(spade_chkhelp_prefix(HELP_PATH)) ). printq(X) :- write_term(X, [portrayed(true), quoted(true), numbervars(true)]). /* This duplicates the poplog version of numbervars/3. The sicstus version unifies */ /* the variables with letters of the alphabet, whereas poplog uses _ */ mynumbervars(Term, N, M) :- term_variables(Term, Var), format_vars(Var, N, X, M), Var=X. convert(Number, '$VAR2'(Number)):- !. format_vars([], FinalNum, [], FinalNum) :- !. format_vars([_H|T], Num, [Y|L], FinalNum) :- convert(Num, Y), Num1 is Num + 1, format_vars(T, Num1, L, FinalNum). %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/dynamics.pro0000644000175000017500000001007311753202340016372 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= :- dynamic(abandon_search/0). :- dynamic(all_done/1). :- dynamic(bad_rulefile/0). :- dynamic(banned_rule/2). :- dynamic(case/3). :- dynamic(cmd_line_command_log/1). :- dynamic(cmd_line_filename/1). :- dynamic(cmd_line_proof_log/1). :- dynamic(command_arg/2). :- dynamic(command_log_filename/1). :- dynamic(con/1). :- dynamic(conc/2). :- dynamic(conc_to_replace/1). 
:- dynamic(could_infer/1). :- dynamic(could_not_infer/1). :- dynamic(csvfile_name/1). :- dynamic(current_record_field_number/1). :- dynamic(current_root/2). :- dynamic(current_sat_goal/1). :- dynamic(current_vc/2). :- dynamic(current_vc_no/1). :- dynamic(deleted/1). :- dynamic(deleted_hyp/2). :- dynamic(do_do_newvc/0). :- dynamic(do_not_issue_failure_message/0). :- dynamic(done__proof_log/0). :- dynamic(done__resume/0). :- dynamic(enumeration/2). :- dynamic(fdl_file_title/1). :- dynamic(fdlfile_name/1). :- dynamic(forgotten/1). :- dynamic(function/3). :- dynamic(function_template/3). :- dynamic(hn/1). :- dynamic(hyp/2). :- dynamic(hyp_to_replace/1). :- dynamic(in_declare_command/0). :- dynamic(inf_match/0). :- dynamic(ini_file_consult/1). :- dynamic(inst_case/4). :- dynamic(inst_form/3). :- dynamic(inst_saved_vc/3). :- dynamic(inst_subgoal_formula/5). :- dynamic(is_vc/1). :- dynamic(is_true_vc/2). :- dynamic(logfact/2). :- dynamic(logfile_name/1). :- dynamic(logged_rule_match/3). :- dynamic(mk__function_name/3). :- dynamic(newsub/1). :- dynamic(num_matches/1). :- dynamic(numsubs/1). :- dynamic(occstoreplace/1). :- dynamic(on_case/3). :- dynamic(on_filename/1). :- dynamic(pattern/1). :- dynamic(perform_script_file/1). :- dynamic(pos_newsub/1). :- dynamic(posslog/2). :- dynamic(previous_character/1). :- dynamic(proved_for_case/2). :- dynamic(qbindingname/2). :- dynamic(qualifier_prefix/1). :- dynamic(qvar/1). :- dynamic(recent_save_command_issued/0). :- dynamic(record_function/6). :- dynamic(rep_working_on/3). :- dynamic(replace_all_expr_type/1). :- dynamic(required_sub/1). :- dynamic(rule_applied/1). :- dynamic(ruleused/1). :- dynamic(ruleused_this_session/1). :- dynamic(satisfies/2). :- dynamic(saved_vc/2). :- dynamic(search_count/1). :- dynamic(spade_checker_prefix/1). :- dynamic(spade_chkhelp_prefix/1). :- dynamic(spark_enabled/0). :- dynamic(stage_num/1). :- dynamic(status/1). :- dynamic(sub/1). :- dynamic(subgoal_formula/4). :- dynamic(temp_del_hyp/2). :- dynamic(tidied_subs/1). :- dynamic(time_for_new_vc/0). :- dynamic(toplevel_execute/2). :- dynamic(totally_specified_replace/0). :- dynamic(trying_a_replace_all/0). :- dynamic(tv_cmd_buffer/1). :- dynamic(tv_depth/1). :- dynamic(tv_expr/2). :- dynamic(tv_trace/1). :- dynamic(twiddles_conversion/2). :- dynamic(type/2). :- dynamic(type_alias/2). :- dynamic(type_classification_done/0). :- dynamic(type_classification/2). :- dynamic(used/1). :- dynamic(used_ident/2). :- dynamic(user_rulefile/2). :- dynamic(user_classification/4). :- dynamic(uvar/1). :- dynamic(var_const/3). :- dynamic(vc/2). :- dynamic(vcgfile_name/1). :- dynamic(vcs_to_prove/1). :- dynamic(vc_name/1). :- dynamic(vcs_proved_this_session/1). %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/prooflogs.pro0000644000175000017500000005500711753202340016603 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /*** write_log - write proof log output for this step ***/ write_log :- \+ logfact(_, _), !. write_log :- logfile_name(LOGNAME), step_number(STEP), fetch_indentation(SPC), file_can_be_written(LOGNAME), tell(LOGNAME), nl, process_logfact(SPC, step, STEP), nl, retract(logfact(TYPE, OBJECT)), fetch_indentation(SPACES), process_logfact(SPACES, TYPE, OBJECT), nl, fail. write_log :- logfile_name(LOGNAME), \+ file_can_be_written(LOGNAME), write('Aborted: '), print(LOGNAME), write(' cannot be written.'), nl, !, close_all_streams, halt. write_log :- tell(user), !. fetch_indentation(INDENTATION) :- indentation(INDENTATION), !. process_logfact(SPACES, step, STEP) :- ( ( \+ logfact(exit, []) ; \+ logfact(forceexit, []) ), tab(SPACES), write('STEP '), print(STEP) ; true ), !. process_logfact(SPACES, text, TEXT) :- tab(SPACES), prooflog_width(W), ( W=0, print(TEXT) ; WIDTH is W-SPACES, pretty_write(TEXT,WIDTH,_) ), !. process_logfact(SPACES, command, COMMAND) :- tab(SPACES), write('Command: '), print(COMMAND), !. process_logfact(SPACES, method, [(cases), h#N]) :- tab(SPACES), write('Commence PROOF BY CASES attempt on H'), print(N), !. process_logfact(SPACES, method, METHOD) :- tab(SPACES), write('Commence PROOF BY '), print(METHOD), write(' attempt'), !. process_logfact(SPACES, newhyp, hyp(N,F)) :- tab(SPACES), write('*** New H'), prooflog_width(W), ( W=0, print(N), write(': '), print(F) ; WIDTH is W-SPACES-9, pretty_write(N, WIDTH, W1), pretty_write(': ', W1, W2), pretty_write(F, W2, _) ), !. process_logfact(SPACES, newconc, conc(N,F)) :- tab(SPACES), write('>>> New goal C'), prooflog_width(W), ( W=0, print(N), write(': '), print(F) ; WIDTH is W-SPACES-14, pretty_write(N, WIDTH, W1), pretty_write(': ', W1, W2), pretty_write(F, W2, _) ), !. process_logfact(SPACES, vcname, VCNAME) :- tab(SPACES), write('Now attempting proof of VC: '), print(VCNAME), retractall(ruleused(_)), nl, list, !. process_logfact(SPACES, rulematch, ([FILE,NAME]: (F may_be_deduced_from G))) :- tab(SPACES), write('Successful inference with rule: '), print(NAME), nl, IN is SPACES+2, write_subgoal_list_proved(IN, G), tab(SPACES), write('Therefore '), prooflog_width(W), ( W=0, print(F) ; WIDTH is W-SPACES-10, pretty_write(F, WIDTH, _) ), save_ruleused([FILE,NAME]), !. process_logfact(SPACES, rulematch, ([FILE,NAME]: (X may_be_replaced_by Y if G))) :- tab(SPACES), write('Successful substitution with rule: '), print(NAME), nl, IN is SPACES+2, write_subgoal_list_proved(IN, G), tab(SPACES), write('Allowing substitution of '), prooflog_width(W), ( W=0, print(Y), nl, tab(SPACES), write('for '), print(X) ; WIDTH is W-SPACES-25, pretty_write(Y, WIDTH, _), nl, tab(SPACES), write('for '), WW is W-SPACES-4, pretty_write(X, WW, _) ), save_ruleused([FILE,NAME]), !. process_logfact(0, proved, vc(NAME)) :- write('*** PROVED VC '), print(NAME), nl, nl, write_rules_used, nl, nl, nl, !. process_logfact(SPACES, proved, all) :- tab(SPACES), write('*** Proved all conclusions'), nl, !. 
process_logfact(SPACES, proved, conc(N,F)) :- tab(SPACES), write('*** Proved C'), prooflog_width(W), ( W=0, print(N), write(': '), print(F) ; WIDTH is W-SPACES-12, pretty_write(N, WIDTH, W1), pretty_write(': ', W1, W2), pretty_write(F, W2, _) ), !. process_logfact(SPACES, enterframe, NO) :- tab(SPACES), write('Entering new proof frame (DEPTH '), print(NO), write(') ...'), indentation(OLD_INDENT), retractall(indentation(_)), indentation_increment(INCREMENT), NEW_INDENT is OLD_INDENT+INCREMENT, asserta(indentation(NEW_INDENT)), !. process_logfact(SPACES, exitframe, NO) :- tab(SPACES), write('Exiting current proof frame (DEPTH '), print(NO), write(')'), indentation(OLD_INDENT), retractall(indentation(_)), indentation_increment(INCREMENT), NEW_INDENT is OLD_INDENT-INCREMENT, asserta(indentation(NEW_INDENT)), !. process_logfact(SPACES, quitframe, _NO) :- tab(SPACES), write('QUIT: give up attempt to prove subgoals of current proof frame'), indentation(OLD_INDENT), retractall(indentation(_)), indentation_increment(INCREMENT), NEW_INDENT is OLD_INDENT-INCREMENT, asserta(indentation(NEW_INDENT)), !. process_logfact(SPACES, infer, FORMULA) :- tab(SPACES), write('Attempting to infer '), prooflog_width(W), ( W=0, print(FORMULA) ; WIDTH is W-SPACES-20, pretty_write(FORMULA, WIDTH, _) ), !. process_logfact(SPACES, subgoal, ([FILE,NAME]: (F may_be_deduced_from G))) :- tab(SPACES), write('Attempt to prove '), prooflog_width(W), ( W=0, print(F) ; WIDTH is W-SPACES-17, pretty_write(F, WIDTH, _) ), nl, tab(SPACES), write('by subgoaling on rule '), print(NAME), write('.'), nl, tab(SPACES), write('Unsatisfied subgoals are:'), nl, IN is SPACES+2, write_unproved_subgoals(IN, G), save_ruleused([FILE,NAME]), !. process_logfact(SPACES, deduce, FORMULA) :- tab(SPACES), write('Proved: '), prooflog_width(W), ( W=0, print(FORMULA) ; WIDTH is W-SPACES-8, pretty_write(FORMULA, WIDTH, _) ), nl, tab(SPACES), write(' by logical deduction'), !. process_logfact(SPACES, standardisation, [OLD, NEW]) :- tab(SPACES), write('Use of standardisation'), nl, tab(SPACES), write(' on '), prooflog_width(W), ( W=0, print(OLD) ; WIDTH is W-SPACES-8, pretty_write(OLD, WIDTH, _) ), nl, tab(SPACES), write(' gives '), ( W=0, print(NEW) ; pretty_write(NEW, WIDTH, _) ), !. process_logfact(SPACES, standardisation, hyp(N)) :- tab(SPACES), write('Apply standardiser on H'), print(N), !. process_logfact(SPACES, standardisation, conc(N)) :- tab(SPACES), write('Apply standardiser on C'), print(N), !. process_logfact(SPACES, cases, hyp(N)) :- tab(SPACES), write('Start proof-by-cases attempt on disjunction formula H'), print(N), !. process_logfact(SPACES, (case), NO) :- tab(SPACES), write('Begin CASE '), print(NO), write(':'), !. process_logfact(SPACES, induction, [FORMULA, VAR, BASE]) :- tab(SPACES), write('Attempt to prove '), prooflog_width(W), ( W=0, print(FORMULA) ; WIDTH is W-SPACES-17, pretty_write(FORMULA, WIDTH, _) ), nl, tab(SPACES), write('by induction on '), print(VAR), write(' (base case: '), print(VAR), write(' = '), print(BASE), write(')'), !. process_logfact(SPACES, (consult), FILENAME) :- tab(SPACES), write('Consulted rulefile: '), print(FILENAME), !. process_logfact(SPACES, library_name, FILENAME) :- tab(SPACES), write('Library rulefile: '), print(FILENAME), !. process_logfact(SPACES, lib_fault, [IDENT, STRUCT]) :- tab(SPACES), write('LIBRARY REJECTED: mismatch between .FDL declarations and library.'), nl, write('Involves identifier '), print(IDENT), write(' in: '), print(STRUCT), write('.'), !. 
process_logfact(SPACES, lib_ok, []) :- tab(SPACES), write('LIBRARY LOADED: No FDL declaration mismatches found.'), !. process_logfact(_, exit, _) :- write('*** END OF PROOF SESSION'), nl, nl, nl, write_summary_of_rules_used, nl, write_summary_of_proof_status, nl, !. process_logfact(_, forceexit, _) :- write('*** END OF PROOF SESSION'), nl, nl, nl, write_summary_of_rules_used, nl, write_summary_of_proof_status, nl, !. process_logfact(_, true_vc, VCNAME) :- /* CFR004 */ write('*** True VC eliminated: '), /* CFR004 */ print(VCNAME), /* CFR004 */ !. /* CFR004 */ process_logfact(SPACES, TYPE, OBJECT) :- tab(SPACES), write('!!! UNEXPECTED LOGFACT: logfact('), print(TYPE), write(', '), print(OBJECT), write(')'), !. write_subgoal_list_proved(IN, [goal(G)]) :- tab(IN), write('Met constraint: '), /* G might have a variable in it, so renumber for consistency */ mynumbervars(G, 1, _), prooflog_width(W), ( W=0, print(G) ; WIDTH is W-IN-16, pretty_write(G, WIDTH, _) ), nl, !. write_subgoal_list_proved(IN, [F]) :- tab(IN), write('Proved subgoal: '), prooflog_width(W), ( W=0, print(F) ; WIDTH is W-IN-16, pretty_write(F, WIDTH, _) ), nl, !. write_subgoal_list_proved(IN, [goal(G)|REST]) :- tab(IN), write('Met constraint: '), /* G might have a variable in it, so renumber for consistency */ mynumbervars(G, 1, _), prooflog_width(W), ( W=0, print(G) ; WIDTH is W-IN-16, pretty_write(G, WIDTH, _) ), nl, write_subgoal_list_proved(IN, REST), !. write_subgoal_list_proved(IN, [F|REST]) :- tab(IN), write('Proved subgoal: '), prooflog_width(W), ( W=0, print(F) ; WIDTH is W-IN-16, pretty_write(F, WIDTH, _) ), nl, write_subgoal_list_proved(IN, REST), !. write_subgoal_list_proved(IN, []) :- tab(IN), write('(unconstrained rule: no subgoals)'), nl, !. write_unproved_subgoals(IN, [F]) :- tab(IN), write('Subgoal: '), prooflog_width(W), ( W=0, print(F) ; WIDTH is W-IN-9, pretty_write(F, WIDTH, _) ), nl, !. write_unproved_subgoals(IN, [F|REST]) :- tab(IN), write('Subgoal: '), prooflog_width(W), ( W=0, print(F) ; WIDTH is W-IN-9, pretty_write(F, WIDTH, _) ), nl, write_unproved_subgoals(IN, REST), !. write_unproved_subgoals(IN, []) :- tab(IN), write('(no subgoals remaining (?!))'), nl, !. /*** If F begins with the value of $SPADE_CHECKER then strip it off ***/ strip_rule_prefix(F, B) :- spade_checker_prefix(SCP), name(F, FCHARS), append(SCP, BASECHARS, FCHARS), name(B, BASECHARS), !. /*** otherwise just leave it alone ***/ strip_rule_prefix(F, B) :- F = B, !. print_rulename(N, R) :- plain_output(off), !, print(N), write('::'), print(R), nl. print_rulename(N, R) :- plain_output(on), !, strip_rule_prefix(N, B), print(B), write('::'), print(R), nl. write_rules_used :- sort_rules_used, fail. write_rules_used :- \+ ruleused(_), write('The above proof did not make use of the proof rules database'), nl, !. write_rules_used :- ruleused(X), \+ used_rule_other_than(X), maybe_add(ruleused_this_session(X)), write('The only rule used in proving the above VC was:'), nl, tab(10), retract(ruleused([N,R])), print_rulename(N, R), !. write_rules_used :- write('The following rules were used in proving the above VC:'), nl, retract(ruleused([N,R])), maybe_add(ruleused_this_session([N,R])), tab(10), print_rulename(N, R), fail. write_rules_used :- !. used_rule_other_than(X) :- ruleused(Y), Y\=X, !. write_summary_of_rules_used :- sort_rules_used_this_session, fail. write_summary_of_rules_used :- \+ ruleused_this_session(_), write('The above proof session did not make use of the proof rules database'), nl, !. 
write_summary_of_rules_used :- ruleused_this_session(X), \+ used_rule_this_session_other_than(X), write('The only rule used in the above proof session was:'), nl, tab(10), retract(ruleused_this_session([N,R])), print_rulename(N, R), !. write_summary_of_rules_used :- write('The following rules were used during the above proof session:'), nl, retract(ruleused_this_session([N,R])), tab(10), print_rulename(N, R), fail. write_summary_of_rules_used :- !. used_rule_this_session_other_than(X) :- ruleused_this_session(Y), Y\=X, !. save_ruleused(R) :- ruleused(R), !. save_ruleused(R) :- assertz(ruleused(R)), !. ruleused_noprefix([SN, FN, RN]) :- ruleused([FN, RN]), strip_rule_prefix(FN, SN). ruleused_this_session_noprefix([SN, FN, RN]) :- ruleused_this_session([FN, RN]), strip_rule_prefix(FN, SN). sort_rules_used :- /* Sort rules using the simple name (without the prefix) as primary key */ findall(X, ruleused_noprefix(X), A), A \== [], sort(A, U), !, retractall(ruleused(_)), add_rules_again(ruleused,U), !. sort_rules_used_this_session :- findall(X, ruleused_this_session_noprefix(X), B), B \== [], sort(B, U), !, retractall(ruleused_this_session(_)), add_rules_again(ruleused_this_session,U), !. add_rules_again(_P,[]) :- !. add_rules_again(P,[H|T]) :- H = [_SN, FN, RN], F =.. [P,[FN, RN]], assertz(F), !, add_rules_again(P,T), !. write_summary_of_proof_status :- write_vcs_proved, nl, write_vcs_not_proved, !. write_vcs_proved :- vcs_proved_this_session([]), !, write('No VCs were proved during this proof session.'), nl, !. write_vcs_proved :- vcs_proved_this_session([[N]]), !, write('The only VC proved during this proof session was: '), print(N), nl, !. write_vcs_proved :- vcs_proved_this_session(VCS), !, write('The following VCs were proved during this proof session:'), !, nl, tab(8), write_numbers_left(VCS), !. write_vcs_proved :- \+ vcs_proved_this_session(_), !, write('No VCs were proved during this proof session.'), nl, !. write_vcs_not_proved :- vcs_to_prove([]), !, write('There are no more VCs left to prove.'), !. write_vcs_not_proved :- vcs_to_prove([[N]]), !, write('The only VC left to prove is: '), print(N), !. write_vcs_not_proved :- vcs_to_prove(VCS), !, write('The following VCs have not yet been proved: '), nl, tab(8), write_numbers_left(VCS), !. write_vcs_not_proved :- \+ vcs_to_prove(_), !, write('There are no more VCs left to prove.'), !. /*** pretty_write(EXPRN, OLD_COLUMNS_LEFT, NEW_COLUMNS_LEFT) ***/ pretty_write(A, OC, NC) :- atomic(A), size(A,L), !, ( L=OC, nl, print(A), prooflog_width(LW), ( L=LW, NC=0 ) ), !. pretty_write([X|Y],OC,NC) :- pretty_write('[',OC,NC1), !, pretty_write_arg_list([X|Y],NC1,NC2), !, pretty_write(']',NC2,NC), !. 
pretty_write(A, OC, NC) :- \+(atomic(A)), nonvar(A), A=..[F|Args], !, ( current_op(PREC,ASSOC,F), arg_nums_compatible(ASSOC,Args), ( Args=[AA], ( AA=..[G,A1], B=[A1] ; AA=..[G,A1,A2], B=[A1,A2] ), current_op(Prec2,Assoc2,G), arg_nums_compatible(Assoc2,B), Prec2>=PREC, ( ( ASSOC=fx ; ASSOC=fy ), pretty_write(F,OC,NC1), !, pretty_write(' (',NC1,NC2), !, pretty_write(AA,NC2,NC3), !, pretty_write(')',NC3,NC) ; ( ASSOC=xf ; ASSOC=yf ), pretty_write('(',OC,NC1), !, pretty_write(AA,NC1,NC2), !, pretty_write(') ',NC2,NC3), !, pretty_write(F,NC3,NC) ) ; Args=[A1,A2], ( A1=..[G1|Args1], current_op(Prec1,Assoc1,G1), arg_nums_compatible(Assoc1,Args1), Prec1>=PREC, pretty_write('(',OC,NC1), !, pretty_write(A1,NC1,NC2), !, pretty_write(')',NC2,NC3) ; pretty_write(A1,OC,NC3) ), !, pretty_write(' ',NC3,NC4), !, pretty_write(F,NC4,NC5), !, pretty_write(' ',NC5,NC6), ( A2=..[G2|Args2], current_op(Prec2,Assoc2,G2), arg_nums_compatible(Assoc2,Args2), Prec2>=PREC, pretty_write('(',NC6,NC7), !, pretty_write(A2,NC7,NC8), !, pretty_write(')',NC8,NC) ; pretty_write(A2,NC6,NC) ) ) ; OC1 is OC-1, pretty_write(F,OC1,NC1), !, write('('), pretty_write_arg_list(Args, NC1, NC2), !, ( NC2>0, write(')'), NC is NC2-1 ; NC2=<0, nl, write(')'), prooflog_width(LW), NC is LW-1 ) ), !. pretty_write_arg_list([A],OC,NC) :- !, pretty_write(A,OC,NC), !. pretty_write_arg_list([A|AL],OC,NC) :- !, pretty_write(A,OC,NC1), !, pretty_write(', ',NC1,NC2), !, pretty_write_arg_list(AL,NC2,NC), !. arg_nums_compatible(fx,[_]). arg_nums_compatible(fy,[_]). arg_nums_compatible(xf,[_]). arg_nums_compatible(yf,[_]). arg_nums_compatible(xfx,[_,_]). arg_nums_compatible(xfy,[_,_]). arg_nums_compatible(yfx,[_,_]). arg_nums_compatible(yfy,[_,_]). size(A,N) :- atom(A), name(A,L), !, length(L,N), !. size(A,N) :- integer(A), ( A>=0, S=0, B=A ; A<0, S=1, B is -A ), !, count_places(B,P), !, N is P+S. count_places(I,1) :- I<10. count_places(I,P) :- I>9, I1 iss I div 10, !, count_places(I1,P1), !, P is P1+1. %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/listthm.pro0000644000175000017500000002335211753202340016253 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /******** LIST: top-level checker command ********/ list :- /* LIST THE WHOLE VC */ \+ command_arg(list,_), max_hyp_no(HMAX), list(h,1-HMAX), write('-->'), nl, max_conc_no(CMAX), listconcs(1,CMAX), !. 
list :- command_arg(list,VCPART), ( VCPART=HC#N, list(HC,N) ; VCPART=HC#N-M, list(HC,N-M) ; /* CFR010 */ VCPART = deleted, /* CFR010 */ list_deleted_hypotheses /* CFR010 */ ; /* CFR010 */ VCPART = forgotten, /* CFR010 */ list_forgotten_hypotheses /* CFR010 */ ), fail. list :- !. list(HORC,N) :- /* LIST A SINGLE HYPOTHESIS/CONCLUSION */ integer(N), ( HORC=h, HC=hyp ; HORC=c, HC=conc ), X=..[HC,N,Y], call(X), ( HC=hyp, write('H') ; HC=conc, write(' C') ), print(N), put_code(58), /* ":" */ tab(2), print(Y), nl, !. list(HORC,N-M) :- /* LIST HYPOTHESES/CONCLUSIONS IN A RANGE */ integer(N), integer(M), N=N )), !. max_conc_no(0). /*** MAX_HYP_NO(MAX) -- highest used hypothesis number ***/ max_hyp_no(N) :- hyp(N,_), \+ (( hyp(M,_), M>N )), !. max_hyp_no(0). /*** PRINTVC - write VC out to a file. */ printvc :- current_vc(NAME, NUM), pvc_make_filename(NUM, FILENAME), !, write_vc_data(FILENAME, NAME), !, write('Printed VC to file: '), write(FILENAME), nl, !. /* PVC_MAKE_FILENAME(NUM, NAME) -- create filename for writing VC to ***/ pvc_make_filename(N, FILE) :- name(N, NL), append(NL, ".LIS", TL), append("VC", TL, FL), name(FILE, FL), !. /*** WRITE_VC_DATA(FILENAME, VC) -- write VC & status info out to file ***/ write_vc_data(FILE, VC) :- file_can_be_written(FILE), tell(FILE), write_vc_description(VC), !, list, !, write_forgotten_hyps, !, write_deleted_hyps, !, write_depth_information, !, told. write_vc_data(FILE, _VC) :- \+ file_can_be_written(FILE), write('Warning: '), print(FILE), write(' cannot be written.'), nl, !, fail. /*** WRITE_VC_DESCRIPTION(VC) -- write its name and current step no. ***/ write_vc_description(VC) :- write('VC: '), print(VC), write(', Step: '), step_number(N), print(N), put_code(46), nl, nl, !. /*** WRITE_FORGOTTEN_HYPS ***/ write_forgotten_hyps :- \+ forgotten(_), !. write_forgotten_hyps :- nl, nl, write('PLUS the following forgotten (hidden but useable) hypotheses:'), nl, nl, fail. write_forgotten_hyps :- forgotten(N), list(h,N), fail. write_forgotten_hyps :- !. /*** WRITE_DELETED_HYPS ***/ write_deleted_hyps :- \+ deleted(_), !. write_deleted_hyps :- nl, nl, write('PLUS the following deleted (currently unuseable) hypotheses:'), nl, nl, fail. write_deleted_hyps :- deleted(N), write_deleted_hyp(N), fail. write_deleted_hyps :- !. /*** WRITE_DELETED_HYP(N): write it out */ write_deleted_hyp(N) :- /* WRITE SINGLE DELETED HYPOTHESIS */ deleted_hyp(N, Y), write('D'), print(N), put_code(58), /* ":" */ tab(2), print(Y), nl, !. /*** WRITE_DEPTH_INFORMATION -- summarise current status ***/ write_depth_information :- case_pointer(0), !. write_depth_information :- nl, nl, write('You are currently at depth '), case_pointer(DEPTH), print(DEPTH), write('. Back-trace from here is:'), nl, nl, !, print_status_info_at_depth(DEPTH), !. /*** PRINT_STATUS_INFO_AT_DEPTH(DEPTH) -- list previous depths back to top ***/ print_status_info_at_depth(0) :- !. print_status_info_at_depth(DEPTH) :- DEPTH>0, !, NEWDEPTH is DEPTH-1, pvc_print_info_at_depth(DEPTH, NEWDEPTH), !, print_status_info_at_depth(NEWDEPTH), !. /*** PVC_PRINT_INFO_AT_DEPTH(DEPTH, NEWDEPTH) -- info at this depth. 
***/ pvc_print_info_at_depth(DEPTH, NEWDEPTH) :- subgoal_formula(DEPTH, _FORMULA, CONC_NO, METHOD), write('On exiting successfully from depth '), print(DEPTH), write(' you will have proved '), ( integer(CONC_NO), write('C'), print(CONC_NO) ; write('') ), !, ( NEWDEPTH = 0, write(' at the *TOP-LEVEL*, by ') ; write(' at the previous depth, by ') ), print(METHOD), put_code(46), nl, !, ( METHOD = 'CASES', write('Progress so far on proof by cases:'), nl, list_case_status(DEPTH) ; true ), !. /* list_deleted_hypotheses */ /* CFR010 */ list_deleted_hypotheses :- /* CFR010 */ \+ deleted(_), /* CFR010 */ !, /* CFR010 */ write('No hypotheses have been deleted.'), /* CFR010 */ nl, /* CFR010 */ fail. /* CFR010 */ list_deleted_hypotheses :- /* CFR010 */ write('DELETED HYPOTHESES :-'), /* CFR010 */ nl, /* CFR010 */ deleted(N), /* CFR010 */ write_deleted_hyp(N), /* CFR010 */ fail. /* CFR010 */ list_deleted_hypotheses :- !. /* CFR010 */ /* list_forgotten_hypotheses */ /* CFR010 */ list_forgotten_hypotheses :- /* CFR010 */ \+ forgotten(_), /* CFR010 */ !, /* CFR010 */ write('No hypotheses have been forgotten.'), /* CFR010 */ nl, /* CFR010 */ fail. /* CFR010 */ list_forgotten_hypotheses :- /* CFR010 */ write('FORGOTTEN HYPOTHESES :-'), /* CFR010 */ nl, /* CFR010 */ forgotten(N), /* CFR010 */ list(h,N), /* CFR010 */ fail. /* CFR010 */ list_forgotten_hypotheses :- !. /* CFR010 */ %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/portray.pro0000644000175000017500000001617711753202340016276 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /* used to print out unbound variables in SICSTUS */ portray('$VAR2'(Num)) :- print('_'), print(Num), !. /* Portray a BINARY operator */ portray(Term):- nonvar(Term), functor(Term, Name_Atom, 2), arg(1,Term, A), arg(2,Term, B), /* Find a BINARY operator that matches */ current_op(Pred,F,Name_Atom), ( F = xfx ; F = yfx ; F = xfy ), /* Find the precedences of sub-terms A and B */ term_pri(A, AP), term_pri(B, BP), ( /* We parenthesize A in two cases: */ /* 1) For xfx operators, we parenthesize if A's functor is */ /* equal or lower in precedence to that of Term/ */ /* 2) If A's principal functor is lower precedence than that of Term, then */ /* we need to parenthesize A. */ /* Note that in ISO PROLOG, operator precedence numbers are SMALLER for */ /* more tightly binding operators, so we need the ">" operator to test */ /* for "lower precedence" here. 
*/ ( F = xfx, /* Case 1 */ AP >= Pred ; F \= xfx, /* Case 2 */ AP > Pred ), write('('), write_term(A, [priority(Pred), portrayed(true), numbervars(true)]), write(')') ; ( F = xfx, AP < Pred /* not Case 1 */ ; F \= xfx, AP =< Pred /* not case 2 */ ), write_term(A, [priority(Pred), portrayed(true), numbervars(true)]) ), write(' '), write(Name_Atom), write(' '), ( /* If B's principal functor is lower or equal precedence than that of Term, then */ /* we need to parenthesize B */ /* This is to re-produce the POPLOG behaviour where multi-term expressions */ /* with equal-precedence operators are printed with the RHS parenthesized */ /* For example, we want "A + (B + C)" NOT "A + B + C" */ BP >= Pred, write('('), write_term(B, [priority(Pred), portrayed(true), numbervars(true)]), write(')') ; BP < Pred, write_term(B, [priority(Pred), portrayed(true), numbervars(true)]) ), !. /* Portray an UNARY operator of type fx or fy */ portray(Term):- nonvar(Term), functor(Term, Name_Atom, 1), arg(1,Term, A), /* Find a UNARY operator that matches */ ( current_op(Pred,fx,Name_Atom) ; current_op(Pred,fy,Name_Atom) ), /* Find the precedences of sub-term A */ term_pri(A, AP), write(Name_Atom), write(' '), ( /* If A's principal functor is lower or equal precedence than that of */ /* Term, then we need to parenthesize A */ AP >= Pred, write('('), write_term(A, [priority(Pred), portrayed(true), numbervars(true)]), write(')') ; AP < Pred, write_term(A, [priority(Pred), portrayed(true), numbervars(true)]) ), !. /* Portray an UNARY operator of type xf or yf */ portray(Term):- nonvar(Term), functor(Term, Name_Atom, 1), arg(1,Term, A), /* Find a UNARY operator that matches */ ( current_op(Pred,xf,Name_Atom) ; current_op(Pred,yf,Name_Atom) ), /* Find the precedences of sub-term B */ term_pri(A, AP), ( /* If A's principal functor is lower precedence than that of Term, then */ /* we need to parenthesize A */ AP > Pred, write('('), write_term(A, [priority(Pred), portrayed(true), numbervars(true)]), write(')') ; AP =< Pred, write_term(A, [priority(Pred), portrayed(true), numbervars(true)]) ), write(' '), write(Name_Atom), !. /* Portray clauses for built-in PROLOG operators */ portray(List) :- (List = [] ; List = [_|_]), !, print_list(List). /* Catch all - functor F, which is not an operator, with non-zero */ /* number of arguments - print as F(Args) with commas */ /* between the arguments. */ portray(X) :- X =.. [F|Args], atomic(F), Args \== [], !, write(F), write('('), print_list1(Args), write(')'), !. /* print_list/1 - prints a list enclosed in [ ] with */ /* comma and space between each element */ print_list(List) :- write('['), print_list1(List), write(']'). print_list1(.(X, A)) :- A == [], !, print(X). /* Special case to deal with the tail of a list which is an unbound */ /* variable generated by mynumbervars/3 */ print_list1(.(X, A)) :- A = '$VAR2'(Num), !, print(X), write(' | _'), print(Num). print_list1(.(X, A)) :- var(A), !, print(X), write(' | '), print(A). print_list1([X|Xs]) :- !, print(X), write(', '), print_list1(Xs). print_list1([]). /* term_pri/2 - returns the precedence of the principal functor of Term */ term_pri(Term, Prio) :- /* Careful here to only look for BINARY operators */ nonvar(Term), functor(Term, Name_Atom, 2), ( current_op(Prio, xfx, Name_Atom) ; current_op(Prio, yfx, Name_Atom) ; current_op(Prio, xfy, Name_Atom) ). 
term_pri(Term, Prio) :- /* Careful here to only look for UNARY operators */ nonvar(Term), functor(Term, Name_Atom, 1), ( current_op(Prio, fx, Name_Atom) ; current_op(Prio, fy, Name_Atom) ; current_op(Prio, xf, Name_Atom) ; current_op(Prio, yf, Name_Atom) ). /* We don't want to parenthesize atoms, literals and so on, so we pretend */ /* that these have a very high precedence */ term_pri(_Term, Prio) :- Prio = 1, !. %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/standard.pro0000644000175000017500000007365011753202340016375 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /******** STANDARDISE: top-level checker command ********/ standardise :- ( command_arg(expression, EXPRESSION) ; prompt_user('STANDARDISE -- Please specify expression to be standardised.','Expression? '), rread(EXPRN), parse_expression(EXPRN, EXPRESSION) ), !, standardise(EXPRESSION). standardise(HORC#N) :- ( HORC=h, HC=hyp, NHC=newhyp, MESSAGE = new_hyp_message(N, F) /* CFR018 */ ; HORC=c, HC=conc, NHC=newconc, MESSAGE = new_conc_message(N, F) /* CFR018 */ ), X=..[HC,N,E], call(X), norm_expr(E,F), stan_display(E,F), ask_if_save(R), ( R=yes, retract(X), Y=..[HC,N,F], assertz(Y), Z=..[HC,N], assertz(logfact(standardisation, Z)), assertz(logfact(NHC, Y)), call(MESSAGE) /* CFR018 */ ; R=no, !, fail ), !. standardise(EXPRESSION) :- checktype(EXPRESSION, TYPE), norm_typed_expr(EXPRESSION, TYPE, NEW_EXPRESSION), stan_display(EXPRESSION, NEW_EXPRESSION), ask_if_save(R), ( R=yes, assertz(logfact(standardisation, [EXPRESSION, NEW_EXPRESSION])), ( TYPE\=boolean, add_new_hyp(EXPRESSION=NEW_EXPRESSION,1) ; TYPE=boolean, ( NEW_EXPRESSION=true, add_new_hyp(EXPRESSION,1) ; NEW_EXPRESSION=false, add_new_hyp(not EXPRESSION,1) ; add_new_hyp(EXPRESSION<->NEW_EXPRESSION,1) ) ) ; R=no, !, fail ), !. /*** STAN_DISPLAY(OLD,NEW) - display pre- & post-standardised expression ***/ stan_display(E,F) :- nl, write('OLD: '), print(E), nl, write('NEW: '), print(F), nl, !. /*** ASK_IF_SAVE(ANSWER) - read "yes" or "no" ANSWER from user ***/ ask_if_save(R) :- repeat, nl, read_answer('Shall I save this result', R), /* CFR002 */ /* until */ (R=yes ; R=no), !. /*-------------------------------------------------------------*/ /* */ /* Integer and Boolean Expression Standardisation Module */ /* */ /*-------------------------------------------------------------*/ /* MAIN PROCEDURE: Normalise an expression of given type */ norm_typed_expr(OLD, TYPE, NEW) :- do_norm_typed_expr(OLD, TYPE, SO_FAR), ( simplify(SO_FAR, NEW) ; NEW = SO_FAR ), !. 
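/* Hedged illustration (not from the original checker sources): the
   STANDARDISE command above reaches norm_typed_expr/3 once checktype/2 has
   determined the type, so a goal such as
       ?- norm_typed_expr(x + x, integer, NEW).
   would be expected to collect the like terms and bind NEW to 2 * x,
   subject to any further rewriting by simplify/2, which is defined
   elsewhere in the checker. */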
do_norm_typed_expr(element(A,I),_T,element(NEWA, NEWI)) :- checktype(A, AT), type(AT, array(INDTYPES, _ELEMTYPE)), do_norm_typed_expr(A, AT, NEWA), do_norm_typed_exprs(I, INDTYPES, NEWI), !. do_norm_typed_expr(update(A, I, X),T,update(NEWA, NEWI, NEWX)) :- type(T, array(INDTYPES, ELEMTYPE)), do_norm_typed_expr(A, T, NEWA), do_norm_typed_exprs(I, INDTYPES, NEWI), do_norm_typed_expr(X, ELEMTYPE, NEWX), !. do_norm_typed_expr(X,INT,X1) :- ( INT=integer ; INT=real ), !, apply(X,X1), !. do_norm_typed_expr(X,boolean,X1) :- !, do_norm_expr(X,X1),!. do_norm_typed_expr(E,_,E) :- atomic(E), !. do_norm_typed_expr(E1,_,E2) :- (\+ atomic(E1)), E1=..[F|Args1], checktypes(Args1,Types), do_norm_typed_exprs(Args1,Types,Args2), EE=..[F|Args2], ( simplify(EE, E2) ; E2=EE ), !. /*** Deal with list of objects and list of their types ***/ do_norm_typed_exprs([],[],[]) :- !. do_norm_typed_exprs([A|A1],[T|T1],[B|B1]) :- do_norm_typed_expr(A,T,B), do_norm_typed_exprs(A1,T1,B1), !. /* FUNCTIONS */ /* ========= */ /* DEFINED_FUNCT succeeds if 'Structure' has the same 'Name' */ /* and 'Arity' as a function defined in the database. The */ /* corresponding 'Arg_types' and 'Result_type' are noted. */ defined_funct(Structure,Arg_types,Result_type) :- functor(Structure,Name,Arity), ( function(Name,Arg_types,Result_type) ; built_in(Name,Arg_types,Result_type) ), length(Arg_types,Arity), !. /* If the arguments of a function are of type 'integer' */ /* or 'boolean' they may be standardised. */ /* Note care in filtering errors when reforming function. */ simp_funct(X ** Y, _, Result) :- /* CFR055 */ simp_args([X, Y], [integer, integer], [XS, YS]), /* CFR055 */ ( /* CFR055 */ YS = 0, /* CFR055 */ Result = 1 /* CFR055 */ ; /* CFR055 */ YS = 1, /* CFR055 */ Result = XS /* CFR055 */ ; /* CFR055 */ YS = 2, /* CFR055 */ Result = XS * XS /* CFR055 */ ; /* CFR055 */ XS = 1, /* CFR055 */ Result = 1 /* CFR055 */ ; /* CFR055 */ XS = 0, /* CFR055 */ integer(YS), /* CFR055 */ YS >= 1, /* CFR055 */ Result = 0 /* CFR055 */ ; /* CFR055 */ Result = (XS ** YS) /* CFR055 */ ), /* CFR055 */ !. /* CFR055 */ simp_funct(X,Arg_types,Y):- X=..[Name|Arg_list], simp_args(Arg_list,Arg_types,Args1), /* simplify arguments. */ Y=..[Name|Args1], !. /* SIMP_ARGS(A,L,A1) : A is a list of arguments, */ /* L is a list of types, */ /* A1 becomes a list of simplified arguments */ simp_args([X],[Type],[X1]) :- is_a_valid_type(Type), !, norm_typed_expr(X,Type,X1), !. simp_args([X],[_],[X1]) :- checktype(X, Type), !, norm_typed_expr(X,Type,X1), !. simp_args([X|Y],[Type|T],[X1|Y1]) :- is_a_valid_type(Type), !, norm_typed_expr(X,Type,X1), !, simp_args(Y,T,Y1), !. simp_args([X|Y],[_|T],[X1|Y1]) :- checktype(X, Type), !, norm_typed_expr(X,Type,X1), !, simp_args(Y,T,Y1), !. /*------------------------ END of functions ---------------------------*/ /* *** S T A N D A R D F O R M S *** */ /* ===================================== */ /* Puts arithmetic expressions in standard form. */ /* Integer expression standardisation - main predicates */ apply(A,B,C,D,E,F,G,H,I,J,K,L) :- cv(A,B), sp(B,C), leftint(C,D), oneint(D,E), createlist(E,F), sortlist(F,G), busort_prods(G,H), compress(H,I), nozeros(I,J), form_expr(J,K), tidy(K,L). apply(A,L) :- apply(A,_,_,_,_,_,_,_,_,_,_,L). /* - To produce standard form with first product_term positive */ /* and sign outside. eg. + or -(a - b + etc.) */ standard(A,M) :- cv(A,B), sp(B,C), leftint(C,D), oneint(D,E), createlist(E,F), sortlist(F,G), busort_prods(G,H), compress(H,I), nozeros(I,J), form_expr(J,K), sign(K,L), tidy(L,M). 
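/* Hedged illustration (not from the original checker sources): apply/2 and
   standard/2 run the same stages below; standard/2 simply adds the sign/2
   pass, which keeps the first product term positive and moves the overall
   sign outside.  For example,
       ?- apply(b - a, A).     would be expected to give A = -a + b, whereas
       ?- standard(b - a, S).  would be expected to give S = -(a - b). */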
/* 'Types' */ /*** S_ATOMIC(X) - succeeds if X is an atom or a signed integer ***/ s_atomic(X) :- ( atom(X) ; (integer(X), X>=0) ), !. s_atomic(-X) :- integer(X), X>=0, !. /*** S_INTEGER(X) - succeeds if X is a signed integer ***/ s_integer(X) :- integer(X), X>=0, !. s_integer(-X) :- integer(X), X>=0, !. /*** TERM(X) - define a 'Term' ***/ term(update(_,_,_)) :- !. term(element(_,_)) :- !. term(_X div _Y) :- !. term(_X mod _Y) :- !. term(odd(_X)) :- !. term(abs(_X)) :- !. term(sqr(_X)) :- !. term(first(_X)) :- !. term(last(_X)) :- !. term(nonfirst(_X)) :- !. term(nonlast(_X)) :- !. term(length(_X)) :- !. term(_X @ _Y) :- !. term(pred(_X)) :- !. term(last(_X)) :- !. term(_X \/ _Y) :- !. term(_X /\ _Y) :- !. term(_X \ _Y) :- !. term(_X in _Y) :- !. term(_X not_in _Y) :- !. term(_X subset_of _Y) :- !. term(_X strict_subset_of _Y) :- !. term(set _X) :- !. term([_X|_Y]) :- !. term(_X ** _Y) :- !. /* CFR055 */ term(X) :- s_atomic(X), !. term(X) :- record_function(_, X, _, _, _, _), !. /* CFR029 */ term(X) :- function_template(X, _, _), !. term(X) :- X=..[N|_], function(N,_,_), !. /*** PRODUCT(X) - define a 'Product' ***/ product(X*Y) :- !, product(X), product(Y), !. product(X) :- term(X), !. /* STAGE 1 */ /* Multiplies out expression to give sum of products form */ /* Simplifies all occurances of X div Y & X mod Y & */ /* functions. Also checks for illegal expressions */ cv(X,_) :- var(X), !, fail. cv(X*Y,A) :- cv(X,X1), cv(Y,Y1), multiply_out(X1,Y1,A), !. cv(X+Y,X1+Y1) :- cv(X,X1), cv(Y,Y1), !. cv(X-Y,X1+Y1) :- cv(X,X1), cv(Y*(-(1)),Y1), !. cv(X div Y,A) :- standard(X,X1), standard(Y,Y1), simp_num(X1 div Y1,A), !. cv(X mod Y,A) :- standard(X,X1), standard(Y,Y1), simp_num(X1 mod Y1,A), !. cv(element(A,X),Y) :- checktype(element(A,X),T), !, do_norm_typed_expr(element(A,X),T,Y), !. cv(X,X) :- s_atomic(X), !. cv(INT,-NEGINT) :- integer(INT), INT<0, NEGINT is -INT, !. cv(-X,Y) :- cv(X*(-(1)),Y), !. cv(X,Y) :- defined_funct(X,Arg_types,_), simp_funct(X,Arg_types,Y), !. cv(X,X). /* catch-all */ /* Multiply out two expressions to form a sum of products */ /* Note the input expressions are in sum of products form */ multiply_out(X1+X2,Y1+Y2,A1+A2+A3+A4):- multiply_out(X1,Y1,A1), multiply_out(X1,Y2,A2), multiply_out(X2,Y1,A3), multiply_out(X2,Y2,A4), !. multiply_out(X,Y1+Y2,A1+A2) :- product(X), multiply_out(X,Y1,A1), multiply_out(X,Y2,A2), !. multiply_out(X1+X2,Y,A1+A2) :- product(Y), multiply_out(X1,Y,A1), multiply_out(X2,Y,A2), !. multiply_out(X,Y,X*Y) :- product(X), product(Y), !. /* SIMP_NUM(X div Y,Z) - simplify term if possible; X & Y are in std form ***/ simp_num(X div Y,Z) :- s_integer(X), s_integer(Y), Z iss X div Y, !. simp_num(_X div 0,_) :- !, fail. simp_num(X div 1,Y):- simp_num(X,Y), !. simp_num((X div Y) div Z,B) :- standard(Y*Z,A), simp_num(X div A,B), !. simp_num((-X) div (-Y),Z) :- simp_num(X div Y,Z), !. simp_num((-X) div Y,A*(-(1))) :- simp_num(X div Y,A), !. simp_num(X div (-Y),A*(-(1))) :- simp_num(X div Y,A), !. /*** SIMP_NUM(X mod Y,Z) - simplify term: X & Y are already in std form */ simp_num(X mod Y,Z) :- s_integer(X), s_integer(Y), Z iss X mod Y, !. simp_num(_X mod 0,_) :- !, fail. simp_num(_X mod 1,0) :- !. simp_num(X,X) :- !. /* Catch all */ /* STAGE 2 */ /*** SP(OLD,NEW) - remove redundant brackets ***/ sp(X*(Y*Z),A) :- sp(X*Y*Z,A), !. sp(X*Y,Z*Y) :- term(Y), sp(X,Z), !. sp(X+(Y+Z),A) :- sp(X+Y+Z,A), !. sp(X+Y,X1+Y1) :- product(Y), sp(Y,Y1), sp(X,X1), !. sp(X,X) :- term(X), !. 
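/* Hedged illustration (not from the original checker sources): the bracket
   removal above is purely structural, e.g.
       ?- sp(a*(b*c), X).   is expected to give X = a*b*c, and
       ?- sp(a+(b+c), Y).   is expected to give Y = a+b+c,
   leaving products and sums in the left-associated form assumed by the
   later stages. */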
/* STAGE 3 */ /*** LEFTINT(OLD,NEW) - for each product move all integers to the left ***/ leftint(X*Y,A) :- s_integer(Y), !, ( term(X), A=Y*X ; leftint(X,B), sp(Y*B,A) ), !. leftint(X*Y,Z*Y) :- leftint(X,Z), !. /* implicit: (\+ s_integer(Y)) */ leftint(X+Y,X1+Y1) :- leftint(X,X1), leftint(Y,Y1), !. leftint(X,X) :- term(X), !. /* STAGE 4 */ /*** ONEINT(OLD,NEW) - evaluate integer part of product ***/ oneint(X*Y,A) :- s_integer(Y), A iss X*Y, !. oneint(X*Nonint,Z*Nonint) :- oneint(X,Z), !. oneint(X+Y,X1+Y1) :- oneint(X,X1), oneint(Y,Y1), !. oneint(X,X) :- term(X), !. /* STAGE 5 */ /*** CREATELIST(OLD,NEW) - form a list of product terms ***/ createlist(X+Y,[Y|Z]) :- createlist(X,Z), !. createlist(X,[X]) :- product(X), !. /* STAGE 6 */ /* SORTLIST(OLD,NEW) - for each product, order the list of terms ***/ sortlist([X1|Y1],[X2|Y2]) :- sortprod(X1,X2), sortlist(Y1,Y2), !. sortlist([],[]) :- !. /*** SORTPROD(OLD,NEW) - sort a product term ***/ sortprod(X,Y) :- list_terms(X,Z), /* Put in list form */ busort_terms(Z,W), /* Bubble sort the terms */ list_terms(Y,W), !. /* Reform product */ /*** LIST_TERMS(X,XLIST) - form a list of terms from a product term ***/ list_terms(X*Y,[Y|Z]) :- list_terms(X,Z), !. list_terms(X,[X]) :- term(X), !. /*** BUSORT_TERMS(OLD,NEW) - bubblesort the list of terms ***/ busort_terms(L,S) :- gen_append(X,[A,B|Y],L), order_terms(B,A), gen_append(X,[B,A|Y],M), busort_terms(M,S), !. busort_terms(L,L) :- !. /*** GEN_APPEND(L1,L2,LL) - general (i.e. nondeterministic) append ***/ gen_append([],L,L). gen_append([H|T],L,[H|V]) :- gen_append(T,L,V). /*** ORDER_TERMS(OLD,NEW) - succeeds if arguments are in required order ***/ order_terms(A div B,C div B) :- /* ORDER 'div' EXPRNS */ !, order_exprs(A,C), !. order_terms(_A div B,_C div D) :- !, order_exprs(B,D), !. order_terms(_A div _B,_C mod _D) :- !. order_terms(_A mod _B,_C div _D) :- /* ORDER mod EXPRNS */ !, fail. order_terms(A mod B,C mod B) :- !, order_exprs(A,C), !. order_terms(_A mod B,_C mod D) :- !, order_exprs(B,D), !. order_terms(A,B) :- s_atomic(A), !, ( s_atomic(B), !, less(A,B) ; true ), !. order_terms(_A,B) :- s_atomic(B), /* implicit: (\+ s_atomic(A)) */ !, fail. order_terms(A,B) :- ( defined_funct(A,_,_) ; A = element(_,_) ; A = update(_,_,_) ), \+ ( defined_funct(B,_,_) ; B = element(_,_) ; B = update(_,_,_) ), !. order_terms(A,B) :- !, A=..Function1, /* put functions */ B=..Function2, /* in list form */ order_functs(Function1,Function2), !. /* less(A,B) succeeds if the s_atomic A is ordered before B */ /* - integers first, then atoms in alphabetic order - */ /* Note : if A=B , the predicate fails. */ less(Y,X) :- s_integer(Y), !, \+ s_integer(X). less(_Y,X) :- s_integer(X), !, fail. less(Y,X) :- Y @< X, !. /*** ORDER_EXPRS(A,B) - succeeds if expressions A & B are in right order ***/ order_exprs(X,Y) :- listexp(X,X1), /* write X as a list of products */ listexp(Y,Y1), /* write Y as a list of products */ orderlist(X1,Y1), !. /* order by the products in the lists */ /*** LISTEXP(E,LP) - write expression E as a list LP of products ***/ listexp(A,I) :- cv(A,B), sp(B,C), leftint(C,D), oneint(D,E), createlist(E,F), sortlist(F,G), busort_prods(G,H), compress(H,I), !. /*** ORDERLIST(A,B) - order expressions by the products in them ***/ orderlist(_,[]) :- !, fail. orderlist([],_) :- !. orderlist([H|T1],[H|T2]) :- !, orderlist(T1,T2), !. orderlist([H1|_T1],[H2|_T2]) :- list_terms(H1,L1), /* implicit: H1\=H2 */ list_terms(H2,L2), !, order(L1,L2), !. 
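/* Hedged illustration (not from the original checker sources): less/2 above
   puts integers before atoms and otherwise orders atoms alphabetically, so
       ?- less(2, x).   and   ?- less(a, b).   are expected to succeed,
   while ?- less(b, a).  and  ?- less(a, a).   are expected to fail; this is
   the ordering that busort_terms/2 and order_exprs/2 build on. */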
/*** ORDER_FUNCTS - Functions are ordered first by the predicate name ***/ /*** then by the arguments. Here the functions are in ***/ /*** list form : predicate followed by arguments. ***/ order_functs([Name|Arg_list1],[Name|Arg_list2]) :- !, orderargs(Arg_list1,Arg_list2), !. order_functs([Name1|_],[Name2|_]) :- less(Name1,Name2), !. /* Equal functions are ordered by the arguments they contain. */ /* Only expressions of type 'integer' can be compared, */ orderargs([Arg|A],[Arg|B]) :- !, orderargs(A,B), !. orderargs([Arg1|_],[Arg2|_]) :- checktype(Arg1, integer), checktype(Arg2, integer), !, order_exprs(Arg1,Arg2), !. orderargs([Arg1|_Rest1], [Arg2|_Rest2]) :- /* 24/1/91: Last ditch? */ !, Arg1 @< Arg2. /* STAGE 7 */ /*** BUSORT_PRODS(OLD,NEW) - order the list of products itself ***/ busort_prods(L,S) :- gen_append(X,[A,B|Y],L), order_prods(B,A), gen_append(X,[B,A|Y],M), busort_prods(M,S), !. busort_prods(L,L) :- !. /*** ORDER_PRODS(A,B) - succeeds if product terms A & B are in right order ***/ order_prods(A,B) :- essence(A,A1,_), list_terms(A1,L1), essence(B,B1,_), list_terms(B1,L2), order(L1,L2), !. /*** ORDER(OLD,NEW) - lists of terms are ordered by the terms they contain ***/ order(_,[]) :- !, fail. order([],_) :- !. order([H|T1],[H|T2]) :- !, order(T1,T2), !. order([I1|_T1],[I2|_T2]) :- s_integer(I1), s_integer(I2), !, I1 < I2. order([H1|_T1],[H2|_T2]) :- order_terms(H1,H2), !. /* STAGE 8 */ /*** COMPRESS(OLD,NEW) - add integers together & add similar product terms ***/ compress([X,Y|Z],R) :- s_integer(X), s_integer(Y), T iss X+Y, compress([T|Z],R), !. compress([X,Y|Z],R) :- essence(X,A,M), essence(Y,A,N), Sum iss M+N, compress([A*Sum|Z],R), !. compress([X,Y|Z],R) :- compress([Y|Z],S), compress([X],[C]), R= [C|S], !. compress([X*1],[X]) :- !. compress([X],[X]) :- !. compress([],[]) :- !. /*** ESSENCE(XI,X,I) - gives integer part I & remainder X of expression ***/ essence(X*Y,X,Y) :- s_integer(Y), !. essence(X*Y,X*Y,1) :- term(Y), !. /* implicit: (\+ s_integer(Y)) */ essence(X,[],X) :- s_integer(X), !. essence(X,X,1) :- term(X), !. /* implicit: (\+ s_integer(X)) */ /* STAGE 9 */ /*** NOZEROS - Remove all zero product terms ***/ /*** Note: (a div b)*0 can NOT be removed as b may be zero ***/ nozeros([X*Y*A|T],Z) :- zero(A), s_atomic(Y), nozeros([X*A|T],Z), !. nozeros([X*A|T],Z) :- zero(A), s_atomic(X), nozeros(T,Z), !. nozeros([A|T],Z) :- zero(A), !, nozeros(T,Z), !. nozeros([X|T],[X|Z]) :- nozeros(T,Z), !. /* implicit: (\+ zero(X)) */ nozeros([],[]) :- !. /*** ZERO(X) - tests to see if X is zero, i.e. either "0" or "-0" ***/ zero(0). zero(-(0)). /* STAGE 10 */ /*** FORM_EXPR(L,E) - reform expression E from list L ***/ form_expr([X],Y) :- reorder(X,Y), !. form_expr([H|T],Z) :- reorder(H,X), form_expr(T,Y), sp1(X+Y,Z), !. form_expr([],0) :- !. /*** REORDER(X,Y) - rewrite product X the right way round to get Y ***/ reorder(X*1,Y) :- reorder(X,Y), !. reorder(X*(-Y),-Z) :- integer(Y), reorder(X*Y,Z), !. reorder(X*Y,Z) :- reorder(X,A), sp(Y*A,Z), !. reorder(X,X) :- term(X), !. /*** SP1(X,Y) - use associativity to get rid of brackets in X giving Y ***/ sp1(X+(-Y),X1+(-Y)) :- product(Y), sp1(X,X1), !. sp1(X+Y,X1+Y) :- product(Y), sp1(X,X1), !. sp1(X+(Y+Z),A) :- sp1(X+Y+Z,A), !. sp1(-X,-X) :- product(X). sp1(X,X) :- product(X). /*** SIGN - If the sign of the leftmost product term is minus then ***/ /*** change the sign of every product term and enclose the ***/ /*** whole expression with the unary minus operator ***/ sign(X+Y,-(X1+Y1)) :- sign(X,-X1), changesign(Y,Y1), !. sign(X+Y,X+Y):- !. 
sign(X,X) :- product(X),!. sign(-X,-X) :- product(X),!. /*** CHANGESIGN(X,MINUSX) - change the sign of X to get MINUSX ***/ changesign(-X,X) :- !. changesign(X,-X) :- !. /*** TIDY - converts +(-product_term) to -product_term. ***/ tidy(X+(-Y),Z-Y) :- tidy(X,Z), !. tidy(X+Y,Z+Y) :- tidy(X,Z), !. tidy(-X,-Y) :- tidy(X,Y), !. tidy(X,X) :- product(X), !. /**** ------- END of integer expression standardiser ------- ****/ /* STANDARDISE BOOLEAN EXPRESSIONS */ /* -------------------------------- */ /* Reduce relational exprs. in boolean exprs. to standard form, */ /* and simplify exprs. such as 'true or false' to 'true'. */ /* also rewrite exprs such as */ /* 'a and (b and c)' to 'a and b and c'. */ /* Standardise the arguments of quantifers & boolean functions. */ /*** NORM_EXPR(OLD,NEW) - normalise boolean expression OLD to get NEW ***/ norm_expr(OLD, NEW) :- do_norm_expr(OLD, SO_FAR), ( simplify(SO_FAR, NEW) ; NEW = SO_FAR ), !. do_norm_expr(E,_):- var(E), nl, write('<<< ERROR: illegal variable >>>'), nl, !, fail. do_norm_expr(true,true):- !. do_norm_expr(false,false):- !. do_norm_expr(E,E) :- atomic(E), !. /* boolean variable */ /* Write all integer relational expressions in the form: */ /* expr = 0 ; expr <> 0 ; expr > 0. */ do_norm_expr(A=B,N) :- checktype(A,TYPE), ( TYPE=integer ; TYPE=real ), !, /* just in case */ standard(A-B,E), /* write +;- N[A-B] => E , */ /* where 1st term of N[A-B] is without - */ simp_rel(E=0,N), !. /* simplify where possible */ do_norm_expr(A=B,Z) :- checktype(A,T), do_norm_typed_expr(A,T,X), do_norm_typed_expr(B,T,Y), ( (X=Y, Z=true) ; Z=(X=Y) ), !. do_norm_expr(A<>B,N):- checktype(A,TYPE), ( TYPE=integer ; TYPE=real ), !, /* just in case */ standard(A-B,E), /* write in form +;- N[A-B] */ /* where 1st term of N[A-B] is without - */ simp_rel(E<>0,N), !. /* simplify where possible */ do_norm_expr(A<>B,Z) :- checktype(A,T), do_norm_typed_expr(A,T,X), do_norm_typed_expr(B,T,Y), ( (X=Y, Z=false) ; Z=(X<>Y) ), !. do_norm_expr(A>=B,N):- checktype(A,integer), checktype(B,integer), !, apply(A-B+1,E), /* write N[A-B+1] as E */ simp_rel(E>0,N), !. /* and simplify E>0. */ do_norm_expr(A>=B,N) :- do_norm_expr(B<=A,N), !. do_norm_expr(A<=B,N):- checktype(A,integer), checktype(B,integer), !, apply(B-A+1,E), /* write N[B-A+1] as E */ simp_rel(E>0,N), !. /* and simplify E>0. */ do_norm_expr(A<=B,N):- checktype(A,real), checktype(B,real), !, apply(A-B,E), /* write N[A-B+1] as E */ simp_rel(E<=0,N), !. /* and simplify E>0. */ do_norm_expr(A<=B,N) :- checktype(A,T), do_norm_typed_expr(A,T,X), do_norm_typed_expr(B,T,Y), ( X=Y, N=true ; N=(X<=Y) ), !. do_norm_expr(A0,N), !. /* and simplify E>0 */ do_norm_expr(AB,N) :- do_norm_expr(BB,N), !. do_norm_expr(not(A<>B),N) :- do_norm_expr(A=B,N), !. do_norm_expr(not(A=B,N), !. do_norm_expr(not(A>B),N) :- do_norm_expr(A<=B,N), !. do_norm_expr(not(A<=B),N) :- do_norm_expr(A>B,N), !. do_norm_expr(not(A>=B),N) :- do_norm_expr(A B,A1 -> B1):- do_norm_expr(A,A1), do_norm_expr(B,B1), !. do_norm_expr(A <-> B,A1 <-> B1) :- do_norm_expr(A,A1), do_norm_expr(B,B1), !. /* Quantifiers: reduce boolean expressions to normal form */ /* and simplify whole expression where possible, */ /* e.g. 'for_all(x:integer,true)' is equivalent to 'true' */ do_norm_expr(for_all(X:T, Exp), for_all(X:T, E1)) :- find_core_type(T, CT), !, ( var_const(X, CT, _), !, do_norm_expr(Exp, E1) ; asserta(var_const(X, CT, p)), do_norm_expr(Exp, E1), retract(var_const(X, CT, p)) ; retract(var_const(X, CT, p)), fail ), !. 
do_norm_expr(for_some(X:T, Exp),for_some(X:T, E1)) :- find_core_type(T, CT), !, ( var_const(X, CT, _), !, do_norm_expr(Exp, E1) ; asserta(var_const(X, CT, p)), do_norm_expr(Exp, E1), retract(var_const(X, CT, p)) ; retract(var_const(X, CT, p)), fail ), !. do_norm_expr(update(A,I,X),Y) :- checktype(A,T), !, do_norm_typed_expr(update(A,I,X),T,Y), !. do_norm_expr(A,B) :- defined_funct(A,Arg_types,boolean), simp_funct(A,Arg_types,B), !. /*** SIMP_REL(E=0,S) - simplify equality expressions where possible ***/ simp_rel(0=0,true) :- !. simp_rel(A=0,false) :- s_integer(A), \+ (0 is A), !. simp_rel(-A=0,A=0) :- !. simp_rel(A=0,A=0) :- !. /* catch all */ /*** SIMP_REL(E<>0,S) - simplify inequality expressions where possible ***/ simp_rel(0<>0,false) :-!. simp_rel(A<>0,true) :- integer(A), \+ (0 is A), !. simp_rel(-A<>0,A<>0) :- !. simp_rel(A<>0,A<>0) :- !. /* catch all */ /*** SIMP_REL(E>0,S) - simplify greater-than expressions where possible ***/ simp_rel(-A>0,false):- integer(A), A>=0, !. simp_rel(0>0,false) :- !. simp_rel(A>0,true) :- integer(A), A\=0, !. simp_rel(A>0,A>0) :- !. /* catch all */ /*** SIMP_REL(E<=0,S) - simplify >= expressions (reals) where possible ***/ simp_rel(0<=0,true) :- !. simp_rel(A<=0,TRUTH) :- intexp(A), _VAL iss A, ( A =< 0, TRUTH = true ; A > 0, TRUTH = false ), !. simp_rel(A<=0,A<=0) :- !. /* catch all */ is_a_valid_type(integer). is_a_valid_type(boolean). is_a_valid_type(real). is_a_valid_type(X) :- type(X, _). built_in((+), [integer, integer], integer). built_in((-), [integer, integer], integer). built_in((*), [integer, integer], integer). built_in((div), [integer, integer], integer). built_in((mod), [integer, integer], integer). built_in((**), [integer, integer], integer). /* CFR055 */ built_in((-), [integer], integer). built_in((/), [real, real], real ). built_in(abs, [integer], integer). built_in(sqr, [integer], integer). built_in((=), ['ANY', 'ANY'], boolean). built_in((<>), ['ANY', 'ANY'], boolean). built_in((>), ['ANY', 'ANY'], boolean). built_in((<), ['ANY', 'ANY'], boolean). built_in((>=), ['ANY', 'ANY'], boolean). built_in((<=), ['ANY', 'ANY'], boolean). built_in((not), [boolean], boolean). built_in((and), [boolean, boolean], boolean). built_in((or), [boolean, boolean], boolean). built_in((->), [boolean, boolean], boolean). built_in((<->), [boolean, boolean], boolean). built_in(odd, [integer], boolean). built_in(pred, ['ANY'], 'ANY' ). built_in(succ, ['ANY'], 'ANY' ). built_in(length, ['ANY'], integer). built_in(first, ['ANY'], 'ANY' ). built_in(last, ['ANY'], 'ANY' ). built_in(nonfirst, ['ANY'], 'ANY' ). built_in(nonlast, ['ANY'], 'ANY' ). built_in((@), ['ANY', 'ANY'], 'ANY' ). built_in((\/), ['ANY', 'ANY'], 'ANY' ). built_in((/\), ['ANY', 'ANY'], 'ANY' ). built_in((/), ['ANY', 'ANY'], 'ANY' ). built_in((in), ['ANY', 'ANY'], boolean). built_in((not_in), ['ANY', 'ANY'], boolean). built_in((subset_of), ['ANY', 'ANY'], boolean). built_in((strict_subset_of), ['ANY', 'ANY'], boolean). built_in('.', ['ANY', 'ANY'], 'ANY' ). 
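/* Hedged illustration (not from the original checker sources): defined_funct/3
   above falls back on this built_in/3 table when a function is not
   user-declared, so for example
       ?- built_in((and), Args, Result).
   is expected to bind Args = [boolean, boolean] and Result = boolean, and
       ?- built_in(odd, Args, Result).
   is expected to bind Args = [integer] and Result = boolean. */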
%############################################################################### %END-OF-FILE spark-2012.0.deb/checker/simplify.pro0000644000175000017500000000637211753202340016426 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /*** SIMPLIFY -- top-level checker command ***/ simplify :- movenots ; split_hyps ; split_concs ; do_implication ; equivalence. simplify :- !. /*** MOVENOTS - move nots as far into expressions as possible ***/ movenots:- retractall(hn(_)), assertz(hn(1)), repeat, hn(N), ( gethyp(N,H), negin(H,H1), assertz(hyp(N,H1)), ( H1=H ; H1\=H, assertz(logfact(newhyp, hyp(N,H1))), new_hyp_message(N, H1) /* CFR018 */ ) ; hn(N) ), M is N+1, retract(hn(N)), assertz(hn(M)), bigger_than_all_hyps(M), !, fail. bigger_than_all_hyps(M) :- hyp(N,_), N>=M, !, fail. bigger_than_all_hyps(_) :- !. /*** GETHYP(N,H) - instantiates H to the Nth hypothesis & retracts it ***/ gethyp(N,H) :- hyp(N,H), retract(hyp(N,H)), !. /*** SPLIT_HYPS - split conjunction hypotheses into two or more hypotheses ***/ split_hyps :- retract(hyp(_N,A and B)), add_new_hyp(A,1), add_new_hyp(B,1), !, split_hyps. /*** SPLIT_CONCS - split conjunction conclusions into separate conclusions ***/ split_concs :- retract(conc(_N,A and B)), add_new_conc(A,1), add_new_conc(B,1), !, split_concs. /*** DO_IMPLICATION - given "A" and "A -> B", add "B" as a hypothesis; given "not B" and "A -> B", add "not A" as a hypothesis ***/ /*** ADDED 4/3/85 -- needs improvement for generality ***/ do_implication :- hyp(_,A -> B), hyp(_,A), add_new_hyp(B,1), fail. do_implication :- hyp(_,A -> B), hyp(_,(not B)), negin((not A),C), add_new_hyp(C,1), fail. /*** EQUIVALENCE - given "A" and "A <-> B" or "B <-> A", add "B"; given "not A" and "A <-> B" or "B <-> A", add "not B" ***/ equivalence :- hyp(_,A <-> B), hyp(_,A), add_new_hyp(B,1), fail. equivalence :- hyp(_,A <-> B), hyp(_,B), add_new_hyp(A,1), fail. equivalence :- hyp(_,A <-> B), hyp(_,not(A)), negin(not(B),C), add_new_hyp(C,1), fail. equivalence :- hyp(_,A <-> B), hyp(_,not(B)), negin(not(A),C), add_new_hyp(C,1), fail. 
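/* Illustrative sketch (added for exposition; a and b are hypothetical      */
/* identifiers):  given the single hypothesis                               */
/*     hyp(1, a and (a -> b))                                               */
/* movenots first pushes any negations inwards (negin/2, defined            */
/* elsewhere); split_hyps then retracts hypothesis 1 and re-adds its two    */
/* conjuncts via add_new_hyp/2 (also defined elsewhere), giving the         */
/* hypotheses  a  and  a -> b;  do_implication then notices both  a  and    */
/* a -> b  and adds  b  as a further hypothesis.                            */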
%############################################################################### %END-OF-FILE spark-2012.0.deb/checker/deduction.pro0000644000175000017500000011227111753202340016544 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /****** INTEGER TEST ******/ int(X) :- (integer(X) ; (X=(-Y), integer(Y))), !. /****** SIMPLE INFERENCE ******/ testused(X=Y) :- ( used(X=Y) ; used(Y=X) ), !, fail. testused(X=Y) :- ( ((\+ X=Y), (\+ used(X=Y)), (\+ used(Y=X)), assertz(used(X=Y)) ) ; ( retract(used(X=Y)) ; retract(used(Y=X)) ), !, fail ). testused(X>=Y) :- used(X>=Y), !, fail. testused(X>=Y) :- ( (\+ used(X>=Y)), assertz(used(X>=Y)) ; retract(used(X>=Y)), !, fail ). fact(X) :- hyp(_,X). fact(X=Y) :- hyp(_,not X<>Y) ; hyp(_,Y=X) ; hyp(_,not Y<>X). fact(X<>Y) :- hyp(_,not X=Y) ; hyp(_,Y<>X) ; hyp(_,not Y=X). fact(X>Y) :- hyp(_,not X<=Y) ; hyp(_,Y=X). fact(X=Y) ; hyp(_,Y>X) ; hyp(_,not Y<=X). fact(X>=Y) :- hyp(_,not XX). fact(X<=Y) :- hyp(_,not X>Y) ; hyp(_,Y>=X) ; hyp(_,not Y=Y), fact(X<=Y). infrule(X<>Y) :- fact(X>Y) ; fact(XY) :- fact(X>=Y), fact(X<>Y). infrule(XY). infrule(X>=Y) :- (fact(X=Y), testused(X=Y)) ; fact(X>Y). infrule(X<=Y) :- (fact(X=Y), testused(X=Y)) ; fact(XX-B,T) :- deduce(A<>B,T). deduce(X+A<>X+B,T) :- deduce(A<>B,T). /* CFR013 */ /* Alternative representations of N+X<>Y, where N is an integer. */ /* Each is converted into N+X<>Y form and the code for X+Y=\Z */ /* on the next page used to attempt the proof. The conditions */ /* used prevent infinite looping. */ deduce(X+N<>Y,T) :- int(N), (\+ int(X)), deduce(N+X<>Y,T). deduce(X-N<>Y,T) :- int(N), evaluate(-N,M), deduce(M+X<>Y,T). deduce(Y<>X-N,T) :- int(N), evaluate(-N,M), deduce(M+X<>Y,T). deduce(N-X<>Y,T) :- int(N), evaluate(-X,Z), deduce(N+Z<>Y,T). deduce(Y<>N+X,T) :- int(N), Y\=_+_, Y\=_-_, deduce(N+X<>Y,T). deduce(Y<>X+N,T) :- int(N), Y\=_+_, Y\=_-_, deduce(N+X<>Y,T). deduce(Y<>N-X,T) :- int(N), Y\=_+_, Y\=_-_, evaluate(-X,Z), deduce(N+Z<>Y,T). deduce(X+Y<>N,T) :- int(N), \+ int(X), \+ int(Y), evaluate(-X,Z), deduce(N+Z<>Y,T). deduce(X-Y<>N,T) :- int(N), \+ int(X), \+ int(Y), deduce(N+Y<>X,T). 
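/* Illustrative sketch (added for exposition; a and b are hypothetical      */
/* identifiers):  the clauses above funnel the various surface forms into   */
/* the single canonical shape  N+X <> Y  with N an integer constant.  For   */
/* example the goal  deduce(a-3 <> b, integer)  matches the X-N<>Y clause,  */
/* evaluate(-3,M) (the checker's arithmetic evaluator, defined elsewhere)   */
/* gives M = -3, and the goal is retried as  deduce(-3+a <> b, integer),    */
/* which the X+Y<>Z clause below then takes apart.                          */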
/****** INEQUALITY ******/ deduce(X+Y<>Z,T) :- ( infrule(Y+X<>Z) ; infrule(Y<>Z-X) ; evaluate(-X,W), infrule(Y<>W+Z) ; int(X), /* If X is an integer - */ ( simplify(X=0,true), /* - and =0, prove Y<>Z or fail */ ( deduce(Y<>Z,T) ; !, fail ) ; deduce(Y=Z,T) /* - and <>0, prove Y=Z */ ; int(Y), /* - and Y is an integer, sum */ ( /* them & prove sum=Z or fail */ evaluate(Y+X,W), deduce(Z<>W,T) ; !, fail ) ; int(Z), /* - and Z is an integer subtract */ ( /* & prove diff<>Y or fail */ evaluate(Z-X,W), deduce(Y<>W,T) ; !, fail ) ) ; (\+ int(Y)), /* If neither Y nor Z is an */ (\+ int(Z)), /* integer, try finding a */ ( /* substitution for Y and */ nonvar(Y), infrule(Y=W), /* proving resulting inequality. */ testused(Y=W), deduce(X+W<>Z,T) ; !, fail ) ). /****** GREATER-THAN ******/ deduce(X+Y>Z,T) :- ( infrule(Y+X>Z) ; infrule(Y>Z-X) ; evaluate(-X,W), infrule(Y>W+Z) ; int(X), /* If X is an integer - */ ( simplify(X=0,true), /* - and X=0, prove Y>Z or fail */ ( deduce(Y>Z,T) ; !, fail ) ; int(Y), /* - and Y is an integer, sum and */ ( /* prove sum>Z or fail */ evaluate(Y+X,W), deduce(ZW,T) ; !, fail ) ; nonvar(Y), /* - and Y & Z are the same, show */ nonvar(Z), /* X>0 or fail */ Y=Z, ( simplify(X>0,true) ; !, fail ) ; simplify(X>0,true), /* - and X>0, prove Y>=Z */ deduce(Y>=Z,T) ; T=integer, evaluate(X-1,W), /* - subtract 1 to give more */ ( /* equivalent forms to search */ infrule(W+Y>=Z) /* for which use the operator */ ; /* >= instead of >. */ infrule(Y+W>=Z) ; infrule(Y>=Z-W) ; evaluate(-W,V), infrule(Y>=V+Z) ) ) ; (\+ int(Y)), /* If neither Y nor Z are */ (\+ int(Z)), /* integers, try finding a */ ( /* substitution for Y and prove */ ( /* the resulting expression. Two */ nonvar(Y), infrule(Y>W), /* >= followed by >. */ deduce(X+W>=Z,T) ; !, fail ) ; ( nonvar(Y), infrule(Y>=W), testused(Y>=W), deduce(X+W>Z,T) ; !, fail ) ) ). /* Alternate representations of N+X>Y, where N is an integer. */ /* All are converted to a N+X>Y form and the above code used */ /* to attempt the proof. The conditions before altering */ /* expressions are included to prevent infinite loops. */ deduce(X+N>Y,T) :- int(N), (\+ int(X)), deduce(N+X>Y,T). deduce(X-N>Y,T) :- int(N), evaluate(-N,M), deduce(M+X>Y,T). deduce(YY,T). deduce(N-X>Y,T) :- int(N), evaluate(-X,Z), deduce(N+Z>Y,T). deduce(YY,T). deduce(YY,T). deduce(YY,T). deduce(X+YY,T). deduce(X-YX,T). /****** LESS-THAN ******/ deduce(X+YW,T) ; !, fail ) ; int(Z), /* - and Z is an integer, take X */ ( /* away & prove diff>Y or fail */ evaluate(Z-X,W), deduce(YX,true) ; !, fail ) ; simplify(0>X,true), /* - and X<0, prove Y<=Z */ deduce(Y<=Z,T) ; T=integer, evaluate(X+1,W), /* - add 1 to X to give more */ ( /* equivalent forms to search */ infrule(W+Y<=Z) /* for which use the operator */ ; /* <= instead of <. */ infrule(Y+W<=Z) ; infrule(Y<=Z-W) ; evaluate(-W,V), infrule(Y<=V+Z) ) ) ; (\+ int(Y)), /* If neither Y nor Z are */ (\+ int(Z)), /* integers, try finding a */ ( /* substitution for Y and prove */ ( /* the resulting expression. Two */ nonvar(Y), infrule(Y=Y), deduce(X+W>Z,T) ; !, fail ) ) ). 
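/* Illustrative sketch (added for exposition; a is a hypothetical           */
/* identifier):  for the goal  deduce(a+2 > a, T)  with no directly         */
/* matching hypothesis, the main X+Y>Z clause above fails first, the        */
/* X+N>Y clause then retries the goal in canonical form as                  */
/* deduce(2+a > a, T), and this time the "Y and Z are the same" branch of   */
/* the X+Y>Z clause succeeds because  simplify(2>0, true)  holds.           */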
/* Alternate representations of N+XX-N,T) :- int(N), evaluate(-N,M), deduce(M+XN+X,T) :- int(N), Y\=_+_, Y\=_-_, deduce(N+XX+N,T) :- int(N), Y\=_+_, Y\=_-_, deduce(N+XN-X,T) :- int(N), Y\=_+_, Y\=_-_, evaluate(-X,Z), deduce(N+ZN,T) :- int(N), (\+ int(X)), (\+ int(Y)), evaluate(-X,Z), deduce(N+ZN,T) :- int(N), (\+ int(X)), (\+ int(Y)), deduce(N+Y=Z,T) :- ( infrule(Y+X>=Z) ; infrule(Y>=Z-X) ; evaluate(-X,W), infrule(Y>=W+Z) ; int(X), /* If X is an integer - */ ( simplify(X=0,true), /* - and X=0, prove Y>=Z or fail */ ( deduce(Y>=Z,T) ; !, fail ) ; int(Y), /* - and Y is an integer, sum and */ ( /* prove sum>=Z or fail */ evaluate(Y+X,W), deduce(Z<=W,T) ; !, fail ) ; int(Z), /* - and Z is an integer, take */ ( /* away X & show diff<=Y or fail*/ evaluate(Z-X,W), deduce(Y>=W,T) ; !, fail ) ; nonvar(Y), nonvar(Z), /* - and Y & Z are the same, show */ Y=Z, /* X>=0 or fail */ ( simplify(X>=0,true) ; !, fail ) ; simplify(X>=0,true),deduce(Y>=Z,T) /* - and X>=0, prove Y>=Z */ ; T=integer, evaluate(X+1,W), /* - add 1 to X giving more */ ( /* equivalent forms using the */ infrule(W+Y>Z) /* operator > instead of >= */ ; infrule(Y+W>Z) ; infrule(Y>Z-W) ; evaluate(-W,V), infrule(Y>V+Z) ) ) ; (\+ int(Y)), (\+ int(Z)), /* If neither Y nor Z are */ ( /* integers, try finding a */ nonvar(Y), infrule(Y>=W), testused(Y>=W), deduce(X+W>=Z,T) ; !, fail ) ). /* Alternative representations of N+X>=Y where N is an integer. */ /* All are converted into N+X>=Y form and code above is used to */ /* attempt the proof. Conditions before altering expressions */ /* are included to prevent infinite looping. */ deduce(X+N>=Y,T) :- int(N), (\+ int(X)), deduce(N+X>=Y,T). deduce(X-N>=Y,T) :- int(N), evaluate(-N,M), deduce(M+X>=Y,T). deduce(Y<=X-N,T) :- int(N), evaluate(-N,M), deduce(M+X>=Y,T). deduce(N-X>=Y,T) :- int(N), evaluate(-X,Z), deduce(N+Z>=Y,T). deduce(Y<=N+X,T) :- int(N), Y\=_+_, Y\=_-_, deduce(N+X>=Y,T). deduce(Y<=X+N,T) :- int(N), Y\=_+_, Y\=_-_, deduce(N+X>=Y,T). deduce(Y<=N-X,T) :- int(N), Y\=_+_, Y\=_-_, evaluate(-X,Z), deduce(N+Z>=Y,T). deduce(X+Y<=N,T) :- int(N), (\+ int(X)), (\+ int(Y)), evaluate(-X,Z), deduce(N+Z>=Y,T). deduce(X-Y<=N,T) :- int(N), (\+ int(X)), (\+ int(Y)), deduce(N+Y>=X,T). /****** LESS-THAN-OR-EQUALS ******/ /* Alternative representations of N+X>=Y where N is an integer. */ /* All are converted into N+X>=Y form and code above is used to */ /* attempt the proof. Conditions before altering expressions */ /* are included to prevent infinite looping. */ deduce(X+N<=Y,T) :- int(N), (\+ int(X)), deduce(N+X<=Y,T). deduce(X-N<=Y,T) :- int(N), evaluate(-N,M), deduce(M+X<=Y,T). deduce(Y>=X-N,T) :- int(N), evaluate(-N,M), deduce(M+X<=Y,T). deduce(N-X<=Y,T) :- int(N), evaluate(-X,Z), deduce(N+Z<=Y,T). deduce(Y>=N+X,T) :- int(N), Y\=_+_, Y\=_-_, deduce(N+X<=Y,T). deduce(Y>=X+N,T) :- int(N), Y\=_+_, Y\=_-_, deduce(N+X<=Y,T). deduce(Y>=N-X,T) :- int(N), Y\=_+_, Y\=_-_, evaluate(-X,Z), deduce(N+Z<=Y,T). deduce(X+Y>=N,T) :- int(N), (\+ int(X)), (\+ int(Y)), evaluate(-X,Z), deduce(N+Z<=Y,T). deduce(X-Y>=N,T) :- int(N), (\+ int(X)), (\+ int(Y)), deduce(N+Y<=X,T). 
deduce(X+Y<=Z,T) :- ( infrule(Y+X<=Z) ; infrule(Y<=Z-X) ; evaluate(-X,W), infrule(Y<=W+Z) ; int(X), /* If X is an integer - */ ( /* - and X=0, prove Y>=Z or fail */ simplify(X=0,true), ( deduce(Y<=Z,T) ; !, fail ) ; int(Y), /* - and Y is an integer, sum and */ ( /* prove sum>=Z or fail */ evaluate(Y+X,W), deduce(Z>=W,T) ; !, fail ) ; int(Z), /* - and Z is an integer, take */ ( /* away X & show diff<=Y or fail*/ evaluate(Z-X,W), deduce(Y<=W,T) ; !, fail ) ; nonvar(Y), nonvar(Z), /* - and Y & Z are the same, show */ Y=Z, /* X<=0 or fail */ ( simplify(0>=X,true) ; !, fail ) ; simplify(0>=X,true), /* - and X<=0, prove Y>=Z */ deduce(Y<=Z,T) ; T=integer, evaluate(X-1,W), /* - add 1 to X giving more */ ( /* equivalent forms using the */ infrule(W+Y instead of >= */ ; infrule(Y+W=Y), deduce(X+W<=Z,T) ; !, fail ) ). /************ (2) MULTIPLICATION ************/ /****** EQUALITY ******/ deduce(X*Y=Z,T) :- ( infrule(Y*X=Z) ; int(X), /* If X is an integer - */ ( simplify(X=0,true), /* - and X=0, prove Z=0 or fail */ ( deduce(Z=0,T) ; !, fail ) ; simplify(X=1,true), /* - and X=1, prove Y=Z or fail */ ( deduce(Y=Z,T) ; !, fail ) ; simplify(X=(-1),true), /* - and X=-1, prove -Y=Z or fail */ ( evaluate(-Y,W), deduce(W=Z,T) ; !, fail ) ; int(Y), /* - and Y is an integer, multiply*/ ( /* by X & prove prod=Z or fail */ evaluate(X*Y,W), deduce(W=Z,T) ; !, fail ) ; infrule(Y=W), /* - try substituting for Y */ testused(Y=W), deduce(X*W=Z,T) ) ; (\+ int(X)), (\+ int(Y)), int(Z), simplify(Z=0,true), /* If only Z is an integer & Z=0, */ ( /* try to prove either X=0 or */ nonvar(X), deduce(X=0,T) /* Y=0. */ ; nonvar(Y), deduce(Y=0,T) ) ). /* Alternative forms of N*X=Y - converted to this form to use */ /* above code, avoiding looping. */ deduce(X*N=Y,T) :- int(N), deduce(N*X=Y,T). deduce(Y=N*X,T) :- int(N), Y\=_*_, deduce(N*X=Y,T). deduce(Y=X*N,T) :- int(N), Y\=_*_, deduce(N*X=Y,T). /****** INEQUALITY ******/ deduce(X*Y<>Z,T) :- ( infrule(Y*X<>Z) ; int(X), /* If X is an integer - */ ( simplify(X=0,true), /* - and X=0, prove Z<>0 or fail */ ( deduce(Z<>0,T) ; !, fail ) ; simplify(X=1,true), /* - and X=1, prove Y<>Z or fail */ ( deduce(Y<>Z,T) ; !, fail ) ; deduce(Y=Z,T), /* - and X<>1, prove Y=Z<>0 */ ( infrule(Y<>0) ; infrule(Z<>0) ) ; simplify(X=(-1),true), /* - and X=-1, prove -Y<>Z or fail */ ( evaluate(-Y,W), deduce(W<>Z,T) ; !, fail ) ; int(Y), /* - and Y is an integer, multiply*/ ( /* by X & prove prod<>Z or fail */ evaluate(X*Y,W), deduce(W<>Z,T) ; !, fail ) ; infrule(Y=W), testused(Y=W), /* - try substituting for Y */ deduce(X*W<>Z,T) ) ; (\+ int(X)), (\+ int(Y)), int(Z), /* If only Z is an integer - */ ( simplify(Z=0,true), /* - and Z=0, prove neither X nor */ deduce(X<>0,T), deduce(Y<>0,T) /* Y is zero */ ; simplify(Z>0,true), /* - and Z>0, prove X*Y<0, i.e. */ nonvar(X), nonvar(Y), ( /* one is greater than zero and */ deduce(X>0,T), deduce(Y<0,T) /* the other is less than zero */ ; deduce(X<0,T), deduce(Y>0,T) ) ; simplify(0>Z,true), /* - and Z<0, prove X*Y>0, i.e. */ nonvar(X), nonvar(Y), ( /* both X and Y have the same */ deduce(X>0,T), deduce(Y>0,T) /* sign. */ ; deduce(X<0,T), deduce(Y<0,T) ) ) ). /* Alternative forms of N*X<>Y - converted to this form to use above */ deduce(X*N<>Y,T) :- int(N), deduce(N*X<>Y,T). deduce(Y<>N*X,T) :- int(N), Y\=_*_, deduce(N*X<>Y,T). deduce(Y<>X*N,T) :- int(N), Y\=_*_, deduce(N*X<>Y,T). 
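/* Illustrative sketch (added for exposition; a and b are hypothetical      */
/* identifiers):  for the goal  deduce(a*b <> 1, T)  only the right-hand    */
/* side is an integer, and simplify(1>0, true) holds, so the X*Y<>Z clause  */
/* above tries to discharge  deduce(a>0,T) together with deduce(b<0,T)      */
/* (or the same pair with the signs swapped) -- a product of factors of     */
/* opposite sign is negative and so cannot equal the positive constant 1.   */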
/****** GREATER-THAN ******/ deduce(X*Y>Z,T) :- ( infrule(Y*X>Z) ; int(X), /* If X is an integer - */ ( simplify(X=0,true), /* - and X=0, prove Z<0 or fail */ ( deduce(Z<0,T) ; !, fail ) ; simplify(X=1,true), /* - and X=1, prove Y>Z or fail */ ( deduce(Y>Z,T) ; !, fail ) ; simplify(X=(-1),true), /* - and X=-1, prove -Y>Z or fail */ ( evaluate(-Y,W), deduce(W>Z,T) ; !, fail ) ; int(Y), /* - and Y is an integer, multiply*/ ( /* & prove prod>Z or fail */ evaluate(X*Y,W), deduce(W>Z,T) ; !, fail ) ; simplify(X>0,true), /* - try substituting for Y, */ ( /* depending on sign of X. For */ infrule(Y>=W), testused(Y>=W),/* each sign of X, there are two*/ deduce(X*W>Z,T) /* cases: >= then > and > then */ ; /* >= for X>0, or <= then > and */ infrule(Y>W), deduce(X*W>=Z,T) /* < then >= for X<0. */ ) ; simplify(0>X,true), ( infrule(Y<=W), testused(W>=Y), deduce(X*W>Z,T) ; infrule(Y=Z,T) ) ) ; (\+ int(X)), (\+ int(Y)), int(Z), simplify(0>=Z,true), /* If only Z is an integer, and Z */ nonvar(X), nonvar(Y), ( /* is less than or equal to zero, */ deduce(X>0,T), deduce(Y>0,T) /* try proving X*Y product>0, by */ ; /* showing X & Y have same sign. */ deduce(X<0,T), deduce(Y<0,T) ) ). /* Alternative forms of N*X>Y, converted to this form to use above */ deduce(X*N>Y,T) :- int(N), deduce(N*X>Y,T). deduce(YY,T). deduce(YY,T). /****** LESS-THAN ******/ deduce(X*Y0 or fail */ ( deduce(Z>0,T) ; !, fail ) ; simplify(X=1,true), /* - and X=1, prove Y0,true), /* - try substituting for Y, */ ( /* depending on sign of X. For */ infrule(Y<=W), testused(W>=Y), /* each sign of X, there are two*/ deduce(X*W0, or >= then < and */ infrule(Y then <= for X<0. */ ) ; simplify(0>X,true), ( infrule(Y>=W), testused(Y>=W), deduce(X*WW), deduce(X*W<=Z,T) ) ) ; (\+ int(X)), (\+ int(Y)), /* If only Z is an integer, and Z */ int(Z), simplify(Z>=0,true), /* is greater than or equal to 0, */ nonvar(X), nonvar(Y), ( /* try proving X*Y product<0, by */ deduce(X>0,T), deduce(Y<0,T) /* showing X & Y have diff signs. */ ; deduce(X<0,T), deduce(Y>0,T) ) ). /* Alternative forms of N*XN*X,T) :- int(N), Y\=_*_, deduce(N*XX*N,T) :- int(N), Y\=_*_, deduce(N*X=Z,T) :- ( infrule(Y*X>=Z) ; int(X), /* If X is an integer - */ ( simplify(X=0,true), /* - and X=0, prove Z<=0 or fail */ ( deduce(Z<=0,T) ; !, fail ) ; simplify(X=1,true), /* - and X=1, prove Y>=Z or fail */ ( deduce(Y>=Z,T) ; !, fail ) ; simplify(X=(-1),true), /* - and X=-1, prove -Y>=Z or fail*/ ( evaluate(-Y,W), deduce(W>=Z,T) ; !, fail ) ; int(Y), /* - and Y is an integer, multiply*/ ( /* and prove prod>=Z or fail */ evaluate(X*Y,W), deduce(W>=Z,T) ; !, fail ) ; simplify(X>0,true), /* - try finding substitution for */ infrule(Y>=W), testused(Y>=W), /* Y depending on sign of X and */ deduce(X*W>=Z,T) ; simplify(0>X,true), /* prove resulting expression. */ infrule(Y<=W), testused(W>=Y), deduce(X*W>=Z,T) ) ; (\+ int(X)), (\+ int(Y)), /* If only Z is an integer, less */ int(Z), simplify(0>=Z,true), /* than or equal to 0, prove X*Y */ nonvar(X), nonvar(Y), ( /* product greater than or equal */ deduce(X>=0,T), deduce(Y>=0,T) /* to 0. */ ; deduce(X<=0,T), deduce(Y<=0,T) ) ). /* Alternative forms of N*X>=Y - converted to this form to use */ /* above code, avoiding looping. */ deduce(X*N>=Y,T) :- int(N), deduce(N*X>=Y,T). deduce(Y<=N*X,T) :- int(N), Y\=_*_, deduce(N*X>=Y,T). deduce(Y<=X*N,T) :- int(N), Y\=_*_, deduce(N*X>=Y,T). 
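/* Illustrative sketch (added for exposition; b, c and d are hypothetical   */
/* identifiers):  the X*Y>=Z clause above substitutes for Y according to    */
/* the sign of X, since multiplying by a positive factor preserves the      */
/* direction of an inequality.  For the goal  deduce(2*b >= c, T), with a   */
/* hypothesis of the shape  b >= d  retrievable through infrule/1,          */
/* simplify(2>0, true) holds and the goal is reduced to                     */
/* deduce(2*d >= c, T).                                                     */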
/****** LESS-THAN-OR-EQUALS ******/ deduce(X*Y<=Z,T) :- ( infrule(Y*X<=Z) ; int(X), /* If X is an integer - */ ( simplify(X=0,true), /* - and X=0, prove Z>=0 or fail */ ( deduce(Z>=0,T) ; !, fail ) ; simplify(X=1,true), /* - and X=1, prove Y<=Z or fail */ ( deduce(Y<=Z,T) ; !, fail ) ; simplify(X=(-1),true), /* - and X=-1, prove -Y<=Z or fail*/ ( evaluate(-Y,W), deduce(W<=Z,T) ; !, fail ) ; int(Y), /* - and Y is an integer, multiply*/ ( /* and prove prod<=Z or fail */ evaluate(X*Y,W), deduce(W<=Z,T) ; !, fail ) ; simplify(X>0,true), /* - try finding substitution for */ infrule(Y<=W), /* Y depending on sign of X and */ testused(W>=Y), /* prove resulting expression. */ deduce(X*W<=Z,T) ; simplify(0>X,true), infrule(Y>=W), testused(Y>=W), deduce(X*W<=Z,T) ) ; (\+ int(X)), (\+ int(Y)), /* If only Z is an integer greater*/ int(Z), simplify(Z>=0,true), /* than or equal to 0, prove X*Y */ nonvar(X), nonvar(Y), ( /* product less than or equal to */ deduce(X>=0,T), /* 0. */ deduce(Y<=0,T) ; deduce(X<=0,T), deduce(Y>=0,T) ) ). /* Alternative forms of N*X<=Y - converted to this form to use */ /* above code, avoiding looping. */ deduce(X*N<=Y,T) :- int(N), deduce(N*X<=Y,T). deduce(Y>=N*X,T) :- int(N), Y\=_*_, deduce(N*X<=Y,T). deduce(Y>=X*N,T) :- int(N), Y\=_*_, deduce(N*X<=Y,T). /************ (3) GENERAL RULES ************/ /****** EQUALITY ******/ deduce(X=Y,T) :- ( X=Y /* Proved if same expression. */ ; int(X), /* If both ints, equal or fail. */ int(Y), ( simplify(X=Y,true) ; !, fail ) ; infrule(X=Z), /* Try transitive chain. */ testused(X=Z), deduce(Z=Y,T) ). /****** INEQUALITY ******/ deduce(X<>Y,T) :- ( int(X), /* If X is an integer - */ ( int(Y), /* - and Y is an integer, prove */ ( /* not equal or fail */ (\+ simplify(X=Y,true)) ; !, fail ) ; deduce(Y<>X,T) /* - swap sides and prove */ ) ; ( /* Equivalent forms of the goal */ infrule(Z+X=Y) /* giving subgoals */ ; infrule(X+Z=Y) ; infrule(X-Z=Y) ), ( int(Z), (\+ simplify(Z=0,true)) ; infrule(Z<>0) ) ; infrule(X<>Z), deduce(Z=Y,T) ; infrule(X=Z), testused(X=Z), deduce(Z<>Y,T) ). /****** GREATER-THAN ******/ deduce(X>Y,T) :- ( int(X), /* If X is an integer - */ ( int(Y), /* - and Y is an integer, prove */ ( simplify(X>Y,true) ; !, fail ) /* X greater than Y or fail */ ; deduce(Y=Y) /* Equivalent forms of the */ ; /* expression using different */ infrule(X+Z>=Y) /* operator and integer addition */ ), /* or subtraction. */ ( int(Z), simplify(0>Z,true) ; infrule(Z<0) ) ; infrule(X-Z>=Y), ( int(Z), simplify(Z>0,true) ; infrule(Z>0) ) ; infrule(X>=Z), testused(X>=Z), /* Two possible intermediate */ deduce(Z>Y,T) /* steps of deduction. */ ; infrule(X>Z), deduce(Z>=Y,T) ). /****** LESS-THAN ******/ deduce(XX,true) ; !, fail ) /* X is less than Y or fail */ ; deduce(Y>X,T) /* - swap sides then prove. */ ) ; ( infrule(Z+X<=Y) /* Equivalent forms of the */ ; /* expression using different */ infrule(X+Z<=Y) /* operator and integer addition */ ), /* or subtraction. */ ( int(Z), simplify(Z>0,true) ; infrule(Z>0) ) ; infrule(X-Z<=Y), ( int(Z), simplify(0>Z,true) ; infrule(Z<0) ) ; infrule(X<=Z), testused(Z>=X), /* Two possible intermediate */ deduce(Z=Y,T) :- ( int(X), /* If X is an integer - */ ( int(Y), /* - and Y is an integer, prove */ ( /* X>=Y or fail */ simplify(X>=Y,true) ; !, fail ) ; deduce(Y<=X,T) /* - swap sides then prove. */ ) ; nonvar(X), nonvar(Y), X=Y /* Proved if X & Y are the same. 
*/ ; ( infrule(Z+X>=Y) /* Equivalent forms of the */ ; infrule(X+Z>=Y) ), ( /* expression using different */ int(Z), /* operator and integer addition */ simplify(0>=Z,true) /* or subtraction. */ ; infrule(Z<=0) ) ; infrule(X-Z>=Y), ( int(Z), simplify(Z>=0,true) ; infrule(Z>=0) ) ; infrule(X>=Z), /* Try an intermediate step. */ testused(X>=Z), deduce(Z>=Y,T) ). /****** LESS-THAN-OR-EQUALS ******/ deduce(X<=Y,T) :- ( int(X), /* If X is an integer - */ ( int(Y), /* - and Y is an integer, prove */ ( /* Y>=X or fail */ simplify(Y>=X,true) ; !, fail ) ; deduce(Y>=X,T) /* - swap sides then prove. */ ) ; nonvar(X), nonvar(Y), /* Proved if X & Y are the same. */ X=Y ; ( /* Equivalent forms of the */ infrule(Z+X<=Y) /* expression using different */ ; /* operator and integer addition */ infrule(X+Z<=Y) /* or subtraction. */ ), ( int(Z), simplify(Z>=0,true) ; infrule(Z>=0) ) ; infrule(X-Z<=Y), ( int(Z), simplify(0>=Z,true) ; infrule(Z<=0) ) ; infrule(X<=Z), /* Try an intermediate step. */ testused(Z>=X), deduce(Z<=Y,T) ). %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/replace2.pro0000644000175000017500000015617511753202340016276 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /*%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%* % % % ACKNOWLEDGEMENT % % =============== % % This module of the checker was originally developed by Fiona % % Maclennan (Department of Electronics, University of Southampton, % % May 1984) as part of a third year project. % % % *%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%*/ /***************************************************************************** * MODIFICATION HISTORY * * ==================== * * * * Date CFR Brief description of change * * -------- --- ---------------------------------------- * * 13/05/93 011 Failure to trap invalid responses in "replace" * * 14/05/93 015 Allow replaying of command scripts * * 24/05/93 019 "replace all" on selected hypotheses/conclusions * * 02/06/93 023 Allow wild-card patterns with replace-all * * 04/06/93 027 Substitution avoiding bound variables * * 13/08/93 041 Allow single hyps/concs in "replace ... & ..." 
* * 17/08/93 047 Limit rule matches to 20 then ask if continue * * 19/05/05 1511 Port to SICSTUS - remove singleton variables * * 01/06/05 1511 Port to SICSTUS - replace non-FDL 'not' with \+ * * 02/06/05 1511 Replace assert with assertz * * 06/06/05 1511 Port to SICSTUS - replace non-stand fast_setof/3 * * 07/06/05 1511 Port to SICSTUS - print/1 replaces write/1 for variables * * 08/06/05 1511 Port to SICSTUS - replace setof/3 with findall/3 * * 10/06/05 1511 Strengthen file_exists check to and_is_readable before see * * 13/07/05 1511 Move handling of quantifiers outside loop * * 05/08/05 1511 Introduction of prompt_user * *****************************************************************************/ /******** REPLACE: top-level checker command ********/ replace :- command_arg(expression, all), !, replace_all. replace :- /* CFR019 */ command_arg(expression, ARG), /* CFR019 */ nonvar(ARG), /* CFR019 */ ( /* CFR019 */ ARG = (_X & _Y) /* CFR019 */ ; /* CFR019 */ ARG = (_HC # N - M), /* CFR019 */ integer(N), /* CFR019 */ integer(M), /* CFR019 */ N < M /* CFR019 */ ), /* CFR019 */ !, /* CFR019 */ retractall(hyp_to_replace(_)), /* CFR019 */ retractall(conc_to_replace(_)), /* CFR019 */ process_replace_arg_expression(ARG), /* CFR019 */ !, /* CFR019 */ replace_all. /* CFR019 */ replace :- ( command_arg(expression, HC#N) ; prompt_user('REPLACE -- Which hypothesis/conclusion?', 'Type h#N or c#N as appropriate ... '), rread(F), F=HC#N ), !, replace(HC,N). replace(_,_) :- retract(on_filename(FNAME)), file_exists_and_is_readable(FNAME), see(FNAME), seen, fail. replace(HORC,N) :- see_correct_input_stream, /* CFR015 */ ( HORC=h, HC=hyp, NHC=newhyp ; HORC=c, HC=conc, NHC=newconc ), retractall(trying_a_replace_all), retractall(replace_all_expr_type(_)), retractall(logfact(command, _)), asserta(logfact(command, replace(HORC#N))), retractall(rep_working_on(_,_,_)), asserta(rep_working_on(NHC,HC,N)), X=..[HC,N,E], retractall(con(_)), call(X), assertz(con(E)), !, check, retract(con(C)), ( C=E, !, fail ; C\=E, retract(X), Z=..[HC,N,C], assertz(Z) ), !. replace_all :- retract(on_filename(FNAME)), file_exists_and_is_readable(FNAME), see(FNAME), seen, fail. 
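/* Illustrative sketch (added for exposition):  if the command layer has    */
/* stored                                                                   */
/*     command_arg(expression, (h#1 - 3) & (c#2))                           */
/* then the second replace clause above calls                               */
/* process_replace_arg_expression/1 (defined towards the end of this file), */
/* which asserts hyp_to_replace(1), hyp_to_replace(2), hyp_to_replace(3)    */
/* and conc_to_replace(2) before replace_all runs, so that only those       */
/* hypotheses and that conclusion are rewritten.                            */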
replace_all :- see_correct_input_stream, /* CFR015 */ assertz(trying_a_replace_all), retractall(replace_all_expr_type(_)), retractall(logfact(command, _)), asserta(logfact(command, replace(all))), retractall(rep_working_on(_,_,_)), retractall(con(_)), retractall(pattern(_)), /* CFR023 */ command_arg(old, OLD), ( /* CFR023 */ novars(OLD), /* CFR023 */ OLD_EXP = OLD /* CFR023 */ ; /* CFR023 */ var(OLD), /* CFR023 */ write('Total wildcard not allowed: aborted.'), /* CFR023 */ nl, /* CFR023 */ retractall(trying_a_replace_all), /* CFR023 */ !, /* CFR023 */ fail /* CFR023 */ ; /* CFR023 */ assertz(pattern(OLD)), /* CFR023 */ clear_facts, /* CFR023 */ get_all_subexpressions_in_vc(Tsubs), /* CFR023 */ ( /* CFR023 */ Tsubs = [], /* CFR023 */ write('No matching subexpressions found: aborted.'), /* CFR023 */ nl, /* CFR023 */ retractall(trying_a_replace_all), /* CFR023 */ !, /* CFR023 */ fail /* CFR023 */ ; /* CFR023 */ assertz(tidied_subs(Tsubs)), /* CFR023 */ ( /* CFR023 */ Tsubs = [_OneElement], /* CFR023 */ N = 1 /* CFR023 */ ; /* CFR023 */ print_subs([], Tsubs), /* CFR023 */ determine_sub(N) /* CFR023 */ ), /* CFR023 */ !, /* CFR023 */ ( /* CFR023 */ N = none, /* CFR023 */ !, /* CFR023 */ fail /* CFR023 */ ; /* CFR023 */ gets(Tsubs, N, [OLD_EXP,_]) /* CFR023 */ ) /* CFR023 */ ) /* CFR023 */ ), /* CFR023 */ checktype(OLD_EXP, TYPE), /* CFR023 */ assertz(replace_all_expr_type(TYPE)), !, assertz(con(OLD_EXP)), /* CFR023 */ retractall(command_arg(old, _)), /* CFR023 */ asserta(command_arg(old, OLD_EXP)), /* CFR023 */ !, check, retract(con(NEW)), ( NEW=OLD_EXP, /* CFR023 */ !, fail ; NEW\=OLD_EXP, /* CFR023 */ !, do_replace_all(OLD_EXP, NEW) /* CFR023 */ ), retractall(trying_a_replace_all), !. /****************************************************************************/ /************** RULES FOR THE REPLACEMENT OF SUBEXPRESSIONS ***************/ /****************************************************************************/ rule(Name,X=>Y) :- X=..[OP|Args], make_up(X1,OP,Args), retractall(type_classification(_,_)), retractall(type_classification_done), save_type_classification_list(Args), asserta(type_classification_done), !, use_rulefile(X1,FNAME), get_term(FNAME,T), ( T = (Name: (X1 may_be_replaced_by Y if Justifications)) ; T = (Name: (X1 may_be_replaced_by Y)), Justifications = [] ; ( T = (Name: (A & B are_interchangeable if Justifications)) ; T = (Name: (A & B are_interchangeable)), Justifications = [] ), ( X1=A, Y=B ; X1=B, Y=A ) ), is_chosen_rulename(Name), \+ banned_rule(FNAME,Name), ok_type_classification(X,FNAME,Name), X1=..[OP|Args1], add_conds(Args=Args1,Justifications,J), fulfil(J,J1), ( abandon_search, /* CFR047 */ !, /* CFR047 */ fail /* CFR047 */ ; /* CFR047 */ true /* CFR047 */ ), /* CFR047 */ (\+ satisfies(Name,Y)), display_rep_rule(Name,X,Y,J1), J1=[], on_filename(File), assertz(logged_rule_match(Y,[File,Name],Justifications)). display_rep_rule(R,X,Y,J) :- ( display_subgoals_max(N) ; N = 99 ), length(J, LEN), LEN =< N, ( display_var_free_only(off) ; var_free(J) ), nl, print(R), write(' allows '), print(X), write(' to be replaced by '), print(Y), ( (J=[], write(' directly')) ; (write(' provided '), write_justs(J)) ), increment_search_count, /* CFR047 */ nl, !. display_rep_rule(_,_,_,_) :- !. 
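/* Illustrative sketch (added for exposition):  rule/2 above matches terms  */
/* read from the user's rule files with get_term/2.  Hypothetical entries   */
/* of the accepted shapes would look like                                   */
/*     add_zero(1):  X + 0 may_be_replaced_by X.                            */
/*     commute(1):   X + Y & Y + X are_interchangeable.                     */
/* The first offers X as a replacement for any subexpression matching       */
/* X + 0 with no side conditions; the second allows rewriting in either     */
/* direction.  Rule names and bodies here are invented for illustration     */
/* only.                                                                    */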
/****************************************************************************/ /************************** FRAMEWORK OF PROGRAM **************************/ /****************************************************************************/ /* The following clauses comprise the framework of the program.The struct- */ /* ure of each constituent clause can be found later in the listing */ /****************************************************************************/ /* CHECK -- clear up then reduce expression. */ check:- nl, write('OLD EXPRESSION: '), con(E), print(E), nl, reduce. /* REDUCE -- get the pattern, carry out the reduction/replacement, and carry on until the user wishes to give up. */ reduce:- repeat, specify_patt, reduction_procedure, retractall(totally_specified_replace), retractall(command_arg(rulename, _)), retractall(could_not_infer(_)), /* until */ reduce_again, !. /* SPECIFY_PATT -- read in the pattern to be sought in the expression */ specify_patt:- retractall(pattern(_)), ( command_arg(old, OLD) ; prompt_user('Pattern? '), rread(P), parse_expression(P, OLD) ), assertz(pattern(OLD)), !. /* REDUCTION_PROCEDURE -- clear up, carry out the five stages of the reduction procedure, then tidy up again. */ reduction_procedure:- clear_facts, assertz(status(go)), assertz(stage_num(1)), handle_quantifiers_in_con, /* CFR027 */ repeat, retract(stage_num(N)), stage(N), /* until */ ( N=5, /* CFR027 */ unhandle_quantifiers_in_con /* CFR027 */ ; (status(stop), unhandle_quantifiers_in_con, /* CFR027 */ con(E), nochange(E)) ), clear_facts, !. /* CLEAR_FACTS -- clears up all temporary clauses asserted by program */ clear_facts:- retractall(status(_)), retractall(stage_num(_)), retractall(tidied_subs(_)), retractall(required_sub(_)), retractall(occstoreplace(_)), retractall(numsubs(_)), retractall(newsub(_)), retractall(sub(_)), retractall(pos_newsub(_)), retractall(satisfies(_,_)), retractall(all_done(_)), retractall(logged_rule_match(_,_,_)), retractall(posslog(_,_)), !. /* STAGE(1) -- We form the list of all subexpressions of con(E) which match the specified pattern. This list is then tidied up to give a list in which multiple occurrences of a specific match are represented as a doubleton list [SUB,N], where SUB is the matching subexpression and N is the number of occurrences of the match in con(E). This list is then presented to the user and if there are no matching sub- expressions, the status is changed to `stop'; otherwise, the list is asserted into the database and execution proceeds to stage two. */ stage(1):- con(E), find_subs(E,Subs), tidy_replist(Subs,Tsubs), print_subs(E,Tsubs), ( (Tsubs=[], retract(status(_S)), assertz(status(stop))) ; (assertz(tidied_subs(Tsubs)), assertz(stage_num(2))) ), !. /* STAGE(2) -- ask the user which subexpression is to be replaced (or, if there is only one subexpression, ask whether or not it is to be replaced). If the user does not wish to replace any such subexpression, change the status to `stop', otherwise move on to stage three. */ stage(2):- determine_sub(N), ( (N=none, retract(status(_S)), assertz(status(stop))) ; (assertz(required_sub(N)), assertz(stage_num(3))) ), !. /* STAGE(3) -- find out which occurrences, if any, of the matching subexpression are to be replaced. If no occurrences of the subexpression are to be replaced, change the status to `stop', otherwise save the occurrence numbers to be replaced and move on to stage four. 
*/ stage(3):- retract(tidied_subs(Tsubs)), retract(required_sub(N)), gets(Tsubs,N,Term), ascertain(Term,Sub), ( (occstoreplace([]), retract(status(_S)), assertz(status(stop))) ; (assertz(sub(Sub)), assertz(stage_num(4))) ), !. /* STAGE(4) -- ask the user to specify the new subexpression by which the old match(es) is/are to be replaced. If the answer is `none', the status is changed to `stop', otherwise we move on to stage five. */ stage(4):- sub(Sub), determine_newsub(Sub), ( (newsub(none), retract(status(_S)), assertz(status(stop))) ; assertz(stage_num(5)) ), !. /* STAGE(5) -- replace the required occurrence(s) of the matching subexp- ression by the replacement expression and display the new expression on the screen. The user is asked if the replacement is acceptable. */ stage(5):- con(E), retract(sub(Sub)), replace(E,Pos_newE,Sub), print_new_E(Pos_newE), ask_if_ok(Pos_newE,E), !. /* REDUCE_AGAIN -- when the replace_more flag is set, ask the user if reduction of the expression is to be continued, and insists on a yes/no answer. */ reduce_again:- trying_a_replace_all, !. reduce_again:- replace_more(off). reduce_again:- replace_more(on), read_answer('Replace more',A), !, A=no. /****************************************************************************/ /***************************** COMMON CLAUSES *****************************/ /****************************************************************************/ /* The following clauses are common to other clauses in the program ie each */ /* of the following is called by more than one other clause */ /****************************************************************************/ /* PARSE(EXPRN,OPERATOR,COMPONENTS) -- split up expressions in right order */ parse(X,_,Y):- var(X), var(Y), !, abort. parse(X,_,[]):- atomic(X), !. parse(E,'.',E):- E=..['.'|_L], !. parse(E,F,L):- E=..[F|L], !. /* TYPE_PATT(Pattern) -- show the user a pattern, first replacing Prolog variables by capital letters, for presentability. */ type_patt(E):- inst(E,65,_N), print(E), !. /* INST(Exp,Oldnum,Newnum) -- instantiate Prolog variables to capital letters of the alphabet, from `A' upwards */ inst(E,N,N):- atomic(E), !. inst(E,N,N1):- var(E), name(E,[N]), N1 is N+1, !. inst(E,N,N1):- parse(E,_F,L), inst_list(L,N,N1), !. /* INST_LIST(List,Oldnum,NewNum) -- instantiate an expression by instantiating its subexpressions. */ inst_list([],N,N):- !. inst_list([H|T],N,N1):- atomic(H), inst_list(T,N,N1), !. inst_list([H|T],N,N1):- var(H), name(H,[N]), N0 is N+1, inst_list(T,N0,N1), !. inst_list([H|T],N,N1):- inst(H,N,N0), inst_list(T,N0,N1), !. /* READ_TERM(Term,MaxN,Numvalatoms) -- read in terms until a valid one is encountered, i.e. a number less than the maximum, or an atom such as `all' or `none'. */ read_term(Prompt,T,MaxN,Numvalatoms):- repeat, prompt_user(Prompt), rread(T), /* until */ check(T,MaxN,Numvalatoms), !. /* CHECK(Term,MaxN,Numvalatoms) -- write an error message if the term is not valid. */ check(T,MaxN,Numvalatoms):- ( invalidatom(T,Numvalatoms) ; invalidnum(T,MaxN) ; var(T) ), !, write('Invalid term. Try again'), nl, fail. check(T, _MaxN, _Numvalatoms) :- /* CFR011 */ \+ atom(T), /* CFR011 */ \+ integer(T), /* CFR011 */ !, /* CFR011 */ write('Invalid term. Try again'), /* CFR011 */ nl, /* CFR011 */ fail. /* CFR011 */ check(_T,_MaxN,_):- !. /* INVALIDATOM(Term,Num) -- allow `none' if one atom is allowed (Num=1), and `all' or `none' if two are allowed (Num=2). */ invalidatom(T,1):- atom(T), T\=none, !. /* CFR011 */ invalidatom(T,2):- atom(T), T\=none, T\=all, !. 
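/* Illustrative sketch (added for exposition) of the input validation       */
/* above, with MaxN = 3:                                                    */
/*     check(2,   3, 2)  succeeds              (number in range 1..3)       */
/*     check(all, 3, 2)  succeeds              ('all' allowed when Num = 2) */
/*     check(all, 3, 1)  reports the error and fails                        */
/*     check(5,   3, 2)  reports the error and fails (out of range)         */
/* read_term/4 keeps prompting until check/3 accepts the term.              */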
/* CFR011 */ /* INVALIDNUM(Term,Max) -- allow Term if it is at most Max and at least 0. */ invalidnum(T,MaxN):- integer(T), (T>MaxN ; T=<0), !. /* CFR011 */ /* GETS(List,N,Term) -- set Term to the Nth. element of list List. */ gets([Term|_T],1,Term):- !. gets([_H|T],N,Term):- N1 is N-1, gets(T,N1,Term), !. /* NOCHANGE(Exprn) -- displays message if no change is made to the main expression. */ nochange(_E) :- trying_a_replace_all, !. nochange(E):- write('EXPRESSION REMAINS: '), print(E), nl, !. /****************************************************************************/ /**************************** FIND_SUBS CLAUSE ****************************/ /****************************************************************************/ /* The following clauses all constitute the 'find_subs' clause */ /****************************************************************************/ /* FIND_SUBS(Exp,List) -- find matching subexpressions and put them in List */ find_subs(E,Subs):- parse(E,_,L), obtain(L,Ss), determine(Subs,E,Ss), !. /* OBTAIN(Exp_list,Sub_list) -- get all the matching subexpressions from the list of subexpressions Exp_list and put them in Sub_list. */ obtain([],[]):- !. obtain([H|T],Ss):- obtain(T,St), find_subs(H,Sh), append(Sh,St,Ss), !. /* DETERMINE(Sub_list,Exprn,New_list) -- updates the list of subexpressions according to whether or not Exprn matches the pattern. */ determine([P|S],P,S):- pattern(P), !. determine(Subs,_E,Subs):- !. /****************************************************************************/ /****************************** TIDY CLAUSE *******************************/ /****************************************************************************/ /* The following clauses all constitute the 'tidy' clause */ /****************************************************************************/ /* TIDY_REPLIST(Oldlist,Newlist) -- clear up multiple occurrences of the same subexpression in the list of matching subexpressions, replacing them by doubleton lists. */ tidy_replist([],[]):- !. tidy_replist([H|T],[[H,N]|Ts]):- find_del(H,T,T1,N), tidy_replist(T1,Ts), !. /* FIND_DEL(Exp,Oldlist,Newlist,N) -- delete N-1 occurrences of expression Exp from Oldlist to give Newlist. */ find_del(_H,[],[],1):- !. find_del(H,[H|T],T1,N):- find_del(H,T,T1,N1), N is N1+1, !. find_del(H,[A|T],[A|T1],N):- find_del(H,T,T1,N), !. /****************************************************************************/ /*************************** PRINT_SUBS CLAUSE ****************************/ /****************************************************************************/ /* The following clauses all constitute the 'print_subs' clause */ /****************************************************************************/ /* PRINT_SUBS(Exp,Sublist) -- display the subexpressions on the screen. */ print_subs(_Exp,[]):- write('There are NO subexpressions of the form '), pattern(Pattern), type_patt(Pattern), retractall(command_arg(_,_)), nl, nl, !. print_subs(_Exp,[Term]):- ( Term=[Sub,_N] ; Term=Sub ), assertz(numsubs(1)), write('Subexpression is '), print(Sub), nl, !. print_subs(_Exp,Tsubs):- write('Possible subexpressions:-'), nl, !, list_subs(Tsubs,1), !. /* LIST_SUBS(Sublist,Num) -- display subexpressions on screen, numbering them with the counter Num. */ list_subs([],N):- Numsubs is N-1, assertz(numsubs(Numsubs)), !. list_subs([H|Rsubs],N):- tab(5), print(N), write('. '), type_sub(H), M is N+1, list_subs(Rsubs,M), !. /* TYPE_SUB(Subexpr) -- type Subexpr (either [S,N] or S) out on the screen. 
*/ type_sub([Sub,_N]):- print(Sub), nl, !. type_sub(Sub):- print(Sub), nl, !. /****************************************************************************/ /************************** DETERMINE_SUB CLAUSE **************************/ /****************************************************************************/ /* DETERMINE_SUB(Term) -- find out which subexpression is to be altered. */ determine_sub(T):- numsubs(1), ( retract(command_arg(old, _)), ( command_arg(new, _), command_arg(rule, exists), asserta(totally_specified_replace) ; true ), T=1 ; read_answer('Change this subexpression',Answer), !, ( (Answer=yes, T=1) ; (Answer=no, T=none) ) ), !. determine_sub(T):- retractall(command_arg(old, _)), retract(numsubs(Nsubs)), read_term('Change which subexpression (number/none)? ',T,Nsubs,1), !. /****************************************************************************/ /**************************** ASCERTAIN CLAUSE ****************************/ /****************************************************************************/ /* The following two clauses constitute the 'ascertain' clause */ /****************************************************************************/ /* ASCERTAIN(Term,Sub) -- find out which occurrence of the particular subexpression chosen is to be replaced. */ ascertain(Term,Sub):- Term=[Sub,1], assertz(occstoreplace([1])), !. ascertain(Term,Sub):- Term=[Sub,Num], specify(N,Sub,Num), occs_list(L,N,Num), assertz(occstoreplace(L)), !. /* SPECIFY(OccN,Sub,NumofOccs) -- get the occurrence number, OccN, of Sub to be replaced. OccN must be between 1 and NumofOccs inclusive. */ specify(N,_Sub,Num):- read_term('Change which occurrence (number/none/all)? ',N,Num,2), !. /* OCCS_LIST(OccsList,Occ,NumofOccs) -- set OccsList to [] if Occ=`none', to [N] if Occ=N, or to the list of integers 1..NumofOccs if Occ=`all'. */ occs_list([],none,_):- !. occs_list(L,all,Num):- make_occs_list(1,L,Num), !. occs_list([N],N,_):- !. /* MAKE_OCCS_LIST(A,List,B) -- Make List into the list of integers A..B. */ make_occs_list(N,[],Num):- N is Num+1, !. make_occs_list(N,[N|T],Num):- N1 is N+1, make_occs_list(N1,T,Num), !. /****************************************************************************/ /************************ DETERMINE_NEWSUB CLAUSE *************************/ /****************************************************************************/ /* The following clauses all constitute the 'determine_newsub' clause */ /****************************************************************************/ /* DETERMINE_NEWSUB(Sub) -- get the new subexpression pattern by which the old match is to be replaced and the rule which justifies this. Finally, determine a unique value for Sub, referring to the user if this proves to be necessary. */ determine_newsub(Sub):- ( retract(command_arg(new, Nsub)) ; prompt_user('Type new subexpression pattern: '), rread(NS), parse_expression(NS, Nsub) ), assertz(pos_newsub(Nsub)), !, ( retract(command_arg(rule, exists)), ! ; repeat, retractall(command_arg(rulename,_)), prompt_user('By which rule? '), rread(Rname), parse_rulename(Rname) ), !, try_rule(Sub=>Nsub,N1), solution(Sub,Nsub,N1), !. /* TRY_RULE(Sub=>Nsub,N1) -- find rules which may be applied to replace Sub by Nsub and return N1, the number of different such rule-applications */ try_rule(Sub=>Nsub,N1):- pos_newsub(Nsub), retractall(num_matches(_)), asserta(num_matches(0)), retractall(abandon_search), /* CFR047 */ retractall(search_count(_)), /* CFR047 */ search_rules(_Rulename,Sub=>Nsub), num_matches(N1), !. 
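/* Illustrative sketch (added for exposition): demo_occs_list/0 exercises   */
/* the occurrence-list helpers defined above; the expected output is        */
/* [1,2,3], [2] and [] respectively.  These lists later determine which     */
/* occurrences of the chosen subexpression are rewritten in stage 5.        */
demo_occs_list :-
    occs_list(All, all, 3),   print(All),   nl,     /* -> [1,2,3] */
    occs_list(One, 2, 3),     print(One),   nl,     /* -> [2]     */
    occs_list(None, none, 3), print(None),  nl.     /* -> []      */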
/* SEARCH_RULES(NAME,SUB=>NSUB) -- linear search of rulebase */ search_rules(Rulename,Sub=>Nsub) :- rule(Rulename,Sub=>Nsub), assertz(satisfies(Rulename,Nsub)), update_match_count, fail. search_rules(_,_). /* UPDATE_MATCH_COUNT -- increment number of matches found */ update_match_count :- retract(num_matches(N)), M is N+1, asserta(num_matches(M)), !. /* SOLUTION(Sub,Nsub,Num) -- display the solution(s) (Num in number) on the screen for inspection/choice */ solution(_Sub,_Nsub,0):- !, write('There is no such rule which allows this replacement'), nl, assertz(newsub(none)), !. solution(Sub,_Nsub,_N):- make_list(0,Numsubs,Nsublist), ( Numsubs=1, ( totally_specified_replace ; write('The only possible replacement for '), print(Sub), write(' is:') ) ; Numsubs>1, write('Possible replacements for '), print(Sub), write(' are:') ), nl, display_list(Numsubs,Nsublist), !. /* MAKE_LIST(N,N1,Nsublist) -- create Nsublist, a list of sublists, each sublist of the form [Nsub|R], where Nsub is a possible new subexpression and R is a list of rulenames which can generate this new subexpression from the old subexpression. */ make_list(N,N1,Nsublist):- retract(satisfies(Rname,Nsub)), rulelist(R,Nsub), append([Nsub,Rname],R,L), N0 is N+1, make_list(N0,N1,T), Nsublist=[L|T], !. make_list(N,N,[]):- !. /* RULELIST(Rulelist,Nsub) -- generate a list of rules which generate Nsub as the new subexpression. */ rulelist([Rname|Tr],Nsub):- retract(satisfies(Rname,Nsub)), rulelist(Tr,Nsub), !. rulelist([],_):- !. /* DISPLAY_LIST(N,Nsublist) -- display the possible replacement subexp- ressions together with the rules which justifies each and, if necessary, ask which replacement to use. */ display_list(1,[[Nsub|[Rname|R]]]):- ( totally_specified_replace, A=yes ; tab(5), print(Nsub), nl, tab(11), write('according to rule '), print(Rname), nl, list_rules(R), read_answer('Proceed',A) ), !, find_if_ok(A,Nsub,[Rname|R]), !. display_list(N,Nsublist):- print_Nsubs(1,Nsublist), read_term('Select (number/none): ',T,N,1), find_newsub(T,Nsublist,Newsub), assertz(newsub(Newsub)), !. /* PRINT_NSUBS(N,Nsublist) -- display the possible replacements. */ print_Nsubs(_,[]):- !. print_Nsubs(N,[[Nsub|[Rname|R]]|T]):- tab(5), print(N), write('. '), print(Nsub), nl, tab(11), write('according to rule '), print(Rname), nl, list_rules(R), N1 is N+1, print_Nsubs(N1,T), !. /* LIST_RULES(Rulelist) -- list the rulenames on the screen. */ list_rules([]):- !. list_rules([H|T]):- tab(11), write('and rule '), print(H), nl, list_rules(T), !. /* FIND_IF_OK(Yesno,Nsub,Rules) -- update the action history by asserting the new subexpression and Rule(s) used into the database. */ find_if_ok(yes,Nsub,[Rule|R]):- sub(Sub), assertz(newsub(Nsub)), assertz(rule_applied([Rule|R])), retract(logged_rule_match(Nsub,[File,Rule],Goals)), retractall(logged_rule_match(_,_,_)), asserta(posslog(rulematch, ([File,Rule]: (Sub may_be_replaced_by Nsub if Goals)))), !. find_if_ok(no,_,_):- assertz(newsub(none)), !. /* FIND_NEWSUB(Occ,Nsublist,Nsub) -- find the Occ'th element of Nsublist and set Nsub to this, unless Occ is `none', in which case Nsub is `none'. */ find_newsub(none,_Nsublist,none):- !. find_newsub(N,Nsublist,Newsub):- sub(Sub), gets(Nsublist,N,[Newsub|[Rule|R]]), assertz(rule_applied([Rule|R])), retract(logged_rule_match(Newsub,[File,Rule],Goals)), retractall(logged_rule_match(_,_,_)), asserta(posslog(rulematch, ([File,Rule]:(Sub may_be_replaced_by Newsub if Goals)))), !. 
/****************************************************************************/ /***************************** REPLACE CLAUSE *****************************/ /****************************************************************************/ /* The following clauses all constitute the 'replace' clause */ /****************************************************************************/ /* REPLACE(Exp,NewE,Sub) -- replace all required occurrences of Sub within expression Exp to get NewE. */ replace(E,NewE,Sub):- assertz(all_done(no)), change(E,NewE,Sub,1,_N), !. /* CHANGE(Exp,NewE,Sub,N0,N1) -- change Exp to NewE by changing Sub if it is one of the occurrences to be replaced according to the occstoreplace list. N0 and N1 are counters. */ change(E,Newsub,E,N,N):- occstoreplace([N]), !, retract(all_done(no)), assertz(all_done(yes)), retract(newsub(Newsub)), !. change(E,Newsub,E,N,N1):- occstoreplace([N|T]), !, retract(occstoreplace([N|T])), assertz(occstoreplace(T)), newsub(Newsub), N1 is N+1, !. change(E,E,E,N,N1):- N1 is N+1, !. change(E,E,_Sub,N,N):- atomic(E), !. change(E,NewE,Sub,N,N1):- parse(E,F,L), changelist(L,NewL,Sub,N,N1), parse(NewE,F,NewL), !. /* CHANGELIST(OldL,NewL,Sub,N0,N1) -- change OldL to NewL by changing Sub to the new subexpression where necessary in OldL. */ changelist([],[],_,N,N):- !. changelist([H|T],NewL,Sub,N,N1):- change(H,NewH,Sub,N,N0), ( (all_done(yes), NewL=[NewH|T]) ; (changelist(T,T1,Sub,N0,N1), NewL=[NewH|T1]) ), !. /****************************************************************************/ /*************************** PRINT_NEW_E CLAUSE ***************************/ /****************************************************************************/ /* PRINT_NEW_E(NewE) -- displays new expression on the screen. */ print_new_E(NewE):- write('NEW EXPRESSION: '), print(NewE), nl, !. /****************************************************************************/ /**************************** ASK_IF_OK CLAUSE ****************************/ /****************************************************************************/ /* The following two clauses constitute the 'ask_if_ok' clause */ /****************************************************************************/ /* ASK_IF_OK(Pos_newE,E) -- ask if the new expression is acceptable and, according to the reply, update the expression in the database accordingly. */ ask_if_ok(Pos_newE,E):- read_answer('Is this OK',Answer), !, is_it_ok(Answer,Pos_newE,E), !. /* IS_IT_OK(Yesno,NewE,OldE) -- update the expression in the database. */ is_it_ok(no,_,E):- nochange(E), !. is_it_ok(yes,NewE,_):- ( typechecking(on), ( trying_a_replace_all, replace_all_expr_type(TYPE), checktype(NewE, TYPE) ; \+ trying_a_replace_all, checktype(NewE, boolean) ; write('!!! ERROR: New expression does not typecheck properly.'), nl, fail ) ; typechecking(off) ), retract(con(_E)), assertz(con(NewE)), unhandle_quantifiers_in_con, /* CFR027 */ retract(posslog(X, Y)), assertz(logfact(X, Y)), ( trying_a_replace_all ; rep_working_on(NHC, HC, N), con(UnhandledNewE), /* CFR027 */ Z=..[HC, N, UnhandledNewE], /* CFR027 */ assertz(logfact(NHC, Z)) ), !. is_it_ok(_,_,_) :- !. /* process_replace_arg_expression - store which hyps and conc to change */ process_replace_arg_expression(ARG) :- /* CFR019 */ var(ARG), !, write('!!! Error in argument syntax: illegal Prolog variable found.'), nl, fail. process_replace_arg_expression(X & Y) :- process_replace_arg_expression(X), !, process_replace_arg_expression(Y). process_replace_arg_expression(h # N - N) :- assertz(hyp_to_replace(N)), !. 
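/* Illustrative sketch (added for exposition; f, g, a and z are             */
/* hypothetical):  with  occstoreplace([2])  and  newsub(z)  recorded, the  */
/* call  replace(f(a, g(a)), New, a)  walks the expression left to right    */
/* counting occurrences of the subexpression a; the first occurrence is     */
/* left alone and the second is rewritten, giving  New = f(a, g(z)).        */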
process_replace_arg_expression(c # N - N) :- assertz(conc_to_replace(N)), !. process_replace_arg_expression(h # N - M) :- integer(N), integer(M), N < M, assertz(hyp_to_replace(N)), N1 is N + 1, !, process_replace_arg_expression(h # N1 - M), !. process_replace_arg_expression(c # N - M) :- integer(N), integer(M), N < M, assertz(conc_to_replace(N)), N1 is N + 1, !, process_replace_arg_expression(c # N1 - M), !. process_replace_arg_expression(h # N) :- /* CFR041 */ integer(N), /* CFR041 */ assertz(hyp_to_replace(N)), /* CFR041 */ !. /* CFR041 */ process_replace_arg_expression(c # N) :- /* CFR041 */ integer(N), /* CFR041 */ assertz(conc_to_replace(N)), /* CFR041 */ !. /* CFR041 */ /* get_all_subexpressions_in_vc(Tsubs) - from all current hyps & concs */ get_all_subexpressions_in_vc(Tsubs) :- /* CFR023 */ ( findall(Subs1, (hyp(N, H), find_subs(H, Subs1)), A), A \== [], sort(A, SubsList1) ; SubsList1 = [] ), !, ( findall(Subs2, (conc(N, C), find_subs(C, Subs2)), B), B \== [], sort(B, SubsList2) ; SubsList2 = [] ), !, append(SubsList2, SubsList1, SubsList), !, general_list_append(SubsList, RawSubs), !, tidy_replist(RawSubs, Tsubs), !. /* general_list_append(ListOfLists, SingleList) - append list of lists */ general_list_append([List], List) :- !. /* CFR023 */ general_list_append([Head|Tail], List) :- general_list_append(Tail, TailList), !, append(Head, TailList, List), !. general_list_append([], []) :- !. /* handle_quantifiers_in_con - rename to prevent illegal replacement */ handle_quantifiers_in_con :- /* CFR027 */ con(X), /* CFR027 */ handle_quantifiers_in(X, Y), /* CFR027 */ !, /* CFR027 */ ( /* CFR027 */ X = Y /* CFR027 */ ; /* CFR027 */ retract(con(X)), /* CFR027 */ assertz(con(Y)) /* CFR027 */ ), /* CFR027 */ !. /* CFR027 */ /* unhandle_quantifiers_in_con - rename quantifiers back to original names */ unhandle_quantifiers_in_con :- /* CFR027 */ \+ qbindingname(_, _), /* CFR027 */ !. /* CFR027 */ unhandle_quantifiers_in_con :- /* CFR027 */ con(X), /* CFR027 */ unhandle_quantifiers_in(X, Y), /* CFR027 */ !, /* CFR027 */ ( /* CFR027 */ X = Y /* CFR027 */ ; /* CFR027 */ retract(con(X)), /* CFR027 */ assertz(con(Y)) /* CFR027 */ ), /* CFR027 */ !. /* CFR027 */ /* handle_quantifiers_in(X,Y) - rename bound variables to prevent capture */ handle_quantifiers_in(X, Y) :- /* CFR027 */ retractall(qbindingname(_, _)), /* CFR027 */ !, /* CFR027 */ do_handle_quantifiers_in(X, Y, 1, _), /* CFR027 */ !. /* CFR027 */ /* do_handle_quantifiers_in(X, Y, N, N') - handle, counting from N upwards */ do_handle_quantifiers_in(for_all(X:T, P), for_all(DollarX:T, Q), N, M) :- atom(X), /* CFR027 */ name(N, NL), /* CFR027 */ name(DollarX, [36|NL]), /* chr(36) = '$' */ /* CFR027 */ !, /* CFR027 */ assertz(qbindingname(X, DollarX)), /* CFR027 */ safe_subst_vbl(X, DollarX, P, PP), /* CFR027 */ !, /* CFR027 */ Nplus1 is N + 1, /* CFR027 */ do_handle_quantifiers_in(PP, Q, Nplus1, M), /* CFR027 */ !. /* CFR027 */ do_handle_quantifiers_in(for_some(X:T, P), for_some(DollarX:T, Q), N, M) :- atom(X), /* CFR027 */ name(N, NL), /* CFR027 */ name(DollarX, [36|NL]), /* chr(36) = '$' */ /* CFR027 */ !, /* CFR027 */ assertz(qbindingname(X, DollarX)), /* CFR027 */ safe_subst_vbl(X, DollarX, P, PP), /* CFR027 */ !, /* CFR027 */ Nplus1 is N + 1, /* CFR027 */ do_handle_quantifiers_in(PP, Q, Nplus1, M), /* CFR027 */ !. /* CFR027 */ do_handle_quantifiers_in(X, X, N, N) :- /* CFR027 */ atomic(X), /* CFR027 */ !. 
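/* Illustrative sketch (added for exposition):  the renaming above turns a  */
/* conclusion such as                                                       */
/*     for_all(i : integer, i >= 0)                                         */
/* into                                                                     */
/*     for_all($1 : integer, $1 >= 0)                                       */
/* recording qbindingname(i, '$1'), so that a replace pattern mentioning i  */
/* cannot capture the bound variable; unhandle_quantifiers_in_con maps the  */
/* $-names back afterwards.                                                 */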
/* CFR027 */ do_handle_quantifiers_in([H|T], RESULT, N, M) :- /* CFR027 */ do_handle_quantifiers_in_list([H|T], RESULT, N, M), /* CFR027 */ !. /* CFR027 */ do_handle_quantifiers_in(OLD, NEW, N, M) :- /* CFR027 */ \+ atomic(OLD), /* CFR027 */ nonvar(OLD), /* CFR027 */ OLD =.. [F|OldArgs], /* CFR027 */ do_handle_quantifiers_in_list(OldArgs, NewArgs, N, M), /* CFR027 */ !, /* CFR027 */ NEW =.. [F|NewArgs]. /* CFR027 */ do_handle_quantifiers_in_list([H|T], [NewH|NewT], N, M) :- /* CFR027 */ do_handle_quantifiers_in(H, NewH, N, K), /* CFR027 */ !, /* CFR027 */ do_handle_quantifiers_in_list(T, NewT, K, M). /* CFR027 */ do_handle_quantifiers_in_list([], [], N, N) :- !. /* CFR027 */ /* SAFE_SUBST_VBL(V,X,OLD,NEW) - substitute all V in OLD by X to get NEW */ safe_subst_vbl(V,X,V,X) :- !. /* CFR027 */ safe_subst_vbl(_V,_X,Y,Y) :- /* CFR027 */ atomic(Y), /* CFR027 */ !. /* CFR027 */ safe_subst_vbl(V,_X,for_all(V:T,P),for_all(V:T,P)) :- !. /* CFR027 */ safe_subst_vbl(V,_X,for_some(V:T,P),for_some(V:T,P)) :- !. /* CFR027 */ safe_subst_vbl(V,X,F,F1) :- /* CFR027 */ F=..[OP|Args], /* CFR027 */ safe_subst_vbl_list(V,X,Args,Args1), /* CFR027 */ F1=..[OP|Args1], /* CFR027 */ !. /* CFR027 */ /* SAFE_SUBST_VBL_LIST(V,X,OL,NL) - substitute all V in OL by X to get NL */ safe_subst_vbl_list(V,X,[A],[A1]) :- /* CFR027 */ safe_subst_vbl(V,X,A,A1), /* CFR027 */ !. /* CFR027 */ safe_subst_vbl_list(V,X,[A|Args],[A1|Args1]) :- /* CFR027 */ safe_subst_vbl(V,X,A,A1), /* CFR027 */ !, /* CFR027 */ safe_subst_vbl_list(V,X,Args,Args1), /* CFR027 */ !. /* CFR027 */ /* unhandle_quantifiers_in(X,Y) - replace $Var's by original atoms */ unhandle_quantifiers_in(X, X) :- /* CFR027 */ \+ qbindingname(_, _), /* CFR027 */ !. /* CFR027 */ unhandle_quantifiers_in(X, Y) :- /* CFR027 */ fetch_vars_in(X, VL), /* CFR027 */ adjust_qbindingnames(VL), /* CFR027 */ !, /* CFR027 */ do_unhandle_quantifiers_in(X, Y), /* CFR027 */ !. /* CFR027 */ /* do_unhandle_quantifiers_in(X,Y) - use updated qbindingname to revert */ do_unhandle_quantifiers_in(DollarX, X) :- /* CFR027 */ atom(DollarX), /* CFR027 */ qbindingname(X, DollarX), /* CFR027 */ !. /* CFR027 */ do_unhandle_quantifiers_in(X, X) :- /* CFR027 */ atomic(X), /* CFR027 */ !. /* CFR027 */ do_unhandle_quantifiers_in(OLD, NEW) :- /* CFR027 */ \+ atomic(OLD), /* CFR027 */ nonvar(OLD), /* CFR027 */ OLD =.. [F|OldArgs], /* CFR027 */ do_unhandle_quantifiers_in_list(OldArgs, NewArgs), /* CFR027 */ !, /* CFR027 */ NEW =.. [F|NewArgs]. /* CFR027 */ /* do_unhandle_quantifiers_in_list(X,Y) - do for each element of list */ do_unhandle_quantifiers_in_list([H|T], [NewH|NewT]) :- /* CFR027 */ do_unhandle_quantifiers_in(H, NewH), /* CFR027 */ !, /* CFR027 */ do_unhandle_quantifiers_in_list(T, NewT). /* CFR027 */ do_unhandle_quantifiers_in_list([], []) :- !. /* CFR027 */ /* fetch_vars_in(EXPR, LIST_OF_VARS) - return list of identifiers in EXP */ fetch_vars_in(X, L) :- /* CFR027 */ do_fetch_vars_in(X, T), /* CFR027 */ !, /* CFR027 */ sort(T, L). /* CFR027 */ /* do_fetch_vars_in(EXPR, LIST_OF_VARS) - list with duplicates etc. */ do_fetch_vars_in(X, [X]) :- /* CFR027 */ atom(X), /* CFR027 */ !. /* CFR027 */ do_fetch_vars_in(X, []) :- /* CFR027 */ atomic(X), /* CFR027 */ !. /* CFR027 */ do_fetch_vars_in(X, L) :- /* CFR027 */ nonvar(X), /* CFR027 */ X =.. [_|Xs], /* CFR027 */ !, /* CFR027 */ do_fetch_vars_in_list(Xs, L), /* CFR027 */ !. 
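/* Illustrative sketch (added for exposition; f, g and i are hypothetical): */
/* safe_subst_vbl/4 above deliberately stops at a quantifier that rebinds   */
/* the variable being renamed, e.g.                                         */
/*     safe_subst_vbl(i, '$1', f(i) and for_all(i : integer, g(i)), NEW)    */
/* gives  NEW = f('$1') and for_all(i : integer, g(i)):  only the free      */
/* occurrence of i is renamed, while the i bound by the inner for_all is    */
/* left untouched.                                                          */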
/* CFR027 */ /* do_fetch_vars_in_list(EXP_LIST, LIST_OF_VARS) - return list of identifiers */ do_fetch_vars_in_list([H|T], L) :- /* CFR027 */ do_fetch_vars_in(H, Hs), /* CFR027 */ !, /* CFR027 */ do_fetch_vars_in_list(T, Ts), /* CFR027 */ append(Hs, Ts, L), /* CFR027 */ !. /* CFR027 */ do_fetch_vars_in_list([], []) :- !. /* CFR027 */ /* adjust_qbindingnames(VL) - rename any which appear in list VL */ adjust_qbindingnames([H|T]) :- /* CFR027 */ \+ qbindingname(H, _), /* CFR027 */ !, /* CFR027 */ adjust_qbindingnames(T). /* CFR027 */ adjust_qbindingnames([H|T]) :- /* CFR027 */ rename_qbinding_name(H), /* CFR027 */ !, /* CFR027 */ adjust_qbindingnames(T). /* CFR027 */ adjust_qbindingnames([]) :- !. /* CFR027 */ /* rename_qbinding_name(ATOM) - generate a "fresh" atom based on old */ rename_qbinding_name(X) :- /* CFR027 */ generate_new_qbinding_name(X, NEWX), /* CFR027 */ retract(qbindingname(X, N)), /* CFR027 */ assertz(qbindingname(NEWX, N)), /* CFR027 */ fail. /* CFR027 */ rename_qbinding_name(_) :- !. /* CFR027 */ /* generate_new_qbinding_name(OLD, NEW) - try sticking a number on the end */ generate_new_qbinding_name(OLD, NEW) :- /* CFR027 */ name(OLD, LIST), /* CFR027 */ do_generate_new_qbinding_name(LIST, 0, NEW), /* CFR027 */ !. /* CFR027 */ /* do_generate_new_qbinding_name(LIST, NUM, NEW_ATOM) - generate it */ do_generate_new_qbinding_name(LIST, NUM, NEW_ATOM) :- /* CFR027 */ name(NUM, NUMLIST), /* CFR027 */ append(LIST, NUMLIST, NEW_LIST), /* CFR027 */ name(CANDIDATE_ATOM, NEW_LIST), /* CFR027 */ !, /* CFR027 */ ( /* CFR027 */ used_ident(CANDIDATE_ATOM, _), /* CFR027 */ NEW_NUM is NUM + 1, /* CFR027 */ !, /* CFR027 */ do_generate_new_qbinding_name(LIST,NEW_NUM,NEW_ATOM) /* CFR027 */ ; /* CFR027 */ NEW_ATOM = CANDIDATE_ATOM /* CFR027 */ ), /* CFR027 */ !. /* CFR027 */ %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/newvc.pro0000644000175000017500000001135311753202340015707 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. % % ============================================================================= /*** NEWVC - load a new VC ready for a proof attempt ***/ newvc :- \+ is_vc(_X), !, nl, write('There are no VCs to prove.'), nl, fail. newvc :- check_whether_to_proceed, clear_up_could_facts, fdl_file_title(TITLE), ( command_arg(vc_number, VCNUM) ; nl, write('The following VCs have not yet been proved:'), nl, list_vcs_still_to_prove, repeat, nl, prompt_user('Which VC? 
'), rread(VCNUM) ), integer(VCNUM), makename(TITLE, VCNUM, VCNAME), ( is_vc(VCNAME) ; write('VC does not exist; try again'), fail ), !, do_vc(VCNAME), assertz(logfact(vcname, VCNAME)), retractall(current_vc(_,_)), asserta(current_vc(VCNAME,VCNUM)), retractall(step_number(_)), asserta(step_number(0)), retractall(indentation(_)), asserta(indentation(0)), retractall(current_root(_,_)), /* CFR028 */ retractall(var_const(_,_,p)), /* CFR028 */ list, !. /* DO_VC(LABEL) - clear up and try to prove VC stored as vc(LABEL,FACT) ***/ do_vc(N) :- clear_vc, retractall(case_pointer(_)), assertz(case_pointer(0)), retractall(vc_name(_)), retractall(on_case(_,_,_)), retractall(case(_,_,_)), retractall(proved_for_case(_,_)), add_vc(N), !. /*** CLEAR_VC - retract all current hypotheses and conclusions ***/ clear_vc :- retractall(hyp(_,_)), retractall(conc(_,_)), retractall(forgotten(_)), retractall(deleted(_)), retractall(deleted_hyp(_,_)), retractall(subgoal_formula(_,_,_,_)), retractall(qvar(_)), retractall(uvar(_)), nl. add_vc(N) :- vc(N,Fact), asserta(Fact), fail. add_vc(_). list_vcs_still_to_prove :- vcs_to_prove(VCLIST), tab(5), write_numbers_left(VCLIST), !. write_numbers_left([[N]]) :- print(N), nl, !. write_numbers_left([[L|REST]]) :- last(REST,U), print(L-U), nl, !. write_numbers_left([[N]|OTHERS]) :- print(N), write(', '), write_numbers_left(OTHERS), !. write_numbers_left([[L|REST]|OTHERS]) :- last(REST,U), print(L-U), write(', '), write_numbers_left(OTHERS), !. check_whether_to_proceed :- \+ conc(_,_), case_pointer(0), current_vc(_, N), vcs_to_prove(VCS), \+ still_left_to_prove(N, VCS), !. check_whether_to_proceed :- \+ conc(_,_), case_pointer(0), \+ current_vc(_, _), /* for startup case */ !. check_whether_to_proceed :- nl, write('WARNING: The proof of the current VC is incomplete, either because there'), nl, write('are still some unproven conclusions, or because you need another "done".'), nl, write('(You may wish to complete proof (with DONE?) before proceeding.)'), /* CFR002 */ nl, nl, repeat, write('Type "yes" to continue NEWVC command, "no" to continue this proof ...'), nl, /* CFR002 */ read_answer('Perform NEWVC', REPLY), /* CFR002 */ /* until */ (REPLY = yes ; REPLY = no), !, REPLY = yes. /*** still_left_to_prove(N, VCS) ***/ still_left_to_prove(N, [N]). still_left_to_prove(N, [H|_]) :- is_in(N, H). still_left_to_prove(N, [_|T]) :- still_left_to_prove(N, T). %############################################################################### %END-OF-FILE spark-2012.0.deb/checker/getdcldat.pro0000644000175000017500000002530211753202340016517 0ustar eugeneugen% ----------------------------------------------------------------------------- % (C) Altran Praxis Limited % ----------------------------------------------------------------------------- % % The SPARK toolset is free software; you can redistribute it and/or modify it % under terms of the GNU General Public License as published by the Free % Software Foundation; either version 3, or (at your option) any later % version. The SPARK toolset is distributed in the hope that it will be % useful, but WITHOUT ANY WARRANTY; without even the implied warranty of % MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General % Public License for more details. You should have received a copy of the GNU % General Public License distributed with the SPARK toolset; see file % COPYING3. If not, go to http://www.gnu.org/licenses for a complete copy of % the license. 
% % ============================================================================= process_command_line_data :- set_toplevel, !, get_command_line(DATA), !, process_dcl_args(DATA), !. set_toplevel :- command_logging(ORIGLOG), asserta(toplevel_execute(user, ORIGLOG)). process_dcl_args([]) :- !. process_dcl_args(LIST) :- split_qualifiers(LIST, QUALIFIERS), !, process_dcl_qualifiers(QUALIFIERS), !. split_qualifiers([FIRST|REST], RESULT) :- split_atom(FIRST, NEW_FIRST), !, split_qualifiers(REST, NEW_REST), !, ( NEW_FIRST = [SINGLE_ATOM], RESULT = [SINGLE_ATOM|NEW_REST] ; append(NEW_FIRST, NEW_REST, RESULT) ), !. split_qualifiers([], []) :- !. split_atom(ATOM, LIST) :- name(ATOM, ATOM_CHARS), !, ( is_in(59, ATOM_CHARS), /* i.e. ";" */ eliminate_semi_colons(ATOM_CHARS, NEW_ATOM_CHARS) ; NEW_ATOM_CHARS = ATOM_CHARS ), !, split_atom_list(NEW_ATOM_CHARS, LIST), !. eliminate_semi_colons([59|REST], [46|NEW_REST]) :- eliminate_semi_colons(REST, NEW_REST), !. eliminate_semi_colons([X|REST], [X|NEW_REST]) :- eliminate_semi_colons(REST, NEW_REST), !. eliminate_semi_colons([], []) :- !. split_atom_list(CHARS, [ATOM|LIST]) :- qualifier_prefix(QP), /* CFR048 */ ( triple_append(FIRST, [QP], REST, CHARS), /* CFR048 */ /* N.B. QP=47 -> "/" (VAX), 45 -> "-" (SUN) */ /* CFR048 */ name(ATOM, FIRST), !, split_atom_list([QP|REST], LIST) /* CFR048 */ ; name(ATOM, CHARS), LIST = [] ), !. process_dcl_qualifiers([QUALIFIER|REST]) :- process_dcl_qualifier(QUALIFIER), !, process_dcl_qualifiers(REST), !. process_dcl_qualifiers([]) :- !. process_dcl_qualifier(QUALIFIER) :- qualifier_prefix(QP), /* CFR048 */ name(QUALIFIER, [QP|CHARS]), /* CFR048 */ length(CHARS, LENGTH), LENGTH >= 1, !, ( triple_append(QUAL, [61], FILELIST, CHARS), /* N.B. chr(61) = "=" */ QUAL=[_|_], /* so >= 1 character! */ ( ( gen_append(QUAL, _, "proof_log") ; gen_append(QUAL, _, "prooflog") /* allow variant */ ), \+ done__proof_log, !, process_proof_log_file_name(FILELIST), assertz(done__proof_log) ; /* CFR014 */ gen_append(QUAL, _, "execute"), /* CFR014 */ \+ perform_script_file(_), /* CFR014 */ !, /* CFR014 */ process_execute_file_name(FILELIST) /* CFR014 */ ; ( /* CFR016 */ gen_append(QUAL, _, "command_log") /* CFR016 */ ; /* CFR016 */ gen_append(QUAL, _, "commandlog") /* CFR016 */ ), /* CFR016 */ \+ cmd_line_command_log(_), /* CFR016 */ !, /* CFR016 */ process_command_log_file_name(FILELIST) /* CFR016 */ ; scream_blue_murder ) ; gen_append(CHARS, _, "resume"), ( \+ done__resume, asserta(done__resume) ; scream_blue_murder ) ; gen_append(CHARS, _, "plain"), !, retractall(plain_output(_)), asserta(plain_output(on)) ; gen_append(CHARS, _, "overwrite_warning"), !, retractall(overwrite_warning(_)), assertz(overwrite_warning(on)) ; gen_append(CHARS, _, "version"), !, process_version ; gen_append(CHARS, _, "help"), !, process_help ; scream_blue_murder ), !. process_dcl_qualifier(QUALIFIER) :- %Does not lead with a qualifier. name(QUALIFIER, [QP|_CHARS]), \+ qualifier_prefix(QP), %Have not yet encountered a filename. \+ cmd_line_filename(_FILENAME), %Take this as the filename. assert(cmd_line_filename(QUALIFIER)), !. process_dcl_qualifier(QUALIFIER) :- %Does not lead with a qualifier. name(QUALIFIER, [QP|_CHARS]), \+ qualifier_prefix(QP), %Have already encountered a filename. cmd_line_filename(FILENAME), %Raise error. format('Error: Multiple files provided on command line: ~a and ~a.~n', [FILENAME, QUALIFIER]), scream_blue_murder, !. process_dcl_qualifier(_QUALIFIER) :- scream_blue_murder, !. 
scream_blue_murder :- nl, write('FAILED: ERROR IN COMMAND LINE SYNTAX'), nl, !, close_all_streams, halt. process_version:- display_header(user_output), close_all_streams, halt(0), !. process_help:- display_help(user_output), close_all_streams, halt(0), !. process_proof_log_file_name(LIST) :- triple_append(_FRONT, ".", EXTENSION, LIST), \+ is_in(46, EXTENSION), /* so no "." */ name(FILE, LIST), !, assertz(cmd_line_proof_log(FILE)), !. process_proof_log_file_name(LIST) :- append(LIST, ".plg", FILELIST), /* CFR048 */ name(FILE, FILELIST), !, assertz(cmd_line_proof_log(FILE)), !. process_execute_file_name(LIST) :- /* CFR014 */ triple_append(_FRONT, ".", EXTENSION, LIST), /* CFR014 */ \+ is_in(46, EXTENSION), /* so no "." */ /* CFR014 */ name(FILE, LIST), /* CFR014 */ ( /* CFR014 */ file_exists_and_is_readable(FILE) /* CFR014 */ ; /* CFR014 */ write('Aborted: '), /* CFR014 */ print(FILE), write(' does not exist or cannot be read.'), nl, /* CFR014 */ !, /* CFR014 */ close_all_streams, halt /* CFR014 */ ), /* CFR014 */ !, /* CFR014 */ asserta(perform_script_file(FILE)), /* CFR014 */ command_logging(ORIGLOG), asserta(toplevel_execute(script, ORIGLOG)), !. /* CFR014 */ process_execute_file_name(LIST) :- /* CFR014 */ append(LIST, ".cmd", FILELIST), /* CFR014 */ name(FILE, FILELIST), /* CFR014 */ ( /* CFR014 */ file_exists_and_is_readable(FILE) /* CFR014 */ ; /* CFR014 */ write('Aborted: '), /* CFR014 */ print(FILE), write(' does not exist or cannot be read.'), nl, /* CFR014 */ !, /* CFR014 */ close_all_streams, halt /* CFR014 */ ), /* CFR014 */ !, /* CFR014 */ asserta(perform_script_file(FILE)), /* CFR014 */ !. /* CFR014 */ process_command_log_file_name(LIST) :- /* CFR016 */ triple_append(_FRONT, ".", EXTENSION, LIST), /* CFR016 */ \+ is_in(46, EXTENSION), /* so no "." */ /* CFR016 */ name(FILE, LIST), /* CFR016 */ !, /* CFR016 */ assertz(cmd_line_command_log(FILE)), /* CFR016 */ !. /* CFR016 */ process_command_log_file_name(LIST) :- /* CFR016 */ append(LIST, ".cmd", FILELIST), /* CFR016,048 */ name(FILE, FILELIST), /* CFR016 */ !, /* CFR016 */ assertz(cmd_line_command_log(FILE)), /* CFR016 */ !. /* CFR016 */ /*** triple_append(L1, L2, L3, RESULT) - RESULT = L1@L2@L3 and L1,L2 <> [] ***/ triple_append(X, Y, Z, L) :- gen_append(X, T, L), X \= [], gen_append(Y, Z, T), Y \= []. %############################################################################### %END-OF-FILE
  • subtype Interrupt_Priority is Any_Priority range Priority'Last+1 .. Any_Priority'Last.

    *** Semantic Error : 776 : In SPARK95 mode, only packages Standard, System, Ada.Real_Time and Ada.Interrupts may be specified in the config file.

    In SPARK95 mode, the packages that may be specified in the target configuration file are: Standard, System, Ada.Real_Time and Ada.Interrupts. The latter two are ignored unless the Ravenscar profile is selected.

    *** Semantic Error : 777 : In package System, Priority must be an immediate subtype of Integer.

    Ada 95, and hence SPARK95, defines Priority as being an immediate subtype of Integer.

    *** Semantic Error : 778 : This identifier is not valid at this point in the target configuration file.

    The specified identifier cannot be used here; it is most probably either not valid in the target configuration file at all, or might be valid in a different package, but not here.

    *** Semantic Error : 779 : Definition of this package in the target configuration file is not allowed in SPARK83 mode.

    In SPARK83 mode, only package Standard may be specified in the target configuration file.

    *** Semantic Error : 780 : Type XXX must be private.

    This type may only be declared as private in the target configuration file.

    *** Semantic Error : 781 : The lower bound of a signed integer type declaration must be greater than or equal to System.Min_Int.

    This error can only be generated in SPARK95 mode when the configuration file specifies a value for System.Min_Int.

    *** Semantic Error : 782 : The upper bound of a signed integer type declaration must be less than or equal to System.Max_Int.

    This error can only be generated in SPARK95 mode when the configuration file specifies a value for System.Max_Int.

    *** Semantic Error : 783 : Modulus must be less than or equal to System.Max_Binary_Modulus.

    This error can only be generated in SPARK95 mode when the configuration file specifies a value for System.Max_Binary_Modulus.

    *** Semantic Error : 784 : System.Max_Binary_Modulus must be a positive power of 2.

    *** Semantic Error : 785 : The number of digits specified exceeds the value defined for System.Max_Digits.

    The maximum decimal precision for a floating point type, where a range specification has not been included, is defined by System.Max_Digits.

    *** Semantic Error : 786 : The number of digits specified exceeds the value defined for System.Max_Base_Digits.

    The maximum decimal precision for a floating point type, where a range specification has been included, is defined by System.Max_Base_Digits.
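
    As an illustrative sketch (the type names are arbitrary), the two forms of floating point declaration governed by these two checks are:

        type Probability is digits 6 range 0.0 .. 1.0;   -- with a range: 6 is checked against System.Max_Base_Digits
        type Measure     is digits 6;                    -- without a range: 6 is checked against System.Max_Digits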

    *** Semantic Error : 787 : Digits value must be positive.

    *** Semantic Error : 788 : Delta value must be positive.

    *** Semantic Error : 789 : The only currently supported type attribute in this context is 'Base.

    *** Semantic Error : 790 : A base type assertion requires a type here.

    *** Semantic Error : 791 : The base type in this assertion must be a predefined type.

    Predefined types are those defined either by the language, or in package Standard, using the configuration file mechanism.

    *** Semantic Error : 792 : The types in this assertion must both be either floating point or signed integer.

    *** Semantic Error : 793 : This base type must have a defined range in the configuration file.

    If a predefined type is to be used in a base type assertion or in a derived type declaration, then it must appear in the configuration file and have a well-defined range.
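
    A sketch of a base type assertion, assuming the usual SPARK annotation syntax and an arbitrary type name; here Integer must have a defined range in the configuration file for the assertion to be accepted:

        type Index is range 1 .. 1000;
        --# assert Index'Base is Integer;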

    *** Semantic Error : 794 : Range of subtype exceeds range of base type.

    *** Semantic Error : 795 : A base type assertion must be in the same declarative region as that of the full type definition.

    *** Semantic Error : 796 : This type already has a base type: either it already has a base type assertion, or is explicitly derived, or is a predefined type.

    A base type assertion can only be given exactly once. Explicitly derived scalar types and predefined types never need a base type assertion.

    *** Semantic Error : 797 : The base type in a floating point base type assertion must have a defined accuracy.

    *** Semantic Error : 798 : The accuracy of the base type in a base type assertion must be at least that of the type which is the subject of the assertion.

    *** Semantic Error : 799 : Only a simple type can be the subject of a base type assertion.

    *** Semantic Error : 800 : Modulus must be a positive power of 2.

    In SPARK, modular types must have a modulus which is a positive power of 2.
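
    For example (the type names are arbitrary):

        type Byte  is mod 2**8;    -- accepted: the modulus is a positive power of 2
        -- type Digit is mod 10;   -- would be rejected with this error: 10 is not a power of 2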

    *** Semantic Error : 801 : Modular types may only be used in SPARK95.

    Ada83 (and hence SPARK83) does not include modular types.

    *** Semantic Error : 803 : Unary arithmetic operators are not permitted for modular types.

    Unary arithmetic operators are of little value. The "abs" and "+" operators have no effect for modular types, and so are not required. The unary minus operator is a source of potential confusion, and so is not permitted in SPARK.

    *** Semantic Error : 804 : Universal expression may not be implicitly converted to a modular type here. Left hand operand requires qualification to type XXX.

    A universal expression cannot be used as the left hand operand of a binary operator if the right hand operand is of a modular type. Qualification of the left hand expression is required in this case.

    *** Semantic Error : 805 : Universal expression may not be implicitly converted to a modular type here. Right hand operand requires qualification to type XXX.

    A universal expression cannot be used as the right hand operand of a binary operator if the left hand operand is of a modular type. Qualification of the right hand expression is required in this case.
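
    A sketch of the qualification these messages ask for (the type and constant names are arbitrary):

        type Byte is mod 2**8;
        Mask : constant Byte := 16#0F#;
        Both : constant Byte := Byte'(16#F0#) or Mask;   -- universal literal qualified to Byte: accepted
        -- Bad : constant Byte := 16#F0# or Mask;        -- unqualified universal left hand operand: error 804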

    *** Semantic Error : 806 : Universal expression may not be implicitly converted to a modular type here. Right hand operand requires qualification.

    A universal expression cannot be used as operand of an unary "not" operator if no type can be determined from the context of the expression. Qualification of the operand is required in this case.

    *** Semantic Error : 814 : Default_Bit_Order must be of type Bit_Order.

    The only possible type for the constant System.Default_Bit_Order is System.Bit_Order when it appears in the configuration file.

    *** Semantic Error : 815 : The only allowed values of Default_Bit_Order are Low_Order_First and High_Order_First.

    System.Bit_Order is implicitly declared in package System when a configuration file is given. This is an enumeration type with only two literals: Low_Order_First and High_Order_First.
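
    As a sketch only, assuming the target configuration file gives such constants in an Ada-like specification of package System, the permitted form would look like this:

        package System is
           Default_Bit_Order : constant Bit_Order := Low_Order_First;   -- Bit_Order itself is implicitly declared
        end System;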

    *** Semantic Error : 820 : Abstract types are not currently permitted in SPARK.

    Only non-abstract tagged types are currently supported. It is hoped to lift this restriction in a future Examiner release.

    *** Semantic Error : 821 : This type declaration must be a tagged record because its private type is tagged.

    If a type is declared as "tagged private" then its full declaration must be a tagged record.

    *** Semantic Error : 822 : XXX is not a tagged type; only tagged types may be extended.

    In SPARK, "new" can only be used to declare a type extension; other derived types are not permitted.

    *** Semantic Error : 823 : This type may not be extended in the same package in which it is declared.

    SPARK only permits types from another library package to be extended. This rule prevents overloading of inherited operations.
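
    A minimal sketch, with arbitrary package and type names: the root type lives in one library package and is extended in another:

        package Roots is
           type Root is tagged record
              A : Integer;
           end record;
        end Roots;

        with Roots;
        --# inherit Roots;
        package Extensions is
           type Child is new Roots.Root with record
              B : Integer;
           end record;
        end Extensions;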

    *** Semantic Error : 824 : This package already extends a type from package XXX. Only one type extension per package is permitted.

    SPARK only permits one type extension per package. This rule prevents overloading of inherited operations.

    *** Semantic Error : 825 : Type XXX expected in order to complete earlier private extension.

    Since SPARK only permits one type extension per package it follows that the declaration "new XXX with private" in a package visible part must be paired with "new XXX with record..." in its private part. The ancestor type XXX must be the same in both declarations.

    *** Semantic Error : 826 : Type extension is not permitted in SPARK 83.

    Type extension is an Ada 95 feature not included in Ada or SPARK 83.

    *** Semantic Error : 827 : The actual parameter associated with a tagged formal parameter in an inherited operation must be an object not an expression.

    There are several reasons for this SPARK rule. Firstly, Ada requires tagged parameters to be passed by reference and so an object must exist at least implicitly. Secondly, in order to perform flow analysis of inherited subprogram calls, the Examiner needs to identify what subset of the information available at the point of call is passed to and from the called subprogram. Since information can only flow through objects, it follows that the actual parameter must be an object.

    *** Semantic Error : 828 : Tagged types and tagged type extensions may only be declared in library-level package specifications.

    This SPARK rule facilitates the main uses of tagged types while greatly simplifying visibility rules.

    *** Semantic Error : 829 : Illegal re-declaration: this subprogram shares the same name as the inheritable root operation XXX but does not override it.

    To avoid overloading, SPARK prohibits more than one potentially visible subprogram having the same name.

    *** Semantic Error : 830 : A private type may not be implemented as a tagged type or an extension of a tagged type.

    This rule means that a private type can only be implemented as a tagged type if the private type itself is tagged.

    *** Semantic Error : 831 : Extended tagged types may only be converted in the direction of their root type.

    This is an Ada rule: type conversions simply omit unused fields of the extended type. It follows that conversions must be in the direction of the root type.

    *** Semantic Error : 832 : Only tagged objects, not expressions, may be converted.

    For flow analysis purposes the Examiner needs to know what subset of the information in the unconverted view is available in the converted view. Since information can only flow through objects it follows that only objects can be converted.

    *** Semantic Error : 833 : Invalid record aggregate: type XXX has a private ancestor.

    If an extended type has a private ancestor then an extension aggregate must be used rather than a normal aggregate.

    *** Semantic Error : 834 : Null records are only permitted if they are tagged.

    An empty record can have no use in a SPARK program other than as a root type from which other types can be derived and extended. For this reason, null records are only allowed if they are tagged.
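
    For example (the type name is arbitrary):

        type Root is tagged null record;    -- accepted: a tagged null record, usable as a root for extension
        -- type Empty is null record;       -- would be rejected: an untagged null record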

    *** Semantic Error : 835 : XXX is not an extended tagged record type.

    An extension aggregate is only appropriate if the record type it is defining is an extended record. A normal aggregate should be used for other record (and array) types.

    *** Semantic Error : 836 : This expression does not represent a valid ancestor type of the aggregate XXX.

    The expression before the reserved word "with" must be of an ancestor type of the overall aggregate type. In SPARK, the ancestor expression may not be a subtype mark.
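
    A sketch, assuming Root is a tagged record with a field A and Child (declared in another package) extends it with a field B:

        R : constant Root  := Root'(A => 1);
        C : constant Child := Child'(R with B => 2);   -- ancestor part is an expression, not a subtype mark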

    *** Semantic Error : 837 : Invalid record aggregate: there is a private ancestor between the type of this expression and the type of the aggregate XXX.

    The ancestor type can be a tagged type with a private extension; however, there must be no private extensions between the ancestor type and the type of the aggregate.

    *** Semantic Error : 838 : Incomplete aggregate: null record cannot be used here because fields in XXX require values.

    The aggregate form "with null record" can only be used if the type of the aggregate is a null record extension of the ancestor type. If any fields are added between the ancestor type and the aggregate type then values need to be supplied for them so "null record" is inappropriate.

    *** Semantic Error : 839 : This package already contains a root tagged type or tagged type extension. Only one such declaration per package is permitted.

    SPARK permits one root tagged type or one tagged type extension per package, but not both. This rule prevents the declaration of illegal operations with more than one controlling parameter.

    *** Semantic Error : 840 : A tagged or extended type may not appear here. SPARK does not permit the declaration of primitive functions with controlling results.

    A primitive function controlled by its return result would be almost unusable in SPARK because a data flow error would occur wherever it was used.

    *** Semantic Error : 841 : The return type in the declaration of this function contained an error. It is not possible to check the validity of this return type.

    Issued when there is an error in the return type on a function's initial declaration. In this situation we cannot be sure what return type is expected in the function's body. It would be misleading to simply report a type mismatch since the types might match perfectly and both be wrong. Instead, the Examiner reports the above error and refuses to analyse the function body until its specification is corrected.

    *** Semantic Error : 842 : Pragma Atomic_Components is not permitted in SPARK when the Ravenscar profile is selected.

    *** Semantic Error : 843 : Pragma Volatile_Components is not permitted in SPARK when the Ravenscar profile is selected.

    *** Semantic Error : 844 : Missing or contradictory overriding_indicator for operation XXX. This operation successfully overrides its parent operation.

    In SPARK2005, an operation which successfully overrides a parent operation must be specified as Overriding.
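
    For example, in the package that declares the type extension (the names are arbitrary):

        overriding
        procedure Reset (X : in out Child);    -- Reset is inherited from the parent type and overridden here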

    *** Semantic Error : 845 : Subprogram XXX does not successfully override a parent operation.

    In SPARK2005, an overriding operation must successfully override an operation inherited from the parent.

    *** Semantic Error : 850 : This construct may only be used when the Ravenscar profile is selected.

    Support for concurrent features of the SPARK language, including protected objects, tasking, etc. are only supported when the Ravenscar profile is selected.

    *** Semantic Error : 851 : The parameter to pragma Atomic must be a simple_name.

    The parameter to pragma Atomic must be a simple_name and may not be passed using a named association.

    *** Semantic Error : 852 : pragma Atomic may only appear in the same immediate scope as the type to which it applies.

    This is an Ada rule (pragma Atomic takes a local name; see LRM 13.1(1)). Note that this precludes the use of pragma Atomic on a predefined type.

    *** Semantic Error : 853 : pragma Atomic may only apply to a scalar base type, or to a non-tagged record type with exactly 1 field that is a predefined scalar type.

    pragma Atomic may only be applied to base types that are scalar (i.e. enumeration types, integer types, real types or modular types), or to a non-tagged record type with a single field which is a predefined scalar type, such as Integer, Character or Boolean. As an additional special case, a record type with a single field of type System.Address is also allowed.
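
    Two declarations that satisfy this rule, as a sketch (the names are arbitrary):

        type Switch is (Off, On);
        pragma Atomic (Switch);                -- a scalar base type

        type Port is record
           Value : Integer;                    -- exactly one field, of a predefined scalar type
        end record;
        pragma Atomic (Port);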

    *** Semantic Error : 854 : pragma Atomic takes exactly one parameter.

    *** Semantic Error : 855 : The type of own variable XXX is not consistent with its modifier.

    An own variable with a task modifier must be of a task type. A task own variable must have the task modifier. An own variable with a protected modifier must be a protected object, a suspension object or an object of a type to which pragma Atomic applies. A protected or suspension object own variable must have the protected modifier.

    *** Semantic Error : 858 : A variable that appears in a protects property list may not appear in a refinement clause.

    A variable in a protects list is effectively protected and hence cannot be refined.

    *** Semantic Error : 859 : A protected own variable may not appear in a refinement clause.

    Protected state cannot be refined or be used as refinement constituents.

    *** Semantic Error : 860 : Own variable XXX appears in a protects list and hence must appear in the initializes clause.

    Protected state (including all refinement constituents) must be initialized.

    *** Semantic Error : 861 : Both abstract own variable XXX and refinement constituent YYY must have an Integrity property.

    If an abstract own variable has an Integrity property, then so must all its refinement constituents, and vice-versa.

    *** Semantic Error : 862 : Both abstract own variable XXX and refinement constituent YYY must have the same Integrity value.

    If both an abstract own variable and a refinement constituent have Integrity properties specified, then the value of the Integrity must be the same.

    *** Semantic Error : 863 : Own variable XXX is protected and may not appear in an initializes clause.

    Protected own variables must always be initialized, and should not appear in initializes annotations.

    *** Semantic Error : 864 : Unexpected initialization specification - all own variables of this package are either implicitly initialized, or do not require initialization.

    An own variable initialization clause and that of its refinement constituents must be consistent.

    *** Semantic Error : 865 : Field XXX is part of the ancestor part of this aggregate and does not require a value here.

    An extension aggregate must supply values for all fields that are part of the overall aggregate type but not those which are part of the ancestor part.

    *** Semantic Error : 866 : The expression in a delay_until statement must be of type Ada.Real_Time.Time.

    When the Ravenscar Profile is selected, the delay until statement may be used. The argument of this statement must be of type Ada.Real_Time.Time.
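
    A sketch of a legal delay statement inside a task body, assuming Wake_Up is a variable of type Ada.Real_Time.Time and context clauses are omitted:

        Wake_Up := Ada.Real_Time.Clock;    -- a direct assignment, as required by error 960
        delay until Wake_Up;               -- the expression is of type Ada.Real_Time.Time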

    *** Semantic Error : 867 : Subprogram XXX contains a delay statement but does not have a delay property.

    Any subprogram that may call delay until must have a delay property in a declare annotation. Your subprogram is directly or indirectly making a call to delay until.

    *** Semantic Error : 868 : Protected object XXX may only be declared immediately within a library package.

    This error message is issued if a type mark representing a protected type appears anywhere other than in a library-level variable declaration or library-level own variable type announcement.

    *** Semantic Error : 869 : Protected type XXX already contains an Entry declaration; only one Entry is permitted.

    The Ravenscar profile prohibits a protected type from declaring more than one entry.

    *** Semantic Error : 870 : Protected type XXX does not have any operations, at least one operation must be declared.

    A protected type which provides no operations can never be used so SPARK requires the declaration of at least one.

    *** Semantic Error : 871 : A type can only be explicitly derived from a predefined Integer or Floating Point type or from a tagged record type.

    *** Semantic Error : 872 : Variable XXX is not protected; only protected items may be globally accessed by protected operations.

    In order to avoid the possibility of shared data corruption, SPARK prohibits protected operations from accessing unprotected data items.

    *** Semantic Error : 873 : This subprogram requires a global annotation which references the protected type name XXX.

    In order to statically-detect certain bounded errors defined by the Ravenscar profile, SPARK requires every visible operation of protected type to globally reference the abstract state of the type.

    *** Semantic Error : 874 : Protected state XXX must be initialized at declaration.

    Because there is no guarantee that a concurrent thread that initializes a protected object will be executed before one that reads it, the only way we can be sure that a protected object is properly initialized is to do so at the point of declaration. You have either declared some protected state and not included an initialization or you have tried to initialize some protected state in package body elaboration.

    *** Semantic Error : 875 : Protected type expected; access discriminants may only refer to protected types in SPARK.

    Access discriminants have been allowed in SPARK solely to allow devices made up of co-operating Ravenscar-compliant units to be constructed. For this reason only protected types may appear in access discriminants.

    *** Semantic Error : 876 : This protected type or task declaration must include either a pragma Priority or pragma Interrupt_Priority.

    To allow the static detection of certain bounded errors defined by the Ravenscar profile, SPARK requires an explicitly-set priority for each protected type, task type or object of those types. The System.Default_Priority may be used explicitly, provided package System has been defined in the configuration file.
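
    A sketch of a protected type declaration with an explicit priority (the name and the value are arbitrary):

        protected type Counter_Type is
           pragma Priority (10);              -- first item in the declaration
           procedure Increment;
           function Value return Integer;
        private
           Count : Integer := 0;
        end Counter_Type;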

    *** Semantic Error : 877 : Priority values require an argument which is an expression of type integer.

    *** Semantic Error : 878 : This protected type declaration contains a pragma Attach_Handler and must therefore also include a pragma Interrupt_Priority.

    To allow the static detection of certain bounded errors defined by the Ravenscar profile, SPARK requires an explicitly-set priority for each protected type or object. The System.Default_Priority may be used explicitly, provided package System has been defined in the configuration file.

    *** Semantic Error : 879 : Unexpected pragma XXX: this pragma may not appear here.

    pragma Interrupt_Priority must be the first item in a protected type declaration or task type declaration; pragma Priority must be the first item in a protected type declaration, task type declaration or the main program.

    *** Semantic Error : 880 : Pragma Priority or Interrupt_Priority expected here.

    Issued when a pragma other than Priority or Interrupt_Priority appears as the first item in a protected type or task type declaration.

    *** Semantic Error : 881 : The priority of XXX must be in the range YYY.

    See LRM D.1(17).

    *** Semantic Error : 882 : Integrity property requires an argument which is an expression of type Natural.

    *** Semantic Error : 883 : Pragma Interrupt_Handler may not be used; SPARK does not support the dynamic attachment of interrupt handlers [LRM C3.1(9)].

    Interrupt_Handler is of no use unless dynamic attachment of interrupt handlers is to be used.

    *** Semantic Error : 884 : Pragma Attach_Handler is only permitted immediately after the corresponding protected procedure declaration in a protected type declaration.

    Pragma Attach_Handler may only be used within a protected type declaration. Furthermore, it must immediately follow a protected procedure declaration with the same name as the first argument to the pragma.
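
    A sketch of the required placement; Handle and the interrupt name are placeholders (interrupt names are implementation defined):

        protected type Device_Handler is
           pragma Interrupt_Priority (System.Interrupt_Priority'Last);
           procedure Handle;
           pragma Attach_Handler (Handle, Ada.Interrupts.Names.Device_Interrupt);   -- immediately after Handle
        private
           Triggered : Boolean := False;
        end Device_Handler;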

    *** Semantic Error : 885 : Pragma Attach_Handler may only be applied to a procedure with no parameters.

    See LRM C.3.1(5).

    *** Semantic Error : 887 : A discriminant may only appear alone, not in an expression.

    Issued when a task or protected type priority is set using an expression involving a discriminant. The use of such an expression greatly complicates the static evaluation of the priority of task or protected subtypes thus preventing the static elimination of certain Ravenscar bounded errors.

    *** Semantic Error : 888 : Unexpected Delay, XXX already has a Delay property.

    A procedure may only have a maximum of one delay annotation.

    *** Semantic Error : 889 : The own variable XXX must have the suspendable property.

    The type used to declare this object must be a protected type with an entry or a suspension object type.

    *** Semantic Error : 890 : The name XXX already appears in the suspends list.

    Items may not appear more than once in a suspends list.

    *** Semantic Error : 891 : Task type or protected type required.

    Issued in a subtype declaration where the constraint is a discriminant constraint. Only task and protected types may take a discriminant constraint as part of a subtype declaration.

    *** Semantic Error : 892 : Array type, task type or protected type required.

    Issued in a subtype declaration where the constraint is either a discriminant constraint or an index constraint (these two forms cannot always be distinguished syntactically). Only task and protected types may take a discriminant constraint and only array types may take an index constraint as part of a subtype declaration.

    *** Semantic Error : 893 : Number of discriminant constraints differs from number of known discriminants of type XXX.

    Issued in a subtype declaration if too many or too few discriminant constraints are supplied.
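
    A sketch of a discriminant constraint that matches the single known discriminant (the names and values are arbitrary):

        task type Worker (Prio : System.Priority) is
           pragma Priority (Prio);
        end Worker;

        subtype High_Worker is Worker (Prio => 20);   -- exactly one constraint for one discriminant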

    *** Semantic Error : 894 : Only variables of a protected type may be aliased.

    SPARK supports the keyword aliased in variable declarations only so that protected and task types can support access discriminants. Since it has no other purpose it may not be used except in a protected object declaration.

    *** Semantic Error : 895 : Attribute Access may only be applied to variables which are declared as aliased, variable XXX is not aliased.

    This is a slightly annoying Ada issue. Marking a variable as aliased prevents it being placed in a register which would make pointing at it hazardous; however, SPARK only permits 'Access on protected types which are limited and therefore always passed by reference anyway and immune from register optimization. Requiring aliased on protected objects that will appear in discriminant constraints is therefore unwanted syntactic sugar only.

    *** Semantic Error : 896 : The task type XXX does not have an associated body.

    Issued at the end of a package body if a task type declared in its specification contains neither a body nor a body stub for it.

    *** Semantic Error : 897 : The protected type XXX does not have an associated body.

    Issued at the end of a package body if a protected type declared in its specification contains neither a body nor a body stub for it.

    *** Semantic Error : 898 : XXX is not a protected or task type which requires a body.

    Issued if a body or body stub for a task or protected type is encountered and there is no matching specification.

    *** Semantic Error : 899 : A body for type XXX has already been declared.

    Issued if a body or body stub for a task or protected type is encountered and an earlier body has already been encountered.

    *** Semantic Error : 901 : Suspension object XXX may only be declared immediately within a library package specification or body.

    Suspension objects must be declared at library level. They cannot be used in protected type state or as local variables in subprograms.

    *** Semantic Error : 902 : Recursive use of typemark XXX in known discriminant.

    *** Semantic Error : 903 : Protected or suspension object types cannot be used to declare constants.

    Protected and suspension objects are used to ensure integrity of shared objects. If it is necessary to share constant data then these constructs should not be used.

    *** Semantic Error : 904 : Protected or suspension objects cannot be used as subprogram parameters.

    SPARK does not currently support this feature.

    *** Semantic Error : 905 : Protected or suspension objects cannot be returned from functions.

    SPARK does not currently support this feature.

    *** Semantic Error : 906 : Protected or suspension objects cannot be used in composite types.

    Protected and suspension objects cannot be used in record or array structures.

    *** Semantic Error : 907 : Delay until must be called from a task or unprotected procedure body.

    You are calling delay until from an invalid construct. Any construct that calls delay until must have a delay property in the declare annotation. The construct must be either a task body or an unprotected procedure body.

    *** Semantic Error : 908 : Blocking properties are not allowed in protected scope.

    Procedures in protected scope must not block and therefore blocking properties are prohibited.

    *** Semantic Error : 909 : Object XXX cannot suspend.

    You are either applying the suspendable property to an own variable that cannot suspend or you have declared a variable (whose own variable has the suspendable property) which cannot suspend. Or you have used an item in a suspends list that does not have the suspendable property. An object can only suspend if it is a suspension object or a protected type with an entry.

    *** Semantic Error : 910 : Name XXX must appear in the suspends list property for the enclosing unit.

    Protected entry calls and calls to Ada.Synchronous_Task_Control.Suspend_Until_True may block the currently executing task. SPARK requires you announce this fact by placing the actual callee name in the suspends list for the enclosing unit.

    *** Semantic Error : 911 : The argument in pragma Priority for the main program must be an integer literal or a local constant of static integer value.

    If the main program priority is not an integer literal then you should declare a constant that has the required value in the declarative part of the main program prior to the position of the pragma.
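
    A sketch of the second form described above: the lines below would appear at the start of the main program's declarative part (the constant name and value are arbitrary):

        Main_Priority : constant := 10;      -- a local constant with a static integer value
        pragma Priority (Main_Priority);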

    *** Semantic Error : 912 : This call contains a delay property that is not propagated to the enclosing unit.

    The call being made has a declare annotation that contains a delay property. SPARK requires that this property is propagated up the call chain and hence must appear in a declare annotation for the enclosing unit.

    *** Semantic Error : 913 : This call has a name in its suspends list which is not propagated to the enclosing unit.

    The call being made has a declare annotation that contains a suspends list. SPARK requires that the entire list is propagated up the call chain and hence must appear in a declare annotation for the enclosing unit.

    *** Semantic Error : 914 : The name XXX specified in the suspends list can never be called.

    You have specified the name of a protected or suspension object in the suspends list that can never be called by this procedure or task.

    *** Semantic Error : 915 : Procedure XXX has a delay property but cannot delay.

    You have specified a delay property for this procedure but delay until can never be called from it.

    *** Semantic Error : 916 : Protected object XXX has a circular dependency in subprogram YYY.

    The type of the protected object mentions the protected object name in the derives list for the given subprogram.

    *** Semantic Error : 917 : Procedure XXX cannot be called from a protected action.

    The procedure being called may block and hence cannot be called from a protected action.

    *** Semantic Error : 918 : The delay property is not allowed for XXX.

    The delay property may only be applied to a procedure.

    *** Semantic Error : 919 : The priority property is not allowed for XXX.

    The priority property can only be applied to protected own variables which are type announced. If the type has been declared it must be a protected type.

    *** Semantic Error : 920 : The suspends property is not allowed for XXX.

    The suspends property may only be applied to task type specifications and procedures.

    *** Semantic Error : 921 : The identifier XXX is not recognised as a component of a property list.

    The property list can only specify the reserved word delay, suspends or priority.

    *** Semantic Error : 922 : The own variable XXX must have the priority property.

    In order to perform the ceiling priority checks the priority property must be given to all own variables of protected type.

    *** Semantic Error : 923 : The procedure XXX cannot be called from a function as it has a blocking side effect.

    Blocking is seen as a side effect and hence procedures that potentially block cannot be called from functions.

    *** Semantic Error : 924 : The suspendable property is not allowed for XXX.

    Objects that suspend must be declared as own protected variables.

    *** Semantic Error : 925 : The own variable or task XXX must have a type announcement.

    Own variables of protected type and own tasks must have a type announcement.

    *** Semantic Error : 926 : Illegal declaration of task XXX. Task objects must be declared at library level.

    Task objects must be declared in library level package specifications or bodies.

    *** Semantic Error : 927 : The own task annotation for this task is missing the name XXX in its suspends list.

    The task type declaration has name XXX in its list and this must appear in the own task annotation.

    *** Semantic Error : 928 : Private elements are not allowed for protected type XXX.

    Protected type XXX has been used to declare a protected, moded own variable. Protected, moded own variables are refined onto a set of virtual elements with the same mode. As such private elements are not allowed.

    *** Semantic Error : 929 : Unexpected declare annotation. Procedure XXX should have the declare annotation on the specification.

    Declare annotations cannot appear on the procedure body if it appears on the procedure specification.

    *** Semantic Error : 930 : Task XXX does not appear in the own task annotation for this package.

    A task has been declared that is not specified as an own task of the package.

    *** Semantic Error : 931 : Task XXX does not have a definition.

    A task name appears in the own task annotation for this package but is never declared.

    *** Semantic Error : 932 : The priority for protected object XXX does not match that given in the own variable declaration.

    The priority given in the priority property must match that given in the protected type.

    *** Semantic Error : 933 : A pragma Priority is required for the main program when Ravenscar Profile is enabled.

    When SPARK profile Ravenscar is selected, all tasks, protected objects and the main program must explicitly be assigned a priority.

    *** Semantic Error : 934 : Priority ceiling check failure: the priority of YYY is less than that of XXX.

    The active priority of a task is the higher of its base priority and the ceiling priorities of all protected objects that it is executing. The active priority at the point of a call to a protected operation must not exceed the ceiling priority of the callee.

    *** Semantic Error : 935 : The own variable XXX must have the interrupt property.

    An own variable has been declared using a protected type with a pragma attach handler. Such objects are used in interrupt processing and must have the interrupt property specified in their own variable declaration.

    *** Semantic Error : 936 : The interrupt property is not allowed for XXX.

    The interrupt property can only be applied to protected own variables that are type announced. If the type is declared then it must be a protected type that contains an attach handler.

    *** Semantic Error : 937 : The protects property is not allowed for XXX.

    The protects property can only be applied to protected own variables that are type announced. If the type is declared then it must be a protected type.

    *** Semantic Error : 938 : The unprotected variable XXX is shared by YYY and ZZZ.

    XXX is an unprotected variable that appears in the global list of the threads YYY and ZZZ. Unprotected variables cannot be shared between threads in SPARK. A thread is one of: the main program, a task, an interrupt handler.

    *** Semantic Error : 939 : The suspendable item XXX is referenced by YYY and ZZZ.

    XXX is an own variable with the suspends property that appears in the suspends list of the threads YYY and ZZZ. SPARK prohibits this to prevent more than one thread being suspended on the same item at any one time. A thread is one of: the main program, a task, an interrupt handler.

    *** Semantic Error : 940 : XXX is a protected own variable. Protected variables may not be used in proof contexts.

    The use of protected variables in pre and postconditions or other proof annotations is not (currently) supported. Protected variables are volatile because they can be changed at any time by another program thread and this may invalidate some common proof techniques. The prohibition of protected variables does not prevent proof of absence of run-time errors nor proof of protected operation bodies. See the manual "SPARK Proof Manual" for more details.

    *** Semantic Error : 941 : The type of own variable XXX must be local to this package.

    The type used to announce an own variable with a protects property must be declared in the same package.

    *** Semantic Error : 942 : Only one instance of the type XXX is allowed.

    Type XXX has a protects property. This means there can be only one object in the package that has this type or any subtype of this type.

    *** Semantic Error : 943 : The name XXX cannot appear in a protects list.

    All items in a protects list must be unprotected own variables owned by this package.

    *** Semantic Error : 944 : The name XXX is already protected by YYY.

    The name XXX appears in more than one protects list. The first time it appeared was for own variable YYY. XXX should appear in at most one protects list.

    *** Semantic Error : 945 : The property XXX must be given a static expression for its value.

    This property can only accept a static expression.

    *** Semantic Error : 946 : The own variable XXX must only ever be accessed from operations in protected type YYY.

    The own variable XXX is protected by the protected type YYY and hence must never be accessed from anywhere else.

    *** Semantic Error : 947 : The own variable XXX appears in a protects list for type YYY but is not used in the body.

    The protected type YYY claims to protect XXX via a protects property. However, the variable XXX is not used by any operation in YYY.

    *** Semantic Error : 948 : The type of own variable or task XXX must be a base type.

    Own tasks and protected own variables of a protected type must be announced using the base type. The subsequent variable declaration may be a subtype of the base type.

    *** Semantic Error : 949 : Unexpected partition annotation: a global annotation may only appear here when the Ravenscar profile is selected.

    When the sequential SPARK profile is selected, the global and derives annotation on the main program describes the entire program's behaviour. No additional, partition annotation is required or permitted. Note that an annotation must appear here if the Ravenscar profile is selected.

    *** Semantic Error : 950 : Partition annotation expected: a global and, optionally, a derives annotation must appear after 'main_program' when the Ravenscar profile is selected.

    When the Ravenscar profile is selected the global and derives annotation on the main program describes the behaviour of the environment task only, not the entire program. An additional annotation, called the partition annotation, is required to describe the entire program's behaviour; this annotation follows immediately after 'main_program;'.

    *** Semantic Error : 951 : Inherited package XXX contains tasks and/or interrupt handlers and must therefore appear in the preceding WITH clause.

    In order to ensure that a Ravenscar program is complete, SPARK requires that all 'active' packages inherited by the environment task also appear in a corresponding with clause. This check ensures that any program entities described in the partition annotation are also linked into the program itself.

    *** Semantic Error : 952 : Subprogram XXX is an interrupt handler and cannot be called.

    Interrupt handler operations cannot be called.

    *** Semantic Error : 953 : Interrupt property error for own variable YYY. XXX is not an interrupt handler in type ZZZ.

    The handler names in an interrupt property must match one in the protected type of the own variable.

    *** Semantic Error : 954 : Interrupt property error for own variable XXX. Interrupt stream name YYY is illegal.

    The stream name must be unprefixed and not already in use within the scope of the package.

    *** Semantic Error : 955 : XXX can only appear in the partition wide flow annotation.

    Interrupt stream variables are used only to enhance the partition wide flow annotation and must not be used elsewhere.

    *** Semantic Error : 956 : XXX already appears as an interrupt handler in the interrupt mappings.

    An interrupt handler can be mapped onto exactly one interrupt stream variable. An interrupt stream variable may be mapped onto many interrupt handlers.

    *** Semantic Error : 957 : Consecutive updates of protected variable XXX are disallowed when they do not depend directly on its preceding value.

    A protected variable cannot be updated without direct reference to its preceding value more than once within a subprogram or task. Each update of a protected variable may have a wider effect than just the change of value of the protected variable. The overall change is considered to be the accumulation of all updates and reads of the protected variable, and to preserve this information flow successive updates must directly depend on the preceding value of the variable.

    *** Semantic Error : 958 : A task may not import the unprotected state XXX.

    A task may not import unprotected state unless it is mode in. This is because under the concurrent elaboration policy, the task cannot rely on the state being initialized before it is run.

    *** Semantic Error : 959 : Unprotected state XXX is exported by a task and hence must not appear in an initializes clause.

    Own variable XXX is being accessed by a task. The order in which the task is run and the own variable initialized is non-deterministic under a concurrent elaboration policy. In this case SPARK forces the task to perform the initialization and as such the own variable must not appear in an initializes clause.

    *** Semantic Error : 960 : The function Ada.Real_Time.Clock can only be used directly (1) in an assignment or return statement or (2) to initialize a library-level constant.